From f402090c1ebec9601e5fef6e45879d3a0a015dbd Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 26 Oct 2023 21:44:32 +0530 Subject: [PATCH 001/792] feat(ingest): support view lineage for all sqlalchemy sources (#9039) --- metadata-ingestion/setup.py | 52 +- .../src/datahub/configuration/common.py | 2 +- .../datahub/emitter/sql_parsing_builder.py | 5 +- .../api/incremental_lineage_helper.py | 13 +- .../src/datahub/ingestion/api/source.py | 1 + .../ingestion/source/dbt/dbt_common.py | 5 + .../source/snowflake/snowflake_lineage_v2.py | 14 +- .../src/datahub/ingestion/source/sql/hive.py | 83 ++- .../datahub/ingestion/source/sql/postgres.py | 20 +- .../ingestion/source/sql/sql_common.py | 126 +++- .../ingestion/source/sql/sql_config.py | 19 +- .../datahub/ingestion/source/sql/teradata.py | 54 +- .../source/sql/two_tier_sql_source.py | 6 +- .../datahub/ingestion/source/sql/vertica.py | 2 +- .../source/state/stateful_ingestion_base.py | 3 +- .../ingestion/source_config/sql/snowflake.py | 12 +- .../src/datahub/utilities/sqlglot_lineage.py | 21 +- .../hive/hive_mces_all_db_golden.json | 581 +++++++++++++++--- .../integration/hive/hive_mces_golden.json | 530 ++++++++++++++-- .../tests/integration/hive/hive_setup.sql | 22 +- .../mysql/mysql_mces_no_db_golden.json | 272 ++++++-- .../postgres_all_db_mces_with_db_golden.json | 324 ++++++++-- ..._db_to_file_with_db_estimate_row_count.yml | 2 +- .../postgres_mces_with_db_golden.json | 264 +++++++- ...res_to_file_with_db_estimate_row_count.yml | 2 +- .../snowflake/test_snowflake_failures.py | 3 +- .../trino/trino_hive_mces_golden.json | 211 +++++-- .../test_incremental_lineage_helper.py | 21 + 28 files changed, 2193 insertions(+), 477 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 0b8661b0df5f5..7f7826abe2095 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -101,22 +101,36 @@ "grpcio-tools>=1.44.0,<2", } -sql_common = { - # Required for all SQL sources. - # This is temporary lower bound that we're open to loosening/tightening as requirements show up - "sqlalchemy>=1.4.39, <2", - # Required for SQL profiling. - "great-expectations>=0.15.12, <=0.15.50", - # scipy version restricted to reduce backtracking, used by great-expectations, - "scipy>=1.7.2", - # GE added handling for higher version of jinja2 - # https://github.com/great-expectations/great_expectations/pull/5382/files - # datahub does not depend on traitlets directly but great expectations does. - # https://github.com/ipython/traitlets/issues/741 - "traitlets<5.2.2", - "greenlet", +usage_common = { + "sqlparse", +} + +sqlglot_lib = { + # Using an Acryl fork of sqlglot. + # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1 + "acryl-sqlglot==18.5.2.dev45", } +sql_common = ( + { + # Required for all SQL sources. + # This is temporary lower bound that we're open to loosening/tightening as requirements show up + "sqlalchemy>=1.4.39, <2", + # Required for SQL profiling. + "great-expectations>=0.15.12, <=0.15.50", + # scipy version restricted to reduce backtracking, used by great-expectations, + "scipy>=1.7.2", + # GE added handling for higher version of jinja2 + # https://github.com/great-expectations/great_expectations/pull/5382/files + # datahub does not depend on traitlets directly but great expectations does. 
+ # https://github.com/ipython/traitlets/issues/741 + "traitlets<5.2.2", + "greenlet", + } + | usage_common + | sqlglot_lib +) + sqllineage_lib = { "sqllineage==1.3.8", # We don't have a direct dependency on sqlparse but it is a dependency of sqllineage. @@ -125,12 +139,6 @@ "sqlparse==0.4.4", } -sqlglot_lib = { - # Using an Acryl fork of sqlglot. - # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1 - "acryl-sqlglot==18.5.2.dev45", -} - aws_common = { # AWS Python SDK "boto3", @@ -243,10 +251,6 @@ powerbi_report_server = {"requests", "requests_ntlm"} -usage_common = { - "sqlparse", -} - databricks = { # 0.1.11 appears to have authentication issues with azure databricks "databricks-sdk>=0.9.0", diff --git a/metadata-ingestion/src/datahub/configuration/common.py b/metadata-ingestion/src/datahub/configuration/common.py index c909b89eb0c2d..73ac4baac48c0 100644 --- a/metadata-ingestion/src/datahub/configuration/common.py +++ b/metadata-ingestion/src/datahub/configuration/common.py @@ -283,7 +283,7 @@ class VersionedConfig(ConfigModel): class LineageConfig(ConfigModel): incremental_lineage: bool = Field( - default=True, + default=False, description="When enabled, emits lineage as incremental to existing lineage already in DataHub. When disabled, re-states lineage on each run.", ) diff --git a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py index dedcfa0385f75..cedaa4fbbd7f6 100644 --- a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py +++ b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py @@ -106,6 +106,7 @@ def process_sql_parsing_result( user: Optional[UserUrn] = None, custom_operation_type: Optional[str] = None, include_urns: Optional[Set[DatasetUrn]] = None, + include_column_lineage: bool = True, ) -> Iterable[MetadataWorkUnit]: """Process a single query and yield any generated workunits. 
@@ -130,7 +131,9 @@ def process_sql_parsing_result(
         _merge_lineage_data(
             downstream_urn=downstream_urn,
             upstream_urns=result.in_tables,
-            column_lineage=result.column_lineage,
+            column_lineage=result.column_lineage
+            if include_column_lineage
+            else None,
             upstream_edges=self._lineage_map[downstream_urn],
             query_timestamp=query_timestamp,
             is_view_ddl=is_view_ddl,
diff --git a/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py b/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py
index 9478c5cf7efa2..945b201ca5758 100644
--- a/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py
+++ b/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py
@@ -130,10 +130,13 @@ def auto_incremental_lineage(
             if len(wu.metadata.proposedSnapshot.aspects) > 0:
                 yield wu
 
-            yield _lineage_wu_via_read_modify_write(
-                graph, urn, lineage_aspect, wu.metadata.systemMetadata
-            ) if lineage_aspect.fineGrainedLineages else _convert_upstream_lineage_to_patch(
-                urn, lineage_aspect, wu.metadata.systemMetadata
-            )
+            if lineage_aspect.fineGrainedLineages:
+                yield _lineage_wu_via_read_modify_write(
+                    graph, urn, lineage_aspect, wu.metadata.systemMetadata
+                )
+            elif lineage_aspect.upstreams:
+                yield _convert_upstream_lineage_to_patch(
+                    urn, lineage_aspect, wu.metadata.systemMetadata
+                )
             else:
                 yield wu
diff --git a/metadata-ingestion/src/datahub/ingestion/api/source.py b/metadata-ingestion/src/datahub/ingestion/api/source.py
index b86844b1c4c83..8940642f7008a 100644
--- a/metadata-ingestion/src/datahub/ingestion/api/source.py
+++ b/metadata-ingestion/src/datahub/ingestion/api/source.py
@@ -215,6 +215,7 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
                 )
             ):
                 auto_lowercase_dataset_urns = auto_lowercase_urns
+
         return [
             auto_lowercase_dataset_urns,
             auto_status_aspect,
diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py
index 48d2118a9b091..c4de24bf192f1 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py
@@ -280,6 +280,11 @@ class DBTCommonConfig(
         default=False,
         description="When enabled, dbt test warnings will be treated as failures.",
     )
+    # override default value to True.
+    incremental_lineage: bool = Field(
+        default=True,
+        description="When enabled, emits lineage as incremental to existing lineage already in DataHub. 
When disabled, re-states lineage on each run.", + ) @validator("target_platform") def validate_target_platform_value(cls, target_platform: str) -> str: diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py index 0a15c352fc842..9649054dbe6cb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py @@ -136,7 +136,6 @@ def get_workunits( return self._populate_external_lineage_map(discovered_tables) - if self.config.include_view_lineage: if len(discovered_views) > 0: yield from self.get_view_upstream_workunits( @@ -200,14 +199,15 @@ def _gen_workunit_from_sql_parsing_result( self, dataset_identifier: str, result: SqlParsingResult, - ) -> MetadataWorkUnit: + ) -> Iterable[MetadataWorkUnit]: upstreams, fine_upstreams = self.get_upstreams_from_sql_parsing_result( self.dataset_urn_builder(dataset_identifier), result ) - self.report.num_views_with_upstreams += 1 - return self._create_upstream_lineage_workunit( - dataset_identifier, upstreams, fine_upstreams - ) + if upstreams: + self.report.num_views_with_upstreams += 1 + yield self._create_upstream_lineage_workunit( + dataset_identifier, upstreams, fine_upstreams + ) def _gen_workunits_from_query_result( self, @@ -251,7 +251,7 @@ def get_view_upstream_workunits( ) if result: views_processed.add(view_identifier) - yield self._gen_workunit_from_sql_parsing_result( + yield from self._gen_workunit_from_sql_parsing_result( view_identifier, result ) self.report.view_lineage_parse_secs = timer.elapsed_seconds() diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py b/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py index 63b21bc82eddd..d081acb6c1eff 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py @@ -1,15 +1,18 @@ import json import logging import re -from typing import Any, Dict, List, Optional +from typing import Any, Dict, Iterable, List, Optional, Union from pydantic.class_validators import validator from pydantic.fields import Field # This import verifies that the dependencies are available. 
from pyhive import hive # noqa: F401 -from pyhive.sqlalchemy_hive import HiveDate, HiveDecimal, HiveTimestamp +from pyhive.sqlalchemy_hive import HiveDate, HiveDecimal, HiveDialect, HiveTimestamp +from sqlalchemy.engine.reflection import Inspector +from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.decorators import ( SourceCapability, SupportStatus, @@ -18,8 +21,10 @@ platform_name, support_status, ) +from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.extractor import schema_util -from datahub.ingestion.source.sql.sql_common import register_custom_type +from datahub.ingestion.source.sql.sql_common import SqlWorkUnit, register_custom_type +from datahub.ingestion.source.sql.sql_config import SQLCommonConfig from datahub.ingestion.source.sql.two_tier_sql_source import ( TwoTierSQLAlchemyConfig, TwoTierSQLAlchemySource, @@ -31,6 +36,7 @@ SchemaField, TimeTypeClass, ) +from datahub.metadata.schema_classes import ViewPropertiesClass from datahub.utilities import config_clean from datahub.utilities.hive_schema_to_avro import get_avro_schema_for_hive_column @@ -90,19 +96,34 @@ def dbapi_get_columns_patched(self, connection, table_name, schema=None, **kw): logger.warning(f"Failed to patch method due to {e}") +@reflection.cache # type: ignore +def get_view_names_patched(self, connection, schema=None, **kw): + query = "SHOW VIEWS" + if schema: + query += " IN " + self.identifier_preparer.quote_identifier(schema) + return [row[0] for row in connection.execute(query)] + + +@reflection.cache # type: ignore +def get_view_definition_patched(self, connection, view_name, schema=None, **kw): + full_table = self.identifier_preparer.quote_identifier(view_name) + if schema: + full_table = "{}.{}".format( + self.identifier_preparer.quote_identifier(schema), + self.identifier_preparer.quote_identifier(view_name), + ) + row = connection.execute("SHOW CREATE TABLE {}".format(full_table)).fetchone() + return row[0] + + +HiveDialect.get_view_names = get_view_names_patched +HiveDialect.get_view_definition = get_view_definition_patched + + class HiveConfig(TwoTierSQLAlchemyConfig): # defaults scheme = Field(default="hive", hidden_from_docs=True) - # Hive SQLAlchemy connector returns views as tables. - # See https://github.com/dropbox/PyHive/blob/b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0/pyhive/sqlalchemy_hive.py#L270-L273. - # Disabling views helps us prevent this duplication. - include_views = Field( - default=False, - hidden_from_docs=True, - description="Hive SQLAlchemy connector returns views as tables. See https://github.com/dropbox/PyHive/blob/b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0/pyhive/sqlalchemy_hive.py#L270-L273. Disabling views helps us prevent this duplication.", - ) - @validator("host_port") def clean_host_port(cls, v): return config_clean.remove_protocol(v) @@ -174,3 +195,41 @@ def get_schema_fields_for_column( return new_fields return fields + + # Hive SQLAlchemy connector returns views as tables in get_table_names. + # See https://github.com/dropbox/PyHive/blob/b21c507a24ed2f2b0cf15b0b6abb1c43f31d3ee0/pyhive/sqlalchemy_hive.py#L270-L273. 
+ # This override makes sure that we ingest view definitions for views + def _process_view( + self, + dataset_name: str, + inspector: Inspector, + schema: str, + view: str, + sql_config: SQLCommonConfig, + ) -> Iterable[Union[SqlWorkUnit, MetadataWorkUnit]]: + dataset_urn = make_dataset_urn_with_platform_instance( + self.platform, + dataset_name, + self.config.platform_instance, + self.config.env, + ) + + try: + view_definition = inspector.get_view_definition(view, schema) + if view_definition is None: + view_definition = "" + else: + # Some dialects return a TextClause instead of a raw string, + # so we need to convert them to a string. + view_definition = str(view_definition) + except NotImplementedError: + view_definition = "" + + if view_definition: + view_properties_aspect = ViewPropertiesClass( + materialized=False, viewLanguage="SQL", viewLogic=view_definition + ) + yield MetadataChangeProposalWrapper( + entityUrn=dataset_urn, + aspect=view_properties_aspect, + ).as_workunit() diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py index a6a9d8e2c8597..4f133c6459a0f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py @@ -103,10 +103,6 @@ class BasePostgresConfig(BasicSQLAlchemyConfig): class PostgresConfig(BasePostgresConfig): - include_view_lineage = Field( - default=False, description="Include table lineage for views" - ) - database_pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), description=( @@ -183,9 +179,10 @@ def get_inspectors(self) -> Iterable[Inspector]: def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]: yield from super().get_workunits_internal() - for inspector in self.get_inspectors(): - if self.config.include_view_lineage: - yield from self._get_view_lineage_workunits(inspector) + if self.views_failed_parsing: + for inspector in self.get_inspectors(): + if self.config.include_view_lineage: + yield from self._get_view_lineage_workunits(inspector) def _get_view_lineage_elements( self, inspector: Inspector @@ -245,11 +242,14 @@ def _get_view_lineage_workunits( dependent_view, dependent_schema = key # Construct a lineage object. 
+ view_identifier = self.get_identifier( + schema=dependent_schema, entity=dependent_view, inspector=inspector + ) + if view_identifier not in self.views_failed_parsing: + return urn = mce_builder.make_dataset_urn_with_platform_instance( platform=self.platform, - name=self.get_identifier( - schema=dependent_schema, entity=dependent_view, inspector=inspector - ), + name=view_identifier, platform_instance=self.config.platform_instance, env=self.config.env, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index fad9b9e8018a5..51909eaf4ed55 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -2,12 +2,14 @@ import logging import traceback from dataclasses import dataclass, field +from functools import partial from typing import ( TYPE_CHECKING, Any, Dict, Iterable, List, + MutableMapping, Optional, Set, Tuple, @@ -29,7 +31,9 @@ make_tag_urn, ) from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.emitter.sql_parsing_builder import SqlParsingBuilder from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.api.incremental_lineage_helper import auto_incremental_lineage from datahub.ingestion.api.source import MetadataWorkUnitProcessor from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import ( @@ -86,9 +90,16 @@ ViewPropertiesClass, ) from datahub.telemetry import telemetry +from datahub.utilities.file_backed_collections import FileBackedDict from datahub.utilities.lossy_collections import LossyList from datahub.utilities.registries.domain_registry import DomainRegistry from datahub.utilities.sqlalchemy_query_combiner import SQLAlchemyQueryCombinerReport +from datahub.utilities.sqlglot_lineage import ( + SchemaResolver, + SqlParsingResult, + sqlglot_lineage, + view_definition_lineage_helper, +) if TYPE_CHECKING: from datahub.ingestion.source.ge_data_profiler import ( @@ -110,6 +121,11 @@ class SQLSourceReport(StaleEntityRemovalSourceReport): query_combiner: Optional[SQLAlchemyQueryCombinerReport] = None + num_view_definitions_parsed: int = 0 + num_view_definitions_failed_parsing: int = 0 + num_view_definitions_failed_column_parsing: int = 0 + view_definitions_parsing_failures: LossyList[str] = field(default_factory=LossyList) + def report_entity_scanned(self, name: str, ent_type: str = "table") -> None: """ Entity could be a view or a table @@ -319,6 +335,18 @@ def __init__(self, config: SQLCommonConfig, ctx: PipelineContext, platform: str) cached_domains=[k for k in self.config.domain], graph=self.ctx.graph ) + self.views_failed_parsing: Set[str] = set() + self.schema_resolver: SchemaResolver = SchemaResolver( + platform=self.platform, + platform_instance=self.config.platform_instance, + env=self.config.env, + ) + self._view_definition_cache: MutableMapping[str, str] + if self.config.use_file_backed_cache: + self._view_definition_cache = FileBackedDict[str]() + else: + self._view_definition_cache = {} + def warn(self, log: logging.Logger, key: str, reason: str) -> None: self.report.report_warning(key, reason[:100]) log.warning(f"{key} => {reason}") @@ -455,6 +483,11 @@ def get_schema_level_workunits( def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: return [ *super().get_workunit_processors(), + partial( + auto_incremental_lineage, + self.ctx.graph, + 
self.config.incremental_lineage, + ), StaleEntityRemovalHandler.create( self, self.config, self.ctx ).workunit_processor, @@ -512,6 +545,35 @@ def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit profile_requests, profiler, platform=self.platform ) + if self.config.include_view_lineage: + yield from self.get_view_lineage() + + def get_view_lineage(self) -> Iterable[MetadataWorkUnit]: + builder = SqlParsingBuilder( + generate_lineage=True, + generate_usage_statistics=False, + generate_operations=False, + ) + for dataset_name in self._view_definition_cache.keys(): + view_definition = self._view_definition_cache[dataset_name] + result = self._run_sql_parser( + dataset_name, + view_definition, + self.schema_resolver, + ) + if result and result.out_tables: + # This does not yield any workunits but we use + # yield here to execute this method + yield from builder.process_sql_parsing_result( + result=result, + query=view_definition, + is_view_ddl=True, + include_column_lineage=self.config.include_view_column_lineage, + ) + else: + self.views_failed_parsing.add(dataset_name) + yield from builder.gen_workunits() + def get_identifier( self, *, schema: str, entity: str, inspector: Inspector, **kwargs: Any ) -> str: @@ -658,6 +720,8 @@ def _process_table( schema_fields, ) dataset_snapshot.aspects.append(schema_metadata) + if self.config.include_view_lineage: + self.schema_resolver.add_schema_metadata(dataset_urn, schema_metadata) db_name = self.get_db_name(inspector) yield from self.add_table_to_schema_container( @@ -862,6 +926,12 @@ def _process_view( view: str, sql_config: SQLCommonConfig, ) -> Iterable[Union[SqlWorkUnit, MetadataWorkUnit]]: + dataset_urn = make_dataset_urn_with_platform_instance( + self.platform, + dataset_name, + self.config.platform_instance, + self.config.env, + ) try: columns = inspector.get_columns(view, schema) except KeyError: @@ -877,6 +947,8 @@ def _process_view( columns, canonical_schema=schema_fields, ) + if self.config.include_view_lineage: + self.schema_resolver.add_schema_metadata(dataset_urn, schema_metadata) description, properties, _ = self.get_table_properties(inspector, schema, view) try: view_definition = inspector.get_view_definition(view, schema) @@ -890,12 +962,9 @@ def _process_view( view_definition = "" properties["view_definition"] = view_definition properties["is_view"] = "True" - dataset_urn = make_dataset_urn_with_platform_instance( - self.platform, - dataset_name, - self.config.platform_instance, - self.config.env, - ) + if view_definition and self.config.include_view_lineage: + self._view_definition_cache[dataset_name] = view_definition + dataset_snapshot = DatasetSnapshot( urn=dataset_urn, aspects=[StatusClass(removed=False)], @@ -942,6 +1011,51 @@ def _process_view( domain_registry=self.domain_registry, ) + def _run_sql_parser( + self, view_identifier: str, query: str, schema_resolver: SchemaResolver + ) -> Optional[SqlParsingResult]: + try: + database, schema = self.get_db_schema(view_identifier) + except ValueError: + logger.warning(f"Invalid view identifier: {view_identifier}") + return None + raw_lineage = sqlglot_lineage( + query, + schema_resolver=schema_resolver, + default_db=database, + default_schema=schema, + ) + view_urn = make_dataset_urn_with_platform_instance( + self.platform, + view_identifier, + self.config.platform_instance, + self.config.env, + ) + + if raw_lineage.debug_info.table_error: + logger.debug( + f"Failed to parse lineage for view {view_identifier}: " + f"{raw_lineage.debug_info.table_error}" + 
) + self.report.num_view_definitions_failed_parsing += 1 + self.report.view_definitions_parsing_failures.append( + f"Table-level sql parsing error for view {view_identifier}: {raw_lineage.debug_info.table_error}" + ) + return None + + elif raw_lineage.debug_info.column_error: + self.report.num_view_definitions_failed_column_parsing += 1 + self.report.view_definitions_parsing_failures.append( + f"Column-level sql parsing error for view {view_identifier}: {raw_lineage.debug_info.column_error}" + ) + else: + self.report.num_view_definitions_parsed += 1 + return view_definition_lineage_helper(raw_lineage, view_urn) + + def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]: + database, schema, _view = dataset_identifier.split(".") + return database, schema + def get_profiler_instance(self, inspector: Inspector) -> "DatahubGEProfiler": from datahub.ingestion.source.ge_data_profiler import DatahubGEProfiler diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py index 57aae32b361cf..095b8e6443171 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py @@ -6,7 +6,7 @@ from pydantic import Field from sqlalchemy.engine import URL -from datahub.configuration.common import AllowDenyPattern, ConfigModel +from datahub.configuration.common import AllowDenyPattern, ConfigModel, LineageConfig from datahub.configuration.source_common import ( DatasetSourceConfigMixin, LowerCaseDatasetUrnConfigMixin, @@ -28,6 +28,7 @@ class SQLCommonConfig( StatefulIngestionConfigBase, DatasetSourceConfigMixin, LowerCaseDatasetUrnConfigMixin, + LineageConfig, ): options: dict = pydantic.Field( default_factory=dict, @@ -70,6 +71,22 @@ class SQLCommonConfig( description="If the source supports it, include table lineage to the underlying storage location.", ) + include_view_lineage: bool = Field( + default=True, + description="Populates view->view and table->view lineage using DataHub's sql parser.", + ) + + include_view_column_lineage: bool = Field( + default=True, + description="Populates column-level lineage for view->view and table->view lineage using DataHub's sql parser." + " Requires `include_view_lineage` to be enabled.", + ) + + use_file_backed_cache: bool = Field( + default=True, + description="Whether to use a file backed cache for the view definitions.", + ) + profiling: GEProfilingConfig = GEProfilingConfig() # Custom Stateful Ingestion settings stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py b/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py index e628e4dbd3446..899a7b6697c0a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py @@ -1,7 +1,7 @@ import logging from dataclasses import dataclass from datetime import datetime -from typing import Iterable, MutableMapping, Optional, Union +from typing import Iterable, Optional, Union # This import verifies that the dependencies are available. 
import teradatasqlalchemy # noqa: F401 @@ -33,14 +33,11 @@ from datahub.ingestion.source.usage.usage_common import BaseUsageConfig from datahub.ingestion.source_report.ingestion_stage import IngestionStageReport from datahub.ingestion.source_report.time_window import BaseTimeWindowReport -from datahub.metadata._schema_classes import SchemaMetadataClass, ViewPropertiesClass from datahub.metadata.com.linkedin.pegasus2avro.schema import ( BytesTypeClass, TimeTypeClass, ) -from datahub.utilities.file_backed_collections import FileBackedDict from datahub.utilities.sqlglot_lineage import SchemaResolver, sqlglot_lineage -from datahub.utilities.urns.dataset_urn import DatasetUrn logger: logging.Logger = logging.getLogger(__name__) @@ -87,11 +84,6 @@ class TeradataConfig(BaseTeradataConfig, BaseTimeWindowConfig): "This requires to have the table lineage feature enabled.", ) - include_view_lineage = Field( - default=True, - description="Whether to include view lineage in the ingestion. " - "This requires to have the view lineage feature enabled.", - ) usage: BaseUsageConfig = Field( description="The usage config to use when generating usage statistics", default=BaseUsageConfig(), @@ -107,11 +99,6 @@ class TeradataConfig(BaseTeradataConfig, BaseTimeWindowConfig): description="Generate usage statistic.", ) - use_file_backed_cache: bool = Field( - default=True, - description="Whether to use a file backed cache for the view definitions.", - ) - @platform_name("Teradata") @config_class(TeradataConfig) @@ -143,8 +130,6 @@ class TeradataSource(TwoTierSQLAlchemySource): and "timestamp" < TIMESTAMP '{end_time}' """ - _view_definition_cache: MutableMapping[str, str] - def __init__(self, config: TeradataConfig, ctx: PipelineContext): super().__init__(config, ctx, "teradata") @@ -167,34 +152,11 @@ def __init__(self, config: TeradataConfig, ctx: PipelineContext): env=self.config.env, ) - if self.config.use_file_backed_cache: - self._view_definition_cache = FileBackedDict[str]() - else: - self._view_definition_cache = {} - @classmethod def create(cls, config_dict, ctx): config = TeradataConfig.parse_obj(config_dict) return cls(config, ctx) - def get_view_lineage(self) -> Iterable[MetadataWorkUnit]: - for key in self._view_definition_cache.keys(): - view_definition = self._view_definition_cache[key] - dataset_urn = DatasetUrn.create_from_string(key) - - db_name: Optional[str] = None - # We need to get the default db from the dataset urn otherwise the builder generates the wrong urns - if "." 
in dataset_urn.get_dataset_name(): - db_name = dataset_urn.get_dataset_name().split(".", 1)[0] - - self.report.num_view_ddl_parsed += 1 - if self.report.num_view_ddl_parsed % 1000 == 0: - logger.info(f"Parsed {self.report.num_queries_parsed} view ddl") - - yield from self.gen_lineage_from_query( - query=view_definition, default_database=db_name, is_view_ddl=True - ) - def get_audit_log_mcps(self) -> Iterable[MetadataWorkUnit]: engine = self.get_metadata_engine() for entry in engine.execute( @@ -252,19 +214,7 @@ def get_metadata_engine(self) -> Engine: def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]: # Add all schemas to the schema resolver - for wu in super().get_workunits_internal(): - urn = wu.get_urn() - schema_metadata = wu.get_aspect_of_type(SchemaMetadataClass) - if schema_metadata: - self.schema_resolver.add_schema_metadata(urn, schema_metadata) - view_properties = wu.get_aspect_of_type(ViewPropertiesClass) - if view_properties and self.config.include_view_lineage: - self._view_definition_cache[urn] = view_properties.viewLogic - yield wu - - if self.config.include_view_lineage: - self.report.report_ingestion_stage_start("view lineage extraction") - yield from self.get_view_lineage() + yield from super().get_workunits_internal() if self.config.include_table_lineage or self.config.include_usage_statistics: self.report.report_ingestion_stage_start("audit log extraction") diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/two_tier_sql_source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/two_tier_sql_source.py index 7a49551dc1235..efb1d3ffe119f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/two_tier_sql_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/two_tier_sql_source.py @@ -1,6 +1,6 @@ import typing import urllib.parse -from typing import Any, Dict, Iterable, Optional +from typing import Any, Dict, Iterable, Optional, Tuple from pydantic.fields import Field from sqlalchemy import create_engine, inspect @@ -71,6 +71,10 @@ def __init__(self, config, ctx, platform): super().__init__(config, ctx, platform) self.config: TwoTierSQLAlchemyConfig = config + def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]: + schema, _view = dataset_identifier.split(".", 1) + return None, schema + def get_database_container_key(self, db_name: str, schema: str) -> ContainerKey: # Because our overridden get_allowed_schemas method returns db_name as the schema name, # the db_name and schema here will be the same. Hence, we just ignore the schema parameter. diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/vertica.py b/metadata-ingestion/src/datahub/ingestion/source/sql/vertica.py index a417cae2b1ab0..b89db755853bc 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/vertica.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/vertica.py @@ -86,7 +86,7 @@ class VerticaConfig(BasicSQLAlchemyConfig): default=True, description="Whether Models should be ingested." 
     )
 
-    include_view_lineage: Optional[bool] = pydantic.Field(
+    include_view_lineage: bool = pydantic.Field(
         default=True,
         description="If the source supports it, include view lineage to the underlying storage location.",
     )
diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py
index be97e9380f1f5..7fb2cf9813cab 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py
@@ -11,7 +11,6 @@
     ConfigModel,
     ConfigurationError,
     DynamicTypedConfig,
-    LineageConfig,
 )
 from datahub.configuration.time_window_config import BaseTimeWindowConfig
 from datahub.configuration.validate_field_rename import pydantic_renamed_field
@@ -100,7 +99,7 @@ class StatefulIngestionConfigBase(GenericModel, Generic[CustomConfig]):
     )
 
 
-class StatefulLineageConfigMixin(LineageConfig):
+class StatefulLineageConfigMixin:
     enable_stateful_lineage_ingestion: bool = Field(
         default=True,
         description="Enable stateful lineage ingestion."
diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py
index 0d72fc52da0ca..c3e8c175f1de5 100644
--- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py
+++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py
@@ -166,13 +166,17 @@ def _check_oauth_config(oauth_config: Optional[OAuthConfiguration]) -> None:
                 "but should be set when using use_certificate false for oauth_config"
             )
 
-    @pydantic.validator("include_view_lineage")
-    def validate_include_view_lineage(cls, v, values):
-        if not values.get("include_table_lineage") and v:
+    @pydantic.root_validator()
+    def validate_include_view_lineage(cls, values):
+        if (
+            "include_table_lineage" in values
+            and not values.get("include_table_lineage")
+            and values.get("include_view_lineage")
+        ):
             raise ValueError(
                 "include_table_lineage must be True for include_view_lineage to be set."
             )
-        return v
+        return values
 
     def get_sql_alchemy_url(
         self,
diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
index 526d90b2a1bfa..1d74b20569814 100644
--- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
+++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
@@ -623,9 +623,9 @@ def _schema_aware_fuzzy_column_resolve(
         statement = sqlglot.optimizer.annotate_types.annotate_types(
             statement, schema=sqlglot_db_schema
         )
-    except sqlglot.errors.OptimizeError as e:
+    except (sqlglot.errors.OptimizeError, sqlglot.errors.ParseError) as e:
         # This is not a fatal error, so we can continue.
-        logger.debug("sqlglot failed to annotate types: %s", e)
+        logger.debug("sqlglot failed to annotate or parse types: %s", e)
 
     try:
         assert isinstance(statement, _SupportedColumnLineageTypesTuple)
@@ -1156,3 +1156,20 @@ def create_lineage_sql_parsed_result(
     finally:
         if needs_close:
             schema_resolver.close()
+
+
+def view_definition_lineage_helper(
+    result: SqlParsingResult, view_urn: str
+) -> SqlParsingResult:
+    if result.query_type is QueryType.SELECT:
+        # Some platforms (e.g. postgres) store only the `<select statement>` from a view
+        # definition and not the full DDL (e.g. `create view V as <select statement>`).
+        # For such view definitions, `result.out_tables` and
+        # `result.column_lineage[].downstream` are empty in `sqlglot_lineage` response, whereas upstream
+        # details and downstream column details are extracted correctly. 
+ # Here, we inject view V's urn in `result.out_tables` and `result.column_lineage[].downstream` + # to get complete lineage result. + result.out_tables = [view_urn] + if result.column_lineage: + for col_result in result.column_lineage: + col_result.downstream.table = view_urn + return result diff --git a/metadata-ingestion/tests/integration/hive/hive_mces_all_db_golden.json b/metadata-ingestion/tests/integration/hive/hive_mces_all_db_golden.json index f3b6d2b8138cc..6774d4c7055b9 100644 --- a/metadata-ingestion/tests/integration/hive/hive_mces_all_db_golden.json +++ b/metadata-ingestion/tests/integration/hive/hive_mces_all_db_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -93,7 +98,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:26 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:12 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", @@ -121,7 +127,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578706", + "Table Parameters: transient_lastDdlTime": "1697721972", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -187,7 +193,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -204,7 +211,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +232,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +248,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -257,17 +267,19 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:26 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:12 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", "Table Type:": "MANAGED_TABLE", "Table Parameters: COLUMN_STATS_ACCURATE": "{\\\"BASIC_STATS\\\":\\\"true\\\"}", + "Table Parameters: another.comment": "This table has no partitions", + "Table 
Parameters: comment": "This table has array of structs", "Table Parameters: numFiles": "1", "Table Parameters: numRows": "1", "Table Parameters: rawDataSize": "32", "Table Parameters: totalSize": "33", - "Table Parameters: transient_lastDdlTime": "1688578710", + "Table Parameters: transient_lastDdlTime": "1697721976", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -278,6 +290,7 @@ "Storage Desc Params: serialization.format": "1" }, "name": "array_struct_test", + "description": "This table has array of structs", "tags": [] } }, @@ -304,6 +317,7 @@ { "fieldPath": "property_id", "nullable": true, + "description": "id of property", "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -316,6 +330,7 @@ { "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service", "nullable": true, + "description": "service types and providers", "type": { "type": { "com.linkedin.pegasus2avro.schema.ArrayType": { @@ -368,7 +383,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -385,7 +401,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -405,7 +422,189 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "Database:": "db1", + "Owner:": "root", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", + "LastAccessTime:": "UNKNOWN", + "Retention:": "0", + "Table Type:": "VIRTUAL_VIEW", + "Table Parameters: transient_lastDdlTime": "1697721978", + "SerDe Library:": "null", + "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", + "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", + "Compressed:": "No", + "Num Buckets:": "-1", + "Bucket Columns:": "[]", + "Sort Columns:": "[]", + "View Original Text:": "select * from db1.array_struct_test", + "View Expanded Text:": "select `array_struct_test`.`property_id`, `array_struct_test`.`service` from `db1`.`array_struct_test`", + "View Rewrite Enabled:": "No" + }, + "name": "array_struct_test_view", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "db1.array_struct_test_view", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { 
+ "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service.[type=array].[type=int].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": { + "nestedType": [ + "int" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:ded36d15fcfbbb939830549697122661", + "urn": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -420,7 +619,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -438,7 +638,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:30 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", @@ -448,7 +648,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578710", + "Table Parameters: transient_lastDdlTime": "1697721978", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -518,7 +718,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -535,7 +736,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + 
"runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -555,7 +757,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -570,7 +773,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -588,7 +792,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:30 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", @@ -598,7 +802,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578710", + "Table Parameters: transient_lastDdlTime": "1697721978", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -717,7 +921,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -734,7 +939,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -754,7 +960,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -769,7 +976,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -787,16 +995,17 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:22 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:08 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", "Table Type:": "MANAGED_TABLE", "Table Parameters: numFiles": "1", + "Table Parameters: numPartitions": "1", "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "5812", - "Table Parameters: transient_lastDdlTime": "1688578704", + "Table Parameters: transient_lastDdlTime": "1697721968", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -853,6 +1062,18 @@ "nativeDataType": "string", "recursive": false, "isPartOfKey": false + }, + { + "fieldPath": "baz", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false } ] } @@ -862,7 +1083,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -879,7 +1101,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +1122,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -914,7 +1138,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": 
"hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -932,7 +1157,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:26 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:12 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", @@ -942,7 +1167,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578706", + "Table Parameters: transient_lastDdlTime": "1697721972", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -1039,7 +1264,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1056,7 +1282,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1076,7 +1303,188 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "Database:": "db1", + "Owner:": "root", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", + "LastAccessTime:": "UNKNOWN", + "Retention:": "0", + "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test_view_materialized", + "Table Type:": "MATERIALIZED_VIEW", + "Table Parameters: numFiles": "0", + "Table Parameters: totalSize": "0", + "Table Parameters: transient_lastDdlTime": "1697721978", + "SerDe Library:": "org.apache.hadoop.hive.ql.io.orc.OrcSerde", + "InputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", + "OutputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", + "Compressed:": "No", + "Num Buckets:": "-1", + "Bucket Columns:": "[]", + "Sort Columns:": "[]", + "View Original Text:": "select * from db1.struct_test", + "View Expanded Text:": "null", + "View Rewrite Enabled:": "No" + }, + "name": "struct_test_view_materialized", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "db1.struct_test_view_materialized", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "nullable": true, + "type": { + "type": { + 
"com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=array].[type=int].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": { + "nestedType": [ + "int" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:ded36d15fcfbbb939830549697122661", + "urn": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1091,7 +1499,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1109,7 +1518,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:30 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", @@ -1119,10 +1528,10 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578710", - "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", - "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", - "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", + "Table Parameters: transient_lastDdlTime": "1697721978", + "SerDe Library:": "org.apache.hadoop.hive.ql.io.orc.OrcSerde", + "InputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", + "OutputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", "Compressed:": "No", "Num Buckets:": "-1", "Bucket Columns:": "[]", @@ -1285,7 +1694,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, 
{ @@ -1302,7 +1712,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1322,7 +1733,26 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "CREATE VIEW `db1.array_struct_test_view` AS select `array_struct_test`.`property_id`, `array_struct_test`.`service` from `db1`.`array_struct_test`", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1342,7 +1772,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1357,7 +1788,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1372,7 +1804,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1389,7 +1822,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1404,7 +1838,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1419,7 +1854,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1437,7 +1873,7 @@ "customProperties": { "Database:": "db2", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:24 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:10 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db2.db/pokes", @@ -1446,7 +1882,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "5812", - "Table Parameters: transient_lastDdlTime": "1688578706", + "Table Parameters: transient_lastDdlTime": "1697721971", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -1454,10 +1890,7 @@ "Num Buckets:": "-1", "Bucket Columns:": "[]", "Sort Columns:": "[]", - "Storage Desc Params: serialization.format": "1", - "Table:": "db2.pokes", - "Constraint Name:": "pk_1173723383_1683022998392_0", - "Column Names:": "foo" + "Storage Desc Params: serialization.format": "1" }, "name": "pokes", "tags": [] @@ -1515,7 +1948,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1532,7 +1966,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1552,7 +1987,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1572,7 +2008,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - 
"runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1587,7 +2024,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1602,7 +2040,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1619,7 +2058,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1634,7 +2074,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/hive/hive_mces_golden.json b/metadata-ingestion/tests/integration/hive/hive_mces_golden.json index 08f281f398909..e93924049f626 100644 --- a/metadata-ingestion/tests/integration/hive/hive_mces_golden.json +++ b/metadata-ingestion/tests/integration/hive/hive_mces_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -93,7 +98,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:26 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:12 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", @@ -121,7 +127,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578706", + "Table Parameters: transient_lastDdlTime": "1697721972", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -187,7 +193,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -204,7 +211,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +232,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +248,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -257,17 +267,19 @@ "customProperties": { 
"Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:26 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:12 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", "Table Type:": "MANAGED_TABLE", "Table Parameters: COLUMN_STATS_ACCURATE": "{\\\"BASIC_STATS\\\":\\\"true\\\"}", + "Table Parameters: another.comment": "This table has no partitions", + "Table Parameters: comment": "This table has array of structs", "Table Parameters: numFiles": "1", "Table Parameters: numRows": "1", "Table Parameters: rawDataSize": "32", "Table Parameters: totalSize": "33", - "Table Parameters: transient_lastDdlTime": "1688578710", + "Table Parameters: transient_lastDdlTime": "1697721976", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -278,6 +290,7 @@ "Storage Desc Params: serialization.format": "1" }, "name": "array_struct_test", + "description": "This table has array of structs", "tags": [] } }, @@ -304,6 +317,7 @@ { "fieldPath": "property_id", "nullable": true, + "description": "id of property", "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -316,6 +330,7 @@ { "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service", "nullable": true, + "description": "service types and providers", "type": { "type": { "com.linkedin.pegasus2avro.schema.ArrayType": { @@ -368,7 +383,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -385,7 +401,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -405,7 +422,189 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "Database:": "db1", + "Owner:": "root", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", + "LastAccessTime:": "UNKNOWN", + "Retention:": "0", + "Table Type:": "VIRTUAL_VIEW", + "Table Parameters: transient_lastDdlTime": "1697721978", + "SerDe Library:": "null", + "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", + "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", + "Compressed:": "No", + "Num Buckets:": "-1", + "Bucket Columns:": "[]", + "Sort Columns:": "[]", + "View Original Text:": "select * from db1.array_struct_test", + "View Expanded Text:": "select `array_struct_test`.`property_id`, `array_struct_test`.`service` from `db1`.`array_struct_test`", + "View Rewrite Enabled:": 
"No" + }, + "name": "array_struct_test_view", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "db1.array_struct_test_view", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=array].[type=struct].service.[type=array].[type=int].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": { + "nestedType": [ + "int" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:ded36d15fcfbbb939830549697122661", + "urn": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -420,7 +619,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -438,7 +638,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:30 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", @@ -448,7 +648,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578710", + "Table Parameters: transient_lastDdlTime": "1697721978", "SerDe Library:": 
"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -518,7 +718,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -535,7 +736,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -555,7 +757,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -570,7 +773,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -588,7 +792,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:30 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", @@ -598,7 +802,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578710", + "Table Parameters: transient_lastDdlTime": "1697721978", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -717,7 +921,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -734,7 +939,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -754,7 +960,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -769,7 +976,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -787,16 +995,17 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:22 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:08 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", "Table Type:": "MANAGED_TABLE", "Table Parameters: numFiles": "1", + "Table Parameters: numPartitions": "1", "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "5812", - "Table Parameters: transient_lastDdlTime": "1688578704", + "Table Parameters: transient_lastDdlTime": "1697721968", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -853,6 +1062,18 @@ "nativeDataType": "string", "recursive": false, "isPartOfKey": false + }, + { + "fieldPath": "baz", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false } ] } @@ -862,7 +1083,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -879,7 +1101,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +1122,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -914,7 +1138,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -932,7 +1157,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:26 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:12 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", @@ -942,7 +1167,7 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578706", + "Table Parameters: transient_lastDdlTime": "1697721972", "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", @@ -1039,7 +1264,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1056,7 +1282,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1076,7 +1303,188 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "Database:": "db1", + "Owner:": "root", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", + "LastAccessTime:": "UNKNOWN", + "Retention:": "0", + "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test_view_materialized", + "Table Type:": "MATERIALIZED_VIEW", + "Table Parameters: numFiles": "0", + "Table Parameters: totalSize": "0", + "Table Parameters: transient_lastDdlTime": "1697721978", + "SerDe Library:": "org.apache.hadoop.hive.ql.io.orc.OrcSerde", + "InputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", + "OutputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", + "Compressed:": "No", + "Num Buckets:": "-1", + "Bucket Columns:": "[]", + "Sort Columns:": "[]", + "View Original Text:": "select * from db1.struct_test", + "View Expanded Text:": "null", + "View Rewrite Enabled:": "No" + }, + "name": "struct_test_view_materialized", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + 
"schemaName": "db1.struct_test_view_materialized", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=array].[type=int].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": { + "nestedType": [ + "int" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.struct_test_view_materialized,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:ded36d15fcfbbb939830549697122661", + "urn": "urn:li:container:ded36d15fcfbbb939830549697122661" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1091,7 +1499,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1109,7 +1518,7 @@ "customProperties": { "Database:": "db1", "Owner:": "root", - "CreateTime:": "Wed Jul 05 17:38:30 UTC 2023", + "CreateTime:": "Thu Oct 19 13:26:18 UTC 2023", "LastAccessTime:": "UNKNOWN", "Retention:": "0", "Location:": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", @@ -1119,10 +1528,10 @@ "Table Parameters: numRows": "0", "Table Parameters: rawDataSize": "0", "Table Parameters: totalSize": "0", - "Table Parameters: transient_lastDdlTime": "1688578710", - "SerDe Library:": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", - "InputFormat:": "org.apache.hadoop.mapred.TextInputFormat", - "OutputFormat:": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", + "Table Parameters: 
transient_lastDdlTime": "1697721978", + "SerDe Library:": "org.apache.hadoop.hive.ql.io.orc.OrcSerde", + "InputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", + "OutputFormat:": "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", "Compressed:": "No", "Num Buckets:": "-1", "Bucket Columns:": "[]", @@ -1285,7 +1694,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1302,7 +1712,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1322,7 +1733,26 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "hive-test" + "runId": "hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "CREATE VIEW `db1.array_struct_test_view` AS select `array_struct_test`.`property_id`, `array_struct_test`.`service` from `db1`.`array_struct_test`", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/hive/hive_setup.sql b/metadata-ingestion/tests/integration/hive/hive_setup.sql index 8fb8498894bc0..323a78e24d10b 100644 --- a/metadata-ingestion/tests/integration/hive/hive_setup.sql +++ b/metadata-ingestion/tests/integration/hive/hive_setup.sql @@ -1,10 +1,10 @@ CREATE DATABASE IF NOT EXISTS db1; CREATE DATABASE IF NOT EXISTS db2; -- Setup a "pokes" example table. -CREATE TABLE IF NOT EXISTS db1.pokes (foo INT, bar STRING); -LOAD DATA LOCAL INPATH '/opt/hive/examples/files/kv1.txt' OVERWRITE INTO TABLE db1.pokes; +CREATE TABLE IF NOT EXISTS db1.pokes (foo INT, bar STRING) PARTITIONED BY (baz STRING); +LOAD DATA LOCAL INPATH '/opt/hive/examples/files/kv1.txt' OVERWRITE INTO TABLE db1.pokes PARTITION (baz='dummy'); -CREATE TABLE IF NOT EXISTS db2.pokes (foo INT, bar STRING, CONSTRAINT pk_1173723383_1683022998392_0 primary key(foo) DISABLE NOVALIDATE NORELY); +CREATE TABLE IF NOT EXISTS db2.pokes (foo INT, bar STRING); LOAD DATA LOCAL INPATH '/opt/hive/examples/files/kv1.txt' OVERWRITE INTO TABLE db2.pokes; -- Setup a table with a special character. 
@@ -23,12 +23,12 @@ CREATE TABLE IF NOT EXISTS db1.struct_test CREATE TABLE IF NOT EXISTS db1.array_struct_test ( - property_id INT, + property_id INT COMMENT 'id of property', service array<struct<type: string, provider: array<int> - >> -); + >> COMMENT 'service types and providers' +) TBLPROPERTIES ('comment' = 'This table has array of structs', 'another.comment' = 'This table has no partitions'); WITH test_data as ( @@ -39,6 +39,9 @@ test_data as ( INSERT INTO TABLE db1.array_struct_test select * from test_data; +CREATE MATERIALIZED VIEW db1.struct_test_view_materialized as select * from db1.struct_test; +CREATE VIEW db1.array_struct_test_view as select * from db1.array_struct_test; + CREATE TABLE IF NOT EXISTS db1.nested_struct_test ( property_id INT, @@ -50,9 +53,6 @@ CREATE TABLE IF NOT EXISTS db1.nested_struct_test CREATE TABLE db1.union_test( foo UNIONTYPE<int, double, array<string>, struct<a:int,b:string>, struct<c:int,d:string>> -); +) STORED AS ORC ; -CREATE TABLE db1.map_test( - KeyValue String, - RecordId map<int, string> -); \ No newline at end of file +CREATE TABLE db1.map_test(KeyValue String, RecordId map<int, string>); \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json b/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json index 4aaefb48d33e1..38b03ce238d1c 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -93,7 +98,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -213,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -230,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -250,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -265,7 +274,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -361,7 +371,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -378,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -398,7 +410,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" }
}, { @@ -554,7 +567,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -969,7 +983,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -989,7 +1004,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1004,7 +1020,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1019,7 +1036,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1036,7 +1054,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,7 +1072,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1068,7 +1088,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1083,7 +1104,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,7 +1237,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1232,7 +1255,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1249,7 +1273,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1269,7 +1294,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1284,7 +1310,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1418,7 +1445,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1435,7 +1463,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1452,7 +1481,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1472,7 +1502,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1487,7 +1518,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1586,7 +1618,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1603,7 +1636,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1620,7 +1654,8 @@ }, 
"systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1637,7 +1672,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1657,7 +1693,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1677,7 +1714,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1692,7 +1730,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1707,7 +1746,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1724,7 +1764,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1739,7 +1780,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1754,7 +1796,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1874,7 +1917,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1891,7 +1935,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1911,7 +1956,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1926,7 +1972,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2022,7 +2069,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2039,7 +2087,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2059,7 +2108,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2182,7 +2232,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2233,7 +2284,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2253,7 +2305,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2268,7 +2321,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2283,7 +2337,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2300,7 +2355,8 @@ }, "systemMetadata": { 
"lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2315,7 +2371,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2330,7 +2387,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2390,7 +2448,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2407,7 +2466,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2427,7 +2487,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2442,7 +2503,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2502,7 +2564,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2519,7 +2582,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2539,7 +2603,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2568,7 +2633,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2597,7 +2663,79 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index_view,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index,PROD),doubleVal)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index_view,PROD),doubleVal)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index,PROD),id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index_view,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index,PROD),path)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index_view,PROD),path)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index,PROD),urn)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mysql,metagalaxy.metadata_index_view,PROD),urn)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/postgres/postgres_all_db_mces_with_db_golden.json b/metadata-ingestion/tests/integration/postgres/postgres_all_db_mces_with_db_golden.json index 535ce964c6058..b9b2a3b2141a8 100644 --- a/metadata-ingestion/tests/integration/postgres/postgres_all_db_mces_with_db_golden.json +++ b/metadata-ingestion/tests/integration/postgres/postgres_all_db_mces_with_db_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -99,7 +104,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -114,7 +120,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -129,7 +136,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -146,7 +154,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -161,7 +170,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -181,7 +191,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -201,7 +212,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +228,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -231,7 +244,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -248,7 +262,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -263,7 +278,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + 
"runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -284,7 +300,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -299,7 +316,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -314,7 +332,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -331,7 +350,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -346,7 +366,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -366,7 +387,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -381,7 +403,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -537,7 +560,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +578,186 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:a6097853edba03be190d99ece4b307ff", + "urn": "urn:li:container:a6097853edba03be190d99ece4b307ff" + }, + { + "id": "urn:li:container:51904fc8cd5cc729bc630decff284525", + "urn": "urn:li:container:51904fc8cd5cc729bc630decff284525" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:51904fc8cd5cc729bc630decff284525" + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "view_definition": " SELECT metadata_aspect_v2.urn,\n metadata_aspect_v2.aspect\n FROM metadata_aspect_v2\n WHERE (metadata_aspect_v2.version = 0);", + "is_view": "True" + }, + "name": "metadata_aspect_view", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "postgrestest.public.metadata_aspect_view", + "platform": "urn:li:dataPlatform:postgres", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + 
"lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "urn", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(length=500)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "aspect", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(length=200)", + "recursive": false, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": " SELECT metadata_aspect_v2.urn,\n metadata_aspect_v2.aspect\n FROM metadata_aspect_v2\n WHERE (metadata_aspect_v2.version = 0);", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:a6097853edba03be190d99ece4b307ff", + "urn": "urn:li:container:a6097853edba03be190d99ece4b307ff" + }, + { + "id": "urn:li:container:51904fc8cd5cc729bc630decff284525", + "urn": "urn:li:container:51904fc8cd5cc729bc630decff284525" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -634,31 +837,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:a6097853edba03be190d99ece4b307ff", - "urn": "urn:li:container:a6097853edba03be190d99ece4b307ff" - }, - { - "id": "urn:li:container:51904fc8cd5cc729bc630decff284525", - "urn": "urn:li:container:51904fc8cd5cc729bc630decff284525" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -675,29 +855,39 @@ "actor": "urn:li:corpuser:unknown" }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD)", - "type": "TRANSFORMED" + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD),aspect)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD),aspect)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD),urn)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD),urn)" + ], + "confidenceScore": 1.0 } ] } }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/postgres/postgres_all_db_to_file_with_db_estimate_row_count.yml b/metadata-ingestion/tests/integration/postgres/postgres_all_db_to_file_with_db_estimate_row_count.yml index b390d9246677e..2bfa39a65363b 100644 --- a/metadata-ingestion/tests/integration/postgres/postgres_all_db_to_file_with_db_estimate_row_count.yml +++ b/metadata-ingestion/tests/integration/postgres/postgres_all_db_to_file_with_db_estimate_row_count.yml @@ -25,7 +25,7 @@ source: include_field_distinct_value_frequencies: false include_field_histogram: false catch_exceptions: true - include_views: false + include_views: true sink: type: file config: diff --git a/metadata-ingestion/tests/integration/postgres/postgres_mces_with_db_golden.json b/metadata-ingestion/tests/integration/postgres/postgres_mces_with_db_golden.json index bf36a39a8c103..f6fa0a0ed032e 100644 --- a/metadata-ingestion/tests/integration/postgres/postgres_mces_with_db_golden.json +++ b/metadata-ingestion/tests/integration/postgres/postgres_mces_with_db_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -99,7 +104,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -114,7 +120,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -129,7 +136,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, 
- "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -146,7 +154,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -161,7 +170,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -181,7 +191,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -196,7 +207,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -352,7 +364,8 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -369,7 +382,186 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:a6097853edba03be190d99ece4b307ff", + "urn": "urn:li:container:a6097853edba03be190d99ece4b307ff" + }, + { + "id": "urn:li:container:51904fc8cd5cc729bc630decff284525", + "urn": "urn:li:container:51904fc8cd5cc729bc630decff284525" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:51904fc8cd5cc729bc630decff284525" + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "view_definition": " SELECT metadata_aspect_v2.urn,\n metadata_aspect_v2.aspect\n FROM metadata_aspect_v2\n WHERE (metadata_aspect_v2.version = 0);", + "is_view": "True" + }, + "name": "metadata_aspect_view", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "postgrestest.public.metadata_aspect_view", + "platform": "urn:li:dataPlatform:postgres", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "urn", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(length=500)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "aspect", + "nullable": true, + "type": { + "type": 
{ + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(length=200)", + "recursive": false, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": " SELECT metadata_aspect_v2.urn,\n metadata_aspect_v2.aspect\n FROM metadata_aspect_v2\n WHERE (metadata_aspect_v2.version = 0);", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:a6097853edba03be190d99ece4b307ff", + "urn": "urn:li:container:a6097853edba03be190d99ece4b307ff" + }, + { + "id": "urn:li:container:51904fc8cd5cc729bc630decff284525", + "urn": "urn:li:container:51904fc8cd5cc729bc630decff284525" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1646575200000, + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { @@ -391,31 +583,57 @@ }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "upstreamLineage", "aspect": { "json": { - "path": [ + "upstreams": [ { - "id": "urn:li:container:a6097853edba03be190d99ece4b307ff", - "urn": "urn:li:container:a6097853edba03be190d99ece4b307ff" + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD),aspect)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD),aspect)" + ], + "confidenceScore": 1.0 }, { - "id": "urn:li:container:51904fc8cd5cc729bc630decff284525", - "urn": "urn:li:container:51904fc8cd5cc729bc630decff284525" + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_v2,PROD),urn)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgrestest.public.metadata_aspect_view,PROD),urn)" + ], + "confidenceScore": 1.0 } ] } }, "systemMetadata": { "lastObserved": 1646575200000, - "runId": "postgres-test" + "runId": "postgres-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/postgres/postgres_to_file_with_db_estimate_row_count.yml b/metadata-ingestion/tests/integration/postgres/postgres_to_file_with_db_estimate_row_count.yml index a489877d52a23..4a2cc543f2d01 100644 --- a/metadata-ingestion/tests/integration/postgres/postgres_to_file_with_db_estimate_row_count.yml +++ b/metadata-ingestion/tests/integration/postgres/postgres_to_file_with_db_estimate_row_count.yml @@ -13,7 +13,7 @@ source: profile_table_row_count_estimate_only: true turn_off_expensive_profiling_metrics: true catch_exceptions: true - include_views: false + include_views: true sink: type: file config: diff --git a/metadata-ingestion/tests/integration/snowflake/test_snowflake_failures.py b/metadata-ingestion/tests/integration/snowflake/test_snowflake_failures.py index cd53b8f7db4f6..4b0dd2b1045a3 100644 --- a/metadata-ingestion/tests/integration/snowflake/test_snowflake_failures.py +++ b/metadata-ingestion/tests/integration/snowflake/test_snowflake_failures.py @@ -287,8 +287,9 @@ def test_snowflake_unexpected_snowflake_view_lineage_error_causes_pipeline_warni SnowflakeV2Config, cast(PipelineConfig, snowflake_pipeline_config1).source.config, ) + config.include_table_lineage = True config.include_view_lineage = True - config.incremental_lineage = False + pipeline = Pipeline(snowflake_pipeline_config1) pipeline.run() pipeline.raise_from_status() # pipeline should not fail diff --git a/metadata-ingestion/tests/integration/trino/trino_hive_mces_golden.json b/metadata-ingestion/tests/integration/trino/trino_hive_mces_golden.json index 19961e48b4a33..c43223c68a6b6 100644 --- a/metadata-ingestion/tests/integration/trino/trino_hive_mces_golden.json +++ b/metadata-ingestion/tests/integration/trino/trino_hive_mces_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -99,7 +104,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -114,7 +120,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -129,7 +136,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -146,7 +154,8 @@ }, 
"systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -161,7 +170,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -181,7 +191,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -196,7 +207,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -219,7 +231,7 @@ "numrows": "1", "rawdatasize": "32", "totalsize": "33", - "transient_lastddltime": "1688422059" + "transient_lastddltime": "1698223433" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -315,7 +327,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -356,7 +370,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -371,7 +386,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -392,7 +408,7 @@ "numrows": "0", "rawdatasize": "0", "totalsize": "0", - "transient_lastddltime": "1688422063" + "transient_lastddltime": "1698223435" }, "name": "map_test", "tags": [] @@ -454,7 +470,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -471,7 +488,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -495,7 +513,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -510,7 +529,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -531,7 +551,7 @@ "numrows": "0", "rawdatasize": "0", "totalsize": "0", - "transient_lastddltime": "1688422062" + "transient_lastddltime": "1698223435" }, "name": "nested_struct_test", "tags": [] @@ -642,7 +662,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -659,7 +680,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -683,7 +705,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -698,7 +721,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -714,7 +738,7 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastddltime": 
"1688421792" + "transient_lastddltime": "1698223429" }, "name": "pokes", "tags": [] @@ -784,7 +808,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -801,7 +826,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -825,7 +851,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -840,7 +867,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -861,7 +889,7 @@ "numrows": "0", "rawdatasize": "0", "totalsize": "0", - "transient_lastddltime": "1688421808" + "transient_lastddltime": "1698223431" }, "name": "struct_test", "tags": [] @@ -950,7 +978,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -967,7 +996,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -991,7 +1021,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1006,7 +1037,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1024,7 +1056,7 @@ "customProperties": { "numfiles": "0", "totalsize": "0", - "transient_lastddltime": "1688422062" + "transient_lastddltime": "1698223435" }, "name": "struct_test_view_materialized", "tags": [] @@ -1113,7 +1145,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1130,7 +1163,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1154,7 +1188,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1169,7 +1204,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1190,7 +1226,7 @@ "numrows": "0", "rawdatasize": "0", "totalsize": "0", - "transient_lastddltime": "1688421807" + "transient_lastddltime": "1698223431" }, "name": "_test_table_underscore", "tags": [] @@ -1248,7 +1284,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1265,7 +1302,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1289,7 +1327,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1304,7 +1343,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1325,7 +1365,7 @@ 
"numrows": "0", "rawdatasize": "0", "totalsize": "0", - "transient_lastddltime": "1688422062" + "transient_lastddltime": "1698223435" }, "name": "union_test", "tags": [] @@ -1467,7 +1507,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1484,7 +1525,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,7 +1550,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1523,7 +1566,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1539,7 +1583,7 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastddltime": "1688422062", + "transient_lastddltime": "1698223435", "view_definition": "SELECT \"property_id\", \"service\"\nFROM \"db1\".\"array_struct_test\"", "is_view": "True" }, @@ -1634,7 +1678,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1651,7 +1696,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1668,7 +1714,57 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:trino,hivedb.db1.array_struct_test_view,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:trino,hivedb.db1.array_struct_test,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:trino,hivedb.db1.array_struct_test,PROD),property_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:trino,hivedb.db1.array_struct_test_view,PROD),property_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:trino,hivedb.db1.array_struct_test,PROD),service)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:trino,hivedb.db1.array_struct_test_view,PROD),service)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1632398400000, + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1692,7 +1788,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "trino-hive-test" + "runId": "trino-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/api/source_helpers/test_incremental_lineage_helper.py b/metadata-ingestion/tests/unit/api/source_helpers/test_incremental_lineage_helper.py index 54a22d860285c..e8485106c6a81 100644 --- 
a/metadata-ingestion/tests/unit/api/source_helpers/test_incremental_lineage_helper.py +++ b/metadata-ingestion/tests/unit/api/source_helpers/test_incremental_lineage_helper.py @@ -104,6 +104,27 @@ def test_incremental_table_lineage(tmp_path, pytestconfig): ) +def test_incremental_table_lineage_empty_upstreams(tmp_path, pytestconfig): + + urn = make_dataset_urn(platform, "dataset1") + aspect = make_lineage_aspect( + "dataset1", + upstreams=[], + ) + + processed_wus = auto_incremental_lineage( + graph=None, + incremental_lineage=True, + stream=[ + MetadataChangeProposalWrapper( + entityUrn=urn, aspect=aspect, systemMetadata=system_metadata + ).as_workunit() + ], + ) + + assert [wu.metadata for wu in processed_wus] == [] + + @pytest.mark.parametrize( "gms_aspect,current_aspect,output_aspect", [ From a96a512166564cf9c40af4b83e7138dcb48c914d Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Thu, 26 Oct 2023 18:46:10 +0200 Subject: [PATCH 002/792] fix(ingest/bigquery): Fixing lineage filter query (#9114) --- .../ingestion/source/bigquery_v2/bigquery_config.py | 1 + .../datahub/ingestion/source/bigquery_v2/lineage.py | 8 ++++++-- .../src/datahub/ingestion/source/bigquery_v2/usage.py | 10 ++++++---- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index 6203192769750..f762d451849ab 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -309,6 +309,7 @@ def backward_compatibility_configs_set(cls, values: Dict) -> Dict: "dataset_pattern is not set but schema_pattern is set, using schema_pattern as dataset_pattern. schema_pattern will be deprecated, please use dataset_pattern instead." 
) values["dataset_pattern"] = schema_pattern + dataset_pattern = schema_pattern elif ( dataset_pattern != AllowDenyPattern.allow_all() and schema_pattern != AllowDenyPattern.allow_all() diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index aa462435b8105..e9acf5ea86044 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -20,6 +20,7 @@ from google.cloud.datacatalog import lineage_v1 from google.cloud.logging_v2.client import Client as GCPLoggingClient +from datahub.configuration.pattern_utils import is_schema_allowed from datahub.emitter import mce_builder from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.workunit import MetadataWorkUnit @@ -683,8 +684,11 @@ def _create_lineage_map( self.report.num_skipped_lineage_entries_missing_data[e.project_id] += 1 continue - if not self.config.dataset_pattern.allowed( - destination_table.table_identifier.dataset + if not is_schema_allowed( + self.config.dataset_pattern, + destination_table.table_identifier.dataset, + destination_table.table_identifier.project_id, + self.config.match_fully_qualified_names, ) or not self.config.table_pattern.allowed( destination_table.table_identifier.get_table_name() ): diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py index 7fc38991e5928..65b559550ffc5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py @@ -21,6 +21,7 @@ import humanfriendly +from datahub.configuration.pattern_utils import is_schema_allowed from datahub.configuration.time_window_config import ( BaseTimeWindowConfig, get_time_bucket, @@ -335,10 +336,11 @@ def get_time_window(self) -> Tuple[datetime, datetime]: def _is_table_allowed(self, table_ref: Optional[BigQueryTableRef]) -> bool: return ( table_ref is not None - and self.config.dataset_pattern.allowed( - f"{table_ref.table_identifier.project_id}.{table_ref.table_identifier.dataset}" - if self.config.match_fully_qualified_names - else table_ref.table_identifier.dataset + and is_schema_allowed( + self.config.dataset_pattern, + table_ref.table_identifier.dataset, + table_ref.table_identifier.project_id, + self.config.match_fully_qualified_names, ) and self.config.table_pattern.allowed(str(table_ref.table_identifier)) ) From 852267972c8efc1ceb5a0cbd71594d7ea2529d49 Mon Sep 17 00:00:00 2001 From: "nicholas.fwang" Date: Fri, 27 Oct 2023 01:57:43 +0900 Subject: [PATCH 003/792] refactor(ingestion/mongodb): Add platform_instance to mongodb (#8663) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/mongodb.py | 16 +++++++++++++--- .../integration/mongodb/mongodb_mces_golden.json | 16 ++++++++-------- .../tests/integration/mongodb/test_mongodb.py | 1 + 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py index f02b6845e40b5..890c5c64bd5e6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py @@ -11,7 +11,11 @@ from pymongo.mongo_client import MongoClient from datahub.configuration.common import AllowDenyPattern -from 
datahub.configuration.source_common import EnvConfigMixin +from datahub.configuration.source_common import ( + EnvConfigMixin, + PlatformInstanceConfigMixin, +) +from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SourceCapability, @@ -55,7 +59,7 @@ DENY_DATABASE_LIST = set(["admin", "config", "local"]) -class MongoDBConfig(EnvConfigMixin): +class MongoDBConfig(PlatformInstanceConfigMixin, EnvConfigMixin): # See the MongoDB authentication docs for details and examples. # https://pymongo.readthedocs.io/en/stable/examples/authentication.html connect_uri: str = Field( @@ -199,6 +203,7 @@ def construct_schema_pymongo( @platform_name("MongoDB") @config_class(MongoDBConfig) @support_status(SupportStatus.CERTIFIED) +@capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default") @capability(SourceCapability.SCHEMA_METADATA, "Enabled by default") @dataclass class MongoDBSource(Source): @@ -320,7 +325,12 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: self.report.report_dropped(dataset_name) continue - dataset_urn = f"urn:li:dataset:(urn:li:dataPlatform:{platform},{dataset_name},{self.config.env})" + dataset_urn = make_dataset_urn_with_platform_instance( + platform=platform, + name=dataset_name, + env=self.config.env, + platform_instance=self.config.platform_instance, + ) dataset_snapshot = DatasetSnapshot( urn=dataset_urn, diff --git a/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json b/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json index 1f662cfe514e2..e16101b137ac9 100644 --- a/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json +++ b/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json @@ -2,7 +2,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.emptyCollection,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.emptyCollection,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { @@ -41,7 +41,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.firstCollection,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.firstCollection,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { @@ -345,7 +345,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.largeCollection,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.largeCollection,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { @@ -3988,7 +3988,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.secondCollection,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.secondCollection,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { @@ -4135,7 +4135,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.emptyCollection,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.emptyCollection,PROD)", "changeType": "UPSERT", "aspectName": 
"status", "aspect": { @@ -4150,7 +4150,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.firstCollection,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.firstCollection,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4165,7 +4165,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.largeCollection,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.largeCollection,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4180,7 +4180,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,mngdb.secondCollection,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.secondCollection,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { diff --git a/metadata-ingestion/tests/integration/mongodb/test_mongodb.py b/metadata-ingestion/tests/integration/mongodb/test_mongodb.py index 5228c21223e24..56fb471d4c9f1 100644 --- a/metadata-ingestion/tests/integration/mongodb/test_mongodb.py +++ b/metadata-ingestion/tests/integration/mongodb/test_mongodb.py @@ -25,6 +25,7 @@ def test_mongodb_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time "username": "mongoadmin", "password": "examplepass", "maxDocumentSize": 25000, + "platform_instance": "instance", }, }, "sink": { From ce6f833be444497972f17fd8bfe170f00af4bca6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mert=20Tun=C3=A7?= Date: Fri, 27 Oct 2023 01:06:37 +0300 Subject: [PATCH 004/792] fix(kafka-setup): Don't set truststore pass for PEM files (#8656) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- docker/kafka-setup/kafka-setup.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/kafka-setup/kafka-setup.sh b/docker/kafka-setup/kafka-setup.sh index 629e9bc9484ee..b5024e49e59f1 100755 --- a/docker/kafka-setup/kafka-setup.sh +++ b/docker/kafka-setup/kafka-setup.sh @@ -36,7 +36,9 @@ if [[ $KAFKA_PROPERTIES_SECURITY_PROTOCOL == "SSL" ]]; then fi if [[ -n $KAFKA_PROPERTIES_SSL_TRUSTSTORE_LOCATION ]]; then echo "ssl.truststore.location=$KAFKA_PROPERTIES_SSL_TRUSTSTORE_LOCATION" >> $CONNECTION_PROPERTIES_PATH - echo "ssl.truststore.password=$KAFKA_PROPERTIES_SSL_TRUSTSTORE_PASSWORD" >> $CONNECTION_PROPERTIES_PATH + if [[ $KAFKA_PROPERTIES_SSL_TRUSTSTORE_TYPE != "PEM" ]]; then + echo "ssl.truststore.password=$KAFKA_PROPERTIES_SSL_TRUSTSTORE_PASSWORD" >> $CONNECTION_PROPERTIES_PATH + fi if [[ -n $KAFKA_PROPERTIES_SSL_TRUSTSTORE_TYPE ]]; then echo "ssl.truststore.type=$KAFKA_PROPERTIES_SSL_TRUSTSTORE_TYPE" >> $CONNECTION_PROPERTIES_PATH fi From 12f6fe0f906dd21fbc3985cfd13ceac4fc9ac8f0 Mon Sep 17 00:00:00 2001 From: Tony Ouyang Date: Thu, 26 Oct 2023 15:07:36 -0700 Subject: [PATCH 005/792] fix(ingest): Fix roll back failure when REST_API_AUTHORIZATION_ENABLED is set to true (#9092) --- metadata-service/war/src/main/resources/boot/policies.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json index 410596cc30cbe..18cb48bfcf1f0 100644 --- a/metadata-service/war/src/main/resources/boot/policies.json +++ b/metadata-service/war/src/main/resources/boot/policies.json @@ -64,7 +64,8 @@ "GET_TIMELINE_PRIVILEGE", "PRODUCE_PLATFORM_EVENT_PRIVILEGE", "MANAGE_DATA_PRODUCTS", - 
"MANAGE_GLOBAL_OWNERSHIP_TYPES" + "MANAGE_GLOBAL_OWNERSHIP_TYPES", + "DELETE_ENTITY" ], "displayName":"Root User - Edit and View All Resources", "description":"Grants full edit and view privileges for all resources to root 'datahub' root user.", @@ -263,7 +264,8 @@ "GET_ENTITY_PRIVILEGE", "GET_TIMELINE_PRIVILEGE", "PRODUCE_PLATFORM_EVENT_PRIVILEGE", - "MANAGE_DATA_PRODUCTS" + "MANAGE_DATA_PRODUCTS", + "DELETE_ENTITY" ], "displayName":"Admins - Metadata Policy", "description":"Admins have all metadata privileges.", From 1ac831f07aa2bdab555acf50431f6466bb291f61 Mon Sep 17 00:00:00 2001 From: Raj Tekal Date: Thu, 26 Oct 2023 19:33:09 -0400 Subject: [PATCH 006/792] (fix): Avoid java.util.ConcurrentModificationException (#9090) Co-authored-by: Pedro Silva --- .../authorization/DataHubAuthorizer.java | 105 +++++++++++------- 1 file changed, 64 insertions(+), 41 deletions(-) diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index 4553139e3ca54..e30fb93109915 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -19,6 +19,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -53,6 +55,7 @@ public enum AuthorizationMode { // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. private final Map> _policyCache = new HashMap<>(); // Shared Policy Cache. + private final ReadWriteLock _lockPolicyCache = new ReentrantReadWriteLock(); private final ScheduledExecutorService _refreshExecutorService = Executors.newScheduledThreadPool(1); private final PolicyRefreshRunnable _policyRefreshRunnable; @@ -71,7 +74,7 @@ public DataHubAuthorizer( _systemAuthentication = Objects.requireNonNull(systemAuthentication); _mode = Objects.requireNonNull(mode); _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache); + _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, _lockPolicyCache); _refreshExecutorService.scheduleAtFixedRate(_policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); } @@ -90,31 +93,41 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request Optional resolvedResourceSpec = request.getResourceSpec().map(_entitySpecResolver::resolve); - // 1. Fetch the policies relevant to the requested privilege. - final List policiesToEvaluate = _policyCache.getOrDefault(request.getPrivilege(), new ArrayList<>()); - - // 2. Evaluate each policy. - for (DataHubPolicyInfo policy : policiesToEvaluate) { - if (isRequestGranted(policy, request, resolvedResourceSpec)) { - // Short circuit if policy has granted privileges to this actor. 
-        return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW,
-            String.format("Granted by policy with type: %s", policy.getType()));
+    _lockPolicyCache.readLock().lock();
+    try {
+      // 1. Fetch the policies relevant to the requested privilege.
+      final List<DataHubPolicyInfo> policiesToEvaluate = _policyCache.getOrDefault(request.getPrivilege(), new ArrayList<>());
+
+      // 2. Evaluate each policy.
+      for (DataHubPolicyInfo policy : policiesToEvaluate) {
+        if (isRequestGranted(policy, request, resolvedResourceSpec)) {
+          // Short circuit if policy has granted privileges to this actor.
+          return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW,
+              String.format("Granted by policy with type: %s", policy.getType()));
+        }
       }
+      return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null);
+    } finally {
+      _lockPolicyCache.readLock().unlock();
     }
-    return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null);
   }

   public List<String> getGrantedPrivileges(final String actor, final Optional<EntitySpec> resourceSpec) {

-    // 1. Fetch all policies
-    final List<DataHubPolicyInfo> policiesToEvaluate = _policyCache.getOrDefault(ALL, new ArrayList<>());
+    _lockPolicyCache.readLock().lock();
+    try {
+      // 1. Fetch all policies
+      final List<DataHubPolicyInfo> policiesToEvaluate = _policyCache.getOrDefault(ALL, new ArrayList<>());

-    Urn actorUrn = UrnUtils.getUrn(actor);
-    final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor));
+      Urn actorUrn = UrnUtils.getUrn(actor);
+      final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor));

-    Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve);
+      Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve);

-    return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec);
+      return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec);
+    } finally {
+      _lockPolicyCache.readLock().unlock();
+    }
   }

   /**
@@ -124,36 +137,42 @@ public List<String> getGrantedPrivileges(final String actor, final Optional<EntitySpec> resourceSpec) {

-    // Step 1: Find policies granting the privilege.
-    final List<DataHubPolicyInfo> policiesToEvaluate = _policyCache.getOrDefault(privilege, new ArrayList<>());
-
-    Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve);

     final List<Urn> authorizedUsers = new ArrayList<>();
     final List<Urn> authorizedGroups = new ArrayList<>();
     boolean allUsers = false;
     boolean allGroups = false;

-    // Step 2: For each policy, determine whether the resource is a match.
-    for (DataHubPolicyInfo policy : policiesToEvaluate) {
-      if (!PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) {
-        // Policy is not active, skip.
-        continue;
-      }
+    _lockPolicyCache.readLock().lock();
+    try {
+      // Step 1: Find policies granting the privilege.
+      final List<DataHubPolicyInfo> policiesToEvaluate = _policyCache.getOrDefault(privilege, new ArrayList<>());

-      final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec);
+      Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve);

-      // Step 3: For each matching policy, add actors that are authorized.
-      authorizedUsers.addAll(matchingActors.getUsers());
-      authorizedGroups.addAll(matchingActors.getGroups());
-      if (matchingActors.allUsers()) {
-        allUsers = true;
-      }
-      if (matchingActors.allGroups()) {
-        allGroups = true;
+
+      // Step 2: For each policy, determine whether the resource is a match.
+      for (DataHubPolicyInfo policy : policiesToEvaluate) {
+        if (!PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) {
+          // Policy is not active, skip.
+          continue;
+        }
+
+        final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec);
+
+        // Step 3: For each matching policy, add actors that are authorized.
+        authorizedUsers.addAll(matchingActors.getUsers());
+        authorizedGroups.addAll(matchingActors.getGroups());
+        if (matchingActors.allUsers()) {
+          allUsers = true;
+        }
+        if (matchingActors.allGroups()) {
+          allGroups = true;
+        }
       }
+    } finally {
+      _lockPolicyCache.readLock().unlock();
     }

-    // Step 4: Return all authorized users and groups.
     return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, allUsers, allGroups);
   }

@@ -228,6 +247,7 @@ static class PolicyRefreshRunnable implements Runnable {
     private final Authentication _systemAuthentication;
     private final PolicyFetcher _policyFetcher;
     private final Map<String, List<DataHubPolicyInfo>> _policyCache;
+    private final ReadWriteLock _lockPolicyCache;

     @Override
     public void run() {
@@ -253,10 +273,13 @@ public void run() {
             "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. start: {}, count: {}", start, count, e);
         return;
       }
-      synchronized (_policyCache) {
-        _policyCache.clear();
-        _policyCache.putAll(newCache);
-      }
+      }
+      _lockPolicyCache.writeLock().lock();
+      try {
+        _policyCache.clear();
+        _policyCache.putAll(newCache);
+      } finally {
+        _lockPolicyCache.writeLock().unlock();
       }
       log.debug(String.format("Successfully fetched %s policies.", total));
     } catch (Exception e) {

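Patch 006 above serializes access to the shared policy cache with java.util.concurrent.locks.ReentrantReadWriteLock: every lookup takes the shared read lock, and the refresh thread swaps the cache contents under the exclusive write lock. A minimal, self-contained sketch of the same locking pattern follows; PolicyCacheHolder and its members are invented names for illustration, not DataHub classes.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

class PolicyCacheHolder {
  private final Map<String, List<String>> cache = new HashMap<>();
  private final ReadWriteLock lock = new ReentrantReadWriteLock();

  // Readers share the read lock: concurrent lookups do not block each other,
  // and none of them can observe the map mid-mutation.
  List<String> lookup(String privilege) {
    lock.readLock().lock();
    try {
      return new ArrayList<>(cache.getOrDefault(privilege, new ArrayList<>()));
    } finally {
      lock.readLock().unlock();
    }
  }

  // The refresh thread holds the exclusive write lock while clearing and
  // reloading, so no reader can iterate the map during the swap, which is
  // the race the commit subject names.
  void refresh(Map<String, List<String>> newEntries) {
    lock.writeLock().lock();
    try {
      cache.clear();
      cache.putAll(newEntries);
    } finally {
      lock.writeLock().unlock();
    }
  }
}

Under this scheme many authorize() calls can read concurrently, while the periodic refresh still gets exclusive access for the clear-and-reload.
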
From cf617d77f383a19bd6a9bce00bb2cfbd6a226e55 Mon Sep 17 00:00:00 2001
From: David Sanchez
Date: Fri, 27 Oct 2023 07:01:30 +0200
Subject: [PATCH 007/792] fix(ingest/bigquery): fix extracting comments from
 complex types (#8950)

Co-authored-by: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com>
---
 .../datahub/ingestion/source/bigquery_v2/bigquery.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py
index 692d8c4f81bb6..6959a48313010 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py
@@ -1050,8 +1050,14 @@ def gen_schema_fields(self, columns: List[BigqueryColumn]) -> List[SchemaField]:
         for idx, field in enumerate(schema_fields):
             # Remove all the [version=2.0].[type=struct]. tags to get the field path
             if (
-                re.sub(r"\[.*?\]\.", "", field.fieldPath, 0, re.MULTILINE)
-                == col.field_path
+                re.sub(
+                    r"\[.*?\]\.",
+                    "",
+                    field.fieldPath.lower(),
+                    0,
+                    re.MULTILINE,
+                )
+                == col.field_path.lower()
             ):
                 field.description = col.comment
                 schema_fields[idx] = field

From cc7511501b051b9a9f66dbcc4dc4ab16ce2668e5 Mon Sep 17 00:00:00 2001
From: Hyejin Yoon <0327jane@gmail.com>
Date: Fri, 27 Oct 2023 19:42:53 +0900
Subject: [PATCH 008/792] docs: add versions 0.12.0 (#9125)

---
 docs-website/versions.json | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs-website/versions.json b/docs-website/versions.json
index a5493c26a4c65..a66607b67ddd5 100644
--- a/docs-website/versions.json
+++ b/docs-website/versions.json
@@ -1,4 +1,5 @@
 [
+  "0.12.0",
   "0.11.0",
   "0.10.5"
 ]

From 07a5e4c81b9e7b46faf9dbc830c9bac9648e5161 Mon Sep 17 00:00:00 2001
From: Chris Collins
Date: Fri, 27 Oct 2023 07:52:24 -0400
Subject: [PATCH 009/792] fix(ui): Fix filtering logic for generating OR filters
 everywhere (#9116)

---
 .../src/app/search/useGetSearchQueryInputs.ts | 12 ++----
 .../utils/__tests__/generateOrFilters.test.ts | 38 ++++++++++++++-----
 .../src/app/search/utils/generateOrFilters.ts | 16 +++++---
 3 files changed, 43 insertions(+), 23 deletions(-)

diff --git a/datahub-web-react/src/app/search/useGetSearchQueryInputs.ts b/datahub-web-react/src/app/search/useGetSearchQueryInputs.ts
index 05419e5abed35..9a3af8fb8d56c 100644
--- a/datahub-web-react/src/app/search/useGetSearchQueryInputs.ts
+++ b/datahub-web-react/src/app/search/useGetSearchQueryInputs.ts
@@ -3,7 +3,7 @@ import { useLocation, useParams } from 'react-router';
 import { useMemo } from 'react';
 import { FacetFilterInput, EntityType } from '../../types.generated';
 import { useEntityRegistry } from '../useEntityRegistry';
-import { ENTITY_FILTER_NAME, FILTER_DELIMITER, UnionType } from './utils/constants';
+import { ENTITY_FILTER_NAME, UnionType } from './utils/constants';
 import { useUserContext } from '../context/useUserContext';
 import useFilters from './utils/useFilters';
 import { generateOrFilters } from './utils/generateOrFilters';
@@ -27,12 +27,6 @@ export default function useGetSearchQueryInputs(excludedFilterFields?: Array<string>) {
     const filters: Array<FacetFilterInput> = useFilters(params);
-    const nonNestedFilters = filters.filter(
-        (f) => !f.field.includes(FILTER_DELIMITER) && !excludedFilterFields?.includes(f.field),
-    );
-    const nestedFilters = filters.filter(
-        (f) => f.field.includes(FILTER_DELIMITER) && !excludedFilterFields?.includes(f.field),
-    );
     const entityFilters: Array<EntityType> = useMemo(
         () => filters
@@ -43,8 +37,8 @@ export default function useGetSearchQueryInputs(excludedFilterFields?: Array<string>) {
     );

     const orFilters = useMemo(
-        () => generateOrFilters(unionType, nonNestedFilters, nestedFilters),
-        [nonNestedFilters, nestedFilters, unionType],
+        () => generateOrFilters(unionType, filters, excludedFilterFields),
+        [filters, excludedFilterFields, unionType],
     );

     return { entityFilters, query, unionType, filters, orFilters, viewUrn, page, activeType, sortInput };
 }
diff --git a/datahub-web-react/src/app/search/utils/__tests__/generateOrFilters.test.ts b/datahub-web-react/src/app/search/utils/__tests__/generateOrFilters.test.ts
index 505c50efb289f..fd5a5691b454e 100644
--- a/datahub-web-react/src/app/search/utils/__tests__/generateOrFilters.test.ts
+++ b/datahub-web-react/src/app/search/utils/__tests__/generateOrFilters.test.ts
@@ -1,7 +1,7 @@
 import {
     DOMAINS_FILTER_NAME,
     ENTITY_SUB_TYPE_FILTER_NAME,
-    ENTITY_TYPE_FILTER_NAME,
+    ENTITY_FILTER_NAME,
     TAGS_FILTER_NAME,
     UnionType,
 } from '../constants';
import { generateOrFilters } from '../generateOrFilters'; describe('generateOrFilters', () => { it('should generate orFilters with UnionType.AND', () => { const filters = [ - { field: ENTITY_TYPE_FILTER_NAME, values: ['DATASET', 'CONTAINER'] }, + { field: ENTITY_FILTER_NAME, values: ['DATASET', 'CONTAINER'] }, { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, ]; const orFilters = generateOrFilters(UnionType.AND, filters); @@ -24,7 +24,7 @@ describe('generateOrFilters', () => { it('should generate orFilters with UnionType.OR', () => { const filters = [ - { field: ENTITY_TYPE_FILTER_NAME, values: ['DATASET', 'CONTAINER'] }, + { field: ENTITY_FILTER_NAME, values: ['DATASET', 'CONTAINER'] }, { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, ]; const orFilters = generateOrFilters(UnionType.OR, filters); @@ -43,17 +43,23 @@ describe('generateOrFilters', () => { const filters = [ { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, { field: DOMAINS_FILTER_NAME, values: ['urn:li:domains:domain1'] }, + { field: ENTITY_SUB_TYPE_FILTER_NAME, values: ['CONTAINER', 'DATASET␞table'] }, ]; - const nestedFilters = [{ field: ENTITY_SUB_TYPE_FILTER_NAME, values: ['CONTAINER', 'DATASET␞table'] }]; - const orFilters = generateOrFilters(UnionType.AND, filters, nestedFilters); + // const nestedFilters = [{ field: ENTITY_SUB_TYPE_FILTER_NAME, values: ['CONTAINER', 'DATASET␞table'] }]; + const orFilters = generateOrFilters(UnionType.AND, filters); expect(orFilters).toMatchObject([ { - and: [...filters, { field: '_entityType', values: ['CONTAINER'] }], + and: [ + { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, + { field: DOMAINS_FILTER_NAME, values: ['urn:li:domains:domain1'] }, + { field: '_entityType', values: ['CONTAINER'] }, + ], }, { and: [ - ...filters, + { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, + { field: DOMAINS_FILTER_NAME, values: ['urn:li:domains:domain1'] }, { field: '_entityType', values: ['DATASET'] }, { field: 'typeNames', values: ['table'] }, ], @@ -65,9 +71,9 @@ describe('generateOrFilters', () => { const filters = [ { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, { field: DOMAINS_FILTER_NAME, values: ['urn:li:domains:domain1'] }, + { field: ENTITY_SUB_TYPE_FILTER_NAME, values: ['CONTAINER', 'DATASET␞table'] }, ]; - const nestedFilters = [{ field: ENTITY_SUB_TYPE_FILTER_NAME, values: ['CONTAINER', 'DATASET␞table'] }]; - const orFilters = generateOrFilters(UnionType.OR, filters, nestedFilters); + const orFilters = generateOrFilters(UnionType.OR, filters); expect(orFilters).toMatchObject([ { @@ -87,4 +93,18 @@ describe('generateOrFilters', () => { }, ]); }); + + it('should generate orFilters and exclude filters with a provided exclude field', () => { + const filters = [ + { field: ENTITY_FILTER_NAME, values: ['DATASET', 'CONTAINER'] }, + { field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }, + ]; + const orFilters = generateOrFilters(UnionType.AND, filters, [ENTITY_FILTER_NAME]); + + expect(orFilters).toMatchObject([ + { + and: [{ field: TAGS_FILTER_NAME, values: ['urn:li:tag:tag1'] }], + }, + ]); + }); }); diff --git a/datahub-web-react/src/app/search/utils/generateOrFilters.ts b/datahub-web-react/src/app/search/utils/generateOrFilters.ts index b665a2e0f0495..fa2939b3436f5 100644 --- a/datahub-web-react/src/app/search/utils/generateOrFilters.ts +++ b/datahub-web-react/src/app/search/utils/generateOrFilters.ts @@ -26,20 +26,26 @@ function generateInputWithNestedFilters(filters: FacetFilterInput[], nestedFilte export function 
generateOrFilters(
     unionType: UnionType,
     filters: FacetFilterInput[],
-    nestedFilters: FacetFilterInput[] = [],
+    excludedFilterFields: string[] = [],
 ): AndFilterInput[] {
-    if ((filters?.length || 0) === 0 && nestedFilters.length === 0) {
+    if ((filters?.length || 0) === 0) {
         return [];
     }
+    const nonNestedFilters = filters.filter(
+        (f) => !f.field.includes(FILTER_DELIMITER) && !excludedFilterFields?.includes(f.field),
+    );
+    const nestedFilters = filters.filter(
+        (f) => f.field.includes(FILTER_DELIMITER) && !excludedFilterFields?.includes(f.field),
+    );

     if (unionType === UnionType.OR) {
         const orFiltersWithNestedFilters = generateInputWithNestedFilters([], nestedFilters);
-        const orFilters = filters.map((filter) => ({
+        const orFilters = nonNestedFilters.map((filter) => ({
             and: [filter],
         }));
         return [...orFilters, ...orFiltersWithNestedFilters];
     }

-    const andFiltersWithNestedFilters = generateInputWithNestedFilters(filters, nestedFilters);
+    const andFiltersWithNestedFilters = generateInputWithNestedFilters(nonNestedFilters, nestedFilters);

     if (andFiltersWithNestedFilters.length) {
         return andFiltersWithNestedFilters;
@@ -47,7 +53,7 @@

     return [
         {
-            and: filters,
+            and: nonNestedFilters,
         },
     ];
 }

From 379ffc8d9457bb86029383e857aaa41eae40f329 Mon Sep 17 00:00:00 2001
From: Pedro Silva
Date: Fri, 27 Oct 2023 17:17:27 +0100
Subject: [PATCH 010/792] build(release): Update files for 0.12.0 release
 (#9130)

---
 .../src/app/ingest/source/builder/NameSourceStep.tsx          | 2 +-
 gradle/versioning/versioning.gradle                           | 2 +-
 .../tests/cypress/cypress/e2e/mutations/managed_ingestion.js  | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/datahub-web-react/src/app/ingest/source/builder/NameSourceStep.tsx b/datahub-web-react/src/app/ingest/source/builder/NameSourceStep.tsx
index f4c048bcaf0d2..3092364bb8bdd 100644
--- a/datahub-web-react/src/app/ingest/source/builder/NameSourceStep.tsx
+++ b/datahub-web-react/src/app/ingest/source/builder/NameSourceStep.tsx
@@ -190,7 +190,7 @@ export const NameSourceStep = ({ state, updateState, prev, submit }: StepProps)
                     <Input
-                        placeholder="0.10.5"
+                        placeholder="0.12.0"
                         onChange={(event) => setVersion(event.target.value)}
                     />
diff --git a/gradle/versioning/versioning.gradle b/gradle/versioning/versioning.gradle
index 1fac894d165a8..39a8a3faf8011 100644
--- a/gradle/versioning/versioning.gradle
+++ b/gradle/versioning/versioning.gradle
@@ -21,7 +21,7 @@ Produces the following variables and supports token replacement
 import org.apache.tools.ant.filters.ReplaceTokens

 def detailedVersionString = "0.0.0-unknown-SNAPSHOT"
-def cliMajorVersion = "0.10.5" // base default cli major version
+def cliMajorVersion = "0.12.0" // base default cli major version
 def snapshotVersion = false
 if (project.hasProperty("releaseVersion")) {
   version = releaseVersion
diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js b/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js
index 24a24cc21138d..3d052695e818f 100644
--- a/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js
+++ b/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js
@@ -7,7 +7,7 @@ describe("run managed ingestion", () => {
   it("create run managed ingestion source", () => {
     let number = Math.floor(Math.random() * 100000);
     let testName = `cypress test source ${number}`
-    let cli_version = "0.10.5.4";
+    let cli_version = "0.12.0";
     cy.login();
     cy.goToIngestionPage();
     cy.clickOptionWithText("Create new source");

From 5166d90433123891bc8f9555d4c6660a2b5c1451 Mon Sep 17 00:00:00 2001
From: Mayuri 
Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Fri, 27 Oct 2023 22:49:51 +0530 Subject: [PATCH 011/792] fix(ingest/sql-server): update queries to use escaped procedure name (#9127) --- .../ingestion/source/sql/mssql/source.py | 4 +- .../golden_mces_mssql_no_db_to_file.json | 774 +++++++++++------- .../golden_mces_mssql_no_db_with_filter.json | 471 +++++++---- .../golden_mces_mssql_to_file.json | 471 +++++++---- ...golden_mces_mssql_with_lower_case_urn.json | 365 ++++++--- .../integration/sql_server/setup/setup.sql | 2 +- 6 files changed, 1324 insertions(+), 763 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index 685d4fb3074c9..710825c8ba55d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -530,7 +530,7 @@ def _get_procedure_inputs( def _get_procedure_code( conn: Connection, procedure: StoredProcedure ) -> Tuple[Optional[str], Optional[str]]: - query = f"EXEC [{procedure.db}].dbo.sp_helptext '{procedure.full_name}'" + query = f"EXEC [{procedure.db}].dbo.sp_helptext '{procedure.escape_full_name}'" try: code_data = conn.execute(query) except ProgrammingError: @@ -567,7 +567,7 @@ def _get_procedure_properties( create_date as date_created, modify_date as date_modified FROM sys.procedures - WHERE object_id = object_id('{procedure.full_name}') + WHERE object_id = object_id('{procedure.escape_full_name}') """ ) properties = {} diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json index a495d04c4e398..2fe7a76fd01ae 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,24 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -80,7 +100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -91,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1df94c0f-15fd-4b68-8ca3-6053a0332362", + "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-03-10 16:27:54.970000", - "date_modified": "2023-03-10 16:27:55.097000", + 
"date_created": "2023-10-27 10:11:55.540000", + "date_modified": "2023-10-27 10:11:55.667000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -110,7 +131,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -127,22 +149,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [] - } - }, - "systemMetadata": { - "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -163,7 +171,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -178,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -193,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -210,7 +221,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -225,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -245,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -266,7 +280,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -281,7 +296,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -296,7 +312,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -313,7 +330,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -328,7 +346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -348,7 +367,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -369,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -384,7 +405,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -399,7 +421,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -416,7 +439,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -431,7 +455,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" 
+ "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -451,7 +476,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -472,7 +498,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -487,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -502,7 +530,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -519,7 +548,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,7 +564,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +585,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -575,7 +607,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -590,7 +623,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -605,7 +639,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -622,7 +657,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -637,7 +673,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +694,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -693,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -708,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -725,7 +766,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -740,7 +782,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -760,7 +803,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -781,7 +825,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -796,7 +841,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { 
@@ -811,7 +857,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -828,7 +875,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -843,7 +891,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -863,7 +912,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -884,7 +934,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +950,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -914,7 +966,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -931,7 +984,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -946,7 +1000,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -966,7 +1021,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -987,7 +1043,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1002,7 +1059,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1017,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1034,7 +1093,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1049,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1069,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1090,7 +1152,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1105,7 +1168,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1120,7 +1184,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1137,7 +1202,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1152,7 +1218,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1172,7 +1239,8 @@ }, "systemMetadata": { 
"lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1187,7 +1255,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1259,7 +1328,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1276,7 +1346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1300,7 +1371,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1321,7 +1393,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1336,7 +1409,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1351,7 +1425,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1368,7 +1443,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1383,7 +1459,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1403,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1418,7 +1496,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1491,7 +1570,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,7 +1588,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1532,7 +1613,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1547,7 +1629,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1644,7 +1727,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1661,7 +1745,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1685,7 +1770,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1700,7 +1786,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1796,7 +1883,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1813,7 +1901,33 @@ }, "systemMetadata": { "lastObserved": 
1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1830,12 +1944,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1843,14 +1958,14 @@ "customProperties": { "procedure_depends_on": "{}", "depending_on_procedure": "{}", - "code": "CREATE PROCEDURE Foo.DBs @ID INT\nAS\n SELECT @ID AS ThatDB;\n", + "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-03-10 16:27:54.907000", - "date_modified": "2023-03-10 16:27:54.907000" + "date_created": "2023-10-27 10:11:55.460000", + "date_modified": "2023-10-27 10:11:55.460000" }, "externalUrl": "", - "name": "demodata.Foo.DBs", + "name": "demodata.Foo.Proc.With.SpecialChar", "type": { "string": "MSSQL_STORED_PROCEDURE" } @@ -1858,12 +1973,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1875,31 +1991,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" - }, - { - "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", - "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1920,7 +2013,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1935,7 +2029,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1950,7 +2045,8 @@ }, "systemMetadata": { "lastObserved": 
1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1967,7 +2063,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1982,7 +2079,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2002,7 +2100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2023,7 +2122,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2038,7 +2138,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2053,7 +2154,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2070,7 +2172,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2085,7 +2188,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2105,7 +2209,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2126,7 +2231,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2141,7 +2247,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2156,7 +2263,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2173,7 +2281,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2188,7 +2297,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2208,7 +2318,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2228,7 +2339,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2243,7 +2355,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2258,7 +2371,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2275,7 +2389,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2290,7 +2405,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2311,7 +2427,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": 
"mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2326,7 +2443,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2341,7 +2459,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2358,7 +2477,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2373,7 +2493,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2393,7 +2514,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2414,7 +2536,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2429,7 +2552,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2444,7 +2568,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2461,7 +2586,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2476,7 +2602,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2496,7 +2623,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2517,7 +2645,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2532,7 +2661,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2547,7 +2677,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2564,7 +2695,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2579,7 +2711,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2599,7 +2732,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2620,7 +2754,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2635,7 +2770,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2650,7 +2786,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2667,7 +2804,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": 
"mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2682,7 +2820,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2702,7 +2841,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2723,7 +2863,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2738,7 +2879,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2753,7 +2895,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2770,7 +2913,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2785,7 +2929,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2805,7 +2950,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2826,7 +2972,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2841,7 +2988,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2856,7 +3004,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2873,7 +3022,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2888,7 +3038,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2908,7 +3059,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2929,7 +3081,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2944,7 +3097,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2959,7 +3113,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2976,7 +3131,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2991,7 +3147,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3011,7 +3168,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3032,7 +3190,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -3047,7 +3206,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3062,7 +3222,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3079,7 +3240,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3094,7 +3256,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3114,7 +3277,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3135,7 +3299,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3150,7 +3315,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3165,7 +3331,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3182,7 +3349,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3197,7 +3365,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3217,7 +3386,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3238,7 +3408,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3253,7 +3424,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3268,7 +3440,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3285,7 +3458,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3300,7 +3474,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3320,7 +3495,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3335,7 +3511,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3407,7 +3584,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3424,7 +3602,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3448,7 +3627,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -3469,7 +3649,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3484,7 +3665,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3499,7 +3681,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3516,7 +3699,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3531,7 +3715,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3551,7 +3736,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3566,7 +3752,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3638,7 +3825,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3655,7 +3843,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3679,7 +3868,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3694,7 +3884,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3790,7 +3981,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3807,7 +3999,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3831,7 +4024,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3852,7 +4046,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3867,7 +4062,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3882,7 +4078,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3899,7 +4096,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3914,7 +4112,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3934,7 +4133,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3955,7 +4155,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, 
{ @@ -3970,7 +4171,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3985,7 +4187,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4002,7 +4205,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4017,7 +4221,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4037,7 +4242,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4058,7 +4264,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4073,7 +4280,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4088,7 +4296,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4105,7 +4314,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4120,27 +4330,34 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:c6627af82d44de89492e1a9315ae9f4b", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4150,12 +4367,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4165,12 +4383,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", 
"aspect": { @@ -4180,27 +4399,24 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "container", - "entityUrn": "urn:li:container:c6627af82d44de89492e1a9315ae9f4b", + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", - "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" - } - ] + "removed": false } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json index 8277ff8bf7e89..c1984828750eb 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,24 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -80,7 +100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -91,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1df94c0f-15fd-4b68-8ca3-6053a0332362", + "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-03-10 16:27:54.970000", - "date_modified": "2023-03-10 16:27:55.097000", + "date_created": "2023-10-27 10:11:55.540000", + "date_modified": "2023-10-27 10:11:55.667000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -110,7 +131,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -127,22 +149,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [] - } - }, - "systemMetadata": { - "lastObserved": 
1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -163,7 +171,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -178,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -193,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -210,7 +221,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -225,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -245,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -266,7 +280,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -281,7 +296,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -296,7 +312,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -313,7 +330,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -328,7 +346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -348,7 +367,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -369,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -384,7 +405,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -399,7 +421,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -416,7 +439,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -431,7 +455,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -451,7 +476,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -472,7 +498,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -487,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -502,7 +530,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -519,7 +548,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,7 +564,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +585,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -575,7 +607,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -590,7 +623,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -605,7 +639,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -622,7 +657,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -637,7 +673,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +694,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -693,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -708,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -725,7 +766,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -740,7 +782,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -760,7 +803,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -781,7 +825,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -796,7 +841,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -811,7 +857,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -828,7 +875,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -843,7 +891,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -863,7 +912,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -884,7 +934,8 @@ }, 
"systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +950,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -914,7 +966,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -931,7 +984,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -946,7 +1000,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -966,7 +1021,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -987,7 +1043,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1002,7 +1059,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1017,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1034,7 +1093,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1049,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1069,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1090,7 +1152,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1105,7 +1168,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1120,7 +1184,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1137,7 +1202,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1152,7 +1218,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1172,7 +1239,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1187,7 +1255,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1259,7 +1328,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1276,7 +1346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1300,7 +1371,8 @@ }, "systemMetadata": { "lastObserved": 
1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1321,7 +1393,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1336,7 +1409,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1351,7 +1425,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1368,7 +1443,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1383,7 +1459,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1403,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1418,7 +1496,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1491,7 +1570,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,7 +1588,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1532,7 +1613,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1547,7 +1629,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1644,7 +1727,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1661,7 +1745,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1685,7 +1770,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1700,7 +1786,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1796,7 +1883,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1813,7 +1901,33 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -1830,12 +1944,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1843,14 +1958,14 @@ "customProperties": { "procedure_depends_on": "{}", "depending_on_procedure": "{}", - "code": "CREATE PROCEDURE Foo.DBs @ID INT\nAS\n SELECT @ID AS ThatDB;\n", + "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-03-10 16:27:54.907000", - "date_modified": "2023-03-10 16:27:54.907000" + "date_created": "2023-10-27 10:11:55.460000", + "date_modified": "2023-10-27 10:11:55.460000" }, "externalUrl": "", - "name": "demodata.Foo.DBs", + "name": "demodata.Foo.Proc.With.SpecialChar", "type": { "string": "MSSQL_STORED_PROCEDURE" } @@ -1858,12 +1973,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1875,31 +1991,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" - }, - { - "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", - "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1920,7 +2013,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1935,7 +2029,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1950,7 +2045,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1967,7 +2063,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1982,7 +2079,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2002,7 +2100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2023,7 +2122,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - 
"runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2038,7 +2138,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2053,7 +2154,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2070,7 +2172,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2085,7 +2188,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2105,7 +2209,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2126,7 +2231,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2141,7 +2247,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2156,7 +2263,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2173,7 +2281,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2188,27 +2297,34 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2218,12 +2334,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2233,12 +2350,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data 
Backup,PROD),localhost.Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2248,27 +2366,24 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "container", - "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" - } - ] + "removed": false } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json index f3714bba6364d..804a8d74d0d51 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,24 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -80,7 +100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -91,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1df94c0f-15fd-4b68-8ca3-6053a0332362", + "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-03-10 16:27:54.970000", - "date_modified": "2023-03-10 16:27:55.097000", + "date_created": "2023-10-27 10:11:55.540000", + "date_modified": "2023-10-27 10:11:55.667000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -110,7 +131,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -127,22 +149,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - 
"path": [] - } - }, - "systemMetadata": { - "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -163,7 +171,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -178,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -193,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -210,7 +221,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -225,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -245,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -266,7 +280,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -281,7 +296,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -296,7 +312,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -313,7 +330,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -328,7 +346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -348,7 +367,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -369,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -384,7 +405,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -399,7 +421,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -416,7 +439,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -431,7 +455,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -451,7 +476,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -472,7 +498,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -487,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -502,7 +530,8 @@ }, "systemMetadata": { "lastObserved": 
1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -519,7 +548,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,7 +564,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +585,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -575,7 +607,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -590,7 +623,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -605,7 +639,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -622,7 +657,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -637,7 +673,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +694,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -693,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -708,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -725,7 +766,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -740,7 +782,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -760,7 +803,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -781,7 +825,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -796,7 +841,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -811,7 +857,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -828,7 +875,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -843,7 +891,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -863,7 +912,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -884,7 +934,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +950,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -914,7 +966,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -931,7 +984,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -946,7 +1000,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -966,7 +1021,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -987,7 +1043,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1002,7 +1059,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1017,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1034,7 +1093,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1049,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1069,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1090,7 +1152,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1105,7 +1168,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1120,7 +1184,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1137,7 +1202,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1152,7 +1218,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1172,7 +1239,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1187,7 +1255,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1259,7 +1328,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1276,7 +1346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } 
}, { @@ -1300,7 +1371,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1321,7 +1393,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1336,7 +1409,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1351,7 +1425,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1368,7 +1443,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1383,7 +1459,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1403,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1418,7 +1496,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1491,7 +1570,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,7 +1588,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1532,7 +1613,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1547,7 +1629,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1644,7 +1727,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1661,7 +1745,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1685,7 +1770,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1700,7 +1786,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1796,7 +1883,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1813,7 +1901,33 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + 
"lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1830,12 +1944,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1843,14 +1958,14 @@ "customProperties": { "procedure_depends_on": "{}", "depending_on_procedure": "{}", - "code": "CREATE PROCEDURE Foo.DBs @ID INT\nAS\n SELECT @ID AS ThatDB;\n", + "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-03-10 16:27:54.907000", - "date_modified": "2023-03-10 16:27:54.907000" + "date_created": "2023-10-27 10:11:55.460000", + "date_modified": "2023-10-27 10:11:55.460000" }, "externalUrl": "", - "name": "demodata.Foo.DBs", + "name": "demodata.Foo.Proc.With.SpecialChar", "type": { "string": "MSSQL_STORED_PROCEDURE" } @@ -1858,12 +1973,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1875,31 +1991,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" - }, - { - "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", - "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1920,7 +2013,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1935,7 +2029,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1950,7 +2045,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1967,7 +2063,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1982,7 +2079,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2002,7 +2100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ 
-2023,7 +2122,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2038,7 +2138,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2053,7 +2154,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2070,7 +2172,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2085,7 +2188,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2105,7 +2209,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2126,7 +2231,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2141,7 +2247,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2156,7 +2263,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2173,7 +2281,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2188,27 +2297,34 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2218,12 +2334,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2233,12 +2350,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": 
"urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2248,27 +2366,24 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { - "entityType": "container", - "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", - "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" - } - ] + "removed": false } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json index d25d23daae2ea..9d1b288057a16 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -95,7 +100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -106,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "b6a0c1e2-f90a-4c86-a226-bf7ca59ad79f", + "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-08-06 21:01:05.157000", - "date_modified": "2023-08-06 21:01:05.283000", + "date_created": "2023-10-27 10:11:55.540000", + "date_modified": "2023-10-27 10:11:55.667000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -125,7 +131,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -142,7 +149,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -163,7 +171,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -178,7 +187,8 
@@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -193,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -210,7 +221,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -225,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -245,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -266,7 +280,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -281,7 +296,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -296,7 +312,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -313,7 +330,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -328,7 +346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -348,7 +367,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -369,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -384,7 +405,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -399,7 +421,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -416,7 +439,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -431,7 +455,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -451,7 +476,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -472,7 +498,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -487,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -502,7 +530,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -519,7 +548,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,7 +564,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": 
"mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +585,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -575,7 +607,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -590,7 +623,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -605,7 +639,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -622,7 +657,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -637,7 +673,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +694,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -693,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -708,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -725,7 +766,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -740,7 +782,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -760,7 +803,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -781,7 +825,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -796,7 +841,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -811,7 +857,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -828,7 +875,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -843,7 +891,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -863,7 +912,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -884,7 +934,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +950,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -914,7 +966,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -931,7 +984,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -946,7 +1000,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -966,7 +1021,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -987,7 +1043,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1002,7 +1059,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1017,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1034,7 +1093,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1049,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1069,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1090,7 +1152,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1105,7 +1168,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1120,7 +1184,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1137,7 +1202,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1152,7 +1218,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1172,7 +1239,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1187,7 +1255,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1259,7 +1328,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1276,7 +1346,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1300,7 +1371,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1321,7 +1393,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ 
-1336,7 +1409,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1351,7 +1425,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1368,7 +1443,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1383,7 +1459,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1403,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1418,7 +1496,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1491,7 +1570,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,7 +1588,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1532,7 +1613,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1547,7 +1629,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1644,7 +1727,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1661,7 +1745,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1685,7 +1770,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1700,7 +1786,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1796,7 +1883,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1813,7 +1901,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1837,7 +1926,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1854,12 +1944,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1867,14 +1958,14 @@ "customProperties": { "procedure_depends_on": "{}", "depending_on_procedure": "{}", - "code": "CREATE PROCEDURE Foo.DBs @ID INT\nAS\n SELECT @ID AS ThatDB;\n", + "code": "CREATE 
PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-08-06 21:01:05.093000", - "date_modified": "2023-08-06 21:01:05.093000" + "date_created": "2023-10-27 10:11:55.460000", + "date_modified": "2023-10-27 10:11:55.460000" }, "externalUrl": "", - "name": "demodata.Foo.DBs", + "name": "demodata.Foo.Proc.With.SpecialChar", "type": { "string": "MSSQL_STORED_PROCEDURE" } @@ -1882,12 +1973,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1899,7 +1991,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1920,7 +2013,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1935,7 +2029,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1950,7 +2045,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1967,7 +2063,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1982,7 +2079,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2002,7 +2100,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2023,7 +2122,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2038,7 +2138,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2053,7 +2154,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2070,7 +2172,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2085,7 +2188,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2105,7 +2209,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2126,7 +2231,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2141,7 +2247,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2156,7 +2263,8 @@ }, "systemMetadata": { 
"lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2173,7 +2281,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2188,7 +2297,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2208,7 +2318,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2223,7 +2334,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2238,7 +2350,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2253,12 +2366,13 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),DBs)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2268,7 +2382,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mssql-test" + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/sql_server/setup/setup.sql b/metadata-ingestion/tests/integration/sql_server/setup/setup.sql index c1347a7c8caca..a17d52f9a39b1 100644 --- a/metadata-ingestion/tests/integration/sql_server/setup/setup.sql +++ b/metadata-ingestion/tests/integration/sql_server/setup/setup.sql @@ -45,7 +45,7 @@ CREATE TABLE Foo.SalesReason ) ; GO -CREATE PROCEDURE Foo.DBs @ID INT +CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT AS SELECT @ID AS ThatDB; GO From 8b1d2094aa768d1f795a2d240de888e65c26f6fc Mon Sep 17 00:00:00 2001 From: richenc <125420929+richenc@users.noreply.github.com> Date: Fri, 27 Oct 2023 10:36:47 -0700 Subject: [PATCH 012/792] feat(airflow): retry callback, support ExternalTaskSensor subclasses (#8514) Co-authored-by: Richie Chen Co-authored-by: Harshal Sheth --- .../client/airflow_generator.py | 7 +++- .../datahub_plugin_v22.py | 36 ++++++++++++++++++- .../integration/goldens/v1_basic_iolets.json | 7 +++- .../integration/goldens/v1_simple_dag.json | 14 ++++++-- .../integration/goldens/v2_basic_iolets.json | 7 +++- .../v2_basic_iolets_no_dag_listener.json | 7 +++- .../integration/goldens/v2_simple_dag.json | 12 +++++-- .../v2_simple_dag_no_dag_listener.json | 14 ++++++-- .../goldens/v2_snowflake_operator.json | 7 +++- .../goldens/v2_sqlite_operator.json | 27 +++++++++++--- .../v2_sqlite_operator_no_dag_listener.json | 35 +++++++++++++++--- 11 files changed, 151 insertions(+), 22 deletions(-) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py index 16585f70e820b..e1d53be7bae6b 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py +++ 
b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py @@ -98,7 +98,7 @@ def _get_dependencies( # It is possible to tie an external sensor to a DAG if external_task_id is omitted but currently we can't tie # jobflow to another jobflow. external_task_upstreams = [] - if task.task_type == "ExternalTaskSensor": + if isinstance(task, ExternalTaskSensor): task = cast(ExternalTaskSensor, task) if hasattr(task, "external_task_id") and task.external_task_id is not None: external_task_upstreams = [ @@ -155,6 +155,8 @@ def generate_dataflow( "_concurrency", # "_default_view", "catchup", + "description", + "doc_md", "fileloc", "is_paused_upon_creation", "start_date", @@ -431,6 +433,9 @@ def run_datajob( job_property_bag["operator"] = str(ti.operator) job_property_bag["priority_weight"] = str(ti.priority_weight) job_property_bag["log_url"] = ti.log_url + job_property_bag["orchestrator"] = "airflow" + job_property_bag["dag_id"] = str(dag.dag_id) + job_property_bag["task_id"] = str(ti.task_id) dpi.properties.update(job_property_bag) dpi.url = ti.log_url diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py index 046fbb5efaa03..f9a2119f51e32 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py @@ -23,6 +23,7 @@ TASK_ON_FAILURE_CALLBACK = "on_failure_callback" TASK_ON_SUCCESS_CALLBACK = "on_success_callback" +TASK_ON_RETRY_CALLBACK = "on_retry_callback" def get_task_inlets_advanced(task: BaseOperator, context: Any) -> Iterable[Any]: @@ -259,6 +260,28 @@ def custom_on_success_callback(context): return custom_on_success_callback +def _wrap_on_retry_callback(on_retry_callback): + def custom_on_retry_callback(context): + config = get_lineage_config() + if config.enabled: + context["_datahub_config"] = config + try: + datahub_task_status_callback( + context, status=InstanceRunResult.UP_FOR_RETRY + ) + except Exception as e: + if not config.graceful_exceptions: + raise e + else: + print(f"Exception: {traceback.format_exc()}") + + # Call original policy + if on_retry_callback: + on_retry_callback(context) + + return custom_on_retry_callback + + def task_policy(task: Union[BaseOperator, MappedOperator]) -> None: task.log.debug(f"Setting task policy for Dag: {task.dag_id} Task: {task.task_id}") # task.add_inlets(["auto"]) @@ -274,7 +297,14 @@ def task_policy(task: Union[BaseOperator, MappedOperator]) -> None: on_success_callback_prop: property = getattr( MappedOperator, TASK_ON_SUCCESS_CALLBACK ) - if not on_failure_callback_prop.fset or not on_success_callback_prop.fset: + on_retry_callback_prop: property = getattr( + MappedOperator, TASK_ON_RETRY_CALLBACK + ) + if ( + not on_failure_callback_prop.fset + or not on_success_callback_prop.fset + or not on_retry_callback_prop.fset + ): task.log.debug( "Using MappedOperator's partial_kwargs instead of callback properties" ) @@ -284,10 +314,14 @@ def task_policy(task: Union[BaseOperator, MappedOperator]) -> None: task.partial_kwargs[TASK_ON_SUCCESS_CALLBACK] = _wrap_on_success_callback( task.on_success_callback ) + task.partial_kwargs[TASK_ON_RETRY_CALLBACK] = _wrap_on_retry_callback( + task.on_retry_callback + ) return task.on_failure_callback = _wrap_on_failure_callback(task.on_failure_callback) # type: ignore task.on_success_callback = 
_wrap_on_success_callback(task.on_success_callback) # type: ignore + task.on_retry_callback = _wrap_on_retry_callback(task.on_retry_callback) # type: ignore # task.pre_execute = _wrap_pre_execution(task.pre_execute) diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json index 26aa2afaa831a..a4c17c73e9c7e 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -373,7 +375,10 @@ "state": "success", "operator": "BashOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets", + "orchestrator": "airflow", + "dag_id": "basic_iolets", + "task_id": "run_data_task" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets", "name": "basic_iolets_run_data_task_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json index b2e3a1fe47da7..a0a95716a0993 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "'A simple DAG that runs a few fake data tasks.'", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -302,7 +304,10 @@ "state": "success", "operator": "BashOperator", "priority_weight": "2", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag", + "orchestrator": "airflow", + "dag_id": "simple_dag", + "task_id": "task_1" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag", "name": "simple_dag_task_1_manual_run_test", @@ -433,6 +438,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "'A simple DAG that runs a few fake data tasks.'", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -654,7 +661,10 @@ "state": "success", "operator": 
"BashOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag", + "orchestrator": "airflow", + "dag_id": "simple_dag", + "task_id": "run_another_data_task" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag", "name": "simple_dag_run_another_data_task_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json index 2e733c2ad40a9..1974f1f085df0 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -224,7 +226,10 @@ "state": "running", "operator": "BashOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets&map_index=-1", + "orchestrator": "airflow", + "dag_id": "basic_iolets", + "task_id": "run_data_task" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets&map_index=-1", "name": "basic_iolets_run_data_task_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json index 44b288efda954..d02951bc9e82d 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -224,7 +226,10 @@ "state": "running", "operator": "BashOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets&map_index=-1", + "orchestrator": "airflow", + "dag_id": "basic_iolets", + "task_id": "run_data_task" }, "externalUrl": 
"http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_data_task&dag_id=basic_iolets&map_index=-1", "name": "basic_iolets_run_data_task_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json index 454c509279e11..9acc47ec1321e 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "'A simple DAG that runs a few fake data tasks.'", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -189,7 +191,10 @@ "state": "running", "operator": "BashOperator", "priority_weight": "2", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag&map_index=-1", + "orchestrator": "airflow", + "dag_id": "simple_dag", + "task_id": "task_1" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag&map_index=-1", "name": "simple_dag_task_1_manual_run_test", @@ -523,7 +528,10 @@ "state": "running", "operator": "BashOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag&map_index=-1", + "orchestrator": "airflow", + "dag_id": "simple_dag", + "task_id": "run_another_data_task" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag&map_index=-1", "name": "simple_dag_run_another_data_task_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json index 73b5765e96b7d..03299c483f57f 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "'A simple DAG that runs a few fake data tasks.'", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -189,7 +191,10 @@ "state": "running", "operator": "BashOperator", "priority_weight": "2", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag&map_index=-1" + "log_url": 
"http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag&map_index=-1", + "orchestrator": "airflow", + "dag_id": "simple_dag", + "task_id": "task_1" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=task_1&dag_id=simple_dag&map_index=-1", "name": "simple_dag_task_1_manual_run_test", @@ -435,6 +440,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "'A simple DAG that runs a few fake data tasks.'", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -579,7 +586,10 @@ "state": "running", "operator": "BashOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag&map_index=-1", + "orchestrator": "airflow", + "dag_id": "simple_dag", + "task_id": "run_another_data_task" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=run_another_data_task&dag_id=simple_dag&map_index=-1", "name": "simple_dag_run_another_data_task_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json index affc395d421da..11a0b17b45b95 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/snowflake_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -234,7 +236,10 @@ "state": "running", "operator": "SnowflakeOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=snowflake_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=snowflake_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "snowflake_operator", + "task_id": "transform_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=snowflake_operator&map_index=-1", "name": "snowflake_operator_transform_cost_table_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json index 81d0a71b651d9..19e4aac9fb95e 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json @@ -9,6 
+9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -201,7 +203,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "5", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=create_cost_table&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=create_cost_table&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "create_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=create_cost_table&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_create_cost_table_manual_run_test", @@ -562,7 +567,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "4", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=populate_cost_table&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=populate_cost_table&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "populate_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=populate_cost_table&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_populate_cost_table_manual_run_test", @@ -922,7 +930,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "3", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "transform_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_transform_cost_table_manual_run_test", @@ -1364,7 +1375,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_costs&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_costs&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "cleanup_costs" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_costs&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_cleanup_costs_manual_run_test", @@ -1658,7 +1672,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_processed_costs&dag_id=sqlite_operator&map_index=-1" + "log_url": 
"http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_processed_costs&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "cleanup_processed_costs" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_processed_costs&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json index 96a0f02ccec17..b67464b385335 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json @@ -9,6 +9,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -201,7 +203,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "5", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=create_cost_table&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=create_cost_table&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "create_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=create_cost_table&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_create_cost_table_manual_run_test", @@ -460,6 +465,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -617,7 +624,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "4", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=populate_cost_table&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=populate_cost_table&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "populate_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=populate_cost_table&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_populate_cost_table_manual_run_test", @@ -805,6 +815,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, 
tzinfo=Timezone('UTC'))", @@ -1032,7 +1044,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "3", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "transform_cost_table" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=transform_cost_table&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_transform_cost_table_manual_run_test", @@ -1370,6 +1385,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -1529,7 +1546,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_costs&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_costs&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "cleanup_costs" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_costs&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_cleanup_costs_manual_run_test", @@ -1719,6 +1739,8 @@ "customProperties": { "_access_control": "None", "catchup": "False", + "description": "None", + "doc_md": "None", "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", @@ -1878,7 +1900,10 @@ "state": "running", "operator": "SqliteOperator", "priority_weight": "1", - "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_processed_costs&dag_id=sqlite_operator&map_index=-1" + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_processed_costs&dag_id=sqlite_operator&map_index=-1", + "orchestrator": "airflow", + "dag_id": "sqlite_operator", + "task_id": "cleanup_processed_costs" }, "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=cleanup_processed_costs&dag_id=sqlite_operator&map_index=-1", "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", From 649f6d031789252fb9ac97d932fd71396f4875f2 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Sat, 28 Oct 2023 04:02:43 +0900 Subject: [PATCH 013/792] docs: fix saasonly flags for some pages (#9124) --- docs-website/sidebars.js | 29 +++++++++++++++++++----- docs/managed-datahub/chrome-extension.md | 2 -- 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 31d69aec46d8b..39eaea57444ed 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -442,11 +442,29 @@ 
module.exports = { }, "docs/act-on-metadata/impact-analysis", { - Observability: [ - "docs/managed-datahub/observe/freshness-assertions", - "docs/managed-datahub/observe/volume-assertions", - "docs/managed-datahub/observe/custom-sql-assertions", - "docs/managed-datahub/observe/column-assertions", + label: "Observability", + type: "category", + items: [ + { + type: "doc", + id: "docs/managed-datahub/observe/freshness-assertions", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/observe/volume-assertions", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/observe/custom-sql-assertions", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/observe/column-assertions", + className: "saasOnly", + }, ], }, { @@ -606,7 +624,6 @@ module.exports = { { type: "doc", id: "docs/managed-datahub/chrome-extension", - className: "saasOnly", }, { "Managed DataHub Release History": [ diff --git a/docs/managed-datahub/chrome-extension.md b/docs/managed-datahub/chrome-extension.md index 0aa0860d03b67..a4560bc8cc09b 100644 --- a/docs/managed-datahub/chrome-extension.md +++ b/docs/managed-datahub/chrome-extension.md @@ -1,10 +1,8 @@ --- description: Learn how to upload and use the Acryl DataHub Chrome extension (beta) locally before it's available on the Chrome store. --- -import FeatureAvailability from '@site/src/components/FeatureAvailability'; # Acryl DataHub Chrome Extension - ## Installing the Extension From e02b9096bd68c14944e640dbd3a235651ecebbaf Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Sat, 28 Oct 2023 00:33:43 +0530 Subject: [PATCH 014/792] fix(ingest/snowflake): missing view downstream cll if platform instance is set (#8966) --- .../source/snowflake/snowflake_lineage_v2.py | 75 +- .../tests/integration/snowflake/common.py | 2 +- .../snowflake/snowflake_golden.json | 706 ++++-- .../snowflake_privatelink_golden.json | 2075 ++++++++++++----- .../integration/snowflake/test_snowflake.py | 5 +- 5 files changed, 2055 insertions(+), 808 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py index 9649054dbe6cb..4219533dc217c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_lineage_v2.py @@ -20,12 +20,12 @@ import datahub.emitter.mce_builder as builder from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.emitter.sql_parsing_builder import SqlParsingBuilder from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.aws.s3_util import make_s3_urn_for_lineage from datahub.ingestion.source.snowflake.constants import ( LINEAGE_PERMISSION_ERROR, SnowflakeEdition, - SnowflakeObjectDomain, ) from datahub.ingestion.source.snowflake.snowflake_config import SnowflakeV2Config from datahub.ingestion.source.snowflake.snowflake_query import SnowflakeQuery @@ -53,7 +53,6 @@ sqlglot_lineage, ) from datahub.utilities.time import ts_millis_to_datetime -from datahub.utilities.urns.dataset_urn import DatasetUrn logger: logging.Logger = logging.getLogger(__name__) @@ -195,20 +194,6 @@ def get_table_upstream_workunits( f"Upstream lineage detected for {self.report.num_tables_with_upstreams} tables.", ) - def _gen_workunit_from_sql_parsing_result( - self, - dataset_identifier: str, - result: 
SqlParsingResult, - ) -> Iterable[MetadataWorkUnit]: - upstreams, fine_upstreams = self.get_upstreams_from_sql_parsing_result( - self.dataset_urn_builder(dataset_identifier), result - ) - if upstreams: - self.report.num_views_with_upstreams += 1 - yield self._create_upstream_lineage_workunit( - dataset_identifier, upstreams, fine_upstreams - ) - def _gen_workunits_from_query_result( self, discovered_assets: Collection[str], @@ -242,18 +227,31 @@ def get_view_upstream_workunits( schema_resolver: SchemaResolver, view_definitions: MutableMapping[str, str], ) -> Iterable[MetadataWorkUnit]: - views_processed = set() + views_failed_parsing = set() if self.config.include_view_column_lineage: with PerfTimer() as timer: + builder = SqlParsingBuilder( + generate_lineage=True, + generate_usage_statistics=False, + generate_operations=False, + ) for view_identifier, view_definition in view_definitions.items(): result = self._run_sql_parser( view_identifier, view_definition, schema_resolver ) - if result: - views_processed.add(view_identifier) - yield from self._gen_workunit_from_sql_parsing_result( - view_identifier, result + if result and result.out_tables: + self.report.num_views_with_upstreams += 1 + # process_sql_parsing_result yields no workunits here; we still + # "yield from" it so that the generator body actually executes. + yield from builder.process_sql_parsing_result( + result=result, + query=view_definition, + is_view_ddl=True, ) + else: + views_failed_parsing.add(view_identifier) + + yield from builder.gen_workunits() self.report.view_lineage_parse_secs = timer.elapsed_seconds() with PerfTimer() as timer: @@ -261,7 +259,7 @@ def get_view_upstream_workunits( if results: yield from self._gen_workunits_from_query_result( - set(discovered_views) - views_processed, + views_failed_parsing, results, upstream_for_view=True, ) @@ -349,39 +347,6 @@ def get_upstreams_from_query_result_row( return upstreams, fine_upstreams - def get_upstreams_from_sql_parsing_result( - self, downstream_table_urn: str, result: SqlParsingResult - ) -> Tuple[List[UpstreamClass], List[FineGrainedLineage]]: - # Note: This ignores the out_tables section of the sql parsing result. 
- upstreams = [ - UpstreamClass(dataset=upstream_table_urn, type=DatasetLineageTypeClass.VIEW) - for upstream_table_urn in set(result.in_tables) - ] - - # Maps downstream_col -> [upstream_col] - fine_lineage: Dict[str, Set[SnowflakeColumnId]] = defaultdict(set) - for column_lineage in result.column_lineage or []: - out_column = column_lineage.downstream.column - for upstream_column_info in column_lineage.upstreams: - upstream_table_name = DatasetUrn.create_from_string( - upstream_column_info.table - ).get_dataset_name() - fine_lineage[out_column].add( - SnowflakeColumnId( - columnName=upstream_column_info.column, - objectName=upstream_table_name, - objectDomain=SnowflakeObjectDomain.VIEW.value, - ) - ) - fine_upstreams = [ - self.build_finegrained_lineage( - downstream_table_urn, downstream_col, upstream_cols - ) - for downstream_col, upstream_cols in fine_lineage.items() - ] - - return upstreams, list(filter(None, fine_upstreams)) - def _populate_external_lineage_map(self, discovered_tables: List[str]) -> None: with PerfTimer() as timer: self.report.num_external_table_edges_scanned = 0 diff --git a/metadata-ingestion/tests/integration/snowflake/common.py b/metadata-ingestion/tests/integration/snowflake/common.py index 81e307a78ae9e..ff448eca01071 100644 --- a/metadata-ingestion/tests/integration/snowflake/common.py +++ b/metadata-ingestion/tests/integration/snowflake/common.py @@ -94,7 +94,7 @@ def default_query_results( # noqa: C901 "name": "VIEW_{}".format(view_idx), "created_on": datetime(2021, 6, 8, 0, 0, 0, 0), "comment": "Comment for View", - "text": None, + "text": f"create view view_{view_idx} as select * from table_{view_idx}", } for view_idx in range(1, num_views + 1) ] diff --git a/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json b/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json index a424b258e68ff..c7273fee5a2e5 100644 --- a/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json +++ b/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json @@ -24,7 +24,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -39,7 +40,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +56,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -71,7 +74,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -90,7 +94,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -105,7 +110,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -121,7 +127,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -150,7 +157,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - 
"runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -165,7 +173,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -180,7 +189,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -197,7 +207,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +227,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -231,7 +243,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -251,7 +264,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -266,7 +280,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -462,7 +477,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -488,7 +504,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -503,7 +520,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -520,7 +538,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -544,7 +563,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -559,7 +579,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -755,7 +776,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -781,7 +803,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -796,7 +819,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -813,7 +837,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + 
"lastRunId": "no-run-id-provided" } }, { @@ -837,7 +862,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -852,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1048,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1074,7 +1102,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -1089,7 +1118,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1106,7 +1136,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1130,7 +1161,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,7 +1177,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1341,7 +1374,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1367,7 +1401,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -1382,7 +1417,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1399,7 +1435,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1423,7 +1460,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1438,7 +1476,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1634,7 +1673,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1660,7 +1700,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -1675,7 +1716,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1692,7 
+1734,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1716,7 +1759,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1731,7 +1775,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1927,7 +1972,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1953,7 +1999,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -1968,7 +2015,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1985,7 +2033,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2009,7 +2058,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2024,7 +2074,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2220,7 +2271,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2246,7 +2298,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -2261,7 +2314,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2278,7 +2332,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2302,7 +2357,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2317,7 +2373,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2513,7 +2570,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2539,7 +2597,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -2554,7 +2613,8 @@ }, "systemMetadata": { "lastObserved": 
1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2571,7 +2631,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2595,7 +2656,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2610,7 +2672,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2806,7 +2869,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2832,7 +2896,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -2847,7 +2912,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2864,7 +2930,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2888,7 +2955,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2903,7 +2971,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3099,7 +3168,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3125,7 +3195,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -3140,7 +3211,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3157,7 +3229,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3181,7 +3254,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3197,7 +3271,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3212,7 +3287,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3382,7 +3458,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": 
"snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3408,7 +3485,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -3423,7 +3501,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3440,7 +3519,26 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "create view view_1 as select * from table_1", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "snowflake-2023_10_06-17_59_03", + "lastRunId": "no-run-id-provided" } }, { @@ -3464,7 +3562,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3480,7 +3579,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3496,7 +3596,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3512,7 +3613,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3527,7 +3629,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3690,7 +3793,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3716,7 +3820,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28" + "runId": "snowflake-2023_08_04-09_52_28", + "lastRunId": "no-run-id-provided" } }, { @@ -3731,7 +3836,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3748,7 +3854,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3773,7 +3880,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3797,7 +3905,26 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "create view view_2 as select * from table_2", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "snowflake-2023_10_06-17_59_03", + "lastRunId": "no-run-id-provided" } }, { @@ -3819,7 +3946,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3841,7 +3969,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3863,7 +3992,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3885,7 +4015,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3907,7 +4038,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3929,7 +4061,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3951,7 +4084,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3973,7 +4107,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3995,7 +4130,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -4017,7 +4153,145 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_1)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_1)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_10)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_10)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_2)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_2)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_3)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_3)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_4)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_4)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_5)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_5)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_6)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_6)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_7)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_7)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_8)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_8)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD),col_9)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_1,PROD),col_9)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "snowflake-2023_10_06-17_59_03", + "lastRunId": "no-run-id-provided" } }, { @@ -4034,14 +4308,127 @@ "actor": "urn:li:corpuser:unknown" }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_1)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_1)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_10)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_10)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_2)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_2)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_3)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_3)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_4)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_4)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_5)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_5)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_6)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_6)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_7)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_7)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_8)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_8)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD),col_9)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.view_2,PROD),col_9)" + ], + "confidenceScore": 1.0 } 
] } }, "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "lastObserved": 1615443388097, + "runId": "snowflake-2023_10_06-17_59_03", + "lastRunId": "no-run-id-provided" } }, { @@ -4204,7 +4591,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -4340,7 +4728,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -4476,7 +4865,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -4612,7 +5002,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -4748,7 +5139,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -4884,7 +5276,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5020,7 +5413,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5156,7 +5550,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5292,7 +5687,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5428,7 +5824,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5456,7 +5853,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5484,7 +5882,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5512,7 +5911,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5540,7 +5940,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5568,7 +5969,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5596,7 +5998,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5624,7 +6027,8 @@ }, 
"systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5652,7 +6056,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5680,7 +6085,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5708,7 +6114,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5736,7 +6143,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5764,7 +6172,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5786,7 +6195,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5808,7 +6218,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5830,7 +6241,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5852,7 +6264,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5874,7 +6287,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5896,7 +6310,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5918,7 +6333,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5940,7 +6356,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5962,7 +6379,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5984,7 +6402,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -5999,7 +6418,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -6014,7 +6434,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - 
"runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -6029,7 +6450,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -6044,7 +6466,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -6059,7 +6482,8 @@ }, "systemMetadata": { "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00" + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/snowflake/snowflake_privatelink_golden.json b/metadata-ingestion/tests/integration/snowflake/snowflake_privatelink_golden.json index 5057dacd5b0c8..5e55860483d24 100644 --- a/metadata-ingestion/tests/integration/snowflake/snowflake_privatelink_golden.json +++ b/metadata-ingestion/tests/integration/snowflake/snowflake_privatelink_golden.json @@ -1,13 +1,14 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", + "entityUrn": "urn:li:container:900b1327253068cb1537b1b3c807ddab", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { "json": { "customProperties": { "platform": "snowflake", + "instance": "instance1", "env": "PROD", "database": "test_db" }, @@ -29,13 +30,14 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "removed": false + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -46,12 +48,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", + "entityUrn": "urn:li:container:900b1327253068cb1537b1b3c807ddab", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "status", "aspect": { "json": { - "platform": "urn:li:dataPlatform:snowflake" + "removed": false } }, "systemMetadata": { @@ -61,15 +63,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Database" - ] + "removed": false } }, "systemMetadata": { @@ -80,12 +80,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", + "entityUrn": "urn:li:container:900b1327253068cb1537b1b3c807ddab", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "subTypes", "aspect": { "json": { - "path": [] + "typeNames": [ + "Database" + ] } }, "systemMetadata": { @@ -96,26 +98,17 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", + "entityUrn": "urn:li:container:900b1327253068cb1537b1b3c807ddab", "changeType": "UPSERT", - "aspectName": "containerProperties", + 
"aspectName": "browsePathsV2", "aspect": { "json": { - "customProperties": { - "platform": "snowflake", - "env": "PROD", - "database": "test_db", - "schema": "test_schema" - }, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/", - "name": "TEST_SCHEMA", - "description": "comment for TEST_DB.TEST_SCHEMA", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - } + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + ] } }, "systemMetadata": { @@ -126,12 +119,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", + "entityUrn": "urn:li:container:900b1327253068cb1537b1b3c807ddab", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "removed": false + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -141,13 +135,24 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "datasetProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:snowflake" + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_3/", + "name": "TABLE_3", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_3", + "description": "Comment for Table", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] } }, "systemMetadata": { @@ -157,14 +162,14 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Schema" + "Table" ] } }, @@ -175,32 +180,24 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + 
}, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } ] } @@ -212,13 +209,28 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "containerProperties", "aspect": { "json": { - "removed": false + "customProperties": { + "platform": "snowflake", + "instance": "instance1", + "env": "PROD", + "database": "test_db", + "schema": "test_schema" + }, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/", + "name": "TEST_SCHEMA", + "description": "comment for TEST_DB.TEST_SCHEMA", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + } } }, "systemMetadata": { @@ -229,12 +241,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_1", + "schemaName": "test_db.test_schema.table_3", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -392,24 +404,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "status", "aspect": { "json": { - "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_1/", - "name": "TABLE_1", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_1", - "description": "Comment for Table", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - }, - "tags": [] + "removed": false } }, "systemMetadata": { @@ -420,12 +421,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_3,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } }, "systemMetadata": { @@ -435,14 +436,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Schema" ] } }, @@ -453,20 +454,53 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + } + }, + 
"systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" }, { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" } ] } @@ -479,7 +513,85 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "create view view_1 as select * from table_1", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_1/", + "name": "VIEW_1", + "qualifiedName": "TEST_DB.TEST_SCHEMA.VIEW_1", + "description": "Comment for View", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": 
"snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -495,12 +607,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_2", + "schemaName": "test_db.test_schema.table_1", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -659,23 +771,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_2/", - "name": "TABLE_2", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_2", - "description": "Comment for Table", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - }, - "tags": [] + "typeNames": [ + "View" + ] } }, "systemMetadata": { @@ -686,12 +789,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } }, "systemMetadata": { @@ -702,39 +805,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "datasetProperties", "aspect": { "json": { - "typeNames": [ - "Table" - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - }, - { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - ] + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_1/", + "name": "TABLE_1", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_1", + "description": "Comment for Table", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] } }, "systemMetadata": { @@ -745,12 +832,25 @@ 
}, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + ] } }, "systemMetadata": { @@ -761,12 +861,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_3", + "schemaName": "test_db.test_schema.view_1", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -925,23 +1025,43 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_3/", - "name": "TABLE_3", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_3", - "description": "Comment for Table", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - }, - "tags": [] + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + ] } }, "systemMetadata": { @@ -952,12 +1072,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } }, "systemMetadata": { @@ -968,14 +1088,13 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -986,19 +1105,131 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "upstreamLineage", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - }, + "upstreams": [ { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_1)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_1)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_10)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_10)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_2)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_2)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_3)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_3)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_4)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_4)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_5)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_5)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_6)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_6)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_7)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_7)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_8)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_8)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_1,PROD),col_9)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_1,PROD),col_9)" + ], + "confidenceScore": 1.0 } ] } @@ -1011,7 +1242,40 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1027,12 +1291,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_4", + "schemaName": "test_db.test_schema.table_2", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -1191,15 +1455,15 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_4/", - "name": "TABLE_4", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_4", + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_10/", + "name": "TABLE_10", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_10", "description": "Comment for Table", "created": { "time": 1623110400000 @@ -1218,66 +1482,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Table" - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - }, - { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1293,7 +1498,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { @@ -1457,7 +1662,39 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1484,12 +1721,41 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_2/", + "name": "TABLE_2", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_2", + "description": "Comment for Table", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", "aspect": { "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "typeNames": [ + "Table" + ] } }, "systemMetadata": { @@ -1500,7 +1766,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1518,19 +1784,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" }, { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } ] } @@ -1543,12 +1813,25 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { 
"json": { - "removed": false + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + ] } }, "systemMetadata": { @@ -1559,12 +1842,76 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_5,PROD)", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_6", + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", + "changeType": "UPSERT", + "aspectName": "schemaMetadata", + "aspect": { + "json": { + "schemaName": "test_db.test_schema.table_10", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -1723,93 +2070,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProperties", - "aspect": { - "json": { - "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_6/", - "name": "TABLE_6", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_6", - "description": "Comment for Table", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - }, - "tags": [] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" 
- } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Table" - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - }, - { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1825,12 +2086,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_7", + "schemaName": "test_db.test_schema.table_6", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -1989,15 +2250,48 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { 
"json": { "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_7/", - "name": "TABLE_7", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_7", + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_6/", + "name": "TABLE_6", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_6", "description": "Comment for Table", "created": { "time": 1623110400000 @@ -2016,12 +2310,29 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_10,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -2032,7 +2343,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2050,19 +2361,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" }, { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } ] } @@ -2075,7 +2390,40 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_6,PROD)", + "changeType": 
"UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2091,12 +2439,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_8", + "schemaName": "test_db.test_schema.table_4", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -2255,15 +2603,15 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_8/", - "name": "TABLE_8", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_8", + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_7/", + "name": "TABLE_7", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_7", "description": "Comment for Table", "created": { "time": 1623110400000 @@ -2282,12 +2630,39 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_4/", + "name": "TABLE_4", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_4", + "description": "Comment for Table", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] } }, "systemMetadata": { @@ -2298,7 +2673,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2316,19 +2691,23 @@ }, { 
"entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" }, { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } ] } @@ -2341,12 +2720,43 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "subTypes", "aspect": { "json": { - "removed": false + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + ] } }, "systemMetadata": { @@ -2357,12 +2767,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_9", + "schemaName": "test_db.test_schema.table_7", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -2521,93 +2931,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProperties", - "aspect": { - "json": { - "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_9/", - "name": "TABLE_9", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_9", - "description": "Comment for Table", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - }, - "tags": [] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": 
"dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Table" - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - }, - { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2623,12 +2947,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "test_db.test_schema.table_10", + "schemaName": "test_db.test_schema.table_8", "platform": "urn:li:dataPlatform:snowflake", "version": 0, "created": { @@ -2787,23 +3111,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_4,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "customProperties": {}, - "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_10/", - "name": "TABLE_10", - "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_10", - "description": "Comment for Table", - "created": { - "time": 1623110400000 - }, - "lastModified": { - "time": 1623110400000 - }, - "tags": [] + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -2814,12 +3128,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": 
"urn:li:container:94c696a054bab40b73e640a7f82e3b1c" + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } }, "systemMetadata": { @@ -2830,14 +3144,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "datasetProperties", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_8/", + "name": "TABLE_8", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_8", + "description": "Comment for Table", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] } }, "systemMetadata": { @@ -2848,21 +3171,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_7,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "container", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:5e359958be02ce647cd9ac196dbd4585", - "urn": "urn:li:container:5e359958be02ce647cd9ac196dbd4585" - }, - { - "id": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c", - "urn": "urn:li:container:94c696a054bab40b73e640a7f82e3b1c" - } - ] + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } }, "systemMetadata": { @@ -2873,21 +3187,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } - ] + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -2898,20 +3204,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "subTypes", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } + "typeNames": [ + "Table" ] } }, @@ -2923,19 +3222,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_3,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "browsePathsV2", "aspect": { "json": { - "upstreams": [ + "path": [ { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - 
}, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } ] } @@ -2948,21 +3251,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_4,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "status", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } - ] + "removed": false } }, "systemMetadata": { @@ -2973,21 +3267,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_5,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_8,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } - ] + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" } }, "systemMetadata": { @@ -2998,21 +3284,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_6,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "datasetProperties", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } - ] + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_9/", + "name": "TABLE_9", + "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_9", + "description": "Comment for Table", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] } }, "systemMetadata": { @@ -3023,20 +3311,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_7,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "subTypes", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } + 
"typeNames": [ + "Table" ] } }, @@ -3048,19 +3329,23 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_8,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "browsePathsV2", "aspect": { "json": { - "upstreams": [ + "path": [ { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" } ] } @@ -3073,20 +3358,300 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_9,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", "changeType": "UPSERT", - "aspectName": "upstreamLineage", + "aspectName": "schemaMetadata", "aspect": { "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" - } + "schemaName": "test_db.test_schema.table_9", + "platform": "urn:li:dataPlatform:snowflake", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "col_1", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "NUMBER(38,0)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_2", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_3", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_4", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_5", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_6", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_7", + "nullable": false, + 
"description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_8", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_9", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_10", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_9,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "create view view_2 as select * from table_2", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "externalUrl": "https://app.abc12345.ap-south-1.privatelink.snowflakecomputing.com/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_2/", + "name": "VIEW_2", + "qualifiedName": "TEST_DB.TEST_SCHEMA.VIEW_2", + "description": "Comment for View", + "created": { + "time": 1623110400000 + }, + "lastModified": { + "time": 1623110400000 + }, + "tags": [] + } + }, + "systemMetadata": { + 
"lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:snowflake,instance1)" + }, + { + "id": "urn:li:container:900b1327253068cb1537b1b3c807ddab", + "urn": "urn:li:container:900b1327253068cb1537b1b3c807ddab" + }, + { + "id": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f", + "urn": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } ] } }, @@ -3098,7 +3663,187 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_10,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "schemaMetadata", + "aspect": { + "json": { + "schemaName": "test_db.test_schema.view_2", + "platform": "urn:li:dataPlatform:snowflake", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "col_1", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "NUMBER(38,0)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_2", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_3", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_4", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_5", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_6", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_7", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + 
"nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_8", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_9", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "col_10", + "nullable": false, + "description": "Comment for column", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR(255)", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:eac598ee71ef1b5e24448d650c08aa5f" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "snowflake-2022_06_07-17_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD)", "changeType": "UPSERT", "aspectName": "upstreamLineage", "aspect": { @@ -3109,8 +3854,120 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_db.test_schema.table_2,PROD)", - "type": "TRANSFORMED" + "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_1)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_1)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_10)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_10)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_2)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_2)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_3)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_3)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_4)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_4)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_5)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_5)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_6)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_6)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_7)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_7)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_8)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_8)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.table_2,PROD),col_9)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,instance1.test_db.test_schema.view_2,PROD),col_9)" + ], + "confidenceScore": 1.0 } ] } diff --git a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py index 3dafe85ef950a..4c00e48ede9fb 100644 --- a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py +++ b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py @@ -211,11 +211,12 @@ def test_snowflake_private_link(pytestconfig, tmp_path, mock_time, mock_datahub_ include_technical_schema=True, include_table_lineage=True, include_column_lineage=False, - include_views=False, - include_view_lineage=False, + include_views=True, + include_view_lineage=True, include_usage_stats=False, incremental_lineage=False, include_operational_stats=False, + platform_instance="instance1", start_time=datetime(2022, 6, 6, 0, 0, 0, 0).replace( tzinfo=timezone.utc ), From 4d2c009d400406b3cc41767864b07e9933dfe841 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Sat, 28 Oct 2023 01:32:11 +0530 Subject: [PATCH 015/792] feat: Add flag to hide/display the autocomplete query for search bar (#9104) Co-authored-by: John Joyce --- datahub-web-react/src/app/home/HomePageHeader.tsx | 1 + datahub-web-react/src/app/search/SearchBar.tsx | 6 ++++-- datahub-web-react/src/app/search/SearchHeader.tsx | 1 + 3 files 
changed, 6 insertions(+), 2 deletions(-)

diff --git a/datahub-web-react/src/app/home/HomePageHeader.tsx b/datahub-web-react/src/app/home/HomePageHeader.tsx
index 5919d2dbf5b7e..e5c01252a865b 100644
--- a/datahub-web-react/src/app/home/HomePageHeader.tsx
+++ b/datahub-web-react/src/app/home/HomePageHeader.tsx
@@ -275,6 +275,7 @@ export const HomePageHeader = () => {
                         viewsEnabled={viewsEnabled}
                         combineSiblings
                         showQuickFilters
+                        showViewAllResults
                     />
                 {searchResultsToShow && searchResultsToShow.length > 0 && (
diff --git a/datahub-web-react/src/app/search/SearchBar.tsx b/datahub-web-react/src/app/search/SearchBar.tsx
index b4699994bc460..5f797e68fe0e8 100644
--- a/datahub-web-react/src/app/search/SearchBar.tsx
+++ b/datahub-web-react/src/app/search/SearchBar.tsx
@@ -119,6 +119,7 @@ interface Props {
     setIsSearchBarFocused?: (isSearchBarFocused: boolean) => void;
     onFocus?: () => void;
     onBlur?: () => void;
+    showViewAllResults?: boolean;
 }
 
 const defaultProps = {
@@ -146,6 +147,7 @@ export const SearchBar = ({
     setIsSearchBarFocused,
     onFocus,
     onBlur,
+    showViewAllResults = false,
 }: Props) => {
     const history = useHistory();
     const [searchQuery, setSearchQuery] = useState(initialQuery);
@@ -203,7 +205,7 @@ export const SearchBar = ({
     const { quickFilters, selectedQuickFilter, setSelectedQuickFilter } = useQuickFiltersContext();
 
     const autoCompleteQueryOptions = useMemo(() => {
-        if (effectiveQuery === '') return [];
+        if (effectiveQuery === '' || !showViewAllResults) return [];
 
         return [
             {
@@ -212,7 +214,7 @@
                 type: EXACT_AUTOCOMPLETE_OPTION_TYPE,
             },
         ];
-    }, [effectiveQuery]);
+    }, [effectiveQuery, showViewAllResults]);
 
     const autoCompleteEntityOptions = useMemo(() => {
         return suggestions.map((suggestion: AutoCompleteResultForEntity) => {
diff --git a/datahub-web-react/src/app/search/SearchHeader.tsx b/datahub-web-react/src/app/search/SearchHeader.tsx
index 74bc562e275d1..91f9753a3d601 100644
--- a/datahub-web-react/src/app/search/SearchHeader.tsx
+++ b/datahub-web-react/src/app/search/SearchHeader.tsx
@@ -107,6 +107,7 @@ export const SearchHeader = ({
                     combineSiblings
                     fixAutoComplete
                     showQuickFilters
+                    showViewAllResults
                 />
 
From aceff13ebb2d6758a5e42b592f4b5eb7d5af29e3 Mon Sep 17 00:00:00 2001
From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com>
Date: Sat, 28 Oct 2023 04:55:57 +0530
Subject: [PATCH 016/792] docs(timeline): correct markdown heading level (#9126)

---
 docs/dev-guides/timeline.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/dev-guides/timeline.md b/docs/dev-guides/timeline.md
index 829aef1d3eefa..6a8e158d40ebf 100644
--- a/docs/dev-guides/timeline.md
+++ b/docs/dev-guides/timeline.md
@@ -228,7 +228,7 @@ http://localhost:8080/openapi/timeline/v1/urn%3Ali%3Adataset%3A%28urn%3Ali%3Adat
 REMOVE GLOSSARY_TERM dataset:hive:testTimelineDataset (urn:li:glossaryTerm:SavingsAccount): The GlossaryTerm 'SavingsAccount' for the entity 'urn:li:dataset:(urn:li:dataPlatform:hive,testTimelineDataset,PROD)' has been removed.
 ```
 
-# Explore the API
+## Explore the API
 
 The API is browse-able via the UI through the dropdown.
 Here are a few screenshots showing how to navigate to it. You can try out the API and send example requests.
 
@@ -243,7 +243,7 @@ Here are a few screenshots showing how to navigate to the API. You can try out the AP

-# Future Work +## Future Work - Supporting versions as start and end parameters as part of the call to the timeline API - Supporting entities beyond Datasets From 9ae0e93d82eac2040af2c3d23d52878e57e19df1 Mon Sep 17 00:00:00 2001 From: Ellie O'Neil <110510035+eboneil@users.noreply.github.com> Date: Fri, 27 Oct 2023 20:18:31 -0700 Subject: [PATCH 017/792] docs(graphql): Correct mutation -> query for searchAcrossLineage examples (#9134) --- docs/api/tutorials/lineage.md | 8 ++------ metadata-ingestion/examples/library/read_lineage_rest.py | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/docs/api/tutorials/lineage.md b/docs/api/tutorials/lineage.md index 4baad09099d07..13ec716b7870b 100644 --- a/docs/api/tutorials/lineage.md +++ b/docs/api/tutorials/lineage.md @@ -113,12 +113,10 @@ Expected Response: You can now see the lineage between `fct_users_deleted` and `logging_events`. -

- ## Add Column-level Lineage @@ -135,12 +133,10 @@ You can now see the lineage between `fct_users_deleted` and `logging_events`. You can now see the column-level lineage between datasets. Note that you have to enable `Show Columns` to be able to see the column-level lineage. -

- ## Read Lineage @@ -180,7 +176,7 @@ query searchAcrossLineage { } ``` -This example shows using lineage degrees as a filter, but additional search filters can be included here as well. +This example shows using lineage degrees as a filter, but additional search filters can be included here as well. @@ -188,7 +184,7 @@ This example shows using lineage degrees as a filter, but additional search filt ```shell curl --location --request POST 'http://localhost:8080/api/graphql' \ --header 'Authorization: Bearer ' \ ---header 'Content-Type: application/json' --data-raw '{ { "query": "mutation searchAcrossLineage { searchAcrossLineage( input: { query: \"*\" urn: \"urn:li:dataset:(urn:li:dataPlatform:dbt,long_tail_companions.adoption.human_profiles,PROD)\" start: 0 count: 10 direction: DOWNSTREAM orFilters: [ { and: [ { condition: EQUAL negated: false field: \"degree\" values: [\"1\", \"2\", \"3+\"] } ] } ] } ) { searchResults { degree entity { urn type } } }}" +--header 'Content-Type: application/json' --data-raw '{ { "query": "query searchAcrossLineage { searchAcrossLineage( input: { query: \"*\" urn: \"urn:li:dataset:(urn:li:dataPlatform:dbt,long_tail_companions.adoption.human_profiles,PROD)\" start: 0 count: 10 direction: DOWNSTREAM orFilters: [ { and: [ { condition: EQUAL negated: false field: \"degree\" values: [\"1\", \"2\", \"3+\"] } ] } ] } ) { searchResults { degree entity { urn type } } }}" }}' ``` diff --git a/metadata-ingestion/examples/library/read_lineage_rest.py b/metadata-ingestion/examples/library/read_lineage_rest.py index 34437ed86280d..bd9b4e8651dba 100644 --- a/metadata-ingestion/examples/library/read_lineage_rest.py +++ b/metadata-ingestion/examples/library/read_lineage_rest.py @@ -6,7 +6,7 @@ # Query multiple aspects from entity query = """ -mutation searchAcrossLineage { +query searchAcrossLineage { searchAcrossLineage( input: { query: "*" From 3f4ab44a91bff734e0a0437622d7579410875ec5 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Sun, 29 Oct 2023 16:26:05 -0500 Subject: [PATCH 018/792] feat(kafka): increase kafka message size and enable compression (#9038) Co-authored-by: Pedro Silva --- .github/workflows/docker-unified.yml | 10 +++- build.gradle | 2 +- .../app/client/KafkaTrackingProducer.java | 10 ++-- .../app/config/ConfigurationProvider.java | 6 ++- docker/broker/env/docker.env | 4 +- docker/datahub-frontend/Dockerfile | 4 +- docker/datahub-gms/Dockerfile | 4 +- docker/datahub-mae-consumer/Dockerfile | 4 +- docker/datahub-mce-consumer/Dockerfile | 4 +- docker/datahub-upgrade/Dockerfile | 4 +- docker/kafka-setup/kafka-config.sh | 2 + docker/kafka-setup/kafka-setup.sh | 46 ++++++++++++------- docker/kafka-setup/kafka-topic-workers.sh | 10 +++- .../docker-compose-m1.quickstart.yml | 2 + ...er-compose-without-neo4j-m1.quickstart.yml | 2 + ...ocker-compose-without-neo4j.quickstart.yml | 2 + .../quickstart/docker-compose.quickstart.yml | 2 + docs/deploy/environment-vars.md | 22 +++++---- .../config/kafka/ConsumerConfiguration.java | 10 ++++ .../config/kafka/KafkaConfiguration.java | 2 + .../config/kafka/ProducerConfiguration.java | 4 ++ .../src/main/resources/application.yml | 4 ++ .../kafka/DataHubKafkaProducerFactory.java | 2 + .../kafka/KafkaEventConsumerFactory.java | 4 ++ .../kafka/SimpleKafkaConsumerFactory.java | 9 +++- 25 files changed, 135 insertions(+), 40 deletions(-) create mode 100644 metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java diff --git 
a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml
index 8666a5e2e2171..5f5a62de6288c 100644
--- a/.github/workflows/docker-unified.yml
+++ b/.github/workflows/docker-unified.yml
@@ -851,8 +851,14 @@ jobs:
         if: failure()
         run: |
           docker ps -a
-          docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log
-          docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log
+          docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true
+          docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true
+          docker logs datahub-mae-consumer >& mae-${{ matrix.test_strategy }}.log || true
+          docker logs datahub-mce-consumer >& mce-${{ matrix.test_strategy }}.log || true
+          docker logs broker >& broker-${{ matrix.test_strategy }}.log || true
+          docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true
+          docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true
+          docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true
       - name: Upload logs
         uses: actions/upload-artifact@v3
         if: failure()
diff --git a/build.gradle b/build.gradle
index cf55a59cfe694..bd282535fa13c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -39,7 +39,7 @@ buildscript {
 plugins {
   id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2'
   id 'com.github.johnrengelman.shadow' version '6.1.0'
-  id 'com.palantir.docker' version '0.35.0'
+  id 'com.palantir.docker' version '0.35.0' apply false
   // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/
   // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0"
 }
diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java
index fab17f9215d4a..59e91a6d5a0f7 100644
--- a/datahub-frontend/app/client/KafkaTrackingProducer.java
+++ b/datahub-frontend/app/client/KafkaTrackingProducer.java
@@ -1,6 +1,8 @@
 package client;
 
+import com.linkedin.metadata.config.kafka.ProducerConfiguration;
 import com.typesafe.config.Config;
+import config.ConfigurationProvider;
 import org.apache.kafka.clients.CommonClientConfigs;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerConfig;
@@ -35,12 +37,12 @@ public class KafkaTrackingProducer {
     private final KafkaProducer<String, String> _producer;
 
     @Inject
-    public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle) {
+    public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle, final ConfigurationProvider configurationProvider) {
         _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled");
 
         if (_isEnabled) {
             _logger.debug("Analytics tracking is enabled");
-            _producer = createKafkaProducer(config);
+            _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer());
 
             lifecycle.addStopHook(
                 () -> {
@@ -62,13 +64,15 @@ public void send(ProducerRecord<String, String> record) {
         _producer.send(record);
     }
 
-    private static KafkaProducer<String, String> createKafkaProducer(Config config) {
+    private static KafkaProducer<String, String> createKafkaProducer(Config config, ProducerConfiguration producerConfiguration) {
         final Properties props = new Properties();
         props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend");
         props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms"));
         props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server"));
         props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, 
"org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. + props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); + props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); final String securityProtocolConfig = "analytics.kafka.security.protocol"; if (config.hasPath(securityProtocolConfig) diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 00a5472ec3476..8f526c831b5c9 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -1,6 +1,7 @@ package config; import com.linkedin.metadata.config.cache.CacheConfiguration; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import lombok.Data; @@ -11,7 +12,6 @@ /** * Minimal sharing between metadata-service and frontend - * Initially for use of client caching configuration. * Does not use the factories module to avoid transitive dependencies. */ @EnableConfigurationProperties @@ -19,6 +19,10 @@ @ConfigurationProperties @Data public class ConfigurationProvider { + /** + * Kafka related configs. + */ + private KafkaConfiguration kafka; /** * Configuration for caching diff --git a/docker/broker/env/docker.env b/docker/broker/env/docker.env index 18115697c2832..6eb958609daf1 100644 --- a/docker/broker/env/docker.env +++ b/docker/broker/env/docker.env @@ -5,4 +5,6 @@ KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9 KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 KAFKA_HEAP_OPTS=-Xms256m -Xmx256m -KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false \ No newline at end of file +KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false +KAFKA_MESSAGE_MAX_BYTES=5242880 +KAFKA_MAX_MESSAGE_BYTES=5242880 \ No newline at end of file diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 9efc0d2ce8753..9c13e73078042 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -8,10 +8,12 @@ RUN addgroup -S datahub && adduser -S datahub -G datahub # Upgrade Alpine and base packages # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ - && apk --no-cache add curl sqlite \ + && apk --no-cache add curl sqlite libc6-compat java-snappy \ && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ +ENV LD_LIBRARY_PATH="/lib:/lib64" + FROM base as prod-install COPY ./datahub-frontend.zip / diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index f5428f7480403..e271188a703cc 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -18,7 +18,7 @@ FROM alpine:3 AS base ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ - && apk --no-cache add curl bash coreutils gcompat sqlite \ + && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ && apk --no-cache add 
openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ @@ -29,6 +29,8 @@ RUN apk --no-cache --update-cache --available upgrade \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin +ENV LD_LIBRARY_PATH="/lib:/lib64" + FROM base as prod-install COPY war.war /datahub/datahub-gms/bin/war.war COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-gms/resources/entity-registry.yml diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 4b321b1639c1b..ec3da4de71d15 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -18,7 +18,7 @@ FROM alpine:3 AS base ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ - && apk --no-cache add curl bash coreutils sqlite \ + && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ @@ -26,6 +26,8 @@ RUN apk --no-cache --update-cache --available upgrade \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin +ENV LD_LIBRARY_PATH="/lib:/lib64" + FROM base as prod-install COPY mae-consumer-job.jar /datahub/datahub-mae-consumer/bin/ COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-mae-consumer/resources/entity-registry.yml diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index 4d38ee6daa235..f9c47f77a98f5 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -18,7 +18,7 @@ FROM alpine:3 AS base ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ - && apk --no-cache add curl bash sqlite \ + && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ @@ -33,6 +33,8 @@ COPY docker/datahub-mce-consumer/start.sh /datahub/datahub-mce-consumer/scripts/ COPY docker/monitoring/client-prometheus-config.yaml /datahub/datahub-mce-consumer/scripts/prometheus-config.yaml RUN chmod +x /datahub/datahub-mce-consumer/scripts/start.sh +ENV LD_LIBRARY_PATH="/lib:/lib64" + FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134 diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 945be54678a24..f08e7268e4018 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -18,7 +18,7 @@ FROM alpine:3 AS base ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ - && apk --no-cache add curl bash coreutils gcompat sqlite \ + && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ @@ -28,6 +28,8 @@ RUN apk --no-cache --update-cache --available upgrade \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin +ENV LD_LIBRARY_PATH="/lib:/lib64" + FROM base as prod-install COPY datahub-upgrade.jar /datahub/datahub-upgrade/bin/ COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-gms/resources/entity-registry.yml diff --git a/docker/kafka-setup/kafka-config.sh b/docker/kafka-setup/kafka-config.sh index 2ba8e2d7c5d47..4d5698ccc3856 100644 --- a/docker/kafka-setup/kafka-config.sh +++ b/docker/kafka-setup/kafka-config.sh @@ -2,6 +2,7 @@ : ${PARTITIONS:=1} : ${REPLICATION_FACTOR:=1} +: ${MAX_MESSAGE_BYTES:=5242880} : ${KAFKA_PROPERTIES_SECURITY_PROTOCOL:=PLAINTEXT} @@ -12,3 +13,4 @@ export KAFKA_HEAP_OPTS="-Xmx64M" CONNECTION_PROPERTIES_PATH=/tmp/connection.properties WORKERS=4 +DELIMITER=";" diff --git a/docker/kafka-setup/kafka-setup.sh b/docker/kafka-setup/kafka-setup.sh index b5024e49e59f1..439ffb4d4d829 100755 --- a/docker/kafka-setup/kafka-setup.sh +++ b/docker/kafka-setup/kafka-setup.sh @@ -102,24 +102,43 @@ exec 4<&- send() { work_id=$1 topic_args=$2 - echo sending $work_id $topic_args - echo "$work_id" "$topic_args" 1>&3 ## the fifo is fd 3 + topic_config=$3 + + echo -e "sending $work_id\n worker_args: ${topic_args}${DELIMITER}${topic_config}" + echo "$work_id" "${topic_args}${DELIMITER}${topic_config}" 1>&3 ## the fifo is fd 3 } ## Produce the jobs to run. 
-send "$METADATA_AUDIT_EVENT_NAME" "--partitions $PARTITIONS --topic $METADATA_AUDIT_EVENT_NAME" -send "$METADATA_CHANGE_EVENT_NAME" "--partitions $PARTITIONS --topic $METADATA_CHANGE_EVENT_NAME" -send "$FAILED_METADATA_CHANGE_EVENT_NAME" "--partitions $PARTITIONS --topic $FAILED_METADATA_CHANGE_EVENT_NAME" -send "$METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME" "--partitions $PARTITIONS --topic $METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME" +send "$METADATA_AUDIT_EVENT_NAME" "--partitions $PARTITIONS --topic $METADATA_AUDIT_EVENT_NAME" \ + "--entity-type topics --entity-name $METADATA_AUDIT_EVENT_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" + +send "$METADATA_CHANGE_EVENT_NAME" "--partitions $PARTITIONS --topic $METADATA_CHANGE_EVENT_NAME" \ + "--entity-type topics --entity-name $METADATA_CHANGE_EVENT_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" +send "$FAILED_METADATA_CHANGE_EVENT_NAME" "--partitions $PARTITIONS --topic $FAILED_METADATA_CHANGE_EVENT_NAME" \ + "--entity-type topics --entity-name $FAILED_METADATA_CHANGE_EVENT_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" + +send "$METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME" "--partitions $PARTITIONS --topic $METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME" \ + "--entity-type topics --entity-name $METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" # Set retention to 90 days -send "$METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME" "--partitions $PARTITIONS --config retention.ms=7776000000 --topic $METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME" -send "$METADATA_CHANGE_PROPOSAL_TOPIC_NAME" "--partitions $PARTITIONS --topic $METADATA_CHANGE_PROPOSAL_TOPIC_NAME" -send "$FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME" "--partitions $PARTITIONS --topic $FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME" -send "$PLATFORM_EVENT_TOPIC_NAME" "--partitions $PARTITIONS --topic $PLATFORM_EVENT_TOPIC_NAME" +send "$METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME" "--partitions $PARTITIONS --config retention.ms=7776000000 --topic $METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME" \ + "--entity-type topics --entity-name $METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" + +send "$METADATA_CHANGE_PROPOSAL_TOPIC_NAME" "--partitions $PARTITIONS --topic $METADATA_CHANGE_PROPOSAL_TOPIC_NAME" \ + "--entity-type topics --entity-name $METADATA_CHANGE_PROPOSAL_TOPIC_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" +send "$FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME" "--partitions $PARTITIONS --topic $FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME" \ + "--entity-type topics --entity-name $FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" + +send "$PLATFORM_EVENT_TOPIC_NAME" "--partitions $PARTITIONS --topic $PLATFORM_EVENT_TOPIC_NAME" \ + "--entity-type topics --entity-name $PLATFORM_EVENT_TOPIC_NAME --alter --add-config max.message.bytes=$MAX_MESSAGE_BYTES" # Infinite retention upgrade topic -send "$DATAHUB_UPGRADE_HISTORY_TOPIC_NAME" "--partitions 1 --config retention.ms=-1 --topic $DATAHUB_UPGRADE_HISTORY_TOPIC_NAME" + # Make sure the retention.ms config for $DATAHUB_UPGRADE_HISTORY_TOPIC_NAME is configured to infinite + # Please see the bug report below for details + # https://github.com/datahub-project/datahub/issues/7882 +send "$DATAHUB_UPGRADE_HISTORY_TOPIC_NAME" "--partitions 1 --config retention.ms=-1 --topic $DATAHUB_UPGRADE_HISTORY_TOPIC_NAME" \ + "--entity-type topics --entity-name 
"$DATAHUB_UPGRADE_HISTORY_TOPIC_NAME" --alter --add-config retention.ms=-1" + # Create topic for datahub usage event if [[ $DATAHUB_ANALYTICS_ENABLED == true ]]; then send "$DATAHUB_USAGE_EVENT_NAME" "--partitions $PARTITIONS --topic $DATAHUB_USAGE_EVENT_NAME" @@ -150,8 +169,3 @@ if [[ $USE_CONFLUENT_SCHEMA_REGISTRY == "TRUE" ]]; then --entity-name _schemas \ --alter --add-config cleanup.policy=compact fi - -# Make sure the retention.ms config for $DATAHUB_UPGRADE_HISTORY_TOPIC_NAME is configured to infinite -# Please see the bug report below for details -# https://github.com/datahub-project/datahub/issues/7882 -kafka-configs.sh --command-config $CONNECTION_PROPERTIES_PATH --bootstrap-server $KAFKA_BOOTSTRAP_SERVER --entity-type topics --entity-name "$DATAHUB_UPGRADE_HISTORY_TOPIC_NAME" --alter --add-config retention.ms=-1 diff --git a/docker/kafka-setup/kafka-topic-workers.sh b/docker/kafka-setup/kafka-topic-workers.sh index fd0d45c3f4611..3ddf41abbabf5 100644 --- a/docker/kafka-setup/kafka-topic-workers.sh +++ b/docker/kafka-setup/kafka-topic-workers.sh @@ -11,10 +11,18 @@ START_LOCK=$4 ## the queue workers are supposed to be doing job() { i=$1 - topic_args=$2 + worker_args=$2 + topic_args=$(echo $worker_args | cut -d "$DELIMITER" -f 1) + topic_config=$(echo $worker_args | cut -d "$DELIMITER" -f 2) + + echo " $i: kafka-topics.sh --create --if-not-exist $topic_args" kafka-topics.sh --create --if-not-exists --command-config $CONNECTION_PROPERTIES_PATH --bootstrap-server $KAFKA_BOOTSTRAP_SERVER \ --replication-factor $REPLICATION_FACTOR \ $topic_args + if [[ ! -z "$topic_config" ]]; then + echo " $i: kafka-configs.sh $topic_config" + kafka-configs.sh --command-config $CONNECTION_PROPERTIES_PATH --bootstrap-server $KAFKA_BOOTSTRAP_SERVER $topic_config + fi } ## This is the worker to read from the queue. 
diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 89e9aaa0defd6..c5de687d335b9 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -16,6 +16,8 @@ services: - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false + - KAFKA_MESSAGE_MAX_BYTES=5242880 + - KAFKA_MAX_MESSAGE_BYTES=5242880 healthcheck: interval: 1s retries: 5 diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index f6284edc83648..b6935f24c5ce2 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -16,6 +16,8 @@ services: - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false + - KAFKA_MESSAGE_MAX_BYTES=5242880 + - KAFKA_MAX_MESSAGE_BYTES=5242880 healthcheck: interval: 1s retries: 5 diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 4e3503e35c0db..4ff8bbd70da85 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -16,6 +16,8 @@ services: - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false + - KAFKA_MESSAGE_MAX_BYTES=5242880 + - KAFKA_MAX_MESSAGE_BYTES=5242880 healthcheck: interval: 1s retries: 5 diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index e2f52064389e0..f2950ebab2c9d 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -16,6 +16,8 @@ services: - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false + - KAFKA_MESSAGE_MAX_BYTES=5242880 + - KAFKA_MAX_MESSAGE_BYTES=5242880 healthcheck: interval: 1s retries: 5 diff --git a/docs/deploy/environment-vars.md b/docs/deploy/environment-vars.md index 779c3d3d7c432..4c7b249349ca0 100644 --- a/docs/deploy/environment-vars.md +++ b/docs/deploy/environment-vars.md @@ -67,15 +67,19 @@ In general, there are **lots** of Kafka configuration environment variables for These environment variables follow the standard Spring representation of properties as environment variables. Simply replace the dot, `.`, with an underscore, `_`, and convert to uppercase. -| Variable | Default | Unit/Type | Components | Description | -|-----------------------------------------------------|----------------------------------------------|-----------|-----------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `KAFKA_LISTENER_CONCURRENCY` | 1 | integer | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Number of Kafka consumer threads. Optimize throughput by matching to topic partitions. | -| `SPRING_KAFKA_PRODUCER_PROPERTIES_MAX_REQUEST_SIZE` | 1048576 | bytes | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Max produced message size. Note that the topic configuration is not controlled by this variable. 
| -| `SCHEMA_REGISTRY_TYPE` | `INTERNAL` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Schema registry implementation. One of `INTERNAL` or `KAFKA` or `AWS_GLUE` | -| `KAFKA_SCHEMAREGISTRY_URL` | `http://localhost:8080/schema-registry/api/` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Schema registry url. Used for `INTERNAL` and `KAFKA`. The default value is for the `GMS` component. The `MCE Consumer` and `MAE Consumer` should be the `GMS` hostname and port. | -| `AWS_GLUE_SCHEMA_REGISTRY_REGION` | `us-east-1` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | If using `AWS_GLUE` in the `SCHEMA_REGISTRY_TYPE` variable for the schema registry implementation. | -| `AWS_GLUE_SCHEMA_REGISTRY_NAME` | `` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | If using `AWS_GLUE` in the `SCHEMA_REGISTRY_TYPE` variable for the schema registry. | -| `USE_CONFLUENT_SCHEMA_REGISTRY` | `true` | boolean | [`kafka-setup`] | Enable Confluent schema registry configuration. | +| Variable | Default | Unit/Type | Components | Description | +|-----------------------------------------------------|----------------------------------------------|-----------|--------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `KAFKA_LISTENER_CONCURRENCY` | 1 | integer | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Number of Kafka consumer threads. Optimize throughput by matching to topic partitions. | +| `SPRING_KAFKA_PRODUCER_PROPERTIES_MAX_REQUEST_SIZE` | 1048576 | bytes | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Max produced message size. Note that the topic configuration is not controlled by this variable. | +| `SCHEMA_REGISTRY_TYPE` | `INTERNAL` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Schema registry implementation. One of `INTERNAL` or `KAFKA` or `AWS_GLUE` | +| `KAFKA_SCHEMAREGISTRY_URL` | `http://localhost:8080/schema-registry/api/` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | Schema registry url. Used for `INTERNAL` and `KAFKA`. The default value is for the `GMS` component. The `MCE Consumer` and `MAE Consumer` should be the `GMS` hostname and port. | +| `AWS_GLUE_SCHEMA_REGISTRY_REGION` | `us-east-1` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | If using `AWS_GLUE` in the `SCHEMA_REGISTRY_TYPE` variable for the schema registry implementation. | +| `AWS_GLUE_SCHEMA_REGISTRY_NAME` | `` | string | [`GMS`, `MCE Consumer`, `MAE Consumer`] | If using `AWS_GLUE` in the `SCHEMA_REGISTRY_TYPE` variable for the schema registry. | +| `USE_CONFLUENT_SCHEMA_REGISTRY` | `true` | boolean | [`kafka-setup`] | Enable Confluent schema registry configuration. | +| `KAFKA_PRODUCER_MAX_REQUEST_SIZE` | `5242880` | integer | [`Frontend`, `GMS`, `MCE Consumer`, `MAE Consumer`] | Max produced message size. Note that the topic configuration is not controlled by this variable. | +| `KAFKA_CONSUMER_MAX_PARTITION_FETCH_BYTES` | `5242880` | integer | [`GMS`, `MCE Consumer`, `MAE Consumer`] | The maximum amount of data per-partition the server will return. Records are fetched in batches by the consumer. If the first record batch in the first non-empty partition of the fetch is larger than this limit, the batch will still be returned to ensure that the consumer can make progress. 
|
+| `MAX_MESSAGE_BYTES`                                 | `5242880`                                    | integer   | [`kafka-setup`]                                          | Sets the max message size on the kafka topics.                                                                                                                                                                                                                                                        |
+| `KAFKA_PRODUCER_COMPRESSION_TYPE`                   | `snappy`                                     | string    | [`Frontend`, `GMS`, `MCE Consumer`, `MAE Consumer`]      | The compression used by the producer.                                                                                                                                                                                                                                                                 |
 
 ## Frontend
 
diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java
new file mode 100644
index 0000000000000..7a93119226a2d
--- /dev/null
+++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java
@@ -0,0 +1,10 @@
+package com.linkedin.metadata.config.kafka;
+
+import lombok.Data;
+
+
+@Data
+public class ConsumerConfiguration {
+
+  private int maxPartitionFetchBytes;
+}
diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/KafkaConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/KafkaConfiguration.java
index 2966abfc63396..2345f88352c17 100644
--- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/KafkaConfiguration.java
+++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/KafkaConfiguration.java
@@ -12,4 +12,6 @@ public class KafkaConfiguration {
   private SchemaRegistryConfiguration schemaRegistry;
 
   private ProducerConfiguration producer;
+
+  private ConsumerConfiguration consumer;
 }
diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java
index 2bf4cea3f0c18..26a8c6b649133 100644
--- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java
+++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java
@@ -13,4 +13,8 @@ public class ProducerConfiguration {
   private int requestTimeout;
 
   private int backoffTimeout;
+
+  private String compressionType;
+
+  private int maxRequestSize;
 }
diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml
index 5d72e24748072..b817208672e08 100644
--- a/metadata-service/configuration/src/main/resources/application.yml
+++ b/metadata-service/configuration/src/main/resources/application.yml
@@ -228,6 +228,10 @@ kafka:
     deliveryTimeout: ${KAFKA_PRODUCER_DELIVERY_TIMEOUT:30000}
     requestTimeout: ${KAFKA_PRODUCER_REQUEST_TIMEOUT:3000}
     backoffTimeout: ${KAFKA_PRODUCER_BACKOFF_TIMEOUT:500}
+    compressionType: ${KAFKA_PRODUCER_COMPRESSION_TYPE:snappy} # producer's compression algorithm
+    maxRequestSize: ${KAFKA_PRODUCER_MAX_REQUEST_SIZE:5242880} # the max bytes sent by the producer, also see kafka-setup MAX_MESSAGE_BYTES for matching value
+  consumer:
+    maxPartitionFetchBytes: ${KAFKA_CONSUMER_MAX_PARTITION_FETCH_BYTES:5242880} # the max bytes consumed per partition
   schemaRegistry:
     type: ${SCHEMA_REGISTRY_TYPE:KAFKA} # INTERNAL or KAFKA or AWS_GLUE
     url: ${KAFKA_SCHEMAREGISTRY_URL:http://localhost:8081}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
index c67a2e704681f..78b3de501e0e5 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
@@ -59,6 +59,8 @@ public static Map<String, Object> buildProducerProperties(SchemaRegistryConfig s
     props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getDeliveryTimeout());
     props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getRequestTimeout());
     props.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, kafkaConfiguration.getProducer().getBackoffTimeout());
+    props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, kafkaConfiguration.getProducer().getCompressionType());
+    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, kafkaConfiguration.getProducer().getMaxRequestSize());
 
     // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
     schemaRegistryConfig.getProperties().entrySet()
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
index ba18be6834d14..7a9e80781d639 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
@@ -70,6 +70,7 @@ private static Map<String, Object> buildCustomizedProperties(KafkaProperties bas
     consumerProps.setEnableAutoCommit(true);
     consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
 
+    // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
     if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
       consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
@@ -84,6 +85,9 @@ private static Map<String, Object> buildCustomizedProperties(KafkaProperties bas
         .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
         .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue()));
 
+    customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
+        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
+
     return customizedProperties;
   }
 
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
index 05ebfdddf8b80..e12cbec87fe45 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
@@ -4,8 +4,11 @@
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import java.time.Duration;
 import java.util.Arrays;
+import java.util.Map;
+
 import lombok.extern.slf4j.Slf4j;
 import org.apache.avro.generic.GenericRecord;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
@@ -40,10 +43,14 @@ protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurat
       consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
     }
     // else we rely on KafkaProperties which defaults to localhost:9092
 
+    Map<String, Object> customizedProperties = consumerProps.buildProperties();
+    customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
+        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
+
     ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
         new ConcurrentKafkaListenerContainerFactory<>();
     factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
-    factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(properties.buildConsumerProperties()));
+    factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(customizedProperties));
 
     log.info("Simple KafkaListenerContainerFactory built successfully");

From 758ed47644b330efbbee4e61dde71f6ff5808e23 Mon Sep 17 00:00:00 2001
From: Dmytro Kulyk <34435869+KulykDmytro@users.noreply.github.com>
Date: Mon, 30 Oct 2023 06:14:32 +0200
Subject: [PATCH 019/792] feat(ingest/jsonschema) enable schema-aware
 `JsonSchemaTranslator` (#8971)

Co-authored-by: Harshal Sheth
---
 .../src/datahub/ingestion/extractor/json_schema_util.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py b/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py
index c943b83a887ed..360ddf1129154 100644
--- a/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py
+++ b/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py
@@ -598,7 +598,8 @@ def get_fields_from_schema(
             jsonref_schema_dict = schema_dict
         else:
             # first validate the schema using a json validator
-            jsonschema.Draft7Validator.check_schema(schema_dict)
+            validator = jsonschema.validators.validator_for(schema_dict)
+            validator.check_schema(schema_dict)
             # then apply jsonref
             jsonref_schema_dict = jsonref.loads(schema_string)
     except Exception as e:

From 2c019148ad451752eff582c3206df75c83fe2a63 Mon Sep 17 00:00:00 2001
From: Alex Klavens <123000295+alexklavensnyt@users.noreply.github.com>
Date: Mon, 30 Oct 2023 04:43:52 -0400
Subject: =?UTF-8?q?fix(metadata-ingestion):=20adds=20defau?=
 =?UTF-8?q?lt=20value=20to=20=5Fresolved=5Fdomain=5Furn=20i=E2=80=A6=20(#9?=
 =?UTF-8?q?115)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Alex Klavens
---
 .../src/datahub/api/entities/dataproduct/dataproduct.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py b/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py
index 2d9b14ceb2d06..28e4a03b8f75f 100644
--- a/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py
+++ b/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py
@@ -104,7 +104,7 @@ class DataProduct(ConfigModel):
 
     id: str
     domain: str
-    _resolved_domain_urn: Optional[str]
+    _resolved_domain_urn: Optional[str] = None
     assets: Optional[List[str]] = None
     display_name: Optional[str] = None
     owners: Optional[List[Union[str, Ownership]]] = None

From f5c8192cca6eacc7e21e62204883854f0f6bcbdb Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Mon, 30 Oct 2023 18:32:51 +0530
Subject: [PATCH 021/792] ci: tweak to only run relevant workflows (#9052)

---
 .github/workflows/airflow-plugin.yml     |  2 +-
 .github/workflows/check-datahub-jars.yml | 12 ++++--------
 .github/workflows/documentation.yml      |  8 ++++++++
 .github/workflows/metadata-ingestion.yml |  2 +-
 .github/workflows/metadata-model.yml     |  5 ++---
 5 files changed, 16 insertions(+), 13 
deletions(-) diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 54042d104d906..d0c0f52781b9a 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -12,7 +12,7 @@ on: branches: - "**" paths: - - ".github/**" + - ".github/workflows/airflow-plugin.yml" - "metadata-ingestion-modules/airflow-plugin/**" - "metadata-ingestion/**" - "metadata-models/**" diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 9a17a70e7f8d4..41f9ea91a94e2 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -4,17 +4,13 @@ on: push: branches: - master - paths-ignore: - - "docker/**" - - "docs/**" - - "**.md" + paths: + - "metadata-integration" pull_request: branches: - "**" - paths-ignore: - - "docker/**" - - "docs/**" - - "**.md" + paths: + - "metadata-integration" release: types: [published] diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index ebe2990f3a3cd..c94282938120e 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -4,9 +4,17 @@ on: pull_request: branches: - "**" + paths: + - "metadata-ingestion/**" + - "metadata-models/**" + - "docs-website/**" push: branches: - master + paths: + - "metadata-ingestion/**" + - "metadata-models/**" + - "docs-website/**" # release: # types: [published, edited] diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 699ca330ce0ac..ec6bd4141cc6f 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -11,7 +11,7 @@ on: branches: - "**" paths: - - ".github/**" + - ".github/workflows/metadata-ingestion.yml" - "metadata-ingestion/**" - "metadata-models/**" release: diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index 9d54c88eee591..4bae5ccc9a266 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -3,9 +3,8 @@ on: push: branches: - master - paths-ignore: - - "docs/**" - - "**.md" + paths: + - "metadata-models/**" release: types: [published] From 9c72bd9ed7f1c2b4228fc656c5f8b6f31bf0d431 Mon Sep 17 00:00:00 2001 From: Kos Korchak <97058061+kkorchak@users.noreply.github.com> Date: Mon, 30 Oct 2023 14:12:07 -0400 Subject: [PATCH 022/792] fix(test): Fix for flaky download_lineage_results cypress test (#9132) --- .../cypress/cypress/e2e/lineage/download_lineage_results.js | 3 +++ .../tests/cypress/cypress/e2e/mutations/dataset_ownership.js | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js b/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js index 315aa7b22b9da..dc6efc9f7df66 100644 --- a/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js +++ b/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js @@ -27,6 +27,9 @@ const downloadCsvFile = (filename) => { }; describe("download lineage results to .csv file", () => { + beforeEach(() => { + cy.on('uncaught:exception', (err, runnable) => { return false; }); + }); it("download and verify lineage results for 1st, 2nd and 3+ degree of dependencies", () => { cy.loginWithCredentials(); diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/dataset_ownership.js b/smoke-test/tests/cypress/cypress/e2e/mutations/dataset_ownership.js index 99ad9a68d35e1..465d7998b9f9a 
100644 --- a/smoke-test/tests/cypress/cypress/e2e/mutations/dataset_ownership.js +++ b/smoke-test/tests/cypress/cypress/e2e/mutations/dataset_ownership.js @@ -29,6 +29,10 @@ const addOwner = (owner, type, elementId) => { } describe("add, remove ownership for dataset", () => { + beforeEach(() => { + cy.on('uncaught:exception', (err, runnable) => { return false; }); + }); + it("create test user and test group, add user to a group", () => { cy.loginWithCredentials(); cy.createUser(username, password, email); From 300cea373d6a94f05cf3bd95ab69bc503a28538e Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Mon, 30 Oct 2023 20:50:42 +0000 Subject: [PATCH 023/792] docs: Update updating-datahub.md (#9131) --- docs/how/updating-datahub.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 4d1535f28fa0a..28f11e4b6d707 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -4,10 +4,20 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ## Next -- #9010 - In Redshift source's config `incremental_lineage` is set default to off. +### Breaking Changes + +### Potential Downtime + +### Deprecations + +### Other Notable Changes + +## 0.12.0 ### Breaking Changes +- #9044 - GraphQL APIs for adding ownership now expect either an `ownershipTypeUrn` referencing a custom ownership type or a (deprecated) `type`. Where before adding an ownership without a concrete type was allowed, this is no longer the case. For simplicity you can use the `type` parameter which will get translated to a custom ownership type internally if one exists for the type being added. +- #9010 - In Redshift source's config `incremental_lineage` is set default to off. - #8810 - Removed support for SQLAlchemy 1.3.x. Only SQLAlchemy 1.4.x is supported now. - #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted by Looker and LookML source connectors. From 58bcedcd6a091263c6dc3e1181c260233a80575d Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 30 Oct 2023 14:18:48 -0700 Subject: [PATCH 024/792] fix(ingest/clickhouse): pin version to solve column reflection regression (#9143) --- metadata-ingestion/setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 7f7826abe2095..b1c5510efd923 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -173,7 +173,9 @@ clickhouse_common = { # Clickhouse 0.2.0 adds support for SQLAlchemy 1.4.x - "clickhouse-sqlalchemy>=0.2.0", + # Disallow 0.2.5 because of https://github.com/xzkostyan/clickhouse-sqlalchemy/issues/272. + # Note that there's also a known issue around nested map types: https://github.com/xzkostyan/clickhouse-sqlalchemy/issues/269.
+ "clickhouse-sqlalchemy>=0.2.0,<0.2.5", } redshift_common = { From 51d6d1f4531dad133e06db75267fbea77e424d00 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 30 Oct 2023 14:19:52 -0700 Subject: [PATCH 025/792] feat(ingest/looker): cleanup error handling (#9135) --- .../src/datahub/ingestion/api/workunit.py | 6 +++++- .../ingestion/source/looker/looker_lib_wrapper.py | 8 ++++++-- .../ingestion/source/looker/looker_source.py | 14 ++------------ .../ingestion/source/looker/lookml_source.py | 5 +---- 4 files changed, 14 insertions(+), 19 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/api/workunit.py b/metadata-ingestion/src/datahub/ingestion/api/workunit.py index 8eea3514a22af..b1c003ee27e12 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/workunit.py +++ b/metadata-ingestion/src/datahub/ingestion/api/workunit.py @@ -22,7 +22,11 @@ class MetadataWorkUnit(WorkUnit): metadata: Union[ MetadataChangeEvent, MetadataChangeProposal, MetadataChangeProposalWrapper ] - # A workunit creator can determine if this workunit is allowed to fail + + # A workunit creator can determine if this workunit is allowed to fail. + # TODO: This flag was initially added during the rollout of the subType aspect + # to improve backwards compatibility, but is not really needed anymore and so + # should be removed. treat_errors_as_warnings: bool = False # When this is set to false, this MWU will be ignored by automatic helpers diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py index cf132b7ef27f7..b00f74b71e792 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py @@ -123,8 +123,12 @@ def get_user(self, id_: str, user_fields: str) -> Optional[User]: transport_options=self.transport_options, ) except SDKError as e: - logger.warning(f"Could not find user with id {id_}") - logger.warning(f"Failure was {e}") + if "Looker Not Found (404)" in str(e): + # User not found + logger.info(f"Could not find user with id {id_}: 404 error") + else: + logger.warning(f"Could not find user with id {id_}") + logger.warning(f"Failure was {e}") # User not found return None diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index a3df977582ca4..09683d790c14c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -926,14 +926,7 @@ def process_metrics_dimensions_and_fields_for_dashboard( mcps = chart_mcps mcps.append(dashboard_mcp) - workunits = [ - MetadataWorkUnit( - id=f"looker-{mcp.aspectName}-{mcp.entityUrn}", - mcp=mcp, - treat_errors_as_warnings=True, - ) - for mcp in mcps - ] + workunits = [mcp.as_workunit() for mcp in mcps] return workunits @@ -1320,10 +1313,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: id=f"looker-{event.proposedSnapshot.urn}", mce=event ) elif isinstance(event, MetadataChangeProposalWrapper): - # We want to treat subtype aspects as optional, so allowing failures in this aspect to be treated as warnings rather than failures - yield event.as_workunit( - treat_errors_as_warnings=event.aspectName in ["subTypes"] - ) + yield event.as_workunit() else: raise Exception(f"Unexpected type of event {event}") 
self.reporter.report_stage_end("explore_metadata") diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index e69c3b6e601bd..e6b78cc7a7745 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -2171,10 +2171,7 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 for mcp in self._build_dataset_mcps( maybe_looker_view ): - # We want to treat mcp aspects as optional, so allowing failures in this aspect to be treated as warnings rather than failures - yield mcp.as_workunit( - treat_errors_as_warnings=True - ) + yield mcp.as_workunit() else: ( prev_model_name, From 0bd2d9a36cdf18575ac4e54126db5be33ec59d8a Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 30 Oct 2023 14:22:05 -0700 Subject: [PATCH 026/792] feat(ingest): add `entity_supports_aspect` helper (#9120) --- .../src/datahub/emitter/mcp_builder.py | 13 ++++++++++++- metadata-ingestion/tests/unit/test_mcp_builder.py | 9 +++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/emitter/mcp_builder.py b/metadata-ingestion/src/datahub/emitter/mcp_builder.py index 65e0c0d6ba60d..d50feba8b119c 100644 --- a/metadata-ingestion/src/datahub/emitter/mcp_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mcp_builder.py @@ -1,9 +1,10 @@ -from typing import Dict, Iterable, List, Optional, TypeVar +from typing import Dict, Iterable, List, Optional, Type, TypeVar from pydantic.fields import Field from pydantic.main import BaseModel from datahub.emitter.mce_builder import ( + Aspect, datahub_guid, make_container_urn, make_data_platform_urn, @@ -18,6 +19,7 @@ ) from datahub.metadata.com.linkedin.pegasus2avro.container import ContainerProperties from datahub.metadata.schema_classes import ( + KEY_ASPECTS, ContainerClass, DomainsClass, EmbedClass, @@ -306,3 +308,12 @@ def create_embed_mcp(urn: str, embed_url: str) -> MetadataChangeProposalWrapper: entityUrn=urn, aspect=EmbedClass(renderUrl=embed_url), ) + + +def entity_supports_aspect(entity_type: str, aspect_type: Type[Aspect]) -> bool: + entity_key_aspect = KEY_ASPECTS[entity_type] + aspect_name = aspect_type.get_aspect_name() + + supported_aspects = entity_key_aspect.ASPECT_INFO["entityAspects"] + + return aspect_name in supported_aspects diff --git a/metadata-ingestion/tests/unit/test_mcp_builder.py b/metadata-ingestion/tests/unit/test_mcp_builder.py index 561b782ef9e46..e304edb24789c 100644 --- a/metadata-ingestion/tests/unit/test_mcp_builder.py +++ b/metadata-ingestion/tests/unit/test_mcp_builder.py @@ -1,4 +1,5 @@ import datahub.emitter.mcp_builder as builder +from datahub.metadata.schema_classes import StatusClass, TelemetryClientIdClass def test_guid_generator(): @@ -83,3 +84,11 @@ def test_guid_generators(): guid = key.guid() assert guid == guid_datahub + + +def test_entity_supports_aspect(): + assert builder.entity_supports_aspect("dataset", StatusClass) + assert not builder.entity_supports_aspect("telemetry", StatusClass) + + assert not builder.entity_supports_aspect("dataset", TelemetryClientIdClass) + assert builder.entity_supports_aspect("telemetry", TelemetryClientIdClass) From ce0f36b8bc74e3f0bab447408096347617804d92 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 30 Oct 2023 14:23:19 -0700 Subject: [PATCH 027/792] feat(sqlparser): support more update syntaxes + fix bug with subqueries 
(#9105) --- .../src/datahub/utilities/sqlglot_lineage.py | 57 ++++++++++- .../test_postgres_select_subquery.json | 64 ++++++++++++ .../test_snowflake_update_from_table.json | 1 + .../test_snowflake_update_hardcoded.json | 4 +- .../goldens/test_snowflake_update_self.json | 29 ++++++ .../unit/sql_parsing/test_sqlglot_lineage.py | 98 +++++++++++++++++++ 6 files changed, 247 insertions(+), 6 deletions(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_postgres_select_subquery.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_self.json diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index 1d74b20569814..388388f9f4b38 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -12,8 +12,8 @@ import sqlglot.errors import sqlglot.lineage import sqlglot.optimizer.annotate_types +import sqlglot.optimizer.optimizer import sqlglot.optimizer.qualify -import sqlglot.optimizer.qualify_columns from pydantic import BaseModel from typing_extensions import TypedDict @@ -48,6 +48,19 @@ SQL_PARSE_RESULT_CACHE_SIZE = 1000 +RULES_BEFORE_TYPE_ANNOTATION: tuple = tuple( + filter( + # Skip pushdown_predicates because it sometimes throws exceptions, and we + # don't actually need it for anything. + lambda func: func.__name__ not in {"pushdown_predicates"}, + itertools.takewhile( + lambda func: func != sqlglot.optimizer.annotate_types.annotate_types, + sqlglot.optimizer.optimizer.RULES, + ), + ) +) + + class GraphQLSchemaField(TypedDict): fieldPath: str nativeDataType: str @@ -289,6 +302,10 @@ def _table_level_lineage( ) # TODO: If a CTAS has "LIMIT 0", it's not really lineage, just copying the schema. + # Update statements implicitly read from the table being updated, so add those back in. + if isinstance(statement, sqlglot.exp.Update): + tables = tables | modified + return tables, modified @@ -568,17 +585,20 @@ def _schema_aware_fuzzy_column_resolve( # - the select instead of the full outer statement # - schema info # - column qualification enabled + # - running the full pre-type annotation optimizer # logger.debug("Schema: %s", sqlglot_db_schema.mapping) - statement = sqlglot.optimizer.qualify.qualify( + statement = sqlglot.optimizer.optimizer.optimize( statement, dialect=dialect, schema=sqlglot_db_schema, + qualify_columns=True, validate_qualify_columns=False, identify=True, # sqlglot calls the db -> schema -> table hierarchy "catalog", "db", "table". catalog=default_db, db=default_schema, + rules=RULES_BEFORE_TYPE_ANNOTATION, ) except (sqlglot.errors.OptimizeError, ValueError) as e: raise SqlUnderstandingError( @@ -748,6 +768,7 @@ def _extract_select_from_create( _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT: Set[str] = set( sqlglot.exp.Update.arg_types.keys() ) - set(sqlglot.exp.Select.arg_types.keys()) +_UPDATE_FROM_TABLE_ARGS_TO_MOVE = {"joins", "laterals", "pivot"} def _extract_select_from_update( @@ -774,17 +795,43 @@ def _extract_select_from_update( # they'll get caught later. new_expressions.append(expr) - return sqlglot.exp.Select( + # Special translation for the `from` clause. + extra_args = {} + original_from = statement.args.get("from") + if original_from and isinstance(original_from.this, sqlglot.exp.Table): + # Move joins, laterals, and pivots from the Update->From->Table->field + # to the top-level Select->field. 
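+ # Illustrative example (hypothetical SQL, not taken from the test suite):
+ #   UPDATE t SET x = s.x FROM s JOIN u ON s.id = u.id
+ # Here sqlglot attaches the JOIN to the Table node under FROM, so it has
+ # to be lifted onto the synthetic SELECT assembled below before lineage
+ # analysis can see it.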
+ + for k in _UPDATE_FROM_TABLE_ARGS_TO_MOVE: + if k in original_from.this.args: + # Mutate the from table clause in-place. + extra_args[k] = original_from.this.args.pop(k) + + select_statement = sqlglot.exp.Select( **{ **{ k: v for k, v in statement.args.items() if k not in _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT }, + **extra_args, "expressions": new_expressions, } ) + # Update statements always implicitly have the updated table in context. + # TODO: Retain table name alias. + if select_statement.args.get("from"): + # select_statement = sqlglot.parse_one(select_statement.sql(dialect=dialect)) + + select_statement = select_statement.join( + statement.this, append=True, join_kind="cross" + ) + else: + select_statement = select_statement.from_(statement.this) + + return select_statement + def _is_create_table_ddl(statement: sqlglot.exp.Expression) -> bool: return isinstance(statement, sqlglot.exp.Create) and isinstance( @@ -955,7 +1002,7 @@ def _sqlglot_lineage_inner( # Fetch schema info for the relevant tables. table_name_urn_mapping: Dict[_TableName, str] = {} table_name_schema_mapping: Dict[_TableName, SchemaInfo] = {} - for table in itertools.chain(tables, modified): + for table in tables | modified: # For select statements, qualification will be a no-op. For other statements, this # is where the qualification actually happens. qualified_table = table.qualified( @@ -971,7 +1018,7 @@ def _sqlglot_lineage_inner( # Also include the original, non-qualified table name in the urn mapping. table_name_urn_mapping[table] = urn - total_tables_discovered = len(tables) + len(modified) + total_tables_discovered = len(tables | modified) total_schemas_resolved = len(table_name_schema_mapping) debug_info = SqlParsingDebugInfo( confidence=0.9 if total_tables_discovered == total_schemas_resolved diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_postgres_select_subquery.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_postgres_select_subquery.json new file mode 100644 index 0000000000000..0c40ce120c934 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_postgres_select_subquery.json @@ -0,0 +1,64 @@ +{ + "query_type": "SELECT", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table1,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table2,PROD)" + ], + "out_tables": [], + "column_lineage": [ + { + "downstream": { + "table": null, + "column": "a", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "native_column_type": "INT" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table1,PROD)", + "column": "a" + } + ] + }, + { + "downstream": { + "table": null, + "column": "b", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "native_column_type": "INT" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table1,PROD)", + "column": "b" + } + ] + }, + { + "downstream": { + "table": null, + "column": "c", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": {} + } + }, + "native_column_type": "INT[]" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table2,PROD)", + "column": "c" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_from_table.json 
b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_from_table.json index e2baa34e7fe28..d51001f969799 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_from_table.json +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_from_table.json @@ -1,6 +1,7 @@ { "query_type": "UPDATE", "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.my_table,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table1,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table2,PROD)" ], diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_hardcoded.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_hardcoded.json index b41ed61b37cdb..f421b28530c64 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_hardcoded.json +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_hardcoded.json @@ -1,6 +1,8 @@ { "query_type": "UPDATE", - "in_tables": [], + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)" + ], "out_tables": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)" ], diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_self.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_self.json new file mode 100644 index 0000000000000..c8cc32164a3eb --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_update_self.json @@ -0,0 +1,29 @@ +{ + "query_type": "UPDATE", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)" + ], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)" + ], + "column_lineage": [ + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)", + "column": "orderkey", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "native_column_type": "DECIMAL" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)", + "column": "orderkey" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index dfc5b486abd35..5559ebe1756a6 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -768,3 +768,101 @@ def test_snowflake_update_from_table(): }, expected_file=RESOURCE_DIR / "test_snowflake_update_from_table.json", ) + + +def test_snowflake_update_self(): + assert_sql_result( + """ +UPDATE snowflake_sample_data.tpch_sf1.orders +SET orderkey = orderkey + 1 +""", + dialect="snowflake", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)": { + "orderkey": "NUMBER(38,0)", + "totalprice": "NUMBER(12,2)", + }, + }, + expected_file=RESOURCE_DIR / "test_snowflake_update_self.json", + ) + + +def test_postgres_select_subquery(): + assert_sql_result( + """ +SELECT + a, + b, + (SELECT c FROM table2 WHERE table2.id = table1.id) as c +FROM table1 +""", + dialect="postgres", + default_db="my_db", + 
default_schema="my_schema", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table1,PROD)": { + "id": "INTEGER", + "a": "INTEGER", + "b": "INTEGER", + }, + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.table2,PROD)": { + "id": "INTEGER", + "c": "INTEGER", + }, + }, + expected_file=RESOURCE_DIR / "test_postgres_select_subquery.json", + ) + + +@pytest.mark.skip(reason="We can't parse column-list syntax with sub-selects yet") +def test_postgres_update_subselect(): + assert_sql_result( + """ +UPDATE accounts SET sales_person_name = + (SELECT name FROM employees + WHERE employees.id = accounts.sales_person_id) +""", + dialect="postgres", + default_db="my_db", + default_schema="my_schema", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.accounts,PROD)": { + "id": "INTEGER", + "sales_person_id": "INTEGER", + "sales_person_name": "VARCHAR(16777216)", + }, + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.employees,PROD)": { + "id": "INTEGER", + "name": "VARCHAR(16777216)", + }, + }, + expected_file=RESOURCE_DIR / "test_postgres_update_subselect.json", + ) + + +@pytest.mark.skip(reason="We can't parse column-list syntax with sub-selects yet") +def test_postgres_complex_update(): + # Example query from the postgres docs: + # https://www.postgresql.org/docs/current/sql-update.html + assert_sql_result( + """ +UPDATE accounts SET (contact_first_name, contact_last_name) = + (SELECT first_name, last_name FROM employees + WHERE employees.id = accounts.sales_person); +""", + dialect="postgres", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.accounts,PROD)": { + "id": "INTEGER", + "contact_first_name": "VARCHAR(16777216)", + "contact_last_name": "VARCHAR(16777216)", + "sales_person": "INTEGER", + }, + "urn:li:dataset:(urn:li:dataPlatform:postgres,my_db.my_schema.employees,PROD)": { + "id": "INTEGER", + "first_name": "VARCHAR(16777216)", + "last_name": "VARCHAR(16777216)", + }, + }, + expected_file=RESOURCE_DIR / "test_postgres_complex_update.json", + ) From 94d438d44f2d18def4a422cd60150d2c9a78be49 Mon Sep 17 00:00:00 2001 From: sachinsaju <33017477+sachinsaju@users.noreply.github.com> Date: Tue, 31 Oct 2023 08:54:56 +0530 Subject: [PATCH 028/792] docs: correct broken doc links (#9137) Co-authored-by: Hyejin Yoon <0327jane@gmail.com> --- docs/deploy/aws.md | 2 +- docs/what-is-datahub/datahub-concepts.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/deploy/aws.md b/docs/deploy/aws.md index e0f57b4a0b0cb..6598b93c25e9a 100644 --- a/docs/deploy/aws.md +++ b/docs/deploy/aws.md @@ -15,7 +15,7 @@ This guide requires the following tools: - [kubectl](https://kubernetes.io/docs/tasks/tools/) to manage kubernetes resources - [helm](https://helm.sh/docs/intro/install/) to deploy the resources based on helm charts. Note, we only support Helm 3. 
-- [eksctl](https://eksctl.io/introduction/#installation) to create and manage clusters on EKS +- [eksctl](https://eksctl.io/installation/) to create and manage clusters on EKS - [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) to manage AWS resources To use the above tools, you need to set up AWS credentials by following diff --git a/docs/what-is-datahub/datahub-concepts.md b/docs/what-is-datahub/datahub-concepts.md index 6328d97fa6a50..03b86fab0ede4 100644 --- a/docs/what-is-datahub/datahub-concepts.md +++ b/docs/what-is-datahub/datahub-concepts.md @@ -99,7 +99,7 @@ List of Data Platforms - Tableau - Vertica -Reference : [data_platforms.json](https://github.com/acryldata/datahub-fork/blob/acryl-main/metadata-service/war/src/main/resources/boot/data_platforms.json) +Reference : [data_platforms.json](https://github.com/datahub-project/datahub/blob/master/metadata-service/war/src/main/resources/boot/data_platforms.json) From ea1273281e3a65ab4d94d002ee19f91907a3eb84 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 30 Oct 2023 20:57:59 -0700 Subject: [PATCH 029/792] feat(ingest): sql parser perf + asyncio fixes (#9119) --- metadata-ingestion/setup.py | 2 +- .../src/datahub/cli/docker_cli.py | 5 ++ .../src/datahub/upgrade/upgrade.py | 12 ++--- .../src/datahub/utilities/sqlglot_lineage.py | 5 +- .../goldens/test_select_from_union.json | 2 +- .../test_teradata_strange_operators.json | 46 +++++++++++++++++++ .../unit/sql_parsing/test_sqlglot_lineage.py | 14 ++++++ 7 files changed, 73 insertions(+), 13 deletions(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_teradata_strange_operators.json diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index b1c5510efd923..151842bd84d0a 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -108,7 +108,7 @@ sqlglot_lib = { # Using an Acryl fork of sqlglot. # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1 - "acryl-sqlglot==18.5.2.dev45", + "acryl-sqlglot==18.17.1.dev16", } sql_common = ( diff --git a/metadata-ingestion/src/datahub/cli/docker_cli.py b/metadata-ingestion/src/datahub/cli/docker_cli.py index 4afccfe711e34..77e3285d359ef 100644 --- a/metadata-ingestion/src/datahub/cli/docker_cli.py +++ b/metadata-ingestion/src/datahub/cli/docker_cli.py @@ -5,6 +5,7 @@ import os import pathlib import platform +import signal import subprocess import sys import tempfile @@ -770,6 +771,10 @@ def quickstart( # noqa: C901 logger.debug("docker compose up still running, sending SIGKILL") up_process.kill() up_process.wait() + else: + # If the docker process got a keyboard interrupt, raise one here. + if up_process.returncode in {128 + signal.SIGINT, -signal.SIGINT}: + raise KeyboardInterrupt # Check docker health every few seconds. 
status = check_docker_quickstart() diff --git a/metadata-ingestion/src/datahub/upgrade/upgrade.py b/metadata-ingestion/src/datahub/upgrade/upgrade.py index 30f19b8b84f35..acc7954ad25a6 100644 --- a/metadata-ingestion/src/datahub/upgrade/upgrade.py +++ b/metadata-ingestion/src/datahub/upgrade/upgrade.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import functools import logging import sys from datetime import datetime, timedelta, timezone @@ -374,17 +373,14 @@ def check_upgrade(func: Callable[..., T]) -> Callable[..., T]: @wraps(func) def async_wrapper(*args: Any, **kwargs: Any) -> Any: async def run_inner_func(): - loop = asyncio.get_event_loop() - return await loop.run_in_executor( - None, functools.partial(func, *args, **kwargs) - ) + return func(*args, **kwargs) async def run_func_check_upgrade(): version_stats_future = asyncio.ensure_future(retrieve_version_stats()) - the_one_future = asyncio.ensure_future(run_inner_func()) - ret = await the_one_future + main_func_future = asyncio.ensure_future(run_inner_func()) + ret = await main_func_future - # the one future has returned + # the main future has returned # we check the other futures quickly try: version_stats = await asyncio.wait_for(version_stats_future, 0.5) diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index 388388f9f4b38..6413275ac63a6 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -106,6 +106,7 @@ def get_query_type_of_sql(expression: sqlglot.exp.Expression) -> QueryType: sqlglot.exp.Update: QueryType.UPDATE, sqlglot.exp.Delete: QueryType.DELETE, sqlglot.exp.Merge: QueryType.MERGE, + sqlglot.exp.Subqueryable: QueryType.SELECT, # unions, etc. are also selects } for cls, query_type in mapping.items(): @@ -820,10 +821,8 @@ def _extract_select_from_update( ) # Update statements always implicitly have the updated table in context. - # TODO: Retain table name alias. + # TODO: Retain table name alias, if one was present. 
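+ # Rough sketch of the resulting rewrite (an assumption, but consistent
+ # with the updated goldens in this patch):
+ #   UPDATE my_table SET x = t1.x FROM t1
+ # becomes approximately
+ #   SELECT t1.x FROM t1 CROSS JOIN my_table
+ # which is why the updated table is now reported as a lineage input as
+ # well as an output.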
if select_statement.args.get("from"): - # select_statement = sqlglot.parse_one(select_statement.sql(dialect=dialect)) - select_statement = select_statement.join( statement.this, append=True, join_kind="cross" ) diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json index 902aa010c8afc..5d1d421f49a2a 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json @@ -1,5 +1,5 @@ { - "query_type": "UNKNOWN", + "query_type": "SELECT", "in_tables": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf10.orders,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf100.orders,PROD)" diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_teradata_strange_operators.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_teradata_strange_operators.json new file mode 100644 index 0000000000000..4b21a2512ccd1 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_teradata_strange_operators.json @@ -0,0 +1,46 @@ +{ + "query_type": "SELECT", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:teradata,dbc.table1,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:teradata,dbc.table2,PROD)" + ], + "out_tables": [], + "column_lineage": [ + { + "downstream": { + "table": null, + "column": "col1", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:teradata,dbc.table1,PROD)", + "column": "col1" + }, + { + "table": "urn:li:dataset:(urn:li:dataPlatform:teradata,dbc.table2,PROD)", + "column": "col1" + } + ] + }, + { + "downstream": { + "table": null, + "column": "col2", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:teradata,dbc.table1,PROD)", + "column": "col2" + }, + { + "table": "urn:li:dataset:(urn:li:dataPlatform:teradata,dbc.table2,PROD)", + "column": "col2" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index 5559ebe1756a6..3b9fa0d55f18d 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -675,6 +675,20 @@ def test_teradata_default_normalization(): ) +def test_teradata_strange_operators(): + assert_sql_result( + """ +select col1, col2 from dbc.table1 +where col1 eq 'value1' +minus +select col1, col2 from dbc.table2 +""", + dialect="teradata", + default_schema="dbc", + expected_file=RESOURCE_DIR / "test_teradata_strange_operators.json", + ) + + def test_snowflake_update_hardcoded(): assert_sql_result( """ From b565a657d2235b82e65dfbe0bfcc11c97c3d9b79 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 30 Oct 2023 23:35:12 -0700 Subject: [PATCH 030/792] feat(quickstart): fix broker InconsistentClusterIdException issues (#9148) --- docker/docker-compose-with-cassandra.yml | 6 +++++- docker/docker-compose-without-neo4j.yml | 6 +++++- docker/docker-compose.yml | 6 +++++- docker/quickstart/docker-compose-m1.quickstart.yml | 4 +++- .../docker-compose-without-neo4j-m1.quickstart.yml | 4 +++- .../quickstart/docker-compose-without-neo4j.quickstart.yml | 4 +++- 
docker/quickstart/docker-compose.quickstart.yml | 4 +++- 7 files changed, 27 insertions(+), 7 deletions(-) diff --git a/docker/docker-compose-with-cassandra.yml b/docker/docker-compose-with-cassandra.yml index 9543e67da07f2..39f4341600572 100644 --- a/docker/docker-compose-with-cassandra.yml +++ b/docker/docker-compose-with-cassandra.yml @@ -200,7 +200,10 @@ services: retries: 5 timeout: 5s volumes: - - zkdata:/var/lib/zookeeper + # See https://stackoverflow.com/a/61008432 for why we need two volumes. + # See also: https://docs.confluent.io/platform/current/installation/docker/operations/external-volumes.html#data-volumes-for-kafka-and-zk + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log networks: default: name: datahub_network @@ -210,3 +213,4 @@ volumes: neo4jdata: broker: zkdata: + zklogs: diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml index 022362782f742..235e89e340551 100644 --- a/docker/docker-compose-without-neo4j.yml +++ b/docker/docker-compose-without-neo4j.yml @@ -174,7 +174,10 @@ services: retries: 3 timeout: 5s volumes: - - zkdata:/var/lib/zookeeper + # See https://stackoverflow.com/a/61008432 for why we need two volumes. + # See also: https://docs.confluent.io/platform/current/installation/docker/operations/external-volumes.html#data-volumes-for-kafka-and-zk + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log networks: default: name: datahub_network @@ -182,3 +185,4 @@ volumes: esdata: broker: zkdata: + zklogs: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index a486689e050a2..46da8c6fdbd2a 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -195,7 +195,10 @@ services: retries: 3 timeout: 5s volumes: - - zkdata:/var/lib/zookeeper + # See https://stackoverflow.com/a/61008432 for why we need two volumes. 
+ # See also: https://docs.confluent.io/platform/current/installation/docker/operations/external-volumes.html#data-volumes-for-kafka-and-zk + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log networks: default: name: datahub_network @@ -204,3 +207,4 @@ volumes: neo4jdata: broker: zkdata: + zklogs: diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index c5de687d335b9..3b6d02c83d0f0 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -300,7 +300,8 @@ services: ports: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 volumes: - - zkdata:/var/lib/zookeeper + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log version: '3.9' volumes: broker: null @@ -308,3 +309,4 @@ volumes: mysqldata: null neo4jdata: null zkdata: null + zklogs: null diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index b6935f24c5ce2..e45bafc3da480 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -274,10 +274,12 @@ services: ports: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 volumes: - - zkdata:/var/lib/zookeeper + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log version: '3.9' volumes: broker: null esdata: null mysqldata: null zkdata: null + zklogs: null diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 4ff8bbd70da85..020ef5e9a97b9 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -274,10 +274,12 @@ services: ports: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 volumes: - - zkdata:/var/lib/zookeeper + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log version: '3.9' volumes: broker: null esdata: null mysqldata: null zkdata: null + zklogs: null diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index f2950ebab2c9d..8adc2b9063b84 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -300,7 +300,8 @@ services: ports: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 volumes: - - zkdata:/var/lib/zookeeper + - zkdata:/var/lib/zookeeper/data + - zklogs:/var/lib/zookeeper/log version: '3.9' volumes: broker: null @@ -308,3 +309,4 @@ volumes: mysqldata: null neo4jdata: null zkdata: null + zklogs: null From 2e8954f33a10f3e11af22fe6198fea43d65d580c Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Tue, 31 Oct 2023 22:25:48 +0530 Subject: [PATCH 031/792] fix(policies): remove non-existent policies, fix name (#9150) --- .../war/src/main/resources/boot/policies.json | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json index 18cb48bfcf1f0..b7ffc11c08f05 100644 --- a/metadata-service/war/src/main/resources/boot/policies.json +++ b/metadata-service/war/src/main/resources/boot/policies.json @@ -56,7 +56,7 @@ "EDIT_ENTITY", "VIEW_ENTITY_PAGE", "EDIT_LINEAGE", - "EDIT_ENTITY_ASSERTIONS_PRIVILEGE", + "EDIT_ENTITY_ASSERTIONS", "SEARCH_PRIVILEGE", "GET_COUNTS_PRIVILEGE", "GET_TIMESERIES_ASPECT_PRIVILEGE", @@ -251,11 +251,6 @@ 
"EDIT_GROUP_MEMBERS", "EDIT_USER_PROFILE", "EDIT_CONTACT_INFO", - "MANAGE_ENTITY_TAGS_PRIVILEGE", - "MANAGE_ENTITY_GLOSSARY_TERMS_PRIVILEGE", - "MANAGE_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE", - "MANAGE_DATASET_COL_TAGS_PRIVILEGE", - "EDIT_ENTITY_ASSERTIONS_PRIVILEGE", "EDIT_LINEAGE", "EDIT_ENTITY_QUERIES", "SEARCH_PRIVILEGE", @@ -336,11 +331,6 @@ "EDIT_GROUP_MEMBERS", "EDIT_USER_PROFILE", "EDIT_CONTACT_INFO", - "MANAGE_ENTITY_TAGS_PRIVILEGE", - "MANAGE_ENTITY_GLOSSARY_TERMS_PRIVILEGE", - "MANAGE_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE", - "MANAGE_DATASET_COL_TAGS_PRIVILEGE", - "EDIT_ENTITY_ASSERTIONS_PRIVILEGE", "EDIT_LINEAGE", "EDIT_ENTITY_QUERIES", "SEARCH_PRIVILEGE", @@ -441,11 +431,6 @@ "EDIT_GROUP_MEMBERS", "EDIT_USER_PROFILE", "EDIT_CONTACT_INFO", - "MANAGE_ENTITY_TAGS_PRIVILEGE", - "MANAGE_ENTITY_GLOSSARY_TERMS_PRIVILEGE", - "MANAGE_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE", - "MANAGE_DATASET_COL_TAGS_PRIVILEGE", - "EDIT_ENTITY_ASSERTIONS_PRIVILEGE", "EDIT_LINEAGE", "EDIT_ENTITY_QUERIES", "GET_TIMELINE_PRIVILEGE", From b8dcc86281d06fcde35773fd4ef4933f5b553fd7 Mon Sep 17 00:00:00 2001 From: Kos Korchak <97058061+kkorchak@users.noreply.github.com> Date: Tue, 31 Oct 2023 15:48:34 -0400 Subject: [PATCH 032/792] refactor(smoke): Fix for a test that passed on Oss and failed on Saas (#9147) --- .../cypress/cypress/e2e/lineage/download_lineage_results.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js b/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js index dc6efc9f7df66..ed4167b87c506 100644 --- a/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js +++ b/smoke-test/tests/cypress/cypress/e2e/lineage/download_lineage_results.js @@ -37,7 +37,7 @@ describe("download lineage results to .csv file", () => { cy.openEntityTab("Lineage"); // Verify 1st degree of dependencies - cy.contains(/1 - 3 of 3/); + cy.contains(/1 - [3-4] of [3-4]/); downloadCsvFile("first_degree_results.csv"); let first_degree_csv = cy.readFile('cypress/downloads/first_degree_results.csv'); first_degree.forEach(function (urn) { @@ -52,7 +52,7 @@ describe("download lineage results to .csv file", () => { // Verify 1st and 2nd degree of dependencies cy.get('[data-testid="facet-degree-2"]').click().wait(5000); - cy.contains(/1 - 7 of 7/); + cy.contains(/1 - [7-8] of [7-8]/); downloadCsvFile("second_degree_results.csv"); let second_degree_csv = cy.readFile('cypress/downloads/second_degree_results.csv'); first_degree.forEach(function (urn) { @@ -67,7 +67,7 @@ describe("download lineage results to .csv file", () => { // Verify 1st 2nd and 3+ degree of dependencies(Verify multi page download) cy.get('[data-testid="facet-degree-3+"]').click().wait(5000); - cy.contains(/1 - 10 of 13/); + cy.contains(/1 - 10 of 1[3-4]/); downloadCsvFile("third_plus_degree_results.csv"); let third_degree_csv = cy.readFile('cypress/downloads/third_plus_degree_results.csv'); first_degree.forEach(function (urn) { From dae320c9bc28b80c6110395092d4223e9a37258b Mon Sep 17 00:00:00 2001 From: sachinsaju <33017477+sachinsaju@users.noreply.github.com> Date: Wed, 1 Nov 2023 04:28:39 +0530 Subject: [PATCH 033/792] docs(teradata): teradata doc external link 404 fix (#9152) --- metadata-ingestion/docs/sources/teradata/teradata_pre.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/docs/sources/teradata/teradata_pre.md b/metadata-ingestion/docs/sources/teradata/teradata_pre.md index 
7263a59f5ea3d..7b4da1255d575 100644 --- a/metadata-ingestion/docs/sources/teradata/teradata_pre.md +++ b/metadata-ingestion/docs/sources/teradata/teradata_pre.md @@ -25,4 +25,4 @@ will fit for your queries (the default query text size Teradata captures is max REPLACE QUERY LOGGING LIMIT SQLTEXT=2000 ON ALL; ``` See more here about query logging: - [https://docs.teradata.com/r/Teradata-VantageCloud-Lake/Database-Reference/Database-Administration/Tracking-Query-Behavior-with-Database-Query-Logging-Operational-DBAs]() + [https://docs.teradata.com/r/Teradata-VantageCloud-Lake/Database-Reference/Database-Administration/Tracking-Query-Behavior-with-Database-Query-Logging-Operational-DBAs](https://docs.teradata.com/r/Teradata-VantageCloud-Lake/Database-Reference/Database-Administration/Tracking-Query-Behavior-with-Database-Query-Logging-Operational-DBAs) From 7a31950f794b2b0527ad685cbd08e967b524bfec Mon Sep 17 00:00:00 2001 From: "Jia (Jason) Teoh" Date: Tue, 31 Oct 2023 18:41:16 -0700 Subject: [PATCH 034/792] fix(datahub-client): Include relocation for snakeyaml dependency. (#8911) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- metadata-integration/java/datahub-client/build.gradle | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index e6210f1f073f6..0bf6b18fa5073 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -97,6 +97,7 @@ shadowJar { // we can move to automatic relocation using ConfigureShadowRelocation after we get to a good place on these first relocate 'org.springframework', 'datahub.shaded.org.springframework' relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson' + relocate 'org.yaml', 'io.acryl.shaded.org.yaml' // Required for shading snakeyaml relocate 'net.jcip.annotations', 'datahub.shaded.annotations' relocate 'javassist', 'datahub.shaded.javassist' relocate 'edu.umd.cs.findbugs', 'datahub.shaded.findbugs' @@ -242,4 +243,4 @@ checkstyleMain.exclude '**/generated/**' clean { project.delete("$projectDir/generated") -} \ No newline at end of file +} From 73514ad9c5643cc5fbbb1edb0991d4aea0812459 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 31 Oct 2023 21:28:38 -0700 Subject: [PATCH 035/792] fix(ingest): cleanup large images in CI (#9153) --- .../tests/integration/sql_server/test_sql_server.py | 5 ++++- metadata-ingestion/tests/integration/vertica/test_vertica.py | 5 ++++- metadata-ingestion/tests/test_helpers/docker_helpers.py | 5 +++++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py index 099690fed34c2..f439a322c2677 100644 --- a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py +++ b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py @@ -6,7 +6,7 @@ from tests.test_helpers import mce_helpers from tests.test_helpers.click_helpers import run_datahub_cmd -from tests.test_helpers.docker_helpers import wait_for_port +from tests.test_helpers.docker_helpers import cleanup_image, wait_for_port @pytest.fixture(scope="module") @@ -29,6 +29,9 @@ def mssql_runner(docker_compose_runner, pytestconfig): assert ret.returncode == 0 yield docker_services + # The image is pretty large, so we remove it after the test. 
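+ # (cleanup_image is the shared helper from tests/test_helpers/docker_helpers.py;
+ # per the companion change below, it removes every tag matching the given
+ # repository name and is a no-op outside CI to keep local runs fast.)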
+ cleanup_image("mcr.microsoft.com/mssql/server") + SOURCE_FILES_PATH = "./tests/integration/sql_server/source_files" config_file = os.listdir(SOURCE_FILES_PATH) diff --git a/metadata-ingestion/tests/integration/vertica/test_vertica.py b/metadata-ingestion/tests/integration/vertica/test_vertica.py index 94ad33ba21ce4..d7b4c390f75d9 100644 --- a/metadata-ingestion/tests/integration/vertica/test_vertica.py +++ b/metadata-ingestion/tests/integration/vertica/test_vertica.py @@ -6,7 +6,7 @@ from tests.test_helpers import mce_helpers from tests.test_helpers.click_helpers import run_datahub_cmd -from tests.test_helpers.docker_helpers import wait_for_port +from tests.test_helpers.docker_helpers import cleanup_image, wait_for_port FROZEN_TIME = "2020-04-14 07:00:00" @@ -49,6 +49,9 @@ def vertica_runner(docker_compose_runner, test_resources_dir): yield docker_services + # The image is pretty large, so we remove it after the test. + cleanup_image("vertica/vertica-ce") + @freeze_time(FROZEN_TIME) @pytest.mark.integration diff --git a/metadata-ingestion/tests/test_helpers/docker_helpers.py b/metadata-ingestion/tests/test_helpers/docker_helpers.py index 30157c3a78094..2eb61068196a2 100644 --- a/metadata-ingestion/tests/test_helpers/docker_helpers.py +++ b/metadata-ingestion/tests/test_helpers/docker_helpers.py @@ -1,5 +1,6 @@ import contextlib import logging +import os import subprocess from typing import Callable, Optional, Union @@ -78,6 +79,10 @@ def run( def cleanup_image(image_name: str) -> None: assert ":" not in image_name, "image_name should not contain a tag" + if not os.environ.get("CI"): + logger.debug("Not cleaning up images to speed up local development") + return + images_proc = subprocess.run( f"docker image ls --filter 'reference={image_name}*' -q", shell=True, From d2314976033e42c13b7897f46ea0f227afb7c90b Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 31 Oct 2023 21:37:11 -0700 Subject: [PATCH 036/792] build: increase gradle retries (#9091) --- gradle.properties | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/gradle.properties b/gradle.properties index 2b211e725359a..1cd349344b432 100644 --- a/gradle.properties +++ b/gradle.properties @@ -6,9 +6,16 @@ org.gradle.caching=false # Increase gradle JVM memory to 3GB to allow tests to run locally org.gradle.jvmargs=-Xmx3000m # Increase retries to 5 (from default of 3) and increase interval from 125ms to 1s. +# Based on this thread https://github.com/gradle/gradle/issues/4629, it's unclear +# if we should be using systemProp or not. We're using both for now. 
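+# (For context: Gradle passes any gradle.properties key prefixed with
+# systemProp. to the build JVM as a system property, so duplicating the
+# values below should cover whichever lookup path this Gradle version uses.)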
 org.gradle.internal.repository.max.retries=5
 org.gradle.internal.repository.max.tentatives=5
 org.gradle.internal.repository.initial.backoff=1000
+systemProp.org.gradle.internal.http.connectionTimeout=120000
+systemProp.org.gradle.internal.http.socketTimeout=120000
+systemProp.org.gradle.internal.repository.max.retries=5
+systemProp.org.gradle.internal.repository.max.tentatives=5
+systemProp.org.gradle.internal.repository.initial.backoff=1000
 # Needed to publish to Nexus from a sub-module
 gnsp.disableApplyOnlyOnRootProjectEnforcement=true

From 55f14530a397f75a9201db11c13bd7bbbb25162c Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Wed, 1 Nov 2023 00:12:52 -0700
Subject: [PATCH 037/792] feat(ingest): bump sqlglot parser (#9155)

---
 metadata-ingestion/setup.py | 2 +-
 .../unit/sql_parsing/test_sqlglot_lineage.py | 19 ++++++++++++++++++-
 2 files changed, 19 insertions(+), 2 deletions(-)

diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index 151842bd84d0a..afce8dcee840b 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -108,7 +108,7 @@
 sqlglot_lib = {
     # Using an Acryl fork of sqlglot.
     # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1
-    "acryl-sqlglot==18.17.1.dev16",
+    "acryl-sqlglot==19.0.2.dev10",
 }
 
 sql_common = (
diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
index 3b9fa0d55f18d..c420f2b8438ce 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
+++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
@@ -676,9 +676,13 @@ def test_teradata_default_normalization():
 
 
 def test_teradata_strange_operators():
+    # This is a test for the following operators:
+    # - `SEL` (select)
+    # - `EQ` (equals)
+    # - `MINUS` (except)
     assert_sql_result(
         """
-select col1, col2 from dbc.table1
+sel col1, col2 from dbc.table1
 where col1 eq 'value1'
 minus
 select col1, col2 from dbc.table2
@@ -689,6 +693,19 @@ def test_teradata_strange_operators():
     )
 
 
+@pytest.mark.skip("sqlglot doesn't support this cast syntax yet")
+def test_teradata_cast_syntax():
+    assert_sql_result(
+        """
+SELECT my_table.date_col MONTH(4) AS month_col
+FROM my_table
+""",
+        dialect="teradata",
+        default_schema="dbc",
+        expected_file=RESOURCE_DIR / "test_teradata_cast_syntax.json",
+    )
+
+
 def test_snowflake_update_hardcoded():
     assert_sql_result(
         """

From 876de214c9a11f8928d8eafe5c7f658d5b9dc61f Mon Sep 17 00:00:00 2001
From: Tony Ouyang
Date: Wed, 1 Nov 2023 00:13:17 -0700
Subject: [PATCH 038/792] feat(ingest/mongodb): support stateful ingestion (#9118)

---
 .../src/datahub/ingestion/source/mongodb.py | 74 +-
 .../mongodb/mongodb_mces_golden.json | 8320 +++++++++--------
 2 files changed, 4270 insertions(+), 4124 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py
index 890c5c64bd5e6..ce2b9ce2981e0 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py
@@ -15,7 +15,12 @@
     EnvConfigMixin,
     PlatformInstanceConfigMixin,
 )
-from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance
+from datahub.emitter.mce_builder import (
+    make_data_platform_urn,
+    make_dataplatform_instance_urn,
+    make_dataset_urn_with_platform_instance,
+)
+from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SourceCapability,
@@ -25,14 +30,21 @@
     platform_name,
     support_status,
 )
-from datahub.ingestion.api.source import Source, SourceReport
+from datahub.ingestion.api.source import MetadataWorkUnitProcessor
 from datahub.ingestion.api.workunit import MetadataWorkUnit
 from datahub.ingestion.source.schema_inference.object import (
     SchemaDescription,
     construct_schema,
 )
-from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
-from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
+from datahub.ingestion.source.state.stale_entity_removal_handler import (
+    StaleEntityRemovalHandler,
+    StaleEntityRemovalSourceReport,
+    StatefulIngestionConfigBase,
+    StatefulStaleMetadataRemovalConfig,
+)
+from datahub.ingestion.source.state.stateful_ingestion_base import (
+    StatefulIngestionSourceBase,
+)
 from datahub.metadata.com.linkedin.pegasus2avro.schema import (
     ArrayTypeClass,
     BooleanTypeClass,
@@ -48,7 +60,10 @@
     TimeTypeClass,
     UnionTypeClass,
 )
-from datahub.metadata.schema_classes import DatasetPropertiesClass
+from datahub.metadata.schema_classes import (
+    DataPlatformInstanceClass,
+    DatasetPropertiesClass,
+)
 
 logger = logging.getLogger(__name__)
 
@@ -59,7 +74,9 @@
 DENY_DATABASE_LIST = set(["admin", "config", "local"])
 
 
-class MongoDBConfig(PlatformInstanceConfigMixin, EnvConfigMixin):
+class MongoDBConfig(
+    PlatformInstanceConfigMixin, EnvConfigMixin, StatefulIngestionConfigBase
+):
     # See the MongoDB authentication docs for details and examples.
     # https://pymongo.readthedocs.io/en/stable/examples/authentication.html
     connect_uri: str = Field(
@@ -99,6 +116,8 @@ class MongoDBConfig(PlatformInstanceConfigMixin, EnvConfigMixin):
         default=AllowDenyPattern.allow_all(),
         description="regex patterns for collections to filter in ingestion.",
     )
+    # Custom Stateful Ingestion settings
+    stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = None
 
     @validator("maxDocumentSize")
     def check_max_doc_size_filter_is_valid(cls, doc_size_filter_value):
@@ -108,7 +127,7 @@ def check_max_doc_size_filter_is_valid(cls, doc_size_filter_value):
 
 
 @dataclass
-class MongoDBSourceReport(SourceReport):
+class MongoDBSourceReport(StaleEntityRemovalSourceReport):
     filtered: List[str] = field(default_factory=list)
 
     def report_dropped(self, name: str) -> None:
@@ -129,6 +148,7 @@ def report_dropped(self, name: str) -> None:
     bson.timestamp.Timestamp: "timestamp",
     bson.dbref.DBRef: "dbref",
     bson.objectid.ObjectId: "oid",
+    bson.Decimal128: "numberDecimal",
     "mixed": "mixed",
 }
 
@@ -145,6 +165,7 @@ def report_dropped(self, name: str) -> None:
     bson.timestamp.Timestamp: TimeTypeClass,
     bson.dbref.DBRef: BytesTypeClass,
     bson.objectid.ObjectId: BytesTypeClass,
+    bson.Decimal128: NumberTypeClass,
     dict: RecordTypeClass,
     "mixed": UnionTypeClass,
 }
@@ -206,7 +227,7 @@ def construct_schema_pymongo(
 @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
 @capability(SourceCapability.SCHEMA_METADATA, "Enabled by default")
 @dataclass
-class MongoDBSource(Source):
+class MongoDBSource(StatefulIngestionSourceBase):
     """
     This plugin extracts the following:
 
@@ -227,7 +248,7 @@ class MongoDBSource(Source):
     mongo_client: MongoClient
 
     def __init__(self, ctx: PipelineContext, config: MongoDBConfig):
-        super().__init__(ctx)
+        super().__init__(config, ctx)
         self.config = config
         self.report = MongoDBSourceReport()
 
@@ -254,6 +275,14 @@ def create(cls, config_dict: dict, ctx: PipelineContext) -> "MongoDBSource":
         config = MongoDBConfig.parse_obj(config_dict)
         return cls(ctx, config)
 
+    def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
+        return [
+            *super().get_workunit_processors(),
+            StaleEntityRemovalHandler.create(
+                self, self.config, self.ctx
+            ).workunit_processor,
+        ]
+
     def get_pymongo_type_string(
         self, field_type: Union[Type, str], collection_name: str
     ) -> str:
@@ -332,16 +361,18 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
                     platform_instance=self.config.platform_instance,
                 )
 
-                dataset_snapshot = DatasetSnapshot(
-                    urn=dataset_urn,
-                    aspects=[],
-                )
+                if self.config.platform_instance:
+                    data_platform_instance = DataPlatformInstanceClass(
+                        platform=make_data_platform_urn(platform),
+                        instance=make_dataplatform_instance_urn(
+                            platform, self.config.platform_instance
+                        ),
+                    )
 
                 dataset_properties = DatasetPropertiesClass(
                     tags=[],
                     customProperties={},
                 )
-                dataset_snapshot.aspects.append(dataset_properties)
 
                 if self.config.enableSchemaInference:
                     assert self.config.maxDocumentSize is not None
@@ -412,13 +443,20 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
                         fields=canonical_schema,
                     )
 
-                    dataset_snapshot.aspects.append(schema_metadata)
-
                 # TODO: use list_indexes() or index_information() to get index information
                 # See https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.list_indexes.
 
-                mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
-                yield MetadataWorkUnit(id=dataset_name, mce=mce)
+                yield from [
+                    mcp.as_workunit()
+                    for mcp in MetadataChangeProposalWrapper.construct_many(
+                        entityUrn=dataset_urn,
+                        aspects=[
+                            schema_metadata,
+                            dataset_properties,
+                            data_platform_instance,
+                        ],
+                    )
+                ]
 
     def is_server_version_gte_4_4(self) -> bool:
         try:
diff --git a/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json b/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json
index e16101b137ac9..ec3fd80e6a6ea 100644
--- a/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json
+++ b/metadata-ingestion/tests/integration/mongodb/mongodb_mces_golden.json
@@ -1,4136 +1,4240 @@
 [
 {
-  "proposedSnapshot": {
-    "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": {
-      "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.emptyCollection,PROD)",
-      "aspects": [
-        {
-          "com.linkedin.pegasus2avro.dataset.DatasetProperties": {
-            "customProperties": {},
-            "tags": []
-          }
-        },
-        {
-          "com.linkedin.pegasus2avro.schema.SchemaMetadata": {
-            "schemaName": "emptyCollection",
-            "platform": "urn:li:dataPlatform:mongodb",
-            "version": 0,
-            "created": {
-              "time": 0,
-              "actor": "urn:li:corpuser:unknown"
-            },
-            "lastModified": {
-              "time": 0,
-              "actor": "urn:li:corpuser:unknown"
-            },
-            "hash": "",
-            "platformSchema": {
-              "com.linkedin.pegasus2avro.schema.Schemaless": {}
-            },
-            "fields": []
-          }
+  "entityType": "dataset",
+  "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.emptyCollection,PROD)",
+  "changeType": "UPSERT",
+  "aspectName": "schemaMetadata",
+  "aspect": {
+    "json": {
+      "schemaName": "emptyCollection",
+      "platform": "urn:li:dataPlatform:mongodb",
+      "version": 0,
+      "created": {
+        "time": 0,
+        "actor": "urn:li:corpuser:unknown"
+      },
+      "lastModified": {
+        "time": 0,
+        "actor": "urn:li:corpuser:unknown"
+      },
+      "hash": "",
+      "platformSchema": {
+        "com.linkedin.schema.Schemaless": {}
+      },
+      "fields": []
+    }
+  },
+  "systemMetadata": {
+    "lastObserved": 1615443388097,
+    "runId": "mongodb-test",
+    "lastRunId": "no-run-id-provided"
+  }
+}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.emptyCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.emptyCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:mongodb", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:mongodb,instance)" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.firstCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "schemaMetadata", + "aspect": { + "json": { + "schemaName": "firstCollection", + "platform": "urn:li:dataPlatform:mongodb", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.Schemaless": {} + }, + "fields": [ + { + "fieldPath": "_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.BytesType": {} + } + }, + "nativeDataType": "oid", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "age", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "canSwim", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "emptyObject", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "OBJECT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteColor", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "OBJECT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.calories", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.emptyObject", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "OBJECT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.ingredients", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": {} + } + }, + "nativeDataType": "ARRAY", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.ingredients.color", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": 
"favoriteFood.ingredients.from", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.ingredients.name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.name", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "favoriteFood.servings", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "legs", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "mixedType", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.UnionType": {} + } + }, + "nativeDataType": "mixed", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "name", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "seen", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "servings", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "sometimesNull", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "tags", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": {} + } + }, + "nativeDataType": "ARRAY", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "type", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.firstCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.firstCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:mongodb", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:mongodb,instance)" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.largeCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "schemaMetadata", + "aspect": { + "json": { + "schemaName": "largeCollection", + "platform": "urn:li:dataPlatform:mongodb", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.Schemaless": {} + }, + "fields": [ + { + "fieldPath": "_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.BytesType": {} + } + }, + "nativeDataType": "oid", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_200", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_201", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_202", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_203", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_204", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_205", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_206", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_207", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_208", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_209", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_210", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_211", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_212", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_213", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + 
}, + { + "fieldPath": "field_214", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_215", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_216", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_217", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_218", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_219", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_220", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_221", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_222", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_223", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_224", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_225", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_226", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_227", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_228", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_229", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_230", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_231", + "nullable": true, + "type": { + "type": { + 
"com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_232", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_233", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_234", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_235", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_236", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_237", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_238", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_239", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_240", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_241", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_242", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_243", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_244", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_245", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_246", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_247", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_248", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + 
"recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_249", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_250", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_251", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_252", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_253", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_254", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_255", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_256", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_257", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_258", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_259", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_260", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_261", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_262", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_263", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_264", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_265", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_266", + 
"nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_267", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_268", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_269", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_270", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_271", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_272", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_273", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_274", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_275", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_276", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_277", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_278", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_279", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_280", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_281", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_282", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_283", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": 
{} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_284", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_285", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_286", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_287", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_288", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_289", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_290", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_291", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_292", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_293", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_294", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_295", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_296", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_297", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_298", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_299", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_300", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": 
false + }, + { + "fieldPath": "field_301", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_302", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_303", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_304", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_305", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_306", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_307", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_308", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_309", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_310", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_311", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_312", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_313", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_314", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_315", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_316", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_317", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_318", + "nullable": true, + "type": { + "type": 
{ + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_319", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_320", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_321", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_322", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_323", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_324", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_325", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_326", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_327", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_328", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_329", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_330", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_331", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_332", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_333", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_334", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_335", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + 
"recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_336", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_337", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_338", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_339", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_340", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_341", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_342", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_343", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_344", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_345", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_346", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_347", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_348", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_349", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_350", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_351", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_352", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_353", + 
"nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_354", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_355", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_356", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_357", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_358", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_359", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_360", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_361", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_362", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_363", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_364", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_365", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_366", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_367", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_368", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_369", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_370", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} 
+ } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_371", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_372", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_374", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_375", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_376", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_377", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_378", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_379", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_380", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_381", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_382", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_383", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_384", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_385", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_386", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_387", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_388", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": 
false + }, + { + "fieldPath": "field_389", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_390", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_391", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_392", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_393", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_394", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_395", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_396", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_397", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_398", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_399", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_400", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_401", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_402", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_403", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_404", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_405", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_406", + "nullable": false, + "type": { + 
"type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_407", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_408", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_409", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_410", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_411", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_412", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_413", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_414", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_415", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_416", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_417", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_418", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_419", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_420", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_421", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_422", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_423", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": 
"string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_424", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_425", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_426", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_427", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_428", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_429", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_430", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_431", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_432", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_433", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_434", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_435", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_436", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_437", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_438", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_439", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_440", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": 
"field_441", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_442", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_443", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_444", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_445", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_446", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_447", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_448", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_449", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_450", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_451", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_452", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_453", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_454", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_455", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_456", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_457", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_458", + "nullable": true, + "type": { + "type": { + 
"com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_459", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_460", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_461", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_462", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_463", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_464", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_465", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_466", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_467", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_468", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_469", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_470", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_471", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_472", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_473", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_474", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_475", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + 
"recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_476", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_477", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_478", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_479", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_480", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_481", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_482", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_483", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_484", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_485", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_486", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_487", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_488", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_489", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_490", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_491", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_492", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_493", + 
"nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_494", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_495", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_496", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_497", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_498", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "field_499", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false } ] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.firstCollection,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "firstCollection", - "platform": "urn:li:dataPlatform:mongodb", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.Schemaless": {} - }, - "fields": [ - { - "fieldPath": "_id", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.BytesType": {} - } - }, - "nativeDataType": "oid", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "age", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "float", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "canSwim", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.BooleanType": {} - } - }, - "nativeDataType": "boolean", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "emptyObject", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.RecordType": {} - } - }, - "nativeDataType": "OBJECT", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteColor", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.RecordType": {} - } - }, - "nativeDataType": "OBJECT", - "recursive": false, - "isPartOfKey": 
false - }, - { - "fieldPath": "favoriteFood.calories", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "integer", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.emptyObject", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.RecordType": {} - } - }, - "nativeDataType": "OBJECT", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.ingredients", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "ARRAY", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.ingredients.color", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.ingredients.from", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.ingredients.name", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.name", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "favoriteFood.servings", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "float", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "legs", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "integer", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "mixedType", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.UnionType": {} - } - }, - "nativeDataType": "mixed", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "name", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "seen", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "float", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "servings", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "integer", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "sometimesNull", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "tags", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "ARRAY", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "type", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": 
false, - "isPartOfKey": false - } - ] - } - } - ] + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.largeCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "schema.downsampled": "True", + "schema.totalFields": "501" + }, + "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.largeCollection,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": { - "schema.downsampled": "True", - "schema.totalFields": "501" - }, - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "largeCollection", - "platform": "urn:li:dataPlatform:mongodb", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.Schemaless": {} - }, - "fields": [ - { - "fieldPath": "_id", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.BytesType": {} - } - }, - "nativeDataType": "oid", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_200", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_201", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_202", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_203", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_204", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_205", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_206", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_207", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_208", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_209", - "nullable": true, - "type": { - "type": { - 
"com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_210", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_211", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_212", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_213", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_214", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_215", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_216", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_217", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_218", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_219", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_220", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_221", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_222", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_223", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_224", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_225", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": 
"string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_226", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_227", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_228", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_229", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_230", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_231", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_232", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_233", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_234", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_235", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_236", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_237", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_238", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_239", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_240", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_241", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": 
"field_242", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_243", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_244", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_245", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_246", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_247", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_248", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_249", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_250", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_251", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_252", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_253", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_254", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_255", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_256", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_257", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_258", - "nullable": true, - "type": { - "type": { - 
"com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_259", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_260", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_261", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_262", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_263", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_264", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_265", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_266", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_267", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_268", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_269", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_270", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_271", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_272", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_273", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_274", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": 
"string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_275", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_276", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_277", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_278", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_279", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_280", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_281", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_282", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_283", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_284", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_285", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_286", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_287", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_288", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_289", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_290", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": 
"field_291", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_292", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_293", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_294", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_295", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_296", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_297", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_298", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_299", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_300", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_301", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_302", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_303", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_304", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_305", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_306", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_307", - "nullable": true, - "type": { - "type": { - 
"com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_308", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_309", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_310", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_311", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_312", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_313", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_314", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_315", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_316", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_317", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_318", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_319", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_320", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_321", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_322", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_323", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": 
"string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_324", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_325", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_326", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_327", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_328", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_329", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_330", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_331", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_332", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_333", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_334", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_335", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_336", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_337", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_338", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_339", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": 
"field_340", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_341", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_342", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_343", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_344", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_345", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_346", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_347", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_348", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_349", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_350", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_351", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_352", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_353", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_354", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_355", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_356", - "nullable": true, - "type": { - "type": { - 
"com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_357", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_358", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_359", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_360", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_361", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_362", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_363", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_364", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_365", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_366", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_367", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_368", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_369", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_370", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_371", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_372", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": 
"string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_374", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_375", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_376", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_377", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_378", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_379", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_380", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_381", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_382", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_383", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_384", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_385", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_386", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_387", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_388", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_389", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - 
"fieldPath": "field_390", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_391", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_392", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_393", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_394", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_395", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_396", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_397", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_398", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_399", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_400", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_401", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_402", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_403", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_404", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_405", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_406", - "nullable": false, - "type": { - "type": { - 
"com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_407", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_408", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_409", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_410", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_411", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_412", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_413", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_414", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_415", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_416", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_417", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_418", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_419", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_420", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_421", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_422", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": 
"string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_423", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_424", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_425", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_426", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_427", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_428", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_429", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_430", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_431", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_432", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_433", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_434", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_435", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_436", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_437", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_438", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": 
"field_439", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_440", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_441", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_442", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_443", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_444", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_445", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_446", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_447", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_448", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_449", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_450", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_451", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_452", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_453", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_454", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_455", - "nullable": false, - "type": { - "type": { - 
"com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_456", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_457", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_458", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_459", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_460", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_461", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_462", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_463", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_464", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_465", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_466", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_467", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_468", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_469", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_470", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_471", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": 
"string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_472", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_473", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_474", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_475", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_476", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_477", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_478", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_479", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_480", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_481", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_482", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_483", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_484", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_485", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_486", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_487", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": 
"field_488", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_489", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_490", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_491", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_492", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_493", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_494", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_495", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_496", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_497", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_498", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "field_499", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } - ] - } - } - ] + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.largeCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:mongodb", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:mongodb,instance)" } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.secondCollection,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "secondCollection", - "platform": "urn:li:dataPlatform:mongodb", - 
"version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.Schemaless": {} - }, - "fields": [ - { - "fieldPath": "_id", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.BytesType": {} - } - }, - "nativeDataType": "oid", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "mixedType", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.UnionType": {} - } - }, - "nativeDataType": "mixed", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "mixedType.fieldA", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "mixedType.fieldTwo", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "integer", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "name", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "nullableMixedType", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.UnionType": {} - } - }, - "nativeDataType": "mixed", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "rating", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "float", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "tasty", - "nullable": false, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.BooleanType": {} - } - }, - "nativeDataType": "boolean", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "varieties", - "nullable": true, - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "ARRAY", - "recursive": false, - "isPartOfKey": false - } - ] - } + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.secondCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "schemaMetadata", + "aspect": { + "json": { + "schemaName": "secondCollection", + "platform": "urn:li:dataPlatform:mongodb", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.Schemaless": {} + }, + "fields": [ + { + "fieldPath": "_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.BytesType": {} + } + }, + "nativeDataType": "oid", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "mixedType", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.UnionType": {} + } + }, + "nativeDataType": "mixed", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "mixedType.fieldA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "mixedType.fieldTwo", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + 
"recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "name", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "nullableMixedType", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.UnionType": {} + } + }, + "nativeDataType": "mixed", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rating", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "tasty", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "varieties", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.ArrayType": {} + } + }, + "nativeDataType": "ARRAY", + "recursive": false, + "isPartOfKey": false } ] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.secondCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mongodb,instance.mngdb.secondCollection,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:mongodb", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:mongodb,instance)" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4145,7 +4249,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4160,7 +4265,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4175,7 +4281,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4190,7 +4297,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "mongodb-test" + "runId": "mongodb-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file From f2eb0cf3073967d505004e9f2df58d5475cadec1 Mon Sep 17 00:00:00 2001 From: Kos Korchak <97058061+kkorchak@users.noreply.github.com> Date: Wed, 1 Nov 2023 15:41:02 -0400 Subject: [PATCH 039/792] smoke-test: API test for managing secrets privilege (#9121) --- .../tests/privileges/test_privileges.py | 241 ++++++++++++++++++ smoke-test/tests/privileges/utils.py | 218 ++++++++++++++++ 2 files changed, 459 insertions(+) create mode 100644 smoke-test/tests/privileges/test_privileges.py create mode 100644 smoke-test/tests/privileges/utils.py diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py new file mode 100644 index 0000000000000..13d6b6cf3415a --- /dev/null +++ 
b/smoke-test/tests/privileges/test_privileges.py @@ -0,0 +1,241 @@ +import pytest +import tenacity + +from tests.utils import (get_frontend_session, wait_for_writes_to_sync, wait_for_healthcheck_util, + get_frontend_url, get_admin_credentials,get_sleep_info) +from tests.privileges.utils import * + +sleep_sec, sleep_times = get_sleep_info() + +@pytest.fixture(scope="session") +def wait_for_healthchecks(): + wait_for_healthcheck_util() + yield + + +@pytest.mark.dependency() +def test_healthchecks(wait_for_healthchecks): + # Call to wait_for_healthchecks fixture will do the actual functionality. + pass + + +@pytest.fixture(scope="session") +def admin_session(wait_for_healthchecks): + yield get_frontend_session() + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +@pytest.fixture(scope="module", autouse=True) +def privileges_and_test_user_setup(admin_session): + """Fixture to execute setup before and tear down after all tests are run""" + # Disable 'All users' privileges + set_base_platform_privileges_policy_status("INACTIVE", admin_session) + set_view_dataset_sensitive_info_policy_status("INACTIVE", admin_session) + set_view_entity_profile_privileges_policy_status("INACTIVE", admin_session) + # Sleep for eventual consistency + wait_for_writes_to_sync() + + # Create a new user + admin_session = create_user(admin_session, "user", "user") + + yield + + # Remove test user + remove_user(admin_session, "urn:li:corpuser:user") + + # Restore All users privileges + set_base_platform_privileges_policy_status("ACTIVE", admin_session) + set_view_dataset_sensitive_info_policy_status("ACTIVE", admin_session) + set_view_entity_profile_privileges_policy_status("ACTIVE", admin_session) + + # Sleep for eventual consistency + wait_for_writes_to_sync() + + +@tenacity.retry( + stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) +) +def _ensure_can_create_secret(session, json, urn): + create_secret_success = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=json) + create_secret_success.raise_for_status() + secret_data = create_secret_success.json() + + assert secret_data + assert secret_data["data"] + assert secret_data["data"]["createSecret"] + assert secret_data["data"]["createSecret"] == urn + + +@tenacity.retry( + stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) +) +def _ensure_cant_create_secret(session, json): + create_secret_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=json) + create_secret_response.raise_for_status() + create_secret_data = create_secret_response.json() + + assert create_secret_data["errors"][0]["extensions"]["code"] == 403 + assert create_secret_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" + assert create_secret_data["data"]["createSecret"] == None + + +@tenacity.retry( + stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) +) +def _ensure_can_create_ingestion_source(session, json): + create_ingestion_success = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=json) + create_ingestion_success.raise_for_status() + ingestion_data = create_ingestion_success.json() + + assert ingestion_data + assert ingestion_data["data"] + assert ingestion_data["data"]["createIngestionSource"] + assert ingestion_data["data"]["createIngestionSource"] is not None + + return ingestion_data["data"]["createIngestionSource"] + + +@tenacity.retry( + stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) +) +def 
_ensure_cant_create_ingestion_source(session, json): + create_source_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=json) + create_source_response.raise_for_status() + create_source_data = create_source_response.json() + + assert create_source_data["errors"][0]["extensions"]["code"] == 403 + assert create_source_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" + assert create_source_data["data"]["createIngestionSource"] == None + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_privilege_to_create_and_manage_secrets(): + + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + user_session = login_as("user", "user") + secret_urn = "urn:li:dataHubSecret:TestSecretName" + + # Verify new user can't create secrets + create_secret = { + "query": """mutation createSecret($input: CreateSecretInput!) {\n + createSecret(input: $input)\n}""", + "variables": { + "input":{ + "name":"TestSecretName", + "value":"Test Secret Value", + "description":"Test Secret Description" + } + }, + } + _ensure_cant_create_secret(user_session, create_secret) + + + # Assign privileges to the new user to manage secrets + policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_SECRETS"], admin_session) + + # Verify new user can create and manage secrets + # Create a secret + _ensure_can_create_secret(user_session, create_secret, secret_urn) + + + # Remove a secret + remove_secret = { + "query": """mutation deleteSecret($urn: String!) {\n + deleteSecret(urn: $urn)\n}""", + "variables": { + "urn": secret_urn + }, + } + + remove_secret_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_secret) + remove_secret_response.raise_for_status() + secret_data = remove_secret_response.json() + + assert secret_data + assert secret_data["data"] + assert secret_data["data"]["deleteSecret"] + assert secret_data["data"]["deleteSecret"] == secret_urn + + + # Remove the policy + remove_policy(policy_urn, admin_session) + + # Ensure user can't create secret after policy is removed + _ensure_cant_create_secret(user_session, create_secret) + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_privilege_to_create_and_manage_ingestion_source(): + + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + user_session = login_as("user", "user") + + # Verify new user can't create ingestion source + create_ingestion_source = { + "query": """mutation createIngestionSource($input: UpdateIngestionSourceInput!) 
{\n + createIngestionSource(input: $input)\n}""", + "variables": {"input":{"type":"snowflake","name":"test","config": + {"recipe": + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"account_id\":null,\"include_table_lineage\":true,\"include_view_lineage\":true,\"include_tables\":true,\"include_views\":true,\"profiling\":{\"enabled\":true,\"profile_table_level_only\":true},\"stateful_ingestion\":{\"enabled\":true}}}}", + "executorId":"default","debugMode":False,"extraArgs":[]}}}, + } + + _ensure_cant_create_ingestion_source(user_session, create_ingestion_source) + + + # Assign privileges to the new user to manage ingestion source + policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_INGESTION"], admin_session) + + # Verify new user can create and manage ingestion source(edit, delete) + ingestion_source_urn = _ensure_can_create_ingestion_source(user_session, create_ingestion_source) + + # Edit ingestion source + update_ingestion_source = { + "query": """mutation updateIngestionSource($urn: String!, $input: UpdateIngestionSourceInput!) {\n + updateIngestionSource(urn: $urn, input: $input)\n}""", + "variables": {"urn":ingestion_source_urn, + "input":{"type":"snowflake","name":"test updated", + "config":{"recipe":"{\"source\":{\"type\":\"snowflake\",\"config\":{\"account_id\":null,\"include_table_lineage\":true,\"include_view_lineage\":true,\"include_tables\":true,\"include_views\":true,\"profiling\":{\"enabled\":true,\"profile_table_level_only\":true},\"stateful_ingestion\":{\"enabled\":true}}}}", + "executorId":"default","debugMode":False,"extraArgs":[]}}} + } + + update_ingestion_success = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=update_ingestion_source) + update_ingestion_success.raise_for_status() + ingestion_data = update_ingestion_success.json() + + assert ingestion_data + assert ingestion_data["data"] + assert ingestion_data["data"]["updateIngestionSource"] + assert ingestion_data["data"]["updateIngestionSource"] == ingestion_source_urn + + + # Delete ingestion source + remove_ingestion_source = { + "query": """mutation deleteIngestionSource($urn: String!) {\n + deleteIngestionSource(urn: $urn)\n}""", + "variables": { + "urn": ingestion_source_urn + }, + } + + remove_ingestion_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_ingestion_source) + remove_ingestion_response.raise_for_status() + ingestion_data = remove_ingestion_response.json() + + assert ingestion_data + assert ingestion_data["data"] + assert ingestion_data["data"]["deleteIngestionSource"] + assert ingestion_data["data"]["deleteIngestionSource"] == ingestion_source_urn + + # Remove the policy + remove_policy(policy_urn, admin_session) + + # Ensure that user can't create ingestion source after policy is removed + _ensure_cant_create_ingestion_source(user_session, create_ingestion_source) \ No newline at end of file diff --git a/smoke-test/tests/privileges/utils.py b/smoke-test/tests/privileges/utils.py new file mode 100644 index 0000000000000..ea1f565f6f5ac --- /dev/null +++ b/smoke-test/tests/privileges/utils.py @@ -0,0 +1,218 @@ +import requests_wrapper as requests +from tests.consistency_utils import wait_for_writes_to_sync +from tests.utils import (get_frontend_url, wait_for_writes_to_sync, get_admin_credentials) + + +def set_base_platform_privileges_policy_status(status, session): + base_platform_privileges = { + "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) 
{\n + updatePolicy(urn: $urn, input: $input) }""", + "variables": { + "urn": "urn:li:dataHubPolicy:7", + "input": { + "type": "PLATFORM", + "state": status, + "name": "All Users - Base Platform Privileges", + "description": "Grants base platform privileges to ALL users of DataHub. Change this policy to alter that behavior.", + "privileges": ["MANAGE_INGESTION", + "MANAGE_SECRETS", + "MANAGE_USERS_AND_GROUPS", + "VIEW_ANALYTICS", + "GENERATE_PERSONAL_ACCESS_TOKENS", + "MANAGE_DOMAINS", + "MANAGE_GLOBAL_ANNOUNCEMENTS", + "MANAGE_TESTS", + "MANAGE_GLOSSARIES", + "MANAGE_TAGS", + "MANAGE_GLOBAL_VIEWS", + "MANAGE_GLOBAL_OWNERSHIP_TYPES"], + "actors": { + "users": [], + "groups": None, + "resourceOwners": False, + "allUsers": True, + "allGroups": False, + "resourceOwnersTypes": None, + }, + }, + }, + } + base_privileges_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=base_platform_privileges) + base_privileges_response.raise_for_status() + base_res_data = base_privileges_response.json() + assert base_res_data["data"]["updatePolicy"] == "urn:li:dataHubPolicy:7" + +def set_view_dataset_sensitive_info_policy_status(status, session): + dataset_sensitive_information = { + "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) {\n + updatePolicy(urn: $urn, input: $input) }""", + "variables": { + "urn": "urn:li:dataHubPolicy:view-dataset-sensitive", + "input": { + "type": "METADATA", + "state": status, + "name": "All Users - View Dataset Sensitive Information", + "description": "Grants viewing privileges of usage and profile information of all datasets for all users", + "privileges": ["VIEW_DATASET_USAGE","VIEW_DATASET_PROFILE"], + "actors": { + "users": [], + "groups": None, + "resourceOwners": False, + "allUsers": True, + "allGroups": False, + "resourceOwnersTypes": None, + }, + }, + }, + } + sensitive_info_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=dataset_sensitive_information) + sensitive_info_response.raise_for_status() + sens_info_data = sensitive_info_response.json() + assert sens_info_data["data"]["updatePolicy"] == "urn:li:dataHubPolicy:view-dataset-sensitive" + +def set_view_entity_profile_privileges_policy_status(status, session): + view_entity_page = { + "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) {\n + updatePolicy(urn: $urn, input: $input) }""", + "variables": { + "urn": "urn:li:dataHubPolicy:view-entity-page-all", + "input": { + "type": "METADATA", + "state": status, + "name": "All Users - View Entity Page", + "description": "Grants entity view to all users", + "privileges": ["VIEW_ENTITY_PAGE", + "SEARCH_PRIVILEGE", + "GET_COUNTS_PRIVILEGE", + "GET_TIMESERIES_ASPECT_PRIVILEGE", + "GET_ENTITY_PRIVILEGE", + "GET_TIMELINE_PRIVILEGE"], + "actors": { + "users": [], + "groups": None, + "resourceOwners": False, + "allUsers": True, + "allGroups": False, + "resourceOwnersTypes": None, + }, + }, + }, + } + view_entity_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=view_entity_page) + view_entity_response.raise_for_status() + view_entity_data = view_entity_response.json() + assert view_entity_data["data"]["updatePolicy"] == "urn:li:dataHubPolicy:view-entity-page-all" + +def create_user(session, email, password): + # Remove user if exists + res_data = remove_user(session, f"urn:li:corpuser:{email}") + assert res_data + assert "error" not in res_data + # Get the invite token + get_invite_token_json = { + "query": """query getInviteToken($input: GetInviteTokenInput!) 
{\n + getInviteToken(input: $input){\n + inviteToken\n + }\n + }""", + "variables": {"input": {}}, + } + get_invite_token_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=get_invite_token_json + ) + get_invite_token_response.raise_for_status() + get_invite_token_res_data = get_invite_token_response.json() + invite_token = get_invite_token_res_data["data"]["getInviteToken"]["inviteToken"] + assert invite_token is not None + assert "error" not in invite_token + # Create a new user using the invite token + sign_up_json = { + "fullName": "Test User", + "email": email, + "password": password, + "title": "Data Engineer", + "inviteToken": invite_token, + } + sign_up_response = session.post( + f"{get_frontend_url()}/signUp", json=sign_up_json + ) + sign_up_response.raise_for_status() + assert sign_up_response + assert "error" not in sign_up_response + wait_for_writes_to_sync() + session.cookies.clear() + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + return admin_session + + +def login_as(username, password): + session = requests.Session() + headers = { + "Content-Type": "application/json", + } + data = '{"username":"' + username + '", "password":"' + password + '"}' + response = session.post(f"{get_frontend_url()}/logIn", headers=headers, data=data) + response.raise_for_status() + return session + +def remove_user(session, urn): + json = { + "query": """mutation removeUser($urn: String!) {\n + removeUser(urn: $urn) + }""", + "variables": {"urn": urn}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + return response.json() + +def create_user_policy(user_urn, privileges, session): + policy = { + "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n + createPolicy(input: $input) }""", + "variables": { + "input": { + "type": "PLATFORM", + "name": "Policy Name", + "description": "Policy Description", + "state": "ACTIVE", + "resources": {"filter":{"criteria":[]}}, + "privileges": privileges, + "actors": { + "users": [user_urn], + "resourceOwners": False, + "allUsers": False, + "allGroups": False, + }, + } + }, + } + + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=policy) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["createPolicy"] + return res_data["data"]["createPolicy"] + +def remove_policy(urn, session): + remove_policy_json = { + "query": """mutation deletePolicy($urn: String!) 
{\n + deletePolicy(urn: $urn) }""", + "variables": {"urn": urn}, + } + + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_policy_json) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["deletePolicy"] + assert res_data["data"]["deletePolicy"] == urn \ No newline at end of file From 95d9ff2cc2b71c5062454f6da1eca5084d6dd6eb Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 2 Nov 2023 01:44:16 +0530 Subject: [PATCH 040/792] fix(ingest): handle exceptions in min, max, mean profiling (#9129) --- .../ingestion/source/ge_data_profiler.py | 36 +++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py index 9f6ac9dd21164..6b97d2eb456da 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py @@ -406,22 +406,52 @@ def _get_dataset_rows(self, dataset_profile: DatasetProfileClass) -> None: def _get_dataset_column_min( self, column_profile: DatasetFieldProfileClass, column: str ) -> None: - if self.config.include_field_min_value: + if not self.config.include_field_min_value: + return + try: column_profile.min = str(self.dataset.get_column_min(column)) + except Exception as e: + logger.debug( + f"Caught exception while attempting to get column min for column {column}. {e}" + ) + self.report.report_warning( + "Profiling - Unable to get column min", + f"{self.dataset_name}.{column}", + ) @_run_with_query_combiner def _get_dataset_column_max( self, column_profile: DatasetFieldProfileClass, column: str ) -> None: - if self.config.include_field_max_value: + if not self.config.include_field_max_value: + return + try: column_profile.max = str(self.dataset.get_column_max(column)) + except Exception as e: + logger.debug( + f"Caught exception while attempting to get column max for column {column}. {e}" + ) + self.report.report_warning( + "Profiling - Unable to get column max", + f"{self.dataset_name}.{column}", + ) @_run_with_query_combiner def _get_dataset_column_mean( self, column_profile: DatasetFieldProfileClass, column: str ) -> None: - if self.config.include_field_mean_value: + if not self.config.include_field_mean_value: + return + try: column_profile.mean = str(self.dataset.get_column_mean(column)) + except Exception as e: + logger.debug( + f"Caught exception while attempting to get column mean for column {column}. 
{e}" + ) + self.report.report_warning( + "Profiling - Unable to get column mean", + f"{self.dataset_name}.{column}", + ) @_run_with_query_combiner def _get_dataset_column_median( From 932eebea353cf6f31bc489428feb54b43d647075 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Thu, 2 Nov 2023 01:50:15 +0530 Subject: [PATCH 041/792] feat: rename Assets tab to Owner Of (#9141) Co-authored-by: John Joyce --- datahub-web-react/src/app/entity/group/GroupProfile.tsx | 2 +- datahub-web-react/src/app/entity/user/UserProfile.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/group/GroupProfile.tsx b/datahub-web-react/src/app/entity/group/GroupProfile.tsx index 53d2062277dec..11ed31e00003f 100644 --- a/datahub-web-react/src/app/entity/group/GroupProfile.tsx +++ b/datahub-web-react/src/app/entity/group/GroupProfile.tsx @@ -16,7 +16,7 @@ import NonExistentEntityPage from '../shared/entity/NonExistentEntityPage'; const messageStyle = { marginTop: '10%' }; export enum TabType { - Assets = 'Assets', + Assets = 'Owner Of', Members = 'Members', } diff --git a/datahub-web-react/src/app/entity/user/UserProfile.tsx b/datahub-web-react/src/app/entity/user/UserProfile.tsx index 1d20072c4ea8f..e8284ba61afe4 100644 --- a/datahub-web-react/src/app/entity/user/UserProfile.tsx +++ b/datahub-web-react/src/app/entity/user/UserProfile.tsx @@ -17,7 +17,7 @@ export interface Props { } export enum TabType { - Assets = 'Assets', + Assets = 'Owner Of', Groups = 'Groups', } const ENABLED_TAB_TYPES = [TabType.Assets, TabType.Groups]; From 50789224a12e0f48d6b4ca2ef3876498f7738d9e Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 1 Nov 2023 16:58:37 -0700 Subject: [PATCH 042/792] fix(ingest/mongodb): fix schema inference for lists of values (#9145) --- .../datahub/ingestion/source/schema_inference/object.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py b/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py index 5797d66aa4d19..b58bdf41ccaa5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py @@ -16,7 +16,7 @@ class SchemaDescription(BasicSchemaDescription): nullable: bool # if field is ever missing -def is_field_nullable(doc: Dict[str, Any], field_path: Tuple) -> bool: +def is_field_nullable(doc: Dict[str, Any], field_path: Tuple[str, ...]) -> bool: """ Check if a nested field is nullable in a document from a collection. @@ -54,7 +54,10 @@ def is_field_nullable(doc: Dict[str, Any], field_path: Tuple) -> bool: # count empty lists of nested objects as nullable if len(value) == 0: return True - return any(is_field_nullable(x, remaining_fields) for x in doc[field]) + return any( + isinstance(x, dict) and is_field_nullable(x, remaining_fields) + for x in doc[field] + ) # any other types to check? 
# raise ValueError("Nested type not 'list' or 'dict' encountered") From f7cd80283ad768afe14e3cf53b9c38fe912be570 Mon Sep 17 00:00:00 2001 From: deepgarg-visa <149145061+deepgarg-visa@users.noreply.github.com> Date: Thu, 2 Nov 2023 09:16:58 +0530 Subject: [PATCH 043/792] fix(ingest/db2): fix handling for table properties (#9128) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/sql/sql_common.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 51909eaf4ed55..80f828e9ea2fd 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -20,6 +20,7 @@ import sqlalchemy.dialects.postgresql.base from sqlalchemy import create_engine, inspect from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.engine.row import LegacyRow from sqlalchemy.exc import ProgrammingError from sqlalchemy.sql import sqltypes as types from sqlalchemy.types import TypeDecorator, TypeEngine @@ -784,7 +785,7 @@ def get_table_properties( table_info: dict = inspector.get_table_comment(table, f'"{schema}"') # type: ignore description = table_info.get("text") - if type(description) is tuple: + if isinstance(description, LegacyRow): # Handling for value type tuple which is coming for dialect 'db2+ibm_db' description = table_info["text"][0] From bab9d1c93196b7a7181525609e854640c9a13712 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 1 Nov 2023 20:47:18 -0700 Subject: [PATCH 044/792] fix(ingest): fully support MCPs in urn_iter primitive (#9157) --- .../src/datahub/utilities/urns/urn_iter.py | 16 +++++++------- .../tests/unit/serde/test_urn_iterator.py | 21 +++++++++++++------ 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py index e13d439161064..169a4ac3649a3 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py @@ -117,17 +117,17 @@ def _modify_at_path( if isinstance(path[0], int): assert isinstance(model, list) model[path[0]] = new_value - elif isinstance(model, MetadataChangeProposalWrapper): - setattr(model, path[0], new_value) - else: - assert isinstance(model, DictWrapper) + elif isinstance(model, DictWrapper): model._inner_dict[path[0]] = new_value + else: # MCPW + setattr(model, path[0], new_value) elif isinstance(path[0], int): assert isinstance(model, list) - return _modify_at_path(model[path[0]], path[1:], new_value) - else: - assert isinstance(model, DictWrapper) - return _modify_at_path(model._inner_dict[path[0]], path[1:], new_value) + _modify_at_path(model[path[0]], path[1:], new_value) + elif isinstance(model, DictWrapper): + _modify_at_path(model._inner_dict[path[0]], path[1:], new_value) + else: # MCPW + _modify_at_path(getattr(model, path[0]), path[1:], new_value) def _lowercase_dataset_urn(dataset_urn: str) -> str: diff --git a/metadata-ingestion/tests/unit/serde/test_urn_iterator.py b/metadata-ingestion/tests/unit/serde/test_urn_iterator.py index 9657ac45068da..135580dcdff13 100644 --- a/metadata-ingestion/tests/unit/serde/test_urn_iterator.py +++ b/metadata-ingestion/tests/unit/serde/test_urn_iterator.py @@ -1,4 +1,5 @@ import datahub.emitter.mce_builder as builder +from datahub.emitter.mcp import 
MetadataChangeProposalWrapper from datahub.metadata.com.linkedin.pegasus2avro.dataset import ( DatasetLineageTypeClass, FineGrainedLineage, @@ -10,11 +11,11 @@ from datahub.utilities.urns.urn_iter import list_urns_with_path, lowercase_dataset_urns -def _datasetUrn(tbl): +def _datasetUrn(tbl: str) -> str: return builder.make_dataset_urn("bigquery", tbl, "PROD") -def _fldUrn(tbl, fld): +def _fldUrn(tbl: str, fld: str) -> str: return builder.make_schema_field_urn(_datasetUrn(tbl), fld) @@ -114,8 +115,10 @@ def test_upstream_lineage_urn_iterator(): ] -def _make_test_lineage_obj(upstream: str, downstream: str) -> UpstreamLineage: - return UpstreamLineage( +def _make_test_lineage_obj( + table: str, upstream: str, downstream: str +) -> MetadataChangeProposalWrapper: + lineage = UpstreamLineage( upstreams=[ Upstream( dataset=_datasetUrn(upstream), @@ -132,11 +135,17 @@ def _make_test_lineage_obj(upstream: str, downstream: str) -> UpstreamLineage: ], ) + return MetadataChangeProposalWrapper(entityUrn=_datasetUrn(table), aspect=lineage) + def test_dataset_urn_lowercase_transformer(): - original = _make_test_lineage_obj("upstreamTable", "downstreamTable") + original = _make_test_lineage_obj( + "mainTableName", "upstreamTable", "downstreamTable" + ) - expected = _make_test_lineage_obj("upstreamtable", "downstreamtable") + expected = _make_test_lineage_obj( + "maintablename", "upstreamtable", "downstreamtable" + ) assert original != expected # sanity check From 12b41713b46ab474f0d55ea81fe0e854526036a9 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 2 Nov 2023 10:05:24 +0530 Subject: [PATCH 045/792] =?UTF-8?q?fix(ingest/bigquery):=20use=20correct?= =?UTF-8?q?=20row=20count=20in=20null=20count=20profiling=20c=E2=80=A6=20(?= =?UTF-8?q?#9123)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Harshal Sheth Co-authored-by: Aseem Bansal --- .../datahub/ingestion/source/ge_data_profiler.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py index 6b97d2eb456da..c334a97680e3e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py @@ -659,7 +659,16 @@ def generate_dataset_profile( # noqa: C901 (complexity) self.query_combiner.flush() assert profile.rowCount is not None - row_count: int = profile.rowCount + row_count: int # used for null counts calculation + if profile.partitionSpec and "SAMPLE" in profile.partitionSpec.partition: + # We can alternatively use `self._get_dataset_rows(profile)` to get + # exact count of rows in sample, as actual rows involved in sample + # may be slightly different (more or less) than configured `sample_size`. + # However not doing so to start with, as that adds another query overhead + # plus approximate metrics should work for sampling based profiling. 
+ row_count = self.config.sample_size + else: + row_count = profile.rowCount for column_spec in columns_profiling_queue: column = column_spec.column @@ -811,7 +820,7 @@ def update_dataset_batch_use_sampling(self, profile: DatasetProfileClass) -> Non sample_pc = 100 * self.config.sample_size / profile.rowCount sql = ( f"SELECT * FROM {str(self.dataset._table)} " - + f"TABLESAMPLE SYSTEM ({sample_pc:.3f} percent)" + + f"TABLESAMPLE SYSTEM ({sample_pc:.8f} percent)" ) temp_table_name = create_bigquery_temp_table( self, From 11d8988868357b956e7b6ccfa905689d6163f814 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 2 Nov 2023 17:46:27 +0900 Subject: [PATCH 046/792] docs: add feature guides for subscriptions and notifications (#9122) --- docs-website/sidebars.js | 5 + docs/managed-datahub/saas-slack-setup.md | 4 +- .../subscription-and-notification.md | 130 ++++++++++++++++++ 3 files changed, 137 insertions(+), 2 deletions(-) create mode 100644 docs/managed-datahub/subscription-and-notification.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 39eaea57444ed..ab4c1311d5fc7 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -625,6 +625,11 @@ module.exports = { type: "doc", id: "docs/managed-datahub/chrome-extension", }, + { + type: "doc", + id: "docs/managed-datahub/subscription-and-notification", + className: "saasOnly", + }, { "Managed DataHub Release History": [ "docs/managed-datahub/release-notes/v_0_2_12", diff --git a/docs/managed-datahub/saas-slack-setup.md b/docs/managed-datahub/saas-slack-setup.md index 68f947f171715..8d4519b878cd8 100644 --- a/docs/managed-datahub/saas-slack-setup.md +++ b/docs/managed-datahub/saas-slack-setup.md @@ -1,6 +1,6 @@ import FeatureAvailability from '@site/src/components/FeatureAvailability'; -# Configure Slack Notifications +# Configure Slack For Notifications @@ -108,4 +108,4 @@ For now we support sending notifications to - Click on “More” - Click on “Copy member ID” -![](https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/integrations/slack/slack_user_id.png) +![](https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/integrations/slack/slack_user_id.png) \ No newline at end of file diff --git a/docs/managed-datahub/subscription-and-notification.md b/docs/managed-datahub/subscription-and-notification.md new file mode 100644 index 0000000000000..b30a03de16511 --- /dev/null +++ b/docs/managed-datahub/subscription-and-notification.md @@ -0,0 +1,130 @@ +import FeatureAvailability from '@site/src/components/FeatureAvailability'; + +# Subscriptions & Notifications + + + +DataHub's Subscriptions and Notifications feature gives you real-time change alerts on data assets of your choice. +With this feature, you can set up subscriptions to specific changes for an Entity – and DataHub will notify you when those changes happen. Currently, DataHub supports notifications on Slack, with support for Microsoft Teams and email subscriptions forthcoming. + +
+
+This feature is especially useful in helping you stay on top of any upstream changes that could impact the assets you or your stakeholders rely on. It eliminates the need for you and your team to manually check for upstream changes, or for upstream stakeholders to identify and notify impacted users.
+As a user, you can subscribe to and receive notifications about changes such as deprecations, schema changes, changes in ownership, assertions, or incidents. You’ll always be in the know about potential data quality issues so you can proactively manage your data resources.
+
+## Prerequisites
+
+Once you have [configured Slack within your DataHub instance](saas-slack-setup.md), you will be able to subscribe to any Entity in DataHub and begin receiving notifications via DM.
+If you want to create and manage group-level Subscriptions for your team, you will need [the following privileges](../../docs/authorization/roles.md#role-privileges):
+
+- Manage Group Notification Settings
+- Manage Group Subscriptions
+
+## Using DataHub’s Subscriptions and Notifications Feature
+
+The first step is identifying the assets you want to subscribe to.
+DataHub’s [Lineage and Impact Analysis features](../../docs/act-on-metadata/impact-analysis.md#lineage-impact-analysis-setup-prerequisites-and-permissions) can help you identify upstream entities that could impact the assets you use and are responsible for.
+You can use the Subscriptions and Notifications feature to sign up for updates for your entire team, or just for yourself.
+
+### Subscribing Your Team/Group to Notifications
+
+The dropdown menu next to the Subscribe button lets you choose who the subscription is for. To create a group subscription, click on Manage Group Subscriptions.
+
+ +Next, customize the group’s subscriptions by selecting the types of changes you want the group to be notified about. + +
+ +Connect to Slack. Currently, Acryl's Subscriptions and Notifications feature integrates only with Slack. Add your group’s Slack Channel ID to receive notifications on Slack. +(You can find your Channel ID in the About section of your channel on Slack.) + +
+ +### Individually Subscribing to an Entity + +Select the **Subscribe Me** option in the Subscriptions dropdown menu. + +
+ +Pick the updates you want to be notified about, and connect your Slack account by using your Slack Member ID. + +
+ +:::note +You can find your Slack Member ID in your profile settings. + +
+::: + +### Managing Your Subscriptions + +You can enable, disable, or manage notifications at any time to ensure that you receive relevant updates. + +Simply use the Dropdown menu next to the Subscribe button to unsubscribe from the asset, or to manage/modify your subscription (say, to modify the changes you want to be updated about). + +
+ +You can also view and manage your subscriptions in your DataHub settings page. + +
+ +You can view and manage the group’s subscriptions on the group’s page on DataHub. + +
+ +## FAQ + +
+
+**What changes can I be notified about using this feature?**
+
+You can subscribe to deprecations, Assertion status changes, Incident status changes, Schema changes, Ownership changes, Glossary Term changes, and Tag changes.
+
+
+**What if I no longer want to receive updates about a data asset?**
+
+You can unsubscribe from any asset to stop receiving notifications about it. On the asset’s DataHub page, simply use the dropdown menu next to the Subscribe button to unsubscribe from the asset.
+
+
+**What if I want to be notified about different changes?**
+
+To modify your subscription, use the dropdown menu next to the Subscribe button to modify the changes you want to be notified about.
+
+ +## Reference + +- [DataHub Blog - Simplifying Data Monitoring & Management with Subscriptions and Notifications with Acryl DataHub](https://www.acryldata.io/blog/simplifying-data-monitoring-and-management-with-subscriptions-and-notifications-with-acryl-datahub) +- Video Guide - Getting Started with Subscription & Notifications + From f42cb95b928c071b8309cf7c3e9a0fe8b41d3a90 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 2 Nov 2023 17:46:49 +0900 Subject: [PATCH 047/792] docs: unify oidc guides using tabs (#9068) Co-authored-by: Harshal Sheth --- docs-website/sidebars.js | 11 +- .../guides/sso/configure-oidc-behind-proxy.md | 18 +- .../guides/sso/configure-oidc-react-azure.md | 127 ------- .../guides/sso/configure-oidc-react-google.md | 118 ------ .../guides/sso/configure-oidc-react-okta.md | 124 ------ .../guides/sso/configure-oidc-react.md | 355 +++++++++++++----- 6 files changed, 263 insertions(+), 490 deletions(-) delete mode 100644 docs/authentication/guides/sso/configure-oidc-react-azure.md delete mode 100644 docs/authentication/guides/sso/configure-oidc-react-google.md delete mode 100644 docs/authentication/guides/sso/configure-oidc-react-okta.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index ab4c1311d5fc7..9cc035f3e29e0 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -171,15 +171,8 @@ module.exports = { { "Frontend Authentication": [ "docs/authentication/guides/jaas", - { - "OIDC Authentication": [ - "docs/authentication/guides/sso/configure-oidc-react", - "docs/authentication/guides/sso/configure-oidc-react-google", - "docs/authentication/guides/sso/configure-oidc-react-okta", - "docs/authentication/guides/sso/configure-oidc-react-azure", - "docs/authentication/guides/sso/configure-oidc-behind-proxy", - ], - }, + "docs/authentication/guides/sso/configure-oidc-react", + "docs/authentication/guides/sso/configure-oidc-behind-proxy", ], }, "docs/authentication/introducing-metadata-service-authentication", diff --git a/docs/authentication/guides/sso/configure-oidc-behind-proxy.md b/docs/authentication/guides/sso/configure-oidc-behind-proxy.md index c998816e04735..684bf768f2baf 100644 --- a/docs/authentication/guides/sso/configure-oidc-behind-proxy.md +++ b/docs/authentication/guides/sso/configure-oidc-behind-proxy.md @@ -1,8 +1,9 @@ -# Configuring Frontend to use a Proxy when communicating with SSO Provider -*Authored on 22/08/2023* +# OIDC Proxy Configuration -The `datahub-frontend-react` server can be configured to use an http proxy when retrieving the openid-configuration. -This can be needed if your infrastructure is locked down and disallows connectivity by default, using proxies for fine-grained egress control. +_Authored on 22/08/2023_ + +The `datahub-frontend-react` server can be configured to use an http proxy when retrieving the openid-configuration. +This can be needed if your infrastructure is locked down and disallows connectivity by default, using proxies for fine-grained egress control. ## Configure http proxy and non proxy hosts @@ -17,7 +18,8 @@ HTTP_NON_PROXY_HOSTS=localhost|datahub-gms (or any other hosts that you would li ``` ## Optional: provide custom truststore -If your upstream proxy performs SSL termination to inspect traffic, this will result in different (self-signed) certificates for HTTPS connections. + +If your upstream proxy performs SSL termination to inspect traffic, this will result in different (self-signed) certificates for HTTPS connections. 
The default truststore used in the `datahub-frontend-react` docker image will not trust these kinds of connections. To address this, you can copy or mount your own truststore (provided by the proxy or network administrators) into the docker container. @@ -36,8 +38,8 @@ FROM linkedin/datahub-frontend-react: COPY /truststore-directory /certificates ``` -Building this Dockerfile will result in your own custom docker image on your local machine. -You will then be able to tag it, publish it to your own registry, etc. +Building this Dockerfile will result in your own custom docker image on your local machine. +You will then be able to tag it, publish it to your own registry, etc. #### Option b) Mount truststore from your host machine using a docker volume @@ -51,7 +53,7 @@ Adapt your docker-compose.yml to include a new volume mount in the `datahub-fron - /truststore-directory:/certificates ``` -### Reference new truststore +### Reference new truststore Add the following environment values to the `datahub-frontend-react` container: diff --git a/docs/authentication/guides/sso/configure-oidc-react-azure.md b/docs/authentication/guides/sso/configure-oidc-react-azure.md deleted file mode 100644 index 177387327c0e8..0000000000000 --- a/docs/authentication/guides/sso/configure-oidc-react-azure.md +++ /dev/null @@ -1,127 +0,0 @@ -# Configuring Azure Authentication for React App (OIDC) -*Authored on 21/12/2021* - -`datahub-frontend` server can be configured to authenticate users over OpenID Connect (OIDC). As such, it can be configured to -delegate authentication responsibility to identity providers like Microsoft Azure. - -This guide will provide steps for configuring DataHub authentication using Microsoft Azure. - -:::caution -Even when OIDC is configured, the root user can still login without OIDC by going -to `/login` URL endpoint. It is recommended that you don't use the default -credentials by mounting a different file in the front end container. To do this -please see [this guide](../jaas.md) to mount a custom user.props file for a JAAS authenticated deployment. -::: - -## Steps - -### 1. Create an application registration in Microsoft Azure portal - -a. Using an account linked to your organization, navigate to the [Microsoft Azure Portal](https://portal.azure.com). - -b. Select **App registrations**, then **New registration** to register a new app. - -c. Name your app registration and choose who can access your application. - -d. Select `Web` as the **Redirect URI** type and enter the following: -``` -https://your-datahub-domain.com/callback/oidc -``` -If you are just testing locally, the following can be used: `http://localhost:9002/callback/oidc`. -Azure supports more than one redirect URI, so both can be configured at the same time from the **Authentication** tab once the registration is complete. - -At this point, your app registration should look like the following: - - -
- - -e. Click **Register**. - -### 2. Configure Authentication (optional) - -Once registration is done, you will land on the app registration **Overview** tab. On the left-side navigation bar, click on **Authentication** under **Manage** and add extra redirect URIs if need be (if you want to support both local testing and Azure deployments). - - -
- - -Click **Save**. - -### 3. Configure Certificates & secrets - -On the left-side navigation bar, click on **Certificates & secrets** under **Manage**. -Select **Client secrets**, then **New client secret**. Type in a meaningful description for your secret and select an expiry. Click the **Add** button when you are done. - -**IMPORTANT:** Copy the `value` of your newly create secret since Azure will never display its value afterwards. - - -
- - -### 4. Configure API permissions - -On the left-side navigation bar, click on **API permissions** under **Manage**. DataHub requires the following four Microsoft Graph APIs: - -1. `User.Read` *(should be already configured)* -2. `profile` -3. `email` -4. `openid` - -Click on **Add a permission**, then from the **Microsoft APIs** tab select **Microsoft Graph**, then **Delegated permissions**. From the **OpenId permissions** category, select `email`, `openid`, `profile` and click **Add permissions**. - -At this point, you should be looking at a screen like the following: - - -
- - -### 5. Obtain Application (Client) ID - -On the left-side navigation bar, go back to the **Overview** tab. You should see the `Application (client) ID`. Save its value for the next step. - -### 6. Obtain Discovery URI - -On the same page, you should see a `Directory (tenant) ID`. Your OIDC discovery URI will be formatted as follows: - -``` -https://login.microsoftonline.com/{tenant ID}/v2.0/.well-known/openid-configuration -``` - -### 7. Configure `datahub-frontend` to enable OIDC authentication - -a. Open the file `docker/datahub-frontend/env/docker.env` - -b. Add the following configuration values to the file: - -``` -AUTH_OIDC_ENABLED=true -AUTH_OIDC_CLIENT_ID=your-client-id -AUTH_OIDC_CLIENT_SECRET=your-client-secret -AUTH_OIDC_DISCOVERY_URI=https://login.microsoftonline.com/{tenant ID}/v2.0/.well-known/openid-configuration -AUTH_OIDC_BASE_URL=your-datahub-url -AUTH_OIDC_SCOPE="openid profile email" -``` - -Replacing the placeholders above with the client id (step 5), client secret (step 3) and tenant ID (step 6) received from Microsoft Azure. - -### 9. Restart `datahub-frontend-react` docker container - -Now, simply restart the `datahub-frontend-react` container to enable the integration. - -``` -docker-compose -p datahub -f docker-compose.yml -f docker-compose.override.yml up datahub-frontend-react -``` - -Navigate to your DataHub domain to see SSO in action. - -## Resources -- [Microsoft identity platform and OpenID Connect protocol](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc/) \ No newline at end of file diff --git a/docs/authentication/guides/sso/configure-oidc-react-google.md b/docs/authentication/guides/sso/configure-oidc-react-google.md deleted file mode 100644 index af62185e6e787..0000000000000 --- a/docs/authentication/guides/sso/configure-oidc-react-google.md +++ /dev/null @@ -1,118 +0,0 @@ -# Configuring Google Authentication for React App (OIDC) -*Authored on 3/10/2021* - -`datahub-frontend` server can be configured to authenticate users over OpenID Connect (OIDC). As such, it can be configured to delegate -authentication responsibility to identity providers like Google. - -This guide will provide steps for configuring DataHub authentication using Google. - -:::caution -Even when OIDC is configured, the root user can still login without OIDC by going -to `/login` URL endpoint. It is recommended that you don't use the default -credentials by mounting a different file in the front end container. To do this -please see [this guide](../jaas.md) to mount a custom user.props file for a JAAS authenticated deployment. -::: - -## Steps - -### 1. Create a project in the Google API Console - -Using an account linked to your organization, navigate to the [Google API Console](https://console.developers.google.com/) and select **New project**. -Within this project, we will configure the OAuth2.0 screen and credentials. - -### 2. Create OAuth2.0 consent screen - -a. Navigate to `OAuth consent screen`. This is where you'll configure the screen your users see when attempting to -log in to DataHub. - -b. Select `Internal` (if you only want your company users to have access) and then click **Create**. -Note that in order to complete this step you should be logged into a Google account associated with your organization. - -c. Fill out the details in the App Information & Domain sections. Make sure the 'Application Home Page' provided matches where DataHub is deployed -at your organization. - - -
- - -Once you've completed this, **Save & Continue**. - -d. Configure the scopes: Next, click **Add or Remove Scopes**. Select the following scopes: - - - `.../auth/userinfo.email` - - `.../auth/userinfo.profile` - - `openid` - -Once you've selected these, **Save & Continue**. - -### 3. Configure client credentials - -Now navigate to the **Credentials** tab. This is where you'll obtain your client id & secret, as well as configure info -like the redirect URI used after a user is authenticated. - -a. Click **Create Credentials** & select `OAuth client ID` as the credential type. - -b. On the following screen, select `Web application` as your Application Type. - -c. Add the domain where DataHub is hosted to your 'Authorized Javascript Origins'. - -``` -https://your-datahub-domain.com -``` - -d. Add the domain where DataHub is hosted with the path `/callback/oidc` appended to 'Authorized Redirect URLs'. - -``` -https://your-datahub-domain.com/callback/oidc -``` - -e. Click **Create** - -f. You will now receive a pair of values, a client id and a client secret. Bookmark these for the next step. - -At this point, you should be looking at a screen like the following: - - -
- - -Success! - -### 4. Configure `datahub-frontend` to enable OIDC authentication - -a. Open the file `docker/datahub-frontend/env/docker.env` - -b. Add the following configuration values to the file: - -``` -AUTH_OIDC_ENABLED=true -AUTH_OIDC_CLIENT_ID=your-client-id -AUTH_OIDC_CLIENT_SECRET=your-client-secret -AUTH_OIDC_DISCOVERY_URI=https://accounts.google.com/.well-known/openid-configuration -AUTH_OIDC_BASE_URL=your-datahub-url -AUTH_OIDC_SCOPE="openid profile email" -AUTH_OIDC_USER_NAME_CLAIM=email -AUTH_OIDC_USER_NAME_CLAIM_REGEX=([^@]+) -``` - -Replacing the placeholders above with the client id & client secret received from Google in Step 3f. - - -### 5. Restart `datahub-frontend-react` docker container - -Now, simply restart the `datahub-frontend-react` container to enable the integration. - -``` -docker-compose -p datahub -f docker-compose.yml -f docker-compose.override.yml up datahub-frontend-react -``` - -Navigate to your DataHub domain to see SSO in action. - - -## References - -- [OpenID Connect in Google Identity](https://developers.google.com/identity/protocols/oauth2/openid-connect) \ No newline at end of file diff --git a/docs/authentication/guides/sso/configure-oidc-react-okta.md b/docs/authentication/guides/sso/configure-oidc-react-okta.md deleted file mode 100644 index 320b887a28f16..0000000000000 --- a/docs/authentication/guides/sso/configure-oidc-react-okta.md +++ /dev/null @@ -1,124 +0,0 @@ -# Configuring Okta Authentication for React App (OIDC) -*Authored on 3/10/2021* - -`datahub-frontend` server can be configured to authenticate users over OpenID Connect (OIDC). As such, it can be configured to -delegate authentication responsibility to identity providers like Okta. - -This guide will provide steps for configuring DataHub authentication using Okta. - -:::caution -Even when OIDC is configured, the root user can still login without OIDC by going -to `/login` URL endpoint. It is recommended that you don't use the default -credentials by mounting a different file in the front end container. To do this -please see [this guide](../jaas.md) to mount a custom user.props file for a JAAS authenticated deployment. -::: - -## Steps - -### 1. Create an application in Okta Developer Console - -a. Log in to your Okta admin account & navigate to the developer console - -b. Select **Applications**, then **Add Application**, the **Create New App** to create a new app. - -c. Select `Web` as the **Platform**, and `OpenID Connect` as the **Sign on method** - -d. Click **Create** - -e. Under 'General Settings', name your application - -f. Below, add a **Login Redirect URI**. This should be formatted as - -``` -https://your-datahub-domain.com/callback/oidc -``` - -If you're just testing locally, this can be `http://localhost:9002/callback/oidc`. - -g. Below, add a **Logout Redirect URI**. This should be formatted as - -``` -https://your-datahub-domain.com -``` - -h. [Optional] If you're enabling DataHub login as an Okta tile, you'll need to provide the **Initiate Login URI**. You -can set if to - -``` -https://your-datahub-domain.com/authenticate -``` - -If you're just testing locally, this can be `http://localhost:9002`. - -i. Click **Save** - - -### 2. Obtain Client Credentials - -On the subsequent screen, you should see the client credentials. Bookmark the `Client id` and `Client secret` for the next step. - -### 3. Obtain Discovery URI - -On the same page, you should see an `Okta Domain`. 
Your OIDC discovery URI will be formatted as follows: - -``` -https://your-okta-domain.com/.well-known/openid-configuration -``` - -for example, `https://dev-33231928.okta.com/.well-known/openid-configuration`. - -At this point, you should be looking at a screen like the following: - - -
- - -Success! - -### 4. Configure `datahub-frontend` to enable OIDC authentication - -a. Open the file `docker/datahub-frontend/env/docker.env` - -b. Add the following configuration values to the file: - -``` -AUTH_OIDC_ENABLED=true -AUTH_OIDC_CLIENT_ID=your-client-id -AUTH_OIDC_CLIENT_SECRET=your-client-secret -AUTH_OIDC_DISCOVERY_URI=https://your-okta-domain.com/.well-known/openid-configuration -AUTH_OIDC_BASE_URL=your-datahub-url -AUTH_OIDC_SCOPE="openid profile email groups" -``` - -Replacing the placeholders above with the client id & client secret received from Okta in Step 2. - -> **Pro Tip!** You can easily enable Okta to return the groups that a user is associated with, which will be provisioned in DataHub, along with the user logging in. This can be enabled by setting the `AUTH_OIDC_EXTRACT_GROUPS_ENABLED` flag to `true`. -> if they do not already exist in DataHub. You can enable your Okta application to return a 'groups' claim from the Okta Console at Applications > Your Application -> Sign On -> OpenID Connect ID Token Settings (Requires an edit). -> -> By default, we assume that the groups will appear in a claim named "groups". This can be customized using the `AUTH_OIDC_GROUPS_CLAIM` container configuration. -> -> -
- - -### 5. Restart `datahub-frontend-react` docker container - -Now, simply restart the `datahub-frontend-react` container to enable the integration. - -``` -docker-compose -p datahub -f docker-compose.yml -f docker-compose.override.yml up datahub-frontend-react -``` - -Navigate to your DataHub domain to see SSO in action. - -## Resources -- [OAuth 2.0 and OpenID Connect Overview](https://developer.okta.com/docs/concepts/oauth-openid/) diff --git a/docs/authentication/guides/sso/configure-oidc-react.md b/docs/authentication/guides/sso/configure-oidc-react.md index 1671673c09318..9b4af80bb0ccd 100644 --- a/docs/authentication/guides/sso/configure-oidc-react.md +++ b/docs/authentication/guides/sso/configure-oidc-react.md @@ -1,59 +1,201 @@ -# Overview +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# OIDC Authentication The DataHub React application supports OIDC authentication built on top of the [Pac4j Play](https://github.com/pac4j/play-pac4j) library. This enables operators of DataHub to integrate with 3rd party identity providers like Okta, Google, Keycloak, & more to authenticate their users. -When configured, OIDC auth will be enabled between clients of the DataHub UI & `datahub-frontend` server. Beyond this point is considered -to be a secure environment and as such authentication is validated & enforced only at the "front door" inside datahub-frontend. +## 1. Register an app with your Identity Provider -:::caution -Even if OIDC is configured the root user can still login without OIDC by going -to `/login` URL endpoint. It is recommended that you don't use the default -credentials by mounting a different file in the front end container. To do this -please see [this guide](../jaas.md) to mount a custom user.props file for a JAAS authenticated deployment. + + + +#### Create a project in the Google API Console + +Using an account linked to your organization, navigate to the [Google API Console](https://console.developers.google.com/) and select **New project**. +Within this project, we will configure the OAuth2.0 screen and credentials. + +#### Create OAuth2.0 consent screen + +Navigate to **OAuth consent screen**. This is where you'll configure the screen your users see when attempting to +log in to DataHub. Select **Internal** (if you only want your company users to have access) and then click **Create**. +Note that in order to complete this step you should be logged into a Google account associated with your organization. + +Fill out the details in the App Information & Domain sections. Make sure the 'Application Home Page' provided matches where DataHub is deployed +at your organization. Once you've completed this, **Save & Continue**. + +
+
+#### Configure the scopes
+
+Next, click **Add or Remove Scopes**. Select the following scopes and click **Save & Continue**.
+
+- .../auth/userinfo.email
+- .../auth/userinfo.profile
+- openid
+
+
+
+#### Create an application in Okta Developer Console
+
+Log in to your Okta admin account & navigate to the developer console. Select **Applications**, then **Add Application**, then **Create New App** to create a new app.
+Select `Web` as the **Platform**, and `OpenID Connect` as the **Sign on method**.
+
+Click **Create** and name your application under **General Settings** and save.
+
+- **Login Redirect URI** : `https://your-datahub-domain.com/callback/oidc`
+- **Logout Redirect URI** : `https://your-datahub-domain.com`
+
+
+:::note Optional
+If you're enabling DataHub login as an Okta tile, you'll need to provide the **Initiate Login URI**. You
+can set it to `https://your-datahub-domain.com/authenticate`. If you're just testing locally, this can be `http://localhost:9002`.
+:::
+
+ -1. [Configuring OIDC using Google](configure-oidc-react-google.md) -2. [Configuring OIDC using Okta](configure-oidc-react-okta.md) -3. [Configuring OIDC using Azure](configure-oidc-react-azure.md) +#### Create an application registration in Microsoft Azure portal -## Configuring OIDC in React +Using an account linked to your organization, navigate to the [Microsoft Azure Portal](https://portal.azure.com). Select **App registrations**, then **New registration** to register a new app. -### 1. Register an app with your Identity Provider +Name your app registration and choose who can access your application. -To configure OIDC in React, you will most often need to register yourself as a client with your identity provider (Google, Okta, etc). Each provider may -have their own instructions. Provided below are links to examples for Okta, Google, Azure AD, & Keycloak. +- **Redirect URI** : Select **Web** as type and enter `https://your-datahub-domain.com/callback/oidc` -- [Registering an App in Okta](https://developer.okta.com/docs/guides/add-an-external-idp/openidconnect/main/) -- [OpenID Connect in Google Identity](https://developers.google.com/identity/protocols/oauth2/openid-connect) -- [OpenID Connect authentication with Azure Active Directory](https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/auth-oidc) -- [Keycloak - Securing Applications and Services Guide](https://www.keycloak.org/docs/latest/securing_apps/) +Azure supports more than one redirect URI, so both can be configured at the same time from the **Authentication** tab once the registration is complete. +At this point, your app registration should look like the following. Finally, click **Register**. + +
+*(screenshot)*
+
-During the registration process, you'll need to provide a login redirect URI to the identity provider. This tells the identity provider -where to redirect to once they've authenticated the end user. +:::note Optional +Once registration is done, you will land on the app registration **Overview** tab. +On the left-side navigation bar, click on **Authentication** under **Manage** and add extra redirect URIs if need be (if you want to support both local testing and Azure deployments). Finally, click **Save**. -By default, the URL will be constructed as follows: +
-> "http://your-datahub-domain.com/callback/oidc" +::: + +#### Configure Certificates & secrets + +On the left-side navigation bar, click on **Certificates & secrets** under **Manage**. +Select **Client secrets**, then **New client secret**. Type in a meaningful description for your secret and select an expiry. Click the **Add** button when you are done. +Copy the value of your newly create secret since Azure will never display its value afterwards. + +
+ +#### Configure API permissions + +On the left-side navigation bar, click on **API permissions** under **Manage**. DataHub requires the following four Microsoft Graph APIs: -For example, if you're hosted DataHub at `datahub.myorg.com`, this -value would be `http://datahub.myorg.com/callback/oidc`. For testing purposes you can also specify localhost as the domain name -directly: `http://localhost:9002/callback/oidc` +- User.Read _(should be already configured)_ +- profile +- email +- openid + +Click on **Add a permission**, then from the **Microsoft APIs** tab select **Microsoft Graph**, then **Delegated permissions**. From the **OpenId permissions** category, select `email`, `openid`, `profile` and click **Add permissions**. + +At this point, you should be looking at a screen like the following: + +
+*(screenshot)*
+
+ +## 2. Obtain Client Credentials & Discovery URL The goal of this step should be to obtain the following values, which will need to be configured before deploying DataHub: -1. **Client ID** - A unique identifier for your application with the identity provider -2. **Client Secret** - A shared secret to use for exchange between you and your identity provider -3. **Discovery URL** - A URL where the OIDC API of your identity provider can be discovered. This should suffixed by - `.well-known/openid-configuration`. Sometimes, identity providers will not explicitly include this URL in their setup guides, though - this endpoint *will* exist as per the OIDC specification. For more info see http://openid.net/specs/openid-connect-discovery-1_0.html. +- **Client ID** - A unique identifier for your application with the identity provider +- **Client Secret** - A shared secret to use for exchange between you and your identity provider +- **Discovery URL** - A URL where the OIDC API of your identity provider can be discovered. This should suffixed by + `.well-known/openid-configuration`. Sometimes, identity providers will not explicitly include this URL in their setup guides, though + this endpoint _will_ exist as per the OIDC specification. For more info see http://openid.net/specs/openid-connect-discovery-1_0.html. + + + + + +**Obtain Client Credentials** + +Navigate to the **Credentials** tab. Click **Create Credentials** & select **OAuth client ID** as the credential type. + +On the following screen, select **Web application** as your Application Type. +Add the domain where DataHub is hosted to your 'Authorized Javascript Origins'. + +``` +https://your-datahub-domain.com +``` + +Add the domain where DataHub is hosted with the path `/callback/oidc` appended to 'Authorized Redirect URLs'. Finally, click **Create** + +``` +https://your-datahub-domain.com/callback/oidc +``` + +You will now receive a pair of values, a client id and a client secret. Bookmark these for the next step. + + + + +**Obtain Client Credentials** + +After registering the app, you should see the client credentials. Bookmark the `Client id` and `Client secret` for the next step. + +**Obtain Discovery URI** + +On the same page, you should see an `Okta Domain`. Your OIDC discovery URI will be formatted as follows: + +``` +https://your-okta-domain.com/.well-known/openid-configuration +``` + +For example, `https://dev-33231928.okta.com/.well-known/openid-configuration`. + +At this point, you should be looking at a screen like the following: + +
+*(screenshot)*
+
+ -### 2. Configure DataHub Frontend Server +**Obtain Application (Client) ID** -The second step to enabling OIDC involves configuring `datahub-frontend` to enable OIDC authentication with your Identity Provider. +On the left-side navigation bar, go back to the **Overview** tab. You should see the `Application (client) ID`. Save its value for the next step. + +**Obtain Discovery URI** + +On the same page, you should see a `Directory (tenant) ID`. Your OIDC discovery URI will be formatted as follows: + +``` +https://login.microsoftonline.com/{tenant ID}/v2.0/.well-known/openid-configuration +``` + + +
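Before wiring the discovery URI into DataHub, it can be worth confirming that it actually serves an OIDC configuration document. A minimal sketch, assuming the Python `requests` package is available and using a placeholder provider domain:

```python
import requests

# Placeholder: substitute the discovery URL you obtained in the steps above.
discovery_uri = "https://your-okta-domain.com/.well-known/openid-configuration"

# Fetch and parse the discovery document.
config = requests.get(discovery_uri, timeout=10).json()

# A valid discovery document advertises the endpoints the OIDC flow relies on.
for key in ("issuer", "authorization_endpoint", "token_endpoint", "jwks_uri"):
    print(f"{key}: {config.get(key, 'MISSING')}")
```

If any of these fields print as missing, the URL is likely not a valid discovery endpoint and should be re-checked before moving on.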
+ +## 3. Configure DataHub Frontend Server + +### Docker + +The next step to enabling OIDC involves configuring `datahub-frontend` to enable OIDC authentication with your Identity Provider. To do so, you must update the `datahub-frontend` [docker.env](../../../../docker/datahub-frontend/env/docker.env) file with the values received from your identity provider: @@ -67,22 +209,29 @@ AUTH_OIDC_DISCOVERY_URI=your-provider-discovery-url AUTH_OIDC_BASE_URL=your-datahub-url ``` -- `AUTH_OIDC_ENABLED`: Enable delegating authentication to OIDC identity provider -- `AUTH_OIDC_CLIENT_ID`: Unique client id received from identity provider -- `AUTH_OIDC_CLIENT_SECRET`: Unique client secret received from identity provider -- `AUTH_OIDC_DISCOVERY_URI`: Location of the identity provider OIDC discovery API. Suffixed with `.well-known/openid-configuration` -- `AUTH_OIDC_BASE_URL`: The base URL of your DataHub deployment, e.g. https://yourorgdatahub.com (prod) or http://localhost:9002 (testing) -- `AUTH_SESSION_TTL_HOURS`: The length of time in hours before a user will be prompted to login again. Controls the actor cookie expiration time in the browser. Numeric value converted to hours, default 24. -- `MAX_SESSION_TOKEN_AGE`: Determines the expiration time of a session token. Session tokens are stateless so this determines at what time a session token may no longer be used and a valid session token can be used until this time has passed. Accepts a valid relative Java date style String, default 24h. +| Configuration | Description | Default | +| ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------- | +| AUTH_OIDC_ENABLED | Enable delegating authentication to OIDC identity provider | | +| AUTH_OIDC_CLIENT_ID | Unique client id received from identity provider | | +| AUTH_OIDC_CLIENT_SECRET | Unique client secret received from identity provider | | +| AUTH_OIDC_DISCOVERY_URI | Location of the identity provider OIDC discovery API. Suffixed with `.well-known/openid-configuration` | | +| AUTH_OIDC_BASE_URL | The base URL of your DataHub deployment, e.g. https://yourorgdatahub.com (prod) or http://localhost:9002 (testing) | | +| AUTH_SESSION_TTL_HOURS | The length of time in hours before a user will be prompted to login again. Controls the actor cookie expiration time in the browser. Numeric value converted to hours. | 24 | +| MAX_SESSION_TOKEN_AGE | Determines the expiration time of a session token. Session tokens are stateless so this determines at what time a session token may no longer be used and a valid session token can be used until this time has passed. Accepts a valid relative Java date style String. | 24h | Providing these configs will cause DataHub to delegate authentication to your identity provider, requesting the "oidc email profile" scopes and parsing the "preferred_username" claim from the authenticated profile as the DataHub CorpUser identity. +:::note + +By default, the login callback endpoint exposed by DataHub will be located at `${AUTH_OIDC_BASE_URL}/callback/oidc`. This must **exactly** match the login redirect URL you've registered with your identity provider in step 1. + +::: -> By default, the login callback endpoint exposed by DataHub will be located at `${AUTH_OIDC_BASE_URL}/callback/oidc`. 
This must **exactly** match the login redirect URL you've registered with your identity provider in step 1. +### Kubernetes -In kubernetes, you can add the above env variables in the values.yaml as follows. +In Kubernetes, you can add the above env variables in the `values.yaml` as follows. ```yaml datahub-frontend: @@ -102,20 +251,21 @@ datahub-frontend: You can also package OIDC client secrets into a k8s secret by running -```kubectl create secret generic datahub-oidc-secret --from-literal=secret=<>``` +``` +kubectl create secret generic datahub-oidc-secret --from-literal=secret=<> +``` Then set the secret env as follows. ```yaml - - name: AUTH_OIDC_CLIENT_SECRET - valueFrom: - secretKeyRef: - name: datahub-oidc-secret - key: secret +- name: AUTH_OIDC_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: datahub-oidc-secret + key: secret ``` - -#### Advanced +### Advanced OIDC Configurations You can optionally customize the flow further using advanced configurations. These allow you to specify the OIDC scopes requested, how the DataHub username is parsed from the claims returned by the identity provider, and how users and groups are extracted and provisioned from the OIDC claim set. @@ -128,23 +278,15 @@ AUTH_OIDC_SCOPE=your-custom-scope AUTH_OIDC_CLIENT_AUTHENTICATION_METHOD=authentication-method ``` -- `AUTH_OIDC_USER_NAME_CLAIM`: The attribute that will contain the username used on the DataHub platform. By default, this is "email" provided - as part of the standard `email` scope. -- `AUTH_OIDC_USER_NAME_CLAIM_REGEX`: A regex string used for extracting the username from the userNameClaim attribute. For example, if - the userNameClaim field will contain an email address, and we want to omit the domain name suffix of the email, we can specify a custom - regex to do so. (e.g. `([^@]+)`) -- `AUTH_OIDC_SCOPE`: a string representing the scopes to be requested from the identity provider, granted by the end user. For more info, - see [OpenID Connect Scopes](https://auth0.com/docs/scopes/openid-connect-scopes). -- `AUTH_OIDC_CLIENT_AUTHENTICATION_METHOD`: a string representing the token authentication method to use with the identity provider. Default value - is `client_secret_basic`, which uses HTTP Basic authentication. Another option is `client_secret_post`, which includes the client_id and secret_id - as form parameters in the HTTP POST request. For more info, see [OAuth 2.0 Client Authentication](https://darutk.medium.com/oauth-2-0-client-authentication-4b5f929305d4) - -Additional OIDC Options: +| Configuration | Description | Default | +| -------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------- | +| AUTH_OIDC_USER_NAME_CLAIM | The attribute that will contain the username used on the DataHub platform. By default, this is "email" providedas part of the standard `email` scope. | | +| AUTH_OIDC_USER_NAME_CLAIM_REGEX | A regex string used for extracting the username from the userNameClaim attribute. For example, if the userNameClaim field will contain an email address, and we want to omit the domain name suffix of the email, we can specify a customregex to do so. (e.g. 
`([^@]+)`) | | +| AUTH_OIDC_SCOPE | A string representing the scopes to be requested from the identity provider, granted by the end user. For more info, see [OpenID Connect Scopes](https://auth0.com/docs/scopes/openid-connect-scopes). | | +| AUTH_OIDC_CLIENT_AUTHENTICATION_METHOD | a string representing the token authentication method to use with the identity provider. Default value is `client_secret_basic`, which uses HTTP Basic authentication. Another option is `client_secret_post`, which includes the client_id and secret_id as form parameters in the HTTP POST request. For more info, see [OAuth 2.0 Client Authentication](https://darutk.medium.com/oauth-2-0-client-authentication-4b5f929305d4) | client_secret_basic | +| AUTH_OIDC_PREFERRED_JWS_ALGORITHM | Can be used to select a preferred signing algorithm for id tokens. Examples include: `RS256` or `HS256`. If your IdP includes `none` before `RS256`/`HS256` in the list of signing algorithms, then this value **MUST** be set. | | -- `AUTH_OIDC_PREFERRED_JWS_ALGORITHM` - Can be used to select a preferred signing algorithm for id tokens. Examples include: `RS256` or `HS256`. If -your IdP includes `none` before `RS256`/`HS256` in the list of signing algorithms, then this value **MUST** be set. - -##### User & Group Provisioning (JIT Provisioning) +### User & Group Provisioning (JIT Provisioning) By default, DataHub will optimistically attempt to provision users and groups that do not already exist at the time of login. For users, we extract information like first name, last name, display name, & email to construct a basic user profile. If a groups claim is present, @@ -160,26 +302,30 @@ AUTH_OIDC_EXTRACT_GROUPS_ENABLED=false AUTH_OIDC_GROUPS_CLAIM= ``` -- `AUTH_OIDC_JIT_PROVISIONING_ENABLED`: Whether DataHub users & groups should be provisioned on login if they do not exist. Defaults to true. -- `AUTH_OIDC_PRE_PROVISIONING_REQUIRED`: Whether the user should already exist in DataHub when they login, failing login if they are not. This is appropriate for situations in which users and groups are batch ingested and tightly controlled inside your environment. Defaults to false. -- `AUTH_OIDC_EXTRACT_GROUPS_ENABLED`: Only applies if `AUTH_OIDC_JIT_PROVISIONING_ENABLED` is set to true. This determines whether we should attempt to extract a list of group names from a particular claim in the OIDC attributes. Note that if this is enabled, each login will re-sync group membership with the groups in your Identity Provider, clearing the group membership that has been assigned through the DataHub UI. Enable with care! Defaults to false. -- `AUTH_OIDC_GROUPS_CLAIM`: Only applies if `AUTH_OIDC_EXTRACT_GROUPS_ENABLED` is set to true. This determines which OIDC claims will contain a list of string group names. Accepts multiple claim names with comma-separated values. I.e: `groups, teams, departments`. Defaults to 'groups'. +| Configuration | Description | Default | +| ----------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | +| AUTH_OIDC_JIT_PROVISIONING_ENABLED | Whether DataHub users & groups should be provisioned on login if they do not exist. 
| true | +| AUTH_OIDC_PRE_PROVISIONING_REQUIRED | Whether the user should already exist in DataHub when they login, failing login if they are not. This is appropriate for situations in which users and groups are batch ingested and tightly controlled inside your environment. | false | +| AUTH_OIDC_EXTRACT_GROUPS_ENABLED | Only applies if `AUTH_OIDC_JIT_PROVISIONING_ENABLED` is set to true. This determines whether we should attempt to extract a list of group names from a particular claim in the OIDC attributes. Note that if this is enabled, each login will re-sync group membership with the groups in your Identity Provider, clearing the group membership that has been assigned through the DataHub UI. Enable with care! | false | +| AUTH_OIDC_GROUPS_CLAIM | Only applies if `AUTH_OIDC_EXTRACT_GROUPS_ENABLED` is set to true. This determines which OIDC claims will contain a list of string group names. Accepts multiple claim names with comma-separated values. I.e: `groups, teams, departments`. | groups | +## 4. Restart datahub-frontend-react -Once configuration has been updated, `datahub-frontend-react` will need to be restarted to pick up the new environment variables: +Once configured, restarting the `datahub-frontend-react` container will enable an indirect authentication flow in which DataHub delegates authentication to the specified identity provider. ``` docker-compose -p datahub -f docker-compose.yml -f docker-compose.override.yml up datahub-frontend-react ``` ->Note that by default, enabling OIDC will *not* disable the dummy JAAS authentication path, which can be reached at the `/login` -route of the React app. To disable this authentication path, additionally specify the following config: -> `AUTH_JAAS_ENABLED=false` +Navigate to your DataHub domain to see SSO in action. -### Summary +:::caution +By default, enabling OIDC will _not_ disable the dummy JAAS authentication path, which can be reached at the `/login` +route of the React app. To disable this authentication path, additionally specify the following config: +`AUTH_JAAS_ENABLED=false` +::: -Once configured, deploying the `datahub-frontend-react` container will enable an indirect authentication flow in which DataHub delegates -authentication to the specified identity provider. +## Summary Once a user is authenticated by the identity provider, DataHub will extract a username from the provided claims and grant DataHub access to the user by setting a pair of session cookies. @@ -196,44 +342,45 @@ A brief summary of the steps that occur when the user navigates to the React app 7. DataHub sets session cookies for the newly authenticated user 8. DataHub redirects the user to the homepage ("/") -## FAQ +## Troubleshooting -**No users can log in. Instead, I get redirected to the login page with an error. What do I do?** +
+<details>
+<summary>
+No users can log in. Instead, I get redirected to the login page with an error. What do I do?
+</summary>

 This can occur for a variety of reasons, but most often it is due to misconfiguration of Single-Sign On, either on the DataHub
-side or on the Identity Provider side.
-
-First, verify that all values are consistent across them (e.g. the host URL where DataHub is deployed), and that no values
-are misspelled (client id, client secret).
+side or on the Identity Provider side.

-Next, verify that the scopes requested are supported by your Identity Provider
-and that the claim (i.e. attribute) DataHub uses for uniquely identifying the user is supported by your Identity Provider (refer to Identity Provider OpenID Connect documentation). By default, this claim is `email`.
+- Verify that all values are consistent across them (e.g. the host URL where DataHub is deployed), and that no values are misspelled (client id, client secret).
+- Verify that the scopes requested are supported by your Identity Provider and that the claim (i.e. attribute) DataHub uses for uniquely identifying the user is supported by your Identity Provider (refer to Identity Provider OpenID Connect documentation). By default, this claim is `email`.
+- Make sure the Discovery URI you've configured (`AUTH_OIDC_DISCOVERY_URI`) is accessible where the datahub-frontend container is running. You can do this by issuing a basic CURL to the address (**Pro-Tip**: you may also visit the address in your browser to check more specific details about your Identity Provider).
+- Check the container logs for the `datahub-frontend` container. This should hopefully provide some additional context around why exactly the login handoff is not working.

-Then, make sure the Discovery URI you've configured (`AUTH_OIDC_DISCOVERY_URI`) is accessible where the datahub-frontend container is running. You
-can do this by issuing a basic CURL to the address (**Pro-Tip**: you may also visit the address in your browser to check more specific details about your Identity Provider).
+If all else fails, feel free to reach out to the DataHub Community on Slack for real-time support.

-Finally, check the container logs for the `datahub-frontend` container. This should hopefully provide some additional context
-around why exactly the login handoff is not working.
+</details>
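+The "basic CURL" suggestion above can also be scripted. A short sketch using only the Python standard library (the discovery URI below is a placeholder; substitute your `AUTH_OIDC_DISCOVERY_URI` value):
+
+```python
+import json
+import urllib.request
+
+# Placeholder; substitute the value of AUTH_OIDC_DISCOVERY_URI.
+DISCOVERY_URI = "https://idp.example.com/.well-known/openid-configuration"
+
+with urllib.request.urlopen(DISCOVERY_URI, timeout=10) as resp:
+    config = json.load(resp)
+
+# A healthy discovery document advertises these endpoints, among others.
+for key in ("authorization_endpoint", "token_endpoint", "jwks_uri"):
+    print(key, "->", config.get(key, "<missing>"))
+```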
-If all else fails, feel free to reach out to the DataHub Community on Slack for
-real-time support
-
-
-
-**I'm seeing an error in the `datahub-frontend` logs when a user tries to login**
-```shell
-Caused by: java.lang.RuntimeException: Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: 'email', Regex: '(.*)', Profile: { ...
-```
-**what do I do?**
+<details>
+<summary>
+I'm seeing an error in the `datahub-frontend` logs when a user tries to login: Caused by: java.lang.RuntimeException: Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: 'email', Regex: '(.*)', Profile: { ....
+</summary>

 This indicates that your Identity Provider does not provide the claim with name 'email', which DataHub uses by default to uniquely
 identify users within your organization.

-To fix this, you may need to
+To fix this, you may need to

-1. Change the claim that is used as the unique user identifier to something else by changing the `AUTH_OIDC_USER_NAME_CLAIM` (e.g. to "name" or "preferred_username") _OR_
+1. Change the claim that is used as the unique user identifier to something else by changing the `AUTH_OIDC_USER_NAME_CLAIM` (e.g. to "name" or "preferred_username") _OR_
 2. Change the environment variable `AUTH_OIDC_SCOPE` to include the scope required to retrieve the claim with name "email"

-For the `datahub-frontend` container / pod.
+For the `datahub-frontend` container / pod.

+</details>
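+To see why the regex itself is rarely the problem, here is a simplified, self-contained illustration of the claim-resolution behavior described above. The actual check lives in the datahub-frontend service; this Python sketch only mirrors the idea:
+
+```python
+import re
+
+# Hypothetical IdP profile that has no 'email' claim, which is exactly the
+# situation the quoted RuntimeException describes.
+profile = {"name": "Jane Doe", "preferred_username": "jdoe"}
+
+
+def resolve_username(claim: str, regex: str = "(.*)") -> str:
+    value = profile.get(claim)
+    if value is None:
+        raise RuntimeError(f"Missing attribute. Attribute: '{claim}', Regex: '{regex}'")
+    return re.match(regex, value).group(1)
+
+
+try:
+    resolve_username("email")  # fails, like the log line above
+except RuntimeError as err:
+    print(err)
+
+# Succeeds once AUTH_OIDC_USER_NAME_CLAIM points at a claim the IdP does send:
+print(resolve_username("preferred_username"))
+```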
+ +## Reference -**Pro-Tip**: Check the documentation for your Identity Provider to learn more about the scope claims supported. +Check the documentation for your Identity Provider to learn more about the scope claims supported. + +- [Registering an App in Okta](https://developer.okta.com/docs/guides/add-an-external-idp/openidconnect/main/) +- [OpenID Connect in Google Identity](https://developers.google.com/identity/protocols/oauth2/openid-connect) +- [OpenID Connect authentication with Azure Active Directory](https://docs.microsoft.com/en-us/azure/active-directory/fundamentals/auth-oidc) +- [Keycloak - Securing Applications and Services Guide](https://www.keycloak.org/docs/latest/securing_apps/) From ec9725026dca7b89d6a6464ea9b5c547debf42e5 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 2 Nov 2023 09:39:08 -0700 Subject: [PATCH 048/792] chore(ingest): remove legacy memory_leak_detector (#9158) --- .../src/datahub/cli/ingest_cli.py | 4 - metadata-ingestion/src/datahub/entrypoints.py | 15 --- .../ingestion/source/looker/looker_config.py | 6 +- .../datahub/utilities/memory_leak_detector.py | 106 ------------------ .../tests/integration/snowflake/common.py | 3 +- .../tests/unit/test_snowflake_source.py | 15 +-- 6 files changed, 10 insertions(+), 139 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/utilities/memory_leak_detector.py diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index 9b5716408f3e4..dd0287004a368 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -27,7 +27,6 @@ from datahub.ingestion.run.pipeline import Pipeline from datahub.telemetry import telemetry from datahub.upgrade import upgrade -from datahub.utilities import memory_leak_detector logger = logging.getLogger(__name__) @@ -98,7 +97,6 @@ def ingest() -> None: @click.option( "--no-spinner", type=bool, is_flag=True, default=False, help="Turn off spinner" ) -@click.pass_context @telemetry.with_telemetry( capture_kwargs=[ "dry_run", @@ -109,9 +107,7 @@ def ingest() -> None: "no_spinner", ] ) -@memory_leak_detector.with_leak_detection def run( - ctx: click.Context, config: str, dry_run: bool, preview: bool, diff --git a/metadata-ingestion/src/datahub/entrypoints.py b/metadata-ingestion/src/datahub/entrypoints.py index 5bfab3b841fa3..0cd37cc939854 100644 --- a/metadata-ingestion/src/datahub/entrypoints.py +++ b/metadata-ingestion/src/datahub/entrypoints.py @@ -70,21 +70,10 @@ version=datahub_package.nice_version_name(), prog_name=datahub_package.__package_name__, ) -@click.option( - "-dl", - "--detect-memory-leaks", - type=bool, - is_flag=True, - default=False, - help="Run memory leak detection.", -) -@click.pass_context def datahub( - ctx: click.Context, debug: bool, log_file: Optional[str], debug_vars: bool, - detect_memory_leaks: bool, ) -> None: if debug_vars: # debug_vars implies debug. This option isn't actually used here, but instead @@ -109,10 +98,6 @@ def datahub( _logging_configured = configure_logging(debug=debug, log_file=log_file) _logging_configured.__enter__() - # Setup the context for the memory_leak_detector decorator. 
- ctx.ensure_object(dict) - ctx.obj["detect_memory_leaks"] = detect_memory_leaks - @datahub.command() @telemetry.with_telemetry() diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index 96c405f7257d0..98d58c9fc9d87 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -121,7 +121,10 @@ class LookerCommonConfig(DatasetSourceConfigMixin): "discoverable. When disabled, adds this information to the description of the column.", ) platform_name: str = Field( - "looker", description="Default platform name. Don't change." + # TODO: This shouldn't be part of the config. + "looker", + description="Default platform name.", + hidden_from_docs=True, ) extract_column_level_lineage: bool = Field( True, @@ -213,7 +216,6 @@ def external_url_defaults_to_api_config_base_url( def stateful_ingestion_should_be_enabled( cls, v: Optional[bool], *, values: Dict[str, Any], **kwargs: Dict[str, Any] ) -> Optional[bool]: - stateful_ingestion: StatefulStaleMetadataRemovalConfig = cast( StatefulStaleMetadataRemovalConfig, values.get("stateful_ingestion") ) diff --git a/metadata-ingestion/src/datahub/utilities/memory_leak_detector.py b/metadata-ingestion/src/datahub/utilities/memory_leak_detector.py deleted file mode 100644 index 85ad0fb4938eb..0000000000000 --- a/metadata-ingestion/src/datahub/utilities/memory_leak_detector.py +++ /dev/null @@ -1,106 +0,0 @@ -import fnmatch -import gc -import logging -import sys -import tracemalloc -from collections import defaultdict -from functools import wraps -from typing import Any, Callable, Dict, List, TypeVar, Union, cast - -import click -from typing_extensions import Concatenate, ParamSpec - -logger = logging.getLogger(__name__) -T = TypeVar("T") -P = ParamSpec("P") - - -def _trace_has_file(trace: tracemalloc.Traceback, file_pattern: str) -> bool: - for frame_index in range(len(trace)): - cur_frame = trace[frame_index] - if fnmatch.fnmatch(cur_frame.filename, file_pattern): - return True - return False - - -def _init_leak_detection() -> None: - # Initialize trace malloc to track up to 25 stack frames. - tracemalloc.start(25) - if sys.version_info >= (3, 9): - # Nice to reset peak to 0. Available for versions >= 3.9. - tracemalloc.reset_peak() - # Enable leak debugging in the garbage collector. - gc.set_debug(gc.DEBUG_LEAK) - - -def _perform_leak_detection() -> None: - # Log potentially useful memory usage metrics - logger.info(f"GC count before collect {gc.get_count()}") - traced_memory_size, traced_memory_peak = tracemalloc.get_traced_memory() - logger.info(f"Traced Memory: size={traced_memory_size}, peak={traced_memory_peak}") - num_unreacheable_objects = gc.collect() - logger.info(f"Number of unreachable objects = {num_unreacheable_objects}") - logger.info(f"GC count after collect {gc.get_count()}") - - # Collect unique traces of all live objects in the garbage - these have potential leaks. 
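The detector being deleted here is built on the standard `tracemalloc` module: it records an allocation traceback per object and then, as the surrounding lines show, groups suspect objects by that traceback. As a standalone illustration of the underlying API (a sketch only, not DataHub code):

```python
import tracemalloc

tracemalloc.start(25)  # keep up to 25 stack frames per allocation, as the detector did

retained = [bytearray(1024) for _ in range(100)]  # stand-in for a "leak"

snapshot = tracemalloc.take_snapshot()
for stat in snapshot.statistics("traceback")[:3]:
    print(f"{stat.count} blocks, {stat.size} bytes, allocated at:")
    for line in stat.traceback.format(limit=3):
        print(line)

tracemalloc.stop()
```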
- unique_traces_to_objects: Dict[ - Union[tracemalloc.Traceback, int], List[object] - ] = defaultdict(list) - for obj in gc.garbage: - obj_trace = tracemalloc.get_object_traceback(obj) - if obj_trace is not None: - if _trace_has_file(obj_trace, "*datahub/*.py"): - # Leaking object - unique_traces_to_objects[obj_trace].append(obj) - else: - unique_traces_to_objects[id(obj)].append(obj) - logger.info("Potentially leaking objects start") - for key, obj_list in sorted( - unique_traces_to_objects.items(), - key=lambda item: sum( - [sys.getsizeof(o) for o in item[1]] - ), # TODO: add support for deep sizeof - reverse=True, - ): - if isinstance(key, tracemalloc.Traceback): - obj_traceback: tracemalloc.Traceback = cast(tracemalloc.Traceback, key) - logger.info( - f"#Objects:{len(obj_list)}; Total memory:{sum([sys.getsizeof(obj) for obj in obj_list])};" - + " Allocation Trace:\n\t" - + "\n\t".join(obj_traceback.format(limit=25)) - ) - else: - logger.info( - f"#Objects:{len(obj_list)}; Total memory:{sum([sys.getsizeof(obj) for obj in obj_list])};" - + " No Allocation Trace available!" - ) - logger.info("Potentially leaking objects end") - - tracemalloc.stop() - - -def with_leak_detection( - func: Callable[Concatenate[click.Context, P], T] -) -> Callable[Concatenate[click.Context, P], T]: - @wraps(func) - def wrapper(ctx: click.Context, *args: P.args, **kwargs: P.kwargs) -> Any: - detect_leaks: bool = ctx.obj.get("detect_memory_leaks", False) - if detect_leaks: - logger.info( - f"Initializing memory leak detection on command: {func.__module__}.{func.__name__}" - ) - _init_leak_detection() - - try: - return func(ctx, *args, **kwargs) - finally: - if detect_leaks: - logger.info( - f"Starting memory leak detection on command: {func.__module__}.{func.__name__}" - ) - _perform_leak_detection() - logger.info( - f"Finished memory leak detection on command: {func.__module__}.{func.__name__}" - ) - - return wrapper diff --git a/metadata-ingestion/tests/integration/snowflake/common.py b/metadata-ingestion/tests/integration/snowflake/common.py index ff448eca01071..78e5499697311 100644 --- a/metadata-ingestion/tests/integration/snowflake/common.py +++ b/metadata-ingestion/tests/integration/snowflake/common.py @@ -565,5 +565,4 @@ def default_query_results( # noqa: C901 "DOMAIN": "DATABASE", }, ] - # Unreachable code - raise Exception(f"Unknown query {query}") + raise ValueError(f"Unexpected query: {query}") diff --git a/metadata-ingestion/tests/unit/test_snowflake_source.py b/metadata-ingestion/tests/unit/test_snowflake_source.py index 888a7c0441554..aaff878b81eee 100644 --- a/metadata-ingestion/tests/unit/test_snowflake_source.py +++ b/metadata-ingestion/tests/unit/test_snowflake_source.py @@ -368,8 +368,7 @@ def default_query_results(query): return [('{"roles":"","value":""}',)] elif query == "select current_warehouse()": return [("TEST_WAREHOUSE")] - # Unreachable code - raise Exception() + raise ValueError(f"Unexpected query: {query}") connection_mock = MagicMock() cursor_mock = MagicMock() @@ -397,8 +396,7 @@ def query_results(query): ] elif query == 'show grants to role "PUBLIC"': return [] - # Unreachable code - raise Exception() + raise ValueError(f"Unexpected query: {query}") config = { "username": "user", @@ -441,8 +439,7 @@ def query_results(query): return [("", "USAGE", "DATABASE", "DB1")] elif query == 'show grants to role "PUBLIC"': return [] - # Unreachable code - raise Exception() + raise ValueError(f"Unexpected query: {query}") setup_mock_connect(mock_connect, query_results) @@ -485,8 +482,7 @@ 
def query_results(query): ] elif query == 'show grants to role "PUBLIC"': return [] - # Unreachable code - raise Exception() + raise ValueError(f"Unexpected query: {query}") setup_mock_connect(mock_connect, query_results) @@ -536,8 +532,7 @@ def query_results(query): ["", "USAGE", "VIEW", "SNOWFLAKE.ACCOUNT_USAGE.ACCESS_HISTORY"], ["", "USAGE", "VIEW", "SNOWFLAKE.ACCOUNT_USAGE.OBJECT_DEPENDENCIES"], ] - # Unreachable code - raise Exception() + raise ValueError(f"Unexpected query: {query}") setup_mock_connect(mock_connect, query_results) From 148ad1ad9f00d6eb43d6acb270b9a90a745c8af3 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 2 Nov 2023 09:44:35 -0700 Subject: [PATCH 049/792] feat(ingest/looker): support emitting unused explores (#9159) --- .../ingestion/source/looker/looker_common.py | 2 +- .../ingestion/source/looker/looker_config.py | 4 ++ .../source/looker/looker_lib_wrapper.py | 7 +++ .../ingestion/source/looker/looker_source.py | 46 +++++++++++++------ 4 files changed, 45 insertions(+), 14 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py index 30c38720dd96c..7ca5ce49019ab 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py @@ -388,7 +388,7 @@ def _get_field_type( # if still not found, log and continue if type_class is None: - logger.info( + logger.debug( f"The type '{native_type}' is not recognized for field type, setting as NullTypeClass.", ) type_class = NullTypeClass diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index 98d58c9fc9d87..e6ddea9a30489 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -205,6 +205,10 @@ class LookerDashboardSourceConfig( False, description="Extract looks which are not part of any Dashboard. 
To enable this flag the stateful_ingestion should also be enabled.", ) + emit_used_explores_only: bool = Field( + True, + description="When enabled, only explores that are used by a Dashboard/Look will be ingested.", + ) @validator("external_base_url", pre=True, always=True) def external_url_defaults_to_api_config_base_url( diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py index b00f74b71e792..988caba1c0d74 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py @@ -59,6 +59,7 @@ class LookerAPIStats(BaseModel): lookml_model_calls: int = 0 all_dashboards_calls: int = 0 all_looks_calls: int = 0 + all_models_calls: int = 0 get_query_calls: int = 0 search_looks_calls: int = 0 search_dashboards_calls: int = 0 @@ -155,6 +156,12 @@ def dashboard(self, dashboard_id: str, fields: Union[str, List[str]]) -> Dashboa transport_options=self.transport_options, ) + def all_lookml_models(self) -> Sequence[LookmlModel]: + self.client_stats.all_models_calls += 1 + return self.client.all_lookml_models( + transport_options=self.transport_options, + ) + def lookml_model_explore(self, model: str, explore_name: str) -> LookmlModelExplore: self.client_stats.explore_calls += 1 return self.client.lookml_model_explore( diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 09683d790c14c..4a98e8874bca0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -147,9 +147,12 @@ def __init__(self, config: LookerDashboardSourceConfig, ctx: PipelineContext): ) self.reporter._looker_explore_registry = self.explore_registry self.reporter._looker_api = self.looker_api + self.reachable_look_registry = set() - self.explores_to_fetch_set: Dict[Tuple[str, str], List[str]] = {} + # (model, explore) -> list of charts/looks/dashboards that reference this explore + # The list values are used purely for debugging purposes. 
+ self.reachable_explores: Dict[Tuple[str, str], List[str]] = {} # Keep stat generators to generate entity stat aspect later stat_generator_config: looker_usage.StatGeneratorConfig = ( @@ -378,11 +381,11 @@ def _get_input_fields_from_query( return result - def add_explore_to_fetch(self, model: str, explore: str, via: str) -> None: - if (model, explore) not in self.explores_to_fetch_set: - self.explores_to_fetch_set[(model, explore)] = [] + def add_reachable_explore(self, model: str, explore: str, via: str) -> None: + if (model, explore) not in self.reachable_explores: + self.reachable_explores[(model, explore)] = [] - self.explores_to_fetch_set[(model, explore)].append(via) + self.reachable_explores[(model, explore)].append(via) def _get_looker_dashboard_element( # noqa: C901 self, element: DashboardElement @@ -403,7 +406,7 @@ def _get_looker_dashboard_element( # noqa: C901 f"Element {element.title}: Explores added via query: {explores}" ) for exp in explores: - self.add_explore_to_fetch( + self.add_reachable_explore( model=element.query.model, explore=exp, via=f"look:{element.look_id}:query:{element.dashboard_id}", @@ -439,7 +442,7 @@ def _get_looker_dashboard_element( # noqa: C901 explores = [element.look.query.view] logger.debug(f"Element {title}: Explores added via look: {explores}") for exp in explores: - self.add_explore_to_fetch( + self.add_reachable_explore( model=element.look.query.model, explore=exp, via=f"Look:{element.look_id}:query:{element.dashboard_id}", @@ -483,7 +486,7 @@ def _get_looker_dashboard_element( # noqa: C901 ) for exp in explores: - self.add_explore_to_fetch( + self.add_reachable_explore( model=element.result_maker.query.model, explore=exp, via=f"Look:{element.look_id}:resultmaker:query", @@ -495,7 +498,7 @@ def _get_looker_dashboard_element( # noqa: C901 if filterable.view is not None and filterable.model is not None: model = filterable.model explores.append(filterable.view) - self.add_explore_to_fetch( + self.add_reachable_explore( model=filterable.model, explore=filterable.view, via=f"Look:{element.look_id}:resultmaker:filterable", @@ -694,20 +697,26 @@ def _make_dashboard_metadata_events( def _make_explore_metadata_events( self, ) -> Iterable[Union[MetadataChangeEvent, MetadataChangeProposalWrapper]]: + if self.source_config.emit_used_explores_only: + explores_to_fetch = list(self.reachable_explores.keys()) + else: + explores_to_fetch = list(self.list_all_explores()) + explores_to_fetch.sort() + with concurrent.futures.ThreadPoolExecutor( max_workers=self.source_config.max_threads ) as async_executor: - self.reporter.total_explores = len(self.explores_to_fetch_set) + self.reporter.total_explores = len(explores_to_fetch) explore_futures = { async_executor.submit(self.fetch_one_explore, model, explore): ( model, explore, ) - for (model, explore) in self.explores_to_fetch_set + for (model, explore) in explores_to_fetch } - for future in concurrent.futures.as_completed(explore_futures): + for future in concurrent.futures.wait(explore_futures).done: events, explore_id, start_time, end_time = future.result() del explore_futures[future] self.reporter.explores_scanned += 1 @@ -717,6 +726,17 @@ def _make_explore_metadata_events( f"Running time of fetch_one_explore for {explore_id}: {(end_time - start_time).total_seconds()}" ) + def list_all_explores(self) -> Iterable[Tuple[str, str]]: + # returns a list of (model, explore) tuples + + for model in self.looker_api.all_lookml_models(): + if model.name is None or model.explores is None: + continue + for explore in 
model.explores: + if explore.name is None: + continue + yield (model.name, explore.name) + def fetch_one_explore( self, model: str, explore: str ) -> Tuple[ @@ -954,7 +974,7 @@ def _input_fields_from_dashboard_element( ) if explore is not None: # add this to the list of explores to finally generate metadata for - self.add_explore_to_fetch( + self.add_reachable_explore( input_field.model, input_field.explore, entity_urn ) entity_urn = explore.get_explore_urn(self.source_config) From 7ff48b37aaea165ba3c3cb6f9f9f742ea2e37654 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 3 Nov 2023 10:23:37 -0500 Subject: [PATCH 050/792] refactor(policy): refactor policy locking, no functional difference (#9163) --- .../authorization/DataHubAuthorizer.java | 111 +++++++++--------- 1 file changed, 55 insertions(+), 56 deletions(-) diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index e30fb93109915..f8b28f6c182a7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -19,6 +19,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.annotation.Nonnull; @@ -55,7 +56,8 @@ public enum AuthorizationMode { // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. private final Map> _policyCache = new HashMap<>(); // Shared Policy Cache. - private final ReadWriteLock _lockPolicyCache = new ReentrantReadWriteLock(); + private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); + private final Lock readLock = readWriteLock.readLock(); private final ScheduledExecutorService _refreshExecutorService = Executors.newScheduledThreadPool(1); private final PolicyRefreshRunnable _policyRefreshRunnable; @@ -74,7 +76,7 @@ public DataHubAuthorizer( _systemAuthentication = Objects.requireNonNull(systemAuthentication); _mode = Objects.requireNonNull(mode); _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, _lockPolicyCache); + _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, readWriteLock.writeLock()); _refreshExecutorService.scheduleAtFixedRate(_policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); } @@ -93,41 +95,30 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request Optional resolvedResourceSpec = request.getResourceSpec().map(_entitySpecResolver::resolve); - _lockPolicyCache.readLock().lock(); - try { - // 1. Fetch the policies relevant to the requested privilege. - final List policiesToEvaluate = _policyCache.getOrDefault(request.getPrivilege(), new ArrayList<>()); - - // 2. Evaluate each policy. 
- for (DataHubPolicyInfo policy : policiesToEvaluate) { - if (isRequestGranted(policy, request, resolvedResourceSpec)) { - // Short circuit if policy has granted privileges to this actor. - return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, - String.format("Granted by policy with type: %s", policy.getType())); - } + // 1. Fetch the policies relevant to the requested privilege. + final List policiesToEvaluate = getOrDefault(request.getPrivilege(), new ArrayList<>()); + + // 2. Evaluate each policy. + for (DataHubPolicyInfo policy : policiesToEvaluate) { + if (isRequestGranted(policy, request, resolvedResourceSpec)) { + // Short circuit if policy has granted privileges to this actor. + return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, + String.format("Granted by policy with type: %s", policy.getType())); } - return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null); - } finally { - _lockPolicyCache.readLock().unlock(); } + return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null); } public List getGrantedPrivileges(final String actor, final Optional resourceSpec) { + // 1. Fetch all policies + final List policiesToEvaluate = getOrDefault(ALL, new ArrayList<>()); - _lockPolicyCache.readLock().lock(); - try { - // 1. Fetch all policies - final List policiesToEvaluate = _policyCache.getOrDefault(ALL, new ArrayList<>()); - - Urn actorUrn = UrnUtils.getUrn(actor); - final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); + Urn actorUrn = UrnUtils.getUrn(actor); + final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); - Optional resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); + Optional resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); - return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); - } finally { - _lockPolicyCache.readLock().unlock(); - } + return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); } /** @@ -143,36 +134,31 @@ public AuthorizedActors authorizedActors( boolean allUsers = false; boolean allGroups = false; - _lockPolicyCache.readLock().lock(); - try { - // Step 1: Find policies granting the privilege. - final List policiesToEvaluate = _policyCache.getOrDefault(privilege, new ArrayList<>()); - - Optional resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); + // Step 1: Find policies granting the privilege. + final List policiesToEvaluate = getOrDefault(privilege, new ArrayList<>()); + Optional resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); - // Step 2: For each policy, determine whether the resource is a match. - for (DataHubPolicyInfo policy : policiesToEvaluate) { - if (!PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) { - // Policy is not active, skip. - continue; - } + // Step 2: For each policy, determine whether the resource is a match. + for (DataHubPolicyInfo policy : policiesToEvaluate) { + if (!PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) { + // Policy is not active, skip. 
+ continue; + } - final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec); + final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec); - // Step 3: For each matching policy, add actors that are authorized. - authorizedUsers.addAll(matchingActors.getUsers()); - authorizedGroups.addAll(matchingActors.getGroups()); - if (matchingActors.allUsers()) { - allUsers = true; - } - if (matchingActors.allGroups()) { - allGroups = true; - } + // Step 3: For each matching policy, add actors that are authorized. + authorizedUsers.addAll(matchingActors.getUsers()); + authorizedGroups.addAll(matchingActors.getGroups()); + if (matchingActors.allUsers()) { + allUsers = true; + } + if (matchingActors.allGroups()) { + allGroups = true; } - } finally { - _lockPolicyCache.readLock().unlock(); } + // Step 4: Return all authorized users and groups. return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, allUsers, allGroups); } @@ -234,6 +220,16 @@ private Optional getUrnFromRequestActor(String actor) { } } + private List getOrDefault(String key, List defaultValue) { + readLock.lock(); + try { + return _policyCache.getOrDefault(key, defaultValue); + } finally { + // To unlock the acquired read thread + readLock.unlock(); + } + } + /** * A {@link Runnable} used to periodically fetch a new instance of the policies Cache. * @@ -247,7 +243,7 @@ static class PolicyRefreshRunnable implements Runnable { private final Authentication _systemAuthentication; private final PolicyFetcher _policyFetcher; private final Map> _policyCache; - private final ReadWriteLock _lockPolicyCache; + private final Lock writeLock; @Override public void run() { @@ -274,13 +270,16 @@ public void run() { return; } } - _lockPolicyCache.writeLock().lock(); + + writeLock.lock(); try { _policyCache.clear(); _policyCache.putAll(newCache); } finally { - _lockPolicyCache.writeLock().unlock(); + // To unlock the acquired write thread + writeLock.unlock(); } + log.debug(String.format("Successfully fetched %s policies.", total)); } catch (Exception e) { log.error("Caught exception while loading Policy cache. 
Will retry on next scheduled attempt.", e); From 07311115c5ca436f64fad9c685cfc586cc5d4180 Mon Sep 17 00:00:00 2001 From: Kos Korchak <97058061+kkorchak@users.noreply.github.com> Date: Fri, 3 Nov 2023 13:00:15 -0400 Subject: [PATCH 051/792] API test for managing access token privilege (#9167) --- .../tests/privileges/test_privileges.py | 155 ++++++++++++++---- 1 file changed, 127 insertions(+), 28 deletions(-) diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index 13d6b6cf3415a..740311754678e 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -52,6 +52,20 @@ def privileges_and_test_user_setup(admin_session): wait_for_writes_to_sync() +@tenacity.retry( + stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) +) +def _ensure_cant_perform_action(session, json,assertion_key): + action_response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=json) + action_response.raise_for_status() + action_data = action_response.json() + + assert action_data["errors"][0]["extensions"]["code"] == 403 + assert action_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" + assert action_data["data"][assertion_key] == None + + @tenacity.retry( stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) ) @@ -67,20 +81,6 @@ def _ensure_can_create_secret(session, json, urn): assert secret_data["data"]["createSecret"] == urn -@tenacity.retry( - stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) -) -def _ensure_cant_create_secret(session, json): - create_secret_response = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=json) - create_secret_response.raise_for_status() - create_secret_data = create_secret_response.json() - - assert create_secret_data["errors"][0]["extensions"]["code"] == 403 - assert create_secret_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" - assert create_secret_data["data"]["createSecret"] == None - - @tenacity.retry( stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) ) @@ -99,17 +99,19 @@ def _ensure_can_create_ingestion_source(session, json): @tenacity.retry( - stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) + stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) ) -def _ensure_cant_create_ingestion_source(session, json): - create_source_response = session.post( +def _ensure_can_create_access_token(session, json): + create_access_token_success = session.post( f"{get_frontend_url()}/api/v2/graphql", json=json) - create_source_response.raise_for_status() - create_source_data = create_source_response.json() + create_access_token_success.raise_for_status() + ingestion_data = create_access_token_success.json() - assert create_source_data["errors"][0]["extensions"]["code"] == 403 - assert create_source_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" - assert create_source_data["data"]["createIngestionSource"] == None + assert ingestion_data + assert ingestion_data["data"] + assert ingestion_data["data"]["createAccessToken"] + assert ingestion_data["data"]["createAccessToken"]["accessToken"] is not None + assert ingestion_data["data"]["createAccessToken"]["__typename"] == "AccessToken" @pytest.mark.dependency(depends=["test_healthchecks"]) @@ -132,7 +134,7 @@ def test_privilege_to_create_and_manage_secrets(): } }, } - _ensure_cant_create_secret(user_session, create_secret) + 
_ensure_cant_perform_action(user_session, create_secret,"createSecret") # Assign privileges to the new user to manage secrets @@ -166,7 +168,7 @@ def test_privilege_to_create_and_manage_secrets(): remove_policy(policy_urn, admin_session) # Ensure user can't create secret after policy is removed - _ensure_cant_create_secret(user_session, create_secret) + _ensure_cant_perform_action(user_session, create_secret,"createSecret") @pytest.mark.dependency(depends=["test_healthchecks"]) @@ -182,11 +184,18 @@ def test_privilege_to_create_and_manage_ingestion_source(): createIngestionSource(input: $input)\n}""", "variables": {"input":{"type":"snowflake","name":"test","config": {"recipe": - "{\"source\":{\"type\":\"snowflake\",\"config\":{\"account_id\":null,\"include_table_lineage\":true,\"include_view_lineage\":true,\"include_tables\":true,\"include_views\":true,\"profiling\":{\"enabled\":true,\"profile_table_level_only\":true},\"stateful_ingestion\":{\"enabled\":true}}}}", + """{\"source\":{\"type\":\"snowflake\",\"config\":{ + \"account_id\":null, + \"include_table_lineage\":true, + \"include_view_lineage\":true, + \"include_tables\":true, + \"include_views\":true, + \"profiling\":{\"enabled\":true,\"profile_table_level_only\":true}, + \"stateful_ingestion\":{\"enabled\":true}}}}""", "executorId":"default","debugMode":False,"extraArgs":[]}}}, } - _ensure_cant_create_ingestion_source(user_session, create_ingestion_source) + _ensure_cant_perform_action(user_session, create_ingestion_source, "createIngestionSource") # Assign privileges to the new user to manage ingestion source @@ -201,7 +210,14 @@ def test_privilege_to_create_and_manage_ingestion_source(): updateIngestionSource(urn: $urn, input: $input)\n}""", "variables": {"urn":ingestion_source_urn, "input":{"type":"snowflake","name":"test updated", - "config":{"recipe":"{\"source\":{\"type\":\"snowflake\",\"config\":{\"account_id\":null,\"include_table_lineage\":true,\"include_view_lineage\":true,\"include_tables\":true,\"include_views\":true,\"profiling\":{\"enabled\":true,\"profile_table_level_only\":true},\"stateful_ingestion\":{\"enabled\":true}}}}", + "config":{"recipe":"""{\"source\":{\"type\":\"snowflake\",\"config\":{ + \"account_id\":null, + \"include_table_lineage\":true, + \"include_view_lineage\":true, + \"include_tables\":true, + \"include_views\":true, + \"profiling\":{\"enabled\":true,\"profile_table_level_only\":true}, + \"stateful_ingestion\":{\"enabled\":true}}}}""", "executorId":"default","debugMode":False,"extraArgs":[]}}} } @@ -238,4 +254,87 @@ def test_privilege_to_create_and_manage_ingestion_source(): remove_policy(policy_urn, admin_session) # Ensure that user can't create ingestion source after policy is removed - _ensure_cant_create_ingestion_source(user_session, create_ingestion_source) \ No newline at end of file + _ensure_cant_perform_action(user_session, create_ingestion_source, "createIngestionSource") + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_privilege_to_create_and_manage_access_tokens(): + + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + user_session = login_as("user", "user") + + + # Verify new user can't create access token + create_access_token = { + "query": """mutation createAccessToken($input: CreateAccessTokenInput!) 
{\n + createAccessToken(input: $input) {\n accessToken\n __typename\n }\n}\n""", + "variables": {"input":{"actorUrn":"urn:li:corpuser:user", + "type":"PERSONAL", + "duration":"ONE_MONTH", + "name":"test", + "description":"test"}} + } + + _ensure_cant_perform_action(user_session, create_access_token,"createAccessToken") + + + # Assign privileges to the new user to create and manage access tokens + policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_ACCESS_TOKENS"], admin_session) + + + # Verify new user can create and manage access token(create, revoke) + # Create a access token + _ensure_can_create_access_token(user_session, create_access_token) + + + # List access tokens first to get token id + list_access_tokens = { + "query": """query listAccessTokens($input: ListAccessTokenInput!) {\n + listAccessTokens(input: $input) {\n + start\n count\n total\n tokens {\n urn\n type\n + id\n name\n description\n actorUrn\n ownerUrn\n + createdAt\n expiresAt\n __typename\n }\n __typename\n }\n}\n""", + "variables": { + "input":{ + "start":0,"count":10,"filters":[{ + "field":"ownerUrn", + "values":["urn:li:corpuser:user"]}]} + } + } + + list_tokens_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=list_access_tokens) + list_tokens_response.raise_for_status() + list_tokens_data = list_tokens_response.json() + + assert list_tokens_data + assert list_tokens_data["data"] + assert list_tokens_data["data"]["listAccessTokens"]["tokens"][0]["id"] is not None + + access_token_id = list_tokens_data["data"]["listAccessTokens"]["tokens"][0]["id"] + + + # Revoke access token + revoke_access_token = { + "query": "mutation revokeAccessToken($tokenId: String!) {\n revokeAccessToken(tokenId: $tokenId)\n}\n", + "variables": { + "tokenId": access_token_id + }, + } + + revoke_token_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=revoke_access_token) + revoke_token_response.raise_for_status() + revoke_token_data = revoke_token_response.json() + + assert revoke_token_data + assert revoke_token_data["data"] + assert revoke_token_data["data"]["revokeAccessToken"] + assert revoke_token_data["data"]["revokeAccessToken"] is True + + + # Remove the policy + remove_policy(policy_urn, admin_session) + + + # Ensure that user can't create access token after policy is removed + _ensure_cant_perform_action(user_session, create_access_token,"createAccessToken") \ No newline at end of file From ddb4e1b5ffa01763d7d3353a506d4329faf11e25 Mon Sep 17 00:00:00 2001 From: Davi Arnaut Date: Fri, 3 Nov 2023 10:26:11 -0700 Subject: [PATCH 052/792] fix(mysql-setup): quote database name (#9169) --- docker/mysql-setup/init.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/mysql-setup/init.sql b/docker/mysql-setup/init.sql index b789329ddfd17..b6a1d47fb2a02 100644 --- a/docker/mysql-setup/init.sql +++ b/docker/mysql-setup/init.sql @@ -1,6 +1,6 @@ -- create datahub database -CREATE DATABASE IF NOT EXISTS DATAHUB_DB_NAME CHARACTER SET utf8mb4 COLLATE utf8mb4_bin; -USE DATAHUB_DB_NAME; +CREATE DATABASE IF NOT EXISTS `DATAHUB_DB_NAME` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin; +USE `DATAHUB_DB_NAME`; -- create metadata aspect table create table if not exists metadata_aspect_v2 ( From c2bc41d15eed31f89076913f641298ded5219a4f Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 3 Nov 2023 12:29:31 -0500 Subject: [PATCH 053/792] fix(health): fix health check url authentication (#9117) --- 
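Once the patch below is deployed, the health-check paths it exempts can be probed without an access token. A quick sketch; the base URL assumes a local quickstart-style GMS on port 8080, and the exact paths are taken from the allow-list introduced in the patch, so adjust both for your deployment:

```python
import urllib.request

BASE = "http://localhost:8080"  # assumed local GMS address

for path in ("/openapi/check/ready", "/openapi/check/elastic"):
    try:
        with urllib.request.urlopen(BASE + path, timeout=5) as resp:
            print(path, "->", resp.status, resp.read().decode())
    except Exception as exc:  # a probe should report failures, not crash
        print(path, "->", exc)
```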
.../authentication/AuthenticationRequest.java | 12 ++++ .../filter/AuthenticationFilter.java | 13 ++++- .../HealthStatusAuthenticator.java | 55 +++++++++++++++++++ .../src/main/resources/application.yml | 2 + metadata-service/health-servlet/build.gradle | 22 -------- .../openapi/config/SpringWebConfig.java | 2 - .../health}/HealthCheckController.java | 30 ++++++---- metadata-service/war/build.gradle | 1 - .../webapp/WEB-INF/openapiServlet-servlet.xml | 2 +- settings.gradle | 1 - 10 files changed, 101 insertions(+), 39 deletions(-) create mode 100644 metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java delete mode 100644 metadata-service/health-servlet/build.gradle rename metadata-service/{health-servlet/src/main/java/com/datahub/health/controller => openapi-servlet/src/main/java/io/datahubproject/openapi/health}/HealthCheckController.java (79%) diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java index 91f15f9d5ae61..5673bac5442b2 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java @@ -1,6 +1,8 @@ package com.datahub.authentication; import com.datahub.plugins.auth.authentication.Authenticator; +import lombok.Getter; + import java.util.Map; import java.util.Objects; import java.util.TreeMap; @@ -13,14 +15,24 @@ * Currently, this class only hold the inbound request's headers, but could certainly be extended * to contain additional information like the request parameters, body, ip, etc as needed. */ +@Getter public class AuthenticationRequest { private final Map caseInsensitiveHeaders; + private final String servletInfo; + private final String pathInfo; + public AuthenticationRequest(@Nonnull final Map requestHeaders) { + this("", "", requestHeaders); + } + + public AuthenticationRequest(@Nonnull String servletInfo, @Nonnull String pathInfo, @Nonnull final Map requestHeaders) { Objects.requireNonNull(requestHeaders); caseInsensitiveHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); caseInsensitiveHeaders.putAll(requestHeaders); + this.servletInfo = servletInfo; + this.pathInfo = pathInfo; } /** diff --git a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java index e15918a813158..8c7b3ac8b98f0 100644 --- a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java +++ b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java @@ -2,6 +2,7 @@ import com.datahub.authentication.authenticator.AuthenticatorChain; import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; +import com.datahub.authentication.authenticator.HealthStatusAuthenticator; import com.datahub.authentication.authenticator.NoOpAuthenticator; import com.datahub.authentication.token.StatefulTokenService; import com.datahub.plugins.PluginConstant; @@ -29,6 +30,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -148,7 +150,7 @@ private void buildAuthenticatorChain() 
{ } private AuthenticationRequest buildAuthContext(HttpServletRequest request) { - return new AuthenticationRequest(Collections.list(request.getHeaderNames()) + return new AuthenticationRequest(request.getServletPath(), request.getPathInfo(), Collections.list(request.getHeaderNames()) .stream() .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); } @@ -242,7 +244,14 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, final Authenticator authenticator = clazz.newInstance(); // Successfully created authenticator. Now init and register it. log.debug(String.format("Initializing Authenticator with name %s", type)); - authenticator.init(configs, authenticatorContext); + if (authenticator instanceof HealthStatusAuthenticator) { + Map authenticatorConfig = new HashMap<>(Map.of(SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId())); + authenticatorConfig.putAll(Optional.ofNullable(internalAuthenticatorConfig.getConfigs()).orElse(Collections.emptyMap())); + authenticator.init(authenticatorConfig, authenticatorContext); + } else { + authenticator.init(configs, authenticatorContext); + } log.info(String.format("Registering Authenticator with name %s", type)); authenticatorChain.register(authenticator); } catch (Exception e) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java new file mode 100644 index 0000000000000..5749eacf5d25d --- /dev/null +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java @@ -0,0 +1,55 @@ +package com.datahub.authentication.authenticator; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationException; +import com.datahub.authentication.AuthenticationRequest; +import com.datahub.authentication.AuthenticatorContext; +import com.datahub.plugins.auth.authentication.Authenticator; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; + + +/** + * This Authenticator is used for allowing access for unauthenticated health check endpoints + * + * It exists to support load balancers, liveness/readiness checks + * + */ +@Slf4j +public class HealthStatusAuthenticator implements Authenticator { + private static final Set HEALTH_ENDPOINTS = Set.of( + "/openapi/check/", + "/openapi/up/" + ); + private String systemClientId; + + @Override + public void init(@Nonnull final Map config, @Nullable final AuthenticatorContext context) { + Objects.requireNonNull(config, "Config parameter cannot be null"); + this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + } + + @Override + public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + Objects.requireNonNull(context); + if (HEALTH_ENDPOINTS.stream().anyMatch(prefix -> String.join("", context.getServletInfo(), context.getPathInfo()).startsWith(prefix))) { + return new 
Authentication( + new Actor(ActorType.USER, systemClientId), + "", + Collections.emptyMap() + ); + } + throw new AuthenticationException("Authorization not allowed. Non-health check endpoint."); + } +} diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index b817208672e08..91b10a75c922e 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -11,6 +11,8 @@ authentication: # Key used to validate incoming tokens. Should typically be the same as authentication.tokenService.signingKey signingKey: ${DATAHUB_TOKEN_SERVICE_SIGNING_KEY:WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=} salt: ${DATAHUB_TOKEN_SERVICE_SALT:ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=} + # Required for unauthenticated health check endpoints - best not to remove. + - type: com.datahub.authentication.authenticator.HealthStatusAuthenticator # Normally failures are only warnings, enable this to throw them. logAuthenticatorExceptions: ${METADATA_SERVICE_AUTHENTICATOR_EXCEPTIONS_ENABLED:false} diff --git a/metadata-service/health-servlet/build.gradle b/metadata-service/health-servlet/build.gradle deleted file mode 100644 index 6095f724b3cd4..0000000000000 --- a/metadata-service/health-servlet/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -apply plugin: 'java' - -dependencies { - - implementation project(':metadata-service:factories') - - implementation externalDependency.guava - implementation externalDependency.reflections - implementation externalDependency.springBoot - implementation externalDependency.springCore - implementation externalDependency.springDocUI - implementation externalDependency.springWeb - implementation externalDependency.springWebMVC - implementation externalDependency.springBeans - implementation externalDependency.springContext - implementation externalDependency.slf4jApi - compileOnly externalDependency.lombok - implementation externalDependency.antlr4Runtime - implementation externalDependency.antlr4 - - annotationProcessor externalDependency.lombok -} \ No newline at end of file diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java index 71e8c79a2275a..e4f49df90c392 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java @@ -44,7 +44,6 @@ public GroupedOpenApi defaultOpenApiGroup() { .group("default") .packagesToExclude( "io.datahubproject.openapi.operations", - "com.datahub.health", "io.datahubproject.openapi.health" ).build(); } @@ -55,7 +54,6 @@ public GroupedOpenApi operationsOpenApiGroup() { .group("operations") .packagesToScan( "io.datahubproject.openapi.operations", - "com.datahub.health", "io.datahubproject.openapi.health" ).build(); } diff --git a/metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java similarity index 79% rename from metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java rename to 
metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java index c200e63e0d497..c90603bf88c31 100644 --- a/metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java @@ -1,5 +1,6 @@ -package com.datahub.health.controller; +package io.datahubproject.openapi.health; +import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.linkedin.gms.factory.config.ConfigurationProvider; import io.swagger.v3.oas.annotations.tags.Tag; @@ -9,7 +10,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; @@ -27,7 +27,7 @@ @RestController -@RequestMapping("/check") +@RequestMapping("/") @Tag(name = "HealthCheck", description = "An API for checking health of GMS and its clients.") public class HealthCheckController { @Autowired @@ -41,6 +41,12 @@ public HealthCheckController(ConfigurationProvider config) { this::getElasticHealth, config.getHealthCheck().getCacheDurationSeconds(), TimeUnit.SECONDS); } + @GetMapping(path = "/check/ready", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity getCombinedHealthCheck(String... checks) { + return ResponseEntity.status(getCombinedDebug(checks).getStatusCode()) + .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful()); + } + /** * Combined health check endpoint for checking GMS clients. * For now, just checks the health of the ElasticSearch client @@ -48,11 +54,10 @@ public HealthCheckController(ConfigurationProvider config) { * that component). The status code will be 200 if all components are okay, and 500 if one or more components are not * healthy. */ - @GetMapping(path = "/ready", produces = MediaType.APPLICATION_JSON_VALUE) - public ResponseEntity>> getCombinedHealthCheck(String... checks) { - + @GetMapping(path = "/debug/ready", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity>> getCombinedDebug(String... 
checks) { Map>> healthChecks = new HashMap<>(); - healthChecks.put("elasticsearch", this::getElasticHealthWithCache); + healthChecks.put("elasticsearch", this::getElasticDebugWithCache); // Add new components here List componentsToCheck = checks != null && checks.length > 0 @@ -67,7 +72,6 @@ public ResponseEntity>> getCombinedHealthChec .get()); } - boolean isHealthy = componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); if (isHealthy) { return ResponseEntity.ok(componentHealth); @@ -75,12 +79,18 @@ public ResponseEntity>> getCombinedHealthChec return ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE).body(componentHealth); } + @GetMapping(path = "/check/elastic", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity getElasticHealthWithCache() { + return ResponseEntity.status(getElasticDebugWithCache().getStatusCode()) + .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); + } + /** * Checks the memoized cache for the latest elastic health check result * @return The ResponseEntity containing the health check result */ - @GetMapping(path = "/elastic", produces = MediaType.APPLICATION_JSON_VALUE) - public ResponseEntity getElasticHealthWithCache() { + @GetMapping(path = "/debug/elastic", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity getElasticDebugWithCache() { return this.memoizedSupplier.get(); } diff --git a/metadata-service/war/build.gradle b/metadata-service/war/build.gradle index 122c2b9d5357b..54e95fdcfe579 100644 --- a/metadata-service/war/build.gradle +++ b/metadata-service/war/build.gradle @@ -17,7 +17,6 @@ dependencies { runtimeOnly project(':metadata-service:servlet') runtimeOnly project(':metadata-service:auth-servlet-impl') runtimeOnly project(':metadata-service:graphql-servlet-impl') - runtimeOnly project(':metadata-service:health-servlet') runtimeOnly project(':metadata-service:openapi-servlet') runtimeOnly project(':metadata-service:openapi-entity-servlet') runtimeOnly project(':metadata-service:openapi-analytics-servlet') diff --git a/metadata-service/war/src/main/webapp/WEB-INF/openapiServlet-servlet.xml b/metadata-service/war/src/main/webapp/WEB-INF/openapiServlet-servlet.xml index 3077cfb062638..fb2bc6c0336cd 100644 --- a/metadata-service/war/src/main/webapp/WEB-INF/openapiServlet-servlet.xml +++ b/metadata-service/war/src/main/webapp/WEB-INF/openapiServlet-servlet.xml @@ -3,7 +3,7 @@ xmlns:context="http://www.springframework.org/schema/context" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd"> - + diff --git a/settings.gradle b/settings.gradle index 52de461383b5e..d2844fe00cdbc 100644 --- a/settings.gradle +++ b/settings.gradle @@ -8,7 +8,6 @@ include 'metadata-service:auth-config' include 'metadata-service:auth-impl' include 'metadata-service:auth-filter' include 'metadata-service:auth-servlet-impl' -include 'metadata-service:health-servlet' include 'metadata-service:restli-api' include 'metadata-service:restli-client' include 'metadata-service:restli-servlet-impl' From efd73a5f5766872ebd4997bbb261d2f95d295dd6 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 3 Nov 2023 16:19:39 -0500 Subject: [PATCH 054/792] fix(elasticsearch): fix elasticsearch-setup for dropped 000001 index (#9074) --- 
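The one-line change below loosens a brittle index-name check. In Python terms, the old and new predicates behave roughly like this (a sketch; `definition` stands in for the body returned by the curl call in the script):

```python
definition = '{"datahub_usage_event-000002": {"aliases": {"datahub_usage_event": {}}}}'

# Old predicate: only the very first rollover index was accepted, so a
# re-created index such as ...-000002 was wrongly flagged as invalid.
old_invalid = "datahub_usage_event-000001" not in definition  # True -> would delete

# New predicate: any datahub_usage_event-* backing index is accepted.
new_invalid = "datahub_usage_event-" not in definition  # False -> index is kept

print(old_invalid, new_invalid)
```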
docker/elasticsearch-setup/create-indices.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/elasticsearch-setup/create-indices.sh b/docker/elasticsearch-setup/create-indices.sh index 343013402394f..5c4eb3ce3851e 100755 --- a/docker/elasticsearch-setup/create-indices.sh +++ b/docker/elasticsearch-setup/create-indices.sh @@ -129,7 +129,7 @@ function create_datahub_usage_event_aws_elasticsearch() { if [ $USAGE_EVENT_STATUS -eq 200 ]; then USAGE_EVENT_DEFINITION=$(curl "${CURL_ARGS[@]}" "$ELASTICSEARCH_URL/${PREFIX}datahub_usage_event") # the definition is expected to contain "datahub_usage_event-000001" string - if [[ $USAGE_EVENT_DEFINITION != *"datahub_usage_event-$INDEX_SUFFIX"* ]]; then + if [[ $USAGE_EVENT_DEFINITION != *"datahub_usage_event-"* ]]; then # ... if it doesn't, we need to drop it echo -e "\n>>> deleting invalid datahub_usage_event ..." curl "${CURL_ARGS[@]}" -XDELETE "$ELASTICSEARCH_URL/${PREFIX}datahub_usage_event" From ac1a5a6d184d7991d0006bfe33d31b4471b64729 Mon Sep 17 00:00:00 2001 From: Kos Korchak <97058061+kkorchak@users.noreply.github.com> Date: Sat, 4 Nov 2023 15:06:09 -0400 Subject: [PATCH 055/792] fix(tests): Origin/fix flaky glossary navigation cypress test (#9175) --- .../tests/cypress/cypress/e2e/glossary/glossary_navigation.js | 4 ++-- .../tests/cypress/cypress/e2e/lineage/impact_analysis.js | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js index aeceaf99be889..c6e9d93f71b8c 100644 --- a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js +++ b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js @@ -27,7 +27,7 @@ describe("glossary sidebar navigation test", () => { cy.waitTextVisible("Moved Glossary Term!"); // Ensure the new term is under the parent term group in the navigation sidebar - cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryTermGroup).click(); + cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryTermGroup).click().wait(3000); cy.get('*[class^="GlossaryEntitiesList"]').contains(glossaryTerm).should("be.visible"); // Move a term group from the root level to be under a parent term group @@ -41,7 +41,7 @@ describe("glossary sidebar navigation test", () => { cy.waitTextVisible("Moved Term Group!"); // Ensure it is no longer on the sidebar navigator at the top level but shows up under the new parent - cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryParentGroup).click(); + cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryParentGroup).click().wait(3000); cy.get('*[class^="GlossaryEntitiesList"]').contains(glossaryTermGroup).should("be.visible"); // Delete a term group diff --git a/smoke-test/tests/cypress/cypress/e2e/lineage/impact_analysis.js b/smoke-test/tests/cypress/cypress/e2e/lineage/impact_analysis.js index defb786d1fa5d..784ccf8f0f87d 100644 --- a/smoke-test/tests/cypress/cypress/e2e/lineage/impact_analysis.js +++ b/smoke-test/tests/cypress/cypress/e2e/lineage/impact_analysis.js @@ -21,6 +21,10 @@ const startAtDataSetLineage = () => { } describe("impact analysis", () => { + beforeEach(() => { + cy.on('uncaught:exception', (err, runnable) => { return false; }); + }); + it("can see 1 hop of lineage by default", () => { startAtDataSetLineage() From 60131a85438efc3c5d75fe6d4ed4cff634792325 Mon Sep 17 00:00:00 2001 From: Alex Waldron 
<51122673+walter9388@users.noreply.github.com> Date: Sat, 4 Nov 2023 19:07:00 +0000 Subject: [PATCH 056/792] fix: bad lineage link in `LineageGraphOnboardingConfig.tsx` (#9162) --- .../src/app/onboarding/config/LineageGraphOnboardingConfig.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/onboarding/config/LineageGraphOnboardingConfig.tsx b/datahub-web-react/src/app/onboarding/config/LineageGraphOnboardingConfig.tsx index 54bae6978a4a9..89a01ab3bd241 100644 --- a/datahub-web-react/src/app/onboarding/config/LineageGraphOnboardingConfig.tsx +++ b/datahub-web-react/src/app/onboarding/config/LineageGraphOnboardingConfig.tsx @@ -23,7 +23,7 @@ export const LineageGraphOnboardingConfig: OnboardingStep[] = [ here. From 7cfe3c79794e2c5660c405cdb447086ed32b52f4 Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Sun, 5 Nov 2023 01:02:23 +0530 Subject: [PATCH 057/792] =?UTF-8?q?OBS-191=20|=20Viewing=20domains=20page?= =?UTF-8?q?=20should=20not=20require=20Manage=20Domains=20priv=E2=80=A6=20?= =?UTF-8?q?(#9156)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- datahub-web-react/src/app/shared/admin/HeaderLinks.tsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx index ce1ad93565ba4..3f46f35889fd1 100644 --- a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx +++ b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx @@ -73,7 +73,6 @@ export function HeaderLinks(props: Props) { const showSettings = true; const showIngestion = isIngestionEnabled && me && me.platformPrivileges?.manageIngestion && me.platformPrivileges?.manageSecrets; - const showDomains = me?.platformPrivileges?.createDomains || me?.platformPrivileges?.manageDomains; useUpdateEducationStepIdsAllowlist(!!showIngestion, HOME_PAGE_INGESTION_ID); @@ -106,7 +105,6 @@ export function HeaderLinks(props: Props) { View and modify your data dictionary - {showDomains && ( @@ -121,7 +119,6 @@ export function HeaderLinks(props: Props) { Manage related groups of data assets - )} } > From 81daae815af4498192f487418941379b2170762c Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Sun, 5 Nov 2023 01:16:39 +0530 Subject: [PATCH 058/792] fix: expand the stats row in search preview cards (#9140) --- .../app/entity/dashboard/shared/DashboardStatsSummary.tsx | 3 +++ .../src/app/entity/dataset/shared/DatasetStatsSummary.tsx | 3 +++ .../src/app/entity/dataset/shared/ExpandingStat.tsx | 5 ++--- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx b/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx index e8fb4c16aca9c..fb6364cffac8b 100644 --- a/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx +++ b/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx @@ -11,6 +11,9 @@ import ExpandingStat from '../../dataset/shared/ExpandingStat'; const StatText = styled.span` color: ${ANTD_GRAY[8]}; + @media (min-width: 1024px) { + width: 100%; + white-space: nowrap; `; const HelpIcon = styled(QuestionCircleOutlined)` diff --git a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx index 14f550de25be7..3dcd41a3f8a41 100644 --- 
a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx +++ b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx @@ -12,6 +12,9 @@ import ExpandingStat from './ExpandingStat'; const StatText = styled.span<{ color: string }>` color: ${(props) => props.color}; + @media (min-width: 1160px) { + width: 100%; + white-space: nowrap; `; const PopoverContent = styled.div` diff --git a/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx b/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx index 8101a696bf274..4e223b6e54058 100644 --- a/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx +++ b/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx @@ -2,9 +2,7 @@ import React, { ReactNode, useEffect, useRef, useState } from 'react'; import styled from 'styled-components'; const ExpandingStatContainer = styled.span<{ disabled: boolean; expanded: boolean; width: string }>` - overflow: hidden; - white-space: nowrap; - width: ${(props) => props.width}; + max-width: 100%; transition: width 250ms ease; `; @@ -13,6 +11,7 @@ const ExpandingStat = ({ render, }: { disabled?: boolean; + render: (isExpanded: boolean) => ReactNode; }) => { const contentRef = useRef(null); From 02156662b5e7f24f3db908d4d19f8d1bb94a32b5 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 6 Nov 2023 12:47:07 -0800 Subject: [PATCH 059/792] docs(ingest): clarify adding source guide (#9161) --- metadata-ingestion/adding-source.md | 32 ++++++++++++++++------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/metadata-ingestion/adding-source.md b/metadata-ingestion/adding-source.md index a0930102c6827..6baddf6b2010d 100644 --- a/metadata-ingestion/adding-source.md +++ b/metadata-ingestion/adding-source.md @@ -6,7 +6,7 @@ There are two ways of adding a metadata ingestion source. 2. You are writing the custom source for yourself and are not going to contribute back (yet). If you are going for case (1) just follow the steps 1 to 9 below. In case you are building it for yourself you can skip -steps 4-9 (but maybe write tests and docs for yourself as well) and follow the documentation +steps 4-8 (but maybe write tests and docs for yourself as well) and follow the documentation on [how to use custom ingestion sources](../docs/how/add-custom-ingestion-source.md) without forking Datahub. @@ -27,6 +27,7 @@ from `ConfigModel`. The [file source](./src/datahub/ingestion/source/file.py) is We use [pydantic](https://pydantic-docs.helpmanual.io) conventions for documenting configuration flags. Use the `description` attribute to write rich documentation for your configuration field. For example, the following code: + ```python from pydantic import Field from datahub.api.configuration.common import ConfigModel @@ -49,12 +50,10 @@ generates the following documentation:
- :::note Inline markdown or code snippets are not yet supported for field level documentation. ::: - ### 2. Set up the reporter The reporter interface enables the source to report statistics, warnings, failures, and other information about the run. @@ -71,6 +70,8 @@ some [convenience methods](./src/datahub/emitter/mce_builder.py) for commonly us ### 4. Set up the dependencies +Note: Steps 4-8 are only required if you intend to contribute the source back to the Datahub project. + Declare the source's pip dependencies in the `plugins` variable of the [setup script](./setup.py). ### 5. Enable discoverability @@ -119,37 +120,38 @@ from datahub.ingestion.api.decorators import ( @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") class FileSource(Source): """ - - The File Source can be used to produce all kinds of metadata from a generic metadata events file. + + The File Source can be used to produce all kinds of metadata from a generic metadata events file. :::note Events in this file can be in MCE form or MCP form. ::: - + """ ... source code goes here ``` - #### 7.2 Write custom documentation -- Create a copy of [`source-docs-template.md`](./source-docs-template.md) and edit all relevant components. +- Create a copy of [`source-docs-template.md`](./source-docs-template.md) and edit all relevant components. - Name the document as `` and move it to `metadata-ingestion/docs/sources//.md`. For example for the Kafka platform, under the `kafka` plugin, move the document to `metadata-ingestion/docs/sources/kafka/kafka.md`. - Add a quickstart recipe corresponding to the plugin under `metadata-ingestion/docs/sources//_recipe.yml`. For example, for the Kafka platform, under the `kafka` plugin, there is a quickstart recipe located at `metadata-ingestion/docs/sources/kafka/kafka_recipe.yml`. - To write platform-specific documentation (that is cross-plugin), write the documentation under `metadata-ingestion/docs/sources//README.md`. For example, cross-plugin documentation for the BigQuery platform is located under `metadata-ingestion/docs/sources/bigquery/README.md`. #### 7.3 Viewing the Documentation -Documentation for the source can be viewed by running the documentation generator from the `docs-website` module. +Documentation for the source can be viewed by running the documentation generator from the `docs-website` module. ##### Step 1: Build the Ingestion docs + ```console # From the root of DataHub repo ./gradlew :metadata-ingestion:docGen ``` If this finishes successfully, you will see output messages like: + ```console Ingestion Documentation Generation Complete ############################################ @@ -170,7 +172,8 @@ Ingestion Documentation Generation Complete You can also find documentation files generated at `./docs/generated/ingestion/sources` relative to the root of the DataHub repo. You should be able to locate your specific source's markdown file here and investigate it to make sure things look as expected. #### Step 2: Build the Entire Documentation -To view how this documentation looks in the browser, there is one more step. Just build the entire docusaurus page from the `docs-website` module. + +To view how this documentation looks in the browser, there is one more step. Just build the entire docusaurus page from the `docs-website` module. ```console # From the root of DataHub repo @@ -178,6 +181,7 @@ To view how this documentation looks in the browser, there is one more step. Jus ``` This will generate messages like: + ```console ... 
> Task :docs-website:yarnGenerate @@ -219,15 +223,15 @@ BUILD SUCCESSFUL in 35s 36 actionable tasks: 16 executed, 20 up-to-date ``` -After this you need to run the following script from the `docs-website` module. +After this you need to run the following script from the `docs-website` module. + ```console cd docs-website npm run serve ``` -Now, browse to http://localhost:3000 or whichever port npm is running on, to browse the docs. -Your source should show up on the left sidebar under `Metadata Ingestion / Sources`. - +Now, browse to http://localhost:3000 or whichever port npm is running on, to browse the docs. +Your source should show up on the left sidebar under `Metadata Ingestion / Sources`. ### 8. Add SQL Alchemy mapping (if applicable) From 4a4c29030c0cfd2da9eab01798bc74a94fbb8c1d Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 6 Nov 2023 12:47:24 -0800 Subject: [PATCH 060/792] chore: stop ingestion-smoke CI errors on forks (#9160) --- .github/workflows/docker-ingestion-smoke.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/docker-ingestion-smoke.yml b/.github/workflows/docker-ingestion-smoke.yml index 8d52c23792857..82b57d23609a5 100644 --- a/.github/workflows/docker-ingestion-smoke.yml +++ b/.github/workflows/docker-ingestion-smoke.yml @@ -47,6 +47,7 @@ jobs: name: Build and Push Docker Image to Docker Hub runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: actions/checkout@v3 From 86d2b08d2bbecc90e9adffd250c894abe54667e7 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 6 Nov 2023 12:58:07 -0800 Subject: [PATCH 061/792] docs(ingest): inherit capabilities from superclasses (#9174) --- metadata-ingestion-modules/airflow-plugin/setup.py | 4 ++++ .../src/datahub/ingestion/api/decorators.py | 12 +++++++++++- .../source/state/stateful_ingestion_base.py | 8 +++++++- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion-modules/airflow-plugin/setup.py b/metadata-ingestion-modules/airflow-plugin/setup.py index a5af881022d8c..e88fc870cb333 100644 --- a/metadata-ingestion-modules/airflow-plugin/setup.py +++ b/metadata-ingestion-modules/airflow-plugin/setup.py @@ -101,6 +101,10 @@ def get_long_description(): f"acryl-datahub[testing-utils]{_self_pin}", # Extra requirements for loading our test dags. "apache-airflow[snowflake]>=2.0.2", + # Connexion's new version breaks Airflow: + # See https://github.com/apache/airflow/issues/35234. + # TODO: We should transition to using Airflow's constraints file. + "connexion<3", # https://github.com/snowflakedb/snowflake-sqlalchemy/issues/350 # Eventually we want to set this to "snowflake-sqlalchemy>=1.4.3". # However, that doesn't work with older versions of Airflow. Instead diff --git a/metadata-ingestion/src/datahub/ingestion/api/decorators.py b/metadata-ingestion/src/datahub/ingestion/api/decorators.py index 5e4427047104f..b390ffb9dd036 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/decorators.py +++ b/metadata-ingestion/src/datahub/ingestion/api/decorators.py @@ -93,10 +93,20 @@ def capability( """ def wrapper(cls: Type) -> Type: - if not hasattr(cls, "__capabilities"): + if not hasattr(cls, "__capabilities") or any( + # It's from this class and not a superclass. 
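+            # (The identity check below is true when this class's "__capabilities"
+            # dict is the very same object as a base class's, i.e. it was inherited
+            # rather than defined here; in that case a fresh per-class dict is
+            # created so entries never mutate the superclass's registry.)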
+ cls.__capabilities is getattr(base, "__capabilities", None) + for base in cls.__bases__ + ): cls.__capabilities = {} cls.get_capabilities = lambda: cls.__capabilities.values() + # If the superclasses have capability annotations, copy those over. + for base in cls.__bases__: + base_caps = getattr(base, "__capabilities", None) + if base_caps: + cls.__capabilities.update(base_caps) + cls.__capabilities[capability_name] = CapabilitySetting( capability=capability_name, description=description, supported=supported ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py index 7fb2cf9813cab..d11b1f9ad6a53 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py +++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py @@ -15,11 +15,12 @@ from datahub.configuration.time_window_config import BaseTimeWindowConfig from datahub.configuration.validate_field_rename import pydantic_renamed_field from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.api.decorators import capability from datahub.ingestion.api.ingestion_job_checkpointing_provider_base import ( IngestionCheckpointingProviderBase, JobId, ) -from datahub.ingestion.api.source import Source, SourceReport +from datahub.ingestion.api.source import Source, SourceCapability, SourceReport from datahub.ingestion.source.state.checkpoint import Checkpoint, StateType from datahub.ingestion.source.state.use_case_handler import ( StatefulIngestionUsecaseHandlerBase, @@ -177,6 +178,11 @@ class StatefulIngestionReport(SourceReport): pass +@capability( + SourceCapability.DELETION_DETECTION, + "Optionally enabled via `stateful_ingestion.remove_stale_metadata`", + supported=True, +) class StatefulIngestionSourceBase(Source): """ Defines the base class for all stateful sources. From 2c58c63780970606e50ba95b382dc9ffbde17bfc Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Mon, 6 Nov 2023 15:58:57 -0500 Subject: [PATCH 062/792] fix(ingest/datahub-source): Order by version in memory (#9185) --- .../source/datahub/datahub_database_reader.py | 100 ++++++++++++++---- .../tests/unit/test_datahub_source.py | 51 +++++++++ 2 files changed, 133 insertions(+), 18 deletions(-) create mode 100644 metadata-ingestion/tests/unit/test_datahub_source.py diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py index 96184d8d445e4..e4f1bb275487e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py @@ -1,9 +1,11 @@ import json import logging from datetime import datetime -from typing import Dict, Iterable, Optional, Tuple +from typing import Any, Generic, Iterable, List, Optional, Tuple, TypeVar from sqlalchemy import create_engine +from sqlalchemy.engine import Row +from typing_extensions import Protocol from datahub.emitter.aspect import ASPECT_MAP from datahub.emitter.mcp import MetadataChangeProposalWrapper @@ -20,6 +22,62 @@ DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" +class VersionOrderable(Protocol): + createdon: Any # Should restrict to only orderable types + version: int + + +ROW = TypeVar("ROW", bound=VersionOrderable) + + +class VersionOrderer(Generic[ROW]): + """Orders rows by (createdon, version == 0). 
+ + That is, orders rows first by createdon, and for equal timestamps, puts version 0 rows last. + """ + + def __init__(self, enabled: bool): + # Stores all version 0 aspects for a given createdon timestamp + # Once we have emitted all aspects for a given timestamp, we can emit the version 0 aspects + # Guaranteeing that, for a given timestamp, we always ingest version 0 aspects last + self.queue: Optional[Tuple[datetime, List[ROW]]] = None + self.enabled = enabled + + def __call__(self, rows: Iterable[ROW]) -> Iterable[ROW]: + for row in rows: + yield from self._process_row(row) + yield from self._flush_queue() + + def _process_row(self, row: ROW) -> Iterable[ROW]: + if not self.enabled: + yield row + return + + yield from self._attempt_queue_flush(row) + if row.version == 0: + self._add_to_queue(row) + else: + yield row + + def _add_to_queue(self, row: ROW) -> None: + if self.queue is None: + self.queue = (row.createdon, [row]) + else: + self.queue[1].append(row) + + def _attempt_queue_flush(self, row: ROW) -> Iterable[ROW]: + if self.queue is None: + return + + if row.createdon > self.queue[0]: + yield from self._flush_queue() + + def _flush_queue(self) -> Iterable[ROW]: + if self.queue is not None: + yield from self.queue[1] + self.queue = None + + class DataHubDatabaseReader: def __init__( self, @@ -40,13 +98,14 @@ def query(self) -> str: # Offset is generally 0, unless we repeat the same createdon twice # Ensures stable order, chronological per (urn, aspect) - # Version 0 last, only when createdon is the same. Otherwise relies on createdon order + # Relies on createdon order to reflect version order + # Ordering of entries with the same createdon is handled by VersionOrderer return f""" - SELECT urn, aspect, metadata, systemmetadata, createdon + SELECT urn, aspect, metadata, systemmetadata, createdon, version FROM {self.engine.dialect.identifier_preparer.quote(self.config.database_table_name)} WHERE createdon >= %(since_createdon)s {"" if self.config.include_all_versions else "AND version = 0"} - ORDER BY createdon, urn, aspect, CASE WHEN version = 0 THEN 1 ELSE 0 END, version + ORDER BY createdon, urn, aspect, version LIMIT %(limit)s OFFSET %(offset)s """ @@ -54,6 +113,14 @@ def query(self) -> str: def get_aspects( self, from_createdon: datetime, stop_time: datetime ) -> Iterable[Tuple[MetadataChangeProposalWrapper, datetime]]: + orderer = VersionOrderer[Row](enabled=self.config.include_all_versions) + rows = self._get_rows(from_createdon=from_createdon, stop_time=stop_time) + for row in orderer(rows): + mcp = self._parse_row(row) + if mcp: + yield mcp, row.createdon + + def _get_rows(self, from_createdon: datetime, stop_time: datetime) -> Iterable[Row]: with self.engine.connect() as conn: ts = from_createdon offset = 0 @@ -69,34 +136,31 @@ def get_aspects( return for i, row in enumerate(rows): - row_dict = row._asdict() - mcp = self._parse_row(row_dict) - if mcp: - yield mcp, row_dict["createdon"] + yield row - if ts == row_dict["createdon"]: - offset += i + if ts == row.createdon: + offset += i + 1 else: - ts = row_dict["createdon"] + ts = row.createdon offset = 0 - def _parse_row(self, d: Dict) -> Optional[MetadataChangeProposalWrapper]: + def _parse_row(self, row: Row) -> Optional[MetadataChangeProposalWrapper]: try: - json_aspect = post_json_transform(json.loads(d["metadata"])) - json_metadata = post_json_transform(json.loads(d["systemmetadata"] or "{}")) + json_aspect = post_json_transform(json.loads(row.metadata)) + json_metadata = 
post_json_transform(json.loads(row.systemmetadata or "{}")) system_metadata = SystemMetadataClass.from_obj(json_metadata) return MetadataChangeProposalWrapper( - entityUrn=d["urn"], - aspect=ASPECT_MAP[d["aspect"]].from_obj(json_aspect), + entityUrn=row.urn, + aspect=ASPECT_MAP[row.aspect].from_obj(json_aspect), systemMetadata=system_metadata, changeType=ChangeTypeClass.UPSERT, ) except Exception as e: logger.warning( - f"Failed to parse metadata for {d['urn']}: {e}", exc_info=True + f"Failed to parse metadata for {row.urn}: {e}", exc_info=True ) self.report.num_database_parse_errors += 1 self.report.database_parse_errors.setdefault( str(e), LossyDict() - ).setdefault(d["aspect"], LossyList()).append(d["urn"]) + ).setdefault(row.aspect, LossyList()).append(row.urn) return None diff --git a/metadata-ingestion/tests/unit/test_datahub_source.py b/metadata-ingestion/tests/unit/test_datahub_source.py new file mode 100644 index 0000000000000..adc131362b326 --- /dev/null +++ b/metadata-ingestion/tests/unit/test_datahub_source.py @@ -0,0 +1,51 @@ +from dataclasses import dataclass + +import pytest + +from datahub.ingestion.source.datahub.datahub_database_reader import ( + VersionOrderable, + VersionOrderer, +) + + +@dataclass +class MockRow(VersionOrderable): + createdon: int + version: int + urn: str + + +@pytest.fixture +def rows(): + return [ + MockRow(0, 0, "one"), + MockRow(0, 1, "one"), + MockRow(0, 0, "two"), + MockRow(0, 0, "three"), + MockRow(0, 1, "three"), + MockRow(0, 2, "three"), + MockRow(0, 1, "two"), + MockRow(0, 4, "three"), + MockRow(0, 5, "three"), + MockRow(1, 6, "three"), + MockRow(1, 0, "four"), + MockRow(2, 0, "five"), + MockRow(2, 1, "six"), + MockRow(2, 0, "six"), + MockRow(3, 0, "seven"), + MockRow(3, 0, "eight"), + ] + + +def test_version_orderer(rows): + orderer = VersionOrderer[MockRow](enabled=True) + ordered_rows = list(orderer(rows)) + assert ordered_rows == sorted( + ordered_rows, key=lambda x: (x.createdon, x.version == 0) + ) + + +def test_version_orderer_disabled(rows): + orderer = VersionOrderer[MockRow](enabled=False) + ordered_rows = list(orderer(rows)) + assert ordered_rows == rows From f2ce3ab62cc29bd0d4d4cade2577a50a39fa0f32 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Mon, 6 Nov 2023 15:19:55 -0600 Subject: [PATCH 063/792] lint(frontend): fix HeaderLinks lint error (#9189) --- .../src/app/shared/admin/HeaderLinks.tsx | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx index 3f46f35889fd1..4a7a4938ea970 100644 --- a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx +++ b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx @@ -105,20 +105,20 @@ export function HeaderLinks(props: Props) { View and modify your data dictionary - - - - - Domains - - Manage related groups of data assets - - + + + + + Domains + + Manage related groups of data assets + + } > From 34aa08b7f38d733adcfe31ca97131e1ea52b49e6 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Mon, 6 Nov 2023 16:51:05 -0800 Subject: [PATCH 064/792] refactor(ui): Refactor entity page loading indicators (#9195) unrelated smoke test failing. 
--- .../src/app/entity/EntityPage.tsx | 4 +- .../containers/profile/EntityProfile.tsx | 3 -- .../profile/header/EntityHeader.tsx | 46 +++++++++++-------- .../header/EntityHeaderLoadingSection.tsx | 29 ++++++++++++ .../src/app/lineage/LineageExplorer.tsx | 7 +-- .../src/app/lineage/LineageLoadingSection.tsx | 27 +++++++++++ 6 files changed, 86 insertions(+), 30 deletions(-) create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx create mode 100644 datahub-web-react/src/app/lineage/LineageLoadingSection.tsx diff --git a/datahub-web-react/src/app/entity/EntityPage.tsx b/datahub-web-react/src/app/entity/EntityPage.tsx index 09233dbd89f69..916fa41795412 100644 --- a/datahub-web-react/src/app/entity/EntityPage.tsx +++ b/datahub-web-react/src/app/entity/EntityPage.tsx @@ -8,7 +8,6 @@ import { useEntityRegistry } from '../useEntityRegistry'; import analytics, { EventType } from '../analytics'; import { decodeUrn } from './shared/utils'; import { useGetGrantedPrivilegesQuery } from '../../graphql/policy.generated'; -import { Message } from '../shared/Message'; import { UnauthorizedPage } from '../authorization/UnauthorizedPage'; import { ErrorSection } from '../shared/error/ErrorSection'; import { VIEW_ENTITY_PAGE } from './shared/constants'; @@ -34,7 +33,7 @@ export const EntityPage = ({ entityType }: Props) => { const isLineageSupported = entity.isLineageEnabled(); const isLineageMode = useIsLineageMode(); const authenticatedUserUrn = useUserContext()?.user?.urn; - const { loading, error, data } = useGetGrantedPrivilegesQuery({ + const { error, data } = useGetGrantedPrivilegesQuery({ variables: { input: { actorUrn: authenticatedUserUrn as string, @@ -71,7 +70,6 @@ export const EntityPage = ({ entityType }: Props) => { return ( <> - {loading && } {error && } {data && !canViewEntityPage && } {canViewEntityPage && diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx index 5384eb94429ed..74c127cb05dd9 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx @@ -4,7 +4,6 @@ import { MutationHookOptions, MutationTuple, QueryHookOptions, QueryResult } fro import styled from 'styled-components/macro'; import { useHistory } from 'react-router'; import { EntityType, Exact } from '../../../../../types.generated'; -import { Message } from '../../../../shared/Message'; import { getEntityPath, getOnboardingStepIdsForEntityType, @@ -274,7 +273,6 @@ export const EntityProfile = ({ }} > <> - {loading && } {(error && ) || (!loading && ( @@ -323,7 +321,6 @@ export const EntityProfile = ({ banner /> )} - {loading && } {(error && ) || ( {isLineageMode ? 
( diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx index 97595a515b34d..69389f5dcf6fc 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx @@ -16,6 +16,7 @@ import ShareButton from '../../../../../shared/share/ShareButton'; import { capitalizeFirstLetterOnly } from '../../../../../shared/textUtil'; import { useUserContext } from '../../../../../context/useUserContext'; import { useEntityRegistry } from '../../../../../useEntityRegistry'; +import EntityHeaderLoadingSection from './EntityHeaderLoadingSection'; const TitleWrapper = styled.div` display: flex; @@ -81,7 +82,7 @@ type Props = { }; export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEditable, subHeader }: Props) => { - const { urn, entityType, entityData } = useEntityData(); + const { urn, entityType, entityData, loading } = useEntityData(); const refetch = useRefetch(); const me = useUserContext(); const platformName = getPlatformName(entityData); @@ -99,25 +100,32 @@ export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEdi <> - - - - {entityData?.deprecation?.deprecated && ( - - )} - {entityData?.health && ( - ) || ( + <> + + + + {entityData?.deprecation?.deprecated && ( + + )} + {entityData?.health && ( + + )} + + - )} - - + + )} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx new file mode 100644 index 0000000000000..bbf813804edd4 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeaderLoadingSection.tsx @@ -0,0 +1,29 @@ +import * as React from 'react'; +import { Skeleton, Space } from 'antd'; +import styled from 'styled-components'; +import { ANTD_GRAY } from '../../../constants'; + +const ContextSkeleton = styled(Skeleton.Input)` + && { + width: 320px; + border-radius: 4px; + background-color: ${ANTD_GRAY[3]}; + } +`; + +const NameSkeleton = styled(Skeleton.Input)` + && { + width: 240px; + border-radius: 4px; + background-color: ${ANTD_GRAY[3]}; + } +`; + +export default function EntityHeaderLoadingSection() { + return ( + + + + + ); +} diff --git a/datahub-web-react/src/app/lineage/LineageExplorer.tsx b/datahub-web-react/src/app/lineage/LineageExplorer.tsx index ed0b26bde11ef..f59d1843b8a99 100644 --- a/datahub-web-react/src/app/lineage/LineageExplorer.tsx +++ b/datahub-web-react/src/app/lineage/LineageExplorer.tsx @@ -3,7 +3,6 @@ import { useHistory } from 'react-router'; import { Button, Drawer } from 'antd'; import { InfoCircleOutlined } from '@ant-design/icons'; import styled from 'styled-components'; -import { Message } from '../shared/Message'; import { useEntityRegistry } from '../useEntityRegistry'; import CompactContext from '../shared/CompactContext'; import { EntityAndType, EntitySelectParams, FetchedEntities } from './types'; @@ -18,12 +17,10 @@ import { ErrorSection } from '../shared/error/ErrorSection'; import usePrevious from '../shared/usePrevious'; import { useGetLineageTimeParams } from './utils/useGetLineageTimeParams'; import analytics, { EventType } from '../analytics'; +import LineageLoadingSection from './LineageLoadingSection'; const DEFAULT_DISTANCE_FROM_TOP = 106; -const LoadingMessage = 
styled(Message)` - margin-top: 10%; -`; const FooterButtonGroup = styled.div` display: flex; justify-content: space-between; @@ -167,7 +164,7 @@ export default function LineageExplorer({ urn, type }: Props) { return ( <> {error && } - {loading && } + {loading && } {!!data && (
+ + + ); +} From 279fdd50d7870cc404a58a5c9afbf6b3c7c432ec Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Mon, 6 Nov 2023 19:51:20 -0600 Subject: [PATCH 065/792] fix(security): fix for zookeeper CVE-2023-44981 (#9190) --- build.gradle | 4 ++-- metadata-service/restli-api/build.gradle | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index bd282535fa13c..31e005e001cf0 100644 --- a/build.gradle +++ b/build.gradle @@ -1,7 +1,7 @@ buildscript { ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.45.0' + ext.pegasusVersion = '29.46.8' ext.mavenVersion = '3.6.3' ext.springVersion = '5.3.29' ext.springBootVersion = '2.7.14' @@ -212,7 +212,7 @@ project.ext.externalDependency = [ 'testContainersOpenSearch': 'org.opensearch:opensearch-testcontainers:2.0.0', 'typesafeConfig':'com.typesafe:config:1.4.1', 'wiremock':'com.github.tomakehurst:wiremock:2.10.0', - 'zookeeper': 'org.apache.zookeeper:zookeeper:3.4.14', + 'zookeeper': 'org.apache.zookeeper:zookeeper:3.7.2', 'wire': 'com.squareup.wire:wire-compiler:3.7.1', 'charle': 'com.charleskorn.kaml:kaml:0.53.0', 'common': 'commons-io:commons-io:2.7', diff --git a/metadata-service/restli-api/build.gradle b/metadata-service/restli-api/build.gradle index ed4f4118dba30..f182d11b6baeb 100644 --- a/metadata-service/restli-api/build.gradle +++ b/metadata-service/restli-api/build.gradle @@ -8,4 +8,10 @@ dependencies { restClientCompile spec.product.pegasus.d2 restClientCompile spec.product.pegasus.restliClient + + constraints { + restClientCompile(externalDependency.zookeeper) { + because("CVE-2023-44981") + } + } } \ No newline at end of file From ac9a0140570b3ada060ce716304f33ff62a1348a Mon Sep 17 00:00:00 2001 From: John Joyce Date: Mon, 6 Nov 2023 18:33:02 -0800 Subject: [PATCH 066/792] refactor(ui): Rename "dataset details" button text to "view details" on lineage sidebar profile (#9196) --- datahub-web-react/src/app/lineage/LineageExplorer.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/lineage/LineageExplorer.tsx b/datahub-web-react/src/app/lineage/LineageExplorer.tsx index f59d1843b8a99..28cd7025f51f4 100644 --- a/datahub-web-react/src/app/lineage/LineageExplorer.tsx +++ b/datahub-web-react/src/app/lineage/LineageExplorer.tsx @@ -217,7 +217,7 @@ export default function LineageExplorer({ urn, type }: Props) { Close ) From 45770013c9bdaadfb49950c67a838aef879a8e8a Mon Sep 17 00:00:00 2001 From: John Joyce Date: Mon, 6 Nov 2023 18:33:13 -0800 Subject: [PATCH 067/792] feat(ui): Add command-k icons to search bar (#9194) --- .../src/app/home/HomePageHeader.tsx | 1 + datahub-web-react/src/app/search/CommandK.tsx | 29 +++++++++++++++ .../src/app/search/SearchBar.tsx | 37 ++++++++++++------- .../src/app/search/SearchHeader.tsx | 1 + 4 files changed, 55 insertions(+), 13 deletions(-) create mode 100644 datahub-web-react/src/app/search/CommandK.tsx diff --git a/datahub-web-react/src/app/home/HomePageHeader.tsx b/datahub-web-react/src/app/home/HomePageHeader.tsx index e5c01252a865b..0052d54f562eb 100644 --- a/datahub-web-react/src/app/home/HomePageHeader.tsx +++ b/datahub-web-react/src/app/home/HomePageHeader.tsx @@ -276,6 +276,7 @@ export const HomePageHeader = () => { combineSiblings showQuickFilters showViewAllResults + showCommandK /> {searchResultsToShow && searchResultsToShow.length > 0 && ( diff --git 
a/datahub-web-react/src/app/search/CommandK.tsx b/datahub-web-react/src/app/search/CommandK.tsx new file mode 100644 index 0000000000000..13e55a0e3f266 --- /dev/null +++ b/datahub-web-react/src/app/search/CommandK.tsx @@ -0,0 +1,29 @@ +import React from 'react'; +import styled from 'styled-components'; +import { ANTD_GRAY } from '../entity/shared/constants'; + +const Container = styled.div` + color: ${ANTD_GRAY[6]}; + background-color: #ffffff; + opacity: 0.9; + border-color: black; + border-radius: 6px; + border: 1px solid ${ANTD_GRAY[6]}; + padding-right: 6px; + padding-left: 6px; + margin-right: 4px; + margin-left: 4px; +`; + +const Letter = styled.span` + padding: 2px; +`; + +export const CommandK = () => { + return ( + + + K + + ); +}; diff --git a/datahub-web-react/src/app/search/SearchBar.tsx b/datahub-web-react/src/app/search/SearchBar.tsx index 5f797e68fe0e8..a23ead83caf54 100644 --- a/datahub-web-react/src/app/search/SearchBar.tsx +++ b/datahub-web-react/src/app/search/SearchBar.tsx @@ -23,6 +23,7 @@ import { navigateToSearchUrl } from './utils/navigateToSearchUrl'; import ViewAllSearchItem from './ViewAllSearchItem'; import { ViewSelect } from '../entity/view/select/ViewSelect'; import { combineSiblingsInAutoComplete } from './utils/combineSiblingsInAutoComplete'; +import { CommandK } from './CommandK'; const StyledAutoComplete = styled(AutoComplete)` width: 100%; @@ -114,6 +115,7 @@ interface Props { fixAutoComplete?: boolean; hideRecommendations?: boolean; showQuickFilters?: boolean; + showCommandK?: boolean; viewsEnabled?: boolean; combineSiblings?: boolean; setIsSearchBarFocused?: (isSearchBarFocused: boolean) => void; @@ -142,6 +144,7 @@ export const SearchBar = ({ fixAutoComplete, hideRecommendations, showQuickFilters, + showCommandK = false, viewsEnabled = false, combineSiblings = false, setIsSearchBarFocused, @@ -153,6 +156,8 @@ export const SearchBar = ({ const [searchQuery, setSearchQuery] = useState(initialQuery); const [selected, setSelected] = useState(); const [isDropdownVisible, setIsDropdownVisible] = useState(false); + const [isFocused, setIsFocused] = useState(false); + useEffect(() => setSelected(initialQuery), [initialQuery]); const searchEntityTypes = entityRegistry.getSearchEntityTypes(); @@ -277,11 +282,13 @@ export const SearchBar = ({ function handleFocus() { if (onFocus) onFocus(); handleSearchBarClick(true); + setIsFocused(true); } function handleBlur() { if (onBlur) onBlur(); handleSearchBarClick(false); + setIsFocused(false); } function handleSearch(query: string, type?: EntityType, appliedQuickFilters?: FacetFilterInput[]) { @@ -294,18 +301,21 @@ export const SearchBar = ({ const searchInputRef = useRef(null); useEffect(() => { - const handleKeyDown = (event) => { - // Support command-k to select the search bar. - // 75 is the keyCode for 'k' - if ((event.metaKey || event.ctrlKey) && event.keyCode === 75) { - (searchInputRef?.current as any)?.focus(); - } - }; - document.addEventListener('keydown', handleKeyDown); - return () => { - document.removeEventListener('keydown', handleKeyDown); - }; - }, []); + if (showCommandK) { + const handleKeyDown = (event) => { + // Support command-k to select the search bar. 
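+                // (event.metaKey is the Cmd key on macOS; event.ctrlKey covers
+                // Ctrl on Windows/Linux, so the shortcut works on both.)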
+ // 75 is the keyCode for 'k' + if ((event.metaKey || event.ctrlKey) && event.keyCode === 75) { + (searchInputRef?.current as any)?.focus(); + } + }; + document.addEventListener('keydown', handleKeyDown); + return () => { + document.removeEventListener('keydown', handleKeyDown); + }; + } + return () => null; + }, [showCommandK]); return ( @@ -377,7 +387,7 @@ export const SearchBar = ({ data-testid="search-input" onFocus={handleFocus} onBlur={handleBlur} - allowClear={{ clearIcon: }} + allowClear={(isFocused && { clearIcon: }) || false} prefix={ <> {viewsEnabled && ( @@ -411,6 +421,7 @@ export const SearchBar = ({ } ref={searchInputRef} + suffix={(showCommandK && !isFocused && ) || null} /> diff --git a/datahub-web-react/src/app/search/SearchHeader.tsx b/datahub-web-react/src/app/search/SearchHeader.tsx index 91f9753a3d601..76e78a11d3e9d 100644 --- a/datahub-web-react/src/app/search/SearchHeader.tsx +++ b/datahub-web-react/src/app/search/SearchHeader.tsx @@ -108,6 +108,7 @@ export const SearchHeader = ({ fixAutoComplete showQuickFilters showViewAllResults + showCommandK /> From 88cde08d060041bfb6f585ed7a486f6ba5886733 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Mon, 6 Nov 2023 21:34:17 -0500 Subject: [PATCH 068/792] feat(ui): Update Apollo cache to work with union types (#9193) --- datahub-web-react/codegen.yml | 3 ++ datahub-web-react/package.json | 1 + datahub-web-react/src/App.tsx | 3 ++ datahub-web-react/yarn.lock | 73 ++++++++++++++++++++++++++++++++++ 4 files changed, 80 insertions(+) diff --git a/datahub-web-react/codegen.yml b/datahub-web-react/codegen.yml index 96a2bd6137920..35728e8aeb7d4 100644 --- a/datahub-web-react/codegen.yml +++ b/datahub-web-react/codegen.yml @@ -20,6 +20,9 @@ generates: src/types.generated.ts: plugins: - 'typescript' + src/possibleTypes.generated.ts: + plugins: + - 'fragment-matcher' src/: preset: near-operation-file presetConfig: diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 2d9329919fdc1..019295f3e6ffe 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -11,6 +11,7 @@ "@apollo/client": "^3.3.19", "@craco/craco": "^6.1.1", "@data-ui/xy-chart": "^0.0.84", + "@graphql-codegen/fragment-matcher": "^5.0.0", "@miragejs/graphql": "^0.1.11", "@monaco-editor/react": "^4.3.1", "@react-hook/window-size": "^3.0.7", diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index b6bc608dccbbb..342a89f350429 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -36,6 +36,7 @@ import { DataPlatformEntity } from './app/entity/dataPlatform/DataPlatformEntity import { DataProductEntity } from './app/entity/dataProduct/DataProductEntity'; import { DataPlatformInstanceEntity } from './app/entity/dataPlatformInstance/DataPlatformInstanceEntity'; import { RoleEntity } from './app/entity/Access/RoleEntity'; +import possibleTypesResult from './possibleTypes.generated'; /* Construct Apollo Client @@ -77,6 +78,8 @@ const client = new ApolloClient({ }, }, }, + // need to define possibleTypes to allow us to use Apollo cache with union types + possibleTypes: possibleTypesResult.possibleTypes, }), credentials: 'include', defaultOptions: { diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 590f3ebcef8c3..ce0f2f514dad1 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -2298,6 +2298,14 @@ "@graphql-tools/utils" "^6" tslib "~2.0.1" +"@graphql-codegen/fragment-matcher@^5.0.0": + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/@graphql-codegen/fragment-matcher/-/fragment-matcher-5.0.0.tgz#2a016715e42e8f21aa08830f34a4d0a930e660fe" + integrity sha512-mbash9E8eY6RSMSNrrO+C9JJEn8rdr8ORaxMpgdWL2qe2q/TlLUCE3ZvQvHkSc7GjBnMEk36LncA8ApwHR2BHg== + dependencies: + "@graphql-codegen/plugin-helpers" "^5.0.0" + tslib "~2.5.0" + "@graphql-codegen/near-operation-file-preset@^1.17.13": version "1.18.6" resolved "https://registry.yarnpkg.com/@graphql-codegen/near-operation-file-preset/-/near-operation-file-preset-1.18.6.tgz#2378ac75feaeaa1cfd2146bd84bf839b1fe20d9d" @@ -2331,6 +2339,18 @@ lodash "~4.17.0" tslib "~2.3.0" +"@graphql-codegen/plugin-helpers@^5.0.0": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.1.tgz#e2429fcfba3f078d5aa18aa062d46c922bbb0d55" + integrity sha512-6L5sb9D8wptZhnhLLBcheSPU7Tg//DGWgc5tQBWX46KYTOTQHGqDpv50FxAJJOyFVJrveN9otWk9UT9/yfY4ww== + dependencies: + "@graphql-tools/utils" "^10.0.0" + change-case-all "1.0.15" + common-tags "1.8.2" + import-from "4.0.0" + lodash "~4.17.0" + tslib "~2.5.0" + "@graphql-codegen/typescript-operations@1.17.13": version "1.17.13" resolved "https://registry.yarnpkg.com/@graphql-codegen/typescript-operations/-/typescript-operations-1.17.13.tgz#a5b08c1573b9507ca5a9e66e795aecc40ddc5305" @@ -2584,6 +2604,16 @@ dependencies: tslib "^2.4.0" +"@graphql-tools/utils@^10.0.0": + version "10.0.8" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-10.0.8.tgz#c7b84275ec83dc42ad9f3d4ffc424ff682075759" + integrity sha512-yjyA8ycSa1WRlJqyX/aLqXeE5DvF/H02+zXMUFnCzIDrj0UvLMUrxhmVFnMK0Q2n3bh4uuTeY3621m5za9ovXw== + dependencies: + "@graphql-typed-document-node/core" "^3.1.1" + cross-inspect "1.0.0" + dset "^3.1.2" + tslib "^2.4.0" + "@graphql-tools/utils@^6": version "6.2.4" resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-6.2.4.tgz#38a2314d2e5e229ad4f78cca44e1199e18d55856" @@ -2618,6 +2648,11 @@ resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950" integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg== +"@graphql-typed-document-node/core@^3.1.1": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== + "@hapi/address@2.x.x": version "2.1.4" resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5" @@ -7001,6 +7036,22 @@ change-case-all@1.0.14: upper-case "^2.0.2" upper-case-first "^2.0.2" +change-case-all@1.0.15: + version "1.0.15" + resolved "https://registry.yarnpkg.com/change-case-all/-/change-case-all-1.0.15.tgz#de29393167fc101d646cd76b0ef23e27d09756ad" + integrity sha512-3+GIFhk3sNuvFAJKU46o26OdzudQlPNBCu1ZQi3cMeMHhty1bhDxu2WrEilVNYaGvqUtR1VSigFcJOiS13dRhQ== + dependencies: + change-case "^4.1.2" + is-lower-case "^2.0.2" + is-upper-case "^2.0.2" + lower-case "^2.0.2" + lower-case-first "^2.0.2" + sponge-case "^1.0.1" + swap-case "^2.0.2" + title-case "^3.0.3" + upper-case "^2.0.2" + upper-case-first "^2.0.2" + change-case@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12" @@ -7357,6 +7408,11 @@ common-tags@1.8.0, common-tags@^1.8.0: resolved 
"https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" integrity sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw== +common-tags@1.8.2: + version "1.8.2" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" @@ -7698,6 +7754,13 @@ cross-fetch@^3.1.5: dependencies: node-fetch "2.6.7" +cross-inspect@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/cross-inspect/-/cross-inspect-1.0.0.tgz#5fda1af759a148594d2d58394a9e21364f6849af" + integrity sha512-4PFfn4b5ZN6FMNGSZlyb7wUhuN8wvj8t/VQHZdM4JsDcruGJ8L2kf9zao98QIrBPFCpdk27qst/AGTl7pL3ypQ== + dependencies: + tslib "^2.4.0" + cross-spawn@7.0.3, cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -8595,6 +8658,11 @@ dotenv@^8.2.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== +dset@^3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.3.tgz#c194147f159841148e8e34ca41f638556d9542d2" + integrity sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ== + duplexer3@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" @@ -18712,6 +18780,11 @@ tslib@~2.3.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== +tslib@~2.5.0: + version "2.5.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913" + integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w== + tsutils@^3.17.1: version "3.21.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" From 23c98ecf7a88d11e3b195d457ab42c763818df47 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 7 Nov 2023 14:40:48 -0600 Subject: [PATCH 069/792] feat(policy): enable support for 10k+ policies (#9177) Co-authored-by: Pedro Silva --- .../policy/ListPoliciesResolver.java | 26 ++---- .../metadata/client/JavaEntityClient.java | 2 +- .../metadata/search/SearchService.java | 18 ++-- .../authorization/DataHubAuthorizer.java | 21 ++--- .../datahub/authorization/PolicyFetcher.java | 62 +++++++++++--- .../authorization/DataHubAuthorizerTest.java | 82 +++++++++++++------ .../src/main/resources/application.yml | 1 + .../auth/DataHubAuthorizerFactory.java | 5 +- .../linkedin/entity/client/EntityClient.java | 2 +- .../entity/client/RestliEntityClient.java | 7 +- .../cypress/e2e/settings/managing_groups.js | 2 +- 11 files changed, 153 insertions(+), 75 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index 516d6fa2d3137..b44da1c2f832c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -40,23 +40,15 @@ public CompletableFuture get(final DataFetchingEnvironment e final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all policy Urns. - final PolicyFetcher.PolicyFetchResult policyFetchResult = - _policyFetcher.fetchPolicies(start, count, query, context.getAuthentication()); - - // Now that we have entities we can bind this to a result. - final ListPoliciesResult result = new ListPoliciesResult(); - result.setStart(start); - result.setCount(count); - result.setTotal(policyFetchResult.getTotal()); - result.setPolicies(mapEntities(policyFetchResult.getPolicies())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list policies", e); - } - }); + return _policyFetcher.fetchPolicies(start, query, count, context.getAuthentication()) + .thenApply(policyFetchResult -> { + final ListPoliciesResult result = new ListPoliciesResult(); + result.setStart(start); + result.setCount(count); + result.setTotal(policyFetchResult.getTotal()); + result.setPolicies(mapEntities(policyFetchResult.getPolicies())); + return result; + }); } throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index a69c6008fea47..dff9a22de8efd 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -381,7 +381,7 @@ public SearchResult searchAcrossEntities( @Nonnull @Override public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nonnull String keepAlive, int count, + @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) throws RemoteInvocationException { final SearchFlags finalFlags = searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java index 94b8d57efcc16..c99e4a94feb29 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java @@ -147,15 +147,23 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul return result; } + /** + * If no entities are provided, fallback to the list of non-empty entities + * @param inputEntities the requested entities + * @return some entities to search + */ private List getEntitiesToSearch(@Nonnull List inputEntities) { List nonEmptyEntities; List lowercaseEntities = inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { - nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(); - } - if (!inputEntities.isEmpty()) { - nonEmptyEntities = nonEmptyEntities.stream().filter(lowercaseEntities::contains).collect(Collectors.toList()); + + if (lowercaseEntities.isEmpty()) { + try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { + nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(); + } + } else { + nonEmptyEntities = lowercaseEntities; } + return nonEmptyEntities; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index f8b28f6c182a7..f8f99475de23e 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -72,11 +72,13 @@ public DataHubAuthorizer( final EntityClient entityClient, final int delayIntervalSeconds, final int refreshIntervalSeconds, - final AuthorizationMode mode) { + final AuthorizationMode mode, + final int policyFetchSize) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _mode = Objects.requireNonNull(mode); _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, readWriteLock.writeLock()); + _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, + readWriteLock.writeLock(), policyFetchSize); _refreshExecutorService.scheduleAtFixedRate(_policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); } @@ -244,29 +246,28 @@ static class PolicyRefreshRunnable implements Runnable { private final PolicyFetcher _policyFetcher; private final Map> _policyCache; private final Lock writeLock; + private final int count; @Override public void run() { try { // Populate new cache and swap. 
Map> newCache = new HashMap<>(); + Integer total = null; + String scrollId = null; - int start = 0; - int count = 30; - int total = 30; - - while (start < total) { + while (total == null || scrollId != null) { try { final PolicyFetcher.PolicyFetchResult - policyFetchResult = _policyFetcher.fetchPolicies(start, count, _systemAuthentication); + policyFetchResult = _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); addPoliciesToCache(newCache, policyFetchResult.getPolicies()); total = policyFetchResult.getTotal(); - start = start + count; + scrollId = policyFetchResult.getScrollId(); } catch (Exception e) { log.error( - "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. start: {}, count: {}", start, count, e); + "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", count, scrollId, e); return; } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java index 92d12bad41c9f..c06da4d245f91 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java @@ -8,8 +8,8 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchEntity; -import com.linkedin.metadata.search.SearchResult; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; @@ -18,11 +18,14 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; +import javax.annotation.Nullable; + import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; @@ -38,22 +41,53 @@ public class PolicyFetcher { private static final SortCriterion POLICY_SORT_CRITERION = new SortCriterion().setField("lastUpdatedTimestamp").setOrder(SortOrder.DESCENDING); - public PolicyFetchResult fetchPolicies(int start, int count, Authentication authentication) - throws RemoteInvocationException, URISyntaxException { - return fetchPolicies(start, count, "", authentication); + /** + * This is to provide a scroll implementation using the start/count api. It is not efficient + * and the scroll native functions should be used instead. This does fix a failure to fetch + * policies when deep pagination happens where there are >10k policies. + * Exists primarily to prevent breaking change to the graphql api. + */ + @Deprecated + public CompletableFuture fetchPolicies(int start, String query, int count, Authentication authentication) { + return CompletableFuture.supplyAsync(() -> { + try { + PolicyFetchResult result = PolicyFetchResult.EMPTY; + String scrollId = ""; + int fetchedResults = 0; + + while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) { + PolicyFetchResult tmpResult = fetchPolicies(query, count, scrollId.isEmpty() ? 
null : scrollId, authentication); + fetchedResults += tmpResult.getPolicies().size(); + scrollId = tmpResult.getScrollId(); + if (fetchedResults > start) { + result = tmpResult; + } + } + + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list policies", e); + } + }); } - public PolicyFetchResult fetchPolicies(int start, int count, String query, Authentication authentication) + public PolicyFetchResult fetchPolicies(int count, @Nullable String scrollId, Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + return fetchPolicies("", count, scrollId, authentication); + } + + public PolicyFetchResult fetchPolicies(String query, int count, @Nullable String scrollId, Authentication authentication) throws RemoteInvocationException, URISyntaxException { - log.debug(String.format("Batch fetching policies. start: %s, count: %s ", start, count)); - // First fetch all policy urns from start - start + count - SearchResult result = - _entityClient.search(POLICY_ENTITY_NAME, query, null, POLICY_SORT_CRITERION, start, count, authentication, - new SearchFlags().setFulltext(true)); + log.debug(String.format("Batch fetching policies. count: %s, scroll: %s", count, scrollId)); + + // First fetch all policy urns + ScrollResult result = _entityClient.scrollAcrossEntities(List.of(POLICY_ENTITY_NAME), query, null, scrollId, + null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) + .setSkipHighlighting(true).setFulltext(true), authentication); List policyUrns = result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); if (policyUrns.isEmpty()) { - return new PolicyFetchResult(Collections.emptyList(), 0); + return PolicyFetchResult.EMPTY; } // Fetch DataHubPolicyInfo aspects for each urn @@ -64,7 +98,7 @@ public PolicyFetchResult fetchPolicies(int start, int count, String query, Authe .filter(Objects::nonNull) .map(this::extractPolicy) .filter(Objects::nonNull) - .collect(Collectors.toList()), result.getNumEntities()); + .collect(Collectors.toList()), result.getNumEntities(), result.getScrollId()); } private Policy extractPolicy(EntityResponse entityResponse) { @@ -82,6 +116,10 @@ private Policy extractPolicy(EntityResponse entityResponse) { public static class PolicyFetchResult { List policies; int total; + @Nullable + String scrollId; + + public static final PolicyFetchResult EMPTY = new PolicyFetchResult(Collections.emptyList(), 0, null); } @Value diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index 24ecfa6fefc85..babb1c5d00ee8 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -22,6 +22,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -35,6 +36,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; + import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -89,30 +92,58 @@ public void setupTest() throws Exception 
{ final EnvelopedAspectMap childDomainPolicyAspectMap = new EnvelopedAspectMap(); childDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); - final SearchResult policySearchResult = new SearchResult(); - policySearchResult.setNumEntities(3); - policySearchResult.setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(activePolicyUrn), - new SearchEntity().setEntity(inactivePolicyUrn), - new SearchEntity().setEntity(parentDomainPolicyUrn), - new SearchEntity().setEntity(childDomainPolicyUrn) - ) - ) - ); - - when(_entityClient.search(eq("dataHubPolicy"), eq(""), isNull(), any(), anyInt(), anyInt(), any(), - eq(new SearchFlags().setFulltext(true)))).thenReturn(policySearchResult); - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), - eq(ImmutableSet.of(activePolicyUrn, inactivePolicyUrn, parentDomainPolicyUrn, childDomainPolicyUrn)), eq(null), any())).thenReturn( - ImmutableMap.of( - activePolicyUrn, new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap), - inactivePolicyUrn, new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap), - parentDomainPolicyUrn, new EntityResponse().setUrn(parentDomainPolicyUrn).setAspects(parentDomainPolicyAspectMap), - childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspects(childDomainPolicyAspectMap) - ) - ); + final ScrollResult policySearchResult1 = new ScrollResult() + .setScrollId("1") + .setNumEntities(4) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); + + final ScrollResult policySearchResult2 = new ScrollResult() + .setScrollId("2") + .setNumEntities(4) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); + + final ScrollResult policySearchResult3 = new ScrollResult() + .setScrollId("3") + .setNumEntities(4) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); + + final ScrollResult policySearchResult4 = new ScrollResult() + .setNumEntities(4) + .setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(childDomainPolicyUrn)))); + + when(_entityClient.scrollAcrossEntities(eq(List.of("dataHubPolicy")), eq(""), isNull(), any(), isNull(), + anyInt(), eq(new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipHighlighting(true).setSkipCache(true)), any())) + .thenReturn(policySearchResult1) + .thenReturn(policySearchResult2) + .thenReturn(policySearchResult3) + .thenReturn(policySearchResult4); + + when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())).thenAnswer(args -> { + Set inputUrns = args.getArgument(1); + Urn urn = inputUrns.stream().findFirst().get(); + + switch (urn.toString()) { + case "urn:li:dataHubPolicy:0": + return Map.of(activePolicyUrn, new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); + case "urn:li:dataHubPolicy:1": + return Map.of(inactivePolicyUrn, new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); + case "urn:li:dataHubPolicy:2": + return Map.of(parentDomainPolicyUrn, new EntityResponse().setUrn(parentDomainPolicyUrn).setAspects(parentDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:3": + return Map.of(childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspects(childDomainPolicyAspectMap)); + default: + throw new 
IllegalStateException(); + } + }); final List userUrns = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user3"), Urn.createFromString("urn:li:corpuser:user4")); final List groupUrns = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group3"), Urn.createFromString("urn:li:corpGroup:group4")); @@ -146,7 +177,8 @@ childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspec _entityClient, 10, 10, - DataHubAuthorizer.AuthorizationMode.DEFAULT + DataHubAuthorizer.AuthorizationMode.DEFAULT, + 1 // force pagination logic ); _dataHubAuthorizer.init(Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); _dataHubAuthorizer.invalidateCache(); diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 91b10a75c922e..e9113d339e81d 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -39,6 +39,7 @@ authorization: defaultAuthorizer: enabled: ${AUTH_POLICIES_ENABLED:true} cacheRefreshIntervalSecs: ${POLICY_CACHE_REFRESH_INTERVAL_SECONDS:120} + cachePolicyFetchSize: ${POLICY_CACHE_FETCH_SIZE:1000} # Enables authorization of reads, writes, and deletes on REST APIs. Defaults to false for backwards compatibility, but should become true down the road restApiAuthorization: ${REST_API_AUTHORIZATION_ENABLED:false} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 5b298a453547a..663234e2519fa 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -32,6 +32,9 @@ public class DataHubAuthorizerFactory { @Value("${authorization.defaultAuthorizer.cacheRefreshIntervalSecs}") private Integer policyCacheRefreshIntervalSeconds; + @Value("${authorization.defaultAuthorizer.cachePolicyFetchSize}") + private Integer policyCacheFetchSize; + @Value("${authorization.defaultAuthorizer.enabled:true}") private Boolean policiesEnabled; @@ -44,6 +47,6 @@ protected DataHubAuthorizer getInstance() { : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; return new DataHubAuthorizer(systemAuthentication, entityClient, 10, - policyCacheRefreshIntervalSeconds, mode); + policyCacheRefreshIntervalSeconds, mode, policyCacheFetchSize); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index b9661ec75e1b1..84d0ed6b9594d 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -241,7 +241,7 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul */ @Nonnull ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nonnull String keepAlive, int count, @Nullable SearchFlags searchFlags, + @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) throws 
RemoteInvocationException; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 47a00e711a935..2716e27518fcc 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -482,11 +482,11 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul @Nonnull @Override public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nonnull String keepAlive, int count, + @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) throws RemoteInvocationException { final EntitiesDoScrollAcrossEntitiesRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionScrollAcrossEntities().inputParam(input).countParam(count).keepAliveParam(keepAlive); + ENTITIES_REQUEST_BUILDERS.actionScrollAcrossEntities().inputParam(input).countParam(count); if (entities != null) { requestBuilder.entitiesParam(new StringArray(entities)); @@ -500,6 +500,9 @@ public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnul if (searchFlags != null) { requestBuilder.searchFlagsParam(searchFlags); } + if (keepAlive != null) { + requestBuilder.keepAliveParam(keepAlive); + } return sendClientRequest(requestBuilder, authentication).getEntity(); } diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js index 9559435ff01c8..8d689c7e2303c 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js @@ -81,7 +81,7 @@ describe("create and manage group", () => { cy.focused().type(expected_name); cy.get(".ant-select-item-option").contains(expected_name, { matchCase: false }).click(); cy.focused().blur(); - cy.contains(expected_name).should("have.length", 1); + cy.contains(expected_name, { matchCase: false }).should("have.length", 1); cy.get('[role="dialog"] button').contains("Done").click(); cy.waitTextVisible("Owners Added"); cy.contains(expected_name, { matchCase: false }).should("be.visible"); From 353584c10acbee7554c2eb255512173f24e86785 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 7 Nov 2023 18:22:18 -0600 Subject: [PATCH 070/792] feat(browsepathv2): Allow system-update to reprocess browse paths v2 (#9200) --- .../steps/BackfillBrowsePathsV2Step.java | 86 ++++++++++++++----- .../env/docker-without-neo4j.env | 1 + docker/datahub-upgrade/env/docker.env | 1 + docker/docker-compose.dev.yml | 4 + .../docker-compose-m1.quickstart.yml | 1 + ...er-compose-without-neo4j-m1.quickstart.yml | 1 + ...ocker-compose-without-neo4j.quickstart.yml | 1 + .../quickstart/docker-compose.quickstart.yml | 1 + .../client/CachingEntitySearchService.java | 16 ++-- .../elasticsearch/query/ESSearchDAO.java | 4 +- .../query/request/SearchRequestHandler.java | 8 +- .../src/main/resources/application.yml | 2 + .../metadata/search/EntitySearchService.java | 4 +- 13 files changed, 94 insertions(+), 36 deletions(-) diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 7547186ccfb23..08a752d9597f4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -6,6 +6,7 @@ import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -13,6 +14,7 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -37,6 +39,8 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep { public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; + public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; private static final Set ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( Constants.DATASET_ENTITY_NAME, @@ -81,27 +85,14 @@ public Function executable() { private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, String scrollId) { - // Condition: has `browsePaths` AND does NOT have `browsePathV2` - Criterion missingBrowsePathV2 = new Criterion(); - missingBrowsePathV2.setCondition(Condition.IS_NULL); - missingBrowsePathV2.setField("browsePathV2"); - // Excludes entities without browsePaths - Criterion hasBrowsePathV1 = new Criterion(); - hasBrowsePathV1.setCondition(Condition.EXISTS); - hasBrowsePathV1.setField("browsePaths"); - - CriterionArray criterionArray = new CriterionArray(); - criterionArray.add(missingBrowsePathV2); - criterionArray.add(hasBrowsePathV1); - - ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); - conjunctiveCriterion.setAnd(criterionArray); + final Filter filter; - ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); - conjunctiveCriterionArray.add(conjunctiveCriterion); - - Filter filter = new Filter(); - filter.setOr(conjunctiveCriterionArray); + if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) + && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + filter = backfillDefaultBrowsePathsV2Filter(); + } else { + filter = backfillBrowsePathsV2Filter(); + } final ScrollResult scrollResult = _searchService.scrollAcrossEntities( ImmutableList.of(entityType), @@ -109,9 +100,9 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S filter, null, scrollId, - "5m", + null, BATCH_SIZE, - null + new SearchFlags().setFulltext(true).setSkipCache(true).setSkipHighlighting(true).setSkipAggregates(true) ); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; @@ -129,6 +120,55 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S return 
scrollResult.getScrollId(); } + private Filter backfillBrowsePathsV2Filter() { + // Condition: has `browsePaths` AND does NOT have `browsePathV2` + Criterion missingBrowsePathV2 = new Criterion(); + missingBrowsePathV2.setCondition(Condition.IS_NULL); + missingBrowsePathV2.setField("browsePathV2"); + // Excludes entities without browsePaths + Criterion hasBrowsePathV1 = new Criterion(); + hasBrowsePathV1.setCondition(Condition.EXISTS); + hasBrowsePathV1.setField("browsePaths"); + + CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(missingBrowsePathV2); + criterionArray.add(hasBrowsePathV1); + + ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + conjunctiveCriterionArray.add(conjunctiveCriterion); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private Filter backfillDefaultBrowsePathsV2Filter() { + // Condition: has default `browsePathV2` + Criterion hasDefaultBrowsePathV2 = new Criterion(); + hasDefaultBrowsePathV2.setCondition(Condition.EQUAL); + hasDefaultBrowsePathV2.setField("browsePathV2"); + StringArray values = new StringArray(); + values.add(DEFAULT_BROWSE_PATH_V2); + hasDefaultBrowsePathV2.setValues(values); + hasDefaultBrowsePathV2.setValue(DEFAULT_BROWSE_PATH_V2); // not used, but required field? + + CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(hasDefaultBrowsePathV2); + + ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + conjunctiveCriterionArray.add(conjunctiveCriterion); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exception { BrowsePathsV2 browsePathsV2 = _entityService.buildDefaultBrowsePathV2(urn, true); log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); @@ -142,7 +182,7 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio _entityService.ingestProposal( proposal, auditStamp, - false + true ); } diff --git a/docker/datahub-upgrade/env/docker-without-neo4j.env b/docker/datahub-upgrade/env/docker-without-neo4j.env index c399f71b7b15c..04d888f076cd6 100644 --- a/docker/datahub-upgrade/env/docker-without-neo4j.env +++ b/docker/datahub-upgrade/env/docker-without-neo4j.env @@ -21,6 +21,7 @@ DATAHUB_GMS_PORT=8080 ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml BACKFILL_BROWSE_PATHS_V2=true +REPROCESS_DEFAULT_BROWSE_PATHS_V2=${REPROCESS_DEFAULT_BROWSE_PATHS_V2:-false} # Uncomment and set these to support SSL connection to Elasticsearch # ELASTICSEARCH_USE_SSL= diff --git a/docker/datahub-upgrade/env/docker.env b/docker/datahub-upgrade/env/docker.env index 491470406153b..b2a0d01e5d4ae 100644 --- a/docker/datahub-upgrade/env/docker.env +++ b/docker/datahub-upgrade/env/docker.env @@ -25,6 +25,7 @@ DATAHUB_GMS_PORT=8080 ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml BACKFILL_BROWSE_PATHS_V2=true +REPROCESS_DEFAULT_BROWSE_PATHS_V2=${REPROCESS_DEFAULT_BROWSE_PATHS_V2:-false} # Uncomment and set these to support SSL connection to Elasticsearch # ELASTICSEARCH_USE_SSL= diff --git 
a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index c4e5ee7fa0cae..774c4e17bee21 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -54,6 +54,8 @@ services: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-upgrade: image: acryldata/datahub-upgrade:debug + ports: + - ${DATAHUB_MAPPED_UPGRADE_DEBUG_PORT:-5003}:5003 build: context: datahub-upgrade dockerfile: Dockerfile @@ -63,6 +65,8 @@ services: - SKIP_ELASTICSEARCH_CHECK=false - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-dev} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} + - REPROCESS_DEFAULT_BROWSE_PATHS_V2=${REPROCESS_DEFAULT_BROWSE_PATHS_V2:-false} + - JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5003 volumes: - ../datahub-upgrade/build/libs/:/datahub/datahub-upgrade/bin/ - ../metadata-models/src/main/resources/:/datahub/datahub-gms/resources diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 3b6d02c83d0f0..c96baf37551b2 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -151,6 +151,7 @@ services: - DATAHUB_GMS_PORT=8080 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - BACKFILL_BROWSE_PATHS_V2=true + - REPROCESS_DEFAULT_BROWSE_PATHS_V2=false hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} labels: diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index e45bafc3da480..b1cb6c208a42d 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -144,6 +144,7 @@ services: - DATAHUB_GMS_PORT=8080 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - BACKFILL_BROWSE_PATHS_V2=true + - REPROCESS_DEFAULT_BROWSE_PATHS_V2=false hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} labels: diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 020ef5e9a97b9..ab5182bf98ae5 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -144,6 +144,7 @@ services: - DATAHUB_GMS_PORT=8080 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - BACKFILL_BROWSE_PATHS_V2=true + - REPROCESS_DEFAULT_BROWSE_PATHS_V2=false hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} labels: diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index 8adc2b9063b84..8a66521cbb522 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -151,6 +151,7 @@ services: - DATAHUB_GMS_PORT=8080 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - BACKFILL_BROWSE_PATHS_V2=true + - REPROCESS_DEFAULT_BROWSE_PATHS_V2=false hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} labels: diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java 
b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java index ceaf37a1289d9..db414d70603dc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java @@ -16,7 +16,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; -import org.javatuples.Quintet; +import org.javatuples.Septet; import org.javatuples.Sextet; import org.springframework.cache.Cache; import org.springframework.cache.CacheManager; @@ -154,8 +154,9 @@ public SearchResult getCachedSearchResults( batchSize, querySize -> getRawSearchResults(entityNames, query, filters, sortCriterion, querySize.getFrom(), querySize.getSize(), flags, facets), - querySize -> Sextet.with(entityNames, query, filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, facets, querySize), flags, enableCache).getSearchResults(from, size); + querySize -> Septet.with(entityNames, query, filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, flags != null ? toJsonString(flags) : null, + facets, querySize), flags, enableCache).getSearchResults(from, size); } @@ -175,7 +176,8 @@ public AutoCompleteResult getCachedAutoCompleteResults( if (enableCache(flags)) { try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); - Object cacheKey = Quintet.with(entityName, input, field, filters != null ? toJsonString(filters) : null, limit); + Object cacheKey = Sextet.with(entityName, input, field, filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, limit); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(AutoCompleteResult.class, json) : null; cacheAccess.stop(); @@ -210,7 +212,8 @@ public BrowseResult getCachedBrowseResults( if (enableCache(flags)) { try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "browse_cache_access").time(); - Object cacheKey = Quintet.with(entityName, path, filters != null ? toJsonString(filters) : null, from, size); + Object cacheKey = Sextet.with(entityName, path, filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, from, size); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(BrowseResult.class, json) : null; cacheAccess.stop(); @@ -247,9 +250,10 @@ public ScrollResult getCachedScrollResults( ScrollResult result; if (enableCache(flags)) { Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); - Object cacheKey = Sextet.with(entities, query, + Object cacheKey = Septet.with(entities, query, filters != null ? toJsonString(filters) : null, sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, scrollId, size); String json = cache.get(cacheKey, String.class); result = json != null ? 
toRecordTemplate(ScrollResult.class, json) : null; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index cbaf70ca22617..290e8c60deb00 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -157,7 +157,7 @@ private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadat @Nonnull @WithSpan private ScrollResult executeAndExtract(@Nonnull List entitySpecs, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, - @Nullable String scrollId, @Nonnull String keepAlive, int size) { + @Nullable String scrollId, @Nullable String keepAlive, int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well @@ -166,7 +166,7 @@ private ScrollResult executeAndExtract(@Nonnull List entitySpecs, @N .extractScrollResult(searchResponse, filter, scrollId, keepAlive, size, supportsPointInTime())); } catch (Exception e) { - log.error("Search query failed", e); + log.error("Search query failed: {}", searchRequest, e); throw new ESQueryException("Search query failed:", e); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 49571a60d5f21..0df6afd49c373 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -241,7 +241,9 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi BoolQueryBuilder filterQuery = getFilterQuery(filter); searchSourceBuilder.query(QueryBuilders.boolQuery().must(getQuery(input, finalSearchFlags.isFulltext())).filter(filterQuery)); - _aggregationQueryBuilder.getAggregations().forEach(searchSourceBuilder::aggregation); + if (!finalSearchFlags.isSkipAggregates()) { + _aggregationQueryBuilder.getAggregations().forEach(searchSourceBuilder::aggregation); + } if (!finalSearchFlags.isSkipHighlighting()) { searchSourceBuilder.highlighter(_highlights); } @@ -366,7 +368,7 @@ public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter @WithSpan public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Filter filter, @Nullable String scrollId, - @Nonnull String keepAlive, int size, boolean supportsPointInTime) { + @Nullable String keepAlive, int size, boolean supportsPointInTime) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; List resultList = getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); @@ -376,7 +378,7 @@ public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, if (searchHits.length == size) { Object[] sort = searchHits[searchHits.length - 1].getSortValues(); long expirationTimeMs = 0L; - if (supportsPointInTime) { + if (keepAlive != null && supportsPointInTime) { expirationTimeMs = TimeValue.parseTimeValue(keepAlive, 
"expirationTime").getMillis() + System.currentTimeMillis(); } nextScrollId = new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs).toScrollId(); diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index e9113d339e81d..a06891699607b 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -285,6 +285,8 @@ bootstrap: enabled: ${UPGRADE_DEFAULT_BROWSE_PATHS_ENABLED:false} # enable to run the upgrade to migrate legacy default browse paths to new ones backfillBrowsePathsV2: enabled: ${BACKFILL_BROWSE_PATHS_V2:false} # Enables running the backfill of browsePathsV2 upgrade step. There are concerns about the load of this step so hiding it behind a flag. Deprecating in favor of running through SystemUpdate + reprocessDefaultBrowsePathsV2: + enabled: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:false} # reprocess V2 browse paths which were set to the default: {"path":[{"id":"Default"}]} policies: file: ${BOOTSTRAP_POLICIES_FILE:classpath:boot/policies.json} # eg for local file diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 64f59780b887f..cbfeeaef860d3 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -193,7 +193,7 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable */ @Nonnull ScrollResult fullTextScroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int size, @Nullable SearchFlags searchFlags); + @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags); /** * Gets a list of documents that match given search request. 
The results are aggregated and filters are applied to the @@ -210,7 +210,7 @@ ScrollResult fullTextScroll(@Nonnull List entities, @Nonnull String inpu */ @Nonnull ScrollResult structuredScroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int size, @Nullable SearchFlags searchFlags); + @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags); /** * Max result size returned by the underlying search backend From e73e92699947084b5ecb1f5d3e0c5762dc446bbf Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Wed, 8 Nov 2023 12:32:41 +0530 Subject: [PATCH 071/792] feat(integration/fivetran): Fivetran connector integration (#9018) Co-authored-by: Harshal Sheth --- .../app/ingest/source/builder/constants.ts | 4 + .../app/ingest/source/builder/sources.json | 7 + datahub-web-react/src/images/fivetranlogo.png | Bin 0 -> 10230 bytes .../docs/sources/fivetran/fivetran_pre.md | 86 +++ .../docs/sources/fivetran/fivetran_recipe.yml | 43 ++ metadata-ingestion/setup.py | 3 + .../datahub/api/entities/datajob/datajob.py | 25 +- .../dataprocess/dataprocess_instance.py | 27 +- metadata-ingestion/src/datahub/emitter/mcp.py | 4 +- .../datahub/ingestion/api/source_helpers.py | 13 +- .../ingestion/source/fivetran/__init__.py | 0 .../ingestion/source/fivetran/config.py | 145 ++++ .../ingestion/source/fivetran/data_classes.py | 36 + .../ingestion/source/fivetran/fivetran.py | 289 ++++++++ .../source/fivetran/fivetran_log_api.py | 147 ++++ .../source/fivetran/fivetran_query.py | 76 ++ .../ingestion/source_config/sql/snowflake.py | 82 ++- .../integration/fivetran/fivetran_golden.json | 658 ++++++++++++++++++ .../integration/fivetran/test_fivetran.py | 192 +++++ .../main/resources/boot/data_platforms.json | 10 + 20 files changed, 1777 insertions(+), 70 deletions(-) create mode 100644 datahub-web-react/src/images/fivetranlogo.png create mode 100644 metadata-ingestion/docs/sources/fivetran/fivetran_pre.md create mode 100644 metadata-ingestion/docs/sources/fivetran/fivetran_recipe.yml create mode 100644 metadata-ingestion/src/datahub/ingestion/source/fivetran/__init__.py create mode 100644 metadata-ingestion/src/datahub/ingestion/source/fivetran/config.py create mode 100644 metadata-ingestion/src/datahub/ingestion/source/fivetran/data_classes.py create mode 100644 metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py create mode 100644 metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py create mode 100644 metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_query.py create mode 100644 metadata-ingestion/tests/integration/fivetran/fivetran_golden.json create mode 100644 metadata-ingestion/tests/integration/fivetran/test_fivetran.py diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts index dba8e8bb1dce6..fdb094d721304 100644 --- a/datahub-web-react/src/app/ingest/source/builder/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts @@ -29,6 +29,7 @@ import databricksLogo from '../../../../images/databrickslogo.png'; import verticaLogo from '../../../../images/verticalogo.png'; import mlflowLogo from '../../../../images/mlflowlogo.png'; import dynamodbLogo from '../../../../images/dynamodblogo.png'; +import 
fivetranLogo from '../../../../images/fivetranlogo.png';

 export const ATHENA = 'athena';
 export const ATHENA_URN = `urn:li:dataPlatform:${ATHENA}`;
@@ -105,6 +106,8 @@ export const DBT_CLOUD = 'dbt-cloud';
 export const DBT_CLOUD_URN = `urn:li:dataPlatform:dbt`;
 export const VERTICA = 'vertica';
 export const VERTICA_URN = `urn:li:dataPlatform:${VERTICA}`;
+export const FIVETRAN = 'fivetran';
+export const FIVETRAN_URN = `urn:li:dataPlatform:${FIVETRAN}`;

 export const PLATFORM_URN_TO_LOGO = {
   [ATHENA_URN]: athenaLogo,
@@ -138,6 +141,7 @@ export const PLATFORM_URN_TO_LOGO = {
   [SUPERSET_URN]: supersetLogo,
   [UNITY_CATALOG_URN]: databricksLogo,
   [VERTICA_URN]: verticaLogo,
+  [FIVETRAN_URN]: fivetranLogo,
 };

 export const SOURCE_TO_PLATFORM_URN = {
diff --git a/datahub-web-react/src/app/ingest/source/builder/sources.json b/datahub-web-react/src/app/ingest/source/builder/sources.json
index b18384909c33f..9619abebbd54e 100644
--- a/datahub-web-react/src/app/ingest/source/builder/sources.json
+++ b/datahub-web-react/src/app/ingest/source/builder/sources.json
@@ -216,6 +216,13 @@
         "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/vertica/",
         "recipe": "source:\n type: vertica\n config:\n # Coordinates\n host_port: localhost:5433\n # The name of the vertica database\n database: Database_Name\n # Credentials\n username: Vertica_User\n password: Vertica_Password\n\n include_tables: true\n include_views: true\n include_projections: true\n include_models: true\n include_view_lineage: true\n include_projection_lineage: true\n profiling:\n enabled: false\n stateful_ingestion:\n enabled: true "
     },
+    {
+        "urn": "urn:li:dataPlatform:fivetran",
+        "name": "fivetran",
+        "displayName": "Fivetran",
+        "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/fivetran/",
+        "recipe": "source:\n type: fivetran\n config:\n # Fivetran log connector destination server configurations\n fivetran_log_config:\n destination_platform: snowflake\n destination_config:\n # Coordinates\n account_id: snowflake_account_id\n warehouse: warehouse_name\n database: snowflake_db\n log_schema: fivetran_log_schema\n\n # Credentials\n username: ${SNOWFLAKE_USER}\n password: ${SNOWFLAKE_PASS}\n role: snowflake_role\n\n # Optional - filter for certain connector names instead of ingesting everything.\n # connector_patterns:\n # allow:\n # - connector_name\n\n # Optional -- This mapping is optional and only required to configure platform-instance for source\n # A mapping of Fivetran connector id to data platform instance\n # sources_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV\n\n # Optional -- This mapping is optional and only required to configure platform-instance for destination.\n # A mapping of Fivetran destination id to data platform instance\n # destination_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV"
+    },
     {
         "urn": "urn:li:dataPlatform:custom",
         "name": "custom",
diff --git a/datahub-web-react/src/images/fivetranlogo.png b/datahub-web-react/src/images/fivetranlogo.png
new file mode 100644
index 0000000000000000000000000000000000000000..d5c999ad2d86e99273971dd0d31a18fd5e94733b
GIT binary patch
literal 10230
(binary image data omitted)
diff --git a/metadata-ingestion/docs/sources/fivetran/fivetran_pre.md b/metadata-ingestion/docs/sources/fivetran/fivetran_pre.md
new file mode 100644
(opening lines of this new file were lost along with the image data)
" to role fivetran_datahub;
+
+// Grant access to view database and schema in which your log and metadata tables exist
+grant usage on DATABASE "<database>" to role fivetran_datahub;
+grant usage on SCHEMA "<database>"."<schema>" to role fivetran_datahub;
+
+// Grant access to execute select query on schema in which your log and metadata tables exist
+grant select on all tables in SCHEMA "<database>"."<schema>" to role fivetran_datahub;
+
+// Grant the fivetran_datahub to the snowflake user.
+grant role fivetran_datahub to user snowflake_user;
+```
+
+## Advanced Configurations
+
+### Working with Platform Instances
+If you have multiple instances of source/destination systems referred to in your `fivetran` setup, you need to configure platform instances for these systems in the `fivetran` recipe to generate correct lineage edges.
Refer the document [Working with Platform Instances](https://datahubproject.io/docs/platform-instances) to understand more about this. + +While configuration of platform instance for source system you need to provide connector id as key and for destination system provide destination id as key. + +#### Example - Multiple Postgres Source Connectors each reading from different postgres instance +```yml + # Map of connector source to platform instance + sources_to_platform_instance: + postgres_connector_id1: + platform_instance: cloud_postgres_instance + env: PROD + + postgres_connector_id2: + platform_instance: local_postgres_instance + env: DEV +``` + +#### Example - Multiple Snowflake Destinations each writing to different snowflake instance +```yml + # Map of destination to platform instance + destination_to_platform_instance: + snowflake_destination_id1: + platform_instance: prod_snowflake_instance + env: PROD + + snowflake_destination_id2: + platform_instance: dev_snowflake_instance + env: PROD +``` + + + diff --git a/metadata-ingestion/docs/sources/fivetran/fivetran_recipe.yml b/metadata-ingestion/docs/sources/fivetran/fivetran_recipe.yml new file mode 100644 index 0000000000000..7c654df59723c --- /dev/null +++ b/metadata-ingestion/docs/sources/fivetran/fivetran_recipe.yml @@ -0,0 +1,43 @@ +source: + type: fivetran + config: + # Fivetran log connector destination server configurations + fivetran_log_config: + destination_platform: snowflake + destination_config: + # Coordinates + account_id: "abc48144" + warehouse: "COMPUTE_WH" + database: "MY_SNOWFLAKE_DB" + log_schema: "FIVETRAN_LOG" + + # Credentials + username: "${SNOWFLAKE_USER}" + password: "${SNOWFLAKE_PASS}" + role: "snowflake_role" + + # Optional - filter for certain connector names instead of ingesting everything. + # connector_patterns: + # allow: + # - connector_name + + # Optional -- A mapping of the connector's all sources to its database. + # sources_to_database: + # connector_id: source_db + + # Optional -- This mapping is optional and only required to configure platform-instance for source + # A mapping of Fivetran connector id to data platform instance + # sources_to_platform_instance: + # connector_id: + # platform_instance: cloud_instance + # env: DEV + + # Optional -- This mapping is optional and only required to configure platform-instance for destination. + # A mapping of Fivetran destination id to data platform instance + # destination_to_platform_instance: + # destination_id: + # platform_instance: cloud_instance + # env: DEV + +sink: + # sink configs diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index afce8dcee840b..2392fce058061 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -395,6 +395,7 @@ "powerbi-report-server": powerbi_report_server, "vertica": sql_common | {"vertica-sqlalchemy-dialect[vertica-python]==0.0.8.1"}, "unity-catalog": databricks | sqllineage_lib, + "fivetran": snowflake_common, } # This is mainly used to exclude plugins from the Docker image. 
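Beyond the dependency extra added above, the hunks just below register `fivetran` in the CLI plugin list and as a `datahub.ingestion.source.plugins` entry point, which is what lets a recipe's `type: fivetran` resolve to `FivetranSource`. As a rough illustration of that mechanism (generic importlib.metadata usage on Python 3.10+, not DataHub's actual registry code):

```python
from importlib.metadata import entry_points

def load_source_plugin(name: str):
    # Resolve a source class registered under the entry-point group used in this diff.
    for ep in entry_points(group="datahub.ingestion.source.plugins"):
        if ep.name == name:
            return ep.load()  # e.g. "fivetran" -> the FivetranSource class
    raise KeyError(f"no ingestion source plugin named {name!r}")
```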
@@ -525,6 +526,7 @@ "nifi", "vertica", "mode", + "fivetran", "kafka-connect", ] if plugin @@ -629,6 +631,7 @@ "unity-catalog = datahub.ingestion.source.unity.source:UnityCatalogSource", "gcs = datahub.ingestion.source.gcs.gcs_source:GCSSource", "sql-queries = datahub.ingestion.source.sql_queries:SqlQueriesSource", + "fivetran = datahub.ingestion.source.fivetran.fivetran:FivetranSource", ], "datahub.ingestion.transformer.plugins": [ "simple_remove_dataset_ownership = datahub.ingestion.transformer.remove_dataset_ownership:SimpleRemoveDatasetOwnership", diff --git a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py index 0face6415bacc..6c42e830e223b 100644 --- a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py +++ b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py @@ -100,7 +100,9 @@ def generate_tags_aspect(self) -> Iterable[GlobalTagsClass]: ) return [tags] - def generate_mcp(self) -> Iterable[MetadataChangeProposalWrapper]: + def generate_mcp( + self, materialize_iolets: bool = True + ) -> Iterable[MetadataChangeProposalWrapper]: mcp = MetadataChangeProposalWrapper( entityUrn=str(self.urn), aspect=DataJobInfoClass( @@ -113,7 +115,9 @@ def generate_mcp(self) -> Iterable[MetadataChangeProposalWrapper]: ) yield mcp - yield from self.generate_data_input_output_mcp() + yield from self.generate_data_input_output_mcp( + materialize_iolets=materialize_iolets + ) for owner in self.generate_ownership_aspect(): mcp = MetadataChangeProposalWrapper( @@ -144,7 +148,9 @@ def emit( for mcp in self.generate_mcp(): emitter.emit(mcp, callback) - def generate_data_input_output_mcp(self) -> Iterable[MetadataChangeProposalWrapper]: + def generate_data_input_output_mcp( + self, materialize_iolets: bool + ) -> Iterable[MetadataChangeProposalWrapper]: mcp = MetadataChangeProposalWrapper( entityUrn=str(self.urn), aspect=DataJobInputOutputClass( @@ -157,10 +163,9 @@ def generate_data_input_output_mcp(self) -> Iterable[MetadataChangeProposalWrapp yield mcp # Force entity materialization - for iolet in self.inlets + self.outlets: - mcp = MetadataChangeProposalWrapper( - entityUrn=str(iolet), - aspect=StatusClass(removed=False), - ) - - yield mcp + if materialize_iolets: + for iolet in self.inlets + self.outlets: + yield MetadataChangeProposalWrapper( + entityUrn=str(iolet), + aspect=StatusClass(removed=False), + ) diff --git a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py index cf6080c7072e6..2f07e4a112f93 100644 --- a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py +++ b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py @@ -220,12 +220,10 @@ def emit_process_end( self._emit_mcp(mcp, emitter, callback) def generate_mcp( - self, created_ts_millis: Optional[int] = None + self, created_ts_millis: Optional[int] = None, materialize_iolets: bool = True ) -> Iterable[MetadataChangeProposalWrapper]: - """ - Generates mcps from the object - :rtype: Iterable[MetadataChangeProposalWrapper] - """ + """Generates mcps from the object""" + mcp = MetadataChangeProposalWrapper( entityUrn=str(self.urn), aspect=DataProcessInstanceProperties( @@ -253,7 +251,7 @@ def generate_mcp( ) yield mcp - yield from self.generate_inlet_outlet_mcp() + yield from self.generate_inlet_outlet_mcp(materialize_iolets=materialize_iolets) @staticmethod def _emit_mcp( @@ -329,7 
+327,9 @@ def from_dataflow(dataflow: DataFlow, id: str) -> "DataProcessInstance":
         dpi._template_object = dataflow
         return dpi

-    def generate_inlet_outlet_mcp(self) -> Iterable[MetadataChangeProposalWrapper]:
+    def generate_inlet_outlet_mcp(
+        self, materialize_iolets: bool
+    ) -> Iterable[MetadataChangeProposalWrapper]:
         if self.inlets:
             mcp = MetadataChangeProposalWrapper(
                 entityUrn=str(self.urn),
@@ -349,10 +349,9 @@ def generate_inlet_outlet_mcp(self) -> Iterable[MetadataChangeProposalWrapper]:
             yield mcp

         # Force entity materialization
-        for iolet in self.inlets + self.outlets:
-            mcp = MetadataChangeProposalWrapper(
-                entityUrn=str(iolet),
-                aspect=StatusClass(removed=False),
-            )
-
-            yield mcp
+        if materialize_iolets:
+            for iolet in self.inlets + self.outlets:
+                yield MetadataChangeProposalWrapper(
+                    entityUrn=str(iolet),
+                    aspect=StatusClass(removed=False),
+                )
diff --git a/metadata-ingestion/src/datahub/emitter/mcp.py b/metadata-ingestion/src/datahub/emitter/mcp.py
index 9085ac152ea0b..d6aa695665e4e 100644
--- a/metadata-ingestion/src/datahub/emitter/mcp.py
+++ b/metadata-ingestion/src/datahub/emitter/mcp.py
@@ -240,7 +240,7 @@ def from_obj_require_wrapper(
         return mcp

     def as_workunit(
-        self, *, treat_errors_as_warnings: bool = False
+        self, *, treat_errors_as_warnings: bool = False, is_primary_source: bool = True
     ) -> "MetadataWorkUnit":
         from datahub.ingestion.api.workunit import MetadataWorkUnit

@@ -254,10 +254,12 @@ def as_workunit(
                 id=f"{self.entityUrn}-{self.aspectName}-{ts}",
                 mcp=self,
                 treat_errors_as_warnings=treat_errors_as_warnings,
+                is_primary_source=is_primary_source,
             )

         return MetadataWorkUnit(
             id=f"{self.entityUrn}-{self.aspectName}",
             mcp=self,
             treat_errors_as_warnings=treat_errors_as_warnings,
+            is_primary_source=is_primary_source,
         )
diff --git a/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py b/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py
index 2ce9e07bc57bc..fae260226195c 100644
--- a/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py
+++ b/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py
@@ -17,6 +17,7 @@
 from datahub.configuration.time_window_config import BaseTimeWindowConfig
 from datahub.emitter.mce_builder import make_dataplatform_instance_urn
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
+from datahub.emitter.mcp_builder import entity_supports_aspect
 from datahub.ingestion.api.workunit import MetadataWorkUnit
 from datahub.metadata.schema_classes import (
     BrowsePathEntryClass,
@@ -64,9 +65,9 @@ def auto_status_aspect(
     """
     For all entities that don't have a status aspect, add one with removed set to false.
     """
-
     all_urns: Set[str] = set()
     status_urns: Set[str] = set()
+    skip_urns: Set[str] = set()
     for wu in stream:
         urn = wu.get_urn()
         all_urns.add(urn)
@@ -89,9 +90,17 @@
         else:
             raise ValueError(f"Unexpected type {type(wu.metadata)}")

+        if not isinstance(
+            wu.metadata, MetadataChangeEventClass
+        ) and not entity_supports_aspect(wu.metadata.entityType, StatusClass):
+            # If an entity type does not support the 'status' aspect, skip adding it for that entity.
+            # For example, dataProcessInstance does not support the status aspect.
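+            # (The isinstance guard above matters: MCEs do not carry an entityType
+            # attribute, so only MCP-style workunits are checked here.)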
+            # If it is not skipped, the server raises: java.lang.RuntimeException: Unknown aspect status for entity dataProcessInstance
+            skip_urns.add(urn)
+
         yield wu

-    for urn in sorted(all_urns - status_urns):
+    for urn in sorted(all_urns - status_urns - skip_urns):
         yield MetadataChangeProposalWrapper(
             entityUrn=urn,
             aspect=StatusClass(removed=False),
diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/__init__.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/config.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/config.py
new file mode 100644
index 0000000000000..b0843182c5cac
--- /dev/null
+++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/config.py
@@ -0,0 +1,145 @@
+import logging
+from dataclasses import dataclass, field as dataclass_field
+from typing import Dict, List, Optional
+
+import pydantic
+from pydantic import Field, root_validator
+
+from datahub.configuration.common import AllowDenyPattern, ConfigModel
+from datahub.configuration.source_common import DEFAULT_ENV, DatasetSourceConfigMixin
+from datahub.ingestion.source.state.stale_entity_removal_handler import (
+    StaleEntityRemovalSourceReport,
+    StatefulStaleMetadataRemovalConfig,
+)
+from datahub.ingestion.source.state.stateful_ingestion_base import (
+    StatefulIngestionConfigBase,
+)
+from datahub.ingestion.source_config.sql.snowflake import BaseSnowflakeConfig
+
+logger = logging.getLogger(__name__)
+
+
+class Constant:
+    """
+    Keys used in the fivetran plugin.
+    """
+
+    ORCHESTRATOR = "fivetran"
+    # Table column names
+    SOURCE_SCHEMA_NAME = "source_schema_name"
+    SOURCE_TABLE_NAME = "source_table_name"
+    SOURCE_TABLE_ID = "source_table_id"
+    SOURCE_COLUMN_NAME = "source_column_name"
+    DESTINATION_SCHEMA_NAME = "destination_schema_name"
+    DESTINATION_TABLE_NAME = "destination_table_name"
+    DESTINATION_TABLE_ID = "destination_table_id"
+    DESTINATION_COLUMN_NAME = "destination_column_name"
+    SYNC_ID = "sync_id"
+    MESSAGE_DATA = "message_data"
+    TIME_STAMP = "time_stamp"
+    STATUS = "status"
+    USER_ID = "user_id"
+    GIVEN_NAME = "given_name"
+    FAMILY_NAME = "family_name"
+    CONNECTOR_ID = "connector_id"
+    CONNECTOR_NAME = "connector_name"
+    CONNECTOR_TYPE_ID = "connector_type_id"
+    PAUSED = "paused"
+    SYNC_FREQUENCY = "sync_frequency"
+    DESTINATION_ID = "destination_id"
+    CONNECTING_USER_ID = "connecting_user_id"
+    # Job status constants
+    SUCCESSFUL = "SUCCESSFUL"
+    FAILURE_WITH_TASK = "FAILURE_WITH_TASK"
+    CANCELED = "CANCELED"
+
+
+KNOWN_DATA_PLATFORM_MAPPING = {
+    "postgres": "postgres",
+    "snowflake": "snowflake",
+}
+
+
+class DestinationConfig(BaseSnowflakeConfig):
+    database: str = Field(description="The fivetran connector log database.")
+    log_schema: str = Field(description="The fivetran connector log schema.")
+
+
+class FivetranLogConfig(ConfigModel):
+    destination_platform: str = pydantic.Field(
+        default="snowflake",
+        description="The destination platform where fivetran connector log tables are dumped.",
+    )
+    destination_config: Optional[DestinationConfig] = pydantic.Field(
+        default=None,
+        description="If destination platform is 'snowflake', provide snowflake configuration.",
+    )
+
+    @root_validator(pre=True)
+    def validate_destination_platform_and_config(cls, values: Dict) -> Dict:
+        destination_platform = values.get("destination_platform", "snowflake")
+        if destination_platform == "snowflake":
+            if "destination_config" not in values:
+                raise ValueError(
"If destination platform is 'snowflake', user must provide snowflake destination configuration in the recipe." + ) + else: + raise ValueError( + f"Destination platform '{destination_platform}' is not yet supported." + ) + return values + + +@dataclass +class FivetranSourceReport(StaleEntityRemovalSourceReport): + connectors_scanned: int = 0 + filtered_connectors: List[str] = dataclass_field(default_factory=list) + + def report_connectors_scanned(self, count: int = 1) -> None: + self.connectors_scanned += count + + def report_connectors_dropped(self, model: str) -> None: + self.filtered_connectors.append(model) + + +class PlatformDetail(ConfigModel): + platform_instance: Optional[str] = pydantic.Field( + default=None, + description="The instance of the platform that all assets produced by this recipe belong to", + ) + env: str = pydantic.Field( + default=DEFAULT_ENV, + description="The environment that all assets produced by DataHub platform ingestion source belong to", + ) + + +class FivetranSourceConfig(StatefulIngestionConfigBase, DatasetSourceConfigMixin): + fivetran_log_config: FivetranLogConfig = pydantic.Field( + description="Fivetran log connector destination server configurations.", + ) + connector_patterns: AllowDenyPattern = Field( + default=AllowDenyPattern.allow_all(), + description="Regex patterns for connectors to filter in ingestion.", + ) + include_column_lineage: bool = Field( + default=True, + description="Populates table->table column lineage.", + ) + sources_to_database: Dict[str, str] = pydantic.Field( + default={}, + description="A mapping of the connector's all sources to its database. Use connector id as key.", + ) + # Configuration for stateful ingestion + stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = pydantic.Field( + default=None, description="Airbyte Stateful Ingestion Config." + ) + # Fivetran connector all sources to platform instance mapping + sources_to_platform_instance: Dict[str, PlatformDetail] = pydantic.Field( + default={}, + description="A mapping of the connector's all sources dataset to platform instance. Use connector id as key.", + ) + # Fivetran destination to platform instance mapping + destination_to_platform_instance: Dict[str, PlatformDetail] = pydantic.Field( + default={}, + description="A mapping of destination dataset to platform instance. 
Use destination id as key.", + ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/data_classes.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/data_classes.py new file mode 100644 index 0000000000000..82bb5f3467c2a --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/data_classes.py @@ -0,0 +1,36 @@ +from dataclasses import dataclass +from typing import List + + +@dataclass +class ColumnLineage: + source_column: str + destination_column: str + + +@dataclass +class TableLineage: + source_table: str + destination_table: str + column_lineage: List[ColumnLineage] + + +@dataclass +class Connector: + connector_id: str + connector_name: str + connector_type: str + paused: bool + sync_frequency: int + destination_id: str + user_name: str + table_lineage: List[TableLineage] + jobs: List["Job"] + + +@dataclass +class Job: + job_id: str + start_time: int + end_time: int + status: str diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py new file mode 100644 index 0000000000000..c0395b4e4e796 --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py @@ -0,0 +1,289 @@ +import logging +from typing import Dict, Iterable, List, Optional + +import datahub.emitter.mce_builder as builder +from datahub.api.entities.datajob import DataFlow, DataJob +from datahub.api.entities.dataprocess.dataprocess_instance import ( + DataProcessInstance, + InstanceRunResult, +) +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.api.decorators import ( + SourceCapability, + SupportStatus, + capability, + config_class, + platform_name, + support_status, +) +from datahub.ingestion.api.source import MetadataWorkUnitProcessor, Source, SourceReport +from datahub.ingestion.api.workunit import MetadataWorkUnit +from datahub.ingestion.source.fivetran.config import ( + KNOWN_DATA_PLATFORM_MAPPING, + Constant, + FivetranSourceConfig, + FivetranSourceReport, + PlatformDetail, +) +from datahub.ingestion.source.fivetran.data_classes import Connector, Job +from datahub.ingestion.source.fivetran.fivetran_log_api import FivetranLogAPI +from datahub.ingestion.source.state.stale_entity_removal_handler import ( + StaleEntityRemovalHandler, +) +from datahub.ingestion.source.state.stateful_ingestion_base import ( + StatefulIngestionSourceBase, +) +from datahub.metadata.com.linkedin.pegasus2avro.dataset import ( + FineGrainedLineage, + FineGrainedLineageDownstreamType, + FineGrainedLineageUpstreamType, +) +from datahub.metadata.schema_classes import StatusClass +from datahub.utilities.urns.data_flow_urn import DataFlowUrn +from datahub.utilities.urns.dataset_urn import DatasetUrn + +# Logger instance +logger = logging.getLogger(__name__) + + +@platform_name("Fivetran") +@config_class(FivetranSourceConfig) +@support_status(SupportStatus.INCUBATING) +@capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default") +@capability( + SourceCapability.LINEAGE_FINE, + "Enabled by default, can be disabled via configuration `include_column_lineage`", +) +class FivetranSource(StatefulIngestionSourceBase): + """ + This plugin extracts fivetran users, connectors, destinations and sync history. + This plugin is in beta and has only been tested on Snowflake connector. 
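+    It reads connectors, table/column lineage, and sync run history from the
+    Fivetran log tables in the configured destination, and models each connector
+    as a DataHub dataflow/datajob with lineage to source and destination datasets.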
+ """ + + config: FivetranSourceConfig + report: FivetranSourceReport + platform: str = "fivetran" + + def __init__(self, config: FivetranSourceConfig, ctx: PipelineContext): + super(FivetranSource, self).__init__(config, ctx) + self.config = config + self.report = FivetranSourceReport() + + self.audit_log = FivetranLogAPI(self.config.fivetran_log_config) + + # Create and register the stateful ingestion use-case handler. + self.stale_entity_removal_handler = StaleEntityRemovalHandler.create( + self, self.config, self.ctx + ) + + def _extend_lineage(self, connector: Connector, datajob: DataJob) -> None: + input_dataset_urn_list: List[DatasetUrn] = [] + output_dataset_urn_list: List[DatasetUrn] = [] + fine_grained_lineage: List[FineGrainedLineage] = [] + + source_platform_detail: PlatformDetail = PlatformDetail() + destination_platform_detail: PlatformDetail = PlatformDetail() + # Get platform details for connector source + source_platform_detail = self.config.sources_to_platform_instance.get( + connector.connector_id, PlatformDetail() + ) + + # Get platform details for destination + destination_platform_detail = self.config.destination_to_platform_instance.get( + connector.destination_id, PlatformDetail() + ) + + # Get database for connector source + # TODO: Once Fivetran exposes this, we shouldn't ask for it via config. + source_database: Optional[str] = self.config.sources_to_database.get( + connector.connector_id + ) + + if connector.connector_type in KNOWN_DATA_PLATFORM_MAPPING: + source_platform = KNOWN_DATA_PLATFORM_MAPPING[connector.connector_type] + else: + source_platform = connector.connector_type + logger.info( + f"Fivetran connector source type: {connector.connector_type} is not supported to mapped with Datahub dataset entity." + ) + + for table_lineage in connector.table_lineage: + input_dataset_urn = DatasetUrn.create_from_ids( + platform_id=source_platform, + table_name=f"{source_database.lower()}.{table_lineage.source_table}" + if source_database + else table_lineage.source_table, + env=source_platform_detail.env, + platform_instance=source_platform_detail.platform_instance, + ) + input_dataset_urn_list.append(input_dataset_urn) + + output_dataset_urn: Optional[DatasetUrn] = None + if self.audit_log.fivetran_log_database: + output_dataset_urn = DatasetUrn.create_from_ids( + platform_id=self.config.fivetran_log_config.destination_platform, + table_name=f"{self.audit_log.fivetran_log_database.lower()}.{table_lineage.destination_table}", + env=destination_platform_detail.env, + platform_instance=destination_platform_detail.platform_instance, + ) + output_dataset_urn_list.append(output_dataset_urn) + + if self.config.include_column_lineage: + for column_lineage in table_lineage.column_lineage: + fine_grained_lineage.append( + FineGrainedLineage( + upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, + upstreams=[ + builder.make_schema_field_urn( + str(input_dataset_urn), + column_lineage.source_column, + ) + ] + if input_dataset_urn + else [], + downstreamType=FineGrainedLineageDownstreamType.FIELD, + downstreams=[ + builder.make_schema_field_urn( + str(output_dataset_urn), + column_lineage.destination_column, + ) + ] + if output_dataset_urn + else [], + ) + ) + + datajob.inlets.extend(input_dataset_urn_list) + datajob.outlets.extend(output_dataset_urn_list) + datajob.fine_grained_lineages.extend(fine_grained_lineage) + return None + + def _generate_dataflow_from_connector(self, connector: Connector) -> DataFlow: + return DataFlow( + orchestrator=Constant.ORCHESTRATOR, + 
id=connector.connector_id, + env=self.config.env, + name=connector.connector_name, + platform_instance=self.config.platform_instance, + ) + + def _generate_datajob_from_connector(self, connector: Connector) -> DataJob: + dataflow_urn = DataFlowUrn.create_from_ids( + orchestrator=Constant.ORCHESTRATOR, + flow_id=connector.connector_id, + env=self.config.env, + platform_instance=self.config.platform_instance, + ) + datajob = DataJob( + id=connector.connector_id, + flow_urn=dataflow_urn, + name=connector.connector_name, + owners={connector.user_name}, + ) + + job_property_bag: Dict[str, str] = {} + allowed_connection_keys = [ + Constant.PAUSED, + Constant.SYNC_FREQUENCY, + Constant.DESTINATION_ID, + ] + for key in allowed_connection_keys: + if hasattr(connector, key) and getattr(connector, key) is not None: + job_property_bag[key] = repr(getattr(connector, key)) + datajob.properties = job_property_bag + + # Map connector source and destination table with dataset entity + # Also extend the fine grained lineage of column if include_column_lineage is True + self._extend_lineage(connector=connector, datajob=datajob) + + # TODO: Add fine grained lineages of dataset after FineGrainedLineageDownstreamType.DATASET enabled + + return datajob + + def _generate_dpi_from_job(self, job: Job, datajob: DataJob) -> DataProcessInstance: + return DataProcessInstance.from_datajob( + datajob=datajob, + id=job.job_id, + clone_inlets=True, + clone_outlets=True, + ) + + def _get_dpi_workunits( + self, job: Job, dpi: DataProcessInstance + ) -> Iterable[MetadataWorkUnit]: + status_result_map: Dict[str, InstanceRunResult] = { + Constant.SUCCESSFUL: InstanceRunResult.SUCCESS, + Constant.FAILURE_WITH_TASK: InstanceRunResult.FAILURE, + Constant.CANCELED: InstanceRunResult.SKIPPED, + } + if job.status not in status_result_map: + logger.debug( + f"Status should be either SUCCESSFUL, FAILURE_WITH_TASK or CANCELED and it was " + f"{job.status}" + ) + return [] + result = status_result_map[job.status] + start_timestamp_millis = job.start_time * 1000 + for mcp in dpi.generate_mcp( + created_ts_millis=start_timestamp_millis, materialize_iolets=False + ): + yield mcp.as_workunit() + for mcp in dpi.start_event_mcp(start_timestamp_millis): + yield mcp.as_workunit() + for mcp in dpi.end_event_mcp( + end_timestamp_millis=job.end_time * 1000, + result=result, + result_type=Constant.ORCHESTRATOR, + ): + yield mcp.as_workunit() + + def _get_connector_workunits( + self, connector: Connector + ) -> Iterable[MetadataWorkUnit]: + self.report.report_connectors_scanned() + # Create dataflow entity with same name as connector name + dataflow = self._generate_dataflow_from_connector(connector) + for mcp in dataflow.generate_mcp(): + yield mcp.as_workunit() + + # Map Fivetran's connector entity with Datahub's datajob entity + datajob = self._generate_datajob_from_connector(connector) + for mcp in datajob.generate_mcp(materialize_iolets=True): + if mcp.entityType == "dataset" and isinstance(mcp.aspect, StatusClass): + # While we "materialize" the referenced datasets, we don't want them + # to be tracked by stateful ingestion. 
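+                # Emitting these with is_primary_source=False keeps the
+                # stale-entity removal handler from treating the referenced
+                # datasets as entities produced by this source.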
+                yield mcp.as_workunit(is_primary_source=False)
+            else:
+                yield mcp.as_workunit()
+
+        # Map Fivetran's job/sync history entity to DataHub's data process entity
+        for job in connector.jobs:
+            dpi = self._generate_dpi_from_job(job, datajob)
+            yield from self._get_dpi_workunits(job, dpi)
+
+    @classmethod
+    def create(cls, config_dict: dict, ctx: PipelineContext) -> Source:
+        config = FivetranSourceConfig.parse_obj(config_dict)
+        return cls(config, ctx)
+
+    def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
+        return [
+            *super().get_workunit_processors(),
+            self.stale_entity_removal_handler.workunit_processor,
+        ]
+
+    def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
+        """
+        The DataHub ingestion framework invokes this method.
+        """
+        logger.info("Fivetran plugin execution is started")
+        connectors = self.audit_log.get_connectors_list()
+        for connector in connectors:
+            if not self.config.connector_patterns.allowed(connector.connector_name):
+                self.report.report_connectors_dropped(connector.connector_name)
+                continue
+            logger.info(f"Processing connector id: {connector.connector_id}")
+            yield from self._get_connector_workunits(connector)
+
+    def get_report(self) -> SourceReport:
+        return self.report
diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py
new file mode 100644
index 0000000000000..d5d146559d918
--- /dev/null
+++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py
@@ -0,0 +1,147 @@
+import json
+import logging
+from typing import Any, Dict, List, Optional
+
+from sqlalchemy import create_engine
+
+from datahub.ingestion.source.fivetran.config import Constant, FivetranLogConfig
+from datahub.ingestion.source.fivetran.data_classes import (
+    ColumnLineage,
+    Connector,
+    Job,
+    TableLineage,
+)
+from datahub.ingestion.source.fivetran.fivetran_query import FivetranLogQuery
+
+logger: logging.Logger = logging.getLogger(__name__)
+
+
+class FivetranLogAPI:
+    def __init__(self, fivetran_log_config: FivetranLogConfig) -> None:
+        self.fivetran_log_database: Optional[str] = None
+        self.fivetran_log_config = fivetran_log_config
+        self.engine = self._get_log_destination_engine()
+
+    def _get_log_destination_engine(self) -> Any:
+        destination_platform = self.fivetran_log_config.destination_platform
+        engine = None
+        # For every destination, create a SQLAlchemy engine,
+        # select the database and schema, and set the fivetran_log_database class variable.
+        if destination_platform == "snowflake":
+            snowflake_destination_config = self.fivetran_log_config.destination_config
+            if snowflake_destination_config is not None:
+                engine = create_engine(
+                    snowflake_destination_config.get_sql_alchemy_url(),
+                    **snowflake_destination_config.get_options(),
+                )
+                engine.execute(
+                    FivetranLogQuery.use_schema(
+                        snowflake_destination_config.database,
+                        snowflake_destination_config.log_schema,
+                    )
+                )
+                self.fivetran_log_database = snowflake_destination_config.database
+        return engine
+
+    def _query(self, query: str) -> List[Dict]:
+        logger.debug("Query: {}".format(query))
+        resp = self.engine.execute(query)
+        return [row for row in resp]
+
+    def _get_table_lineage(self, connector_id: str) -> List[TableLineage]:
+        table_lineage_result = self._query(
+            FivetranLogQuery.get_table_lineage_query(connector_id=connector_id)
+        )
+        table_lineage_list: List[TableLineage] = []
+        for table_lineage in table_lineage_result:
+            column_lineage_result =
self._query( + FivetranLogQuery.get_column_lineage_query( + source_table_id=table_lineage[Constant.SOURCE_TABLE_ID], + destination_table_id=table_lineage[Constant.DESTINATION_TABLE_ID], + ) + ) + column_lineage_list: List[ColumnLineage] = [ + ColumnLineage( + source_column=column_lineage[Constant.SOURCE_COLUMN_NAME], + destination_column=column_lineage[Constant.DESTINATION_COLUMN_NAME], + ) + for column_lineage in column_lineage_result + ] + table_lineage_list.append( + TableLineage( + source_table=f"{table_lineage[Constant.SOURCE_SCHEMA_NAME]}.{table_lineage[Constant.SOURCE_TABLE_NAME]}", + destination_table=f"{table_lineage[Constant.DESTINATION_SCHEMA_NAME]}.{table_lineage[Constant.DESTINATION_TABLE_NAME]}", + column_lineage=column_lineage_list, + ) + ) + + return table_lineage_list + + def _get_jobs_list(self, connector_id: str) -> List[Job]: + jobs: List[Job] = [] + sync_start_logs = { + row[Constant.SYNC_ID]: row + for row in self._query( + FivetranLogQuery.get_sync_start_logs_query(connector_id=connector_id) + ) + } + sync_end_logs = { + row[Constant.SYNC_ID]: row + for row in self._query( + FivetranLogQuery.get_sync_end_logs_query(connector_id=connector_id) + ) + } + for sync_id in sync_start_logs.keys(): + if sync_end_logs.get(sync_id) is None: + # If no sync-end event log for this sync id that means sync is still in progress + continue + + message_data = json.loads(sync_end_logs[sync_id][Constant.MESSAGE_DATA]) + if isinstance(message_data, str): + # Sometimes message_data contains json string inside string + # Ex: '"{\"status\":\"SUCCESSFUL\"}"' + # Hence, need to do json loads twice. + message_data = json.loads(message_data) + + jobs.append( + Job( + job_id=sync_id, + start_time=round( + sync_start_logs[sync_id][Constant.TIME_STAMP].timestamp() + ), + end_time=round( + sync_end_logs[sync_id][Constant.TIME_STAMP].timestamp() + ), + status=message_data[Constant.STATUS], + ) + ) + return jobs + + def _get_user_name(self, user_id: str) -> str: + user_details = self._query(FivetranLogQuery.get_user_query(user_id=user_id))[0] + return ( + f"{user_details[Constant.GIVEN_NAME]} {user_details[Constant.FAMILY_NAME]}" + ) + + def get_connectors_list(self) -> List[Connector]: + connectors: List[Connector] = [] + connector_list = self._query(FivetranLogQuery.get_connectors_query()) + for connector in connector_list: + connectors.append( + Connector( + connector_id=connector[Constant.CONNECTOR_ID], + connector_name=connector[Constant.CONNECTOR_NAME], + connector_type=connector[Constant.CONNECTOR_TYPE_ID], + paused=connector[Constant.PAUSED], + sync_frequency=connector[Constant.SYNC_FREQUENCY], + destination_id=connector[Constant.DESTINATION_ID], + user_name=self._get_user_name( + connector[Constant.CONNECTING_USER_ID] + ), + table_lineage=self._get_table_lineage( + connector[Constant.CONNECTOR_ID] + ), + jobs=self._get_jobs_list(connector[Constant.CONNECTOR_ID]), + ) + ) + return connectors diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_query.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_query.py new file mode 100644 index 0000000000000..4f52fcd5d884f --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_query.py @@ -0,0 +1,76 @@ +class FivetranLogQuery: + @staticmethod + def use_schema(db_name: str, schema_name: str) -> str: + return f'use schema "{db_name}"."{schema_name}"' + + @staticmethod + def get_connectors_query() -> str: + return """ + SELECT connector_id as "CONNECTOR_ID", + connecting_user_id 
as "CONNECTING_USER_ID", + connector_type_id as "CONNECTOR_TYPE_ID", + connector_name as "CONNECTOR_NAME", + paused as "PAUSED", + sync_frequency as "SYNC_FREQUENCY", + destination_id as "DESTINATION_ID" + FROM CONNECTOR + WHERE _fivetran_deleted = FALSE""" + + @staticmethod + def get_user_query(user_id: str) -> str: + return f""" + SELECT id as "USER_ID", + given_name as "GIVEN_NAME", + family_name as "FAMILY_NAME" + FROM USER + WHERE id = '{user_id}'""" + + @staticmethod + def get_sync_start_logs_query( + connector_id: str, + ) -> str: + return f""" + SELECT time_stamp as "TIME_STAMP", + sync_id as "SYNC_ID" + FROM LOG + WHERE message_event = 'sync_start' + and connector_id = '{connector_id}' order by time_stamp""" + + @staticmethod + def get_sync_end_logs_query(connector_id: str) -> str: + return f""" + SELECT time_stamp as "TIME_STAMP", + sync_id as "SYNC_ID", + message_data as "MESSAGE_DATA" + FROM LOG + WHERE message_event = 'sync_end' + and connector_id = '{connector_id}' order by time_stamp""" + + @staticmethod + def get_table_lineage_query(connector_id: str) -> str: + return f""" + SELECT stm.id as "SOURCE_TABLE_ID", + stm.name as "SOURCE_TABLE_NAME", + ssm.name as "SOURCE_SCHEMA_NAME", + dtm.id as "DESTINATION_TABLE_ID", + dtm.name as "DESTINATION_TABLE_NAME", + dsm.name as "DESTINATION_SCHEMA_NAME" + FROM table_lineage as tl + JOIN source_table_metadata as stm on tl.source_table_id = stm.id + JOIN destination_table_metadata as dtm on tl.destination_table_id = dtm.id + JOIN source_schema_metadata as ssm on stm.schema_id = ssm.id + JOIN destination_schema_metadata as dsm on dtm.schema_id = dsm.id + WHERE stm.connector_id = '{connector_id}'""" + + @staticmethod + def get_column_lineage_query( + source_table_id: str, destination_table_id: str + ) -> str: + return f""" + SELECT scm.name as "SOURCE_COLUMN_NAME", + dcm.name as "DESTINATION_COLUMN_NAME" + FROM column_lineage as cl + JOIN source_column_metadata as scm on + (cl.source_column_id = scm.id and scm.table_id = {source_table_id}) + JOIN destination_column_metadata as dcm on + (cl.destination_column_id = dcm.id and dcm.table_id = {destination_table_id})""" diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py index c3e8c175f1de5..9fc697018ecd6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py @@ -12,7 +12,7 @@ OAUTH_AUTHENTICATOR, ) -from datahub.configuration.common import AllowDenyPattern +from datahub.configuration.common import AllowDenyPattern, ConfigModel from datahub.configuration.oauth import OAuthConfiguration, OAuthIdentityProvider from datahub.configuration.time_window_config import BaseTimeWindowConfig from datahub.configuration.validate_field_rename import pydantic_renamed_field @@ -42,9 +42,14 @@ SNOWFLAKE_HOST_SUFFIX = ".snowflakecomputing.com" -class BaseSnowflakeConfig(BaseTimeWindowConfig): +class BaseSnowflakeConfig(ConfigModel): # Note: this config model is also used by the snowflake-usage source. + options: dict = pydantic.Field( + default_factory=dict, + description="Any options specified here will be passed to [SQLAlchemy.create_engine](https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine) as kwargs.", + ) + scheme: str = "snowflake" username: Optional[str] = pydantic.Field( default=None, description="Snowflake username." 
@@ -82,14 +87,6 @@ class BaseSnowflakeConfig(BaseTimeWindowConfig): default=None, description="Snowflake warehouse." ) role: Optional[str] = pydantic.Field(default=None, description="Snowflake role.") - include_table_lineage: bool = pydantic.Field( - default=True, - description="If enabled, populates the snowflake table-to-table and s3-to-snowflake table lineage. Requires appropriate grants given to the role and Snowflake Enterprise Edition or above.", - ) - include_view_lineage: bool = pydantic.Field( - default=True, - description="If enabled, populates the snowflake view->table and table->view lineages. Requires appropriate grants given to the role, and include_table_lineage to be True. view->table lineage requires Snowflake Enterprise Edition or above.", - ) connect_args: Optional[Dict[str, Any]] = pydantic.Field( default=None, description="Connect args to pass to Snowflake SqlAlchemy driver", @@ -166,18 +163,6 @@ def _check_oauth_config(oauth_config: Optional[OAuthConfiguration]) -> None: "but should be set when using use_certificate false for oauth_config" ) - @pydantic.root_validator() - def validate_include_view_lineage(cls, values): - if ( - "include_table_lineage" in values - and not values.get("include_table_lineage") - and values.get("include_view_lineage") - ): - raise ValueError( - "include_table_lineage must be True for include_view_lineage to be set." - ) - return values - def get_sql_alchemy_url( self, database: Optional[str] = None, @@ -261,28 +246,8 @@ def get_connect_args(self) -> dict: self._computed_connect_args = connect_args return connect_args - -class SnowflakeConfig(BaseSnowflakeConfig, SQLCommonConfig): - database_pattern: AllowDenyPattern = AllowDenyPattern( - deny=[r"^UTIL_DB$", r"^SNOWFLAKE$", r"^SNOWFLAKE_SAMPLE_DATA$"] - ) - - ignore_start_time_lineage: bool = False - upstream_lineage_in_report: bool = False - - def get_sql_alchemy_url( - self, - database: Optional[str] = None, - username: Optional[str] = None, - password: Optional[pydantic.SecretStr] = None, - role: Optional[str] = None, - ) -> str: - return super().get_sql_alchemy_url( - database=database, username=username, password=password, role=role - ) - def get_options(self) -> dict: - options_connect_args: Dict = super().get_connect_args() + options_connect_args: Dict = self.get_connect_args() options_connect_args.update(self.options.get("connect_args", {})) self.options["connect_args"] = options_connect_args return self.options @@ -372,3 +337,34 @@ def get_connection(self) -> snowflake.connector.SnowflakeConnection: else: # not expected to be here raise Exception("Not expected to be here.") + + +class SnowflakeConfig(BaseSnowflakeConfig, BaseTimeWindowConfig, SQLCommonConfig): + + include_table_lineage: bool = pydantic.Field( + default=True, + description="If enabled, populates the snowflake table-to-table and s3-to-snowflake table lineage. Requires appropriate grants given to the role and Snowflake Enterprise Edition or above.", + ) + include_view_lineage: bool = pydantic.Field( + default=True, + description="If enabled, populates the snowflake view->table and table->view lineages. Requires appropriate grants given to the role, and include_table_lineage to be True. 
view->table lineage requires Snowflake Enterprise Edition or above.", + ) + + database_pattern: AllowDenyPattern = AllowDenyPattern( + deny=[r"^UTIL_DB$", r"^SNOWFLAKE$", r"^SNOWFLAKE_SAMPLE_DATA$"] + ) + + ignore_start_time_lineage: bool = False + upstream_lineage_in_report: bool = False + + @pydantic.root_validator() + def validate_include_view_lineage(cls, values): + if ( + "include_table_lineage" in values + and not values.get("include_table_lineage") + and values.get("include_view_lineage") + ): + raise ValueError( + "include_table_lineage must be True for include_view_lineage to be set." + ) + return values diff --git a/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json b/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json new file mode 100644 index 0000000000000..a72c960a72296 --- /dev/null +++ b/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json @@ -0,0 +1,658 @@ +[ +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "dataFlowInfo", + "aspect": { + "json": { + "customProperties": {}, + "name": "postgres" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "dataJobInfo", + "aspect": { + "json": { + "customProperties": { + "paused": "False", + "sync_frequency": "1440", + "destination_id": "'interval_unconstitutional'" + }, + "name": "postgres", + "type": { + "string": "COMMAND" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "dataJobInputOutput", + "aspect": { + "json": { + "inputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ], + "outputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ], + "inputDatajobs": [], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV),id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV),name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD),name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV),id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV),name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD),name)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:Shubham Jagtap", + "type": "DEVELOPER", + "source": { + "type": "SERVICE" + } + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": 
"UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": {}, + "name": "4c9a03d6-eded-4422-a46a-163266e58243", + "type": "BATCH_SCHEDULED", + "created": { + "time": 1695191853000, + "actor": "urn:li:corpuser:datahub" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "upstreamInstances": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceInput", + "aspect": { + "json": { + "inputs": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1695191853000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "STARTED" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1695191885000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "COMPLETE", + "result": { + "type": "SUCCESS", + "nativeResultType": "fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": 
"UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": {}, + "name": "f773d1e9-c791-48f4-894f-8cf9b3dfc834", + "type": "BATCH_SCHEDULED", + "created": { + "time": 1696343730000, + "actor": "urn:li:corpuser:datahub" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "upstreamInstances": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceInput", + "aspect": { + "json": { + "inputs": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343730000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "STARTED" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343732000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "COMPLETE", + "result": { + "type": "SKIPPED", + "nativeResultType": "fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": {}, + "name": "63c2fc85-600b-455f-9ba0-f576522465be", + "type": "BATCH_SCHEDULED", + "created": { + "time": 1696343755000, + "actor": "urn:li:corpuser:datahub" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": 
"powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "upstreamInstances": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceInput", + "aspect": { + "json": { + "inputs": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343755000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "STARTED" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343790000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "COMPLETE", + "result": { + "type": "FAILURE", + "nativeResultType": "fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/fivetran/test_fivetran.py 
b/metadata-ingestion/tests/integration/fivetran/test_fivetran.py new file mode 100644 index 0000000000000..62b3df12e1b9d --- /dev/null +++ b/metadata-ingestion/tests/integration/fivetran/test_fivetran.py @@ -0,0 +1,192 @@ +import datetime +from unittest import mock +from unittest.mock import MagicMock + +import pytest +from freezegun import freeze_time + +from datahub.ingestion.run.pipeline import Pipeline +from datahub.ingestion.source.fivetran.config import DestinationConfig +from datahub.ingestion.source.fivetran.fivetran_query import FivetranLogQuery +from tests.test_helpers import mce_helpers + +FROZEN_TIME = "2022-06-07 17:00:00" + + +def default_query_results(query): + if query == FivetranLogQuery.use_schema("TEST_DATABASE", "TEST_SCHEMA"): + return [] + elif query == FivetranLogQuery.get_connectors_query(): + return [ + { + "connector_id": "calendar_elected", + "connecting_user_id": "reapply_phone", + "connector_type_id": "postgres", + "connector_name": "postgres", + "paused": False, + "sync_frequency": 1440, + "destination_id": "interval_unconstitutional", + }, + ] + elif query == FivetranLogQuery.get_table_lineage_query("calendar_elected"): + return [ + { + "source_table_id": "10040", + "source_table_name": "employee", + "source_schema_name": "public", + "destination_table_id": "7779", + "destination_table_name": "employee", + "destination_schema_name": "postgres_public", + }, + { + "source_table_id": "10041", + "source_table_name": "company", + "source_schema_name": "public", + "destination_table_id": "7780", + "destination_table_name": "company", + "destination_schema_name": "postgres_public", + }, + ] + elif query == FivetranLogQuery.get_column_lineage_query( + "10040", "7779" + ) or query == FivetranLogQuery.get_column_lineage_query("10041", "7780"): + return [ + { + "source_column_name": "id", + "destination_column_name": "id", + }, + { + "source_column_name": "name", + "destination_column_name": "name", + }, + ] + elif query == FivetranLogQuery.get_user_query("reapply_phone"): + return [ + { + "user_id": "reapply_phone", + "given_name": "Shubham", + "family_name": "Jagtap", + } + ] + elif query == FivetranLogQuery.get_sync_start_logs_query("calendar_elected"): + return [ + { + "time_stamp": datetime.datetime(2023, 9, 20, 6, 37, 32, 606000), + "sync_id": "4c9a03d6-eded-4422-a46a-163266e58243", + }, + { + "time_stamp": datetime.datetime(2023, 10, 3, 14, 35, 30, 345000), + "sync_id": "f773d1e9-c791-48f4-894f-8cf9b3dfc834", + }, + { + "time_stamp": datetime.datetime(2023, 10, 3, 14, 35, 55, 401000), + "sync_id": "63c2fc85-600b-455f-9ba0-f576522465be", + }, + ] + elif query == FivetranLogQuery.get_sync_end_logs_query("calendar_elected"): + return [ + { + "time_stamp": datetime.datetime(2023, 9, 20, 6, 38, 5, 56000), + "sync_id": "4c9a03d6-eded-4422-a46a-163266e58243", + "message_data": '"{\\"status\\":\\"SUCCESSFUL\\"}"', + }, + { + "time_stamp": datetime.datetime(2023, 10, 3, 14, 35, 31, 512000), + "sync_id": "f773d1e9-c791-48f4-894f-8cf9b3dfc834", + "message_data": '"{\\"reason\\":\\"Sync has been cancelled because of a user action in the dashboard.Standard Config updated.\\",\\"status\\":\\"CANCELED\\"}"', + }, + { + "time_stamp": datetime.datetime(2023, 10, 3, 14, 36, 29, 678000), + "sync_id": "63c2fc85-600b-455f-9ba0-f576522465be", + "message_data": '"{\\"reason\\":\\"java.lang.RuntimeException: FATAL: too many connections for role \\\\\\"hxwraqld\\\\\\"\\",\\"taskType\\":\\"reconnect\\",\\"status\\":\\"FAILURE_WITH_TASK\\"}"', + }, + ] + # Unreachable code + raise 
Exception(f"Unknown query {query}") + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_fivetran_basic(pytestconfig, tmp_path): + test_resources_dir = pytestconfig.rootpath / "tests/integration/fivetran" + + # Run the metadata ingestion pipeline. + output_file = tmp_path / "fivetran_test_events.json" + golden_file = test_resources_dir / "fivetran_golden.json" + + with mock.patch( + "datahub.ingestion.source.fivetran.fivetran_log_api.create_engine" + ) as mock_create_engine: + connection_magic_mock = MagicMock() + connection_magic_mock.execute.side_effect = default_query_results + + mock_create_engine.return_value = connection_magic_mock + + pipeline = Pipeline.create( + { + "run_id": "powerbi-test", + "source": { + "type": "fivetran", + "config": { + "fivetran_log_config": { + "destination_platform": "snowflake", + "destination_config": { + "account_id": "TESTID", + "warehouse": "TEST_WH", + "username": "test", + "password": "test@123", + "database": "TEST_DATABASE", + "role": "TESTROLE", + "log_schema": "TEST_SCHEMA", + }, + }, + "connector_patterns": { + "allow": [ + "postgres", + ] + }, + "sources_to_database": { + "calendar_elected": "postgres_db", + }, + "sources_to_platform_instance": { + "calendar_elected": { + "env": "DEV", + } + }, + }, + }, + "sink": { + "type": "file", + "config": { + "filename": f"{output_file}", + }, + }, + } + ) + + pipeline.run() + pipeline.raise_from_status() + golden_file = "fivetran_golden.json" + + mce_helpers.check_golden_file( + pytestconfig, + output_path=f"{output_file}", + golden_path=f"{test_resources_dir}/{golden_file}", + ) + + +@freeze_time(FROZEN_TIME) +def test_fivetran_snowflake_destination_config(pytestconfig, tmp_path): + snowflake_dest = DestinationConfig( + account_id="TESTID", + warehouse="TEST_WH", + username="test", + password="test@123", + database="TEST_DATABASE", + role="TESTROLE", + log_schema="TEST_SCHEMA", + ) + assert ( + snowflake_dest.get_sql_alchemy_url() + == "snowflake://test:test%40123@TESTID?application=acryl_datahub&authenticator=SNOWFLAKE&role=TESTROLE&warehouse=TEST_WH" + ) diff --git a/metadata-service/war/src/main/resources/boot/data_platforms.json b/metadata-service/war/src/main/resources/boot/data_platforms.json index 3d956c5774ded..3c70eda8561b8 100644 --- a/metadata-service/war/src/main/resources/boot/data_platforms.json +++ b/metadata-service/war/src/main/resources/boot/data_platforms.json @@ -564,5 +564,15 @@ "type": "KEY_VALUE_STORE", "logoUrl": "/assets/platforms/dynamodblogo.png" } + }, + { + "urn": "urn:li:dataPlatform:fivetran", + "aspect": { + "datasetNameDelimiter": ".", + "name": "fivetran", + "displayName": "Fivetran", + "type": "OTHERS", + "logoUrl": "/assets/platforms/fivetranlogo.png" + } } ] From 399e032dfa2b4bf87b7b406e7b009e34e99a1003 Mon Sep 17 00:00:00 2001 From: deepgarg-visa <149145061+deepgarg-visa@users.noreply.github.com> Date: Wed, 8 Nov 2023 22:32:13 +0530 Subject: [PATCH 072/792] feat(neo4j): Allow datahub to connect to specific neo4j database (#9179) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- docker/docker-compose.override.yml | 4 ++ .../docker-compose-m1.quickstart.yml | 54 +++++++++---------- .../quickstart/docker-compose.quickstart.yml | 54 +++++++++---------- .../src/main/resources/application.yml | 1 + .../common/Neo4jGraphServiceFactory.java | 7 ++- 5 files changed, 65 insertions(+), 55 deletions(-) diff --git a/docker/docker-compose.override.yml b/docker/docker-compose.override.yml index 225aa01fa4e4f..0907f47d70c3c 100644 
--- a/docker/docker-compose.override.yml +++ b/docker/docker-compose.override.yml @@ -7,8 +7,12 @@ services: environment: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} + - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins + datahub-upgrade: + environment: + - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} mysql-setup: container_name: mysql-setup hostname: mysql-setup diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index c96baf37551b2..613718306abef 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -81,32 +81,32 @@ services: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-gms - - EBEAN_DATASOURCE_USERNAME=datahub - - EBEAN_DATASOURCE_PASSWORD=datahub + - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - EBEAN_DATASOURCE_HOST=mysql:3306 + - EBEAN_DATASOURCE_PASSWORD=datahub - EBEAN_DATASOURCE_URL=jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2 - - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - - KAFKA_BOOTSTRAP_SERVER=broker:29092 - - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + - EBEAN_DATASOURCE_USERNAME=datahub - ELASTICSEARCH_HOST=elasticsearch - - ELASTICSEARCH_PORT=9200 - - ES_BULK_REFRESH_POLICY=WAIT_UNTIL - - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - - NEO4J_HOST=http://neo4j:7474 - - NEO4J_URI=bolt://neo4j - - NEO4J_USERNAME=neo4j - - NEO4J_PASSWORD=datahub - - JAVA_OPTS=-Xms1g -Xmx1g - - GRAPH_SERVICE_DIFF_MODE_ENABLED=true - - GRAPH_SERVICE_IMPL=neo4j + - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true + - ELASTICSEARCH_PORT=9200 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true + - ES_BULK_REFRESH_POLICY=WAIT_UNTIL + - GRAPH_SERVICE_DIFF_MODE_ENABLED=true + - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} + - JAVA_OPTS=-Xms1g -Xmx1g + - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true + - METADATA_SERVICE_AUTH_ENABLED=false + - NEO4J_HOST=http://neo4j:7474 + - NEO4J_PASSWORD=datahub + - NEO4J_URI=bolt://neo4j + - NEO4J_USERNAME=neo4j - PE_CONSUMER_ENABLED=true - UI_INGESTION_ENABLED=true - - METADATA_SERVICE_AUTH_ENABLED=false healthcheck: interval: 1s retries: 3 @@ -134,23 +134,23 @@ services: neo4j: condition: service_healthy environment: - - EBEAN_DATASOURCE_USERNAME=datahub - - EBEAN_DATASOURCE_PASSWORD=datahub + - BACKFILL_BROWSE_PATHS_V2=true + - DATAHUB_GMS_HOST=datahub-gms + - DATAHUB_GMS_PORT=8080 + - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - EBEAN_DATASOURCE_HOST=mysql:3306 + - EBEAN_DATASOURCE_PASSWORD=datahub - EBEAN_DATASOURCE_URL=jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8 - - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - - KAFKA_BOOTSTRAP_SERVER=broker:29092 - - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + - EBEAN_DATASOURCE_USERNAME=datahub + - ELASTICSEARCH_BUILD_INDICES_CLONE_INDICES=false - 
ELASTICSEARCH_HOST=elasticsearch - - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - - ELASTICSEARCH_BUILD_INDICES_CLONE_INDICES=false - - GRAPH_SERVICE_IMPL=elasticsearch - - DATAHUB_GMS_HOST=datahub-gms - - DATAHUB_GMS_PORT=8080 + - ELASTICSEARCH_PORT=9200 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - - BACKFILL_BROWSE_PATHS_V2=true + - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} + - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - REPROCESS_DEFAULT_BROWSE_PATHS_V2=false hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index 8a66521cbb522..30ccbae59be74 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -81,32 +81,32 @@ services: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-gms - - EBEAN_DATASOURCE_USERNAME=datahub - - EBEAN_DATASOURCE_PASSWORD=datahub + - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - EBEAN_DATASOURCE_HOST=mysql:3306 + - EBEAN_DATASOURCE_PASSWORD=datahub - EBEAN_DATASOURCE_URL=jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2 - - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - - KAFKA_BOOTSTRAP_SERVER=broker:29092 - - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + - EBEAN_DATASOURCE_USERNAME=datahub - ELASTICSEARCH_HOST=elasticsearch - - ELASTICSEARCH_PORT=9200 - - ES_BULK_REFRESH_POLICY=WAIT_UNTIL - - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - - NEO4J_HOST=http://neo4j:7474 - - NEO4J_URI=bolt://neo4j - - NEO4J_USERNAME=neo4j - - NEO4J_PASSWORD=datahub - - JAVA_OPTS=-Xms1g -Xmx1g - - GRAPH_SERVICE_DIFF_MODE_ENABLED=true - - GRAPH_SERVICE_IMPL=neo4j + - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true + - ELASTICSEARCH_PORT=9200 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true + - ES_BULK_REFRESH_POLICY=WAIT_UNTIL + - GRAPH_SERVICE_DIFF_MODE_ENABLED=true + - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} + - JAVA_OPTS=-Xms1g -Xmx1g + - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true + - METADATA_SERVICE_AUTH_ENABLED=false + - NEO4J_HOST=http://neo4j:7474 + - NEO4J_PASSWORD=datahub + - NEO4J_URI=bolt://neo4j + - NEO4J_USERNAME=neo4j - PE_CONSUMER_ENABLED=true - UI_INGESTION_ENABLED=true - - METADATA_SERVICE_AUTH_ENABLED=false healthcheck: interval: 1s retries: 3 @@ -134,23 +134,23 @@ services: neo4j: condition: service_healthy environment: - - EBEAN_DATASOURCE_USERNAME=datahub - - EBEAN_DATASOURCE_PASSWORD=datahub + - BACKFILL_BROWSE_PATHS_V2=true + - DATAHUB_GMS_HOST=datahub-gms + - DATAHUB_GMS_PORT=8080 + - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - EBEAN_DATASOURCE_HOST=mysql:3306 + - EBEAN_DATASOURCE_PASSWORD=datahub - EBEAN_DATASOURCE_URL=jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8 - - 
EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver - - KAFKA_BOOTSTRAP_SERVER=broker:29092 - - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + - EBEAN_DATASOURCE_USERNAME=datahub + - ELASTICSEARCH_BUILD_INDICES_CLONE_INDICES=false - ELASTICSEARCH_HOST=elasticsearch - - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - - ELASTICSEARCH_BUILD_INDICES_CLONE_INDICES=false - - GRAPH_SERVICE_IMPL=elasticsearch - - DATAHUB_GMS_HOST=datahub-gms - - DATAHUB_GMS_PORT=8080 + - ELASTICSEARCH_PORT=9200 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - - BACKFILL_BROWSE_PATHS_V2=true + - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} + - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - REPROCESS_DEFAULT_BROWSE_PATHS_V2=false hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index a06891699607b..46aa02d98572e 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -251,6 +251,7 @@ neo4j: username: ${NEO4J_USERNAME:neo4j} password: ${NEO4J_PASSWORD:datahub} uri: ${NEO4J_URI:bolt://localhost} + database: ${NEO4J_DATABASE:graph.db} maxConnectionPoolSize: ${NEO4J_MAX_CONNECTION_POOL_SIZE:100} maxConnectionAcquisitionTimeout: ${NEO4J_MAX_CONNECTION_ACQUISITION_TIMEOUT_IN_SECONDS:60} maxConnectionLifetimeInSeconds: ${NEO4j_MAX_CONNECTION_LIFETIME_IN_SECONDS:3600} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java index e62dfd50f897d..87670ce10f481 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java @@ -6,8 +6,10 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import javax.annotation.Nonnull; import org.neo4j.driver.Driver; +import org.neo4j.driver.SessionConfig; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -24,10 +26,13 @@ public class Neo4jGraphServiceFactory { @Qualifier("entityRegistry") private EntityRegistry entityRegistry; + @Value("${neo4j.database}") + private String neo4jDatabase; + @Bean(name = "neo4jGraphService") @Nonnull protected Neo4jGraphService getInstance() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - return new Neo4jGraphService(lineageRegistry, neo4jDriver); + return new Neo4jGraphService(lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase)); } } From 332d4afaab39e4b9e9ff73a48e3bfec9b21fe0b5 Mon Sep 17 00:00:00 2001 From: Gabe Lyons Date: Wed, 8 Nov 2023 10:22:09 -0800 Subject: [PATCH 073/792] feat(subtypes): support subtypes for charts in the UI (#9186) --- .../java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java | 4 
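The Java factory change in this patch boils down to: read the database name from configuration (with graph.db as the fallback, per application.yml) and pin every session to it via SessionConfig.forDatabase. A rough analogue using the official neo4j Python driver, assuming the neo4j package is installed and a server is reachable -- this illustrates the driver API, it is not DataHub code:

    import os

    from neo4j import GraphDatabase

    # Mirror application.yml's ${NEO4J_DATABASE:graph.db} default resolution.
    database = os.environ.get("NEO4J_DATABASE", "graph.db")

    driver = GraphDatabase.driver(
        os.environ.get("NEO4J_URI", "bolt://localhost"),
        auth=(
            os.environ.get("NEO4J_USERNAME", "neo4j"),
            os.environ.get("NEO4J_PASSWORD", "datahub"),
        ),
    )

    # Equivalent of SessionConfig.forDatabase(...): the session targets the
    # configured database instead of the server's default one.
    with driver.session(database=database) as session:
        session.run("RETURN 1")
    driver.close()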
++++ datahub-graphql-core/src/main/resources/entity.graphql | 5 +++++ datahub-web-react/src/app/entity/chart/ChartEntity.tsx | 4 ++++ .../src/app/entity/chart/preview/ChartPreview.tsx | 5 ++++- datahub-web-react/src/graphql/chart.graphql | 3 +++ datahub-web-react/src/graphql/lineage.graphql | 3 +++ datahub-web-react/src/graphql/search.graphql | 6 ++++++ metadata-models/src/main/resources/entity-registry.yml | 1 + 8 files changed, 30 insertions(+), 1 deletion(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index b99f712034fe0..b0b26f073876c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1433,6 +1433,10 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("subTypes", new SubTypesResolver( + this.entityClient, + "chart", + "subTypes")) ); builder.type("ChartInfo", typeWiring -> typeWiring .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index b37a8f34fa056..035f756a10d55 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -5249,6 +5249,11 @@ type Chart implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Sub Types that this entity implements + """ + subTypes: SubTypes } """ diff --git a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx index 0f1b6dbf3d660..fc898dec9d93a 100644 --- a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx +++ b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx @@ -154,10 +154,12 @@ export class ChartEntity implements Entity { getOverridePropertiesFromEntity = (chart?: Chart | null): GenericEntityProperties => { // TODO: Get rid of this once we have correctly formed platform coming back. const name = chart?.properties?.name; + const subTypes = chart?.subTypes; const externalUrl = chart?.properties?.externalUrl; return { name, externalUrl, + entityTypeOverride: subTypes ? 
capitalizeFirstLetterOnly(subTypes.typeNames?.[0]) : '', }; }; @@ -187,6 +189,7 @@ export class ChartEntity implements Entity { return ( { type: EntityType.Chart, icon: entity?.platform?.properties?.logoUrl || undefined, platform: entity?.platform, + subtype: entity?.subTypes?.typeNames?.[0] || undefined, }; }; diff --git a/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx b/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx index 7d0fc143043e2..b7fbd63ee231e 100644 --- a/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx +++ b/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx @@ -15,6 +15,7 @@ import { EntityPath, } from '../../../../types.generated'; import DefaultPreviewCard from '../../../preview/DefaultPreviewCard'; +import { capitalizeFirstLetterOnly } from '../../../shared/textUtil'; import { useEntityRegistry } from '../../../useEntityRegistry'; import { IconStyleType } from '../../Entity'; import { ChartStatsSummary as ChartStatsSummaryView } from '../shared/ChartStatsSummary'; @@ -43,6 +44,7 @@ export const ChartPreview = ({ snippet, degree, paths, + subType, }: { urn: string; platform?: string; @@ -67,6 +69,7 @@ export const ChartPreview = ({ snippet?: React.ReactNode | null; degree?: number; paths?: EntityPath[]; + subType?: string | null; }): JSX.Element => { const entityRegistry = useEntityRegistry(); @@ -76,7 +79,7 @@ export const ChartPreview = ({ name={name || ''} urn={urn} description={description || ''} - type="Chart" + type={capitalizeFirstLetterOnly(subType) || 'Chart'} typeIcon={entityRegistry.getIcon(EntityType.Chart, 14, IconStyleType.ACCENT)} logoUrl={logoUrl || ''} platform={platform} diff --git a/datahub-web-react/src/graphql/chart.graphql b/datahub-web-react/src/graphql/chart.graphql index d4d3c3c918408..a4b430720fa3d 100644 --- a/datahub-web-react/src/graphql/chart.graphql +++ b/datahub-web-react/src/graphql/chart.graphql @@ -100,6 +100,9 @@ query getChart($urn: String!) { canEditLineage canEditEmbed } + subTypes { + typeNames + } } } diff --git a/datahub-web-react/src/graphql/lineage.graphql b/datahub-web-react/src/graphql/lineage.graphql index 52385dee8631a..8fdfb696e0894 100644 --- a/datahub-web-react/src/graphql/lineage.graphql +++ b/datahub-web-react/src/graphql/lineage.graphql @@ -165,6 +165,9 @@ fragment lineageNodeProperties on EntityWithRelationships { status { removed } + subTypes { + typeNames + } } ... on Dataset { name diff --git a/datahub-web-react/src/graphql/search.graphql b/datahub-web-react/src/graphql/search.graphql index 2297c2d0c1d07..876be12fd335b 100644 --- a/datahub-web-react/src/graphql/search.graphql +++ b/datahub-web-react/src/graphql/search.graphql @@ -105,6 +105,9 @@ fragment autoCompleteFields on Entity { parentContainers { ...parentContainersFields } + subTypes { + typeNames + } } ... on DataFlow { orchestrator @@ -550,6 +553,9 @@ fragment searchResultFields on Entity { } } } + subTypes { + typeNames + } } ... 
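Across these UI changes the display-type rule is the same everywhere: take the first entry of subTypes.typeNames, capitalize it, and fall back to the generic entity type when no subtype exists. A small Python sketch of that selection rule (the exact casing behavior of capitalizeFirstLetterOnly is an assumption here):

    def display_type(entity: dict, default: str = "Chart") -> str:
        """Prefer the first subtype name, falling back to the entity type."""
        type_names = (entity.get("subTypes") or {}).get("typeNames") or []
        if type_names and type_names[0]:
            name = type_names[0]
            return name[0].upper() + name[1:]  # capitalize first letter only
        return default

    assert display_type({"subTypes": {"typeNames": ["looker_look"]}}) == "Looker_look"
    assert display_type({}) == "Chart"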
on DataFlow { flowId diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index 11d0f74305d7b..a5296d074093b 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -120,6 +120,7 @@ entities: - globalTags - glossaryTerms - browsePathsV2 + - subTypes - name: dashboard keyAspect: dashboardKey aspects: From 72135914109a241aa11ceaeb68b9ac56134e7e64 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Wed, 8 Nov 2023 14:36:33 -0500 Subject: [PATCH 074/792] feat(ui) Debounce auto-complete in search bar (#9205) --- datahub-web-react/src/app/home/HomePageHeader.tsx | 6 ++++-- datahub-web-react/src/app/search/SearchablePage.tsx | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/datahub-web-react/src/app/home/HomePageHeader.tsx b/datahub-web-react/src/app/home/HomePageHeader.tsx index 0052d54f562eb..c881109f6e419 100644 --- a/datahub-web-react/src/app/home/HomePageHeader.tsx +++ b/datahub-web-react/src/app/home/HomePageHeader.tsx @@ -1,6 +1,7 @@ import React, { useEffect, useMemo, useState } from 'react'; import { useHistory } from 'react-router'; import { Typography, Image, Row, Button, Tag } from 'antd'; +import { debounce } from 'lodash'; import styled, { useTheme } from 'styled-components/macro'; import { RightOutlined } from '@ant-design/icons'; import { ManageAccount } from '../shared/ManageAccount'; @@ -24,6 +25,7 @@ import { getAutoCompleteInputFromQuickFilter } from '../search/utils/filterUtils import { useUserContext } from '../context/useUserContext'; import AcrylDemoBanner from './AcrylDemoBanner'; import DemoButton from '../entity/shared/components/styled/DemoButton'; +import { HALF_SECOND_IN_MS } from '../entity/shared/tabs/Dataset/Queries/utils/constants'; const Background = styled.div` width: 100%; @@ -176,7 +178,7 @@ export const HomePageHeader = () => { }); }; - const onAutoComplete = (query: string) => { + const onAutoComplete = debounce((query: string) => { if (query && query.trim() !== '') { getAutoCompleteResultsForMultiple({ variables: { @@ -189,7 +191,7 @@ export const HomePageHeader = () => { }, }); } - }; + }, HALF_SECOND_IN_MS); const onClickExploreAll = () => { analytics.event({ diff --git a/datahub-web-react/src/app/search/SearchablePage.tsx b/datahub-web-react/src/app/search/SearchablePage.tsx index 489687050c749..9d02d85d3634c 100644 --- a/datahub-web-react/src/app/search/SearchablePage.tsx +++ b/datahub-web-react/src/app/search/SearchablePage.tsx @@ -1,5 +1,6 @@ import React, { useEffect, useState } from 'react'; import { useHistory, useLocation } from 'react-router'; +import { debounce } from 'lodash'; import * as QueryString from 'query-string'; import { useTheme } from 'styled-components'; import { SearchHeader } from './SearchHeader'; @@ -17,6 +18,7 @@ import { getAutoCompleteInputFromQuickFilter } from './utils/filterUtils'; import { useQuickFiltersContext } from '../../providers/QuickFiltersContext'; import { useUserContext } from '../context/useUserContext'; import { useSelectedSortOption } from './context/SearchContext'; +import { HALF_SECOND_IN_MS } from '../entity/shared/tabs/Dataset/Queries/utils/constants'; const styles = { children: { @@ -93,7 +95,7 @@ export const SearchablePage = ({ onSearch, onAutoComplete, children }: Props) => }); }; - const autoComplete = (query: string) => { + const autoComplete = debounce((query: string) => { if (query && query.trim() !== '') { 
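Both call sites in this patch wrap the autocomplete handler in lodash's debounce, so a burst of keystrokes produces one trailing request half a second after typing pauses. The semantics, rendered as a small Python decorator (threading.Timer-based, trailing edge only; a sketch, not DataHub code):

    import threading

    def debounce(wait_seconds):
        """Trailing-edge debounce: only the last call in a burst runs,
        wait_seconds after the burst ends -- like debounce(fn, wait) in lodash."""
        def decorator(fn):
            timer = None
            lock = threading.Lock()

            def debounced(*args, **kwargs):
                nonlocal timer
                with lock:
                    if timer is not None:
                        timer.cancel()  # a newer call supersedes the pending one
                    timer = threading.Timer(wait_seconds, fn, args, kwargs)
                    timer.start()

            return debounced
        return decorator

    @debounce(0.5)  # HALF_SECOND_IN_MS, expressed in seconds
    def autocomplete(query):
        print("fetching suggestions for", query)

    for q in ("d", "da", "dat", "data"):
        autocomplete(q)  # only "data" triggers a fetch, ~0.5s after the last call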
getAutoCompleteResults({
                 variables: {
@@ -105,7 +107,7 @@ export const SearchablePage = ({ onSearch, onAutoComplete, children }: Props) =>
                 },
             });
         }
-    };
+    }, HALF_SECOND_IN_MS);
 
     // Load correct autocomplete results on initial page load.
     useEffect(() => {

From 70692b44e995eab252a2344496141acdf6181908 Mon Sep 17 00:00:00 2001
From: Gabe Lyons
Date: Wed, 8 Nov 2023 12:49:23 -0800
Subject: [PATCH 075/792] fix(lineage): magical lineage layout fix (#9187)

---
 .../src/app/lineage/utils/layoutTree.ts | 21 +++++++++++++++----
 1 file changed, 17 insertions(+), 4 deletions(-)

diff --git a/datahub-web-react/src/app/lineage/utils/layoutTree.ts b/datahub-web-react/src/app/lineage/utils/layoutTree.ts
index cc704007049c2..a972a62308f07 100644
--- a/datahub-web-react/src/app/lineage/utils/layoutTree.ts
+++ b/datahub-web-react/src/app/lineage/utils/layoutTree.ts
@@ -32,6 +32,21 @@ function getParentRelationship(direction: Direction, parent: VizNode | null, nod
     return directionRelationships?.find((r) => r?.entity?.urn === node?.urn);
 }
 
+// This utility ensures that layouts containing many references to the same URN lay that URN out only once, at its first appearance.
+function firstAppearanceIndices(arr: string[]): number[] {
+    const seen = new Set<string>(); // To track which strings have been seen
+    const result: number[] = [];
+
+    for (let i = 0; i < arr.length; i++) {
+        if (!seen.has(arr[i])) {
+            seen.add(arr[i]); // Add the string to the set
+            result.push(i); // Save the index
+        }
+    }
+
+    return result;
+}
+
 function layoutNodesForOneDirection(
     data: NodeData,
     direction: Direction,
@@ -54,12 +69,10 @@ function layoutNodesForOneDirection(
     while (nodesInCurrentLayer.length > 0) {
         // if we've already added a node to the viz higher up dont add it again
         const urnsToAddInCurrentLayer = Array.from(new Set(nodesInCurrentLayer.map(({ node }) => node.urn || '')));
-        const nodesToAddInCurrentLayer = urnsToAddInCurrentLayer
-            .filter((urn, pos) => urnsToAddInCurrentLayer.indexOf(urn) === pos)
-            .filter((urn) => !nodesByUrn[urn || '']);
+        const positionsToAddInCurrentLayer = firstAppearanceIndices(urnsToAddInCurrentLayer);
 
         const filteredNodesInCurrentLayer = nodesInCurrentLayer
-            .filter(({ node }) => nodesToAddInCurrentLayer.indexOf(node.urn || '') > -1)
+            .filter((_, idx) => positionsToAddInCurrentLayer.indexOf(idx) > -1)
             .filter(({ node }) => node.status?.removed !== true);
 
         const layerSize = filteredNodesInCurrentLayer.length;

From f87983d69dc62db5c58dc114f8796dcb9eb1cc95 Mon Sep 17 00:00:00 2001
From: John Joyce
Date: Wed, 8 Nov 2023 13:29:37 -0800
Subject: [PATCH 076/792] refactor(pdl): Refactoring Assertion model enums out
 (#9191)

Co-authored-by: Harshal Sheth
---
 .../linkedin/assertion/AssertionResult.pdl | 19 +--------------
 .../assertion/AssertionResultType.pdl | 23 +++++++++++++++++++
 .../linkedin/assertion/AssertionRunEvent.pdl | 7 +-----
 .../linkedin/assertion/AssertionRunStatus.pdl | 12 ++++++++++
 4 files changed, 37 insertions(+), 24 deletions(-)
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResultType.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunStatus.pdl

diff --git a/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResult.pdl b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResult.pdl
index ded84e1969153..935f3e5976dfa 100644
--- a/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResult.pdl
+++ b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResult.pdl
@@ -9,24 +9,7 @@ 
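The new helper is the core of the layout change: given a list of strings, it returns the position of each string's first occurrence, so the downstream filter can work by position instead of repeated indexOf lookups on URNs. Its logic, restated as runnable Python:

    def first_appearance_indices(items):
        """Indices of the first occurrence of each distinct item, in order."""
        seen = set()
        indices = []
        for i, item in enumerate(items):
            if item not in seen:
                seen.add(item)
                indices.append(i)
        return indices

    urns = ["urn:a", "urn:b", "urn:a", "urn:c", "urn:b"]
    assert first_appearance_indices(urns) == [0, 1, 3]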
record AssertionResult { */ @TimeseriesField = {} @Searchable = {} - type: enum AssertionResultType { - /** - * The Assertion has not yet been fully evaluated - */ - INIT - /** - * The Assertion Succeeded - */ - SUCCESS - /** - * The Assertion Failed - */ - FAILURE - /** - * The Assertion encountered an Error - */ - ERROR - } + type: AssertionResultType /** * Number of rows for evaluated batch diff --git a/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResultType.pdl b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResultType.pdl new file mode 100644 index 0000000000000..8954d94cced7b --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionResultType.pdl @@ -0,0 +1,23 @@ +namespace com.linkedin.assertion + +/** +* The final result of evaluating an assertion, e.g. SUCCESS, FAILURE, or ERROR. +*/ +enum AssertionResultType { + /** + * The Assertion has not yet been fully evaluated + */ + INIT + /** + * The Assertion Succeeded + */ + SUCCESS + /** + * The Assertion Failed + */ + FAILURE + /** + * The Assertion encountered an Error + */ + ERROR +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunEvent.pdl b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunEvent.pdl index 14f1204232740..55bcae77273db 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunEvent.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunEvent.pdl @@ -39,12 +39,7 @@ record AssertionRunEvent { * The status of the assertion run as per this timeseries event. */ @TimeseriesField = {} - status: enum AssertionRunStatus { - /** - * The Assertion Run has completed - */ - COMPLETE - } + status: AssertionRunStatus /** * Results of assertion, present if the status is COMPLETE diff --git a/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunStatus.pdl b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunStatus.pdl new file mode 100644 index 0000000000000..e4e17925ede82 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/assertion/AssertionRunStatus.pdl @@ -0,0 +1,12 @@ +namespace com.linkedin.assertion + + +/** +* The lifecycle status of an assertion run. 
+*/ +enum AssertionRunStatus { + /** + * The Assertion Run has completed + */ + COMPLETE +} \ No newline at end of file From f38c8087bb508a779d94d04967a9c449f6d93126 Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Wed, 8 Nov 2023 22:38:15 +0000 Subject: [PATCH 077/792] feat(auth): Add roles to policy engine validation logic (#9178) --- .../authorization/AuthorizedActors.java | 1 + .../authorization/AuthorizerChain.java | 5 + .../authorization/DataHubAuthorizer.java | 8 +- .../datahub/authorization/PolicyEngine.java | 43 +++----- .../authorization/DataHubAuthorizerTest.java | 97 ++++++++++++++++--- .../authorization/PolicyEngineTest.java | 54 ++++++++++- .../datahub/plugins/test/TestAuthorizer.java | 2 +- 7 files changed, 162 insertions(+), 48 deletions(-) diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java index aec99e1b1e57a..5a9990552bb34 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java @@ -15,6 +15,7 @@ public class AuthorizedActors { String privilege; List users; List groups; + List roles; boolean allUsers; boolean allGroups; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java index f8eca541e1efb..7e7a1de176f06 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java @@ -126,11 +126,16 @@ private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors origin mergedGroups = new ArrayList<>(groups); } + Set roles = new HashSet<>(original.getRoles()); + roles.addAll(other.getRoles()); + List mergedRoles = new ArrayList<>(roles); + return AuthorizedActors.builder() .allUsers(original.isAllUsers() || other.isAllUsers()) .allGroups(original.isAllGroups() || other.isAllGroups()) .users(mergedUsers) .groups(mergedGroups) + .roles(mergedRoles) .build(); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index f8f99475de23e..956d635c7901a 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -133,6 +133,7 @@ public AuthorizedActors authorizedActors( final List authorizedUsers = new ArrayList<>(); final List authorizedGroups = new ArrayList<>(); + final List authorizedRoles = new ArrayList<>(); boolean allUsers = false; boolean allGroups = false; @@ -153,16 +154,17 @@ public AuthorizedActors authorizedActors( // Step 3: For each matching policy, add actors that are authorized. authorizedUsers.addAll(matchingActors.getUsers()); authorizedGroups.addAll(matchingActors.getGroups()); - if (matchingActors.allUsers()) { + authorizedRoles.addAll(matchingActors.getRoles()); + if (matchingActors.getAllUsers()) { allUsers = true; } - if (matchingActors.allGroups()) { + if (matchingActors.getAllGroups()) { allGroups = true; } } // Step 4: Return all authorized users and groups. 
- return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, allUsers, allGroups); + return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups); } /** diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index f8c017ea74e1f..da0ae26f2b1da 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -32,7 +32,10 @@ import java.util.stream.Stream; import javax.annotation.Nullable; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; import lombok.RequiredArgsConstructor; +import lombok.Value; import lombok.extern.slf4j.Slf4j; import static com.linkedin.metadata.Constants.*; @@ -75,6 +78,7 @@ public PolicyActors getMatchingActors( final Optional resource) { final List users = new ArrayList<>(); final List groups = new ArrayList<>(); + final List roles = new ArrayList<>(); boolean allUsers = false; boolean allGroups = false; if (policyMatchesResource(policy, resource)) { @@ -96,6 +100,9 @@ public PolicyActors getMatchingActors( if (actorFilter.getGroups() != null) { groups.addAll(actorFilter.getGroups()); } + if (actorFilter.getRoles() != null) { + roles.addAll(actorFilter.getRoles()); + } // 2. Fetch Actors based on resource ownership. if (actorFilter.isResourceOwners() && resource.isPresent()) { @@ -104,7 +111,7 @@ public PolicyActors getMatchingActors( groups.addAll(groupOwners(owners)); } } - return new PolicyActors(users, groups, allUsers, allGroups); + return new PolicyActors(users, groups, roles, allUsers, allGroups); } private boolean isPolicyApplicable( @@ -438,34 +445,14 @@ public boolean isGranted() { /** * Class used to represent all valid users of a policy. 
*/ + @Value + @AllArgsConstructor(access = AccessLevel.PUBLIC) public static class PolicyActors { - final List _users; - final List _groups; - final Boolean _allUsers; - final Boolean _allGroups; - - public PolicyActors(final List users, final List groups, final Boolean allUsers, final Boolean allGroups) { - _users = users; - _groups = groups; - _allUsers = allUsers; - _allGroups = allGroups; - } - - public List getUsers() { - return _users; - } - - public List getGroups() { - return _groups; - } - - public Boolean allUsers() { - return _allUsers; - } - - public Boolean allGroups() { - return _allGroups; - } + List users; + List groups; + List roles; + Boolean allUsers; + Boolean allGroups; } private List userOwners(final Set owners) { diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index babb1c5d00ee8..b0b206001209c 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -21,6 +21,7 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.RoleMembership; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchEntity; @@ -55,6 +56,7 @@ import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; +import static org.testng.Assert.assertFalse; public class DataHubAuthorizerTest { @@ -63,6 +65,7 @@ public class DataHubAuthorizerTest { private static final Urn PARENT_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:parent"); private static final Urn CHILD_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:child"); + private static final Urn USER_WITH_ADMIN_ROLE = UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); private EntityClient _entityClient; private DataHubAuthorizer _dataHubAuthorizer; @@ -92,40 +95,56 @@ public void setupTest() throws Exception { final EnvelopedAspectMap childDomainPolicyAspectMap = new EnvelopedAspectMap(); childDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); + final Urn adminPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:4"); + final DataHubActorFilter actorFilter = new DataHubActorFilter(); + actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); + final DataHubPolicyInfo adminPolicy = createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); + final EnvelopedAspectMap adminPolicyAspectMap = new EnvelopedAspectMap(); + adminPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); + final ScrollResult policySearchResult1 = new ScrollResult() .setScrollId("1") - .setNumEntities(4) + .setNumEntities(5) .setEntities( new SearchEntityArray( ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); final ScrollResult policySearchResult2 = new ScrollResult() .setScrollId("2") - .setNumEntities(4) + .setNumEntities(5) .setEntities( new SearchEntityArray( ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); final ScrollResult policySearchResult3 = new ScrollResult() 
.setScrollId("3") - .setNumEntities(4) + .setNumEntities(5) .setEntities( new SearchEntityArray( ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); final ScrollResult policySearchResult4 = new ScrollResult() - .setNumEntities(4) + .setScrollId("4") + .setNumEntities(5) .setEntities( new SearchEntityArray( ImmutableList.of( new SearchEntity().setEntity(childDomainPolicyUrn)))); + final ScrollResult policySearchResult5 = new ScrollResult() + .setNumEntities(5) + .setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(adminPolicyUrn)))); + when(_entityClient.scrollAcrossEntities(eq(List.of("dataHubPolicy")), eq(""), isNull(), any(), isNull(), anyInt(), eq(new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipHighlighting(true).setSkipCache(true)), any())) .thenReturn(policySearchResult1) .thenReturn(policySearchResult2) .thenReturn(policySearchResult3) - .thenReturn(policySearchResult4); + .thenReturn(policySearchResult4) + .thenReturn(policySearchResult5); when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())).thenAnswer(args -> { Set inputUrns = args.getArgument(1); @@ -140,6 +159,8 @@ public void setupTest() throws Exception { return Map.of(parentDomainPolicyUrn, new EntityResponse().setUrn(parentDomainPolicyUrn).setAspects(parentDomainPolicyAspectMap)); case "urn:li:dataHubPolicy:3": return Map.of(childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspects(childDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:4": + return Map.of(adminPolicyUrn, new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); default: throw new IllegalStateException(); } @@ -167,6 +188,10 @@ public void setupTest() throws Exception { when(_entityClient.batchGetV2(any(), eq(Collections.singleton(PARENT_DOMAIN_URN)), eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), any())) .thenReturn(createDomainPropertiesBatchResponse(null)); + // Mocks to reach role membership for a user urn + when(_entityClient.batchGetV2(any(), eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any()) + ).thenReturn(createUserRoleMembershipBatchResponse(USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + final Authentication systemAuthentication = new Authentication( new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), "" @@ -302,6 +327,32 @@ public void testAuthorizedActorsActivePolicy() throws Exception { )); } + @Test + public void testAuthorizedRoleActivePolicy() throws Exception { + final AuthorizedActors actors = + _dataHubAuthorizer.authorizedActors("EDIT_USER_PROFILE", // Should be inside the active policy. 
+ Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); + + assertFalse(actors.isAllUsers()); + assertFalse(actors.isAllGroups()); + assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of()); + assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of()); + assertEquals(new HashSet<>(actors.getRoles()), ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + } + + @Test + public void testAuthorizationBasedOnRoleIsAllowed() { + EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); + + AuthorizationRequest request = new AuthorizationRequest( + USER_WITH_ADMIN_ROLE.toString(), + "EDIT_USER_PROFILE", + Optional.of(resourceSpec) + ); + + assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); + } + @Test public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); @@ -342,13 +393,6 @@ public void testAuthorizationOnDomainWithoutPrivilegeIsDenied() { } private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List privileges, @Nullable final Urn domain) throws Exception { - final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); - dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); - dataHubPolicyInfo.setState(active ? ACTIVE_POLICY_STATE : INACTIVE_POLICY_STATE); - dataHubPolicyInfo.setPrivileges(new StringArray(privileges)); - dataHubPolicyInfo.setDisplayName("My Test Display"); - dataHubPolicyInfo.setDescription("My test display!"); - dataHubPolicyInfo.setEditable(true); List users = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2")); List groups = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), Urn.createFromString("urn:li:corpGroup:group2")); @@ -359,6 +403,20 @@ private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List p actorFilter.setAllGroups(true); actorFilter.setUsers(new UrnArray(users)); actorFilter.setGroups(new UrnArray(groups)); + + return createDataHubPolicyInfoFor(active, privileges, domain, actorFilter); + } + + private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List privileges, + @Nullable final Urn domain, DataHubActorFilter actorFilter) throws Exception { + final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); + dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); + dataHubPolicyInfo.setState(active ? 
ACTIVE_POLICY_STATE : INACTIVE_POLICY_STATE); + dataHubPolicyInfo.setPrivileges(new StringArray(privileges)); + dataHubPolicyInfo.setDisplayName("My Test Display"); + dataHubPolicyInfo.setDescription("My test display!"); + dataHubPolicyInfo.setEditable(true); + dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -429,6 +487,21 @@ private Map createDomainPropertiesBatchResponse(@Nullable f return batchResponse; } + private Map createUserRoleMembershipBatchResponse(final Urn userUrn, @Nullable final Urn roleUrn) { + final Map batchResponse = new HashMap<>(); + final EntityResponse response = new EntityResponse(); + EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + final RoleMembership membership = new RoleMembership(); + if (roleUrn != null) { + membership.setRoles(new UrnArray(roleUrn)); + } + aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect() + .setValue(new com.linkedin.entity.Aspect(membership.data()))); + response.setAspects(aspectMap); + batchResponse.put(userUrn, response); + return batchResponse; + } + private AuthorizerContext createAuthorizerContext(final Authentication systemAuthentication, final EntityClient entityClient) { return new AuthorizerContext(Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java index be8c948f8ef89..2790c16ba75e6 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java @@ -1041,6 +1041,7 @@ public void testGetMatchingActorsResourceMatch() throws Exception { Urn.createFromString("urn:li:corpuser:user2")))); actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:role:Admin")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1056,8 +1057,8 @@ public void testGetMatchingActorsResourceMatch() throws Exception { Collections.emptySet(), Collections.emptySet()); PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); - assertTrue(actors.allUsers()); - assertTrue(actors.allGroups()); + assertTrue(actors.getAllUsers()); + assertTrue(actors.getAllGroups()); assertEquals(actors.getUsers(), ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2"), @@ -1068,6 +1069,8 @@ public void testGetMatchingActorsResourceMatch() throws Exception { Urn.createFromString("urn:li:corpGroup:group2"), Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner )); + assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:role:Admin"))); + // Verify aspect client called, entity client not called. verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), eq(null), any()); @@ -1106,15 +1109,58 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); - assertFalse(actors.allUsers()); - assertFalse(actors.allGroups()); + assertFalse(actors.getAllUsers()); + assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), Collections.emptyList()); assertEquals(actors.getGroups(), Collections.emptyList()); + //assertEquals(actors.getRoles(), Collections.emptyList()); // Verify no network calls verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any()); } + @Test + public void testGetMatchingActorsByRoleResourceMatch() throws Exception { + final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); + dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); + dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE); + dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS")); + dataHubPolicyInfo.setDisplayName("My Test Display"); + dataHubPolicyInfo.setDescription("My test display!"); + dataHubPolicyInfo.setEditable(true); + + final DataHubActorFilter actorFilter = new DataHubActorFilter(); + actorFilter.setResourceOwners(true); + actorFilter.setAllUsers(false); + actorFilter.setAllGroups(false); + actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); + dataHubPolicyInfo.setActors(actorFilter); + + final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); + resourceFilter.setAllResources(false); + resourceFilter.setType("dataset"); + StringArray resourceUrns = new StringArray(); + resourceUrns.add(RESOURCE_URN); + resourceFilter.setResources(resourceUrns); + dataHubPolicyInfo.setResources(resourceFilter); + + ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(), + Collections.emptySet(), Collections.emptySet()); + + PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + + assertFalse(actors.getAllUsers()); + assertFalse(actors.getAllGroups()); + + assertEquals(actors.getUsers(), ImmutableList.of()); + assertEquals(actors.getGroups(), ImmutableList.of()); + assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); + + // Verify aspect client called, entity client not called. 
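Stripped of the entity-client plumbing, the shape of this feature is small: a policy's actor filter now carries roles alongside users and groups, and an actor matches when any populated dimension intersects the actor's resolved identity. A deliberately simplified Python model of that matching (field and function names here are illustrative, not the Java API):

    from dataclasses import dataclass, field
    from typing import List, Set

    @dataclass
    class PolicyActors:
        users: List[str] = field(default_factory=list)
        groups: List[str] = field(default_factory=list)
        roles: List[str] = field(default_factory=list)
        all_users: bool = False
        all_groups: bool = False

    def actor_matches(actors: PolicyActors, user: str,
                      user_groups: Set[str], user_roles: Set[str]) -> bool:
        # Roles are evaluated as a third actor dimension, next to users/groups.
        return (
            actors.all_users
            or (actors.all_groups and bool(user_groups))
            or user in actors.users
            or bool(user_groups.intersection(actors.groups))
            or bool(user_roles.intersection(actors.roles))
        )

    admin_only = PolicyActors(roles=["urn:li:dataHubRole:Admin"])
    assert actor_matches(admin_only, "urn:li:corpuser:user-with-admin",
                         set(), {"urn:li:dataHubRole:Admin"})
    assert not actor_matches(admin_only, "urn:li:corpuser:someone-else",
                             set(), set())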
+ verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), + eq(null), any()); + } + private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java index 442ac1b0d287b..e5f3e223ff505 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java @@ -75,7 +75,7 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { @Override public AuthorizedActors authorizedActors(String privilege, Optional resourceSpec) { - return new AuthorizedActors("ALL", null, null, true, true); + return new AuthorizedActors("ALL", null, null, null, true, true); } } From f73ecfdcbbc35437fcb80c9e27e78908dae23ea7 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Wed, 8 Nov 2023 18:17:49 -0500 Subject: [PATCH 078/792] style(ingest/tableau): Rename tableau_constant to c (#9207) --- .../src/datahub/ingestion/source/tableau.py | 597 ++++++++---------- .../ingestion/source/tableau_common.py | 14 +- 2 files changed, 272 insertions(+), 339 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index 4bc40b0aac964..08df7599510f4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -59,7 +59,7 @@ ) from datahub.ingestion.api.source import MetadataWorkUnitProcessor, Source from datahub.ingestion.api.workunit import MetadataWorkUnit -from datahub.ingestion.source import tableau_constant +from datahub.ingestion.source import tableau_constant as c from datahub.ingestion.source.common.subtypes import ( BIContainerSubTypes, DatasetSubTypes, @@ -720,16 +720,12 @@ def get_connection_object_page( query, connection_type, query_filter, count, offset, False ) - if tableau_constant.ERRORS in query_data: - errors = query_data[tableau_constant.ERRORS] + if c.ERRORS in query_data: + errors = query_data[c.ERRORS] if all( # The format of the error messages is highly unpredictable, so we have to # be extra defensive with our parsing. 
- error - and (error.get(tableau_constant.EXTENSIONS) or {}).get( - tableau_constant.SEVERITY - ) - == tableau_constant.WARNING + error and (error.get(c.EXTENSIONS) or {}).get(c.SEVERITY) == c.WARNING for error in errors ): self.report.report_warning(key=connection_type, reason=f"{errors}") @@ -737,14 +733,14 @@ def get_connection_object_page( raise RuntimeError(f"Query {connection_type} error: {errors}") connection_object = ( - query_data.get(tableau_constant.DATA).get(connection_type, {}) - if query_data.get(tableau_constant.DATA) + query_data.get(c.DATA).get(connection_type, {}) + if query_data.get(c.DATA) else {} ) - total_count = connection_object.get(tableau_constant.TOTAL_COUNT, 0) - has_next_page = connection_object.get(tableau_constant.PAGE_INFO, {}).get( - tableau_constant.HAS_NEXT_PAGE, False + total_count = connection_object.get(c.TOTAL_COUNT, 0) + has_next_page = connection_object.get(c.PAGE_INFO, {}).get( + c.HAS_NEXT_PAGE, False ) return connection_object, total_count, has_next_page @@ -781,7 +777,7 @@ def get_connection_objects( offset += count - for obj in connection_objects.get(tableau_constant.NODES) or []: + for obj in connection_objects.get(c.NODES) or []: yield obj def emit_workbooks(self) -> Iterable[MetadataWorkUnit]: @@ -790,11 +786,11 @@ def emit_workbooks(self) -> Iterable[MetadataWorkUnit]: project.name for project in self.tableau_project_registry.values() ] project_names_str: str = json.dumps(project_names) - projects = f"{tableau_constant.PROJECT_NAME_WITH_IN}: {project_names_str}" + projects = f"{c.PROJECT_NAME_WITH_IN}: {project_names_str}" for workbook in self.get_connection_objects( workbook_graphql_query, - tableau_constant.WORKBOOKS_CONNECTION, + c.WORKBOOKS_CONNECTION, projects, page_size_override=self.config.workbook_page_size, ): @@ -804,11 +800,9 @@ def emit_workbooks(self) -> Iterable[MetadataWorkUnit]: # however Tableau supports projectLuidWithin in Tableau Cloud June 2022 / Server 2022.3 and later. project_luid: Optional[str] = self._get_workbook_project_luid(workbook) if project_luid not in self.tableau_project_registry.keys(): - wrk_name: Optional[str] = workbook.get(tableau_constant.NAME) - wrk_id: Optional[str] = workbook.get(tableau_constant.ID) - prj_name: Optional[str] = workbook.get( - tableau_constant.PROJECT_NAME - ) + wrk_name: Optional[str] = workbook.get(c.NAME) + wrk_id: Optional[str] = workbook.get(c.ID) + prj_name: Optional[str] = workbook.get(c.PROJECT_NAME) logger.debug( f"Skipping workbook {wrk_name}({wrk_id}) as it is project {prj_name}({project_luid}) not " @@ -818,25 +812,22 @@ def emit_workbooks(self) -> Iterable[MetadataWorkUnit]: yield from self.emit_workbook_as_container(workbook) - for sheet in workbook.get(tableau_constant.SHEETS, []): - self.sheet_ids.append(sheet[tableau_constant.ID]) + for sheet in workbook.get(c.SHEETS, []): + self.sheet_ids.append(sheet[c.ID]) - for dashboard in workbook.get(tableau_constant.DASHBOARDS, []): - self.dashboard_ids.append(dashboard[tableau_constant.ID]) + for dashboard in workbook.get(c.DASHBOARDS, []): + self.dashboard_ids.append(dashboard[c.ID]) - for ds in workbook.get(tableau_constant.EMBEDDED_DATA_SOURCES, []): - self.embedded_datasource_ids_being_used.append( - ds[tableau_constant.ID] - ) + for ds in workbook.get(c.EMBEDDED_DATA_SOURCES, []): + self.embedded_datasource_ids_being_used.append(ds[c.ID]) def _track_custom_sql_ids(self, field: dict) -> None: # Tableau shows custom sql datasource as a table in ColumnField's upstreamColumns. 
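All of the Tableau Metadata API fetches in this file share one pagination template: request a page of a Relay-style connection, read totalCount and pageInfo.hasNextPage, advance the offset, and repeat. The template as a compact generator -- fetch_page below is a toy stand-in for get_connection_object_page, whose error handling appears above:

    def fetch_connection_objects(fetch_page, page_size):
        """Drain a paginated connection: fetch_page(count, offset) must
        return (nodes, total_count, has_next_page)."""
        offset = 0
        has_next_page = True
        while has_next_page:
            nodes, _total_count, has_next_page = fetch_page(page_size, offset)
            offset += page_size
            yield from nodes

    data = list(range(25))

    def toy_page(count, offset):
        page = data[offset:offset + count]
        return page, len(data), offset + count < len(data)

    assert list(fetch_connection_objects(toy_page, 10)) == data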
- for column in field.get(tableau_constant.UPSTREAM_COLUMNS, []): + for column in field.get(c.UPSTREAM_COLUMNS, []): table_id = ( - column.get(tableau_constant.TABLE, {}).get(tableau_constant.ID) - if column.get(tableau_constant.TABLE) - and column[tableau_constant.TABLE][tableau_constant.TYPE_NAME] - == tableau_constant.CUSTOM_SQL_TABLE + column.get(c.TABLE, {}).get(c.ID) + if column.get(c.TABLE) + and column[c.TABLE][c.TYPE_NAME] == c.CUSTOM_SQL_TABLE else None ) @@ -861,15 +852,15 @@ def _create_upstream_table_lineage( # and published datasource have same upstreamTables in this case. if upstream_tables and is_embedded_ds: logger.debug( - f"Embedded datasource {datasource.get(tableau_constant.ID)} has upstreamDatasources.\ + f"Embedded datasource {datasource.get(c.ID)} has upstreamDatasources.\ Setting only upstreamDatasources lineage. The upstreamTables lineage \ will be set via upstream published datasource." ) else: # This adds an edge to upstream DatabaseTables using `upstreamTables` upstreams, id_to_urn = self.get_upstream_tables( - datasource.get(tableau_constant.UPSTREAM_TABLES, []), - datasource.get(tableau_constant.NAME), + datasource.get(c.UPSTREAM_TABLES, []), + datasource.get(c.NAME), browse_path, is_custom_sql=False, ) @@ -878,23 +869,23 @@ def _create_upstream_table_lineage( # This adds an edge to upstream CustomSQLTables using `fields`.`upstreamColumns`.`table` csql_upstreams, csql_id_to_urn = self.get_upstream_csql_tables( - datasource.get(tableau_constant.FIELDS) or [], + datasource.get(c.FIELDS) or [], ) upstream_tables.extend(csql_upstreams) table_id_to_urn.update(csql_id_to_urn) logger.debug( - f"A total of {len(upstream_tables)} upstream table edges found for datasource {datasource[tableau_constant.ID]}" + f"A total of {len(upstream_tables)} upstream table edges found for datasource {datasource[c.ID]}" ) datasource_urn = builder.make_dataset_urn_with_platform_instance( platform=self.platform, - name=datasource[tableau_constant.ID], + name=datasource[c.ID], platform_instance=self.config.platform_instance, env=self.config.env, ) - if datasource.get(tableau_constant.FIELDS): + if datasource.get(c.FIELDS): if self.config.extract_column_level_lineage: # Find fine grained lineage for datasource column to datasource column edge, # upstream columns may be from same datasource @@ -912,20 +903,20 @@ def _create_upstream_table_lineage( fine_grained_lineages.extend(upstream_columns) logger.debug( - f"A total of {len(fine_grained_lineages)} upstream column edges found for datasource {datasource[tableau_constant.ID]}" + f"A total of {len(fine_grained_lineages)} upstream column edges found for datasource {datasource[c.ID]}" ) return upstream_tables, fine_grained_lineages def get_upstream_datasources(self, datasource: dict) -> List[Upstream]: upstream_tables = [] - for ds in datasource.get(tableau_constant.UPSTREAM_DATA_SOURCES, []): - if ds[tableau_constant.ID] not in self.datasource_ids_being_used: - self.datasource_ids_being_used.append(ds[tableau_constant.ID]) + for ds in datasource.get(c.UPSTREAM_DATA_SOURCES, []): + if ds[c.ID] not in self.datasource_ids_being_used: + self.datasource_ids_being_used.append(ds[c.ID]) upstream_ds_urn = builder.make_dataset_urn_with_platform_instance( platform=self.platform, - name=ds[tableau_constant.ID], + name=ds[c.ID], platform_instance=self.config.platform_instance, env=self.config.env, ) @@ -943,20 +934,15 @@ def get_upstream_csql_tables( csql_id_to_urn = {} for field in fields: - if not field.get(tableau_constant.UPSTREAM_COLUMNS): + if not 
field.get(c.UPSTREAM_COLUMNS): continue - for upstream_col in field[tableau_constant.UPSTREAM_COLUMNS]: + for upstream_col in field[c.UPSTREAM_COLUMNS]: if ( upstream_col - and upstream_col.get(tableau_constant.TABLE) - and upstream_col.get(tableau_constant.TABLE)[ - tableau_constant.TYPE_NAME - ] - == tableau_constant.CUSTOM_SQL_TABLE + and upstream_col.get(c.TABLE) + and upstream_col.get(c.TABLE)[c.TYPE_NAME] == c.CUSTOM_SQL_TABLE ): - upstream_table_id = upstream_col.get(tableau_constant.TABLE)[ - tableau_constant.ID - ] + upstream_table_id = upstream_col.get(c.TABLE)[c.ID] csql_urn = builder.make_dataset_urn_with_platform_instance( platform=self.platform, @@ -986,18 +972,18 @@ def get_upstream_tables( for table in tables: # skip upstream tables when there is no column info when retrieving datasource # Lineage and Schema details for these will be taken care in self.emit_custom_sql_datasources() - num_tbl_cols: Optional[int] = table.get( - tableau_constant.COLUMNS_CONNECTION - ) and table[tableau_constant.COLUMNS_CONNECTION].get("totalCount") + num_tbl_cols: Optional[int] = table.get(c.COLUMNS_CONNECTION) and table[ + c.COLUMNS_CONNECTION + ].get("totalCount") if not is_custom_sql and not num_tbl_cols: logger.debug( - f"Skipping upstream table with id {table[tableau_constant.ID]}, no columns: {table}" + f"Skipping upstream table with id {table[c.ID]}, no columns: {table}" ) continue - elif table[tableau_constant.NAME] is None: + elif table[c.NAME] is None: self.report.num_upstream_table_skipped_no_name += 1 logger.warning( - f"Skipping upstream table {table[tableau_constant.ID]} from lineage since its name is none: {table}" + f"Skipping upstream table {table[c.ID]} from lineage since its name is none: {table}" ) continue @@ -1014,7 +1000,7 @@ def get_upstream_tables( self.config.platform_instance_map, self.config.lineage_overrides, ) - table_id_to_urn[table[tableau_constant.ID]] = table_urn + table_id_to_urn[table[c.ID]] = table_urn upstream_table = Upstream( dataset=table_urn, @@ -1029,13 +1015,13 @@ def get_upstream_tables( if table_urn not in self.database_tables: self.database_tables[table_urn] = DatabaseTable( urn=table_urn, - id=table[tableau_constant.ID], + id=table[c.ID], num_cols=num_tbl_cols, paths={table_path} if table_path else set(), ) else: self.database_tables[table_urn].update_table( - table[tableau_constant.ID], num_tbl_cols, table_path + table[c.ID], num_tbl_cols, table_path ) return upstream_tables, table_id_to_urn @@ -1047,24 +1033,24 @@ def get_upstream_columns_of_fields_in_datasource( table_id_to_urn: Dict[str, str], ) -> List[FineGrainedLineage]: fine_grained_lineages = [] - for field in datasource.get(tableau_constant.FIELDS) or []: - field_name = field.get(tableau_constant.NAME) + for field in datasource.get(c.FIELDS) or []: + field_name = field.get(c.NAME) # upstreamColumns lineage will be set via upstreamFields. 
# such as for CalculatedField if ( not field_name - or not field.get(tableau_constant.UPSTREAM_COLUMNS) - or field.get(tableau_constant.UPSTREAM_FIELDS) + or not field.get(c.UPSTREAM_COLUMNS) + or field.get(c.UPSTREAM_FIELDS) ): continue input_columns = [] - for upstream_col in field.get(tableau_constant.UPSTREAM_COLUMNS): + for upstream_col in field.get(c.UPSTREAM_COLUMNS): if not upstream_col: continue - name = upstream_col.get(tableau_constant.NAME) + name = upstream_col.get(c.NAME) upstream_table_id = ( - upstream_col.get(tableau_constant.TABLE)[tableau_constant.ID] - if upstream_col.get(tableau_constant.TABLE) + upstream_col.get(c.TABLE)[c.ID] + if upstream_col.get(c.TABLE) else None ) if ( @@ -1110,23 +1096,21 @@ def get_upstream_fields_of_field_in_datasource( self, datasource: dict, datasource_urn: str ) -> List[FineGrainedLineage]: fine_grained_lineages = [] - for field in datasource.get(tableau_constant.FIELDS) or []: - field_name = field.get(tableau_constant.NAME) + for field in datasource.get(c.FIELDS) or []: + field_name = field.get(c.NAME) # It is observed that upstreamFields gives one-hop field # lineage, and not multi-hop field lineage # This behavior is as desired in our case. - if not field_name or not field.get(tableau_constant.UPSTREAM_FIELDS): + if not field_name or not field.get(c.UPSTREAM_FIELDS): continue input_fields = [] - for upstream_field in field.get(tableau_constant.UPSTREAM_FIELDS): + for upstream_field in field.get(c.UPSTREAM_FIELDS): if not upstream_field: continue - name = upstream_field.get(tableau_constant.NAME) + name = upstream_field.get(c.NAME) upstream_ds_id = ( - upstream_field.get(tableau_constant.DATA_SOURCE)[ - tableau_constant.ID - ] - if upstream_field.get(tableau_constant.DATA_SOURCE) + upstream_field.get(c.DATA_SOURCE)[c.ID] + if upstream_field.get(c.DATA_SOURCE) else None ) if name and upstream_ds_id: @@ -1212,35 +1196,37 @@ def get_upstream_fields_from_custom_sql( return fine_grained_lineages def get_transform_operation(self, field: dict) -> str: - field_type = field[tableau_constant.TYPE_NAME] + field_type = field[c.TYPE_NAME] if field_type in ( - tableau_constant.DATA_SOURCE_FIELD, - tableau_constant.COLUMN_FIELD, + c.DATA_SOURCE_FIELD, + c.COLUMN_FIELD, ): - op = tableau_constant.IDENTITY # How to specify exact same - elif field_type == tableau_constant.CALCULATED_FIELD: + op = c.IDENTITY # How to specify exact same + elif field_type == c.CALCULATED_FIELD: op = field_type - if field.get(tableau_constant.FORMULA): - op += f"formula: {field.get(tableau_constant.FORMULA)}" + if field.get(c.FORMULA): + op += f"formula: {field.get(c.FORMULA)}" else: op = field_type # BinField, CombinedField, etc return op def emit_custom_sql_datasources(self) -> Iterable[MetadataWorkUnit]: - custom_sql_filter = f"{tableau_constant.ID_WITH_IN}: {json.dumps(self.custom_sql_ids_being_used)}" + custom_sql_filter = ( + f"{c.ID_WITH_IN}: {json.dumps(self.custom_sql_ids_being_used)}" + ) custom_sql_connection = list( self.get_connection_objects( custom_sql_graphql_query, - tableau_constant.CUSTOM_SQL_TABLE_CONNECTION, + c.CUSTOM_SQL_TABLE_CONNECTION, custom_sql_filter, ) ) unique_custom_sql = get_unique_custom_sql(custom_sql_connection) for csql in unique_custom_sql: - csql_id: str = csql[tableau_constant.ID] + csql_id: str = csql[c.ID] csql_urn = builder.make_dataset_urn_with_platform_instance( platform=self.platform, name=csql_id, @@ -1256,40 +1242,33 @@ def emit_custom_sql_datasources(self) -> Iterable[MetadataWorkUnit]: datasource_name = None project = None - if 
len(csql[tableau_constant.DATA_SOURCES]) > 0: + if len(csql[c.DATA_SOURCES]) > 0: # CustomSQLTable id owned by exactly one tableau data source logger.debug( - f"Number of datasources referencing CustomSQLTable: {len(csql[tableau_constant.DATA_SOURCES])}" + f"Number of datasources referencing CustomSQLTable: {len(csql[c.DATA_SOURCES])}" ) - datasource = csql[tableau_constant.DATA_SOURCES][0] - datasource_name = datasource.get(tableau_constant.NAME) + datasource = csql[c.DATA_SOURCES][0] + datasource_name = datasource.get(c.NAME) if datasource.get( - tableau_constant.TYPE_NAME - ) == tableau_constant.EMBEDDED_DATA_SOURCE and datasource.get( - tableau_constant.WORKBOOK - ): + c.TYPE_NAME + ) == c.EMBEDDED_DATA_SOURCE and datasource.get(c.WORKBOOK): datasource_name = ( - f"{datasource.get(tableau_constant.WORKBOOK).get(tableau_constant.NAME)}/{datasource_name}" - if datasource_name - and datasource.get(tableau_constant.WORKBOOK).get( - tableau_constant.NAME - ) + f"{datasource.get(c.WORKBOOK).get(c.NAME)}/{datasource_name}" + if datasource_name and datasource.get(c.WORKBOOK).get(c.NAME) else None ) logger.debug( f"Adding datasource {datasource_name}({datasource.get('id')}) to container" ) yield from add_entity_to_container( - self.gen_workbook_key( - datasource[tableau_constant.WORKBOOK][tableau_constant.ID] - ), - tableau_constant.DATASET, + self.gen_workbook_key(datasource[c.WORKBOOK][c.ID]), + c.DATASET, dataset_snapshot.urn, ) project = self._get_project_browse_path_name(datasource) - tables = csql.get(tableau_constant.TABLES, []) + tables = csql.get(c.TABLES, []) if tables: # lineage from custom sql -> datasets/tables # @@ -1306,9 +1285,8 @@ def emit_custom_sql_datasources(self) -> Iterable[MetadataWorkUnit]: # Schema Metadata # if condition is needed as graphQL return "cloumns": None columns: List[Dict[Any, Any]] = ( - cast(List[Dict[Any, Any]], csql.get(tableau_constant.COLUMNS)) - if tableau_constant.COLUMNS in csql - and csql.get(tableau_constant.COLUMNS) is not None + cast(List[Dict[Any, Any]], csql.get(c.COLUMNS)) + if c.COLUMNS in csql and csql.get(c.COLUMNS) is not None else [] ) schema_metadata = self.get_schema_metadata_for_custom_sql(columns) @@ -1320,7 +1298,7 @@ def emit_custom_sql_datasources(self) -> Iterable[MetadataWorkUnit]: if project and datasource_name: browse_paths = BrowsePathsClass( paths=[ - f"/{self.config.env.lower()}/{self.platform}/{project}/{datasource[tableau_constant.NAME]}" + f"/{self.config.env.lower()}/{self.platform}/{project}/{datasource[c.NAME]}" ] ) dataset_snapshot.aspects.append(browse_paths) @@ -1328,27 +1306,25 @@ def emit_custom_sql_datasources(self) -> Iterable[MetadataWorkUnit]: logger.debug(f"Browse path not set for Custom SQL table {csql_id}") dataset_properties = DatasetPropertiesClass( - name=csql.get(tableau_constant.NAME), - description=csql.get(tableau_constant.DESCRIPTION), + name=csql.get(c.NAME), + description=csql.get(c.DESCRIPTION), ) dataset_snapshot.aspects.append(dataset_properties) - if csql.get(tableau_constant.QUERY): + if csql.get(c.QUERY): view_properties = ViewPropertiesClass( materialized=False, - viewLanguage=tableau_constant.SQL, - viewLogic=clean_query(csql[tableau_constant.QUERY]), + viewLanguage=c.SQL, + viewLogic=clean_query(csql[c.QUERY]), ) dataset_snapshot.aspects.append(view_properties) yield self.get_metadata_change_event(dataset_snapshot) yield self.get_metadata_change_proposal( dataset_snapshot.urn, - aspect_name=tableau_constant.SUB_TYPES, - aspect=SubTypesClass( - typeNames=[DatasetSubTypes.VIEW, 
tableau_constant.CUSTOM_SQL] - ), + aspect_name=c.SUB_TYPES, + aspect=SubTypesClass(typeNames=[DatasetSubTypes.VIEW, c.CUSTOM_SQL]), ) def get_schema_metadata_for_custom_sql( @@ -1359,21 +1335,19 @@ def get_schema_metadata_for_custom_sql( for field in columns: # Datasource fields - if field.get(tableau_constant.NAME) is None: + if field.get(c.NAME) is None: self.report.num_csql_field_skipped_no_name += 1 logger.warning( - f"Skipping field {field[tableau_constant.ID]} from schema since its name is none" + f"Skipping field {field[c.ID]} from schema since its name is none" ) continue - nativeDataType = field.get( - tableau_constant.REMOTE_TYPE, tableau_constant.UNKNOWN - ) + nativeDataType = field.get(c.REMOTE_TYPE, c.UNKNOWN) TypeClass = FIELD_TYPE_MAPPING.get(nativeDataType, NullTypeClass) schema_field = SchemaField( - fieldPath=field[tableau_constant.NAME], + fieldPath=field[c.NAME], type=SchemaFieldDataType(type=TypeClass()), nativeDataType=nativeDataType, - description=field.get(tableau_constant.DESCRIPTION), + description=field.get(c.DESCRIPTION), ) fields.append(schema_field) @@ -1391,28 +1365,25 @@ def _get_published_datasource_project_luid(self, ds: dict) -> Optional[str]: # This is fallback in case "get all datasources" query fails for some reason. # It is possible due to https://github.com/tableau/server-client-python/issues/1210 if ( - ds.get(tableau_constant.LUID) - and ds[tableau_constant.LUID] not in self.datasource_project_map.keys() + ds.get(c.LUID) + and ds[c.LUID] not in self.datasource_project_map.keys() and self.report.get_all_datasources_query_failed ): logger.debug( - f"published datasource {ds.get(tableau_constant.NAME)} project_luid not found." - f" Running get datasource query for {ds[tableau_constant.LUID]}" + f"published datasource {ds.get(c.NAME)} project_luid not found." 
+ f" Running get datasource query for {ds[c.LUID]}" ) # Query and update self.datasource_project_map with luid - self._query_published_datasource_for_project_luid(ds[tableau_constant.LUID]) + self._query_published_datasource_for_project_luid(ds[c.LUID]) if ( - ds.get(tableau_constant.LUID) - and ds[tableau_constant.LUID] in self.datasource_project_map.keys() - and self.datasource_project_map[ds[tableau_constant.LUID]] - in self.tableau_project_registry + ds.get(c.LUID) + and ds[c.LUID] in self.datasource_project_map.keys() + and self.datasource_project_map[ds[c.LUID]] in self.tableau_project_registry ): - return self.datasource_project_map[ds[tableau_constant.LUID]] + return self.datasource_project_map[ds[c.LUID]] - logger.debug( - f"published datasource {ds.get(tableau_constant.NAME)} project_luid not found" - ) + logger.debug(f"published datasource {ds.get(c.NAME)} project_luid not found") return None @@ -1437,60 +1408,52 @@ def _query_published_datasource_for_project_luid(self, ds_luid: str) -> None: logger.debug("Error stack trace", exc_info=True) def _get_workbook_project_luid(self, wb: dict) -> Optional[str]: - if wb.get(tableau_constant.LUID) and self.workbook_project_map.get( - wb[tableau_constant.LUID] - ): - return self.workbook_project_map[wb[tableau_constant.LUID]] + if wb.get(c.LUID) and self.workbook_project_map.get(wb[c.LUID]): + return self.workbook_project_map[wb[c.LUID]] - logger.debug(f"workbook {wb.get(tableau_constant.NAME)} project_luid not found") + logger.debug(f"workbook {wb.get(c.NAME)} project_luid not found") return None def _get_embedded_datasource_project_luid(self, ds: dict) -> Optional[str]: - if ds.get(tableau_constant.WORKBOOK): + if ds.get(c.WORKBOOK): project_luid: Optional[str] = self._get_workbook_project_luid( - ds[tableau_constant.WORKBOOK] + ds[c.WORKBOOK] ) if project_luid and project_luid in self.tableau_project_registry: return project_luid - logger.debug( - f"embedded datasource {ds.get(tableau_constant.NAME)} project_luid not found" - ) + logger.debug(f"embedded datasource {ds.get(c.NAME)} project_luid not found") return None def _get_datasource_project_luid(self, ds: dict) -> Optional[str]: # Only published and embedded data-sources are supported - ds_type: Optional[str] = ds.get(tableau_constant.TYPE_NAME) + ds_type: Optional[str] = ds.get(c.TYPE_NAME) if ds_type not in ( - tableau_constant.PUBLISHED_DATA_SOURCE, - tableau_constant.EMBEDDED_DATA_SOURCE, + c.PUBLISHED_DATA_SOURCE, + c.EMBEDDED_DATA_SOURCE, ): logger.debug( - f"datasource {ds.get(tableau_constant.NAME)} type {ds.get(tableau_constant.TYPE_NAME)} is " + f"datasource {ds.get(c.NAME)} type {ds.get(c.TYPE_NAME)} is " f"unsupported" ) return None func_selector: Any = { - tableau_constant.PUBLISHED_DATA_SOURCE: self._get_published_datasource_project_luid, - tableau_constant.EMBEDDED_DATA_SOURCE: self._get_embedded_datasource_project_luid, + c.PUBLISHED_DATA_SOURCE: self._get_published_datasource_project_luid, + c.EMBEDDED_DATA_SOURCE: self._get_embedded_datasource_project_luid, } return func_selector[ds_type](ds) @staticmethod def _get_datasource_project_name(ds: dict) -> Optional[str]: - if ds.get( - tableau_constant.TYPE_NAME - ) == tableau_constant.EMBEDDED_DATA_SOURCE and ds.get( - tableau_constant.WORKBOOK - ): - return ds[tableau_constant.WORKBOOK].get(tableau_constant.PROJECT_NAME) - if ds.get(tableau_constant.TYPE_NAME) == tableau_constant.PUBLISHED_DATA_SOURCE: - return ds.get(tableau_constant.PROJECT_NAME) + if ds.get(c.TYPE_NAME) == c.EMBEDDED_DATA_SOURCE and 
ds.get(c.WORKBOOK): + return ds[c.WORKBOOK].get(c.PROJECT_NAME) + if ds.get(c.TYPE_NAME) == c.PUBLISHED_DATA_SOURCE: + return ds.get(c.PROJECT_NAME) return None def _get_project_browse_path_name(self, ds: dict) -> Optional[str]: @@ -1502,7 +1465,7 @@ def _get_project_browse_path_name(self, ds: dict) -> Optional[str]: project_luid = self._get_datasource_project_luid(ds) if project_luid is None: logger.warning( - f"Could not load project hierarchy for datasource {ds.get(tableau_constant.NAME)}. Please check permissions." + f"Could not load project hierarchy for datasource {ds.get(c.NAME)}. Please check permissions." ) logger.debug(f"datasource = {ds}") return None @@ -1515,7 +1478,7 @@ def _create_lineage_to_upstream_tables( # This adds an edge to upstream DatabaseTables using `upstreamTables` upstream_tables, _ = self.get_upstream_tables( tables, - datasource.get(tableau_constant.NAME) or "", + datasource.get(c.NAME) or "", self._get_project_browse_path_name(datasource), is_custom_sql=True, ) @@ -1524,7 +1487,7 @@ def _create_lineage_to_upstream_tables( upstream_lineage = UpstreamLineage(upstreams=upstream_tables) yield self.get_metadata_change_proposal( csql_urn, - aspect_name=tableau_constant.UPSTREAM_LINEAGE, + aspect_name=c.UPSTREAM_LINEAGE, aspect=upstream_lineage, ) @@ -1547,22 +1510,19 @@ def parse_custom_sql( ] ], ) -> Optional["SqlParsingResult"]: - database_info = datasource.get(tableau_constant.DATABASE) or {} + database_info = datasource.get(c.DATABASE) or {} - if datasource.get(tableau_constant.IS_UNSUPPORTED_CUSTOM_SQL) in (None, False): + if datasource.get(c.IS_UNSUPPORTED_CUSTOM_SQL) in (None, False): logger.debug(f"datasource {datasource_urn} is not created from custom sql") return None - if ( - tableau_constant.NAME not in database_info - or tableau_constant.CONNECTION_TYPE not in database_info - ): + if c.NAME not in database_info or c.CONNECTION_TYPE not in database_info: logger.debug( f"database information is missing from datasource {datasource_urn}" ) return None - query = datasource.get(tableau_constant.QUERY) + query = datasource.get(c.QUERY) if query is None: logger.debug( f"raw sql query is not available for datasource {datasource_urn}" @@ -1571,13 +1531,13 @@ def parse_custom_sql( logger.debug(f"Parsing sql={query}") - upstream_db = database_info.get(tableau_constant.NAME) + upstream_db = database_info.get(c.NAME) if func_overridden_info is not None: # Override the information as per configuration upstream_db, platform_instance, platform, _ = func_overridden_info( - database_info[tableau_constant.CONNECTION_TYPE], - database_info.get(tableau_constant.NAME), + database_info[c.CONNECTION_TYPE], + database_info.get(c.NAME), self.config.platform_instance_map, self.config.lineage_overrides, ) @@ -1631,7 +1591,7 @@ def _create_lineage_from_unsupported_csql( yield self.get_metadata_change_proposal( csql_urn, - aspect_name=tableau_constant.UPSTREAM_LINEAGE, + aspect_name=c.UPSTREAM_LINEAGE, aspect=upstream_lineage, ) @@ -1642,10 +1602,10 @@ def _get_schema_metadata_for_datasource( for field in datasource_fields: # check datasource - custom sql relations from a field being referenced self._track_custom_sql_ids(field) - if field.get(tableau_constant.NAME) is None: + if field.get(c.NAME) is None: self.report.num_upstream_table_skipped_no_name += 1 logger.warning( - f"Skipping field {field[tableau_constant.ID]} from schema since its name is none" + f"Skipping field {field[c.ID]} from schema since its name is none" ) continue @@ -1678,7 +1638,7 @@ def 
get_metadata_change_proposal( aspect: Union["UpstreamLineage", "SubTypesClass"], ) -> MetadataWorkUnit: return MetadataChangeProposalWrapper( - entityType=tableau_constant.DATASET, + entityType=c.DATASET, changeType=ChangeTypeClass.UPSERT, entityUrn=urn, aspectName=aspect_name, @@ -1696,10 +1656,8 @@ def emit_datasource( datasource_info = datasource browse_path = self._get_project_browse_path_name(datasource) - logger.debug( - f"datasource {datasource.get(tableau_constant.NAME)} browse-path {browse_path}" - ) - datasource_id = datasource[tableau_constant.ID] + logger.debug(f"datasource {datasource.get(c.NAME)} browse-path {browse_path}") + datasource_id = datasource[c.ID] datasource_urn = builder.make_dataset_urn_with_platform_instance( self.platform, datasource_id, self.config.platform_instance, self.config.env ) @@ -1713,13 +1671,10 @@ def emit_datasource( # Browse path - if ( - browse_path - and is_embedded_ds - and workbook - and workbook.get(tableau_constant.NAME) - ): - browse_path = f"{browse_path}/{workbook[tableau_constant.NAME].replace('/', REPLACE_SLASH_CHAR)}" + if browse_path and is_embedded_ds and workbook and workbook.get(c.NAME): + browse_path = ( + f"{browse_path}/{workbook[c.NAME].replace('/', REPLACE_SLASH_CHAR)}" + ) if browse_path: browse_paths = BrowsePathsClass( @@ -1729,12 +1684,10 @@ def emit_datasource( # Ownership owner = ( - self._get_ownership( - datasource_info[tableau_constant.OWNER][tableau_constant.USERNAME] - ) + self._get_ownership(datasource_info[c.OWNER][c.USERNAME]) if datasource_info - and datasource_info.get(tableau_constant.OWNER) - and datasource_info[tableau_constant.OWNER].get(tableau_constant.USERNAME) + and datasource_info.get(c.OWNER) + and datasource_info[c.OWNER].get(c.USERNAME) else None ) if owner is not None: @@ -1742,24 +1695,22 @@ def emit_datasource( # Dataset properties dataset_props = DatasetPropertiesClass( - name=datasource.get(tableau_constant.NAME), - description=datasource.get(tableau_constant.DESCRIPTION), + name=datasource.get(c.NAME), + description=datasource.get(c.DESCRIPTION), customProperties=self.get_custom_props_from_dict( datasource, [ - tableau_constant.HAS_EXTRACTS, - tableau_constant.EXTRACT_LAST_REFRESH_TIME, - tableau_constant.EXTRACT_LAST_INCREMENTAL_UPDATE_TIME, - tableau_constant.EXTRACT_LAST_UPDATE_TIME, + c.HAS_EXTRACTS, + c.EXTRACT_LAST_REFRESH_TIME, + c.EXTRACT_LAST_INCREMENTAL_UPDATE_TIME, + c.EXTRACT_LAST_UPDATE_TIME, ], ), ) dataset_snapshot.aspects.append(dataset_props) # Upstream Tables - if datasource.get(tableau_constant.UPSTREAM_TABLES) or datasource.get( - tableau_constant.UPSTREAM_DATA_SOURCES - ): + if datasource.get(c.UPSTREAM_TABLES) or datasource.get(c.UPSTREAM_DATA_SOURCES): # datasource -> db table relations ( upstream_tables, @@ -1779,13 +1730,13 @@ def emit_datasource( ) yield self.get_metadata_change_proposal( datasource_urn, - aspect_name=tableau_constant.UPSTREAM_LINEAGE, + aspect_name=c.UPSTREAM_LINEAGE, aspect=upstream_lineage, ) # Datasource Fields schema_metadata = self._get_schema_metadata_for_datasource( - datasource.get(tableau_constant.FIELDS, []) + datasource.get(c.FIELDS, []) ) if schema_metadata is not None: dataset_snapshot.aspects.append(schema_metadata) @@ -1793,7 +1744,7 @@ def emit_datasource( yield self.get_metadata_change_event(dataset_snapshot) yield self.get_metadata_change_proposal( dataset_snapshot.urn, - aspect_name=tableau_constant.SUB_TYPES, + aspect_name=c.SUB_TYPES, aspect=SubTypesClass( typeNames=( ["Embedded Data Source"] @@ -1809,7 +1760,7 @@ def 
emit_datasource( if container_key is not None: yield from add_entity_to_container( container_key, - tableau_constant.DATASET, + c.DATASET, dataset_snapshot.urn, ) @@ -1822,10 +1773,10 @@ def _get_datasource_container_key( container_key: Optional[ContainerKey] = None if is_embedded_ds: # It is embedded then parent is container is workbook if workbook is not None: - container_key = self.gen_workbook_key(workbook[tableau_constant.ID]) + container_key = self.gen_workbook_key(workbook[c.ID]) else: logger.warning( - f"Parent container not set for embedded datasource {datasource[tableau_constant.ID]}" + f"Parent container not set for embedded datasource {datasource[c.ID]}" ) else: parent_project_luid = self._get_published_datasource_project_luid( @@ -1836,17 +1787,19 @@ def _get_datasource_container_key( container_key = self.gen_project_key(parent_project_luid) else: logger.warning( - f"Parent container not set for published datasource {datasource[tableau_constant.ID]}" + f"Parent container not set for published datasource {datasource[c.ID]}" ) return container_key def emit_published_datasources(self) -> Iterable[MetadataWorkUnit]: - datasource_filter = f"{tableau_constant.ID_WITH_IN}: {json.dumps(self.datasource_ids_being_used)}" + datasource_filter = ( + f"{c.ID_WITH_IN}: {json.dumps(self.datasource_ids_being_used)}" + ) for datasource in self.get_connection_objects( published_datasource_graphql_query, - tableau_constant.PUBLISHED_DATA_SOURCES_CONNECTION, + c.PUBLISHED_DATA_SOURCES_CONNECTION, datasource_filter, ): yield from self.emit_datasource(datasource) @@ -1855,11 +1808,13 @@ def emit_upstream_tables(self) -> Iterable[MetadataWorkUnit]: database_table_id_to_urn_map: Dict[str, str] = dict() for urn, tbl in self.database_tables.items(): database_table_id_to_urn_map[tbl.id] = urn - tables_filter = f"{tableau_constant.ID_WITH_IN}: {json.dumps(list(database_table_id_to_urn_map.keys()))}" + tables_filter = ( + f"{c.ID_WITH_IN}: {json.dumps(list(database_table_id_to_urn_map.keys()))}" + ) for table in self.get_connection_objects( database_tables_graphql_query, - tableau_constant.DATABASE_TABLES_CONNECTION, + c.DATABASE_TABLES_CONNECTION, tables_filter, ): yield from self.emit_table(table, database_table_id_to_urn_map) @@ -1867,11 +1822,9 @@ def emit_upstream_tables(self) -> Iterable[MetadataWorkUnit]: def emit_table( self, table: dict, database_table_id_to_urn_map: Dict[str, str] ) -> Iterable[MetadataWorkUnit]: - database_table = self.database_tables[ - database_table_id_to_urn_map[table[tableau_constant.ID]] - ] - columns = table.get(tableau_constant.COLUMNS, []) - is_embedded = table.get(tableau_constant.IS_EMBEDDED) or False + database_table = self.database_tables[database_table_id_to_urn_map[table[c.ID]]] + columns = table.get(c.COLUMNS, []) + is_embedded = table.get(c.IS_EMBEDDED) or False if not is_embedded and not self.config.ingest_tables_external: logger.debug( f"Skipping external table {database_table.urn} as ingest_tables_external is set to False" @@ -1907,21 +1860,19 @@ def get_schema_metadata_for_table( if columns: fields = [] for field in columns: - if field.get(tableau_constant.NAME) is None: + if field.get(c.NAME) is None: self.report.num_table_field_skipped_no_name += 1 logger.warning( - f"Skipping field {field[tableau_constant.ID]} from schema since its name is none" + f"Skipping field {field[c.ID]} from schema since its name is none" ) continue - nativeDataType = field.get( - tableau_constant.REMOTE_TYPE, tableau_constant.UNKNOWN - ) + nativeDataType = 
field.get(c.REMOTE_TYPE, c.UNKNOWN) TypeClass = FIELD_TYPE_MAPPING.get(nativeDataType, NullTypeClass) schema_field = SchemaField( - fieldPath=field[tableau_constant.NAME], + fieldPath=field[c.NAME], type=SchemaFieldDataType(type=TypeClass()), - description=field.get(tableau_constant.DESCRIPTION), + description=field.get(c.DESCRIPTION), nativeDataType=nativeDataType, ) @@ -1941,11 +1892,9 @@ def get_schema_metadata_for_table( def get_sheetwise_upstream_datasources(self, sheet: dict) -> set: sheet_upstream_datasources = set() - for field in sheet.get(tableau_constant.DATA_SOURCE_FIELDS) or []: - if field and field.get(tableau_constant.DATA_SOURCE): - sheet_upstream_datasources.add( - field[tableau_constant.DATA_SOURCE][tableau_constant.ID] - ) + for field in sheet.get(c.DATA_SOURCE_FIELDS) or []: + if field and field.get(c.DATA_SOURCE): + sheet_upstream_datasources.add(field[c.DATA_SOURCE][c.ID]) return sheet_upstream_datasources @@ -1961,20 +1910,20 @@ def _create_datahub_chart_usage_stat( def _get_chart_stat_wu( self, sheet: dict, sheet_urn: str ) -> Optional[MetadataWorkUnit]: - luid: Optional[str] = sheet.get(tableau_constant.LUID) + luid: Optional[str] = sheet.get(c.LUID) if luid is None: logger.debug( "stat:luid is none for sheet %s(id:%s)", - sheet.get(tableau_constant.NAME), - sheet.get(tableau_constant.ID), + sheet.get(c.NAME), + sheet.get(c.ID), ) return None usage_stat: Optional[UsageStat] = self.tableau_stat_registry.get(luid) if usage_stat is None: logger.debug( "stat:UsageStat is not available in tableau_stat_registry for sheet %s(id:%s)", - sheet.get(tableau_constant.NAME), - sheet.get(tableau_constant.ID), + sheet.get(c.NAME), + sheet.get(c.ID), ) return None @@ -1983,8 +1932,8 @@ def _get_chart_stat_wu( ) logger.debug( "stat: Chart usage stat work unit is created for %s(id:%s)", - sheet.get(tableau_constant.NAME), - sheet.get(tableau_constant.ID), + sheet.get(c.NAME), + sheet.get(c.ID), ) return MetadataChangeProposalWrapper( aspect=aspect, @@ -1992,22 +1941,20 @@ def _get_chart_stat_wu( ).as_workunit() def emit_sheets(self) -> Iterable[MetadataWorkUnit]: - sheets_filter = f"{tableau_constant.ID_WITH_IN}: {json.dumps(self.sheet_ids)}" + sheets_filter = f"{c.ID_WITH_IN}: {json.dumps(self.sheet_ids)}" for sheet in self.get_connection_objects( sheet_graphql_query, - tableau_constant.SHEETS_CONNECTION, + c.SHEETS_CONNECTION, sheets_filter, ): - yield from self.emit_sheets_as_charts( - sheet, sheet.get(tableau_constant.WORKBOOK) - ) + yield from self.emit_sheets_as_charts(sheet, sheet.get(c.WORKBOOK)) def emit_sheets_as_charts( self, sheet: dict, workbook: Optional[Dict] ) -> Iterable[MetadataWorkUnit]: sheet_urn: str = builder.make_chart_urn( - self.platform, sheet[tableau_constant.ID], self.config.platform_instance + self.platform, sheet[c.ID], self.config.platform_instance ) chart_snapshot = ChartSnapshot( urn=sheet_urn, @@ -2015,34 +1962,32 @@ def emit_sheets_as_charts( ) creator: Optional[str] = None - if workbook is not None and workbook.get(tableau_constant.OWNER) is not None: - creator = workbook[tableau_constant.OWNER].get(tableau_constant.USERNAME) - created_at = sheet.get(tableau_constant.CREATED_AT, datetime.now()) - updated_at = sheet.get(tableau_constant.UPDATED_AT, datetime.now()) + if workbook is not None and workbook.get(c.OWNER) is not None: + creator = workbook[c.OWNER].get(c.USERNAME) + created_at = sheet.get(c.CREATED_AT, datetime.now()) + updated_at = sheet.get(c.UPDATED_AT, datetime.now()) last_modified = self.get_last_modified(creator, created_at, 
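            # created_at / updated_at fall back to datetime.now() above when Tableau omits them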
updated_at) - if sheet.get(tableau_constant.PATH): + if sheet.get(c.PATH): site_part = f"/site/{self.config.site}" if self.config.site else "" - sheet_external_url = f"{self.config.connect_uri}/#{site_part}/views/{sheet.get(tableau_constant.PATH)}" - elif ( - sheet.get(tableau_constant.CONTAINED_IN_DASHBOARDS) is not None - and len(sheet[tableau_constant.CONTAINED_IN_DASHBOARDS]) > 0 - and sheet[tableau_constant.CONTAINED_IN_DASHBOARDS][0] is not None - and sheet[tableau_constant.CONTAINED_IN_DASHBOARDS][0].get( - tableau_constant.PATH + sheet_external_url = ( + f"{self.config.connect_uri}/#{site_part}/views/{sheet.get(c.PATH)}" ) + elif ( + sheet.get(c.CONTAINED_IN_DASHBOARDS) is not None + and len(sheet[c.CONTAINED_IN_DASHBOARDS]) > 0 + and sheet[c.CONTAINED_IN_DASHBOARDS][0] is not None + and sheet[c.CONTAINED_IN_DASHBOARDS][0].get(c.PATH) ): # sheet contained in dashboard site_part = f"/t/{self.config.site}" if self.config.site else "" - dashboard_path = sheet[tableau_constant.CONTAINED_IN_DASHBOARDS][0][ - tableau_constant.PATH - ] - sheet_external_url = f"{self.config.connect_uri}{site_part}/authoring/{dashboard_path}/{sheet.get(tableau_constant.NAME, '')}" + dashboard_path = sheet[c.CONTAINED_IN_DASHBOARDS][0][c.PATH] + sheet_external_url = f"{self.config.connect_uri}{site_part}/authoring/{dashboard_path}/{sheet.get(c.NAME, '')}" else: # hidden or viz-in-tooltip sheet sheet_external_url = None input_fields: List[InputField] = [] - if sheet.get(tableau_constant.DATA_SOURCE_FIELDS): + if sheet.get(c.DATA_SOURCE_FIELDS): self.populate_sheet_upstream_fields(sheet, input_fields) # datasource urn @@ -2060,15 +2005,13 @@ def emit_sheets_as_charts( # Chart Info chart_info = ChartInfoClass( description="", - title=sheet.get(tableau_constant.NAME) or "", + title=sheet.get(c.NAME) or "", lastModified=last_modified, externalUrl=sheet_external_url if self.config.ingest_external_links_for_charts else None, inputs=sorted(datasource_urn), - customProperties=self.get_custom_props_from_dict( - sheet, [tableau_constant.LUID] - ), + customProperties=self.get_custom_props_from_dict(sheet, [c.LUID]), ) chart_snapshot.aspects.append(chart_info) # chart_snapshot doesn't support the stat aspect as list element and hence need to emit MCP @@ -2083,7 +2026,7 @@ def emit_sheets_as_charts( chart_snapshot.aspects.append(browse_paths) else: logger.warning( - f"Could not set browse path for workbook {sheet[tableau_constant.ID]}. Please check permissions." + f"Could not set browse path for workbook {sheet[c.ID]}. Please check permissions." 
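                # If the project hierarchy could not be resolved, the sheet is still emitted; only the browsePaths aspect is skipped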
) # Ownership @@ -2107,9 +2050,7 @@ def emit_sheets_as_charts( ) if workbook is not None: yield from add_entity_to_container( - self.gen_workbook_key(workbook[tableau_constant.ID]), - tableau_constant.CHART, - chart_snapshot.urn, + self.gen_workbook_key(workbook[c.ID]), c.CHART, chart_snapshot.urn ) if input_fields: @@ -2134,14 +2075,12 @@ def _get_project_path(self, project: TableauProject) -> str: def populate_sheet_upstream_fields( self, sheet: dict, input_fields: List[InputField] ) -> None: - for field in sheet.get(tableau_constant.DATA_SOURCE_FIELDS): # type: ignore + for field in sheet.get(c.DATA_SOURCE_FIELDS): # type: ignore if not field: continue - name = field.get(tableau_constant.NAME) + name = field.get(c.NAME) upstream_ds_id = ( - field.get(tableau_constant.DATA_SOURCE)[tableau_constant.ID] - if field.get(tableau_constant.DATA_SOURCE) - else None + field.get(c.DATA_SOURCE)[c.ID] if field.get(c.DATA_SOURCE) else None ) if name and upstream_ds_id: input_fields.append( @@ -2162,10 +2101,8 @@ def populate_sheet_upstream_fields( ) def emit_workbook_as_container(self, workbook: Dict) -> Iterable[MetadataWorkUnit]: - workbook_container_key = self.gen_workbook_key(workbook[tableau_constant.ID]) - creator = workbook.get(tableau_constant.OWNER, {}).get( - tableau_constant.USERNAME - ) + workbook_container_key = self.gen_workbook_key(workbook[c.ID]) + creator = workbook.get(c.OWNER, {}).get(c.USERNAME) owner_urn = ( builder.make_user_urn(creator) @@ -2191,17 +2128,17 @@ def emit_workbook_as_container(self, workbook: Dict) -> Iterable[MetadataWorkUni if project_luid and project_luid in self.tableau_project_registry.keys(): parent_key = self.gen_project_key(project_luid) else: - workbook_id: Optional[str] = workbook.get(tableau_constant.ID) - workbook_name: Optional[str] = workbook.get(tableau_constant.NAME) + workbook_id: Optional[str] = workbook.get(c.ID) + workbook_name: Optional[str] = workbook.get(c.NAME) logger.warning( f"Could not load project hierarchy for workbook {workbook_name}({workbook_id}). Please check permissions." 
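            # With no resolvable project LUID, parent_key presumably stays unset, so the workbook container below is emitted without a parent project container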
) yield from gen_containers( container_key=workbook_container_key, - name=workbook.get(tableau_constant.NAME) or "", + name=workbook.get(c.NAME) or "", parent_container_key=parent_key, - description=workbook.get(tableau_constant.DESCRIPTION), + description=workbook.get(c.DESCRIPTION), sub_types=[BIContainerSubTypes.TABLEAU_WORKBOOK], owner_urn=owner_urn, external_url=workbook_external_url, @@ -2237,20 +2174,20 @@ def _create_datahub_dashboard_usage_stat( def _get_dashboard_stat_wu( self, dashboard: dict, dashboard_urn: str ) -> Optional[MetadataWorkUnit]: - luid: Optional[str] = dashboard.get(tableau_constant.LUID) + luid: Optional[str] = dashboard.get(c.LUID) if luid is None: logger.debug( "stat:luid is none for dashboard %s(id:%s)", - dashboard.get(tableau_constant.NAME), - dashboard.get(tableau_constant.ID), + dashboard.get(c.NAME), + dashboard.get(c.ID), ) return None usage_stat: Optional[UsageStat] = self.tableau_stat_registry.get(luid) if usage_stat is None: logger.debug( "stat:UsageStat is not available in tableau_stat_registry for dashboard %s(id:%s)", - dashboard.get(tableau_constant.NAME), - dashboard.get(tableau_constant.ID), + dashboard.get(c.NAME), + dashboard.get(c.ID), ) return None @@ -2259,8 +2196,8 @@ def _get_dashboard_stat_wu( ) logger.debug( "stat: Dashboard usage stat is created for %s(id:%s)", - dashboard.get(tableau_constant.NAME), - dashboard.get(tableau_constant.ID), + dashboard.get(c.NAME), + dashboard.get(c.ID), ) return MetadataChangeProposalWrapper( @@ -2288,26 +2225,20 @@ def new_work_unit(self, mcp: MetadataChangeProposalWrapper) -> MetadataWorkUnit: ) def emit_dashboards(self) -> Iterable[MetadataWorkUnit]: - dashboards_filter = ( - f"{tableau_constant.ID_WITH_IN}: {json.dumps(self.dashboard_ids)}" - ) + dashboards_filter = f"{c.ID_WITH_IN}: {json.dumps(self.dashboard_ids)}" for dashboard in self.get_connection_objects( dashboard_graphql_query, - tableau_constant.DASHBOARDS_CONNECTION, + c.DASHBOARDS_CONNECTION, dashboards_filter, ): - yield from self.emit_dashboard( - dashboard, dashboard.get(tableau_constant.WORKBOOK) - ) + yield from self.emit_dashboard(dashboard, dashboard.get(c.WORKBOOK)) def get_tags(self, obj: dict) -> Optional[List[str]]: - tag_list = obj.get(tableau_constant.TAGS, []) + tag_list = obj.get(c.TAGS, []) if tag_list and self.config.ingest_tags: tag_list_str = [ - t[tableau_constant.NAME] - for t in tag_list - if t is not None and t.get(tableau_constant.NAME) + t[c.NAME] for t in tag_list if t is not None and t.get(c.NAME) ] return tag_list_str @@ -2317,7 +2248,7 @@ def emit_dashboard( self, dashboard: dict, workbook: Optional[Dict] ) -> Iterable[MetadataWorkUnit]: dashboard_urn: str = builder.make_dashboard_urn( - self.platform, dashboard[tableau_constant.ID], self.config.platform_instance + self.platform, dashboard[c.ID], self.config.platform_instance ) dashboard_snapshot = DashboardSnapshot( urn=dashboard_urn, @@ -2325,26 +2256,28 @@ def emit_dashboard( ) creator: Optional[str] = None - if workbook is not None and workbook.get(tableau_constant.OWNER) is not None: - creator = workbook[tableau_constant.OWNER].get(tableau_constant.USERNAME) - created_at = dashboard.get(tableau_constant.CREATED_AT, datetime.now()) - updated_at = dashboard.get(tableau_constant.UPDATED_AT, datetime.now()) + if workbook is not None and workbook.get(c.OWNER) is not None: + creator = workbook[c.OWNER].get(c.USERNAME) + created_at = dashboard.get(c.CREATED_AT, datetime.now()) + updated_at = dashboard.get(c.UPDATED_AT, datetime.now()) last_modified = 
self.get_last_modified(creator, created_at, updated_at) site_part = f"/site/{self.config.site}" if self.config.site else "" - dashboard_external_url = f"{self.config.connect_uri}/#{site_part}/views/{dashboard.get(tableau_constant.PATH, '')}" + dashboard_external_url = ( + f"{self.config.connect_uri}/#{site_part}/views/{dashboard.get(c.PATH, '')}" + ) title = ( - dashboard[tableau_constant.NAME].replace("/", REPLACE_SLASH_CHAR) - if dashboard.get(tableau_constant.NAME) + dashboard[c.NAME].replace("/", REPLACE_SLASH_CHAR) + if dashboard.get(c.NAME) else "" ) chart_urns = [ builder.make_chart_urn( self.platform, - sheet.get(tableau_constant.ID), + sheet.get(c.ID), self.config.platform_instance, ) - for sheet in dashboard.get(tableau_constant.SHEETS, []) + for sheet in dashboard.get(c.SHEETS, []) ] dashboard_info_class = DashboardInfoClass( description="", @@ -2354,9 +2287,7 @@ def emit_dashboard( dashboardUrl=dashboard_external_url if self.config.ingest_external_links_for_dashboards else None, - customProperties=self.get_custom_props_from_dict( - dashboard, [tableau_constant.LUID] - ), + customProperties=self.get_custom_props_from_dict(dashboard, [c.LUID]), ) dashboard_snapshot.aspects.append(dashboard_info_class) @@ -2377,7 +2308,7 @@ def emit_dashboard( dashboard_snapshot.aspects.append(browse_paths) else: logger.warning( - f"Could not set browse path for dashboard {dashboard[tableau_constant.ID]}. Please check permissions." + f"Could not set browse path for dashboard {dashboard[c.ID]}. Please check permissions." ) # Ownership @@ -2397,8 +2328,8 @@ def emit_dashboard( if workbook is not None: yield from add_entity_to_container( - self.gen_workbook_key(workbook[tableau_constant.ID]), - tableau_constant.DASHBOARD, + self.gen_workbook_key(workbook[c.ID]), + c.DASHBOARD, dashboard_snapshot.urn, ) @@ -2406,38 +2337,40 @@ def get_browse_paths_aspect( self, workbook: Optional[Dict] ) -> Optional[BrowsePathsClass]: browse_paths: Optional[BrowsePathsClass] = None - if workbook and workbook.get(tableau_constant.NAME): + if workbook and workbook.get(c.NAME): project_luid: Optional[str] = self._get_workbook_project_luid(workbook) if project_luid in self.tableau_project_registry: browse_paths = BrowsePathsClass( paths=[ f"/{self.platform}/{self._project_luid_to_browse_path_name(project_luid)}" - f"/{workbook[tableau_constant.NAME].replace('/', REPLACE_SLASH_CHAR)}" + f"/{workbook[c.NAME].replace('/', REPLACE_SLASH_CHAR)}" ] ) - elif workbook.get(tableau_constant.PROJECT_NAME): + elif workbook.get(c.PROJECT_NAME): # browse path browse_paths = BrowsePathsClass( paths=[ - f"/{self.platform}/{workbook[tableau_constant.PROJECT_NAME].replace('/', REPLACE_SLASH_CHAR)}" - f"/{workbook[tableau_constant.NAME].replace('/', REPLACE_SLASH_CHAR)}" + f"/{self.platform}/{workbook[c.PROJECT_NAME].replace('/', REPLACE_SLASH_CHAR)}" + f"/{workbook[c.NAME].replace('/', REPLACE_SLASH_CHAR)}" ] ) return browse_paths def emit_embedded_datasources(self) -> Iterable[MetadataWorkUnit]: - datasource_filter = f"{tableau_constant.ID_WITH_IN}: {json.dumps(self.embedded_datasource_ids_being_used)}" + datasource_filter = ( + f"{c.ID_WITH_IN}: {json.dumps(self.embedded_datasource_ids_being_used)}" + ) for datasource in self.get_connection_objects( embedded_datasource_graphql_query, - tableau_constant.EMBEDDED_DATA_SOURCES_CONNECTION, + c.EMBEDDED_DATA_SOURCES_CONNECTION, datasource_filter, ): yield from self.emit_datasource( datasource, - datasource.get(tableau_constant.WORKBOOK), + datasource.get(c.WORKBOOK), is_embedded_ds=True, ) @@ 
-2483,7 +2416,7 @@ def emit_project_containers(self) -> Iterable[MetadataWorkUnit]: container_key=self.gen_project_key(_id), name=project.name, description=project.description, - sub_types=[tableau_constant.PROJECT], + sub_types=[c.PROJECT], parent_container_key=self.gen_project_key(project.parent_id) if project.parent_id else None, @@ -2498,7 +2431,7 @@ def emit_project_containers(self) -> Iterable[MetadataWorkUnit]: yield from gen_containers( container_key=self.gen_project_key(project.parent_id), name=cast(str, project.parent_name), - sub_types=[tableau_constant.PROJECT], + sub_types=[c.PROJECT], ) def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py index 7c4852042ce7c..65d779b7f4516 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py @@ -8,7 +8,7 @@ import datahub.emitter.mce_builder as builder from datahub.configuration.common import ConfigModel -from datahub.ingestion.source import tableau_constant as tc +from datahub.ingestion.source import tableau_constant as c from datahub.metadata.com.linkedin.pegasus2avro.dataset import ( DatasetLineageType, FineGrainedLineage, @@ -591,12 +591,12 @@ def create( cls, d: dict, default_schema_map: Optional[Dict[str, str]] = None ) -> "TableauUpstreamReference": # Values directly from `table` object from Tableau - database = t_database = d.get(tc.DATABASE, {}).get(tc.NAME) - schema = t_schema = d.get(tc.SCHEMA) - table = t_table = d.get(tc.NAME) or "" - t_full_name = d.get(tc.FULL_NAME) - t_connection_type = d[tc.CONNECTION_TYPE] # required to generate urn - t_id = d[tc.ID] + database = t_database = d.get(c.DATABASE, {}).get(c.NAME) + schema = t_schema = d.get(c.SCHEMA) + table = t_table = d.get(c.NAME) or "" + t_full_name = d.get(c.FULL_NAME) + t_connection_type = d[c.CONNECTION_TYPE] # required to generate urn + t_id = d[c.ID] parsed_full_name = cls.parse_full_name(t_full_name) if parsed_full_name and len(parsed_full_name) == 3: From 9174301719122c2597db75c8bb6b60c4d1a74f77 Mon Sep 17 00:00:00 2001 From: sachinsaju <33017477+sachinsaju@users.noreply.github.com> Date: Thu, 9 Nov 2023 10:37:09 +0530 Subject: [PATCH 079/792] docs: update broken link in metadata-modelling (#9184) Co-authored-by: Hyejin Yoon <0327jane@gmail.com> Co-authored-by: John Joyce --- docs/modeling/metadata-model.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/modeling/metadata-model.md b/docs/modeling/metadata-model.md index a8958985a0a72..4c97cadc88417 100644 --- a/docs/modeling/metadata-model.md +++ b/docs/modeling/metadata-model.md @@ -625,7 +625,7 @@ curl --location --request POST 'http://localhost:8080/analytics?action=getTimese } } ``` -For more examples on the complex types of group-by/aggregations, refer to the tests in the group `getAggregatedStats` of [ElasticSearchTimeseriesAspectServiceTest.java](https://github.com/datahub-project/datahub/blob/master/metadata-io/src/test/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectServiceTest.java). 
+For more examples on the complex types of group-by/aggregations, refer to the tests in the group `getAggregatedStats` of [TimeseriesAspectServiceTestBase.java](https://github.com/datahub-project/datahub/blob/master/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java). From e494a9cc102f863bc51fcf80674bd6d3d36d726c Mon Sep 17 00:00:00 2001 From: Kos Korchak <97058061+kkorchak@users.noreply.github.com> Date: Thu, 9 Nov 2023 00:23:17 -0500 Subject: [PATCH 080/792] test(): Test policy to create and manage privileges (#9173) --- .../tests/privileges/test_privileges.py | 112 +++++++++++++++++- 1 file changed, 111 insertions(+), 1 deletion(-) diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index 740311754678e..d0f00734ae9f3 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -114,6 +114,21 @@ def _ensure_can_create_access_token(session, json): assert ingestion_data["data"]["createAccessToken"]["__typename"] == "AccessToken" +@tenacity.retry( + stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) +) +def _ensure_can_create_user_policy(session, json): + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["createPolicy"] is not None + + return res_data["data"]["createPolicy"] + + @pytest.mark.dependency(depends=["test_healthchecks"]) def test_privilege_to_create_and_manage_secrets(): @@ -337,4 +352,99 @@ def test_privilege_to_create_and_manage_access_tokens(): # Ensure that user can't create access token after policy is removed - _ensure_cant_perform_action(user_session, create_access_token,"createAccessToken") \ No newline at end of file + _ensure_cant_perform_action(user_session, create_access_token,"createAccessToken") + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_privilege_to_create_and_manage_policies(): + + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + user_session = login_as("user", "user") + + + # Verify new user can't create a policy + create_policy = { + "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n + createPolicy(input: $input) }""", + "variables": { + "input": { + "type": "PLATFORM", + "name": "Policy Name", + "description": "Policy Description", + "state": "ACTIVE", + "resources": {"filter":{"criteria":[]}}, + "privileges": ["MANAGE_POLICIES"], + "actors": { + "users": [], + "resourceOwners": False, + "allUsers": True, + "allGroups": False, + }, + } + }, + } + + _ensure_cant_perform_action(user_session, create_policy,"createPolicy") + + + # Assign privileges to the new user to create and manage policies + admin_policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_POLICIES"], admin_session) + + + # Verify new user can create and manage policy(create, edit, delete) + # Create a policy + user_policy_urn = _ensure_can_create_user_policy(user_session, create_policy) + + # Edit a policy + edit_policy = { + "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) 
{\n + updatePolicy(urn: $urn, input: $input) }""", + "variables": { + "urn": user_policy_urn, + "input": { + "type": "PLATFORM", + "state": "INACTIVE", + "name": "Policy Name test", + "description": "Policy Description updated", + "privileges": ["MANAGE_POLICIES"], + "actors": { + "users": [], + "groups": None, + "resourceOwners": False, + "allUsers": True, + "allGroups": False, + "resourceOwnersTypes": None, + }, + }, + }, + } + edit_policy_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=edit_policy) + edit_policy_response.raise_for_status() + res_data = edit_policy_response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["updatePolicy"] == user_policy_urn + + # Delete a policy + remove_user_policy = { + "query": "mutation deletePolicy($urn: String!) {\n deletePolicy(urn: $urn)\n}\n", + "variables":{"urn":user_policy_urn} + } + + remove_policy_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_user_policy) + remove_policy_response.raise_for_status() + res_data = remove_policy_response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["deletePolicy"] == user_policy_urn + + + # Remove the user privilege by admin + remove_policy(admin_policy_urn, admin_session) + + + # Ensure that user can't create a policy after privilege is removed by admin + _ensure_cant_perform_action(user_session, create_policy,"createPolicy") \ No newline at end of file From 2187d24b54493953ab66b70f9a4b4fe0fd8841e1 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Thu, 9 Nov 2023 13:58:12 -0600 Subject: [PATCH 081/792] docs(security): add security doc to website (#9209) --- docs-website/generateDocsDir.ts | 1 - docs-website/sidebars.js | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-website/generateDocsDir.ts b/docs-website/generateDocsDir.ts index a321146e10efa..e19f09530665a 100644 --- a/docs-website/generateDocsDir.ts +++ b/docs-website/generateDocsDir.ts @@ -125,7 +125,6 @@ function list_markdown_files(): string[] { /^docker\/(?!README|datahub-upgrade|airflow\/local_airflow)/, // Drop all but a few docker docs. /^docs\/docker\/README\.md/, // This one is just a pointer to another file. /^docs\/README\.md/, // This one is just a pointer to the hosted docs site. - /^SECURITY\.md$/, /^\s*$/, //Empty string ]; diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 9cc035f3e29e0..4d2420256ebff 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -546,6 +546,7 @@ module.exports = { "docs/CONTRIBUTING", "docs/links", "docs/rfc", + "SECURITY", ], }, { From 5911a7b45ed726292b2aa77c9e307d0e8683603a Mon Sep 17 00:00:00 2001 From: sachinsaju <33017477+sachinsaju@users.noreply.github.com> Date: Fri, 10 Nov 2023 01:54:53 +0530 Subject: [PATCH 082/792] docs(java-sdk-dataset): add dataset via java sdk example (#9136) Co-authored-by: Hyejin Yoon <0327jane@gmail.com> --- docs/api/tutorials/datasets.md | 7 ++ .../datahubproject/examples/DatasetAdd.java | 84 +++++++++++++++++++ 2 files changed, 91 insertions(+) create mode 100644 metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java diff --git a/docs/api/tutorials/datasets.md b/docs/api/tutorials/datasets.md index 7c6d4a88d4190..39b0fdce1bdb5 100644 --- a/docs/api/tutorials/datasets.md +++ b/docs/api/tutorials/datasets.md @@ -28,6 +28,13 @@ For detailed steps, please refer to [Datahub Quickstart Guide](/docs/quickstart. 
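For comparison, here is a minimal Python-SDK sketch of the same flow as the Java example below. It is a sketch, not part of this change: the server address, token handling, URNs, field names, and timestamps are illustrative assumptions, and only emitter/builder APIs that already exist in this repo are used.

```python
# A minimal sketch, assuming a DataHub instance at http://localhost:8080;
# URNs, schema fields, and timestamps are illustrative.
from datahub.emitter.mce_builder import make_data_platform_urn, make_dataset_urn
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.schema_classes import (
    AuditStampClass,
    OtherSchemaClass,
    SchemaFieldClass,
    SchemaFieldDataTypeClass,
    SchemaMetadataClass,
    StringTypeClass,
)

emitter = DatahubRestEmitter(gms_server="http://localhost:8080")

last_modified = AuditStampClass(time=1640692800000, actor="urn:li:corpuser:ingestion")

# Build the schemaMetadata aspect for the dataset
schema = SchemaMetadataClass(
    schemaName="customer",
    platform=make_data_platform_urn("hive"),
    version=0,
    hash="",
    platformSchema=OtherSchemaClass(rawSchema="__insert raw schema here__"),
    lastModified=last_modified,
    fields=[
        SchemaFieldClass(
            fieldPath="address.zipcode",
            type=SchemaFieldDataTypeClass(type=StringTypeClass()),
            nativeDataType="VARCHAR(50)",
            description="Zipcode of the address, limited to addresses in the United States",
            lastModified=last_modified,
        )
    ],
)

# Wrap the aspect in a change proposal and send it to the server
emitter.emit(
    MetadataChangeProposalWrapper(
        entityUrn=make_dataset_urn(platform="hive", name="fct_users_deleted", env="PROD"),
        aspect=schema,
    )
)
```

The Java example added below expresses the same idea with the `SchemaMetadata`/`SchemaField` builders and a `RestEmitter`.
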
> 🚫 Creating a dataset via `graphql` is currently not supported. > Please check out [API feature comparison table](/docs/api/datahub-apis.md#datahub-api-comparison) for more information. + + + +```java +{{ inline /metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java show_path_as_comment }} +``` + diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java new file mode 100644 index 0000000000000..ac368972e8dc9 --- /dev/null +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java @@ -0,0 +1,84 @@ +package io.datahubproject.examples; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.CorpuserUrn; +import com.linkedin.common.urn.DataPlatformUrn; +import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.schema.DateType; +import com.linkedin.schema.OtherSchema; +import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaFieldArray; +import com.linkedin.schema.SchemaFieldDataType; +import com.linkedin.schema.SchemaMetadata; +import com.linkedin.schema.StringType; +import datahub.client.MetadataWriteResponse; +import datahub.client.rest.RestEmitter; +import datahub.event.MetadataChangeProposalWrapper; + +import java.io.IOException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; + +public class DatasetAdd { + + private DatasetAdd() { + + } + + public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { + DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"); + CorpuserUrn userUrn = new CorpuserUrn("ingestion"); + AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn); + + SchemaMetadata schemaMetadata = new SchemaMetadata() + .setSchemaName("customer") + .setPlatform(new DataPlatformUrn("hive")) + .setVersion(0L) + .setHash("") + .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new OtherSchema().setRawSchema("__insert raw schema here__"))) + .setLastModified(lastModified); + + SchemaFieldArray fields = new SchemaFieldArray(); + + SchemaField field1 = new SchemaField() + .setFieldPath("address.zipcode") + .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("VARCHAR(50)") + .setDescription("This is the zipcode of the address. 
Specified using extended form and limited to addresses in the United States") + .setLastModified(lastModified); + fields.add(field1); + + SchemaField field2 = new SchemaField().setFieldPath("address.street") + .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("VARCHAR(100)") + .setDescription("Street corresponding to the address") + .setLastModified(lastModified); + fields.add(field2); + + SchemaField field3 = new SchemaField().setFieldPath("last_sold_date") + .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType()))) + .setNativeDataType("Date") + .setDescription("Date of the last sale date for this property") + .setLastModified(lastModified); + fields.add(field3); + + schemaMetadata.setFields(fields); + + MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn(datasetUrn) + .upsert() + .aspect(schemaMetadata) + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create( + b -> b.server("http://localhost:8080") + .token(token) + ); + Future response = emitter.emit(mcpw, null); + System.out.println(response.get().getResponseContent()); + } + +} \ No newline at end of file From d6cb106fab4a4d49193afd0efd8ff7d90a8d3fa8 Mon Sep 17 00:00:00 2001 From: sachinsaju <33017477+sachinsaju@users.noreply.github.com> Date: Fri, 10 Nov 2023 02:10:55 +0530 Subject: [PATCH 083/792] doc(java-sdk-example):example to create tag via java-sdk (#9151) --- docs/api/tutorials/tags.md | 8 ++++ .../io/datahubproject/examples/TagCreate.java | 40 +++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java diff --git a/docs/api/tutorials/tags.md b/docs/api/tutorials/tags.md index b2234bf00bcb9..24d583dc26dac 100644 --- a/docs/api/tutorials/tags.md +++ b/docs/api/tutorials/tags.md @@ -78,6 +78,14 @@ Expected Response: + + +```java +{{ inline /metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java show_path_as_comment }} +``` + + + ```python diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java new file mode 100644 index 0000000000000..077489a9e02d9 --- /dev/null +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java @@ -0,0 +1,40 @@ +package io.datahubproject.examples; + +import com.linkedin.tag.TagProperties; +import datahub.client.MetadataWriteResponse; +import datahub.client.rest.RestEmitter; +import datahub.event.MetadataChangeProposalWrapper; + +import java.io.IOException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; + +public class TagCreate { + + private TagCreate() { + + } + + public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { + TagProperties tagProperties = new TagProperties() + .setName("Deprecated") + .setDescription("Having this tag means this column or table is deprecated."); + + MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() + .entityType("tag") + .entityUrn("urn:li:tag:deprecated") + .upsert() + .aspect(tagProperties) + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create( + b -> b.server("http://localhost:8080") + .token(token) + ); + Future response = 
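                // emit() is asynchronous; the get() call below blocks until DataHub responds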
emitter.emit(mcpw, null); + System.out.println(response.get().getResponseContent()); + + + } +} From 107713846f56e761011fd811fd8ac3b0b87a40bd Mon Sep 17 00:00:00 2001 From: Teppo Naakka Date: Fri, 10 Nov 2023 02:48:06 +0200 Subject: [PATCH 084/792] fix(ingest/powerbi): use dataset workspace id as key for parent container (#8994) --- .../ingestion/source/powerbi/powerbi.py | 42 +- .../powerbi/golden_test_container.json | 1089 +++++++++++++---- ..._config_and_modified_since_admin_only.json | 210 +++- .../tests/integration/powerbi/test_powerbi.py | 4 + 4 files changed, 1004 insertions(+), 341 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py index 4611a8eed4782..dc4394efcf245 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py @@ -4,7 +4,7 @@ # ######################################################### import logging -from typing import Iterable, List, Optional, Set, Tuple, Union +from typing import Iterable, List, Optional, Tuple, Union import datahub.emitter.mce_builder as builder import datahub.ingestion.source.powerbi.rest_api_wrapper.data_classes as powerbi_data_classes @@ -110,8 +110,7 @@ def __init__( self.__config = config self.__reporter = reporter self.__dataplatform_instance_resolver = dataplatform_instance_resolver - self.processed_datasets: Set[powerbi_data_classes.PowerBIDataset] = set() - self.workspace_key: ContainerKey + self.workspace_key: Optional[ContainerKey] = None @staticmethod def urn_to_lowercase(value: str, flag: bool) -> str: @@ -374,6 +373,9 @@ def to_datahub_dataset( f"Mapping dataset={dataset.name}(id={dataset.id}) to datahub dataset" ) + if self.__config.extract_datasets_to_containers: + dataset_mcps.extend(self.generate_container_for_dataset(dataset)) + for table in dataset.tables: # Create a URN for dataset ds_urn = builder.make_dataset_urn_with_platform_instance( @@ -461,7 +463,6 @@ def to_datahub_dataset( self.append_container_mcp( dataset_mcps, - workspace, ds_urn, dataset, ) @@ -473,8 +474,6 @@ def to_datahub_dataset( dataset.tags, ) - self.processed_datasets.add(dataset) - return dataset_mcps @staticmethod @@ -572,7 +571,6 @@ def tile_custom_properties(tile: powerbi_data_classes.Tile) -> dict: self.append_container_mcp( result_mcps, - workspace, chart_urn, ) @@ -695,7 +693,6 @@ def chart_custom_properties(dashboard: powerbi_data_classes.Dashboard) -> dict: self.append_container_mcp( list_of_mcps, - workspace, dashboard_urn, ) @@ -711,7 +708,6 @@ def chart_custom_properties(dashboard: powerbi_data_classes.Dashboard) -> dict: def append_container_mcp( self, list_of_mcps: List[MetadataChangeProposalWrapper], - workspace: powerbi_data_classes.Workspace, entity_urn: str, dataset: Optional[powerbi_data_classes.PowerBIDataset] = None, ) -> None: @@ -719,12 +715,8 @@ def append_container_mcp( dataset, powerbi_data_classes.PowerBIDataset ): container_key = dataset.get_dataset_key(self.__config.platform_name) - elif self.__config.extract_workspaces_to_containers: - container_key = workspace.get_workspace_key( - platform_name=self.__config.platform_name, - platform_instance=self.__config.platform_instance, - workspace_id_as_urn_part=self.__config.workspace_id_as_urn_part, - ) + elif self.__config.extract_workspaces_to_containers and self.workspace_key: + container_key = self.workspace_key else: return None @@ -743,6 +735,7 @@ def 
generate_container_for_workspace( ) -> Iterable[MetadataWorkUnit]: self.workspace_key = workspace.get_workspace_key( platform_name=self.__config.platform_name, + platform_instance=self.__config.platform_instance, workspace_id_as_urn_part=self.__config.workspace_id_as_urn_part, ) container_work_units = gen_containers( @@ -754,7 +747,7 @@ def generate_container_for_workspace( def generate_container_for_dataset( self, dataset: powerbi_data_classes.PowerBIDataset - ) -> Iterable[MetadataWorkUnit]: + ) -> Iterable[MetadataChangeProposalWrapper]: dataset_key = dataset.get_dataset_key(self.__config.platform_name) container_work_units = gen_containers( container_key=dataset_key, @@ -762,7 +755,13 @@ def generate_container_for_dataset( parent_container_key=self.workspace_key, sub_types=[BIContainerSubTypes.POWERBI_DATASET], ) - return container_work_units + + # The if statement here is just to satisfy mypy + return [ + wu.metadata + for wu in container_work_units + if isinstance(wu.metadata, MetadataChangeProposalWrapper) + ] def append_tag_mcp( self, @@ -965,7 +964,6 @@ def to_chart_mcps( self.append_container_mcp( list_of_mcps, - workspace, chart_urn, ) @@ -1086,7 +1084,6 @@ def report_to_dashboard( self.append_container_mcp( list_of_mcps, - workspace, dashboard_urn, ) @@ -1220,10 +1217,6 @@ def validate_dataset_type_mapping(self): f"Dataset lineage would get ingested for data-platform = {self.source_config.dataset_type_mapping}" ) - def extract_datasets_as_containers(self): - for dataset in self.mapper.processed_datasets: - yield from self.mapper.generate_container_for_dataset(dataset) - def extract_independent_datasets( self, workspace: powerbi_data_classes.Workspace ) -> Iterable[MetadataWorkUnit]: @@ -1270,9 +1263,6 @@ def get_workspace_workunit( ): yield work_unit - if self.source_config.extract_datasets_to_containers: - yield from self.extract_datasets_as_containers() - yield from self.extract_independent_datasets(workspace) def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_container.json b/metadata-ingestion/tests/integration/powerbi/golden_test_container.json index 850816bf80807..91b5499eaadcb 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_container.json +++ b/metadata-ingestion/tests/integration/powerbi/golden_test_container.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,44 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "powerbi", + "dataset": "05169CD2-E713-41E6-9600-1D8066D95445" + }, + "name": 
"library-dataset" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +135,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -115,7 +157,79 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:powerbi" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "PowerBI Dataset" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", + "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -130,7 +244,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -148,7 +263,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,12 +274,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,13 +294,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -200,7 +322,8 @@ }, 
"systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -221,7 +344,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -236,7 +360,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -254,7 +379,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,12 +390,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -283,13 +410,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -306,7 +438,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -327,7 +460,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -342,7 +476,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -360,7 +495,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -370,12 +506,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -389,13 +526,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -412,7 +554,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -433,7 +576,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -448,7 +592,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -466,7 +611,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -476,12 +622,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -495,13 +642,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -518,7 +670,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -539,7 +692,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +708,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -572,7 +727,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -582,12 +738,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -601,13 +758,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -624,7 +786,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -645,7 +808,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +824,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +843,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -688,12 +854,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -707,13 +874,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": 
"urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -730,7 +902,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -751,7 +924,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -766,7 +940,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -784,7 +959,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -794,12 +970,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -813,13 +990,54 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2", + "urn": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "powerbi", + "dataset": "ba0130a1-5b03-40de-9535-b34e778ea6ed" + }, + "name": "hr_pbi_test" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -836,7 +1054,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -857,7 +1076,79 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:powerbi" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "PowerBI Dataset" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": 
"container", + "entityUrn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", + "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -872,7 +1163,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -890,7 +1182,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -900,12 +1193,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -919,13 +1213,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "urn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -942,7 +1241,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -963,7 +1263,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -978,7 +1279,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -996,7 +1298,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1006,12 +1309,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "container": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1025,13 +1329,18 @@ { "id": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9", "urn": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + }, + { + "id": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", + "urn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" } ] } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1046,7 +1355,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": 
"powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1061,7 +1371,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1089,6 +1400,9 @@ } }, "inputs": [ + { + "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -1115,7 +1429,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1130,7 +1445,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1146,7 +1462,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1163,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1178,7 +1496,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1198,7 +1517,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1226,6 +1546,9 @@ } }, "inputs": [ + { + "string": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" }, @@ -1237,7 +1560,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1252,7 +1576,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1268,7 +1593,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1285,7 +1611,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1300,7 +1627,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1320,7 +1648,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1337,7 +1666,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1374,7 +1704,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1389,7 +1720,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1405,7 +1737,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1433,7 +1766,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -1448,7 +1782,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1468,7 +1803,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1485,7 +1821,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1506,7 +1843,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1521,7 +1859,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1539,22 +1878,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1571,7 +1896,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1592,7 +1918,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1607,7 +1934,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1625,22 +1953,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.SNOWFLAKE_TESTTABLE,DEV)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1657,7 +1971,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1678,7 +1993,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1693,7 +2009,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1711,22 +2028,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query,DEV)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -1743,7 +2046,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1764,7 +2068,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1779,7 +2084,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1797,46 +2103,33 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.big-query-with-parameter,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "viewProperties", "aspect": { "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" + "materialized": false, + "viewLogic": "let\n Source = Value.NativeQuery(Snowflake.Databases(\"xaa48144.snowflakecomputing.com\",\"GSL_TEST_WH\",[Role=\"ACCOUNTADMIN\"]){[Name=\"GSL_TEST_DB\"]}[Data], \"select A.name from GSL_TEST_DB.PUBLIC.SALES_ANALYST as A inner join GSL_TEST_DB.PUBLIC.SALES_FORECAST as B on A.name = B.name where startswith(A.name, 'mo')\", null, [EnableFolding=true])\nin\n Source", + "viewLanguage": "m_query" } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", "changeType": "UPSERT", - "aspectName": "viewProperties", - "aspect": { - "json": { - "materialized": false, - "viewLogic": "let\n Source = Value.NativeQuery(Snowflake.Databases(\"xaa48144.snowflakecomputing.com\",\"GSL_TEST_WH\",[Role=\"ACCOUNTADMIN\"]){[Name=\"GSL_TEST_DB\"]}[Data], \"select A.name from GSL_TEST_DB.PUBLIC.SALES_ANALYST as A inner join GSL_TEST_DB.PUBLIC.SALES_FORECAST as B on A.name = B.name where startswith(A.name, 'mo')\", null, [EnableFolding=true])\nin\n Source", - "viewLanguage": "m_query" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", - "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": { @@ -1850,7 +2143,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1865,7 +2159,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1883,22 +2178,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" - } - }, - "systemMetadata": 
{ - "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1915,7 +2196,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1936,7 +2218,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1951,7 +2234,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1969,22 +2253,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.job-history,DEV)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2001,7 +2271,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2022,7 +2293,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2037,7 +2309,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2055,22 +2328,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.postgres_test_table,DEV)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:a4ed52f9abd3ff9cc34960c0c41f72e9" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2085,7 +2344,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2100,7 +2360,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2126,6 +2387,9 @@ } }, "inputs": [ + { + "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -2152,7 +2416,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2167,7 +2432,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2184,7 +2450,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2199,7 +2466,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2219,7 +2487,8 @@ }, "systemMetadata": { 
"lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2245,6 +2514,9 @@ } }, "inputs": [ + { + "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -2271,7 +2543,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2286,7 +2559,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2303,7 +2577,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2318,7 +2593,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2338,7 +2614,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2355,7 +2632,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2388,7 +2666,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2403,7 +2682,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2419,7 +2699,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2436,7 +2717,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2464,7 +2746,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2479,7 +2762,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2499,7 +2783,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2514,7 +2799,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2529,7 +2815,310 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User4@foo.com", + "changeType": "UPSERT", + "aspectName": "corpUserKey", + "aspect": { + "json": { + "username": "User4@foo.com" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User4@foo.com", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User3@foo.com", + "changeType": "UPSERT", + "aspectName": "corpUserKey", + "aspect": { + "json": { + "username": "User3@foo.com" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User3@foo.com", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "powerbi", + "workspace": "second-demo-workspace" + }, + "name": "second-demo-workspace" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:powerbi" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Workspace" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "browsePaths", + "aspect": { + "json": { + "paths": [ + "/powerbi/second-demo-workspace" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": 
"urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e", + "urn": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "dashboardInfo", + "aspect": { + "json": { + "customProperties": { + "chartCount": "0", + "workspaceName": "second-demo-workspace", + "workspaceId": "64ED5CAD-7C22-4684-8180-826122881108" + }, + "title": "test_dashboard2", + "description": "", + "charts": [], + "datasets": [], + "lastModified": { + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + }, + "dashboardUrl": "https://localhost/dashboards/web/1" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "dashboardKey", + "aspect": { + "json": { + "dashboardTool": "powerbi", + "dashboardId": "powerbi.linkedin.com/dashboards/7D668CAD-8FFC-4505-9215-655BCA5BEBAE" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:33c7cab6ea0e58930cd6f943d0a4111e" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-8FFC-4505-9215-655BCA5BEBAE)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:users.User3@foo.com", + "type": "NONE" + }, + { + "owner": "urn:li:corpuser:users.User4@foo.com", + "type": "NONE" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json b/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json index a4527b9715704..b301ca1c1b988 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json +++ b/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, 
"systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +132,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -147,7 +154,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -162,7 +170,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -180,7 +189,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -204,7 +214,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -219,7 +230,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -238,7 +250,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -249,6 +262,10 @@ "aspect": { "json": { "path": [ + { + "id": "urn:li:container:e3dc21b5c79f9d594f639a9f57d7f2c3", + "urn": "urn:li:container:e3dc21b5c79f9d594f639a9f57d7f2c3" + }, { "id": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", "urn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" @@ -258,7 +275,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -275,7 +293,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -307,7 +326,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -328,7 +348,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -343,7 +364,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -361,7 +383,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -385,7 +408,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -400,7 +424,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -419,7 +444,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -430,6 +456,10 @@ "aspect": { "json": { 
"path": [ + { + "id": "urn:li:container:e3dc21b5c79f9d594f639a9f57d7f2c3", + "urn": "urn:li:container:e3dc21b5c79f9d594f639a9f57d7f2c3" + }, { "id": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", "urn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" @@ -439,7 +469,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -456,7 +487,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -540,7 +572,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -561,7 +594,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -576,7 +610,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -594,7 +629,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -618,7 +654,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -633,7 +670,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -652,7 +690,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -663,6 +702,10 @@ "aspect": { "json": { "path": [ + { + "id": "urn:li:container:e3dc21b5c79f9d594f639a9f57d7f2c3", + "urn": "urn:li:container:e3dc21b5c79f9d594f639a9f57d7f2c3" + }, { "id": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc", "urn": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" @@ -672,7 +715,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -719,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -735,7 +781,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -752,7 +799,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -767,7 +815,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -787,7 +836,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -815,6 +865,9 @@ } }, "inputs": [ + { + "string": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" }, @@ -829,7 +882,8 @@ }, "systemMetadata": { "lastObserved": 
1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -844,7 +898,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -860,7 +915,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -877,7 +933,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -892,7 +949,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -912,7 +970,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -929,7 +988,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -966,7 +1026,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -981,7 +1042,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -997,7 +1059,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1012,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1032,7 +1096,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1049,7 +1114,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1079,7 +1145,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1094,7 +1161,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1110,7 +1178,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1127,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1151,7 +1221,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1166,7 +1237,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1186,7 +1258,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1205,7 +1278,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1220,7 +1294,8 
@@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1235,7 +1310,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1252,7 +1328,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1267,7 +1344,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1287,7 +1365,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1302,7 +1381,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "powerbi-test" + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py index 7232d2a38da1d..c9b0ded433749 100644 --- a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py +++ b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py @@ -1039,7 +1039,11 @@ def test_workspace_container( "type": "powerbi", "config": { **default_source_config(), + "workspace_id_pattern": { + "deny": ["64ED5CAD-7322-4684-8180-826122881108"], + }, "extract_workspaces_to_containers": True, + "extract_datasets_to_containers": True, "extract_reports": True, }, }, From bfa1769d4dd4f5281d751c6998c586e4e021897d Mon Sep 17 00:00:00 2001 From: John Joyce Date: Thu, 9 Nov 2023 17:56:33 -0800 Subject: [PATCH 085/792] refactor(schema tab): Remove last observed timestamps from schema tab (#9188) --- .../schema/SchemaTimeStamps.test.tsx | 23 ------- .../schema/components/SchemaHeader.tsx | 6 -- .../schema/components/SchemaTimeStamps.tsx | 64 ------------------- .../shared/tabs/Dataset/Schema/SchemaTab.tsx | 5 -- 4 files changed, 98 deletions(-) delete mode 100644 datahub-web-react/src/app/entity/dataset/profile/__tests__/schema/SchemaTimeStamps.test.tsx delete mode 100644 datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaTimeStamps.tsx diff --git a/datahub-web-react/src/app/entity/dataset/profile/__tests__/schema/SchemaTimeStamps.test.tsx b/datahub-web-react/src/app/entity/dataset/profile/__tests__/schema/SchemaTimeStamps.test.tsx deleted file mode 100644 index c8bb5d8100f2a..0000000000000 --- a/datahub-web-react/src/app/entity/dataset/profile/__tests__/schema/SchemaTimeStamps.test.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import { render } from '@testing-library/react'; -import React from 'react'; -import { toRelativeTimeString } from '../../../../../shared/time/timeUtils'; -import SchemaTimeStamps from '../../schema/components/SchemaTimeStamps'; - -describe('SchemaTimeStamps', () => { - it('should render last observed text if lastObserved is not null', () => { - const { getByText, queryByText } = render(<SchemaTimeStamps lastUpdated={123} lastObserved={123} />); - expect(getByText(`Last observed ${toRelativeTimeString(123)}`)).toBeInTheDocument(); - expect(queryByText(`Reported ${toRelativeTimeString(123)}`)).toBeNull(); - }); - - it('should render last updated text if lastObserved is null', () => { - const { getByText, queryByText } = render(<SchemaTimeStamps lastUpdated={123} lastObserved={null} />); - expect(queryByText(`Last observed ${toRelativeTimeString(123)}`)).toBeNull(); - expect(getByText(`Reported 
${toRelativeTimeString(123)}`)).toBeInTheDocument(); - }); - - it('should return null if lastUpdated and lastObserved are both null', () => { - const { container } = render(); - expect(container.firstChild).toBeNull(); - }); -}); diff --git a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx index 9e9e0ede2a1ce..2fc8fc11cd1b2 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx @@ -17,7 +17,6 @@ import { SemanticVersionStruct } from '../../../../../../types.generated'; import { toRelativeTimeString } from '../../../../../shared/time/timeUtils'; import { ANTD_GRAY, REDESIGN_COLORS } from '../../../../shared/constants'; import { navigateToVersionedDatasetUrl } from '../../../../shared/tabs/Dataset/Schema/utils/navigateToVersionedDatasetUrl'; -import SchemaTimeStamps from './SchemaTimeStamps'; import getSchemaFilterFromQueryString from '../../../../shared/tabs/Dataset/Schema/utils/getSchemaFilterFromQueryString'; const SchemaHeaderContainer = styled.div` @@ -137,8 +136,6 @@ type Props = { hasKeySchema: boolean; showKeySchema: boolean; setShowKeySchema: (show: boolean) => void; - lastUpdated?: number | null; - lastObserved?: number | null; selectedVersion: string; versionList: Array; showSchemaAuditView: boolean; @@ -158,8 +155,6 @@ export default function SchemaHeader({ hasKeySchema, showKeySchema, setShowKeySchema, - lastUpdated, - lastObserved, selectedVersion, versionList, showSchemaAuditView, @@ -255,7 +250,6 @@ export default function SchemaHeader({ )} - - {lastObserved && ( - Last observed on {toLocalDateTimeString(lastObserved)}. - )} - {lastUpdated &&
First reported on {toLocalDateTimeString(lastUpdated)}.
} - - } - > - - {lastObserved && ( - - Last observed {toRelativeTimeString(lastObserved)} - - )} - {!lastObserved && lastUpdated && ( - - - Reported {toRelativeTimeString(lastUpdated)} - - )} - - - ); -} - -export default SchemaTimeStamps; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx index 4bdb2dac033e7..75027e17b6d0c 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx @@ -151,9 +151,6 @@ export const SchemaTab = ({ properties }: { properties?: any }) => { return groupByFieldPath(filteredRows, { showKeySchema }); }, [showKeySchema, filteredRows]); - const lastUpdated = getSchemaBlameData?.getSchemaBlame?.version?.semanticVersionTimestamp; - const lastObserved = versionedDatasetData.data?.versionedDataset?.schema?.lastObserved; - const schemaFieldBlameList: Array = (getSchemaBlameData?.getSchemaBlame?.schemaFieldBlameList as Array) || []; @@ -167,8 +164,6 @@ export const SchemaTab = ({ properties }: { properties?: any }) => { hasKeySchema={hasKeySchema} showKeySchema={showKeySchema} setShowKeySchema={setShowKeySchema} - lastObserved={lastObserved} - lastUpdated={lastUpdated} selectedVersion={selectedVersion} versionList={versionList} showSchemaAuditView={showSchemaAuditView} From 9c0f4de38241477524682943c815d5c03259e1a5 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Fri, 10 Nov 2023 16:06:06 +0900 Subject: [PATCH 086/792] docs: adjust sidebar & create new admin section (#9064) --- docs-website/sidebars.js | 213 ++++++++++++++++++++------------------ docs/CODE_OF_CONDUCT.md | 2 +- docs/saas.md | 14 --- docs/townhall-history.md | 216 +++++++++++++++++++++++---------------- docs/townhalls.md | 11 +- 5 files changed, 253 insertions(+), 203 deletions(-) delete mode 100644 docs/saas.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 4d2420256ebff..f15f2927379c5 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -9,17 +9,13 @@ module.exports = { overviewSidebar: [ { - label: "Getting Started", + label: "What Is DataHub?", type: "category", collapsed: true, + link: { type: "doc", id: "docs/features" }, items: [ // By the end of this section, readers should understand the core use cases that DataHub addresses, // target end-users, high-level architecture, & hosting options - { - type: "doc", - label: "Introduction", - id: "docs/features", - }, { type: "doc", label: "Quickstart", @@ -31,7 +27,6 @@ module.exports = { href: "https://demo.datahubproject.io/", }, "docs/what-is-datahub/datahub-concepts", - "docs/saas", ], }, { @@ -161,7 +156,15 @@ module.exports = { "docs/deploy/azure", "docker/README", "docs/deploy/kubernetes", + "docs/deploy/confluent-cloud", "docs/deploy/environment-vars", + "docs/how/extract-container-logs", + ], + }, + { + type: "category", + label: "Admin", + items: [ { Authentication: [ "docs/authentication/README", @@ -195,20 +198,91 @@ module.exports = { "docs/how/restore-indices", "docs/advanced/db-retention", "docs/advanced/monitoring", - "docs/how/extract-container-logs", "docs/deploy/telemetry", "docs/how/kafka-config", - "docs/deploy/confluent-cloud", "docs/advanced/no-code-upgrade", "docs/how/jattach-guide", ], }, - "docs/how/updating-datahub", ], }, { - API: [ - "docs/api/datahub-apis", + Developers: [ + // The purpose of this section is to provide developers & technical users 
with + // concrete tutorials for how to work with the DataHub CLI & APIs + { + Architecture: [ + "docs/architecture/architecture", + "docs/components", + "docs/architecture/metadata-ingestion", + "docs/architecture/metadata-serving", + "docs/architecture/docker-containers", + ], + }, + { + "Metadata Model": [ + "docs/modeling/metadata-model", + "docs/modeling/extending-the-metadata-model", + "docs/what/mxe", + { + Entities: [ + { + type: "autogenerated", + dirName: "docs/generated/metamodel/entities", // '.' means the current docs folder + }, + ], + }, + ], + }, + { + "Developing on DataHub": [ + "docs/developers", + "docs/docker/development", + "metadata-ingestion/developing", + "docs/api/graphql/graphql-endpoint-development", + { + Modules: [ + "datahub-web-react/README", + "datahub-frontend/README", + "datahub-graphql-core/README", + "metadata-service/README", + "metadata-jobs/mae-consumer-job/README", + "metadata-jobs/mce-consumer-job/README", + ], + }, + ], + }, + "docs/plugins", + { + Troubleshooting: [ + "docs/troubleshooting/quickstart", + "docs/troubleshooting/build", + "docs/troubleshooting/general", + ], + }, + { + Advanced: [ + "metadata-ingestion/docs/dev_guides/reporting_telemetry", + "docs/advanced/mcp-mcl", + "docker/datahub-upgrade/README", + "docs/advanced/no-code-modeling", + "datahub-web-react/src/app/analytics/README", + "docs/how/migrating-graph-service-implementation", + "docs/advanced/field-path-spec-v2", + "metadata-ingestion/adding-source", + "docs/how/add-custom-ingestion-source", + "docs/how/add-custom-data-platform", + "docs/advanced/browse-paths-upgrade", + "docs/browseV2/browse-paths-v2", + ], + }, + ], + }, + { + type: "category", + label: "API", + link: { type: "doc", id: "docs/api/datahub-apis" }, + items: [ { "GraphQL API": [ { @@ -466,92 +540,14 @@ module.exports = { ], }, { - Develop: [ - // The purpose of this section is to provide developers & technical users with - // concrete tutorials for how to work with the DataHub CLI & APIs - { - "DataHub Metadata Model": [ - "docs/modeling/metadata-model", - "docs/modeling/extending-the-metadata-model", - "docs/what/mxe", - { - Entities: [ - { - type: "autogenerated", - dirName: "docs/generated/metamodel/entities", // '.' 
means the current docs folder - }, - ], - }, - ], - }, - { - Architecture: [ - "docs/architecture/architecture", - "docs/components", - "docs/architecture/metadata-ingestion", - "docs/architecture/metadata-serving", - "docs/architecture/docker-containers", - ], - }, - { - "Developing on DataHub": [ - "docs/developers", - "docs/docker/development", - "metadata-ingestion/developing", - "docs/api/graphql/graphql-endpoint-development", - { - Modules: [ - "datahub-web-react/README", - "datahub-frontend/README", - "datahub-graphql-core/README", - "metadata-service/README", - "metadata-jobs/mae-consumer-job/README", - "metadata-jobs/mce-consumer-job/README", - ], - }, - ], - }, - "docs/plugins", - { - Troubleshooting: [ - "docs/troubleshooting/quickstart", - "docs/troubleshooting/build", - "docs/troubleshooting/general", - ], - }, - { - Advanced: [ - "metadata-ingestion/docs/dev_guides/reporting_telemetry", - "docs/advanced/mcp-mcl", - "docker/datahub-upgrade/README", - "docs/advanced/no-code-modeling", - "datahub-web-react/src/app/analytics/README", - "docs/how/migrating-graph-service-implementation", - "docs/advanced/field-path-spec-v2", - "metadata-ingestion/adding-source", - "docs/how/add-custom-ingestion-source", - "docs/how/add-custom-data-platform", - "docs/advanced/browse-paths-upgrade", - "docs/browseV2/browse-paths-v2", - ], - }, - ], - }, - { - Community: [ - "docs/slack", - "docs/townhalls", - "docs/townhall-history", - "docs/CODE_OF_CONDUCT", - "docs/CONTRIBUTING", - "docs/links", - "docs/rfc", - "SECURITY", - ], - }, - { - "Managed DataHub": [ - "docs/managed-datahub/managed-datahub-overview", + label: "Managed DataHub", + type: "category", + collapsed: true, + link: { + type: "doc", + id: "docs/managed-datahub/managed-datahub-overview", + }, + items: [ "docs/managed-datahub/welcome-acryl", { type: "doc", @@ -648,7 +644,26 @@ module.exports = { ], }, { - "Release History": ["releases"], + label: "Community", + type: "category", + collapsed: true, + link: { + type: "generated-index", + title: "Community", + description: "Learn about DataHub community.", + }, + items: [ + "docs/slack", + "docs/townhalls", + // "docs/townhall-history", + "docs/CODE_OF_CONDUCT", + "docs/CONTRIBUTING", + "docs/links", + "docs/rfc", + ], + }, + { + "Release History": ["releases", "docs/how/updating-datahub"], }, // "Candidates for Deprecation": [ diff --git a/docs/CODE_OF_CONDUCT.md b/docs/CODE_OF_CONDUCT.md index 1c4fd659f14e0..ca899dc26d5f7 100644 --- a/docs/CODE_OF_CONDUCT.md +++ b/docs/CODE_OF_CONDUCT.md @@ -1,4 +1,4 @@ -# Contributor Covenant Code of Conduct +# Code of Conduct ## Our Pledge diff --git a/docs/saas.md b/docs/saas.md deleted file mode 100644 index de57b5617e062..0000000000000 --- a/docs/saas.md +++ /dev/null @@ -1,14 +0,0 @@ -# DataHub SaaS - -Sign up for fully managed, hassle-free and secure SaaS service for DataHub, provided by [Acryl Data](https://www.acryl.io/). - -
- - Sign up - -
- -Refer to [Managed Datahub Exclusives](/docs/managed-datahub/managed-datahub-overview.md) for more information. diff --git a/docs/townhall-history.md b/docs/townhall-history.md index d92905af0cd72..0242e4ec2cee1 100644 --- a/docs/townhall-history.md +++ b/docs/townhall-history.md @@ -1,22 +1,55 @@ -# Town Hall History +# Town Hall History -A list of previous Town Halls, their planned schedule, and the recording of the meeting. +:::note +For the Town Hall meetings after June 2023, please refer to our [LinkedIn Live event history](https://www.linkedin.com/company/acryl-data/events/). +::: -## 03/23/2023 -[Full YouTube video](https://youtu.be/BTX8rIBe0yo) +### June 2023 +[Full YouTube video](https://www.youtube.com/watch?v=1QVcUmRQK5E) + +- Community & Project Updates - Maggie Hays & Shirshanka Das (Acryl Data) +- Community Case Study: Dataset Joins - Raj Tekal & Bobbie-Jean Nowak (Optum) +- DataHub 201: Column-Level Lineage - Hyejin Yoon (Acryl Data) +- Sneak Peek: BigQuery Column-Level Lineage with SQL Parsing - Harshal Sheth (Acryl Data) +- DataHub Performance Tuning – Indy Prentice (Acryl Data) + + +### May 2023 +[Full YouTube video](https://www.youtube.com/watch?v=KHNPjSbbZR8) + +**Agenda** +- Community - Maggie Hays & Shirshanka Das (Acryl Data) +- Community Case Study: Jira + DataHub for Access Requests - Joshua Garza (Sharp Healthcare) +- Sneak Peek: Use your own ownership types - Pedro Silva (Acryl Data) +- Sneak Peek: Data Contracts are coming! – John Joyce, Shirshanka (Acryl Data) +- Bring DataHub into your BI Tools — Chris Collins (Acryl Data) + +### Apr 2023 +[Full YouTube video](https://www.youtube.com/watch?v=D5YYGu-ZIBo) + +**Agenda** +- Community & Roadmap Updates - Maggie Hays & Shirshanka Das (Acryl Data) +- DataHub 201: Python SDK - Hyejin Yoon (Acryl Data) +- Streamlined Search & Browse Experience - Chris Collins (Acryl Data) +- Acryl's DataHub GitHub Actions - Harshal Sheth (Acryl Data) +- Data Products in DataHub - Shirshanka Das & Chris Collins (Acryl Data) +- DataHub Docs Bot - Maggie Hays (Acryl Data) + +### Mar 2023 -### Agenda +[Full YouTube video](https://youtu.be/BTX8rIBe0yo) +**Agenda** - Community & Roadmap Update - Recent Releases - Community Case Study — Jumio’s DataHub adoption journey - DataHub 201: Data Debugging - Sneak Peek: Streamlined Filtering Experience -## 02/23/2023 +### Feb 2023 [Full YouTube video](https://youtu.be/UItt4ppJSFc) -### Agenda +**Agenda** - Community & Roadmap Update - Recent Releases @@ -27,20 +60,20 @@ A list of previous Town Halls, their planned schedule, and the recording of the - Simplifying Metadata Ingestion - DataHub 201: Rolling Out DataHub -## 01/26/2023 +### Jan 2023 (26th) [Full YouTube video](https://youtu.be/A3mSiGHZ6Rc) -### Agenda +**Agenda** - What’s to Come - Q1 2023 Roadmap: Data Products, Data Contracts and more - Community Case Study - Notion: Automating annotations and metadata propagation - Community Contribution - Grab: Improvements to documentation editing - Simplifying DataHub - Removing Schema Registry requirement and introducing DataHub Lite -## 01/05/2023 +### Jan 2023 (5th) [Full YouTube video](https://youtu.be/ECxIMbKwuOY) -### Agenda +**Agenda** - DataHub Community: 2022 in Review - Our Community of Data Practitioners is one of a kind. We’ll take the time to celebrate who we are, what we’ve built, and how we’ve collaborated in the past 12 months. 
 - Search Improvements - Learn how we’re making the Search experience smarter and faster to connect you with the most relevant resources during data discovery.
@@ -49,13 +82,12 @@ A list of previous Town Halls, their planned schedule, and the recording of the
 - Sneak Peek: Time-based Lineage - Get a preview of how you’ll soon be able to trace lineage between datasets across different points in time to understand how interdependencies have evolved.
 - Sneak Peek: Chrome Extension - Soon, you’ll be able to quickly access rich metadata from DataHub while exploring resources in Looker via our upcoming Chrome Extension.
 
-## 12/01/2022
+### Dec 2022
 [Full YouTube video](https://youtu.be/BlCLhG8lGoY)
 
-### Agenda
+**Agenda**
 
 November Town Hall (in December!)
-
 - Community Case Study - The Pinterest Team will share how they have integrated DataHub + Thrift and extended the Metadata Model with a Data Element entity to capture semantic types.
 - NEW! Ingestion Quickstart Guides - DataHub newbies, this one is for you! We’re rolling out ingestion quickstart guides to help you quickly get up and running with DataHub + Snowflake, BigQuery, and more!
 - NEW! In-App Product Tours - We’re making it easier than ever for end-users to get familiar with all that DataHub has to offer - hear all about the in-product onboarding resources we’re rolling out soon!
@@ -64,10 +96,10 @@ November Town Hall (in December!)
 - NEW! Slack + Microsoft Teams Integrations - Send automated alerts to Slack and/or Teams to keep track of critical events and changes within DataHub.
 - Hacktoberfest Winners Announced - We’ll recap this year’s Hacktoberfest and announce three winners of a $250 Amazon gift card & DataHub Swag.
 
-## 10/27/2022
+### Oct 2022
 [Full YouTube video](https://youtu.be/B74WHxX5EMk)
 
-### Agenda
+**Agenda**
 
 - Conquer Data Governance with Acryl Data’s Metadata Tests - Learn how to tackle Data Governance with incremental, automation-driven governance using Metadata Tests provided in Acryl Data’s managed DataHub offering
 - Community Case Study - The Grab Team shares how they are using DataHub for data discoverability, automated classification and governance workflows, data quality observability, and beyond!
@@ -75,20 +107,19 @@ November Town Hall (in December!)
 - Sneak Peek! Saved Views - Learn how you can soon use Saved Views to help end-users navigate entities in DataHub with more precision and focus
 - Performance Improvements - Hear about the latest upgrades to DataHub performance
 
-## 9/29/2022
+### Sep 2022
 [Full YouTube video](https://youtu.be/FjkNySWkghY)
 
-### Agenda
-
+**Agenda**
 - Column Level Lineage is here! - Demo of column-level lineage and impact analysis in the DataHub UI
 - Community Case Study - The Stripe Team shares how they leverage DataHub to power observability within their Airflow-based ecosystem
 - Sneak Peek! Automated PII Classification - Preview upcoming functionality to automatically identify data fields that likely contain sensitive data
 - Ingestion Improvements Galore - Improved performance and functionality for dbt, Looker, Tableau, and Presto ingestion sources
 
-## 8/25/2022
+### Aug 2022
 [Full YouTube video](https://youtu.be/EJCKxKBvCwo)
 
-### Agenda
+**Agenda**
 
 - Community Case Study - The Etsy Team shares their journey of adopting DataHub
 - Looker & DataHub Improvements - surface the most relevant Looks and Dashboards
@@ -97,10 +128,11 @@
- Patch Support - Native support for PATCH in the metadata protocol to support efficient updates to add & remove owners, lineage, tags and more - Sneak Peek! Advanced Search -## 7/28/2022 +### Jul 2022 + [Full YouTube video](https://youtu.be/Zrkf3Mzcvc4) -### Agenda +**Agenda** - Community Updates - Project Updates @@ -109,21 +141,20 @@ November Town Hall (in December!) - Streamlined Metadata Ingestion - DataHub 201: Metadata Enrichment -## 6/30/2022 +### Jun 2022 [Full YouTube video](https://youtu.be/fAD53fEJ6m0) -### Agenda - +**Agenda** - Community Updates - Project Updates - dbt Integration Updates - CSV Ingestion Support - DataHub 201 - Glossary Term Deep Dive -## 5/26/2022 +### May 2022 [Full YouTube video](https://youtu.be/taKb_zyowEE) -### Agenda +**Agenda** - Community Case Study: Hear how the G-Research team is using Cassandra as DataHub’s Backend - Creating & Editing Glossary Terms from the DataHub UI @@ -132,20 +163,22 @@ November Town Hall (in December!) - Sneak Peek: Data Reliability with DataHub - Metadata Day Hackathon Winners -## 4/28/2022 +### Apr 2022 [Full YouTube video](https://www.youtube.com/watch?v=7iwNxHgqxtg) -### Agenda +**Agenda** + - Community Case Study: Hear from Included Health about how they are embedding external tools into the DataHub UI - New! Actions Framework: run custom code when changes happen within DataHub - UI Refresh for ML Entities - Improved deletion support for time-series aspects, tags, terms, & more - OpenAPI Improvements -## 3/31/2022 +### Mar 2022 [Full YouTube video](https://www.youtube.com/watch?v=IVazVgcNRdw) -### Agenda +**Agenda** + - Community Case Study: Hear from Zendesk about how they are applying “shift left” principles by authoring metadata in their Protobuf schemas - RBAC Functionality: View-Based Policies - Schema Version History - surfacing the history of schema changes in DataHub's UI @@ -154,20 +187,22 @@ November Town Hall (in December!) - Delete API -## 2/25/2022 +### Feb 2022 [Full YouTube video](https://www.youtube.com/watch?v=enBqB2Dbuv4) -### Agenda +**Agenda** + - Lineage Impact Analysis - using DataHub to understand the impact of changes on downstream dependencies - Displaying Data Quality Checks in the UI - Roadmap update: Schema Version History & Column-Level Lineage - Community Case Study: Managing Lineage via YAML -## 1/28/2022 +### Jan 2022 [Full YouTube video](https://youtu.be/ShlSR3dMUnE) -### Agenda +**Agenda** + - Community & Roadmap Updates by Maggie Hays (Acryl Data) - Project Updates by Shirshanka Das (Acryl Data) @@ -176,10 +211,11 @@ November Town Hall (in December!) - DataHub Basics — Data Profiling & Usage Stats 101 by Maggie Hays & Tamás Németh (Acryl Data) - Demo: Spark Lineage by Mugdha Hardikar (GS Lab) & Shirshanka Das -## 12/17/2021 +### Dec 2021 [Full YouTube video](https://youtu.be/rYInKCwxu7o) -### Agenda +**Agenda** + - Community & Roadmap Updates by Maggie Hays (Acryl Data) - Project Updates by Shirshanka Das (Acryl Data) - 2021 DataHub Community in Review by Maggie Hays @@ -189,10 +225,11 @@ November Town Hall (in December!) - Top DataHub Contributors of 2021 - Maggie Hays - Final Surprise! We Interviewed a 10yo and a 70yo about DataHub -## 11/19/2021 +### Nov 2021 [Full YouTube video](https://youtu.be/to80sEDZz7k) -### Agenda +**Agenda** + - Community & Roadmap Updates by Maggie Hays (Acryl Data) - Project Updates by Shirshanka Das (Acryl Data) - DataHub Basics -- Lineage 101 by John Joyce & Surya Lanka (Acryl Data) @@ -200,10 +237,11 @@ November Town Hall (in December!) 
- DataHub API Authentication by John Joyce (Acryl Data) - Case Study: LinkedIn pilot to extend the OSS UI by Aikepaer Abuduweili & Joshua Shinavier -## 10/29/2021 +### Oct 2021 [Full YouTube video](https://youtu.be/GrS_uZhYNm0) -### Agenda +**Agenda** + - DataHub Community & Roadmap Update - Maggie Hays (Acryl Data) - October Project Updates - Shirshanka Das (Acryl Data) - Introducing Recommendations - John Joyce & Dexter Lee (Acryl Data) @@ -211,10 +249,11 @@ November Town Hall (in December!) - Data Profiling Improvements - Surya Lanka & Harshal Sheth (Acryl Data) - Lineage Improvements & BigQuery Dataset Lineage by Gabe Lyons & Varun Bharill (Acryl Data) -## 9/24/2021 +### Sep 2021 [Full YouTube video](https://youtu.be/nQDiKPKnLLQ) -### Agenda +**Agenda** + - Project Updates and Callouts by Shirshanka - GraphQL Public API Annoucement - Demo: Faceted Search by Gabe Lyons (Acryl Data) @@ -224,10 +263,11 @@ November Town Hall (in December!) - Offline - Foreign Key and Related Term Mapping by Gabe Lyons (Acryl Data) [video](https://www.loom.com/share/79f27c2d9f6c4a3b8aacbc48c19add18) -## 8/27/2021 +### Aug 2021 [Full YouTube video](https://youtu.be/3joZINi3ti4) -### Agenda +**Agenda** + - Project Updates and Callouts by Shirshanka - Business Glossary Demo - 0.8.12 Upcoming Release Highlights @@ -239,12 +279,13 @@ November Town Hall (in December!) - Performance Monitoring by Dexter Lee (Acryl Data) [video](https://youtu.be/6Xfr_Y9abZo) -## 7/23/2021 +### Jul 2021 [Full YouTube video](https://www.youtube.com/watch?v=rZsiB8z5rG4) [Medium Post](https://medium.com/datahub-project/datahub-project-updates-f4299cd3602e?source=friends_link&sk=27af7637f7ae44786ede694c3af512a5) -### Agenda +**Agenda** + - Project Updates by Shirshanka - Release highlights @@ -253,12 +294,13 @@ November Town Hall (in December!) - Demo: AWS SageMaker integration for Models and Features by Kevin Hu (Acryl Data) -## 6/25/2021 +### Jun 2021 [Full YouTube video](https://www.youtube.com/watch?v=xUHOdDfdFpY) [Medium Post](https://medium.com/datahub-project/datahub-project-updates-ed3155476408?source=friends_link&sk=02816a16ff2acd688e6db8eb55808d31) -#### Agenda +**Agenda** + - Project Updates by Shirshanka - Release notes @@ -269,12 +311,13 @@ November Town Hall (in December!) - Developer Session: Simplified Deployment for DataHub by John Joyce, Gabe Lyons (Acryl Data) -## 5/27/2021 +### May 2021 [Full YouTube video](https://www.youtube.com/watch?v=qgW_xpIr1Ho) [Medium Post](https://medium.com/datahub-project/linkedin-datahub-project-updates-ed98cdf913c1?source=friends_link&sk=9930ec5579299b155ea87c747683d1ad) -#### Agenda +**Agenda** + - Project Updates by Shirshanka - 10 mins - 0.8.0 Release @@ -284,12 +327,13 @@ November Town Hall (in December!) - Deep Dive: No Code Metadata Engine by John Joyce (Acryl Data) - 20 mins - General Q&A and closing remarks -## 4/23/2021 +### Apr 2021 [Full YouTube video](https://www.youtube.com/watch?v=dlFa4ubJ9ho) [Medium Digest](https://medium.com/datahub-project/linkedin-datahub-project-updates-2b0d26066b8f?source=friends_link&sk=686c47219ed294e0838ae3e2fe29084d) -#### Agenda +**Agenda** + - Welcome - 5 mins - Project Updates by Shirshanka - 10 mins @@ -302,12 +346,13 @@ November Town Hall (in December!) 
- General Q&A and closing remarks - 5 mins -## 3/19/2021 +### Mar 2021 [YouTube video](https://www.youtube.com/watch?v=xE8Uc27VTG4) - + [Medium Digest](https://medium.com/datahub-project/linkedin-datahub-project-updates-697f0faddd10?source=friends_link&sk=9888633c5c7219b875125e87a703ec4d) -#### Agenda +**Agenda** + * Welcome - 5 mins * Project Updates ([slides](https://drive.google.com/file/d/1c3BTP3oDAzJr07l6pY6CkDZi5nT0cLRs/view?usp=sharing)) by [Shirshanka](https://www.linkedin.com/in/shirshankadas/) - 10 mins @@ -320,11 +365,11 @@ November Town Hall (in December!) * Closing remarks - 5 mins -## 2/19/2021 +### Feb 2021 [YouTube video](https://www.youtube.com/watch?v=Z9ImbcsAVl0) - + [Medium Digest](https://medium.com/datahub-project/linkedin-datahub-project-updates-february-2021-edition-338d2c6021f0) -#### Agenda +**Agenda** * Welcome - 5 mins * Latest React App Demo! ([video](https://www.youtube.com/watch?v=RQBEJhcen5E)) by John Joyce and Gabe Lyons - 5 mins @@ -334,12 +379,12 @@ November Town Hall (in December!) * Closing remarks - 5 mins -## 1/15/2021 +### Jan 2021 [Full Recording](https://youtu.be/r862MZTLAJ0) [Slide-deck](https://docs.google.com/presentation/d/e/2PACX-1vQ2B0iHb2uwege1wlkXHOgQer0myOMEE5EGnzRjyqw0xxS5SaAc8VMZ_1XVOHuTZCJYzZZW4i9YnzSN/pub?start=false&loop=false&delayms=3000) -Agenda +**Agenda** - Announcements - 2 mins - Community Updates ([video](https://youtu.be/r862MZTLAJ0?t=99)) - 10 mins @@ -349,10 +394,10 @@ Agenda - General Q&A from sign up sheet, slack, and participants - 15 mins - Closing remarks - 5 minutes -## 12/04/2020 +### Dec 2020 [Recording](https://linkedin.zoom.us/rec/share/8E7-lFnCi_kQ8OvXR9kW6fn-AjvV8VlqOO2xYR8b5Y_UeWI_ODcKFlxlHqYgBP7j.S-c8C1YMrz7d3Mjq) -Agenda +**Agenda** - Quick intro - 5 mins - [Why did Grofers choose DataHub for their data catalog?](https://github.com/acryldata/static-assets-test/raw/master/imgs/demo/Datahub_at_Grofers.pdf) by [Shubham Gupta](https://www.linkedin.com/in/shubhamg931/) - 15 minutes @@ -360,11 +405,11 @@ Agenda - General Q&A from sign up sheet, slack, and participants - 15 mins - Closing remarks - 5 minutes -## 11/06/2020 +### Nov 2020 [Recording](https://linkedin.zoom.us/rec/share/0yvjZ2fOzVmD8aaDo3lC59fXivmYG3EnF0U9tMVgKs827595usvSoIhtFUPjZCsU.b915nLRkw6iQlnoD) -Agenda +**Agenda** - Quick intro - 5 mins - [Lightning talk on Metadata use-cases at LinkedIn](https://github.com/acryldata/static-assets-test/raw/master/imgs/demo/Metadata_Use-Cases_at_LinkedIn_-_Lightning_Talk.pdf) by [Shirshanka Das](https://www.linkedin.com/in/shirshankadas/) (LinkedIn) - 5 mins @@ -374,11 +419,11 @@ Agenda - Closing remarks - 5 minutes -## 09/25/2020 +### Sep 2020 [Recording](https://linkedin.zoom.us/rec/share/uEQ2pRY0BHbVqk_sOTVRm05VXJ0xM_zKJ26yzfCBqNZItiBht__k_juCCahJ37QK.IKAU9qA_0qdURX4_) -Agenda +**Agenda** - Quick intro - 5 mins - [Data Discoverability at SpotHero](https://github.com/acryldata/static-assets-test/raw/master/imgs/demo/Data_Discoverability_at_SpotHero.pdf) by [Maggie Hays](https://www.linkedin.com/in/maggie-hays/) (SpotHero) - 20 mins @@ -386,23 +431,23 @@ Agenda - General Q&A from sign up sheet, slack, and participants - 15 mins - Closing remarks - 5 mins -## 08/28/2020 +### Aug 2020 [Recording](https://linkedin.zoom.us/rec/share/vMBfcb31825IBZ3T71_wffM_GNv3T6a8hicf8_dcfzQlhfFxl5i_CPVKcmYaZA) -Agenda +**Agenda** - Quick intro - 5 mins - [Data Governance look for a Digital Bank](https://www.slideshare.net/SheetalPratik/linkedinsaxobankdataworkbench) by [Sheetal Pratik](https://www.linkedin.com/in/sheetalpratik/) 
(Saxo Bank) - 20 mins - Column level lineage for datasets demo by [Nagarjuna Kanamarlapudi](https://www.linkedin.com/in/nagarjunak/) (LinkedIn) - 15 mins - General Q&A from sign up sheet and participants - 15 mins - Closing remarks - 5 mins -## 07/31/20 +### Jul 2020 [Recording](https://bluejeans.com/s/wjnDRJevi5z/) -Agenda +**Agenda** * Quick intro - 5 mins * Showcasing new entities onboarded to internal LinkedIn DataHub (Data Concepts, Schemas) by [Nagarjuna Kanamarlapudi](https://www.linkedin.com/in/nagarjunak) (LinkedIn) - 15 mins * Showcasing new Lineage UI in internal LinkedIn DataHub By [Ignacio Bona](https://www.linkedin.com/in/ignaciobona) (LinkedIn) - 10 mins @@ -410,12 +455,12 @@ Agenda * Answering questions from the signup sheet - 13 mins * Questions from the participants - 10 mins * Closing remarks - 5 mins - -## 06/26/20 + +### June 2020 [Recording](https://bluejeans.com/s/yILyR/) -Agenda +**Agenda** * Quick intro - 5 mins * Onboarding Data Process entity by [Liangjun Jiang](https://github.com/liangjun-jiang) (Expedia) - 15 mins * How to onboard a new relationship to metadata graph by [Kerem Sahin](https://github.com/keremsahin1) (Linkedin) - 15 mins @@ -423,11 +468,11 @@ Agenda * Questions from the participants - 10 mins * Closing remarks - 5 mins -## 05/29/20 +### May 2020 [Recording](https://bluejeans.com/s/GCAzY) -Agenda +**Agenda** * Quick intro - 5 mins * How to add a new aspect/feature for an existing entity in UI by [Charlie Tran](https://www.linkedin.com/in/charlie-tran/) (LinkedIn) - 10 mins * How to search over a new field by [Jyoti Wadhwani](https://www.linkedin.com/in/jyotiwadhwani/) (LinkedIn) - 10 mins @@ -435,11 +480,11 @@ Agenda * Questions from the participants - 10 mins * Closing remarks - 5 mins -## 04/17/20 +### Apr 2020 (17th) [Recording](https://bluejeans.com/s/eYRD4) -Agenda +**Agenda** * Quick intro - 5 mins * [DataHub Journey with Expedia Group](https://www.youtube.com/watch?v=ajcRdB22s5o&ab_channel=ArunVasudevan) by [Arun Vasudevan](https://www.linkedin.com/in/arun-vasudevan-55117368/) (Expedia) - 10 mins * Deploying DataHub using Nix by [Larry Luo](https://github.com/clojurians-org) (Shanghai HuaRui Bank) - 10 mins @@ -447,13 +492,13 @@ Agenda * Questions from the participants - 10 mins * Closing remarks - 5 mins -## 04/03/20 +### Apr 2020 (3rd) [Recording](https://bluejeans.com/s/vzYpa) [Q&A](https://docs.google.com/document/d/1ChF9jiJWv9wj3HLLkFYRg7NSYg8Kb0PT7COd7Hf9Zpk/edit?usp=sharing) -- Agenda +- **Agenda** * Quick intro - 5 mins * Creating Helm charts for deploying DataHub on Kubernetes by [Bharat Akkinepalli](https://www.linkedin.com/in/bharat-akkinepalli-ba0b7223/) (ThoughtWorks) - 10 mins * How to onboard a new metadata aspect by [Mars Lan](https://www.linkedin.com/in/marslan) (LinkedIn) - 10 mins @@ -461,13 +506,13 @@ Agenda * Questions from the participants - 10 mins * Closing remarks - 5 mins -## 03/20/20 +### Mar 2020 (20th) [Recording](https://bluejeans.com/s/FSKEF) [Q&A](https://docs.google.com/document/d/1vQ6tAGXsVafnPIcZv1GSYgnTJJXFOACa1aWzOQjiGHI/edit) -Agenda +**Agenda** * Quick intro - 5 mins * Internal DataHub demo - 10 mins * What's coming up next for DataHub (what roadmap items we are working on) - 10 mins @@ -475,9 +520,8 @@ Agenda * Questions from the participants - 10 mins * Closing remarks - 5 mins -## 03/06/20 +### Mar 2020 (6th) [Recording](https://bluejeans.com/s/vULMG) -[Q&A](https://docs.google.com/document/d/1N_VGqlH9CD-54LBsVlpcK2Cf2Mgmuzq79EvN9qgBqtQ/edit) - 
+[Q&A](https://docs.google.com/document/d/1N_VGqlH9CD-54LBsVlpcK2Cf2Mgmuzq79EvN9qgBqtQ/edit) \ No newline at end of file diff --git a/docs/townhalls.md b/docs/townhalls.md index f9c3bb16150cd..c80d198e5184c 100644 --- a/docs/townhalls.md +++ b/docs/townhalls.md @@ -7,8 +7,13 @@ From time to time we also use the opportunity to showcase upcoming features. ## Meeting Invite & Agenda -You can join with this link https://zoom.datahubproject.io, or [RSVP](https://rsvp.datahubproject.io/) to get a calendar invite - this will always have the most up-to-date agenda for upcoming sessions. +You can join with [this link](https://zoom.datahubproject.io) or [RSVP](https://rsvp.datahubproject.io/) to get a calendar invite - this will always have the most up-to-date agenda for upcoming sessions. + +## Town Hall History + +See our Town Hall history for the recordings and summaries of the past town halls. + +* [Town Hall Events (July 2023~)](https://www.linkedin.com/company/acryl-data/events/) +* [Town Hall Events (~June 2023)](townhall-history.md) -## Past Meetings -See [Town Hall History](townhall-history.md) for recordings of past town halls. From 179f103412d036212a1155d436a507def4f4928f Mon Sep 17 00:00:00 2001 From: Xuelei Li <115022112+lix-mms@users.noreply.github.com> Date: Fri, 10 Nov 2023 17:58:38 +0100 Subject: [PATCH 087/792] fix(metadata-io): in Neo4j service use proper algorithm to get lineage (#8687) Co-authored-by: RyanHolstien Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- build.gradle | 8 +- docker/neo4j/env/docker.env | 1 + .../docker-compose-m1.quickstart.yml | 1 + .../quickstart/docker-compose.quickstart.yml | 1 + docs/how/updating-datahub.md | 1 + metadata-io/build.gradle | 3 + .../graph/neo4j/Neo4jGraphService.java | 231 +++++++++--------- .../graph/neo4j/Neo4jGraphServiceTest.java | 130 ++++++++++ .../graph/neo4j/Neo4jTestServerBuilder.java | 6 +- 9 files changed, 267 insertions(+), 115 deletions(-) diff --git a/build.gradle b/build.gradle index 31e005e001cf0..54802917d05a5 100644 --- a/build.gradle +++ b/build.gradle @@ -7,6 +7,8 @@ buildscript { ext.springBootVersion = '2.7.14' ext.openTelemetryVersion = '1.18.0' ext.neo4jVersion = '4.4.9' + ext.neo4jTestVersion = '4.4.25' + ext.neo4jApocVersion = '4.4.0.20:all' ext.testContainersVersion = '1.17.4' ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x ext.jacksonVersion = '2.15.2' @@ -154,8 +156,10 @@ project.ext.externalDependency = [ 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', - 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jVersion, + 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, + 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, + 'neo4jApoc': 'org.neo4j.procedure:apoc:' + neo4jApocVersion, 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', @@ -218,7 +222,7 @@ project.ext.externalDependency = [ 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' - + ] allprojects { diff --git a/docker/neo4j/env/docker.env b/docker/neo4j/env/docker.env 
index 961a5ffcf5483..c8f2a4878900f 100644 --- a/docker/neo4j/env/docker.env +++ b/docker/neo4j/env/docker.env @@ -1,3 +1,4 @@ NEO4J_AUTH=neo4j/datahub NEO4J_dbms_default__database=graph.db NEO4J_dbms_allow__upgrade=true +NEO4JLABS_PLUGINS="[\"apoc\"]" diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 613718306abef..4df32395cf82d 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -253,6 +253,7 @@ services: - NEO4J_AUTH=neo4j/datahub - NEO4J_dbms_default__database=graph.db - NEO4J_dbms_allow__upgrade=true + - NEO4JLABS_PLUGINS=["apoc"] healthcheck: interval: 1s retries: 5 diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index 30ccbae59be74..29c980532d46f 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -253,6 +253,7 @@ services: - NEO4J_AUTH=neo4j/datahub - NEO4J_dbms_default__database=graph.db - NEO4J_dbms_allow__upgrade=true + - NEO4JLABS_PLUGINS=["apoc"] healthcheck: interval: 1s retries: 5 diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 28f11e4b6d707..90b53161950e8 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -16,6 +16,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ### Breaking Changes +- #8687 (datahub-helm #365 #353) - If Helm is used for installation and Neo4j is enabled, update the prerequisites Helm chart to version >=0.1.2 and adjust your value overrides in the `neo4j:` section according to the new structure. - #9044 - GraphQL APIs for adding ownership now expect either an `ownershipTypeUrn` referencing a customer ownership type or a (deprecated) `type`. Where before adding an ownership without a concrete type was allowed, this is no longer the case. For simplicity you can use the `type` parameter which will get translated to a custom ownership type internally if one exists for the type being added. - #9010 - In Redshift source's config `incremental_lineage` is set default to off. - #8810 - Removed support for SQLAlchemy 1.3.x. Only SQLAlchemy 1.4.x is supported now. 
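Note on the new APOC dependency: the NEO4JLABS_PLUGINS=["apoc"] entries added
above make Neo4j's apoc.path.* procedures available, which the reworked
lineage traversal in this patch calls (apoc.path.spanningTree for plain
lineage, apoc.path.subgraphNodes plus shortestPath for time-filtered
lineage). As a rough sketch of what the generated statement boils down to --
illustrative only; the urn, hop limit, and filter strings below are invented,
since the real ones are assembled at runtime from the LineageRegistry and the
request -- the no-time-filter query has roughly this shape:

    // Expand the lineage spanning tree from a single start entity.
    // labelFilter: "+label" entries whitelist node labels.
    // relationshipFilter: "<Type" follows Type edges against their
    // direction, "Type>" along it; alternatives are OR-ed with "|".
    MATCH (a {urn: 'urn:li:dataset:(urn:li:dataPlatform:hive,SampleTable,PROD)'})
    CALL apoc.path.spanningTree(a, {
      relationshipFilter: 'DownstreamOf>|<Produces|Consumes>',
      labelFilter: '+dataset|+dataJob',
      minLevel: 1,
      maxLevel: 3
    })
    YIELD path
    RETURN last(nodes(path)) AS entity, length(path) AS degree, path;

Because spanningTree visits each node at most once (breadth-first by
default), it yields a single shortest-by-hop-count path per reachable
entity, so the post-query deduplication that the previous
shortestPath-per-pair implementation needed is dropped below; only the
time-filtered branch still falls back to an explicit shortestPath match.
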
diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 740fed61f13d5..4b36f533476f7 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -57,6 +57,9 @@ dependencies { testImplementation externalDependency.h2 testImplementation externalDependency.mysqlConnector testImplementation externalDependency.neo4jHarness + testImplementation (externalDependency.neo4jApoc) { + exclude group: 'org.yaml', module: 'snakeyaml' + } testImplementation externalDependency.mockito testImplementation externalDependency.mockitoInline testImplementation externalDependency.iStackCommons diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index 41d39cca4edda..ac57fb7db2b78 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -5,6 +5,7 @@ import com.datahub.util.exception.RetryLimitReached; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; import com.linkedin.common.UrnArray; import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.Urn; @@ -25,17 +26,20 @@ import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; import com.linkedin.metadata.utils.metrics.MetricUtils; +import com.linkedin.util.Pair; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.StringJoiner; -import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.AllArgsConstructor; @@ -50,8 +54,7 @@ import org.neo4j.driver.Session; import org.neo4j.driver.SessionConfig; import org.neo4j.driver.exceptions.Neo4jException; -import org.neo4j.driver.internal.InternalRelationship; -import org.neo4j.driver.types.Node; +import org.neo4j.driver.types.Relationship; @Slf4j @@ -62,9 +65,6 @@ public class Neo4jGraphService implements GraphService { private final Driver _driver; private SessionConfig _sessionConfig; - private static final String SOURCE = "source"; - private static final String UI = "UI"; - public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driver driver) { this(lineageRegistry, driver, SessionConfig.defaultConfig()); } @@ -234,53 +234,36 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi @Nullable Long endTimeMillis) { log.debug(String.format("Neo4j getLineage maxHops = %d", maxHops)); - final String statement = - generateLineageStatement(entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); + final var statementAndParams = + generateLineageStatementAndParameters(entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); + + final var statement = statementAndParams.getFirst(); + final var parameters = statementAndParams.getSecond(); List neo4jResult = - statement != null ? 
runQuery(buildStatement(statement, new HashMap<>())).list() : new ArrayList<>(); - - // It is possible to have more than 1 path from node A to node B in the graph and previous query returns all the paths. - // We convert the List into Map with only the shortest paths. "item.get(i).size()" is the path size between two nodes in relation. - // The key for mapping is the destination node as the source node is always the same, and it is defined by parameter. - neo4jResult = neo4jResult.stream() - .collect(Collectors.toMap(item -> item.values().get(2).asNode().get("urn").asString(), Function.identity(), - (item1, item2) -> item1.get(1).size() < item2.get(1).size() ? item1 : item2)) - .values() - .stream() - .collect(Collectors.toList()); + statement != null ? runQuery(buildStatement(statement, parameters)).list() : new ArrayList<>(); LineageRelationshipArray relations = new LineageRelationshipArray(); neo4jResult.stream().skip(offset).limit(count).forEach(item -> { String urn = item.values().get(2).asNode().get("urn").asString(); - String relationType = ((InternalRelationship) item.get(1).asList().get(0)).type().split("r_")[1]; - int numHops = item.get(1).size(); try { - // Generate path from r in neo4jResult - List pathFromRelationships = - item.values().get(1).asList(Collections.singletonList(new ArrayList())).stream().map(t -> createFromString( - // Get real upstream node/downstream node by direction - ((InternalRelationship) t).get(direction == LineageDirection.UPSTREAM ? "startUrn" : "endUrn") - .asString())).collect(Collectors.toList()); - if (direction == LineageDirection.UPSTREAM) { - // For ui to show path correctly, reverse path for UPSTREAM direction - Collections.reverse(pathFromRelationships); - // Add missing original node to the end since we generate path from relationships - pathFromRelationships.add(Urn.createFromString(item.values().get(0).asNode().get("urn").asString())); - } else { - // Add missing original node to the beginning since we generate path from relationships - pathFromRelationships.add(0, Urn.createFromString(item.values().get(0).asNode().get("urn").asString())); - } + final var path = item.get(1).asPath(); + final List nodeListAsPath = StreamSupport.stream( + path.nodes().spliterator(), false) + .map(node -> createFromString(node.get("urn").asString())) + .collect(Collectors.toList()); + + final var firstRelationship = Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); relations.add(new LineageRelationship().setEntity(Urn.createFromString(urn)) - .setType(relationType) - .setDegree(numHops) - .setPaths(new UrnArrayArray(new UrnArray(pathFromRelationships)))); + // although firstRelationship should never be absent, provide "" as fallback value + .setType(firstRelationship.map(Relationship::type).orElse("")) + .setDegree(path.length()) + .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); } catch (URISyntaxException ignored) { log.warn(String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); } }); - EntityLineageResult result = new EntityLineageResult().setStart(offset) .setCount(relations.size()) .setRelationships(relations) @@ -290,31 +273,104 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi return result; } - private String generateLineageStatement(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - String statement; - final String allowedEntityTypes = 
String.join(" OR b:", graphFilters.getAllowedEntityTypes()); - - final String multiHopMatchTemplateIndirect = "MATCH p = shortestPath((a {urn: '%s'})<-[r*1..%d]-(b)) "; - final String multiHopMatchTemplateDirect = "MATCH p = shortestPath((a {urn: '%s'})-[r*1..%d]->(b)) "; - // directionFilterTemplate should apply to all condition. - final String multiHopMatchTemplate = - direction == LineageDirection.UPSTREAM ? multiHopMatchTemplateIndirect : multiHopMatchTemplateDirect; - final String fullQueryTemplate = generateFullQueryTemplate(multiHopMatchTemplate, startTimeMillis, endTimeMillis); - - if (startTimeMillis != null && endTimeMillis != null) { - statement = - String.format(fullQueryTemplate, startTimeMillis, endTimeMillis, entityUrn, maxHops, allowedEntityTypes, - entityUrn); - } else if (startTimeMillis != null) { - statement = String.format(fullQueryTemplate, startTimeMillis, entityUrn, maxHops, allowedEntityTypes, entityUrn); - } else if (endTimeMillis != null) { - statement = String.format(fullQueryTemplate, endTimeMillis, entityUrn, maxHops, allowedEntityTypes, entityUrn); + private String getPathFindingLabelFilter(List entityNames) { + return entityNames.stream().map(x -> String.format("+%s", x)).collect(Collectors.joining("|")); + } + + private String getPathFindingRelationshipFilter(@Nonnull List entityNames, @Nullable LineageDirection direction) { + // relationshipFilter supports mixing different directions for various relation types, + // so simply transform entries lineage registry into format of filter + final var filterComponents = new HashSet(); + for (final var entityName : entityNames) { + if (direction != null) { + for (final var edgeInfo : _lineageRegistry.getLineageRelationships(entityName, direction)) { + final var type = edgeInfo.getType(); + if (edgeInfo.getDirection() == RelationshipDirection.INCOMING) { + filterComponents.add("<" + type); + } else { + filterComponents.add(type + ">"); + } + } + } else { + // return disjunctive combination of edge types regardless of direction + for (final var direction1 : List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) { + for (final var edgeInfo : _lineageRegistry.getLineageRelationships(entityName, direction1)) { + filterComponents.add(edgeInfo.getType()); + } + } + } + } + return String.join("|", filterComponents); + } + + private Pair> generateLineageStatementAndParameters( + @Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + GraphFilters graphFilters, int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + + final var parameterMap = new HashMap(Map.of( + "urn", entityUrn.toString(), + "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), + "relationshipFilter", getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), direction), + "maxHops", maxHops + )); + + if (startTimeMillis == null && endTimeMillis == null) { + // if no time filtering required, simply find all expansion paths to other nodes + final var statement = "MATCH (a {urn: $urn}) " + + "CALL apoc.path.spanningTree(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD path " + + "WITH a, path AS path " + + "RETURN a, path, last(nodes(path));"; + return Pair.of(statement, parameterMap); } else { - statement = String.format(fullQueryTemplate, entityUrn, maxHops, allowedEntityTypes, entityUrn); + // when needing time filtering, possibility on multiple paths between two + 
// nodes must be considered, and we need to construct more complex query + + // use r_ edges until they are no longer useful + final var relationFilter = getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) + .replaceAll("(\\w+)", "r_$1"); + final var relationshipPattern = + String.format( + (direction == LineageDirection.UPSTREAM ? "<-[:%s*1..%d]-" : "-[:%s*1..%d]->"), + relationFilter, maxHops); + + // two steps: + // 1. find list of nodes reachable within maxHops + // 2. find the shortest paths from start node to every other node in these nodes + // (note: according to the docs of shortestPath, WHERE conditions are applied during path exploration, not + // after path exploration is done) + final var statement = "MATCH (a {urn: $urn}) " + + "CALL apoc.path.subgraphNodes(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD node AS b " + + "WITH a, b " + + "MATCH path = shortestPath((a)" + relationshipPattern + "(b)) " + + "WHERE a <> b " + + " AND ALL(rt IN relationships(path) WHERE " + + " (EXISTS(rt.source) AND rt.source = 'UI') OR " + + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " + + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " + + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " + + " ) " + + "RETURN a, path, b;"; + + // provide dummy start/end time when not provided, so no need to + // format clause differently if either of them is missing + parameterMap.put("startTimeMillis", startTimeMillis == null ? 0 : startTimeMillis); + parameterMap.put("endTimeMillis", endTimeMillis == null ? System.currentTimeMillis() : endTimeMillis); + + return Pair.of(statement, parameterMap); } - - return statement; } @Nonnull @@ -583,15 +639,6 @@ private Result runQuery(@Nonnull Statement statement) { } } - @Nonnull - private static String toCriterionWhereString(@Nonnull String key, @Nonnull Object value) { - if (ClassUtils.isPrimitiveOrWrapper(value.getClass())) { - return key + " = " + value; - } - - return key + " = \"" + value.toString() + "\""; - } - // Returns "key:value" String, if value is not primitive, then use toString() and double quote it @Nonnull private static String toCriterionString(@Nonnull String key, @Nonnull Object value) { @@ -715,44 +762,4 @@ Urn createFromString(@Nonnull String rawUrn) { return null; } } - - private String generateFullQueryTemplate(@Nonnull String multiHopMatchTemplate, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - final String sourceUiCheck = String.format("(EXISTS(rt.%s) AND rt.%s = '%s') ", SOURCE, SOURCE, UI); - final String whereTemplate = "WHERE (b:%s) AND b.urn <> '%s' "; - final String returnTemplate = "RETURN a,r,b"; - String withTimeTemplate = ""; - String timeFilterConditionTemplate = "AND ALL(rt IN relationships(p) WHERE left(type(rt), 2)='r_')"; - - if (startTimeMillis != null && endTimeMillis != null) { - withTimeTemplate = "WITH %d as startTimeMillis, %d as endTimeMillis "; - timeFilterConditionTemplate = - "AND ALL(rt IN relationships(p) WHERE " + sourceUiCheck + "OR " - + "(NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " - + "((rt.createdOn >= startTimeMillis AND rt.createdOn <= endTimeMillis) OR " - + "(rt.updatedOn >= startTimeMillis AND rt.updatedOn <= endTimeMillis))) " - + "AND ALL(rt IN relationships(p) WHERE left(type(rt), 2)='r_')"; - } else if (startTimeMillis != null) { - withTimeTemplate = "WITH %d as startTimeMillis "; - 
timeFilterConditionTemplate = - "AND ALL(rt IN relationships(p) WHERE " + sourceUiCheck + "OR " - + "(NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " - + "(rt.createdOn >= startTimeMillis OR rt.updatedOn >= startTimeMillis)) " - + "AND ALL(rt IN relationships(p) WHERE left(type(rt), 2)='r_')"; - } else if (endTimeMillis != null) { - withTimeTemplate = "WITH %d as endTimeMillis "; - timeFilterConditionTemplate = - "AND ALL(rt IN relationships(p) WHERE " + sourceUiCheck + "OR " - + "(NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " - + "(rt.createdOn <= endTimeMillis OR rt.updatedOn <= endTimeMillis)) " - + "AND ALL(rt IN relationships(p) WHERE left(type(rt), 2)='r_')"; - } - final StringJoiner fullQueryTemplateJoiner = new StringJoiner(" "); - fullQueryTemplateJoiner.add(withTimeTemplate); - fullQueryTemplateJoiner.add(multiHopMatchTemplate); - fullQueryTemplateJoiner.add(whereTemplate); - fullQueryTemplateJoiner.add(timeFilterConditionTemplate); - fullQueryTemplateJoiner.add(returnTemplate); - - return fullQueryTemplateJoiner.toString(); - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java index d36f05cfb039d..6f63209f9c380 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.graph.neo4j; import com.linkedin.common.FabricType; +import com.linkedin.common.UrnArray; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.TagUrn; @@ -17,6 +18,7 @@ import com.linkedin.metadata.query.filter.RelationshipFilter; import java.util.Arrays; import java.util.Collections; + import org.neo4j.driver.Driver; import org.neo4j.driver.GraphDatabase; import org.testng.SkipException; @@ -29,6 +31,8 @@ import java.util.Comparator; import java.util.HashSet; import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; import static com.linkedin.metadata.search.utils.QueryUtils.*; import static org.testng.Assert.assertEquals; @@ -194,11 +198,82 @@ public void testRemoveEdge() throws Exception { assertEquals(result.getTotal(), 0); } + private Set getPathUrnArraysFromLineageResult(EntityLineageResult result) { + return result.getRelationships() + .stream() + .map(x -> x.getPaths().get(0)) + .collect(Collectors.toSet()); + } + + @Test + public void testGetLineage() { + GraphService service = getGraphService(); + + List edges = Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), + + // another path between d2 and d5 which is shorter + // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 + new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, null, null), + new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null) + ); + edges.forEach(service::addEdge); + + // simple path finding + final var upstreamLineageDataset3Hop3 = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 
3); + assertEquals(upstreamLineageDataset3Hop3.getTotal().intValue(), 3); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageDataset3Hop3), + Set.of( + new UrnArray(datasetThreeUrn, datasetTwoUrn), + new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn), + new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); + + // simple path finding + final var upstreamLineageDatasetFiveHop2 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineageDatasetFiveHop2.getTotal().intValue(), 4); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageDatasetFiveHop2), + Set.of( + new UrnArray(datasetFiveUrn, datasetThreeUrn), + new UrnArray(datasetFiveUrn, datasetThreeUrn, datasetTwoUrn), + new UrnArray(datasetFiveUrn, datasetFourUrn), + new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); + + // there are two paths from p5 to p1, one longer and one shorter, and the longer one is discarded from result + final var upstreamLineageDataset5Hop5 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); + assertEquals(upstreamLineageDataset5Hop5.getTotal().intValue(), 5); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageDataset5Hop5), + Set.of( + new UrnArray(datasetFiveUrn, datasetThreeUrn), + new UrnArray(datasetFiveUrn, datasetThreeUrn, datasetTwoUrn), + new UrnArray(datasetFiveUrn, datasetThreeUrn, datasetTwoUrn, dataJobOneUrn), + new UrnArray(datasetFiveUrn, datasetFourUrn), + new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); + + // downstream lookup + final var downstreamLineageDataset1Hop2 = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + assertEquals(downstreamLineageDataset1Hop2.getTotal().intValue(), 4); + assertEquals( + getPathUrnArraysFromLineageResult(downstreamLineageDataset1Hop2), + Set.of( + new UrnArray(datasetOneUrn, dataJobOneUrn), + new UrnArray(datasetOneUrn, dataJobOneUrn, datasetTwoUrn), + new UrnArray(datasetOneUrn, datasetFourUrn), + new UrnArray(datasetOneUrn, datasetFourUrn, datasetFiveUrn))); + } + @Test public void testGetLineageTimeFilterQuery() throws Exception { GraphService service = getGraphService(); List edges = Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), @@ -206,21 +281,76 @@ public void testGetLineageTimeFilterQuery() throws Exception { ); edges.forEach(service::addEdge); + // no time filtering EntityLineageResult upstreamLineageTwoHops = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); assertEquals(upstreamLineageTwoHops.getTotal().intValue(), 2); assertEquals(upstreamLineageTwoHops.getRelationships().size(), 2); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageTwoHops), + Set.of( + new UrnArray(datasetFourUrn, datasetThreeUrn), + new UrnArray(datasetFourUrn, datasetThreeUrn, datasetTwoUrn))); + // with time filtering EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getTotal().intValue(), 1); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getRelationships().size(), 1); + assertEquals( + 
getPathUrnArraysFromLineageResult(upstreamLineageTwoHopsWithTimeFilter), + Set.of( + new UrnArray(datasetFourUrn, datasetThreeUrn))); + // with time filtering EntityLineageResult upstreamLineageTimeFilter = service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); assertEquals(upstreamLineageTimeFilter.getTotal().intValue(), 2); assertEquals(upstreamLineageTimeFilter.getRelationships().size(), 2); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageTimeFilter), + Set.of( + new UrnArray(datasetTwoUrn, dataJobOneUrn), + new UrnArray(datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); + // with time filtering EntityLineageResult downstreamLineageTimeFilter = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); assertEquals(downstreamLineageTimeFilter.getTotal().intValue(), 1); assertEquals(downstreamLineageTimeFilter.getRelationships().size(), 1); + assertEquals( + getPathUrnArraysFromLineageResult(downstreamLineageTimeFilter), + Set.of( + new UrnArray(datasetOneUrn, dataJobOneUrn))); + } + + @Test + public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { + GraphService service = getGraphService(); + List edges = Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + + // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) + new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null) + ); + edges.forEach(service::addEdge); + + // no time filtering, shorter path from d3 to d1 is returned + EntityLineageResult upstreamLineageNoTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageNoTimeFiltering), + Set.of( + new UrnArray(datasetThreeUrn, datasetTwoUrn), + new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn), + new UrnArray(datasetThreeUrn, datasetOneUrn))); + + // with time filtering, shorter path from d3 to d1 is excluded so longer path is returned + EntityLineageResult upstreamLineageTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); + assertEquals( + getPathUrnArraysFromLineageResult(upstreamLineageTimeFiltering), + Set.of( + new UrnArray(datasetThreeUrn, datasetTwoUrn), + new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn), + new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java index 4d6d15255b922..ba4e4cec37914 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java @@ -2,6 +2,8 @@ import java.io.File; import java.net.URI; + +import apoc.path.PathExplorer; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.harness.Neo4j; import org.neo4j.harness.Neo4jBuilder; @@ -17,7 +19,9 @@ private Neo4jTestServerBuilder(Neo4jBuilder builder) { } public Neo4jTestServerBuilder() { - this(new InProcessNeo4jBuilder()); + this(new InProcessNeo4jBuilder() + .withProcedure(PathExplorer.class) 
+ ); } public Neo4jTestServerBuilder(File workingDirectory) { From b851d59e208d6f1f9c33f90d43d49933f6e557be Mon Sep 17 00:00:00 2001 From: purnimagarg1 <139125209+purnimagarg1@users.noreply.github.com> Date: Fri, 10 Nov 2023 22:52:04 +0530 Subject: [PATCH 088/792] Managed Ingestion UX Improvements (#9216) --- .../source/IngestionSourceTableColumns.tsx | 12 ++++-- .../ExecutionRequestDetailsModal.tsx | 38 ++++++++++++++----- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/datahub-web-react/src/app/ingest/source/IngestionSourceTableColumns.tsx b/datahub-web-react/src/app/ingest/source/IngestionSourceTableColumns.tsx index c47d08d5b6003..155e75f1895f5 100644 --- a/datahub-web-react/src/app/ingest/source/IngestionSourceTableColumns.tsx +++ b/datahub-web-react/src/app/ingest/source/IngestionSourceTableColumns.tsx @@ -61,6 +61,14 @@ const CliBadge = styled.span` margin-right: 5px; } `; +const StatusText = styled(Typography.Text)` + font-weight: bold; + margin-left: 8px; + color: ${(props) => props.color}; + &:hover { + text-decoration: underline; + }, +`; interface TypeColumnProps { type: string; record: any; @@ -124,9 +132,7 @@ export function LastStatusColumn({ status, record, setFocusExecutionUrn }: LastS type="link" onClick={() => setFocusExecutionUrn(record.lastExecUrn)} > - - {text || 'Pending...'} - + {text || 'Pending...'} ); diff --git a/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx b/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx index 00fdc89964f88..96dfc05e39153 100644 --- a/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx +++ b/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx @@ -83,6 +83,17 @@ const ShowMoreButton = styled(Button)` padding: 0px; `; +const LogsContainer = styled.div` + margin-bottom: -25px; + ${(props) => + props.areLogsExpandable && + !props.showExpandedLogs && + ` + -webkit-mask-image: linear-gradient(to bottom, rgba(0,0,0,1) 50%, rgba(255,0,0,0.5) 60%, rgba(255,0,0,0) 90% ); + mask-image: linear-gradient(to bottom, rgba(0,0,0,1) 50%, rgba(255,0,0,0.5) 60%, rgba(255,0,0,0) 90%); + `} +`; + const modalStyle = { top: 100, }; @@ -91,6 +102,11 @@ const modalBodyStyle = { padding: 0, }; +type LogsContainerProps = { + showExpandedLogs: boolean; + areLogsExpandable: boolean; +}; + type Props = { urn: string; visible: boolean; @@ -108,7 +124,7 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { downloadFile(output, `exec-${urn}.log`); }; - const logs = (showExpandedLogs && output) || output.slice(0, 100); + const logs = (showExpandedLogs && output) || output.slice(0, 250); const result = data?.executionRequest?.result?.status; useEffect(() => { @@ -140,7 +156,7 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { } const recipe = showExpandedRecipe ? recipeYaml : recipeYaml?.split('\n').slice(0, 1).join('\n'); - const areLogsExpandable = output.length > 100; + const areLogsExpandable = output.length > 250; const isRecipeExpandable = recipeYaml?.includes('\n'); return ( @@ -181,14 +197,16 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { Download - -
{`${logs}${!showExpandedLogs && areLogsExpandable ? '...' : ''}`}
- {areLogsExpandable && ( - setShowExpandedLogs(!showExpandedLogs)}> - {showExpandedLogs ? 'Hide' : 'Show More'} - - )} -
+ + +
{`${logs}${!showExpandedLogs && areLogsExpandable ? '...' : ''}`}
+
+
+ {areLogsExpandable && ( + setShowExpandedLogs(!showExpandedLogs)}> + {showExpandedLogs ? 'Hide' : 'Show More'} + + )} {recipe && ( From 89dff8f7bddee15d578170f5c1db586c628cabf4 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 10 Nov 2023 09:34:08 -0800 Subject: [PATCH 089/792] chore(ingest): start working on pydantic v2 support (#9220) --- metadata-ingestion/scripts/avro_codegen.py | 2 +- metadata-ingestion/setup.cfg | 1 + .../src/datahub/cli/cli_utils.py | 2 +- .../src/datahub/cli/lite_cli.py | 2 +- .../src/datahub/configuration/_config_enum.py | 26 ++++-- .../src/datahub/configuration/common.py | 27 ++++-- .../src/datahub/configuration/oauth.py | 6 +- .../pydantic_migration_helpers.py | 30 +++++++ .../src/datahub/emitter/mcp_builder.py | 2 +- .../ingestion/extractor/mce_extractor.py | 6 +- .../datahub/ingestion/run/pipeline_config.py | 2 +- .../source/bigquery_v2/bigquery_config.py | 2 +- .../source/data_lake_common/path_spec.py | 2 +- .../ingestion/source/datahub/config.py | 2 +- .../datahub/ingestion/source/dbt/dbt_cloud.py | 1 + .../ingestion/source/dbt/dbt_common.py | 6 +- .../src/datahub/ingestion/source/kafka.py | 4 +- .../ingestion/source/looker/lookml_source.py | 7 +- .../src/datahub/ingestion/source/nifi.py | 4 +- .../ingestion/source/powerbi/config.py | 5 +- .../report_server_domain.py | 84 ++++++++++--------- .../ingestion/source/redshift/config.py | 6 +- .../src/datahub/ingestion/source/s3/config.py | 2 +- .../source/s3/datalake_profiler_config.py | 2 +- .../datahub/ingestion/source/salesforce.py | 2 +- .../source/snowflake/snowflake_usage_v2.py | 20 ++--- .../ingestion/source/sql/clickhouse.py | 11 +-- .../src/datahub/ingestion/source/sql/druid.py | 2 +- .../src/datahub/ingestion/source/sql/hive.py | 2 +- .../src/datahub/ingestion/source/sql/mysql.py | 4 +- .../datahub/ingestion/source/sql/postgres.py | 6 +- .../datahub/ingestion/source/sql/presto.py | 2 +- .../datahub/ingestion/source/sql/redshift.py | 2 +- .../ingestion/source/sql/sql_config.py | 2 +- .../datahub/ingestion/source/sql/teradata.py | 2 +- .../src/datahub/ingestion/source/sql/trino.py | 2 +- .../source/state/stateful_ingestion_base.py | 10 +-- .../src/datahub/ingestion/source/superset.py | 2 +- .../datahub/ingestion/source/unity/config.py | 2 +- .../ingestion/source_config/sql/snowflake.py | 3 +- .../source_config/usage/bigquery_usage.py | 2 +- .../src/datahub/upgrade/upgrade.py | 8 +- .../datahub/utilities/lossy_collections.py | 12 +++ .../src/datahub/utilities/sqlglot_lineage.py | 26 +++--- 44 files changed, 216 insertions(+), 139 deletions(-) create mode 100644 metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py diff --git a/metadata-ingestion/scripts/avro_codegen.py b/metadata-ingestion/scripts/avro_codegen.py index 021ebd4a31eb3..de8836559217b 100644 --- a/metadata-ingestion/scripts/avro_codegen.py +++ b/metadata-ingestion/scripts/avro_codegen.py @@ -192,7 +192,7 @@ def add_avro_python3_warning(filepath: Path) -> None: # This means that installation order matters, which is a pretty unintuitive outcome. # See https://github.com/pypa/pip/issues/4625 for details. try: - from avro.schema import SchemaFromJSONData + from avro.schema import SchemaFromJSONData # type: ignore import warnings warnings.warn("It seems like 'avro-python3' is installed, which conflicts with the 'avro' package used by datahub. 
" diff --git a/metadata-ingestion/setup.cfg b/metadata-ingestion/setup.cfg index 8b78e4d3c9c6f..b3fc53ccfaf58 100644 --- a/metadata-ingestion/setup.cfg +++ b/metadata-ingestion/setup.cfg @@ -88,6 +88,7 @@ filterwarnings = ignore:Deprecated call to \`pkg_resources.declare_namespace:DeprecationWarning ignore:pkg_resources is deprecated as an API:DeprecationWarning ignore:Did not recognize type:sqlalchemy.exc.SAWarning + ignore::datahub.configuration.pydantic_migration_helpers.PydanticDeprecatedSince20 [coverage:run] # Because of some quirks in the way setup.cfg, coverage.py, pytest-cov, diff --git a/metadata-ingestion/src/datahub/cli/cli_utils.py b/metadata-ingestion/src/datahub/cli/cli_utils.py index a7ea5b4f65785..8ac9a101121be 100644 --- a/metadata-ingestion/src/datahub/cli/cli_utils.py +++ b/metadata-ingestion/src/datahub/cli/cli_utils.py @@ -47,7 +47,7 @@ class GmsConfig(BaseModel): server: str - token: Optional[str] + token: Optional[str] = None class DatahubConfig(BaseModel): diff --git a/metadata-ingestion/src/datahub/cli/lite_cli.py b/metadata-ingestion/src/datahub/cli/lite_cli.py index b49284bb627f2..8636187a51d09 100644 --- a/metadata-ingestion/src/datahub/cli/lite_cli.py +++ b/metadata-ingestion/src/datahub/cli/lite_cli.py @@ -40,7 +40,7 @@ class DuckDBLiteConfigWrapper(DuckDBLiteConfig): class LiteCliConfig(DatahubConfig): lite: LiteLocalConfig = LiteLocalConfig( - type="duckdb", config=DuckDBLiteConfigWrapper() + type="duckdb", config=DuckDBLiteConfigWrapper().dict() ) diff --git a/metadata-ingestion/src/datahub/configuration/_config_enum.py b/metadata-ingestion/src/datahub/configuration/_config_enum.py index b4fb93dae4439..190a006b077d9 100644 --- a/metadata-ingestion/src/datahub/configuration/_config_enum.py +++ b/metadata-ingestion/src/datahub/configuration/_config_enum.py @@ -4,6 +4,8 @@ import pydantic.types import pydantic.validators +from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2 + class ConfigEnum(Enum): # Ideally we would use @staticmethod here, but some versions of Python don't support it. @@ -15,11 +17,25 @@ def _generate_next_value_( # type: ignore # From https://stackoverflow.com/a/44785241/5004662. return name - @classmethod - def __get_validators__(cls) -> "pydantic.types.CallableGenerator": - # We convert the text to uppercase before attempting to match it to an enum value. - yield cls.validate - yield pydantic.validators.enum_member_validator + if PYDANTIC_VERSION_2: + # if TYPE_CHECKING: + # from pydantic import GetCoreSchemaHandler + + @classmethod + def __get_pydantic_core_schema__(cls, source_type, handler): # type: ignore + from pydantic_core import core_schema + + return core_schema.no_info_before_validator_function( + cls.validate, handler(source_type) + ) + + else: + + @classmethod + def __get_validators__(cls) -> "pydantic.types.CallableGenerator": + # We convert the text to uppercase before attempting to match it to an enum value. 
+ yield cls.validate + yield pydantic.validators.enum_member_validator @classmethod def validate(cls, v): # type: ignore[no-untyped-def] diff --git a/metadata-ingestion/src/datahub/configuration/common.py b/metadata-ingestion/src/datahub/configuration/common.py index 73ac4baac48c0..f225856ca43ce 100644 --- a/metadata-ingestion/src/datahub/configuration/common.py +++ b/metadata-ingestion/src/datahub/configuration/common.py @@ -11,6 +11,7 @@ from typing_extensions import Protocol, runtime_checkable from datahub.configuration._config_enum import ConfigEnum +from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2 from datahub.utilities.dedup_list import deduplicate_list _ConfigSelf = TypeVar("_ConfigSelf", bound="ConfigModel") @@ -71,14 +72,8 @@ def redact_raw_config(obj: Any) -> Any: class ConfigModel(BaseModel): class Config: - extra = Extra.forbid - underscore_attrs_are_private = True - keep_untouched = ( - cached_property, - ) # needed to allow cached_property to work. See https://github.com/samuelcolvin/pydantic/issues/1241 for more info. - @staticmethod - def schema_extra(schema: Dict[str, Any], model: Type["ConfigModel"]) -> None: + def _schema_extra(schema: Dict[str, Any], model: Type["ConfigModel"]) -> None: # We use the custom "hidden_from_docs" attribute to hide fields from the # autogenerated docs. remove_fields = [] @@ -89,6 +84,19 @@ def schema_extra(schema: Dict[str, Any], model: Type["ConfigModel"]) -> None: for key in remove_fields: del schema["properties"][key] + # This is purely to suppress pydantic's warnings, since this class is used everywhere. + if PYDANTIC_VERSION_2: + extra = "forbid" + ignored_types = (cached_property,) + json_schema_extra = _schema_extra + else: + extra = Extra.forbid + underscore_attrs_are_private = True + keep_untouched = ( + cached_property, + ) # needed to allow cached_property to work. See https://github.com/samuelcolvin/pydantic/issues/1241 for more info. + schema_extra = _schema_extra + @classmethod def parse_obj_allow_extras(cls: Type[_ConfigSelf], obj: Any) -> _ConfigSelf: with unittest.mock.patch.object(cls.Config, "extra", pydantic.Extra.allow): @@ -102,7 +110,10 @@ class PermissiveConfigModel(ConfigModel): # It is usually used for argument bags that are passed through to third-party libraries. 
class Config: - extra = Extra.allow + if PYDANTIC_VERSION_2: + extra = "allow" + else: + extra = Extra.allow class TransformerSemantics(ConfigEnum): diff --git a/metadata-ingestion/src/datahub/configuration/oauth.py b/metadata-ingestion/src/datahub/configuration/oauth.py index 9a1ddbf437913..61a06580299db 100644 --- a/metadata-ingestion/src/datahub/configuration/oauth.py +++ b/metadata-ingestion/src/datahub/configuration/oauth.py @@ -24,11 +24,11 @@ class OAuthConfiguration(ConfigModel): default=False, ) client_secret: Optional[SecretStr] = Field( - description="client secret of the application if use_certificate = false" + None, description="client secret of the application if use_certificate = false" ) encoded_oauth_public_key: Optional[str] = Field( - description="base64 encoded certificate content if use_certificate = true" + None, description="base64 encoded certificate content if use_certificate = true" ) encoded_oauth_private_key: Optional[str] = Field( - description="base64 encoded private key content if use_certificate = true" + None, description="base64 encoded private key content if use_certificate = true" ) diff --git a/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py b/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py new file mode 100644 index 0000000000000..f1876b500598b --- /dev/null +++ b/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py @@ -0,0 +1,30 @@ +import pydantic.version +from packaging.version import Version + +PYDANTIC_VERSION_2: bool +if Version(pydantic.version.VERSION) >= Version("2.0"): + PYDANTIC_VERSION_2 = True +else: + PYDANTIC_VERSION_2 = False + + +# This can be used to silence deprecation warnings while we migrate. +if PYDANTIC_VERSION_2: + from pydantic import PydanticDeprecatedSince20 # type: ignore +else: + + class PydanticDeprecatedSince20(Warning): # type: ignore + pass + + +if PYDANTIC_VERSION_2: + from pydantic import BaseModel as GenericModel +else: + from pydantic.generics import GenericModel # type: ignore + + +__all__ = [ + "PYDANTIC_VERSION_2", + "PydanticDeprecatedSince20", + "GenericModel", +] diff --git a/metadata-ingestion/src/datahub/emitter/mcp_builder.py b/metadata-ingestion/src/datahub/emitter/mcp_builder.py index d50feba8b119c..a7578e39374ac 100644 --- a/metadata-ingestion/src/datahub/emitter/mcp_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mcp_builder.py @@ -127,7 +127,7 @@ class BucketKey(ContainerKey): class NotebookKey(DatahubKey): notebook_id: int platform: str - instance: Optional[str] + instance: Optional[str] = None def as_urn(self) -> str: return make_dataset_urn_with_platform_instance( diff --git a/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py b/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py index 36450dda153d7..7ad68c0fcf8ea 100644 --- a/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py +++ b/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py @@ -26,11 +26,11 @@ def _try_reformat_with_black(code: str) -> str: class WorkUnitRecordExtractorConfig(ConfigModel): - set_system_metadata = True - set_system_metadata_pipeline_name = ( + set_system_metadata: bool = True + set_system_metadata_pipeline_name: bool = ( False # false for now until the models are available in OSS ) - unpack_mces_into_mcps = False + unpack_mces_into_mcps: bool = False class WorkUnitRecordExtractor( diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py 
b/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py index da3cee8ad9c1b..f22f94c9e9351 100644 --- a/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py +++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py @@ -72,7 +72,7 @@ class PipelineConfig(ConfigModel): source: SourceConfig sink: DynamicTypedConfig - transformers: Optional[List[DynamicTypedConfig]] + transformers: Optional[List[DynamicTypedConfig]] = None flags: FlagsConfig = Field(default=FlagsConfig(), hidden_from_docs=True) reporting: List[ReporterConfig] = [] run_id: str = DEFAULT_RUN_ID diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index f762d451849ab..cbe68a454ea43 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -265,7 +265,7 @@ def validate_column_lineage(cls, v: bool, values: Dict[str, Any]) -> bool: description="Option to exclude empty projects from being ingested.", ) - @root_validator(pre=False) + @root_validator(skip_on_failure=True) def profile_default_settings(cls, values: Dict) -> Dict: # Extra default SQLAlchemy option for better connection pooling and threading. # https://docs.sqlalchemy.org/en/14/core/pooling.html#sqlalchemy.pool.QueuePool.params.max_overflow diff --git a/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py b/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py index a35fb94614f72..05b1b6b7cc040 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py +++ b/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py @@ -214,7 +214,7 @@ def glob_include(self): logger.debug(f"Setting _glob_include: {glob_include}") return glob_include - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def validate_path_spec(cls, values: Dict) -> Dict[str, Any]: # validate that main fields are populated required_fields = ["include", "file_types", "default_extension"] diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py index 83958dc76754f..a2bd6fd1e5558 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py @@ -80,7 +80,7 @@ class DataHubSourceConfig(StatefulIngestionConfigBase): hidden_from_docs=True, ) - @root_validator + @root_validator(skip_on_failure=True) def check_ingesting_data(cls, values): if ( not values.get("database_connection") diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py index da1ea8ecb4678..a9685b2554553 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py @@ -46,6 +46,7 @@ class DBTCloudConfig(DBTCommonConfig): description="The ID of the job to ingest metadata from.", ) run_id: Optional[int] = Field( + None, description="The ID of the run to ingest metadata from. 
If not specified, we'll default to the latest run.", ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index c4de24bf192f1..76cb82aaa5b4b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -150,7 +150,7 @@ class DBTEntitiesEnabled(ConfigModel): description="Emit metadata for test results when set to Yes or Only", ) - @root_validator + @root_validator(skip_on_failure=True) def process_only_directive(cls, values): # Checks that at most one is set to ONLY, and then sets the others to NO. @@ -229,7 +229,7 @@ class DBTCommonConfig( default={}, description="mapping rules that will be executed against dbt column meta properties. Refer to the section below on dbt meta automated mappings.", ) - enable_meta_mapping = Field( + enable_meta_mapping: bool = Field( default=True, description="When enabled, applies the mappings that are defined through the meta_mapping directives.", ) @@ -237,7 +237,7 @@ class DBTCommonConfig( default={}, description="mapping rules that will be executed against dbt query_tag meta properties. Refer to the section below on dbt meta automated mappings.", ) - enable_query_tag_mapping = Field( + enable_query_tag_mapping: bool = Field( default=True, description="When enabled, applies the mappings that are defined through the `query_tag_mapping` directives.", ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/kafka.py b/metadata-ingestion/src/datahub/ingestion/source/kafka.py index 23770ff3cf812..25520e7aa66ff 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/kafka.py +++ b/metadata-ingestion/src/datahub/ingestion/source/kafka.py @@ -100,11 +100,11 @@ class KafkaSourceConfig( default="datahub.ingestion.source.confluent_schema_registry.ConfluentSchemaRegistry", description="The fully qualified implementation class(custom) that implements the KafkaSchemaRegistryBase interface.", ) - schema_tags_field = pydantic.Field( + schema_tags_field: str = pydantic.Field( default="tags", description="The field name in the schema metadata that contains the tags to be added to the dataset.", ) - enable_meta_mapping = pydantic.Field( + enable_meta_mapping: bool = pydantic.Field( default=True, description="When enabled, applies the mappings that are defined through the meta_mapping directives.", ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index e6b78cc7a7745..9d7c972612777 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -275,7 +275,7 @@ def convert_string_to_connection_def(cls, conn_map): ) return conn_map - @root_validator() + @root_validator(skip_on_failure=True) def check_either_connection_map_or_connection_provided(cls, values): """Validate that we must either have a connection map or an api credential""" if not values.get("connection_to_platform_map", {}) and not values.get( @@ -286,7 +286,7 @@ def check_either_connection_map_or_connection_provided(cls, values): ) return values - @root_validator() + @root_validator(skip_on_failure=True) def check_either_project_name_or_api_provided(cls, values): """Validate that we must either have a project name or an api credential to fetch project names""" if not values.get("project_name") and not 
values.get("api"): @@ -1070,7 +1070,6 @@ def _get_fields( def determine_view_file_path( cls, base_folder_path: str, absolute_file_path: str ) -> str: - splits: List[str] = absolute_file_path.split(base_folder_path, 1) if len(splits) != 2: logger.debug( @@ -1104,7 +1103,6 @@ def from_looker_dict( populate_sql_logic_in_descriptions: bool = False, process_isolation_for_sql_parsing: bool = False, ) -> Optional["LookerView"]: - view_name = looker_view["name"] logger.debug(f"Handling view {view_name} in model {model_name}") # The sql_table_name might be defined in another view and this view is extending that view, @@ -2087,7 +2085,6 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 ) if looker_viewfile is not None: - for raw_view in looker_viewfile.views: raw_view_name = raw_view["name"] if LookerRefinementResolver.is_refinement(raw_view_name): diff --git a/metadata-ingestion/src/datahub/ingestion/source/nifi.py b/metadata-ingestion/src/datahub/ingestion/source/nifi.py index bc05edbb3c623..ab418b1705956 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/nifi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/nifi.py @@ -126,7 +126,7 @@ class NifiSourceConfig(EnvConfigMixin): description="Path to PEM file containing certs for the root CA(s) for the NiFi", ) - @root_validator + @root_validator(skip_on_failure=True) def validate_auth_params(cla, values): if values.get("auth") is NifiAuthType.CLIENT_CERT and not values.get( "client_cert_file" @@ -143,7 +143,7 @@ def validate_auth_params(cla, values): ) return values - @root_validator(pre=False) + @root_validator(skip_on_failure=True) def validator_site_url_to_site_name(cls, values): site_url_to_site_name = values.get("site_url_to_site_name") site_url = values.get("site_url") diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py index 96729f4c60c6c..b8cc34c234ffa 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py @@ -405,8 +405,7 @@ class PowerBiDashboardSourceConfig( "Works for M-Query where native SQL is used for transformation.", ) - @root_validator - @classmethod + @root_validator(skip_on_failure=True) def validate_extract_column_level_lineage(cls, values: Dict) -> Dict: flags = [ "native_query_parsing", @@ -445,7 +444,7 @@ def map_data_platform(cls, value): return value - @root_validator(pre=False) + @root_validator(skip_on_failure=True) def workspace_id_backward_compatibility(cls, values: Dict) -> Dict: workspace_id = values.get("workspace_id") workspace_id_pattern = values.get("workspace_id_pattern") diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi_report_server/report_server_domain.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi_report_server/report_server_domain.py index 60426fc5bd660..ee87d93774b3d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi_report_server/report_server_domain.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi_report_server/report_server_domain.py @@ -12,21 +12,21 @@ class CatalogItem(BaseModel): id: str = Field(alias="Id") name: str = Field(alias="Name") - description: Optional[str] = Field(alias="Description") + description: Optional[str] = Field(None, alias="Description") path: str = Field(alias="Path") - type: Any = Field(alias="Type") + type: Any = Field(None, alias="Type") hidden: bool = Field(alias="Hidden") 
size: int = Field(alias="Size") - modified_by: Optional[str] = Field(alias="ModifiedBy") - modified_date: Optional[datetime] = Field(alias="ModifiedDate") - created_by: Optional[str] = Field(alias="CreatedBy") - created_date: Optional[datetime] = Field(alias="CreatedDate") - parent_folder_id: Optional[str] = Field(alias="ParentFolderId") - content_type: Optional[str] = Field(alias="ContentType") + modified_by: Optional[str] = Field(None, alias="ModifiedBy") + modified_date: Optional[datetime] = Field(None, alias="ModifiedDate") + created_by: Optional[str] = Field(None, alias="CreatedBy") + created_date: Optional[datetime] = Field(None, alias="CreatedDate") + parent_folder_id: Optional[str] = Field(None, alias="ParentFolderId") + content_type: Optional[str] = Field(None, alias="ContentType") content: str = Field(alias="Content") is_favorite: bool = Field(alias="IsFavorite") - user_info: Any = Field(alias="UserInfo") - display_name: Optional[str] = Field(alias="DisplayName") + user_info: Any = Field(None, alias="UserInfo") + display_name: Optional[str] = Field(None, alias="DisplayName") has_data_sources: bool = Field(default=False, alias="HasDataSources") data_sources: Optional[List["DataSource"]] = Field( default_factory=list, alias="DataSources" @@ -72,12 +72,12 @@ def __hash__(self): class DataModelDataSource(BaseModel): - auth_type: Optional[str] = Field(alias="AuthType") + auth_type: Optional[str] = Field(None, alias="AuthType") supported_auth_types: List[Optional[str]] = Field(alias="SupportedAuthTypes") kind: str = Field(alias="Kind") model_connection_name: str = Field(alias="ModelConnectionName") secret: str = Field(alias="Secret") - type: Optional[str] = Field(alias="Type") + type: Optional[str] = Field(None, alias="Type") username: str = Field(alias="Username") @@ -135,21 +135,23 @@ class DataSource(CatalogItem): is_enabled: bool = Field(alias="IsEnabled") connection_string: str = Field(alias="ConnectionString") data_model_data_source: Optional[DataModelDataSource] = Field( - alias="DataModelDataSource" + None, alias="DataModelDataSource" ) - data_source_sub_type: Optional[str] = Field(alias="DataSourceSubType") - data_source_type: Optional[str] = Field(alias="DataSourceType") + data_source_sub_type: Optional[str] = Field(None, alias="DataSourceSubType") + data_source_type: Optional[str] = Field(None, alias="DataSourceType") is_original_connection_string_expression_based: bool = Field( alias="IsOriginalConnectionStringExpressionBased" ) is_connection_string_overridden: bool = Field(alias="IsConnectionStringOverridden") - credentials_by_user: Optional[CredentialsByUser] = Field(alias="CredentialsByUser") + credentials_by_user: Optional[CredentialsByUser] = Field( + None, alias="CredentialsByUser" + ) credentials_in_server: Optional[CredentialsInServer] = Field( - alias="CredentialsInServer" + None, alias="CredentialsInServer" ) is_reference: bool = Field(alias="IsReference") - subscriptions: Optional[Subscription] = Field(alias="Subscriptions") - meta_data: Optional[MetaData] = Field(alias="MetaData") + subscriptions: Optional[Subscription] = Field(None, alias="Subscriptions") + meta_data: Optional[MetaData] = Field(None, alias="MetaData") def __members(self): return (self.id,) @@ -274,15 +276,15 @@ def __hash__(self): class CorpUserEditableInfo(BaseModel): display_name: str = Field(alias="displayName") title: str - about_me: Optional[str] = Field(alias="aboutMe") - teams: Optional[List[str]] - skills: Optional[List[str]] - picture_link: Optional[str] = Field(alias="pictureLink") + 
about_me: Optional[str] = Field(None, alias="aboutMe") + teams: Optional[List[str]] = None + skills: Optional[List[str]] = None + picture_link: Optional[str] = Field(None, alias="pictureLink") class CorpUserEditableProperties(CorpUserEditableInfo): - slack: Optional[str] - phone: Optional[str] + slack: Optional[str] = None + phone: Optional[str] = None email: str @@ -305,21 +307,21 @@ class EntityRelationshipsResult(BaseModel): start: int count: int total: int - relationships: Optional[EntityRelationship] + relationships: Optional[EntityRelationship] = None class CorpUserProperties(BaseModel): active: bool display_name: str = Field(alias="displayName") email: str - title: Optional[str] - manager: Optional["CorpUser"] - department_id: Optional[int] = Field(alias="departmentId") - department_name: Optional[str] = Field(alias="departmentName") - first_name: Optional[str] = Field(alias="firstName") - last_name: Optional[str] = Field(alias="lastName") - full_name: Optional[str] = Field(alias="fullName") - country_code: Optional[str] = Field(alias="countryCode") + title: Optional[str] = None + manager: Optional["CorpUser"] = None + department_id: Optional[int] = Field(None, alias="departmentId") + department_name: Optional[str] = Field(None, alias="departmentName") + first_name: Optional[str] = Field(None, alias="firstName") + last_name: Optional[str] = Field(None, alias="lastName") + full_name: Optional[str] = Field(None, alias="fullName") + country_code: Optional[str] = Field(None, alias="countryCode") class CorpUser(BaseModel): @@ -328,13 +330,13 @@ class CorpUser(BaseModel): username: str properties: CorpUserProperties editable_properties: Optional[CorpUserEditableProperties] = Field( - alias="editableProperties" + None, alias="editableProperties" ) - status: Optional[CorpUserStatus] - tags: Optional[GlobalTags] - relationships: Optional[EntityRelationshipsResult] - editableInfo: Optional[CorpUserEditableInfo] = Field(alias="editableInfo") - global_tags: Optional[GlobalTags] = Field(alias="globalTags") + status: Optional[CorpUserStatus] = None + tags: Optional[GlobalTags] = None + relationships: Optional[EntityRelationshipsResult] = None + editableInfo: Optional[CorpUserEditableInfo] = Field(None, alias="editableInfo") + global_tags: Optional[GlobalTags] = Field(None, alias="globalTags") def get_urn_part(self): return "{}".format(self.username) @@ -353,7 +355,7 @@ def __hash__(self): class OwnershipData(BaseModel): existing_owners: Optional[List[OwnerClass]] = [] - owner_to_add: Optional[CorpUser] + owner_to_add: Optional[CorpUser] = None class Config: arbitrary_types_allowed = True diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 79b044841e054..9cbf1823db939 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -81,7 +81,7 @@ class RedshiftConfig( # Because of this behavior, it uses dramatically fewer round trips for # large Redshift warehouses. As an example, see this query for the columns: # https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/blob/60b4db04c1d26071c291aeea52f1dcb5dd8b0eb0/sqlalchemy_redshift/dialect.py#L745. 
- scheme = Field( + scheme: str = Field( default="redshift+psycopg2", description="", hidden_from_schema=True, @@ -150,14 +150,14 @@ def check_email_is_set_on_usage(cls, values): ), "email_domain needs to be set if usage is enabled" return values - @root_validator() + @root_validator(skip_on_failure=True) def check_database_or_database_alias_set(cls, values): assert values.get("database") or values.get( "database_alias" ), "either database or database_alias must be set" return values - @root_validator(pre=False) + @root_validator(skip_on_failure=True) def backward_compatibility_configs_set(cls, values: Dict) -> Dict: match_fully_qualified_names = values.get("match_fully_qualified_names") diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/config.py b/metadata-ingestion/src/datahub/ingestion/source/s3/config.py index 3ef6476078f6f..f752a33b42d9c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/config.py @@ -144,7 +144,7 @@ def platform_not_empty(cls, platform: str, values: dict) -> str: raise ValueError("platform must not be empty") return platform - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def ensure_profiling_pattern_is_passed_to_profiling( cls, values: Dict[str, Any] ) -> Dict[str, Any]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/datalake_profiler_config.py b/metadata-ingestion/src/datahub/ingestion/source/s3/datalake_profiler_config.py index 9f6d13a08b182..89c092875e449 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/datalake_profiler_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/datalake_profiler_config.py @@ -72,7 +72,7 @@ class DataLakeProfilerConfig(ConfigModel): description="Whether to profile for the sample values for all columns.", ) - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def ensure_field_level_settings_are_normalized( cls: "DataLakeProfilerConfig", values: Dict[str, Any] ) -> Dict[str, Any]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/salesforce.py b/metadata-ingestion/src/datahub/ingestion/source/salesforce.py index 3475c9f2881c1..6d52646f85d0a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/salesforce.py +++ b/metadata-ingestion/src/datahub/ingestion/source/salesforce.py @@ -83,7 +83,7 @@ class SalesforceProfilingConfig(ConfigModel): class SalesforceConfig(DatasetSourceConfigMixin): - platform = "salesforce" + platform: str = "salesforce" auth: SalesforceAuthType = SalesforceAuthType.USERNAME_PASSWORD diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py index 1cbd4a3b3ea24..8f571313f1888 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py @@ -79,30 +79,30 @@ class SnowflakeColumnReference(PermissiveModel): class SnowflakeObjectAccessEntry(PermissiveModel): - columns: Optional[List[SnowflakeColumnReference]] + columns: Optional[List[SnowflakeColumnReference]] = None objectDomain: str objectName: str # Seems like it should never be null, but in practice have seen null objectIds - objectId: Optional[int] - stageKind: Optional[str] + objectId: Optional[int] = None + stageKind: Optional[str] = None class SnowflakeJoinedAccessEvent(PermissiveModel): query_start_time: datetime query_text: 
str query_type: str - rows_inserted: Optional[int] - rows_updated: Optional[int] - rows_deleted: Optional[int] + rows_inserted: Optional[int] = None + rows_updated: Optional[int] = None + rows_deleted: Optional[int] = None base_objects_accessed: List[SnowflakeObjectAccessEntry] direct_objects_accessed: List[SnowflakeObjectAccessEntry] objects_modified: List[SnowflakeObjectAccessEntry] user_name: str - first_name: Optional[str] - last_name: Optional[str] - display_name: Optional[str] - email: Optional[str] + first_name: Optional[str] = None + last_name: Optional[str] = None + display_name: Optional[str] = None + email: Optional[str] = None role_name: str diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/clickhouse.py b/metadata-ingestion/src/datahub/ingestion/source/sql/clickhouse.py index 8873038079bad..30893fd03be22 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/clickhouse.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/clickhouse.py @@ -5,12 +5,11 @@ from enum import Enum from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union -import clickhouse_driver # noqa: F401 +import clickhouse_driver import clickhouse_sqlalchemy.types as custom_types import pydantic from clickhouse_sqlalchemy.drivers import base from clickhouse_sqlalchemy.drivers.base import ClickHouseDialect -from pydantic.class_validators import root_validator from pydantic.fields import Field from sqlalchemy import create_engine, text from sqlalchemy.engine import reflection @@ -59,6 +58,8 @@ UpstreamClass, ) +assert clickhouse_driver + # adding extra types not handled by clickhouse-sqlalchemy 0.1.8 base.ischema_names["DateTime64(0)"] = DATETIME base.ischema_names["DateTime64(1)"] = DATETIME @@ -126,8 +127,8 @@ class ClickHouseConfig( TwoTierSQLAlchemyConfig, BaseTimeWindowConfig, DatasetLineageProviderConfigBase ): # defaults - host_port = Field(default="localhost:8123", description="ClickHouse host URL.") - scheme = Field(default="clickhouse", description="", hidden_from_docs=True) + host_port: str = Field(default="localhost:8123", description="ClickHouse host URL.") + scheme: str = Field(default="clickhouse", description="", hidden_from_docs=True) password: pydantic.SecretStr = Field( default=pydantic.SecretStr(""), description="password" ) @@ -165,7 +166,7 @@ def get_sql_alchemy_url(self, current_db=None): return str(url) # pre = True because we want to take some decision before pydantic initialize the configuration to default values - @root_validator(pre=True) + @pydantic.root_validator(pre=True) def projects_backward_compatibility(cls, values: Dict) -> Dict: secure = values.get("secure") protocol = values.get("protocol") diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/druid.py b/metadata-ingestion/src/datahub/ingestion/source/sql/druid.py index 1dfa44f549135..3f20e0a0f18b6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/druid.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/druid.py @@ -32,7 +32,7 @@ def get_table_names(self, connection, schema=None, **kwargs): class DruidConfig(BasicSQLAlchemyConfig): # defaults - scheme = "druid" + scheme: str = "druid" schema_pattern: AllowDenyPattern = Field( default=AllowDenyPattern(deny=["^(lookup|sysgit|view).*"]), description="regex patterns for schemas to filter in ingestion.", diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py b/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py index d081acb6c1eff..003732236ba80 100644 --- 
a/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/hive.py @@ -122,7 +122,7 @@ def get_view_definition_patched(self, connection, view_name, schema=None, **kw): class HiveConfig(TwoTierSQLAlchemyConfig): # defaults - scheme = Field(default="hive", hidden_from_docs=True) + scheme: str = Field(default="hive", hidden_from_docs=True) @validator("host_port") def clean_host_port(cls, v): diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py index e4969ce946f78..891b64066721b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py @@ -48,8 +48,8 @@ class MySQLConnectionConfig(SQLAlchemyConnectionConfig): # defaults - host_port = Field(default="localhost:3306", description="MySQL host URL.") - scheme = "mysql+pymysql" + host_port: str = Field(default="localhost:3306", description="MySQL host URL.") + scheme: str = "mysql+pymysql" class MySQLConfig(MySQLConnectionConfig, TwoTierSQLAlchemyConfig): diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py index 4f133c6459a0f..c8418075928ef 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py @@ -98,8 +98,10 @@ class ViewLineageEntry(BaseModel): class BasePostgresConfig(BasicSQLAlchemyConfig): - scheme = Field(default="postgresql+psycopg2", description="database scheme") - schema_pattern = Field(default=AllowDenyPattern(deny=["information_schema"])) + scheme: str = Field(default="postgresql+psycopg2", description="database scheme") + schema_pattern: AllowDenyPattern = Field( + default=AllowDenyPattern(deny=["information_schema"]) + ) class PostgresConfig(BasePostgresConfig): diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/presto.py b/metadata-ingestion/src/datahub/ingestion/source/sql/presto.py index c7331b4e53e5e..9333c6edd1fa5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/presto.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/presto.py @@ -85,7 +85,7 @@ def _get_full_table( # type: ignore class PrestoConfig(TrinoConfig): # defaults - scheme = Field(default="presto", description="", hidden_from_docs=True) + scheme: str = Field(default="presto", description="", hidden_from_docs=True) @platform_name("Presto", doc_order=1) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py index cdab52ebc3935..33d517c8589e9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py @@ -145,7 +145,7 @@ def get_identifier(self, schema: str, table: str) -> str: # Because of this behavior, it uses dramatically fewer round trips for # large Redshift warehouses. As an example, see this query for the columns: # https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/blob/60b4db04c1d26071c291aeea52f1dcb5dd8b0eb0/sqlalchemy_redshift/dialect.py#L745. 
- scheme = Field( + scheme: str = Field( default="redshift+psycopg2", description="", hidden_from_docs=True, diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py index 095b8e6443171..6a76ae847218d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py @@ -107,7 +107,7 @@ def view_pattern_is_table_pattern_unless_specified( values["view_pattern"] = table_pattern return values - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def ensure_profiling_pattern_is_passed_to_profiling( cls, values: Dict[str, Any] ) -> Dict[str, Any]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py b/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py index 899a7b6697c0a..8aeb1e50cd0b3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py @@ -70,7 +70,7 @@ class TeradataReport(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowRep class BaseTeradataConfig(TwoTierSQLAlchemyConfig): - scheme = Field(default="teradatasql", description="database scheme") + scheme: str = Field(default="teradatasql", description="database scheme") class TeradataConfig(BaseTeradataConfig, BaseTimeWindowConfig): diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py b/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py index 3b80cbed86c02..2b693d9d80d91 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py @@ -133,7 +133,7 @@ def _get_columns(self, connection, table_name, schema: str = None, **kw): # typ class TrinoConfig(BasicSQLAlchemyConfig): # defaults - scheme = Field(default="trino", description="", hidden_from_docs=True) + scheme: str = Field(default="trino", description="", hidden_from_docs=True) def get_identifier(self: BasicSQLAlchemyConfig, schema: str, table: str) -> str: regular = f"{schema}.{table}" diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py index d11b1f9ad6a53..b1d2b276130a9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py +++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py @@ -5,13 +5,13 @@ import pydantic from pydantic import root_validator from pydantic.fields import Field -from pydantic.generics import GenericModel from datahub.configuration.common import ( ConfigModel, ConfigurationError, DynamicTypedConfig, ) +from datahub.configuration.pydantic_migration_helpers import GenericModel from datahub.configuration.time_window_config import BaseTimeWindowConfig from datahub.configuration.validate_field_rename import pydantic_renamed_field from datahub.ingestion.api.common import PipelineContext @@ -77,7 +77,7 @@ class StatefulIngestionConfig(ConfigModel): hidden_from_docs=True, ) - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def validate_config(cls, values: Dict[str, Any]) -> Dict[str, Any]: if values.get("enabled"): if values.get("state_provider") is None: @@ -112,7 +112,7 @@ class StatefulLineageConfigMixin: "store_last_lineage_extraction_timestamp", "enable_stateful_lineage_ingestion" ) - @root_validator(pre=False) + 
@root_validator(skip_on_failure=True) def lineage_stateful_option_validator(cls, values: Dict) -> Dict: sti = values.get("stateful_ingestion") if not sti or not sti.enabled: @@ -137,7 +137,7 @@ class StatefulProfilingConfigMixin(ConfigModel): "store_last_profiling_timestamps", "enable_stateful_profiling" ) - @root_validator(pre=False) + @root_validator(skip_on_failure=True) def profiling_stateful_option_validator(cls, values: Dict) -> Dict: sti = values.get("stateful_ingestion") if not sti or not sti.enabled: @@ -161,7 +161,7 @@ class StatefulUsageConfigMixin(BaseTimeWindowConfig): "store_last_usage_extraction_timestamp", "enable_stateful_usage_ingestion" ) - @root_validator(pre=False) + @root_validator(skip_on_failure=True) def last_usage_extraction_stateful_option_validator(cls, values: Dict) -> Dict: sti = values.get("stateful_ingestion") if not sti or not sti.enabled: diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py index e491a1e8b82fa..1ae971e4a82d0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/superset.py +++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py @@ -105,7 +105,7 @@ class SupersetConfig(StatefulIngestionConfigBase, ConfigModel): def remove_trailing_slash(cls, v): return config_clean.remove_trailing_slashes(v) - @root_validator + @root_validator(skip_on_failure=True) def default_display_uri_to_connect_uri(cls, values): base = values.get("display_uri") if base is None: diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 16820c37d546e..7073830318abe 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -76,7 +76,7 @@ class UnityCatalogProfilerConfig(ConfigModel): description="Number of worker threads to use for profiling. Set to 1 to disable.", ) - @pydantic.root_validator + @pydantic.root_validator(skip_on_failure=True) def warehouse_id_required_for_profiling( cls, values: Dict[str, Any] ) -> Dict[str, Any]: diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py index 9fc697018ecd6..2e9a15063661e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py @@ -340,7 +340,6 @@ def get_connection(self) -> snowflake.connector.SnowflakeConnection: class SnowflakeConfig(BaseSnowflakeConfig, BaseTimeWindowConfig, SQLCommonConfig): - include_table_lineage: bool = pydantic.Field( default=True, description="If enabled, populates the snowflake table-to-table and s3-to-snowflake table lineage. 
Requires appropriate grants given to the role and Snowflake Enterprise Edition or above.", @@ -357,7 +356,7 @@ class SnowflakeConfig(BaseSnowflakeConfig, BaseTimeWindowConfig, SQLCommonConfig ignore_start_time_lineage: bool = False upstream_lineage_in_report: bool = False - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def validate_include_view_lineage(cls, values): if ( "include_table_lineage" in values diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py b/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py index 6037490acb267..5eb9c83236e4f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py @@ -44,7 +44,7 @@ class BigQueryCredential(ConfigModel): description="If not set it will be default to https://www.googleapis.com/robot/v1/metadata/x509/client_email", ) - @pydantic.root_validator() + @pydantic.root_validator(skip_on_failure=True) def validate_config(cls, values: Dict[str, Any]) -> Dict[str, Any]: if values.get("client_x509_cert_url") is None: values[ diff --git a/metadata-ingestion/src/datahub/upgrade/upgrade.py b/metadata-ingestion/src/datahub/upgrade/upgrade.py index acc7954ad25a6..075bfd29008f6 100644 --- a/metadata-ingestion/src/datahub/upgrade/upgrade.py +++ b/metadata-ingestion/src/datahub/upgrade/upgrade.py @@ -23,18 +23,18 @@ class VersionStats(BaseModel, arbitrary_types_allowed=True): version: Version - release_date: Optional[datetime] + release_date: Optional[datetime] = None class ServerVersionStats(BaseModel): current: VersionStats - latest: Optional[VersionStats] - current_server_type: Optional[str] + latest: Optional[VersionStats] = None + current_server_type: Optional[str] = None class ClientVersionStats(BaseModel): current: VersionStats - latest: Optional[VersionStats] + latest: Optional[VersionStats] = None class DataHubVersionStats(BaseModel): diff --git a/metadata-ingestion/src/datahub/utilities/lossy_collections.py b/metadata-ingestion/src/datahub/utilities/lossy_collections.py index f0c1e0da40552..0542a9dfd51f9 100644 --- a/metadata-ingestion/src/datahub/utilities/lossy_collections.py +++ b/metadata-ingestion/src/datahub/utilities/lossy_collections.py @@ -1,6 +1,8 @@ import random from typing import Dict, Iterator, List, Set, TypeVar, Union +from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2 + T = TypeVar("T") _KT = TypeVar("_KT") _VT = TypeVar("_VT") @@ -41,6 +43,16 @@ def __repr__(self) -> str: def __str__(self) -> str: return repr(self) + if PYDANTIC_VERSION_2: + # With pydantic 2, it doesn't recognize that this is a list subclass, + # so we need to make it explicit. 
+ + @classmethod + def __get_pydantic_core_schema__(cls, source_type, handler): # type: ignore + from pydantic_core import core_schema + + return core_schema.no_info_after_validator_function(cls, handler(list)) + def as_obj(self) -> List[Union[T, str]]: base_list: List[Union[T, str]] = list(self.__iter__()) if self.sampled: diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index 6413275ac63a6..cdffb684d958e 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -17,6 +17,7 @@ from pydantic import BaseModel from typing_extensions import TypedDict +from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2 from datahub.emitter.mce_builder import ( DEFAULT_ENV, make_dataset_urn_with_platform_instance, @@ -122,12 +123,17 @@ class _ParserBaseModel( SchemaFieldDataTypeClass: lambda v: v.to_obj(), }, ): - pass + def json(self, *args: Any, **kwargs: Any) -> str: + if PYDANTIC_VERSION_2: + return super().model_dump_json(*args, **kwargs) # type: ignore + else: + return super().json(*args, **kwargs) @functools.total_ordering class _FrozenModel(_ParserBaseModel, frozen=True): def __lt__(self, other: "_FrozenModel") -> bool: + # TODO: The __fields__ attribute is deprecated in Pydantic v2. for field in self.__fields__: self_v = getattr(self, field) other_v = getattr(other, field) @@ -138,8 +144,8 @@ def __lt__(self, other: "_FrozenModel") -> bool: class _TableName(_FrozenModel): - database: Optional[str] - db_schema: Optional[str] + database: Optional[str] = None + db_schema: Optional[str] = None table: str def as_sqlglot_table(self) -> sqlglot.exp.Table: @@ -187,16 +193,16 @@ class ColumnRef(_ParserBaseModel): class _DownstreamColumnRef(_ParserBaseModel): - table: Optional[_TableName] + table: Optional[_TableName] = None column: str - column_type: Optional[sqlglot.exp.DataType] + column_type: Optional[sqlglot.exp.DataType] = None class DownstreamColumnRef(_ParserBaseModel): - table: Optional[Urn] + table: Optional[Urn] = None column: str - column_type: Optional[SchemaFieldDataTypeClass] - native_column_type: Optional[str] + column_type: Optional[SchemaFieldDataTypeClass] = None + native_column_type: Optional[str] = None @pydantic.validator("column_type", pre=True) def _load_column_type( @@ -213,7 +219,7 @@ class _ColumnLineageInfo(_ParserBaseModel): downstream: _DownstreamColumnRef upstreams: List[_ColumnRef] - logic: Optional[str] + logic: Optional[str] = None class ColumnLineageInfo(_ParserBaseModel): @@ -244,7 +250,7 @@ class SqlParsingResult(_ParserBaseModel): in_tables: List[Urn] out_tables: List[Urn] - column_lineage: Optional[List[ColumnLineageInfo]] + column_lineage: Optional[List[ColumnLineageInfo]] = None # TODO include formatted original sql logic # TODO include list of referenced columns From a187127ac5e5a3aebd9ef217e3facadc159f59fa Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Sat, 11 Nov 2023 04:06:00 +0530 Subject: [PATCH 090/792] feat(ingestion): file-based state checkpoint provider (#9029) --- metadata-ingestion/setup.py | 1 + ...gestion_job_checkpointing_provider_base.py | 2 +- .../src/datahub/ingestion/graph/client.py | 2 +- .../source/state/stateful_ingestion_base.py | 17 +- ...atahub_ingestion_checkpointing_provider.py | 27 +- .../file_ingestion_checkpointing_provider.py | 108 +++ .../integration/lookml/golden_test_state.json | 26 + 
.../lookml_mces_golden_deleted_stateful.json | 650 ------------------ .../tests/integration/lookml/test_lookml.py | 116 +--- ...atahub_ingestion_checkpointing_provider.py | 170 ----- .../provider/test_provider.py | 183 +++++ .../state/golden_test_checkpoint_state.json | 26 + ...n_test_checkpoint_state_after_deleted.json | 26 + .../state/golden_test_stateful_ingestion.json | 50 ++ ...test_stateful_ingestion_after_deleted.json | 50 ++ .../state/test_stateful_ingestion.py | 227 ++++++ .../unit/stateful_ingestion/test_configs.py | 15 +- 17 files changed, 739 insertions(+), 957 deletions(-) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/state_provider/file_ingestion_checkpointing_provider.py create mode 100644 metadata-ingestion/tests/integration/lookml/golden_test_state.json delete mode 100644 metadata-ingestion/tests/integration/lookml/lookml_mces_golden_deleted_stateful.json delete mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/provider/test_datahub_ingestion_checkpointing_provider.py create mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/provider/test_provider.py create mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state.json create mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state_after_deleted.json create mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion.json create mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion_after_deleted.json create mode 100644 metadata-ingestion/tests/unit/stateful_ingestion/state/test_stateful_ingestion.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 2392fce058061..5f44f14c3d74c 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -666,6 +666,7 @@ ], "datahub.ingestion.checkpointing_provider.plugins": [ "datahub = datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider:DatahubIngestionCheckpointingProvider", + "file = datahub.ingestion.source.state_provider.file_ingestion_checkpointing_provider:FileIngestionCheckpointingProvider", ], "datahub.ingestion.reporting_provider.plugins": [ "datahub = datahub.ingestion.reporting.datahub_ingestion_run_summary_provider:DatahubIngestionRunSummaryProvider", diff --git a/metadata-ingestion/src/datahub/ingestion/api/ingestion_job_checkpointing_provider_base.py b/metadata-ingestion/src/datahub/ingestion/api/ingestion_job_checkpointing_provider_base.py index ca02b88ab6324..285ad9c088447 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/ingestion_job_checkpointing_provider_base.py +++ b/metadata-ingestion/src/datahub/ingestion/api/ingestion_job_checkpointing_provider_base.py @@ -35,7 +35,7 @@ def __init__( @classmethod @abstractmethod def create( - cls: Type[_Self], config_dict: Dict[str, Any], ctx: PipelineContext, name: str + cls: Type[_Self], config_dict: Dict[str, Any], ctx: PipelineContext ) -> "_Self": pass diff --git a/metadata-ingestion/src/datahub/ingestion/graph/client.py b/metadata-ingestion/src/datahub/ingestion/graph/client.py index ccff677c3a471..d91165ac9777c 100644 --- a/metadata-ingestion/src/datahub/ingestion/graph/client.py +++ b/metadata-ingestion/src/datahub/ingestion/graph/client.py @@ -756,7 +756,7 @@ def get_latest_pipeline_checkpoint( DatahubIngestionCheckpointingProvider, ) - checkpoint_provider = DatahubIngestionCheckpointingProvider(self, "graph") + checkpoint_provider = 
DatahubIngestionCheckpointingProvider(self) job_name = StaleEntityRemovalHandler.compute_job_id(platform) raw_checkpoint = checkpoint_provider.get_latest_checkpoint( diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py index b1d2b276130a9..8a448f40e95b4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py +++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py @@ -1,6 +1,6 @@ import logging from dataclasses import dataclass -from typing import Any, Dict, Generic, Optional, Type, TypeVar, cast +from typing import Any, Dict, Generic, Optional, Type, TypeVar import pydantic from pydantic import root_validator @@ -39,10 +39,8 @@ class DynamicTypedStateProviderConfig(DynamicTypedConfig): type: str = Field( description="The type of the state provider to use. For DataHub use `datahub`", ) - # This config type is declared Optional[Any] here. The eventual parser for the - # specified type is responsible for further validation. - config: Optional[Any] = Field( - default=None, + config: Dict[str, Any] = Field( + default={}, description="The configuration required for initializing the state provider. Default: The datahub_api config if set at pipeline level. Otherwise, the default DatahubClientConfig. See the defaults (https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/graph/client.py#L19).", ) @@ -82,7 +80,7 @@ def validate_config(cls, values: Dict[str, Any]) -> Dict[str, Any]: if values.get("enabled"): if values.get("state_provider") is None: values["state_provider"] = DynamicTypedStateProviderConfig( - type="datahub", config=None + type="datahub", config={} ) return values @@ -252,15 +250,10 @@ def _initialize_checkpointing_state_provider(self) -> None: f"Cannot find checkpoint provider class of type={self.stateful_ingestion_config.state_provider.type} " " in the registry! Please check the type of the checkpointing provider in your config." 
) - config_dict: Dict[str, Any] = cast( - Dict[str, Any], - self.stateful_ingestion_config.state_provider.dict().get("config", {}), - ) self.ingestion_checkpointing_state_provider = ( checkpointing_state_provider_class.create( - config_dict=config_dict, + config_dict=self.stateful_ingestion_config.state_provider.config, ctx=self.ctx, - name=checkpointing_state_provider_class.__name__, ) ) assert self.ingestion_checkpointing_state_provider diff --git a/metadata-ingestion/src/datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py b/metadata-ingestion/src/datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py index d7ebcba2c6695..442abb3aaf4cf 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py +++ b/metadata-ingestion/src/datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py @@ -17,14 +17,17 @@ class DatahubIngestionStateProviderConfig(IngestionCheckpointingProviderConfig): - datahub_api: Optional[DatahubClientConfig] = DatahubClientConfig() + datahub_api: DatahubClientConfig = DatahubClientConfig() class DatahubIngestionCheckpointingProvider(IngestionCheckpointingProviderBase): orchestrator_name: str = "datahub" - def __init__(self, graph: DataHubGraph, name: str): - super().__init__(name) + def __init__( + self, + graph: DataHubGraph, + ): + super().__init__(self.__class__.__name__) self.graph = graph if not self._is_server_stateful_ingestion_capable(): raise ConfigurationError( @@ -34,24 +37,14 @@ def __init__(self, graph: DataHubGraph, name: str): @classmethod def create( - cls, config_dict: Dict[str, Any], ctx: PipelineContext, name: str + cls, config_dict: Dict[str, Any], ctx: PipelineContext ) -> "DatahubIngestionCheckpointingProvider": + config = DatahubIngestionStateProviderConfig.parse_obj(config_dict) if ctx.graph: # Use the pipeline-level graph if set - return cls(ctx.graph, name) - elif config_dict is None: - raise ConfigurationError("Missing provider configuration.") + return cls(ctx.graph) else: - provider_config = ( - DatahubIngestionStateProviderConfig.parse_obj_allow_extras(config_dict) - ) - if provider_config.datahub_api: - graph = DataHubGraph(provider_config.datahub_api) - return cls(graph, name) - else: - raise ConfigurationError( - "Missing datahub_api. Provide either a global one or under the state_provider." 
- ) + return cls(DataHubGraph(config.datahub_api)) def _is_server_stateful_ingestion_capable(self) -> bool: server_config = self.graph.get_config() if self.graph else None diff --git a/metadata-ingestion/src/datahub/ingestion/source/state_provider/file_ingestion_checkpointing_provider.py b/metadata-ingestion/src/datahub/ingestion/source/state_provider/file_ingestion_checkpointing_provider.py new file mode 100644 index 0000000000000..a37774773b84d --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/state_provider/file_ingestion_checkpointing_provider.py @@ -0,0 +1,108 @@ +import logging +import pathlib +from datetime import datetime +from typing import Any, Dict, List, Optional + +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.api.ingestion_job_checkpointing_provider_base import ( + IngestionCheckpointingProviderBase, + IngestionCheckpointingProviderConfig, + JobId, +) +from datahub.ingestion.sink.file import write_metadata_file +from datahub.ingestion.source.file import read_metadata_file +from datahub.metadata.schema_classes import DatahubIngestionCheckpointClass + +logger = logging.getLogger(__name__) + + +class FileIngestionStateProviderConfig(IngestionCheckpointingProviderConfig): + filename: str + + +class FileIngestionCheckpointingProvider(IngestionCheckpointingProviderBase): + orchestrator_name: str = "file" + + def __init__(self, config: FileIngestionStateProviderConfig): + super().__init__(self.__class__.__name__) + self.config = config + + @classmethod + def create( + cls, config_dict: Dict[str, Any], ctx: PipelineContext + ) -> "FileIngestionCheckpointingProvider": + config = FileIngestionStateProviderConfig.parse_obj(config_dict) + return cls(config) + + def get_latest_checkpoint( + self, + pipeline_name: str, + job_name: JobId, + ) -> Optional[DatahubIngestionCheckpointClass]: + logger.debug( + f"Querying for the latest ingestion checkpoint for pipelineName:'{pipeline_name}'," + f" job_name:'{job_name}'" + ) + + data_job_urn = self.get_data_job_urn( + self.orchestrator_name, pipeline_name, job_name + ) + latest_checkpoint: Optional[DatahubIngestionCheckpointClass] = None + try: + for obj in read_metadata_file(pathlib.Path(self.config.filename)): + if ( + isinstance(obj, MetadataChangeProposalWrapper) + and obj.entityUrn == data_job_urn + and obj.aspect + and isinstance(obj.aspect, DatahubIngestionCheckpointClass) + and obj.aspect.get("pipelineName", "") == pipeline_name + ): + latest_checkpoint = obj.aspect + break + except FileNotFoundError: + logger.debug(f"File {self.config.filename} not found") + + if latest_checkpoint: + logger.debug( + f"The last committed ingestion checkpoint for pipelineName:'{pipeline_name}'," + f" job_name:'{job_name}' found with start_time:" + f" {datetime.utcfromtimestamp(latest_checkpoint.timestampMillis/1000)}" + ) + return latest_checkpoint + else: + logger.debug( + f"No committed ingestion checkpoint for pipelineName:'{pipeline_name}'," + f" job_name:'{job_name}' found" + ) + + return None + + def commit(self) -> None: + if not self.state_to_commit: + logger.warning(f"No state available to commit for {self.name}") + return None + + checkpoint_workunits: List[MetadataChangeProposalWrapper] = [] + for job_name, checkpoint in self.state_to_commit.items(): + # Emit the ingestion state for each job + logger.debug( + f"Committing ingestion checkpoint for pipeline:'{checkpoint.pipelineName}', " + f"job:'{job_name}'" + ) + datajob_urn = 
self.get_data_job_urn( + self.orchestrator_name, + checkpoint.pipelineName, + job_name, + ) + checkpoint_workunits.append( + MetadataChangeProposalWrapper( + entityUrn=datajob_urn, + aspect=checkpoint, + ) + ) + write_metadata_file(pathlib.Path(self.config.filename), checkpoint_workunits) + self.committed = True + logger.debug( + f"Committed all ingestion checkpoints for pipeline:'{checkpoint.pipelineName}'" + ) diff --git a/metadata-ingestion/tests/integration/lookml/golden_test_state.json b/metadata-ingestion/tests/integration/lookml/golden_test_state.json new file mode 100644 index 0000000000000..c62106ac10089 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/golden_test_state.json @@ -0,0 +1,26 @@ +[ +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(file,lookml_stateful,prod),lookml_stale_entity_removal)", + "changeType": "UPSERT", + "aspectName": "datahubIngestionCheckpoint", + "aspect": { + "json": { + "timestampMillis": 1586847600000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "pipelineName": "lookml_stateful", + "platformInstanceId": "", + "config": "", + "state": { + "formatVersion": "1.0", + "serde": "base85-bz2-json", + "payload": "LRx4!F+o`-Q(4)<4JiNuUmt)_WdINa0@Mn>@BivB0a-v1sF;Ar&}h&A0K-EjK*+=xnKU%Oib;?JVrrXB7?aRqCarWwpZm8v5Yh+DsN{|c*msMh9%WJXjKPvIPsDn^@g3;DD9Q9kBh?*|=8M4uRW$_0HKn3XhN;RhAcLIBhLnO2%UA@Ykl;h&Xx(^@2;Y9C#d4g3K_2CA-I*M)h{NMA8Nu4C3XjEQYdh{nR--&lfRUsTL}OOkOO435f=1nKzYJ^9)mbBljM0}gaqy26URw1=q<80Eb9y)y?Vl88kG;g~MToq#r%6trK9U`U?k}RS<@^?i@1M1@9*%tk}1N3hRzUaNB" + }, + "runId": "lookml-test" + } + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/lookml_mces_golden_deleted_stateful.json b/metadata-ingestion/tests/integration/lookml/lookml_mces_golden_deleted_stateful.json deleted file mode 100644 index a323118666940..0000000000000 --- a/metadata-ingestion/tests/integration/lookml/lookml_mces_golden_deleted_stateful.json +++ /dev/null @@ -1,650 +0,0 @@ -[ -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_view,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/looker/lkml_samples/views" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Status": { - "removed": false - } - }, - { - "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { - "upstreams": [ - { - "auditStamp": { - "time": 1586847600000, - "actor": "urn:li:corpuser:datahub" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:conn,..my_table,PROD)", - "type": "VIEW" - } - ] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "my_view", - "platform": "urn:li:dataPlatform:looker", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "country", - "nullable": false, - "description": "The country", - "label": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Dimension" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "city", - "nullable": false, - "description": "City", - "label": "", 
- "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Dimension" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "is_latest", - "nullable": false, - "description": "Is latest data", - "label": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.BooleanType": {} - } - }, - "nativeDataType": "yesno", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Dimension" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "timestamp", - "nullable": false, - "description": "Timestamp of measurement", - "label": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "time", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Dimension" - }, - { - "tag": "urn:li:tag:Temporal" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "average_measurement", - "nullable": false, - "description": "My measurement", - "label": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "average", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Measure" - } - ] - }, - "isPartOfKey": false - } - ], - "primaryKeys": [] - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": { - "looker.file.path": "foo.view.lkml" - }, - "name": "my_view", - "tags": [] - } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_view,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "View" - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_view,PROD)", - "changeType": "UPSERT", - "aspectName": "viewProperties", - "aspect": { - "json": { - "materialized": false, - "viewLogic": "SELECT\n is_latest,\n country,\n city,\n timestamp,\n measurement\n FROM\n my_table", - "viewLanguage": "sql" - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_view,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "looker" - }, - { - "id": "lkml_samples" - }, - { - "id": "views" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.owners,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/looker/lkml_samples/views" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Status": { - "removed": false - } - }, - { - "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { - "upstreams": [ - { - "auditStamp": { - "time": 1586847600000, - "actor": "urn:li:corpuser:datahub" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:conn,..owners,PROD)", - "type": "VIEW" - } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", 
- "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:conn,..owners,PROD),id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.owners,PROD),id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:conn,..owners,PROD),owner_name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.owners,PROD),owner_name)" - ], - "confidenceScore": 1.0 - } - ] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "owners", - "platform": "urn:li:dataPlatform:looker", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "id", - "nullable": false, - "description": "", - "label": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Dimension" - } - ] - }, - "isPartOfKey": true - }, - { - "fieldPath": "owner_name", - "nullable": false, - "description": "", - "label": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:Dimension" - } - ] - }, - "isPartOfKey": false - } - ], - "primaryKeys": [ - "id" - ] - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": { - "looker.file.path": "owners.view.lkml" - }, - "name": "owners", - "tags": [] - } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.owners,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "View" - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.owners,PROD)", - "changeType": "UPSERT", - "aspectName": "viewProperties", - "aspect": { - "json": { - "materialized": false, - "viewLogic": "view: owners {\n dimension: id {\n primary_key: yes\n sql: ${TABLE}.id ;;\n }\n dimension: owner_name {\n sql: ${TABLE}.owner_name ;;\n }\n}", - "viewLanguage": "lookml" - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.owners,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "looker" - }, - { - "id": "lkml_samples" - }, - { - "id": "views" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "tag", - "entityUrn": "urn:li:tag:Dimension", - "changeType": "UPSERT", - "aspectName": "tagKey", - "aspect": { - "json": { - "name": "Dimension" - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - 
"entityType": "tag", - "entityUrn": "urn:li:tag:Measure", - "changeType": "UPSERT", - "aspectName": "tagKey", - "aspect": { - "json": { - "name": "Measure" - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "tag", - "entityUrn": "urn:li:tag:Temporal", - "changeType": "UPSERT", - "aspectName": "tagKey", - "aspect": { - "json": { - "name": "Temporal" - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.flights,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.include_able_view,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.customer_facts,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.ability,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.looker_events,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.extending_looker_events,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.autodetect_sql_name_based_on_view_name,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_derived_view,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.fragment_derived_view,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.view_derived_explore,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.test_include_external_view,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": true - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "lookml-test" - } -} -] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/test_lookml.py b/metadata-ingestion/tests/integration/lookml/test_lookml.py index 21a0b19849d97..b1853cfa2b3c0 100644 --- a/metadata-ingestion/tests/integration/lookml/test_lookml.py +++ b/metadata-ingestion/tests/integration/lookml/test_lookml.py @@ -1,6 +1,6 @@ import logging import pathlib -from typing import Any, Dict, List, cast +from typing import Any, List from unittest import mock import pydantic @@ -17,17 +17,13 @@ LookerRefinementResolver, LookMLSourceConfig, ) -from datahub.ingestion.source.state.entity_removal_state import GenericCheckpointState from datahub.metadata.schema_classes import ( DatasetSnapshotClass, MetadataChangeEventClass, UpstreamLineageClass, ) from tests.test_helpers import mce_helpers -from tests.test_helpers.state_helpers import ( - get_current_checkpoint_from_pipeline, - validate_all_providers_have_committed_successfully, -) +from tests.test_helpers.state_helpers import get_current_checkpoint_from_pipeline logging.getLogger("lkml").setLevel(logging.INFO) @@ -728,11 +724,10 @@ def test_hive_platform_drops_ids(pytestconfig, tmp_path, mock_time): @freeze_time(FROZEN_TIME) -def test_lookml_ingest_stateful(pytestconfig, tmp_path, mock_time, mock_datahub_graph): +def test_lookml_stateful_ingestion(pytestconfig, tmp_path, mock_time): output_file_name: str = "lookml_mces.json" - golden_file_name: str = "expected_output.json" - output_file_deleted_name: str = "lookml_mces_deleted_stateful.json" - golden_file_deleted_name: str = "lookml_mces_golden_deleted_stateful.json" + state_file_name: str = "lookml_state_mces.json" + golden_file_name: str = "golden_test_state.json" test_resources_dir = pytestconfig.rootpath / "tests/integration/lookml" @@ -754,106 +749,37 @@ def test_lookml_ingest_stateful(pytestconfig, tmp_path, mock_time, mock_datahub_ "remove_stale_metadata": True, "fail_safe_threshold": 100.0, "state_provider": { - "type": "datahub", - "config": {"datahub_api": {"server": GMS_SERVER}}, + "type": "file", + "config": { + "filename": f"{tmp_path}/{state_file_name}", + }, }, }, }, }, "sink": { "type": "file", - "config": {}, + "config": { + "filename": f"{tmp_path}/{output_file_name}", + }, }, } - pipeline_run1 = None - with mock.patch( - "datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider.DataHubGraph", - mock_datahub_graph, - ) as mock_checkpoint: - mock_checkpoint.return_value = mock_datahub_graph - pipeline_run1_config: Dict[str, Dict[str, Dict[str, Any]]] = dict( # type: ignore - base_pipeline_config # type: ignore - ) - # Set the special properties for this run - pipeline_run1_config["source"]["config"]["emit_reachable_views_only"] = False - pipeline_run1_config["sink"]["config"][ - "filename" - ] = f"{tmp_path}/{output_file_name}" - pipeline_run1 = Pipeline.create(pipeline_run1_config) - pipeline_run1.run() - 
pipeline_run1.raise_from_status() - pipeline_run1.pretty_print_summary() + pipeline_run1 = Pipeline.create(base_pipeline_config) + pipeline_run1.run() + pipeline_run1.raise_from_status() + pipeline_run1.pretty_print_summary() - mce_helpers.check_golden_file( - pytestconfig, - output_path=tmp_path / output_file_name, - golden_path=f"{test_resources_dir}/{golden_file_name}", - ) + mce_helpers.check_golden_file( + pytestconfig, + output_path=f"{tmp_path}/{state_file_name}", + golden_path=f"{test_resources_dir}/{golden_file_name}", + ) checkpoint1 = get_current_checkpoint_from_pipeline(pipeline_run1) assert checkpoint1 assert checkpoint1.state - pipeline_run2 = None - with mock.patch( - "datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider.DataHubGraph", - mock_datahub_graph, - ) as mock_checkpoint: - mock_checkpoint.return_value = mock_datahub_graph - pipeline_run2_config: Dict[str, Dict[str, Dict[str, Any]]] = dict(base_pipeline_config) # type: ignore - # Set the special properties for this run - pipeline_run2_config["source"]["config"]["emit_reachable_views_only"] = True - pipeline_run2_config["sink"]["config"][ - "filename" - ] = f"{tmp_path}/{output_file_deleted_name}" - pipeline_run2 = Pipeline.create(pipeline_run2_config) - pipeline_run2.run() - pipeline_run2.raise_from_status() - pipeline_run2.pretty_print_summary() - - mce_helpers.check_golden_file( - pytestconfig, - output_path=tmp_path / output_file_deleted_name, - golden_path=f"{test_resources_dir}/{golden_file_deleted_name}", - ) - checkpoint2 = get_current_checkpoint_from_pipeline(pipeline_run2) - assert checkpoint2 - assert checkpoint2.state - - # Validate that all providers have committed successfully. - validate_all_providers_have_committed_successfully( - pipeline=pipeline_run1, expected_providers=1 - ) - validate_all_providers_have_committed_successfully( - pipeline=pipeline_run2, expected_providers=1 - ) - - # Perform all assertions on the states. 
The deleted table should not be - # part of the second state - state1 = cast(GenericCheckpointState, checkpoint1.state) - state2 = cast(GenericCheckpointState, checkpoint2.state) - - difference_dataset_urns = list( - state1.get_urns_not_in(type="dataset", other_checkpoint_state=state2) - ) - # the difference in dataset urns are all the views that are not reachable from the model file - assert len(difference_dataset_urns) == 11 - deleted_dataset_urns: List[str] = [ - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.fragment_derived_view,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_derived_view,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.test_include_external_view,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.extending_looker_events,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.customer_facts,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.include_able_view,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.autodetect_sql_name_based_on_view_name,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.ability,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.looker_events,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.view_derived_explore,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.flights,PROD)", - ] - assert sorted(deleted_dataset_urns) == sorted(difference_dataset_urns) - def test_lookml_base_folder(): fake_api = { diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/provider/test_datahub_ingestion_checkpointing_provider.py b/metadata-ingestion/tests/unit/stateful_ingestion/provider/test_datahub_ingestion_checkpointing_provider.py deleted file mode 100644 index 600985266043b..0000000000000 --- a/metadata-ingestion/tests/unit/stateful_ingestion/provider/test_datahub_ingestion_checkpointing_provider.py +++ /dev/null @@ -1,170 +0,0 @@ -import types -import unittest -from typing import Dict, List, Optional, Type -from unittest.mock import MagicMock, patch - -from avrogen.dict_wrapper import DictWrapper - -from datahub.emitter.mcp import MetadataChangeProposalWrapper -from datahub.ingestion.api.common import PipelineContext -from datahub.ingestion.api.ingestion_job_checkpointing_provider_base import ( - CheckpointJobStateType, - JobId, -) -from datahub.ingestion.source.state.checkpoint import Checkpoint -from datahub.ingestion.source.state.sql_common_state import ( - BaseSQLAlchemyCheckpointState, -) -from datahub.ingestion.source.state.usage_common_state import ( - BaseTimeWindowCheckpointState, -) -from datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider import ( - DatahubIngestionCheckpointingProvider, -) -from tests.test_helpers.type_helpers import assert_not_null - - -class TestDatahubIngestionCheckpointProvider(unittest.TestCase): - # Static members for the tests - pipeline_name: str = "test_pipeline" - job_names: List[JobId] = [JobId("job1"), JobId("job2")] - run_id: str = "test_run" - - def setUp(self) -> None: - self._setup_mock_graph() - self.provider = self._create_provider() - assert self.provider - - def _setup_mock_graph(self) -> None: - """ - Setup monkey-patched graph client. 
- """ - self.patcher = patch( - "datahub.ingestion.graph.client.DataHubGraph", autospec=True - ) - self.addCleanup(self.patcher.stop) - self.mock_graph = self.patcher.start() - # Make server stateful ingestion capable - self.mock_graph.get_config.return_value = {"statefulIngestionCapable": True} - # Bind mock_graph's emit_mcp to testcase's monkey_patch_emit_mcp so that we can emulate emits. - self.mock_graph.emit_mcp = types.MethodType( - self.monkey_patch_emit_mcp, self.mock_graph - ) - # Bind mock_graph's get_latest_timeseries_value to monkey_patch_get_latest_timeseries_value - self.mock_graph.get_latest_timeseries_value = types.MethodType( - self.monkey_patch_get_latest_timeseries_value, self.mock_graph - ) - # Tracking for emitted mcps. - self.mcps_emitted: Dict[str, MetadataChangeProposalWrapper] = {} - - def _create_provider(self) -> DatahubIngestionCheckpointingProvider: - ctx: PipelineContext = PipelineContext( - run_id=self.run_id, pipeline_name=self.pipeline_name - ) - ctx.graph = self.mock_graph - return DatahubIngestionCheckpointingProvider.create( - {}, ctx, name=DatahubIngestionCheckpointingProvider.__name__ - ) - - def monkey_patch_emit_mcp( - self, graph_ref: MagicMock, mcpw: MetadataChangeProposalWrapper - ) -> None: - """ - Mockey patched implementation of DatahubGraph.emit_mcp that caches the mcp locally in memory. - """ - self.assertIsNotNone(graph_ref) - if mcpw.aspectName != "status": - self.assertEqual(mcpw.entityType, "dataJob") - self.assertEqual(mcpw.aspectName, "datahubIngestionCheckpoint") - # Cache the mcpw against the entityUrn - assert mcpw.entityUrn is not None - self.mcps_emitted[mcpw.entityUrn] = mcpw - - def monkey_patch_get_latest_timeseries_value( - self, - graph_ref: MagicMock, - entity_urn: str, - aspect_type: Type[DictWrapper], - filter_criteria_map: Dict[str, str], - ) -> Optional[DictWrapper]: - """ - Monkey patched implementation of DatahubGraph.get_latest_timeseries_value that returns the latest cached aspect - for a given entity urn. - """ - self.assertIsNotNone(graph_ref) - self.assertEqual(aspect_type, CheckpointJobStateType) - self.assertEqual( - filter_criteria_map, - { - "pipelineName": self.pipeline_name, - }, - ) - # Retrieve the cached mcpw and return its aspect value. - mcpw = self.mcps_emitted.get(entity_urn) - if mcpw: - return mcpw.aspect - return None - - def test_provider(self): - # 1. Create the individual job checkpoints with appropriate states. - # Job1 - Checkpoint with a BaseSQLAlchemyCheckpointState state - job1_state_obj = BaseSQLAlchemyCheckpointState() - job1_checkpoint = Checkpoint( - job_name=self.job_names[0], - pipeline_name=self.pipeline_name, - run_id=self.run_id, - state=job1_state_obj, - ) - # Job2 - Checkpoint with a BaseTimeWindowCheckpointState state - job2_state_obj = BaseTimeWindowCheckpointState( - begin_timestamp_millis=10, end_timestamp_millis=100 - ) - job2_checkpoint = Checkpoint( - job_name=self.job_names[1], - pipeline_name=self.pipeline_name, - run_id=self.run_id, - state=job2_state_obj, - ) - - # 2. Set the provider's state_to_commit. - self.provider.state_to_commit = { - # NOTE: state_to_commit accepts only the aspect version of the checkpoint. - self.job_names[0]: assert_not_null( - job1_checkpoint.to_checkpoint_aspect(max_allowed_state_size=2**20) - ), - self.job_names[1]: assert_not_null( - job2_checkpoint.to_checkpoint_aspect(max_allowed_state_size=2**20) - ), - } - - # 3. Perform the commit - # NOTE: This will commit the state to the in-memory self.mcps_emitted because of the monkey-patching. 
- self.provider.commit() - self.assertTrue(self.provider.committed) - - # 4. Get last committed state. This must match what has been committed earlier. - # NOTE: This will retrieve from in-memory self.mcps_emitted because of the monkey-patching. - job1_last_state = self.provider.get_latest_checkpoint( - self.pipeline_name, self.job_names[0] - ) - job2_last_state = self.provider.get_latest_checkpoint( - self.pipeline_name, self.job_names[1] - ) - - # 5. Validate individual job checkpoint state values that have been committed and retrieved - # against the original values. - self.assertIsNotNone(job1_last_state) - job1_last_checkpoint = Checkpoint.create_from_checkpoint_aspect( - job_name=self.job_names[0], - checkpoint_aspect=job1_last_state, - state_class=type(job1_state_obj), - ) - self.assertEqual(job1_last_checkpoint, job1_checkpoint) - - self.assertIsNotNone(job2_last_state) - job2_last_checkpoint = Checkpoint.create_from_checkpoint_aspect( - job_name=self.job_names[1], - checkpoint_aspect=job2_last_state, - state_class=type(job2_state_obj), - ) - self.assertEqual(job2_last_checkpoint, job2_checkpoint) diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/provider/test_provider.py b/metadata-ingestion/tests/unit/stateful_ingestion/provider/test_provider.py new file mode 100644 index 0000000000000..4387e5a17790f --- /dev/null +++ b/metadata-ingestion/tests/unit/stateful_ingestion/provider/test_provider.py @@ -0,0 +1,183 @@ +import tempfile +import types +import unittest +from typing import Dict, List, Optional, Type +from unittest.mock import MagicMock, patch + +from avrogen.dict_wrapper import DictWrapper + +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.api.ingestion_job_checkpointing_provider_base import ( + CheckpointJobStateType, + IngestionCheckpointingProviderBase, + JobId, +) +from datahub.ingestion.source.state.checkpoint import Checkpoint +from datahub.ingestion.source.state.sql_common_state import ( + BaseSQLAlchemyCheckpointState, +) +from datahub.ingestion.source.state.usage_common_state import ( + BaseTimeWindowCheckpointState, +) +from datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider import ( + DatahubIngestionCheckpointingProvider, +) +from datahub.ingestion.source.state_provider.file_ingestion_checkpointing_provider import ( + FileIngestionCheckpointingProvider, +) +from tests.test_helpers.type_helpers import assert_not_null + + +class TestIngestionCheckpointProviders(unittest.TestCase): + # Static members for the tests + pipeline_name: str = "test_pipeline" + job_names: List[JobId] = [JobId("job1"), JobId("job2")] + run_id: str = "test_run" + + def setUp(self) -> None: + self._setup_mock_graph() + self._create_providers() + + def _setup_mock_graph(self) -> None: + """ + Setup monkey-patched graph client. + """ + self.patcher = patch( + "datahub.ingestion.graph.client.DataHubGraph", autospec=True + ) + self.addCleanup(self.patcher.stop) + self.mock_graph = self.patcher.start() + # Make server stateful ingestion capable + self.mock_graph.get_config.return_value = {"statefulIngestionCapable": True} + # Bind mock_graph's emit_mcp to testcase's monkey_patch_emit_mcp so that we can emulate emits. 
+        self.mock_graph.emit_mcp = types.MethodType(
+            self.monkey_patch_emit_mcp, self.mock_graph
+        )
+        # Bind mock_graph's get_latest_timeseries_value to monkey_patch_get_latest_timeseries_value
+        self.mock_graph.get_latest_timeseries_value = types.MethodType(
+            self.monkey_patch_get_latest_timeseries_value, self.mock_graph
+        )
+        # Tracking for emitted mcps.
+        self.mcps_emitted: Dict[str, MetadataChangeProposalWrapper] = {}
+
+    def _create_providers(self) -> None:
+        ctx: PipelineContext = PipelineContext(
+            run_id=self.run_id, pipeline_name=self.pipeline_name
+        )
+        ctx.graph = self.mock_graph
+        self.providers: List[IngestionCheckpointingProviderBase] = [
+            DatahubIngestionCheckpointingProvider.create({}, ctx),
+            FileIngestionCheckpointingProvider.create(
+                {"filename": f"{tempfile.mkdtemp()}/checkpoint_mces.json"},
+                ctx,
+            ),
+        ]
+
+    def monkey_patch_emit_mcp(
+        self, graph_ref: MagicMock, mcpw: MetadataChangeProposalWrapper
+    ) -> None:
+        """
+        Monkey-patched implementation of DatahubGraph.emit_mcp that caches the mcp locally in memory.
+        """
+        self.assertIsNotNone(graph_ref)
+        if mcpw.aspectName != "status":
+            self.assertEqual(mcpw.entityType, "dataJob")
+            self.assertEqual(mcpw.aspectName, "datahubIngestionCheckpoint")
+        # Cache the mcpw against the entityUrn
+        assert mcpw.entityUrn is not None
+        self.mcps_emitted[mcpw.entityUrn] = mcpw
+
+    def monkey_patch_get_latest_timeseries_value(
+        self,
+        graph_ref: MagicMock,
+        entity_urn: str,
+        aspect_type: Type[DictWrapper],
+        filter_criteria_map: Dict[str, str],
+    ) -> Optional[DictWrapper]:
+        """
+        Monkey-patched implementation of DatahubGraph.get_latest_timeseries_value that returns the latest cached aspect
+        for a given entity urn.
+        """
+        self.assertIsNotNone(graph_ref)
+        self.assertEqual(aspect_type, CheckpointJobStateType)
+        self.assertEqual(
+            filter_criteria_map,
+            {
+                "pipelineName": self.pipeline_name,
+            },
+        )
+        # Retrieve the cached mcpw and return its aspect value.
+        mcpw = self.mcps_emitted.get(entity_urn)
+        if mcpw:
+            return mcpw.aspect
+        return None
+
+    def test_providers(self):
+        self.assertEqual(len(self.providers), 2)
+        for provider in self.providers:
+            assert provider
+            # 1. Create the individual job checkpoints with appropriate states.
+            # Job1 - Checkpoint with a BaseSQLAlchemyCheckpointState state
+            job1_state_obj = BaseSQLAlchemyCheckpointState()
+            job1_checkpoint = Checkpoint(
+                job_name=self.job_names[0],
+                pipeline_name=self.pipeline_name,
+                run_id=self.run_id,
+                state=job1_state_obj,
+            )
+            # Job2 - Checkpoint with a BaseTimeWindowCheckpointState state
+            job2_state_obj = BaseTimeWindowCheckpointState(
+                begin_timestamp_millis=10, end_timestamp_millis=100
+            )
+            job2_checkpoint = Checkpoint(
+                job_name=self.job_names[1],
+                pipeline_name=self.pipeline_name,
+                run_id=self.run_id,
+                state=job2_state_obj,
+            )
+
+            # 2. Set the provider's state_to_commit.
+            provider.state_to_commit = {
+                # NOTE: state_to_commit accepts only the aspect version of the checkpoint.
+                self.job_names[0]: assert_not_null(
+                    job1_checkpoint.to_checkpoint_aspect(max_allowed_state_size=2**20)
+                ),
+                self.job_names[1]: assert_not_null(
+                    job2_checkpoint.to_checkpoint_aspect(max_allowed_state_size=2**20)
+                ),
+            }
+
+            # 3. Perform the commit
+            # NOTE: This commits the state to the in-memory self.mcps_emitted
+            # (via the monkey-patching) for the DataHub checkpointing provider,
+            # and to a JSON file in a temp directory for the file checkpointing provider.
+            provider.commit()
+            self.assertTrue(provider.committed)
+
+            # 4. Get last committed state. This must match what has been committed earlier.
+            # NOTE: This will retrieve the state from wherever it was committed.
+            job1_last_state = provider.get_latest_checkpoint(
+                self.pipeline_name, self.job_names[0]
+            )
+            job2_last_state = provider.get_latest_checkpoint(
+                self.pipeline_name, self.job_names[1]
+            )
+
+            # 5. Validate individual job checkpoint state values that have been committed and retrieved
+            # against the original values.
+            self.assertIsNotNone(job1_last_state)
+            job1_last_checkpoint = Checkpoint.create_from_checkpoint_aspect(
+                job_name=self.job_names[0],
+                checkpoint_aspect=job1_last_state,
+                state_class=type(job1_state_obj),
+            )
+            self.assertEqual(job1_last_checkpoint, job1_checkpoint)
+
+            self.assertIsNotNone(job2_last_state)
+            job2_last_checkpoint = Checkpoint.create_from_checkpoint_aspect(
+                job_name=self.job_names[1],
+                checkpoint_aspect=job2_last_state,
+                state_class=type(job2_state_obj),
+            )
+            self.assertEqual(job2_last_checkpoint, job2_checkpoint)
diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state.json b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state.json
new file mode 100644
index 0000000000000..4e62492918bfb
--- /dev/null
+++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state.json
@@ -0,0 +1,26 @@
+[
+{
+    "entityType": "dataJob",
+    "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(file,dummy_stateful,prod),default_stale_entity_removal)",
+    "changeType": "UPSERT",
+    "aspectName": "datahubIngestionCheckpoint",
+    "aspect": {
+        "json": {
+            "timestampMillis": 1586847600000,
+            "partitionSpec": {
+                "type": "FULL_TABLE",
+                "partition": "FULL_TABLE_SNAPSHOT"
+            },
+            "pipelineName": "dummy_stateful",
+            "platformInstanceId": "",
+            "config": "",
+            "state": {
+                "formatVersion": "1.0",
+                "serde": "base85-bz2-json",
+                "payload": "LRx4!F+o`-Q(1w>5G4QrYoCBnWH=B60MH7jr`{?c0BA?5L)2-AGyu>6y;V<9hz%Mv0Bt1*)lOMzr>a0|Iq-4VtTsYONQsFPLn1EpdQS;HIy|&CvSAlRvAJwmtCEM+Rx(v_)~sVvkx3V@WX4O`=losC6yZWb2OL0@"
+            },
+            "runId": "dummy-test-stateful-ingestion"
+        }
+    }
+}
+]
\ No newline at end of file
diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state_after_deleted.json b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state_after_deleted.json
new file mode 100644
index 0000000000000..6ecd43483d948
--- /dev/null
+++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_checkpoint_state_after_deleted.json
@@ -0,0 +1,26 @@
+[
+{
+    "entityType": "dataJob",
+    "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(file,dummy_stateful,prod),default_stale_entity_removal)",
+    "changeType": "UPSERT",
+    "aspectName": "datahubIngestionCheckpoint",
+    "aspect": {
+        "json": {
+            "timestampMillis": 1586847600000,
+            "partitionSpec": {
+                "type": "FULL_TABLE",
+                "partition": "FULL_TABLE_SNAPSHOT"
+            },
+            "pipelineName": "dummy_stateful",
+            "platformInstanceId": "",
+            "config": "",
+            "state": {
+                "formatVersion": "1.0",
+                "serde": "base85-bz2-json",
+                "payload": "LRx4!F+o`-Q(317h`0a%NgsevWH1l}0MH7jr`{?c0B9vdZ9%mLfYG4P6;f$2G%+v`9z&~6n|e(JEPC2_Iix~CA_im)jR-zsjEK*yo|HQz#IUUHtf@DYVEme-lUW9{Xmmt~y^2jCdyY95az!{$kf#WUxB"
+            },
+            "runId": "dummy-test-stateful-ingestion"
+        }
+    }
+}
+]
\ No newline at end of file
diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion.json b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion.json
new file mode 100644
index
0000000000000..4a77651c93066 --- /dev/null +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion.json @@ -0,0 +1,50 @@ +[ +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset1,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "dummy-test-stateful-ingestion", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset2,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "dummy-test-stateful-ingestion", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset3,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "dummy-test-stateful-ingestion", + "lastRunId": "no-run-id-provided" + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion_after_deleted.json b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion_after_deleted.json new file mode 100644 index 0000000000000..9d6f755374462 --- /dev/null +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/golden_test_stateful_ingestion_after_deleted.json @@ -0,0 +1,50 @@ +[ +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset1,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "dummy-test-stateful-ingestion", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset2,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "dummy-test-stateful-ingestion", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset3,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": true + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "dummy-test-stateful-ingestion", + "lastRunId": "no-run-id-provided" + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_stateful_ingestion.py b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_stateful_ingestion.py new file mode 100644 index 0000000000000..2b811d5e5e3a3 --- /dev/null +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_stateful_ingestion.py @@ -0,0 +1,227 @@ +from dataclasses import dataclass, field as dataclass_field +from typing import Any, Dict, Iterable, List, Optional, cast + +import pydantic +from freezegun import freeze_time +from pydantic import Field + +from datahub.configuration.common import AllowDenyPattern +from datahub.configuration.source_common import DEFAULT_ENV, DatasetSourceConfigMixin +from 
datahub.emitter.mcp import MetadataChangeProposalWrapper
+from datahub.ingestion.api.common import PipelineContext
+from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceReport
+from datahub.ingestion.api.workunit import MetadataWorkUnit
+from datahub.ingestion.run.pipeline import Pipeline
+from datahub.ingestion.source.state.entity_removal_state import GenericCheckpointState
+from datahub.ingestion.source.state.stale_entity_removal_handler import (
+    StaleEntityRemovalHandler,
+    StaleEntityRemovalSourceReport,
+    StatefulStaleMetadataRemovalConfig,
+)
+from datahub.ingestion.source.state.stateful_ingestion_base import (
+    StatefulIngestionConfigBase,
+    StatefulIngestionSourceBase,
+)
+from datahub.metadata.schema_classes import StatusClass
+from datahub.utilities.urns.dataset_urn import DatasetUrn
+from tests.test_helpers import mce_helpers
+from tests.test_helpers.state_helpers import (
+    get_current_checkpoint_from_pipeline,
+    validate_all_providers_have_committed_successfully,
+)
+
+FROZEN_TIME = "2020-04-14 07:00:00"
+
+dummy_datasets: List = ["dummy_dataset1", "dummy_dataset2", "dummy_dataset3"]
+
+
+@dataclass
+class DummySourceReport(StaleEntityRemovalSourceReport):
+    datasets_scanned: int = 0
+    filtered_datasets: List[str] = dataclass_field(default_factory=list)
+
+    def report_datasets_scanned(self, count: int = 1) -> None:
+        self.datasets_scanned += count
+
+    def report_datasets_dropped(self, model: str) -> None:
+        self.filtered_datasets.append(model)
+
+
+class DummySourceConfig(StatefulIngestionConfigBase, DatasetSourceConfigMixin):
+    dataset_patterns: AllowDenyPattern = Field(
+        default=AllowDenyPattern.allow_all(),
+        description="Regex patterns for datasets to filter in ingestion.",
+    )
+    # Configuration for stateful ingestion
+    stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = pydantic.Field(
+        default=None, description="Dummy source Ingestion Config."
+    )
+
+
+class DummySource(StatefulIngestionSourceBase):
+    """
+    This is a dummy source which only extracts dummy datasets.
+    """
+
+    source_config: DummySourceConfig
+    reporter: DummySourceReport
+
+    def __init__(self, config: DummySourceConfig, ctx: PipelineContext):
+        super(DummySource, self).__init__(config, ctx)
+        self.source_config = config
+        self.reporter = DummySourceReport()
+        # Create and register the stateful ingestion use-case handler.
+ self.stale_entity_removal_handler = StaleEntityRemovalHandler.create( + self, self.source_config, self.ctx + ) + + @classmethod + def create(cls, config_dict, ctx): + config = DummySourceConfig.parse_obj(config_dict) + return cls(config, ctx) + + def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: + return [ + *super().get_workunit_processors(), + self.stale_entity_removal_handler.workunit_processor, + ] + + def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: + for dataset in dummy_datasets: + if not self.source_config.dataset_patterns.allowed(dataset): + self.reporter.report_datasets_dropped(dataset) + continue + else: + self.reporter.report_datasets_scanned() + dataset_urn = DatasetUrn.create_from_ids( + platform_id="postgres", + table_name=dataset, + env=DEFAULT_ENV, + ) + yield MetadataChangeProposalWrapper( + entityUrn=str(dataset_urn), + aspect=StatusClass(removed=False), + ).as_workunit() + + def get_report(self) -> SourceReport: + return self.reporter + + +@freeze_time(FROZEN_TIME) +def test_stateful_ingestion(pytestconfig, tmp_path, mock_time): + # test stateful ingestion using dummy source + state_file_name: str = "checkpoint_state_mces.json" + golden_state_file_name: str = "golden_test_checkpoint_state.json" + golden_state_file_name_after_deleted: str = ( + "golden_test_checkpoint_state_after_deleted.json" + ) + output_file_name: str = "dummy_mces.json" + golden_file_name: str = "golden_test_stateful_ingestion.json" + output_file_name_after_deleted: str = "dummy_mces_stateful_after_deleted.json" + golden_file_name_after_deleted: str = ( + "golden_test_stateful_ingestion_after_deleted.json" + ) + + test_resources_dir = pytestconfig.rootpath / "tests/unit/stateful_ingestion/state" + + base_pipeline_config = { + "run_id": "dummy-test-stateful-ingestion", + "pipeline_name": "dummy_stateful", + "source": { + "type": "tests.unit.stateful_ingestion.state.test_stateful_ingestion.DummySource", + "config": { + "stateful_ingestion": { + "enabled": True, + "remove_stale_metadata": True, + "state_provider": { + "type": "file", + "config": { + "filename": f"{tmp_path}/{state_file_name}", + }, + }, + }, + }, + }, + "sink": { + "type": "file", + "config": {}, + }, + } + + pipeline_run1 = None + pipeline_run1_config: Dict[str, Dict[str, Dict[str, Any]]] = dict( # type: ignore + base_pipeline_config # type: ignore + ) + pipeline_run1_config["sink"]["config"][ + "filename" + ] = f"{tmp_path}/{output_file_name}" + pipeline_run1 = Pipeline.create(pipeline_run1_config) + pipeline_run1.run() + pipeline_run1.raise_from_status() + pipeline_run1.pretty_print_summary() + + # validate both dummy source mces and checkpoint state mces files + mce_helpers.check_golden_file( + pytestconfig, + output_path=tmp_path / output_file_name, + golden_path=f"{test_resources_dir}/{golden_file_name}", + ) + mce_helpers.check_golden_file( + pytestconfig, + output_path=tmp_path / state_file_name, + golden_path=f"{test_resources_dir}/{golden_state_file_name}", + ) + checkpoint1 = get_current_checkpoint_from_pipeline(pipeline_run1) + assert checkpoint1 + assert checkpoint1.state + + pipeline_run2 = None + pipeline_run2_config: Dict[str, Dict[str, Dict[str, Any]]] = dict(base_pipeline_config) # type: ignore + pipeline_run2_config["source"]["config"]["dataset_patterns"] = { + "allow": ["dummy_dataset1", "dummy_dataset2"], + } + pipeline_run2_config["sink"]["config"][ + "filename" + ] = f"{tmp_path}/{output_file_name_after_deleted}" + pipeline_run2 = Pipeline.create(pipeline_run2_config) 
+    pipeline_run2.run()
+    pipeline_run2.raise_from_status()
+    pipeline_run2.pretty_print_summary()
+
+    # validate both updated dummy source mces and checkpoint state mces files
+    # after the dataset is dropped
+    mce_helpers.check_golden_file(
+        pytestconfig,
+        output_path=tmp_path / output_file_name_after_deleted,
+        golden_path=f"{test_resources_dir}/{golden_file_name_after_deleted}",
+    )
+    mce_helpers.check_golden_file(
+        pytestconfig,
+        output_path=tmp_path / state_file_name,
+        golden_path=f"{test_resources_dir}/{golden_state_file_name_after_deleted}",
+    )
+    checkpoint2 = get_current_checkpoint_from_pipeline(pipeline_run2)
+    assert checkpoint2
+    assert checkpoint2.state
+
+    # Validate that all providers have committed successfully.
+    validate_all_providers_have_committed_successfully(
+        pipeline=pipeline_run1, expected_providers=1
+    )
+    validate_all_providers_have_committed_successfully(
+        pipeline=pipeline_run2, expected_providers=1
+    )
+
+    # Perform all assertions on the states. The dropped dataset should not be
+    # part of the second state.
+    state1 = cast(GenericCheckpointState, checkpoint1.state)
+    state2 = cast(GenericCheckpointState, checkpoint2.state)
+
+    difference_dataset_urns = list(
+        state1.get_urns_not_in(type="dataset", other_checkpoint_state=state2)
+    )
+    # The difference in dataset urns is exactly the dataset that the second run
+    # was not allowed to ingest.
+    assert len(difference_dataset_urns) == 1
+    deleted_dataset_urns: List[str] = [
+        "urn:li:dataset:(urn:li:dataPlatform:postgres,dummy_dataset3,PROD)",
+    ]
+    assert sorted(deleted_dataset_urns) == sorted(difference_dataset_urns)
diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/test_configs.py b/metadata-ingestion/tests/unit/stateful_ingestion/test_configs.py
index 9edfe8c4a957b..0e6d60e3440b2 100644
--- a/metadata-ingestion/tests/unit/stateful_ingestion/test_configs.py
+++ b/metadata-ingestion/tests/unit/stateful_ingestion/test_configs.py
@@ -3,9 +3,10 @@
 import pytest
 from pydantic import ValidationError
 
-from datahub.configuration.common import ConfigModel, DynamicTypedConfig
+from datahub.configuration.common import ConfigModel
 from datahub.ingestion.graph.client import DatahubClientConfig
 from datahub.ingestion.source.state.stateful_ingestion_base import (
+    DynamicTypedStateProviderConfig,
     StatefulIngestionConfig,
 )
 from datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider import (
@@ -23,7 +24,6 @@
     },
     "simple": {},
     "default": {},
-    "none": None,
 }
 
 
@@ -81,13 +81,6 @@
         ),
         False,
     ),
-    # None
-    "checkpointing_bad_config": (
-        DatahubIngestionStateProviderConfig,
-        datahub_client_configs["none"],
-        None,
-        True,
-    ),
 }
 
 
@@ -119,7 +112,7 @@
             max_checkpoint_state_size=1024,
             ignore_old_state=True,
             ignore_new_state=True,
-            state_provider=DynamicTypedConfig(
+            state_provider=DynamicTypedStateProviderConfig(
                 type="datahub",
                 config=datahub_client_configs["full"],
             ),
@@ -148,7 +141,7 @@
             max_checkpoint_state_size=2**24,
             ignore_old_state=False,
             ignore_new_state=False,
-            state_provider=DynamicTypedConfig(type="datahub", config=None),
+            state_provider=DynamicTypedStateProviderConfig(type="datahub"),
         ),
         False,
     ),

From bb7300251f6d65024b3440379d8eef3e2413a2b0 Mon Sep 17 00:00:00 2001
From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com>
Date: Sat, 11 Nov 2023 05:29:18 +0530
Subject: [PATCH 091/792] feat(ingestion/airflow): support datajobs as task inlets (#9211)

Co-authored-by: Harshal Sheth
---
 .../datahub_listener.py                       | 14 ++--
 .../src/datahub_airflow_plugin/entities.py    | 28 ++++++--
.../example_dags/lineage_backend_demo.py | 1 + .../lineage_backend_taskflow_demo.py | 1 + .../lineage/_lineage_core.py | 18 ++--- .../tests/integration/dags/basic_iolets.py | 1 + .../tests/integration/dags/simple_dag.py | 1 + .../integration/goldens/v1_basic_iolets.json | 56 ++++++++++++--- .../integration/goldens/v1_simple_dag.json | 70 ++++++++++++++----- .../integration/goldens/v2_basic_iolets.json | 26 ++++--- .../v2_basic_iolets_no_dag_listener.json | 24 ++++--- .../integration/goldens/v2_simple_dag.json | 38 +++++----- .../v2_simple_dag_no_dag_listener.json | 32 +++++---- .../goldens/v2_snowflake_operator.json | 2 +- .../goldens/v2_sqlite_operator.json | 2 +- .../v2_sqlite_operator_no_dag_listener.json | 40 +++++------ .../airflow-plugin/tests/unit/test_airflow.py | 31 ++++++-- 17 files changed, 261 insertions(+), 124 deletions(-) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index a3f5cb489e29f..d00b10bbe1756 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -17,7 +17,6 @@ ) from datahub.telemetry import telemetry from datahub.utilities.sqlglot_lineage import SqlParsingResult -from datahub.utilities.urns.dataset_urn import DatasetUrn from openlineage.airflow.listener import TaskHolder from openlineage.airflow.utils import redact_with_exclusions from openlineage.client.serde import Serde @@ -32,7 +31,11 @@ from datahub_airflow_plugin._datahub_ol_adapter import translate_ol_to_datahub_urn from datahub_airflow_plugin._extractors import SQL_PARSING_RESULT_KEY, ExtractorManager from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator -from datahub_airflow_plugin.entities import _Entity +from datahub_airflow_plugin.entities import ( + _Entity, + entities_to_datajob_urn_list, + entities_to_dataset_urn_list, +) _F = TypeVar("_F", bound=Callable[..., None]) if TYPE_CHECKING: @@ -272,10 +275,9 @@ def _extract_lineage( ) # Write the lineage to the datajob object. 
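+        # Inlets may now contain both dataset urns and upstream datajob urns;
+        # split them so that datasets become inlets while upstream datajobs are
+        # recorded as upstream_urns on the datajob.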
- datajob.inlets.extend(DatasetUrn.create_from_string(urn) for urn in input_urns) - datajob.outlets.extend( - DatasetUrn.create_from_string(urn) for urn in output_urns - ) + datajob.inlets.extend(entities_to_dataset_urn_list(input_urns)) + datajob.outlets.extend(entities_to_dataset_urn_list(output_urns)) + datajob.upstream_urns.extend(entities_to_datajob_urn_list(input_urns)) datajob.fine_grained_lineages.extend(fine_grained_lineages) # Merge in extra stuff that was present in the DataJob we constructed diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py index 69f667cad3241..5a4bcb0097a8c 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py @@ -1,8 +1,10 @@ from abc import abstractmethod -from typing import Optional +from typing import List, Optional import attr import datahub.emitter.mce_builder as builder +from datahub.utilities.urns.data_job_urn import DataJobUrn +from datahub.utilities.urns.dataset_urn import DatasetUrn from datahub.utilities.urns.urn import guess_entity_type @@ -38,10 +40,28 @@ class Urn(_Entity): def _validate_urn(self, attribute, value): if not value.startswith("urn:"): raise ValueError("invalid urn provided: urns must start with 'urn:'") - if guess_entity_type(value) != "dataset": - # This is because DataJobs only support Dataset lineage. - raise ValueError("Airflow lineage currently only supports datasets") + if guess_entity_type(value) not in ["dataset", "dataJob"]: + # This is because DataJobs only support Dataset and upstream Datajob lineage. + raise ValueError( + "Airflow lineage currently only supports datasets and upstream datajobs" + ) @property def urn(self): return self._urn + + +def entities_to_dataset_urn_list(iolets: List[str]) -> List[DatasetUrn]: + dataset_urn_list: List[DatasetUrn] = [] + for let in iolets: + if guess_entity_type(let) == "dataset": + dataset_urn_list.append(DatasetUrn.create_from_string(let)) + return dataset_urn_list + + +def entities_to_datajob_urn_list(inlets: List[str]) -> List[DataJobUrn]: + datajob_urn_list: List[DataJobUrn] = [] + for let in inlets: + if guess_entity_type(let) == "dataJob": + datajob_urn_list.append(DataJobUrn.create_from_string(let)) + return datajob_urn_list diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py index 3caea093b932d..ce161d6a415e9 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py @@ -46,6 +46,7 @@ Urn( "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ), + Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag1,prod),task1)"), ], outlets=[Dataset("snowflake", "mydb.schema.tableD")], ) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py index ceb0f452b540a..80df7053a49f9 100644 --- 
a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py @@ -37,6 +37,7 @@ def datahub_lineage_backend_taskflow_demo(): Urn( "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ), + Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag1,prod),task1)"), ], outlets=[Dataset("snowflake", "mydb.schema.tableD")], ) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py index f5f519fa23b11..75fc79443e49e 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py @@ -2,11 +2,14 @@ from typing import TYPE_CHECKING, Dict, List from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult -from datahub.utilities.urns.dataset_urn import DatasetUrn from datahub_airflow_plugin._config import DatahubLineageConfig from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator -from datahub_airflow_plugin.entities import _Entity +from datahub_airflow_plugin.entities import ( + _Entity, + entities_to_datajob_urn_list, + entities_to_dataset_urn_list, +) if TYPE_CHECKING: from airflow import DAG @@ -16,10 +19,6 @@ from datahub_airflow_plugin._airflow_shims import Operator -def _entities_to_urn_list(iolets: List[_Entity]) -> List[DatasetUrn]: - return [DatasetUrn.create_from_string(let.urn) for let in iolets] - - def send_lineage_to_datahub( config: DatahubLineageConfig, operator: "Operator", @@ -53,8 +52,11 @@ def send_lineage_to_datahub( capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - datajob.inlets.extend(_entities_to_urn_list(inlets)) - datajob.outlets.extend(_entities_to_urn_list(outlets)) + datajob.inlets.extend(entities_to_dataset_urn_list([let.urn for let in inlets])) + datajob.outlets.extend(entities_to_dataset_urn_list([let.urn for let in outlets])) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in inlets]) + ) datajob.emit(emitter) operator.log.info(f"Emitted from Lineage: {datajob}") diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py index 8b0803ab98422..11b3731c52bca 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py @@ -26,6 +26,7 @@ Urn( "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ), + Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)"), ], outlets=[ Dataset("snowflake", "mydb.schema.tableD"), diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py index 1dd047f0a6dcc..71b462159ac60 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py @@ -21,6 +21,7 @@ Urn( "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ), + 
Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)"), ], outlets=[Dataset("snowflake", "mydb.schema.tableD")], ) diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json index a4c17c73e9c7e..6b460e99b1f28 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -95,7 +95,8 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -150,6 +151,17 @@ } } }, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -245,7 +257,8 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -300,6 +313,17 @@ } } }, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -365,9 +389,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.176536", - "start_date": "2023-09-30 00:49:56.670239+00:00", - "end_date": "2023-09-30 00:49:56.846775+00:00", + "duration": "0.143271", + "start_date": "2023-11-08 09:55:05.801617+00:00", + "end_date": "2023-11-08 09:55:05.944888+00:00", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -384,7 +408,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696034996670, + "time": 1699437305801, "actor": "urn:li:corpuser:datahub" } } @@ -413,7 +437,8 @@ 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" ] } } @@ -476,6 +501,17 @@ } } }, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -505,7 +541,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696034996670, + "timestampMillis": 1699437305801, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -522,7 +558,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696034996846, + "timestampMillis": 1699437305944, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json index a0a95716a0993..7ec172e3678dc 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -94,7 +94,8 @@ "json": { "inputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" @@ -126,6 +127,17 @@ } } }, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -208,7 +220,8 @@ "json": { "inputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" @@ -240,6 +253,17 @@ } } }, +{ + "entityType": "dataJob", + "entityUrn": 
"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -294,9 +318,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.175983", - "start_date": "2023-09-30 00:48:58.943850+00:00", - "end_date": "2023-09-30 00:48:59.119833+00:00", + "duration": "0.120524", + "start_date": "2023-11-08 09:54:06.065112+00:00", + "end_date": "2023-11-08 09:54:06.185636+00:00", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -313,7 +337,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696034938943, + "time": 1699437246065, "actor": "urn:li:corpuser:datahub" } } @@ -340,7 +364,8 @@ "json": { "inputs": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" ] } } @@ -380,6 +405,17 @@ } } }, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -398,7 +434,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696034938943, + "timestampMillis": 1699437246065, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -415,7 +451,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696034939119, + "timestampMillis": 1699437246185, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -440,7 +476,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -651,9 +687,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.129888", - "start_date": "2023-09-30 00:49:02.158752+00:00", - "end_date": "2023-09-30 00:49:02.288640+00:00", + "duration": "0.099975", + "start_date": "2023-11-08 09:54:09.744583+00:00", + "end_date": "2023-11-08 09:54:09.844558+00:00", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -670,7 +706,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696034942158, + "time": 1699437249744, "actor": "urn:li:corpuser:datahub" } } @@ -695,7 +731,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696034942158, + "timestampMillis": 1699437249744, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -712,7 +748,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - 
"timestampMillis": 1696034942288, + "timestampMillis": 1699437249844, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json index 1974f1f085df0..6767a368f366a 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -73,9 +73,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "[]", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, 
\"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", 
\"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -102,7 +102,9 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -217,7 +219,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-09-30 01:13:14.266272+00:00", + "start_date": "2023-10-30 13:07:55.311482+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -235,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696036394266, + "time": 1698671275311, "actor": "urn:li:corpuser:datahub" } } @@ -356,7 +358,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696036394266, + "timestampMillis": 1698671275311, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -383,9 +385,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "[]", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", 
\"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], 
\"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -412,7 +414,9 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -524,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696036394833, + "timestampMillis": 1698671276777, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json index d02951bc9e82d..63b0a05935554 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json @@ -73,9 +73,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "[]", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', 
env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": 
\"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -102,7 +102,9 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -217,7 +219,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-09-30 06:59:52.401211+00:00", + "start_date": "2023-11-10 19:11:17.444435+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -235,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696057192401, + "time": 1699643477444, "actor": "urn:li:corpuser:datahub" } } @@ -356,7 +358,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696057192401, + "timestampMillis": 1699643477444, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -383,9 +385,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "[]", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), 
Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": 
\"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -412,7 +414,9 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -524,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696057192982, + "timestampMillis": 1699643478123, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json index 9acc47ec1321e..c558f79c32e15 100644 --- 
a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -74,9 +74,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "['run_another_data_task']", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": 
\"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -100,7 +100,9 @@ "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -182,7 +184,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-09-30 06:53:58.219003+00:00", + "start_date": "2023-10-30 13:06:07.193282+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -200,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696056838219, + "time": 1698671167193, "actor": "urn:li:corpuser:datahub" } } @@ -285,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056838219, + "timestampMillis": 1698671167193, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -312,9 +314,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "['run_another_data_task']", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', 
name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": 
true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -338,7 +340,9 @@ "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -417,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056838648, + "timestampMillis": 1698671168726, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -449,7 +453,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", 
\"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -519,7 +523,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-09-30 06:54:02.407515+00:00", + "start_date": "2023-10-30 13:06:19.970466+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -537,7 +541,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696056842407, + "time": 1698671179970, "actor": "urn:li:corpuser:datahub" } } @@ -562,7 +566,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056842407, + "timestampMillis": 1698671179970, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -591,7 +595,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": 
false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -658,7 +662,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056842831, + "timestampMillis": 1698671180730, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json index 03299c483f57f..ec0f3cab1e81f 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json @@ -74,9 +74,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "['run_another_data_task']", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", 
\"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", 
\"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -100,7 +100,9 @@ "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -182,7 +184,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-09-30 06:58:56.105026+00:00", + "start_date": "2023-11-10 19:10:10.856995+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -200,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696057136105, + "time": 1699643410856, "actor": "urn:li:corpuser:datahub" } } @@ -285,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696057136105, + "timestampMillis": 1699643410856, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -312,9 +314,9 @@ "trigger_rule": "", "wait_for_downstream": "False", "downstream_task_ids": "['run_another_data_task']", - "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)')]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": 
\"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_exit_code\": 99, \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -338,7 +340,9 @@ "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -417,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696057136612, + "timestampMillis": 1699643411390, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -577,7 +581,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-09-30 06:58:59.567004+00:00", + "start_date": "2023-11-10 19:10:15.128009+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -595,7 +599,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696057139567, + "time": 1699643415128, "actor": 
"urn:li:corpuser:datahub" } } @@ -620,7 +624,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696057139567, + "timestampMillis": 1699643415128, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -716,7 +720,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696057140164, + "timestampMillis": 1699643415856, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json index 11a0b17b45b95..0a704ed10c911 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/snowflake_operator.py'", + "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/snowflake_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json index 19e4aac9fb95e..3b4b60174f99f 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json index b67464b385335..99a8aadb7fd9c 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json @@ -194,7 +194,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-10-15 20:27:26.883178+00:00", + "start_date": "2023-11-10 19:12:17.805860+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401646883, + "time": 1699643537805, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401646883, + "timestampMillis": 1699643537805, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ 
-442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401647826, + "timestampMillis": 1699643538759, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -615,7 +615,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-10-15 20:27:31.398799+00:00", + "start_date": "2023-11-10 19:12:22.560376+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -633,7 +633,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401651398, + "time": 1699643542560, "actor": "urn:li:corpuser:datahub" } } @@ -682,7 +682,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401651398, + "timestampMillis": 1699643542560, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -792,7 +792,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401652651, + "timestampMillis": 1699643543925, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1035,7 +1035,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-10-15 20:27:37.697995+00:00", + "start_date": "2023-11-10 19:12:29.429032+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -1053,7 +1053,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401657697, + "time": 1699643549429, "actor": "urn:li:corpuser:datahub" } } @@ -1126,7 +1126,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401657697, + "timestampMillis": 1699643549429, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1362,7 +1362,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401659496, + "timestampMillis": 1699643551423, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1537,7 +1537,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-10-15 20:27:45.670215+00:00", + "start_date": "2023-11-10 19:12:37.423556+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -1555,7 +1555,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401665670, + "time": 1699643557423, "actor": "urn:li:corpuser:datahub" } } @@ -1604,7 +1604,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401665670, + "timestampMillis": 1699643557423, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1716,7 +1716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401667670, + "timestampMillis": 1699643559607, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1891,7 +1891,7 @@ "customProperties": { "run_id": "manual_run_test", "duration": "None", - "start_date": "2023-10-15 20:27:51.559194+00:00", + "start_date": "2023-11-10 19:12:43.792375+00:00", "end_date": "None", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", @@ -1909,7 +1909,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 
1697401671559, + "time": 1699643563792, "actor": "urn:li:corpuser:datahub" } } @@ -1958,7 +1958,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401671559, + "timestampMillis": 1699643563792, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -2070,7 +2070,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401673788, + "timestampMillis": 1699643566350, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py index d8620e74d7e30..7fbf707995994 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py @@ -188,10 +188,17 @@ def test_entities(): == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" ) + assert ( + Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)").urn + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)" + ) + with pytest.raises(ValueError, match="invalid"): Urn("not a URN") - with pytest.raises(ValueError, match="only supports datasets"): + with pytest.raises( + ValueError, match="only supports datasets and upstream datajobs" + ): Urn("urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)") @@ -199,13 +206,19 @@ def test_entities(): ["inlets", "outlets", "capture_executions"], [ pytest.param( - [Dataset("snowflake", "mydb.schema.tableConsumed")], + [ + Dataset("snowflake", "mydb.schema.tableConsumed"), + Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)"), + ], [Dataset("snowflake", "mydb.schema.tableProduced")], False, id="airflow-lineage-no-executions", ), pytest.param( - [Dataset("snowflake", "mydb.schema.tableConsumed")], + [ + Dataset("snowflake", "mydb.schema.tableConsumed"), + Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)"), + ], [Dataset("snowflake", "mydb.schema.tableProduced")], True, id="airflow-lineage-capture-executions", @@ -293,9 +306,13 @@ def test_lineage_backend(mock_emit, inlets, outlets, capture_executions): # Verify that the inlets and outlets are registered and recognized by Airflow correctly, # or that our lineage backend forces it to. - assert len(op2.inlets) == 1 + assert len(op2.inlets) == 2 assert len(op2.outlets) == 1 - assert all(map(lambda let: isinstance(let, Dataset), op2.inlets)) + assert all( + map( + lambda let: isinstance(let, Dataset) or isinstance(let, Urn), op2.inlets + ) + ) assert all(map(lambda let: isinstance(let, Dataset), op2.outlets)) # Check that the right things were emitted. 
@@ -338,6 +355,10 @@ def test_lineage_backend(mock_emit, inlets, outlets, capture_executions): mock_emitter.method_calls[4].args[0].aspect.inputDatajobs[0] == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task1_upstream)" ) + assert ( + mock_emitter.method_calls[4].args[0].aspect.inputDatajobs[1] + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)" + ) assert ( mock_emitter.method_calls[4].args[0].aspect.inputDatasets[0] == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" From 0e3efabd2c19e24bcfb81602f897802be1cb1d06 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 10 Nov 2023 16:00:20 -0800 Subject: [PATCH 092/792] fix(build): set `@cliMajorVersion@` correctly (#9228) --- .github/workflows/build-and-test.yml | 6 +-- .github/workflows/check-datahub-jars.yml | 4 +- .github/workflows/code-checks.yml | 4 +- .github/workflows/docker-ingestion-smoke.yml | 4 +- .github/workflows/docker-postgres-setup.yml | 4 +- .github/workflows/docker-unified.yml | 52 +++++-------------- .github/workflows/publish-datahub-jars.yml | 4 +- .github/workflows/spark-smoke-test.yml | 5 +- metadata-service/configuration/build.gradle | 7 ++- .../src/main/resources/application.yml | 9 ++-- metadata-service/factories/build.gradle | 5 -- .../tests/read_only/test_services_up.py | 8 +++ 12 files changed, 39 insertions(+), 73 deletions(-) diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 25f3957e8f086..10c137a206531 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -27,7 +27,7 @@ jobs: command: [ # metadata-ingestion and airflow-plugin each have dedicated build jobs "except_metadata_ingestion", - "frontend" + "frontend", ] timezone: ["UTC", "America/New_York"] runs-on: ubuntu-latest @@ -36,9 +36,7 @@ jobs: - uses: szenius/set-timezone@v1.0 with: timezoneLinux: ${{ matrix.timezone }} - - uses: actions/checkout@v3 - with: - fetch-depth: 800 + - uses: hsheth2/sane-checkout-action@v1 - name: Set up JDK 11 uses: actions/setup-java@v3 with: diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 41f9ea91a94e2..8e507ea40fd96 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -27,9 +27,7 @@ jobs: command: ["datahub-client", "datahub-protobuf", "spark-lineage"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 800 + - uses: hsheth2/sane-checkout-action@v1 - name: Set up JDK 11 uses: actions/setup-java@v3 with: diff --git a/.github/workflows/code-checks.yml b/.github/workflows/code-checks.yml index e12971b8a6208..38f0946678034 100644 --- a/.github/workflows/code-checks.yml +++ b/.github/workflows/code-checks.yml @@ -31,9 +31,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-ingestion-smoke.yml b/.github/workflows/docker-ingestion-smoke.yml index 82b57d23609a5..803ddc6fcec75 100644 --- a/.github/workflows/docker-ingestion-smoke.yml +++ b/.github/workflows/docker-ingestion-smoke.yml @@ -50,9 +50,7 @@ jobs: if: ${{ needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Build and push uses: 
./.github/actions/docker-custom-build-and-push with: diff --git a/.github/workflows/docker-postgres-setup.yml b/.github/workflows/docker-postgres-setup.yml index fda4349f90bf7..e4d6cfc106f81 100644 --- a/.github/workflows/docker-postgres-setup.yml +++ b/.github/workflows/docker-postgres-setup.yml @@ -46,9 +46,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 5f5a62de6288c..18cb946b951dd 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -74,9 +74,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image run: | ./gradlew :metadata-service:war:build -x test --parallel @@ -132,9 +130,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image run: | ./gradlew :metadata-jobs:mae-consumer-job:build -x test --parallel @@ -190,9 +186,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image run: | ./gradlew :metadata-jobs:mce-consumer-job:build -x test --parallel @@ -248,9 +242,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-upgrade:build -x test --parallel @@ -306,9 +298,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel @@ -366,9 +356,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: @@ -388,9 +376,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: @@ -410,9 +396,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: @@ -434,9 +418,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 id: filter with: @@ -468,9 +450,7 @@ jobs: needs: [setup, datahub_ingestion_base_build] steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 id: filter with: @@ -510,9 +490,7 @@ jobs: needs: [setup, datahub_ingestion_base_build] steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + 
uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 id: filter with: @@ -554,9 +532,7 @@ jobs: needs: [setup, datahub_ingestion_base_slim_build] steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 id: filter with: @@ -637,9 +613,7 @@ jobs: needs: [setup, datahub_ingestion_base_full_build] steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 id: filter with: diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index 7cd07b130dd80..ec7985ef3b3d0 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -48,9 +48,7 @@ jobs: needs: ["check-secret", "setup"] if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 800 + - uses: hsheth2/sane-checkout-action@v1 - name: Set up JDK 11 uses: actions/setup-java@v3 with: diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 541b2019b93ef..70b66d6452b26 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -29,10 +29,7 @@ jobs: spark-smoke-test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 800 - fetch-tags: true + - uses: hsheth2/sane-checkout-action@v1 - name: Set up JDK 11 uses: actions/setup-java@v3 with: diff --git a/metadata-service/configuration/build.gradle b/metadata-service/configuration/build.gradle index bf79469633b0f..80cf6541261c2 100644 --- a/metadata-service/configuration/build.gradle +++ b/metadata-service/configuration/build.gradle @@ -1,6 +1,7 @@ plugins { id 'java' } +apply from: "../../gradle/versioning/versioning.gradle" dependencies { implementation externalDependency.jacksonDataBind @@ -12,4 +13,8 @@ dependencies { compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok -} \ No newline at end of file +} + +processResources.configure { + finalizedBy printVersionDetails // always print version details +} diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 46aa02d98572e..40674e13e647f 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -44,6 +44,7 @@ authorization: restApiAuthorization: ${REST_API_AUTHORIZATION_ENABLED:false} ingestion: + # The value of cliMajorVersion is substituted in by the processResources Gradle task. 
enabled: ${UI_INGESTION_ENABLED:true} defaultCliVersion: "${UI_INGESTION_DEFAULT_CLI_VERSION:@cliMajorVersion@}" maxSerializedStringLength: "${INGESTION_MAX_SERIALIZED_STRING_LENGTH:16000000}" # Indicates the maximum allowed JSON String length Jackson will handle, impacts the maximum size of ingested aspects @@ -74,7 +75,7 @@ datahub: plugin: pluginSecurityMode: ${PLUGIN_SECURITY_MODE:RESTRICTED} # Possible value RESTRICTED or LENIENT, default to RESTRICTED entityRegistry: - path: ${ENTITY_REGISTRY_PLUGIN_PATH:/etc/datahub/plugins/models} + path: ${ENTITY_REGISTRY_PLUGIN_PATH:/etc/datahub/plugins/models} retention: path: ${RETENTION_PLUGIN_PATH:/etc/datahub/plugins/retention} auth: @@ -280,14 +281,13 @@ updateIndices: ingestionScheduler: enabled: ${ENABLE_INGESTION_SCHEDULER_HOOK:true} # enable to execute ingestion scheduling - bootstrap: upgradeDefaultBrowsePaths: enabled: ${UPGRADE_DEFAULT_BROWSE_PATHS_ENABLED:false} # enable to run the upgrade to migrate legacy default browse paths to new ones backfillBrowsePathsV2: enabled: ${BACKFILL_BROWSE_PATHS_V2:false} # Enables running the backfill of browsePathsV2 upgrade step. There are concerns about the load of this step so hiding it behind a flag. Deprecating in favor of running through SystemUpdate reprocessDefaultBrowsePathsV2: - enabled: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:false} # reprocess V2 browse paths which were set to the default: {"path":[{"id":"Default"}]} + enabled: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:false} # reprocess V2 browse paths which were set to the default: {"path":[{"id":"Default"}]} policies: file: ${BOOTSTRAP_POLICIES_FILE:classpath:boot/policies.json} # eg for local file @@ -295,7 +295,6 @@ bootstrap: servlets: waitTimeout: ${BOOTSTRAP_SERVLETS_WAITTIMEOUT:60} # Total waiting time in seconds for servlets to initialize - systemUpdate: initialBackOffMs: ${BOOTSTRAP_SYSTEM_UPDATE_INITIAL_BACK_OFF_MILLIS:5000} maxBackOffs: ${BOOTSTRAP_SYSTEM_UPDATE_MAX_BACK_OFFS:50} @@ -371,4 +370,4 @@ cache: corpUserCredentials: 20 corpUserSettings: 20 -springdoc.api-docs.groups.enabled: true \ No newline at end of file +springdoc.api-docs.groups.enabled: true diff --git a/metadata-service/factories/build.gradle b/metadata-service/factories/build.gradle index f848a5e339781..2e99def17c3c5 100644 --- a/metadata-service/factories/build.gradle +++ b/metadata-service/factories/build.gradle @@ -1,5 +1,4 @@ apply plugin: 'java-library' -apply from: "../../gradle/versioning/versioning.gradle" dependencies { api project(':metadata-io') @@ -65,7 +64,3 @@ configurations.all{ exclude group: "commons-io", module:"commons-io" exclude group: "jline", module:"jline" } - -processResources.configure { - finalizedBy printVersionDetails // always print version details -} diff --git a/smoke-test/tests/read_only/test_services_up.py b/smoke-test/tests/read_only/test_services_up.py index e48df52bb9864..cbe92625f4689 100644 --- a/smoke-test/tests/read_only/test_services_up.py +++ b/smoke-test/tests/read_only/test_services_up.py @@ -23,3 +23,11 @@ def test_gms_config_accessible(): assert gms_config["versions"]["linkedin/datahub"]["version"] == DATAHUB_VERSION else: print("[WARN] TEST_DATAHUB_VERSION is not set") + + # Make sure that the default CLI version gets generated properly. + # While we don't want to hardcode the actual value, we can make + # sure it mostly looks like a version string. 
+ default_cli_version: str = gms_config["managedIngestion"]["defaultCliVersion"] + print(f"Default CLI version: {default_cli_version}") + assert not default_cli_version.startswith("@") + assert "." in default_cli_version From ebd2e2312bdd23a92bbf403a26f64194807f70e6 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 10 Nov 2023 22:10:00 -0600 Subject: [PATCH 093/792] fix(datahub-ingestion): remove old jars, sync pyspark version (#9217) --- docker/datahub-ingestion-base/build.gradle | 7 ++++--- docker/datahub-ingestion/Dockerfile | 16 ++++++++++++++-- docker/datahub-ingestion/pyspark_jars.sh | 22 ++++++++++++++++++++++ metadata-ingestion/setup.py | 4 ++-- 4 files changed, 42 insertions(+), 7 deletions(-) create mode 100755 docker/datahub-ingestion/pyspark_jars.sh diff --git a/docker/datahub-ingestion-base/build.gradle b/docker/datahub-ingestion-base/build.gradle index bbd8242553cc5..64635671343ef 100644 --- a/docker/datahub-ingestion-base/build.gradle +++ b/docker/datahub-ingestion-base/build.gradle @@ -9,20 +9,21 @@ ext { docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry docker_repo = 'datahub-ingestion-base' docker_dir = 'datahub-ingestion-base' + docker_target = project.getProperties().getOrDefault("dockerTarget", "slim") revision = 2 // increment to trigger rebuild } docker { - name "${docker_registry}/${docker_repo}:v${version}-slim" - version "v${version}-slim" + name "${docker_registry}/${docker_repo}:v${version}-${docker_target}" + version "v${version}-${docker_target}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { include "docker/${docker_dir}/*" }.exclude { i -> i.file.isHidden() || i.file == buildDir } - buildArgs([APP_ENV: 'slim']) + buildArgs([APP_ENV: docker_target]) } tasks.getByName('docker').dependsOn('build') diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 0132ceaa9b1a9..2abd4e2f33bef 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -22,10 +22,22 @@ ENV PATH="/datahub-ingestion/.local/bin:$PATH" FROM base as slim-install RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" -FROM base as full-install +FROM base as full-install-build + +USER 0 +RUN apt-get update && apt-get install -y -qq maven + +USER datahub +COPY ./docker/datahub-ingestion/pyspark_jars.sh . + RUN pip install --no-cache --user ".[base]" && \ pip install --no-cache --user "./airflow-plugin[acryl-datahub-airflow-plugin]" && \ - pip install --no-cache --user ".[all]" + pip install --no-cache --user ".[all]" && \ + ./pyspark_jars.sh + +FROM base as full-install + +COPY --from=full-install-build /datahub-ingestion/.local /datahub-ingestion/.local FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
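The staged Dockerfile change above is the core of this fix: a throwaway `full-install-build` stage installs maven and rewrites pyspark's bundled jars, and the final `full-install` stage copies only the resulting `.local` directory forward, so the build toolchain never ships in the runtime image. A quick way to sanity-check the jar swap performed by `pyspark_jars.sh` (added in the next file diff) is sketched below; the image tag is a placeholder and the check itself is not part of this patch:

```bash
# Hypothetical post-build spot check (image tag is a placeholder): list which
# hadoop/zookeeper/slf4j jars ended up in pyspark's jar directory, to confirm
# the old conflicting versions were replaced by the ones pyspark_jars.sh fetches.
docker run --rm acryldata/datahub-ingestion:head-full bash -c '
  JARS="$(python -m site --user-site)/pyspark/jars"
  ls "$JARS" | grep -E "^(hadoop|zookeeper|slf4j)" | sort
'
```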
diff --git a/docker/datahub-ingestion/pyspark_jars.sh b/docker/datahub-ingestion/pyspark_jars.sh new file mode 100755 index 0000000000000..ecd24e78c4105 --- /dev/null +++ b/docker/datahub-ingestion/pyspark_jars.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -ex + +HADOOP_CLIENT_DEPENDENCY="${HADOOP_CLIENT_DEPENDENCY:-org.apache.hadoop:hadoop-client:3.3.6}" +ZOOKEEPER_DEPENDENCY="${ZOOKEEPER_DEPENDENCY:-org.apache.zookeeper:zookeeper:3.7.2}" +PYSPARK_JARS="$(python -m site --user-site)/pyspark/jars" + +# Remove conflicting versions +echo "Removing version conflicts from $PYSPARK_JARS" +CONFLICTS="zookeeper hadoop- slf4j-" +for jar in $CONFLICTS; do + rm "$PYSPARK_JARS/$jar"*.jar +done + +# Fetch dependencies +mvn dependency:get -Dtransitive=true -Dartifact="$HADOOP_CLIENT_DEPENDENCY" +mvn dependency:get -Dtransitive=true -Dartifact="$ZOOKEEPER_DEPENDENCY" + +# Move to pyspark location +echo "Moving jars to $PYSPARK_JARS" +find "$HOME/.m2" -type f -name "*.jar" -exec mv {} "$PYSPARK_JARS/" \; diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 5f44f14c3d74c..f3782abe576d3 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -242,7 +242,7 @@ } data_lake_profiling = { - "pydeequ==1.1.0", + "pydeequ~=1.1.0", "pyspark~=3.3.0", } @@ -256,7 +256,7 @@ databricks = { # 0.1.11 appears to have authentication issues with azure databricks "databricks-sdk>=0.9.0", - "pyspark", + "pyspark~=3.3.0", "requests", } From 7ba54fdb9820cd79296801c48d05dc177e3739f1 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Mon, 13 Nov 2023 12:33:34 +0900 Subject: [PATCH 094/792] fix: re-add security.md to sidebar (#9229) --- SECURITY.md | 2 +- docs-website/sidebars.js | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/SECURITY.md b/SECURITY.md index 3ca87b08d844d..0e301d3748373 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,4 +1,4 @@ -# Reporting security issues +# Reporting Security Issues If you think you have found a security vulnerability, please send a report to security@datahubproject.io. This address can be used for all of Acryl Data’s open source and commercial products (including but not limited to DataHub and Acryl Data). We can accept only vulnerability reports at this address. 
diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index f15f2927379c5..801e0fbd07d36 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -660,6 +660,7 @@ module.exports = { "docs/CONTRIBUTING", "docs/links", "docs/rfc", + "SECURITY", ], }, { From 582eebe739ef7f8fc7651a78eee9306143360b68 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 13 Nov 2023 10:57:49 -0600 Subject: [PATCH 095/792] feat(policies): reduce default access for all users (#9067) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- .github/scripts/check_policies.py | 28 ++++-- .../war/src/main/resources/boot/policies.json | 89 ++++++++++--------- .../cypress/e2e/settings/managing_groups.js | 2 +- 3 files changed, 70 insertions(+), 49 deletions(-) diff --git a/.github/scripts/check_policies.py b/.github/scripts/check_policies.py index 2ad5f7fff015b..cc3576e05413c 100644 --- a/.github/scripts/check_policies.py +++ b/.github/scripts/check_policies.py @@ -20,7 +20,7 @@ elif urn == "urn:li:dataHubPolicy:editor-platform-policy": editor_platform_policy_privileges = policy["info"]["privileges"] elif urn == "urn:li:dataHubPolicy:7": - all_user_platform_policy_privilges = policy["info"]["privileges"] + all_user_platform_policy_privileges = policy["info"]["privileges"] try: doc_type = policy["info"]["type"] privileges = policy["info"]["privileges"] @@ -54,10 +54,22 @@ ) assert len(diff_policies) == 0, f"Missing privileges for root user are {diff_policies}" -diff_policies = set(editor_platform_policy_privileges).difference( - set(all_user_platform_policy_privilges) -) -assert "MANAGE_POLICIES" not in all_user_platform_policy_privilges -assert ( - len(diff_policies) == 0 -), f"Missing privileges for all user policies are {diff_policies}" +# All users privileges checks +assert "MANAGE_POLICIES" not in all_user_platform_policy_privileges +assert "MANAGE_USERS_AND_GROUPS" not in all_user_platform_policy_privileges +assert "MANAGE_SECRETS" not in all_user_platform_policy_privileges +assert "MANAGE_USER_CREDENTIALS" not in all_user_platform_policy_privileges +assert "MANAGE_ACCESS_TOKENS" not in all_user_platform_policy_privileges +assert "EDIT_ENTITY" not in all_user_platform_policy_privileges +assert "DELETE_ENTITY" not in all_user_platform_policy_privileges + +# Editor checks +assert "MANAGE_POLICIES" not in editor_platform_policy_privileges +assert "MANAGE_USERS_AND_GROUPS" not in editor_platform_policy_privileges +assert "MANAGE_SECRETS" not in editor_platform_policy_privileges +assert "MANAGE_USER_CREDENTIALS" not in editor_platform_policy_privileges +assert "MANAGE_ACCESS_TOKENS" not in editor_platform_policy_privileges +# These don't prevent a user from modifying entities they are an asset owner of, i.e. 
their own profile info +assert "EDIT_CONTACT_INFO" not in editor_platform_policy_privileges +assert "EDIT_USER_PROFILE" not in editor_platform_policy_privileges +assert "EDIT_ENTITY_OWNERS" not in editor_platform_policy_privileges diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json index b7ffc11c08f05..32e68e7b13343 100644 --- a/metadata-service/war/src/main/resources/boot/policies.json +++ b/metadata-service/war/src/main/resources/boot/policies.json @@ -74,21 +74,6 @@ "editable":false } }, - { - "urn": "urn:li:dataHubPolicy:2" - }, - { - "urn": "urn:li:dataHubPolicy:3" - }, - { - "urn": "urn:li:dataHubPolicy:4" - }, - { - "urn": "urn:li:dataHubPolicy:5" - }, - { - "urn": "urn:li:dataHubPolicy:6" - }, { "urn": "urn:li:dataHubPolicy:7", "info": { @@ -99,18 +84,8 @@ "users":[] }, "privileges":[ - "MANAGE_INGESTION", - "MANAGE_SECRETS", - "MANAGE_USERS_AND_GROUPS", "VIEW_ANALYTICS", - "GENERATE_PERSONAL_ACCESS_TOKENS", - "MANAGE_DOMAINS", - "MANAGE_GLOBAL_ANNOUNCEMENTS", - "MANAGE_TESTS", - "MANAGE_GLOSSARIES", - "MANAGE_TAGS", - "MANAGE_GLOBAL_VIEWS", - "MANAGE_GLOBAL_OWNERSHIP_TYPES" + "GENERATE_PERSONAL_ACCESS_TOKENS" ], "displayName":"All Users - Base Platform Privileges", "description":"Grants base platform privileges to ALL users of DataHub. Change this policy to alter that behavior.", @@ -119,15 +94,6 @@ "editable":true } }, - { - "urn": "urn:li:dataHubPolicy:8" - }, - { - "urn": "urn:li:dataHubPolicy:9" - }, - { - "urn": "urn:li:dataHubPolicy:10" - }, { "urn": "urn:li:dataHubPolicy:view-entity-page-all", "info": { @@ -313,7 +279,6 @@ "VIEW_ENTITY_PAGE", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", - "EDIT_ENTITY_OWNERS", "EDIT_ENTITY_DOCS", "EDIT_ENTITY_DOC_LINKS", "EDIT_ENTITY_STATUS", @@ -321,16 +286,12 @@ "EDIT_ENTITY_DATA_PRODUCTS", "EDIT_DEPRECATION_PRIVILEGE", "EDIT_ENTITY_ASSERTIONS", - "EDIT_ENTITY", "EDIT_DATASET_COL_TAGS", "EDIT_DATASET_COL_GLOSSARY_TERMS", "EDIT_DATASET_COL_DESCRIPTION", "VIEW_DATASET_USAGE", "VIEW_DATASET_PROFILE", "EDIT_TAG_COLOR", - "EDIT_GROUP_MEMBERS", - "EDIT_USER_PROFILE", - "EDIT_CONTACT_INFO", "EDIT_LINEAGE", "EDIT_ENTITY_QUERIES", "SEARCH_PRIVILEGE", @@ -348,6 +309,54 @@ "editable":false } }, + { + "urn": "urn:li:dataHubPolicy:editor-metadata-policy-entities", + "info": { + "actors":{ + "resourceOwners":false, + "allUsers":false, + "allGroups":false, + "users":[], + "groups":[], + "roles":[ + "urn:li:dataHubRole:Editor" + ] + }, + "resources": { + "allResources": false, + "filter": { + "criteria": [ + { + "field": "RESOURCE_TYPE", + "values": [ + "dataset", + "chart", + "dashboard", + "dataFlow", + "dataJob", + "tag", + "container", + "domain", + "glossaryTerm", + "glossaryNode", + "notebook", + "dataProduct" + ], + "condition": "EQUALS" + } + ] + } + }, + "privileges":[ + "EDIT_ENTITY" + ], + "displayName":"Editors - Edit Metadata Entities", + "description":"Editors can edit primary metadata entities.", + "state":"ACTIVE", + "type":"METADATA", + "editable":true + } + }, { "urn": "urn:li:dataHubPolicy:reader-platform-policy", "info": { diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js index 8d689c7e2303c..70219a550cd8b 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js @@ -96,7 +96,7 @@ describe("create and manage group", () => { }); it("test user verify group 
participation", () => { - cy.loginWithCredentials(email,password); + cy.loginWithCredentials(); cy.visit("/settings/identities/groups"); cy.hideOnboardingTour(); cy.clickOptionWithText(`Test group EDITED ${test_id}`); From 4461b60583235c27bfe6244e6e0f12d08638aee5 Mon Sep 17 00:00:00 2001 From: Yuriy Gavrilov <44679014+YuriyGavrilov@users.noreply.github.com> Date: Mon, 13 Nov 2023 21:15:13 +0300 Subject: [PATCH 096/792] Update add new company s7 airlines (#9019) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 79f85433fbc18..6b8fa520e432e 100644 --- a/README.md +++ b/README.md @@ -142,6 +142,7 @@ Here are the companies that have officially adopted DataHub. Please feel free to - [SpotHero](https://spothero.com) - [Stash](https://www.stash.com) - [Shanghai HuaRui Bank](https://www.shrbank.com) +- [s7 Airlines](https://www.s7.ru/) - [ThoughtWorks](https://www.thoughtworks.com) - [TypeForm](http://typeform.com) - [Udemy](https://www.udemy.com/) From 3844b78fa220a92cb2ec9dd8599d9109106f8a24 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 13 Nov 2023 16:19:41 -0600 Subject: [PATCH 097/792] docs(debug): add debug information for cli (#9208) --- docs/actions/README.md | 3 ++- docs/cli.md | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/actions/README.md b/docs/actions/README.md index 23596ec67514e..4fa44eec588be 100644 --- a/docs/actions/README.md +++ b/docs/actions/README.md @@ -162,7 +162,8 @@ datahub actions -c -c ### Running in debug mode -Simply append the `--debug` flag to the CLI to run your action in debug mode. +Simply append the `--debug` flag to the CLI to run your action in debug mode. NOTE: This will reveal sensitive information in the logs, do not share the logs with outside resources and ensure untrusted +users will not have access to logs through UI ingestions before enabling on instances. ``` datahub actions -c --debug diff --git a/docs/cli.md b/docs/cli.md index 267f289d9f54a..7dfac1e9b2bff 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -134,7 +134,7 @@ The environment variables listed below take precedence over the DataHub CLI conf - `DATAHUB_GMS_TOKEN` (default `None`) - Used for communicating with DataHub Cloud. - `DATAHUB_TELEMETRY_ENABLED` (default `true`) - Set to `false` to disable telemetry. If CLI is being run in an environment with no access to public internet then this should be disabled. - `DATAHUB_TELEMETRY_TIMEOUT` (default `10`) - Set to a custom integer value to specify timeout in secs when sending telemetry. -- `DATAHUB_DEBUG` (default `false`) - Set to `true` to enable debug logging for CLI. Can also be achieved through `--debug` option of the CLI. +- `DATAHUB_DEBUG` (default `false`) - Set to `true` to enable debug logging for CLI. Can also be achieved through `--debug` option of the CLI. This exposes sensitive information in logs, enabling on production instances should be avoided especially if UI ingestion is in use as logs can be made available for runs through the UI. - `DATAHUB_VERSION` (default `head`) - Set to a specific version to run quickstart with the particular version of docker images. - `ACTIONS_VERSION` (default `head`) - Set to a specific version to run quickstart with that image tag of `datahub-actions` container. - `DATAHUB_ACTIONS_IMAGE` (default `acryldata/datahub-actions`) - Set to `-slim` to run a slimmer actions container without pyspark/deequ features. 
From ff90fb633da78a25f19f33dca0dae58df2b5ff82 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Mon, 13 Nov 2023 16:26:53 -0600 Subject: [PATCH 098/792] =?UTF-8?q?fix(datahub-ingestion):=20prevent=20tra?= =?UTF-8?q?nsitive=20deps,=20bump=20addtional=20pyspa=E2=80=A6=20(#9233)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .dockerignore | 5 +-- datahub-frontend/build.gradle | 3 +- datahub-upgrade/build.gradle | 3 +- docker/datahub-ingestion-base/build.gradle | 8 +++-- docker/datahub-ingestion/Dockerfile | 4 +-- docker/datahub-ingestion/README.md | 7 ++++ docker/datahub-ingestion/build.gradle | 16 ++++----- docker/datahub-ingestion/pyspark_jars.sh | 40 +++++++++++++-------- docker/elasticsearch-setup/build.gradle | 5 +-- docker/kafka-setup/build.gradle | 3 +- docker/mysql-setup/build.gradle | 3 +- docker/postgres-setup/build.gradle | 3 +- metadata-jobs/mae-consumer-job/build.gradle | 3 +- metadata-jobs/mce-consumer-job/build.gradle | 3 +- metadata-service/war/build.gradle | 3 +- 15 files changed, 70 insertions(+), 39 deletions(-) diff --git a/.dockerignore b/.dockerignore index 29c6c45bb0653..701263f5fedde 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,7 @@ **/node_modules/ -datahub-frontend/build/ -metadata-ingestion/venv/ +*/build/ +*/*/build/ +*/venv/ out **/*.class # Have to copy gradle/wrapper/gradle-wrapper.jar, can't exclude ALL jars diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index fdf13bac0accc..eb81b31745536 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -77,10 +77,11 @@ docker { version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { + include '.dockerignore' include 'docker/monitoring/*' include "docker/${docker_dir}/*" }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 5d0edf3ee8427..81e6e79c2a5e5 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -88,10 +88,11 @@ docker { dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile") files bootJar.outputs.files files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_repo}/*" include 'metadata-models/src/main/resources/*' }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/docker/datahub-ingestion-base/build.gradle b/docker/datahub-ingestion-base/build.gradle index 64635671343ef..c4d8a962dcd32 100644 --- a/docker/datahub-ingestion-base/build.gradle +++ b/docker/datahub-ingestion-base/build.gradle @@ -10,18 +10,20 @@ ext { docker_repo = 'datahub-ingestion-base' docker_dir = 'datahub-ingestion-base' docker_target = project.getProperties().getOrDefault("dockerTarget", "slim") + docker_version = "${version}${docker_target == 'slim' ? 
'-slim' : ''}" revision = 2 // increment to trigger rebuild } docker { - name "${docker_registry}/${docker_repo}:v${version}-${docker_target}" - version "v${version}-${docker_target}" + name "${docker_registry}/${docker_repo}:v${docker_version}" + version "v${docker_version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_dir}/*" }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } buildArgs([APP_ENV: docker_target]) } diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 2abd4e2f33bef..1aee79a428a98 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -32,8 +32,8 @@ COPY ./docker/datahub-ingestion/pyspark_jars.sh . RUN pip install --no-cache --user ".[base]" && \ pip install --no-cache --user "./airflow-plugin[acryl-datahub-airflow-plugin]" && \ - pip install --no-cache --user ".[all]" && \ - ./pyspark_jars.sh + pip install --no-cache --user ".[all]" +RUN ./pyspark_jars.sh FROM base as full-install diff --git a/docker/datahub-ingestion/README.md b/docker/datahub-ingestion/README.md index 6580199bcce21..ed856314c5cc0 100644 --- a/docker/datahub-ingestion/README.md +++ b/docker/datahub-ingestion/README.md @@ -2,3 +2,10 @@ [![datahub-ingestion docker](https://github.com/datahub-project/datahub/actions/workflows/docker-ingestion.yml/badge.svg)](https://github.com/datahub-project/datahub/actions/workflows/docker-ingestion.yml) Refer to the [metadata ingestion framework](../../metadata-ingestion) to understand the architecture and responsibilities of this service. + +## Slim vs Full Image Build + +There are two versions of this image. One includes pyspark and Oracle dependencies and is larger due to the java dependencies. + +Running the standard build results in the `slim` image without pyspark being generated by default. In order to build the full +image with pyspark use the following project property `-PdockerTarget=full`. diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index fed33752a4b81..247b896d6955c 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -9,6 +9,8 @@ ext { docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry docker_repo = 'datahub-ingestion' docker_dir = 'datahub-ingestion' + docker_target = project.getProperties().getOrDefault("dockerTarget", "slim") + docker_version = "${version}${docker_target == 'slim' ? '-slim' : ''}" revision = 2 // increment to trigger rebuild } @@ -19,21 +21,19 @@ dependencies { } docker { - name "${docker_registry}/${docker_repo}:v${version}-slim" - version "v${version}-slim" - dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile-slim-only") + name "${docker_registry}/${docker_repo}:v${docker_version}" + version "v${docker_version}" + dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile${docker_target == "slim" ? 
"-slim-only" : ""}") files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_dir}/*" include "metadata-ingestion/**" include "metadata-ingestion-modules/**" }.exclude { - i -> i.file.isHidden() || - i.file == buildDir || - i.file == project(':metadata-ingestion').buildDir || - i.file == project(':metadata-ingestion-modules').buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } buildArgs([DOCKER_VERSION: version, - RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace('-slim', '')]) + RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')]) } tasks.getByName('docker').dependsOn(['build', ':docker:datahub-ingestion-base:docker', diff --git a/docker/datahub-ingestion/pyspark_jars.sh b/docker/datahub-ingestion/pyspark_jars.sh index ecd24e78c4105..ab4b223f0358a 100755 --- a/docker/datahub-ingestion/pyspark_jars.sh +++ b/docker/datahub-ingestion/pyspark_jars.sh @@ -2,21 +2,33 @@ set -ex -HADOOP_CLIENT_DEPENDENCY="${HADOOP_CLIENT_DEPENDENCY:-org.apache.hadoop:hadoop-client:3.3.6}" -ZOOKEEPER_DEPENDENCY="${ZOOKEEPER_DEPENDENCY:-org.apache.zookeeper:zookeeper:3.7.2}" PYSPARK_JARS="$(python -m site --user-site)/pyspark/jars" -# Remove conflicting versions -echo "Removing version conflicts from $PYSPARK_JARS" -CONFLICTS="zookeeper hadoop- slf4j-" -for jar in $CONFLICTS; do - rm "$PYSPARK_JARS/$jar"*.jar -done +function replace_jar { + JAR_PREFIX=$1 + TRANSITIVE=$2 + DEPENDENCY=$3 -# Fetch dependencies -mvn dependency:get -Dtransitive=true -Dartifact="$HADOOP_CLIENT_DEPENDENCY" -mvn dependency:get -Dtransitive=true -Dartifact="$ZOOKEEPER_DEPENDENCY" + echo "Removing version conflicts for $PYSPARK_JARS/$JAR_PREFIX*.jar" + ls "$PYSPARK_JARS/$JAR_PREFIX"*.jar || true + rm "$PYSPARK_JARS/$JAR_PREFIX"*.jar || true + rm -r "$HOME/.m2" || true -# Move to pyspark location -echo "Moving jars to $PYSPARK_JARS" -find "$HOME/.m2" -type f -name "*.jar" -exec mv {} "$PYSPARK_JARS/" \; + if [ ! 
-z "$DEPENDENCY" ]; then + echo "Resolving $DEPENDENCY" + mvn dependency:get -Dtransitive=$TRANSITIVE -Dartifact="$DEPENDENCY" >/dev/null + + echo "Moving jars to $PYSPARK_JARS" + find "$HOME/.m2" -type f -name "$JAR_PREFIX*.jar" -exec echo "{}" \; + find "$HOME/.m2" -type f -name "$JAR_PREFIX*.jar" -exec cp {} "$PYSPARK_JARS/" \; + fi +} + +replace_jar "zookeeper-" "false" "${ZOOKEEPER_DEPENDENCY:-org.apache.zookeeper:zookeeper:3.7.2}" +replace_jar "hadoop-client-" "true" "${HADOOP_CLIENT_API_DEPENDENCY:-org.apache.hadoop:hadoop-client-api:3.3.6}" +replace_jar "hadoop-client-" "true" "${HADOOP_CLIENT_RUNTIME_DEPENDENCY:-org.apache.hadoop:hadoop-client-runtime:3.3.6}" +replace_jar "hadoop-yarn-" "true" "${HADOOP_YARN_DEPENDENCY:-org.apache.hadoop:hadoop-yarn-server-web-proxy:3.3.6}" +replace_jar "snappy-java-" "false" "${SNAPPY_JAVA_DEPENDENCY:-org.xerial.snappy:snappy-java:1.1.10.5}" +replace_jar "libthrift-" "false" "${LIBTHRIFT_DEPENDENCY:-org.apache.thrift:libthrift:0.19.0}" +replace_jar "ivy-" "false" "${IVY_DEPENDENCY:-org.apache.ivy:ivy:2.5.2}" +replace_jar "parquet-jackson-" "false" "${PARQUET_JACKSON_DEPENDENCY:-org.apache.parquet:parquet-jackson:1.13.1}" diff --git a/docker/elasticsearch-setup/build.gradle b/docker/elasticsearch-setup/build.gradle index ffee3b9c65cf4..ac935ca42fd12 100644 --- a/docker/elasticsearch-setup/build.gradle +++ b/docker/elasticsearch-setup/build.gradle @@ -15,10 +15,11 @@ docker { version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_dir}/*" - include "metadata-service/restli-servlet-impl/src/main/resources/index/**" + include 'metadata-service/restli-servlet-impl/src/main/resources/index/**' }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/docker/kafka-setup/build.gradle b/docker/kafka-setup/build.gradle index 573ef21c88bf9..25f9847190de3 100644 --- a/docker/kafka-setup/build.gradle +++ b/docker/kafka-setup/build.gradle @@ -15,9 +15,10 @@ docker { version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_dir}/*" }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/docker/mysql-setup/build.gradle b/docker/mysql-setup/build.gradle index 0d8941cce4833..1598866914c0e 100644 --- a/docker/mysql-setup/build.gradle +++ b/docker/mysql-setup/build.gradle @@ -16,9 +16,10 @@ docker { version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_dir}/*" }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/docker/postgres-setup/build.gradle b/docker/postgres-setup/build.gradle index 8a026be09d2b4..e24e206c99145 100644 --- a/docker/postgres-setup/build.gradle +++ b/docker/postgres-setup/build.gradle @@ -16,9 +16,10 @@ docker { version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files 
fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_dir}/*" }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index 51c758f434328..5e735e118493c 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -45,11 +45,12 @@ docker { dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile") files bootJar.outputs.files files fileTree(rootProject.projectDir) { + include '.dockerignore' include 'docker/monitoring/*' include "docker/${docker_repo}/*" include 'metadata-models/src/main/resources/*' }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index daf41a1e0303e..ef042188bc3d8 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ b/metadata-jobs/mce-consumer-job/build.gradle @@ -56,11 +56,12 @@ docker { dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile") files bootJar.outputs.files files fileTree(rootProject.projectDir) { + include '.dockerignore' include 'docker/monitoring/*' include "docker/${docker_repo}/*" include 'metadata-models/src/main/resources/*' }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") diff --git a/metadata-service/war/build.gradle b/metadata-service/war/build.gradle index 54e95fdcfe579..35730ad6dfa9f 100644 --- a/metadata-service/war/build.gradle +++ b/metadata-service/war/build.gradle @@ -70,11 +70,12 @@ docker { dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile") files war.outputs.files files fileTree(rootProject.projectDir) { + include '.dockerignore' include 'docker/monitoring/*' include "docker/${docker_repo}/*" include 'metadata-models/src/main/resources/*' }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") From 19aa21506886692ff221f1b859e0633df995fb43 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 13 Nov 2023 19:00:30 -0500 Subject: [PATCH 099/792] feat(ingest/dbt): dbt column-level lineage (#8991) --- .../airflow-plugin/setup.py | 2 +- metadata-ingestion/setup.py | 4 +- .../api/incremental_lineage_helper.py | 16 +- .../ingestion/source/dbt/dbt_common.py | 499 ++++++++++-- .../datahub/ingestion/source/dbt/dbt_core.py | 3 +- .../src/datahub/utilities/sqlglot_lineage.py | 81 +- .../src/datahub/utilities/topological_sort.py | 49 ++ .../dbt_enabled_with_schemas_mces_golden.json | 255 ++++-- .../dbt_test_column_meta_mapping_golden.json | 283 +++++-- .../dbt/dbt_test_events_golden.json | 731 ++++++++++++------ ...th_complex_owner_patterns_mces_golden.json | 240 +++++- ...th_data_platform_instance_mces_golden.json | 246 ++++-- ...h_non_incremental_lineage_mces_golden.json | 198 ++++- ..._target_platform_instance_mces_golden.json | 246 ++++-- .../tests/integration/dbt/test_dbt.py | 8 +- .../test_snowflake_cte_name_collision.json | 47 ++ 
...owflake_full_table_name_col_reference.json | 55 ++ .../goldens/test_snowflake_unused_cte.json | 39 + .../unit/sql_parsing/test_sqlglot_lineage.py | 128 ++- .../tests/unit/test_topological_sort.py | 33 + 20 files changed, 2550 insertions(+), 613 deletions(-) create mode 100644 metadata-ingestion/src/datahub/utilities/topological_sort.py create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_cte_name_collision.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_full_table_name_col_reference.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_unused_cte.json create mode 100644 metadata-ingestion/tests/unit/test_topological_sort.py diff --git a/metadata-ingestion-modules/airflow-plugin/setup.py b/metadata-ingestion-modules/airflow-plugin/setup.py index e88fc870cb333..838322f83833b 100644 --- a/metadata-ingestion-modules/airflow-plugin/setup.py +++ b/metadata-ingestion-modules/airflow-plugin/setup.py @@ -14,7 +14,7 @@ def get_long_description(): return pathlib.Path(os.path.join(root, "README.md")).read_text() -_version = package_metadata["__version__"] +_version: str = package_metadata["__version__"] _self_pin = f"=={_version}" if not _version.endswith("dev0") else "" diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index f3782abe576d3..ebe180703051f 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -305,8 +305,8 @@ "datahub-lineage-file": set(), "datahub-business-glossary": set(), "delta-lake": {*data_lake_profiling, *delta_lake}, - "dbt": {"requests"} | aws_common, - "dbt-cloud": {"requests"}, + "dbt": {"requests"} | sqlglot_lib | aws_common, + "dbt-cloud": {"requests"} | sqlglot_lib, "druid": sql_common | {"pydruid>=0.6.2"}, "dynamodb": aws_common, # Starting with 7.14.0 python client is checking if it is connected to elasticsearch client. If its not it throws diff --git a/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py b/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py index 945b201ca5758..479486ce22899 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py +++ b/metadata-ingestion/src/datahub/ingestion/api/incremental_lineage_helper.py @@ -15,7 +15,7 @@ from datahub.specific.dataset import DatasetPatchBuilder -def _convert_upstream_lineage_to_patch( +def convert_upstream_lineage_to_patch( urn: str, aspect: UpstreamLineageClass, system_metadata: Optional[SystemMetadataClass], @@ -86,16 +86,11 @@ def _merge_upstream_lineage( def _lineage_wu_via_read_modify_write( - graph: Optional[DataHubGraph], + graph: DataHubGraph, urn: str, aspect: UpstreamLineageClass, system_metadata: Optional[SystemMetadataClass], ) -> MetadataWorkUnit: - if graph is None: - raise ValueError( - "Failed to handle incremental lineage, DataHubGraph is missing. " - "Use `datahub-rest` sink OR provide `datahub-api` config in recipe. " - ) gms_aspect = graph.get_aspect(urn, UpstreamLineageClass) if gms_aspect: new_aspect = _merge_upstream_lineage(aspect, gms_aspect) @@ -131,11 +126,16 @@ def auto_incremental_lineage( yield wu if lineage_aspect.fineGrainedLineages: + if graph is None: + raise ValueError( + "Failed to handle incremental lineage, DataHubGraph is missing. " + "Use `datahub-rest` sink OR provide `datahub-api` config in recipe. 
" + ) yield _lineage_wu_via_read_modify_write( graph, urn, lineage_aspect, wu.metadata.systemMetadata ) elif lineage_aspect.upstreams: - yield _convert_upstream_lineage_to_patch( + yield convert_upstream_lineage_to_patch( urn, lineage_aspect, wu.metadata.systemMetadata ) else: diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index 76cb82aaa5b4b..94df0a4f8a166 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -1,3 +1,4 @@ +import itertools import logging import re from abc import abstractmethod @@ -30,6 +31,9 @@ platform_name, support_status, ) +from datahub.ingestion.api.incremental_lineage_helper import ( + convert_upstream_lineage_to_patch, +) from datahub.ingestion.api.source import MetadataWorkUnitProcessor from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import DatasetSubTypes @@ -67,6 +71,9 @@ ) from datahub.metadata.com.linkedin.pegasus2avro.dataset import ( DatasetLineageTypeClass, + FineGrainedLineage, + FineGrainedLineageDownstreamType, + FineGrainedLineageUpstreamType, UpstreamClass, UpstreamLineage, ) @@ -100,9 +107,17 @@ UpstreamLineageClass, ViewPropertiesClass, ) -from datahub.specific.dataset import DatasetPatchBuilder from datahub.utilities.mapping import Constants, OperationProcessor +from datahub.utilities.sqlglot_lineage import ( + SchemaInfo, + SchemaResolver, + SqlParsingDebugInfo, + SqlParsingResult, + detach_ctes, + sqlglot_lineage, +) from datahub.utilities.time import datetime_to_ts_millis +from datahub.utilities.topological_sort import topological_sort logger = logging.getLogger(__name__) DBT_PLATFORM = "dbt" @@ -280,10 +295,19 @@ class DBTCommonConfig( default=False, description="When enabled, dbt test warnings will be treated as failures.", ) - # override fault value to True. + infer_dbt_schemas: bool = Field( + default=True, + description="When enabled, schemas will be inferred from the dbt node definition.", + ) + include_column_lineage: bool = Field( + default=False, + description="When enabled, column-level lineage will be extracted from the dbt node definition. Requires `infer_dbt_schemas` to be enabled. " + "If you run into issues where the column name casing does not match up with properly, providing a datahub_api or using the rest sink will improve accuracy.", + ) + # override default value to True. incremental_lineage: bool = Field( default=True, - description="When enabled, emits lineage as incremental to existing lineage already in DataHub. When disabled, re-states lineage on each run.", + description="When enabled, emits incremental/patch lineage for non-dbt entities. 
When disabled, re-states lineage on each run.", ) @validator("target_platform") @@ -340,6 +364,17 @@ def meta_mapping_validator( ) return meta_mapping + @validator("include_column_lineage") + def validate_include_column_lineage( + cls, include_column_lineage: bool, values: Dict + ) -> bool: + if include_column_lineage and not values.get("infer_dbt_schemas"): + raise ValueError( + "`infer_dbt_schemas` must be enabled to use `include_column_lineage`" + ) + + return include_column_lineage + @dataclass class DBTColumn: @@ -352,6 +387,16 @@ class DBTColumn: meta: Dict[str, Any] = field(default_factory=dict) tags: List[str] = field(default_factory=list) + datahub_data_type: Optional[SchemaFieldDataType] = None + + +@dataclass +class DBTColumnLineageInfo: + upstream_dbt_name: str + + upstream_col: str + downstream_col: str + @dataclass class DBTNode: @@ -383,7 +428,9 @@ class DBTNode: owner: Optional[str] columns: List[DBTColumn] = field(default_factory=list) - upstream_nodes: List[str] = field(default_factory=list) + upstream_nodes: List[str] = field(default_factory=list) # list of upstream dbt_name + upstream_cll: List[DBTColumnLineageInfo] = field(default_factory=list) + cll_debug_info: Optional[SqlParsingDebugInfo] = None meta: Dict[str, Any] = field(default_factory=dict) query_tag: Dict[str, Any] = field(default_factory=dict) @@ -394,17 +441,23 @@ class DBTNode: test_info: Optional["DBTTest"] = None # only populated if node_type == 'test' test_result: Optional["DBTTestResult"] = None + @staticmethod + def _join_parts(parts: List[Optional[str]]) -> str: + joined = ".".join([part for part in parts if part]) + assert joined + return joined + def get_db_fqn(self) -> str: - if self.database: - fqn = f"{self.database}.{self.schema}.{self.name}" - else: - fqn = f"{self.schema}.{self.name}" + # Database might be None, but schema and name should always be present. + fqn = self._join_parts([self.database, self.schema, self.name]) return fqn.replace('"', "") def get_urn( self, target_platform: str, env: str, + # If target_platform = dbt, this is the dbt platform instance. + # Otherwise, it's the target platform instance. data_platform_instance: Optional[str], ) -> str: db_fqn = self.get_db_fqn() @@ -417,6 +470,80 @@ def get_urn( env=env, ) + def is_ephemeral_model(self) -> bool: + return self.materialization == "ephemeral" + + def get_fake_ephemeral_table_name(self) -> str: + assert self.is_ephemeral_model() + + # Similar to get_db_fqn. + fqn = self._join_parts( + [self.database, self.schema, f"__datahub__dbt__ephemeral__{self.name}"] + ) + return fqn.replace('"', "") + + def get_urn_for_upstream_lineage( + self, + dbt_platform_instance: Optional[str], + target_platform: str, + target_platform_instance: Optional[str], + env: str, + ) -> str: + """ + Get the urn to use when referencing this node in a dbt node's upstream lineage. + + If the node is a source or an ephemeral dbt node, we should point at the dbt node. + Otherwise, the node is materialized in the target platform, and so lineage should + point there. + """ + # TODO: This logic shouldn't live in the DBTNode class. It should be moved to the source. 
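+ # (For example, an upstream ephemeral model keeps its dbt urn here, while a model materialized as a table resolves to the corresponding dataset on the target platform.)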
+ + platform_value = DBT_PLATFORM + platform_instance_value = dbt_platform_instance + + materialized = self.materialization + if materialized in { + "view", + "materialized_view", + "table", + "incremental", + "snapshot", + }: + # upstream urns point to the target platform + platform_value = target_platform + platform_instance_value = target_platform_instance + + return self.get_urn( + target_platform=platform_value, + env=env, + data_platform_instance=platform_instance_value, + ) + + @property + def exists_in_target_platform(self): + return not (self.is_ephemeral_model() or self.node_type == "test") + + def columns_setdefault(self, schema_fields: List[SchemaField]) -> None: + """ + Update the column list if they are not already set. + """ + + if self.columns: + # If we already have columns, don't overwrite them. + return + + self.columns = [ + DBTColumn( + name=schema_field.fieldPath, + comment="", + description="", + index=i, + data_type=schema_field.nativeDataType, + datahub_data_type=schema_field.type, + ) + for i, schema_field in enumerate(schema_fields) + ] + def get_custom_properties(node: DBTNode) -> Dict[str, str]: # initialize custom properties to node's meta props @@ -442,6 +569,31 @@ def get_custom_properties(node: DBTNode) -> Dict[str, str]: return custom_properties +def _get_dbt_cte_names(name: str, target_platform: str) -> List[str]: + # Match the dbt CTE naming scheme: + # The default is defined here https://github.com/dbt-labs/dbt-core/blob/4122f6c308c88be4a24c1ea490802239a4c1abb8/core/dbt/adapters/base/relation.py#L222 + # However, since this PR https://github.com/dbt-labs/dbt-core/pull/2712, it's also possible + # for adapters to override this default. Only a handful actually do though: + # https://github.com/search?type=code&q=add_ephemeral_prefix+path:/%5Edbt%5C/adapters%5C// + + # Regardless, we need to keep the original name to work with older dbt versions. + default_cte_name = f"__dbt__cte__{name}" + + adapter_cte_names = { + "hive": f"tmp__dbt__cte__{name}", + "oracle": f"dbt__cte__{name}__", + "netezza": f"dbt__cte__{name}", + "exasol": f"dbt__CTE__{name}", + "db2": f"DBT_CTE__{name}", # ibm db2 + } + + cte_names = [default_cte_name] + if target_platform in adapter_cte_names: + cte_names.append(adapter_cte_names[target_platform]) + + return cte_names + + def get_upstreams( upstreams: List[str], all_nodes: Dict[str, DBTNode], @@ -462,21 +614,12 @@ def get_upstreams( upstream_manifest_node = all_nodes[upstream] # This logic creates lineages among dbt nodes. 
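As an illustration of the CTE naming helper above, a minimal sketch of its behavior, assuming a model named `customers` compiled against the `hive` adapter:

# _get_dbt_cte_names returns the default dbt CTE name first, followed by the
# adapter-specific name when the target platform overrides the default prefix.
_get_dbt_cte_names("customers", "hive")
# -> ["__dbt__cte__customers", "tmp__dbt__cte__customers"]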
- platform_value = DBT_PLATFORM - platform_instance_value = platform_instance - - materialized = upstream_manifest_node.materialization - - if materialized in {"view", "table", "incremental", "snapshot"}: - # upstream urns point to the target platform - platform_value = target_platform - platform_instance_value = target_platform_instance - upstream_urns.append( - upstream_manifest_node.get_urn( - platform_value, - environment, - platform_instance_value, + upstream_manifest_node.get_urn_for_upstream_lineage( + dbt_platform_instance=platform_instance, + target_platform=target_platform, + target_platform_instance=target_platform_instance, + env=environment, ) ) return upstream_urns @@ -553,7 +696,7 @@ def get_column_type( @support_status(SupportStatus.CERTIFIED) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -@capability(SourceCapability.USAGE_STATS, "", supported=False) +@capability(SourceCapability.LINEAGE_FINE, "Enabled using `include_column_lineage`") class DBTSourceBase(StatefulIngestionSourceBase): def __init__(self, config: DBTCommonConfig, ctx: PipelineContext, platform: str): super().__init__(config, ctx) @@ -614,9 +757,10 @@ def create_test_entity_mcps( target_platform=self.config.target_platform, target_platform_instance=self.config.target_platform_instance, environment=self.config.env, - platform_instance=None, + platform_instance=self.config.platform_instance, ) + # In case a dbt test depends on multiple tables, we create separate assertions for each. for upstream_urn in sorted(upstream_urns): if self.config.entities_enabled.can_emit_node_type("test"): yield make_assertion_from_test( @@ -651,23 +795,24 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: ] def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: - if self.config.write_semantics == "PATCH" and not self.ctx.graph: - raise ConfigurationError( - "With PATCH semantics, dbt source requires a datahub_api to connect to. " - "Consider using the datahub-rest sink or provide a datahub_api: configuration on your ingestion recipe." - ) + if self.config.write_semantics == "PATCH": + self.ctx.require_graph("Using dbt with write_semantics=PATCH") all_nodes, additional_custom_props = self.load_nodes() all_nodes_map = {node.dbt_name: node for node in all_nodes} - nodes = self.filter_nodes(all_nodes) - additional_custom_props_filtered = { key: value for key, value in additional_custom_props.items() if value is not None } + # We need to run this before filtering nodes, because the info generated + # for a filtered node may be used by an unfiltered node. + # NOTE: This method mutates the DBTNode objects directly. 
+ self._infer_schemas_and_update_cll(all_nodes_map) + + nodes = self._filter_nodes(all_nodes) non_test_nodes = [ dataset_node for dataset_node in nodes if dataset_node.node_type != "test" ] @@ -695,7 +840,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: all_nodes_map, ) - def filter_nodes(self, all_nodes: List[DBTNode]) -> List[DBTNode]: + def _filter_nodes(self, all_nodes: List[DBTNode]) -> List[DBTNode]: nodes = [] for node in all_nodes: key = node.dbt_name @@ -707,6 +852,193 @@ def filter_nodes(self, all_nodes: List[DBTNode]) -> List[DBTNode]: return nodes + @staticmethod + def _to_schema_info(schema_fields: List[SchemaField]) -> SchemaInfo: + return {column.fieldPath: column.nativeDataType for column in schema_fields} + + def _infer_schemas_and_update_cll(self, all_nodes_map: Dict[str, DBTNode]) -> None: + """Annotate the DBTNode objects with schema information and column-level lineage. + + Note that this mutates the DBTNode objects directly. + + This method does the following: + 1. Iterate over the dbt nodes in topological order. + 2. For each node, either load the schema from the graph or from the dbt catalog info. + We also add this schema to the schema resolver. + 3. Run sql parser to infer the schema + generate column lineage. + 4. Write the schema and column lineage back to the DBTNode object. + 5. If we haven't already added the node's schema to the schema resolver, do that. + """ + + if not self.config.infer_dbt_schemas: + if self.config.include_column_lineage: + raise ConfigurationError( + "`infer_dbt_schemas` must be enabled to use `include_column_lineage`" + ) + return + + graph = self.ctx.graph + + schema_resolver = SchemaResolver( + platform=self.config.target_platform, + platform_instance=self.config.target_platform_instance, + env=self.config.env, + ) + + target_platform_urn_to_dbt_name: Dict[str, str] = {} + + # Iterate over the dbt nodes in topological order. + # This ensures that we process upstream nodes before downstream nodes. + for dbt_name in topological_sort( + list(all_nodes_map.keys()), + edges=list( + (upstream, node.dbt_name) + for node in all_nodes_map.values() + for upstream in node.upstream_nodes + ), + ): + node = all_nodes_map[dbt_name] + + target_node_urn = None + should_fetch_target_node_schema = False + if node.exists_in_target_platform: + target_node_urn = node.get_urn( + self.config.target_platform, + self.config.env, + self.config.target_platform_instance, + ) + should_fetch_target_node_schema = True + elif node.is_ephemeral_model(): + # For ephemeral nodes, we "pretend" that they exist in the target platform + # for schema resolution purposes. + target_node_urn = mce_builder.make_dataset_urn_with_platform_instance( + platform=self.config.target_platform, + name=node.get_fake_ephemeral_table_name(), + platform_instance=self.config.target_platform_instance, + env=self.config.env, + ) + if target_node_urn: + target_platform_urn_to_dbt_name[target_node_urn] = node.dbt_name + + # Our schema resolver preference is: + # 1. graph + # 2. dbt catalog + # 3. inferred + # Exception: if convert_column_urns_to_lowercase is enabled, swap 1 and 2. + # Cases 1 and 2 are handled here, and case 3 is handled after schema inference has occurred. + schema_fields: Optional[List[SchemaField]] = None + + # Fetch the schema from the graph. 
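+ # (Graph schemas are checked first because their column casing matches what DataHub already stores.)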
+ if target_node_urn and should_fetch_target_node_schema and graph: + schema_metadata = graph.get_aspect(target_node_urn, SchemaMetadata) + if schema_metadata: + schema_fields = schema_metadata.fields + + # Otherwise, load the schema from the dbt catalog. + # Note that this might get the casing wrong relative to DataHub, but + # has a more up-to-date column list. + if node.columns and ( + not schema_fields or self.config.convert_column_urns_to_lowercase + ): + schema_fields = [ + SchemaField( + fieldPath=column.name.lower() + if self.config.convert_column_urns_to_lowercase + else column.name, + type=column.datahub_data_type + or SchemaFieldDataType(type=NullTypeClass()), + nativeDataType=column.data_type, + ) + for column in node.columns + ] + + # Add the node to the schema resolver, so that we can get column + # casing to match the upstream platform. + added_to_schema_resolver = False + if target_node_urn and schema_fields: + schema_resolver.add_raw_schema_info( + target_node_urn, self._to_schema_info(schema_fields) + ) + added_to_schema_resolver = True + + # Run sql parser to infer the schema + generate column lineage. + sql_result = None + if node.compiled_code: + try: + # Add CTE stops based on the upstreams list. + preprocessed_sql = detach_ctes( + node.compiled_code, + platform=schema_resolver.platform, + cte_mapping={ + cte_name: upstream_node.get_fake_ephemeral_table_name() + for upstream_node in [ + all_nodes_map[upstream_node_name] + for upstream_node_name in node.upstream_nodes + if upstream_node_name in all_nodes_map + ] + if upstream_node.is_ephemeral_model() + for cte_name in _get_dbt_cte_names( + upstream_node.name, schema_resolver.platform + ) + }, + ) + except Exception as e: + sql_result = SqlParsingResult.make_from_error(e) + else: + sql_result = sqlglot_lineage( + preprocessed_sql, schema_resolver=schema_resolver + ) + + # Save the column lineage. + if self.config.include_column_lineage and sql_result: + # We only save the debug info here. We'll report errors based on it later, after + # applying the configured node filters. + node.cll_debug_info = sql_result.debug_info + + if sql_result.column_lineage: + node.upstream_cll = [ + DBTColumnLineageInfo( + upstream_dbt_name=target_platform_urn_to_dbt_name[ + upstream_column.table + ], + upstream_col=upstream_column.column, + downstream_col=column_lineage_info.downstream.column, + ) + for column_lineage_info in sql_result.column_lineage + for upstream_column in column_lineage_info.upstreams + # Only include the CLL if the table is in the upstream list. + if target_platform_urn_to_dbt_name.get(upstream_column.table) + in node.upstream_nodes + ] + + # If we didn't fetch the schema from the graph, use the inferred schema. + inferred_schema_fields = None + if sql_result and sql_result.column_lineage: + inferred_schema_fields = [ + SchemaField( + fieldPath=column_lineage.downstream.column, + type=column_lineage.downstream.column_type + or SchemaFieldDataType(type=NullTypeClass()), + nativeDataType=column_lineage.downstream.native_column_type + or "", + ) + for column_lineage in sql_result.column_lineage + ] + + # Conditionally add the inferred schema to the schema resolver. + if ( + not added_to_schema_resolver + and target_node_urn + and inferred_schema_fields + ): + schema_resolver.add_raw_schema_info( + target_node_urn, self._to_schema_info(inferred_schema_fields) + ) + + # Save the inferred schema fields into the dbt node. 
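+ # (columns_setdefault leaves the node untouched if columns were already loaded from the dbt catalog.)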
+ if inferred_schema_fields: + node.columns_setdefault(inferred_schema_fields) + def create_platform_mces( self, dbt_nodes: List[DBTNode], @@ -762,7 +1094,7 @@ def create_platform_mces( ) # mutates meta_aspects if mce_platform == DBT_PLATFORM: - aspects = self._generate_base_aspects( + aspects = self._generate_base_dbt_aspects( node, additional_custom_props_filtered, mce_platform, meta_aspects ) @@ -786,7 +1118,7 @@ def create_platform_mces( else: # We are creating empty node for platform and only add lineage/keyaspect. aspects = [] - if node.materialization == "ephemeral" or node.node_type == "test": + if not node.exists_in_target_platform: continue # This code block is run when we are generating entities of platform type. @@ -799,19 +1131,15 @@ def create_platform_mces( self.config.platform_instance, ) upstreams_lineage_class = get_upstream_lineage([upstream_dbt_urn]) - if self.config.incremental_lineage: - patch_builder: DatasetPatchBuilder = DatasetPatchBuilder( - urn=node_datahub_urn + if not is_primary_source and self.config.incremental_lineage: + # We only generate incremental lineage for non-dbt nodes. + wu = convert_upstream_lineage_to_patch( + urn=node_datahub_urn, + aspect=upstreams_lineage_class, + system_metadata=None, ) - for upstream in upstreams_lineage_class.upstreams: - patch_builder.add_upstream_lineage(upstream) - - for mcp in patch_builder.build(): - yield MetadataWorkUnit( - id=f"upstreamLineage-for-{node_datahub_urn}", - mcp_raw=mcp, - is_primary_source=is_primary_source, - ) + wu.is_primary_source = is_primary_source + yield wu else: aspects.append(upstreams_lineage_class) @@ -918,7 +1246,7 @@ def _create_view_properties_aspect( ) return view_properties - def _generate_base_aspects( + def _generate_base_dbt_aspects( self, node: DBTNode, additional_custom_props_filtered: Dict[str, str], @@ -926,8 +1254,7 @@ def _generate_base_aspects( meta_aspects: Dict[str, Any], ) -> List[Any]: """ - There are some common aspects that get generated for both dbt node and platform node depending on whether dbt - node creation is enabled or not. + Some common aspects that get generated for dbt nodes. """ # create an empty list of aspects and keep adding to it. Initializing with Any to avoid a @@ -987,6 +1314,8 @@ def get_schema_metadata( self.config.strip_user_ids_from_email, ) + # TODO if infer_dbt_schemas, load from saved schemas too + canonical_schema: List[SchemaField] = [] for column in node.columns: description = None @@ -1034,7 +1363,8 @@ def get_schema_metadata( field = SchemaField( fieldPath=field_name, nativeDataType=column.data_type, - type=get_column_type( + type=column.datahub_data_type + or get_column_type( report, node.dbt_name, column.data_type, node.dbt_adapter ), description=description, @@ -1140,27 +1470,78 @@ def _create_lineage_aspect_for_dbt_node( """ This method creates lineage amongst dbt nodes. A dbt node can be linked to other dbt nodes or a platform node. """ - upstream_urns = get_upstreams( - node.upstream_nodes, - all_nodes_map, - self.config.target_platform, - self.config.target_platform_instance, - self.config.env, - self.config.platform_instance, - ) # if a node is of type source in dbt, its upstream lineage should have the corresponding table/view # from the platform. This code block is executed when we are generating entities of type "dbt". 
if node.node_type == "source": - upstream_urns.append( + upstream_urns = [ node.get_urn( self.config.target_platform, self.config.env, self.config.target_platform_instance, ) + ] + cll = None + else: + upstream_urns = get_upstreams( + node.upstream_nodes, + all_nodes_map, + self.config.target_platform, + self.config.target_platform_instance, + self.config.env, + self.config.platform_instance, + ) + + node_urn = node.get_urn( + target_platform=DBT_PLATFORM, + env=self.config.env, + data_platform_instance=self.config.platform_instance, ) + + def _translate_dbt_name_to_upstream_urn(dbt_name: str) -> str: + return all_nodes_map[dbt_name].get_urn_for_upstream_lineage( + dbt_platform_instance=self.config.platform_instance, + target_platform=self.config.target_platform, + target_platform_instance=self.config.target_platform_instance, + env=self.config.env, + ) + + if node.cll_debug_info and node.cll_debug_info.error: + self.report.report_warning( + node.dbt_name, + f"Error parsing column lineage: {node.cll_debug_info.error}", + ) + cll = [ + FineGrainedLineage( + upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, + downstreamType=FineGrainedLineageDownstreamType.FIELD_SET, + upstreams=[ + mce_builder.make_schema_field_urn( + _translate_dbt_name_to_upstream_urn( + upstream_column.upstream_dbt_name + ), + upstream_column.upstream_col, + ) + for upstream_column in upstreams + ], + downstreams=[ + mce_builder.make_schema_field_urn(node_urn, downstream) + ], + confidenceScore=node.cll_debug_info.confidence + if node.cll_debug_info + else None, + ) + for downstream, upstreams in itertools.groupby( + node.upstream_cll, lambda x: x.downstream_col + ) + ] + if upstream_urns: upstreams_lineage_class = get_upstream_lineage(upstream_urns) + + if self.config.include_column_lineage and cll: + upstreams_lineage_class.fineGrainedLineages = cll + return upstreams_lineage_class return None diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index dc3a84847beb2..a7703b203bcee 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -171,7 +171,8 @@ def extract_dbt_entities( catalog_type = None if catalog_node is None: - if materialization != "test": + if materialization not in {"test", "ephemeral"}: + # Test and ephemeral nodes will never show up in the catalog. 
report.report_warning( key, f"Entity {key} ({name}) is in manifest but missing from catalog", diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index cdffb684d958e..d1209f3ec7b75 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -260,6 +260,16 @@ class SqlParsingResult(_ParserBaseModel): exclude=True, ) + @classmethod + def make_from_error(cls, error: Exception) -> "SqlParsingResult": + return cls( + in_tables=[], + out_tables=[], + debug_info=SqlParsingDebugInfo( + table_error=error, + ), + ) + def _parse_statement(sql: sqlglot.exp.ExpOrStr, dialect: str) -> sqlglot.Expression: statement: sqlglot.Expression = sqlglot.maybe_parse( @@ -1154,14 +1164,60 @@ def sqlglot_lineage( default_schema=default_schema, ) except Exception as e: - return SqlParsingResult( - in_tables=[], - out_tables=[], - column_lineage=None, - debug_info=SqlParsingDebugInfo( - table_error=e, - ), - ) + return SqlParsingResult.make_from_error(e) + + +def detach_ctes( + sql: sqlglot.exp.ExpOrStr, platform: str, cte_mapping: Dict[str, str] +) -> sqlglot.exp.Expression: + """Replace CTE references with table references. + + For example, with cte_mapping = {"__cte_0": "_my_cte_table"}, the following SQL + + WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id + + is transformed into + + WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table ON table2.id = _my_cte_table.id + + Note that the original __cte_0 definition remains in the query, but is simply not referenced. + The query optimizer should be able to remove it. + + This method makes a major assumption: that no other table/column has the same name as a + key in the cte_mapping. + """ + + dialect = _get_dialect(platform) + statement = _parse_statement(sql, dialect=dialect) + + def replace_cte_refs(node: sqlglot.exp.Expression) -> sqlglot.exp.Expression: + if ( + isinstance(node, sqlglot.exp.Identifier) + and node.parent + and not isinstance(node.parent.parent, sqlglot.exp.CTE) + and node.name in cte_mapping + ): + full_new_name = cte_mapping[node.name] + table_expr = sqlglot.maybe_parse( + full_new_name, dialect=dialect, into=sqlglot.exp.Table + ) + + # We expect node.parent to be a Table or Column. + # Either way, it should support catalog/db/name. 
+ parent = node.parent + + if "catalog" in parent.arg_types: + parent.set("catalog", table_expr.catalog) + if "db" in parent.arg_types: + parent.set("db", table_expr.db) + + new_node = sqlglot.exp.Identifier(this=table_expr.name) + + return new_node + else: + return node + + return statement.transform(replace_cte_refs, copy=False) def create_lineage_sql_parsed_result( @@ -1197,14 +1253,7 @@ def create_lineage_sql_parsed_result( default_schema=schema, ) except Exception as e: - return SqlParsingResult( - in_tables=[], - out_tables=[], - column_lineage=None, - debug_info=SqlParsingDebugInfo( - table_error=e, - ), - ) + return SqlParsingResult.make_from_error(e) finally: if needs_close: schema_resolver.close() diff --git a/metadata-ingestion/src/datahub/utilities/topological_sort.py b/metadata-ingestion/src/datahub/utilities/topological_sort.py new file mode 100644 index 0000000000000..f807dfe96063a --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/topological_sort.py @@ -0,0 +1,49 @@ +from collections import deque +from typing import Dict, Iterable, List, Tuple, TypeVar + +_K = TypeVar("_K") + + +def topological_sort(nodes: List[_K], edges: List[Tuple[_K, _K]]) -> Iterable[_K]: + """Topological sort of a directed acyclic graph or forest. + + This is an implementation of Kahn's algorithm. + + Args: + nodes: List of nodes. + edges: List of edges, as tuples of (source, target). + + Returns: + List of nodes in topological order. + """ + + # Build adjacency list. + adj_list: Dict[_K, List[_K]] = {node: [] for node in nodes} + for source, target in edges: + adj_list[source].append(target) + + # Build in-degree map. + in_degrees: Dict[_K, int] = {node: 0 for node in nodes} + for _source, target in edges: + in_degrees[target] += 1 + + # Initialize queue with nodes with in-degree 0. + queue = deque(node for node in nodes if in_degrees[node] == 0) + + results = 0 + while queue: + node = queue.popleft() + + results += 1 + yield node + + # Decrement in-degree of each neighbor. + for neighbor in adj_list[node]: + in_degrees[neighbor] -= 1 + + # If in-degree is 0, add to queue. 
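+ # (all of its incoming edges have been consumed, so it can safely be emitted next)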
+ if in_degrees[neighbor] == 0: + queue.append(neighbor) + + if results != len(nodes): + raise ValueError("Graph contains cycles.") diff --git a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json index 16df7b8e51b24..e4f01ef7a6c53 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json @@ -14,7 +14,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -131,7 +132,92 @@ "tableSchema": "" } }, - "fields": [] + "fields": [ + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + } + ] } }, { @@ -176,7 +262,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -195,7 +282,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -355,7 +443,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -373,7 +462,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -575,7 +665,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -594,7 +685,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": 
"dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -712,7 +804,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -730,7 +823,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -882,7 +976,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -900,7 +995,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1070,7 +1166,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1088,7 +1185,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1198,7 +1296,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1216,7 +1315,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1338,7 +1438,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1356,7 +1457,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1486,7 +1588,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1504,7 +1607,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1698,7 +1802,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1716,7 +1821,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1862,7 +1968,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -1880,7 +1987,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2047,7 +2155,8 @@ }, "systemMetadata": { 
"lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2065,7 +2174,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2211,7 +2321,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2229,7 +2340,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2375,7 +2487,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2393,7 +2506,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2539,7 +2653,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2557,7 +2672,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2703,7 +2819,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2712,12 +2829,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2726,12 +2856,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)\", \"type\": 
\"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2740,12 +2883,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2760,7 +2916,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2775,7 +2932,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2790,7 +2948,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2805,7 +2964,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } }, { @@ -2820,7 +2980,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-schemas-dbt-enabled" + "runId": "dbt-test-with-schemas-dbt-enabled", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json index 4557cb0324829..4d5b008b695f9 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json @@ -14,7 +14,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -65,7 +66,104 @@ "tableSchema": 
"" } }, - "fields": [] + "fields": [ + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "initial_full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NullType": {} + } + }, + "nativeDataType": "", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + } + ] } }, { @@ -118,7 +216,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -137,7 +236,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -275,7 +375,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -293,7 +394,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -487,7 +589,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -506,7 +609,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -652,7 +756,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -670,7 +775,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -920,7 +1026,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -938,7 +1045,8 @@ }, "systemMetadata": { 
"lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1084,7 +1192,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1102,7 +1211,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1272,7 +1382,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1290,7 +1401,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1400,7 +1512,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1418,7 +1531,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1540,7 +1654,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1558,7 +1673,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1688,7 +1804,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1706,7 +1823,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1900,7 +2018,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -1918,7 +2037,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2064,7 +2184,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2082,7 +2203,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2249,7 +2371,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2267,7 +2390,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2413,7 +2537,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2431,7 +2556,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": 
"dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2577,7 +2703,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2595,7 +2722,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2741,7 +2869,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2759,7 +2888,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2905,7 +3035,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2914,12 +3045,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.an-aliased-view-for-monthly-billing%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.an-aliased-view-for-monthly-billing%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2928,12 +3072,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.an_aliased_view_for_payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.an_aliased_view_for_payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2942,12 +3099,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": 
\"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.payments_by_customer_by_month%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.payments_by_customer_by_month%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2956,12 +3126,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.customer_snapshot%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.public.customer_snapshot%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2976,7 +3159,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } }, { @@ -2991,7 +3175,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-column-meta-mapping" + "runId": "dbt-column-meta-mapping", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_events_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_events_golden.json index 086c5a78e92a4..3e8ddf317f387 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_events_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_events_golden.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -203,7 +204,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -222,7 +224,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -429,7 +432,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": 
"no-run-id-provided" } }, { @@ -447,7 +451,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -565,7 +570,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -583,7 +589,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -713,7 +720,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -731,7 +739,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -861,7 +870,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -878,7 +888,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -975,7 +986,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -992,7 +1004,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1101,7 +1114,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1118,7 +1132,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1227,7 +1242,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1236,12 +1252,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.customers%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.customers,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.customers%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.customers,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1250,12 +1279,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": 
"[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.orders%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.orders,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.orders%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.orders,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1264,12 +1306,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.stg_customers%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.stg_customers,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.stg_customers%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.stg_customers,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1278,12 +1333,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.stg_orders%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.stg_orders,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.stg_orders%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.stg_orders,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1292,12 +1360,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.stg_payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": 
\"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.stg_payments,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.stg_payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.stg_payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1306,12 +1387,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.raw_customers%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.raw_customers,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.raw_customers%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.raw_customers,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1320,12 +1414,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.raw_orders%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.raw_orders,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.raw_orders%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.raw_orders,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1334,12 +1441,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.raw_payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.raw_payments,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": 
"application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Ccalm-pagoda-323403.jaffle_shop.raw_payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,calm-pagoda-323403.jaffle_shop.raw_payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1354,7 +1474,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1397,7 +1518,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1408,23 +1530,24 @@ "aspect": { "json": { "timestampMillis": 1655565131058, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:b052a324c05327985f3b579a19ad7579", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:b052a324c05327985f3b579a19ad7579", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1439,7 +1562,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1482,7 +1606,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1493,23 +1618,24 @@ "aspect": { "json": { "timestampMillis": 1655565131075, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:da743330013b7e3e3707ac6e526ab408", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:da743330013b7e3e3707ac6e526ab408", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1524,7 +1650,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1567,7 +1694,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1578,23 +1706,24 @@ "aspect": { "json": { "timestampMillis": 1655565131073, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": 
"c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:2887b9c826e0be6296a37833bdc380bd", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_payments,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:2887b9c826e0be6296a37833bdc380bd", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1609,7 +1738,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1640,7 +1770,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1651,23 +1782,24 @@ "aspect": { "json": { "timestampMillis": 1655565131077, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:591d8dc8939e0cf9bf0fd03264ad1a0e", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:591d8dc8939e0cf9bf0fd03264ad1a0e", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1682,7 +1814,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1732,7 +1865,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1747,7 +1881,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1791,7 +1926,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1802,12 +1938,7 @@ "aspect": { "json": { "timestampMillis": 1655565137668, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:bf7fd2b46d2c32ee9bb036acd1559782", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.customers,PROD)", "status": "COMPLETE", "result": { @@ -1815,12 +1946,18 @@ "nativeResults": { "message": "Database Error in test dbt_expectations_expect_column_values_to_be_in_set_customers_customer_id__customer_id_is_not_null__0__1__2 (models/schema.yml)\n No matching signature for operator = for argument types: INT64, STRING. 
Supported signature: ANY = ANY at [46:25]\n compiled SQL at target/run/jaffle_shop/models/schema.yml/dbt_expectations_expect_column_e42202dc29e1149de0f5c3966219796d.sql" } + }, + "assertionUrn": "urn:li:assertion:bf7fd2b46d2c32ee9bb036acd1559782", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1835,7 +1972,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1874,7 +2012,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1885,12 +2024,7 @@ "aspect": { "json": { "timestampMillis": 1655565137668, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:1c217b7587a0cad47a07a09bfe154055", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { @@ -1898,12 +2032,18 @@ "nativeResults": { "message": "Database Error in test dbt_expectations_expect_column_values_to_not_be_in_set_orders_credit_card_amount__credit_card_amount_is_not_null__0 (models/schema.yml)\n No matching signature for operator = for argument types: FLOAT64, STRING. Supported signature: ANY = ANY at [36:25]\n compiled SQL at target/run/jaffle_shop/models/schema.yml/dbt_expectations_expect_column_fdf581b1071168614662824120d65b90.sql" } + }, + "assertionUrn": "urn:li:assertion:1c217b7587a0cad47a07a09bfe154055", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1918,7 +2058,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1954,7 +2095,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1965,23 +2107,24 @@ "aspect": { "json": { "timestampMillis": 1655565132560, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:44519aa345bf3ea896179f9f352ae946", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.customers,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:44519aa345bf3ea896179f9f352ae946", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -1996,7 +2139,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2032,7 +2176,8 @@ }, "systemMetadata": { 
"lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2043,23 +2188,24 @@ "aspect": { "json": { "timestampMillis": 1655565133585, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:bbd78a070092f54313153abec49f6f31", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:bbd78a070092f54313153abec49f6f31", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2074,7 +2220,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2110,7 +2257,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2121,23 +2269,24 @@ "aspect": { "json": { "timestampMillis": 1655565133591, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:52d06197762e3608d94609e96f03a0a7", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:52d06197762e3608d94609e96f03a0a7", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2152,7 +2301,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2188,7 +2338,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2199,23 +2350,24 @@ "aspect": { "json": { "timestampMillis": 1655565133595, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:ca065a99637630468f688717590beeab", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:ca065a99637630468f688717590beeab", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2230,7 +2382,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2266,7 +2419,8 @@ }, 
"systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2277,23 +2431,24 @@ "aspect": { "json": { "timestampMillis": 1655565134031, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:7a305acc5fc049dc9bbd141b814461d0", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:7a305acc5fc049dc9bbd141b814461d0", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2308,7 +2463,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2344,7 +2500,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2355,23 +2512,24 @@ "aspect": { "json": { "timestampMillis": 1655565134482, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:11087a3d7ae178df22c42922ac8ef8ad", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:11087a3d7ae178df22c42922ac8ef8ad", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2386,7 +2544,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2422,7 +2581,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2433,23 +2593,24 @@ "aspect": { "json": { "timestampMillis": 1655565134485, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:b301bb47cc4ebce4e78a194b3de11f25", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:b301bb47cc4ebce4e78a194b3de11f25", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2464,7 +2625,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2500,7 
+2662,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2511,23 +2674,24 @@ "aspect": { "json": { "timestampMillis": 1655565134493, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:2e9117138dcc9facda66f1efd55a8cd7", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:2e9117138dcc9facda66f1efd55a8cd7", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2542,7 +2706,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2578,7 +2743,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2589,23 +2755,24 @@ "aspect": { "json": { "timestampMillis": 1655565134966, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:25ebf4faa9b1654ef54c46d975ca0a81", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_customers,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:25ebf4faa9b1654ef54c46d975ca0a81", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2620,7 +2787,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2656,7 +2824,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2667,23 +2836,24 @@ "aspect": { "json": { "timestampMillis": 1655565135368, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:b03abcc447aac70bbebb22a8a9d7dbbe", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:b03abcc447aac70bbebb22a8a9d7dbbe", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2698,7 +2868,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": 
"no-run-id-provided" } }, { @@ -2734,7 +2905,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2745,23 +2917,24 @@ "aspect": { "json": { "timestampMillis": 1655565135377, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:c1eebc71f36690e4523adca30314e927", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_payments,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:c1eebc71f36690e4523adca30314e927", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2776,7 +2949,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2821,7 +2995,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2832,23 +3007,24 @@ "aspect": { "json": { "timestampMillis": 1655565135510, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:b210dbd31c2ee4efc0c24a9e4cf125ef", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.customers,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:b210dbd31c2ee4efc0c24a9e4cf125ef", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2893,7 +3069,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2904,23 +3081,24 @@ "aspect": { "json": { "timestampMillis": 1655565135510, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:b210dbd31c2ee4efc0c24a9e4cf125ef", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:b210dbd31c2ee4efc0c24a9e4cf125ef", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2935,7 +3113,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2977,7 +3156,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": 
"dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -2988,23 +3168,24 @@ "aspect": { "json": { "timestampMillis": 1655565135836, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:c51ca9c4b5a1f964bef748f0b8968e71", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.customers,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:c51ca9c4b5a1f964bef748f0b8968e71", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3019,7 +3200,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3061,7 +3243,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3072,23 +3255,24 @@ "aspect": { "json": { "timestampMillis": 1655565136269, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:caa9b8060e214cecab88a92dc39c2e60", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:caa9b8060e214cecab88a92dc39c2e60", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3103,7 +3287,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3145,7 +3330,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3156,23 +3342,24 @@ "aspect": { "json": { "timestampMillis": 1655565136230, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:54bac90e6785bdefd8685ebf8814c429", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_customers,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:54bac90e6785bdefd8685ebf8814c429", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3187,7 +3374,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3229,7 +3417,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": 
"dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3240,23 +3429,24 @@ "aspect": { "json": { "timestampMillis": 1655565136395, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:815963e1332b46a203504ba46ebfab24", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_orders,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:815963e1332b46a203504ba46ebfab24", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3271,7 +3461,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3313,7 +3504,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3324,23 +3516,24 @@ "aspect": { "json": { "timestampMillis": 1655565136719, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, "runId": "c7a6b778-0e0f-4789-b567-ca7e124a6840", - "assertionUrn": "urn:li:assertion:fac27f352406b941125292413afa8096", "asserteeUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,calm-pagoda-323403.jaffle_shop.stg_payments,PROD)", "status": "COMPLETE", "result": { "type": "SUCCESS", "nativeResults": {} + }, + "assertionUrn": "urn:li:assertion:fac27f352406b941125292413afa8096", + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" } } }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3355,7 +3548,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3370,7 +3564,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3385,7 +3580,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3400,7 +3596,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3415,7 +3612,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3430,7 +3628,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3445,7 +3644,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3460,7 +3660,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": 
"dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3475,7 +3676,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3490,7 +3692,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3505,7 +3708,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3520,7 +3724,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3535,7 +3740,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3550,7 +3756,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3565,7 +3772,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3580,7 +3788,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3595,7 +3804,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3610,7 +3820,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3625,7 +3836,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3640,7 +3852,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3655,7 +3868,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3670,7 +3884,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3685,7 +3900,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -3700,7 +3916,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-2022_02_03-07_00_00" + "runId": "dbt-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json index 19bfb60e62a08..0bdd5e3c895c2 100644 --- 
a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json @@ -14,7 +14,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -95,7 +96,92 @@ "tableSchema": "" } }, - "fields": [] + "fields": [ + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + } + ] } }, { @@ -140,7 +226,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +246,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -302,7 +390,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -320,7 +409,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -522,7 +612,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -541,7 +632,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -659,7 +751,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -677,7 +770,8 @@ }, 
"systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -826,7 +920,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -844,7 +939,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1014,7 +1110,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1032,7 +1129,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1142,7 +1240,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1160,7 +1259,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1282,7 +1382,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1300,7 +1401,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1427,7 +1529,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1445,7 +1548,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1639,7 +1743,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1657,7 +1762,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1803,7 +1909,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1821,7 +1928,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -1985,7 +2093,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2003,7 +2112,8 @@ }, "systemMetadata": { "lastObserved": 
1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2149,7 +2259,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2167,7 +2278,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2313,7 +2425,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2331,7 +2444,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2477,7 +2591,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2486,12 +2601,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2500,12 +2628,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": 
"dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2514,12 +2655,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2534,7 +2688,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } }, { @@ -2549,7 +2704,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-complex-owner-patterns" + "runId": "dbt-test-with-complex-owner-patterns", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json index 242c83003b181..5ab0b11e37771 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json @@ -14,7 +14,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +97,92 @@ "tableSchema": "" } }, - "fields": [] + "fields": [ + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": 
"postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + } + ] } }, { @@ -141,7 +227,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -160,7 +247,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -303,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -321,7 +410,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -523,7 +613,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -542,7 +633,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +752,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +771,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -827,7 +921,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -845,7 +940,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1015,7 +1111,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1033,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1143,7 +1241,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1161,7 +1260,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1283,7 +1383,8 @@ }, "systemMetadata": { "lastObserved": 
1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1301,7 +1402,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1428,7 +1530,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1446,7 +1549,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1640,7 +1744,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1658,7 +1763,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1804,7 +1910,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1822,7 +1929,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1986,7 +2094,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2004,7 +2113,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2150,7 +2260,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2168,7 +2279,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2314,7 +2426,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2332,7 +2445,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2478,7 +2592,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2496,7 +2611,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2642,7 +2758,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": 
"dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2651,12 +2768,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cdbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cdbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2665,12 +2795,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cdbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cdbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2679,12 +2822,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cdbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cdbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": 
"urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2699,7 +2855,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2714,7 +2871,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-data-platform-instance" + "runId": "dbt-test-with-data-platform-instance", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json index d98b63b9da62f..3725e590fee9e 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json @@ -14,7 +14,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +97,92 @@ "tableSchema": "" } }, - "fields": [] + "fields": [ + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + } + ] } }, { @@ -141,7 +227,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -160,7 +247,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -303,7 +391,8 @@ }, "systemMetadata": { 
"lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -321,7 +410,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -523,7 +613,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -542,7 +633,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +752,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +771,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -827,7 +921,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -845,7 +940,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1015,7 +1111,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1033,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1143,7 +1241,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1161,7 +1260,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1283,7 +1383,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1301,7 +1402,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1428,7 +1530,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1446,7 +1549,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1640,7 +1744,8 @@ }, "systemMetadata": { "lastObserved": 
1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1658,7 +1763,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1804,7 +1910,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1822,7 +1929,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -1986,7 +2094,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2004,7 +2113,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2150,7 +2260,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2168,7 +2279,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2314,7 +2426,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2332,7 +2445,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2478,7 +2592,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2496,7 +2611,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2642,7 +2758,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2669,7 +2786,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2696,7 +2814,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2723,7 +2842,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2738,7 +2858,8 @@ }, "systemMetadata": { "lastObserved": 
1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } }, { @@ -2753,7 +2874,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-non-incremental-lineage" + "runId": "dbt-test-with-non-incremental-lineage", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json index 7c024f93641b9..a47abab6b40f7 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json @@ -14,7 +14,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +97,92 @@ "tableSchema": "" } }, - "fields": [] + "fields": [ + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "full_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "VARCHAR", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "TEXT", + "recursive": false, + "isPartOfKey": false + } + ] } }, { @@ -141,7 +227,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -160,7 +247,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -303,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -321,7 +410,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } 
}, { @@ -523,7 +613,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -542,7 +633,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +752,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -678,7 +771,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -827,7 +921,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -845,7 +940,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1015,7 +1111,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1033,7 +1130,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1143,7 +1241,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1161,7 +1260,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1283,7 +1383,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1301,7 +1402,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1428,7 +1530,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1446,7 +1549,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1640,7 +1744,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1658,7 +1763,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": 
"no-run-id-provided" } }, { @@ -1804,7 +1910,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1822,7 +1929,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -1986,7 +2094,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2004,7 +2113,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2150,7 +2260,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2168,7 +2279,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2314,7 +2426,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2332,7 +2445,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2478,7 +2592,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2496,7 +2611,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2642,7 +2758,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2651,12 +2768,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-monthly-billing%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": 
"dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2665,12 +2795,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.an-aliased-view-for-payments%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2679,12 +2822,25 @@ "changeType": "PATCH", "aspectName": "upstreamLineage", "aspect": { - "value": "[{\"op\": \"add\", \"path\": \"/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29\", \"value\": {\"auditStamp\": {\"time\": 1643871600000, \"actor\": \"urn:li:corpuser:unknown\"}, \"dataset\": \"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD)\", \"type\": \"TRANSFORMED\"}}]", - "contentType": "application/json-patch+json" + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Adbt%2Cpagila.dbt_postgres.payments_by_customer_by_month%2CPROD%29", + "value": { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", + "type": "TRANSFORMED" + } + } + ] }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2699,7 +2855,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } }, { @@ -2714,7 +2871,8 @@ }, "systemMetadata": { "lastObserved": 1643871600000, - "runId": "dbt-test-with-target-platform-instance" + "runId": "dbt-test-with-target-platform-instance", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/dbt/test_dbt.py b/metadata-ingestion/tests/integration/dbt/test_dbt.py index a970ff6a5de7a..95b5374bbb41d 100644 --- a/metadata-ingestion/tests/integration/dbt/test_dbt.py +++ b/metadata-ingestion/tests/integration/dbt/test_dbt.py @@ -361,11 +361,11 @@ def test_dbt_tests_only_assertions(pytestconfig, tmp_path, mock_time, **kwargs): test_results_path=str( (test_resources_dir / "jaffle_shop_test_results.json").resolve() ), - # this is just here to avoid needing to access datahub server - write_semantics="OVERRIDE", 
entities_enabled=DBTEntitiesEnabled( test_results=EmitDirective.ONLY ), + # this is just here to avoid needing to access datahub server + write_semantics="OVERRIDE", ), ), sink=DynamicTypedConfig(type="file", config={"filename": str(output_file)}), @@ -440,13 +440,13 @@ def test_dbt_only_test_definitions_and_results( test_results_path=str( (test_resources_dir / "jaffle_shop_test_results.json").resolve() ), - # this is just here to avoid needing to access datahub server - write_semantics="OVERRIDE", entities_enabled=DBTEntitiesEnabled( sources=EmitDirective.NO, seeds=EmitDirective.NO, models=EmitDirective.NO, ), + # this is just here to avoid needing to access datahub server + write_semantics="OVERRIDE", ), ), sink=DynamicTypedConfig(type="file", config={"filename": str(output_file)}), diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_cte_name_collision.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_cte_name_collision.json new file mode 100644 index 0000000000000..44f1075c058ad --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_cte_name_collision.json @@ -0,0 +1,47 @@ +{ + "query_type": "SELECT", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table1,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table2,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table3,PROD)" + ], + "out_tables": [], + "column_lineage": [ + { + "downstream": { + "table": null, + "column": "col2", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "native_column_type": "VARCHAR(16777216)" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table2,PROD)", + "column": "col2" + } + ] + }, + { + "downstream": { + "table": null, + "column": "col1", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "native_column_type": "VARCHAR(16777216)" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table3,PROD)", + "column": "col1" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_full_table_name_col_reference.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_full_table_name_col_reference.json new file mode 100644 index 0000000000000..f8301f1e8189e --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_full_table_name_col_reference.json @@ -0,0 +1,55 @@ +{ + "query_type": "SELECT", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.my_table,PROD)" + ], + "out_tables": [], + "column_lineage": [ + { + "downstream": { + "table": null, + "column": "id", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.my_table,PROD)", + "column": "id" + } + ] + }, + { + "downstream": { + "table": null, + "column": "id_gt_100", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "native_column_type": "INT" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.my_table,PROD)", + "column": "id" + } + ] + }, + { + "downstream": { + "table": null, + "column": "struct_field1", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.my_table,PROD)", + "column": "struct_field.field1" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_unused_cte.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_unused_cte.json new file mode 100644 index 0000000000000..3916c6dc7c5ef --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_unused_cte.json @@ -0,0 +1,39 @@ +{ + "query_type": "SELECT", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,table1,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,table2,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,table3,PROD)" + ], + "out_tables": [], + "column_lineage": [ + { + "downstream": { + "table": null, + "column": "col1", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,table1,PROD)", + "column": "col1" + } + ] + }, + { + "downstream": { + "table": null, + "column": "col6", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,table3,PROD)", + "column": "col6" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index c420f2b8438ce..7f69e358f8f11 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -3,11 +3,59 @@ import pytest from datahub.testing.check_sql_parser_result import assert_sql_result -from datahub.utilities.sqlglot_lineage import _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT +from datahub.utilities.sqlglot_lineage import ( + _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT, + detach_ctes, +) RESOURCE_DIR = pathlib.Path(__file__).parent / "goldens" +def test_detach_ctes_simple(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "_my_cte_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table ON table2.id = _my_cte_table.id" + ) + + +def test_detach_ctes_with_alias(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 AS tablealias ON table2.id = tablealias.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "_my_cte_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table AS tablealias ON table2.id = tablealias.id" + ) + + +def test_detach_ctes_with_multipart_replacement(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "my_db.my_schema.my_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN my_db.my_schema.my_table ON table2.id = my_db.my_schema.my_table.id" + ) + + def test_select_max(): # The COL2 should get normalized to col2. 
assert_sql_result( @@ -630,6 +678,84 @@ def test_snowflake_column_cast(): ) +def test_snowflake_unused_cte(): + # For this, we expect table level lineage to include table1, but CLL should not. + assert_sql_result( + """ +WITH cte1 AS ( + SELECT col1, col2 + FROM table1 + WHERE col1 = 'value1' +), cte2 AS ( + SELECT col3, col4 + FROM table2 + WHERE col2 = 'value2' +) +SELECT cte1.col1, table3.col6 +FROM cte1 +JOIN table3 ON table3.col5 = cte1.col2 +""", + dialect="snowflake", + expected_file=RESOURCE_DIR / "test_snowflake_unused_cte.json", + ) + + +def test_snowflake_cte_name_collision(): + # In this example, output col1 should come from table3 and not table1, since the cte is unused. + # We'll still generate table-level lineage that includes table1. + assert_sql_result( + """ +WITH cte_alias AS ( + SELECT col1, col2 + FROM table1 +) +SELECT table2.col2, cte_alias.col1 +FROM table2 +JOIN table3 AS cte_alias ON cte_alias.col2 = cte_alias.col2 +""", + dialect="snowflake", + default_db="my_db", + default_schema="my_schema", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table1,PROD)": { + "col1": "NUMBER(38,0)", + "col2": "VARCHAR(16777216)", + }, + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table2,PROD)": { + "col2": "VARCHAR(16777216)", + }, + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.table3,PROD)": { + "col1": "VARCHAR(16777216)", + "col2": "VARCHAR(16777216)", + }, + }, + expected_file=RESOURCE_DIR / "test_snowflake_cte_name_collision.json", + ) + + +def test_snowflake_full_table_name_col_reference(): + assert_sql_result( + """ +SELECT + my_db.my_schema.my_table.id, + case when my_db.my_schema.my_table.id > 100 then 1 else 0 end as id_gt_100, + my_db.my_schema.my_table.struct_field.field1 as struct_field1, +FROM my_db.my_schema.my_table +""", + dialect="snowflake", + default_db="my_db", + default_schema="my_schema", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my_db.my_schema.my_db.my_schema.my_table,PROD)": { + "id": "NUMBER(38,0)", + "struct_field": "struct", + }, + }, + expected_file=RESOURCE_DIR + / "test_snowflake_full_table_name_col_reference.json", + ) + + # TODO: Add a test for setting platform_instance or env diff --git a/metadata-ingestion/tests/unit/test_topological_sort.py b/metadata-ingestion/tests/unit/test_topological_sort.py new file mode 100644 index 0000000000000..4300816b6c48f --- /dev/null +++ b/metadata-ingestion/tests/unit/test_topological_sort.py @@ -0,0 +1,33 @@ +import pytest + +from datahub.utilities.topological_sort import topological_sort + + +def test_topological_sort_valid(): + nodes = ["a", "b", "c", "d", "e", "f"] + edges = [ + ("a", "d"), + ("f", "b"), + ("b", "d"), + ("f", "a"), + ("d", "c"), + ] + + # This isn't the only valid topological sort order. 
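+    # Any ordering that places u before v for every edge (u, v) would be
+    # equally correct; the assertion pins down the one this implementation
+    # happens to produce.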
+ expected_order = ["e", "f", "b", "a", "d", "c"] + assert list(topological_sort(nodes, edges)) == expected_order + + +def test_topological_sort_invalid(): + nodes = ["a", "b", "c", "d", "e", "f"] + edges = [ + ("a", "d"), + ("f", "b"), + ("b", "d"), + ("f", "a"), + ("d", "c"), + ("c", "f"), + ] + + with pytest.raises(ValueError, match="cycle"): + list(topological_sort(nodes, edges)) From 906a5b91a8adbd3165c01607e9301c84b886106f Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 13 Nov 2023 21:51:11 -0500 Subject: [PATCH 100/792] chore(ingest): cleanup various methods (#9221) --- metadata-events/mxe-schemas/rename-namespace.sh | 2 +- metadata-ingestion/scripts/docgen.py | 11 +++-------- .../datahub/ingestion/source/looker/looker_common.py | 2 +- .../src/datahub/ingestion/source/mode.py | 2 ++ 4 files changed, 7 insertions(+), 10 deletions(-) diff --git a/metadata-events/mxe-schemas/rename-namespace.sh b/metadata-events/mxe-schemas/rename-namespace.sh index 6402e09b65c07..ef04868a6bd15 100755 --- a/metadata-events/mxe-schemas/rename-namespace.sh +++ b/metadata-events/mxe-schemas/rename-namespace.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]:-$0}" )" >/dev/null && pwd )" diff --git a/metadata-ingestion/scripts/docgen.py b/metadata-ingestion/scripts/docgen.py index 1a4db09e961ce..3e4595650d46a 100644 --- a/metadata-ingestion/scripts/docgen.py +++ b/metadata-ingestion/scripts/docgen.py @@ -7,11 +7,10 @@ import sys import textwrap from importlib.metadata import metadata, requires -from typing import Any, Dict, Iterable, List, Optional, Tuple, Union +from typing import Any, Dict, Iterable, List, Optional import click from pydantic import BaseModel, Field -from pydantic.dataclasses import dataclass from datahub.configuration.common import ConfigModel from datahub.ingestion.api.decorators import ( @@ -94,7 +93,6 @@ class Component(BaseModel): @staticmethod def map_field_path_to_components(field_path: str) -> List[Component]: - m = re.match(FieldRow._V2_FIELD_PATH_TOKEN_MATCHER_PREFIX, field_path) v = re.match(FieldRow._V2_FIELD_PATH_FIELD_NAME_MATCHER, field_path) components: List[FieldRow.Component] = [] @@ -197,7 +195,7 @@ def get_checkbox(self) -> str: # Using a non-breaking space to prevent the checkbox from being # broken into a new line. 
if not self.parent: # None and empty string both count - return f' ' + return ' ' else: return f' ' else: @@ -356,7 +354,6 @@ def priority_value(path: str) -> str: def gen_md_table_from_struct(schema_dict: Dict[str, Any]) -> List[str]: - from datahub.ingestion.extractor.json_schema_util import JsonSchemaTranslator # we don't want default field values to be injected into the description of the field @@ -460,7 +457,6 @@ def get_additional_deps_for_extra(extra_name: str) -> List[str]: def relocate_path(orig_path: str, relative_path: str, relocated_path: str) -> str: - newPath = os.path.join(os.path.dirname(orig_path), relative_path) assert os.path.exists(newPath) @@ -515,7 +511,6 @@ def generate( if extra_docs: for path in glob.glob(f"{extra_docs}/**/*[.md|.yaml|.yml]", recursive=True): - m = re.search("/docs/sources/(.*)/(.*).md", path) if m: platform_name = m.group(1).lower() @@ -741,7 +736,7 @@ def generate( i += 1 f.write(f"---\nsidebar_position: {i}\n---\n\n") f.write( - f"import Tabs from '@theme/Tabs';\nimport TabItem from '@theme/TabItem';\n\n" + "import Tabs from '@theme/Tabs';\nimport TabItem from '@theme/TabItem';\n\n" ) f.write(f"# {platform_docs['name']}\n") diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py index 7ca5ce49019ab..e440750cba0d0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py @@ -828,7 +828,7 @@ def from_api( # noqa: C901 ) else: logger.warning( - f"Failed to extract explore {explore_name} from model {model}.", e + f"Failed to extract explore {explore_name} from model {model}: {e}" ) except AssertionError: diff --git a/metadata-ingestion/src/datahub/ingestion/source/mode.py b/metadata-ingestion/src/datahub/ingestion/source/mode.py index c46b56da422d9..e4ea3b2ed099f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mode.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mode.py @@ -218,6 +218,8 @@ def construct_dashboard( if creator is not None: modified_actor = builder.make_user_urn(creator) if report_info.get("last_saved_at") is None: + # Sometimes mode returns null for last_saved_at. + # In that case, we use the created_at timestamp instead. report_info["last_saved_at"] = report_info.get("created_at") modified_ts = int( From f1b6aa782277e3611eeb64edf6c123598de109df Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 13 Nov 2023 21:51:27 -0500 Subject: [PATCH 101/792] docs: clarify how to disable telemetry (#9236) --- docs/deploy/telemetry.md | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/docs/deploy/telemetry.md b/docs/deploy/telemetry.md index c5458cc5df05e..a1b25337de767 100644 --- a/docs/deploy/telemetry.md +++ b/docs/deploy/telemetry.md @@ -4,7 +4,25 @@ To effectively build and maintain the DataHub Project, we must understand how end-users work within DataHub. Beginning in version 0.8.35, DataHub collects anonymous usage statistics and errors to inform our roadmap priorities and to enable us to proactively address errors. -Deployments are assigned a UUID which is sent along with event details, Java version, OS, and timestamp; telemetry collection is enabled by default and can be disabled by setting `DATAHUB_TELEMETRY_ENABLED=false` in your Docker Compose config. +Both the DataHub backend and the ingestion framework collect telemetry. 
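+
+As a quick reference for the opt-outs described in the sections below (a sketch assuming a docker-compose style deployment; service names may differ in your setup):
+
+```shell
+# Disable backend telemetry (set on both the datahub-gms and datahub-actions containers):
+export DATAHUB_TELEMETRY_ENABLED=false
+# Disable and persist ingestion/CLI telemetry on this machine:
+datahub telemetry disable
+```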
+## DataHub Backend Telemetry -The source code is available [here.](../../metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java) \ No newline at end of file +Deployments are assigned a UUID which is sent along with event details, Java version, OS, and timestamp. +The source code is available [here](../../metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java). + +## Ingestion Framework Telemetry + +The ingestion framework collects telemetry including CLI invocations, source/sink types, error types, versions, and timestamps. If you run with `datahub --debug`, all telemetry calls will be logged. + +On first invocation, the CLI will generate a randomized UUID, which will be sent alongside every telemetry event. This config is stored in `~/.datahub/telemetry-config.json`. + +The source code is available [here](../../metadata-ingestion/src/datahub/telemetry/telemetry.py). + +## Disabling Telemetry + +Telemetry is enabled by default. While we are careful to anonymize all telemetry data and encourage users to keep it enabled so that we can improve DataHub, we understand that some users may wish to disable it. + +You can disable backend telemetry by setting the `DATAHUB_TELEMETRY_ENABLED` environment variable to `false`. You'll need to set this on both the datahub-gms and datahub-actions containers. + +If you're using the DataHub CLI, ingestion framework telemetry will be disabled when the `DATAHUB_TELEMETRY_ENABLED` environment variable is set to `false`. To persist this change for your machine, run `datahub telemetry disable`. From cfeecd799dd58793e12deb941cb51d590f7a6f4f Mon Sep 17 00:00:00 2001 From: Tony Ouyang Date: Tue, 14 Nov 2023 09:12:39 -0800 Subject: [PATCH 102/792] feat(ingest/mongodb): support AWS DocumentDB for MongoDB (#9201) --- .../src/datahub/ingestion/source/mongodb.py | 38 ++++++++++++++++--- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py index ce2b9ce2981e0..2aa8b1d37d477 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py @@ -1,5 +1,6 @@ import logging from dataclasses import dataclass, field +from enum import Enum from typing import Dict, Iterable, List, Optional, Tuple, Type, Union, ValuesView import bson.timestamp @@ -74,6 +75,12 @@ DENY_DATABASE_LIST = set(["admin", "config", "local"]) +class HostingEnvironment(Enum): + SELF_HOSTED = "SELF_HOSTED" + ATLAS = "ATLAS" + AWS_DOCUMENTDB = "AWS_DOCUMENTDB" + + class MongoDBConfig( PlatformInstanceConfigMixin, EnvConfigMixin, StatefulIngestionConfigBase ): @@ -108,6 +115,11 @@ class MongoDBConfig( # errors out with "16793600" as the maximum size supported. 
maxDocumentSize: Optional[PositiveInt] = Field(default=16793600, description="") + hostingEnvironment: Optional[HostingEnvironment] = Field( + default=HostingEnvironment.SELF_HOSTED, + description="Hosting environment of MongoDB, default is SELF_HOSTED, currently support `SELF_HOSTED`, `ATLAS`, `AWS_DOCUMENTDB`", + ) + database_pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), description="regex patterns for databases to filter in ingestion.", @@ -176,7 +188,7 @@ def construct_schema_pymongo( delimiter: str, use_random_sampling: bool, max_document_size: int, - is_version_gte_4_4: bool, + should_add_document_size_filter: bool, sample_size: Optional[int] = None, ) -> Dict[Tuple[str, ...], SchemaDescription]: """ @@ -191,15 +203,19 @@ def construct_schema_pymongo( the PyMongo collection delimiter: string to concatenate field names by + use_random_sampling: + boolean to indicate if random sampling should be added to aggregation + max_document_size: + maximum size of the document that will be considered for generating the schema. + should_add_document_size_filter: + boolean to indicate if document size filter should be added to aggregation sample_size: number of items in the collection to sample (reads entire collection if not provided) - max_document_size: - maximum size of the document that will be considered for generating the schema. """ aggregations: List[Dict] = [] - if is_version_gte_4_4: + if should_add_document_size_filter: doc_size_field = "temporary_doc_size_field" # create a temporary field to store the size of the document. filter on it and then remove it. aggregations = [ @@ -381,7 +397,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: delimiter=".", use_random_sampling=self.config.useRandomSampling, max_document_size=self.config.maxDocumentSize, - is_version_gte_4_4=self.is_server_version_gte_4_4(), + should_add_document_size_filter=self.should_add_document_size_filter(), sample_size=self.config.schemaSamplingSize, ) @@ -475,6 +491,18 @@ def is_server_version_gte_4_4(self) -> bool: return False + def is_hosted_on_aws_documentdb(self) -> bool: + return self.config.hostingEnvironment == HostingEnvironment.AWS_DOCUMENTDB + + def should_add_document_size_filter(self) -> bool: + # the operation $bsonsize is only available in server version greater than 4.4 + # and is not supported by AWS DocumentDB, we should only add this operation to + # aggregation for mongodb that doesn't run on AWS DocumentDB and version is greater than 4.4 + # https://docs.aws.amazon.com/documentdb/latest/developerguide/mongo-apis.html + return ( + self.is_server_version_gte_4_4() and not self.is_hosted_on_aws_documentdb() + ) + def get_report(self) -> MongoDBSourceReport: return self.report From ec13847f54fb167571359bb233489b8b353bad02 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 14 Nov 2023 14:25:26 -0500 Subject: [PATCH 103/792] feat(airflow): make RUN_IN_THREAD configurable (#9226) --- docs/lineage/airflow.md | 1 + .../src/datahub_airflow_plugin/datahub_listener.py | 8 ++++++-- metadata-ingestion/src/datahub/cli/docker_cli.py | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 19ed1598d4c5a..3a13aefa834a4 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -193,6 +193,7 @@ In order to use this example, you must first configure the Datahub hook. Like in If you're not seeing lineage in DataHub, check the following: - Validate that the plugin is loaded in Airflow. 
Go to Admin -> Plugins and check that the DataHub plugin is listed. +- With the v2 plugin, it should also print a log line like `INFO [datahub_airflow_plugin.datahub_listener] DataHub plugin v2 using DataHubRestEmitter: configured to talk to ` during Airflow startup, and the `airflow plugins` command should list `datahub_plugin` with a listener enabled. - If using the v2 plugin's automatic lineage, ensure that the `enable_extractors` config is set to true and that automatic lineage is supported for your operator. - If using manual lineage annotation, ensure that you're using the `datahub_airflow_plugin.entities.Dataset` or `datahub_airflow_plugin.entities.Urn` classes for your inlets and outlets. diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index d00b10bbe1756..c39eef2635658 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -1,6 +1,7 @@ import copy import functools import logging +import os import threading from typing import TYPE_CHECKING, Callable, Dict, List, Optional, TypeVar, cast @@ -55,7 +56,10 @@ def hookimpl(f: _F) -> _F: # type: ignore[misc] # noqa: F811 _airflow_listener_initialized = False _airflow_listener: Optional["DataHubListener"] = None -_RUN_IN_THREAD = True +_RUN_IN_THREAD = os.getenv("DATAHUB_AIRFLOW_PLUGIN_RUN_IN_THREAD", "true").lower() in ( + "true", + "1", +) _RUN_IN_THREAD_TIMEOUT = 30 @@ -133,7 +137,7 @@ def __init__(self, config: DatahubLineageConfig): self._emitter = config.make_emitter_hook().make_emitter() self._graph: Optional[DataHubGraph] = None - logger.info(f"DataHub plugin using {repr(self._emitter)}") + logger.info(f"DataHub plugin v2 using {repr(self._emitter)}") # See discussion here https://github.com/OpenLineage/OpenLineage/pull/508 for # why we need to keep track of tasks ourselves. 
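For reference, the new environment toggle above is parsed with a plain string comparison, so threaded emission stays enabled unless the variable is explicitly set to something other than "true" (any casing) or "1". A minimal standalone sketch of the same parsing (the helper name is illustrative, not part of the plugin):

    import os

    def _env_flag(name: str, default: str = "true") -> bool:
        # Only "true" (case-insensitive) and "1" count as enabled;
        # "false", "0", or any other value disables the flag.
        return os.getenv(name, default).lower() in ("true", "1")

    # Mirrors the plugin's default: emit metadata from a background thread.
    RUN_IN_THREAD = _env_flag("DATAHUB_AIRFLOW_PLUGIN_RUN_IN_THREAD")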
diff --git a/metadata-ingestion/src/datahub/cli/docker_cli.py b/metadata-ingestion/src/datahub/cli/docker_cli.py index 77e3285d359ef..08f3faae8abb2 100644 --- a/metadata-ingestion/src/datahub/cli/docker_cli.py +++ b/metadata-ingestion/src/datahub/cli/docker_cli.py @@ -766,7 +766,7 @@ def quickstart( # noqa: C901 logger.debug("docker compose up timed out, sending SIGTERM") up_process.terminate() try: - up_process.wait(timeout=3) + up_process.wait(timeout=8) except subprocess.TimeoutExpired: logger.debug("docker compose up still running, sending SIGKILL") up_process.kill() From 486e394cb8727470e6efe564ef04359f4ec6d1b1 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Tue, 14 Nov 2023 14:06:33 -0600 Subject: [PATCH 104/792] fix(signup): prevent invalid email signup (#9234) --- .../app/auth/NativeAuthenticationConfigs.java | 16 ++++++++++++---- .../controllers/AuthenticationController.java | 8 ++++++++ datahub-frontend/conf/application.conf | 4 ++++ .../authentication/user/NativeUserService.java | 10 +++++++++- .../user/NativeUserServiceTest.java | 15 ++++++++++++++- .../authentication/AuthServiceController.java | 8 ++++++++ .../factory/auth/NativeUserServiceFactory.java | 7 ++++++- 7 files changed, 61 insertions(+), 7 deletions(-) diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java index db17313d67f9a..3114da92d7d79 100644 --- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java +++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java @@ -6,18 +6,26 @@ public class NativeAuthenticationConfigs { public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled"; + public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = "auth.native.signUp.enforceValidEmail"; private Boolean _isEnabled = true; + private Boolean _isEnforceValidEmailEnabled = true; public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) { - if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH) - && Boolean.FALSE.equals( - Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()))) { - _isEnabled = false; + if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) { + _isEnabled = Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()); + } + if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) { + _isEnforceValidEmailEnabled = + Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH).toString()); } } public boolean isNativeAuthenticationEnabled() { return _isEnabled; } + + public boolean isEnforceValidEmailEnabled() { + return _isEnforceValidEmailEnabled; + } } diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index 4f89f4f67e149..e28d4ba2ee37e 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -27,6 +27,7 @@ import org.pac4j.play.store.PlaySessionStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import play.data.validation.Constraints; import play.libs.Json; import play.mvc.Controller; import play.mvc.Http; @@ -203,6 +204,13 @@ public Result signUp(Http.Request request) { JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); return 
Results.badRequest(invalidCredsJson);
     }
+    if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) {
+      Constraints.EmailValidator emailValidator = new Constraints.EmailValidator();
+      if (!emailValidator.isValid(email)) {
+        JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid email address.");
+        return Results.badRequest(invalidCredsJson);
+      }
+    }
     if (StringUtils.isBlank(password)) {
       JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty.");
diff --git a/datahub-frontend/conf/application.conf b/datahub-frontend/conf/application.conf
index 1a62c8547e721..0f4ddb7c497e6 100644
--- a/datahub-frontend/conf/application.conf
+++ b/datahub-frontend/conf/application.conf
@@ -196,6 +196,10 @@ auth.oidc.preferredJwsAlgorithm = ${?AUTH_OIDC_PREFERRED_JWS_ALGORITHM} # Which
 #
 auth.jaas.enabled = ${?AUTH_JAAS_ENABLED}
 auth.native.enabled = ${?AUTH_NATIVE_ENABLED}
+
+# Enforces the usage of a valid email for user sign up
+auth.native.signUp.enforceValidEmail = true
+auth.native.signUp.enforceValidEmail = ${?ENFORCE_VALID_EMAIL}
 #
 # To disable all authentication to the app, and proxy all users through a master "datahub" account, make sure that,
 # jaas, native and oidc auth are disabled:
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java
index 7f0c16d28f121..bff675ddd9cb2 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java
@@ -1,7 +1,9 @@
 package com.datahub.authentication.user;
 
 import com.datahub.authentication.Authentication;
+import com.datahub.authentication.AuthenticationConfiguration;
 import com.linkedin.common.AuditStamp;
+import com.linkedin.common.urn.CorpuserUrn;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.events.metadata.ChangeType;
@@ -34,6 +36,7 @@ public class NativeUserService {
   private final EntityService _entityService;
   private final EntityClient _entityClient;
   private final SecretService _secretService;
+  private final AuthenticationConfiguration _authConfig;
 
   public void createNativeUser(@Nonnull String userUrnString, @Nonnull String fullName, @Nonnull String email,
       @Nonnull String title, @Nonnull String password, @Nonnull Authentication authentication) throws Exception {
@@ -45,7 +48,12 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full
     Objects.requireNonNull(authentication, "authentication must not be null!");
 
     final Urn userUrn = Urn.createFromString(userUrnString);
-    if (_entityService.exists(userUrn) || userUrn.toString().equals(SYSTEM_ACTOR)) {
+    if (_entityService.exists(userUrn)
+        // These should never match thanks to the controller-level check, but guard here in case new call sites appear.
+        || userUrn.toString().equals(SYSTEM_ACTOR)
+        || userUrn.toString().equals(new CorpuserUrn(_authConfig.getSystemClientId()).toString())
+        || userUrn.toString().equals(DATAHUB_ACTOR)
+        || userUrn.toString().equals(UNKNOWN_ACTOR)) {
       throw new RuntimeException("This user already exists! 
Cannot create a new user."); } updateCorpUserInfo(userUrn, fullName, email, title, authentication); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index b0b10af82155a..0102311ff3b61 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -3,6 +3,7 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationConfiguration; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.entity.client.EntityClient; @@ -48,8 +49,10 @@ public void setupTest() throws Exception { _entityService = mock(EntityService.class); _entityClient = mock(EntityClient.class); _secretService = mock(SecretService.class); + AuthenticationConfiguration authenticationConfiguration = new AuthenticationConfiguration(); + authenticationConfiguration.setSystemClientId("someCustomId"); - _nativeUserService = new NativeUserService(_entityService, _entityClient, _secretService); + _nativeUserService = new NativeUserService(_entityService, _entityClient, _secretService, authenticationConfiguration); } @Test @@ -74,6 +77,16 @@ public void testCreateNativeUserUserAlreadyExists() throws Exception { _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } + @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + public void testCreateNativeUserUserDatahub() throws Exception { + _nativeUserService.createNativeUser(DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + } + + @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! 
Cannot create a new user.") + public void testCreateNativeUserUserSystemUser() throws Exception { + _nativeUserService.createNativeUser(SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + } + @Test public void testCreateNativeUserPasses() throws Exception { when(_entityService.exists(any())).thenReturn(false); diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index b5ce99902108a..34354a47b7f04 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -12,6 +12,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.gms.factory.config.ConfigurationProvider; import java.util.concurrent.CompletableFuture; @@ -28,6 +29,8 @@ import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.HttpClientErrorException; +import static com.linkedin.metadata.Constants.*; + @Slf4j @RestController @@ -177,6 +180,11 @@ CompletableFuture> signUp(final HttpEntity httpEn } String userUrnString = userUrn.asText(); + String systemClientUser = new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); + + if (userUrnString.equals(systemClientUser) || userUrnString.equals(DATAHUB_ACTOR) || userUrnString.equals(UNKNOWN_ACTOR)) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } String fullNameString = fullName.asText(); String emailString = email.asText(); String titleString = title.asText(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index 3df499ea9392e..a0df661852935 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -3,6 +3,7 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.user.NativeUserService; +import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; @@ -31,10 +32,14 @@ public class NativeUserServiceFactory { @Qualifier("dataHubSecretService") private SecretService _secretService; + @Autowired + private ConfigurationProvider _configurationProvider; + @Bean(name = "nativeUserService") @Scope("singleton") @Nonnull protected NativeUserService getInstance() throws Exception { - return new NativeUserService(this._entityService, this._javaEntityClient, this._secretService); + return new NativeUserService(_entityService, _javaEntityClient, _secretService, + _configurationProvider.getAuthentication()); } } \ No newline at end of file From f70d8a45b5237fde7f4be8bc859b8bef0785752e Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 14 Nov 
2023 19:00:22 -0600 Subject: [PATCH 105/792] chore(security): version adjustments for security vulns (#9243) --- build.gradle | 27 ++++++++++--------- datahub-upgrade/build.gradle | 10 ++++++- metadata-events/mxe-registration/build.gradle | 2 +- metadata-events/mxe-utils-avro/build.gradle | 2 +- metadata-io/build.gradle | 12 +++++++-- metadata-service/factories/build.gradle | 1 + metadata-service/restli-api/build.gradle | 3 +++ 7 files changed, 39 insertions(+), 18 deletions(-) diff --git a/build.gradle b/build.gradle index 54802917d05a5..9eecb1696bb19 100644 --- a/build.gradle +++ b/build.gradle @@ -19,7 +19,7 @@ buildscript { ext.logbackClassic = '1.2.12' ext.hadoop3Version = '3.3.5' ext.kafkaVersion = '2.3.0' - ext.hazelcastVersion = '5.3.1' + ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '12.16.1' ext.docker_registry = 'linkedin' @@ -53,7 +53,7 @@ project.ext.spec = [ 'pegasus' : [ 'd2' : 'com.linkedin.pegasus:d2:' + pegasusVersion, 'data' : 'com.linkedin.pegasus:data:' + pegasusVersion, - 'dataAvro1_6' : 'com.linkedin.pegasus:data-avro-1_6:' + pegasusVersion, + 'dataAvro': 'com.linkedin.pegasus:data-avro:' + pegasusVersion, 'generator': 'com.linkedin.pegasus:generator:' + pegasusVersion, 'restliCommon' : 'com.linkedin.pegasus:restli-common:' + pegasusVersion, 'restliClient' : 'com.linkedin.pegasus:restli-client:' + pegasusVersion, @@ -71,22 +71,21 @@ project.ext.externalDependency = [ 'assertJ': 'org.assertj:assertj-core:3.11.1', 'avro': 'org.apache.avro:avro:1.11.3', 'avroCompiler': 'org.apache.avro:avro-compiler:1.11.3', - 'awsGlueSchemaRegistrySerde': 'software.amazon.glue:schema-registry-serde:1.1.10', - 'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:1.1.1', - 'awsSecretsManagerJdbc': 'com.amazonaws.secretsmanager:aws-secretsmanager-jdbc:1.0.8', - 'awsPostgresIamAuth': 'software.amazon.jdbc:aws-advanced-jdbc-wrapper:1.0.0', + 'awsGlueSchemaRegistrySerde': 'software.amazon.glue:schema-registry-serde:1.1.17', + 'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:1.1.9', + 'awsSecretsManagerJdbc': 'com.amazonaws.secretsmanager:aws-secretsmanager-jdbc:1.0.13', + 'awsPostgresIamAuth': 'software.amazon.jdbc:aws-advanced-jdbc-wrapper:1.0.2', 'awsRds':'software.amazon.awssdk:rds:2.18.24', - 'cacheApi' : 'javax.cache:cache-api:1.1.0', + 'cacheApi': 'javax.cache:cache-api:1.1.0', 'commonsCli': 'commons-cli:commons-cli:1.5.0', 'commonsIo': 'commons-io:commons-io:2.4', 'commonsLang': 'commons-lang:commons-lang:2.6', 'commonsText': 'org.apache.commons:commons-text:1.10.0', 'commonsCollections': 'commons-collections:commons-collections:3.2.2', - 'data' : 'com.linkedin.pegasus:data:' + pegasusVersion, 'datastaxOssNativeProtocol': 'com.datastax.oss:native-protocol:1.5.1', 'datastaxOssCore': 'com.datastax.oss:java-driver-core:4.14.1', 'datastaxOssQueryBuilder': 'com.datastax.oss:java-driver-query-builder:4.14.1', - 'dgraph4j' : 'io.dgraph:dgraph4j:21.03.1', + 'dgraph4j' : 'io.dgraph:dgraph4j:21.12.0', 'dropwizardMetricsCore': 'io.dropwizard.metrics:metrics-core:4.2.3', 'dropwizardMetricsJmx': 'io.dropwizard.metrics:metrics-jmx:4.2.3', 'ebean': 'io.ebean:ebean:' + ebeanVersion, @@ -131,7 +130,7 @@ project.ext.externalDependency = [ 'jsonPatch': 'com.github.java-json-tools:json-patch:1.13', 'jsonSimple': 'com.googlecode.json-simple:json-simple:1.1.1', 'jsonSmart': 'net.minidev:json-smart:2.4.9', - 'json': 'org.json:json:20230227', + 'json': 'org.json:json:20231013', 'junit': 'junit:junit:4.13.2', 'junitJupiterApi': "org.junit.jupiter:junit-jupiter-api:$junitJupiterVersion", 
'junitJupiterParams': "org.junit.jupiter:junit-jupiter-params:$junitJupiterVersion", @@ -140,7 +139,7 @@ project.ext.externalDependency = [ 'kafkaAvroSerde': 'io.confluent:kafka-streams-avro-serde:5.5.1', 'kafkaAvroSerializer': 'io.confluent:kafka-avro-serializer:5.1.4', 'kafkaClients': "org.apache.kafka:kafka-clients:$kafkaVersion", - 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.3', + 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.4', 'logbackClassic': "ch.qos.logback:logback-classic:$logbackClassic", 'slf4jApi': "org.slf4j:slf4j-api:$slf4jVersion", 'log4jCore': "org.apache.logging.log4j:log4j-core:$log4jVersion", @@ -164,6 +163,7 @@ project.ext.externalDependency = [ 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', 'parquet': 'org.apache.parquet:parquet-avro:1.12.3', + 'parquetHadoop': 'org.apache.parquet:parquet-hadoop:1.13.1', 'picocli': 'info.picocli:picocli:4.5.0', 'playCache': "com.typesafe.play:play-cache_2.12:$playVersion", 'playWs': 'com.typesafe.play:play-ahc-ws-standalone_2.12:2.1.10', @@ -178,6 +178,7 @@ project.ext.externalDependency = [ 'playPac4j': 'org.pac4j:play-pac4j_2.12:9.0.2', 'postgresql': 'org.postgresql:postgresql:42.3.8', 'protobuf': 'com.google.protobuf:protobuf-java:3.19.6', + 'grpcProtobuf': 'io.grpc:grpc-protobuf:1.53.0', 'rangerCommons': 'org.apache.ranger:ranger-plugins-common:2.3.0', 'reflections': 'org.reflections:reflections:0.9.9', 'resilience4j': 'io.github.resilience4j:resilience4j-retry:1.7.1', @@ -201,7 +202,7 @@ project.ext.externalDependency = [ 'springBootStarterJetty': "org.springframework.boot:spring-boot-starter-jetty:$springBootVersion", 'springBootStarterCache': "org.springframework.boot:spring-boot-starter-cache:$springBootVersion", 'springBootStarterValidation': "org.springframework.boot:spring-boot-starter-validation:$springBootVersion", - 'springKafka': 'org.springframework.kafka:spring-kafka:2.8.11', + 'springKafka': 'org.springframework.kafka:spring-kafka:2.9.13', 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', @@ -263,7 +264,7 @@ subprojects { plugins.withType(JavaPlugin) { dependencies { constraints { - implementation('io.netty:netty-all:4.1.86.Final') + implementation('io.netty:netty-all:4.1.100.Final') implementation('org.apache.commons:commons-compress:1.21') implementation('org.apache.velocity:velocity-engine-core:2.3') implementation('org.hibernate:hibernate-validator:6.0.20.Final') diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 81e6e79c2a5e5..3356445cda7e1 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -66,7 +66,9 @@ dependencies { runtimeOnly externalDependency.mysqlConnector runtimeOnly externalDependency.postgresql - implementation externalDependency.awsMskIamAuth + implementation(externalDependency.awsMskIamAuth) { + exclude group: 'software.amazon.awssdk', module: 'third-party-jackson-core' + } annotationProcessor externalDependency.lombok annotationProcessor externalDependency.picocli @@ -75,6 +77,12 @@ dependencies { testImplementation externalDependency.mockito testImplementation externalDependency.testng testRuntimeOnly externalDependency.logbackClassic + + constraints { + implementation(implementation externalDependency.parquetHadoop) { + 
because("CVE-2022-42003") + } + } } bootJar { diff --git a/metadata-events/mxe-registration/build.gradle b/metadata-events/mxe-registration/build.gradle index 032870d93329f..2842dd935c7ee 100644 --- a/metadata-events/mxe-registration/build.gradle +++ b/metadata-events/mxe-registration/build.gradle @@ -7,7 +7,7 @@ configurations { dependencies { implementation project(':metadata-events:mxe-avro') implementation project(':metadata-models') - implementation spec.product.pegasus.dataAvro1_6 + implementation spec.product.pegasus.dataAvro testImplementation project(':test-models') testImplementation project(path: ':test-models', configuration: 'testDataTemplate') diff --git a/metadata-events/mxe-utils-avro/build.gradle b/metadata-events/mxe-utils-avro/build.gradle index a7bf287ab224d..3493797ab4f97 100644 --- a/metadata-events/mxe-utils-avro/build.gradle +++ b/metadata-events/mxe-utils-avro/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'java-library' dependencies { api project(':metadata-events:mxe-avro') api project(':metadata-models') - api spec.product.pegasus.dataAvro1_6 + api spec.product.pegasus.dataAvro testImplementation externalDependency.testng testImplementation project(':test-models') diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 4b36f533476f7..48f80f06d07c2 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -22,13 +22,18 @@ dependencies { implementation externalDependency.guava implementation externalDependency.reflections implementation externalDependency.jsonPatch - api externalDependency.dgraph4j exclude group: 'com.google.guava', module: 'guava' + api(externalDependency.dgraph4j) { + exclude group: 'com.google.guava', module: 'guava' + exclude group: 'io.grpc', module: 'grpc-protobuf' + } implementation externalDependency.slf4jApi runtimeOnly externalDependency.logbackClassic compileOnly externalDependency.lombok implementation externalDependency.commonsCollections api externalDependency.datastaxOssNativeProtocol - api externalDependency.datastaxOssCore + api(externalDependency.datastaxOssCore) { + exclude group: 'com.fasterxml.jackson.core' + } api externalDependency.datastaxOssQueryBuilder api externalDependency.elasticSearchRest api externalDependency.elasticSearchJava @@ -101,6 +106,9 @@ dependencies { implementation(externalDependency.snappy) { because("previous versions are vulnerable to CVE-2023-34453 through CVE-2023-34455") } + implementation(externalDependency.grpcProtobuf) { + because("CVE-2023-1428, CVE-2023-32731") + } } } diff --git a/metadata-service/factories/build.gradle b/metadata-service/factories/build.gradle index 2e99def17c3c5..86644e3b034da 100644 --- a/metadata-service/factories/build.gradle +++ b/metadata-service/factories/build.gradle @@ -63,4 +63,5 @@ dependencies { configurations.all{ exclude group: "commons-io", module:"commons-io" exclude group: "jline", module:"jline" + exclude group: 'software.amazon.awssdk', module: 'third-party-jackson-core' } diff --git a/metadata-service/restli-api/build.gradle b/metadata-service/restli-api/build.gradle index f182d11b6baeb..352738d01f8da 100644 --- a/metadata-service/restli-api/build.gradle +++ b/metadata-service/restli-api/build.gradle @@ -13,5 +13,8 @@ dependencies { restClientCompile(externalDependency.zookeeper) { because("CVE-2023-44981") } + restClientCompile(externalDependency.grpcProtobuf) { + because("CVE-2023-1428, CVE-2023-32731") + } } } \ No newline at end of file From 8475fc92b41e9a67841d7e5ecf114fd073499cb0 Mon Sep 17 00:00:00 2001 From: Pierre Guivarch 
<3973133+PGuiv@users.noreply.github.com> Date: Tue, 14 Nov 2023 21:42:13 -0500 Subject: [PATCH 106/792] docs(ingest): fix typo in snowflake ingestion docs (#9239) --- .../src/datahub/ingestion/source_config/sql/snowflake.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py index 2e9a15063661e..ccc4e115729a2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py @@ -59,7 +59,7 @@ class BaseSnowflakeConfig(ConfigModel): ) private_key: Optional[str] = pydantic.Field( default=None, - description="Private key in a form of '-----BEGIN PRIVATE KEY-----\\nprivate-key\\n-----END PRIVATE KEY-----\\n' if using key pair authentication. Encrypted version of private key will be in a form of '-----BEGIN ENCRYPTED PRIVATE KEY-----\\nencrypted-private-key\\n-----END ECNCRYPTED PRIVATE KEY-----\\n' See: https://docs.snowflake.com/en/user-guide/key-pair-auth.html", + description="Private key in a form of '-----BEGIN PRIVATE KEY-----\\nprivate-key\\n-----END PRIVATE KEY-----\\n' if using key pair authentication. Encrypted version of private key will be in a form of '-----BEGIN ENCRYPTED PRIVATE KEY-----\\nencrypted-private-key\\n-----END ENCRYPTED PRIVATE KEY-----\\n' See: https://docs.snowflake.com/en/user-guide/key-pair-auth.html", ) private_key_path: Optional[str] = pydantic.Field( From 4201e541ca48ad617f1cfa63c4a6487288973589 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 15 Nov 2023 06:28:40 -0600 Subject: [PATCH 107/792] chore(security): jre to headless, removes x11 dependency (#9245) --- docker/datahub-frontend/Dockerfile | 2 +- docker/datahub-gms/Dockerfile | 2 +- docker/datahub-mae-consumer/Dockerfile | 2 +- docker/datahub-mce-consumer/Dockerfile | 2 +- docker/datahub-upgrade/Dockerfile | 2 +- docker/kafka-setup/Dockerfile | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 9c13e73078042..aaace5ae38ca3 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -9,7 +9,7 @@ RUN addgroup -S datahub && adduser -S datahub -G datahub # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ + && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index e271188a703cc..c5696bbd2d1d2 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -19,7 +19,7 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre 
--repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ + && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index ec3da4de71d15..07af7c66a7783 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -19,7 +19,7 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ + && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index f9c47f77a98f5..97861d6be3141 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -19,7 +19,7 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ + && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index f08e7268e4018..fa8e65009662b 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -19,7 +19,7 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ + && apk --no-cache add openjdk11-jre-headless 
--repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index a9c75521fead1..e7f084739a576 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -18,7 +18,7 @@ ENV SCALA_VERSION 2.13 LABEL name="kafka" version=${KAFKA_VERSION} RUN apk add --no-cache bash coreutils -RUN apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community +RUN apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip RUN mkdir -p /opt \ From 6655918923deefad5486d037a1be158ec1a1856c Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Wed, 15 Nov 2023 19:53:50 +0000 Subject: [PATCH 108/792] feat(recomendations): Make top platforms account only for searchable entities (#9240) --- .../elasticsearch/ElasticSearchService.java | 8 ++--- .../elasticsearch/query/ESSearchDAO.java | 14 ++++---- .../metadata/search/TestEntityTestBase.java | 15 +++++---- .../EntitySearchAggregationSource.java | 7 +++- .../candidatesource/TopPlatformsSource.java | 32 +++++++++++++------ .../metadata/search/EntitySearchService.java | 6 ++-- 6 files changed, 51 insertions(+), 31 deletions(-) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 024cf2b0abec2..9b43642d7621c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -142,11 +142,11 @@ public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull Stri @Nonnull @Override - public Map aggregateByValue(@Nullable String entityName, @Nonnull String field, + public Map aggregateByValue(@Nullable List entityNames, @Nonnull String field, @Nullable Filter requestParams, int limit) { - log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityName, field, requestParams, - limit); - return esSearchDAO.aggregateByValue(entityName, field, requestParams, limit); + log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityNames.toString(), field, + requestParams, limit); + return esSearchDAO.aggregateByValue(entityNames, field, requestParams, limit); } @Nonnull diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 290e8c60deb00..960a5b38826b1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -31,6 +31,7 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; +import 
java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; @@ -263,17 +264,16 @@ public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull Stri * @return */ @Nonnull - public Map aggregateByValue(@Nullable String entityName, @Nonnull String field, + public Map aggregateByValue(@Nullable List entityNames, @Nonnull String field, @Nullable Filter requestParams, int limit) { final SearchRequest searchRequest = SearchRequestHandler.getAggregationRequest(field, transformFilterForEntities(requestParams, indexConvention), limit); - String indexName; - if (entityName == null) { - indexName = indexConvention.getAllEntityIndicesPattern(); + if (entityNames == null) { + String indexName = indexConvention.getAllEntityIndicesPattern(); + searchRequest.indices(indexName); } else { - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - indexName = indexConvention.getIndexName(entitySpec); + Stream stream = entityNames.stream().map(entityRegistry::getEntitySpec).map(indexConvention::getIndexName); + searchRequest.indices(stream.toArray(String[]::new)); } - searchRequest.indices(indexName); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java index d358c03c612d0..a4c359b3595c2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java @@ -3,6 +3,7 @@ import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; @@ -99,7 +100,7 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { BrowseResult browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textField", null, 10).size(), 0); + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -124,7 +125,7 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textFieldOverride", null, 10), + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -147,7 +148,7 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { 
assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textFieldOverride", null, 10), + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); @@ -158,7 +159,7 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textField", null, 10).size(), 0); + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); } @Test @@ -181,7 +182,7 @@ public void testElasticSearchServiceFulltext() throws Exception { assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textFieldOverride", null, 10), + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -198,7 +199,7 @@ public void testElasticSearchServiceFulltext() throws Exception { assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textFieldOverride", null, 10), + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); @@ -208,6 +209,6 @@ public void testElasticSearchServiceFulltext() throws Exception { assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ENTITY_NAME, "textField", null, 10).size(), 0); + assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java index 9fb0c18f1b621..e1ebc6d5e97be 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java @@ -82,7 +82,7 @@ protected boolean isValidCandidate(T candidate) { public List getRecommendations(@Nonnull Urn userUrn, @Nullable RecommendationRequestContext requestContext) { Map aggregationResult = - _entitySearchService.aggregateByValue(null, getSearchFieldName(), null, getMaxContent()); + 
_entitySearchService.aggregateByValue(getEntityNames(), getSearchFieldName(), null, getMaxContent()); if (aggregationResult.isEmpty()) { return Collections.emptyList(); @@ -116,6 +116,11 @@ public List getRecommendations(@Nonnull Urn userUrn, .collect(Collectors.toList()); } + protected List getEntityNames() { + // By default, no list is applied which means searching across entities. + return null; + } + // Get top K entries with the most count private List> getTopKValues(Map countMap) { final PriorityQueue> queue = diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java index f81a91be0660a..9562440889f63 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java @@ -1,15 +1,16 @@ package com.linkedin.metadata.recommendation.candidatesource; -import com.google.common.collect.ImmutableSet; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.dataplatform.DataPlatformInfo; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; import com.linkedin.metadata.search.EntitySearchService; -import java.util.Set; +import java.util.List; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -18,12 +19,24 @@ public class TopPlatformsSource extends EntitySearchAggregationSource { /** - * TODO: Remove this once we permit specifying set of entities in aggregation API (filter out assertions) + * Set of entities that we want to consider for defining the top platform sources. 
+ * This must match SearchUtils.SEARCHABLE_ENTITY_TYPES */ - private static final Set FILTERED_DATA_PLATFORM_URNS = ImmutableSet.of( - "urn:li:dataPlatform:great-expectations" + private static final List SEARCHABLE_ENTITY_TYPES = ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_PRIMARY_KEY_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.TAG_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.NOTEBOOK_ENTITY_NAME ); - private final EntityService _entityService; private static final String PLATFORM = "platform"; @@ -52,6 +65,10 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo return requestContext.getScenario() == ScenarioType.HOME; } + protected List getEntityNames() { + return SEARCHABLE_ENTITY_TYPES; + } + @Override protected String getSearchFieldName() { return PLATFORM; @@ -69,9 +86,6 @@ protected boolean isValueUrn() { @Override protected boolean isValidCandidateUrn(Urn urn) { - if (FILTERED_DATA_PLATFORM_URNS.contains(urn.toString())) { - return false; - } RecordTemplate dataPlatformInfo = _entityService.getLatestAspect(urn, "dataPlatformInfo"); if (dataPlatformInfo == null) { return false; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index cbfeeaef860d3..9cd865bd888e2 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -131,15 +131,15 @@ AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String quer /** * Returns number of documents per field value given the field and filters * - * @param entityName name of the entity, if empty aggregate over all entities + * @param entityNames list of name of entities to aggregate across, if empty aggregate over all entities * @param field the field name for aggregate * @param requestParams filters to apply before aggregating * @param limit the number of aggregations to return * @return */ @Nonnull - Map aggregateByValue(@Nullable String entityName, @Nonnull String field, @Nullable Filter requestParams, - int limit); + Map aggregateByValue(@Nullable List entityNames, @Nonnull String field, + @Nullable Filter requestParams, int limit); /** * Gets a list of groups/entities that match given browse request. 
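The patch above switches `aggregateByValue` from a single optional entity name to an explicit list of entity indices. The request it builds, a single terms aggregation fanned out over several indices, can be sketched with the Python Elasticsearch client using 8.x-style keyword arguments (the endpoint, index names, and bucket size below are illustrative placeholders, not DataHub's actual configuration):

```python
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # placeholder endpoint

# One terms aggregation across several entity indices in a single request,
# analogous to SearchRequestHandler.getAggregationRequest plus searchRequest.indices(...).
resp = es.search(
    index=["datasetindex_v2", "dashboardindex_v2", "chartindex_v2"],  # illustrative names
    size=0,  # only aggregation buckets are needed, not search hits
    aggs={"by_platform": {"terms": {"field": "platform", "size": 20}}},
)
counts = {b["key"]: b["doc_count"] for b in resp["aggregations"]["by_platform"]["buckets"]}
print(counts)
```

Fanning one request over a fixed index list keeps the aggregation to a single round trip while excluding non-searchable entities at query time, which is what lets the hardcoded `FILTERED_DATA_PLATFORM_URNS` deny-list above be deleted.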
From 5dd09dd62e1df262ac4962f0a420fac04e47288b Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 16 Nov 2023 22:53:07 +0530 Subject: [PATCH 109/792] Feature/prd 770 (#9224) --- .../src/app/domain/EmptyDomainDescription.tsx | 39 +++++++++++ .../src/app/domain/EmptyDomainsSection.tsx | 69 +++++++++++++++++++ .../nestedDomains/ManageDomainsPageV2.tsx | 2 +- .../app/domain/nestedDomains/RootDomains.tsx | 16 ++++- 4 files changed, 124 insertions(+), 2 deletions(-) create mode 100644 datahub-web-react/src/app/domain/EmptyDomainDescription.tsx create mode 100644 datahub-web-react/src/app/domain/EmptyDomainsSection.tsx diff --git a/datahub-web-react/src/app/domain/EmptyDomainDescription.tsx b/datahub-web-react/src/app/domain/EmptyDomainDescription.tsx new file mode 100644 index 0000000000000..6a5f304e565be --- /dev/null +++ b/datahub-web-react/src/app/domain/EmptyDomainDescription.tsx @@ -0,0 +1,39 @@ +import { Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; + +const StyledParagraph = styled(Typography.Paragraph)` + text-align: justify; + text-justify: inter-word; + margin: 40px 0; + font-size: 15px; +`; + +function EmptyDomainDescription() { + return ( + <> + + Welcome to your Data Domains! It looks like this space + is ready to be transformed into a well-organized data universe. Start by creating your first domain - a + high-level category for your data assets. + + + Create Nested Domains: Want to dive deeper? You can + also create nested domains to add granularity and structure. Just like nesting Russian dolls, it's all + about refining your organization. + + + Build Data Products: Once your domains are set, go a + step further! Organize your data assets into data products to realize a data mesh architecture. Data + products empower you to treat data as a product, making it more accessible and manageable. + + + Ready to embark on this data adventure? Click the Create Domain button to begin shaping your data + landscape!
+ + + ); +} + +export default EmptyDomainDescription; diff --git a/datahub-web-react/src/app/domain/EmptyDomainsSection.tsx b/datahub-web-react/src/app/domain/EmptyDomainsSection.tsx new file mode 100644 index 0000000000000..f232d259c20da --- /dev/null +++ b/datahub-web-react/src/app/domain/EmptyDomainsSection.tsx @@ -0,0 +1,69 @@ +import { PlusOutlined } from '@ant-design/icons'; +import { Button, Empty, Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; + +const EmptyDomainContainer = styled.div` + display: flex; + justify-content: center; + align-items: center; +`; + +const StyledEmpty = styled(Empty)` + width: 35vw; + @media screen and (max-width: 1300px) { + width: 50vw; + } + @media screen and (max-width: 896px) { + overflow-y: auto; + max-height: 75vh; + &::-webkit-scrollbar { + width: 5px; + background: #d6d6d6; + } + } + padding: 60px 40px; + .ant-empty-image { + display: none; + } +`; + +const StyledButton = styled(Button)` + margin: 18px 8px 0 0; +`; + +const IconContainer = styled.span` + color: ${ANTD_GRAY[7]}; + font-size: 40px; +`; + +interface Props { + title?: string; + setIsCreatingDomain: React.Dispatch>; + description?: React.ReactNode; + icon?: React.ReactNode; +} + +function EmptyDomainsSection(props: Props) { + const { title, description, setIsCreatingDomain, icon } = props; + return ( + + + {icon} + {title} + {description} + + } + > + setIsCreatingDomain(true)}> + Create Domain + + + + ); +} + +export default EmptyDomainsSection; diff --git a/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx b/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx index b69f0c5458b5d..f5fc0cba2d8ec 100644 --- a/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx +++ b/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx @@ -51,7 +51,7 @@ export default function ManageDomainsPageV2() { New Domain - + {isCreatingDomain && ( setIsCreatingDomain(false)} diff --git a/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx b/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx index 757119919e336..75c38cd4951ef 100644 --- a/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx +++ b/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx @@ -1,17 +1,23 @@ import React from 'react'; import styled from 'styled-components'; +import { ReadOutlined } from '@ant-design/icons'; import { Message } from '../../shared/Message'; import { ResultWrapper } from '../../search/SearchResultList'; import { useEntityRegistry } from '../../useEntityRegistry'; import { EntityType } from '../../../types.generated'; import useListDomains from '../useListDomains'; +import EmptyDomainsSection from '../EmptyDomainsSection'; +import EmptyDomainDescription from '../EmptyDomainDescription'; const DomainsWrapper = styled.div` overflow: auto; padding: 0 28px 16px 28px; `; -export default function RootDomains() { +interface Props { + setIsCreatingDomain: React.Dispatch>; +} +export default function RootDomains({ setIsCreatingDomain }: Props) { const entityRegistry = useEntityRegistry(); const { loading, error, data, sortedDomains } = useListDomains({}); @@ -19,6 +25,14 @@ export default function RootDomains() { <> {!data && loading && } {error && } + {!loading && (!data || !data?.listDomains?.domains?.length) && ( + } + title="Organize your data" + description={} + setIsCreatingDomain={setIsCreatingDomain} + 
/> + )} {sortedDomains?.map((domain) => ( From 8451c758e700e147d9653f2ce7bddc9eb2ef314c Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 16 Nov 2023 22:54:47 +0530 Subject: [PATCH 110/792] fix(search): fix search on paginated lists (#9198) --- datahub-web-react/src/app/identity/group/GroupList.tsx | 5 ++++- datahub-web-react/src/app/identity/user/UserList.tsx | 5 ++++- datahub-web-react/src/app/ingest/secret/SecretsList.tsx | 9 ++++++--- .../src/app/ingest/source/IngestionSourceList.tsx | 9 ++++++--- .../src/app/permissions/policy/ManagePolicies.tsx | 7 +++++-- .../src/app/permissions/roles/ManageRoles.tsx | 7 +++++-- 6 files changed, 30 insertions(+), 12 deletions(-) diff --git a/datahub-web-react/src/app/identity/group/GroupList.tsx b/datahub-web-react/src/app/identity/group/GroupList.tsx index db9901a53b26b..5ef77b4dfc8a8 100644 --- a/datahub-web-react/src/app/identity/group/GroupList.tsx +++ b/datahub-web-react/src/app/identity/group/GroupList.tsx @@ -92,7 +92,10 @@ export const GroupList = () => { fontSize: 12, }} onSearch={() => null} - onQueryChange={(q) => setQuery(q)} + onQueryChange={(q) => { + setPage(1); + setQuery(q); + }} entityRegistry={entityRegistry} hideRecommendations /> diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index 55ef27b8458fa..e50005b08377e 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -135,7 +135,10 @@ export const UserList = () => { fontSize: 12, }} onSearch={() => null} - onQueryChange={(q) => setQuery(q)} + onQueryChange={(q) => { + setPage(1); + setQuery(q); + }} entityRegistry={entityRegistry} hideRecommendations /> diff --git a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx index 8e5b601e2a809..2728fff0ccba3 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx @@ -54,10 +54,10 @@ export const SecretsList = () => { input: { start, count: pageSize, - query: query && query.length > 0 ? query : undefined, + query: (query?.length && query) || undefined, }, }, - fetchPolicy: query && query.length > 0 ? 'no-cache' : 'cache-first', + fetchPolicy: (query?.length || 0) > 0 ? 'no-cache' : 'cache-first', }); const totalSecrets = data?.listSecrets?.total || 0; @@ -197,7 +197,10 @@ export const SecretsList = () => { fontSize: 12, }} onSearch={() => null} - onQueryChange={(q) => setQuery(q)} + onQueryChange={(q) => { + setPage(1); + setQuery(q); + }} entityRegistry={entityRegistry} hideRecommendations /> diff --git a/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx b/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx index 13af19b0b6ac2..6188845694f9e 100644 --- a/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx +++ b/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx @@ -107,10 +107,10 @@ export const IngestionSourceList = () => { input: { start, count: pageSize, - query, + query: (query?.length && query) || undefined, }, }, - fetchPolicy: 'cache-first', + fetchPolicy: (query?.length || 0) > 0 ? 
'no-cache' : 'cache-first', }); const [createIngestionSource] = useCreateIngestionSourceMutation(); const [updateIngestionSource] = useUpdateIngestionSourceMutation(); @@ -399,7 +399,10 @@ export const IngestionSourceList = () => { fontSize: 12, }} onSearch={() => null} - onQueryChange={(q) => setQuery(q)} + onQueryChange={(q) => { + setPage(1); + setQuery(q); + }} entityRegistry={entityRegistry} hideRecommendations /> diff --git a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx index 08327d40a7165..49b0ec922fd57 100644 --- a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx +++ b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx @@ -166,7 +166,6 @@ export const ManagePolicies = () => { data: policiesData, refetch: policiesRefetch, } = useListPoliciesQuery({ - fetchPolicy: 'no-cache', variables: { input: { start, @@ -174,6 +173,7 @@ export const ManagePolicies = () => { query, }, }, + fetchPolicy: (query?.length || 0) > 0 ? 'no-cache' : 'cache-first', }); // Any time a policy is removed, edited, or created, refetch the list. @@ -476,7 +476,10 @@ export const ManagePolicies = () => { fontSize: 12, }} onSearch={() => null} - onQueryChange={(q) => setQuery(q)} + onQueryChange={(q) => { + setPage(1); + setQuery(q); + }} entityRegistry={entityRegistry} hideRecommendations /> diff --git a/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx b/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx index ccdfb7002c67d..011109e2eb915 100644 --- a/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx +++ b/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx @@ -72,7 +72,6 @@ export const ManageRoles = () => { data: rolesData, refetch: rolesRefetch, } = useListRolesQuery({ - fetchPolicy: 'cache-first', variables: { input: { start, @@ -80,6 +79,7 @@ export const ManageRoles = () => { query, }, }, + fetchPolicy: (query?.length || 0) > 0 ? 
'no-cache' : 'cache-first', }); const totalRoles = rolesData?.listRoles?.total || 0; @@ -238,7 +238,10 @@ export const ManageRoles = () => { fontSize: 12, }} onSearch={() => null} - onQueryChange={(q) => setQuery(q)} + onQueryChange={(q) => { + setPage(1); + setQuery(q); + }} entityRegistry={entityRegistry} /> {isBatchAddRolesModalVisible && ( From ee9ee406662018aabce0f7859f5d14d235bfe7f1 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 16 Nov 2023 23:05:48 +0530 Subject: [PATCH 111/792] fix(): increase the search bar highlight border to double the width (#9251) Co-authored-by: John Joyce --- datahub-web-react/src/app/search/SearchBar.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/search/SearchBar.tsx b/datahub-web-react/src/app/search/SearchBar.tsx index a23ead83caf54..15457c006c61b 100644 --- a/datahub-web-react/src/app/search/SearchBar.tsx +++ b/datahub-web-react/src/app/search/SearchBar.tsx @@ -45,7 +45,7 @@ const StyledSearchBar = styled(Input)` border: 2px solid transparent; &:focus-within { - border: 1.5px solid ${REDESIGN_COLORS.BLUE}; + border: 2px solid ${REDESIGN_COLORS.BLUE}; } } > .ant-input::placeholder { From e6305c0d95b4ed2960a5be89dcf1e97a3b3d3787 Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 16 Nov 2023 23:07:23 +0530 Subject: [PATCH 112/792] feat: Add loading indicator to Manage Domains sidebar (#9142) Co-authored-by: apptware --- .../src/app/domain/DomainSearch.tsx | 93 +++++++------------ .../src/app/domain/DomainSearchResultItem.tsx | 68 ++++++++++++++ .../domainNavigator/DomainNavigator.tsx | 28 ++++-- .../profile/sidebar/Domain/SetDomainModal.tsx | 36 ++++++- 4 files changed, 151 insertions(+), 74 deletions(-) create mode 100644 datahub-web-react/src/app/domain/DomainSearchResultItem.tsx diff --git a/datahub-web-react/src/app/domain/DomainSearch.tsx b/datahub-web-react/src/app/domain/DomainSearch.tsx index e82dae9c2c9e6..5036a795ea5e4 100644 --- a/datahub-web-react/src/app/domain/DomainSearch.tsx +++ b/datahub-web-react/src/app/domain/DomainSearch.tsx @@ -1,17 +1,12 @@ -import React, { CSSProperties, useRef, useState } from 'react'; -import { Link } from 'react-router-dom'; +import React, { useRef, useState } from 'react'; +import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components/macro'; -import Highlight from 'react-highlighter'; import { useGetSearchResultsForMultipleQuery } from '../../graphql/search.generated'; import { EntityType } from '../../types.generated'; -import { IconStyleType } from '../entity/Entity'; -import { ANTD_GRAY } from '../entity/shared/constants'; import { SearchBar } from '../search/SearchBar'; import ClickOutside from '../shared/ClickOutside'; import { useEntityRegistry } from '../useEntityRegistry'; -import DomainIcon from './DomainIcon'; -import ParentEntities from '../search/filters/ParentEntities'; -import { getParentDomains } from './utils'; +import DomainSearchResultItem from './DomainSearchResultItem'; const DomainSearchWrapper = styled.div` position: relative; @@ -33,34 +28,19 @@ const ResultsWrapper = styled.div` z-index: 1; `; -const SearchResult = styled(Link)` - color: #262626; +const LoadingWrapper = styled.div` display: flex; align-items: center; - gap: 8px; - height: 100%; - padding: 6px 8px; - width: 100%; - &:hover { - background-color: ${ANTD_GRAY[3]}; - color: #262626; - } + justify-content: center; + height: 350px; + font-size: 30px; 
`; -const IconWrapper = styled.span``; - -const highlightMatchStyle: CSSProperties = { - fontWeight: 'bold', - background: 'none', - padding: 0, -}; - function DomainSearch() { const [query, setQuery] = useState(''); const [isSearchBarFocused, setIsSearchBarFocused] = useState(false); const entityRegistry = useEntityRegistry(); - - const { data } = useGetSearchResultsForMultipleQuery({ + const { data, loading } = useGetSearchResultsForMultipleQuery({ variables: { input: { types: [EntityType.Domain], @@ -69,11 +49,11 @@ function DomainSearch() { count: 50, }, }, - skip: !query, }); const searchResults = data?.searchAcrossEntities?.searchResults; const timerRef = useRef(-1); + const handleQueryChange = (q: string) => { window.clearTimeout(timerRef.current); timerRef.current = window.setTimeout(() => { @@ -81,6 +61,26 @@ function DomainSearch() { }, 250); }; + const renderLoadingIndicator = () => ( + + + + ); + + const renderSearchResults = () => ( + + {searchResults?.map((result) => ( + setIsSearchBarFocused(false)} + /> + ))} + + ); + return ( setIsSearchBarFocused(false)}> @@ -102,39 +102,8 @@ function DomainSearch() { entityRegistry={entityRegistry} onFocus={() => setIsSearchBarFocused(true)} /> - {isSearchBarFocused && searchResults && !!searchResults.length && ( - - {searchResults.map((result) => { - return ( - setIsSearchBarFocused(false)} - > - - {result.entity.type === EntityType.Domain ? ( - - ) : ( - entityRegistry.getIcon(result.entity.type, 12, IconStyleType.ACCENT) - )} - -
- - - {entityRegistry.getDisplayName(result.entity.type, result.entity)} - -
-
- ); - })} -
- )} + {loading && renderLoadingIndicator()} + {!loading && isSearchBarFocused && !!searchResults?.length && renderSearchResults()}
); diff --git a/datahub-web-react/src/app/domain/DomainSearchResultItem.tsx b/datahub-web-react/src/app/domain/DomainSearchResultItem.tsx new file mode 100644 index 0000000000000..dc33ea173e0ae --- /dev/null +++ b/datahub-web-react/src/app/domain/DomainSearchResultItem.tsx @@ -0,0 +1,68 @@ +// Create a new component called SearchResultItem.js +import React from 'react'; +import { Link } from 'react-router-dom'; +import Highlight from 'react-highlighter'; +import styled from 'styled-components/macro'; +import { Entity, EntityType } from '../../types.generated'; +import { IconStyleType } from '../entity/Entity'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import DomainIcon from './DomainIcon'; +import ParentEntities from '../search/filters/ParentEntities'; +import { getParentDomains } from './utils'; +import EntityRegistry from '../entity/EntityRegistry'; + +type Props = { + entity: Entity; + entityRegistry: EntityRegistry; + query: string; + onResultClick: () => void; +}; + +const SearchResult = styled(Link)` + color: #262626; + display: flex; + align-items: center; + gap: 8px; + height: 100%; + padding: 6px 8px; + width: 100%; + &:hover { + background-color: ${ANTD_GRAY[3]}; + color: #262626; + } +`; + +const IconWrapper = styled.span``; + +const highlightMatchStyle = { + fontWeight: 'bold', + background: 'none', + padding: 0, +}; + +function DomainSearchResultItem({ entity, entityRegistry, query, onResultClick }: Props) { + return ( + + + {entity.type === EntityType.Domain ? ( + + ) : ( + entityRegistry.getIcon(entity.type, 12, IconStyleType.ACCENT) + )} + +
+ + + {entityRegistry.getDisplayName(entity.type, entity)} + +
+
+ ); +} + +export default DomainSearchResultItem; diff --git a/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx b/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx index 0fbcffb9a260c..8decc2840a379 100644 --- a/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx +++ b/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx @@ -1,9 +1,10 @@ -import { Alert } from 'antd'; +import { Alert, Empty } from 'antd'; import React from 'react'; import styled from 'styled-components'; import useListDomains from '../../useListDomains'; import DomainNode from './DomainNode'; import { Domain } from '../../../../types.generated'; +import { ANTD_GRAY } from '../../../entity/shared/constants'; const NavigatorWrapper = styled.div` font-size: 14px; @@ -19,19 +20,28 @@ interface Props { export default function DomainNavigator({ domainUrnToHide, selectDomainOverride }: Props) { const { sortedDomains, error } = useListDomains({}); + const noDomainsFound: boolean = !sortedDomains || sortedDomains.length === 0; return ( {error && } - {sortedDomains?.map((domain) => ( - - ))} + )} + {!noDomainsFound && + sortedDomains?.map((domain) => ( + + ))} ); } diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx index 405442e8d7f50..3d9a7d7f08425 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx @@ -1,6 +1,8 @@ import React, { useRef, useState } from 'react'; -import { Button, Form, message, Modal, Select } from 'antd'; +import { Button, Form, message, Modal, Select, Empty } from 'antd'; +import { LoadingOutlined } from '@ant-design/icons'; +import styled from 'styled-components/macro'; import { useGetSearchResultsLazyQuery } from '../../../../../../../graphql/search.generated'; import { Domain, Entity, EntityType } from '../../../../../../../types.generated'; import { useBatchSetDomainMutation } from '../../../../../../../graphql/mutations.generated'; @@ -12,6 +14,7 @@ import { tagRender } from '../tagRenderer'; import { BrowserWrapper } from '../../../../../../shared/tags/AddTagsTermsModal'; import DomainNavigator from '../../../../../../domain/nestedDomains/domainNavigator/DomainNavigator'; import ClickOutside from '../../../../../../shared/ClickOutside'; +import { ANTD_GRAY } from '../../../../constants'; type Props = { urns: string[]; @@ -28,6 +31,18 @@ type SelectedDomain = { urn: string; }; +const LoadingWrapper = styled.div` + padding: 8px; + display: flex; + justify-content: center; + + svg { + height: 15px; + width: 15px; + color: ${ANTD_GRAY[8]}; + } +`; + export const SetDomainModal = ({ urns, onCloseModal, refetch, defaultValue, onOkOverride, titleOverride }: Props) => { const entityRegistry = useEntityRegistry(); const [isFocusedOnInput, setIsFocusedOnInput] = useState(false); @@ -41,7 +56,7 @@ export const SetDomainModal = ({ urns, onCloseModal, refetch, defaultValue, onOk } : undefined, ); - const [domainSearch, { data: domainSearchData }] = useGetSearchResultsLazyQuery(); + const [domainSearch, { data: domainSearchData, loading }] = useGetSearchResultsLazyQuery(); const domainSearchResults = domainSearchData?.search?.searchResults?.map((searchResult) => searchResult.entity) || []; const 
[batchSetDomainMutation] = useBatchSetDomainMutation(); @@ -206,8 +221,23 @@ export const SetDomainModal = ({ urns, onCloseModal, refetch, defaultValue, onOk onBlur={handleBlur} onFocus={() => setIsFocusedOnInput(true)} dropdownStyle={isShowingDomainNavigator ? { display: 'none' } : {}} + notFoundContent={ + + } > - {domainSearchOptions} + {loading ? ( + + + + + + ) : ( + domainSearchOptions + )} From 15efa72728bb7ab48fb8609236f32f80f56c4817 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20L=C3=BCdin?= <13187726+Masterchen09@users.noreply.github.com> Date: Thu, 16 Nov 2023 18:39:14 +0100 Subject: [PATCH 113/792] fix(ui): show external url also in entity profile of containers (#8834) --- datahub-web-react/src/app/entity/container/ContainerEntity.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/datahub-web-react/src/app/entity/container/ContainerEntity.tsx b/datahub-web-react/src/app/entity/container/ContainerEntity.tsx index 9aecf6900f634..6c683a27295bd 100644 --- a/datahub-web-react/src/app/entity/container/ContainerEntity.tsx +++ b/datahub-web-react/src/app/entity/container/ContainerEntity.tsx @@ -167,6 +167,7 @@ export class ContainerEntity implements Entity { getOverridePropertiesFromEntity = (data: Container) => { return { name: this.displayName(data), + externalUrl: data.properties?.externalUrl, entityCount: data.entities?.total, }; }; From 78abeb9bebee0c01a9d5725b6933016061a5009b Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 16 Nov 2023 12:41:12 -0500 Subject: [PATCH 114/792] feat(ingest/unity): Support specifying catalogs directly; pass env correctly (#9110) --- docs/how/updating-datahub.md | 3 +++ .../datahub/ingestion/source/unity/config.py | 10 ++++++- .../datahub/ingestion/source/unity/proxy.py | 9 +++++++ .../datahub/ingestion/source/unity/source.py | 27 ++++++++++++++++--- 4 files changed, 44 insertions(+), 5 deletions(-) diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 90b53161950e8..4a82e30103f81 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -68,6 +68,9 @@ qualified dataset name, i.e. `.`. We attempt to supp pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this should not cause any issues. However, if you have a complex dataset pattern, we recommend you manually convert it to the fully qualified format to avoid any potential issues. +- #9110 - The Unity Catalog source will now generate urns based on `env` properly. If you have +been setting `env` in your recipe to something besides `PROD`, we will now generate urns +with that new env variable, invalidating your existing urns. ### Potential Downtime diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 7073830318abe..4e3deedddbc43 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -1,7 +1,7 @@ import logging import os from datetime import datetime, timedelta, timezone -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional import pydantic from pydantic import Field @@ -132,6 +132,14 @@ class UnityCatalogSourceConfig( _metastore_id_pattern_removed = pydantic_removed_field("metastore_id_pattern") + catalogs: Optional[List[str]] = pydantic.Field( + default=None, + description=( + "Fixed list of catalogs to ingest." 
+ " If not specified, catalogs will be ingested based on `catalog_pattern`." + ), + ) + catalog_pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), description="Regex patterns for catalogs to filter in ingestion. Specify regex to match the full `metastore.catalog` name.", diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py index 3fb77ce512ed2..375c76db8e971 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py @@ -112,6 +112,15 @@ def catalogs(self, metastore: Optional[Metastore]) -> Iterable[Catalog]: for catalog in response: yield self._create_catalog(metastore, catalog) + def catalog( + self, catalog_name: str, metastore: Optional[Metastore] + ) -> Optional[Catalog]: + response = self._workspace_client.catalogs.get(catalog_name) + if not response: + logger.info(f"Catalog {catalog_name} not found") + return None + return self._create_catalog(metastore, response) + def schemas(self, catalog: Catalog) -> Iterable[Schema]: response = self._workspace_client.schemas.list(catalog_name=catalog.name) if not response: diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index b63cf65d55dc8..44b5bbbcb0ceb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -188,9 +188,10 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: ] def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: - self.report.report_ingestion_stage_start("Start warehouse") + self.report.report_ingestion_stage_start("Ingestion Setup") wait_on_warehouse = None if self.config.is_profiling_enabled(): + self.report.report_ingestion_stage_start("Start warehouse") # Can take several minutes, so start now and wait later wait_on_warehouse = self.unity_catalog_api_proxy.start_warehouse() if wait_on_warehouse is None: @@ -200,8 +201,9 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: ) return - self.report.report_ingestion_stage_start("Ingest service principals") - self.build_service_principal_map() + if self.config.include_ownership: + self.report.report_ingestion_stage_start("Ingest service principals") + self.build_service_principal_map() if self.config.include_notebooks: self.report.report_ingestion_stage_start("Ingest notebooks") yield from self.process_notebooks() @@ -317,7 +319,7 @@ def process_metastores(self) -> Iterable[MetadataWorkUnit]: def process_catalogs( self, metastore: Optional[Metastore] ) -> Iterable[MetadataWorkUnit]: - for catalog in self.unity_catalog_api_proxy.catalogs(metastore=metastore): + for catalog in self._get_catalogs(metastore): if not self.config.catalog_pattern.allowed(catalog.id): self.report.catalogs.dropped(catalog.id) continue @@ -327,6 +329,17 @@ def process_catalogs( self.report.catalogs.processed(catalog.id) + def _get_catalogs(self, metastore: Optional[Metastore]) -> Iterable[Catalog]: + if self.config.catalogs: + for catalog_name in self.config.catalogs: + catalog = self.unity_catalog_api_proxy.catalog( + catalog_name, metastore=metastore + ) + if catalog: + yield catalog + else: + yield from self.unity_catalog_api_proxy.catalogs(metastore=metastore) + def process_schemas(self, catalog: Catalog) -> Iterable[MetadataWorkUnit]: for schema in 
self.unity_catalog_api_proxy.schemas(catalog=catalog): if not self.config.schema_pattern.allowed(schema.id): @@ -509,6 +522,7 @@ def gen_dataset_urn(self, table_ref: TableReference) -> str: platform=self.platform, platform_instance=self.platform_instance_name, name=str(table_ref), + env=self.config.env, ) def gen_notebook_urn(self, notebook: Union[Notebook, NotebookId]) -> str: @@ -576,6 +590,7 @@ def gen_schema_key(self, schema: Schema) -> ContainerKey: instance=self.config.platform_instance, catalog=schema.catalog.name, metastore=schema.catalog.metastore.name, + env=self.config.env, ) else: return UnitySchemaKey( @@ -583,6 +598,7 @@ def gen_schema_key(self, schema: Schema) -> ContainerKey: platform=self.platform, catalog=schema.catalog.name, + env=self.config.env, ) def gen_metastore_key(self, metastore: Metastore) -> MetastoreKey: @@ -590,6 +606,7 @@ def gen_metastore_key(self, metastore: Metastore) -> MetastoreKey: metastore=metastore.name, platform=self.platform, instance=self.config.platform_instance, + env=self.config.env, ) def gen_catalog_key(self, catalog: Catalog) -> ContainerKey: @@ -600,12 +617,14 @@ def gen_catalog_key(self, catalog: Catalog) -> ContainerKey: metastore=catalog.metastore.name, platform=self.platform, instance=self.config.platform_instance, + env=self.config.env, ) else: return CatalogKey( catalog=catalog.name, platform=self.platform, instance=self.config.platform_instance, + env=self.config.env, ) def _gen_domain_urn(self, dataset_name: str) -> Optional[str]: From e15e28e2d62111e00e9e1f0aa4cd0356d8d09f9f Mon Sep 17 00:00:00 2001 From: Patrick Franco Braz Date: Thu, 16 Nov 2023 14:49:40 -0300 Subject: [PATCH 115/792] refactor(datahub-web-react): allows proxying to external datahub-frontend servers (#9250) --- datahub-web-react/.env | 3 ++- datahub-web-react/README.md | 8 ++++++++ datahub-web-react/src/setupProxy.js | 8 +++++--- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/datahub-web-react/.env b/datahub-web-react/.env index d503159ecaf97..e5529bbdaa56d 100644 --- a/datahub-web-react/.env +++ b/datahub-web-react/.env @@ -1,4 +1,5 @@ PUBLIC_URL=/assets REACT_APP_THEME_CONFIG=theme_light.config.json SKIP_PREFLIGHT_CHECK=true -BUILD_PATH=build/yarn \ No newline at end of file +BUILD_PATH=build/yarn +REACT_APP_PROXY_TARGET=http://localhost:9002 \ No newline at end of file diff --git a/datahub-web-react/README.md b/datahub-web-react/README.md index 8bf592b11a0ae..560f5315b2c71 100644 --- a/datahub-web-react/README.md +++ b/datahub-web-react/README.md @@ -51,6 +51,14 @@ need to be deployed, still at `http://localhost:9002`, to service GraphQL API re Optionally you could also start the app with the mock server without running the docker containers by executing `yarn start:mock`. See [here](src/graphql-mock/fixtures/searchResult/userSearchResult.ts#L6) for available login users. +### Testing your customizations + +There are two options to test your customizations: +* **Option 1**: Initialize the docker containers with the `quickstart.sh` script (or with any custom docker-compose file) and then run `yarn start` in this directory. This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at `http://localhost:9002` to fetch real data. +* **Option 2**: Change the environment variable `REACT_APP_PROXY_TARGET` in the `.env` file to point to your `datahub-frontend` server (ex: https://my_datahub_host.com) and then run `yarn start` in this directory.
This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at that domain to fetch real data. + +Option 2 is useful if you want to test your React customizations without having to run the whole DataHub stack locally. However, if you changed other components of the DataHub stack, you will need to run the whole stack locally (building the docker images) and use Option 1. + ### Functional testing In order to start a server and run frontend unit tests using react-testing-framework, run: diff --git a/datahub-web-react/src/setupProxy.js b/datahub-web-react/src/setupProxy.js index 478c015705a13..165e394a507f3 100644 --- a/datahub-web-react/src/setupProxy.js +++ b/datahub-web-react/src/setupProxy.js @@ -2,6 +2,8 @@ const logInFilter = function (pathname, req) { return pathname.match('^/logIn') && req.method === 'POST'; }; +const proxyTarget = process.env.REACT_APP_PROXY_TARGET || 'http://localhost:9002'; + if (process.env.REACT_APP_MOCK === 'true' || process.env.REACT_APP_MOCK === 'cy') { // no proxy needed, MirageJS will intercept all http requests module.exports = function () {}; @@ -13,21 +15,21 @@ if (process.env.REACT_APP_MOCK === 'true' || process.env.REACT_APP_MOCK === 'cy' app.use( '/logIn', createProxyMiddleware(logInFilter, { - target: 'http://localhost:9002', + target: proxyTarget, changeOrigin: true, }), ); app.use( '/authenticate', createProxyMiddleware({ - target: 'http://localhost:9002', + target: proxyTarget, changeOrigin: true, }), ); app.use( '/api/v2/graphql', createProxyMiddleware({ - target: 'http://localhost:9002', + target: proxyTarget, changeOrigin: true, }), ); From cd789b8d769b25a10870b75d10047aa09ca0977a Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 16 Nov 2023 12:09:28 -0600 Subject: [PATCH 116/792] chore(node): update node to non-EOL version (#9252) --- datahub-web-react/build.gradle | 4 ++-- datahub-web-react/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index ae96ed130c1d1..13eabe90ee509 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -16,10 +16,10 @@ node { } // Version of node to use. - version = '16.8.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.1' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror).
if (project.hasProperty('nodeDistBaseUrl')) { diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 019295f3e6ffe..5afbc8fa5892d 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -95,7 +95,7 @@ "start:mock": "yarn run generate && BROWSER=none REACT_APP_MOCK=true craco start", "start:e2e": "REACT_APP_MOCK=cy BROWSER=none PORT=3010 craco start", "ec2-dev": "yarn run generate && CI=true;export CI;BROWSER=none craco start", - "build": "yarn run generate && CI=false REACT_APP_MOCK=false craco build && rm -rf dist/ && cp -r build/yarn/ dist/ && rm -r build/yarn/", + "build": "yarn run generate && NODE_OPTIONS=--openssl-legacy-provider CI=false REACT_APP_MOCK=false craco build && rm -rf dist/ && cp -r build/yarn/ dist/ && rm -r build/yarn/", "test": "craco test", "pretest:e2e:ci": "yarn generate", "test:e2e": "start-server-and-test start:e2e 3010", From 9d41a8f9f096c2809afd5026c6f40cf4368669af Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 16 Nov 2023 13:33:35 -0500 Subject: [PATCH 117/792] fix(ingest): drop redshift-legacy and redshift-usage-legacy sources (#9244) --- docs/how/updating-datahub.md | 2 + metadata-ingestion/setup.py | 6 - .../ingestion/source/redshift/query.py | 2 +- .../ingestion/source/source_registry.py | 13 +- .../datahub/ingestion/source/sql/redshift.py | 1198 ----------------- .../ingestion/source/usage/redshift_usage.py | 397 ------ 6 files changed, 4 insertions(+), 1614 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py delete mode 100644 metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 4a82e30103f81..54f5775d8331f 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -6,6 +6,8 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ### Breaking Changes +- #9244: The `redshift-legacy` and `redshift-usage-legacy` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources.
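For recipes still referencing the removed types, the migration is usually just renaming the source. A minimal sketch using DataHub's Python `Pipeline` API (the connection values are placeholders, and the exact `redshift` config keys should be verified against the source docs for your version):

```python
from datahub.ingestion.run.pipeline import Pipeline

# A former `type: redshift-legacy` (or `type: redshift-usage-legacy`) recipe
# collapses into a single `redshift` source, which covers metadata and usage.
pipeline = Pipeline.create(
    {
        "source": {
            "type": "redshift",
            "config": {
                "host_port": "my-cluster.example.com:5439",  # placeholder
                "database": "dev",  # placeholder
                "username": "datahub",  # placeholder
                "password": "changeme",  # placeholder
            },
        },
        "sink": {"type": "console"},
    }
)
pipeline.run()
pipeline.raise_from_status()
```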
+ ### Potential Downtime ### Deprecations diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index ebe180703051f..04ae03cd440ac 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -366,8 +366,6 @@ | usage_common | {"redshift-connector"} | sqlglot_lib, - "redshift-legacy": sql_common | redshift_common | sqlglot_lib, - "redshift-usage-legacy": sql_common | redshift_common | sqlglot_lib | usage_common, "s3": {*s3_base, *data_lake_profiling}, "gcs": {*s3_base, *data_lake_profiling}, "sagemaker": aws_common, @@ -510,8 +508,6 @@ "presto", "redash", "redshift", - "redshift-legacy", - "redshift-usage-legacy", "s3", "snowflake", "tableau", @@ -608,8 +604,6 @@ "postgres = datahub.ingestion.source.sql.postgres:PostgresSource", "redash = datahub.ingestion.source.redash:RedashSource", "redshift = datahub.ingestion.source.redshift.redshift:RedshiftSource", - "redshift-legacy = datahub.ingestion.source.sql.redshift:RedshiftSource", - "redshift-usage-legacy = datahub.ingestion.source.usage.redshift_usage:RedshiftUsageSource", "snowflake = datahub.ingestion.source.snowflake.snowflake_v2:SnowflakeV2Source", "superset = datahub.ingestion.source.superset:SupersetSource", "tableau = datahub.ingestion.source.tableau:TableauSource", diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py index 0b57c41131714..a96171caf9835 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py @@ -1,6 +1,6 @@ from datetime import datetime -from datahub.ingestion.source.sql.redshift import redshift_datetime_format +redshift_datetime_format = "%Y-%m-%d %H:%M:%S" class RedshiftQuery: diff --git a/metadata-ingestion/src/datahub/ingestion/source/source_registry.py b/metadata-ingestion/src/datahub/ingestion/source/source_registry.py index 37f088bcd7b50..c3fbab3f9a012 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/source_registry.py +++ b/metadata-ingestion/src/datahub/ingestion/source/source_registry.py @@ -1,6 +1,3 @@ -import warnings - -from datahub.configuration.common import ConfigurationWarning from datahub.ingestion.api.registry import PluginRegistry from datahub.ingestion.api.source import Source @@ -8,15 +5,7 @@ source_registry.register_from_entrypoint("datahub.ingestion.source.plugins") # Deprecations. -source_registry.register_alias( - "redshift-usage", - "redshift-usage-legacy", - lambda: warnings.warn( - "source type redshift-usage is deprecated, use redshift source instead as usage was merged into the main source", - ConfigurationWarning, - stacklevel=3, - ), -) +# source_registry.register_alias(, , ) # The MSSQL source has two possible sets of dependencies. We alias # the second to the first so that we maintain the 1:1 mapping between diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py deleted file mode 100644 index 33d517c8589e9..0000000000000 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/redshift.py +++ /dev/null @@ -1,1198 +0,0 @@ -import logging -from collections import defaultdict -from dataclasses import dataclass, field -from enum import Enum -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union -from urllib.parse import urlparse - -# These imports verify that the dependencies are available. 
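The `source_registry.py` hunk above removes the last deprecation alias, leaving only a commented template. The general shape of such an alias, an old type name that resolves to the new one while warning on use, is roughly the following (a self-contained sketch built around a toy registry; the real `PluginRegistry.register_alias` lives in `datahub.ingestion.api.registry` and its exact signature should be checked there):

```python
import warnings
from typing import Callable, Dict, Tuple

class TinyRegistry:
    """Illustrative stand-in for DataHub's PluginRegistry alias handling."""

    def __init__(self) -> None:
        self._plugins: Dict[str, type] = {}
        self._aliases: Dict[str, Tuple[str, Callable[[], None]]] = {}

    def register(self, name: str, cls: type) -> None:
        self._plugins[name] = cls

    def register_alias(self, alias: str, real_name: str, on_use: Callable[[], None]) -> None:
        self._aliases[alias] = (real_name, on_use)

    def get(self, name: str) -> type:
        if name in self._aliases:
            real_name, on_use = self._aliases[name]
            on_use()  # surface the deprecation before resolving to the new type
            name = real_name
        return self._plugins[name]

registry = TinyRegistry()
registry.register("redshift", object)  # stand-in plugin class
registry.register_alias(
    "redshift-usage",
    "redshift",
    lambda: warnings.warn(
        "source type redshift-usage is deprecated, use redshift source instead",
        DeprecationWarning,
        stacklevel=3,
    ),
)
assert registry.get("redshift-usage") is object  # warns, then resolves
```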
-import psycopg2 # noqa: F401 -import sqlalchemy -import sqlalchemy_redshift # noqa: F401 -from pydantic.fields import Field -from sqlalchemy import create_engine, inspect -from sqlalchemy.engine import Connection, reflection -from sqlalchemy.engine.reflection import Inspector -from sqlalchemy_redshift.dialect import RedshiftDialect, RelationKey -from sqllineage.runner import LineageRunner - -import datahub.emitter.mce_builder as builder -from datahub.configuration import ConfigModel -from datahub.configuration.source_common import DatasetLineageProviderConfigBase -from datahub.configuration.time_window_config import BaseTimeWindowConfig -from datahub.emitter import mce_builder -from datahub.emitter.mcp import MetadataChangeProposalWrapper -from datahub.ingestion.api.common import PipelineContext -from datahub.ingestion.api.decorators import ( - SourceCapability, - SupportStatus, - capability, - config_class, - platform_name, - support_status, -) -from datahub.ingestion.api.workunit import MetadataWorkUnit -from datahub.ingestion.source.aws.s3_util import strip_s3_prefix -from datahub.ingestion.source.data_lake_common.path_spec import PathSpec -from datahub.ingestion.source.sql.postgres import BasePostgresConfig -from datahub.ingestion.source.sql.sql_common import ( - SQLAlchemySource, - SQLSourceReport, - SqlWorkUnit, -) - -# TRICKY: it's necessary to import the Postgres source because -# that module has some side effects that we care about here. -from datahub.metadata.com.linkedin.pegasus2avro.dataset import UpstreamLineage -from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot -from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent -from datahub.metadata.schema_classes import ( - ChangeTypeClass, - DatasetLineageTypeClass, - DatasetPropertiesClass, - DatasetSnapshotClass, - UpstreamClass, -) - -logger: logging.Logger = logging.getLogger(__name__) - - -class LineageMode(Enum): - SQL_BASED = "sql_based" - STL_SCAN_BASED = "stl_scan_based" - MIXED = "mixed" - - -class LineageCollectorType(Enum): - QUERY_SCAN = "query_scan" - QUERY_SQL_PARSER = "query_sql_parser" - VIEW = "view" - NON_BINDING_VIEW = "non-binding-view" - COPY = "copy" - UNLOAD = "unload" - - -class LineageDatasetPlatform(Enum): - S3 = "s3" - REDSHIFT = "redshift" - - -@dataclass(frozen=True, eq=True) -class LineageDataset: - platform: LineageDatasetPlatform - path: str - - -@dataclass -class LineageItem: - dataset: LineageDataset - upstreams: Set[LineageDataset] - collector_type: LineageCollectorType - dataset_lineage_type: str = field(init=False) - query_parser_failed_sqls: List[str] - - def __post_init__(self): - if self.collector_type == LineageCollectorType.COPY: - self.dataset_lineage_type = DatasetLineageTypeClass.COPY - elif self.collector_type in [ - LineageCollectorType.VIEW, - LineageCollectorType.NON_BINDING_VIEW, - ]: - self.dataset_lineage_type = DatasetLineageTypeClass.VIEW - else: - self.dataset_lineage_type = DatasetLineageTypeClass.TRANSFORMED - - -class S3LineageProviderConfig(ConfigModel): - """ - Any source that produces s3 lineage from/to Datasets should inherit this class. - """ - - path_specs: List[PathSpec] = Field( - description="List of PathSpec. See below the details about PathSpec" - ) - - -class DatasetS3LineageProviderConfigBase(ConfigModel): - """ - Any source that produces s3 lineage from/to Datasets should inherit this class. 
- """ - - s3_lineage_config: Optional[S3LineageProviderConfig] = Field( - default=None, description="Common config for S3 lineage generation" - ) - - -class RedshiftConfig( - BasePostgresConfig, - BaseTimeWindowConfig, - DatasetLineageProviderConfigBase, - DatasetS3LineageProviderConfigBase, -): - def get_identifier(self, schema: str, table: str) -> str: - regular = f"{schema}.{table}" - if self.database_alias: - return f"{self.database_alias}.{regular}" - if self.database: - return f"{self.database}.{regular}" - return regular - - # Although Amazon Redshift is compatible with Postgres's wire format, - # we actually want to use the sqlalchemy-redshift package and dialect - # because it has better caching behavior. In particular, it queries - # the full table, column, and constraint information in a single larger - # query, and then simply pulls out the relevant information as needed. - # Because of this behavior, it uses dramatically fewer round trips for - # large Redshift warehouses. As an example, see this query for the columns: - # https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/blob/60b4db04c1d26071c291aeea52f1dcb5dd8b0eb0/sqlalchemy_redshift/dialect.py#L745. - scheme: str = Field( - default="redshift+psycopg2", - description="", - hidden_from_docs=True, - ) - - default_schema: str = Field( - default="public", - description="The default schema to use if the sql parser fails to parse the schema with `sql_based` lineage collector", - ) - - include_table_lineage: Optional[bool] = Field( - default=True, description="Whether table lineage should be ingested." - ) - include_copy_lineage: Optional[bool] = Field( - default=True, - description="Whether lineage should be collected from copy commands", - ) - include_unload_lineage: Optional[bool] = Field( - default=True, - description="Whether lineage should be collected from unload commands", - ) - capture_lineage_query_parser_failures: Optional[bool] = Field( - default=False, - description="Whether to capture lineage query parser errors with dataset properties for debuggings", - ) - - table_lineage_mode: Optional[LineageMode] = Field( - default=LineageMode.STL_SCAN_BASED, - description="Which table lineage collector mode to use. Available modes are: [stl_scan_based, sql_based, mixed]", - ) - - -# reflection.cache uses eval and other magic to partially rewrite the function. -# mypy can't handle it, so we ignore it for now. 
-@reflection.cache # type: ignore -def _get_all_table_comments(self, connection, **kw): - COMMENT_SQL = """ - SELECT n.nspname as schema, - c.relname as table_name, - pgd.description as table_comment - FROM pg_catalog.pg_class c - LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace - LEFT JOIN pg_catalog.pg_description pgd ON pgd.objsubid = 0 AND pgd.objoid = c.oid - WHERE c.relkind in ('r', 'v', 'm', 'f', 'p') - AND pgd.description IS NOT NULL - ORDER BY "schema", "table_name"; - """ - - all_table_comments: Dict[RelationKey, str] = {} - - result = connection.execute(COMMENT_SQL) - for table in result: - key = RelationKey(table.table_name, table.schema, connection) - all_table_comments[key] = table.table_comment - - return all_table_comments - - -@reflection.cache # type: ignore -def get_table_comment(self, connection, table_name, schema=None, **kw): - all_table_comments = self._get_all_table_comments(connection, **kw) - key = RelationKey(table_name, schema, connection) - if key not in all_table_comments.keys(): - key = key.unquoted() - return {"text": all_table_comments.get(key)} - - -# gets all the relations for internal schemas and external schemas -# by UNION of internal schemas (excluding namespaces starting with pg_) -# and external schemas -@reflection.cache # type: ignore -def _get_all_relation_info(self, connection, **kw): - result = connection.execute( - """ - SELECT c.relkind, - n.oid AS "schema_oid", - n.nspname AS "schema", - c.oid AS "rel_oid", - c.relname, - CASE c.reldiststyle - WHEN 0 THEN 'EVEN' - WHEN 1 THEN 'KEY' - WHEN 8 THEN 'ALL' - END AS "diststyle", - c.relowner AS "owner_id", - u.usename AS "owner_name", - TRIM(TRAILING ';' FROM pg_catalog.pg_get_viewdef (c.oid,TRUE)) AS "view_definition", - pg_catalog.array_to_string(c.relacl,'\n') AS "privileges" - FROM pg_catalog.pg_class c - LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace - JOIN pg_catalog.pg_user u ON u.usesysid = c.relowner - WHERE c.relkind IN ('r','v','m','S','f') - AND n.nspname !~ '^pg_' - AND n.nspname != 'information_schema' - UNION - SELECT 'r' AS "relkind", - NULL AS "schema_oid", - schemaname AS "schema", - NULL AS "rel_oid", - tablename AS "relname", - NULL AS "diststyle", - NULL AS "owner_id", - NULL AS "owner_name", - NULL AS "view_definition", - NULL AS "privileges" - FROM pg_catalog.svv_external_tables - ORDER BY "schema", - "relname";""" - ) - relations = {} - for rel in result: - key = RelationKey(rel.relname, rel.schema, connection) - relations[key] = rel - return relations - - -# workaround to get external tables -# Rewriting some external table types to match redshift type based on -# this redshift-sqlalchemy pull request: -# https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/pull/163/files -# The mapping of external types to redshift types: -# (https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_EXTERNAL_TABLE.html): -# External type -> Redshift type -# int -> integer -# decimal -> numeric -# char -> character -# float -> real -# double -> float -@reflection.cache # type: ignore -def _get_schema_column_info(self, connection, schema=None, **kw): - schema_clause = "AND schema = '{schema}'".format(schema=schema) if schema else "" - all_columns = defaultdict(list) - - with connection.connect() as cc: - result = cc.execute( - """ - SELECT - n.nspname as "schema", - c.relname as "table_name", - att.attname as "name", - format_encoding(att.attencodingtype::integer) as "encode", - format_type(att.atttypid, att.atttypmod) as "type", - att.attisdistkey as 
"distkey", - att.attsortkeyord as "sortkey", - att.attnotnull as "notnull", - pg_catalog.col_description(att.attrelid, att.attnum) - as "comment", - adsrc, - attnum, - pg_catalog.format_type(att.atttypid, att.atttypmod), - pg_catalog.pg_get_expr(ad.adbin, ad.adrelid) AS DEFAULT, - n.oid as "schema_oid", - c.oid as "table_oid" - FROM pg_catalog.pg_class c - LEFT JOIN pg_catalog.pg_namespace n - ON n.oid = c.relnamespace - JOIN pg_catalog.pg_attribute att - ON att.attrelid = c.oid - LEFT JOIN pg_catalog.pg_attrdef ad - ON (att.attrelid, att.attnum) = (ad.adrelid, ad.adnum) - WHERE n.nspname !~ '^pg_' - AND att.attnum > 0 - AND NOT att.attisdropped - {schema_clause} - UNION - SELECT - view_schema as "schema", - view_name as "table_name", - col_name as "name", - null as "encode", - col_type as "type", - null as "distkey", - 0 as "sortkey", - null as "notnull", - null as "comment", - null as "adsrc", - null as "attnum", - col_type as "format_type", - null as "default", - null as "schema_oid", - null as "table_oid" - FROM pg_get_late_binding_view_cols() cols( - view_schema name, - view_name name, - col_name name, - col_type varchar, - col_num int) - WHERE 1 {schema_clause} - UNION - SELECT - schemaname as "schema", - tablename as "table_name", - columnname as "name", - null as "encode", - -- Spectrum represents data types differently. - -- Standardize, so we can infer types. - CASE - WHEN external_type = 'int' THEN 'integer' - ELSE - regexp_replace( - replace( - replace( - replace( - replace( - replace( - replace(external_type, 'decimal', 'numeric'), - 'varchar', 'character varying'), - 'string', 'character varying'), - 'char(', 'character('), - 'float', 'real'), - 'double', 'float'), - '^array<(.*)>$', '$1[]', 1, 'p') - END AS "type", - null as "distkey", - 0 as "sortkey", - null as "notnull", - null as "comment", - null as "adsrc", - null as "attnum", - CASE - WHEN external_type = 'int' THEN 'integer' - ELSE - regexp_replace( - replace( - replace( - replace( - replace( - replace( - replace(external_type, 'decimal', 'numeric'), - 'varchar', 'character varying'), - 'string', 'character varying'), - 'char(', 'character('), - 'float', 'real'), - 'double', 'float'), - '^array<(.*)>$', '$1[]', 1, 'p') - END AS "format_type", - null as "default", - null as "schema_oid", - null as "table_oid" - FROM SVV_EXTERNAL_COLUMNS - WHERE 1 {schema_clause} - ORDER BY "schema", "table_name", "attnum" - """.format( - schema_clause=schema_clause - ) - ) - for col in result: - key = RelationKey(col.table_name, col.schema, connection) - all_columns[key].append(col) - return dict(all_columns) - - -def _get_external_db_mapping(connection): - # SQL query to get mapping of external schemas in redshift to its external database. - return connection.execute( - """ - select * from svv_external_schemas - """ - ) - - -# This monkey-patching enables us to batch fetch the table descriptions, rather than -# fetching them one at a time. 
-RedshiftDialect._get_all_table_comments = _get_all_table_comments
-RedshiftDialect.get_table_comment = get_table_comment
-RedshiftDialect._get_all_relation_info = _get_all_relation_info
-RedshiftDialect._get_schema_column_info = _get_schema_column_info
-
-redshift_datetime_format = "%Y-%m-%d %H:%M:%S"
-
-
-@dataclass
-class RedshiftReport(SQLSourceReport):
-    # https://forums.aws.amazon.com/ann.jspa?annID=9105
-    saas_version: str = ""
-    upstream_lineage: Dict[str, List[str]] = field(default_factory=dict)
-
-
-@platform_name("Redshift")
-@config_class(RedshiftConfig)
-@support_status(SupportStatus.CERTIFIED)
-@capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
-@capability(SourceCapability.DOMAINS, "Supported via the `domain` config field")
-@capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
-@capability(SourceCapability.DESCRIPTIONS, "Enabled by default")
-@capability(SourceCapability.LINEAGE_COARSE, "Optionally enabled via configuration")
-@capability(
-    SourceCapability.USAGE_STATS,
-    "Not provided by this module, use `redshift-usage` for that.",
-    supported=False,
-)
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
-class RedshiftSource(SQLAlchemySource):
-    """
-    This plugin extracts the following:
-
-    - Metadata for databases, schemas, views and tables
-    - Column types associated with each table
-    - Also supports PostGIS extensions
-    - Table, row, and column statistics via optional SQL profiling
-    - Table lineage
-
-    :::tip
-
-    You can also get fine-grained usage statistics for Redshift using the `redshift-usage` source described below.
-
-    :::
-
-    ### Prerequisites
-
-    This source needs to access system tables that require extra permissions.
-    To grant these permissions, please alter your DataHub Redshift user as follows:
-    ```sql
-    ALTER USER datahub_user WITH SYSLOG ACCESS UNRESTRICTED;
-    GRANT SELECT ON pg_catalog.svv_table_info to datahub_user;
-    GRANT SELECT ON pg_catalog.svl_user_info to datahub_user;
-    ```
-    :::note
-
-    Giving a user unrestricted access to system tables gives the user visibility into data generated by other users. For example, STL_QUERY and STL_QUERYTEXT contain the full text of INSERT, UPDATE, and DELETE statements.
-
-    :::
-
-    ### Lineage
-
-    There are multiple lineage collector implementations, as Redshift does not support table lineage out of the box.
-
-    #### stl_scan_based
-    The stl_scan based collector uses Redshift's [stl_insert](https://docs.aws.amazon.com/redshift/latest/dg/r_STL_INSERT.html) and [stl_scan](https://docs.aws.amazon.com/redshift/latest/dg/r_STL_SCAN.html) system tables to
-    discover lineage between tables.
-    Pros:
-    - Fast
-    - Reliable
-
-    Cons:
-    - Does not work with Spectrum/external tables because those scans do not show up in the stl_scan table.
-    - If a table depends on a view, the view won't be listed as a dependency; instead, the table will be connected to the view's dependencies.
-
-    #### sql_based
-    The sql_based collector uses Redshift's [stl_insert](https://docs.aws.amazon.com/redshift/latest/dg/r_STL_INSERT.html) to discover all the insert queries
-    and uses sql parsing to discover the dependencies.
-
-    Pros:
-    - Works with Spectrum tables
-    - Views are connected properly if a table depends on them
-
-    Cons:
-    - Slow.
-    - Less reliable, as the query parser can fail on certain queries
-
-    #### mixed
-    Uses both collectors above, first applying the sql_based and then the stl_scan_based one.
-
-    Pros:
-    - Works with Spectrum tables
-    - Views are connected properly if a table depends on them
-    - A bit more reliable than the sql_based one alone
-
-    Cons:
-    - Slow
-    - May be incorrect at times, as the query parser can fail on certain queries
-
-    :::note
-
-    The Redshift stl tables which are used for getting data lineage only retain approximately two to five days of log history. This means you cannot extract lineage from queries issued outside that window.
-
-    :::
-
-    """
-
-    eskind_to_platform = {1: "glue", 2: "hive", 3: "postgres", 4: "redshift"}
-
-    def __init__(self, config: RedshiftConfig, ctx: PipelineContext):
-        super().__init__(config, ctx, "redshift")
-        self.catalog_metadata: Dict = {}
-        self.config: RedshiftConfig = config
-        self._lineage_map: Optional[Dict[str, LineageItem]] = None
-        self._all_tables_set: Optional[Set[str]] = None
-        self.report: RedshiftReport = RedshiftReport()
-
-    @classmethod
-    def create(cls, config_dict, ctx):
-        config = RedshiftConfig.parse_obj(config_dict)
-        return cls(config, ctx)
-
-    def get_catalog_metadata(self, conn: Connection) -> None:
-        try:
-            catalog_metadata = _get_external_db_mapping(conn)
-        except Exception as e:
-            self.error(logger, "external-svv_external_schemas", f"Error was {e}")
-            return
-
-        db_name = self.get_db_name()
-
-        external_schema_mapping = {}
-        for rel in catalog_metadata:
-            if rel.eskind != 1:
-                logger.debug(
-                    f"Skipping {rel.schemaname} for mapping to external database, as we currently only "
-                    f"support glue"
-                )
-                continue
-            external_schema_mapping[rel.schemaname] = {
-                "eskind": rel.eskind,
-                "external_database": rel.databasename,
-                "esoptions": rel.esoptions,
-                "esoid": rel.esoid,
-                "esowner": rel.esowner,
-            }
-        self.catalog_metadata[db_name] = external_schema_mapping
-
-    def get_inspectors(self) -> Iterable[Inspector]:
-        # This method can be overridden in the case that you want to dynamically
-        # run on multiple databases.
- engine = self.get_metadata_engine() - with engine.connect() as conn: - self.get_catalog_metadata(conn) - inspector = inspect(conn) - yield inspector - - def get_metadata_engine(self) -> sqlalchemy.engine.Engine: - url = self.config.get_sql_alchemy_url() - logger.debug(f"sql_alchemy_url={url}") - return create_engine(url, **self.config.options) - - def inspect_version(self) -> Any: - db_engine = self.get_metadata_engine() - logger.info("Checking current version") - for db_row in db_engine.execute("select version()"): - self.report.saas_version = db_row[0] - - def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]: - try: - self.inspect_version() - except Exception as e: - self.report.report_failure("version", f"Error: {e}") - return - - for wu in super().get_workunits_internal(): - yield wu - if ( - isinstance(wu, SqlWorkUnit) - and isinstance(wu.metadata, MetadataChangeEvent) - and isinstance(wu.metadata.proposedSnapshot, DatasetSnapshot) - ): - lineage_mcp = None - lineage_properties_aspect: Optional[DatasetPropertiesClass] = None - - dataset_snapshot: DatasetSnapshotClass = wu.metadata.proposedSnapshot - assert dataset_snapshot - - if self.config.include_table_lineage: - lineage_mcp, lineage_properties_aspect = self.get_lineage_mcp( - wu.metadata.proposedSnapshot.urn - ) - - if lineage_mcp is not None: - yield lineage_mcp.as_workunit() - - if lineage_properties_aspect: - aspects = dataset_snapshot.aspects - if aspects is None: - aspects = [] - - dataset_properties_aspect: Optional[DatasetPropertiesClass] = None - - for aspect in aspects: - if isinstance(aspect, DatasetPropertiesClass): - dataset_properties_aspect = aspect - - if dataset_properties_aspect is None: - dataset_properties_aspect = DatasetPropertiesClass() - aspects.append(dataset_properties_aspect) - - custom_properties = ( - { - **dataset_properties_aspect.customProperties, - **lineage_properties_aspect.customProperties, - } - if dataset_properties_aspect.customProperties - else lineage_properties_aspect.customProperties - ) - dataset_properties_aspect.customProperties = custom_properties - dataset_snapshot.aspects = aspects - - dataset_snapshot.aspects.append(dataset_properties_aspect) - - def _get_all_tables(self) -> Set[str]: - all_tables_query: str = """ - select - table_schema as schemaname, - table_name as tablename - from - pg_catalog.svv_tables - where - table_type = 'BASE TABLE' - and table_schema not in ('information_schema', 'pg_catalog', 'pg_internal') - union - select - distinct schemaname, - tablename - from - svv_external_tables - union - SELECT - n.nspname AS schemaname - ,c.relname AS tablename - FROM - pg_catalog.pg_class AS c - INNER JOIN - pg_catalog.pg_namespace AS n - ON c.relnamespace = n.oid - WHERE relkind = 'v' - and - n.nspname not in ('pg_catalog', 'information_schema') - - """ - db_name = self.get_db_name() - all_tables_set = set() - - engine = self.get_metadata_engine() - for db_row in engine.execute(all_tables_query): - all_tables_set.add( - f'{db_name}.{db_row["schemaname"]}.{db_row["tablename"]}' - ) - - return all_tables_set - - def _get_sources_from_query(self, db_name: str, query: str) -> List[LineageDataset]: - sources = list() - - parser = LineageRunner(query) - - for table in parser.source_tables: - source_schema, source_table = str(table).split(".") - if source_schema == "": - source_schema = str(self.config.default_schema) - - source = LineageDataset( - platform=LineageDatasetPlatform.REDSHIFT, - path=f"{db_name}.{source_schema}.{source_table}", - ) - 
sources.append(source)
-
-        return sources
-
-    def get_db_name(self, inspector: Optional[Inspector] = None) -> str:
-        db_name = self.config.database
-        db_alias = self.config.database_alias
-        if db_alias:
-            db_name = db_alias
-        assert db_name
-        return db_name
-
-    def _get_s3_path(self, path: str) -> str:
-        if self.config.s3_lineage_config:
-            for path_spec in self.config.s3_lineage_config.path_specs:
-                if path_spec.allowed(path):
-                    table_name, table_path = path_spec.extract_table_name_and_path(path)
-                    return table_path
-        return path
-
-    def _build_s3_path_from_row(self, db_row):
-        path = db_row["filename"].strip()
-        if urlparse(path).scheme != "s3":
-            raise ValueError(
-                f"Only s3 source supported with copy/unload. The source was: {path}"
-            )
-        return strip_s3_prefix(self._get_s3_path(path))
-
-    def _populate_lineage_map(
-        self, query: str, lineage_type: LineageCollectorType
-    ) -> None:
-        """
-        This method generates table-level lineage based on the given query.
-        The query should return the following columns: target_schema, target_table, source_table, source_schema.
-        source_table and source_schema can be omitted if the sql_field is set, in which case they are
-        extracted from the sql_field by sql parsing.
-
-        :param query: The query to run to extract lineage.
-        :type query: str
-        :param lineage_type: The way the lineage should be processed
-        :type lineage_type: LineageCollectorType
-        :return: The method does not return anything, as it directly modifies the self._lineage_map property.
-        :rtype: None
-        """
-        assert self._lineage_map is not None
-
-        if not self._all_tables_set:
-            self._all_tables_set = self._get_all_tables()
-
-        engine = self.get_metadata_engine()
-
-        db_name = self.get_db_name()
-
-        try:
-            for db_row in engine.execute(query):
-                if lineage_type != LineageCollectorType.UNLOAD:
-                    if not self.config.schema_pattern.allowed(
-                        db_row["target_schema"]
-                    ) or not self.config.table_pattern.allowed(db_row["target_table"]):
-                        continue
-
-                # Target
-                if lineage_type == LineageCollectorType.UNLOAD:
-                    try:
-                        target_platform = LineageDatasetPlatform.S3
-                        # Following call requires 'filename' key in db_row
-                        target_path = self._build_s3_path_from_row(db_row)
-                    except ValueError as e:
-                        self.warn(logger, "non-s3-lineage", str(e))
-                        continue
-                else:
-                    target_platform = LineageDatasetPlatform.REDSHIFT
-                    target_path = (
-                        f'{db_name}.{db_row["target_schema"]}.{db_row["target_table"]}'
-                    )
-
-                target = LineageItem(
-                    dataset=LineageDataset(platform=target_platform, path=target_path),
-                    upstreams=set(),
-                    collector_type=lineage_type,
-                    query_parser_failed_sqls=list(),
-                )
-
-                # Source
-                sources: List[LineageDataset] = list()
-                if lineage_type in {
-                    lineage_type.QUERY_SQL_PARSER,
-                    lineage_type.NON_BINDING_VIEW,
-                }:
-                    try:
-                        sources = self._get_sources_from_query(
-                            db_name=db_name, query=db_row["ddl"]
-                        )
-                    except Exception as e:
-                        target.query_parser_failed_sqls.append(db_row["ddl"])
-                        self.warn(
-                            logger,
-                            "parsing-query",
-                            f'Error parsing query {db_row["ddl"]} for getting lineage.'
-                            f"\nError was {e}.",
-                        )
-                else:
-                    if lineage_type == lineage_type.COPY:
-                        try:
-                            platform = LineageDatasetPlatform.S3
-                            # Following call requires 'filename' key in db_row
-                            path = self._build_s3_path_from_row(db_row)
-                        except ValueError as e:
-                            self.warn(logger, "non-s3-lineage", str(e))
-                            continue
-                    else:
-                        platform = LineageDatasetPlatform.REDSHIFT
-                        path = f'{db_name}.{db_row["source_schema"]}.{db_row["source_table"]}'
-
-                    sources = [
-                        LineageDataset(
-                            platform=platform,
-                            path=path,
-                        )
-                    ]
-
-                for source in sources:
-                    # Filter out tables which do not exist in Redshift: they may have been
-                    # deleted in the meantime, or the query parser may not have captured the table name correctly
-                    if (
-                        source.platform == LineageDatasetPlatform.REDSHIFT
-                        and source.path not in self._all_tables_set
-                    ):
-                        self.warn(
-                            logger, "missing-table", f"{source.path} missing table"
-                        )
-                        continue
-
-                    target.upstreams.add(source)
-
-                # Merge upstreams if the dataset already exists in the lineage map
-                if target.dataset.path in self._lineage_map:
-                    self._lineage_map[
-                        target.dataset.path
-                    ].upstreams = self._lineage_map[
-                        target.dataset.path
-                    ].upstreams.union(
-                        target.upstreams
-                    )
-
-                else:
-                    self._lineage_map[target.dataset.path] = target
-
-                logger.info(
-                    f"Lineage[{target}]:{self._lineage_map[target.dataset.path]}"
-                )
-
-        except Exception as e:
-            self.warn(logger, f"extract-{lineage_type.name}", f"Error was {e}")
-
-    def _populate_lineage(self) -> None:
-        stl_scan_based_lineage_query: str = """
-            select
-                distinct cluster,
-                target_schema,
-                target_table,
-                username as username,
-                source_schema,
-                source_table
-            from
-                (
-                select
-                    distinct tbl as target_table_id,
-                    sti.schema as target_schema,
-                    sti.table as target_table,
-                    sti.database as cluster,
-                    query,
-                    starttime
-                from
-                    stl_insert
-                join SVV_TABLE_INFO sti on
-                    sti.table_id = tbl
-                where starttime >= '{start_time}'
-                and starttime < '{end_time}'
-                and cluster = '{db_name}'
-                ) as target_tables
-            join ( (
-                select
-                    sui.usename as username,
-                    ss.tbl as source_table_id,
-                    sti.schema as source_schema,
-                    sti.table as source_table,
-                    scan_type,
-                    sq.query as query
-                from
-                    (
-                    select
-                        distinct userid,
-                        query,
-                        tbl,
-                        type as scan_type
-                    from
-                        stl_scan
-                    ) ss
-                join SVV_TABLE_INFO sti on
-                    sti.table_id = ss.tbl
-                left join stl_query sq on
-                    ss.query = sq.query
-                left join svl_user_info sui on
-                    sq.userid = sui.usesysid
-                where
-                    sui.usename <> 'rdsdb')
-            ) as source_tables
-                using (query)
-            where
-                scan_type in (1, 2, 3)
-            order by cluster, target_schema, target_table, starttime asc
-            """.format(
-            # We need the original database name for filtering
-            db_name=self.config.database,
-            start_time=self.config.start_time.strftime(redshift_datetime_format),
-            end_time=self.config.end_time.strftime(redshift_datetime_format),
-        )
-        view_lineage_query = """
-            select
-                distinct
-                srcnsp.nspname as source_schema
-                ,
-                srcobj.relname as source_table
-                ,
-                tgtnsp.nspname as target_schema
-                ,
-                tgtobj.relname as target_table
-            from
-                pg_catalog.pg_class as srcobj
-            inner join
-                pg_catalog.pg_depend as srcdep
-                    on
-                srcobj.oid = srcdep.refobjid
-            inner join
-                pg_catalog.pg_depend as tgtdep
-                    on
-                srcdep.objid = tgtdep.objid
-            join
-                pg_catalog.pg_class as tgtobj
-                    on
-                tgtdep.refobjid = tgtobj.oid
-                and srcobj.oid <> tgtobj.oid
-            left outer join
-                pg_catalog.pg_namespace as srcnsp
-                    on
-                srcobj.relnamespace = srcnsp.oid
-            left outer join
-                pg_catalog.pg_namespace tgtnsp
-                    on
-                tgtobj.relnamespace = tgtnsp.oid
-            where
-                tgtdep.deptype = 'i'
-                --dependency_internal
-                and
tgtobj.relkind = 'v' - --i=index, v=view, s=sequence - and tgtnsp.nspname not in ('pg_catalog', 'information_schema') - order by target_schema, target_table asc - """ - - list_late_binding_views_query = """ - SELECT - n.nspname AS target_schema - ,c.relname AS target_table - , COALESCE(pg_get_viewdef(c.oid, TRUE), '') AS ddl - FROM - pg_catalog.pg_class AS c - INNER JOIN - pg_catalog.pg_namespace AS n - ON c.relnamespace = n.oid - WHERE relkind = 'v' - and ddl ilike '%%with no schema binding%%' - and - n.nspname not in ('pg_catalog', 'information_schema') - """ - - list_insert_create_queries_sql = """ - select - distinct cluster, - target_schema, - target_table, - username, - querytxt as ddl - from - ( - select - distinct tbl as target_table_id, - sti.schema as target_schema, - sti.table as target_table, - sti.database as cluster, - sui.usename as username, - querytxt, - si.starttime as starttime - from - stl_insert as si - join SVV_TABLE_INFO sti on - sti.table_id = tbl - left join svl_user_info sui on - si.userid = sui.usesysid - left join stl_query sq on - si.query = sq.query - left join stl_load_commits slc on - slc.query = si.query - where - sui.usename <> 'rdsdb' - and sq.aborted = 0 - and slc.query IS NULL - and cluster = '{db_name}' - and si.starttime >= '{start_time}' - and si.starttime < '{end_time}' - ) as target_tables - order by cluster, target_schema, target_table, starttime asc - """.format( - # We need the original database name for filtering - db_name=self.config.database, - start_time=self.config.start_time.strftime(redshift_datetime_format), - end_time=self.config.end_time.strftime(redshift_datetime_format), - ) - - list_copy_commands_sql = """ - select - distinct - "schema" as target_schema, - "table" as target_table, - filename - from - stl_insert as si - join stl_load_commits as c on - si.query = c.query - join SVV_TABLE_INFO sti on - sti.table_id = tbl - where - database = '{db_name}' - and si.starttime >= '{start_time}' - and si.starttime < '{end_time}' - order by target_schema, target_table, starttime asc - """.format( - # We need the original database name for filtering - db_name=self.config.database, - start_time=self.config.start_time.strftime(redshift_datetime_format), - end_time=self.config.end_time.strftime(redshift_datetime_format), - ) - - list_unload_commands_sql = """ - select - distinct - sti.database as cluster, - sti.schema as source_schema, - sti."table" as source_table, - unl.path as filename - from - stl_unload_log unl - join stl_scan sc on - sc.query = unl.query and - sc.starttime >= '{start_time}' and - sc.endtime < '{end_time}' - join SVV_TABLE_INFO sti on - sti.table_id = sc.tbl - where - unl.start_time >= '{start_time}' and - unl.end_time < '{end_time}' and - sti.database = '{db_name}' - and sc.type in (1, 2, 3) - order by cluster, source_schema, source_table, filename, unl.start_time asc - """.format( - # We need the original database name for filtering - db_name=self.config.database, - start_time=self.config.start_time.strftime(redshift_datetime_format), - end_time=self.config.end_time.strftime(redshift_datetime_format), - ) - - if not self._lineage_map: - self._lineage_map = defaultdict() - - if self.config.table_lineage_mode == LineageMode.STL_SCAN_BASED: - # Populate table level lineage by getting upstream tables from stl_scan redshift table - self._populate_lineage_map( - query=stl_scan_based_lineage_query, - lineage_type=LineageCollectorType.QUERY_SCAN, - ) - elif self.config.table_lineage_mode == LineageMode.SQL_BASED: - # Populate 
table level lineage by parsing table creating sqls - self._populate_lineage_map( - query=list_insert_create_queries_sql, - lineage_type=LineageCollectorType.QUERY_SQL_PARSER, - ) - elif self.config.table_lineage_mode == LineageMode.MIXED: - # Populate table level lineage by parsing table creating sqls - self._populate_lineage_map( - query=list_insert_create_queries_sql, - lineage_type=LineageCollectorType.QUERY_SQL_PARSER, - ) - # Populate table level lineage by getting upstream tables from stl_scan redshift table - self._populate_lineage_map( - query=stl_scan_based_lineage_query, - lineage_type=LineageCollectorType.QUERY_SCAN, - ) - - if self.config.include_views: - # Populate table level lineage for views - self._populate_lineage_map( - query=view_lineage_query, lineage_type=LineageCollectorType.VIEW - ) - - # Populate table level lineage for late binding views - self._populate_lineage_map( - query=list_late_binding_views_query, - lineage_type=LineageCollectorType.NON_BINDING_VIEW, - ) - if self.config.include_copy_lineage: - self._populate_lineage_map( - query=list_copy_commands_sql, lineage_type=LineageCollectorType.COPY - ) - if self.config.include_unload_lineage: - self._populate_lineage_map( - query=list_unload_commands_sql, lineage_type=LineageCollectorType.UNLOAD - ) - - def get_lineage_mcp( - self, dataset_urn: str - ) -> Tuple[ - Optional[MetadataChangeProposalWrapper], Optional[DatasetPropertiesClass] - ]: - dataset_key = mce_builder.dataset_urn_to_key(dataset_urn) - if dataset_key is None: - return None, None - - if self._lineage_map is None: - logger.debug("Populating lineage") - self._populate_lineage() - assert self._lineage_map is not None - - upstream_lineage: List[UpstreamClass] = [] - custom_properties: Dict[str, str] = {} - - if dataset_key.name in self._lineage_map: - item = self._lineage_map[dataset_key.name] - if ( - self.config.capture_lineage_query_parser_failures - and item.query_parser_failed_sqls - ): - custom_properties["lineage_sql_parser_failed_queries"] = ",".join( - item.query_parser_failed_sqls - ) - for upstream in item.upstreams: - upstream_table = UpstreamClass( - dataset=builder.make_dataset_urn_with_platform_instance( - upstream.platform.value, - upstream.path, - platform_instance=self.config.platform_instance_map.get( - upstream.platform.value - ) - if self.config.platform_instance_map - else None, - env=self.config.env, - ), - type=item.dataset_lineage_type, - ) - upstream_lineage.append(upstream_table) - - dataset_params = dataset_key.name.split(".") - db_name = dataset_params[0] - schemaname = dataset_params[1] - tablename = dataset_params[2] - if db_name in self.catalog_metadata: - if schemaname in self.catalog_metadata[db_name]: - external_db_params = self.catalog_metadata[db_name][schemaname] - upstream_platform = self.eskind_to_platform[ - external_db_params["eskind"] - ] - catalog_upstream = UpstreamClass( - mce_builder.make_dataset_urn_with_platform_instance( - upstream_platform, - "{database}.{table}".format( - database=external_db_params["external_database"], - table=tablename, - ), - platform_instance=self.config.platform_instance_map.get( - upstream_platform - ) - if self.config.platform_instance_map - else None, - env=self.config.env, - ), - DatasetLineageTypeClass.COPY, - ) - upstream_lineage.append(catalog_upstream) - - properties = None - if custom_properties: - properties = DatasetPropertiesClass(customProperties=custom_properties) - - if upstream_lineage: - self.report.upstream_lineage[dataset_urn] = [ - u.dataset for u in 
upstream_lineage - ] - else: - return None, properties - - mcp = MetadataChangeProposalWrapper( - entityType="dataset", - changeType=ChangeTypeClass.UPSERT, - entityUrn=dataset_urn, - aspectName="upstreamLineage", - aspect=UpstreamLineage(upstreams=upstream_lineage), - ) - - return mcp, properties diff --git a/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py b/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py deleted file mode 100644 index 691eaa8211054..0000000000000 --- a/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py +++ /dev/null @@ -1,397 +0,0 @@ -import collections -import dataclasses -import logging -import time -from datetime import datetime -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set - -from pydantic.fields import Field -from pydantic.main import BaseModel -from sqlalchemy import create_engine -from sqlalchemy.engine import Engine - -import datahub.emitter.mce_builder as builder -from datahub.configuration.source_common import EnvConfigMixin -from datahub.configuration.time_window_config import get_time_bucket -from datahub.emitter.mcp import MetadataChangeProposalWrapper -from datahub.ingestion.api.common import PipelineContext -from datahub.ingestion.api.decorators import ( - SourceCapability, - SupportStatus, - capability, - config_class, - platform_name, - support_status, -) -from datahub.ingestion.api.source import Source, SourceReport -from datahub.ingestion.api.workunit import MetadataWorkUnit -from datahub.ingestion.source.sql.redshift import RedshiftConfig -from datahub.ingestion.source.usage.usage_common import ( - BaseUsageConfig, - GenericAggregatedDataset, -) -from datahub.metadata.schema_classes import OperationClass, OperationTypeClass - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - try: - from sqlalchemy.engine import Row # type: ignore - except ImportError: - # See https://github.com/python/mypy/issues/1153. - from sqlalchemy.engine.result import RowProxy as Row # type: ignore - -REDSHIFT_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" - - -# Add this join to the sql query for more metrics on completed queries -# LEFT JOIN svl_query_metrics_summary sqms ON ss.query = sqms.query -# Reference: https://docs.aws.amazon.com/redshift/latest/dg/r_SVL_QUERY_METRICS_SUMMARY.html - -# this sql query joins stl_scan over table info, -# querytext, and user info to get usage stats -# using non-LEFT joins here to limit the results to -# queries run by the user on user-defined tables. 
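Rows returned by this query are later grouped into time buckets before usage workunits are emitted (see `_aggregate_access_events` further down). Here is a standalone sketch of that bucketing idea, with illustrative event tuples and a hypothetical day-flooring helper standing in for `get_time_bucket`:

```python
from collections import defaultdict
from datetime import datetime


def floor_to_day(ts: datetime) -> datetime:
    # Hypothetical stand-in for get_time_bucket with a DAY bucket duration.
    return ts.replace(hour=0, minute=0, second=0, microsecond=0)


events = [
    ("dev.public.orders", datetime(2023, 1, 1, 9, 30)),
    ("dev.public.orders", datetime(2023, 1, 1, 17, 5)),
    ("dev.public.orders", datetime(2023, 1, 2, 8, 0)),
]

# One counter per (bucket, resource); the real source keeps a richer
# AggregatedDataset per slot instead of a bare count.
buckets: defaultdict = defaultdict(lambda: defaultdict(int))
for resource, ts in events:
    buckets[floor_to_day(ts)][resource] += 1

assert buckets[datetime(2023, 1, 1)]["dev.public.orders"] == 2
assert buckets[datetime(2023, 1, 2)]["dev.public.orders"] == 1
```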
-REDSHIFT_USAGE_QUERY_TEMPLATE: str = """ -SELECT DISTINCT ss.userid as userid, - ss.query as query, - sui.usename as username, - ss.tbl as tbl, - sq.querytxt as querytxt, - sti.database as database, - sti.schema as schema, - sti.table as table, - sq.starttime as starttime, - sq.endtime as endtime -FROM stl_scan ss - JOIN svv_table_info sti ON ss.tbl = sti.table_id - JOIN stl_query sq ON ss.query = sq.query - JOIN svl_user_info sui ON sq.userid = sui.usesysid -WHERE ss.starttime >= '{start_time}' -AND ss.starttime < '{end_time}' -AND sti.database = '{database}' -AND sq.aborted = 0 -ORDER BY ss.endtime DESC; -""".strip() - -REDSHIFT_OPERATION_ASPECT_QUERY_TEMPLATE: str = """ - (SELECT - DISTINCT si.userid AS userid, - si.query AS query, - si.rows AS rows, - sui.usename AS username, - si.tbl AS tbl, - sq.querytxt AS querytxt, - sti.database AS database, - sti.schema AS schema, - sti.table AS table, - sq.starttime AS starttime, - sq.endtime AS endtime, - 'insert' AS operation_type - FROM - stl_insert si - JOIN svv_table_info sti ON si.tbl = sti.table_id - JOIN stl_query sq ON si.query = sq.query - JOIN svl_user_info sui ON sq.userid = sui.usesysid - WHERE - si.starttime >= '{start_time}' - AND si.starttime < '{end_time}' - AND si.rows > 0 - AND sq.aborted = 0) -UNION - (SELECT - DISTINCT sd.userid AS userid, - sd.query AS query, - sd.rows AS ROWS, - sui.usename AS username, - sd.tbl AS tbl, - sq.querytxt AS querytxt, - sti.database AS database, - sti.schema AS schema, - sti.table AS table, - sq.starttime AS starttime, - sq.endtime AS endtime, - 'delete' AS operation_type - FROM - stl_delete sd - JOIN svv_table_info sti ON sd.tbl = sti.table_id - JOIN stl_query sq ON sd.query = sq.query - JOIN svl_user_info sui ON sq.userid = sui.usesysid - WHERE - sd.starttime >= '{start_time}' - AND sd.starttime < '{end_time}' - AND sd.rows > 0 - AND sq.aborted = 0) -ORDER BY - endtime DESC -""".strip() - -RedshiftTableRef = str -AggregatedDataset = GenericAggregatedDataset[RedshiftTableRef] -AggregatedAccessEvents = Dict[datetime, Dict[RedshiftTableRef, AggregatedDataset]] - - -class RedshiftAccessEvent(BaseModel): - userid: int - username: str - query: int - tbl: int - text: Optional[str] = Field(None, alias="querytxt") - database: str - schema_: str = Field(alias="schema") - table: str - operation_type: Optional[str] = None - starttime: datetime - endtime: datetime - - -class RedshiftUsageConfig(RedshiftConfig, BaseUsageConfig, EnvConfigMixin): - email_domain: str = Field( - description="Email domain of your organisation so users can be displayed on UI appropriately." - ) - options: Dict = Field( - default={}, - description="Any options specified here will be passed to SQLAlchemy's create_engine as kwargs." - "See https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine for details.", - ) - - def get_sql_alchemy_url(self): - return super().get_sql_alchemy_url() - - -@dataclasses.dataclass -class RedshiftUsageSourceReport(SourceReport): - filtered: Set[str] = dataclasses.field(default_factory=set) - num_usage_workunits_emitted: Optional[int] = None - num_operational_stats_workunits_emitted: Optional[int] = None - - def report_dropped(self, key: str) -> None: - self.filtered.add(key) - - -@platform_name("Redshift") -@config_class(RedshiftUsageConfig) -@support_status(SupportStatus.CERTIFIED) -@capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default") -class RedshiftUsageSource(Source): - """ - This plugin extracts usage statistics for datasets in Amazon Redshift. 
-
-    Note: Usage information is computed by querying the following system tables:
-
-    1. stl_scan
-    2. svv_table_info
-    3. stl_query
-    4. svl_user_info
-
-    To grant this plugin access to all system tables, please alter your DataHub Redshift user as follows:
-    ```sql
-    ALTER USER datahub_user WITH SYSLOG ACCESS UNRESTRICTED;
-    ```
-    This plugin has the following functionalities:
-    1. For a specific dataset, this plugin ingests the following statistics:
-       1. top n queries.
-       2. top users.
-    2. Aggregation of these statistics into buckets, by day or hour granularity.
-
-    :::note
-
-    This source only extracts usage statistics. To get the tables, views, and schemas in your Redshift warehouse, ingest using the `redshift` source described above.
-
-    :::
-
-    :::note
-
-    Redshift system tables have some latency in getting data from queries. In addition, these tables only maintain logs for 2-5 days. You can find more information in the official documentation [here](https://aws.amazon.com/premiumsupport/knowledge-center/logs-redshift-database-cluster/).
-
-    :::
-
-    """
-
-    def __init__(self, config: RedshiftUsageConfig, ctx: PipelineContext):
-        super().__init__(ctx)
-        self.config: RedshiftUsageConfig = config
-        self.report: RedshiftUsageSourceReport = RedshiftUsageSourceReport()
-
-    @classmethod
-    def create(cls, config_dict: Dict, ctx: PipelineContext) -> "RedshiftUsageSource":
-        config = RedshiftUsageConfig.parse_obj(config_dict)
-        return cls(config, ctx)
-
-    def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
-        """Gets Redshift usage stats as work units"""
-        engine: Engine = self._make_sql_engine()
-        if self.config.include_operational_stats:
-            # Generate operation aspect workunits
-            yield from self._gen_operation_aspect_workunits(engine)
-
-        # Generate aggregate events
-        query: str = REDSHIFT_USAGE_QUERY_TEMPLATE.format(
-            start_time=self.config.start_time.strftime(REDSHIFT_DATETIME_FORMAT),
-            end_time=self.config.end_time.strftime(REDSHIFT_DATETIME_FORMAT),
-            database=self.config.database,
-        )
-        access_events_iterable: Iterable[
-            RedshiftAccessEvent
-        ] = self._gen_access_events_from_history_query(query, engine)
-
-        aggregated_events: AggregatedAccessEvents = self._aggregate_access_events(
-            access_events_iterable
-        )
-        # Generate usage workunits from aggregated events.
-        self.report.num_usage_workunits_emitted = 0
-        for time_bucket in aggregated_events.values():
-            for aggregate in time_bucket.values():
-                yield self._make_usage_stat(aggregate)
-                self.report.num_usage_workunits_emitted += 1
-
-    def _gen_operation_aspect_workunits(
-        self, engine: Engine
-    ) -> Iterable[MetadataWorkUnit]:
-        # Generate access events
-        query: str = REDSHIFT_OPERATION_ASPECT_QUERY_TEMPLATE.format(
-            start_time=self.config.start_time.strftime(REDSHIFT_DATETIME_FORMAT),
-            end_time=self.config.end_time.strftime(REDSHIFT_DATETIME_FORMAT),
-        )
-        access_events_iterable: Iterable[
-            RedshiftAccessEvent
-        ] = self._gen_access_events_from_history_query(query, engine)
-
-        # Generate operation aspect work units from the access events
-        yield from self._gen_operation_aspect_workunits_from_access_events(
-            access_events_iterable
-        )
-
-    def _make_sql_engine(self) -> Engine:
-        url: str = self.config.get_sql_alchemy_url()
-        logger.debug(f"sql_alchemy_url = {url}")
-        return create_engine(url, **self.config.options)
-
-    def _should_process_row(self, row: "Row") -> bool:
-        # Check for mandatory properties being present first.
-        missing_props: List[str] = [
-            prop
-            for prop in ["database", "schema", "table", "username"]
-            if not row[prop]
-        ]
-        if missing_props:
-            logger.info(
-                f"Access event parameter(s):[{','.join(missing_props)}] missing. Skipping..."
-            )
-            return False
-        # Check schema/table allow/deny patterns
-        full_table_name: str = f"{row['database']}.{row['schema']}.{row['table']}"
-        if not self.config.schema_pattern.allowed(row["schema"]):
-            logger.debug(f"Filtering out {full_table_name} due to schema_pattern.")
-            self.report.report_dropped(full_table_name)
-            return False
-        if not self.config.table_pattern.allowed(full_table_name):
-            logger.debug(f"Filtering out {full_table_name} due to table_pattern.")
-            self.report.report_dropped(full_table_name)
-            return False
-        # Passed all checks.
-        return True
-
-    def _gen_access_events_from_history_query(
-        self, query: str, engine: Engine
-    ) -> Iterable[RedshiftAccessEvent]:
-        results = engine.execute(query)
-        for row in results:
-            if not self._should_process_row(row):
-                continue
-            row = row._asdict()
-            access_event = RedshiftAccessEvent(**dict(row.items()))
-            # Replace the database name with the alias name if one is provided in the config.
-            if self.config.database_alias:
-                access_event.database = self.config.database_alias
-            yield access_event
-
-    def _gen_operation_aspect_workunits_from_access_events(
-        self,
-        events_iterable: Iterable[RedshiftAccessEvent],
-    ) -> Iterable[MetadataWorkUnit]:
-        self.report.num_operational_stats_workunits_emitted = 0
-        for event in events_iterable:
-            if not (
-                event.database
-                and event.username
-                and event.schema_
-                and event.table
-                and event.endtime
-                and event.operation_type
-            ):
-                continue
-
-            assert event.operation_type in ["insert", "delete"]
-
-            resource: str = f"{event.database}.{event.schema_}.{event.table}"
-            reported_time: int = int(time.time() * 1000)
-            last_updated_timestamp: int = int(event.endtime.timestamp() * 1000)
-            user_email: str = event.username
-            operation_aspect = OperationClass(
-                timestampMillis=reported_time,
-                lastUpdatedTimestamp=last_updated_timestamp,
-                actor=builder.make_user_urn(user_email.split("@")[0]),
-                operationType=(
-                    OperationTypeClass.INSERT
-                    if event.operation_type == "insert"
-                    else OperationTypeClass.DELETE
-                ),
-            )
-            yield MetadataChangeProposalWrapper(
-                entityUrn=builder.make_dataset_urn_with_platform_instance(
-                    "redshift",
-                    resource.lower(),
-                    self.config.platform_instance,
-                    self.config.env,
-                ),
-                aspect=operation_aspect,
-            ).as_workunit()
-            self.report.num_operational_stats_workunits_emitted += 1
-
-    def _aggregate_access_events(
-        self, events_iterable: Iterable[RedshiftAccessEvent]
-    ) -> AggregatedAccessEvents:
-        datasets: AggregatedAccessEvents = collections.defaultdict(dict)
-        for event in events_iterable:
-            floored_ts: datetime = get_time_bucket(
-                event.starttime, self.config.bucket_duration
-            )
-            resource: str = f"{event.database}.{event.schema_}.{event.table}"
-            # Get a reference to the bucket value (initializing it if not yet in the dict) and update it.
-            agg_bucket: AggregatedDataset = datasets[floored_ts].setdefault(
-                resource,
-                AggregatedDataset(
-                    bucket_start_time=floored_ts,
-                    resource=resource,
-                ),
-            )
-            # Due to a current limitation in the user stats UI, we need to provide an email to show users.
-            user_email: str = f"{event.username if event.username else 'unknown'}"
-            if "@" not in user_email:
-                user_email += f"@{self.config.email_domain}"
-            logger.info(f"user_email: {user_email}")
-            agg_bucket.add_read_entry(
-                user_email,
-                event.text,
-                [],  # TODO: not currently supported by redshift; find column level changes
-                user_email_pattern=self.config.user_email_pattern,
-            )
-        return datasets
-
-    def _make_usage_stat(self, agg: AggregatedDataset) -> MetadataWorkUnit:
-        return agg.make_usage_workunit(
-            self.config.bucket_duration,
-            lambda resource: builder.make_dataset_urn_with_platform_instance(
-                "redshift",
-                resource.lower(),
-                self.config.platform_instance,
-                self.config.env,
-            ),
-            self.config.top_n_queries,
-            self.config.format_sql_queries,
-            self.config.include_top_n_queries,
-            self.config.queries_character_limit,
-        )
-
-    def get_report(self) -> RedshiftUsageSourceReport:
-        return self.report

From 417ffb12d8093a809ef11e9362786b91b6a9f1f6 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Thu, 16 Nov 2023 13:34:18 -0500
Subject: [PATCH 118/792] feat(ingest): support advanced configs for aws
 (#9237)

---
 .../ingestion/source/aws/aws_common.py        | 29 +++++++++++++++----
 1 file changed, 23 insertions(+), 6 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py
index d61975694f541..0fb211a5d7b16 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py
@@ -1,8 +1,8 @@
-from typing import TYPE_CHECKING, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
 
 import boto3
 from boto3.session import Session
-from botocore.config import Config
+from botocore.config import DEFAULT_TIMEOUT, Config
 from botocore.utils import fix_s3_host
 from pydantic.fields import Field
 
@@ -104,6 +104,16 @@ class AwsConnectionConfig(ConfigModel):
         description="A set of proxy configs to use with AWS. See the [botocore.config](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) docs for details.",
     )
 
+    read_timeout: float = Field(
+        default=DEFAULT_TIMEOUT,
+        description="The timeout for reading from the connection (in seconds).",
+    )
+
+    aws_advanced_config: Dict[str, Any] = Field(
+        default_factory=dict,
+        description="Advanced AWS configuration options.
These are passed directly to [botocore.config.Config](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html).", + ) + def _normalized_aws_roles(self) -> List[AwsAssumeRoleConfig]: if not self.aws_role: return [] @@ -167,13 +177,20 @@ def get_credentials(self) -> Dict[str, str]: } return {} + def _aws_config(self) -> Config: + return Config( + proxies=self.aws_proxy, + read_timeout=self.read_timeout, + **self.aws_advanced_config, + ) + def get_s3_client( self, verify_ssl: Optional[Union[bool, str]] = None ) -> "S3Client": return self.get_session().client( "s3", endpoint_url=self.aws_endpoint_url, - config=Config(proxies=self.aws_proxy), + config=self._aws_config(), verify=verify_ssl, ) @@ -183,7 +200,7 @@ def get_s3_resource( resource = self.get_session().resource( "s3", endpoint_url=self.aws_endpoint_url, - config=Config(proxies=self.aws_proxy), + config=self._aws_config(), verify=verify_ssl, ) # according to: https://stackoverflow.com/questions/32618216/override-s3-endpoint-using-boto3-configuration-file @@ -195,10 +212,10 @@ def get_s3_resource( return resource def get_glue_client(self) -> "GlueClient": - return self.get_session().client("glue") + return self.get_session().client("glue", config=self._aws_config()) def get_sagemaker_client(self) -> "SageMakerClient": - return self.get_session().client("sagemaker") + return self.get_session().client("sagemaker", config=self._aws_config()) class AwsSourceConfig(EnvConfigMixin, AwsConnectionConfig): From d0fa5de9f051a77c729f19f2e2df74f8581ccefd Mon Sep 17 00:00:00 2001 From: Adriano Vega Llobell Date: Thu, 16 Nov 2023 19:58:49 +0100 Subject: [PATCH 119/792] fix(sql-parser): convert platform instance to lowercase when building table urns (#9181) --- .../src/datahub/utilities/sqlglot_lineage.py | 6 +++- .../unit/sql_parsing/test_schemaresolver.py | 33 +++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/test_schemaresolver.py diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index d1209f3ec7b75..efe2d26aae3d9 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -360,8 +360,12 @@ def get_urn_for_table(self, table: _TableName, lower: bool = False) -> str: table_name = ".".join( filter(None, [table.database, table.db_schema, table.table]) ) + + platform_instance = self.platform_instance + if lower: table_name = table_name.lower() + platform_instance = platform_instance.lower() if platform_instance else None if self.platform == "bigquery": # Normalize shard numbers and other BigQuery weirdness. 
@@ -372,7 +376,7 @@ def get_urn_for_table(self, table: _TableName, lower: bool = False) -> str: urn = make_dataset_urn_with_platform_instance( platform=self.platform, - platform_instance=self.platform_instance, + platform_instance=platform_instance, env=self.env, name=table_name, ) diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_schemaresolver.py b/metadata-ingestion/tests/unit/sql_parsing/test_schemaresolver.py new file mode 100644 index 0000000000000..3fd5d72b4d41a --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/test_schemaresolver.py @@ -0,0 +1,33 @@ +from datahub.utilities.sqlglot_lineage import SchemaResolver, _TableName + + +def test_get_urn_for_table_lowercase(): + schema_resolver = SchemaResolver( + platform="mssql", + platform_instance="Uppercased-Instance", + env="PROD", + graph=None, + ) + + table = _TableName(database="Database", db_schema="DataSet", table="Table") + + assert ( + schema_resolver.get_urn_for_table(table=table, lower=True) + == "urn:li:dataset:(urn:li:dataPlatform:mssql,uppercased-instance.database.dataset.table,PROD)" + ) + + +def test_get_urn_for_table_not_lower_should_keep_capital_letters(): + schema_resolver = SchemaResolver( + platform="mssql", + platform_instance="Uppercased-Instance", + env="PROD", + graph=None, + ) + + table = _TableName(database="Database", db_schema="DataSet", table="Table") + + assert ( + schema_resolver.get_urn_for_table(table=table, lower=False) + == "urn:li:dataset:(urn:li:dataPlatform:mssql,Uppercased-Instance.Database.DataSet.Table,PROD)" + ) From 5cb04981b26a7a5045fe43ee074c2a99f30c015f Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 16 Nov 2023 17:18:17 -0500 Subject: [PATCH 120/792] test(ingest/unity): Update goldens (#9254) --- .../unity/unity_catalog_mces_golden.json | 499 ++++++++++++------ 1 file changed, 337 insertions(+), 162 deletions(-) diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 0c14096345d7e..2e92215d70b99 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -8,6 +8,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data", @@ -16,7 +17,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +33,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +49,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +67,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -87,7 +92,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -102,7 +108,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -114,6 +121,7 @@ "json": { 
"customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "main" }, @@ -124,7 +132,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -139,7 +148,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -154,7 +164,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -171,7 +182,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -195,7 +207,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -210,7 +223,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -230,7 +244,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -242,6 +257,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "main", "unity_schema": "default" @@ -253,7 +269,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -268,7 +285,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -283,7 +301,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -300,7 +319,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -324,7 +344,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -339,7 +360,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -363,7 +385,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -378,7 +401,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -420,7 +444,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -437,7 +462,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -494,7 +520,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -516,7 +543,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -540,7 +568,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -568,7 +597,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -580,6 +610,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "main", "unity_schema": "information_schema" @@ -591,7 +622,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -606,7 +638,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -621,7 +654,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -638,7 +672,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -662,7 +697,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -677,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -701,7 +738,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -716,7 +754,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -758,7 +797,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -775,7 +815,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,7 +873,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -854,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -878,7 +921,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -906,7 +950,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -918,6 +963,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "main", "unity_schema": "quickstart_schema" @@ -929,7 +975,8 @@ }, "systemMetadata": { "lastObserved": 
1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -944,7 +991,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -959,7 +1007,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -976,7 +1025,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1000,7 +1050,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1015,7 +1066,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1039,7 +1091,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1054,7 +1107,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1096,7 +1150,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1113,7 +1168,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1170,7 +1226,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1192,7 +1249,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1216,7 +1274,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1244,7 +1303,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1256,6 +1316,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "quickstart_catalog" }, @@ -1266,7 +1327,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1281,7 +1343,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1296,7 +1359,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1313,7 +1377,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1337,7 +1402,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -1352,7 +1418,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1372,7 +1439,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1384,6 +1452,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "quickstart_catalog", "unity_schema": "default" @@ -1395,7 +1464,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1410,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1425,7 +1496,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1442,7 +1514,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1466,7 +1539,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1481,7 +1555,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1505,7 +1580,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1520,7 +1596,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1562,7 +1639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1579,7 +1657,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1636,7 +1715,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1658,7 +1738,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1682,7 +1763,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1710,7 +1792,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1722,6 +1805,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "quickstart_catalog", "unity_schema": "information_schema" @@ -1733,7 +1817,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, 
{ @@ -1748,7 +1833,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1763,7 +1849,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1780,7 +1867,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1804,7 +1892,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1819,7 +1908,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1843,7 +1933,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1858,7 +1949,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1900,7 +1992,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1917,7 +2010,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1974,7 +2068,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1996,7 +2091,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2020,7 +2116,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2048,7 +2145,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2060,6 +2158,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "quickstart_catalog", "unity_schema": "quickstart_schema" @@ -2071,7 +2170,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2086,7 +2186,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2101,7 +2202,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2118,7 +2220,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2142,7 +2245,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2157,7 +2261,8 @@ }, "systemMetadata": 
{ "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2181,7 +2286,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2196,7 +2302,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2238,7 +2345,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2255,7 +2363,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2312,7 +2421,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2334,7 +2444,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2358,7 +2469,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2386,7 +2498,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2398,6 +2511,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "system" }, @@ -2408,7 +2522,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2423,7 +2538,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2438,7 +2554,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2455,7 +2572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2479,7 +2597,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2494,7 +2613,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2514,7 +2634,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2526,6 +2647,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "system", "unity_schema": "default" @@ -2537,7 +2659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2552,7 +2675,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", 
+ "lastRunId": "no-run-id-provided" } }, { @@ -2567,7 +2691,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2584,7 +2709,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2608,7 +2734,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2623,7 +2750,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2647,7 +2775,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2662,7 +2791,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2704,7 +2834,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2721,7 +2852,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2778,7 +2910,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2800,7 +2933,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2824,7 +2958,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2852,7 +2987,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2864,6 +3000,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "system", "unity_schema": "information_schema" @@ -2875,7 +3012,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2890,7 +3028,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2905,7 +3044,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2922,7 +3062,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2946,7 +3087,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2961,7 +3103,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2985,7 
+3128,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3000,7 +3144,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3042,7 +3187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3059,7 +3205,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3116,7 +3263,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3138,7 +3286,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3162,7 +3311,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3190,7 +3340,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3202,6 +3353,7 @@ "json": { "customProperties": { "platform": "databricks", + "env": "PROD", "metastore": "acryl metastore", "catalog": "system", "unity_schema": "quickstart_schema" @@ -3213,7 +3365,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3228,7 +3381,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3243,7 +3397,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3260,7 +3415,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3284,7 +3440,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3299,7 +3456,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3323,7 +3481,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3338,7 +3497,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3380,7 +3540,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3397,7 +3558,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3454,7 +3616,8 @@ }, "systemMetadata": { "lastObserved": 
1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3476,7 +3639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3500,7 +3664,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3528,7 +3693,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3543,7 +3709,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3558,7 +3725,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3573,7 +3741,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3588,7 +3757,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3603,7 +3773,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3618,7 +3789,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3633,7 +3805,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3648,7 +3821,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3663,7 +3837,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "unity-catalog-test" + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file From b778def10aa53cf7852206078834a2cd4499c181 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 16 Nov 2023 17:19:25 -0500 Subject: [PATCH 121/792] build(ingest/hive): Update thrift pin (#8964) Co-authored-by: Harshal Sheth --- metadata-ingestion/setup.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 04ae03cd440ac..2b002164a49b9 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -213,11 +213,14 @@ # - 0.6.12 adds support for Spark Thrift Server # - 0.6.13 adds a small fix for Databricks # - 0.6.14 uses pure-sasl instead of sasl so it builds on Python 3.11 - "acryl-pyhive[hive_pure_sasl]==0.6.14", + # - 0.6.15 adds support for thrift > 0.14 (cherry-picked from https://github.com/apache/thrift/pull/2491) + "acryl-pyhive[hive_pure_sasl]==0.6.15", # As per https://github.com/datahub-project/datahub/issues/8405 - # and https://github.com/dropbox/PyHive/issues/417, new versions - # of thrift break PyHive's hive+http transport. 
- "thrift<0.14.0", + # and https://github.com/dropbox/PyHive/issues/417, version 0.14.0 + # of thrift broke PyHive's hive+http transport. + # Fixed by https://github.com/apache/thrift/pull/2491 in version 0.17.0 + # which is unfortunately not on PyPi. + # Instead, we put the fix in our PyHive fork, so no thrift pin is needed. } microsoft_common = {"msal==1.22.0"} From c348f841c6151f85bcc1f299e95cc91a8415c750 Mon Sep 17 00:00:00 2001 From: "Jia (Jason) Teoh" Date: Thu, 16 Nov 2023 16:05:56 -0800 Subject: [PATCH 122/792] docs(airflow): update plugin setup docs to include UI setup approach (#9253) --- docs/lineage/airflow.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 3a13aefa834a4..32da518d6c04c 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -37,12 +37,24 @@ pip install 'acryl-datahub-airflow-plugin[plugin-v2]' ### Configuration -Set up a DataHub connection in Airflow. +Set up a DataHub connection in Airflow, either via command line or the Airflow UI. + +#### Command Line ```shell airflow connections add --conn-type 'datahub-rest' 'datahub_rest_default' --conn-host 'http://datahub-gms:8080' --conn-password '' ``` +#### Airflow UI + +On the Airflow UI, go to Admin -> Connections and click the "+" symbol to create a new connection. Select "DataHub REST Server" from the dropdown for "Connection Type" and enter the appropriate values. + +

+<!-- screenshot: adding a connection in the Airflow UI with connection type "DataHub REST Server" -->
+
+<!-- screenshot: the completed DataHub connection form in the Airflow UI -->
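The connection can also be created programmatically, which is convenient for scripted or containerized Airflow deployments. The following is a minimal sketch using Airflow's connection model; the session handling and the empty password field (where an optional DataHub auth token would go) are illustrative assumptions on my part, not part of the documented setup:

```python
from airflow.models.connection import Connection
from airflow.settings import Session

# Equivalent of the `airflow connections add` command shown above,
# expressed through Airflow's ORM. Assumes no connection with this
# conn_id exists yet.
conn = Connection(
    conn_id="datahub_rest_default",
    conn_type="datahub-rest",
    host="http://datahub-gms:8080",
    password="",  # optional DataHub auth token, if your instance requires one
)

session = Session()
session.add(conn)
session.commit()
session.close()
```

Either route should yield the same `datahub_rest_default` connection that the plugin reads at runtime.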
+
+#### Optional Configurations
+
 No additional configuration is required to use the plugin. However, there are some optional configuration parameters that can be set in the `airflow.cfg` file.

From b03515fbc7ececfadbbee8f93c19c577298bffe4 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Thu, 16 Nov 2023 18:39:36 -0600
Subject: [PATCH 123/792] feat(usageclient): updates for usageclient (#9255)

---
 build.gradle                                  |   1 +
 .../src/main/resources/application.yml        |   3 +-
 .../gms/factory/usage/UsageClientFactory.java |  14 +-
 metadata-service/restli-client/build.gradle   |   1 +
 .../linkedin/common/client/ClientCache.java   |   8 +-
 .../entity/client/EntityClientCache.java      |   6 +-
 .../java/com/linkedin/usage/UsageClient.java  |  11 +-
 .../com/linkedin/usage/UsageClientCache.java  |   4 +-
 .../resources/usage/UsageStats.java.latest    | 316 ------------------
 .../restli/DefaultRestliClientFactory.java    |  18 +-
 10 files changed, 47 insertions(+), 335 deletions(-)
 delete mode 100644 metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java.latest

diff --git a/build.gradle b/build.gradle
index 9eecb1696bb19..7c5deb4783943 100644
--- a/build.gradle
+++ b/build.gradle
@@ -82,6 +82,7 @@ project.ext.externalDependency = [
   'commonsLang': 'commons-lang:commons-lang:2.6',
   'commonsText': 'org.apache.commons:commons-text:1.10.0',
   'commonsCollections': 'commons-collections:commons-collections:3.2.2',
+  'caffeine': 'com.github.ben-manes.caffeine:caffeine:3.1.8',
   'datastaxOssNativeProtocol': 'com.datastax.oss:native-protocol:1.5.1',
   'datastaxOssCore': 'com.datastax.oss:java-driver-core:4.14.1',
   'datastaxOssQueryBuilder': 'com.datastax.oss:java-driver-query-builder:4.14.1',
diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml
index 40674e13e647f..571cb66c84aa8 100644
--- a/metadata-service/configuration/src/main/resources/application.yml
+++ b/metadata-service/configuration/src/main/resources/application.yml
@@ -332,7 +332,8 @@ entityClient:

 usageClient:
   retryInterval: ${USAGE_CLIENT_RETRY_INTERVAL:2}
-  numRetries: ${USAGE_CLIENT_NUM_RETRIES:3}
+  numRetries: ${USAGE_CLIENT_NUM_RETRIES:0}
+  timeoutMs: ${USAGE_CLIENT_TIMEOUT_MS:3000}

 cache:
   primary:
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java
index e83cbc82d8067..d2bd89de8767a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java
@@ -5,6 +5,7 @@ import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.restli.DefaultRestliClientFactory;
 import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
+import com.linkedin.r2.transport.http.client.HttpClientFactory;
 import com.linkedin.restli.client.Client;
 import com.linkedin.usage.UsageClient;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -14,6 +15,9 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

+import java.util.HashMap;
+import java.util.Map;
+
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class UsageClientFactory { @Value("${usageClient.retryInterval:2}") private int retryInterval; - @Value("${usageClient.numRetries:3}") + @Value("${usageClient.numRetries:0}") private int numRetries; + @Value("${usageClient.timeoutMs:3000}") + private long timeoutMs; + @Autowired @Qualifier("configurationProvider") private ConfigurationProvider configurationProvider; @Bean("usageClient") public UsageClient getUsageClient(@Qualifier("systemAuthentication") final Authentication systemAuthentication) { - Client restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); + Map params = new HashMap<>(); + params.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, String.valueOf(timeoutMs)); + + Client restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); return new UsageClient(restClient, new ExponentialBackoff(retryInterval), numRetries, systemAuthentication, configurationProvider.getCache().getClient().getUsageClient()); } diff --git a/metadata-service/restli-client/build.gradle b/metadata-service/restli-client/build.gradle index b1b778b45c0b5..7cad1981ad911 100644 --- a/metadata-service/restli-client/build.gradle +++ b/metadata-service/restli-client/build.gradle @@ -9,6 +9,7 @@ dependencies { api project(':metadata-utils') implementation project(':metadata-service:configuration') + implementation externalDependency.caffeine implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java index 8aa0984be57b9..79d473d1b0090 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java @@ -14,8 +14,8 @@ import lombok.extern.slf4j.Slf4j; import org.checkerframework.checker.nullness.qual.Nullable; -import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; @@ -63,15 +63,15 @@ private ClientCache build() { public ClientCache build(Class metricClazz) { // loads data from entity client - CacheLoader loader = new CacheLoader<>() { + CacheLoader loader = new CacheLoader() { @Override public V load(@NonNull K key) { - return loadAll(List.of(key)).get(key); + return loadAll(Set.of(key)).get(key); } @Override @NonNull - public Map loadAll(@NonNull Iterable keys) { + public Map loadAll(@NonNull Set keys) { return loadFunction.apply(keys); } }; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java index 6006f3a9a87f6..8e103cff283ea 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java @@ -81,16 +81,14 @@ private EntityClientCacheBuilder cache(LoadingCache cache) public EntityClientCache build(Class metricClazz) { // estimate size Weigher weighByEstimatedSize = (key, value) -> - value.getValue().data().values().parallelStream() - .mapToInt(o -> o.toString().getBytes().length) - .sum(); + 
value.getValue().data().toString().getBytes().length; // batch loads data from entity client (restli or java) Function, Map> loader = (Iterable keys) -> { Map> keysByEntity = StreamSupport.stream(keys.spliterator(), true) .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())); - Map results = keysByEntity.entrySet().parallelStream() + Map results = keysByEntity.entrySet().stream() .flatMap(entry -> { Set urns = entry.getValue().stream() .map(Key::getUrn) diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java index d2b8499615e8d..850847bfd262a 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java @@ -9,6 +9,7 @@ import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.restli.client.Client; + import java.net.URISyntaxException; import javax.annotation.Nonnull; @@ -51,10 +52,12 @@ public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTi private UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range, @Nonnull Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final UsageStatsDoQueryRangeRequestBuilder requestBuilder = USAGE_STATS_REQUEST_BUILDERS.actionQueryRange() - .resourceParam(resource) - .durationParam(WindowDuration.DAY) - .rangeFromEndParam(range); + + final UsageStatsDoQueryRangeRequestBuilder requestBuilder = USAGE_STATS_REQUEST_BUILDERS + .actionQueryRange() + .resourceParam(resource) + .durationParam(WindowDuration.DAY) + .rangeFromEndParam(range); return sendClientRequest(requestBuilder, authentication).getEntity(); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java index a04c1e90fb4a3..10a1ebb6dcccb 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java @@ -42,9 +42,7 @@ private UsageClientCacheBuilder cache(LoadingCache cache) public UsageClientCache build() { // estimate size Weigher weighByEstimatedSize = (key, value) -> - value.data().values().parallelStream() - .mapToInt(o -> o.toString().getBytes().length) - .sum(); + value.data().toString().getBytes().length; // batch loads data from usage client Function, Map> loader = (Iterable keys) -> diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java.latest b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java.latest deleted file mode 100644 index 91f74c12e6aad..0000000000000 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java.latest +++ /dev/null @@ -1,316 +0,0 @@ -package com.linkedin.metadata.resources.usage; - -import com.linkedin.common.WindowDuration; -import com.linkedin.common.urn.Urn; -import com.linkedin.data.template.SetMode; -import com.linkedin.data.template.StringArray; -import com.linkedin.data.template.StringArrayArray; -import com.linkedin.metadata.query.Condition; -import com.linkedin.metadata.query.Criterion; -import 
com.linkedin.metadata.query.CriterionArray; -import com.linkedin.metadata.query.Filter; -import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService; -import com.linkedin.metadata.usage.UsageService; -import com.linkedin.timeseries.AggregationSpec; -import com.linkedin.timeseries.AggregationType; -import com.linkedin.timeseries.CalendarInterval; -import com.linkedin.timeseries.DateGroupingBucket; -import com.linkedin.timeseries.GenericTable; -import com.linkedin.metadata.restli.RestliUtils; -import com.linkedin.parseq.Task; -import com.linkedin.restli.server.annotations.Action; -import com.linkedin.restli.server.annotations.ActionParam; -import com.linkedin.restli.server.annotations.RestLiSimpleResource; -import com.linkedin.restli.server.resources.SimpleResourceTemplate; -import com.linkedin.timeseries.GroupingBucket; -import com.linkedin.timeseries.MetricAggregation; -import com.linkedin.timeseries.StringGroupingBucket; -import com.linkedin.usage.FieldUsageCounts; -import com.linkedin.usage.FieldUsageCountsArray; -import com.linkedin.usage.UsageAggregation; -import com.linkedin.usage.UsageAggregationArray; -import com.linkedin.usage.UsageAggregationMetrics; -import com.linkedin.usage.UsageQueryResult; -import com.linkedin.usage.UsageQueryResultAggregations; -import com.linkedin.usage.UsageTimeRange; -import com.linkedin.usage.UserUsageCounts; -import com.linkedin.usage.UserUsageCountsArray; -import com.linkedin.util.Pair; -import java.net.URISyntaxException; -import java.util.ArrayList; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.Nonnull; -import javax.inject.Inject; -import javax.inject.Named; -import java.time.Instant; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; - - -/** - * Rest.li entry point: /usageStats - */ -@RestLiSimpleResource(name = "usageStats", namespace = "com.linkedin.usage") -public class UsageStats extends SimpleResourceTemplate { - private static final String ACTION_BATCH_INGEST = "batchIngest"; - private static final String PARAM_BUCKETS = "buckets"; - - private static final String ACTION_QUERY = "query"; - private static final String PARAM_RESOURCE = "resource"; - private static final String PARAM_DURATION = "duration"; - private static final String PARAM_START_TIME = "startTime"; - private static final String PARAM_END_TIME = "endTime"; - private static final String PARAM_MAX_BUCKETS = "maxBuckets"; - - private static final String ACTION_QUERY_RANGE = "queryRange"; - private static final String PARAM_RANGE = "rangeFromEnd"; - private static final String USAGE_STATS_ENTITY_NAME = "dataset"; - private static final String USAGE_STATS_ASPECT_NAME = "datasetUsageStatistics"; - private static final String ES_FIELD_TIMESTAMP = "timestampMillis"; - private final Logger _logger = LoggerFactory.getLogger(UsageStats.class.getName()); - @Inject - @Named("usageService") - private UsageService _usageService; - @Inject - @Named("elasticSearchTimeseriesAspectService") - private ElasticSearchTimeseriesAspectService _elasticSearchTimeseriesAspectService; - - @Action(name = ACTION_BATCH_INGEST) - @Nonnull - public Task batchIngest(@ActionParam(PARAM_BUCKETS) @Nonnull UsageAggregation[] buckets) { - _logger.info("Ingesting {} usage stats aggregations", buckets.length); - return RestliUtils.toTask(() -> { - for (UsageAggregation agg : buckets) { - this.ingest(agg); - } - return null; - }); - } - - private CalendarInterval windowToInterval(@Nonnull 
WindowDuration duration) { - switch (duration) { - case HOUR: - return CalendarInterval.HOUR; - case DAY: - return CalendarInterval.DAY; - case WEEK: - return CalendarInterval.WEEK; - case MONTH: - return CalendarInterval.MONTH; - case YEAR: - return CalendarInterval.YEAR; - default: - throw new IllegalArgumentException("Unsupported duration value" + duration); - } - } - - private UsageAggregationArray getBuckets(@Nonnull String resource, @Nonnull WindowDuration duration, Long startTime, Long endTime) { - // Populate the filter - Filter filter = new Filter(); - ArrayList criteria = new ArrayList<>(); - Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource); - criteria.add(hasUrnCriterion); - if (startTime != null) { - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTime.toString()); - criteria.add(startTimeCriterion); - } - if (endTime != null) { - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTime.toString()); - criteria.add(endTimeCriterion); - } - filter.setCriteria(new CriterionArray(criteria)); - // Populate the aggregation specs - ArrayList aggregationSpecs = new ArrayList<>(); - aggregationSpecs.add(new AggregationSpec().setAggregationType(AggregationType.LATEST).setMemberName("uniqueUserCount")); - aggregationSpecs.add(new AggregationSpec().setAggregationType(AggregationType.LATEST).setMemberName("totalSqlQueries")); - aggregationSpecs.add(new AggregationSpec().setAggregationType(AggregationType.LATEST).setMemberName("topSqlQueries")); - /* - aggregationSpecs.add(new AggregationSpec().setAggregationType(AggregationType.SUM).setMemberName("totalSqlQueries")); - aggregationSpecs.add(new AggregationSpec().setAggregationType(AggregationType.SUM).setMemberName("userCounts.count")); - aggregationSpecs.add(new AggregationSpec().setAggregationType(AggregationType.SUM).setMemberName("fieldCounts.count")); - */ - - // Populate the Grouping buckets - ArrayList groupingBuckets = new ArrayList<>(); - // ts bucket - GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setDateGroupingBucket( - new DateGroupingBucket().setKey(ES_FIELD_TIMESTAMP).setGranularity(windowToInterval(duration))); - groupingBuckets.add(timestampBucket); - /* - // user counts bucket - GroupingBucket userGroupsBucket = new GroupingBucket(); - userGroupsBucket.setStringGroupingBucket( new StringGroupingBucket().setKey("userCounts.user") ); - groupingBuckets.add(userGroupsBucket); - // field counts bucket - GroupingBucket fieldCountGroupBucket = new GroupingBucket(); - fieldCountGroupBucket.setStringGroupingBucket(new StringGroupingBucket().setKey("fieldCounts.fieldName")); - groupingBuckets.add(fieldCountGroupBucket); - */ - - GenericTable result = - _elasticSearchTimeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, - (AggregationSpec[]) aggregationSpecs.toArray(), filter, (GroupingBucket[]) groupingBuckets.toArray()); - UsageAggregationArray buckets = new UsageAggregationArray(); - for(StringArray row: result.getRows()) { - UsageAggregation usageAggregation = new UsageAggregation(); - usageAggregation.setBucket(Long.valueOf(row.get(0))); - usageAggregation.setDuration(duration); - try { - usageAggregation.setResource(new Urn(resource)); - } catch (URISyntaxException e) { - throw new IllegalArgumentException("Invalid resource" 
+ e); - } - UsageAggregationMetrics usageAggregationMetrics = new UsageAggregationMetrics(); - usageAggregationMetrics.setUniqueUserCount(Integer.valueOf(row.get(1))); - usageAggregationMetrics.setTotalSqlQueries(Integer.valueOf(row.get(2))); - //usageAggregationMetrics.setTopSqlQueries(row.get(3)); - usageAggregation.setMetrics(usageAggregationMetrics); - } - return buckets; - } - - private UsageQueryResultAggregations getAggregations(String resource, WindowDuration duration, Long startTime, Long endTime) { - // TODO: make the aggregation computation logic reusable - UsageQueryResultAggregations aggregations = new UsageQueryResultAggregations(); - - /* - // Compute aggregations for users and unique user count. - { - Map, Integer> userAgg = new HashMap<>(); - buckets.forEach((bucket) -> { - Optional.ofNullable(bucket.getMetrics().getUsers()).ifPresent(usersUsageCounts -> { - usersUsageCounts.forEach((userCount -> { - Pair key = new Pair<>(userCount.getUser(), userCount.getUserEmail()); - int count = userAgg.getOrDefault(key, 0); - count += userCount.getCount(); - userAgg.put(key, count); - })); - }); - }); - - if (!userAgg.isEmpty()) { - UserUsageCountsArray users = new UserUsageCountsArray(); - users.addAll(userAgg.entrySet() - .stream() - .map((mapping) -> new UserUsageCounts().setUser(mapping.getKey().getFirst(), SetMode.REMOVE_IF_NULL) - .setUserEmail(mapping.getKey().getSecond(), SetMode.REMOVE_IF_NULL) - .setCount(mapping.getValue())) - .collect(Collectors.toList())); - aggregations.setUsers(users); - aggregations.setUniqueUserCount(userAgg.size()); - } - } - - // Compute aggregation for total query count. - { - Integer totalQueryCount = null; - - for (UsageAggregation bucket : buckets) { - if (bucket.getMetrics().getTotalSqlQueries() != null) { - if (totalQueryCount == null) { - totalQueryCount = 0; - } - totalQueryCount += bucket.getMetrics().getTotalSqlQueries(); - } - } - - if (totalQueryCount != null) { - aggregations.setTotalSqlQueries(totalQueryCount); - } - } - - // Compute aggregations for field usage counts. 
- { - Map fieldAgg = new HashMap<>(); - buckets.forEach((bucket) -> { - Optional.ofNullable(bucket.getMetrics().getFields()).ifPresent(fieldUsageCounts -> { - fieldUsageCounts.forEach((fieldCount -> { - String key = fieldCount.getFieldName(); - int count = fieldAgg.getOrDefault(key, 0); - count += fieldCount.getCount(); - fieldAgg.put(key, count); - })); - }); - }); - - if (!fieldAgg.isEmpty()) { - FieldUsageCountsArray fields = new FieldUsageCountsArray(); - fields.addAll(fieldAgg.entrySet() - .stream() - .map((mapping) -> new FieldUsageCounts().setFieldName(mapping.getKey()).setCount(mapping.getValue())) - .collect(Collectors.toList())); - aggregations.setFields(fields); - } - } - */ - return aggregations; - } - - @Action(name = ACTION_QUERY) - @Nonnull - public Task query(@ActionParam(PARAM_RESOURCE) @Nonnull String resource, - @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, - @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional Long startTime, - @ActionParam(PARAM_END_TIME) @com.linkedin.restli.server.annotations.Optional Long endTime, - @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional Integer maxBuckets) { - _logger.info("Attempting to query usage stats"); - return RestliUtils.toTask(() -> { - UsageAggregationArray buckets = getBuckets(resource, duration, startTime, endTime); - UsageQueryResultAggregations aggregations = getAggregations(resource, duration, startTime, endTime); - return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations); - }); - } - - - @Action(name = ACTION_QUERY_RANGE) - @Nonnull - public Task queryRange(@ActionParam(PARAM_RESOURCE) @Nonnull String resource, - @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, @ActionParam(PARAM_RANGE) UsageTimeRange range) { - final long now = Instant.now().toEpochMilli(); - return this.query(resource, duration, convertRangeToStartTime(range, now), now, null); - } - - private void ingest(@Nonnull UsageAggregation bucket) { - // TODO attempt to resolve users into emails - _usageService.upsertDocument(bucket); - } - - @Nonnull - Long convertRangeToStartTime(@Nonnull UsageTimeRange range, long currentEpochMillis) { - // TRICKY: since start_time must be before the bucket's start, we actually - // need to subtract extra from the current time to ensure that we get precisely - // what we're looking for. Note that start_time and end_time are both inclusive, - // so we must also do an off-by-one adjustment. - final long oneHourMillis = 60 * 60 * 1000; - final long oneDayMillis = 24 * oneHourMillis; - - if (range == UsageTimeRange.HOUR) { - return currentEpochMillis - (2 * oneHourMillis + 1); - } else if (range == UsageTimeRange.DAY) { - return currentEpochMillis - (2 * oneDayMillis + 1); - } else if (range == UsageTimeRange.WEEK) { - return currentEpochMillis - (8 * oneDayMillis + 1); - } else if (range == UsageTimeRange.MONTH) { - // Assuming month is last 30 days. - return currentEpochMillis - (31 * oneDayMillis + 1); - } else if (range == UsageTimeRange.QUARTER) { - // Assuming a quarter is 91 days. 
- return currentEpochMillis - (92 * oneDayMillis + 1); - } else if (range == UsageTimeRange.YEAR) { - return currentEpochMillis - (366 * oneDayMillis + 1); - } else if (range == UsageTimeRange.ALL) { - return 0L; - } else { - throw new IllegalArgumentException("invalid UsageTimeRange enum state: " + range.name()); - } - } -} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java b/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java index 436c7ae5d77b5..2d4e355a93e53 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java @@ -44,18 +44,34 @@ public static RestClient getRestLiD2Client(@Nonnull String restLiClientD2ZkHost, @Nonnull public static RestClient getRestLiClient(@Nonnull String restLiServerHost, int restLiServerPort, boolean useSSL, @Nullable String sslProtocol) { + return getRestLiClient(restLiServerHost, restLiServerPort, useSSL, sslProtocol, null); + } + + @Nonnull + public static RestClient getRestLiClient(@Nonnull String restLiServerHost, int restLiServerPort, boolean useSSL, + @Nullable String sslProtocol, @Nullable Map params) { return getRestLiClient( URI.create(String.format("%s://%s:%s", useSSL ? "https" : "http", restLiServerHost, restLiServerPort)), - sslProtocol); + sslProtocol, + params); } @Nonnull public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String sslProtocol) { + return getRestLiClient(gmsUri, sslProtocol, null); + } + + @Nonnull + public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String sslProtocol, + @Nullable Map inputParams) { if (StringUtils.isBlank(gmsUri.getHost()) || gmsUri.getPort() <= 0) { throw new InvalidParameterException("Invalid restli server host name or port!"); } Map params = new HashMap<>(); + if (inputParams != null) { + params.putAll(inputParams); + } if ("https".equals(gmsUri.getScheme())) { try { From bc89a1c48ef8b60e9b2a3bf07968ebc20a1a5caf Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 17 Nov 2023 10:06:26 -0600 Subject: [PATCH 124/792] fix(graphql): prevent duplicate index queries for dataproducts (#9260) --- .../resolvers/dataproduct/ListDataProductAssetsResolver.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index e727ebe185838..831d449bef9ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -79,11 +79,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) } // 2. Get list of entities that we should query based on filters or assets from aspect. - List entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).collect(Collectors.toList()); + List entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); final List inputEntityTypes = (input.getTypes() == null || input.getTypes().isEmpty()) ? 
ImmutableList.of() : input.getTypes(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).distinct().collect(Collectors.toList()); final List finalEntityNames = inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; From 2b0811b9875d7d7ea11fb01d0157a21fdd67f020 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 17 Nov 2023 10:50:13 -0600 Subject: [PATCH 125/792] logging(search): log level highlight value urn detection (#9262) --- .../com/linkedin/datahub/graphql/types/mappers/MapperUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 5ba32b0c2a77c..2a615b24eaac2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -70,7 +70,7 @@ public static List getMatchedFieldEntry(List Date: Fri, 17 Nov 2023 14:57:30 -0600 Subject: [PATCH 126/792] docs(development): Add Python version in Developer README (#9268) --- .gitignore | 1 + docs/developers.md | 1 + 2 files changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 49ab5c475096c..1fcca8751131f 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ venv.bak/ dmypy.json MANIFEST *.pyc +.python-version # Generated files **/bin diff --git a/docs/developers.md b/docs/developers.md index c5aaa9e28ca87..52fd7d356a44c 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -6,6 +6,7 @@ title: "Local Development" ## Pre-requirements - [Java 11 SDK](https://openjdk.org/projects/jdk/11/) + - [Python 3.10] (https://www.python.org/downloads/release/python-3100/) - [Docker](https://www.docker.com/) - [Docker Compose](https://docs.docker.com/compose/) - Docker engine with at least 8GB of memory to run tests. 
From b7df9e09968b71ff6315184e618d43ecfd251308 Mon Sep 17 00:00:00 2001 From: noggi Date: Fri, 17 Nov 2023 16:53:19 -0800 Subject: [PATCH 127/792] Sync datahub-head on merge (#9267) --- .github/scripts/docker_helpers.sh | 6 +++--- .github/workflows/docker-unified.yml | 24 ++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/.github/scripts/docker_helpers.sh b/.github/scripts/docker_helpers.sh index f238c5c409184..334465532db06 100755 --- a/.github/scripts/docker_helpers.sh +++ b/.github/scripts/docker_helpers.sh @@ -12,15 +12,15 @@ export SHORT_SHA=$(get_short_sha) echo "SHORT_SHA: $SHORT_SHA" function get_tag { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}\,${SHORT_SHA},g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG},g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1,g'),${SHORT_SHA} } function get_tag_slim { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim\,${SHORT_SHA}-slim,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g'),${SHORT_SHA}-slim } function get_tag_full { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full\,${SHORT_SHA}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g'),${SHORT_SHA}-full } function get_python_docker_release_v { diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 18cb946b951dd..35cd5363293f8 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -46,6 +46,9 @@ jobs: unique_full_tag: ${{ steps.tag.outputs.unique_full_tag }} publish: ${{ steps.publish.outputs.publish }} python_release_version: ${{ steps.tag.outputs.python_release_version }} + short_sha: ${{ steps.tag.outputs.short_sha }} + branch_name: ${{ steps.tag.outputs.branch_name }} + repository_name: ${{ steps.tag.outputs.repository_name }} steps: - name: Checkout uses: actions/checkout@v3 @@ -53,6 +56,7 @@ jobs: id: tag run: | source .github/scripts/docker_helpers.sh + echo "short_sha=${SHORT_SHA}" >> $GITHUB_OUTPUT echo "tag=$(get_tag)" >> $GITHUB_OUTPUT echo "slim_tag=$(get_tag_slim)" >> $GITHUB_OUTPUT echo "full_tag=$(get_tag_full)" >> $GITHUB_OUTPUT @@ -60,6 +64,8 @@ jobs: echo "unique_slim_tag=$(get_unique_tag_slim)" >> $GITHUB_OUTPUT echo "unique_full_tag=$(get_unique_tag_full)" >> $GITHUB_OUTPUT echo "python_release_version=$(get_python_docker_release_v)" >> $GITHUB_OUTPUT + echo "branch_name=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + echo "repository_name=${GITHUB_REPOSITORY#*/}" >> $GITHUB_OUTPUT - name: Check whether publishing enabled id: publish env: @@ -860,3 +866,21 @@ jobs: job-status: ${{ job.status }} slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} channel: github-activities + deploy_datahub_head: + name: Deploy to Datahub HEAD + runs-on: ubuntu-latest + needs: + [ + setup, + smoke_test + ] + steps: + - uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }} + 
aws-region: us-west-2 + - uses: isbang/sqs-action@v0.2.0 + with: + sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }} + message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}' From a704290be419280618a6acc7c0cc440bc083c667 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Mon, 20 Nov 2023 10:06:21 +0530 Subject: [PATCH 128/792] PRD-742/fix:Settings tab should have 2 scrollable sections (#9218) --- .../src/app/entity/ownership/ManageOwnership.tsx | 9 ++++++++- datahub-web-react/src/app/entity/view/ManageViews.tsx | 9 ++++++++- .../src/app/identity/ManageIdentities.tsx | 9 ++++++++- .../src/app/identity/group/GroupList.tsx | 9 ++++++++- datahub-web-react/src/app/identity/user/UserList.tsx | 9 ++++++++- .../src/app/permissions/ManagePermissions.tsx | 7 ++++++- .../src/app/permissions/policy/ManagePolicies.tsx | 9 ++++++++- .../src/app/permissions/roles/ManageRoles.tsx | 9 ++++++++- datahub-web-react/src/app/settings/AccessTokens.tsx | 3 +++ datahub-web-react/src/app/settings/SettingsPage.tsx | 9 +++++++-- .../src/app/settings/posts/ManagePosts.tsx | 10 ++++++++-- .../src/app/settings/posts/PostsList.tsx | 6 +++++- datahub-web-react/src/app/shared/RoutedTabs.tsx | 11 +++++++++-- 13 files changed, 94 insertions(+), 15 deletions(-) diff --git a/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx b/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx index cff10a3d96b30..a3304ab015faa 100644 --- a/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx +++ b/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx @@ -6,6 +6,9 @@ import { OwnershipList } from './OwnershipList'; const PageContainer = styled.div` padding-top: 20px; width: 100%; + display: flex; + flex-direction: column; + overflow: auto; `; const PageHeaderContainer = styled.div` @@ -20,7 +23,11 @@ const PageTitle = styled(Typography.Title)` } `; -const ListContainer = styled.div``; +const ListContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; /** * Component used for displaying the 'Manage Ownership' experience. diff --git a/datahub-web-react/src/app/entity/view/ManageViews.tsx b/datahub-web-react/src/app/entity/view/ManageViews.tsx index b31d3869ab158..f32c479e00bea 100644 --- a/datahub-web-react/src/app/entity/view/ManageViews.tsx +++ b/datahub-web-react/src/app/entity/view/ManageViews.tsx @@ -6,6 +6,9 @@ import { ViewsList } from './ViewsList'; const PageContainer = styled.div` padding-top: 20px; width: 100%; + display: flex; + flex-direction: column; + overflow: auto; `; const PageHeaderContainer = styled.div` @@ -20,7 +23,11 @@ const PageTitle = styled(Typography.Title)` } `; -const ListContainer = styled.div``; +const ListContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; /** * Component used for displaying the 'Manage Views' experience. 
diff --git a/datahub-web-react/src/app/identity/ManageIdentities.tsx b/datahub-web-react/src/app/identity/ManageIdentities.tsx index 907687cec36bc..7aafe2676f3f6 100644 --- a/datahub-web-react/src/app/identity/ManageIdentities.tsx +++ b/datahub-web-react/src/app/identity/ManageIdentities.tsx @@ -8,6 +8,10 @@ import { UserList } from './user/UserList'; const PageContainer = styled.div` padding-top: 20px; width: 100%; + overflow: auto; + flex: 1; + display: flex; + flex-direction: column; `; const PageHeaderContainer = styled.div` @@ -23,11 +27,14 @@ const PageTitle = styled(Typography.Title)` `; const Content = styled.div` + display: flex; + flex-direction: column; + overflow: auto; &&& .ant-tabs-nav { margin: 0; } color: #262626; - height: calc(100vh - 60px); + // height: calc(100vh - 60px); &&& .ant-tabs > .ant-tabs-nav .ant-tabs-nav-wrap { padding-left: 28px; diff --git a/datahub-web-react/src/app/identity/group/GroupList.tsx b/datahub-web-react/src/app/identity/group/GroupList.tsx index 5ef77b4dfc8a8..788b9eccafc0a 100644 --- a/datahub-web-react/src/app/identity/group/GroupList.tsx +++ b/datahub-web-react/src/app/identity/group/GroupList.tsx @@ -17,9 +17,16 @@ import { GROUPS_CREATE_GROUP_ID, GROUPS_INTRO_ID } from '../../onboarding/config import { OnboardingTour } from '../../onboarding/OnboardingTour'; import { addGroupToListGroupsCache, DEFAULT_GROUP_LIST_PAGE_SIZE, removeGroupFromListGroupsCache } from './cacheUtils'; -const GroupContainer = styled.div``; +const GroupContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; const GroupStyledList = styled(List)` + display: flex; + flex-direction: column; + overflow: auto; &&& { width: 100%; border-color: ${(props) => props.theme.styles['border-color-base']}; diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index e50005b08377e..dce3aa2c68a8d 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -25,9 +25,16 @@ import { useUpdateEducationStepIdsAllowlist } from '../../onboarding/useUpdateEd import { DEFAULT_USER_LIST_PAGE_SIZE, removeUserFromListUsersCache } from './cacheUtils'; import { useUserContext } from '../../context/useUserContext'; -const UserContainer = styled.div``; +const UserContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; const UserStyledList = styled(List)` + display: flex; + flex-direction: column; + overflow: auto; &&& { width: 100%; border-color: ${(props) => props.theme.styles['border-color-base']}; diff --git a/datahub-web-react/src/app/permissions/ManagePermissions.tsx b/datahub-web-react/src/app/permissions/ManagePermissions.tsx index 7bf517e153fa8..737af3fdee3e4 100644 --- a/datahub-web-react/src/app/permissions/ManagePermissions.tsx +++ b/datahub-web-react/src/app/permissions/ManagePermissions.tsx @@ -8,6 +8,9 @@ import { ManageRoles } from './roles/ManageRoles'; const PageContainer = styled.div` padding-top: 20px; width: 100%; + display: flex; + flex-direction: column; + overflow: auto; `; const PageHeaderContainer = styled.div` @@ -27,7 +30,9 @@ const Content = styled.div` margin: 0; } color: #262626; - height: calc(100vh - 60px); + display: flex; + flex-direction: column; + overflow: auto; &&& .ant-tabs > .ant-tabs-nav .ant-tabs-nav-wrap { padding-left: 28px; diff --git a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx 
b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx
index 49b0ec922fd57..2f0c284fc4e8f 100644
--- a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx
+++ b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx
@@ -38,7 +38,11 @@ import analytics, { EventType } from '../../analytics';
 import { POLICIES_CREATE_POLICY_ID, POLICIES_INTRO_ID } from '../../onboarding/config/PoliciesOnboardingConfig';
 import { OnboardingTour } from '../../onboarding/OnboardingTour';

-const SourceContainer = styled.div``;
+const SourceContainer = styled.div`
+  overflow: auto;
+  display: flex;
+  flex-direction: column;
+`;

 const PaginationContainer = styled.div`
   display: flex;
@@ -75,6 +79,9 @@ const EditPolicyButton = styled(Button)`

 const PageContainer = styled.span`
   width: 100%;
+  display: flex;
+  flex-direction: column;
+  overflow: auto;
 `;

 const DEFAULT_PAGE_SIZE = 10;
diff --git a/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx b/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx
index 011109e2eb915..4982be27fa421 100644
--- a/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx
+++ b/datahub-web-react/src/app/permissions/roles/ManageRoles.tsx
@@ -21,7 +21,11 @@ import { OnboardingTour } from '../../onboarding/OnboardingTour';
 import { ROLES_INTRO_ID } from '../../onboarding/config/RolesOnboardingConfig';
 import { clearUserListCache } from '../../identity/user/cacheUtils';

-const SourceContainer = styled.div``;
+const SourceContainer = styled.div`
+  overflow: auto;
+  display: flex;
+  flex-direction: column;
+`;

 const PaginationContainer = styled.div`
   display: flex;
@@ -35,6 +39,9 @@ const RoleName = styled.span`

 const PageContainer = styled.span`
   width: 100%;
+  display: flex;
+  flex-direction: column;
+  overflow: auto;
 `;

 const ActionsContainer = styled.div`
diff --git a/datahub-web-react/src/app/settings/AccessTokens.tsx b/datahub-web-react/src/app/settings/AccessTokens.tsx
index c7a015de392da..2c986f90aba48 100644
--- a/datahub-web-react/src/app/settings/AccessTokens.tsx
+++ b/datahub-web-react/src/app/settings/AccessTokens.tsx
@@ -21,6 +21,9 @@ const SourceContainer = styled.div`
   padding-top: 20px;
   padding-right: 40px;
   padding-left: 40px;
+  display: flex;
+  flex-direction: column;
+  overflow: auto;
 `;

 const TokensContainer = styled.div`
diff --git a/datahub-web-react/src/app/settings/SettingsPage.tsx b/datahub-web-react/src/app/settings/SettingsPage.tsx
index 06592656ac719..aba054a7fb94f 100644
--- a/datahub-web-react/src/app/settings/SettingsPage.tsx
+++ b/datahub-web-react/src/app/settings/SettingsPage.tsx
@@ -24,12 +24,15 @@ import ManagePosts from './posts/ManagePosts';

 const PageContainer = styled.div`
     display: flex;
+    overflow: auto;
 `;

 const SettingsBarContainer = styled.div`
     padding-top: 20px;
-    min-height: 100vh;
+    max-height: 100vh;
     border-right: 1px solid ${ANTD_GRAY[5]};
+    display: flex;
+    flex-direction: column;
 `;

 const SettingsBarHeader = styled.div`
@@ -54,6 +57,8 @@ const ItemTitle = styled.span`
     margin-left: 8px;
 `;

+const menuStyle = { width: 256, marginTop: 8, overflow: 'hidden auto' };
+
 /**
  * URL Paths for each settings page.
*/ @@ -108,7 +113,7 @@ export const SettingsPage = () => { { history.replace(`${url}/${newPath.key}`); diff --git a/datahub-web-react/src/app/settings/posts/ManagePosts.tsx b/datahub-web-react/src/app/settings/posts/ManagePosts.tsx index e0f694c192c62..0c9e9a7a595b5 100644 --- a/datahub-web-react/src/app/settings/posts/ManagePosts.tsx +++ b/datahub-web-react/src/app/settings/posts/ManagePosts.tsx @@ -6,7 +6,9 @@ import { PostList } from './PostsList'; const PageContainer = styled.div` padding-top: 20px; width: 100%; - height: 100%; + display: flex; + flex-direction: column; + overflow: auto; `; const PageHeaderContainer = styled.div` @@ -21,7 +23,11 @@ const PageTitle = styled(Typography.Title)` } `; -const ListContainer = styled.div``; +const ListContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; export default function ManagePosts() { return ( diff --git a/datahub-web-react/src/app/settings/posts/PostsList.tsx b/datahub-web-react/src/app/settings/posts/PostsList.tsx index 5ae2be1547f9b..849a3765a94b0 100644 --- a/datahub-web-react/src/app/settings/posts/PostsList.tsx +++ b/datahub-web-react/src/app/settings/posts/PostsList.tsx @@ -17,7 +17,11 @@ import { SearchBar } from '../../search/SearchBar'; import { StyledTable } from '../../entity/shared/components/styled/StyledTable'; import { POST_TYPE_TO_DISPLAY_TEXT } from './constants'; -const PostsContainer = styled.div``; +const PostsContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; export const PostsPaginationContainer = styled.div` display: flex; diff --git a/datahub-web-react/src/app/shared/RoutedTabs.tsx b/datahub-web-react/src/app/shared/RoutedTabs.tsx index b349ef759940c..574f372e53513 100644 --- a/datahub-web-react/src/app/shared/RoutedTabs.tsx +++ b/datahub-web-react/src/app/shared/RoutedTabs.tsx @@ -3,6 +3,7 @@ import { Route, Switch, useRouteMatch, useLocation } from 'react-router-dom'; import { Redirect, useHistory } from 'react-router'; import { Tabs } from 'antd'; import { TabsProps } from 'antd/lib/tabs'; +import styled from 'styled-components'; const { TabPane } = Tabs; @@ -19,6 +20,12 @@ interface Props extends TabsProps { onTabChange?: (selectedTab: string) => void; } +const RoutedTabsStyle = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; + /** * A tab view where each tab is associated with a route mounted on top of the current path. * This permits direct navigation to a particular tab via URL. @@ -33,7 +40,7 @@ export const RoutedTabs = ({ defaultPath, tabs, onTabChange, ...props }: Props) const providedPath = splitPathName[splitPathName.length - 1]; const activePath = subRoutes.includes(providedPath) ? providedPath : defaultPath.replace('/', ''); return ( -
+ ))} -
+ ); }; From 1ad4f961aa8a61a6c31add573ac486895787978e Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Mon, 20 Nov 2023 18:02:49 +0900 Subject: [PATCH 129/792] feat: add ingestion overview pages (#9210) --- docs-website/sidebars.js | 34 ++-- docs/cli.md | 61 +++++++ metadata-ingestion/README.md | 235 +++----------------------- metadata-ingestion/cli-ingestion.md | 59 +++++++ metadata-ingestion/recipe_overview.md | 124 ++++++++++++++ metadata-ingestion/sink_overview.md | 33 ++++ metadata-ingestion/source_overview.md | 37 ++++ 7 files changed, 359 insertions(+), 224 deletions(-) create mode 100644 metadata-ingestion/cli-ingestion.md create mode 100644 metadata-ingestion/recipe_overview.md create mode 100644 metadata-ingestion/sink_overview.md create mode 100644 metadata-ingestion/source_overview.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 801e0fbd07d36..be12aa3a827f2 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -30,17 +30,16 @@ module.exports = { ], }, { - Integrations: [ + type: "category", + label: "Integrations", + link: { type: "doc", id: "metadata-ingestion/README" }, + items: [ // The purpose of this section is to provide a deeper understanding of how ingestion works. // Readers should be able to find details for ingesting from all systems, apply transformers, understand sinks, // and understand key concepts of the Ingestion Framework (Sources, Sinks, Transformers, and Recipes) - { - type: "doc", - label: "Introduction", - id: "metadata-ingestion/README", - }, { "Quickstart Guides": [ + "metadata-ingestion/cli-ingestion", { BigQuery: [ "docs/quick-ingestion-guides/bigquery/overview", @@ -85,15 +84,18 @@ module.exports = { }, ], }, + "metadata-ingestion/recipe_overview", { - Sources: [ + type: "category", + label: "Sources", + link: { type: "doc", id: "metadata-ingestion/source_overview" }, + items: [ // collapse these; add push-based at top { type: "doc", id: "docs/lineage/airflow", label: "Airflow", }, - //"docker/airflow/local_airflow", "metadata-integration/java/spark-lineage/README", "metadata-ingestion/integration_docs/great-expectations", @@ -106,7 +108,10 @@ module.exports = { ], }, { - Sinks: [ + type: "category", + label: "Sinks", + link: { type: "doc", id: "metadata-ingestion/sink_overview" }, + items: [ { type: "autogenerated", dirName: "metadata-ingestion/sink_docs", @@ -114,10 +119,13 @@ module.exports = { ], }, { - Transformers: [ - "metadata-ingestion/docs/transformer/intro", - "metadata-ingestion/docs/transformer/dataset_transformer", - ], + type: "category", + label: "Transformers", + link: { + type: "doc", + id: "metadata-ingestion/docs/transformer/intro", + }, + items: ["metadata-ingestion/docs/transformer/dataset_transformer"], }, { "Advanced Guides": [ diff --git a/docs/cli.md b/docs/cli.md index 7dfac1e9b2bff..8845ed5a6dac7 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -99,6 +99,36 @@ Command Options: --strict-warnings If enabled, ingestion runs with warnings will yield a non-zero error code --test-source-connection When set, ingestion will only test the source connection details from the recipe ``` +#### ingest --dry-run + +The `--dry-run` option of the `ingest` command performs all of the ingestion steps, except writing to the sink. This is useful to validate that the +ingestion recipe is producing the desired metadata events before ingesting them into datahub. 
+
+```shell
+# Dry run
+datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml --dry-run
+# Short-form
+datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml -n
+```
+
+#### ingest --preview
+
+The `--preview` option of the `ingest` command performs all of the ingestion steps, but limits the processing to only the first 10 workunits produced by the source.
+This option helps with quick end-to-end smoke testing of the ingestion recipe.
+
+```shell
+# Preview
+datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml --preview
+# Preview with dry-run
+datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml -n --preview
+```
+
+By default `--preview` creates 10 workunits. If you wish to produce more workunits, use the `--preview-workunits` option:
+
+```shell
+# Preview 20 workunits without sending anything to sink
+datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml -n --preview --preview-workunits=20
+```

 #### ingest deploy

@@ -115,6 +145,37 @@ To update an existing recipe please use the `--urn` parameter to specify the id

 **Note:** Updating a recipe will result in a replacement of the existing options with what was specified in the cli command. I.e: Not specifying a schedule in the cli update command will remove the schedule from the recipe to be updated.

+#### ingest --no-default-report
+By default, the cli sends an ingestion report to DataHub, which allows you to see the result of all cli-based ingestion in the UI. This can be turned off with the `--no-default-report` flag.
+
+```shell
+# Running ingestion with reporting to DataHub turned off
+datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yaml --no-default-report
+```
+
+The reports include the recipe that was used for ingestion. This can be turned off by adding an additional section to the ingestion recipe.
+
+```yaml
+source:
+  # source configs
+
+sink:
+  # sink configs
+
+# Add configuration for the datahub reporter
+reporting:
+  - type: datahub
+    config:
+      report_recipe: false
+
+# Optional log to put failed JSONs into a file
+# Helpful in case you are trying to debug some issue with specific ingestion failing
+failure_log:
+  enabled: false
+  log_config:
+    filename: ./path/to/failure.json
+```

 ### init

 The init command is used to tell `datahub` about where your DataHub instance is located. The CLI will point to localhost DataHub by default.
diff --git a/metadata-ingestion/README.md b/metadata-ingestion/README.md
index a0fef614528cb..54478fddbe2d0 100644
--- a/metadata-ingestion/README.md
+++ b/metadata-ingestion/README.md
@@ -1,228 +1,41 @@
 # Introduction to Metadata Ingestion

-
-  Find Integration Source
-
-
-## Integration Options
-
-DataHub supports both **push-based** and **pull-based** metadata integration.
-
-Push-based integrations allow you to emit metadata directly from your data systems when metadata changes, while pull-based integrations allow you to "crawl" or "ingest" metadata from the data systems by connecting to them and extracting metadata in a batch or incremental-batch manner. Supporting both mechanisms means that you can integrate with all your systems in the most flexible way possible.
-
-Examples of push-based integrations include [Airflow](../docs/lineage/airflow.md), [Spark](../metadata-integration/java/spark-lineage/README.md), [Great Expectations](./integration_docs/great-expectations.md) and [Protobuf Schemas](../metadata-integration/java/datahub-protobuf/README.md).
This allows you to get low-latency metadata integration from the "active" agents in your data ecosystem. Examples of pull-based integrations include BigQuery, Snowflake, Looker, Tableau and many others. - -This document describes the pull-based metadata ingestion system that is built into DataHub for easy integration with a wide variety of sources in your data stack. - -## Getting Started - -### Prerequisites - -Before running any metadata ingestion job, you should make sure that DataHub backend services are all running. You can either run ingestion via the [UI](../docs/ui-ingestion.md) or via the [CLI](../docs/cli.md). You can reference the CLI usage guide given there as you go through this page. - -## Core Concepts - -### Sources - -Please see our [Integrations page](https://datahubproject.io/integrations) to browse our ingestion sources and filter on their features. - -Data systems that we are extracting metadata from are referred to as **Sources**. The `Sources` tab on the left in the sidebar shows you all the sources that are available for you to ingest metadata from. For example, we have sources for [BigQuery](https://datahubproject.io/docs/generated/ingestion/sources/bigquery), [Looker](https://datahubproject.io/docs/generated/ingestion/sources/looker), [Tableau](https://datahubproject.io/docs/generated/ingestion/sources/tableau) and many others. - -#### Metadata Ingestion Source Status - -We apply a Support Status to each Metadata Source to help you understand the integration reliability at a glance. - -![Certified](https://img.shields.io/badge/support%20status-certified-brightgreen): Certified Sources are well-tested & widely-adopted by the DataHub Community. We expect the integration to be stable with few user-facing issues. - -![Incubating](https://img.shields.io/badge/support%20status-incubating-blue): Incubating Sources are ready for DataHub Community adoption but have not been tested for a wide variety of edge-cases. We eagerly solicit feedback from the Community to streghten the connector; minor version changes may arise in future releases. - -![Testing](https://img.shields.io/badge/support%20status-testing-lightgrey): Testing Sources are available for experiementation by DataHub Community members, but may change without notice. - -### Sinks - -Sinks are destinations for metadata. When configuring ingestion for DataHub, you're likely to be sending the metadata to DataHub over either the [REST (datahub-sink)](./sink_docs/datahub.md#datahub-rest) or the [Kafka (datahub-kafka)](./sink_docs/datahub.md#datahub-kafka) sink. In some cases, the [File](./sink_docs/file.md) sink is also helpful to store a persistent offline copy of the metadata during debugging. - -The default sink that most of the ingestion systems and guides assume is the `datahub-rest` sink, but you should be able to adapt all of them for the other sinks as well! - -### Recipes - -A recipe is the main configuration file that puts it all together. It tells our ingestion scripts where to pull data from (source) and where to put it (sink). - -:::tip -Name your recipe with **.dhub.yaml** extension like _myrecipe.dhub.yaml_ to use vscode or intellij as a recipe editor with autocomplete -and syntax validation. 
- -Make sure yaml plugin is installed for your editor: - -- For vscode install [Redhat's yaml plugin](https://marketplace.visualstudio.com/items?itemName=redhat.vscode-yaml) -- For intellij install [official yaml plugin](https://plugins.jetbrains.com/plugin/13126-yaml) - +:::tip Find Integration Source +Please see our **[Integrations page](https://datahubproject.io/integrations)** to browse our ingestion sources and filter on their features. ::: -Since `acryl-datahub` version `>=0.8.33.2`, the default sink is assumed to be a DataHub REST endpoint: - -- Hosted at "http://localhost:8080" or the environment variable `${DATAHUB_GMS_URL}` if present -- With an empty auth token or the environment variable `${DATAHUB_GMS_TOKEN}` if present. - -Here's a simple recipe that pulls metadata from MSSQL (source) and puts it into the default sink (datahub rest). - -```yaml -# The simplest recipe that pulls metadata from MSSQL and puts it into DataHub -# using the Rest API. -source: - type: mssql - config: - username: sa - password: ${MSSQL_PASSWORD} - database: DemoData -# sink section omitted as we want to use the default datahub-rest sink -``` - -Running this recipe is as simple as: +## Integration Methods -```shell -datahub ingest -c recipe.dhub.yaml -``` +DataHub offers three methods for data ingestion: -or if you want to override the default endpoints, you can provide the environment variables as part of the command like below: +- [UI Ingestion](../docs/ui-ingestion.md) : Easily configure and execute a metadata ingestion pipeline through the UI. +- [CLI Ingestion guide](cli-ingestion.md) : Configure the ingestion pipeline using YAML and execute by it through CLI. +- SDK-based ingestion : Use [Python Emitter](./as-a-library.md) or [Java emitter](../metadata-integration/java/as-a-library.md) to programmatically control the ingestion pipelines. -```shell -DATAHUB_GMS_URL="https://my-datahub-server:8080" DATAHUB_GMS_TOKEN="my-datahub-token" datahub ingest -c recipe.dhub.yaml -``` +## Types of Integration -A number of recipes are included in the [examples/recipes](./examples/recipes) directory. For full info and context on each source and sink, see the pages described in the [table of plugins](../docs/cli.md#installing-plugins). +Integration can be divided into two concepts based on the method: -> Note that one recipe file can only have 1 source and 1 sink. If you want multiple sources then you will need multiple recipe files. +### Push-based Integration -### Handling sensitive information in recipes +Push-based integrations allow you to emit metadata directly from your data systems when metadata changes. +Examples of push-based integrations include [Airflow](../docs/lineage/airflow.md), [Spark](../metadata-integration/java/spark-lineage/README.md), [Great Expectations](./integration_docs/great-expectations.md) and [Protobuf Schemas](../metadata-integration/java/datahub-protobuf/README.md). This allows you to get low-latency metadata integration from the "active" agents in your data ecosystem. -We automatically expand environment variables in the config (e.g. `${MSSQL_PASSWORD}`), -similar to variable substitution in GNU bash or in docker-compose files. For details, see -https://docs.docker.com/compose/compose-file/compose-file-v2/#variable-substitution. This environment variable substitution should be used to mask sensitive information in recipe files. As long as you can get env variables securely to the ingestion process there would not be any need to store sensitive information in recipes. 
+### Pull-based Integration -### Basic Usage of CLI for ingestion +Pull-based integrations allow you to "crawl" or "ingest" metadata from the data systems by connecting to them and extracting metadata in a batch or incremental-batch manner. +Examples of pull-based integrations include BigQuery, Snowflake, Looker, Tableau and many others. -```shell -pip install 'acryl-datahub[datahub-rest]' # install the required plugin -datahub ingest -c ./examples/recipes/mssql_to_datahub.dhub.yml -``` - -The `--dry-run` option of the `ingest` command performs all of the ingestion steps, except writing to the sink. This is useful to validate that the -ingestion recipe is producing the desired metadata events before ingesting them into datahub. - -```shell -# Dry run -datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml --dry-run -# Short-form -datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml -n -``` - -The `--preview` option of the `ingest` command performs all of the ingestion steps, but limits the processing to only the first 10 workunits produced by the source. -This option helps with quick end-to-end smoke testing of the ingestion recipe. - -```shell -# Preview -datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml --preview -# Preview with dry-run -datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml -n --preview -``` - -By default `--preview` creates 10 workunits. But if you wish to try producing more workunits you can use another option `--preview-workunits` - -```shell -# Preview 20 workunits without sending anything to sink -datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yml -n --preview --preview-workunits=20 -``` - -#### Reporting - -By default, the cli sends an ingestion report to DataHub, which allows you to see the result of all cli-based ingestion in the UI. This can be turned off with the `--no-default-report` flag. - -```shell -# Running ingestion with reporting to DataHub turned off -datahub ingest -c ./examples/recipes/example_to_datahub_rest.dhub.yaml --no-default-report -``` - -The reports include the recipe that was used for ingestion. This can be turned off by adding an additional section to the ingestion recipe. - -```yaml -source: - # source configs - -sink: - # sink configs - -# Add configuration for the datahub reporter -reporting: - - type: datahub - config: - report_recipe: false - -# Optional log to put failed JSONs into a file -# Helpful in case you are trying to debug some issue with specific ingestion failing -failure_log: - enabled: false - log_config: - filename: ./path/to/failure.json -``` - -#### Deploying and scheduling ingestion to the UI - -The `deploy` subcommand of the `ingest` command tree allows users to upload their recipes and schedule them in the server. - -```shell -datahub ingest deploy -n -c recipe.yaml -``` - -By default, no schedule is done unless explicitly configured with the `--schedule` parameter. Schedule timezones are UTC by default and can be overriden with `--time-zone` flag. -```shell -datahub ingest deploy -n test --schedule "0 * * * *" --time-zone "Europe/London" -c recipe.yaml -``` - -## Transformations - -If you'd like to modify data before it reaches the ingestion sinks – for instance, adding additional owners or tags – you can use a transformer to write your own module and integrate it with DataHub. Transformers require extending the recipe with a new section to describe the transformers that you want to run. 
-
-For example, a pipeline that ingests metadata from MSSQL and applies a default "important" tag to all datasets is described below:
-
-```yaml
-# A recipe to ingest metadata from MSSQL and apply default tags to all tables
-source:
-  type: mssql
-  config:
-    username: sa
-    password: ${MSSQL_PASSWORD}
-    database: DemoData
-
-transformers: # an array of transformers applied sequentially
-  - type: simple_add_dataset_tags
-    config:
-      tag_urns:
-        - "urn:li:tag:Important"
-# default sink, no config needed
-```
-
-Check out the [transformers guide](./docs/transformer/intro.md) to learn more about how you can create really flexible pipelines for processing metadata using Transformers!
-
-## Using as a library (SDK)
-
-In some cases, you might want to construct Metadata events directly and use programmatic ways to emit that metadata to DataHub. In this case, take a look at the [Python emitter](./as-a-library.md) and the [Java emitter](../metadata-integration/java/as-a-library.md) libraries which can be called from your own code.
-
-### Programmatic Pipeline
-
-In some cases, you might want to configure and run a pipeline entirely from within your custom Python script. Here is an example of how to do it.
-
-- [programmatic_pipeline.py](./examples/library/programatic_pipeline.py) - a basic mysql to REST programmatic pipeline.
-
-## Developing
-
-See the guides on [developing](./developing.md), [adding a source](./adding-source.md) and [using transformers](./docs/transformer/intro.md).
-
-## Compatibility
-
-DataHub server uses a 3 digit versioning scheme, while the CLI uses a 4 digit scheme. For example, if you're using DataHub server version 0.10.0, you should use CLI version 0.10.0.x, where x is a patch version.
-We do this because we do CLI releases at a much higher frequency than server releases, usually every few days vs twice a month.
+## Core Concepts

-For ingestion sources, any breaking changes will be highlighted in the [release notes](../docs/how/updating-datahub.md). When fields are deprecated or otherwise changed, we will try to maintain backwards compatibility for two server releases, which is about 4-6 weeks. The CLI will also print warnings whenever deprecated options are used.
+The following are the core concepts related to ingestion:

+- [Sources](source_overview.md): Data systems from which we extract metadata. (e.g. BigQuery, MySQL)
+- [Sinks](sink_overview.md): Destinations for metadata (e.g. File, DataHub)
+- [Recipe](recipe_overview.md): The main configuration for ingestion, in the form of a .yaml file

+For more advanced guides, please refer to the following:

+- [Developing on Metadata Ingestion](./developing.md)
+- [Adding a Metadata Ingestion Source](./adding-source.md)
+- [Using Transformers](./docs/transformer/intro.md)
diff --git a/metadata-ingestion/cli-ingestion.md b/metadata-ingestion/cli-ingestion.md
new file mode 100644
index 0000000000000..cbdde2cd30167
--- /dev/null
+++ b/metadata-ingestion/cli-ingestion.md
@@ -0,0 +1,59 @@
+# CLI Ingestion
+
+## Installing the CLI
+
+Make sure you have installed DataHub CLI before following this guide.
+```shell
+# Requires Python 3.7+
+python3 -m pip install --upgrade pip wheel setuptools
+python3 -m pip install --upgrade acryl-datahub
+# validate that the install was successful
+datahub version
+# If you see "command not found", try running this instead: python3 -m datahub version
+```
+Check out the [CLI Installation Guide](../docs/cli.md#installation) for more installation options and troubleshooting tips.
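A quick sanity check at this point — a sketch assuming the `datahub` binary is on your `PATH` — is to list the source and sink plugins this install can currently load, which also tells you whether the extra you are about to install is already present:

```shell
# Show the ingestion plugins visible to this CLI install.
datahub check plugins
```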
+
+After that, install the required plugin for the ingestion.
+
+```shell
+pip install 'acryl-datahub[datahub-rest]' # install the required plugin
+```
+Check out the [alternative installation options](../docs/cli.md#alternate-installation-options) for more reference.
+
+## Configuring a Recipe
+Create a recipe.yml file that defines the source and sink for metadata, as shown below.
+```yaml
+# my_recipe.yml
+source:
+  type: <source_type>
+  config:
+    option_1: <value>
+    ...
+
+sink:
+  type: <sink_type>
+  config:
+    ...
+```
+
+For more information and examples on configuring recipes, please refer to [Recipes](recipe_overview.md).
+
+## Ingesting Metadata
+You can run ingestion using `datahub ingest` like below.
+
+```shell
+datahub ingest -c <path/to/recipe.yml>
+```
+
+## Reference
+
+Please refer to the following pages for advanced guides on CLI ingestion.
+- [Reference for `datahub ingest` command](../docs/cli.md#ingest)
+- [UI Ingestion Guide](../docs/ui-ingestion.md)
+
+:::tip Compatibility
+DataHub server uses a 3 digit versioning scheme, while the CLI uses a 4 digit scheme. For example, if you're using DataHub server version 0.10.0, you should use CLI version 0.10.0.x, where x is a patch version.
+We do this because we do CLI releases at a much higher frequency than server releases, usually every few days vs twice a month.
+
+For ingestion sources, any breaking changes will be highlighted in the [release notes](../docs/how/updating-datahub.md). When fields are deprecated or otherwise changed, we will try to maintain backwards compatibility for two server releases, which is about 4-6 weeks. The CLI will also print warnings whenever deprecated options are used.
+:::
\ No newline at end of file
diff --git a/metadata-ingestion/recipe_overview.md b/metadata-ingestion/recipe_overview.md
new file mode 100644
index 0000000000000..a748edbf3bb44
--- /dev/null
+++ b/metadata-ingestion/recipe_overview.md
@@ -0,0 +1,124 @@
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+# Recipes
+
+A recipe is the main configuration file for metadata ingestion. It tells our ingestion scripts where to pull data from (source) and where to put it (sink).
+

+ +

+
+
+## Configuring Recipe
+
+The basic form of the recipe file consists of:
+
+- `source`, which contains the configuration of the data source. (See [Sources](source_overview.md))
+- `sink`, which defines the destination of the metadata. (See [Sinks](sink_overview.md))
+
+Here's a simple recipe that pulls metadata from MSSQL (source) and puts it into the default sink (datahub rest).
+
+```yaml
+# The simplest recipe that pulls metadata from MSSQL and puts it into DataHub
+# using the Rest API.
+source:
+  type: mssql
+  config:
+    username: sa
+    password: ${MSSQL_PASSWORD}
+    database: DemoData
+# an explicit datahub-rest sink; omit this section entirely to fall back to the default
+sink:
+  type: "datahub-rest"
+  config:
+    server: "http://localhost:8080"
+```
+
+A number of recipes are included in the [examples/recipes](./examples/recipes) directory. For full info and context on each source and sink, see the pages described in the [table of plugins](../docs/cli.md#installing-plugins).
+
+:::note One Source/Sink for One Recipe!
+Note that one recipe file can only have 1 source and 1 sink. If you want multiple sources then you will need multiple recipe files.
+:::
+
+## Running a Recipe
+
+DataHub supports running recipes via the CLI or UI.
+
+
+
+Install the CLI and the plugin for the ingestion.
+```shell
+python3 -m pip install --upgrade acryl-datahub
+pip install 'acryl-datahub[datahub-rest]'
+```
+Running this recipe is as simple as:
+
+```shell
+datahub ingest -c recipe.dhub.yaml
+```
+For a detailed guide on running recipes via CLI, please refer to [CLI Ingestion Guide](cli-ingestion.md).
+
+
+
+
+
+You can configure and run the recipe in the **Ingestion** tab in DataHub.
+
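If you have already validated a recipe locally, you can also push it into the UI scheduler from the command line via `ingest deploy` (see the `ingest deploy` section of the CLI reference); the source name and cron expression below are illustrative:

```shell
# Upload the recipe so it appears as a managed ingestion source in the UI,
# running hourly (schedule times are UTC unless overridden with --time-zone).
datahub ingest deploy -n my-mssql-source --schedule "0 * * * *" -c recipe.dhub.yaml
```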

+ +

+ +* Make sure you have the **Manage Metadata Ingestion & Manage Secret** privileges. +* Navigate to **Ingestion** tab in DataHub. +* Create an ingestion source & configure the recipe via UI. +* Hit **Execute**. + +For a detailed guide on running recipes via UI, please refer to [UI Ingestion Guide](../docs/ui-ingestion.md). + +
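However you run the recipe, the `${MSSQL_PASSWORD}` reference in the example above is resolved from the environment at load time (see Handling Sensitive Information below). A minimal sketch of supplying it for a CLI run:

```shell
# The secret lives only in the environment, never in the recipe file.
export MSSQL_PASSWORD='<your-password-here>'   # or inject via your CI/CD secret store
datahub ingest -c recipe.dhub.yaml
```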
+
+
+
+## Advanced Configuration
+
+### Handling Sensitive Information in Recipes
+
+We automatically expand environment variables in the config (e.g. `${MSSQL_PASSWORD}`),
+similar to variable substitution in GNU bash or in docker-compose files.
+For details, see [variable-substitution](https://docs.docker.com/compose/compose-file/compose-file-v2/#variable-substitution).
+This environment variable substitution should be used to mask sensitive information in recipe files. As long as you can get env variables securely to the ingestion process there would not be any need to store sensitive information in recipes.
+
+### Transformations
+
+If you'd like to modify data before it reaches the ingestion sinks – for instance, adding additional owners or tags – you can use a transformer to write your own module and integrate it with DataHub. Transformers require extending the recipe with a new section to describe the transformers that you want to run.
+
+For example, a pipeline that ingests metadata from MSSQL and applies a default "important" tag to all datasets is described below:
+
+```yaml
+# A recipe to ingest metadata from MSSQL and apply default tags to all tables
+source:
+  type: mssql
+  config:
+    username: sa
+    password: ${MSSQL_PASSWORD}
+    database: DemoData
+
+transformers: # an array of transformers applied sequentially
+  - type: simple_add_dataset_tags
+    config:
+      tag_urns:
+        - "urn:li:tag:Important"
+# default sink, no config needed
+```
+
+Check out the [transformers guide](./docs/transformer/intro.md) to learn more about how you can create really flexible pipelines for processing metadata using Transformers!
+
+### Autocomplete and Syntax Validation
+
+Name your recipe with the **.dhub.yaml** extension like `myrecipe.dhub.yaml` to use vscode or intellij as a recipe editor with autocomplete
+and syntax validation. Make sure the yaml plugin is installed for your editor:
+
+- For vscode install [Redhat's yaml plugin](https://marketplace.visualstudio.com/items?itemName=redhat.vscode-yaml)
+- For intellij install [official yaml plugin](https://plugins.jetbrains.com/plugin/13126-yaml)
diff --git a/metadata-ingestion/sink_overview.md b/metadata-ingestion/sink_overview.md
new file mode 100644
index 0000000000000..c71ba1f97932c
--- /dev/null
+++ b/metadata-ingestion/sink_overview.md
@@ -0,0 +1,33 @@
+# Sinks
+
+Sinks are **destinations for metadata**.
+

+ +

+ +In general, the sink will be defined in the [recipe](./recipe_overview.md) after the [source](./source-docs-template.md) like below. + +```yaml +source: ... + +sink: + type: + config: ... +``` + +## Types of Sinks + +When configuring ingestion for DataHub, you're likely to be sending the metadata to DataHub over either one of the following. + +- [REST (datahub-rest)](sink_docs/datahub.md#datahub-rest) +- [Kafka (datahub-kafka)](sink_docs/datahub.md#datahub-kafka) + +For debugging purposes or troubleshooting, the following sinks can be useful: + +- [File](sink_docs/file.md) +- [Console](sink_docs/console.md) + +## Default Sink + +Since `acryl-datahub` version `>=0.8.33.2`, the default sink is assumed to be a `datahub-rest` endpoint. diff --git a/metadata-ingestion/source_overview.md b/metadata-ingestion/source_overview.md new file mode 100644 index 0000000000000..9647fbdde0a0f --- /dev/null +++ b/metadata-ingestion/source_overview.md @@ -0,0 +1,37 @@ +# Sources + + +Sources are **the data systems that we are extracting metadata from.** + +

+ +

+
+In general, the source will be defined at the top of the [recipe](./recipe_overview.md) like below.
+
+
+```yaml
+#my_recipe.yml
+source:
+  type: <source_type>
+  config:
+    option_1: <value>
+    ...
+```
+
+## Types of Source
+The `Sources` tab on the left in the sidebar shows you all the sources that are available for you to ingest metadata from. For example, we have sources for [BigQuery](https://datahubproject.io/docs/generated/ingestion/sources/bigquery), [Looker](https://datahubproject.io/docs/generated/ingestion/sources/looker), [Tableau](https://datahubproject.io/docs/generated/ingestion/sources/tableau) and many others.
+
+:::tip Find an Integration Source
+See the full **[list of integrations](https://datahubproject.io/integrations)** and filter on their features.
+:::
+
+## Metadata Ingestion Source Status
+
+We apply a Support Status to each Metadata Source to help you understand the integration reliability at a glance.
+
+![Certified](https://img.shields.io/badge/support%20status-certified-brightgreen): Certified Sources are well-tested & widely-adopted by the DataHub Community. We expect the integration to be stable with few user-facing issues.
+
+![Incubating](https://img.shields.io/badge/support%20status-incubating-blue): Incubating Sources are ready for DataHub Community adoption but have not been tested for a wide variety of edge-cases. We eagerly solicit feedback from the Community to strengthen the connector; minor version changes may arise in future releases.
+
+![Testing](https://img.shields.io/badge/support%20status-testing-lightgrey): Testing Sources are available for experimentation by DataHub Community members, but may change without notice.

From 01874808afd0c65518963a22966b1e1592d75182 Mon Sep 17 00:00:00 2001
From: Tim <50115603+bossenti@users.noreply.github.com>
Date: Mon, 20 Nov 2023 16:57:48 +0100
Subject: [PATCH 130/792] fix(ingest/athena): detect decimal type correctly
 (#9270)

---
 .../src/datahub/ingestion/source/sql/athena.py      |  4 ++++
 metadata-ingestion/tests/unit/test_athena_source.py | 10 ++++++++--
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
index 75e8fe1d6f7a6..ac0e2bd4bb8a9 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
@@ -183,6 +183,10 @@ def _get_column_type(
         # can also be returned, so we need to extend the handling here as well
         elif type_name in ["bigint", "long"]:
             detected_col_type = types.BIGINT
+        elif type_name in ["decimal"]:
+            detected_col_type = types.DECIMAL
+            precision, scale = type_meta_information.split(",")
+            args = [int(precision), int(scale)]
         else:
             return super()._get_column_type(type_name)
         return detected_col_type(*args)
diff --git a/metadata-ingestion/tests/unit/test_athena_source.py b/metadata-ingestion/tests/unit/test_athena_source.py
index 23dd7dd5a6e45..875cf3800daf8 100644
--- a/metadata-ingestion/tests/unit/test_athena_source.py
+++ b/metadata-ingestion/tests/unit/test_athena_source.py
@@ -166,7 +166,6 @@ def test_get_column_type_map():

 def test_column_type_struct():
-
     result = CustomAthenaRestDialect()._get_column_type(type_="struct")

     assert isinstance(result, STRUCT)
@@ -175,8 +174,15 @@ def test_column_type_struct():
     assert isinstance(result._STRUCT_fields[0][1], types.String)


-def test_column_type_complex_combination():
+def test_column_type_decimal():
+    result = 
CustomAthenaRestDialect()._get_column_type(type_="decimal(10,2)") + + assert isinstance(result, types.DECIMAL) + assert 10 == result.precision + assert 2 == result.scale + +def test_column_type_complex_combination(): result = CustomAthenaRestDialect()._get_column_type( type_="struct>>" ) From d9de854d276c118afc55264ecc9e2712b91b4ab2 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 21 Nov 2023 07:34:34 +0530 Subject: [PATCH 131/792] fix(ui): Do not show manage in settings when no permissions for it Fix/prd 787 (#9261) --- .../src/app/settings/SettingsPage.tsx | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/datahub-web-react/src/app/settings/SettingsPage.tsx b/datahub-web-react/src/app/settings/SettingsPage.tsx index aba054a7fb94f..69d4eb2b10b4d 100644 --- a/datahub-web-react/src/app/settings/SettingsPage.tsx +++ b/datahub-web-react/src/app/settings/SettingsPage.tsx @@ -141,24 +141,25 @@ export const SettingsPage = () => { )} )} - - - {showViews && ( - - My Views - - )} - {showOwnershipTypes && ( - - Ownership Types - - )} - {showHomePagePosts && ( - - Home Page Posts - - )} - + {(showViews || showOwnershipTypes || showHomePagePosts) && ( + + {showViews && ( + + My Views + + )} + {showOwnershipTypes && ( + + Ownership Types + + )} + {showHomePagePosts && ( + + Home Page Posts + + )} + + )} From b51cfc3a408b1d923dc7c8023fc54859e34cc5ba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Nov 2023 08:59:50 -0800 Subject: [PATCH 132/792] build(deps): bump @babel/traverse from 7.22.11 to 7.23.2 in /docs-website (#9022) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs-website/yarn.lock | 73 +++++++++++++++++++++++++++++++++++------- 1 file changed, 62 insertions(+), 11 deletions(-) diff --git a/docs-website/yarn.lock b/docs-website/yarn.lock index 5698029bff70a..d06dbcbec6154 100644 --- a/docs-website/yarn.lock +++ b/docs-website/yarn.lock @@ -220,7 +220,7 @@ dependencies: tslib "~2.0.1" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.10", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.10", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": version "7.22.13" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== @@ -286,6 +286,16 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" +"@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== + dependencies: + "@babel/types" "^7.23.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" @@ -346,6 +356,11 @@ lodash.debounce "^4.0.8" resolve "^1.14.2" 
+"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + "@babel/helper-environment-visitor@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" @@ -359,6 +374,14 @@ "@babel/template" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + dependencies: + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" @@ -452,6 +475,11 @@ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-identifier@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" @@ -494,6 +522,11 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.14.tgz#c7de58e8de106e88efca42ce17f0033209dfd245" integrity sha512-1KucTHgOvaw/LzCVrEOAyXkr9rQlp0A1HiHRYnSUE9dmb8PvPW7o5sscg+5169r54n3vGlbx6GevTE/Iw/P3AQ== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" @@ -1269,19 +1302,28 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@^7.12.9", "@babel/traverse@^7.18.8", "@babel/traverse@^7.22.11": - version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.11.tgz#71ebb3af7a05ff97280b83f05f8865ac94b2027c" - integrity sha512-mzAenteTfomcB7mfPtyi+4oe5BZ6MXxWcn4CX+h4IRJ+OOGXBrWU6jDQavkQI9Vuc5P+donFabBfFCcmWka9lQ== +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity 
sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== dependencies: - "@babel/code-frame" "^7.22.10" - "@babel/generator" "^7.22.10" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-function-name" "^7.22.5" + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.12.9", "@babel/traverse@^7.18.8", "@babel/traverse@^7.22.11": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" "@babel/helper-hoist-variables" "^7.22.5" "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.22.11" - "@babel/types" "^7.22.11" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" debug "^4.1.0" globals "^11.1.0" @@ -1294,6 +1336,15 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.0.tgz#8c1f020c9df0e737e4e247c0619f58c68458aaeb" + integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== + dependencies: + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@colors/colors@1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" From 38adff869356d072f0eef080f6dc7ebaa007dc4c Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 21 Nov 2023 15:14:52 -0600 Subject: [PATCH 133/792] fix(gha): fix gha for single tag (#9283) --- .../actions/docker-custom-build-and-push/action.yml | 11 ++++++++++- .github/workflows/docker-unified.yml | 2 ++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index bd6bb842b1fb8..ca0796180cd57 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -70,11 +70,20 @@ runs: push: false cache-from: type=registry,ref=${{ steps.docker_meta.outputs.tags }} cache-to: type=inline + - name: Single Tag + if: ${{ inputs.publish != 'true' }} + shell: bash + run: | + TAGS=""" + ${{ steps.docker_meta.outputs.tags }} + """ + echo "SINGLE_TAG=$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT + id: single_tag - name: Upload image locally for testing (if not publishing) uses: ishworkh/docker-image-artifact-upload@v1 if: ${{ inputs.publish != 'true' }} with: - image: ${{ steps.docker_meta.outputs.tags }} + image: ${{ steps.single_tag.outputs.SINGLE_TAG }} # Code for building multi-platform images and pushing to Docker Hub. 
- name: Set up QEMU diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 35cd5363293f8..8bb82a0a0608c 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -876,11 +876,13 @@ jobs: ] steps: - uses: aws-actions/configure-aws-credentials@v1 + if: ${{ needs.setup.outputs.publish != 'false' }} with: aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }} aws-region: us-west-2 - uses: isbang/sqs-action@v0.2.0 + if: ${{ needs.setup.outputs.publish != 'false' }} with: sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }} message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}' From 15e68bb77199ba54b77df8ffa0df13b7c1fe16ad Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 21 Nov 2023 16:23:21 -0600 Subject: [PATCH 134/792] fix(node): fix node_options (#9281) --- datahub-web-react/build.gradle | 2 +- datahub-web-react/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index 13eabe90ee509..fd36e5ac4bc2c 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -73,7 +73,7 @@ task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnTest, yarnLint]) { } task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - environment = [NODE_OPTIONS: "--max-old-space-size=3072"] + environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] args = ['run', 'build'] } diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 5afbc8fa5892d..a72d9c0a898f8 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -95,7 +95,7 @@ "start:mock": "yarn run generate && BROWSER=none REACT_APP_MOCK=true craco start", "start:e2e": "REACT_APP_MOCK=cy BROWSER=none PORT=3010 craco start", "ec2-dev": "yarn run generate && CI=true;export CI;BROWSER=none craco start", - "build": "yarn run generate && NODE_OPTIONS=--openssl-legacy-provider CI=false REACT_APP_MOCK=false craco build && rm -rf dist/ && cp -r build/yarn/ dist/ && rm -r build/yarn/", + "build": "yarn run generate && NODE_OPTIONS='--max-old-space-size=3072 --openssl-legacy-provider' CI=false REACT_APP_MOCK=false craco build && rm -rf dist/ && cp -r build/yarn/ dist/ && rm -r build/yarn/", "test": "craco test", "pretest:e2e:ci": "yarn generate", "test:e2e": "start-server-and-test start:e2e 3010", From fd129c7d5d7efd205a0d1ddf3544a9367fb0fe80 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Wed, 22 Nov 2023 19:00:34 +0900 Subject: [PATCH 135/792] fix: Revamp features page (#8839) --- .../FeatureCard/featurecard.module.scss | 38 ++++++ .../docs/_components/FeatureCard/index.jsx | 21 +++ .../featurecardsection.module.scss | 36 +++++ .../_components/FeatureCardSection/index.jsx | 65 +++++++++ .../docs/_components/QuickstartCard/index.jsx | 22 +++ .../QuickstartCard/quickstartcard.module.scss | 48 +++++++ .../_components/QuickstartCards/index.jsx | 35 +++++ .../quickstartcards.module.scss | 34 +++++ docs/features.md | 127 ++++-------------- 9 files changed, 324 insertions(+), 102 deletions(-) create mode 100644 
docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss create mode 100644 docs-website/src/pages/docs/_components/FeatureCard/index.jsx create mode 100644 docs-website/src/pages/docs/_components/FeatureCardSection/featurecardsection.module.scss create mode 100644 docs-website/src/pages/docs/_components/FeatureCardSection/index.jsx create mode 100644 docs-website/src/pages/docs/_components/QuickstartCard/index.jsx create mode 100644 docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss create mode 100644 docs-website/src/pages/docs/_components/QuickstartCards/index.jsx create mode 100644 docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss diff --git a/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss b/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss new file mode 100644 index 0000000000000..61739d5b6922c --- /dev/null +++ b/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss @@ -0,0 +1,38 @@ +.feature { + flex-direction: row; + padding: 1.75rem; + color: var(--ifm-hero-text-color); + margin: 0rem 2rem 1rem 0rem; + min-height: 14rem; + max-height: 15rem; + overflow: hidden; + text-decoration: none !important; + + img { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + svg { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + strong, + span { + display: block; + margin-bottom: 0.75rem; + } + strong { + font-weight: 600; + font-size: 1.1rem; + } + + span { + font-size: 1rem; + line-height: 1.25em; + } + &:hover { + border-color: var(--ifm-color-primary); + } +} diff --git a/docs-website/src/pages/docs/_components/FeatureCard/index.jsx b/docs-website/src/pages/docs/_components/FeatureCard/index.jsx new file mode 100644 index 0000000000000..407e8eb401987 --- /dev/null +++ b/docs-website/src/pages/docs/_components/FeatureCard/index.jsx @@ -0,0 +1,21 @@ +import React from "react"; +import clsx from "clsx"; +import styles from "./featurecard.module.scss"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import Link from "@docusaurus/Link"; + +const FeatureCard = ({icon, title, description, to}) => { +return ( +
+ +
+ {icon} + {title} → + {description} +
+ +
+ ); +}; + +export default FeatureCard; diff --git a/docs-website/src/pages/docs/_components/FeatureCardSection/featurecardsection.module.scss b/docs-website/src/pages/docs/_components/FeatureCardSection/featurecardsection.module.scss new file mode 100644 index 0000000000000..9e08c789c9068 --- /dev/null +++ b/docs-website/src/pages/docs/_components/FeatureCardSection/featurecardsection.module.scss @@ -0,0 +1,36 @@ + + +.feature { + flex-direction: row; + padding: 0.675rem; + color: var(--ifm-text-color); + margin: 0.5rem; + min-height: calc(100% - 1rem); + text-decoration: none !important; + img { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + svg { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + strong, + span { + display: block; + margin-bottom: 0.25rem; + } + strong { + font-weight: 600; + } + + span { + font-size: 0.875rem; + line-height: 1.25em; + } + &:hover { + border-color: var(--ifm-color-primary); + } +} diff --git a/docs-website/src/pages/docs/_components/FeatureCardSection/index.jsx b/docs-website/src/pages/docs/_components/FeatureCardSection/index.jsx new file mode 100644 index 0000000000000..bac97a805d2d7 --- /dev/null +++ b/docs-website/src/pages/docs/_components/FeatureCardSection/index.jsx @@ -0,0 +1,65 @@ +import React from "react"; +import FeatureCard from '../FeatureCard' +import { + EyeTwoTone, + HeartTwoTone, + ApiTwoTone, + AlertTwoTone, + CompassTwoTone, + ProfileTwoTone, +} from "@ant-design/icons"; + +const featureCardContent = [ +{ + title: "Data Discovery", + description: "Search your entire data ecosystem, including dashboards, datasets, ML models, and raw files.", + to: "docs/how/search", + icon: + }, +{ + title: "Data Governance", + description: "Define ownership and track PII.", + to: "https://www.acryldata.io/blog/the-3-must-haves-of-metadata-management-part-2?utm_source=datahub&utm_medium=referral&utm_content=blog", + icon: + }, +{ + title: "Data Quality Control", + description: "Improve data quality through metadata tests, assertions, data freshness checks, and data contracts.", + to: "https://www.acryldata.io/blog/data-contracts-in-datahub-combining-verifiability-with-holistic-data-management?utm_source=datahub&utm_medium=referral&utm_content=blog", + icon: + }, +{ + title: "UI-based Ingestion", + description: "Easily set up integrations in minutes using DataHub's intuitive UI-based ingestion feature.", + to: "docs/ui-ingestion", + icon: +}, +{ + title: "APIs and SDKs", + description: "For users who prefer programmatic control, DataHub offers a comprehensive set of APIs and SDKs.", + to: "docs/api/datahub-apis", + icon: +}, +{ + title: "Vibrant Community", + description: "Our community provides support through office hours, workshops, and a Slack channel.", + to: "docs/slack", + icon: +} +] + +const FeatureCards = () => { +return ( +
+
+
+ {featureCardContent.map((props, idx) => ( + + ))} +
+
+
+ ); +}; + +export default FeatureCards; diff --git a/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx b/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx new file mode 100644 index 0000000000000..b4e3895fa40e7 --- /dev/null +++ b/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx @@ -0,0 +1,22 @@ +import React from "react"; +import clsx from "clsx"; +import styles from "./quickstartcard.module.scss"; +import Link from "@docusaurus/Link"; +import useBaseUrl from "@docusaurus/useBaseUrl"; + + +const QuickstartCard = ({ icon, title, to, color, fontColor }) => { + return ( +
+ + +
+ {title} → +
+ +
+ ); +}; + + +export default QuickstartCard; diff --git a/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss b/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss new file mode 100644 index 0000000000000..fd35a4b777c99 --- /dev/null +++ b/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss @@ -0,0 +1,48 @@ +.feature { + flex-direction: row; + height: 10rem; + flex-shrink: 0; + padding: 3rem; + color: var(--ifm-text-color); + margin: 0rem 2rem 1rem 0rem; + min-height: calc(100% - 1rem); + text-decoration: none !important; + + + img { + width: 3rem; + height: 3rem; + margin: auto 1rem; + } + svg { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + strong, + span { + display: block; + margin-bottom: 0.25rem; + } + strong { + font-weight: 600; + padding: auto 0; + } + + span { + font-size: 0.875rem; + line-height: 1.25em; + } + &:hover { + border-color: var(--ifm-color-primary); + } + + .quickstart-text { + margin: auto 0; + } + +} + +.quickstart-text { + margin: auto 0; +} \ No newline at end of file diff --git a/docs-website/src/pages/docs/_components/QuickstartCards/index.jsx b/docs-website/src/pages/docs/_components/QuickstartCards/index.jsx new file mode 100644 index 0000000000000..bcb77c043f1d0 --- /dev/null +++ b/docs-website/src/pages/docs/_components/QuickstartCards/index.jsx @@ -0,0 +1,35 @@ +import React from "react"; +import QuickstartCard from '../QuickstartCard' + +const quickstartContent = [ +{ + title: "Quickstart with DataHub", + icon: "datahub-logo-color-mark", + to: "quickstart", + color: '#FFF', + fontColor: '#091013', + }, +{ + title: "Learn about Managed DataHub", + icon: "acryl-logo-transparent-mark", + to: "managed-datahub/managed-datahub-overview", + color: '#091013', + fontColor: '#FFF', +} +] + +const QuickstartCards = () => { +return ( +
+
+
+ {quickstartContent.map((props, idx) => ( + + ))} +
+
+
+ ); +}; + +export default QuickstartCards; diff --git a/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss b/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss new file mode 100644 index 0000000000000..4fbbc4583d662 --- /dev/null +++ b/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss @@ -0,0 +1,34 @@ +.feature { + flex-direction: row; + padding: 0.675rem; + color: var(--ifm-text-color); + margin: 0.5rem; + min-height: calc(100% - 1rem); + text-decoration: none !important; + img { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + svg { + width: 1.5rem; + height: 1.5rem; + margin-right: 0.75rem; + } + strong, + span { + display: block; + margin-bottom: 0.25rem; + } + strong { + font-weight: 600; + } + + span { + font-size: 0.875rem; + line-height: 1.25em; + } + &:hover { + border-color: var(--ifm-color-primary); + } +} diff --git a/docs/features.md b/docs/features.md index a06789f28f82b..9ce85d83ee54a 100644 --- a/docs/features.md +++ b/docs/features.md @@ -1,118 +1,41 @@ ---- -title: "Features" ---- +import QuickstartCards from '@site/src/pages/docs/_components/QuickstartCards'; +import FeatureCardSection from '@site/src/pages/docs/_components/FeatureCardSection'; -# DataHub Features Overview +# What is DataHub? -DataHub is a modern data catalog built to enable end-to-end data discovery, data observability, and data governance. This extensible metadata platform is built for developers to tame the complexity of their rapidly evolving data ecosystems and for data practitioners to leverage the total value of data within their organization. +DataHub is a modern data catalog built to enable end-to-end data discovery, data observability, and data governance. +This extensible metadata platform is built for developers to tame the complexity of their rapidly evolving data ecosystems and for data practitioners to leverage the total value of data within their organization. -Here’s an overview of DataHub’s current functionality. Check out our [roadmap](https://feature-requests.datahubproject.io/roadmap) to see what's to come. +## Quickstart ---- + -## Search and Discovery +## Key Features -### **Search All Corners of Your Data Stack** + -DataHub's unified search experience surfaces results across databases, data lakes, BI platforms, ML feature stores, orchestration tools, and more. +## Get Started -

+### Deployment -### **Trace End-to-End Lineage** +To get started with DataHub, you can use a simple CLI command. Alternatively, you can deploy the instance to production using Docker or Helm charts. -Quickly understand the end-to-end journey of data by tracing lineage across platforms, datasets, ETL/ELT pipelines, charts, dashboards, and beyond. +- [Quickstart](quickstart.md) +- [Self-hosted DataHub](deploy/kubernetes.md) +- [Managed DataHub](managed-datahub/managed-datahub-overview.md) -

+### Ingestion -### **Understand the Impact of Breaking Changes on Downstream Dependencies** +DataHub supports ingestion by UI and CLI. -Proactively identify which entities may be impacted by a breaking change using Impact Analysis. +- [UI-based Ingestion](ui-ingestion.md) +- [CLI-based Ingestion](../metadata-ingestion/cli-ingestion.md) -

+## Join the Community -### **View Metadata 360 at a Glance** +For additional information and assistance, feel free to visit one of these channels! -Combine *technical* and *logical* metadata to provide a 360º view of your data entities. - -Generate **Dataset Stats** to understand the shape & distribution of the data - -

- -Capture historical **Data Validation Outcomes** from tools like Great Expectations - -

- -Leverage DataHub's **Schema Version History** to track changes to the physical structure of data over time - -

- ---- - -## Modern Data Governance - -### **Govern in Real Time** - -[The Actions Framework](./actions/README.md) powers the following real-time use cases: - -* **Notifications:** Generate organization-specific notifications when a change is made on DataHub. For example, send an email to the governance team when a "PII" tag is added to any data asset. -* **Workflow Integration:** Integrate DataHub into your organization's internal workflows. For example, create a Jira ticket when specific Tags or Terms are proposed on a Dataset. -* **Synchronization:** Sync changes made in DataHub into a 3rd party system. For example, reflect Tag additions in DataHub into Snowflake. -* **Auditing:** Audit who is making what changes on DataHub through time. - -

- -### **Manage Entity Ownership** -Quickly and easily assign entity ownership to users and user groups. - -

- -### **Govern with Tags, Glossary Terms, and Domains** -Empower data owners to govern their data entities with: - -1. **Tags:** Informal, loosely controlled labels that serve as a tool for search & discovery. No formal, central management. -2. **Glossary Terms:** A controlled vocabulary with optional hierarchy, commonly used to describe core business concepts and measurements. -3. **Domains:** Curated, top-level folders or categories, widely used in Data Mesh to organize entities by department (i.e., Finance, Marketing) or Data Products. - -

- ---- -## DataHub Administration - -### **Create Users, Groups, & Access Policies** - -DataHub admins can create Policies to define who can perform what action against which resource(s). When you create a new Policy, you will be able to define the following: - -* **Policy Type** - Platform (top-level DataHub Platform privileges, i.e., managing users, groups, and policies) or Metadata (ability to manipulate ownership, tags, documentation, and more) -* **Resource Type** - Specify the type of resources, such as Datasets, Dashboards, Pipelines, and beyond -* **Privileges** - Choose the set of permissions, such as Edit Owners, Edit Documentation, Edit Links -* **Users and/or Groups** - Assign relevant Users and Groups; you can also assign the Policy to Resource Owners, regardless of which Group they belong - -

- -### **Ingest Metadata from the UI** - -Create, configure, schedule, & execute batch metadata ingestion using the DataHub user interface. This makes getting metadata into DataHub easier by minimizing the overhead required to operate custom integration pipelines. - -

\ No newline at end of file
+- [Slack](https://datahubspace.slack.com)
+- [Blog](https://blog.datahubproject.io/)
+- [LinkedIn](https://www.linkedin.com/company/acryl-data/)
+- Our champions - [Data Practitioners Guild](https://datahubproject.io/guild/?_gl=1*11cd6n0*_gcl_au*ODQyMTk0NTI5LjE2OTQ2NjI1MjM.)

From cda980bfb279a70ba85786e4dae8eadffbb2cde9 Mon Sep 17 00:00:00 2001
From: Aseem Bansal 
Date: Wed, 22 Nov 2023 21:58:44 +0530
Subject: [PATCH 136/792] docs(acryl cloud): release notes 0.2.13 (#9291)

---
 docs-website/sidebars.js                      |  1 +
 .../managed-datahub/release-notes/v_0_2_13.md | 19 +++++++++++++++++++
 2 files changed, 20 insertions(+)
 create mode 100644 docs/managed-datahub/release-notes/v_0_2_13.md

diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js
index be12aa3a827f2..c70a609a4cc4b 100644
--- a/docs-website/sidebars.js
+++ b/docs-website/sidebars.js
@@ -630,6 +630,7 @@ module.exports = {
     },
     {
       "Managed DataHub Release History": [
+        "docs/managed-datahub/release-notes/v_0_2_13",
         "docs/managed-datahub/release-notes/v_0_2_12",
         "docs/managed-datahub/release-notes/v_0_2_11",
         "docs/managed-datahub/release-notes/v_0_2_10",
diff --git a/docs/managed-datahub/release-notes/v_0_2_13.md b/docs/managed-datahub/release-notes/v_0_2_13.md
new file mode 100644
index 0000000000000..65cea863d9714
--- /dev/null
+++ b/docs/managed-datahub/release-notes/v_0_2_13.md
@@ -0,0 +1,19 @@
+# v0.2.13
+---
+
+Release Availability Date
+---
+22-Nov-2023
+
+Recommended CLI/SDK
+---
+- `v0.12.0.2` with release notes at https://github.com/acryldata/datahub/releases/tag/v0.12.0.2
+
+If you are using an older CLI/SDK version, please upgrade it. This applies to all CLI/SDK usage, whether through your terminal, GitHub Actions, Airflow, the Python SDK, or the Java SDK. This is a strong recommendation, as we keep pushing fixes to the CLI and staying current helps us support you better.
+
+## Release Changelog
+---
+- Since `v0.2.12` these changes from OSS DataHub https://github.com/datahub-project/datahub/compare/75252a3d9f6a576904be5a0790d644b9ae2df6ac...d9de854d276c118afc55264ecc9e2712b91b4ab2 have been pulled in.
+
+## Some notable features in this SaaS release
+- Data Contract support added. This is currently disabled by default. If you wish to use this feature, please reach out to your rep.
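The release notes above strongly recommend upgrading the CLI/SDK. As a quick illustration of how a reader could act on that, here is a minimal sketch (not part of any patch in this series) that compares the installed version against the recommended one; it assumes the `acryl-datahub` package exposes `datahub.__version__` and that the third-party `packaging` library is installed:

    # Hypothetical check, not from the patches: is the installed
    # acryl-datahub at least the version the release notes recommend?
    from packaging.version import Version

    import datahub

    RECOMMENDED = Version("0.12.0.2")  # value taken from the notes above

    def needs_upgrade(installed: str = datahub.__version__) -> bool:
        # Version() parses PEP 440 strings, so "0.12.0.2" compares
        # numerically rather than lexically.
        return Version(installed) < RECOMMENDED

    if __name__ == "__main__":
        print("upgrade recommended" if needs_upgrade() else "up to date")
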
From 308de491200b01caa7a3bd314bf5b2086af72138 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 23 Nov 2023 00:55:32 +0530 Subject: [PATCH 137/792] fix(): stats are spaced out too far (#9292) --- .../src/app/entity/dashboard/shared/DashboardStatsSummary.tsx | 1 - .../src/app/entity/dataset/shared/DatasetStatsSummary.tsx | 1 - 2 files changed, 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx b/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx index fb6364cffac8b..a359d658d27f7 100644 --- a/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx +++ b/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx @@ -12,7 +12,6 @@ import ExpandingStat from '../../dataset/shared/ExpandingStat'; const StatText = styled.span` color: ${ANTD_GRAY[8]}; @media (min-width: 1024px) { - width: 100%; white-space: nowrap; `; diff --git a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx index 3dcd41a3f8a41..1a5c01df5bde2 100644 --- a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx +++ b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx @@ -13,7 +13,6 @@ import ExpandingStat from './ExpandingStat'; const StatText = styled.span<{ color: string }>` color: ${(props) => props.color}; @media (min-width: 1160px) { - width: 100%; white-space: nowrap; `; From a62d52e6b19599e4a4e0a0a9c4fbaf578933ff35 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Wed, 22 Nov 2023 13:54:12 -0600 Subject: [PATCH 138/792] feat(mysql): upgrade to version 8.2 for quickstart (#9241) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- .../docker-compose-without-neo4j.override.yml | 2 +- docker/docker-compose.override.yml | 2 +- docker/mysql/docker-compose.mysql.yml | 2 +- ...ocker-compose-without-neo4j.quickstart.yml | 2 +- .../quickstart/docker-compose.quickstart.yml | 2 +- .../quickstart_version_mapping.yaml | 9 ++++++ docs/how/extract-container-logs.md | 2 +- docs/how/updating-datahub.md | 1 + docs/troubleshooting/quickstart.md | 2 +- .../src/datahub/cli/docker_cli.py | 4 +++ .../src/datahub/cli/quickstart_versioning.py | 14 +++++--- .../cli/test_quickstart_version_mapping.py | 32 +++++++++++++++---- 12 files changed, 57 insertions(+), 17 deletions(-) diff --git a/docker/docker-compose-without-neo4j.override.yml b/docker/docker-compose-without-neo4j.override.yml index 24f83301351ba..36f3c974b93af 100644 --- a/docker/docker-compose-without-neo4j.override.yml +++ b/docker/docker-compose-without-neo4j.override.yml @@ -48,7 +48,7 @@ services: mysql: container_name: mysql hostname: mysql - image: mysql:5.7 + image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 diff --git a/docker/docker-compose.override.yml b/docker/docker-compose.override.yml index 0907f47d70c3c..ef13b86a3d151 100644 --- a/docker/docker-compose.override.yml +++ b/docker/docker-compose.override.yml @@ -32,7 +32,7 @@ services: mysql: container_name: mysql hostname: mysql - image: mysql:5.7 + image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password ports: - 
${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 diff --git a/docker/mysql/docker-compose.mysql.yml b/docker/mysql/docker-compose.mysql.yml index 70287433bb541..853d0c425ea61 100644 --- a/docker/mysql/docker-compose.mysql.yml +++ b/docker/mysql/docker-compose.mysql.yml @@ -5,7 +5,7 @@ services: mysql: container_name: mysql hostname: mysql - image: mysql:5.7 + image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} env_file: env/docker.env command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin ports: diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index ab5182bf98ae5..6eac53229e82a 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -218,7 +218,7 @@ services: test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD timeout: 5s hostname: mysql - image: mysql:5.7 + image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 restart: on-failure diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index 29c980532d46f..86d70abd2b815 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -225,7 +225,7 @@ services: test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD timeout: 5s hostname: mysql - image: mysql:5.7 + image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 restart: on-failure diff --git a/docker/quickstart/quickstart_version_mapping.yaml b/docker/quickstart/quickstart_version_mapping.yaml index 824e1f8237453..9948bd55fdc0b 100644 --- a/docker/quickstart/quickstart_version_mapping.yaml +++ b/docker/quickstart/quickstart_version_mapping.yaml @@ -23,6 +23,7 @@ quickstart_version_map: default: composefile_git_ref: master docker_tag: head + mysql_tag: 5.7 # default: # Use this to pin default to a specific version. # composefile_git_ref: fd1bd51541a132017a648f4a2f037eec8f70ba26 # v0.10.0 + quickstart compose file fixes # docker_tag: v0.10.0 @@ -30,11 +31,19 @@ quickstart_version_map: head: composefile_git_ref: master docker_tag: head + mysql_tag: 5.7 + + # v0.13.0 we upgraded MySQL image for EOL + v0.13.0: + composefile_git_ref: master + docker_tag: head + mysql_tag: 8.2 # v0.9.6 images contain security vulnerabilities v0.9.6: composefile_git_ref: v0.9.6.1 docker_tag: v0.9.6.1 + mysql_tag: 5.7 # If stable is not defined the latest released version will be used. 
# stable: diff --git a/docs/how/extract-container-logs.md b/docs/how/extract-container-logs.md index b93c077eb21f5..9251d0665c02c 100644 --- a/docs/how/extract-container-logs.md +++ b/docs/how/extract-container-logs.md @@ -21,7 +21,7 @@ CONTAINER ID IMAGE COMMAND 3680fcaef3ed confluentinc/cp-kafka:5.4.0 "/etc/confluent/dock…" 5 days ago Up 5 days 0.0.0.0:9092->9092/tcp, 0.0.0.0:29092->29092/tcp broker 9d6730ddd4c4 neo4j:4.0.6 "/sbin/tini -g -- /d…" 5 days ago Up 5 days 0.0.0.0:7474->7474/tcp, 7473/tcp, 0.0.0.0:7687->7687/tcp neo4j c97edec663af confluentinc/cp-zookeeper:5.4.0 "/etc/confluent/dock…" 5 days ago Up 5 days 2888/tcp, 0.0.0.0:2181->2181/tcp, 3888/tcp zookeeper -150ba161cf26 mysql:5.7 "docker-entrypoint.s…" 5 days ago Up 5 days 0.0.0.0:3306->3306/tcp, 33060/tcp mysql +150ba161cf26 mysql:8.2 "docker-entrypoint.s…" 5 days ago Up 5 days 0.0.0.0:3306->3306/tcp, 33060/tcp mysql 4b72a3eab73f elasticsearch:7.9.3 "/tini -- /usr/local…" 5 days ago Up 5 days (healthy) 0.0.0.0:9200->9200/tcp, 9300/tcp elasticsearch ``` diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 54f5775d8331f..21c4cef2e848b 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -6,6 +6,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ### Breaking Changes +- Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. ### Potential Downtime diff --git a/docs/troubleshooting/quickstart.md b/docs/troubleshooting/quickstart.md index 64684c5ffa36c..0392ffc426a6c 100644 --- a/docs/troubleshooting/quickstart.md +++ b/docs/troubleshooting/quickstart.md @@ -99,7 +99,7 @@ c267c287a235 landoop/schema-registry-ui:latest "/run. 943e60f9b4d0 neo4j:4.0.6 "/sbin/tini -g -- /d…" 10 hours ago Up 10 hours 0.0.0.0:7474->7474/tcp, 7473/tcp, 0.0.0.0:7687->7687/tcp neo4j 6d79b6f02735 confluentinc/cp-zookeeper:5.2.1 "/etc/confluent/dock…" 10 hours ago Up 10 hours 2888/tcp, 0.0.0.0:2181->2181/tcp, 3888/tcp zookeeper 491d9f2b2e9e docker.elastic.co/elasticsearch/elasticsearch:5.6.8 "/bin/bash bin/es-do…" 10 hours ago Up 10 hours 0.0.0.0:9200->9200/tcp, 9300/tcp elasticsearch -ce14b9758eb3 mysql:5.7 +ce14b9758eb3 mysql:8.2 ``` Also you can check individual Docker container logs by running `docker logs <>`. 
For `datahub-gms`, you should see a log similar to this at the end of the initialization: diff --git a/metadata-ingestion/src/datahub/cli/docker_cli.py b/metadata-ingestion/src/datahub/cli/docker_cli.py index 08f3faae8abb2..0e0bc37c61573 100644 --- a/metadata-ingestion/src/datahub/cli/docker_cli.py +++ b/metadata-ingestion/src/datahub/cli/docker_cli.py @@ -158,6 +158,7 @@ def should_use_neo4j_for_graph_service(graph_service_override: Optional[str]) -> def _set_environment_variables( version: Optional[str], + mysql_version: Optional[str], mysql_port: Optional[pydantic.PositiveInt], zk_port: Optional[pydantic.PositiveInt], kafka_broker_port: Optional[pydantic.PositiveInt], @@ -172,6 +173,8 @@ def _set_environment_variables( ) version = f"v{version}" os.environ["DATAHUB_VERSION"] = version + if mysql_version is not None: + os.environ["DATAHUB_MYSQL_VERSION"] = mysql_version if mysql_port is not None: os.environ["DATAHUB_MAPPED_MYSQL_PORT"] = str(mysql_port) @@ -675,6 +678,7 @@ def quickstart( # noqa: C901 # set version _set_environment_variables( version=quickstart_execution_plan.docker_tag, + mysql_version=quickstart_execution_plan.mysql_tag, mysql_port=mysql_port, zk_port=zk_port, kafka_broker_port=kafka_broker_port, diff --git a/metadata-ingestion/src/datahub/cli/quickstart_versioning.py b/metadata-ingestion/src/datahub/cli/quickstart_versioning.py index dc7bd1ecb4a89..be7439f330dfb 100644 --- a/metadata-ingestion/src/datahub/cli/quickstart_versioning.py +++ b/metadata-ingestion/src/datahub/cli/quickstart_versioning.py @@ -21,6 +21,7 @@ class QuickstartExecutionPlan(BaseModel): composefile_git_ref: str docker_tag: str + mysql_tag: Optional[str] def _is_it_a_version(version: str) -> bool: @@ -81,7 +82,7 @@ def fetch_quickstart_config(cls) -> "QuickstartVersionMappingConfig": return QuickstartVersionMappingConfig( quickstart_version_map={ "default": QuickstartExecutionPlan( - composefile_git_ref="master", docker_tag="head" + composefile_git_ref="master", docker_tag="head", mysql_tag="5.7" ), } ) @@ -93,7 +94,7 @@ def fetch_quickstart_config(cls) -> "QuickstartVersionMappingConfig": try: release = cls._fetch_latest_version() config.quickstart_version_map["stable"] = QuickstartExecutionPlan( - composefile_git_ref=release, docker_tag=release + composefile_git_ref=release, docker_tag=release, mysql_tag=release ) except Exception: click.echo( @@ -103,7 +104,8 @@ def fetch_quickstart_config(cls) -> "QuickstartVersionMappingConfig": return config def get_quickstart_execution_plan( - self, requested_version: Optional[str] + self, + requested_version: Optional[str], ) -> QuickstartExecutionPlan: """ From the requested version and stable flag, returns the execution plan for the quickstart. 
@@ -114,10 +116,14 @@ def get_quickstart_execution_plan( requested_version = "default" composefile_git_ref = requested_version docker_tag = requested_version + # Default to 5.7 if not specified in version map + mysql_tag = "5.7" result = self.quickstart_version_map.get( requested_version, QuickstartExecutionPlan( - composefile_git_ref=composefile_git_ref, docker_tag=docker_tag + composefile_git_ref=composefile_git_ref, + docker_tag=docker_tag, + mysql_tag=mysql_tag, ), ) # new CLI version is downloading the composefile corresponding to the requested version diff --git a/metadata-ingestion/tests/unit/cli/test_quickstart_version_mapping.py b/metadata-ingestion/tests/unit/cli/test_quickstart_version_mapping.py index 2fd56560c933d..3b06e48522955 100644 --- a/metadata-ingestion/tests/unit/cli/test_quickstart_version_mapping.py +++ b/metadata-ingestion/tests/unit/cli/test_quickstart_version_mapping.py @@ -6,16 +6,30 @@ example_version_mapper = QuickstartVersionMappingConfig.parse_obj( { "quickstart_version_map": { - "default": {"composefile_git_ref": "master", "docker_tag": "latest"}, + "default": { + "composefile_git_ref": "master", + "docker_tag": "latest", + "mysql_tag": "5.7", + }, "v0.9.6": { "composefile_git_ref": "v0.9.6.1", "docker_tag": "v0.9.6.1", + "mysql_tag": "5.7", + }, + "v2.0.0": { + "composefile_git_ref": "v2.0.1", + "docker_tag": "v2.0.0", + "mysql_tag": "5.7", + }, + "v1.0.0": { + "composefile_git_ref": "v1.0.0", + "docker_tag": "v1.0.0", + "mysql_tag": "5.7", }, - "v2.0.0": {"composefile_git_ref": "v2.0.1", "docker_tag": "v2.0.0"}, - "v1.0.0": {"composefile_git_ref": "v1.0.0", "docker_tag": "v1.0.0"}, "stable": { "composefile_git_ref": "v1.0.1", "docker_tag": "latest", + "mysql_tag": "5.7", }, }, } @@ -27,6 +41,7 @@ def test_quickstart_version_config(): expected = QuickstartExecutionPlan( docker_tag="v1.0.0", composefile_git_ref="v1.0.0", + mysql_tag="5.7", ) assert execution_plan == expected @@ -36,6 +51,7 @@ def test_quickstart_version_config_default(): expected = QuickstartExecutionPlan( docker_tag="v2.0.0", composefile_git_ref="v2.0.1", + mysql_tag="5.7", ) assert execution_plan == expected @@ -43,20 +59,22 @@ def test_quickstart_version_config_default(): def test_quickstart_version_config_stable(): execution_plan = example_version_mapper.get_quickstart_execution_plan("stable") expected = QuickstartExecutionPlan( - docker_tag="latest", - composefile_git_ref="v1.0.1", + docker_tag="latest", composefile_git_ref="v1.0.1", mysql_tag="5.7" ) assert execution_plan == expected def test_quickstart_forced_stable(): example_version_mapper.quickstart_version_map["default"] = QuickstartExecutionPlan( - composefile_git_ref="v1.0.1", docker_tag="latest" + composefile_git_ref="v1.0.1", + docker_tag="latest", + mysql_tag="5.7", ) execution_plan = example_version_mapper.get_quickstart_execution_plan(None) expected = QuickstartExecutionPlan( docker_tag="latest", composefile_git_ref="v1.0.1", + mysql_tag="5.7", ) assert execution_plan == expected @@ -74,6 +92,7 @@ def test_quickstart_forced_not_a_version_tag(): expected = QuickstartExecutionPlan( docker_tag="NOT A VERSION", composefile_git_ref="NOT A VERSION", + mysql_tag="5.7", ) assert execution_plan == expected @@ -83,5 +102,6 @@ def test_quickstart_get_older_version(): expected = QuickstartExecutionPlan( docker_tag="v0.9.6.1", composefile_git_ref="v0.9.6.1", + mysql_tag="5.7", ) assert execution_plan == expected From 37ea292aa619bad2b6796c7cca0f78a683176a13 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 23 
Nov 2023 10:32:19 +0900 Subject: [PATCH 139/792] feat: add townhall RSVP link on the main page (#9277) --- .../src/pages/_components/Hero/index.js | 2 ++ .../_components/TownhallButton/index.jsx | 31 +++++++++++++++++++ .../TownhallButton/townhallbutton.module.scss | 14 +++++++++ 3 files changed, 47 insertions(+) create mode 100644 docs-website/src/pages/_components/TownhallButton/index.jsx create mode 100644 docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss diff --git a/docs-website/src/pages/_components/Hero/index.js b/docs-website/src/pages/_components/Hero/index.js index ffa298b27a822..97a04eb21fa73 100644 --- a/docs-website/src/pages/_components/Hero/index.js +++ b/docs-website/src/pages/_components/Hero/index.js @@ -8,6 +8,7 @@ import { QuestionCircleOutlined } from "@ant-design/icons"; import styles from "./hero.module.scss"; import CodeBlock from "@theme/CodeBlock"; import CardCTAs from "../CardCTAs"; +import TownhallButton from "../TownhallButton"; const HeroAnnouncement = ({ message, linkUrl, linkText }) => (
@@ -46,6 +47,7 @@ const Hero = ({}) => { Join our Slack +
diff --git a/docs-website/src/pages/_components/TownhallButton/index.jsx b/docs-website/src/pages/_components/TownhallButton/index.jsx new file mode 100644 index 0000000000000..11dc2dc5c8476 --- /dev/null +++ b/docs-website/src/pages/_components/TownhallButton/index.jsx @@ -0,0 +1,31 @@ +import React from 'react'; +import styles from "./townhallbutton.module.scss"; +import clsx from "clsx"; +import Link from "@docusaurus/Link"; + +const TownhallButton = () => { + const today = new Date(); + const currentDay = today.getDate(); + const lastDayOfMonth = new Date(today.getFullYear(), today.getMonth() + 1, 0); + const lastThursday = lastDayOfMonth.getDate() - ((lastDayOfMonth.getDay() + 7 - 4) % 7); + + const daysUntilLastThursday = lastThursday - currentDay; + + let showButton = false; + let currentMonth = ''; + + if (daysUntilLastThursday > 0 && daysUntilLastThursday <= 14) { + showButton = true; + currentMonth = new Intl.DateTimeFormat('en-US', { month: 'long' }).format(today); + } + + return ( + showButton && ( + + Join {currentMonth} Townhall! ✨ + + ) + ); +}; + +export default TownhallButton; diff --git a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss new file mode 100644 index 0000000000000..951bc99015302 --- /dev/null +++ b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss @@ -0,0 +1,14 @@ +.feature { + color: white; + border: 1px solid transparent; + background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-origin: border-box; + opacity: 90%; + + &:hover { + opacity: 100%; + background: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-origin: border-box; + } +} From f794a905159c8a299fecdfbc00360a23c5c8e44c Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Thu, 23 Nov 2023 09:45:25 +0100 Subject: [PATCH 140/792] fix(ingest/snowflake): Apply email filter on all usage metrics (#9269) --- .../source/snowflake/snowflake_query.py | 38 ++++++++++++++++ .../source/snowflake/snowflake_usage_v2.py | 2 + .../tests/integration/snowflake/common.py | 3 ++ .../tests/unit/test_snowflake_source.py | 43 +++++++++++++++++++ 4 files changed, 86 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py index 0f89324f5efc6..267f7cf074909 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py @@ -1,5 +1,6 @@ from typing import List, Optional +from datahub.configuration.common import AllowDenyPattern from datahub.configuration.time_window_config import BucketDuration from datahub.ingestion.source.snowflake.constants import SnowflakeObjectDomain from datahub.ingestion.source.snowflake.snowflake_config import DEFAULT_TABLES_DENY_LIST @@ -551,6 +552,8 @@ def usage_per_object_per_time_bucket_for_time_window( use_base_objects: bool, top_n_queries: int, include_top_n_queries: bool, + email_domain: Optional[str], + email_filter: AllowDenyPattern, ) -> str: if not include_top_n_queries: top_n_queries = 0 @@ -561,6 +564,9 @@ def usage_per_object_per_time_bucket_for_time_window( objects_column = ( "BASE_OBJECTS_ACCESSED" if use_base_objects else "DIRECT_OBJECTS_ACCESSED" ) + email_filter_query = 
SnowflakeQuery.gen_email_filter_query(email_filter) + + email_domain = f"@{email_domain}" if email_domain else "" return f""" WITH object_access_history AS @@ -578,12 +584,16 @@ def usage_per_object_per_time_bucket_for_time_window( query_id, query_start_time, user_name, + NVL(USERS.email, CONCAT(user_name, '{email_domain}')) AS user_email, {objects_column} from snowflake.account_usage.access_history + LEFT JOIN + snowflake.account_usage.users USERS WHERE query_start_time >= to_timestamp_ltz({start_time_millis}, 3) AND query_start_time < to_timestamp_ltz({end_time_millis}, 3) + {email_filter_query} ) t, lateral flatten(input => t.{objects_column}) object @@ -705,6 +715,34 @@ def usage_per_object_per_time_bucket_for_time_window( basic_usage_counts.bucket_start_time """ + @staticmethod + def gen_email_filter_query(email_filter: AllowDenyPattern) -> str: + allow_filters = [] + allow_filter = "" + if len(email_filter.allow) == 1 and email_filter.allow[0] == ".*": + allow_filter = "" + else: + for allow_pattern in email_filter.allow: + allow_filters.append( + f"rlike(user_name, '{allow_pattern}','{'i' if email_filter.ignoreCase else 'c'}')" + ) + if allow_filters: + allow_filter = " OR ".join(allow_filters) + allow_filter = f"AND ({allow_filter})" + deny_filters = [] + deny_filter = "" + for deny_pattern in email_filter.deny: + deny_filters.append( + f"rlike(user_name, '{deny_pattern}','{'i' if email_filter.ignoreCase else 'c'}')" + ) + if deny_filters: + deny_filter = " OR ".join(deny_filters) + deny_filter = f"({deny_filter})" + email_filter_query = allow_filter + ( + " AND" + f" NOT {deny_filter}" if deny_filter else "" + ) + return email_filter_query + @staticmethod def table_upstreams_with_column_lineage( start_time_millis: int, diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py index 8f571313f1888..f75e994303954 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py @@ -214,6 +214,8 @@ def _get_workunits_internal( use_base_objects=self.config.apply_view_usage_to_tables, top_n_queries=self.config.top_n_queries, include_top_n_queries=self.config.include_top_n_queries, + email_domain=self.config.email_domain, + email_filter=self.config.user_email_pattern, ), ) except Exception as e: diff --git a/metadata-ingestion/tests/integration/snowflake/common.py b/metadata-ingestion/tests/integration/snowflake/common.py index 78e5499697311..b21cea5f0988d 100644 --- a/metadata-ingestion/tests/integration/snowflake/common.py +++ b/metadata-ingestion/tests/integration/snowflake/common.py @@ -1,6 +1,7 @@ import json from datetime import datetime, timezone +from datahub.configuration.common import AllowDenyPattern from datahub.configuration.time_window_config import BucketDuration from datahub.ingestion.source.snowflake import snowflake_query from datahub.ingestion.source.snowflake.snowflake_query import SnowflakeQuery @@ -263,6 +264,8 @@ def default_query_results( # noqa: C901 top_n_queries=10, include_top_n_queries=True, time_bucket_size=BucketDuration.DAY, + email_domain=None, + email_filter=AllowDenyPattern.allow_all(), ) ): return [] diff --git a/metadata-ingestion/tests/unit/test_snowflake_source.py b/metadata-ingestion/tests/unit/test_snowflake_source.py index aaff878b81eee..343f4466fd6fd 100644 --- a/metadata-ingestion/tests/unit/test_snowflake_source.py 
+++ b/metadata-ingestion/tests/unit/test_snowflake_source.py @@ -3,6 +3,7 @@ import pytest from pydantic import ValidationError +from datahub.configuration.common import AllowDenyPattern from datahub.configuration.oauth import OAuthConfiguration from datahub.configuration.pattern_utils import UUID_REGEX from datahub.ingestion.api.source import SourceCapability @@ -16,6 +17,7 @@ SnowflakeV2Config, ) from datahub.ingestion.source.snowflake.snowflake_query import ( + SnowflakeQuery, create_deny_regex_sql_filter, ) from datahub.ingestion.source.snowflake.snowflake_usage_v2 import ( @@ -661,3 +663,44 @@ def test_snowflake_temporary_patterns_config_rename(): } ) assert conf.temporary_tables_pattern == [".*tmp.*"] + + +def test_email_filter_query_generation_with_one_deny(): + email_filter = AllowDenyPattern(deny=[".*@example.com"]) + filter_query = SnowflakeQuery.gen_email_filter_query(email_filter) + assert filter_query == " AND NOT (rlike(user_name, '.*@example.com','i'))" + + +def test_email_filter_query_generation_without_any_filter(): + email_filter = AllowDenyPattern() + filter_query = SnowflakeQuery.gen_email_filter_query(email_filter) + assert filter_query == "" + + +def test_email_filter_query_generation_one_allow(): + email_filter = AllowDenyPattern(allow=[".*@example.com"]) + filter_query = SnowflakeQuery.gen_email_filter_query(email_filter) + assert filter_query == "AND (rlike(user_name, '.*@example.com','i'))" + + +def test_email_filter_query_generation_one_allow_and_deny(): + email_filter = AllowDenyPattern( + allow=[".*@example.com", ".*@example2.com"], + deny=[".*@example2.com", ".*@example4.com"], + ) + filter_query = SnowflakeQuery.gen_email_filter_query(email_filter) + assert ( + filter_query + == "AND (rlike(user_name, '.*@example.com','i') OR rlike(user_name, '.*@example2.com','i')) AND NOT (rlike(user_name, '.*@example2.com','i') OR rlike(user_name, '.*@example4.com','i'))" + ) + + +def test_email_filter_query_generation_with_case_insensitive_filter(): + email_filter = AllowDenyPattern( + allow=[".*@example.com"], deny=[".*@example2.com"], ignoreCase=False + ) + filter_query = SnowflakeQuery.gen_email_filter_query(email_filter) + assert ( + filter_query + == "AND (rlike(user_name, '.*@example.com','c')) AND NOT (rlike(user_name, '.*@example2.com','c'))" + ) From 1c5871c1699bd3b3c464b7d576572e06f29649ef Mon Sep 17 00:00:00 2001 From: Simon Osipov Date: Fri, 24 Nov 2023 12:52:35 +0200 Subject: [PATCH 141/792] docs(ingestion): Added mention of host without protocol (#9301) --- .../src/app/ingest/source/builder/RecipeForm/trino.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/trino.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/trino.ts index ed3c7ee73b819..1af84c0131d4a 100644 --- a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/trino.ts +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/trino.ts @@ -6,7 +6,7 @@ export const TRINO_HOST_PORT: RecipeField = { name: 'host_port', label: 'Host and Port', tooltip: - "The host and port where Trino is running. For example, 'trino-server:5432'. Note: this host must be accessible on the network where DataHub is running (or allowed via an IP Allow List, AWS PrivateLink, etc).", + "The host (without protocol and ://) and port where Trino is running. For example, 'trino-server:5432'. 
Note: this host must be accessible on the network where DataHub is running (or allowed via an IP Allow List, AWS PrivateLink, etc).", type: FieldType.TEXT, fieldPath: 'source.config.host_port', placeholder: 'trino-server:5432', From 5ccb30e8104a948d0440e4bda3ea9807c26c91f8 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Fri, 24 Nov 2023 16:31:37 +0100 Subject: [PATCH 142/792] fix(ingest/teradata): Teradata speed up changes (#9059) Co-authored-by: Andrew Sikowitz --- .../datahub/emitter/sql_parsing_builder.py | 3 + .../datahub/ingestion/source/sql/teradata.py | 715 +++++++++++++++++- 2 files changed, 688 insertions(+), 30 deletions(-) diff --git a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py index cedaa4fbbd7f6..ea5ebf705707a 100644 --- a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py +++ b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py @@ -195,6 +195,9 @@ def _gen_lineage_mcps(self) -> Iterable[MetadataChangeProposalWrapper]: upstreams.append(edge.gen_upstream_aspect()) fine_upstreams.extend(edge.gen_fine_grained_lineage_aspects()) + if not upstreams: + continue + upstream_lineage = UpstreamLineageClass( upstreams=sorted(upstreams, key=lambda x: x.dataset), fineGrainedLineages=sorted( diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py b/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py index 8aeb1e50cd0b3..4ea8dbe236c53 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py @@ -1,14 +1,32 @@ import logging +from collections import defaultdict from dataclasses import dataclass from datetime import datetime -from typing import Iterable, Optional, Union +from functools import lru_cache +from itertools import groupby +from typing import ( + Any, + Dict, + Iterable, + List, + MutableMapping, + Optional, + Set, + Tuple, + Union, +) # This import verifies that the dependencies are available. 
import teradatasqlalchemy # noqa: F401 import teradatasqlalchemy.types as custom_types from pydantic.fields import Field -from sqlalchemy import create_engine +from sqlalchemy import create_engine, inspect from sqlalchemy.engine import Engine +from sqlalchemy.engine.base import Connection +from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.sql.expression import text +from teradatasqlalchemy.dialect import TeradataDialect +from teradatasqlalchemy.options import configure from datahub.configuration.common import AllowDenyPattern from datahub.configuration.time_window_config import BaseTimeWindowConfig @@ -22,9 +40,11 @@ platform_name, support_status, ) +from datahub.ingestion.api.source_helpers import auto_lowercase_urns from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.graph.client import DataHubGraph from datahub.ingestion.source.sql.sql_common import SqlWorkUnit, register_custom_type +from datahub.ingestion.source.sql.sql_config import SQLCommonConfig from datahub.ingestion.source.sql.sql_generic_profiler import ProfilingSqlReport from datahub.ingestion.source.sql.two_tier_sql_source import ( TwoTierSQLAlchemyConfig, @@ -33,6 +53,7 @@ from datahub.ingestion.source.usage.usage_common import BaseUsageConfig from datahub.ingestion.source_report.ingestion_stage import IngestionStageReport from datahub.ingestion.source_report.time_window import BaseTimeWindowReport +from datahub.metadata._schema_classes import SchemaMetadataClass from datahub.metadata.com.linkedin.pegasus2avro.schema import ( BytesTypeClass, TimeTypeClass, @@ -62,6 +83,249 @@ register_custom_type(custom_types.XML, BytesTypeClass) +@dataclass +class TeradataTable: + database: str + name: str + description: Optional[str] + object_type: str + create_timestamp: datetime + last_alter_name: Optional[str] + last_alter_timestamp: Optional[datetime] + request_text: Optional[str] + + +# lru cache is set to 1 which work only in single threaded environment but it keeps the memory footprint lower +@lru_cache(maxsize=1) +def get_schema_columns( + self: Any, connection: Connection, dbc_columns: str, schema: str +) -> Dict[str, List[Any]]: + columns: Dict[str, List[Any]] = {} + columns_query = f"select * from dbc.{dbc_columns} where DatabaseName (NOT CASESPECIFIC) = '{schema}' (NOT CASESPECIFIC) order by TableName, ColumnId" + rows = connection.execute(text(columns_query)).fetchall() + for row in rows: + row_mapping = row._mapping + if row_mapping.TableName not in columns: + columns[row_mapping.TableName] = [] + + columns[row_mapping.TableName].append(row_mapping) + + return columns + + +# lru cache is set to 1 which work only in single threaded environment but it keeps the memory footprint lower +@lru_cache(maxsize=1) +def get_schema_pk_constraints( + self: Any, connection: Connection, schema: str +) -> Dict[str, List[Any]]: + dbc_indices = "IndicesV" + "X" if configure.usexviews else "IndicesV" + primary_keys: Dict[str, List[Any]] = {} + stmt = f"select * from dbc.{dbc_indices} where DatabaseName (NOT CASESPECIFIC) = '{schema}' (NOT CASESPECIFIC) and IndexType = 'K' order by IndexNumber" + rows = connection.execute(text(stmt)).fetchall() + for row in rows: + row_mapping = row._mapping + if row_mapping.TableName not in primary_keys: + primary_keys[row_mapping.TableName] = [] + + primary_keys[row_mapping.TableName].append(row_mapping) + + return primary_keys + + +def optimized_get_pk_constraint( + self: Any, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + **kw: 
Dict[str, Any], +) -> Dict: + """ + Override + TODO: Check if we need PRIMARY Indices or PRIMARY KEY Indices + TODO: Check for border cases (No PK Indices) + """ + + if schema is None: + schema = self.default_schema_name + + # Default value for 'usexviews' is False so use dbc.IndicesV by default + # dbc_indices = self.__get_xviews_obj("IndicesV") + + # table_obj = table( + # dbc_indices, column("ColumnName"), column("IndexName"), schema="dbc" + # ) + + res = [] + pk_keys = self.get_schema_pk_constraints(connection, schema) + res = pk_keys.get(table_name, []) + + index_columns = list() + index_name = None + + for index_column in res: + index_columns.append(self.normalize_name(index_column.ColumnName)) + index_name = self.normalize_name( + index_column.IndexName + ) # There should be just one IndexName + + return {"constrained_columns": index_columns, "name": index_name} + + +def optimized_get_columns( + self: Any, + connection: Connection, + table_name: str, + schema: Optional[str] = None, + tables_cache: MutableMapping[str, List[TeradataTable]] = {}, + use_qvci: bool = False, + **kw: Dict[str, Any], +) -> List[Dict]: + if schema is None: + schema = self.default_schema_name + + # Using 'help schema.table.*' statements has been considered. + # The DBC.ColumnsV provides the default value which is not available + # with the 'help column' commands result. + + td_table: Optional[TeradataTable] = None + # Check if the object is a view + for t in tables_cache[schema]: + if t.name == table_name: + td_table = t + break + + if td_table is None: + logger.warning( + f"Table {table_name} not found in cache for schema {schema}, not getting columns" + ) + return [] + + res = [] + if td_table.object_type == "View" and not use_qvci: + # Volatile table definition is not stored in the dictionary. + # We use the 'help schema.table.*' command instead to get information for all columns. + # We have to do the same for views since we need the type information + # which is not available in dbc.ColumnsV. + res = self._get_column_help(connection, schema, table_name, column_name=None) + + # If this is a view, get types for individual columns (dbc.ColumnsV won't have types for view columns). + # For a view or a volatile table, we have to set the default values as the 'help' command does not have it. 
+ col_info_list = [] + for r in res: + updated_column_info_dict = self._update_column_help_info(r._mapping) + col_info_list.append(dict(r._mapping, **(updated_column_info_dict))) + res = col_info_list + else: + # Default value for 'usexviews' is False so use dbc.ColumnsV by default + dbc_columns = "columnsQV" if use_qvci else "columnsV" + dbc_columns = dbc_columns + "X" if configure.usexviews else dbc_columns + res = self.get_schema_columns(connection, dbc_columns, schema).get( + table_name, [] + ) + + final_column_info = [] + # Don't care about ART tables now + # Ignore the non-functional column in a PTI table + for row in res: + col_info = self._get_column_info(row) + if "TSColumnType" in col_info and col_info["TSColumnType"] is not None: + if ( + col_info["ColumnName"] == "TD_TIMEBUCKET" + and col_info["TSColumnType"].strip() == "TB" + ): + continue + final_column_info.append(col_info) + + return final_column_info + + +# lru cache is set to 1 which work only in single threaded environment but it keeps the memory footprint lower +@lru_cache(maxsize=1) +def get_schema_foreign_keys( + self: Any, connection: Connection, schema: str +) -> Dict[str, List[Any]]: + dbc_child_parent_table = ( + "All_RI_ChildrenV" + "X" if configure.usexviews else "All_RI_ChildrenV" + ) + foreign_keys: Dict[str, List[Any]] = {} + stmt = f""" + SELECT dbc."All_RI_ChildrenV"."ChildDB", dbc."All_RI_ChildrenV"."ChildTable", dbc."All_RI_ChildrenV"."IndexID", dbc."{dbc_child_parent_table}"."IndexName", dbc."{dbc_child_parent_table}"."ChildKeyColumn", dbc."{dbc_child_parent_table}"."ParentDB", dbc."{dbc_child_parent_table}"."ParentTable", dbc."{dbc_child_parent_table}"."ParentKeyColumn" + FROM dbc."{dbc_child_parent_table}" + WHERE ChildDB = '{schema}' ORDER BY "IndexID" ASC + """ + rows = connection.execute(text(stmt)).fetchall() + for row in rows: + row_mapping = row._mapping + if row_mapping.ChildTable not in foreign_keys: + foreign_keys[row_mapping.ChildTable] = [] + + foreign_keys[row_mapping.ChildTable].append(row_mapping) + + return foreign_keys + + +def optimized_get_foreign_keys(self, connection, table_name, schema=None, **kw): + """ + Overrides base class method + """ + + if schema is None: + schema = self.default_schema_name + # Default value for 'usexviews' is False so use DBC.All_RI_ChildrenV by default + res = self.get_schema_foreign_keys(connection, schema).get(table_name, []) + + def grouper(fk_row): + return { + "name": fk_row.IndexName or fk_row.IndexID, # ID if IndexName is None + "schema": fk_row.ParentDB, + "table": fk_row.ParentTable, + } + + # TODO: Check if there's a better way + fk_dicts = list() + for constraint_info, constraint_cols in groupby(res, grouper): + fk_dict = { + "name": str(constraint_info["name"]), + "constrained_columns": list(), + "referred_table": constraint_info["table"], + "referred_schema": constraint_info["schema"], + "referred_columns": list(), + } + + for constraint_col in constraint_cols: + fk_dict["constrained_columns"].append( + self.normalize_name(constraint_col.ChildKeyColumn) + ) + fk_dict["referred_columns"].append( + self.normalize_name(constraint_col.ParentKeyColumn) + ) + + fk_dicts.append(fk_dict) + + return fk_dicts + + +def optimized_get_view_definition( + self: Any, + connection: Connection, + view_name: str, + schema: Optional[str] = None, + tables_cache: MutableMapping[str, List[TeradataTable]] = {}, + **kw: Dict[str, Any], +) -> Optional[str]: + if schema is None: + schema = self.default_schema_name + + if schema not in tables_cache: + return None + + for 
table in tables_cache[schema]: + if table.name == view_name: + return self.normalize_name(table.request_text) + + return None + + @dataclass class TeradataReport(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowReport): num_queries_parsed: int = 0 @@ -74,8 +338,62 @@ class BaseTeradataConfig(TwoTierSQLAlchemyConfig): class TeradataConfig(BaseTeradataConfig, BaseTimeWindowConfig): + databases: Optional[List[str]] = Field( + default=None, + description=( + "List of databases to ingest. If not specified, all databases will be ingested." + " Even if this is specified, databases will still be filtered by `database_pattern`." + ), + ) + database_pattern = Field( - default=AllowDenyPattern(deny=["dbc"]), + default=AllowDenyPattern( + deny=[ + "All", + "Crashdumps", + "Default", + "DemoNow_Monitor", + "EXTUSER", + "External_AP", + "GLOBAL_FUNCTIONS", + "LockLogShredder", + "PUBLIC", + "SQLJ", + "SYSBAR", + "SYSJDBC", + "SYSLIB", + "SYSSPATIAL", + "SYSUDTLIB", + "SYSUIF", + "SysAdmin", + "Sys_Calendar", + "SystemFe", + "TDBCMgmt", + "TDMaps", + "TDPUSER", + "TDQCD", + "TDStats", + "TD_ANALYTICS_DB", + "TD_SERVER_DB", + "TD_SYSFNLIB", + "TD_SYSGPL", + "TD_SYSXML", + "TDaaS_BAR", + "TDaaS_DB", + "TDaaS_Maint", + "TDaaS_Monitor", + "TDaaS_Support", + "TDaaS_TDBCMgmt1", + "TDaaS_TDBCMgmt2", + "dbcmngr", + "mldb", + "system", + "tapidb", + "tdwm", + "val", + "dbc", + ] + ), description="Regex patterns for databases to filter in ingestion.", ) include_table_lineage = Field( @@ -84,6 +402,11 @@ class TeradataConfig(BaseTeradataConfig, BaseTimeWindowConfig): "This requires to have the table lineage feature enabled.", ) + include_view_lineage = Field( + default=True, + description="Whether to include view lineage in the ingestion. " + "This requires to have the view lineage feature enabled.", + ) usage: BaseUsageConfig = Field( description="The usage config to use when generating usage statistics", default=BaseUsageConfig(), @@ -99,6 +422,16 @@ class TeradataConfig(BaseTeradataConfig, BaseTimeWindowConfig): description="Generate usage statistic.", ) + use_file_backed_cache: bool = Field( + default=True, + description="Whether to use a file backed cache for the view definitions.", + ) + + use_qvci: bool = Field( + default=False, + description="Whether to use QVCI to get column information. 
This is faster but requires to have QVCI enabled.", + ) + @platform_name("Teradata") @config_class(TeradataConfig) @@ -122,13 +455,116 @@ class TeradataSource(TwoTierSQLAlchemySource): config: TeradataConfig - LINEAGE_QUERY: str = """SELECT ProcID, UserName as "user", StartTime AT TIME ZONE 'GMT' as "timestamp", DefaultDatabase as default_database, QueryText as query - FROM "DBC".DBQLogTbl - where ErrorCode = 0 - and QueryText like 'create table demo_user.test_lineage%' - and "timestamp" >= TIMESTAMP '{start_time}' - and "timestamp" < TIMESTAMP '{end_time}' - """ + LINEAGE_QUERY_DATABASE_FILTER: str = """and default_database IN ({databases})""" + + LINEAGE_TIMESTAMP_BOUND_QUERY: str = """ + SELECT MIN(CollectTimeStamp) as "min_ts", MAX(CollectTimeStamp) as "max_ts" from DBC.DBQLogTbl + """.strip() + + QUERY_TEXT_QUERY: str = """ + SELECT + s.QueryID as "query_id", + UserName as "user", + StartTime AT TIME ZONE 'GMT' as "timestamp", + DefaultDatabase as default_database, + s.SqlTextInfo as "query_text", + s.SqlRowNo as "row_no" + FROM "DBC".DBQLogTbl as l + JOIN "DBC".DBQLSqlTbl as s on s.QueryID = l.QueryID + WHERE + l.ErrorCode = 0 + AND l.statementtype not in ( + 'Unrecognized type', + 'Create Database/User', + 'Help', + 'Modify Database', + 'Drop Table', + 'Show', + 'Not Applicable', + 'Grant', + 'Abort', + 'Database', + 'Flush Query Logging', + 'Null', + 'Begin/End DBQL', + 'Revoke' + ) + and "timestamp" >= TIMESTAMP '{start_time}' + and "timestamp" < TIMESTAMP '{end_time}' + and s.CollectTimeStamp >= TIMESTAMP '{start_time}' + and default_database not in ('DEMONOW_MONITOR') + {databases_filter} + ORDER BY "query_id", "row_no" + """.strip() + + TABLES_AND_VIEWS_QUERY: str = """ +SELECT + t.DatabaseName, + t.TableName as name, + t.CommentString as description, + CASE t.TableKind + WHEN 'I' THEN 'Join index' + WHEN 'N' THEN 'Hash index' + WHEN 'T' THEN 'Table' + WHEN 'V' THEN 'View' + WHEN 'O' THEN 'NoPI Table' + WHEN 'Q' THEN 'Queue table' + END AS object_type, + t.CreateTimeStamp, + t.LastAlterName, + t.LastAlterTimeStamp, + t.RequestText +FROM dbc.Tables t +WHERE DatabaseName NOT IN ( + 'All', + 'Crashdumps', + 'Default', + 'DemoNow_Monitor', + 'EXTUSER', + 'External_AP', + 'GLOBAL_FUNCTIONS', + 'LockLogShredder', + 'PUBLIC', + 'SQLJ', + 'SYSBAR', + 'SYSJDBC', + 'SYSLIB', + 'SYSSPATIAL', + 'SYSUDTLIB', + 'SYSUIF', + 'SysAdmin', + 'Sys_Calendar', + 'SystemFe', + 'TDBCMgmt', + 'TDMaps', + 'TDPUSER', + 'TDQCD', + 'TDStats', + 'TD_ANALYTICS_DB', + 'TD_SERVER_DB', + 'TD_SYSFNLIB', + 'TD_SYSGPL', + 'TD_SYSXML', + 'TDaaS_BAR', + 'TDaaS_DB', + 'TDaaS_Maint', + 'TDaaS_Monitor', + 'TDaaS_Support', + 'TDaaS_TDBCMgmt1', + 'TDaaS_TDBCMgmt2', + 'dbcmngr', + 'mldb', + 'system', + 'tapidb', + 'tdwm', + 'val', + 'dbc' +) +AND t.TableKind in ('T', 'V', 'Q', 'O') +ORDER by DatabaseName, TableName; + """.strip() + + _tables_cache: MutableMapping[str, List[TeradataTable]] = defaultdict(list) def __init__(self, config: TeradataConfig, ctx: PipelineContext): super().__init__(config, ctx, "teradata") @@ -145,36 +581,246 @@ def __init__(self, config: TeradataConfig, ctx: PipelineContext): generate_operations=self.config.usage.include_operational_stats, ) - self.schema_resolver = SchemaResolver( - platform=self.platform, - platform_instance=self.config.platform_instance, - graph=None, - env=self.config.env, - ) + self.schema_resolver = self._init_schema_resolver() + + if self.config.include_tables or self.config.include_views: + self.cache_tables_and_views() + logger.info(f"Found {len(self._tables_cache)} 
tables and views") + setattr(self, "loop_tables", self.cached_loop_tables) # noqa: B010 + setattr(self, "loop_views", self.cached_loop_views) # noqa: B010 + setattr( # noqa: B010 + self, "get_table_properties", self.cached_get_table_properties + ) + + tables_cache = self._tables_cache + setattr( # noqa: B010 + TeradataDialect, + "get_columns", + lambda self, connection, table_name, schema=None, use_qvci=self.config.use_qvci, **kw: optimized_get_columns( + self, + connection, + table_name, + schema, + tables_cache=tables_cache, + use_qvci=use_qvci, + **kw, + ), + ) + + setattr( # noqa: B010 + TeradataDialect, + "get_pk_constraint", + lambda self, connection, table_name, schema=None, **kw: optimized_get_pk_constraint( + self, connection, table_name, schema, **kw + ), + ) + + setattr( # noqa: B010 + TeradataDialect, + "get_foreign_keys", + lambda self, connection, table_name, schema=None, **kw: optimized_get_foreign_keys( + self, connection, table_name, schema, **kw + ), + ) + + setattr( # noqa: B010 + TeradataDialect, + "get_schema_columns", + lambda self, connection, dbc_columns, schema: get_schema_columns( + self, connection, dbc_columns, schema + ), + ) + + setattr( # noqa: B010 + TeradataDialect, + "get_view_definition", + lambda self, connection, view_name, schema=None, **kw: optimized_get_view_definition( + self, connection, view_name, schema, tables_cache=tables_cache, **kw + ), + ) + + setattr( # noqa: B010 + TeradataDialect, + "get_schema_pk_constraints", + lambda self, connection, schema: get_schema_pk_constraints( + self, connection, schema + ), + ) + + setattr( # noqa: B010 + TeradataDialect, + "get_schema_foreign_keys", + lambda self, connection, schema: get_schema_foreign_keys( + self, connection, schema + ), + ) + else: + logger.info( + "Disabling stale entity removal as tables and views are disabled" + ) + if self.config.stateful_ingestion: + self.config.stateful_ingestion.remove_stale_metadata = False @classmethod def create(cls, config_dict, ctx): config = TeradataConfig.parse_obj(config_dict) return cls(config, ctx) - def get_audit_log_mcps(self) -> Iterable[MetadataWorkUnit]: + def _init_schema_resolver(self) -> SchemaResolver: + if not self.config.include_tables or not self.config.include_views: + if self.ctx.graph: + return self.ctx.graph.initialize_schema_resolver_from_datahub( + platform=self.platform, + platform_instance=self.config.platform_instance, + env=self.config.env, + ) + else: + logger.warning( + "Failed to load schema info from DataHub as DataHubGraph is missing.", + ) + return SchemaResolver( + platform=self.platform, + platform_instance=self.config.platform_instance, + env=self.config.env, + ) + + def get_inspectors(self): + # This method can be overridden in the case that you want to dynamically + # run on multiple databases. 
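+        # A single inspector object is reused for every database: each iteration
+        # below tags it with an ad-hoc `_datahub_database` attribute, which
+        # get_db_name() reads back instead of re-deriving the database from the
+        # engine URL.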
+ url = self.config.get_sql_alchemy_url() + logger.debug(f"sql_alchemy_url={url}") + engine = create_engine(url, **self.config.options) + with engine.connect() as conn: + inspector = inspect(conn) + if self.config.database and self.config.database != "": + databases = [self.config.database] + elif self.config.databases: + databases = self.config.databases + else: + databases = inspector.get_schema_names() + for db in databases: + if self.config.database_pattern.allowed(db): + # url = self.config.get_sql_alchemy_url(current_db=db) + # with create_engine(url, **self.config.options).connect() as conn: + # inspector = inspect(conn) + inspector._datahub_database = db + yield inspector + + def get_db_name(self, inspector: Inspector) -> str: + if hasattr(inspector, "_datahub_database"): + return inspector._datahub_database + + engine = inspector.engine + + if engine and hasattr(engine, "url") and hasattr(engine.url, "database"): + return str(engine.url.database).strip('"') + else: + raise Exception("Unable to get database name from Sqlalchemy inspector") + + def cached_loop_tables( # noqa: C901 + self, + inspector: Inspector, + schema: str, + sql_config: SQLCommonConfig, + ) -> Iterable[Union[SqlWorkUnit, MetadataWorkUnit]]: + setattr( # noqa: B010 + inspector, + "get_table_names", + lambda schema: [ + i.name + for i in filter( + lambda t: t.object_type != "View", self._tables_cache[schema] + ) + ], + ) + yield from super().loop_tables(inspector, schema, sql_config) + + def cached_get_table_properties( + self, inspector: Inspector, schema: str, table: str + ) -> Tuple[Optional[str], Dict[str, str], Optional[str]]: + description: Optional[str] = None + properties: Dict[str, str] = {} + + # The location cannot be fetched generically, but subclasses may override + # this method and provide a location. 
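+        # (for example, a subclass for a platform with external storage could
+        # return the table's storage path here)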
+        location: Optional[str] = None
+
+        for entry in self._tables_cache[schema]:
+            if entry.name == table:
+                description = entry.description
+                if entry.object_type == "View" and entry.request_text:
+                    properties["view_definition"] = entry.request_text
+                break
+        return description, properties, location
+
+    def cached_loop_views(  # noqa: C901
+        self,
+        inspector: Inspector,
+        schema: str,
+        sql_config: SQLCommonConfig,
+    ) -> Iterable[Union[SqlWorkUnit, MetadataWorkUnit]]:
+        setattr(  # noqa: B010
+            inspector,
+            "get_view_names",
+            lambda schema: [
+                i.name
+                for i in filter(
+                    lambda t: t.object_type == "View", self._tables_cache[schema]
+                )
+            ],
+        )
+        yield from super().loop_views(inspector, schema, sql_config)
+
+    def cache_tables_and_views(self) -> None:
         engine = self.get_metadata_engine()
-        for entry in engine.execute(
-            self.LINEAGE_QUERY.format(
-                start_time=self.config.start_time, end_time=self.config.end_time
+        for entry in engine.execute(self.TABLES_AND_VIEWS_QUERY):
+            table = TeradataTable(
+                database=entry.DatabaseName.strip(),
+                name=entry.name.strip(),
+                description=entry.description.strip() if entry.description else None,
+                object_type=entry.object_type,
+                create_timestamp=entry.CreateTimeStamp,
+                last_alter_name=entry.LastAlterName,
+                last_alter_timestamp=entry.LastAlterTimeStamp,
+                request_text=entry.RequestText.strip()
+                if entry.object_type == "View" and entry.RequestText
+                else None,
             )
-        ):
+            if table.database not in self._tables_cache:
+                self._tables_cache[table.database] = []
+
+            self._tables_cache[table.database].append(table)
+
+    def get_audit_log_mcps(self, urns: Set[str]) -> Iterable[MetadataWorkUnit]:
+        engine = self.get_metadata_engine()
+        for entry in engine.execute(self._make_lineage_query()):
             self.report.num_queries_parsed += 1
             if self.report.num_queries_parsed % 1000 == 0:
                 logger.info(f"Parsed {self.report.num_queries_parsed} queries")
 
             yield from self.gen_lineage_from_query(
-                query=entry.query,
+                query=entry.query_text,
                 default_database=entry.default_database,
                 timestamp=entry.timestamp,
                 user=entry.user,
-                is_view_ddl=False,
+                urns=urns,
+            )
+
+    def _make_lineage_query(self) -> str:
+        databases_filter = (
+            ""
+            if not self.config.databases
+            else "and default_database in ({databases})".format(
+                databases=",".join([f"'{db}'" for db in self.config.databases])
             )
+        )
+
+        query = self.QUERY_TEXT_QUERY.format(
+            start_time=self.config.start_time,
+            end_time=self.config.end_time,
+            databases_filter=databases_filter,
+        )
+        return query
 
     def gen_lineage_from_query(
         self,
@@ -182,10 +828,12 @@ def gen_lineage_from_query(
         default_database: Optional[str] = None,
         timestamp: Optional[datetime] = None,
         user: Optional[str] = None,
-        is_view_ddl: bool = False,
+        view_urn: Optional[str] = None,
+        urns: Optional[Set[str]] = None,
     ) -> Iterable[MetadataWorkUnit]:
         result = sqlglot_lineage(
-            sql=query,
+            # With this clever hack we can keep the query parser from failing on queries with CASESPECIFIC
+            sql=query.replace("(NOT CASESPECIFIC)", ""),
             schema_resolver=self.schema_resolver,
             default_db=None,
             default_schema=default_database
@@ -194,17 +842,17 @@ def gen_lineage_from_query(
         )
         if result.debug_info.table_error:
             logger.debug(
-                f"Error parsing table lineage, {result.debug_info.table_error}"
+                f"Error parsing table lineage ({view_urn}):\n{result.debug_info.table_error}"
             )
             self.report.num_table_parse_failures += 1
         else:
             yield from self.builder.process_sql_parsing_result(
                 result,
                 query=query,
-                is_view_ddl=is_view_ddl,
+                is_view_ddl=view_urn is not None,
                 query_timestamp=timestamp,
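                 # the acting user is attributed below as a DataHub corpuser urn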
user=f"urn:li:corpuser:{user}", - include_urns=self.schema_resolver.get_urns(), + include_urns=urns, ) def get_metadata_engine(self) -> Engine: @@ -214,10 +862,17 @@ def get_metadata_engine(self) -> Engine: def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]: # Add all schemas to the schema resolver - yield from super().get_workunits_internal() + # Sql parser operates on lowercase urns so we need to lowercase the urns + for wu in auto_lowercase_urns(super().get_workunits_internal()): + urn = wu.get_urn() + schema_metadata = wu.get_aspect_of_type(SchemaMetadataClass) + if schema_metadata: + self.schema_resolver.add_schema_metadata(urn, schema_metadata) + yield wu + urns = self.schema_resolver.get_urns() if self.config.include_table_lineage or self.config.include_usage_statistics: self.report.report_ingestion_stage_start("audit log extraction") - yield from self.get_audit_log_mcps() + yield from self.get_audit_log_mcps(urns=urns) yield from self.builder.gen_workunits() From 514c2fb157e14608eace62fc6fb632ccfb7a7157 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 24 Nov 2023 14:12:11 -0600 Subject: [PATCH 143/792] fix(kafka): fix consumer properties on due consumer (#9304) --- .../kafka/SimpleKafkaConsumerFactory.java | 2 +- .../kafka/SimpleKafkaConsumerFactoryTest.java | 32 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java index e12cbec87fe45..14ffc01d75781 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java @@ -43,7 +43,7 @@ protected KafkaListenerContainerFactory createInstance(@Qualifier("configurat consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); } // else we rely on KafkaProperties which defaults to localhost:9092 - Map customizedProperties = consumerProps.buildProperties(); + Map customizedProperties = properties.buildConsumerProperties(); customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes()); diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java new file mode 100644 index 0000000000000..408c7b67b25f0 --- /dev/null +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java @@ -0,0 +1,32 @@ +package com.linkedin.gms.factory.kafka; + +import com.linkedin.gms.factory.config.ConfigurationProvider; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; + 
+import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +@SpringBootTest( + properties = { + "spring.kafka.properties.security.protocol=SSL" + }, + classes = { + SimpleKafkaConsumerFactory.class, + ConfigurationProvider.class + }) +@EnableConfigurationProperties(ConfigurationProvider.class) +public class SimpleKafkaConsumerFactoryTest extends AbstractTestNGSpringContextTests { + @Autowired + ConcurrentKafkaListenerContainerFactory testFactory; + + @Test + void testInitialization() { + assertNotNull(testFactory); + assertEquals(testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), "SSL"); + } +} From 298b9becb009ad775c31122ae9933bb2b02b92b3 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Sat, 25 Nov 2023 04:19:34 +0530 Subject: [PATCH 144/792] fix(dbt-cloud): do not pass macros to sorting nodes (#9302) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/dbt/dbt_common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index 94df0a4f8a166..919ba5a4b285a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -895,6 +895,7 @@ def _infer_schemas_and_update_cll(self, all_nodes_map: Dict[str, DBTNode]) -> No (upstream, node.dbt_name) for node in all_nodes_map.values() for upstream in node.upstream_nodes + if upstream in all_nodes_map ), ): node = all_nodes_map[dbt_name] From a34fdfd8b73b5eeefbf8751ed920f2e5110753b6 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Sat, 25 Nov 2023 04:29:24 +0530 Subject: [PATCH 145/792] fix(ingest/lookml): emit all views with same name and different file path (#9279) --- .../ingestion/source/looker/lookml_source.py | 23 +- .../data.model.lkml | 7 + .../data2.model.lkml | 6 + .../path1/foo.view.lkml | 47 ++ .../path2/foo.view.lkml | 41 ++ ...l_same_name_views_different_file_path.json | 587 ++++++++++++++++++ .../tests/integration/lookml/test_lookml.py | 50 ++ 7 files changed, 755 insertions(+), 6 deletions(-) create mode 100644 metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data.model.lkml create mode 100644 metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data2.model.lkml create mode 100644 metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path1/foo.view.lkml create mode 100644 metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path2/foo.view.lkml create mode 100644 metadata-ingestion/tests/integration/lookml/lookml_same_name_views_different_file_path.json diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 9d7c972612777..2bd469b3f9bcd 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -1982,9 +1982,16 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 self.reporter, ) - # some views can be mentioned by multiple 'include' statements and can be included via different connections. 
- # So this set is used to prevent creating duplicate events + # Some views can be mentioned by multiple 'include' statements and can be included via different connections. + + # This map is used to keep track of which views files have already been processed + # for a connection in order to prevent creating duplicate events. + # Key: connection name, Value: view file paths processed_view_map: Dict[str, Set[str]] = {} + + # This map is used to keep track of the connection that a view is processed with. + # Key: view unique identifier - determined by variables present in config `view_naming_pattern` + # Value: Tuple(model file name, connection name) view_connection_map: Dict[str, Tuple[str, str]] = {} # The ** means "this directory and all subdirectories", and hence should @@ -2148,13 +2155,17 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 if self.source_config.view_pattern.allowed( maybe_looker_view.id.view_name ): + view_urn = maybe_looker_view.id.get_urn( + self.source_config + ) view_connection_mapping = view_connection_map.get( - maybe_looker_view.id.view_name + view_urn ) if not view_connection_mapping: - view_connection_map[ - maybe_looker_view.id.view_name - ] = (model_name, model.connection) + view_connection_map[view_urn] = ( + model_name, + model.connection, + ) # first time we are discovering this view logger.debug( f"Generating MCP for view {raw_view['name']}" diff --git a/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data.model.lkml b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data.model.lkml new file mode 100644 index 0000000000000..183b16b2a3c1d --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data.model.lkml @@ -0,0 +1,7 @@ +connection: "my_connection" + +include: "path1/foo.view.lkml" + +explore: aliased_explore { + from: my_view +} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data2.model.lkml b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data2.model.lkml new file mode 100644 index 0000000000000..6a4a96e2630fa --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/data2.model.lkml @@ -0,0 +1,6 @@ +connection: "my_connection" +include: "path2/foo.view.lkml" + +explore: duplicate_explore { + from: my_view +} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path1/foo.view.lkml b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path1/foo.view.lkml new file mode 100644 index 0000000000000..40a981ebc7eb0 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path1/foo.view.lkml @@ -0,0 +1,47 @@ +view: my_view { + derived_table: { + sql: + SELECT + is_latest, + country, + city, + timestamp, + measurement + FROM + my_table ;; + } + + dimension: country { + type: string + description: "The country" + sql: ${TABLE}.country ;; + } + + dimension: city { + type: string + description: "City" + sql: ${TABLE}.city ;; + } + + dimension: is_latest { + type: yesno + description: "Is latest data" + sql: ${TABLE}.is_latest ;; + } + + dimension_group: timestamp { + group_label: "Timestamp" + type: time + description: "Timestamp 
of measurement" + sql: ${TABLE}.timestamp ;; + timeframes: [hour, date, week, day_of_week] + } + + measure: average_measurement { + group_label: "Measurement" + type: average + description: "My measurement" + sql: ${TABLE}.measurement ;; + } + +} diff --git a/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path2/foo.view.lkml b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path2/foo.view.lkml new file mode 100644 index 0000000000000..8bd8138f97386 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/lkml_same_name_views_different_file_path_samples/path2/foo.view.lkml @@ -0,0 +1,41 @@ +view: my_view { + derived_table: { + sql: + SELECT + is_latest, + country, + city, + timestamp, + measurement + FROM + my_table ;; + } + + dimension: city { + type: string + description: "City" + sql: ${TABLE}.city ;; + } + + dimension: is_latest { + type: yesno + description: "Is latest data" + sql: ${TABLE}.is_latest ;; + } + + dimension_group: timestamp { + group_label: "Timestamp" + type: time + description: "Timestamp of measurement" + sql: ${TABLE}.timestamp ;; + timeframes: [hour, date, week, day_of_week] + } + + measure: average_measurement { + group_label: "Measurement" + type: average + description: "My measurement" + sql: ${TABLE}.measurement ;; + } + +} diff --git a/metadata-ingestion/tests/integration/lookml/lookml_same_name_views_different_file_path.json b/metadata-ingestion/tests/integration/lookml/lookml_same_name_views_different_file_path.json new file mode 100644 index 0000000000000..c212cc33b66d4 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/lookml_same_name_views_different_file_path.json @@ -0,0 +1,587 @@ +[ +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path1.foo.view.my_view,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/looker/lkml_samples/path1/foo.view.lkml/views" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1586847600000, + "actor": "urn:li:corpuser:datahub" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:snowflake,warehouse.default_db.default_schema.my_table,DEV)", + "type": "VIEW" + } + ] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "my_view", + "platform": "urn:li:dataPlatform:looker", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "country", + "nullable": false, + "description": "The country", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "city", + "nullable": false, + "description": "City", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] 
+ }, + "isPartOfKey": false + }, + { + "fieldPath": "is_latest", + "nullable": false, + "description": "Is latest data", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.BooleanType": {} + } + }, + "nativeDataType": "yesno", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "timestamp", + "nullable": false, + "description": "Timestamp of measurement", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "time", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + }, + { + "tag": "urn:li:tag:Temporal" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "average_measurement", + "nullable": false, + "description": "My measurement", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "average", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Measure" + } + ] + }, + "isPartOfKey": false + } + ], + "primaryKeys": [] + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "looker.file.path": "path1/foo.view.lkml" + }, + "name": "my_view", + "tags": [] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path1.foo.view.my_view,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path1.foo.view.my_view,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "SELECT\n is_latest,\n country,\n city,\n timestamp,\n measurement\n FROM\n my_table", + "viewLanguage": "sql" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path1.foo.view.my_view,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "looker" + }, + { + "id": "lkml_samples" + }, + { + "id": "path1" + }, + { + "id": "foo.view.lkml" + }, + { + "id": "views" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path2.foo.view.my_view,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/looker/lkml_samples/path2/foo.view.lkml/views" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1586847600000, + "actor": "urn:li:corpuser:datahub" + }, + "dataset": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,warehouse.default_db.default_schema.my_table,DEV)", + "type": "VIEW" + } + ] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "my_view", + "platform": "urn:li:dataPlatform:looker", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "city", + "nullable": false, + "description": "City", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "is_latest", + "nullable": false, + "description": "Is latest data", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.BooleanType": {} + } + }, + "nativeDataType": "yesno", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "timestamp", + "nullable": false, + "description": "Timestamp of measurement", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "time", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + }, + { + "tag": "urn:li:tag:Temporal" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "average_measurement", + "nullable": false, + "description": "My measurement", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "average", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Measure" + } + ] + }, + "isPartOfKey": false + } + ], + "primaryKeys": [] + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "looker.file.path": "path2/foo.view.lkml" + }, + "name": "my_view", + "tags": [] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path2.foo.view.my_view,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path2.foo.view.my_view,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "SELECT\n is_latest,\n country,\n city,\n timestamp,\n measurement\n FROM\n my_table", + "viewLanguage": "sql" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.path2.foo.view.my_view,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "looker" + }, + { + "id": "lkml_samples" + }, + { + "id": "path2" + }, + { + "id": "foo.view.lkml" + 
}, + { + "id": "views" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot": { + "urn": "urn:li:tag:Dimension", + "aspects": [ + { + "com.linkedin.pegasus2avro.tag.TagProperties": { + "name": "Dimension", + "description": "A tag that is applied to all dimension fields." + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot": { + "urn": "urn:li:tag:Temporal", + "aspects": [ + { + "com.linkedin.pegasus2avro.tag.TagProperties": { + "name": "Temporal", + "description": "A tag that is applied to all time-based (temporal) fields such as timestamps or durations." + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot": { + "urn": "urn:li:tag:Measure", + "aspects": [ + { + "com.linkedin.pegasus2avro.tag.TagProperties": { + "name": "Measure", + "description": "A tag that is applied to all measures (metrics). Measures are typically the columns that you aggregate on" + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:Dimension", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:Measure", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:Temporal", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/test_lookml.py b/metadata-ingestion/tests/integration/lookml/test_lookml.py index b1853cfa2b3c0..a71b597863148 100644 --- a/metadata-ingestion/tests/integration/lookml/test_lookml.py +++ b/metadata-ingestion/tests/integration/lookml/test_lookml.py @@ -802,3 +802,53 @@ def test_lookml_base_folder(): pydantic.ValidationError, match=r"base_folder.+not provided.+deploy_key" ): LookMLSourceConfig.parse_obj({"api": fake_api}) + + +@freeze_time(FROZEN_TIME) +def test_same_name_views_different_file_path(pytestconfig, tmp_path, mock_time): + """Test for reachable views""" + test_resources_dir = pytestconfig.rootpath / "tests/integration/lookml" + mce_out = "lookml_same_name_views_different_file_path.json" + pipeline = Pipeline.create( + { + "run_id": "lookml-test", + "source": { + "type": "lookml", + "config": { + "base_folder": str( + test_resources_dir + / "lkml_same_name_views_different_file_path_samples" + ), + "connection_to_platform_map": { + "my_connection": { + "platform": "snowflake", + "platform_instance": "warehouse", 
+                        "platform_env": "dev",
+                        "default_db": "default_db",
+                        "default_schema": "default_schema",
+                    },
+                },
+                "parse_table_names_from_sql": True,
+                "project_name": "lkml_samples",
+                "process_refinements": False,
+                "view_naming_pattern": "{project}.{file_path}.view.{name}",
+                "view_browse_pattern": "/{env}/{platform}/{project}/{file_path}/views",
+            },
+        },
+        "sink": {
+            "type": "file",
+            "config": {
+                "filename": f"{tmp_path}/{mce_out}",
+            },
+        },
+    }
+    )
+    pipeline.run()
+    pipeline.pretty_print_summary()
+    pipeline.raise_from_status(raise_warnings=True)
+
+    mce_helpers.check_golden_file(
+        pytestconfig,
+        output_path=tmp_path / mce_out,
+        golden_path=test_resources_dir / mce_out,
+    )

From cacddf7e983198beb7ab54b3f74daf10017692b8 Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Mon, 27 Nov 2023 14:26:27 +0530
Subject: [PATCH 146/792] fix(deprecation): bring frontend in-sync with model (#9303)

---
 .../EntityDropdown/UpdateDeprecationModal.tsx | 2 +-
 .../components/styled/DeprecationPill.tsx | 17 +++++++++++++----
 2 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx
index 512735e60b2c3..6ae893e12575f 100644
--- a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx
+++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx
@@ -27,7 +27,7 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => {
                     resources: [...urns.map((urn) => ({ resourceUrn: urn }))],
                     deprecated: true,
                     note: formData.note,
-                    decommissionTime: formData.decommissionTime && formData.decommissionTime.unix(),
+                    decommissionTime: formData.decommissionTime && formData.decommissionTime.unix() * 1000,
                 },
             },
         });
diff --git a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx
index ffc32c1538259..f60a74247ebcc 100644
--- a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx
+++ b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx
@@ -83,15 +83,24 @@ export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: Props) => {
      */
     const localeTimezone = getLocaleTimezone();
+
+    let decommissionTimeSeconds;
+    if (deprecation.decommissionTime) {
+        if (deprecation.decommissionTime < 943920000000) {
+            // A time this far in the past cannot be a millisecond timestamp, so treat the value as epoch seconds
+            decommissionTimeSeconds = deprecation.decommissionTime;
+        } else {
+            decommissionTimeSeconds = deprecation.decommissionTime / 1000;
+        }
+    }
     const decommissionTimeLocal =
-        (deprecation.decommissionTime &&
+        (decommissionTimeSeconds &&
             `Scheduled to be decommissioned on ${moment
-                .unix(deprecation.decommissionTime)
+                .unix(decommissionTimeSeconds)
                 .format('DD/MMM/YYYY')} (${localeTimezone})`) ||
         undefined;
     const decommissionTimeGMT =
-        deprecation.decommissionTime &&
-        moment.unix(deprecation.decommissionTime).utc().format('dddd, DD/MMM/YYYY HH:mm:ss z');
+        decommissionTimeSeconds && moment.unix(decommissionTimeSeconds).utc().format('dddd, DD/MMM/YYYY HH:mm:ss z');
 
     const hasDetails = deprecation.note !== '' || deprecation.decommissionTime !== null;
     const isDividerNeeded = deprecation.note !== '' && deprecation.decommissionTime !== null;

From ae5969fcfb22e25e8b08987285ab2087d1373580 Mon Sep 17 00:00:00 2001
From: 
Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com>
Date: Mon, 27 Nov 2023 14:27:16 +0530
Subject: [PATCH 147/792] fix(settings): fix the settings height when there are not many items (#9294)

---
 datahub-web-react/src/app/settings/SettingsPage.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datahub-web-react/src/app/settings/SettingsPage.tsx b/datahub-web-react/src/app/settings/SettingsPage.tsx
index 69d4eb2b10b4d..e0a15c73a626d 100644
--- a/datahub-web-react/src/app/settings/SettingsPage.tsx
+++ b/datahub-web-react/src/app/settings/SettingsPage.tsx
@@ -25,11 +25,11 @@ import ManagePosts from './posts/ManagePosts';
 
 const PageContainer = styled.div`
     display: flex;
     overflow: auto;
+    flex: 1;
 `;
 
 const SettingsBarContainer = styled.div`
     padding-top: 20px;
-    max-height: 100vh;
     border-right: 1px solid ${ANTD_GRAY[5]};
     display: flex;
     flex-direction: column;

From 3806dc184ec372c2b48a2bc5f37dfd09e3e5809e Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Mon, 27 Nov 2023 16:12:44 +0530
Subject: [PATCH 148/792] docs: update recommended CLI (#9307)

---
 docs/managed-datahub/release-notes/v_0_2_13.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/managed-datahub/release-notes/v_0_2_13.md b/docs/managed-datahub/release-notes/v_0_2_13.md
index 65cea863d9714..c4fb1f7dcc252 100644
--- a/docs/managed-datahub/release-notes/v_0_2_13.md
+++ b/docs/managed-datahub/release-notes/v_0_2_13.md
@@ -3,11 +3,11 @@ Release Availability Date
 ---
-22-Nov-2023
+27-Nov-2023
 
 Recommended CLI/SDK
 ---
-- `v0.12.0.2` with release notes at https://github.com/acryldata/datahub/releases/tag/v0.12.0.2=
+- `v0.12.0.3` with release notes at https://github.com/acryldata/datahub/releases/tag/v0.12.0.3
 
 If you are using an older CLI/SDK version then please upgrade it. This applies for all CLI/SDK usages, if you are using it through your terminal, github actions, airflow, in python SDK somewhere, Java SDK etc. This is a strong recommendation to upgrade as we keep on pushing fixes in the CLI and it helps us support you better.
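A note on the decommission-time handling in PATCH 146 above: the cutoff constant 943920000000 is simply late November 1999 expressed in epoch milliseconds, so any stored value below it cannot plausibly be a millisecond timestamp and is read as epoch seconds instead. A minimal standalone sketch of that heuristic (the function name and sample values are illustrative and not part of the patch):

// Values that would land before ~1999-11-30 when read as milliseconds were
// evidently stored as epoch seconds.
const MS_CUTOFF = 943920000000;

function toEpochSeconds(decommissionTime: number): number {
    return decommissionTime < MS_CUTOFF ? decommissionTime : decommissionTime / 1000;
}

// Both calls below resolve to 2024-01-01T00:00:00Z:
toEpochSeconds(1704067200); // stored in seconds
toEpochSeconds(1704067200000); // stored in milliseconds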
From a9650b650c59c67fffb1df1292874bb5c8e884d8 Mon Sep 17 00:00:00 2001 From: Amanda Ng <10681923+ngamanda@users.noreply.github.com> Date: Tue, 28 Nov 2023 00:45:21 +0800 Subject: [PATCH 149/792] feat(ui): bump frontend dependencies (#8353) Co-authored-by: Chris Collins --- datahub-web-react/.eslintrc.js | 38 +- datahub-web-react/craco.config.js | 40 +- datahub-web-react/package.json | 55 +- datahub-web-react/src/__mocks__/styleMock.js | 1 + .../components/BarChart.tsx | 10 +- .../analyticsDashboard/components/Legend.tsx | 2 +- .../components/TimeSeriesChart.tsx | 96 +- .../components/legacy/MarkdownViewer.tsx | 1 + .../Stats/historical/charts/StatChart.tsx | 4 +- .../Dataset/Validations/BooleanTimeline.tsx | 8 +- .../src/app/lineage/ColumnNode.tsx | 2 +- .../src/app/lineage/LineageEntityColumns.tsx | 2 +- .../src/app/lineage/LineageEntityEdge.tsx | 6 +- .../src/app/lineage/LineageEntityNode.tsx | 4 +- .../src/app/lineage/LineageTree.tsx | 2 +- .../LineageTreeNodeAndEdgeRenderer.tsx | 4 +- .../src/app/lineage/LineageViz.tsx | 4 +- .../src/app/lineage/LineageVizInsideZoom.tsx | 4 +- .../src/app/lineage/LineageVizRootSvg.tsx | 4 +- .../src/app/lineage/NodeColumnsHeader.tsx | 2 +- .../lineage/__tests__/LineageTree.test.tsx | 2 +- .../__tests__/adjustVXTreeLayout.test.tsx | 2 +- .../app/lineage/utils/adjustVXTreeLayout.ts | 2 +- .../app/shared/share/items/EmailMenuItem.tsx | 31 +- .../src/conf/theme/styled-components.d.ts | 2 +- datahub-web-react/src/graphql-mock/server.ts | 1 + datahub-web-react/tsconfig.json | 4 +- datahub-web-react/yarn.lock | 12838 ++++++---------- 28 files changed, 5296 insertions(+), 7875 deletions(-) create mode 100644 datahub-web-react/src/__mocks__/styleMock.js diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js index 3cdc747d100b5..2806942dd1053 100644 --- a/datahub-web-react/.eslintrc.js +++ b/datahub-web-react/.eslintrc.js @@ -1,15 +1,14 @@ module.exports = { parser: '@typescript-eslint/parser', // Specifies the ESLint parser extends: [ - 'react-app', - 'plugin:react/recommended', // Uses the recommended rules from @eslint-plugin-react - 'plugin:@typescript-eslint/recommended', // Uses the recommended rules from @typescript-eslint/eslint-plugin - 'plugin:jest/recommended', + 'airbnb', 'airbnb-typescript', 'airbnb/hooks', + 'plugin:@typescript-eslint/recommended', + 'plugin:jest/recommended', 'prettier', - 'plugin:prettier/recommended', ], + plugins: ['@typescript-eslint'], parserOptions: { ecmaVersion: 2020, // Allows for the parsing of modern ECMAScript features sourceType: 'module', // Allows for the use of imports @@ -19,18 +18,27 @@ module.exports = { project: './tsconfig.json', }, rules: { - eqeqeq: ['error', 'always'], - 'react/destructuring-assignment': 'off', - 'no-console': 'off', - 'no-debugger': 'warn', - 'require-await': 'warn', + '@typescript-eslint/no-explicit-any': 'off', + 'arrow-body-style': 'off', + 'class-methods-use-this': 'off', + 'import/no-extraneous-dependencies': 'off', 'import/prefer-default-export': 'off', // TODO: remove this lint rule - 'import/extensions': 'off', - 'react/jsx-props-no-spreading': 'off', + 'no-console': 'off', 'no-plusplus': 'off', 'no-prototype-builtins': 'off', - 'react/require-default-props': 'off', + 'no-restricted-exports': ['off', { restrictedNamedExports: ['default', 'then'] }], 'no-underscore-dangle': 'off', + 'no-unsafe-optional-chaining': 'off', + 'prefer-exponentiation-operator': 'off', + 'prefer-regex-literals': 'off', + 'react/destructuring-assignment': 'off', + 
'react/function-component-definition': 'off', + 'react/jsx-no-bind': 'off', + 'react/jsx-no-constructed-context-values': 'off', + 'react/jsx-no-useless-fragment': 'off', + 'react/jsx-props-no-spreading': 'off', + 'react/no-unstable-nested-components': 'off', + 'react/require-default-props': 'off', '@typescript-eslint/no-unused-vars': [ 'error', { @@ -38,10 +46,6 @@ module.exports = { argsIgnorePattern: '^_', }, ], - '@typescript-eslint/no-empty-interface': 'off', - "@typescript-eslint/explicit-module-boundary-types": "off", - "@typescript-eslint/no-explicit-any": 'off', - "import/no-extraneous-dependencies": 'off' }, settings: { react: { diff --git a/datahub-web-react/craco.config.js b/datahub-web-react/craco.config.js index d3ed895cf840e..6ede45902128f 100644 --- a/datahub-web-react/craco.config.js +++ b/datahub-web-react/craco.config.js @@ -1,8 +1,11 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ require('dotenv').config(); +const { whenProd } = require('@craco/craco'); const CracoAntDesignPlugin = require('craco-antd'); const path = require('path'); const CopyWebpackPlugin = require('copy-webpack-plugin'); +// eslint-disable-next-line import/no-dynamic-require const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`); function addLessPrefixToKeys(styles) { @@ -15,6 +18,23 @@ function addLessPrefixToKeys(styles) { module.exports = { webpack: { + configure: { + optimization: whenProd(() => ({ + splitChunks: { + cacheGroups: { + vendor: { + test: /[\\/]node_modules[\\/]/, + name: 'vendors', + chunks: 'all', + }, + }, + }, + })), + // Webpack 5 no longer automatically pollyfill core Node.js modules + resolve: { fallback: { fs: false } }, + // Ignore Webpack 5's missing source map warnings from node_modules + ignoreWarnings: [{ module: /node_modules/, message: /source-map-loader/ }], + }, plugins: { add: [ // Self host images by copying them to the build directory @@ -24,8 +44,8 @@ module.exports = { // Copy monaco-editor files to the build directory new CopyWebpackPlugin({ patterns: [ - { from: "node_modules/monaco-editor/min/vs/", to: "monaco-editor/vs" }, - { from: "node_modules/monaco-editor/min-maps/vs/", to: "monaco-editor/min-maps/vs" }, + { from: 'node_modules/monaco-editor/min/vs/', to: 'monaco-editor/vs' }, + { from: 'node_modules/monaco-editor/min-maps/vs/', to: 'monaco-editor/min-maps/vs' }, ], }), ], @@ -41,13 +61,15 @@ module.exports = { }, ], jest: { - configure: (jestConfig) => { - jestConfig.transformIgnorePatterns = [ - // Ensures that lib0 and y-protocol libraries are transformed through babel as well - 'node_modules/(?!(lib0|y-protocols)).+\\.(js|jsx|mjs|cjs|ts|tsx)$', - '^.+\\.module\\.(css|sass|scss)$', - ]; - return jestConfig; + configure: { + // Use dist files instead of source files + moduleNameMapper: { + '^d3-interpolate-path': `d3-interpolate-path/build/d3-interpolate-path`, + '^d3-(.*)$': `d3-$1/dist/d3-$1`, + '^lib0/((?!dist).*)$': 'lib0/dist/$1.cjs', + '^y-protocols/(.*)$': 'y-protocols/dist/$1.cjs', + '\\.(css|less)$': '/src/__mocks__/styleMock.js', + }, }, }, }; diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index a72d9c0a898f8..f55588e46c9c7 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -9,12 +9,12 @@ "@ant-design/colors": "^5.0.0", "@ant-design/icons": "^4.3.0", "@apollo/client": "^3.3.19", - "@craco/craco": "^6.1.1", - "@data-ui/xy-chart": "^0.0.84", + "@craco/craco": "^7.1.0", "@graphql-codegen/fragment-matcher": "^5.0.0", "@miragejs/graphql": 
"^0.1.11", "@monaco-editor/react": "^4.3.1", "@react-hook/window-size": "^3.0.7", + "@react-spring/web": "^9.7.3", "@remirror/pm": "^2.0.3", "@remirror/react": "^2.0.24", "@remirror/styles": "^2.0.3", @@ -30,21 +30,22 @@ "@types/react-router": "^5.1.8", "@types/react-router-dom": "^5.1.6", "@uiw/react-md-editor": "^3.3.4", - "@vx/axis": "^0.0.199", - "@vx/glyph": "^0.0.199", - "@vx/group": "^0.0.199", - "@vx/hierarchy": "^0.0.199", - "@vx/legend": "^0.0.199", - "@vx/scale": "^0.0.199", - "@vx/shape": "^0.0.199", - "@vx/tooltip": "^0.0.199", - "@vx/zoom": "^0.0.199", + "@visx/axis": "^3.1.0", + "@visx/curve": "^3.0.0", + "@visx/group": "^3.0.0", + "@visx/hierarchy": "^3.0.0", + "@visx/legend": "^3.2.0", + "@visx/scale": "^3.2.0", + "@visx/shape": "^3.2.0", + "@visx/xychart": "^3.2.0", + "@visx/zoom": "^3.1.1", "analytics": "^0.8.9", "antd": "4.24.7", "color-hash": "^2.0.1", - "craco-antd": "^1.19.0", + "craco-antd": "^2.0.0", + "craco-babel-loader": "^1.0.4", "cronstrue": "^1.122.0", - "d3-scale": "^3.3.0", + "d3-scale": "^4.0.2", "dayjs": "^1.11.7", "deepmerge": "^4.2.2", "diff": "^5.0.0", @@ -67,14 +68,13 @@ "react": "^17.0.0", "react-color": "^2.19.3", "react-dom": "^17.0.0", - "react-email-share-link": "^1.0.3", "react-helmet-async": "^1.3.0", "react-highlighter": "^0.4.3", "react-icons": "4.3.1", "react-js-cron": "^2.1.0", "react-router": "^5.2.0", "react-router-dom": "^5.1.6", - "react-scripts": "4.0.3", + "react-scripts": "^5.0.1", "react-syntax-highlighter": "^15.4.4", "react-visibility-sensor": "^5.1.1", "reactour": "1.18.7", @@ -83,7 +83,7 @@ "start-server-and-test": "1.12.2", "styled-components": "^5.2.1", "turndown-plugin-gfm": "^1.0.2", - "typescript": "^4.1.3", + "typescript": "^4.8.4", "uuid": "^8.3.2", "virtualizedtableforantd4": "^1.2.1", "web-vitals": "^0.2.4", @@ -123,6 +123,7 @@ ] }, "devDependencies": { + "@babel/plugin-proposal-private-property-in-object": "^7.21.11", "@graphql-codegen/cli": "1.20.0", "@graphql-codegen/near-operation-file-preset": "^1.17.13", "@graphql-codegen/typescript-operations": "1.17.13", @@ -130,17 +131,19 @@ "@types/graphql": "^14.5.0", "@types/query-string": "^6.3.0", "@types/styled-components": "^5.1.7", - "@typescript-eslint/eslint-plugin": "^4.25.0", - "@typescript-eslint/parser": "^4.25.0", - "babel-loader": "8.2.2", - "copy-webpack-plugin": "6.4.1", - "eslint": "^7.27.0", - "eslint-config-airbnb-typescript": "^12.3.1", - "eslint-config-prettier": "^8.3.0", - "eslint-plugin-prettier": "^3.4.0", - "eslint-plugin-react": "^7.23.2", + "@typescript-eslint/eslint-plugin": "^5.38.1", + "@typescript-eslint/parser": "^5.38.1", + "copy-webpack-plugin": "^11.0.0", + "eslint": "^8.2.0", + "eslint-config-airbnb": "19.0.4", + "eslint-config-airbnb-typescript": "^17.0.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.25.3", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.28.0", + "eslint-plugin-react-hooks": "^4.3.0", "http-proxy-middleware": "2.0.0", - "prettier": "^2.3.0", + "prettier": "^2.8.8", "source-map-explorer": "^2.5.2" }, "resolutions": { diff --git a/datahub-web-react/src/__mocks__/styleMock.js b/datahub-web-react/src/__mocks__/styleMock.js new file mode 100644 index 0000000000000..f053ebf7976e3 --- /dev/null +++ b/datahub-web-react/src/__mocks__/styleMock.js @@ -0,0 +1 @@ +module.exports = {}; diff --git a/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx b/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx index f178b09afcccc..0c9909313e27e 100644 --- 
a/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx
+++ b/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx
@@ -1,8 +1,8 @@
 import React, { useMemo } from 'react';
-import { BarStack } from '@vx/shape';
-import { scaleOrdinal, scaleLinear, scaleBand } from '@vx/scale';
-import { Group } from '@vx/group';
-import { AxisBottom, AxisRight } from '@vx/axis';
+import { BarStack } from '@visx/shape';
+import { scaleOrdinal, scaleLinear, scaleBand } from '@visx/scale';
+import { Group } from '@visx/group';
+import { AxisBottom, AxisRight } from '@visx/axis';
 import { BarChart as BarChartType } from '../../../types.generated';
 import { lineColors } from './lineColors';
@@ -85,7 +85,7 @@ export const BarChart = ({ chartData, width, height }: Props) => {
-            <BarStack
+            <BarStack<any, string>
                 data={transformedChartData}
                 keys={keys}
                 x={(data) => data.displayName}
diff --git a/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx b/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx
index c4ef51fb3938b..7796713961ca3 100644
--- a/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx
+++ b/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx
@@ -1,6 +1,6 @@
 import React from 'react';
 import { Col, Row } from 'antd';
-import { LegendOrdinal, LegendItem, LegendLabel } from '@vx/legend';
+import { LegendOrdinal, LegendItem, LegendLabel } from '@visx/legend';
 import { ScaleOrdinal } from 'd3-scale/src/ordinal';
 import styled from 'styled-components';
diff --git a/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx b/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx
index 6b9b808abfd0f..68851a950bcc5 100644
--- a/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx
+++ b/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx
@@ -1,17 +1,15 @@
 import React, { useMemo } from 'react';
-import { XYChart, LineSeries, CrossHair, XAxis, YAxis } from '@data-ui/xy-chart';
-import { scaleOrdinal } from '@vx/scale';
+import styled from 'styled-components';
+import { AxisScaleOutput } from '@visx/axis';
+import { Axis, LineSeries, XYChart, Tooltip, GlyphSeries } from '@visx/xychart';
+import { curveMonotoneX } from '@visx/curve';
+import { ScaleConfig, scaleOrdinal } from '@visx/scale';
 import { TimeSeriesChart as TimeSeriesChartType, NumericDataPoint, NamedLine } from '../../../types.generated';
 import { lineColors } from './lineColors';
 import Legend from './Legend';
 import { addInterval } from '../../shared/time/timeUtils';
 import { formatNumber } from '../../shared/formatNumber';

-type ScaleConfig = {
-    type: 'time' | 'timeUtc' | 'linear' | 'band' | 'ordinal';
-    includeZero?: boolean;
-};
-
 type AxisConfig = {
     formatter: (tick: number) => string;
 };
@@ -29,10 +27,15 @@ type Props = {
         crossHairLineColor?: string;
     };
     insertBlankPoints?: boolean;
-    yScale?: ScaleConfig;
+    yScale?: ScaleConfig<AxisScaleOutput, any, any>;
     yAxis?: AxisConfig;
 };

+const StyledTooltip = styled(Tooltip)`
+    font-family: inherit !important;
+    font-weight: 400 !important;
+`;
+
 const MARGIN = {
     TOP: 40,
     RIGHT: 45,
@@ -40,6 +43,11 @@ const MARGIN = {
     LEFT: 40,
 };

+const accessors = {
+    xAccessor: (d) => d.x,
+    yAccessor: (d) => d.y,
+};
+
 function insertBlankAt(ts: number, newLine: Array<NumericDataPoint>) {
     const dateString = new Date(ts).toISOString();
     for (let i = 0; i < newLine.length; i++) {
@@ -96,41 +104,61 @@ export const TimeSeriesChart = ({
     return (
         <>
-            <XYChart
-                width={width}
-                height={height}
-                margin={{ top: MARGIN.TOP, right: MARGIN.RIGHT, bottom: MARGIN.BOTTOM, left: MARGIN.LEFT }}
-                xScale={{ type: 'time' }}
-                yScale={yScale || { type: 'linear' }}
-                renderTooltip={({ datum }) => (
-                    <div>
-                        <div>{new Date(Number(datum.x)).toDateString()}</div>
-                        <div>{datum.y}</div>
-                    </div>
-                )}
-                snapTooltipToDataX={false}
-            >
-                <XAxis />
-                <YAxis />
+            <XYChart
+                width={width}
+                height={height}
+                margin={{ top: MARGIN.TOP, right: MARGIN.RIGHT, bottom: MARGIN.BOTTOM, left: MARGIN.LEFT }}
+                xScale={{ type: 'time' }}
+                yScale={yScale ?? { type: 'linear' }}
+            >
+                <Axis
+                    orientation="right"
+                    tickFormat={(tick) => (yAxis?.formatter ? yAxis.formatter(tick) : formatNumber(tick))}
+                    tickLabelProps={{ fill: 'black', fontFamily: 'inherit', fontSize: 10 }}
+                    numTicks={3}
+                />
             {lines.map((line, i) => (
-                <LineSeries
-                    data={line.data.map((point) => ({ x: new Date(point.x).getTime().toString(), y: point.y }))}
-                    stroke={(style && style.lineColor) || lineColors[i]}
-                />
+                <>
+                    <LineSeries
+                        data={line.data.map((point) => ({ x: new Date(point.x), y: point.y }))}
+                        stroke={(style && style.lineColor) || lineColors[i]}
+                        curve={curveMonotoneX}
+                        {...accessors}
+                    />
+                    <GlyphSeries
+                        data={line.data.map((point) => ({ x: new Date(point.x), y: point.y }))}
+                        {...accessors}
+                    />
+                </>
             ))}
-                <CrossHair />
-            </XYChart>
+            <StyledTooltip
+                renderTooltip={({ tooltipData }) =>
+                    tooltipData?.nearestDatum && (
+                        <div>
+                            <div>
+                                {new Date(
+                                    Number(accessors.xAccessor(tooltipData.nearestDatum.datum)),
+                                ).toDateString()}
+                            </div>
+                            <div>{accessors.yAccessor(tooltipData.nearestDatum.datum)}</div>
+                        </div>
+                    )
+                }
+            />
+            </XYChart>
         {!hideLegend && <Legend ordinalScale={ordinalColorScale} />}
diff --git a/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx b/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx
index 669e32520b038..579c58214aaa1 100644
--- a/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx
+++ b/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx
@@ -76,6 +76,7 @@ export const MarkdownView = styled(MDEditor.Markdown)`
 export type Props = {
     source: string;
     limit?: number;
+    // eslint-disable-next-line react/no-unused-prop-types
     isCompact?: boolean;
     editable?: boolean;
     onEditClicked?: () => void;
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/historical/charts/StatChart.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/historical/charts/StatChart.tsx
index db5b1a59759b1..e3955bc27b9d7 100644
--- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/historical/charts/StatChart.tsx
+++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/historical/charts/StatChart.tsx
@@ -16,7 +16,7 @@ const ChartTitle = styled(Typography.Text)`
 const ChartCard = styled(Card)<{ visible: boolean }>`
     box-shadow: ${(props) => props.theme.styles['box-shadow']};
-    visibility: ${(props) => (props.visible ? 'visible' : 'hidden')}; ;
+    visibility: ${(props) => (props.visible ? 'visible' : 'hidden')};
 `;

 type Point = {
@@ -87,7 +87,7 @@ export default function StatChart({ title, values, tickInterval: interval, dateRange }: Props) {
             }}
             width={360}
             height={300}
-            yScale={{ type: 'linear', includeZero: false }}
+            yScale={{ type: 'linear', zero: false }}
             yAxis={yAxis}
         />
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/BooleanTimeline.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/BooleanTimeline.tsx
index cd6f08fc133cf..27db7b0e96612 100644
--- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/BooleanTimeline.tsx
+++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/BooleanTimeline.tsx
@@ -1,9 +1,9 @@
 import React, { useMemo } from 'react';
 import { Popover } from 'antd';
-import { Bar } from '@vx/shape';
-import { Group } from '@vx/group';
-import { AxisBottom } from '@vx/axis';
-import { scaleUtc } from '@vx/scale';
+import { Bar } from '@visx/shape';
+import { Group } from '@visx/group';
+import { AxisBottom } from '@visx/axis';
+import { scaleUtc } from '@visx/scale';
 import { ANTD_GRAY } from '../../../constants';

 export type BooleanResult = {
diff --git a/datahub-web-react/src/app/lineage/ColumnNode.tsx b/datahub-web-react/src/app/lineage/ColumnNode.tsx
index bee79badf1ed6..bfec978c60106 100644
--- a/datahub-web-react/src/app/lineage/ColumnNode.tsx
+++ b/datahub-web-react/src/app/lineage/ColumnNode.tsx
@@ -1,6 +1,6 @@
 import React, { useContext, useState } from 'react';
 import styled from 'styled-components/macro';
-import { Group } from '@vx/group';
+import { Group } from '@visx/group';
 import { SchemaField } from '../../types.generated';
 import { downgradeV2FieldPath } from '../entity/dataset/profile/schema/utils/utils';
 import { NodeData } from './types';
diff --git a/datahub-web-react/src/app/lineage/LineageEntityColumns.tsx b/datahub-web-react/src/app/lineage/LineageEntityColumns.tsx
index 5f288c6e993ca..281d6381741f8 100644
--- a/datahub-web-react/src/app/lineage/LineageEntityColumns.tsx
+++ b/datahub-web-react/src/app/lineage/LineageEntityColumns.tsx
@@ -1,5 +1,5 @@
 import React, { useContext, useEffect, useState } from 'react';
-import { Group } from '@vx/group'; +import { Group } from '@visx/group'; import { Pagination } from 'antd'; import styled from 'styled-components'; import { NodeData, EntitySelectParams } from './types'; diff --git a/datahub-web-react/src/app/lineage/LineageEntityEdge.tsx b/datahub-web-react/src/app/lineage/LineageEntityEdge.tsx index 1505ce71f7027..a0a7db63381df 100644 --- a/datahub-web-react/src/app/lineage/LineageEntityEdge.tsx +++ b/datahub-web-react/src/app/lineage/LineageEntityEdge.tsx @@ -4,9 +4,9 @@ import { ClockCircleOutlined, EyeOutlined } from '@ant-design/icons'; import dayjs from 'dayjs'; import LocalizedFormat from 'dayjs/plugin/localizedFormat'; import styled from 'styled-components'; -import { Group } from '@vx/group'; -import { curveBasis } from '@vx/curve'; -import { LinePath } from '@vx/shape'; +import { Group } from '@visx/group'; +import { curveBasis } from '@visx/curve'; +import { LinePath } from '@visx/shape'; import { VizEdge } from './types'; import { ANTD_GRAY } from '../entity/shared/constants'; diff --git a/datahub-web-react/src/app/lineage/LineageEntityNode.tsx b/datahub-web-react/src/app/lineage/LineageEntityNode.tsx index f5be1d57db070..9b6475b648ca1 100644 --- a/datahub-web-react/src/app/lineage/LineageEntityNode.tsx +++ b/datahub-web-react/src/app/lineage/LineageEntityNode.tsx @@ -1,6 +1,6 @@ import React, { useContext, useEffect, useMemo, useState } from 'react'; -import { Group } from '@vx/group'; -import { LinkHorizontal } from '@vx/shape'; +import { Group } from '@visx/group'; +import { LinkHorizontal } from '@visx/shape'; import styled from 'styled-components'; import { useEntityRegistry } from '../useEntityRegistry'; diff --git a/datahub-web-react/src/app/lineage/LineageTree.tsx b/datahub-web-react/src/app/lineage/LineageTree.tsx index 5e74c7f072947..8b5de4e78ff17 100644 --- a/datahub-web-react/src/app/lineage/LineageTree.tsx +++ b/datahub-web-react/src/app/lineage/LineageTree.tsx @@ -1,5 +1,5 @@ import React, { useContext, useEffect, useMemo, useState } from 'react'; -import { TransformMatrix } from '@vx/zoom/lib/types'; +import { TransformMatrix } from '@visx/zoom/lib/types'; import { NodeData, EntitySelectParams, TreeProps, EntityAndType, FetchedEntity, UpdatedLineages } from './types'; import LineageTreeNodeAndEdgeRenderer from './LineageTreeNodeAndEdgeRenderer'; diff --git a/datahub-web-react/src/app/lineage/LineageTreeNodeAndEdgeRenderer.tsx b/datahub-web-react/src/app/lineage/LineageTreeNodeAndEdgeRenderer.tsx index 638a207999e2b..bec83c80107b3 100644 --- a/datahub-web-react/src/app/lineage/LineageTreeNodeAndEdgeRenderer.tsx +++ b/datahub-web-react/src/app/lineage/LineageTreeNodeAndEdgeRenderer.tsx @@ -1,6 +1,6 @@ import React, { useContext } from 'react'; -import { Group } from '@vx/group'; -import { TransformMatrix } from '@vx/zoom/lib/types'; +import { Group } from '@visx/group'; +import { TransformMatrix } from '@visx/zoom/lib/types'; import { NodeData, EntitySelectParams, TreeProps, VizNode, VizEdge, EntityAndType, UpdatedLineages } from './types'; import LineageEntityNode from './LineageEntityNode'; diff --git a/datahub-web-react/src/app/lineage/LineageViz.tsx b/datahub-web-react/src/app/lineage/LineageViz.tsx index 05b269ccabb47..d70bf72e4cbaf 100644 --- a/datahub-web-react/src/app/lineage/LineageViz.tsx +++ b/datahub-web-react/src/app/lineage/LineageViz.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { useWindowSize } from '@react-hook/window-size'; -import { Zoom } from '@vx/zoom'; +import { Zoom } from '@visx/zoom'; import { 
TreeProps } from './types';
import LineageVizInsideZoom from './LineageVizInsideZoom';
@@ -42,7 +42,7 @@ export default function LineageViz({
             scaleXMax={2}
             scaleYMin={1 / 8}
             scaleYMax={2}
-            transformMatrix={initialTransform}
+            initialTransformMatrix={initialTransform}
         >
             {(zoom) => (
                 <LineageVizInsideZoom
diff --git a/datahub-web-react/src/app/lineage/LineageVizInsideZoom.tsx b/datahub-web-react/src/app/lineage/LineageVizInsideZoom.tsx
--- a/datahub-web-react/src/app/lineage/LineageVizInsideZoom.tsx
+++ b/datahub-web-react/src/app/lineage/LineageVizInsideZoom.tsx
@@ -15,7 +15,7 @@ type Props = {
     onEntityCenter: (EntitySelectParams) => void;
     onLineageExpand: (data: EntityAndType) => void;
     selectedEntity?: EntitySelectParams;
-    zoom: ProvidedZoom & {
+    zoom: ProvidedZoom<SVGSVGElement> & {
         transformMatrix: TransformMatrix;
         isDragging: boolean;
     };
diff --git a/datahub-web-react/src/app/lineage/LineageVizRootSvg.tsx b/datahub-web-react/src/app/lineage/LineageVizRootSvg.tsx
index 0d4bd483358aa..434fb1562bc2a 100644
--- a/datahub-web-react/src/app/lineage/LineageVizRootSvg.tsx
+++ b/datahub-web-react/src/app/lineage/LineageVizRootSvg.tsx
@@ -1,4 +1,4 @@
-import { ProvidedZoom, TransformMatrix } from '@vx/zoom/lib/types';
+import { ProvidedZoom, TransformMatrix } from '@visx/zoom/lib/types';
 import React, { SVGProps, useEffect, useMemo, useState } from 'react';
 import styled from 'styled-components/macro';

@@ -15,7 +15,7 @@ type Props = {
     onEntityCenter: (EntitySelectParams) => void;
     onLineageExpand: (data: EntityAndType) => void;
     selectedEntity?: EntitySelectParams;
-    zoom: ProvidedZoom & {
+    zoom: ProvidedZoom<SVGSVGElement> & {
         transformMatrix: TransformMatrix;
         isDragging: boolean;
     };
diff --git a/datahub-web-react/src/app/lineage/NodeColumnsHeader.tsx b/datahub-web-react/src/app/lineage/NodeColumnsHeader.tsx
index 4b8c4303cec5a..ea2ae4a1a6ee6 100644
--- a/datahub-web-react/src/app/lineage/NodeColumnsHeader.tsx
+++ b/datahub-web-react/src/app/lineage/NodeColumnsHeader.tsx
@@ -1,6 +1,6 @@
 import React, { useContext, useState } from 'react';
 import { Button, Input } from 'antd';
-import { Group } from '@vx/group';
+import { Group } from '@visx/group';
 import styled from 'styled-components';
 import { DownOutlined, SearchOutlined, UpOutlined } from '@ant-design/icons';
 import { blue } from '@ant-design/colors';
diff --git a/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx b/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx
index 64b6e4f8df520..594a7ade24264 100644
--- a/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx
+++ b/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx
@@ -1,6 +1,6 @@
 import React from 'react';
 import { render } from '@testing-library/react';
-import { Zoom } from '@vx/zoom';
+import { Zoom } from '@visx/zoom';
 import { MockedProvider } from '@apollo/client/testing';
 import {
     dataset3WithLineage,
diff --git a/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx b/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx
index cd97ba11bc50e..27d8d72d2375b 100644
--- a/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx
+++ b/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx
@@ -1,5 +1,5 @@
 import React from 'react';
-import { Tree, hierarchy } from '@vx/hierarchy';
+import { Tree, hierarchy } from '@visx/hierarchy';
 import { render } from '@testing-library/react';

 import {
diff --git a/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts b/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts
index 307f98b688102..3fde422494555 100644
--- a/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts
+++ b/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts
@@ -1,4 +1,4 @@
-import { HierarchyPointNode } from '@vx/hierarchy/lib/types';
+import { HierarchyPointNode } from '@visx/hierarchy/lib/types';
 import { NodeData, Direction } from '../types';
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 import { width as nodeWidth } from '../constants';
diff --git a/datahub-web-react/src/app/shared/share/items/EmailMenuItem.tsx b/datahub-web-react/src/app/shared/share/items/EmailMenuItem.tsx
index e172e961c4fa0..f65e25b140aeb 100644
--- a/datahub-web-react/src/app/shared/share/items/EmailMenuItem.tsx
+++ b/datahub-web-react/src/app/shared/share/items/EmailMenuItem.tsx
@@ -2,7 +2,7 @@ import React, { useState } from 'react';
 import styled from 'styled-components';
 import { Tooltip } from 'antd';
 import { CheckOutlined, MailOutlined } from '@ant-design/icons';
-import EmailShare from 'react-email-share-link';
+import qs from 'query-string';
 import MenuItem from 'antd/lib/menu/MenuItem';
 import { ANTD_GRAY } from '../../../entity/shared/constants';
@@ -31,26 +31,29 @@ export default function EmailMenuItem({ urn, name, type, key }: EmailMenuItemProps) {
     const [isClicked, setIsClicked] = useState(false);
     const linkText = window.location.href;

+    const link = qs.stringifyUrl({
+        url: 'mailto:',
+        query: {
+            subject: `${name} | ${type}`,
+            body: `Check out this ${type} on DataHub: ${linkText}. Urn: ${urn}`,
+        },
+    });
+
     return (
         <MenuItem
             key={key}
             onClick={() => {
-                navigator.clipboard.writeText(urn);
                 setIsClicked(true);
             }}
         >
-            <EmailShare subject={`${name} | ${type}`} body={`Check out this ${type} on DataHub: ${linkText}. Urn: ${urn}`}>
-                {(link) => (
-                    <a href={link}>
-                        {isClicked ? <CheckOutlined /> : <MailOutlined />}
-                        <Tooltip title="Share via email">
-                            <TextSpan>
-                                Email
-                            </TextSpan>
-                        </Tooltip>
-                    </a>
-                )}
-            </EmailShare>
+            <a href={link}>
+                {isClicked ? <CheckOutlined /> : <MailOutlined />}
+                <Tooltip title="Share via email">
+                    <TextSpan>
+                        Email
+                    </TextSpan>
+                </Tooltip>
+            </a>
         </MenuItem>
     );
 }
diff --git a/datahub-web-react/src/conf/theme/styled-components.d.ts b/datahub-web-react/src/conf/theme/styled-components.d.ts
index 220f9853572a9..3f9c7c4a8348d 100644
--- a/datahub-web-react/src/conf/theme/styled-components.d.ts
+++ b/datahub-web-react/src/conf/theme/styled-components.d.ts
@@ -1,6 +1,6 @@
 import { Theme } from './types';

 declare module 'styled-components' {
-    // eslint:disable-next-line @typescript-eslint/no-empty-interface
+    // eslint-disable-next-line @typescript-eslint/no-empty-interface
     export interface DefaultTheme extends Theme {}
 }
diff --git a/datahub-web-react/src/graphql-mock/server.ts b/datahub-web-react/src/graphql-mock/server.ts
index a4896e7290548..678527599238b 100644
--- a/datahub-web-react/src/graphql-mock/server.ts
+++ b/datahub-web-react/src/graphql-mock/server.ts
@@ -69,6 +69,7 @@ export function makeServerForCypress() {
     routes() {
         for (const domain of ['/*', ...otherDomains]) {
             for (const method of methods) {
+                // eslint-disable-next-line @typescript-eslint/no-loop-func
                 this[method](`${domain}`, async (_schema, request) => {
                     // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                     // @ts-ignore
diff --git a/datahub-web-react/tsconfig.json b/datahub-web-react/tsconfig.json
index e10aca8d5c7b2..760c992f2ca3a 100644
--- a/datahub-web-react/tsconfig.json
+++ b/datahub-web-react/tsconfig.json
@@ -1,6 +1,6 @@
 {
     "compilerOptions": {
-        "target": "es5",
+        "target": "es2017",
         "lib": ["dom", "dom.iterable", "esnext"],
         "allowJs": true,
         "skipLibCheck": true,
@@ -17,5 +17,5 @@
         "noEmit": true,
         "jsx": "react-jsx"
     },
-    "include": ["src", "src/conf/theme/styled-components.d.ts"]
+    "include": ["src", "src/conf/theme/styled-components.d.ts", "craco.config.js", ".eslintrc.js"]
 }
diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock
index ce0f2f514dad1..fbc800c93c460 100644
--- a/datahub-web-react/yarn.lock
+++ b/datahub-web-react/yarn.lock
@@ -2,12 +2,17 @@
 # yarn lockfile v1

-"@ampproject/remapping@^2.1.0":
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d"
-  
integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== +"@alloc/quick-lru@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" + integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== + +"@ampproject/remapping@^2.2.0": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== dependencies: - "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" "@analytics/amplitude@0.0.3": @@ -94,28 +99,12 @@ dependencies: "@ctrl/tinycolor" "^3.3.1" -"@ant-design/icons-svg@^4.0.0": - version "4.1.0" - resolved "https://registry.yarnpkg.com/@ant-design/icons-svg/-/icons-svg-4.1.0.tgz#480b025f4b20ef7fe8f47d4a4846e4fee84ea06c" - integrity sha512-Fi03PfuUqRs76aI3UWYpP864lkrfPo0hluwGqh7NJdLhvH4iRDc3jbJqZIvRDLHKbXrvAfPPV3+zjUccfFvWOQ== - "@ant-design/icons-svg@^4.2.1": version "4.2.1" resolved "https://registry.yarnpkg.com/@ant-design/icons-svg/-/icons-svg-4.2.1.tgz#8630da8eb4471a4aabdaed7d1ff6a97dcb2cf05a" integrity sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw== -"@ant-design/icons@^4.3.0": - version "4.6.2" - resolved "https://registry.yarnpkg.com/@ant-design/icons/-/icons-4.6.2.tgz#290f2e8cde505ab081fda63e511e82d3c48be982" - integrity sha512-QsBG2BxBYU/rxr2eb8b2cZ4rPKAPBpzAR+0v6rrZLp/lnyvflLH3tw1vregK+M7aJauGWjIGNdFmUfpAOtw25A== - dependencies: - "@ant-design/colors" "^6.0.0" - "@ant-design/icons-svg" "^4.0.0" - "@babel/runtime" "^7.11.2" - classnames "^2.2.6" - rc-util "^5.9.4" - -"@ant-design/icons@^4.7.0": +"@ant-design/icons@^4.3.0", "@ant-design/icons@^4.7.0": version "4.7.0" resolved "https://registry.yarnpkg.com/@ant-design/icons/-/icons-4.7.0.tgz#8c3cbe0a556ba92af5dc7d1e70c0b25b5179af0f" integrity sha512-aoB4Z7JA431rt6d4u+8xcNPPCrdufSRMUOpxa1ab6mz1JCQZOEVolj2WVs/tDFmN62zzK30mNelEsprLYsSF3g== @@ -137,6 +126,15 @@ lodash "^4.17.21" resize-observer-polyfill "^1.5.1" +"@apideck/better-ajv-errors@^0.3.1": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" + integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== + dependencies: + json-schema "^0.4.0" + jsonpointer "^5.0.0" + leven "^3.1.0" + "@apollo/client@^3.3.19": version "3.3.19" resolved "https://registry.yarnpkg.com/@apollo/client/-/client-3.3.19.tgz#f1172dc9b9d7eae04c8940b047fd3b452ef92d2c" @@ -186,549 +184,262 @@ signedsource "^1.0.0" yargs "^15.3.1" -"@babel/code-frame@7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" - integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== - dependencies: - "@babel/highlight" "^7.10.4" - -"@babel/code-frame@7.12.11": - version "7.12.11" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" - integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== - dependencies: - "@babel/highlight" "^7.10.4" - 
-"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.5.5": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.13.tgz#dcfc826beef65e75c50e21d3837d7d95798dd658" - integrity sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g== - dependencies: - "@babel/highlight" "^7.12.13" - -"@babel/code-frame@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb" - integrity sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw== - dependencies: - "@babel/highlight" "^7.14.5" - -"@babel/code-frame@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" - integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== - dependencies: - "@babel/highlight" "^7.18.6" - -"@babel/compat-data@^7.12.1", "@babel/compat-data@^7.13.11", "@babel/compat-data@^7.13.15", "@babel/compat-data@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.0.tgz#a901128bce2ad02565df95e6ecbf195cf9465919" - integrity sha512-vu9V3uMM/1o5Hl5OekMUowo3FqXLJSw+s+66nt0fSWVWTtmosdzn45JHOB3cPtZoe6CTBDzvSw0RdOY85Q37+Q== - -"@babel/compat-data@^7.20.5": - version "7.20.10" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.20.10.tgz#9d92fa81b87542fff50e848ed585b4212c1d34ec" - integrity sha512-sEnuDPpOJR/fcafHMjpcpGN5M2jbUGUHwmuWKM/YdPzeEDJg8bgmbcWQFUfE32MQjti1koACvoPVsDe8Uq+idg== - -"@babel/core@7.12.3": - version "7.12.3" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.12.3.tgz#1b436884e1e3bff6fb1328dc02b208759de92ad8" - integrity sha512-0qXcZYKZp3/6N2jKYVxZv0aNCsxTSVCiK72DTiTYZAu7sjg73W0/aynWjMbiGd87EQL4WyA8reiJVh92AVla9g== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.12.1" - "@babel/helper-module-transforms" "^7.12.1" - "@babel/helpers" "^7.12.1" - "@babel/parser" "^7.12.3" - "@babel/template" "^7.10.4" - "@babel/traverse" "^7.12.1" - "@babel/types" "^7.12.1" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.1" - json5 "^2.1.2" - lodash "^4.17.19" - resolve "^1.3.2" - semver "^5.4.1" - source-map "^0.5.0" - -"@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.7.5", "@babel/core@^7.8.4": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.14.3.tgz#5395e30405f0776067fbd9cf0884f15bfb770a38" - integrity sha512-jB5AmTKOCSJIZ72sd78ECEhuPiDMKlQdDI/4QRI6lzYATx5SSogS1oQA2AoPecRCknm30gHi2l+QVvNUu3wZAg== - dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.14.3" - "@babel/helper-compilation-targets" "^7.13.16" - "@babel/helper-module-transforms" "^7.14.2" - "@babel/helpers" "^7.14.0" - "@babel/parser" "^7.14.3" - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.14.2" - "@babel/types" "^7.14.2" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.1.2" - semver "^6.3.0" - source-map "^0.5.0" - -"@babel/core@^7.14.0": - version "7.20.12" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.20.12.tgz#7930db57443c6714ad216953d1356dac0eb8496d" - integrity sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg== - dependencies: - "@ampproject/remapping" "^2.1.0" - 
"@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.20.7" - "@babel/helper-compilation-targets" "^7.20.7" - "@babel/helper-module-transforms" "^7.20.11" - "@babel/helpers" "^7.20.7" - "@babel/parser" "^7.20.7" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.12" - "@babel/types" "^7.20.7" +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" + integrity sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ== + dependencies: + "@babel/highlight" "^7.22.5" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" + integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== + +"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.14.0", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" + integrity sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.22.5" + "@babel/generator" "^7.22.5" + "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helpers" "^7.22.5" + "@babel/parser" "^7.22.5" + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.5" + "@babel/types" "^7.22.5" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.2" json5 "^2.2.2" semver "^6.3.0" -"@babel/generator@^7.12.1", "@babel/generator@^7.12.13", "@babel/generator@^7.14.2", "@babel/generator@^7.14.3": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.3.tgz#0c2652d91f7bddab7cccc6ba8157e4f40dcedb91" - integrity sha512-bn0S6flG/j0xtQdz3hsjJ624h3W0r3llttBMfyHX3YrZ/KtLYr15bjA0FXkgW7FpvrDuTuElXeVjiKlYRpnOFA== +"@babel/eslint-parser@^7.16.3": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.5.tgz#fa032503b9e2d188e25b1b95d29e8b8431042d78" + integrity sha512-C69RWYNYtrgIRE5CmTd77ZiLDXqgBipahJc/jHP3sLcAGj6AJzxNIuKNpVnICqbyK7X3pFUfEvL++rvtbQpZkQ== dependencies: - "@babel/types" "^7.14.2" - jsesc "^2.5.1" - source-map "^0.5.0" + "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" -"@babel/generator@^7.14.0", "@babel/generator@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.20.7.tgz#f8ef57c8242665c5929fe2e8d82ba75460187b4a" - integrity sha512-7wqMOJq8doJMZmP4ApXTzLxSr7+oO2jroJURrVEp6XShrQUObV8Tq/D0NCcoYg2uHqUrjzO0zwBjoYzelxK+sw== +"@babel/generator@^7.12.13", "@babel/generator@^7.14.0", "@babel/generator@^7.22.5", "@babel/generator@^7.7.2": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.5.tgz#1e7bf768688acfb05cf30b2369ef855e82d984f7" + integrity sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA== dependencies: - "@babel/types" "^7.20.7" + "@babel/types" "^7.22.5" 
"@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/generator@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.14.5.tgz#848d7b9f031caca9d0cd0af01b063f226f52d785" - integrity sha512-y3rlP+/G25OIX3mYKKIOlQRcqj7YgrvHxOLbVmyLJ9bPmi5ttvUmpydVjcFjZphOktWuA7ovbx91ECloWTfjIA== - dependencies: - "@babel/types" "^7.14.5" - jsesc "^2.5.1" - source-map "^0.5.0" - -"@babel/helper-annotate-as-pure@^7.0.0", "@babel/helper-annotate-as-pure@^7.10.4", "@babel/helper-annotate-as-pure@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.12.13.tgz#0f58e86dfc4bb3b1fcd7db806570e177d439b6ab" - integrity sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw== +"@babel/helper-annotate-as-pure@^7.0.0", "@babel/helper-annotate-as-pure@^7.18.6", "@babel/helper-annotate-as-pure@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== dependencies: - "@babel/types" "^7.12.13" + "@babel/types" "^7.22.5" -"@babel/helper-annotate-as-pure@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" - integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== +"@babel/helper-builder-binary-assignment-operator-visitor@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.5.tgz#a3f4758efdd0190d8927fcffd261755937c71878" + integrity sha512-m1EP3lVOPptR+2DwD125gziZNcmoNSHGmJROKoy87loWUQyJaVXDgpmruWqDARZSmtYQ+Dl25okU8+qhVzuykw== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" -"@babel/helper-builder-binary-assignment-operator-visitor@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.12.13.tgz#6bc20361c88b0a74d05137a65cac8d3cbf6f61fc" - integrity sha512-CZOv9tGphhDRlVjVkAgm8Nhklm9RzSmWpX2my+t7Ua/KT616pEzXsQCjinzvkRvHWJ9itO4f296efroX23XCMA== - dependencies: - "@babel/helper-explode-assignable-expression" "^7.12.13" - "@babel/types" "^7.12.13" - -"@babel/helper-compilation-targets@^7.12.1", "@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.13.16": - version "7.13.16" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.13.16.tgz#6e91dccf15e3f43e5556dffe32d860109887563c" - integrity sha512-3gmkYIrpqsLlieFwjkGgLaSHmhnvlAYzZLlYVjlW+QwI+1zE17kGxuJGmIqDQdYp56XdmGeD+Bswx0UTyG18xA== - dependencies: - "@babel/compat-data" "^7.13.15" - "@babel/helper-validator-option" "^7.12.17" - browserslist "^4.14.5" - semver "^6.3.0" - -"@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz#a6cd33e93629f5eb473b021aac05df62c4cd09bb" - integrity 
sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ== +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz#fc7319fc54c5e2fa14b2909cf3c5fd3046813e02" + integrity sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw== dependencies: - "@babel/compat-data" "^7.20.5" - "@babel/helper-validator-option" "^7.18.6" + "@babel/compat-data" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" browserslist "^4.21.3" lru-cache "^5.1.1" semver "^6.3.0" -"@babel/helper-create-class-features-plugin@^7.12.1", "@babel/helper-create-class-features-plugin@^7.13.0", "@babel/helper-create-class-features-plugin@^7.14.0", "@babel/helper-create-class-features-plugin@^7.14.3": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.14.3.tgz#832111bcf4f57ca57a4c5b1a000fc125abc6554a" - integrity sha512-BnEfi5+6J2Lte9LeiL6TxLWdIlEv9Woacc1qXzXBgbikcOzMRM2Oya5XGg/f/ngotv1ej2A/b+3iJH8wbS1+lQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-function-name" "^7.14.2" - "@babel/helper-member-expression-to-functions" "^7.13.12" - "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/helper-replace-supers" "^7.14.3" - "@babel/helper-split-export-declaration" "^7.12.13" - -"@babel/helper-create-class-features-plugin@^7.18.6": - version "7.20.12" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.20.12.tgz#4349b928e79be05ed2d1643b20b99bb87c503819" - integrity sha512-9OunRkbT0JQcednL0UFvbfXpAsUXiGjUk0a7sN8fUXX7Mue79cUSMjHGDRRi/Vz9vYlpIhLV5fMD5dKoMhhsNQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-member-expression-to-functions" "^7.20.7" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-replace-supers" "^7.20.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" - "@babel/helper-split-export-declaration" "^7.18.6" +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0", "@babel/helper-create-class-features-plugin@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.5.tgz#2192a1970ece4685fbff85b48da2c32fcb130b7c" + integrity sha512-xkb58MyOYIslxu3gKmVXmjTtUPvBU4odYzbiIQbWwLKIHCsx6UGZGX6F1IznMFVnDdirseUZopzN+ZRt8Xb33Q== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-member-expression-to-functions" "^7.22.5" + "@babel/helper-optimise-call-expression" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.5" + semver "^6.3.0" -"@babel/helper-create-regexp-features-plugin@^7.12.13": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.14.3.tgz#149aa6d78c016e318c43e2409a0ae9c136a86688" - integrity 
sha512-JIB2+XJrb7v3zceV2XzDhGIB902CmKGSpSl4q2C6agU9SNLG/2V1RtFRGPG1Ajh9STj3+q6zJMOC+N/pp2P9DA== +"@babel/helper-create-regexp-features-plugin@^7.12.13", "@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.5.tgz#bb2bf0debfe39b831986a4efbf4066586819c6e4" + integrity sha512-1VpEFOIbMRaXyDeUwUfmTIxExLwQ+zkW+Bh5zXpApA3oQedBx9v/updixWxnx/bZpKw7u8VxWjb/qWpIcmPq8A== dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - regexpu-core "^4.7.1" + "@babel/helper-annotate-as-pure" "^7.22.5" + regexpu-core "^5.3.1" + semver "^6.3.0" -"@babel/helper-define-polyfill-provider@^0.2.2": - version "0.2.3" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.2.3.tgz#0525edec5094653a282688d34d846e4c75e9c0b6" - integrity sha512-RH3QDAfRMzj7+0Nqu5oqgO5q9mFtQEVvCRsi8qCEfzLR9p2BHfn5FzhSB2oj1fF7I2+DcTORkYaQ6aTR9Cofew== +"@babel/helper-define-polyfill-provider@^0.4.0": + version "0.4.0" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.0.tgz#487053f103110f25b9755c5980e031e93ced24d8" + integrity sha512-RnanLx5ETe6aybRi1cO/edaRH+bNYWaryCEmjDDYyNr4wnSzyOp8T0dWipmqVHKEY3AbVKUom50AKSlj1zmKbg== dependencies: - "@babel/helper-compilation-targets" "^7.13.0" - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/traverse" "^7.13.0" + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" debug "^4.1.1" lodash.debounce "^4.0.8" resolve "^1.14.2" semver "^6.1.2" -"@babel/helper-environment-visitor@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" - integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== - -"@babel/helper-explode-assignable-expression@^7.12.13": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.13.0.tgz#17b5c59ff473d9f956f40ef570cf3a76ca12657f" - integrity sha512-qS0peLTDP8kOisG1blKbaoBg/o9OSa1qoumMjTK5pM+KDTtpxpsiubnCGP34vK8BXGcb2M9eigwgvoJryrzwWA== - dependencies: - "@babel/types" "^7.13.0" - -"@babel/helper-function-name@^7.12.13", "@babel/helper-function-name@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.2.tgz#397688b590760b6ef7725b5f0860c82427ebaac2" - integrity sha512-NYZlkZRydxw+YT56IlhIcS8PAhb+FEUiOzuhFTfqDyPmzAhRge6ua0dQYT/Uh0t/EDHq05/i+e5M2d4XvjgarQ== - dependencies: - "@babel/helper-get-function-arity" "^7.12.13" - "@babel/template" "^7.12.13" - "@babel/types" "^7.14.2" - -"@babel/helper-function-name@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.14.5.tgz#89e2c474972f15d8e233b52ee8c480e2cfcd50c4" - integrity sha512-Gjna0AsXWfFvrAuX+VKcN/aNNWonizBj39yGwUzVDVTlMYJMK2Wp6xdpy72mfArFq5uK+NOuexfzZlzI1z9+AQ== - dependencies: - "@babel/helper-get-function-arity" "^7.14.5" - "@babel/template" "^7.14.5" - "@babel/types" "^7.14.5" - -"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": - version "7.19.0" - resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" - integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== - dependencies: - "@babel/template" "^7.18.10" - "@babel/types" "^7.19.0" - -"@babel/helper-get-function-arity@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz#bc63451d403a3b3082b97e1d8b3fe5bd4091e583" - integrity sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg== - dependencies: - "@babel/types" "^7.12.13" - -"@babel/helper-get-function-arity@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.14.5.tgz#25fbfa579b0937eee1f3b805ece4ce398c431815" - integrity sha512-I1Db4Shst5lewOM4V+ZKJzQ0JGGaZ6VY1jYvMghRjqs6DWgxLCIyFt30GlnKkfUeFLpJt2vzbMVEXVSXlIFYUg== - dependencies: - "@babel/types" "^7.14.5" - -"@babel/helper-hoist-variables@^7.13.0": - version "7.13.16" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.13.16.tgz#1b1651249e94b51f8f0d33439843e33e39775b30" - integrity sha512-1eMtTrXtrwscjcAeO4BVK+vvkxaLJSPFz1w1KLawz6HLNi9bPFGBNwwDyVfiu1Tv/vRRFYfoGaKhmAQPGPn5Wg== - dependencies: - "@babel/traverse" "^7.13.15" - "@babel/types" "^7.13.16" - -"@babel/helper-hoist-variables@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.14.5.tgz#e0dd27c33a78e577d7c8884916a3e7ef1f7c7f8d" - integrity sha512-R1PXiz31Uc0Vxy4OEOm07x0oSjKAdPPCh3tPivn/Eo8cvz6gveAeuyUUPB21Hoiif0uoPQSSdhIPS3352nvdyQ== - dependencies: - "@babel/types" "^7.14.5" - -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-member-expression-to-functions@^7.13.12": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.12.tgz#dfe368f26d426a07299d8d6513821768216e6d72" - integrity sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw== - dependencies: - "@babel/types" "^7.13.12" - -"@babel/helper-member-expression-to-functions@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.20.7.tgz#a6f26e919582275a93c3aa6594756d71b0bb7f05" - integrity sha512-9J0CxJLq315fEdi4s7xK5TQaNYjZw+nDVpVqr1axNGKzdrdwYBD5b4uKv3n75aABG0rCCTK8Im8Ww7eYfMrZgw== - dependencies: - "@babel/types" "^7.20.7" - -"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.0.0-beta.49", "@babel/helper-module-imports@^7.12.1", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.13.12": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.13.12.tgz#c6a369a6f3621cb25da014078684da9196b61977" - integrity sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA== - dependencies: - "@babel/types" "^7.13.12" - 
-"@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" - integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.13.0", "@babel/helper-module-transforms@^7.14.0", "@babel/helper-module-transforms@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.14.2.tgz#ac1cc30ee47b945e3e0c4db12fa0c5389509dfe5" - integrity sha512-OznJUda/soKXv0XhpvzGWDnml4Qnwp16GN+D/kZIdLsWoHj05kyu8Rm5kXmMef+rVJZ0+4pSGLkeixdqNUATDA== - dependencies: - "@babel/helper-module-imports" "^7.13.12" - "@babel/helper-replace-supers" "^7.13.12" - "@babel/helper-simple-access" "^7.13.12" - "@babel/helper-split-export-declaration" "^7.12.13" - "@babel/helper-validator-identifier" "^7.14.0" - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.14.2" - "@babel/types" "^7.14.2" - -"@babel/helper-module-transforms@^7.20.11": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.20.11.tgz#df4c7af713c557938c50ea3ad0117a7944b2f1b0" - integrity sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.20.2" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.19.1" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.10" - "@babel/types" "^7.20.7" - -"@babel/helper-optimise-call-expression@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz#5c02d171b4c8615b1e7163f888c1c81c30a2aaea" - integrity sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA== - dependencies: - "@babel/types" "^7.12.13" - -"@babel/helper-optimise-call-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" - integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz#806526ce125aed03373bc416a828321e3a6a33af" - integrity sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ== - -"@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz#d1b9000752b18d0877cff85a5c376ce5c3121629" - integrity 
sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== - -"@babel/helper-remap-async-to-generator@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.13.0.tgz#376a760d9f7b4b2077a9dd05aa9c3927cadb2209" - integrity sha512-pUQpFBE9JvC9lrQbpX0TmeNIy5s7GnZjna2lhhcHC7DzgBs6fWn722Y5cfwgrtrqc7NAJwMvOa0mKhq6XaE4jg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-wrap-function" "^7.13.0" - "@babel/types" "^7.13.0" - -"@babel/helper-replace-supers@^7.12.13", "@babel/helper-replace-supers@^7.13.12", "@babel/helper-replace-supers@^7.14.3": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.14.3.tgz#ca17b318b859d107f0e9b722d58cf12d94436600" - integrity sha512-Rlh8qEWZSTfdz+tgNV/N4gz1a0TMNwCUcENhMjHTHKp3LseYH5Jha0NSlyTQWMnjbYcwFt+bqAMqSLHVXkQ6UA== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.13.12" - "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/traverse" "^7.14.2" - "@babel/types" "^7.14.2" - -"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.20.7.tgz#243ecd2724d2071532b2c8ad2f0f9f083bcae331" - integrity sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-member-expression-to-functions" "^7.20.7" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/helper-simple-access@^7.13.12": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.13.12.tgz#dd6c538afb61819d205a012c31792a39c7a5eaf6" - integrity sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA== - dependencies: - "@babel/types" "^7.13.12" - -"@babel/helper-simple-access@^7.20.2": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz#0ab452687fe0c2cfb1e2b9e0015de07fc2d62dd9" - integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== - dependencies: - "@babel/types" "^7.20.2" - -"@babel/helper-skip-transparent-expression-wrappers@^7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.12.1.tgz#462dc63a7e435ade8468385c63d2b84cce4b3cbf" - integrity sha512-Mf5AUuhG1/OCChOJ/HcADmvcHM42WJockombn8ATJG3OnyiSxBK/Mm5x78BQWvmtXZKHgbjdGL2kin/HOLlZGA== - dependencies: - "@babel/types" "^7.12.1" - -"@babel/helper-skip-transparent-expression-wrappers@^7.20.0": - version "7.20.0" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz#fbe4c52f60518cab8140d77101f0e63a8a230684" - integrity sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg== - dependencies: - "@babel/types" "^7.20.0" - -"@babel/helper-split-export-declaration@^7.12.13": - version "7.12.13" - resolved 
"https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz#e9430be00baf3e88b0e13e6f9d4eaf2136372b05" - integrity sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg== - dependencies: - "@babel/types" "^7.12.13" - -"@babel/helper-split-export-declaration@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.14.5.tgz#22b23a54ef51c2b7605d851930c1976dd0bc693a" - integrity sha512-hprxVPu6e5Kdp2puZUmvOGjaLv9TCe58E/Fl6hRq4YiVQxIcNvuq6uTM2r1mT/oPskuS9CgR+I94sqAYv0NGKA== - dependencies: - "@babel/types" "^7.14.5" - -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-string-parser@^7.19.4": - version "7.19.4" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" - integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== - -"@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz#d26cad8a47c65286b15df1547319a5d0bcf27288" - integrity sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A== - -"@babel/helper-validator-identifier@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz#d0f0e277c512e0c938277faa85a3968c9a44c0e8" - integrity sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg== - -"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": - version "7.19.1" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" - integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== - -"@babel/helper-validator-option@^7.12.1", "@babel/helper-validator-option@^7.12.17": - version "7.12.17" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz#d1fbf012e1a79b7eebbfdc6d270baaf8d9eb9831" - integrity sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw== - -"@babel/helper-validator-option@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" - integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== - -"@babel/helper-wrap-function@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.13.0.tgz#bdb5c66fda8526ec235ab894ad53a1235c79fcc4" - integrity sha512-1UX9F7K3BS42fI6qd2A4BjKzgGjToscyZTdp1DjknHLCIvpgne6918io+aL5LXFcER/8QWiwpoY902pVEqgTXA== - dependencies: - "@babel/helper-function-name" 
"^7.12.13" - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.13.0" - "@babel/types" "^7.13.0" - -"@babel/helpers@^7.12.1", "@babel/helpers@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.0.tgz#ea9b6be9478a13d6f961dbb5f36bf75e2f3b8f62" - integrity sha512-+ufuXprtQ1D1iZTO/K9+EBRn+qPWMJjZSw/S0KlFrxCw4tkrzv9grgpDHkY9MeQTjTY8i2sp7Jep8DfU6tN9Mg== - dependencies: - "@babel/template" "^7.12.13" - "@babel/traverse" "^7.14.0" - "@babel/types" "^7.14.0" - -"@babel/helpers@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.20.7.tgz#04502ff0feecc9f20ecfaad120a18f011a8e6dce" - integrity sha512-PBPjs5BppzsGaxHQCDKnZ6Gd9s6xl8bBCluz3vEInLGRJmnZan4F6BYCeqtyXqkk4W5IlPmjK4JlOuZkpJ3xZA== - dependencies: - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/highlight@^7.10.4", "@babel/highlight@^7.12.13": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.0.tgz#3197e375711ef6bf834e67d0daec88e4f46113cf" - integrity sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg== - dependencies: - "@babel/helper-validator-identifier" "^7.14.0" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/highlight@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9" - integrity sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg== - dependencies: - "@babel/helper-validator-identifier" "^7.14.5" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" +"@babel/helper-environment-visitor@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" + integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== + +"@babel/helper-function-name@^7.12.13", "@babel/helper-function-name@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" + integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== + dependencies: + "@babel/template" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-member-expression-to-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.22.5.tgz#0a7c56117cad3372fbf8d2fb4bf8f8d64a1e76b2" + integrity sha512-aBiH1NKMG0H2cGZqspNvsaBe6wNGjbJjuLy29aU+eDZjSbbN53BaxlpB02xm9v34pLTZ1nIQPFYn2qMZoa5BQQ== + dependencies: + "@babel/types" "^7.22.5" + 
+"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" + integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-module-transforms@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" + integrity sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw== + dependencies: + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.5" + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/helper-optimise-call-expression@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" + integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== + +"@babel/helper-remap-async-to-generator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.5.tgz#14a38141a7bf2165ad38da61d61cf27b43015da2" + integrity sha512-cU0Sq1Rf4Z55fgz7haOakIyM7+x/uCFwXpLPaeRzfoUtAEAuUZjZvFPjL/rk5rW693dIgn2hng1W7xbT7lWT4g== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-wrap-function" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/helper-replace-supers@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.5.tgz#71bc5fb348856dea9fdc4eafd7e2e49f585145dc" + integrity sha512-aLdNM5I3kdI/V9xGNyKSF3X/gTyMUBohTZ+/3QdQKAA9vxIiy12E+8E2HoOP1/DjeqU+g6as35QHJNMDDYpuCg== + dependencies: + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-member-expression-to-functions" "^7.22.5" + "@babel/helper-optimise-call-expression" "^7.22.5" + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/helper-simple-access@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + integrity 
sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-skip-transparent-expression-wrappers@^7.20.0", "@babel/helper-skip-transparent-expression-wrappers@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" + integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-split-export-declaration@^7.12.13", "@babel/helper-split-export-declaration@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz#88cf11050edb95ed08d596f7a044462189127a08" + integrity sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + +"@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" + integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== + +"@babel/helper-validator-option@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" + integrity sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw== + +"@babel/helper-wrap-function@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.5.tgz#44d205af19ed8d872b4eefb0d2fa65f45eb34f06" + integrity sha512-bYqLIBSEshYcYQyfks8ewYA8S30yaGSeRslcvKMvoUk6HHPySbxHq9YRi6ghhzEU+yhQv9bP/jXnygkStOcqZw== + dependencies: + "@babel/helper-function-name" "^7.22.5" + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/helpers@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.5.tgz#74bb4373eb390d1ceed74a15ef97767e63120820" + integrity sha512-pSXRmfE1vzcUIDFQcSGA5Mr+GxBV9oiRKDuDxXvWQQBCh8HoIjs/2DlDB7H8smac1IVrB9/xdXj2N3Wol9Cr+Q== + dependencies: + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/highlight@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" + integrity sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw== + dependencies: + "@babel/helper-validator-identifier" "^7.22.5" chalk "^2.0.0" js-tokens "^4.0.0" @@ -737,48 +448,28 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== 
-"@babel/parser@^7.1.0", "@babel/parser@^7.12.13", "@babel/parser@^7.12.3", "@babel/parser@^7.14.2", "@babel/parser@^7.14.3", "@babel/parser@^7.7.0": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.3.tgz#9b530eecb071fd0c93519df25c5ff9f14759f298" - integrity sha512-7MpZDIfI7sUC5zWo2+foJ50CSI5lcqDehZ0lVgIhSi4bFEk94fLAKlF3Q0nzSQQ+ca0lm+O6G9ztKVBeu8PMRQ== - -"@babel/parser@^7.1.6", "@babel/parser@^7.14.5", "@babel/parser@^7.14.7": - version "7.14.7" - resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595" - integrity sha512-X67Z5y+VBJuHB/RjwECp8kSl5uYi0BvRbNeWqkaJCVh+LiTPl19WBUfG627psSgp9rSf6ojuXghQM3ha6qHHdA== - -"@babel/parser@^7.14.0", "@babel/parser@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.20.7.tgz#66fe23b3c8569220817d5feb8b9dcdc95bb4f71b" - integrity sha512-T3Z9oHybU+0vZlY9CiDSJQTD5ZapcW18ZctFMi0MOAl/4BjFF4ul7NVSARLdbGO5vDqy9eQiGTV0LtKfvCYvcg== - -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.13.12": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.13.12.tgz#a3484d84d0b549f3fc916b99ee4783f26fabad2a" - integrity sha512-d0u3zWKcoZf379fOeJdr1a5WPDny4aOFZ6hlfKivgK0LY7ZxNfoaHL2fWwdGtHyVvra38FC+HVYkO+byfSA8AQ== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" - "@babel/plugin-proposal-optional-chaining" "^7.13.12" +"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.12.13", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" + integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== -"@babel/plugin-proposal-async-generator-functions@^7.12.1", "@babel/plugin-proposal-async-generator-functions@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.14.2.tgz#3a2085abbf5d5f962d480dbc81347385ed62eb1e" - integrity sha512-b1AM4F6fwck4N8ItZ/AtC4FP/cqZqmKRQ4FaTDutwSYyjuhtvsGEMLK4N/ztV/ImP40BjIDyMgBQAeAMsQYVFQ== +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" + integrity sha512-NP1M5Rf+u2Gw9qfSO4ihjcTGW5zXTi36ITLd4/EoAcEhIZ0yjMqmftDNl3QC19CX7olhrjpyU454g/2W7X0jvQ== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-remap-async-to-generator" "^7.13.0" - "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-proposal-class-properties@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.12.1.tgz#a082ff541f2a29a4821065b8add9346c0c16e5de" - integrity sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w== +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.22.5": + 
version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.22.5.tgz#fef09f9499b1f1c930da8a0c419db42167d792ca" + integrity sha512-31Bb65aZaUwqCbWMnZPduIZxCBngHFlzyN6Dq6KAJjtx+lx6ohKHubc61OomYi7XwVD4Ol0XCVz4h+pYFR048g== dependencies: - "@babel/helper-create-class-features-plugin" "^7.12.1" - "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/plugin-transform-optional-chaining" "^7.22.5" -"@babel/plugin-proposal-class-properties@^7.0.0": +"@babel/plugin-proposal-class-properties@^7.0.0", "@babel/plugin-proposal-class-properties@^7.16.0": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== @@ -786,94 +477,31 @@ "@babel/helper-create-class-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-proposal-class-properties@^7.12.1", "@babel/plugin-proposal-class-properties@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.13.0.tgz#146376000b94efd001e57a40a88a525afaab9f37" - integrity sha512-KnTDjFNC1g+45ka0myZNvSBFLhNCLN+GeGYLDEA8Oq7MZ6yMgfLoIRh86GRT0FjtJhZw8JyUskP9uvj5pHM9Zg== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.13.0" - "@babel/helper-plugin-utils" "^7.13.0" - -"@babel/plugin-proposal-class-static-block@^7.13.11": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.14.3.tgz#5a527e2cae4a4753119c3a3e7f64ecae8ccf1360" - integrity sha512-HEjzp5q+lWSjAgJtSluFDrGGosmwTgKwCXdDQZvhKsRlwv3YdkUEqxNrrjesJd+B9E9zvr1PVPVBvhYZ9msjvQ== +"@babel/plugin-proposal-decorators@^7.16.4": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.22.5.tgz#dc8cdda048e5aea947efda920e030199806b868d" + integrity sha512-h8hlezQ4dl6ixodgXkH8lUfcD7x+WAuIqPUjwGoItynrXOAv4a4Tci1zA/qjzQjjcl0v3QpLdc2LM6ZACQuY7A== dependencies: - "@babel/helper-create-class-features-plugin" "^7.14.3" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-class-static-block" "^7.12.13" - -"@babel/plugin-proposal-decorators@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.12.1.tgz#59271439fed4145456c41067450543aee332d15f" - integrity sha512-knNIuusychgYN8fGJHONL0RbFxLGawhXOJNLBk75TniTsZZeA+wdkDuv6wp4lGwzQEKjZi6/WYtnb3udNPmQmQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.12.1" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-decorators" "^7.12.1" - -"@babel/plugin-proposal-dynamic-import@^7.12.1", "@babel/plugin-proposal-dynamic-import@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.14.2.tgz#01ebabd7c381cff231fa43e302939a9de5be9d9f" - integrity sha512-oxVQZIWFh91vuNEMKltqNsKLFWkOIyJc95k2Gv9lWVyDfPUQGSSlbDEgWuJUU1afGE9WwlzpucMZ3yDRHIItkA== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" + 
"@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.5" + "@babel/plugin-syntax-decorators" "^7.22.5" -"@babel/plugin-proposal-export-namespace-from@^7.12.1", "@babel/plugin-proposal-export-namespace-from@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.14.2.tgz#62542f94aa9ce8f6dba79eec698af22112253791" - integrity sha512-sRxW3z3Zp3pFfLAgVEvzTFutTXax837oOatUIvSG9o5gRj9mKwm3br1Se5f4QalTQs9x4AzlA/HrCWbQIHASUQ== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.12.1", "@babel/plugin-proposal-json-strings@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.14.2.tgz#830b4e2426a782e8b2878fbfe2cba85b70cbf98c" - integrity sha512-w2DtsfXBBJddJacXMBhElGEYqCZQqN99Se1qeYn8DVLB33owlrlLftIbMzn5nz1OITfDVknXF433tBrLEAOEjA== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-proposal-logical-assignment-operators@^7.12.1", "@babel/plugin-proposal-logical-assignment-operators@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.14.2.tgz#222348c080a1678e0e74ea63fe76f275882d1fd7" - integrity sha512-1JAZtUrqYyGsS7IDmFeaem+/LJqujfLZ2weLR9ugB0ufUPjzf8cguyVT1g5im7f7RXxuLq1xUxEzvm68uYRtGg== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.12.1.tgz#3ed4fff31c015e7f3f1467f190dbe545cd7b046c" - integrity sha512-nZY0ESiaQDI1y96+jk6VxMOaL4LPo/QDHBqL+SF3/vl6dHkTwHlOI8L4ZwuRBHgakRBw5zsVylel7QPbbGuYgg== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.12.1", "@babel/plugin-proposal-nullish-coalescing-operator@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.14.2.tgz#425b11dc62fc26939a2ab42cbba680bdf5734546" - integrity sha512-ebR0zU9OvI2N4qiAC38KIAK75KItpIPTpAtd2r4OZmMFeKbKJpUFLYP2EuDut82+BmYi8sz42B+TfTptJ9iG5Q== +"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.18.6" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" -"@babel/plugin-proposal-numeric-separator@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.1.tgz#0e2c6774c4ce48be412119b4d693ac777f7685a6" 
- integrity sha512-MR7Ok+Af3OhNTCxYVjJZHS0t97ydnJZt/DbR4WISO39iDnhiD8XHrY12xuSJ90FFEGjir0Fzyyn7g/zY6hxbxA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - -"@babel/plugin-proposal-numeric-separator@^7.12.1", "@babel/plugin-proposal-numeric-separator@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.14.2.tgz#82b4cc06571143faf50626104b335dd71baa4f9e" - integrity sha512-DcTQY9syxu9BpU3Uo94fjCB3LN9/hgPS8oUL7KrSW3bA2ePrKZZPJcc5y0hoJAM9dft3pGfErtEUvxXQcfLxUg== +"@babel/plugin-proposal-numeric-separator@^7.16.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.18.6" "@babel/plugin-syntax-numeric-separator" "^7.10.4" "@babel/plugin-proposal-object-rest-spread@^7.0.0": @@ -887,62 +515,39 @@ "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-transform-parameters" "^7.20.7" -"@babel/plugin-proposal-object-rest-spread@^7.12.1", "@babel/plugin-proposal-object-rest-spread@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.14.2.tgz#e17d418f81cc103fedd4ce037e181c8056225abc" - integrity sha512-hBIQFxwZi8GIp934+nj5uV31mqclC1aYDhctDu5khTi9PCCUOczyy0b34W0oE9U/eJXiqQaKyVsmjeagOaSlbw== - dependencies: - "@babel/compat-data" "^7.14.0" - "@babel/helper-compilation-targets" "^7.13.16" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.14.2" - -"@babel/plugin-proposal-optional-catch-binding@^7.12.1", "@babel/plugin-proposal-optional-catch-binding@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.14.2.tgz#150d4e58e525b16a9a1431bd5326c4eed870d717" - integrity sha512-XtkJsmJtBaUbOxZsNk0Fvrv8eiqgneug0A6aqLFZ4TSkar2L5dSXWcnUKHgmjJt49pyB/6ZHvkr3dPgl9MOWRQ== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - -"@babel/plugin-proposal-optional-chaining@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.1.tgz#cce122203fc8a32794296fc377c6dedaf4363797" - integrity sha512-c2uRpY6WzaVDzynVY9liyykS+kVU+WRZPMPYpkelXH8KBt1oXoI89kPbZKKG/jDT5UK92FTW2fZkZaJhdiBabw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" - "@babel/plugin-syntax-optional-chaining" "^7.8.0" - -"@babel/plugin-proposal-optional-chaining@^7.12.1", "@babel/plugin-proposal-optional-chaining@^7.13.12", "@babel/plugin-proposal-optional-chaining@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.14.2.tgz#df8171a8b9c43ebf4c1dabe6311b432d83e1b34e" - integrity sha512-qQByMRPwMZJainfig10BoaDldx/+VDtNcrA7qdNaEOAj6VXud+gfrkA8j4CRAU5HjnWREXqIpSpH30qZX1xivA== +"@babel/plugin-proposal-optional-chaining@^7.16.0": + version "7.21.0" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz#886f5c8978deb7d30f678b2e24346b287234d3ea" + integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" "@babel/plugin-syntax-optional-chaining" "^7.8.3" -"@babel/plugin-proposal-private-methods@^7.12.1", "@babel/plugin-proposal-private-methods@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.13.0.tgz#04bd4c6d40f6e6bbfa2f57e2d8094bad900ef787" - integrity sha512-MXyyKQd9inhx1kDYPkFRVOBXQ20ES8Pto3T7UZ92xj2mY0EVD8oAVzeyYuVfy/mxAdTSIayOvg+aVzcHV2bn6Q== +"@babel/plugin-proposal-private-methods@^7.16.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== dependencies: - "@babel/helper-create-class-features-plugin" "^7.13.0" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-proposal-private-property-in-object@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.14.0.tgz#b1a1f2030586b9d3489cc26179d2eb5883277636" - integrity sha512-59ANdmEwwRUkLjB7CRtwJxxwtjESw+X2IePItA+RGQh+oy5RmpCh/EvVVvh5XQc3yxsm5gtv0+i9oBZhaDNVTg== +"@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": + version "7.21.0-placeholder-for-preset-env.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" + integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== + +"@babel/plugin-proposal-private-property-in-object@^7.21.11": + version "7.21.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.11.tgz#69d597086b6760c4126525cfa154f34631ff272c" + integrity sha512-0QZ8qP/3RLDVBwBFoWAwCtgcDZJVwA5LUJRZU8x2YFfKNuFq161wK3cuGrALu5yiPu+vzwTAg/sMWVNeWeNyaw== dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-create-class-features-plugin" "^7.14.0" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-private-property-in-object" "^7.14.0" + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" -"@babel/plugin-proposal-unicode-property-regex@^7.12.1", "@babel/plugin-proposal-unicode-property-regex@^7.12.13", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": +"@babel/plugin-proposal-unicode-property-regex@^7.4.4": version "7.12.13" resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.12.13.tgz#bebde51339be829c17aaaaced18641deb62b39ba" integrity sha512-XyJmZidNfofEkqFV5VC/bLabGmO5QzenPO/YOfGuEbgU+2sSwMmio3YLb4WtBgcmmdwZHyVyv8on77IUjQ5Gvg== @@ -950,7 +555,7 @@ "@babel/helper-create-regexp-features-plugin" "^7.12.13" "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-async-generators@^7.8.0", "@babel/plugin-syntax-async-generators@^7.8.4": +"@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== @@ -964,28 +569,28 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-class-properties@^7.0.0", "@babel/plugin-syntax-class-properties@^7.12.1", "@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": +"@babel/plugin-syntax-class-properties@^7.0.0", "@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-class-static-block@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.12.13.tgz#8e3d674b0613e67975ceac2776c97b60cafc5c9c" - integrity sha512-ZmKQ0ZXR0nYpHZIIuj9zE7oIqCx2hw9TKi+lIo73NNrMPAZGHfS92/VRV0ZmPj6H2ffBgyFHXvJ5NYsNeEaP2A== +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.14.5" -"@babel/plugin-syntax-decorators@^7.12.1": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.12.13.tgz#fac829bf3c7ef4a1bc916257b403e58c6bdaf648" - integrity sha512-Rw6aIXGuqDLr6/LoBBYE57nKOzQpz/aDkKlMqEwH+Vp0MXbG6H/TfRjaY343LKxzAKAMXIHsQ8JzaZKuDZ9MwA== +"@babel/plugin-syntax-decorators@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.22.5.tgz#329fe2907c73de184033775637dbbc507f09116a" + integrity sha512-avpUOBS7IU6al8MmF1XpAyj9QYeLPuSDJI5D4pVMSMdL7xQokKqJPYQC67RCT0aCTashUXPiGwMJ0DEXXCEmMA== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-dynamic-import@^7.8.0", "@babel/plugin-syntax-dynamic-import@^7.8.3": +"@babel/plugin-syntax-dynamic-import@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== @@ 
-999,47 +604,47 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.3" -"@babel/plugin-syntax-flow@^7.0.0", "@babel/plugin-syntax-flow@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.18.6.tgz#774d825256f2379d06139be0c723c4dd444f3ca1" - integrity sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A== +"@babel/plugin-syntax-flow@^7.0.0", "@babel/plugin-syntax-flow@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.22.5.tgz#163b820b9e7696ce134df3ee716d9c0c98035859" + integrity sha512-9RdCl0i+q0QExayk2nOS7853w08yLucnnPML6EN9S8fgMPVtdLDCdx/cOQ/i44Lb9UeQX9A35yaqBBOMMZxPxQ== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-flow@^7.12.1": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.12.13.tgz#5df9962503c0a9c918381c929d51d4d6949e7e86" - integrity sha512-J/RYxnlSLXZLVR7wTRsozxKT8qbsx1mNKJzXEEjQ0Kjx1ZACcyHgbanNWNCFtc36IzuWhYWPpvJFFoexoOWFmA== +"@babel/plugin-syntax-import-assertions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" + integrity sha512-rdV97N7KqsRzeNGoWUOK6yUsWarLjE5Su/Snk9IYPU9CwkWHs4t+rTGOvffTR8XGkJMTAdLfO0xVnXm8wugIJg== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-import-meta@^7.8.3": +"@babel/plugin-syntax-import-attributes@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.22.5.tgz#ab840248d834410b829f569f5262b9e517555ecb" + integrity sha512-KwvoWDeNKPETmozyFE0P2rOLqh39EoQHNjqizrI5B8Vt0ZNS7M56s7dAiAqbYfiAYOuIzIh96z3iR2ktgu3tEg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-import-meta@^7.10.4", "@babel/plugin-syntax-import-meta@^7.8.3": version "7.10.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-json-strings@^7.8.0", "@babel/plugin-syntax-json-strings@^7.8.3": +"@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.17.12", "@babel/plugin-syntax-jsx@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" - integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== +"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.17.12", "@babel/plugin-syntax-jsx@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" + integrity sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-jsx@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.13.tgz#044fb81ebad6698fe62c478875575bcbb9b70f15" - integrity sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": version "7.10.4" @@ -1048,7 +653,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0", "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== @@ -1062,358 +667,369 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-object-rest-spread@^7.0.0", "@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": +"@babel/plugin-syntax-object-rest-spread@^7.0.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-optional-catch-binding@^7.8.0", "@babel/plugin-syntax-optional-catch-binding@^7.8.3": +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-optional-chaining@^7.8.0", "@babel/plugin-syntax-optional-chaining@^7.8.3": +"@babel/plugin-syntax-optional-chaining@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-private-property-in-object@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.0.tgz#762a4babec61176fec6c88480dec40372b140c0b" - integrity sha512-bda3xF8wGl5/5btF794utNOL0Jw+9jE5C1sLZcoK7c4uonE/y3iQiyG+KbkF3WBV/paX58VCpjhxLPkdj5Fe4w== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - -"@babel/plugin-syntax-top-level-await@^7.12.1", 
"@babel/plugin-syntax-top-level-await@^7.12.13", "@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.12.13.tgz#c5f0fa6e249f5b739727f923540cf7a806130178" - integrity sha512-A81F9pDwyS7yM//KwbCSDqy3Uj4NMIurtplxphWxoYtNPov7cJsDkAFNNyVlIZ3jwGycVsurZ+LtOA8gZ376iQ== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-typescript@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.12.13.tgz#9dff111ca64154cef0f4dc52cf843d9f12ce4474" - integrity sha512-cHP3u1JiUiG2LFDKbXnwVad81GvfyIOmCD6HIEId6ojrY0Drfy2q1jw7BwN7dE84+kTnBjLkXoL3IEy/3JPu2w== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-transform-arrow-functions@^7.0.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz#bea332b0e8b2dab3dafe55a163d8227531ab0551" - integrity sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ== +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.14.5" -"@babel/plugin-transform-arrow-functions@^7.12.1", "@babel/plugin-transform-arrow-functions@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.13.0.tgz#10a59bebad52d637a027afa692e8d5ceff5e3dae" - integrity sha512-96lgJagobeVmazXFaDrbmCLQxBysKu7U6Do3mLsx27gf5Dk85ezysrs2BZUpXD703U/Su1xTBDxxar2oa4jAGg== +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.14.5" -"@babel/plugin-transform-async-to-generator@^7.12.1", "@babel/plugin-transform-async-to-generator@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.13.0.tgz#8e112bf6771b82bf1e974e5e26806c5c99aa516f" - integrity sha512-3j6E004Dx0K3eGmhxVJxwwI89CTJrce7lg3UrtFuDAVQ/2+SJ/h/aSFOeE6/n0WB1GsOffsJp6MnPQNQ8nmwhg== +"@babel/plugin-syntax-typescript@^7.22.5", "@babel/plugin-syntax-typescript@^7.7.2": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" + integrity sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== dependencies: - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-remap-async-to-generator" "^7.13.0" + "@babel/helper-plugin-utils" "^7.22.5" 
-"@babel/plugin-transform-block-scoped-functions@^7.0.0": +"@babel/plugin-syntax-unicode-sets-regex@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" - integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" + integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-transform-block-scoped-functions@^7.12.1", "@babel/plugin-transform-block-scoped-functions@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.12.13.tgz#a9bf1836f2a39b4eb6cf09967739de29ea4bf4c4" - integrity sha512-zNyFqbc3kI/fVpqwfqkg6RvBgFpC4J18aKKMmv7KdQ/1GgREapSJAykLMVNwfRGO3BtHj3YQZl8kxCXPcVMVeg== +"@babel/plugin-transform-arrow-functions@^7.0.0", "@babel/plugin-transform-arrow-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.22.5.tgz#e5ba566d0c58a5b2ba2a8b795450641950b71958" + integrity sha512-26lTNXoVRdAnsaDXPpvCNUq+OVWEVC6bx7Vvz9rC53F2bagUWW4u4ii2+h8Fejfh7RYqPxn+libeFBBck9muEw== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoping@^7.0.0": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.20.11.tgz#9f5a3424bd112a3f32fe0cf9364fbb155cff262a" - integrity sha512-tA4N427a7fjf1P0/2I4ScsHGc5jcHPbb30xMbaTke2gxDuWpUfXDuX1FEymJwKk4tuGUvGcejAR6HdZVqmmPyw== +"@babel/plugin-transform-async-generator-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.22.5.tgz#7336356d23380eda9a56314974f053a020dab0c3" + integrity sha512-gGOEvFzm3fWoyD5uZq7vVTD57pPJ3PczPUD/xCFGjzBpUosnklmXyKnGQbbbGs1NPNPskFex0j93yKbHt0cHyg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - -"@babel/plugin-transform-block-scoping@^7.12.1", "@babel/plugin-transform-block-scoping@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.14.2.tgz#761cb12ab5a88d640ad4af4aa81f820e6b5fdf5c" - integrity sha512-neZZcP19NugZZqNwMTH+KoBjx5WyvESPSIOQb4JHpfd+zPfqcH65RMu5xJju5+6q/Y2VzYrleQTr+b6METyyxg== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - -"@babel/plugin-transform-classes@^7.0.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.20.7.tgz#f438216f094f6bb31dc266ebfab8ff05aecad073" - integrity sha512-LWYbsiXTPKl+oBlXUGlwNlJZetXD5Am+CyBdqhPsDVjM9Jc8jwBJFrKhHf900Kfk2eZG1y9MAG3UNajol7A4VQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-compilation-targets" "^7.20.7" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-optimise-call-expression" "^7.18.6" - 
"@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-replace-supers" "^7.20.7" - "@babel/helper-split-export-declaration" "^7.18.6" - globals "^11.1.0" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-remap-async-to-generator" "^7.22.5" + "@babel/plugin-syntax-async-generators" "^7.8.4" -"@babel/plugin-transform-classes@^7.12.1", "@babel/plugin-transform-classes@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.14.2.tgz#3f1196c5709f064c252ad056207d87b7aeb2d03d" - integrity sha512-7oafAVcucHquA/VZCsXv/gmuiHeYd64UJyyTYU+MPfNu0KeNlxw06IeENBO8bJjXVbolu+j1MM5aKQtH1OMCNg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-function-name" "^7.14.2" - "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-replace-supers" "^7.13.12" - "@babel/helper-split-export-declaration" "^7.12.13" +"@babel/plugin-transform-async-to-generator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.22.5.tgz#c7a85f44e46f8952f6d27fe57c2ed3cc084c3775" + integrity sha512-b1A8D8ZzE/VhNDoV1MSJTnpKkCG5bJo+19R4o4oy03zM7ws8yEMK755j61Dc3EyvdysbqH5BOOTquJ7ZX9C6vQ== + dependencies: + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-remap-async-to-generator" "^7.22.5" + +"@babel/plugin-transform-block-scoped-functions@^7.0.0", "@babel/plugin-transform-block-scoped-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.22.5.tgz#27978075bfaeb9fa586d3cb63a3d30c1de580024" + integrity sha512-tdXZ2UdknEKQWKJP1KMNmuF5Lx3MymtMN/pvA+p/VEkhK8jVcQ1fzSy8KM9qRYhAf2/lV33hoMPKI/xaI9sADA== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-block-scoping@^7.0.0", "@babel/plugin-transform-block-scoping@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.22.5.tgz#8bfc793b3a4b2742c0983fadc1480d843ecea31b" + integrity sha512-EcACl1i5fSQ6bt+YGuU/XGCeZKStLmyVGytWkpyhCLeQVA0eu6Wtiw92V+I1T/hnezUv7j74dA/Ro69gWcU+hg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-class-properties@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.22.5.tgz#97a56e31ad8c9dc06a0b3710ce7803d5a48cca77" + integrity sha512-nDkQ0NfkOhPTq8YCLiWNxp1+f9fCobEjCb0n8WdbNUBc4IB5V7P1QnX9IjpSoquKrXF5SKojHleVNs2vGeHCHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-class-static-block@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.22.5.tgz#3e40c46f048403472d6f4183116d5e46b1bff5ba" + integrity sha512-SPToJ5eYZLxlnp1UzdARpOGeC2GbHvr9d/UV0EukuVx8atktg194oe+C5BqQ8jRTkgLRVOPYeXRSBg1IlMoVRA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-transform-classes@^7.0.0", "@babel/plugin-transform-classes@^7.22.5": + version 
"7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.22.5.tgz#635d4e98da741fad814984639f4c0149eb0135e1" + integrity sha512-2edQhLfibpWpsVBx2n/GKOz6JdGQvLruZQfGr9l1qes2KQaWswjBzhQF7UDUZMNaMMQeYnQzxwOMPsbYF7wqPQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-optimise-call-expression" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.5" globals "^11.1.0" -"@babel/plugin-transform-computed-properties@^7.0.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz#704cc2fd155d1c996551db8276d55b9d46e4d0aa" - integrity sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ== +"@babel/plugin-transform-computed-properties@^7.0.0", "@babel/plugin-transform-computed-properties@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.22.5.tgz#cd1e994bf9f316bd1c2dafcd02063ec261bb3869" + integrity sha512-4GHWBgRf0krxPX+AaPtgBAlTgTeZmqDynokHOX7aqqAB4tHs3U2Y02zH6ETFdLZGcg9UQSD1WCmkVrE9ErHeOg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/template" "^7.20.7" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/template" "^7.22.5" -"@babel/plugin-transform-computed-properties@^7.12.1", "@babel/plugin-transform-computed-properties@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.13.0.tgz#845c6e8b9bb55376b1fa0b92ef0bdc8ea06644ed" - integrity sha512-RRqTYTeZkZAz8WbieLTvKUEUxZlUTdmL5KGMyZj7FnMfLNKV4+r5549aORG/mgojRmFlQMJDUupwAMiF2Q7OUg== +"@babel/plugin-transform-destructuring@^7.0.0", "@babel/plugin-transform-destructuring@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.22.5.tgz#d3aca7438f6c26c78cdd0b0ba920a336001b27cc" + integrity sha512-GfqcFuGW8vnEqTUBM7UtPd5A4q797LTvvwKxXTgRsFjoqaJiEg9deBG6kWeQYkVEL569NpnmpC0Pkr/8BLKGnQ== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-destructuring@^7.0.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.20.7.tgz#8bda578f71620c7de7c93af590154ba331415454" - integrity sha512-Xwg403sRrZb81IVB79ZPqNQME23yhugYVqgTxAhT99h485F4f+GMELFhhOsscDUB7HCswepKeCKLn/GZvUKoBA== +"@babel/plugin-transform-dotall-regex@^7.22.5", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.22.5.tgz#dbb4f0e45766eb544e193fb00e65a1dd3b2a4165" + integrity sha512-5/Yk9QxCQCl+sOIB1WelKnVRxTJDSAIxtJLL2/pqL14ZVlbH0fUQUZa/T5/UnQtBNgghR7mfB8ERBKyKPCi7Vw== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-destructuring@^7.12.1", "@babel/plugin-transform-destructuring@^7.13.17": - version "7.13.17" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.13.17.tgz#678d96576638c19d5b36b332504d3fd6e06dea27" - integrity sha512-UAUqiLv+uRLO+xuBKKMEpC+t7YRNVRqBsWWq1yKXbBZBje/t3IXCiSinZhjn/DC3qzBfICeYd2EFGEbHsh5RLA== +"@babel/plugin-transform-duplicate-keys@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.22.5.tgz#b6e6428d9416f5f0bba19c70d1e6e7e0b88ab285" + integrity sha512-dEnYD+9BBgld5VBXHnF/DbYGp3fqGMsyxKbtD1mDyIA7AkTSpKXFhCVuj/oQVOoALfBs77DudA0BE4d5mcpmqw== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-dotall-regex@^7.12.1", "@babel/plugin-transform-dotall-regex@^7.12.13", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.12.13.tgz#3f1601cc29905bfcb67f53910f197aeafebb25ad" - integrity sha512-foDrozE65ZFdUC2OfgeOCrEPTxdB3yjqxpXh8CH+ipd9CHd4s/iq81kcUpyH8ACGNEPdFqbtzfgzbT/ZGlbDeQ== +"@babel/plugin-transform-dynamic-import@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.22.5.tgz#d6908a8916a810468c4edff73b5b75bda6ad393e" + integrity sha512-0MC3ppTB1AMxd8fXjSrbPa7LT9hrImt+/fcj+Pg5YMD7UQyWp/02+JWpdnCymmsXwIx5Z+sYn1bwCn4ZJNvhqQ== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.12.13" - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" -"@babel/plugin-transform-duplicate-keys@^7.12.1", "@babel/plugin-transform-duplicate-keys@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.12.13.tgz#6f06b87a8b803fd928e54b81c258f0a0033904de" - integrity sha512-NfADJiiHdhLBW3pulJlJI2NB0t4cci4WTZ8FtdIuNc2+8pslXdPtRRAEWqUY+m9kNOk2eRYbTAOipAxlrOcwwQ== +"@babel/plugin-transform-exponentiation-operator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.22.5.tgz#402432ad544a1f9a480da865fda26be653e48f6a" + integrity sha512-vIpJFNM/FjZ4rh1myqIya9jXwrwwgFRHPjT3DkUA9ZLHuzox8jiXkOLvwm1H+PQIP3CqfC++WPKeuDi0Sjdj1g== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-exponentiation-operator@^7.12.1", "@babel/plugin-transform-exponentiation-operator@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.12.13.tgz#4d52390b9a273e651e4aba6aee49ef40e80cd0a1" - integrity sha512-fbUelkM1apvqez/yYx1/oICVnGo2KM5s63mhGylrmXUxK/IAXSIf87QIxVfZldWf4QsOafY6vV3bX8aMHSvNrA== +"@babel/plugin-transform-export-namespace-from@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.22.5.tgz#57c41cb1d0613d22f548fddd8b288eedb9973a5b" + integrity sha512-X4hhm7FRnPgd4nDA4b/5V280xCx6oL7Oob5+9qVS5C13Zq4bh1qq7LU0GgRU6b5dBWBvhGaXYVB4AcN6+ol6vg== dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.12.13" - "@babel/helper-plugin-utils" 
"^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" -"@babel/plugin-transform-flow-strip-types@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.12.1.tgz#8430decfa7eb2aea5414ed4a3fa6e1652b7d77c4" - integrity sha512-8hAtkmsQb36yMmEtk2JZ9JnVyDSnDOdlB+0nEGzIDLuK4yR3JcEjfuFPYkdEPSh8Id+rAMeBEn+X0iVEyho6Hg== +"@babel/plugin-transform-flow-strip-types@^7.0.0", "@babel/plugin-transform-flow-strip-types@^7.16.0": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.22.5.tgz#0bb17110c7bf5b35a60754b2f00c58302381dee2" + integrity sha512-tujNbZdxdG0/54g/oua8ISToaXTFBf8EnSb5PgQSciIXWOWKX3S4+JR7ZE9ol8FZwf9kxitzkGQ+QWeov/mCiA== dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-flow" "^7.12.1" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-flow" "^7.22.5" -"@babel/plugin-transform-flow-strip-types@^7.0.0": - version "7.19.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.19.0.tgz#e9e8606633287488216028719638cbbb2f2dde8f" - integrity sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg== +"@babel/plugin-transform-for-of@^7.0.0", "@babel/plugin-transform-for-of@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.22.5.tgz#ab1b8a200a8f990137aff9a084f8de4099ab173f" + integrity sha512-3kxQjX1dU9uudwSshyLeEipvrLjBCVthCgeTp6CzE/9JYrlAIaeekVxRpCWsDDfYTfRZRoCeZatCQvwo+wvK8A== dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-flow" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-for-of@^7.0.0": - version "7.18.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" - integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== +"@babel/plugin-transform-function-name@^7.0.0", "@babel/plugin-transform-function-name@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.22.5.tgz#935189af68b01898e0d6d99658db6b164205c143" + integrity sha512-UIzQNMS0p0HHiQm3oelztj+ECwFnj+ZRV4KnguvlsD2of1whUeM6o7wGNj6oLwcDoAXQ8gEqfgC24D+VdIcevg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-for-of@^7.12.1", "@babel/plugin-transform-for-of@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.13.0.tgz#c799f881a8091ac26b54867a845c3e97d2696062" - integrity sha512-IHKT00mwUVYE0zzbkDgNRP6SRzvfGCYsOxIRz8KsiaaHCcT9BWIkO+H9QRJseHBLOGBZkHUdHiqj6r0POsdytg== +"@babel/plugin-transform-json-strings@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.22.5.tgz#14b64352fdf7e1f737eed68de1a1468bd2a77ec0" + integrity sha512-DuCRB7fu8MyTLbEQd1ew3R85nx/88yMoqo2uPSjevMj3yoN7CDM8jkgrY0wmVxfJZyJ/B9fE1iq7EQppWQmR5A== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + 
"@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-json-strings" "^7.8.3" -"@babel/plugin-transform-function-name@^7.0.0": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" - integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== +"@babel/plugin-transform-literals@^7.0.0", "@babel/plugin-transform-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.22.5.tgz#e9341f4b5a167952576e23db8d435849b1dd7920" + integrity sha512-fTLj4D79M+mepcw3dgFBTIDYpbcB9Sm0bpm4ppXPaO+U+PKFFyV9MGRvS0gvGw62sd10kT5lRMKXAADb9pWy8g== dependencies: - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-function-name" "^7.18.9" - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-function-name@^7.12.1", "@babel/plugin-transform-function-name@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.12.13.tgz#bb024452f9aaed861d374c8e7a24252ce3a50051" - integrity sha512-6K7gZycG0cmIwwF7uMK/ZqeCikCGVBdyP2J5SKNCXO5EOHcqi+z7Jwf8AmyDNcBgxET8DrEtCt/mPKPyAzXyqQ== +"@babel/plugin-transform-logical-assignment-operators@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.22.5.tgz#66ae5f068fd5a9a5dc570df16f56c2a8462a9d6c" + integrity sha512-MQQOUW1KL8X0cDWfbwYP+TbVbZm16QmQXJQ+vndPtH/BoO0lOKpVoEDMI7+PskYxH+IiE0tS8xZye0qr1lGzSA== dependencies: - "@babel/helper-function-name" "^7.12.13" - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" -"@babel/plugin-transform-literals@^7.0.0": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" - integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== +"@babel/plugin-transform-member-expression-literals@^7.0.0", "@babel/plugin-transform-member-expression-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.22.5.tgz#4fcc9050eded981a468347dd374539ed3e058def" + integrity sha512-RZEdkNtzzYCFl9SE9ATaUMTj2hqMb4StarOJLrZRbqqU4HSBE7UlBw9WBWQiDzrJZJdUWiMTVDI6Gv/8DPvfew== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-literals@^7.12.1", "@babel/plugin-transform-literals@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.12.13.tgz#2ca45bafe4a820197cf315794a4d26560fe4bdb9" - integrity sha512-FW+WPjSR7hiUxMcKqyNjP05tQ2kmBCdpEpZHY1ARm96tGQCCBvXKnpjILtDplUnJ/eHZ0lALLM+d2lMFSpYJrQ== +"@babel/plugin-transform-modules-amd@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.22.5.tgz#4e045f55dcf98afd00f85691a68fc0780704f526" + integrity sha512-R+PTfLTcYEmb1+kK7FNkhQ1gP4KgjpSO6HfH9+f8/yfp2Nt3ggBjiVpRwmwTlfqZLafYKJACy36yDXlEmI9HjQ== 
dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-member-expression-literals@^7.0.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" - integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== +"@babel/plugin-transform-modules-commonjs@^7.0.0", "@babel/plugin-transform-modules-commonjs@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.22.5.tgz#7d9875908d19b8c0536085af7b053fd5bd651bfa" + integrity sha512-B4pzOXj+ONRmuaQTg05b3y/4DuFz3WcCNAXPLb2Q0GT0TrGKGxNKV4jwsXts+StaM0LQczZbOpj8o1DLPDJIiA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-simple-access" "^7.22.5" -"@babel/plugin-transform-member-expression-literals@^7.12.1", "@babel/plugin-transform-member-expression-literals@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.12.13.tgz#5ffa66cd59b9e191314c9f1f803b938e8c081e40" - integrity sha512-kxLkOsg8yir4YeEPHLuO2tXP9R/gTjpuTOjshqSpELUN3ZAg2jfDnKUvzzJxObun38sw3wm4Uu69sX/zA7iRvg== +"@babel/plugin-transform-modules-systemjs@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.22.5.tgz#18c31410b5e579a0092638f95c896c2a98a5d496" + integrity sha512-emtEpoaTMsOs6Tzz+nbmcePl6AKVtS1yC4YNAeMun9U8YCsgadPNxnOPQ8GhHFB2qdx+LZu9LgoC0Lthuu05DQ== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.5" -"@babel/plugin-transform-modules-amd@^7.12.1", "@babel/plugin-transform-modules-amd@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.14.2.tgz#6622806fe1a7c07a1388444222ef9535f2ca17b0" - integrity sha512-hPC6XBswt8P3G2D1tSV2HzdKvkqOpmbyoy+g73JG0qlF/qx2y3KaMmXb1fLrpmWGLZYA0ojCvaHdzFWjlmV+Pw== +"@babel/plugin-transform-modules-umd@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.22.5.tgz#4694ae40a87b1745e3775b6a7fe96400315d4f98" + integrity sha512-+S6kzefN/E1vkSsKx8kmQuqeQsvCKCd1fraCM7zXm4SFoggI099Tr4G8U81+5gtMdUeMQ4ipdQffbKLX0/7dBQ== dependencies: - "@babel/helper-module-transforms" "^7.14.2" - "@babel/helper-plugin-utils" "^7.13.0" - babel-plugin-dynamic-import-node "^2.3.3" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-commonjs@^7.0.0": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.20.11.tgz#8cb23010869bf7669fd4b3098598b6b2be6dc607" - integrity sha512-S8e1f7WQ7cimJQ51JkAaDrEtohVEitXjgCGAS2N8S31Y42E+kWwfSz83LYz57QdBm7q9diARVqanIaH2oVgQnw== +"@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" + integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== dependencies: - "@babel/helper-module-transforms" "^7.20.11" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-simple-access" "^7.20.2" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-commonjs@^7.12.1", "@babel/plugin-transform-modules-commonjs@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.14.0.tgz#52bc199cb581e0992edba0f0f80356467587f161" - integrity sha512-EX4QePlsTaRZQmw9BsoPeyh5OCtRGIhwfLquhxGp5e32w+dyL8htOcDwamlitmNFK6xBZYlygjdye9dbd9rUlQ== +"@babel/plugin-transform-new-target@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.22.5.tgz#1b248acea54ce44ea06dfd37247ba089fcf9758d" + integrity sha512-AsF7K0Fx/cNKVyk3a+DW0JLo+Ua598/NxMRvxDnkpCIGFh43+h/v2xyhRUYf6oD8gE4QtL83C7zZVghMjHd+iw== dependencies: - "@babel/helper-module-transforms" "^7.14.0" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-simple-access" "^7.13.12" - babel-plugin-dynamic-import-node "^2.3.3" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-systemjs@^7.12.1", "@babel/plugin-transform-modules-systemjs@^7.13.8": - version "7.13.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.13.8.tgz#6d066ee2bff3c7b3d60bf28dec169ad993831ae3" - integrity sha512-hwqctPYjhM6cWvVIlOIe27jCIBgHCsdH2xCJVAYQm7V5yTMoilbVMi9f6wKg0rpQAOn6ZG4AOyvCqFF/hUh6+A== +"@babel/plugin-transform-nullish-coalescing-operator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.22.5.tgz#f8872c65776e0b552e0849d7596cddd416c3e381" + integrity sha512-6CF8g6z1dNYZ/VXok5uYkkBBICHZPiGEl7oDnAx2Mt1hlHVHOSIKWJaXHjQJA5VB43KZnXZDIexMchY4y2PGdA== dependencies: - "@babel/helper-hoist-variables" "^7.13.0" - "@babel/helper-module-transforms" "^7.13.0" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-validator-identifier" "^7.12.11" - babel-plugin-dynamic-import-node "^2.3.3" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" -"@babel/plugin-transform-modules-umd@^7.12.1", "@babel/plugin-transform-modules-umd@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.14.0.tgz#2f8179d1bbc9263665ce4a65f305526b2ea8ac34" - integrity sha512-nPZdnWtXXeY7I87UZr9VlsWme3Y0cfFFE41Wbxz4bbaexAjNMInXPFUpRRUJ8NoMm0Cw+zxbqjdPmLhcjfazMw== +"@babel/plugin-transform-numeric-separator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.22.5.tgz#57226a2ed9e512b9b446517ab6fa2d17abb83f58" + integrity sha512-NbslED1/6M+sXiwwtcAB/nieypGw02Ejf4KtDeMkCEpP6gWFMX1wI9WKYua+4oBneCCEmulOkRpwywypVZzs/g== dependencies: - "@babel/helper-module-transforms" "^7.14.0" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-numeric-separator" 
"^7.10.4" -"@babel/plugin-transform-named-capturing-groups-regex@^7.12.1", "@babel/plugin-transform-named-capturing-groups-regex@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.12.13.tgz#2213725a5f5bbbe364b50c3ba5998c9599c5c9d9" - integrity sha512-Xsm8P2hr5hAxyYblrfACXpQKdQbx4m2df9/ZZSQ8MAhsadw06+jW7s9zsSw6he+mJZXRlVMyEnVktJo4zjk1WA== +"@babel/plugin-transform-object-rest-spread@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.22.5.tgz#9686dc3447df4753b0b2a2fae7e8bc33cdc1f2e1" + integrity sha512-Kk3lyDmEslH9DnvCDA1s1kkd3YWQITiBOHngOtDL9Pt6BZjzqb6hiOlb8VfjiiQJ2unmegBqZu0rx5RxJb5vmQ== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.12.13" + "@babel/compat-data" "^7.22.5" + "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.22.5" -"@babel/plugin-transform-new-target@^7.12.1", "@babel/plugin-transform-new-target@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.12.13.tgz#e22d8c3af24b150dd528cbd6e685e799bf1c351c" - integrity sha512-/KY2hbLxrG5GTQ9zzZSc3xWiOy379pIETEhbtzwZcw9rvuaVV4Fqy7BYGYOWZnaoXIQYbbJ0ziXLa/sKcGCYEQ== +"@babel/plugin-transform-object-super@^7.0.0", "@babel/plugin-transform-object-super@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz#794a8d2fcb5d0835af722173c1a9d704f44e218c" + integrity sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" -"@babel/plugin-transform-object-super@^7.0.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" - integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== +"@babel/plugin-transform-optional-catch-binding@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.22.5.tgz#842080be3076703be0eaf32ead6ac8174edee333" + integrity sha512-pH8orJahy+hzZje5b8e2QIlBWQvGpelS76C63Z+jhZKsmzfNaPQ+LaW6dcJ9bxTpo1mtXbgHwy765Ro3jftmUg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" -"@babel/plugin-transform-object-super@^7.12.1", "@babel/plugin-transform-object-super@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.12.13.tgz#b4416a2d63b8f7be314f3d349bd55a9c1b5171f7" - integrity sha512-JzYIcj3XtYspZDV8j9ulnoMPZZnF/Cj0LUxPOjR89BdBVx+zYJI9MdMIlUZjbXDX+6YVeS6I3e8op+qQ3BYBoQ== +"@babel/plugin-transform-optional-chaining@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.22.5.tgz#1003762b9c14295501beb41be72426736bedd1e0" + integrity sha512-AconbMKOMkyG+xCng2JogMCDcqW8wedQAqpVIL4cOSescZ7+iW8utC6YDZLMCSUIReEA733gzRSaOSXMAt/4WQ== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - "@babel/helper-replace-supers" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" -"@babel/plugin-transform-parameters@^7.0.0", "@babel/plugin-transform-parameters@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.20.7.tgz#0ee349e9d1bc96e78e3b37a7af423a4078a7083f" - integrity sha512-WiWBIkeHKVOSYPO0pWkxGPfKeWrCJyD3NJ53+Lrp/QMSZbsVPovrVl2aWZ19D/LTVnaDv5Ap7GJ/B2CTOZdrfA== +"@babel/plugin-transform-parameters@^7.0.0", "@babel/plugin-transform-parameters@^7.20.7", "@babel/plugin-transform-parameters@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.5.tgz#c3542dd3c39b42c8069936e48717a8d179d63a18" + integrity sha512-AVkFUBurORBREOmHRKo06FjHYgjrabpdqRSwq6+C7R5iTCZOsM4QbcB27St0a4U6fffyAOqh3s/qEfybAhfivg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.14.2.tgz#e4290f72e0e9e831000d066427c4667098decc31" - integrity sha512-NxoVmA3APNCC1JdMXkdYXuQS+EMdqy0vIwyDHeKHiJKRxmp1qGSdb0JLEIoPRhkx6H/8Qi3RJ3uqOCYw8giy9A== +"@babel/plugin-transform-private-methods@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.22.5.tgz#21c8af791f76674420a147ae62e9935d790f8722" + integrity sha512-PPjh4gyrQnGe97JTalgRGMuU4icsZFnWkzicB/fUtzlKUqvsWBKEpPPfr5a2JiyirZkHxnAqkQMO5Z5B2kK3fA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-property-literals@^7.0.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" - integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== +"@babel/plugin-transform-private-property-in-object@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.5.tgz#07a77f28cbb251546a43d175a1dda4cf3ef83e32" + integrity sha512-/9xnaTTJcVoBtSSmrVyhtSvO3kbqS2ODoh2juEU72c3aYonNF0OMGiaz2gjukyKM2wBBYJP38S4JiE0Wfb5VMQ== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" -"@babel/plugin-transform-property-literals@^7.12.1", "@babel/plugin-transform-property-literals@^7.12.13": - version "7.12.13" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.12.13.tgz#4e6a9e37864d8f1b3bc0e2dce7bf8857db8b1a81" - integrity sha512-nqVigwVan+lR+g8Fj8Exl0UQX2kymtjcWfMOYM1vTYEKujeyv2SkMgazf2qNcK7l4SDiKyTA/nHCPqL4e2zo1A== +"@babel/plugin-transform-property-literals@^7.0.0", "@babel/plugin-transform-property-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz#b5ddabd73a4f7f26cd0e20f5db48290b88732766" + integrity sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-constant-elements@^7.12.1": version "7.13.13" @@ -1422,286 +1038,163 @@ dependencies: "@babel/helper-plugin-utils" "^7.13.0" -"@babel/plugin-transform-react-display-name@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.12.1.tgz#1cbcd0c3b1d6648c55374a22fc9b6b7e5341c00d" - integrity sha512-cAzB+UzBIrekfYxyLlFqf/OagTvHLcVBb5vpouzkYkBclRPraiygVnafvAoipErZLI8ANv8Ecn6E/m5qPXD26w== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-react-display-name@^7.0.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" - integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-react-display-name@^7.12.1", "@babel/plugin-transform-react-display-name@^7.12.13": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.14.2.tgz#2e854544d42ab3bb9c21f84e153d62e800fbd593" - integrity sha512-zCubvP+jjahpnFJvPaHPiGVfuVUjXHhFvJKQdNnsmSsiU9kR/rCZ41jHc++tERD2zV+p7Hr6is+t5b6iWTCqSw== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - -"@babel/plugin-transform-react-jsx-development@^7.12.1", "@babel/plugin-transform-react-jsx-development@^7.12.17": - version "7.12.17" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.12.17.tgz#f510c0fa7cd7234153539f9a362ced41a5ca1447" - integrity sha512-BPjYV86SVuOaudFhsJR1zjgxxOhJDt6JHNoD48DxWEIxUCAMjV1ys6DYw4SDYZh0b1QsS2vfIA9t/ZsQGsDOUQ== +"@babel/plugin-transform-react-display-name@^7.0.0", "@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz#3c4326f9fce31c7968d6cb9debcaf32d9e279a2b" + integrity sha512-PVk3WPYudRF5z4GKMEYUrLjPl38fJSKNaEOkFuoprioowGuWN6w2RKznuFNSlJx7pzzXXStPUnNSOEO0jL5EVw== dependencies: - "@babel/plugin-transform-react-jsx" "^7.12.17" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-react-jsx-self@^7.12.1": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.12.13.tgz#422d99d122d592acab9c35ea22a6cfd9bf189f60" - integrity 
sha512-FXYw98TTJ125GVCCkFLZXlZ1qGcsYqNQhVBQcZjyrwf8FEUtVfKIoidnO8S0q+KBQpDYNTmiGo1gn67Vti04lQ== +"@babel/plugin-transform-react-jsx-development@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz#e716b6edbef972a92165cd69d92f1255f7e73e87" + integrity sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/plugin-transform-react-jsx" "^7.22.5" -"@babel/plugin-transform-react-jsx-source@^7.12.1": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.14.2.tgz#2620b57e7de775c0687f65d464026d15812941da" - integrity sha512-OMorspVyjxghAjzgeAWc6O7W7vHbJhV69NeTGdl9Mxgz6PaweAuo7ffB9T5A1OQ9dGcw0As4SYMUhyNC4u7mVg== +"@babel/plugin-transform-react-jsx@^7.0.0", "@babel/plugin-transform-react-jsx@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416" + integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-jsx" "^7.22.5" + "@babel/types" "^7.22.5" -"@babel/plugin-transform-react-jsx@^7.0.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.20.7.tgz#025d85a1935fd7e19dfdcb1b1d4df34d4da484f7" - integrity sha512-Tfq7qqD+tRj3EoDhY00nn2uP2hsRxgYGi5mLQ5TimKav0a9Lrpd4deE+fcLXU8zFYRjlKPHZhpCvfEA6qnBxqQ== +"@babel/plugin-transform-react-pure-annotations@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.22.5.tgz#1f58363eef6626d6fa517b95ac66fe94685e32c0" + integrity sha512-gP4k85wx09q+brArVinTXhWiyzLl9UpmGva0+mWyKxk6JZequ05x3eUcIUE+FyttPKJFRRVtAvQaJ6YF9h1ZpA== dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-jsx" "^7.18.6" - "@babel/types" "^7.20.7" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-react-jsx@^7.12.1", "@babel/plugin-transform-react-jsx@^7.12.17", "@babel/plugin-transform-react-jsx@^7.13.12": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.14.3.tgz#0e26597805cf0862da735f264550933c38babb66" - integrity sha512-uuxuoUNVhdgYzERiHHFkE4dWoJx+UFVyuAl0aqN8P2/AKFHwqgUC5w2+4/PjpKXJsFgBlYAFXlUmDQ3k3DUkXw== +"@babel/plugin-transform-regenerator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.5.tgz#cd8a68b228a5f75fa01420e8cc2fc400f0fc32aa" + integrity sha512-rR7KePOE7gfEtNTh9Qw+iO3Q/e4DEsoQ+hdvM6QUDH7JRJ5qxq5AA52ZzBWbI5i9lfNuvySgOGP8ZN7LAmaiPw== dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-module-imports" "^7.13.12" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-jsx" "^7.12.13" - "@babel/types" "^7.14.2" + "@babel/helper-plugin-utils" "^7.22.5" + 
regenerator-transform "^0.15.1" -"@babel/plugin-transform-react-pure-annotations@^7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.12.1.tgz#05d46f0ab4d1339ac59adf20a1462c91b37a1a42" - integrity sha512-RqeaHiwZtphSIUZ5I85PEH19LOSzxfuEazoY7/pWASCAIBuATQzpSVD+eT6MebeeZT2F4eSL0u4vw6n4Nm0Mjg== +"@babel/plugin-transform-reserved-words@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.22.5.tgz#832cd35b81c287c4bcd09ce03e22199641f964fb" + integrity sha512-DTtGKFRQUDm8svigJzZHzb/2xatPc6TzNvAIJ5GqOKDsGFYgAskjRulbR/vGsPKq3OPqtexnz327qYpP57RFyA== dependencies: - "@babel/helper-annotate-as-pure" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-regenerator@^7.12.1", "@babel/plugin-transform-regenerator@^7.13.15": - version "7.13.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.13.15.tgz#e5eb28945bf8b6563e7f818945f966a8d2997f39" - integrity sha512-Bk9cOLSz8DiurcMETZ8E2YtIVJbFCPGW28DJWUakmyVWtQSm6Wsf0p4B4BfEr/eL2Nkhe/CICiUiMOCi1TPhuQ== +"@babel/plugin-transform-runtime@^7.16.4": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.22.5.tgz#ca975fb5e260044473c8142e1b18b567d33c2a3b" + integrity sha512-bg4Wxd1FWeFx3daHFTWk1pkSWK/AyQuiyAoeZAOkAOUBjnZPH6KT7eMxouV47tQ6hl6ax2zyAWBdWZXbrvXlaw== dependencies: - regenerator-transform "^0.14.2" + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + babel-plugin-polyfill-corejs2 "^0.4.3" + babel-plugin-polyfill-corejs3 "^0.8.1" + babel-plugin-polyfill-regenerator "^0.5.0" + semver "^6.3.0" -"@babel/plugin-transform-reserved-words@^7.12.1", "@babel/plugin-transform-reserved-words@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.12.13.tgz#7d9988d4f06e0fe697ea1d9803188aa18b472695" - integrity sha512-xhUPzDXxZN1QfiOy/I5tyye+TRz6lA7z6xaT4CLOjPRMVg1ldRf0LHw0TDBpYL4vG78556WuHdyO9oi5UmzZBg== +"@babel/plugin-transform-shorthand-properties@^7.0.0", "@babel/plugin-transform-shorthand-properties@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz#6e277654be82b5559fc4b9f58088507c24f0c624" + integrity sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-runtime@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.12.1.tgz#04b792057eb460389ff6a4198e377614ea1e7ba5" - integrity sha512-Ac/H6G9FEIkS2tXsZjL4RAdS3L3WHxci0usAnz7laPWUmFiGtj7tIASChqKZMHTSQTQY6xDbOq+V1/vIq3QrWg== +"@babel/plugin-transform-spread@^7.0.0", "@babel/plugin-transform-spread@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz#6487fd29f229c95e284ba6c98d65eafb893fea6b" + integrity sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg== dependencies: - "@babel/helper-module-imports" "^7.12.1" - 
"@babel/helper-plugin-utils" "^7.10.4" - resolve "^1.8.1" - semver "^5.5.1" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" -"@babel/plugin-transform-shorthand-properties@^7.0.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" - integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== +"@babel/plugin-transform-sticky-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.22.5.tgz#295aba1595bfc8197abd02eae5fc288c0deb26aa" + integrity sha512-zf7LuNpHG0iEeiyCNwX4j3gDg1jgt1k3ZdXBKbZSoA3BbGQGvMiSvfbZRR3Dr3aeJe3ooWFZxOOG3IRStYp2Bw== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-shorthand-properties@^7.12.1", "@babel/plugin-transform-shorthand-properties@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.12.13.tgz#db755732b70c539d504c6390d9ce90fe64aff7ad" - integrity sha512-xpL49pqPnLtf0tVluuqvzWIgLEhuPpZzvs2yabUHSKRNlN7ScYU7aMlmavOeyXJZKgZKQRBlh8rHbKiJDraTSw== +"@babel/plugin-transform-template-literals@^7.0.0", "@babel/plugin-transform-template-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz#8f38cf291e5f7a8e60e9f733193f0bcc10909bff" + integrity sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-spread@^7.0.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.20.7.tgz#c2d83e0b99d3bf83e07b11995ee24bf7ca09401e" - integrity sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw== +"@babel/plugin-transform-typeof-symbol@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.22.5.tgz#5e2ba478da4b603af8673ff7c54f75a97b716b34" + integrity sha512-bYkI5lMzL4kPii4HHEEChkD0rkc+nvnlR6+o/qdqR6zrm0Sv/nodmyLhlq2DO0YKLUNd2VePmPRjJXSBh9OIdA== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-spread@^7.12.1", "@babel/plugin-transform-spread@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.13.0.tgz#84887710e273c1815ace7ae459f6f42a5d31d5fd" - integrity sha512-V6vkiXijjzYeFmQTr3dBxPtZYLPcUfY34DebOU27jIl2M/Y8Egm52Hw82CSjjPqd54GTlJs5x+CR7HeNr24ckg== +"@babel/plugin-transform-typescript@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.5.tgz#5c0f7adfc1b5f38c4dbc8f79b1f0f8074134bd7d" + integrity sha512-SMubA9S7Cb5sGSFFUlqxyClTA9zWJ8qGQrppNUm05LtFuN1ELRFNndkix4zUJrC9F+YivWwa1dHMSyo0e0N9dA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" + 
"@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-typescript" "^7.22.5" -"@babel/plugin-transform-sticky-regex@^7.12.1", "@babel/plugin-transform-sticky-regex@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.13.tgz#760ffd936face73f860ae646fb86ee82f3d06d1f" - integrity sha512-Jc3JSaaWT8+fr7GRvQP02fKDsYk4K/lYwWq38r/UGfaxo89ajud321NH28KRQ7xy1Ybc0VUE5Pz8psjNNDUglg== +"@babel/plugin-transform-unicode-escapes@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.5.tgz#ce0c248522b1cb22c7c992d88301a5ead70e806c" + integrity sha512-biEmVg1IYB/raUO5wT1tgfacCef15Fbzhkx493D3urBI++6hpJ+RFG4SrWMn0NEZLfvilqKf3QDrRVZHo08FYg== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-template-literals@^7.0.0": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" - integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== +"@babel/plugin-transform-unicode-property-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.22.5.tgz#098898f74d5c1e86660dc112057b2d11227f1c81" + integrity sha512-HCCIb+CbJIAE6sXn5CjFQXMwkCClcOfPCzTlilJ8cUatfzwHlWQkbtV0zD338u9dZskwvuOYTuuaMaA8J5EI5A== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-template-literals@^7.12.1", "@babel/plugin-transform-template-literals@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.13.0.tgz#a36049127977ad94438dee7443598d1cefdf409d" - integrity sha512-d67umW6nlfmr1iehCcBv69eSUSySk1EsIS8aTDX4Xo9qajAh6mYtcl4kJrBkGXuxZPEgVr7RVfAvNW6YQkd4Mw== +"@babel/plugin-transform-unicode-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.22.5.tgz#ce7e7bb3ef208c4ff67e02a22816656256d7a183" + integrity sha512-028laaOKptN5vHJf9/Arr/HiJekMd41hOEZYvNsrsXqJ7YPYuX2bQxh31fkZzGmq3YqHRJzYFFAVYvKfMPKqyg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-typeof-symbol@^7.12.1", "@babel/plugin-transform-typeof-symbol@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.12.13.tgz#785dd67a1f2ea579d9c2be722de8c84cb85f5a7f" - integrity sha512-eKv/LmUJpMnu4npgfvs3LiHhJua5fo/CysENxa45YCQXZwKnGCQKAg87bvoqSW1fFT+HA32l03Qxsm8ouTY3ZQ== +"@babel/plugin-transform-unicode-sets-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.22.5.tgz#77788060e511b708ffc7d42fdfbc5b37c3004e91" + integrity sha512-lhMfi4FC15j13eKrh3DnYHjpGj6UKQHtNKTbtc1igvAhRy4+kLhV07OpLcsN0VgDEw/MjAvJO4BdMJsHwMhzCg== 
dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-typescript@^7.12.1": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.14.3.tgz#44f67f725a60cccee33d9d6fee5e4f338258f34f" - integrity sha512-G5Bb5pY6tJRTC4ag1visSgiDoGgJ1u1fMUgmc2ijLkcIdzP83Q1qyZX4ggFQ/SkR+PNOatkaYC+nKcTlpsX4ag== +"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.22.5.tgz#3da66078b181f3d62512c51cf7014392c511504e" + integrity sha512-fj06hw89dpiZzGZtxn+QybifF07nNiZjZ7sazs2aVDcysAZVGjW7+7iFYxg6GLNM47R/thYfLdrXc+2f11Vi9A== dependencies: - "@babel/helper-create-class-features-plugin" "^7.14.3" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-typescript" "^7.12.13" - -"@babel/plugin-transform-unicode-escapes@^7.12.1", "@babel/plugin-transform-unicode-escapes@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.12.13.tgz#840ced3b816d3b5127dd1d12dcedc5dead1a5e74" - integrity sha512-0bHEkdwJ/sN/ikBHfSmOXPypN/beiGqjo+o4/5K+vxEFNPRPdImhviPakMKG4x96l85emoa0Z6cDflsdBusZbw== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-transform-unicode-regex@^7.12.1", "@babel/plugin-transform-unicode-regex@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.12.13.tgz#b52521685804e155b1202e83fc188d34bb70f5ac" - integrity sha512-mDRzSNY7/zopwisPZ5kM9XKCfhchqIYwAKRERtEnhYscZB79VRekuRSoYbN0+KVe3y8+q1h6A4svXtP7N+UoCA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.12.13" - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/preset-env@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.12.1.tgz#9c7e5ca82a19efc865384bb4989148d2ee5d7ac2" - integrity sha512-H8kxXmtPaAGT7TyBvSSkoSTUK6RHh61So05SyEbpmr0MCZrsNYn7mGMzzeYoOUCdHzww61k8XBft2TaES+xPLg== - dependencies: - "@babel/compat-data" "^7.12.1" - "@babel/helper-compilation-targets" "^7.12.1" - "@babel/helper-module-imports" "^7.12.1" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-validator-option" "^7.12.1" - "@babel/plugin-proposal-async-generator-functions" "^7.12.1" - "@babel/plugin-proposal-class-properties" "^7.12.1" - "@babel/plugin-proposal-dynamic-import" "^7.12.1" - "@babel/plugin-proposal-export-namespace-from" "^7.12.1" - "@babel/plugin-proposal-json-strings" "^7.12.1" - "@babel/plugin-proposal-logical-assignment-operators" "^7.12.1" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.12.1" - "@babel/plugin-proposal-numeric-separator" "^7.12.1" - "@babel/plugin-proposal-object-rest-spread" "^7.12.1" - "@babel/plugin-proposal-optional-catch-binding" "^7.12.1" - "@babel/plugin-proposal-optional-chaining" "^7.12.1" - "@babel/plugin-proposal-private-methods" "^7.12.1" - "@babel/plugin-proposal-unicode-property-regex" "^7.12.1" - "@babel/plugin-syntax-async-generators" "^7.8.0" - "@babel/plugin-syntax-class-properties" "^7.12.1" - "@babel/plugin-syntax-dynamic-import" "^7.8.0" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.0" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - 
"@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" - "@babel/plugin-syntax-optional-chaining" "^7.8.0" - "@babel/plugin-syntax-top-level-await" "^7.12.1" - "@babel/plugin-transform-arrow-functions" "^7.12.1" - "@babel/plugin-transform-async-to-generator" "^7.12.1" - "@babel/plugin-transform-block-scoped-functions" "^7.12.1" - "@babel/plugin-transform-block-scoping" "^7.12.1" - "@babel/plugin-transform-classes" "^7.12.1" - "@babel/plugin-transform-computed-properties" "^7.12.1" - "@babel/plugin-transform-destructuring" "^7.12.1" - "@babel/plugin-transform-dotall-regex" "^7.12.1" - "@babel/plugin-transform-duplicate-keys" "^7.12.1" - "@babel/plugin-transform-exponentiation-operator" "^7.12.1" - "@babel/plugin-transform-for-of" "^7.12.1" - "@babel/plugin-transform-function-name" "^7.12.1" - "@babel/plugin-transform-literals" "^7.12.1" - "@babel/plugin-transform-member-expression-literals" "^7.12.1" - "@babel/plugin-transform-modules-amd" "^7.12.1" - "@babel/plugin-transform-modules-commonjs" "^7.12.1" - "@babel/plugin-transform-modules-systemjs" "^7.12.1" - "@babel/plugin-transform-modules-umd" "^7.12.1" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.12.1" - "@babel/plugin-transform-new-target" "^7.12.1" - "@babel/plugin-transform-object-super" "^7.12.1" - "@babel/plugin-transform-parameters" "^7.12.1" - "@babel/plugin-transform-property-literals" "^7.12.1" - "@babel/plugin-transform-regenerator" "^7.12.1" - "@babel/plugin-transform-reserved-words" "^7.12.1" - "@babel/plugin-transform-shorthand-properties" "^7.12.1" - "@babel/plugin-transform-spread" "^7.12.1" - "@babel/plugin-transform-sticky-regex" "^7.12.1" - "@babel/plugin-transform-template-literals" "^7.12.1" - "@babel/plugin-transform-typeof-symbol" "^7.12.1" - "@babel/plugin-transform-unicode-escapes" "^7.12.1" - "@babel/plugin-transform-unicode-regex" "^7.12.1" - "@babel/preset-modules" "^0.1.3" - "@babel/types" "^7.12.1" - core-js-compat "^3.6.2" - semver "^5.5.0" - -"@babel/preset-env@^7.12.1", "@babel/preset-env@^7.8.4": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.14.2.tgz#e80612965da73579c84ad2f963c2359c71524ed5" - integrity sha512-7dD7lVT8GMrE73v4lvDEb85cgcQhdES91BSD7jS/xjC6QY8PnRhux35ac+GCpbiRhp8crexBvZZqnaL6VrY8TQ== - dependencies: - "@babel/compat-data" "^7.14.0" - "@babel/helper-compilation-targets" "^7.13.16" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-validator-option" "^7.12.17" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.13.12" - "@babel/plugin-proposal-async-generator-functions" "^7.14.2" - "@babel/plugin-proposal-class-properties" "^7.13.0" - "@babel/plugin-proposal-class-static-block" "^7.13.11" - "@babel/plugin-proposal-dynamic-import" "^7.14.2" - "@babel/plugin-proposal-export-namespace-from" "^7.14.2" - "@babel/plugin-proposal-json-strings" "^7.14.2" - "@babel/plugin-proposal-logical-assignment-operators" "^7.14.2" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.14.2" - "@babel/plugin-proposal-numeric-separator" "^7.14.2" - "@babel/plugin-proposal-object-rest-spread" "^7.14.2" - "@babel/plugin-proposal-optional-catch-binding" "^7.14.2" - "@babel/plugin-proposal-optional-chaining" "^7.14.2" - "@babel/plugin-proposal-private-methods" "^7.13.0" - "@babel/plugin-proposal-private-property-in-object" "^7.14.0" - 
"@babel/plugin-proposal-unicode-property-regex" "^7.12.13" + "@babel/compat-data" "^7.22.5" + "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.22.5" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.22.5" + "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" "@babel/plugin-syntax-async-generators" "^7.8.4" "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" "@babel/plugin-syntax-dynamic-import" "^7.8.3" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.22.5" + "@babel/plugin-syntax-import-attributes" "^7.22.5" + "@babel/plugin-syntax-import-meta" "^7.10.4" "@babel/plugin-syntax-json-strings" "^7.8.3" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" @@ -1709,52 +1202,69 @@ "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.0" - "@babel/plugin-syntax-top-level-await" "^7.12.13" - "@babel/plugin-transform-arrow-functions" "^7.13.0" - "@babel/plugin-transform-async-to-generator" "^7.13.0" - "@babel/plugin-transform-block-scoped-functions" "^7.12.13" - "@babel/plugin-transform-block-scoping" "^7.14.2" - "@babel/plugin-transform-classes" "^7.14.2" - "@babel/plugin-transform-computed-properties" "^7.13.0" - "@babel/plugin-transform-destructuring" "^7.13.17" - "@babel/plugin-transform-dotall-regex" "^7.12.13" - "@babel/plugin-transform-duplicate-keys" "^7.12.13" - "@babel/plugin-transform-exponentiation-operator" "^7.12.13" - "@babel/plugin-transform-for-of" "^7.13.0" - "@babel/plugin-transform-function-name" "^7.12.13" - "@babel/plugin-transform-literals" "^7.12.13" - "@babel/plugin-transform-member-expression-literals" "^7.12.13" - "@babel/plugin-transform-modules-amd" "^7.14.2" - "@babel/plugin-transform-modules-commonjs" "^7.14.0" - "@babel/plugin-transform-modules-systemjs" "^7.13.8" - "@babel/plugin-transform-modules-umd" "^7.14.0" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.12.13" - "@babel/plugin-transform-new-target" "^7.12.13" - "@babel/plugin-transform-object-super" "^7.12.13" - "@babel/plugin-transform-parameters" "^7.14.2" - "@babel/plugin-transform-property-literals" "^7.12.13" - "@babel/plugin-transform-regenerator" "^7.13.15" - "@babel/plugin-transform-reserved-words" "^7.12.13" - "@babel/plugin-transform-shorthand-properties" "^7.12.13" - "@babel/plugin-transform-spread" "^7.13.0" - "@babel/plugin-transform-sticky-regex" "^7.12.13" - "@babel/plugin-transform-template-literals" "^7.13.0" - "@babel/plugin-transform-typeof-symbol" "^7.12.13" - "@babel/plugin-transform-unicode-escapes" "^7.12.13" - "@babel/plugin-transform-unicode-regex" "^7.12.13" - "@babel/preset-modules" "^0.1.4" - "@babel/types" "^7.14.2" - babel-plugin-polyfill-corejs2 "^0.2.0" - babel-plugin-polyfill-corejs3 "^0.2.0" - babel-plugin-polyfill-regenerator "^0.2.0" - core-js-compat "^3.9.0" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" + 
"@babel/plugin-transform-arrow-functions" "^7.22.5" + "@babel/plugin-transform-async-generator-functions" "^7.22.5" + "@babel/plugin-transform-async-to-generator" "^7.22.5" + "@babel/plugin-transform-block-scoped-functions" "^7.22.5" + "@babel/plugin-transform-block-scoping" "^7.22.5" + "@babel/plugin-transform-class-properties" "^7.22.5" + "@babel/plugin-transform-class-static-block" "^7.22.5" + "@babel/plugin-transform-classes" "^7.22.5" + "@babel/plugin-transform-computed-properties" "^7.22.5" + "@babel/plugin-transform-destructuring" "^7.22.5" + "@babel/plugin-transform-dotall-regex" "^7.22.5" + "@babel/plugin-transform-duplicate-keys" "^7.22.5" + "@babel/plugin-transform-dynamic-import" "^7.22.5" + "@babel/plugin-transform-exponentiation-operator" "^7.22.5" + "@babel/plugin-transform-export-namespace-from" "^7.22.5" + "@babel/plugin-transform-for-of" "^7.22.5" + "@babel/plugin-transform-function-name" "^7.22.5" + "@babel/plugin-transform-json-strings" "^7.22.5" + "@babel/plugin-transform-literals" "^7.22.5" + "@babel/plugin-transform-logical-assignment-operators" "^7.22.5" + "@babel/plugin-transform-member-expression-literals" "^7.22.5" + "@babel/plugin-transform-modules-amd" "^7.22.5" + "@babel/plugin-transform-modules-commonjs" "^7.22.5" + "@babel/plugin-transform-modules-systemjs" "^7.22.5" + "@babel/plugin-transform-modules-umd" "^7.22.5" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.22.5" + "@babel/plugin-transform-new-target" "^7.22.5" + "@babel/plugin-transform-nullish-coalescing-operator" "^7.22.5" + "@babel/plugin-transform-numeric-separator" "^7.22.5" + "@babel/plugin-transform-object-rest-spread" "^7.22.5" + "@babel/plugin-transform-object-super" "^7.22.5" + "@babel/plugin-transform-optional-catch-binding" "^7.22.5" + "@babel/plugin-transform-optional-chaining" "^7.22.5" + "@babel/plugin-transform-parameters" "^7.22.5" + "@babel/plugin-transform-private-methods" "^7.22.5" + "@babel/plugin-transform-private-property-in-object" "^7.22.5" + "@babel/plugin-transform-property-literals" "^7.22.5" + "@babel/plugin-transform-regenerator" "^7.22.5" + "@babel/plugin-transform-reserved-words" "^7.22.5" + "@babel/plugin-transform-shorthand-properties" "^7.22.5" + "@babel/plugin-transform-spread" "^7.22.5" + "@babel/plugin-transform-sticky-regex" "^7.22.5" + "@babel/plugin-transform-template-literals" "^7.22.5" + "@babel/plugin-transform-typeof-symbol" "^7.22.5" + "@babel/plugin-transform-unicode-escapes" "^7.22.5" + "@babel/plugin-transform-unicode-property-regex" "^7.22.5" + "@babel/plugin-transform-unicode-regex" "^7.22.5" + "@babel/plugin-transform-unicode-sets-regex" "^7.22.5" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.22.5" + babel-plugin-polyfill-corejs2 "^0.4.3" + babel-plugin-polyfill-corejs3 "^0.8.1" + babel-plugin-polyfill-regenerator "^0.5.0" + core-js-compat "^3.30.2" semver "^6.3.0" -"@babel/preset-modules@^0.1.3", "@babel/preset-modules@^0.1.4": - version "0.1.4" - resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.4.tgz#362f2b68c662842970fdb5e254ffc8fc1c2e415e" - integrity sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg== +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" 
"@babel/plugin-proposal-unicode-property-regex" "^7.4.4" @@ -1762,129 +1272,64 @@ "@babel/types" "^7.4.4" esutils "^2.0.2" -"@babel/preset-react@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.12.1.tgz#7f022b13f55b6dd82f00f16d1c599ae62985358c" - integrity sha512-euCExymHCi0qB9u5fKw7rvlw7AZSjw/NaB9h7EkdTt5+yHRrXdiRTh7fkG3uBPpJg82CqLfp1LHLqWGSCrab+g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-transform-react-display-name" "^7.12.1" - "@babel/plugin-transform-react-jsx" "^7.12.1" - "@babel/plugin-transform-react-jsx-development" "^7.12.1" - "@babel/plugin-transform-react-jsx-self" "^7.12.1" - "@babel/plugin-transform-react-jsx-source" "^7.12.1" - "@babel/plugin-transform-react-pure-annotations" "^7.12.1" - -"@babel/preset-react@^7.12.5": - version "7.13.13" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.13.13.tgz#fa6895a96c50763fe693f9148568458d5a839761" - integrity sha512-gx+tDLIE06sRjKJkVtpZ/t3mzCDOnPG+ggHZG9lffUbX8+wC739x20YQc9V35Do6ZAxaUc/HhVHIiOzz5MvDmA== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-validator-option" "^7.12.17" - "@babel/plugin-transform-react-display-name" "^7.12.13" - "@babel/plugin-transform-react-jsx" "^7.13.12" - "@babel/plugin-transform-react-jsx-development" "^7.12.17" - "@babel/plugin-transform-react-pure-annotations" "^7.12.1" - -"@babel/preset-typescript@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.12.1.tgz#86480b483bb97f75036e8864fe404cc782cc311b" - integrity sha512-hNK/DhmoJPsksdHuI/RVrcEws7GN5eamhi28JkO52MqIxU8Z0QpmiSOQxZHWOHV7I3P4UjHV97ay4TcamMA6Kw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-transform-typescript" "^7.12.1" +"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.22.5.tgz#c4d6058fbf80bccad02dd8c313a9aaa67e3c3dd6" + integrity sha512-M+Is3WikOpEJHgR385HbuCITPTaPRaNkibTEa9oiofmJvIsrceb4yp9RL9Kb+TE8LznmeyZqpP+Lopwcx59xPQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" + "@babel/plugin-transform-react-display-name" "^7.22.5" + "@babel/plugin-transform-react-jsx" "^7.22.5" + "@babel/plugin-transform-react-jsx-development" "^7.22.5" + "@babel/plugin-transform-react-pure-annotations" "^7.22.5" + +"@babel/preset-typescript@^7.16.0": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.22.5.tgz#16367d8b01d640e9a507577ed4ee54e0101e51c8" + integrity sha512-YbPaal9LxztSGhmndR46FmAbkJ/1fAsw293tSU+I5E5h+cnJ3d4GTwyUgGYmOXJYdGA+uNePle4qbaRzj2NISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" + "@babel/plugin-syntax-jsx" "^7.22.5" + "@babel/plugin-transform-modules-commonjs" "^7.22.5" + "@babel/plugin-transform-typescript" "^7.22.5" + +"@babel/regjsgen@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" + integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== "@babel/runtime-corejs3@^7.10.2": version "7.14.0" resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.14.0.tgz#6bf5fbc0b961f8e3202888cb2cd0fb7a0a9a3f66" - integrity 
sha512-0R0HTZWHLk6G8jIk0FtoX+AatCtKnswS98VhXwGImFc759PJRp4Tru0PQYZofyijTFUr+gT8Mu7sgXVJLQ0ceg== - dependencies: - core-js-pure "^3.0.0" - regenerator-runtime "^0.13.4" - -"@babel/runtime@7.12.1": - version "7.12.1" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.12.1.tgz#b4116a6b6711d010b2dad3b7b6e43bf1b9954740" - integrity sha512-J5AIf3vPj3UwXaAzb5j1xM4WAQDX3EMgemF8rjCP3SoW09LfRKAXQKt6CoVYl230P6iWdRcBbnLDDdnqWxZSCA== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/runtime@7.13.10": - version "7.13.10" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.10.tgz#47d42a57b6095f4468da440388fdbad8bebf0d7d" - integrity sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.5", "@babel/runtime@^7.14.0", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.14.0.tgz#46794bc20b612c5f75e62dd071e24dfd95f1cbe6" - integrity sha512-JELkvo/DlpNdJ7dlyw/eY7E0suy5i5GQH+Vlxaq1nsNJ+H7f4Vtv3jMeCEgRhZZQFXTjldYfQgv2qmM6M1v5wA== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.12.13": - version "7.20.1" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.20.1.tgz#1148bb33ab252b165a06698fde7576092a78b4a9" - integrity sha512-mrzLkl6U9YLF8qpqI7TB82PESyEGjm/0Ly91jG575eVxMMlb8fYfOXFZIJ8XfLrJZQbm7dlKry2bJmXBUEkdFg== - dependencies: - regenerator-runtime "^0.13.10" - -"@babel/runtime@^7.13.10", "@babel/runtime@^7.20.1", "@babel/runtime@^7.8.7": - version "7.20.13" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.20.13.tgz#7055ab8a7cff2b8f6058bf6ae45ff84ad2aded4b" - integrity sha512-gt3PKXs0DBoL9xCvOIIZ2NEqAGZqHjAnmVbfQtB620V0uReIQutpel14KcneZuer7UioY8ALKZ7iocavvzTNFA== - dependencies: - regenerator-runtime "^0.13.11" - -"@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.20.7.tgz#fcb41a5a70550e04a7b708037c7c32f7f356d8fd" - integrity sha512-UF0tvkUtxwAgZ5W/KrkHf0Rn0fdnLDU9ScxBrEVNUprE/MzirjK4MJUX1/BVDv00Sv8cljtukVK1aky++X1SjQ== - dependencies: - regenerator-runtime "^0.13.11" - -"@babel/runtime@^7.18.3", "@babel/runtime@^7.20.0": - version "7.20.0" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.20.0.tgz#824a9ef325ffde6f78056059db3168c08785e24a" - integrity sha512-NDYdls71fTXoU8TZHfbBWg7DiZfNzClcKui/+kyi6ppD2L1qnWW3VV6CjtaBXSUGGhiTWJ6ereOIkUvenif66Q== - dependencies: - regenerator-runtime "^0.13.10" - -"@babel/runtime@^7.3.1": - version "7.15.3" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.15.3.tgz#2e1c2880ca118e5b2f9988322bd8a7656a32502b" - integrity sha512-OvwMLqNXkCXSz1kSm58sEsNuhqOx/fKpnUnKnFB5v8uDda5bLNEHNgKPvhDN6IU0LDcnHQ90LlJ0Q6jnyBSIBA== + integrity sha512-0R0HTZWHLk6G8jIk0FtoX+AatCtKnswS98VhXwGImFc759PJRp4Tru0PQYZofyijTFUr+gT8Mu7sgXVJLQ0ceg== dependencies: + core-js-pure "^3.0.0" regenerator-runtime "^0.13.4" -"@babel/template@^7.10.4", "@babel/template@^7.12.13", "@babel/template@^7.3.3": - version "7.12.13" - resolved 
"https://registry.yarnpkg.com/@babel/template/-/template-7.12.13.tgz#530265be8a2589dbb37523844c5bcb55947fb327" - integrity sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA== +"@babel/runtime@7.13.10": + version "7.13.10" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.10.tgz#47d42a57b6095f4468da440388fdbad8bebf0d7d" + integrity sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw== dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/parser" "^7.12.13" - "@babel/types" "^7.12.13" + regenerator-runtime "^0.13.4" -"@babel/template@^7.14.5", "@babel/template@^7.4.4": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/template/-/template-7.14.5.tgz#a9bc9d8b33354ff6e55a9c60d1109200a68974f4" - integrity sha512-6Z3Po85sfxRGachLULUhOmvAaOo7xCvqGQtxINai2mEGPFm6pQ4z5QInFnUrRpfoSV60BnjyF5F3c+15fxFV1g== +"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.14.0", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.5.tgz#8564dd588182ce0047d55d7a75e93921107b57ec" + integrity sha512-ecjvYlnAaZ/KVneE/OdKYBYfgXV3Ptu6zQWmgEF7vwKhQnvVS6bjMD2XYgj+SNvQ1GfK/pjgokfPkC/2CO8CuA== dependencies: - "@babel/code-frame" "^7.14.5" - "@babel/parser" "^7.14.5" - "@babel/types" "^7.14.5" + regenerator-runtime "^0.13.11" -"@babel/template@^7.18.10", "@babel/template@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8" - integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== +"@babel/template@^7.22.5", "@babel/template@^7.3.3", "@babel/template@^7.4.4": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" + integrity sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw== dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" + "@babel/code-frame" "^7.22.5" + "@babel/parser" "^7.22.5" + "@babel/types" "^7.22.5" "@babel/traverse@7.12.13": version "7.12.13" @@ -1901,48 +1346,19 @@ globals "^11.1.0" lodash "^4.17.19" -"@babel/traverse@^7.1.0", "@babel/traverse@^7.12.1", "@babel/traverse@^7.13.0", "@babel/traverse@^7.13.15", "@babel/traverse@^7.14.0", "@babel/traverse@^7.14.2", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.0": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.2.tgz#9201a8d912723a831c2679c7ebbf2fe1416d765b" - integrity sha512-TsdRgvBFHMyHOOzcP9S6QU0QQtjxlRpEYOy3mcCO5RgmC305ki42aSAmfZEMSSYBla2oZ9BMqYlncBaKmD/7iA== - dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.14.2" - "@babel/helper-function-name" "^7.14.2" - "@babel/helper-split-export-declaration" "^7.12.13" - 
"@babel/parser" "^7.14.2" - "@babel/types" "^7.14.2" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/traverse@^7.1.6": - version "7.14.7" - resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.14.7.tgz#64007c9774cfdc3abd23b0780bc18a3ce3631753" - integrity sha512-9vDr5NzHu27wgwejuKL7kIOm4bwEtaPQ4Z6cpCmjSuaRqpH/7xc4qcGEscwMqlkwgcXl6MvqoAjZkQ24uSdIZQ== - dependencies: - "@babel/code-frame" "^7.14.5" - "@babel/generator" "^7.14.5" - "@babel/helper-function-name" "^7.14.5" - "@babel/helper-hoist-variables" "^7.14.5" - "@babel/helper-split-export-declaration" "^7.14.5" - "@babel/parser" "^7.14.7" - "@babel/types" "^7.14.5" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/traverse@^7.20.10", "@babel/traverse@^7.20.12", "@babel/traverse@^7.20.7": - version "7.20.12" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.20.12.tgz#7f0f787b3a67ca4475adef1f56cb94f6abd4a4b5" - integrity sha512-MsIbFN0u+raeja38qboyF8TIT7K0BFzz/Yd/77ta4MsUsmP2RAnidIlwq7d5HFQrH/OZJecGV6B71C4zAgpoSQ== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.20.7" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" +"@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.5.tgz#44bd276690db6f4940fdb84e1cb4abd2f729ccd1" + integrity sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ== + dependencies: + "@babel/code-frame" "^7.22.5" + "@babel/generator" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.5" + "@babel/parser" "^7.22.5" + "@babel/types" "^7.22.5" debug "^4.1.0" globals "^11.1.0" @@ -1955,29 +1371,13 @@ lodash "^4.17.19" to-fast-properties "^2.0.0" -"@babel/types@^7.0.0", "@babel/types@^7.0.0-beta.49", "@babel/types@^7.12.1", "@babel/types@^7.12.13", "@babel/types@^7.12.6", "@babel/types@^7.13.0", "@babel/types@^7.13.12", "@babel/types@^7.13.16", "@babel/types@^7.14.0", "@babel/types@^7.14.2", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4", "@babel/types@^7.7.0": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.14.2.tgz#4208ae003107ef8a057ea8333e56eb64d2f6a2c3" - integrity sha512-SdjAG/3DikRHpUOjxZgnkbR11xUlyDMUFJdvnIgZEE16mqmY0BINMmc4//JMJglEmn6i7sq6p+mGrFWyZ98EEw== - dependencies: - "@babel/helper-validator-identifier" "^7.14.0" - to-fast-properties "^2.0.0" - -"@babel/types@^7.1.6", "@babel/types@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff" - integrity sha512-M/NzBpEL95I5Hh4dwhin5JlE7EzO5PHMAuzjxss3tiOBD46KfQvVedN/3jEPZvdRvtsK2222XfdHogNIttFgcg== - dependencies: - "@babel/helper-validator-identifier" "^7.14.5" - to-fast-properties "^2.0.0" - -"@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.20.0", "@babel/types@^7.20.2", "@babel/types@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.20.7.tgz#54ec75e252318423fc07fb644dc6a58a64c09b7f" - integrity 
sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg== +"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.13", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" + integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== dependencies: - "@babel/helper-string-parser" "^7.19.4" - "@babel/helper-validator-identifier" "^7.19.1" + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" "@bcoe/v8-coverage@^0.2.3": @@ -1985,84 +1385,142 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@cnakazawa/watch@^1.0.3": - version "1.0.4" - resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a" - integrity sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ== +"@craco/craco@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@craco/craco/-/craco-7.1.0.tgz#12bd394c7f0334e214302e4d35a1768f68042fbb" + integrity sha512-oRAcPIKYrfPXp9rSzlsDNeOaVtDiKhoyqSXUoqiK24jCkHr4T8m/a2f74yXIzCbIheoUWDOIfWZyRgFgT+cpqA== dependencies: - exec-sh "^0.3.2" - minimist "^1.2.0" + autoprefixer "^10.4.12" + cosmiconfig "^7.0.1" + cosmiconfig-typescript-loader "^1.0.0" + cross-spawn "^7.0.3" + lodash "^4.17.21" + semver "^7.3.7" + webpack-merge "^5.8.0" -"@craco/craco@^6.1.1": - version "6.1.2" - resolved "https://registry.yarnpkg.com/@craco/craco/-/craco-6.1.2.tgz#30e45288e4609ac6b8cf828085b34acebdc60e69" - integrity sha512-GlQZn+g+yNlaDvIL5m6mcCoBGyFDwO4kkNx3fNFf98wuldkdWyBFoQbtOFOIb4gvkTh4VntOOxtJEoZfKs7XXw== +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== dependencies: - cross-spawn "^7.0.0" - lodash "^4.17.15" - semver "^7.3.2" - webpack-merge "^4.2.2" + "@jridgewell/trace-mapping" "0.3.9" -"@csstools/convert-colors@^1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@csstools/convert-colors/-/convert-colors-1.4.0.tgz#ad495dc41b12e75d588c6db8b9834f08fa131eb7" - integrity sha512-5a6wqoJV/xEdbRNKVo6I4hO3VjyDq//8q2f9I6PBAvMesJHFauXDorcNCsr9RzvsZnaWi5NYCcfyqP1QeFHFbw== +"@csstools/normalize.css@*": + version "12.0.0" + resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== -"@csstools/normalize.css@^10.1.0": - version "10.1.0" - resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-10.1.0.tgz#f0950bba18819512d42f7197e56c518aa491cf18" - integrity sha512-ij4wRiunFfaJxjB0BdrYHIH8FxBJpOwNPhhAcunlmPdXudL1WQV1qoP9un6JsEBAgQH+7UXyyjh0g7jTxXK6tg== +"@csstools/postcss-cascade-layers@^1.1.1": + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" + integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== + dependencies: + "@csstools/selector-specificity" "^2.0.2" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-color-function@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" + integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-font-format-keywords@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" + integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-hwb-function@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@csstools/postcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" + integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-ic-unit@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" + integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-is-pseudo-class@^2.0.7": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" + integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-nested-calc@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" + integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-normalize-display-values@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" + integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-oklab-function@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" + integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser 
"^4.2.0" + +"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" + integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-stepped-value-functions@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" + integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-text-decoration-shorthand@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" + integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-trigonometric-functions@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" + integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-unset-value@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@csstools/postcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" + integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== + +"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-2.2.0.tgz#2cbcf822bf3764c9658c4d2e568bd0c0cb748016" + integrity sha512-+OJ9konv95ClSTOJCmMZqpd5+YGsB2S+x6w3E1oaM8UuR5j8nTNHYSz8c9BEPGDOCMQYIEEGlVPj/VY64iTbGw== "@ctrl/tinycolor@^3.3.1", "@ctrl/tinycolor@^3.4.0": version "3.4.0" resolved "https://registry.yarnpkg.com/@ctrl/tinycolor/-/tinycolor-3.4.0.tgz#c3c5ae543c897caa9c2a68630bed355be5f9990f" integrity sha512-JZButFdZ1+/xAfpguQHoabIXkcqRRKpMrWKBkpEZZyxfY9C1DpADFB8PEqGSTeFr135SaTRfKqGKx5xSCLI7ZQ== -"@data-ui/shared@^0.0.84": - version "0.0.84" - resolved "https://registry.yarnpkg.com/@data-ui/shared/-/shared-0.0.84.tgz#42bd025d677f9be2beada3e1a84a53d33ac0eb10" - integrity sha512-MsDLsFzBHFEREr/eF2/RX1o/cXioEg+VQTsM8gViW5ywGQ7Xo5+EqUOaBSrwqKAkvp3e8PaEZVkchPC54IBhrA== - dependencies: - "@data-ui/theme" "^0.0.84" - "@vx/event" "^0.0.165" - "@vx/group" "^0.0.165" - "@vx/shape" "^0.0.168" - "@vx/tooltip" "0.0.165" - d3-array "^1.2.1" - prop-types "^15.5.10" - -"@data-ui/theme@^0.0.84": - version "0.0.84" - resolved "https://registry.yarnpkg.com/@data-ui/theme/-/theme-0.0.84.tgz#b75c23d7f38c582adbb8d2159d0d703159f8e3b2" - integrity sha512-jIoHftC/5c/LVJYF4VSBjjVjrjc0yj4mLkGe8p0eVO7qUYKVvlWx7PrpM7ucyefvuAaKIwlr+Nh2xPGPdADjaA== - -"@data-ui/xy-chart@^0.0.84": - version "0.0.84" - resolved 
"https://registry.yarnpkg.com/@data-ui/xy-chart/-/xy-chart-0.0.84.tgz#28bc41592d81d7be1da65ddd50fe3b79ea65649a" - integrity sha512-4mRWEGfeQJ2kFXmQ81k1gDPx2zdkty6lt0+srui4zleSyhnBv1dmm9J03dq+qwr7+bpzjfq77nINV5HXWb31Bg== - dependencies: - "@data-ui/shared" "^0.0.84" - "@data-ui/theme" "^0.0.84" - "@vx/axis" "^0.0.175" - "@vx/curve" "^0.0.165" - "@vx/event" "^0.0.165" - "@vx/glyph" "^0.0.165" - "@vx/gradient" "^0.0.165" - "@vx/grid" "^0.0.180" - "@vx/group" "^0.0.165" - "@vx/pattern" "^0.0.165" - "@vx/point" "^0.0.165" - "@vx/responsive" "^0.0.192" - "@vx/scale" "^0.0.165" - "@vx/shape" "^0.0.165" - "@vx/stats" "^0.0.165" - "@vx/text" "^0.0.192" - "@vx/threshold" "0.0.170" - "@vx/tooltip" "^0.0.165" - "@vx/voronoi" "^0.0.165" - d3-array "^1.2.0" - prop-types "^15.5.10" - "@emotion/babel-plugin@^11.10.5": version "11.10.5" resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.10.5.tgz#65fa6e1790ddc9e23cc22658a4c5dea423c55c3c" @@ -2214,21 +1672,38 @@ ts-node "^9" tslib "^2" -"@eslint/eslintrc@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.1.tgz#442763b88cecbe3ee0ec7ca6d6dd6168550cbf14" - integrity sha512-5v7TDE9plVhvxQeWLXDTvFvJBdH6pEsdnl2g/dAptmuFEPedQ4Erq5rsDsX+mvAM610IhNaO2W5V1dOOnDKxkQ== +"@eslint-community/eslint-utils@^4.2.0": + version "4.4.0" + resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== + dependencies: + eslint-visitor-keys "^3.3.0" + +"@eslint-community/regexpp@^4.4.0": + version "4.5.1" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.5.1.tgz#cdd35dce4fa1a89a4fd42b1599eb35b3af408884" + integrity sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ== + +"@eslint/eslintrc@^2.0.3": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.0.3.tgz#4910db5505f4d503f27774bf356e3704818a0331" + integrity sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ== dependencies: ajv "^6.12.4" - debug "^4.1.1" - espree "^7.3.0" - globals "^12.1.0" - ignore "^4.0.6" + debug "^4.3.2" + espree "^9.5.2" + globals "^13.19.0" + ignore "^5.2.0" import-fresh "^3.2.1" - js-yaml "^3.13.1" - minimatch "^3.0.4" + js-yaml "^4.1.0" + minimatch "^3.1.2" strip-json-comments "^3.1.1" +"@eslint/js@8.43.0": + version "8.43.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.43.0.tgz#559ca3d9ddbd6bf907ad524320a0d14b85586af0" + integrity sha512-s2UHCoiXfxMvmfzqoN+vrQ84ahUSYde9qNO1MdxmoEhyHWsfmwOpFlwYV+ePJEVc7gFnATGUi376WowX1N7tFg== + "@graphql-codegen/add@^2.0.2": version "2.0.2" resolved "https://registry.yarnpkg.com/@graphql-codegen/add/-/add-2.0.2.tgz#4acbb95be9ebb859a3cebfe7132fdf49ffe06dd8" @@ -2317,18 +1792,7 @@ parse-filepath "^1.0.2" tslib "~2.3.0" -"@graphql-codegen/plugin-helpers@^1.18.2": - version "1.18.7" - resolved "https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-1.18.7.tgz#465af3e5b02de89e49ddc76ad2546b880fe240f2" - integrity sha512-8ICOrXlsvyL1dpVz8C9b7H31d4DJpDd75WfjMn6Xjqz81Ah8xDn1Bi+7YXRCCILCBmvI94k6fi8qpsIVhFBBjQ== - dependencies: - "@graphql-tools/utils" "^7.9.1" - common-tags "1.8.0" - import-from "3.0.0" - lodash "~4.17.0" - tslib "~2.2.0" - -"@graphql-codegen/plugin-helpers@^1.18.8": +"@graphql-codegen/plugin-helpers@^1.18.2", 
"@graphql-codegen/plugin-helpers@^1.18.8": version "1.18.8" resolved "https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-1.18.8.tgz#39aac745b9e22e28c781cc07cf74836896a3a905" integrity sha512-mb4I9j9lMGqvGggYuZ0CV+Hme08nar68xkpPbAVotg/ZBmlhZIok/HqW2BcMQi7Rj+Il5HQMeQ1wQ1M7sv/TlQ== @@ -2653,43 +2117,11 @@ resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== -"@hapi/address@2.x.x": - version "2.1.4" - resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5" - integrity sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ== - -"@hapi/bourne@1.x.x": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@hapi/bourne/-/bourne-1.3.2.tgz#0a7095adea067243ce3283e1b56b8a8f453b242a" - integrity sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA== - -"@hapi/hoek@8.x.x", "@hapi/hoek@^8.3.0": - version "8.5.1" - resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-8.5.1.tgz#fde96064ca446dec8c55a8c2f130957b070c6e06" - integrity sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow== - "@hapi/hoek@^9.0.0": version "9.2.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz#f3933a44e365864f4dad5db94158106d511e8131" integrity sha512-sqKVVVOe5ivCaXDWivIJYVSaEgdQK9ul7a4Kity5Iw7u9+wBAPbX1RMSnLLmp7O4Vzj0WOWwMAJsTL00xwaNug== -"@hapi/joi@^15.1.0": - version "15.1.1" - resolved "https://registry.yarnpkg.com/@hapi/joi/-/joi-15.1.1.tgz#c675b8a71296f02833f8d6d243b34c57b8ce19d7" - integrity sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ== - dependencies: - "@hapi/address" "2.x.x" - "@hapi/bourne" "1.x.x" - "@hapi/hoek" "8.x.x" - "@hapi/topo" "3.x.x" - -"@hapi/topo@3.x.x": - version "3.1.6" - resolved "https://registry.yarnpkg.com/@hapi/topo/-/topo-3.1.6.tgz#68d935fa3eae7fdd5ab0d7f953f3205d8b2bfc29" - integrity sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ== - dependencies: - "@hapi/hoek" "^8.3.0" - "@hapi/topo@^5.0.0": version "5.1.0" resolved "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012" @@ -2697,6 +2129,25 @@ dependencies: "@hapi/hoek" "^9.0.0" +"@humanwhocodes/config-array@^0.11.10": + version "0.11.10" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.10.tgz#5a3ffe32cc9306365fb3fd572596cd602d5e12d2" + integrity sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.5" + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + 
"@iarna/toml@^2.2.5": version "2.2.5" resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" @@ -2723,167 +2174,194 @@ resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== -"@jest/console@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-26.6.2.tgz#4e04bc464014358b03ab4937805ee36a0aeb98f2" - integrity sha512-IY1R2i2aLsLr7Id3S6p2BA82GNWryt4oSvEXLAKc+L2zdi89dSkE8xC1C+0kpATG4JhBJREnQOH7/zmccM2B0g== +"@jest/console@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" + integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== dependencies: - "@jest/types" "^26.6.2" + "@jest/types" "^27.5.1" "@types/node" "*" chalk "^4.0.0" - jest-message-util "^26.6.2" - jest-util "^26.6.2" + jest-message-util "^27.5.1" + jest-util "^27.5.1" slash "^3.0.0" -"@jest/core@^26.6.0", "@jest/core@^26.6.3": - version "26.6.3" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-26.6.3.tgz#7639fcb3833d748a4656ada54bde193051e45fad" - integrity sha512-xvV1kKbhfUqFVuZ8Cyo+JPpipAHHAV3kcDBftiduK8EICXmTFddryy3P7NfZt8Pv37rA9nEJBKCCkglCPt/Xjw== +"@jest/console@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" + integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== dependencies: - "@jest/console" "^26.6.2" - "@jest/reporters" "^26.6.2" - "@jest/test-result" "^26.6.2" - "@jest/transform" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + slash "^3.0.0" + +"@jest/core@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" + integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/reporters" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" "@types/node" "*" ansi-escapes "^4.2.1" chalk "^4.0.0" + emittery "^0.8.1" exit "^0.1.2" - graceful-fs "^4.2.4" - jest-changed-files "^26.6.2" - jest-config "^26.6.3" - jest-haste-map "^26.6.2" - jest-message-util "^26.6.2" - jest-regex-util "^26.0.0" - jest-resolve "^26.6.2" - jest-resolve-dependencies "^26.6.3" - jest-runner "^26.6.3" - jest-runtime "^26.6.3" - jest-snapshot "^26.6.2" - jest-util "^26.6.2" - jest-validate "^26.6.2" - jest-watcher "^26.6.2" - micromatch "^4.0.2" - p-each-series "^2.1.0" + graceful-fs "^4.2.9" + jest-changed-files "^27.5.1" + jest-config "^27.5.1" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-resolve-dependencies "^27.5.1" + jest-runner "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + jest-watcher "^27.5.1" + micromatch "^4.0.4" rimraf "^3.0.0" slash "^3.0.0" strip-ansi "^6.0.0" -"@jest/environment@^26.6.0", "@jest/environment@^26.6.2": - version "26.6.2" - resolved 
"https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c" - integrity sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA== +"@jest/environment@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" + integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== dependencies: - "@jest/fake-timers" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" "@types/node" "*" - jest-mock "^26.6.2" + jest-mock "^27.5.1" -"@jest/fake-timers@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-26.6.2.tgz#459c329bcf70cee4af4d7e3f3e67848123535aad" - integrity sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA== +"@jest/fake-timers@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" + integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== dependencies: - "@jest/types" "^26.6.2" - "@sinonjs/fake-timers" "^6.0.1" + "@jest/types" "^27.5.1" + "@sinonjs/fake-timers" "^8.0.1" "@types/node" "*" - jest-message-util "^26.6.2" - jest-mock "^26.6.2" - jest-util "^26.6.2" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-util "^27.5.1" -"@jest/globals@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-26.6.2.tgz#5b613b78a1aa2655ae908eba638cc96a20df720a" - integrity sha512-85Ltnm7HlB/KesBUuALwQ68YTU72w9H2xW9FjZ1eL1U3lhtefjjl5c2MiUbpXt/i6LaPRvoOFJ22yCBSfQ0JIA== +"@jest/globals@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" + integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== dependencies: - "@jest/environment" "^26.6.2" - "@jest/types" "^26.6.2" - expect "^26.6.2" + "@jest/environment" "^27.5.1" + "@jest/types" "^27.5.1" + expect "^27.5.1" -"@jest/reporters@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-26.6.2.tgz#1f518b99637a5f18307bd3ecf9275f6882a667f6" - integrity sha512-h2bW53APG4HvkOnVMo8q3QXa6pcaNt1HkwVsOPMBV6LD/q9oSpxNSYZQYkAnjdMjrJ86UuYeLo+aEZClV6opnw== +"@jest/reporters@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" + integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== dependencies: "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^26.6.2" - "@jest/test-result" "^26.6.2" - "@jest/transform" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/console" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" chalk "^4.0.0" collect-v8-coverage "^1.0.0" exit "^0.1.2" glob "^7.1.2" - graceful-fs "^4.2.4" + graceful-fs "^4.2.9" istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^4.0.3" + istanbul-lib-instrument "^5.1.0" istanbul-lib-report "^3.0.0" istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.0.2" - jest-haste-map "^26.6.2" - jest-resolve "^26.6.2" - jest-util "^26.6.2" - jest-worker "^26.6.2" + istanbul-reports 
"^3.1.3" + jest-haste-map "^27.5.1" + jest-resolve "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" slash "^3.0.0" source-map "^0.6.0" string-length "^4.0.1" terminal-link "^2.0.0" - v8-to-istanbul "^7.0.0" - optionalDependencies: - node-notifier "^8.0.0" + v8-to-istanbul "^8.1.0" -"@jest/source-map@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-26.6.2.tgz#29af5e1e2e324cafccc936f218309f54ab69d535" - integrity sha512-YwYcCwAnNmOVsZ8mr3GfnzdXDAl4LaenZP5z+G0c8bzC9/dugL8zRmxZzdoTl4IaS3CryS1uWnROLPFmb6lVvA== +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" + integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== dependencies: callsites "^3.0.0" - graceful-fs "^4.2.4" + graceful-fs "^4.2.9" source-map "^0.6.0" -"@jest/test-result@^26.6.0", "@jest/test-result@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-26.6.2.tgz#55da58b62df134576cc95476efa5f7949e3f5f18" - integrity sha512-5O7H5c/7YlojphYNrK02LlDIV2GNPYisKwHm2QTKjNZeEzezCbwYs9swJySv2UfPMyZ0VdsmMv7jIlD/IKYQpQ== +"@jest/test-result@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" + integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== dependencies: - "@jest/console" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/console" "^27.5.1" + "@jest/types" "^27.5.1" "@types/istanbul-lib-coverage" "^2.0.0" collect-v8-coverage "^1.0.0" -"@jest/test-sequencer@^26.6.3": - version "26.6.3" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-26.6.3.tgz#98e8a45100863886d074205e8ffdc5a7eb582b17" - integrity sha512-YHlVIjP5nfEyjlrSr8t/YdNfU/1XEt7c5b4OxcXCjyRhjzLYu/rO69/WHPuYcbCWkz8kAeZVZp2N2+IOLLEPGw== +"@jest/test-result@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" + integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== dependencies: - "@jest/test-result" "^26.6.2" - graceful-fs "^4.2.4" - jest-haste-map "^26.6.2" - jest-runner "^26.6.3" - jest-runtime "^26.6.3" + "@jest/console" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" -"@jest/transform@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-26.6.2.tgz#5ac57c5fa1ad17b2aae83e73e45813894dcf2e4b" - integrity sha512-E9JjhUgNzvuQ+vVAL21vlyfy12gP0GhazGgJC4h6qUt1jSdUXGWJ1wfu/X7Sd8etSgxV4ovT1pb9v5D6QW4XgA== +"@jest/test-sequencer@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" + integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== + dependencies: + "@jest/test-result" "^27.5.1" + graceful-fs "^4.2.9" + 
jest-haste-map "^27.5.1" + jest-runtime "^27.5.1" + +"@jest/transform@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" + integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== dependencies: "@babel/core" "^7.1.0" - "@jest/types" "^26.6.2" - babel-plugin-istanbul "^6.0.0" + "@jest/types" "^27.5.1" + babel-plugin-istanbul "^6.1.1" chalk "^4.0.0" convert-source-map "^1.4.0" fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.4" - jest-haste-map "^26.6.2" - jest-regex-util "^26.0.0" - jest-util "^26.6.2" - micromatch "^4.0.2" - pirates "^4.0.1" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-regex-util "^27.5.1" + jest-util "^27.5.1" + micromatch "^4.0.4" + pirates "^4.0.4" slash "^3.0.0" source-map "^0.6.1" write-file-atomic "^3.0.0" -"@jest/types@^26.6.0", "@jest/types@^26.6.2": +"@jest/types@^26.6.2": version "26.6.2" resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== @@ -2894,18 +2372,33 @@ "@types/yargs" "^15.0.0" chalk "^4.0.0" -"@jridgewell/gen-mapping@^0.1.0": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" - integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== +"@jest/types@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== dependencies: - "@jridgewell/set-array" "^1.0.0" - "@jridgewell/sourcemap-codec" "^1.4.10" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" -"@jridgewell/gen-mapping@^0.3.2": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" - integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== +"@jest/types@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== dependencies: "@jridgewell/set-array" "^1.0.1" "@jridgewell/sourcemap-codec" "^1.4.10" @@ -2916,24 +2409,50 @@ resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== -"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": 
+"@jridgewell/resolve-uri@^3.0.3": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== + +"@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== +"@jridgewell/source-map@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.3.tgz#8108265659d4c33e72ffe14e33d6cc5eb59f2fda" + integrity sha512-b+fsZXeLYi9fEULmfBrhxn4IrPlINf8fiNarzTof004v3lFdntdwa9PF7vFJqm3mg7s+ScJMxXaE3Acp1irZcg== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== -"@jridgewell/trace-mapping@^0.3.9": - version "0.3.17" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz#793041277af9073b0951a7fe0f0d8c4c98c36985" - integrity sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g== +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.18" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" + integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== dependencies: "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + "@linaria/core@3.0.0-beta.13": version "3.0.0-beta.13" resolved "https://registry.yarnpkg.com/@linaria/core/-/core-3.0.0-beta.13.tgz#049c5be5faa67e341e413a0f6b641d5d78d91056" @@ -3081,35 +2600,34 @@ prop-types "^15.8.1" react-is "^18.2.0" -"@nodelib/fs.scandir@2.1.4": - version "2.1.4" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz#d4b3549a5db5de2683e0c1071ab4f140904bbf69" - integrity sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA== +"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": + version "5.1.1-v1" + resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" + integrity 
sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== + dependencies: + eslint-scope "5.1.1" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: - "@nodelib/fs.stat" "2.0.4" + "@nodelib/fs.stat" "2.0.5" run-parallel "^1.1.9" -"@nodelib/fs.stat@2.0.4", "@nodelib/fs.stat@^2.0.2": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz#a3f2dd61bab43b8db8fa108a121cfffe4c676655" - integrity sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q== +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== -"@nodelib/fs.walk@^1.2.3": - version "1.2.6" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz#cce9396b30aa5afe9e3756608f5831adcb53d063" - integrity sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow== +"@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: - "@nodelib/fs.scandir" "2.1.4" + "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@npmcli/move-file@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.1.2.tgz#1a82c3e372f7cae9253eb66d72543d6b8685c674" - integrity sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg== - dependencies: - mkdirp "^1.0.4" - rimraf "^3.0.2" - "@ocavue/svgmoji-cjs@^0.1.1": version "0.1.1" resolved "https://registry.yarnpkg.com/@ocavue/svgmoji-cjs/-/svgmoji-cjs-0.1.1.tgz#7240e6860c907187e4cd95b39858507384e4025f" @@ -3117,16 +2635,19 @@ dependencies: svgmoji "^3.2.0" -"@pmmmwh/react-refresh-webpack-plugin@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.4.3.tgz#1eec460596d200c0236bf195b078a5d1df89b766" - integrity sha512-br5Qwvh8D2OQqSXpd1g/xqXKnK0r+Jz6qVKBbWmpUcrbGOxUrf39V5oZ1876084CGn18uMdR5uvPqBv9UqtBjQ== +"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": + version "0.5.10" + resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz#2eba163b8e7dbabb4ce3609ab5e32ab63dda3ef8" + integrity sha512-j0Ya0hCFZPd4x40qLzbhGsh9TMtdb+CJQiso+WxLOPNasohq9cc5SNUcwsZaRH6++Xh91Xkm/xHCkuIiIu0LUA== dependencies: - ansi-html "^0.0.7" + ansi-html-community "^0.0.8" + common-path-prefix "^3.0.0" + core-js-pure "^3.23.3" error-stack-parser "^2.0.6" - html-entities "^1.2.1" - native-url "^0.2.6" - schema-utils "^2.6.5" + find-up "^5.0.0" + html-entities "^2.1.0" + loader-utils "^2.0.4" + schema-utils "^3.0.0" source-map "^0.7.3" "@popperjs/core@^2.11.6", "@popperjs/core@^2.9.2": @@ -3199,6 +2720,45 @@ "@react-hook/event" "^1.2.1" "@react-hook/throttle" "^2.2.0" +"@react-spring/animated@~9.7.3": + version "9.7.3" + resolved 
"https://registry.yarnpkg.com/@react-spring/animated/-/animated-9.7.3.tgz#4211b1a6d48da0ff474a125e93c0f460ff816e0f" + integrity sha512-5CWeNJt9pNgyvuSzQH+uy2pvTg8Y4/OisoscZIR8/ZNLIOI+CatFBhGZpDGTF/OzdNFsAoGk3wiUYTwoJ0YIvw== + dependencies: + "@react-spring/shared" "~9.7.3" + "@react-spring/types" "~9.7.3" + +"@react-spring/core@~9.7.3": + version "9.7.3" + resolved "https://registry.yarnpkg.com/@react-spring/core/-/core-9.7.3.tgz#60056bcb397f2c4f371c6c9a5f882db77ae90095" + integrity sha512-IqFdPVf3ZOC1Cx7+M0cXf4odNLxDC+n7IN3MDcVCTIOSBfqEcBebSv+vlY5AhM0zw05PDbjKrNmBpzv/AqpjnQ== + dependencies: + "@react-spring/animated" "~9.7.3" + "@react-spring/shared" "~9.7.3" + "@react-spring/types" "~9.7.3" + +"@react-spring/shared@~9.7.3": + version "9.7.3" + resolved "https://registry.yarnpkg.com/@react-spring/shared/-/shared-9.7.3.tgz#4cf29797847c689912aec4e62e34c99a4d5d9e53" + integrity sha512-NEopD+9S5xYyQ0pGtioacLhL2luflh6HACSSDUZOwLHoxA5eku1UPuqcJqjwSD6luKjjLfiLOspxo43FUHKKSA== + dependencies: + "@react-spring/types" "~9.7.3" + +"@react-spring/types@~9.7.3": + version "9.7.3" + resolved "https://registry.yarnpkg.com/@react-spring/types/-/types-9.7.3.tgz#ea78fd447cbc2612c1f5d55852e3c331e8172a0b" + integrity sha512-Kpx/fQ/ZFX31OtlqVEFfgaD1ACzul4NksrvIgYfIFq9JpDHFwQkMVZ10tbo0FU/grje4rcL4EIrjekl3kYwgWw== + +"@react-spring/web@^9.7.3": + version "9.7.3" + resolved "https://registry.yarnpkg.com/@react-spring/web/-/web-9.7.3.tgz#d9f4e17fec259f1d65495a19502ada4f5b57fa3d" + integrity sha512-BXt6BpS9aJL/QdVqEIX9YoUy8CE6TJrU0mNCqSoxdXlIeNcEBWOfIyE6B14ENNsyQKS3wOWkiJfco0tCr/9tUg== + dependencies: + "@react-spring/animated" "~9.7.3" + "@react-spring/core" "~9.7.3" + "@react-spring/shared" "~9.7.3" + "@react-spring/types" "~9.7.3" + "@remirror/core-constants@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@remirror/core-constants/-/core-constants-2.0.0.tgz#a52f89059d93955e00810023cc76b4f7db9650bf" @@ -4076,18 +3636,27 @@ dependencies: type-fest "^2.0.0" -"@rollup/plugin-node-resolve@^7.1.1": - version "7.1.3" - resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.1.3.tgz#80de384edfbd7bfc9101164910f86078151a3eca" - integrity sha512-RxtSL3XmdTAE2byxekYLnx+98kEUOrPHF/KRVjLH+DEIHy6kjIw7YINQzn+NXiH/NTrQLAwYs0GWB+csWygA9Q== +"@rollup/plugin-babel@^5.2.0": + version "5.3.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== dependencies: - "@rollup/pluginutils" "^3.0.8" - "@types/resolve" "0.0.8" + "@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-node-resolve@^11.2.1": + version "11.2.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" builtin-modules "^3.1.0" + deepmerge "^4.2.2" is-module "^1.0.0" - resolve "^1.14.2" + resolve "^1.19.0" -"@rollup/plugin-replace@^2.3.1": +"@rollup/plugin-replace@^2.4.1": version "2.4.2" resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== @@ -4095,7 
+3664,7 @@ "@rollup/pluginutils" "^3.1.0" magic-string "^0.25.7" -"@rollup/pluginutils@^3.0.8", "@rollup/pluginutils@^3.1.0": +"@rollup/pluginutils@^3.1.0": version "3.1.0" resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== @@ -4109,6 +3678,11 @@ resolved "https://registry.yarnpkg.com/@rooks/use-mutation-observer/-/use-mutation-observer-4.11.2.tgz#a0466c4338e0a4487ea19253c86bcd427c29f4af" integrity sha512-vpsdrZdr6TkB1zZJcHx+fR1YC/pHs2BaqcuYiEGjBVbwY5xcC49+h0hAUtQKHth3oJqXfIX/Ng8S7s5HFHdM/A== +"@rushstack/eslint-patch@^1.1.0": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.2.tgz#31b9c510d8cada9683549e1dbb4284cca5001faf" + integrity sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw== + "@samverschueren/stream-to-observable@^0.3.0": version "0.3.1" resolved "https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz#a21117b19ee9be70c379ec1877537ef2e1c63301" @@ -4138,6 +3712,11 @@ resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df" integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== +"@sinclair/typebox@^0.24.1": + version "0.24.51" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" + integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== + "@sindresorhus/is@^0.14.0": version "0.14.0" resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" @@ -4150,13 +3729,6 @@ dependencies: type-detect "4.0.8" -"@sinonjs/fake-timers@^6.0.1": - version "6.0.1" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40" - integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA== - dependencies: - "@sinonjs/commons" "^1.7.0" - "@sinonjs/fake-timers@^7.0.4", "@sinonjs/fake-timers@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.1.0.tgz#8f13af27d842cbf51ad4502e05562fe9391d084e" @@ -4164,6 +3736,13 @@ dependencies: "@sinonjs/commons" "^1.7.0" +"@sinonjs/fake-timers@^8.0.1": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== + dependencies: + "@sinonjs/commons" "^1.7.0" + "@sinonjs/samsam@^6.0.2": version "6.0.2" resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-6.0.2.tgz#a0117d823260f282c04bff5f8704bdc2ac6910bb" @@ -4178,13 +3757,15 @@ resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5" integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== -"@surma/rollup-plugin-off-main-thread@^1.1.1": - version "1.4.2" - resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-1.4.2.tgz#e6786b6af5799f82f7ab3a82e53f6182d2b91a58" - integrity 
sha512-yBMPqmd1yEJo/280PAMkychuaALyQ9Lkb5q1ck3mjJrFuEobIfhnQ4J3mbvBoISmR3SWMWV+cGB/I0lCQee79A== +"@surma/rollup-plugin-off-main-thread@^2.2.3": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== dependencies: - ejs "^2.6.1" + ejs "^3.1.6" + json5 "^2.2.0" magic-string "^0.25.0" + string.prototype.matchall "^4.0.6" "@svgmoji/blob@^3.2.0": version "3.2.0" @@ -4319,7 +3900,7 @@ deepmerge "^4.2.2" svgo "^1.2.2" -"@svgr/webpack@5.5.0": +"@svgr/webpack@^5.5.0": version "5.5.0" resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== @@ -4381,23 +3962,48 @@ resolved "https://registry.yarnpkg.com/@tommoor/remove-markdown/-/remove-markdown-0.3.2.tgz#5288ddd0e26b6b173e76ebb31c94653b0dcff45d" integrity sha512-awcc9hfLZqyyZHOGzAHbnjgZJpQGS1W1oZZ5GXOTTnbKVdKQ4OWYbrRWPUvXI2YAKJazrcS8rxPh67PX3rpGkQ== -"@tootallnate/once@1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== "@types/aria-query@^4.2.0": version "4.2.1" resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.1.tgz#78b5433344e2f92e8b306c06a5622c50c245bf6b" integrity sha512-S6oPal772qJZHoRZLFc/XoZW2gFvwXusYUmXPXkgxJLuEk2vOt7jc4Yo6z/vtI0EBkbPBVrJJ0B+prLIKiWqHg== -"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.7": - version "7.1.14" - resolved 
"https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.14.tgz#faaeefc4185ec71c389f4501ee5ec84b170cc402" - integrity sha512-zGZJzzBUVDo/eV6KgbE0f0ZI7dInEYvo12Rb70uNQDshC3SkRMb67ja0GgRHZgAX3Za6rhaWlvbDO8rrGyAb1g== +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" + integrity sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw== dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" + "@babel/parser" "^7.20.7" + "@babel/types" "^7.20.7" "@types/babel__generator" "*" "@types/babel__template" "*" "@types/babel__traverse" "*" @@ -4424,12 +4030,20 @@ dependencies: "@babel/types" "^7.3.0" -"@types/classnames@^2.2.9": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@types/classnames/-/classnames-2.3.1.tgz#3c2467aa0f1a93f1f021e3b9bcf938bd5dfdc0dd" - integrity sha512-zeOWb0JGBoVmlQoznvqXbE0tEC/HONsnoUNH19Hc96NFsTAwTXbTqb8FMYkru1F/iqp7a18Ws3nWJvtA1sHD1A== +"@types/body-parser@*": + version "1.19.2" + resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== dependencies: - classnames "*" + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" "@types/codemirror@^5.60.2": version "5.60.5" @@ -4438,34 +4052,59 @@ dependencies: "@types/tern" "*" -"@types/d3-color@^1": - version "1.4.1" - resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-1.4.1.tgz#0d9746c84dfef28807b2989eed4f37b2575e1f33" - integrity sha512-xkPLi+gbgUU9ED6QX4g6jqYL2KCB0/3AlM+ncMGqn49OgH0gFMY/ITGqPF8HwEiLzJaC+2L0I+gNwBgABv1Pvg== +"@types/connect-history-api-fallback@^1.3.5": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz#9fd20b3974bdc2bcd4ac6567e2e0f6885cb2cf41" + integrity sha512-4x5FkPpLipqwthjPsF7ZRbOv3uoLUFkTA9G9v583qi4pACvq0uTELrB8OLUzPWUI4IJIyvM85vzkV1nyiI2Lig== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/d3-array@3.0.3": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.3.tgz#87d990bf504d14ad6b16766979d04e943c046dac" + integrity sha512-Reoy+pKnvsksN0lQUlcH6dOGjRZ/3WRwXR//m+/8lt1BXeI4xyaUZoqULNjyXXRuh0Mj4LNpkCvhUpQlY3X5xQ== + +"@types/d3-color@*", "@types/d3-color@3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.1.0.tgz#6594da178ded6c7c3842f3cc0ac84b156f12f2d4" + integrity sha512-HKuicPHJuvPgCD+np6Se9MQvS6OCbJmOjGvylzMJRlDwUXjKTTXs6Pwgk79O09Vj/ho3u1ofXnhFOaEWWPrlwA== + +"@types/d3-format@3.0.1": + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/@types/d3-format/-/d3-format-3.0.1.tgz#194f1317a499edd7e58766f96735bdc0216bb89d" + integrity sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg== "@types/d3-hierarchy@^1.1.6": version "1.1.7" resolved "https://registry.yarnpkg.com/@types/d3-hierarchy/-/d3-hierarchy-1.1.7.tgz#14a57b0539f8929015f8ad96490de50a16211040" integrity sha512-fvht6DOYKzqmXjMb/+xfgkmrWM4SD7rMA/ZbM+gGwr9ZTuIDfky95J8CARtaJo/ExeWyS0xGVdL2gqno2zrQ0Q== -"@types/d3-interpolate@^1.3.1": - version "1.4.2" - resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-1.4.2.tgz#88902a205f682773a517612299a44699285eed7b" - integrity sha512-ylycts6llFf8yAEs1tXzx2loxxzDZHseuhPokrqKprTQSTcD3JbJI1omZP1rphsELZO3Q+of3ff0ZS7+O6yVzg== +"@types/d3-interpolate@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz#e7d17fa4a5830ad56fe22ce3b4fac8541a9572dc" + integrity sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw== dependencies: - "@types/d3-color" "^1" + "@types/d3-color" "*" "@types/d3-path@^1", "@types/d3-path@^1.0.8": version "1.0.9" resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-1.0.9.tgz#73526b150d14cd96e701597cbf346cfd1fd4a58c" integrity sha512-NaIeSIBiFgSC6IGUBjZWcscUJEq7vpVu7KthHN8eieTV9d9MqkSOZLH4chq1PmcKy06PNe3axLeKmRIyxJ+PZQ== -"@types/d3-scale@^2.1.1": - version "2.2.4" - resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-2.2.4.tgz#ca0d4b84d2f88fe058480f81354d14041a667b96" - integrity sha512-wkQXT+IfgfAnKB5rtS1qMJg3FS32r1rVFHvqtiqk8pX8o5aQR3VwX1P7ErHjzNIicTlkWsaMiUTrYB+E75HFeA== +"@types/d3-scale@4.0.2": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.2.tgz#41be241126af4630524ead9cb1008ab2f0f26e69" + integrity sha512-Yk4htunhPAwN0XGlIwArRomOjdoBFXC3+kCxK2Ubg7I9shQlVSJy/pG/Ht5ASN+gdMIalpk8TJ5xV74jFsetLA== dependencies: - "@types/d3-time" "^1" + "@types/d3-time" "*" "@types/d3-shape@^1.3.1": version "1.3.5" @@ -4474,10 +4113,20 @@ dependencies: "@types/d3-path" "^1" -"@types/d3-time@^1", "@types/d3-time@^1.0.10": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-1.1.1.tgz#6cf3a4242c3bbac00440dfb8ba7884f16bedfcbf" - integrity sha512-ULX7LoqXTCYtM+tLYOaeAJK7IwCT+4Gxlm2MaH0ErKLi07R5lh8NHCAyWcDkCCmx1AfRcBEV6H9QE9R25uP7jw== +"@types/d3-time-format@2.1.0": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-2.1.0.tgz#011e0fb7937be34a9a8f580ae1e2f2f1336a8a22" + integrity sha512-/myT3I7EwlukNOX2xVdMzb8FRgNzRMpsZddwst9Ld/VFe6LyJyRp0s32l/V9XoUzk+Gqu56F/oGk6507+8BxrA== + +"@types/d3-time@*", "@types/d3-time@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.0.tgz#e1ac0f3e9e195135361fa1a1d62f795d87e6e819" + integrity sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg== + +"@types/d3-voronoi@^1.1.9": + version "1.1.9" + resolved "https://registry.yarnpkg.com/@types/d3-voronoi/-/d3-voronoi-1.1.9.tgz#7bbc210818a3a5c5e0bafb051420df206617c9e5" + integrity sha512-DExNQkaHd1F3dFPvGA/Aw2NGyjMln6E9QzsiqOcBgnE+VInYnFBHBBySbZQts6z6xD+5jTfKCP7M4OqMyVjdwQ== "@types/diff@^5.0.0": version "5.0.0" @@ -4496,31 +4145,51 @@ dependencies: "@types/trusted-types" "*" -"@types/eslint@^7.2.6": - version "7.2.11" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.2.11.tgz#180b58f5bb7d7376e39d22496e2b08901aa52fd2" - 
integrity sha512-WYhv//5K8kQtsSc9F1Kn2vHzhYor6KpwPbARH7hwYe3C3ETD0EVx/3P5qQybUoaBEuUa9f/02JjBiXFWalYUmw== +"@types/eslint-scope@^3.7.3": + version "3.7.4" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": + version "8.40.2" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.40.2.tgz#2833bc112d809677864a4b0e7d1de4f04d7dac2d" + integrity sha512-PRVjQ4Eh9z9pmmtaq8nTjZjQwKFk7YIHIud3lRoKRBgUQjgjRmoGxxGEPXQkF+lH7QkHJRNr5F4aBgYCW0lqpQ== dependencies: "@types/estree" "*" "@types/json-schema" "*" -"@types/estree@*": - version "0.0.47" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.47.tgz#d7a51db20f0650efec24cd04994f523d93172ed4" - integrity sha512-c5ciR06jK8u9BstrmJyO97m+klJrrhCf9u3rLu3DEAJBirxRqSCvDQoYKmxuYwQI5SZChAWu+tq9oVlGRuzPAg== +"@types/estree@*", "@types/estree@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" + integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== "@types/estree@0.0.39": version "0.0.39" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== -"@types/glob@^7.1.1": - version "7.1.3" - resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.3.tgz#e6ba80f36b7daad2c685acd9266382e68985c183" - integrity sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w== +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": + version "4.17.35" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.35.tgz#c95dd4424f0d32e525d23812aa8ab8e4d3906c4f" + integrity sha512-wALWQwrgiB2AWTT91CB62b6Yt0sNHpznUXeZEcnPU3DRdlDIz74x8Qg1UUYKSVFi+va5vKOLYRBI1bRKiLLKIg== dependencies: - "@types/minimatch" "*" "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + "@types/send" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.17" + resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.17.tgz#01d5437f6ef9cfa8668e616e13c2f2ac9a491ae4" + integrity sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.33" + "@types/qs" "*" + "@types/serve-static" "*" "@types/graceful-fs@^4.1.2": version "4.1.5" @@ -4556,10 +4225,15 @@ "@types/react" "*" hoist-non-react-statics "^3.3.0" -"@types/html-minifier-terser@^5.0.0": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-5.1.1.tgz#3c9ee980f1a10d6021ae6632ca3e79ca2ec4fb50" - integrity sha512-giAlZwstKbmvMk1OO7WXSj4OZ0keXAcl2TQq4LWHiiPH2ByaH7WeUzng+Qej8UPxxv+8lRTuouo0iaNDBuzIBA== +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + 
+"@types/http-errors@*": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" + integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== "@types/http-proxy-agent@^2.0.2": version "2.0.2" @@ -4568,10 +4242,10 @@ dependencies: "@types/node" "*" -"@types/http-proxy@^1.17.5": - version "1.17.7" - resolved "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.7.tgz#30ea85cc2c868368352a37f0d0d3581e24834c6f" - integrity sha512-9hdj6iXH64tHSLTY+Vt2eYOGzSogC+JQ2H7bdPWkuh7KXP5qLllWx++t+K9Wk556c3dkDdPws/SpMRi0sdCT1w== +"@types/http-proxy@^1.17.5", "@types/http-proxy@^1.17.8": + version "1.17.11" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" + integrity sha512-HC8G7c1WmaF2ekqpnFq626xd3Zz0uvaqFmBJNRZCGEZCXkvSdJoNFn/8Ygbd9fKNQj8UzLdCETaI0UWPAjK7IA== dependencies: "@types/node" "*" @@ -4612,10 +4286,10 @@ resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.1.tgz#5544730b65a480b18ace6b6ce914e519cec2d43b" integrity sha512-xdOvNmXmrZqqPy3kuCQ+fz6wA0xU5pji9cd1nDrflWaAWtYLLGk5ykW0H6yg5TVyehHP1pfmuuSaZkhP+kspVA== -"@types/json-schema@*", "@types/json-schema@^7.0.3", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.6": - version "7.0.7" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad" - integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.12" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" + integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== "@types/json-stable-stringify@^1.0.32": version "1.0.32" @@ -4634,10 +4308,10 @@ dependencies: "@types/node" "*" -"@types/lodash@^4.14.146", "@types/lodash@^4.14.160": - version "4.14.170" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.170.tgz#0d67711d4bf7f4ca5147e9091b847479b87925d6" - integrity sha512-bpcvu/MKHHeYX+qeEN8GE7DIravODWdACVA1ctevD8CN24RhPZIKMn9ntfAsrvLfSX3cR5RrBKAbYm9bGs0A+Q== +"@types/lodash@^4.14.172": + version "4.14.195" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.195.tgz#bafc975b252eb6cea78882ce8a7b6bf22a6de632" + integrity sha512-Hwx9EUgdwf2GLarOjQp5ZH8ZmblzcbTBC2wtQWNKARBSxM9ezRIAUpeDTgoQRAFB0+8CNWXVA9+MaSOzOF3nPg== "@types/marked@^4.0.2": version "4.0.7" @@ -4651,16 +4325,21 @@ dependencies: "@types/unist" "*" +"@types/mime@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + +"@types/mime@^1": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" + integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== + "@types/min-document@^2.19.0": version "2.19.0" resolved "https://registry.yarnpkg.com/@types/min-document/-/min-document-2.19.0.tgz#4f9919e789917c00de967a2c38fa8d234cbcd7d6" integrity 
sha512-lsYeSW1zfNqHTL1RuaOgfAhoiOWV1RAQDKT0BZ26z4Faz8llVIj1r1ablUo5QY6yzHMketuvu4+N0sv0eZpXTg== -"@types/minimatch@*": - version "3.0.4" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.4.tgz#f0ec25dbf2f0e4b18647313ac031134ca5b24b21" - integrity sha512-1z8k4wzFnNjVK/tlxvrWuK5WMt6mydWWP7+zvH5eFep4oj+UkrfiJTRtjCeBXNpwaA/FYqqtb4/QS4ianFpIRA== - "@types/node@*": version "15.6.1" resolved "https://registry.yarnpkg.com/@types/node/-/node-15.6.1.tgz#32d43390d5c62c5b6ec486a9bc9c59544de39a08" @@ -4671,11 +4350,6 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.13.tgz#e743bae112bd779ac9650f907197dd2caa7f0364" integrity sha512-1x8W5OpxPq+T85OUsHRP6BqXeosKmeXRtjoF39STcdf/UWLqUsoehstZKOi0CunhVqHG17AyZgpj20eRVooK6A== -"@types/normalize-package-data@^2.4.0": - version "2.4.0" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" - integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== - "@types/object.omit@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/object.omit/-/object.omit-3.0.0.tgz#0d31e1208eac8fe2ad5c9499a1016a8273bbfafc" @@ -4696,22 +4370,17 @@ resolved "https://registry.yarnpkg.com/@types/parse5/-/parse5-5.0.3.tgz#e7b5aebbac150f8b5fdd4a46e7f0bd8e65e19109" integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== -"@types/prettier@^2.0.0": - version "2.2.3" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0" - integrity sha512-PijRCG/K3s3w1We6ynUKdxEc5AcuuH3NBmMDP8uvKVp6X43UY7NQlTzczakXP3DJR0F4dfNQIGjU2cUeRYs2AA== +"@types/prettier@^2.1.5": + version "2.7.3" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.3.tgz#3e51a17e291d01d17d3fc61422015a933af7a08f" + integrity sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA== "@types/prismjs@*": version "1.26.0" resolved "https://registry.yarnpkg.com/@types/prismjs/-/prismjs-1.26.0.tgz#a1c3809b0ad61c62cac6d4e0c56d610c910b7654" integrity sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ== -"@types/prop-types@*": - version "15.7.3" - resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.3.tgz#2ab0d5da2e5815f94b0b9d4b95d1e5f243ab2ca7" - integrity sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw== - -"@types/prop-types@^15.7.5": +"@types/prop-types@*", "@types/prop-types@^15.7.5": version "15.7.5" resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== @@ -4721,6 +4390,11 @@ resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.4.tgz#15925414e0ad2cd765bfef58842f7e26a7accb24" integrity sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug== +"@types/qs@*": + version "6.9.7" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + "@types/query-string@^6.3.0": version "6.3.0" resolved 
"https://registry.yarnpkg.com/@types/query-string/-/query-string-6.3.0.tgz#b6fa172a01405abcaedac681118e78429d62ea39" @@ -4733,6 +4407,11 @@ resolved "https://registry.yarnpkg.com/@types/querystringify/-/querystringify-2.0.0.tgz#d1eab3214ee2b57c3bd7eba0ab94b231028522fb" integrity sha512-9WgEGTevECrXJC2LSWPqiPYWq8BRmeaOyZn47js/3V6UF0PWtcVfvvR43YjeO8BzBsthTz98jMczujOwTw+WYg== +"@types/range-parser@*": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + "@types/react-color@^3.0.6": version "3.0.6" resolved "https://registry.yarnpkg.com/@types/react-color/-/react-color-3.0.6.tgz#602fed023802b2424e7cd6ff3594ccd3d5055f9a" @@ -4742,9 +4421,9 @@ "@types/reactcss" "*" "@types/react-dom@*": - version "18.0.10" - resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.10.tgz#3b66dec56aa0f16a6cc26da9e9ca96c35c0b4352" - integrity sha512-E42GW/JA4Qv15wQdqJq8DL4JhNpB3prJgjgapN3qJT9K2zO5IIAQh4VXvCEDupoqAwnz0cY4RlXeC/ajX5SFHg== + version "18.2.6" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.2.6.tgz#ad621fa71a8db29af7c31b41b2ea3d8a6f4144d1" + integrity sha512-2et4PDvg6PVCyS7fuTc4gPoksV58bW0RwSxWKcPRcHZf0PRUGq03TKcD/rUHe3azfV6/5/biUBJw+HhCQjaP0A== dependencies: "@types/react" "*" @@ -4809,22 +4488,58 @@ dependencies: "@types/prismjs" "*" -"@types/resolve@0.0.8": - version "0.0.8" - resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" - integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ== +"@types/resolve@1.17.1": + version "1.17.1" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== dependencies: "@types/node" "*" +"@types/retry@0.12.0": + version "0.12.0" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + "@types/scheduler@*": version "0.16.1" resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.1.tgz#18845205e86ff0038517aab7a18a62a6b9f71275" integrity sha512-EaCxbanVeyxDRTQBkdLb3Bvl/HK7PBK6UJjsSixB0iHKoWxE5uu2Q/DgtpOhPIojN0Zl1whvOd7PoHs2P0s5eA== -"@types/source-list-map@*": - version "0.1.2" - resolved "https://registry.yarnpkg.com/@types/source-list-map/-/source-list-map-0.1.2.tgz#0078836063ffaf17412349bba364087e0ac02ec9" - integrity sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA== +"@types/semver@^7.3.12": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.0.tgz#591c1ce3a702c45ee15f47a42ade72c2fd78978a" + integrity sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw== + +"@types/send@*": + version "0.17.1" + resolved "https://registry.yarnpkg.com/@types/send/-/send-0.17.1.tgz#ed4932b8a2a805f1fe362a70f4e62d0ac994e301" + integrity sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q== + dependencies: + "@types/mime" "^1" + "@types/node" "*" + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved 
"https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.2" + resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.2.tgz#3e5419ecd1e40e7405d34093f10befb43f63381a" + integrity sha512-J2LqtvFYCzaj8pVYKw8klQXrLLk7TBZmQ4ShlcdkELFKGwGMfevMLneMMRkMgZxotOD9wg497LpC7O8PcvAmfw== + dependencies: + "@types/http-errors" "*" + "@types/mime" "*" + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" "@types/stack-utils@^2.0.0": version "2.0.0" @@ -4845,11 +4560,6 @@ "@types/react" "*" csstype "^3.0.2" -"@types/tapable@^1", "@types/tapable@^1.0.5": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.7.tgz#545158342f949e8fd3bfd813224971ecddc3fac4" - integrity sha512-0VBprVqfgFD7Ehb2vd8Lh9TG3jP98gvr8rgehQqzztZNI7o8zS8Ad4jyZneKELphpuE212D8J70LnSNQSyO6bQ== - "@types/tern@*": version "0.23.4" resolved "https://registry.yarnpkg.com/@types/tern/-/tern-0.23.4.tgz#03926eb13dbeaf3ae0d390caf706b2643a0127fb" @@ -4869,49 +4579,21 @@ resolved "https://registry.yarnpkg.com/@types/throttle-debounce/-/throttle-debounce-2.1.0.tgz#1c3df624bfc4b62f992d3012b84c56d41eab3776" integrity sha512-5eQEtSCoESnh2FsiLTxE121IiE60hnMqcb435fShf4bpLRjEu1Eoekht23y6zXS9Ts3l+Szu3TARnTsA0GkOkQ== -"@types/trusted-types@*": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" - integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== +"@types/trusted-types@*", "@types/trusted-types@^2.0.2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.3.tgz#a136f83b0758698df454e328759dbd3d44555311" + integrity sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g== "@types/turndown@^5.0.1": version "5.0.1" resolved "https://registry.yarnpkg.com/@types/turndown/-/turndown-5.0.1.tgz#fcda7b02cda4c9d445be1440036df20f335b9387" integrity sha512-N8Ad4e3oJxh9n9BiZx9cbe/0M3kqDpOTm2wzj13wdDUxDPjfjloWIJaquZzWE1cYTAHpjOH3rcTnXQdpEfS/SQ== -"@types/uglify-js@*": - version "3.13.0" - resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.13.0.tgz#1cad8df1fb0b143c5aba08de5712ea9d1ff71124" - integrity sha512-EGkrJD5Uy+Pg0NUR8uA4bJ5WMfljyad0G+784vLCNUkD+QwOJXUbBYExXfVGf7YtyzdQp3L/XMYcliB987kL5Q== - dependencies: - source-map "^0.6.1" - "@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": version "2.0.3" resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== -"@types/webpack-sources@*": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@types/webpack-sources/-/webpack-sources-2.1.0.tgz#8882b0bd62d1e0ce62f183d0d01b72e6e82e8c10" - integrity 
sha512-LXn/oYIpBeucgP1EIJbKQ2/4ZmpvRl+dlrFdX7+94SKRUV3Evy3FsfMZY318vGhkWUS5MPhtOM3w1/hCOAOXcg== - dependencies: - "@types/node" "*" - "@types/source-list-map" "*" - source-map "^0.7.3" - -"@types/webpack@^4.41.8": - version "4.41.29" - resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.29.tgz#2e66c1de8223c440366469415c50a47d97625773" - integrity sha512-6pLaORaVNZxiB3FSHbyBiWM7QdazAWda1zvAq4SbZObZqHSDbWLi62iFdblVea6SK9eyBIVp5yHhKt/yNQdR7Q== - dependencies: - "@types/node" "*" - "@types/tapable" "^1" - "@types/uglify-js" "*" - "@types/webpack-sources" "*" - anymatch "^3.0.0" - source-map "^0.6.0" - "@types/websocket@1.0.2": version "1.0.2" resolved "https://registry.yarnpkg.com/@types/websocket/-/websocket-1.0.2.tgz#d2855c6a312b7da73ed16ba6781815bf30c6187a" @@ -4919,6 +4601,13 @@ dependencies: "@types/node" "*" +"@types/ws@^8.5.5": + version "8.5.5" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.5.tgz#af587964aa06682702ee6dcbc7be41a80e4b28eb" + integrity sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg== + dependencies: + "@types/node" "*" + "@types/yargs-parser@*": version "20.2.0" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.0.tgz#dd3e6699ba3237f0348cd085e4698780204842f9" @@ -4931,117 +4620,115 @@ dependencies: "@types/yargs-parser" "*" -"@types/zen-observable@^0.8.0": - version "0.8.2" - resolved "https://registry.yarnpkg.com/@types/zen-observable/-/zen-observable-0.8.2.tgz#808c9fa7e4517274ed555fa158f2de4b4f468e71" - integrity sha512-HrCIVMLjE1MOozVoD86622S7aunluLb2PJdPfb3nYiEtohm8mIB/vyv0Fd37AdeMFrTUQXEunw78YloMA3Qilg== - -"@typescript-eslint/eslint-plugin@^4.25.0", "@typescript-eslint/eslint-plugin@^4.5.0": - version "4.25.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.25.0.tgz#d82657b6ab4caa4c3f888ff923175fadc2f31f2a" - integrity sha512-Qfs3dWkTMKkKwt78xp2O/KZQB8MPS1UQ5D3YW2s6LQWBE1074BE+Rym+b1pXZIX3M3fSvPUDaCvZLKV2ylVYYQ== - dependencies: - "@typescript-eslint/experimental-utils" "4.25.0" - "@typescript-eslint/scope-manager" "4.25.0" - debug "^4.1.1" - functional-red-black-tree "^1.0.1" - lodash "^4.17.15" - regexpp "^3.0.0" - semver "^7.3.2" - tsutils "^3.17.1" - -"@typescript-eslint/experimental-utils@4.25.0", "@typescript-eslint/experimental-utils@^4.0.1": - version "4.25.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.25.0.tgz#b2febcfa715d2c1806fd5f0335193a6cd270df54" - integrity sha512-f0doRE76vq7NEEU0tw+ajv6CrmPelw5wLoaghEHkA2dNLFb3T/zJQqGPQ0OYt5XlZaS13MtnN+GTPCuUVg338w== - dependencies: - "@types/json-schema" "^7.0.3" - "@typescript-eslint/scope-manager" "4.25.0" - "@typescript-eslint/types" "4.25.0" - "@typescript-eslint/typescript-estree" "4.25.0" - eslint-scope "^5.0.0" - eslint-utils "^2.0.0" - -"@typescript-eslint/experimental-utils@^3.10.1": - version "3.10.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-3.10.1.tgz#e179ffc81a80ebcae2ea04e0332f8b251345a686" - integrity sha512-DewqIgscDzmAfd5nOGe4zm6Bl7PKtMG2Ad0KG8CUZAHlXfAKTF9Ol5PXhiMh39yRL2ChRH1cuuUGOcVyyrhQIw== - dependencies: - "@types/json-schema" "^7.0.3" - "@typescript-eslint/types" "3.10.1" - "@typescript-eslint/typescript-estree" "3.10.1" - eslint-scope "^5.0.0" - eslint-utils "^2.0.0" - -"@typescript-eslint/parser@^4.25.0", "@typescript-eslint/parser@^4.4.1", "@typescript-eslint/parser@^4.5.0": - version "4.25.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.25.0.tgz#6b2cb6285aa3d55bfb263c650739091b0f19aceb" - integrity sha512-OZFa1SKyEJpAhDx8FcbWyX+vLwh7OEtzoo2iQaeWwxucyfbi0mT4DijbOSsTgPKzGHr6GrF2V5p/CEpUH/VBxg== - dependencies: - "@typescript-eslint/scope-manager" "4.25.0" - "@typescript-eslint/types" "4.25.0" - "@typescript-eslint/typescript-estree" "4.25.0" - debug "^4.1.1" - -"@typescript-eslint/scope-manager@4.25.0": - version "4.25.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.25.0.tgz#9d86a5bcc46ef40acd03d85ad4e908e5aab8d4ca" - integrity sha512-2NElKxMb/0rya+NJG1U71BuNnp1TBd1JgzYsldsdA83h/20Tvnf/HrwhiSlNmuq6Vqa0EzidsvkTArwoq+tH6w== +"@types/yargs@^16.0.0": + version "16.0.5" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.5.tgz#12cc86393985735a283e387936398c2f9e5f88e3" + integrity sha512-AxO/ADJOBFJScHbWhq2xAhlWP24rY4aCEG/NFaMvbT3X2MgRsLjhjQwsn0Zi5zn0LG9jUhCCZMeX9Dkuw6k+vQ== dependencies: - "@typescript-eslint/types" "4.25.0" - "@typescript-eslint/visitor-keys" "4.25.0" - -"@typescript-eslint/types@3.10.1": - version "3.10.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-3.10.1.tgz#1d7463fa7c32d8a23ab508a803ca2fe26e758727" - integrity sha512-+3+FCUJIahE9q0lDi1WleYzjCwJs5hIsbugIgnbB+dSCYUxl8L6PwmsyOPFZde2hc1DlTo/xnkOgiTLSyAbHiQ== - -"@typescript-eslint/types@4.25.0": - version "4.25.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.25.0.tgz#0e444a5c5e3c22d7ffa5e16e0e60510b3de5af87" - integrity sha512-+CNINNvl00OkW6wEsi32wU5MhHti2J25TJsJJqgQmJu3B3dYDBcmOxcE5w9cgoM13TrdE/5ND2HoEnBohasxRQ== + "@types/yargs-parser" "*" -"@typescript-eslint/typescript-estree@3.10.1": - version "3.10.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-3.10.1.tgz#fd0061cc38add4fad45136d654408569f365b853" - integrity sha512-QbcXOuq6WYvnB3XPsZpIwztBoquEYLXh2MtwVU+kO8jgYCiv4G5xrSP/1wg4tkvrEE+esZVquIPX/dxPlePk1w== +"@types/yargs@^17.0.8": + version "17.0.24" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" + integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== dependencies: - "@typescript-eslint/types" "3.10.1" - "@typescript-eslint/visitor-keys" "3.10.1" - debug "^4.1.1" - glob "^7.1.6" - is-glob "^4.0.1" - lodash "^4.17.15" - semver "^7.3.2" - tsutils "^3.17.1" + "@types/yargs-parser" "*" -"@typescript-eslint/typescript-estree@4.25.0": - version "4.25.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.25.0.tgz#942e4e25888736bff5b360d9b0b61e013d0cfa25" - integrity sha512-1B8U07TGNAFMxZbSpF6jqiDs1cVGO0izVkf18Q/SPcUAc9LhHxzvSowXDTvkHMWUVuPpagupaW63gB6ahTXVlg== - dependencies: - "@typescript-eslint/types" "4.25.0" - "@typescript-eslint/visitor-keys" "4.25.0" - debug "^4.1.1" - globby "^11.0.1" - is-glob "^4.0.1" - semver "^7.3.2" - tsutils "^3.17.1" +"@types/zen-observable@^0.8.0": + version "0.8.2" + resolved "https://registry.yarnpkg.com/@types/zen-observable/-/zen-observable-0.8.2.tgz#808c9fa7e4517274ed555fa158f2de4b4f468e71" + integrity sha512-HrCIVMLjE1MOozVoD86622S7aunluLb2PJdPfb3nYiEtohm8mIB/vyv0Fd37AdeMFrTUQXEunw78YloMA3Qilg== -"@typescript-eslint/visitor-keys@3.10.1": - version "3.10.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-3.10.1.tgz#cd4274773e3eb63b2e870ac602274487ecd1e931" - integrity 
sha512-9JgC82AaQeglebjZMgYR5wgmfUdUc+EitGUUMW8u2nDckaeimzW+VsoLV6FoimPv2id3VQzfjwBxEMVz08ameQ== - dependencies: - eslint-visitor-keys "^1.1.0" +"@typescript-eslint/eslint-plugin@^5.38.1", "@typescript-eslint/eslint-plugin@^5.5.0": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.60.1.tgz#81382d6ecb92b8dda70e91f9035611cb2fecd1c3" + integrity sha512-KSWsVvsJsLJv3c4e73y/Bzt7OpqMCADUO846bHcuWYSYM19bldbAeDv7dYyV0jwkbMfJ2XdlzwjhXtuD7OY6bw== + dependencies: + "@eslint-community/regexpp" "^4.4.0" + "@typescript-eslint/scope-manager" "5.60.1" + "@typescript-eslint/type-utils" "5.60.1" + "@typescript-eslint/utils" "5.60.1" + debug "^4.3.4" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + natural-compare-lite "^1.4.0" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/experimental-utils@^5.0.0": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.60.1.tgz#d783bb63b9183541019a945eda6a9d96b096d985" + integrity sha512-TXUdLxv2t8181nh5yLXl/Gr/zKj1ZofQ7m+ZdmG2+El0TYOHCvlZfc35D4nturemC3RUnf3KmLuFp3bVBjkG5w== + dependencies: + "@typescript-eslint/utils" "5.60.1" + +"@typescript-eslint/parser@^5.38.1", "@typescript-eslint/parser@^5.5.0": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.60.1.tgz#0f2f58209c0862a73e3d5a56099abfdfa21d0fd3" + integrity sha512-pHWlc3alg2oSMGwsU/Is8hbm3XFbcrb6P5wIxcQW9NsYBfnrubl/GhVVD/Jm/t8HXhA2WncoIRfBtnCgRGV96Q== + dependencies: + "@typescript-eslint/scope-manager" "5.60.1" + "@typescript-eslint/types" "5.60.1" + "@typescript-eslint/typescript-estree" "5.60.1" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.60.1": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.60.1.tgz#35abdb47f500c68c08f2f2b4f22c7c79472854bb" + integrity sha512-Dn/LnN7fEoRD+KspEOV0xDMynEmR3iSHdgNsarlXNLGGtcUok8L4N71dxUgt3YvlO8si7E+BJ5Fe3wb5yUw7DQ== + dependencies: + "@typescript-eslint/types" "5.60.1" + "@typescript-eslint/visitor-keys" "5.60.1" + +"@typescript-eslint/type-utils@5.60.1": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.60.1.tgz#17770540e98d65ab4730c7aac618003f702893f4" + integrity sha512-vN6UztYqIu05nu7JqwQGzQKUJctzs3/Hg7E2Yx8rz9J+4LgtIDFWjjl1gm3pycH0P3mHAcEUBd23LVgfrsTR8A== + dependencies: + "@typescript-eslint/typescript-estree" "5.60.1" + "@typescript-eslint/utils" "5.60.1" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.60.1": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.60.1.tgz#a17473910f6b8d388ea83c9d7051af89c4eb7561" + integrity sha512-zDcDx5fccU8BA0IDZc71bAtYIcG9PowaOwaD8rjYbqwK7dpe/UMQl3inJ4UtUK42nOCT41jTSCwg76E62JpMcg== + +"@typescript-eslint/typescript-estree@5.60.1": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.60.1.tgz#8c71824b7165b64d5ebd7aa42968899525959834" + integrity sha512-hkX70J9+2M2ZT6fhti5Q2FoU9zb+GeZK2SLP1WZlvUDqdMbEKhexZODD1WodNRyO8eS+4nScvT0dts8IdaBzfw== + dependencies: + "@typescript-eslint/types" "5.60.1" + "@typescript-eslint/visitor-keys" "5.60.1" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.60.1", "@typescript-eslint/utils@^5.58.0": + version "5.60.1" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.60.1.tgz#6861ebedbefba1ac85482d2bdef6f2ff1eb65b80" + integrity sha512-tiJ7FFdFQOWssFa3gqb94Ilexyw0JVxj6vBzaSpfN/8IhoKkDuSAenUKvsSHw2A/TMpJb26izIszTXaqygkvpQ== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@types/json-schema" "^7.0.9" + "@types/semver" "^7.3.12" + "@typescript-eslint/scope-manager" "5.60.1" + "@typescript-eslint/types" "5.60.1" + "@typescript-eslint/typescript-estree" "5.60.1" + eslint-scope "^5.1.1" + semver "^7.3.7" -"@typescript-eslint/visitor-keys@4.25.0": - version "4.25.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.25.0.tgz#863e7ed23da4287c5b469b13223255d0fde6aaa7" - integrity sha512-AmkqV9dDJVKP/TcZrbf6s6i1zYXt5Hl8qOLrRDTFfRNae4+LB8A4N3i+FLZPW85zIxRy39BgeWOfMS3HoH5ngg== +"@typescript-eslint/visitor-keys@5.60.1": + version "5.60.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.60.1.tgz#19a877358bf96318ec35d90bfe6bd1445cce9434" + integrity sha512-xEYIxKcultP6E/RMKqube11pGjXH1DCo60mQoWhVYyKfLkwbIVVjYxmOenNMxILx0TjCujPTjjnTIVzm09TXIw== dependencies: - "@typescript-eslint/types" "4.25.0" - eslint-visitor-keys "^2.0.0" + "@typescript-eslint/types" "5.60.1" + eslint-visitor-keys "^3.3.0" "@uiw/react-markdown-preview@3.0.6": version "3.0.6" @@ -5063,540 +4750,402 @@ "@uiw/react-markdown-preview" "3.0.6" rehype "11.0.0" -"@vx/axis@^0.0.175": - version "0.0.175" - resolved "https://registry.yarnpkg.com/@vx/axis/-/axis-0.0.175.tgz#312c07b81e8b043876436cab3bd8e90f30a1f7ec" - integrity sha512-qVRIHurnbPnRF4p0KQITArOUSF564tWW1pc48giLz+DJGlcJ4H9RfOSTpV6rnnP15xto6pQdQehBgBAvFRmoig== +"@use-gesture/core@10.2.27": + version "10.2.27" + resolved "https://registry.yarnpkg.com/@use-gesture/core/-/core-10.2.27.tgz#0f24b17c036cd828ba07e3451ff45e2df959c6f5" + integrity sha512-V4XV7hn9GAD2MYu8yBBVi5iuWBsAMfjPRMsEVzoTNGYH72tf0kFP+OKqGKc8YJFQIJx6yj+AOqxmEHOmx2/MEA== + +"@use-gesture/react@^10.0.0-beta.22": + version "10.2.27" + resolved "https://registry.yarnpkg.com/@use-gesture/react/-/react-10.2.27.tgz#7fbd50d14449ec5bc49c9b6cfef8a2845f5e0608" + integrity sha512-7E5vnWCxeslWlxwZ8uKIcnUZVMTRMZ8cvSnLLKF1NkyNb3PnNiAzoXM4G1vTKJKRhgOTeI6wK1YsEpwo9ABV5w== dependencies: - "@vx/group" "0.0.170" - "@vx/point" "0.0.165" - "@vx/shape" "0.0.175" - "@vx/text" "0.0.175" - classnames "^2.2.5" - prop-types "^15.6.0" + "@use-gesture/core" "10.2.27" -"@vx/axis@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/axis/-/axis-0.0.199.tgz#b7dc829e21cb49773f11d29f8fe901ae2794f215" - integrity sha512-pDP5Lf7bzVneh4YrPmJnIdiheHqiiVqWku8LKxTckIrvR55E3S1etlK9RSiT/3I9iERC9l/CXi7W7pJLKi7TaQ== +"@visx/annotation@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@visx/annotation/-/annotation-3.0.1.tgz#007c0030155d95897a74422a9d3aad65c7ef82a7" + integrity sha512-otf2AZDlt/XCpOBG0gkPowXVerLJW5yXPFis94Km0bj629htyKOsY5GeJKlBeB89ddxdi4zWHkaFxGFJ79Pqog== dependencies: - "@types/classnames" "^2.2.9" "@types/react" "*" - "@vx/group" "0.0.199" - "@vx/point" "0.0.199" - "@vx/scale" "0.0.199" - "@vx/shape" "0.0.199" - "@vx/text" "0.0.199" - classnames "^2.2.5" - prop-types "^15.6.0" + "@visx/drag" "3.0.1" + "@visx/group" "3.0.0" + "@visx/text" "3.0.0" + classnames "^2.3.1" + prop-types "^15.5.10" + react-use-measure "^2.0.4" -"@vx/bounds@0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/bounds/-/bounds-0.0.165.tgz#75f107a6deb58223c6878db5053382eff3174567" - integrity 
sha512-ZvRb72/4QNs1ZrytZTZxd0hfAb/KKfhsdkcYtIQkmdF6dTsjigMQZ+h2bLvLnbZb/RxyCCoxdiZSGXd+T1c//Q== +"@visx/axis@3.2.0", "@visx/axis@^3.1.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/axis/-/axis-3.2.0.tgz#6c6fc9bae92388f94a4b1c97b913f77b9b352fae" + integrity sha512-EqRsJshvXDB2R2o9U8BHiWNLz+HtHsVcPF0PKNO6BN20okV5KogOl1KPb0wUB0xD4vbAzc94fIm+MBaijpEWQg== dependencies: - prop-types "^15.5.10" + "@types/react" "*" + "@visx/group" "3.0.0" + "@visx/point" "3.0.1" + "@visx/scale" "3.2.0" + "@visx/shape" "3.2.0" + "@visx/text" "3.0.0" + classnames "^2.3.1" + prop-types "^15.6.0" -"@vx/bounds@0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/bounds/-/bounds-0.0.199.tgz#9a57aa5ea59c96d7f2369921ee6d7751d65f731d" - integrity sha512-3hq9MmfiSB5ciKTzHhB4/UV77JIIUEd1hDj5EBfLUUOL27wXgM6+RSOB+FxFt3tRJniltp3iZs5Gb75SDecpGw== +"@visx/bounds@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/bounds/-/bounds-3.0.0.tgz#cf357cbff90a1fe5f95eb9d9288dd8794b744a7f" + integrity sha512-YQaSSER9erxlhppzRms6cvYdKqcIwk6eksrGdbJkBoHobhPo1JCIUXlmrA4qgrEnXInPJpueGE+PE5F+Dk12DA== dependencies: "@types/react" "*" "@types/react-dom" "*" prop-types "^15.5.10" -"@vx/clip-path@0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/clip-path/-/clip-path-0.0.165.tgz#93cd65cc6a35319c7e403ce7b973ac1c8045b741" - integrity sha512-mBCbgguLMVyGvar5FbxqyyY4NQFlnXoSLF0TrhgWYkF/FCXdE1CzBC+Y4iXIJOY0ZTtluqL9XrNdIDpx49AmuA== - -"@vx/curve@0.0.165", "@vx/curve@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/curve/-/curve-0.0.165.tgz#330d1512dceae0af43dd3eb4c85523132030a3a0" - integrity sha512-fiQAGrKNGjJbL+eixUckJqIZDWXH/1NtIyyDbSz3J7ksk0QpYr5BgWcNJN76HLNt7wfcLwNzCHeNs4iVYyFGTg== - dependencies: - d3-shape "^1.0.6" - -"@vx/curve@0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/curve/-/curve-0.0.199.tgz#c462f11989068390333607fc254ff178a2f5227a" - integrity sha512-dJ84gIbFdZ/3KvYmmCrXiA4kDmM23NdhB3/8DGEFqsMn16b881hiN2YtXvkluaXfs01e/s4WcHLgAR7Fn9KYaA== +"@visx/curve@3.0.0", "@visx/curve@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/curve/-/curve-3.0.0.tgz#c54568472e00a38483c58cf52e4a6ddb2887c2d4" + integrity sha512-kvHJDLBeczTQ87ZExSTfRxej06l6o6UiQ0NHf9+xpAin06y6Qk1ThOHHWJTGM6KGzwlu7jEauJGHwZs6nMhDvA== dependencies: "@types/d3-shape" "^1.3.1" d3-shape "^1.0.6" -"@vx/event@0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/event/-/event-0.0.199.tgz#f8cc129fe3a6dfd9aa416cce4c0c2d843ad01fec" - integrity sha512-y/5z9hBGQ4XDfruGObzk3vFsafJyUmUea9X9JWpMNqD8uyHzgmitNtGxcno0zUXjJAUv65ALHPmcxdLzsEMfsQ== +"@visx/drag@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@visx/drag/-/drag-3.0.1.tgz#753d5f471d4e31679ca4fddcb476cf2d5c7eb6e4" + integrity sha512-yi2AB/unUfNYBRKS4pmUOuz8MjaAAYjsQGYcD/s4LqeQjd+lBZF7CuNcYZ/maGNQAEUfgLr2czIzADanOMtMaw== dependencies: "@types/react" "*" - "@vx/point" "0.0.199" - -"@vx/event@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/event/-/event-0.0.165.tgz#675d89fdfdc08d0c99c36ff1a381ea50fccfba2e" - integrity sha512-FsQiw0f3s5DQB6aBQmBcoWk9e4q65LcDobHIyV8qrmpW2QgV2NvQFM1w0Q300ohpRMgJDzGk68HHHQgFOJvApw== - dependencies: - "@vx/point" "0.0.165" + "@visx/event" "3.0.1" + "@visx/point" "3.0.1" + prop-types "^15.5.10" -"@vx/glyph@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/glyph/-/glyph-0.0.165.tgz#ba6fe31700dae852c60468e00dd732fa7521d1fc" - integrity 
sha512-kccUm40e/VCtayxqvcwc2K2M6oNXO7IafwIfw1RRv6Fj4Iutto9ZpI+PGOf/zPnYVueoLnWBXT/HE7IRS+C2gw== +"@visx/event@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@visx/event/-/event-3.0.1.tgz#d5358f52ff5ef30036d955bd2b68b96472ff2d6f" + integrity sha512-tK1EUYQLLStBuoCMbm8LJ3VbDyCVI8HjT0pMRQxm+C75FSIVWvrThgrfrC9sWOFnEMEYWspZO7hI5zjsPKjLQA== dependencies: - "@vx/group" "0.0.165" - classnames "^2.2.5" - d3-shape "^1.2.0" + "@types/react" "*" + "@visx/point" "3.0.1" -"@vx/glyph@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/glyph/-/glyph-0.0.199.tgz#1af503d97cbf8713f32374fce274e3be1514b0e6" - integrity sha512-/IKLRa3jvycJsdNwyqksUwOQisLWWE46DNQCTHuRe6uDtlRLsHcjUF1/vxDW4g7W//8os8TSKFwuWLqjwQyO6A== +"@visx/glyph@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/glyph/-/glyph-3.0.0.tgz#218a96aa0ccba95dc77e46ab08d26ad89198f3a8" + integrity sha512-r1B0IocfWfhTABKjam0qqsWKjxLxZfGwefnwn8IcfELSd9iAUtLbI/46nP4roQRHhB/Wl3RBbgA97fZw8f1MxA== dependencies: - "@types/classnames" "^2.2.9" "@types/d3-shape" "^1.3.1" "@types/react" "*" - "@vx/group" "0.0.199" - classnames "^2.2.5" + "@visx/group" "3.0.0" + classnames "^2.3.1" d3-shape "^1.2.0" prop-types "^15.6.2" -"@vx/gradient@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/gradient/-/gradient-0.0.165.tgz#0cc0fe873e6acded4943fa274f68601ad5a50a38" - integrity sha512-FjRXMTmcy7k0TWsfDzWWXw6T9WXKP+6LS/GRgnguq271pab/P+AdOJThsVxtBgUc8ZOAPbub3/2Gggz9d8tocg== - dependencies: - classnames "^2.2.5" - prop-types "^15.5.7" - -"@vx/grid@^0.0.180": - version "0.0.180" - resolved "https://registry.yarnpkg.com/@vx/grid/-/grid-0.0.180.tgz#31f2f73c9055d0ab2bf38f03a0c7dc47b9c12327" - integrity sha512-+ugS0c6GbwHr6pFU0znnOG3/zTwRRadvWwj3E4ZOHmKUSz6ZEN6JNo+rD3WSZckYwLis6UivmYfJ5cV6AM4ufg== +"@visx/grid@3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/grid/-/grid-3.2.0.tgz#1db936e0c20bf45d2e8280f8c70b62982c67fb59" + integrity sha512-f9EkYOB/acGVh4DwIzVZ39+uChvWiPRwO029L8Dlc6uTvG00sgnKttfZdDP2iYnGrAqyFTHsPLsuTQs4hKP9jA== dependencies: - "@vx/group" "0.0.170" - "@vx/point" "0.0.165" - "@vx/shape" "0.0.179" - classnames "^2.2.5" + "@types/react" "*" + "@visx/curve" "3.0.0" + "@visx/group" "3.0.0" + "@visx/point" "3.0.1" + "@visx/scale" "3.2.0" + "@visx/shape" "3.2.0" + classnames "^2.3.1" prop-types "^15.6.2" -"@vx/group@0.0.165", "@vx/group@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/group/-/group-0.0.165.tgz#2342523225de94859b5be49c3072dc6bb6795e78" - integrity sha512-gi1DSg8AAaVRseyWiq8y4bzyvKiQIXT6vDUYBVRmv2LBcpHocBGaxNiNK0X602RgLG0XmNyRv6qSCWLOaBs3Mg== - dependencies: - classnames "^2.2.5" - -"@vx/group@0.0.170": - version "0.0.170" - resolved "https://registry.yarnpkg.com/@vx/group/-/group-0.0.170.tgz#8b30b3ea07c348fe22253812fe7cb6d4200d725d" - integrity sha512-RnDdRoy0YI5hokk+YWXc8t39Kp51i4BdCpiwkDJU4YypGycTYnDFjicam6jigUmZ/6wyMirDf/aQboWviFLt2Q== - dependencies: - classnames "^2.2.5" - -"@vx/group@0.0.199", "@vx/group@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/group/-/group-0.0.199.tgz#167410a83c83aa2011f6e7cabafd68ed5659c690" - integrity sha512-QSHHQn1a5z9H/vrZP8FIlJTznWjuyDcloXU/1jocf1FL9BxWV7RErvZ/IBWr+bIZVXLZXUpfVpX9JMry+eESIg== +"@visx/group@3.0.0", "@visx/group@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/group/-/group-3.0.0.tgz#e7f9752599bcc7e141ff5317a2a9a502577ab8df" + integrity 
sha512-SFjXhTMcsaVAb1/TVL1KM5vn8gQTIVgSx0ATdDl4BJSFp2ym1lO8LY4jpV4SFweaHnWxVwrrfGLTn5QsYnvmjQ== dependencies: - "@types/classnames" "^2.2.9" "@types/react" "*" - classnames "^2.2.5" + classnames "^2.3.1" prop-types "^15.6.2" -"@vx/hierarchy@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/hierarchy/-/hierarchy-0.0.199.tgz#cfb88a82984eac6a47613b4a6f134f50e3a89df7" - integrity sha512-cAkRnudQXqvqV2Fpg5yrOm4GD48c0pNJbnAY6cSGReIoxXySt8mPBAyI6VnlF/zfcs9w30AVkiUUekzo1NjaAA== +"@visx/hierarchy@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/hierarchy/-/hierarchy-3.0.0.tgz#77c25f7f653542295f6c124563440e1d491b0e36" + integrity sha512-AZEHIiBdxgDhQqMQMwP7rFJlHiHN6KuNJK+SGjVYe1s+xoVisKBG7P/WGd3HCRNzh7hc6gffFow9enDT573uHQ== dependencies: - "@types/classnames" "^2.2.9" "@types/d3-hierarchy" "^1.1.6" "@types/react" "*" - "@vx/group" "0.0.199" - classnames "^2.2.5" + "@visx/group" "3.0.0" + classnames "^2.3.1" d3-hierarchy "^1.1.4" prop-types "^15.6.1" -"@vx/legend@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/legend/-/legend-0.0.199.tgz#8c2611ccc50c710effe395941fa13efe2bf9d641" - integrity sha512-plGergquRrefNE00HrGlofo0kmX1iMDTs6VlRBW+OKWrgJVtCgCD6jAWxz9No/MbwleMgsQsWd3nzKj6oKFAhw== +"@visx/legend@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/legend/-/legend-3.2.0.tgz#de8453b89ca4bc89f4c00a48b23c848fef570fcb" + integrity sha512-XyZjTDpTlzcTTFdrV16I58NKl1cvYtmFdr4Lb1dvpQkAJkFyWzjrWEqOSGw1yJkaimQKlmaemn8uRgzLXLfKhw== dependencies: - "@types/classnames" "^2.2.9" "@types/react" "*" - "@vx/group" "0.0.199" - "@vx/scale" "0.0.199" - classnames "^2.2.5" - prop-types "^15.5.10" - -"@vx/pattern@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/pattern/-/pattern-0.0.165.tgz#0d317cb9a13205e35691f702442739ff0256711d" - integrity sha512-h5nmfcYlQYYzNhlhqaYUvVnkmGnC0yWv5yU1snjHweGmIHTovV3RAbKgVFAP7kB3i2rbEtC3O8WkJN++cZdLzA== - dependencies: - classnames "^2.2.5" + "@visx/group" "3.0.0" + "@visx/scale" "3.2.0" + classnames "^2.3.1" prop-types "^15.5.10" -"@vx/point@0.0.165", "@vx/point@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/point/-/point-0.0.165.tgz#7ebde5da3d86954fe31a56f923f31550f0b4b867" - integrity sha512-spoHilhjcWNgccrSzBUPw+PXV81tYxeyEWBkgr35aGVU4m7YT86Ywvfemwp7AVVGPn+XJHrhB0ujAhDoyqFPoA== - -"@vx/point@0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/point/-/point-0.0.199.tgz#2ae16d99ca8d351fe09857533ba1e9c995696706" - integrity sha512-8mlOHgaCqT8qKhMSAA5U7tsBdOQBku7DIJ5fteTWeZt6OOafVSGvfT8LjphN8NA1xbY3ZY3L1bn9ZnZurYHDJQ== +"@visx/point@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@visx/point/-/point-3.0.1.tgz#77587ddaabf6f3023f09f8a0ce33a2c27c9d64c8" + integrity sha512-S5WOBMgEP2xHcgs3A2BFB2vwzrk0tMmn3PGZAbQJ+lu4HlnalDP72klUnxLTH8xclNNvpUHtHM5eLIJXyHx6Pw== -"@vx/responsive@^0.0.192": - version "0.0.192" - resolved "https://registry.yarnpkg.com/@vx/responsive/-/responsive-0.0.192.tgz#721d032bec38b9e3ff5fde2e4d5d8ee5a81cc517" - integrity sha512-HaXVwhSJXUfRbzRV+glxsX0ki2Hi1mdpz42iuGArVQgDPJEmBHjkXyoiXU8U6v66M7FAH+OyKgtc5j2bfhyYzA== +"@visx/react-spring@3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/react-spring/-/react-spring-3.2.0.tgz#7174ce2a89cc91efd6f0a43e2c90169800ebb6c5" + integrity sha512-gWr9/ERz+nq9B2PtaUe8keuj9tn1oXQfzuU9z8/n4PFx2xq6csx4IVEnQRtfLrQhsv3iU9GQ3wbQUcYsdJTdvQ== dependencies: - lodash "^4.17.10" - prop-types "^15.6.1" - resize-observer-polyfill 
"1.5.0" - -"@vx/scale@0.0.165", "@vx/scale@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/scale/-/scale-0.0.165.tgz#8575880c48296a80c0e9459057f826e8f903bc07" - integrity sha512-5jSgXJDU6J/KWIyCbpjHqysPCddp7tG3LbTV7UmtB1Qleb4m4slShTVSE7+EKU+zgiQPDGm0+E2ht4cet+7F7A== - dependencies: - d3-scale "^2.0.0" - -"@vx/scale@0.0.199", "@vx/scale@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/scale/-/scale-0.0.199.tgz#bd3b0b7add5e99c0b8240ca72edfad3b96bbd700" - integrity sha512-AbrPbQUEYFR0oxMHry7LqvLbyXxXek2aDAnwzzp7C+Yh1wfUk63hSuaRp13CO7+3r3gIy+99yrsaYPsgt2I6Nw== - dependencies: - "@types/d3-interpolate" "^1.3.1" - "@types/d3-scale" "^2.1.1" - "@types/d3-time" "^1.0.10" - d3-interpolate "^1.4.0" - d3-scale "^3.0.1" - d3-time "^1.1.0" - -"@vx/shape@0.0.170": - version "0.0.170" - resolved "https://registry.yarnpkg.com/@vx/shape/-/shape-0.0.170.tgz#55cf5968f7a6465484aac1a5541275b4813bb9a2" - integrity sha512-rm8oVRP0ejgwGhQTVhqP5awqphWX60FgbnRt9X+YBUqgv7Qyedfgs/CHd/5QFZX3aPp8d4F+b4+lghbIYiMgmQ== - dependencies: - "@vx/curve" "0.0.165" - "@vx/group" "0.0.170" - "@vx/point" "0.0.165" - classnames "^2.2.5" - d3-path "^1.0.5" - d3-shape "^1.2.0" - prop-types "^15.5.10" + "@types/react" "*" + "@visx/axis" "3.2.0" + "@visx/grid" "3.2.0" + "@visx/scale" "3.2.0" + "@visx/text" "3.0.0" + classnames "^2.3.1" + prop-types "^15.6.2" -"@vx/shape@0.0.175": - version "0.0.175" - resolved "https://registry.yarnpkg.com/@vx/shape/-/shape-0.0.175.tgz#099bcd4fdc890988fa373526b170c11942af2255" - integrity sha512-bjAJoIIpKjUEPDV2xmTYGUvSvwRztv+6rd1c6NPZG/nIuqsMHFnFig/2xTcQJEQhRg6aKzvxIUo43zPSSq3fWA== +"@visx/responsive@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/responsive/-/responsive-3.0.0.tgz#e183c54ce04cffe756378872d30ac88c66a137ac" + integrity sha512-immnxQwOWlrxbnlCIqJWuDpPfrM6tglgMTN1WsyXyGluLMJqhuuxqxllfXaRPkQFS4fcvs66KCEELdazh96U2w== dependencies: - "@vx/curve" "0.0.165" - "@vx/group" "0.0.170" - "@vx/point" "0.0.165" - classnames "^2.2.5" - d3-path "^1.0.5" - d3-shape "^1.2.0" - prop-types "^15.5.10" + "@types/lodash" "^4.14.172" + "@types/react" "*" + lodash "^4.17.21" + prop-types "^15.6.1" -"@vx/shape@0.0.179": - version "0.0.179" - resolved "https://registry.yarnpkg.com/@vx/shape/-/shape-0.0.179.tgz#038c449743d1e05b7b2d20151e9ab6e739f73516" - integrity sha512-YHVNx4xGpbjolkW3Lb5pEgJB0+u349vfnLI976DJlinY0hRNa4TZbWXOB4ywLIrYzQEXXPMUR8WtdubNxg6g0w== +"@visx/scale@3.2.0", "@visx/scale@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/scale/-/scale-3.2.0.tgz#a10a41e77331061819e4c9568e76158f3489a951" + integrity sha512-8XOZ+LDr/QVC61EBtO4xvcRJQ/M0bfQHme/EJsiZU5DabrD3cHJAOxNW6pLF5PkvDGQII2V3delijNPDqTiTNg== dependencies: - "@vx/curve" "0.0.165" - "@vx/group" "0.0.170" - "@vx/point" "0.0.165" - classnames "^2.2.5" - d3-path "^1.0.5" - d3-shape "^1.2.0" - prop-types "^15.5.10" + "@visx/vendor" "3.2.0" -"@vx/shape@0.0.199", "@vx/shape@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/shape/-/shape-0.0.199.tgz#b25cc7c0b5a02e72d5f3ce95484e023d2e280fde" - integrity sha512-Nuod/HGbrs4n5BFKs7gp2YUmhbivkxXOfGRR4yZd72+l5nEdvTB/fj/69YkF6O2xvtbiJr2fKPJWe4wTZZQV5A== +"@visx/shape@3.2.0", "@visx/shape@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/shape/-/shape-3.2.0.tgz#2ddc9bbb17bcea32835fea6d2a09b20a3d119791" + integrity sha512-NJw5lWokh6tvViOtj5eiZKJuqlE7QPhJUTi0CiBI5h3vH0h84PGxqvdH+PNYr+Hze4U+qKuppDmjFkFx/8MjTw== dependencies: - "@types/classnames" "^2.2.9" 
"@types/d3-path" "^1.0.8" "@types/d3-shape" "^1.3.1" - "@types/lodash" "^4.14.146" + "@types/lodash" "^4.14.172" "@types/react" "*" - "@vx/curve" "0.0.199" - "@vx/group" "0.0.199" - "@vx/scale" "0.0.199" - classnames "^2.2.5" - d3-path "^1.0.5" - d3-shape "^1.2.0" - lodash "^4.17.15" - prop-types "^15.5.10" - -"@vx/shape@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/shape/-/shape-0.0.165.tgz#3424121f45d83dd8e7ba62b6adf8b0aea2fb9cf8" - integrity sha512-D9naH/glDtw8J8IcdumpRz1ihaoCAYMwFNh2KTv73HiTKrLQSXvIjwYFv9C0b8BCPNOXkDZS8s+AlgMSqGlZNQ== - dependencies: - "@vx/curve" "0.0.165" - "@vx/group" "0.0.165" - "@vx/point" "0.0.165" - classnames "^2.2.5" - d3-path "^1.0.5" - d3-shape "^1.2.0" - prop-types "^15.5.10" - -"@vx/shape@^0.0.168": - version "0.0.168" - resolved "https://registry.yarnpkg.com/@vx/shape/-/shape-0.0.168.tgz#172bc1cf4dade47076018efd559e0ecc4e959aec" - integrity sha512-urKZkwSafMpPQ0wI/L5FJmufRiAR4UsgYUCKxROjfE1Cf4jWNlK6mlVIIASxCdHlh9CGBbIrRMdl5Yv5lzqhjA== - dependencies: - "@vx/curve" "0.0.165" - "@vx/group" "0.0.165" - "@vx/point" "0.0.165" - classnames "^2.2.5" + "@visx/curve" "3.0.0" + "@visx/group" "3.0.0" + "@visx/scale" "3.2.0" + classnames "^2.3.1" d3-path "^1.0.5" d3-shape "^1.2.0" + lodash "^4.17.21" prop-types "^15.5.10" -"@vx/stats@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/stats/-/stats-0.0.165.tgz#32413a144383367e51628bbea61b3b0ce980f1fd" - integrity sha512-FRW5N+7pXLZrQxT8JA8OH28PGKq7YfiycmnSG7jzXOnvw+sPm9MRKCoyRDTpFrCiggcOhHhvqhE8RiO2qF7d3Q== - dependencies: - "@vx/group" "0.0.165" - "@vx/scale" "0.0.165" - classnames "^2.2.5" - d3-shape "^1.2.0" - -"@vx/text@0.0.175": - version "0.0.175" - resolved "https://registry.yarnpkg.com/@vx/text/-/text-0.0.175.tgz#70ff63b01abfc148132d36ca7cdebfb040bb48c5" - integrity sha512-SOBhctXXAGhhpCOiTjxOM/8NDaDqGRk3OGfsJ714Mt1UJX6VQaKxFocZJwn6IMw3mNG6/p7O4Eao/gGDcoM6+A== - dependencies: - babel-plugin-lodash "^3.3.2" - classnames "^2.2.5" - lodash "^4.17.4" - reduce-css-calc "^1.3.0" - -"@vx/text@0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/text/-/text-0.0.199.tgz#5d4fd172a7e1f5cc1d8ad6c175d816d6e9aa654f" - integrity sha512-YUIkATaN+GEyiyfMsMN3VqopmOHFjPZe2AtXPnIhjrLfOY/SjpPqqT7L2S3pgGbqZYiJbZd9VzGxXYXRtGZOzw== +"@visx/text@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/text/-/text-3.0.0.tgz#9099c3605027b9ab4c54bde97518a648136c3629" + integrity sha512-LW6v5T/gpd9RGw83/ScXncYc6IlcfzXTpaN8WbbxLRI65gdvSqrykwAMR0cbpQmzoVFuZXljqOf0QslHGnBg1w== dependencies: - "@types/classnames" "^2.2.9" - "@types/lodash" "^4.14.160" + "@types/lodash" "^4.14.172" "@types/react" "*" - classnames "^2.2.5" - lodash "^4.17.20" - prop-types "^15.7.2" - reduce-css-calc "^1.3.0" - -"@vx/text@^0.0.192": - version "0.0.192" - resolved "https://registry.yarnpkg.com/@vx/text/-/text-0.0.192.tgz#cb71261e003c9d951d242ec43049cff2c4509302" - integrity sha512-lyy7eXfmQ8SJF7Qx+bCRcaEgvVSa18Lp6eRMo3GMANumUh9kSe7LwgqRFSdBJ85WkPqX+UOkJVyCH7AOlt0IWA== - dependencies: - classnames "^2.2.5" - lodash "^4.17.15" + classnames "^2.3.1" + lodash "^4.17.21" prop-types "^15.7.2" reduce-css-calc "^1.3.0" -"@vx/threshold@0.0.170": - version "0.0.170" - resolved "https://registry.yarnpkg.com/@vx/threshold/-/threshold-0.0.170.tgz#59baeabc7e1687042cf4955130fc7a72704030f7" - integrity sha512-A3yWJrFqckbleXg3Q3iSsU6mdtHbMxEnE4jGZd8og4m9r2RDVTvFVP6ZRo4vunlfWj5YuMnNsKhx4ZSWKVMtXg== - dependencies: - "@vx/clip-path" "0.0.165" - "@vx/shape" "0.0.170" - classnames "^2.2.5" - 
prop-types "^15.5.10" - -"@vx/tooltip@0.0.165", "@vx/tooltip@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/tooltip/-/tooltip-0.0.165.tgz#0d17a1b445a7bc70d7840e36593b780a6e7b40e2" - integrity sha512-/x1NZc67QGQ4e/WNT7Ks5LYRyeLSqp8lG04gX5J6leUS0zscAVzo3aE5u65Qqbc0cnMyMPRZ2Qtb4klWTLg+eQ== - dependencies: - "@vx/bounds" "0.0.165" - classnames "^2.2.5" - prop-types "^15.5.10" - -"@vx/tooltip@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/tooltip/-/tooltip-0.0.199.tgz#e60147412eb5be4d8dc98664abaee1ac63ee6570" - integrity sha512-IZ6ExnEyTZ4jq9Ml+XECnrtQ52SnLi8WD1dbxnVMoAzNbMKNfnkET5ZqzYM/fM/yPEsKMwNadZWizdKBMjIGVw== +"@visx/tooltip@3.1.2": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@visx/tooltip/-/tooltip-3.1.2.tgz#6c7bb36a296f4501adb99b59487412e39fe06f44" + integrity sha512-p46qztGRNkEDbxzc3V1virahvz3UQ29TzddUjA0oaTIBCrOd9UJuLvv1Tq9OpeUYPdbrO/ZRwaEeri2pbwv04Q== dependencies: - "@types/classnames" "^2.2.9" "@types/react" "*" - "@vx/bounds" "0.0.199" - classnames "^2.2.5" + "@visx/bounds" "3.0.0" + classnames "^2.3.1" prop-types "^15.5.10" - react-use-measure "2.0.1" + react-use-measure "^2.0.4" -"@vx/voronoi@^0.0.165": - version "0.0.165" - resolved "https://registry.yarnpkg.com/@vx/voronoi/-/voronoi-0.0.165.tgz#11ab585199b0dccf403544a6ad378a505bfb913b" - integrity sha512-oZT9KBAjDLCEcOrrqW01TPz8pLtrNNAFPa7mB9ignXvgntqEd3yVXCBkxXScfZLS+O8UQc+7/pawu0PPkE2eMw== +"@visx/vendor@3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/vendor/-/vendor-3.2.0.tgz#05f1ab97ced594d456abe60d2136b298c08d6f1b" + integrity sha512-QBvachNdlwnz5aimPiXJwErGaGL/FjP/lzXT1uoHuGQQeeSxvodUd6tuSt/sxaoVFzqup557qJdVLqxSCQYs5A== + dependencies: + "@types/d3-array" "3.0.3" + "@types/d3-color" "3.1.0" + "@types/d3-format" "3.0.1" + "@types/d3-interpolate" "3.0.1" + "@types/d3-scale" "4.0.2" + "@types/d3-time" "3.0.0" + "@types/d3-time-format" "2.1.0" + d3-array "3.2.1" + d3-color "3.1.0" + d3-format "3.1.0" + d3-interpolate "3.0.1" + d3-scale "4.0.2" + d3-time "3.1.0" + d3-time-format "4.1.0" + internmap "2.0.3" + +"@visx/voronoi@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@visx/voronoi/-/voronoi-3.0.0.tgz#24a3513252dde1d6e85b88144e7a65b6d5936275" + integrity sha512-ySX7+Ic+irfgZQMij/0RJnryETonuKDWA3Upw3V6YtIiodPOEQ5w8FW8TvEbhaBlAUfSwQtHJ5ECvv3ZDrJa2A== dependencies: - "@vx/group" "0.0.165" - classnames "^2.2.5" + "@types/d3-voronoi" "^1.1.9" + "@types/react" "*" + classnames "^2.3.1" d3-voronoi "^1.1.2" prop-types "^15.6.1" -"@vx/zoom@^0.0.199": - version "0.0.199" - resolved "https://registry.yarnpkg.com/@vx/zoom/-/zoom-0.0.199.tgz#e5c500fd6d3bfe2c2e01df0eca93ac1069dda083" - integrity sha512-1naGZJ+UYynQENK1NqGP34i1r30Crrl5GSvvx7NaDXTpql7mowzy827UJdMXKBwgBh5ca5cONWOdPGh68SqqiA== +"@visx/xychart@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@visx/xychart/-/xychart-3.2.0.tgz#20ecc4a7452bc68215eda97047193aeda79e579b" + integrity sha512-7OdPUeYJkKPlqZqpt4cc6qcpE6edE35rcAvvWmdurfCEpTyYOpUaIMgM4t0NX6mNHoAx6K6XAeS5ROCE7vpCNg== dependencies: + "@types/lodash" "^4.14.172" "@types/react" "*" - "@vx/event" "0.0.199" + "@visx/annotation" "3.0.1" + "@visx/axis" "3.2.0" + "@visx/event" "3.0.1" + "@visx/glyph" "3.0.0" + "@visx/grid" "3.2.0" + "@visx/react-spring" "3.2.0" + "@visx/responsive" "3.0.0" + "@visx/scale" "3.2.0" + "@visx/shape" "3.2.0" + "@visx/text" "3.0.0" + "@visx/tooltip" "3.1.2" + "@visx/vendor" "3.2.0" + "@visx/voronoi" "3.0.0" + classnames "^2.3.1" + d3-interpolate-path "2.2.1" + 
d3-shape "^2.0.0" + lodash "^4.17.21" + mitt "^2.1.0" prop-types "^15.6.2" -"@webassemblyjs/ast@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.9.0.tgz#bd850604b4042459a5a41cd7d338cbed695ed964" - integrity sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA== +"@visx/zoom@^3.1.1": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@visx/zoom/-/zoom-3.1.1.tgz#24b5e966e95c0f71842668337b66bda0eb7ebf0f" + integrity sha512-/jwCAx3oHweZiF+VmdSz6kf4OnCnZI5NTP5i4vImvstt6M/8mWVZv5ICwOooZOWmceBOOwLYgEO35tJN9E34Cg== dependencies: - "@webassemblyjs/helper-module-context" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/wast-parser" "1.9.0" - -"@webassemblyjs/floating-point-hex-parser@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.9.0.tgz#3c3d3b271bddfc84deb00f71344438311d52ffb4" - integrity sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA== + "@types/react" "*" + "@use-gesture/react" "^10.0.0-beta.22" + "@visx/event" "3.0.1" + prop-types "^15.6.2" -"@webassemblyjs/helper-api-error@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.9.0.tgz#203f676e333b96c9da2eeab3ccef33c45928b6a2" - integrity sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw== +"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" + integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" -"@webassemblyjs/helper-buffer@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.9.0.tgz#a1442d269c5feb23fcbc9ef759dac3547f29de00" - integrity sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA== +"@webassemblyjs/floating-point-hex-parser@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== -"@webassemblyjs/helper-code-frame@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.9.0.tgz#647f8892cd2043a82ac0c8c5e75c36f1d9159f27" - integrity sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA== - dependencies: - "@webassemblyjs/wast-printer" "1.9.0" +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-fsm@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.9.0.tgz#c05256b71244214671f4b08ec108ad63b70eddb8" - integrity 
sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw== +"@webassemblyjs/helper-buffer@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" + integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== -"@webassemblyjs/helper-module-context@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.9.0.tgz#25d8884b76839871a08a6c6f806c3979ef712f07" - integrity sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g== +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: - "@webassemblyjs/ast" "1.9.0" + "@webassemblyjs/floating-point-hex-parser" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" + "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.9.0.tgz#4fed8beac9b8c14f8c58b70d124d549dd1fe5790" - integrity sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw== +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.9.0.tgz#5a4138d5a6292ba18b04c5ae49717e4167965346" - integrity sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw== +"@webassemblyjs/helper-wasm-section@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" + integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-buffer" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/wasm-gen" "1.9.0" + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.11.6" -"@webassemblyjs/ieee754@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.9.0.tgz#15c7a0fbaae83fb26143bbacf6d6df1702ad39e4" - integrity sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg== +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.9.0": - version "1.9.0" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.9.0.tgz#f19ca0b76a6dc55623a09cffa769e838fa1e1c95" - integrity sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw== +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.9.0.tgz#04d33b636f78e6a6813227e82402f7637b6229ab" - integrity sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w== +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== -"@webassemblyjs/wasm-edit@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.9.0.tgz#3fe6d79d3f0f922183aa86002c42dd256cfee9cf" - integrity sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-buffer" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/helper-wasm-section" "1.9.0" - "@webassemblyjs/wasm-gen" "1.9.0" - "@webassemblyjs/wasm-opt" "1.9.0" - "@webassemblyjs/wasm-parser" "1.9.0" - "@webassemblyjs/wast-printer" "1.9.0" - -"@webassemblyjs/wasm-gen@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.9.0.tgz#50bc70ec68ded8e2763b01a1418bf43491a7a49c" - integrity sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA== +"@webassemblyjs/wasm-edit@^1.11.5": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" + integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== + dependencies: + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.11.6" + "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/wasm-opt" "1.11.6" + "@webassemblyjs/wasm-parser" "1.11.6" + "@webassemblyjs/wast-printer" "1.11.6" + +"@webassemblyjs/wasm-gen@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" + integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/ieee754" "1.9.0" - "@webassemblyjs/leb128" "1.9.0" - "@webassemblyjs/utf8" "1.9.0" + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" -"@webassemblyjs/wasm-opt@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.9.0.tgz#2211181e5b31326443cc8112eb9f0b9028721a61" - integrity 
sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A== +"@webassemblyjs/wasm-opt@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" + integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-buffer" "1.9.0" - "@webassemblyjs/wasm-gen" "1.9.0" - "@webassemblyjs/wasm-parser" "1.9.0" + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/wasm-parser" "1.11.6" -"@webassemblyjs/wasm-parser@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.9.0.tgz#9d48e44826df4a6598294aa6c87469d642fff65e" - integrity sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA== +"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" + integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-api-error" "1.9.0" - "@webassemblyjs/helper-wasm-bytecode" "1.9.0" - "@webassemblyjs/ieee754" "1.9.0" - "@webassemblyjs/leb128" "1.9.0" - "@webassemblyjs/utf8" "1.9.0" + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" -"@webassemblyjs/wast-parser@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.9.0.tgz#3031115d79ac5bd261556cecc3fa90a3ef451914" - integrity sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/floating-point-hex-parser" "1.9.0" - "@webassemblyjs/helper-api-error" "1.9.0" - "@webassemblyjs/helper-code-frame" "1.9.0" - "@webassemblyjs/helper-fsm" "1.9.0" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/wast-printer@1.9.0": - version "1.9.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.9.0.tgz#4935d54c85fef637b00ce9f52377451d00d47899" - integrity sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA== +"@webassemblyjs/wast-printer@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" + integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/wast-parser" "1.9.0" + "@webassemblyjs/ast" "1.11.6" "@xtuc/long" "4.2.2" "@wry/context@^0.6.0": @@ -5672,40 +5221,45 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-jsx@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b" - integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== +acorn-import-assertions@^1.9.0: + version "1.9.0" + resolved 
"https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" + integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn-walk@^7.1.1: version "7.2.0" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== -acorn@^6.4.1: - version "6.4.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" - integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== -acorn@^7.1.0, acorn@^7.1.1, acorn@^7.4.0: +acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== -acorn@^8.2.4: - version "8.2.4" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.2.4.tgz#caba24b08185c3b56e3168e97d15ed17f4d31fd0" - integrity sha512-Ibt84YwBDDA890eDiDCEqcbwvHlBvzzDkU2cGBBDDI1QWT12jTiXIOn2CIw5KK4i6N5Z2HUxwYjzriDyqaqqZg== +acorn@^8.2.4, acorn@^8.4.1, acorn@^8.7.1, acorn@^8.8.0, acorn@^8.8.2: + version "8.9.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.9.0.tgz#78a16e3b2bcc198c10822786fa6679e245db5b59" + integrity sha512-jaVNAFBHNLXspO543WnNNPZFRtavh3skAkITqD0/2aeMkKZTN+254PyhwxFYrk3vQ1xfY+2wbesJMs/JC8/PwQ== -address@1.1.2, address@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" - integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== +address@^1.0.1, address@^1.1.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/address/-/address-1.2.2.tgz#2b5248dac5485a6390532c6a517fda2e3faac89e" + integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== -adjust-sourcemap-loader@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-3.0.0.tgz#5ae12fb5b7b1c585e80bbb5a63ec163a1a45e61e" - integrity sha512-YBrGyT2/uVQ/c6Rr+t6ZJXniY03YtHGMJQYal368burRGYKqhx9qGTWqcBU5s1CwYY9E/ri63RYyG1IacMZtqw== +adjust-sourcemap-loader@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== dependencies: loader-utils "^2.0.0" regex-parser "^2.2.11" @@ -5717,25 +5271,26 @@ agent-base@6: dependencies: debug "4" -aggregate-error@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" - integrity 
sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== dependencies: - clean-stack "^2.0.0" - indent-string "^4.0.0" - -ajv-errors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" - integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== + ajv "^8.0.0" -ajv-keywords@^3.1.0, ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: version "3.5.2" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: +ajv-keywords@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -5745,21 +5300,16 @@ ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.1: - version "8.5.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.5.0.tgz#695528274bcb5afc865446aa275484049a18ae4b" - integrity sha512-Y2l399Tt1AguU3BPRP9Fn4eN+Or+StUGWCUpbnFyXSo8NZ9S4uj+AG2pjs5apK+ZMOwYOz1+a+VKvKH7CudXgQ== +ajv@^8.0.0, ajv@^8.6.0, ajv@^8.9.0: + version "8.12.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" + integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== dependencies: fast-deep-equal "^3.1.1" json-schema-traverse "^1.0.0" require-from-string "^2.0.2" uri-js "^4.2.2" -alphanum-sort@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3" - integrity sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM= - analytics-utils@^1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/analytics-utils/-/analytics-utils-1.0.12.tgz#07bd63471d238e80f42d557fba039365f09c50db" @@ -5776,16 +5326,6 @@ analytics@^0.8.9: "@analytics/core" "^0.12.7" "@analytics/storage-utils" "^0.4.2" -ansi-colors@^3.0.0: - version "3.2.4" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" - integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== - -ansi-colors@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" - integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== - ansi-escapes@^3.0.0: 
version "3.2.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" @@ -5798,10 +5338,10 @@ ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: dependencies: type-fest "^0.21.3" -ansi-html@0.0.7, ansi-html@^0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" - integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== ansi-regex@^2.0.0: version "2.1.1" @@ -5813,22 +5353,22 @@ ansi-regex@^3.0.0: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= -ansi-regex@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" - integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== +ansi-regex@^5.0.0, ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-regex@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" - integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== +ansi-regex@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= -ansi-styles@^3.2.0, ansi-styles@^3.2.1: +ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== @@ -5842,6 +5382,11 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + antd@4.24.7: version "4.24.7" resolved "https://registry.yarnpkg.com/antd/-/antd-4.24.7.tgz#ad90cc2d6225fe3e0030aeccdc64de6c26edc3e7" @@ -5896,32 +5441,29 @@ any-observable@^0.3.0: resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== -anymatch@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" - integrity 
sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== - dependencies: - micromatch "^3.1.4" - normalize-path "^2.1.1" +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== -anymatch@^3.0.0, anymatch@^3.0.3, anymatch@~3.1.1: - version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" -aproba@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== - arg@^4.1.0: version "4.1.3" resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== +arg@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -5942,10 +5484,12 @@ aria-query@^4.2.2: "@babel/runtime" "^7.10.2" "@babel/runtime-corejs3" "^7.10.2" -arity-n@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/arity-n/-/arity-n-1.0.4.tgz#d9e76b11733e08569c0847ae7b39b2860b30b745" - integrity sha1-2edrEXM+CFacCEeuezmyhgswt0U= +aria-query@^5.1.3: + version "5.3.0" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.0.tgz#650c569e41ad90b51b3d7df5e5eed1c7549c103e" + integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A== + dependencies: + dequal "^2.0.3" arr-diff@^4.0.0: version "4.0.0" @@ -5967,88 +5511,73 @@ array-flatten@1.1.1: resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== -array-flatten@^2.1.0: +array-flatten@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== -array-includes@^3.1.1, array-includes@^3.1.2, array-includes@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a" - integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A== +array-includes@^3.1.5, array-includes@^3.1.6: + version "3.1.6" + resolved 
"https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.6.tgz#9e9e720e194f198266ba9e18c29e6a9b0e4b225f" + integrity sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw== dependencies: call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - get-intrinsic "^1.1.1" - is-string "^1.0.5" + define-properties "^1.1.4" + es-abstract "^1.20.4" + get-intrinsic "^1.1.3" + is-string "^1.0.7" array-tree-filter@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/array-tree-filter/-/array-tree-filter-2.1.0.tgz#873ac00fec83749f255ac8dd083814b4f6329190" integrity sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw== -array-union@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" - integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= - dependencies: - array-uniq "^1.0.1" - array-union@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" - integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= - array-unique@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= -array.prototype.flat@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" - integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== +array.prototype.flat@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz#ffc6576a7ca3efc2f46a143b9d1dda9b4b3cf5e2" + integrity sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA== dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.1" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" -array.prototype.flatmap@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.4.tgz#94cfd47cc1556ec0747d97f7c7738c58122004c9" - integrity sha512-r9Z0zYoxqHz60vvQbWEdXIEtCwHF0yxaWfno9qzXeNHvfyl3BZqygmGzb84dsubyaXLH4husF+NFgMSdpZhk2Q== +array.prototype.flatmap@^1.2.4, array.prototype.flatmap@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz#1aae7903c2100433cb8261cd4ed310aab5c4a183" + integrity sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ== dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.1" - function-bind "^1.1.1" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" -arrify@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" - integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== 
+array.prototype.tosorted@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz#ccf44738aa2b5ac56578ffda97c03fd3e23dd532" + integrity sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" + get-intrinsic "^1.1.3" asap@~2.0.3, asap@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= -asn1.js@^5.2.0: - version "5.4.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" - integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== - dependencies: - bn.js "^4.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - safer-buffer "^2.1.0" - asn1@~0.2.3: version "0.2.4" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" @@ -6061,14 +5590,6 @@ assert-plus@1.0.0, assert-plus@^1.0.0: resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= -assert@^1.1.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" - integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== - dependencies: - object-assign "^4.1.1" - util "0.10.3" - assign-symbols@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" @@ -6079,16 +5600,6 @@ ast-types-flow@^0.0.7: resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= -astral-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" - integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== - -async-each@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" - integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== - async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" @@ -6099,17 +5610,10 @@ async-validator@^4.1.0: resolved "https://registry.yarnpkg.com/async-validator/-/async-validator-4.2.5.tgz#c96ea3332a521699d0afaaceed510a54656c6339" integrity sha512-7HhHjtERjqlNbZtqNqy2rckN/SpOOlmDliet+lP7k+eKZEjPk3DgyeU9lIXLdeLz0uBbbVp+9Qdow9wJWgwwfg== -async@0.9.x: - version "0.9.2" - resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" - integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0= - -async@^2.6.2: - version "2.6.3" - resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" - integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== - dependencies: - lodash "^4.17.14" +async@^3.2.3: + version "3.2.4" + resolved 
"https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== asynckit@^0.4.0: version "0.4.0" @@ -6131,18 +5635,17 @@ auto-bind@~4.0.0: resolved "https://registry.yarnpkg.com/auto-bind/-/auto-bind-4.0.0.tgz#e3589fc6c2da8f7ca43ba9f84fa52a744fc997fb" integrity sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ== -autoprefixer@^9.6.1: - version "9.8.6" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.6.tgz#3b73594ca1bf9266320c5acf1588d74dea74210f" - integrity sha512-XrvP4VVHdRBCdX1S3WXVD8+RyG9qeb1D5Sn1DeLiG2xfSpzellk5k54xbUERJ3M5DggQxes39UGOTP8CFrEGbg== +autoprefixer@^10.4.12, autoprefixer@^10.4.13: + version "10.4.14" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.14.tgz#e28d49902f8e759dd25b153264e862df2705f79d" + integrity sha512-FQzyfOsTlwVzjHxKEqRIAdJx9niO6VCBCoEwax/VLSoQF29ggECcPuBqUMZ+u8jCZOPSy8b8/8KnuFbp0SaFZQ== dependencies: - browserslist "^4.12.0" - caniuse-lite "^1.0.30001109" - colorette "^1.2.1" + browserslist "^4.21.5" + caniuse-lite "^1.0.30001464" + fraction.js "^4.2.0" normalize-range "^0.1.2" - num2fraction "^1.2.2" - postcss "^7.0.32" - postcss-value-parser "^4.1.0" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" aws-sign2@~0.7.0: version "0.7.0" @@ -6154,10 +5657,10 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== -axe-core@^4.0.2: - version "4.2.1" - resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.2.1.tgz#2e50bcf10ee5b819014f6e342e41e45096239e34" - integrity sha512-evY7DN8qSIbsW2H/TWQ1bX3sXN1d4MNb5Vb4n7BzPuCwRHdkZ1H2eNLuSh73EoQqkGKUtju2G2HCcjCfhvZIAA== +axe-core@^4.6.2: + version "4.7.2" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" + integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== axios@^0.21.1: version "0.21.4" @@ -6166,42 +5669,25 @@ axios@^0.21.1: dependencies: follow-redirects "^1.14.0" -axobject-query@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" - integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== - -babel-eslint@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232" - integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/parser" "^7.7.0" - "@babel/traverse" "^7.7.0" - "@babel/types" "^7.7.0" - eslint-visitor-keys "^1.0.0" - resolve "^1.12.0" - -babel-extract-comments@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/babel-extract-comments/-/babel-extract-comments-1.0.0.tgz#0a2aedf81417ed391b85e18b4614e693a0351a21" - integrity sha512-qWWzi4TlddohA91bFwgt6zO/J0X+io7Qp184Fw0m2JYRSTZnJbFR8+07KmzudHCZgOiKRCrjhylwv9Xd8gfhVQ== +axobject-query@^3.1.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.2.1.tgz#39c378a6e3b06ca679f29138151e45b2b32da62a" + integrity 
sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg== dependencies: - babylon "^6.18.0" + dequal "^2.0.3" -babel-jest@^26.6.0, babel-jest@^26.6.3: - version "26.6.3" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056" - integrity sha512-pl4Q+GAVOHwvjrck6jKjvmGhnO3jHX/xuB9d27f+EJZ/6k+6nMuPjorrYp7s++bKKdANwzElBWnLWaObvTnaZA== +babel-jest@^27.4.2, babel-jest@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" + integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== dependencies: - "@jest/transform" "^26.6.2" - "@jest/types" "^26.6.2" - "@types/babel__core" "^7.1.7" - babel-plugin-istanbul "^6.0.0" - babel-preset-jest "^26.6.2" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^27.5.1" chalk "^4.0.0" - graceful-fs "^4.2.4" + graceful-fs "^4.2.9" slash "^3.0.0" babel-literal-to-ast@^2.1.0: @@ -6213,75 +5699,45 @@ babel-literal-to-ast@^2.1.0: "@babel/traverse" "^7.1.6" "@babel/types" "^7.1.6" -babel-loader@8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.1.0.tgz#c611d5112bd5209abe8b9fa84c3e4da25275f1c3" - integrity sha512-7q7nC1tYOrqvUrN3LQK4GwSk/TQorZSOlO9C+RZDZpODgyN4ZlCqE5q9cDsyWOliN+aU9B4JX01xK9eJXowJLw== - dependencies: - find-cache-dir "^2.1.0" - loader-utils "^1.4.0" - mkdirp "^0.5.3" - pify "^4.0.1" - schema-utils "^2.6.5" - -babel-loader@8.2.2: - version "8.2.2" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.2.tgz#9363ce84c10c9a40e6c753748e1441b60c8a0b81" - integrity sha512-JvTd0/D889PQBtUXJ2PXaKU/pjZDMtHA9V2ecm+eNRmmBCMR09a+fmpGTNwnJtFmFl5Ei7Vy47LjBb+L0wQ99g== +babel-loader@^8.2.3: + version "8.3.0" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.3.0.tgz#124936e841ba4fe8176786d6ff28add1f134d6a8" + integrity sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== dependencies: find-cache-dir "^3.3.1" - loader-utils "^1.4.0" + loader-utils "^2.0.0" make-dir "^3.1.0" schema-utils "^2.6.5" -babel-plugin-dynamic-import-node@^2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" - integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== - dependencies: - object.assign "^4.1.0" - -babel-plugin-import@^1.13.1: - version "1.13.3" - resolved "https://registry.yarnpkg.com/babel-plugin-import/-/babel-plugin-import-1.13.3.tgz#9dbbba7d1ac72bd412917a830d445e00941d26d7" - integrity sha512-1qCWdljJOrDRH/ybaCZuDgySii4yYrtQ8OJQwrcDqdt0y67N30ng3X3nABg6j7gR7qUJgcMa9OMhc4AGViDwWw== +babel-plugin-import@1.13.5: + version "1.13.5" + resolved "https://registry.yarnpkg.com/babel-plugin-import/-/babel-plugin-import-1.13.5.tgz#42eed1c5afd9a35ee1b1f8fe922b07c44077d753" + integrity sha512-IkqnoV+ov1hdJVofly9pXRJmeDm9EtROfrc5i6eII0Hix2xMs5FEm8FG3ExMvazbnZBbgHIt6qdO8And6lCloQ== dependencies: "@babel/helper-module-imports" "^7.0.0" - "@babel/runtime" "^7.0.0" -babel-plugin-istanbul@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz#e159ccdc9af95e0b570c75b4573b7c34d671d765" - integrity 
sha512-AF55rZXpe7trmEylbaE1Gv54wn6rwU03aptvRoVIGP8YykoSxqdVLV1TfwflBCE/QtHmqtP8SWlTENqbK8GCSQ== +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@istanbuljs/load-nyc-config" "^1.0.0" "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^4.0.0" + istanbul-lib-instrument "^5.0.4" test-exclude "^6.0.0" -babel-plugin-jest-hoist@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-26.6.2.tgz#8185bd030348d254c6d7dd974355e6a28b21e62d" - integrity sha512-PO9t0697lNTmcEHH69mdtYiOIkkOlj9fySqfO3K1eCcdISevLAE0xY59VLLUj0SoiPiTX/JU2CYFpILydUa5Lw== +babel-plugin-jest-hoist@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" + integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== dependencies: "@babel/template" "^7.3.3" "@babel/types" "^7.3.3" "@types/babel__core" "^7.0.0" "@types/babel__traverse" "^7.0.6" -babel-plugin-lodash@^3.3.2: - version "3.3.4" - resolved "https://registry.yarnpkg.com/babel-plugin-lodash/-/babel-plugin-lodash-3.3.4.tgz#4f6844358a1340baed182adbeffa8df9967bc196" - integrity sha512-yDZLjK7TCkWl1gpBeBGmuaDIFhZKmkoL+Cu2MUUjv5VxUZx/z7tBGBCBcQs5RI1Bkz5LLmNdjx7paOyQtMovyg== - dependencies: - "@babel/helper-module-imports" "^7.0.0-beta.49" - "@babel/types" "^7.0.0-beta.49" - glob "^7.1.1" - lodash "^4.17.10" - require-package-name "^2.0.1" - -babel-plugin-macros@2.8.0, babel-plugin-macros@^2.5.0: +babel-plugin-macros@^2.5.0: version "2.8.0" resolved "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg== @@ -6299,34 +5755,34 @@ babel-plugin-macros@^3.1.0: cosmiconfig "^7.0.0" resolve "^1.19.0" -babel-plugin-named-asset-import@^0.3.7: - version "0.3.7" - resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.7.tgz#156cd55d3f1228a5765774340937afc8398067dd" - integrity sha512-squySRkf+6JGnvjoUtDEjSREJEBirnXi9NqP6rjSYsylxQxqBTz+pkmf395i9E2zsvmYUaI40BHo6SqZUdydlw== +babel-plugin-named-asset-import@^0.3.8: + version "0.3.8" + resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== -babel-plugin-polyfill-corejs2@^0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.2.2.tgz#e9124785e6fd94f94b618a7954e5693053bf5327" - integrity sha512-kISrENsJ0z5dNPq5eRvcctITNHYXWOA4DUZRFYCz3jYCcvTb/A546LIddmoGNMVYg2U38OyFeNosQwI9ENTqIQ== +babel-plugin-polyfill-corejs2@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.3.tgz#75044d90ba5043a5fb559ac98496f62f3eb668fd" + integrity sha512-bM3gHc337Dta490gg+/AseNB9L4YLHxq1nGKZZSHbhXv4aTYU2MD2cjza1Ru4S6975YLTaL1K8uJf6ukJhhmtw== dependencies: - 
"@babel/compat-data" "^7.13.11" - "@babel/helper-define-polyfill-provider" "^0.2.2" + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.4.0" semver "^6.1.1" -babel-plugin-polyfill-corejs3@^0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.2.2.tgz#7424a1682ee44baec817327710b1b094e5f8f7f5" - integrity sha512-l1Cf8PKk12eEk5QP/NQ6TH8A1pee6wWDJ96WjxrMXFLHLOBFzYM4moG80HFgduVhTqAFez4alnZKEhP/bYHg0A== +babel-plugin-polyfill-corejs3@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.1.tgz#39248263c38191f0d226f928d666e6db1b4b3a8a" + integrity sha512-ikFrZITKg1xH6pLND8zT14UPgjKHiGLqex7rGEZCH2EvhsneJaJPemmpQaIZV5AL03II+lXylw3UmddDK8RU5Q== dependencies: - "@babel/helper-define-polyfill-provider" "^0.2.2" - core-js-compat "^3.9.1" + "@babel/helper-define-polyfill-provider" "^0.4.0" + core-js-compat "^3.30.1" -babel-plugin-polyfill-regenerator@^0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.2.2.tgz#b310c8d642acada348c1fa3b3e6ce0e851bee077" - integrity sha512-Goy5ghsc21HgPDFtzRkSirpZVW35meGoTmTOb2bxqdl60ghub4xOidgNTHaZfQ2FaxQsKmwvXtOAkcIS4SMBWg== +babel-plugin-polyfill-regenerator@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.0.tgz#e7344d88d9ef18a3c47ded99362ae4a757609380" + integrity sha512-hDJtKjMLVa7Z+LwnTCxoDLQj6wdc+B8dun7ayF2fYieI6OzfuvcLMB32ihJZ4UhCBwNYGl5bg/x/P9cMdnkc2g== dependencies: - "@babel/helper-define-polyfill-provider" "^0.2.2" + "@babel/helper-define-polyfill-provider" "^0.4.0" "babel-plugin-styled-components@>= 1.12.0": version "1.12.0" @@ -6343,25 +5799,12 @@ babel-plugin-syntax-jsx@^6.18.0: resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946" integrity sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY= -babel-plugin-syntax-object-rest-spread@^6.8.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5" - integrity sha1-/WU28rzhODb/o6VFjEkDpZe7O/U= - babel-plugin-syntax-trailing-function-commas@^7.0.0-beta.0: version "7.0.0-beta.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz#aa213c1435e2bffeb6fca842287ef534ad05d5cf" integrity sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ== -babel-plugin-transform-object-rest-spread@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz#0f36692d50fef6b7e2d4b3ac1478137a963b7b06" - integrity sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY= - dependencies: - babel-plugin-syntax-object-rest-spread "^6.8.0" - babel-runtime "^6.26.0" - -babel-plugin-transform-react-remove-prop-types@0.4.24: +babel-plugin-transform-react-remove-prop-types@^0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" integrity 
sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== @@ -6417,47 +5860,35 @@ babel-preset-fbjs@^3.4.0: "@babel/plugin-transform-template-literals" "^7.0.0" babel-plugin-syntax-trailing-function-commas "^7.0.0-beta.0" -babel-preset-jest@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-26.6.2.tgz#747872b1171df032252426586881d62d31798fee" - integrity sha512-YvdtlVm9t3k777c5NPQIv6cxFFFapys25HiUmuSgHwIZhfifweR5c5Sf5nwE3MAbfu327CYSvps8Yx6ANLyleQ== +babel-preset-jest@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" + integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== dependencies: - babel-plugin-jest-hoist "^26.6.2" + babel-plugin-jest-hoist "^27.5.1" babel-preset-current-node-syntax "^1.0.0" -babel-preset-react-app@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.0.tgz#689b60edc705f8a70ce87f47ab0e560a317d7045" - integrity sha512-itL2z8v16khpuKutx5IH8UdCdSTuzrOhRFTEdIhveZ2i1iBKDrVE0ATa4sFVy+02GLucZNVBWtoarXBy0Msdpg== - dependencies: - "@babel/core" "7.12.3" - "@babel/plugin-proposal-class-properties" "7.12.1" - "@babel/plugin-proposal-decorators" "7.12.1" - "@babel/plugin-proposal-nullish-coalescing-operator" "7.12.1" - "@babel/plugin-proposal-numeric-separator" "7.12.1" - "@babel/plugin-proposal-optional-chaining" "7.12.1" - "@babel/plugin-transform-flow-strip-types" "7.12.1" - "@babel/plugin-transform-react-display-name" "7.12.1" - "@babel/plugin-transform-runtime" "7.12.1" - "@babel/preset-env" "7.12.1" - "@babel/preset-react" "7.12.1" - "@babel/preset-typescript" "7.12.1" - "@babel/runtime" "7.12.1" - babel-plugin-macros "2.8.0" - babel-plugin-transform-react-remove-prop-types "0.4.24" - -babel-runtime@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" - integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= - dependencies: - core-js "^2.4.0" - regenerator-runtime "^0.11.0" - -babylon@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" - integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== +babel-preset-react-app@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== + dependencies: + "@babel/core" "^7.16.0" + "@babel/plugin-proposal-class-properties" "^7.16.0" + "@babel/plugin-proposal-decorators" "^7.16.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" + "@babel/plugin-proposal-numeric-separator" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.0" + "@babel/plugin-proposal-private-methods" "^7.16.0" + "@babel/plugin-transform-flow-strip-types" "^7.16.0" + "@babel/plugin-transform-react-display-name" "^7.16.0" + "@babel/plugin-transform-runtime" "^7.16.4" + "@babel/preset-env" "^7.16.4" + "@babel/preset-react" "^7.16.0" + "@babel/preset-typescript" "^7.16.0" + "@babel/runtime" "^7.16.3" + babel-plugin-macros "^3.1.0" + babel-plugin-transform-react-remove-prop-types "^0.4.24" 
backo2@^1.0.2: version "1.0.2" @@ -6479,7 +5910,7 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base64-js@^1.0.2, base64-js@^1.3.1: +base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -6524,23 +5955,11 @@ big.js@^5.2.2: resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== -binary-extensions@^1.0.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" - integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== - binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -bindings@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - blacklist@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/blacklist/-/blacklist-1.1.4.tgz#b2dd09d6177625b2caa69835a37b28995fa9a2f2" @@ -6551,16 +5970,6 @@ bluebird@3.7.2, bluebird@^3.5.5: resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9: - version "4.12.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== - -bn.js@^5.0.0, bn.js@^5.1.1: - version "5.2.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.0.tgz#358860674396c6997771a9d051fcc1b57d4ae002" - integrity sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw== - body-parser@1.20.1: version "1.20.1" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" @@ -6579,17 +5988,15 @@ body-parser@1.20.1: type-is "~1.6.18" unpipe "1.0.0" -bonjour@^3.5.0: - version "3.5.0" - resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" - integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= +bonjour-service@^1.0.11: + version "1.1.1" + resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.1.1.tgz#960948fa0e0153f5d26743ab15baf8e33752c135" + integrity sha512-Z/5lQRMOG9k7W+FkeGTNjh7htqn/2LMnfOvBZ8pynNZCM9MwkQkI3zeI4oz09uWdcgmgHugVvBqxGg4VQJ5PCg== dependencies: - array-flatten "^2.1.0" - deep-equal "^1.0.1" + array-flatten "^2.1.2" dns-equal "^1.0.0" - dns-txt "^2.0.2" - multicast-dns "^6.0.1" - 
multicast-dns-service-types "^1.1.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" boolbase@^1.0.0, boolbase@~1.0.0: version "1.0.0" @@ -6604,130 +6011,50 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^2.3.1, braces@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - -braces@^3.0.1, braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -brorand@^1.0.1, brorand@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= - -browser-process-hrtime@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" - integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -browserify-cipher@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" - integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== - dependencies: - browserify-aes "^1.0.4" - browserify-des "^1.0.0" - evp_bytestokey "^1.0.0" - -browserify-des@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" - integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== - dependencies: - cipher-base "^1.0.1" - des.js "^1.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: - version "4.1.0" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d" - integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog== - dependencies: - bn.js "^5.0.0" - randombytes "^2.0.1" - -browserify-sign@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" - integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== - dependencies: - bn.js "^5.1.1" - browserify-rsa "^4.0.1" - create-hash "^1.2.0" - create-hmac "^1.1.7" - elliptic "^6.5.3" - inherits "^2.0.4" 
- parse-asn1 "^5.1.5" - readable-stream "^3.6.0" - safe-buffer "^5.2.0" - -browserify-zlib@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" - integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== dependencies: - pako "~1.0.5" + balanced-match "^1.0.0" -browserslist@4.14.2: - version "4.14.2" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.14.2.tgz#1b3cec458a1ba87588cc5e9be62f19b6d48813ce" - integrity sha512-HI4lPveGKUR0x2StIz+2FXfDk9SfVMrxn6PLh1JeGUwcuoDkdKZebWiyLRJ68iIPDpMI4JLVDf7S7XzslgWOhw== +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: - caniuse-lite "^1.0.30001125" - electron-to-chromium "^1.3.564" - escalade "^3.0.2" - node-releases "^1.1.61" + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" -browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.6.2, browserslist@^4.6.4: - version "4.16.6" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" - integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: - caniuse-lite "^1.0.30001219" - colorette "^1.2.2" - electron-to-chromium "^1.3.723" - escalade "^3.1.1" - node-releases "^1.1.71" + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.21.3: - version "4.21.4" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" - integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: + version "4.21.9" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.9.tgz#e11bdd3c313d7e2a9e87e8b4b0c7872b13897635" + integrity sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg== dependencies: - caniuse-lite "^1.0.30001400" - electron-to-chromium "^1.4.251" - node-releases "^2.0.6" - update-browserslist-db "^1.0.9" + caniuse-lite "^1.0.30001503" + electron-to-chromium "^1.4.431" + node-releases 
"^2.0.12" + update-browserslist-db "^1.0.11" bser@2.1.1: version "2.1.1" @@ -6751,25 +6078,6 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer-indexof@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" - integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== - -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= - -buffer@^4.3.0: - version "4.9.2" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" - integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== - dependencies: - base64-js "^1.0.2" - ieee754 "^1.1.4" - isarray "^1.0.0" - buffer@^5.7.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" @@ -6783,11 +6091,6 @@ builtin-modules@^3.1.0: resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== -builtin-status-codes@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" - integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= - bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" @@ -6798,50 +6101,6 @@ bytes@3.1.2: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== -cacache@^12.0.2: - version "12.0.4" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.4.tgz#668bcbd105aeb5f1d92fe25570ec9525c8faa40c" - integrity sha512-a0tMB40oefvuInr4Cwb3GerbL9xTj1D5yg0T5xrjGCGyfvbxseIXX7BAO/u/hIXdafzOI5JC3wDwHyf24buOAQ== - dependencies: - bluebird "^3.5.5" - chownr "^1.1.1" - figgy-pudding "^3.5.1" - glob "^7.1.4" - graceful-fs "^4.1.15" - infer-owner "^1.0.3" - lru-cache "^5.1.1" - mississippi "^3.0.0" - mkdirp "^0.5.1" - move-concurrently "^1.0.1" - promise-inflight "^1.0.1" - rimraf "^2.6.3" - ssri "^6.0.1" - unique-filename "^1.1.1" - y18n "^4.0.0" - -cacache@^15.0.5: - version "15.2.0" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.2.0.tgz#73af75f77c58e72d8c630a7a2858cb18ef523389" - integrity sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw== - dependencies: - "@npmcli/move-file" "^1.0.1" - chownr "^2.0.0" - fs-minipass "^2.0.0" - glob "^7.1.4" - infer-owner "^1.0.4" - lru-cache "^6.0.0" - minipass "^3.1.1" - minipass-collect "^1.0.2" - minipass-flush "^1.0.5" - minipass-pipeline "^1.2.2" - mkdirp "^1.0.3" - p-map "^4.0.0" - promise-inflight "^1.0.1" - rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.0.2" - unique-filename "^1.1.1" - cache-base@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" @@ -6878,25 +6137,6 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" -caller-callsite@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" - integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= - dependencies: - callsites "^2.0.0" - -caller-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" - integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= - dependencies: - caller-callsite "^2.0.0" - -callsites@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" - integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= - callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -6918,15 +6158,20 @@ camel-case@4.1.2, camel-case@^4.1.1, camel-case@^4.1.2: pascal-case "^3.1.2" tslib "^2.0.3" -camelcase@5.3.1, camelcase@^5.0.0, camelcase@^5.3.1: +camelcase-css@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^5.0.0, camelcase@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^6.0.0, camelcase@^6.1.0, camelcase@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809" - integrity sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg== +camelcase@^6.2.0, camelcase@^6.2.1: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== camelize@^1.0.0: version "1.0.0" @@ -6943,10 +6188,10 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000981, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001125, caniuse-lite@^1.0.30001219, caniuse-lite@^1.0.30001400: - version "1.0.30001518" - resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001518.tgz" - integrity sha512-rup09/e3I0BKjncL+FesTayKtPrdwKhUufQFd3riFw1hHg8JmIFoInYfB102cFcY/pPgGmdyl/iy+jgiDi2vdA== +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: + version "1.0.30001508" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001508.tgz#4461bbc895c692a96da399639cc1e146e7302a33" + integrity sha512-sdQZOJdmt3GJs1UMNpCCCyeuS2IEGLXnHyAo9yIO5JJDjbjoVRij4M1qep6P6gFpptD1PqIYgzM+gwJbOi92mw== capital-case@^1.0.4: version "1.0.4" @@ -6957,22 +6202,15 @@ capital-case@^1.0.4: tslib "^2.0.3" upper-case-first "^2.0.2" -capture-exit@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/capture-exit/-/capture-exit-2.0.0.tgz#fb953bfaebeb781f62898239dabb426d08a509a4" - integrity 
sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g== - dependencies: - rsvp "^4.8.4" - case-anything@^2.1.10: version "2.1.10" resolved "https://registry.yarnpkg.com/case-anything/-/case-anything-2.1.10.tgz#d18a6ca968d54ec3421df71e3e190f3bced23410" integrity sha512-JczJwVrCP0jPKh05McyVsuOg6AYosrB9XWZKbQzXeDAm2ClE/PJE/BcrrQrVyGYH7Jg8V/LDupmyL4kFlVsVFQ== -case-sensitive-paths-webpack-plugin@2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz#23ac613cc9a856e4f88ff8bb73bbb5e989825cf7" - integrity sha512-/4YgnZS8y1UXXmC02xD5rRrBEu6T5ub+mQHLNRj0fzTRbgdBYhsNo2V5EqwgqrExjxsjtF/OpAKAMkKsxbD5XQ== +case-sensitive-paths-webpack-plugin@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== caseless@~0.12.0: version "0.12.0" @@ -6984,15 +6222,6 @@ ccount@^1.0.0: resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== -chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - chalk@^1.0.0, chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" @@ -7004,6 +6233,15 @@ chalk@^1.0.0, chalk@^1.1.3: strip-ansi "^3.0.0" supports-color "^2.0.0" +chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + chalk@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" @@ -7012,10 +6250,10 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" - integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" @@ -7075,6 +6313,11 @@ char-regex@^1.0.2: resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== +char-regex@^2.0.0: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" + integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== + character-entities-html4@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-1.1.4.tgz#0e64b0a3753ddbf1fdc044c5fd01d0199a02e125" @@ -7110,72 +6353,35 @@ check-types@^11.1.1: resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== -chokidar@^2.1.8: - version "2.1.8" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" - integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== - dependencies: - anymatch "^2.0.0" - async-each "^1.0.1" - braces "^2.3.2" - glob-parent "^3.1.0" - inherits "^2.0.3" - is-binary-path "^1.0.0" - is-glob "^4.0.0" - normalize-path "^3.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.2.1" - upath "^1.1.1" - optionalDependencies: - fsevents "^1.2.7" - -chokidar@^3.4.1, chokidar@^3.4.3: - version "3.5.1" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a" - integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== +chokidar@^3.4.2, chokidar@^3.4.3, chokidar@^3.5.3: + version "3.5.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== dependencies: - anymatch "~3.1.1" + anymatch "~3.1.2" braces "~3.0.2" - glob-parent "~5.1.0" + glob-parent "~5.1.2" is-binary-path "~2.1.0" is-glob "~4.0.1" normalize-path "~3.0.0" - readdirp "~3.5.0" + readdirp "~3.6.0" optionalDependencies: - fsevents "~2.3.1" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -chownr@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" - integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== + fsevents "~2.3.2" chrome-trace-event@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== -ci-info@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== - -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" +ci-info@^3.2.0: + version "3.8.0" + resolved 
"https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" + integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== -cjs-module-lexer@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-0.6.0.tgz#4186fcca0eae175970aee870b9fe2d6cf8d5655f" - integrity sha512-uc2Vix1frTfnuzxxu1Hp4ktSvM3QaI4oXl4ZUqL1wjTu/BGki9TrCWoqLTg/drR1KwAEarXuRFCG2Svr1GxPFw== +cjs-module-lexer@^1.0.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" + integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== class-utils@^0.3.5: version "0.3.6" @@ -7187,28 +6393,23 @@ class-utils@^0.3.5: isobject "^3.0.0" static-extend "^0.1.1" -classnames@*, classnames@2.3.1, classnames@2.x, classnames@^2.2.1, classnames@^2.2.3, classnames@^2.2.5, classnames@^2.2.6: +classnames@2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== -classnames@^2.3.1, classnames@^2.3.2: +classnames@2.x, classnames@^2.2.1, classnames@^2.2.3, classnames@^2.2.5, classnames@^2.2.6, classnames@^2.3.1, classnames@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.2.tgz#351d813bf0137fcc6a76a16b88208d2560a0d924" integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== -clean-css@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.3.tgz#507b5de7d97b48ee53d84adb0160ff6216380f78" - integrity sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA== +clean-css@^5.2.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.2.tgz#70ecc7d4d4114921f5d298349ff86a31a9975224" + integrity sha512-JVJbM+f3d3Q704rF4bqQ5UUyTtuJ0JRKNbTKVEeujCCBoMdkEi+V+e8oktO9qGQNSvHrFTM6JZRXrUvGR1czww== dependencies: source-map "~0.6.0" -clean-stack@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" - integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== - cli-cursor@^2.0.0, cli-cursor@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" @@ -7245,15 +6446,6 @@ clipboard@^2.0.0: select "^1.1.2" tiny-emitter "^2.0.0" -cliui@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" - integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== - dependencies: - string-width "^3.1.0" - strip-ansi "^5.2.0" - wrap-ansi "^5.1.0" - cliui@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" @@ -7272,6 +6464,15 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity 
sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + clone-response@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" @@ -7279,11 +6480,6 @@ clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" -clone@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" - integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= - clsx@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" @@ -7326,7 +6522,7 @@ collection-visit@^1.0.0: map-visit "^1.0.0" object-visit "^1.0.0" -color-convert@^1.9.0, color-convert@^1.9.1: +color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== @@ -7350,36 +6546,25 @@ color-name@1.1.3: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= -color-name@^1.0.0, color-name@~1.1.4: +color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -color-string@^1.5.4: - version "1.5.5" - resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.5.tgz#65474a8f0e7439625f3d27a6a19d89fc45223014" - integrity sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg== - dependencies: - color-name "^1.0.0" - simple-swizzle "^0.2.2" - color2k@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/color2k/-/color2k-2.0.0.tgz#86992c82e248c29f524023ed0822bc152c4fa670" integrity sha512-DWX9eXOC4fbJNiuvdH4QSHvvfLWyFo9TuFp7V9OzdsbPAdrWAuYc8qvFP2bIQ/LKh4LrAVnJ6vhiQYPvAHdtTg== -color@^3.0.0: - version "3.1.3" - resolved "https://registry.yarnpkg.com/color/-/color-3.1.3.tgz#ca67fb4e7b97d611dcde39eceed422067d91596e" - integrity sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ== - dependencies: - color-convert "^1.9.1" - color-string "^1.5.4" +colord@^2.9.1: + version "2.9.3" + resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== -colorette@^1.2.1, colorette@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" - integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== +colorette@^2.0.10: + version "2.0.20" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: version "1.0.8" @@ -7398,11 +6583,26 @@ commander@^2.20.0: resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== -commander@^4.1.1: +commander@^4.0.0: version "4.1.1" resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== +commander@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + common-tags@1.8.0, common-tags@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" @@ -7423,13 +6623,6 @@ component-emitter@^1.2.1: resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== -compose-function@3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/compose-function/-/compose-function-3.0.3.tgz#9ed675f13cc54501d30950a486ff6a7ba3ab185f" - integrity sha1-ntZ18TzFRQHTCVCkhv9qe6OrGF8= - dependencies: - arity-n "^1.0.4" - compressible@~2.0.16: version "2.0.18" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" @@ -7460,30 +6653,15 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-stream@^1.5.0: - version "1.6.2" - resolved "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - -confusing-browser-globals@^1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.10.tgz#30d1e7f3d1b882b25ec4933d1d1adac353d20a59" - integrity sha512-gNld/3lySHwuhaVluJUKLePYirM3QNCKzVxqAdhJII9/WXKVX5PURzMVJspS1jTslSqjeuG4KMVTSouit5YPHA== - -connect-history-api-fallback@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" - integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== +confusing-browser-globals@^1.0.10, confusing-browser-globals@^1.0.11: + version "1.0.11" + resolved 
"https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== -console-browserify@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" - integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== constant-case@^3.0.3, constant-case@^3.0.4: version "3.0.4" @@ -7494,11 +6672,6 @@ constant-case@^3.0.3, constant-case@^3.0.4: tslib "^2.0.3" upper-case "^2.0.2" -constants-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" - integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= - content-disposition@0.5.4: version "0.5.4" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" @@ -7511,19 +6684,7 @@ content-type@~1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== -convert-source-map@1.7.0, convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" - integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== - dependencies: - safe-buffer "~5.1.1" - -convert-source-map@^0.3.3: - version "0.3.5" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.3.5.tgz#f1d802950af7dd2631a1febe0596550c86ab3190" - integrity sha1-8dgClQr33SYxof6+BZZVDIarMZA= - -convert-source-map@^1.5.0: +convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: version "1.9.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== @@ -7545,76 +6706,46 @@ copy-anything@^2.0.1: dependencies: is-what "^3.12.0" -copy-concurrently@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" - integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== - dependencies: - aproba "^1.1.1" - fs-write-stream-atomic "^1.0.8" - iferr "^0.1.5" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.0" - copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= -copy-to-clipboard@^3.2.0: - version "3.3.1" - resolved 
"https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz#115aa1a9998ffab6196f93076ad6da3b913662ae" - integrity sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw== - dependencies: - toggle-selection "^1.0.6" - -copy-to-clipboard@^3.3.1: +copy-to-clipboard@^3.2.0, copy-to-clipboard@^3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz#55ac43a1db8ae639a4bd99511c148cdd1b83a1b0" integrity sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA== dependencies: toggle-selection "^1.0.6" -copy-webpack-plugin@6.4.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-6.4.1.tgz#138cd9b436dbca0a6d071720d5414848992ec47e" - integrity sha512-MXyPCjdPVx5iiWyl40Va3JGh27bKzOTNY3NjUTrosD2q7dR/cLD0013uqJ3BpFbUjyONINjb6qI7nDIJujrMbA== +copy-webpack-plugin@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz#96d4dbdb5f73d02dd72d0528d1958721ab72e04a" + integrity sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ== dependencies: - cacache "^15.0.5" - fast-glob "^3.2.4" - find-cache-dir "^3.3.1" - glob-parent "^5.1.1" - globby "^11.0.1" - loader-utils "^2.0.0" + fast-glob "^3.2.11" + glob-parent "^6.0.1" + globby "^13.1.1" normalize-path "^3.0.0" - p-limit "^3.0.2" - schema-utils "^3.0.0" - serialize-javascript "^5.0.1" - webpack-sources "^1.4.3" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" -core-js-compat@^3.6.2, core-js-compat@^3.9.0, core-js-compat@^3.9.1: - version "3.13.0" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.13.0.tgz#a88f5fa81d8e9b15d7f98abc4447a4dfca2a358f" - integrity sha512-jhbI2zpVskgfDC9mGRaDo1gagd0E0i/kYW0+WvibL/rafEHKAHO653hEXIxJHqRlRLITluXtRH3AGTL5qJmifQ== +core-js-compat@^3.30.1, core-js-compat@^3.30.2: + version "3.31.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.31.0.tgz#4030847c0766cc0e803dcdfb30055d7ef2064bf1" + integrity sha512-hM7YCu1cU6Opx7MXNu0NuumM0ezNeAeRKadixyiQELWY3vT3De9S4J5ZBMraWV2vZnrE1Cirl0GtFtDtMUXzPw== dependencies: - browserslist "^4.16.6" - semver "7.0.0" + browserslist "^4.21.5" -core-js-pure@^3.0.0: - version "3.13.0" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.13.0.tgz#9d267fb47d1d7046cfbc05e7b67bb235b6735355" - integrity sha512-7VTvXbsMxROvzPAVczLgfizR8CyYnvWPrb1eGrtlZAJfjQWEHLofVfCKljLHdpazTfpaziRORwUH/kfGDKvpdA== +core-js-pure@^3.0.0, core-js-pure@^3.23.3: + version "3.31.0" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.31.0.tgz#052fd9e82fbaaf86457f5db1fadcd06f15966ff2" + integrity sha512-/AnE9Y4OsJZicCzIe97JP5XoPKQJfTuEG43aEVLFJGOJpyqELod+pE6LEl63DfG1Mp8wX97LDaDpy1GmLEUxlg== -core-js@^2.4.0: - version "2.6.12" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec" - integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== - -core-js@^3.6.5: - version "3.13.0" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.13.0.tgz#58ca436bf01d6903aee3d364089868d0d89fe58d" - integrity sha512-iWDbiyha1M5vFwPFmQnvRv+tJzGbFAm6XimJUT0NgHYW3xZEs1SkCAcasWSVFxpI2Xb/V1DDJckq3v90+bQnog== +core-js@^3.19.2: + version "3.31.0" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.31.0.tgz#4471dd33e366c79d8c0977ed2d940821719db344" + 
integrity sha512-NIp2TQSGfR6ba5aalZD+ZQ1fSxGhDo/s1w0nx3RYzf2pnJxt7YynxFlFScP6eV7+GZsKO95NSjGxyJsU3DZgeQ== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" @@ -7628,7 +6759,15 @@ cosmiconfig-toml-loader@1.0.0: dependencies: "@iarna/toml" "^2.2.5" -cosmiconfig@7.0.0, cosmiconfig@^7.0.0: +cosmiconfig-typescript-loader@^1.0.0: + version "1.0.9" + resolved "https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz#69c523f7e8c3d9f27f563d02bbeadaf2f27212d3" + integrity sha512-tRuMRhxN4m1Y8hP9SNYfz7jRwt8lZdWxdjg/ohg5esKmsndJIn4yT96oJVcf5x0eA11taXl+sIp+ielu529k6g== + dependencies: + cosmiconfig "^7" + ts-node "^10.7.0" + +cosmiconfig@7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== @@ -7639,16 +6778,6 @@ cosmiconfig@7.0.0, cosmiconfig@^7.0.0: path-type "^4.0.0" yaml "^1.10.0" -cosmiconfig@^5.0.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" - integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== - dependencies: - import-fresh "^2.0.0" - is-directory "^0.3.1" - js-yaml "^3.13.1" - parse-json "^4.0.0" - cosmiconfig@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" @@ -7660,22 +6789,38 @@ cosmiconfig@^6.0.0: path-type "^4.0.0" yaml "^1.7.2" -craco-antd@^1.19.0: - version "1.19.0" - resolved "https://registry.yarnpkg.com/craco-antd/-/craco-antd-1.19.0.tgz#ad3b95853a29a20c6de4003874517ae2a995064b" - integrity sha512-MpbF2LQxb/POiR003oOkuAhHwpyRx1w5opyg7SA4/2og/FMRR/2oca/eqKYQ7vre2dOpt64gkQ5xWETktHWCQQ== +cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +craco-antd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/craco-antd/-/craco-antd-2.0.0.tgz#f38977f4de1714e984ad4f68aae2bcce81bdab79" + integrity sha512-qrWBvsDM6ZmR5sBzEpB+rpyN3ZicewL4DTjAnKNR0YxzMzGrM6HqYgRgy+SplGtVvowFzo1a2PPmb8jfolWGrQ== dependencies: - babel-plugin-import "^1.13.1" - craco-less "1.17.0" - less-vars-to-js "^1.3.0" + babel-plugin-import "1.13.5" + craco-less "2.0.0" + less-vars-to-js "1.3.0" + +craco-babel-loader@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/craco-babel-loader/-/craco-babel-loader-1.0.4.tgz#b887dff41f4ad1e8be84f90c1277af16475ada78" + integrity sha512-qoCsRWV/cMcucuIe+m09GRudCwAWtEhZl830MR2N0/weXBK+0VybsL19pUQtH0TwC33v9ll4myu/4TGBxnqZeA== -craco-less@1.17.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/craco-less/-/craco-less-1.17.0.tgz#3d92e59966e1fdecef4d173b2389985a75cb6bb9" - integrity sha512-G+GPEKPPKiSvYDsnQWuj1C4CIuaY8w+iHvULHkNf5QWLE0LkPfSRf3frhRDJjFxtkThpLPSLjWndD9kx8bCWzw== +craco-less@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/craco-less/-/craco-less-2.0.0.tgz#a2df18c32e97ebf00f62c3f2ea4cd97035f5f640" + integrity 
sha512-980mQaZVrC4ZsvOwvud6/AgvW7fLY3mW5m5+gR4sw4krxszgHb+qoRyOjqsYPD0F4oUmQoSiZSrlYY/bFGD9kQ== dependencies: - less "^3.11.1" - less-loader "^6.1.0" + less "^4.1.1" + less-loader "^7.3.0" create-context-state@^2.0.0: version "2.0.0" @@ -7684,37 +6829,6 @@ create-context-state@^2.0.0: dependencies: "@babel/runtime" "^7.13.10" -create-ecdh@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" - integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== - dependencies: - bn.js "^4.1.0" - elliptic "^6.5.3" - -create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" - integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - md5.js "^1.3.4" - ripemd160 "^2.0.1" - sha.js "^2.4.0" - -create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" - integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" - ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - create-react-class@^15.6.2: version "15.7.0" resolved "https://registry.yarnpkg.com/create-react-class/-/create-react-class-15.7.0.tgz#7499d7ca2e69bb51d13faf59bd04f0c65a1d6c1e" @@ -7740,14 +6854,14 @@ cross-fetch@3.0.6: dependencies: node-fetch "2.6.1" -cross-fetch@3.1.4, cross-fetch@^3.0.6: +cross-fetch@3.1.4: version "3.1.4" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.4.tgz#9723f3a3a247bf8b89039f3a380a9244e8fa2f39" integrity sha512-1eAtFWdIubi6T4XPy6ei9iUFoKpUkIF971QLN8lIvvvwueI65+Nw5haMNKUwfJxabqlIIDODJKGrQ66gxC0PbQ== dependencies: node-fetch "2.6.1" -cross-fetch@^3.1.5: +cross-fetch@^3.0.6, cross-fetch@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== @@ -7761,7 +6875,7 @@ cross-inspect@1.0.0: dependencies: tslib "^2.4.0" -cross-spawn@7.0.3, cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -7770,71 +6884,34 @@ cross-spawn@7.0.3, cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" -cross-spawn@^6.0.0: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== - dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" - -crypto-browserify@^3.11.0: - version "3.12.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" - 
integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== - dependencies: - browserify-cipher "^1.0.0" - browserify-sign "^4.0.0" - create-ecdh "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.0" - diffie-hellman "^5.0.0" - inherits "^2.0.1" - pbkdf2 "^3.0.3" - public-encrypt "^4.0.0" - randombytes "^2.0.0" - randomfill "^1.0.3" - -crypto-random-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" - integrity sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4= +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== -css-blank-pseudo@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-0.1.4.tgz#dfdefd3254bf8a82027993674ccf35483bfcb3c5" - integrity sha512-LHz35Hr83dnFeipc7oqFDmsjHdljj3TQtxGGiNWSOsTLIAubSm4TEz8qCaKFpk7idaQ1GfWscF4E6mgpBysA1w== +css-blank-pseudo@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" + integrity sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== dependencies: - postcss "^7.0.5" + postcss-selector-parser "^6.0.9" css-color-keywords@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/css-color-keywords/-/css-color-keywords-1.0.0.tgz#fea2616dc676b2962686b3af8dbdbe180b244e05" integrity sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU= -css-color-names@0.0.4, css-color-names@^0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0" - integrity sha1-gIrcLnnPhHOAabZGyyDsJ762KeA= - -css-declaration-sorter@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-4.0.1.tgz#c198940f63a76d7e36c1e71018b001721054cb22" - integrity sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA== - dependencies: - postcss "^7.0.1" - timsort "^0.3.0" +css-declaration-sorter@^6.3.1: + version "6.4.0" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.4.0.tgz#630618adc21724484b3e9505bce812def44000ad" + integrity sha512-jDfsatwWMWN0MODAFuHszfjphEXfNw9JUAhmY4pLu3TyTU+ohUpsbVtbU+1MZn4a47D9kqh03i4eyOm+74+zew== -css-has-pseudo@^0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-0.10.0.tgz#3c642ab34ca242c59c41a125df9105841f6966ee" - integrity sha512-Z8hnfsZu4o/kt+AuFzeGpLVhFOGO9mluyHBaA2bA8aCGTwah5sT3WV/fTHH8UNZUytOIImuGPrl/prlb4oX4qQ== +css-has-pseudo@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" + integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== dependencies: - postcss "^7.0.6" - postcss-selector-parser "^5.0.0-rc.4" + postcss-selector-parser "^6.0.9" css-in-js-utils@^3.1.0: version "3.1.0" @@ -7843,37 +6920,43 @@ css-in-js-utils@^3.1.0: dependencies: hyphenate-style-name "^1.0.3" -css-loader@4.3.0: - version "4.3.0" - resolved 
"https://registry.yarnpkg.com/css-loader/-/css-loader-4.3.0.tgz#c888af64b2a5b2e85462c72c0f4a85c7e2e0821e" - integrity sha512-rdezjCjScIrsL8BSYszgT4s476IcNKt6yX69t0pHjJVnPUTDpn4WfIpDQTN3wCJvUvfsz/mFjuGOekf3PY3NUg== - dependencies: - camelcase "^6.0.0" - cssesc "^3.0.0" - icss-utils "^4.1.1" - loader-utils "^2.0.0" - postcss "^7.0.32" - postcss-modules-extract-imports "^2.0.0" - postcss-modules-local-by-default "^3.0.3" - postcss-modules-scope "^2.2.0" - postcss-modules-values "^3.0.0" - postcss-value-parser "^4.1.0" - schema-utils "^2.7.1" - semver "^7.3.2" +css-loader@^6.5.1: + version "6.8.1" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.8.1.tgz#0f8f52699f60f5e679eab4ec0fcd68b8e8a50a88" + integrity sha512-xDAXtEVGlD0gJ07iclwWVkLoZOpEvAWaSyf6W18S2pOC//K8+qUDIx8IIT3D+HjnmkJPQeesOPv5aiUaJsCM2g== + dependencies: + icss-utils "^5.1.0" + postcss "^8.4.21" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.3" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.2.0" + semver "^7.3.8" + +css-minimizer-webpack-plugin@^3.2.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" -css-prefers-color-scheme@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-3.1.1.tgz#6f830a2714199d4f0d0d0bb8a27916ed65cff1f4" - integrity sha512-MTu6+tMs9S3EUqzmqLXEcgNRbNkkD/TGFvowpeoWJn5Vfq7FMgsmRQs9X5NXAURiOBmOxm/lLjsDNXDE6k9bhg== - dependencies: - postcss "^7.0.5" +css-prefers-color-scheme@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" + integrity sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== css-select-base-adapter@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== -css-select@^2.0.0, css-select@^2.0.2: +css-select@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== @@ -7883,6 +6966,17 @@ css-select@^2.0.0, css-select@^2.0.2: domutils "^1.7.0" nth-check "^1.0.2" +css-select@^4.1.3: + version "4.3.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + css-to-react-native@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/css-to-react-native/-/css-to-react-native-3.0.0.tgz#62dbe678072a824a689bcfee011fc96e02a7d756" @@ -7900,7 +6994,7 @@ css-tree@1.0.0-alpha.37: mdn-data "2.0.4" source-map "^0.6.1" 
-css-tree@^1.1.2: +css-tree@^1.1.2, css-tree@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== @@ -7913,21 +7007,16 @@ css-what@^3.2.1: resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== +css-what@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + css.escape@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= -css@^2.0.0: - version "2.2.4" - resolved "https://registry.yarnpkg.com/css/-/css-2.2.4.tgz#c646755c73971f2bba6a601e2cf2fd71b1298929" - integrity sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw== - dependencies: - inherits "^2.0.3" - source-map "^0.6.1" - source-map-resolve "^0.5.2" - urix "^0.1.0" - css@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" @@ -7937,90 +7026,66 @@ css@^3.0.0: source-map "^0.6.1" source-map-resolve "^0.6.0" -cssdb@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-4.4.0.tgz#3bf2f2a68c10f5c6a08abd92378331ee803cddb0" - integrity sha512-LsTAR1JPEM9TpGhl/0p3nQecC2LJ0kD8X5YARu1hk/9I1gril5vDtMZyNxcEpxxDj34YNck/ucjuoUd66K03oQ== - -cssesc@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-2.0.0.tgz#3b13bd1bb1cb36e1bcb5a4dcd27f54c5dcb35703" - integrity sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg== +cssdb@^7.1.0: + version "7.6.0" + resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-7.6.0.tgz#beac8f7a5f676db62d3c33da517ef4c9eb008f8b" + integrity sha512-Nna7rph8V0jC6+JBY4Vk4ndErUmfJfV6NJCaZdurL0omggabiy+QB2HCQtu5c/ACLZ0I7REv7A4QyPIoYzZx0w== cssesc@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== -cssnano-preset-default@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-4.0.8.tgz#920622b1fc1e95a34e8838203f1397a504f2d3ff" - integrity sha512-LdAyHuq+VRyeVREFmuxUZR1TXjQm8QQU/ktoo/x7bz+SdOge1YKc5eMN6pRW7YWBmyq59CqYba1dJ5cUukEjLQ== - dependencies: - css-declaration-sorter "^4.0.1" - cssnano-util-raw-cache "^4.0.1" - postcss "^7.0.0" - postcss-calc "^7.0.1" - postcss-colormin "^4.0.3" - postcss-convert-values "^4.0.1" - postcss-discard-comments "^4.0.2" - postcss-discard-duplicates "^4.0.2" - postcss-discard-empty "^4.0.1" - postcss-discard-overridden "^4.0.1" - postcss-merge-longhand "^4.0.11" - postcss-merge-rules "^4.0.3" - postcss-minify-font-values "^4.0.2" - postcss-minify-gradients "^4.0.2" - postcss-minify-params "^4.0.2" - postcss-minify-selectors "^4.0.2" - postcss-normalize-charset "^4.0.1" - postcss-normalize-display-values "^4.0.2" - postcss-normalize-positions "^4.0.2" - postcss-normalize-repeat-style "^4.0.2" 
- postcss-normalize-string "^4.0.2" - postcss-normalize-timing-functions "^4.0.2" - postcss-normalize-unicode "^4.0.1" - postcss-normalize-url "^4.0.1" - postcss-normalize-whitespace "^4.0.2" - postcss-ordered-values "^4.1.2" - postcss-reduce-initial "^4.0.3" - postcss-reduce-transforms "^4.0.2" - postcss-svgo "^4.0.3" - postcss-unique-selectors "^4.0.1" - -cssnano-util-get-arguments@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/cssnano-util-get-arguments/-/cssnano-util-get-arguments-4.0.0.tgz#ed3a08299f21d75741b20f3b81f194ed49cc150f" - integrity sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8= - -cssnano-util-get-match@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/cssnano-util-get-match/-/cssnano-util-get-match-4.0.0.tgz#c0e4ca07f5386bb17ec5e52250b4f5961365156d" - integrity sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0= - -cssnano-util-raw-cache@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/cssnano-util-raw-cache/-/cssnano-util-raw-cache-4.0.1.tgz#b26d5fd5f72a11dfe7a7846fb4c67260f96bf282" - integrity sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA== - dependencies: - postcss "^7.0.0" - -cssnano-util-same-parent@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/cssnano-util-same-parent/-/cssnano-util-same-parent-4.0.1.tgz#574082fb2859d2db433855835d9a8456ea18bbf3" - integrity sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q== +cssnano-preset-default@^5.2.14: + version "5.2.14" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.14.tgz#309def4f7b7e16d71ab2438052093330d9ab45d8" + integrity sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A== + dependencies: + css-declaration-sorter "^6.3.1" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.1" + postcss-convert-values "^5.1.3" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.7" + postcss-merge-rules "^5.1.4" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.4" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + postcss-normalize-positions "^5.1.1" + postcss-normalize-repeat-style "^5.1.1" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.1" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.3" + postcss-reduce-initial "^5.1.2" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== -cssnano@^4.1.10: - version "4.1.11" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-4.1.11.tgz#c7b5f5b81da269cb1fd982cb960c1200910c9a99" - integrity sha512-6gZm2htn7xIPJOHY824ERgj8cNPgPxyCSnkXc4v7YvNW+TdVfzgngHcEhy/8D11kUWRUMbke+tC+AUcUsnMz2g== +cssnano@^5.0.6: + version "5.1.15" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.15.tgz#ded66b5480d5127fcb44dac12ea5a983755136bf" + integrity 
sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw== dependencies: - cosmiconfig "^5.0.0" - cssnano-preset-default "^4.0.8" - is-resolvable "^1.0.0" - postcss "^7.0.0" + cssnano-preset-default "^5.2.14" + lilconfig "^2.0.3" + yaml "^1.10.2" -csso@^4.0.2: +csso@^4.0.2, csso@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== @@ -8041,107 +7106,75 @@ cssstyle@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" - -csstype@^3.0.2: - version "3.0.8" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.8.tgz#d2266a792729fb227cd216fb572f43728e1ad340" - integrity sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw== - -csstype@^3.0.6, csstype@^3.0.7, csstype@^3.1.0, csstype@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" - integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== - -cyclist@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" - integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= - -d3-array@2, d3-array@^2.3.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-2.12.1.tgz#e20b41aafcdffdf5d50928004ececf815a465e81" - integrity sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ== - dependencies: - internmap "^1.0.0" - -d3-array@^1.2.0, d3-array@^1.2.1: - version "1.2.4" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-1.2.4.tgz#635ce4d5eea759f6f605863dbcfc30edc737f71f" - integrity sha512-KHW6M86R+FUPYGb3R5XiYjXPq7VzwxZ22buHhAEVG5ztoEcZZMLov530mmccaqA1GghZArjQV46fuc8kUqhhHw== + dependencies: + cssom "~0.3.6" -d3-collection@1: - version "1.0.7" - resolved "https://registry.yarnpkg.com/d3-collection/-/d3-collection-1.0.7.tgz#349bd2aa9977db071091c13144d5e4f16b5b310e" - integrity sha512-ii0/r5f4sjKNTfh84Di+DpztYwqKhEyUlKoPrzUFfeSkWxjW49xU2QzO9qrPrNkpdI0XJkfzvmTu8V2Zylln6A== +csstype@^3.0.2, csstype@^3.0.6, csstype@^3.0.7, csstype@^3.1.0, csstype@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" + integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== -d3-color@1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-1.4.1.tgz#c52002bf8846ada4424d55d97982fef26eb3bc8a" - integrity sha512-p2sTHSLCJI2QKunbGb7ocOh7DgTAn8IrLx21QRc/BSnodXM4sv6aLQlnfpvehFMLZEfBc6g9pH9SWQccFYfJ9Q== +"d3-array@2 - 3", "d3-array@2.10.0 - 3": + version "3.2.4" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5" + integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== + dependencies: + internmap "1 - 2" -"d3-color@1 - 2": - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-2.0.0.tgz#8d625cab42ed9b8f601a1760a389f7ea9189d62e" - integrity 
sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ== +d3-array@3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.1.tgz#39331ea706f5709417d31bbb6ec152e0328b39b3" + integrity sha512-gUY/qeHq/yNqqoCKNq4vtpFLdoCdvyNpWoC/KNjhGbhDuQpAM9sIQQKkXSNpXa9h5KySs/gzm7R88WkUutgwWQ== + dependencies: + internmap "1 - 2" -d3-format@1: - version "1.4.5" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-1.4.5.tgz#374f2ba1320e3717eb74a9356c67daee17a7edb4" - integrity sha512-J0piedu6Z8iB6TbIGfZgDzfXxUFN3qQRMofy2oPdXzQibYGqPB/9iMcxr/TGalU+2RsyDO+U4f33id8tbnSRMQ== +"d3-color@1 - 3", d3-color@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2" + integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== -"d3-format@1 - 2": - version "2.0.0" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-2.0.0.tgz#a10bcc0f986c372b729ba447382413aabf5b0767" - integrity sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA== +"d3-format@1 - 3", d3-format@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== d3-hierarchy@^1.1.4: version "1.1.9" resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-1.1.9.tgz#2f6bee24caaea43f8dc37545fa01628559647a83" integrity sha512-j8tPxlqh1srJHAtxfvOUwKNYJkQuBFdM1+JAUfq6xqH5eAqf93L7oG1NVqDa4CpFZNvnNKtCYEUC8KY9yEn9lQ== -d3-interpolate@1, d3-interpolate@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-1.4.0.tgz#526e79e2d80daa383f9e0c1c1c7dcc0f0583e987" - integrity sha512-V9znK0zc3jOPV4VD2zZn0sDhZU3WAE2bmlxdIwwQPPzPjvyLkd8B3JUVdS1IDUFDkWZ72c9qnv1GK2ZagTZ8EA== - dependencies: - d3-color "1" +d3-interpolate-path@2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/d3-interpolate-path/-/d3-interpolate-path-2.2.1.tgz#fd8ff20a90aff3f380bcd1c15305e7b531e55d07" + integrity sha512-6qLLh/KJVzls0XtMsMpcxhqMhgVEN7VIbR/6YGZe2qlS8KDgyyVB20XcmGnDyB051HcefQXM/Tppa9vcANEA4Q== -"d3-interpolate@1.2.0 - 2": - version "2.0.1" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-2.0.1.tgz#98be499cfb8a3b94d4ff616900501a64abc91163" - integrity sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ== +"d3-interpolate@1.2.0 - 3", d3-interpolate@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" + integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== dependencies: - d3-color "1 - 2" + d3-color "1 - 3" d3-path@1, d3-path@^1.0.5: version "1.0.9" resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.9.tgz#48c050bb1fe8c262493a8caf5524e3e9591701cf" integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg== -d3-scale@^2.0.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-2.2.2.tgz#4e880e0b2745acaaddd3ede26a9e908a9e17b81f" - integrity sha512-LbeEvGgIb8UMcAa0EATLNX0lelKWGYDQiPdHj+gLblGVhGLyNbaCn3EvrJf0A3Y/uOOU5aD6MTh5ZFCdEwGiCw== - dependencies: - d3-array "^1.2.0" - d3-collection "1" - d3-format "1" - 
d3-interpolate "1" - d3-time "1" - d3-time-format "2" +"d3-path@1 - 2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-2.0.0.tgz#55d86ac131a0548adae241eebfb56b4582dd09d8" + integrity sha512-ZwZQxKhBnv9yHaiWd6ZU4x5BtCQ7pXszEV9CU6kRgwIQVQGLMv1oiL4M+MK/n79sYzsj+gcgpPQSctJUsLN7fA== -d3-scale@^3.0.1, d3-scale@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-3.3.0.tgz#28c600b29f47e5b9cd2df9749c206727966203f3" - integrity sha512-1JGp44NQCt5d1g+Yy+GeOnZP7xHo0ii8zsQp6PGzd+C1/dl0KGsp9A7Mxwp+1D1o4unbTTxVdU/ZOIEBoeZPbQ== +d3-scale@4.0.2, d3-scale@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" + integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== dependencies: - d3-array "^2.3.0" - d3-format "1 - 2" - d3-interpolate "1.2.0 - 2" - d3-time "^2.1.1" - d3-time-format "2 - 3" + d3-array "2.10.0 - 3" + d3-format "1 - 3" + d3-interpolate "1.2.0 - 3" + d3-time "2.1.1 - 3" + d3-time-format "2 - 4" d3-shape@^1.0.6, d3-shape@^1.2.0: version "1.3.7" @@ -8150,49 +7183,36 @@ d3-shape@^1.0.6, d3-shape@^1.2.0: dependencies: d3-path "1" -d3-time-format@2: - version "2.3.0" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-2.3.0.tgz#107bdc028667788a8924ba040faf1fbccd5a7850" - integrity sha512-guv6b2H37s2Uq/GefleCDtbe0XZAuy7Wa49VGkPVPMfLL9qObgBST3lEHJBMUp8S7NdLQAGIvr2KXk8Hc98iKQ== +d3-shape@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-2.1.0.tgz#3b6a82ccafbc45de55b57fcf956c584ded3b666f" + integrity sha512-PnjUqfM2PpskbSLTJvAzp2Wv4CZsnAgTfcVRTwW03QR3MkXF8Uo7B1y/lWkAsmbKwuecto++4NlsYcvYpXpTHA== dependencies: - d3-time "1" + d3-path "1 - 2" -"d3-time-format@2 - 3": - version "3.0.0" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-3.0.0.tgz#df8056c83659e01f20ac5da5fdeae7c08d5f1bb6" - integrity sha512-UXJh6EKsHBTjopVqZBhFysQcoXSv/5yLONZvkQ5Kk3qbwiUYkdX17Xa1PT6U1ZWXGGfB1ey5L8dKMlFq2DO0Ag== +"d3-time-format@2 - 4", d3-time-format@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== dependencies: - d3-time "1 - 2" - -d3-time@1, d3-time@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-1.1.0.tgz#b1e19d307dae9c900b7e5b25ffc5dcc249a8a0f1" - integrity sha512-Xh0isrZ5rPYYdqhAVk8VLnMEidhz5aP7htAADH6MfzgmmicPkTo8LhkLxci61/lCB7n7UmE3bN0leRt+qvkLxA== + d3-time "1 - 3" -"d3-time@1 - 2", d3-time@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-2.1.1.tgz#e9d8a8a88691f4548e68ca085e5ff956724a6682" - integrity sha512-/eIQe/eR4kCQwq7yxi7z4c6qEXf2IYGcjoWB5OOQy4Tq9Uv39/947qlDcN2TLkiTzQWzvnsuYPB9TrWaNfipKQ== +"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7" + integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q== dependencies: - d3-array "2" + d3-array "2 - 3" d3-voronoi@^1.1.2: version "1.1.4" resolved "https://registry.yarnpkg.com/d3-voronoi/-/d3-voronoi-1.1.4.tgz#dd3c78d7653d2bb359284ae478645d95944c8297" integrity 
sha512-dArJ32hchFsrQ8uMiTBLq256MpnZjeuBtdHpaDlYuQyjU0CVzCJl/BVW+SkszaAeH95D/8gxqAhgx0ouAWAfRg== -d@1, d@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" - integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== - dependencies: - es5-ext "^0.10.50" - type "^1.0.1" - -damerau-levenshtein@^1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.7.tgz#64368003512a1a6992593741a09a9d31a836f55d" - integrity sha512-VvdQIPGdWP0SqFXghj79Wf/5LArmreyMsGLa6FG6iC4t3j7j5s71TrwWmT/4akbDQIqjfACkLZmjXhA7g2oUZw== +damerau-levenshtein@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== dash-get@^1.0.2: version "1.0.2" @@ -8230,32 +7250,27 @@ date-fns@^1.27.2: resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== -dayjs@1.x: - version "1.11.6" - resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.6.tgz#2e79a226314ec3ec904e3ee1dd5a4f5e5b1c7afb" - integrity sha512-zZbY5giJAinCG+7AGaw0wIhNZ6J8AhWuSXKvuc1KAyMiRsvGQWqh4L+MomvhdAYjN+lqvVCMq1I41e3YHvXkyQ== - -dayjs@^1.11.7: +dayjs@1.x, dayjs@^1.11.7: version "1.11.7" resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.7.tgz#4b296922642f70999544d1144a2c25730fce63e2" integrity sha512-+Yw9U6YO5TQohxLcIkrXBeY73WP3ejHWVvx8XCk3gxvQDCTEmS48ZrSZCKciI7Bhl/uCMyxYtE9UqRILmFphkQ== -debounce@^1.2.0: +debounce@^1.2.0, debounce@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/debounce/-/debounce-1.2.1.tgz#38881d8f4166a5c5848020c11827b834bcb3e0a5" integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug== -debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.9: +debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@4, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: - version "4.3.2" - resolved "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" - integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== +debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" @@ -8273,7 +7288,7 @@ debug@4.3.1: dependencies: ms "2.1.2" -debug@^3.1.1, debug@^3.2.6, debug@^3.2.7: +debug@^3.2.6, debug@^3.2.7: version "3.2.7" resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== @@ -8307,18 +7322,6 @@ dedent@^0.7.0: resolved 
"https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= -deep-equal@^1.0.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" - integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== - dependencies: - is-arguments "^1.0.4" - is-date-object "^1.0.1" - is-regex "^1.0.4" - object-is "^1.0.1" - object-keys "^1.1.1" - regexp.prototype.flags "^1.2.0" - deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" @@ -8334,27 +7337,24 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-gateway@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" - integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== +default-gateway@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== dependencies: - execa "^1.0.0" - ip-regex "^2.1.0" + execa "^5.0.0" defer-to-connect@^1.0.1: version "1.1.3" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== -define-properties@^1.1.2, define-properties@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" - integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== - dependencies: - object-keys "^1.0.12" +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== -define-properties@^1.1.4: +define-properties@^1.1.3, define-properties@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== @@ -8384,19 +7384,6 @@ define-property@^2.0.2: is-descriptor "^1.0.2" isobject "^3.0.1" -del@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" - integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== - dependencies: - "@types/glob" "^7.1.1" - globby "^6.1.0" - is-path-cwd "^2.0.0" - is-path-in-cwd "^2.0.0" - p-map "^2.0.0" - pify "^4.0.1" - rimraf "^2.6.3" - delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" @@ -8427,13 +7414,10 @@ dependency-graph@^0.9.0: 
resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318" integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w== -des.js@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" - integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== - dependencies: - inherits "^2.0.1" - minimalistic-assert "^1.0.0" +dequal@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== destroy@1.2.0: version "1.2.0" @@ -8460,7 +7444,7 @@ detect-node@^2.0.4: resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== -detect-port-alt@1.1.6: +detect-port-alt@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== @@ -8468,11 +7452,21 @@ detect-port-alt@1.1.6: address "^1.0.1" debug "^2.6.0" +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + diff-sequences@^26.6.2: version "26.6.2" resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1" integrity sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q== +diff-sequences@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" + integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== + diff@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" @@ -8483,15 +7477,6 @@ diff@^5.0.0: resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== -diffie-hellman@^5.0.0: - version "5.0.3" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" - integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== - dependencies: - bn.js "^4.1.0" - miller-rabin "^4.0.0" - randombytes "^2.0.0" - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -8514,20 +7499,12 @@ dns-equal@^1.0.0: resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= -dns-packet@^1.3.1: - version "1.3.4" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" - integrity 
sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== - dependencies: - ip "^1.1.0" - safe-buffer "^5.0.1" - -dns-txt@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" - integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= +dns-packet@^5.2.2: + version "5.6.0" + resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.0.tgz#2202c947845c7a63c23ece58f2f70ff6ab4c2f7d" + integrity sha512-rza3UH1LwdHh9qyPXp8lkwpjSNk/AMD3dPytUoRoqnypDUhY0xvbdmVhWOfxO68frEfV9BU8V12Ez7ZsHGZpCQ== dependencies: - buffer-indexof "^1.0.0" + "@leichtgewicht/ip-codec" "^2.0.1" doctrine@^2.1.0: version "2.1.0" @@ -8553,7 +7530,7 @@ dom-align@^1.7.0: resolved "https://registry.yarnpkg.com/dom-align/-/dom-align-1.12.2.tgz#0f8164ebd0c9c21b0c790310493cd855892acd4b" integrity sha512-pHuazgqrsTFrGU2WLDdXxCFabkdQDx72ddkraZNih1KsMcN5qsRSTR9O4VJRlwTPCPb5COYg3LOfiMHHcPInHg== -dom-converter@^0.2: +dom-converter@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== @@ -8576,25 +7553,29 @@ dom-serializer@0: domelementtype "^2.0.1" entities "^2.0.0" +dom-serializer@^1.0.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + dom-walk@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.2.tgz#0c548bef048f4d1f2a97249002236060daa3fd84" integrity sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w== -domain-browser@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" - integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== - -domelementtype@1, domelementtype@^1.3.1: +domelementtype@1: version "1.3.1" resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== -domelementtype@^2.0.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.2.0.tgz#9a0b6c2782ed6a1c7323d42267183df9bd8b1d57" - integrity sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A== +domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== domexception@^2.0.1: version "2.0.1" @@ -8603,12 +7584,12 @@ domexception@^2.0.1: dependencies: webidl-conversions "^5.0.0" -domhandler@^2.3.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" - integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== +domhandler@^4.0.0, domhandler@^4.2.0, 
domhandler@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== dependencies: - domelementtype "1" + domelementtype "^2.2.0" domino@^2.1.6: version "2.1.6" @@ -8620,7 +7601,7 @@ dompurify@^2.3.8: resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.8.tgz#224fe9ae57d7ebd9a1ae1ac18c1c1ca3f532226f" integrity sha512-eVhaWoVibIzqdGYjwsBWodIQIaXFSB+cKDf4cfxLMsK0xiud6SE+/WCVx/Xw/UwQsa4cS3T2eITcdtmTg2UKcw== -domutils@^1.5.1, domutils@^1.7.0: +domutils@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== @@ -8628,6 +7609,15 @@ domutils@^1.5.1, domutils@^1.7.0: dom-serializer "0" domelementtype "1" +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + dot-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" @@ -8636,22 +7626,15 @@ dot-case@^3.0.4: no-case "^3.0.4" tslib "^2.0.3" -dot-prop@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" - integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== - dependencies: - is-obj "^2.0.0" - -dotenv-expand@5.1.0: +dotenv-expand@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== -dotenv@8.2.0: - version "8.2.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" - integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== +dotenv@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== dotenv@^8.2.0: version "8.6.0" @@ -8668,21 +7651,11 @@ duplexer3@^0.1.4: resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= -duplexer@^0.1.1, duplexer@^0.1.2, duplexer@~0.1.1: +duplexer@^0.1.2, duplexer@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== -duplexify@^3.4.2, duplexify@^3.6.0: - version "3.7.1" - resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" - integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== - dependencies: - end-of-stream "^1.0.0" - inherits "^2.0.1" - 
readable-stream "^2.0.0" - stream-shift "^1.0.0" - ecc-jsbn@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" @@ -8703,62 +7676,39 @@ ee-first@1.1.1: resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== -ejs@^2.6.1: - version "2.7.4" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba" - integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA== - -ejs@^3.1.5: - version "3.1.6" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.6.tgz#5bfd0a0689743bb5268b3550cceeebbc1702822a" - integrity sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw== +ejs@^3.1.5, ejs@^3.1.6: + version "3.1.9" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" + integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== dependencies: - jake "^10.6.1" - -electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.723: - version "1.3.739" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.739.tgz#f07756aa92cabd5a6eec6f491525a64fe62f98b9" - integrity sha512-+LPJVRsN7hGZ9EIUUiWCpO7l4E3qBYHNadazlucBfsXBbccDFNKUBAgzE68FnkWGJPwD/AfKhSzL+G+Iqb8A4A== + jake "^10.8.5" -electron-to-chromium@^1.4.251: - version "1.4.284" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" - integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== +electron-to-chromium@^1.4.431: + version "1.4.441" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.441.tgz#94dd9c1cbf081d83f032a4f1cd9f787e21fc24ce" + integrity sha512-LlCgQ8zgYZPymf5H4aE9itwiIWH4YlCiv1HFLmmcBeFYi5E+3eaIFnjHzYtcFQbaKfAW+CqZ9pgxo33DZuoqPg== elegant-spinner@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" integrity sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4= -elliptic@^6.5.3: - version "6.5.4" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" - integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== - dependencies: - bn.js "^4.11.9" - brorand "^1.1.0" - hash.js "^1.0.0" - hmac-drbg "^1.0.1" - inherits "^2.0.4" - minimalistic-assert "^1.0.1" - minimalistic-crypto-utils "^1.0.1" +emittery@^0.10.2: + version "0.10.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== -emittery@^0.7.1: - version "0.7.2" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.7.2.tgz#25595908e13af0f5674ab419396e2fb394cdfa82" - integrity sha512-A8OG5SR/ij3SsJdWDJdkkSYUjQdCUx6APQXem0SaEePBSRg4eymGYwBkKo1Y6DU+af/Jn2dBQqDBvjnr9Vi8nQ== - -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" - integrity 
sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== +emittery@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== emoji-regex@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.0.0: +emoji-regex@^9.2.2: version "9.2.2" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== @@ -8788,11 +7738,6 @@ emojibase@^6.0.0: resolved "https://registry.yarnpkg.com/emojibase/-/emojibase-6.1.0.tgz#c3bc281e998a0e06398416090c23bac8c5ed3ee8" integrity sha512-1GkKJPXP6tVkYJHOBSJHoGOr/6uaDxZ9xJ6H7m6PfdGXTmQgbALHLWaVRY4Gi/qf5x/gT/NUXLPuSHYLqtLtrQ== -emojis-list@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" - integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= - emojis-list@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" @@ -8803,40 +7748,27 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -end-of-stream@^1.0.0, end-of-stream@^1.1.0: +end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" -enhanced-resolve@^4.3.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.5.0.tgz#2f3cfd84dbe3b487f18f2db2ef1e064a571ca5ec" - integrity sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg== - dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.5.0" - tapable "^1.0.0" - -enquirer@^2.3.5: - version "2.3.6" - resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" - integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== +enhanced-resolve@^5.15.0: + version "5.15.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" + integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== dependencies: - ansi-colors "^4.1.1" - -entities@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" - integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== + graceful-fs "^4.2.4" + tapable "^2.2.0" entities@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" integrity 
sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -errno@^0.1.1, errno@^0.1.3, errno@~0.1.7: +errno@^0.1.1: version "0.1.8" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== @@ -8857,29 +7789,7 @@ error-stack-parser@^2.0.6: dependencies: stackframe "^1.1.1" -es-abstract@^1.17.2, es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2, es-abstract@^1.18.2: - version "1.18.2" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.2.tgz#6eb518b640262e8ddcbd48e0bc8549f82efd48a7" - integrity sha512-byRiNIQXE6HWNySaU6JohoNXzYgbBjztwFnBLUTiJmWXjaU9bSq3urQLUlNLQ292tc+gc07zYZXNZjaOoAX3sw== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - get-intrinsic "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.2" - is-callable "^1.2.3" - is-negative-zero "^2.0.1" - is-regex "^1.1.3" - is-string "^1.0.6" - object-inspect "^1.10.3" - object-keys "^1.1.1" - object.assign "^4.1.2" - string.prototype.trimend "^1.0.4" - string.prototype.trimstart "^1.0.4" - unbox-primitive "^1.0.1" - -es-abstract@^1.19.0, es-abstract@^1.20.4: +es-abstract@^1.17.2, es-abstract@^1.18.0-next.2, es-abstract@^1.19.0, es-abstract@^1.20.4: version "1.20.4" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== @@ -8909,6 +7819,18 @@ es-abstract@^1.19.0, es-abstract@^1.20.4: string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" +es-module-lexer@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" + integrity sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA== + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + es-to-primitive@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" @@ -8918,33 +7840,7 @@ es-to-primitive@^1.2.1: is-date-object "^1.0.1" is-symbol "^1.0.2" -es5-ext@^0.10.35, es5-ext@^0.10.50: - version "0.10.53" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" - integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== - dependencies: - es6-iterator "~2.0.3" - es6-symbol "~3.1.3" - next-tick "~1.0.0" - -es6-iterator@2.0.3, es6-iterator@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" - integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= - dependencies: - d "1" - es5-ext "^0.10.35" - es6-symbol "^3.1.1" - -es6-symbol@^3.1.1, es6-symbol@~3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" - integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== - dependencies: - d 
"^1.0.1" - ext "^1.1.2" - -escalade@^3.0.2, escalade@^3.1.1: +escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== @@ -8954,16 +7850,16 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@2.0.0, escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" @@ -8981,160 +7877,165 @@ escodegen@^2.0.0: optionalDependencies: source-map "~0.6.1" -eslint-config-airbnb-base@^14.2.0, eslint-config-airbnb-base@^14.2.1: - version "14.2.1" - resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-14.2.1.tgz#8a2eb38455dc5a312550193b319cdaeef042cd1e" - integrity sha512-GOrQyDtVEc1Xy20U7vsB2yAoB4nBlfH5HZJeatRXHleO+OS5Ot+MWij4Dpltw4/DyIkqUfqz1epfhVR5XWWQPA== +eslint-config-airbnb-base@^15.0.0: + version "15.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz#6b09add90ac79c2f8d723a2580e07f3925afd236" + integrity sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig== dependencies: confusing-browser-globals "^1.0.10" object.assign "^4.1.2" - object.entries "^1.1.2" + object.entries "^1.1.5" + semver "^6.3.0" -eslint-config-airbnb-typescript@^12.3.1: - version "12.3.1" - resolved "https://registry.yarnpkg.com/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-12.3.1.tgz#83ab40d76402c208eb08516260d1d6fac8f8acbc" - integrity sha512-ql/Pe6/hppYuRp4m3iPaHJqkBB7dgeEmGPQ6X0UNmrQOfTF+dXw29/ZjU2kQ6RDoLxaxOA+Xqv07Vbef6oVTWw== +eslint-config-airbnb-typescript@^17.0.0: + version "17.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-17.0.0.tgz#360dbcf810b26bbcf2ff716198465775f1c49a07" + integrity sha512-elNiuzD0kPAPTXjFWg+lE24nMdHMtuxgYoD30OyMD6yrW1AhFZPAg27VX7d3tzOErw+dgJTNWfRSDqEcXb4V0g== dependencies: - "@typescript-eslint/parser" "^4.4.1" - eslint-config-airbnb "^18.2.0" - eslint-config-airbnb-base "^14.2.0" + eslint-config-airbnb-base "^15.0.0" -eslint-config-airbnb@^18.2.0: - version "18.2.1" - resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-18.2.1.tgz#b7fe2b42f9f8173e825b73c8014b592e449c98d9" - 
integrity sha512-glZNDEZ36VdlZWoxn/bUR1r/sdFKPd1mHPbqUtkctgNG4yT2DLLtJ3D+yCV+jzZCc2V1nBVkmdknOJBZ5Hc0fg== +eslint-config-airbnb@19.0.4: + version "19.0.4" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz#84d4c3490ad70a0ffa571138ebcdea6ab085fdc3" + integrity sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew== dependencies: - eslint-config-airbnb-base "^14.2.1" + eslint-config-airbnb-base "^15.0.0" object.assign "^4.1.2" - object.entries "^1.1.2" - -eslint-config-prettier@^8.3.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.3.0.tgz#f7471b20b6fe8a9a9254cc684454202886a2dd7a" - integrity sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew== + object.entries "^1.1.5" -eslint-config-react-app@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-6.0.0.tgz#ccff9fc8e36b322902844cbd79197982be355a0e" - integrity sha512-bpoAAC+YRfzq0dsTk+6v9aHm/uqnDwayNAXleMypGl6CpxI9oXXscVHo4fk3eJPIn+rsbtNetB4r/ZIidFIE8A== - dependencies: - confusing-browser-globals "^1.0.10" +eslint-config-prettier@^8.8.0: + version "8.8.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz#bfda738d412adc917fd7b038857110efe98c9348" + integrity sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA== -eslint-import-resolver-node@^0.3.4: - version "0.3.4" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717" - integrity sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA== +eslint-config-react-app@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" + integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== + dependencies: + "@babel/core" "^7.16.0" + "@babel/eslint-parser" "^7.16.3" + "@rushstack/eslint-patch" "^1.1.0" + "@typescript-eslint/eslint-plugin" "^5.5.0" + "@typescript-eslint/parser" "^5.5.0" + babel-preset-react-app "^10.0.1" + confusing-browser-globals "^1.0.11" + eslint-plugin-flowtype "^8.0.3" + eslint-plugin-import "^2.25.3" + eslint-plugin-jest "^25.3.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.27.1" + eslint-plugin-react-hooks "^4.3.0" + eslint-plugin-testing-library "^5.0.1" + +eslint-import-resolver-node@^0.3.7: + version "0.3.7" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz#83b375187d412324a1963d84fa664377a23eb4d7" + integrity sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA== dependencies: - debug "^2.6.9" - resolve "^1.13.1" + debug "^3.2.7" + is-core-module "^2.11.0" + resolve "^1.22.1" -eslint-module-utils@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz#b51be1e473dd0de1c5ea638e22429c2490ea8233" - integrity sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A== +eslint-module-utils@^2.7.4: + version "2.8.0" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz#e439fee65fc33f6bba630ff621efc38ec0375c49" 
+ integrity sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw== dependencies: debug "^3.2.7" - pkg-dir "^2.0.0" -eslint-plugin-flowtype@^5.2.0: - version "5.7.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-5.7.2.tgz#482a42fe5d15ee614652ed256d37543d584d7bc0" - integrity sha512-7Oq/N0+3nijBnYWQYzz/Mp/7ZCpwxYvClRyW/PLAmimY9uLCBvoXsNsERcJdkKceyOjgRbFhhxs058KTrne9Mg== +eslint-plugin-flowtype@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== dependencies: - lodash "^4.17.15" + lodash "^4.17.21" string-natural-compare "^3.0.1" -eslint-plugin-import@^2.22.1: - version "2.23.3" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.23.3.tgz#8a1b073289fff03c4af0f04b6df956b7d463e191" - integrity sha512-wDxdYbSB55F7T5CC7ucDjY641VvKmlRwT0Vxh7PkY1mI4rclVRFWYfsrjDgZvwYYDZ5ee0ZtfFKXowWjqvEoRQ== +eslint-plugin-import@^2.25.3: + version "2.27.5" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz#876a6d03f52608a3e5bb439c2550588e51dd6c65" + integrity sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow== dependencies: - array-includes "^3.1.3" - array.prototype.flat "^1.2.4" - debug "^2.6.9" + array-includes "^3.1.6" + array.prototype.flat "^1.3.1" + array.prototype.flatmap "^1.3.1" + debug "^3.2.7" doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.4" - eslint-module-utils "^2.6.1" - find-up "^2.0.0" + eslint-import-resolver-node "^0.3.7" + eslint-module-utils "^2.7.4" has "^1.0.3" - is-core-module "^2.4.0" - minimatch "^3.0.4" - object.values "^1.1.3" - pkg-up "^2.0.0" - read-pkg-up "^3.0.0" - resolve "^1.20.0" - tsconfig-paths "^3.9.0" + is-core-module "^2.11.0" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.6" + resolve "^1.22.1" + semver "^6.3.0" + tsconfig-paths "^3.14.1" -eslint-plugin-jest@^24.1.0: - version "24.3.6" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-24.3.6.tgz#5f0ca019183c3188c5ad3af8e80b41de6c8e9173" - integrity sha512-WOVH4TIaBLIeCX576rLcOgjNXqP+jNlCiEmRgFTfQtJ52DpwnIQKAVGlGPAN7CZ33bW6eNfHD6s8ZbEUTQubJg== +eslint-plugin-jest@^25.3.0: + version "25.7.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== dependencies: - "@typescript-eslint/experimental-utils" "^4.0.1" + "@typescript-eslint/experimental-utils" "^5.0.0" -eslint-plugin-jsx-a11y@^6.3.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.4.1.tgz#a2d84caa49756942f42f1ffab9002436391718fd" - integrity sha512-0rGPJBbwHoGNPU73/QCLP/vveMlM1b1Z9PponxO87jfr6tuH5ligXbDT6nHSSzBC8ovX2Z+BQu7Bk5D/Xgq9zg== +eslint-plugin-jsx-a11y@^6.5.1: + version "6.7.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz#fca5e02d115f48c9a597a6894d5bcec2f7a76976" + integrity sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA== dependencies: - "@babel/runtime" "^7.11.2" - aria-query "^4.2.2" - array-includes "^3.1.1" + "@babel/runtime" "^7.20.7" + 
aria-query "^5.1.3" + array-includes "^3.1.6" + array.prototype.flatmap "^1.3.1" ast-types-flow "^0.0.7" - axe-core "^4.0.2" - axobject-query "^2.2.0" - damerau-levenshtein "^1.0.6" - emoji-regex "^9.0.0" + axe-core "^4.6.2" + axobject-query "^3.1.1" + damerau-levenshtein "^1.0.8" + emoji-regex "^9.2.2" has "^1.0.3" - jsx-ast-utils "^3.1.0" - language-tags "^1.0.5" - -eslint-plugin-prettier@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-3.4.0.tgz#cdbad3bf1dbd2b177e9825737fe63b476a08f0c7" - integrity sha512-UDK6rJT6INSfcOo545jiaOwB701uAIt2/dR7WnFQoGCVl1/EMqdANBmwUaqqQ45aXprsTGzSa39LI1PyuRBxxw== - dependencies: - prettier-linter-helpers "^1.0.0" + jsx-ast-utils "^3.3.3" + language-tags "=1.0.5" + minimatch "^3.1.2" + object.entries "^1.1.6" + object.fromentries "^2.0.6" + semver "^6.3.0" -eslint-plugin-react-hooks@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.2.0.tgz#8c229c268d468956334c943bb45fc860280f5556" - integrity sha512-623WEiZJqxR7VdxFCKLI6d6LLpwJkGPYKODnkH3D7WpOG5KM8yWueBd8TLsNAetEJNF5iJmolaAKO3F8yzyVBQ== +eslint-plugin-react-hooks@^4.3.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== -eslint-plugin-react@^7.21.5, eslint-plugin-react@^7.23.2: - version "7.23.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.23.2.tgz#2d2291b0f95c03728b55869f01102290e792d494" - integrity sha512-AfjgFQB+nYszudkxRkTFu0UR1zEQig0ArVMPloKhxwlwkzaw/fBiH0QWcBBhZONlXqQC51+nfqFrkn4EzHcGBw== +eslint-plugin-react@^7.27.1, eslint-plugin-react@^7.28.0: + version "7.32.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz#e71f21c7c265ebce01bcbc9d0955170c55571f10" + integrity sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg== dependencies: - array-includes "^3.1.3" - array.prototype.flatmap "^1.2.4" + array-includes "^3.1.6" + array.prototype.flatmap "^1.3.1" + array.prototype.tosorted "^1.1.1" doctrine "^2.1.0" - has "^1.0.3" + estraverse "^5.3.0" jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.0.4" - object.entries "^1.1.3" - object.fromentries "^2.0.4" - object.values "^1.1.3" - prop-types "^15.7.2" - resolve "^2.0.0-next.3" - string.prototype.matchall "^4.0.4" - -eslint-plugin-testing-library@^3.9.2: - version "3.10.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-3.10.2.tgz#609ec2b0369da7cf2e6d9edff5da153cc31d87bd" - integrity sha512-WAmOCt7EbF1XM8XfbCKAEzAPnShkNSwcIsAD2jHdsMUT9mZJPjLCG7pMzbcC8kK366NOuGip8HKLDC+Xk4yIdA== - dependencies: - "@typescript-eslint/experimental-utils" "^3.10.1" + minimatch "^3.1.2" + object.entries "^1.1.6" + object.fromentries "^2.0.6" + object.hasown "^1.1.2" + object.values "^1.1.6" + prop-types "^15.8.1" + resolve "^2.0.0-next.4" + semver "^6.3.0" + string.prototype.matchall "^4.0.8" -eslint-scope@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" - integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== +eslint-plugin-testing-library@^5.0.1: + version "5.11.0" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.11.0.tgz#0bad7668e216e20dd12f8c3652ca353009163121" + integrity sha512-ELY7Gefo+61OfXKlQeXNIDVVLPcvKTeiQOoMZG9TeuWa7Ln4dUNRv8JdRWBQI9Mbb427XGlVB1aa1QPZxBJM8Q== dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" + "@typescript-eslint/utils" "^5.58.0" -eslint-scope@^5.0.0, eslint-scope@^5.1.1: +eslint-scope@5.1.1, eslint-scope@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== @@ -9142,102 +8043,102 @@ eslint-scope@^5.0.0, eslint-scope@^5.1.1: esrecurse "^4.3.0" estraverse "^4.1.1" -eslint-utils@^2.0.0, eslint-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" - integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== +eslint-scope@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.0.tgz#f21ebdafda02352f103634b96dd47d9f81ca117b" + integrity sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw== dependencies: - eslint-visitor-keys "^1.1.0" - -eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" - integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + esrecurse "^4.3.0" + estraverse "^5.2.0" -eslint-visitor-keys@^2.0.0: +eslint-visitor-keys@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== -eslint-webpack-plugin@^2.5.2: - version "2.5.4" - resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-2.5.4.tgz#473b84932f1a8e2c2b8e66a402d0497bf440b986" - integrity sha512-7rYh0m76KyKSDE+B+2PUQrlNS4HJ51t3WKpkJg6vo2jFMbEPTG99cBV0Dm7LXSHucN4WGCG65wQcRiTFrj7iWw== - dependencies: - "@types/eslint" "^7.2.6" - arrify "^2.0.1" - jest-worker "^26.6.2" - micromatch "^4.0.2" - normalize-path "^3.0.0" - schema-utils "^3.0.0" +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz#c22c48f48942d08ca824cc526211ae400478a994" + integrity sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA== -eslint@^7.11.0, eslint@^7.27.0: - version "7.27.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.27.0.tgz#665a1506d8f95655c9274d84bd78f7166b07e9c7" - integrity sha512-JZuR6La2ZF0UD384lcbnd0Cgg6QJjiCwhMD6eU4h/VGPcVGwawNNzKU41tgokGXnfjOOyI6QIffthhJTPzzuRA== +eslint-webpack-plugin@^3.1.1: + version "3.2.0" + resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" + integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== dependencies: - "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" "^0.4.1" + "@types/eslint" "^7.29.0 || ^8.4.1" + jest-worker 
"^28.0.2" + micromatch "^4.0.5" + normalize-path "^3.0.0" + schema-utils "^4.0.0" + +eslint@^8.2.0, eslint@^8.3.0: + version "8.43.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.43.0.tgz#3e8c6066a57097adfd9d390b8fc93075f257a094" + integrity sha512-aaCpf2JqqKesMFGgmRPessmVKjcGXqdlAYLLC3THM8t5nBRZRQ+st5WM/hoJXkdioEXLLbXgclUpM0TXo5HX5Q== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/regexpp" "^4.4.0" + "@eslint/eslintrc" "^2.0.3" + "@eslint/js" "8.43.0" + "@humanwhocodes/config-array" "^0.11.10" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" ajv "^6.10.0" chalk "^4.0.0" cross-spawn "^7.0.2" - debug "^4.0.1" + debug "^4.3.2" doctrine "^3.0.0" - enquirer "^2.3.5" escape-string-regexp "^4.0.0" - eslint-scope "^5.1.1" - eslint-utils "^2.1.0" - eslint-visitor-keys "^2.0.0" - espree "^7.3.1" - esquery "^1.4.0" + eslint-scope "^7.2.0" + eslint-visitor-keys "^3.4.1" + espree "^9.5.2" + esquery "^1.4.2" esutils "^2.0.2" fast-deep-equal "^3.1.3" file-entry-cache "^6.0.1" - functional-red-black-tree "^1.0.1" - glob-parent "^5.0.0" - globals "^13.6.0" - ignore "^4.0.6" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + graphemer "^1.4.0" + ignore "^5.2.0" import-fresh "^3.0.0" imurmurhash "^0.1.4" is-glob "^4.0.0" - js-yaml "^3.13.1" + is-path-inside "^3.0.3" + js-yaml "^4.1.0" json-stable-stringify-without-jsonify "^1.0.1" levn "^0.4.1" lodash.merge "^4.6.2" - minimatch "^3.0.4" + minimatch "^3.1.2" natural-compare "^1.4.0" optionator "^0.9.1" - progress "^2.0.0" - regexpp "^3.1.0" - semver "^7.2.1" - strip-ansi "^6.0.0" + strip-ansi "^6.0.1" strip-json-comments "^3.1.0" - table "^6.0.9" text-table "^0.2.0" - v8-compile-cache "^2.0.3" -espree@^7.3.0, espree@^7.3.1: - version "7.3.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" - integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== +espree@^9.5.2: + version "9.5.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.5.2.tgz#e994e7dc33a082a7a82dceaf12883a829353215b" + integrity sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw== dependencies: - acorn "^7.4.0" - acorn-jsx "^5.3.1" - eslint-visitor-keys "^1.3.0" + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.4.1" esprima@^4.0.0, esprima@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -esquery@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== +esquery@^1.4.2: + version "1.5.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" + integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== dependencies: estraverse "^5.1.0" -esrecurse@^4.1.0, esrecurse@^4.3.0: +esrecurse@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== @@ -9249,15 +8150,10 @@ estraverse@^4.1.1: 
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" - integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== - -estree-walker@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362" - integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w== +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== estree-walker@^1.0.1: version "1.0.1" @@ -9302,31 +8198,11 @@ eventemitter3@^4.0.0: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== -events@^3.0.0: +events@^3.2.0: version "3.3.0" resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -eventsource@^1.0.7: - version "1.1.1" - resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.1.1.tgz#4544a35a57d7120fba4fa4c86cb4023b2c09df2f" - integrity sha512-qV5ZC0h7jYIAOhArFJgSfdyz6rALJyb270714o7ZtNnw2WSJ+eexhKtE0O8LYPRsHZHf2osHKZBxGPvm3kPkCA== - dependencies: - original "^1.0.0" - -evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - -exec-sh@^0.3.2: - version "0.3.6" - resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.6.tgz#ff264f9e325519a60cb5e273692943483cca63bc" - integrity sha512-nQn+hI3yp+oD0huYhKwvYI32+JFeq+XkNcD1GAo3Y/MjxsfVGmrrzrnzjWiNY6f+pUCP440fThsFh5gZrRAU/w== - execa@5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/execa/-/execa-5.0.0.tgz#4029b0007998a841fbd1032e5f4de86a3c1e3376" @@ -9342,32 +8218,19 @@ execa@5.0.0: signal-exit "^3.0.3" strip-final-newline "^2.0.0" -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^4.0.0: - version "4.1.0" - resolved "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a" - integrity sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== +execa@^5.0.0: + version "5.1.1" + resolved 
"https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== dependencies: - cross-spawn "^7.0.0" - get-stream "^5.0.0" - human-signals "^1.1.1" + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" is-stream "^2.0.0" merge-stream "^2.0.0" - npm-run-path "^4.0.0" - onetime "^5.1.0" - signal-exit "^3.0.2" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" strip-final-newline "^2.0.0" exit@^0.1.2: @@ -9388,19 +8251,17 @@ expand-brackets@^2.1.4: snapdragon "^0.8.1" to-regex "^3.0.1" -expect@^26.6.0, expect@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/expect/-/expect-26.6.2.tgz#c6b996bf26bf3fe18b67b2d0f51fc981ba934417" - integrity sha512-9/hlOBkQl2l/PLHJx6JjoDF6xPKcJEsUlWKb23rKE7KzeDqUZKXKNMW27KIue5JMdBV9HgmoJPcc8HtO85t9IA== +expect@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" + integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== dependencies: - "@jest/types" "^26.6.2" - ansi-styles "^4.0.0" - jest-get-type "^26.3.0" - jest-matcher-utils "^26.6.2" - jest-message-util "^26.6.2" - jest-regex-util "^26.0.0" + "@jest/types" "^27.5.1" + jest-get-type "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" -express@^4.17.1: +express@^4.17.3: version "4.18.2" resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== @@ -9437,13 +8298,6 @@ express@^4.17.1: utils-merge "1.0.1" vary "~1.1.2" -ext@^1.1.2: - version "1.4.0" - resolved "https://registry.yarnpkg.com/ext/-/ext-1.4.0.tgz#89ae7a07158f79d35517882904324077e4379244" - integrity sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A== - dependencies: - type "^2.0.0" - extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" @@ -9522,27 +8376,21 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-diff@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" - integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== - -fast-glob@^3.1.1: - version "3.2.5" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661" - integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== +fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.0" + glob-parent "^5.1.2" merge2 "^1.3.0" - micromatch "^4.0.2" - picomatch "^2.2.1" 
+ micromatch "^4.0.4" -fast-glob@^3.2.4: - version "3.2.11" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" - integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== +fast-glob@^3.2.11: + version "3.3.0" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" + integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -9621,11 +8469,6 @@ fbjs@^3.0.0: setimmediate "^1.0.5" ua-parser-js "^0.7.30" -figgy-pudding@^3.5.1: - version "3.5.2" - resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" - integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== - figures@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" @@ -9655,30 +8498,25 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" -file-loader@6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.1.1.tgz#a6f29dfb3f5933a1c350b2dbaa20ac5be0539baa" - integrity sha512-Klt8C4BjWSXYQAfhpYYkG4qHNTna4toMHEbWrI5IuVoxbU6uiDKeKAP99R8mmbJi3lvewn/jQBOgU4+NS3tDQw== +file-loader@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== dependencies: loader-utils "^2.0.0" schema-utils "^3.0.0" -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -filelist@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.2.tgz#80202f21462d4d1c2e214119b1807c1bc0380e5b" - integrity sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ== +filelist@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== dependencies: - minimatch "^3.0.4" + minimatch "^5.0.1" -filesize@6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-6.1.0.tgz#e81bdaa780e2451d714d71c0d7a4f3238d37ad00" - integrity sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg== +filesize@^8.0.6: + version "8.0.7" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== fill-range@^4.0.0: version "4.0.0" @@ -9715,15 +8553,6 @@ finalhandler@1.2.0: statuses "2.0.1" unpipe "~1.0.0" -find-cache-dir@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" - integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== - dependencies: - 
commondir "^1.0.1" - make-dir "^2.0.0" - pkg-dir "^3.0.0" - find-cache-dir@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880" @@ -9738,7 +8567,14 @@ find-root@^1.1.0: resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== -find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0: +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== @@ -9746,19 +8582,13 @@ find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -find-up@^2.0.0, find-up@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= - dependencies: - locate-path "^2.0.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: - locate-path "^3.0.0" + locate-path "^6.0.0" + path-exists "^4.0.0" find-webpack@2.2.1: version "2.2.1" @@ -9790,19 +8620,6 @@ flatted@^3.1.0: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== -flatten@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.3.tgz#c1283ac9f27b368abc1e36d1ff7b04501a30356b" - integrity sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg== - -flush-write-stream@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" - integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== - dependencies: - inherits "^2.0.3" - readable-stream "^2.3.6" - focus-lock@^0.9.1: version "0.9.2" resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.9.2.tgz#9d30918aaa99b1b97677731053d017f82a540d5b" @@ -9830,18 +8647,24 @@ forever-agent@~0.6.1: resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= -fork-ts-checker-webpack-plugin@4.1.6: - version "4.1.6" - resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-4.1.6.tgz#5055c703febcf37fa06405d400c122b905167fc5" - integrity 
sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw== +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.3" + resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz#eda2eff6e22476a2688d10661688c47f611b37f3" + integrity sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ== dependencies: - "@babel/code-frame" "^7.5.5" - chalk "^2.4.1" - micromatch "^3.1.10" + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" minimatch "^3.0.4" - semver "^5.6.0" + schema-utils "2.7.0" + semver "^7.3.2" tapable "^1.0.0" - worker-rpc "^0.1.0" form-data@4.0.0: version "4.0.0" @@ -9880,6 +8703,11 @@ forwarded@0.2.0: resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== +fraction.js@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + fragment-cache@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" @@ -9892,19 +8720,20 @@ fresh@0.5.2: resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== -from2@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" - integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= - dependencies: - inherits "^2.0.1" - readable-stream "^2.0.0" - from@~0: version "0.1.7" resolved "https://registry.npmjs.org/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" integrity sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4= +fs-extra@^10.0.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-extra@^4.0.3: version "4.0.3" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94" @@ -9914,27 +8743,9 @@ fs-extra@^4.0.3: jsonfile "^4.0.0" universalify "^0.1.0" -fs-extra@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" - integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs-extra@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" - integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs-extra@^9.0.1: +fs-extra@^9.0.0, fs-extra@^9.0.1: version "9.1.0" - resolved 
"https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== dependencies: at-least-node "^1.0.0" @@ -9942,37 +8753,17 @@ fs-extra@^9.0.1: jsonfile "^6.0.1" universalify "^2.0.0" -fs-minipass@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" - integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== - dependencies: - minipass "^3.0.0" - -fs-write-stream-atomic@^1.0.8: - version "1.0.10" - resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" - integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= - dependencies: - graceful-fs "^4.1.2" - iferr "^0.1.5" - imurmurhash "^0.1.4" - readable-stream "1 || 2" +fs-monkey@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.4.tgz#ee8c1b53d3fe8bb7e5d2c5c5dfc0168afdd2f747" + integrity sha512-INM/fWAxMICjttnD0DX1rBvinKskj5G1w+oy/pnm9u/tSlnBrzFonJMcalKJ30P8RRsPzKcCG7Q8l0jx5Fh9YQ== fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= -fsevents@^1.2.7: - version "1.2.13" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" - integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== - dependencies: - bindings "^1.5.0" - nan "^2.12.1" - -fsevents@^2.1.2, fsevents@^2.1.3, fsevents@~2.3.1: +fsevents@^2.3.2, fsevents@~2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== @@ -9992,17 +8783,12 @@ function.prototype.name@^1.1.5: es-abstract "^1.19.0" functions-have-names "^1.2.2" -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= - functions-have-names@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== -gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: +gensync@^1.0.0-beta.2: version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== @@ -10017,16 +8803,7 @@ get-dom-document@^0.1.3: resolved "https://registry.yarnpkg.com/get-dom-document/-/get-dom-document-0.1.3.tgz#d0188090e43d38dd146c467ac6e3e1f2ace7af52" integrity sha512-bZ0O00gSQgMo+wz7gU6kbbWCPh4dfDsL9ZOmVhA8TOXszl5GV56TpTuW1/Qq/QctgpjK56yyvB1vBO+wzz8Szw== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" - integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" - -get-intrinsic@^1.1.3: +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== @@ -10045,14 +8822,14 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-stream@^4.0.0, get-stream@^4.1.0: +get-stream@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== dependencies: pump "^3.0.0" -get-stream@^5.0.0, get-stream@^5.1.0: +get-stream@^5.1.0: version "5.2.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== @@ -10084,25 +8861,29 @@ getpass@^0.1.1: dependencies: assert-plus "^1.0.0" -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= - dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" - -glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.0: +glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" -glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: - version "7.1.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" - integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@7.1.6: + version "7.1.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -10111,7 +8892,7 @@ glob@^7.0.3, 
glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.0.5: +glob@^7.0.5, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.0" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== @@ -10123,7 +8904,7 @@ glob@^7.0.5: once "^1.3.0" path-is-absolute "^1.0.0" -global-modules@2.0.0: +global-modules@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== @@ -10144,24 +8925,17 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -globals@^12.1.0: - version "12.4.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" - integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== - dependencies: - type-fest "^0.8.1" - -globals@^13.6.0: - version "13.8.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.8.0.tgz#3e20f504810ce87a8d72e55aecf8435b50f4c1b3" - integrity sha512-rHtdA6+PDBIjeEvA91rpqzEvk/k3/i7EeNQiryiWuJH0Hw9cpyJMAt2jtbAwUaRdhD+573X4vWw6IcjKPasi9Q== +globals@^13.19.0: + version "13.20.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82" + integrity sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ== dependencies: type-fest "^0.20.2" -globby@11.0.1: - version "11.0.1" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.1.tgz#9a2bf107a068f3ffeabc49ad702c79ede8cfd357" - integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== +globby@11.0.3: + version "11.0.3" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" + integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== dependencies: array-union "^2.1.0" dir-glob "^3.0.1" @@ -10170,28 +8944,28 @@ globby@11.0.1: merge2 "^1.3.0" slash "^3.0.0" -globby@11.0.3, globby@^11.0.1: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== +globby@^11.0.4, globby@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: array-union "^2.1.0" dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" slash "^3.0.0" -globby@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" - integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= +globby@^13.1.1: + version "13.2.0" + resolved 
"https://registry.yarnpkg.com/globby/-/globby-13.2.0.tgz#7dd5678d765c4680c2e6d106230d86cb727cb1af" + integrity sha512-jWsQfayf13NvqKUIL3Ta+CIqMnvlaIDFveWE/dpOZ9+3AMEJozsxDvKA02zync9UuvOM8rOXzsD5GqKP4OnWPQ== dependencies: - array-union "^1.0.1" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" + dir-glob "^3.0.1" + fast-glob "^3.2.11" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^4.0.0" good-listener@^1.2.2: version "1.2.2" @@ -10217,10 +8991,20 @@ got@^9.6.0: to-readable-stream "^1.0.0" url-parse-lax "^3.0.0" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4: - version "4.2.6" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" - integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + +graphemer@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== graphql-config@^3.2.0: version "3.3.0" @@ -10253,27 +9037,13 @@ graphql-tag@2.10.3: resolved "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.10.3.tgz#ea1baba5eb8fc6339e4c4cf049dabe522b0edf03" integrity sha512-4FOv3ZKfA4WdOKJeHdz6B3F/vxBLSgmBcGeAFPf4n1F64ltJUvOOerNj0rsJxONQGdhUMynQIvd6LzB+1J5oKA== -graphql-tag@^2.10.1: - version "2.12.5" - resolved "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.12.5.tgz#5cff974a67b417747d05c8d9f5f3cb4495d0db8f" - integrity sha512-5xNhP4063d16Pz3HBtKprutsPrmHZi5IdUGOWRxA2B6VF7BIRGOHZ5WQvDmJXZuPcBg7rYwaFxvQYjqkSdR3TQ== - dependencies: - tslib "^2.1.0" - -graphql-tag@^2.11.0: +graphql-tag@^2.10.1, graphql-tag@^2.11.0, graphql-tag@^2.12.0: version "2.12.6" resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.6.tgz#d441a569c1d2537ef10ca3d1633b48725329b5f1" integrity sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg== dependencies: tslib "^2.1.0" -graphql-tag@^2.12.0: - version "2.12.4" - resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.4.tgz#d34066688a4f09e72d6f4663c74211e9b4b7c4bf" - integrity sha512-VV1U4O+9x99EkNpNmCUV5RZwq6MnK4+pGbRYWG+lA/m3uo7TSqJF81OkcOP148gFP6fzdl7JWYBrwWVTS9jXww== - dependencies: - tslib "^2.1.0" - graphql-ws@^4.4.1: version "4.5.1" resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-4.5.1.tgz#d9dc6e047c6d4ddb928ccbfb3ca3022580a89925" @@ -10289,29 +9059,11 @@ graphql.macro@^1.4.2: babel-plugin-macros "^2.5.0" graphql-tag "^2.10.1" -graphql@*: - version "15.5.0" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.5.0.tgz#39d19494dbe69d1ea719915b578bf920344a69d5" - integrity 
sha512-OmaM7y0kaK31NKG31q4YbD2beNYa6jBBKtMFT6gLYJljHLJr42IqJ8KX08u3Li/0ifzTU5HjmoOOrwa5BRLeDA== - -graphql@^15.0.0, graphql@^15.5.0: +graphql@*, graphql@^15.0.0, graphql@^15.5.0: version "15.5.1" resolved "https://registry.npmjs.org/graphql/-/graphql-15.5.1.tgz#f2f84415d8985e7b84731e7f3536f8bb9d383aad" integrity sha512-FeTRX67T3LoE3LWAxxOlW2K3Bz+rMYAC18rRguK4wgXaTZMiJwSUwDmPFo3UadAKbzirKIg5Qy+sNJXbpPRnQw== -growly@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" - integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= - -gzip-size@5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-5.1.1.tgz#cb9bee692f87c0612b232840a873904e4c135274" - integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA== - dependencies: - duplexer "^0.1.1" - pify "^4.0.1" - gzip-size@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" @@ -10349,11 +9101,6 @@ has-ansi@^2.0.0: dependencies: ansi-regex "^2.0.0" -has-bigints@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" - integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== - has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -10376,12 +9123,7 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" -has-symbols@^1.0.1, has-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" - integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== - -has-symbols@^1.0.3: +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== @@ -10424,30 +9166,13 @@ has-values@^1.0.0: is-number "^3.0.0" kind-of "^4.0.0" -has@^1.0.0, has@^1.0.3: +has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" -hash-base@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" - integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== - dependencies: - inherits "^2.0.4" - readable-stream "^3.6.0" - safe-buffer "^5.2.0" - -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" - integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.1" - hast-to-hyperscript@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" @@ -10561,11 +9286,6 @@ header-case@^2.0.4: 
capital-case "^1.0.4" tslib "^2.0.3" -hex-color-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e" - integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== - highlight.js@^10.4.1, highlight.js@~10.7.0: version "10.7.3" resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.7.3.tgz#697272e3991356e40c3cac566a74eef681756531" @@ -10590,15 +9310,6 @@ history@^5.0.0: dependencies: "@babel/runtime" "^7.7.6" -hmac-drbg@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0, hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" @@ -10611,11 +9322,6 @@ hoopy@^0.1.4: resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== -hosted-git-info@^2.1.4: - version "2.8.9" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" - integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== - hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" @@ -10626,16 +9332,6 @@ hpack.js@^2.1.6: readable-stream "^2.0.1" wbuf "^1.1.0" -hsl-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/hsl-regex/-/hsl-regex-1.0.0.tgz#d49330c789ed819e276a4c0d272dffa30b18fe6e" - integrity sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4= - -hsla-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38" - integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg= - html-encoding-sniffer@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" @@ -10643,60 +9339,54 @@ html-encoding-sniffer@^2.0.1: dependencies: whatwg-encoding "^1.0.5" -html-entities@^1.2.1, html-entities@^1.3.1: - version "1.4.0" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.4.0.tgz#cfbd1b01d2afaf9adca1b10ae7dffab98c71d2dc" - integrity sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA== +html-entities@^2.1.0, html-entities@^2.3.2: + version "2.4.0" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.4.0.tgz#edd0cee70402584c8c76cc2c0556db09d1f45061" + integrity sha512-igBTJcNNNhvZFRtm8uA6xMY6xYleeDwn3PeBCkDz7tHttv4F2hsDI2aPgNERWzvRcNYHNT3ymRaQzllmXj4YsQ== html-escaper@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== -html-minifier-terser@^5.0.1: - version "5.1.1" - resolved 
"https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-5.1.1.tgz#922e96f1f3bb60832c2634b79884096389b1f054" - integrity sha512-ZPr5MNObqnV/T9akshPKbVgyOqLmy+Bxo7juKCfTfnjNniTAMdy4hz21YQqoofMBJD2kdREaqPPdThoR78Tgxg== +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== dependencies: - camel-case "^4.1.1" - clean-css "^4.2.3" - commander "^4.1.1" + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" he "^1.2.0" - param-case "^3.0.3" + param-case "^3.0.4" relateurl "^0.2.7" - terser "^4.6.3" + terser "^5.10.0" html-void-elements@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== -html-webpack-plugin@4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-4.5.0.tgz#625097650886b97ea5dae331c320e3238f6c121c" - integrity sha512-MouoXEYSjTzCrjIxWwg8gxL5fE2X2WZJLmBYXlaJhQUH5K/b5OrqmV7T4dB7iu0xkmJ6JlUuV6fFVtnqbPopZw== - dependencies: - "@types/html-minifier-terser" "^5.0.0" - "@types/tapable" "^1.0.5" - "@types/webpack" "^4.41.8" - html-minifier-terser "^5.0.1" - loader-utils "^1.2.3" - lodash "^4.17.15" - pretty-error "^2.1.1" - tapable "^1.1.3" - util.promisify "1.0.0" - -htmlparser2@^3.10.1: - version "3.10.1" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" - integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== - dependencies: - domelementtype "^1.3.1" - domhandler "^2.3.0" - domutils "^1.5.1" - entities "^1.1.1" - inherits "^2.0.1" - readable-stream "^3.1.1" +html-webpack-plugin@^5.5.0: + version "5.5.3" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz#72270f4a78e222b5825b296e5e3e1328ad525a3e" + integrity sha512-6YrDKTuqaP/TquFH7h4srYWsZx+x6k6+FbsTm0ziCwGHDP78Unr1r9F/H4+sGmMbX08GQcJ+K64x55b+7VM/jg== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" http-cache-semantics@^4.0.0: version "4.1.1" @@ -10743,16 +9433,6 @@ http-proxy-agent@^4.0.1: agent-base "6" debug "4" -http-proxy-middleware@0.19.1: - version "0.19.1" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" - integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== - dependencies: - http-proxy "^1.17.0" - is-glob "^4.0.0" - lodash "^4.17.11" - micromatch "^3.1.10" - http-proxy-middleware@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.0.tgz#20d1ac3409199c83e5d0383ba6436b04e7acb9fe" @@ 
-10764,7 +9444,18 @@ http-proxy-middleware@2.0.0: is-plain-obj "^3.0.0" micromatch "^4.0.2" -http-proxy@^1.17.0, http-proxy@^1.18.1: +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== @@ -10782,11 +9473,6 @@ http-signature@~1.2.0: jsprim "^1.2.2" sshpk "^1.7.0" -https-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" - integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= - https-proxy-agent@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" @@ -10795,11 +9481,6 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" -human-signals@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" - integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== - human-signals@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -10817,12 +9498,17 @@ iconv-lite@0.4.24, iconv-lite@^0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -icss-utils@^4.0.0, icss-utils@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467" - integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA== +iconv-lite@^0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== dependencies: - postcss "^7.0.14" + safer-buffer ">= 2.1.2 < 3.0.0" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== idb-keyval@^5.0.2: version "5.1.5" @@ -10831,63 +9517,43 @@ idb-keyval@^5.0.2: dependencies: safari-14-idb-fix "^1.0.6" -identity-obj-proxy@3.0.0: +idb@^7.0.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/idb/-/idb-7.1.1.tgz#d910ded866d32c7ced9befc5bfdf36f572ced72b" + integrity sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ== + +identity-obj-proxy@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" - integrity sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ= + integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== dependencies: 
harmony-reflect "^1.4.6" -ieee754@^1.1.13, ieee754@^1.1.4: +ieee754@^1.1.13: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -iferr@^0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" - integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= - -ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== - -ignore@^5.1.4: - version "5.1.8" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" - integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== +ignore@^5.1.4, ignore@^5.2.0: + version "5.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= -immer@8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/immer/-/immer-8.0.1.tgz#9c73db683e2b3975c424fb0572af5889877ae656" - integrity sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA== +immer@^9.0.7: + version "9.0.21" + resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176" + integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== immutable@~3.7.6: version "3.7.6" resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.7.6.tgz#13b4d3cb12befa15482a26fe1b2ebae640071e4b" integrity sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw== -import-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" - integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= - dependencies: - import-from "^2.1.0" - -import-fresh@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" - integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= - dependencies: - caller-path "^2.0.0" - resolve-from "^3.0.0" - import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: version "3.3.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" @@ -10908,21 +9574,6 @@ import-from@4.0.0: resolved "https://registry.yarnpkg.com/import-from/-/import-from-4.0.0.tgz#2710b8d66817d232e16f4166e319248d3d5492e2" integrity sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ== -import-from@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" - integrity sha1-M1238qev/VOqpHHUuAId7ja387E= - dependencies: - resolve-from "^3.0.0" - -import-local@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" - integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== - dependencies: - pkg-dir "^3.0.0" - resolve-cwd "^2.0.0" - import-local@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.2.tgz#a8cfd0431d1de4a2199703d003e3e62364fa6db6" @@ -10946,16 +9597,6 @@ indent-string@^4.0.0: resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -indexes-of@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" - integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= - -infer-owner@^1.0.3, infer-owner@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" - integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== - inflected@^2.0.4: version "2.1.0" resolved "https://registry.npmjs.org/inflected/-/inflected-2.1.0.tgz#2816ac17a570bbbc8303ca05bca8bf9b3f959687" @@ -10969,16 +9610,11 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -inherits@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" - integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= - inherits@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" @@ -11021,14 +9657,6 @@ inquirer@^7.3.3: strip-ansi "^6.0.0" through "^2.3.6" -internal-ip@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" - integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== - dependencies: - default-gateway "^4.2.0" - ipaddr.js "^1.9.0" - internal-slot@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" @@ -11038,10 +9666,10 @@ internal-slot@^1.0.3: has "^1.0.3" side-channel "^1.0.4" -internmap@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/internmap/-/internmap-1.0.1.tgz#0017cc8a3b99605f0302f2b198d272e015e5df95" - integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw== +"internmap@1 - 2", internmap@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== invariant@^2.2.4: version "2.2.4" @@ -11050,30 +9678,15 @@ invariant@^2.2.4: dependencies: loose-envify "^1.0.0" -ip-regex@^2.1.0: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" - integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= - -ip@^1.1.0, ip@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" - integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= - -ipaddr.js@1.9.1, ipaddr.js@^1.9.0: +ipaddr.js@1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== -is-absolute-url@^2.0.0: +ipaddr.js@^2.0.1: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6" - integrity sha1-UFMN+4T8yap9vnhS6Do3uTufKqY= - -is-absolute-url@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" - integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" + integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== is-absolute@^1.0.0: version "1.0.0" @@ -11110,35 +9723,16 @@ is-alphanumerical@^1.0.0: is-alphabetical "^1.0.0" is-decimal "^1.0.0" -is-arguments@^1.0.4: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.0.tgz#62353031dfbee07ceb34656a6bde59efecae8dd9" - integrity sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg== - dependencies: - call-bind "^1.0.0" - is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= -is-arrayish@^0.3.1: - version "0.3.2" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" - integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== - is-bigint@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.2.tgz#ffb381442503235ad245ea89e45b3dbff040ee5a" integrity sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA== -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= - dependencies: - binary-extensions "^1.0.0" - is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -11163,46 +9757,15 @@ is-buffer@^2.0.0: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== -is-callable@^1.1.4, is-callable@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e" - integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ== - -is-callable@^1.2.7: +is-callable@^1.1.4, 
is-callable@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-ci@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" - integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== - dependencies: - ci-info "^2.0.0" - -is-color-stop@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-color-stop/-/is-color-stop-1.1.0.tgz#cfff471aee4dd5c9e158598fbe12967b5cdad345" - integrity sha1-z/9HGu5N1cnhWFmPvhKWe1za00U= - dependencies: - css-color-names "^0.0.4" - hex-color-regex "^1.1.0" - hsl-regex "^1.0.0" - hsla-regex "^1.0.0" - rgb-regex "^1.0.1" - rgba-regex "^1.0.0" - -is-core-module@^2.0.0, is-core-module@^2.2.0, is-core-module@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1" - integrity sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A== - dependencies: - has "^1.0.3" - -is-core-module@^2.9.0: - version "2.11.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" - integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== +is-core-module@^2.11.0, is-core-module@^2.9.0: + version "2.12.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" + integrity sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg== dependencies: has "^1.0.3" @@ -11248,12 +9811,7 @@ is-descriptor@^1.0.0, is-descriptor@^1.0.2: is-data-descriptor "^1.0.0" kind-of "^6.0.2" -is-directory@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" - integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= - -is-docker@^2.0.0: +is-docker@^2.0.0, is-docker@^2.1.1: version "2.2.1" resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== @@ -11270,7 +9828,7 @@ is-extendable@^1.0.0, is-extendable@^1.0.1: dependencies: is-plain-object "^2.0.4" -is-extglob@^2.1.0, is-extglob@^2.1.1: +is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= @@ -11309,19 +9867,19 @@ is-generator-fn@^2.0.0: resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== -is-glob@4.0.1, is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: +is-glob@4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== dependencies: is-extglob "^2.1.1" -is-glob@^3.1.0: - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" - integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: - is-extglob "^2.1.0" + is-extglob "^2.1.1" is-hexadecimal@^1.0.0: version "1.0.4" @@ -11347,11 +9905,6 @@ is-module@^1.0.0: resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= -is-negative-zero@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" - integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== - is-negative-zero@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" @@ -11379,11 +9932,6 @@ is-obj@^1.0.1: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= -is-obj@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" - integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== - is-observable@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" @@ -11391,29 +9939,10 @@ is-observable@^1.1.0: dependencies: symbol-observable "^1.1.0" -is-path-cwd@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" - integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== - -is-path-in-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" - integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== - dependencies: - is-path-inside "^2.1.0" - -is-path-inside@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" - integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== - dependencies: - path-is-inside "^1.0.2" - -is-plain-obj@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= +is-path-inside@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== is-plain-obj@^2.0.0: version "2.1.0" @@ -11447,14 +9976,6 @@ is-promise@^2.1.0: resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" integrity 
sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== -is-regex@^1.0.4, is-regex@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.3.tgz#d029f9aff6448b93ebbe3f33dac71511fdcbef9f" - integrity sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ== - dependencies: - call-bind "^1.0.2" - has-symbols "^1.0.2" - is-regex@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" @@ -11475,12 +9996,7 @@ is-relative@^1.0.0: dependencies: is-unc-path "^1.0.0" -is-resolvable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" - integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== - -is-root@2.1.0: +is-root@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== @@ -11502,12 +10018,7 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== -is-string@^1.0.5, is-string@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.6.tgz#3fe5d5992fb0d93404f32584d4b0179a71b54a5f" - integrity sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w== - -is-string@^1.0.7: +is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== @@ -11562,11 +10073,6 @@ is-windows@^1.0.1, is-windows@^1.0.2: resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" - integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= - is-wsl@^2.1.1, is-wsl@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" @@ -11579,7 +10085,7 @@ isarray@0.0.1: resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: +isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= @@ -11624,19 +10130,20 @@ isstream@~0.1.2: resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= -istanbul-lib-coverage@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec" - integrity 
sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== -istanbul-lib-instrument@^4.0.0, istanbul-lib-instrument@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" - integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== dependencies: - "@babel/core" "^7.7.5" + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.0.0" + istanbul-lib-coverage "^3.2.0" semver "^6.3.0" istanbul-lib-report@^3.0.0: @@ -11657,10 +10164,10 @@ istanbul-lib-source-maps@^4.0.0: istanbul-lib-coverage "^3.0.0" source-map "^0.6.1" -istanbul-reports@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" - integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" @@ -11670,96 +10177,99 @@ iterall@^1.2.1: resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== -jake@^10.6.1: - version "10.8.2" - resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.2.tgz#ebc9de8558160a66d82d0eadc6a2e58fbc500a7b" - integrity sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A== - dependencies: - async "0.9.x" - chalk "^2.4.2" - filelist "^1.0.1" - minimatch "^3.0.4" - -jest-changed-files@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-26.6.2.tgz#f6198479e1cc66f22f9ae1e22acaa0b429c042d0" - integrity sha512-fDS7szLcY9sCtIip8Fjry9oGf3I2ht/QT21bAHm5Dmf0mD4X3ReNUf17y+bO6fR8WgbIZTlbyG1ak/53cbRzKQ== - dependencies: - "@jest/types" "^26.6.2" - execa "^4.0.0" - throat "^5.0.0" - -jest-circus@26.6.0: - version "26.6.0" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-26.6.0.tgz#7d9647b2e7f921181869faae1f90a2629fd70705" - integrity sha512-L2/Y9szN6FJPWFK8kzWXwfp+FOR7xq0cUL4lIsdbIdwz3Vh6P1nrpcqOleSzr28zOtSHQNV9Z7Tl+KkuK7t5Ng== - dependencies: - "@babel/traverse" "^7.1.0" - "@jest/environment" "^26.6.0" - "@jest/test-result" "^26.6.0" - "@jest/types" "^26.6.0" - "@types/babel__traverse" "^7.0.4" +jake@^10.8.5: + version "10.8.7" + resolved 
"https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" + integrity sha512-ZDi3aP+fG/LchyBzUM804VjddnwfSfsdeYkwt8NcbKRvo4rFkjhs456iLFn3k2ZUWvNe4i48WACDbza8fhq2+w== + dependencies: + async "^3.2.3" + chalk "^4.0.2" + filelist "^1.0.4" + minimatch "^3.1.2" + +jest-changed-files@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" + integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== + dependencies: + "@jest/types" "^27.5.1" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" + integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" "@types/node" "*" chalk "^4.0.0" co "^4.6.0" dedent "^0.7.0" - expect "^26.6.0" + expect "^27.5.1" is-generator-fn "^2.0.0" - jest-each "^26.6.0" - jest-matcher-utils "^26.6.0" - jest-message-util "^26.6.0" - jest-runner "^26.6.0" - jest-runtime "^26.6.0" - jest-snapshot "^26.6.0" - jest-util "^26.6.0" - pretty-format "^26.6.0" - stack-utils "^2.0.2" - throat "^5.0.0" - -jest-cli@^26.6.0: - version "26.6.3" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-26.6.3.tgz#43117cfef24bc4cd691a174a8796a532e135e92a" - integrity sha512-GF9noBSa9t08pSyl3CY4frMrqp+aQXFGFkf5hEPbh/pIUFYWMK6ZLTfbmadxJVcJrdRoChlWQsA2VkJcDFK8hg== - dependencies: - "@jest/core" "^26.6.3" - "@jest/test-result" "^26.6.2" - "@jest/types" "^26.6.2" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" + integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== + dependencies: + "@jest/core" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" chalk "^4.0.0" exit "^0.1.2" - graceful-fs "^4.2.4" + graceful-fs "^4.2.9" import-local "^3.0.2" - is-ci "^2.0.0" - jest-config "^26.6.3" - jest-util "^26.6.2" - jest-validate "^26.6.2" + jest-config "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" prompts "^2.0.1" - yargs "^15.4.1" + yargs "^16.2.0" -jest-config@^26.6.3: - version "26.6.3" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-26.6.3.tgz#64f41444eef9eb03dc51d5c53b75c8c71f645349" - integrity sha512-t5qdIj/bCj2j7NFVHb2nFB4aUdfucDn3JRKgrZnplb8nieAirAzRSHP8uDEd+qV6ygzg9Pz4YG7UTJf94LPSyg== +jest-config@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" + integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== dependencies: - "@babel/core" "^7.1.0" - "@jest/test-sequencer" "^26.6.3" - "@jest/types" "^26.6.2" - babel-jest "^26.6.3" + "@babel/core" "^7.8.0" + "@jest/test-sequencer" "^27.5.1" + "@jest/types" "^27.5.1" + babel-jest "^27.5.1" chalk "^4.0.0" + ci-info "^3.2.0" deepmerge "^4.2.2" glob "^7.1.1" - graceful-fs "^4.2.4" - 
jest-environment-jsdom "^26.6.2" - jest-environment-node "^26.6.2" - jest-get-type "^26.3.0" - jest-jasmine2 "^26.6.3" - jest-regex-util "^26.0.0" - jest-resolve "^26.6.2" - jest-util "^26.6.2" - jest-validate "^26.6.2" - micromatch "^4.0.2" - pretty-format "^26.6.2" + graceful-fs "^4.2.9" + jest-circus "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-get-type "^27.5.1" + jest-jasmine2 "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runner "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^27.5.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" -jest-diff@^26.0.0, jest-diff@^26.6.2: +jest-diff@^26.0.0: version "26.6.2" resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394" integrity sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA== @@ -11769,138 +10279,166 @@ jest-diff@^26.0.0, jest-diff@^26.6.2: jest-get-type "^26.3.0" pretty-format "^26.6.2" -jest-docblock@^26.0.0: - version "26.0.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-26.0.0.tgz#3e2fa20899fc928cb13bd0ff68bd3711a36889b5" - integrity sha512-RDZ4Iz3QbtRWycd8bUEPxQsTlYazfYn/h5R65Fc6gOfwozFhoImx+affzky/FFBuqISPTqjXomoIGJVKBWoo0w== - dependencies: - detect-newline "^3.0.0" - -jest-each@^26.6.0, jest-each@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-26.6.2.tgz#02526438a77a67401c8a6382dfe5999952c167cb" - integrity sha512-Mer/f0KaATbjl8MCJ+0GEpNdqmnVmDYqCTJYTvoo7rqmRiDllmp2AYN+06F93nXcY3ur9ShIjS+CO/uD+BbH4A== +jest-diff@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" + integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== dependencies: - "@jest/types" "^26.6.2" chalk "^4.0.0" - jest-get-type "^26.3.0" - jest-util "^26.6.2" - pretty-format "^26.6.2" + diff-sequences "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" -jest-environment-jsdom@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-26.6.2.tgz#78d09fe9cf019a357009b9b7e1f101d23bd1da3e" - integrity sha512-jgPqCruTlt3Kwqg5/WVFyHIOJHsiAvhcp2qiR2QQstuG9yWox5+iHpU3ZrcBxW14T4fe5Z68jAfLRh7joCSP2Q== +jest-docblock@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" + integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== dependencies: - "@jest/environment" "^26.6.2" - "@jest/fake-timers" "^26.6.2" - "@jest/types" "^26.6.2" - "@types/node" "*" - jest-mock "^26.6.2" - jest-util "^26.6.2" - jsdom "^16.4.0" + detect-newline "^3.0.0" -jest-environment-node@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-26.6.2.tgz#824e4c7fb4944646356f11ac75b229b0035f2b0c" - integrity sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag== +jest-each@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" + integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== dependencies: - "@jest/environment" 
"^26.6.2" - "@jest/fake-timers" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + jest-get-type "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + +jest-environment-jsdom@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" + integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" "@types/node" "*" - jest-mock "^26.6.2" - jest-util "^26.6.2" + jest-mock "^27.5.1" + jest-util "^27.5.1" + jsdom "^16.6.0" + +jest-environment-node@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" + integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" jest-get-type@^26.3.0: version "26.3.0" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0" integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig== -jest-haste-map@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-26.6.2.tgz#dd7e60fe7dc0e9f911a23d79c5ff7fb5c2cafeaa" - integrity sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w== +jest-get-type@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" + integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== + +jest-haste-map@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" + integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== dependencies: - "@jest/types" "^26.6.2" + "@jest/types" "^27.5.1" "@types/graceful-fs" "^4.1.2" "@types/node" "*" anymatch "^3.0.3" fb-watchman "^2.0.0" - graceful-fs "^4.2.4" - jest-regex-util "^26.0.0" - jest-serializer "^26.6.2" - jest-util "^26.6.2" - jest-worker "^26.6.2" - micromatch "^4.0.2" - sane "^4.0.3" + graceful-fs "^4.2.9" + jest-regex-util "^27.5.1" + jest-serializer "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + micromatch "^4.0.4" walker "^1.0.7" optionalDependencies: - fsevents "^2.1.2" + fsevents "^2.3.2" -jest-jasmine2@^26.6.3: - version "26.6.3" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-26.6.3.tgz#adc3cf915deacb5212c93b9f3547cd12958f2edd" - integrity sha512-kPKUrQtc8aYwBV7CqBg5pu+tmYXlvFlSFYn18ev4gPFtrRzB15N2gW/Roew3187q2w2eHuu0MU9TJz6w0/nPEg== +jest-jasmine2@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" + integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== dependencies: - "@babel/traverse" "^7.1.0" - "@jest/environment" "^26.6.2" - "@jest/source-map" "^26.6.2" - "@jest/test-result" "^26.6.2" - "@jest/types" "^26.6.2" + 
"@jest/environment" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" "@types/node" "*" chalk "^4.0.0" co "^4.6.0" - expect "^26.6.2" + expect "^27.5.1" is-generator-fn "^2.0.0" - jest-each "^26.6.2" - jest-matcher-utils "^26.6.2" - jest-message-util "^26.6.2" - jest-runtime "^26.6.3" - jest-snapshot "^26.6.2" - jest-util "^26.6.2" - pretty-format "^26.6.2" - throat "^5.0.0" - -jest-leak-detector@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-26.6.2.tgz#7717cf118b92238f2eba65054c8a0c9c653a91af" - integrity sha512-i4xlXpsVSMeKvg2cEKdfhh0H39qlJlP5Ex1yQxwF9ubahboQYMgTtz5oML35AVA3B4Eu+YsmwaiKVev9KCvLxg== + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + throat "^6.0.1" + +jest-leak-detector@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" + integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== + dependencies: + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" + integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== dependencies: - jest-get-type "^26.3.0" - pretty-format "^26.6.2" + chalk "^4.0.0" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" -jest-matcher-utils@^26.6.0, jest-matcher-utils@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-26.6.2.tgz#8e6fd6e863c8b2d31ac6472eeb237bc595e53e7a" - integrity sha512-llnc8vQgYcNqDrqRDXWwMr9i7rS5XFiCwvh6DTP7Jqa2mqpcCBBlpCbn+trkG0KNhPu/h8rzyBkriOtBstvWhw== +jest-message-util@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" + integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.5.1" + "@types/stack-utils" "^2.0.0" chalk "^4.0.0" - jest-diff "^26.6.2" - jest-get-type "^26.3.0" - pretty-format "^26.6.2" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" -jest-message-util@^26.6.0, jest-message-util@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-26.6.2.tgz#58173744ad6fc0506b5d21150b9be56ef001ca07" - integrity sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA== +jest-message-util@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" + integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== dependencies: - "@babel/code-frame" "^7.0.0" - "@jest/types" "^26.6.2" + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.3" "@types/stack-utils" "^2.0.0" chalk "^4.0.0" - graceful-fs "^4.2.4" - micromatch "^4.0.2" - pretty-format "^26.6.2" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format 
"^28.1.3" slash "^3.0.0" - stack-utils "^2.0.2" + stack-utils "^2.0.3" -jest-mock@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302" - integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew== +jest-mock@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== dependencies: - "@jest/types" "^26.6.2" + "@jest/types" "^27.5.1" "@types/node" "*" jest-pnp-resolver@^1.2.2: @@ -11908,196 +10446,209 @@ jest-pnp-resolver@^1.2.2: resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== -jest-regex-util@^26.0.0: - version "26.0.0" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-26.0.0.tgz#d25e7184b36e39fd466c3bc41be0971e821fee28" - integrity sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A== +jest-regex-util@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" + integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== -jest-resolve-dependencies@^26.6.3: - version "26.6.3" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-26.6.3.tgz#6680859ee5d22ee5dcd961fe4871f59f4c784fb6" - integrity sha512-pVwUjJkxbhe4RY8QEWzN3vns2kqyuldKpxlxJlzEYfKSvY6/bMvxoFrYYzUO1Gx28yKWN37qyV7rIoIp2h8fTg== - dependencies: - "@jest/types" "^26.6.2" - jest-regex-util "^26.0.0" - jest-snapshot "^26.6.2" +jest-regex-util@^28.0.0: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== -jest-resolve@26.6.0: - version "26.6.0" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.0.tgz#070fe7159af87b03e50f52ea5e17ee95bbee40e1" - integrity sha512-tRAz2bwraHufNp+CCmAD8ciyCpXCs1NQxB5EJAmtCFy6BN81loFEGWKzYu26Y62lAJJe4X4jg36Kf+NsQyiStQ== +jest-resolve-dependencies@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" + integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== dependencies: - "@jest/types" "^26.6.0" - chalk "^4.0.0" - graceful-fs "^4.2.4" - jest-pnp-resolver "^1.2.2" - jest-util "^26.6.0" - read-pkg-up "^7.0.1" - resolve "^1.17.0" - slash "^3.0.0" + "@jest/types" "^27.5.1" + jest-regex-util "^27.5.1" + jest-snapshot "^27.5.1" -jest-resolve@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.2.tgz#a3ab1517217f469b504f1b56603c5bb541fbb507" - integrity sha512-sOxsZOq25mT1wRsfHcbtkInS+Ek7Q8jCHUB0ZUTP0tc/c41QHriU/NunqMfCUWsL4H3MHpvQD4QR9kSYhS7UvQ== +jest-resolve@^27.4.2, jest-resolve@^27.5.1: + version "27.5.1" + resolved 
"https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" + integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== dependencies: - "@jest/types" "^26.6.2" + "@jest/types" "^27.5.1" chalk "^4.0.0" - graceful-fs "^4.2.4" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" jest-pnp-resolver "^1.2.2" - jest-util "^26.6.2" - read-pkg-up "^7.0.1" - resolve "^1.18.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" slash "^3.0.0" -jest-runner@^26.6.0, jest-runner@^26.6.3: - version "26.6.3" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-26.6.3.tgz#2d1fed3d46e10f233fd1dbd3bfaa3fe8924be159" - integrity sha512-atgKpRHnaA2OvByG/HpGA4g6CSPS/1LK0jK3gATJAoptC1ojltpmVlYC3TYgdmGp+GLuhzpH30Gvs36szSL2JQ== +jest-runner@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" + integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== dependencies: - "@jest/console" "^26.6.2" - "@jest/environment" "^26.6.2" - "@jest/test-result" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/console" "^27.5.1" + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" "@types/node" "*" chalk "^4.0.0" - emittery "^0.7.1" - exit "^0.1.2" - graceful-fs "^4.2.4" - jest-config "^26.6.3" - jest-docblock "^26.0.0" - jest-haste-map "^26.6.2" - jest-leak-detector "^26.6.2" - jest-message-util "^26.6.2" - jest-resolve "^26.6.2" - jest-runtime "^26.6.3" - jest-util "^26.6.2" - jest-worker "^26.6.2" + emittery "^0.8.1" + graceful-fs "^4.2.9" + jest-docblock "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-haste-map "^27.5.1" + jest-leak-detector "^27.5.1" + jest-message-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runtime "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" source-map-support "^0.5.6" - throat "^5.0.0" - -jest-runtime@^26.6.0, jest-runtime@^26.6.3: - version "26.6.3" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-26.6.3.tgz#4f64efbcfac398331b74b4b3c82d27d401b8fa2b" - integrity sha512-lrzyR3N8sacTAMeonbqpnSka1dHNux2uk0qqDXVkMv2c/A3wYnvQ4EXuI013Y6+gSKSCxdaczvf4HF0mVXHRdw== - dependencies: - "@jest/console" "^26.6.2" - "@jest/environment" "^26.6.2" - "@jest/fake-timers" "^26.6.2" - "@jest/globals" "^26.6.2" - "@jest/source-map" "^26.6.2" - "@jest/test-result" "^26.6.2" - "@jest/transform" "^26.6.2" - "@jest/types" "^26.6.2" - "@types/yargs" "^15.0.0" + throat "^6.0.1" + +jest-runtime@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" + integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/globals" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" chalk "^4.0.0" - cjs-module-lexer "^0.6.0" + cjs-module-lexer "^1.0.0" collect-v8-coverage "^1.0.0" - exit "^0.1.2" + execa "^5.0.0" glob "^7.1.3" - graceful-fs "^4.2.4" - jest-config "^26.6.3" - jest-haste-map "^26.6.2" - jest-message-util "^26.6.2" - jest-mock "^26.6.2" - jest-regex-util "^26.0.0" - jest-resolve "^26.6.2" - jest-snapshot 
"^26.6.2" - jest-util "^26.6.2" - jest-validate "^26.6.2" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" slash "^3.0.0" strip-bom "^4.0.0" - yargs "^15.4.1" -jest-serializer@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-26.6.2.tgz#d139aafd46957d3a448f3a6cdabe2919ba0742d1" - integrity sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g== +jest-serializer@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" + integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== dependencies: "@types/node" "*" - graceful-fs "^4.2.4" + graceful-fs "^4.2.9" -jest-snapshot@^26.6.0, jest-snapshot@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-26.6.2.tgz#f3b0af1acb223316850bd14e1beea9837fb39c84" - integrity sha512-OLhxz05EzUtsAmOMzuupt1lHYXCNib0ECyuZ/PZOx9TrZcC8vL0x+DUG3TL+GLX3yHG45e6YGjIm0XwDc3q3og== +jest-snapshot@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" + integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" "@babel/types" "^7.0.0" - "@jest/types" "^26.6.2" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" "@types/babel__traverse" "^7.0.4" - "@types/prettier" "^2.0.0" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" chalk "^4.0.0" - expect "^26.6.2" - graceful-fs "^4.2.4" - jest-diff "^26.6.2" - jest-get-type "^26.3.0" - jest-haste-map "^26.6.2" - jest-matcher-utils "^26.6.2" - jest-message-util "^26.6.2" - jest-resolve "^26.6.2" + expect "^27.5.1" + graceful-fs "^4.2.9" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + jest-haste-map "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-util "^27.5.1" natural-compare "^1.4.0" - pretty-format "^26.6.2" + pretty-format "^27.5.1" semver "^7.3.2" -jest-util@^26.6.0, jest-util@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" - integrity sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q== +jest-util@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" + integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== dependencies: - "@jest/types" "^26.6.2" + "@jest/types" "^27.5.1" "@types/node" "*" chalk "^4.0.0" - graceful-fs "^4.2.4" - is-ci "^2.0.0" - micromatch "^4.0.2" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" -jest-validate@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-26.6.2.tgz#23d380971587150467342911c3d7b4ac57ab20ec" - integrity sha512-NEYZ9Aeyj0i5rQqbq+tpIOom0YS1u2MVu6+euBsvpgIme+FOfRmoC4R5p0JiAUpaFvFy24xgrpMknarR/93XjQ== +jest-util@^28.1.3: + version "28.1.3" + resolved 
"https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== dependencies: - "@jest/types" "^26.6.2" - camelcase "^6.0.0" + "@jest/types" "^28.1.3" + "@types/node" "*" chalk "^4.0.0" - jest-get-type "^26.3.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" + integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== + dependencies: + "@jest/types" "^27.5.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.5.1" leven "^3.1.0" - pretty-format "^26.6.2" + pretty-format "^27.5.1" -jest-watch-typeahead@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-0.6.1.tgz#45221b86bb6710b7e97baaa1640ae24a07785e63" - integrity sha512-ITVnHhj3Jd/QkqQcTqZfRgjfyRhDFM/auzgVo2RKvSwi18YMvh0WvXDJFoFED6c7jd/5jxtu4kSOb9PTu2cPVg== +jest-watch-typeahead@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" + integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== dependencies: ansi-escapes "^4.3.1" chalk "^4.0.0" - jest-regex-util "^26.0.0" - jest-watcher "^26.3.0" - slash "^3.0.0" + jest-regex-util "^28.0.0" + jest-watcher "^28.0.0" + slash "^4.0.0" + string-length "^5.0.1" + strip-ansi "^7.0.1" + +jest-watcher@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" + integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== + dependencies: + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.5.1" string-length "^4.0.1" - strip-ansi "^6.0.0" -jest-watcher@^26.3.0, jest-watcher@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-26.6.2.tgz#a5b683b8f9d68dbcb1d7dae32172d2cca0592975" - integrity sha512-WKJob0P/Em2csiVthsI68p6aGKTIcsfjH9Gsx1f0A3Italz43e3ho0geSAVsmj09RWOELP1AZ/DXyJgOgDKxXQ== +jest-watcher@^28.0.0: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" + integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== dependencies: - "@jest/test-result" "^26.6.2" - "@jest/types" "^26.6.2" + "@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" "@types/node" "*" ansi-escapes "^4.2.1" chalk "^4.0.0" - jest-util "^26.6.2" + emittery "^0.10.2" + jest-util "^28.1.3" string-length "^4.0.1" -jest-worker@^24.9.0: - version "24.9.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-24.9.0.tgz#5dbfdb5b2d322e98567898238a9697bcce67b3e5" - integrity sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw== - dependencies: - merge-stream "^2.0.0" - supports-color "^6.1.0" - -jest-worker@^26.5.0, jest-worker@^26.6.2: +jest-worker@^26.2.1: version "26.6.2" resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" 
integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== @@ -12106,14 +10657,37 @@ jest-worker@^26.5.0, jest-worker@^26.6.2: merge-stream "^2.0.0" supports-color "^7.0.0" -jest@26.6.0: - version "26.6.0" - resolved "https://registry.yarnpkg.com/jest/-/jest-26.6.0.tgz#546b25a1d8c888569dbbe93cae131748086a4a25" - integrity sha512-jxTmrvuecVISvKFFhOkjsWRZV7sFqdSUAd1ajOKY+/QE/aLBVstsJ/dX8GczLzwiT6ZEwwmZqtCUHLHHQVzcfA== +jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest-worker@^28.0.2: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^27.4.3: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" + integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== dependencies: - "@jest/core" "^26.6.0" + "@jest/core" "^27.5.1" import-local "^3.0.2" - jest-cli "^26.6.0" + jest-cli "^27.5.1" + +jiti@^1.18.2: + version "1.18.2" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" + integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== joi@^17.3.0: version "17.4.1" @@ -12144,7 +10718,7 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" -js-yaml@^4.0.0: +js-yaml@^4.0.0, js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== @@ -12156,10 +10730,10 @@ jsbn@~0.1.0: resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= -jsdom@^16.4.0: - version "16.6.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.6.0.tgz#f79b3786682065492a3da6a60a4695da983805ac" - integrity sha512-Ty1vmF4NHJkolaEmdjtxTfSfkdb8Ywarwf63f+F8/mDD1uLSSWDxDuMiZxiPhwunLrn9LOSVItWj4bLYsLN3Dg== +jsdom@^16.6.0: + version "16.7.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== dependencies: abab "^2.0.5" acorn "^8.2.4" @@ -12186,7 +10760,7 @@ jsdom@^16.4.0: whatwg-encoding "^1.0.5" whatwg-mimetype "^2.3.0" whatwg-url "^8.5.0" - ws "^7.4.5" + ws "^7.4.6" xml-name-validator "^3.0.0" jsesc@^2.5.1: @@ -12204,12 +10778,7 @@ json-buffer@3.0.0: resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= -json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" - integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== - -json-parse-even-better-errors@^2.3.0: +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== @@ -12229,6 +10798,11 @@ json-schema@0.2.3: resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= +json-schema@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" @@ -12261,26 +10835,14 @@ json2mq@^0.2.0: dependencies: string-convert "^0.2.0" -json3@^3.3.3: - version "3.3.3" - resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" - integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== - -json5@^1.0.1: +json5@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" -json5@^2.1.2: - version "2.2.0" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3" - integrity sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA== - dependencies: - minimist "^1.2.5" - -json5@^2.2.2: +json5@^2.1.2, json5@^2.2.0, json5@^2.2.2: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -12306,6 +10868,11 @@ jsonify@~0.0.0: resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM= +jsonpointer@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== + jsonwebtoken@^8.5.1: version "8.5.1" resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" @@ -12332,13 +10899,13 @@ jsprim@^1.2.2: json-schema "0.2.3" verror "1.10.0" -"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.2.0.tgz#41108d2cec408c3453c1bbe8a4aae9e1e2bd8f82" - integrity sha512-EIsmt3O3ljsU6sot/J4E1zDRxfBNrhjyf/OKjlydwgEimQuznlM4Wv7U+ueONJMyEn1WRE0K8dhi3dVAXYT24Q== +"jsx-ast-utils@^2.4.1 
|| ^3.0.0", jsx-ast-utils@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" + integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== dependencies: - array-includes "^3.1.2" - object.assign "^4.1.2" + array-includes "^3.1.5" + object.assign "^4.1.3" jsx-dom-cjs@^8.0.0: version "8.0.3" @@ -12376,11 +10943,6 @@ keyv@^3.0.0: dependencies: json-buffer "3.0.0" -killable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" - integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== - kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -12410,31 +10972,23 @@ kleur@^3.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== -klona@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.4.tgz#7bb1e3affb0cb8624547ef7e8f6708ea2e39dfc0" - integrity sha512-ZRbnvdg/NxqzC7L9Uyqzf4psi1OM4Cuc+sJAkQPjO6XkQIJTNbfK2Rsmbw8fx1p2mkZdp2FZYo2+LwXYY/uwIA== +klona@^2.0.4, klona@^2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.6.tgz#85bffbf819c03b2f53270412420a4555ef882e22" + integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== language-subtag-registry@~0.3.2: version "0.3.21" resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a" integrity sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg== -language-tags@^1.0.5: +language-tags@=1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" - integrity sha1-0yHbxNowuovzAk4ED6XBRmH5GTo= + integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== dependencies: language-subtag-registry "~0.3.2" -last-call-webpack-plugin@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz#9742df0e10e3cf46e5c0381c2de90d3a7a2d7555" - integrity sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w== - dependencies: - lodash "^4.17.5" - webpack-sources "^1.1.0" - latest-version@5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" @@ -12442,42 +10996,50 @@ latest-version@5.1.0: dependencies: package-json "^6.3.0" +launch-editor@^2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" + integrity sha512-JpDCcQnyAAzZZaZ7vEiSqL690w7dAEyLao+KC96zBplnYbJS7TYNjvM3M7y3dGz+v7aIsJk3hllWuc0kWAjyRQ== + dependencies: + picocolors "^1.0.0" + shell-quote "^1.7.3" + lazy-ass@1.6.0: version "1.6.0" resolved "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz#7999655e8646c17f089fdd187d150d3324d54513" integrity sha1-eZllXoZGwX8In90YfRUNMyTVRRM= -less-loader@^6.1.0: - version 
"6.2.0" - resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-6.2.0.tgz#8b26f621c155b342eefc24f5bd6e9dc40c42a719" - integrity sha512-Cl5h95/Pz/PWub/tCBgT1oNMFeH1WTD33piG80jn5jr12T4XbxZcjThwNXDQ7AG649WEynuIzO4b0+2Tn9Qolg== +less-loader@^7.3.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-7.3.0.tgz#f9d6d36d18739d642067a05fb5bd70c8c61317e5" + integrity sha512-Mi8915g7NMaLlgi77mgTTQvK022xKRQBIVDSyfl3ErTuBhmZBQab0mjeJjNNqGbdR+qrfTleKXqbGI4uEFavxg== dependencies: - clone "^2.1.2" - less "^3.11.3" + klona "^2.0.4" loader-utils "^2.0.0" - schema-utils "^2.7.0" + schema-utils "^3.0.0" -less-vars-to-js@^1.3.0: +less-vars-to-js@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/less-vars-to-js/-/less-vars-to-js-1.3.0.tgz#c322cf43a3c8fc3fab655da3e51a14c1499ab571" integrity sha512-xeiLLn/IMCGtdyCkYQnW8UuzoW2oYMCKg9boZRaGI58fLz5r90bNJDlqGzmVt/1Uqk75/DxIVtQSNCMkE5fRZQ== dependencies: strip-json-comments "^2.0.1" -less@^3.11.1, less@^3.11.3: - version "3.13.1" - resolved "https://registry.yarnpkg.com/less/-/less-3.13.1.tgz#0ebc91d2a0e9c0c6735b83d496b0ab0583077909" - integrity sha512-SwA1aQXGUvp+P5XdZslUOhhLnClSLIjWvJhmd+Vgib5BFIr9lMNlQwmwUNOjXThF/A0x+MCYYPeWEfeWiLRnTw== +less@^4.1.1: + version "4.1.3" + resolved "https://registry.yarnpkg.com/less/-/less-4.1.3.tgz#175be9ddcbf9b250173e0a00b4d6920a5b770246" + integrity sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA== dependencies: copy-anything "^2.0.1" - tslib "^1.10.0" + parse-node-version "^1.0.1" + tslib "^2.3.0" optionalDependencies: errno "^0.1.1" graceful-fs "^4.1.2" image-size "~0.5.0" make-dir "^2.1.0" mime "^1.4.1" - native-request "^1.0.5" + needle "^3.1.0" source-map "~0.6.0" leven@^3.1.0: @@ -12508,6 +11070,11 @@ lib0@^0.2.42, lib0@^0.2.49: dependencies: isomorphic.js "^0.2.4" +lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" + integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== + lines-and-columns@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" @@ -12557,55 +11124,24 @@ listr@^0.14.3: p-map "^2.0.0" rxjs "^6.3.3" -load-json-file@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" - integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs= - dependencies: - graceful-fs "^4.1.2" - parse-json "^4.0.0" - pify "^3.0.0" - strip-bom "^3.0.0" - -loader-runner@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" - integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== - -loader-utils@1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" - integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== - dependencies: - big.js "^5.2.2" - emojis-list "^2.0.0" - json5 "^1.0.1" +loader-runner@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity 
sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== -loader-utils@2.0.0, loader-utils@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.0.tgz#e4cace5b816d425a166b5f097e10cd12b36064b0" - integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== +loader-utils@^2.0.0, loader-utils@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" json5 "^2.1.2" -loader-utils@^1.1.0, loader-utils@^1.2.3, loader-utils@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" - integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^1.0.1" - -locate-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" - integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= - dependencies: - p-locate "^2.0.0" - path-exists "^3.0.0" +loader-utils@^3.2.0: + version "3.2.1" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.1.tgz#4fb104b599daafd82ef3e1a41fb9265f87e1f576" + integrity sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw== locate-path@^3.0.0: version "3.0.0" @@ -12622,16 +11158,18 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + lodash-es@^4.17.15: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== -lodash._reinterpolate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" - integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= - lodash.assign@^4.2.0: version "4.2.0" resolved "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" @@ -12772,25 +11310,10 @@ lodash.snakecase@^4.1.1: resolved "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" integrity sha1-OdcUo1NXFHg3rv1ktdy7Fr7Nj40= -lodash.template@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" - integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== - dependencies: - lodash._reinterpolate "^3.0.0" - lodash.templatesettings "^4.0.0" - -lodash.templatesettings@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33" - integrity 
sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== - dependencies: - lodash._reinterpolate "^3.0.0" - -lodash.truncate@^4.4.2: - version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" - integrity sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM= +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== lodash.uniq@^4.5.0: version "4.5.0" @@ -12807,7 +11330,7 @@ lodash.values@^4.3.0: resolved "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= -lodash@4.17.21, "lodash@>=3.5 <5", lodash@^4.0.1, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.7.0, lodash@~4.17.0: +lodash@4.17.21, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -12836,11 +11359,6 @@ log-update@^2.3.0: cli-cursor "^2.0.0" wrap-ansi "^3.0.1" -loglevel@^1.6.8: - version "1.7.1" - resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.7.1.tgz#005fde2f5e6e47068f935ff28573e125ef72f197" - integrity sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw== - longest-streak@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/longest-streak/-/longest-streak-2.0.4.tgz#b8599957da5b5dab64dee3fe316fa774597d90e4" @@ -12911,7 +11429,7 @@ magic-string@^0.25.0, magic-string@^0.25.7: dependencies: sourcemap-codec "^1.4.4" -make-dir@^2.0.0, make-dir@^2.1.0: +make-dir@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== @@ -12990,15 +11508,6 @@ math-expression-evaluator@^1.2.14: resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.3.7.tgz#1b62225db86af06f7ea1fd9576a34af605a5b253" integrity sha512-nrbaifCl42w37hYd6oRLvoymFK42tWB+WQTMFtksDGQMi5GvlJwnz/CsS30FFAISFLtX+A0csJ0xLiuuyyec7w== -md5.js@^1.3.4: - version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" - integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - mdast-util-definitions@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz#c5c1a84db799173b4dcf7643cda999e440c24db2" @@ -13119,21 +11628,12 @@ media-typer@0.3.0: resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== -memory-fs@^0.4.1: - version "0.4.1" - resolved 
"https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" - integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - -memory-fs@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" - integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== +memfs@^3.1.2, memfs@^3.4.3: + version "3.6.0" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6" + integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ== dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" + fs-monkey "^1.0.4" merge-descriptors@1.0.1: version "1.0.1" @@ -13145,7 +11645,7 @@ merge-stream@^2.0.0: resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -merge2@^1.3.0: +merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== @@ -13165,11 +11665,6 @@ methods@~1.1.2: resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== -microevent.ts@~0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/microevent.ts/-/microevent.ts-0.1.1.tgz#70b09b83f43df5172d0205a63025bce0f7357fa0" - integrity sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g== - micromark-extension-gfm-autolink-literal@~0.5.0: version "0.5.7" resolved "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz#53866c1f0c7ef940ae7ca1f72c6faef8fed9f204" @@ -13223,7 +11718,7 @@ micromark@^2.11.3, micromark@~2.11.0, micromark@~2.11.3: debug "^4.0.0" parse-entities "^2.0.0" -micromatch@^3.1.10, micromatch@^3.1.4: +micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== @@ -13242,40 +11737,20 @@ micromatch@^3.1.10, micromatch@^3.1.4: snapdragon "^0.8.1" to-regex "^3.0.2" -micromatch@^4.0.2, micromatch@^4.0.4: - version "4.0.4" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" - integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== - dependencies: - braces "^3.0.1" - picomatch "^2.2.3" - -miller-rabin@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" - integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== +micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity 
sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== dependencies: - bn.js "^4.0.0" - brorand "^1.0.1" + braces "^3.0.2" + picomatch "^2.3.1" -mime-db@1.47.0, "mime-db@>= 1.43.0 < 2": - version "1.47.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.47.0.tgz#8cb313e59965d3c05cfbf898915a267af46a335c" - integrity sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw== - -mime-db@1.52.0: +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": version "1.52.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@~2.1.17, mime-types@~2.1.19: - version "2.1.30" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.30.tgz#6e7be8b4c479825f85ed6326695db73f9305d62d" - integrity sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg== - dependencies: - mime-db "1.47.0" - -mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -13287,11 +11762,6 @@ mime@1.6.0, mime@^1.4.1: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mime@^2.4.4: - version "2.5.2" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe" - integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg== - mimic-fn@^1.0.0: version "1.2.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" @@ -13327,73 +11797,43 @@ mini-create-react-context@^0.4.0: "@babel/runtime" "^7.12.1" tiny-warning "^1.0.3" -mini-css-extract-plugin@0.11.3: - version "0.11.3" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.11.3.tgz#15b0910a7f32e62ffde4a7430cfefbd700724ea6" - integrity sha512-n9BA8LonkOkW1/zn+IbLPQmovsL0wMb9yx75fMJQZf2X1Zoec9yTZtyMePcyu19wPkmFbzZZA6fLTotpFhQsOA== +mini-css-extract-plugin@^2.4.5: + version "2.7.6" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.7.6.tgz#282a3d38863fddcd2e0c220aaed5b90bc156564d" + integrity sha512-Qk7HcgaPkGG6eD77mLvZS1nmxlao3j+9PkrT9Uc7HAE1id3F41+DdBRYRYkbyfNRGzm8/YWtzhw7nVPmwhqTQw== dependencies: - loader-utils "^1.1.0" - normalize-url "1.9.1" - schema-utils "^1.0.0" - webpack-sources "^1.1.0" + schema-utils "^4.0.0" -minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: +minimalistic-assert@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= - -minimatch@3.0.4, minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5: - version "1.2.7" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" - integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== - -minipass-collect@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" - integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== - dependencies: - minipass "^3.0.0" - -minipass-flush@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" - integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== +minimatch@3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: - minipass "^3.0.0" + brace-expansion "^1.1.7" -minipass-pipeline@^1.2.2: - version "1.2.4" - resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" - integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== +minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: - minipass "^3.0.0" + brace-expansion "^1.1.7" -minipass@^3.0.0, minipass@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" - integrity sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg== +minimatch@^5.0.1: + version "5.1.6" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" + integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== dependencies: - yallist "^4.0.0" + brace-expansion "^2.0.1" -minizlib@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" - integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== - dependencies: - minipass "^3.0.0" - yallist "^4.0.0" +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== 
miragejs@^0.1.0, miragejs@^0.1.41: version "0.1.41" @@ -13427,21 +11867,10 @@ miragejs@^0.1.0, miragejs@^0.1.41: lodash.values "^4.3.0" pretender "^3.4.3" -mississippi@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" - integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== - dependencies: - concat-stream "^1.5.0" - duplexify "^3.4.2" - end-of-stream "^1.1.0" - flush-write-stream "^1.0.0" - from2 "^2.1.0" - parallel-transform "^1.1.0" - pump "^3.0.0" - pumpify "^1.3.3" - stream-each "^1.1.0" - through2 "^2.0.0" +mitt@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mitt/-/mitt-2.1.0.tgz#f740577c23176c6205b121b2973514eade1b2230" + integrity sha512-ILj2TpLiysu2wkBbWjAmww7TkZb65aiQO+DkVdUTBpBXq+MHYiETENkKFMtsJZX1Lf4pe4QOrTSjIfUwN5lRdg== mixin-deep@^1.2.0: version "1.3.2" @@ -13451,14 +11880,14 @@ mixin-deep@^1.2.0: for-in "^1.0.2" is-extendable "^1.0.1" -mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@^0.5.5, mkdirp@~0.5.1: +mkdirp@^0.5.1, mkdirp@~0.5.1: version "0.5.5" resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" -mkdirp@^1.0.3, mkdirp@^1.0.4: +mkdirp@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== @@ -13480,12 +11909,7 @@ moment-timezone@^0.5.35: dependencies: moment ">= 2.9.0" -"moment@>= 2.9.0", moment@^2.24.0: - version "2.29.1" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" - integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== - -moment@^2.29.2, moment@^2.29.4: +"moment@>= 2.9.0", moment@^2.24.0, moment@^2.29.2, moment@^2.29.4: version "2.29.4" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== @@ -13495,18 +11919,6 @@ monaco-editor@^0.28.1: resolved "https://registry.yarnpkg.com/monaco-editor/-/monaco-editor-0.28.1.tgz#732788ff2172d59e6d436b206da8cac715413940" integrity sha512-P1vPqxB4B1ZFzTeR1ScggSp9/5NoQrLCq88fnlNUsuRAP1usEBN4TIpI2lw0AYIZNVIanHk0qwjze2uJwGOHUw== -move-concurrently@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" - integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= - dependencies: - aproba "^1.1.1" - copy-concurrently "^1.0.0" - fs-write-stream-atomic "^1.0.8" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.3" - ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -13522,17 +11934,12 @@ ms@2.1.3, ms@^2.1.1: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== -multicast-dns-service-types@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" - integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= - -multicast-dns@^6.0.1: - version "6.2.3" - resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" - integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== +multicast-dns@^7.2.5: + version "7.2.5" + resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== dependencies: - dns-packet "^1.3.1" + dns-packet "^5.2.2" thunky "^1.0.2" multishift@^2.0.5: @@ -13556,10 +11963,14 @@ mute-stream@0.0.8: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== -nan@^2.12.1: - version "2.14.2" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.2.tgz#f5376400695168f4cc694ac9393d0c9585eeea19" - integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ== +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" nano-css@^5.3.1: version "5.3.5" @@ -13580,10 +11991,10 @@ nanoevents@^5.1.13: resolved "https://registry.yarnpkg.com/nanoevents/-/nanoevents-5.1.13.tgz#0e49c30acbcf847c10f29f1d1e7147c0f88fa3d1" integrity sha512-JFAeG9fp0QZnRoESHjkbVFbZ9BkOXkkagUVwZVo/pkSX+Fq1VKlY+5og/8X9CYc6C7vje/CV+bwJ5M2X0+IY9Q== -nanoid@^3.1.23: - version "3.1.23" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.23.tgz#f744086ce7c2bc47ee0a8472574d5c78e4183a81" - integrity sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw== +nanoid@^3.3.6: + version "3.3.6" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" + integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== nanomatch@^1.2.9: version "1.2.13" @@ -13602,43 +12013,35 @@ nanomatch@^1.2.9: snapdragon "^0.8.1" to-regex "^3.0.1" -native-request@^1.0.5: - version "1.0.8" - resolved "https://registry.yarnpkg.com/native-request/-/native-request-1.0.8.tgz#8f66bf606e0f7ea27c0e5995eb2f5d03e33ae6fb" - integrity sha512-vU2JojJVelUGp6jRcLwToPoWGxSx23z/0iX+I77J3Ht17rf2INGjrhOoQnjVo60nQd8wVsgzKkPfRXBiVdD2ag== - -native-url@^0.2.6: - version "0.2.6" - resolved "https://registry.yarnpkg.com/native-url/-/native-url-0.2.6.tgz#ca1258f5ace169c716ff44eccbddb674e10399ae" - integrity sha512-k4bDC87WtgrdD362gZz6zoiXQrl40kYlBmpfmSjwRO1VU0V5ccwJTlxuE72F6m3V0vc1xOf6n3UCP9QyerRqmA== - dependencies: - querystring "^0.2.0" +natural-compare-lite@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" + integrity sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g== natural-compare@^1.4.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= +needle@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/needle/-/needle-3.2.0.tgz#07d240ebcabfd65c76c03afae7f6defe6469df44" + integrity sha512-oUvzXnyLiVyVGoianLijF9O/RecZUf7TkBfimjGrLM4eQhXyeJwM6GeAWccwfQ9aa4gMCZKqhAOuLaMIcQxajQ== + dependencies: + debug "^3.2.6" + iconv-lite "^0.6.3" + sax "^1.2.4" + negotiator@0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== -neo-async@^2.5.0, neo-async@^2.6.1, neo-async@^2.6.2: +neo-async@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -next-tick@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" - integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= - -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - nise@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.0.tgz#713ef3ed138252daef20ec035ab62b7a28be645c" @@ -13658,93 +12061,32 @@ no-case@^3.0.4: lower-case "^2.0.2" tslib "^2.0.3" -node-fetch@2.6.1, node-fetch@^2.6.1: +node-fetch@2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== -node-fetch@2.6.7: +node-fetch@2.6.7, node-fetch@^2.6.1: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" -node-forge@^0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" - integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== +node-forge@^1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== node-int64@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= -node-libs-browser@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" - integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== - dependencies: - assert "^1.1.1" - browserify-zlib "^0.2.0" - buffer "^4.3.0" - console-browserify "^1.1.0" - constants-browserify "^1.0.0" - crypto-browserify "^3.11.0" - domain-browser "^1.1.1" - events "^3.0.0" 
- https-browserify "^1.0.0" - os-browserify "^0.3.0" - path-browserify "0.0.1" - process "^0.11.10" - punycode "^1.2.4" - querystring-es3 "^0.2.0" - readable-stream "^2.3.3" - stream-browserify "^2.0.1" - stream-http "^2.7.2" - string_decoder "^1.0.0" - timers-browserify "^2.0.4" - tty-browserify "0.0.0" - url "^0.11.0" - util "^0.11.0" - vm-browserify "^1.0.1" - -node-modules-regexp@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" - integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= - -node-notifier@^8.0.0: - version "8.0.2" - resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-8.0.2.tgz#f3167a38ef0d2c8a866a83e318c1ba0efeb702c5" - integrity sha512-oJP/9NAdd9+x2Q+rfphB2RJCHjod70RcRLjosiPMMu5gjIfwVnOUGq2nbTjTUbmy0DJ/tFIVT30+Qe3nzl4TJg== - dependencies: - growly "^1.3.0" - is-wsl "^2.2.0" - semver "^7.3.2" - shellwords "^0.1.1" - uuid "^8.3.0" - which "^2.0.2" - -node-releases@^1.1.61, node-releases@^1.1.71: - version "1.1.72" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" - integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== - -node-releases@^2.0.6: - version "2.0.8" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.8.tgz#0f349cdc8fcfa39a92ac0be9bc48b7706292b9ae" - integrity sha512-dFSmB8fFHEH/s81Xi+Y/15DQY6VHW81nXRj86EMSL3lmuTmK1e+aT4wrFCkTbm+gSwkw4KpX+rT/pMM2c1mF+A== - -normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" - integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== - dependencies: - hosted-git-info "^2.1.4" - resolve "^1.10.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" +node-releases@^2.0.12: + version "2.0.12" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" + integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== normalize-path@^2.1.1: version "2.1.1" @@ -13763,34 +12105,17 @@ normalize-range@^0.1.2: resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= -normalize-url@1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-1.9.1.tgz#2cc0d66b31ea23036458436e3620d85954c66c3c" - integrity sha1-LMDWazHqIwNkWENuNiDYWVTGbDw= - dependencies: - object-assign "^4.0.1" - prepend-http "^1.0.0" - query-string "^4.1.0" - sort-keys "^1.0.0" - -normalize-url@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559" - integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== - normalize-url@^4.1.0: version "4.5.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== -npm-run-path@^2.0.0: - version "2.0.2" - resolved 
"https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= - dependencies: - path-key "^2.0.0" +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== -npm-run-path@^4.0.0, npm-run-path@^4.0.1: +npm-run-path@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== @@ -13804,16 +12129,18 @@ nth-check@^1.0.2: dependencies: boolbase "~1.0.0" +nth-check@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + nullthrows@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/nullthrows/-/nullthrows-1.1.1.tgz#7818258843856ae971eae4208ad7d7eb19a431b1" integrity sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw== -num2fraction@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" - integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= - number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" @@ -13843,25 +12170,17 @@ object-copy@^0.1.0: define-property "^0.2.5" kind-of "^3.0.3" -object-inspect@^1.10.3: - version "1.10.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.10.3.tgz#c2aa7d2d09f50c99375704f7a0adf24c5782d369" - integrity sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw== +object-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== object-inspect@^1.12.2, object-inspect@^1.9.0: version "1.12.2" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== -object-is@^1.0.1: - version "1.1.5" - resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" - integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -object-keys@^1.0.12, object-keys@^1.1.1: +object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== @@ -13873,17 +12192,7 @@ object-visit@^1.0.0: dependencies: isobject "^3.0.0" -object.assign@^4.1.0, object.assign@^4.1.2: - version "4.1.2" - resolved 
"https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" - integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== - dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - has-symbols "^1.0.1" - object-keys "^1.1.1" - -object.assign@^4.1.4: +object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4: version "4.1.4" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== @@ -13893,27 +12202,25 @@ object.assign@^4.1.4: has-symbols "^1.0.3" object-keys "^1.1.1" -object.entries@^1.1.0, object.entries@^1.1.2, object.entries@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.3.tgz#c601c7f168b62374541a07ddbd3e2d5e4f7711a6" - integrity sha512-ym7h7OZebNS96hn5IJeyUmaWhaSM4SVtAPPfNLQEI2MYWCO2egsITb9nab2+i/Pwibx+R0mtn+ltKJXRSeTMGg== +object.entries@^1.1.5, object.entries@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.6.tgz#9737d0e5b8291edd340a3e3264bb8a3b00d5fa23" + integrity sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w== dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.1" - has "^1.0.3" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" -object.fromentries@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.4.tgz#26e1ba5c4571c5c6f0890cef4473066456a120b8" - integrity sha512-EsFBshs5RUUpQEY1D4q/m59kMfz4YJvxuNCJcv/jWwOJr34EaVnG11ZrZa0UHB3wnzV1wx8m58T4hQL8IuNXlQ== +object.fromentries@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.6.tgz#cdb04da08c539cffa912dcd368b886e0904bfa73" + integrity sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg== dependencies: call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - has "^1.0.3" + define-properties "^1.1.4" + es-abstract "^1.20.4" -object.getownpropertydescriptors@^2.0.3, object.getownpropertydescriptors@^2.1.0: +object.getownpropertydescriptors@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz#1bd63aeacf0d5d2d2f31b5e393b03a7c601a23f7" integrity sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ== @@ -13922,6 +12229,14 @@ object.getownpropertydescriptors@^2.0.3, object.getownpropertydescriptors@^2.1.0 define-properties "^1.1.3" es-abstract "^1.18.0-next.2" +object.hasown@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.2.tgz#f919e21fad4eb38a57bc6345b3afd496515c3f92" + integrity sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.20.4" + object.omit@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-3.0.0.tgz#0e3edc2fce2ba54df5577ff529f6d97bd8a522af" @@ -13936,15 +12251,14 @@ object.pick@^1.3.0: dependencies: isobject "^3.0.1" -object.values@^1.1.0, object.values@^1.1.3: - version "1.1.3" - resolved 
"https://registry.yarnpkg.com/object.values/-/object.values-1.1.3.tgz#eaa8b1e17589f02f698db093f7c62ee1699742ee" - integrity sha512-nkF6PfDB9alkOUxpf1HNm/QlkeW3SReqL5WXeBLpEJJnlPSvRaDQpW3gQTksTN3fgJX4hL42RzKyOin6ff3tyw== +object.values@^1.1.0, object.values@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.6.tgz#4abbaa71eba47d63589d402856f908243eea9b1d" + integrity sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw== dependencies: call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - has "^1.0.3" + define-properties "^1.1.4" + es-abstract "^1.20.4" obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" @@ -13984,7 +12298,7 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -open@^7.0.2, open@^7.3.1: +open@^7.3.1: version "7.4.2" resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321" integrity sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q== @@ -13992,12 +12306,14 @@ open@^7.0.2, open@^7.3.1: is-docker "^2.0.0" is-wsl "^2.1.1" -opn@^5.5.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" - integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== +open@^8.0.9, open@^8.4.0: + version "8.4.2" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== dependencies: - is-wsl "^1.1.0" + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" optimism@^0.16.0: version "0.16.1" @@ -14007,14 +12323,6 @@ optimism@^0.16.0: "@wry/context" "^0.6.0" "@wry/trie" "^0.3.0" -optimize-css-assets-webpack-plugin@5.0.4: - version "5.0.4" - resolved "https://registry.yarnpkg.com/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-5.0.4.tgz#85883c6528aaa02e30bbad9908c92926bb52dc90" - integrity sha512-wqd6FdI2a5/FdoiCNNkEvLeA//lHHfG24Ln2Xm2qqdIk4aOlsR18jwpyOihqQ8849W3qu2DX8fOYxpvTMj+93A== - dependencies: - cssnano "^4.1.10" - last-call-webpack-plugin "^3.0.0" - optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" @@ -14044,18 +12352,6 @@ orderedmap@^2.0.0: resolved "https://registry.yarnpkg.com/orderedmap/-/orderedmap-2.1.0.tgz#819457082fa3a06abd316d83a281a1ca467437cd" integrity sha512-/pIFexOm6S70EPdznemIz3BQZoJ4VTFrhqzu0ACBqBgeLsLxq8e6Jim63ImIfwW/zAD1AlXpRMlOv3aghmo4dA== -original@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" - integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== - dependencies: - url-parse "^1.4.3" - -os-browserify@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" - integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= - os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" @@ -14066,16 +12362,6 @@ p-cancelable@^1.0.0: resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" integrity 
sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== -p-each-series@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.2.0.tgz#105ab0357ce72b202a8a8b94933672657b5e2a9a" - integrity sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA== - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= - p-limit@3.1.0, p-limit@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" @@ -14083,13 +12369,6 @@ p-limit@3.1.0, p-limit@^3.0.2: dependencies: yocto-queue "^0.1.0" -p-limit@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" - integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== - dependencies: - p-try "^1.0.0" - p-limit@^2.0.0, p-limit@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" @@ -14097,13 +12376,6 @@ p-limit@^2.0.0, p-limit@^2.2.0: dependencies: p-try "^2.0.0" -p-locate@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" - integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= - dependencies: - p-limit "^1.1.0" - p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" @@ -14118,29 +12390,25 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + p-map@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== -p-map@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" - integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== - dependencies: - aggregate-error "^3.0.0" - -p-retry@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" - integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== +p-retry@^4.5.0: + version "4.6.2" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== dependencies: - retry "^0.12.0" - -p-try@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" - integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + "@types/retry" "0.12.0" + retry "^0.13.1" p-try@^2.0.0: version "2.2.0" @@ -14157,21 +12425,7 @@ package-json@^6.3.0: registry-url "^5.0.0" semver "^6.2.0" -pako@~1.0.5: - version "1.0.11" - 
resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" - integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== - -parallel-transform@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" - integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== - dependencies: - cyclist "^1.0.1" - inherits "^2.0.3" - readable-stream "^2.1.5" - -param-case@^3.0.3, param-case@^3.0.4: +param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== @@ -14191,17 +12445,6 @@ parenthesis@^3.1.8: resolved "https://registry.yarnpkg.com/parenthesis/-/parenthesis-3.1.8.tgz#3457fccb8f05db27572b841dad9d2630b912f125" integrity sha512-KF/U8tk54BgQewkJPvB4s/US3VQY68BRDpH638+7O/n58TpnwiwnOtGIOsT2/i+M78s61BBpeC83STB88d8sqw== -parse-asn1@^5.0.0, parse-asn1@^5.1.5: - version "5.1.6" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" - integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== - dependencies: - asn1.js "^5.2.0" - browserify-aes "^1.0.0" - evp_bytestokey "^1.0.0" - pbkdf2 "^3.0.3" - safe-buffer "^5.1.1" - parse-entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8" @@ -14228,15 +12471,7 @@ parse-filepath@^1.0.2: map-cache "^0.2.0" path-root "^0.1.1" -parse-json@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" - integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= - dependencies: - error-ex "^1.3.1" - json-parse-better-errors "^1.0.1" - -parse-json@^5.0.0: +parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== @@ -14246,6 +12481,11 @@ parse-json@^5.0.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" +parse-node-version@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b" + integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA== + parse5@6.0.1, parse5@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" @@ -14269,11 +12509,6 @@ pascalcase@^0.1.1: resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= -path-browserify@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" - integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== - path-case@^3.0.4: version "3.0.4" resolved 
"https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f" @@ -14282,11 +12517,6 @@ path-case@^3.0.4: dot-case "^3.0.4" tslib "^2.0.3" -path-dirname@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" - integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= - path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -14302,22 +12532,12 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= -path-is-inside@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" - integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= - -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= - path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-parse@^1.0.6, path-parse@^1.0.7: +path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== @@ -14346,13 +12566,6 @@ path-to-regexp@^1.7.0: dependencies: isarray "0.0.1" -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - dependencies: - pify "^3.0.0" - path-type@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" @@ -14365,79 +12578,40 @@ pause-stream@0.0.11: dependencies: through "~2.3" -pbkdf2@^3.0.3: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" - integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= +picocolors@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + picocolors@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3: - version "2.3.0" - 
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972" - integrity sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw== +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -pify@^2.0.0: +pify@^2.3.0: version "2.3.0" - resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= - -pirates@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" - integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== - dependencies: - node-modules-regexp "^1.0.0" - -pkg-dir@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" - integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= - dependencies: - find-up "^2.1.0" - -pkg-dir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" - integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== - dependencies: - find-up "^3.0.0" +pirates@^4.0.1, pirates@^4.0.4: + version "4.0.6" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== pkg-dir@^4.1.0, pkg-dir@^4.2.0: version "4.2.0" @@ -14446,707 +12620,569 @@ pkg-dir@^4.1.0, pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" -pkg-up@3.1.0: +pkg-up@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== dependencies: find-up "^3.0.0" -pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f" - integrity sha1-yBmscoBZpGHKscOImivjxJoATX8= - dependencies: - 
find-up "^2.1.0" - -pnp-webpack-plugin@1.6.4: - version "1.6.4" - resolved "https://registry.yarnpkg.com/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz#c9711ac4dc48a685dabafc86f8b6dd9f8df84149" - integrity sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg== - dependencies: - ts-pnp "^1.1.6" - -portfinder@^1.0.26: - version "1.0.28" - resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.28.tgz#67c4622852bd5374dd1dd900f779f53462fac778" - integrity sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA== - dependencies: - async "^2.6.2" - debug "^3.1.1" - mkdirp "^0.5.5" - posix-character-classes@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= -postcss-attribute-case-insensitive@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-4.0.2.tgz#d93e46b504589e94ac7277b0463226c68041a880" - integrity sha512-clkFxk/9pcdb4Vkn0hAHq3YnxBQ2p0CGD1dy24jN+reBck+EWxMbxSUqN4Yj7t0w8csl87K6p0gxBe1utkJsYA== +postcss-attribute-case-insensitive@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" + integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== dependencies: - postcss "^7.0.2" - postcss-selector-parser "^6.0.2" + postcss-selector-parser "^6.0.10" -postcss-browser-comments@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-3.0.0.tgz#1248d2d935fb72053c8e1f61a84a57292d9f65e9" - integrity sha512-qfVjLfq7HFd2e0HW4s1dvU8X080OZdG46fFbIBFjW7US7YPDcWfRvdElvwMJr2LI6hMmD+7LnH2HcmXTs+uOig== - dependencies: - postcss "^7" +postcss-browser-comments@^4: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== -postcss-calc@^7.0.1: - version "7.0.5" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-7.0.5.tgz#f8a6e99f12e619c2ebc23cf6c486fdc15860933e" - integrity sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg== +postcss-calc@^8.2.3: + version "8.2.4" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== dependencies: - postcss "^7.0.27" - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.0.2" + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" -postcss-color-functional-notation@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0" - integrity sha512-ZBARCypjEDofW4P6IdPVTLhDNXPRn8T2s1zHbZidW6rPaaZvcnCS2soYFIQJrMZSxiePJ2XIYTlcb2ztr/eT2g== +postcss-clamp@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" + integrity 
sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== dependencies: - postcss "^7.0.2" - postcss-values-parser "^2.0.0" + postcss-value-parser "^4.2.0" -postcss-color-gray@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-color-gray/-/postcss-color-gray-5.0.0.tgz#532a31eb909f8da898ceffe296fdc1f864be8547" - integrity sha512-q6BuRnAGKM/ZRpfDascZlIZPjvwsRye7UDNalqVz3s7GDxMtqPY6+Q871liNxsonUw8oC61OG+PSaysYpl1bnw== +postcss-color-functional-notation@^4.2.4: + version "4.2.4" + resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" + integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== dependencies: - "@csstools/convert-colors" "^1.4.0" - postcss "^7.0.5" - postcss-values-parser "^2.0.0" + postcss-value-parser "^4.2.0" -postcss-color-hex-alpha@^5.0.3: - version "5.0.3" - resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-5.0.3.tgz#a8d9ca4c39d497c9661e374b9c51899ef0f87388" - integrity sha512-PF4GDel8q3kkreVXKLAGNpHKilXsZ6xuu+mOQMHWHLPNyjiUBOr75sp5ZKJfmv1MCus5/DWUGcK9hm6qHEnXYw== +postcss-color-hex-alpha@^8.0.4: + version "8.0.4" + resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" + integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== dependencies: - postcss "^7.0.14" - postcss-values-parser "^2.0.1" + postcss-value-parser "^4.2.0" -postcss-color-mod-function@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/postcss-color-mod-function/-/postcss-color-mod-function-3.0.3.tgz#816ba145ac11cc3cb6baa905a75a49f903e4d31d" - integrity sha512-YP4VG+xufxaVtzV6ZmhEtc+/aTXH3d0JLpnYfxqTvwZPbJhWqp8bSY3nfNzNRFLgB4XSaBA82OE4VjOOKpCdVQ== +postcss-color-rebeccapurple@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" + integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== dependencies: - "@csstools/convert-colors" "^1.4.0" - postcss "^7.0.2" - postcss-values-parser "^2.0.0" + postcss-value-parser "^4.2.0" -postcss-color-rebeccapurple@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-4.0.1.tgz#c7a89be872bb74e45b1e3022bfe5748823e6de77" - integrity sha512-aAe3OhkS6qJXBbqzvZth2Au4V3KieR5sRQ4ptb2b2O8wgvB3SJBsdG+jsn2BZbbwekDG8nTfcCNKcSfe/lEy8g== +postcss-colormin@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.1.tgz#86c27c26ed6ba00d96c79e08f3ffb418d1d1988f" + integrity sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ== dependencies: - postcss "^7.0.2" - postcss-values-parser "^2.0.0" + browserslist "^4.21.4" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" -postcss-colormin@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-4.0.3.tgz#ae060bce93ed794ac71264f08132d550956bd381" - integrity sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw== +postcss-convert-values@^5.1.3: + version "5.1.3" + resolved 
"https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz#04998bb9ba6b65aa31035d669a6af342c5f9d393" + integrity sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA== dependencies: - browserslist "^4.0.0" - color "^3.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + browserslist "^4.21.4" + postcss-value-parser "^4.2.0" -postcss-convert-values@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-4.0.1.tgz#ca3813ed4da0f812f9d43703584e449ebe189a7f" - integrity sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ== +postcss-custom-media@^8.0.2: + version "8.0.2" + resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" + integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-custom-media@^7.0.8: - version "7.0.8" - resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-7.0.8.tgz#fffd13ffeffad73621be5f387076a28b00294e0c" - integrity sha512-c9s5iX0Ge15o00HKbuRuTqNndsJUbaXdiNsksnVH8H4gdc+zbLzr/UasOwNG6CTDpLFekVY4672eWdiiWu2GUg== +postcss-custom-properties@^12.1.10: + version "12.1.11" + resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-12.1.11.tgz#d14bb9b3989ac4d40aaa0e110b43be67ac7845cf" + integrity sha512-0IDJYhgU8xDv1KY6+VgUwuQkVtmYzRwu+dMjnmdMafXYv86SWqfxkc7qdDvWS38vsjaEtv8e0vGOUQrAiMBLpQ== dependencies: - postcss "^7.0.14" + postcss-value-parser "^4.2.0" -postcss-custom-properties@^8.0.11: - version "8.0.11" - resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-8.0.11.tgz#2d61772d6e92f22f5e0d52602df8fae46fa30d97" - integrity sha512-nm+o0eLdYqdnJ5abAJeXp4CEU1c1k+eB2yMCvhgzsds/e0umabFrN6HoTy/8Q4K5ilxERdl/JD1LO5ANoYBeMA== +postcss-custom-selectors@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" + integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== dependencies: - postcss "^7.0.17" - postcss-values-parser "^2.0.1" + postcss-selector-parser "^6.0.4" -postcss-custom-selectors@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-5.1.2.tgz#64858c6eb2ecff2fb41d0b28c9dd7b3db4de7fba" - integrity sha512-DSGDhqinCqXqlS4R7KGxL1OSycd1lydugJ1ky4iRXPHdBRiozyMHrdu0H3o7qNOCiZwySZTUI5MV0T8QhCLu+w== +postcss-dir-pseudo-class@^6.0.5: + version "6.0.5" + resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" + integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== dependencies: - postcss "^7.0.2" - postcss-selector-parser "^5.0.0-rc.3" + postcss-selector-parser "^6.0.10" -postcss-dir-pseudo-class@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-5.0.0.tgz#6e3a4177d0edb3abcc85fdb6fbb1c26dabaeaba2" - integrity sha512-3pm4oq8HYWMZePJY+5ANriPs3P07q+LW6FAdTlkFH2XqDdP4HeeJYMOzn0HYLhRSjBO3fhiqSwwU9xEULSrPgw== - dependencies: - postcss "^7.0.2" 
- postcss-selector-parser "^5.0.0-rc.3" +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" + integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== -postcss-discard-comments@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-4.0.2.tgz#1fbabd2c246bff6aaad7997b2b0918f4d7af4033" - integrity sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg== - dependencies: - postcss "^7.0.0" +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== -postcss-discard-duplicates@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-4.0.2.tgz#3fe133cd3c82282e550fc9b239176a9207b784eb" - integrity sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ== - dependencies: - postcss "^7.0.0" +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== -postcss-discard-empty@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-4.0.1.tgz#c8c951e9f73ed9428019458444a02ad90bb9f765" - integrity sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w== - dependencies: - postcss "^7.0.0" +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== -postcss-discard-overridden@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-4.0.1.tgz#652aef8a96726f029f5e3e00146ee7a4e755ff57" - integrity sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg== +postcss-double-position-gradients@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" + integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== dependencies: - postcss "^7.0.0" + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" -postcss-double-position-gradients@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-1.0.0.tgz#fc927d52fddc896cb3a2812ebc5df147e110522e" - integrity sha512-G+nV8EnQq25fOI8CH/B6krEohGWnF5+3A6H/+JEpOncu5dCnkS1QQ6+ct3Jkaepw1NGVqqOZH6lqrm244mCftA== +postcss-env-function@^4.0.6: + version "4.0.6" + resolved 
"https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" + integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== dependencies: - postcss "^7.0.5" - postcss-values-parser "^2.0.0" + postcss-value-parser "^4.2.0" -postcss-env-function@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-2.0.2.tgz#0f3e3d3c57f094a92c2baf4b6241f0b0da5365d7" - integrity sha512-rwac4BuZlITeUbiBq60h/xbLzXY43qOsIErngWa4l7Mt+RaSkT7QBjXVGTcBHupykkblHMDrBFh30zchYPaOUw== - dependencies: - postcss "^7.0.2" - postcss-values-parser "^2.0.0" +postcss-flexbugs-fixes@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== -postcss-flexbugs-fixes@4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-4.2.1.tgz#9218a65249f30897deab1033aced8578562a6690" - integrity sha512-9SiofaZ9CWpQWxOwRh1b/r85KD5y7GgvsNt1056k6OYLvWUun0czCvogfJgylC22uJTwW1KzY3Gz65NZRlvoiQ== +postcss-focus-visible@^6.0.4: + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" + integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== dependencies: - postcss "^7.0.26" + postcss-selector-parser "^6.0.9" -postcss-focus-visible@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-4.0.0.tgz#477d107113ade6024b14128317ade2bd1e17046e" - integrity sha512-Z5CkWBw0+idJHSV6+Bgf2peDOFf/x4o+vX/pwcNYrWpXFrSfTkQ3JQ1ojrq9yS+upnAlNRHeg8uEwFTgorjI8g== +postcss-focus-within@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" + integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== dependencies: - postcss "^7.0.2" + postcss-selector-parser "^6.0.9" -postcss-focus-within@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-3.0.0.tgz#763b8788596cee9b874c999201cdde80659ef680" - integrity sha512-W0APui8jQeBKbCGZudW37EeMCjDeVxKgiYfIIEo8Bdh5SpB9sxds/Iq8SEuzS0Q4YFOlG7EPFulbbxujpkrV2w== - dependencies: - postcss "^7.0.2" +postcss-font-variant@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== -postcss-font-variant@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-4.0.1.tgz#42d4c0ab30894f60f98b17561eb5c0321f502641" - integrity sha512-I3ADQSTNtLTTd8uxZhtSOrTCQ9G4qUVKPjHiDk0bV75QSxXjVWiJVJ2VLdspGUi9fbW9BcjKJoRvxAH1pckqmA== - dependencies: - postcss "^7.0.2" +postcss-gap-properties@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" + integrity 
sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== -postcss-gap-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-2.0.0.tgz#431c192ab3ed96a3c3d09f2ff615960f902c1715" - integrity sha512-QZSqDaMgXCHuHTEzMsS2KfVDOq7ZFiknSpkrPJY6jmxbugUPTuSzs/vuE5I3zv0WAS+3vhrlqhijiprnuQfzmg== +postcss-image-set-function@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" + integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== dependencies: - postcss "^7.0.2" + postcss-value-parser "^4.2.0" -postcss-image-set-function@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-3.0.1.tgz#28920a2f29945bed4c3198d7df6496d410d3f288" - integrity sha512-oPTcFFip5LZy8Y/whto91L9xdRHCWEMs3e1MdJxhgt4jy2WYXfhkng59fH5qLXSCPN8k4n94p1Czrfe5IOkKUw== +postcss-import@^15.1.0: + version "15.1.0" + resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" + integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== dependencies: - postcss "^7.0.2" - postcss-values-parser "^2.0.0" + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" -postcss-initial@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-3.0.4.tgz#9d32069a10531fe2ecafa0b6ac750ee0bc7efc53" - integrity sha512-3RLn6DIpMsK1l5UUy9jxQvoDeUN4gP939tDcKUHD/kM8SGSKbFAnvkpFpj3Bhtz3HGk1jWY5ZNWX6mPta5M9fg== - dependencies: - postcss "^7.0.2" +postcss-initial@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== -postcss-lab-function@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-2.0.1.tgz#bb51a6856cd12289ab4ae20db1e3821ef13d7d2e" - integrity sha512-whLy1IeZKY+3fYdqQFuDBf8Auw+qFuVnChWjmxm/UhHWqNHZx+B99EwxTvGYmUBqe3Fjxs4L1BoZTJmPu6usVg== +postcss-js@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" + integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== dependencies: - "@csstools/convert-colors" "^1.4.0" - postcss "^7.0.2" - postcss-values-parser "^2.0.0" + camelcase-css "^2.0.1" -postcss-load-config@^2.0.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.2.tgz#c5ea504f2c4aef33c7359a34de3573772ad7502a" - integrity sha512-/rDeGV6vMUo3mwJZmeHfEDvwnTKKqQ0S7OHUi/kJvvtx3aWtyWG2/0ZWnzCt2keEclwN6Tf0DST2v9kITdOKYw== +postcss-lab-function@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" + integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== dependencies: - cosmiconfig "^5.0.0" - import-cwd "^2.0.0" + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" -postcss-loader@3.0.0: - version "3.0.0" - 
resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" - integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== +postcss-load-config@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.1.tgz#152383f481c2758274404e4962743191d73875bd" + integrity sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA== dependencies: - loader-utils "^1.1.0" - postcss "^7.0.0" - postcss-load-config "^2.0.0" - schema-utils "^1.0.0" + lilconfig "^2.0.5" + yaml "^2.1.1" -postcss-logical@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-3.0.0.tgz#2495d0f8b82e9f262725f75f9401b34e7b45d5b5" - integrity sha512-1SUKdJc2vuMOmeItqGuNaC+N8MzBWFWEkAnRnLpFYj1tGGa7NqyVBujfRtgNa2gXR+6RkGUiB2O5Vmh7E2RmiA== +postcss-loader@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== dependencies: - postcss "^7.0.2" + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" -postcss-media-minmax@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-4.0.0.tgz#b75bb6cbc217c8ac49433e12f22048814a4f5ed5" - integrity sha512-fo9moya6qyxsjbFAYl97qKO9gyre3qvbMnkOZeZwlsW6XYFsvs2DMGDlchVLfAd8LHPZDxivu/+qW2SMQeTHBw== - dependencies: - postcss "^7.0.2" +postcss-logical@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" + integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== -postcss-merge-longhand@^4.0.11: - version "4.0.11" - resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-4.0.11.tgz#62f49a13e4a0ee04e7b98f42bb16062ca2549e24" - integrity sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw== +postcss-media-minmax@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== + +postcss-merge-longhand@^5.1.7: + version "5.1.7" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz#24a1bdf402d9ef0e70f568f39bdc0344d568fb16" + integrity sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ== dependencies: - css-color-names "0.0.4" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - stylehacks "^4.0.0" + postcss-value-parser "^4.2.0" + stylehacks "^5.1.1" -postcss-merge-rules@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-4.0.3.tgz#362bea4ff5a1f98e4075a713c6cb25aefef9a650" - integrity sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ== +postcss-merge-rules@^5.1.4: + version "5.1.4" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.4.tgz#2f26fa5cacb75b1402e213789f6766ae5e40313c" + integrity 
sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g== dependencies: - browserslist "^4.0.0" + browserslist "^4.21.4" caniuse-api "^3.0.0" - cssnano-util-same-parent "^4.0.0" - postcss "^7.0.0" - postcss-selector-parser "^3.0.0" - vendors "^1.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" -postcss-minify-font-values@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-4.0.2.tgz#cd4c344cce474343fac5d82206ab2cbcb8afd5a6" - integrity sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg== +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-minify-gradients@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-4.0.2.tgz#93b29c2ff5099c535eecda56c4aa6e665a663471" - integrity sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q== +postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== dependencies: - cssnano-util-get-arguments "^4.0.0" - is-color-stop "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" -postcss-minify-params@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-4.0.2.tgz#6b9cef030c11e35261f95f618c90036d680db874" - integrity sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg== +postcss-minify-params@^5.1.4: + version "5.1.4" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz#c06a6c787128b3208b38c9364cfc40c8aa5d7352" + integrity sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw== dependencies: - alphanum-sort "^1.0.0" - browserslist "^4.0.0" - cssnano-util-get-arguments "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - uniqs "^2.0.0" + browserslist "^4.21.4" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" -postcss-minify-selectors@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-4.0.2.tgz#e2e5eb40bfee500d0cd9243500f5f8ea4262fbd8" - integrity sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g== +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== dependencies: - alphanum-sort "^1.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-selector-parser "^3.0.0" + postcss-selector-parser "^6.0.5" -postcss-modules-extract-imports@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e" - integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ== - dependencies: - postcss "^7.0.5" +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== -postcss-modules-local-by-default@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.3.tgz#bb14e0cc78279d504dbdcbfd7e0ca28993ffbbb0" - integrity sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw== +postcss-modules-local-by-default@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz#b08eb4f083050708998ba2c6061b50c2870ca524" + integrity sha512-2/u2zraspoACtrbFRnTijMiQtb4GW4BvatjaG/bCjYQo8kLTdevCUlwuBHx2sCnSyrI3x3qj4ZK1j5LQBgzmwA== dependencies: - icss-utils "^4.1.1" - postcss "^7.0.32" + icss-utils "^5.0.0" postcss-selector-parser "^6.0.2" postcss-value-parser "^4.1.0" -postcss-modules-scope@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz#385cae013cc7743f5a7d7602d1073a89eaae62ee" - integrity sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ== - dependencies: - postcss "^7.0.6" - postcss-selector-parser "^6.0.0" - -postcss-modules-values@^3.0.0: +postcss-modules-scope@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz#5b5000d6ebae29b4255301b4a3a54574423e7f10" - integrity sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg== - dependencies: - icss-utils "^4.0.0" - postcss "^7.0.6" - -postcss-nesting@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-7.0.1.tgz#b50ad7b7f0173e5b5e3880c3501344703e04c052" - integrity sha512-FrorPb0H3nuVq0Sff7W2rnc3SmIcruVC6YwpcS+k687VxyxO33iE1amna7wHuRVzM8vfiYofXSBHNAZ3QhLvYg== + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== dependencies: - postcss "^7.0.2" + postcss-selector-parser "^6.0.4" -postcss-normalize-charset@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-4.0.1.tgz#8b35add3aee83a136b0471e0d59be58a50285dd4" - integrity sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g== +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== dependencies: - postcss "^7.0.0" + icss-utils "^5.0.0" -postcss-normalize-display-values@^4.0.2: - version "4.0.2" - resolved 
"https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.2.tgz#0dbe04a4ce9063d4667ed2be476bb830c825935a" - integrity sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ== +postcss-nested@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c" + integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ== dependencies: - cssnano-util-get-match "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-selector-parser "^6.0.11" -postcss-normalize-positions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-4.0.2.tgz#05f757f84f260437378368a91f8932d4b102917f" - integrity sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA== +postcss-nesting@^10.2.0: + version "10.2.0" + resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" + integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== dependencies: - cssnano-util-get-arguments "^4.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" -postcss-normalize-repeat-style@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-4.0.2.tgz#c4ebbc289f3991a028d44751cbdd11918b17910c" - integrity sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q== - dependencies: - cssnano-util-get-arguments "^4.0.0" - cssnano-util-get-match "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== -postcss-normalize-string@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-4.0.2.tgz#cd44c40ab07a0c7a36dc5e99aace1eca4ec2690c" - integrity sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA== +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== dependencies: - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-normalize-timing-functions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-4.0.2.tgz#8e009ca2a3949cdaf8ad23e6b6ab99cb5e7d28d9" - integrity sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A== +postcss-normalize-positions@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" + integrity 
sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== dependencies: - cssnano-util-get-match "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-normalize-unicode@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-4.0.1.tgz#841bd48fdcf3019ad4baa7493a3d363b52ae1cfb" - integrity sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg== +postcss-normalize-repeat-style@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== dependencies: - browserslist "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-normalize-url@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-4.0.1.tgz#10e437f86bc7c7e58f7b9652ed878daaa95faae1" - integrity sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA== +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== dependencies: - is-absolute-url "^2.0.0" - normalize-url "^3.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-normalize-whitespace@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-4.0.2.tgz#bf1d4070fe4fcea87d1348e825d8cc0c5faa7d82" - integrity sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA== +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-normalize@8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-8.0.1.tgz#90e80a7763d7fdf2da6f2f0f82be832ce4f66776" - integrity sha512-rt9JMS/m9FHIRroDDBGSMsyW1c0fkvOJPy62ggxSHUldJO7B195TqFMqIf+lY5ezpDcYOV4j86aUp3/XbxzCCQ== +postcss-normalize-unicode@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz#f67297fca3fea7f17e0d2caa40769afc487aa030" + integrity sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA== dependencies: - "@csstools/normalize.css" "^10.1.0" - browserslist "^4.6.2" - postcss "^7.0.17" - postcss-browser-comments "^3.0.0" - sanitize.css "^10.0.0" + browserslist "^4.21.4" + postcss-value-parser "^4.2.0" -postcss-ordered-values@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-4.1.2.tgz#0cf75c820ec7d5c4d280189559e0b571ebac0eee" - integrity 
sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw== +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== dependencies: - cssnano-util-get-arguments "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" -postcss-overflow-shorthand@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-2.0.0.tgz#31ecf350e9c6f6ddc250a78f0c3e111f32dd4c30" - integrity sha512-aK0fHc9CBNx8jbzMYhshZcEv8LtYnBIRYQD5i7w/K/wS9c2+0NSR6B3OVMu5y0hBHYLcMGjfU+dmWYNKH0I85g== +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== dependencies: - postcss "^7.0.2" + postcss-value-parser "^4.2.0" -postcss-page-break@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-2.0.0.tgz#add52d0e0a528cabe6afee8b46e2abb277df46bf" - integrity sha512-tkpTSrLpfLfD9HvgOlJuigLuk39wVTbbd8RKcy8/ugV2bNBUW3xU+AIqyxhDrQr1VUj1RmyJrBn1YWrqUm9zAQ== +postcss-normalize@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== dependencies: - postcss "^7.0.2" + "@csstools/normalize.css" "*" + postcss-browser-comments "^4" + sanitize.css "*" -postcss-place@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-4.0.1.tgz#e9f39d33d2dc584e46ee1db45adb77ca9d1dcc62" - integrity sha512-Zb6byCSLkgRKLODj/5mQugyuj9bvAAw9LqJJjgwz5cYryGeXfFZfSXoP1UfveccFmeq0b/2xxwcTEVScnqGxBg== - dependencies: - postcss "^7.0.2" - postcss-values-parser "^2.0.0" - -postcss-preset-env@6.7.0: - version "6.7.0" - resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-6.7.0.tgz#c34ddacf8f902383b35ad1e030f178f4cdf118a5" - integrity sha512-eU4/K5xzSFwUFJ8hTdTQzo2RBLbDVt83QZrAvI07TULOkmyQlnYlpwep+2yIK+K+0KlZO4BvFcleOCCcUtwchg== - dependencies: - autoprefixer "^9.6.1" - browserslist "^4.6.4" - caniuse-lite "^1.0.30000981" - css-blank-pseudo "^0.1.4" - css-has-pseudo "^0.10.0" - css-prefers-color-scheme "^3.1.1" - cssdb "^4.4.0" - postcss "^7.0.17" - postcss-attribute-case-insensitive "^4.0.1" - postcss-color-functional-notation "^2.0.1" - postcss-color-gray "^5.0.0" - postcss-color-hex-alpha "^5.0.3" - postcss-color-mod-function "^3.0.3" - postcss-color-rebeccapurple "^4.0.1" - postcss-custom-media "^7.0.8" - postcss-custom-properties "^8.0.11" - postcss-custom-selectors "^5.1.2" - postcss-dir-pseudo-class "^5.0.0" - postcss-double-position-gradients "^1.0.0" - postcss-env-function "^2.0.2" - postcss-focus-visible "^4.0.0" - postcss-focus-within "^3.0.0" - postcss-font-variant "^4.0.0" - postcss-gap-properties "^2.0.0" - postcss-image-set-function "^3.0.1" - postcss-initial "^3.0.0" - postcss-lab-function "^2.0.1" - postcss-logical "^3.0.0" - postcss-media-minmax "^4.0.0" - postcss-nesting "^7.0.0" - 
postcss-overflow-shorthand "^2.0.0" - postcss-page-break "^2.0.0" - postcss-place "^4.0.1" - postcss-pseudo-class-any-link "^6.0.0" - postcss-replace-overflow-wrap "^3.0.0" - postcss-selector-matches "^4.0.0" - postcss-selector-not "^4.0.0" - -postcss-pseudo-class-any-link@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-6.0.0.tgz#2ed3eed393b3702879dec4a87032b210daeb04d1" - integrity sha512-lgXW9sYJdLqtmw23otOzrtbDXofUdfYzNm4PIpNE322/swES3VU9XlXHeJS46zT2onFO7V1QFdD4Q9LiZj8mew== - dependencies: - postcss "^7.0.2" - postcss-selector-parser "^5.0.0-rc.3" +postcss-opacity-percentage@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.3.tgz#5b89b35551a556e20c5d23eb5260fbfcf5245da6" + integrity sha512-An6Ba4pHBiDtyVpSLymUUERMo2cU7s+Obz6BTrS+gxkbnSBNKSuD0AVUc+CpBMrpVPKKfoVz0WQCX+Tnst0i4A== -postcss-reduce-initial@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-4.0.3.tgz#7fd42ebea5e9c814609639e2c2e84ae270ba48df" - integrity sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA== +postcss-ordered-values@^5.1.3: + version "5.1.3" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== dependencies: - browserslist "^4.0.0" - caniuse-api "^3.0.0" - has "^1.0.0" - postcss "^7.0.0" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" -postcss-reduce-transforms@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-4.0.2.tgz#17efa405eacc6e07be3414a5ca2d1074681d4e29" - integrity sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg== +postcss-overflow-shorthand@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" + integrity sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== dependencies: - cssnano-util-get-match "^4.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" + postcss-value-parser "^4.2.0" -postcss-replace-overflow-wrap@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-3.0.0.tgz#61b360ffdaedca84c7c918d2b0f0d0ea559ab01c" - integrity sha512-2T5hcEHArDT6X9+9dVSPQdo7QHzG4XKclFT8rU5TzJPDN7RIRTbO9c4drUISOVemLj03aezStHCR2AIcr8XLpw== - dependencies: - postcss "^7.0.2" +postcss-page-break@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== -postcss-safe-parser@5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-safe-parser/-/postcss-safe-parser-5.0.2.tgz#459dd27df6bc2ba64608824ba39e45dacf5e852d" - integrity sha512-jDUfCPJbKOABhwpUKcqCVbbXiloe/QXMcbJ6Iipf3sDIihEzTqRCeMBfRaOHxhBuTYqtASrI1KJWxzztZU4qUQ== +postcss-place@^7.0.5: + version "7.0.5" + resolved 
"https://registry.yarnpkg.com/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" + integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== dependencies: - postcss "^8.1.0" + postcss-value-parser "^4.2.0" -postcss-selector-matches@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff" - integrity sha512-LgsHwQR/EsRYSqlwdGzeaPKVT0Ml7LAT6E75T8W8xLJY62CE4S/l03BWIt3jT8Taq22kXP08s2SfTSzaraoPww== +postcss-preset-env@^7.0.1: + version "7.8.3" + resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-7.8.3.tgz#2a50f5e612c3149cc7af75634e202a5b2ad4f1e2" + integrity sha512-T1LgRm5uEVFSEF83vHZJV2z19lHg4yJuZ6gXZZkqVsqv63nlr6zabMH3l4Pc01FQCyfWVrh2GaUeCVy9Po+Aag== + dependencies: + "@csstools/postcss-cascade-layers" "^1.1.1" + "@csstools/postcss-color-function" "^1.1.1" + "@csstools/postcss-font-format-keywords" "^1.0.1" + "@csstools/postcss-hwb-function" "^1.0.2" + "@csstools/postcss-ic-unit" "^1.0.1" + "@csstools/postcss-is-pseudo-class" "^2.0.7" + "@csstools/postcss-nested-calc" "^1.0.0" + "@csstools/postcss-normalize-display-values" "^1.0.1" + "@csstools/postcss-oklab-function" "^1.1.1" + "@csstools/postcss-progressive-custom-properties" "^1.3.0" + "@csstools/postcss-stepped-value-functions" "^1.0.1" + "@csstools/postcss-text-decoration-shorthand" "^1.0.0" + "@csstools/postcss-trigonometric-functions" "^1.0.2" + "@csstools/postcss-unset-value" "^1.0.2" + autoprefixer "^10.4.13" + browserslist "^4.21.4" + css-blank-pseudo "^3.0.3" + css-has-pseudo "^3.0.4" + css-prefers-color-scheme "^6.0.3" + cssdb "^7.1.0" + postcss-attribute-case-insensitive "^5.0.2" + postcss-clamp "^4.1.0" + postcss-color-functional-notation "^4.2.4" + postcss-color-hex-alpha "^8.0.4" + postcss-color-rebeccapurple "^7.1.1" + postcss-custom-media "^8.0.2" + postcss-custom-properties "^12.1.10" + postcss-custom-selectors "^6.0.3" + postcss-dir-pseudo-class "^6.0.5" + postcss-double-position-gradients "^3.1.2" + postcss-env-function "^4.0.6" + postcss-focus-visible "^6.0.4" + postcss-focus-within "^5.0.4" + postcss-font-variant "^5.0.0" + postcss-gap-properties "^3.0.5" + postcss-image-set-function "^4.0.7" + postcss-initial "^4.0.1" + postcss-lab-function "^4.2.1" + postcss-logical "^5.0.4" + postcss-media-minmax "^5.0.0" + postcss-nesting "^10.2.0" + postcss-opacity-percentage "^1.1.2" + postcss-overflow-shorthand "^3.0.4" + postcss-page-break "^3.0.4" + postcss-place "^7.0.5" + postcss-pseudo-class-any-link "^7.1.6" + postcss-replace-overflow-wrap "^4.0.0" + postcss-selector-not "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-pseudo-class-any-link@^7.1.6: + version "7.1.6" + resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" + integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-reduce-initial@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.2.tgz#798cd77b3e033eae7105c18c9d371d989e1382d6" + integrity sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg== dependencies: - balanced-match "^1.0.0" - postcss "^7.0.2" + browserslist "^4.21.4" + caniuse-api "^3.0.0" -postcss-selector-not@^4.0.0: 
- version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-4.0.1.tgz#263016eef1cf219e0ade9a913780fc1f48204cbf" - integrity sha512-YolvBgInEK5/79C+bdFMyzqTg6pkYqDbzZIST/PDMqa/o3qtXenD05apBG2jLgT0/BQ77d4U2UK12jWpilqMAQ== +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== dependencies: - balanced-match "^1.0.0" - postcss "^7.0.2" + postcss-value-parser "^4.2.0" -postcss-selector-parser@^3.0.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz#b310f5c4c0fdaf76f94902bbaa30db6aa84f5270" - integrity sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA== - dependencies: - dot-prop "^5.2.0" - indexes-of "^1.0.1" - uniq "^1.0.1" +postcss-replace-overflow-wrap@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== -postcss-selector-parser@^5.0.0-rc.3, postcss-selector-parser@^5.0.0-rc.4: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz#249044356697b33b64f1a8f7c80922dddee7195c" - integrity sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ== +postcss-selector-not@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" + integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== dependencies: - cssesc "^2.0.0" - indexes-of "^1.0.1" - uniq "^1.0.1" + postcss-selector-parser "^6.0.10" -postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2: - version "6.0.6" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz#2c5bba8174ac2f6981ab631a42ab0ee54af332ea" - integrity sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg== +postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9: + version "6.0.13" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz#d05d8d76b1e8e173257ef9d60b706a8e5e99bf1b" + integrity sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ== dependencies: cssesc "^3.0.0" util-deprecate "^1.0.2" -postcss-svgo@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.3.tgz#343a2cdbac9505d416243d496f724f38894c941e" - integrity sha512-NoRbrcMWTtUghzuKSoIm6XV+sJdvZ7GZSc3wdBN0W19FTtp2ko8NqLsgoh/m9CzNhU3KLPvQmjIwtaNFkaFTvw== - dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - svgo "^1.0.0" - -postcss-unique-selectors@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-4.0.1.tgz#9446911f3289bfd64c6d680f073c03b1f9ee4bac" - integrity 
sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg== +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== dependencies: - alphanum-sort "^1.0.0" - postcss "^7.0.0" - uniqs "^2.0.0" - -postcss-value-parser@^3.0.0: - version "3.3.1" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" - integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== + postcss-value-parser "^4.2.0" + svgo "^2.7.0" -postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz#443f6a20ced6481a2bda4fa8532a6e55d789a2cb" - integrity sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ== - -postcss-values-parser@^2.0.0, postcss-values-parser@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/postcss-values-parser/-/postcss-values-parser-2.0.1.tgz#da8b472d901da1e205b47bdc98637b9e9e550e5f" - integrity sha512-2tLuBsA6P4rYTNKCXYG/71C7j1pU6pK503suYOmn4xYrQIzW+opD+7FAFNuGSdZC/3Qfy334QbeMu7MEb8gOxg== +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== dependencies: - flatten "^1.0.2" - indexes-of "^1.0.1" - uniq "^1.0.1" + postcss-selector-parser "^6.0.5" -postcss@7.0.21: - version "7.0.21" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.21.tgz#06bb07824c19c2021c5d056d5b10c35b989f7e17" - integrity sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ== - dependencies: - chalk "^2.4.2" - source-map "^0.6.1" - supports-color "^6.1.0" +postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== -postcss@^7, postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.14, postcss@^7.0.17, postcss@^7.0.2, postcss@^7.0.26, postcss@^7.0.27, postcss@^7.0.32, postcss@^7.0.5, postcss@^7.0.6: - version "7.0.35" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.35.tgz#d2be00b998f7f211d8a276974079f2e92b970e24" - integrity sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg== +postcss@^7.0.35: + version "7.0.39" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== dependencies: - chalk "^2.4.2" + picocolors "^0.2.1" source-map "^0.6.1" - supports-color "^6.1.0" -postcss@^8.1.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.3.0.tgz#b1a713f6172ca427e3f05ef1303de8b65683325f" - integrity 
sha512-+ogXpdAjWGa+fdYY5BQ96V/6tAo+TdSSIMP5huJBIygdWwKtVoB5JWZ7yUd4xZ8r+8Kvvx4nyg/PQ071H4UtcQ== +postcss@^8.3.5, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.4: + version "8.4.24" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.24.tgz#f714dba9b2284be3cc07dbd2fc57ee4dc972d2df" + integrity sha512-M0RzbcI0sO/XJNucsGjvWU9ERWxb/ytp1w6dKtxTKgixdtQDq4rmx/g8W1hnaheq9jgwL/oyEdH5Bc4WwJKMqg== dependencies: - colorette "^1.2.2" - nanoid "^3.1.23" - source-map-js "^0.6.2" + nanoid "^3.3.6" + picocolors "^1.0.0" + source-map-js "^1.0.2" precision@~1.0.0: version "1.0.1" @@ -15166,11 +13202,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= -prepend-http@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= - prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" @@ -15184,32 +13215,25 @@ pretender@^3.4.3: fake-xml-http-request "^2.1.1" route-recognizer "^0.3.3" -prettier-linter-helpers@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" - integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== - dependencies: - fast-diff "^1.1.2" - -prettier@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18" - integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w== +prettier@^2.8.8: + version "2.8.8" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== -pretty-bytes@^5.3.0: +pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: version "5.6.0" - resolved "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== -pretty-error@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.2.tgz#be89f82d81b1c86ec8fdfbc385045882727f93b6" - integrity sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw== +pretty-error@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== dependencies: lodash "^4.17.20" - renderkid "^2.0.4" + renderkid "^3.0.0" -pretty-format@^26.0.0, pretty-format@^26.6.0, pretty-format@^26.6.2: +pretty-format@^26.0.0, pretty-format@^26.6.2: version "26.6.2" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93" integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg== @@ -15219,6 +13243,25 @@ pretty-format@^26.0.0, 
pretty-format@^26.6.0, pretty-format@^26.6.2: ansi-styles "^4.0.0" react-is "^17.0.1" +pretty-format@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +pretty-format@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== + dependencies: + "@jest/schemas" "^28.1.3" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + prismjs@^1.22.0: version "1.24.1" resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.24.1.tgz#c4d7895c4d6500289482fa8936d9cdd192684036" @@ -15236,21 +13279,6 @@ process-nextick-args@~2.0.0: resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -process@^0.11.10: - version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" - integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= - -progress@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== - -promise-inflight@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" - integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= - promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" @@ -15265,23 +13293,15 @@ promise@^8.1.0: dependencies: asap "~2.0.6" -prompts@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.0.tgz#4aa5de0723a231d1ee9121c40fdf663df73f61d7" - integrity sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -prompts@^2.0.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.1.tgz#befd3b1195ba052f9fd2fde8a486c4e82ee77f61" - integrity sha512-EQyfIuO2hPDsX1L/blblV+H7I0knhgAd82cVneCwcdND9B8AuCDuRcBH6yIcG4dFzlOUqbazQqwGjx5xmsNLuQ== +prompts@^2.0.1, prompts@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== dependencies: kleur "^3.0.3" sisteransi "^1.0.5" -prop-types@15.7.2, prop-types@^15.5.10, prop-types@^15.5.7, prop-types@^15.6.0, prop-types@^15.6.1, prop-types@^15.6.2, prop-types@^15.7.2: +prop-types@15.7.2: version "15.7.2" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== @@ -15290,7 +13310,7 @@ prop-types@15.7.2, prop-types@^15.5.10, prop-types@^15.5.7, prop-types@^15.6.0, 
object-assign "^4.1.1" react-is "^16.8.1" -prop-types@^15.8.1: +prop-types@^15.5.10, prop-types@^15.6.0, prop-types@^15.6.1, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: version "15.8.1" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== @@ -15494,26 +13514,6 @@ psl@^1.1.28, psl@^1.1.33: resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== -public-encrypt@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" - integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== - dependencies: - bn.js "^4.1.0" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - parse-asn1 "^5.0.0" - randombytes "^2.0.1" - safe-buffer "^5.1.2" - -pump@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" - integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" @@ -15522,25 +13522,6 @@ pump@^3.0.0: end-of-stream "^1.1.0" once "^1.3.1" -pumpify@^1.3.3: - version "1.5.1" - resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" - integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== - dependencies: - duplexify "^3.6.0" - inherits "^2.0.3" - pump "^2.0.0" - -punycode@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" - integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= - -punycode@^1.2.4: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= - punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" @@ -15573,14 +13554,6 @@ query-string@*: split-on-first "^1.0.0" strict-uri-encode "^2.0.0" -query-string@^4.1.0: - version "4.3.4" - resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb" - integrity sha1-u7aTucqRXCMlFbIosaArYJBD2+s= - dependencies: - object-assign "^4.1.0" - strict-uri-encode "^1.0.0" - query-string@^6.13.8: version "6.14.1" resolved "https://registry.yarnpkg.com/query-string/-/query-string-6.14.1.tgz#7ac2dca46da7f309449ba0f86b1fd28255b0c86a" @@ -15591,22 +13564,7 @@ query-string@^6.13.8: split-on-first "^1.0.0" strict-uri-encode "^2.0.0" -querystring-es3@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" - integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= - -querystring@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= - 
-querystring@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.1.tgz#40d77615bb09d16902a85c3e38aa8b5ed761c2dd" - integrity sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg== - -querystringify@^2.1.1, querystringify@^2.2.0: +querystringify@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== @@ -15628,21 +13586,13 @@ ramda@0.26.1: resolved "https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz#8d41351eb8111c55353617fc3bbffad8e4d35d06" integrity sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ== -randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: +randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" -randomfill@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" - integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== - dependencies: - randombytes "^2.0.5" - safe-buffer "^5.1.0" - range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" @@ -15796,16 +13746,7 @@ rc-menu@~9.8.0: rc-util "^5.12.0" shallowequal "^1.1.0" -rc-motion@^2.0.0, rc-motion@^2.0.1, rc-motion@^2.2.0, rc-motion@^2.3.0, rc-motion@^2.3.4, rc-motion@^2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/rc-motion/-/rc-motion-2.4.3.tgz#2afd129da8764ee0372ba83442949d8ecb1c7ad2" - integrity sha512-GZLLFXHl/VqTfI7bSZNNZozcblNmDka1AAoQig7EZ6s0rWg5y0RlgrcHWO+W+nrOVbYfJDxoaQUoP2fEmvCWmA== - dependencies: - "@babel/runtime" "^7.11.1" - classnames "^2.2.1" - rc-util "^5.2.1" - -rc-motion@^2.4.4, rc-motion@^2.6.1, rc-motion@^2.6.2: +rc-motion@^2.0.0, rc-motion@^2.0.1, rc-motion@^2.2.0, rc-motion@^2.3.0, rc-motion@^2.3.4, rc-motion@^2.4.3, rc-motion@^2.4.4, rc-motion@^2.6.1, rc-motion@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/rc-motion/-/rc-motion-2.6.2.tgz#3d31f97e41fb8e4f91a4a4189b6a98ac63342869" integrity sha512-4w1FaX3dtV749P8GwfS4fYnFG4Rb9pxvCYPc/b2fw1cmlHJWNNgOFIz7ysiD+eOrzJSvnLJWlNQQncpNMXwwpg== @@ -15824,17 +13765,7 @@ rc-notification@~4.6.0: rc-motion "^2.2.0" rc-util "^5.20.1" -rc-overflow@^1.0.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/rc-overflow/-/rc-overflow-1.2.1.tgz#ea13f1e2ec152d8adcfc3bf1e53fa2b62a1d2cff" - integrity sha512-TCB8QiEnmNUbsJZX1GU8ZvIgVxk42eu3yaRdDZc2HFjVeT3HBSfscVaCVzBuH3NR5IWrQLodI0p6MZEGFn+KiA== - dependencies: - "@babel/runtime" "^7.11.1" - classnames "^2.2.1" - rc-resize-observer "^1.0.0" - rc-util "^5.5.1" - -rc-overflow@^1.2.8: +rc-overflow@^1.0.0, rc-overflow@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/rc-overflow/-/rc-overflow-1.2.8.tgz#40f140fabc244118543e627cdd1ef750d9481a88" integrity sha512-QJ0UItckWPQ37ZL1dMEBAdY1dhfTXFL9k6oTTcyydVwoUNMnMqCGqnRNA98axSr/OeDKqR6DVFyi8eA5RQI/uQ== @@ -15884,17 +13815,7 @@ rc-rate@~2.9.0: classnames "^2.2.5" rc-util "^5.0.1" 
-rc-resize-observer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/rc-resize-observer/-/rc-resize-observer-1.0.0.tgz#97fb89856f62fec32ab6e40933935cf58e2e102d" - integrity sha512-RgKGukg1mlzyGdvzF7o/LGFC8AeoMH9aGzXTUdp6m+OApvmRdUuOscq/Y2O45cJA+rXt1ApWlpFoOIioXL3AGg== - dependencies: - "@babel/runtime" "^7.10.1" - classnames "^2.2.1" - rc-util "^5.0.0" - resize-observer-polyfill "^1.5.1" - -rc-resize-observer@^1.1.0, rc-resize-observer@^1.2.0: +rc-resize-observer@^1.0.0, rc-resize-observer@^1.1.0, rc-resize-observer@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/rc-resize-observer/-/rc-resize-observer-1.2.1.tgz#7f9715b5d1afe126ade3c107aafd2cebf8a57a99" integrity sha512-g53PnWLeVOmt4XWkt2x+QlIdf/PhJSd7JqHhtMrUY370e7wJ+kxbgXicYqvENUcgFiiOiMCd07YsC2GNsoSbnA== @@ -15955,18 +13876,7 @@ rc-switch@~3.2.0: classnames "^2.2.1" rc-util "^5.0.1" -rc-table@^7.13.1: - version "7.15.1" - resolved "https://registry.yarnpkg.com/rc-table/-/rc-table-7.15.1.tgz#e5dde23289f55c9e0d4b217a61b9487bb4325b2a" - integrity sha512-RJ+BkBrrHt9/M4Dz5zIXYyAzrrqB/FjTGDGAfJv7P8LHczpLEt1yRTlz2wjX1ktxMpj16gZlssIchbKTqdKjlw== - dependencies: - "@babel/runtime" "^7.10.1" - classnames "^2.2.5" - rc-resize-observer "^1.0.0" - rc-util "^5.13.0" - shallowequal "^1.1.0" - -rc-table@~7.26.0: +rc-table@^7.13.1, rc-table@~7.26.0: version "7.26.0" resolved "https://registry.yarnpkg.com/rc-table/-/rc-table-7.26.0.tgz#9d517e7fa512e7571fdcc453eb1bf19edfac6fbc" integrity sha512-0cD8e6S+DTGAt5nBZQIPFYEaIukn17sfa5uFL98faHlH/whZzD8ii3dbFL4wmUDEL4BLybhYop+QUfZJ4CPvNQ== @@ -16032,29 +13942,7 @@ rc-tree@~5.7.0: rc-util "^5.16.1" rc-virtual-list "^3.4.8" -rc-trigger@^5.0.0, rc-trigger@^5.0.4, rc-trigger@^5.1.2: - version "5.2.8" - resolved "https://registry.yarnpkg.com/rc-trigger/-/rc-trigger-5.2.8.tgz#27c8291c24518b8f11d76c848f5424e0c429e94a" - integrity sha512-Tn84oGmvNBLXI+ptpzxyJx4ArKTduuB6l74ShDLhDaJaF9f5JAMizfx31L30ELVIzRr3Ze4sekG7rzwPGwVOdw== - dependencies: - "@babel/runtime" "^7.11.2" - classnames "^2.2.6" - rc-align "^4.0.0" - rc-motion "^2.0.0" - rc-util "^5.5.0" - -rc-trigger@^5.2.10: - version "5.3.3" - resolved "https://registry.yarnpkg.com/rc-trigger/-/rc-trigger-5.3.3.tgz#166013df79e6a4ce64515391bd6d4f8386839761" - integrity sha512-IC4nuTSAME7RJSgwvHCNDQrIzhvGMKf6NDu5veX+zk1MG7i1UnwTWWthcP9WHw3+FZfP3oZGvkrHFPu/EGkFKw== - dependencies: - "@babel/runtime" "^7.18.3" - classnames "^2.2.6" - rc-align "^4.0.0" - rc-motion "^2.0.0" - rc-util "^5.19.2" - -rc-trigger@^5.3.1: +rc-trigger@^5.0.0, rc-trigger@^5.0.4, rc-trigger@^5.1.2, rc-trigger@^5.2.10, rc-trigger@^5.3.1: version "5.3.4" resolved "https://registry.yarnpkg.com/rc-trigger/-/rc-trigger-5.3.4.tgz#6b4b26e32825677c837d1eb4d7085035eecf9a61" integrity sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw== @@ -16074,25 +13962,7 @@ rc-upload@~4.3.0: classnames "^2.2.5" rc-util "^5.2.0" -rc-util@^5.0.0, rc-util@^5.0.1, rc-util@^5.0.6, rc-util@^5.0.7, rc-util@^5.12.0, rc-util@^5.13.0, rc-util@^5.2.0, rc-util@^5.2.1, rc-util@^5.3.0, rc-util@^5.4.0, rc-util@^5.5.0, rc-util@^5.5.1, rc-util@^5.6.1, rc-util@^5.8.0, rc-util@^5.9.4: - version "5.13.1" - resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-5.13.1.tgz#03e74955b5c46a58cbc6236e4d30dd462c755290" - integrity sha512-Dws2tjXBBihfjVQFlG5JzZ/5O3Wutctm0W94Wb1+M7GD2roWJPrQdSa4AkWm2pn0Ms32zoVPPkWodFeAYZPLfA== - dependencies: - "@babel/runtime" "^7.12.5" - react-is "^16.12.0" - shallowequal "^1.1.0" - -rc-util@^5.15.0, rc-util@^5.16.1, rc-util@^5.17.0, 
rc-util@^5.19.2, rc-util@^5.21.0, rc-util@^5.23.0: - version "5.24.4" - resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-5.24.4.tgz#a4126f01358c86f17c1bf380a1d83d6c9155ae65" - integrity sha512-2a4RQnycV9eV7lVZPEJ7QwJRPlZNc06J7CwcwZo4vIHr3PfUqtYgl1EkUV9ETAc6VRRi8XZOMFhYG63whlIC9Q== - dependencies: - "@babel/runtime" "^7.18.3" - react-is "^16.12.0" - shallowequal "^1.1.0" - -rc-util@^5.16.0, rc-util@^5.18.1, rc-util@^5.20.1, rc-util@^5.21.2, rc-util@^5.22.5, rc-util@^5.24.4, rc-util@^5.27.0: +rc-util@^5.0.1, rc-util@^5.0.6, rc-util@^5.12.0, rc-util@^5.15.0, rc-util@^5.16.0, rc-util@^5.16.1, rc-util@^5.17.0, rc-util@^5.18.1, rc-util@^5.19.2, rc-util@^5.2.0, rc-util@^5.2.1, rc-util@^5.20.1, rc-util@^5.21.0, rc-util@^5.21.2, rc-util@^5.22.5, rc-util@^5.23.0, rc-util@^5.24.4, rc-util@^5.27.0, rc-util@^5.3.0, rc-util@^5.4.0, rc-util@^5.6.1, rc-util@^5.8.0, rc-util@^5.9.4: version "5.27.1" resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-5.27.1.tgz#d12f02b9577b04299c0f1a235c8acbcf56e2824b" integrity sha512-PsjHA+f+KBCz+YTZxrl3ukJU5RoNKoe3KSNMh0xGiISbR67NaM9E9BiMjCwxa3AcCUOg/rZ+V0ZKLSimAA+e3w== @@ -16100,16 +13970,7 @@ rc-util@^5.16.0, rc-util@^5.18.1, rc-util@^5.20.1, rc-util@^5.21.2, rc-util@^5.2 "@babel/runtime" "^7.18.3" react-is "^16.12.0" -rc-virtual-list@^3.2.0: - version "3.2.6" - resolved "https://registry.yarnpkg.com/rc-virtual-list/-/rc-virtual-list-3.2.6.tgz#2c92a40f4425e19881b38134d6bd286a11137d2d" - integrity sha512-8FiQLDzm3c/tMX0d62SQtKDhLH7zFlSI6pWBAPt+TUntEqd3Lz9zFAmpvTu8gkvUom/HCsDSZs4wfV4wDPWC0Q== - dependencies: - classnames "^2.2.6" - rc-resize-observer "^1.0.0" - rc-util "^5.0.7" - -rc-virtual-list@^3.4.8: +rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: version "3.4.13" resolved "https://registry.yarnpkg.com/rc-virtual-list/-/rc-virtual-list-3.4.13.tgz#20acc934b263abcf7b7c161f50ef82281b2f7e8d" integrity sha512-cPOVDmcNM7rH6ANotanMDilW/55XnFPw0Jh/GQYtrzZSy3AmWvCnqVNyNC/pgg3lfVmX2994dlzAhuUrd4jG7w== @@ -16129,17 +13990,17 @@ rc@^1.2.8: minimist "^1.2.0" strip-json-comments "~2.0.1" -react-app-polyfill@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-2.0.0.tgz#a0bea50f078b8a082970a9d853dc34b6dcc6a3cf" - integrity sha512-0sF4ny9v/B7s6aoehwze9vJNWcmCemAUYBVasscVr92+UYiEqDXOxfKjXN685mDaMRNF3WdhHQs76oTODMocFA== +react-app-polyfill@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== dependencies: - core-js "^3.6.5" + core-js "^3.19.2" object-assign "^4.1.1" promise "^8.1.0" raf "^3.4.1" - regenerator-runtime "^0.13.7" - whatwg-fetch "^3.4.1" + regenerator-runtime "^0.13.9" + whatwg-fetch "^3.6.2" react-clientside-effect@^1.2.5: version "1.2.6" @@ -16161,35 +14022,35 @@ react-color@^2.19.3: reactcss "^1.2.0" tinycolor2 "^1.4.1" -react-dev-utils@^11.0.3: - version "11.0.4" - resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-11.0.4.tgz#a7ccb60257a1ca2e0efe7a83e38e6700d17aa37a" - integrity sha512-dx0LvIGHcOPtKbeiSUM4jqpBl3TcY7CDjZdfOIcKeznE7BWr9dg0iPG90G5yfVQ+p/rGNMXdbfStvzQZEVEi4A== - dependencies: - "@babel/code-frame" "7.10.4" - address "1.1.2" - browserslist "4.14.2" - chalk "2.4.2" - cross-spawn "7.0.3" - detect-port-alt "1.1.6" - escape-string-regexp "2.0.0" - filesize "6.1.0" - find-up "4.1.0" - fork-ts-checker-webpack-plugin "4.1.6" - global-modules "2.0.0" - globby 
"11.0.1" - gzip-size "5.1.1" - immer "8.0.1" - is-root "2.1.0" - loader-utils "2.0.0" - open "^7.0.2" - pkg-up "3.1.0" - prompts "2.4.0" - react-error-overlay "^6.0.9" - recursive-readdir "2.2.2" - shell-quote "1.7.2" - strip-ansi "6.0.0" - text-table "0.2.0" +react-dev-utils@^12.0.1: + version "12.0.1" + resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.11" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" react-dom@^17.0.0: version "17.0.2" @@ -16200,17 +14061,10 @@ react-dom@^17.0.0: object-assign "^4.1.1" scheduler "^0.20.2" -react-email-share-link@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/react-email-share-link/-/react-email-share-link-1.0.3.tgz#745d09fa9bca8a1c3820f7a8bc43237bb4e5c05e" - integrity sha512-MNWNN4OHbrE3ia3l3a5EHds4dXbu9o7j1F4P+UkQ4y15AdkU/laMDD2EXE3onVRePtk2Et3TUHKyEleocUfN2g== - dependencies: - prop-types "^15.7.2" - -react-error-overlay@^6.0.9: - version "6.0.9" - resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.9.tgz#3c743010c9359608c375ecd6bc76f35d93995b0a" - integrity sha512-nQTTcUu+ATDbrSD1BZHr5kgSD4oF8OFjxun8uAaL8RwPBacGBNPf/yAuVVdx17N8XNzRDMrZ9XcKZHCjPW+9ew== +react-error-overlay@^6.0.11: + version "6.0.11" + resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== react-fast-compare@^3.2.0: version "3.2.1" @@ -16265,7 +14119,7 @@ react-is@^17.0.0, react-is@^17.0.1: resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== -react-is@^18.2.0: +react-is@^18.0.0, react-is@^18.2.0: version "18.2.0" resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== @@ -16294,10 +14148,10 @@ react-markdown@6.0.2: unist-util-visit "^2.0.0" vfile "^4.0.0" -react-refresh@^0.8.3: - version "0.8.3" - resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f" - integrity sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg== +react-refresh@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== react-router-dom@^5.1.6: version "5.2.0" @@ -16328,71 +14182,60 @@ react-router@5.2.0, react-router@^5.2.0: tiny-invariant "^1.0.2" 
tiny-warning "^1.0.0" -react-scripts@4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-4.0.3.tgz#b1cafed7c3fa603e7628ba0f187787964cb5d345" - integrity sha512-S5eO4vjUzUisvkIPB7jVsKtuH2HhWcASREYWHAQ1FP5HyCv3xgn+wpILAEWkmy+A+tTNbSZClhxjT3qz6g4L1A== - dependencies: - "@babel/core" "7.12.3" - "@pmmmwh/react-refresh-webpack-plugin" "0.4.3" - "@svgr/webpack" "5.5.0" - "@typescript-eslint/eslint-plugin" "^4.5.0" - "@typescript-eslint/parser" "^4.5.0" - babel-eslint "^10.1.0" - babel-jest "^26.6.0" - babel-loader "8.1.0" - babel-plugin-named-asset-import "^0.3.7" - babel-preset-react-app "^10.0.0" +react-scripts@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" + integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== + dependencies: + "@babel/core" "^7.16.0" + "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" + "@svgr/webpack" "^5.5.0" + babel-jest "^27.4.2" + babel-loader "^8.2.3" + babel-plugin-named-asset-import "^0.3.8" + babel-preset-react-app "^10.0.1" bfj "^7.0.2" - camelcase "^6.1.0" - case-sensitive-paths-webpack-plugin "2.3.0" - css-loader "4.3.0" - dotenv "8.2.0" - dotenv-expand "5.1.0" - eslint "^7.11.0" - eslint-config-react-app "^6.0.0" - eslint-plugin-flowtype "^5.2.0" - eslint-plugin-import "^2.22.1" - eslint-plugin-jest "^24.1.0" - eslint-plugin-jsx-a11y "^6.3.1" - eslint-plugin-react "^7.21.5" - eslint-plugin-react-hooks "^4.2.0" - eslint-plugin-testing-library "^3.9.2" - eslint-webpack-plugin "^2.5.2" - file-loader "6.1.1" - fs-extra "^9.0.1" - html-webpack-plugin "4.5.0" - identity-obj-proxy "3.0.0" - jest "26.6.0" - jest-circus "26.6.0" - jest-resolve "26.6.0" - jest-watch-typeahead "0.6.1" - mini-css-extract-plugin "0.11.3" - optimize-css-assets-webpack-plugin "5.0.4" - pnp-webpack-plugin "1.6.4" - postcss-flexbugs-fixes "4.2.1" - postcss-loader "3.0.0" - postcss-normalize "8.0.1" - postcss-preset-env "6.7.0" - postcss-safe-parser "5.0.2" - prompts "2.4.0" - react-app-polyfill "^2.0.0" - react-dev-utils "^11.0.3" - react-refresh "^0.8.3" - resolve "1.18.1" - resolve-url-loader "^3.1.2" - sass-loader "^10.0.5" - semver "7.3.2" - style-loader "1.3.0" - terser-webpack-plugin "4.2.3" - ts-pnp "1.2.0" - url-loader "4.1.1" - webpack "4.44.2" - webpack-dev-server "3.11.1" - webpack-manifest-plugin "2.2.0" - workbox-webpack-plugin "5.1.4" + browserslist "^4.18.1" + camelcase "^6.2.1" + case-sensitive-paths-webpack-plugin "^2.4.0" + css-loader "^6.5.1" + css-minimizer-webpack-plugin "^3.2.0" + dotenv "^10.0.0" + dotenv-expand "^5.1.0" + eslint "^8.3.0" + eslint-config-react-app "^7.0.1" + eslint-webpack-plugin "^3.1.1" + file-loader "^6.2.0" + fs-extra "^10.0.0" + html-webpack-plugin "^5.5.0" + identity-obj-proxy "^3.0.0" + jest "^27.4.3" + jest-resolve "^27.4.2" + jest-watch-typeahead "^1.0.0" + mini-css-extract-plugin "^2.4.5" + postcss "^8.4.4" + postcss-flexbugs-fixes "^5.0.2" + postcss-loader "^6.2.1" + postcss-normalize "^10.0.1" + postcss-preset-env "^7.0.1" + prompts "^2.4.2" + react-app-polyfill "^3.0.0" + react-dev-utils "^12.0.1" + react-refresh "^0.11.0" + resolve "^1.20.0" + resolve-url-loader "^4.0.0" + sass-loader "^12.3.0" + semver "^7.3.5" + source-map-loader "^3.0.0" + style-loader "^3.3.1" + tailwindcss "^3.0.2" + terser-webpack-plugin "^5.2.5" + webpack "^5.64.4" + webpack-dev-server "^4.6.0" + webpack-manifest-plugin "^4.0.2" + workbox-webpack-plugin "^6.4.1" 
optionalDependencies: - fsevents "^2.1.3" + fsevents "^2.3.2" react-syntax-highlighter@^15.4.4: version "15.4.4" @@ -16420,12 +14263,12 @@ react-universal-interface@^0.6.2: resolved "https://registry.yarnpkg.com/react-universal-interface/-/react-universal-interface-0.6.2.tgz#5e8d438a01729a4dbbcbeeceb0b86be146fe2b3b" integrity sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw== -react-use-measure@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/react-use-measure/-/react-use-measure-2.0.1.tgz#4f23f94c832cd4512da55acb300d1915dcbf3ae8" - integrity sha512-lFfHiqcXbJ2/6aUkZwt8g5YYM7EGqNVxJhMqMPqv1BVXRKp8D7jYLlmma0SvhRY4WYxxkZpCdbJvhDylb5gcEA== +react-use-measure@^2.0.4: + version "2.1.1" + resolved "https://registry.yarnpkg.com/react-use-measure/-/react-use-measure-2.1.1.tgz#5824537f4ee01c9469c45d5f7a8446177c6cc4ba" + integrity sha512-nocZhN26cproIiIduswYpV5y5lQpSQS1y/4KuvUCjSKmw7ZWIS/+g3aFnX3WdBkyuGUtTLif3UTqnLLhbDoQig== dependencies: - debounce "^1.2.0" + debounce "^1.2.1" react-use@^17.3.2: version "17.4.0" @@ -16482,45 +14325,16 @@ reactour@1.18.7: prop-types "15.7.2" react-focus-lock "2.5.2" scroll-smooth "1.1.1" - scrollparent "2.0.1" - -read-pkg-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07" - integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc= - dependencies: - find-up "^2.0.0" - read-pkg "^3.0.0" - -read-pkg-up@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" - integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== - dependencies: - find-up "^4.1.0" - read-pkg "^5.2.0" - type-fest "^0.8.1" - -read-pkg@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" - integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k= - dependencies: - load-json-file "^4.0.0" - normalize-package-data "^2.3.2" - path-type "^3.0.0" + scrollparent "2.0.1" -read-pkg@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" - integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== +read-cache@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== dependencies: - "@types/normalize-package-data" "^2.4.0" - normalize-package-data "^2.5.0" - parse-json "^5.0.0" - type-fest "^0.6.0" + pify "^2.3.0" -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: +readable-stream@^2.0.1: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -16533,7 +14347,7 @@ read-pkg@^5.2.0: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.6.0: +readable-stream@^3.0.6: version "3.6.0" resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -16542,28 +14356,19 @@ readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.6.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" -readdirp@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" - integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== - dependencies: - graceful-fs "^4.1.11" - micromatch "^3.1.10" - readable-stream "^2.0.2" - -readdirp@~3.5.0: - version "3.5.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e" - integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== dependencies: picomatch "^2.2.1" -recursive-readdir@2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" - integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== +recursive-readdir@^2.2.2: + version "2.2.3" + resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.3.tgz#e726f328c0d69153bcabd5c322d3195252379372" + integrity sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA== dependencies: - minimatch "3.0.4" + minimatch "^3.0.5" redent@^3.0.0: version "3.0.0" @@ -16598,42 +14403,27 @@ refractor@3.3.1, refractor@^3.2.0, refractor@^3.3.1: parse-entities "^2.0.0" prismjs "~1.23.0" -regenerate-unicode-properties@^8.2.0: - version "8.2.0" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz#e5de7111d655e7ba60c057dbe9ff37c87e65cdec" - integrity sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA== +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== dependencies: - regenerate "^1.4.0" + regenerate "^1.4.2" -regenerate@^1.4.0: +regenerate@^1.4.2: version "1.4.2" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" - integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== - -regenerator-runtime@^0.13.10: - version "0.13.10" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz#ed07b19616bcbec5da6274ebc75ae95634bfc2ee" - integrity 
sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw== - -regenerator-runtime@^0.13.11: +regenerator-runtime@^0.13.11, regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: version "0.13.11" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== -regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.7: - version "0.13.7" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" - integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== - -regenerator-transform@^0.14.2: - version "0.14.5" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.5.tgz#c98da154683671c9c4dcb16ece736517e1b7feb4" - integrity sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw== +regenerator-transform@^0.15.1: + version "0.15.1" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.1.tgz#f6c4e99fc1b4591f780db2586328e4d9a9d8dc56" + integrity sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg== dependencies: "@babel/runtime" "^7.8.4" @@ -16650,14 +14440,6 @@ regex-parser@^2.2.11: resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== -regexp.prototype.flags@^1.2.0, regexp.prototype.flags@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26" - integrity sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - regexp.prototype.flags@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" @@ -16667,22 +14449,17 @@ regexp.prototype.flags@^1.4.3: define-properties "^1.1.3" functions-have-names "^1.2.2" -regexpp@^3.0.0, regexpp@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2" - integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== - -regexpu-core@^4.7.1: - version "4.7.1" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.7.1.tgz#2dea5a9a07233298fbf0db91fa9abc4c6e0f8ad6" - integrity sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ== +regexpu-core@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b" + integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== dependencies: - regenerate "^1.4.0" - regenerate-unicode-properties "^8.2.0" - regjsgen "^0.5.1" - regjsparser "^0.6.4" - unicode-match-property-ecmascript "^1.0.4" - unicode-match-property-value-ecmascript "^1.2.0" + "@babel/regjsgen" "^0.8.0" + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsparser "^0.9.1" + 
unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.1.0" registry-auth-token@^4.0.0: version "4.2.1" @@ -16698,15 +14475,10 @@ registry-url@^5.0.0: dependencies: rc "^1.2.8" -regjsgen@^0.5.1: - version "0.5.2" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.2.tgz#92ff295fb1deecbf6ecdab2543d207e91aa33733" - integrity sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A== - -regjsparser@^0.6.4: - version "0.6.9" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.9.tgz#b489eef7c9a2ce43727627011429cf833a7183e6" - integrity sha512-ZqbNRz1SNjLAiYuwY0zoXW8Ne675IX5q+YHioAGbCw4X96Mjl2+dcX9B2ciaeyYjViDAfvIjFpQjJgLttTEERQ== +regjsparser@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== dependencies: jsesc "~0.5.0" @@ -16870,16 +14642,16 @@ remove-trailing-spaces@^1.0.6: resolved "https://registry.yarnpkg.com/remove-trailing-spaces/-/remove-trailing-spaces-1.0.8.tgz#4354d22f3236374702f58ee373168f6d6887ada7" integrity sha512-O3vsMYfWighyFbTd8hk8VaSj9UAGENxAtX+//ugIst2RMk5e03h6RoIS+0ylsFxY1gvmPuAY/PO4It+gPEeySA== -renderkid@^2.0.4: - version "2.0.5" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.5.tgz#483b1ac59c6601ab30a7a596a5965cabccfdd0a5" - integrity sha512-ccqoLg+HLOHq1vdfYNm4TBeaCDIi1FLt3wGojTDSvdewUv65oTmI3cnT2E4hRjl1gzKZIPK+KZrXzlUYKnR+vQ== +renderkid@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== dependencies: - css-select "^2.0.2" - dom-converter "^0.2" - htmlparser2 "^3.10.1" - lodash "^4.17.20" - strip-ansi "^3.0.0" + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" repeat-element@^1.1.2: version "1.1.4" @@ -16937,33 +14709,16 @@ require-main-filename@^2.0.0: resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== -require-package-name@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/require-package-name/-/require-package-name-2.0.1.tgz#c11e97276b65b8e2923f75dabf5fb2ef0c3841b9" - integrity sha1-wR6XJ2tluOKSP3Xav1+y7ww4Qbk= - requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= -resize-observer-polyfill@1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.0.tgz#660ff1d9712a2382baa2cad450a4716209f9ca69" - integrity sha512-M2AelyJDVR/oLnToJLtuDJRBBWUGUvvGigj1411hXhAdyFWqMaqHp7TixW3FpiLuVaikIcR1QL+zqoJoZlOgpg== - resize-observer-polyfill@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz#0e9020dd3d21024458d4ebd27e23e40269810464" integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== -resolve-cwd@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" - integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= - dependencies: - resolve-from "^3.0.0" - resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" @@ -16976,11 +14731,6 @@ resolve-from@5.0.0, resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== -resolve-from@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" - integrity sha1-six699nWiBvItuZTM17rywoYh0g= - resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" @@ -16991,20 +14741,15 @@ resolve-pathname@^3.0.0: resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd" integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== -resolve-url-loader@^3.1.2: - version "3.1.3" - resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-3.1.3.tgz#49ec68340f67d8d2ab6b401948d5def3ab2d0367" - integrity sha512-WbDSNFiKPPLem1ln+EVTE+bFUBdTTytfQZWbmghroaFNFaAVmGq0Saqw6F/306CwgPXsGwXVxbODE+3xAo/YbA== - dependencies: - adjust-sourcemap-loader "3.0.0" - camelcase "5.3.1" - compose-function "3.0.3" - convert-source-map "1.7.0" - es6-iterator "2.0.3" - loader-utils "1.2.3" - postcss "7.0.21" - rework "1.0.1" - rework-visit "1.0.0" +resolve-url-loader@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== + dependencies: + adjust-sourcemap-loader "^4.0.0" + convert-source-map "^1.7.0" + loader-utils "^2.0.0" + postcss "^7.0.35" source-map "0.6.1" resolve-url@^0.2.1: @@ -17012,39 +14757,29 @@ resolve-url@^0.2.1: resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= -resolve@1.18.1: - version "1.18.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.18.1.tgz#018fcb2c5b207d2a6424aee361c5a266da8f4130" - integrity sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA== - dependencies: - is-core-module "^2.0.0" - path-parse "^1.0.6" +resolve.exports@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.1.tgz#05cfd5b3edf641571fd46fa608b610dda9ead999" + integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ== -resolve@^1.10.0, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.20.0, resolve@^1.3.2, resolve@^1.8.1: - version "1.20.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" - integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== +resolve@^1.1.7, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, 
resolve@^1.22.1, resolve@^1.22.2: + version "1.22.2" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" + integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g== dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" + is-core-module "^2.11.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" -resolve@^1.19.0: - version "1.22.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" - integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== +resolve@^2.0.0-next.4: + version "2.0.0-next.4" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== dependencies: is-core-module "^2.9.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -resolve@^2.0.0-next.3: - version "2.0.0-next.3" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" - integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== - dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" - responselike@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" @@ -17073,46 +14808,16 @@ ret@~0.1.10: resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== -retry@^0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" - integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== reusify@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rework-visit@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/rework-visit/-/rework-visit-1.0.0.tgz#9945b2803f219e2f7aca00adb8bc9f640f842c9a" - integrity sha1-mUWygD8hni96ygCtuLyfZA+ELJo= - -rework@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/rework/-/rework-1.0.1.tgz#30806a841342b54510aa4110850cd48534144aa7" - integrity sha1-MIBqhBNCtUUQqkEQhQzUhTQUSqc= - dependencies: - convert-source-map "^0.3.3" - css "^2.0.0" - -rgb-regex@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/rgb-regex/-/rgb-regex-1.0.1.tgz#c0e0d6882df0e23be254a475e8edd41915feaeb1" - integrity sha1-wODWiC3w4jviVKR16O3UGRX+rrE= - -rgba-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" - integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= - -rimraf@^2.5.4, rimraf@^2.6.3: - version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" - integrity 
sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" - rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -17127,48 +14832,22 @@ rimraf@~2.6.2: dependencies: glob "^7.1.3" -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" - integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - -rollup-plugin-babel@^4.3.3: - version "4.4.0" - resolved "https://registry.yarnpkg.com/rollup-plugin-babel/-/rollup-plugin-babel-4.4.0.tgz#d15bd259466a9d1accbdb2fe2fff17c52d030acb" - integrity sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - rollup-pluginutils "^2.8.1" - -rollup-plugin-terser@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-5.3.1.tgz#8c650062c22a8426c64268548957463bf981b413" - integrity sha512-1pkwkervMJQGFYvM9nscrUoncPwiKR/K+bHdjv6PFgRo3cgPHoRT83y2Aa3GvINj4539S15t/tpFPb775TDs6w== +rollup-plugin-terser@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== dependencies: - "@babel/code-frame" "^7.5.5" - jest-worker "^24.9.0" - rollup-pluginutils "^2.8.2" + "@babel/code-frame" "^7.10.4" + jest-worker "^26.2.1" serialize-javascript "^4.0.0" - terser "^4.6.2" - -rollup-pluginutils@^2.8.1, rollup-pluginutils@^2.8.2: - version "2.8.2" - resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e" - integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ== - dependencies: - estree-walker "^0.6.1" + terser "^5.0.0" -rollup@^1.31.1: - version "1.32.1" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-1.32.1.tgz#4480e52d9d9e2ae4b46ba0d9ddeaf3163940f9c4" - integrity sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A== - dependencies: - "@types/estree" "*" - "@types/node" "*" - acorn "^7.1.0" +rollup@^2.43.1: + version "2.79.1" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" + integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== + optionalDependencies: + fsevents "~2.3.2" rope-sequence@^1.3.0: version "1.3.3" @@ -17196,11 +14875,6 @@ route-recognizer@^0.3.3: resolved "https://registry.npmjs.org/route-recognizer/-/route-recognizer-0.3.4.tgz#39ab1ffbce1c59e6d2bdca416f0932611e4f3ca3" integrity sha512-2+MhsfPhvauN1O8KaXpXAOfR/fwe8dnUXVM+xw7yt40lJRfPVQxV6yryZm0cgRvAj5fMF/mdRZbL2ptwbs5i2g== -rsvp@^4.8.4: - version "4.8.5" - resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" - integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== - rtl-css-js@^1.14.0: version "1.16.0" resolved 
"https://registry.yarnpkg.com/rtl-css-js/-/rtl-css-js-1.16.0.tgz#e8d682982441aadb63cabcb2f7385f3fb78ff26e" @@ -17220,13 +14894,6 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -run-queue@^1.0.0, run-queue@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" - integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= - dependencies: - aproba "^1.1.1" - rxjs@^6.3.3, rxjs@^6.6.0, rxjs@^6.6.3: version "6.6.7" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" @@ -17244,7 +14911,7 @@ safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -17265,43 +14932,25 @@ safe-regex@^1.1.0: dependencies: ret "~0.1.10" -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -sane@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/sane/-/sane-4.1.0.tgz#ed881fd922733a6c461bc189dc2b6c006f3ffded" - integrity sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA== - dependencies: - "@cnakazawa/watch" "^1.0.3" - anymatch "^2.0.0" - capture-exit "^2.0.0" - exec-sh "^0.3.2" - execa "^1.0.0" - fb-watchman "^2.0.0" - micromatch "^3.1.4" - minimist "^1.1.1" - walker "~1.0.5" - -sanitize.css@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-10.0.0.tgz#b5cb2547e96d8629a60947544665243b1dc3657a" - integrity sha512-vTxrZz4dX5W86M6oVWVdOVe72ZiPs41Oi7Z6Km4W5Turyz28mrXSJhhEBZoRtzJWIv3833WKVwLSDWWkEfupMg== +sanitize.css@*: + version "13.0.0" + resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== -sass-loader@^10.0.5: - version "10.2.0" - resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-10.2.0.tgz#3d64c1590f911013b3fa48a0b22a83d5e1494716" - integrity sha512-kUceLzC1gIHz0zNJPpqRsJyisWatGYNFRmv2CKZK2/ngMJgLqxTbXwe/hJ85luyvZkgqU3VlJ33UVF2T/0g6mw== +sass-loader@^12.3.0: + version "12.6.0" + resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== dependencies: klona "^2.0.4" - loader-utils "^2.0.0" 
neo-async "^2.6.2" - schema-utils "^3.0.0" - semver "^7.3.2" -sax@~1.2.4: +sax@^1.2.4, sax@~1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== @@ -17321,16 +14970,16 @@ scheduler@^0.20.2: loose-envify "^1.1.0" object-assign "^4.1.1" -schema-utils@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" - integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== dependencies: - ajv "^6.1.0" - ajv-errors "^1.0.0" - ajv-keywords "^3.1.0" + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" -schema-utils@^2.6.5, schema-utils@^2.7.0, schema-utils@^2.7.1: +schema-utils@^2.6.5: version "2.7.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== @@ -17339,15 +14988,25 @@ schema-utils@^2.6.5, schema-utils@^2.7.0, schema-utils@^2.7.1: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.0.0.tgz#67502f6aa2b66a2d4032b4279a2944978a0913ef" - integrity sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA== +schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== dependencies: - "@types/json-schema" "^7.0.6" + "@types/json-schema" "^7.0.8" ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b" + integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.9.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.1.0" + screenfull@^5.1.0: version "5.2.0" resolved "https://registry.yarnpkg.com/screenfull/-/screenfull-5.2.0.tgz#6533d524d30621fc1283b9692146f3f13a93d1ba" @@ -17385,37 +15044,27 @@ select@^1.1.2: resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= -selfsigned@^1.10.8: - version "1.10.11" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.11.tgz#24929cd906fe0f44b6d01fb23999a739537acbe9" - integrity sha512-aVmbPOfViZqOZPgRBT0+3u4yZFHpmnIghLMlAcb5/xhp5ZtB/RVnKhz5vl2M32CLXAqR4kha9zfhNg0Lf/sxKA== +selfsigned@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== dependencies: - node-forge "^0.10.0" + node-forge "^1" 
-"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: +semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -semver@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" - integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== - -semver@7.3.2: - version "7.3.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.2.tgz#604962b052b81ed0786aae84389ffba70ffd3938" - integrity sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ== - semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.2.1, semver@^7.3.2: - version "7.3.5" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" - integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== +semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: + version "7.5.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" + integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" @@ -17454,10 +15103,10 @@ serialize-javascript@^4.0.0: dependencies: randombytes "^2.1.0" -serialize-javascript@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-5.0.1.tgz#7886ec848049a462467a97d3d918ebb2aaf934f4" - integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== +serialize-javascript@^6.0.0, serialize-javascript@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c" + integrity sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w== dependencies: randombytes "^2.1.0" @@ -17504,7 +15153,7 @@ set-value@^2.0.0, set-value@^2.0.1: is-plain-object "^2.0.3" split-string "^3.0.1" -setimmediate@^1.0.4, setimmediate@^1.0.5: +setimmediate@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= @@ -17519,26 +15168,18 @@ setprototypeof@1.2.0: resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.11" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" - integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity 
sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" + kind-of "^6.0.2" shallowequal@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8" integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= - dependencies: - shebang-regex "^1.0.0" - shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -17546,25 +15187,15 @@ shebang-command@^2.0.0: dependencies: shebang-regex "^3.0.0" -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - shebang-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -shell-quote@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" - integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== - -shellwords@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" - integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== +shell-quote@^1.7.3: + version "1.8.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== side-channel@^1.0.4: version "1.0.4" @@ -17575,7 +15206,7 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" -signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: +signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== @@ -17585,13 +15216,6 @@ signedsource@^1.0.0: resolved "https://registry.yarnpkg.com/signedsource/-/signedsource-1.0.0.tgz#1ddace4981798f93bd833973803d80d52e93ad6a" integrity sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww== -simple-swizzle@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" - integrity sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo= - dependencies: - is-arrayish "^0.3.1" - sinon@^11.1.1: version "11.1.1" resolved "https://registry.yarnpkg.com/sinon/-/sinon-11.1.1.tgz#99a295a8b6f0fadbbb7e004076f3ae54fc6eab91" @@ -17614,20 +15238,16 @@ slash@^3.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity 
sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== +slash@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + slice-ansi@0.0.4: version "0.0.4" resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" integrity sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU= -slice-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" - integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== - dependencies: - ansi-styles "^4.0.0" - astral-regex "^2.0.0" - is-fullwidth-code-point "^3.0.0" - snake-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c" @@ -17666,35 +15286,16 @@ snapdragon@^0.8.1: source-map-resolve "^0.5.0" use "^3.1.0" -sockjs-client@^1.5.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.5.1.tgz#256908f6d5adfb94dabbdbd02c66362cca0f9ea6" - integrity sha512-VnVAb663fosipI/m6pqRXakEOw7nvd7TUgdr3PlR/8V2I95QIdwT8L4nMxhyU8SmDBHYXU1TOElaKOmKLfYzeQ== - dependencies: - debug "^3.2.6" - eventsource "^1.0.7" - faye-websocket "^0.11.3" - inherits "^2.0.4" - json3 "^3.3.3" - url-parse "^1.5.1" - -sockjs@^0.3.21: - version "0.3.21" - resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.21.tgz#b34ffb98e796930b60a0cfa11904d6a339a7d417" - integrity sha512-DhbPFGpxjc6Z3I+uX07Id5ZO2XwYsWOrYjaSeieES78cq+JaJvVe5q/m1uvjIQhXinhIeCFRH6JgXe+mvVMyXw== +sockjs@^0.3.24: + version "0.3.24" + resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== dependencies: faye-websocket "^0.11.3" - uuid "^3.4.0" + uuid "^8.3.2" websocket-driver "^0.7.4" -sort-keys@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" - integrity sha1-RBttTTRnmPG05J6JIK37oOVD+a0= - dependencies: - is-plain-obj "^1.0.0" - -source-list-map@^2.0.0: +source-list-map@^2.0.0, source-list-map@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== @@ -17717,12 +15318,21 @@ source-map-explorer@^2.5.2: temp "^0.9.4" yargs "^16.2.0" -source-map-js@^0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-0.6.2.tgz#0bb5de631b41cfbda6cfba8bd05a80efdfd2385e" - integrity sha512-/3GptzWzu0+0MBQFrDKzw/DvvMTUORvgY6k6jd/VS6iCR4RDTKWH6v6WPwQoUO8667uQEf9Oe38DxAYWY5F/Ug== +source-map-js@^1.0.1, source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-loader@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-3.0.2.tgz#af23192f9b344daa729f6772933194cc5fa54fee" + 
integrity sha512-BokxPoLjyl3iOrgkWaakaxqnelAJSS+0V+De0kKIq6lyWrXuiPgYTGp6z3iHmqljKAaLXwZa+ctD8GccRJeVvg== + dependencies: + abab "^2.0.5" + iconv-lite "^0.6.3" + source-map-js "^1.0.1" -source-map-resolve@^0.5.0, source-map-resolve@^0.5.2: +source-map-resolve@^0.5.0: version "0.5.3" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== @@ -17741,7 +15351,7 @@ source-map-resolve@^0.6.0: atob "^2.1.2" decode-uri-component "^0.2.0" -source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.12, source-map-support@~0.5.19: +source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -17764,16 +15374,23 @@ source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, sourc resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -source-map@^0.5.0, source-map@^0.5.6, source-map@^0.5.7: +source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= -source-map@^0.7.3, source-map@~0.7.2: +source-map@^0.7.3: version "0.7.3" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== +source-map@^0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + sourcemap-codec@^1.4.4, sourcemap-codec@^1.4.8: version "1.4.8" resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" @@ -17784,32 +15401,6 @@ space-separated-tokens@^1.0.0, space-separated-tokens@^1.1.0: resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899" integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== -spdx-correct@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" - integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" - integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== - -spdx-expression-parse@^3.0.0: - version "3.0.1" - resolved 
"https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" - integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.9" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz#8a595135def9592bda69709474f1cbeea7c2467f" - integrity sha512-Ki212dKK4ogX+xDo4CtOZBVIwhsKBEfsEEcwmJfLQzirgc2jIWdzg40Unxz/HzEUqM1WFzVlQSMF9kZZ2HboLQ== - spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" @@ -17879,20 +15470,6 @@ sshpk@^1.7.0: safer-buffer "^2.0.2" tweetnacl "~0.14.0" -ssri@^6.0.1: - version "6.0.2" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" - integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== - dependencies: - figgy-pudding "^3.5.1" - -ssri@^8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af" - integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== - dependencies: - minipass "^3.1.1" - stable@^0.1.8: version "0.1.8" resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" @@ -17905,19 +15482,14 @@ stack-generator@^2.0.5: dependencies: stackframe "^1.3.4" -stack-utils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277" - integrity sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw== +stack-utils@^2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" + integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== dependencies: escape-string-regexp "^2.0.0" -stackframe@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.2.0.tgz#52429492d63c62eb989804c11552e3d22e779303" - integrity sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA== - -stackframe@^1.3.4: +stackframe@^1.1.1, stackframe@^1.3.4: version "1.3.4" resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== @@ -17975,14 +15547,6 @@ statuses@2.0.1: resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= -stream-browserify@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" - integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== - dependencies: - inherits "~2.0.1" - readable-stream "^2.0.2" - stream-combiner@~0.0.4: version "0.0.4" resolved "https://registry.npmjs.org/stream-combiner/-/stream-combiner-0.0.4.tgz#4d5e433c185261dde623ca3f44c586bcf5c4ad14" @@ -17990,35 +15554,6 @@ stream-combiner@~0.0.4: dependencies: duplexer "~0.1.1" 
-stream-each@^1.1.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" - integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== - dependencies: - end-of-stream "^1.1.0" - stream-shift "^1.0.0" - -stream-http@^2.7.2: - version "2.8.3" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" - integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== - dependencies: - builtin-status-codes "^3.0.0" - inherits "^2.0.1" - readable-stream "^2.3.6" - to-arraybuffer "^1.0.0" - xtend "^4.0.0" - -stream-shift@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" - integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== - -strict-uri-encode@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" - integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= - strict-uri-encode@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" @@ -18042,6 +15577,14 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" +string-length@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== + dependencies: + char-regex "^2.0.0" + strip-ansi "^7.0.1" + string-natural-compare@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" @@ -18064,15 +15607,6 @@ string-width@^2.1.1: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" -string-width@^3.0.0, string-width@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" @@ -18082,21 +15616,7 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" -string.prototype.matchall@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.5.tgz#59370644e1db7e4c0c045277690cf7b01203c4da" - integrity sha512-Z5ZaXO0svs0M2xd/6By3qpeKpLKd9mO4v4q3oMEQrk8Ck4xOD5d5XeBOOjGrmVZZ/AHB1S0CgG4N5r1G9N3E2Q== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.2" - get-intrinsic "^1.1.1" - has-symbols "^1.0.2" - internal-slot "^1.0.3" - regexp.prototype.flags "^1.3.1" - side-channel "^1.0.4" - -string.prototype.matchall@^4.0.7: +string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7, string.prototype.matchall@^4.0.8: version "4.0.8" resolved 
"https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz#3bf85722021816dcd1bf38bb714915887ca79fd3" integrity sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg== @@ -18110,14 +15630,6 @@ string.prototype.matchall@^4.0.7: regexp.prototype.flags "^1.4.3" side-channel "^1.0.4" -string.prototype.trimend@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" - integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - string.prototype.trimend@^1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" @@ -18127,14 +15639,6 @@ string.prototype.trimend@^1.0.5: define-properties "^1.1.4" es-abstract "^1.20.4" -string.prototype.trimstart@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" - integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - string.prototype.trimstart@^1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4" @@ -18144,7 +15648,7 @@ string.prototype.trimstart@^1.0.5: define-properties "^1.1.4" es-abstract "^1.20.4" -string_decoder@^1.0.0, string_decoder@^1.1.1: +string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== @@ -18176,13 +15680,6 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -strip-ansi@6.0.0, strip-ansi@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" - integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== - dependencies: - ansi-regex "^5.0.0" - strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" @@ -18197,12 +15694,19 @@ strip-ansi@^4.0.0: dependencies: ansi-regex "^3.0.0" -strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity 
sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== dependencies: - ansi-regex "^4.1.0" + ansi-regex "^6.0.1" strip-bom@^3.0.0: version "3.0.0" @@ -18214,18 +15718,10 @@ strip-bom@^4.0.0: resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== -strip-comments@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-1.0.2.tgz#82b9c45e7f05873bee53f37168af930aa368679d" - integrity sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw== - dependencies: - babel-extract-comments "^1.0.0" - babel-plugin-transform-object-rest-spread "^6.26.0" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= +strip-comments@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== strip-final-newline@^2.0.0: version "2.0.0" @@ -18249,13 +15745,10 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -style-loader@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.3.0.tgz#828b4a3b3b7e7aa5847ce7bae9e874512114249e" - integrity sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q== - dependencies: - loader-utils "^2.0.0" - schema-utils "^2.7.0" +style-loader@^3.3.1: + version "3.3.3" + resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.3.tgz#bba8daac19930169c0c9c96706749a597ae3acff" + integrity sha512-53BiGLXAcll9maCYtZi2RCQZKa8NQQai5C4horqKyRmHj9H7QmcUyucrH+4KW/gBQbXM2AsB0axoEcFZPlfPcw== style-to-object@^0.3.0: version "0.3.0" @@ -18280,14 +15773,13 @@ styled-components@^5.2.1: shallowequal "^1.1.0" supports-color "^5.5.0" -stylehacks@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-4.0.3.tgz#6718fcaf4d1e07d8a1318690881e8d96726a71d5" - integrity sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g== +stylehacks@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.1.1.tgz#7934a34eb59d7152149fa69d6e9e56f2fc34bcc9" + integrity sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw== dependencies: - browserslist "^4.0.0" - postcss "^7.0.0" - postcss-selector-parser "^3.0.0" + browserslist "^4.21.4" + postcss-selector-parser "^6.0.4" stylis@4.1.3, stylis@^4.0.6: version "4.1.3" @@ -18305,6 +15797,19 @@ subscriptions-transport-ws@^0.9.18: symbol-observable "^1.0.4" ws "^5.2.0" +sucrase@^3.32.0: + version "3.32.0" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" + integrity sha512-ydQOU34rpSyj2TGyz4D2p8rbktIOZ8QY9s+DGLvFU1i5pWJE8vkpruCjGCMHsdXwnD7JDcS+noSwM/a7zyNFDQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.2" + commander 
"^4.0.0" + glob "7.1.6" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.1" + ts-interface-checker "^0.1.9" + supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" @@ -18317,13 +15822,6 @@ supports-color@^5.3.0, supports-color@^5.5.0: dependencies: has-flag "^3.0.0" -supports-color@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" - integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== - dependencies: - has-flag "^3.0.0" - supports-color@^7.0.0, supports-color@^7.1.0, supports-color@^7.2.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" @@ -18331,6 +15829,13 @@ supports-color@^7.0.0, supports-color@^7.1.0, supports-color@^7.2.0: dependencies: has-flag "^4.0.0" +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-hyperlinks@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" @@ -18361,7 +15866,7 @@ svgmoji@^3.2.0: "@svgmoji/openmoji" "^3.2.0" "@svgmoji/twemoji" "^3.2.0" -svgo@^1.0.0, svgo@^1.2.2: +svgo@^1.2.2: version "1.3.2" resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== @@ -18380,6 +15885,19 @@ svgo@^1.0.0, svgo@^1.2.2: unquote "~1.1.1" util.promisify "~1.0.0" +svgo@^2.7.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + swap-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/swap-case/-/swap-case-2.0.2.tgz#671aedb3c9c137e2985ef51c51f9e98445bf70d9" @@ -18410,39 +15928,49 @@ sync-fetch@0.3.0: buffer "^5.7.0" node-fetch "^2.6.1" -table@^6.0.9: - version "6.7.1" - resolved "https://registry.yarnpkg.com/table/-/table-6.7.1.tgz#ee05592b7143831a8c94f3cee6aae4c1ccef33e2" - integrity sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg== - dependencies: - ajv "^8.0.1" - lodash.clonedeep "^4.5.0" - lodash.truncate "^4.4.2" - slice-ansi "^4.0.0" - string-width "^4.2.0" - strip-ansi "^6.0.0" - -tapable@^1.0.0, tapable@^1.1.3: +tailwindcss@^3.0.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.2.tgz#2f9e35d715fdf0bbf674d90147a0684d7054a2d3" + integrity sha512-9jPkMiIBXvPc2KywkraqsUfbfj+dHDb+JPWtSJa9MLFdrPyazI7q6WX2sUrm7R9eVR7qqv3Pas7EvQFzxKnI6w== + dependencies: + "@alloc/quick-lru" "^5.2.0" + arg "^5.0.2" + chokidar "^3.5.3" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.12" + glob-parent "^6.0.2" + is-glob "^4.0.3" + jiti "^1.18.2" + lilconfig "^2.1.0" + micromatch 
"^4.0.5" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.4.23" + postcss-import "^15.1.0" + postcss-js "^4.0.1" + postcss-load-config "^4.0.1" + postcss-nested "^6.0.1" + postcss-selector-parser "^6.0.11" + postcss-value-parser "^4.2.0" + resolve "^1.22.2" + sucrase "^3.32.0" + +tapable@^1.0.0: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== -tar@^6.0.2: - version "6.1.0" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.0.tgz#d1724e9bcc04b977b18d5c573b333a2207229a83" - integrity sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^3.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -temp-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" - integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0= +temp-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== temp@^0.9.4: version "0.9.4" @@ -18452,14 +15980,15 @@ temp@^0.9.4: mkdirp "^0.5.1" rimraf "~2.6.2" -tempy@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.3.0.tgz#6f6c5b295695a16130996ad5ab01a8bd726e8bf8" - integrity sha512-WrH/pui8YCwmeiAoxV+lpRH9HpRtgBhSR2ViBPgpGb/wnYDzp21R4MN45fsCGvLROvY67o3byhJRYRONJyImVQ== +tempy@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== dependencies: - temp-dir "^1.0.0" - type-fest "^0.3.1" - unique-string "^1.0.0" + is-stream "^2.0.0" + temp-dir "^2.0.0" + type-fest "^0.16.0" + unique-string "^2.0.0" terminal-link@^2.0.0: version "2.1.1" @@ -18469,53 +15998,26 @@ terminal-link@^2.0.0: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" -terser-webpack-plugin@4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-4.2.3.tgz#28daef4a83bd17c1db0297070adc07fc8cfc6a9a" - integrity sha512-jTgXh40RnvOrLQNgIkwEKnQ8rmHjHK4u+6UBEi+W+FPmvb+uo+chJXntKe7/3lW5mNysgSWD60KyesnhW8D6MQ== - dependencies: - cacache "^15.0.5" - find-cache-dir "^3.3.1" - jest-worker "^26.5.0" - p-limit "^3.0.2" - schema-utils "^3.0.0" - serialize-javascript "^5.0.1" - source-map "^0.6.1" - terser "^5.3.4" - webpack-sources "^1.4.3" - -terser-webpack-plugin@^1.4.3: - version "1.4.5" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz#a217aefaea330e734ffacb6120ec1fa312d6040b" - integrity sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw== - dependencies: - cacache "^12.0.2" - find-cache-dir "^2.1.0" - is-wsl "^1.1.0" - schema-utils "^1.0.0" - serialize-javascript "^4.0.0" - source-map 
"^0.6.1" - terser "^4.1.2" - webpack-sources "^1.4.0" - worker-farm "^1.7.0" - -terser@^4.1.2, terser@^4.6.2, terser@^4.6.3: - version "4.8.1" - resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.1.tgz#a00e5634562de2239fd404c649051bf6fc21144f" - integrity sha512-4GnLC0x667eJG0ewJTa6z/yXrbLGv80D9Ru6HIpCQmO+Q4PfEtBFi0ObSckqwL6VyQv/7ENJieXHo2ANmdQwgw== +terser-webpack-plugin@^5.2.5, terser-webpack-plugin@^5.3.7: + version "5.3.9" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" + integrity sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA== dependencies: - commander "^2.20.0" - source-map "~0.6.1" - source-map-support "~0.5.12" + "@jridgewell/trace-mapping" "^0.3.17" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.16.8" -terser@^5.3.4: - version "5.7.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.7.0.tgz#a761eeec206bc87b605ab13029876ead938ae693" - integrity sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g== +terser@^5.0.0, terser@^5.10.0, terser@^5.16.8: + version "5.18.1" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.18.1.tgz#6d8642508ae9fb7b48768e48f16d675c89a78460" + integrity sha512-j1n0Ao919h/Ai5r43VAnfV/7azUYW43GPxK7qSATzrsERfW7+y2QW9Cp9ufnRF5CQUWbnLSo7UJokSWCqg4tsQ== dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" commander "^2.20.0" - source-map "~0.7.2" - source-map-support "~0.5.19" + source-map-support "~0.5.20" test-exclude@^6.0.0: version "6.0.0" @@ -18526,29 +16028,35 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" -text-table@0.2.0, text-table@^0.2.0: +text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= -throat@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b" - integrity sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA== +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +throat@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.2.tgz#51a3fbb5e11ae72e2cf74861ed5c8020f89f29fe" + integrity sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ== throttle-debounce@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb" integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg== -through2@^2.0.0: - version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" - integrity 
sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== - dependencies: - readable-stream "~2.3.6" - xtend "~4.0.1" - through@2, through@^2.3.6, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" @@ -18559,18 +16067,6 @@ thunky@^1.0.2: resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== -timers-browserify@^2.0.4: - version "2.0.12" - resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.12.tgz#44a45c11fbf407f34f97bccd1577c652361b00ee" - integrity sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ== - dependencies: - setimmediate "^1.0.4" - -timsort@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" - integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= - tiny-emitter@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" @@ -18610,11 +16106,6 @@ tmpl@1.0.x: resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= -to-arraybuffer@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" - integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= - to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" @@ -18684,6 +16175,13 @@ tough-cookie@~2.5.0: psl "^1.1.28" punycode "^2.1.1" +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + tr46@^2.0.2: version "2.1.0" resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" @@ -18711,6 +16209,11 @@ ts-easing@^0.2.0: resolved "https://registry.yarnpkg.com/ts-easing/-/ts-easing-0.2.0.tgz#c8a8a35025105566588d87dbda05dd7fbfa5a4ec" integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== +ts-interface-checker@^0.1.9: + version "0.1.13" + resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== + ts-invariant@^0.7.0: version "0.7.3" resolved "https://registry.yarnpkg.com/ts-invariant/-/ts-invariant-0.7.3.tgz#13aae22a4a165393aaf5cecdee45ef4128d358b8" @@ -18723,6 +16226,25 @@ ts-log@^2.2.3: resolved "https://registry.yarnpkg.com/ts-log/-/ts-log-2.2.3.tgz#4da5640fe25a9fb52642cd32391c886721318efb" integrity sha512-XvB+OdKSJ708Dmf9ore4Uf/q62AYDTzFcAdxc8KNML1mmAWywRFVt/dn1KYJH8Agt5UJNujfM3znU5PxgAzA2w== +ts-node@^10.7.0: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity 
sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + ts-node@^9: version "9.1.1" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-9.1.1.tgz#51a9a450a3e959401bda5f004a72d54b936d376d" @@ -18735,19 +16257,14 @@ ts-node@^9: source-map-support "^0.5.17" yn "3.1.1" -ts-pnp@1.2.0, ts-pnp@^1.1.6: - version "1.2.0" - resolved "https://registry.yarnpkg.com/ts-pnp/-/ts-pnp-1.2.0.tgz#a500ad084b0798f1c3071af391e65912c86bca92" - integrity sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw== - -tsconfig-paths@^3.9.0: - version "3.9.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz#098547a6c4448807e8fcb8eae081064ee9a3c90b" - integrity sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw== +tsconfig-paths@^3.14.1: + version "3.14.2" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" + integrity sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g== dependencies: "@types/json5" "^0.0.29" - json5 "^1.0.1" - minimist "^1.2.0" + json5 "^1.0.2" + minimist "^1.2.6" strip-bom "^3.0.0" tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: @@ -18755,12 +16272,7 @@ tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2, tslib@^2.0.3, tslib@^2.1.0, tslib@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c" - integrity sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w== - -tslib@^2.0.0, tslib@^2.3.0, tslib@^2.4.0: +tslib@^2, tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: version "2.4.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e" integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== @@ -18775,6 +16287,11 @@ tslib@~2.1.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== +tslib@~2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c" + integrity sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w== + tslib@~2.3.0: version "2.3.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" @@ -18785,18 +16302,13 @@ tslib@~2.5.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913" integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w== -tsutils@^3.17.1: +tsutils@^3.21.0: version "3.21.0" resolved 
"https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== dependencies: tslib "^1.8.1" -tty-browserify@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" - integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= - tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" @@ -18840,6 +16352,11 @@ type-detect@4.0.8, type-detect@^4.0.8: resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== +type-fest@^0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== + type-fest@^0.20.2: version "0.20.2" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" @@ -18850,21 +16367,6 @@ type-fest@^0.21.3: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== -type-fest@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" - integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== - -type-fest@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" - integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== - -type-fest@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" - integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== - type-fest@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1" @@ -18883,16 +16385,6 @@ type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" -type@^1.0.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" - integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== - -type@^2.0.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/type/-/type-2.5.0.tgz#0a2e78c2e77907b252abe5f298c1b01c63f0db3d" - integrity sha512-180WMDQaIMm3+7hGXWf12GtdniDEy7nYcyFMKJn/eZz/6tSLXrUN9V0wKSbMjej0I1WHWbpREDEKHtqPQa9NNw== - typedarray-to-buffer@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" @@ -18900,16 +16392,6 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= 
- -typescript@^4.1.3: - version "4.2.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.2.4.tgz#8610b59747de028fda898a8aef0e103f156d0961" - integrity sha512-V+evlYHZnQkaz8TRBuxTA92yZBPotr5H+WhQ7bD3hZUndx5tGOa1fuCgeSjxAzM1RiN5IzvadIXTVefuuwZCRg== - typescript@^4.8.4: version "4.9.4" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.4.tgz#a2a3d2756c079abda241d75f149df9d561091e78" @@ -18920,16 +16402,6 @@ ua-parser-js@^0.7.30: resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532" integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw== -unbox-primitive@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" - integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== - dependencies: - function-bind "^1.1.1" - has-bigints "^1.0.1" - has-symbols "^1.0.2" - which-boxed-primitive "^1.0.2" - unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" @@ -18945,28 +16417,28 @@ unc-path-regex@^0.1.2: resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" integrity sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg== -unicode-canonical-property-names-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" - integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== -unicode-match-property-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" - integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== dependencies: - unicode-canonical-property-names-ecmascript "^1.0.4" - unicode-property-aliases-ecmascript "^1.0.4" + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" -unicode-match-property-value-ecmascript@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz#0d91f600eeeb3096aa962b1d6fc88876e64ea531" - integrity sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ== +unicode-match-property-value-ecmascript@^2.1.0: + version 
"2.1.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" + integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== -unicode-property-aliases-ecmascript@^1.0.4: - version "1.1.0" - resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz#dd57a99f6207bedff4628abefb94c50db941c8f4" - integrity sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg== +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== unified@^9.0.0: version "9.2.1" @@ -18990,36 +16462,12 @@ union-value@^1.0.0: is-extendable "^0.1.1" set-value "^2.0.1" -uniq@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" - integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= - -uniqs@^2.0.0: +unique-string@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" - integrity sha1-/+3ks2slKQaW5uFl1KWe25mOawI= - -unique-filename@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" - integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== - dependencies: - unique-slug "^2.0.0" - -unique-slug@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" - integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== - dependencies: - imurmurhash "^0.1.4" - -unique-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a" - integrity sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo= + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== dependencies: - crypto-random-string "^1.0.0" + crypto-random-string "^2.0.0" unist-builder@^2.0.0: version "2.0.3" @@ -19109,15 +16557,15 @@ unset-value@^1.0.0: has-value "^0.3.1" isobject "^3.0.0" -upath@^1.1.1, upath@^1.1.2, upath@^1.2.0: +upath@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== -update-browserslist-db@^1.0.9: - version "1.0.10" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" - integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== +update-browserslist-db@^1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" + integrity 
sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA== dependencies: escalade "^3.1.1" picocolors "^1.0.0" @@ -19148,15 +16596,6 @@ urix@^0.1.0: resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= -url-loader@4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-4.1.1.tgz#28505e905cae158cf07c92ca622d7f237e70a4e2" - integrity sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA== - dependencies: - loader-utils "^2.0.0" - mime-types "^2.1.27" - schema-utils "^3.0.0" - url-parse-lax@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" @@ -19164,30 +16603,6 @@ url-parse-lax@^3.0.0: dependencies: prepend-http "^2.0.0" -url-parse@^1.4.3: - version "1.5.10" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" - integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -url-parse@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.1.tgz#d5fa9890af8a5e1f274a2c98376510f6425f6e3b" - integrity sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -url@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" - integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= - dependencies: - punycode "1.3.2" - querystring "0.2.0" - use-callback-ref@^1.2.5: version "1.3.0" resolved "https://registry.yarnpkg.com/use-callback-ref/-/use-callback-ref-1.3.0.tgz#772199899b9c9a50526fedc4993fc7fa1f7e32d5" @@ -19225,14 +16640,6 @@ util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= -util.promisify@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" - integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== - dependencies: - define-properties "^1.1.2" - object.getownpropertydescriptors "^2.0.3" - util.promisify@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" @@ -19243,20 +16650,6 @@ util.promisify@~1.0.0: has-symbols "^1.0.1" object.getownpropertydescriptors "^2.1.0" -util@0.10.3: - version "0.10.3" - resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" - integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= - dependencies: - inherits "2.0.1" - -util@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" - integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== - dependencies: - inherits "2.0.3" - utila@~0.4: version "0.4.0" resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" @@ -19267,25 
+16660,25 @@ utils-merge@1.0.1: resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== -uuid@^3.0.0, uuid@^3.3.2, uuid@^3.4.0: +uuid@^3.0.0, uuid@^3.3.2: version "3.4.0" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^8.3.0, uuid@^8.3.2: +uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== -v8-to-istanbul@^7.0.0: - version "7.1.2" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1" - integrity sha512-TxNb7YEUwkLXCQYeudi6lgQ/SZrzNO4kMdlqVxaZPUIUjCv6iSSypUQX70kNBSERpQ8fk48+d61FXk+tgqcWow== +v8-to-istanbul@^8.1.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== dependencies: "@types/istanbul-lib-coverage" "^2.0.1" convert-source-map "^1.6.0" @@ -19296,14 +16689,6 @@ valid-url@1.0.9, valid-url@^1.0.9: resolved "https://registry.yarnpkg.com/valid-url/-/valid-url-1.0.9.tgz#1c14479b40f1397a75782f115e4086447433a200" integrity sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA= -validate-npm-package-license@^3.0.1: - version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - value-equal@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" @@ -19319,11 +16704,6 @@ vary@~1.1.2: resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== -vendors@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.4.tgz#e2b800a53e7a29b93506c3cf41100d16c4c4ad8e" - integrity sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w== - verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" @@ -19361,11 +16741,6 @@ virtualizedtableforantd4@^1.2.1: resolved 
"https://registry.yarnpkg.com/virtualizedtableforantd4/-/virtualizedtableforantd4-1.2.1.tgz#331e8d2f203cdee6667cb5b9cbd7af823f99f65a" integrity sha512-Hl21jF3WZESanz/iKIjvbjeZ5gGX2t85h2cWQFJAagOQnN7t/pvC4kXhfYNseJtaiU6QHOm5RgX3ud+oXeST1Q== -vm-browserify@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" - integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== - w3c-hr-time@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" @@ -19396,30 +16771,20 @@ wait-on@5.3.0: minimist "^1.2.5" rxjs "^6.6.3" -walker@^1.0.7, walker@~1.0.5: +walker@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" integrity sha1-L3+bj9ENZ3JisYqITijRlhjgKPs= dependencies: makeerror "1.0.x" -watchpack-chokidar2@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz#38500072ee6ece66f3769936950ea1771be1c957" - integrity sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww== - dependencies: - chokidar "^2.1.8" - -watchpack@^1.7.4: - version "1.7.5" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.7.5.tgz#1267e6c55e0b9b5be44c2023aed5437a2c26c453" - integrity sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ== +watchpack@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== dependencies: + glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" - neo-async "^2.5.0" - optionalDependencies: - chokidar "^3.4.1" - watchpack-chokidar2 "^2.0.1" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" @@ -19443,6 +16808,11 @@ webidl-conversions@^3.0.0: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + webidl-conversions@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" @@ -19453,82 +16823,70 @@ webidl-conversions@^6.1.0: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== -webpack-dev-middleware@^3.7.2: - version "3.7.3" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz#0639372b143262e2b84ab95d3b91a7597061c2c5" - integrity sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ== +webpack-dev-middleware@^5.3.1: + version "5.3.3" + resolved 
"https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== dependencies: - memory-fs "^0.4.1" - mime "^2.4.4" - mkdirp "^0.5.1" + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" range-parser "^1.2.1" - webpack-log "^2.0.0" - -webpack-dev-server@3.11.1: - version "3.11.1" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.11.1.tgz#c74028bf5ba8885aaf230e48a20e8936ab8511f0" - integrity sha512-u4R3mRzZkbxQVa+MBWi2uVpB5W59H3ekZAJsQlKUTdl7Elcah2EhygTPLmeFXybQkf9i2+L0kn7ik9SnXa6ihQ== - dependencies: - ansi-html "0.0.7" - bonjour "^3.5.0" - chokidar "^2.1.8" + schema-utils "^4.0.0" + +webpack-dev-server@^4.6.0: + version "4.15.1" + resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz#8944b29c12760b3a45bdaa70799b17cb91b03df7" + integrity sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.5" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" compression "^1.7.4" - connect-history-api-fallback "^1.6.0" - debug "^4.1.1" - del "^4.1.1" - express "^4.17.1" - html-entities "^1.3.1" - http-proxy-middleware "0.19.1" - import-local "^2.0.0" - internal-ip "^4.3.0" - ip "^1.1.5" - is-absolute-url "^3.0.3" - killable "^1.0.1" - loglevel "^1.6.8" - opn "^5.5.0" - p-retry "^3.0.1" - portfinder "^1.0.26" - schema-utils "^1.0.0" - selfsigned "^1.10.8" - semver "^6.3.0" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + launch-editor "^2.6.0" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + selfsigned "^2.1.1" serve-index "^1.9.1" - sockjs "^0.3.21" - sockjs-client "^1.5.0" + sockjs "^0.3.24" spdy "^4.0.2" - strip-ansi "^3.0.1" - supports-color "^6.1.0" - url "^0.11.0" - webpack-dev-middleware "^3.7.2" - webpack-log "^2.0.0" - ws "^6.2.1" - yargs "^13.3.2" - -webpack-log@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" - integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== - dependencies: - ansi-colors "^3.0.0" - uuid "^3.3.2" + webpack-dev-middleware "^5.3.1" + ws "^8.13.0" -webpack-manifest-plugin@2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-2.2.0.tgz#19ca69b435b0baec7e29fbe90fb4015de2de4f16" - integrity sha512-9S6YyKKKh/Oz/eryM1RyLVDVmy3NSPV0JXMRhZ18fJsq+AwGxUY34X54VNwkzYcEmEkDwNxuEOboCZEebJXBAQ== +webpack-manifest-plugin@^4.0.2: + version "4.1.1" + resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== dependencies: - fs-extra "^7.0.0" - lodash ">=3.5 <5" - object.entries "^1.1.0" - tapable "^1.0.0" + tapable "^2.0.0" + webpack-sources "^2.2.0" 
-webpack-merge@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-4.2.2.tgz#a27c52ea783d1398afd2087f547d7b9d2f43634d" - integrity sha512-TUE1UGoTX2Cd42j3krGYqObZbOD+xF7u28WB7tfUordytSjbWTIjK/8V0amkBfTYN4/pB/GIDlJZZ657BGG19g== +webpack-merge@^5.8.0: + version "5.9.0" + resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.9.0.tgz#dc160a1c4cf512ceca515cc231669e9ddb133826" + integrity sha512-6NbRQw4+Sy50vYNTw7EyOn41OZItPiXB8GNv3INSoe3PSFaHJEz3SHTrYVaRm2LilNGnFUzh0FAwqPEmU/CwDg== dependencies: - lodash "^4.17.15" + clone-deep "^4.0.1" + wildcard "^2.0.0" -webpack-sources@^1.1.0, webpack-sources@^1.3.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1, webpack-sources@^1.4.3: +webpack-sources@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== @@ -19536,34 +16894,48 @@ webpack-sources@^1.1.0, webpack-sources@^1.3.0, webpack-sources@^1.4.0, webpack- source-list-map "^2.0.0" source-map "~0.6.1" -webpack@4.44.2: - version "4.44.2" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.44.2.tgz#6bfe2b0af055c8b2d1e90ed2cd9363f841266b72" - integrity sha512-6KJVGlCxYdISyurpQ0IPTklv+DULv05rs2hseIXer6D7KrUicRDLFb4IUM1S6LUAKypPM/nSiVSuv8jHu1m3/Q== - dependencies: - "@webassemblyjs/ast" "1.9.0" - "@webassemblyjs/helper-module-context" "1.9.0" - "@webassemblyjs/wasm-edit" "1.9.0" - "@webassemblyjs/wasm-parser" "1.9.0" - acorn "^6.4.1" - ajv "^6.10.2" - ajv-keywords "^3.4.1" +webpack-sources@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.64.4: + version "5.88.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.0.tgz#a07aa2f8e7a64a8f1cec0c6c2e180e3cb34440c8" + integrity sha512-O3jDhG5e44qIBSi/P6KpcCcH7HD+nYIHVBhdWFxcLOcIGN8zGo5nqF3BjyNCxIh4p1vFdNnreZv2h2KkoAw3lw== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^1.0.0" + "@webassemblyjs/ast" "^1.11.5" + "@webassemblyjs/wasm-edit" "^1.11.5" + "@webassemblyjs/wasm-parser" "^1.11.5" + acorn "^8.7.1" + acorn-import-assertions "^1.9.0" + browserslist "^4.14.5" chrome-trace-event "^1.0.2" - enhanced-resolve "^4.3.0" - eslint-scope "^4.0.3" - json-parse-better-errors "^1.0.2" - loader-runner "^2.4.0" - loader-utils "^1.2.3" - memory-fs "^0.4.1" - micromatch "^3.1.10" - mkdirp "^0.5.3" - neo-async "^2.6.1" - node-libs-browser "^2.2.1" - schema-utils "^1.0.0" - tapable "^1.1.3" - terser-webpack-plugin "^1.4.3" - watchpack "^1.7.4" - webpack-sources "^1.4.1" + enhanced-resolve "^5.15.0" + es-module-lexer "^1.2.1" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.2.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.3.7" + watchpack "^2.4.0" + 
webpack-sources "^3.2.3" websocket-driver@>=0.5.1, websocket-driver@^0.7.4: version "0.7.4" @@ -19586,7 +16958,7 @@ whatwg-encoding@^1.0.5: dependencies: iconv-lite "0.4.24" -whatwg-fetch@^3.4.1: +whatwg-fetch@^3.4.1, whatwg-fetch@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== @@ -19604,6 +16976,15 @@ whatwg-url@^5.0.0: tr46 "~0.0.3" webidl-conversions "^3.0.0" +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + whatwg-url@^8.0.0, whatwg-url@^8.5.0: version "8.5.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.5.0.tgz#7752b8464fc0903fec89aa9846fc9efe07351fd3" @@ -19629,191 +17010,198 @@ which-module@^2.0.0: resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= -which@^1.2.9, which@^1.3.1: +which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" -which@^2.0.1, which@^2.0.2: +which@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" +wildcard@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67" + integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ== + word-wrap@^1.2.3, word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== -workbox-background-sync@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-5.1.4.tgz#5ae0bbd455f4e9c319e8d827c055bb86c894fd12" - integrity sha512-AH6x5pYq4vwQvfRDWH+vfOePfPIYQ00nCEB7dJRU1e0n9+9HMRyvI63FlDvtFT2AvXVRsXvUt7DNMEToyJLpSA== +workbox-background-sync@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-6.6.1.tgz#08d603a33717ce663e718c30cc336f74909aff2f" + integrity sha512-trJd3ovpWCvzu4sW0E8rV3FUyIcC0W8G+AZ+VcqzzA890AsWZlUGOTSxIMmIHVusUw/FDq1HFWfy/kC/WTRqSg== dependencies: - workbox-core "^5.1.4" + idb "^7.0.1" + workbox-core "6.6.1" -workbox-broadcast-update@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-5.1.4.tgz#0eeb89170ddca7f6914fa3523fb14462891f2cfc" - integrity sha512-HTyTWkqXvHRuqY73XrwvXPud/FN6x3ROzkfFPsRjtw/kGZuZkPzfeH531qdUGfhtwjmtO/ZzXcWErqVzJNdXaA== +workbox-broadcast-update@6.6.1: + version "6.6.1" + resolved 
"https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-6.6.1.tgz#0fad9454cf8e4ace0c293e5617c64c75d8a8c61e" + integrity sha512-fBhffRdaANdeQ1V8s692R9l/gzvjjRtydBOvR6WCSB0BNE2BacA29Z4r9/RHd9KaXCPl6JTdI9q0bR25YKP8TQ== dependencies: - workbox-core "^5.1.4" + workbox-core "6.6.1" -workbox-build@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-5.1.4.tgz#23d17ed5c32060c363030c8823b39d0eabf4c8c7" - integrity sha512-xUcZn6SYU8usjOlfLb9Y2/f86Gdo+fy1fXgH8tJHjxgpo53VVsqRX0lUDw8/JuyzNmXuo8vXX14pXX2oIm9Bow== +workbox-build@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-6.6.1.tgz#6010e9ce550910156761448f2dbea8cfcf759cb0" + integrity sha512-INPgDx6aRycAugUixbKgiEQBWD0MPZqU5r0jyr24CehvNuLPSXp/wGOpdRJmts656lNiXwqV7dC2nzyrzWEDnw== dependencies: - "@babel/core" "^7.8.4" - "@babel/preset-env" "^7.8.4" - "@babel/runtime" "^7.8.4" - "@hapi/joi" "^15.1.0" - "@rollup/plugin-node-resolve" "^7.1.1" - "@rollup/plugin-replace" "^2.3.1" - "@surma/rollup-plugin-off-main-thread" "^1.1.1" + "@apideck/better-ajv-errors" "^0.3.1" + "@babel/core" "^7.11.1" + "@babel/preset-env" "^7.11.0" + "@babel/runtime" "^7.11.2" + "@rollup/plugin-babel" "^5.2.0" + "@rollup/plugin-node-resolve" "^11.2.1" + "@rollup/plugin-replace" "^2.4.1" + "@surma/rollup-plugin-off-main-thread" "^2.2.3" + ajv "^8.6.0" common-tags "^1.8.0" fast-json-stable-stringify "^2.1.0" - fs-extra "^8.1.0" + fs-extra "^9.0.1" glob "^7.1.6" - lodash.template "^4.5.0" + lodash "^4.17.20" pretty-bytes "^5.3.0" - rollup "^1.31.1" - rollup-plugin-babel "^4.3.3" - rollup-plugin-terser "^5.3.1" - source-map "^0.7.3" - source-map-url "^0.4.0" + rollup "^2.43.1" + rollup-plugin-terser "^7.0.0" + source-map "^0.8.0-beta.0" stringify-object "^3.3.0" - strip-comments "^1.0.2" - tempy "^0.3.0" + strip-comments "^2.0.1" + tempy "^0.6.0" upath "^1.2.0" - workbox-background-sync "^5.1.4" - workbox-broadcast-update "^5.1.4" - workbox-cacheable-response "^5.1.4" - workbox-core "^5.1.4" - workbox-expiration "^5.1.4" - workbox-google-analytics "^5.1.4" - workbox-navigation-preload "^5.1.4" - workbox-precaching "^5.1.4" - workbox-range-requests "^5.1.4" - workbox-routing "^5.1.4" - workbox-strategies "^5.1.4" - workbox-streams "^5.1.4" - workbox-sw "^5.1.4" - workbox-window "^5.1.4" - -workbox-cacheable-response@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-5.1.4.tgz#9ff26e1366214bdd05cf5a43da9305b274078a54" - integrity sha512-0bfvMZs0Of1S5cdswfQK0BXt6ulU5kVD4lwer2CeI+03czHprXR3V4Y8lPTooamn7eHP8Iywi5QjyAMjw0qauA== - dependencies: - workbox-core "^5.1.4" - -workbox-core@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-5.1.4.tgz#8bbfb2362ecdff30e25d123c82c79ac65d9264f4" - integrity sha512-+4iRQan/1D8I81nR2L5vcbaaFskZC2CL17TLbvWVzQ4qiF/ytOGF6XeV54pVxAvKUtkLANhk8TyIUMtiMw2oDg== - -workbox-expiration@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-5.1.4.tgz#92b5df461e8126114943a3b15c55e4ecb920b163" - integrity sha512-oDO/5iC65h2Eq7jctAv858W2+CeRW5e0jZBMNRXpzp0ZPvuT6GblUiHnAsC5W5lANs1QS9atVOm4ifrBiYY7AQ== - dependencies: - workbox-core "^5.1.4" - -workbox-google-analytics@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-5.1.4.tgz#b3376806b1ac7d7df8418304d379707195fa8517" - integrity 
sha512-0IFhKoEVrreHpKgcOoddV+oIaVXBFKXUzJVBI+nb0bxmcwYuZMdteBTp8AEDJacENtc9xbR0wa9RDCnYsCDLjA== - dependencies: - workbox-background-sync "^5.1.4" - workbox-core "^5.1.4" - workbox-routing "^5.1.4" - workbox-strategies "^5.1.4" - -workbox-navigation-preload@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-5.1.4.tgz#30d1b720d26a05efc5fa11503e5cc1ed5a78902a" - integrity sha512-Wf03osvK0wTflAfKXba//QmWC5BIaIZARU03JIhAEO2wSB2BDROWI8Q/zmianf54kdV7e1eLaIEZhth4K4MyfQ== - dependencies: - workbox-core "^5.1.4" - -workbox-precaching@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-5.1.4.tgz#874f7ebdd750dd3e04249efae9a1b3f48285fe6b" - integrity sha512-gCIFrBXmVQLFwvAzuGLCmkUYGVhBb7D1k/IL7pUJUO5xacjLcFUaLnnsoVepBGAiKw34HU1y/YuqvTKim9qAZA== - dependencies: - workbox-core "^5.1.4" - -workbox-range-requests@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-5.1.4.tgz#7066a12c121df65bf76fdf2b0868016aa2bab859" - integrity sha512-1HSujLjgTeoxHrMR2muDW2dKdxqCGMc1KbeyGcmjZZAizJTFwu7CWLDmLv6O1ceWYrhfuLFJO+umYMddk2XMhw== - dependencies: - workbox-core "^5.1.4" - -workbox-routing@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-5.1.4.tgz#3e8cd86bd3b6573488d1a2ce7385e547b547e970" - integrity sha512-8ljknRfqE1vEQtnMtzfksL+UXO822jJlHTIR7+BtJuxQ17+WPZfsHqvk1ynR/v0EHik4x2+826Hkwpgh4GKDCw== - dependencies: - workbox-core "^5.1.4" - -workbox-strategies@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-5.1.4.tgz#96b1418ccdfde5354612914964074d466c52d08c" - integrity sha512-VVS57LpaJTdjW3RgZvPwX0NlhNmscR7OQ9bP+N/34cYMDzXLyA6kqWffP6QKXSkca1OFo/v6v7hW7zrrguo6EA== - dependencies: - workbox-core "^5.1.4" - workbox-routing "^5.1.4" - -workbox-streams@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-5.1.4.tgz#05754e5e3667bdc078df2c9315b3f41210d8cac0" - integrity sha512-xU8yuF1hI/XcVhJUAfbQLa1guQUhdLMPQJkdT0kn6HP5CwiPOGiXnSFq80rAG4b1kJUChQQIGPrq439FQUNVrw== - dependencies: - workbox-core "^5.1.4" - workbox-routing "^5.1.4" - -workbox-sw@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-5.1.4.tgz#2bb34c9f7381f90d84cef644816d45150011d3db" - integrity sha512-9xKnKw95aXwSNc8kk8gki4HU0g0W6KXu+xks7wFuC7h0sembFnTrKtckqZxbSod41TDaGh+gWUA5IRXrL0ECRA== - -workbox-webpack-plugin@5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-5.1.4.tgz#7bfe8c16e40fe9ed8937080ac7ae9c8bde01e79c" - integrity sha512-PZafF4HpugZndqISi3rZ4ZK4A4DxO8rAqt2FwRptgsDx7NF8TVKP86/huHquUsRjMGQllsNdn4FNl8CD/UvKmQ== - dependencies: - "@babel/runtime" "^7.5.5" - fast-json-stable-stringify "^2.0.0" - source-map-url "^0.4.0" - upath "^1.1.2" - webpack-sources "^1.3.0" - workbox-build "^5.1.4" - -workbox-window@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-5.1.4.tgz#2740f7dea7f93b99326179a62f1cc0ca2c93c863" - integrity sha512-vXQtgTeMCUq/4pBWMfQX8Ee7N2wVC4Q7XYFqLnfbXJ2hqew/cU1uMTD2KqGEgEpE4/30luxIxgE+LkIa8glBYw== + workbox-background-sync "6.6.1" + workbox-broadcast-update "6.6.1" + workbox-cacheable-response "6.6.1" + workbox-core "6.6.1" + workbox-expiration "6.6.1" + workbox-google-analytics "6.6.1" + workbox-navigation-preload "6.6.1" + workbox-precaching "6.6.1" + 
workbox-range-requests "6.6.1" + workbox-recipes "6.6.1" + workbox-routing "6.6.1" + workbox-strategies "6.6.1" + workbox-streams "6.6.1" + workbox-sw "6.6.1" + workbox-window "6.6.1" + +workbox-cacheable-response@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-6.6.1.tgz#284c2b86be3f4fd191970ace8c8e99797bcf58e9" + integrity sha512-85LY4veT2CnTCDxaVG7ft3NKaFbH6i4urZXgLiU4AiwvKqS2ChL6/eILiGRYXfZ6gAwDnh5RkuDbr/GMS4KSag== + dependencies: + workbox-core "6.6.1" + +workbox-core@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-6.6.1.tgz#7184776d4134c5ed2f086878c882728fc9084265" + integrity sha512-ZrGBXjjaJLqzVothoE12qTbVnOAjFrHDXpZe7coCb6q65qI/59rDLwuFMO4PcZ7jcbxY+0+NhUVztzR/CbjEFw== + +workbox-expiration@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-6.6.1.tgz#a841fa36676104426dbfb9da1ef6a630b4f93739" + integrity sha512-qFiNeeINndiOxaCrd2DeL1Xh1RFug3JonzjxUHc5WkvkD2u5abY3gZL1xSUNt3vZKsFFGGORItSjVTVnWAZO4A== + dependencies: + idb "^7.0.1" + workbox-core "6.6.1" + +workbox-google-analytics@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-6.6.1.tgz#a07a6655ab33d89d1b0b0a935ffa5dea88618c5d" + integrity sha512-1TjSvbFSLmkpqLcBsF7FuGqqeDsf+uAXO/pjiINQKg3b1GN0nBngnxLcXDYo1n/XxK4N7RaRrpRlkwjY/3ocuA== + dependencies: + workbox-background-sync "6.6.1" + workbox-core "6.6.1" + workbox-routing "6.6.1" + workbox-strategies "6.6.1" + +workbox-navigation-preload@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-6.6.1.tgz#61a34fe125558dd88cf09237f11bd966504ea059" + integrity sha512-DQCZowCecO+wRoIxJI2V6bXWK6/53ff+hEXLGlQL4Rp9ZaPDLrgV/32nxwWIP7QpWDkVEtllTAK5h6cnhxNxDA== + dependencies: + workbox-core "6.6.1" + +workbox-precaching@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-6.6.1.tgz#dedeeba10a2d163d990bf99f1c2066ac0d1a19e2" + integrity sha512-K4znSJ7IKxCnCYEdhNkMr7X1kNh8cz+mFgx9v5jFdz1MfI84pq8C2zG+oAoeE5kFrUf7YkT5x4uLWBNg0DVZ5A== + dependencies: + workbox-core "6.6.1" + workbox-routing "6.6.1" + workbox-strategies "6.6.1" + +workbox-range-requests@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-6.6.1.tgz#ddaf7e73af11d362fbb2f136a9063a4c7f507a39" + integrity sha512-4BDzk28govqzg2ZpX0IFkthdRmCKgAKreontYRC5YsAPB2jDtPNxqx3WtTXgHw1NZalXpcH/E4LqUa9+2xbv1g== + dependencies: + workbox-core "6.6.1" + +workbox-recipes@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-recipes/-/workbox-recipes-6.6.1.tgz#ea70d2b2b0b0bce8de0a9d94f274d4a688e69fae" + integrity sha512-/oy8vCSzromXokDA+X+VgpeZJvtuf8SkQ8KL0xmRivMgJZrjwM3c2tpKTJn6PZA6TsbxGs3Sc7KwMoZVamcV2g== + dependencies: + workbox-cacheable-response "6.6.1" + workbox-core "6.6.1" + workbox-expiration "6.6.1" + workbox-precaching "6.6.1" + workbox-routing "6.6.1" + workbox-strategies "6.6.1" + +workbox-routing@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-6.6.1.tgz#cba9a1c7e0d1ea11e24b6f8c518840efdc94f581" + integrity sha512-j4ohlQvfpVdoR8vDYxTY9rA9VvxTHogkIDwGdJ+rb2VRZQ5vt1CWwUUZBeD/WGFAni12jD1HlMXvJ8JS7aBWTg== + dependencies: + workbox-core "6.6.1" + +workbox-strategies@6.6.1: + version "6.6.1" + resolved 
"https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-6.6.1.tgz#38d0f0fbdddba97bd92e0c6418d0b1a2ccd5b8bf" + integrity sha512-WQLXkRnsk4L81fVPkkgon1rZNxnpdO5LsO+ws7tYBC6QQQFJVI6v98klrJEjFtZwzw/mB/HT5yVp7CcX0O+mrw== + dependencies: + workbox-core "6.6.1" + +workbox-streams@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-6.6.1.tgz#b2f7ba7b315c27a6e3a96a476593f99c5d227d26" + integrity sha512-maKG65FUq9e4BLotSKWSTzeF0sgctQdYyTMq529piEN24Dlu9b6WhrAfRpHdCncRS89Zi2QVpW5V33NX8PgH3Q== + dependencies: + workbox-core "6.6.1" + workbox-routing "6.6.1" + +workbox-sw@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-6.6.1.tgz#d4c4ca3125088e8b9fd7a748ed537fa0247bd72c" + integrity sha512-R7whwjvU2abHH/lR6kQTTXLHDFU2izht9kJOvBRYK65FbwutT4VvnUAJIgHvfWZ/fokrOPhfoWYoPCMpSgUKHQ== + +workbox-webpack-plugin@^6.4.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-6.6.1.tgz#4f81cc1ad4e5d2cd7477a86ba83c84ee2d187531" + integrity sha512-zpZ+ExFj9NmiI66cFEApyjk7hGsfJ1YMOaLXGXBoZf0v7Iu6hL0ZBe+83mnDq3YYWAfA3fnyFejritjOHkFcrA== dependencies: - workbox-core "^5.1.4" - -worker-farm@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" - integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== - dependencies: - errno "~0.1.7" + fast-json-stable-stringify "^2.1.0" + pretty-bytes "^5.4.1" + upath "^1.2.0" + webpack-sources "^1.4.3" + workbox-build "6.6.1" -worker-rpc@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/worker-rpc/-/worker-rpc-0.1.1.tgz#cb565bd6d7071a8f16660686051e969ad32f54d5" - integrity sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg== +workbox-window@6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-6.6.1.tgz#f22a394cbac36240d0dadcbdebc35f711bb7b89e" + integrity sha512-wil4nwOY58nTdCvif/KEZjQ2NP8uk3gGeRNy2jPBbzypU4BT4D9L8xiwbmDBpZlSgJd2xsT9FvSNU0gsxV51JQ== dependencies: - microevent.ts "~0.1.1" + "@types/trusted-types" "^2.0.2" + workbox-core "6.6.1" wrap-ansi@^3.0.1: version "3.0.1" @@ -19823,15 +17211,6 @@ wrap-ansi@^3.0.1: string-width "^2.1.1" strip-ansi "^4.0.0" -wrap-ansi@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" - integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== - dependencies: - ansi-styles "^3.2.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - wrap-ansi@^6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" @@ -19877,17 +17256,15 @@ ws@^5.2.0: dependencies: async-limiter "~1.0.0" -ws@^6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" - integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== - dependencies: - async-limiter "~1.0.0" +ws@^7.4.6: + version "7.5.9" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== -ws@^7.4.5: - version "7.4.6" - resolved 
"https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" - integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== +ws@^8.13.0: + version "8.13.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" + integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== xml-name-validator@^3.0.0: version "3.0.0" @@ -19899,7 +17276,7 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -xtend@^4.0.0, xtend@~4.0.1: +xtend@^4.0.0: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== @@ -19944,11 +17321,16 @@ yaml-ast-parser@^0.0.43: resolved "https://registry.yarnpkg.com/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz#e8a23e6fb4c38076ab92995c5dca33f3d3d7c9bb" integrity sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A== -yaml@^1.10.0, yaml@^1.7.2: +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== +yaml@^2.1.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" + integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== + yamljs@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/yamljs/-/yamljs-0.3.0.tgz#dc060bf267447b39f7304e9b2bfbe8b5a7ddb03b" @@ -19957,14 +17339,6 @@ yamljs@^0.3.0: argparse "^1.0.7" glob "^7.0.5" -yargs-parser@^13.1.2: - version "13.1.2" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" - integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - yargs-parser@^18.1.2: version "18.1.3" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" @@ -19978,23 +17352,7 @@ yargs-parser@^20.2.2: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== -yargs@^13.3.2: - version "13.3.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd" - integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== - dependencies: - cliui "^5.0.0" - find-up "^3.0.0" - get-caller-file "^2.0.1" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^3.0.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^13.1.2" - -yargs@^15.3.1, yargs@^15.4.1: +yargs@^15.3.1: version "15.4.1" resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" integrity 
sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== From 27127eb9148d0fbff89869c274f253e4d5d93eda Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Mon, 27 Nov 2023 15:50:33 -0500 Subject: [PATCH 150/792] fix(java) Fixes NPE ES service (#9311) --- .../metadata/search/elasticsearch/ElasticSearchService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 9b43642d7621c..68a5483fa469c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -144,7 +144,7 @@ public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull Stri @Override public Map aggregateByValue(@Nullable List entityNames, @Nonnull String field, @Nullable Filter requestParams, int limit) { - log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityNames.toString(), field, + log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityNames != null ? entityNames.toString() : null, field, requestParams, limit); return esSearchDAO.aggregateByValue(entityNames, field, requestParams, limit); } From 03be68ca789a80cc7ce2164b384050aa528e9efd Mon Sep 17 00:00:00 2001 From: skrydal Date: Tue, 28 Nov 2023 05:59:28 +0100 Subject: [PATCH 151/792] feat(config): Configurable bootstrap of ownership types (#9308) --- .../src/main/resources/application.yml | 2 + .../factories/BootstrapManagerFactory.java | 5 ++- .../boot/steps/IngestOwnershipTypesStep.java | 44 ++++++------------- 3 files changed, 19 insertions(+), 32 deletions(-) diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 571cb66c84aa8..a52b705cb8da6 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -292,6 +292,8 @@ bootstrap: file: ${BOOTSTRAP_POLICIES_FILE:classpath:boot/policies.json} # eg for local file # file: "file:///datahub/datahub-gms/resources/custom-policies.json" + ownershipTypes: + file: ${BOOTSTRAP_OWNERSHIP_TYPES_FILE:classpath:boot/ownership_types.json} servlets: waitTimeout: ${BOOTSTRAP_SERVLETS_WAITTIMEOUT:60} # Total waiting time in seconds for servlets to initialize diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index 3a761bd12647e..c4e6c941303c8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -94,6 +94,9 @@ public class BootstrapManagerFactory { @Value("${bootstrap.policies.file}") private Resource _policiesResource; + @Value("${bootstrap.ownershipTypes.file}") + private Resource _ownershipTypesResource; + @Bean(name = "bootstrapManager") @Scope("singleton") @Nonnull @@ -116,7 +119,7 @@ protected BootstrapManager createInstance() { final IngestDefaultGlobalSettingsStep ingestSettingsStep = new 
IngestDefaultGlobalSettingsStep(_entityService); final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); - final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService); + final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); final List finalSteps = new ArrayList<>(ImmutableList.of( waitForSystemUpdateStep, diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java index 55d612618ff9f..6d64ceea32339 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java @@ -7,7 +7,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.boot.UpgradeStep; +import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.models.AspectSpec; @@ -16,9 +16,10 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.ownership.OwnershipTypeInfo; -import javax.annotation.Nonnull; + +import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; import java.util.List; @@ -33,17 +34,12 @@ * If not, it ingests the ownership type into DataHub. */ @Slf4j -public class IngestOwnershipTypesStep extends UpgradeStep { - - private static final String UPGRADE_ID = "ingest-default-metadata-ownership-types"; - private static final String VERSION = "1"; - private static final int SLEEP_SECONDS = 60; +@RequiredArgsConstructor +public class IngestOwnershipTypesStep implements BootstrapStep { private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); - - public IngestOwnershipTypesStep(EntityService entityService) { - super(entityService, VERSION, UPGRADE_ID); - } + private final EntityService _entityService; + private final Resource _ownershipTypesResource; @Override public String name() { @@ -51,24 +47,17 @@ public String name() { } @Override - public void upgrade() throws Exception { - log.info("Ingesting default ownership types..."); - - // Sleep to ensure deployment process finishes. - Thread.sleep(SLEEP_SECONDS * 1000); + public void execute() throws Exception { + log.info("Ingesting default ownership types from {}...", _ownershipTypesResource); // 1. Read from the file into JSON. 
- final JsonNode ownershipTypesObj = JSON_MAPPER.readTree(new ClassPathResource("./boot/ownership_types.json") - .getFile()); + final JsonNode ownershipTypesObj = JSON_MAPPER.readTree(_ownershipTypesResource.getFile()); if (!ownershipTypesObj.isArray()) { throw new RuntimeException(String.format("Found malformed ownership file, expected an Array but found %s", ownershipTypesObj.getNodeType())); } - final AspectSpec ownershipTypeInfoAspectSpec = _entityService.getEntityRegistry() - .getEntitySpec(OWNERSHIP_TYPE_ENTITY_NAME) - .getAspectSpec(OWNERSHIP_TYPE_INFO_ASPECT_NAME); final AuditStamp auditStamp = new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); @@ -79,14 +68,13 @@ public void upgrade() throws Exception { final OwnershipTypeInfo info = RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info") .toString()); log.info(String.format("Ingesting default ownership type with urn %s", urn)); - ingestOwnershipType(urn, info, auditStamp, ownershipTypeInfoAspectSpec); + ingestOwnershipType(urn, info, auditStamp); numIngested++; } log.info("Ingested {} new ownership types", numIngested); } - private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp, - final AspectSpec ownershipTypeInfoAspectSpec) { + private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) { // 3. Write key & aspect MCPs. final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); @@ -112,10 +100,4 @@ private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipType .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()).build(), auditStamp, false); } - - @Nonnull - @Override - public ExecutionMode getExecutionMode() { - return ExecutionMode.ASYNC; - } } From ac7fa5624ffab4b369e66977e21cc430b468ce2d Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Tue, 28 Nov 2023 10:45:14 +0530 Subject: [PATCH 152/792] fix(sec): update the "json-schema" version from package.json to solve json-schema vulnerability (#9289) --- datahub-web-react/package.json | 3 ++- datahub-web-react/yarn.lock | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index f55588e46c9c7..62186125b4ad2 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -148,6 +148,7 @@ }, "resolutions": { "@ant-design/colors": "6.0.0", - "refractor": "3.3.1" + "refractor": "3.3.1", + "json-schema": "0.4.0" } } diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index fbc800c93c460..b9a6c62c88de3 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -10793,10 +10793,10 @@ json-schema-traverse@^1.0.0: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" - integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= +json-schema@0.2.3, json-schema@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + 
integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== json-schema@^0.4.0: version "0.4.0" From ff9876f5a2e404dde6b5983fe772aecb9d030aed Mon Sep 17 00:00:00 2001 From: terratrue-daniel <97548386+terratrue-daniel@users.noreply.github.com> Date: Tue, 28 Nov 2023 11:43:52 -0800 Subject: [PATCH 153/792] fix(ingest/mssql): Add MONEY and SMALLMONEY data types as Number (#9313) --- .../ingestion/source/sql/mssql/source.py | 3 ++ .../golden_mces_mssql_no_db_to_file.json | 34 ++++++++++++++++--- .../integration/sql_server/setup/setup.sql | 4 +-- 3 files changed, 34 insertions(+), 7 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index 710825c8ba55d..fa5310b1110e0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -48,6 +48,7 @@ ) from datahub.metadata.schema_classes import ( BooleanTypeClass, + NumberTypeClass, StringTypeClass, UnionTypeClass, ) @@ -55,6 +56,8 @@ logger: logging.Logger = logging.getLogger(__name__) register_custom_type(sqlalchemy.dialects.mssql.BIT, BooleanTypeClass) +register_custom_type(sqlalchemy.dialects.mssql.MONEY, NumberTypeClass) +register_custom_type(sqlalchemy.dialects.mssql.SMALLMONEY, NumberTypeClass) register_custom_type(sqlalchemy.dialects.mssql.SQL_VARIANT, UnionTypeClass) register_custom_type(sqlalchemy.dialects.mssql.UNIQUEIDENTIFIER, StringTypeClass) diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json index 2fe7a76fd01ae..66ef9b097c973 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json @@ -112,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "3565ea3e-9a3a-4cb0-acd5-213d740479a0", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2023-11-27 23:08:29.350000", + "date_modified": "2023-11-27 23:08:29.833000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2023-11-27 23:08:29.077000", + "date_modified": "2023-11-27 23:08:29.077000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", @@ -3575,6 +3575,18 @@ "nativeDataType": "NVARCHAR()", "recursive": false, "isPartOfKey": false + }, + { + "fieldPath": "Price", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "MONEY()", + "recursive": false, + "isPartOfKey": false } ] } @@ -3816,6 +3828,18 @@ "nativeDataType": "NVARCHAR()", "recursive": false, "isPartOfKey": false + }, + { + "fieldPath": "Price", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + 
"nativeDataType": "SMALLMONEY()", + "recursive": false, + "isPartOfKey": false } ] } diff --git a/metadata-ingestion/tests/integration/sql_server/setup/setup.sql b/metadata-ingestion/tests/integration/sql_server/setup/setup.sql index a17d52f9a39b1..77ecabc5a3fff 100644 --- a/metadata-ingestion/tests/integration/sql_server/setup/setup.sql +++ b/metadata-ingestion/tests/integration/sql_server/setup/setup.sql @@ -2,11 +2,11 @@ CREATE DATABASE NewData; GO USE NewData; GO -CREATE TABLE ProductsNew (ID int, ProductName nvarchar(max)); +CREATE TABLE ProductsNew (ID int, ProductName nvarchar(max), Price money); GO CREATE SCHEMA FooNew; GO -CREATE TABLE FooNew.ItemsNew (ID int, ItemName nvarchar(max)); +CREATE TABLE FooNew.ItemsNew (ID int, ItemName nvarchar(max), Price smallmoney); GO CREATE TABLE FooNew.PersonsNew ( ID int NOT NULL PRIMARY KEY, From 08fb730676dc5c807e43d8c8be4f8cab8ad830d0 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 29 Nov 2023 02:19:49 +0530 Subject: [PATCH 154/792] fix(ingest): drop deprecated database_alias from sql sources (#9299) Co-authored-by: Harshal Sheth --- docs/how/updating-datahub.md | 2 +- .../src/datahub/ingestion/source/metabase.py | 2 + .../ingestion/source/redshift/common.py | 12 - .../ingestion/source/redshift/config.py | 13 +- .../ingestion/source/redshift/lineage.py | 3 +- .../ingestion/source/redshift/redshift.py | 7 +- .../ingestion/source/redshift/usage.py | 4 - .../ingestion/source/sql/mssql/source.py | 7 +- .../src/datahub/ingestion/source/sql/mysql.py | 6 +- .../datahub/ingestion/source/sql/oracle.py | 2 - .../datahub/ingestion/source/sql/postgres.py | 3 - .../ingestion/source/sql/presto_on_hive.py | 2 - .../ingestion/source/sql/sql_config.py | 11 +- .../src/datahub/ingestion/source/sql/trino.py | 11 +- .../src/datahub/ingestion/source/superset.py | 2 + .../mysql/mysql_to_file_dbalias.yml | 1 - .../tests/integration/mysql/test_mysql.py | 24 - .../presto_on_hive_mces_golden_1.json | 317 ++++++++------ .../presto_on_hive_mces_golden_2.json | 292 +++++++------ .../presto_on_hive_mces_golden_3.json | 411 ++++++++++-------- .../presto_on_hive_mces_golden_4.json | 374 +++++++++------- .../presto_on_hive_mces_golden_5.json | 317 ++++++++------ .../presto-on-hive/presto_on_hive_to_file.yml | 1 - .../presto-on-hive/test_presto_on_hive.py | 4 +- .../integration/snowflake/test_snowflake.py | 1 + .../golden_mces_mssql_to_file.json | 56 +-- ...golden_mces_mssql_with_lower_case_urn.json | 56 +-- .../sql_server/source_files/mssql_to_file.yml | 1 - .../mssql_with_lower_case_urn.yml | 1 - .../tests/integration/trino/test_trino.py | 1 - .../tests/unit/test_postgres_source.py | 17 - 31 files changed, 1052 insertions(+), 909 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/ingestion/source/redshift/common.py diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 21c4cef2e848b..3263a9f7c15fb 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -8,7 +8,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. 
- +- `database_alias` config is no longer supported in SQL sources, namely Redshift, MySQL, Oracle, Postgres, Trino, and Presto-on-hive. If it is present in your recipe, the config is now ignored automatically. It had been deprecated since v0.9.6. ### Potential Downtime ### Deprecations diff --git a/metadata-ingestion/src/datahub/ingestion/source/metabase.py b/metadata-ingestion/src/datahub/ingestion/source/metabase.py index 24145d60210ff..9f09a4322bb5d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metabase.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metabase.py @@ -54,6 +54,8 @@ class MetabaseConfig(DatasetLineageProviderConfigBase): password: Optional[pydantic.SecretStr] = Field( default=None, description="Metabase password." ) + # TODO: Check and remove this if no longer needed. + # Config database_alias is removed from sql sources. database_alias_map: Optional[dict] = Field( default=None, description="Database name map to use when constructing dataset URN.", diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/common.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/common.py deleted file mode 100644 index 80657c69f88fa..0000000000000 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/common.py +++ /dev/null @@ -1,12 +0,0 @@ -from datahub.ingestion.source.redshift.config import RedshiftConfig - -redshift_datetime_format = "%Y-%m-%d %H:%M:%S" - - -def get_db_name(config: RedshiftConfig) -> str: - db_name = config.database - db_alias = config.database_alias - - db_name = db_alias or db_name - assert db_name is not None, "database name or alias must be specified" - return db_name diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 9cbf1823db939..95038ef2c6212 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -8,7 +8,7 @@ from datahub.configuration import ConfigModel from datahub.configuration.common import AllowDenyPattern from datahub.configuration.source_common import DatasetLineageProviderConfigBase -from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated +from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.data_lake_common.path_spec import PathSpec from datahub.ingestion.source.sql.postgres import BasePostgresConfig from datahub.ingestion.source.state.stateful_ingestion_base import ( @@ -87,10 +87,7 @@ class RedshiftConfig( hidden_from_schema=True, ) - _database_alias_deprecation = pydantic_field_deprecated( - "database_alias", - message="database_alias is deprecated. 
Use platform_instance instead.", - ) + _database_alias_removed = pydantic_removed_field("database_alias") default_schema: str = Field( default="public", @@ -151,10 +148,8 @@ def check_email_is_set_on_usage(cls, values): return values @root_validator(skip_on_failure=True) - def check_database_or_database_alias_set(cls, values): - assert values.get("database") or values.get( - "database_alias" - ), "either database or database_alias must be set" + def check_database_is_set(cls, values): + assert values.get("database"), "database must be set" return values @root_validator(skip_on_failure=True) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index c9ddfbe92ab2a..05011b2d7a769 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -16,7 +16,6 @@ from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.source.aws.s3_util import strip_s3_prefix -from datahub.ingestion.source.redshift.common import get_db_name from datahub.ingestion.source.redshift.config import LineageMode, RedshiftConfig from datahub.ingestion.source.redshift.query import RedshiftQuery from datahub.ingestion.source.redshift.redshift_schema import ( @@ -266,7 +265,7 @@ def _populate_lineage_map( try: cll: Optional[List[sqlglot_l.ColumnLineageInfo]] = None raw_db_name = database - alias_db_name = get_db_name(self.config) + alias_db_name = self.config.database for lineage_row in RedshiftDataDictionary.get_lineage_rows( conn=connection, query=query diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py index c7d01021773b1..0b1bde6ca8c0a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py @@ -38,7 +38,6 @@ DatasetContainerSubTypes, DatasetSubTypes, ) -from datahub.ingestion.source.redshift.common import get_db_name from datahub.ingestion.source.redshift.config import RedshiftConfig from datahub.ingestion.source.redshift.lineage import RedshiftLineageExtractor from datahub.ingestion.source.redshift.profile import RedshiftProfiler @@ -393,8 +392,8 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]: connection = RedshiftSource.get_redshift_connection(self.config) - database = get_db_name(self.config) - logger.info(f"Processing db {self.config.database} with name {database}") + database = self.config.database + logger.info(f"Processing db {database}") self.report.report_ingestion_stage_start(METADATA_EXTRACTION) self.db_tables[database] = defaultdict() self.db_views[database] = defaultdict() @@ -628,7 +627,7 @@ def gen_view_dataset_workunits( ) -> Iterable[MetadataWorkUnit]: yield from self.gen_dataset_workunits( table=view, - database=get_db_name(self.config), + database=self.config.database, schema=schema, sub_type=DatasetSubTypes.VIEW, custom_properties={}, diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py index bbb1876102578..c789e605b9c29 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py +++ 
b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py @@ -359,10 +359,6 @@ def _gen_access_events_from_history_query( self.report.num_usage_stat_skipped += 1 continue - # Replace database name with the alias name if one is provided in the config. - if self.config.database_alias: - access_event.database = self.config.database_alias - if not self._should_process_event(access_event, all_tables=all_tables): self.report.num_usage_stat_skipped += 1 continue diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index fa5310b1110e0..6eea5a4c31fa6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -138,7 +138,7 @@ def host(self): @property def db(self): - return self.database_alias or self.database + return self.database @platform_name("Microsoft SQL Server", id="mssql") @@ -660,10 +660,7 @@ def get_identifier( regular = f"{schema}.{entity}" qualified_table_name = regular if self.config.database: - if self.config.database_alias: - qualified_table_name = f"{self.config.database_alias}.{regular}" - else: - qualified_table_name = f"{self.config.database}.{regular}" + qualified_table_name = f"{self.config.database}.{regular}" if self.current_database: qualified_table_name = f"{self.current_database}.{regular}" return ( diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py index 891b64066721b..2126717f835a2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py @@ -54,11 +54,7 @@ class MySQLConnectionConfig(SQLAlchemyConnectionConfig): class MySQLConfig(MySQLConnectionConfig, TwoTierSQLAlchemyConfig): def get_identifier(self, *, schema: str, table: str) -> str: - regular = f"{schema}.{table}" - if self.database_alias: - return f"{self.database_alias}.{table}" - else: - return regular + return f"{schema}.{table}" @platform_name("MySQL") diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py index f2e1fe00ec8a3..7ee54200c6493 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py @@ -88,8 +88,6 @@ def get_sql_alchemy_url(self): def get_identifier(self, schema: str, table: str) -> str: regular = f"{schema}.{table}" if self.add_database_name_to_urn: - if self.database_alias: - return f"{self.database_alias}.{regular}" if self.database: return f"{self.database}.{regular}" return regular diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py index c8418075928ef..5d1e37fbb68a3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py @@ -139,7 +139,6 @@ class PostgresSource(SQLAlchemySource): - Metadata for databases, schemas, views, and tables - Column types associated with each table - Also supports PostGIS extensions - - database_alias (optional) can be used to change the name of database to be ingested - Table, row, and column statistics via optional SQL profiling """ @@ -271,8 +270,6 @@ def get_identifier( ) -> str: regular = f"{schema}.{entity}" if self.config.database: - if 
self.config.database_alias: - return f"{self.config.database_alias}.{regular}" return f"{self.config.database}.{regular}" current_database = self.get_db_name(inspector) return f"{current_database}.{regular}" diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py b/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py index ceb9ecacb25d2..9657fdab9e2e3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py @@ -329,8 +329,6 @@ def __init__(self, config: PrestoOnHiveConfig, ctx: PipelineContext) -> None: ) def get_db_name(self, inspector: Inspector) -> str: - if self.config.database_alias: - return f"{self.config.database_alias}" if self.config.database: return f"{self.config.database}" else: diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py index 6a76ae847218d..54edab6f3b84b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py @@ -11,7 +11,7 @@ DatasetSourceConfigMixin, LowerCaseDatasetUrnConfigMixin, ) -from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated +from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.ge_profiling_config import GEProfilingConfig from datahub.ingestion.source.state.stale_entity_removal_handler import ( StatefulStaleMetadataRemovalConfig, @@ -129,10 +129,6 @@ class SQLAlchemyConnectionConfig(ConfigModel): host_port: str = Field(description="host URL") database: Optional[str] = Field(default=None, description="database (catalog)") - database_alias: Optional[str] = Field( - default=None, - description="[Deprecated] Alias to apply to database when ingesting.", - ) scheme: str = Field(description="scheme") sqlalchemy_uri: Optional[str] = Field( default=None, @@ -149,10 +145,7 @@ class SQLAlchemyConnectionConfig(ConfigModel): ), ) - _database_alias_deprecation = pydantic_field_deprecated( - "database_alias", - message="database_alias is deprecated. 
Use platform_instance instead.", - ) + _database_alias_removed = pydantic_removed_field("database_alias") def get_sql_alchemy_url( self, uri_opts: Optional[Dict[str, Any]] = None, database: Optional[str] = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py b/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py index 2b693d9d80d91..cb2e05765bfff 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py @@ -136,12 +136,9 @@ class TrinoConfig(BasicSQLAlchemyConfig): scheme: str = Field(default="trino", description="", hidden_from_docs=True) def get_identifier(self: BasicSQLAlchemyConfig, schema: str, table: str) -> str: - regular = f"{schema}.{table}" - identifier = regular - if self.database_alias: - identifier = f"{self.database_alias}.{regular}" - elif self.database: - identifier = f"{self.database}.{regular}" + identifier = f"{schema}.{table}" + if self.database: # TODO: this should be required field + identifier = f"{self.database}.{identifier}" return ( f"{self.platform_instance}.{identifier}" if self.platform_instance @@ -173,8 +170,6 @@ def __init__( super().__init__(config, ctx, platform) def get_db_name(self, inspector: Inspector) -> str: - if self.config.database_alias: - return f"{self.config.database_alias}" if self.config.database: return f"{self.config.database}" else: diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py index 1ae971e4a82d0..7f607666db313 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/superset.py +++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py @@ -96,6 +96,8 @@ class SupersetConfig(StatefulIngestionConfigBase, ConfigModel): default=DEFAULT_ENV, description="Environment to use in namespace when constructing URNs", ) + # TODO: Check and remove this if no longer needed. + # Config database_alias is removed from sql sources. database_alias: Dict[str, str] = Field( default={}, description="Can be used to change mapping for database names in superset to what you have in datahub", diff --git a/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml b/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml index 1c324641fe158..89b87505ab527 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml +++ b/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml @@ -6,7 +6,6 @@ source: username: root password: example database: metagalaxy - database_alias: foogalaxy host_port: localhost:53307 schema_pattern: allow: diff --git a/metadata-ingestion/tests/integration/mysql/test_mysql.py b/metadata-ingestion/tests/integration/mysql/test_mysql.py index 8c8626a2d2297..23fd97ff2671e 100644 --- a/metadata-ingestion/tests/integration/mysql/test_mysql.py +++ b/metadata-ingestion/tests/integration/mysql/test_mysql.py @@ -75,27 +75,3 @@ def test_mysql_ingest_no_db( output_path=tmp_path / "mysql_mces.json", golden_path=test_resources_dir / golden_file, ) - - -@freeze_time(FROZEN_TIME) -@pytest.mark.integration -def test_mysql_ingest_with_db_alias( - mysql_runner, pytestconfig, test_resources_dir, tmp_path, mock_time -): - # Run the metadata ingestion pipeline. - config_file = (test_resources_dir / "mysql_to_file_dbalias.yml").resolve() - run_datahub_cmd(["ingest", "-c", f"{config_file}"], tmp_path=tmp_path) - - # Verify the output. 
- # Assert that all events generated have instance specific urns - import re - - urn_pattern = "^" + re.escape( - "urn:li:dataset:(urn:li:dataPlatform:mysql,foogalaxy." - ) - mce_helpers.assert_mcp_entity_urn( - filter="ALL", - entity_type="dataset", - regex_pattern=urn_pattern, - file=tmp_path / "mysql_mces_dbalias.json", - ) diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json index 45d13229b2d85..5607075ed568f 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, 
{ "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,42 +154,45 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "container": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -191,12 +202,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,7 +295,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } 
}, { @@ -300,7 +313,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -312,19 +326,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -334,12 +349,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,7 +499,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -500,7 +517,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,19 +530,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,12 +553,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - 
"transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,7 +695,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -692,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,19 +726,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -726,12 +749,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -871,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -883,19 +909,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - 
"runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -905,12 +932,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,7 +1052,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1041,7 +1070,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,19 +1083,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1075,12 +1106,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1181,7 +1214,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1193,19 +1227,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": 
"urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,12 +1250,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,7 +1350,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1331,7 +1368,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1343,19 +1381,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1365,12 +1404,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1440,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1457,7 +1498,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1474,7 +1516,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1486,19 +1529,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + 
"urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,12 +1552,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1630,7 +1675,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1647,7 +1693,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1664,7 +1711,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1676,19 +1724,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json index 4ec71eb8c39c6..45f78eb61c15b 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" 
} }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,42 +154,45 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "container": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": 
"presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -191,12 +202,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,7 +295,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -300,7 +313,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -312,19 +326,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -334,12 +349,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - 
"create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,7 +499,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -500,7 +517,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,19 +530,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,12 +553,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,7 +695,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -692,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,19 +726,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -726,12 +749,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,17 
+856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -871,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -883,19 +909,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -905,12 +932,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,7 +1052,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1041,7 +1070,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,19 +1083,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": 
"urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1075,12 +1106,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1181,7 +1214,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1193,19 +1227,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,12 +1250,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,7 +1350,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1331,7 +1368,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { 
@@ -1343,19 +1381,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1365,12 +1404,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1453,7 +1493,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1470,7 +1511,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1487,7 +1529,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1499,19 +1542,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json index 824524782a8e3..ad1e46eb8fbb0 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": 
"UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,63 +154,67 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": 
"urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "container": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -211,7 +223,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.map_test", + "schemaName": "metastore.db1.map_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,12 +295,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -300,52 +313,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": 
"urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -354,7 +370,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.union_test", + "schemaName": "metastore.db1.union_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,12 +499,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -500,52 +517,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": 
"urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -554,7 +574,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.nested_struct_test", + "schemaName": "metastore.db1.nested_struct_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,12 +695,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -692,52 +713,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 
1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -746,7 +770,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test", + "schemaName": "metastore.db1.array_struct_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,12 +878,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -871,52 +896,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": 
"urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -925,7 +953,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.struct_test", + "schemaName": "metastore.db1.struct_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,12 +1052,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1041,52 +1070,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1095,7 +1127,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1._test_table_underscore", + "schemaName": "metastore.db1._test_table_underscore", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,12 +1196,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1181,52 +1214,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": 
"container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1235,7 +1271,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.pokes", + "schemaName": "metastore.db1.pokes", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,12 +1350,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1331,52 +1368,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": 
"urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1385,7 +1425,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test_presto_view", + "schemaName": "metastore.db1.array_struct_test_presto_view", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1440,12 +1480,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1457,12 +1498,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -1474,52 +1516,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", 
"aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1528,7 +1573,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test_view", + "schemaName": "metastore.db1.array_struct_test_view", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1630,12 +1675,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1647,12 +1693,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -1664,31 +1711,33 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json index 3f2980457daa4..007f45238e23f 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,63 +154,67 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": 
"urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "container": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -211,7 +223,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.map_test", + "schemaName": "metastore.db1.map_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,12 +295,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -300,52 +313,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -354,7 +370,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.union_test", + "schemaName": "metastore.db1.union_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,12 +499,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -500,52 +517,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": 
"browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -554,7 +574,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.nested_struct_test", + "schemaName": "metastore.db1.nested_struct_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,12 +695,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -692,52 +713,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -746,7 +770,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test", + "schemaName": "metastore.db1.array_struct_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,12 +878,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -871,52 +896,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -925,7 +953,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.struct_test", + "schemaName": "metastore.db1.struct_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,12 +1052,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1041,52 +1070,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1095,7 +1127,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1._test_table_underscore", + "schemaName": "metastore.db1._test_table_underscore", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,12 +1196,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1181,52 +1214,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1235,7 +1271,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.pokes", + "schemaName": "metastore.db1.pokes", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,12 +1350,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1331,52 +1368,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": 
"urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1385,7 +1425,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test_presto_view", + "schemaName": "metastore.db1.array_struct_test_presto_view", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1453,12 +1493,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1470,12 +1511,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -1487,31 +1529,33 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } 
}, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json index a0dd4ab82bf24..111fc0038bdb8 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": 
"urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,42 +154,45 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "container": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -191,12 +202,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956983", + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,7 +295,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -300,7 +313,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + 
"runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -312,19 +326,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -334,12 +349,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956983", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,7 +499,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -500,7 +517,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,19 +530,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,12 +553,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956983", + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": 
"hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,7 +695,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -692,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,19 +726,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -726,12 +749,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "transient_lastDdlTime": "1690956980", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -871,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -883,19 +909,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -905,12 +932,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1005,7 +1033,7 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956977", + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "rawDataSize": "0", @@ -1013,7 +1041,7 @@ "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,7 +1052,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1041,7 +1070,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,19 +1083,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1075,12 +1106,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,7 +1177,7 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956977", + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "rawDataSize": "0", @@ -1153,7 +1185,7 @@ "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1181,7 +1214,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1193,19 +1227,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": 
"urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,12 +1250,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956974", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-08-02", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,7 +1350,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1331,7 +1368,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1343,19 +1381,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1365,12 +1404,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1440,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1457,7 +1498,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1474,7 +1516,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1486,19 +1529,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { 
- "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,12 +1552,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1630,7 +1675,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1647,7 +1693,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1664,7 +1711,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1676,19 +1724,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml index d4df1364513c8..233fb7fa36057 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml @@ -5,7 +5,6 @@ source: config: host_port: localhost:5432 database: metastore - database_alias: hive username: postgres scheme: "postgresql+psycopg2" diff --git a/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py b/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py index 31d801ccf7dee..23110ef12ae54 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py +++ b/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py @@ -88,9 +88,8 @@ def test_presto_on_hive_ingest( "type": data_platform, "config": { "host_port": "localhost:5432", - "database": "db1", "metastore_db_name": "metastore", - "database_alias": "hive", + "database_pattern": {"allow": ["db1"]}, "username": "postgres", "scheme": "postgresql+psycopg2", "include_views": True, @@ -152,7 +151,6 @@ def test_presto_on_hive_instance_ingest( "config": { "host_port": "localhost:5432", "database": "metastore", - "database_alias": "hive", "username": "postgres", "scheme": "postgresql+psycopg2", "include_views": True, diff --git 
a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py index 4c00e48ede9fb..1b58696e4014c 100644 --- a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py +++ b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py @@ -142,6 +142,7 @@ def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph): type="datahub", config=datahub_classifier_config ) ], + max_workers=1, ), profiling=GEProfilingConfig( enabled=True, diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json index 804a8d74d0d51..9ce3664eff6a1 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json @@ -112,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "3b767c17-c921-4331-93d9-eb0e006045a4", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2023-11-23 11:04:47.927000", + "date_modified": "2023-11-23 11:04:48.090000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -1245,7 +1245,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1262,7 +1262,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1278,7 +1278,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.dbo.Products", + "schemaName": "DemoData.dbo.Products", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1334,7 +1334,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1352,7 +1352,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1486,7 +1486,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1503,7 +1503,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "aspects": [ { 
"com.linkedin.pegasus2avro.common.Status": { @@ -1520,7 +1520,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.Foo.Items", + "schemaName": "DemoData.Foo.Items", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1576,7 +1576,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1594,7 +1594,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1619,7 +1619,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1636,7 +1636,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1652,7 +1652,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.Foo.Persons", + "schemaName": "DemoData.Foo.Persons", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1733,7 +1733,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1751,7 +1751,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1776,7 +1776,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1793,7 +1793,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1809,7 +1809,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.Foo.SalesReason", + "schemaName": "DemoData.Foo.SalesReason", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1868,12 +1868,12 @@ { "name": "FK_TempSales_SalesReason", "foreignFields": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD),ID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD),ID)" ], "sourceFields": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD),TempID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD),TempID)" ], - "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)" + "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)" } ] } @@ -1889,7 +1889,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1907,7 +1907,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2023-11-23 11:04:47.857000", + "date_modified": "2023-11-23 11:04:47.857000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json index 9d1b288057a16..037a341b7d66e 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json @@ -112,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "3b767c17-c921-4331-93d9-eb0e006045a4", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2023-11-23 11:04:47.927000", + "date_modified": "2023-11-23 11:04:48.090000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -1245,7 +1245,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1262,7 +1262,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1278,7 +1278,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.dbo.products", + "schemaName": "demodata.dbo.products", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1334,7 +1334,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1352,7 +1352,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1486,7 +1486,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1503,7 +1503,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1520,7 +1520,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.foo.items", + "schemaName": "demodata.foo.items", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1576,7 +1576,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1594,7 +1594,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1619,7 +1619,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1636,7 +1636,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1652,7 +1652,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.foo.persons", + "schemaName": "demodata.foo.persons", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1733,7 +1733,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1751,7 +1751,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1776,7 +1776,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1793,7 +1793,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1809,7 +1809,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.foo.salesreason", + "schemaName": "demodata.foo.salesreason", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1868,12 +1868,12 @@ { "name": "FK_TempSales_SalesReason", "foreignFields": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD),ID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD),ID)" ], "sourceFields": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD),TempID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD),TempID)" ], - "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)" + "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)" } ] } @@ -1889,7 +1889,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1907,7 +1907,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2023-11-23 11:04:47.857000", + "date_modified": "2023-11-23 11:04:47.857000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", diff --git a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml index d347422353d47..c53e3cf6b8045 100644 --- a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml +++ b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml @@ -7,7 +7,6 @@ source: password: test!Password database: DemoData host_port: localhost:51433 - database_alias: DemoDataAlias # use_odbc: True # uri_args: # driver: "ODBC Driver 17 for SQL Server" diff --git a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml index 8d17c49163ca1..4e96d137670ba 100644 --- a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml +++ b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml @@ -7,7 +7,6 @@ source: password: test!Password 
database: DemoData host_port: localhost:51433 - database_alias: DemoDataAlias convert_urns_to_lowercase: true # use_odbc: True # uri_args: diff --git a/metadata-ingestion/tests/integration/trino/test_trino.py b/metadata-ingestion/tests/integration/trino/test_trino.py index 177c273c0d242..8ab3ed8056e90 100644 --- a/metadata-ingestion/tests/integration/trino/test_trino.py +++ b/metadata-ingestion/tests/integration/trino/test_trino.py @@ -70,7 +70,6 @@ def test_trino_ingest( "config": TrinoConfig( host_port="localhost:5300", database="postgresqldb", - database_alias="library_catalog", username="foo", schema_pattern=AllowDenyPattern(allow=["^librarydb"]), profile_pattern=AllowDenyPattern( diff --git a/metadata-ingestion/tests/unit/test_postgres_source.py b/metadata-ingestion/tests/unit/test_postgres_source.py index fac491cbaea04..91a62b603bb58 100644 --- a/metadata-ingestion/tests/unit/test_postgres_source.py +++ b/metadata-ingestion/tests/unit/test_postgres_source.py @@ -65,23 +65,6 @@ def tests_get_inspectors_with_sqlalchemy_uri_provided(create_engine_mock): assert create_engine_mock.call_args_list[0][0][0] == "custom_url" -def test_database_alias_takes_precendence(): - config = PostgresConfig.parse_obj( - { - **_base_config(), - "database_alias": "ops_database", - "database": "postgres", - } - ) - mock_inspector = mock.MagicMock() - assert ( - PostgresSource(config, PipelineContext(run_id="test")).get_identifier( - schema="superset", entity="logs", inspector=mock_inspector - ) - == "ops_database.superset.logs" - ) - - def test_database_in_identifier(): config = PostgresConfig.parse_obj({**_base_config(), "database": "postgres"}) mock_inspector = mock.MagicMock() From 966cb175f7d826ee9331cea652164249c9cf6bfb Mon Sep 17 00:00:00 2001 From: Hendrik Richert Date: Tue, 28 Nov 2023 21:52:11 +0100 Subject: [PATCH 155/792] feat(dev): Make repositories configurable for enterprise developers (#9230) Co-authored-by: Hendrik Richert Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- datahub-frontend/build.gradle | 18 +++++++++- datahub-upgrade/build.gradle | 16 +++++++++ docker/datahub-frontend/Dockerfile | 16 ++++++--- docker/datahub-gms/Dockerfile | 35 +++++++++++++++---- docker/datahub-ingestion-base/Dockerfile | 24 +++++++++++-- docker/datahub-ingestion-base/build.gradle | 21 +++++++++-- docker/datahub-ingestion/Dockerfile | 9 +++++ docker/datahub-ingestion/Dockerfile-slim-only | 5 +++ docker/datahub-ingestion/build.gradle | 16 +++++++-- docker/datahub-mae-consumer/Dockerfile | 27 +++++++++++--- docker/datahub-mce-consumer/Dockerfile | 27 +++++++++++--- docker/datahub-upgrade/Dockerfile | 31 ++++++++++++---- docker/elasticsearch-setup/Dockerfile | 14 ++++++++ docker/elasticsearch-setup/build.gradle | 12 ++++++- docker/kafka-setup/Dockerfile | 25 +++++++++---- docker/kafka-setup/build.gradle | 19 ++++++++++ docker/mysql-setup/Dockerfile | 13 +++++++ docker/mysql-setup/build.gradle | 10 ++++++ docker/postgres-setup/Dockerfile | 13 +++++++ docker/postgres-setup/build.gradle | 10 ++++++ .../custom-test-model/build.gradle | 6 +++- .../datahub-protobuf-example/build.gradle | 6 +++- metadata-jobs/mae-consumer-job/build.gradle | 18 +++++++++- metadata-jobs/mce-consumer-job/build.gradle | 18 +++++++++- metadata-models-custom/build.gradle | 6 +++- metadata-service/war/build.gradle | 16 +++++++++ repositories.gradle | 30 ++++++++++++---- 27 files changed, 409 insertions(+), 52 deletions(-) diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 
eb81b31745536..9a5fb3210a311 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -89,6 +89,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) { @@ -104,4 +120,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 3356445cda7e1..71baa8af99468 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -108,6 +108,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":datahub-upgrade:docker").dependsOn([bootJar]) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index aaace5ae38ca3..9c26d73f4f40b 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -3,14 +3,22 @@ ARG APP_ENV=prod FROM alpine:3 AS base +# Configurable repositories +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + RUN addgroup -S datahub && adduser -S datahub -G datahub +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ ENV LD_LIBRARY_PATH="/lib:/lib64" @@ -22,8 +30,8 @@ COPY ./docker/monitoring/client-prometheus-config.yaml 
/datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend ENV JMX_VERSION=0.18.0 -RUN wget https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ - && wget https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar +RUN wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ + && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index c5696bbd2d1d2..1e13fa492c7f0 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -1,11 +1,23 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + +FROM golang:1-alpine3.18 AS binary FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -16,16 +28,25 @@ FROM alpine:3 AS base # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 + +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - 
&& wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 25afe9b8b3dce..e0f9fdc997071 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -1,11 +1,23 @@ ARG APP_ENV=full ARG BASE_IMAGE=base +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG PIP_MIRROR_URL=null + FROM golang:1-alpine3.18 AS dockerize-binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,11 +26,19 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM python:3.10 as base +ARG DEBIAN_REPO_URL +ARG PIP_MIRROR_URL +ARG GITHUB_REPO_URL + ENV LIBRDKAFKA_VERSION=1.6.2 ENV CONFLUENT_KAFKA_VERSION=1.6.1 ENV DEBIAN_FRONTEND noninteractive +# Optionally set corporate mirror for apk and pip +RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi +RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi + RUN apt-get update && apt-get install -y -qq \ make \ python3-ldap \ @@ -33,7 +53,7 @@ RUN apt-get update && apt-get install -y -qq \ unzip \ ldap-utils \ && python -m pip install --no-cache --upgrade pip wheel setuptools \ - && wget -q https://github.com/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ + && wget -q ${GITHUB_REPO_URL}/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ tar -xz -C /root \ && cd /root/librdkafka-${LIBRDKAFKA_VERSION} \ && ./configure --prefix /usr && make && make install && cd .. 
&& rm -rf /root/librdkafka-${LIBRDKAFKA_VERSION} \ @@ -84,4 +104,4 @@ FROM ${BASE_IMAGE} as slim-install FROM ${APP_ENV}-install USER datahub -ENV PATH="/datahub-ingestion/.local/bin:$PATH" \ No newline at end of file +ENV PATH="/datahub-ingestion/.local/bin:$PATH" diff --git a/docker/datahub-ingestion-base/build.gradle b/docker/datahub-ingestion-base/build.gradle index c4d8a962dcd32..e0168290c48f8 100644 --- a/docker/datahub-ingestion-base/build.gradle +++ b/docker/datahub-ingestion-base/build.gradle @@ -25,7 +25,24 @@ docker { }.exclude { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - buildArgs([APP_ENV: docker_target]) + + def dockerBuildArgs = [APP_ENV: docker_target] + + // Add build args if they are defined (needed for some CI or enterprise environments) + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('debianAptRepositoryUrl')) { + dockerBuildArgs.DEBIAN_REPO_URL = project.getProperty('debianAptRepositoryUrl') + } + if (project.hasProperty('pipMirrorUrl')) { + dockerBuildArgs.PIP_MIRROR_URL = project.getProperty('pipMirrorUrl') + } + + buildArgs(dockerBuildArgs) } tasks.getByName('docker').dependsOn('build') @@ -42,4 +59,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 1aee79a428a98..9516c31a19e21 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -2,6 +2,8 @@ ARG APP_ENV=full ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head +ARG PIP_MIRROR_URL=null +ARG DEBIAN_REPO_URL=http://deb.debian.org/debian FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 @@ -20,16 +22,23 @@ USER datahub ENV PATH="/datahub-ingestion/.local/bin:$PATH" FROM base as slim-install +ARG PIP_MIRROR_URL + +RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" FROM base as full-install-build +ARG PIP_MIRROR_URL +ARG DEBIAN_REPO_URL USER 0 +RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi RUN apt-get update && apt-get install -y -qq maven USER datahub COPY ./docker/datahub-ingestion/pyspark_jars.sh . 
+RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN pip install --no-cache --user ".[base]" && \ pip install --no-cache --user "./airflow-plugin[acryl-datahub-airflow-plugin]" && \ pip install --no-cache --user ".[all]" diff --git a/docker/datahub-ingestion/Dockerfile-slim-only b/docker/datahub-ingestion/Dockerfile-slim-only index cb8c27ab463c4..4112f470c25be 100644 --- a/docker/datahub-ingestion/Dockerfile-slim-only +++ b/docker/datahub-ingestion/Dockerfile-slim-only @@ -1,6 +1,7 @@ # Defining environment ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head-slim +ARG PIP_MIRROR_URL=null FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 @@ -17,6 +18,10 @@ USER datahub ENV PATH="/datahub-ingestion/.local/bin:$PATH" FROM base as slim-install + +ARG PIP_MIRROR_URL + +RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" FROM slim-install as final diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 247b896d6955c..52db594e2ef85 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -32,8 +32,18 @@ docker { }.exclude { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - buildArgs([DOCKER_VERSION: version, - RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')]) + + def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')] + + // Add build args if they are defined (needed for some CI or enterprise environments) + if (project.hasProperty('pipMirrorUrl')) { + dockerBuildArgs.PIP_MIRROR_URL = project.getProperty('pipMirrorUrl') + } + if (project.hasProperty('debianAptRepositoryUrl')) { + dockerBuildArgs.DEBIAN_REPO_URL = project.getProperty('debianAptRepositoryUrl') + } + + buildArgs(dockerBuildArgs) } tasks.getByName('docker').dependsOn(['build', ':docker:datahub-ingestion-base:docker', @@ -51,4 +61,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 07af7c66a7783..3bacd3b2dc81a 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -1,11 +1,22 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,15 +25,23 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index 97861d6be3141..bb22ab82f4402 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -1,11 +1,22 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,15 +25,23 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index fa8e65009662b..551d61f41b979 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -1,11 +1,22 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,17 +25,25 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index c8fb2eba911b8..f4dd1cb9b018e 100644 --- 
a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -3,11 +3,19 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine + FROM golang:1-alpine3.18 AS binary +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update add openssl git tar curl sqlite @@ -16,6 +24,12 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base + +ARG ALPINE_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk add --no-cache curl jq bash coreutils COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/elasticsearch-setup/build.gradle b/docker/elasticsearch-setup/build.gradle index ac935ca42fd12..f9dff3032b56d 100644 --- a/docker/elasticsearch-setup/build.gradle +++ b/docker/elasticsearch-setup/build.gradle @@ -27,6 +27,16 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') @@ -42,4 +52,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index e7f084739a576..f6a4b62a79356 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -1,28 +1,41 @@ ARG KAFKA_DOCKER_VERSION=7.4.1 +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 +ARG APACHE_DOWNLOAD_URL=null + # Using as a base image because to get the needed jars for confluent utils FROM confluentinc/cp-base-new:$KAFKA_DOCKER_VERSION as confluent_base -ARG MAVEN_REPO="https://repo1.maven.org/maven2" +ARG MAVEN_CENTRAL_REPO_URL ARG SNAKEYAML_VERSION="2.0" RUN rm /usr/share/java/cp-base-new/snakeyaml-*.jar \ - && wget -P /usr/share/java/cp-base-new $MAVEN_REPO/org/yaml/snakeyaml/$SNAKEYAML_VERSION/snakeyaml-$SNAKEYAML_VERSION.jar + && wget -P /usr/share/java/cp-base-new $MAVEN_CENTRAL_REPO_URL/org/yaml/snakeyaml/$SNAKEYAML_VERSION/snakeyaml-$SNAKEYAML_VERSION.jar # Based on https://github.com/blacktop's alpine kafka build FROM python:3-alpine +ARG ALPINE_REPO_URL +ARG APACHE_DOWNLOAD_URL +ARG GITHUB_REPO_URL + ENV KAFKA_VERSION 3.4.1 ENV SCALA_VERSION 2.13 LABEL name="kafka" version=${KAFKA_VERSION} +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk add --no-cache bash coreutils -RUN apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community +RUN apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip RUN mkdir -p /opt \ - && mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred') \ + && if [ "${APACHE_DOWNLOAD_URL}" != "null" ] ; then mirror="${APACHE_DOWNLOAD_URL}/" ; else mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred'); fi \ && curl -sSL "${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" \ | tar -xzf - -C /opt \ && mv /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION} /opt/kafka \ @@ -39,8 +52,8 @@ RUN ls -la COPY --from=confluent_base /usr/share/java/cp-base-new/ /usr/share/java/cp-base-new/ COPY --from=confluent_base /etc/cp-base-new/log4j.properties /etc/cp-base-new/log4j.properties -ADD --chown=kafka:kafka https://github.com/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /usr/share/java/cp-base-new -ADD --chown=kafka:kafka https://github.com/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /opt/kafka/libs +ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /usr/share/java/cp-base-new +ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /opt/kafka/libs ENV METADATA_AUDIT_EVENT_NAME="MetadataAuditEvent_v4" ENV METADATA_CHANGE_EVENT_NAME="MetadataChangeEvent_v4" diff --git a/docker/kafka-setup/build.gradle b/docker/kafka-setup/build.gradle index 25f9847190de3..d7bc5c2d7d13f 100644 --- a/docker/kafka-setup/build.gradle +++ b/docker/kafka-setup/build.gradle @@ -26,6 +26,25 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + 
dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + if (project.hasProperty('apacheDownloadUrl')) { + dockerBuildArgs.APACHE_DOWNLOAD_URL = project.getProperty('apacheDownloadUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') diff --git a/docker/mysql-setup/Dockerfile b/docker/mysql-setup/Dockerfile index 56bab61180489..8b7ca704c32cd 100644 --- a/docker/mysql-setup/Dockerfile +++ b/docker/mysql-setup/Dockerfile @@ -1,8 +1,16 @@ +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine + FROM golang:1-alpine3.18 AS binary +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -12,6 +20,11 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 COPY --from=binary /go/bin/dockerize /usr/local/bin +ARG ALPINE_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk add --no-cache mysql-client bash mariadb-connector-c sqlite diff --git a/docker/mysql-setup/build.gradle b/docker/mysql-setup/build.gradle index 1598866914c0e..5c70a2f0d9a2d 100644 --- a/docker/mysql-setup/build.gradle +++ b/docker/mysql-setup/build.gradle @@ -27,6 +27,16 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') diff --git a/docker/postgres-setup/Dockerfile b/docker/postgres-setup/Dockerfile index 7f4d53ae044d4..e10f70571501e 100644 --- a/docker/postgres-setup/Dockerfile +++ b/docker/postgres-setup/Dockerfile @@ -1,8 +1,16 @@ +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine + FROM golang:1-alpine3.18 AS binary +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -12,6 +20,11 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 COPY --from=binary /go/bin/dockerize /usr/local/bin +ARG ALPINE_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk add --no-cache postgresql-client sqlite diff --git a/docker/postgres-setup/build.gradle b/docker/postgres-setup/build.gradle index e24e206c99145..5c42a002f45be 100644 --- a/docker/postgres-setup/build.gradle +++ b/docker/postgres-setup/build.gradle @@ -27,6 +27,16 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') diff --git a/entity-registry/custom-test-model/build.gradle b/entity-registry/custom-test-model/build.gradle index 778e2e42b95c4..8e17de0709188 100644 --- a/entity-registry/custom-test-model/build.gradle +++ b/entity-registry/custom-test-model/build.gradle @@ -2,7 +2,11 @@ import org.yaml.snakeyaml.Yaml buildscript { repositories{ - mavenCentral() + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() + } } dependencies { classpath("org.yaml:snakeyaml:1.33") diff --git a/metadata-integration/java/datahub-protobuf-example/build.gradle b/metadata-integration/java/datahub-protobuf-example/build.gradle index 71cbb67061887..4e53d8ed763ba 100644 --- a/metadata-integration/java/datahub-protobuf-example/build.gradle +++ b/metadata-integration/java/datahub-protobuf-example/build.gradle @@ -4,7 +4,11 @@ plugins { } repositories { - mavenCentral() + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() + } mavenLocal() } diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index 5e735e118493c..a8920d50b068e 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -58,6 +58,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if 
(project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":metadata-jobs:mae-consumer-job:docker").dependsOn([bootJar]) @@ -66,4 +82,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index ef042188bc3d8..2f60d1ae985fb 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ b/metadata-jobs/mce-consumer-job/build.gradle @@ -69,6 +69,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":metadata-jobs:mce-consumer-job:docker").dependsOn([bootJar]) @@ -77,4 +93,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/metadata-models-custom/build.gradle b/metadata-models-custom/build.gradle index 95a00766039a8..71d3b0fd1f736 100644 --- a/metadata-models-custom/build.gradle +++ b/metadata-models-custom/build.gradle @@ -2,7 +2,11 @@ import org.yaml.snakeyaml.Yaml buildscript { repositories{ - mavenCentral() + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() + } } dependencies { classpath("org.yaml:snakeyaml:1.33") diff --git a/metadata-service/war/build.gradle b/metadata-service/war/build.gradle index 35730ad6dfa9f..fc29b0bb46092 100644 --- a/metadata-service/war/build.gradle +++ b/metadata-service/war/build.gradle @@ -83,6 +83,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":metadata-service:war:docker").dependsOn([build, war]) diff --git a/repositories.gradle b/repositories.gradle index 69eaea6ca12bc..d82563c2659a0 100644 --- a/repositories.gradle +++ b/repositories.gradle @@ -1,15 +1,31 @@ repositories { gradlePluginPortal() mavenLocal() - mavenCentral() - maven { - url 
"https://packages.confluent.io/maven/" + + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() } - maven { - url "https://plugins.gradle.org/m2/" + + if (project.hasProperty('confluentMavenRepositoryUrl')) { + maven { + url project.getProperty('confluentMavenRepositoryUrl') + } + } else { + maven { + url "https://packages.confluent.io/maven/" + } } - maven { - url "https://linkedin.jfrog.io/artifactory/open-source/" // GMA, pegasus + + if (project.hasProperty('linkedinOpenSourceRepositoryUrl')) { + maven { + url project.getProperty('linkedinOpenSourceRepositoryUrl') + } + } else { + maven { + url "https://linkedin.jfrog.io/artifactory/open-source/" // GMA, pegasus + } } } From 3d7962cf170632911467c280cb1fb173330e2568 Mon Sep 17 00:00:00 2001 From: Adriano Vega Llobell Date: Tue, 28 Nov 2023 23:58:42 +0100 Subject: [PATCH 156/792] fix(ingest/sql): improve handling of views with dots in their names (#9183) --- .../src/datahub/ingestion/source/sql/sql_common.py | 2 +- metadata-ingestion/tests/unit/test_sql_common.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 80f828e9ea2fd..67af6b2010c83 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -1054,7 +1054,7 @@ def _run_sql_parser( return view_definition_lineage_helper(raw_lineage, view_urn) def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]: - database, schema, _view = dataset_identifier.split(".") + database, schema, _view = dataset_identifier.split(".", 2) return database, schema def get_profiler_instance(self, inspector: Inspector) -> "DatahubGEProfiler": diff --git a/metadata-ingestion/tests/unit/test_sql_common.py b/metadata-ingestion/tests/unit/test_sql_common.py index 808b38192411d..e23d290b611f4 100644 --- a/metadata-ingestion/tests/unit/test_sql_common.py +++ b/metadata-ingestion/tests/unit/test_sql_common.py @@ -102,3 +102,17 @@ def test_use_source_schema_for_foreign_key_if_not_specified(): def test_get_platform_from_sqlalchemy_uri(uri: str, expected_platform: str) -> None: platform: str = get_platform_from_sqlalchemy_uri(uri) assert platform == expected_platform + + +def test_get_db_schema_with_dots_in_view_name(): + config: SQLCommonConfig = _TestSQLAlchemyConfig() + ctx: PipelineContext = PipelineContext(run_id="test_ctx") + platform: str = "TEST" + source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) + + database, schema = source.get_db_schema( + dataset_identifier="database.schema.long.view.name1" + ) + + assert database == "database" + assert schema == "schema" From 2031bd4de12d0e42974fb46e1839145dd86cb40e Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 28 Nov 2023 18:31:56 -0500 Subject: [PATCH 157/792] docs(ingest): update docs on adding stateful ingestion (#9327) --- .../add_stateful_ingestion_to_source.md | 197 ++++++------------ 1 file changed, 66 insertions(+), 131 deletions(-) diff --git a/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md b/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md index 9e39d24fb8578..a152697988c6f 100644 --- a/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md +++ 
b/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md
@@ -5,160 +5,75 @@ the [Redundant Run Elimination](./stateful.md#redundant-run-elimination) use-case
 capability available for the sources. This document describes how to add support for
 these two use-cases to new sources.
 
 ## Adding Stale Metadata Removal to a Source
-Adding the stale metadata removal use-case to a new source involves
-1. Defining the new checkpoint state that stores the list of entities emitted from a specific ingestion run.
-2. Modifying the `SourceConfig` associated with the source to use a custom `stateful_ingestion` config param.
-3. Modifying the `SourceReport` associated with the source to include soft-deleted entities in the report.
-4. Modifying the `Source` to
-   1. Instantiate the StaleEntityRemovalHandler object
-   2. Add entities from the current run to the state object
-   3. Emit stale metadata removal workunits
+
+Adding the stale metadata removal use-case to a new source involves modifying the source config, source report, and the source itself.
+
+For a full example of all changes required: [Adding stale metadata removal to the MongoDB source](https://github.com/datahub-project/datahub/pull/9118).
 
 The [datahub.ingestion.source.state.stale_entity_removal_handler](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/stale_entity_removal_handler.py)
 module provides the supporting infrastructure for all the steps described above and substantially simplifies
 the implementation on the source side.
 
 Below is a detailed explanation of each of these steps along with examples.
 
-### 1. Defining the checkpoint state for the source.
-The checkpoint state class is responsible for tracking the entities emitted from each ingestion run. If none of the existing states meets the needs of the new source, a new checkpoint state must be created. The state must
-inherit from the `StaleEntityCheckpointStateBase` abstract class shown below, and implement each of the abstract methods.
-```python
-class StaleEntityCheckpointStateBase(CheckpointStateBase, ABC, Generic[Derived]):
-    """
-    Defines the abstract interface for the checkpoint states that are used for stale entity removal.
-    Examples include sql_common state for tracking table & view urns,
-    dbt that tracks node & assertion urns, kafka state tracking topic urns.
-    """
-
-    @classmethod
-    @abstractmethod
-    def get_supported_types(cls) -> List[str]:
-        pass
-
-    @abstractmethod
-    def add_checkpoint_urn(self, type: str, urn: str) -> None:
-        """
-        Adds an urn into the list used for tracking the type.
-        :param type: The type of the urn such as a 'table', 'view',
-         'node', 'topic', 'assertion' that the concrete sub-class understands.
-        :param urn: The urn string
-        :return: None.
-        """
-        pass
-
-    @abstractmethod
-    def get_urns_not_in(
-        self, type: str, other_checkpoint_state: Derived
-    ) -> Iterable[str]:
-        """
-        Gets the urns present in this checkpoint but not the other_checkpoint for the given type.
-        :param type: The type of the urn such as a 'table', 'view',
-         'node', 'topic', 'assertion' that the concrete sub-class understands.
-        :param other_checkpoint_state: the checkpoint state to compute the urn set difference against.
-        :return: an iterable over the set of urns present in this checkpoint state but not in the other_checkpoint.
- """ - pass -``` - -Examples: -* [BaseSQLAlchemyCheckpointState](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/sql_common_state.py#L17) - -### 2. Modifying the SourceConfig +### 1. Modify the source config The source's config must inherit from `StatefulIngestionConfigBase`, and should declare a field named `stateful_ingestion` of type `Optional[StatefulStaleMetadataRemovalConfig]`. -Examples: -- The `KafkaSourceConfig` +Example: + ```python -from typing import List, Optional -import pydantic -from datahub.ingestion.source.state.stale_entity_removal_handler import StatefulStaleMetadataRemovalConfig -from datahub.ingestion.source.state.stateful_ingestion_base import ( +from datahub.ingestion.source.state.stale_entity_removal_handler import ( + StatefulStaleMetadataRemovalConfig, StatefulIngestionConfigBase, ) -class KafkaSourceConfig(StatefulIngestionConfigBase): +class MySourceConfig(StatefulIngestionConfigBase): # ...... stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = None ``` -### 3. Modifying the SourceReport -The report class of the source should inherit from `StaleEntityRemovalSourceReport` whose definition is shown below. -```python -from typing import List -from dataclasses import dataclass, field -from datahub.ingestion.source.state.stateful_ingestion_base import StatefulIngestionReport -@dataclass -class StaleEntityRemovalSourceReport(StatefulIngestionReport): - soft_deleted_stale_entities: List[str] = field(default_factory=list) +### 2. Modify the source report - def report_stale_entity_soft_deleted(self, urn: str) -> None: - self.soft_deleted_stale_entities.append(urn) -``` +The report class of the source should inherit from `StaleEntityRemovalSourceReport` instead of `SourceReport`. -Examples: -* The `KafkaSourceReport` ```python -from dataclasses import dataclass -from datahub.ingestion.source.state.stale_entity_removal_handler import StaleEntityRemovalSourceReport +from datahub.ingestion.source.state.stale_entity_removal_handler import ( + StaleEntityRemovalSourceReport, +) + @dataclass -class KafkaSourceReport(StaleEntityRemovalSourceReport): - # + pass ``` -### 4. Modifying the Source -The source must inherit from `StatefulIngestionSourceBase`. +### 3. Modify the source -#### 4.1 Instantiate StaleEntityRemovalHandler in the `__init__` method of the source. +1. The source must inherit from `StatefulIngestionSourceBase` instead of `Source`. +2. The source should contain a custom `get_workunit_processors` method. -Examples: -1. The `KafkaSource` ```python from datahub.ingestion.source.state.stateful_ingestion_base import StatefulIngestionSourceBase from datahub.ingestion.source.state.stale_entity_removal_handler import StaleEntityRemovalHandler -class KafkaSource(StatefulIngestionSourceBase): - def __init__(self, config: KafkaSourceConfig, ctx: PipelineContext): - # - # Create and register the stateful ingestion stale entity removal handler. - self.stale_entity_removal_handler = StaleEntityRemovalHandler( - source=self, - config=self.source_config, - state_type_class=KafkaCheckpointState, - pipeline_name=self.ctx.pipeline_name, - run_id=self.ctx.run_id, - ) -``` -#### 4.2 Adding entities from current run to the state object. -Use the `add_entity_to_state` method of the `StaleEntityRemovalHandler`. 
-Examples:
-```python
-# Kafka
-self.stale_entity_removal_handler.add_entity_to_state(
-    type="topic",
-    urn=topic_urn,)
-
-# DBT
-self.stale_entity_removal_handler.add_entity_to_state(
-    type="dataset",
-    urn=node_datahub_urn
-)
-self.stale_entity_removal_handler.add_entity_to_state(
-    type="assertion",
-    urn=node_datahub_urn,
-)
-```
+class MySource(StatefulIngestionSourceBase):
+    def __init__(self, config: MySourceConfig, ctx: PipelineContext):
+        super().__init__(config, ctx)
-#### 4.3 Emitting soft-delete workunits associated with the stale entities.
-```python
-def get_workunits(self) -> Iterable[MetadataWorkUnit]:
-    #
-    # Emit the rest of the workunits for the source.
-    # NOTE: Populating the current state happens during the execution of this code.
-    # ...
-
-    # Clean up stale entities at the end
-    yield from self.stale_entity_removal_handler.gen_removed_entity_workunits()
+        self.config = config
+        self.report = MySourceReport()
+
+        # other initialization code here
+
+    def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
+        return [
+            *super().get_workunit_processors(),
+            StaleEntityRemovalHandler.create(
+                self, self.config, self.ctx
+            ).workunit_processor,
+        ]
+
+    # other methods here
 ```
 
 ## Adding Redundant Run Elimination to a Source
@@ -168,12 +83,13 @@ as snowflake usage, bigquery usage etc.). It typically involves expensive and long-running queries. To add redundant
 run elimination to a new source to prevent the expensive reruns for the same time range (potentially due to a user
 error or a scheduler malfunction), the following steps are required.
+
 1. Update the `SourceConfig`
 2. Update the `SourceReport`
-3. Modify the `Source` to
-   1. Instantiate the RedundantRunSkipHandler object.
-   2. Check if the current run should be skipped.
-   3. Update the state for the current run (start & end times).
+3. Modify the `Source` to
+   1. Instantiate the RedundantRunSkipHandler object.
+   2. Check if the current run should be skipped.
+   3. Update the state for the current run (start & end times).
 
 The [datahub.ingestion.source.state.redundant_run_skip_handler](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/redundant_run_skip_handler.py)
 module provides the supporting infrastructure required for all the steps described above.
 
@@ -181,11 +97,15 @@ module provides the supporting infrastructure required for all the steps described
 NOTE: The handler currently uses a simple state,
 the [BaseUsageCheckpointState](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/usage_common_state.py),
 across all sources it supports (unlike the StaleEntityRemovalHandler).
+
 ### 1. Modifying the SourceConfig
+
 The `SourceConfig` must inherit from the [StatefulRedundantRunSkipConfig](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/redundant_run_skip_handler.py#L23)
 class.
 
 Examples:
+
 1. Snowflake Usage
+
 ```python
 from datahub.ingestion.source.state.redundant_run_skip_handler import (
     StatefulRedundantRunSkipConfig,
 )
 class SnowflakeStatefulIngestionConfig(StatefulRedundantRunSkipConfig):
     pass
 ```
+
 ### 2. Modifying the SourceReport
+
 The `SourceReport` must inherit from the [StatefulIngestionReport](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py#L102)
 class.
 
 Examples:
+
 1. Snowflake Usage
+
 ```python
 @dataclass
 class SnowflakeUsageReport(BaseSnowflakeReport, StatefulIngestionReport):
-    #
 ```
+
 ### 3. Modifying the Source
+
 The source must inherit from `StatefulIngestionSourceBase`.
+
 #### 3.1 Instantiate RedundantRunSkipHandler in the `__init__` method of the source.
+
 The source should instantiate an instance of the `RedundantRunSkipHandler` in its `__init__` method.
 
 Examples:
 Snowflake Usage
+
 ```python
 from datahub.ingestion.source.state.redundant_run_skip_handler import (
     RedundantRunSkipHandler,
 )
 class SnowflakeUsageSource(StatefulIngestionSourceBase):
-
+
     def __init__(self, config: SnowflakeUsageConfig, ctx: PipelineContext):
         super(SnowflakeUsageSource, self).__init__(config, ctx)
         self.config: SnowflakeUsageConfig = config
@@ -226,10 +155,13 @@ class SnowflakeUsageSource(StatefulIngestionSourceBase):
         run_id=self.ctx.run_id,
     )
 ```
+
 #### 3.2 Checking if the current run should be skipped.
+
 The sources can query if the current run should be skipped using the `should_skip_this_run` method of `RedundantRunSkipHandler`. This should be done from the `get_workunits` method, before doing any other work.
 
 Example code:
+
 ```python
 def get_workunits(self) -> Iterable[MetadataWorkUnit]:
     # Skip a redundant run
@@ -239,10 +171,13 @@ def get_workunits(self) -> Iterable[MetadataWorkUnit]:
         return
     # Generate the workunits.
 ```
+
 #### 3.3 Updating the state for the current run.
+
 The source should use the `update_state` method of `RedundantRunSkipHandler` to update the current run's state if the run has not been skipped. This step can be performed in `get_workunits` once the run is known not to be skipped.
 
 Example code:
+
 ```python
 def get_workunits(self) -> Iterable[MetadataWorkUnit]:
     # Skip a redundant run
     if self.should_skip_this_run(
         cur_start_time_millis=self.config.start_time
     ):
         return
-
+
     # Generate the workunits.
     #
     # Update checkpoint state for this run.
@@ -258,4 +193,4 @@ Example code: start_time_millis=self.config.start_time, end_time_millis=self.config.end_time, ) -``` \ No newline at end of file +``` From 3a840371ccdb84ea1a264ef69b0b87709f2e1adc Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 28 Nov 2023 21:21:15 -0600 Subject: [PATCH 158/792] fix(docker): docker compose health checks port fix (#9326) --- docker/docker-compose-with-cassandra.yml | 2 +- docker/docker-compose-without-neo4j.yml | 10 +++++----- docker/docker-compose.yml | 12 ++++++------ docker/quickstart/docker-compose-m1.quickstart.yml | 12 ++++++------ .../docker-compose-without-neo4j-m1.quickstart.yml | 10 +++++----- .../docker-compose-without-neo4j.quickstart.yml | 10 +++++----- docker/quickstart/docker-compose.quickstart.yml | 12 ++++++------ 7 files changed, 34 insertions(+), 34 deletions(-) diff --git a/docker/docker-compose-with-cassandra.yml b/docker/docker-compose-with-cassandra.yml index 39f4341600572..48239fcd87831 100644 --- a/docker/docker-compose-with-cassandra.yml +++ b/docker/docker-compose-with-cassandra.yml @@ -43,7 +43,7 @@ services: dockerfile: docker/datahub-gms/Dockerfile env_file: ./datahub-gms/env/docker.cassandra.env healthcheck: - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 20s interval: 1s retries: 20 diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml index 235e89e340551..6191994eaa1ea 100644 --- a/docker/docker-compose-without-neo4j.yml +++ b/docker/docker-compose-without-neo4j.yml @@ -44,7 +44,7 @@ services: dockerfile: docker/datahub-gms/Dockerfile env_file: datahub-gms/env/docker-without-neo4j.env healthcheck: - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 90s interval: 1s retries: 3 @@ -119,7 +119,7 @@ services: limits: memory: 1G healthcheck: - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s start_period: 20s interval: 1s retries: 3 @@ -134,7 +134,7 @@ services: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 env_file: schema-registry/env/docker.env healthcheck: - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} start_period: 60s interval: 1s retries: 3 @@ -150,7 +150,7 @@ services: - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 env_file: broker/env/docker.env healthcheck: - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} start_period: 60s interval: 1s retries: 5 @@ -168,7 +168,7 @@ services: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 env_file: zookeeper/env/docker.env healthcheck: - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} start_period: 30s interval: 5s retries: 3 diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 46da8c6fdbd2a..95f56fe47e3cc 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -42,7 +42,7 @@ services: context: ../ dockerfile: docker/datahub-gms/Dockerfile healthcheck: - test: curl 
-sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 90s interval: 1s retries: 3 @@ -124,7 +124,7 @@ services: limits: memory: 1G healthcheck: - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s start_period: 20s interval: 1s retries: 3 @@ -140,7 +140,7 @@ services: - ${DATAHUB_MAPPED_NEO4J_BOLT_PORT:-7687}:7687 env_file: neo4j/env/docker.env healthcheck: - test: wget http://neo4j:$${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474} + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} start_period: 5s interval: 1s retries: 5 @@ -155,7 +155,7 @@ services: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 env_file: schema-registry/env/docker.env healthcheck: - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} start_period: 60s interval: 1s retries: 3 @@ -171,7 +171,7 @@ services: - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 env_file: broker/env/docker.env healthcheck: - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} start_period: 60s interval: 1s retries: 5 @@ -189,7 +189,7 @@ services: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 env_file: zookeeper/env/docker.env healthcheck: - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} start_period: 10s interval: 5s retries: 3 diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 4df32395cf82d..7b7ca4052f324 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -111,7 +111,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -171,7 +171,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -258,7 +258,7 @@ services: interval: 1s retries: 5 start_period: 5s - test: wget http://neo4j:$${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474} + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} timeout: 5s hostname: neo4j image: neo4j/neo4j-arm64-experimental:4.0.6-arm64 @@ -280,7 +280,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s 
hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -295,7 +295,7 @@ services: interval: 5s retries: 3 start_period: 10s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index b1cb6c208a42d..53dacaf6ef63b 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -106,7 +106,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -164,7 +164,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -253,7 +253,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -268,7 +268,7 @@ services: interval: 5s retries: 3 start_period: 30s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 6eac53229e82a..1ca91aa19206d 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -106,7 +106,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -164,7 +164,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch 
image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -253,7 +253,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -268,7 +268,7 @@ services: interval: 5s retries: 3 start_period: 30s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index 86d70abd2b815..c77b4418b6f36 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -111,7 +111,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -171,7 +171,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -258,7 +258,7 @@ services: interval: 1s retries: 5 start_period: 5s - test: wget http://neo4j:$${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474} + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} timeout: 5s hostname: neo4j image: neo4j:4.4.9-community @@ -280,7 +280,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -295,7 +295,7 @@ services: interval: 5s retries: 3 start_period: 10s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 From bc24136763a35d0d128162a0cbf74b9c69fc49ae Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Wed, 29 Nov 2023 09:13:21 +0530 Subject: [PATCH 159/792] =?UTF-8?q?fix(ui):=20vulnerability=20(React):=20I?= =?UTF-8?q?nefficient=20Regular=20Expression=20Complexit=E2=80=A6=20(#9324?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- datahub-web-react/package.json | 4 ++- datahub-web-react/yarn.lock | 64 ++++------------------------------ 2 files changed, 10 insertions(+), 58 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 62186125b4ad2..fd01fccbdff6c 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ 
-149,6 +149,8 @@ "resolutions": { "@ant-design/colors": "6.0.0", "refractor": "3.3.1", - "json-schema": "0.4.0" + "json-schema": "0.4.0", + "prismjs": "^1.27.0", + "nth-check": "^2.0.1" } } diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index b9a6c62c88de3..3bab8aebdf3fb 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -5998,7 +5998,7 @@ bonjour-service@^1.0.11: fast-deep-equal "^3.1.3" multicast-dns "^7.2.5" -boolbase@^1.0.0, boolbase@~1.0.0: +boolbase@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= @@ -6437,15 +6437,6 @@ cli-width@^3.0.0: resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== -clipboard@^2.0.0: - version "2.0.8" - resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.8.tgz#ffc6c103dd2967a83005f3f61976aa4655a4cdba" - integrity sha512-Y6WO0unAIQp5bLmk1zdThRhgJt/x3ks6f30s3oE3H1mgIEU33XyQjEf8gsf6DxC7NPX8Y1SsNWjUjL/ywLnnbQ== - dependencies: - good-listener "^1.2.2" - select "^1.1.2" - tiny-emitter "^2.0.0" - cliui@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" @@ -7389,11 +7380,6 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= -delegate@^3.1.2: - version "3.2.0" - resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166" - integrity sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw== - depd@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" @@ -8967,13 +8953,6 @@ globby@^13.1.1: merge2 "^1.4.1" slash "^4.0.0" -good-listener@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50" - integrity sha1-1TswzfkxPf+33JoNR3CWqm0UXFA= - dependencies: - delegate "^3.1.2" - got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -10793,12 +10772,7 @@ json-schema-traverse@^1.0.0: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== -json-schema@0.2.3, json-schema@0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-schema@^0.4.0: +json-schema@0.2.3, json-schema@0.4.0, json-schema@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== @@ -12122,14 +12096,7 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -nth-check@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" - integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== - dependencies: - boolbase "~1.0.0" - -nth-check@^2.0.1: +nth-check@^1.0.2, nth-check@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== @@ -13262,17 +13229,10 @@ pretty-format@^28.1.3: ansi-styles "^5.0.0" react-is "^18.0.0" -prismjs@^1.22.0: - version "1.24.1" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.24.1.tgz#c4d7895c4d6500289482fa8936d9cdd192684036" - integrity sha512-mNPsedLuk90RVJioIky8ANZEwYm5w9LcvCXrxHlwf4fNVSn8jEipMybMkWUyyF0JhnC+C4VcOVSBuHRKs1L5Ow== - -prismjs@~1.23.0: - version "1.23.0" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.23.0.tgz#d3b3967f7d72440690497652a9d40ff046067f33" - integrity sha512-c29LVsqOaLbBHuIbsTxaKENh1N2EQBOHaWv7gkHN4dgRbxSREqDnDbtFJYdpPauS4YCplMSNCABQ6Eeor69bAA== - optionalDependencies: - clipboard "^2.0.0" +prismjs@^1.22.0, prismjs@^1.27.0, prismjs@~1.23.0: + version "1.29.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12" + integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q== process-nextick-args@~2.0.0: version "2.0.1" @@ -15039,11 +14999,6 @@ select-hose@^2.0.0: resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= -select@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" - integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= - selfsigned@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" @@ -16067,11 +16022,6 @@ thunky@^1.0.2: resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== -tiny-emitter@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" - integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== - tiny-invariant@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875" From e4c05fa9c81e9bb98f988aaa3b02f8b252df7933 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Tue, 28 Nov 2023 22:53:57 -0500 Subject: [PATCH 160/792] fix(ui): Fix UI glitch in policies creator (#9266) --- .../policy/PolicyPrivilegeForm.tsx | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx b/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx index b8e1505fceaec..ac73a1f5ece7c 100644 --- a/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx +++ b/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx @@ -319,7 +319,7 @@ export default function PolicyPrivilegeForm({ .filter((privs) => privs.resourceType !== 'all') .map((resPrivs) 
=> { return ( - + {resPrivs.resourceTypeDisplayName} ); @@ -355,7 +355,9 @@ export default function PolicyPrivilegeForm({ )} > {resourceSearchResults?.map((result) => ( - {renderSearchResult(result)} + + {renderSearchResult(result)} + ))} @@ -389,7 +391,9 @@ export default function PolicyPrivilegeForm({ dropdownStyle={isShowingDomainNavigator ? { display: 'none' } : {}} > {domainSearchResults?.map((result) => ( - {renderSearchResult(result)} + + {renderSearchResult(result)} + ))} @@ -412,9 +416,14 @@ export default function PolicyPrivilegeForm({ )} > - {privilegeOptions.map((priv) => ( - {priv.displayName} - ))} + {privilegeOptions.map((priv, index) => { + const key = `${priv.type}-${index}`; + return ( + + {priv.displayName} + + ); + })} All Privileges From 10b7a951da2955dc0a80021d0ed40e6f00c51b16 Mon Sep 17 00:00:00 2001 From: allizex <150264485+allizex@users.noreply.github.com> Date: Wed, 29 Nov 2023 05:40:34 +0100 Subject: [PATCH 161/792] fix(sidebar): remove a space reserved for scroll bars when sidebar is collapsed (#9322) --- datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx index 0d3d40c4a71af..822e75b65febc 100644 --- a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx +++ b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx @@ -26,12 +26,12 @@ const SidebarHeader = styled.div` white-space: nowrap; `; -const SidebarBody = styled.div` +const SidebarBody = styled.div<{ visible: boolean }>` height: calc(100% - 47px); padding-left: 16px; padding-right: 12px; padding-bottom: 200px; - overflow: auto; + overflow: ${(props) => (props.visible ? 'auto' : 'hidden')}; white-space: nowrap; `; @@ -50,7 +50,7 @@ const BrowseSidebar = ({ visible, width }: Props) => { Navigate - + {entityAggregations && !entityAggregations.length &&
No results found
} {entityAggregations?.map((entityAggregation) => ( From ab10e6bc58471ec3ee8870377dc2d2a0f2527406 Mon Sep 17 00:00:00 2001 From: terratrue-daniel <97548386+terratrue-daniel@users.noreply.github.com> Date: Wed, 29 Nov 2023 00:02:26 -0800 Subject: [PATCH 162/792] feat(ingest/mssql): enable TLS encryption for SQLServer using pytds (#9256) --- metadata-ingestion/docs/sources/mssql/mssql_recipe.yml | 8 ++++++++ metadata-ingestion/setup.py | 2 +- .../src/datahub/ingestion/source/sql/mssql/source.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml index 5f1e24ce1e956..93be7a86d72cc 100644 --- a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml +++ b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml @@ -9,6 +9,14 @@ source: username: user password: pass + # Options + # Uncomment if you need to use encryption with pytds + # See https://python-tds.readthedocs.io/en/latest/pytds.html#pytds.connect + # options: + # connect_args: + # cafile: server-ca.pem + # validate_host: true + sink: # sink configs diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 2b002164a49b9..8d9892d8e11b1 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -350,7 +350,7 @@ "mlflow": {"mlflow-skinny>=2.3.0"}, "mode": {"requests", "tenacity>=8.0.1"} | sqllineage_lib, "mongodb": {"pymongo[srv]>=3.11", "packaging"}, - "mssql": sql_common | {"sqlalchemy-pytds>=0.3"}, + "mssql": sql_common | {"sqlalchemy-pytds>=0.3", "pyOpenSSL"}, "mssql-odbc": sql_common | {"pyodbc"}, "mysql": mysql, # mariadb should have same dependency as mysql diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index 6eea5a4c31fa6..2442df595d967 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -155,7 +155,7 @@ class SQLServerSource(SQLAlchemySource): - Metadata for databases, schemas, views and tables - Column types associated with each table/view - Table, row, and column statistics via optional SQL profiling - We have two options for the underlying library used to connect to SQL Server: (1) [python-tds](https://github.com/denisenkom/pytds) and (2) [pyodbc](https://github.com/mkleehammer/pyodbc). The TDS library is pure Python and hence easier to install, but only PyODBC supports encrypted connections. + We have two options for the underlying library used to connect to SQL Server: (1) [python-tds](https://github.com/denisenkom/pytds) and (2) [pyodbc](https://github.com/mkleehammer/pyodbc). The TDS library is pure Python and hence easier to install. 
""" def __init__(self, config: SQLServerConfig, ctx: PipelineContext): From c946d26a624e39655d98e93a044d067030819d19 Mon Sep 17 00:00:00 2001 From: Mide Ojikutu Date: Wed, 29 Nov 2023 08:02:57 +0000 Subject: [PATCH 163/792] fix(datahub-frontend): Add playCaffeine as replacement for removed playEhcache dependency (#8344) --- build.gradle | 1 + datahub-frontend/play.gradle | 1 + 2 files changed, 2 insertions(+) diff --git a/build.gradle b/build.gradle index 7c5deb4783943..c1278a6dab1a0 100644 --- a/build.gradle +++ b/build.gradle @@ -167,6 +167,7 @@ project.ext.externalDependency = [ 'parquetHadoop': 'org.apache.parquet:parquet-hadoop:1.13.1', 'picocli': 'info.picocli:picocli:4.5.0', 'playCache': "com.typesafe.play:play-cache_2.12:$playVersion", + 'playCaffeineCache': "com.typesafe.play:play-caffeine-cache_2.12:$playVersion", 'playWs': 'com.typesafe.play:play-ahc-ws-standalone_2.12:2.1.10', 'playDocs': "com.typesafe.play:play-docs_2.12:$playVersion", 'playGuice': "com.typesafe.play:play-guice_2.12:$playVersion", diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index daecba16cbf72..dd1ceee411f74 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -58,6 +58,7 @@ dependencies { implementation externalDependency.shiroCore implementation externalDependency.playCache + implementation externalDependency.playCaffeineCache implementation externalDependency.playWs implementation externalDependency.playServer implementation externalDependency.playAkkaHttpServer From 4dd6738ae7707ab8b085c2b2c1502f0a8c86d361 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 29 Nov 2023 04:25:33 -0500 Subject: [PATCH 164/792] fix(ingest): bump pyhive to fix headers issue (#9328) --- metadata-ingestion/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 8d9892d8e11b1..4f5f09fb148fa 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -214,7 +214,8 @@ # - 0.6.13 adds a small fix for Databricks # - 0.6.14 uses pure-sasl instead of sasl so it builds on Python 3.11 # - 0.6.15 adds support for thrift > 0.14 (cherry-picked from https://github.com/apache/thrift/pull/2491) - "acryl-pyhive[hive_pure_sasl]==0.6.15", + # - 0.6.16 fixes a regression in 0.6.15 (https://github.com/acryldata/PyHive/pull/9) + "acryl-pyhive[hive-pure-sasl]==0.6.16", # As per https://github.com/datahub-project/datahub/issues/8405 # and https://github.com/dropbox/PyHive/issues/417, version 0.14.0 # of thrift broke PyHive's hive+http transport. 
From 0795f0b2e8b40502c6fedb469f4cc5b3e2e8146e Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 29 Nov 2023 09:16:48 -0600 Subject: [PATCH 165/792] feat(gradle): quickstart postgres gradle task (#9329) --- docker/build.gradle | 34 +++++++++++++++++++ ...ompose-without-neo4j.postgres.override.yml | 2 +- 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/docker/build.gradle b/docker/build.gradle index 56634a5fe0c67..c7f783af6c997 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -15,6 +15,7 @@ ext { ':metadata-service:war', ':datahub-frontend', ] + debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', ':metadata-jobs:mae-consumer-job'] debug_compose_args = [ @@ -27,6 +28,13 @@ ext { 'datahub-gms', 'datahub-frontend-react' ] + + // Postgres + pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] + pg_compose_args = [ + '-f', 'docker-compose-without-neo4j.yml', + '-f', 'docker-compose-without-neo4j.postgres.override.yml' + ] } task quickstart(type: Exec, dependsOn: ':metadata-ingestion:install') { @@ -125,3 +133,29 @@ task debugReload(type: Exec) { def cmd = ['docker compose -p datahub'] + debug_compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") } + +task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { + dependsOn(pg_quickstart_modules.collect { it + ':dockerTag' }) + shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' + + environment "DATAHUB_TELEMETRY_ENABLED", "false" + environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" + environment "DATAHUB_POSTGRES_VERSION", "15.5" + + // OpenSearch + environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' + environment "DATAHUB_SEARCH_TAG", '2.9.0' + environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' + environment "USE_AWS_ELASTICSEARCH", 'true' + + def cmd = [ + 'source ../metadata-ingestion/venv/bin/activate && ', + 'datahub docker quickstart', + '--no-pull-images', + '--standalone_consumers', + '--version', "v${version}", + '--dump-logs-on-failure' + ] + pg_compose_args + + commandLine 'bash', '-c', cmd.join(" ") +} diff --git a/docker/docker-compose-without-neo4j.postgres.override.yml b/docker/docker-compose-without-neo4j.postgres.override.yml index e4c754b30afd7..369b5a155fc36 100644 --- a/docker/docker-compose-without-neo4j.postgres.override.yml +++ b/docker/docker-compose-without-neo4j.postgres.override.yml @@ -53,7 +53,7 @@ services: postgres: container_name: postgres hostname: postgres - image: postgres:12.3 + image: postgres:${DATAHUB_POSTGRES_VERSION:-12.3} env_file: postgres/env/docker.env ports: - '5432:5432' From f8db90926e927b890ce9be674b8b45d55e4bffc4 Mon Sep 17 00:00:00 2001 From: noggi Date: Wed, 29 Nov 2023 09:26:14 -0800 Subject: [PATCH 166/792] Upload metadata model to s3 (#9325) --- .github/workflows/metadata-model.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index 4bae5ccc9a266..eb098a327e4cb 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -37,6 +37,19 @@ jobs: run: ./metadata-ingestion/scripts/install_deps.sh - name: Run model generation run: ./gradlew :metadata-models:build + - name: Generate metadata files + if: ${{ needs.setup.outputs.publish == 'true' }} + run: ./gradlew :metadata-ingestion:modelDocGen + - name: Configure AWS 
Credentials + if: ${{ needs.setup.outputs.publish == 'true' }} + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY }} + aws-region: us-west-2 + - name: Upload metadata to S3 + if: ${{ needs.setup.outputs.publish == 'true' }} + run: aws s3 cp ./metadata-ingestion/generated/docs/metadata_model_mces.json s3://${{ secrets.ACRYL_CI_ARTIFACTS_BUCKET }}/datahub/demo/metadata/ - name: Upload metadata to DataHub if: ${{ needs.setup.outputs.publish == 'true' }} env: From fe444aff2638ca232b18827be9de25183cf1c347 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Wed, 29 Nov 2023 13:52:26 -0500 Subject: [PATCH 167/792] fix(ui) Set explicit height on logo images to fix render bug (#9344) --- datahub-web-react/src/app/shared/LogoCountCard.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/datahub-web-react/src/app/shared/LogoCountCard.tsx b/datahub-web-react/src/app/shared/LogoCountCard.tsx index ebf0d9cd4f54e..e67898520e7b8 100644 --- a/datahub-web-react/src/app/shared/LogoCountCard.tsx +++ b/datahub-web-react/src/app/shared/LogoCountCard.tsx @@ -7,6 +7,7 @@ import { HomePageButton } from './components'; const PlatformLogo = styled(Image)` max-height: 32px; + height: 32px; width: auto; object-fit: contain; background-color: transparent; From 5e52e31fc96f71204f8b58a9f4c2f75a489f5c46 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Wed, 29 Nov 2023 14:56:30 -0500 Subject: [PATCH 168/792] fix(ingest/browse): Re-emit browse path v2 aspects to avoid race condition (#9227) --- .../src/datahub/ingestion/api/source.py | 4 +++- .../src/datahub/ingestion/api/source_helpers.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/api/source.py b/metadata-ingestion/src/datahub/ingestion/api/source.py index 8940642f7008a..a272b6e3cffcf 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/source.py +++ b/metadata-ingestion/src/datahub/ingestion/api/source.py @@ -33,6 +33,7 @@ auto_materialize_referenced_tags, auto_status_aspect, auto_workunit_reporter, + re_emit_browse_path_v2, ) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent @@ -278,13 +279,14 @@ def _get_browse_path_processor(self, dry_run: bool) -> MetadataWorkUnitProcessor if isinstance(config, PlatformInstanceConfigMixin) and config.platform_instance: platform_instance = config.platform_instance - return partial( + browse_path_processor = partial( auto_browse_path_v2, platform=platform, platform_instance=platform_instance, drop_dirs=[s for s in browse_path_drop_dirs if s is not None], dry_run=dry_run, ) + return lambda stream: re_emit_browse_path_v2(browse_path_processor(stream)) class TestableSource(Source): diff --git a/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py b/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py index fae260226195c..66365ef0cdc45 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py +++ b/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py @@ -198,6 +198,21 @@ def auto_lowercase_urns( yield wu +def re_emit_browse_path_v2( + stream: Iterable[MetadataWorkUnit], +) -> Iterable[MetadataWorkUnit]: + """Re-emit browse paths v2 aspects, to avoid race condition where server overwrites with default.""" + browse_path_v2_workunits = [] + + for wu in stream: + yield wu + if 
wu.is_primary_source and wu.get_aspect_of_type(BrowsePathsV2Class): + browse_path_v2_workunits.append(wu) + + for wu in browse_path_v2_workunits: + yield wu + + def auto_browse_path_v2( stream: Iterable[MetadataWorkUnit], *, From 863894b80a36776438cfc4c8728fedba013ddd31 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 29 Nov 2023 16:25:48 -0500 Subject: [PATCH 169/792] feat(ingest/ldap): make ingestion robust to string departmentId (#9258) --- metadata-ingestion/src/datahub/ingestion/source/ldap.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ldap.py b/metadata-ingestion/src/datahub/ingestion/source/ldap.py index e1d035a96d42f..72985688273f6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ldap.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ldap.py @@ -1,4 +1,5 @@ """LDAP Source""" +import contextlib import dataclasses from typing import Any, Dict, Iterable, List, Optional @@ -390,10 +391,10 @@ def build_corp_user_mce( country_code = get_attr_or_none( attrs, self.config.user_attrs_map["countryCode"] ) - if department_id_str: - department_id = int(department_id_str) - else: - department_id = None + department_id = None + with contextlib.suppress(ValueError): + if department_id_str: + department_id = int(department_id_str) custom_props_map = {} if self.config.custom_props_list: From dd09f5e68f76003ee54ab776eefbfb71f335ba15 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Thu, 30 Nov 2023 00:56:26 +0100 Subject: [PATCH 170/792] doc(ingest/teradata): Adding Teradata to list of Integrations (#9336) --- docs-website/filterTagIndexes.json | 11 +++++++++++ .../src/datahub/ingestion/source/redshift/redshift.py | 4 ++++ 2 files changed, 15 insertions(+) diff --git a/docs-website/filterTagIndexes.json b/docs-website/filterTagIndexes.json index c154b586fe66e..419f16e8d8a52 100644 --- a/docs-website/filterTagIndexes.json +++ b/docs-website/filterTagIndexes.json @@ -605,6 +605,17 @@ "Features": "Notifications, Alerting" } }, + { + "Path": "docs/generated/ingestion/sources/teradata", + "imgPath": "img/logos/platforms/teradata.svg", + "Title": "Teradata", + "Description": "Teradata is a data warehousing and analytics tool that allows users to store, manage, and analyze large amounts of data in a scalable and cost-effective manner.", + "tags": { + "Platform Type": "BI Tool", + "Connection Type": "Pull", + "Features": "Stateful Ingestion, Column Level Lineage, UI Ingestion, Lower Casing, Status Aspect" + } + }, { "Path": "docs/generated/ingestion/sources/trino", "imgPath": "img/logos/platforms/trino.png", diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py index 0b1bde6ca8c0a..04f0edf504595 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py @@ -114,6 +114,10 @@ @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration") @capability(SourceCapability.DESCRIPTIONS, "Enabled by default") @capability(SourceCapability.LINEAGE_COARSE, "Optionally enabled via configuration") +@capability( + SourceCapability.LINEAGE_FINE, + "Optionally enabled via configuration (`mixed` or `sql_based` lineage needs to be enabled)", +) @capability( SourceCapability.USAGE_STATS, "Enabled by default, can be disabled via configuration `include_usage_statistics`", From 
c00ce518c2f6eccab73bf0e3598761362b7df8d0 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 06:07:33 +0530 Subject: [PATCH 171/792] fix(ui): Complexity in chalk/ansi-regex and minimatch ReDoS Vulnerability solution (#9323) Co-authored-by: John Joyce --- datahub-web-react/package.json | 2 ++ datahub-web-react/yarn.lock | 52 ++++++---------------------------- 2 files changed, 10 insertions(+), 44 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index fd01fccbdff6c..0b889810a809a 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -150,6 +150,8 @@ "@ant-design/colors": "6.0.0", "refractor": "3.3.1", "json-schema": "0.4.0", + "ansi-regex": "3.0.1", + "minimatch": "3.0.5", "prismjs": "^1.27.0", "nth-check": "^2.0.1" } diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 3bab8aebdf3fb..9924c223c1b0a 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -5343,25 +5343,10 @@ ansi-html-community@^0.0.8: resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= - -ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= - -ansi-regex@^5.0.0, ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-regex@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== +ansi-regex@3.0.1, ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== ansi-styles@^2.2.1: version "2.2.1" @@ -6011,13 +5996,6 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - braces@^2.3.1: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" @@ -11783,27 +11761,13 @@ minimalistic-assert@^1.0.0: resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4: - 
version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== +minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^5.0.1: + version "3.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" + integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== dependencies: brace-expansion "^1.1.7" -minimatch@^5.0.1: - version "5.1.6" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" - integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== - dependencies: - brace-expansion "^2.0.1" - minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" From cb722533279d58d32cfdfd4fb5afe64c7e6552f7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 01:07:37 +0000 Subject: [PATCH 172/792] build(deps): bump tmpl from 1.0.4 to 1.0.5 in /datahub-web-react (#9345) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- datahub-web-react/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 9924c223c1b0a..e222209ead6bc 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -16016,9 +16016,9 @@ tmp@^0.0.33: os-tmpdir "~1.0.2" tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" From c3499f8661c1a06cca1d165371db20d71aea4396 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 07:37:50 +0530 Subject: [PATCH 173/792] fix(): Address @babel/traverse vulnerabilities (#9343) Co-authored-by: John Joyce --- datahub-web-react/package.json | 1 + datahub-web-react/yarn.lock | 129 +++++++++++++++++++++++++-------- 2 files changed, 98 insertions(+), 32 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 0b889810a809a..40bcad19284d9 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -150,6 +150,7 @@ "@ant-design/colors": "6.0.0", "refractor": "3.3.1", "json-schema": "0.4.0", + "@babel/traverse": ">=7.23.2", "ansi-regex": "3.0.1", "minimatch": "3.0.5", "prismjs": "^1.27.0", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 
e222209ead6bc..b755281d17697 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -191,6 +191,14 @@ dependencies: "@babel/highlight" "^7.22.5" +"@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" + integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA== + dependencies: + "@babel/highlight" "^7.23.4" + chalk "^2.4.2" + "@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" @@ -226,7 +234,7 @@ eslint-visitor-keys "^2.1.0" semver "^6.3.0" -"@babel/generator@^7.12.13", "@babel/generator@^7.14.0", "@babel/generator@^7.22.5", "@babel/generator@^7.7.2": +"@babel/generator@^7.14.0", "@babel/generator@^7.22.5", "@babel/generator@^7.7.2": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.5.tgz#1e7bf768688acfb05cf30b2369ef855e82d984f7" integrity sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA== @@ -236,6 +244,16 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" +"@babel/generator@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.5.tgz#17d0a1ea6b62f351d281350a5f80b87a810c4755" + integrity sha512-BPssCHrBD+0YrxviOa3QzpqwhNIXKEtOa2jQrm4FlmkC2apYgRnQcmPWiGZDlGxiNtltnUFolMe8497Esry+jA== + dependencies: + "@babel/types" "^7.23.5" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + "@babel/helper-annotate-as-pure@^7.0.0", "@babel/helper-annotate-as-pure@^7.18.6", "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" @@ -297,12 +315,17 @@ resolve "^1.14.2" semver "^6.1.2" +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + "@babel/helper-environment-visitor@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== -"@babel/helper-function-name@^7.12.13", "@babel/helper-function-name@^7.22.5": +"@babel/helper-function-name@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== @@ -310,6 +333,14 @@ "@babel/template" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + 
dependencies: + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" @@ -393,23 +424,40 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-split-export-declaration@^7.12.13", "@babel/helper-split-export-declaration@^7.22.5": +"@babel/helper-split-export-declaration@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz#88cf11050edb95ed08d596f7a044462189127a08" integrity sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ== dependencies: "@babel/types" "^7.22.5" +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-string-parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== +"@babel/helper-string-parser@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" + integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== + "@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-option@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" @@ -443,16 +491,30 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" + integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + "@babel/parser@7.12.16": version "7.12.16" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== -"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.12.13", "@babel/parser@^7.14.0", 
"@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": +"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" + integrity sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" @@ -1322,6 +1384,15 @@ dependencies: regenerator-runtime "^0.13.11" +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + "@babel/template@^7.22.5", "@babel/template@^7.3.3", "@babel/template@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" @@ -1331,34 +1402,19 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.12.13.tgz#689f0e4b4c08587ad26622832632735fb8c4e0c0" - integrity sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA== - dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.12.13" - "@babel/helper-function-name" "^7.12.13" - "@babel/helper-split-export-declaration" "^7.12.13" - "@babel/parser" "^7.12.13" - "@babel/types" "^7.12.13" - debug "^4.1.0" - globals "^11.1.0" - lodash "^4.17.19" - -"@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.5.tgz#44bd276690db6f4940fdb84e1cb4abd2f729ccd1" - integrity sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ== +"@babel/traverse@7.12.13", "@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" + integrity sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w== dependencies: - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.5" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-function-name" "^7.22.5" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" 
"^7.23.0" "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.5" + "@babel/types" "^7.23.5" debug "^4.1.0" globals "^11.1.0" @@ -1371,7 +1427,7 @@ lodash "^4.17.19" to-fast-properties "^2.0.0" -"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.13", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": +"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== @@ -1380,6 +1436,15 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" + integrity sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w== + dependencies: + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@bcoe/v8-coverage@^0.2.3": version "0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -6211,7 +6276,7 @@ chalk@^1.0.0, chalk@^1.1.3: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.0, chalk@^2.4.1: +chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== From efaf21d571262a58c0c3e624c523213be4310c43 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 03:51:44 -0500 Subject: [PATCH 174/792] docs(ingest/looker): mark platform instance as a supported capability (#9347) --- .../src/datahub/ingestion/source/looker/looker_config.py | 7 +++++-- .../src/datahub/ingestion/source/looker/looker_source.py | 2 +- .../src/datahub/ingestion/source/looker/lookml_source.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index e6ddea9a30489..514f22b4f2158 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -9,7 +9,10 @@ from datahub.configuration import ConfigModel from datahub.configuration.common import AllowDenyPattern, ConfigurationError -from datahub.configuration.source_common import DatasetSourceConfigMixin, EnvConfigMixin +from datahub.configuration.source_common import ( + EnvConfigMixin, + PlatformInstanceConfigMixin, +) from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.looker.looker_lib_wrapper import LookerAPIConfig from datahub.ingestion.source.state.stale_entity_removal_handler import ( @@ -98,7 +101,7 @@ class 
LookerViewNamingPattern(NamingPattern): ] -class LookerCommonConfig(DatasetSourceConfigMixin): +class LookerCommonConfig(EnvConfigMixin, PlatformInstanceConfigMixin): explore_naming_pattern: LookerNamingPattern = pydantic.Field( description=f"Pattern for providing dataset names to explores. {LookerNamingPattern.allowed_docstring()}", default=LookerNamingPattern(pattern="{model}.explore.{name}"), diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 4a98e8874bca0..7e8fbfde12042 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -99,7 +99,7 @@ @support_status(SupportStatus.CERTIFIED) @config_class(LookerDashboardSourceConfig) @capability(SourceCapability.DESCRIPTIONS, "Enabled by default") -@capability(SourceCapability.PLATFORM_INSTANCE, "Not supported", supported=False) +@capability(SourceCapability.PLATFORM_INSTANCE, "Use the `platform_instance` field") @capability( SourceCapability.OWNERSHIP, "Enabled by default, configured using `extract_owners`" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 2bd469b3f9bcd..4e91d17feaa9f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -1455,7 +1455,7 @@ class LookerManifest: @support_status(SupportStatus.CERTIFIED) @capability( SourceCapability.PLATFORM_INSTANCE, - "Supported using the `connection_to_platform_map`", + "Use the `platform_instance` and `connection_to_platform_map` fields", ) @capability(SourceCapability.LINEAGE_COARSE, "Supported by default") @capability( From 65d5034a80d60f85f57a5157b730eda9d83c5516 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:50:08 +0530 Subject: [PATCH 175/792] fix(): Address HIGH vulnerability with Axios (#9353) --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 116 ++++++++++++++++----------------- 2 files changed, 58 insertions(+), 60 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 40bcad19284d9..22c88f9647dc2 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -80,7 +80,7 @@ "reactour": "1.18.7", "remirror": "^2.0.23", "sinon": "^11.1.1", - "start-server-and-test": "1.12.2", + "start-server-and-test": "^2.0.3", "styled-components": "^5.2.1", "turndown-plugin-gfm": "^1.0.2", "typescript": "^4.8.4", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index b755281d17697..d33299a79b13a 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -3760,14 +3760,14 @@ resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" integrity sha512-izzOXQfeQLonzrIQb8u6LQ8dk+ymz3WXTIXjvOlTXHq6sbzROg3NWU+9TTAOpEoK9Bth24/6F/XrfHJ5yR5n6Q== -"@sideway/address@^4.1.0": - version "4.1.2" - resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.2.tgz#811b84333a335739d3969cfc434736268170cad1" - integrity sha512-idTz8ibqWFrPU8kMirL0CoPH/A29XOzzAzpyN3zQ4kAWnzmNfFmRaoMNN6VI8ske5M73HZyhIaW4OuSFIdM4oA== +"@sideway/address@^4.1.3": + version "4.1.4" + resolved 
"https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" + integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== dependencies: "@hapi/hoek" "^9.0.0" -"@sideway/formula@^3.0.0": +"@sideway/formula@^3.0.1": version "3.0.1" resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== @@ -5712,12 +5712,14 @@ axe-core@^4.6.2: resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== -axios@^0.21.1: - version "0.21.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== +axios@^1.6.1: + version "1.6.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2" + integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A== dependencies: - follow-redirects "^1.14.0" + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" axobject-query@^3.1.1: version "3.2.1" @@ -7301,7 +7303,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: dependencies: ms "2.0.0" -debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: +debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -7315,13 +7317,6 @@ debug@4.1.1: dependencies: ms "^2.1.1" -debug@4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - debug@^3.2.6, debug@^3.2.7: version "3.2.7" resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" @@ -8232,22 +8227,7 @@ events@^3.2.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -execa@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/execa/-/execa-5.0.0.tgz#4029b0007998a841fbd1032e5f4de86a3c1e3376" - integrity sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -execa@^5.0.0: +execa@5.1.1, execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== @@ -8661,11 +8641,16 @@ focus-outline-manager@^1.0.2: resolved 
"https://registry.yarnpkg.com/focus-outline-manager/-/focus-outline-manager-1.0.2.tgz#7bf3658865341fb6b08d042a037b9d2868b119b5" integrity sha512-bHWEmjLsTjGP9gVs7P3Hyl+oY5NlMW8aTSPdTJ+X2GKt6glDctt9fUCLbRV+d/l8NDC40+FxMjp9WlTQXaQALw== -follow-redirects@^1.0.0, follow-redirects@^1.14.0: +follow-redirects@^1.0.0: version "1.15.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.0.tgz#06441868281c86d0dda4ad8bdaead2d02dca89d4" integrity sha512-aExlJShTV4qOUOL7yF1U5tvLCB0xQuudbf6toyYA0E/acBNw71mvjFTnLaRp50aQaYocMR0a/RMMBIHeZnGyjQ== +follow-redirects@^1.15.0: + version "1.15.3" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" + integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== + for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -8695,7 +8680,7 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" -form-data@4.0.0: +form-data@4.0.0, form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== @@ -10711,15 +10696,15 @@ jiti@^1.18.2: resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== -joi@^17.3.0: - version "17.4.1" - resolved "https://registry.npmjs.org/joi/-/joi-17.4.1.tgz#15d2f23c8cbe4d1baded2dd190c58f8dbe11cca0" - integrity sha512-gDPOwQ5sr+BUxXuPDGrC1pSNcVR/yGGcTI0aCnjYxZEa3za60K/iCQ+OFIkEHWZGVCUcUlXlFKvMmrlmxrG6UQ== +joi@^17.11.0: + version "17.11.0" + resolved "https://registry.yarnpkg.com/joi/-/joi-17.11.0.tgz#aa9da753578ec7720e6f0ca2c7046996ed04fc1a" + integrity sha512-NgB+lZLNoqISVy1rZocE9PZI36bL/77ie924Ri43yEvi9GUUMPeyVIr8KdFTMUlby1p0PBYMk9spIxEUQYqrJQ== dependencies: "@hapi/hoek" "^9.0.0" "@hapi/topo" "^5.0.0" - "@sideway/address" "^4.1.0" - "@sideway/formula" "^3.0.0" + "@sideway/address" "^4.1.3" + "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" js-cookie@^2.2.1: @@ -11833,7 +11818,7 @@ minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@ dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -13486,6 +13471,11 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -14883,13 +14873,20 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.3.3, rxjs@^6.6.0, rxjs@^6.6.3: +rxjs@^6.3.3, rxjs@^6.6.0: version "6.6.7" resolved 
"https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== dependencies: tslib "^1.9.0" +rxjs@^7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + safari-14-idb-fix@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/safari-14-idb-fix/-/safari-14-idb-fix-1.0.6.tgz#cbaabc33a4500c44b5c432d6c525b0ed9b68bb65" @@ -15495,18 +15492,19 @@ stacktrace-js@^2.0.2: stack-generator "^2.0.5" stacktrace-gps "^3.0.4" -start-server-and-test@1.12.2: - version "1.12.2" - resolved "https://registry.npmjs.org/start-server-and-test/-/start-server-and-test-1.12.2.tgz#13afe6f22d7347e0fd47a739cdd085786fced14b" - integrity sha512-rjJF8N/8XVukEYR44Ehm8LAZIDjWCQKXX54W8UQ8pXz3yDKPCdqTqJy7VYnCAknPw65cmLfPxz8M2+K/zCAvVQ== +start-server-and-test@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/start-server-and-test/-/start-server-and-test-2.0.3.tgz#15c53c85e23cba7698b498b8a2598cab95f3f802" + integrity sha512-QsVObjfjFZKJE6CS6bSKNwWZCKBG6975/jKRPPGFfFh+yOQglSeGXiNWjzgQNXdphcBI9nXbyso9tPfX4YAUhg== dependencies: + arg "^5.0.2" bluebird "3.7.2" check-more-types "2.24.0" - debug "4.3.1" - execa "5.0.0" + debug "4.3.4" + execa "5.1.1" lazy-ass "1.6.0" ps-tree "1.2.0" - wait-on "5.3.0" + wait-on "7.2.0" state-local@^1.0.6: version "1.0.7" @@ -16739,16 +16737,16 @@ w3c-xmlserializer@^2.0.0: dependencies: xml-name-validator "^3.0.0" -wait-on@5.3.0: - version "5.3.0" - resolved "https://registry.npmjs.org/wait-on/-/wait-on-5.3.0.tgz#584e17d4b3fe7b46ac2b9f8e5e102c005c2776c7" - integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg== +wait-on@7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-7.2.0.tgz#d76b20ed3fc1e2bebc051fae5c1ff93be7892928" + integrity sha512-wCQcHkRazgjG5XoAq9jbTMLpNIjoSlZslrJ2+N9MxDsGEv1HnFoVjOCexL0ESva7Y9cu350j+DWADdk54s4AFQ== dependencies: - axios "^0.21.1" - joi "^17.3.0" + axios "^1.6.1" + joi "^17.11.0" lodash "^4.17.21" - minimist "^1.2.5" - rxjs "^6.6.3" + minimist "^1.2.8" + rxjs "^7.8.1" walker@^1.0.7: version "1.0.7" From ae1169d6d5831751b6d26d08052472d4adfdbf43 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:53:28 +0530 Subject: [PATCH 176/792] fix(ui): show formatted total result count in Search (#9356) --- datahub-web-react/src/app/search/SearchResults.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index b93e835970196..11bb494588753 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -28,6 +28,7 @@ import SearchSortSelect from './sorting/SearchSortSelect'; import { combineSiblingsInSearchResults } from './utils/combineSiblingsInSearchResults'; import SearchQuerySuggester from './suggestions/SearchQuerySugggester'; import { ANTD_GRAY_V2 } from '../entity/shared/constants'; +import { formatNumberWithoutAbbreviation } from '../shared/formatNumber'; const SearchResultsWrapper = styled.div<{ v2Styles: boolean }>` display: flex; @@ -210,7 +211,13 @@ export const 
SearchResults = ({ {lastResultIndex > 0 ? (page - 1) * pageSize + 1 : 0} - {lastResultIndex} {' '} - of {totalResults} results + of{' '} + + {totalResults >= 10000 + ? `${formatNumberWithoutAbbreviation(10000)}+` + : formatNumberWithoutAbbreviation(totalResults)} + {' '} + results From a7dc9c9d2292898d9668a3e39b0db42837397f94 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 18:11:36 -0500 Subject: [PATCH 177/792] feat(sdk): autogenerate urn types (#9257) --- docs-website/sphinx/apidocs/urns.rst | 7 + docs-website/sphinx/conf.py | 4 + docs-website/sphinx/index.rst | 1 + docs-website/sphinx/requirements.txt | 2 +- docs/how/updating-datahub.md | 52 ++- .../dataset_add_column_documentation.py | 14 +- .../library/dataset_add_column_tag.py | 14 +- .../library/dataset_add_column_term.py | 14 +- .../examples/library/upsert_group.py | 8 +- metadata-ingestion/scripts/avro_codegen.py | 407 +++++++++++++++++- .../scripts/custom_package_codegen.py | 1 + .../dataprocess/dataprocess_instance.py | 2 +- .../datahub/ingestion/source/csv_enricher.py | 5 +- .../source/metadata/business_glossary.py | 2 +- .../src/datahub/ingestion/source/tableau.py | 4 +- .../utilities/_custom_package_loader.py | 5 + .../src/datahub/utilities/docs_build.py | 9 + .../src/datahub/utilities/sqlglot_lineage.py | 14 +- .../src/datahub/utilities/urn_encoder.py | 4 + .../src/datahub/utilities/urns/_urn_base.py | 234 ++++++++++ .../datahub/utilities/urns/corp_group_urn.py | 41 +- .../datahub/utilities/urns/corpuser_urn.py | 41 +- .../datahub/utilities/urns/data_flow_urn.py | 89 +--- .../datahub/utilities/urns/data_job_urn.py | 52 +-- .../utilities/urns/data_platform_urn.py | 35 +- .../urns/data_process_instance_urn.py | 47 +- .../src/datahub/utilities/urns/dataset_urn.py | 113 +---- .../src/datahub/utilities/urns/domain_urn.py | 41 +- .../src/datahub/utilities/urns/error.py | 3 +- .../src/datahub/utilities/urns/field_paths.py | 15 + .../datahub/utilities/urns/notebook_urn.py | 47 +- .../src/datahub/utilities/urns/tag_urn.py | 41 +- .../src/datahub/utilities/urns/urn.py | 163 +------ .../src/datahub/utilities/urns/urn_iter.py | 10 +- .../state/test_checkpoint.py | 8 +- .../stateful_ingestion/test_kafka_state.py | 2 +- metadata-ingestion/tests/unit/test_urn.py | 45 -- .../unit/{ => urns}/test_corp_group_urn.py | 5 +- .../unit/{ => urns}/test_corpuser_urn.py | 5 +- .../unit/{ => urns}/test_data_flow_urn.py | 10 +- .../unit/{ => urns}/test_data_job_urn.py | 5 +- .../test_data_process_instance_urn.py | 9 +- .../tests/unit/{ => urns}/test_dataset_urn.py | 15 +- .../tests/unit/{ => urns}/test_domain_urn.py | 5 +- .../unit/{ => urns}/test_notebook_urn.py | 5 +- .../tests/unit/{ => urns}/test_tag_urn.py | 5 +- .../tests/unit/urns/test_urn.py | 56 +++ .../src/main/resources/entity-registry.yml | 4 +- 48 files changed, 856 insertions(+), 864 deletions(-) create mode 100644 docs-website/sphinx/apidocs/urns.rst create mode 100644 metadata-ingestion/src/datahub/utilities/docs_build.py create mode 100644 metadata-ingestion/src/datahub/utilities/urns/_urn_base.py create mode 100644 metadata-ingestion/src/datahub/utilities/urns/field_paths.py delete mode 100644 metadata-ingestion/tests/unit/test_urn.py rename metadata-ingestion/tests/unit/{ => urns}/test_corp_group_urn.py (87%) rename metadata-ingestion/tests/unit/{ => urns}/test_corpuser_urn.py (88%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_flow_urn.py (77%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_job_urn.py (90%) rename 
metadata-ingestion/tests/unit/{ => urns}/test_data_process_instance_urn.py (90%)
 rename metadata-ingestion/tests/unit/{ => urns}/test_dataset_urn.py (81%)
 rename metadata-ingestion/tests/unit/{ => urns}/test_domain_urn.py (87%)
 rename metadata-ingestion/tests/unit/{ => urns}/test_notebook_urn.py (86%)
 rename metadata-ingestion/tests/unit/{ => urns}/test_tag_urn.py (87%)
 create mode 100644 metadata-ingestion/tests/unit/urns/test_urn.py

diff --git a/docs-website/sphinx/apidocs/urns.rst b/docs-website/sphinx/apidocs/urns.rst
new file mode 100644
index 0000000000000..2bd70deb22c7e
--- /dev/null
+++ b/docs-website/sphinx/apidocs/urns.rst
@@ -0,0 +1,7 @@
+URNs
+======
+
+.. automodule:: datahub.metadata.urns
+   :exclude-members: LI_DOMAIN, URN_PREFIX, url_encode, validate, get_type, get_entity_id, get_entity_id_as_string, get_domain, underlying_key_aspect_type
+   :member-order: alphabetical
+   :inherited-members:
diff --git a/docs-website/sphinx/conf.py b/docs-website/sphinx/conf.py
index 3f118aadeea81..49cd20d5ef44d 100644
--- a/docs-website/sphinx/conf.py
+++ b/docs-website/sphinx/conf.py
@@ -3,6 +3,10 @@
 # For the full list of built-in configuration values, see the documentation:
 # https://www.sphinx-doc.org/en/master/usage/configuration.html

+# See https://stackoverflow.com/a/65147676
+import builtins
+
+builtins.__sphinx_build__ = True

 # -- Project information -----------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
diff --git a/docs-website/sphinx/index.rst b/docs-website/sphinx/index.rst
index fe11648dff555..18d92f4053b94 100644
--- a/docs-website/sphinx/index.rst
+++ b/docs-website/sphinx/index.rst
@@ -14,6 +14,7 @@ Welcome to DataHub Python SDK's documentation!
    apidocs/builder
    apidocs/clients
    apidocs/models
+   apidocs/urns

 Indices and tables
diff --git a/docs-website/sphinx/requirements.txt b/docs-website/sphinx/requirements.txt
index 94ddd40579f0e..2e064330138d9 100644
--- a/docs-website/sphinx/requirements.txt
+++ b/docs-website/sphinx/requirements.txt
@@ -1,4 +1,4 @@
--e ../../metadata-ingestion[datahub-rest,sql-parsing]
+-e ../../metadata-ingestion[datahub-rest,sql-parser]
 beautifulsoup4==4.11.2
 Sphinx==6.1.3
 sphinx-click==4.4.0
diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md
index 3263a9f7c15fb..dad05fd0153f2 100644
--- a/docs/how/updating-datahub.md
+++ b/docs/how/updating-datahub.md
@@ -9,6 +9,9 @@ This file documents any backwards-incompatible changes in DataHub and assists pe
 - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances.
 - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources.
 - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6.
+- #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the direct constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead.
+ The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. + ### Potential Downtime ### Deprecations @@ -23,18 +26,19 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - #9044 - GraphQL APIs for adding ownership now expect either an `ownershipTypeUrn` referencing a customer ownership type or a (deprecated) `type`. Where before adding an ownership without a concrete type was allowed, this is no longer the case. For simplicity you can use the `type` parameter which will get translated to a custom ownership type internally if one exists for the type being added. - #9010 - In Redshift source's config `incremental_lineage` is set default to off. - #8810 - Removed support for SQLAlchemy 1.3.x. Only SQLAlchemy 1.4.x is supported now. -- #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted +- #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted by Looker and LookML source connectors. - #8853 - The Airflow plugin no longer supports Airflow 2.0.x or Python 3.7. See the docs for more details. - #8853 - Introduced the Airflow plugin v2. If you're using Airflow 2.3+, the v2 plugin will be enabled by default, and so you'll need to switch your requirements to include `pip install 'acryl-datahub-airflow-plugin[plugin-v2]'`. To continue using the v1 plugin, set the `DATAHUB_AIRFLOW_PLUGIN_USE_V1_PLUGIN` environment variable to `true`. - #8943 - The Unity Catalog ingestion source has a new option `include_metastore`, which will cause all urns to be changed when disabled. -This is currently enabled by default to preserve compatibility, but will be disabled by default and then removed in the future. -If stateful ingestion is enabled, simply setting `include_metastore: false` will perform all required cleanup. -Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: -`datahub delete --platform databricks --soft` and then reingesting with `include_metastore: false`. + This is currently enabled by default to preserve compatibility, but will be disabled by default and then removed in the future. + If stateful ingestion is enabled, simply setting `include_metastore: false` will perform all required cleanup. + Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: + `datahub delete --platform databricks --soft` and then reingesting with `include_metastore: false`. - #8846 - Changed enum values in resource filters used by policies. `RESOURCE_TYPE` became `TYPE` and `RESOURCE_URN` became `URN`. -Any existing policies using these filters (i.e. defined for particular `urns` or `types` such as `dataset`) need to be upgraded -manually, for example by retrieving their respective `dataHubPolicyInfo` aspect and changing part using filter i.e. + Any existing policies using these filters (i.e. defined for particular `urns` or `types` such as `dataset`) need to be upgraded + manually, for example by retrieving their respective `dataHubPolicyInfo` aspect and changing part using filter i.e. + ```yaml "resources": { "filter": { @@ -49,7 +53,9 @@ manually, for example by retrieving their respective `dataHubPolicyInfo` aspect ] } ``` + into + ```yaml "resources": { "filter": { @@ -64,22 +70,25 @@ into ] } ``` + for example, using `datahub put` command. 
Policies can be also removed and re-created via UI.
+
- #9077 - The BigQuery ingestion source by default sets `match_fully_qualified_names: true`.
-This means that any `dataset_pattern` or `schema_pattern` specified will be matched on the fully
-qualified dataset name, i.e. `<project_id>.<dataset_name>`. We attempt to support the old
-pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this
-should not cause any issues. However, if you have a complex dataset pattern, we recommend you
-manually convert it to the fully qualified format to avoid any potential issues.
+  This means that any `dataset_pattern` or `schema_pattern` specified will be matched on the fully
+  qualified dataset name, i.e. `<project_id>.<dataset_name>`. We attempt to support the old
+  pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this
+  should not cause any issues. However, if you have a complex dataset pattern, we recommend you
+  manually convert it to the fully qualified format to avoid any potential issues.
- #9110 - The Unity Catalog source will now generate urns based on `env` properly. If you have
-been setting `env` in your recipe to something besides `PROD`, we will now generate urns
-with that new env variable, invalidating your existing urns.
+  been setting `env` in your recipe to something besides `PROD`, we will now generate urns
+  with that new env variable, invalidating your existing urns.

### Potential Downtime

### Deprecations

### Other Notable Changes
+
- Session token configuration has changed, all previously created session tokens will be invalid and users will be prompted to log in. Expiration time has also been shortened which may result in more login prompts with the default settings. There should be no other interruption due to this change.

@@ -88,13 +97,16 @@
### Breaking Changes

### Potential Downtime
+
- #8611 Search improvements requires reindexing indices. A `system-update` job will run which will set indices to read-only and create a backup/clone of each index. During the reindexing new components will be prevented from start-up until the reindex completes. The logs of this job will indicate a % complete per index. Depending on index sizes and infrastructure this process can take 5 minutes to hours however as a rough estimate 1 hour for every 2.3 million entities.

### Deprecations
+
- #8525: In LDAP ingestor, the `manager_pagination_enabled` changed to general `pagination_enabled`
- MAE Events are no longer produced. MAE events have been deprecated for over a year.

### Other Notable Changes
+
- In this release we now enable you to create and delete pinned announcements on your DataHub homepage! If you have the “Manage Home Page Posts” platform privilege you’ll see a new section in settings called “Home Page Posts” where you can create and delete text posts and link posts that your users see on the home page.
- The new search and browse experience, which was first made available in the previous release behind a feature flag, is now on by default. Check out our release notes for v0.10.5 to get more information and documentation on this new Browse experience.
- In addition to the ranking changes mentioned above, this release includes changes to the highlighting of search entities to understand why they match your query. You can also sort your results alphabetically or by last updated times, in addition to relevance. In this release, we suggest a correction if your query has a typo in it.
@@ -121,12 +133,13 @@ with that new env variable, invalidating your existing urns. This determines which Okta profile attribute is used for the corresponding DataHub user and thus may change what DataHub users are generated by the Okta source. And in a follow up `okta_profile_to_username_regex` has been set to `.*` which taken together with previous change brings the defaults in line with OIDC. - #8331: For all sql-based sources that support profiling, you can no longer specify -`profile_table_level_only` together with `include_field_xyz` config options to ingest -certain column-level metrics. Instead, set `profile_table_level_only` to `false` and -individually enable / disable desired field metrics. + `profile_table_level_only` together with `include_field_xyz` config options to ingest + certain column-level metrics. Instead, set `profile_table_level_only` to `false` and + individually enable / disable desired field metrics. - #8451: The `bigquery-beta` and `snowflake-beta` source aliases have been dropped. Use `bigquery` and `snowflake` as the source type instead. - #8472: Ingestion runs created with Pipeline.create will show up in the DataHub ingestion tab as CLI-based runs. To revert to the previous behavior of not showing these runs in DataHub, pass `no_default_report=True`. -- #8513: `snowflake` connector will use user's `email` attribute as is in urn. To revert to previous behavior disable `email_as_user_identifier` in recipe. +- #8513: `snowflake` connector will use user's `email` attribute as is in urn. To revert to previous behavior disable `email_as_user_identifier` in recipe. + ### Potential Downtime - BrowsePathsV2 upgrade will now be handled by the `system-update` job in non-blocking mode. This process generates data needed for the new search @@ -153,9 +166,11 @@ individually enable / disable desired field metrics. ### Potential Downtime ### Deprecations + - #8045: With the introduction of custom ownership types, the `Owner` aspect has been updated where the `type` field is deprecated in favor of a new field `typeUrn`. This latter field is an urn reference to the new OwnershipType entity. GraphQL endpoints have been updated to use the new field. For pre-existing ownership aspect records, DataHub now has logic to map the old field to the new field. ### Other notable Changes + - #8191: Updates GMS's health check endpoint to account for its dependency on external components. Notably, at this time, elasticsearch. This means that DataHub operators can now use GMS health status more reliably. ## 0.10.3 @@ -170,6 +185,7 @@ individually enable / disable desired field metrics. ### Potential Downtime ### Deprecations + - The signature of `Source.get_workunits()` is changed from `Iterable[WorkUnit]` to the more restrictive `Iterable[MetadataWorkUnit]`. - Legacy usage creation via the `UsageAggregation` aspect, `/usageStats?action=batchIngest` GMS endpoint, and `UsageStatsWorkUnit` metadata-ingestion class are all deprecated. 
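Note: the example-library diffs below apply the #9257 urn migration mechanically. As a minimal sketch of the new-style usage (class names and constructor signatures are taken from this patch; the surrounding environment and exact deprecation warnings are assumed):

```python
# Illustrative sketch of the autogenerated urn API introduced by #9257;
# assumes a datahub build at or after this change, not an authoritative reference.
from datahub.metadata.urns import CorpUserUrn, DatasetUrn, TagUrn

# Old: TagUrn("tag", ["pii"]) -- the entity-type positional argument is gone.
tag = TagUrn("pii")
assert tag.urn() == "urn:li:tag:pii"

# Old: CorpuserUrn.create_from_id("datahub") -- now just the constructor.
user = CorpUserUrn("datahub")

# Key-aspect fields become constructor arguments; env defaults to "PROD".
dataset = DatasetUrn(platform="hive", name="fct_users_created")
# Round-tripping through the string form relies on the new _urn_base behavior.
assert DatasetUrn.from_string(str(dataset)) == dataset
```
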
diff --git a/metadata-ingestion/examples/library/dataset_add_column_documentation.py b/metadata-ingestion/examples/library/dataset_add_column_documentation.py index a6dbf58c09c81..bf871e2dcdb8e 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_documentation.py +++ b/metadata-ingestion/examples/library/dataset_add_column_documentation.py @@ -14,24 +14,12 @@ EditableSchemaMetadataClass, InstitutionalMemoryClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> owner, ownership_type, dataset documentation_to_add = ( "Name of the user who was deleted. This description is updated via PythonSDK." diff --git a/metadata-ingestion/examples/library/dataset_add_column_tag.py b/metadata-ingestion/examples/library/dataset_add_column_tag.py index ede1809c7bad9..94204bc39b874 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_tag.py +++ b/metadata-ingestion/examples/library/dataset_add_column_tag.py @@ -15,24 +15,12 @@ GlobalTagsClass, TagAssociationClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> the column, dataset and the tag to set column = "user_name" dataset_urn = make_dataset_urn(platform="hive", name="fct_users_created", env="PROD") diff --git a/metadata-ingestion/examples/library/dataset_add_column_term.py b/metadata-ingestion/examples/library/dataset_add_column_term.py index 115517bcfa06e..9796fa9d5404c 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_term.py +++ b/metadata-ingestion/examples/library/dataset_add_column_term.py @@ -15,24 +15,12 @@ GlossaryTermAssociationClass, GlossaryTermsClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . 
path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> the column, dataset and the term to set column = "address.zipcode" dataset_urn = make_dataset_urn(platform="hive", name="realestate_db.sales", env="PROD") diff --git a/metadata-ingestion/examples/library/upsert_group.py b/metadata-ingestion/examples/library/upsert_group.py index 86a03b72c1289..84844e142f46c 100644 --- a/metadata-ingestion/examples/library/upsert_group.py +++ b/metadata-ingestion/examples/library/upsert_group.py @@ -5,7 +5,7 @@ CorpGroupGenerationConfig, ) from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig -from datahub.utilities.urns.corpuser_urn import CorpuserUrn +from datahub.metadata.urns import CorpUserUrn log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -13,10 +13,10 @@ group_email = "foogroup@acryl.io" group = CorpGroup( id=group_email, - owners=[str(CorpuserUrn.create_from_id("datahub"))], + owners=[str(CorpUserUrn("datahub"))], members=[ - str(CorpuserUrn.create_from_id("bar@acryl.io")), - str(CorpuserUrn.create_from_id("joe@acryl.io")), + str(CorpUserUrn("bar@acryl.io")), + str(CorpUserUrn("joe@acryl.io")), ], display_name="Foo Group", email=group_email, diff --git a/metadata-ingestion/scripts/avro_codegen.py b/metadata-ingestion/scripts/avro_codegen.py index de8836559217b..c6f6bac128b79 100644 --- a/metadata-ingestion/scripts/avro_codegen.py +++ b/metadata-ingestion/scripts/avro_codegen.py @@ -1,6 +1,8 @@ import collections +import copy import json import re +import textwrap from pathlib import Path from typing import Dict, Iterable, List, Optional, Tuple, Union @@ -115,11 +117,20 @@ def patch_schema(schema: dict, urn_arrays: Dict[str, List[Tuple[str, str]]]) -> # Patch normal urn types. field: avro.schema.Field for field in nested.fields: - java_class: Optional[str] = field.props.get("java", {}).get("class") + field_props: dict = field.props # type: ignore + java_props: dict = field_props.get("java", {}) + java_class: Optional[str] = java_props.get("class") if java_class and java_class.startswith( "com.linkedin.pegasus2avro.common.urn." ): - field.set_prop("Urn", java_class.split(".")[-1]) + type = java_class.split(".")[-1] + entity_types = field_props.get("Relationship", {}).get( + "entityTypes", [] + ) + + field.set_prop("Urn", type) + if entity_types: + field.set_prop("entityTypes", entity_types) # Patch array urn types. if nested.name in urn_arrays: @@ -130,7 +141,7 @@ def patch_schema(schema: dict, urn_arrays: Dict[str, List[Tuple[str, str]]]) -> field.set_prop("Urn", type) field.set_prop("urn_is_array", True) - return patched.to_json() + return patched.to_json() # type: ignore def merge_schemas(schemas_obj: List[dict]) -> str: @@ -141,6 +152,7 @@ def merge_schemas(schemas_obj: List[dict]) -> str: class NamesWithDups(avro.schema.Names): def add_name(self, name_attr, space_attr, new_schema): to_add = avro.schema.Name(name_attr, space_attr, self.default_namespace) + assert to_add.fullname self.names[to_add.fullname] = new_schema return to_add @@ -228,7 +240,6 @@ def make_load_schema_methods(schemas: Iterable[str]) -> str: def save_raw_schemas(schema_save_dir: Path, schemas: Dict[str, dict]) -> None: # Save raw avsc files. 
- schema_save_dir.mkdir()
     for name, schema in schemas.items():
         (schema_save_dir / f"{name}.avsc").write_text(json.dumps(schema, indent=2))
@@ -333,6 +344,342 @@ class AspectBag(TypedDict, total=False):
     schema_class_file.write_text("\n".join(schema_classes_lines))

+def write_urn_classes(key_aspects: List[dict], urn_dir: Path) -> None:
+    urn_dir.mkdir()
+
+    (urn_dir / "__init__.py").write_text("\n# This file is intentionally left empty.")
+
+    code = """
+# This file contains classes corresponding to entity URNs.
+
+from typing import ClassVar, List, Optional, Type, TYPE_CHECKING
+
+import functools
+from deprecated.sphinx import deprecated as _sphinx_deprecated
+
+from datahub.utilities.urn_encoder import UrnEncoder
+from datahub.utilities.urns._urn_base import _SpecificUrn, Urn
+from datahub.utilities.urns.error import InvalidUrnError
+
+deprecated = functools.partial(_sphinx_deprecated, version="0.12.0.2")
+"""
+
+    for aspect in key_aspects:
+        entity_type = aspect["Aspect"]["keyForEntity"]
+        if aspect["Aspect"]["entityCategory"] == "internal":
+            continue
+
+        code += generate_urn_class(entity_type, aspect)
+
+    (urn_dir / "urn_defs.py").write_text(code)
+
+
+def capitalize_entity_name(entity_name: str) -> str:
+    # Examples:
+    # corpuser -> CorpUser
+    # corpGroup -> CorpGroup
+    # mlModelDeployment -> MlModelDeployment
+
+    if entity_name == "corpuser":
+        return "CorpUser"
+
+    return f"{entity_name[0].upper()}{entity_name[1:]}"
+
+
+def python_type(avro_type: str) -> str:
+    if avro_type == "string":
+        return "str"
+    elif (
+        isinstance(avro_type, dict)
+        and avro_type.get("type") == "enum"
+        and avro_type.get("name") == "FabricType"
+    ):
+        # TODO: make this stricter using an enum
+        return "str"
+    raise ValueError(f"unknown type {avro_type}")
+
+
+def field_type(field: dict) -> str:
+    return python_type(field["type"])
+
+
+def field_name(field: dict) -> str:
+    manual_mapping = {
+        "origin": "env",
+        "platformName": "platform_name",
+    }
+
+    name: str = field["name"]
+    if name in manual_mapping:
+        return manual_mapping[name]
+
+    # If the name is mixed case, convert to snake case.
+    if name.lower() != name:
+        # Inject an underscore before each capital letter, and then convert to lowercase.
+        return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()
+
+    return name
+
+
+_create_from_id = """
+@classmethod
+@deprecated(reason="Use the constructor instead")
+def create_from_id(cls, id: str) ->
"{class_name}": + return cls(id) +""" +_extra_urn_methods: Dict[str, List[str]] = { + "corpGroup": [_create_from_id.format(class_name="CorpGroupUrn")], + "corpuser": [_create_from_id.format(class_name="CorpUserUrn")], + "dataFlow": [ + """ +@classmethod +def create_from_ids( + cls, + orchestrator: str, + flow_id: str, + env: str, + platform_instance: Optional[str] = None, +) -> "DataFlowUrn": + return cls( + orchestrator=orchestrator, + flow_id=f"{platform_instance}.{flow_id}" if platform_instance else flow_id, + cluster=env, + ) + +@deprecated(reason="Use .orchestrator instead") +def get_orchestrator_name(self) -> str: + return self.orchestrator + +@deprecated(reason="Use .flow_id instead") +def get_flow_id(self) -> str: + return self.flow_id + +@deprecated(reason="Use .cluster instead") +def get_env(self) -> str: + return self.cluster +""", + ], + "dataJob": [ + """ +@classmethod +def create_from_ids(cls, data_flow_urn: str, job_id: str) -> "DataJobUrn": + return cls(data_flow_urn, job_id) + +def get_data_flow_urn(self) -> "DataFlowUrn": + return DataFlowUrn.from_string(self.flow) + +@deprecated(reason="Use .job_id instead") +def get_job_id(self) -> str: + return self.job_id +""" + ], + "dataPlatform": [_create_from_id.format(class_name="DataPlatformUrn")], + "dataProcessInstance": [ + _create_from_id.format(class_name="DataProcessInstanceUrn"), + """ +@deprecated(reason="Use .id instead") +def get_dataprocessinstance_id(self) -> str: + return self.id +""", + ], + "dataset": [ + """ +@classmethod +def create_from_ids( + cls, + platform_id: str, + table_name: str, + env: str, + platform_instance: Optional[str] = None, +) -> "DatasetUrn": + return DatasetUrn( + platform=platform_id, + name=f"{platform_instance}.{table_name}" if platform_instance else table_name, + env=env, + ) + +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path as _get_simple_field_path_from_v2_field_path + +get_simple_field_path_from_v2_field_path = staticmethod(deprecated(reason='Use the function from the field_paths module instead')(_get_simple_field_path_from_v2_field_path)) + +def get_data_platform_urn(self) -> "DataPlatformUrn": + return DataPlatformUrn.from_string(self.platform) + +@deprecated(reason="Use .name instead") +def get_dataset_name(self) -> str: + return self.name + +@deprecated(reason="Use .env instead") +def get_env(self) -> str: + return self.env +""" + ], + "domain": [_create_from_id.format(class_name="DomainUrn")], + "notebook": [ + """ +@deprecated(reason="Use .notebook_tool instead") +def get_platform_id(self) -> str: + return self.notebook_tool + +@deprecated(reason="Use .notebook_id instead") +def get_notebook_id(self) -> str: + return self.notebook_id +""" + ], + "tag": [_create_from_id.format(class_name="TagUrn")], +} + + +def generate_urn_class(entity_type: str, key_aspect: dict) -> str: + """Generate a class definition for this entity. + + The class definition has the following structure: + - A class attribute ENTITY_TYPE, which is the entity type string. + - A class attribute URN_PARTS, which is the number of parts in the URN. + - A constructor that takes the URN parts as arguments. The field names + will match the key aspect's field names. It will also have a _allow_coercion + flag, which will allow for some normalization (e.g. upper case env). + Then, each part will be validated (including nested calls for urn subparts). + - Utilities for converting to/from the key aspect. 
+ - Any additional methods that are required for this entity type, defined above. + These are primarily for backwards compatibility. + - Getter methods for each field. + """ + + class_name = f"{capitalize_entity_name(entity_type)}Urn" + + fields = copy.deepcopy(key_aspect["fields"]) + if entity_type == "container": + # The annotations say guid is optional, but it is required. + # This is a quick fix of the annotations. + assert field_name(fields[0]) == "guid" + assert fields[0]["type"] == ["null", "string"] + fields[0]["type"] = "string" + + _init_arg_parts: List[str] = [] + for field in fields: + default = '"PROD"' if field_name(field) == "env" else None + _arg_part = f"{field_name(field)}: {field_type(field)}" + if default: + _arg_part += f" = {default}" + _init_arg_parts.append(_arg_part) + init_args = ", ".join(_init_arg_parts) + + super_init_args = ", ".join(field_name(field) for field in fields) + + arg_count = len(fields) + parse_ids_mapping = ", ".join( + f"{field_name(field)}=entity_ids[{i}]" for i, field in enumerate(fields) + ) + + key_aspect_class = f"{key_aspect['name']}Class" + to_key_aspect_args = ", ".join( + # The LHS bypasses any field name aliases. + f"{field['name']}=self.{field_name(field)}" + for field in fields + ) + from_key_aspect_args = ", ".join( + f"{field_name(field)}=key_aspect.{field['name']}" for field in fields + ) + + init_coercion = "" + init_validation = "" + for field in fields: + init_validation += f'if not {field_name(field)}:\n raise InvalidUrnError("{field_name(field)} cannot be empty")\n' + + # Generalized mechanism for validating embedded urns. + field_urn_type_class = None + if field_name(field) == "platform": + field_urn_type_class = "DataPlatformUrn" + elif field.get("Urn"): + if len(field.get("entityTypes", [])) == 1: + field_entity_type = field["entityTypes"][0] + field_urn_type_class = f"{capitalize_entity_name(field_entity_type)}Urn" + else: + field_urn_type_class = "Urn" + + if field_urn_type_class: + init_validation += f"{field_name(field)} = str({field_name(field)})\n" + init_validation += ( + f"assert {field_urn_type_class}.from_string({field_name(field)})\n" + ) + else: + init_validation += ( + f"assert not UrnEncoder.contains_reserved_char({field_name(field)})\n" + ) + + if field_name(field) == "env": + init_coercion += "env = env.upper()\n" + # TODO add ALL_ENV_TYPES validation + elif entity_type == "dataPlatform" and field_name(field) == "platform_name": + init_coercion += 'if platform_name.startswith("urn:li:dataPlatform:"):\n' + init_coercion += " platform_name = DataPlatformUrn.from_string(platform_name).platform_name\n" + + if field_name(field) == "platform": + init_coercion += "platform = DataPlatformUrn(platform).urn()\n" + elif field_urn_type_class is None: + # For all non-urns, run the value through the UrnEncoder. + init_coercion += ( + f"{field_name(field)} = UrnEncoder.encode_string({field_name(field)})\n" + ) + if not init_coercion: + init_coercion = "pass" + + # TODO include the docs for each field + + code = f""" +if TYPE_CHECKING: + from datahub.metadata.schema_classes import {key_aspect_class} + +class {class_name}(_SpecificUrn): + ENTITY_TYPE: ClassVar[str] = "{entity_type}" + URN_PARTS: ClassVar[int] = {arg_count} + + def __init__(self, {init_args}, *, _allow_coercion: bool = True) -> None: + if _allow_coercion: + # Field coercion logic (if any is required). +{textwrap.indent(init_coercion.strip(), prefix=" "*4*3)} + + # Validation logic. 
+{textwrap.indent(init_validation.strip(), prefix=" "*4*2)} + + super().__init__(self.ENTITY_TYPE, [{super_init_args}]) + + @classmethod + def _parse_ids(cls, entity_ids: List[str]) -> "{class_name}": + if len(entity_ids) != cls.URN_PARTS: + raise InvalidUrnError(f"{class_name} should have {{cls.URN_PARTS}} parts, got {{len(entity_ids)}}: {{entity_ids}}") + return cls({parse_ids_mapping}, _allow_coercion=False) + + @classmethod + def underlying_key_aspect_type(cls) -> Type["{key_aspect_class}"]: + from datahub.metadata.schema_classes import {key_aspect_class} + + return {key_aspect_class} + + def to_key_aspect(self) -> "{key_aspect_class}": + from datahub.metadata.schema_classes import {key_aspect_class} + + return {key_aspect_class}({to_key_aspect_args}) + + @classmethod + def from_key_aspect(cls, key_aspect: "{key_aspect_class}") -> "{class_name}": + return cls({from_key_aspect_args}) +""" + + for extra_method in _extra_urn_methods.get(entity_type, []): + code += textwrap.indent(extra_method, prefix=" " * 4) + + for i, field in enumerate(fields): + code += f""" + @property + def {field_name(field)}(self) -> {field_type(field)}: + return self.entity_ids[{i}] +""" + + return code + + @click.command() @click.argument( "entity_registry", type=click.Path(exists=True, dir_okay=False), required=True @@ -367,6 +714,7 @@ def generate( if schema.get("Aspect") } + # Copy entity registry info into the corresponding key aspect. for entity in entities: # This implicitly requires that all keyAspects are resolvable. aspect = aspects[entity.keyAspect] @@ -428,6 +776,7 @@ def generate( import importlib from typing import TYPE_CHECKING +from datahub.utilities.docs_build import IS_SPHINX_BUILD from datahub.utilities._custom_package_loader import get_custom_models_package _custom_package_path = get_custom_models_package() @@ -437,16 +786,64 @@ def generate( # Required explicitly because __all__ doesn't include _ prefixed names. from ._schema_classes import _Aspect, __SCHEMA_TYPES + + if IS_SPHINX_BUILD: + # Set __module__ to the current module so that Sphinx will document the + # classes as belonging to this module instead of the custom package. + for _cls in list(globals().values()): + if hasattr(_cls, "__module__") and "datahub.metadata._schema_classes" in _cls.__module__: + _cls.__module__ = __name__ else: _custom_package = importlib.import_module(_custom_package_path) globals().update(_custom_package.__dict__) +""" + ) + + (Path(outdir) / "urns.py").write_text( + """ +# This is a specialized shim layer that allows us to dynamically load custom URN types from elsewhere. + +import importlib +from typing import TYPE_CHECKING + +from datahub.utilities.docs_build import IS_SPHINX_BUILD +from datahub.utilities._custom_package_loader import get_custom_urns_package +from datahub.utilities.urns._urn_base import Urn # noqa: F401 +_custom_package_path = get_custom_urns_package() + +if TYPE_CHECKING or not _custom_package_path: + from ._urns.urn_defs import * # noqa: F401 + + if IS_SPHINX_BUILD: + # Set __module__ to the current module so that Sphinx will document the + # classes as belonging to this module instead of the custom package. + for _cls in list(globals().values()): + if hasattr(_cls, "__module__") and ("datahub.metadata._urns.urn_defs" in _cls.__module__ or _cls is Urn): + _cls.__module__ = __name__ +else: + _custom_package = importlib.import_module(_custom_package_path) + globals().update(_custom_package.__dict__) """ ) + # Generate URN classes. 
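A rough sketch (not part of the patch) of how a class emitted by the template above is expected to behave once `urn_defs.py` is generated. The import path and values mirror the tests added later in this commit, but treat the specifics as illustrative:

```python
# Illustrative only: assumes the codegen above has produced
# datahub.metadata.urns with a generated DatasetUrn class.
from datahub.metadata.urns import DatasetUrn

# Coercion (the default _allow_coercion=True path): the env is upper-cased
# and a bare platform name is expanded into a dataPlatform urn.
urn = DatasetUrn(platform="hive", name="db.table", env="prod")
assert urn.urn() == "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)"

# Round-trips through the string form and the key aspect.
assert DatasetUrn.from_string(urn.urn()) == urn
assert DatasetUrn.from_key_aspect(urn.to_key_aspect()) == urn
```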
+ urn_dir = Path(outdir) / "_urns" + write_urn_classes( + [aspect for aspect in aspects.values() if aspect["Aspect"].get("keyForEntity")], + urn_dir, + ) + + # Save raw schema files in codegen as well. + schema_save_dir = Path(outdir) / "schemas" + schema_save_dir.mkdir() + for schema_out_file, schema in schemas.items(): + (schema_save_dir / f"{schema_out_file}.avsc").write_text( + json.dumps(schema, indent=2) + ) + # Keep a copy of a few raw avsc files. required_avsc_schemas = {"MetadataChangeEvent", "MetadataChangeProposal"} - schema_save_dir = Path(outdir) / "schemas" save_raw_schemas( schema_save_dir, { diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 4a674550d49df..a5883c9ae9020 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -90,6 +90,7 @@ def generate( entry_points={{ "datahub.custom_packages": [ "models={python_package_name}.models.schema_classes", + "urns={python_package_name}.models._urns.urn_defs", ], }}, ) diff --git a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py index 2f07e4a112f93..6a2f733dcf8f7 100644 --- a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py +++ b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py @@ -75,7 +75,7 @@ class DataProcessInstance: def __post_init__(self): self.urn = DataProcessInstanceUrn.create_from_id( - dataprocessinstance_id=DataProcessInstanceKey( + id=DataProcessInstanceKey( cluster=self.cluster, orchestrator=self.orchestrator, id=self.id, diff --git a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py index 611f0c5c52cc6..a2db8ceb4a89a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py +++ b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py @@ -45,6 +45,7 @@ TagAssociationClass, ) from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path from datahub.utilities.urns.urn import Urn, guess_entity_type DATASET_ENTITY_TYPE = DatasetUrn.ENTITY_TYPE @@ -436,9 +437,7 @@ def process_sub_resource_row( field_match = False for field_info in current_editable_schema_metadata.editableSchemaFieldInfo: if ( - DatasetUrn.get_simple_field_path_from_v2_field_path( - field_info.fieldPath - ) + get_simple_field_path_from_v2_field_path(field_info.fieldPath) == field_path ): # we have some editable schema metadata for this field diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index b5d9d96354fc5..97877df63707f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -113,7 +113,7 @@ def create_id(path: List[str], default_id: Optional[str], enable_auto_id: bool) id_: str = ".".join(path) - if UrnEncoder.contains_reserved_char(id_): + if UrnEncoder.contains_extended_reserved_char(id_): enable_auto_id = True if enable_auto_id: diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index 08df7599510f4..da44d09121c6c 100644 
--- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -1086,9 +1086,7 @@ def get_upstream_columns_of_fields_in_datasource( def is_snowflake_urn(self, urn: str) -> bool: return ( - DatasetUrn.create_from_string(urn) - .get_data_platform_urn() - .get_platform_name() + DatasetUrn.create_from_string(urn).get_data_platform_urn().platform_name == "snowflake" ) diff --git a/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py b/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py index 1b66258557406..bb029db3b65b7 100644 --- a/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py +++ b/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py @@ -10,6 +10,7 @@ _CUSTOM_PACKAGE_GROUP_KEY = "datahub.custom_packages" _MODELS_KEY = "models" +_URNS_KEY = "urns" class CustomPackageException(Exception): @@ -41,3 +42,7 @@ def _get_custom_package_for_name(name: str) -> Optional[str]: def get_custom_models_package() -> Optional[str]: return _get_custom_package_for_name(_MODELS_KEY) + + +def get_custom_urns_package() -> Optional[str]: + return _get_custom_package_for_name(_URNS_KEY) diff --git a/metadata-ingestion/src/datahub/utilities/docs_build.py b/metadata-ingestion/src/datahub/utilities/docs_build.py new file mode 100644 index 0000000000000..18cb3629516ba --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/docs_build.py @@ -0,0 +1,9 @@ +from typing import TYPE_CHECKING + +try: + # Via https://stackoverflow.com/a/65147676 + if not TYPE_CHECKING and __sphinx_build__: + IS_SPHINX_BUILD = True + +except NameError: + IS_SPHINX_BUILD = False diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index efe2d26aae3d9..c2cccf9f1e389 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -37,7 +37,7 @@ TimeTypeClass, ) from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict -from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path logger = logging.getLogger(__name__) @@ -443,15 +443,14 @@ def _convert_schema_aspect_to_info( cls, schema_metadata: SchemaMetadataClass ) -> SchemaInfo: return { - DatasetUrn.get_simple_field_path_from_v2_field_path(col.fieldPath): ( + get_simple_field_path_from_v2_field_path(col.fieldPath): ( # The actual types are more of a "nice to have". col.nativeDataType or "str" ) for col in schema_metadata.fields # TODO: We can't generate lineage to columns nested within structs yet. - if "." - not in DatasetUrn.get_simple_field_path_from_v2_field_path(col.fieldPath) + if "." not in get_simple_field_path_from_v2_field_path(col.fieldPath) } @classmethod @@ -459,17 +458,14 @@ def convert_graphql_schema_metadata_to_info( cls, schema: GraphQLSchemaMetadata ) -> SchemaInfo: return { - DatasetUrn.get_simple_field_path_from_v2_field_path(field["fieldPath"]): ( + get_simple_field_path_from_v2_field_path(field["fieldPath"]): ( # The actual types are more of a "nice to have". field["nativeDataType"] or "str" ) for field in schema["fields"] # TODO: We can't generate lineage to columns nested within structs yet. - if "." - not in DatasetUrn.get_simple_field_path_from_v2_field_path( - field["fieldPath"] - ) + if "." 
not in get_simple_field_path_from_v2_field_path(field["fieldPath"])
         }
 
     def close(self) -> None:
diff --git a/metadata-ingestion/src/datahub/utilities/urn_encoder.py b/metadata-ingestion/src/datahub/utilities/urn_encoder.py
index 706d50d942055..093c9ade8c152 100644
--- a/metadata-ingestion/src/datahub/utilities/urn_encoder.py
+++ b/metadata-ingestion/src/datahub/utilities/urn_encoder.py
@@ -23,4 +23,8 @@ def encode_char(c: str) -> str:
 
     @staticmethod
     def contains_reserved_char(value: str) -> bool:
+        return bool(set(value).intersection(RESERVED_CHARS))
+
+    @staticmethod
+    def contains_extended_reserved_char(value: str) -> bool:
         return bool(set(value).intersection(RESERVED_CHARS_EXTENDED))
diff --git a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py
new file mode 100644
index 0000000000000..fbde0d6e6d69a
--- /dev/null
+++ b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py
@@ -0,0 +1,234 @@
+import functools
+import urllib.parse
+from abc import abstractmethod
+from typing import ClassVar, Dict, List, Optional, Type, TypeVar
+
+from deprecated import deprecated
+
+from datahub.utilities.urns.error import InvalidUrnError
+
+URN_TYPES: Dict[str, Type["_SpecificUrn"]] = {}
+
+
+def _split_entity_id(entity_id: str) -> List[str]:
+    if not (entity_id.startswith("(") and entity_id.endswith(")")):
+        return [entity_id]
+
+    parts = []
+    start_paren_count = 1
+    part_start = 1
+    for i in range(1, len(entity_id)):
+        c = entity_id[i]
+        if c == "(":
+            start_paren_count += 1
+        elif c == ")":
+            start_paren_count -= 1
+            if start_paren_count < 0:
+                raise InvalidUrnError(f"{entity_id}, mismatched paren nesting")
+        elif c == ",":
+            if start_paren_count != 1:
+                continue
+
+            if i - part_start <= 0:
+                raise InvalidUrnError(f"{entity_id}, empty part disallowed")
+            parts.append(entity_id[part_start:i])
+            part_start = i + 1
+
+    if start_paren_count != 0:
+        raise InvalidUrnError(f"{entity_id}, mismatched paren nesting")
+
+    parts.append(entity_id[part_start:-1])
+
+    return parts
+
+
+_UrnSelf = TypeVar("_UrnSelf", bound="Urn")
+
+
+@functools.total_ordering
+class Urn:
+    """
+    URNs are globally unique identifiers used to refer to entities.
+
+    It will be in format of urn:li:<type>:<id> or urn:li:<type>:(<id1>,<id2>,...)
+
+    A note on encoding: certain characters, particularly commas and parentheses, are
+    not allowed in string portions of the URN. However, these are allowed when the urn
+    has another urn embedded within it. The main URN class ignores this possibility,
+    and assumes that the user provides a valid URN string. However, the specific URN
+    classes, such as DatasetUrn, will automatically encode these characters using
+    url-encoding when the URN is created and _allow_coercion is enabled (the default).
+    However, all from_string methods will try to preserve the string as-is, and will
+    raise an error if the string is invalid.
+ """ + + # retained for backwards compatibility + URN_PREFIX: ClassVar[str] = "urn" + LI_DOMAIN: ClassVar[str] = "li" + + _entity_type: str + _entity_ids: List[str] + + def __init__(self, entity_type: str, entity_id: List[str]) -> None: + self._entity_type = entity_type + self._entity_ids = entity_id + + if not self._entity_ids: + raise InvalidUrnError("Empty entity id.") + for part in self._entity_ids: + if not part: + raise InvalidUrnError("Empty entity id.") + + @property + def entity_type(self) -> str: + return self._entity_type + + @property + def entity_ids(self) -> List[str]: + return self._entity_ids + + @classmethod + def from_string(cls: Type[_UrnSelf], urn_str: str) -> "_UrnSelf": + """ + Creates an Urn from its string representation. + + Args: + urn_str: The string representation of the Urn. + + Returns: + Urn of the given string representation. + + Raises: + InvalidUrnError: If the string representation is in invalid format. + """ + + # TODO: Add handling for url encoded urns e.g. urn%3A ... + + if not urn_str.startswith("urn:li:"): + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. Urns should start with 'urn:li:'" + ) + + parts: List[str] = urn_str.split(":", maxsplit=3) + if len(parts) != 4: + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. Expect 4 parts from urn string but found {len(parts)}" + ) + if "" in parts: + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. There should not be empty parts in urn string." + ) + + _urn, _li, entity_type, entity_ids_str = parts + entity_ids = _split_entity_id(entity_ids_str) + + UrnCls: Optional[Type["_SpecificUrn"]] = URN_TYPES.get(entity_type) + if UrnCls: + if not issubclass(UrnCls, cls): + # We want to return a specific subtype of Urn. If we're called + # with Urn.from_string(), that's fine. However, if we're called as + # DatasetUrn.from_string('urn:li:corpuser:foo'), that should throw an error. + raise InvalidUrnError( + f"Passed an urn of type {entity_type} to the from_string method of {cls.__name__}. Use Urn.from_string() or {UrnCls.__name__}.from_string() instead." + ) + return UrnCls._parse_ids(entity_ids) # type: ignore + + # Fallback for unknown types. 
+ if cls != Urn: + raise InvalidUrnError( + f"Unknown urn type {entity_type} for urn {urn_str} of type {cls}" + ) + return cls(entity_type, entity_ids) + + def urn(self) -> str: + """Get the string representation of the urn.""" + + if len(self._entity_ids) == 1: + return f"urn:li:{self._entity_type}:{self._entity_ids[0]}" + + return f"urn:li:{self._entity_type}:({','.join(self._entity_ids)})" + + def __str__(self) -> str: + return self.urn() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.urn()})" + + def urn_url_encoded(self) -> str: + return Urn.url_encode(self.urn()) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Urn): + return False + return self.urn() == other.urn() + + def __lt__(self, other: object) -> bool: + if not isinstance(other, Urn): + raise TypeError( + f"'<' not supported between instances of '{type(self)}' and '{type(other)}'" + ) + return self.urn() < other.urn() + + def __hash__(self) -> int: + return hash(self.urn()) + + @classmethod + @deprecated(reason="prefer .from_string") + def create_from_string(cls: Type[_UrnSelf], urn_str: str) -> "_UrnSelf": + return cls.from_string(urn_str) + + @deprecated(reason="prefer .entity_ids") + def get_entity_id(self) -> List[str]: + return self._entity_ids + + @deprecated(reason="prefer .entity_type") + def get_type(self) -> str: + return self._entity_type + + @deprecated(reason="no longer needed") + def get_domain(self) -> str: + return "li" + + @deprecated(reason="no longer needed") + def get_entity_id_as_string(self) -> str: + urn = self.urn() + prefix = "urn:li:" + assert urn.startswith(prefix) + id_with_type = urn[len(prefix) :] + return id_with_type.split(":", maxsplit=1)[1] + + @classmethod + @deprecated(reason="no longer needed") + def validate(cls, urn_str: str) -> None: + Urn.create_from_string(urn_str) + + @staticmethod + def url_encode(urn: str) -> str: + # safe='' encodes '/' as '%2F' + return urllib.parse.quote(urn, safe="") + + +class _SpecificUrn(Urn): + ENTITY_TYPE: str = "" + + def __init_subclass__(cls) -> None: + # Validate the subclass. + entity_type = cls.ENTITY_TYPE + if not entity_type: + raise ValueError(f'_SpecificUrn subclass {cls} must define "ENTITY_TYPE"') + + # Register the urn type. + if entity_type in URN_TYPES: + raise ValueError(f"duplicate urn type registered: {entity_type}") + URN_TYPES[entity_type] = cls + + return super().__init_subclass__() + + @classmethod + def underlying_key_aspect_type(cls) -> Type: + raise NotImplementedError() + + @classmethod + @abstractmethod + def _parse_ids(cls: Type[_UrnSelf], entity_ids: List[str]) -> _UrnSelf: + raise NotImplementedError() diff --git a/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py b/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py index 94fa133becf6c..37c1076925945 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class CorpGroupUrn(Urn): - """ - expected corp group urn format: urn:li:corpGroup:. 
example: "urn:li:corpGroup:data" - """ - - ENTITY_TYPE: str = "corpGroup" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "CorpGroupUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, group_id: str) -> "CorpGroupUrn": - return cls(CorpGroupUrn.ENTITY_TYPE, [group_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != CorpGroupUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {CorpGroupUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import CorpGroupUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py b/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py index 653b99f4af9bf..5f9ecf65951b9 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class CorpuserUrn(Urn): - """ - expected corp user urn format: urn:li:corpuser:. example: "urn:li:corpuser:tom" - """ - - ENTITY_TYPE: str = "corpuser" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "CorpuserUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, user_id: str) -> "CorpuserUrn": - return cls(CorpuserUrn.ENTITY_TYPE, [user_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != CorpuserUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {CorpuserUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import CorpUserUrn as CorpuserUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py index f0dda5d8db493..5b2b45927c339 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py @@ -1,88 +1 @@ -from typing import List, Optional - -from datahub.configuration.source_common import ALL_ENV_TYPES -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataFlowUrn(Urn): - """ - expected data flow urn format: urn:li:dataFlow:(,,). 
example: - urn:li:dataFlow:(airflow,ingest_user,prod) - """ - - ENTITY_TYPE: str = "dataFlow" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataFlowUrn": - """ - Create a DataFlowUrn from the its string representation - :param urn_str: the string representation of the DataFlowUrn - :return: DataFlowUrn of the given string representation - :raises InvalidUrnError is the string representation is in invalid format - """ - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - def get_orchestrator_name(self) -> str: - """ - :return: the orchestrator name for the Dataflow - """ - return self.get_entity_id()[0] - - def get_flow_id(self) -> str: - """ - :return: the data flow id from this DataFlowUrn - """ - return self.get_entity_id()[1] - - def get_env(self) -> str: - """ - :return: the environment where the DataFlow is run - """ - return self.get_entity_id()[2] - - @classmethod - def create_from_ids( - cls, - orchestrator: str, - flow_id: str, - env: str, - platform_instance: Optional[str] = None, - ) -> "DataFlowUrn": - entity_id: List[str] - if platform_instance: - entity_id = [ - orchestrator, - f"{platform_instance}.{flow_id}", - env, - ] - else: - entity_id = [orchestrator, flow_id, env] - return cls(DataFlowUrn.ENTITY_TYPE, entity_id) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataFlowUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataFlowUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - # expected entity id format (,,) - if len(entity_id) != 3: - raise InvalidUrnError( - f"Expect 3 parts in the entity id but found {entity_id}" - ) - - env = entity_id[2].upper() - if env not in ALL_ENV_TYPES: - raise InvalidUrnError( - f"Invalid env:{env}. Allowed envs are {ALL_ENV_TYPES}" - ) +from datahub.metadata.urns import DataFlowUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py index 9459646893b92..53e3419ee7ecb 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py @@ -1,51 +1 @@ -from typing import List - -from datahub.utilities.urns.data_flow_urn import DataFlowUrn -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataJobUrn(Urn): - """ - expected Data job urn format: urn:li:dataJob:(,). 
example: - "urn:li:dataJob:(urn:li:dataFlow:(airflow,sample_flow,prod),sample_job)" - """ - - ENTITY_TYPE: str = "dataJob" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - def get_data_flow_urn(self) -> DataFlowUrn: - return DataFlowUrn.create_from_string(self.get_entity_id()[0]) - - def get_job_id(self) -> str: - return self.get_entity_id()[1] - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataJobUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_ids(cls, data_flow_urn: str, job_id: str) -> "DataJobUrn": - return cls(DataJobUrn.ENTITY_TYPE, [data_flow_urn, job_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataJobUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataJobUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 2: - raise InvalidUrnError( - f"Expect 2 part in entity id, but found{len(entity_id)}" - ) - - data_flow_urn_str = entity_id[0] - DataFlowUrn.validate(data_flow_urn_str) +from datahub.metadata.urns import DataJobUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py index 79cf54dfe920a..9d37e38f256e7 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py @@ -1,34 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataPlatformUrn(Urn): - """ - expected dataset urn format: urn:li:dataPlatform:. 
example: "urn:li:dataPlatform:hive" - """ - - ENTITY_TYPE: str = "dataPlatform" - - def __init__(self, entity_type: str, entity_id: List[str], domain: str = "li"): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataPlatformUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, platform_id: str) -> "DataPlatformUrn": - return cls(DataPlatformUrn.ENTITY_TYPE, [platform_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataPlatformUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataPlatformUrn.ENTITY_TYPE} but found {entity_type}" - ) - - def get_platform_name(self) -> str: - return self.get_entity_id()[0] +from datahub.metadata.urns import DataPlatformUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py index 6367d48d6d441..df6ba797d069c 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py @@ -1,46 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataProcessInstanceUrn(Urn): - """ - expected domain urn format: urn:li:dataProcessInstance: - """ - - ENTITY_TYPE: str = "dataProcessInstance" - - def __init__( - self, entity_type: str, entity_id: List[str], domain_id: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain_id) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataProcessInstanceUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, dataprocessinstance_id: str) -> "DataProcessInstanceUrn": - return cls(DataProcessInstanceUrn.ENTITY_TYPE, [dataprocessinstance_id]) - - def get_dataprocessinstance_id(self) -> str: - """ - :return: the dataprocess instance id from this DatasetUrn - """ - return self.get_entity_id()[0] - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataProcessInstanceUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataProcessInstanceUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import DataProcessInstanceUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py b/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py index 3ed33c068496e..6078ffefc03d8 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py @@ -1,112 +1 @@ -from typing import List, Optional - -from datahub.configuration.source_common import ALL_ENV_TYPES -from datahub.utilities.urn_encoder import UrnEncoder -from datahub.utilities.urns.data_platform_urn import DataPlatformUrn -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DatasetUrn(Urn): - """ - expected dataset urn format: urn:li:dataset:(,,env). 
example: - urn:li:dataset:(urn:li:dataPlatform:hive,member,prod) - """ - - ENTITY_TYPE: str = "dataset" - - def __init__(self, entity_type: str, entity_id: List[str], domain: str = "li"): - super().__init__(entity_type, UrnEncoder.encode_string_array(entity_id), domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DatasetUrn": - """ - Create a DatasetUrn from the its string representation - :param urn_str: the string representation of the DatasetUrn - :return: DatasetUrn of the given string representation - :raises InvalidUrnError is the string representation is in invalid format - """ - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - def get_data_platform_urn(self) -> DataPlatformUrn: - """ - :return: the DataPlatformUrn of where the Dataset is created - """ - return DataPlatformUrn.create_from_string(self.get_entity_id()[0]) - - def get_dataset_name(self) -> str: - """ - :return: the dataset name from this DatasetUrn - """ - return self.get_entity_id()[1] - - def get_env(self) -> str: - """ - :return: the environment where the Dataset is created - """ - return self.get_entity_id()[2] - - @classmethod - def create_from_ids( - cls, - platform_id: str, - table_name: str, - env: str, - platform_instance: Optional[str] = None, - ) -> "DatasetUrn": - entity_id: List[str] - if platform_instance: - entity_id = [ - str(DataPlatformUrn.create_from_id(platform_id)), - f"{platform_instance}.{table_name}", - env, - ] - else: - entity_id = [ - str(DataPlatformUrn.create_from_id(platform_id)), - table_name, - env, - ] - return cls(DatasetUrn.ENTITY_TYPE, entity_id) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DatasetUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DatasetUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - # expected entity id format (,,) - if len(entity_id) != 3: - raise InvalidUrnError( - f"Expect 3 parts in the entity id but found {entity_id}" - ) - - platform_urn_str = entity_id[0] - - DataPlatformUrn.validate(platform_urn_str) - env = entity_id[2].upper() - if env not in ALL_ENV_TYPES: - raise InvalidUrnError( - f"Invalid env:{env}. Allowed envs are {ALL_ENV_TYPES}" - ) - - """A helper function to extract simple . path notation from the v2 field path""" - - @staticmethod - def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - if field_path.startswith("[version=2.0]"): - # this is a v2 field path - tokens = [ - t - for t in field_path.split(".") - if not (t.startswith("[") or t.endswith("]")) - ] - path = ".".join(tokens) - return path - else: - # not a v2, we assume this is a simple path - return field_path +from datahub.metadata.urns import DatasetUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py b/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py index dc875ce84f973..442a6b27729bb 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DomainUrn(Urn): - """ - expected domain urn format: urn:li:domain:. 
example: "urn:li:domain:product" - """ - - ENTITY_TYPE: str = "domain" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DomainUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, domain_id: str) -> "DomainUrn": - return cls(DomainUrn.ENTITY_TYPE, [domain_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DomainUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DomainUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import DomainUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/error.py b/metadata-ingestion/src/datahub/utilities/urns/error.py index 12b7c02ab2d9a..a5c17c40787ca 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/error.py +++ b/metadata-ingestion/src/datahub/utilities/urns/error.py @@ -1,3 +1,2 @@ class InvalidUrnError(Exception): - def __init__(self, msg: str): - super().__init__(msg) + pass diff --git a/metadata-ingestion/src/datahub/utilities/urns/field_paths.py b/metadata-ingestion/src/datahub/utilities/urns/field_paths.py new file mode 100644 index 0000000000000..c2ecfa3031140 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/urns/field_paths.py @@ -0,0 +1,15 @@ +def get_simple_field_path_from_v2_field_path(field_path: str) -> str: + """A helper function to extract simple . path notation from the v2 field path""" + + if field_path.startswith("[version=2.0]"): + # this is a v2 field path + tokens = [ + t + for t in field_path.split(".") + if not (t.startswith("[") or t.endswith("]")) + ] + path = ".".join(tokens) + return path + else: + # not a v2, we assume this is a simple path + return field_path diff --git a/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py b/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py index fcf2c92450309..60a4f5396aa46 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py @@ -1,46 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class NotebookUrn(Urn): - """ - expected dataset urn format: urn:li:notebook:(,). 
example: "urn:li:notebook:(querybook,1234)" - """ - - ENTITY_TYPE: str = "notebook" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "NotebookUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_ids(cls, platform_id: str, notebook_id: str) -> "NotebookUrn": - return cls(NotebookUrn.ENTITY_TYPE, [platform_id, notebook_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != NotebookUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {NotebookUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 2: - raise InvalidUrnError( - f"Expect 2 parts in entity id, but found{len(entity_id)}" - ) - - def get_platform_id(self) -> str: - return self.get_entity_id()[0] - - def get_notebook_id(self) -> str: - return self.get_entity_id()[1] +from datahub.metadata.urns import NotebookUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py b/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py index e2baeea45e807..0ac632ee40a01 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class TagUrn(Urn): - """ - expected tag urn format: urn:li:tag:. example: "urn:li:tag:product" - """ - - ENTITY_TYPE: str = "tag" - - def __init__( - self, entity_type: str, entity_id: List[str], tag: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, tag) - - @classmethod - def create_from_string(cls, urn_str: str) -> "TagUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, tag_id: str) -> "TagUrn": - return cls(TagUrn.ENTITY_TYPE, [tag_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != TagUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {TagUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import TagUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn.py b/metadata-ingestion/src/datahub/utilities/urns/urn.py index db6898d55ad2b..2e5cebfd0e8f5 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn.py @@ -1,167 +1,6 @@ -import urllib.parse -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError +from datahub.metadata.urns import Urn # noqa: F401 def guess_entity_type(urn: str) -> str: assert urn.startswith("urn:li:"), "urns must start with urn:li:" return urn.split(":")[2] - - -class Urn: - """ - URNs are Globally Unique Identifiers (GUID) used to represent an entity. - It will be in format of urn::: - """ - - URN_PREFIX: str = "urn" - # all the Datahub urn use li domain for now. 
- LI_DOMAIN: str = "li" - - _entity_type: str - _domain: str - _entity_id: List[str] - - def __init__( - self, entity_type: str, entity_id: List[str], urn_domain: str = LI_DOMAIN - ): - if not entity_id: - raise InvalidUrnError("Empty entity id.") - self._validate_entity_type(entity_type) - self._validate_entity_id(entity_id) - self._entity_type = entity_type - self._domain = urn_domain - self._entity_id = entity_id - - @classmethod - def create_from_string(cls, urn_str: str) -> "Urn": - """ - Create a Urn from the its string representation - :param urn_str: the string representation of the Urn - :return: Urn of the given string representation - :raises InvalidUrnError if the string representation is in invalid format - """ - - # expect urn string in format of urn::: - cls.validate(urn_str) - parts: List[str] = urn_str.split(":", 3) - - return cls(parts[2], cls._get_entity_id_from_str(parts[3]), parts[1]) - - @classmethod - def validate(cls, urn_str: str) -> None: - """ - Validate if a string is in valid Urn format - :param urn_str: to be validated urn string - :raises InvalidUrnError if the string representation is in invalid format - """ - parts: List[str] = urn_str.split(":", 3) - if len(parts) != 4: - raise InvalidUrnError( - f"Invalid urn string: {urn_str}. Expect 4 parts from urn string but found {len(parts)}" - ) - - if "" in parts: - raise InvalidUrnError( - f"Invalid urn string: {urn_str}. There should not be empty parts in urn string." - ) - - if parts[0] != Urn.URN_PREFIX: - raise InvalidUrnError( - f'Invalid urn string: {urn_str}. Expect urn starting with "urn" but found {parts[0]}' - ) - - if "" in cls._get_entity_id_from_str(parts[3]): - raise InvalidUrnError( - f"Invalid entity id in urn string: {urn_str}. There should not be empty parts in entity id." - ) - - cls._validate_entity_type(parts[2]) - cls._validate_entity_id(cls._get_entity_id_from_str(parts[3])) - - @staticmethod - def url_encode(urn: str) -> str: - # safe='' encodes '/' as '%2F' - return urllib.parse.quote(urn, safe="") - - def get_type(self) -> str: - return self._entity_type - - def get_entity_id(self) -> List[str]: - return self._entity_id - - def get_entity_id_as_string(self) -> str: - """ - :return: string representation of the entity ids. If there are more than one part in the entity id part, it will - return in this format (,,...) 
- """ - return self._entity_id_to_string() - - def get_domain(self) -> str: - return self._domain - - @staticmethod - def _get_entity_id_from_str(entity_id: str) -> List[str]: - if not (entity_id.startswith("(") and entity_id.endswith(")")): - return [entity_id] - - parts = [] - start_paren_count = 1 - part_start = 1 - for i in range(1, len(entity_id)): - c = entity_id[i] - if c == "(": - start_paren_count += 1 - elif c == ")": - start_paren_count -= 1 - if start_paren_count < 0: - raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") - elif c == ",": - if start_paren_count != 1: - continue - - if i - part_start <= 0: - raise InvalidUrnError(f"{entity_id}, empty part disallowed") - parts.append(entity_id[part_start:i]) - part_start = i + 1 - - if start_paren_count != 0: - raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") - - parts.append(entity_id[part_start:-1]) - - return parts - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - pass - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - pass - - def __str__(self) -> str: - return f"{self.URN_PREFIX}:{self._domain}:{self._entity_type}:{self._entity_id_to_string()}" - - def _entity_id_to_string(self) -> str: - if len(self._entity_id) == 1: - return self._entity_id[0] - result = "" - for part in self._entity_id: - result = result + str(part) + "," - return f"({result[:-1]})" - - def __hash__(self) -> int: - return hash((self._domain, self._entity_type) + tuple(self._entity_id)) - - def __eq__(self, other: object) -> bool: - return ( - ( - self._entity_id == other._entity_id - and self._domain == other._domain - and self._entity_type == other._entity_type - ) - if isinstance(other, Urn) - else False - ) diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py index 169a4ac3649a3..4f228494f416b 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py @@ -131,9 +131,11 @@ def _modify_at_path( def _lowercase_dataset_urn(dataset_urn: str) -> str: - cur_urn = DatasetUrn.create_from_string(dataset_urn) - cur_urn._entity_id[1] = cur_urn._entity_id[1].lower() - return str(cur_urn) + cur_urn = DatasetUrn.from_string(dataset_urn) + new_urn = DatasetUrn( + platform=cur_urn.platform, name=cur_urn.name.lower(), env=cur_urn.env + ) + return str(new_urn) def lowercase_dataset_urns( @@ -149,7 +151,7 @@ def modify_urn(urn: str) -> str: return _lowercase_dataset_urn(urn) elif guess_entity_type(urn) == "schemaField": cur_urn = Urn.create_from_string(urn) - cur_urn._entity_id[0] = _lowercase_dataset_urn(cur_urn._entity_id[0]) + cur_urn._entity_ids[0] = _lowercase_dataset_urn(cur_urn._entity_ids[0]) return str(cur_urn) return urn diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py index 712ae2066b728..ecea318339345 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py @@ -4,7 +4,6 @@ import pydantic import pytest -from datahub.emitter.mce_builder import make_dataset_urn from datahub.ingestion.source.state.checkpoint import Checkpoint, CheckpointStateBase from datahub.ingestion.source.state.sql_common_state import ( BaseSQLAlchemyCheckpointState, @@ -59,12 +58,15 @@ def _assert_checkpoint_deserialization( def 
_make_sql_alchemy_checkpoint_state() -> BaseSQLAlchemyCheckpointState: + # Note that the urns here purposely use a lowercase env, even though it's + # technically incorrect. This is purely for backwards compatibility testing, but + # all existing code uses correctly formed envs. base_sql_alchemy_checkpoint_state_obj = BaseSQLAlchemyCheckpointState() base_sql_alchemy_checkpoint_state_obj.add_checkpoint_urn( - type="table", urn=make_dataset_urn("mysql", "db1.t1", "prod") + type="table", urn="urn:li:dataset:(urn:li:dataPlatform:mysql,db1.t1,prod)" ) base_sql_alchemy_checkpoint_state_obj.add_checkpoint_urn( - type="view", urn=make_dataset_urn("mysql", "db1.v1", "prod") + type="view", urn="urn:li:dataset:(urn:li:dataPlatform:mysql,db1.v1,prod)" ) return base_sql_alchemy_checkpoint_state_obj diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py b/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py index f4517ba2df9c9..3b0e4e31d4b4a 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py @@ -25,6 +25,6 @@ def test_kafka_state_migration() -> None: } ) assert state.urns == [ - "urn:li:dataset:(urn:li:dataPlatform:kafka,test_topic1,test)", + "urn:li:dataset:(urn:li:dataPlatform:kafka,test_topic1,TEST)", "urn:li:dataset:(urn:li:dataPlatform:kafka,topic_2,DEV)", ] diff --git a/metadata-ingestion/tests/unit/test_urn.py b/metadata-ingestion/tests/unit/test_urn.py deleted file mode 100644 index 8bab01e437fdb..0000000000000 --- a/metadata-ingestion/tests/unit/test_urn.py +++ /dev/null @@ -1,45 +0,0 @@ -import unittest - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class TestUrn(unittest.TestCase): - def test_parse_urn(self) -> None: - simple_urn_str = "urn:li:dataPlatform:abc" - urn = Urn.create_from_string(simple_urn_str) - assert urn.get_entity_id_as_string() == "abc" - assert urn.get_entity_id() == ["abc"] - assert urn.get_type() == "dataPlatform" - assert urn.get_domain() == "li" - assert urn.__str__() == simple_urn_str - assert urn == Urn("dataPlatform", ["abc"]) - - complex_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - urn = Urn.create_from_string(complex_urn_str) - assert urn.get_entity_id_as_string() == "(urn:li:dataPlatform:abc,def,prod)" - assert urn.get_entity_id() == ["urn:li:dataPlatform:abc", "def", "prod"] - assert urn.get_type() == "dataset" - assert urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - - def test_url_encode_urn(self) -> None: - urn_with_slash: Urn = Urn.create_from_string( - "urn:li:dataset:(urn:li:dataPlatform:abc,def/ghi,prod)" - ) - assert ( - Urn.url_encode(str(urn_with_slash)) - == "urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Aabc%2Cdef%2Fghi%2Cprod%29" - ) - - def test_invalid_urn(self) -> None: - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:()") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:(abc,)") diff --git a/metadata-ingestion/tests/unit/test_corp_group_urn.py b/metadata-ingestion/tests/unit/urns/test_corp_group_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_corp_group_urn.py rename to metadata-ingestion/tests/unit/urns/test_corp_group_urn.py index 
9cfd925ef34eb..1897a0e8686f0 100644 --- a/metadata-ingestion/tests/unit/test_corp_group_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_corp_group_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.corp_group_urn import CorpGroupUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestCorpGroupUrn(unittest.TestCase): def test_parse_urn(self) -> None: corp_group_urn_str = "urn:li:corpGroup:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert corp_group_urn.get_entity_id() == ["abc"] assert str(corp_group_urn) == corp_group_urn_str - assert corp_group_urn == CorpGroupUrn("corpGroup", ["abc"]) + assert corp_group_urn == CorpGroupUrn(name="abc") assert corp_group_urn == CorpGroupUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_corpuser_urn.py b/metadata-ingestion/tests/unit/urns/test_corpuser_urn.py similarity index 88% rename from metadata-ingestion/tests/unit/test_corpuser_urn.py rename to metadata-ingestion/tests/unit/urns/test_corpuser_urn.py index 40b83214a785b..7a2a4f4ff4493 100644 --- a/metadata-ingestion/tests/unit/test_corpuser_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_corpuser_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.corpuser_urn import CorpuserUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestCorpuserUrn(unittest.TestCase): def test_parse_urn(self) -> None: corpuser_urn_str = "urn:li:corpuser:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert corpuser_urn.get_entity_id() == ["abc"] assert str(corpuser_urn) == corpuser_urn_str - assert corpuser_urn == CorpuserUrn("corpuser", ["abc"]) + assert corpuser_urn == CorpuserUrn("abc") assert corpuser_urn == CorpuserUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_data_flow_urn.py b/metadata-ingestion/tests/unit/urns/test_data_flow_urn.py similarity index 77% rename from metadata-ingestion/tests/unit/test_data_flow_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_flow_urn.py index 8b739d39abf67..524411121d418 100644 --- a/metadata-ingestion/tests/unit/test_data_flow_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_flow_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.data_flow_urn import DataFlowUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDataFlowUrn(unittest.TestCase): def test_parse_urn(self) -> None: data_flow_urn_str = "urn:li:dataFlow:(airflow,def,prod)" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert data_flow_urn.get_flow_id() == "def" assert data_flow_urn.get_env() == "prod" assert data_flow_urn.__str__() == "urn:li:dataFlow:(airflow,def,prod)" - assert data_flow_urn == DataFlowUrn("dataFlow", ["airflow", "def", "prod"]) + assert data_flow_urn == DataFlowUrn("airflow", "def", "prod") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): @@ -20,8 +23,3 @@ def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): DataFlowUrn.create_from_string("urn:li:dataFlow:(airflow,flow_id)") - - with self.assertRaises(InvalidUrnError): - DataFlowUrn.create_from_string( - "urn:li:dataFlow:(airflow,flow_id,invalidEnv)" - ) diff --git 
a/metadata-ingestion/tests/unit/test_data_job_urn.py b/metadata-ingestion/tests/unit/urns/test_data_job_urn.py similarity index 90% rename from metadata-ingestion/tests/unit/test_data_job_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_job_urn.py index 0cd9084a51522..bf039cd2a91f9 100644 --- a/metadata-ingestion/tests/unit/test_data_job_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_job_urn.py @@ -1,10 +1,13 @@ import unittest +import pytest + from datahub.utilities.urns.data_flow_urn import DataFlowUrn from datahub.utilities.urns.data_job_urn import DataJobUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDataJobUrn(unittest.TestCase): def test_parse_urn(self) -> None: data_job_urn_str = ( @@ -17,7 +20,7 @@ def test_parse_urn(self) -> None: assert data_job_urn.get_job_id() == "job_id" assert data_job_urn.__str__() == data_job_urn_str assert data_job_urn == DataJobUrn( - "dataJob", ["urn:li:dataFlow:(airflow,flow_id,prod)", "job_id"] + "urn:li:dataFlow:(airflow,flow_id,prod)", "job_id" ) def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_data_process_instance_urn.py b/metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py similarity index 90% rename from metadata-ingestion/tests/unit/test_data_process_instance_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py index e6cd201e12c7a..a86f8dd99416f 100644 --- a/metadata-ingestion/tests/unit/test_data_process_instance_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py @@ -1,10 +1,13 @@ import unittest +import pytest + from datahub.utilities.urns.data_process_instance_urn import DataProcessInstanceUrn from datahub.utilities.urns.error import InvalidUrnError -class TestDomainUrn(unittest.TestCase): +@pytest.mark.filterwarnings("ignore::DeprecationWarning") +class TestDataProcessInstanceUrn(unittest.TestCase): def test_parse_urn(self) -> None: dataprocessinstance_urn_str = "urn:li:dataProcessInstance:abc" dataprocessinstance_urn = DataProcessInstanceUrn.create_from_string( @@ -14,9 +17,7 @@ def test_parse_urn(self) -> None: assert dataprocessinstance_urn.get_entity_id() == ["abc"] assert str(dataprocessinstance_urn) == dataprocessinstance_urn_str - assert dataprocessinstance_urn == DataProcessInstanceUrn( - "dataProcessInstance", ["abc"] - ) + assert dataprocessinstance_urn == DataProcessInstanceUrn("abc") assert dataprocessinstance_urn == DataProcessInstanceUrn.create_from_id("abc") assert "abc" == dataprocessinstance_urn.get_dataprocessinstance_id() diff --git a/metadata-ingestion/tests/unit/test_dataset_urn.py b/metadata-ingestion/tests/unit/urns/test_dataset_urn.py similarity index 81% rename from metadata-ingestion/tests/unit/test_dataset_urn.py rename to metadata-ingestion/tests/unit/urns/test_dataset_urn.py index e1e37409d8a63..53065143a6ae4 100644 --- a/metadata-ingestion/tests/unit/test_dataset_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_dataset_urn.py @@ -1,26 +1,25 @@ import unittest +import pytest + from datahub.utilities.urns.data_platform_urn import DataPlatformUrn from datahub.utilities.urns.dataset_urn import DatasetUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDatasetUrn(unittest.TestCase): def test_parse_urn(self) -> None: - dataset_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + dataset_urn_str = 
"urn:li:dataset:(urn:li:dataPlatform:abc,def,PROD)" dataset_urn = DatasetUrn.create_from_string(dataset_urn_str) assert ( dataset_urn.get_data_platform_urn() == DataPlatformUrn.create_from_string("urn:li:dataPlatform:abc") ) assert dataset_urn.get_dataset_name() == "def" - assert dataset_urn.get_env() == "prod" - assert ( - dataset_urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - ) - assert dataset_urn == DatasetUrn( - "dataset", ["urn:li:dataPlatform:abc", "def", "prod"] - ) + assert dataset_urn.get_env() == "PROD" + assert dataset_urn.__str__() == dataset_urn_str + assert dataset_urn == DatasetUrn("urn:li:dataPlatform:abc", "def", "prod") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): diff --git a/metadata-ingestion/tests/unit/test_domain_urn.py b/metadata-ingestion/tests/unit/urns/test_domain_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_domain_urn.py rename to metadata-ingestion/tests/unit/urns/test_domain_urn.py index e5e4dffc525cd..843a5bf40f5c6 100644 --- a/metadata-ingestion/tests/unit/test_domain_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_domain_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.domain_urn import DomainUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDomainUrn(unittest.TestCase): def test_parse_urn(self) -> None: domain_urn_str = "urn:li:domain:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert domain_urn.get_entity_id() == ["abc"] assert str(domain_urn) == domain_urn_str - assert domain_urn == DomainUrn("domain", ["abc"]) + assert domain_urn == DomainUrn("abc") assert domain_urn == DomainUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_notebook_urn.py b/metadata-ingestion/tests/unit/urns/test_notebook_urn.py similarity index 86% rename from metadata-ingestion/tests/unit/test_notebook_urn.py rename to metadata-ingestion/tests/unit/urns/test_notebook_urn.py index 6b245e29ceae9..3ec580f02142b 100644 --- a/metadata-ingestion/tests/unit/test_notebook_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_notebook_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.error import InvalidUrnError from datahub.utilities.urns.notebook_urn import NotebookUrn +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestNotebookUrn(unittest.TestCase): def test_parse_urn(self) -> None: notebook_urn_str = "urn:li:notebook:(querybook,123)" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert notebook_urn.get_notebook_id() == "123" assert str(notebook_urn) == notebook_urn_str - assert notebook_urn == NotebookUrn("notebook", ["querybook", "123"]) + assert notebook_urn == NotebookUrn("querybook", "123") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): diff --git a/metadata-ingestion/tests/unit/test_tag_urn.py b/metadata-ingestion/tests/unit/urns/test_tag_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_tag_urn.py rename to metadata-ingestion/tests/unit/urns/test_tag_urn.py index 630420dc1263f..fa3664bcc0218 100644 --- a/metadata-ingestion/tests/unit/test_tag_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_tag_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.error import InvalidUrnError from datahub.utilities.urns.tag_urn import TagUrn 
+@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestTagUrn(unittest.TestCase): def test_parse_urn(self) -> None: tag_urn_str = "urn:li:tag:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert tag_urn.get_entity_id() == ["abc"] assert str(tag_urn) == tag_urn_str - assert tag_urn == TagUrn("tag", ["abc"]) + assert tag_urn == TagUrn("abc") assert tag_urn == TagUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/urns/test_urn.py b/metadata-ingestion/tests/unit/urns/test_urn.py new file mode 100644 index 0000000000000..1bf48082fec8c --- /dev/null +++ b/metadata-ingestion/tests/unit/urns/test_urn.py @@ -0,0 +1,56 @@ +import pytest + +from datahub.metadata.urns import DatasetUrn, Urn +from datahub.utilities.urns.error import InvalidUrnError + +pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning") + + +def test_parse_urn() -> None: + simple_urn_str = "urn:li:dataPlatform:abc" + urn = Urn.create_from_string(simple_urn_str) + assert urn.get_entity_id_as_string() == "abc" + assert urn.get_entity_id() == ["abc"] + assert urn.get_type() == "dataPlatform" + assert urn.get_domain() == "li" + assert urn.__str__() == simple_urn_str + assert urn == Urn("dataPlatform", ["abc"]) + + complex_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + urn = Urn.create_from_string(complex_urn_str) + assert urn.get_entity_id_as_string() == "(urn:li:dataPlatform:abc,def,prod)" + assert urn.get_entity_id() == ["urn:li:dataPlatform:abc", "def", "prod"] + assert urn.get_type() == "dataset" + assert urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + + +def test_url_encode_urn() -> None: + urn_with_slash: Urn = Urn.create_from_string( + "urn:li:dataset:(urn:li:dataPlatform:abc,def/ghi,prod)" + ) + assert ( + Urn.url_encode(str(urn_with_slash)) + == "urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Aabc%2Cdef%2Fghi%2Cprod%29" + ) + + +def test_invalid_urn() -> None: + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:()") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:(abc,)") + + +def test_urn_type_dispatch() -> None: + urn = Urn.from_string("urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)") + assert isinstance(urn, DatasetUrn) + + with pytest.raises(InvalidUrnError, match="Passed an urn of type corpuser"): + DatasetUrn.from_string("urn:li:corpuser:foo") diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index a5296d074093b..1ba238b737236 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -400,7 +400,7 @@ entities: - dataHubUpgradeRequest - dataHubUpgradeResult - name: inviteToken - category: core + category: internal keyAspect: inviteTokenKey aspects: - inviteToken @@ -425,7 +425,7 @@ entities: aspects: - postInfo - name: dataHubStepState - category: core + category: internal keyAspect: dataHubStepStateKey aspects: - dataHubStepStateProperties From a8476ee657a3c116b65de8cd14a731acff164503 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 18:34:48 -0500 Subject: [PATCH 178/792] fix(airflow): support inlet datajobs correctly in v1 plugin (#9331) --- docs/lineage/airflow.md | 42 ++++++++-- .../datahub_listener.py | 4 + 
.../datahub_plugin_v22.py | 43 ++++++---- .../integration/goldens/v1_basic_iolets.json | 64 ++++----------- .../integration/goldens/v1_simple_dag.json | 78 ++++++------------- .../integration/goldens/v2_basic_iolets.json | 18 ++--- .../v2_basic_iolets_no_dag_listener.json | 14 ++-- .../integration/goldens/v2_simple_dag.json | 34 ++++---- .../v2_simple_dag_no_dag_listener.json | 28 +++---- .../goldens/v2_snowflake_operator.json | 14 ++-- .../goldens/v2_sqlite_operator.json | 62 +++++++-------- .../v2_sqlite_operator_no_dag_listener.json | 70 ++++++++--------- .../tests/integration/test_plugin.py | 52 ++++++++++--- .../datahub/api/entities/datajob/datajob.py | 3 +- 14 files changed, 269 insertions(+), 257 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 32da518d6c04c..8fd38f560bfbb 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -8,7 +8,7 @@ If you're looking to schedule DataHub ingestion using Airflow, see the guide on The DataHub Airflow plugin supports: -- Automatic column-level lineage extraction from various operators e.g. `SqlOperator`s (including `MySqlOperator`, `PostgresOperator`, `SnowflakeOperator`, and more), `S3FileTransformOperator`, and a few others. +- Automatic column-level lineage extraction from various operators e.g. SQL operators (including `MySqlOperator`, `PostgresOperator`, `SnowflakeOperator`, and more), `S3FileTransformOperator`, and more. - Airflow DAG and tasks, including properties, ownership, and tags. - Task run information, including task successes and failures. - Manual lineage annotations using `inlets` and `outlets` on Airflow operators. @@ -76,12 +76,6 @@ enabled = True # default | log_level | _no change_ | [debug] Set the log level for the plugin. | | debug_emitter | false | [debug] If true, the plugin will log the emitted events. | -### Automatic lineage extraction - -To automatically extract lineage information, the v2 plugin builds on top of Airflow's built-in [OpenLineage extractors](https://openlineage.io/docs/integrations/airflow/default-extractors). - -The SQL-related extractors have been updated to use DataHub's SQL parser, which is more robust than the built-in one and uses DataHub's metadata information to generate column-level lineage. We discussed the DataHub SQL parser, including why schema-aware parsing works better and how it performs on benchmarks, during the [June 2023 community town hall](https://youtu.be/1QVcUmRQK5E?si=U27zygR7Gi_KdkzE&t=2309). - ## DataHub Plugin v1 ### Installation @@ -152,6 +146,40 @@ conn_id = datahub_rest_default # or datahub_kafka_default Emitting DataHub ... ``` +## Automatic lineage extraction + +Only the v2 plugin supports automatic lineage extraction. If you're using the v1 plugin, you must use manual lineage annotation or emit lineage directly. + +To automatically extract lineage information, the v2 plugin builds on top of Airflow's built-in [OpenLineage extractors](https://openlineage.io/docs/integrations/airflow/default-extractors). +As such, we support a superset of the default operators that Airflow/OpenLineage supports. + +The SQL-related extractors have been updated to use [DataHub's SQL lineage parser](https://blog.datahubproject.io/extracting-column-level-lineage-from-sql-779b8ce17567), which is more robust than the built-in one and uses DataHub's metadata information to generate column-level lineage. + +Supported operators: + +- `SQLExecuteQueryOperator`, including any subclasses. 
Note that in newer versions of Airflow (generally Airflow 2.5+), most SQL operators inherit from this class. +- `AthenaOperator` and `AWSAthenaOperator` +- `BigQueryOperator` and `BigQueryExecuteQueryOperator` +- `MySqlOperator` +- `PostgresOperator` +- `RedshiftSQLOperator` +- `SnowflakeOperator` and `SnowflakeOperatorAsync` +- `SqliteOperator` +- `TrinoOperator` + + + ## Manual Lineage Annotation ### Using `inlets` and `outlets` diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index c39eef2635658..debc91700d3db 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -296,6 +296,7 @@ def _extract_lineage( logger.debug("Merging start datajob into finish datajob") datajob.inlets.extend(original_datajob.inlets) datajob.outlets.extend(original_datajob.outlets) + datajob.upstream_urns.extend(original_datajob.upstream_urns) datajob.fine_grained_lineages.extend(original_datajob.fine_grained_lineages) for k, v in original_datajob.properties.items(): @@ -304,6 +305,9 @@ def _extract_lineage( # Deduplicate inlets/outlets. datajob.inlets = list(sorted(set(datajob.inlets), key=lambda x: str(x))) datajob.outlets = list(sorted(set(datajob.outlets), key=lambda x: str(x))) + datajob.upstream_urns = list( + sorted(set(datajob.upstream_urns), key=lambda x: str(x)) + ) # Write all other OL facets as DataHub properties. if task_metadata: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py index f9a2119f51e32..51a4151bc8207 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py @@ -18,6 +18,10 @@ ) from datahub_airflow_plugin._config import get_lineage_config from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator +from datahub_airflow_plugin.entities import ( + entities_to_datajob_urn_list, + entities_to_dataset_urn_list, +) from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook from datahub_airflow_plugin.lineage.datahub import DatahubLineageConfig @@ -94,7 +98,8 @@ def datahub_task_status_callback(context, status): # This code is from the original airflow lineage code -> # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_task_inlets_advanced(task, context) + task_inlets = get_task_inlets_advanced(task, context) + task_outlets = get_task_outlets(task) emitter = ( DatahubGenericHook(config.datahub_conn_id).get_underlying_hook().make_emitter() @@ -116,13 +121,15 @@ def datahub_task_status_callback(context, status): capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = get_task_outlets(task) - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) + datajob.inlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_inlets]) + ) + datajob.outlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_outlets]) + ) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in task_inlets]) + ) 
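
The hunk above is the heart of the fix: task inlets are now partitioned, with dataset URNs kept in `datajob.inlets` and dataJob URNs routed into `datajob.upstream_urns`, instead of every inlet being appended as an input dataset. A hedged sketch of the DAG-side usage this enables, modeled on the `basic_iolets` test DAG whose goldens change below (the exact `Dataset`/`Urn` signatures come from `datahub_airflow_plugin.entities`; the `Urn` wrapper is shown here taking the urn string positionally):

```python
from airflow.operators.bash import BashOperator

from datahub_airflow_plugin.entities import Dataset, Urn

task = BashOperator(
    task_id="run_data_task",
    bash_command="echo 'This is where you might run your data tooling.'",
    inlets=[
        # Dataset inlets end up in the DataJob's inputDatasets.
        Dataset(platform="snowflake", name="mydb.schema.tableA", env="PROD"),
        Urn("urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)"),
        # A dataJob urn inlet now lands in inputDatajobs (task-to-task
        # lineage) rather than being mis-emitted as an input dataset.
        Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)"),
    ],
    outlets=[Dataset(platform="snowflake", name="mydb.schema.tableD", env="PROD")],
)
```

Consistent with this, the golden files that follow show the dataJob urn moving out of `inputDatasets`/`inputs` and into `inputDatajobs`, and the spurious `status` aspect previously emitted for it disappearing.
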
task.log.info(f"Emitting Datahub Datajob: {datajob}") datajob.emit(emitter, callback=_make_emit_callback(task.log)) @@ -169,7 +176,8 @@ def datahub_pre_execution(context): # This code is from the original airflow lineage code -> # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_task_inlets_advanced(task, context) + task_inlets = get_task_inlets_advanced(task, context) + task_outlets = get_task_outlets(task) datajob = AirflowGenerator.generate_datajob( cluster=config.cluster, @@ -178,14 +186,15 @@ def datahub_pre_execution(context): capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = get_task_outlets(task) - - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) + datajob.inlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_inlets]) + ) + datajob.outlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_outlets]) + ) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in task_inlets]) + ) task.log.info(f"Emitting Datahub dataJob {datajob}") datajob.emit(emitter, callback=_make_emit_callback(task.log)) diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json index 6b460e99b1f28..a21df71efcdac 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -95,14 +95,15 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -151,17 +152,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -257,14 +247,15 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - 
"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -313,17 +304,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -389,9 +369,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.143271", - "start_date": "2023-11-08 09:55:05.801617+00:00", - "end_date": "2023-11-08 09:55:05.944888+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -408,7 +388,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437305801, + "time": 1701222667932, "actor": "urn:li:corpuser:datahub" } } @@ -437,8 +417,7 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ] } } @@ -501,17 +480,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -541,7 +509,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437305801, + "timestampMillis": 1701222667932, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -558,7 +526,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437305944, + "timestampMillis": 1701222668122, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json index 7ec172e3678dc..6116722350541 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -94,13 +94,14 @@ "json": { "inputDatasets": [ 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -127,17 +128,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -220,13 +210,14 @@ "json": { "inputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -253,17 +244,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -318,9 +298,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.120524", - "start_date": "2023-11-08 09:54:06.065112+00:00", - "end_date": "2023-11-08 09:54:06.185636+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -337,7 +317,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437246065, + "time": 1701222595752, "actor": "urn:li:corpuser:datahub" } } @@ -364,8 +344,7 @@ "json": { "inputs": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ] } } @@ -405,17 +384,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -434,7 +402,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437246065, + "timestampMillis": 1701222595752, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -451,7 +419,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437246185, + "timestampMillis": 1701222595962, "partitionSpec": { "type": 
"FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -476,7 +444,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -687,9 +655,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.099975", - "start_date": "2023-11-08 09:54:09.744583+00:00", - "end_date": "2023-11-08 09:54:09.844558+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -706,7 +674,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437249744, + "time": 1701222599804, "actor": "urn:li:corpuser:datahub" } } @@ -731,7 +699,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437249744, + "timestampMillis": 1701222599804, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -748,7 +716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437249844, + "timestampMillis": 1701222599959, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json index 6767a368f366a..7c52cbcddc13c 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -75,7 +75,7 @@ "downstream_task_ids": "[]", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, 
{\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": 
\"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -218,9 +218,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:07:55.311482+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -237,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671275311, + "time": 1701223416947, "actor": "urn:li:corpuser:datahub" } } @@ -358,7 +358,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671275311, + "timestampMillis": 1701223416947, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -387,7 +387,7 @@ "downstream_task_ids": "[]", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, 
\"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": 
\"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -528,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671276777, + "timestampMillis": 1701223417702, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json index 63b0a05935554..150f95d5171c7 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -218,9 +218,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:11:17.444435+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -237,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643477444, + "time": 1701223185349, "actor": "urn:li:corpuser:datahub" } } @@ -358,7 +358,7 @@ "aspectName": 
"dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643477444, + "timestampMillis": 1701223185349, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -528,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643478123, + "timestampMillis": 1701223186055, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json index c558f79c32e15..0248ab0473c9e 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -76,7 +76,7 @@ "downstream_task_ids": "['run_another_data_task']", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, 
\"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -183,9 +183,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:06:07.193282+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -202,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671167193, + "time": 1701223349283, "actor": "urn:li:corpuser:datahub" } } @@ -287,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671167193, + "timestampMillis": 1701223349283, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -316,7 +316,7 @@ "downstream_task_ids": "['run_another_data_task']", "inlets": "[Dataset(platform='snowflake', 
name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, 
\"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -421,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671168726, + "timestampMillis": 1701223349928, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -453,7 +453,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, 
\"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -522,9 +522,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:06:19.970466+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -541,7 +541,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671179970, + "time": 1701223355004, "actor": "urn:li:corpuser:datahub" } } @@ -566,7 +566,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671179970, + "timestampMillis": 1701223355004, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -595,7 +595,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, 
\"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -662,7 +662,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671180730, + "timestampMillis": 1701223355580, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json index ec0f3cab1e81f..7860251fc22dc 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -183,9 +183,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:10:10.856995+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -202,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643410856, + "time": 1701223113232, "actor": "urn:li:corpuser:datahub" } } @@ -287,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643410856, + "timestampMillis": 1701223113232, "partitionSpec": { 
"type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -421,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643411390, + "timestampMillis": 1701223113778, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -446,7 +446,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -580,9 +580,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:10:15.128009+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -599,7 +599,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643415128, + "time": 1701223119777, "actor": "urn:li:corpuser:datahub" } } @@ -624,7 +624,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643415128, + "timestampMillis": 1701223119777, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -720,7 +720,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643415856, + "timestampMillis": 1701223120456, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json index 0a704ed10c911..1bf0820c7cb41 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/snowflake_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -226,9 +226,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-09-30 06:55:36.844976+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -245,7 +245,7 @@ "name": "snowflake_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696056936844, + "time": 1701223475050, "actor": "urn:li:corpuser:datahub" } } @@ -318,7 +318,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056936844, + "timestampMillis": 1701223475050, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -496,7 +496,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056938096, + "timestampMillis": 1701223476665, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git 
a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json index 3b4b60174f99f..3965ee4a10ad0 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -193,9 +193,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:10.262813+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401750262, + "time": 1701223533895, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401750262, + "timestampMillis": 1701223533895, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401750651, + "timestampMillis": 1701223534302, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -557,9 +557,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:15.013834+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -576,7 +576,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401755013, + "time": 1701223539348, "actor": "urn:li:corpuser:datahub" } } @@ -625,7 +625,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401755013, + "timestampMillis": 1701223539348, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -735,7 +735,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401755600, + "timestampMillis": 1701223540058, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -920,9 +920,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:20.216818+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -939,7 +939,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401760216, + "time": 1701223548187, "actor": "urn:li:corpuser:datahub" } } @@ -1012,7 +1012,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401760216, + "timestampMillis": 1701223548187, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1248,7 +1248,7 @@ 
"aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401761237, + "timestampMillis": 1701223549416, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1365,9 +1365,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:26.243934+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1384,7 +1384,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401766243, + "time": 1701223557795, "actor": "urn:li:corpuser:datahub" } } @@ -1433,7 +1433,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401766243, + "timestampMillis": 1701223557795, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1545,7 +1545,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401767373, + "timestampMillis": 1701223559079, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1662,9 +1662,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:32.075613+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1681,7 +1681,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401772075, + "time": 1701223564459, "actor": "urn:li:corpuser:datahub" } } @@ -1730,7 +1730,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401772075, + "timestampMillis": 1701223564459, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1842,7 +1842,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401773454, + "timestampMillis": 1701223566107, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json index 99a8aadb7fd9c..a9f9fbac56fff 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -193,9 +193,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:17.805860+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643537805, + "time": 
1701223251992, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643537805, + "timestampMillis": 1701223251992, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643538759, + "timestampMillis": 1701223253042, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -467,7 +467,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -614,9 +614,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:22.560376+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -633,7 +633,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643542560, + "time": 1701223258947, "actor": "urn:li:corpuser:datahub" } } @@ -682,7 +682,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643542560, + "timestampMillis": 1701223258947, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -792,7 +792,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643543925, + "timestampMillis": 1701223260414, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -817,7 +817,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1034,9 +1034,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:29.429032+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1053,7 +1053,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643549429, + "time": 1701223266595, "actor": "urn:li:corpuser:datahub" } } @@ -1126,7 +1126,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643549429, + "timestampMillis": 1701223266595, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1362,7 +1362,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643551423, + "timestampMillis": 1701223268728, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1387,7 +1387,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, 
tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1536,9 +1536,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:37.423556+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1555,7 +1555,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643557423, + "time": 1701223275045, "actor": "urn:li:corpuser:datahub" } } @@ -1604,7 +1604,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643557423, + "timestampMillis": 1701223275045, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1716,7 +1716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643559607, + "timestampMillis": 1701223277378, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1741,7 +1741,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1890,9 +1890,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:43.792375+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1909,7 +1909,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643563792, + "time": 1701223282010, "actor": "urn:li:corpuser:datahub" } } @@ -1958,7 +1958,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643563792, + "timestampMillis": 1701223282010, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -2070,7 +2070,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643566350, + "timestampMillis": 1701223284766, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py index a2b7fd151a1e4..0c5d11f693eef 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py @@ -1,6 +1,7 @@ import contextlib import dataclasses import functools +import json import logging import os import pathlib @@ -8,12 +9,13 @@ import signal import subprocess import time -from typing import Iterator, Sequence +from typing import Any, Iterator, Sequence import pytest import requests import tenacity from airflow.models.connection import Connection +from datahub.ingestion.sink.file import write_metadata_file from datahub.testing.compare_metadata_json import assert_metadata_files_equal from datahub_airflow_plugin._airflow_shims import ( @@ -358,26 +360,58 @@ def test_airflow_plugin( print("Sleeping for a few seconds to let the plugin finish...") time.sleep(10) + _sanitize_output_file(airflow_instance.metadata_file) + check_golden_file( pytestconfig=pytestconfig, 
        output_path=airflow_instance.metadata_file,
         golden_path=golden_path,
         ignore_paths=[
             # Timing-related items.
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['start_date'\]",
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['end_date'\]",
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['duration'\]",
-            # Host-specific items.
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['pid'\]",
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['hostname'\]",
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['unixname'\]",
+            # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['start_date'\]",
+            # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['end_date'\]",
+            # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['duration'\]",
             # TODO: If we switched to Git urls, maybe we could get this to work consistently.
-            r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['fileloc'\]",
+            # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['fileloc'\]",
             r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['openlineage_.*'\]",
         ],
     )


+def _sanitize_output_file(output_path: pathlib.Path) -> None:
+    # Overwrite some custom properties in the output file to make it easier to compare.
+
+    props_job = {
+        "fileloc": "",
+    }
+    props_process = {
+        "start_date": "",
+        "end_date": "",
+        "duration": "",
+    }
+
+    def _sanitize(obj: Any) -> None:
+        if isinstance(obj, dict) and "customProperties" in obj:
+            replacement_props = (
+                props_process if "run_id" in obj["customProperties"] else props_job
+            )
+            obj["customProperties"] = {
+                k: replacement_props.get(k, v)
+                for k, v in obj["customProperties"].items()
+            }
+        elif isinstance(obj, dict):
+            for v in obj.values():
+                _sanitize(v)
+        elif isinstance(obj, list):
+            for v in obj:
+                _sanitize(v)
+
+    objs = json.loads(output_path.read_text())
+    _sanitize(objs)
+
+    write_metadata_file(output_path, objs)
+
+
 if __name__ == "__main__":
     # When run directly, just set up a local airflow instance.
     import tempfile
diff --git a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py
index 6c42e830e223b..1ec74b94179d5 100644
--- a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py
+++ b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py
@@ -40,7 +40,8 @@ class DataJob:
         group_owners (Set[str]): A list of group ids that own this job.
         inlets (List[str]): List of urns the DataProcessInstance consumes
         outlets (List[str]): List of urns the DataProcessInstance produces
-        input_datajob_urns: List[DataJobUrn] = field(default_factory=list)
+        fine_grained_lineages: Column lineage for the inlets and outlets
+        upstream_urns: List[DataJobUrn] = field(default_factory=list)
     """

     id: str

From f9fd9467ef14cd5b39cac4c71e214d9088f0f9a1 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Thu, 30 Nov 2023 21:00:43 -0500
Subject: [PATCH 179/792] feat(ingest): clean up DataHubRestEmitter return type
 (#9286)

Co-authored-by: Andrew Sikowitz
---
 .../config/HomePageOnboardingConfig.tsx       |  3 +--
 docs/how/updating-datahub.md                  |  1 +
 .../datahub_airflow_plugin/hooks/datahub.py   | 23 ++++++++++++++-----
 .../airflow-plugin/tests/unit/test_airflow.py |  8 +++----
 .../src/datahub/emitter/generic_emitter.py    |  7 ++----
 .../src/datahub/emitter/rest_emitter.py       | 19 ++++++++-------
 .../datahub/ingestion/sink/datahub_rest.py    | 20 ++++++++++++----
 .../tests/test_helpers/graph_helpers.py       | 12 +++++-----
 .../tests/unit/test_rest_emitter.py           |  6 +++++
 9 files changed, 62 insertions(+), 37 deletions(-)

diff --git a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx
index 28a0465a1b2f7..8b361db5ab344 100644
--- a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx
+++ b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx
@@ -94,8 +94,7 @@ export const HomePageOnboardingConfig: OnboardingStep[] = [
                 Here are your organization's Data Platforms. Data Platforms represent
                 specific third-party Data Systems or Tools. Examples include Data
                 Warehouses like Snowflake,
-                Orchestrators like
-                Airflow, and Dashboarding tools like Looker.
+                Orchestrators like Airflow, and Dashboarding tools like Looker.
             ),
     },
diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md
index dad05fd0153f2..df179b0d0d2f7 100644
--- a/docs/how/updating-datahub.md
+++ b/docs/how/updating-datahub.md
@@ -11,6 +11,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe

 - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6.
 - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the direct constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated.
+- #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps.
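For callers that relied on the removed return value, a minimal migration sketch is to time the call directly, much as the `_emit_wrapper` added to the REST sink later in this patch does. The GMS endpoint and dataset urn below are illustrative assumptions:

```python
from datetime import datetime

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.schema_classes import StatusClass

emitter = DatahubRestEmitter(gms_server="http://localhost:8080")  # assumed local GMS
mcp = MetadataChangeProposalWrapper(
    entityUrn="urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",  # illustrative urn
    aspect=StatusClass(removed=False),
)

start = datetime.now()
emitter.emit(mcp)  # now returns None; measure the duration yourself if you need it
print(f"emit took {(datetime.now() - start).total_seconds():.3f}s")
```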
### Potential Downtime diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py index 9604931795ccb..b60f20c5bf8b2 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from airflow.models.connection import Connection from datahub.emitter.kafka_emitter import DatahubKafkaEmitter - from datahub.emitter.rest_emitter import DatahubRestEmitter + from datahub.emitter.rest_emitter import DataHubRestEmitter from datahub.emitter.synchronized_file_emitter import SynchronizedFileEmitter from datahub.ingestion.sink.datahub_kafka import KafkaSinkConfig @@ -63,6 +63,13 @@ def test_connection(self) -> Tuple[bool, str]: return True, "Successfully connected to DataHub." def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: + # We have a few places in the codebase that use this method directly, despite + # it being "private". For now, we retain backwards compatibility by keeping + # this method around, but should stop using it in the future. + host, token, extra_args = self._get_config_v2() + return host, token, extra_args.get("timeout_sec") + + def _get_config_v2(self) -> Tuple[str, Optional[str], Dict]: conn: "Connection" = self.get_connection(self.datahub_rest_conn_id) host = conn.host @@ -74,14 +81,18 @@ def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: "host parameter should not contain a port number if the port is specified separately" ) host = f"{host}:{conn.port}" - password = conn.password - timeout_sec = conn.extra_dejson.get("timeout_sec") - return (host, password, timeout_sec) + token = conn.password + + extra_args = conn.extra_dejson + return (host, token, extra_args) - def make_emitter(self) -> "DatahubRestEmitter": + def make_emitter(self) -> "DataHubRestEmitter": import datahub.emitter.rest_emitter - return datahub.emitter.rest_emitter.DatahubRestEmitter(*self._get_config()) + host, token, extra_args = self._get_config_v2() + return datahub.emitter.rest_emitter.DataHubRestEmitter( + host, token, **extra_args + ) def emit( self, diff --git a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py index 7fbf707995994..93b4af0501985 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py @@ -99,19 +99,19 @@ def patch_airflow_connection(conn: Connection) -> Iterator[Connection]: yield conn -@mock.patch("datahub.emitter.rest_emitter.DatahubRestEmitter", autospec=True) +@mock.patch("datahub.emitter.rest_emitter.DataHubRestEmitter", autospec=True) def test_datahub_rest_hook(mock_emitter): with patch_airflow_connection(datahub_rest_connection_config) as config: assert config.conn_id hook = DatahubRestHook(config.conn_id) hook.emit_mces([lineage_mce]) - mock_emitter.assert_called_once_with(config.host, None, None) + mock_emitter.assert_called_once_with(config.host, None) instance = mock_emitter.return_value instance.emit.assert_called_with(lineage_mce) -@mock.patch("datahub.emitter.rest_emitter.DatahubRestEmitter", autospec=True) +@mock.patch("datahub.emitter.rest_emitter.DataHubRestEmitter", autospec=True) def test_datahub_rest_hook_with_timeout(mock_emitter): with patch_airflow_connection( 
datahub_rest_connection_config_with_timeout @@ -120,7 +120,7 @@ def test_datahub_rest_hook_with_timeout(mock_emitter): hook = DatahubRestHook(config.conn_id) hook.emit_mces([lineage_mce]) - mock_emitter.assert_called_once_with(config.host, None, 5) + mock_emitter.assert_called_once_with(config.host, None, timeout_sec=5) instance = mock_emitter.return_value instance.emit.assert_called_with(lineage_mce) diff --git a/metadata-ingestion/src/datahub/emitter/generic_emitter.py b/metadata-ingestion/src/datahub/emitter/generic_emitter.py index 28138c6182758..54b3d6841fe9c 100644 --- a/metadata-ingestion/src/datahub/emitter/generic_emitter.py +++ b/metadata-ingestion/src/datahub/emitter/generic_emitter.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Optional, Union +from typing import Callable, Optional, Union from typing_extensions import Protocol @@ -21,10 +21,7 @@ def emit( # required. However, this would be a breaking change that may need # more careful consideration. callback: Optional[Callable[[Exception, str], None]] = None, - # TODO: The rest emitter returns timestamps as the return type. For now - # we smooth over that detail using Any, but eventually we should - # standardize on a return type. - ) -> Any: + ) -> None: raise NotImplementedError def flush(self) -> None: diff --git a/metadata-ingestion/src/datahub/emitter/rest_emitter.py b/metadata-ingestion/src/datahub/emitter/rest_emitter.py index afb19df9791af..4598c7faa2105 100644 --- a/metadata-ingestion/src/datahub/emitter/rest_emitter.py +++ b/metadata-ingestion/src/datahub/emitter/rest_emitter.py @@ -1,10 +1,9 @@ -import datetime import functools import json import logging import os from json.decoder import JSONDecodeError -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union import requests from deprecated import deprecated @@ -60,6 +59,7 @@ def __init__( self, gms_server: str, token: Optional[str] = None, + timeout_sec: Optional[float] = None, connect_timeout_sec: Optional[float] = None, read_timeout_sec: Optional[float] = None, retry_status_codes: Optional[List[int]] = None, @@ -103,11 +103,12 @@ def __init__( if disable_ssl_verification: self._session.verify = False - if connect_timeout_sec: - self._connect_timeout_sec = connect_timeout_sec - - if read_timeout_sec: - self._read_timeout_sec = read_timeout_sec + self._connect_timeout_sec = ( + connect_timeout_sec or timeout_sec or _DEFAULT_CONNECT_TIMEOUT_SEC + ) + self._read_timeout_sec = ( + read_timeout_sec or timeout_sec or _DEFAULT_READ_TIMEOUT_SEC + ) if self._connect_timeout_sec < 1 or self._read_timeout_sec < 1: logger.warning( @@ -208,8 +209,7 @@ def emit( UsageAggregation, ], callback: Optional[Callable[[Exception, str], None]] = None, - ) -> Tuple[datetime.datetime, datetime.datetime]: - start_time = datetime.datetime.now() + ) -> None: try: if isinstance(item, UsageAggregation): self.emit_usage(item) @@ -226,7 +226,6 @@ def emit( else: if callback: callback(None, "success") # type: ignore - return start_time, datetime.datetime.now() def emit_mce(self, mce: MetadataChangeEvent) -> None: url = f"{self._gms_server}/entities?action=ingest" diff --git a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py index d3abde0d36993..fedd8520dde4d 100644 --- a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py +++ b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py @@ -4,10 
+4,10 @@ import logging from concurrent.futures import ThreadPoolExecutor from dataclasses import dataclass -from datetime import timedelta +from datetime import datetime, timedelta from enum import auto from threading import BoundedSemaphore -from typing import Union +from typing import Tuple, Union from datahub.cli.cli_utils import set_env_variables_override_config from datahub.configuration.common import ( @@ -181,6 +181,18 @@ def _write_done_callback( self.report.report_failure({"e": e}) write_callback.on_failure(record_envelope, Exception(e), {}) + def _emit_wrapper( + self, + record: Union[ + MetadataChangeEvent, + MetadataChangeProposal, + MetadataChangeProposalWrapper, + ], + ) -> Tuple[datetime, datetime]: + start_time = datetime.now() + self.emitter.emit(record) + return start_time, datetime.now() + def write_record_async( self, record_envelope: RecordEnvelope[ @@ -194,7 +206,7 @@ def write_record_async( ) -> None: record = record_envelope.record if self.config.mode == SyncOrAsync.ASYNC: - write_future = self.executor.submit(self.emitter.emit, record) + write_future = self.executor.submit(self._emit_wrapper, record) write_future.add_done_callback( functools.partial( self._write_done_callback, record_envelope, write_callback @@ -204,7 +216,7 @@ def write_record_async( else: # execute synchronously try: - (start, end) = self.emitter.emit(record) + (start, end) = self._emit_wrapper(record) write_callback.on_success(record_envelope, success_metadata={}) except Exception as e: write_callback.on_failure(record_envelope, e, failure_metadata={}) diff --git a/metadata-ingestion/tests/test_helpers/graph_helpers.py b/metadata-ingestion/tests/test_helpers/graph_helpers.py index 4c2c46c2f97ce..2e73f5e2c6cdb 100644 --- a/metadata-ingestion/tests/test_helpers/graph_helpers.py +++ b/metadata-ingestion/tests/test_helpers/graph_helpers.py @@ -1,6 +1,5 @@ -from datetime import datetime from pathlib import Path -from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, Union +from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union from datahub.emitter.mce_builder import Aspect from datahub.emitter.mcp import MetadataChangeProposalWrapper @@ -22,7 +21,9 @@ class MockDataHubGraph(DataHubGraph): - def __init__(self, entity_graph: Dict[str, Dict[str, Any]] = {}) -> None: + def __init__( + self, entity_graph: Optional[Dict[str, Dict[str, Any]]] = None + ) -> None: self.emitted: List[ Union[ MetadataChangeEvent, @@ -30,7 +31,7 @@ def __init__(self, entity_graph: Dict[str, Dict[str, Any]] = {}) -> None: MetadataChangeProposalWrapper, ] ] = [] - self.entity_graph = entity_graph + self.entity_graph = entity_graph or {} def import_file(self, file: Path) -> None: """Imports metadata from any MCE/MCP file. Does not clear prior loaded data. 
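The `MockDataHubGraph` change above replaces a shared mutable default argument with `None`. A minimal, self-contained illustration of the pitfall it avoids:

```python
from typing import Any, Dict, Optional


def broken(entity_graph: Dict[str, Any] = {}) -> Dict[str, Any]:
    # The default dict is created once at definition time and shared by every call.
    entity_graph["seen"] = True
    return entity_graph


def fixed(entity_graph: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    entity_graph = entity_graph or {}  # fresh dict per call, as in the fix above
    entity_graph["seen"] = True
    return entity_graph


assert broken() is broken()  # the same shared object leaks across calls
assert fixed() is not fixed()  # each call gets an independent dict
```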
@@ -110,9 +111,8 @@ def emit(
             UsageAggregationClass,
         ],
         callback: Union[Callable[[Exception, str], None], None] = None,
-    ) -> Tuple[datetime, datetime]:
+    ) -> None:
         self.emitted.append(item)  # type: ignore
-        return (datetime.now(), datetime.now())

     def emit_mce(self, mce: MetadataChangeEvent) -> None:
         self.emitted.append(mce)
diff --git a/metadata-ingestion/tests/unit/test_rest_emitter.py b/metadata-ingestion/tests/unit/test_rest_emitter.py
index e56cbd2c41c6b..b4d7cb17b66f5 100644
--- a/metadata-ingestion/tests/unit/test_rest_emitter.py
+++ b/metadata-ingestion/tests/unit/test_rest_emitter.py
@@ -20,6 +20,12 @@ def test_datahub_rest_emitter_timeout_construction():
     assert emitter._read_timeout_sec == 4


+def test_datahub_rest_emitter_general_timeout_construction():
+    emitter = DatahubRestEmitter(MOCK_GMS_ENDPOINT, timeout_sec=2, read_timeout_sec=4)
+    assert emitter._connect_timeout_sec == 2
+    assert emitter._read_timeout_sec == 4
+
+
 def test_datahub_rest_emitter_retry_construction():
     emitter = DatahubRestEmitter(
         MOCK_GMS_ENDPOINT,

From 4d9eb12cba3a36ca30a7b07fea9aeb6a13443522 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Fri, 1 Dec 2023 04:03:10 -0500
Subject: [PATCH 180/792] feat(ingest/dbt): support custom ownership types in
 dbt meta (#9332)

---
 metadata-ingestion/docs/sources/dbt/dbt.md    |  2 +-
 .../src/datahub/utilities/mapping.py          | 10 ++++++++-
 metadata-ingestion/tests/unit/test_mapping.py | 22 +++++++++++++++++--
 3 files changed, 30 insertions(+), 4 deletions(-)

diff --git a/metadata-ingestion/docs/sources/dbt/dbt.md b/metadata-ingestion/docs/sources/dbt/dbt.md
index 43ced13c3b1f8..6cc8772871c2f 100644
--- a/metadata-ingestion/docs/sources/dbt/dbt.md
+++ b/metadata-ingestion/docs/sources/dbt/dbt.md
@@ -62,7 +62,7 @@ We support the following operations:
 1. add_tag - Requires `tag` property in config.
 2. add_term - Requires `term` property in config.
 3. add_terms - Accepts an optional `separator` property in config.
-4. add_owner - Requires `owner_type` property in config which can be either user or group. Optionally accepts the `owner_category` config property which you can set to one of `['TECHNICAL_OWNER', 'BUSINESS_OWNER', 'DATA_STEWARD', 'DATAOWNER']` (defaults to `DATAOWNER`).
+4. add_owner - Requires `owner_type` property in config which can be either user or group. Optionally accepts the `owner_category` config property which can be set to either a [custom ownership type](../../../../docs/ownership/ownership-types.md) urn like `urn:li:ownershipType:architect` or one of `['TECHNICAL_OWNER', 'BUSINESS_OWNER', 'DATA_STEWARD', 'DATAOWNER']` (defaults to `DATAOWNER`); see the sketch after this list.
 5. add_doc_link - Requires `link` and `description` properties in config. Upon ingestion run, this will overwrite current links in the institutional knowledge section with this new link. The anchor text is defined here in the meta_mappings as `description`.
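For illustration, a minimal sketch of a meta mapping that assigns a custom ownership type, modeled on the new test case in this patch; the `architect` property name and the urn value are assumptions:

```python
from datahub.utilities.mapping import OperationProcessor

processor = OperationProcessor(
    operation_defs={
        "architect": {  # hypothetical dbt meta property
            "match": ".*",
            "operation": "add_owner",
            "config": {
                "owner_type": "user",
                # Either a standard category like "BUSINESS_OWNER" or, as of this
                # change, a custom ownership type urn:
                "owner_category": "urn:li:ownershipType:architect",
            },
        },
    },
    owner_source_type="SOURCE_CONTROL",
)
aspect_map = processor.process({"architect": "bob"})
ownership = aspect_map["add_owner"]  # Ownership aspect with typeUrn set on the owner
```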
Note: diff --git a/metadata-ingestion/src/datahub/utilities/mapping.py b/metadata-ingestion/src/datahub/utilities/mapping.py index f91c01d901ac1..00f7d370d1676 100644 --- a/metadata-ingestion/src/datahub/utilities/mapping.py +++ b/metadata-ingestion/src/datahub/utilities/mapping.py @@ -191,6 +191,7 @@ def convert_to_aspects( OwnerClass( owner=x.get("urn"), type=x.get("category"), + typeUrn=x.get("categoryUrn"), source=OwnershipSourceClass(type=self.owner_source_type) if self.owner_source_type else None, @@ -281,18 +282,25 @@ def get_operation_value( operation_config.get(Constants.OWNER_CATEGORY) or OwnershipTypeClass.DATAOWNER ) - owner_category = owner_category.upper() + owner_category_urn = None + if owner_category.startswith("urn:li:"): + owner_category_urn = owner_category + owner_category = OwnershipTypeClass.DATAOWNER + else: + owner_category = owner_category.upper() if self.strip_owner_email_id: owner_id = self.sanitize_owner_ids(owner_id) if operation_config[Constants.OWNER_TYPE] == Constants.USER_OWNER: return { "urn": mce_builder.make_owner_urn(owner_id, OwnerType.USER), "category": owner_category, + "categoryUrn": owner_category_urn, } elif operation_config[Constants.OWNER_TYPE] == Constants.GROUP_OWNER: return { "urn": mce_builder.make_owner_urn(owner_id, OwnerType.GROUP), "category": owner_category, + "categoryUrn": owner_category_urn, } elif ( operation_type == Constants.ADD_TERM_OPERATION diff --git a/metadata-ingestion/tests/unit/test_mapping.py b/metadata-ingestion/tests/unit/test_mapping.py index 5c258f16535f8..de35451c9ec4b 100644 --- a/metadata-ingestion/tests/unit/test_mapping.py +++ b/metadata-ingestion/tests/unit/test_mapping.py @@ -174,7 +174,11 @@ def test_operation_processor_advanced_matching_owners(): def test_operation_processor_ownership_category(): - raw_props = {"user_owner": "@test_user", "business_owner": "alice"} + raw_props = { + "user_owner": "@test_user", + "business_owner": "alice", + "architect": "bob", + } processor = OperationProcessor( operation_defs={ "user_owner": { @@ -193,6 +197,14 @@ def test_operation_processor_ownership_category(): "owner_category": OwnershipTypeClass.BUSINESS_OWNER, }, }, + "architect": { + "match": ".*", + "operation": "add_owner", + "config": { + "owner_type": "user", + "owner_category": "urn:li:ownershipType:architect", + }, + }, }, owner_source_type="SOURCE_CONTROL", ) @@ -200,7 +212,7 @@ def test_operation_processor_ownership_category(): assert "add_owner" in aspect_map ownership_aspect: OwnershipClass = aspect_map["add_owner"] - assert len(ownership_aspect.owners) == 2 + assert len(ownership_aspect.owners) == 3 new_owner: OwnerClass = ownership_aspect.owners[0] assert new_owner.owner == "urn:li:corpGroup:test_user" assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" @@ -211,6 +223,12 @@ def test_operation_processor_ownership_category(): assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" assert new_owner.type and new_owner.type == OwnershipTypeClass.BUSINESS_OWNER + new_owner = ownership_aspect.owners[2] + assert new_owner.owner == "urn:li:corpuser:bob" + assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" + assert new_owner.type == OwnershipTypeClass.DATAOWNER # dummy value + assert new_owner.typeUrn == "urn:li:ownershipType:architect" + def test_operation_processor_advanced_matching_tags(): raw_props = { From 82f375ded6c98160ad9edbe6488cbc16b2a01d22 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 04:03:20 -0500 Subject: [PATCH 181/792] 
docs(ingest/lookml): clarify that ssh key has no passphrase (#9348)

---
 docs/quick-ingestion-guides/looker/setup.md          | 3 ++-
 metadata-ingestion/docs/sources/looker/lookml_pre.md | 2 +-
 metadata-ingestion/src/datahub/configuration/git.py  | 4 +++-
 3 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/docs/quick-ingestion-guides/looker/setup.md b/docs/quick-ingestion-guides/looker/setup.md
index c08de116895ea..81c2c9e4ba08c 100644
--- a/docs/quick-ingestion-guides/looker/setup.md
+++ b/docs/quick-ingestion-guides/looker/setup.md
@@ -129,7 +129,8 @@ Follow the below steps to create the GitHub Deploy Key.
 ### Generate a private-public SSH key pair

 ```bash
- ssh-keygen -t rsa -f looker_datahub_deploy_key
+ssh-keygen -t rsa -f looker_datahub_deploy_key
+# If prompted, don't add a passphrase to the key
 ```

 This will typically generate two files like the ones below.
diff --git a/metadata-ingestion/docs/sources/looker/lookml_pre.md b/metadata-ingestion/docs/sources/looker/lookml_pre.md
index d78a30fe6ec37..68a4828a5ce2a 100644
--- a/metadata-ingestion/docs/sources/looker/lookml_pre.md
+++ b/metadata-ingestion/docs/sources/looker/lookml_pre.md
@@ -6,7 +6,7 @@ To use LookML ingestion through the UI, or automate github checkout through the

 In a nutshell, there are three steps:

-1. Generate a private-public ssh key pair. This will typically generate two files, e.g. looker_datahub_deploy_key (this is the private key) and looker_datahub_deploy_key.pub (this is the public key)
+1. Generate a private-public ssh key pair. This will typically generate two files, e.g. looker_datahub_deploy_key (this is the private key) and looker_datahub_deploy_key.pub (this is the public key). Do not add a passphrase.
 ![Image](https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/gitssh/ssh-key-generation.png)

 2. Add the public key to your Looker git repo as a deploy key with read access (no need to provision write access). Follow the guide [here](https://docs.github.com/en/developers/overview/managing-deploy-keys#deploy-keys) for that.
diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py
index 0c7d64d4aafcf..80eb41c100b10 100644
--- a/metadata-ingestion/src/datahub/configuration/git.py
+++ b/metadata-ingestion/src/datahub/configuration/git.py
@@ -77,7 +77,9 @@ class GitInfo(GitReference):

     deploy_key_file: Optional[FilePath] = Field(
         None,
-        description="A private key file that contains an ssh key that has been configured as a deploy key for this repository. Use a file where possible, else see deploy_key for a config field that accepts a raw string.",
+        description="A private key file that contains an ssh key that has been configured as a deploy key for this repository. "
+        "Use a file where possible, else see deploy_key for a config field that accepts a raw string. "
+        "We expect the key to not have a passphrase.",
     )
     deploy_key: Optional[SecretStr] = Field(
         None,

From 3142efcad5a06c06d5546b05b7f259c1eba109c5 Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Fri, 1 Dec 2023 14:55:26 +0530
Subject: [PATCH 182/792] fix(migrate): connect with token without dry-run
 (#9317)

---
 metadata-ingestion/src/datahub/cli/migrate.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/metadata-ingestion/src/datahub/cli/migrate.py b/metadata-ingestion/src/datahub/cli/migrate.py
index e83a8ed8feaad..30f82987a6b65 100644
--- a/metadata-ingestion/src/datahub/cli/migrate.py
+++ b/metadata-ingestion/src/datahub/cli/migrate.py
@@ -23,7 +23,7 @@
     SchemaKey,
 )
 from datahub.emitter.rest_emitter import DatahubRestEmitter
-from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig
+from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
 from datahub.metadata.schema_classes import (
     ContainerKeyClass,
     ContainerPropertiesClass,
@@ -141,13 +141,7 @@ def dataplatform2instance_func(
     migration_report = MigrationReport(run_id, dry_run, keep)
     system_metadata = SystemMetadataClass(runId=run_id)

-    # initialize for dry-run
-    graph = DataHubGraph(config=DataHubGraphConfig(server="127.0.0.1"))
-
-    if not dry_run:
-        graph = DataHubGraph(
-            config=DataHubGraphConfig(server=cli_utils.get_session_and_host()[1])
-        )
+    graph = get_default_graph()

     urns_to_migrate = []

From 864d3dfa16b6abbb09361f52112dbb4b95bf6775 Mon Sep 17 00:00:00 2001
From: John Joyce
Date: Fri, 1 Dec 2023 09:18:07 -0800
Subject: [PATCH 183/792] fix(ui): Minor: fix unnecessary lineage tab scroll by
 removing -1 margin on lists (#9364)

---
 .../src/app/entity/shared/tabs/Entity/components/EntityList.tsx | 1 -
 .../app/recommendations/renderer/component/EntityNameList.tsx   | 1 -
 2 files changed, 2 deletions(-)

diff --git a/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx
index 758b070864a9a..3a9061fd97d6e 100644
--- a/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx
+++ b/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx
@@ -8,7 +8,6 @@ import { EntityType } from '../../../../../../types.generated';
 const StyledList = styled(List)`
     padding-left: 40px;
     padding-right: 40px;
-    margin-top: -1px;
     .ant-list-items > .ant-list-item {
         padding-right: 0px;
         padding-left: 0px;
diff --git a/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx b/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx
index 4ff78e64625b1..9e8454ae22317 100644
--- a/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx
+++ b/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx
@@ -11,7 +11,6 @@ import { capitalizeFirstLetterOnly } from '../../../shared/textUtil';
 export const StyledList = styled(List)`
     overflow-y: auto;
     height: 100%;
-    margin-top: -1px;
     box-shadow: ${(props) => props.theme.styles['box-shadow']};
     flex: 1;
     .ant-list-items > .ant-list-item {

From 36c7813f89b1f20898e07f24c5f209f5c57947d7 Mon Sep 17 00:00:00 2001
From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com>
Date: Fri, 1 Dec 2023 23:18:39 +0530
Subject: [PATCH 184/792] feat(ui): Support dynamic entity profile tab names
 (#9352)

---
 .../app/entity/shared/containers/profile/EntityProfile.tsx  | 1 +
 .../entity/shared/containers/profile/header/EntityTabs.tsx  | 5 +++--
 datahub-web-react/src/app/entity/shared/types.ts            | 1 +
 3 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx
index 74c127cb05dd9..d7b7a4da804ef 100644
--- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx
@@ -238,6 +238,7 @@ export const EntityProfile = ({
                 visible: () => true,
                 enabled: () => true,
             },
+            getDynamicName: () => '',
         })) || [];

     const visibleTabs = [...sortedTabs, ...autoRenderTabs].filter((tab) =>
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx
index ea5c263ef7abc..096f1db617d92 100644
--- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx
@@ -44,10 +44,11 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => {
             onTabClick={(tab: string) => routeToTab({ tabName: tab })}
         >
             {tabs.map((tab) => {
+                const tabName = (tab.getDynamicName && tab.getDynamicName(entityData, baseEntity)) || tab.name;
                 if (!tab.display?.enabled(entityData, baseEntity)) {
-                    return ;
+                    return ;
                 }
-                return ;
+                return ;
             })}
         
     );
diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts
index 6596711d4e82a..ae8ab747f7cb6 100644
--- a/datahub-web-react/src/app/entity/shared/types.ts
+++ b/datahub-web-react/src/app/entity/shared/types.ts
@@ -50,6 +50,7 @@ export type EntityTab = {
     };
     properties?: any;
     id?: string;
+    getDynamicName?: (GenericEntityProperties, T) => string;
 };

 export type EntitySidebarSection = {

From 7b0a8f422b02c47ffb4fe2ddd5f61c7230de0c03 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Fri, 1 Dec 2023 14:23:11 -0500
Subject: [PATCH 185/792] docs: add setup instructions for mac dependencies
 (#9346)

Co-authored-by: Hyejin Yoon <0327jane@gmail.com>
---
 docs/developers.md | 140 ++++++++++++++++++++++++++++-----------------
 1 file changed, 89 insertions(+), 51 deletions(-)

diff --git a/docs/developers.md b/docs/developers.md
index 52fd7d356a44c..c3c3a59283e66 100644
--- a/docs/developers.md
+++ b/docs/developers.md
@@ -4,33 +4,53 @@ title: "Local Development"

 # DataHub Developer's Guide

-## Pre-requirements
- - [Java 11 SDK](https://openjdk.org/projects/jdk/11/)
- - [Python 3.10] (https://www.python.org/downloads/release/python-3100/)
- - [Docker](https://www.docker.com/)
- - [Docker Compose](https://docs.docker.com/compose/)
- - Docker engine with at least 8GB of memory to run tests.
+## Requirements

- :::note
+- Both [Java 11 JDK](https://openjdk.org/projects/jdk/11/) and [Java 8 JDK](https://openjdk.java.net/projects/jdk8/)
+- [Python 3.10](https://www.python.org/downloads/release/python-3100/)
+- [Docker](https://www.docker.com/)
+- [Docker Compose](https://docs.docker.com/compose/)
+- Docker engine with at least 8GB of memory to run tests.

- Do not try to use a JDK newer than JDK 11. The build process does not work with newer JDKs currently.
+:::caution

- :::
+Do not try to use a JDK newer than JDK 11. The build process does not currently work with newer JDK versions.
+
+:::
+
+On macOS, these can be installed using [Homebrew](https://brew.sh/).
+
+```shell
+# Install Java 8 and 11
+brew tap homebrew/cask-versions
+brew install java11
+brew install --cask zulu8
+
+# Install Python
+brew install python@3.10 # you may need to add this to your PATH
+# alternatively, you can use pyenv to manage your python versions
+
+# Install docker and docker compose
+brew install --cask docker
+```

 ## Building the Project

 Fork and clone the repository if you haven't done so already
-```
+
+```shell
 git clone https://github.com/{username}/datahub.git
 ```

 Change into the repository's root directory
-```
+
+```shell
 cd datahub
 ```

 Use [gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.html) to build the project
-```
+
+```shell
 ./gradlew build
 ```
@@ -38,29 +58,37 @@ Note that the above will also run tests and a number of validations which ma

 We suggest partially compiling DataHub according to your needs:

- - Build Datahub's backend GMS (Generalized metadata service):
-```
-./gradlew :metadata-service:war:build
-```
- - Build Datahub's frontend:
-```
-./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint
-```
- - Build DataHub's command line tool:
-```
-./gradlew :metadata-ingestion:installDev
-```
- - Build DataHub's documentation:
-```
-./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript
-# To preview the documentation
-./gradlew :docs-website:serve
-```
+- Build Datahub's backend GMS (Generalized metadata service):
+
+  ```
+  ./gradlew :metadata-service:war:build
+  ```
+
+- Build Datahub's frontend:
+
+  ```
+  ./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint
+  ```
+
+- Build DataHub's command line tool:
+
+  ```
+  ./gradlew :metadata-ingestion:installDev
+  ```
+
+- Build DataHub's documentation:
+
+  ```
+  ./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript
+  # To preview the documentation
+  ./gradlew :docs-website:serve
+  ```

 ## Deploying Local Versions

 Run just once to have the local `datahub` cli tool installed in your $PATH
-```
+
+```shell
 cd smoke-test/
 python3 -m venv venv
 source venv/bin/activate
 pip install --upgrade pip wheel setuptools
 pip install -r requirements.txt
+ +```shell +# Install Java 8 and 11 +brew tap homebrew/cask-versions +brew install java11 +brew install --cask zulu8 + +# Install Python +brew install python@3.10 # you may need to add this to your PATH +# alternatively, you can use pyenv to manage your python versions + +# Install docker and docker compose +brew install --cask docker +``` ## Building the Project Fork and clone the repository if haven't done so already -``` + +```shell git clone https://github.com/{username}/datahub.git ``` Change into the repository's root directory -``` + +```shell cd datahub ``` Use [gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.html) to build the project -``` + +```shell ./gradlew build ``` @@ -38,29 +58,37 @@ Note that the above will also run run tests and a number of validations which ma We suggest partially compiling DataHub according to your needs: - - Build Datahub's backend GMS (Generalized metadata service): -``` -./gradlew :metadata-service:war:build -``` - - Build Datahub's frontend: -``` -./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint -``` - - Build DataHub's command line tool: -``` -./gradlew :metadata-ingestion:installDev -``` - - Build DataHub's documentation: -``` -./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript -# To preview the documentation -./gradlew :docs-website:serve -``` +- Build Datahub's backend GMS (Generalized metadata service): + + ``` + ./gradlew :metadata-service:war:build + ``` + +- Build Datahub's frontend: + + ``` + ./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint + ``` + +- Build DataHub's command line tool: -## Deploying local versions + ``` + ./gradlew :metadata-ingestion:installDev + ``` + +- Build DataHub's documentation: + + ``` + ./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript + # To preview the documentation + ./gradlew :docs-website:serve + ``` + +## Deploying Local Versions Run just once to have the local `datahub` cli tool installed in your $PATH -``` + +```shell cd smoke-test/ python3 -m venv venv source venv/bin/activate @@ -70,34 +98,40 @@ cd ../ ``` Once you have compiled & packaged the project or appropriate module you can deploy the entire system via docker-compose by running: -``` + +```shell ./gradlew quickstart ``` Replace whatever container you want in the existing deployment. I.e, replacing datahub's backend (GMS): -``` + +```shell (cd docker && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub -f docker-compose-without-neo4j.yml -f docker-compose-without-neo4j.override.yml -f docker-compose.dev.yml up -d --no-deps --force-recreate --build datahub-gms) ``` Running the local version of the frontend -``` + +```shell (cd docker && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub -f docker-compose-without-neo4j.yml -f docker-compose-without-neo4j.override.yml -f docker-compose.dev.yml up -d --no-deps --force-recreate --build datahub-frontend-react) ``` + ## IDE Support -The recommended IDE for DataHub development is [IntelliJ IDEA](https://www.jetbrains.com/idea/). -You can run the following command to generate or update the IntelliJ project file -``` + +The recommended IDE for DataHub development is [IntelliJ IDEA](https://www.jetbrains.com/idea/). +You can run the following command to generate or update the IntelliJ project file. + +```shell ./gradlew idea ``` + Open `datahub.ipr` in IntelliJ to start developing! 
 For consistency please import and auto format the code using [LinkedIn IntelliJ Java style](../gradle/idea/LinkedIn%20Style.xml).

-
 ## Windows Compatibility

 For optimal performance and compatibility, we strongly recommend building on a Mac or Linux system.
 Please note that we do not actively support Windows in a non-virtualized environment.

 If you must use Windows, one workaround is to build within a virtualized environment, such as a VM (Virtual Machine) or [WSL (Windows Subsystem for Linux)](https://learn.microsoft.com/en-us/windows/wsl).
 This approach can help ensure that your build environment remains isolated and stable.

 ## Common Build Issues

-### Getting `Unsupported class file major version 57`
+#### Getting `Unsupported class file major version 57`

 You're probably using a Java version that's too new for gradle. Run the following command to check your Java version
-```
+
+```shell
 java --version
 ```
+
 While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/).

-### Getting `cannot find symbol` error for `javax.annotation.Generated`
+#### Getting `cannot find symbol` error for `javax.annotation.Generated`

 Similar to the previous issue, please use Java 1.8 to build the project.
 You can install multiple versions of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.

-### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error
+#### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error

 This is a [known issue](https://github.com/linkedin/rest.li/issues/287) when building the project on Windows due to a bug in the Pegasus plugin. Please refer to [Windows Compatibility](/docs/developers.md#windows-compatibility).

-### Various errors related to `generateDataTemplate` or other `generate` tasks
+#### Various errors related to `generateDataTemplate` or other `generate` tasks

 As we generate quite a few files from the models, it is possible that old generated files may conflict with new model changes. When this happens, a simple `./gradlew clean` should resolve the issue.

-### `Execution failed for task ':metadata-service:restli-servlet-impl:checkRestModel'`
+#### `Execution failed for task ':metadata-service:restli-servlet-impl:checkRestModel'`

 This generally means that an [incompatible change](https://linkedin.github.io/rest.li/modeling/compatibility_check) was introduced to the rest.li API in GMS.
You'll need to rebuild the snapshots/IDL by running the following command once -``` + +```shell ./gradlew :metadata-service:restli-servlet-impl:build -Prest.model.compatibility=ignore ``` -### `java.io.IOException: No space left on device` +#### `java.io.IOException: No space left on device` This means you're running out of space on your disk to build. Please free up some space or try a different disk. -### `Build failed` for task `./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint` +#### `Build failed` for task `./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint` + This could mean that you need to update your [Yarn](https://yarnpkg.com/getting-started/install) version From f3abfd175e1c142750686b3c8f7b08acadd83a4d Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 13:21:28 -0800 Subject: [PATCH 186/792] feat(ui): Add caching to search, entity profile for better UX (#9362) --- datahub-web-react/src/Mocks.tsx | 149 +++++++++++++++--- .../styled/search/EmbeddedListSearch.tsx | 1 + .../search/EmbeddedListSearchResults.tsx | 11 +- .../containers/profile/header/EntityTabs.tsx | 1 + .../profile/useGetDataForProfile.ts | 1 + ...rateUseSearchResultsViaRelationshipHook.ts | 1 + .../src/app/search/SearchPage.tsx | 2 + .../src/app/search/SearchResultList.tsx | 4 +- .../src/app/search/SearchResults.tsx | 11 +- .../search/SearchResultsLoadingSection.tsx | 33 ++++ .../app/search/__tests__/SearchPage.test.tsx | 95 ++--------- .../src/app/search/filters/BasicFilters.tsx | 4 + .../filters/BasicFiltersLoadingSection.tsx | 27 ++++ .../src/app/search/filters/SearchFilters.tsx | 20 ++- 14 files changed, 241 insertions(+), 119 deletions(-) create mode 100644 datahub-web-react/src/app/search/SearchResultsLoadingSection.tsx create mode 100644 datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index a2e14308e8cee..ada9a06ab5b95 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -41,10 +41,12 @@ import { FetchedEntity } from './app/lineage/types'; import { DEFAULT_APP_CONFIG } from './appConfigContext'; export const user1 = { + __typename: 'CorpUser', username: 'sdas', urn: 'urn:li:corpuser:1', type: EntityType.CorpUser, info: { + __typename: 'CorpUserInfo', email: 'sdas@domain.com', active: true, displayName: 'sdas', @@ -53,18 +55,19 @@ export const user1 = { lastName: 'Das', fullName: 'Shirshanka Das', }, - editableInfo: { - pictureLink: 'https://crunchconf.com/img/2019/speakers/1559291783-ShirshankaDas.png', - }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -74,14 +77,23 @@ export const user1 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + properties: null, + editableProperties: null, }; const user2 = { + __typename: 'CorpUser', username: 'john', urn: 'urn:li:corpuser:3', type: EntityType.CorpUser, - info: { + properties: { + __typename: 'CorpUserInfo', email: 
'john@domain.com', active: true, displayName: 'john', @@ -90,25 +102,41 @@ const user2 = { lastName: 'Joyce', fullName: 'John Joyce', }, - editableInfo: { - pictureLink: null, - }, editableProperties: { displayName: 'Test', title: 'test', pictureLink: null, teams: [], skills: [], + __typename: 'CorpUserEditableProperties', + email: 'john@domain.com', + }, + groups: { + __typename: 'EntityRelationshipsResult', + relationships: [ + { + __typename: 'EntityRelationship', + entity: { + __typename: 'CorpGroup', + urn: 'urn:li:corpgroup:group1', + name: 'group1', + properties: null, + }, + }, + ], }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -118,7 +146,13 @@ const user2 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + info: null, }; export const dataPlatform = { @@ -149,6 +183,7 @@ export const dataPlatformInstance = { }; export const dataset1 = { + __typename: 'Dataset', urn: 'urn:li:dataset:1', type: EntityType.Dataset, platform: { @@ -260,6 +295,7 @@ export const dataset1 = { }; export const dataset2 = { + __typename: 'Dataset', urn: 'urn:li:dataset:2', type: EntityType.Dataset, platform: { @@ -358,17 +394,23 @@ export const dataset3 = { urn: 'urn:li:dataset:3', type: EntityType.Dataset, platform: { + __typename: 'DataPlatform', urn: 'urn:li:dataPlatform:kafka', name: 'Kafka', + displayName: 'Kafka', info: { + __typename: 'DataPlatformInfo', displayName: 'Kafka', type: PlatformType.MessageBroker, datasetNameDelimiter: '.', logoUrl: '', }, type: EntityType.DataPlatform, + lastIngested: null, + properties: null, }, privileges: { + __typename: 'EntityPrivileges', canEditLineage: false, canEditEmbed: false, canEditQueries: false, @@ -381,54 +423,78 @@ export const dataset3 = { origin: 'PROD', uri: 'www.google.com', properties: { + __typename: 'DatasetProperties', name: 'Yet Another Dataset', + qualifiedName: 'Yet Another Dataset', description: 'This and here we have yet another Dataset (YAN). 
Are there more?', origin: 'PROD', - customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:3' }], + customProperties: [ + { + __typename: 'CustomPropertiesEntry', + key: 'propertyAKey', + value: 'propertyAValue', + associatedUrn: 'urn:li:dataset:3', + }, + ], externalUrl: 'https://data.hub', }, parentContainers: { + __typename: 'ParentContainersResult', count: 0, containers: [], }, editableProperties: null, created: { + __typename: 'AuditStamp', time: 0, + actor: null, }, lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, ownership: { + __typename: 'Ownership', owners: [ { + __typename: 'Owner', owner: { ...user1, }, type: 'DATAOWNER', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, { + __typename: 'Owner', owner: { ...user2, }, type: 'DELEGATE', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, ], lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, }, globalTags: { __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -439,14 +505,18 @@ export const dataset3 = { ], }, glossaryTerms: { + __typename: 'GlossaryTerms', terms: [ { + __typename: 'GlossaryTermAssociation', term: { + __typename: 'GlossaryTerm', type: EntityType.GlossaryTerm, urn: 'urn:li:glossaryTerm:sample-glossary-term', name: 'sample-glossary-term', hierarchicalName: 'example.sample-glossary-term', properties: { + __typename: 'GlossaryTermProperties', name: 'sample-glossary-term', description: 'sample definition', definition: 'sample definition', @@ -463,13 +533,21 @@ export const dataset3 = { incoming: null, outgoing: null, institutionalMemory: { + __typename: 'InstitutionalMemory', elements: [ { + __typename: 'InstitutionalMemoryMetadata', url: 'https://www.google.com', - author: { urn: 'urn:li:corpuser:datahub', username: 'datahub', type: EntityType.CorpUser }, + author: { + __typename: 'CorpUser', + urn: 'urn:li:corpuser:datahub', + username: 'datahub', + type: EntityType.CorpUser, + }, description: 'This only points to Google', label: 'This only points to Google', created: { + __typename: 'AuditStamp', actor: 'urn:li:corpuser:1', time: 1612396473001, }, @@ -482,12 +560,14 @@ export const dataset3 = { operations: null, datasetProfiles: [ { + __typename: 'DatasetProfile', rowCount: 10, columnCount: 5, sizeInBytes: 10000, timestampMillis: 0, fieldProfiles: [ { + __typename: 'DatasetFieldProfile', fieldPath: 'testColumn', uniqueCount: 1, uniqueProportion: 0.129, @@ -507,6 +587,7 @@ export const dataset3 = { viewProperties: null, autoRenderAspects: [ { + __typename: 'AutoRenderAspect', aspectName: 'autoRenderAspect', payload: '{ "values": [{ "autoField1": "autoValue1", "autoField2": "autoValue2" }] }', renderSpec: { @@ -529,7 +610,11 @@ export const dataset3 = { siblings: null, statsSummary: null, embed: null, - browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + browsePathV2: { __typename: 'BrowsePathV2', path: [{ name: 'test', entity: null }] }, + access: null, + dataProduct: null, + lastProfile: null, + lastOperation: null, } as Dataset; export const dataset3WithSchema = { @@ -1839,7 +1924,6 @@ export const mocks = [ browse: { entities: [ { - __typename: 'Dataset', ...dataset1, }, ], @@ -1986,7 +2070,6 @@ export 
const mocks = [ searchResults: [ { entity: { - __typename: 'Dataset', ...dataset1, }, matchedFields: [ @@ -1999,7 +2082,6 @@ export const mocks = [ }, { entity: { - __typename: 'Dataset', ...dataset2, }, }, @@ -2075,6 +2157,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2248,6 +2331,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2259,10 +2343,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -2270,6 +2356,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2278,12 +2365,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -2829,6 +2917,7 @@ export const mocks = [ // ], // }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2908,6 +2997,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3205,6 +3295,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3216,10 +3307,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -3227,6 +3320,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3235,12 +3329,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3290,6 +3385,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3301,6 +3397,7 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ @@ -3308,10 +3405,12 @@ export const mocks = [ value: 'PROD', count: 3, entity: null, + __typename: 'AggregationMetadata', }, ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3320,12 +3419,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, 
entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3367,6 +3467,7 @@ export const mocks = [ __typename: 'AuthenticatedUser', corpUser: { ...user2 }, platformPrivileges: { + __typename: 'PlatformPrivileges', viewAnalytics: true, managePolicies: true, manageIdentities: true, diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx index e27a63b98f012..26228e8c44515 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx @@ -188,6 +188,7 @@ export const EmbeddedListSearch = ({ variables: { input: searchInput, }, + fetchPolicy: 'cache-first', }); useEffect(() => { diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index e4d43f34dcba7..1daf2a4c59b70 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -1,14 +1,15 @@ import React from 'react'; import { Pagination, Typography } from 'antd'; +import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; import { SearchCfg } from '../../../../../../conf'; -import { ReactComponent as LoadingSvg } from '../../../../../../images/datahub-logo-color-loading_pendulum.svg'; import { EntityAndType } from '../../../types'; import { UnionType } from '../../../../../search/utils/constants'; import { SearchFiltersSection } from '../../../../../search/SearchFiltersSection'; import { EntitySearchResults, EntityActionProps } from './EntitySearchResults'; import MatchingViewsLabel from './MatchingViewsLabel'; +import { ANTD_GRAY } from '../../../constants'; const SearchBody = styled.div` height: 100%; @@ -59,6 +60,12 @@ const LoadingContainer = styled.div` flex: 1; `; +const StyledLoading = styled(LoadingOutlined)` + font-size: 36px; + color: ${ANTD_GRAY[7]}; + padding-bottom: 18px; +]`; + interface Props { page: number; searchResponse?: SearchResultType | null; @@ -121,7 +128,7 @@ export const EmbeddedListSearchResults = ({ {loading && ( - + )} {!loading && ( diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index 096f1db617d92..58693eca8af0e 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -39,6 +39,7 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { return ( routeToTab({ tabName: tab })} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts b/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts index 5a7d4f24dfd2a..ae87eeb1a8450 100644 
--- a/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts +++ b/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts @@ -32,6 +32,7 @@ export default function useGetDataForProfile({ urn, entityType, useEntityQuer refetch, } = useEntityQuery({ variables: { urn }, + fetchPolicy: 'cache-first', }); const dataPossiblyCombinedWithSiblings = isHideSiblingMode diff --git a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts index f3b904956b224..e26aa01c385e8 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts +++ b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts @@ -45,6 +45,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ variables: { input: inputFields, }, + fetchPolicy: 'cache-first', skip: !filtersExist(filters, orFilters), // If you don't include any filters, we shound't return anything :). Might as well skip! }); diff --git a/datahub-web-react/src/app/search/SearchPage.tsx b/datahub-web-react/src/app/search/SearchPage.tsx index 6387f0ef8c05e..541355a3e2cb4 100644 --- a/datahub-web-react/src/app/search/SearchPage.tsx +++ b/datahub-web-react/src/app/search/SearchPage.tsx @@ -62,6 +62,7 @@ export const SearchPage = () => { searchFlags: { getSuggestions: true }, }, }, + fetchPolicy: 'cache-and-network', }); const total = data?.searchAcrossEntities?.total || 0; @@ -217,6 +218,7 @@ export const SearchPage = () => { )} {showSearchFiltersV2 && ( ` `; type Props = { + loading: boolean; query: string; searchResults: CombinedSearchResult[]; totalResultCount: number; @@ -64,6 +65,7 @@ type Props = { }; export const SearchResultList = ({ + loading, query, searchResults, totalResultCount, @@ -104,7 +106,7 @@ export const SearchResultList = ({ id="search-result-list" dataSource={searchResults} split={false} - locale={{ emptyText: }} + locale={{ emptyText: (!loading && ) || <> }} renderItem={(item, index) => ( ` display: flex; @@ -109,6 +109,7 @@ const SearchResultListContainer = styled.div<{ v2Styles: boolean }>` `; interface Props { + loading: boolean; unionType?: UnionType; query: string; viewUrn?: string; @@ -124,7 +125,6 @@ interface Props { } | null; facets?: Array | null; selectedFilters: Array; - loading: boolean; error: any; onChangeFilters: (filters: Array) => void; onChangeUnionType: (unionType: UnionType) => void; @@ -142,6 +142,7 @@ interface Props { } export const SearchResults = ({ + loading, unionType = UnionType.AND, query, viewUrn, @@ -149,7 +150,6 @@ export const SearchResults = ({ searchResponse, facets, selectedFilters, - loading, error, onChangeUnionType, onChangeFilters, @@ -180,7 +180,6 @@ export const SearchResults = ({ return ( <> - {loading && } {!showSearchFiltersV2 && ( @@ -247,10 +246,12 @@ export const SearchResults = ({ )} {(error && ) || - (!loading && ( + (loading && !combinedSiblingSearchResults.length && ) || + (combinedSiblingSearchResults && ( {totalResults > 0 && } + + + + + + + ); +} diff --git a/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx b/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx index 0111a264d1e17..5d921c82913ac 100644 --- a/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx +++ b/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx @@ -1,42 +1,23 @@ import 
React from 'react'; -import { act } from 'react-dom/test-utils'; -import { fireEvent, render, waitFor } from '@testing-library/react'; +import { render, waitFor } from '@testing-library/react'; +import { InMemoryCache } from '@apollo/client'; import { MockedProvider } from '@apollo/client/testing'; import { Route } from 'react-router'; - import { SearchPage } from '../SearchPage'; import TestPageContainer from '../../../utils/test-utils/TestPageContainer'; import { mocksWithSearchFlagsOff } from '../../../Mocks'; import { PageRoutes } from '../../../conf/Global'; +import possibleTypesResult from '../../../possibleTypes.generated'; -describe('SearchPage', () => { - it('renders loading', async () => { - const promise = Promise.resolve(); - const { getByText } = render( - - - } /> - - , - ); - await waitFor(() => expect(getByText('Loading...')).toBeInTheDocument()); - await act(() => promise); - }); +const cache = new InMemoryCache({ + // need to define possibleTypes to allow us to use Apollo cache with union types + possibleTypes: possibleTypesResult.possibleTypes, +}); +describe('SearchPage', () => { it('renders the selected filters as checked', async () => { const { getByTestId, queryByTestId } = render( - + @@ -56,14 +37,7 @@ describe('SearchPage', () => { it('renders the selected filters as checked using legacy URL scheme for entity (entity instead of _entityType)', async () => { const { getByTestId, queryByTestId } = render( - + @@ -83,14 +57,7 @@ describe('SearchPage', () => { it('renders multiple checked filters at once', async () => { const { getByTestId, queryByTestId } = render( - + @@ -108,44 +75,4 @@ describe('SearchPage', () => { const hdfsPlatformBox = getByTestId('facet-platform-hdfs'); expect(hdfsPlatformBox).toHaveProperty('checked', true); }); - - it('clicking a filter selects a new filter', async () => { - const promise = Promise.resolve(); - const { getByTestId, queryByTestId } = render( - - - } /> - - , - ); - - await waitFor(() => expect(queryByTestId('facet-_entityType-DATASET')).toBeInTheDocument()); - - const datasetEntityBox = getByTestId('facet-_entityType-DATASET'); - expect(datasetEntityBox).toHaveProperty('checked', true); - - const chartEntityBox = getByTestId('facet-_entityType-CHART'); - expect(chartEntityBox).toHaveProperty('checked', false); - act(() => { - fireEvent.click(chartEntityBox); - }); - - await waitFor(() => expect(queryByTestId('facet-_entityType-DATASET')).toBeInTheDocument()); - - const datasetEntityBox2 = getByTestId('facet-_entityType-DATASET'); - expect(datasetEntityBox2).toHaveProperty('checked', true); - - const chartEntityBox2 = getByTestId('facet-_entityType-CHART'); - expect(chartEntityBox2).toHaveProperty('checked', true); - await act(() => promise); - }); }); diff --git a/datahub-web-react/src/app/search/filters/BasicFilters.tsx b/datahub-web-react/src/app/search/filters/BasicFilters.tsx index e8f56e5c2cd5e..84750387853bb 100644 --- a/datahub-web-react/src/app/search/filters/BasicFilters.tsx +++ b/datahub-web-react/src/app/search/filters/BasicFilters.tsx @@ -24,6 +24,7 @@ import { } from '../../onboarding/config/SearchOnboardingConfig'; import { useFilterRendererRegistry } from './render/useFilterRenderer'; import { FilterScenarioType } from './render/types'; +import BasicFiltersLoadingSection from './BasicFiltersLoadingSection'; const NUM_VISIBLE_FILTER_DROPDOWNS = 5; @@ -56,6 +57,7 @@ const FILTERS_TO_REMOVE = [ ]; interface Props { + loading: boolean; availableFilters: FacetMetadata[] | null; activeFilters: FacetFilterInput[]; 
onChangeFilters: (newFilters: FacetFilterInput[]) => void; @@ -64,6 +66,7 @@ interface Props { } export default function BasicFilters({ + loading, availableFilters, activeFilters, onChangeFilters, @@ -88,6 +91,7 @@ export default function BasicFilters({ + {loading && !visibleFilters?.length && } {visibleFilters?.map((filter) => { return filterRendererRegistry.hasRenderer(filter.field) ? ( filterRendererRegistry.render(filter.field, { diff --git a/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx b/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx new file mode 100644 index 0000000000000..f82a66d4f0c6d --- /dev/null +++ b/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx @@ -0,0 +1,27 @@ +import * as React from 'react'; +import { Skeleton } from 'antd'; +import styled from 'styled-components'; + +const Container = styled.div` + display: flex; + align-items: center; +`; + +const CardSkeleton = styled(Skeleton.Input)` + && { + padding: 2px 12px 2px 0px; + height: 32px; + border-radius: 8px; + } +`; + +export default function BasicFiltersLoadingSection() { + return ( + + + + + + + ); +} diff --git a/datahub-web-react/src/app/search/filters/SearchFilters.tsx b/datahub-web-react/src/app/search/filters/SearchFilters.tsx index 97e71ae701aac..bcc987159e0e6 100644 --- a/datahub-web-react/src/app/search/filters/SearchFilters.tsx +++ b/datahub-web-react/src/app/search/filters/SearchFilters.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata } from '../../../types.generated'; import { ANTD_GRAY } from '../../entity/shared/constants'; @@ -13,6 +13,7 @@ const SearchFiltersWrapper = styled.div<{ removePadding: boolean }>` `; interface Props { + loading: boolean; mode: FilterMode; availableFilters: FacetMetadata[]; activeFilters: FacetFilterInput[]; @@ -24,6 +25,7 @@ interface Props { } export default function SearchFilters({ + loading, mode, availableFilters, activeFilters, @@ -33,6 +35,17 @@ export default function SearchFilters({ onChangeUnionType, onChangeMode, }: Props) { + const [finalAvailableFilters, setFinalAvailableFilters] = useState(availableFilters); + + /** + * Only update the active filters if we are done loading. Prevents jitter! 
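+     * (Strictly speaking, it is the *available* filter list that is held steady here: while a
+     * refetch is in flight we keep rendering the previous facets, and `finalAvailableFilters`
+     * only catches up with `availableFilters` once `loading` settles.)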
+ */ + useEffect(() => { + if (!loading && finalAvailableFilters !== availableFilters) { + setFinalAvailableFilters(availableFilters); + } + }, [availableFilters, loading, finalAvailableFilters]); + const isShowingBasicFilters = mode === FilterModes.BASIC; return ( {isShowingBasicFilters && ( Date: Fri, 1 Dec 2023 13:21:54 -0800 Subject: [PATCH 187/792] =?UTF-8?q?refactor(ui):=20Remove=20primary=20colo?= =?UTF-8?q?r=20for=20sort=20selector=20+=20add=20t=E2=80=A6=20(#9363)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/app/search/context/constants.ts | 8 ++-- .../app/search/sorting/SearchSortSelect.tsx | 42 ++++++++++--------- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/datahub-web-react/src/app/search/context/constants.ts b/datahub-web-react/src/app/search/context/constants.ts index 5f841b8536e19..96e5d7c787203 100644 --- a/datahub-web-react/src/app/search/context/constants.ts +++ b/datahub-web-react/src/app/search/context/constants.ts @@ -7,19 +7,19 @@ export const LAST_OPERATION_TIME_FIELD = 'lastOperationTime'; export const DEFAULT_SORT_OPTION = RELEVANCE; export const SORT_OPTIONS = { - [RELEVANCE]: { label: 'Relevance', field: RELEVANCE, sortOrder: SortOrder.Descending }, + [RELEVANCE]: { label: 'Relevance (Default)', field: RELEVANCE, sortOrder: SortOrder.Descending }, [`${ENTITY_NAME_FIELD}_${SortOrder.Ascending}`]: { - label: 'A to Z', + label: 'Name A to Z', field: ENTITY_NAME_FIELD, sortOrder: SortOrder.Ascending, }, [`${ENTITY_NAME_FIELD}_${SortOrder.Descending}`]: { - label: 'Z to A', + label: 'Name Z to A', field: ENTITY_NAME_FIELD, sortOrder: SortOrder.Descending, }, [`${LAST_OPERATION_TIME_FIELD}_${SortOrder.Descending}`]: { - label: 'Last Modified in Platform', + label: 'Last Modified In Source', field: LAST_OPERATION_TIME_FIELD, sortOrder: SortOrder.Descending, }, diff --git a/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx b/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx index 683292a20b5b4..fc9486926214f 100644 --- a/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx +++ b/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx @@ -1,8 +1,9 @@ import Icon, { CaretDownFilled } from '@ant-design/icons'; -import { Select } from 'antd'; +import { Select, Tooltip } from 'antd'; import React from 'react'; import styled from 'styled-components'; import { ReactComponent as SortIcon } from '../../../images/sort.svg'; +import { ANTD_GRAY } from '../../entity/shared/constants'; import { DEFAULT_SORT_OPTION, SORT_OPTIONS } from '../context/constants'; import { useSearchContext } from '../context/SearchContext'; @@ -13,19 +14,20 @@ const SelectWrapper = styled.span` .ant-select-selection-item { // !important is necessary because updating Select styles for antd is impossible - color: ${(props) => props.theme.styles['primary-color']} !important; + color: ${ANTD_GRAY[8]} !important; font-weight: 700; } - svg { - color: ${(props) => props.theme.styles['primary-color']}; + .ant-select-selection-placeholder { + color: ${ANTD_GRAY[8]}; + font-weight: 700; } `; const StyledIcon = styled(Icon)` - color: ${(props) => props.theme.styles['primary-color']}; + color: ${ANTD_GRAY[8]}; font-size: 16px; - margin-right: -6px; + margin-right: -8px; `; export default function SearchSortSelect() { @@ -34,18 +36,20 @@ export default function SearchSortSelect() { const options = Object.entries(SORT_OPTIONS).map(([value, option]) => ({ value, label: option.label })); return ( - - - 
setSelectedSortOption(sortOption)} + dropdownStyle={{ minWidth: 'max-content' }} + placement="bottomRight" + suffixIcon={} + /> + + ); } From 14a463b1ce1b5b60bea8496f5f4aee16b8b7aa39 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 15:53:01 -0800 Subject: [PATCH 188/792] feat(ui): Supporting subtypes for data jobs (#9361) Co-authored-by: Andrew Sikowitz --- .../datahub/graphql/GmsGraphQLEngine.java | 8 ---- .../datahub/graphql/SubTypesResolver.java | 47 ------------------- .../graphql/types/chart/ChartType.java | 3 +- .../types/chart/mappers/ChartMapper.java | 4 ++ .../types/common/mappers/SubTypesMapper.java | 22 +++++++++ .../container/mappers/ContainerMapper.java | 9 +--- .../dashboard/mappers/DashboardMapper.java | 13 ++--- .../graphql/types/datajob/DataJobType.java | 3 +- .../types/datajob/mappers/DataJobMapper.java | 4 ++ .../graphql/types/dataset/DatasetType.java | 3 +- .../types/dataset/mappers/DatasetMapper.java | 4 ++ .../src/main/resources/entity.graphql | 5 ++ .../src/app/entity/chart/ChartEntity.tsx | 1 + .../src/app/entity/dataJob/DataJobEntity.tsx | 2 + .../app/entity/dataJob/preview/Preview.tsx | 4 +- datahub-web-react/src/graphql/browse.graphql | 6 +++ .../src/graphql/dataProcess.graphql | 3 ++ .../src/graphql/fragments.graphql | 9 ++++ datahub-web-react/src/graphql/lineage.graphql | 3 ++ datahub-web-react/src/graphql/preview.graphql | 6 +++ datahub-web-react/src/graphql/scroll.graphql | 6 +++ datahub-web-react/src/graphql/search.graphql | 3 ++ .../src/main/resources/entity-registry.yml | 1 + 23 files changed, 93 insertions(+), 76 deletions(-) delete mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index b0b26f073876c..9ea8126a07ab2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1174,10 +1174,6 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("testResults", new TestResultsResolver(entityClient)) .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "dataset", - "subTypes")) .dataFetcher("runs", new EntityRunsResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) @@ -1433,10 +1429,6 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "chart", - "subTypes")) ); builder.type("ChartInfo", typeWiring -> typeWiring .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java deleted file mode 100644 index c74d84d8be323..0000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.linkedin.datahub.graphql; - -import com.linkedin.common.SubTypes; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.r2.RemoteInvocationException; -import graphql.schema.DataFetcher; -import graphql.schema.DataFetchingEnvironment; -import java.net.URISyntaxException; -import java.util.Collections; -import java.util.concurrent.CompletableFuture; -import javax.annotation.Nullable; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -@AllArgsConstructor -public class SubTypesResolver implements DataFetcher> { - - EntityClient _entityClient; - String _entityType; - String _aspectName; - - @Override - @Nullable - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - SubTypes subType = null; - final String urnStr = ((Entity) environment.getSource()).getUrn(); - try { - final Urn urn = Urn.createFromString(urnStr); - EntityResponse entityResponse = _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(_aspectName), context.getAuthentication()).get(urn); - if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { - subType = new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); - } - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); - } - return subType; - }); - } -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index cfec8f8a2391f..fa0e3cd856803 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -77,7 +77,8 @@ public class ChartType implements SearchableEntityType, Browsable INPUT_FIELDS_ASPECT_NAME, EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 657c9b688aed2..e0ffc57ddf519 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -11,6 +11,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AccessLevel; @@ -34,6 
+35,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -97,6 +99,8 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) { chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java new file mode 100644 index 0000000000000..9aa94eae62999 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -0,0 +1,22 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.SubTypes; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.ArrayList; +import javax.annotation.Nonnull; + +public class SubTypesMapper implements ModelMapper { + + public static final SubTypesMapper INSTANCE = new SubTypesMapper(); + + public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) { + return INSTANCE.apply(metadata); + } + + @Override + public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { + final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); + result.setTypeNames(new ArrayList<>(input.getTypeNames())); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index aeaa8f4f85c14..b81259e78be3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -21,6 +21,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -97,7 +98,7 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { - 
result.setSubTypes(mapSubTypes(new SubTypes(envelopedSubTypes.getValue().data()))); + result.setSubTypes(SubTypesMapper.map(new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); @@ -150,12 +151,6 @@ private static com.linkedin.datahub.graphql.generated.ContainerEditablePropertie return editableContainerProperties; } - private static com.linkedin.datahub.graphql.generated.SubTypes mapSubTypes(final SubTypes gmsSubTypes) { - final com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(gmsSubTypes.getTypeNames()); - return subTypes; - } - private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) { // Set dummy platform to be resolved. final DataPlatform dummyPlatform = new DataPlatform(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 32e4341ece4aa..432624ac4699f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -33,6 +33,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -91,7 +92,8 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) { dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> @@ -204,13 +206,4 @@ private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) final Domains domains = new Domains(dataMap); dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); } - - private void mapSubTypes(@Nonnull Dashboard dashboard, DataMap dataMap) { - SubTypes pegasusSubTypes = new SubTypes(dataMap); - if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); - dashboard.setSubTypes(subTypes); - } - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index bde79f6dce6e8..f6f37978bb36a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -75,7 +75,8 @@ public class DataJobType implements SearchableEntityType, Brows DEPRECATION_ASPECT_NAME, DATA_PLATFORM_INSTANCE_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("flow"); private final EntityClient _entityClient; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 4845fc1876348..61802ad9cfe5c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -9,6 +9,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataFlow; @@ -27,6 +28,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -103,6 +105,8 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); } }); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 0fc4399ac902d..6f339d3985133 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -86,7 +86,8 @@ public class DatasetType implements SearchableEntityType, Brows EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 4867aa1d89825..3e39c14c29ede 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -11,6 +11,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.Siblings; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -29,6 +30,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -114,6 +116,8 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 035f756a10d55..4f3769d908815 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -5689,6 +5689,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { """ type: EntityType! 
+ """ + Sub Types that this entity implements + """ + subTypes: SubTypes + """ The timestamp for the last time this entity was ingested """ diff --git a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx index fc898dec9d93a..d2d35aad7c29f 100644 --- a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx +++ b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx @@ -168,6 +168,7 @@ export class ChartEntity implements Entity { return ( { { Date: Mon, 4 Dec 2023 09:50:46 +0100 Subject: [PATCH 189/792] fix(ingest/bigquery): Fix format arguments for table lineage test (#9340) (#9341) --- .../src/datahub/ingestion/source/bigquery_v2/lineage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index e9acf5ea86044..eddd08c92b808 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -894,8 +894,8 @@ def test_capability(self, project_id: str) -> None: for entry in self.audit_log_api.get_bigquery_log_entries_via_gcp_logging( gcp_logging_client, filter=BQ_FILTER_RULE_TEMPLATE_V2_LINEAGE.format( - self.start_time.strftime(BQ_DATETIME_FORMAT), - self.end_time.strftime(BQ_DATETIME_FORMAT), + start_time=self.start_time.strftime(BQ_DATETIME_FORMAT), + end_time=self.end_time.strftime(BQ_DATETIME_FORMAT), ), log_page_size=self.config.log_page_size, limit=1, From 7857944bb52ff29ee7d30d8fba21262aa4510b0a Mon Sep 17 00:00:00 2001 From: ethan-cartwright Date: Mon, 4 Dec 2023 11:32:45 -0500 Subject: [PATCH 190/792] fix(siblingsHook): add logic to account for non dbt upstreams (#9154) Co-authored-by: Ethan Cartwright --- .../hook/siblings/SiblingAssociationHook.java | 3 +-- .../siblings/SiblingAssociationHookTest.java | 22 +++++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 7cbe53dee9fe4..064f987ff1ba9 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -205,9 +205,8 @@ private void handleSourceDatasetEvent(MetadataChangeLog event, DatasetUrn source // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt model if (dbtUpstreams.size() == 1) { setSiblingsAndSoftDeleteSibling(dbtUpstreams.get(0).getDataset(), sourceUrn); - } else { + } else if (dbtUpstreams.size() > 1) { log.error("{} has an unexpected number of dbt upstreams: {}. 
Not adding any as siblings.", sourceUrn.toString(), dbtUpstreams.size()); - } } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index 6a2a05aa4b8c0..93e98b7343cd4 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -305,6 +305,28 @@ public void testInvokeWhenSourceUrnHasTwoUpstreamsOneDbt() throws Exception { Mockito.verify(_mockEntityClient, Mockito.times(2)).ingestProposal(Mockito.any(), eq(true)); } + @Test + public void testInvokeWhenSourceUrnHasTwoUpstreamsNoDbt() throws Exception { + + MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + final UpstreamLineage upstreamLineage = new UpstreamLineage(); + final UpstreamArray upstreamArray = new UpstreamArray(); + Upstream snowflakeUpstream1 = + createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream snowflakeUpstream2 = + createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", DatasetLineageType.TRANSFORMED); + upstreamArray.add(snowflakeUpstream1); + upstreamArray.add(snowflakeUpstream2); + upstreamLineage.setUpstreams(upstreamArray); + + event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); + event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + _siblingAssociationHook.invoke(event); + + + Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); + } + private MetadataChangeLog createEvent(String entityType, String aspectName, ChangeType changeType) { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(entityType); From f9b24e07241bd5dc3e6d93698a90000fc08150fb Mon Sep 17 00:00:00 2001 From: purnimagarg1 <139125209+purnimagarg1@users.noreply.github.com> Date: Mon, 4 Dec 2023 22:58:41 +0530 Subject: [PATCH 191/792] feat: Support CSV ingestion through the UI (#9280) Co-authored-by: Gabe Lyons --- .../src/app/ingest/source/builder/CSVInfo.tsx | 27 ++++++++ .../ingest/source/builder/RecipeBuilder.tsx | 5 +- .../source/builder/RecipeForm/constants.ts | 8 ++- .../ingest/source/builder/RecipeForm/csv.ts | 60 ++++++++++++++++++ .../app/ingest/source/builder/constants.ts | 4 ++ .../app/ingest/source/builder/sources.json | 7 ++ .../src/app/ingest/source/conf/csv/csv.ts | 22 +++++++ .../src/app/ingest/source/conf/sources.tsx | 2 + datahub-web-react/src/images/csv-logo.png | Bin 0 -> 12029 bytes .../main/resources/boot/data_platforms.json | 10 +++ 10 files changed, 143 insertions(+), 2 deletions(-) create mode 100644 datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx create mode 100644 datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts create mode 100644 datahub-web-react/src/app/ingest/source/conf/csv/csv.ts create mode 100644 datahub-web-react/src/images/csv-logo.png diff --git a/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx b/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx new file mode 100644 index 0000000000000..87d632bb228b5 --- /dev/null +++ 
b/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx
@@ -0,0 +1,27 @@
+import React from 'react';
+import { Alert } from 'antd';
+
+const CSV_FORMAT_LINK = 'https://datahubproject.io/docs/generated/ingestion/sources/csv';
+
+export const CSVInfo = () => {
+    const link = (
+        
+            link
+        
+    );
+
+    return (
+        
+                    Add the URL of your CSV file to be ingested. This will work for any web-hosted CSV file. For
+                    example, you can create a file in Google Sheets following the format at this {link} and then
+                    construct the CSV URL by publishing your Google Sheet in CSV format.
+                
+            }
+        />
+    );
+};
diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx b/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx
index bee9b04cee100..db1f0fdd4dfa6 100644
--- a/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx
+++ b/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx
@@ -7,8 +7,9 @@ import { ANTD_GRAY } from '../../../entity/shared/constants';
 import { YamlEditor } from './YamlEditor';
 import RecipeForm from './RecipeForm/RecipeForm';
 import { SourceBuilderState, SourceConfig } from './types';
-import { LOOKER, LOOK_ML } from './constants';
+import { CSV, LOOKER, LOOK_ML } from './constants';
 import { LookerWarning } from './LookerWarning';
+import { CSVInfo } from './CSVInfo';
 
 export const ControlsContainer = styled.div`
     display: flex;
@@ -81,6 +82,8 @@ function RecipeBuilder(props: Props) {
     return (
         {(type === LOOKER || type === LOOK_ML) && <LookerWarning type={type} />}
+        {type === CSV && <CSVInfo />}
+
         {sourceConfigs?.displayName} Recipe
diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts
index 351876fe6b16a..844bf50926764 100644
--- a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts
+++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts
@@ -83,7 +83,7 @@ import {
     PROJECT_NAME,
 } from './lookml';
 import { PRESTO, PRESTO_HOST_PORT, PRESTO_DATABASE, PRESTO_USERNAME, PRESTO_PASSWORD } from './presto';
-import { BIGQUERY_BETA, DBT_CLOUD, MYSQL, POWER_BI, UNITY_CATALOG, VERTICA } from '../constants';
+import { BIGQUERY_BETA, CSV, DBT_CLOUD, MYSQL, POWER_BI, UNITY_CATALOG, VERTICA } from '../constants';
 import { BIGQUERY_BETA_PROJECT_ID, DATASET_ALLOW, DATASET_DENY, PROJECT_ALLOW, PROJECT_DENY } from './bigqueryBeta';
 import { MYSQL_HOST_PORT, MYSQL_PASSWORD, MYSQL_USERNAME } from './mysql';
 import { MSSQL, MSSQL_DATABASE, MSSQL_HOST_PORT, MSSQL_PASSWORD, MSSQL_USERNAME } from './mssql';
@@ -140,6 +140,7 @@ import {
     INCLUDE_VIEW_LINEAGE,
     INCLUDE_PROJECTIONS_LINEAGE,
 } from './vertica';
+import { CSV_ARRAY_DELIMITER, CSV_DELIMITER, CSV_FILE_URL, CSV_WRITE_SEMANTICS } from './csv';

 export enum RecipeSections {
     Connection = 0,
@@ -453,6 +454,11 @@ export const RECIPE_FIELDS: RecipeFields = {
         ],
         filterSectionTooltip: 'Include or exclude specific Schemas, Tables, Views and Projections from ingestion.',
     },
+    [CSV]: {
+        fields: [CSV_FILE_URL],
+        filterFields: [],
+        advancedFields: [CSV_ARRAY_DELIMITER, CSV_DELIMITER, CSV_WRITE_SEMANTICS],
+    },
 };

 export const CONNECTORS_WITH_FORM = new Set(Object.keys(RECIPE_FIELDS));
diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts
new file mode 100644
index 0000000000000..fba4f3b9d0164
--- /dev/null
+++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts
@@ -0,0 +1,60 @@
+import { RecipeField, FieldType } from './common';
+
+const validateURL = (fieldName) => {
+    return {
+        validator(_, value) {
+            const URLPattern = new RegExp(/^(?:http(s)?:\/\/)?[\w.-]+(?:\.[\w.-]+)+[\w\-._~:/?#[\]@!$&'()*+,;=.]+$/);
+            const isURLValid = URLPattern.test(value);
+            if (!value || isURLValid) {
+                return Promise.resolve();
+            }
+            return Promise.reject(new Error(`A valid ${fieldName} is required.`));
+        },
+    };
+};
+
+export const CSV_FILE_URL: RecipeField = {
+    name: 'filename',
+    label: 'File URL',
+    tooltip: 'File URL of the CSV file to ingest.',
+    type: FieldType.TEXT,
+    fieldPath: 'source.config.filename',
+    placeholder: 'File URL',
+    required: true,
+    rules: [() => validateURL('File URL')],
+};
+
+export const CSV_ARRAY_DELIMITER: RecipeField = {
+    name: 'array_delimiter',
+    label: 'Array delimiter',
+    tooltip: 'Delimiter to use when parsing array fields (tags, terms and owners)',
+    type: FieldType.TEXT,
+    fieldPath: 'source.config.array_delimiter',
+    placeholder: 'Array delimiter',
+    rules: null,
+};
+
+export const CSV_DELIMITER: RecipeField = {
+    name: 'delimiter',
+    label: 'Delimiter',
+    tooltip: 'Delimiter to use when parsing CSV',
+    type: FieldType.TEXT,
+    fieldPath: 'source.config.delimiter',
+    placeholder: 'Delimiter',
+    rules: null,
+};
+
+export const CSV_WRITE_SEMANTICS: RecipeField = {
+    name: 'write_semantics',
+    label: 'Write Semantics',
+    tooltip:
+        'Whether the new tags, terms and owners to be added will override the existing ones added only by this source or not. Value for this config can be "PATCH" or "OVERRIDE"',
+    type: FieldType.SELECT,
+    options: [
+        { label: 'PATCH', value: 'PATCH' },
+        { label: 'OVERRIDE', value: 'OVERRIDE' },
+    ],
+    fieldPath: 'source.config.write_semantics',
+    placeholder: 'Write Semantics',
+    rules: null,
+};
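A note on the File URL validation above: the rule resolves for an empty value (required-ness is enforced separately by the field's required flag) or any value matching URLPattern, and rejects everything else. A standalone TypeScript sketch, not part of the patch, exercising the same pattern on invented sample inputs:

// Sketch only: the regex below is copied from validateURL; the sample URLs are made up.
const URLPattern = /^(?:http(s)?:\/\/)?[\w.-]+(?:\.[\w.-]+)+[\w\-._~:/?#[\]@!$&'()*+,;=.]+$/;

const samples: Array<[string, boolean]> = [
    ['https://docs.google.com/spreadsheets/d/DOCID/export?format=csv', true],
    ['example.com/data.csv', true], // passes: the scheme group is optional
    ['not a url', false], // fails: no dot-separated host, and spaces never match
];

for (const [input, expected] of samples) {
    console.assert(URLPattern.test(input) === expected, input);
}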
diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts
index fdb094d721304..08538729de40b 100644
--- a/datahub-web-react/src/app/ingest/source/builder/constants.ts
+++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts
@@ -30,6 +30,7 @@ import verticaLogo from '../../../../images/verticalogo.png';
 import mlflowLogo from '../../../../images/mlflowlogo.png';
 import dynamodbLogo from '../../../../images/dynamodblogo.png';
 import fivetranLogo from '../../../../images/fivetranlogo.png';
+import csvLogo from '../../../../images/csv-logo.png';

 export const ATHENA = 'athena';
 export const ATHENA_URN = `urn:li:dataPlatform:${ATHENA}`;
@@ -108,6 +109,8 @@ export const VERTICA = 'vertica';
 export const VERTICA_URN = `urn:li:dataPlatform:${VERTICA}`;
 export const FIVETRAN = 'fivetran';
 export const FIVETRAN_URN = `urn:li:dataPlatform:${FIVETRAN}`;
+export const CSV = 'csv-enricher';
+export const CSV_URN = `urn:li:dataPlatform:${CSV}`;

 export const PLATFORM_URN_TO_LOGO = {
     [ATHENA_URN]: athenaLogo,
@@ -142,6 +145,7 @@ export const PLATFORM_URN_TO_LOGO = {
     [UNITY_CATALOG_URN]: databricksLogo,
     [VERTICA_URN]: verticaLogo,
     [FIVETRAN_URN]: fivetranLogo,
+    [CSV_URN]: csvLogo,
 };

 export const SOURCE_TO_PLATFORM_URN = {
diff --git a/datahub-web-react/src/app/ingest/source/builder/sources.json b/datahub-web-react/src/app/ingest/source/builder/sources.json
index 9619abebbd54e..2dc2598c1a0ab 100644
--- a/datahub-web-react/src/app/ingest/source/builder/sources.json
+++ b/datahub-web-react/src/app/ingest/source/builder/sources.json
@@ -223,6 +223,13 @@
     "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/fivetran/",
     "recipe": "source:\n type: fivetran\n config:\n # Fivetran log connector destination server configurations\n fivetran_log_config:\n destination_platform: snowflake\n destination_config:\n # Coordinates\n account_id: snowflake_account_id\n warehouse: warehouse_name\n database: snowflake_db\n log_schema: fivetran_log_schema\n\n # Credentials\n username: ${SNOWFLAKE_USER}\n password: ${SNOWFLAKE_PASS}\n role: snowflake_role\n\n # Optional - filter for certain connector names instead of ingesting everything.\n # connector_patterns:\n # allow:\n # - connector_name\n\n # Optional -- This mapping is optional and only required to configure platform-instance for source\n # A mapping of Fivetran connector id to data platform instance\n # sources_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV\n\n # Optional -- This mapping is optional and only required to configure platform-instance for destination.\n # A mapping of Fivetran destination id to data platform instance\n # destination_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV"
   },
+  {
+    "urn": "urn:li:dataPlatform:csv-enricher",
+    "name": "csv-enricher",
+    "displayName": "CSV",
+    "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/csv",
+    "recipe": "source: \n type: csv-enricher \n config: \n # URL of your csv file to ingest \n filename: \n array_delimiter: '|' \n delimiter: ',' \n write_semantics: PATCH"
+  },
   {
     "urn": "urn:li:dataPlatform:custom",
     "name": "custom",
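For readability: the escaped one-line recipe string registered for the CSV card above unescapes to the following YAML, which is what the recipe editor displays (filename is deliberately left blank for the user to fill in):

source:
  type: csv-enricher
  config:
    # URL of your csv file to ingest
    filename:
    array_delimiter: '|'
    delimiter: ','
    write_semantics: PATCH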
"urn:li:dataPlatform:custom", "name": "custom", diff --git a/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts b/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts new file mode 100644 index 0000000000000..e1dc22c086fb4 --- /dev/null +++ b/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts @@ -0,0 +1,22 @@ +import { SourceConfig } from '../types'; +import csvLogo from '../../../../../images/csv-logo.png'; + +const placeholderRecipe = `\ +source: + type: csv-enricher + config: + filename: # URL of your csv file to ingest, e.g. https://docs.google.com/spreadsheets/d/DOCID/export?format=csv + array_delimiter: | + delimiter: , + write_semantics: PATCH +`; + +const csvConfig: SourceConfig = { + type: 'csv-enricher', + placeholderRecipe, + displayName: 'CSV', + docsUrl: 'https://datahubproject.io/docs/generated/ingestion/sources/csv', + logoUrl: csvLogo, +}; + +export default csvConfig; diff --git a/datahub-web-react/src/app/ingest/source/conf/sources.tsx b/datahub-web-react/src/app/ingest/source/conf/sources.tsx index a3cdb0a8f5843..4dbeeb5c975e9 100644 --- a/datahub-web-react/src/app/ingest/source/conf/sources.tsx +++ b/datahub-web-react/src/app/ingest/source/conf/sources.tsx @@ -16,6 +16,7 @@ import { SourceConfig } from './types'; import hiveConfig from './hive/hive'; import oracleConfig from './oracle/oracle'; import tableauConfig from './tableau/tableau'; +import csvConfig from './csv/csv'; const baseUrl = window.location.origin; @@ -46,6 +47,7 @@ export const SOURCE_TEMPLATE_CONFIGS: Array<SourceConfig> = [ glueConfig, oracleConfig, hiveConfig, + csvConfig, { type: 'custom', placeholderRecipe: DEFAULT_PLACEHOLDER_RECIPE, diff --git a/datahub-web-react/src/images/csv-logo.png b/datahub-web-react/src/images/csv-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..b5fdc189cf58ec8c62a14ff58fcc271c36bb1a4f GIT binary patch literal 12029 zcmdUVbyQUGx9=H-loXVdlt!eKln@XYT0kiYNf}@O0f!DnLPcPZZfPkMkeCq=L_$L8 z8XBcL2bg(>-@WhMx9(f-ci%sE-TT&pb>PH4-?R5;$9L~fjG?|J^(EFz002<KwA76N zfC&7P2p}f~5Bq*YIPgH>sb%gB05okEzmN`(5(n@ki_bkXA7c+EALQewjsOygyy=2) z^|pWP>3GxQsdL8W9aaF~0$}QQP5d%9uz{&r4}y;fM-$ai5%F5jjoP8vbhU!>d2kZ$ zxdIB5gF&W*6a(F*=mSGq<w>Q9!<DlM3-{Cg;vqE>w)<L6^KZIzqv^SE@gK$>T3-D% z!tlyZg@i90NbAY|8WOs$qW*MD%DXCfYy9Zc*Q%kOtGanDkXq^#zwTQVC>qZTL;WZI zto%})roLjtEAH3VikXZrNd9${#!hTE?S3T|l50Lv6~tXQuok`JHcO%6JS_?{dYuY{ zXp@gJ)MpN7G$kX|?%4ub8{L$DB%S|8g;DODio&Ak!)xDgo;ck+JKn@(xHwzK8-1`j ztAi9TVqfw0O4``YZC0Cq@x8O9G4ViXi*#ZlGm8+x6beg!YupgHdAf&qpCt#4?~d%= zE7`eCWsbUKbsJurXjXlajA3)pw9Yh&nX@~?YY*tY6HL0<jgEP?L?WET+~0#Hfx)59 zOIQ`OBv7#T9m5u1AnoZkO<f}bRgvG@L#Uu8!nO)ym;qjxFc&n87e?X$#PgPhQ2{94 ztML#OQ~{ABpoKC~S;DaWf?%_dP}IAGd;T)Hnvy`xho;~8yNJ6eNeQzU|GHewUB-VR zfE0kCE_DK2Fe3=dfB3fl);F8<p&_*;-yR6iyE0vtWoS+0=14&5pjfYNzVKt)mV?*U zmX8PNOtY6BFa!IZqc^kK-ZrG%-JoxMA*C)clg+Dl{|=EXL^j-LrI)JmjGVG#rtfpb zf<LloWKI;=Fm67!A$#;nnav0-c-IfA@CV^ofBF%Vaer-F&-!#dHIT*AV%l9x+m2#5 z=4c5UVG#vU@qJOIPkgfrO$`%zsWWcquRm>7a+?{b8C`5Y-s7r8)4+G$V=@d~x2bIc zUrOz6su)Q{RD*|#Z$K^?dj@FK^wt?)2&i>o)||Dgw!kl?y1&@4f}(AtRTxC&)a@r4 z0@L#fo~FJKYhqefLBhX>P%|=0G{l)r>;2<ooztFZgy+4Br3r<kHpR|#qbX}ip<#z| zyWw>ZnMG`50WnWb@zlv%%%tI(IPfIL(XvA*#Rj&h=BJ<nXG)|~R&n-cQM?|wi_oZO z8w3tK4`iZcKIQEu;NCu-yD{6<a+ax(I8<-d;;#PXvbJm1;V2&b#?Ug&W(thxmNYNS z#gFpe0Q=t>LjM;h`)6zJ2Az)acKxcl-3AYlv#Ouu_v^a|m{ERfFr5cfNweZh^6d<d zF7)+U4rmz1=0u_Qn8cA}iIu9KXQbet$;1MoSDJtTAYdr`HSJpx=i|BK$$!L~z9D%G zNpA2e6Jae(XUAC$&csLVVcRiPL51W@MoPiio4Q<c*uBM|`aeN<!EWTq?=*o>0u5M# z@Ene5;q$NK7Ao`<ViWG~`d5ReW0*rZ3Z9ML6}5hcWPnpQ&AC>2YD}|h!6Q8%PP1R2 
zWk1&*j6Fui$LO0aU`V*P;Qf_|3YhqN2nML3EBH#v3w*fy%W#znRrZ742I6QH^s5$! z3wXilwK;#HDgJSC#I5NiU1jG5^4n<#>vk;tk$EE*_|;U}mY$6SgZDd?#l3e5GYLpG zI6En@QSG)SyR<736GZMornO;5KTo!NLoDCf!C<<##Mh5Cm!lF-y2CGRqGU89aU+xD z4ohl|w=;^hj<P<sQ(D!fVh}&U34+?=U|A--ZV4O7D@5%9xd@Q|%y$1rH2weOW<$9x z(xLaHrde9$R_e35<OY#`>R`fyo7B_JySMx;86Nozy1hTLV8kBy)n<kEp;7o_O<Jo% zPTNP<9u$bWb#I4!mUxy}yHG}|e)sx~YoQ81zr6j6O|yuq18NM;=R?^&h45}#8}Etd zsMk}`NpqWR|8Rd9|BY(<O~)3O3wXuwnFvg$fKUPrI80qBf4QC)boat5HyzcM<o?y* z)`?6D{V#?#<xARM?;=zyzR3h_T<z;bLt(viecfp21zI?Kz|5xwzZ5!30w%oDCqb2_ z7tw7h``-fKMz!b`Devv}1JK(*92r_wd|^!Oxj??2>WaO;%mug%vFW{abgftaf&Amg zIO!0Pt(!xv7x?WYn<kx#N3x;lql2=a{#DpTs8zH9xxvZq?8T2l_!4O6Hh@wqUDY!n zYsqo{&3YG_rIyA3I@C5i!!`XqvRiXiA@XH)m;iLodsEkF;Tfdv^0<azh3|yTDX3t4 z1;;u6_7@P;yoF$@`2Q)NUh<duWx^)hVZ5Lk4F)eBae=ufubqx@XsH!d)b42aR@4Gr zC{dyNcTc`^w&oPi@?@hofal4Yy!rOe=BRD{+)Shbsv!9mBq+>&6K|aCR`my~3*X=b zz`!L+XPtl)K^2&DeNO6_`HI1+shn0pN<w#!=EY!eU0WgKjUbO{_kxjWDc~RwKhx?G z3L6;G6_-0fXCix#_7J>DhuM(mGl=yWAIx|Z*09iu$<Qk8g{s`SzQxE3t2SBe!B}dQ zeuV;aV;F^a-d+p$U}~2wfG#ZuNeRAJc(>$Vu7l;zgxEi;qW%m0{=$_Dgg!u&NkS;@ z0A4Z??8H1k!b=sFc~$Z_GL8(JGSL;9S!cQ*dC!!&r^wd1=_!bL_PF}OJ&Ctxk30~S z>^j=5K<>js?^zw<CaO8~cTW+@Edn>_&oYl5*PnWRs1;zCAIv-C;Sr16wwsmcYD7>g zbNyfwdP(4VD)J6tAN0-r(eVz}xUvSZlE0jK<P+V`zk4kjOiIwWK8!0&k6{<VReF$g zO5OwJn%m{t4s3i%_bT@9;bcO@#<jo<lmFvVv<UYd<#E|w`UcK<=E4EN2Z%+C9+H>c z!#4N(PTid-LrdM*C1OS>dyED;k?v+;{(_F~lud7eUFxY{>RBN9Ps!4hAEf*OWP*Bl z$wWAN6uVw55>nM$V0~GAvp94=8Z)WY4u0mg2i?g1P3Ao3#lE7rrjX%5@*SgSCW%v} zVZ;l+t4n<n0UFPiM%q1(7E1J=sbPry5Jk22K~m@ToIN%CEuKdSCS}T`j1Ja;e?smx zobd)|aCFwJQosH=^=Vmi$YX2SkP4_yh2$pPBhTU2ZAzK$wdq5%<c6SaKTqf^_J<WO zVf(m)l9>r8{HLl#Y-gJ~(TlQVdUz;NX7c@BYn-NxgV9<Y^}`X!ISk&fPVTyb_3=VY zyh!qZ_Ujs<u{Hj{wh(aJ#+IRWVsCk}2IF`W4=7%z{iT4D=w5PWSg;I7?;=(bT0`mJ z0)sM*HA~?2a4o_Q*A+G^-B|B%OQ{n7A3bae@e=}r!`b1?%1H&AxCaK%w+2R*)o&px zz57sYFP5+QW&hZNWXR=aCn)a6S!px`UEBVRnH(>>bjH_lhI^NMNsp}BX@3yh{9Lm; zW?MDz&>byn*nNJ>fae`8nx<X>wy|=0`-tQGgf~eiF}UWmr;61HFqG0I+A4Y!LQ~f9 z1$6W_P}u`AIC;C3YpZenOG^*obB_D5_^Uc!oU6$z&B09-0w;4$QdoZ_brbjYWzV>m zezLn_fwKdxcyhtvzWbj65q^}(+q?q=Qc#8zC_JRJlYQOvTe&~zu5tCg2PT;!o6@cS z@qWhRU561j9Gu49+4*4X;N4F#q9&=YL=<9=J`GvaLA1SiVc5I}Vno?V1z()_pkV_P zQ(CD(kn;>gCpw8GaAUbj_RnTDS4e$KKAwpoQ-9`m^OO11mHjtU`boy&8AUs*-pA^E zy$q~zgyFxX9yf+BJu_U@l{)7XINz^|k^lr_nXCR}=coJ%?Csx0@JZ=>F<oH`ye@HO z&Y*#QNyeYDJ@MP-_T%zDa-sVvhKh-FP0H=292#<!+5>~AjR!m#sG;l+bUzNKlRHN| zkz^6WX`{pCpf)dYE(Ho$#jh(lO@n$8jX?5Vd(6aV-F9zuD#P>5*}i+jYxuzfUEdum z@?aNMBJEGN5zvSUPgEmAtyFjiUulK;PawD4N@Huekcz6uVHml#Hn|^1ryO78hlw7} zCnOd<do8<_zGe98{NoW-)Q`+=T%~}(N^$<)>qYc01|xmK!hSPH7m$17oH97yBY4?1 z`x&z5ZJpZLqoUjPnDnMkw{nOqdsjbIM2IJK+<<Z)@~;#+JD{}E+mW%8nGU$npACED zc_@FY-T+d-(NO`!OC%=a$p^m4vUdvo_PDq@#Yc-F@(MF)u!`|{{Hj72r@`D+SEE{0 zsuR*GRu^ePL!CW|xH8PW7Y+)Td&uwSJ2}0loSL1GqZVW(-QKGFOKNw}4c<%7WaTXu z9xjBLCmyM~BLovW&uG$pQzuXw{6s+xe)L7XcJL+tg^=DMgkZ%J^zf?jZy9OQM^bIV z%}!58l^uTfmG9M~tL(H-p}Z3eTyzw2Skma7bRl+cY{}eaK6hr4zf~;>G|Xw0Q0TEx zw~OFGayOBTDV~o;-R*0}H?6a#-viSd{qbpJv(m4hEh_0pl@|ILL@(7d2)eo$(q&!H z0ka`0QgnnwFcm8MBY)EebRb^HzVL>&c1W%{euzZVmMIHl=f|8XQA`cankt7ohuFhV zN~;Pt`_6#;(($fV#ZiIMM^D#m*v-JKaLaWgkCBkO5;#ap^-dO_{FlzPpEtrjT$8~r z#$t@R7R^0%Pw<>6n5`K*)CBPBY~@|Irn;uoZaS^L^`olz$qF|V%9nj|kQg&}g<Jq4 z0K}0n8dmf{Y(MS-A1UYKxTdJ)*wicYLI>kehoqkPgCmKd+VHX2fLFNeLl=%T${<uH z8Z|WCED9?kR9S~yOGt6BHgyN<CYM(!G8^j6Y>X)l3!R0A@|*~HsF{g!zuVSe<Yn@N z;Hb=D+mBBLw&heea#pZs?M3_k(?tr0*<ztxS8URj$30e$!peq~BQpYG3I#ak%V?_z z6q-=6=j7o(nLh-jS-6DZW5<<tk0;@erSj%TlFFDjL+nv|e0D2|hOX8?bj(9Siq5&Z z0e&#tpHjt`sUfFpATV8A-!^zDXwz)pUC;`OVKp0?EsG-s-ykMlzw1g>;pg9|5vmpw zY6s7lR3RrM`*S9=*p6D3K5V8ulkhBgO$aCE5nzB%GM7_5-58K#d3NHNc!U@EwJb2F 
zc|S{52F4l8=0TcXdwEcCU2eCLH%altAPU^l_yncyNf%;oZ+!61={8&<&>s_T@26d* z>=#J>B4y^U+=eyI%Fan2&Yy`Wwf<FR(}#(pw&HwwtYrH*@<HIZ1TvM4u{cexT<m#f z>(R|QlAE_&TxXe;dtY+q;OBos&6{`ITcjkb%D&hEzf3mXS5F<k+E`(>Xh?l}q8#4N z@=z4wk!Nv!!!<>$^W!F7*YVy_Uk|M=YdHqREY@A~N9kvS{cldb57gg1bmv~%q^(D^ zjQ!38;#WVl%~liSZ$ZO~;L+I$ZqJ$~Jq{|pqvzyjOVqDtc4OYInb+;#k6Bf&&$qO9 z=8o82BW5p<yX}z!i9`=xyTNes=SmY!oF-oZ#(8kBU{LOhpYUekRNDDI0ZgZK)`_4y zv4g_-her6zzBa9y*qvSRH2W+8@Q4-OI=bGqS~b4Up!&jiT*8SHAkuWU-kSr_S(Nrz zu)TCT(BK#*bK)LmAYlIe`1MO?Nb~;`H_sR#L(oKZw_)QZ17~~@C|umoPh9s&*N0Ko zsWHCK*Y`fzpFRF_JJgb#iP!wGwFn_d+lJGh#Wd@5sOJ<AOgMB4GbtkoWZ_NisQ~z+ zH!Di#1}4fH)%_y0XV*=$g4NR}*kx{wsXZTm`&J~p=2Yst-!-+e5ShKktU??8ovYzb z8^rdMfQNBd?Zq+}d~|fctrxTEc@(7Ta_^7ta55;=q`*X{+Sa9H?HZ9z;*e5($1i_% z4YHH7<bg#7*0b()Cf-ALOd9jd4--z!%nycv=6`MzP9sjkyM&sA=LP}pVfV0GgM{Ij zB)Y)}iwh;P`g;E<8_vUH0^Y0~U6I{B_UQMqIhotEXIXDdwP^c0Rcziqt$cWw=c+B- zc-8o_oZMb|fBnf7+YgiU${7bDHmUw%z|z~xDB{voC!F$9ega%!lek2~QQ3+@+{B#d zjtp$$$DM=)M2N)p)?mN^?m>T8modeh$*jqT;bPrStqfyNH}CXepAu?*D5v6o_fjD} ziY<cKC7N8eM!L)``aF*qSe07+z*<kLDS+3j@AA8aJNyeo+Bb4XBxvE9bo0jCP4)p5 z5h1h9MMo1amRTCI^14&1sTJmw7)`1P!K{z)X0ts#YaiTMfT5Cp{tu^7F{{#*S8VQG zhj7ua9(`SK`93lII%I>{=?i1!cSZINsIfo(tC!++h49eX$o8#;w=YG==WY+lMzvt5 zK8U@y_!{%AyO`o}CNmRVs^%L_y0(PC4%tlHX6)W<k({Sl@Z=v>y4Gp;OwJ&?r=&9- zE$~P*0n9q3(@L^6%t{mzZ2+BeBJXzl0Q~vZ3OrN!2K@WYO-1=r?CO_Hg}6Yo5h8RF zQc8TgnT6T!xcamW%Ra=PqZw-)KFIv4SY<;(<$OLVYewJ!;AeRLt0=Zb<Ki)^te?@J z*km)(HW-he%+`LNkxdB~Z_~Xc+F@9cP2=_xv&)<F{>72RfhEH!lWKbHEq#LPD-cjB z^=Ud)shya0#ieRV9Iw1_$6imy=x0PbL6eNKB*|Joy4a{9o2Xz?eEBYSCdrxit`~<1 zVgE#E)|d*36qndhRh4C*XQ9(~S}kVz^6GX0IdGts-2^yz5_{FhoiL2{Vfs880#2kT z&a6QUk5B>fHOzz89<a@`_y?IxRWXa@nly$*(Oh+d+Z1~*k$5MrQ-lg02<=s^pYK^$ zVr*}EKWn0r&1$jZ5hIn)m>%6X0Rh9wpJpu(L_gmHRf!(uU#SY&&AG)#3f$(p4{$jU zn8(+Bc)hn)h)DMk4j$wu&4Gj=kHIO+kh|yVEq?qFDbN*t;><LXDPwCIx6Y>h?;3)k z@o7!YBlH{RoNcST;4*`#45J>dc7_=2<4S`BoUD;wUhSYTmSa`kbyEg2%PGs8=L)zV zeeWNI+(_WC!?4ziY#nmsy?bI1)+CC&dv^8)uD<EK+q7OxAQNeh6Ck*Ny#o1nTAC}C zX{Olfzp_sKLr<k{I9Zt<_76K(I6DZO=U?BC%Q9#<lbv0n!*d*aO;<qZ{7KrdF@373 zb44oLZrH#@uP=73vGGF}N_>Y#t(!4whuEFC7amVzEK^b02{4>Sv-(Vt@)Ei~@Y42E z)q0|OfsRV~wDLpV(mNcQ;X5l#C61wATEHM0p)SDlVS@$m5ggyblhMt4!B+ba#o|M^ z72>?unM;1l;P?V$th%4CO3+!;28pFu8wK+;dAgBBtQDWEk_jT6l+Ou+_o}`p5^m{{ z0_@!eD3jQ{1~NhqqeO`3)>}mNXoKZuvB%(2(2;-t^J924VSJIsx;rw-z`#gOe=mBr z<sn$$J5K2}-li2MhKAAHjQH`zKkF%W(Rrl*VO;n?2lHm1(_q>iYO0}Sw#iPUK>$Vg zD5M!N&>3#=UeD{9fi+b3JlRb5;}KzEi8A^|Kc?~$SU$XXEiwF9G7e@^M;+m)YvMc_ z5cYxv{2m3|eQ*y1SxdhKPX|1I-#$`JT{Xc&?--u?{P}uq`u8von~O`bB(OZoLm~YD zqWaw4a}qYoUT%{{kfShPWobxk`n?yuL#=YCprbUkQ@*BMEj9hl0~)cm`i<Kg^d$|@ zCiMfiiWJ!HFMQ<norIBd+I?K3_ODB5z<&zmAL;`_(tI+FbiKoaOb(<}7LNARp7(}U zZpxF&D(*E3UjbE6$vEg{z(xg7lhY}8&UPc|Egkz8-|QpsJhc<_0|@?_bT&rk=1G@e zkr`3ykORbKl{Bt4b}jm=gRgADC9qt7A(AxE1JqL(<`YFUgN?F8u%VlVluEOxK4}(u zX$mMX#BQ5yn)x`Px)XjNpu_{Z1oTOqBv@#CD@=!k!QVG-2o31f22j38GW~<qE_&#J zW0klefbCaHr(AlE%_&TnMcHi;sFR!-{O<noeIN^3{*h1tjs^CWyqsN;Vyt_%+nuUX zo_otla<F-Yvc;xYh(nkP+24;@2ge;PW86*f%zA~>6+&lvkL!o{3zab&D>~H(+{Y4; zJF9N2Q9V@Z#y!~hRQR;v3^7YN&h%DO{JvE}uvM;4XECJGzIVi}?%rEsp5U|*lgA)> z$>+A!Um9143ck{xwNu>)WiEdsZYCG}Q0&S&OB!$TO<p~5U2itqzgs~x$HNeUQpCCp zf#pLc)g3-i=e(fK`SbvCGXI4k6Ca?8RH@%^2^4WAHiA@?+bSw53|*iN6OpQ7lYLz# zJie6@x^k7Fr3iYqA<ND_>Y!Bgn(&VM0=h#3#9`?Wu5f3fE$;7}k|rngFRTdt<(49J z@GU0|fz1umw5M9$okp@P?qMy`0UDo!8^P|0IFXCKya_k>6`m5j6Q5-Yg7D~NqJ%_l zJZDL8;<xS%s)v=f!G*7VbvTod-ekk*la7{QP$MpkeZD>jvXGr<LblxjLETr#(*!hz z*@ZD?VX)QY`3=}AJT#m5k*e+W!Um*-*?1qWvf0*ZwCfSRabz-%JGlrsKmF~iY}fNH zFlg{J2UmsC2HG7C60*;V-#9kwBv0swf`JqtW{z=kXIvxJzy5uY{Os!97AEIgk_~?{ zB4V5GnIg{SXnL}*e<8FsOMwwfVV=XxavC1)=1NpJArMn2qqM&jzSiLBrdecPZiy4* 
zsp_sNy425dvB((Ms=h9s3x-W2cZ=Mnv4zHOELc_ui_^BIMI5I@Oh_|Mo#;dg+fzNj z3)X2;rMOiM96v<Iiu}%5gvD&#A!y7B(4Gg1B6@<}3`{*Ju!t{f1wGf0<EJO`@br){ z`%;+)%lOflX<>N(ZDoPSpz?ss#Lrf2v)YX;HFDQ*g*`-73?sey-9<^&4K&rmrk}P; z=Wb^H(%{@~@VnCI83wK&3uZG1sGc<%s-b9F?A{ENAw!B2Dnqz`coBN(6KmH^QM6P; zVT5kXW=#?Y{&*b3_eED>T93*2CZ}4oPV^bW_CgTOK9kvl3{TC>7ePxuZ$r0`!R<hB z@y~GvJn|ST?K`G&J~gdXu{^?$MJx;_wTi-4pYb-vS$`NF6;)R<AUJRK)f|EaP*{7~ z$U9H!FxLASL6GeO;-qF(*K00~iB@qZ0}#R&X9xN*8aIL6ha<?Z(MIb;!b6;2Gh)1E zX~>nYf6rl9Fo-G4VK~HB=N}qtFWLN(dOB+!-5JIFVG78UZnLIY5>!3&UiKqf6X7iG z&e@yzeJjpeX}g5r)Ym6$-mKYv%z85vJ9)%ZehxARLN2%fQZ|1-2JD%R{W?&K9<RdZ zlpoGV-R1-1NM4e*81aacwaoYa%WW6R8FF_$eohztaiy%qG#n{eze?Qd@Y140cPA_v z*}9mtZ?)H{*;yn^XU?g7kGvGx^5PG@pJG<I!+H1B{kUMhy~`Jk9#WARvg{<UrTjEP zS#_AJR0V5vxwB%{5|~}Fr#3OE!oE)=n#5t)w|!y*tgknULM4_bk3|DI@R9i6O!V`{ z6qu}*Sh8Tf*_jo#U=MyKLNLvp7YTg}<a6?o4|xNOB60*uS>Y}>OA^u3I{lqhQWXt( zyVqv@L8Tu`0wtFPB&V{@FD-)Yt`=v>Dut^Fr$WyAr4`6^=>W|c&8=mj77{WRsY%W6 zKofGHcd@$UDYwtrgZC3It&bh<p%{qz!99MD>qyX{%?dLx6y2b!U({gAIe!(lD*fTg zh$Fs~Iv2SwJgf5hTXyz|-@&~k{~jWhH)ZvYy<SbmxDct>awH}u@zz$~5;x@0Fu)s2 zf|br{Chr5&xO0*185o??g7-TuZ)woM_+A(^xI0KYo&9(ox9%$K06}t`AFu!X;Zqj{ zkNHB{H?Sk~HIwdbFPgd*Y+AD#mlaZgEx<)F)R0nvpCUL`O)K#Edsbb#)mS&vyws_X zlYWv)*&*Zgalyqn{aTfi%`G1Xo4s!Xncu)JC~CMra*&|i4dnia@Vo)eqrom@s1L(- zJ8vzmTV~gUCH;s^$gk##_C!feukjJDS%$Px#TsnTUy^aLF-jP1(@L&<gZ$(3v{H}8 z{Z(*894G}mxx_D~`>@d>Sd`c78j?!_ld@NNqK}#Yi{S5m(JYDEpLy+#gK6W>(<5J? zL$+8W+=h*Ef=pep24f1eHl#PY!7q~mw}xDqIg2hp*k0Wk9sDme5w@y4rFSD>R<Y}Y zX_>QE=L@&_F4&LF$)`*bO;WH8s8D+xZz-Ff532E$)bH|(vd`1bT-U2@W1l-Ig2Ltn z`2_~`bPnC8Ro|o#NsKdll1Y@0DOhD|ykWYAd)>Nc2JU8IKM2z>0@02)46Zyt2R1RC ztPdjF9m!W{ufO_q-~_3B)DWNg<afnBnBmjYJJ%9Nwm2<y7Vp>^JYQ7~A6VJ<z}<g7 z;k(vvu1+q(dfctA?LC0$0)tF6gJO#eYA96e%;3iY8|odt{XGL0M?%4&$E8nVT@>L` z62<%Q+%z(=(p*sT`(w$GDtW0Dy7T<CQqGX4uFvi*)EWjWfvub@tKM_+C@b<F_jOmb zM(5wB%HA?w{VNo-mvBT&eNs}&NBFym5~s$ehg&XgW;>^PtYvrg31xDi)JDTV<gS5U za6SR-_R>q7QGawLO$yO}<57fMe*pFpMam8j%kXXlOEx^NbHKq>IG2M6CoL-Ad3jv) z<2ADB-mjWDuzsz^EHa%!e(QG%7Jr{BOa<TA-UY2L!3^AD7jycE3K6KyPOt*?Kv^<! 
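To make the new source concrete, here is a hypothetical input file in the csv-enricher layout. The column names follow the linked csv-enricher documentation rather than anything in this patch, and the dataset URN, tag, term, owner and domain below are invented for illustration; note the bracketed array cells, whose elements would be split on the '|' array_delimiter:

resource,subresource,glossary_terms,tags,owners,ownership_type,description,domain
"urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)",,[urn:li:glossaryTerm:AccountBalance],[urn:li:tag:Legacy|urn:li:tag:PII],[urn:li:corpuser:datahub],TECHNICAL_OWNER,Example description,urn:li:domain:Engineering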
zG!_>`{luZxzXD%77blpKqd^!WTZ=m8{W9C9WGj3+-|uY4bFT04HJ@`Kyvv>&lduAU z2s$f9QS1Cm2KP5Dr|2l7(n>yxnHJCNm~6MMUoA0sOV@8VC%w3KGuY8Rcu9$9Zx@lT z7o71DZJ^zliU=j`52ni7<D-ic*i~0eHF)F5(_HjjEP5wOH)5mfZK-1XMN`WDw@ljZ zPRq{PeyrZom2YY2)v4lv@srb`c^&dQFL4V_ufzQ5kgD~?c!7{Ox~6Z7i!#eIXT3!c zf?)?q2%<JUmNo9$rG0mOUvz=ji=>S3zkjfihp(m#2v2aef^s{uwtA!0V{%8K(9n^g z`)h;8Iow-n5h0WPOjh0R#PGn}3DraWJdYax)8m}xrKlF#@NbbegFUd_7S{|}3oEK~ zl_rR-+LHee=D5s16F#ti4<F%c)trK4Yhc?{LUp!K2^Yxgc^o!?>n5stf~)7gVQ^^X zlz->MK+g&IrTY$a1#lY{TM^)4IvmqX!XfqIk`8k@MzZXgQ|;4}sxD`zUC-9c%4<I= z*q8+&UmIepR3(8YMIkA23S#}}n5l9bU;YT?p4@?%+8)&!jN5Vm#NV1&?QKp=j@O<` z3xhYC%}WQskg?C%)6u&w$DPEq@)3EWYnPM6Modq0S;Lx;g<i!WauJt(C0V@Y&bLoL zVyQN&32-H7m?Zslui>cc?&f<JQc~?6YE$O`>%i#Hhhw*|swIu8X`#Oj<@wSnq=%~7 zo-<m6)Nz`CMUf+);cHI&w(jaZM1fGY|KJS0|AQY7e)@=a3JAy0eshsvYqVBb4EC?P zpVB6<({O~jWHz?%^}r8&Qb0V;qCWE5RnSD*M&KJ5P7--*?eDzRS9*|o0D$Y(5~Uej zKJ4mx`L`O;z9sG3xx5;>1nxr0d&37bMob&QIb+PX%GWVLsWll-M4j+rqmonD1hD;X z<^S!`A912Pidzpkit>jrl)Iis_vun@yWd%nVFuo*3WRfmxx8#i-y#DPAftxKbXw14 zY4z9Dee^(?v5xv5&u?CH2|vZhU2&PF2p{f3%zi9-!*|XI4KrHd`i_(`VJWLh&S^)O zJ#q?OdO=5sal-dS{OVxWj1`|!Y<97iZUZM)J>w_i#IAy$cdtm7%NEM1-76*}v4IjS z`=$m=%|{OgFl=ebU^57W{Q>cGssYrCZ+O)-J;X*n$yX#>rxtg0wgzJ<S27kc38;zW z5Zw}bq7?^PH$Z|zQ)s%=a5T!f*G+|2+~frMVc0T^PY$`~rp<2ixMcT-Ir^q8*wg{r zq!fjrWN-V?S)@KvQ(4d8O%GS|v9u1xI#NK1x%T(ts2&PJo8hRxXfU_s=z&pWW0Sgr zX#5s_n>I$1?u3hBxT>dd{9WEvvAHxEEcs4w^|DW2eu;j8RVGs=-n3~#AvoZg=h~&Y z(;@g)Vf7w7K;v)`O9QMmhQ!f_Ui$QU1dolTQ__oVw(l)s+e7>bllJ)<#=4bGGSd$| zxe}c3lI_G|aS&v;;R=I`D7c4Q+jcuEbjyU;N*_ybTi>m)kH^O<Kfx`xVWXt(TiuP3 zxh&VPve0ibs{!A!iC)Mst3JraOg`S8>QMPB9R0Nh7mQb%>?Jl>tj%xk#_WQ%XNx7+ zgLPv8oFt$vZuH@yQR3=UIpF5w2o-y4D)<V+a655h*i{vU^3OARwv-W~x=*35(&q(2 z`5HuP$(#la3np+(ddte9ce)MFW6C^SKtR=rIseklP%8n-5c6=)tKtHQ>DnED_-pVT z)e6x&l<!)ZN?%$ouh%Dm(-8fJ?g!4CcRMf)<b@0|I}OD8B>DyCqs>7nbWP+tPSG>( z^Y@$rG&VEuw~PMmvv*z*ir6(4)N2Z3w!Vb9A~-VAE=&V$x#0vnkwJ%Ba!*4Ft>`$p zA_8M-y;NOjB%aZy8aQxo`^|d!dExYqh`?zy<Sc@2^ZE}fV*cnUgJ?+W+)%24kPrQW zDtXl1AbQUmo<5aWo!*f@klfg!RTIEFoPjITu-P8?P9E`<u$V_EKeqtxxNhZWhV;)* z_e1Ao@O>U7LtiF>Sxr#98z4LGe{mT93#ae@JC*X^FC+Jm1_%$_1BsNkbV#24!K!L+ zumjP`eIR#bpnP~_G+vq(D0KHna>3FI?dp}b7>(c{_n<AQMIB^Ap`5ZP;KK%sSoMpi zz)3mnvC6-kKJuQvJp%^ecM}Q#aLxNDhkiUAWTq6DFGzv^fzw}}+Cy3Z95MQzVSjSt zuTRs&^RkZ-Z_|QTwf~i1!M#PAzPh^9aeuQuzB8@mQ%hz<zG(W~+E!lyC9K;<q?z+A zv<^Bc*e+tO_{_ESlZEytA{8m>hx2c&XJl3hY_N@QM{UVN_&SjBIdJ_yW|{tb)%;UR z>VLnRlR#1%nYh`B@3g2EY9a-a>nqv~GAE5r|6iD~@qLy5py$p8y6}*O`GjPGtfV1Q zL_a$GnIoXZ_gNI=qPUE_ew2cw+~@}4wFN6k;2;GFRz1f1>6zoS-UX#s_+oUVbYbRO zf$cxdFX+MOC#Ixf?-(~jD7V`|=1_t8vW>Yc{t%p=I)mpyVv_S&XLV^mF-XEh(NvH= z>TAVT*^ZCwT(Fcu^Ix)mGQ1a4s}l~gk|0@fNM2Zl9nrBw!Wy@=K4DB|7g0h^1^7o3 yllGgZg0CH-$0=36P`O}C{bv%iVW@pAf-+YEBx&bEe-cPt2VnQ~)hktPpZ^a=hZQ0K literal 0 HcmV?d00001 diff --git a/metadata-service/war/src/main/resources/boot/data_platforms.json b/metadata-service/war/src/main/resources/boot/data_platforms.json index 3c70eda8561b8..0574f3fda4017 100644 --- a/metadata-service/war/src/main/resources/boot/data_platforms.json +++ b/metadata-service/war/src/main/resources/boot/data_platforms.json @@ -574,5 +574,15 @@ "type": "OTHERS", "logoUrl": "/assets/platforms/fivetranlogo.png" } + }, + { + "urn": "urn:li:dataPlatform:csv", + "aspect": { + "datasetNameDelimiter": ".", + "name": "csv", + "displayName": "CSV", + "type": "OTHERS", + "logoUrl": "/assets/platforms/csv-logo.png" + } } ] From 6a1801089116e04333ab20c80183ff73c0b2374c Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Mon, 4 Dec 2023 23:46:42 +0530 Subject: [PATCH 192/792] fix(vulns): 
node-fetch forwards secure headers to untrusted sites (#9375) unrelated smoke test failing --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 1986 ++++++++++++++------------------ 2 files changed, 893 insertions(+), 1095 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 22c88f9647dc2..c26338ea285fb 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -124,7 +124,7 @@ }, "devDependencies": { "@babel/plugin-proposal-private-property-in-object": "^7.21.11", - "@graphql-codegen/cli": "1.20.0", + "@graphql-codegen/cli": "^5.0.0", "@graphql-codegen/near-operation-file-preset": "^1.17.13", "@graphql-codegen/typescript-operations": "1.17.13", "@graphql-codegen/typescript-react-apollo": "2.2.1", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index d33299a79b13a..41b542da97550 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -184,6 +184,13 @@ signedsource "^1.0.0" yargs "^15.3.1" +"@ardatan/sync-fetch@^0.0.1": + version "0.0.1" + resolved "https://registry.yarnpkg.com/@ardatan/sync-fetch/-/sync-fetch-0.0.1.tgz#3385d3feedceb60a896518a1db857ec1e945348f" + integrity sha512-xhlTqH0m31mnsG0tIP4ETgfSB6gXDaYYsUWTrlUV93fFQPI9dd8hE0Ot6MHLCtqgB32hwJAC3YZMWlXZw7AleA== + dependencies: + node-fetch "^2.6.1" + "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" @@ -204,6 +211,11 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== +"@babel/compat-data@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" + integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== + "@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.14.0", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" @@ -225,6 +237,27 @@ json5 "^2.2.2" semver "^6.3.0" +"@babel/core@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.5.tgz#6e23f2acbcb77ad283c5ed141f824fd9f70101c7" + integrity sha512-Cwc2XjUrG4ilcfOw4wBAK+enbdgwAcAJCfGUItPBKR7Mjw4aEfAFYrLxeRp4jWgtNIKn3n2AlBOfwwafl+42/g== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helpers" "^7.23.5" + "@babel/parser" "^7.23.5" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + "@babel/eslint-parser@^7.16.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.5.tgz#fa032503b9e2d188e25b1b95d29e8b8431042d78" @@ -244,7 +277,7 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/generator@^7.23.5": 
+"@babel/generator@^7.18.13", "@babel/generator@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.5.tgz#17d0a1ea6b62f351d281350a5f80b87a810c4755" integrity sha512-BPssCHrBD+0YrxviOa3QzpqwhNIXKEtOa2jQrm4FlmkC2apYgRnQcmPWiGZDlGxiNtltnUFolMe8497Esry+jA== @@ -279,6 +312,17 @@ lru-cache "^5.1.1" semver "^6.3.0" +"@babel/helper-compilation-targets@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + integrity sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.15" + browserslist "^4.21.9" + lru-cache "^5.1.1" + semver "^6.3.1" + "@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0", "@babel/helper-create-class-features-plugin@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.5.tgz#2192a1970ece4685fbff85b48da2c32fcb130b7c" @@ -362,6 +406,13 @@ dependencies: "@babel/types" "^7.22.5" +"@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== + dependencies: + "@babel/types" "^7.22.15" + "@babel/helper-module-transforms@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" @@ -376,6 +427,17 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-module-transforms@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" + integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-optimise-call-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" @@ -448,16 +510,21 @@ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== -"@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" - integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== - "@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-identifier@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" + integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== + +"@babel/helper-validator-option@^7.22.15": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" + integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== + "@babel/helper-validator-option@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" @@ -482,6 +549,15 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helpers@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.5.tgz#52f522840df8f1a848d06ea6a79b79eefa72401e" + integrity sha512-oO7us8FzTEsG3U6ag9MfdF1iA/7Z6dz+MtFhifZk8C8o453rGJFFWUP1t+ULM9TUIAzC9uxXEiXjOiVMyd7QPg== + dependencies: + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + "@babel/highlight@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" @@ -500,17 +576,12 @@ chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@7.12.16": - version "7.12.16" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" - integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== - "@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== -"@babel/parser@^7.22.15", "@babel/parser@^7.23.5": +"@babel/parser@^7.16.8", "@babel/parser@^7.22.15", "@babel/parser@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" integrity sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== @@ -673,6 +744,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" +"@babel/plugin-syntax-import-assertions@^7.20.0": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz#9c05a7f592982aff1a2768260ad84bcd3f0c77fc" + integrity sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-import-assertions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" @@ -1384,7 +1462,7 
@@ dependencies: regenerator-runtime "^0.13.11" -"@babel/template@^7.22.15": +"@babel/template@^7.18.10", "@babel/template@^7.22.15": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== @@ -1402,7 +1480,7 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@7.12.13", "@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": +"@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.22.5", "@babel/traverse@^7.23.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" integrity sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w== @@ -1418,15 +1496,6 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/types@7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.12.13.tgz#8be1aa8f2c876da11a9cf650c0ecf656913ad611" - integrity sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ== - dependencies: - "@babel/helper-validator-identifier" "^7.12.11" - lodash "^4.17.19" - to-fast-properties "^2.0.0" - "@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" @@ -1436,7 +1505,7 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" -"@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": +"@babel/types@^7.16.8", "@babel/types@^7.18.13", "@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" integrity sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w== @@ -1727,16 +1796,6 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.3.0.tgz#ea89004119dc42db2e1dba0f97d553f7372f6fcb" integrity sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg== -"@endemolshinegroup/cosmiconfig-typescript-loader@3.0.2": - version "3.0.2" - resolved "https://registry.yarnpkg.com/@endemolshinegroup/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-3.0.2.tgz#eea4635828dde372838b0909693ebd9aafeec22d" - integrity sha512-QRVtqJuS1mcT56oHpVegkKBlgtWjXw/gHNWO3eL9oyB5Sc7HBoc2OLG/nYpVfT/Jejvo3NUrD0Udk7XgoyDKkA== - dependencies: - lodash.get "^4" - make-error "^1" - ts-node "^9" - tslib "^2" - "@eslint-community/eslint-utils@^4.2.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -1777,66 +1836,55 @@ "@graphql-codegen/plugin-helpers" "^1.18.2" tslib "~2.0.1" -"@graphql-codegen/cli@1.20.0": - version "1.20.0" - resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-1.20.0.tgz#e1bb62fce07caaf1395ca6e94ffc0f2ba1f57938" - integrity 
sha512-5pLtZoaqEmEui6PR7IArmD23VLD3++UQby6iNe4NFG4eMcRai2raIM0E4a/MSn7SjyfSRguekYMMC5JKS1VgQw== - dependencies: - "@graphql-codegen/core" "1.17.9" - "@graphql-codegen/plugin-helpers" "^1.18.2" - "@graphql-tools/apollo-engine-loader" "^6" - "@graphql-tools/code-file-loader" "^6" - "@graphql-tools/git-loader" "^6" - "@graphql-tools/github-loader" "^6" - "@graphql-tools/graphql-file-loader" "^6" - "@graphql-tools/json-file-loader" "^6" - "@graphql-tools/load" "^6" - "@graphql-tools/prisma-loader" "^6" - "@graphql-tools/url-loader" "^6" - "@graphql-tools/utils" "^7.0.0" - ansi-escapes "^4.3.1" - camel-case "^4.1.2" +"@graphql-codegen/cli@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-5.0.0.tgz#761dcf08cfee88bbdd9cdf8097b2343445ec6f0a" + integrity sha512-A7J7+be/a6e+/ul2KI5sfJlpoqeqwX8EzktaKCeduyVKgOLA6W5t+NUGf6QumBDXU8PEOqXk3o3F+RAwCWOiqA== + dependencies: + "@babel/generator" "^7.18.13" + "@babel/template" "^7.18.10" + "@babel/types" "^7.18.13" + "@graphql-codegen/core" "^4.0.0" + "@graphql-codegen/plugin-helpers" "^5.0.1" + "@graphql-tools/apollo-engine-loader" "^8.0.0" + "@graphql-tools/code-file-loader" "^8.0.0" + "@graphql-tools/git-loader" "^8.0.0" + "@graphql-tools/github-loader" "^8.0.0" + "@graphql-tools/graphql-file-loader" "^8.0.0" + "@graphql-tools/json-file-loader" "^8.0.0" + "@graphql-tools/load" "^8.0.0" + "@graphql-tools/prisma-loader" "^8.0.0" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.8.0" chalk "^4.1.0" - chokidar "^3.4.3" - common-tags "^1.8.0" - constant-case "^3.0.3" - cosmiconfig "^7.0.0" + cosmiconfig "^8.1.3" debounce "^1.2.0" - dependency-graph "^0.9.0" detect-indent "^6.0.0" - glob "^7.1.6" - graphql-config "^3.2.0" - indent-string "^4.0.0" - inquirer "^7.3.3" + graphql-config "^5.0.2" + inquirer "^8.0.0" is-glob "^4.0.1" + jiti "^1.17.1" json-to-pretty-yaml "^1.2.2" - latest-version "5.1.0" - listr "^0.14.3" - listr-update-renderer "^0.5.0" + listr2 "^4.0.5" log-symbols "^4.0.0" - lower-case "^2.0.1" - minimatch "^3.0.4" - mkdirp "^1.0.4" - pascal-case "^3.1.1" - request "^2.88.2" + micromatch "^4.0.5" + shell-quote "^1.7.3" string-env-interpolation "^1.0.1" ts-log "^2.2.3" - tslib "~2.0.1" - upper-case "^2.0.2" - valid-url "^1.0.9" - wrap-ansi "^7.0.0" - yaml "^1.10.0" - yargs "^16.1.1" + tslib "^2.4.0" + yaml "^2.3.1" + yargs "^17.0.0" -"@graphql-codegen/core@1.17.9": - version "1.17.9" - resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-1.17.9.tgz#c03e71018ff04d26f5139a2d90a32b31d3bb2b43" - integrity sha512-7nwy+bMWqb0iYJ2DKxA9UiE16meeJ2Ch2XWS/N/ZnA0snTR+GZ20USI8z6YqP1Fuist7LvGO1MbitO2qBT8raA== +"@graphql-codegen/core@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-4.0.0.tgz#b29c911746a532a675e33720acb4eb2119823e01" + integrity sha512-JAGRn49lEtSsZVxeIlFVIRxts2lWObR+OQo7V2LHDJ7ohYYw3ilv7nJ8pf8P4GTg/w6ptcYdSdVVdkI8kUHB/Q== dependencies: - "@graphql-codegen/plugin-helpers" "^1.18.2" - "@graphql-tools/merge" "^6" - "@graphql-tools/utils" "^6" - tslib "~2.0.1" + "@graphql-codegen/plugin-helpers" "^5.0.0" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.0" + tslib "~2.5.0" "@graphql-codegen/fragment-matcher@^5.0.0": version "5.0.0" @@ -1868,7 +1916,7 @@ lodash "~4.17.0" tslib "~2.3.0" -"@graphql-codegen/plugin-helpers@^5.0.0": +"@graphql-codegen/plugin-helpers@^5.0.0", "@graphql-codegen/plugin-helpers@^5.0.1": version "5.0.1" resolved 
"https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.1.tgz#e2429fcfba3f078d5aa18aa062d46c922bbb0d55" integrity sha512-6L5sb9D8wptZhnhLLBcheSPU7Tg//DGWgc5tQBWX46KYTOTQHGqDpv50FxAJJOyFVJrveN9otWk9UT9/yfY4ww== @@ -1929,125 +1977,181 @@ parse-filepath "^1.0.2" tslib "~2.3.0" -"@graphql-tools/apollo-engine-loader@^6": - version "6.2.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-6.2.5.tgz#b9e65744f522bb9f6ca50651e5622820c4f059a8" - integrity sha512-CE4uef6PyxtSG+7OnLklIr2BZZDgjO89ZXK47EKdY7jQy/BQD/9o+8SxPsgiBc+2NsDJH2I6P/nqoaJMOEat6g== +"@graphql-tools/apollo-engine-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.0.tgz#ac1f351cbe41508411784f25757f5557b0f27489" + integrity sha512-axQTbN5+Yxs1rJ6cWQBOfw3AEeC+fvIuZSfJLPLLvFJLj4pUm9fhxey/g6oQZAAQJqKPfw+tLDUQvnfvRK8Kmg== dependencies: - "@graphql-tools/utils" "^7.0.0" - cross-fetch "3.0.6" - tslib "~2.0.1" + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.9.0" + tslib "^2.4.0" -"@graphql-tools/batch-execute@^7.1.2": - version "7.1.2" - resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-7.1.2.tgz#35ba09a1e0f80f34f1ce111d23c40f039d4403a0" - integrity sha512-IuR2SB2MnC2ztA/XeTMTfWcA0Wy7ZH5u+nDkDNLAdX+AaSyDnsQS35sCmHqG0VOGTl7rzoyBWLCKGwSJplgtwg== +"@graphql-tools/batch-execute@^9.0.1": + version "9.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-9.0.2.tgz#5ac3257501e7941fad40661bb5e1110d6312f58b" + integrity sha512-Y2uwdZI6ZnatopD/SYfZ1eGuQFI7OU2KGZ2/B/7G9ISmgMl5K+ZZWz/PfIEXeiHirIDhyk54s4uka5rj2xwKqQ== dependencies: - "@graphql-tools/utils" "^7.7.0" - dataloader "2.0.0" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/utils" "^10.0.5" + dataloader "^2.2.2" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/code-file-loader@^6": - version "6.3.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-6.3.1.tgz#42dfd4db5b968acdb453382f172ec684fa0c34ed" - integrity sha512-ZJimcm2ig+avgsEOWWVvAaxZrXXhiiSZyYYOJi0hk9wh5BxZcLUNKkTp6EFnZE/jmGUwuos3pIjUD3Hwi3Bwhg== +"@graphql-tools/code-file-loader@^8.0.0": + version "8.0.3" + resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-8.0.3.tgz#8e1e8c2fc05c94614ce25c3cee36b3b4ec08bb64" + integrity sha512-gVnnlWs0Ua+5FkuHHEriFUOI3OIbHv6DS1utxf28n6NkfGMJldC4j0xlJRY0LS6dWK34IGYgD4HelKYz2l8KiA== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.5.1" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/graphql-tag-pluck" "8.1.0" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/delegate@^7.0.1", "@graphql-tools/delegate@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-7.1.5.tgz#0b027819b7047eff29bacbd5032e34a3d64bd093" - integrity sha512-bQu+hDd37e+FZ0CQGEEczmRSfQRnnXeUxI/0miDV+NV/zCbEdIJj5tYFNrKT03W6wgdqx8U06d8L23LxvGri/g== +"@graphql-tools/delegate@^10.0.0", "@graphql-tools/delegate@^10.0.3": + version "10.0.3" + resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-10.0.3.tgz#2d0e133da94ca92c24e0c7360414e5592321cf2d" + integrity sha512-Jor9oazZ07zuWkykD3OOhT/2XD74Zm6Ar0ENZMk75MDD51wB2UWUIMljtHxbJhV5A6UBC2v8x6iY0xdCGiIlyw== dependencies: - "@ardatan/aggregate-error" "0.0.6" - 
"@graphql-tools/batch-execute" "^7.1.2" - "@graphql-tools/schema" "^7.1.5" - "@graphql-tools/utils" "^7.7.1" - dataloader "2.0.0" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/batch-execute" "^9.0.1" + "@graphql-tools/executor" "^1.0.0" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.5" + dataloader "^2.2.2" + tslib "^2.5.0" + +"@graphql-tools/executor-graphql-ws@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-1.1.0.tgz#7727159ebaa9df4dc793d0d02e74dd1ca4a7cc60" + integrity sha512-yM67SzwE8rYRpm4z4AuGtABlOp9mXXVy6sxXnTJRoYIdZrmDbKVfIY+CpZUJCqS0FX3xf2+GoHlsj7Qswaxgcg== + dependencies: + "@graphql-tools/utils" "^10.0.2" + "@types/ws" "^8.0.0" + graphql-ws "^5.14.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + ws "^8.13.0" -"@graphql-tools/git-loader@^6": - version "6.2.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-6.2.6.tgz#c2226f4b8f51f1c05c9ab2649ba32d49c68cd077" - integrity sha512-ooQTt2CaG47vEYPP3CPD+nbA0F+FYQXfzrB1Y1ABN9K3d3O2RK3g8qwslzZaI8VJQthvKwt0A95ZeE4XxteYfw== +"@graphql-tools/executor-http@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-http/-/executor-http-1.0.4.tgz#d4b3b32430c24b0167760d3b6ffb91846a3b6956" + integrity sha512-lSoPFWrGU6XT9nGGBogUI8bSOtP0yce2FhXTrU5akMZ35BDCNWbkmgryzRhxoAH/yDOaZtKkHQB3xrYX3uo5zA== + dependencies: + "@graphql-tools/utils" "^10.0.2" + "@repeaterjs/repeater" "^3.0.4" + "@whatwg-node/fetch" "^0.9.0" + extract-files "^11.0.0" + meros "^1.2.1" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/executor-legacy-ws@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.0.4.tgz#27fcccba782daf605d4cf34ffa85a675f43c33f6" + integrity sha512-b7aGuRekZDS+m3af3BIvMKxu15bmVPMt5eGQVuP2v5pxmbaPTh+iv5mx9b3Plt32z5Ke5tycBnNm5urSFtW8ng== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/utils" "^10.0.0" + "@types/ws" "^8.0.0" + isomorphic-ws "5.0.0" + tslib "^2.4.0" + ws "8.14.2" -"@graphql-tools/github-loader@^6": - version "6.2.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-6.2.5.tgz#460dff6f5bbaa26957a5ea3be4f452b89cc6a44b" - integrity sha512-DLuQmYeNNdPo8oWus8EePxWCfCAyUXPZ/p1PWqjrX/NGPyH2ZObdqtDAfRHztljt0F/qkBHbGHCEk2TKbRZTRw== +"@graphql-tools/executor@^1.0.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor/-/executor-1.2.0.tgz#6c45f4add765769d9820c4c4405b76957ba39c79" + integrity sha512-SKlIcMA71Dha5JnEWlw4XxcaJ+YupuXg0QCZgl2TOLFz4SkGCwU/geAsJvUJFwK2RbVLpQv/UMq67lOaBuwDtg== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - cross-fetch "3.0.6" - tslib "~2.0.1" + "@graphql-tools/utils" "^10.0.0" + "@graphql-typed-document-node/core" "3.2.0" + "@repeaterjs/repeater" "^3.0.4" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/graphql-file-loader@^6", "@graphql-tools/graphql-file-loader@^6.0.0": - version "6.2.7" - resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-6.2.7.tgz#d3720f2c4f4bb90eb2a03a7869a780c61945e143" - integrity sha512-5k2SNz0W87tDcymhEMZMkd6/vs6QawDyjQXWtqkuLTBF3vxjxPD1I4dwHoxgWPIjjANhXybvulD7E+St/7s9TQ== +"@graphql-tools/git-loader@^8.0.0": + version "8.0.3" + resolved 
"https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-8.0.3.tgz#a86d352b23a646c28d27282fef7694b846b31c44" + integrity sha512-Iz9KbRUAkuOe8JGTS0qssyJ+D5Snle17W+z9anwWrLFrkBhHrRFUy5AdjZqgJuhls0x30QkZBnnCtnHDBdQ4nA== dependencies: - "@graphql-tools/import" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/graphql-tag-pluck" "8.1.0" + "@graphql-tools/utils" "^10.0.0" + is-glob "4.0.3" + micromatch "^4.0.4" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/graphql-tag-pluck@^6.2.6", "@graphql-tools/graphql-tag-pluck@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-6.5.1.tgz#5fb227dbb1e19f4b037792b50f646f16a2d4c686" - integrity sha512-7qkm82iFmcpb8M6/yRgzjShtW6Qu2OlCSZp8uatA3J0eMl87TxyJoUmL3M3UMMOSundAK8GmoyNVFUrueueV5Q== +"@graphql-tools/github-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-8.0.0.tgz#683195800618364701cfea9bc6f88674486f053b" + integrity sha512-VuroArWKcG4yaOWzV0r19ElVIV6iH6UKDQn1MXemND0xu5TzrFme0kf3U9o0YwNo0kUYEk9CyFM0BYg4he17FA== dependencies: - "@babel/parser" "7.12.16" - "@babel/traverse" "7.12.13" - "@babel/types" "7.12.13" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/executor-http" "^1.0.0" + "@graphql-tools/graphql-tag-pluck" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.9.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/import@^6.2.6": - version "6.3.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-6.3.1.tgz#731c47ab6c6ac9f7994d75c76b6c2fa127d2d483" - integrity sha512-1szR19JI6WPibjYurMLdadHKZoG9C//8I/FZ0Dt4vJSbrMdVNp8WFxg4QnZrDeMG4MzZc90etsyF5ofKjcC+jw== +"@graphql-tools/graphql-file-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.0.0.tgz#a2026405bce86d974000455647511bf65df4f211" + integrity sha512-wRXj9Z1IFL3+zJG1HWEY0S4TXal7+s1vVhbZva96MSp0kbb/3JBF7j0cnJ44Eq0ClccMgGCDFqPFXty4JlpaPg== dependencies: + "@graphql-tools/import" "7.0.0" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/graphql-tag-pluck@8.1.0", "@graphql-tools/graphql-tag-pluck@^8.0.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.1.0.tgz#0745b6f0103eb725f10c5d4c1a9438670bb8e05b" + integrity sha512-kt5l6H/7QxQcIaewInTcune6NpATojdFEW98/8xWcgmy7dgXx5vU9e0AicFZIH+ewGyZzTpwFqO2RI03roxj2w== + dependencies: + "@babel/core" "^7.22.9" + "@babel/parser" "^7.16.8" + "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/traverse" "^7.16.8" + "@babel/types" "^7.16.8" + "@graphql-tools/utils" "^10.0.0" + tslib "^2.4.0" + +"@graphql-tools/import@7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-7.0.0.tgz#a6a91a90a707d5f46bad0fd3fde2f407b548b2be" + integrity sha512-NVZiTO8o1GZs6OXzNfjB+5CtQtqsZZpQOq+Uu0w57kdUkT4RlQKlwhT8T81arEsbV55KpzkpFsOZP7J1wdmhBw== + dependencies: + "@graphql-tools/utils" "^10.0.0" resolve-from "5.0.0" - tslib "~2.2.0" + tslib "^2.4.0" -"@graphql-tools/json-file-loader@^6", "@graphql-tools/json-file-loader@^6.0.0": - version "6.2.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-6.2.6.tgz#830482cfd3721a0799cbf2fe5b09959d9332739a" - integrity 
sha512-CnfwBSY5926zyb6fkDBHnlTblHnHI4hoBALFYXnrg0Ev4yWU8B04DZl/pBRUc459VNgO2x8/mxGIZj2hPJG1EA== +"@graphql-tools/json-file-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-8.0.0.tgz#9b1b62902f766ef3f1c9cd1c192813ea4f48109c" + integrity sha512-ki6EF/mobBWJjAAC84xNrFMhNfnUFD6Y0rQMGXekrUgY0NdeYXHU0ZUgHzC9O5+55FslqUmAUHABePDHTyZsLg== dependencies: - "@graphql-tools/utils" "^7.0.0" - tslib "~2.0.1" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/load@^6", "@graphql-tools/load@^6.0.0": - version "6.2.8" - resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-6.2.8.tgz#16900fb6e75e1d075cad8f7ea439b334feb0b96a" - integrity sha512-JpbyXOXd8fJXdBh2ta0Q4w8ia6uK5FHzrTNmcvYBvflFuWly2LDTk2abbSl81zKkzswQMEd2UIYghXELRg8eTA== +"@graphql-tools/load@^8.0.0": + version "8.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-8.0.1.tgz#498f2230448601cb87894b8a93df7867daef69ea" + integrity sha512-qSMsKngJhDqRbuWyo3NvakEFqFL6+eSjy8ooJ1o5qYD26N7dqXkKzIMycQsX7rBK19hOuINAUSaRcVWH6hTccw== dependencies: - "@graphql-tools/merge" "^6.2.12" - "@graphql-tools/utils" "^7.5.0" - globby "11.0.3" - import-from "3.0.0" - is-glob "4.0.1" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.11" p-limit "3.1.0" - tslib "~2.2.0" - unixify "1.0.0" - valid-url "1.0.9" + tslib "^2.4.0" -"@graphql-tools/merge@^6", "@graphql-tools/merge@^6.0.0", "@graphql-tools/merge@^6.2.12": - version "6.2.14" - resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-6.2.14.tgz#694e2a2785ba47558e5665687feddd2935e9d94e" - integrity sha512-RWT4Td0ROJai2eR66NHejgf8UwnXJqZxXgDWDI+7hua5vNA2OW8Mf9K1Wav1ZkjWnuRp4ztNtkZGie5ISw55ow== +"@graphql-tools/merge@^9.0.0", "@graphql-tools/merge@^9.0.1": + version "9.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-9.0.1.tgz#693f15da152339284469b1ce5c6827e3ae350a29" + integrity sha512-hIEExWO9fjA6vzsVjJ3s0cCQ+Q/BEeMVJZtMXd7nbaVefVy0YDyYlEkeoYYNV3NVVvu1G9lr6DM1Qd0DGo9Caw== dependencies: - "@graphql-tools/schema" "^7.0.0" - "@graphql-tools/utils" "^7.7.0" - tslib "~2.2.0" + "@graphql-tools/utils" "^10.0.10" + tslib "^2.4.0" "@graphql-tools/optimize@^1.0.1": version "1.3.1" @@ -2056,31 +2160,28 @@ dependencies: tslib "^2.4.0" -"@graphql-tools/prisma-loader@^6": - version "6.3.0" - resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-6.3.0.tgz#c907e17751ff2b26e7c2bc75d0913ebf03f970da" - integrity sha512-9V3W/kzsFBmUQqOsd96V4a4k7Didz66yh/IK89B1/rrvy9rYj+ULjEqR73x9BYZ+ww9FV8yP8LasWAJwWaqqJQ== +"@graphql-tools/prisma-loader@^8.0.0": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-8.0.2.tgz#3a7126ec2389a7aa7846bd0e441629ac5a1934fc" + integrity sha512-8d28bIB0bZ9Bj0UOz9sHagVPW+6AHeqvGljjERtwCnWl8OCQw2c2pNboYXISLYUG5ub76r4lDciLLTU+Ks7Q0w== dependencies: - "@graphql-tools/url-loader" "^6.8.2" - "@graphql-tools/utils" "^7.0.0" - "@types/http-proxy-agent" "^2.0.2" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.8" "@types/js-yaml" "^4.0.0" "@types/json-stable-stringify" "^1.0.32" - "@types/jsonwebtoken" "^8.5.0" + "@whatwg-node/fetch" "^0.9.0" chalk "^4.1.0" debug "^4.3.1" - dotenv "^8.2.0" - graphql-request "^3.3.0" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - isomorphic-fetch "^3.0.0" + dotenv "^16.0.0" + graphql-request "^6.0.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.0" + 
jose "^5.0.0" js-yaml "^4.0.0" json-stable-stringify "^1.0.1" - jsonwebtoken "^8.5.1" lodash "^4.17.20" - replaceall "^0.1.6" scuid "^1.1.0" - tslib "~2.1.0" + tslib "^2.4.0" yaml-ast-parser "^0.0.43" "@graphql-tools/relay-operation-optimizer@^6.3.0": @@ -2092,39 +2193,34 @@ "@graphql-tools/utils" "9.1.3" tslib "^2.4.0" -"@graphql-tools/schema@^7.0.0", "@graphql-tools/schema@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-7.1.5.tgz#07b24e52b182e736a6b77c829fc48b84d89aa711" - integrity sha512-uyn3HSNSckf4mvQSq0Q07CPaVZMNFCYEVxroApOaw802m9DcZPgf9XVPy/gda5GWj9AhbijfRYVTZQgHnJ4CXA== +"@graphql-tools/schema@^10.0.0": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-10.0.2.tgz#21bc2ee25a65fb4890d2e5f9f22ef1f733aa81da" + integrity sha512-TbPsIZnWyDCLhgPGnDjt4hosiNU2mF/rNtSk5BVaXWnZqvKJ6gzJV4fcHcvhRIwtscDMW2/YTnK6dLVnk8pc4w== dependencies: - "@graphql-tools/utils" "^7.1.2" - tslib "~2.2.0" - value-or-promise "1.0.6" - -"@graphql-tools/url-loader@^6", "@graphql-tools/url-loader@^6.0.0", "@graphql-tools/url-loader@^6.8.2": - version "6.10.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-6.10.1.tgz#dc741e4299e0e7ddf435eba50a1f713b3e763b33" - integrity sha512-DSDrbhQIv7fheQ60pfDpGD256ixUQIR6Hhf9Z5bRjVkXOCvO5XrkwoWLiU7iHL81GB1r0Ba31bf+sl+D4nyyfw== - dependencies: - "@graphql-tools/delegate" "^7.0.1" - "@graphql-tools/utils" "^7.9.0" - "@graphql-tools/wrap" "^7.0.4" - "@microsoft/fetch-event-source" "2.0.1" - "@types/websocket" "1.0.2" - abort-controller "3.0.0" - cross-fetch "3.1.4" - extract-files "9.0.0" - form-data "4.0.0" - graphql-ws "^4.4.1" - is-promise "4.0.0" - isomorphic-ws "4.0.1" - lodash "4.17.21" - meros "1.1.4" - subscriptions-transport-ws "^0.9.18" - sync-fetch "0.3.0" - tslib "~2.2.0" - valid-url "1.0.9" - ws "7.4.5" + "@graphql-tools/merge" "^9.0.1" + "@graphql-tools/utils" "^10.0.10" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/url-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-8.0.0.tgz#8d952d5ebb7325e587cb914aaebded3dbd078cf6" + integrity sha512-rPc9oDzMnycvz+X+wrN3PLrhMBQkG4+sd8EzaFN6dypcssiefgWKToXtRKI8HHK68n2xEq1PyrOpkjHFJB+GwA== + dependencies: + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/delegate" "^10.0.0" + "@graphql-tools/executor-graphql-ws" "^1.0.0" + "@graphql-tools/executor-http" "^1.0.0" + "@graphql-tools/executor-legacy-ws" "^1.0.0" + "@graphql-tools/utils" "^10.0.0" + "@graphql-tools/wrap" "^10.0.0" + "@types/ws" "^8.0.0" + "@whatwg-node/fetch" "^0.9.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.11" + ws "^8.12.0" "@graphql-tools/utils@9.1.3": version "9.1.3" @@ -2143,16 +2239,17 @@ dset "^3.1.2" tslib "^2.4.0" -"@graphql-tools/utils@^6": - version "6.2.4" - resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-6.2.4.tgz#38a2314d2e5e229ad4f78cca44e1199e18d55856" - integrity sha512-ybgZ9EIJE3JMOtTrTd2VcIpTXtDrn2q6eiYkeYMKRVh3K41+LZa6YnR2zKERTXqTWqhobROwLt4BZbw2O3Aeeg== +"@graphql-tools/utils@^10.0.10", "@graphql-tools/utils@^10.0.11", "@graphql-tools/utils@^10.0.2", "@graphql-tools/utils@^10.0.5", "@graphql-tools/utils@^10.0.8": + version "10.0.11" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-10.0.11.tgz#1238fbe37e8d6c662c48ab2477c98269d6fd851a" + integrity sha512-vVjXgKn6zjXIlYBd7yJxCVMYGb5j18gE3hx3Qw3mNsSEsYQXbJbPdlwb7Fc9FogsJei5AaqiQerqH4kAosp1nQ== dependencies: - 
"@ardatan/aggregate-error" "0.0.6" - camel-case "4.1.1" - tslib "~2.0.1" + "@graphql-typed-document-node/core" "^3.1.1" + cross-inspect "1.0.0" + dset "^3.1.2" + tslib "^2.4.0" -"@graphql-tools/utils@^7.0.0", "@graphql-tools/utils@^7.1.2", "@graphql-tools/utils@^7.5.0", "@graphql-tools/utils@^7.7.0", "@graphql-tools/utils@^7.7.1", "@graphql-tools/utils@^7.8.1", "@graphql-tools/utils@^7.9.0", "@graphql-tools/utils@^7.9.1": +"@graphql-tools/utils@^7.9.1": version "7.10.0" resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-7.10.0.tgz#07a4cb5d1bec1ff1dc1d47a935919ee6abd38699" integrity sha512-d334r6bo9mxdSqZW6zWboEnnOOFRrAPVQJ7LkU8/6grglrbcu6WhwCLzHb90E94JI3TD3ricC3YGbUqIi9Xg0w== @@ -2161,27 +2258,27 @@ camel-case "4.1.2" tslib "~2.2.0" -"@graphql-tools/wrap@^7.0.4": - version "7.0.8" - resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-7.0.8.tgz#ad41e487135ca3ea1ae0ea04bb3f596177fb4f50" - integrity sha512-1NDUymworsOlb53Qfh7fonDi2STvqCtbeE68ntKY9K/Ju/be2ZNxrFSbrBHwnxWcN9PjISNnLcAyJ1L5tCUyhg== +"@graphql-tools/wrap@^10.0.0": + version "10.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-10.0.1.tgz#9e3d27d2723962c26c4377d5d7ab0d3038bf728c" + integrity sha512-Cw6hVrKGM2OKBXeuAGltgy4tzuqQE0Nt7t/uAqnuokSXZhMHXJUb124Bnvxc2gPZn5chfJSDafDe4Cp8ZAVJgg== dependencies: - "@graphql-tools/delegate" "^7.1.5" - "@graphql-tools/schema" "^7.1.5" - "@graphql-tools/utils" "^7.8.1" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/delegate" "^10.0.3" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-typed-document-node/core@3.2.0", "@graphql-typed-document-node/core@^3.1.1", "@graphql-typed-document-node/core@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== "@graphql-typed-document-node/core@^3.0.0": version "3.1.0" resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950" integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg== -"@graphql-typed-document-node/core@^3.1.1": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" - integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== - "@hapi/hoek@^9.0.0": version "9.2.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz#f3933a44e365864f4dad5db94158106d511e8131" @@ -2213,11 +2310,6 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - "@icons/material@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@icons/material/-/material-0.2.4.tgz#e90c9f71768b3736e76d7dd6783fc6c2afa88bc8" @@ -2546,11 +2638,6 @@ refractor "^3.3.1" unist-util-visit "^2.0.3" 
-"@microsoft/fetch-event-source@2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@microsoft/fetch-event-source/-/fetch-event-source-2.0.1.tgz#9ceecc94b49fbaa15666e38ae8587f64acce007d" - integrity sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA== - "@miragejs/graphql@^0.1.11": version "0.1.12" resolved "https://registry.npmjs.org/@miragejs/graphql/-/graphql-0.1.12.tgz#60679c4ad807fc4a001bc88aba396ba3fa5a958b" @@ -2700,6 +2787,33 @@ dependencies: svgmoji "^3.2.0" +"@peculiar/asn1-schema@^2.3.6": + version "2.3.8" + resolved "https://registry.yarnpkg.com/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz#04b38832a814e25731232dd5be883460a156da3b" + integrity sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA== + dependencies: + asn1js "^3.0.5" + pvtsutils "^1.3.5" + tslib "^2.6.2" + +"@peculiar/json-schema@^1.1.12": + version "1.1.12" + resolved "https://registry.yarnpkg.com/@peculiar/json-schema/-/json-schema-1.1.12.tgz#fe61e85259e3b5ba5ad566cb62ca75b3d3cd5339" + integrity sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w== + dependencies: + tslib "^2.0.0" + +"@peculiar/webcrypto@^1.4.0": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@peculiar/webcrypto/-/webcrypto-1.4.3.tgz#078b3e8f598e847b78683dc3ba65feb5029b93a7" + integrity sha512-VtaY4spKTdN5LjJ04im/d/joXuvLbQdgy5Z4DXF4MFZhQ+MTrejbNMkfZBp1Bs3O5+bFqnJgyGdPuZQflvIa5A== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + pvtsutils "^1.3.2" + tslib "^2.5.0" + webcrypto-core "^1.7.7" + "@pmmmwh/react-refresh-webpack-plugin@^0.5.3": version "0.5.10" resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz#2eba163b8e7dbabb4ce3609ab5e32ab63dda3ef8" @@ -3701,6 +3815,11 @@ dependencies: type-fest "^2.0.0" +"@repeaterjs/repeater@^3.0.4": + version "3.0.5" + resolved "https://registry.yarnpkg.com/@repeaterjs/repeater/-/repeater-3.0.5.tgz#b77571685410217a548a9c753aa3cdfc215bfc78" + integrity sha512-l3YHBLAol6d/IKnB9LhpD0cEZWAoe3eFKUyTYWmFmCO2Q/WOckxLQAUyMZWwZV2M/m3+4vgRoaolFqaII82/TA== + "@rollup/plugin-babel@^5.2.0": version "5.3.1" resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" @@ -3748,13 +3867,6 @@ resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.2.tgz#31b9c510d8cada9683549e1dbb4284cca5001faf" integrity sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw== -"@samverschueren/stream-to-observable@^0.3.0": - version "0.3.1" - resolved "https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz#a21117b19ee9be70c379ec1877537ef2e1c63301" - integrity sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ== - dependencies: - any-observable "^0.3.0" - "@seznam/compose-react-refs@^1.0.6": version "1.0.6" resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" @@ -3782,11 +3894,6 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved 
"https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== - "@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" @@ -3979,13 +4086,6 @@ "@svgr/plugin-svgo" "^5.5.0" loader-utils "^2.0.0" -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== - dependencies: - defer-to-connect "^1.0.1" - "@testing-library/dom@^7.28.1": version "7.31.0" resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.31.0.tgz#938451abd3ca27e1b69bb395d4a40759fd7f5b3b" @@ -4300,13 +4400,6 @@ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== -"@types/http-proxy-agent@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/http-proxy-agent/-/http-proxy-agent-2.0.2.tgz#942c1f35c7e1f0edd1b6ffae5d0f9051cfb32be1" - integrity sha512-2S6IuBRhqUnH1/AUx9k8KWtY3Esg4eqri946MnxTG5HwehF1S5mqLln8fcyMiuQkY72p2gH3W+rIPqp5li0LyQ== - dependencies: - "@types/node" "*" - "@types/http-proxy@^1.17.5", "@types/http-proxy@^1.17.8": version "1.17.11" resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" @@ -4366,13 +4459,6 @@ resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= -"@types/jsonwebtoken@^8.5.0": - version "8.5.1" - resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#56958cb2d80f6d74352bd2e501a018e2506a8a84" - integrity sha512-rNAPdomlIUX0i0cg2+I+Q1wOUr531zHBQ+cV/28PJ39bSPKjahatZZ2LMuhiguETkCgLVzfruw/ZvNMNkKoSzw== - dependencies: - "@types/node" "*" - "@types/lodash@^4.14.172": version "4.14.195" resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.195.tgz#bafc975b252eb6cea78882ce8a7b6bf22a6de632" @@ -4659,10 +4745,10 @@ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== -"@types/websocket@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@types/websocket/-/websocket-1.0.2.tgz#d2855c6a312b7da73ed16ba6781815bf30c6187a" - integrity sha512-B5m9aq7cbbD/5/jThEr33nUY8WEfVi6A2YKCTOvw5Ldy7mtsOkqRvGjnzy6g7iMMDsgu7xREuCzqATLDLQVKcQ== +"@types/ws@^8.0.0": + version "8.5.10" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.10.tgz#4acfb517970853fa6574a3a6886791d04a396787" + integrity sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A== dependencies: "@types/node" "*" @@ -5213,6 +5299,57 @@ "@webassemblyjs/ast" "1.11.6" "@xtuc/long" "4.2.2" +"@whatwg-node/events@^0.0.3": + version "0.0.3" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.0.3.tgz#13a65dd4f5893f55280f766e29ae48074927acad" + integrity 
sha512-IqnKIDWfXBJkvy/k6tzskWTc2NK3LcqHlb+KHGCrjOCH4jfQckRX0NAiIcC/vIqQkzLYw2r2CTSwAxcrtcD6lA== + +"@whatwg-node/events@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.1.1.tgz#0ca718508249419587e130da26d40e29d99b5356" + integrity sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w== + +"@whatwg-node/fetch@^0.8.0": + version "0.8.8" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.8.8.tgz#48c6ad0c6b7951a73e812f09dd22d75e9fa18cae" + integrity sha512-CdcjGC2vdKhc13KKxgsc6/616BQ7ooDIgPeTuAiE8qfCnS0mGzcfCOoZXypQSz73nxI+GWc7ZReIAVhxoE1KCg== + dependencies: + "@peculiar/webcrypto" "^1.4.0" + "@whatwg-node/node-fetch" "^0.3.6" + busboy "^1.6.0" + urlpattern-polyfill "^8.0.0" + web-streams-polyfill "^3.2.1" + +"@whatwg-node/fetch@^0.9.0": + version "0.9.14" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.9.14.tgz#262039fd8aea52a9c8aac2ec20f316382eae1a3c" + integrity sha512-wurZC82zzZwXRDSW0OS9l141DynaJQh7Yt0FD1xZ8niX7/Et/7RoiLiltbVU1fSF1RR9z6ndEaTUQBAmddTm1w== + dependencies: + "@whatwg-node/node-fetch" "^0.5.0" + urlpattern-polyfill "^9.0.0" + +"@whatwg-node/node-fetch@^0.3.6": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.3.6.tgz#e28816955f359916e2d830b68a64493124faa6d0" + integrity sha512-w9wKgDO4C95qnXZRwZTfCmLWqyRnooGjcIwG0wADWjw9/HN0p7dtvtgSvItZtUyNteEvgTrd8QojNEqV6DAGTA== + dependencies: + "@whatwg-node/events" "^0.0.3" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + +"@whatwg-node/node-fetch@^0.5.0": + version "0.5.1" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.5.1.tgz#36a2bc31e5fc8cffa17826c192a8829d4c0ccc1e" + integrity sha512-sQz/s3NyyzIZxQ7PHxDFUMM1k4kQQbi2jU8ILdTbt5+S59ME8aI7XF30O9qohRIIYdSrUvm/OwKQmVP1y6e2WQ== + dependencies: + "@whatwg-node/events" "^0.1.0" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + "@wry/context@^0.6.0": version "0.6.0" resolved "https://registry.yarnpkg.com/@wry/context/-/context-0.6.0.tgz#f903eceb89d238ef7e8168ed30f4511f92d83e06" @@ -5263,13 +5400,6 @@ abab@^2.0.3, abab@^2.0.5: resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== -abort-controller@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" - integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== - dependencies: - event-target-shim "^5.0.0" - accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: version "1.3.8" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" @@ -5336,6 +5466,21 @@ agent-base@6: dependencies: debug "4" +agent-base@^7.0.2, agent-base@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434" + integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg== + dependencies: + debug "^4.3.4" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity 
sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + ajv-formats@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" @@ -5391,12 +5536,7 @@ analytics@^0.8.9: "@analytics/core" "^0.12.7" "@analytics/storage-utils" "^0.4.2" -ansi-escapes@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" - integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== - -ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0, ansi-escapes@^4.3.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== @@ -5408,16 +5548,11 @@ ansi-html-community@^0.0.8: resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== -ansi-regex@3.0.1, ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: +ansi-regex@3.0.1, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= - ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -5486,11 +5621,6 @@ antd@4.24.7: rc-util "^5.22.5" scroll-into-view-if-needed "^2.2.25" -any-observable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" - integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== - any-promise@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" @@ -5635,6 +5765,15 @@ asn1@~0.2.3: dependencies: safer-buffer "~2.1.0" +asn1js@^3.0.1, asn1js@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/asn1js/-/asn1js-3.0.5.tgz#5ea36820443dbefb51cc7f88a2ebb5b462114f38" + integrity sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ== + dependencies: + pvtsutils "^1.3.2" + pvutils "^1.1.3" + tslib "^2.4.0" + assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" @@ -5650,10 +5789,10 @@ ast-types-flow@^0.0.7: resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= -async-limiter@~1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" - integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== async-validator@^4.1.0: version "4.2.5" @@ -5942,11 +6081,6 @@ babel-preset-react-app@^10.0.1: babel-plugin-macros "^3.1.0" babel-plugin-transform-react-remove-prop-types "^0.4.24" -backo2@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" - integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= - bail@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" @@ -6012,6 +6146,15 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + blacklist@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/blacklist/-/blacklist-1.1.4.tgz#b2dd09d6177625b2caa69835a37b28995fa9a2f2" @@ -6101,6 +6244,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4 node-releases "^2.0.12" update-browserslist-db "^1.0.11" +browserslist@^4.21.9: + version "4.22.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" + integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== + dependencies: + caniuse-lite "^1.0.30001565" + electron-to-chromium "^1.4.601" + node-releases "^2.0.14" + update-browserslist-db "^1.0.13" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -6113,17 +6266,12 @@ btoa@^1.2.1: resolved "https://registry.yarnpkg.com/btoa/-/btoa-1.2.1.tgz#01a9909f8b2c93f6bf680ba26131eb30f7fa3d73" integrity sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g== -buffer-equal-constant-time@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" - integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk= - buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer@^5.7.0: +buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -6136,6 +6284,13 @@ builtin-modules@^3.1.0: resolved 
"https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== +busboy@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" @@ -6161,19 +6316,6 @@ cache-base@^1.0.1: union-value "^1.0.0" unset-value "^1.0.0" -cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" - call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" @@ -6187,14 +6329,6 @@ callsites@^3.0.0: resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== -camel-case@4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.1.tgz#1fc41c854f00e2f7d0139dfeba1542d6896fe547" - integrity sha512-7fa2WcG4fYFkclIvEmxBbTvmibwF2/agfEBc6q3lOpVu0A13ltLsA+Hr/8Hp6kp5f+G7hKi6t8lys6XxP+1K6Q== - dependencies: - pascal-case "^3.1.1" - tslib "^1.10.0" - camel-case@4.1.2, camel-case@^4.1.1, camel-case@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" @@ -6238,6 +6372,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001508.tgz#4461bbc895c692a96da399639cc1e146e7302a33" integrity sha512-sdQZOJdmt3GJs1UMNpCCCyeuS2IEGLXnHyAo9yIO5JJDjbjoVRij4M1qep6P6gFpptD1PqIYgzM+gwJbOi92mw== +caniuse-lite@^1.0.30001565: + version "1.0.30001566" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001566.tgz#61a8e17caf3752e3e426d4239c549ebbb37fef0d" + integrity sha512-ggIhCsTxmITBAMmK8yZjEhCO5/47jKXPu6Dha/wuCS4JePVL+3uiDEBuhu2aIoT+bqTOR8L76Ip1ARL9xYsEJA== + capital-case@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669" @@ -6267,17 +6406,6 @@ ccount@^1.0.0: resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== -chalk@^1.0.0, chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: 
version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" @@ -6295,7 +6423,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -6398,7 +6526,7 @@ check-types@^11.1.1: resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== -chokidar@^3.4.2, chokidar@^3.4.3, chokidar@^3.5.3: +chokidar@^3.4.2, chokidar@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -6455,12 +6583,10 @@ clean-css@^5.2.2: dependencies: source-map "~0.6.0" -cli-cursor@^2.0.0, cli-cursor@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" - integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= - dependencies: - restore-cursor "^2.0.0" +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== cli-cursor@^3.1.0: version "3.1.0" @@ -6469,13 +6595,18 @@ cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" -cli-truncate@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-0.2.1.tgz#9f15cfbb0705005369216c626ac7d05ab90dd574" - integrity sha1-nxXPuwcFAFNpIWxiasfQWrkN1XQ= +cli-spinners@^2.5.0: + version "2.9.2" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" + integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== + +cli-truncate@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" + integrity sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg== dependencies: - slice-ansi "0.0.4" - string-width "^1.0.1" + slice-ansi "^3.0.0" + string-width "^4.2.0" cli-width@^3.0.0: version "3.0.0" @@ -6500,6 +6631,15 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clone-deep@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" @@ -6509,12 +6649,10 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= - dependencies: - mimic-response "^1.0.0" +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== clsx@^1.2.1: version "1.2.1" @@ -6535,11 +6673,6 @@ coa@^2.0.2: chalk "^2.4.1" q "^1.1.2" -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - codemirror@^5.62.0: version "5.65.10" resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.65.10.tgz#4276a93b8534ce91f14b733ba9a1ac949666eac9" @@ -6597,7 +6730,7 @@ colord@^2.9.1: resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== -colorette@^2.0.10: +colorette@^2.0.10, colorette@^2.0.16: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== @@ -6699,7 +6832,7 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -constant-case@^3.0.3, constant-case@^3.0.4: +constant-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ== @@ -6725,6 +6858,11 @@ convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" @@ -6788,13 +6926,6 @@ core-util-is@1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cosmiconfig-toml-loader@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig-toml-loader/-/cosmiconfig-toml-loader-1.0.0.tgz#0681383651cceff918177debe9084c0d3769509b" - integrity sha512-H/2gurFWVi7xXvCyvsWRLCMekl4tITJcX0QEsDMpzxtuxDyM59xLatYNg4s/k9AA/HdtCYfj2su8mgA0GSDLDA== - dependencies: - "@iarna/toml" "^2.2.5" - cosmiconfig-typescript-loader@^1.0.0: version "1.0.9" resolved 
"https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz#69c523f7e8c3d9f27f563d02bbeadaf2f27212d3" @@ -6803,17 +6934,6 @@ cosmiconfig-typescript-loader@^1.0.0: cosmiconfig "^7" ts-node "^10.7.0" -cosmiconfig@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" - integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.10.0" - cosmiconfig@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" @@ -6836,6 +6956,16 @@ cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: path-type "^4.0.0" yaml "^1.10.0" +cosmiconfig@^8.1.0, cosmiconfig@^8.1.3: + version "8.3.6" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.6.tgz#060a2b871d66dba6c8538ea1118ba1ac16f5fae3" + integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA== + dependencies: + import-fresh "^3.3.0" + js-yaml "^4.1.0" + parse-json "^5.2.0" + path-type "^4.0.0" + craco-antd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/craco-antd/-/craco-antd-2.0.0.tgz#f38977f4de1714e984ad4f68aae2bcce81bdab79" @@ -6883,21 +7013,7 @@ cronstrue@^1.122.0: resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-1.122.0.tgz#bd6838077b476d28f61d381398b47b8c3912a126" integrity sha512-PFuhZd+iPQQ0AWTXIEYX+t3nFGzBrWxmTWUKJOrsGRewaBSLKZ4I1f8s2kryU75nNxgyugZgiGh2OJsCTA/XlA== -cross-fetch@3.0.6: - version "3.0.6" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.0.6.tgz#3a4040bc8941e653e0e9cf17f29ebcd177d3365c" - integrity sha512-KBPUbqgFjzWlVcURG+Svp9TlhA5uliYtiNx/0r8nv0pdypeQCRJ9IaSIc3q/x3q8t3F75cHuwxVql1HFGHCNJQ== - dependencies: - node-fetch "2.6.1" - -cross-fetch@3.1.4: - version "3.1.4" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.4.tgz#9723f3a3a247bf8b89039f3a380a9244e8fa2f39" - integrity sha512-1eAtFWdIubi6T4XPy6ei9iUFoKpUkIF971QLN8lIvvvwueI65+Nw5haMNKUwfJxabqlIIDODJKGrQ66gxC0PbQ== - dependencies: - node-fetch "2.6.1" - -cross-fetch@^3.0.6, cross-fetch@^3.1.5: +cross-fetch@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== @@ -7271,21 +7387,16 @@ data-urls@^2.0.0: whatwg-mimetype "^2.3.0" whatwg-url "^8.0.0" -dataloader@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.0.0.tgz#41eaf123db115987e21ca93c005cd7753c55fe6f" - integrity sha512-YzhyDAwA4TaQIhM5go+vCLmU0UikghC/t9DTQYZR2M/UvZ1MdOhPezSDZcjj9uqQJOMqjLcpWtyW2iNINdlatQ== +dataloader@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.2.2.tgz#216dc509b5abe39d43a9b9d97e6e5e473dfbe3e0" + integrity sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g== date-fns@2.x: version "2.29.3" resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.29.3.tgz#27402d2fc67eb442b511b70bbdf98e6411cd68a8" integrity sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA== -date-fns@^1.27.2: - version "1.30.1" - resolved 
"https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" - integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== - dayjs@1.x, dayjs@^1.11.7: version "1.11.7" resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.7.tgz#4b296922642f70999544d1144a2c25730fce63e2" @@ -7339,23 +7450,11 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" - integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= - dependencies: - mimic-response "^1.0.0" - dedent@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - deep-is@^0.1.3, deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" @@ -7373,10 +7472,12 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +defaults@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== + dependencies: + clone "^1.0.2" define-lazy-prop@^2.0.0: version "2.0.0" @@ -7433,11 +7534,6 @@ dependency-graph@^0.11.0: resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.11.0.tgz#ac0ce7ed68a54da22165a85e97a01d53f5eb2e27" integrity sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg== -dependency-graph@^0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318" - integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w== - dequal@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" @@ -7660,6 +7756,11 @@ dotenv@^10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +dotenv@^16.0.0: + version "16.3.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== + dotenv@^8.2.0: version "8.6.0" resolved 
"https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" @@ -7670,11 +7771,6 @@ dset@^3.1.2: resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.3.tgz#c194147f159841148e8e34ca41f638556d9542d2" integrity sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ== -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= - duplexer@^0.1.2, duplexer@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -7688,13 +7784,6 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" safer-buffer "^2.1.0" -ecdsa-sig-formatter@1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" - integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== - dependencies: - safe-buffer "^5.0.1" - ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -7712,10 +7801,10 @@ electron-to-chromium@^1.4.431: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.441.tgz#94dd9c1cbf081d83f032a4f1cd9f787e21fc24ce" integrity sha512-LlCgQ8zgYZPymf5H4aE9itwiIWH4YlCiv1HFLmmcBeFYi5E+3eaIFnjHzYtcFQbaKfAW+CqZ9pgxo33DZuoqPg== -elegant-spinner@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" - integrity sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4= +electron-to-chromium@^1.4.601: + version "1.4.601" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.601.tgz#cac69868548aee89961ffe63ff5a7716f0685b75" + integrity sha512-SpwUMDWe9tQu8JX5QCO1+p/hChAi9AE9UpoC3rcHVc+gdCGlbT3SGb5I1klgb952HRIyvt9wZhSz9bNBYz9swA== emittery@^0.10.2: version "0.10.2" @@ -7772,13 +7861,6 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - enhanced-resolve@^5.15.0: version "5.15.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" @@ -7874,7 +7956,7 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -8207,16 +8289,6 @@ event-stream@=3.3.4: stream-combiner "~0.0.4" through "~2.3.1" -event-target-shim@^5.0.0: - version "5.0.1" - resolved 
"https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" - integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== - -eventemitter3@^3.1.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" - integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== - eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" @@ -8355,10 +8427,10 @@ extract-domain@2.2.1: resolved "https://registry.yarnpkg.com/extract-domain/-/extract-domain-2.2.1.tgz#1deeae633a5cbf05ae2fd7b3ff87cb98cbc4cb5b" integrity sha512-lOq1adCJha0tFFBci4quxC4XLa6+Rs2WgAwTo9qbO9OsElvJmGgCvOzmHo/yg5CiqeP4+sHjkXYGkrCcIEprMg== -extract-files@9.0.0, extract-files@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-9.0.0.tgz#8a7744f2437f81f5ed3250ed9f1550de902fe54a" - integrity sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ== +extract-files@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-11.0.0.tgz#b72d428712f787eef1f5193aff8ab5351ca8469a" + integrity sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ== extsprintf@1.3.0: version "1.3.0" @@ -8380,15 +8452,20 @@ faker@5.5.3: resolved "https://registry.npmjs.org/faker/-/faker-5.5.3.tgz#c57974ee484431b25205c2c8dc09fda861e51e0e" integrity sha512-wLTv2a28wjUyWkbnX7u/ABZBkUkIF2fCd73V6P2oFqEGEktDfzWx4UxrSqtPRw0xPRAcjeAOIiJWqZm3pP4u3g== +fast-decode-uri-component@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz#46f8b6c22b30ff7a81357d4f59abfae938202543" + integrity sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== +fast-glob@^3.2.11: + version "3.3.0" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" + integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -8396,10 +8473,10 @@ fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: merge2 "^1.3.0" micromatch "^4.0.4" -fast-glob@^3.2.11: - version "3.3.0" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" - integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== +fast-glob@^3.2.12, fast-glob@^3.2.9: + version "3.2.12" + resolved 
"https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -8422,11 +8499,25 @@ fast-loops@^1.1.3: resolved "https://registry.yarnpkg.com/fast-loops/-/fast-loops-1.1.3.tgz#ce96adb86d07e7bf9b4822ab9c6fac9964981f75" integrity sha512-8EZzEP0eKkEEVX+drtd9mtuQ+/QrlfW/5MlwcwK5Nds6EkZ/tRzEexkzUY2mIssnAyVLT+TKHuRXmFNNXYUd6g== +fast-querystring@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/fast-querystring/-/fast-querystring-1.1.2.tgz#a6d24937b4fc6f791b4ee31dcb6f53aeafb89f53" + integrity sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg== + dependencies: + fast-decode-uri-component "^1.0.1" + fast-shallow-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fast-shallow-equal/-/fast-shallow-equal-1.0.0.tgz#d4dcaf6472440dcefa6f88b98e3251e27f25628b" integrity sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw== +fast-url-parser@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d" + integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== + dependencies: + punycode "^1.3.2" + fastest-stable-stringify@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/fastest-stable-stringify/-/fastest-stable-stringify-2.0.2.tgz#3757a6774f6ec8de40c4e86ec28ea02417214c76" @@ -8478,21 +8569,6 @@ fbjs@^3.0.0: setimmediate "^1.0.5" ua-parser-js "^0.7.30" -figures@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" - integrity sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4= - dependencies: - escape-string-regexp "^1.0.5" - object-assign "^4.1.0" - -figures@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" - integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= - dependencies: - escape-string-regexp "^1.0.5" - figures@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" @@ -8680,19 +8756,19 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" -form-data@4.0.0, form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" mime-types "^2.1.12" -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity 
sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" @@ -8836,20 +8912,6 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - get-stream@^6.0.0: version "6.0.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" @@ -8946,19 +9008,7 @@ globals@^13.19.0: dependencies: type-fest "^0.20.2" -globby@11.0.3: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - -globby@^11.0.4, globby@^11.1.0: +globby@^11.0.3, globby@^11.0.4, globby@^11.1.0: version "11.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== @@ -8981,23 +9031,6 @@ globby@^13.1.1: merge2 "^1.4.1" slash "^4.0.0" -got@^9.6.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -9013,31 +9046,30 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -graphql-config@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-3.3.0.tgz#24c3672a427cb67c0c717ca3b9d70e9f0c9e752b" - integrity sha512-mSQIsPMssr7QrgqhnjI+CyVH6oQgCrgS6irHsTvwf7RFDRnR2k9kqpQOQgVoOytBSn0DOYryS0w0SAg9xor/Jw== - dependencies: - "@endemolshinegroup/cosmiconfig-typescript-loader" "3.0.2" - "@graphql-tools/graphql-file-loader" "^6.0.0" - "@graphql-tools/json-file-loader" "^6.0.0" - 
"@graphql-tools/load" "^6.0.0" - "@graphql-tools/merge" "^6.0.0" - "@graphql-tools/url-loader" "^6.0.0" - "@graphql-tools/utils" "^7.0.0" - cosmiconfig "7.0.0" - cosmiconfig-toml-loader "1.0.0" - minimatch "3.0.4" - string-env-interpolation "1.0.1" - -graphql-request@^3.3.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-3.4.0.tgz#3a400cd5511eb3c064b1873afb059196bbea9c2b" - integrity sha512-acrTzidSlwAj8wBNO7Q/UQHS8T+z5qRGquCQRv9J1InwR01BBWV9ObnoE+JS5nCCEj8wSGS0yrDXVDoRiKZuOg== +graphql-config@^5.0.2: + version "5.0.3" + resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-5.0.3.tgz#d9aa2954cf47a927f9cb83cdc4e42ae55d0b321e" + integrity sha512-BNGZaoxIBkv9yy6Y7omvsaBUHOzfFcII3UN++tpH8MGOKFPFkCPZuwx09ggANMt8FgyWP1Od8SWPmrUEZca4NQ== + dependencies: + "@graphql-tools/graphql-file-loader" "^8.0.0" + "@graphql-tools/json-file-loader" "^8.0.0" + "@graphql-tools/load" "^8.0.0" + "@graphql-tools/merge" "^9.0.0" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + cosmiconfig "^8.1.0" + jiti "^1.18.2" + minimatch "^4.2.3" + string-env-interpolation "^1.0.1" + tslib "^2.4.0" + +graphql-request@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-6.1.0.tgz#f4eb2107967af3c7a5907eb3131c671eac89be4f" + integrity sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw== dependencies: - cross-fetch "^3.0.6" - extract-files "^9.0.0" - form-data "^3.0.0" + "@graphql-typed-document-node/core" "^3.2.0" + cross-fetch "^3.1.5" graphql-tag@2.10.3: version "2.10.3" @@ -9051,10 +9083,10 @@ graphql-tag@^2.10.1, graphql-tag@^2.11.0, graphql-tag@^2.12.0: dependencies: tslib "^2.1.0" -graphql-ws@^4.4.1: - version "4.5.1" - resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-4.5.1.tgz#d9dc6e047c6d4ddb928ccbfb3ca3022580a89925" - integrity sha512-GE7vCMKe2D7fc0ugkM1V8QMneHcbV9c3BpPBzdlW/Uzkqv0F/zZq9DDHxLzg55ZhE5OSLL+n/gyqAMPgH59hcw== +graphql-ws@^5.14.0: + version "5.14.2" + resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-5.14.2.tgz#7db6f6138717a544d9480f0213f65f2841ed1c52" + integrity sha512-LycmCwhZ+Op2GlHz4BZDsUYHKRiiUz+3r9wbhBATMETNlORQJAaFlAgTFoeRh6xQoQegwYwIylVD1Qns9/DA3w== graphql.macro@^1.4.2: version "1.4.2" @@ -9101,13 +9133,6 @@ harmony-reflect@^1.4.6: resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" - has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -9395,11 +9420,6 @@ htmlparser2@^6.1.0: domutils "^2.5.2" entities "^2.0.0" -http-cache-semantics@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" - integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== - http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" @@ -9440,6 +9460,14 @@ 
http-proxy-agent@^4.0.1: agent-base "6" debug "4" +http-proxy-agent@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz#e9096c5afd071a3fce56e6252bb321583c124673" + integrity sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ== + dependencies: + agent-base "^7.1.0" + debug "^4.3.4" + http-proxy-middleware@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.0.tgz#20d1ac3409199c83e5d0383ba6436b04e7acb9fe" @@ -9488,6 +9516,14 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" +https-proxy-agent@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz#e2645b846b90e96c6e6f347fb5b2e41f1590b09b" + integrity sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA== + dependencies: + agent-base "^7.0.2" + debug "4" + human-signals@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -9541,7 +9577,7 @@ ieee754@^1.1.13: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.1.4, ignore@^5.2.0: +ignore@^5.2.0: version "5.2.4" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== @@ -9561,7 +9597,7 @@ immutable@~3.7.6: resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.7.6.tgz#13b4d3cb12befa15482a26fe1b2ebae640071e4b" integrity sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw== -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== @@ -9569,13 +9605,6 @@ import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-from@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/import-from/-/import-from-3.0.0.tgz#055cfec38cd5a27d8057ca51376d7d3bf0891966" - integrity sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ== - dependencies: - resolve-from "^5.0.0" - import-from@4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-4.0.0.tgz#2710b8d66817d232e16f4166e319248d3d5492e2" @@ -9594,11 +9623,6 @@ imurmurhash@^0.1.4: resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= -indent-string@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" - integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= - indent-string@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" @@ -9627,7 +9651,7 @@ inherits@2.0.3: resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= -ini@^1.3.5, ini@~1.3.0: +ini@^1.3.5: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== @@ -9645,24 +9669,26 @@ inline-style-prefixer@^6.0.0: css-in-js-utils "^3.1.0" fast-loops "^1.1.3" -inquirer@^7.3.3: - version "7.3.3" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" - integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== +inquirer@^8.0.0: + version "8.2.6" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.6.tgz#733b74888195d8d400a67ac332011b5fae5ea562" + integrity sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg== dependencies: ansi-escapes "^4.2.1" - chalk "^4.1.0" + chalk "^4.1.1" cli-cursor "^3.1.0" cli-width "^3.0.0" external-editor "^3.0.3" figures "^3.0.0" - lodash "^4.17.19" + lodash "^4.17.21" mute-stream "0.0.8" + ora "^5.4.1" run-async "^2.4.0" - rxjs "^6.6.0" + rxjs "^7.5.5" string-width "^4.1.0" strip-ansi "^6.0.0" through "^2.3.6" + wrap-ansi "^6.0.1" internal-slot@^1.0.3: version "1.0.3" @@ -9852,18 +9878,6 @@ is-finite@~1.0.1: dependencies: number-is-nan "^1.0.0" -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= - is-fullwidth-code-point@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" @@ -9874,14 +9888,7 @@ is-generator-fn@^2.0.0: resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== -is-glob@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: +is-glob@4.0.3, is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== @@ -9900,6 +9907,11 @@ is-integer@~1.0.4: dependencies: is-finite "^1.0.0" +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + is-lower-case@^2.0.2: version "2.0.2" resolved 
"https://registry.yarnpkg.com/is-lower-case/-/is-lower-case-2.0.2.tgz#1c0884d3012c841556243483aa5d522f47396d2a" @@ -9939,13 +9951,6 @@ is-obj@^1.0.1: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= -is-observable@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" - integrity sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA== - dependencies: - symbol-observable "^1.1.0" - is-path-inside@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -9973,16 +9978,6 @@ is-potential-custom-element-name@^1.0.1: resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== -is-promise@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" - integrity sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== - -is-promise@^2.1.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" - integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== - is-regex@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" @@ -10015,11 +10010,6 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - is-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" @@ -10114,18 +10104,10 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= -isomorphic-fetch@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4" - integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA== - dependencies: - node-fetch "^2.6.1" - whatwg-fetch "^3.4.1" - -isomorphic-ws@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz#55fd4cd6c5e6491e76dc125938dd863f5cd4f2dc" - integrity sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w== +isomorphic-ws@5.0.0, isomorphic-ws@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf" + integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw== isomorphic.js@^0.2.4: version "0.2.5" @@ -10179,11 +10161,6 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -iterall@^1.2.1: - version "1.3.0" - resolved 
"https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" - integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== - jake@^10.8.5: version "10.8.7" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" @@ -10691,6 +10668,11 @@ jest@^27.4.3: import-local "^3.0.2" jest-cli "^27.5.1" +jiti@^1.17.1: + version "1.21.0" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d" + integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== + jiti@^1.18.2: version "1.18.2" resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" @@ -10707,6 +10689,11 @@ joi@^17.11.0: "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" +jose@^5.0.0: + version "5.1.3" + resolved "https://registry.yarnpkg.com/jose/-/jose-5.1.3.tgz#303959d85c51b5cb14725f930270b72be56abdca" + integrity sha512-GPExOkcMsCLBTi1YetY2LmkoY559fss0+0KVa6kOfb2YFe84nAM7Nm/XzuZozah4iHgmBGrCOHL5/cy670SBRw== + js-cookie@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" @@ -10780,11 +10767,6 @@ jsesc@~0.5.0: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= - json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" @@ -10844,7 +10826,7 @@ json5@^1.0.2: dependencies: minimist "^1.2.0" -json5@^2.1.2, json5@^2.2.0, json5@^2.2.2: +json5@^2.1.2, json5@^2.2.0, json5@^2.2.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -10875,22 +10857,6 @@ jsonpointer@^5.0.0: resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== -jsonwebtoken@^8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" - integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== - dependencies: - jws "^3.2.2" - lodash.includes "^4.3.0" - lodash.isboolean "^3.0.3" - lodash.isinteger "^4.0.4" - lodash.isnumber "^3.0.3" - lodash.isplainobject "^4.0.6" - lodash.isstring "^4.0.1" - lodash.once "^4.0.0" - ms "^2.1.1" - semver "^5.6.0" - jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" @@ -10921,30 +10887,6 @@ just-extend@^4.0.2: resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== -jwa@^1.4.1: - 
version "1.4.1" - resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" - integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== - dependencies: - buffer-equal-constant-time "1.0.1" - ecdsa-sig-formatter "1.0.11" - safe-buffer "^5.0.1" - -jws@^3.2.2: - version "3.2.2" - resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" - integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== - dependencies: - jwa "^1.4.1" - safe-buffer "^5.0.1" - -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== - dependencies: - json-buffer "3.0.0" - kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -10991,13 +10933,6 @@ language-tags@=1.0.5: dependencies: language-subtag-registry "~0.3.2" -latest-version@5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" - integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== - dependencies: - package-json "^6.3.0" - launch-editor@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" @@ -11082,49 +11017,19 @@ lines-and-columns@^1.1.6: resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= -listr-silent-renderer@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" - integrity sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4= - -listr-update-renderer@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" - integrity sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA== - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - elegant-spinner "^1.0.1" - figures "^1.7.0" - indent-string "^3.0.0" - log-symbols "^1.0.2" - log-update "^2.3.0" - strip-ansi "^3.0.1" - -listr-verbose-renderer@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" - integrity sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw== - dependencies: - chalk "^2.4.1" - cli-cursor "^2.1.0" - date-fns "^1.27.2" - figures "^2.0.0" - -listr@^0.14.3: - version "0.14.3" - resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" - integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== - dependencies: - "@samverschueren/stream-to-observable" "^0.3.0" - is-observable "^1.1.0" - is-promise "^2.1.0" - is-stream "^1.1.0" - listr-silent-renderer "^1.1.1" - listr-update-renderer "^0.5.0" - listr-verbose-renderer "^0.5.0" - p-map "^2.0.0" - rxjs "^6.3.3" +listr2@^4.0.5: + 
version "4.0.5" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-4.0.5.tgz#9dcc50221583e8b4c71c43f9c7dfd0ef546b75d5" + integrity sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA== + dependencies: + cli-truncate "^2.1.0" + colorette "^2.0.16" + log-update "^4.0.0" + p-map "^4.0.0" + rfdc "^1.3.0" + rxjs "^7.5.5" + through "^2.3.8" + wrap-ansi "^7.0.0" loader-runner@^4.2.0: version "4.3.0" @@ -11212,7 +11117,7 @@ lodash.forin@^4.4.0: resolved "https://registry.npmjs.org/lodash.forin/-/lodash.forin-4.4.0.tgz#5d3f20ae564011fbe88381f7d98949c9c9519731" integrity sha1-XT8grlZAEfvog4H32YlJyclRlzE= -lodash.get@^4, lodash.get@^4.4.2: +lodash.get@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= @@ -11222,21 +11127,11 @@ lodash.has@^4.5.2: resolved "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz#d19f4dc1095058cccbe2b0cdf4ee0fe4aa37c862" integrity sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI= -lodash.includes@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" - integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= - lodash.invokemap@^4.6.0: version "4.6.0" resolved "https://registry.npmjs.org/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" integrity sha1-F0jNpdiw74NpxOs+xUwh/rofLWI= -lodash.isboolean@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" - integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY= - lodash.isempty@^4.4.0: version "4.4.0" resolved "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e" @@ -11257,21 +11152,11 @@ lodash.isinteger@^4.0.4: resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M= -lodash.isnumber@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" - integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w= - lodash.isplainobject@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= -lodash.isstring@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" - integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE= - lodash.lowerfirst@^4.3.1: version "4.3.1" resolved "https://registry.npmjs.org/lodash.lowerfirst/-/lodash.lowerfirst-4.3.1.tgz#de3c7b12e02c6524a0059c2f6cb7c5c52655a13d" @@ -11297,11 +11182,6 @@ lodash.merge@^4.6.2: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash.once@^4.0.0: - version "4.1.1" - resolved "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" - integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= - lodash.pick@4.4.0, lodash.pick@^4.4.0: version "4.4.0" resolved 
"https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" @@ -11332,19 +11212,12 @@ lodash.values@^4.3.0: resolved "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= -lodash@4.17.21, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: +lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -log-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" - integrity sha1-N2/3tY6jCGoPCfrMdGF+ylAeGhg= - dependencies: - chalk "^1.0.0" - -log-symbols@^4.0.0: +log-symbols@^4.0.0, log-symbols@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== @@ -11352,14 +11225,15 @@ log-symbols@^4.0.0: chalk "^4.1.0" is-unicode-supported "^0.1.0" -log-update@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" - integrity sha1-iDKP19HOeTiykoN0bwsbwSayRwg= +log-update@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" + integrity sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg== dependencies: - ansi-escapes "^3.0.0" - cli-cursor "^2.0.0" - wrap-ansi "^3.0.1" + ansi-escapes "^4.3.0" + cli-cursor "^3.1.0" + slice-ansi "^4.0.0" + wrap-ansi "^6.2.0" longest-streak@^2.0.0: version "2.0.4" @@ -11380,23 +11254,13 @@ lower-case-first@^2.0.2: dependencies: tslib "^2.0.3" -lower-case@^2.0.1, lower-case@^2.0.2: +lower-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== dependencies: tslib "^2.0.3" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lowercase-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" - integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== - lowlight@^1.17.0: version "1.20.0" resolved "https://registry.yarnpkg.com/lowlight/-/lowlight-1.20.0.tgz#ddb197d33462ad0d93bf19d17b6c301aa3941888" @@ -11446,7 +11310,7 @@ make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: dependencies: semver "^6.0.0" -make-error@^1, make-error@^1.1.1, make-error@^1.3.6: +make-error@^1.1.1, make-error@^1.3.6: version "1.3.6" resolved 
"https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -11652,10 +11516,10 @@ merge2@^1.3.0, merge2@^1.4.1: resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== -meros@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/meros/-/meros-1.1.4.tgz#c17994d3133db8b23807f62bec7f0cb276cfd948" - integrity sha512-E9ZXfK9iQfG9s73ars9qvvvbSIkJZF5yOo9j4tcwM5tN8mUKfj/EKN5PzOr3ZH0y5wL7dLAHw3RVEfpQV9Q7VQ== +meros@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/meros/-/meros-1.3.0.tgz#c617d2092739d55286bf618129280f362e6242f2" + integrity sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w== messageformat-parser@^4.1.3: version "4.1.3" @@ -11764,21 +11628,11 @@ mime@1.6.0, mime@^1.4.1: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -mimic-response@^1.0.0, mimic-response@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - min-document@^2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685" @@ -11811,7 +11665,7 @@ minimalistic-assert@^1.0.0: resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^5.0.1: +minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^4.2.3, minimatch@^5.0.1: version "3.0.5" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== @@ -11875,11 +11729,6 @@ mkdirp@^0.5.1, mkdirp@~0.5.1: dependencies: minimist "^1.2.5" -mkdirp@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - mocked-env@1.3.2: version "1.3.2" resolved "https://registry.npmjs.org/mocked-env/-/mocked-env-1.3.2.tgz#548eb2fde141d083de70dc6b231cd9f3210d8731" @@ -12049,11 +11898,6 @@ no-case@^3.0.4: lower-case "^2.0.2" tslib "^2.0.3" 
-node-fetch@2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" - integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== - node-fetch@2.6.7, node-fetch@^2.6.1: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -12076,6 +11920,11 @@ node-releases@^2.0.12: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" + integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== + normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" @@ -12093,11 +11942,6 @@ normalize-range@^0.1.2: resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== - normalize-url@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" @@ -12258,20 +12102,13 @@ on-headers@~1.0.2: resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== -once@^1.3.0, once@^1.3.1, once@^1.4.0: +once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" -onetime@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= - dependencies: - mimic-fn "^1.0.0" - onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" @@ -12328,6 +12165,21 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" +ora@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + orderedmap@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/orderedmap/-/orderedmap-2.1.0.tgz#819457082fa3a06abd316d83a281a1ca467437cd" @@ -12338,11 +12190,6 @@ os-tmpdir@~1.0.2: resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity 
sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== - p-limit@3.1.0, p-limit@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" @@ -12378,10 +12225,12 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" -p-map@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" p-retry@^4.5.0: version "4.6.2" @@ -12396,16 +12245,6 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-json@^6.3.0: - version "6.5.0" - resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0" - integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== - dependencies: - got "^9.6.0" - registry-auth-token "^4.0.0" - registry-url "^5.0.0" - semver "^6.2.0" - param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" @@ -13183,11 +13022,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= - pretender@^3.4.3: version "3.4.3" resolved "https://registry.npmjs.org/pretender/-/pretender-3.4.3.tgz#a3b4160516007075d29127262f3a0063d19896e9" @@ -13493,19 +13327,28 @@ psl@^1.1.28, psl@^1.1.33: resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" +punycode@^1.3.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +pvtsutils@^1.3.2, 
pvtsutils@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/pvtsutils/-/pvtsutils-1.3.5.tgz#b8705b437b7b134cd7fd858f025a23456f1ce910" + integrity sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA== + dependencies: + tslib "^2.6.1" + +pvutils@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/pvutils/-/pvutils-1.1.3.tgz#f35fc1d27e7cd3dfbd39c0826d173e806a03f5a3" + integrity sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ== + q@^1.1.2: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" @@ -13959,16 +13802,6 @@ rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: rc-resize-observer "^1.0.0" rc-util "^5.15.0" -rc@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - react-app-polyfill@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" @@ -14335,6 +14168,15 @@ readable-stream@^3.0.6: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^3.4.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -14440,20 +14282,6 @@ regexpu-core@^5.3.1: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.1.0" -registry-auth-token@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.1.tgz#6d7b4006441918972ccd5fedcd41dc322c79b250" - integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw== - dependencies: - rc "^1.2.8" - -registry-url@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009" - integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== - dependencies: - rc "^1.2.8" - regjsparser@^0.9.1: version "0.9.1" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" @@ -14642,11 +14470,6 @@ repeat-string@^1.0.0, repeat-string@^1.6.1: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= -replaceall@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/replaceall/-/replaceall-0.1.6.tgz#81d81ac7aeb72d7f5c4942adf2697a3220688d8e" - integrity sha1-gdgax663LX9cSUKt8ml6MiBojY4= - request@^2.88.2: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" @@ -14759,21 +14582,6 @@ resolve@^2.0.0-next.4: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -responselike@^1.0.2: - 
version "1.0.2" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= - dependencies: - lowercase-keys "^1.0.0" - -restore-cursor@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" - integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= - dependencies: - onetime "^2.0.0" - signal-exit "^3.0.2" - restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -14797,6 +14605,11 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rfdc@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" + integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== + rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -14873,14 +14686,7 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.3.3, rxjs@^6.6.0: - version "6.6.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" - integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== - dependencies: - tslib "^1.9.0" - -rxjs@^7.8.1: +rxjs@^7.5.5, rxjs@^7.8.1: version "7.8.1" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== @@ -15037,11 +14843,16 @@ semver@^5.6.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" @@ -15224,10 +15035,23 @@ slash@^4.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== -slice-ansi@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" - integrity sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU= +slice-ansi@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" + integrity sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" snake-case@^3.0.4: version "3.0.4" @@ -15332,7 +15156,7 @@ source-map-resolve@^0.6.0: atob "^2.1.2" decode-uri-component "^0.2.0" -source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.20: +source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -15536,6 +15360,11 @@ stream-combiner@~0.0.4: dependencies: duplexer "~0.1.1" +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + strict-uri-encode@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" @@ -15546,7 +15375,7 @@ string-convert@^0.2.0: resolved "https://registry.yarnpkg.com/string-convert/-/string-convert-0.2.1.tgz#6982cc3049fbb4cd85f8b24568b9d9bf39eeff97" integrity sha1-aYLMMEn7tM2F+LJFaLnZvznu/5c= -string-env-interpolation@1.0.1, string-env-interpolation@^1.0.1: +string-env-interpolation@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz#ad4397ae4ac53fe6c91d1402ad6f6a52862c7152" integrity sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg== @@ -15572,23 +15401,6 @@ string-natural-compare@^3.0.1: resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== -string-width@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -string-width@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" @@ -15598,6 +15410,15 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" 
+string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7, string.prototype.matchall@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz#3bf85722021816dcd1bf38bb714915887ca79fd3" @@ -15662,20 +15483,6 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= - dependencies: - ansi-regex "^3.0.0" - strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -15717,7 +15524,7 @@ strip-indent@^3.0.0: dependencies: min-indent "^1.0.0" -strip-json-comments@^2.0.1, strip-json-comments@~2.0.1: +strip-json-comments@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= @@ -15768,17 +15575,6 @@ stylis@4.1.3, stylis@^4.0.6: resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.1.3.tgz#fd2fbe79f5fed17c55269e16ed8da14c84d069f7" integrity sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA== -subscriptions-transport-ws@^0.9.18: - version "0.9.18" - resolved "https://registry.yarnpkg.com/subscriptions-transport-ws/-/subscriptions-transport-ws-0.9.18.tgz#bcf02320c911fbadb054f7f928e51c6041a37b97" - integrity sha512-tztzcBTNoEbuErsVQpTN2xUNN/efAZXyCyL5m3x4t6SKrEiTL2N8SaKWBFWM4u56pL79ULif3zjyeq+oV+nOaA== - dependencies: - backo2 "^1.0.2" - eventemitter3 "^3.1.0" - iterall "^1.2.1" - symbol-observable "^1.0.4" - ws "^5.2.0" - sucrase@^3.32.0: version "3.32.0" resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" @@ -15792,11 +15588,6 @@ sucrase@^3.32.0: pirates "^4.0.1" ts-interface-checker "^0.1.9" -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -15887,11 +15678,6 @@ swap-case@^2.0.2: dependencies: tslib "^2.0.3" -symbol-observable@^1.0.4, symbol-observable@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" - integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== - symbol-observable@^2.0.0: version "2.0.3" resolved 
"https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-2.0.3.tgz#5b521d3d07a43c351055fa43b8355b62d33fd16a" @@ -15902,14 +15688,6 @@ symbol-tree@^3.2.4: resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -sync-fetch@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/sync-fetch/-/sync-fetch-0.3.0.tgz#77246da949389310ad978ab26790bb05f88d1335" - integrity sha512-dJp4qg+x4JwSEW1HibAuMi0IIrBI3wuQr2GimmqB7OXR50wmwzfdusG+p39R9w3R6aFtZ2mzvxvWKQ3Bd/vx3g== - dependencies: - buffer "^5.7.0" - node-fetch "^2.6.1" - tailwindcss@^3.0.2: version "3.3.2" resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.2.tgz#2f9e35d715fdf0bbf674d90147a0684d7054a2d3" @@ -16039,7 +15817,7 @@ throttle-debounce@^3.0.1: resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb" integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg== -through@2, through@^2.3.6, through@~2.3, through@~2.3.1: +through@2, through@^2.3.6, through@^2.3.8, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= @@ -16095,11 +15873,6 @@ to-object-path@^0.3.0: dependencies: kind-of "^3.0.2" -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== - to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" @@ -16222,18 +15995,6 @@ ts-node@^10.7.0: v8-compile-cache-lib "^3.0.1" yn "3.1.1" -ts-node@^9: - version "9.1.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-9.1.1.tgz#51a9a450a3e959401bda5f004a72d54b936d376d" - integrity sha512-hPlt7ZACERQGf03M253ytLY3dHbGNGrAq9qIHWUY9XHYl1z7wYngSr3OQ5xmui8o2AaxsONxIzjafLUiWBo1Fg== - dependencies: - arg "^4.1.0" - create-require "^1.1.0" - diff "^4.0.1" - make-error "^1.1.1" - source-map-support "^0.5.17" - yn "3.1.1" - tsconfig-paths@^3.14.1: version "3.14.2" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" @@ -16244,26 +16005,26 @@ tsconfig-paths@^3.14.1: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: +tslib@^1.10.0, tslib@^1.8.1: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2, tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: version "2.4.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e" integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== +tslib@^2.3.1, tslib@^2.5.0, tslib@^2.6.1, tslib@^2.6.2: + version "2.6.2" + resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== + tslib@~2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.0.3.tgz#8e0741ac45fc0c226e58a17bfc3e64b9bc6ca61c" integrity sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ== -tslib@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" - integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== - tslib@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c" @@ -16509,10 +16270,10 @@ universalify@^2.0.0: resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== -unixify@1.0.0: +unixify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unixify/-/unixify-1.0.0.tgz#3a641c8c2ffbce4da683a5c70f03a462940c2090" - integrity sha1-OmQcjC/7zk2mg6XHDwOkYpQMIJA= + integrity sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg== dependencies: normalize-path "^2.1.1" @@ -16547,6 +16308,14 @@ update-browserslist-db@^1.0.11: escalade "^3.1.1" picocolors "^1.0.0" +update-browserslist-db@^1.0.13: + version "1.0.13" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" + integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + upper-case-first@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324" @@ -16573,12 +16342,15 @@ urix@^0.1.0: resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= - dependencies: - prepend-http "^2.0.0" +urlpattern-polyfill@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz#99f096e35eff8bf4b5a2aa7d58a1523d6ebc7ce5" + integrity sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ== + +urlpattern-polyfill@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-9.0.0.tgz#bc7e386bb12fd7898b58d1509df21d3c29ab3460" + integrity sha512-WHN8KDQblxd32odxeIgo83rdVDE2bvdkb86it7bMhYZwWKJz0+O0RK/eZiHYnM+zgt/U7hAHOlCQGfjjvSkw2g== use-callback-ref@^1.2.5: version "1.3.0" @@ -16661,20 +16433,15 @@ v8-to-istanbul@^8.1.0: convert-source-map "^1.6.0" source-map "^0.7.3" -valid-url@1.0.9, valid-url@^1.0.9: - version "1.0.9" - resolved "https://registry.yarnpkg.com/valid-url/-/valid-url-1.0.9.tgz#1c14479b40f1397a75782f115e4086447433a200" - integrity sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA= - value-equal@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== -value-or-promise@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.6.tgz#218aa4794aa2ee24dcf48a29aba4413ed584747f" - integrity sha512-9r0wQsWD8z/BxPOvnwbPf05ZvFngXyouE9EKB+5GbYix+BYnAwrIChCUyFIinfbf2FL/U71z+CPpbnmTdxrwBg== +value-or-promise@^1.0.11, value-or-promise@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.12.tgz#0e5abfeec70148c78460a849f6b003ea7986f15c" + integrity sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q== vary@~1.1.2: version "1.1.2" @@ -16770,16 +16537,39 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== + dependencies: + defaults "^1.0.3" + web-namespaces@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== +web-streams-polyfill@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== + web-vitals@^0.2.4: version "0.2.4" resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-0.2.4.tgz#ec3df43c834a207fd7cdefd732b2987896e08511" integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg== +webcrypto-core@^1.7.7: + version "1.7.7" + resolved "https://registry.yarnpkg.com/webcrypto-core/-/webcrypto-core-1.7.7.tgz#06f24b3498463e570fed64d7cab149e5437b162c" + integrity sha512-7FjigXNsBfopEj+5DV2nhNpfic2vumtjjgPmeDKk45z+MJwXKKfhPB7118Pfzrmh4jqOMST6Ch37iPAHoImg5g== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + asn1js "^3.0.1" + pvtsutils "^1.3.2" + tslib "^2.4.0" + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" @@ -16935,7 +16725,7 @@ whatwg-encoding@^1.0.5: dependencies: iconv-lite "0.4.24" -whatwg-fetch@^3.4.1, whatwg-fetch@^3.6.2: +whatwg-fetch@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== @@ -17180,15 +16970,7 @@ workbox-window@6.6.1: "@types/trusted-types" "^2.0.2" workbox-core "6.6.1" -wrap-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-3.0.1.tgz#288a04d87eda5c286e060dfe8f135ce8d007f8ba" - integrity sha1-KIoE2H7aXChuBg3+jxNc6NAH+Lo= - dependencies: - string-width "^2.1.1" - strip-ansi "^4.0.0" - -wrap-ansi@^6.2.0: +wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: version "6.2.0" resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== @@ -17221,17 +17003,10 @@ write-file-atomic@^3.0.0: signal-exit "^3.0.2" typedarray-to-buffer "^3.1.5" -ws@7.4.5: - version "7.4.5" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1" - integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g== - -ws@^5.2.0: - version "5.2.2" - resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.2.tgz#dffef14866b8e8dc9133582514d1befaf96e980f" - integrity sha512-jaHFD6PFv6UgoIVda6qZllptQsMlDEJkTQcybzzXDYM1XO9Y8em691FGMPmM46WGyLU4z9KMgQN+qrux/nhlHA== - dependencies: - async-limiter "~1.0.0" +ws@8.14.2, ws@^8.12.0: + version "8.14.2" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f" + integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g== ws@^7.4.6: version "7.5.9" @@ -17308,6 +17083,11 @@ yaml@^2.1.1: resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== +yaml@^2.3.1: + version "2.3.4" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" + integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA== + yamljs@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/yamljs/-/yamljs-0.3.0.tgz#dc060bf267447b39f7304e9b2bfbe8b5a7ddb03b" @@ -17329,6 +17109,11 @@ yargs-parser@^20.2.2: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + yargs@^15.3.1: version "15.4.1" resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" @@ -17346,7 +17131,7 @@ yargs@^15.3.1: y18n "^4.0.0" yargs-parser "^18.1.2" -yargs@^16.1.1, yargs@^16.2.0: +yargs@^16.2.0: version "16.2.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== @@ -17359,6 +17144,19 @@ yargs@^16.1.1, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@^17.0.0: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yjs@^13.5.23: version "13.5.44" resolved "https://registry.yarnpkg.com/yjs/-/yjs-13.5.44.tgz#1c79ec7407963e07f44174cffcfde5b58a62b0da" From c0ef72886828044c40eb9db8a140e7e8afecb2d1 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz 
<andrew.sikowitz@acryl.io> Date: Mon, 4 Dec 2023 13:21:42 -0500 Subject: [PATCH 193/792] fix(ingest/powerbi): Allow old parser to parse [db].[schema].[table] table references (#9360) --- .../ingestion/source/powerbi/config.py | 1 + .../source/powerbi/m_query/resolver.py | 28 +++++--- .../tests/unit/test_powerbi_parser.py | 65 +++++++++++++++++++ 3 files changed, 84 insertions(+), 10 deletions(-) create mode 100644 metadata-ingestion/tests/unit/test_powerbi_parser.py diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py index b8cc34c234ffa..f71afac737ca6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py @@ -314,6 +314,7 @@ class PowerBiDashboardSourceConfig( description="Configure how is ownership ingested", ) modified_since: Optional[str] = pydantic.Field( + default=None, description="Get only recently modified workspaces based on modified_since datetime '2023-02-10T00:00:00.0000000Z', excludePersonalWorkspaces and excludeInActiveWorkspaces limit to last 30 days", ) extract_dashboards: bool = pydantic.Field( diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py index e200ff41f71c2..930841f1f0df2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py @@ -617,16 +617,25 @@ def create_urn_using_old_parser( tables: List[str] = native_sql_parser.get_tables(query) - for table in tables: - schema_and_table: List[str] = table.split(".") - if len(schema_and_table) == 1: - # schema name is not present. 
set default schema - schema_and_table.insert(0, MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA) - - qualified_table_name = ( - f"{db_name}.{schema_and_table[0]}.{schema_and_table[1]}" - ) + for parsed_table in tables: + # components: List[str] = [v.strip("[]") for v in parsed_table.split(".")] + components = [v.strip("[]") for v in parsed_table.split(".")] + if len(components) == 3: + database, schema, table = components + elif len(components) == 2: + schema, table = components + database = db_name + elif len(components) == 1: + (table,) = components + database = db_name + schema = MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA + else: + logger.warning( + f"Unsupported table format found {parsed_table} in query {query}" + ) + continue + qualified_table_name = f"{database}.{schema}.{table}" urn = urn_creator( config=self.config, platform_instance_resolver=self.platform_instance_resolver, @@ -634,7 +643,6 @@ def create_urn_using_old_parser( server=server, qualified_table_name=qualified_table_name, ) - dataplatform_tables.append( DataPlatformTable( data_platform_pair=self.get_platform_pair(), diff --git a/metadata-ingestion/tests/unit/test_powerbi_parser.py b/metadata-ingestion/tests/unit/test_powerbi_parser.py new file mode 100644 index 0000000000000..e53e8d7aee16f --- /dev/null +++ b/metadata-ingestion/tests/unit/test_powerbi_parser.py @@ -0,0 +1,65 @@ +import pytest + +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.source.powerbi.config import PowerBiDashboardSourceConfig +from datahub.ingestion.source.powerbi.dataplatform_instance_resolver import ( + ResolvePlatformInstanceFromDatasetTypeMapping, +) +from datahub.ingestion.source.powerbi.m_query.resolver import ( + MSSqlDataPlatformTableCreator, +) + + +@pytest.fixture +def creator(): + config = PowerBiDashboardSourceConfig( + tenant_id="test-tenant-id", + client_id="test-client-id", + client_secret="test-client-secret", + ) + return MSSqlDataPlatformTableCreator( + ctx=PipelineContext(run_id="test-run-id"), + config=config, + platform_instance_resolver=ResolvePlatformInstanceFromDatasetTypeMapping( + config + ), + ) + + +def test_parse_three_part_table_reference(creator): + v = creator.create_urn_using_old_parser( + "SELECT * FROM [dwhdbt].[dbo2].[my_table] where oper_day_date > getdate() - 5", + db_name="default_db", + server="server", + ) + assert len(v) == 1 + assert ( + v[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:mssql,dwhdbt.dbo2.my_table,PROD)" + ) + + +def test_parse_two_part_table_reference(creator): + v = creator.create_urn_using_old_parser( + "SELECT * FROM my_schema.my_table", + db_name="default_db", + server="server", + ) + assert len(v) == 1 + assert ( + v[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:mssql,default_db.my_schema.my_table,PROD)" + ) + + +def test_parse_one_part_table_reference(creator): + v = creator.create_urn_using_old_parser( + "SELECT * FROM my_table", + db_name="default_db", + server="server", + ) + assert len(v) == 1 + assert ( + v[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:mssql,default_db.dbo.my_table,PROD)" + ) From 4ec3208918791b517a6d18c41905ee2dbe189a12 Mon Sep 17 00:00:00 2001 From: Harshal Sheth <hsheth2@gmail.com> Date: Mon, 4 Dec 2023 14:31:58 -0500 Subject: [PATCH 194/792] feat(ingest): support stdin in `datahub put` (#9359) --- .../src/datahub/cli/ingest_cli.py | 4 +++ metadata-ingestion/src/datahub/cli/put_cli.py | 27 ++++++++++--------- .../src/datahub/cli/specific/file_loader.py | 1 + .../datahub/configuration/config_loader.py | 22 
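A minimal standalone sketch of the arity rule the patched resolver applies; the function name is illustrative and the `dbo` default is an assumption mirroring `MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA`, but the three assertions track the new unit tests above.

```python
from typing import Optional

DEFAULT_SCHEMA = "dbo"  # assumption: mirrors MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA

def normalize_table_reference(parsed_table: str, db_name: str) -> Optional[str]:
    # Strip [bracket] quoting, then fill in whatever parts are missing.
    components = [part.strip("[]") for part in parsed_table.split(".")]
    if len(components) == 3:
        database, schema, table = components
    elif len(components) == 2:
        database, (schema, table) = db_name, components
    elif len(components) == 1:
        database, schema, (table,) = db_name, DEFAULT_SCHEMA, components
    else:
        return None  # the real code logs a warning and skips the reference
    return f"{database}.{schema}.{table}"

assert normalize_table_reference("[dwhdbt].[dbo2].[my_table]", "db") == "dwhdbt.dbo2.my_table"
assert normalize_table_reference("my_schema.my_table", "db") == "db.my_schema.my_table"
assert normalize_table_reference("my_table", "db") == "db.dbo.my_table"
```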
++++++++++----- .../src/datahub/configuration/json_loader.py | 11 ++++++++ .../source/metadata/business_glossary.py | 2 +- .../ingestion/source/metadata/lineage.py | 2 +- .../tests/unit/config/test_config_loader.py | 9 +++++-- 8 files changed, 55 insertions(+), 23 deletions(-) create mode 100644 metadata-ingestion/src/datahub/configuration/json_loader.py diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index dd0287004a368..b7827ec9f050b 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -147,6 +147,9 @@ async def run_pipeline_to_completion(pipeline: Pipeline) -> int: squirrel_original_config=True, squirrel_field="__raw_config", allow_stdin=True, + allow_remote=True, + process_directives=True, + resolve_env_vars=True, ) raw_pipeline_config = pipeline_config.pop("__raw_config") @@ -268,6 +271,7 @@ def deploy( pipeline_config = load_config_file( config, allow_stdin=True, + allow_remote=True, resolve_env_vars=False, ) diff --git a/metadata-ingestion/src/datahub/cli/put_cli.py b/metadata-ingestion/src/datahub/cli/put_cli.py index 6a1d82388dc2a..324d7f94db258 100644 --- a/metadata-ingestion/src/datahub/cli/put_cli.py +++ b/metadata-ingestion/src/datahub/cli/put_cli.py @@ -1,11 +1,11 @@ -import json import logging -from typing import Any, Optional +from typing import Optional import click from click_default_group import DefaultGroup from datahub.cli.cli_utils import post_entity +from datahub.configuration.config_loader import load_config_file from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.graph.client import get_default_graph from datahub.metadata.schema_classes import ( @@ -36,22 +36,23 @@ def put() -> None: @click.option("--urn", required=True, type=str) @click.option("-a", "--aspect", required=True, type=str) @click.option("-d", "--aspect-data", required=True, type=str) -@click.pass_context @upgrade.check_upgrade @telemetry.with_telemetry() -def aspect(ctx: Any, urn: str, aspect: str, aspect_data: str) -> None: +def aspect(urn: str, aspect: str, aspect_data: str) -> None: """Update a single aspect of an entity""" entity_type = guess_entity_type(urn) - with open(aspect_data) as fp: - aspect_obj = json.load(fp) - status = post_entity( - urn=urn, - aspect_name=aspect, - entity_type=entity_type, - aspect_value=aspect_obj, - ) - click.secho(f"Update succeeded with status {status}", fg="green") + aspect_obj = load_config_file( + aspect_data, allow_stdin=True, resolve_env_vars=False, process_directives=False + ) + + status = post_entity( + urn=urn, + aspect_name=aspect, + entity_type=entity_type, + aspect_value=aspect_obj, + ) + click.secho(f"Update succeeded with status {status}", fg="green") @put.command() diff --git a/metadata-ingestion/src/datahub/cli/specific/file_loader.py b/metadata-ingestion/src/datahub/cli/specific/file_loader.py index a9787343fdb91..cad32eb0a22a1 100644 --- a/metadata-ingestion/src/datahub/cli/specific/file_loader.py +++ b/metadata-ingestion/src/datahub/cli/specific/file_loader.py @@ -21,5 +21,6 @@ def load_file(config_file: Path) -> Union[dict, list]: squirrel_original_config=False, resolve_env_vars=False, allow_stdin=False, + process_directives=False, ) return res diff --git a/metadata-ingestion/src/datahub/configuration/config_loader.py b/metadata-ingestion/src/datahub/configuration/config_loader.py index 30ca4ff6aed2d..2f41af6f7286e 100644 --- 
a/metadata-ingestion/src/datahub/configuration/config_loader.py +++ b/metadata-ingestion/src/datahub/configuration/config_loader.py @@ -11,6 +11,7 @@ from expandvars import UnboundVariable, expandvars from datahub.configuration.common import ConfigurationError, ConfigurationMechanism +from datahub.configuration.json_loader import JsonConfigurationMechanism from datahub.configuration.toml import TomlConfigurationMechanism from datahub.configuration.yaml import YamlConfigurationMechanism @@ -100,33 +101,42 @@ def load_config_file( squirrel_original_config: bool = False, squirrel_field: str = "__orig_config", allow_stdin: bool = False, - resolve_env_vars: bool = True, - process_directives: bool = True, + allow_remote: bool = True, # TODO: Change the default to False. + resolve_env_vars: bool = True, # TODO: Change the default to False. + process_directives: bool = False, ) -> dict: config_mech: ConfigurationMechanism if allow_stdin and config_file == "-": # If we're reading from stdin, we assume that the input is a YAML file. + # Note that YAML is a superset of JSON, so this will also read JSON files. config_mech = YamlConfigurationMechanism() raw_config_file = sys.stdin.read() else: config_file_path = pathlib.Path(config_file) if config_file_path.suffix in {".yaml", ".yml"}: config_mech = YamlConfigurationMechanism() + elif config_file_path.suffix == ".json": + config_mech = JsonConfigurationMechanism() elif config_file_path.suffix == ".toml": config_mech = TomlConfigurationMechanism() else: raise ConfigurationError( - f"Only .toml and .yml are supported. Cannot process file type {config_file_path.suffix}" + f"Only .toml, .yml, and .json are supported. Cannot process file type {config_file_path.suffix}" ) + url_parsed = parse.urlparse(str(config_file)) - if url_parsed.scheme in ("http", "https"): # URLs will return http/https + if allow_remote and url_parsed.scheme in ( + "http", + "https", + ): # URLs will return http/https + # If the URL is remote, we need to fetch it. 
try: response = requests.get(str(config_file)) raw_config_file = response.text except Exception as e: raise ConfigurationError( - f"Cannot read remote file {config_file_path}, error:{e}" - ) + f"Cannot read remote file {config_file_path}: {e}" + ) from e else: if not config_file_path.is_file(): raise ConfigurationError(f"Cannot open config file {config_file_path}") diff --git a/metadata-ingestion/src/datahub/configuration/json_loader.py b/metadata-ingestion/src/datahub/configuration/json_loader.py new file mode 100644 index 0000000000000..35667eb5951fc --- /dev/null +++ b/metadata-ingestion/src/datahub/configuration/json_loader.py @@ -0,0 +1,11 @@ +import json +from typing import IO + +from datahub.configuration import ConfigurationMechanism + + +class JsonConfigurationMechanism(ConfigurationMechanism): + """Ability to load configuration from json files""" + + def load_config(self, config_fp: IO) -> dict: + return json.load(config_fp) diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index 97877df63707f..6baa70aa581d6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -495,7 +495,7 @@ def create(cls, config_dict, ctx): def load_glossary_config( cls, file_name: Union[str, pathlib.Path] ) -> BusinessGlossaryConfig: - config = load_config_file(file_name) + config = load_config_file(file_name, resolve_env_vars=True) glossary_cfg = BusinessGlossaryConfig.parse_obj(config) return glossary_cfg diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py index f33c6e0edae3d..659444fe610e0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py @@ -147,7 +147,7 @@ def create( @staticmethod def load_lineage_config(file_name: str) -> LineageConfig: - config = load_config_file(file_name) + config = load_config_file(file_name, resolve_env_vars=True) lineage_config = LineageConfig.parse_obj(config) return lineage_config diff --git a/metadata-ingestion/tests/unit/config/test_config_loader.py b/metadata-ingestion/tests/unit/config/test_config_loader.py index 3253c96b876aa..f9a4076e18363 100644 --- a/metadata-ingestion/tests/unit/config/test_config_loader.py +++ b/metadata-ingestion/tests/unit/config/test_config_loader.py @@ -134,7 +134,7 @@ def test_load_success(pytestconfig, filename, golden_config, env, referenced_env assert list_referenced_env_variables(raw_config) == referenced_env_vars with mock.patch.dict(os.environ, env): - loaded_config = load_config_file(filepath) + loaded_config = load_config_file(filepath, resolve_env_vars=True) assert loaded_config == golden_config # TODO check referenced env vars @@ -183,7 +183,12 @@ def test_write_file_directive(pytestconfig): fake_ssl_key = "my-secret-key-value" with mock.patch.dict(os.environ, {"DATAHUB_SSL_KEY": fake_ssl_key}): - loaded_config = load_config_file(filepath, squirrel_original_config=False) + loaded_config = load_config_file( + filepath, + squirrel_original_config=False, + resolve_env_vars=True, + process_directives=True, + ) # Check that the rest of the dict is unmodified. 
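For orientation, the suffix dispatch in `load_config_file` now reduces to the following; this is a hypothetical simplification (the real code wraps each loader in a `*ConfigurationMechanism` class and additionally handles remote URLs, env-var expansion, and directives). Note that stdin always goes through the YAML loader, since YAML is a superset of JSON.

```python
import json
import pathlib
import sys

import toml  # third-party; assumption: any TOML parser stands in for TomlConfigurationMechanism
import yaml

def load(config_file: str, allow_stdin: bool = True) -> dict:
    if allow_stdin and config_file == "-":
        return yaml.safe_load(sys.stdin.read())  # also accepts JSON payloads
    path = pathlib.Path(config_file)
    if path.suffix in {".yaml", ".yml"}:
        return yaml.safe_load(path.read_text())
    if path.suffix == ".json":
        return json.loads(path.read_text())
    if path.suffix == ".toml":
        return toml.loads(path.read_text())
    raise ValueError(f"Only .toml, .yml, and .json are supported, got {path.suffix}")
```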
diff = deepdiff.DeepDiff( From 7517c77ffdbafc193dc7529881fc42ebe3f2ab2a Mon Sep 17 00:00:00 2001 From: Harshal Sheth <hsheth2@gmail.com> Date: Mon, 4 Dec 2023 20:00:11 -0500 Subject: [PATCH 195/792] fix(ingest): resolve issue with caplog and asyncio (#9377) --- .../src/datahub/ingestion/source/looker/lookml_source.py | 2 +- .../tests/unit/api/source_helpers/test_source_helpers.py | 9 +++++++-- .../tests/unit/utilities/test_perf_timer.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 4e91d17feaa9f..93c405f0a39f2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -550,7 +550,7 @@ def resolve_includes( @dataclass class LookerViewFile: absolute_file_path: str - connection: Optional[str] + connection: Optional[LookerConnectionDefinition] includes: List[str] resolved_includes: List[ProjectInclude] views: List[Dict] diff --git a/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py b/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py index b667af8bb41e9..26e8639bed6e7 100644 --- a/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py +++ b/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py @@ -3,6 +3,7 @@ from typing import Any, Dict, Iterable, List, Union from unittest.mock import patch +import pytest from freezegun import freeze_time import datahub.metadata.schema_classes as models @@ -482,7 +483,7 @@ def test_auto_browse_path_v2_dry_run(telemetry_ping_mock): @freeze_time("2023-01-02 00:00:00") -def test_auto_empty_dataset_usage_statistics(caplog): +def test_auto_empty_dataset_usage_statistics(caplog: pytest.LogCaptureFixture) -> None: has_urn = make_dataset_urn("my_platform", "has_aspect") empty_urn = make_dataset_urn("my_platform", "no_aspect") config = BaseTimeWindowConfig() @@ -499,6 +500,7 @@ def test_auto_empty_dataset_usage_statistics(caplog): ), ).as_workunit() ] + caplog.clear() with caplog.at_level(logging.WARNING): new_wus = list( auto_empty_dataset_usage_statistics( @@ -530,7 +532,9 @@ def test_auto_empty_dataset_usage_statistics(caplog): @freeze_time("2023-01-02 00:00:00") -def test_auto_empty_dataset_usage_statistics_invalid_timestamp(caplog): +def test_auto_empty_dataset_usage_statistics_invalid_timestamp( + caplog: pytest.LogCaptureFixture, +) -> None: urn = make_dataset_urn("my_platform", "my_dataset") config = BaseTimeWindowConfig() wus = [ @@ -546,6 +550,7 @@ def test_auto_empty_dataset_usage_statistics_invalid_timestamp(caplog): ), ).as_workunit() ] + caplog.clear() with caplog.at_level(logging.WARNING): new_wus = list( auto_empty_dataset_usage_statistics( diff --git a/metadata-ingestion/tests/unit/utilities/test_perf_timer.py b/metadata-ingestion/tests/unit/utilities/test_perf_timer.py index d5fde314c2b57..6129b3e37d8bc 100644 --- a/metadata-ingestion/tests/unit/utilities/test_perf_timer.py +++ b/metadata-ingestion/tests/unit/utilities/test_perf_timer.py @@ -5,7 +5,7 @@ from datahub.utilities.perf_timer import PerfTimer -approx = partial(pytest.approx, rel=1e-2) +approx = partial(pytest.approx, rel=2e-2) def test_perf_timer_simple(): From 0d9aa2641014f36611e0d740dcd0df563df0984d Mon Sep 17 00:00:00 2001 From: Harshal Sheth <hsheth2@gmail.com> Date: Mon, 4 Dec 2023 20:00:57 -0500 Subject: [PATCH 196/792] 
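The `caplog` fixes in this patch share one pattern, sketched below: clear whatever records were captured before the block under test (the asyncio interaction named in the commit title was leaking extra records in), then scope the capture with `at_level` and assert only on what remains. The logger name is illustrative.

```python
import logging

def test_emits_exactly_one_warning(caplog):  # caplog: pytest.LogCaptureFixture
    caplog.clear()  # drop records captured during setup
    with caplog.at_level(logging.WARNING):
        logging.getLogger("illustrative").warning("boom")
    assert len(caplog.records) == 1
```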
fix(ingest/airflow): compat with pluggy 1.0 (#9365) --- docs/lineage/airflow.md | 14 ++++++++ .../datahub_airflow_plugin/_airflow_shims.py | 5 +++ .../_datahub_listener_module.py | 35 ++++++++++++++++--- .../datahub_airflow_plugin/datahub_plugin.py | 6 ++-- .../airflow-plugin/tox.ini | 6 +++- 5 files changed, 57 insertions(+), 9 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 8fd38f560bfbb..da3a36bc87be5 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -246,6 +246,20 @@ If your URLs aren't being generated correctly (usually they'll start with `http: base_url = http://airflow.mycorp.example.com ``` +### TypeError ... missing 3 required positional arguments + +If you see errors like the following with the v2 plugin: + +```shell +ERROR - on_task_instance_success() missing 3 required positional arguments: 'previous_state', 'task_instance', and 'session' +Traceback (most recent call last): + File "/home/airflow/.local/lib/python3.8/site-packages/datahub_airflow_plugin/datahub_listener.py", line 124, in wrapper + f(*args, **kwargs) +TypeError: on_task_instance_success() missing 3 required positional arguments: 'previous_state', 'task_instance', and 'session' +``` + +The solution is to upgrade `acryl-datahub-airflow-plugin>=0.12.0.4` or upgrade `pluggy>=1.2.0`. See this [PR](https://github.com/datahub-project/datahub/pull/9365) for details. + ## Compatibility We no longer officially support Airflow <2.1. However, you can use older versions of `acryl-datahub-airflow-plugin` with older versions of Airflow. diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py index 10f014fbd586f..d384958cf3ddb 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py @@ -2,6 +2,7 @@ import airflow.version import packaging.version +import pluggy from airflow.models.baseoperator import BaseOperator from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED @@ -27,9 +28,13 @@ # Approach suggested by https://stackoverflow.com/a/11887885/5004662. 
AIRFLOW_VERSION = packaging.version.parse(airflow.version.version) +PLUGGY_VERSION = packaging.version.parse(pluggy.__version__) HAS_AIRFLOW_STANDALONE_CMD = AIRFLOW_VERSION >= packaging.version.parse("2.2.0.dev0") HAS_AIRFLOW_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.3.0.dev0") HAS_AIRFLOW_DAG_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.5.0.dev0") +NEEDS_AIRFLOW_LISTENER_MODULE = AIRFLOW_VERSION < packaging.version.parse( + "2.5.0.dev0" +) or PLUGGY_VERSION <= packaging.version.parse("1.0.0") def get_task_inlets(operator: "Operator") -> List: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py index f39d37b122228..e16563400e397 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py @@ -1,7 +1,34 @@ -from datahub_airflow_plugin.datahub_listener import get_airflow_plugin_listener +from datahub_airflow_plugin.datahub_listener import ( + get_airflow_plugin_listener, + hookimpl, +) _listener = get_airflow_plugin_listener() if _listener: - on_task_instance_running = _listener.on_task_instance_running - on_task_instance_success = _listener.on_task_instance_success - on_task_instance_failed = _listener.on_task_instance_failed + # The run_in_thread decorator messes with pluggy's interface discovery, + # which causes the hooks to be called with no arguments and results in TypeErrors. + # This is only an issue with Pluggy <= 1.0.0. + # See https://github.com/pytest-dev/pluggy/issues/358 + # Note that pluggy 1.0.0 is in the constraints file for Airflow 2.4 and 2.5. 
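A quick sanity check of the new gate, as a sketch; the helper name is made up, but the comparisons match the constants above and confirm the module-level shim is kept both for pre-2.5 Airflow and for any Airflow still pinned to pluggy 1.0.0 or older.

```python
import packaging.version

def needs_listener_module(airflow_version: str, pluggy_version: str) -> bool:
    parse = packaging.version.parse
    return parse(airflow_version) < parse("2.5.0.dev0") or parse(
        pluggy_version
    ) <= parse("1.0.0")

assert needs_listener_module("2.7.0", "1.0.0")      # new Airflow, old pinned pluggy
assert needs_listener_module("2.4.3", "1.3.0")      # pre-2.5 Airflow
assert not needs_listener_module("2.6.0", "1.2.0")  # neither condition applies
```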
+ + @hookimpl + def on_task_instance_running(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_running(previous_state, task_instance, session) + + @hookimpl + def on_task_instance_success(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_success(previous_state, task_instance, session) + + @hookimpl + def on_task_instance_failed(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_failed(previous_state, task_instance, session) + + if hasattr(_listener, "on_dag_run_running"): + + @hookimpl + def on_dag_run_running(dag_run, session): + assert _listener + _listener.on_dag_run_running(dag_run, session) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py index c96fab31647f5..2b0b751bd787b 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py @@ -6,8 +6,8 @@ from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED from datahub_airflow_plugin._airflow_shims import ( - HAS_AIRFLOW_DAG_LISTENER_API, HAS_AIRFLOW_LISTENER_API, + NEEDS_AIRFLOW_LISTENER_MODULE, ) assert AIRFLOW_PATCHED @@ -50,7 +50,7 @@ class DatahubPlugin(AirflowPlugin): name = "datahub_plugin" if _USE_AIRFLOW_LISTENER_INTERFACE: - if HAS_AIRFLOW_DAG_LISTENER_API: + if not NEEDS_AIRFLOW_LISTENER_MODULE: from datahub_airflow_plugin.datahub_listener import ( # type: ignore[misc] get_airflow_plugin_listener, ) @@ -60,8 +60,6 @@ class DatahubPlugin(AirflowPlugin): else: # On Airflow < 2.5, we need the listener to be a module. # This is just a quick shim layer to make that work. - # The DAG listener API was added at the same time as this method - # was fixed, so we're reusing the same check variable. # # Related Airflow change: https://github.com/apache/airflow/pull/27113. import datahub_airflow_plugin._datahub_listener_module as _listener_module # type: ignore[misc] diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 2f05854940d10..1010bd2933e45 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -14,7 +14,11 @@ deps = # Airflow version airflow21: apache-airflow~=2.1.0 airflow22: apache-airflow~=2.2.0 - airflow24: apache-airflow~=2.4.0 + # On Airflow 2.4 and 2.5, Airflow's constraints file pins pluggy to 1.0.0, + # which has caused issues for us before. As such, we now pin it explicitly + # to prevent regressions. 
+ # See https://github.com/datahub-project/datahub/pull/9365 + airflow24: apache-airflow~=2.4.0,pluggy==1.0.0 airflow26: apache-airflow~=2.6.0 airflow27: apache-airflow~=2.7.0 commands = From d123b6174743f080a0eb8264b224569eaf952550 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth <treff7es@gmail.com> Date: Tue, 5 Dec 2023 17:16:35 +0100 Subject: [PATCH 197/792] feat(ingest/athena): Enable Athena view ingestion and view lineage (#9354) --- .../datahub/ingestion/source/sql/athena.py | 25 +++++++++++++++++-- .../ingestion/source/sql/sql_common.py | 2 ++ .../src/datahub/utilities/sqlglot_lineage.py | 2 ++ 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py index ac0e2bd4bb8a9..c3759875b2769 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py @@ -8,7 +8,8 @@ from pyathena.common import BaseCursor from pyathena.model import AthenaTableMetadata from pyathena.sqlalchemy_athena import AthenaRestDialect -from sqlalchemy import create_engine, inspect, types +from sqlalchemy import create_engine, exc, inspect, text, types +from sqlalchemy.engine import reflection from sqlalchemy.engine.reflection import Inspector from sqlalchemy.types import TypeEngine from sqlalchemy_bigquery import STRUCT @@ -64,6 +65,22 @@ class CustomAthenaRestDialect(AthenaRestDialect): # regex to identify complex types in DDL strings which are embedded in `<>`. _complex_type_pattern = re.compile(r"(<.+>)") + @typing.no_type_check + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + # This method was backported from PyAthena v3.0.7 to allow to retrieve the view definition + # from Athena. This is required until we support sqlalchemy > 2.0 + # https://github.com/laughingman7743/PyAthena/blob/509dd37d0fd15ad603993482cc47b8549b82facd/pyathena/sqlalchemy/base.py#L1118 + raw_connection = self._raw_connection(connection) + schema = schema if schema else raw_connection.schema_name # type: ignore + query = f"""SHOW CREATE VIEW "{schema}"."{view_name}";""" + try: + res = connection.scalars(text(query)) + except exc.OperationalError as e: + raise exc.NoSuchTableError(f"{schema}.{view_name}") from e + else: + return "\n".join([r for r in res]) + @typing.no_type_check def _get_column_type( self, type_: Union[str, Dict[str, Any]] @@ -236,7 +253,7 @@ class AthenaConfig(SQLCommonConfig): # overwrite default behavior of SQLAlchemyConfing include_views: Optional[bool] = pydantic.Field( - default=False, description="Whether views should be ingested." + default=True, description="Whether views should be ingested." 
) _s3_staging_dir_population = pydantic_renamed_field( @@ -303,6 +320,10 @@ def get_inspectors(self) -> Iterable[Inspector]: inspector = inspect(conn) yield inspector + def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]: + schema, _view = dataset_identifier.split(".", 1) + return None, schema + def get_table_properties( self, inspector: Inspector, schema: str, table: str ) -> Tuple[Optional[str], Dict[str, str], Optional[str]]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 67af6b2010c83..590bc7f696784 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -371,6 +371,8 @@ def get_db_name(self, inspector: Inspector) -> str: engine = inspector.engine if engine and hasattr(engine, "url") and hasattr(engine.url, "database"): + if engine.url.database is None: + return "" return str(engine.url.database).strip('"').lower() else: raise Exception("Unable to get database name from Sqlalchemy inspector") diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index c2cccf9f1e389..fc3efef2ba532 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -962,6 +962,8 @@ def _get_dialect(platform: str) -> str: return "hive" if platform == "mssql": return "tsql" + if platform == "athena": + return "trino" else: return platform From 3ee82590cd2ab7da08b5ad8b19b1e4dd988023d9 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth <treff7es@gmail.com> Date: Tue, 5 Dec 2023 17:42:29 +0100 Subject: [PATCH 198/792] fix(ingest/redshift): Identify materialized views properly + fix connection args support (#9368) --- .../docs/sources/redshift/redshift_recipe.yml | 4 +- metadata-ingestion/setup.py | 8 +- .../ingestion/source/redshift/config.py | 23 ++++- .../ingestion/source/redshift/query.py | 18 +++- .../ingestion/source/redshift/redshift.py | 6 +- .../source/redshift/redshift_schema.py | 98 +++++++++++++------ 6 files changed, 109 insertions(+), 48 deletions(-) diff --git a/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml b/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml index be704e6759d41..a561405d3de47 100644 --- a/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml +++ b/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml @@ -40,8 +40,8 @@ source: options: connect_args: - sslmode: "prefer" # or "require" or "verify-ca" - sslrootcert: ~ # needed to unpin the AWS Redshift certificate + # check all available options here: https://pypi.org/project/redshift-connector/ + ssl_insecure: "false" # Specifies if IDP hosts server certificate will be verified sink: # sink configs diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 4f5f09fb148fa..416b255fb763f 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -181,8 +181,8 @@ redshift_common = { # Clickhouse 0.8.3 adds support for SQLAlchemy 1.4.x "sqlalchemy-redshift>=0.8.3", - "psycopg2-binary", "GeoAlchemy2", + "redshift-connector", *sqllineage_lib, *path_spec_common, } @@ -365,11 +365,7 @@ | {"psycopg2-binary", "pymysql>=1.0.2"}, "pulsar": {"requests"}, "redash": {"redash-toolbelt", "sql-metadata"} | sqllineage_lib, - "redshift": sql_common - | redshift_common - | usage_common - | 
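Routing the `athena` platform through sqlglot's `trino` dialect works because Athena's SQL engine is Presto/Trino-derived, so Trino grammar parses typical Athena DDL. A sketch against upstream sqlglot (the project actually pins an Acryl fork, and the query is illustrative):

```python
import sqlglot
from sqlglot import exp

ddl = 'SELECT id, CAST(ts AS timestamp) AS ts FROM "my_db"."my_view"'
expression = sqlglot.parse_one(ddl, dialect="trino")
# Extract the table references the lineage parser would see.
print([table.sql(dialect="trino") for table in expression.find_all(exp.Table)])
```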
{"redshift-connector"} - | sqlglot_lib, + "redshift": sql_common | redshift_common | usage_common | sqlglot_lib, "s3": {*s3_base, *data_lake_profiling}, "gcs": {*s3_base, *data_lake_profiling}, "sagemaker": aws_common, diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 95038ef2c6212..51ad8a050adc2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -82,7 +82,7 @@ class RedshiftConfig( # large Redshift warehouses. As an example, see this query for the columns: # https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/blob/60b4db04c1d26071c291aeea52f1dcb5dd8b0eb0/sqlalchemy_redshift/dialect.py#L745. scheme: str = Field( - default="redshift+psycopg2", + default="redshift+redshift_connector", description="", hidden_from_schema=True, ) @@ -170,3 +170,24 @@ def backward_compatibility_configs_set(cls, values: Dict) -> Dict: "The config option `match_fully_qualified_names` will be deprecated in future and the default behavior will assume `match_fully_qualified_names: True`." ) return values + + @root_validator(skip_on_failure=True) + def connection_config_compatibility_set(cls, values: Dict) -> Dict: + if ( + ("options" in values and "connect_args" in values["options"]) + and "extra_client_options" in values + and len(values["extra_client_options"]) > 0 + ): + raise ValueError( + "Cannot set both `connect_args` and `extra_client_options` in the config. Please use `extra_client_options` only." + ) + + if "options" in values and "connect_args" in values["options"]: + values["extra_client_options"] = values["options"]["connect_args"] + + if values["extra_client_options"]: + if values["options"]: + values["options"]["connect_args"] = values["extra_client_options"] + else: + values["options"] = {"connect_args": values["extra_client_options"]} + return values diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py index a96171caf9835..92e36fffd6bb4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py @@ -179,14 +179,18 @@ class RedshiftQuery: additional_table_metadata: str = """ select - database, - schema, + ti.database, + ti.schema, "table", size, tbl_rows, estimated_visible_rows, skew_rows, - last_accessed + last_accessed, + case + when smi.name is not null then 1 + else 0 + end as is_materialized from pg_catalog.svv_table_info as ti left join ( @@ -198,8 +202,12 @@ class RedshiftQuery: group by tbl) as la on (la.tbl = ti.table_id) - ; - """ + left join stv_mv_info smi on + smi.db_name = ti.database + and smi.schema = ti.schema + and smi.name = ti.table + ; +""" @staticmethod def stl_scan_based_lineage_query( diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py index 04f0edf504595..eb635b1292b81 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py @@ -6,7 +6,6 @@ import humanfriendly # These imports verify that the dependencies are available. 
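The new `connection_config_compatibility_set` validator enforces a simple merge rule; here is a paraphrase as a plain function (not the actual pydantic hook), with an assertion showing the mirroring between the two config surfaces:

```python
def merge_connection_options(values: dict) -> dict:
    connect_args = (values.get("options") or {}).get("connect_args")
    extra = values.get("extra_client_options") or {}
    if connect_args and extra:
        raise ValueError("Set either connect_args or extra_client_options, not both")
    if connect_args:
        values["extra_client_options"] = connect_args
    elif extra:
        values.setdefault("options", {})["connect_args"] = extra
    return values

assert merge_connection_options({"extra_client_options": {"ssl_insecure": "false"}}) == {
    "extra_client_options": {"ssl_insecure": "false"},
    "options": {"connect_args": {"ssl_insecure": "false"}},
}
```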
-import psycopg2 # noqa: F401 import pydantic import redshift_connector @@ -352,7 +351,6 @@ def create(cls, config_dict, ctx): def get_redshift_connection( config: RedshiftConfig, ) -> redshift_connector.Connection: - client_options = config.extra_client_options host, port = config.host_port.split(":") conn = redshift_connector.connect( host=host, @@ -360,7 +358,7 @@ def get_redshift_connection( user=config.username, database=config.database, password=config.password.get_secret_value() if config.password else None, - **client_options, + **config.extra_client_options, ) conn.autocommit = True @@ -641,7 +639,7 @@ def gen_view_dataset_workunits( dataset_urn = self.gen_dataset_urn(datahub_dataset_name) if view.ddl: view_properties_aspect = ViewProperties( - materialized=view.type == "VIEW_MATERIALIZED", + materialized=view.materialized, viewLanguage="SQL", viewLogic=view.ddl, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py index 4a13d17d2cc0f..ca81682ae00e4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py @@ -40,6 +40,7 @@ class RedshiftTable(BaseTable): @dataclass class RedshiftView(BaseTable): type: Optional[str] = None + materialized: bool = False columns: List[RedshiftColumn] = field(default_factory=list) last_altered: Optional[datetime] = None size_in_bytes: Optional[int] = None @@ -66,6 +67,7 @@ class RedshiftExtraTableMeta: estimated_visible_rows: Optional[int] = None skew_rows: Optional[float] = None last_accessed: Optional[datetime] = None + is_materialized: bool = False @dataclass @@ -148,6 +150,7 @@ def enrich_tables( ], skew_rows=meta[field_names.index("skew_rows")], last_accessed=meta[field_names.index("last_accessed")], + is_materialized=meta[field_names.index("is_materialized")], ) if table_meta.schema not in table_enrich: table_enrich.setdefault(table_meta.schema, {}) @@ -173,42 +176,23 @@ def get_tables_and_views( logger.info(f"Fetched {len(db_tables)} tables/views from Redshift") for table in db_tables: schema = table[field_names.index("schema")] + table_name = table[field_names.index("relname")] + if table[field_names.index("tabletype")] not in [ "MATERIALIZED VIEW", "VIEW", ]: if schema not in tables: tables.setdefault(schema, []) - table_name = table[field_names.index("relname")] - - creation_time: Optional[datetime] = None - if table[field_names.index("creation_time")]: - creation_time = table[field_names.index("creation_time")].replace( - tzinfo=timezone.utc - ) - last_altered: Optional[datetime] = None - size_in_bytes: Optional[int] = None - rows_count: Optional[int] = None - if schema in enriched_table and table_name in enriched_table[schema]: - if enriched_table[schema][table_name].last_accessed: - # Mypy seems to be not clever enough to understand the above check - last_accessed = enriched_table[schema][table_name].last_accessed - assert last_accessed - last_altered = last_accessed.replace(tzinfo=timezone.utc) - elif creation_time: - last_altered = creation_time - - if enriched_table[schema][table_name].size: - # Mypy seems to be not clever enough to understand the above check - size = enriched_table[schema][table_name].size - if size: - size_in_bytes = size * 1024 * 1024 - - if enriched_table[schema][table_name].estimated_visible_rows: - rows = enriched_table[schema][table_name].estimated_visible_rows - assert rows - rows_count 
= int(rows) + ( + creation_time, + last_altered, + rows_count, + size_in_bytes, + ) = RedshiftDataDictionary.get_table_stats( + enriched_table, field_names, schema, table + ) tables[schema].append( RedshiftTable( @@ -231,16 +215,37 @@ def get_tables_and_views( else: if schema not in views: views[schema] = [] + ( + creation_time, + last_altered, + rows_count, + size_in_bytes, + ) = RedshiftDataDictionary.get_table_stats( + enriched_table=enriched_table, + field_names=field_names, + schema=schema, + table=table, + ) + + materialized = False + if schema in enriched_table and table_name in enriched_table[schema]: + if enriched_table[schema][table_name].is_materialized: + materialized = True views[schema].append( RedshiftView( type=table[field_names.index("tabletype")], name=table[field_names.index("relname")], ddl=table[field_names.index("view_definition")], - created=table[field_names.index("creation_time")], + created=creation_time, comment=table[field_names.index("table_description")], + last_altered=last_altered, + size_in_bytes=size_in_bytes, + rows_count=rows_count, + materialized=materialized, ) ) + for schema_key, schema_tables in tables.items(): logger.info( f"In schema: {schema_key} discovered {len(schema_tables)} tables" @@ -250,6 +255,39 @@ def get_tables_and_views( return tables, views + @staticmethod + def get_table_stats(enriched_table, field_names, schema, table): + table_name = table[field_names.index("relname")] + + creation_time: Optional[datetime] = None + if table[field_names.index("creation_time")]: + creation_time = table[field_names.index("creation_time")].replace( + tzinfo=timezone.utc + ) + last_altered: Optional[datetime] = None + size_in_bytes: Optional[int] = None + rows_count: Optional[int] = None + if schema in enriched_table and table_name in enriched_table[schema]: + if enriched_table[schema][table_name].last_accessed: + # Mypy seems to be not clever enough to understand the above check + last_accessed = enriched_table[schema][table_name].last_accessed + assert last_accessed + last_altered = last_accessed.replace(tzinfo=timezone.utc) + elif creation_time: + last_altered = creation_time + + if enriched_table[schema][table_name].size: + # Mypy seems to be not clever enough to understand the above check + size = enriched_table[schema][table_name].size + if size: + size_in_bytes = size * 1024 * 1024 + + if enriched_table[schema][table_name].estimated_visible_rows: + rows = enriched_table[schema][table_name].estimated_visible_rows + assert rows + rows_count = int(rows) + return creation_time, last_altered, rows_count, size_in_bytes + @staticmethod def get_schema_fields_for_column( column: RedshiftColumn, From 806f09ae23b1a569006be9eaf8d13165e67742b3 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Tue, 5 Dec 2023 12:33:00 -0500 Subject: [PATCH 199/792] test(ingest/unity): Unity catalog data generation (#8949) --- metadata-ingestion/setup.py | 1 + .../performance/bigquery/bigquery_events.py | 10 +- .../tests/performance/data_generation.py | 153 ++++++++++----- .../tests/performance/data_model.py | 54 +++++- .../tests/performance/databricks/generator.py | 177 ++++++++++++++++++ .../databricks/unity_proxy_mock.py | 73 ++++---- .../tests/unit/test_bigquery_source.py | 2 +- .../tests/unit/test_bigquery_usage.py | 14 +- 8 files changed, 383 insertions(+), 101 deletions(-) create mode 100644 metadata-ingestion/tests/performance/databricks/generator.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 
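Units worth noting in the refactor above: `svv_table_info.size` is reported in 1 MB blocks (hence the `* 1024 * 1024`), `estimated_visible_rows` already excludes rows marked for deletion, and a relation is flagged materialized exactly when the `stv_mv_info` join finds a match. A paraphrase of `get_table_stats` plus that flag (not the actual helper):

```python
from datetime import datetime, timezone
from typing import Optional, Tuple

def enrich(size_mb: Optional[int], visible_rows: Optional[int],
           last_accessed: Optional[datetime], created: Optional[datetime],
           mv_name: Optional[str]) -> Tuple[Optional[int], Optional[int], Optional[datetime], bool]:
    size_in_bytes = size_mb * 1024 * 1024 if size_mb else None
    rows_count = int(visible_rows) if visible_rows is not None else None
    last_altered = last_accessed or created
    if last_altered:
        last_altered = last_altered.replace(tzinfo=timezone.utc)
    return size_in_bytes, rows_count, last_altered, mv_name is not None

print(enrich(5, 120, None, datetime(2023, 12, 5), "mv_orders"))
# (5242880, 120, datetime(2023, 12, 5, 0, 0, tzinfo=timezone.utc), True)
```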
416b255fb763f..69cbe8d823450 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -262,6 +262,7 @@ "databricks-sdk>=0.9.0", "pyspark~=3.3.0", "requests", + "databricks-sql-connector", } mysql = sql_common | {"pymysql>=1.0.2"} diff --git a/metadata-ingestion/tests/performance/bigquery/bigquery_events.py b/metadata-ingestion/tests/performance/bigquery/bigquery_events.py index d9b5571a8015f..0e0bfe78c260f 100644 --- a/metadata-ingestion/tests/performance/bigquery/bigquery_events.py +++ b/metadata-ingestion/tests/performance/bigquery/bigquery_events.py @@ -2,7 +2,7 @@ import random import uuid from collections import defaultdict -from typing import Dict, Iterable, List, cast +from typing import Dict, Iterable, List, Set from typing_extensions import get_args @@ -15,7 +15,7 @@ ) from datahub.ingestion.source.bigquery_v2.bigquery_config import BigQueryV2Config from datahub.ingestion.source.bigquery_v2.usage import OPERATION_STATEMENT_TYPES -from tests.performance.data_model import Query, StatementType, Table, View +from tests.performance.data_model import Query, StatementType, Table # https://cloud.google.com/bigquery/docs/reference/auditlogs/rest/Shared.Types/BigQueryAuditMetadata.TableDataRead.Reason READ_REASONS = [ @@ -86,7 +86,7 @@ def generate_events( ref_from_table(parent, table_to_project) for field in query.fields_accessed if field.table.is_view() - for parent in cast(View, field.table).parents + for parent in field.table.upstreams ) ), referencedViews=referencedViews, @@ -96,7 +96,7 @@ def generate_events( query_on_view=True if referencedViews else False, ) ) - table_accesses = defaultdict(set) + table_accesses: Dict[BigQueryTableRef, Set[str]] = defaultdict(set) for field in query.fields_accessed: if not field.table.is_view(): table_accesses[ref_from_table(field.table, table_to_project)].add( @@ -104,7 +104,7 @@ def generate_events( ) else: # assuming that same fields are accessed in parent tables - for parent in cast(View, field.table).parents: + for parent in field.table.upstreams: table_accesses[ref_from_table(parent, table_to_project)].add( field.column ) diff --git a/metadata-ingestion/tests/performance/data_generation.py b/metadata-ingestion/tests/performance/data_generation.py index 67b156896909a..9b80d6260d408 100644 --- a/metadata-ingestion/tests/performance/data_generation.py +++ b/metadata-ingestion/tests/performance/data_generation.py @@ -8,16 +8,16 @@ This is a work in progress, built piecemeal as needed. 
""" import random -import uuid +from abc import ABCMeta, abstractmethod +from collections import OrderedDict from dataclasses import dataclass from datetime import datetime, timedelta, timezone -from typing import Iterable, List, TypeVar, Union, cast +from typing import Collection, Iterable, List, Optional, TypeVar, Union, cast from faker import Faker from tests.performance.data_model import ( Column, - ColumnMapping, ColumnType, Container, FieldAccess, @@ -40,17 +40,46 @@ "UNKNOWN", ] +ID_COLUMN = "id" # Use to allow joins between all tables + + +class Distribution(metaclass=ABCMeta): + @abstractmethod + def _sample(self) -> int: + raise NotImplementedError + + def sample( + self, *, floor: Optional[int] = None, ceiling: Optional[int] = None + ) -> int: + value = self._sample() + if floor is not None: + value = max(value, floor) + if ceiling is not None: + value = min(value, ceiling) + return value + @dataclass(frozen=True) -class NormalDistribution: +class NormalDistribution(Distribution): mu: float sigma: float - def sample(self) -> int: + def _sample(self) -> int: return int(random.gauss(mu=self.mu, sigma=self.sigma)) - def sample_with_floor(self, floor: int = 1) -> int: - return max(int(random.gauss(mu=self.mu, sigma=self.sigma)), floor) + +@dataclass(frozen=True) +class LomaxDistribution(Distribution): + """See https://en.wikipedia.org/wiki/Lomax_distribution. + + Equivalent to pareto(scale, shape) - scale; scale * beta_prime(1, shape) + """ + + scale: float + shape: float + + def _sample(self) -> int: + return int(self.scale * (random.paretovariate(self.shape) - 1)) @dataclass @@ -72,9 +101,9 @@ def generate_data( num_containers: Union[List[int], int], num_tables: int, num_views: int, - columns_per_table: NormalDistribution = NormalDistribution(5, 2), - parents_per_view: NormalDistribution = NormalDistribution(2, 1), - view_definition_length: NormalDistribution = NormalDistribution(150, 50), + columns_per_table: Distribution = NormalDistribution(5, 2), + parents_per_view: Distribution = NormalDistribution(2, 1), + view_definition_length: Distribution = NormalDistribution(150, 50), time_range: timedelta = timedelta(days=14), ) -> SeedMetadata: # Assemble containers @@ -85,43 +114,32 @@ def generate_data( for i, num_in_layer in enumerate(num_containers): layer = [ Container( - f"{i}-container-{j}", + f"{_container_type(i)}_{j}", parent=random.choice(containers[-1]) if containers else None, ) for j in range(num_in_layer) ] containers.append(layer) - # Assemble tables + # Assemble tables and views, lineage, and definitions tables = [ - Table( - f"table-{i}", - container=random.choice(containers[-1]), - columns=[ - f"column-{j}-{uuid.uuid4()}" - for j in range(columns_per_table.sample_with_floor()) - ], - column_mapping=None, - ) - for i in range(num_tables) + _generate_table(i, containers[-1], columns_per_table) for i in range(num_tables) ] views = [ View( - f"view-{i}", - container=random.choice(containers[-1]), - columns=[ - f"column-{j}-{uuid.uuid4()}" - for j in range(columns_per_table.sample_with_floor()) - ], - column_mapping=None, - definition=f"{uuid.uuid4()}-{'*' * view_definition_length.sample_with_floor(10)}", - parents=random.sample(tables, parents_per_view.sample_with_floor()), + **{ # type: ignore + **_generate_table(i, containers[-1], columns_per_table).__dict__, + "name": f"view_{i}", + "definition": f"--{'*' * view_definition_length.sample(floor=0)}", + }, ) for i in range(num_views) ] - for table in tables + views: - _generate_column_mapping(table) + for view in views: 
+ view.upstreams = random.sample(tables, k=parents_per_view.sample(floor=1)) + + generate_lineage(tables, views) now = datetime.now(tz=timezone.utc) return SeedMetadata( @@ -133,6 +151,33 @@ def generate_data( ) +def generate_lineage( + tables: Collection[Table], + views: Collection[Table], + # Percentiles: 75th=0, 80th=1, 95th=2, 99th=4, 99.99th=15 + upstream_distribution: Distribution = LomaxDistribution(scale=3, shape=5), +) -> None: + num_upstreams = [upstream_distribution.sample(ceiling=100) for _ in tables] + # Prioritize tables with a lot of upstreams themselves + factor = 1 + len(tables) // 10 + table_weights = [1 + (num_upstreams[i] * factor) for i in range(len(tables))] + view_weights = [1] * len(views) + + # TODO: Python 3.9 use random.sample with counts + sample = [] + for table, weight in zip(tables, table_weights): + for _ in range(weight): + sample.append(table) + for view, weight in zip(views, view_weights): + for _ in range(weight): + sample.append(view) + for i, table in enumerate(tables): + table.upstreams = random.sample( # type: ignore + sample, + k=num_upstreams[i], + ) + + def generate_queries( seed_metadata: SeedMetadata, num_selects: int, @@ -146,12 +191,12 @@ def generate_queries( ) -> Iterable[Query]: faker = Faker() query_texts = [ - faker.paragraph(query_length.sample_with_floor(30) // 30) + faker.paragraph(query_length.sample(floor=30) // 30) for _ in range(num_unique_queries) ] all_tables = seed_metadata.tables + seed_metadata.views - users = [f"user-{i}@xyz.com" for i in range(num_users)] + users = [f"user_{i}@xyz.com" for i in range(num_users)] for i in range(num_selects): # Pure SELECT statements tables = _sample_list(all_tables, tables_per_select) all_columns = [ @@ -191,21 +236,43 @@ def generate_queries( ) -def _generate_column_mapping(table: Table) -> ColumnMapping: - d = {} - for column in table.columns: - d[column] = Column( - name=column, +def _container_type(i: int) -> str: + if i == 0: + return "database" + elif i == 1: + return "schema" + else: + return f"{i}container" + + +def _generate_table( + i: int, parents: List[Container], columns_per_table: Distribution +) -> Table: + num_columns = columns_per_table.sample(floor=1) + + columns = OrderedDict({ID_COLUMN: Column(ID_COLUMN, ColumnType.INTEGER, False)}) + for j in range(num_columns): + name = f"column_{j}" + columns[name] = Column( + name=name, type=random.choice(list(ColumnType)), nullable=random.random() < 0.1, # Fixed 10% chance for now ) - table.column_mapping = d - return d + return Table( + f"table_{i}", + container=random.choice(parents), + columns=columns, + upstreams=[], + ) def _sample_list(lst: List[T], dist: NormalDistribution, floor: int = 1) -> List[T]: - return random.sample(lst, min(dist.sample_with_floor(floor), len(lst))) + return random.sample(lst, min(dist.sample(floor=floor), len(lst))) def _random_time_between(start: datetime, end: datetime) -> datetime: return start + timedelta(seconds=(end - start).total_seconds() * random.random()) + + +if __name__ == "__main__": + z = generate_data(10, 1000, 10) diff --git a/metadata-ingestion/tests/performance/data_model.py b/metadata-ingestion/tests/performance/data_model.py index 9425fa827070e..728bb6ddde215 100644 --- a/metadata-ingestion/tests/performance/data_model.py +++ b/metadata-ingestion/tests/performance/data_model.py @@ -1,7 +1,9 @@ -from dataclasses import dataclass +import typing +from collections import OrderedDict +from dataclasses import dataclass, field from datetime import datetime from enum import Enum -from 
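The `LomaxDistribution` comments can be checked analytically: `scale * (paretovariate(shape) - 1)` has CDF `1 - (1 + x/scale) ** -shape`, giving a median of about 58.7 and a 90th percentile of about 364 for `scale=100, shape=1.5`, matching the percentile comments used for row counts later in this patch. A quick empirical check:

```python
import random
import statistics

def lomax_sample(scale: float, shape: float) -> float:
    # scale * (Pareto(shape) - 1) is Lomax: P(X > x) = (1 + x/scale) ** -shape
    return scale * (random.paretovariate(shape) - 1)

random.seed(0)
samples = sorted(lomax_sample(100, 1.5) for _ in range(100_000))
print("median ~", round(statistics.median(samples)))        # theory: ~59
print("p90    ~", round(samples[int(0.9 * len(samples))]))  # theory: ~364
```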
typing import Dict, List, Optional +from typing import Dict, List, Optional, Union from typing_extensions import Literal @@ -37,29 +39,63 @@ class ColumnType(str, Enum): @dataclass class Column: name: str - type: ColumnType - nullable: bool + type: ColumnType = ColumnType.STRING + nullable: bool = False ColumnRef = str ColumnMapping = Dict[ColumnRef, Column] -@dataclass +@dataclass(init=False) class Table: name: str container: Container - columns: List[ColumnRef] - column_mapping: Optional[ColumnMapping] + columns: typing.OrderedDict[ColumnRef, Column] = field(repr=False) + upstreams: List["Table"] = field(repr=False) + + def __init__( + self, + name: str, + container: Container, + columns: Union[List[str], Dict[str, Column]], + upstreams: List["Table"], + ): + self.name = name + self.container = container + self.upstreams = upstreams + if isinstance(columns, list): + self.columns = OrderedDict((col, Column(col)) for col in columns) + elif isinstance(columns, dict): + self.columns = OrderedDict(columns) + + @property + def name_components(self) -> List[str]: + lst = [self.name] + container: Optional[Container] = self.container + while container: + lst.append(container.name) + container = container.parent + return lst[::-1] def is_view(self) -> bool: return False -@dataclass +@dataclass(init=False) class View(Table): definition: str - parents: List[Table] + + def __init__( + self, + name: str, + container: Container, + columns: Union[List[str], Dict[str, Column]], + upstreams: List["Table"], + definition: str, + ): + super().__init__(name, container, columns, upstreams) + self.definition = definition def is_view(self) -> bool: return True diff --git a/metadata-ingestion/tests/performance/databricks/generator.py b/metadata-ingestion/tests/performance/databricks/generator.py new file mode 100644 index 0000000000000..29df325d856a1 --- /dev/null +++ b/metadata-ingestion/tests/performance/databricks/generator.py @@ -0,0 +1,177 @@ +import logging +import random +import string +from concurrent.futures import ThreadPoolExecutor, wait +from datetime import datetime +from typing import Callable, List, TypeVar, Union +from urllib.parse import urlparse + +from databricks.sdk import WorkspaceClient +from databricks.sdk.core import DatabricksError +from databricks.sdk.service.catalog import ColumnTypeName +from performance.data_generation import Distribution, LomaxDistribution, SeedMetadata +from performance.data_model import ColumnType, Container, Table, View +from performance.databricks.unity_proxy_mock import _convert_column_type +from sqlalchemy import create_engine + +from datahub.ingestion.source.sql.sql_config import make_sqlalchemy_uri + +logger = logging.getLogger(__name__) +T = TypeVar("T") + +MAX_WORKERS = 200 + + +class DatabricksDataGenerator: + def __init__(self, host: str, token: str, warehouse_id: str): + self.client = WorkspaceClient(host=host, token=token) + self.warehouse_id = warehouse_id + url = make_sqlalchemy_uri( + scheme="databricks", + username="token", + password=token, + at=urlparse(host).netloc, + db=None, + uri_opts={"http_path": f"/sql/1.0/warehouses/{warehouse_id}"}, + ) + engine = create_engine( + url, connect_args={"timeout": 600}, pool_size=MAX_WORKERS + ) + self.connection = engine.connect() + + def clear_data(self, seed_metadata: SeedMetadata) -> None: + for container in seed_metadata.containers[0]: + try: + self.client.catalogs.delete(container.name, force=True) + except DatabricksError: + pass + + def create_data( + self, + seed_metadata: SeedMetadata, + # 
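The reworked `Table.__init__` in `data_model.py` accepts either a bare list of column names or a full name-to-`Column` mapping; a trimmed sketch of just that normalization (the real class also wires up `container` and `upstreams`):

```python
from collections import OrderedDict
from dataclasses import dataclass
from typing import Dict, List, Union

@dataclass
class Column:  # reduced stand-in for the dataclass above
    name: str
    type: str = "STRING"
    nullable: bool = False

def normalize(columns: Union[List[str], Dict[str, Column]]) -> "OrderedDict[str, Column]":
    if isinstance(columns, list):
        return OrderedDict((name, Column(name)) for name in columns)
    return OrderedDict(columns)

print(next(iter(normalize(["id", "name", "age"]).values())))  # Column(name='id', ...)
```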
Percentiles: 1st=0, 10th=7, 25th=21, 50th=58, 75th=152, 90th=364, 99th=2063, 99.99th=46316 + num_rows_distribution: Distribution = LomaxDistribution(scale=100, shape=1.5), + ) -> None: + """Create data in Databricks based on SeedMetadata.""" + for container in seed_metadata.containers[0]: + self._create_catalog(container) + for container in seed_metadata.containers[1]: + self._create_schema(container) + + _thread_pool_execute("create tables", seed_metadata.tables, self._create_table) + _thread_pool_execute("create views", seed_metadata.views, self._create_view) + _thread_pool_execute( + "populate tables", + seed_metadata.tables, + lambda t: self._populate_table( + t, num_rows_distribution.sample(ceiling=1_000_000) + ), + ) + _thread_pool_execute( + "create table lineage", seed_metadata.tables, self._create_table_lineage + ) + + def _create_catalog(self, catalog: Container) -> None: + try: + self.client.catalogs.get(catalog.name) + except DatabricksError: + self.client.catalogs.create(catalog.name) + + def _create_schema(self, schema: Container) -> None: + try: + self.client.schemas.get(f"{schema.parent.name}.{schema.name}") + except DatabricksError: + self.client.schemas.create(schema.name, schema.parent.name) + + def _create_table(self, table: Table) -> None: + try: + self.client.tables.delete(".".join(table.name_components)) + except DatabricksError: + pass + + columns = ", ".join( + f"{name} {_convert_column_type(column.type).value}" + for name, column in table.columns.items() + ) + self._execute_sql(f"CREATE TABLE {_quote_table(table)} ({columns})") + self._assert_table_exists(table) + + def _create_view(self, view: View) -> None: + self._execute_sql(_generate_view_definition(view)) + self._assert_table_exists(view) + + def _assert_table_exists(self, table: Table) -> None: + self.client.tables.get(".".join(table.name_components)) + + def _populate_table(self, table: Table, num_rows: int) -> None: + values = [ + ", ".join( + str(_generate_value(column.type)) for column in table.columns.values() + ) + for _ in range(num_rows) + ] + values_str = ", ".join(f"({value})" for value in values) + self._execute_sql(f"INSERT INTO {_quote_table(table)} VALUES {values_str}") + + def _create_table_lineage(self, table: Table) -> None: + for upstream in table.upstreams: + self._execute_sql(_generate_insert_lineage(table, upstream)) + + def _execute_sql(self, sql: str) -> None: + print(sql) + self.connection.execute(sql) + + +def _thread_pool_execute(desc: str, lst: List[T], fn: Callable[[T], None]) -> None: + with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor: + futures = [executor.submit(fn, item) for item in lst] + wait(futures) + for future in futures: + try: + future.result() + except Exception as e: + logger.error(f"Error executing '{desc}': {e}", exc_info=True) + + +def _generate_value(t: ColumnType) -> Union[int, float, str, bool]: + ctn = _convert_column_type(t) + if ctn == ColumnTypeName.INT: + return random.randint(-(2**31), 2**31 - 1) + elif ctn == ColumnTypeName.DOUBLE: + return random.uniform(-(2**31), 2**31 - 1) + elif ctn == ColumnTypeName.STRING: + return ( + "'" + "".join(random.choice(string.ascii_letters) for _ in range(8)) + "'" + ) + elif ctn == ColumnTypeName.BOOLEAN: + return random.choice([True, False]) + elif ctn == ColumnTypeName.TIMESTAMP: + return random.randint(0, int(datetime.now().timestamp())) + else: + raise NotImplementedError(f"Unsupported type {ctn}") + + +def _generate_insert_lineage(table: Table, upstream: Table) -> str: + select = [] + for column in 
table.columns.values(): + matching_cols = [c for c in upstream.columns.values() if c.type == column.type] + if matching_cols: + upstream_col = random.choice(matching_cols) + select.append(f"{upstream_col.name} AS {column.name}") + else: + select.append(f"{_generate_value(column.type)} AS {column.name}") + + return f"INSERT INTO {_quote_table(table)} SELECT {', '.join(select)} FROM {_quote_table(upstream)}" + + +def _generate_view_definition(view: View) -> str: + from_statement = f"FROM {_quote_table(view.upstreams[0])} t0" + join_statement = " ".join( + f"JOIN {_quote_table(upstream)} t{i+1} ON t0.id = t{i+1}.id" + for i, upstream in enumerate(view.upstreams[1:]) + ) + return f"CREATE VIEW {_quote_table(view)} AS SELECT * {from_statement} {join_statement} {view.definition}" + + +def _quote_table(table: Table) -> str: + return ".".join(f"`{component}`" for component in table.name_components) diff --git a/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py b/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py index 593163e12bf0a..ee1caf6783ec1 100644 --- a/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py +++ b/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py @@ -88,22 +88,21 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]: def tables(self, schema: Schema) -> Iterable[Table]: for table in self._schema_to_table[schema.name]: columns = [] - if table.column_mapping: - for i, col_name in enumerate(table.columns): - column = table.column_mapping[col_name] - columns.append( - Column( - id=column.name, - name=column.name, - type_name=self._convert_column_type(column.type), - type_text=column.type.value, - nullable=column.nullable, - position=i, - comment=None, - type_precision=0, - type_scale=0, - ) + for i, col_name in enumerate(table.columns): + column = table.columns[col_name] + columns.append( + Column( + id=column.name, + name=column.name, + type_name=_convert_column_type(column.type), + type_text=column.type.value, + nullable=column.nullable, + position=i, + comment=None, + type_precision=0, + type_scale=0, ) + ) yield Table( id=f"{schema.id}.{table.name}", @@ -145,7 +144,7 @@ def query_history( yield Query( query_id=str(i), query_text=query.text, - statement_type=self._convert_statement_type(query.type), + statement_type=_convert_statement_type(query.type), start_time=query.timestamp, end_time=query.timestamp, user_id=hash(query.actor), @@ -160,24 +159,24 @@ def table_lineage(self, table: Table) -> None: def get_column_lineage(self, table: Table) -> None: pass - @staticmethod - def _convert_column_type(t: ColumnType) -> ColumnTypeName: - if t == ColumnType.INTEGER: - return ColumnTypeName.INT - elif t == ColumnType.FLOAT: - return ColumnTypeName.DOUBLE - elif t == ColumnType.STRING: - return ColumnTypeName.STRING - elif t == ColumnType.BOOLEAN: - return ColumnTypeName.BOOLEAN - elif t == ColumnType.DATETIME: - return ColumnTypeName.TIMESTAMP - else: - raise ValueError(f"Unknown column type: {t}") - - @staticmethod - def _convert_statement_type(t: StatementType) -> QueryStatementType: - if t == "CUSTOM" or t == "UNKNOWN": - return QueryStatementType.OTHER - else: - return QueryStatementType[t] + +def _convert_column_type(t: ColumnType) -> ColumnTypeName: + if t == ColumnType.INTEGER: + return ColumnTypeName.INT + elif t == ColumnType.FLOAT: + return ColumnTypeName.DOUBLE + elif t == ColumnType.STRING: + return ColumnTypeName.STRING + elif t == ColumnType.BOOLEAN: + return ColumnTypeName.BOOLEAN + elif t == 
ColumnType.DATETIME: + return ColumnTypeName.TIMESTAMP + else: + raise ValueError(f"Unknown column type: {t}") + + +def _convert_statement_type(t: StatementType) -> QueryStatementType: + if t == "CUSTOM" or t == "UNKNOWN": + return QueryStatementType.OTHER + else: + return QueryStatementType[t] diff --git a/metadata-ingestion/tests/unit/test_bigquery_source.py b/metadata-ingestion/tests/unit/test_bigquery_source.py index 4cfa5c48d2377..3cdb73d77d0a1 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_source.py +++ b/metadata-ingestion/tests/unit/test_bigquery_source.py @@ -324,7 +324,7 @@ def test_get_projects_list_failure( {"project_id_pattern": {"deny": ["^test-project$"]}} ) source = BigqueryV2Source(config=config, ctx=PipelineContext(run_id="test")) - caplog.records.clear() + caplog.clear() with caplog.at_level(logging.ERROR): projects = source._get_projects() assert len(caplog.records) == 1 diff --git a/metadata-ingestion/tests/unit/test_bigquery_usage.py b/metadata-ingestion/tests/unit/test_bigquery_usage.py index 1eb5d8b00e27c..c0055763bc15b 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_usage.py +++ b/metadata-ingestion/tests/unit/test_bigquery_usage.py @@ -1,7 +1,7 @@ import logging import random from datetime import datetime, timedelta, timezone -from typing import Iterable, cast +from typing import Iterable from unittest.mock import MagicMock, patch import pytest @@ -45,15 +45,16 @@ ACTOR_2, ACTOR_2_URN = "b@acryl.io", "urn:li:corpuser:b" DATABASE_1 = Container("database_1") DATABASE_2 = Container("database_2") -TABLE_1 = Table("table_1", DATABASE_1, ["id", "name", "age"], None) -TABLE_2 = Table("table_2", DATABASE_1, ["id", "table_1_id", "value"], None) +TABLE_1 = Table("table_1", DATABASE_1, columns=["id", "name", "age"], upstreams=[]) +TABLE_2 = Table( + "table_2", DATABASE_1, columns=["id", "table_1_id", "value"], upstreams=[] +) VIEW_1 = View( name="view_1", container=DATABASE_1, columns=["id", "name", "total"], definition="VIEW DEFINITION 1", - parents=[TABLE_1, TABLE_2], - column_mapping=None, + upstreams=[TABLE_1, TABLE_2], ) ALL_TABLES = [TABLE_1, TABLE_2, VIEW_1] @@ -842,6 +843,7 @@ def test_usage_counts_no_columns( ) ), ] + caplog.clear() with caplog.at_level(logging.WARNING): workunits = usage_extractor._get_workunits_internal( events, [TABLE_REFS[TABLE_1.name]] @@ -938,7 +940,7 @@ def test_operational_stats( ).to_urn("PROD") for field in query.fields_accessed if field.table.is_view() - for parent in cast(View, field.table).parents + for parent in field.table.upstreams ) ), ), From c66619ccc7be509e37e804588023c51984b4fb33 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 5 Dec 2023 14:03:24 -0600 Subject: [PATCH 200/792] fix(elasticsearch): set datahub usage events shard & replica count (#9388) --- docker/elasticsearch-setup/create-indices.sh | 7 ++++++- .../resources/index/usage-event/aws_es_index_template.json | 4 +++- .../main/resources/index/usage-event/index_template.json | 4 +++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/docker/elasticsearch-setup/create-indices.sh b/docker/elasticsearch-setup/create-indices.sh index 5c4eb3ce3851e..81cf405bf4b3d 100755 --- a/docker/elasticsearch-setup/create-indices.sh +++ b/docker/elasticsearch-setup/create-indices.sh @@ -5,6 +5,8 @@ set -e : ${DATAHUB_ANALYTICS_ENABLED:=true} : ${USE_AWS_ELASTICSEARCH:=false} : ${ELASTICSEARCH_INSECURE:=false} +: ${DUE_SHARDS:=1} +: ${DUE_REPLICAS:=1} # protocol: http or https? 
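A note on the create-indices.sh hunks here, for readers skimming the diff: the two new DUE_SHARDS/DUE_REPLICAS variables default to 1 and are substituted into the usage-event index templates the same way the existing PREFIX placeholder already is, via sed, before the result is PUT to Elasticsearch. A minimal standalone sketch of that substitution step, assuming a template file containing the literal DUE_SHARDS and DUE_REPLICAS tokens (the file name and target address below are illustrative, not the script's actual values):

    #!/bin/bash
    # Defaults mirror the patch: single shard, single replica unless overridden.
    : ${DUE_SHARDS:=1}
    : ${DUE_REPLICAS:=1}

    # The template file is not valid JSON until the numeric placeholders are
    # replaced, so substitution must happen before it is sent to Elasticsearch.
    sed -e "s/DUE_SHARDS/$DUE_SHARDS/g" \
        -e "s/DUE_REPLICAS/$DUE_REPLICAS/g" \
        index_template.json > /tmp/index_template.json

    curl -XPUT "$ELASTICSEARCH_URL/_index_template/datahub_usage_event_index_template" \
        -H 'Content-Type: application/json' --data "@/tmp/index_template.json"

Because the placeholders sit in numeric positions ("index.number_of_shards": DUE_SHARDS), quoting them in the template would change the setting's type; leaving them as raw tokens and substituting with sed keeps the emitted JSON numeric.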
if [[ $ELASTICSEARCH_USE_SSL == true ]]; then @@ -74,7 +76,10 @@ function create_if_not_exists { # use the file at given path as definition, but first replace all occurences of `PREFIX` # placeholder within the file with the actual prefix value TMP_SOURCE_PATH="/tmp/$RESOURCE_DEFINITION_NAME" - sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" | tee -a "$TMP_SOURCE_PATH" + sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" \ + | sed -e "s/DUE_SHARDS/$DUE_SHARDS/g" \ + | sed -e "s/DUE_REPLICAS/$DUE_REPLICAS/g" \ + | tee -a "$TMP_SOURCE_PATH" curl "${CURL_ARGS[@]}" -XPUT "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS" -H 'Content-Type: application/json' --data "@$TMP_SOURCE_PATH" elif [ $RESOURCE_STATUS -eq 403 ]; then diff --git a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json index 21e98e4e96b5f..16d1e14720b2d 100644 --- a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json +++ b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json @@ -20,6 +20,8 @@ } }, "settings": { - "index.opendistro.index_state_management.rollover_alias": "PREFIXdatahub_usage_event" + "index.opendistro.index_state_management.rollover_alias": "PREFIXdatahub_usage_event", + "index.number_of_shards": DUE_SHARDS, + "index.number_of_replicas": DUE_REPLICAS } } \ No newline at end of file diff --git a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json index 44f6e644713eb..e3c6a8c37e573 100644 --- a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json +++ b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json @@ -23,7 +23,9 @@ } }, "settings": { - "index.lifecycle.name": "PREFIXdatahub_usage_event_policy" + "index.lifecycle.name": "PREFIXdatahub_usage_event_policy", + "index.number_of_shards": DUE_SHARDS, + "index.number_of_replicas": DUE_REPLICAS } } } \ No newline at end of file From 7fb60869f2a9757d6729d52a44f5c0390af86381 Mon Sep 17 00:00:00 2001 From: siladitya <68184387+siladitya2@users.noreply.github.com> Date: Wed, 6 Dec 2023 03:28:47 +0100 Subject: [PATCH 201/792] feat(gms/search): Adding support for DOUBLE Searchable type (#9369) Co-authored-by: si-chakraborty <si.chakraborty@adevinta.com> --- .../metadata/models/annotation/SearchableAnnotation.java | 3 ++- .../com/linkedin/metadata/models/EntitySpecBuilderTest.java | 6 +++++- .../search/elasticsearch/indexbuilder/MappingsBuilder.java | 2 ++ .../metadata/search/indexbuilder/MappingsBuilderTest.java | 6 +++++- .../src/main/pegasus/com/datahub/test/TestEntityInfo.pdl | 6 ++++++ 5 files changed, 20 insertions(+), 3 deletions(-) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java index d5e5044f95c23..efa30a948e237 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java @@ -66,7 +66,8 @@ public enum FieldType { DATETIME, OBJECT, BROWSE_PATH_V2, - 
WORD_GRAM + WORD_GRAM, + DOUBLE } @Nonnull diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index 3618108970afa..b95cb1085283f 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -142,7 +142,7 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { assertEquals(new TestEntityInfo().schema().getFullName(), testEntityInfo.getPegasusSchema().getFullName()); // Assert on Searchable Fields - assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 10); + assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11); assertEquals("customProperties", testEntityInfo.getSearchableFieldSpecMap().get( new PathSpec("customProperties").toString()).getSearchableAnnotation().getFieldName()); assertEquals(SearchableAnnotation.FieldType.KEYWORD, testEntityInfo.getSearchableFieldSpecMap().get( @@ -189,6 +189,10 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { new PathSpec("foreignKey").toString()).getSearchableAnnotation().getFieldName()); assertEquals(true, testEntityInfo.getSearchableFieldSpecMap().get( new PathSpec("foreignKey").toString()).getSearchableAnnotation().isQueryByDefault()); + assertEquals("doubleField", testEntityInfo.getSearchableFieldSpecMap().get( + new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldName()); + assertEquals(SearchableAnnotation.FieldType.DOUBLE, testEntityInfo.getSearchableFieldSpecMap().get( + new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldType()); // Assert on Relationship Fields diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 35cef71edd953..13a0f57ccea99 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -134,6 +134,8 @@ private static Map<String, Object> getMappingsForField(@Nonnull final Searchable mappingForField.put(TYPE, ESUtils.DATE_FIELD_TYPE); } else if (fieldType == FieldType.OBJECT) { mappingForField.put(TYPE, ESUtils.OBJECT_FIELD_TYPE); + } else if (fieldType == FieldType.DOUBLE) { + mappingForField.put(TYPE, ESUtils.DOUBLE_FIELD_TYPE); } else { log.info("FieldType {} has no mappings implemented", fieldType); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index 0d2ce236d9f54..d9f2f0e5aac94 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -18,7 +18,7 @@ public void testMappingsBuilder() { Map<String, Object> result = MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec()); assertEquals(result.size(), 1); Map<String, Object> properties = (Map<String, Object>) result.get("properties"); - assertEquals(properties.size(), 19); + assertEquals(properties.size(), 20); assertEquals(properties.get("urn"), ImmutableMap.of("type", 
"keyword", "fields", ImmutableMap.of("delimited", @@ -123,5 +123,9 @@ public void testMappingsBuilder() { assertEquals(feature1.get("type"), "double"); Map<String, Object> feature2 = (Map<String, Object>) properties.get("feature2"); assertEquals(feature2.get("type"), "double"); + + // DOUBLE + Map<String, Object> doubleField = (Map<String, Object>) properties.get("doubleField"); + assertEquals(doubleField.get("type"), "double"); } } diff --git a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl index 6dff14133ee60..db293140ad650 100644 --- a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl +++ b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl @@ -90,4 +90,10 @@ record TestEntityInfo includes CustomProperties { } } esObjectField: optional map[string, string] + + @Searchable = { + "fieldName": "doubleField", + "fieldType": "DOUBLE" + } + doubleField: optional double } From e14474176f20e38b2c4c883949c561223181b57c Mon Sep 17 00:00:00 2001 From: Aseem Bansal <asmbansal2@gmail.com> Date: Wed, 6 Dec 2023 11:02:42 +0530 Subject: [PATCH 202/792] feat(lint): add spotless for java lint (#9373) --- build.gradle | 39 +- datahub-frontend/app/auth/AuthModule.java | 366 +- datahub-frontend/app/auth/AuthUtils.java | 247 +- datahub-frontend/app/auth/Authenticator.java | 57 +- datahub-frontend/app/auth/ConfigUtil.java | 10 +- datahub-frontend/app/auth/CookieConfigs.java | 19 +- datahub-frontend/app/auth/JAASConfigs.java | 19 +- .../app/auth/NativeAuthenticationConfigs.java | 16 +- .../app/auth/cookie/CustomCookiesModule.java | 5 +- datahub-frontend/app/auth/sso/SsoConfigs.java | 34 +- datahub-frontend/app/auth/sso/SsoManager.java | 15 +- .../app/auth/sso/SsoProvider.java | 22 +- .../sso/oidc/OidcAuthorizationGenerator.java | 69 +- .../app/auth/sso/oidc/OidcCallbackLogic.java | 273 +- .../app/auth/sso/oidc/OidcConfigs.java | 194 +- .../app/auth/sso/oidc/OidcProvider.java | 33 +- .../sso/oidc/OidcResponseErrorHandler.java | 77 +- .../oidc/custom/CustomOidcAuthenticator.java | 76 +- .../app/client/AuthServiceClient.java | 152 +- .../app/client/KafkaTrackingProducer.java | 228 +- .../app/config/ConfigurationProvider.java | 18 +- .../app/controllers/Application.java | 179 +- .../controllers/AuthenticationController.java | 590 +-- .../controllers/CentralLogoutController.java | 32 +- .../controllers/SsoCallbackController.java | 99 +- .../app/controllers/TrackingController.java | 82 +- datahub-frontend/app/security/AuthUtil.java | 37 +- .../app/security/AuthenticationManager.java | 16 +- .../app/security/DummyLoginModule.java | 18 +- datahub-frontend/app/utils/ConfigUtil.java | 20 +- datahub-frontend/app/utils/SearchUtil.java | 39 +- datahub-frontend/build.gradle | 2 - .../test/app/ApplicationTest.java | 68 +- .../test/security/DummyLoginModuleTest.java | 8 +- .../test/security/OidcConfigurationTest.java | 567 ++- .../test/utils/SearchUtilTest.java | 23 +- .../linkedin/datahub/graphql/Constants.java | 42 +- .../datahub/graphql/GmsGraphQLEngine.java | 3369 ++++++++++------- .../datahub/graphql/GmsGraphQLEngineArgs.java | 72 +- .../datahub/graphql/GmsGraphQLPlugin.java | 25 +- .../datahub/graphql/GraphQLEngine.java | 241 +- .../datahub/graphql/QueryContext.java | 43 +- .../datahub/graphql/RelationshipKey.java | 1 - .../datahub/graphql/TimeSeriesAspectArgs.java | 6 +- .../datahub/graphql/UsageStatsKey.java | 1 - .../datahub/graphql/VersionedAspectKey.java | 2 +- .../graphql/WeaklyTypedAspectsResolver.java 
| 104 +- .../resolver/AnalyticsChartTypeResolver.java | 23 +- .../analytics/resolver/GetChartsResolver.java | 265 +- .../resolver/GetHighlightsResolver.java | 114 +- .../GetMetadataAnalyticsResolver.java | 93 +- .../resolver/IsAnalyticsEnabledResolver.java | 9 +- .../analytics/service/AnalyticsService.java | 179 +- .../analytics/service/AnalyticsUtil.java | 182 +- .../authorization/AuthorizationUtils.java | 183 +- .../exception/AuthenticationException.java | 16 +- .../exception/AuthorizationException.java | 5 +- .../DataHubDataFetcherExceptionHandler.java | 3 +- .../exception/DataHubGraphQLError.java | 12 +- .../exception/DataHubGraphQLException.java | 1 - .../exception/ValidationException.java | 16 +- .../graphql/featureflags/FeatureFlags.java | 1 - .../datahub/graphql/resolvers/AuthUtils.java | 24 +- .../resolvers/AuthenticatedResolver.java | 27 +- .../graphql/resolvers/BatchLoadUtils.java | 14 +- .../graphql/resolvers/EntityTypeMapper.java | 10 +- .../datahub/graphql/resolvers/MeResolver.java | 175 +- .../graphql/resolvers/ResolverUtils.java | 328 +- .../assertion/AssertionRunEventResolver.java | 128 +- .../assertion/DeleteAssertionResolver.java | 109 +- .../assertion/EntityAssertionsResolver.java | 102 +- .../resolvers/auth/AccessTokenUtil.java | 11 +- .../auth/CreateAccessTokenResolver.java | 133 +- .../auth/GetAccessTokenResolver.java | 75 +- .../auth/ListAccessTokensResolver.java | 123 +- .../auth/RevokeAccessTokenResolver.java | 58 +- .../resolvers/browse/BrowsePathsResolver.java | 86 +- .../resolvers/browse/BrowseResolver.java | 98 +- .../browse/EntityBrowsePathsResolver.java | 22 +- .../resolvers/chart/BrowseV2Resolver.java | 102 +- .../chart/ChartStatsSummaryResolver.java | 15 +- .../resolvers/config/AppConfigResolver.java | 111 +- .../container/ContainerEntitiesResolver.java | 93 +- .../container/ParentContainersResolver.java | 53 +- .../DashboardStatsSummaryResolver.java | 106 +- .../DashboardUsageStatsResolver.java | 74 +- .../dashboard/DashboardUsageStatsUtils.java | 184 +- .../BatchSetDataProductResolver.java | 101 +- .../CreateDataProductResolver.java | 69 +- .../DataProductAuthorizationUtils.java | 47 +- .../DeleteDataProductResolver.java | 55 +- .../ListDataProductAssetsResolver.java | 176 +- .../UpdateDataProductResolver.java | 77 +- .../dataset/DatasetHealthResolver.java | 130 +- .../dataset/DatasetStatsSummaryResolver.java | 96 +- .../dataset/DatasetUsageStatsResolver.java | 40 +- .../UpdateDeprecationResolver.java | 113 +- .../domain/CreateDomainResolver.java | 156 +- .../domain/DeleteDomainResolver.java | 65 +- .../domain/DomainEntitiesResolver.java | 100 +- .../resolvers/domain/ListDomainsResolver.java | 88 +- .../domain/ParentDomainsResolver.java | 72 +- .../resolvers/domain/SetDomainResolver.java | 86 +- .../resolvers/domain/UnsetDomainResolver.java | 74 +- .../resolvers/embed/UpdateEmbedResolver.java | 91 +- .../entity/EntityExistsResolver.java | 30 +- .../entity/EntityPrivilegesResolver.java | 71 +- .../glossary/AddRelatedTermsResolver.java | 135 +- .../glossary/CreateGlossaryNodeResolver.java | 116 +- .../glossary/CreateGlossaryTermResolver.java | 160 +- .../DeleteGlossaryEntityResolver.java | 61 +- .../GetRootGlossaryNodesResolver.java | 97 +- .../GetRootGlossaryTermsResolver.java | 88 +- .../glossary/ParentNodesResolver.java | 96 +- .../glossary/RemoveRelatedTermsResolver.java | 124 +- .../group/AddGroupMembersResolver.java | 72 +- .../resolvers/group/CreateGroupResolver.java | 41 +- .../resolvers/group/EntityCountsResolver.java | 62 +- 
.../resolvers/group/ListGroupsResolver.java | 82 +- .../group/RemoveGroupMembersResolver.java | 70 +- .../resolvers/group/RemoveGroupResolver.java | 46 +- .../resolvers/ingest/IngestionAuthUtils.java | 13 +- .../ingest/IngestionResolverUtils.java | 63 +- ...ncelIngestionExecutionRequestResolver.java | 101 +- ...eateIngestionExecutionRequestResolver.java | 194 +- .../CreateTestConnectionRequestResolver.java | 104 +- .../GetIngestionExecutionRequestResolver.java | 60 +- ...estionSourceExecutionRequestsResolver.java | 120 +- .../execution/RollbackIngestionResolver.java | 52 +- .../ingest/secret/CreateSecretResolver.java | 79 +- .../ingest/secret/DeleteSecretResolver.java | 27 +- .../secret/GetSecretValuesResolver.java | 97 +- .../ingest/secret/ListSecretsResolver.java | 107 +- .../resolvers/ingest/secret/SecretUtils.java | 11 +- .../source/DeleteIngestionSourceResolver.java | 29 +- .../source/GetIngestionSourceResolver.java | 52 +- .../source/ListIngestionSourcesResolver.java | 108 +- .../source/UpsertIngestionSourceResolver.java | 118 +- .../resolvers/jobs/DataJobRunsResolver.java | 127 +- .../resolvers/jobs/EntityRunsResolver.java | 140 +- .../lineage/UpdateLineageResolver.java | 230 +- .../resolvers/load/AspectResolver.java | 22 +- .../load/BatchGetEntitiesResolver.java | 28 +- .../load/EntityLineageResultResolver.java | 73 +- .../EntityRelationshipsResultResolver.java | 54 +- .../load/EntityTypeBatchResolver.java | 34 +- .../resolvers/load/EntityTypeResolver.java | 79 +- .../load/LoadableTypeBatchResolver.java | 41 +- .../resolvers/load/LoadableTypeResolver.java | 41 +- .../resolvers/load/OwnerTypeResolver.java | 42 +- .../load/TimeSeriesAspectResolver.java | 115 +- .../resolvers/mutate/AddLinkResolver.java | 61 +- .../resolvers/mutate/AddOwnerResolver.java | 45 +- .../resolvers/mutate/AddOwnersResolver.java | 66 +- .../resolvers/mutate/AddTagResolver.java | 80 +- .../resolvers/mutate/AddTagsResolver.java | 79 +- .../resolvers/mutate/AddTermResolver.java | 70 +- .../resolvers/mutate/AddTermsResolver.java | 75 +- .../mutate/BatchAddOwnersResolver.java | 62 +- .../mutate/BatchAddTagsResolver.java | 127 +- .../mutate/BatchAddTermsResolver.java | 115 +- .../mutate/BatchRemoveOwnersResolver.java | 77 +- .../mutate/BatchRemoveTagsResolver.java | 64 +- .../mutate/BatchRemoveTermsResolver.java | 64 +- .../mutate/BatchSetDomainResolver.java | 61 +- .../BatchUpdateDeprecationResolver.java | 68 +- .../BatchUpdateSoftDeletedResolver.java | 55 +- .../resolvers/mutate/DescriptionUtils.java | 440 ++- .../resolvers/mutate/MoveDomainResolver.java | 110 +- .../mutate/MutableTypeBatchResolver.java | 43 +- .../resolvers/mutate/MutableTypeResolver.java | 50 +- .../resolvers/mutate/MutationUtils.java | 80 +- .../resolvers/mutate/RemoveLinkResolver.java | 55 +- .../resolvers/mutate/RemoveOwnerResolver.java | 59 +- .../resolvers/mutate/RemoveTagResolver.java | 79 +- .../resolvers/mutate/RemoveTermResolver.java | 79 +- .../mutate/UpdateDescriptionResolver.java | 661 ++-- .../resolvers/mutate/UpdateNameResolver.java | 219 +- .../mutate/UpdateParentNodeResolver.java | 113 +- .../mutate/UpdateUserSettingResolver.java | 71 +- .../resolvers/mutate/util/DeleteUtils.java | 54 +- .../mutate/util/DeprecationUtils.java | 58 +- .../resolvers/mutate/util/DomainUtils.java | 217 +- .../resolvers/mutate/util/EmbedUtils.java | 25 +- .../resolvers/mutate/util/GlossaryUtils.java | 109 +- .../resolvers/mutate/util/LabelUtils.java | 409 +- .../resolvers/mutate/util/LinkUtils.java | 98 +- .../resolvers/mutate/util/OwnerUtils.java | 
243 +- .../resolvers/mutate/util/SiblingsUtils.java | 21 +- .../operation/ReportOperationResolver.java | 105 +- .../CreateOwnershipTypeResolver.java | 41 +- .../DeleteOwnershipTypeResolver.java | 28 +- .../ownership/ListOwnershipTypesResolver.java | 86 +- .../UpdateOwnershipTypeResolver.java | 51 +- .../policy/DeletePolicyResolver.java | 35 +- .../policy/GetGrantedPrivilegesResolver.java | 33 +- .../policy/ListPoliciesResolver.java | 33 +- .../resolvers/policy/PolicyAuthUtils.java | 10 +- .../policy/UpsertPolicyResolver.java | 48 +- .../mappers/PolicyInfoPolicyMapper.java | 45 +- .../mappers/PolicyUpdateInputInfoMapper.java | 43 +- .../resolvers/post/CreatePostResolver.java | 38 +- .../resolvers/post/DeletePostResolver.java | 19 +- .../resolvers/post/ListPostsResolver.java | 73 +- .../resolvers/query/CreateQueryResolver.java | 76 +- .../resolvers/query/DeleteQueryResolver.java | 46 +- .../resolvers/query/ListQueriesResolver.java | 83 +- .../resolvers/query/UpdateQueryResolver.java | 109 +- .../ListRecommendationsResolver.java | 111 +- .../resolvers/role/AcceptRoleResolver.java | 47 +- .../role/BatchAssignRoleResolver.java | 30 +- .../role/CreateInviteTokenResolver.java | 30 +- .../role/GetInviteTokenResolver.java | 30 +- .../resolvers/role/ListRolesResolver.java | 66 +- .../AggregateAcrossEntitiesResolver.java | 112 +- .../AutoCompleteForMultipleResolver.java | 133 +- .../search/AutoCompleteResolver.java | 114 +- .../resolvers/search/AutocompleteUtils.java | 102 +- .../search/GetQuickFiltersResolver.java | 172 +- .../search/ScrollAcrossEntitiesResolver.java | 124 +- .../search/ScrollAcrossLineageResolver.java | 124 +- .../search/SearchAcrossEntitiesResolver.java | 109 +- .../search/SearchAcrossLineageResolver.java | 138 +- .../resolvers/search/SearchResolver.java | 74 +- .../graphql/resolvers/search/SearchUtils.java | 301 +- .../UpdateCorpUserViewsSettingsResolver.java | 76 +- .../view/GlobalViewsSettingsResolver.java | 36 +- .../UpdateGlobalViewsSettingsResolver.java | 70 +- .../step/BatchGetStepStatesResolver.java | 109 +- .../step/BatchUpdateStepStatesResolver.java | 65 +- .../resolvers/tag/CreateTagResolver.java | 93 +- .../resolvers/tag/DeleteTagResolver.java | 55 +- .../resolvers/tag/SetTagColorResolver.java | 104 +- .../resolvers/test/CreateTestResolver.java | 85 +- .../resolvers/test/DeleteTestResolver.java | 32 +- .../resolvers/test/ListTestsResolver.java | 81 +- .../resolvers/test/TestResultsResolver.java | 55 +- .../graphql/resolvers/test/TestUtils.java | 14 +- .../resolvers/test/UpdateTestResolver.java | 53 +- .../timeline/GetSchemaBlameResolver.java | 62 +- .../GetSchemaVersionListResolver.java | 53 +- .../type/AspectInterfaceTypeResolver.java | 20 +- .../type/EntityInterfaceTypeResolver.java | 26 +- .../type/HyperParameterValueTypeResolver.java | 36 +- .../type/PlatformSchemaUnionTypeResolver.java | 22 +- .../resolvers/type/ResultsTypeResolver.java | 18 +- ...TimeSeriesAspectInterfaceTypeResolver.java | 3 +- .../CreateNativeUserResetTokenResolver.java | 41 +- .../resolvers/user/ListUsersResolver.java | 77 +- .../resolvers/user/RemoveUserResolver.java | 46 +- .../user/UpdateUserStatusResolver.java | 46 +- .../resolvers/view/CreateViewResolver.java | 79 +- .../resolvers/view/DeleteViewResolver.java | 40 +- .../view/ListGlobalViewsResolver.java | 86 +- .../resolvers/view/ListMyViewsResolver.java | 100 +- .../resolvers/view/UpdateViewResolver.java | 63 +- .../graphql/resolvers/view/ViewUtils.java | 92 +- .../graphql/scalar/LongScalarType.java | 3 +- 
.../graphql/types/BatchMutableType.java | 18 +- .../graphql/types/BrowsableEntityType.java | 58 +- .../datahub/graphql/types/EntityType.java | 16 +- .../datahub/graphql/types/LoadableType.java | 68 +- .../datahub/graphql/types/MutableType.java | 25 +- .../graphql/types/SearchableEntityType.java | 83 +- .../graphql/types/aspect/AspectMapper.java | 1 - .../graphql/types/aspect/AspectType.java | 72 +- .../types/assertion/AssertionMapper.java | 45 +- .../types/assertion/AssertionType.java | 112 +- .../types/auth/AccessTokenMetadataType.java | 25 +- .../mappers/AccessTokenMetadataMapper.java | 9 +- .../graphql/types/chart/ChartType.java | 366 +- .../types/chart/mappers/ChartMapper.java | 339 +- .../chart/mappers/ChartUpdateInputMapper.java | 109 +- .../chart/mappers/InputFieldsMapper.java | 53 +- .../common/mappers/AuditStampMapper.java | 26 +- .../common/mappers/BrowsePathsV2Mapper.java | 6 +- .../mappers/ChangeAuditStampsMapper.java | 4 +- .../types/common/mappers/CostMapper.java | 26 +- .../types/common/mappers/CostValueMapper.java | 29 +- .../mappers/CustomPropertiesMapper.java | 36 +- .../DataPlatformInstanceAspectMapper.java | 10 +- .../common/mappers/DeprecationMapper.java | 32 +- .../types/common/mappers/EmbedMapper.java | 1 - .../mappers/FineGrainedLineagesMapper.java | 42 +- .../mappers/InstitutionalMemoryMapper.java | 27 +- .../InstitutionalMemoryMetadataMapper.java | 46 +- ...stitutionalMemoryMetadataUpdateMapper.java | 37 +- .../InstitutionalMemoryUpdateMapper.java | 34 +- .../types/common/mappers/OperationMapper.java | 91 +- .../types/common/mappers/OwnerMapper.java | 74 +- .../common/mappers/OwnerUpdateMapper.java | 76 +- .../types/common/mappers/OwnershipMapper.java | 33 +- .../common/mappers/OwnershipSourceMapper.java | 30 +- .../common/mappers/OwnershipUpdateMapper.java | 37 +- .../mappers/SearchFlagsInputMapper.java | 10 +- .../types/common/mappers/SiblingsMapper.java | 12 +- .../types/common/mappers/StatusMapper.java | 21 +- .../types/common/mappers/StringMapMapper.java | 31 +- .../types/common/mappers/SubTypesMapper.java | 9 +- .../mappers/UpstreamLineagesMapper.java | 12 +- .../common/mappers/UrnToEntityMapper.java | 7 +- .../common/mappers/util/MappingHelper.java | 8 +- .../types/common/mappers/util/RunInfo.java | 1 - .../mappers/util/SystemMetadataUtils.java | 19 +- .../mappers/util/UpdateMappingHelper.java | 1 - .../types/container/ContainerType.java | 108 +- .../container/mappers/ContainerMapper.java | 80 +- .../types/corpgroup/CorpGroupType.java | 329 +- .../types/corpgroup/CorpGroupUtils.java | 24 +- .../CorpGroupEditablePropertiesMapper.java | 18 +- .../mappers/CorpGroupInfoMapper.java | 76 +- .../corpgroup/mappers/CorpGroupMapper.java | 119 +- .../mappers/CorpGroupPropertiesMapper.java | 9 +- .../graphql/types/corpuser/CorpUserType.java | 355 +- .../graphql/types/corpuser/CorpUserUtils.java | 24 +- .../mappers/CorpUserEditableInfoMapper.java | 46 +- .../corpuser/mappers/CorpUserInfoMapper.java | 48 +- .../corpuser/mappers/CorpUserMapper.java | 212 +- .../mappers/CorpUserPropertiesMapper.java | 12 +- .../mappers/CorpUserStatusMapper.java | 9 +- .../types/dashboard/DashboardType.java | 366 +- .../dashboard/mappers/DashboardMapper.java | 325 +- .../mappers/DashboardUpdateInputMapper.java | 110 +- .../mappers/DashboardUsageMetricMapper.java | 7 +- .../graphql/types/dataflow/DataFlowType.java | 340 +- .../dataflow/mappers/DataFlowMapper.java | 239 +- .../mappers/DataFlowUpdateInputMapper.java | 44 +- .../graphql/types/datajob/DataJobType.java | 346 +- 
.../types/datajob/mappers/DataJobMapper.java | 247 +- .../mappers/DataJobUpdateInputMapper.java | 104 +- .../types/dataplatform/DataPlatformType.java | 86 +- .../mappers/DataPlatformInfoMapper.java | 36 +- .../mappers/DataPlatformMapper.java | 64 +- .../mappers/DataPlatformPropertiesMapper.java | 37 +- .../DataPlatformInstanceType.java | 182 +- .../mappers/DataPlatformInstanceMapper.java | 107 +- .../mappers/DataProcessInstanceMapper.java | 64 +- .../DataProcessInstanceRunEventMapper.java | 65 +- .../DataProcessInstanceRunResultMapper.java | 42 +- .../types/dataproduct/DataProductType.java | 87 +- .../mappers/DataProductMapper.java | 67 +- .../graphql/types/dataset/DatasetType.java | 487 +-- .../graphql/types/dataset/DatasetUtils.java | 16 +- .../types/dataset/VersionedDatasetType.java | 64 +- .../mappers/AssertionRunEventMapper.java | 13 +- .../mappers/DatasetDeprecationMapper.java | 31 +- .../types/dataset/mappers/DatasetMapper.java | 305 +- .../dataset/mappers/DatasetProfileMapper.java | 21 +- .../mappers/DatasetUpdateInputMapper.java | 78 +- .../EditableSchemaFieldInfoMapper.java | 51 +- .../mappers/EditableSchemaMetadataMapper.java | 35 +- .../mappers/ForeignKeyConstraintMapper.java | 18 +- .../dataset/mappers/PlatformSchemaMapper.java | 109 +- .../dataset/mappers/SchemaFieldMapper.java | 119 +- .../types/dataset/mappers/SchemaMapper.java | 71 +- .../dataset/mappers/SchemaMetadataMapper.java | 65 +- .../mappers/VersionedDatasetMapper.java | 110 +- .../types/domain/DomainAssociationMapper.java | 38 +- .../graphql/types/domain/DomainMapper.java | 25 +- .../graphql/types/domain/DomainType.java | 74 +- .../types/glossary/GlossaryNodeType.java | 52 +- .../types/glossary/GlossaryTermType.java | 246 +- .../types/glossary/GlossaryTermUtils.java | 28 +- .../glossary/mappers/GlossaryNodeMapper.java | 16 +- .../mappers/GlossaryTermInfoMapper.java | 53 +- .../glossary/mappers/GlossaryTermMapper.java | 107 +- .../mappers/GlossaryTermPropertiesMapper.java | 17 +- .../glossary/mappers/GlossaryTermsMapper.java | 75 +- .../mappers/AutoCompleteResultsMapper.java | 33 +- .../types/mappers/BrowsePathMapper.java | 30 +- .../types/mappers/BrowsePathsMapper.java | 25 +- .../types/mappers/BrowseResultMapper.java | 11 +- .../types/mappers/InputModelMapper.java | 8 +- .../graphql/types/mappers/MapperUtils.java | 87 +- .../graphql/types/mappers/ModelMapper.java | 7 +- .../types/mappers/TimeSeriesAspectMapper.java | 6 +- .../UrnScrollAcrossLineageResultsMapper.java | 15 +- .../types/mappers/UrnScrollResultsMapper.java | 9 +- .../UrnSearchAcrossLineageResultsMapper.java | 31 +- .../types/mappers/UrnSearchResultsMapper.java | 14 +- .../types/mlmodel/MLFeatureTableType.java | 220 +- .../graphql/types/mlmodel/MLFeatureType.java | 142 +- .../types/mlmodel/MLModelGroupType.java | 222 +- .../graphql/types/mlmodel/MLModelType.java | 213 +- .../graphql/types/mlmodel/MLModelUtils.java | 63 +- .../types/mlmodel/MLPrimaryKeyType.java | 143 +- .../types/mlmodel/mappers/BaseDataMapper.java | 25 +- .../CaveatsAndRecommendationsMapper.java | 43 +- .../mlmodel/mappers/CaveatsDetailsMapper.java | 28 +- .../mappers/EthicalConsiderationsMapper.java | 34 +- .../mappers/HyperParameterMapMapper.java | 30 +- .../HyperParameterValueTypeMapper.java | 49 +- .../mlmodel/mappers/IntendedUseMapper.java | 36 +- .../mlmodel/mappers/MLFeatureMapper.java | 166 +- .../mappers/MLFeaturePropertiesMapper.java | 58 +- .../mlmodel/mappers/MLFeatureTableMapper.java | 170 +- .../MLFeatureTablePropertiesMapper.java | 71 +- 
.../mlmodel/mappers/MLHyperParamMapper.java | 30 +- .../types/mlmodel/mappers/MLMetricMapper.java | 27 +- .../mappers/MLModelFactorPromptsMapper.java | 43 +- .../mlmodel/mappers/MLModelFactorsMapper.java | 43 +- .../mlmodel/mappers/MLModelGroupMapper.java | 160 +- .../mappers/MLModelGroupPropertiesMapper.java | 35 +- .../types/mlmodel/mappers/MLModelMapper.java | 257 +- .../mappers/MLModelPropertiesMapper.java | 100 +- .../mlmodel/mappers/MLPrimaryKeyMapper.java | 152 +- .../mappers/MLPrimaryKeyPropertiesMapper.java | 58 +- .../types/mlmodel/mappers/MetricsMapper.java | 23 +- .../mappers/QuantitativeAnalysesMapper.java | 29 +- .../mlmodel/mappers/ResultsTypeMapper.java | 30 +- .../mlmodel/mappers/SourceCodeUrlMapper.java | 28 +- .../mlmodel/mappers/VersionTagMapper.java | 26 +- .../graphql/types/notebook/NotebookType.java | 159 +- .../notebook/mappers/NotebookMapper.java | 139 +- .../mappers/NotebookUpdateInputMapper.java | 34 +- .../types/ownership/OwnershipType.java | 34 +- .../types/ownership/OwnershipTypeMapper.java | 13 +- .../types/policy/DataHubPolicyMapper.java | 44 +- .../types/policy/DataHubPolicyType.java | 26 +- .../graphql/types/post/PostMapper.java | 5 +- .../graphql/types/query/QueryMapper.java | 23 +- .../graphql/types/query/QueryType.java | 33 +- .../DataFlowDataJobsRelationshipsMapper.java | 36 +- .../DownstreamEntityRelationshipsMapper.java | 36 +- .../EntityRelationshipLegacyMapper.java | 38 +- .../UpstreamEntityRelationshipsMapper.java | 34 +- .../graphql/types/role/DataHubRoleType.java | 26 +- .../types/role/mappers/DataHubRoleMapper.java | 5 +- .../graphql/types/rolemetadata/RoleType.java | 158 +- .../rolemetadata/mappers/AccessMapper.java | 60 +- .../rolemetadata/mappers/RoleMapper.java | 120 +- .../types/schemafield/SchemaFieldType.java | 25 +- .../datahub/graphql/types/tag/TagType.java | 278 +- .../types/tag/mappers/GlobalTagsMapper.java | 51 +- .../mappers/TagAssociationUpdateMapper.java | 35 +- .../graphql/types/tag/mappers/TagMapper.java | 88 +- .../tag/mappers/TagUpdateInputMapper.java | 21 +- .../graphql/types/test/TestMapper.java | 14 +- .../datahub/graphql/types/test/TestType.java | 39 +- .../timeline/mappers/SchemaBlameMapper.java | 72 +- .../mappers/SchemaVersionListMapper.java | 50 +- .../types/timeline/utils/TimelineUtils.java | 30 +- .../types/usage/FieldUsageCountsMapper.java | 7 +- .../types/usage/UsageAggregationMapper.java | 11 +- .../usage/UsageAggregationMetricsMapper.java | 23 +- .../UsageQueryResultAggregationMapper.java | 27 +- .../types/usage/UsageQueryResultMapper.java | 17 +- .../types/usage/UserUsageCountsMapper.java | 12 +- .../graphql/types/view/DataHubViewMapper.java | 38 +- .../graphql/types/view/DataHubViewType.java | 26 +- .../datahub/graphql/util/DateUtil.java | 51 +- .../graphql/util/SearchInsightsUtil.java | 3 +- .../linkedin/datahub/graphql/TestUtils.java | 92 +- .../graphql/resolvers/ResolverUtilsTest.java | 112 +- .../resolvers/UpdateLineageResolverTest.java | 78 +- .../AssertionRunEventResolverTest.java | 102 +- .../DeleteAssertionResolverTest.java | 154 +- .../EntityAssertionsResolverTest.java | 179 +- .../auth/ListAccessTokensResolverTest.java | 31 +- .../browse/BrowseV2ResolverTest.java | 230 +- .../browse/EntityBrowsePathsResolverTest.java | 20 +- .../ContainerEntitiesResolverTest.java | 78 +- .../ParentContainersResolverTest.java | 149 +- .../dashboard/DashboardStatsSummaryTest.java | 162 +- .../dataset/DatasetHealthResolverTest.java | 207 +- .../DatasetStatsSummaryResolverTest.java | 96 +- 
.../BatchUpdateSoftDeletedResolverTest.java | 122 +- .../BatchUpdateDeprecationResolverTest.java | 188 +- .../UpdateDeprecationResolverTest.java | 183 +- .../domain/BatchSetDomainResolverTest.java | 234 +- .../domain/CreateDomainProposalMatcher.java | 21 +- .../domain/CreateDomainResolverTest.java | 187 +- .../domain/DeleteDomainResolverTest.java | 44 +- .../domain/DomainEntitiesResolverTest.java | 83 +- .../domain/ListDomainsResolverTest.java | 141 +- .../domain/MoveDomainResolverTest.java | 67 +- .../domain/ParentDomainsResolverTest.java | 97 +- .../domain/SetDomainResolverTest.java | 196 +- .../domain/UnsetDomainResolverTest.java | 155 +- .../embed/UpdateEmbedResolverTest.java | 106 +- .../entity/EntityExistsResolverTest.java | 7 +- .../entity/EntityPrivilegesResolverTest.java | 24 +- .../glossary/AddRelatedTermsResolverTest.java | 107 +- .../CreateGlossaryNodeResolverTest.java | 80 +- .../CreateGlossaryTermResolverTest.java | 176 +- .../DeleteGlossaryEntityResolverTest.java | 35 +- .../GetRootGlossaryNodesResolverTest.java | 68 +- .../GetRootGlossaryTermsResolverTest.java | 63 +- .../resolvers/glossary/GlossaryUtilsTest.java | 176 +- .../glossary/ParentNodesResolverTest.java | 293 +- .../RemoveRelatedTermsResolverTest.java | 88 +- .../glossary/UpdateNameResolverTest.java | 68 +- .../UpdateParentNodeResolverTest.java | 66 +- .../group/AddGroupMembersResolverTest.java | 9 +- .../group/CreateGroupResolverTest.java | 9 +- .../group/RemoveGroupMembersResolverTest.java | 9 +- .../resolvers/ingest/IngestTestUtils.java | 44 +- .../ingest/IngestionAuthUtilsTest.java | 34 +- ...IngestionExecutionRequestResolverTest.java | 75 +- ...IngestionExecutionRequestResolverTest.java | 75 +- ...eateTestConnectionRequestResolverTest.java | 37 +- ...IngestionExecutionRequestResolverTest.java | 82 +- ...onSourceExecutionRequestsResolverTest.java | 138 +- .../RollbackIngestionResolverTest.java | 28 +- .../CreateSecretResolverMatcherTest.java | 19 +- .../secret/CreateSecretResolverTest.java | 58 +- .../secret/DeleteSecretResolverTest.java | 16 +- .../secret/GetSecretValuesResolverTest.java | 66 +- .../secret/ListSecretsResolverTest.java | 121 +- .../DeleteIngestionSourceResolverTest.java | 25 +- .../GetIngestionSourceResolverTest.java | 65 +- .../ListIngestionSourceResolverTest.java | 126 +- .../UpsertIngestionSourceResolverTest.java | 77 +- .../mutate/MutableTypeBatchResolverTest.java | 269 +- .../resolvers/mutate/SiblingsUtilsTest.java | 58 +- .../mutate/UpdateUserSettingResolverTest.java | 17 +- .../ReportOperationResolverTest.java | 57 +- .../owner/AddOwnersResolverTest.java | 333 +- .../owner/BatchAddOwnersResolverTest.java | 353 +- .../owner/BatchRemoveOwnersResolverTest.java | 161 +- .../CreateOwnershipTypeResolverTest.java | 52 +- .../DeleteOwnershipTypeResolverTest.java | 49 +- .../ListOwnershipTypesResolverTest.java | 82 +- .../UpdateOwnershipTypeResolverTest.java | 95 +- .../post/CreatePostResolverTest.java | 51 +- .../post/DeletePostResolverTest.java | 9 +- .../resolvers/post/ListPostsResolverTest.java | 60 +- .../query/CreateQueryResolverTest.java | 253 +- .../query/DeleteQueryResolverTest.java | 102 +- .../query/ListQueriesResolverTest.java | 119 +- .../query/UpdateQueryResolverTest.java | 288 +- .../role/AcceptRoleResolverTest.java | 24 +- .../role/BatchAssignRoleResolverTest.java | 9 +- .../role/CreateInviteTokenResolverTest.java | 15 +- .../role/GetInviteTokenResolverTest.java | 15 +- .../resolvers/role/ListRolesResolverTest.java | 60 +- .../AggregateAcrossEntitiesResolverTest.java | 487 ++- 
.../AutoCompleteForMultipleResolverTest.java | 243 +- .../search/GetQuickFiltersResolverTest.java | 214 +- .../SearchAcrossEntitiesResolverTest.java | 686 ++-- .../SearchAcrossLineageResolverTest.java | 42 +- .../resolvers/search/SearchResolverTest.java | 299 +- .../resolvers/search/SearchUtilsTest.java | 580 +-- ...dateCorpUserViewsSettingsResolverTest.java | 152 +- .../view/GlobalViewsSettingsResolverTest.java | 47 +- ...UpdateGlobalViewsSettingsResolverTest.java | 100 +- .../step/BatchGetStepStatesResolverTest.java | 51 +- .../BatchUpdateStepStatesResolverTest.java | 12 +- .../resolvers/tag/AddTagsResolverTest.java | 156 +- .../tag/BatchAddTagsResolverTest.java | 255 +- .../tag/BatchRemoveTagsResolverTest.java | 206 +- .../resolvers/tag/CreateTagResolverTest.java | 49 +- .../resolvers/tag/DeleteTagResolverTest.java | 21 +- .../tag/SetTagColorResolverTest.java | 108 +- .../resolvers/term/AddTermsResolverTest.java | 169 +- .../term/BatchAddTermsResolverTest.java | 195 +- .../term/BatchRemoveTermsResolverTest.java | 165 +- .../test/CreateTestResolverTest.java | 46 +- .../test/DeleteTestResolverTest.java | 21 +- .../resolvers/test/ListTestsResolverTest.java | 82 +- .../test/UpdateTestResolverTest.java | 42 +- ...reateNativeUserResetTokenResolverTest.java | 15 +- .../view/CreateViewResolverTest.java | 172 +- .../view/DeleteViewResolverTest.java | 70 +- .../view/ListGlobalViewsResolverTest.java | 80 +- .../view/ListMyViewsResolverTest.java | 156 +- .../view/UpdateViewResolverTest.java | 261 +- .../graphql/resolvers/view/ViewUtilsTest.java | 159 +- .../types/assertion/AssertionTypeTest.java | 102 +- .../types/container/ContainerTypeTest.java | 187 +- .../DataPlatformInstanceTest.java | 356 +- .../dataset/mappers/DatasetMapperTest.java | 293 +- .../mappers/DatasetProfileMapperTest.java | 322 +- .../graphql/types/domain/DomainTypeTest.java | 128 +- .../types/notebook/NotebookTypeTest.java | 255 +- .../graphql/types/query/QueryTypeTest.java | 277 +- .../types/view/DataHubViewTypeTest.java | 303 +- .../datahub/graphql/utils/DateUtilTest.java | 67 +- .../graphql/utils/MutationsUtilsTest.java | 28 +- .../utils/SystemMetadataUtilsTest.java | 105 +- .../com/linkedin/datahub/upgrade/Upgrade.java | 19 +- .../datahub/upgrade/UpgradeCleanupStep.java | 12 +- .../linkedin/datahub/upgrade/UpgradeCli.java | 7 +- .../upgrade/UpgradeCliApplication.java | 23 +- .../datahub/upgrade/UpgradeContext.java | 26 +- .../datahub/upgrade/UpgradeManager.java | 14 +- .../datahub/upgrade/UpgradeReport.java | 18 +- .../datahub/upgrade/UpgradeResult.java | 29 +- .../linkedin/datahub/upgrade/UpgradeStep.java | 24 +- .../datahub/upgrade/UpgradeStepResult.java | 43 +- .../datahub/upgrade/UpgradeUtils.java | 6 +- .../common/steps/ClearGraphServiceStep.java | 1 - .../common/steps/ClearSearchServiceStep.java | 4 +- .../common/steps/GMSDisableWriteModeStep.java | 1 - .../common/steps/GMSEnableWriteModeStep.java | 1 - .../common/steps/GMSQualificationStep.java | 64 +- .../config/BackfillBrowsePathsV2Config.java | 4 +- .../upgrade/config/BuildIndicesConfig.java | 22 +- .../upgrade/config/CleanIndicesConfig.java | 22 +- .../upgrade/config/NoCodeCleanupConfig.java | 15 +- .../upgrade/config/NoCodeUpgradeConfig.java | 7 +- .../config/RemoveUnknownAspectsConfig.java | 1 - .../upgrade/config/RestoreBackupConfig.java | 21 +- .../upgrade/config/RestoreIndicesConfig.java | 11 +- .../upgrade/config/SystemUpdateConfig.java | 31 +- .../upgrade/impl/DefaultUpgradeContext.java | 1 - .../upgrade/impl/DefaultUpgradeManager.java | 59 +- 
.../upgrade/impl/DefaultUpgradeReport.java | 3 +- .../upgrade/impl/DefaultUpgradeResult.java | 1 - .../impl/DefaultUpgradeStepResult.java | 1 - .../upgrade/nocode/CreateAspectTableStep.java | 60 +- .../upgrade/nocode/DataMigrationStep.java | 103 +- .../datahub/upgrade/nocode/NoCodeUpgrade.java | 5 +- .../nocode/RemoveAspectV2TableStep.java | 5 +- .../nocode/UpgradeQualificationStep.java | 15 +- .../nocodecleanup/DeleteAspectTableStep.java | 5 +- .../DeleteLegacyGraphRelationshipsStep.java | 8 +- .../DeleteLegacySearchIndicesStep.java | 4 +- .../nocodecleanup/NoCodeCleanupUpgrade.java | 15 +- .../NoCodeUpgradeQualificationStep.java | 17 +- .../RemoveClientIdAspectStep.java | 8 +- .../RemoveUnknownAspects.java | 1 - .../restorebackup/ClearAspectV2TableStep.java | 5 +- .../upgrade/restorebackup/RestoreBackup.java | 4 +- .../restorebackup/RestoreStorageStep.java | 83 +- .../backupreader/BackupReader.java | 6 +- .../backupreader/BackupReaderArgs.java | 9 +- .../EbeanAspectBackupIterator.java | 20 +- .../backupreader/LocalParquetReader.java | 17 +- .../backupreader/ParquetReaderWrapper.java | 26 +- .../backupreader/ReaderWrapper.java | 17 +- .../restoreindices/RestoreIndices.java | 15 +- .../upgrade/restoreindices/SendMAEStep.java | 99 +- .../datahub/upgrade/system/SystemUpdate.java | 75 +- .../system/elasticsearch/BuildIndices.java | 79 +- .../system/elasticsearch/CleanIndices.java | 56 +- .../steps/BuildIndicesPostStep.java | 35 +- .../steps/BuildIndicesPreStep.java | 62 +- .../elasticsearch/steps/BuildIndicesStep.java | 2 - .../elasticsearch/steps/CleanIndicesStep.java | 79 +- .../steps/DataHubStartupStep.java | 8 +- .../system/elasticsearch/util/IndexUtils.java | 45 +- .../entity/steps/BackfillBrowsePathsV2.java | 1 - .../steps/BackfillBrowsePathsV2Step.java | 90 +- .../DatahubUpgradeNoSchemaRegistryTest.java | 105 +- .../upgrade/UpgradeCliApplicationTest.java | 69 +- ...pgradeCliApplicationTestConfiguration.java | 24 +- docker/build.gradle | 2 +- .../linkedin/metadata/models/AspectSpec.java | 50 +- .../metadata/models/ConfigEntitySpec.java | 12 +- .../metadata/models/DataSchemaFactory.java | 60 +- .../metadata/models/DefaultEntitySpec.java | 5 +- .../metadata/models/DefaultEventSpec.java | 1 - .../linkedin/metadata/models/EntitySpec.java | 5 +- .../metadata/models/EntitySpecBuilder.java | 252 +- .../metadata/models/EntitySpecUtils.java | 18 +- .../linkedin/metadata/models/EventSpec.java | 17 +- .../metadata/models/EventSpecBuilder.java | 25 +- .../linkedin/metadata/models/FieldSpec.java | 12 +- .../metadata/models/FieldSpecUtils.java | 21 +- .../models/ModelValidationException.java | 4 +- .../metadata/models/PartialEntitySpec.java | 20 +- .../models/PropertyOverrideComparator.java | 1 - .../models/RelationshipFieldSpec.java | 9 +- .../RelationshipFieldSpecExtractor.java | 45 +- .../metadata/models/SearchScoreFieldSpec.java | 3 +- .../models/SearchScoreFieldSpecExtractor.java | 25 +- .../metadata/models/SearchableFieldSpec.java | 3 +- .../models/SearchableFieldSpecExtractor.java | 127 +- .../models/TimeseriesFieldCollectionSpec.java | 3 +- .../metadata/models/TimeseriesFieldSpec.java | 3 +- .../models/TimeseriesFieldSpecExtractor.java | 84 +- .../models/annotation/AnnotationUtils.java | 4 +- .../models/annotation/AspectAnnotation.java | 23 +- .../models/annotation/EntityAnnotation.java | 25 +- .../models/annotation/EventAnnotation.java | 23 +- .../annotation/RelationshipAnnotation.java | 49 +- .../annotation/SearchScoreAnnotation.java | 24 +- .../annotation/SearchableAnnotation.java | 62 +- 
.../annotation/TimeseriesFieldAnnotation.java | 21 +- .../TimeseriesFieldCollectionAnnotation.java | 18 +- .../models/extractor/AspectExtractor.java | 28 +- .../models/extractor/FieldExtractor.java | 55 +- .../models/registry/ConfigEntityRegistry.java | 71 +- .../models/registry/EntityRegistry.java | 19 +- .../models/registry/EntityRegistryUtils.java | 12 +- .../models/registry/LineageRegistry.java | 103 +- .../models/registry/MergedEntityRegistry.java | 98 +- .../models/registry/PatchEntityRegistry.java | 141 +- .../registry/PluginEntityRegistryLoader.java | 151 +- .../registry/SnapshotEntityRegistry.java | 34 +- .../models/registry/config/Entity.java | 10 +- .../config/EntityRegistryLoadResult.java | 4 +- .../models/registry/config/Event.java | 2 +- .../template/ArrayMergingTemplate.java | 88 +- .../template/AspectTemplateEngine.java | 40 +- .../template/CompoundKeyTemplate.java | 17 +- .../models/registry/template/Template.java | 28 +- .../template/common/GlobalTagsTemplate.java | 4 +- .../common/GlossaryTermsTemplate.java | 24 +- .../template/common/OwnershipTemplate.java | 18 +- .../dataflow/DataFlowInfoTemplate.java | 1 - .../template/datajob/DataJobInfoTemplate.java | 1 - .../datajob/DataJobInputOutputTemplate.java | 70 +- .../DataProductPropertiesTemplate.java | 4 +- .../dataset/DatasetPropertiesTemplate.java | 1 - .../EditableSchemaMetadataTemplate.java | 86 +- .../dataset/UpstreamLineageTemplate.java | 8 +- .../registry/template/util/TemplateUtil.java | 28 +- .../models/DataSchemaFactoryTest.java | 16 +- .../models/EntitySpecBuilderTest.java | 392 +- .../registry/ConfigEntityRegistryTest.java | 23 +- .../models/registry/LineageRegistryTest.java | 72 +- .../registry/PatchEntityRegistryTest.java | 45 +- .../PluginEntityRegistryLoaderTest.java | 276 +- .../models/registry/TestConstants.java | 4 +- gradle/checkstyle/checkstyle.xml | 198 - gradle/checkstyle/suppressions.xml | 7 - .../ingestion/IngestionScheduler.java | 241 +- .../ingestion/IngestionSchedulerTest.java | 193 +- .../java/com/datahub/util/ModelUtils.java | 235 +- .../java/com/datahub/util/RecordUtils.java | 291 +- .../main/java/com/datahub/util/Statement.java | 1 - .../util/exception/ESQueryException.java | 4 +- .../exception/InvalidSchemaException.java | 4 +- .../exception/ModelConversionException.java | 4 +- .../util/validator/AspectValidator.java | 24 +- .../util/validator/DeltaValidator.java | 23 +- .../util/validator/DocumentValidator.java | 48 +- .../util/validator/EntityValidator.java | 68 +- .../util/validator/RelationshipValidator.java | 119 +- .../util/validator/SnapshotValidator.java | 56 +- .../util/validator/ValidationUtils.java | 140 +- .../java/com/linkedin/metadata/Constants.java | 99 +- .../java/com/linkedin/util/Configuration.java | 40 +- .../com/linkedin/common/uri/Uri.java | 48 +- .../com/linkedin/common/uri/UriCoercer.java | 19 +- .../com/linkedin/common/url/Url.java | 48 +- .../com/linkedin/common/url/UrlCoercer.java | 19 +- .../linkedin/common/urn/AzkabanFlowUrn.java | 31 +- .../linkedin/common/urn/AzkabanJobUrn.java | 30 +- .../com/linkedin/common/urn/ChartUrn.java | 30 +- .../com/linkedin/common/urn/CorpGroupUrn.java | 34 +- .../com/linkedin/common/urn/CorpuserUrn.java | 33 +- .../com/linkedin/common/urn/DashboardUrn.java | 30 +- .../com/linkedin/common/urn/DataFlowUrn.java | 31 +- .../com/linkedin/common/urn/DataJobUrn.java | 31 +- .../linkedin/common/urn/DataPlatformUrn.java | 28 +- .../linkedin/common/urn/DataProcessUrn.java | 37 +- .../linkedin/common/urn/DatasetFieldUrn.java | 62 +- 
.../com/linkedin/common/urn/DatasetUrn.java | 36 +- .../com/linkedin/common/urn/FabricUrn.java | 29 +- .../linkedin/common/urn/GlossaryNodeUrn.java | 94 +- .../linkedin/common/urn/GlossaryTermUrn.java | 35 +- .../com/linkedin/common/urn/MLFeatureUrn.java | 30 +- .../com/linkedin/common/urn/MLModelUrn.java | 40 +- .../com/linkedin/common/urn/NotebookUrn.java | 30 +- .../com/linkedin/common/urn/TagUrn.java | 94 +- .../linkedin/common/urn/TestEntityUrn.java | 101 +- .../com/linkedin/common/urn/TupleKey.java | 72 +- .../com/linkedin/common/urn/Urn.java | 140 +- .../com/linkedin/common/urn/UrnCoercer.java | 29 +- .../com/linkedin/common/urn/UrnUtils.java | 114 +- .../com/linkedin/common/urn/UrnValidator.java | 13 +- .../com/linkedin/common/urn/VersionedUrn.java | 99 +- .../common/urn/VersionedUrnUtils.java | 18 +- .../linkedin/util/VersionedUrnCoercer.java | 1 - .../common/urn/DatasetFieldUrnTest.java | 23 +- .../linkedin/common/util/ModelUtilsTest.java | 38 +- .../linkedin/common/util/RecordUtilsTest.java | 103 +- .../common/util/VersionedUrnUtilsTest.java | 5 +- .../com/datahub/authentication/Actor.java | 23 +- .../com/datahub/authentication/ActorType.java | 8 +- .../authentication/Authentication.java | 15 +- .../authentication/AuthenticationContext.java | 3 +- .../AuthenticationException.java | 1 - .../AuthenticationExpiredException.java | 1 - .../authentication/AuthenticationRequest.java | 11 +- .../authentication/AuthenticatorContext.java | 13 +- .../com/datahub/authorization/AuthUtil.java | 26 +- .../authorization/AuthorizationRequest.java | 19 +- .../authorization/AuthorizationResult.java | 29 +- .../authorization/AuthorizedActors.java | 1 - .../authorization/AuthorizerContext.java | 14 +- .../ConjunctivePrivilegeGroup.java | 6 +- .../DisjunctivePrivilegeGroup.java | 4 +- .../authorization/EntityFieldType.java | 30 +- .../com/datahub/authorization/EntitySpec.java | 22 +- .../authorization/EntitySpecResolver.java | 7 +- .../datahub/authorization/FieldResolver.java | 27 +- .../authorization/ResolvedEntitySpec.java | 23 +- .../main/java/com/datahub/plugins/Plugin.java | 7 +- .../com/datahub/plugins/PluginConstant.java | 3 +- .../auth/authentication/Authenticator.java | 30 +- .../auth/authorization/Authorizer.java | 22 +- .../producer/BaseMetadataEventProducer.java | 28 +- .../dao/producer/KafkaEventProducer.java | 70 +- .../dao/producer/KafkaHealthChecker.java | 181 +- .../producer/KafkaMetadataEventProducer.java | 62 +- .../dao/producer/KafkaProducerCallback.java | 1 - metadata-events/mxe-avro/build.gradle | 2 +- .../main/java/com/linkedin/mxe/Configs.java | 38 +- .../com/linkedin/mxe/TopicConvention.java | 52 +- .../com/linkedin/mxe/TopicConventionImpl.java | 64 +- .../main/java/com/linkedin/mxe/Topics.java | 20 +- .../com/linkedin/metadata/EventUtils.java | 188 +- .../linkedin/metadata/EventUtilsTests.java | 55 +- .../java/datahub-client/build.gradle | 2 - .../main/java/datahub/client/Callback.java | 9 +- .../src/main/java/datahub/client/Emitter.java | 54 +- .../client/MetadataResponseFuture.java | 7 +- .../datahub/client/MetadataWriteResponse.java | 18 +- .../java/datahub/client/file/FileEmitter.java | 117 +- .../client/file/FileEmitterConfig.java | 9 +- .../datahub/client/kafka/AvroSerializer.java | 27 +- .../datahub/client/kafka/KafkaEmitter.java | 67 +- .../client/kafka/KafkaEmitterConfig.java | 29 +- .../patch/AbstractMultiFieldPatchBuilder.java | 25 +- .../client/patch/PatchOperationType.java | 5 +- .../common/CustomPropertiesPatchBuilder.java | 29 +- 
.../patch/common/GlobalTagsPatchBuilder.java | 11 +- .../common/GlossaryTermsPatchBuilder.java | 14 +- .../patch/common/OwnershipPatchBuilder.java | 32 +- .../dataflow/DataFlowInfoPatchBuilder.java | 54 +- .../datajob/DataJobInfoPatchBuilder.java | 51 +- .../DataJobInputOutputPatchBuilder.java | 96 +- .../DatasetPropertiesPatchBuilder.java | 66 +- .../EditableSchemaMetadataPatchBuilder.java | 53 +- .../dataset/UpstreamLineagePatchBuilder.java | 25 +- .../CustomPropertiesPatchBuilderSupport.java | 8 +- .../IntermediatePatchBuilder.java | 13 +- .../java/datahub/client/rest/RestEmitter.java | 316 +- .../client/rest/RestEmitterConfig.java | 46 +- .../java/datahub/event/EventFormatter.java | 52 +- .../event/EventValidationException.java | 1 + .../event/MetadataChangeProposalWrapper.java | 17 +- .../java/datahub/event/StringEscapeUtils.java | 122 +- .../datahub/event/UpsertAspectRequest.java | 17 +- .../datahub/client/file/FileEmitterTest.java | 125 +- .../client/kafka/AvroSerializerTest.java | 17 +- .../client/kafka/KafkaEmitterTest.java | 73 +- .../kafka/containers/KafkaContainer.java | 71 +- .../containers/SchemaRegistryContainer.java | 76 +- .../client/kafka/containers/Utils.java | 25 +- .../kafka/containers/ZookeeperContainer.java | 80 +- .../java/datahub/client/patch/PatchTest.java | 354 +- .../datahub/client/rest/RestEmitterTest.java | 425 ++- .../datahub/event/EventFormatterTest.java | 44 +- .../MetadataChangeProposalWrapperTest.java | 121 +- .../datahub/server/TestDataHubServer.java | 24 +- .../google/protobuf/ExtensionRegistry.java | 543 ++- .../datahub/protobuf/DirectoryWalker.java | 67 +- .../java/datahub/protobuf/Proto2DataHub.java | 685 ++-- .../datahub/protobuf/ProtobufDataset.java | 465 +-- .../java/datahub/protobuf/ProtobufUtils.java | 354 +- .../datahub/protobuf/model/FieldTypeEdge.java | 75 +- .../protobuf/model/ProtobufElement.java | 45 +- .../datahub/protobuf/model/ProtobufEnum.java | 131 +- .../datahub/protobuf/model/ProtobufField.java | 452 +-- .../datahub/protobuf/model/ProtobufGraph.java | 800 ++-- .../protobuf/model/ProtobufMessage.java | 194 +- .../protobuf/model/ProtobufOneOfField.java | 87 +- .../visitors/ProtobufExtensionUtil.java | 307 +- .../visitors/ProtobufModelVisitor.java | 28 +- .../protobuf/visitors/VisitContext.java | 87 +- .../visitors/dataset/DatasetVisitor.java | 204 +- .../visitors/dataset/DeprecationVisitor.java | 77 +- .../visitors/dataset/DescriptionVisitor.java | 9 +- .../visitors/dataset/DomainVisitor.java | 23 +- .../dataset/InstitutionalMemoryVisitor.java | 216 +- .../dataset/KafkaTopicPropertyVisitor.java | 28 +- .../visitors/dataset/OwnershipVisitor.java | 78 +- .../visitors/dataset/PropertyVisitor.java | 61 +- .../dataset/TagAssociationVisitor.java | 20 +- .../dataset/TermAssociationVisitor.java | 17 +- .../field/ProtobufExtensionFieldVisitor.java | 119 +- .../visitors/field/SchemaFieldVisitor.java | 26 +- .../protobuf/visitors/tags/TagVisitor.java | 59 +- .../datahub/protobuf/ProtobufDatasetTest.java | 1113 ++++-- .../datahub/protobuf/ProtobufUtilsTest.java | 72 +- .../java/datahub/protobuf/TestFixtures.java | 115 +- .../protobuf/model/ProtobufEnumTest.java | 125 +- .../protobuf/model/ProtobufFieldTest.java | 398 +- .../protobuf/model/ProtobufGraphTest.java | 161 +- .../protobuf/model/ProtobufMessageTest.java | 318 +- .../model/ProtobufOneOfFieldTest.java | 219 +- .../protobuf/visitors/VisitContextTest.java | 53 +- .../visitors/dataset/DatasetVisitorTest.java | 85 +- .../dataset/DescriptionVisitorTest.java | 27 +- 
.../visitors/dataset/DomainVisitorTest.java | 29 +- .../InstitutionalMemoryVisitorTest.java | 110 +- .../KafkaTopicPropertyVisitorTest.java | 47 +- .../dataset/OwnershipVisitorTest.java | 88 +- .../visitors/dataset/PropertyVisitorTest.java | 100 +- .../dataset/TermAssociationVisitorTest.java | 58 +- .../ProtobufExtensionFieldVisitorTest.java | 445 ++- .../field/SchemaFieldVisitorTest.java | 107 +- .../protobuf/visitors/tag/TagVisitorTest.java | 132 +- .../examples/DataJobLineageAdd.java | 52 +- .../datahubproject/examples/DatasetAdd.java | 113 +- .../examples/DatasetCustomPropertiesAdd.java | 58 +- .../DatasetCustomPropertiesAddRemove.java | 53 +- .../DatasetCustomPropertiesReplace.java | 28 +- .../io/datahubproject/examples/TagCreate.java | 49 +- .../test/spark/lineage/HdfsIn2HdfsOut1.java | 32 +- .../test/spark/lineage/HdfsIn2HdfsOut2.java | 42 +- .../lineage/HdfsIn2HiveCreateInsertTable.java | 65 +- .../spark/lineage/HdfsIn2HiveCreateTable.java | 58 +- .../test/spark/lineage/HiveInHiveOut.java | 80 +- .../spark/lineage/HiveInHiveOut_test1.java | 79 +- .../main/java/test/spark/lineage/Utils.java | 2 +- .../datahub/spark/DatahubSparkListener.java | 346 +- .../java/datahub/spark/DatasetExtractor.java | 451 ++- .../consumer/impl/CoalesceJobsEmitter.java | 70 +- .../spark/consumer/impl/McpEmitter.java | 141 +- .../java/datahub/spark/model/AppEndEvent.java | 11 +- .../datahub/spark/model/AppStartEvent.java | 45 +- .../datahub/spark/model/DatasetLineage.java | 15 +- .../datahub/spark/model/LineageConsumer.java | 3 +- .../datahub/spark/model/LineageEvent.java | 5 +- .../datahub/spark/model/LineageUtils.java | 60 +- .../spark/model/SQLQueryExecEndEvent.java | 13 +- .../spark/model/SQLQueryExecStartEvent.java | 42 +- .../model/dataset/CatalogTableDataset.java | 11 +- .../spark/model/dataset/HdfsPathDataset.java | 27 +- .../spark/model/dataset/JdbcDataset.java | 9 +- .../spark/model/dataset/SparkDataset.java | 8 +- .../datahub/spark/TestCoalesceJobLineage.java | 126 +- .../datahub/spark/TestSparkJobsLineage.java | 293 +- .../aspect/utils/DeprecationUtils.java | 47 +- .../metadata/client/JavaEntityClient.java | 1290 ++++--- .../client/SystemJavaEntityClient.java | 48 +- .../com/linkedin/metadata/dao/AspectKey.java | 14 +- .../linkedin/metadata/dao/BaseReadDAO.java | 56 +- .../linkedin/metadata/entity/AspectDao.java | 280 +- .../metadata/entity/AspectMigrationsDao.java | 12 +- .../metadata/entity/EntityAspect.java | 37 +- .../entity/EntityAspectIdentifier.java | 12 +- .../metadata/entity/EntityServiceImpl.java | 1983 ++++++---- .../linkedin/metadata/entity/EntityUtils.java | 104 +- .../metadata/entity/NewModelUtils.java | 60 +- .../AspectStorageValidationUtil.java | 13 +- .../entity/cassandra/CassandraAspect.java | 22 +- .../entity/cassandra/CassandraAspectDao.java | 430 ++- .../cassandra/CassandraRetentionService.java | 195 +- .../ebean/AspectStorageValidationUtil.java | 18 +- .../metadata/entity/ebean/EbeanAspectDao.java | 445 ++- .../metadata/entity/ebean/EbeanAspectV1.java | 14 +- .../metadata/entity/ebean/EbeanAspectV2.java | 41 +- .../entity/ebean/EbeanRetentionService.java | 214 +- .../ebean/transactions/AspectsBatchImpl.java | 92 +- .../ebean/transactions/PatchBatchItem.java | 304 +- .../ebean/transactions/UpsertBatchItem.java | 282 +- .../EntityRegistryUrnValidator.java | 50 +- .../validation/RecordTemplateValidator.java | 72 +- .../validation/ValidationException.java | 4 +- .../entity/validation/ValidationUtils.java | 60 +- .../metadata/event/EntityEventProducer.java | 23 +- 
.../metadata/event/EventProducer.java | 39 +- .../metadata/graph/JavaGraphClient.java | 77 +- .../metadata/graph/SiblingGraphService.java | 220 +- .../metadata/graph/dgraph/DgraphExecutor.java | 147 +- .../graph/dgraph/DgraphGraphService.java | 1261 +++--- .../metadata/graph/dgraph/DgraphSchema.java | 216 +- .../graph/elastic/ESGraphQueryDAO.java | 365 +- .../graph/elastic/ESGraphWriteDAO.java | 36 +- .../elastic/ElasticSearchGraphService.java | 161 +- .../GraphRelationshipMappingsBuilder.java | 17 +- .../graph/elastic/TimeFilterUtils.java | 90 +- .../graph/neo4j/Neo4jGraphService.java | 470 ++- .../candidatesource/MostPopularSource.java | 73 +- .../candidatesource/RecentlyEditedSource.java | 82 +- .../candidatesource/RecentlyViewedSource.java | 85 +- .../search/EntityLineageResultCacheKey.java | 23 +- .../metadata/search/LineageSearchService.java | 631 +-- .../metadata/search/SearchService.java | 195 +- .../search/cache/CacheableSearcher.java | 40 +- .../cache/CachedEntityLineageResult.java | 7 +- .../search/cache/EntityDocCountCache.java | 22 +- .../client/CachingEntitySearchService.java | 232 +- .../elasticsearch/ElasticSearchService.java | 165 +- .../indexbuilder/ESIndexBuilder.java | 400 +- .../indexbuilder/EntityIndexBuilders.java | 68 +- .../indexbuilder/MappingsBuilder.java | 148 +- .../indexbuilder/ReindexConfig.java | 446 ++- .../indexbuilder/SettingsBuilder.java | 286 +- .../elasticsearch/query/ESBrowseDAO.java | 194 +- .../elasticsearch/query/ESSearchDAO.java | 280 +- .../request/AggregationQueryBuilder.java | 49 +- .../request/AutocompleteRequestHandler.java | 99 +- .../query/request/CustomizedQueryHandler.java | 49 +- .../query/request/PITAwareSearchRequest.java | 1 - .../query/request/SearchAfterWrapper.java | 10 +- .../query/request/SearchFieldConfig.java | 303 +- .../query/request/SearchQueryBuilder.java | 488 ++- .../query/request/SearchRequestHandler.java | 365 +- .../elasticsearch/update/BulkListener.java | 50 +- .../elasticsearch/update/ESBulkProcessor.java | 318 +- .../elasticsearch/update/ESWriteDAO.java | 27 +- .../search/features/FeatureExtractor.java | 9 +- .../metadata/search/features/Features.java | 11 +- .../metadata/search/ranker/SearchRanker.java | 50 +- .../metadata/search/ranker/SimpleRanker.java | 4 +- .../SearchDocumentTransformer.java | 182 +- .../search/utils/BrowsePathUtils.java | 210 +- .../search/utils/BrowsePathV2Utils.java | 115 +- .../metadata/search/utils/ESUtils.java | 336 +- .../metadata/search/utils/FilterUtils.java | 17 +- .../metadata/search/utils/GZIPUtil.java | 6 +- .../metadata/search/utils/SearchUtils.java | 95 +- .../service/UpdateIndicesService.java | 310 +- .../metadata/shared/ElasticSearchIndexed.java | 24 +- .../systemmetadata/ESSystemMetadataDAO.java | 62 +- .../ElasticSearchSystemMetadataService.java | 129 +- .../systemmetadata/SystemMetadataEntry.java | 1 - .../SystemMetadataMappingsBuilder.java | 3 +- .../timeline/MissingEntityAspect.java | 3 +- .../timeline/TimelineServiceImpl.java | 388 +- .../DatasetSchemaFieldChangeEvent.java | 10 +- .../SchemaFieldGlossaryTermChangeEvent.java | 13 +- .../schema/SchemaFieldTagChangeEvent.java | 13 +- .../data/entity/DomainChangeEvent.java | 11 +- .../data/entity/GlossaryTermChangeEvent.java | 11 +- .../data/entity/OwnerChangeEvent.java | 9 +- .../timeline/data/entity/TagChangeEvent.java | 11 +- .../timeline/eventgenerator/Aspect.java | 13 +- ...AssertionRunEventChangeEventGenerator.java | 28 +- .../ChangeEventGeneratorUtils.java | 88 +- ...sInstanceRunEventChangeEventGenerator.java | 37 +- 
...DatasetPropertiesChangeEventGenerator.java | 94 +- .../DeprecationChangeEventGenerator.java | 32 +- ...DatasetPropertiesChangeEventGenerator.java | 69 +- ...bleSchemaMetadataChangeEventGenerator.java | 209 +- .../EntityChangeEventGenerator.java | 36 +- .../EntityChangeEventGeneratorFactory.java | 15 +- .../EntityChangeEventGeneratorRegistry.java | 22 +- .../EntityKeyChangeEventGenerator.java | 7 +- .../GlobalTagsChangeEventGenerator.java | 124 +- .../GlossaryTermInfoChangeEventGenerator.java | 187 +- .../GlossaryTermsChangeEventGenerator.java | 153 +- ...stitutionalMemoryChangeEventGenerator.java | 171 +- .../OwnershipChangeEventGenerator.java | 173 +- .../SchemaMetadataChangeEventGenerator.java | 427 ++- .../SingleDomainChangeEventGenerator.java | 28 +- .../StatusChangeEventGenerator.java | 30 +- .../ElasticSearchTimeseriesAspectService.java | 195 +- .../elastic/indexbuilder/MappingsBuilder.java | 37 +- .../TimeseriesAspectIndexBuilders.java | 44 +- .../elastic/query/ESAggregatedStatsDAO.java | 192 +- .../TimeseriesAspectTransformer.java | 135 +- .../linkedin/metadata/version/GitVersion.java | 1 - .../metadata/AspectGenerationUtils.java | 10 +- .../metadata/AspectIngestionUtils.java | 57 +- .../linkedin/metadata/AspectUtilsTest.java | 24 +- .../linkedin/metadata/CassandraTestUtils.java | 96 +- .../linkedin/metadata/DockerTestUtils.java | 24 +- .../com/linkedin/metadata/EbeanTestUtils.java | 7 +- .../metadata/TestEntitySpecBuilder.java | 4 +- .../com/linkedin/metadata/TestEntityUtil.java | 65 +- .../metadata/client/JavaEntityClientTest.java | 223 +- .../update/BulkListenerTest.java | 52 +- .../update/ESBulkProcessorTest.java | 18 +- .../entity/AspectMigrationsDaoTest.java | 22 +- .../CassandraAspectMigrationsDaoTest.java | 24 +- .../entity/CassandraEntityServiceTest.java | 83 +- .../entity/DeleteEntityServiceTest.java | 70 +- .../entity/DeleteEntityUtilsTest.java | 264 +- .../entity/EbeanAspectMigrationsDaoTest.java | 36 +- .../entity/EbeanEntityServiceTest.java | 287 +- .../metadata/entity/EntityServiceTest.java | 2909 +++++++------- .../metadata/entity/TestEntityRegistry.java | 9 +- .../extractor/AspectExtractorTest.java | 8 +- .../extractor/FieldExtractorTest.java | 82 +- .../com/linkedin/metadata/graph/EdgeTest.java | 77 +- .../metadata/graph/GraphServiceTestBase.java | 2449 ++++++------ .../graph/dgraph/DgraphContainer.java | 419 +- .../graph/dgraph/DgraphGraphServiceTest.java | 1390 +++---- .../graph/neo4j/Neo4jGraphServiceTest.java | 199 +- .../graph/neo4j/Neo4jTestServerBuilder.java | 7 +- .../graph/search/ESGraphQueryDAOTest.java | 165 +- .../search/SearchGraphServiceTestBase.java | 309 +- .../graph/search/TimeFilterUtilsTest.java | 7 +- .../SearchGraphServiceElasticSearchTest.java | 11 +- .../SearchGraphServiceOpenSearchTest.java | 10 +- .../sibling/SiblingGraphServiceTest.java | 523 +-- .../RecommendationsServiceTest.java | 91 +- ...ySearchAggregationCandidateSourceTest.java | 63 +- .../RecommendationUtilsTest.java | 13 +- .../candidatesource/TestSource.java | 8 +- .../LineageSearchResultCacheKeyTest.java | 30 +- .../search/LineageServiceTestBase.java | 1019 +++-- .../search/SearchServiceTestBase.java | 273 +- .../metadata/search/TestEntityTestBase.java | 130 +- .../search/cache/CacheableSearcherTest.java | 126 +- .../elasticsearch/ElasticSearchSuite.java | 32 +- .../GoldenElasticSearchTest.java | 63 +- .../IndexBuilderElasticSearchTest.java | 26 +- .../LineageDataFixtureElasticSearchTest.java | 59 +- .../LineageServiceElasticSearchTest.java | 24 +- 
.../SampleDataFixtureElasticSearchTest.java | 45 +- .../SearchDAOElasticSearchTest.java | 24 +- .../SearchServiceElasticSearchTest.java | 25 +- ...ystemMetadataServiceElasticSearchTest.java | 12 +- .../TestEntityElasticSearchTest.java | 21 +- ...eseriesAspectServiceElasticSearchTest.java | 13 +- .../search/fixtures/GoldenTestBase.java | 297 +- .../fixtures/LineageDataFixtureTestBase.java | 83 +- .../fixtures/SampleDataFixtureTestBase.java | 3277 +++++++++------- .../indexbuilder/IndexBuilderTestBase.java | 421 +- .../indexbuilder/MappingsBuilderTest.java | 67 +- .../opensearch/GoldenOpenSearchTest.java | 59 +- .../IndexBuilderOpenSearchTest.java | 26 +- .../LineageDataFixtureOpenSearchTest.java | 55 +- .../LineageServiceOpenSearchTest.java | 21 +- .../search/opensearch/OpenSearchSuite.java | 31 +- .../SampleDataFixtureOpenSearchTest.java | 44 +- .../opensearch/SearchDAOOpenSearchTest.java | 18 +- .../SearchServiceOpenSearchTest.java | 21 +- .../SystemMetadataServiceOpenSearchTest.java | 10 +- .../opensearch/TestEntityOpenSearchTest.java | 21 +- ...TimeseriesAspectServiceOpenSearchTest.java | 9 +- .../metadata/search/query/BrowseDAOTest.java | 42 +- .../search/query/SearchDAOTestBase.java | 647 ++-- .../request/AggregationQueryBuilderTest.java | 149 +- .../AutocompleteRequestHandlerTest.java | 19 +- .../request/CustomizedQueryHandlerTest.java | 357 +- .../query/request/SearchQueryBuilderTest.java | 383 +- .../request/SearchRequestHandlerTest.java | 576 +-- .../SearchDocumentTransformerTest.java | 65 +- .../search/utils/BrowsePathUtilsTest.java | 65 +- .../search/utils/BrowsePathV2UtilsTest.java | 167 +- .../metadata/search/utils/ESUtilsTest.java | 361 +- .../search/utils/SearchUtilsTest.java | 207 +- .../SystemMetadataServiceTestBase.java | 28 +- .../CassandraTimelineServiceTest.java | 32 +- .../timeline/EbeanTimelineServiceTest.java | 35 +- .../timeline/TimelineServiceTest.java | 100 +- ...chemaMetadataChangeEventGeneratorTest.java | 64 +- .../TimeseriesAspectServiceTestBase.java | 1025 +++-- .../io/datahubproject/test/DataGenerator.java | 687 ++-- .../test/fixtures/search/EntityExporter.java | 87 +- .../test/fixtures/search/FixtureReader.java | 170 +- .../test/fixtures/search/FixtureWriter.java | 111 +- .../test/fixtures/search/LineageExporter.java | 359 +- .../SampleDataFixtureConfiguration.java | 493 +-- .../fixtures/search/SearchFixtureUtils.java | 243 +- .../SearchLineageFixtureConfiguration.java | 378 +- .../test/models/Anonymized.java | 75 +- .../test/models/DatasetAnonymized.java | 66 +- .../test/models/GraphAnonymized.java | 20 +- .../search/ElasticsearchTestContainer.java | 70 +- .../test/search/OpenSearchTestContainer.java | 73 +- .../test/search/SearchTestContainer.java | 11 +- .../test/search/SearchTestUtils.java | 279 +- .../config/SearchCommonTestConfiguration.java | 89 +- .../SearchTestContainerConfiguration.java | 127 +- .../kafka/MaeConsumerApplication.java | 43 +- .../kafka/MaeConsumerApplicationTest.java | 11 +- ...eConsumerApplicationTestConfiguration.java | 27 +- .../kafka/DataHubUsageEventsProcessor.java | 30 +- .../metadata/kafka/MclConsumerConfig.java | 16 +- .../kafka/MetadataChangeLogProcessor.java | 55 +- .../boot/ApplicationStartupListener.java | 12 +- .../boot/MCLBootstrapManagerFactory.java | 8 +- .../DataHubUsageEventsProcessorCondition.java | 11 +- .../kafka/config/EntityHydratorConfig.java | 12 +- .../MetadataChangeLogProcessorCondition.java | 5 +- .../kafka/elasticsearch/ElasticEvent.java | 2 +- .../elasticsearch/ElasticsearchConnector.java | 19 +- 
.../ElasticsearchConnectorFactory.java | 7 +- .../kafka/elasticsearch/JsonElasticEvent.java | 17 +- .../kafka/elasticsearch/MCEElasticEvent.java | 20 +- .../kafka/hook/MetadataChangeLogHook.java | 17 +- .../kafka/hook/UpdateIndicesHook.java | 18 +- .../event/EntityChangeEventGeneratorHook.java | 120 +- .../ingestion/IngestionSchedulerHook.java | 45 +- .../hook/siblings/SiblingAssociationHook.java | 284 +- .../metadata/kafka/hydrator/BaseHydrator.java | 9 +- .../kafka/hydrator/ChartHydrator.java | 16 +- .../kafka/hydrator/CorpUserHydrator.java | 15 +- .../kafka/hydrator/DashboardHydrator.java | 16 +- .../kafka/hydrator/DataFlowHydrator.java | 16 +- .../kafka/hydrator/DataJobHydrator.java | 16 +- .../kafka/hydrator/DatasetHydrator.java | 5 +- .../kafka/hydrator/EntityHydrator.java | 28 +- .../DataHubUsageEventTransformer.java | 48 +- .../kafka/hook/EntityRegistryTestUtil.java | 20 +- .../kafka/hook/GraphIndexUtilsTest.java | 102 +- .../hook/MCLProcessingTestDataGenerator.java | 24 +- .../kafka/hook/UpdateIndicesHookTest.java | 227 +- .../EntityChangeEventGeneratorHookTest.java | 318 +- .../hook/event/PlatformEventMatcher.java | 37 +- .../ingestion/IngestionSchedulerHookTest.java | 34 +- .../siblings/SiblingAssociationHookTest.java | 311 +- .../kafka/hook/spring/MCLSpringTest.java | 37 +- .../spring/MCLSpringTestConfiguration.java | 40 +- .../kafka/MceConsumerApplication.java | 55 +- .../metadata/restli/EbeanServerConfig.java | 99 +- .../metadata/restli/RestliServletConfig.java | 78 +- .../kafka/MceConsumerApplicationTest.java | 39 +- ...eConsumerApplicationTestConfiguration.java | 44 +- .../metadata/kafka/McpConsumerConfig.java | 16 +- .../kafka/MetadataChangeEventsProcessor.java | 52 +- .../MetadataChangeProposalsProcessor.java | 45 +- .../boot/ApplicationStartupListener.java | 14 +- .../boot/MCPBootstrapManagerFactory.java | 13 +- ...adataChangeProposalProcessorCondition.java | 5 +- .../datahub/event/PlatformEventProcessor.java | 22 +- .../datahub/event/hook/PlatformEventHook.java | 13 +- .../model/validation/ModelValidationTask.java | 40 +- metadata-models/build.gradle | 4 - .../linkedin/metadata/ModelValidation.java | 28 +- .../metadata/ModelValidationConstants.java | 1 - .../AuthenticationConfiguration.java | 26 +- .../AuthenticationConstants.java | 23 +- .../AuthenticatorConfiguration.java | 11 +- .../TokenServiceConfiguration.java | 5 +- .../AuthorizationConfiguration.java | 16 +- .../AuthorizerConfiguration.java | 19 +- .../DefaultAuthorizerConfiguration.java | 10 +- .../filter/AuthenticationFilter.java | 217 +- .../authentication/AuthTestConfiguration.java | 22 +- .../AuthenticationFilterTest.java | 36 +- .../authenticator/AuthenticatorChain.java | 73 +- .../DataHubJwtTokenAuthenticator.java | 67 +- .../DataHubSystemAuthenticator.java | 59 +- .../DataHubTokenAuthenticator.java | 56 +- .../HealthStatusAuthenticator.java | 41 +- .../authenticator/NoOpAuthenticator.java | 39 +- .../authentication/group/GroupService.java | 125 +- .../invite/InviteTokenService.java | 76 +- .../authentication/post/PostService.java | 33 +- .../token/DataHubJwtSigningKeyResolver.java | 33 +- .../token/StatefulTokenService.java | 92 +- .../token/StatelessTokenService.java | 71 +- .../authentication/token/TokenClaims.java | 58 +- .../authentication/token/TokenException.java | 4 +- .../token/TokenExpiredException.java | 4 +- .../authentication/token/TokenType.java | 12 +- .../authentication/token/TokenVersion.java | 45 +- .../user/NativeUserService.java | 83 +- .../authorization/AuthorizerChain.java | 51 +- 
.../authorization/DataHubAuthorizer.java | 144 +- .../DefaultEntitySpecResolver.java | 12 +- .../datahub/authorization/FilterUtils.java | 41 +- .../datahub/authorization/PolicyEngine.java | 149 +- .../datahub/authorization/PolicyFetcher.java | 123 +- ...PlatformInstanceFieldResolverProvider.java | 31 +- .../DomainFieldResolverProvider.java | 74 +- .../EntityFieldResolverProvider.java | 12 +- .../EntityTypeFieldResolverProvider.java | 7 +- .../EntityUrnFieldResolverProvider.java | 7 +- .../GroupMembershipFieldResolverProvider.java | 45 +- .../OwnerFieldResolverProvider.java | 20 +- .../authorization/role/RoleService.java | 49 +- .../datahub/telemetry/TrackingService.java | 94 +- .../authenticator/AuthenticatorChainTest.java | 28 +- .../DataHubJwtTokenAuthenticatorTest.java | 24 +- .../DataHubSystemAuthenticatorTest.java | 110 +- .../DataHubTokenAuthenticatorTest.java | 291 +- .../group/GroupServiceTest.java | 186 +- .../invite/InviteTokenServiceTest.java | 125 +- .../authentication/post/PostServiceTest.java | 25 +- .../DataHubJwtSigningKeyResolverTest.java | 35 +- .../token/StatefulTokenServiceTest.java | 97 +- .../token/StatelessTokenServiceTest.java | 78 +- .../user/NativeUserServiceTest.java | 163 +- .../authorization/DataHubAuthorizerTest.java | 496 ++- .../authorization/PolicyEngineTest.java | 593 ++- .../authorization/RoleServiceTest.java | 44 +- ...formInstanceFieldResolverProviderTest.java | 152 +- ...upMembershipFieldResolverProviderTest.java | 210 +- .../telemetry/TrackingServiceTest.java | 70 +- .../authentication/AuthServiceController.java | 300 +- .../metadata/config/AssetsConfiguration.java | 6 +- .../config/AuthPluginConfiguration.java | 7 +- .../metadata/config/DataHubConfiguration.java | 10 +- .../metadata/config/EntityProfileConfig.java | 4 +- .../EntityRegistryPluginConfiguration.java | 3 +- .../config/IngestionConfiguration.java | 14 +- .../metadata/config/PluginConfiguration.java | 24 +- .../metadata/config/PreProcessHooks.java | 1 - .../metadata/config/QueriesTabConfig.java | 5 +- .../config/RetentionPluginConfiguration.java | 3 +- .../config/SearchResultVisualConfig.java | 9 +- .../metadata/config/TestsConfiguration.java | 10 +- .../metadata/config/ViewsConfiguration.java | 10 +- .../metadata/config/VisualConfiguration.java | 21 +- .../config/cache/CacheConfiguration.java | 1 - .../EntityDocCountCacheConfiguration.java | 1 - .../cache/HomepageCacheConfiguration.java | 1 - .../cache/PrimaryCacheConfiguration.java | 1 - .../cache/SearchCacheConfiguration.java | 1 - .../SearchLineageCacheConfiguration.java | 1 - .../cache/client/ClientCacheConfig.java | 15 +- .../client/ClientCacheConfiguration.java | 4 +- .../cache/client/EntityClientCacheConfig.java | 17 +- .../cache/client/UsageClientCacheConfig.java | 10 +- .../config/kafka/ConsumerConfiguration.java | 1 - .../config/kafka/ProducerConfiguration.java | 1 - .../kafka/SchemaRegistryConfiguration.java | 1 - .../search/BuildIndicesConfiguration.java | 1 - .../config/search/CustomConfiguration.java | 11 +- .../search/ElasticSearchConfiguration.java | 1 - .../search/ExactMatchConfiguration.java | 1 - .../search/GraphQueryConfiguration.java | 2 +- .../config/search/PartialConfiguration.java | 1 - .../config/search/SearchConfiguration.java | 1 - .../config/search/WordGramConfiguration.java | 1 - .../search/custom/BoolQueryConfiguration.java | 18 +- .../custom/CustomSearchConfiguration.java | 7 +- .../search/custom/QueryConfiguration.java | 24 +- .../telemetry/TelemetryConfiguration.java | 33 +- 
.../spring/YamlPropertySourceFactory.java | 12 +- .../BatchWriteOperationsOptions.java | 1 - .../factory/auth/AuthorizerChainFactory.java | 93 +- .../AwsRequestSigningApacheInterceptor.java | 69 +- .../auth/DataHubAuthorizerFactory.java | 20 +- .../auth/DataHubTokenServiceFactory.java | 17 +- .../gms/factory/auth/GroupServiceFactory.java | 7 +- .../auth/InviteTokenServiceFactory.java | 3 +- .../auth/NativeUserServiceFactory.java | 15 +- .../gms/factory/auth/PostServiceFactory.java | 3 +- .../gms/factory/auth/RoleServiceFactory.java | 7 +- .../auth/SystemAuthenticationFactory.java | 10 +- .../gms/factory/common/CacheConfig.java | 22 +- .../common/DatasetUrnNameCasingFactory.java | 3 +- .../ElasticSearchGraphServiceFactory.java | 25 +- ...ticSearchSystemMetadataServiceFactory.java | 13 +- .../ElasticsearchSSLContextFactory.java | 154 +- .../gms/factory/common/GitVersionFactory.java | 1 - .../factory/common/GraphClientFactory.java | 1 - .../factory/common/GraphServiceFactory.java | 8 +- .../common/IndexConventionFactory.java | 1 - .../LocalCassandraSessionConfigFactory.java | 24 +- .../common/LocalEbeanServerConfigFactory.java | 4 +- .../factory/common/Neo4jDriverFactory.java | 8 +- .../common/Neo4jGraphServiceFactory.java | 6 +- .../common/RestHighLevelClientFactory.java | 103 +- .../common/SiblingGraphServiceFactory.java | 1 - .../common/SystemMetadataServiceFactory.java | 3 +- .../common/TopicConventionFactory.java | 24 +- .../factory/config/ConfigurationProvider.java | 72 +- .../config/HealthCheckConfiguration.java | 1 - .../DataProductServiceFactory.java | 5 +- .../entity/CassandraSessionFactory.java | 33 +- .../factory/entity/EbeanServerFactory.java | 4 +- .../entity/EntityAspectDaoFactory.java | 3 +- .../EntityAspectMigrationsDaoFactory.java | 3 +- .../factory/entity/EntityServiceFactory.java | 43 +- .../entity/JavaEntityClientFactory.java | 18 +- .../entity/RestliEntityClientFactory.java | 25 +- .../entity/RetentionServiceFactory.java | 14 +- .../indices/UpdateIndicesServiceFactory.java | 29 +- .../ConfigEntityRegistryFactory.java | 3 +- .../entityregistry/EntityRegistryFactory.java | 3 +- .../PluginEntityRegistryFactory.java | 6 +- .../factory/graphql/GraphQLEngineFactory.java | 27 +- .../ingestion/IngestionSchedulerFactory.java | 15 +- .../DataHubKafkaEventProducerFactory.java | 11 +- .../kafka/DataHubKafkaProducerFactory.java | 58 +- .../kafka/KafkaEventConsumerFactory.java | 201 +- .../kafka/SimpleKafkaConsumerFactory.java | 22 +- .../kafka/ThreadPoolContainerCustomizer.java | 7 +- .../AwsGlueSchemaRegistryFactory.java | 15 +- .../DUHESchemaRegistryFactory.java | 42 +- .../InternalSchemaRegistryFactory.java | 23 +- .../KafkaSchemaRegistryFactory.java | 16 +- .../schemaregistry/SchemaRegistryConfig.java | 1 - .../lineage/LineageServiceFactory.java | 8 +- .../OwnershipTypeServiceFactory.java | 5 +- .../factory/query/QueryServiceFactory.java | 3 +- .../RecommendationServiceFactory.java | 28 +- .../MostPopularCandidateSourceFactory.java | 7 +- .../RecentlyEditedCandidateSourceFactory.java | 7 +- ...ecentlySearchedCandidateSourceFactory.java | 1 - .../RecentlyViewedCandidateSourceFactory.java | 7 +- .../TopPlatformsCandidateSourceFactory.java | 1 - .../TopTagsCandidateSourceFactory.java | 1 - .../TopTermsCandidateSourceFactory.java | 1 - .../BaseElasticSearchComponentsFactory.java | 20 +- .../CachingEntitySearchServiceFactory.java | 13 +- .../ElasticSearchBulkProcessorFactory.java | 20 +- .../ElasticSearchIndexBuilderFactory.java | 56 +- .../search/ElasticSearchServiceFactory.java | 60 
+- .../search/EntityIndexBuildersFactory.java | 40 +- .../search/EntitySearchServiceFactory.java | 1 - .../search/LineageSearchServiceFactory.java | 19 +- .../SearchDocumentTransformerFactory.java | 3 +- .../factory/search/SearchServiceFactory.java | 9 +- .../search/SettingsBuilderFactory.java | 3 +- .../search/ranker/SearchRankerFactory.java | 1 - .../search/views/ViewServiceFactory.java | 3 +- .../factory/secret/SecretServiceFactory.java | 1 - .../settings/SettingsServiceFactory.java | 3 +- .../gms/factory/telemetry/DailyReport.java | 67 +- .../factory/telemetry/MixpanelApiFactory.java | 1 - .../MixpanelMessageBuilderFactory.java | 2 - .../telemetry/ScheduledAnalyticsFactory.java | 19 +- .../gms/factory/telemetry/TelemetryUtils.java | 55 +- .../telemetry/TrackingServiceFactory.java | 14 +- ...tyChangeEventGeneratorRegistryFactory.java | 34 +- .../timeline/TimelineServiceFactory.java | 9 +- ...cSearchTimeseriesAspectServiceFactory.java | 16 +- .../TimeseriesAspectServiceFactory.java | 1 - .../gms/factory/usage/UsageClientFactory.java | 24 +- .../metadata/boot/BootstrapManager.java | 42 +- .../linkedin/metadata/boot/BootstrapStep.java | 31 +- .../boot/OnBootApplicationListener.java | 67 +- .../linkedin/metadata/boot/UpgradeStep.java | 43 +- .../dependencies/BootstrapDependency.java | 5 +- .../factories/BootstrapManagerFactory.java | 73 +- .../IngestRetentionPoliciesStepFactory.java | 12 +- .../kafka/DataHubUpgradeKafkaListener.java | 70 +- .../boot/kafka/MockDUHEDeserializer.java | 83 +- .../boot/kafka/MockDUHESerializer.java | 83 +- .../boot/steps/BackfillBrowsePathsV2Step.java | 72 +- .../boot/steps/IndexDataPlatformsStep.java | 72 +- .../IngestDataPlatformInstancesStep.java | 31 +- .../boot/steps/IngestDataPlatformsStep.java | 83 +- .../IngestDefaultGlobalSettingsStep.java | 76 +- .../boot/steps/IngestOwnershipTypesStep.java | 45 +- .../boot/steps/IngestPoliciesStep.java | 82 +- .../steps/IngestRetentionPoliciesStep.java | 50 +- .../metadata/boot/steps/IngestRolesStep.java | 68 +- .../boot/steps/IngestRootUserStep.java | 50 +- .../boot/steps/RemoveClientIdAspectStep.java | 5 +- .../steps/RestoreColumnLineageIndices.java | 158 +- .../boot/steps/RestoreDbtSiblingsIndices.java | 116 +- .../boot/steps/RestoreGlossaryIndices.java | 171 +- .../steps/UpgradeDefaultBrowsePathsStep.java | 65 +- .../boot/steps/WaitForSystemUpdateStep.java | 3 +- .../restli/server/RAPServletFactory.java | 85 +- .../restli/server/RestliHandlerServlet.java | 30 +- .../kafka/SimpleKafkaConsumerFactoryTest.java | 30 +- .../gms/factory/search/CacheTest.java | 236 +- ...ElasticSearchBulkProcessorFactoryTest.java | 19 +- ...ticSearchIndexBuilderFactoryEmptyTest.java | 34 +- ...earchIndexBuilderFactoryOverridesTest.java | 41 +- .../ElasticSearchIndexBuilderFactoryTest.java | 22 +- .../secret/SecretServiceFactoryTest.java | 33 +- .../steps/BackfillBrowsePathsV2StepTest.java | 208 +- .../IngestDataPlatformInstancesStepTest.java | 118 +- .../IngestDefaultGlobalSettingsStepTest.java | 92 +- .../RestoreColumnLineageIndicesTest.java | 415 +- .../steps/RestoreGlossaryIndicesTest.java | 447 ++- .../UpgradeDefaultBrowsePathsStepTest.java | 398 +- .../telemetry/TelemetryUtilsTest.java | 10 +- .../datahub/graphql/GraphQLController.java | 158 +- .../datahub/graphql/GraphiQLController.java | 7 +- .../datahub/graphql/SpringQueryContext.java | 6 +- .../openapi-analytics-servlet/build.gradle | 2 - .../config/OpenapiAnalyticsConfig.java | 8 +- .../delegates/DatahubUsageEventsImpl.java | 62 +- .../OpenAPIAnalyticsTestConfiguration.java | 
61 +- .../delegates/DatahubUsageEventsImplTest.java | 39 +- .../openapi-entity-servlet/build.gradle | 4 +- .../datahubproject/CustomSpringCodegen.java | 54 +- .../delegates/EntityApiDelegateImpl.java | 1236 +++--- .../openapi/util/OpenApiEntitiesUtil.java | 535 +-- .../OpenAPIEntityTestConfiguration.java | 195 +- .../delegates/EntityApiDelegateImplTest.java | 439 ++- .../openapi/util/OpenApiEntitiesUtilTest.java | 67 +- .../GlobalControllerExceptionHandler.java | 1 - .../openapi/config/SpringWebConfig.java | 24 +- .../StringToChangeCategoryConverter.java | 27 +- .../openapi/dto/RollbackRunResultDto.java | 1 - .../openapi/dto/UpsertAspectRequest.java | 17 +- .../openapi/dto/UrnResponseMap.java | 1 - .../openapi/entities/EntitiesController.java | 226 +- .../openapi/health/HealthCheckController.java | 51 +- .../openapi/health/HealthController.java | 1 - .../elastic/OperationsController.java | 43 +- .../entities/PlatformEntitiesController.java | 38 +- .../RelationshipsController.java | 148 +- .../openapi/timeline/TimelineController.java | 25 +- .../openapi/util/ElasticsearchUtils.java | 3 +- .../openapi/util/MappingUtil.java | 387 +- .../openapi/util/ReflectionCache.java | 224 +- .../java/entities/EntitiesControllerTest.java | 234 +- .../src/test/java/mock/MockEntityService.java | 125 +- .../plugins/auth/configuration/AuthParam.java | 21 +- .../auth/configuration/AuthPluginConfig.java | 15 +- .../AuthenticatorPluginConfig.java | 12 +- .../configuration/AuthorizerPluginConfig.java | 12 +- .../provider/AuthPluginConfigProvider.java | 4 +- .../AuthenticatorPluginConfigProvider.java | 21 +- .../AuthorizerPluginConfigProvider.java | 21 +- .../plugins/common/ConfigValidationUtils.java | 33 +- .../datahub/plugins/common/PluginConfig.java | 37 +- .../plugins/common/PluginConfigProvider.java | 1 - .../common/PluginPermissionManager.java | 9 +- .../datahub/plugins/common/PluginType.java | 12 +- .../datahub/plugins/common/SecurityMode.java | 29 +- .../datahub/plugins/common/YamlMapper.java | 9 +- .../datahub/plugins/configuration/Config.java | 20 +- .../plugins/configuration/ConfigProvider.java | 13 +- .../plugins/configuration/PluginConfig.java | 34 +- .../plugins/factory/PluginConfigFactory.java | 7 +- .../plugins/loader/IsolatedClassLoader.java | 63 +- .../datahub/plugins/loader/JarExtractor.java | 7 +- .../loader/PluginPermissionManagerImpl.java | 5 +- .../com/datahub/plugins/auth/TestConfig.java | 9 +- .../plugins/auth/TestConfigProvider.java | 65 +- .../auth/TestConfigValidationUtils.java | 1 - .../plugins/auth/TestIsolatedClassLoader.java | 138 +- .../plugins/auth/TestPluginConfigFactory.java | 21 +- .../auth/TestPluginPermissionManager.java | 43 +- .../plugins/test/TestAuthenticator.java | 17 +- .../datahub/plugins/test/TestAuthorizer.java | 12 +- .../test/TestLenientModeAuthenticator.java | 9 +- ...linkedin.analytics.analytics.restspec.json | 2 + .../com.linkedin.entity.aspects.restspec.json | 6 + ...com.linkedin.entity.entities.restspec.json | 26 +- ...m.linkedin.entity.entitiesV2.restspec.json | 3 + ...n.entity.entitiesVersionedV2.restspec.json | 2 + .../com.linkedin.entity.runs.restspec.json | 4 + ...nkedin.lineage.relationships.restspec.json | 4 + ...nkedin.operations.operations.restspec.json | 5 + ...m.linkedin.platform.platform.restspec.json | 2 + ...om.linkedin.usage.usageStats.restspec.json | 4 + ...linkedin.analytics.analytics.snapshot.json | 2 + .../com.linkedin.entity.aspects.snapshot.json | 6 + ...com.linkedin.entity.entities.snapshot.json | 26 +- 
...m.linkedin.entity.entitiesV2.snapshot.json | 3 + ...n.entity.entitiesVersionedV2.snapshot.json | 2 + .../com.linkedin.entity.runs.snapshot.json | 4 + ...nkedin.lineage.relationships.snapshot.json | 4 + ...nkedin.operations.operations.snapshot.json | 5 + ...m.linkedin.platform.platform.snapshot.json | 2 + ...om.linkedin.usage.usageStats.snapshot.json | 4 + .../main/java/com/linkedin/BatchGetUtils.java | 77 +- .../linkedin/common/client/BaseClient.java | 37 +- .../linkedin/common/client/ClientCache.java | 208 +- .../linkedin/entity/client/EntityClient.java | 378 +- .../entity/client/EntityClientCache.java | 235 +- .../entity/client/RestliEntityClient.java | 583 ++- .../entity/client/SystemEntityClient.java | 142 +- .../client/SystemRestliEntityClient.java | 28 +- .../java/com/linkedin/usage/UsageClient.java | 98 +- .../com/linkedin/usage/UsageClientCache.java | 92 +- .../common/client/BaseClientTest.java | 100 +- .../metadata/filter/RestliLoggingFilter.java | 8 +- .../resources/analytics/Analytics.java | 45 +- .../resources/entity/AspectResource.java | 293 +- .../entity/BatchIngestionRunResource.java | 492 ++- .../resources/entity/EntityResource.java | 837 ++-- .../resources/entity/EntityV2Resource.java | 119 +- .../entity/EntityVersionedV2Resource.java | 86 +- .../resources/entity/ResourceUtils.java | 9 +- .../resources/lineage/Relationships.java | 153 +- .../operations/OperationsResource.java | 206 +- .../metadata/resources/operations/Utils.java | 36 +- .../resources/platform/PlatformResource.java | 32 +- .../resources/restli/RestliConstants.java | 2 +- .../resources/restli/RestliUtils.java | 49 +- .../metadata/resources/usage/UsageStats.java | 313 +- .../resources/entity/AspectResourceTest.java | 73 +- .../operations/OperationsResourceTest.java | 124 +- .../mock/MockTimeseriesAspectService.java | 53 +- .../schema-registry-api/build.gradle | 2 - .../generated/CompatibilityCheckResponse.java | 45 +- .../openapi/generated/Config.java | 55 +- .../generated/ConfigUpdateRequest.java | 55 +- .../openapi/generated/ErrorMessage.java | 43 +- .../openapi/generated/Mode.java | 49 +- .../openapi/generated/ModeUpdateRequest.java | 49 +- .../generated/RegisterSchemaRequest.java | 71 +- .../generated/RegisterSchemaResponse.java | 37 +- .../openapi/generated/Schema.java | 75 +- .../openapi/generated/SchemaReference.java | 63 +- .../SchemaRegistryServerVersion.java | 43 +- .../openapi/generated/SchemaString.java | 64 +- .../openapi/generated/ServerClusterId.java | 43 +- .../openapi/generated/SubjectVersion.java | 43 +- .../java/io/swagger/api/CompatibilityApi.java | 311 +- .../api/CompatibilityApiController.java | 40 +- .../main/java/io/swagger/api/ConfigApi.java | 614 ++- .../io/swagger/api/ConfigApiController.java | 40 +- .../main/java/io/swagger/api/ContextsApi.java | 131 +- .../io/swagger/api/ContextsApiController.java | 40 +- .../main/java/io/swagger/api/DefaultApi.java | 162 +- .../io/swagger/api/DefaultApiController.java | 40 +- .../src/main/java/io/swagger/api/ModeApi.java | 529 ++- .../io/swagger/api/ModeApiController.java | 40 +- .../main/java/io/swagger/api/SchemasApi.java | 653 +++- .../io/swagger/api/SchemasApiController.java | 40 +- .../main/java/io/swagger/api/SubjectsApi.java | 1051 +++-- .../io/swagger/api/SubjectsApiController.java | 40 +- .../src/main/java/io/swagger/api/V1Api.java | 114 +- .../java/io/swagger/api/V1ApiController.java | 40 +- .../registry/SchemaRegistryController.java | 126 +- .../config/SpringWebSchemaRegistryConfig.java | 8 +- 
.../openapi/test/OpenAPISpringTestServer.java | 1 - .../OpenAPISpringTestServerConfiguration.java | 10 +- .../test/SchemaRegistryControllerTest.java | 82 +- ...maRegistryControllerTestConfiguration.java | 5 +- .../DataHubUsageEventConstants.java | 3 +- .../datahubusage/DataHubUsageEventType.java | 1 - .../linkedin/metadata/entity/AspectUtils.java | 86 +- .../metadata/entity/DeleteEntityService.java | 787 ++-- .../metadata/entity/DeleteEntityUtils.java | 162 +- .../metadata/entity/EntityService.java | 188 +- .../metadata/entity/IngestProposalResult.java | 1 - .../metadata/entity/IngestResult.java | 14 +- .../linkedin/metadata/entity/ListResult.java | 3 +- .../metadata/entity/RetentionService.java | 136 +- .../metadata/entity/RollbackResult.java | 2 - .../metadata/entity/RollbackRunResult.java | 1 - .../metadata/entity/UpdateAspectResult.java | 4 +- .../restoreindices/RestoreIndicesArgs.java | 86 +- .../restoreindices/RestoreIndicesResult.java | 18 +- .../retention/BulkApplyRetentionArgs.java | 10 +- .../retention/BulkApplyRetentionResult.java | 18 +- .../transactions/AbstractBatchItem.java | 124 +- .../entity/transactions/AspectsBatch.java | 26 +- .../com/linkedin/metadata/graph/Edge.java | 28 +- .../linkedin/metadata/graph/GraphClient.java | 18 +- .../linkedin/metadata/graph/GraphFilters.java | 1 - .../metadata/graph/GraphIndexUtils.java | 94 +- .../linkedin/metadata/graph/GraphService.java | 296 +- .../metadata/graph/RelatedEntity.java | 8 +- .../RecommendationsService.java | 46 +- .../DomainsCandidateSource.java | 5 +- .../EntitySearchAggregationSource.java | 87 +- .../RecentlySearchedSource.java | 51 +- .../candidatesource/RecommendationSource.java | 33 +- .../candidatesource/RecommendationUtils.java | 8 +- .../candidatesource/TopPlatformsSource.java | 39 +- .../candidatesource/TopTagsSource.java | 4 +- .../candidatesource/TopTermsSource.java | 4 +- .../ranker/RecommendationModuleRanker.java | 8 +- .../ranker/SimpleRecommendationRanker.java | 8 +- .../registry/SchemaRegistryService.java | 3 +- .../registry/SchemaRegistryServiceImpl.java | 55 +- .../metadata/resource/ResourceReference.java | 13 +- .../metadata/resource/SubResourceType.java | 4 +- .../metadata/search/EntitySearchService.java | 153 +- .../metadata/search/utils/QueryUtils.java | 75 +- .../metadata/secret/SecretService.java | 7 +- .../metadata/service/BaseService.java | 81 +- .../metadata/service/DataProductService.java | 266 +- .../metadata/service/DomainService.java | 169 +- .../metadata/service/GlossaryTermService.java | 260 +- .../metadata/service/LineageService.java | 474 ++- .../metadata/service/OwnerService.java | 106 +- .../service/OwnershipTypeService.java | 126 +- .../metadata/service/QueryService.java | 124 +- .../metadata/service/SettingsService.java | 95 +- .../linkedin/metadata/service/TagService.java | 241 +- .../metadata/service/ViewService.java | 108 +- .../metadata/shared/ValidationUtils.java | 152 +- .../systemmetadata/SystemMetadataService.java | 10 +- .../metadata/timeline/SemanticVersion.java | 16 +- .../metadata/timeline/TimelineService.java | 7 +- .../timeline/data/ChangeCategory.java | 11 +- .../metadata/timeline/data/ChangeEvent.java | 50 +- .../timeline/data/ChangeOperation.java | 36 +- .../timeline/data/ChangeTransaction.java | 6 +- .../timeline/data/PatchOperation.java | 1 - .../timeline/data/SemanticChangeType.java | 6 +- .../timeline/data/SemanticDifference.java | 3 +- .../timeseries/TimeseriesAspectService.java | 101 +- .../metadata/service/DomainServiceTest.java | 351 +- 
.../service/GlossaryTermServiceTest.java | 579 +-- .../metadata/service/LineageServiceTest.java | 376 +- .../metadata/service/OwnerServiceTest.java | 255 +- .../service/OwnershipTypeServiceTest.java | 423 +-- .../metadata/service/QueryServiceTest.java | 660 ++-- .../metadata/service/SettingsServiceTest.java | 345 +- .../metadata/service/TagServiceTest.java | 497 +-- .../metadata/service/ViewServiceTest.java | 664 ++-- .../java/com/datahub/gms/servlet/Config.java | 92 +- .../gms/servlet/ConfigSearchExport.java | 198 +- .../java/com/datahub/gms/util/CSVWriter.java | 66 +- .../authorization/PoliciesConfig.java | 1005 ++--- .../restli/DefaultRestliClientFactory.java | 56 +- .../linkedin/metadata/restli/RestliUtil.java | 35 +- .../metadata/utils/AuditStampUtils.java | 20 +- .../linkedin/metadata/utils/BrowseUtil.java | 28 +- .../metadata/utils/ConcurrencyUtils.java | 75 +- .../utils/DataPlatformInstanceUtils.java | 22 +- .../metadata/utils/EntityKeyUtils.java | 103 +- .../metadata/utils/GenericRecordUtils.java | 31 +- .../metadata/utils/IngestionUtils.java | 22 +- .../linkedin/metadata/utils/PegasusUtils.java | 71 +- .../linkedin/metadata/utils/SearchUtil.java | 107 +- .../metadata/utils/SystemMetadataUtils.java | 13 +- .../utils/elasticsearch/IndexConvention.java | 11 +- .../elasticsearch/IndexConventionImpl.java | 31 +- .../exception/UnsupportedGraphEntities.java | 4 +- .../metadata/utils/log/LogMessageFilter.java | 11 +- .../metadata/utils/metrics/MetricUtils.java | 10 +- .../metadata/utils/EntityKeyUtilsTest.java | 27 +- .../metadata/utils/IngestionUtilsTest.java | 14 +- .../metadata/utils/SearchUtilTest.java | 42 +- .../IndexConventionImplTest.java | 48 +- .../src/main/java/mock/MockAspectSpec.java | 18 +- .../main/java/mock/MockEntityRegistry.java | 2 - .../src/main/java/mock/MockEntitySpec.java | 28 +- .../java/com/datahub/utils/TestUtils.java | 5 +- .../com/datahub/test/testing/urn/BarUrn.java | 7 +- .../test/testing/urn/BarUrnCoercer.java | 4 +- .../test/testing/urn/BaseUrnCoercer.java | 4 +- .../com/datahub/test/testing/urn/BazUrn.java | 7 +- .../test/testing/urn/BazUrnCoercer.java | 4 +- .../com/datahub/test/testing/urn/FooUrn.java | 7 +- .../test/testing/urn/FooUrnCoercer.java | 4 +- .../datahub/test/testing/urn/PizzaUrn.java | 7 +- .../test/testing/urn/PizzaUrnCoercer.java | 4 +- .../testing/urn/SingleAspectEntityUrn.java | 1 - 1711 files changed, 91903 insertions(+), 71109 deletions(-) delete mode 100644 gradle/checkstyle/checkstyle.xml delete mode 100644 gradle/checkstyle/suppressions.xml diff --git a/build.gradle b/build.gradle index c1278a6dab1a0..f5e5403e822e7 100644 --- a/build.gradle +++ b/build.gradle @@ -21,6 +21,7 @@ buildscript { ext.kafkaVersion = '2.3.0' ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '12.16.1' + ext.googleJavaFormatVersion = '1.18.1' ext.docker_registry = 'linkedin' @@ -42,6 +43,7 @@ plugins { id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' id 'com.github.johnrengelman.shadow' version '6.1.0' id 'com.palantir.docker' version '0.35.0' apply false + id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" } @@ -225,13 +227,11 @@ project.ext.externalDependency = [ 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' - ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' - apply plugin: 'checkstyle' // apply plugin: 
'org.gradlex.java-ecosystem-capabilities' } @@ -253,6 +253,7 @@ subprojects { apply plugin: 'maven-publish' apply plugin: 'com.gorylenko.gradle-git-properties' + apply plugin: 'com.diffplug.spotless' gitProperties { keys = ['git.commit.id','git.commit.id.describe','git.commit.time'] @@ -266,6 +267,7 @@ subprojects { plugins.withType(JavaPlugin) { dependencies { constraints { + implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") implementation('io.netty:netty-all:4.1.100.Final') implementation('org.apache.commons:commons-compress:1.21') implementation('org.apache.velocity:velocity-engine-core:2.3') @@ -274,13 +276,32 @@ subprojects { implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion") } } - - checkstyle { - configDirectory = file("${project.rootDir}/gradle/checkstyle") - sourceSets = [ getProject().sourceSets.main, getProject().sourceSets.test ] - toolVersion = "8.0" - maxWarnings = 0 - ignoreFailures = false + spotless { + java { + googleJavaFormat() + target project.fileTree(project.projectDir) { + include '**/*.java' + exclude 'build/**/*.java' + exclude '**/generated/**/*.*' + exclude '**/mainGeneratedDataTemplate/**/*.*' + exclude '**/mainGeneratedRest/**/*.*' + } + } + } + afterEvaluate { + def spotlessJavaTask = tasks.findByName('spotlessJava') + def processTask = tasks.findByName('processResources') + if (processTask != null) { + spotlessJavaTask.dependsOn processTask + } + def compileJavaTask = tasks.findByName('compileJava') + if (compileJavaTask != null) { + spotlessJavaTask.dependsOn compileJavaTask + } + // TODO - Do not run this in CI. How? + // tasks.withType(JavaCompile) { + // finalizedBy(tasks.findByName('spotlessApply')) + // } } } diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index fe04c3629fe58..ef33bde8f61d3 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -1,5 +1,9 @@ package auth; +import static auth.AuthUtils.*; +import static auth.sso.oidc.OidcConfigs.*; +import static utils.ConfigUtil.*; + import auth.sso.SsoConfigs; import auth.sso.SsoManager; import auth.sso.oidc.OidcConfigs; @@ -18,12 +22,10 @@ import com.linkedin.util.Configuration; import config.ConfigurationProvider; import controllers.SsoCallbackController; - import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; - import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; @@ -42,205 +44,227 @@ import play.cache.SyncCacheApi; import utils.ConfigUtil; -import static auth.AuthUtils.*; -import static auth.sso.oidc.OidcConfigs.*; -import static utils.ConfigUtil.*; +/** Responsible for configuring, validating, and providing authentication related components. */ +public class AuthModule extends AbstractModule { + /** + * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration + * value provides a stable encryption base from which to derive the encryption key. + * + * <p>We hash this value (SHA256), then take the first 16 bytes as the AES key. + */ + private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; -/** - * Responsible for configuring, validating, and providing authentication related components. 
- */
-public class AuthModule {
+  private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider";
+  private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval";
+  private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries";
+
+  private final com.typesafe.config.Config _configs;
+
+  public AuthModule(final Environment environment, final com.typesafe.config.Config configs) {
+    _configs = configs;
+  }
+  @Override
+  protected void configure() {
    /**
-     * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration
-     * value provides a stable encryption base from which to derive the encryption key.
-     *
-     * We hash this value (SHA256), then take the first 16 bytes as the AES key.
+     * In Pac4J, you are given the option to store the profiles of authenticated users in either (i)
+     * PlayCacheSessionStore, which saves your data in the Play cache, or (ii)
+     * PlayCookieSessionStore, which saves your data in the Play session cookie. However, there is a
+     * problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the
+     * Pac4j profile in a cookie: whenever the profile returned by Pac4j is greater than 4096
+     * characters, the response will be rejected by the browser. We default to
+     * PlayCookieSessionStore so that the datahub-frontend container remains a stateless service.
     */
-    private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key";
-    private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider";
-    private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval";
-    private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries";
+    String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF);
-    private final com.typesafe.config.Config _configs;
-
-    public AuthModule(final Environment environment, final com.typesafe.config.Config configs) {
-        _configs = configs;
+    if (sessionStoreProvider.equals("PlayCacheSessionStore")) {
+      final PlayCacheSessionStore playCacheSessionStore =
+          new PlayCacheSessionStore(getProvider(SyncCacheApi.class));
+      bind(SessionStore.class).toInstance(playCacheSessionStore);
+      bind(PlaySessionStore.class).toInstance(playCacheSessionStore);
+    } else {
+      PlayCookieSessionStore playCacheCookieStore;
+      try {
+        // To generate a valid encryption key from an input value, we first
+        // hash the input to generate a fixed-length string. Then, we convert
+        // it to hex and slice the first 16 bytes, because AES key length must strictly
+        // have a specific length.
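+        // (The 16 hex characters encode to 16 ASCII bytes, i.e. a 128-bit AES key.)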
+ final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); + final String aesKeyHash = + DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); + final String aesEncryptionKey = aesKeyHash.substring(0, 16); + playCacheCookieStore = + new PlayCookieSessionStore(new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); + } catch (Exception e) { + throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); + } + bind(SessionStore.class).toInstance(playCacheCookieStore); + bind(PlaySessionStore.class).toInstance(playCacheCookieStore); } - @Override - protected void configure() { - /** - * In Pac4J, you are given the option to store the profiles of authenticated users in either - * (i) PlayCacheSessionStore - saves your data in the Play cache or - * (ii) PlayCookieSessionStore saves your data in the Play session cookie - * However there is problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j profile in cookie. - * Whenever the profile returned by Pac4j is greater than 4096 characters, the response will be rejected by the browser. - * Default to PlayCacheCookieStore so that datahub-frontend container remains as a stateless service - */ - String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - - if (sessionStoreProvider.equals("PlayCacheSessionStore")) { - final PlayCacheSessionStore playCacheSessionStore = new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); - bind(SessionStore.class).toInstance(playCacheSessionStore); - bind(PlaySessionStore.class).toInstance(playCacheSessionStore); - } else { - PlayCookieSessionStore playCacheCookieStore; - try { - // To generate a valid encryption key from an input value, we first - // hash the input to generate a fixed-length string. Then, we convert - // it to hex and slice the first 16 bytes, because AES key length must strictly - // have a specific length. - final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); - final String aesKeyHash = DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); - final String aesEncryptionKey = aesKeyHash.substring(0, 16); - playCacheCookieStore = new PlayCookieSessionStore( - new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); - } catch (Exception e) { - throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); - } - bind(SessionStore.class).toInstance(playCacheCookieStore); - bind(PlaySessionStore.class).toInstance(playCacheCookieStore); - } - - try { - bind(SsoCallbackController.class).toConstructor(SsoCallbackController.class.getConstructor( - SsoManager.class, - Authentication.class, - SystemEntityClient.class, - AuthServiceClient.class, - com.typesafe.config.Config.class)); - } catch (NoSuchMethodException | SecurityException e) { - throw new RuntimeException("Failed to bind to SsoCallbackController. Cannot find constructor", e); - } - // logout - final LogoutController logoutController = new LogoutController(); - logoutController.setDefaultUrl("/"); - bind(LogoutController.class).toInstance(logoutController); + try { + bind(SsoCallbackController.class) + .toConstructor( + SsoCallbackController.class.getConstructor( + SsoManager.class, + Authentication.class, + SystemEntityClient.class, + AuthServiceClient.class, + com.typesafe.config.Config.class)); + } catch (NoSuchMethodException | SecurityException e) { + throw new RuntimeException( + "Failed to bind to SsoCallbackController. 
Cannot find constructor", e); } + // logout + final LogoutController logoutController = new LogoutController(); + logoutController.setDefaultUrl("/"); + bind(LogoutController.class).toInstance(logoutController); + } - @Provides @Singleton - protected Config provideConfig(SsoManager ssoManager) { - if (ssoManager.isSsoEnabled()) { - final Clients clients = new Clients(); - final List<Client> clientList = new ArrayList<>(); - clientList.add(ssoManager.getSsoProvider().client()); - clients.setClients(clientList); - final Config config = new Config(clients); - config.setHttpActionAdapter(new PlayHttpActionAdapter()); - return config; - } - return new Config(); + @Provides + @Singleton + protected Config provideConfig(SsoManager ssoManager) { + if (ssoManager.isSsoEnabled()) { + final Clients clients = new Clients(); + final List<Client> clientList = new ArrayList<>(); + clientList.add(ssoManager.getSsoProvider().client()); + clients.setClients(clientList); + final Config config = new Config(clients); + config.setHttpActionAdapter(new PlayHttpActionAdapter()); + return config; } + return new Config(); + } - @Provides @Singleton - protected SsoManager provideSsoManager() { - SsoManager manager = new SsoManager(); - // Seed the SSO manager with a default SSO provider. - if (isSsoEnabled(_configs)) { - SsoConfigs ssoConfigs = new SsoConfigs(_configs); - if (ssoConfigs.isOidcEnabled()) { - // Register OIDC Provider, add to list of managers. - OidcConfigs oidcConfigs = new OidcConfigs(_configs); - OidcProvider oidcProvider = new OidcProvider(oidcConfigs); - // Set the default SSO provider to this OIDC client. - manager.setSsoProvider(oidcProvider); - } - } - return manager; + @Provides + @Singleton + protected SsoManager provideSsoManager() { + SsoManager manager = new SsoManager(); + // Seed the SSO manager with a default SSO provider. + if (isSsoEnabled(_configs)) { + SsoConfigs ssoConfigs = new SsoConfigs(_configs); + if (ssoConfigs.isOidcEnabled()) { + // Register OIDC Provider, add to list of managers. + OidcConfigs oidcConfigs = new OidcConfigs(_configs); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + // Set the default SSO provider to this OIDC client. + manager.setSsoProvider(oidcProvider); + } } + return manager; + } - @Provides - @Singleton - protected Authentication provideSystemAuthentication() { - // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service. - String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); - String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); - final Actor systemActor = - new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. - return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret), - Collections.emptyMap()); - } + @Provides + @Singleton + protected Authentication provideSystemAuthentication() { + // Returns an instance of Authentication used to authenticate system initiated calls to Metadata + // Service. + String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); + String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); + final Actor systemActor = + new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. 
+ return new Authentication( + systemActor, + String.format("Basic %s:%s", systemClientId, systemSecret), + Collections.emptyMap()); + } - @Provides - @Singleton - protected ConfigurationProvider provideConfigurationProvider() { - AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ConfigurationProvider.class); - return context.getBean(ConfigurationProvider.class); - } + @Provides + @Singleton + protected ConfigurationProvider provideConfigurationProvider() { + AnnotationConfigApplicationContext context = + new AnnotationConfigApplicationContext(ConfigurationProvider.class); + return context.getBean(ConfigurationProvider.class); + } - @Provides - @Singleton - protected SystemEntityClient provideEntityClient(final Authentication systemAuthentication, - final ConfigurationProvider configurationProvider) { - return new SystemRestliEntityClient(buildRestliClient(), - new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), - _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); - } + @Provides + @Singleton + protected SystemEntityClient provideEntityClient( + final Authentication systemAuthentication, + final ConfigurationProvider configurationProvider) { + return new SystemRestliEntityClient( + buildRestliClient(), + new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), + _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); + } - @Provides - @Singleton - protected CloseableHttpClient provideHttpClient() { - return HttpClients.createDefault(); - } + @Provides + @Singleton + protected CloseableHttpClient provideHttpClient() { + return HttpClients.createDefault(); + } - @Provides - @Singleton - protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) { - // Init a GMS auth client - final String metadataServiceHost = - _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) - : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); - - final int metadataServicePort = - _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) - : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - - final Boolean metadataServiceUseSsl = - _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? _configs.getBoolean( - METADATA_SERVICE_USE_SSL_CONFIG_PATH) - : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); - - return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, - systemAuthentication, httpClient); - } + @Provides + @Singleton + protected AuthServiceClient provideAuthClient( + Authentication systemAuthentication, CloseableHttpClient httpClient) { + // Init a GMS auth client + final String metadataServiceHost = + _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) + ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) + : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); + + final int metadataServicePort = + _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) + ? 
_configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) + : Integer.parseInt( + Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - private com.linkedin.restli.client.Client buildRestliClient() { - final String metadataServiceHost = utils.ConfigUtil.getString( + final Boolean metadataServiceUseSsl = + _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + ? _configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + : Boolean.parseBoolean( + Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + + return new AuthServiceClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + systemAuthentication, + httpClient); + } + + private com.linkedin.restli.client.Client buildRestliClient() { + final String metadataServiceHost = + utils.ConfigUtil.getString( _configs, METADATA_SERVICE_HOST_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = utils.ConfigUtil.getInt( + final int metadataServicePort = + utils.ConfigUtil.getInt( _configs, utils.ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = utils.ConfigUtil.getBoolean( + final boolean metadataServiceUseSsl = + utils.ConfigUtil.getBoolean( _configs, utils.ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); - final String metadataServiceSslProtocol = utils.ConfigUtil.getString( + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); + final String metadataServiceSslProtocol = + utils.ConfigUtil.getString( _configs, utils.ConfigUtil.METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL - ); - return DefaultRestliClientFactory.getRestLiClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, metadataServiceSslProtocol); - } + ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL); + return DefaultRestliClientFactory.getRestLiClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + metadataServiceSslProtocol); + } - protected boolean isSsoEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); - } + protected boolean isSsoEnabled(com.typesafe.config.Config configs) { + // If OIDC is enabled, we infer SSO to be enabled. + return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + } - protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( + protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { + // If OIDC is enabled, we infer SSO to be enabled. 
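The providers in this module lean repeatedly on the hasPath(...) ? get(...) : default idiom reformatted above. A self-contained sketch of the idiom against a throwaway Typesafe Config instance (the paths and fallback values here are hypothetical, not the module's actual defaults):

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class ConfigFallbackSketch {
  public static void main(String[] args) {
    Config configs = ConfigFactory.parseString("metadataService.host = \"localhost\"");
    // Present path: the configured value wins.
    String host =
        configs.hasPath("metadataService.host")
            ? configs.getString("metadataService.host")
            : "datahub-gms";
    // Missing path: fall back to the default.
    int port =
        configs.hasPath("metadataService.port") ? configs.getInt("metadataService.port") : 8080;
    System.out.println(host + ":" + port); // -> localhost:8080
  }
}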
+ return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals( Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH))); - } + } } - diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 386eee725c83d..283a2164584b9 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -1,137 +1,136 @@ package auth; import com.linkedin.common.urn.CorpuserUrn; -import lombok.extern.slf4j.Slf4j; -import play.mvc.Http; - -import javax.annotation.Nonnull; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import play.mvc.Http; @Slf4j public class AuthUtils { - /** - * The config path that determines whether Metadata Service Authentication is enabled. - * - * When enabled, the frontend server will proxy requests to the Metadata Service without requiring them to have a valid - * frontend-issued Session Cookie. This effectively means delegating the act of authentication to the Metadata Service. It - * is critical that if Metadata Service authentication is enabled at the frontend service layer, it is also enabled in the - * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. - * - * When disabled, the frontend server will require that all requests have a valid Session Cookie associated with them. Otherwise, - * requests will be denied with an Unauthorized error. - */ - public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = "metadataService.auth.enabled"; - - /** - * The attribute inside session cookie representing a GMS-issued access token - */ - public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; - - /** - * An ID used to identify system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; - - /** - * An Secret used to authenticate system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; - - /** - * Cookie name for redirect url that is manually separated from the session to reduce size - */ - public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; - - public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); - - public static final String LOGIN_ROUTE = "/login"; - public static final String USER_NAME = "username"; - public static final String PASSWORD = "password"; - public static final String ACTOR = "actor"; - public static final String ACCESS_TOKEN = "token"; - public static final String FULL_NAME = "fullName"; - public static final String EMAIL = "email"; - public static final String TITLE = "title"; - public static final String INVITE_TOKEN = "inviteToken"; - public static final String RESET_TOKEN = "resetToken"; - - /** - * Determines whether the inbound request should be forward to downstream Metadata Service. Today, this simply - * checks for the presence of an "Authorization" header or the presence of a valid session cookie issued - * by the frontend. - * - * Note that this method DOES NOT actually verify the authentication token of an inbound request. That will - * be handled by the downstream Metadata Service. Until then, the request should be treated as UNAUTHENTICATED. 
- * - * Returns true if the request is eligible to be forwarded to GMS, false otherwise. - */ - public static boolean isEligibleForForwarding(Http.Request req) { - return hasValidSessionCookie(req) || hasAuthHeader(req); + /** + * The config path that determines whether Metadata Service Authentication is enabled. + * + * <p>When enabled, the frontend server will proxy requests to the Metadata Service without + * requiring them to have a valid frontend-issued Session Cookie. This effectively means + * delegating the act of authentication to the Metadata Service. It is critical that if Metadata + * Service authentication is enabled at the frontend service layer, it is also enabled in the + * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. + * + * <p>When disabled, the frontend server will require that all requests have a valid Session + * Cookie associated with them. Otherwise, requests will be denied with an Unauthorized error. + */ + public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = + "metadataService.auth.enabled"; + + /** The attribute inside session cookie representing a GMS-issued access token */ + public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; + + /** + * An ID used to identify system callers that are internal to DataHub. Provided via configuration. + */ + public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; + + /** + * An Secret used to authenticate system callers that are internal to DataHub. Provided via + * configuration. + */ + public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; + + /** Cookie name for redirect url that is manually separated from the session to reduce size */ + public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; + + public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); + + public static final String LOGIN_ROUTE = "/login"; + public static final String USER_NAME = "username"; + public static final String PASSWORD = "password"; + public static final String ACTOR = "actor"; + public static final String ACCESS_TOKEN = "token"; + public static final String FULL_NAME = "fullName"; + public static final String EMAIL = "email"; + public static final String TITLE = "title"; + public static final String INVITE_TOKEN = "inviteToken"; + public static final String RESET_TOKEN = "resetToken"; + + /** + * Determines whether the inbound request should be forward to downstream Metadata Service. Today, + * this simply checks for the presence of an "Authorization" header or the presence of a valid + * session cookie issued by the frontend. + * + * <p>Note that this method DOES NOT actually verify the authentication token of an inbound + * request. That will be handled by the downstream Metadata Service. Until then, the request + * should be treated as UNAUTHENTICATED. + * + * <p>Returns true if the request is eligible to be forwarded to GMS, false otherwise. + */ + public static boolean isEligibleForForwarding(Http.Request req) { + return hasValidSessionCookie(req) || hasAuthHeader(req); + } + + /** + * Returns true if a request has a valid session cookie issued by the frontend server. Note that + * this DOES NOT verify whether the token within the session cookie will be accepted by the + * downstream GMS service. + * + * <p>Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, + * as well as their agreement to determine authentication status. 
+ */ + public static boolean hasValidSessionCookie(final Http.Request req) { + Map<String, String> sessionCookie = req.session().data(); + return sessionCookie.containsKey(ACCESS_TOKEN) + && sessionCookie.containsKey(ACTOR) + && req.getCookie(ACTOR).isPresent() + && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); + } + + /** Returns true if a request includes the Authorization header, false otherwise */ + public static boolean hasAuthHeader(final Http.Request req) { + return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); + } + + /** + * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. + * + * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub" + * @param ttlInHours the number of hours until the actor cookie expires after being set + */ + public static Http.Cookie createActorCookie( + @Nonnull final String actorUrn, + @Nonnull final Integer ttlInHours, + @Nonnull final String sameSite, + final boolean isSecure) { + return Http.Cookie.builder(ACTOR, actorUrn) + .withHttpOnly(false) + .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) + .withSameSite(convertSameSiteValue(sameSite)) + .withSecure(isSecure) + .build(); + } + + public static Map<String, String> createSessionMap( + final String userUrnStr, final String accessToken) { + final Map<String, String> sessionAttributes = new HashMap<>(); + sessionAttributes.put(ACTOR, userUrnStr); + sessionAttributes.put(ACCESS_TOKEN, accessToken); + return sessionAttributes; + } + + private AuthUtils() {} + + private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { + try { + return Http.Cookie.SameSite.valueOf(sameSiteValue); + } catch (IllegalArgumentException e) { + log.warn( + String.format( + "Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), + e); + return Http.Cookie.SameSite.LAX; } - - /** - * Returns true if a request has a valid session cookie issued by the frontend server. - * Note that this DOES NOT verify whether the token within the session cookie will be accepted - * by the downstream GMS service. - * - * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, - * as well as their agreement to determine authentication status. - */ - public static boolean hasValidSessionCookie(final Http.Request req) { - Map<String, String> sessionCookie = req.session().data(); - return sessionCookie.containsKey(ACCESS_TOKEN) - && sessionCookie.containsKey(ACTOR) - && req.getCookie(ACTOR).isPresent() - && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); - } - - /** - * Returns true if a request includes the Authorization header, false otherwise - */ - public static boolean hasAuthHeader(final Http.Request req) { - return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); - } - - /** - * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. - * - * @param actorUrn the urn of the authenticated actor, e.g. 
"urn:li:corpuser:datahub" - * @param ttlInHours the number of hours until the actor cookie expires after being set - */ - public static Http.Cookie createActorCookie( - @Nonnull final String actorUrn, - @Nonnull final Integer ttlInHours, - @Nonnull final String sameSite, - final boolean isSecure - ) { - return Http.Cookie.builder(ACTOR, actorUrn) - .withHttpOnly(false) - .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) - .withSameSite(convertSameSiteValue(sameSite)) - .withSecure(isSecure) - .build(); - } - - public static Map<String, String> createSessionMap(final String userUrnStr, final String accessToken) { - final Map<String, String> sessionAttributes = new HashMap<>(); - sessionAttributes.put(ACTOR, userUrnStr); - sessionAttributes.put(ACCESS_TOKEN, accessToken); - return sessionAttributes; - } - - private AuthUtils() { } - - private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { - try { - return Http.Cookie.SameSite.valueOf(sameSiteValue); - } catch (IllegalArgumentException e) { - log.warn(String.format("Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), e); - return Http.Cookie.SameSite.LAX; - } - } - + } } diff --git a/datahub-frontend/app/auth/Authenticator.java b/datahub-frontend/app/auth/Authenticator.java index ae847b318dce2..8536fc7e01695 100644 --- a/datahub-frontend/app/auth/Authenticator.java +++ b/datahub-frontend/app/auth/Authenticator.java @@ -1,48 +1,49 @@ package auth; +import static auth.AuthUtils.*; + import com.typesafe.config.Config; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import javax.inject.Inject; import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -import static auth.AuthUtils.*; - - /** * Implementation of base Play Authentication used to determine if a request to a route should be * authenticated. */ public class Authenticator extends Security.Authenticator { - private final boolean metadataServiceAuthEnabled; + private final boolean metadataServiceAuthEnabled; - @Inject - public Authenticator(@Nonnull Config config) { - this.metadataServiceAuthEnabled = config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + @Inject + public Authenticator(@Nonnull Config config) { + this.metadataServiceAuthEnabled = + config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) && config.getBoolean(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH); + } + + @Override + public Optional<String> getUsername(@Nonnull Http.Request req) { + if (this.metadataServiceAuthEnabled) { + // If Metadata Service auth is enabled, we only want to verify presence of the + // "Authorization" header OR the presence of a frontend generated session cookie. + // At this time, the actor is still considered to be unauthenicated. + return Optional.ofNullable( + AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); + } else { + // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. + return Optional.ofNullable( + AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); } + } - @Override - public Optional<String> getUsername(@Nonnull Http.Request req) { - if (this.metadataServiceAuthEnabled) { - // If Metadata Service auth is enabled, we only want to verify presence of the - // "Authorization" header OR the presence of a frontend generated session cookie. 
- // At this time, the actor is still considered to be unauthenicated. - return Optional.ofNullable(AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); - } else { - // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. - return Optional.ofNullable(AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); - } - } - - @Override - @Nonnull - public Result onUnauthorized(@Nullable Http.Request req) { - return unauthorized(); - } + @Override + @Nonnull + public Result onUnauthorized(@Nullable Http.Request req) { + return unauthorized(); + } } diff --git a/datahub-frontend/app/auth/ConfigUtil.java b/datahub-frontend/app/auth/ConfigUtil.java index e0999ee00be38..9fbed91ce6a10 100644 --- a/datahub-frontend/app/auth/ConfigUtil.java +++ b/datahub-frontend/app/auth/ConfigUtil.java @@ -3,20 +3,20 @@ import com.typesafe.config.Config; import java.util.Optional; - public class ConfigUtil { - private ConfigUtil() { - } + private ConfigUtil() {} public static String getRequired(final Config configs, final String path) { if (!configs.hasPath(path)) { - throw new IllegalArgumentException(String.format("Missing required config with path %s", path)); + throw new IllegalArgumentException( + String.format("Missing required config with path %s", path)); } return configs.getString(path); } - public static String getOptional(final Config configs, final String path, final String defaultVal) { + public static String getOptional( + final Config configs, final String path, final String defaultVal) { if (!configs.hasPath(path)) { return defaultVal; } diff --git a/datahub-frontend/app/auth/CookieConfigs.java b/datahub-frontend/app/auth/CookieConfigs.java index b6da9b7a1833c..63b2ce61aaf9b 100644 --- a/datahub-frontend/app/auth/CookieConfigs.java +++ b/datahub-frontend/app/auth/CookieConfigs.java @@ -1,6 +1,5 @@ package auth; - import com.typesafe.config.Config; public class CookieConfigs { @@ -16,12 +15,18 @@ public class CookieConfigs { private final boolean _authCookieSecure; public CookieConfigs(final Config configs) { - _ttlInHours = configs.hasPath(SESSION_TTL_CONFIG_PATH) ? configs.getInt(SESSION_TTL_CONFIG_PATH) - : DEFAULT_SESSION_TTL_HOURS; - _authCookieSameSite = configs.hasPath(AUTH_COOKIE_SAME_SITE) ? configs.getString(AUTH_COOKIE_SAME_SITE) - : DEFAULT_AUTH_COOKIE_SAME_SITE; - _authCookieSecure = configs.hasPath(AUTH_COOKIE_SECURE) ? configs.getBoolean(AUTH_COOKIE_SECURE) - : DEFAULT_AUTH_COOKIE_SECURE; + _ttlInHours = + configs.hasPath(SESSION_TTL_CONFIG_PATH) + ? configs.getInt(SESSION_TTL_CONFIG_PATH) + : DEFAULT_SESSION_TTL_HOURS; + _authCookieSameSite = + configs.hasPath(AUTH_COOKIE_SAME_SITE) + ? configs.getString(AUTH_COOKIE_SAME_SITE) + : DEFAULT_AUTH_COOKIE_SAME_SITE; + _authCookieSecure = + configs.hasPath(AUTH_COOKIE_SECURE) + ? 
configs.getBoolean(AUTH_COOKIE_SECURE)
+ : DEFAULT_AUTH_COOKIE_SECURE;
}
public int getTtlInHours() {
diff --git a/datahub-frontend/app/auth/JAASConfigs.java b/datahub-frontend/app/auth/JAASConfigs.java
index f39c20aceb6f9..529bf98e1fdcf 100644
--- a/datahub-frontend/app/auth/JAASConfigs.java
+++ b/datahub-frontend/app/auth/JAASConfigs.java
@@ -6,17 +6,18 @@
*/
public class JAASConfigs {
- public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled";
+ public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled";
- private Boolean _isEnabled = true;
+ private Boolean _isEnabled = true;
- public JAASConfigs(final com.typesafe.config.Config configs) {
- if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) {
- _isEnabled = false;
- }
+ public JAASConfigs(final com.typesafe.config.Config configs) {
+ if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH)
+ && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) {
+ _isEnabled = false;
}
+ }
- public boolean isJAASEnabled() {
- return _isEnabled;
- }
+ public boolean isJAASEnabled() {
+ return _isEnabled;
+ }
}
diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
index 3114da92d7d79..772c2c8f92f28 100644
--- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
+++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
@@ -1,23 +1,27 @@
package auth;
-/**
- * Currently, this config enables or disable native user authentication.
- */
+/** Currently, this config enables or disables native user authentication. */
public class NativeAuthenticationConfigs {
public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled";
- public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = "auth.native.signUp.enforceValidEmail";
+ public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH =
+ "auth.native.signUp.enforceValidEmail";
private Boolean _isEnabled = true;
private Boolean _isEnforceValidEmailEnabled = true;
public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) {
if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) {
- _isEnabled = Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString());
+ _isEnabled =
+ Boolean.parseBoolean(
+ configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString());
}
if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) {
_isEnforceValidEmailEnabled =
- Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH).toString());
+ Boolean.parseBoolean(
+ configs
+ .getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)
+ .toString());
}
}
diff --git a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
index a6dbd69a93889..223ac669bd6ea 100644
--- a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
+++ b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
@@ -7,16 +7,15 @@
import play.api.mvc.FlashCookieBaker;
import play.api.mvc.SessionCookieBaker;
-
public class CustomCookiesModule extends AbstractModule {
@Override
public void configure() {
bind(CookieSigner.class).toProvider(CookieSignerProvider.class);
- // We override the session cookie baker to not use a fallback, this prevents using an old URL Encoded cookie
+ // We override the session cookie 
baker to not use a fallback, this prevents using an old URL + // Encoded cookie bind(SessionCookieBaker.class).to(CustomSessionCookieBaker.class); // We don't care about flash cookies, we don't use them bind(FlashCookieBaker.class).to(DefaultFlashCookieBaker.class); } - } diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java index 062054173bddb..1f8455e773ffb 100644 --- a/datahub-frontend/app/auth/sso/SsoConfigs.java +++ b/datahub-frontend/app/auth/sso/SsoConfigs.java @@ -2,24 +2,19 @@ import static auth.ConfigUtil.*; - -/** - * Class responsible for extracting and validating top-level SSO related configurations. - */ +/** Class responsible for extracting and validating top-level SSO related configurations. */ public class SsoConfigs { - /** - * Required configs - */ + /** Required configs */ private static final String AUTH_BASE_URL_CONFIG_PATH = "auth.baseUrl"; + private static final String AUTH_BASE_CALLBACK_PATH_CONFIG_PATH = "auth.baseCallbackPath"; private static final String AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH = "auth.successRedirectPath"; public static final String OIDC_ENABLED_CONFIG_PATH = "auth.oidc.enabled"; - /** - * Default values - */ + /** Default values */ private static final String DEFAULT_BASE_CALLBACK_PATH = "/callback"; + private static final String DEFAULT_SUCCESS_REDIRECT_PATH = "/"; private final String _authBaseUrl; @@ -29,17 +24,14 @@ public class SsoConfigs { public SsoConfigs(final com.typesafe.config.Config configs) { _authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH); - _authBaseCallbackPath = getOptional( - configs, - AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, - DEFAULT_BASE_CALLBACK_PATH); - _authSuccessRedirectPath = getOptional( - configs, - AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, - DEFAULT_SUCCESS_REDIRECT_PATH); - _oidcEnabled = configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + _authBaseCallbackPath = + getOptional(configs, AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, DEFAULT_BASE_CALLBACK_PATH); + _authSuccessRedirectPath = + getOptional(configs, AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, DEFAULT_SUCCESS_REDIRECT_PATH); + _oidcEnabled = + configs.hasPath(OIDC_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals( + Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); } public String getAuthBaseUrl() { diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java index 739ce3f1ba450..bf33f4148a553 100644 --- a/datahub-frontend/app/auth/sso/SsoManager.java +++ b/datahub-frontend/app/auth/sso/SsoManager.java @@ -2,19 +2,16 @@ import javax.annotation.Nonnull; - -/** - * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. - */ +/** Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. */ public class SsoManager { private SsoProvider<?> _provider; // Only one active provider at a time. - public SsoManager() { } + public SsoManager() {} /** - * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been - * provided to the manager. + * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the + * manager. * * @return true if SSO logic is enabled, false otherwise. */ @@ -34,8 +31,8 @@ public void setSsoProvider(@Nonnull final SsoProvider<?> provider) { /** * Gets the active {@link SsoProvider} instance. 
* 
- * @return the {@SsoProvider} that should be used during authentication and on
- * IdP callback, or null if SSO is not enabled.
+ * @return the {@link SsoProvider} that should be used during authentication and on IdP callback, or
+ * null if SSO is not enabled.
*/
public SsoProvider<?> getSsoProvider() {
return _provider;
diff --git a/datahub-frontend/app/auth/sso/SsoProvider.java b/datahub-frontend/app/auth/sso/SsoProvider.java
index f7454d599ba99..a0947b52b92ae 100644
--- a/datahub-frontend/app/auth/sso/SsoProvider.java
+++ b/datahub-frontend/app/auth/sso/SsoProvider.java
@@ -3,15 +3,10 @@
import org.pac4j.core.client.Client;
import org.pac4j.core.credentials.Credentials;
-/**
- * A thin interface over a Pac4j {@link Client} object and its
- * associated configurations.
- */
+/** A thin interface over a Pac4j {@link Client} object and its associated configurations. */
public interface SsoProvider<C extends SsoConfigs> {
- /**
- * The protocol used for SSO.
- */
+ /** The protocol used for SSO. */
enum SsoProtocol {
OIDC("oidc");
// SAML -- not yet supported.
@@ -28,19 +23,12 @@ public String getCommonName() {
}
}
- /**
- * Returns the configs required by the provider.
- */
+ /** Returns the configs required by the provider. */
C configs();
- /**
- * Returns the SSO protocol associated with the provider instance.
- */
+ /** Returns the SSO protocol associated with the provider instance. */
SsoProtocol protocol();
- /**
- * Retrieves an initialized Pac4j {@link Client}.
- */
+ /** Retrieves an initialized Pac4j {@link Client}. */
Client<? extends Credentials> client();
-
}
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
index baca144610ec4..fa676d2d16c90 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
@@ -1,9 +1,9 @@
package auth.sso.oidc;
+import com.nimbusds.jwt.JWT;
+import com.nimbusds.jwt.JWTParser;
import java.util.Map.Entry;
import java.util.Optional;
-
-import com.nimbusds.jwt.JWTParser;
import org.pac4j.core.authorization.generator.AuthorizationGenerator;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.profile.AttributeLocation;
@@ -14,44 +14,43 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.nimbusds.jwt.JWT;
-
public class OidcAuthorizationGenerator implements AuthorizationGenerator {
- private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class);
-
- private final ProfileDefinition<?> profileDef;
+ private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class);
- private final OidcConfigs oidcConfigs;
+ private final ProfileDefinition<?> profileDef;
- public OidcAuthorizationGenerator(final ProfileDefinition<?> profileDef, final OidcConfigs oidcConfigs) {
- this.profileDef = profileDef;
- this.oidcConfigs = oidcConfigs;
- }
+ private final OidcConfigs oidcConfigs;
- @Override
- public Optional<UserProfile> generate(WebContext context, UserProfile profile) {
- if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) {
- try {
- final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue());
-
- CommonProfile commonProfile = new CommonProfile();
-
- for (final Entry<String, Object> entry : jwt.getJWTClaimsSet().getClaims().entrySet()) {
- final String claimName = entry.getKey();
-
- if (profile.getAttribute(claimName) == null) {
- 
profileDef.convertAndAdd(commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); - } - } - - return Optional.of(commonProfile); - } catch (Exception e) { - logger.warn("Cannot parse access token claims", e); - } + public OidcAuthorizationGenerator( + final ProfileDefinition<?> profileDef, final OidcConfigs oidcConfigs) { + this.profileDef = profileDef; + this.oidcConfigs = oidcConfigs; + } + + @Override + public Optional<UserProfile> generate(WebContext context, UserProfile profile) { + if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { + try { + final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); + + CommonProfile commonProfile = new CommonProfile(); + + for (final Entry<String, Object> entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { + final String claimName = entry.getKey(); + + if (profile.getAttribute(claimName) == null) { + profileDef.convertAndAdd( + commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); + } } - - return Optional.ofNullable(profile); + + return Optional.of(commonProfile); + } catch (Exception e) { + logger.warn("Cannot parse access token claims", e); + } } - + + return Optional.ofNullable(profile); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java index 7164710f4e0de..fa562f54312ec 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java @@ -1,6 +1,13 @@ package auth.sso.oidc; +import static auth.AuthUtils.*; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static org.pac4j.play.store.PlayCookieSessionStore.*; +import static play.mvc.Results.internalServerError; + import auth.CookieConfigs; +import auth.sso.SsoManager; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; @@ -59,23 +66,16 @@ import org.pac4j.core.util.Pac4jConstants; import org.pac4j.play.PlayWebContext; import play.mvc.Result; -import auth.sso.SsoManager; - -import static auth.AuthUtils.*; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; -import static org.pac4j.play.store.PlayCookieSessionStore.*; -import static play.mvc.Results.internalServerError; - /** - * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects back to D - * DataHub after an authentication attempt. + * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects + * back to D DataHub after an authentication attempt. * - * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract - * basic information about the user including their name, email, groups, & more. If just-in-time provisioning - * is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for the user, along with any Groups - * ({@link CorpGroupSnapshot}) that can be extracted, only doing so if the user does not already exist. + * <p>On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract + * basic information about the user including their name, email, groups, & more. 
If just-in-time + * provisioning is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for + * the user, along with any Groups ({@link CorpGroupSnapshot}) that can be extracted, only doing so + * if the user does not already exist. */ @Slf4j public class OidcCallbackLogic extends DefaultCallbackLogic<Result, PlayWebContext> { @@ -86,9 +86,12 @@ public class OidcCallbackLogic extends DefaultCallbackLogic<Result, PlayWebConte private final AuthServiceClient _authClient; private final CookieConfigs _cookieConfigs; - public OidcCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, - final CookieConfigs cookieConfigs) { + public OidcCallbackLogic( + final SsoManager ssoManager, + final Authentication systemAuthentication, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { _ssoManager = ssoManager; _systemAuthentication = systemAuthentication; _entityClient = entityClient; @@ -97,14 +100,27 @@ public OidcCallbackLogic(final SsoManager ssoManager, final Authentication syste } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { setContextRedirectUrl(context); final Result result = - super.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, + super.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, defaultClient); // Handle OIDC authentication errors. @@ -119,14 +135,25 @@ public Result perform(PlayWebContext context, Config config, @SuppressWarnings("unchecked") private void setContextRedirectUrl(PlayWebContext context) { - Optional<Cookie> redirectUrl = context.getRequestCookies().stream() - .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())).findFirst(); + Optional<Cookie> redirectUrl = + context.getRequestCookies().stream() + .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())) + .findFirst(); redirectUrl.ifPresent( - cookie -> context.getSessionStore().set(context, Pac4jConstants.REQUESTED_URL, - JAVA_SER_HELPER.deserializeFromBytes(uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); + cookie -> + context + .getSessionStore() + .set( + context, + Pac4jConstants.REQUESTED_URL, + JAVA_SER_HELPER.deserializeFromBytes( + uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); } - private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result result, final PlayWebContext context, + private Result handleOidcCallback( + final OidcConfigs oidcConfigs, + final Result result, + final PlayWebContext context, final ProfileManager<UserProfile> profileManager) { log.debug("Beginning OIDC Callback Handling..."); @@ -134,14 +161,17 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re if (profileManager.isAuthenticated()) { // If authenticated, the user should have a profile. 
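The username resolution a few hunks below applies a capture-group regex to a profile claim. A standalone sketch of that extraction; the helper mirrors the private extractRegexGroup referenced later in this file, and the email-stripping regex is a hypothetical example value:

import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ClaimRegexSketch {
  static Optional<String> extractRegexGroup(String pattern, String value) {
    Matcher m = Pattern.compile(pattern).matcher(value);
    return m.find() && m.groupCount() > 0 ? Optional.of(m.group(1)) : Optional.empty();
  }

  public static void main(String[] args) {
    // Hypothetical regex that strips an email domain from the claim.
    System.out.println(extractRegexGroup("([^@]+)@.*", "jdoe@example.com")); // Optional[jdoe]
    System.out.println(extractRegexGroup("(.*)", "jdoe")); // Optional[jdoe] (the default regex)
  }
}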
final CommonProfile profile = (CommonProfile) profileManager.get(true).get(); - log.debug(String.format("Found authenticated user with profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Found authenticated user with profile %s", profile.getAttributes().toString())); // Extract the User name required to log into DataHub. final String userName = extractUserNameOrThrow(oidcConfigs, profile); final CorpuserUrn corpUserUrn = new CorpuserUrn(userName); try { - // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does not exist. + // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does + // not exist. if (oidcConfigs.isJitProvisioningEnabled()) { log.debug("Just-in-time provisioning is enabled. Beginning provisioning process..."); CorpUserSnapshot extractedUser = extractUser(corpUserUrn, profile); @@ -150,7 +180,8 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re // Extract groups & provision them. List<CorpGroupSnapshot> extractedGroups = extractGroups(profile); tryProvisionGroups(extractedGroups); - // Add users to groups on DataHub. Note that this clears existing group membership for a user if it already exists. + // Add users to groups on DataHub. Note that this clears existing group membership for a + // user if it already exists. updateGroupMembership(corpUserUrn, createGroupMembership(extractedGroups)); } } else if (oidcConfigs.isPreProvisioningRequired()) { @@ -160,55 +191,69 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re } // Update user status to active on login. // If we want to prevent certain users from logging in, here's where we'll want to do it. - setUserStatus(corpUserUrn, new CorpUserStatus().setStatus(Constants.CORP_USER_STATUS_ACTIVE) - .setLastModified(new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()))); + setUserStatus( + corpUserUrn, + new CorpUserStatus() + .setStatus(Constants.CORP_USER_STATUS_ACTIVE) + .setLastModified( + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()))); } catch (Exception e) { log.error("Failed to perform post authentication steps. Redirecting to error page.", e); return internalServerError( - String.format("Failed to perform post authentication steps. Error message: %s", e.getMessage())); + String.format( + "Failed to perform post authentication steps. Error message: %s", e.getMessage())); } // Successfully logged in - Generate GMS login token final String accessToken = _authClient.generateSessionTokenForUser(corpUserUrn.getId()); return result - .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) - .withCookies( - createActorCookie( - corpUserUrn.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) + .withCookies( + createActorCookie( + corpUserUrn.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); } return internalServerError( "Failed to authenticate current user. 
Cannot find valid identity provider profile in session."); } - private String extractUserNameOrThrow(final OidcConfigs oidcConfigs, final CommonProfile profile) { + private String extractUserNameOrThrow( + final OidcConfigs oidcConfigs, final CommonProfile profile) { // Ensure that the attribute exists (was returned by IdP) if (!profile.containsAttribute(oidcConfigs.getUserNameClaim())) { - throw new RuntimeException(String.format( - "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s", - oidcConfigs.getUserNameClaim(), oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString())); + throw new RuntimeException( + String.format( + "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s", + oidcConfigs.getUserNameClaim(), + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString())); } final String userNameClaim = (String) profile.getAttribute(oidcConfigs.getUserNameClaim()); - final Optional<String> mappedUserName = extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); - - return mappedUserName.orElseThrow(() -> new RuntimeException( - String.format("Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", - userNameClaim, oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString()))); + final Optional<String> mappedUserName = + extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); + + return mappedUserName.orElseThrow( + () -> + new RuntimeException( + String.format( + "Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", + userNameClaim, + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString()))); } - /** - * Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. - */ + /** Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. */ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { - log.debug(String.format("Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); // Extracts these based on the default set of OIDC claims, described here: // https://developer.okta.com/blog/2017/07/25/oidc-primer-part-1 @@ -217,7 +262,9 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { String email = profile.getEmail(); URI picture = profile.getPictureUrl(); String displayName = profile.getDisplayName(); - String fullName = (String) profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. + String fullName = + (String) + profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. if (fullName == null && firstName != null && lastName != null) { fullName = String.format("%s %s", firstName, lastName); } @@ -231,7 +278,8 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { userInfo.setFullName(fullName, SetMode.IGNORE_NULL); userInfo.setEmail(email, SetMode.IGNORE_NULL); // If there is a display name, use it. Otherwise fall back to full name. - userInfo.setDisplayName(displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); + userInfo.setDisplayName( + displayName == null ? 
userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); final CorpUserEditableInfo editableInfo = new CorpUserEditableInfo(); try { @@ -254,15 +302,18 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) { - log.debug(String.format("Attempting to extract groups from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract groups from OIDC profile %s", + profile.getAttributes().toString())); final OidcConfigs configs = (OidcConfigs) _ssoManager.getSsoProvider().configs(); - // First, attempt to extract a list of groups from the profile, using the group name attribute config. + // First, attempt to extract a list of groups from the profile, using the group name attribute + // config. final List<CorpGroupSnapshot> extractedGroups = new ArrayList<>(); final List<String> groupsClaimNames = - new ArrayList<String>(Arrays.asList(configs.getGroupsClaimName().split(","))).stream() - .map(String::trim) - .collect(Collectors.toList()); + new ArrayList<String>(Arrays.asList(configs.getGroupsClaimName().split(","))) + .stream().map(String::trim).collect(Collectors.toList()); for (final String groupsClaimName : groupsClaimNames) { @@ -273,14 +324,16 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) { final Object groupAttribute = profile.getAttribute(groupsClaimName); if (groupAttribute instanceof Collection) { // List of group names - groupNames = (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class); + groupNames = + (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class); } else if (groupAttribute instanceof String) { // Single group name groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class)); } else { log.error( - String.format("Fail to parse OIDC group claim with name %s. Unknown type %s provided.", groupsClaimName, - groupAttribute.getClass())); + String.format( + "Fail to parse OIDC group claim with name %s. Unknown type %s provided.", + groupsClaimName, groupAttribute.getClass())); // Skip over group attribute. Do not throw. groupNames = Collections.emptyList(); } @@ -297,7 +350,8 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) { corpGroupInfo.setDisplayName(groupName); // To deal with the possibility of spaces, we url encode the URN group name. - final String urlEncodedGroupName = URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); + final String urlEncodedGroupName = + URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); final CorpGroupUrn groupUrn = new CorpGroupUrn(urlEncodedGroupName); final CorpGroupSnapshot corpGroupSnapshot = new CorpGroupSnapshot(); corpGroupSnapshot.setUrn(groupUrn); @@ -306,18 +360,23 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) { corpGroupSnapshot.setAspects(aspects); groupSnapshots.add(corpGroupSnapshot); } catch (UnsupportedEncodingException ex) { - log.error(String.format("Failed to URL encoded extracted group name %s. Skipping", groupName)); + log.error( + String.format( + "Failed to URL encoded extracted group name %s. 
Skipping", groupName)); } } if (groupSnapshots.isEmpty()) { - log.warn(String.format("Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); + log.warn( + String.format( + "Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); } else { extractedGroups.addAll(groupSnapshots); } } catch (Exception e) { - log.error(String.format( - "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", - groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + log.error( + String.format( + "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); } } } @@ -327,7 +386,8 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) { private GroupMembership createGroupMembership(final List<CorpGroupSnapshot> extractedGroups) { final GroupMembership groupMembershipAspect = new GroupMembership(); groupMembershipAspect.setGroups( - new UrnArray(extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + new UrnArray( + extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); return groupMembershipAspect; } @@ -345,30 +405,39 @@ private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) { // If we find more than the key aspect, then the entity "exists". if (existingCorpUserSnapshot.getAspects().size() <= 1) { log.debug( - String.format("Extracted user that does not yet exist %s. Provisioning...", corpUserSnapshot.getUrn())); + String.format( + "Extracted user that does not yet exist %s. Provisioning...", + corpUserSnapshot.getUrn())); // 2. The user does not exist. Provision them. final Entity newEntity = new Entity(); newEntity.setValue(Snapshot.create(corpUserSnapshot)); _entityClient.update(newEntity, _systemAuthentication); log.debug(String.format("Successfully provisioned user %s", corpUserSnapshot.getUrn())); } - log.debug(String.format("User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); + log.debug( + String.format( + "User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); // Otherwise, the user exists. Skip provisioning. } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); + throw new RuntimeException( + String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); } } private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) { - log.debug(String.format("Attempting to provision groups with urns %s", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + log.debug( + String.format( + "Attempting to provision groups with urns %s", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); // 1. Check if this user already exists. 
try { - final Set<Urn> urnsToFetch = corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); - final Map<Urn, Entity> existingGroups = _entityClient.batchGet(urnsToFetch, _systemAuthentication); + final Set<Urn> urnsToFetch = + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); + final Map<Urn, Entity> existingGroups = + _entityClient.batchGet(urnsToFetch, _systemAuthentication); log.debug(String.format("Fetched GMS groups with urns %s", existingGroups.keySet())); @@ -381,15 +450,21 @@ private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) { // If more than the key aspect exists, then the group already "exists". if (corpGroupSnapshot.getAspects().size() <= 1) { - log.debug(String.format("Extracted group that does not yet exist %s. Provisioning...", - corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + corpGroupSnapshot.getUrn())); groupsToCreate.add(extractedGroup); } - log.debug(String.format("Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); } else { // Should not occur until we stop returning default Key aspects for unrecognized entities. log.debug( - String.format("Extracted group that does not yet exist %s. Provisioning...", extractedGroup.getUrn())); + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + extractedGroup.getUrn())); groupsToCreate.add(extractedGroup); } } @@ -400,15 +475,20 @@ private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) { log.debug(String.format("Provisioning groups with urns %s", groupsToCreateUrns)); // Now batch create all entities identified to create. - _entityClient.batchUpdate(groupsToCreate.stream() - .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) - .collect(Collectors.toSet()), _systemAuthentication); + _entityClient.batchUpdate( + groupsToCreate.stream() + .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) + .collect(Collectors.toSet()), + _systemAuthentication); log.debug(String.format("Successfully provisioned groups with urns %s", groupsToCreateUrns)); } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision groups with urns %s.", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), e); + throw new RuntimeException( + String.format( + "Failed to provision groups with urns %s.", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), + e); } } @@ -423,12 +503,14 @@ private void updateGroupMembership(Urn urn, GroupMembership groupMembership) { try { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to update group membership for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to update group membership for user with urn %s", urn), e); } } private void verifyPreProvisionedUser(CorpuserUrn urn) { - // Validate that the user exists in the system (there is more than just a key aspect for them, as of today). + // Validate that the user exists in the system (there is more than just a key aspect for them, + // as of today). 
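User and group provisioning in this file share one existence test: GMS returns at least the key aspect even for unknown entities, so an entity "exists" only when more than one aspect comes back. A sketch of that heuristic with a plain list standing in for a snapshot's aspect array (names illustrative):

import java.util.List;

public class KeyAspectHeuristicSketch {
  // size <= 1 means only the default key aspect came back, i.e. the entity does not exist yet.
  static boolean entityExists(List<String> aspects) {
    return aspects.size() > 1;
  }

  public static void main(String[] args) {
    System.out.println(entityExists(List.of("corpUserKey"))); // false -> provision
    System.out.println(entityExists(List.of("corpUserKey", "corpUserInfo"))); // true -> skip
  }
}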
try { final Entity corpUser = _entityClient.get(urn, _systemAuthentication); @@ -436,9 +518,14 @@ private void verifyPreProvisionedUser(CorpuserUrn urn) { // If we find more than the key aspect, then the entity "exists". if (corpUser.getValue().getCorpUserSnapshot().getAspects().size() <= 1) { - log.debug(String.format("Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); - throw new RuntimeException(String.format("User with urn %s has not yet been provisioned in DataHub. " - + "Please contact your DataHub admin to provision an account.", urn)); + log.debug( + String.format( + "Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); + throw new RuntimeException( + String.format( + "User with urn %s has not yet been provisioned in DataHub. " + + "Please contact your DataHub admin to provision an account.", + urn)); } // Otherwise, the user exists. } catch (RemoteInvocationException e) { diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index eb037db2ef9c0..6877ca187da97 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -1,104 +1,122 @@ package auth.sso.oidc; +import static auth.ConfigUtil.*; + import auth.sso.SsoConfigs; import java.util.Optional; import lombok.Getter; -import static auth.ConfigUtil.*; - - -/** - * Class responsible for extracting and validating OIDC related configurations. - */ +/** Class responsible for extracting and validating OIDC related configurations. */ @Getter public class OidcConfigs extends SsoConfigs { - /** - * Required configs - */ - public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; - public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; - public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + /** Required configs */ + public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; + + public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; + public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + + /** Optional configs */ + public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; + + public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; + public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; + public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; + public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = + "auth.oidc.clientAuthenticationMethod"; + public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = + "auth.oidc.jitProvisioningEnabled"; + public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = + "auth.oidc.preProvisioningRequired"; + public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; + public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = + "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. 
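The groups claim path declared above accepts a comma-separated list of claim names, which the callback logic splits and trims before probing each claim. A standalone sketch of that normalization (the configured value shown is hypothetical):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class GroupsClaimNamesSketch {
  public static void main(String[] args) {
    String configured = "groups, memberOf"; // hypothetical auth.oidc.groupsClaim value
    List<String> claimNames =
        Arrays.stream(configured.split(",")).map(String::trim).collect(Collectors.toList());
    System.out.println(claimNames); // [groups, memberOf]
  }
}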
+ public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; + public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; + public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; + public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; + public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; + public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = + "auth.oidc.extractJwtAccessTokenClaims"; + public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; - /** - * Optional configs - */ - public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; - public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; - public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; - public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; - public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = "auth.oidc.clientAuthenticationMethod"; - public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = "auth.oidc.jitProvisioningEnabled"; - public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = "auth.oidc.preProvisioningRequired"; - public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; - public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. - public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; - public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; - public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; - public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; - public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; - public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims"; - public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; + /** Default values */ + private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - /** - * Default values - */ - private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; - private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; // Often "group" must be included for groups. - private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; - private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; - private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; - private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; - private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; // False since extraction of groups can overwrite existing group membership. - private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; - private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; + private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; + private static final String DEFAULT_OIDC_SCOPE = + "openid profile email"; // Often "group" must be included for groups. 
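The extractJwtAccessTokenClaims option above is what drives the JWTParser-based claim walk in OidcAuthorizationGenerator. A self-contained sketch of that parse-and-iterate pattern, using an unsigned Nimbus JWT so it runs without a real IdP (the token contents are made up):

import com.nimbusds.jwt.JWT;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.JWTParser;
import com.nimbusds.jwt.PlainJWT;

public class JwtClaimsSketch {
  public static void main(String[] args) throws Exception {
    // Build an unsigned token just to have something parseable.
    JWTClaimsSet claims =
        new JWTClaimsSet.Builder().subject("jdoe").claim("groups", "eng").build();
    String token = new PlainJWT(claims).serialize();
    // Same parse-and-iterate shape used on the access token in generate(...).
    JWT jwt = JWTParser.parse(token);
    jwt.getJWTClaimsSet()
        .getClaims()
        .forEach((name, value) -> System.out.println(name + " = " + value));
  }
}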
+ private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; + private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; + private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; + private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; + private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = + "false"; // False since extraction of groups can overwrite existing group membership. + private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; + private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; - private String clientId; - private String clientSecret; - private String discoveryUri; - private String userNameClaim; - private String userNameClaimRegex; - private String scope; - private String clientName; - private String clientAuthenticationMethod; - private boolean jitProvisioningEnabled; - private boolean preProvisioningRequired; - private boolean extractGroupsEnabled; - private String groupsClaimName; - private Optional<String> responseType; - private Optional<String> responseMode; - private Optional<Boolean> useNonce; - private Optional<String> customParamResource; - private String readTimeout; - private Optional<Boolean> extractJwtAccessTokenClaims; - private Optional<String> preferredJwsAlgorithm; + private String clientId; + private String clientSecret; + private String discoveryUri; + private String userNameClaim; + private String userNameClaimRegex; + private String scope; + private String clientName; + private String clientAuthenticationMethod; + private boolean jitProvisioningEnabled; + private boolean preProvisioningRequired; + private boolean extractGroupsEnabled; + private String groupsClaimName; + private Optional<String> responseType; + private Optional<String> responseMode; + private Optional<Boolean> useNonce; + private Optional<String> customParamResource; + private String readTimeout; + private Optional<Boolean> extractJwtAccessTokenClaims; + private Optional<String> preferredJwsAlgorithm; - public OidcConfigs(final com.typesafe.config.Config configs) { - super(configs); - clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); - clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); - discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); - userNameClaim = getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); - userNameClaimRegex = - getOptional(configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); - scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); - clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); - clientAuthenticationMethod = getOptional(configs, OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, + public OidcConfigs(final com.typesafe.config.Config configs) { + super(configs); + clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); + clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); + discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); + userNameClaim = + getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); + userNameClaimRegex = + getOptional( + configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); + scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); + clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); + clientAuthenticationMethod = + 
getOptional(
+            configs,
+            OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH,
             DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD);
-    jitProvisioningEnabled = Boolean.parseBoolean(
-        getOptional(configs, OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, DEFAULT_OIDC_JIT_PROVISIONING_ENABLED));
-    preProvisioningRequired = Boolean.parseBoolean(
-        getOptional(configs, OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED));
-    extractGroupsEnabled = Boolean.parseBoolean(
+    jitProvisioningEnabled =
+        Boolean.parseBoolean(
+            getOptional(
+                configs,
+                OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH,
+                DEFAULT_OIDC_JIT_PROVISIONING_ENABLED));
+    preProvisioningRequired =
+        Boolean.parseBoolean(
+            getOptional(
+                configs,
+                OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH,
+                DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED));
+    extractGroupsEnabled =
+        Boolean.parseBoolean(
             getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED));
-    groupsClaimName = getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM);
-    responseType = getOptional(configs, OIDC_RESPONSE_TYPE);
-    responseMode = getOptional(configs, OIDC_RESPONSE_MODE);
-    useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean);
-    customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE);
-    readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT);
-    extractJwtAccessTokenClaims = getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean);
-    preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null));
-  }
+    groupsClaimName =
+        getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM);
+    responseType = getOptional(configs, OIDC_RESPONSE_TYPE);
+    responseMode = getOptional(configs, OIDC_RESPONSE_MODE);
+    useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean);
+    customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE);
+    readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT);
+    extractJwtAccessTokenClaims =
+        getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean);
+    preferredJwsAlgorithm =
+        Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null));
+  }
 }
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java
index fd0a2e1877154..39a65a46cbf91 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java
@@ -10,15 +10,15 @@
 import org.pac4j.oidc.credentials.OidcCredentials;
 import org.pac4j.oidc.profile.OidcProfileDefinition;
 
-
 /**
  * Implementation of {@link SsoProvider} supporting the OIDC protocol.
  *
- * This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC related
- * configuration options, which reside in an instance of {@link OidcConfigs}.
+ * <p>This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC
+ * related configuration options, which reside in an instance of {@link OidcConfigs}.
  *
- * It is responsible for initializing this client from a configuration object ({@link OidcConfigs}. Note that
- * this class is not related to the logic performed when an IdP performs a callback to DataHub.
+ * <p>It is responsible for initializing this client from a configuration object ({@link
+ * OidcConfigs}). 
Note that this class is not related to the logic performed when an IdP performs a + * callback to DataHub. */ @Slf4j public class OidcProvider implements SsoProvider<OidcConfigs> { @@ -53,7 +53,8 @@ private Client<OidcCredentials> createPac4jClient() { oidcConfiguration.setClientId(_oidcConfigs.getClientId()); oidcConfiguration.setSecret(_oidcConfigs.getClientSecret()); oidcConfiguration.setDiscoveryURI(_oidcConfigs.getDiscoveryUri()); - oidcConfiguration.setClientAuthenticationMethodAsString(_oidcConfigs.getClientAuthenticationMethod()); + oidcConfiguration.setClientAuthenticationMethodAsString( + _oidcConfigs.getClientAuthenticationMethod()); oidcConfiguration.setScope(_oidcConfigs.getScope()); try { oidcConfiguration.setReadTimeout(Integer.parseInt(_oidcConfigs.getReadTimeout())); @@ -63,18 +64,24 @@ private Client<OidcCredentials> createPac4jClient() { _oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType); _oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode); _oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce); - _oidcConfigs.getCustomParamResource() + _oidcConfigs + .getCustomParamResource() .ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value))); - _oidcConfigs.getPreferredJwsAlgorithm().ifPresent(preferred -> { - log.info("Setting preferredJwsAlgorithm: " + preferred); - oidcConfiguration.setPreferredJwsAlgorithm(preferred); - }); + _oidcConfigs + .getPreferredJwsAlgorithm() + .ifPresent( + preferred -> { + log.info("Setting preferredJwsAlgorithm: " + preferred); + oidcConfiguration.setPreferredJwsAlgorithm(preferred); + }); final CustomOidcClient oidcClient = new CustomOidcClient(oidcConfiguration); oidcClient.setName(OIDC_CLIENT_NAME); - oidcClient.setCallbackUrl(_oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); + oidcClient.setCallbackUrl( + _oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); oidcClient.setCallbackUrlResolver(new PathParameterCallbackUrlResolver()); - oidcClient.addAuthorizationGenerator(new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); + oidcClient.addAuthorizationGenerator( + new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); return oidcClient; } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java index 014632c17e690..9881b5e095b78 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java @@ -1,57 +1,58 @@ package auth.sso.oidc; +import static play.mvc.Results.internalServerError; +import static play.mvc.Results.unauthorized; + +import java.util.Optional; import org.pac4j.play.PlayWebContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.mvc.Result; -import java.util.Optional; - -import static play.mvc.Results.internalServerError; -import static play.mvc.Results.unauthorized; - - public class OidcResponseErrorHandler { - private OidcResponseErrorHandler() { - - } - - private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); + private OidcResponseErrorHandler() {} - private static final String ERROR_FIELD_NAME = "error"; - private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; + private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); - public static Result handleError(final 
PlayWebContext context) {
+  private static final String ERROR_FIELD_NAME = "error";
+  private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description";
 
-    _logger.warn("OIDC responded with an error: '{}'. Error description: '{}'",
-        getError(context),
-        getErrorDescription(context));
+  public static Result handleError(final PlayWebContext context) {
 
-    if (getError(context).isPresent() && getError(context).get().equals("access_denied")) {
-      return unauthorized(String.format("Access denied. "
-          + "The OIDC service responded with 'Access denied'. "
-          + "It seems that you don't have access to this application yet. Please apply for access. \n\n"
-          + "If you already have been assigned this application, it may be so that your OIDC request is still in action. "
-          + "Error details: '%s':'%s'",
-          context.getRequestParameter("error"),
-          context.getRequestParameter("error_description")));
-    }
+    _logger.warn(
+        "OIDC responded with an error: '{}'. Error description: '{}'",
+        getError(context),
+        getErrorDescription(context));
 
-    return internalServerError(
-        String.format("Internal server error. The OIDC service responded with an error: '%s'.\n"
-            + "Error description: '%s'", getError(context).orElse(""), getErrorDescription(context).orElse("")));
+    if (getError(context).isPresent() && getError(context).get().equals("access_denied")) {
+      return unauthorized(
+          String.format(
+              "Access denied. "
+                  + "The OIDC service responded with 'Access denied'. "
+                  + "It seems that you don't have access to this application yet. Please apply for access. \n\n"
+                  + "If you have already been assigned this application, your OIDC request may still be in progress. "
+                  + "Error details: '%s':'%s'",
+              context.getRequestParameter("error"),
+              context.getRequestParameter("error_description")));
     }
 
-  public static boolean isError(final PlayWebContext context) {
-    return getError(context).isPresent() && !getError(context).get().isEmpty();
-  }
+    return internalServerError(
+        String.format(
+            "Internal server error. 
The OIDC service responded with an error: '%s'.\n" + + "Error description: '%s'", + getError(context).orElse(""), getErrorDescription(context).orElse(""))); + } - public static Optional<String> getError(final PlayWebContext context) { - return context.getRequestParameter(ERROR_FIELD_NAME); - } + public static boolean isError(final PlayWebContext context) { + return getError(context).isPresent() && !getError(context).get().isEmpty(); + } - public static Optional<String> getErrorDescription(final PlayWebContext context) { - return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); - } + public static Optional<String> getError(final PlayWebContext context) { + return context.getRequestParameter(ERROR_FIELD_NAME); + } + + public static Optional<String> getErrorDescription(final PlayWebContext context) { + return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java index 8c8c250fb7e63..01f8f16171d13 100644 --- a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java +++ b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java @@ -1,8 +1,8 @@ package auth.sso.oidc.custom; -import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; +import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.ParseException; import com.nimbusds.oauth2.sdk.TokenErrorResponse; import com.nimbusds.oauth2.sdk.TokenRequest; @@ -37,7 +37,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class CustomOidcAuthenticator implements Authenticator<OidcCredentials> { private static final Logger logger = LoggerFactory.getLogger(OidcAuthenticator.class); @@ -61,14 +60,17 @@ public CustomOidcAuthenticator(final OidcClient<OidcConfiguration> client) { this.client = client; // check authentication methods - final List<ClientAuthenticationMethod> metadataMethods = configuration.findProviderMetadata().getTokenEndpointAuthMethods(); + final List<ClientAuthenticationMethod> metadataMethods = + configuration.findProviderMetadata().getTokenEndpointAuthMethods(); - final ClientAuthenticationMethod preferredMethod = getPreferredAuthenticationMethod(configuration); + final ClientAuthenticationMethod preferredMethod = + getPreferredAuthenticationMethod(configuration); final ClientAuthenticationMethod chosenMethod; if (CommonHelper.isNotEmpty(metadataMethods)) { if (preferredMethod != null) { - if (ClientAuthenticationMethod.NONE.equals(preferredMethod) || metadataMethods.contains(preferredMethod)) { + if (ClientAuthenticationMethod.NONE.equals(preferredMethod) + || metadataMethods.contains(preferredMethod)) { chosenMethod = preferredMethod; } else { throw new TechnicalException( @@ -83,8 +85,10 @@ public CustomOidcAuthenticator(final OidcClient<OidcConfiguration> client) { chosenMethod = firstSupportedMethod(metadataMethods); } } else { - chosenMethod = preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); - logger.info("Provider metadata does not provide Token endpoint authentication methods. Using: {}", + chosenMethod = + preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); + logger.info( + "Provider metadata does not provide Token endpoint authentication methods. 
Using: {}",
           chosenMethod);
     }
 
@@ -103,38 +107,41 @@ public CustomOidcAuthenticator(final OidcClient<OidcConfiguration> client) {
   }
 
   /**
-   * The preferred {@link ClientAuthenticationMethod} specified in the given
-   * {@link OidcConfiguration}, or <code>null</code> meaning that the a
-   * provider-supported method should be chosen.
+   * The preferred {@link ClientAuthenticationMethod} specified in the given {@link
+   * OidcConfiguration}, or <code>null</code> meaning that a provider-supported method should be
+   * chosen.
    */
-  private static ClientAuthenticationMethod getPreferredAuthenticationMethod(OidcConfiguration config) {
+  private static ClientAuthenticationMethod getPreferredAuthenticationMethod(
+      OidcConfiguration config) {
     final ClientAuthenticationMethod configurationMethod = config.getClientAuthenticationMethod();
     if (configurationMethod == null) {
       return null;
     }
     if (!SUPPORTED_METHODS.contains(configurationMethod)) {
-      throw new TechnicalException("Configured authentication method (" + configurationMethod + ") is not supported.");
+      throw new TechnicalException(
+          "Configured authentication method (" + configurationMethod + ") is not supported.");
     }
     return configurationMethod;
   }
 
   /**
-   * The first {@link ClientAuthenticationMethod} from the given list of
-   * methods that is supported by this implementation.
+   * The first {@link ClientAuthenticationMethod} from the given list of methods that is supported
+   * by this implementation.
    *
-   * @throws TechnicalException
-   *           if none of the provider-supported methods is supported.
+   * @throws TechnicalException if none of the provider-supported methods is supported.
    */
-  private static ClientAuthenticationMethod firstSupportedMethod(final List<ClientAuthenticationMethod> metadataMethods) {
+  private static ClientAuthenticationMethod firstSupportedMethod(
+      final List<ClientAuthenticationMethod> metadataMethods) {
     Optional<ClientAuthenticationMethod> firstSupported =
         metadataMethods.stream().filter((m) -> SUPPORTED_METHODS.contains(m)).findFirst();
     if (firstSupported.isPresent()) {
       return firstSupported.get();
     } else {
-      throw new TechnicalException("None of the Token endpoint provider metadata authentication methods are supported: "
-          + metadataMethods);
+      throw new TechnicalException(
+          "None of the Token endpoint provider metadata authentication methods are supported: "
+              + metadataMethods);
     }
   }
 
@@ -145,21 +152,30 @@ public void validate(final OidcCredentials credentials, final WebContext context
     if (code != null) {
       try {
         final String computedCallbackUrl = client.computeFinalCallbackUrl(context);
-        CodeVerifier verifier = (CodeVerifier) configuration.getValueRetriever()
-            .retrieve(client.getCodeVerifierSessionAttributeName(), client, context).orElse(null);
+        CodeVerifier verifier =
+            (CodeVerifier)
+                configuration
+                    .getValueRetriever()
+                    .retrieve(client.getCodeVerifierSessionAttributeName(), client, context)
+                    .orElse(null);
         // Token request
-        final TokenRequest request = createTokenRequest(new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier));
+        final TokenRequest request =
+            createTokenRequest(
+                new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier));
         HTTPRequest tokenHttpRequest = request.toHTTPRequest();
         tokenHttpRequest.setConnectTimeout(configuration.getConnectTimeout());
         tokenHttpRequest.setReadTimeout(configuration.getReadTimeout());
         final HTTPResponse httpResponse = tokenHttpRequest.send();
-        logger.debug("Token response: status={}, content={}", httpResponse.getStatusCode(),
+ 
logger.debug( + "Token response: status={}, content={}", + httpResponse.getStatusCode(), httpResponse.getContent()); final TokenResponse response = OIDCTokenResponseParser.parse(httpResponse); if (response instanceof TokenErrorResponse) { - throw new TechnicalException("Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); + throw new TechnicalException( + "Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); } logger.debug("Token response successful"); final OIDCTokenResponse tokenSuccessResponse = (OIDCTokenResponse) response; @@ -178,11 +194,15 @@ public void validate(final OidcCredentials credentials, final WebContext context private TokenRequest createTokenRequest(final AuthorizationGrant grant) { if (clientAuthentication != null) { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - this.clientAuthentication, grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + this.clientAuthentication, + grant); } else { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - new ClientID(configuration.getClientId()), grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + new ClientID(configuration.getClientId()), + grant); } } } diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 24183f5c625da..4d40f45cd09b4 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -3,7 +3,6 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; - import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; @@ -17,17 +16,16 @@ import org.apache.http.util.EntityUtils; import play.mvc.Http; - -/** - * This class is responsible for coordinating authentication with the backend Metadata Service. - */ +/** This class is responsible for coordinating authentication with the backend Metadata Service. 
*/ @Slf4j public class AuthServiceClient { private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser"; private static final String SIGN_UP_ENDPOINT = "auth/signUp"; - private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; - private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; + private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/resetNativeUserCredentials"; + private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/verifyNativeUserCredentials"; private static final String TRACK_ENDPOINT = "auth/track"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; @@ -39,7 +37,8 @@ public class AuthServiceClient { private static final String INVITE_TOKEN_FIELD = "inviteToken"; private static final String RESET_TOKEN_FIELD = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch"; private final String metadataServiceHost; @@ -48,8 +47,11 @@ public class AuthServiceClient { private final Authentication systemAuthentication; private final CloseableHttpClient httpClient; - public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, - @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication, + public AuthServiceClient( + @Nonnull final String metadataServiceHost, + @Nonnull final Integer metadataServicePort, + @Nonnull final Boolean useSsl, + @Nonnull final Authentication systemAuthentication, @Nonnull final CloseableHttpClient httpClient) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); @@ -59,10 +61,11 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin } /** - * Call the Auth Service to generate a session token for a particular user with a unique actor id, or throws an exception if generation fails. + * Call the Auth Service to generate a session token for a particular user with a unique actor id, + * or throws an exception if generation fails. * - * Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of an Actor of type - * USER. + * <p>Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of + * an Actor of type USER. */ @Nonnull public String generateSessionTokenForUser(@Nonnull final String userId) { @@ -72,15 +75,21 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - GENERATE_SESSION_TOKEN_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + GENERATE_SESSION_TOKEN_ENDPOINT)); // Build JSON request to generate a token on behalf of a user. 
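+      // The body is a one-field JSON object, e.g. {"userId": "<actor id>"} (illustrative);
+      // a successful response carries the token under the "accessToken" field.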
final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_ID_FIELD, userId); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -94,7 +103,8 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { return getAccessTokenFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", + String.format( + "Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { @@ -110,11 +120,14 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { } } - /** - * Call the Auth Service to create a native Datahub user. - */ - public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email, - @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) { + /** Call the Auth Service to create a native Datahub user. */ + public boolean signUp( + @Nonnull final String userUrn, + @Nonnull final String fullName, + @Nonnull final String email, + @Nonnull final String title, + @Nonnull final String password, + @Nonnull final String inviteToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(fullName, "fullName must not be null"); Objects.requireNonNull(email, "email must not be null"); @@ -126,9 +139,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - SIGN_UP_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, SIGN_UP_ENDPOINT)); // Build JSON request to sign up a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -139,7 +154,8 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN objectNode.put(TITLE_FIELD, title); objectNode.put(PASSWORD_FIELD, password); objectNode.put(INVITE_TOKEN_FIELD, inviteToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -152,11 +168,15 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN final String jsonStr = EntityUtils.toString(entity); return getIsNativeUserCreatedFromJson(jsonStr); } else { - String content = response.getEntity().getContent() == null ? "" : new String( - response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); + String content = + response.getEntity().getContent() == null + ? 
"" + : new String( + response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s Body: %s", response.getStatusLine().toString(), - response.getEntity().toString(), content)); + String.format( + "Bad response from the Metadata Service: %s %s Body: %s", + response.getStatusLine().toString(), response.getEntity().toString(), content)); } } catch (Exception e) { throw new RuntimeException(String.format("Failed to create user %s", userUrn), e); @@ -171,10 +191,10 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN } } - /** - * Call the Auth Service to reset credentials for a native DataHub user. - */ - public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password, + /** Call the Auth Service to reset credentials for a native DataHub user. */ + public boolean resetNativeUserCredentials( + @Nonnull final String userUrn, + @Nonnull final String password, @Nonnull final String resetToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); @@ -184,9 +204,14 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -194,7 +219,8 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); objectNode.put(RESET_TOKEN_FIELD, resetToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -208,8 +234,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul return getAreNativeUserCredentialsResetFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to reset credentials for user", e); @@ -224,10 +251,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul } } - /** - * Call the Auth Service to verify the credentials for a native Datahub user. - */ - public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { + /** Call the Auth Service to verify the credentials for a native Datahub user. 
*/ + public boolean verifyNativeUserCredentials( + @Nonnull final String userUrn, @Nonnull final String password) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); CloseableHttpResponse response = null; @@ -235,16 +261,22 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -258,8 +290,9 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu return getDoesPasswordMatchFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to verify credentials for user", e); @@ -274,18 +307,18 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu } } - /** - * Call the Auth Service to track an analytics event - */ + /** Call the Auth Service to track an analytics event */ public void track(@Nonnull final String event) { Objects.requireNonNull(event, "event must not be null"); CloseableHttpResponse response = null; try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - TRACK_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, TRACK_ENDPOINT)); // Build JSON request to track event. 
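+      // Unlike the endpoints above, no JSON object is assembled here: the caller-supplied
+      // event string is forwarded verbatim as the UTF-8 request body.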
request.setEntity(new StringEntity(event, StandardCharsets.UTF_8)); @@ -298,8 +331,9 @@ public void track(@Nonnull final String event) { if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to track event", e); diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java index 59e91a6d5a0f7..b7173684b6350 100644 --- a/datahub-frontend/app/client/KafkaTrackingProducer.java +++ b/datahub-frontend/app/client/KafkaTrackingProducer.java @@ -3,6 +3,15 @@ import com.linkedin.metadata.config.kafka.ProducerConfiguration; import com.typesafe.config.Config; import config.ConfigurationProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.inject.Inject; +import javax.inject.Singleton; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -15,98 +24,141 @@ import play.api.inject.ApplicationLifecycle; import utils.ConfigUtil; -import javax.inject.Inject; - -import javax.annotation.Nonnull; -import javax.inject.Singleton; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; - @Singleton public class KafkaTrackingProducer { - private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); - private static final List<String> KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList( - Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), - SecurityProtocol.SASL_PLAINTEXT.name())); - - private final Boolean _isEnabled; - private final KafkaProducer<String, String> _producer; - - @Inject - public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle, final ConfigurationProvider configurationProvider) { - _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); - - if (_isEnabled) { - _logger.debug("Analytics tracking is enabled"); - _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); - - lifecycle.addStopHook( - () -> { - _producer.flush(); - _producer.close(); - return CompletableFuture.completedFuture(null); - }); - } else { - _logger.debug("Analytics tracking is disabled"); - _producer = null; - } - } - - public Boolean isEnabled() { - return _isEnabled; + private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); + private static final List<String> KAFKA_SSL_PROTOCOLS = + Collections.unmodifiableList( + Arrays.asList( + SecurityProtocol.SSL.name(), + SecurityProtocol.SASL_SSL.name(), + SecurityProtocol.SASL_PLAINTEXT.name())); + + private final Boolean _isEnabled; + private final KafkaProducer<String, String> _producer; + + @Inject + public KafkaTrackingProducer( + @Nonnull Config config, + ApplicationLifecycle lifecycle, + final 
ConfigurationProvider configurationProvider) { + _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); + + if (_isEnabled) { + _logger.debug("Analytics tracking is enabled"); + _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); + + lifecycle.addStopHook( + () -> { + _producer.flush(); + _producer.close(); + return CompletableFuture.completedFuture(null); + }); + } else { + _logger.debug("Analytics tracking is disabled"); + _producer = null; } - - public void send(ProducerRecord<String, String> record) { - _producer.send(record); + } + + public Boolean isEnabled() { + return _isEnabled; + } + + public void send(ProducerRecord<String, String> record) { + _producer.send(record); + } + + private static KafkaProducer createKafkaProducer( + Config config, ProducerConfiguration producerConfiguration) { + final Properties props = new Properties(); + props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); + props.put( + ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, + config.getString("analytics.kafka.delivery.timeout.ms")); + props.put( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + config.getString("analytics.kafka.bootstrap.server")); + props.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. + props.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. + props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); + props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); + + final String securityProtocolConfig = "analytics.kafka.security.protocol"; + if (config.hasPath(securityProtocolConfig) + && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { + props.put( + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); + setConfig( + config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); + + setConfig( + config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.keystore.location"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.keystore.password"); + + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + "analytics.kafka.ssl.truststore.type"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.truststore.location"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.truststore.password"); + + setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); + setConfig( + config, + props, + SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, + "analytics.kafka.ssl.endpoint.identification.algorithm"); + + final String securityProtocol = config.getString(securityProtocolConfig); + if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) + || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { + setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); + setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); + setConfig( + config, + props, + SaslConfigs.SASL_KERBEROS_SERVICE_NAME, + 
"analytics.kafka.sasl.kerberos.service.name"); + setConfig( + config, + props, + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.login.callback.handler.class"); + setConfig( + config, + props, + SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.client.callback.handler.class"); + } } - private static KafkaProducer createKafkaProducer(Config config, ProducerConfiguration producerConfiguration) { - final Properties props = new Properties(); - props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); - props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms")); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server")); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. - props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); - props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); - - final String securityProtocolConfig = "analytics.kafka.security.protocol"; - if (config.hasPath(securityProtocolConfig) - && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { - props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); - setConfig(config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); - - setConfig(config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.keystore.location"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.keystore.password"); - - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "analytics.kafka.ssl.truststore.type"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.truststore.location"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.truststore.password"); - - setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); - setConfig(config, props, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "analytics.kafka.ssl.endpoint.identification.algorithm"); - - final String securityProtocol = config.getString(securityProtocolConfig); - if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) - || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { - setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); - setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); - setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "analytics.kafka.sasl.kerberos.service.name"); - setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class"); - setConfig(config, props, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class"); - } - } - - return new org.apache.kafka.clients.producer.KafkaProducer<String, String>(props); - } + return new org.apache.kafka.clients.producer.KafkaProducer<String, String>(props); + } - private static void setConfig(Config config, Properties props, 
String key, String configKey) {
-    Optional.ofNullable(ConfigUtil.getString(config, configKey, null))
-        .ifPresent(v -> props.put(key, v));
-  }
+  private static void setConfig(Config config, Properties props, String key, String configKey) {
+    Optional.ofNullable(ConfigUtil.getString(config, configKey, null))
+        .ifPresent(v -> props.put(key, v));
+  }
 }
diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java
index 8f526c831b5c9..3d87267f8ebe3 100644
--- a/datahub-frontend/app/config/ConfigurationProvider.java
+++ b/datahub-frontend/app/config/ConfigurationProvider.java
@@ -4,28 +4,22 @@
 import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import lombok.Data;
-
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.PropertySource;
 
-
 /**
- * Minimal sharing between metadata-service and frontend
- * Does not use the factories module to avoid transitive dependencies.
+ * Minimal sharing between metadata-service and frontend. Does not use the factories module to
+ * avoid transitive dependencies.
  */
 @EnableConfigurationProperties
 @PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class)
 @ConfigurationProperties
 @Data
 public class ConfigurationProvider {
-  /**
-   * Kafka related configs.
-   */
-  private KafkaConfiguration kafka;
+  /** Kafka related configs. */
+  private KafkaConfiguration kafka;
 
-  /**
-   * Configuration for caching
-   */
-  private CacheConfiguration cache;
+  /** Configuration for caching */
+  private CacheConfiguration cache;
 }
diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java
index 5c76f2572a936..60971bf06e27b 100644
--- a/datahub-frontend/app/controllers/Application.java
+++ b/datahub-frontend/app/controllers/Application.java
@@ -1,5 +1,8 @@
 package controllers;
 
+import static auth.AuthUtils.ACTOR;
+import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME;
+
 import akka.actor.ActorSystem;
 import akka.stream.ActorMaterializer;
 import akka.stream.Materializer;
@@ -9,41 +12,35 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.util.Pair;
 import com.typesafe.config.Config;
-
+import java.io.InputStream;
+import java.time.Duration;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;
-
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import javax.inject.Inject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import play.Environment;
 import play.http.HttpEntity;
+import play.libs.Json;
 import play.libs.ws.InMemoryBodyWritable;
 import play.libs.ws.StandaloneWSClient;
-import play.libs.Json;
 import play.libs.ws.ahc.StandaloneAhcWSClient;
 import play.mvc.Controller;
 import play.mvc.Http;
 import play.mvc.ResponseHeader;
 import play.mvc.Result;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-import javax.inject.Inject;
-import java.io.InputStream;
 import play.mvc.Security;
 import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient;
 import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig;
 import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient;
 import 
play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig;
 import utils.ConfigUtil;
 
-import java.time.Duration;
-
-import static auth.AuthUtils.ACTOR;
-import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME;
-
-
 public class Application extends Controller {
   private final Logger _logger = LoggerFactory.getLogger(Application.class.getName());
@@ -61,22 +58,17 @@ public Application(Environment environment, @Nonnull Config config) {
   /**
    * Serves the build output index.html for any given path
    *
-   * @param path takes a path string, which essentially is ignored
-   *             routing is managed client side
+   * @param path takes a path string, which essentially is ignored; routing is managed client side
    * @return {Result} build output index.html resource
    */
   @Nonnull
   private Result serveAsset(@Nullable String path) {
     try {
       InputStream indexHtml = _environment.resourceAsStream("public/index.html");
-      return ok(indexHtml)
-        .withHeader("Cache-Control", "no-cache")
-        .as("text/html");
+      return ok(indexHtml).withHeader("Cache-Control", "no-cache").as("text/html");
     } catch (Exception e) {
       _logger.warn("Cannot load public/index.html resource. Static assets or assets jar missing?");
-      return notFound()
-        .withHeader("Cache-Control", "no-cache")
-        .as("text/html");
+      return notFound().withHeader("Cache-Control", "no-cache").as("text/html");
     }
   }
@@ -99,66 +91,87 @@ public Result index(@Nullable String path) {
   /**
    * Proxies requests to the Metadata Service
    *
-   * TODO: Investigate using mutual SSL authentication to call Metadata Service.
+   * <p>TODO: Investigate using mutual SSL authentication to call Metadata Service.
    */
   @Security.Authenticated(Authenticator.class)
-  public CompletableFuture<Result> proxy(String path, Http.Request request) throws ExecutionException, InterruptedException {
+  public CompletableFuture<Result> proxy(String path, Http.Request request)
+      throws ExecutionException, InterruptedException {
     final String authorizationHeaderValue = getAuthorizationHeaderValueToProxy(request);
     final String resolvedUri = mapPath(request.uri());
 
-    final String metadataServiceHost = ConfigUtil.getString(
-        _config,
-        ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH,
-        ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
-    final int metadataServicePort = ConfigUtil.getInt(
-        _config,
-        ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
-        ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
-    final boolean metadataServiceUseSsl = ConfigUtil.getBoolean(
-        _config,
-        ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
-        ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL
-    );
+    final String metadataServiceHost =
+        ConfigUtil.getString(
+            _config,
+            ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH,
+            ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
+    final int metadataServicePort =
+        ConfigUtil.getInt(
+            _config,
+            ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
+            ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
+    final boolean metadataServiceUseSsl =
+        ConfigUtil.getBoolean(
+            _config,
+            ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
+            ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL);
 
     // TODO: Fully support custom internal SSL.
     final String protocol = metadataServiceUseSsl ? 
"https" : "http"; final Map<String, List<String>> headers = request.getHeaders().toMap(); - if (headers.containsKey(Http.HeaderNames.HOST) && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { - headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); + if (headers.containsKey(Http.HeaderNames.HOST) + && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { + headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); } - return _ws.url(String.format("%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) + return _ws.url( + String.format( + "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) .setMethod(request.method()) - .setHeaders(headers - .entrySet() - .stream() - // Remove X-DataHub-Actor to prevent malicious delegation. - .filter(entry -> !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) - // Remove Host s.th. service meshes do not route to wrong host - .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ) + .setHeaders( + headers.entrySet().stream() + // Remove X-DataHub-Actor to prevent malicious delegation. + .filter( + entry -> + !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase( + entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) + // Remove Host s.th. 
service meshes do not route to wrong host + .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) .addHeader(Http.HeaderNames.AUTHORIZATION, authorizationHeaderValue) - .addHeader(AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) - .setBody(new InMemoryBodyWritable(ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), "application/json")) + .addHeader( + AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) + .setBody( + new InMemoryBodyWritable( + ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), + "application/json")) .setRequestTimeout(Duration.ofSeconds(120)) .execute() - .thenApply(apiResponse -> { - final ResponseHeader header = new ResponseHeader(apiResponse.getStatus(), apiResponse.getHeaders() - .entrySet() - .stream() - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); - final HttpEntity body = new HttpEntity.Strict(apiResponse.getBodyAsBytes(), Optional.ofNullable(apiResponse.getContentType())); - return new Result(header, body); - }).toCompletableFuture(); + .thenApply( + apiResponse -> { + final ResponseHeader header = + new ResponseHeader( + apiResponse.getStatus(), + apiResponse.getHeaders().entrySet().stream() + .filter( + entry -> + !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter( + entry -> + !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); + final HttpEntity body = + new HttpEntity.Strict( + apiResponse.getBodyAsBytes(), + Optional.ofNullable(apiResponse.getContentType())); + return new Result(header, body); + }) + .toCompletableFuture(); } /** @@ -173,11 +186,13 @@ public Result appConfig() { config.put("appVersion", _config.getString("app.version")); config.put("isInternal", _config.getBoolean("linkedin.internal")); config.put("shouldShowDatasetLineage", _config.getBoolean("linkedin.show.dataset.lineage")); - config.put("suggestionConfidenceThreshold", + config.put( + "suggestionConfidenceThreshold", Integer.valueOf(_config.getString("linkedin.suggestion.confidence.threshold"))); config.set("wikiLinks", wikiLinks()); config.set("tracking", trackingInfo()); - // In a staging environment, we can trigger this flag to be true so that the UI can handle based on + // In a staging environment, we can trigger this flag to be true so that the UI can handle based + // on // such config and alert users that their changes will not affect production data config.put("isStagingBanner", _config.getBoolean("ui.show.staging.banner")); config.put("isLiveDataWarning", _config.getBoolean("ui.show.live.data.banner")); @@ -206,6 +221,7 @@ public Result appConfig() { /** * Creates a JSON object of profile / avatar properties + * * @return Json avatar / profile image properties */ @Nonnull @@ -273,23 +289,26 @@ private StandaloneWSClient createWsClient() { } /** - * Returns the value of the Authorization Header to be provided when proxying requests to the downstream Metadata Service. 
+ * Returns the value of the Authorization Header to be provided when proxying requests to the + * downstream Metadata Service. * - * Currently, the Authorization header value may be derived from + * <p>Currently, the Authorization header value may be derived from * - * a) The value of the "token" attribute of the Session Cookie provided by the client. This value is set - * when creating the session token initially from a token granted by the Metadata Service. + * <p>a) The value of the "token" attribute of the Session Cookie provided by the client. This + * value is set when creating the session token initially from a token granted by the Metadata + * Service. * - * Or if the "token" attribute cannot be found in a session cookie, then we fallback to + * <p>Or if the "token" attribute cannot be found in a session cookie, then we fallback to * - * b) The value of the Authorization - * header provided in the original request. This will be used in cases where clients are making programmatic requests - * to Metadata Service APIs directly, without providing a session cookie (ui only). + * <p>b) The value of the Authorization header provided in the original request. This will be used + * in cases where clients are making programmatic requests to Metadata Service APIs directly, + * without providing a session cookie (ui only). * - * If neither are found, an empty string is returned. + * <p>If neither are found, an empty string is returned. */ private String getAuthorizationHeaderValueToProxy(Http.Request request) { - // If the session cookie has an authorization token, use that. If there's an authorization header provided, simply + // If the session cookie has an authorization token, use that. If there's an authorization + // header provided, simply // use that. String value = ""; if (request.session().data().containsKey(SESSION_COOKIE_GMS_TOKEN_NAME)) { @@ -301,11 +320,13 @@ private String getAuthorizationHeaderValueToProxy(Http.Request request) { } /** - * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This is sent along - * with any requests that have a valid frontend session cookie to identify the calling actor, for backwards compatibility. + * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This + * is sent along with any requests that have a valid frontend session cookie to identify the + * calling actor, for backwards compatibility. * - * If Metadata Service authentication is enabled, this value is not required because Actor context will most often come - * from the authentication credentials provided in the Authorization header. + * <p>If Metadata Service authentication is enabled, this value is not required because Actor + * context will most often come from the authentication credentials provided in the Authorization + * header. 
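A compact sketch of the a)/b) fallback just described, assuming Play's Http.Request API as used elsewhere in this controller; deriveAuthorizationHeader and the "Bearer " prefix are illustrative assumptions here, not the method defined below.

import play.mvc.Http;

final class AuthHeaderSketch {
  // a) Prefer the GMS token stored on the session cookie; b) fall back to the
  // request's own Authorization header; otherwise return an empty string.
  static String deriveAuthorizationHeader(Http.Request request, String sessionTokenKey) {
    String sessionToken = request.session().data().get(sessionTokenKey);
    if (sessionToken != null) {
      return "Bearer " + sessionToken;
    }
    return request.header(Http.HeaderNames.AUTHORIZATION).orElse("");
  }
}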
*/ private String getDataHubActorHeader(Http.Request request) { String actor = request.session().data().get(ACTOR); diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index e28d4ba2ee37e..9c232e965a003 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -1,5 +1,9 @@ package controllers; +import static auth.AuthUtils.*; +import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; +import static org.pac4j.play.store.PlayCookieSessionStore.*; + import auth.AuthUtils; import auth.CookieConfigs; import auth.JAASConfigs; @@ -35,325 +39,337 @@ import play.mvc.Results; import security.AuthenticationManager; -import static auth.AuthUtils.*; -import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; -import static org.pac4j.play.store.PlayCookieSessionStore.*; - - // TODO add logging. public class AuthenticationController extends Controller { - public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; - private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; - private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; - private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; - - private static final String SSO_NO_REDIRECT_MESSAGE = "SSO is configured, however missing redirect from idp"; - - private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); - private final CookieConfigs _cookieConfigs; - private final JAASConfigs _jaasConfigs; - private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; - private final boolean _verbose; - - @Inject - private org.pac4j.core.config.Config _ssoConfig; - - @Inject - private PlaySessionStore _playSessionStore; - - @Inject - private SsoManager _ssoManager; - - @Inject - AuthServiceClient _authClient; - - @Inject - public AuthenticationController(@Nonnull Config configs) { - _cookieConfigs = new CookieConfigs(configs); - _jaasConfigs = new JAASConfigs(configs); - _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); - _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; + private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; + private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; + private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; + + private static final String SSO_NO_REDIRECT_MESSAGE = + "SSO is configured, however missing redirect from idp"; + + private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); + private final CookieConfigs _cookieConfigs; + private final JAASConfigs _jaasConfigs; + private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; + private final boolean _verbose; + + @Inject private org.pac4j.core.config.Config _ssoConfig; + + @Inject private PlaySessionStore _playSessionStore; + + @Inject private SsoManager _ssoManager; + + @Inject AuthServiceClient _authClient; + + @Inject + public AuthenticationController(@Nonnull Config configs) { + _cookieConfigs = new CookieConfigs(configs); + _jaasConfigs = new JAASConfigs(configs); + _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); + _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && 
configs.getBoolean(AUTH_VERBOSE_LOGGING); + } + + /** + * Route used to perform authentication, or redirect to log in if authentication fails. + * + * <p>If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider + * (Indirect auth). If not, we will fall back to the default username / password login experience + * (Direct auth). + */ + @Nonnull + public Result authenticate(Http.Request request) { + + // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is + // authenticated. + + final Optional<String> maybeRedirectPath = + Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); + final String redirectPath = maybeRedirectPath.orElse("/"); + + if (AuthUtils.hasValidSessionCookie(request)) { + return Results.redirect(redirectPath); } - /** - * Route used to perform authentication, or redirect to log in if authentication fails. - * - * If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider (Indirect auth). - * If not, we will fall back to the default username / password login experience (Direct auth). - */ - @Nonnull - public Result authenticate(Http.Request request) { - - // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is authenticated. - - final Optional<String> maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); - final String redirectPath = maybeRedirectPath.orElse("/"); - - if (AuthUtils.hasValidSessionCookie(request)) { - return Results.redirect(redirectPath); - } - - // 1. If SSO is enabled, redirect to IdP if not authenticated. - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, redirectPath).orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - - // 2. If either JAAS auth or Native auth is enabled, fallback to it - if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { - return Results.redirect( - LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); - } - - // 3. If no auth enabled, fallback to using default user account & redirect. - // Generate GMS session token, TODO: - final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); - return Results.redirect(redirectPath).withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) - .withCookies( - createActorCookie( - DEFAULT_ACTOR_URN.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + // 1. If SSO is enabled, redirect to IdP if not authenticated. + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, redirectPath) + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); } - /** - * Redirect to the identity provider for authentication. - */ - @Nonnull - public Result sso(Http.Request request) { - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, "/").orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + // 2. 
If either JAAS auth or Native auth is enabled, fallback to it + if (_jaasConfigs.isJAASEnabled() + || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { + return Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); } - /** - * Log in a user based on a username + password. - * - * TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the default. - */ - @Nonnull - public Result logIn(Http.Request request) { - boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); - _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; - if (noAuthEnabled) { - String message = "Neither JAAS nor native authentication is enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } - - final JsonNode json = request.body().asJson(); - final String username = json.findPath(USER_NAME).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - - if (StringUtils.isBlank(username)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); - return Results.badRequest(invalidCredsJson); - } - - JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); - boolean loginSucceeded = tryLogin(username, password); - - if (!loginSucceeded) { - return Results.badRequest(invalidCredsJson); - } - - final Urn actorUrn = new CorpuserUrn(username); - final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); - return createSession(actorUrn.toString(), accessToken); + // 3. If no auth enabled, fallback to using default user account & redirect. + // Generate GMS session token, TODO: + final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); + return Results.redirect(redirectPath) + .withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) + .withCookies( + createActorCookie( + DEFAULT_ACTOR_URN.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } + + /** Redirect to the identity provider for authentication. */ + @Nonnull + public Result sso(Http.Request request) { + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, "/") + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); + } + return Results.redirect( + LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + } + + /** + * Log in a user based on a username + password. + * + * <p>TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the + * default. 
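For reference, a hedged sketch of the request body the logIn handler below parses via json.findPath(USER_NAME) / json.findPath(PASSWORD); the literal field names "username" and "password" are assumptions about those constants, not confirmed by this patch.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

final class LogInPayloadSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // The controller reads these two fields off request.body().asJson().
    ObjectNode body = mapper.createObjectNode();
    body.put("username", "jane");    // assumed value of USER_NAME
    body.put("password", "s3cret");  // assumed value of PASSWORD
    System.out.println(mapper.writeValueAsString(body));
  }
}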
+ */ + @Nonnull + public Result logIn(Http.Request request) { + boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); + _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; + if (noAuthEnabled) { + String message = "Neither JAAS nor native authentication is enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); } - /** - * Sign up a native user based on a name, email, title, and password. The invite token must match an existing invite token. - * - */ - @Nonnull - public Result signUp(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } + final JsonNode json = request.body().asJson(); + final String username = json.findPath(USER_NAME).textValue(); + final String password = json.findPath(PASSWORD).textValue(); - final JsonNode json = request.body().asJson(); - final String fullName = json.findPath(FULL_NAME).textValue(); - final String email = json.findPath(EMAIL).textValue(); - final String title = json.findPath(TITLE).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); + if (StringUtils.isBlank(username)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(fullName)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); - return Results.badRequest(invalidCredsJson); - } + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = tryLogin(username, password); - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { - Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); - if (!emailValidator.isValid(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - } + if (!loginSucceeded) { + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn actorUrn = new CorpuserUrn(username); + final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); + return createSession(actorUrn.toString(), accessToken); + } + + /** + * Sign up a native user based on a name, email, title, and password. The invite token must match + * an existing invite token. 
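The signUp handler that follows repeats the same isBlank check once per field; as a design note, a small helper could collapse that pattern. A sketch under that assumption (requireNonBlank is hypothetical and not part of this controller):

import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

final class ValidationSketch {
  // Returns an error message when the value is blank, an empty Optional otherwise.
  static Optional<String> requireNonBlank(String value, String fieldName) {
    return StringUtils.isBlank(value)
        ? Optional.of(fieldName + " must not be empty.")
        : Optional.empty();
  }

  public static void main(String[] args) {
    System.out.println(requireNonBlank("", "Full name"));         // Optional[Full name must not be empty.]
    System.out.println(requireNonBlank("Jane Doe", "Full name")); // Optional.empty
  }
}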
+ */ + @Nonnull + public Result signUp(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); + } - if (StringUtils.isBlank(title)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String fullName = json.findPath(FULL_NAME).textValue(); + final String email = json.findPath(EMAIL).textValue(); + final String title = json.findPath(TITLE).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); - if (StringUtils.isBlank(inviteToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(fullName)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } + if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { + Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); + if (!emailValidator.isValid(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } } - /** - * Reset a native user's credentials based on a username, old password, and new password. 
- * - */ - @Nonnull - public Result resetNativeUserCredentials(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return badRequest(error); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final JsonNode json = request.body().asJson(); - final String email = json.findPath(EMAIL).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String resetToken = json.findPath(RESET_TOKEN).textValue(); + if (StringUtils.isBlank(title)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(inviteToken)) { + JsonNode invalidCredsJson = + Json.newObject().put("message", "Invite token must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + /** Reset a native user's credentials based on a username, old password, and new password. 
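The redirect cookie above is built as Base64(compress(javaSerialize(FoundAction))). pac4j's compressBytes is not shown in this patch; a round-trip sketch using a JDK Deflater, purely to illustrate the compress-then-encode shape (CookiePayloadSketch is an illustrative name):

import java.io.ByteArrayOutputStream;
import java.util.Base64;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

final class CookiePayloadSketch {
  // Compress then Base64-encode, the same shape as
  // Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)) above.
  static String encode(byte[] raw) {
    Deflater deflater = new Deflater();
    deflater.setInput(raw);
    deflater.finish();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buffer = new byte[256];
    while (!deflater.finished()) {
      out.write(buffer, 0, deflater.deflate(buffer));
    }
    deflater.end();
    return Base64.getEncoder().encodeToString(out.toByteArray());
  }

  static byte[] decode(String encoded) throws DataFormatException {
    Inflater inflater = new Inflater();
    inflater.setInput(Base64.getDecoder().decode(encoded));
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buffer = new byte[256];
    while (!inflater.finished()) {
      out.write(buffer, 0, inflater.inflate(buffer));
    }
    inflater.end();
    return out.toByteArray();
  }

  public static void main(String[] args) throws DataFormatException {
    String cookieValue = encode("/dashboard".getBytes());
    System.out.println(new String(decode(cookieValue))); // /dashboard
  }
}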
*/ + @Nonnull + public Result resetNativeUserCredentials(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } - if (StringUtils.isBlank(resetToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String email = json.findPath(EMAIL).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String resetToken = json.findPath(RESET_TOKEN).textValue(); - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); } - private Optional<Result> redirectToIdentityProvider(Http.RequestHeader request, String redirectPath) { - final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); - final Client client = _ssoManager.getSsoProvider().client(); - configurePac4jSessionStore(playWebContext, client, redirectPath); - try { - final Optional<RedirectionAction> action = client.getRedirectionAction(playWebContext); - return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); - } catch (Exception e) { - if (_verbose) { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", e); - } else { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); - } - return Optional.of(Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8)))); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); } - private void configurePac4jSessionStore(PlayWebContext context, Client client, String redirectPath) { - // Set the originally requested path for post-auth redirection. We split off into a separate cookie from the session - // to reduce size of the session cookie - FoundAction foundAction = new FoundAction(redirectPath); - byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); - String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); - context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); - // This is to prevent previous login attempts from being cached. - // We replicate the logic here, which is buried in the Pac4j client. 
- if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) != null) { - _logger.debug("Found previous login attempt. Removing it manually to prevent unexpected errors."); - _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); - } + if (StringUtils.isBlank(resetToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); + return Results.badRequest(invalidCredsJson); } - private String encodeRedirectUri(final String redirectUri) { - return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + private Optional<Result> redirectToIdentityProvider( + Http.RequestHeader request, String redirectPath) { + final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); + final Client client = _ssoManager.getSsoProvider().client(); + configurePac4jSessionStore(playWebContext, client, redirectPath); + try { + final Optional<RedirectionAction> action = client.getRedirectionAction(playWebContext); + return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); + } catch (Exception e) { + if (_verbose) { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", + e); + } else { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); + } + return Optional.of( + Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8)))); } - - private boolean tryLogin(String username, String password) { - boolean loginSucceeded = false; - - // First try jaas login, if enabled - if (_jaasConfigs.isJAASEnabled()) { - try { - _logger.debug("Attempting jaas authentication"); - AuthenticationManager.authenticateJaasUser(username, password); - _logger.debug("Jaas authentication successful. Login succeeded"); - loginSucceeded = true; - } catch (Exception e) { - if (_verbose) { - _logger.debug("Jaas authentication error. Login failed", e); - } else { - _logger.debug("Jaas authentication error. Login failed"); - } - } - } - - // If jaas login fails or is disabled, try native auth login - if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { - final Urn userUrn = new CorpuserUrn(username); - final String userUrnString = userUrn.toString(); - loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); + } + + private void configurePac4jSessionStore( + PlayWebContext context, Client client, String redirectPath) { + // Set the originally requested path for post-auth redirection. 
We split off into a separate + // cookie from the session + // to reduce size of the session cookie + FoundAction foundAction = new FoundAction(redirectPath); + byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); + String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); + context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); + // This is to prevent previous login attempts from being cached. + // We replicate the logic here, which is buried in the Pac4j client. + if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) + != null) { + _logger.debug( + "Found previous login attempt. Removing it manually to prevent unexpected errors."); + _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); + } + } + + private String encodeRedirectUri(final String redirectUri) { + return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + } + + private boolean tryLogin(String username, String password) { + boolean loginSucceeded = false; + + // First try jaas login, if enabled + if (_jaasConfigs.isJAASEnabled()) { + try { + _logger.debug("Attempting jaas authentication"); + AuthenticationManager.authenticateJaasUser(username, password); + _logger.debug("Jaas authentication successful. Login succeeded"); + loginSucceeded = true; + } catch (Exception e) { + if (_verbose) { + _logger.debug("Jaas authentication error. Login failed", e); + } else { + _logger.debug("Jaas authentication error. Login failed"); } - - return loginSucceeded; + } } - private Result createSession(String userUrnString, String accessToken) { - return Results.ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies( - createActorCookie( - userUrnString, - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); - + // If jaas login fails or is disabled, try native auth login + if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { + final Urn userUrn = new CorpuserUrn(username); + final String userUrnString = userUrn.toString(); + loginSucceeded = + loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); } -} \ No newline at end of file + + return loginSucceeded; + } + + private Result createSession(String userUrnString, String accessToken) { + return Results.ok() + .withSession(createSessionMap(userUrnString, accessToken)) + .withCookies( + createActorCookie( + userUrnString, + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } +} diff --git a/datahub-frontend/app/controllers/CentralLogoutController.java b/datahub-frontend/app/controllers/CentralLogoutController.java index 5e24fe9f8220c..eea1c662ebf89 100644 --- a/datahub-frontend/app/controllers/CentralLogoutController.java +++ b/datahub-frontend/app/controllers/CentralLogoutController.java @@ -2,18 +2,15 @@ import com.typesafe.config.Config; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import javax.inject.Inject; import lombok.extern.slf4j.Slf4j; import org.pac4j.play.LogoutController; import play.mvc.Http; import play.mvc.Result; import play.mvc.Results; -import javax.inject.Inject; -import java.nio.charset.StandardCharsets; - -/** - * Responsible for handling logout logic with oidc providers - */ +/** Responsible for handling logout logic with oidc providers */ @Slf4j public class CentralLogoutController extends 
LogoutController { private static final String AUTH_URL_CONFIG_PATH = "/login"; @@ -28,26 +25,27 @@ public CentralLogoutController(Config config) { setLogoutUrlPattern(DEFAULT_BASE_URL_PATH + ".*"); setLocalLogout(true); setCentralLogout(true); - } - /** - * logout() method should not be called if oidc is not enabled - */ + /** logout() method should not be called if oidc is not enabled */ public Result executeLogout(Http.Request request) { if (_isOidcEnabled) { try { return logout(request).toCompletableFuture().get().withNewSession(); } catch (Exception e) { - log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e); + log.error( + "Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", + e); return redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8))) - .withNewSession(); + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8))) + .withNewSession(); } } - return Results.redirect(AUTH_URL_CONFIG_PATH) - .withNewSession(); + return Results.redirect(AUTH_URL_CONFIG_PATH).withNewSession(); } } diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java index 7a4b5585cc21a..9f4445b1aa5c7 100644 --- a/datahub-frontend/app/controllers/SsoCallbackController.java +++ b/datahub-frontend/app/controllers/SsoCallbackController.java @@ -1,6 +1,9 @@ package controllers; import auth.CookieConfigs; +import auth.sso.SsoManager; +import auth.sso.SsoProvider; +import auth.sso.oidc.OidcCallbackLogic; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemEntityClient; @@ -18,17 +21,13 @@ import org.pac4j.play.PlayWebContext; import play.mvc.Http; import play.mvc.Result; -import auth.sso.oidc.OidcCallbackLogic; -import auth.sso.SsoManager; -import auth.sso.SsoProvider; import play.mvc.Results; - /** * A dedicated Controller for handling redirects to DataHub by 3rd-party Identity Providers after * off-platform authentication. * - * Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines + * <p>Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines * the handling logic to invoke. */ @Slf4j @@ -46,56 +45,88 @@ public SsoCallbackController( _ssoManager = ssoManager; setDefaultUrl("/"); // By default, redirects to Home Page on log in. setSaveInSession(false); - setCallbackLogic(new SsoCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, new CookieConfigs(configs))); + setCallbackLogic( + new SsoCallbackLogic( + ssoManager, + systemAuthentication, + entityClient, + authClient, + new CookieConfigs(configs))); } public CompletionStage<Result> handleCallback(String protocol, Http.Request request) { if (shouldHandleCallback(protocol)) { log.debug(String.format("Handling SSO callback. Protocol: %s", protocol)); - return callback(request).handle((res, e) -> { - if (e != null) { - log.error("Caught exception while attempting to handle SSO callback! 
It's likely that SSO integration is mis-configured.", e); - return Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode( - "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", - StandardCharsets.UTF_8))) - .discardingCookie("actor") - .withNewSession(); - } - return res; - }); + return callback(request) + .handle( + (res, e) -> { + if (e != null) { + log.error( + "Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", + e); + return Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", + StandardCharsets.UTF_8))) + .discardingCookie("actor") + .withNewSession(); + } + return res; + }); } - return CompletableFuture.completedFuture(Results.internalServerError( - String.format("Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); + return CompletableFuture.completedFuture( + Results.internalServerError( + String.format( + "Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); } - - /** - * Logic responsible for delegating to protocol-specific callback logic. - */ + /** Logic responsible for delegating to protocol-specific callback logic. */ public class SsoCallbackLogic implements CallbackLogic<Result, PlayWebContext> { private final OidcCallbackLogic _oidcCallbackLogic; - SsoCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, final CookieConfigs cookieConfigs) { - _oidcCallbackLogic = new OidcCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); + SsoCallbackLogic( + final SsoManager ssoManager, + final Authentication systemAuthentication, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { + _oidcCallbackLogic = + new OidcCallbackLogic( + ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { if (SsoProvider.SsoProtocol.OIDC.equals(_ssoManager.getSsoProvider().protocol())) { - return _oidcCallbackLogic.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, defaultClient); + return _oidcCallbackLogic.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, + defaultClient); } // Should never occur. - throw new UnsupportedOperationException("Failed to find matching SSO Provider. Only one supported is OIDC."); + throw new UnsupportedOperationException( + "Failed to find matching SSO Provider. 
Only one supported is OIDC."); } } private boolean shouldHandleCallback(final String protocol) { - return _ssoManager.isSsoEnabled() && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + return _ssoManager.isSsoEnabled() + && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); } } diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java index 776ab5cad58ff..254a8cc640d0c 100644 --- a/datahub-frontend/app/controllers/TrackingController.java +++ b/datahub-frontend/app/controllers/TrackingController.java @@ -1,14 +1,15 @@ package controllers; +import static auth.AuthUtils.ACTOR; + import auth.Authenticator; import client.AuthServiceClient; +import client.KafkaTrackingProducer; import com.fasterxml.jackson.databind.JsonNode; import com.typesafe.config.Config; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Singleton; - - import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,57 +17,52 @@ import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import client.KafkaTrackingProducer; - -import static auth.AuthUtils.ACTOR; - // TODO: Migrate this to metadata-service. @Singleton public class TrackingController extends Controller { - private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); + private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); - private final String _topic; + private final String _topic; - @Inject - KafkaTrackingProducer _producer; + @Inject KafkaTrackingProducer _producer; - @Inject - AuthServiceClient _authClient; + @Inject AuthServiceClient _authClient; - @Inject - public TrackingController(@Nonnull Config config) { - _topic = config.getString("analytics.tracking.topic"); - } + @Inject + public TrackingController(@Nonnull Config config) { + _topic = config.getString("analytics.tracking.topic"); + } - @Security.Authenticated(Authenticator.class) - @Nonnull - public Result track(Http.Request request) throws Exception { - if (!_producer.isEnabled()) { - // If tracking is disabled, simply return a 200. - return status(200); - } + @Security.Authenticated(Authenticator.class) + @Nonnull + public Result track(Http.Request request) throws Exception { + if (!_producer.isEnabled()) { + // If tracking is disabled, simply return a 200. + return status(200); + } - JsonNode event; - try { - event = request.body().asJson(); - } catch (Exception e) { - return badRequest(); - } - final String actor = request.session().data().get(ACTOR); - try { - _logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); - final ProducerRecord<String, String> record = new ProducerRecord<>( - _topic, - actor, - event.toString()); - _producer.send(record); - _authClient.track(event.toString()); - return ok(); - } catch (Exception e) { - _logger.error(String.format("Failed to emit product analytics event. actor: %s, event: %s", actor, event)); - return internalServerError(e.getMessage()); - } + JsonNode event; + try { + event = request.body().asJson(); + } catch (Exception e) { + return badRequest(); + } + final String actor = request.session().data().get(ACTOR); + try { + _logger.debug( + String.format("Emitting product analytics event. 
actor: %s, event: %s", actor, event)); + final ProducerRecord<String, String> record = + new ProducerRecord<>(_topic, actor, event.toString()); + _producer.send(record); + _authClient.track(event.toString()); + return ok(); + } catch (Exception e) { + _logger.error( + String.format( + "Failed to emit product analytics event. actor: %s, event: %s", actor, event)); + return internalServerError(e.getMessage()); } + } } diff --git a/datahub-frontend/app/security/AuthUtil.java b/datahub-frontend/app/security/AuthUtil.java index 8af90b37a6f31..55752644ada70 100644 --- a/datahub-frontend/app/security/AuthUtil.java +++ b/datahub-frontend/app/security/AuthUtil.java @@ -8,52 +8,53 @@ import javax.crypto.spec.SecretKeySpec; import org.apache.commons.codec.digest.HmacAlgorithms; - -/** - * Auth Utils - * Adheres to HSEC requirement for creating application tokens - */ +/** Auth Utils Adheres to HSEC requirement for creating application tokens */ public final class AuthUtil { private static final String HMAC_SHA256_ALGORITHM = HmacAlgorithms.HMAC_SHA_256.toString(); private static final String DELIIMITER = ":"; private static final String HEX_CHARS = "0123456789ABCDEF"; - private AuthUtil() { } + private AuthUtil() {} /** * Generate hash string using the secret HMAC Key + * * @param value value to be hashed * @param hmacKey secret HMAC key * @return Hashed string using the secret key * @throws NoSuchAlgorithmException * @throws InvalidKeyException */ - public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlgorithmException, InvalidKeyException { - //Time-stamp at Encryption time + public static String generateHash(String value, byte[] hmacKey) + throws NoSuchAlgorithmException, InvalidKeyException { + // Time-stamp at Encryption time long tStamp = System.currentTimeMillis(); String uTValue = new String(); String cValue; String finalEncValue; - //Concatenated Values + // Concatenated Values uTValue = uTValue.concat(value).concat(":").concat(Long.toString(tStamp)); cValue = uTValue; - //Digest - HMAC-SHA256 + // Digest - HMAC-SHA256 SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); byte[] rawHmac = mac.doFinal(uTValue.getBytes()); String hmacString = getHex(rawHmac); - finalEncValue = Base64.getEncoder().encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); + finalEncValue = + Base64.getEncoder() + .encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); return finalEncValue; } /** * Validate the one-way hash string + * * @param hashedValue Hashed value to be validated * @param hmacKey HMAC Key used to create the hash * @param sessionWindow previously defined session window to validate if the hash is expired @@ -62,7 +63,7 @@ public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlg */ public static String verifyHash(String hashedValue, byte[] hmacKey, long sessionWindow) throws GeneralSecurityException { - //Username:Timestamp:SignedHMAC(Username:Timestamp) + // Username:Timestamp:SignedHMAC(Username:Timestamp) String[] decryptedHash = decryptBase64Hash(hashedValue); String username = decryptedHash[0]; String timestamp = decryptedHash[1]; @@ -70,7 +71,7 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session long newTStamp = System.currentTimeMillis(); String newUTValue = username.concat(DELIIMITER).concat(timestamp); - //Digest - HMAC-SHA1 Verify + // Digest - HMAC-SHA1 Verify SecretKeySpec 
signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); @@ -87,8 +88,10 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session return decryptedHash[0]; } + /** * Decrypt base64 hash + * * @param value base 64 hash string * @return Decrypted base 64 string */ @@ -96,8 +99,10 @@ private static String[] decryptBase64Hash(String value) { String decodedBase64 = new String(Base64.getDecoder().decode(value)); return decodedBase64.split(DELIIMITER); } + /** * Get Hex string from byte array + * * @param raw byte array * @return Hex representation of the byte array */ @@ -114,14 +119,16 @@ private static String getHex(byte[] raw) { return hex.toString(); } + /** * Compares two HMAC byte arrays + * * @param a HMAC byte array 1 * @param b HMAC byte array 2 * @return true if the two HMAC are identical */ private static boolean isEqual(byte[] a, byte[] b) { - if (a == null || b == null || a.length != b.length) { + if (a == null || b == null || a.length != b.length) { return false; } @@ -133,4 +140,4 @@ private static boolean isEqual(byte[] a, byte[] b) { return result == 0; } -} \ No newline at end of file +} diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index 67bcf7e404335..f46dc57c232bd 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ b/datahub-frontend/app/security/AuthenticationManager.java @@ -15,13 +15,12 @@ import org.eclipse.jetty.jaas.PropertyUserStoreManager; import play.Logger; - public class AuthenticationManager { - private AuthenticationManager(boolean verbose) { - } + private AuthenticationManager(boolean verbose) {} - public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception { + public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) + throws Exception { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication"); PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager(); @@ -29,10 +28,12 @@ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull Strin jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager)); JAASLoginService.INSTANCE.set(jaasLoginService); try { - LoginContext lc = new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); + LoginContext lc = + new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); lc.login(); } catch (LoginException le) { - AuthenticationException authenticationException = new AuthenticationException(le.getMessage()); + AuthenticationException authenticationException = + new AuthenticationException(le.getMessage()); authenticationException.setRootCause(le); throw authenticationException; } @@ -52,7 +53,8 @@ public void handle(@Nonnull Callback[] callbacks) { NameCallback nc = null; PasswordCallback pc = null; for (Callback callback : callbacks) { - Logger.debug("The submitted callback is of type: " + callback.getClass() + " : " + callback); + Logger.debug( + "The submitted callback is of type: " + callback.getClass() + " : " + callback); if (callback instanceof NameCallback) { nc = (NameCallback) callback; nc.setName(this.username); diff --git a/datahub-frontend/app/security/DummyLoginModule.java 
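To make the Username:Timestamp:SignedHMAC(Username:Timestamp) format documented in AuthUtil concrete, a self-contained re-derivation using the JDK's Mac; this is a sketch of the documented format, not the AuthUtil implementation itself (HmacTokenSketch and generate are illustrative names).

import java.util.Base64;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

final class HmacTokenSketch {
  private static final String ALGORITHM = "HmacSHA256";

  // Builds "value:timestamp:HEX(HMAC-SHA256(value:timestamp))", Base64-encoded,
  // following the token format documented for AuthUtil above.
  static String generate(String value, byte[] hmacKey, long timestamp) throws Exception {
    String payload = value + ":" + timestamp;
    Mac mac = Mac.getInstance(ALGORITHM);
    mac.init(new SecretKeySpec(hmacKey, ALGORITHM));
    byte[] rawHmac = mac.doFinal(payload.getBytes());
    StringBuilder hex = new StringBuilder();
    for (byte b : rawHmac) {
      hex.append(String.format("%02X", b));
    }
    return Base64.getEncoder().encodeToString((payload + ":" + hex).getBytes());
  }

  public static void main(String[] args) throws Exception {
    String token = generate("datahub", "secret-key".getBytes(), 1700000000000L);
    // Decoding exposes the three ':'-separated parts that verifyHash splits on.
    System.out.println(new String(Base64.getDecoder().decode(token)));
  }
}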
b/datahub-frontend/app/security/DummyLoginModule.java index 56822f0805be4..c46fa29e1599a 100644 --- a/datahub-frontend/app/security/DummyLoginModule.java +++ b/datahub-frontend/app/security/DummyLoginModule.java @@ -1,21 +1,22 @@ package security; +import java.util.Map; import javax.security.auth.Subject; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.LoginException; import javax.security.auth.spi.LoginModule; -import java.util.Map; - /** - * This LoginModule performs dummy authentication. - * Any username and password can work for authentication + * This LoginModule performs dummy authentication. Any username and password can work for + * authentication */ public class DummyLoginModule implements LoginModule { - public void initialize(final Subject subject, final CallbackHandler callbackHandler, - final Map<String, ?> sharedState, final Map<String, ?> options) { - } + public void initialize( + final Subject subject, + final CallbackHandler callbackHandler, + final Map<String, ?> sharedState, + final Map<String, ?> options) {} public boolean login() throws LoginException { return true; @@ -32,5 +33,4 @@ public boolean abort() throws LoginException { public boolean logout() throws LoginException { return true; } - -} \ No newline at end of file +} diff --git a/datahub-frontend/app/utils/ConfigUtil.java b/datahub-frontend/app/utils/ConfigUtil.java index b99a5e123b9eb..5c80389c96da4 100644 --- a/datahub-frontend/app/utils/ConfigUtil.java +++ b/datahub-frontend/app/utils/ConfigUtil.java @@ -3,18 +3,16 @@ import com.linkedin.util.Configuration; import com.typesafe.config.Config; - public class ConfigUtil { - private ConfigUtil() { - - } + private ConfigUtil() {} // New configurations, provided via application.conf file. 
public static final String METADATA_SERVICE_HOST_CONFIG_PATH = "metadataService.host"; public static final String METADATA_SERVICE_PORT_CONFIG_PATH = "metadataService.port"; public static final String METADATA_SERVICE_USE_SSL_CONFIG_PATH = "metadataService.useSsl"; - public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = "metadataService.sslProtocol"; + public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = + "metadataService.sslProtocol"; // Legacy env-var based config values, for backwards compatibility: public static final String GMS_HOST_ENV_VAR = "DATAHUB_GMS_HOST"; @@ -27,10 +25,14 @@ private ConfigUtil() { public static final String DEFAULT_GMS_PORT = "8080"; public static final String DEFAULT_GMS_USE_SSL = "False"; - public static final String DEFAULT_METADATA_SERVICE_HOST = Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); - public static final Integer DEFAULT_METADATA_SERVICE_PORT = Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); - public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); - public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); + public static final String DEFAULT_METADATA_SERVICE_HOST = + Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); + public static final Integer DEFAULT_METADATA_SERVICE_PORT = + Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); + public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = + Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); + public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = + Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); public static boolean getBoolean(Config config, String key) { return config.hasPath(key) && config.getBoolean(key); diff --git a/datahub-frontend/app/utils/SearchUtil.java b/datahub-frontend/app/utils/SearchUtil.java index 2c52ff5b40156..803c70a63646a 100644 --- a/datahub-frontend/app/utils/SearchUtil.java +++ b/datahub-frontend/app/utils/SearchUtil.java @@ -2,29 +2,26 @@ import javax.annotation.Nonnull; - -/** - * Utility functions for Search - */ +/** Utility functions for Search */ public class SearchUtil { - private SearchUtil() { - //utility class - } + private SearchUtil() { + // utility class + } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - * - * @param input - * @return - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + * + * @param input + * @return + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } + return input; + } } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 9a5fb3210a311..a1b97701dbf88 100644 --- 
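The defaults in ConfigUtil above resolve environment variables with a fallback value via com.linkedin.util.Configuration; a plain-JDK sketch of the same lookup order, using the env var names defined in that file (envOrDefault is an illustrative helper, not that class):

final class EnvFallbackSketch {
  // Prefer the environment variable when set, otherwise the supplied default,
  // mirroring Configuration.getEnvironmentVariable(name, default) above.
  static String envOrDefault(String name, String defaultValue) {
    String value = System.getenv(name);
    return value != null ? value : defaultValue;
  }

  public static void main(String[] args) {
    System.out.println(envOrDefault("DATAHUB_GMS_HOST", "localhost"));
    System.out.println(Integer.parseInt(envOrDefault("DATAHUB_GMS_PORT", "8080")));
    System.out.println(Boolean.parseBoolean(envOrDefault("DATAHUB_GMS_USE_SSL", "False")));
  }
}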
a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -55,8 +55,6 @@ tasks.withType(Checkstyle) { exclude "**/generated/**" } -checkstyleMain.source = "app/" - /* PLAY UPGRADE NOTE diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java index f27fefdb79669..a5da0951d1632 100644 --- a/datahub-frontend/test/app/ApplicationTest.java +++ b/datahub-frontend/test/app/ApplicationTest.java @@ -1,11 +1,22 @@ package app; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static play.mvc.Http.Status.NOT_FOUND; +import static play.mvc.Http.Status.OK; +import static play.test.Helpers.fakeRequest; +import static play.test.Helpers.route; + import com.nimbusds.jwt.JWT; import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.jwt.JWTParser; import controllers.routes; +import java.io.IOException; +import java.net.InetAddress; import java.text.ParseException; import java.util.Date; +import java.util.List; +import java.util.Map; import no.nav.security.mock.oauth2.MockOAuth2Server; import no.nav.security.mock.oauth2.token.DefaultOAuth2TokenCallback; import okhttp3.mockwebserver.MockResponse; @@ -26,22 +37,9 @@ import play.mvc.Http; import play.mvc.Result; import play.test.Helpers; - import play.test.TestBrowser; import play.test.WithBrowser; -import java.io.IOException; -import java.net.InetAddress; -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static play.mvc.Http.Status.NOT_FOUND; -import static play.mvc.Http.Status.OK; -import static play.test.Helpers.fakeRequest; -import static play.test.Helpers.route; - @TestInstance(TestInstance.Lifecycle.PER_CLASS) @SetEnvironmentVariable(key = "DATAHUB_SECRET", value = "test") @SetEnvironmentVariable(key = "KAFKA_BOOTSTRAP_SERVER", value = "") @@ -56,11 +54,15 @@ public class ApplicationTest extends WithBrowser { @Override protected Application provideApplication() { return new GuiceApplicationBuilder() - .configure("metadataService.port", String.valueOf(gmsServerPort())) - .configure("auth.baseUrl", "http://localhost:" + providePort()) - .configure("auth.oidc.discoveryUri", "http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration") - .in(new Environment(Mode.TEST)).build(); + .configure("metadataService.port", String.valueOf(gmsServerPort())) + .configure("auth.baseUrl", "http://localhost:" + providePort()) + .configure( + "auth.oidc.discoveryUri", + "http://localhost:" + + oauthServerPort() + + "/testIssuer/.well-known/openid-configuration") + .in(new Environment(Mode.TEST)) + .build(); } @Override @@ -90,16 +92,20 @@ public int gmsServerPort() { public void init() throws IOException { _gmsServer = new MockWebServer(); _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER))); - _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); + _gmsServer.enqueue( + new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); _gmsServer.start(gmsServerPort()); _oauthServer = new MockOAuth2Server(); _oauthServer.enqueueCallback( - new DefaultOAuth2TokenCallback(ISSUER_ID, "testUser", List.of(), Map.of( - "email", "testUser@myCompany.com", - "groups", "myGroup" - ), 600) - ); + new DefaultOAuth2TokenCallback( + ISSUER_ID, + "testUser", + List.of(), + Map.of( + 
"email", "testUser@myCompany.com", + "groups", "myGroup"), + 600)); _oauthServer.start(InetAddress.getByName("localhost"), oauthServerPort()); // Discovery url to authorization server metadata @@ -147,8 +153,9 @@ public void testIndexNotFound() { @Test public void testOpenIdConfig() { - assertEquals("http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration", _wellKnownUrl); + assertEquals( + "http://localhost:" + oauthServerPort() + "/testIssuer/.well-known/openid-configuration", + _wellKnownUrl); } @Test @@ -166,8 +173,13 @@ public void testHappyPathOidc() throws ParseException { Map<String, String> data = (Map<String, String>) claims.getClaim("data"); assertEquals(TEST_TOKEN, data.get("token")); assertEquals(TEST_USER, data.get("actor")); - // Default expiration is 24h, so should always be less than current time + 1 day since it stamps the time before this executes - assertTrue(claims.getExpirationTime().compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) < 0); + // Default expiration is 24h, so should always be less than current time + 1 day since it stamps + // the time before this executes + assertTrue( + claims + .getExpirationTime() + .compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) + < 0); } @Test diff --git a/datahub-frontend/test/security/DummyLoginModuleTest.java b/datahub-frontend/test/security/DummyLoginModuleTest.java index 6727513d884af..9bf2b5dd4d11c 100644 --- a/datahub-frontend/test/security/DummyLoginModuleTest.java +++ b/datahub-frontend/test/security/DummyLoginModuleTest.java @@ -1,14 +1,12 @@ package security; -import com.sun.security.auth.callback.TextCallbackHandler; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import com.sun.security.auth.callback.TextCallbackHandler; import java.util.HashMap; import javax.security.auth.Subject; import javax.security.auth.login.LoginException; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class DummyLoginModuleTest { diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index ed16014b58e59..a27a1462a8a27 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,8 @@ package security; +import static auth.sso.oidc.OidcConfigs.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import auth.sso.oidc.OidcConfigs; import auth.sso.oidc.OidcProvider; import com.typesafe.config.Config; @@ -19,296 +22,290 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; -import static auth.sso.oidc.OidcConfigs.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - - public class OidcConfigurationTest { - private static final com.typesafe.config.Config CONFIG = new Config() { - - private final Map<String, Object> _map = new HashMap<>(); - - @Override - public ConfigObject root() { - return null; - } - - @Override - public ConfigOrigin origin() { - return null; - } - - @Override - public Config withFallback(ConfigMergeable other) { - return null; - } - - @Override - public Config resolve() { - return null; - } - - @Override - public Config resolve(ConfigResolveOptions options) { - return null; - } - - @Override - public boolean isResolved() { - return false; - } - - @Override - public Config 
resolveWith(Config source) { - return null; - } - - @Override - public Config resolveWith(Config source, ConfigResolveOptions options) { - return null; - } - - @Override - public void checkValid(Config reference, String... restrictToPaths) { - - } - - @Override - public boolean hasPath(String path) { - return true; - } - - @Override - public boolean hasPathOrNull(String path) { - return false; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public Set<Map.Entry<String, ConfigValue>> entrySet() { - return null; - } - - @Override - public boolean getIsNull(String path) { - return false; - } - - @Override - public boolean getBoolean(String path) { - return false; - } - - @Override - public Number getNumber(String path) { - return null; - } - - @Override - public int getInt(String path) { - return 0; - } - - @Override - public long getLong(String path) { - return 0; - } - - @Override - public double getDouble(String path) { - return 0; - } - - @Override - public String getString(String path) { - return (String) _map.getOrDefault(path, "1"); - } - - @Override - public <T extends Enum<T>> T getEnum(Class<T> enumClass, String path) { - return null; - } - - @Override - public ConfigObject getObject(String path) { - return null; - } - - @Override - public Config getConfig(String path) { - return null; - } - - @Override - public Object getAnyRef(String path) { - return null; - } - - @Override - public ConfigValue getValue(String path) { - return null; - } - - @Override - public Long getBytes(String path) { - return null; - } - - @Override - public ConfigMemorySize getMemorySize(String path) { - return null; - } - - @Override - public Long getMilliseconds(String path) { - return null; - } - - @Override - public Long getNanoseconds(String path) { - return null; - } - - @Override - public long getDuration(String path, TimeUnit unit) { - return 0; - } - - @Override - public Duration getDuration(String path) { - return null; - } - - @Override - public Period getPeriod(String path) { - return null; - } - - @Override - public TemporalAmount getTemporal(String path) { - return null; - } - - @Override - public ConfigList getList(String path) { - return null; - } - - @Override - public List<Boolean> getBooleanList(String path) { - return null; - } - - @Override - public List<Number> getNumberList(String path) { - return null; - } - - @Override - public List<Integer> getIntList(String path) { - return null; - } - - @Override - public List<Long> getLongList(String path) { - return null; - } - - @Override - public List<Double> getDoubleList(String path) { - return null; - } - - @Override - public List<String> getStringList(String path) { - return null; - } - - @Override - public <T extends Enum<T>> List<T> getEnumList(Class<T> enumClass, String path) { - return null; - } - - @Override - public List<? extends ConfigObject> getObjectList(String path) { - return null; - } - - @Override - public List<? extends Config> getConfigList(String path) { - return null; - } - - @Override - public List<? 
extends Object> getAnyRefList(String path) { - return null; - } - - @Override - public List<Long> getBytesList(String path) { - return null; - } - - @Override - public List<ConfigMemorySize> getMemorySizeList(String path) { - return null; - } - - @Override - public List<Long> getMillisecondsList(String path) { - return null; - } - - @Override - public List<Long> getNanosecondsList(String path) { - return null; - } - - @Override - public List<Long> getDurationList(String path, TimeUnit unit) { - return null; - } - - @Override - public List<Duration> getDurationList(String path) { - return null; - } - - @Override - public Config withOnlyPath(String path) { - return null; - } - - @Override - public Config withoutPath(String path) { - return null; - } - - @Override - public Config atPath(String path) { - return null; - } - - @Override - public Config atKey(String key) { - return null; - } - - @Override - public Config withValue(String path, ConfigValue value) { - _map.put(path, value.unwrapped()); - return this; - } - }; + private static final com.typesafe.config.Config CONFIG = + new Config() { + + private final Map<String, Object> _map = new HashMap<>(); + + @Override + public ConfigObject root() { + return null; + } + + @Override + public ConfigOrigin origin() { + return null; + } + + @Override + public Config withFallback(ConfigMergeable other) { + return null; + } + + @Override + public Config resolve() { + return null; + } + + @Override + public Config resolve(ConfigResolveOptions options) { + return null; + } + + @Override + public boolean isResolved() { + return false; + } + + @Override + public Config resolveWith(Config source) { + return null; + } + + @Override + public Config resolveWith(Config source, ConfigResolveOptions options) { + return null; + } + + @Override + public void checkValid(Config reference, String... 
restrictToPaths) {} + + @Override + public boolean hasPath(String path) { + return true; + } + + @Override + public boolean hasPathOrNull(String path) { + return false; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public Set<Map.Entry<String, ConfigValue>> entrySet() { + return null; + } + + @Override + public boolean getIsNull(String path) { + return false; + } + + @Override + public boolean getBoolean(String path) { + return false; + } + + @Override + public Number getNumber(String path) { + return null; + } + + @Override + public int getInt(String path) { + return 0; + } + + @Override + public long getLong(String path) { + return 0; + } + + @Override + public double getDouble(String path) { + return 0; + } + + @Override + public String getString(String path) { + return (String) _map.getOrDefault(path, "1"); + } + + @Override + public <T extends Enum<T>> T getEnum(Class<T> enumClass, String path) { + return null; + } + + @Override + public ConfigObject getObject(String path) { + return null; + } + + @Override + public Config getConfig(String path) { + return null; + } + + @Override + public Object getAnyRef(String path) { + return null; + } + + @Override + public ConfigValue getValue(String path) { + return null; + } + + @Override + public Long getBytes(String path) { + return null; + } + + @Override + public ConfigMemorySize getMemorySize(String path) { + return null; + } + + @Override + public Long getMilliseconds(String path) { + return null; + } + + @Override + public Long getNanoseconds(String path) { + return null; + } + + @Override + public long getDuration(String path, TimeUnit unit) { + return 0; + } + + @Override + public Duration getDuration(String path) { + return null; + } + + @Override + public Period getPeriod(String path) { + return null; + } + + @Override + public TemporalAmount getTemporal(String path) { + return null; + } + + @Override + public ConfigList getList(String path) { + return null; + } + + @Override + public List<Boolean> getBooleanList(String path) { + return null; + } + + @Override + public List<Number> getNumberList(String path) { + return null; + } + + @Override + public List<Integer> getIntList(String path) { + return null; + } + + @Override + public List<Long> getLongList(String path) { + return null; + } + + @Override + public List<Double> getDoubleList(String path) { + return null; + } + + @Override + public List<String> getStringList(String path) { + return null; + } + + @Override + public <T extends Enum<T>> List<T> getEnumList(Class<T> enumClass, String path) { + return null; + } + + @Override + public List<? extends ConfigObject> getObjectList(String path) { + return null; + } + + @Override + public List<? extends Config> getConfigList(String path) { + return null; + } + + @Override + public List<? 
extends Object> getAnyRefList(String path) { + return null; + } + + @Override + public List<Long> getBytesList(String path) { + return null; + } + + @Override + public List<ConfigMemorySize> getMemorySizeList(String path) { + return null; + } + + @Override + public List<Long> getMillisecondsList(String path) { + return null; + } + + @Override + public List<Long> getNanosecondsList(String path) { + return null; + } + + @Override + public List<Long> getDurationList(String path, TimeUnit unit) { + return null; + } + + @Override + public List<Duration> getDurationList(String path) { + return null; + } + + @Override + public Config withOnlyPath(String path) { + return null; + } + + @Override + public Config withoutPath(String path) { + return null; + } + + @Override + public Config atPath(String path) { + return null; + } + + @Override + public Config atKey(String key) { + return null; + } + + @Override + public Config withValue(String path, ConfigValue value) { + _map.put(path, value.unwrapped()); + return this; + } + }; @Test public void readTimeoutPropagation() { diff --git a/datahub-frontend/test/utils/SearchUtilTest.java b/datahub-frontend/test/utils/SearchUtilTest.java index 428566ae3f424..6767fa5637469 100644 --- a/datahub-frontend/test/utils/SearchUtilTest.java +++ b/datahub-frontend/test/utils/SearchUtilTest.java @@ -1,17 +1,18 @@ package utils; -import org.junit.jupiter.api.Test; - import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; + public class SearchUtilTest { - @Test - public void testEscapeForwardSlash() { - // escape "/" - assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); - // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to retain the regex behaviour with "*" - assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); - assertEquals("", ""); - assertEquals("foo", "foo"); - } + @Test + public void testEscapeForwardSlash() { + // escape "/" + assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); + // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to + // retain the regex behaviour with "*" + assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); + assertEquals("", ""); + assertEquals("foo", "foo"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 4488f27c19d80..e45bed33eb023 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql; -/** - * Constants relating to GraphQL type system & execution. - */ +/** Constants relating to GraphQL type system & execution. 
*/
 public class Constants {
-    private Constants() { };
+  private Constants() {}
 
-    public static final String URN_FIELD_NAME = "urn";
-    public static final String URNS_FIELD_NAME = "urns";
-    public static final String GMS_SCHEMA_FILE = "entity.graphql";
-    public static final String SEARCH_SCHEMA_FILE = "search.graphql";
-    public static final String APP_SCHEMA_FILE = "app.graphql";
-    public static final String AUTH_SCHEMA_FILE = "auth.graphql";
-    public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql";
-    public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql";
-    public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql";
-    public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql";
-    public static final String TESTS_SCHEMA_FILE = "tests.graphql";
-    public static final String STEPS_SCHEMA_FILE = "step.graphql";
-    public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql";
-    public static final String BROWSE_PATH_DELIMITER = "/";
-    public static final String BROWSE_PATH_V2_DELIMITER = "␟";
-    public static final String VERSION_STAMP_FIELD_NAME = "versionStamp";
-
-    public static final String ENTITY_FILTER_NAME = "_entityType";
+  public static final String URN_FIELD_NAME = "urn";
+  public static final String URNS_FIELD_NAME = "urns";
+  public static final String GMS_SCHEMA_FILE = "entity.graphql";
+  public static final String SEARCH_SCHEMA_FILE = "search.graphql";
+  public static final String APP_SCHEMA_FILE = "app.graphql";
+  public static final String AUTH_SCHEMA_FILE = "auth.graphql";
+  public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql";
+  public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql";
+  public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql";
+  public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql";
+  public static final String TESTS_SCHEMA_FILE = "tests.graphql";
+  public static final String STEPS_SCHEMA_FILE = "step.graphql";
+  public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql";
+  public static final String BROWSE_PATH_DELIMITER = "/";
+  public static final String BROWSE_PATH_V2_DELIMITER = "␟";
+  public static final String VERSION_STAMP_FIELD_NAME = "versionStamp";
+  public static final String ENTITY_FILTER_NAME = "_entityType";
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
index 9ea8126a07ab2..f0cb56b1a99ce 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+import static graphql.scalars.ExtendedScalars.*;
+
 import com.datahub.authentication.AuthenticationConfiguration;
 import com.datahub.authentication.group.GroupService;
 import com.datahub.authentication.invite.InviteTokenService;
@@ -68,7 +72,6 @@
 import com.linkedin.datahub.graphql.generated.ListQueriesResult;
 import com.linkedin.datahub.graphql.generated.ListTestsResult;
 import com.linkedin.datahub.graphql.generated.ListViewsResult;
-import com.linkedin.datahub.graphql.generated.MatchedField;
 import com.linkedin.datahub.graphql.generated.MLFeature;
 import com.linkedin.datahub.graphql.generated.MLFeatureProperties;
 import com.linkedin.datahub.graphql.generated.MLFeatureTable;
@@ -78,6 +81,7 @@
 import com.linkedin.datahub.graphql.generated.MLModelProperties;
 import com.linkedin.datahub.graphql.generated.MLPrimaryKey;
 import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties;
+import com.linkedin.datahub.graphql.generated.MatchedField;
 import com.linkedin.datahub.graphql.generated.Notebook;
 import com.linkedin.datahub.graphql.generated.Owner;
 import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
@@ -284,7 +288,6 @@
 import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType;
 import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper;
 import com.linkedin.datahub.graphql.types.domain.DomainType;
-import com.linkedin.datahub.graphql.types.rolemetadata.RoleType;
 import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType;
 import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType;
 import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType;
@@ -297,6 +300,7 @@
 import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType;
 import com.linkedin.datahub.graphql.types.query.QueryType;
 import com.linkedin.datahub.graphql.types.role.DataHubRoleType;
+import com.linkedin.datahub.graphql.types.rolemetadata.RoleType;
 import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType;
 import com.linkedin.datahub.graphql.types.tag.TagType;
 import com.linkedin.datahub.graphql.types.test.TestType;
@@ -352,205 +356,191 @@
 import org.dataloader.DataLoader;
 import org.dataloader.DataLoaderOptions;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-import static graphql.scalars.ExtendedScalars.*;
-
-
 /**
- * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph.
+ * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the GMS
+ * graph.
*/ @Slf4j @Getter public class GmsGraphQLEngine { - private final EntityClient entityClient; - private final SystemEntityClient systemEntityClient; - private final GraphClient graphClient; - private final UsageClient usageClient; - private final SiblingGraphService siblingGraphService; - - private final EntityService entityService; - private final AnalyticsService analyticsService; - private final RecommendationsService recommendationsService; - private final EntityRegistry entityRegistry; - private final StatefulTokenService statefulTokenService; - private final SecretService secretService; - private final GitVersion gitVersion; - private final boolean supportsImpactAnalysis; - private final TimeseriesAspectService timeseriesAspectService; - private final TimelineService timelineService; - private final NativeUserService nativeUserService; - private final GroupService groupService; - private final RoleService roleService; - private final InviteTokenService inviteTokenService; - private final PostService postService; - private final SettingsService settingsService; - private final ViewService viewService; - private final OwnershipTypeService ownershipTypeService; - private final LineageService lineageService; - private final QueryService queryService; - private final DataProductService dataProductService; - - private final FeatureFlags featureFlags; - - private final IngestionConfiguration ingestionConfiguration; - private final AuthenticationConfiguration authenticationConfiguration; - private final AuthorizationConfiguration authorizationConfiguration; - private final VisualConfiguration visualConfiguration; - private final TelemetryConfiguration telemetryConfiguration; - private final TestsConfiguration testsConfiguration; - private final DataHubConfiguration datahubConfiguration; - private final ViewsConfiguration viewsConfiguration; - - private final DatasetType datasetType; - - private final RoleType roleType; - - private final CorpUserType corpUserType; - private final CorpGroupType corpGroupType; - private final ChartType chartType; - private final DashboardType dashboardType; - private final DataPlatformType dataPlatformType; - private final TagType tagType; - private final MLModelType mlModelType; - private final MLModelGroupType mlModelGroupType; - private final MLFeatureType mlFeatureType; - private final MLFeatureTableType mlFeatureTableType; - private final MLPrimaryKeyType mlPrimaryKeyType; - private final DataFlowType dataFlowType; - private final DataJobType dataJobType; - private final GlossaryTermType glossaryTermType; - private final GlossaryNodeType glossaryNodeType; - private final AspectType aspectType; - private final ContainerType containerType; - private final DomainType domainType; - private final NotebookType notebookType; - private final AssertionType assertionType; - private final VersionedDatasetType versionedDatasetType; - private final DataPlatformInstanceType dataPlatformInstanceType; - private final AccessTokenMetadataType accessTokenMetadataType; - private final TestType testType; - private final DataHubPolicyType dataHubPolicyType; - private final DataHubRoleType dataHubRoleType; - private final SchemaFieldType schemaFieldType; - private final DataHubViewType dataHubViewType; - private final QueryType queryType; - private final DataProductType dataProductType; - private final OwnershipType ownershipType; - - /** - * A list of GraphQL Plugins that extend the core engine - */ - private final List<GmsGraphQLPlugin> graphQLPlugins; - - /** - * Configures 
the graph objects that can be fetched primary key. - */ - public final List<EntityType<?, ?>> entityTypes; - - /** - * Configures all graph objects - */ - public final List<LoadableType<?, ?>> loadableTypes; - - /** - * Configures the graph objects for owner - */ - public final List<LoadableType<?, ?>> ownerTypes; - - /** - * Configures the graph objects that can be searched. - */ - public final List<SearchableEntityType<?, ?>> searchableTypes; - - /** - * Configures the graph objects that can be browsed. - */ - public final List<BrowsableEntityType<?, ?>> browsableTypes; - - public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { - - this.graphQLPlugins = List.of( + private final EntityClient entityClient; + private final SystemEntityClient systemEntityClient; + private final GraphClient graphClient; + private final UsageClient usageClient; + private final SiblingGraphService siblingGraphService; + + private final EntityService entityService; + private final AnalyticsService analyticsService; + private final RecommendationsService recommendationsService; + private final EntityRegistry entityRegistry; + private final StatefulTokenService statefulTokenService; + private final SecretService secretService; + private final GitVersion gitVersion; + private final boolean supportsImpactAnalysis; + private final TimeseriesAspectService timeseriesAspectService; + private final TimelineService timelineService; + private final NativeUserService nativeUserService; + private final GroupService groupService; + private final RoleService roleService; + private final InviteTokenService inviteTokenService; + private final PostService postService; + private final SettingsService settingsService; + private final ViewService viewService; + private final OwnershipTypeService ownershipTypeService; + private final LineageService lineageService; + private final QueryService queryService; + private final DataProductService dataProductService; + + private final FeatureFlags featureFlags; + + private final IngestionConfiguration ingestionConfiguration; + private final AuthenticationConfiguration authenticationConfiguration; + private final AuthorizationConfiguration authorizationConfiguration; + private final VisualConfiguration visualConfiguration; + private final TelemetryConfiguration telemetryConfiguration; + private final TestsConfiguration testsConfiguration; + private final DataHubConfiguration datahubConfiguration; + private final ViewsConfiguration viewsConfiguration; + + private final DatasetType datasetType; + + private final RoleType roleType; + + private final CorpUserType corpUserType; + private final CorpGroupType corpGroupType; + private final ChartType chartType; + private final DashboardType dashboardType; + private final DataPlatformType dataPlatformType; + private final TagType tagType; + private final MLModelType mlModelType; + private final MLModelGroupType mlModelGroupType; + private final MLFeatureType mlFeatureType; + private final MLFeatureTableType mlFeatureTableType; + private final MLPrimaryKeyType mlPrimaryKeyType; + private final DataFlowType dataFlowType; + private final DataJobType dataJobType; + private final GlossaryTermType glossaryTermType; + private final GlossaryNodeType glossaryNodeType; + private final AspectType aspectType; + private final ContainerType containerType; + private final DomainType domainType; + private final NotebookType notebookType; + private final AssertionType assertionType; + private final VersionedDatasetType versionedDatasetType; + private final 
DataPlatformInstanceType dataPlatformInstanceType;
+  private final AccessTokenMetadataType accessTokenMetadataType;
+  private final TestType testType;
+  private final DataHubPolicyType dataHubPolicyType;
+  private final DataHubRoleType dataHubRoleType;
+  private final SchemaFieldType schemaFieldType;
+  private final DataHubViewType dataHubViewType;
+  private final QueryType queryType;
+  private final DataProductType dataProductType;
+  private final OwnershipType ownershipType;
+
+  /** A list of GraphQL Plugins that extend the core engine */
+  private final List<GmsGraphQLPlugin> graphQLPlugins;
+
+  /** Configures the graph objects that can be fetched by primary key. */
+  public final List<EntityType<?, ?>> entityTypes;
+
+  /** Configures all graph objects */
+  public final List<LoadableType<?, ?>> loadableTypes;
+
+  /** Configures the graph objects for owner */
+  public final List<LoadableType<?, ?>> ownerTypes;
+
+  /** Configures the graph objects that can be searched. */
+  public final List<SearchableEntityType<?, ?>> searchableTypes;
+
+  /** Configures the graph objects that can be browsed. */
+  public final List<BrowsableEntityType<?, ?>> browsableTypes;
+
+  public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
+
+    this.graphQLPlugins =
+        List.of(
             // Add new plugins here
-        );
-
-        this.graphQLPlugins.forEach(plugin -> plugin.init(args));
-
-        this.entityClient = args.entityClient;
-        this.systemEntityClient = args.systemEntityClient;
-        this.graphClient = args.graphClient;
-        this.usageClient = args.usageClient;
-        this.siblingGraphService = args.siblingGraphService;
-
-        this.analyticsService = args.analyticsService;
-        this.entityService = args.entityService;
-        this.recommendationsService = args.recommendationsService;
-        this.statefulTokenService = args.statefulTokenService;
-        this.secretService = args.secretService;
-        this.entityRegistry = args.entityRegistry;
-        this.gitVersion = args.gitVersion;
-        this.supportsImpactAnalysis = args.supportsImpactAnalysis;
-        this.timeseriesAspectService = args.timeseriesAspectService;
-        this.timelineService = args.timelineService;
-        this.nativeUserService = args.nativeUserService;
-        this.groupService = args.groupService;
-        this.roleService = args.roleService;
-        this.inviteTokenService = args.inviteTokenService;
-        this.postService = args.postService;
-        this.viewService = args.viewService;
-        this.ownershipTypeService = args.ownershipTypeService;
-        this.settingsService = args.settingsService;
-        this.lineageService = args.lineageService;
-        this.queryService = args.queryService;
-        this.dataProductService = args.dataProductService;
-
-        this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
-        this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration);
-        this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration);
-        this.visualConfiguration = args.visualConfiguration;
-        this.telemetryConfiguration = args.telemetryConfiguration;
-        this.testsConfiguration = args.testsConfiguration;
-        this.datahubConfiguration = args.datahubConfiguration;
-        this.viewsConfiguration = args.viewsConfiguration;
-        this.featureFlags = args.featureFlags;
-
-        this.datasetType = new DatasetType(entityClient);
-        this.roleType = new RoleType(entityClient);
-        this.corpUserType = new CorpUserType(entityClient, featureFlags);
-        this.corpGroupType = new CorpGroupType(entityClient);
-        this.chartType = new ChartType(entityClient);
-        this.dashboardType = new DashboardType(entityClient);
-        this.dataPlatformType
= new DataPlatformType(entityClient); - this.tagType = new TagType(entityClient); - this.mlModelType = new MLModelType(entityClient); - this.mlModelGroupType = new MLModelGroupType(entityClient); - this.mlFeatureType = new MLFeatureType(entityClient); - this.mlFeatureTableType = new MLFeatureTableType(entityClient); - this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); - this.dataFlowType = new DataFlowType(entityClient); - this.dataJobType = new DataJobType(entityClient); - this.glossaryTermType = new GlossaryTermType(entityClient); - this.glossaryNodeType = new GlossaryNodeType(entityClient); - this.aspectType = new AspectType(entityClient); - this.containerType = new ContainerType(entityClient); - this.domainType = new DomainType(entityClient); - this.notebookType = new NotebookType(entityClient); - this.assertionType = new AssertionType(entityClient); - this.versionedDatasetType = new VersionedDatasetType(entityClient); - this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); - this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); - this.testType = new TestType(entityClient); - this.dataHubPolicyType = new DataHubPolicyType(entityClient); - this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); - this.dataHubViewType = new DataHubViewType(entityClient); - this.queryType = new QueryType(entityClient); - this.dataProductType = new DataProductType(entityClient); - this.ownershipType = new OwnershipType(entityClient); - - // Init Lists - this.entityTypes = ImmutableList.of( + ); + + this.graphQLPlugins.forEach(plugin -> plugin.init(args)); + + this.entityClient = args.entityClient; + this.systemEntityClient = args.systemEntityClient; + this.graphClient = args.graphClient; + this.usageClient = args.usageClient; + this.siblingGraphService = args.siblingGraphService; + + this.analyticsService = args.analyticsService; + this.entityService = args.entityService; + this.recommendationsService = args.recommendationsService; + this.statefulTokenService = args.statefulTokenService; + this.secretService = args.secretService; + this.entityRegistry = args.entityRegistry; + this.gitVersion = args.gitVersion; + this.supportsImpactAnalysis = args.supportsImpactAnalysis; + this.timeseriesAspectService = args.timeseriesAspectService; + this.timelineService = args.timelineService; + this.nativeUserService = args.nativeUserService; + this.groupService = args.groupService; + this.roleService = args.roleService; + this.inviteTokenService = args.inviteTokenService; + this.postService = args.postService; + this.viewService = args.viewService; + this.ownershipTypeService = args.ownershipTypeService; + this.settingsService = args.settingsService; + this.lineageService = args.lineageService; + this.queryService = args.queryService; + this.dataProductService = args.dataProductService; + + this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); + this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); + this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration); + this.visualConfiguration = args.visualConfiguration; + this.telemetryConfiguration = args.telemetryConfiguration; + this.testsConfiguration = args.testsConfiguration; + this.datahubConfiguration = args.datahubConfiguration; + this.viewsConfiguration = args.viewsConfiguration; + this.featureFlags = args.featureFlags; + + this.datasetType = new 
DatasetType(entityClient); + this.roleType = new RoleType(entityClient); + this.corpUserType = new CorpUserType(entityClient, featureFlags); + this.corpGroupType = new CorpGroupType(entityClient); + this.chartType = new ChartType(entityClient); + this.dashboardType = new DashboardType(entityClient); + this.dataPlatformType = new DataPlatformType(entityClient); + this.tagType = new TagType(entityClient); + this.mlModelType = new MLModelType(entityClient); + this.mlModelGroupType = new MLModelGroupType(entityClient); + this.mlFeatureType = new MLFeatureType(entityClient); + this.mlFeatureTableType = new MLFeatureTableType(entityClient); + this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); + this.dataFlowType = new DataFlowType(entityClient); + this.dataJobType = new DataJobType(entityClient); + this.glossaryTermType = new GlossaryTermType(entityClient); + this.glossaryNodeType = new GlossaryNodeType(entityClient); + this.aspectType = new AspectType(entityClient); + this.containerType = new ContainerType(entityClient); + this.domainType = new DomainType(entityClient); + this.notebookType = new NotebookType(entityClient); + this.assertionType = new AssertionType(entityClient); + this.versionedDatasetType = new VersionedDatasetType(entityClient); + this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); + this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); + this.testType = new TestType(entityClient); + this.dataHubPolicyType = new DataHubPolicyType(entityClient); + this.dataHubRoleType = new DataHubRoleType(entityClient); + this.schemaFieldType = new SchemaFieldType(); + this.dataHubViewType = new DataHubViewType(entityClient); + this.queryType = new QueryType(entityClient); + this.dataProductType = new DataProductType(entityClient); + this.ownershipType = new OwnershipType(entityClient); + + // Init Lists + this.entityTypes = + ImmutableList.of( datasetType, roleType, corpUserType, @@ -582,1262 +572,1867 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataHubViewType, queryType, dataProductType, - ownershipType - ); - this.loadableTypes = new ArrayList<>(entityTypes); - // Extend loadable types with types from the plugins - // This allows us to offer search and browse capabilities out of the box for those types - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - this.loadableTypes.addAll(pluginLoadableTypes); - } - } - this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); - this.searchableTypes = loadableTypes.stream() + ownershipType); + this.loadableTypes = new ArrayList<>(entityTypes); + // Extend loadable types with types from the plugins + // This allows us to offer search and browse capabilities out of the box for those types + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + Collection<? 
extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + this.loadableTypes.addAll(pluginLoadableTypes); + } + } + this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); + this.searchableTypes = + loadableTypes.stream() .filter(type -> (type instanceof SearchableEntityType<?, ?>)) .map(type -> (SearchableEntityType<?, ?>) type) .collect(Collectors.toList()); - this.browsableTypes = loadableTypes.stream() + this.browsableTypes = + loadableTypes.stream() .filter(type -> (type instanceof BrowsableEntityType<?, ?>)) .map(type -> (BrowsableEntityType<?, ?>) type) .collect(Collectors.toList()); - } + } - /** - * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from - * a {@link LoadableType}. - */ - public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(final Collection<? extends LoadableType<?, ?>> loadableTypes) { - return loadableTypes - .stream() - .collect(Collectors.toMap( + /** + * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link + * LoadableType}. + */ + public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers( + final Collection<? extends LoadableType<?, ?>> loadableTypes) { + return loadableTypes.stream() + .collect( + Collectors.toMap( LoadableType::name, - (graphType) -> (context) -> createDataLoader(graphType, context) - )); - } + (graphType) -> (context) -> createDataLoader(graphType, context))); + } - /** - * Final call to wire up any extra resolvers the plugin might want to add on - * @param builder - */ - private void configurePluginResolvers(final RuntimeWiring.Builder builder) { - this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); - } - - - public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { - configureQueryResolvers(builder); - configureMutationResolvers(builder); - configureGenericEntityResolvers(builder); - configureDatasetResolvers(builder); - configureCorpUserResolvers(builder); - configureCorpGroupResolvers(builder); - configureDashboardResolvers(builder); - configureNotebookResolvers(builder); - configureChartResolvers(builder); - configureTypeResolvers(builder); - configureTypeExtensions(builder); - configureTagAssociationResolver(builder); - configureGlossaryTermAssociationResolver(builder); - configureDataJobResolvers(builder); - configureDataFlowResolvers(builder); - configureMLFeatureTableResolvers(builder); - configureGlossaryRelationshipResolvers(builder); - configureIngestionSourceResolvers(builder); - configureAnalyticsResolvers(builder); - configureContainerResolvers(builder); - configureDataPlatformInstanceResolvers(builder); - configureGlossaryTermResolvers(builder); - configureOrganisationRoleResolvers(builder); - configureGlossaryNodeResolvers(builder); - configureDomainResolvers(builder); - configureDataProductResolvers(builder); - configureAssertionResolvers(builder); - configurePolicyResolvers(builder); - configureDataProcessInstanceResolvers(builder); - configureVersionedDatasetResolvers(builder); - configureAccessAccessTokenMetadataResolvers(builder); - configureTestResultResolvers(builder); - configureRoleResolvers(builder); - configureSchemaFieldResolvers(builder); - configureEntityPathResolvers(builder); - configureViewResolvers(builder); - configureQueryEntityResolvers(builder); - configureOwnershipTypeResolver(builder); - configurePluginResolvers(builder); - } - - private void 
configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { - builder.type("Role", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("RoleAssociation", typeWiring -> typeWiring - .dataFetcher("role", - new LoadableTypeResolver<>(roleType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleAssociation) - env.getSource()).getRole().getUrn())) - ); - builder.type("RoleUser", typeWiring -> typeWiring - .dataFetcher("user", - new LoadableTypeResolver<>(corpUserType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleUser) - env.getSource()).getUser().getUrn())) - ); + /** + * Final call to wire up any extra resolvers the plugin might want to add on + * + * @param builder + */ + private void configurePluginResolvers(final RuntimeWiring.Builder builder) { + this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); + } + + public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { + configureQueryResolvers(builder); + configureMutationResolvers(builder); + configureGenericEntityResolvers(builder); + configureDatasetResolvers(builder); + configureCorpUserResolvers(builder); + configureCorpGroupResolvers(builder); + configureDashboardResolvers(builder); + configureNotebookResolvers(builder); + configureChartResolvers(builder); + configureTypeResolvers(builder); + configureTypeExtensions(builder); + configureTagAssociationResolver(builder); + configureGlossaryTermAssociationResolver(builder); + configureDataJobResolvers(builder); + configureDataFlowResolvers(builder); + configureMLFeatureTableResolvers(builder); + configureGlossaryRelationshipResolvers(builder); + configureIngestionSourceResolvers(builder); + configureAnalyticsResolvers(builder); + configureContainerResolvers(builder); + configureDataPlatformInstanceResolvers(builder); + configureGlossaryTermResolvers(builder); + configureOrganisationRoleResolvers(builder); + configureGlossaryNodeResolvers(builder); + configureDomainResolvers(builder); + configureDataProductResolvers(builder); + configureAssertionResolvers(builder); + configurePolicyResolvers(builder); + configureDataProcessInstanceResolvers(builder); + configureVersionedDatasetResolvers(builder); + configureAccessAccessTokenMetadataResolvers(builder); + configureTestResultResolvers(builder); + configureRoleResolvers(builder); + configureSchemaFieldResolvers(builder); + configureEntityPathResolvers(builder); + configureViewResolvers(builder); + configureQueryEntityResolvers(builder); + configureOwnershipTypeResolver(builder); + configurePluginResolvers(builder); + } + + private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { + builder.type( + "Role", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "RoleAssociation", + typeWiring -> + typeWiring.dataFetcher( + "role", + new LoadableTypeResolver<>( + roleType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleAssociation) env.getSource()) + .getRole() + .getUrn()))); + builder.type( + "RoleUser", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleUser) env.getSource()) + .getUser() + .getUrn()))); + } + + public GraphQLEngine.Builder builder() { + final GraphQLEngine.Builder builder = GraphQLEngine.builder(); + builder + .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) + 
.addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) + .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + List<String> pluginSchemaFiles = plugin.getSchemaFiles(); + if (pluginSchemaFiles != null) { + pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); + } + Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + pluginLoadableTypes.forEach( + loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); + } } - - public GraphQLEngine.Builder builder() { - final GraphQLEngine.Builder builder = GraphQLEngine.builder(); - builder - .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) - .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); - - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - List<String> pluginSchemaFiles = plugin.getSchemaFiles(); - if (pluginSchemaFiles != null) { - pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); - } - Collection<? 
extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - pluginLoadableTypes.forEach(loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); - } - } - builder - .addDataLoaders(loaderSuppliers(loadableTypes)) - .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) - .configureRuntimeWiring(this::configureRuntimeWiring); - return builder; + builder + .addDataLoaders(loaderSuppliers(loadableTypes)) + .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) + .configureRuntimeWiring(this::configureRuntimeWiring); + return builder; + } + + public static String fileBasedSchema(String fileName) { + String schema; + try { + InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); + schema = IOUtils.toString(is, StandardCharsets.UTF_8); + is.close(); + } catch (IOException e) { + throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); } - - public static String fileBasedSchema(String fileName) { - String schema; - try { - InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); - schema = IOUtils.toString(is, StandardCharsets.UTF_8); - is.close(); - } catch (IOException e) { - throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); - } - return schema; + return schema; + } + + private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { + final boolean isAnalyticsEnabled = analyticsService != null; + builder + .type( + "Query", + typeWiring -> + typeWiring.dataFetcher( + "isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) + .type( + "AnalyticsChart", + typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); + if (isAnalyticsEnabled) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "getAnalyticsCharts", new GetChartsResolver(analyticsService, entityClient)) + .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) + .dataFetcher( + "getMetadataAnalyticsCharts", + new GetMetadataAnalyticsResolver(entityClient))); } + } - private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { - final boolean isAnalyticsEnabled = analyticsService != null; - builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) - .type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); - if (isAnalyticsEnabled) { - builder.type("Query", typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts", - new GetChartsResolver(analyticsService, entityClient)) - .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) - .dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient))); - } - } - - private void configureContainerResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Container", typeWiring -> typeWiring + private void configureContainerResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Container", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, + .dataFetcher( + "platform", + new 
LoadableTypeResolver<>( + dataPlatformType, (env) -> ((Container) env.getSource()).getPlatform().getUrn())) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Container container = env.getSource(); - return container.getContainer() != null ? container.getContainer().getUrn() : null; - }) - ) + final Container container = env.getSource(); + return container.getContainer() != null + ? container.getContainer().getUrn() + : null; + })) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Container container = env.getSource(); - return container.getDataPlatformInstance() != null ? container.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataPlatformInstance", typeWiring -> typeWiring - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn())) - ); - } - - private void configureQueryResolvers(final RuntimeWiring.Builder builder) { - builder.type("Query", typeWiring -> typeWiring - .dataFetcher("appConfig", - new AppConfigResolver(gitVersion, analyticsService != null, - this.ingestionConfiguration, - this.authenticationConfiguration, - this.authorizationConfiguration, - this.supportsImpactAnalysis, - this.visualConfiguration, - this.telemetryConfiguration, - this.testsConfiguration, - this.datahubConfiguration, - this.viewsConfiguration, - this.featureFlags - )) - .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) - .dataFetcher("search", new SearchResolver(this.entityClient)) - .dataFetcher("searchAcrossEntities", new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) - .dataFetcher("scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) - .dataFetcher("aggregateAcrossEntities", new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) - .dataFetcher("autoCompleteForMultiple", new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) - .dataFetcher("browse", new BrowseResolver(browsableTypes)) - .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) - .dataFetcher("dataset", getResolver(datasetType)) - .dataFetcher("role", getResolver(roleType)) - .dataFetcher("versionedDataset", getResolver(versionedDatasetType, - (env) -> new VersionedUrn().setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) - .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) - .dataFetcher("notebook", getResolver(notebookType)) - .dataFetcher("corpUser", getResolver(corpUserType)) - .dataFetcher("corpGroup", getResolver(corpGroupType)) - .dataFetcher("dashboard", getResolver(dashboardType)) - .dataFetcher("chart", getResolver(chartType)) - .dataFetcher("tag", getResolver(tagType)) - .dataFetcher("dataFlow", getResolver(dataFlowType)) - .dataFetcher("dataJob", 
getResolver(dataJobType)) - .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) - .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) - .dataFetcher("domain", getResolver((domainType))) - .dataFetcher("dataPlatform", getResolver(dataPlatformType)) - .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) - .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) - .dataFetcher("mlFeature", getResolver(mlFeatureType)) - .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) - .dataFetcher("mlModel", getResolver(mlModelType)) - .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) - .dataFetcher("assertion", getResolver(assertionType)) - .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) - .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) - .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) - .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) - .dataFetcher("listRecommendations", new ListRecommendationsResolver(recommendationsService)) - .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) - .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) - .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) - .dataFetcher("container", getResolver(containerType)) - .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) - .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) - .dataFetcher("getSecretValues", new GetSecretValuesResolver(this.entityClient, this.secretService)) - .dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) - .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) - .dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) - .dataFetcher("getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) - .dataFetcher("test", getResolver(testType)) - .dataFetcher("listTests", new ListTestsResolver(entityClient)) - .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) - .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) - .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) - .dataFetcher("entity", getEntityResolver()) - .dataFetcher("entities", getEntitiesResolver()) - .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) - .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) - .dataFetcher("batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) - .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) - .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) - .dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) - .dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService)) - .dataFetcher("dataProduct", getResolver(dataProductType)) - .dataFetcher("listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) - 
.dataFetcher("browseV2", new BrowseV2Resolver(this.entityClient, this.viewService)) - ); - } - - private DataFetcher getEntitiesResolver() { - return new BatchGetEntitiesResolver(entityTypes, - (env) -> { - List<String> urns = env.getArgument(URNS_FIELD_NAME); - return urns.stream().map((urn) -> { + final Container container = env.getSource(); + return container.getDataPlatformInstance() != null + ? container.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataPlatformInstance", + typeWiring -> + typeWiring.dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn()))); + } + + private void configureQueryResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "appConfig", + new AppConfigResolver( + gitVersion, + analyticsService != null, + this.ingestionConfiguration, + this.authenticationConfiguration, + this.authorizationConfiguration, + this.supportsImpactAnalysis, + this.visualConfiguration, + this.telemetryConfiguration, + this.testsConfiguration, + this.datahubConfiguration, + this.viewsConfiguration, + this.featureFlags)) + .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) + .dataFetcher("search", new SearchResolver(this.entityClient)) + .dataFetcher( + "searchAcrossEntities", + new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "scrollAcrossEntities", + new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "aggregateAcrossEntities", + new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) + .dataFetcher( + "autoCompleteForMultiple", + new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) + .dataFetcher("browse", new BrowseResolver(browsableTypes)) + .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) + .dataFetcher("dataset", getResolver(datasetType)) + .dataFetcher("role", getResolver(roleType)) + .dataFetcher( + "versionedDataset", + getResolver( + versionedDatasetType, + (env) -> + new VersionedUrn() + .setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) + .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) + .dataFetcher("notebook", getResolver(notebookType)) + .dataFetcher("corpUser", getResolver(corpUserType)) + .dataFetcher("corpGroup", getResolver(corpGroupType)) + .dataFetcher("dashboard", getResolver(dashboardType)) + .dataFetcher("chart", getResolver(chartType)) + .dataFetcher("tag", getResolver(tagType)) + .dataFetcher("dataFlow", getResolver(dataFlowType)) + .dataFetcher("dataJob", getResolver(dataJobType)) + .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) + .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) + .dataFetcher("domain", getResolver((domainType))) + .dataFetcher("dataPlatform", getResolver(dataPlatformType)) + .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) + .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) + .dataFetcher("mlFeature", getResolver(mlFeatureType)) + .dataFetcher("mlPrimaryKey", 
getResolver(mlPrimaryKeyType)) + .dataFetcher("mlModel", getResolver(mlModelType)) + .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) + .dataFetcher("assertion", getResolver(assertionType)) + .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) + .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) + .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) + .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) + .dataFetcher( + "listRecommendations", new ListRecommendationsResolver(recommendationsService)) + .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) + .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) + .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) + .dataFetcher("container", getResolver(containerType)) + .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) + .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) + .dataFetcher( + "getSecretValues", + new GetSecretValuesResolver(this.entityClient, this.secretService)) + .dataFetcher( + "listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) + .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) + .dataFetcher( + "getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) + .dataFetcher("test", getResolver(testType)) + .dataFetcher("listTests", new ListTestsResolver(entityClient)) + .dataFetcher( + "getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) + .dataFetcher( + "getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) + .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) + .dataFetcher("entity", getEntityResolver()) + .dataFetcher("entities", getEntitiesResolver()) + .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) + .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) + .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) + .dataFetcher( + "batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) + .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) + .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) + .dataFetcher( + "globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) + .dataFetcher( + "getQuickFilters", + new GetQuickFiltersResolver(this.entityClient, this.viewService)) + .dataFetcher("dataProduct", getResolver(dataProductType)) + .dataFetcher( + "listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) + .dataFetcher( + "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + } + + private DataFetcher getEntitiesResolver() { + return new BatchGetEntitiesResolver( + entityTypes, + (env) -> { + List<String> urns = env.getArgument(URNS_FIELD_NAME); + return urns.stream() + .map( + (urn) -> { try { - Urn entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + Urn entityUrn = Urn.createFromString(urn); + return 
UrnToEntityMapper.map(entityUrn); } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); + throw new RuntimeException("Failed to get entity", e); } - }).collect(Collectors.toList()); - }); - } + }) + .collect(Collectors.toList()); + }); + } + + private DataFetcher getEntityResolver() { + return new EntityTypeResolver( + entityTypes, + (env) -> { + try { + Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); + return UrnToEntityMapper.map(urn); + } catch (Exception e) { + throw new RuntimeException("Failed to get entity", e); + } + }); + } + + private DataFetcher getResolver(LoadableType<?, String> loadableType) { + return getResolver(loadableType, this::getUrnField); + } + + private <T, K> DataFetcher getResolver( + LoadableType<T, K> loadableType, Function<DataFetchingEnvironment, K> keyProvider) { + return new LoadableTypeResolver<>(loadableType, keyProvider); + } + + private String getUrnField(DataFetchingEnvironment env) { + return env.getArgument(URN_FIELD_NAME); + } + + private void configureMutationResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Mutation", + typeWiring -> + typeWiring + .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) + .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) + .dataFetcher( + "createTag", new CreateTagResolver(this.entityClient, this.entityService)) + .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) + .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) + .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) + .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) + .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) + .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) + .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) + .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) + .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) + .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) + .dataFetcher("addTag", new AddTagResolver(entityService)) + .dataFetcher("addTags", new AddTagsResolver(entityService)) + .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) + .dataFetcher("removeTag", new RemoveTagResolver(entityService)) + .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) + .dataFetcher("addTerm", new AddTermResolver(entityService)) + .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) + .dataFetcher("addTerms", new AddTermsResolver(entityService)) + .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) + .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) + .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) + .dataFetcher( + "updateDescription", + new UpdateDescriptionResolver(entityService, this.entityClient)) + .dataFetcher("addOwner", new AddOwnerResolver(entityService)) + .dataFetcher("addOwners", new AddOwnersResolver(entityService)) + .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) + .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) + .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) + 
.dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) + .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) + .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) + .dataFetcher( + "removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) + .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) + .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) + .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) + .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) + .dataFetcher( + "createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) + .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) + .dataFetcher( + "setDomain", new SetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) + .dataFetcher( + "updateDeprecation", + new UpdateDeprecationResolver(this.entityClient, this.entityService)) + .dataFetcher( + "batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) + .dataFetcher( + "unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) + .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) + .dataFetcher( + "revokeAccessToken", + new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) + .dataFetcher( + "createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "createIngestionExecutionRequest", + new CreateIngestionExecutionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "cancelIngestionExecutionRequest", + new CancelIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher( + "createTestConnectionRequest", + new CreateTestConnectionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "deleteAssertion", + new DeleteAssertionResolver(this.entityClient, this.entityService)) + .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) + .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) + .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) + .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) + .dataFetcher( + "createGlossaryTerm", + new CreateGlossaryTermResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createGlossaryNode", + new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateParentNode", + new UpdateParentNodeResolver(this.entityService, this.entityClient)) + .dataFetcher( + "deleteGlossaryEntity", + new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) + .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) + .dataFetcher( + "removeRelatedTerms", new 
RemoveRelatedTermsResolver(this.entityService)) + .dataFetcher( + "createNativeUserResetToken", + new CreateNativeUserResetTokenResolver(this.nativeUserService)) + .dataFetcher( + "batchUpdateSoftDeleted", + new BatchUpdateSoftDeletedResolver(this.entityService)) + .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) + .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) + .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) + .dataFetcher( + "createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) + .dataFetcher( + "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) + .dataFetcher("createPost", new CreatePostResolver(this.postService)) + .dataFetcher("deletePost", new DeletePostResolver(this.postService)) + .dataFetcher( + "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) + .dataFetcher("createView", new CreateViewResolver(this.viewService)) + .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) + .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) + .dataFetcher( + "updateGlobalViewsSettings", + new UpdateGlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateCorpUserViewsSettings", + new UpdateCorpUserViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateLineage", + new UpdateLineageResolver(this.entityService, this.lineageService)) + .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) + .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) + .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) + .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) + .dataFetcher( + "createDataProduct", new CreateDataProductResolver(this.dataProductService)) + .dataFetcher( + "updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) + .dataFetcher( + "deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) + .dataFetcher( + "batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) + .dataFetcher( + "createOwnershipType", + new CreateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "updateOwnershipType", + new UpdateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "deleteOwnershipType", + new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + } + + private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "SearchResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((SearchResult) env.getSource()).getEntity()))) + .type( + "MatchedField", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((MatchedField) env.getSource()).getEntity()))) + .type( + "SearchAcrossLineageResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity()))) + .type( + "AggregationMetadata", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((AggregationMetadata) env.getSource()).getEntity()))) + .type( + "RecommendationContent", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((RecommendationContent) env.getSource()).getEntity()))) + .type( + 
"BrowseResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, (env) -> ((BrowseResults) env.getSource()).getEntities()))) + .type( + "ParentDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new EntityTypeBatchResolver( + entityTypes, + (env) -> { + final ParentDomainsResult result = env.getSource(); + return result != null ? result.getDomains() : null; + }))) + .type( + "EntityRelationshipLegacy", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity()))) + .type( + "EntityRelationship", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((EntityRelationship) env.getSource()).getEntity()))) + .type( + "BrowseResultGroupV2", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity()))) + .type( + "BrowsePathEntry", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))) + .type( + "LineageRelationship", + typeWiring -> + typeWiring + .dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((LineageRelationship) env.getSource()).getEntity())) + .dataFetcher( + "createdActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getCreatedActor() != null + ? relationship.getCreatedActor() + : null; + })) + .dataFetcher( + "updatedActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getUpdatedActor() != null + ? 
relationship.getUpdatedActor() + : null; + }))) + .type( + "ListDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new LoadableTypeBatchResolver<>( + domainType, + (env) -> + ((ListDomainsResult) env.getSource()) + .getDomains().stream() + .map(Domain::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryTermsResult", + typeWiring -> + typeWiring.dataFetcher( + "terms", + new LoadableTypeBatchResolver<>( + glossaryTermType, + (env) -> + ((GetRootGlossaryTermsResult) env.getSource()) + .getTerms().stream() + .map(GlossaryTerm::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryNodesResult", + typeWiring -> + typeWiring.dataFetcher( + "nodes", + new LoadableTypeBatchResolver<>( + glossaryNodeType, + (env) -> + ((GetRootGlossaryNodesResult) env.getSource()) + .getNodes().stream() + .map(GlossaryNode::getUrn) + .collect(Collectors.toList())))) + .type( + "AutoCompleteResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResults) env.getSource()).getEntities()))) + .type( + "AutoCompleteResultForEntity", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))) + .type( + "PolicyMatchCriterionValue", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity()))) + .type( + "ListTestsResult", + typeWiring -> + typeWiring.dataFetcher( + "tests", + new LoadableTypeBatchResolver<>( + testType, + (env) -> + ((ListTestsResult) env.getSource()) + .getTests().stream() + .map(Test::getUrn) + .collect(Collectors.toList())))) + .type( + "QuickFilter", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((QuickFilter) env.getSource()).getEntity()))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "ownershipType", + new EntityTypeResolver( + entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))); + } - private DataFetcher getEntityResolver() { - return new EntityTypeResolver(entityTypes, - (env) -> { - try { - Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); - } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); - } - }); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dataset} type. + */ + private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Dataset", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dataset) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getContainer() != null + ? 
dataset.getContainer().getUrn() + : null; + })) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getDataPlatformInstance() != null + ? dataset.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "datasetProfiles", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "datasetProfile", + DatasetProfileMapper::map)) + .dataFetcher( + "operations", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "operation", + OperationMapper::map, + new SortCriterion() + .setField(OPERATION_EVENT_TIME_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))) + .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) + .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) + .dataFetcher( + "health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher( + "assertions", new EntityAssertionsResolver(entityClient, graphClient)) + .dataFetcher("testResults", new TestResultsResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("runs", new EntityRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "UserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn()))) + .type( + "ForeignKeyConstraint", + typeWiring -> + typeWiring.dataFetcher( + "foreignDataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> + ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn()))) + .type( + "SiblingProperties", + typeWiring -> + typeWiring.dataFetcher( + "siblings", + new EntityTypeBatchResolver( + new ArrayList<>(entityTypes), + (env) -> ((SiblingProperties) env.getSource()).getSiblings()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))) + .type( + "DatasetStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } - private DataFetcher getResolver(LoadableType<?, String> loadableType) { - return getResolver(loadableType, this::getUrnField); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.VersionedDataset} type. 
+ */ + private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionedDataset", + typeWiring -> typeWiring.dataFetcher("relationships", new StaticDataFetcher(null))); + } - private <T, K> DataFetcher getResolver(LoadableType<T, K> loadableType, - Function<DataFetchingEnvironment, K> keyProvider) { - return new LoadableTypeResolver<>(loadableType, keyProvider); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. + */ + private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "AccessToken", + typeWiring -> + typeWiring.dataFetcher( + "metadata", + new LoadableTypeResolver<>( + accessTokenMetadataType, + (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn()))); + builder.type( + "ListAccessTokenResult", + typeWiring -> + typeWiring.dataFetcher( + "tokens", + new LoadableTypeBatchResolver<>( + accessTokenMetadataType, + (env) -> + ((ListAccessTokenResult) env.getSource()) + .getTokens().stream() + .map(AccessTokenMetadata::getUrn) + .collect(Collectors.toList())))); + } + + private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTerm", + typeWiring -> + typeWiring + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryNode", + typeWiring -> + typeWiring + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "SchemaFieldEntity", + typeWiring -> + typeWiring.dataFetcher( + "parent", + new EntityTypeResolver( + entityTypes, (env) -> ((SchemaFieldEntity) env.getSource()).getParent()))); + } + + private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "EntityPath", + typeWiring -> + typeWiring.dataFetcher( + "path", + new BatchGetEntitiesResolver( + entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); + } - private String getUrnField(DataFetchingEnvironment env) { - return env.getArgument(URN_FIELD_NAME); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpUser} type. 
+ */ + private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpUser", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "CorpUserInfo", + typeWiring -> + typeWiring.dataFetcher( + "manager", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn()))); + } - private void configureMutationResolvers(final RuntimeWiring.Builder builder) { - builder.type("Mutation", typeWiring -> typeWiring - .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) - .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) - .dataFetcher("createTag", new CreateTagResolver(this.entityClient, this.entityService)) - .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) - .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) - .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) - .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) - .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) - .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) - .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) - .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) - .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) - .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) - .dataFetcher("addTag", new AddTagResolver(entityService)) - .dataFetcher("addTags", new AddTagsResolver(entityService)) - .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) - .dataFetcher("removeTag", new RemoveTagResolver(entityService)) - .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) - .dataFetcher("addTerm", new AddTermResolver(entityService)) - .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) - .dataFetcher("addTerms", new AddTermsResolver(entityService)) - .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) - .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) - .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) - .dataFetcher("updateDescription", new UpdateDescriptionResolver(entityService, this.entityClient)) - .dataFetcher("addOwner", new AddOwnerResolver(entityService)) - .dataFetcher("addOwners", new AddOwnersResolver(entityService)) - .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) - .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) - .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) - .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) - .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) - .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) - .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) - .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) - .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) - .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) - .dataFetcher("updateUserStatus", new 
UpdateUserStatusResolver(this.entityClient)) - .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) - .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) - .dataFetcher("updateDeprecation", new UpdateDeprecationResolver(this.entityClient, this.entityService)) - .dataFetcher("batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) - .dataFetcher("unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) - .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) - .dataFetcher("createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) - .dataFetcher("revokeAccessToken", new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) - .dataFetcher("createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) - .dataFetcher("createIngestionExecutionRequest", new CreateIngestionExecutionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("cancelIngestionExecutionRequest", new CancelIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("createTestConnectionRequest", new CreateTestConnectionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("deleteAssertion", new DeleteAssertionResolver(this.entityClient, this.entityService)) - .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) - .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) - .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) - .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) - .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient, this.entityService)) - .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) - .dataFetcher("updateParentNode", new UpdateParentNodeResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteGlossaryEntity", - new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) - .dataFetcher("updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) - .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) - .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) - .dataFetcher("batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService)) - .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) - .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) - .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) - .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, 
this.inviteTokenService)) - .dataFetcher("createPost", new CreatePostResolver(this.postService)) - .dataFetcher("deletePost", new DeletePostResolver(this.postService)) - .dataFetcher("batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) - .dataFetcher("createView", new CreateViewResolver(this.viewService)) - .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) - .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) - .dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateLineage", new UpdateLineageResolver(this.entityService, this.lineageService)) - .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) - .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) - .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) - .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) - .dataFetcher("createDataProduct", new CreateDataProductResolver(this.dataProductService)) - .dataFetcher("updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) - .dataFetcher("deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) - .dataFetcher("batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) - .dataFetcher("createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService)) - ); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpGroup} type. 
+ */ + private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpGroup", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder + .type( + "CorpGroupInfo", + typeWiring -> + typeWiring + .dataFetcher( + "admins", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getAdmins().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()))) + .dataFetcher( + "members", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getMembers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList())))) + .type( + "ListGroupsResult", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> + ((ListGroupsResult) env.getSource()) + .getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList())))); + } + + private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "Tag", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "TagAssociation", + typeWiring -> + typeWiring.dataFetcher( + "tag", + new LoadableTypeResolver<>( + tagType, + (env) -> + ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()) + .getTag() + .getUrn()))); + } + + private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTermAssociation", + typeWiring -> + typeWiring.dataFetcher( + "term", + new LoadableTypeResolver<>( + glossaryTermType, + (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn()))); + } - private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("SearchResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchResult) env.getSource()).getEntity())) - ) - .type("MatchedField", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((MatchedField) env.getSource()).getEntity())) - ) - .type("SearchAcrossLineageResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity())) - ) - .type("AggregationMetadata", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((AggregationMetadata) env.getSource()).getEntity())) - ) - .type("RecommendationContent", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((RecommendationContent) env.getSource()).getEntity())) - ) - .type("BrowseResults", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((BrowseResults) env.getSource()).getEntities())) - ) - .type("ParentDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new EntityTypeBatchResolver(entityTypes, - (env) -> { - final ParentDomainsResult result = env.getSource(); - return result != null ? 
result.getDomains() : null; - })) - ) - .type("EntityRelationshipLegacy", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity())) - ) - .type("EntityRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationship) env.getSource()).getEntity())) - ) - .type("BrowseResultGroupV2", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity())) - ) - .type("BrowsePathEntry", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowsePathEntry) env.getSource()).getEntity())) - ) - .type("LineageRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((LineageRelationship) env.getSource()).getEntity())) - .dataFetcher("createdActor", - new EntityTypeResolver(entityTypes, - (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getCreatedActor() != null ? relationship.getCreatedActor() : null; - }) - ) - .dataFetcher("updatedActor", - new EntityTypeResolver(entityTypes, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Notebook} type. + */ + private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Notebook", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getUpdatedActor() != null ? 
relationship.getUpdatedActor() : null; - }) - ) - ) - .type("ListDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new LoadableTypeBatchResolver<>(domainType, - (env) -> ((ListDomainsResult) env.getSource()).getDomains().stream() - .map(Domain::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryTermsResult", typeWiring -> typeWiring - .dataFetcher("terms", new LoadableTypeBatchResolver<>(glossaryTermType, - (env) -> ((GetRootGlossaryTermsResult) env.getSource()).getTerms().stream() - .map(GlossaryTerm::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryNodesResult", typeWiring -> typeWiring - .dataFetcher("nodes", new LoadableTypeBatchResolver<>(glossaryNodeType, - (env) -> ((GetRootGlossaryNodesResult) env.getSource()).getNodes().stream() - .map(GlossaryNode::getUrn) - .collect(Collectors.toList()))) - ) - .type("AutoCompleteResults", typeWiring -> typeWiring - .dataFetcher("entities", - new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResults) env.getSource()).getEntities())) - ) - .type("AutoCompleteResultForEntity", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities())) - ) - .type("PolicyMatchCriterionValue", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity())) - ) - .type("ListTestsResult", typeWiring -> typeWiring - .dataFetcher("tests", new LoadableTypeBatchResolver<>(testType, - (env) -> ((ListTestsResult) env.getSource()).getTests().stream() - .map(Test::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuickFilter", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((QuickFilter) env.getSource()).getEntity())) - ) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("ownershipType", new EntityTypeResolver(entityTypes, - (env) -> ((Owner) env.getSource()).getOwnershipType())) - ); - } + final Notebook notebook = env.getSource(); + return notebook.getDataPlatformInstance() != null + ? notebook.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dataset} type. - */ - private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Dataset", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dashboard} type. 
+ */ + private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Dashboard", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dataset) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getContainer() != null ? dataset.getContainer().getUrn() : null; - }) - ) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Dashboard dashboard = env.getSource(); + return dashboard.getDataPlatformInstance() != null + ? dashboard.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getDataPlatformInstance() != null ? dataset.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("datasetProfiles", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "datasetProfile", - DatasetProfileMapper::map - ) - ) - .dataFetcher("operations", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "operation", - OperationMapper::map, - new SortCriterion().setField(OPERATION_EVENT_TIME_FIELD_NAME).setOrder(SortOrder.DESCENDING) - ) - ) - .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) - .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) - .dataFetcher("health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("assertions", new EntityAssertionsResolver(entityClient, graphClient)) - .dataFetcher("testResults", new TestResultsResolver(entityClient)) - .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("runs", new EntityRunsResolver(entityClient)) + final Dashboard dashboard = env.getSource(); + return dashboard.getContainer() != null + ? 
dashboard.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) + .dataFetcher( + "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes, - (env) -> ((Owner) env.getSource()).getOwner())) - ) - .type("UserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>(corpUserType, - (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn())) - ) - .type("ForeignKeyConstraint", typeWiring -> typeWiring - .dataFetcher("foreignDataset", new LoadableTypeResolver<>(datasetType, - (env) -> ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn())) - ) - .type("SiblingProperties", typeWiring -> typeWiring - .dataFetcher("siblings", - new EntityTypeBatchResolver( - new ArrayList<>(entityTypes), - (env) -> ((SiblingProperties) env.getSource()).getSiblings())) - ) - .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring - .dataFetcher("author", new LoadableTypeResolver<>(corpUserType, - (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())) - ) - .type("DatasetStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "DashboardInfo", + typeWiring -> + typeWiring.dataFetcher( + "charts", + new LoadableTypeBatchResolver<>( + chartType, + (env) -> + ((DashboardInfo) env.getSource()) + .getCharts().stream() + .map(Chart::getUrn) + .collect(Collectors.toList())))); + builder.type( + "DashboardUserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn()))); + builder.type( + "DashboardStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.VersionedDataset} type. - */ - private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("VersionedDataset", typeWiring -> typeWiring - .dataFetcher("relationships", new StaticDataFetcher(null))); - - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. 
- */ - private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { - builder.type("AccessToken", typeWiring -> typeWiring - .dataFetcher("metadata", new LoadableTypeResolver<>(accessTokenMetadataType, - (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn())) - ); - builder.type("ListAccessTokenResult", typeWiring -> typeWiring - .dataFetcher("tokens", new LoadableTypeBatchResolver<>(accessTokenMetadataType, - (env) -> ((ListAccessTokenResult) env.getSource()).getTokens().stream() - .map(AccessTokenMetadata::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryNode", typeWiring -> typeWiring - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { - builder.type("SchemaFieldEntity", typeWiring -> typeWiring - .dataFetcher("parent", new EntityTypeResolver(entityTypes, - (env) -> ((SchemaFieldEntity) env.getSource()).getParent())) - ); - } - - private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { - builder.type("EntityPath", typeWiring -> typeWiring - .dataFetcher("path", new BatchGetEntitiesResolver(entityTypes, - (env) -> ((EntityPath) env.getSource()).getPath())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpUser} type. - */ - private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpUser", typeWiring -> typeWiring - .dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("CorpUserInfo", typeWiring -> typeWiring - .dataFetcher("manager", new LoadableTypeResolver<>(corpUserType, - (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpGroup} type. 
- */ - private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpGroup", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); - builder.type("CorpGroupInfo", typeWiring -> typeWiring - .dataFetcher("admins", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getAdmins().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - .dataFetcher("members", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getMembers().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - ) - .type("ListGroupsResult", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(corpGroupType, - (env) -> ((ListGroupsResult) env.getSource()).getGroups().stream() - .map(CorpGroup::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("Tag", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - builder.type("TagAssociation", typeWiring -> typeWiring - .dataFetcher("tag", - new LoadableTypeResolver<>(tagType, - (env) -> ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()).getTag().getUrn())) - ); - } - - private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTermAssociation", typeWiring -> typeWiring - .dataFetcher("term", - new LoadableTypeResolver<>(glossaryTermType, - (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn())) - ); - } + DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Notebook} type. + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Chart} type. */ - private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { - builder.type("Notebook", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Notebook notebook = env.getSource(); - return notebook.getDataPlatformInstance() != null ? notebook.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dashboard} type. 
- */ - private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { - builder.type("Dashboard", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getDataPlatformInstance() != null ? dashboard.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>(containerType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getContainer() != null ? dashboard.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) - .dataFetcher("statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("DashboardInfo", typeWiring -> typeWiring - .dataFetcher("charts", new LoadableTypeBatchResolver<>(chartType, - (env) -> ((DashboardInfo) env.getSource()).getCharts().stream() - .map(Chart::getUrn) - .collect(Collectors.toList()))) - ); - builder.type("DashboardUserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>( - corpUserType, - (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn())) - ); - builder.type("DashboardStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, - (env) -> { - DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Chart} type. - */ - private void configureChartResolvers(final RuntimeWiring.Builder builder) { - builder.type("Chart", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getDataPlatformInstance() != null ? chart.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>( - containerType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getContainer() != null ? 
chart.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("ChartInfo", typeWiring -> typeWiring - .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((ChartInfo) env.getSource()).getInputs().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - ); - } - - /** - * Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. - */ - private void configureTypeResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Entity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("EntityWithRelationships", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("BrowsableEntity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(browsableTypes.stream() - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("OwnerType", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(ownerTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("PlatformSchema", typeWiring -> typeWiring - .typeResolver(new PlatformSchemaUnionTypeResolver()) - ) - .type("HyperParameterValueType", typeWiring -> typeWiring - .typeResolver(new HyperParameterValueTypeResolver()) - ) - .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) - .type("TimeSeriesAspect", typeWiring -> typeWiring - .typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) - .type("ResultsType", typeWiring -> typeWiring - .typeResolver(new ResultsTypeResolver())); - } - - /** - * Configures custom type extensions leveraged within our GraphQL schema. - */ - private void configureTypeExtensions(final RuntimeWiring.Builder builder) { - builder.scalar(GraphQLLong); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataJob} type. 
- */ - private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataJob", typeWiring -> typeWiring + private void configureChartResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Chart", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("dataFlow", new LoadableTypeResolver<>(dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Chart chart = env.getSource(); + return chart.getDataPlatformInstance() != null + ? chart.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final DataJob dataJob = env.getSource(); - return dataJob.getDataPlatformInstance() != null ? dataJob.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + final Chart chart = env.getSource(); + return chart.getContainer() != null + ? chart.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher( + "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ) - .type("DataJobInputOutput", typeWiring -> typeWiring - .dataFetcher("inputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("outputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getOutputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("inputDatajobs", new LoadableTypeBatchResolver<>(dataJobType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatajobs().stream() - .map(DataJob::getUrn) - .collect(Collectors.toList()))) - ); - } + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "ChartInfo", + typeWiring -> + typeWiring.dataFetcher( + "inputs", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((ChartInfo) env.getSource()) + .getInputs().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList())))); + } + + /** Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. 
*/ + private void configureTypeResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Entity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "EntityWithRelationships", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "BrowsableEntity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + browsableTypes.stream() + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "OwnerType", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + ownerTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "PlatformSchema", + typeWiring -> typeWiring.typeResolver(new PlatformSchemaUnionTypeResolver())) + .type( + "HyperParameterValueType", + typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) + .type( + "TimeSeriesAspect", + typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) + .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())); + } + + /** Configures custom type extensions leveraged within our GraphQL schema. */ + private void configureTypeExtensions(final RuntimeWiring.Builder builder) { + builder.scalar(GraphQLLong); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataFlow} type. - */ - private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataFlow", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataJob} type. + */ + private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataJob", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "dataFlow", + new LoadableTypeResolver<>( + dataFlowType, + (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataPlatformInstance() != null + ? 
dataJob.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))) + .type( + "DataJobInputOutput", + typeWiring -> + typeWiring + .dataFetcher( + "inputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "outputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getOutputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "inputDatajobs", + new LoadableTypeBatchResolver<>( + dataJobType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatajobs().stream() + .map(DataJob::getUrn) + .collect(Collectors.toList())))); + } + + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataFlow} type. + */ + private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataFlow", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final DataFlow dataFlow = env.getSource(); - return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } + final DataFlow dataFlow = env.getSource(); + return dataFlow.getDataPlatformInstance() != null + ? dataFlow.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.MLFeatureTable} type. - */ - private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { - builder - .type("MLFeatureTable", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.MLFeatureTable} type. 
+ */ + private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "MLFeatureTable", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeatureTable entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeatureTableProperties", + typeWiring -> + typeWiring + .dataFetcher( + "mlFeatures", + new LoadableTypeBatchResolver<>( + mlFeatureType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlFeatures().stream() + .map(MLFeature::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of())) + .dataFetcher( + "mlPrimaryKeys", + new LoadableTypeBatchResolver<>( + mlPrimaryKeyType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() + != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlPrimaryKeys().stream() + .map(MLPrimaryKey::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of()))) + .type( + "MLFeatureProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, (env) -> { - final MLFeatureTable entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeatureTableProperties", typeWiring -> typeWiring - .dataFetcher("mlFeatures", - new LoadableTypeBatchResolver<>(mlFeatureType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null - ? ((MLFeatureTableProperties) env.getSource()).getMlFeatures().stream() - .map(MLFeature::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - .dataFetcher("mlPrimaryKeys", - new LoadableTypeBatchResolver<>(mlPrimaryKeyType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() != null - ? 
((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys().stream() - .map(MLPrimaryKey::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - ) - .type("MLFeatureProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLFeatureProperties) env.getSource()).getSources() == null) { + if (((MLFeatureProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLFeatureProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLPrimaryKeyProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { + } + return ((MLFeatureProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLPrimaryKeyProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> { + if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLPrimaryKeyProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLModel", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + } + return ((MLPrimaryKeyProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLModel", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModel mlModel = env.getSource(); + return mlModel.getDataPlatformInstance() != null + ? mlModel.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLModelProperties", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + mlModelGroupType, (env) -> { - final MLModel mlModel = env.getSource(); - return mlModel.getDataPlatformInstance() != null ? 
mlModel.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLModelProperties", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(mlModelGroupType, - (env) -> { - MLModelProperties properties = env.getSource(); - if (properties.getGroups() != null) { + MLModelProperties properties = env.getSource(); + if (properties.getGroups() != null) { return properties.getGroups().stream() .map(MLModelGroup::getUrn) .collect(Collectors.toList()); - } - return Collections.emptyList(); - }) - ) - ) - .type("MLModelGroup", typeWiring -> typeWiring + } + return Collections.emptyList(); + }))) + .type( + "MLModelGroup", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModelGroup entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeature", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeature entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLPrimaryKey", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLPrimaryKey entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? 
entity.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "GlossaryTerm", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "GlossaryNode", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureDomainResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Domain", + typeWiring -> + typeWiring + .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) + .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "DomainAssociation", + typeWiring -> + typeWiring.dataFetcher( + "domain", + new LoadableTypeResolver<>( + domainType, + (env) -> + ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()) + .getDomain() + .getUrn()))); + } + + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProduct", + typeWiring -> + typeWiring + .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Assertion", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final MLModelGroup entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeature", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Assertion assertion = env.getSource(); + return assertion.getDataPlatformInstance() != null + ? assertion.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); + } + + private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { + // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
+ builder.type( + "ActorFilter", + typeWiring -> + typeWiring + .dataFetcher( + "resolvedUsers", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - final MLFeature entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLPrimaryKey", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final ActorFilter filter = env.getSource(); + return filter.getUsers(); + })) + .dataFetcher( + "resolvedGroups", + new LoadableTypeBatchResolver<>( + corpGroupType, (env) -> { - final MLPrimaryKey entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))) - .type("GlossaryNode", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureDomainResolvers(final RuntimeWiring.Builder builder) { - builder.type("Domain", typeWiring -> typeWiring - .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) - .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("DomainAssociation", typeWiring -> typeWiring - .dataFetcher("domain", - new LoadableTypeResolver<>(domainType, - (env) -> ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()).getDomain().getUrn())) - ); - } - - private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProduct", typeWiring -> typeWiring - .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - } - - private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { - builder.type("Assertion", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Assertion assertion = env.getSource(); - return assertion.getDataPlatformInstance() != null ? assertion.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); - } - - private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { - // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
- builder.type("ActorFilter", typeWiring -> typeWiring.dataFetcher("resolvedUsers", - new LoadableTypeBatchResolver<>(corpUserType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getUsers(); - })).dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getGroups(); - })).dataFetcher("resolvedRoles", new LoadableTypeBatchResolver<>(dataHubRoleType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getRoles(); - })).dataFetcher("resolvedOwnershipTypes", new LoadableTypeBatchResolver<>(ownershipType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getResourceOwnersTypes(); - }))); - } - - private void configureRoleResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataHubRole", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureViewResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataHubView", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListViewsResult", typeWiring -> typeWiring - .dataFetcher("views", new LoadableTypeBatchResolver<>( - dataHubViewType, - (env) -> ((ListViewsResult) env.getSource()).getViews().stream() - .map(DataHubView::getUrn) - .collect(Collectors.toList()))) - ) - .type("CorpUserViewsSettings", typeWiring -> typeWiring - .dataFetcher("defaultView", new LoadableTypeResolver<>( + final ActorFilter filter = env.getSource(); + return filter.getGroups(); + })) + .dataFetcher( + "resolvedRoles", + new LoadableTypeBatchResolver<>( + dataHubRoleType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getRoles(); + })) + .dataFetcher( + "resolvedOwnershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getResourceOwnersTypes(); + }))); + } + + private void configureRoleResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataHubRole", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureViewResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataHubView", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListViewsResult", + typeWiring -> + typeWiring.dataFetcher( + "views", + new LoadableTypeBatchResolver<>( + dataHubViewType, + (env) -> + ((ListViewsResult) env.getSource()) + .getViews().stream() + .map(DataHubView::getUrn) + .collect(Collectors.toList())))) + .type( + "CorpUserViewsSettings", + typeWiring -> + typeWiring.dataFetcher( + "defaultView", + new LoadableTypeResolver<>( dataHubViewType, (env) -> { - final CorpUserViewsSettings settings = env.getSource(); - if (settings.getDefaultView() != null) { - return settings.getDefaultView().getUrn(); - } - return null; - } - ) - )); - } - - private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("QueryEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListQueriesResult", typeWiring -> typeWiring - .dataFetcher("queries", new LoadableTypeBatchResolver<>( - queryType, - (env) -> ((ListQueriesResult) env.getSource()).getQueries().stream() - 
.map(QueryEntity::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuerySubject", typeWiring -> typeWiring - .dataFetcher("dataset", new LoadableTypeResolver<>( - datasetType, - (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn())) - ); - - } - - private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { - builder - .type("OwnershipTypeEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListOwnershipTypesResult", typeWiring -> typeWiring - .dataFetcher("ownershipTypes", new LoadableTypeBatchResolver<>(ownershipType, - (env) -> ((ListOwnershipTypesResult) env.getSource()).getOwnershipTypes().stream() - .map(OwnershipTypeEntity::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProcessInstance", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + final CorpUserViewsSettings settings = env.getSource(); + if (settings.getDefaultView() != null) { + return settings.getDefaultView().getUrn(); + } + return null; + }))); + } + + private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "QueryEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListQueriesResult", + typeWiring -> + typeWiring.dataFetcher( + "queries", + new LoadableTypeBatchResolver<>( + queryType, + (env) -> + ((ListQueriesResult) env.getSource()) + .getQueries().stream() + .map(QueryEntity::getUrn) + .collect(Collectors.toList())))) + .type( + "QuerySubject", + typeWiring -> + typeWiring.dataFetcher( + "dataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn()))); + } + + private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { + builder + .type( + "OwnershipTypeEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListOwnershipTypesResult", + typeWiring -> + typeWiring.dataFetcher( + "ownershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> + ((ListOwnershipTypesResult) env.getSource()) + .getOwnershipTypes().stream() + .map(OwnershipTypeEntity::getUrn) + .collect(Collectors.toList())))); + } + + private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProcessInstance", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("state", new TimeSeriesAspectResolver(this.entityClient, "dataProcessInstance", - DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, DataProcessInstanceRunEventMapper::map))); - } - - private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { - builder.type("TestResult", typeWiring -> typeWiring - .dataFetcher("test", new LoadableTypeResolver<>(testType, - (env) -> { - final TestResult testResult = env.getSource(); - return testResult.getTest() != null ? 
testResult.getTest().getUrn() : null; - })) - ); - } - - private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader(final LoadableType<T, K> graphType, final QueryContext queryContext) { - BatchLoaderContextProvider contextProvider = () -> queryContext; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); - return DataLoader.newDataLoader((keys, context) -> CompletableFuture.supplyAsync(() -> { - try { - log.debug(String.format("Batch loading entities of type: %s, keys: %s", graphType.name(), keys)); - return graphType.batchLoad(keys, context.getContext()); - } catch (Exception e) { - log.error(String.format("Failed to load Entities of type: %s, keys: %s", graphType.name(), keys) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to retrieve entities of type %s", graphType.name()), e); - } - }), loaderOptions); - } - - private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { - builder.type("IngestionSource", typeWiring -> typeWiring - .dataFetcher("executions", new IngestionSourceExecutionRequestsResolver(entityClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> { - final IngestionSource ingestionSource = env.getSource(); - return ingestionSource.getPlatform() != null ? ingestionSource.getPlatform().getUrn() : null; - }) - )); - } + .dataFetcher( + "state", + new TimeSeriesAspectResolver( + this.entityClient, + "dataProcessInstance", + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + DataProcessInstanceRunEventMapper::map))); + } + + private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "TestResult", + typeWiring -> + typeWiring.dataFetcher( + "test", + new LoadableTypeResolver<>( + testType, + (env) -> { + final TestResult testResult = env.getSource(); + return testResult.getTest() != null ? testResult.getTest().getUrn() : null; + }))); + } + + private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader( + final LoadableType<T, K> graphType, final QueryContext queryContext) { + BatchLoaderContextProvider contextProvider = () -> queryContext; + DataLoaderOptions loaderOptions = + DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); + return DataLoader.newDataLoader( + (keys, context) -> + CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + String.format( + "Batch loading entities of type: %s, keys: %s", + graphType.name(), keys)); + return graphType.batchLoad(keys, context.getContext()); + } catch (Exception e) { + log.error( + String.format( + "Failed to load Entities of type: %s, keys: %s", + graphType.name(), keys) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to retrieve entities of type %s", graphType.name()), + e); + } + }), + loaderOptions); + } + + private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "IngestionSource", + typeWiring -> + typeWiring + .dataFetcher( + "executions", new IngestionSourceExecutionRequestsResolver(entityClient)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final IngestionSource ingestionSource = env.getSource(); + return ingestionSource.getPlatform() != null + ? 
ingestionSource.getPlatform().getUrn()
+                            : null;
+                          })));
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java
index 157fb10ce7078..4829194a8ce4d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java
@@ -38,41 +38,41 @@ @Data
 public class GmsGraphQLEngineArgs {
 
-    EntityClient entityClient;
-    SystemEntityClient systemEntityClient;
-    GraphClient graphClient;
-    UsageClient usageClient;
-    AnalyticsService analyticsService;
-    EntityService entityService;
-    RecommendationsService recommendationsService;
-    StatefulTokenService statefulTokenService;
-    TimeseriesAspectService timeseriesAspectService;
-    EntityRegistry entityRegistry;
-    SecretService secretService;
-    NativeUserService nativeUserService;
-    IngestionConfiguration ingestionConfiguration;
-    AuthenticationConfiguration authenticationConfiguration;
-    AuthorizationConfiguration authorizationConfiguration;
-    GitVersion gitVersion;
-    TimelineService timelineService;
-    boolean supportsImpactAnalysis;
-    VisualConfiguration visualConfiguration;
-    TelemetryConfiguration telemetryConfiguration;
-    TestsConfiguration testsConfiguration;
-    DataHubConfiguration datahubConfiguration;
-    ViewsConfiguration viewsConfiguration;
-    SiblingGraphService siblingGraphService;
-    GroupService groupService;
-    RoleService roleService;
-    InviteTokenService inviteTokenService;
-    PostService postService;
-    ViewService viewService;
-    OwnershipTypeService ownershipTypeService;
-    SettingsService settingsService;
-    LineageService lineageService;
-    QueryService queryService;
-    FeatureFlags featureFlags;
-    DataProductService dataProductService;
+  EntityClient entityClient;
+  SystemEntityClient systemEntityClient;
+  GraphClient graphClient;
+  UsageClient usageClient;
+  AnalyticsService analyticsService;
+  EntityService entityService;
+  RecommendationsService recommendationsService;
+  StatefulTokenService statefulTokenService;
+  TimeseriesAspectService timeseriesAspectService;
+  EntityRegistry entityRegistry;
+  SecretService secretService;
+  NativeUserService nativeUserService;
+  IngestionConfiguration ingestionConfiguration;
+  AuthenticationConfiguration authenticationConfiguration;
+  AuthorizationConfiguration authorizationConfiguration;
+  GitVersion gitVersion;
+  TimelineService timelineService;
+  boolean supportsImpactAnalysis;
+  VisualConfiguration visualConfiguration;
+  TelemetryConfiguration telemetryConfiguration;
+  TestsConfiguration testsConfiguration;
+  DataHubConfiguration datahubConfiguration;
+  ViewsConfiguration viewsConfiguration;
+  SiblingGraphService siblingGraphService;
+  GroupService groupService;
+  RoleService roleService;
+  InviteTokenService inviteTokenService;
+  PostService postService;
+  ViewService viewService;
+  OwnershipTypeService ownershipTypeService;
+  SettingsService settingsService;
+  LineageService lineageService;
+  QueryService queryService;
+  FeatureFlags featureFlags;
+  DataProductService dataProductService;
 
-    //any fork specific args should go below this line
+  // any fork specific args should go below this line
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
index e7ef0c402a1de..472d9465aeee1 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
@@ -5,41 +5,42 @@
 import java.util.Collection;
 import java.util.List;
 
-
 /**
- * An interface that allows the Core GMS GraphQL Engine to be extended without requiring
- * code changes in the GmsGraphQLEngine class if new entities, relationships or resolvers
- * need to be introduced. This is useful if you are maintaining a fork of DataHub and
- * don't want to deal with merge conflicts.
+ * An interface that allows the Core GMS GraphQL Engine to be extended without requiring code
+ * changes in the GmsGraphQLEngine class if new entities, relationships or resolvers need to be
+ * introduced. This is useful if you are maintaining a fork of DataHub and don't want to deal with
+ * merge conflicts.
  */
 public interface GmsGraphQLPlugin {
 
   /**
    * Initialization method that allows the plugin to instantiate
+   *
    * @param args
    */
   void init(GmsGraphQLEngineArgs args);
 
   /**
-   * Return a list of schema files that contain graphql definitions
-   * that are served by this plugin
+   * Return a list of schema files that contain graphql definitions that are served by this plugin
+   *
    * @return
    */
   List<String> getSchemaFiles();
 
   /**
    * Return a list of LoadableTypes that this plugin serves
+   *
    * @return
    */
   Collection<? extends LoadableType<?, ?>> getLoadableTypes();
 
   /**
-   * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers.
+   * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific
+   * resolvers.
+   *
    * @param wiringBuilder : the builder being used to configure the runtime wiring
    * @param baseEngine : a reference to the core engine and its graphql types
    */
-  default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {
-
-  }
-
+  default void configureExtraResolvers(
+      final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {}
 }
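For reference, a minimal implementation of the GmsGraphQLPlugin interface reformatted above might look like the following sketch. The package, class name, and schema file are hypothetical, and the LoadableType import path is assumed from the surrounding code; a real plugin would typically keep the GmsGraphQLEngineArgs it receives in init() and use those shared clients in its resolvers.

// Hypothetical example, not part of this patch: a minimal GmsGraphQLPlugin.
package com.example.plugin;

import com.linkedin.datahub.graphql.GmsGraphQLEngine;
import com.linkedin.datahub.graphql.GmsGraphQLEngineArgs;
import com.linkedin.datahub.graphql.GmsGraphQLPlugin;
import com.linkedin.datahub.graphql.types.LoadableType;
import graphql.schema.idl.RuntimeWiring;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

public class ExamplePlugin implements GmsGraphQLPlugin {

  private GmsGraphQLEngineArgs args;

  @Override
  public void init(GmsGraphQLEngineArgs args) {
    // Keep a handle on the shared clients and services for use in this plugin's resolvers.
    this.args = args;
  }

  @Override
  public List<String> getSchemaFiles() {
    // Schema files contributed here are merged with the core schema when the engine is built.
    return Collections.singletonList("example.graphql");
  }

  @Override
  public Collection<? extends LoadableType<?, ?>> getLoadableTypes() {
    // This plugin defines no new loadable entity types.
    return Collections.emptyList();
  }

  @Override
  public void configureExtraResolvers(
      final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {
    // Register any plugin-specific Query, Mutation, or type resolvers here.
  }
}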
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java
index 74c4c541b972b..f95727a1e8fd1 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql;
 
+import static graphql.schema.idl.RuntimeWiring.*;
+
 import com.linkedin.datahub.graphql.exception.DataHubDataFetcherExceptionHandler;
 import graphql.ExecutionInput;
 import graphql.ExecutionResult;
@@ -22,152 +24,157 @@
 import org.dataloader.DataLoader;
 import org.dataloader.DataLoaderRegistry;
 
-import static graphql.schema.idl.RuntimeWiring.*;
-
 /**
- * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and executing
- * GQL queries.
-
- * <p>This class provides a {@link Builder} builder for constructing {@link GraphQL} instances provided one or more
- * schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}.
+ * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and
+ * executing GQL queries.
+ *
+ * <p>This class provides a {@link Builder} builder for constructing {@link GraphQL} instances
+ * provided one or more schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}.
  *
- * <p>In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set of variables.
+ * <p>In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set
+ * of variables.
  */
 public class GraphQLEngine {
 
-    private final GraphQL _graphQL;
-    private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _dataLoaderSuppliers;
+  private final GraphQL _graphQL;
+  private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _dataLoaderSuppliers;
 
-    private GraphQLEngine(@Nonnull final List<String> schemas,
-                          @Nonnull final RuntimeWiring runtimeWiring,
-                          @Nonnull final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
+  private GraphQLEngine(
+      @Nonnull final List<String> schemas,
+      @Nonnull final RuntimeWiring runtimeWiring,
+      @Nonnull final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
 
-        _dataLoaderSuppliers = dataLoaderSuppliers;
+    _dataLoaderSuppliers = dataLoaderSuppliers;
 
-        /*
-         * Parse schema
-         */
-        SchemaParser schemaParser = new SchemaParser();
-        TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry();
-        schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema)));
+    /*
+     * Parse schema
+     */
+    SchemaParser schemaParser = new SchemaParser();
+    TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry();
+    schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema)));
 
-        /*
-         * Configure resolvers (data fetchers)
-         */
-        SchemaGenerator schemaGenerator = new SchemaGenerator();
-        GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring);
+    /*
+     * Configure resolvers (data fetchers)
+     */
+    SchemaGenerator schemaGenerator = new SchemaGenerator();
+    GraphQLSchema graphQLSchema =
+        schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring);
 
-        /*
-         * Instantiate engine
-         */
-        _graphQL = new GraphQL.Builder(graphQLSchema)
+    /*
+     * Instantiate engine
+     */
+    _graphQL =
+        new GraphQL.Builder(graphQLSchema)
             .defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler())
             .instrumentation(new TracingInstrumentation())
             .build();
-    }
+  }
+
+  public ExecutionResult execute(
+      @Nonnull final String query,
+      @Nullable final Map<String, Object> variables,
+      @Nonnull final QueryContext context) {
+    /*
+     * Init DataLoaderRegistry - should be created for each request.
+     */
+    DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context);
 
-    public ExecutionResult execute(@Nonnull final String query,
-                                   @Nullable final Map<String, Object> variables,
-                                   @Nonnull final QueryContext context) {
-        /*
-         * Init DataLoaderRegistry - should be created for each request.
-         */
-        DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context);
-
-        /*
-         * Construct execution input
-         */
-        ExecutionInput executionInput = ExecutionInput.newExecutionInput()
+    /*
+     * Construct execution input
+     */
+    ExecutionInput executionInput =
+        ExecutionInput.newExecutionInput()
             .query(query)
             .variables(variables)
             .dataLoaderRegistry(register)
             .context(context)
             .build();
 
-        /*
-         * Execute GraphQL Query
-         */
-        return _graphQL.execute(executionInput);
-    }
+    /*
+     * Execute GraphQL Query
+     */
+    return _graphQL.execute(executionInput);
+  }
+
+  public GraphQL getGraphQL() {
+    return _graphQL;
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  /** Used to construct a {@link GraphQLEngine}. */
+  public static class Builder {
 
-    public GraphQL getGraphQL() {
-        return _graphQL;
+    private final List<String> _schemas = new ArrayList<>();
+    private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _loaderSuppliers =
+        new HashMap<>();
+    private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring();
+
+    /**
+     * Used to add a schema file containing the GQL types resolved by the engine.
+     *
+     * <p>If multiple files are provided, their schemas will be merged together.
+     */
+    public Builder addSchema(final String schema) {
+      _schemas.add(schema);
+      return this;
     }
 
-    public static Builder builder() {
-        return new Builder();
+    /**
+     * Used to register a {@link DataLoader} to be used within the configured resolvers.
+     *
+     * <p>The {@link Supplier} provided is expected to return a new instance of {@link DataLoader}
+     * when invoked.
+     *
+     * <p>If multiple loaders are registered with the name, the latter will override the former.
+     */
+    public Builder addDataLoader(
+        final String name, final Function<QueryContext, DataLoader<?, ?>> dataLoaderSupplier) {
+      _loaderSuppliers.put(name, dataLoaderSupplier);
+      return this;
     }
 
     /**
-     * Used to construct a {@link GraphQLEngine}.
+     * Used to register multiple {@link DataLoader}s for use within the configured resolvers.
+     *
+     * <p>The included {@link Supplier} provided is expected to return a new instance of {@link
+     * DataLoader} when invoked.
+     *
+     * <p>If multiple loaders are registered with the name, the latter will override the former.
      */
-    public static class Builder {
-
-        private final List<String> _schemas = new ArrayList<>();
-        private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _loaderSuppliers = new HashMap<>();
-        private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring();
-
-        /**
-         * Used to add a schema file containing the GQL types resolved by the engine.
-         *
-         * If multiple files are provided, their schemas will be merged together.
-         */
-        public Builder addSchema(final String schema) {
-            _schemas.add(schema);
-            return this;
-        }
-
-        /**
-         * Used to register a {@link DataLoader} to be used within the configured resolvers.
-         *
-         * The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked.
-         *
-         * If multiple loaders are registered with the name, the latter will override the former.
-         */
-        public Builder addDataLoader(final String name, final Function<QueryContext, DataLoader<?, ?>> dataLoaderSupplier) {
-            _loaderSuppliers.put(name, dataLoaderSupplier);
-            return this;
-        }
-
-        /**
-         * Used to register multiple {@link DataLoader}s for use within the configured resolvers.
-         *
-         * The included {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked.
-         *
-         * If multiple loaders are registered with the name, the latter will override the former.
-         */
-        public Builder addDataLoaders(Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
-            _loaderSuppliers.putAll(dataLoaderSuppliers);
-            return this;
-        }
-
-        /**
-         * Used to configure the runtime wiring (data fetchers & type resolvers)
-         * used in resolving the Graph QL schema.
-         *
-         * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register any required
-         * data + type resolvers.
-         */
-        public Builder configureRuntimeWiring(final Consumer<RuntimeWiring.Builder> builderFunc) {
-            builderFunc.accept(_runtimeWiringBuilder);
-            return this;
-        }
-
-        /**
-         * Builds a {@link GraphQLEngine}.
-         */
-        public GraphQLEngine build() {
-            return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers);
-        }
+    public Builder addDataLoaders(
+        Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
+      _loaderSuppliers.putAll(dataLoaderSuppliers);
+      return this;
     }
 
-    private DataLoaderRegistry createDataLoaderRegistry(final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers,
-                                                        final QueryContext context) {
-        final DataLoaderRegistry registry = new DataLoaderRegistry();
-        for (String key : dataLoaderSuppliers.keySet()) {
-            registry.register(key, dataLoaderSuppliers.get(key).apply(context));
-        }
-        return registry;
+    /**
+     * Used to configure the runtime wiring (data fetchers & type resolvers) used in resolving the
+     * Graph QL schema.
+     *
+     * <p>The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register
+     * any required data + type resolvers.
+     */
+    public Builder configureRuntimeWiring(final Consumer<RuntimeWiring.Builder> builderFunc) {
+      builderFunc.accept(_runtimeWiringBuilder);
+      return this;
     }
 
+    /** Builds a {@link GraphQLEngine}. */
+    public GraphQLEngine build() {
+      return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers);
+    }
+  }
+
+  private DataLoaderRegistry createDataLoaderRegistry(
+      final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers,
+      final QueryContext context) {
+    final DataLoaderRegistry registry = new DataLoaderRegistry();
+    for (String key : dataLoaderSuppliers.keySet()) {
+      registry.register(key, dataLoaderSuppliers.get(key).apply(context));
+    }
+    return registry;
+  }
 }
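To tie the Builder API above together, constructing and querying an engine might look like the following sketch. The toy schema, loader name, and data fetcher are illustrative; execute() takes the query string, a variables map, and the per-request QueryContext, and builds a fresh DataLoaderRegistry for each call as shown in the diff above.

// Hypothetical example, not part of this patch: building and querying an engine.
import com.linkedin.datahub.graphql.GraphQLEngine;
import com.linkedin.datahub.graphql.QueryContext;
import graphql.ExecutionResult;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import org.dataloader.DataLoader;

public class GraphQLEngineExample {

  /** Builds an engine over a toy schema and runs one query against it. */
  public static ExecutionResult run(final QueryContext context) {
    GraphQLEngine engine =
        GraphQLEngine.builder()
            // Multiple schema files may be added; their type definitions are merged.
            .addSchema("type Query { ping: String }")
            // Each request gets a fresh DataLoader built from this supplier; this one
            // trivially echoes its keys back as values.
            .addDataLoader(
                "exampleLoader",
                ctx ->
                    DataLoader.newDataLoader(
                        (List<Object> keys) ->
                            CompletableFuture.<List<Object>>completedFuture(
                                new ArrayList<>(keys))))
            // Wire a data fetcher for the single Query field.
            .configureRuntimeWiring(
                wiring -> wiring.type("Query", t -> t.dataFetcher("ping", env -> "pong")))
            .build();

    return engine.execute("query { ping }", Collections.emptyMap(), context);
  }
}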
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java
index 4803ef08fdddc..9f110e713ed57 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java
@@ -4,38 +4,25 @@
 import com.datahub.authentication.Authentication;
 import com.datahub.plugins.auth.authorization.Authorizer;
 
-
-/**
- * Provided as input to GraphQL resolvers; used to carry information about GQL request context.
- */
+/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */
 public interface QueryContext {
 
-    /**
-     * Returns true if the current actor is authenticated, false otherwise.
-     */
-    boolean isAuthenticated();
+  /** Returns true if the current actor is authenticated, false otherwise. */
+  boolean isAuthenticated();
 
-    /**
-     * Returns the {@link Authentication} associated with the current query context.
-     */
-    Authentication getAuthentication();
+  /** Returns the {@link Authentication} associated with the current query context. */
+  Authentication getAuthentication();
 
-    /**
-     * Returns the current authenticated actor, null if there is none.
-     */
-    default Actor getActor() {
-        return getAuthentication().getActor();
-    }
+  /** Returns the current authenticated actor, null if there is none. */
+  default Actor getActor() {
+    return getAuthentication().getActor();
+  }
 
-    /**
-     * Returns the current authenticated actor, null if there is none.
-     */
-    default String getActorUrn() {
-        return getActor().toUrnStr();
-    }
+  /** Returns the current authenticated actor, null if there is none. */
+  default String getActorUrn() {
+    return getActor().toUrnStr();
+  }
 
-    /**
-     * Returns the authorizer used to authorize specific actions.
-     */
-    Authorizer getAuthorizer();
+  /** Returns the authorizer used to authorize specific actions. */
+  Authorizer getAuthorizer();
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java
index df7f0884852d4..425c86ab0f0f6 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java
@@ -4,7 +4,6 @@
 import lombok.AllArgsConstructor;
 import lombok.Data;
 
-
 @Data
 @AllArgsConstructor
 public class RelationshipKey {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java
index d51de6652bb0a..c3ad37ddcb201 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java
@@ -10,11 +10,7 @@ public class TimeSeriesAspectArgs {
   private Long count;
   private TimeRange timeRange;
 
-  public TimeSeriesAspectArgs(
-      String urn,
-      String aspectName,
-      Long count,
-      TimeRange timeRange) {
+  public TimeSeriesAspectArgs(String urn, String aspectName, Long count, TimeRange timeRange) {
     this.urn = urn;
     this.aspectName = aspectName;
     this.count = count;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java
index 5f703f520bde4..c7302c9772c5e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java
@@ -3,7 +3,6 @@
 import com.linkedin.usage.UsageTimeRange;
 import lombok.Data;
 
-
 @Data
 public class UsageStatsKey {
   private String resource;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java
index b0c0436ffd891..6f81de5f04d8f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java
@@ -8,7 +8,7 @@ public class VersionedAspectKey {
   private String urn;
   private Long version;
 
-  public  VersionedAspectKey(String urn, String aspectName, Long version) {
+  public VersionedAspectKey(String urn, String aspectName, Long version) {
     this.urn = urn;
     this.version = version;
     this.aspectName = aspectName;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
index a78d89e59bc7b..22ee4d4d4845c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.DataMap;
 import
com.linkedin.data.codec.JacksonDataCodec; @@ -26,68 +28,84 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @AllArgsConstructor public class WeaklyTypedAspectsResolver implements DataFetcher<CompletableFuture<List<RawAspect>>> { - private final EntityClient _entityClient; - private final EntityRegistry _entityRegistry; - private static final JacksonDataCodec CODEC = new JacksonDataCodec(); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + private static final JacksonDataCodec CODEC = new JacksonDataCodec(); - private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { - return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); - } + private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { + return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); + } - @Override - public CompletableFuture<List<RawAspect>> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - List<RawAspect> results = new ArrayList<>(); + @Override + public CompletableFuture<List<RawAspect>> get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + List<RawAspect> results = new ArrayList<>(); - final QueryContext context = environment.getContext(); - final String urnStr = ((Entity) environment.getSource()).getUrn(); - final EntityType entityType = ((Entity) environment.getSource()).getType(); - final String entityTypeName = EntityTypeMapper.getName(entityType); - final AspectParams input = bindArgument(environment.getArgument("input"), AspectParams.class); + final QueryContext context = environment.getContext(); + final String urnStr = ((Entity) environment.getSource()).getUrn(); + final EntityType entityType = ((Entity) environment.getSource()).getType(); + final String entityTypeName = EntityTypeMapper.getName(entityType); + final AspectParams input = + bindArgument(environment.getArgument("input"), AspectParams.class); - EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); - entitySpec.getAspectSpecs().stream().filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)).forEach(aspectSpec -> { - try { - Urn urn = Urn.createFromString(urnStr); - RawAspect result = new RawAspect(); - EntityResponse entityResponse = - _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(aspectSpec.getName()), context.getAuthentication()).get(urn); - if (entityResponse == null || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { + EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); + entitySpec.getAspectSpecs().stream() + .filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)) + .forEach( + aspectSpec -> { + try { + Urn urn = Urn.createFromString(urnStr); + RawAspect result = new RawAspect(); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(aspectSpec.getName()), + context.getAuthentication()) + .get(urn); + if (entityResponse == null + || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { return; - } + } - DataMap resolvedAspect = entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); - if (resolvedAspect == null) { + DataMap resolvedAspect = + 
entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); + if (resolvedAspect == null) { return; - } + } - result.setPayload(CODEC.mapToString(resolvedAspect)); - result.setAspectName(aspectSpec.getName()); + result.setPayload(CODEC.mapToString(resolvedAspect)); + result.setAspectName(aspectSpec.getName()); - DataMap renderSpec = aspectSpec.getRenderSpec(); + DataMap renderSpec = aspectSpec.getRenderSpec(); - if (renderSpec != null) { + if (renderSpec != null) { AspectRenderSpec resultRenderSpec = new AspectRenderSpec(); resultRenderSpec.setDisplayType(renderSpec.getString("displayType")); resultRenderSpec.setDisplayName(renderSpec.getString("displayName")); resultRenderSpec.setKey(renderSpec.getString("key")); result.setRenderSpec(resultRenderSpec); - } + } - results.add(result); - } catch (IOException | RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + aspectSpec.getName() + " for urn " + urnStr + " ", e); - } - }); - return results; + results.add(result); + } catch (IOException | RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + + aspectSpec.getName() + + " for urn " + + urnStr + + " ", + e); + } + }); + return results; }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java index 7728dcae5d8ee..3bf932c4281e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java @@ -7,18 +7,17 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - public class AnalyticsChartTypeResolver implements TypeResolver { - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TimeSeriesChart) { - return env.getSchema().getObjectType("TimeSeriesChart"); - } else if (env.getObject() instanceof BarChart) { - return env.getSchema().getObjectType("BarChart"); - } else if (env.getObject() instanceof TableChart) { - return env.getSchema().getObjectType("TableChart"); - } else { - throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TimeSeriesChart) { + return env.getSchema().getObjectType("TimeSeriesChart"); + } else if (env.getObject() instanceof BarChart) { + return env.getSchema().getObjectType("BarChart"); + } else if (env.getObject() instanceof TableChart) { + return env.getSchema().getObjectType("TableChart"); + } else { + throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index b8a5dd1121a10..3f635872747a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -27,15 +27,11 @@ import java.util.Collections; 
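
Aside for readers skimming these formatting-only resolver hunks: the pattern they rewrap (seen most fully in WeaklyTypedAspectsResolver above) is a graphql-java DataFetcher that returns a CompletableFuture, so entity-client round trips run off the transport thread. A minimal sketch of that pattern follows; the class name and the "name" argument are illustrative assumptions, not part of this change.

    import graphql.schema.DataFetcher;
    import graphql.schema.DataFetchingEnvironment;
    import java.util.concurrent.CompletableFuture;

    /** Minimal async resolver sketch; mirrors the supplyAsync shape used above. */
    public class ExampleAsyncResolver implements DataFetcher<CompletableFuture<String>> {
      @Override
      public CompletableFuture<String> get(DataFetchingEnvironment environment) {
        // Hypothetical argument; the real resolvers here bind typed inputs via bindArgument(...).
        final String name = environment.getArgument("name");
        // supplyAsync keeps blocking I/O (e.g. EntityClient.batchGetV2) off the request thread.
        return CompletableFuture.supplyAsync(() -> "Hello, " + name);
      }
    }

The batched case has the same shape: fetch, filter, map to GraphQL model objects, and return the collection from inside supplyAsync, as WeaklyTypedAspectsResolver does above.
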
import java.util.List; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. */ @Slf4j @RequiredArgsConstructor public final class GetChartsResolver implements DataFetcher<List<AnalyticsChartGroup>> { @@ -47,15 +43,17 @@ public final class GetChartsResolver implements DataFetcher<List<AnalyticsChartG public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) throws Exception { Authentication authentication = ResolverUtils.getAuthentication(environment); try { - return ImmutableList.of(AnalyticsChartGroup.builder() - .setGroupId("DataHubUsageAnalytics") - .setTitle("DataHub Usage Analytics") - .setCharts(getProductAnalyticsCharts(authentication)) - .build(), AnalyticsChartGroup.builder() - .setGroupId("GlobalMetadataAnalytics") - .setTitle("Data Landscape Summary") - .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) - .build()); + return ImmutableList.of( + AnalyticsChartGroup.builder() + .setGroupId("DataHubUsageAnalytics") + .setTitle("DataHub Usage Analytics") + .setCharts(getProductAnalyticsCharts(authentication)) + .build(), + AnalyticsChartGroup.builder() + .setGroupId("GlobalMetadataAnalytics") + .setTitle("Data Landscape Summary") + .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) + .build()); } catch (Exception e) { log.error("Failed to retrieve analytics charts!", e); return Collections.emptyList(); // Simply return nothing. @@ -63,85 +61,115 @@ public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) } private TimeSeriesChart getActiveUsersTimeSeriesChart( - final DateTime beginning, - final DateTime end, - final String title, - final DateInterval interval - ) { + final DateTime beginning, + final DateTime end, + final String title, + final DateInterval interval) { final DateRange dateRange = - new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); + new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); final List<NamedLine> timeSeriesLines = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), dateRange, interval, - Optional.empty(), ImmutableMap.of(), Collections.emptyMap(), Optional.of("browserId")); + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + dateRange, + interval, + Optional.empty(), + ImmutableMap.of(), + Collections.emptyMap(), + Optional.of("browserId")); return TimeSeriesChart.builder() - .setTitle(title) - .setDateRange(dateRange) - .setInterval(interval) - .setLines(timeSeriesLines) - .build(); + .setTitle(title) + .setDateRange(dateRange) + .setInterval(interval) + .setLines(timeSeriesLines) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ - private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentication) throws Exception { + /** TODO: Config Driven Charts Instead of Hardcoded. 
*/ + private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentication) + throws Exception { final List<AnalyticsChart> charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); - charts.add(getActiveUsersTimeSeriesChart( + charts.add( + getActiveUsersTimeSeriesChart( startOfNextWeek.minusWeeks(10), startOfNextWeek.minusMillis(1), "Weekly Active Users", - DateInterval.WEEK - )); - charts.add(getActiveUsersTimeSeriesChart( + DateInterval.WEEK)); + charts.add( + getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), startOfNextMonth.minusMillis(1), "Monthly Active Users", - DateInterval.MONTH - )); + DateInterval.MONTH)); String searchesTitle = "Searches Last Week"; DateInterval dailyInterval = DateInterval.DAY; String searchEventType = "SearchEvent"; final List<NamedLine> searchesTimeseries = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), trailingWeekDateRange, dailyInterval, - Optional.empty(), ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + trailingWeekDateRange, + dailyInterval, + Optional.empty(), + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), Optional.empty()); - charts.add(TimeSeriesChart.builder() - .setTitle(searchesTitle) - .setDateRange(trailingWeekDateRange) - .setInterval(dailyInterval) - .setLines(searchesTimeseries) - .build()); + charts.add( + TimeSeriesChart.builder() + .setTitle(searchesTitle) + .setDateRange(trailingWeekDateRange) + .setInterval(dailyInterval) + .setLines(searchesTimeseries) + .build()); final String topSearchTitle = "Top Search Queries"; final List<String> columns = ImmutableList.of("Query", "Count"); final List<Row> topSearchQueries = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "query.keyword", ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), - Optional.empty(), 10, AnalyticsUtil::buildCellWithSearchLandingPage); - charts.add(TableChart.builder().setTitle(topSearchTitle).setColumns(columns).setRows(topSearchQueries).build()); + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "query.keyword", + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithSearchLandingPage); + charts.add( + TableChart.builder() + .setTitle(topSearchTitle) + .setColumns(columns) + .setRows(topSearchQueries) + .build()); final String sectionViewsTitle = "Section Views across Entity Types"; final List<NamedBar> sectionViewsPerEntityType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "section.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), Collections.emptyMap(), - Optional.empty(), true); - charts.add(BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); + ImmutableMap.of("type", 
ImmutableList.of("EntitySectionViewEvent")), + Collections.emptyMap(), + Optional.empty(), + true); + charts.add( + BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); final String actionsByTypeTitle = "Actions by Entity Type"; final List<NamedBar> eventsByEventType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "actionType.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), Collections.emptyMap(), Optional.empty(), + ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), + Collections.emptyMap(), + Optional.empty(), true); charts.add(BarChart.builder().setTitle(actionsByTypeTitle).setBars(eventsByEventType).build()); @@ -149,61 +177,128 @@ private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentica final List<String> columns5 = ImmutableList.of("Dataset", "#Views"); final List<Row> topViewedDatasets = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "entityUrn.keyword", ImmutableMap.of("type", ImmutableList.of("EntityViewEvent"), "entityType.keyword", - ImmutableList.of(EntityType.DATASET.name())), Collections.emptyMap(), Optional.empty(), 10, + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "entityUrn.keyword", + ImmutableMap.of( + "type", + ImmutableList.of("EntityViewEvent"), + "entityType.keyword", + ImmutableList.of(EntityType.DATASET.name())), + Collections.emptyMap(), + Optional.empty(), + 10, AnalyticsUtil::buildCellWithEntityLandingPage); - AnalyticsUtil.hydrateDisplayNameForTable(_entityClient, topViewedDatasets, Constants.DATASET_ENTITY_NAME, - ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), AnalyticsUtil::getDatasetName, authentication); - charts.add(TableChart.builder().setTitle(topViewedTitle).setColumns(columns5).setRows(topViewedDatasets).build()); + AnalyticsUtil.hydrateDisplayNameForTable( + _entityClient, + topViewedDatasets, + Constants.DATASET_ENTITY_NAME, + ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), + AnalyticsUtil::getDatasetName, + authentication); + charts.add( + TableChart.builder() + .setTitle(topViewedTitle) + .setColumns(columns5) + .setRows(topViewedDatasets) + .build()); return charts; } - private List<AnalyticsChart> getGlobalMetadataAnalyticsCharts(Authentication authentication) throws Exception { + private List<AnalyticsChart> getGlobalMetadataAnalyticsCharts(Authentication authentication) + throws Exception { final List<AnalyticsChart> charts = new ArrayList<>(); // Chart 1: Entities per domain final List<NamedBar> entitiesPerDomain = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("domains.keyword", "platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerDomain, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); - AnalyticsUtil.hydrateDisplayNameForSegments(_entityClient, entitiesPerDomain, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, 
authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("domains.keyword", "platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerDomain, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); + AnalyticsUtil.hydrateDisplayNameForSegments( + _entityClient, + entitiesPerDomain, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerDomain.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); + charts.add( + BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); } // Chart 2: Entities per platform final List<NamedBar> entitiesPerPlatform = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerPlatform, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerPlatform, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerPlatform.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Platform").setBars(entitiesPerPlatform).build()); + charts.add( + BarChart.builder() + .setTitle("Entities per Platform") + .setBars(entitiesPerPlatform) + .build()); } // Chart 3: Entities per term final List<NamedBar> entitiesPerTerm = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("glossaryTerms.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerTerm, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("glossaryTerms.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerTerm, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); if (!entitiesPerTerm.isEmpty()) { charts.add(BarChart.builder().setTitle("Entities per Term").setBars(entitiesPerTerm).build()); } // Chart 4: Entities per fabric type final 
List<NamedBar> entitiesPerEnv = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("origin.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("origin.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); if (entitiesPerEnv.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); + charts.add( + BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); } return charts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java index c631a13b0bcb6..7000ab7adff5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java @@ -14,15 +14,11 @@ import java.util.Map; import java.util.Optional; import java.util.function.Function; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. */ @RequiredArgsConstructor @Slf4j public final class GetHighlightsResolver implements DataFetcher<List<Highlight>> { @@ -40,69 +36,72 @@ public final List<Highlight> get(DataFetchingEnvironment environment) throws Exc } private Highlight getTimeBasedHighlight( - final String title, - final String changeString, - final DateTime endDateTime, - final Function<DateTime, DateTime> periodStartFunc - ) { + final String title, + final String changeString, + final DateTime endDateTime, + final Function<DateTime, DateTime> periodStartFunc) { DateTime startDate = periodStartFunc.apply(endDateTime); DateTime timeBeforeThat = periodStartFunc.apply(startDate); - DateRange dateRangeThis = new DateRange( - String.valueOf(startDate.getMillis()), - String.valueOf(endDateTime.getMillis()) - ); - DateRange dateRangeLast = new DateRange( - String.valueOf(timeBeforeThat.getMillis()), - String.valueOf(startDate.getMillis()) - ); - - int activeUsersThisRange = _analyticsService.getHighlights( + DateRange dateRangeThis = + new DateRange( + String.valueOf(startDate.getMillis()), String.valueOf(endDateTime.getMillis())); + DateRange dateRangeLast = + new DateRange( + String.valueOf(timeBeforeThat.getMillis()), String.valueOf(startDate.getMillis())); + + int activeUsersThisRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeThis), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); - int activeUsersLastRange = _analyticsService.getHighlights( + Optional.of("browserId")); + int activeUsersLastRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeLast), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); + Optional.of("browserId")); String bodyText = ""; if (activeUsersLastRange > 0) { - double percentChange = 
(double) (activeUsersThisRange - activeUsersLastRange) - / (double) activeUsersLastRange * 100; + double percentChange = + (double) (activeUsersThisRange - activeUsersLastRange) + / (double) activeUsersLastRange + * 100; String directionChange = percentChange > 0 ? "increase" : "decrease"; - bodyText = Double.isInfinite(percentChange) ? "" + bodyText = + Double.isInfinite(percentChange) + ? "" : String.format(changeString, percentChange, directionChange); } - return Highlight.builder().setTitle(title).setValue(activeUsersThisRange).setBody(bodyText).build(); + return Highlight.builder() + .setTitle(title) + .setValue(activeUsersThisRange) + .setBody(bodyText) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ + /** TODO: Config Driven Charts Instead of Hardcoded. */ private List<Highlight> getHighlights() { final List<Highlight> highlights = new ArrayList<>(); DateTime endDate = DateTime.now(); - highlights.add(getTimeBasedHighlight( + highlights.add( + getTimeBasedHighlight( "Weekly Active Users", "%.2f%% %s from last week", endDate, - (date) -> date.minusWeeks(1) - )); - highlights.add(getTimeBasedHighlight( + (date) -> date.minusWeeks(1))); + highlights.add( + getTimeBasedHighlight( "Monthly Active Users", "%.2f%% %s from last month", endDate, - (date) -> date.minusMonths(1) - )); + (date) -> date.minusMonths(1))); // Entity metdata statistics getEntityMetadataStats("Datasets", EntityType.DATASET).ifPresent(highlights::add); @@ -121,10 +120,13 @@ private Optional<Highlight> getEntityMetadataStats(String title, EntityType enti if (numEntities == 0) { return Optional.empty(); } - int numEntitiesWithOwners = getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); - int numEntitiesWithTags = getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); + int numEntitiesWithOwners = + getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); + int numEntitiesWithTags = + getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); int numEntitiesWithGlossaryTerms = - getNumEntitiesFiltered(index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); + getNumEntitiesFiltered( + index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); int numEntitiesWithDescription = getNumEntitiesFiltered(index, ImmutableMap.of("hasDescription", ImmutableList.of("true"))); @@ -137,22 +139,36 @@ private Optional<Highlight> getEntityMetadataStats(String title, EntityType enti if (entityType == EntityType.DOMAIN) { // Don't show percent with domain when asking for stats regarding domains bodyText = - String.format("%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription); + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription); } else { int numEntitiesWithDomains = getNumEntitiesFiltered(index, ImmutableMap.of("hasDomain", ImmutableList.of("true"))); double percentWithDomains = 100.0 * numEntitiesWithDomains / numEntities; - bodyText = String.format( - "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription, percentWithDomains); + 
bodyText = + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription, + percentWithDomains); } } - return Optional.of(Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); + return Optional.of( + Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); } private int getNumEntitiesFiltered(String index, Map<String, List<String>> filters) { - return _analyticsService.getHighlights(index, Optional.empty(), filters, - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty()); + return _analyticsService.getHighlights( + index, + Optional.empty(), + filters, + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index f61c2eb77739b..31a8359f8f0e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.analytics.resolver; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -30,12 +32,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. 
*/ @RequiredArgsConstructor @Slf4j public final class GetMetadataAnalyticsResolver implements DataFetcher<List<AnalyticsChartGroup>> { @@ -45,7 +42,8 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher<List<Anal @Override public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) throws Exception { final Authentication authentication = ResolverUtils.getAuthentication(environment); - final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); + final MetadataAnalyticsInput input = + bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); try { final AnalyticsChartGroup group = new AnalyticsChartGroup(); @@ -59,7 +57,8 @@ public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) } } - private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List<AnalyticsChart> getCharts( + MetadataAnalyticsInput input, Authentication authentication) throws Exception { final List<AnalyticsChart> charts = new ArrayList<>(); List<String> entities = Collections.emptyList(); @@ -77,48 +76,76 @@ private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authenticat filter = QueryUtils.newFilter("domains.keyword", input.getDomain()); } - SearchResult searchResult = _entityClient.searchAcrossEntities(entities, query, filter, 0, 0, - null, null, authentication); + SearchResult searchResult = + _entityClient.searchAcrossEntities( + entities, query, filter, 0, 0, null, null, authentication); - List<AggregationMetadata> aggregationMetadataList = searchResult.getMetadata().getAggregations(); + List<AggregationMetadata> aggregationMetadataList = + searchResult.getMetadata().getAggregations(); Optional<AggregationMetadata> domainAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("domains")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("domains")) + .findFirst(); if (StringUtils.isEmpty(input.getDomain()) && domainAggregation.isPresent()) { List<NamedBar> domainChart = buildBarChart(domainAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, domainChart, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + domainChart, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); } Optional<AggregationMetadata> platformAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("platform")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("platform")) + .findFirst(); if (platformAggregation.isPresent()) { List<NamedBar> platformChart = buildBarChart(platformAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, platformChart, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + platformChart, + 
Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); + charts.add( + BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); } Optional<AggregationMetadata> termAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("glossaryTerms")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("glossaryTerms")) + .findFirst(); if (termAggregation.isPresent()) { List<NamedBar> termChart = buildBarChart(termAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, termChart, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + termChart, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build()); } Optional<AggregationMetadata> envAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("origin")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("origin")) + .findFirst(); if (envAggregation.isPresent()) { List<NamedBar> termChart = buildBarChart(envAggregation.get()); if (termChart.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); + charts.add( + BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); } } @@ -126,16 +153,20 @@ private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authenticat } private List<NamedBar> buildBarChart(AggregationMetadata aggregation) { - return aggregation.getAggregations() - .entrySet() - .stream() + return aggregation.getAggregations().entrySet().stream() .sorted(Collections.reverseOrder(Map.Entry.comparingByValue())) .limit(10) - .map(entry -> NamedBar.builder() - .setName(entry.getKey()) - .setSegments(ImmutableList.of( - BarSegment.builder().setLabel("#Entities").setValue(entry.getValue().intValue()).build())) - .build()) + .map( + entry -> + NamedBar.builder() + .setName(entry.getKey()) + .setSegments( + ImmutableList.of( + BarSegment.builder() + .setLabel("#Entities") + .setValue(entry.getValue().intValue()) + .build())) + .build()) .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java index 8e3bffc9ccf08..c7f5c0bbc63eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java @@ -3,20 +3,17 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -/** - * Returns true if analytics feature flag is enabled, false otherwise. - */ +/** Returns true if analytics feature flag is enabled, false otherwise. 
*/ public class IsAnalyticsEnabledResolver implements DataFetcher<Boolean> { private final Boolean _isAnalyticsEnabled; public IsAnalyticsEnabledResolver(final Boolean isAnalyticsEnabled) { - _isAnalyticsEnabled = isAnalyticsEnabled; + _isAnalyticsEnabled = isAnalyticsEnabled; } @Override public final Boolean get(DataFetchingEnvironment environment) throws Exception { - return _isAnalyticsEnabled; + return _isAnalyticsEnabled; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 4135a7b0da148..03333bda05f61 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -40,7 +40,6 @@ import org.opensearch.search.aggregations.metrics.Cardinality; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class AnalyticsService { @@ -72,25 +71,35 @@ public String getUsageIndexName() { return _indexConvention.getIndexName(DATAHUB_USAGE_EVENT_INDEX); } - public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange, DateInterval granularity, + public List<NamedLine> getTimeseriesChart( + String indexName, + DateRange dateRange, + DateInterval granularity, Optional<String> dimension, // Length 1 for now - Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn) { + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn) { log.debug( - String.format("Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", - indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + String.format("filters: %s, uniqueOn: %s", filters, - uniqueOn)); - - AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); - - AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(DATE_HISTOGRAM) - .field("timestamp") - .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); + String.format( + "Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", + indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + + AggregationBuilder filteredAgg = + getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); + + AggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(DATE_HISTOGRAM) + .field("timestamp") + .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); uniqueOn.ifPresent(s -> dateHistogram.subAggregation(getUniqueQuery(s))); if (dimension.isPresent()) { filteredAgg.subAggregation( - AggregationBuilders.terms(DIMENSION).field(dimension.get()).subAggregation(dateHistogram)); + AggregationBuilders.terms(DIMENSION) + .field(dimension.get()) + .subAggregation(dateHistogram)); } else { filteredAgg.subAggregation(dateHistogram); } @@ -99,39 +108,55 @@ public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange, Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { if (dimension.isPresent()) { - return 
aggregationResult.<Terms>get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedLine(bucket.getKeyAsString(), - extractPointsFromAggregations(bucket.getAggregations(), uniqueOn.isPresent()))) + return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedLine( + bucket.getKeyAsString(), + extractPointsFromAggregations( + bucket.getAggregations(), uniqueOn.isPresent()))) .collect(Collectors.toList()); } else { return ImmutableList.of( - new NamedLine("total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); + new NamedLine( + "total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); } } catch (Exception e) { - log.error(String.format("Caught exception while getting time series chart: %s", e.getMessage())); + log.error( + String.format("Caught exception while getting time series chart: %s", e.getMessage())); return ImmutableList.of(); } } private int extractCount(MultiBucketsAggregation.Bucket bucket, boolean didUnique) { - return didUnique ? (int) bucket.getAggregations().<Cardinality>get(UNIQUE).getValue() : (int) bucket.getDocCount(); + return didUnique + ? (int) bucket.getAggregations().<Cardinality>get(UNIQUE).getValue() + : (int) bucket.getDocCount(); } - private List<NumericDataPoint> extractPointsFromAggregations(Aggregations aggregations, boolean didUnique) { - return aggregations.<Histogram>get(DATE_HISTOGRAM).getBuckets() - .stream() - .map(bucket -> new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) + private List<NumericDataPoint> extractPointsFromAggregations( + Aggregations aggregations, boolean didUnique) { + return aggregations.<Histogram>get(DATE_HISTOGRAM).getBuckets().stream() + .map( + bucket -> + new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } - public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRange, List<String> dimensions, + public List<NamedBar> getBarChart( + String indexName, + Optional<DateRange> dateRange, + List<String> dimensions, // Length 1 or 2 - Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn, + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn, boolean showMissing) { log.debug( - String.format("Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", indexName, dateRange, - dimensions) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", + indexName, dateRange, dimensions) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); assert (dimensions.size() == 1 || dimensions.size() == 2); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); @@ -142,7 +167,8 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang } if (dimensions.size() == 2) { - TermsAggregationBuilder secondTermAgg = AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); + TermsAggregationBuilder secondTermAgg = + AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); if (showMissing) { secondTermAgg.missing(NA); } @@ -161,14 +187,24 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang List<BarSegment> barSegments = extractBarSegmentsFromAggregations(aggregationResult, DIMENSION, uniqueOn.isPresent()); 
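
Aside: the AnalyticsService hunks only rewrap long call chains, but the query shape they preserve is worth noting: a filter aggregation wrapping a terms aggregation, with hits suppressed so only buckets come back. A minimal sketch of that shape against the OpenSearch client; the index name, filter values, and field are illustrative, not introduced by this change.

    import java.util.List;
    import org.opensearch.action.search.SearchRequest;
    import org.opensearch.index.query.BoolQueryBuilder;
    import org.opensearch.index.query.QueryBuilders;
    import org.opensearch.search.aggregations.AggregationBuilder;
    import org.opensearch.search.aggregations.AggregationBuilders;
    import org.opensearch.search.builder.SearchSourceBuilder;

    public class ExampleBarChartQuery {
      public SearchRequest build(String indexName) {
        // Must-match filter, mirroring getFilteredAggregation(filters, mustNotFilters, dateRange).
        BoolQueryBuilder filtered =
            QueryBuilders.boolQuery()
                .must(QueryBuilders.termsQuery("type", List.of("SearchEvent")));
        // Terms sub-aggregation over a keyword field, as getBarChart(...) builds for DIMENSION.
        AggregationBuilder agg =
            AggregationBuilders.filter("filtered", filtered)
                .subAggregation(
                    AggregationBuilders.terms("dimension").field("entityType.keyword"));
        // size(0): return aggregation buckets only, no document hits.
        SearchSourceBuilder source = new SearchSourceBuilder().size(0).aggregation(agg);
        return new SearchRequest(indexName).source(source);
      }
    }

Extracting bucket keys and doc counts from the response then yields the bar segments, as extractBarSegmentsFromAggregations does above.
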
return barSegments.stream() - .map(segment -> new NamedBar(segment.getLabel(), - ImmutableList.of(BarSegment.builder().setLabel("Count").setValue(segment.getValue()).build()))) + .map( + segment -> + new NamedBar( + segment.getLabel(), + ImmutableList.of( + BarSegment.builder() + .setLabel("Count") + .setValue(segment.getValue()) + .build()))) .collect(Collectors.toList()); } else { - return aggregationResult.<Terms>get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedBar(bucket.getKeyAsString(), - extractBarSegmentsFromAggregations(bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) + return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedBar( + bucket.getKeyAsString(), + extractBarSegmentsFromAggregations( + bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) .collect(Collectors.toList()); } } catch (Exception e) { @@ -177,31 +213,41 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang } } - private List<BarSegment> extractBarSegmentsFromAggregations(Aggregations aggregations, String aggregationKey, - boolean didUnique) { - return aggregations.<Terms>get(aggregationKey).getBuckets() - .stream() + private List<BarSegment> extractBarSegmentsFromAggregations( + Aggregations aggregations, String aggregationKey, boolean didUnique) { + return aggregations.<Terms>get(aggregationKey).getBuckets().stream() .map(bucket -> new BarSegment(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } public Row buildRow(String groupByValue, Function<String, Cell> groupByValueToCell, int count) { List<String> values = ImmutableList.of(groupByValue, String.valueOf(count)); - List<Cell> cells = ImmutableList.of(groupByValueToCell.apply(groupByValue), - Cell.builder().setValue(String.valueOf(count)).build()); + List<Cell> cells = + ImmutableList.of( + groupByValueToCell.apply(groupByValue), + Cell.builder().setValue(String.valueOf(count)).build()); return new Row(values, cells); } - public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRange, String groupBy, - Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn, - int maxRows, Function<String, Cell> groupByValueToCell) { + public List<Row> getTopNTableChart( + String indexName, + Optional<DateRange> dateRange, + String groupBy, + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn, + int maxRows, + Function<String, Cell> groupByValueToCell) { log.debug( - String.format("Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", indexName, dateRange, - groupBy) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", + indexName, dateRange, groupBy) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); - TermsAggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); + TermsAggregationBuilder termAgg = + AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); if (uniqueOn.isPresent()) { termAgg.order(BucketOrder.aggregation(UNIQUE, false)); termAgg.subAggregation(getUniqueQuery(uniqueOn.get())); @@ -212,10 +258,13 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan Aggregations 
aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { - return aggregationResult.<Terms>get(DIMENSION).getBuckets() - .stream() - .map(bucket -> buildRow(bucket.getKeyAsString(), groupByValueToCell, - extractCount(bucket, uniqueOn.isPresent()))) + return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream() + .map( + bucket -> + buildRow( + bucket.getKeyAsString(), + groupByValueToCell, + extractCount(bucket, uniqueOn.isPresent()))) .collect(Collectors.toList()); } catch (Exception e) { log.error(String.format("Caught exception while getting top n chart: %s", e.getMessage())); @@ -223,11 +272,16 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan } } - public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<String, List<String>> filters, - Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn) { + public int getHighlights( + String indexName, + Optional<DateRange> dateRange, + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn) { log.debug( - String.format("Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + String.format( - "filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); uniqueOn.ifPresent(s -> filteredAgg.subAggregation(getUniqueQuery(s))); @@ -246,7 +300,8 @@ public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<St } } - private SearchRequest constructSearchRequest(String indexName, AggregationBuilder aggregationBuilder) { + private SearchRequest constructSearchRequest( + String indexName, AggregationBuilder aggregationBuilder) { SearchRequest searchRequest = new SearchRequest(indexName); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); @@ -257,7 +312,8 @@ private SearchRequest constructSearchRequest(String indexName, AggregationBuilde private Filter executeAndExtract(SearchRequest searchRequest) { try { - final SearchResponse searchResponse = _elasticClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _elasticClient.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well return searchResponse.getAggregations().<Filter>get(FILTERED); } catch (Exception e) { @@ -266,11 +322,14 @@ private Filter executeAndExtract(SearchRequest searchRequest) { } } - private AggregationBuilder getFilteredAggregation(Map<String, List<String>> mustFilters, - Map<String, List<String>> mustNotFilters, Optional<DateRange> dateRange) { + private AggregationBuilder getFilteredAggregation( + Map<String, List<String>> mustFilters, + Map<String, List<String>> mustNotFilters, + Optional<DateRange> dateRange) { BoolQueryBuilder filteredQuery = QueryBuilders.boolQuery(); mustFilters.forEach((key, values) -> filteredQuery.must(QueryBuilders.termsQuery(key, values))); - mustNotFilters.forEach((key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); + mustNotFilters.forEach( + (key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); dateRange.ifPresent(range -> filteredQuery.must(dateRangeQuery(range))); return AggregationBuilders.filter(FILTERED, filteredQuery); } diff 
--git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java index 42f4e25c010ef..be7f4d2f0897a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java @@ -31,16 +31,17 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - @Slf4j public class AnalyticsUtil { - private AnalyticsUtil() { - } + private AnalyticsUtil() {} public static Cell buildCellWithSearchLandingPage(String query) { Cell result = new Cell(); result.setValue(query); - result.setLinkParams(LinkParams.builder().setSearchParams(SearchParams.builder().setQuery(query).build()).build()); + result.setLinkParams( + LinkParams.builder() + .setSearchParams(SearchParams.builder().setQuery(query).build()) + .build()); return result; } @@ -50,70 +51,138 @@ public static Cell buildCellWithEntityLandingPage(String urn) { try { Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn)); result.setEntity(entity); - result.setLinkParams(LinkParams.builder() - .setEntityProfileParams(EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) - .build()); + result.setLinkParams( + LinkParams.builder() + .setEntityProfileParams( + EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) + .build()); } catch (URISyntaxException e) { log.error("Malformed urn {} in table", urn, e); } return result; } - public static void hydrateDisplayNameForBars(EntityClient entityClient, List<NamedBar> bars, String entityName, - Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName, - Authentication authentication) throws Exception { + public static void hydrateDisplayNameForBars( + EntityClient entityClient, + List<NamedBar> bars, + String entityName, + Set<String> aspectNames, + Function<EntityResponse, Optional<String>> extractDisplayName, + Authentication authentication) + throws Exception { Map<String, String> urnToDisplayName = - getUrnToDisplayName(entityClient, bars.stream().map(NamedBar::getName).collect(Collectors.toList()), entityName, - aspectNames, extractDisplayName, authentication); + getUrnToDisplayName( + entityClient, + bars.stream().map(NamedBar::getName).collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.setName(urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); + bars.forEach( + namedBar -> + namedBar.setName( + urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); } - public static void hydrateDisplayNameForSegments(EntityClient entityClient, List<NamedBar> bars, String entityName, - Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName, - Authentication authentication) throws Exception { - Map<String, String> urnToDisplayName = getUrnToDisplayName(entityClient, - bars.stream().flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)).collect(Collectors.toList()), - entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForSegments( + EntityClient entityClient, + List<NamedBar> bars, + String entityName, + Set<String> aspectNames, + 
Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
+    Map<String, String> urnToDisplayName =
+        getUrnToDisplayName(
+            entityClient,
+            bars.stream()
+                .flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel))
+                .collect(Collectors.toList()),
+            entityName,
+            aspectNames,
+            extractDisplayName,
+            authentication);
     // For each urn, try to find its name, use the urn if not found
-    bars.forEach(namedBar -> namedBar.getSegments()
-        .forEach(segment -> segment.setLabel(urnToDisplayName.getOrDefault(segment.getLabel(), segment.getLabel()))));
+    bars.forEach(
+        namedBar ->
+            namedBar
+                .getSegments()
+                .forEach(
+                    segment ->
+                        segment.setLabel(
+                            urnToDisplayName.getOrDefault(
+                                segment.getLabel(), segment.getLabel()))));
   }

-  public static void hydrateDisplayNameForTable(EntityClient entityClient, List<Row> rows, String entityName,
-      Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName,
-      Authentication authentication) throws Exception {
-    Map<String, String> urnToDisplayName = getUrnToDisplayName(entityClient, rows.stream()
-        .flatMap(row -> row.getCells().stream().filter(cell -> cell.getEntity() != null).map(Cell::getValue))
-        .collect(Collectors.toList()), entityName, aspectNames, extractDisplayName, authentication);
+  public static void hydrateDisplayNameForTable(
+      EntityClient entityClient,
+      List<Row> rows,
+      String entityName,
+      Set<String> aspectNames,
+      Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
+    Map<String, String> urnToDisplayName =
+        getUrnToDisplayName(
+            entityClient,
+            rows.stream()
+                .flatMap(
+                    row ->
+                        row.getCells().stream()
+                            .filter(cell -> cell.getEntity() != null)
+                            .map(Cell::getValue))
+                .collect(Collectors.toList()),
+            entityName,
+            aspectNames,
+            extractDisplayName,
+            authentication);
     // For each urn, try to find its name, use the urn if not found
-    rows.forEach(row -> row.getCells().forEach(cell -> {
-      if (cell.getEntity() != null) {
-        cell.setValue(urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue()));
-      }
-    }));
+    rows.forEach(
+        row ->
+            row.getCells()
+                .forEach(
+                    cell -> {
+                      if (cell.getEntity() != null) {
+                        cell.setValue(
+                            urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue()));
+                      }
+                    }));
   }

-  public static Map<String, String> getUrnToDisplayName(EntityClient entityClient, List<String> urns, String entityName,
-      Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName,
-      Authentication authentication) throws Exception {
-    Set<Urn> uniqueUrns = urns.stream().distinct().map(urnStr -> {
-      try {
-        return Urn.createFromString(urnStr);
-      } catch (URISyntaxException e) {
-        return null;
-      }
-    }).filter(Objects::nonNull).collect(Collectors.toSet());
-    Map<Urn, EntityResponse> aspects = entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication);
-    return aspects.entrySet()
-        .stream()
-        .map(entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue())))
+  public static Map<String, String> getUrnToDisplayName(
+      EntityClient entityClient,
+      List<String> urns,
+      String entityName,
+      Set<String> aspectNames,
+      Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
+    Set<Urn> uniqueUrns =
+        urns.stream()
+            .distinct()
+            .map(
+                urnStr -> {
+                  try {
+                    return Urn.createFromString(urnStr);
+                  } catch (URISyntaxException e) {
+                    return null;
+                  }
+                })
+            .filter(Objects::nonNull)
+            .collect(Collectors.toSet());
+    Map<Urn, EntityResponse> aspects =
+        entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication);
+    return aspects.entrySet().stream()
+        .map(
+            entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue())))
        .filter(pair -> pair.getValue().isPresent())
        .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().get()));
   }

   public static Optional<String> getDomainName(EntityResponse entityResponse) {
-    EnvelopedAspect domainProperties = entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME);
+    EnvelopedAspect domainProperties =
+        entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME);
     if (domainProperties == null) {
       return Optional.empty();
     }
@@ -126,13 +195,17 @@ public static Optional<String> getPlatformName(EntityResponse entityResponse) {
     if (envelopedDataPlatformInfo == null) {
       return Optional.empty();
     }
-    DataPlatformInfo dataPlatformInfo = new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data());
+    DataPlatformInfo dataPlatformInfo =
+        new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data());
     return Optional.of(
-        dataPlatformInfo.getDisplayName() == null ? dataPlatformInfo.getName() : dataPlatformInfo.getDisplayName());
+        dataPlatformInfo.getDisplayName() == null
+            ? dataPlatformInfo.getName()
+            : dataPlatformInfo.getDisplayName());
   }

   public static Optional<String> getDatasetName(EntityResponse entityResponse) {
-    EnvelopedAspect envelopedDatasetKey = entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME);
+    EnvelopedAspect envelopedDatasetKey =
+        entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME);
     if (envelopedDatasetKey == null) {
       return Optional.empty();
     }
@@ -141,7 +214,8 @@ public static Optional<String> getDatasetName(EntityResponse entityResponse) {
   }

   public static Optional<String> getTermName(EntityResponse entityResponse) {
-    EnvelopedAspect envelopedTermInfo = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME);
+    EnvelopedAspect envelopedTermInfo =
+        entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME);
     if (envelopedTermInfo != null) {
       GlossaryTermInfo glossaryTermInfo = new GlossaryTermInfo(envelopedTermInfo.getValue().data());
       if (glossaryTermInfo.hasName()) {
@@ -150,11 +224,13 @@ public static Optional<String> getTermName(EntityResponse entityResponse) {
     }

     // if name is not set on GlossaryTermInfo or there is no GlossaryTermInfo
-    EnvelopedAspect envelopedGlossaryTermKey = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME);
+    EnvelopedAspect envelopedGlossaryTermKey =
+        entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME);
     if (envelopedGlossaryTermKey == null) {
       return Optional.empty();
     }
-    GlossaryTermKey glossaryTermKey = new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data());
+    GlossaryTermKey glossaryTermKey =
+        new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data());
     return Optional.of(glossaryTermKey.getName());
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java
index 03e63c7fb472f..6ba3777d476cb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java
@@ -1,36 +1,37 @@
 package com.linkedin.datahub.graphql.authorization;

+import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authorization.AuthUtil;
-import com.datahub.plugins.auth.authorization.Authorizer;
 import com.datahub.authorization.ConjunctivePrivilegeGroup;
 import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.datahub.authorization.EntitySpec;
+import com.datahub.plugins.auth.authorization.Authorizer;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.metadata.authorization.PoliciesConfig;
-
 import java.time.Clock;
 import java.util.List;
 import java.util.Optional;
 import javax.annotation.Nonnull;

-import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 public class AuthorizationUtils {

   private static final Clock CLOCK = Clock.systemUTC();

   public static AuditStamp createAuditStamp(@Nonnull QueryContext context) {
-    return new AuditStamp().setTime(CLOCK.millis()).setActor(UrnUtils.getUrn(context.getActorUrn()));
+    return new AuditStamp()
+        .setTime(CLOCK.millis())
+        .setActor(UrnUtils.getUrn(context.getActorUrn()));
   }

   public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) {
-    return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE);
+    return isAuthorized(
+        context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE);
   }

   public static boolean canManagePolicies(@Nonnull QueryContext context) {
@@ -38,7 +39,8 @@ public static boolean canManagePolicies(@Nonnull QueryContext context) {
   }

   public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) {
-    return isAuthorized(context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE);
+    return isAuthorized(
+        context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE);
   }

   public static boolean canManageTokens(@Nonnull QueryContext context) {
@@ -46,21 +48,20 @@ public static boolean canManageTokens(@Nonnull QueryContext context) {
   }

   /**
-   * Returns true if the current used is able to create Domains. This is true if the user has the 'Manage Domains' or 'Create Domains' platform privilege.
+   * Returns true if the current user is able to create Domains. This is true if the user has the
+   * 'Manage Domains' or 'Create Domains' platform privilege.
   */
   public static boolean canCreateDomains(@Nonnull QueryContext context) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(
-        ImmutableList.of(
-            new ConjunctivePrivilegeGroup(ImmutableList.of(
-                PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())),
-            new ConjunctivePrivilegeGroup(ImmutableList.of(
-                PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType()))
-        ));
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())),
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType()))));

     return AuthorizationUtils.isAuthorized(
-        context.getAuthorizer(),
-        context.getActorUrn(),
-        orPrivilegeGroups);
+        context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups);
   }

   public static boolean canManageDomains(@Nonnull QueryContext context) {
@@ -68,21 +69,20 @@ public static boolean canManageDomains(@Nonnull QueryContext context) {
   }

   /**
-   * Returns true if the current used is able to create Tags. This is true if the user has the 'Manage Tags' or 'Create Tags' platform privilege.
+   * Returns true if the current user is able to create Tags. This is true if the user has the
+   * 'Manage Tags' or 'Create Tags' platform privilege.
   */
   public static boolean canCreateTags(@Nonnull QueryContext context) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(
-        ImmutableList.of(
-            new ConjunctivePrivilegeGroup(ImmutableList.of(
-                PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())),
-            new ConjunctivePrivilegeGroup(ImmutableList.of(
-                PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType()))
-        ));
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())),
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType()))));

     return AuthorizationUtils.isAuthorized(
-        context.getAuthorizer(),
-        context.getActorUrn(),
-        orPrivilegeGroups);
+        context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups);
   }

   public static boolean canManageTags(@Nonnull QueryContext context) {
@@ -90,48 +90,59 @@ public static boolean canManageTags(@Nonnull QueryContext context) {
   }

   public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) {
-    return isAuthorized(context, Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), PoliciesConfig.DELETE_ENTITY_PRIVILEGE);
+    return isAuthorized(
+        context,
+        Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())),
+        PoliciesConfig.DELETE_ENTITY_PRIVILEGE);
   }

   public static boolean canManageUserCredentials(@Nonnull QueryContext context) {
-    return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE);
+    return isAuthorized(
+        context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE);
   }

-  public static boolean canEditGroupMembers(@Nonnull String groupUrnStr, @Nonnull QueryContext context) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(
-        ImmutableList.of(ALL_PRIVILEGES_GROUP,
-            new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType()))));
+  public static boolean canEditGroupMembers(
+      @Nonnull String groupUrnStr, @Nonnull QueryContext context) {
+    final
DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized(context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, - groupUrnStr, orPrivilegeGroups); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + CORP_GROUP_ENTITY_NAME, + groupUrnStr, + orPrivilegeGroups); } public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageGlobalViews(@Nonnull QueryContext context) { @@ -142,31 +153,39 @@ public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) { return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); } - public static boolean canEditEntityQueries(@Nonnull List<Urn> entityUrns, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); - return entityUrns.stream().allMatch(entityUrn -> - isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups - )); - } - - public static boolean canCreateQuery(@Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { + public static boolean canEditEntityQueries( + @Nonnull List<Urn> entityUrns, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); + return entityUrns.stream() + .allMatch( + 
entityUrn -> + isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + entityUrn.getEntityType(), + entityUrn.toString(), + orPrivilegeGroups)); + } + + public static boolean canCreateQuery( + @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to create a query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canUpdateQuery(@Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { + public static boolean canUpdateQuery( + @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to update any query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canDeleteQuery(@Nonnull Urn entityUrn, @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { + public static boolean canDeleteQuery( + @Nonnull Urn entityUrn, @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to remove any query. return canEditEntityQueries(subjectUrns, context); } @@ -177,15 +196,16 @@ public static boolean isAuthorized( @Nonnull PoliciesConfig.Privilege privilege) { final Authorizer authorizer = context.getAuthorizer(); final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + final ConjunctivePrivilegeGroup andGroup = + new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); + return AuthUtil.isAuthorized( + authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); } public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { return AuthUtil.isAuthorized(authorizer, actor, Optional.empty(), privilegeGroup); } @@ -194,13 +214,10 @@ public static boolean isAuthorized( @Nonnull String actor, @Nonnull String resourceType, @Nonnull String resource, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); return AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); } - private AuthorizationUtils() { } - + private AuthorizationUtils() {} } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java index a09dc8741cd29..69e0ed0625b2f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. 
*/
 public class AuthenticationException extends GraphQLException {

-    public AuthenticationException(String message) {
-        super(message);
-    }
+  public AuthenticationException(String message) {
+    super(message);
+  }

-    public AuthenticationException(String message, Throwable cause) {
-        super(message, cause);
-    }
+  public AuthenticationException(String message, Throwable cause) {
+    super(message, cause);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java
index 803af09e079d1..30568e45938c1 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java
@@ -1,9 +1,6 @@
 package com.linkedin.datahub.graphql.exception;

-
-/**
- * Exception thrown when authentication fails.
- */
+/** Exception thrown when authorization fails. */
 public class AuthorizationException extends DataHubGraphQLException {

   public AuthorizationException(String message) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java
index 8d3f5d5cea9eb..7c3ea1d581b6e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java
@@ -13,7 +13,8 @@ public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler {

   @Override
-  public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) {
+  public DataFetcherExceptionHandlerResult onException(
+      DataFetcherExceptionHandlerParameters handlerParameters) {
     Throwable exception = handlerParameters.getException();
     SourceLocation sourceLocation = handlerParameters.getSourceLocation();
     ResultPath path = handlerParameters.getPath();
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java
index 15c539a608cc0..f007a8b7c7adb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.exception;

+import static graphql.Assert.*;
+
 import graphql.ErrorType;
 import graphql.GraphQLError;
 import graphql.GraphqlErrorHelper;
@@ -11,9 +13,6 @@
 import java.util.List;
 import java.util.Map;

-import static graphql.Assert.*;
-
-
 @PublicApi
 public class DataHubGraphQLError implements GraphQLError {

@@ -23,7 +22,11 @@ public class DataHubGraphQLError implements GraphQLError {
   private final List<SourceLocation> locations;
   private final Map<String, Object> extensions;

-  public DataHubGraphQLError(String message, ResultPath path, SourceLocation sourceLocation, DataHubGraphQLErrorCode errorCode) {
+  public DataHubGraphQLError(
+      String message,
+      ResultPath path,
+      SourceLocation sourceLocation,
+      DataHubGraphQLErrorCode errorCode) {
     this.path = assertNotNull(path).toList();
     this.errorCode =
assertNotNull(errorCode); this.locations = Collections.singletonList(sourceLocation); @@ -90,4 +93,3 @@ public int hashCode() { return GraphqlErrorHelper.hashCode(this); } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java index 3d3c54e2febb2..75096a8c4148e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java @@ -2,7 +2,6 @@ import graphql.GraphQLException; - public class DataHubGraphQLException extends GraphQLException { private final DataHubGraphQLErrorCode code; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java index 2ee9838af5428..87a1aebb02f2e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when an unexpected value is provided by the client. - */ +/** Exception thrown when an unexpected value is provided by the client. */ public class ValidationException extends GraphQLException { - public ValidationException(String message) { - super(message); - } + public ValidationException(String message) { + super(message); + } - public ValidationException(String message, Throwable cause) { - super(message, cause); - } + public ValidationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 4d6133f18df05..07bd1fba5d8a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.PreProcessHooks; import lombok.Data; - @Data public class FeatureFlags { private boolean showSimplifiedHomepageByDefault = false; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java index e228cb8445c02..9faf00e0211bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java @@ -1,26 +1,25 @@ package com.linkedin.datahub.graphql.resolvers; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthorizationRequest; +import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import java.util.List; import java.util.Optional; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; 
-import com.datahub.plugins.auth.authorization.Authorizer; public class AuthUtils { - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean isAuthorized( - String principal, - List<String> privilegeGroup, - Authorizer authorizer) { + String principal, List<String> privilegeGroup, Authorizer authorizer) { for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = new AuthorizationRequest(principal, privilege, Optional.empty()); + final AuthorizationRequest request = + new AuthorizationRequest(principal, privilege, Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { return false; @@ -29,6 +28,5 @@ public static boolean isAuthorized( return true; } - - private AuthUtils() { } + private AuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java index 2520b55c24e25..570ea322be7a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java @@ -2,29 +2,28 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthenticationException; - import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - /** - * Checks whether the user is currently authenticated & if so delegates execution to a child resolver. + * Checks whether the user is currently authenticated & if so delegates execution to a child + * resolver. 
*/ @Deprecated public final class AuthenticatedResolver<T> implements DataFetcher<T> { - private final DataFetcher<T> _resolver; + private final DataFetcher<T> _resolver; - public AuthenticatedResolver(final DataFetcher<T> resolver) { - _resolver = resolver; - } + public AuthenticatedResolver(final DataFetcher<T> resolver) { + _resolver = resolver; + } - @Override - public final T get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); - if (context.isAuthenticated()) { - return _resolver.get(environment); - } - throw new AuthenticationException("Failed to authenticate the current user."); + @Override + public final T get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + if (context.isAuthenticated()) { + return _resolver.get(environment); } + throw new AuthenticationException("Failed to authenticate the current user."); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java index 930c98ee7113a..5ab07701c15a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java @@ -2,18 +2,17 @@ import com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; -import org.dataloader.DataLoader; -import org.dataloader.DataLoaderRegistry; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; public class BatchLoadUtils { - private BatchLoadUtils() { } + private BatchLoadUtils() {} public static CompletableFuture<List<Entity>> batchLoadEntitiesOfSameType( List<Entity> entities, @@ -24,9 +23,10 @@ public static CompletableFuture<List<Entity>> batchLoadEntitiesOfSameType( } // Assume all entities are of the same type final com.linkedin.datahub.graphql.types.EntityType filteredEntity = - Iterables.getOnlyElement(entityTypes.stream() - .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) - .collect(Collectors.toList())); + Iterables.getOnlyElement( + entityTypes.stream() + .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) + .collect(Collectors.toList())); final DataLoader loader = dataLoaderRegistry.getDataLoader(filteredEntity.name()); List keyList = new ArrayList(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java index b0f23e63177e6..aba781f9e1dc7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java @@ -7,9 +7,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service Storage Entities + * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service + * Storage Entities */ public class EntityTypeMapper { @@ -44,10 +44,10 @@ public class 
EntityTypeMapper { .build(); private static final Map<String, EntityType> ENTITY_NAME_TO_TYPE = - ENTITY_TYPE_TO_NAME.entrySet().stream().collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); + ENTITY_TYPE_TO_NAME.entrySet().stream() + .collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); - private EntityTypeMapper() { - } + private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index 02921b453e315..b480e287adb9b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -23,17 +26,12 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * GraphQL resolver responsible for resolving information about the currently - * logged in User, including - * - * 1. User profile information - * 2. User privilege information, i.e. which features to display in the UI. + * GraphQL resolver responsible for resolving information about the currently logged in User, + * including * + * <p>1. User profile information 2. User privilege information, i.e. which features to display in + * the UI. */ public class MeResolver implements DataFetcher<CompletableFuture<AuthenticatedUser>> { @@ -48,114 +46,123 @@ public MeResolver(final EntityClient entityClient, final FeatureFlags featureFla @Override public CompletableFuture<AuthenticatedUser> get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - // 1. Get currently logged in user profile. - final Urn userUrn = Urn.createFromString(context.getActorUrn()); - final EntityResponse gmsUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, - Collections.singleton(userUrn), null, context.getAuthentication()).get(userUrn); - final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); - - // 2. 
Get platform privileges - final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); - platformPrivileges.setViewAnalytics(canViewAnalytics(context)); - platformPrivileges.setManagePolicies(canManagePolicies(context)); - platformPrivileges.setManageIdentities(canManageUsersGroups(context)); - platformPrivileges.setGeneratePersonalAccessTokens(canGeneratePersonalAccessToken(context)); - platformPrivileges.setManageDomains(canManageDomains(context)); - platformPrivileges.setManageIngestion(canManageIngestion(context)); - platformPrivileges.setManageSecrets(canManageSecrets(context)); - platformPrivileges.setManageTokens(canManageTokens(context)); - platformPrivileges.setManageTests(canManageTests(context)); - platformPrivileges.setManageGlossaries(canManageGlossaries(context)); - platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); - platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); - platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); - platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); - platformPrivileges.setManageGlobalViews(AuthorizationUtils.canManageGlobalViews(context)); - platformPrivileges.setManageOwnershipTypes(AuthorizationUtils.canManageOwnershipTypes(context)); - platformPrivileges.setManageGlobalAnnouncements(AuthorizationUtils.canManageGlobalAnnouncements(context)); - - // Construct and return authenticated user object. - final AuthenticatedUser authUser = new AuthenticatedUser(); - authUser.setCorpUser(corpUser); - authUser.setPlatformPrivileges(platformPrivileges); - return authUser; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to fetch authenticated user!", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // 1. Get currently logged in user profile. + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + final EntityResponse gmsUser = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + null, + context.getAuthentication()) + .get(userUrn); + final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); + + // 2. 
Get platform privileges + final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); + platformPrivileges.setViewAnalytics(canViewAnalytics(context)); + platformPrivileges.setManagePolicies(canManagePolicies(context)); + platformPrivileges.setManageIdentities(canManageUsersGroups(context)); + platformPrivileges.setGeneratePersonalAccessTokens( + canGeneratePersonalAccessToken(context)); + platformPrivileges.setManageDomains(canManageDomains(context)); + platformPrivileges.setManageIngestion(canManageIngestion(context)); + platformPrivileges.setManageSecrets(canManageSecrets(context)); + platformPrivileges.setManageTokens(canManageTokens(context)); + platformPrivileges.setManageTests(canManageTests(context)); + platformPrivileges.setManageGlossaries(canManageGlossaries(context)); + platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); + platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); + platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); + platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); + platformPrivileges.setManageGlobalViews( + AuthorizationUtils.canManageGlobalViews(context)); + platformPrivileges.setManageOwnershipTypes( + AuthorizationUtils.canManageOwnershipTypes(context)); + platformPrivileges.setManageGlobalAnnouncements( + AuthorizationUtils.canManageGlobalAnnouncements(context)); + + // Construct and return authenticated user object. + final AuthenticatedUser authUser = new AuthenticatedUser(); + authUser.setCorpUser(corpUser); + authUser.setPlatformPrivileges(platformPrivileges); + return authUser; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to fetch authenticated user!", e); + } + }); } - /** - * Returns true if the authenticated user has privileges to view analytics. - */ + /** Returns true if the authenticated user has privileges to view analytics. */ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage policies analytics. - */ + /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage users & groups. - */ + /** Returns true if the authenticated user has privileges to manage users & groups. 
*/ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to generate personal access tokens - */ + /** Returns true if the authenticated user has privileges to generate personal access tokens */ private boolean canGeneratePersonalAccessToken(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage (add or remove) tests. - */ + /** Returns true if the authenticated user has privileges to manage (add or remove) tests. */ private boolean canManageTests(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage domains - */ + /** Returns true if the authenticated user has privileges to manage domains */ private boolean canManageDomains(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage access tokens - */ + /** Returns true if the authenticated user has privileges to manage access tokens */ private boolean canManageTokens(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); } - /** - * Returns true if the authenticated user has privileges to manage glossaries - */ + /** Returns true if the authenticated user has privileges to manage glossaries */ private boolean canManageGlossaries(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage user credentials - */ + /** Returns true if the authenticated user has privileges to manage user credentials */ private boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } /** * Returns true if the provided actor is authorized for a particular privilege, false otherwise. 
*/ - private boolean isAuthorized(final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); + private boolean isAuthorized( + final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); return AuthorizationResult.Type.ALLOW.equals(result.getType()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java index 2c2e71ee92eaa..244012d320b43 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -30,184 +32,198 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.metadata.Constants.*; - - public class ResolverUtils { - private static final Set<String> KEYWORD_EXCLUDED_FILTERS = ImmutableSet.of( - "runId", - "_entityType" - ); - private static final ObjectMapper MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final Set<String> KEYWORD_EXCLUDED_FILTERS = + ImmutableSet.of("runId", "_entityType"); + private static final ObjectMapper MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); + + private ResolverUtils() {} + + @Nonnull + public static <T> T bindArgument(Object argument, Class<T> clazz) { + return MAPPER.convertValue(argument, clazz); + } + + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } - - private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); - - private ResolverUtils() { } - - @Nonnull - public static <T> T bindArgument(Object argument, Class<T> clazz) { - return MAPPER.convertValue(argument, clazz); + return input; + } + + @Nonnull + public static Authentication getAuthentication(DataFetchingEnvironment environment) { + return ((QueryContext) environment.getContext()).getAuthentication(); + } + + /** + * @apiNote DO NOT use this method if the facet filters do not include 
`.keyword` suffix to ensure + * that it is matched against a keyword filter in ElasticSearch. + * @param facetFilterInputs The list of facet filters inputs + * @param validFacetFields The set of valid fields against which to filter for. + * @return A map of filter definitions to be used in ElasticSearch. + */ + @Nonnull + public static Map<String, String> buildFacetFilters( + @Nullable List<FacetFilterInput> facetFilterInputs, @Nonnull Set<String> validFacetFields) { + if (facetFilterInputs == null) { + return Collections.emptyMap(); } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; - } - - @Nonnull - public static Authentication getAuthentication(DataFetchingEnvironment environment) { - return ((QueryContext) environment.getContext()).getAuthentication(); - } - - /** - * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure - * that it is matched against a keyword filter in ElasticSearch. - * - * @param facetFilterInputs The list of facet filters inputs - * @param validFacetFields The set of valid fields against which to filter for. - * @return A map of filter definitions to be used in ElasticSearch. - */ - @Nonnull - public static Map<String, String> buildFacetFilters(@Nullable List<FacetFilterInput> facetFilterInputs, - @Nonnull Set<String> validFacetFields) { - if (facetFilterInputs == null) { - return Collections.emptyMap(); - } - - final Map<String, String> facetFilters = new HashMap<>(); - - facetFilterInputs.forEach(facetFilterInput -> { - if (!validFacetFields.contains(facetFilterInput.getField())) { - throw new ValidationException(String.format("Unrecognized facet with name %s provided", facetFilterInput.getField())); - } - if (!facetFilterInput.getValues().isEmpty()) { - facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); - } + final Map<String, String> facetFilters = new HashMap<>(); + + facetFilterInputs.forEach( + facetFilterInput -> { + if (!validFacetFields.contains(facetFilterInput.getField())) { + throw new ValidationException( + String.format( + "Unrecognized facet with name %s provided", facetFilterInput.getField())); + } + if (!facetFilterInput.getValues().isEmpty()) { + facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); + } }); - return facetFilters; - } + return facetFilters; + } - public static List<Criterion> criterionListFromAndFilter(List<FacetFilterInput> andFilters) { - return andFilters != null && !andFilters.isEmpty() - ? andFilters.stream() + public static List<Criterion> criterionListFromAndFilter(List<FacetFilterInput> andFilters) { + return andFilters != null && !andFilters.isEmpty() + ? andFilters.stream() .map(filter -> criterionFromFilter(filter)) - .collect(Collectors.toList()) : Collections.emptyList(); + .collect(Collectors.toList()) + : Collections.emptyList(); + } + + // In the case that user sends filters to be or-d together, we need to build a series of + // conjunctive criterion + // arrays, rather than just one for the AND case. 
+ public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( + @Nonnull List<AndFilterInput> orFilters) { + return new ConjunctiveCriterionArray( + orFilters.stream() + .map( + orFilter -> { + CriterionArray andCriterionForOr = + new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); + return new ConjunctiveCriterion().setAnd(andCriterionForOr); + }) + .collect(Collectors.toList())); + } + + @Nullable + public static Filter buildFilter( + @Nullable List<FacetFilterInput> andFilters, @Nullable List<AndFilterInput> orFilters) { + if ((andFilters == null || andFilters.isEmpty()) + && (orFilters == null || orFilters.isEmpty())) { + return null; + } + // Or filters are the new default. We will check them first. + // If we have OR filters, we need to build a series of CriterionArrays + if (orFilters != null && !orFilters.isEmpty()) { + return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); } - // In the case that user sends filters to be or-d together, we need to build a series of conjunctive criterion - // arrays, rather than just one for the AND case. - public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( - @Nonnull List<AndFilterInput> orFilters - ) { - return new ConjunctiveCriterionArray(orFilters.stream().map(orFilter -> { - CriterionArray andCriterionForOr = new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); - return new ConjunctiveCriterion().setAnd( - andCriterionForOr - ); - } - ).collect(Collectors.toList())); + // If or filters are not set, someone may be using the legacy and filters + final List<Criterion> andCriterions = criterionListFromAndFilter(andFilters); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + } + + public static Criterion criterionFromFilter(final FacetFilterInput filter) { + return criterionFromFilter(filter, false); + } + + // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) + public static Criterion criterionFromFilter( + final FacetFilterInput filter, final Boolean skipKeywordSuffix) { + Criterion result = new Criterion(); + + if (skipKeywordSuffix) { + result.setField(filter.getField()); + } else { + result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); } - @Nullable - public static Filter buildFilter(@Nullable List<FacetFilterInput> andFilters, @Nullable List<AndFilterInput> orFilters) { - if ((andFilters == null || andFilters.isEmpty()) && (orFilters == null || orFilters.isEmpty())) { - return null; - } - - // Or filters are the new default. We will check them first. - // If we have OR filters, we need to build a series of CriterionArrays - if (orFilters != null && !orFilters.isEmpty()) { - return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); - } - - // If or filters are not set, someone may be using the legacy and filters - final List<Criterion> andCriterions = criterionListFromAndFilter(andFilters); - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + // `value` is deprecated in place of `values`- this is to support old query patterns. 
If values + // is provided, + // this statement will be skipped + if (filter.getValues() == null && filter.getValue() != null) { + result.setValues(new StringArray(filter.getValue())); + result.setValue(filter.getValue()); + } else if (filter.getValues() != null) { + result.setValues(new StringArray(filter.getValues())); + if (!filter.getValues().isEmpty()) { + result.setValue(filter.getValues().get(0)); + } else { + result.setValue(""); + } + } else { + result.setValues(new StringArray()); + result.setValue(""); } - public static Criterion criterionFromFilter(final FacetFilterInput filter) { - return criterionFromFilter(filter, false); + if (filter.getCondition() != null) { + result.setCondition(Condition.valueOf(filter.getCondition().toString())); + } else { + result.setCondition(Condition.EQUAL); } - // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) - public static Criterion criterionFromFilter(final FacetFilterInput filter, final Boolean skipKeywordSuffix) { - Criterion result = new Criterion(); - - if (skipKeywordSuffix) { - result.setField(filter.getField()); - } else { - result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); - } - - // `value` is deprecated in place of `values`- this is to support old query patterns. If values is provided, - // this statement will be skipped - if (filter.getValues() == null && filter.getValue() != null) { - result.setValues(new StringArray(filter.getValue())); - result.setValue(filter.getValue()); - } else if (filter.getValues() != null) { - result.setValues(new StringArray(filter.getValues())); - if (!filter.getValues().isEmpty()) { - result.setValue(filter.getValues().get(0)); - } else { - result.setValue(""); - } - } else { - result.setValues(new StringArray()); - result.setValue(""); - } - - - if (filter.getCondition() != null) { - result.setCondition(Condition.valueOf(filter.getCondition().toString())); - } else { - result.setCondition(Condition.EQUAL); - } - - if (filter.getNegated() != null) { - result.setNegated(filter.getNegated()); - } - - return result; + if (filter.getNegated() != null) { + result.setNegated(filter.getNegated()); } - private static String getFilterField(final String originalField, final boolean skipKeywordSuffix) { - if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { - return originalField; - } - return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + return result; + } + + private static String getFilterField( + final String originalField, final boolean skipKeywordSuffix) { + if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { + return originalField; } + return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + } - public static Filter buildFilterWithUrns(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") + public static Filter buildFilterWithUrns(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) { + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); - if (inputFilters == null) { - return QueryUtils.newFilter(urnMatchCriterion); - } - - // Add urn match criterion to each or clause - if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { - for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { - conjunctiveCriterion.getAnd().add(urnMatchCriterion); - } - return inputFilters; - } - return 
QueryUtils.newFilter(urnMatchCriterion); + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + if (inputFilters == null) { + return QueryUtils.newFilter(urnMatchCriterion); + } + + // Add urn match criterion to each or clause + if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { + for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { + conjunctiveCriterion.getAnd().add(urnMatchCriterion); + } + return inputFilters; } + return QueryUtils.newFilter(urnMatchCriterion); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index b5b13cc00b40d..2a074b950d0ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; @@ -26,13 +28,9 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * GraphQL Resolver used for fetching AssertionRunEvents. - */ -public class AssertionRunEventResolver implements DataFetcher<CompletableFuture<AssertionRunEventsResult>> { +/** GraphQL Resolver used for fetching AssertionRunEvents. */ +public class AssertionRunEventResolver + implements DataFetcher<CompletableFuture<AssertionRunEventsResult>> { private final EntityClient _client; @@ -42,58 +40,72 @@ public AssertionRunEventResolver(final EntityClient client) { @Override public CompletableFuture<AssertionRunEventsResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String urn = ((Assertion) environment.getSource()).getUrn(); - final String maybeStatus = environment.getArgumentOrDefault("status", null); - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; + final String urn = ((Assertion) environment.getSource()).getUrn(); + final String maybeStatus = environment.getArgumentOrDefault("status", null); + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; - try { - // Step 1: Fetch aspects from GMS - List<EnvelopedAspect> aspects = _client.getTimeseriesAspectValues( - urn, - Constants.ASSERTION_ENTITY_NAME, - Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - buildFilter(maybeFilters, maybeStatus), - context.getAuthentication()); + try { + // Step 1: Fetch aspects from GMS + List<EnvelopedAspect> aspects = + _client.getTimeseriesAspectValues( + urn, + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilter(maybeFilters, maybeStatus), + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - List<AssertionRunEvent> runEvents = aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + // Step 2: Bind profiles into GraphQL strong types. + List<AssertionRunEvent> runEvents = + aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); - // Step 3: Package and return response. - final AssertionRunEventsResult result = new AssertionRunEventsResult(); - result.setTotal(runEvents.size()); - result.setFailed(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.FAILURE.equals( - runEvent.getResult().getType() - )).count())); - result.setSucceeded(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.SUCCESS.equals(runEvent.getResult().getType() - )).count())); - result.setRunEvents(runEvents); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 3: Package and return response. 
+ final AssertionRunEventsResult result = new AssertionRunEventsResult(); + result.setTotal(runEvents.size()); + result.setFailed( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.FAILURE.equals( + runEvent.getResult().getType())) + .count())); + result.setSucceeded( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.SUCCESS.equals( + runEvent.getResult().getType())) + .count())); + result.setRunEvents(runEvents); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } @Nullable - public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable final String status) { + public static Filter buildFilter( + @Nullable FilterInput filtersInput, @Nullable final String status) { if (filtersInput == null && status == null) { return null; } @@ -107,8 +119,14 @@ public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable f if (filtersInput != null) { facetFilters.addAll(filtersInput.getAnd()); } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilters.stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + facetFilters.stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 8006ae7d2a464..89912b2814e40 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -19,63 +19,76 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver that deletes an Assertion. - */ +/** GraphQL Resolver that deletes an Assertion. 
*/ @Slf4j -public class DeleteAssertionResolver implements DataFetcher<CompletableFuture<Boolean>> { +public class DeleteAssertionResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteAssertionResolver(final EntityClient entityClient, final EntityService entityService) { + public DeleteAssertionResolver( + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn assertionUrn = Urn.createFromString(environment.getArgument("urn")); - return CompletableFuture.supplyAsync(() -> { - - // 1. check the entity exists. If not, return false. - if (!_entityService.exists(assertionUrn)) { - return true; - } - - if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { - try { - _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(assertionUrn, context.getAuthentication()); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for assertion with urn %s", assertionUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. check the entity exists. If not, return false. + if (!_entityService.exists(assertionUrn)) { return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against assertion with urn %s", assertionUrn), e); } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + + if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { + try { + _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + assertionUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for assertion with urn %s", + assertionUrn), + e); + } + }); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against assertion with urn %s", assertionUrn), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - /** - * Determine whether the current user is allowed to remove an assertion. - */ - private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final Urn assertionUrn) { + /** Determine whether the current user is allowed to remove an assertion. */ + private boolean isAuthorizedToDeleteAssertion( + final QueryContext context, final Urn assertionUrn) { // 2. 
fetch the assertion info AssertionInfo info = - (AssertionInfo) EntityUtils.getAspectFromEntity( - assertionUrn.toString(), Constants.ASSERTION_INFO_ASPECT_NAME, _entityService, null); + (AssertionInfo) + EntityUtils.getAspectFromEntity( + assertionUrn.toString(), + Constants.ASSERTION_INFO_ASPECT_NAME, + _entityService, + null); if (info != null) { // 3. check whether the actor has permission to edit the assertions on the assertee @@ -86,11 +99,14 @@ private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final return true; } - private boolean isAuthorizedToDeleteAssertionFromAssertee(final QueryContext context, final Urn asserteeUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToDeleteAssertionFromAssertee( + final QueryContext context, final Urn asserteeUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -104,7 +120,8 @@ private Urn getAsserteeUrnFromInfo(final AssertionInfo info) { case DATASET: return info.getDatasetAssertion().getDataset(); default: - throw new RuntimeException(String.format("Unsupported Assertion Type %s provided", info.getType())); + throw new RuntimeException( + String.format("Unsupported Assertion Type %s provided", info.getType())); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index ff573bb59fba1..9814589df7651 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -26,11 +26,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of Assertions associated with an Entity. - */ -public class EntityAssertionsResolver implements DataFetcher<CompletableFuture<EntityAssertionsResult>> { +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. 
*/ +public class EntityAssertionsResolver + implements DataFetcher<CompletableFuture<EntityAssertionsResult>> { private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; @@ -44,54 +42,60 @@ public EntityAssertionsResolver(final EntityClient entityClient, final GraphClie @Override public CompletableFuture<EntityAssertionsResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 200); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 200); - try { - // Step 1: Fetch set of assertions associated with the target entity from the Graph Store - final EntityRelationships relationships = _graphClient.getRelatedEntities( - entityUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - start, - count, - context.getActorUrn() - ); + try { + // Step 1: Fetch set of assertions associated with the target entity from the Graph + // Store + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + start, + count, + context.getActorUrn()); - final List<Urn> assertionUrns = relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + final List<Urn> assertionUrns = + relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the assertion entities based on the urns from step 1 - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the assertion entities based on the urns from step 1 + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS assertion model to GraphQL model - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List<Assertion> assertions = gmsResults.stream() - .filter(Objects::nonNull) - .map(AssertionMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS assertion model to GraphQL model + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List<Assertion> assertions = + gmsResults.stream() + .filter(Objects::nonNull) + .map(AssertionMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final EntityAssertionsResult result = new EntityAssertionsResult(); - result.setCount(relationships.getCount()); - result.setStart(relationships.getStart()); - result.setTotal(relationships.getTotal()); - result.setAssertions(assertions); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new 
RuntimeException("Failed to retrieve Assertion Run Events from GMS", e);
-      }
-    });
+            // Step 4: Package and return result
+            final EntityAssertionsResult result = new EntityAssertionsResult();
+            result.setCount(relationships.getCount());
+            result.setStart(relationships.getStart());
+            result.setTotal(relationships.getTotal());
+            result.setAssertions(assertions);
+            return result;
+          } catch (URISyntaxException | RemoteInvocationException e) {
+            throw new RuntimeException("Failed to retrieve Assertions from GMS", e);
+          }
+        });
   }
-} \ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java
index 8f5be1000bb45..9015ad0ebb210 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java
@@ -5,13 +5,9 @@
 import java.time.temporal.ChronoUnit;
 import java.util.Optional;
 
-
-
 public class AccessTokenUtil {
 
-  /**
-   * Convert an {@link AccessTokenDuration} into its milliseconds equivalent.
-   */
+  /** Convert an {@link AccessTokenDuration} into its milliseconds equivalent. */
   public static Optional<Long> mapDurationToMs(final AccessTokenDuration duration) {
     switch (duration) {
       case ONE_HOUR:
@@ -29,9 +25,10 @@ public static Optional<Long> mapDurationToMs(final AccessTokenDuration duration)
       case NO_EXPIRY:
         return Optional.empty();
       default:
-        throw new RuntimeException(String.format("Unrecognized access token duration %s provided", duration));
+        throw new RuntimeException(
+            String.format("Unrecognized access token duration %s provided", duration));
     }
   }
 
-  private AccessTokenUtil() { }
+  private AccessTokenUtil() {}
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java
index cd55d81aec6ad..14a1b9a1f7a01 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.auth;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.token.StatefulTokenService;
@@ -10,10 +12,10 @@
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.AccessToken;
+import com.linkedin.datahub.graphql.generated.AccessTokenMetadata;
 import com.linkedin.datahub.graphql.generated.AccessTokenType;
 import com.linkedin.datahub.graphql.generated.CreateAccessTokenInput;
 import com.linkedin.datahub.graphql.generated.EntityType;
-import com.linkedin.datahub.graphql.generated.AccessTokenMetadata;
 import com.linkedin.metadata.Constants;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
@@ -22,12 +24,7 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver for creating
personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for creating personal & service principal v2-type (stateful) access tokens. */ @Slf4j public class CreateAccessTokenResolver implements DataFetcher<CompletableFuture<AccessToken>> { @@ -38,62 +35,85 @@ public CreateAccessTokenResolver(final StatefulTokenService statefulTokenService } @Override - public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final CreateAccessTokenInput input = bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); - - log.info("User {} requesting new access token for user {} ", context.getActorUrn(), input.getActorUrn()); - - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Date date = new Date(); - final long createdAtInMs = date.getTime(); - final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - - final String tokenName = input.getName(); - final String tokenDescription = input.getDescription(); - - final String accessToken = - _statefulTokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null), - createdAtInMs, tokenName, tokenDescription, context.getActorUrn()); - log.info("Generated access token for {} of type {} with duration {}", input.getActorUrn(), input.getType(), - input.getDuration()); - try { - final String tokenHash = _statefulTokenService.hash(accessToken); - - final AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - result.setMetadata(metadata); - - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new access token with name %s", input.getName()), - e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final CreateAccessTokenInput input = + bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); + + log.info( + "User {} requesting new access token for user {} ", + context.getActorUrn(), + input.getActorUrn()); + + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. 
+ final String actorUrn = input.getActorUrn(); + final Date date = new Date(); + final long createdAtInMs = date.getTime(); + final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + + final String tokenName = input.getName(); + final String tokenDescription = input.getDescription(); + + final String accessToken = + _statefulTokenService.generateAccessToken( + type, + createActor(input.getType(), actorUrn), + expiresInMs.orElse(null), + createdAtInMs, + tokenName, + tokenDescription, + context.getActorUrn()); + log.info( + "Generated access token for {} of type {} with duration {}", + input.getActorUrn(), + input.getType(), + input.getDuration()); + try { + final String tokenHash = _statefulTokenService.hash(accessToken); + + final AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn( + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + result.setMetadata(metadata); + + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new access token with name %s", input.getName()), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final CreateAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final CreateAccessTokenInput input) { if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final CreateAccessTokenInput input) { + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final CreateAccessTokenInput input) { return AuthorizationUtils.canManageTokens(context) - || input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + || input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -101,6 +121,7 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java index 5ac4ec8ac3a6b..aed6bd6cb98af 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatelessTokenService; @@ -18,12 +20,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for generating personal & service principal access tokens - */ +/** Resolver for generating personal & service principal access tokens */ @Slf4j public class GetAccessTokenResolver implements DataFetcher<CompletableFuture<AccessToken>> { @@ -34,39 +31,49 @@ public GetAccessTokenResolver(final StatelessTokenService tokenService) { } @Override - public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final GetAccessTokenInput input = bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); + public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final GetAccessTokenInput input = + bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - final String accessToken = - _tokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); - AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - return result; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. 
+ final String actorUrn = input.getActorUrn(); + final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + final String accessToken = + _tokenService.generateAccessToken( + type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); + AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + return result; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final GetAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final GetAccessTokenInput input) { // Currently only an actor can generate a personal token for themselves. if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final GetAccessTokenInput input) { - return input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final GetAccessTokenInput input) { + return input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -74,14 +81,16 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, createUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } private Urn createUrn(final String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to validate provided urn %s", urnStr)); + throw new IllegalArgumentException( + String.format("Failed to validate provided urn %s", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index f9ba552d349e0..5cfa80e394c5f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,14 +25,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for listing personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for listing personal & service principal v2-type (stateful) access tokens. */ @Slf4j -public class ListAccessTokensResolver implements DataFetcher<CompletableFuture<ListAccessTokenResult>> { +public class ListAccessTokensResolver + implements DataFetcher<CompletableFuture<ListAccessTokenResult>> { private static final String EXPIRES_AT_FIELD_NAME = "expiresAt"; @@ -41,60 +39,87 @@ public ListAccessTokensResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListAccessTokenResult> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final ListAccessTokenInput input = bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - final List<FacetFilterInput> filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + public CompletableFuture<ListAccessTokenResult> get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final ListAccessTokenInput input = + bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + final List<FacetFilterInput> filters = + input.getFilters() == null ? 
Collections.emptyList() : input.getFilters(); - log.info("User {} listing access tokens with filters {}", context.getActorUrn(), filters.toString()); + log.info( + "User {} listing access tokens with filters {}", + context.getActorUrn(), + filters.toString()); - if (AuthorizationUtils.canManageTokens(context) || isListingSelfTokens(filters, context)) { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(EXPIRES_AT_FIELD_NAME).setOrder(SortOrder.DESCENDING); - final SearchResult searchResult = _entityClient.search(Constants.ACCESS_TOKEN_ENTITY_NAME, "", - buildFilter(filters, Collections.emptyList()), sortCriterion, start, count, - getAuthentication(environment), new SearchFlags().setFulltext(true)); + if (AuthorizationUtils.canManageTokens(context) + || isListingSelfTokens(filters, context)) { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(EXPIRES_AT_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); + final SearchResult searchResult = + _entityClient.search( + Constants.ACCESS_TOKEN_ENTITY_NAME, + "", + buildFilter(filters, Collections.emptyList()), + sortCriterion, + start, + count, + getAuthentication(environment), + new SearchFlags().setFulltext(true)); - final List<AccessTokenMetadata> tokens = searchResult.getEntities().stream().map(entity -> { - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(entity.getEntity().toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - return metadata; - }).collect(Collectors.toList()); + final List<AccessTokenMetadata> tokens = + searchResult.getEntities().stream() + .map( + entity -> { + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn(entity.getEntity().toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + return metadata; + }) + .collect(Collectors.toList()); - final ListAccessTokenResult result = new ListAccessTokenResult(); - result.setTokens(tokens); - result.setStart(searchResult.getFrom()); - result.setCount(searchResult.getPageSize()); - result.setTotal(searchResult.getNumEntities()); + final ListAccessTokenResult result = new ListAccessTokenResult(); + result.setTokens(tokens); + result.setStart(searchResult.getFrom()); + result.setCount(searchResult.getPageSize()); + result.setTotal(searchResult.getNumEntities()); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list access tokens", e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list access tokens", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Utility method to answer: Does the existing security context have permissions to generate their personal tokens - * AND is the request coming in requesting those personal tokens? - * <p> - * Note: We look for the actorUrn field because a token generated by someone else means that the generator actor has - * manage all access token privileges which means that he/she will be bound to just listing their own tokens. + * Utility method to answer: Does the existing security context have permissions to generate their + * personal tokens AND is the request coming in requesting those personal tokens? 
+   *
+   * <p>Note: We look for the actorUrn field because a token generated by someone else means that
+   * the generator actor holds the manage-all-access-tokens privilege, which means they will be
+   * limited to listing only their own tokens.
    *
    * @param filters The filters being used in the request.
    * @param context Current security context.
    * @return A boolean stating if the current user can list its personal tokens.
    */
-  private boolean isListingSelfTokens(final List<FacetFilterInput> filters, final QueryContext context) {
-    return AuthorizationUtils.canGeneratePersonalAccessToken(context) && filters.stream()
-        .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValues().equals(ImmutableList.of(context.getActorUrn())));
+  private boolean isListingSelfTokens(
+      final List<FacetFilterInput> filters, final QueryContext context) {
+    return AuthorizationUtils.canGeneratePersonalAccessToken(context)
+        && filters.stream()
+            .anyMatch(
+                filter ->
+                    filter.getField().equals("ownerUrn")
+                        && filter.getValues().equals(ImmutableList.of(context.getActorUrn())));
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java
index 252c0eaba6e85..8d0a23e665b1b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.auth;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.token.StatefulTokenService;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.access.token.DataHubAccessTokenInfo;
@@ -18,42 +20,39 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver for revoking personal & service principal v2-type (stateful) access tokens.
- */
+/** Resolver for revoking personal & service principal v2-type (stateful) access tokens.
*/ @Slf4j public class RevokeAccessTokenResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; private final StatefulTokenService _statefulTokenService; - public RevokeAccessTokenResolver(final EntityClient entityClient, final StatefulTokenService statefulTokenService) { + public RevokeAccessTokenResolver( + final EntityClient entityClient, final StatefulTokenService statefulTokenService) { _entityClient = entityClient; _statefulTokenService = statefulTokenService; } @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); - log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); + log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); - if (isAuthorizedToRevokeToken(context, tokenId)) { - try { - _statefulTokenService.revokeAccessToken(tokenId); - } catch (Exception e) { - throw new RuntimeException("Failed to revoke access token", e); - } - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToRevokeToken(context, tokenId)) { + try { + _statefulTokenService.revokeAccessToken(tokenId); + } catch (Exception e) { + throw new RuntimeException("Failed to revoke access token", e); + } + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private boolean isAuthorizedToRevokeToken(final QueryContext context, final String tokenId) { @@ -62,12 +61,17 @@ private boolean isAuthorizedToRevokeToken(final QueryContext context, final Stri private boolean isOwnerOfAccessToken(final QueryContext context, final String tokenId) { try { - final EntityResponse entityResponse = _entityClient.getV2(Constants.ACCESS_TOKEN_ENTITY_NAME, - Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), - ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), context.getAuthentication()); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), + ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { + final DataMap data = + entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(data); return tokenInfo.getOwnerUrn().toString().equals(context.getActorUrn()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java index 4a1964b36032c..40c91b43850f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java @@ -1,61 +1,65 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.BrowsePathsInput; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.Collections; -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowsePathsResolver implements DataFetcher<CompletableFuture<List<BrowsePath>>> { - private static final Logger _logger = LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); - - private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; - - public BrowsePathsResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } - - @Override - public CompletableFuture<List<BrowsePath>> get(DataFetchingEnvironment environment) { - final BrowsePathsInput input = bindArgument(environment.getArgument("input"), BrowsePathsInput.class); - - 
return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Fetch browse paths. entity type: %s, urn: %s", - input.getType(), - input.getUrn())); - if (_typeToEntity.containsKey(input.getType())) { - return _typeToEntity.get(input.getType()).browsePaths(input.getUrn(), environment.getContext()); - } - // Browse path is impl detail. - return Collections.emptyList(); - } catch (Exception e) { - _logger.error("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()) + " " + e.getMessage()); - throw new RuntimeException("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()), e); + private static final Logger _logger = + LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); + + private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; + + public BrowsePathsResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } + + @Override + public CompletableFuture<List<BrowsePath>> get(DataFetchingEnvironment environment) { + final BrowsePathsInput input = + bindArgument(environment.getArgument("input"), BrowsePathsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Fetch browse paths. entity type: %s, urn: %s", + input.getType(), input.getUrn())); + if (_typeToEntity.containsKey(input.getType())) { + return _typeToEntity + .get(input.getType()) + .browsePaths(input.getUrn(), environment.getContext()); } + // Browse path is impl detail. + return Collections.emptyList(); + } catch (Exception e) { + _logger.error( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java index 9c95eceb1e78f..287d0eef8aec8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java @@ -1,77 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowseInput; import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowseResolver implements DataFetcher<CompletableFuture<BrowseResults>> { - 
private static final int DEFAULT_START = 0; - private static final int DEFAULT_COUNT = 10; + private static final int DEFAULT_START = 0; + private static final int DEFAULT_COUNT = 10; - private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); + private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); - private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; + private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; - public BrowseResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } + public BrowseResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture<BrowseResults> get(DataFetchingEnvironment environment) { - final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); + @Override + public CompletableFuture<BrowseResults> get(DataFetchingEnvironment environment) { + final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); - final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; - final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; + final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; + final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Executing browse. entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count)); - return _typeToEntity.get(input.getType()).browse( - input.getPath(), - input.getFilters(), - start, - count, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count) + " " + e.getMessage()); - throw new RuntimeException("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count), e); - } + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Executing browse. 
entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count)); + return _typeToEntity + .get(input.getType()) + .browse( + input.getPath(), input.getFilters(), start, count, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java index 81f82c93f1fa7..396d91c37d81c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java @@ -1,14 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.browse; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.types.BrowsableEntityType; -import com.linkedin.datahub.graphql.generated.BrowsePath; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; public class EntityBrowsePathsResolver implements DataFetcher<CompletableFuture<List<BrowsePath>>> { @@ -24,12 +24,14 @@ public CompletableFuture<List<BrowsePath>> get(DataFetchingEnvironment environme final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - return CompletableFuture.supplyAsync(() -> { - try { - return _browsableType.browsePaths(urn, context); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _browsableType.browsePaths(urn, context); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 76abddc9a99a9..292d6108b7a04 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.chart; +import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import 
com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; @@ -17,18 +21,13 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -52,30 +51,40 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); - return CompletableFuture.supplyAsync(() -> { - try { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - final String pathStr = input.getPath().size() > 0 ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + return CompletableFuture.supplyAsync( + () -> { + try { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + final String pathStr = + input.getPath().size() > 0 + ? BROWSE_PATH_V2_DELIMITER + + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) + : ""; + final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); - BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, - pathStr, - maybeResolvedView != null - ? SearchUtils.combineFilters(filter, maybeResolvedView.getDefinition().getFilter()) - : filter, - sanitizedQuery, - start, - count, - context.getAuthentication() - ); - return mapBrowseResults(browseResults); - } catch (Exception e) { - throw new RuntimeException("Failed to execute browse V2", e); - } - }); + BrowseResultV2 browseResults = + _entityClient.browseV2( + entityName, + pathStr, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + filter, maybeResolvedView.getDefinition().getFilter()) + : filter, + sanitizedQuery, + start, + count, + context.getAuthentication()); + return mapBrowseResults(browseResults); + } catch (Exception e) { + throw new RuntimeException("Failed to execute browse V2", e); + } + }); } private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { @@ -85,28 +94,29 @@ private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { results.setCount(browseResults.getPageSize()); List<BrowseResultGroupV2> groups = new ArrayList<>(); - browseResults.getGroups().forEach(group -> { - BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); - browseGroup.setName(group.getName()); - browseGroup.setCount(group.getCount()); - browseGroup.setHasSubGroups(group.isHasSubGroups()); - if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); - } - groups.add(browseGroup); - }); + browseResults + .getGroups() + .forEach( + group -> { + BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); + browseGroup.setName(group.getName()); + browseGroup.setCount(group.getCount()); + browseGroup.setHasSubGroups(group.isHasSubGroups()); + if (group.hasUrn() && group.getUrn() != null) { + browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); + } + groups.add(browseGroup); + }); results.setGroups(groups); BrowseResultMetadata resultMetadata = new BrowseResultMetadata(); - resultMetadata.setPath(Arrays.stream(browseResults.getMetadata().getPath() - .split(BROWSE_PATH_V2_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()) - ); + resultMetadata.setPath( + Arrays.stream(browseResults.getMetadata().getPath().split(BROWSE_PATH_V2_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList())); resultMetadata.setTotalNumEntities(browseResults.getMetadata().getTotalNumEntities()); results.setMetadata(resultMetadata); return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java index 207da02de6ec2..a2d04a26bfa97 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java @@ -11,24 +11,23 @@ import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; - @Slf4j -public class ChartStatsSummaryResolver implements DataFetcher<CompletableFuture<ChartStatsSummary>> { +public class ChartStatsSummaryResolver + implements DataFetcher<CompletableFuture<ChartStatsSummary>> { private final TimeseriesAspectService timeseriesAspectService; private final Cache<Urn, ChartStatsSummary> summaryCache; public ChartStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) - .build(); + this.summaryCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build(); } @Override - public CompletableFuture<ChartStatsSummary> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ChartStatsSummary> get(DataFetchingEnvironment environment) 
+ throws Exception { // Not yet implemented return CompletableFuture.completedFuture(null); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index f6bc68caa0821..34f7f133f6fb9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -35,10 +35,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * Resolver responsible for serving app configurations to the React UI. - */ +/** Resolver responsible for serving app configurations to the React UI. */ public class AppConfigResolver implements DataFetcher<CompletableFuture<AppConfig>> { private final GitVersion _gitVersion; @@ -82,7 +79,8 @@ public AppConfigResolver( } @Override - public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); @@ -103,19 +101,20 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen final PoliciesConfig policiesConfig = new PoliciesConfig(); policiesConfig.setEnabled(_authorizationConfiguration.getDefaultAuthorizer().isEnabled()); - policiesConfig.setPlatformPrivileges(com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES - .stream() - .map(this::mapPrivilege) - .collect(Collectors.toList())); + policiesConfig.setPlatformPrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES.stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); - policiesConfig.setResourcePrivileges(com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES - .stream() - .map(this::mapResourcePrivileges) - .collect(Collectors.toList()) - ); + policiesConfig.setResourcePrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES.stream() + .map(this::mapResourcePrivileges) + .collect(Collectors.toList())); final IdentityManagementConfig identityManagementConfig = new IdentityManagementConfig(); - identityManagementConfig.setEnabled(true); // Identity Management always enabled. TODO: Understand if there's a case where this should change. + identityManagementConfig.setEnabled( + true); // Identity Management always enabled. TODO: Understand if there's a case where this + // should change. 
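As an aside on the pattern being reformatted above: several of these resolvers keep a bounded, time-expiring Guava cache (the summaryCache field). The following is a minimal, self-contained sketch of that construction, not part of this patch; the names SummaryCacheExample, getOrLoad, and computeValue are illustrative, and it assumes Guava is on the classpath.

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

/** Illustrative sketch only; mirrors the summaryCache construction seen in the resolvers. */
public class SummaryCacheExample {
  // Bounded cache whose entries expire six hours after being written, matching
  // the maximumSize/expireAfterWrite settings used by the resolvers above.
  private final Cache<String, String> cache =
      CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build();

  public String getOrLoad(String key) {
    String cached = cache.getIfPresent(key); // returns null on a miss
    if (cached != null) {
      return cached;
    }
    String computed = computeValue(key);
    cache.put(key, computed);
    return computed;
  }

  private String computeValue(String key) {
    return "value-for-" + key; // stand-in for the expensive aggregation work
  }
}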
final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig(); ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled()); @@ -133,7 +132,8 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); - queriesTabConfig.setQueriesTabResultSize(_visualConfiguration.getQueriesTab().getQueriesTabResultSize()); + queriesTabConfig.setQueriesTabResultSize( + _visualConfiguration.getQueriesTab().getQueriesTabResultSize()); visualConfig.setQueriesTab(queriesTabConfig); } if (_visualConfiguration != null && _visualConfiguration.getEntityProfile() != null) { @@ -148,7 +148,8 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen if (_visualConfiguration != null && _visualConfiguration.getSearchResult() != null) { SearchResultsVisualConfig searchResultsVisualConfig = new SearchResultsVisualConfig(); if (_visualConfiguration.getSearchResult().getEnableNameHighlight() != null) { - searchResultsVisualConfig.setEnableNameHighlight(_visualConfiguration.getSearchResult().getEnableNameHighlight()); + searchResultsVisualConfig.setEnableNameHighlight( + _visualConfiguration.getSearchResult().getEnableNameHighlight()); } visualConfig.setSearchResult(searchResultsVisualConfig); } @@ -166,14 +167,15 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen viewsConfig.setEnabled(_viewsConfiguration.isEnabled()); appConfig.setViewsConfig(viewsConfig); - final FeatureFlagsConfig featureFlagsConfig = FeatureFlagsConfig.builder() - .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) - .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) - .setShowBrowseV2(_featureFlags.isShowBrowseV2()) - .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) - .setShowAccessManagement(_featureFlags.isShowAccessManagement()) - .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) - .build(); + final FeatureFlagsConfig featureFlagsConfig = + FeatureFlagsConfig.builder() + .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) + .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) + .setShowBrowseV2(_featureFlags.isShowBrowseV2()) + .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) + .setShowAccessManagement(_featureFlags.isShowAccessManagement()) + .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .build(); appConfig.setFeatureFlags(featureFlagsConfig); @@ -185,14 +187,17 @@ private ResourcePrivileges mapResourcePrivileges( final ResourcePrivileges graphQLPrivileges = new ResourcePrivileges(); graphQLPrivileges.setResourceType(resourcePrivileges.getResourceType()); graphQLPrivileges.setResourceTypeDisplayName(resourcePrivileges.getResourceTypeDisplayName()); - graphQLPrivileges.setEntityType(mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); + graphQLPrivileges.setEntityType( + mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); graphQLPrivileges.setPrivileges( - resourcePrivileges.getPrivileges().stream().map(this::mapPrivilege).collect(Collectors.toList()) - ); + resourcePrivileges.getPrivileges().stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); return graphQLPrivileges; } - private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { + private Privilege mapPrivilege( + com.linkedin.metadata.authorization.PoliciesConfig.Privilege 
privilege) { final Privilege graphQLPrivilege = new Privilege(); graphQLPrivilege.setType(privilege.getType()); graphQLPrivilege.setDisplayName(privilege.getDisplayName()); @@ -202,29 +207,53 @@ private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfi private EntityType mapResourceTypeToEntityType(final String resourceType) { // TODO: Is there a better way to instruct the UI to present a searchable resource? - if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES.getResourceType().equals(resourceType)) { + if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATASET; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DASHBOARD; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CHART; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_FLOW; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_JOB; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.TAG; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_TERM; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_NODE; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DOMAIN; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CONTAINER; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { 
return EntityType.CORP_GROUP; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_USER; } else { return null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 4b8bd37a4fabe..58f7715c3e627 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; @@ -20,21 +22,16 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. */ @Slf4j public class ContainerEntitiesResolver implements DataFetcher<CompletableFuture<SearchResults>> { - static final List<String> CONTAINABLE_ENTITY_NAMES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME - ); + static final List<String> CONTAINABLE_ENTITY_NAMES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME); private static final String CONTAINER_FIELD_NAME = "container"; private static final String INPUT_ARG_NAME = "input"; private static final String DEFAULT_QUERY = "*"; @@ -55,45 +52,53 @@ public ContainerEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Container) environment.getSource()).getUrn(); - final ContainerEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final ContainerEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : "*"; final int start = input.getStart() != null ? input.getStart() : 0; final int count = input.getCount() != null ? 
input.getCount() : 20; - return CompletableFuture.supplyAsync(() -> { - - try { - - final Criterion filterCriterion = new Criterion() - .setField(CONTAINER_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - CONTAINABLE_ENTITY_NAMES, - query, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with container with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final Criterion filterCriterion = + new Criterion() + .setField(CONTAINER_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + CONTAINABLE_ENTITY_NAMES, + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve entities associated with container with urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 90fad4ca4578a..9502fb8e5cb93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -12,15 +14,13 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; - -public class ParentContainersResolver implements DataFetcher<CompletableFuture<ParentContainersResult>> { +public class ParentContainersResolver + implements DataFetcher<CompletableFuture<ParentContainersResult>> { private final EntityClient _entityClient; @@ -28,21 +28,25 @@ public ParentContainersResolver(final EntityClient entityClient) { _entityClient = entityClient; } - private void aggregateParentContainers(List<Container> containers, String urn, QueryContext context) { + private void aggregateParentContainers( + List<Container> containers, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + 
Collections.singleton(CONTAINER_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); - EntityResponse response = _entityClient.getV2(containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); if (response != null) { Container mappedContainer = ContainerMapper.map(response); containers.add(mappedContainer); @@ -61,16 +65,17 @@ public CompletableFuture<ParentContainersResult> get(DataFetchingEnvironment env final String urn = ((Entity) environment.getSource()).getUrn(); final List<Container> containers = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - aggregateParentContainers(containers, urn, context); - final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); - return result; - } catch (DataHubGraphQLException e) { - throw new RuntimeException("Failed to load all containers", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + aggregateParentContainers(containers, urn, context); + final ParentContainersResult result = new ParentContainersResult(); + result.setCount(containers.size()); + result.setContainers(containers); + return result; + } catch (DataHubGraphQLException e) { + throw new RuntimeException("Failed to load all containers", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index db125384745a1..b5480359bde6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpUser; -import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; +import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUserUsageCounts; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.query.filter.Filter; @@ -19,10 +21,9 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - @Slf4j -public class DashboardStatsSummaryResolver implements DataFetcher<CompletableFuture<DashboardStatsSummary>> { +public class DashboardStatsSummaryResolver + 
implements DataFetcher<CompletableFuture<DashboardStatsSummary>> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -32,63 +33,72 @@ public class DashboardStatsSummaryResolver implements DataFetcher<CompletableFut public DashboardStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. - .build(); + this.summaryCache = + CacheBuilder.newBuilder() + .maximumSize(10000) + .expireAfterWrite( + 6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. + .build(); } @Override - public CompletableFuture<DashboardStatsSummary> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DashboardStatsSummary> get(DataFetchingEnvironment environment) + throws Exception { final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - - try { - - final DashboardStatsSummary result = new DashboardStatsSummary(); - - // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. - List<DashboardUsageMetrics> dashboardUsageMetrics = - getDashboardUsageMetrics(resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); - if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); - } - - // Obtain unique user statistics, by rolling up unique users over the past month. - List<DashboardUserUsageCounts> userUsageCounts = getDashboardUsagePerUser(resourceUrn); - result.setUniqueUserCountLast30Days(userUsageCounts.size()); - result.setTopUsersLast30Days( - trimUsers(userUsageCounts.stream().map(DashboardUserUsageCounts::getUser).collect(Collectors.toList()))); - - this.summaryCache.put(resourceUrn, result); - return result; - - } catch (Exception e) { - log.error(String.format("Failed to load dashboard usage summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } + + try { + + final DashboardStatsSummary result = new DashboardStatsSummary(); + + // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. + List<DashboardUsageMetrics> dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + if (dashboardUsageMetrics.size() > 0) { + result.setViewCount(getDashboardViewCount(resourceUrn)); + } + + // Obtain unique user statistics, by rolling up unique users over the past month. 
+ List<DashboardUserUsageCounts> userUsageCounts = getDashboardUsagePerUser(resourceUrn); + result.setUniqueUserCountLast30Days(userUsageCounts.size()); + result.setTopUsersLast30Days( + trimUsers( + userUsageCounts.stream() + .map(DashboardUserUsageCounts::getUser) + .collect(Collectors.toList()))); + + this.summaryCache.put(resourceUrn, result); + return result; + + } catch (Exception e) { + log.error( + String.format( + "Failed to load dashboard usage summary for resource %s", + resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. + } + }); } private int getDashboardViewCount(final Urn resourceUrn) { - List<DashboardUsageMetrics> dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), - null, - null, - 1, - this.timeseriesAspectService); + List<DashboardUsageMetrics> dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } private List<DashboardUserUsageCounts> getDashboardUsagePerUser(final Urn resourceUrn) { long now = System.currentTimeMillis(); long nowMinusOneMonth = timeMinusOneMonth(now); - Filter bucketStatsFilter = createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); + Filter bucketStatsFilter = + createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); } @@ -98,4 +108,4 @@ private List<CorpUser> trimUsers(final List<CorpUser> originalUsers) { } return originalUsers; } - } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 24e1db33e9d40..07d028b07b01d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; @@ -26,16 +28,14 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - /** * Resolver used for resolving the usage statistics of a Dashboard. 
- * <p> - * Returns daily as well as absolute usage metrics of Dashboard + * + * <p>Returns daily as well as absolute usage metrics of Dashboard */ @Slf4j -public class DashboardUsageStatsResolver implements DataFetcher<CompletableFuture<DashboardUsageQueryResult>> { +public class DashboardUsageStatsResolver + implements DataFetcher<CompletableFuture<DashboardUsageQueryResult>> { private static final String ES_FIELD_EVENT_GRANULARITY = "eventGranularity"; private final TimeseriesAspectService timeseriesAspectService; @@ -44,34 +44,40 @@ public DashboardUsageStatsResolver(TimeseriesAspectService timeseriesAspectServi } @Override - public CompletableFuture<DashboardUsageQueryResult> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DashboardUsageQueryResult> get(DataFetchingEnvironment environment) + throws Exception { final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); // Max number of aspects to return for absolute dashboard usage. final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - return CompletableFuture.supplyAsync(() -> { - DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); + return CompletableFuture.supplyAsync( + () -> { + DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); - // Time Bucket Stats - Filter bucketStatsFilter = createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); - List<DashboardUsageAggregation> dailyUsageBuckets = getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); + // Time Bucket Stats + Filter bucketStatsFilter = + createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); + List<DashboardUsageAggregation> dailyUsageBuckets = + getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); - usageQueryResult.setBuckets(dailyUsageBuckets); - usageQueryResult.setAggregations(aggregations); + usageQueryResult.setBuckets(dailyUsageBuckets); + usageQueryResult.setAggregations(aggregations); - // Absolute usage metrics - List<DashboardUsageMetrics> dashboardUsageMetrics = - getDashboardUsageMetrics(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); - usageQueryResult.setMetrics(dashboardUsageMetrics); - return usageQueryResult; - }); + // Absolute usage metrics + List<DashboardUsageMetrics> dashboardUsageMetrics = + getDashboardUsageMetrics( + dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + usageQueryResult.setMetrics(dashboardUsageMetrics); + return usageQueryResult; + }); } - private List<DashboardUsageMetrics> getDashboardUsageMetrics(String dashboardUrn, Long maybeStartTimeMillis, - Long maybeEndTimeMillis, Integer maybeLimit) { + private List<DashboardUsageMetrics> getDashboardUsageMetrics( + String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, Integer maybeLimit) { List<DashboardUsageMetrics> dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -79,16 +85,26 @@ private List<DashboardUsageMetrics> getDashboardUsageMetrics(String dashboardUrn 
// Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); List<EnvelopedAspect> aspects = - timeseriesAspectService.getAspectValues(Urn.createFromString(dashboardUrn), Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit, + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 462c18ea33dd4..4f170a296c47e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -32,7 +32,6 @@ import java.util.List; import java.util.stream.Collectors; - public class DashboardUsageStatsUtils { public static final String ES_FIELD_URN = "urn"; @@ -49,15 +48,17 @@ public static List<DashboardUsageMetrics> getDashboardUsageMetrics( List<DashboardUsageMetrics> dashboardUsageMetrics; try { Filter filter = createUsageFilter(dashboardUrn, null, null, false); - List<EnvelopedAspect> aspects = timeseriesAspectService.getAspectValues( - Urn.createFromString(dashboardUrn), - Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + List<EnvelopedAspect> aspects = + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + filter); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } @@ -69,8 +70,10 @@ public static DashboardUsageQueryResultAggregations getAggregations( List<DashboardUsageAggregation> dailyUsageBuckets, TimeseriesAspectService timeseriesAspectService) { - List<DashboardUserUsageCounts> userUsageCounts = getUserUsageCounts(filter, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations 
= new DashboardUsageQueryResultAggregations(); + List<DashboardUserUsageCounts> userUsageCounts = + getUserUsageCounts(filter, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + new DashboardUsageQueryResultAggregations(); aggregations.setUsers(userUsageCounts); aggregations.setUniqueUserCount(userUsageCounts.size()); @@ -99,29 +102,47 @@ public static DashboardUsageQueryResultAggregations getAggregations( } public static List<DashboardUsageAggregation> getBuckets( - Filter filter, - String dashboardUrn, - TimeseriesAspectService timeseriesAspectService) { + Filter filter, String dashboardUrn, TimeseriesAspectService timeseriesAspectService) { AggregationSpec usersCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountAggregation = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("viewsCount"); AggregationSpec executionsCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("executionsCount"); AggregationSpec usersCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("viewsCount"); AggregationSpec executionsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{usersCountAggregation, viewsCountAggregation, executionsCountAggregation, - usersCountCardinalityAggregation, viewsCountCardinalityAggregation, executionsCountCardinalityAggregation}; - GenericTable dailyStats = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, - createUsageGroupingBuckets(CalendarInterval.DAY)); + new AggregationSpec[] { + usersCountAggregation, + viewsCountAggregation, + executionsCountAggregation, + usersCountCardinalityAggregation, + viewsCountCardinalityAggregation, + executionsCountCardinalityAggregation + }; + GenericTable dailyStats = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + createUsageGroupingBuckets(CalendarInterval.DAY)); List<DashboardUsageAggregation> buckets = new ArrayList<>(); for (StringArray row : dailyStats.getRows()) { @@ -130,7 +151,8 @@ public static List<DashboardUsageAggregation> getBuckets( usageAggregation.setDuration(WindowDuration.DAY); usageAggregation.setResource(dashboardUrn); - DashboardUsageAggregationMetrics usageAggregationMetrics = new DashboardUsageAggregationMetrics(); + DashboardUsageAggregationMetrics usageAggregationMetrics = + new DashboardUsageAggregationMetrics(); if 
(!row.get(1).equals(ES_NULL_VALUE) && !row.get(4).equals(ES_NULL_VALUE)) { try { @@ -156,7 +178,8 @@ public static List<DashboardUsageAggregation> getBuckets( usageAggregationMetrics.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert executionsCount from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert executionsCount from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -165,34 +188,59 @@ public static List<DashboardUsageAggregation> getBuckets( return buckets; } - public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, TimeseriesAspectService timeseriesAspectService) { + public static List<DashboardUserUsageCounts> getUserUsageCounts( + Filter filter, TimeseriesAspectService timeseriesAspectService) { // Sum aggregation on userCounts.count AggregationSpec sumUsageCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.usageCount"); AggregationSpec sumViewCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.viewsCount"); AggregationSpec sumExecutionCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.executionsCount"); AggregationSpec usageCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.usageCount"); AggregationSpec viewCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.viewsCount"); AggregationSpec executionCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY) + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) .setFieldPath("userCounts.executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{sumUsageCountsCountAggSpec, sumViewCountsCountAggSpec, sumExecutionCountsCountAggSpec, - usageCountsCardinalityAggSpec, viewCountsCardinalityAggSpec, executionCountsCardinalityAggSpec}; + new AggregationSpec[] { + sumUsageCountsCountAggSpec, + sumViewCountsCountAggSpec, + sumExecutionCountsCountAggSpec, + usageCountsCardinalityAggSpec, + viewCountsCardinalityAggSpec, + executionCountsCardinalityAggSpec + }; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend - GenericTable result = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - 
Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, groupingBuckets); + GenericTable result = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List<DashboardUserUsageCounts> userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -208,7 +256,8 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T userUsageCount.setUsageCount(Integer.valueOf(row.get(1))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if (!row.get(2).equals(ES_NULL_VALUE) && row.get(5).equals(ES_NULL_VALUE)) { @@ -217,7 +266,8 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T userUsageCount.setViewsCount(Integer.valueOf(row.get(2))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user views count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user views count from ES to int", e); } } if (!row.get(3).equals(ES_NULL_VALUE) && !row.get(6).equals(ES_NULL_VALUE)) { @@ -226,7 +276,8 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T userUsageCount.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user executions count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user executions count from ES to int", e); } } userUsageCounts.add(userUsageCount); @@ -239,17 +290,15 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T private static GroupingBucket[] createUsageGroupingBuckets(CalendarInterval calenderInterval) { GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(calenderInterval)); - return new GroupingBucket[]{timestampBucket}; + return new GroupingBucket[] {timestampBucket}; } public static Filter createUsageFilter( - String dashboardUrn, - Long startTime, - Long endTime, - boolean byBucket) { + String dashboardUrn, Long startTime, Long endTime, boolean byBucket) { Filter filter = new Filter(); final ArrayList<Criterion> criteria = new ArrayList<>(); @@ -260,44 +309,55 @@ public static Filter createUsageFilter( if (startTime != null) { // Add filter for start time - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(Long.toString(startTime)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(Long.toString(startTime)); criteria.add(startTimeCriterion); } if (endTime != null) { // Add filter for end time - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(Long.toString(endTime)); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(Long.toString(endTime)); 
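Nearly every resolver touched in this change set wraps its work in CompletableFuture.supplyAsync and rethrows checked failures as RuntimeException so the returned future completes exceptionally. A minimal sketch of that shape using only JDK types; the class and method names (AsyncResolverExample, get) are illustrative, not part of this patch.

import java.util.concurrent.CompletableFuture;

public class AsyncResolverExample {

  // Mirrors the supplyAsync + wrap-in-RuntimeException shape used by the
  // GraphQL resolvers in this patch; the parsing stands in for real work.
  public CompletableFuture<Integer> get(String input) {
    return CompletableFuture.supplyAsync(
        () -> {
          try {
            return Integer.parseInt(input.trim());
          } catch (Exception e) {
            throw new RuntimeException(String.format("Failed to resolve input %s", input), e);
          }
        });
  }

  public static void main(String[] args) {
    System.out.println(new AsyncResolverExample().get(" 42 ").join()); // prints 42
  }
}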
criteria.add(endTimeCriterion); } if (byBucket) { - // Add filter for presence of eventGranularity - only consider bucket stats and not absolute stats + // Add filter for presence of eventGranularity - only consider bucket stats and not absolute + // stats // since unit is mandatory, we assume if eventGranularity contains unit, then it is not null Criterion onlyTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.CONTAIN).setValue("unit"); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.CONTAIN) + .setValue("unit"); criteria.add(onlyTimeBucketsCriterion); } else { // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); } - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } - public static Long timeMinusOneMonth(long time) { final long oneHourMillis = 60 * 60 * 1000; final long oneDayMillis = 24 * oneHourMillis; return time - (31 * oneDayMillis + 1); } - private DashboardUsageStatsUtils() { } + private DashboardUsageStatsUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java index 9c32fa1c08076..f5d4f949e5710 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -8,15 +10,12 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -27,54 +26,80 @@ public class BatchSetDataProductResolver implements DataFetcher<CompletableFutur @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDataProductInput input = bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); + final BatchSetDataProductInput input = + bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); final String maybeDataProductUrn = 
input.getDataProductUrn(); final List<String> resources = input.getResourceUrns(); - return CompletableFuture.supplyAsync(() -> { - - verifyResources(resources, context); - verifyDataProduct(maybeDataProductUrn, context); + return CompletableFuture.supplyAsync( + () -> { + verifyResources(resources, context); + verifyDataProduct(maybeDataProductUrn, context); - try { - List<Urn> resourceUrns = resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (maybeDataProductUrn != null) { - batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); - } else { - batchUnsetDataProduct(resourceUrns, context); - } - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + List<Urn> resourceUrns = + resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + if (maybeDataProductUrn != null) { + batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); + } else { + batchUnsetDataProduct(resourceUrns, context); + } + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void verifyResources(List<String> resources, QueryContext context) { for (String resource : resources) { - if (!_dataProductService.verifyEntityExists(UrnUtils.getUrn(resource), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, %s in resources does not exist", resource)); + if (!_dataProductService.verifyEntityExists( + UrnUtils.getUrn(resource), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, %s in resources does not exist", resource)); } Urn resourceUrn = UrnUtils.getUrn(resource); - if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity( + context, resourceUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } } private void verifyDataProduct(String maybeDataProductUrn, QueryContext context) { - if (maybeDataProductUrn != null && !_dataProductService.verifyEntityExists(UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, Data Product urn %s does not exist", maybeDataProductUrn)); + if (maybeDataProductUrn != null + && !_dataProductService.verifyEntityExists( + UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, Data Product urn %s does not exist", + maybeDataProductUrn)); } } - private void batchSetDataProduct(@Nonnull String dataProductUrn, List<Urn> resources, QueryContext context) { - log.debug("Batch setting Data Product. 
dataProduct urn: {}, resources: {}", dataProductUrn, resources); + private void batchSetDataProduct( + @Nonnull String dataProductUrn, List<Urn> resources, QueryContext context) { + log.debug( + "Batch setting Data Product. dataProduct urn: {}, resources: {}", + dataProductUrn, + resources); try { - _dataProductService.batchSetDataProduct(UrnUtils.getUrn(dataProductUrn), resources, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.batchSetDataProduct( + UrnUtils.getUrn(dataProductUrn), + resources, + context.getAuthentication(), + UrnUtils.getUrn(context.getActorUrn())); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Data Product %s to resources with urns %s!", dataProductUrn, resources), e); + throw new RuntimeException( + String.format( + "Failed to batch set Data Product %s to resources with urns %s!", + dataProductUrn, resources), + e); } } @@ -82,10 +107,14 @@ private void batchUnsetDataProduct(List<Urn> resources, QueryContext context) { log.debug("Batch unsetting Data Product. resources: {}", resources); try { for (Urn resource : resources) { - _dataProductService.unsetDataProduct(resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.unsetDataProduct( + resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch unset data product for resources with urns %s!", resources), e); + throw new RuntimeException( + String.format( + "Failed to batch unset data product for resources with urns %s!", resources), + e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index f644ff31a571b..10c487a839f35 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -12,13 +14,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class CreateDataProductResolver implements DataFetcher<CompletableFuture<DataProduct>> { @@ -26,37 +25,45 @@ public class CreateDataProductResolver implements DataFetcher<CompletableFuture< private final DataProductService _dataProductService; @Override - public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateDataProductInput input = 
bindArgument(environment.getArgument("input"), CreateDataProductInput.class); + final CreateDataProductInput input = + bindArgument(environment.getArgument("input"), CreateDataProductInput.class); final Authentication authentication = context.getAuthentication(); final Urn domainUrn = UrnUtils.getUrn(input.getDomainUrn()); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Domain provided dos not exist"); - } - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - final Urn dataProductUrn = _dataProductService.createDataProduct( - input.getProperties().getName(), - input.getProperties().getDescription(), - authentication); - _dataProductService.setDomain(dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new DataProduct from input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Domain provided does not exist"); + } + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + try { + final Urn dataProductUrn = + _dataProductService.createDataProduct( + input.getProperties().getName(), + input.getProperties().getDescription(), + authentication); + _dataProductService.setDomain( + dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new DataProduct from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java index 596e292e7fe33..f6fe11a587a39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java @@ -7,25 +7,27 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.authorization.PoliciesConfig; -import lombok.extern.slf4j.Slf4j; - import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DataProductAuthorizationUtils { - private DataProductAuthorizationUtils() { + private DataProductAuthorizationUtils() {} - } - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDataProductsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -35,11 +37,14 @@ public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryCo orPrivilegeGroups); } - public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext context, Urn domainUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToManageDataProducts( + @Nonnull QueryContext context, Urn domainUrn) { + final DisjunctivePrivilegeGroup 
orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -49,10 +54,10 @@ public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext con orPrivilegeGroups); } - public static boolean isAuthorizedToEditDataProduct(@Nonnull QueryContext context, Urn dataProductUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP - )); + public static boolean isAuthorizedToEditDataProduct( + @Nonnull QueryContext context, Urn dataProductUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(ALL_PRIVILEGES_GROUP)); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java index fd31e2199c22a..ea13f96cfc1bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java @@ -9,11 +9,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class DeleteDataProductResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -21,32 +20,38 @@ public class DeleteDataProductResolver implements DataFetcher<CompletableFuture< private final DataProductService _dataProductService; @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } - - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - } - - try { - _dataProductService.deleteDataProduct(dataProductUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Data Product", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Data Product provided dos not exist"); + } + + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + } + + try { + _dataProductService.deleteDataProduct(dataProductUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Data Product", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index 831d449bef9ef..a0f1698bf99e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -22,18 +25,14 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * Resolver responsible for getting the assets belonging to a Data Product. 
Get the assets from the @@ -41,7 +40,8 @@ */ @Slf4j @RequiredArgsConstructor -public class ListDataProductAssetsResolver implements DataFetcher<CompletableFuture<SearchResults>> { +public class ListDataProductAssetsResolver + implements DataFetcher<CompletableFuture<SearchResults>> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; @@ -52,7 +52,10 @@ public class ListDataProductAssetsResolver implements DataFetcher<CompletableFut public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); // get urn from either input or source (in the case of "entities" field) - final String urn = environment.getArgument("urn") != null ? environment.getArgument("urn") : ((DataProduct) environment.getSource()).getUrn(); + final String urn = + environment.getArgument("urn") != null + ? environment.getArgument("urn") + : ((DataProduct) environment.getSource()).getUrn(); final Urn dataProductUrn = UrnUtils.getUrn(urn); final SearchAcrossEntitiesInput input = bindArgument(environment.getArgument("input"), SearchAcrossEntitiesInput.class); @@ -60,32 +63,52 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) // 1. Get urns of assets belonging to Data Product using an aspect query List<Urn> assetUrns = new ArrayList<>(); try { - final EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data(); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse != null + && entityResponse + .getAspects() + .containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + final DataMap data = + entityResponse + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); final DataProductProperties dataProductProperties = new DataProductProperties(data); if (dataProductProperties.hasAssets()) { - assetUrns.addAll(dataProductProperties.getAssets().stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList())); + assetUrns.addAll( + dataProductProperties.getAssets().stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList())); } } } catch (Exception e) { log.error(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); - throw new RuntimeException(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to list data product assets with urn %s", dataProductUrn), e); } // 2. Get list of entities that we should query based on filters or assets from aspect. - List<String> entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); - - - final List<EntityType> inputEntityTypes = (input.getTypes() == null || input.getTypes().isEmpty()) ? 
ImmutableList.of() : input.getTypes(); - final List<String> inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).distinct().collect(Collectors.toList()); - - final List<String> finalEntityNames = inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; + List<String> entitiesToQuery = + assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); + + final List<EntityType> inputEntityTypes = + (input.getTypes() == null || input.getTypes().isEmpty()) + ? ImmutableList.of() + : input.getTypes(); + final List<String> inputEntityNames = + inputEntityTypes.stream() + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + + final List<String> finalEntityNames = + inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); @@ -93,49 +116,64 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - // if no assets in data product properties, exit early before search and return empty results - if (assetUrns.size() == 0) { - SearchResults results = new SearchResults(); - results.setStart(start); - results.setCount(count); - results.setTotal(0); - results.setSearchResults(ImmutableList.of()); - return results; - } - - // add urns from the aspect to our filters - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } - - try { - log.debug( - "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - finalEntityNames, - sanitizedQuery, - finalFilter, - start, - count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // if no assets in data product properties, exit early before search and return empty + // results + if (assetUrns.size() == 0) { + SearchResults results = new SearchResults(); + results.setStart(start); + results.setCount(count); + results.setTotal(0); + results.setSearchResults(ImmutableList.of()); + return results; + } + + // add urns from the aspect to our filters + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); + + 
+          SearchFlags searchFlags = null;
+          com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags();
+          if (inputFlags != null) {
+            searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags);
+          }
+
+          try {
+            log.debug(
+                "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}",
+                input.getTypes(),
+                input.getQuery(),
+                input.getOrFilters(),
+                start,
+                count);
+
+            return UrnSearchResultsMapper.map(
+                _entityClient.searchAcrossEntities(
+                    finalEntityNames,
+                    sanitizedQuery,
+                    finalFilter,
+                    start,
+                    count,
+                    searchFlags,
+                    null,
+                    ResolverUtils.getAuthentication(environment)));
+          } catch (Exception e) {
+            log.error(
+                "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}",
+                input.getTypes(),
+                input.getQuery(),
+                input.getOrFilters(),
+                start,
+                count);
+            throw new RuntimeException(
+                "Failed to execute search: "
+                    + String.format(
+                        "entity types %s, query %s, filters: %s, start: %s, count: %s",
+                        input.getTypes(), input.getQuery(), input.getOrFilters(), start, count),
+                e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java
index 79afddbb873fb..304ef96d90aa5 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.dataproduct;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -13,13 +15,10 @@
 import com.linkedin.metadata.service.DataProductService;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
+import java.util.concurrent.CompletableFuture;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import java.util.concurrent.CompletableFuture;
-
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-
 @Slf4j
 @RequiredArgsConstructor
 public class UpdateDataProductResolver implements DataFetcher<CompletableFuture<DataProduct>> {
@@ -27,43 +26,51 @@ public class UpdateDataProductResolver implements DataFetcher<CompletableFuture<
   private final DataProductService _dataProductService;
 
   @Override
-  public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    final UpdateDataProductInput input = bindArgument(environment.getArgument("input"), UpdateDataProductInput.class);
+    final UpdateDataProductInput input =
+        bindArgument(environment.getArgument("input"), UpdateDataProductInput.class);
     final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn"));
     final Authentication authentication = context.getAuthentication();
 
-    return CompletableFuture.supplyAsync(() -> {
-      if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) {
-        throw new IllegalArgumentException("The Data Product provided does not exist");
-      }
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!_dataProductService.verifyEntityExists(
+              dataProductUrn, context.getAuthentication())) {
+            throw new IllegalArgumentException("The Data Product provided does not exist");
+          }
 
-      Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication());
-      if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) {
-        // get first domain since we only allow one domain right now
-        Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString());
-        if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) {
-          throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-        }
-      }
+          Domains domains =
+              _dataProductService.getDataProductDomains(
+                  dataProductUrn, context.getAuthentication());
+          if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) {
+            // get first domain since we only allow one domain right now
+            Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString());
+            if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(
+                context, domainUrn)) {
+              throw new AuthorizationException(
+                  "Unauthorized to perform this action. Please contact your DataHub administrator.");
+            }
+          }
 
-      try {
-        final Urn urn = _dataProductService.updateDataProduct(
-            dataProductUrn,
-            input.getName(),
-            input.getDescription(),
-            authentication);
-        EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication);
-        if (response != null) {
-          return DataProductMapper.map(response);
-        }
-        // should never happen
-        log.error(String.format("Unable to find data product with urn %s", dataProductUrn));
-        return null;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to update DataProduct with urn %s", dataProductUrn), e);
-      }
-    });
+          try {
+            final Urn urn =
+                _dataProductService.updateDataProduct(
+                    dataProductUrn, input.getName(), input.getDescription(), authentication);
+            EntityResponse response =
+                _dataProductService.getDataProductEntityResponse(urn, authentication);
+            if (response != null) {
+              return DataProductMapper.map(response);
+            }
+            // should never happen
+            log.error(String.format("Unable to find data product with urn %s", dataProductUrn));
+            return null;
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to update DataProduct with urn %s", dataProductUrn), e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java
index 1587df4c9899b..604c46a1f7c01 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java
@@ -39,13 +39,11 @@
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
-
 /**
  * Resolver used for resolving the Health state of a Dataset.
  *
- * Currently, the health status is calculated via the validation on a Dataset. If there are no validations found, the
- * health status will be undefined for the Dataset.
- *
+ * <p>Currently, the health status is calculated via the validation on a Dataset. If there are no
+ * validations found, the health status will be undefined for the Dataset.
 */
 @Slf4j
 public class DatasetHealthResolver implements DataFetcher<CompletableFuture<List<Health>>> {
@@ -60,47 +58,48 @@ public class DatasetHealthResolver implements DataFetcher<CompletableFutur
 
   private final Cache<String, CachedHealth> _statusCache;
 
   public DatasetHealthResolver(
-      final GraphClient graphClient,
-      final TimeseriesAspectService timeseriesAspectService) {
+      final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService) {
     this(graphClient, timeseriesAspectService, new Config(true));
-
   }
+
   public DatasetHealthResolver(
       final GraphClient graphClient,
       final TimeseriesAspectService timeseriesAspectService,
       final Config config) {
     _graphClient = graphClient;
     _timeseriesAspectService = timeseriesAspectService;
-    _statusCache = CacheBuilder.newBuilder()
-        .maximumSize(10000)
-        .expireAfterWrite(1, TimeUnit.MINUTES)
-        .build();
+    _statusCache =
+        CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(1, TimeUnit.MINUTES).build();
     _config = config;
   }
 
   @Override
-  public CompletableFuture<List<Health>> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<List<Health>> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final Dataset parent = environment.getSource();
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final CachedHealth cachedStatus = _statusCache.get(parent.getUrn(), () -> (
-            computeHealthStatusForDataset(parent.getUrn(), environment.getContext())));
-        return cachedStatus.healths;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to resolve dataset's health status.", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final CachedHealth cachedStatus =
+                _statusCache.get(
+                    parent.getUrn(),
+                    () ->
+                        (computeHealthStatusForDataset(parent.getUrn(), environment.getContext())));
+            return cachedStatus.healths;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to resolve dataset's health status.", e);
+          }
+        });
   }
 
   /**
    * Computes the "resolved health status" for a Dataset by
    *
-   * - fetching active (non-deleted) assertions
-   * - fetching latest assertion run for each
-   * - checking whether any of the assertions' latest runs are failing
-   *
+   * <p>- fetching active (non-deleted) assertions - fetching latest assertion run for each -
+   * checking whether any of the assertions' latest runs are failing
    */
-  private CachedHealth computeHealthStatusForDataset(final String datasetUrn, final QueryContext context) {
+  private CachedHealth computeHealthStatusForDataset(
+      final String datasetUrn, final QueryContext context) {
     final List<Health> healthStatuses = new ArrayList<>();
 
     if (_config.getAssertionsEnabled()) {
@@ -113,31 +112,33 @@ private CachedHealth computeHealthStatusForDataset(final String datasetUrn, fina
   }
 
   /**
-   * Returns the resolved "assertions health", which is currently a static function of whether the most recent run of
-   * all dataset assertions has succeeded.
+   * Returns the resolved "assertions health", which is currently a static function of whether the
+   * most recent run of all dataset assertions has succeeded.
    *
   * @param datasetUrn the dataset to compute health for
   * @param context the query context
   * @return an instance of {@link Health} for the Dataset, null if one cannot be computed.
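  *     (Illustratively: with three active assertions whose latest runs include one failure, the
  *     returned Health has status FAIL and message "1 of 3 assertions are failing".)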
   */
   @Nullable
-  private Health computeAssertionHealthForDataset(final String datasetUrn, final QueryContext context) {
+  private Health computeAssertionHealthForDataset(
+      final String datasetUrn, final QueryContext context) {
     // Get active assertion urns
-    final EntityRelationships relationships = _graphClient.getRelatedEntities(
-        datasetUrn,
-        ImmutableList.of(ASSERTS_RELATIONSHIP_NAME),
-        RelationshipDirection.INCOMING,
-        0,
-        500,
-        context.getActorUrn()
-    );
+    final EntityRelationships relationships =
+        _graphClient.getRelatedEntities(
+            datasetUrn,
+            ImmutableList.of(ASSERTS_RELATIONSHIP_NAME),
+            RelationshipDirection.INCOMING,
+            0,
+            500,
+            context.getActorUrn());
 
     if (relationships.getTotal() > 0) {
       // If there are assertions defined, then we should return a non-null health for this asset.
-      final Set<String> activeAssertionUrns = relationships.getRelationships()
-          .stream()
-          .map(relationship -> relationship.getEntity().toString()).collect(Collectors.toSet());
+      final Set<String> activeAssertionUrns =
+          relationships.getRelationships().stream()
+              .map(relationship -> relationship.getEntity().toString())
+              .collect(Collectors.toSet());
 
       final GenericTable assertionRunResults = getAssertionRunsTable(datasetUrn);
 
@@ -146,22 +147,24 @@
         return null;
       }
 
-      final List<String> failingAssertionUrns = getFailingAssertionUrns(assertionRunResults, activeAssertionUrns);
+      final List<String> failingAssertionUrns =
+          getFailingAssertionUrns(assertionRunResults, activeAssertionUrns);
 
       // Finally compute & return the health.
       final Health health = new Health();
       health.setType(HealthStatusType.ASSERTIONS);
       if (failingAssertionUrns.size() > 0) {
         health.setStatus(HealthStatus.FAIL);
-        health.setMessage(String.format("%s of %s assertions are failing", failingAssertionUrns.size(),
-            activeAssertionUrns.size()));
+        health.setMessage(
+            String.format(
+                "%s of %s assertions are failing",
+                failingAssertionUrns.size(), activeAssertionUrns.size()));
         health.setCauses(failingAssertionUrns);
       } else {
         health.setStatus(HealthStatus.PASS);
         health.setMessage("All assertions are passing");
       }
       return health;
-
     }
     return null;
   }
@@ -175,7 +178,8 @@ private GenericTable getAssertionRunsTable(final String asserteeUrn) {
         createAssertionGroupingBuckets());
   }
 
-  private List<String> getFailingAssertionUrns(final GenericTable assertionRunsResult, final Set<String> candidateAssertionUrns) {
+  private List<String> getFailingAssertionUrns(
+      final GenericTable assertionRunsResult, final Set<String> candidateAssertionUrns) {
     // Create the buckets based on the result
     return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns);
   }
@@ -191,12 +195,15 @@ private Filter createAssertionsFilter(final String datasetUrn) {
 
     // Add filter for result == result
     Criterion startTimeCriterion =
-        new Criterion().setField("status").setCondition(Condition.EQUAL).setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE);
+        new Criterion()
+            .setField("status")
+            .setCondition(Condition.EQUAL)
+            .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE);
     criteria.add(startTimeCriterion);
 
-    filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-        new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))
-    )));
+    filter.setOr(
+        new ConjunctiveCriterionArray(
+            ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))));
     return filter;
   }
 
@@ -205,31 +212,38 @@ private AggregationSpec[] createAssertionAggregationSpecs() {
     AggregationSpec resultTypeAggregation =
         new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type");
     AggregationSpec timestampAggregation =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("timestampMillis");
-    return new AggregationSpec[]{resultTypeAggregation, timestampAggregation};
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("timestampMillis");
+    return new AggregationSpec[] {resultTypeAggregation, timestampAggregation};
   }
 
   private GroupingBucket[] createAssertionGroupingBuckets() {
     // String grouping bucket on "assertionUrn"
     GroupingBucket assertionUrnBucket = new GroupingBucket();
     assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET);
-    return new GroupingBucket[]{assertionUrnBucket};
+    return new GroupingBucket[] {assertionUrnBucket};
  }
 
-  private List<String> resultToFailedAssertionUrns(final StringArrayArray rows, final Set<String> activeAssertionUrns) {
+  private List<String> resultToFailedAssertionUrns(
+      final StringArrayArray rows, final Set<String> activeAssertionUrns) {
     final List<String> failedAssertionUrns = new ArrayList<>();
     for (StringArray row : rows) {
       // Result structure should be assertionUrn, event.result.type, timestampMillis
       if (row.size() != 3) {
-        throw new RuntimeException(String.format(
-            "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", row.size()));
+        throw new RuntimeException(
+            String.format(
+                "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s",
+                row.size()));
       }
 
       final String assertionUrn = row.get(0);
       final String resultType = row.get(1);
 
-      // If assertion is "active" (not deleted) & is failing, then we report a degradation in health.
-      if (activeAssertionUrns.contains(assertionUrn) && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) {
+      // If assertion is "active" (not deleted) & is failing, then we report a degradation in
+      // health.
+      if (activeAssertionUrns.contains(assertionUrn)
+          && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) {
         failedAssertionUrns.add(assertionUrn);
       }
     }
@@ -246,4 +260,4 @@ public static class Config {
   private static class CachedHealth {
     private final List<Health> healths;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java
index 2873866bb34f7..74fbd9c2c868a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java
@@ -24,13 +24,13 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 
-
 /**
  * This resolver is a thin wrapper around the {@link DatasetUsageStatsResolver} which simply
  * computes some aggregate usage metrics for a Dataset.
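 *
 * <p>Computed summaries are cached per dataset urn for six hours (see the CacheBuilder setup
 * below). A rough usage sketch, with wiring assumed for illustration only:
 *
 * <pre>{@code
 * DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(usageClient);
 * CompletableFuture<DatasetStatsSummary> summary = resolver.get(environment);
 * }</pre>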
 */
 @Slf4j
-public class DatasetStatsSummaryResolver implements DataFetcher<CompletableFuture<DatasetStatsSummary>> {
+public class DatasetStatsSummaryResolver
+    implements DataFetcher<CompletableFuture<DatasetStatsSummary>> {
 
   // The maximum number of top users to show in the summary stats
   private static final Integer MAX_TOP_USERS = 5;
@@ -40,53 +40,64 @@ public class DatasetStatsSummaryResolver implements DataFetcher<CompletableFutur
 
   public DatasetStatsSummaryResolver(final UsageClient usageClient) {
     this.usageClient = usageClient;
-    this.summaryCache = CacheBuilder.newBuilder()
-        .maximumSize(10000)
-        .expireAfterWrite(6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally.
-        .build();
+    this.summaryCache =
+        CacheBuilder.newBuilder()
+            .maximumSize(10000)
+            .expireAfterWrite(
+                6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally.
+            .build();
   }
 
   @Override
-  public CompletableFuture<DatasetStatsSummary> get(DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<DatasetStatsSummary> get(DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn());
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (this.summaryCache.getIfPresent(resourceUrn) != null) {
-        return this.summaryCache.getIfPresent(resourceUrn);
-      }
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (this.summaryCache.getIfPresent(resourceUrn) != null) {
+            return this.summaryCache.getIfPresent(resourceUrn);
+          }
 
-      try {
+          try {
 
-        if (!isAuthorized(resourceUrn, context)) {
-          log.debug("User {} is not authorized to view profile information for dataset {}",
+            if (!isAuthorized(resourceUrn, context)) {
+              log.debug(
+                  "User {} is not authorized to view profile information for dataset {}",
                   context.getActorUrn(),
                   resourceUrn.toString());
-          return null;
-        }
-
-        com.linkedin.usage.UsageQueryResult
-            usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH);
-
-        final DatasetStatsSummary result = new DatasetStatsSummary();
-        result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries());
-        result.setUniqueUserCountLast30Days(usageQueryResult.getAggregations().getUniqueUserCount());
-        if (usageQueryResult.getAggregations().hasUsers()) {
-          result.setTopUsersLast30Days(trimUsers(usageQueryResult.getAggregations().getUsers()
-              .stream()
-              .filter(UserUsageCounts::hasUser)
-              .sorted((a, b) -> (b.getCount() - a.getCount()))
-              .map(userCounts -> createPartialUser(Objects.requireNonNull(userCounts.getUser())))
-              .collect(Collectors.toList())));
-        }
-        this.summaryCache.put(resourceUrn, result);
-        return result;
-      } catch (Exception e) {
-        log.error(String.format("Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), e);
-        return null; // Do not throw when loading usage summary fails.
-      }
-    });
+              return null;
+            }
+
+            com.linkedin.usage.UsageQueryResult usageQueryResult =
+                usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH);
+
+            final DatasetStatsSummary result = new DatasetStatsSummary();
+            result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries());
+            result.setUniqueUserCountLast30Days(
+                usageQueryResult.getAggregations().getUniqueUserCount());
+            if (usageQueryResult.getAggregations().hasUsers()) {
+              result.setTopUsersLast30Days(
+                  trimUsers(
+                      usageQueryResult.getAggregations().getUsers().stream()
+                          .filter(UserUsageCounts::hasUser)
+                          .sorted((a, b) -> (b.getCount() - a.getCount()))
+                          .map(
+                              userCounts ->
+                                  createPartialUser(Objects.requireNonNull(userCounts.getUser())))
+                          .collect(Collectors.toList())));
+            }
+            this.summaryCache.put(resourceUrn, result);
+            return result;
+          } catch (Exception e) {
+            log.error(
+                String.format(
+                    "Failed to load Usage Stats summary for resource %s", resourceUrn.toString()),
+                e);
+            return null; // Do not throw when loading usage summary fails.
+          }
+        });
   }
 
   private List<CorpUser> trimUsers(final List<CorpUser> originalUsers) {
@@ -103,8 +114,9 @@ private CorpUser createPartialUser(final Urn userUrn) {
   }
 
   private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) {
-    return AuthorizationUtils.isAuthorized(context,
-        Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())),
-        PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE);
+    return AuthorizationUtils.isAuthorized(
+        context,
+        Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())),
+        PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE);
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java
index e4bec8e896fdf..75288ec989c79 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java
@@ -17,7 +17,6 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class DatasetUsageStatsResolver implements DataFetcher<CompletableFuture<UsageQueryResult>> {
 
@@ -28,30 +27,35 @@ public DatasetUsageStatsResolver(final UsageClient usageClient) {
   }
 
   @Override
-  public CompletableFuture<UsageQueryResult> get(DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<UsageQueryResult> get(DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn());
     final UsageTimeRange range = UsageTimeRange.valueOf(environment.getArgument("range"));
 
-    return CompletableFuture.supplyAsync(() -> {
-      if (!isAuthorized(resourceUrn, context)) {
-        log.debug("User {} is not authorized to view usage information for dataset {}",
-            context.getActorUrn(),
-            resourceUrn.toString());
-        return null;
-      }
-      try {
-        com.linkedin.usage.UsageQueryResult
-            usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range);
-        return UsageQueryResultMapper.map(usageQueryResult);
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!isAuthorized(resourceUrn, context)) {
+            log.debug(
+                "User {} is not authorized to view usage information for dataset {}",
+                context.getActorUrn(),
+                resourceUrn.toString());
+            return null;
+          }
+          try {
+            com.linkedin.usage.UsageQueryResult usageQueryResult =
+                usageClient.getUsageStats(resourceUrn.toString(), range);
+            return UsageQueryResultMapper.map(usageQueryResult);
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to load Usage Stats for resource %s", resourceUrn), e);
+          }
+        });
   }
 
   private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) {
-    return AuthorizationUtils.isAuthorized(context,
+    return AuthorizationUtils.isAuthorized(
+        context,
         Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())),
         PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE);
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java
index 75c09d0cf7e43..62c88c506ba61 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java
@@ -1,16 +1,20 @@
 package com.linkedin.datahub.graphql.resolvers.deprecation;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.Deprecation;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.template.SetMode;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
-import com.datahub.authorization.ConjunctivePrivilegeGroup;
-import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput;
 import com.linkedin.datahub.graphql.resolvers.AuthUtils;
+import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.entity.EntityService;
@@ -23,13 +27,9 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Resolver used for updating the Deprecation status of a Metadata Asset. Requires the EDIT_ENTITY_DEPRECATION privilege for a particular asset.
+ * Resolver used for updating the Deprecation status of a Metadata Asset. Requires the
+ * EDIT_ENTITY_DEPRECATION privilege for a particular asset.
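+ *
+ * <p>Authorization is expressed as an OR across privilege groups: either the blanket EDIT_ENTITY
+ * privilege or the granular deprecation privilege suffices. A sketch of the equivalent check,
+ * using the same names as the method below:
+ *
+ * <pre>{@code
+ * new DisjunctivePrivilegeGroup(
+ *     ImmutableList.of(
+ *         AuthUtils.ALL_PRIVILEGES_GROUP, // EDIT_ENTITY
+ *         new ConjunctivePrivilegeGroup(
+ *             ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType()))));
+ * }</pre>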
 */
 @Slf4j
 @RequiredArgsConstructor
@@ -37,48 +37,61 @@ public class UpdateDeprecationResolver implements DataFetcher<CompletableFuture<
   private static final String EMPTY_STRING = "";
   private final EntityClient _entityClient;
-  private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient
+  private final EntityService
+      _entityService; // TODO: Remove this when 'exists' added to EntityClient
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final UpdateDeprecationInput input = bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class);
+    final UpdateDeprecationInput input =
+        bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class);
     final Urn entityUrn = Urn.createFromString(input.getUrn());
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      validateUpdateDeprecationInput(
-          entityUrn,
-          _entityService
-      );
-      try {
-        Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity(
-            entityUrn.toString(),
-            DEPRECATION_ASPECT_NAME,
-            _entityService,
-            new Deprecation());
-        updateDeprecation(deprecation, input, context);
-
-        // Create the Deprecation aspect
-        final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(entityUrn, DEPRECATION_ASPECT_NAME, deprecation);
-        _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to update Deprecation for resource with entity urn {}: {}", entityUrn, e.getMessage());
-        throw new RuntimeException(String.format("Failed to update Deprecation for resource with entity urn %s", entityUrn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          validateUpdateDeprecationInput(entityUrn, _entityService);
+          try {
+            Deprecation deprecation =
+                (Deprecation)
+                    EntityUtils.getAspectFromEntity(
+                        entityUrn.toString(),
+                        DEPRECATION_ASPECT_NAME,
+                        _entityService,
+                        new Deprecation());
+            updateDeprecation(deprecation, input, context);
+
+            // Create the Deprecation aspect
+            final MetadataChangeProposal proposal =
+                MutationUtils.buildMetadataChangeProposalWithUrn(
+                    entityUrn, DEPRECATION_ASPECT_NAME, deprecation);
+            _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to update Deprecation for resource with entity urn {}: {}",
+                entityUrn,
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to update Deprecation for resource with entity urn %s", entityUrn),
+                e);
+          }
+        });
   }
 
-  private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext context, final Urn entityUrn) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of(
-        AuthUtils.ALL_PRIVILEGES_GROUP,
-        new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType()))
-    ));
+  private boolean isAuthorizedToUpdateDeprecationForEntity(
+      final QueryContext context, final Urn entityUrn) {
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                AuthUtils.ALL_PRIVILEGES_GROUP,
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType()))));
 
     return AuthorizationUtils.isAuthorized(
         context.getAuthorizer(),
@@ -88,20 +101,19 @@ private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext cont
         orPrivilegeGroups);
   }
 
-  public static Boolean validateUpdateDeprecationInput(
-      Urn entityUrn,
-      EntityService entityService
-  ) {
+  public static Boolean validateUpdateDeprecationInput(Urn entityUrn, EntityService entityService) {
 
     if (!entityService.exists(entityUrn)) {
       throw new IllegalArgumentException(
-          String.format("Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn));
+          String.format(
+              "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn));
     }
 
     return true;
   }
 
-  private static void updateDeprecation(Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) {
+  private static void updateDeprecation(
+      Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) {
     deprecation.setDeprecated(input.getDeprecated());
     deprecation.setDecommissionTime(input.getDecommissionTime(), SetMode.REMOVE_IF_NULL);
     if (input.getNote() != null) {
@@ -115,9 +127,10 @@ private static void updateDeprecation(Deprecation deprecation, UpdateDeprecation
       } catch (URISyntaxException e) {
         // Should never happen.
         throw new RuntimeException(
-            String.format("Failed to convert authorized actor into an Urn. actor urn: %s",
-                context.getActorUrn()),
+            String.format(
+                "Failed to convert authorized actor into an Urn. actor urn: %s",
+                context.getActorUrn()),
             e);
       }
     }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java
index 1930cdc1f8667..9099394d32bd0 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -23,22 +28,15 @@
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.net.URISyntaxException;
 import java.util.UUID;
 import java.util.concurrent.CompletableFuture;
-
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS privilege.
+ * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS
+ * privilege.
  */
 @Slf4j
 @RequiredArgsConstructor
@@ -51,71 +49,101 @@ public class CreateDomainResolver implements DataFetcher<CompletableFuture<Strin
   public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception {
 
     final QueryContext context = environment.getContext();
-    final CreateDomainInput input = bindArgument(environment.getArgument("input"), CreateDomainInput.class);
-    final Urn parentDomain = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null;
-
-    return CompletableFuture.supplyAsync(() -> {
-      if (!AuthorizationUtils.canCreateDomains(context)) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-
-      try {
-        // Create the Domain Key
-        final DomainKey key = new DomainKey();
-
-        // Take user provided id OR generate a random UUID for the domain.
-        final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString();
-        key.setId(id);
-
-        if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), context.getAuthentication())) {
-          throw new IllegalArgumentException("This Domain already exists!");
-        }
-
-        if (parentDomain != null && !_entityClient.exists(parentDomain, context.getAuthentication())) {
-          throw new IllegalArgumentException("Parent Domain does not exist!");
-        }
-
-        if (DomainUtils.hasNameConflict(input.getName(), parentDomain, context, _entityClient)) {
-          throw new DataHubGraphQLException(
-              String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()),
-              DataHubGraphQLErrorCode.CONFLICT
-          );
-        }
-
-        // Create the MCP
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, DOMAIN_ENTITY_NAME,
-            DOMAIN_PROPERTIES_ASPECT_NAME, mapDomainProperties(input, context));
-        proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key));
-
-        String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER;
-        if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) {
-          log.warn("Technical owner does not exist, defaulting to None ownership.");
-          ownershipType = OwnershipType.NONE;
-        }
-        OwnerUtils.addCreatorAsOwner(context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService);
-        return domainUrn;
-      } catch (DataHubGraphQLException e) {
-        throw e;
-      } catch (Exception e) {
-        log.error("Failed to create Domain with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to create Domain with id: %s, name: %s", input.getId(), input.getName()), e);
-      }
-    });
+    final CreateDomainInput input =
+        bindArgument(environment.getArgument("input"), CreateDomainInput.class);
+    final Urn parentDomain =
+        input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null;
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!AuthorizationUtils.canCreateDomains(context)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+
+          try {
+            // Create the Domain Key
+            final DomainKey key = new DomainKey();
+
+            // Take user provided id OR generate a random UUID for the domain.
+            final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString();
+            key.setId(id);
+
+            if (_entityClient.exists(
+                EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME),
+                context.getAuthentication())) {
+              throw new IllegalArgumentException("This Domain already exists!");
+            }
+
+            if (parentDomain != null
+                && !_entityClient.exists(parentDomain, context.getAuthentication())) {
+              throw new IllegalArgumentException("Parent Domain does not exist!");
+            }
+
+            if (DomainUtils.hasNameConflict(
+                input.getName(), parentDomain, context, _entityClient)) {
+              throw new DataHubGraphQLException(
+                  String.format(
+                      "\"%s\" already exists in this domain. Please pick a unique name.",
+                      input.getName()),
+                  DataHubGraphQLErrorCode.CONFLICT);
+            }
+
+            // Create the MCP
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithKey(
+                    key,
+                    DOMAIN_ENTITY_NAME,
+                    DOMAIN_PROPERTIES_ASPECT_NAME,
+                    mapDomainProperties(input, context));
+            proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key));
+
+            String domainUrn =
+                _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER;
+            if (!_entityService.exists(
+                UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) {
+              log.warn("Technical owner does not exist, defaulting to None ownership.");
+              ownershipType = OwnershipType.NONE;
+            }
+            OwnerUtils.addCreatorAsOwner(
+                context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService);
+            return domainUrn;
+          } catch (DataHubGraphQLException e) {
+            throw e;
+          } catch (Exception e) {
+            log.error(
+                "Failed to create Domain with id: {}, name: {}: {}",
+                input.getId(),
+                input.getName(),
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to create Domain with id: %s, name: %s",
+                    input.getId(), input.getName()),
+                e);
+          }
+        });
   }
 
-  private DomainProperties mapDomainProperties(final CreateDomainInput input, final QueryContext context) {
+  private DomainProperties mapDomainProperties(
+      final CreateDomainInput input, final QueryContext context) {
     final DomainProperties result = new DomainProperties();
     result.setName(input.getName());
     result.setDescription(input.getDescription(), SetMode.IGNORE_NULL);
-    result.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()));
+    result.setCreated(
+        new AuditStamp()
+            .setActor(UrnUtils.getUrn(context.getActorUrn()))
+            .setTime(System.currentTimeMillis()));
     if (input.getParentDomain() != null) {
       try {
         result.setParentDomain(Urn.createFromString(input.getParentDomain()));
       } catch (URISyntaxException e) {
-        throw new RuntimeException(String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), e);
+        throw new RuntimeException(
+            String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()),
+            e);
       }
     }
     return result;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java
index 9ab90e8b4ff72..c863f2e581dcb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java
@@ -11,10 +11,7 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Resolver responsible for hard deleting a particular DataHub Domain
- */
+/** Resolver responsible for hard deleting a particular DataHub Domain */
 @Slf4j
 public class DeleteDomainResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -25,37 +22,49 @@ public DeleteDomainResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String domainUrn = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(domainUrn);
 
-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (AuthorizationUtils.canManageDomains(context)
+              || AuthorizationUtils.canDeleteEntity(urn, context)) {
+            try {
+              // Make sure there are no child domains
+              if (DomainUtils.hasChildDomains(urn, context, _entityClient)) {
+                throw new RuntimeException(
+                    String.format("Cannot delete domain %s which has child domains", domainUrn));
+              }
 
-      if (AuthorizationUtils.canManageDomains(context) || AuthorizationUtils.canDeleteEntity(urn, context)) {
-        try {
-          // Make sure there are no child domains
-          if (DomainUtils.hasChildDomains(urn, context, _entityClient)) {
-            throw new RuntimeException(String.format("Cannot delete domain %s which has child domains", domainUrn));
-          }
+              _entityClient.deleteEntity(urn, context.getAuthentication());
+              log.info(
+                  String.format("Successfully deleted the entity with urn %s", domainUrn));
 
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-          log.info(String.format("Successfully deleted the entity with urn %s", domainUrn));
+              // Asynchronously Delete all references to the entity (to return quickly)
+              CompletableFuture.runAsync(
+                  () -> {
+                    try {
+                      _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+                    } catch (Exception e) {
+                      log.error(
+                          String.format(
+                              "Caught exception while attempting to clear all entity references for Domain with urn %s",
+                              urn),
+                          e);
+                    }
+                  });
 
-          // Asynchronously Delete all references to the entity (to return quickly)
-          CompletableFuture.runAsync(() -> {
-            try {
-              _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+              return true;
             } catch (Exception e) {
-              log.error(String.format("Caught exception while attempting to clear all entity references for Domain with urn %s", urn), e);
+              throw new RuntimeException(
+                  String.format("Failed to perform delete against domain with urn %s", domainUrn),
+                  e);
             }
-          });
-
-          return true;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", domainUrn), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java
index 0bf551c4683e6..8f6d109e71b2c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
+
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.Domain;
 import com.linkedin.datahub.graphql.generated.DomainEntitiesInput;
@@ -19,13 +22,7 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
-
-
-/**
- * Resolves the entities in a particular Domain.
- */
+/** Resolves the entities in a particular Domain. */
 @Slf4j
 public class DomainEntitiesResolver implements DataFetcher<CompletableFuture<SearchResults>> {
 
@@ -49,50 +46,65 @@ public DomainEntitiesResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment)
+      throws Exception {
 
     final QueryContext context = environment.getContext();
     final String urn = ((Domain) environment.getSource()).getUrn();
 
-    final DomainEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null
-        ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class)
-        : DEFAULT_ENTITIES_INPUT;
+    final DomainEntitiesInput input =
+        environment.getArgument(INPUT_ARG_NAME) != null
+            ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class)
+            : DEFAULT_ENTITIES_INPUT;
 
     final String query = input.getQuery() != null ? input.getQuery() : DEFAULT_QUERY;
     final int start = input.getStart() != null ? input.getStart() : DEFAULT_START;
     final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      try {
-
-        final CriterionArray criteria = new CriterionArray();
-        final Criterion filterCriterion = new Criterion()
-            .setField(DOMAINS_FIELD_NAME + ".keyword")
-            .setCondition(Condition.EQUAL)
-            .setValue(urn);
-        criteria.add(filterCriterion);
-        if (input.getFilters() != null) {
-          input.getFilters().forEach(filter -> {
-            criteria.add(new Criterion().setField(filter.getField()).setValue(filter.getValue()));
-          });
-        }
-
-        return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities(
-            SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()),
-            query,
-            new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(criteria))),
-            start,
-            count,
-            null,
-            null,
-            context.getAuthentication()
-        ));
-
-      } catch (Exception e) {
-        throw new RuntimeException(
-            String.format("Failed to resolve entities associated with Domain with urn %s", urn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+
+            final CriterionArray criteria = new CriterionArray();
+            final Criterion filterCriterion =
+                new Criterion()
+                    .setField(DOMAINS_FIELD_NAME + ".keyword")
+                    .setCondition(Condition.EQUAL)
+                    .setValue(urn);
+            criteria.add(filterCriterion);
+            if (input.getFilters() != null) {
+              input
+                  .getFilters()
+                  .forEach(
+                      filter -> {
+                        criteria.add(
+                            new Criterion()
+                                .setField(filter.getField())
+                                .setValue(filter.getValue()));
+                      });
+            }
+
+            return UrnSearchResultsMapper.map(
+                _entityClient.searchAcrossEntities(
+                    SEARCHABLE_ENTITY_TYPES.stream()
+                        .map(EntityTypeMapper::getName)
+                        .collect(Collectors.toList()),
+                    query,
+                    new Filter()
+                        .setOr(
+                            new ConjunctiveCriterionArray(
+                                new ConjunctiveCriterion().setAnd(criteria))),
+                    start,
+                    count,
+                    null,
+                    null,
+                    context.getAuthentication()));
+
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to resolve entities associated with Domain with urn %s", urn),
+                e);
+          }
+        });
  }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java
index 3a751e502eb10..5453603f4cc9f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -18,18 +21,14 @@
 import com.linkedin.metadata.search.SearchResult;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS platform privilege.
+ * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS
+ * platform privilege.
*/ public class ListDomainsResolver implements DataFetcher<CompletableFuture<ListDomainsResult>> { private static final Integer DEFAULT_START = 0; @@ -43,47 +42,56 @@ public ListDomainsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - final ListDomainsInput input = bindArgument(environment.getArgument("input"), ListDomainsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final Urn parentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); + return CompletableFuture.supplyAsync( + () -> { + final ListDomainsInput input = + bindArgument(environment.getArgument("input"), ListDomainsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final Urn parentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); - try { - // First, get all domain Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.DOMAIN_ENTITY_NAME, - query, - filter, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all domain Urns. + final SearchResult gmsResult = + _entityClient.search( + Constants.DOMAIN_ENTITY_NAME, + query, + filter, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. - final ListDomainsResult result = new ListDomainsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setDomains(mapUnresolvedDomains(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list domains", e); - } - }); + // Now that we have entities we can bind this to a result. 
+ final ListDomainsResult result = new ListDomainsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setDomains( + mapUnresolvedDomains( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list domains", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial Domain objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Domain objects which will be + // resolved by a separate Batch resolver. private List<Domain> mapUnresolvedDomains(final List<Urn> entityUrns) { final List<Domain> results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java index dcaa7d61ed90c..8406e19810468 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -9,51 +11,53 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; - public class ParentDomainsResolver implements DataFetcher<CompletableFuture<ParentDomainsResult>> { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public ParentDomainsResolver(final EntityClient entityClient) { - _entityClient = entityClient; + public ParentDomainsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public CompletableFuture<ParentDomainsResult> get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); + final List<Entity> parentDomains = new ArrayList<>(); + final Set<String> visitedParentUrns = new HashSet<>(); + + if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { + throw new IllegalArgumentException( + String.format("Failed to resolve parents for entity %s", urn)); } - @Override - public CompletableFuture<ParentDomainsResult> get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - final List<Entity> parentDomains = new ArrayList<>(); - final Set<String> visitedParentUrns = new HashSet<>(); - - if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { - throw new IllegalArgumentException(String.format("Failed to resolve parents for entity type %s", urn)); - } - - return CompletableFuture.supplyAsync(() -> { - try { - Entity parentDomain = 
DomainUtils.getParentDomain(urn, context, _entityClient); - - while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { - parentDomains.add(parentDomain); - visitedParentUrns.add(parentDomain.getUrn()); - parentDomain = DomainUtils.getParentDomain(Urn.createFromString(parentDomain.getUrn()), context, _entityClient); - } - - final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load parent domains for entity %s", urn), e); + return CompletableFuture.supplyAsync( + () -> { + try { + Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); + + while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { + parentDomains.add(parentDomain); + visitedParentUrns.add(parentDomain.getUrn()); + parentDomain = + DomainUtils.getParentDomain( + Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } + + final ParentDomainsResult result = new ParentDomainsResult(); + result.setCount(parentDomains.size()); + result.setDomains(parentDomains); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load parent domains for entity %s", urn), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 56a76dcb1e07f..1c52f707c61a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,19 +19,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor public class SetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { @@ -37,49 +38,56 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); final Urn domainUrn = Urn.createFromString(environment.getArgument("domainUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateSetDomainInput( - entityUrn, - domainUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - setDomain(domains, domainUrn); + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + validateSetDomainInput(entityUrn, domainUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + setDomain(domains, domainUrn); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set Domain to resource with entity urn {}, domain urn {}: {}", entityUrn, domainUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set Domain to resource with entity urn %s, domain urn %s", entityUrn, domainUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to set Domain to resource with entity urn {}, domain urn {}: {}", + entityUrn, + domainUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to set Domain to resource with entity urn %s, domain urn %s", + entityUrn, domainUrn), + e); + } + }); } public static Boolean validateSetDomainInput( - Urn entityUrn, - Urn domainUrn, - EntityService entityService - ) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Domain does not exist.", + entityUrn, domainUrn)); } if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. 
Entity does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Entity does not exist.", + entityUrn, domainUrn)); } return true; @@ -90,4 +98,4 @@ private static void setDomain(Domains domains, Urn domainUrn) { newDomain.add(domainUrn); domains.setDomains(newDomain); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index 01dd4f1254f8e..b2a82ac7608d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,19 +20,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class UnsetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { @@ -37,39 +38,40 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - validateUnsetDomainInput( - entityUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - unsetDomain(domains); + validateUnsetDomainInput(entityUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + unsetDomain(domains); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to unset Domains for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to unset Domains for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), + e); + } + }); } - public static Boolean validateUnsetDomainInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( @@ -85,4 +87,4 @@ private static void unsetDomain(@Nonnull Domains domains) { } domains.getDomains().clear(); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index dbaf6000477aa..e1b264606074c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.Urn; @@ -19,14 +23,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for updating the embed render URL for an asset. - */ +/** Resolver used for updating the embed render URL for an asset. 
*/ @Slf4j @RequiredArgsConstructor public class UpdateEmbedResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -37,62 +34,70 @@ public class UpdateEmbedResolver implements DataFetcher<CompletableFuture<Boolea public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateEmbedInput input = bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); + final UpdateEmbedInput input = + bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); final Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + validateUpdateEmbedInput(input, _entityService); + try { + final Embed embed = + (Embed) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), EMBED_ASPECT_NAME, _entityService, new Embed()); - if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateUpdateEmbedInput( - input, - _entityService - ); - try { - final Embed embed = (Embed) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - EMBED_ASPECT_NAME, - _entityService, - new Embed()); + updateEmbed(embed, input); - updateEmbed(embed, input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed); - _entityService.ingestProposal( - proposal, - new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()), - false - ); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Embed for to resource with entity urn %s", entityUrn), e); - } - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed); + _entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis()), + false); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update Embed for resource with entity urn %s", entityUrn), + e); + } + }); } /** - * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link IllegalArgumentException} if the input - * is not valid. + * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link + * IllegalArgumentException} if the input is not valid. * - * For an input to be valid, the target URN must exist. + * <p>For an input to be valid, the target URN must exist. * * @param input the input to validate * @param entityService an instance of {@link EntityService} used to validate the input. */ - private static void validateUpdateEmbedInput(@Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { + private static void validateUpdateEmbedInput( + @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { throw new IllegalArgumentException( - String.format("Failed to update embed for entity with urn %s. 
Entity does not exist!", input.getUrn())); + String.format( + "Failed to update embed for entity with urn %s. Entity does not exist!", + input.getUrn())); } } /** * Applies an instance of {@link UpdateEmbedInput} to a base instance of {@link Embed}. + * * @param embed an embed to update * @param input the updates to apply */ - private static void updateEmbed(@Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { + private static void updateEmbed( + @Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { embed.setRenderUrl(input.getRenderUrl(), SetMode.IGNORE_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index 613f97182c5dd..d2bd2f3fb8a17 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.entity.EntityService; @@ -8,12 +10,7 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for returning whether an entity exists. - */ +/** Resolver responsible for returning whether an entity exists. */ public class EntityExistsResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityService _entityService; @@ -22,7 +19,8 @@ public EntityExistsResolver(final EntityService entityService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { String entityUrnString = bindArgument(environment.getArgument("urn"), String.class); // resolver can be used as its own endpoint or when hydrating an entity if (entityUrnString == null && environment.getSource() != null) { @@ -31,12 +29,14 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) Objects.requireNonNull(entityUrnString, "Entity urn must not be null!"); final Urn entityUrn = Urn.createFromString(entityUrnString); - return CompletableFuture.supplyAsync(() -> { - try { - return _entityService.exists(entityUrn); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to check whether entity %s exists", entityUrn.toString())); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _entityService.exists(entityUrn); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to check whether entity %s exists", entityUrn.toString()), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index d8190a160f268..751c6096de1a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -9,17 +9,16 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPrivileges; -import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.extern.slf4j.Slf4j; - import java.util.Collections; import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityPrivilegesResolver implements DataFetcher<CompletableFuture<EntityPrivileges>> { @@ -36,25 +35,28 @@ public CompletableFuture<EntityPrivileges> get(DataFetchingEnvironment environme final String urnString = ((Entity) environment.getSource()).getUrn(); final Urn urn = UrnUtils.getUrn(urnString); - return CompletableFuture.supplyAsync(() -> { - switch (urn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return getGlossaryTermPrivileges(urn, context); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return getGlossaryNodePrivileges(urn, context); - case Constants.DATASET_ENTITY_NAME: - return getDatasetPrivileges(urn, context); - case Constants.CHART_ENTITY_NAME: - return getChartPrivileges(urn, context); - case Constants.DASHBOARD_ENTITY_NAME: - return getDashboardPrivileges(urn, context); - case Constants.DATA_JOB_ENTITY_NAME: - return getDataJobPrivileges(urn, context); - default: - log.warn("Tried to get entity privileges for entity type {} but nothing is implemented for it yet", urn.getEntityType()); - return new EntityPrivileges(); - } - }); + return CompletableFuture.supplyAsync( + () -> { + switch (urn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return getGlossaryTermPrivileges(urn, context); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return getGlossaryNodePrivileges(urn, context); + case Constants.DATASET_ENTITY_NAME: + return getDatasetPrivileges(urn, context); + case Constants.CHART_ENTITY_NAME: + return getChartPrivileges(urn, context); + case Constants.DASHBOARD_ENTITY_NAME: + return getDashboardPrivileges(urn, context); + case Constants.DATA_JOB_ENTITY_NAME: + return getDataJobPrivileges(urn, context); + default: + log.warn( + "Tried to get entity privileges for entity type {} but nothing is implemented for it yet", + urn.getEntityType()); + return new EntityPrivileges(); + } + }); } private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext context) { @@ -66,7 +68,8 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con } Urn parentNodeUrn = GlossaryUtils.getParentUrn(termUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; @@ -80,25 +83,29 @@ private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext con result.setCanManageChildren(true); return result; } - Boolean canManageChildren = 
GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); + Boolean canManageChildren = + GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); result.setCanManageChildren(canManageChildren); Urn parentNodeUrn = GlossaryUtils.getParentUrn(nodeUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup orPrivilegesGroup = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup orPrivilegesGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 69b5b14edfbee..535dbbf70a4cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; @@ -9,22 +12,18 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -36,70 +35,89 @@ public class AddRelatedTermsResolver implements DataFetcher<CompletableFuture<Bo public 
CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - validateRelatedTermsInput(urn, termUrns); - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - glossaryRelatedTerms = new GlossaryRelatedTerms(); - } - - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + validateRelatedTermsInput(urn, termUrns); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + glossaryRelatedTerms = new GlossaryRelatedTerms(); + } + + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add related terms to %s", input.getUrn()), e); } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add related terms to %s", input.getUrn()), e); - } - } - 
throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } public Boolean validateRelatedTermsInput(Urn urn, List<Urn> termUrns) { - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); } for (Urn termUrn : termUrns) { if (termUrn.equals(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. Tried to create related term with itself.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. Tried to create related term with itself.", urn)); } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); } else if (!_entityService.exists(termUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", urn, termUrn)); } } return true; } - private Boolean updateRelatedTerms(List<Urn> termUrns, GlossaryTermUrnArray existingTermUrns, Urn urn, GlossaryRelatedTerms glossaryRelatedTerms, Urn actor) { + private Boolean updateRelatedTerms( + List<Urn> termUrns, + GlossaryTermUrnArray existingTermUrns, + Urn urn, + GlossaryRelatedTerms glossaryRelatedTerms, + Urn actor) { List<Urn> termsToAdd = new ArrayList<>(); for (Urn termUrn : termUrns) { if (existingTermUrns.stream().anyMatch(association -> association.equals(termUrn))) { @@ -117,7 +135,12 @@ private Boolean updateRelatedTerms(List<Urn> termUrns, GlossaryTermUrnArray exis existingTermUrns.add(newUrn); } - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); return true; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index cc0ab4e03a4e8..815b4662e1ed2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import 
com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,18 +24,11 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,41 +41,67 @@ public class CreateGlossaryNodeResolver implements DataFetcher<CompletableFuture public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - try { - final GlossaryNodeKey key = new GlossaryNodeKey(); - - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Node already exists!"); + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + try { + final GlossaryNodeKey key = new GlossaryNodeKey(); + + final String id = + input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Node already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_NODE_ENTITY_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + mapGlossaryNodeInfo(input)); + + String glossaryNodeUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryNodeUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryNodeUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryNode with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNode with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, mapGlossaryNodeInfo(input)); - - String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - - OwnerUtils.addCreatorAsOwner(context, glossaryNodeUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryNodeUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryNode with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryNode with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput input) { @@ -90,10 +114,12 @@ private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index ad69e0c5876e2..90979fe918f71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,9 +30,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -37,12 +39,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -57,42 +55,69 @@ public class CreateGlossaryTermResolver implements DataFetcher<CompletableFuture public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - // Ensure there isn't another glossary term with the same name at this level of the glossary - validateGlossaryTermName(parentNode, context, input.getName()); - try { - final GlossaryTermKey key = new GlossaryTermKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Term already exists!"); - } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, mapGlossaryTermInfo(input)); - - String glossaryTermUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + // Ensure there isn't another glossary term with the same name at this level of the + // glossary + validateGlossaryTermName(parentNode, context, input.getName()); + try { + final GlossaryTermKey key = new GlossaryTermKey(); + + final String id = + input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Term already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_TERM_ENTITY_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + mapGlossaryTermInfo(input)); + + String glossaryTermUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryTermUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryTermUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryTerm with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryTerm with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - OwnerUtils.addCreatorAsOwner(context, glossaryTermUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryTermUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryTerm with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryTerm with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput input) { @@ -106,7 +131,10 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; @@ -114,25 +142,22 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp private Filter buildParentNodeFilter(final Urn parentNodeUrn) { final Map<String, String> criterionMap = new HashMap<>(); - criterionMap.put(PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); + criterionMap.put( + PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); return QueryUtils.newFilter(criterionMap); } private Map<Urn, EntityResponse> getTermsWithSameParent(Urn parentNode, QueryContext context) { try { final Filter filter = buildParentNodeFilter(parentNode); - final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + _entityClient.filter( + GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final List<Urn> termUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List<Urn> termUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); return _entityClient.batchGetV2( GLOSSARY_TERM_ENTITY_NAME, @@ -147,14 +172,17 @@ private Map<Urn, EntityResponse> getTermsWithSameParent(Urn parentNode, QueryCon private void validateGlossaryTermName(Urn parentNode, QueryContext context, String name) { Map<Urn, EntityResponse> entities = getTermsWithSameParent(parentNode, context); - entities.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); - GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); - if (termInfo.hasName() && termInfo.getName().equals(name)) { - throw new IllegalArgumentException("Glossary Term with this name already exists at this level of the Business Glossary"); - } - } - }); + entities.forEach( + (urn, entityResponse) -> { + if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); + if (termInfo.hasName() && termInfo.getName().equals(name)) { + throw new IllegalArgumentException( + "Glossary Term with this name already exists at this level of the Business Glossary"); + } + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index 0929c7138528d..f623f0e34b366 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -11,50 +11,59 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DeleteGlossaryEntityResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteGlossaryEntityResolver(final EntityClient entityClient, EntityService entityService) { + public DeleteGlossaryEntityResolver( + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("urn")); final Urn parentNodeUrn = GlossaryUtils.getParentUrn(entityUrn, context, _entityClient); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { - throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); - } - - try { - _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + if (!_entityService.exists(entityUrn)) { + throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); + } - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { try { - _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for glossary entity with urn %s", + entityUrn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for glossary entity with urn %s", entityUrn), e); + throw new RuntimeException( + String.format( + "Failed to perform delete against glossary entity with urn %s", entityUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against glossary entity with urn %s", entityUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index 1457a308c8774..e7990b1a343d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryNodesResolver implements DataFetcher<CompletableFuture<GetRootGlossaryNodesResult>> { +public class GetRootGlossaryNodesResolver + implements DataFetcher<CompletableFuture<GetRootGlossaryNodesResult>> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryNodesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<GetRootGlossaryNodesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GetRootGlossaryNodesResult> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsNodesResult = _entityClient.filter( - Constants.GLOSSARY_NODE_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); - - final List<Urn> glossaryNodeUrns = gmsNodesResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); - - final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); - result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); - result.setCount(glossaryNodeUrns.size()); - result.setStart(gmsNodesResult.getFrom()); - result.setTotal(gmsNodesResult.getNumEntities()); - - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsNodesResult = + _entityClient.filter( + Constants.GLOSSARY_NODE_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); + + final List<Urn> glossaryNodeUrns = + 
gmsNodesResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); + result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); + result.setCount(glossaryNodeUrns.size()); + result.setStart(gmsNodesResult.getFrom()); + result.setTotal(gmsNodesResult.getNumEntities()); + + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } @@ -101,4 +103,3 @@ private List<GlossaryNode> mapUnresolvedGlossaryNodes(final List<Urn> entityUrns return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index f7684e477f830..40e4363dcff93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryTermsResolver implements DataFetcher<CompletableFuture<GetRootGlossaryTermsResult>> { +public class GetRootGlossaryTermsResolver + implements DataFetcher<CompletableFuture<GetRootGlossaryTermsResult>> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryTermsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<GetRootGlossaryTermsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GetRootGlossaryTermsResult> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); + return CompletableFuture.supplyAsync( + () -> { + final 
GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsTermsResult = _entityClient.filter( - Constants.GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsTermsResult = + _entityClient.filter( + Constants.GLOSSARY_TERM_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); - final List<Urn> glossaryTermUrns = gmsTermsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List<Urn> glossaryTermUrns = + gmsTermsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); - result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); - result.setCount(glossaryTermUrns.size()); - result.setStart(gmsTermsResult.getFrom()); - result.setTotal(gmsTermsResult.getNumEntities()); + final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); + result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); + result.setCount(glossaryTermUrns.size()); + result.setStart(gmsTermsResult.getFrom()); + result.setTotal(gmsTermsResult.getNumEntities()); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); - } - }); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index d513d70f39f58..850469f996515 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -14,18 +18,13 @@ import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; - -public class ParentNodesResolver implements DataFetcher<CompletableFuture<ParentNodesResult>> { +public class ParentNodesResolver implements DataFetcher<CompletableFuture<ParentNodesResult>> { private final EntityClient _entityClient; @@ -36,19 +35,23 @@ public ParentNodesResolver(final EntityClient entityClient) { private void aggregateParentNodes(List<GlossaryNode> nodes, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(dataMap); if (nodeInfo.hasParentNode()) { Urn parentNodeUrn = nodeInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); nodes.add(mappedNode); @@ -64,19 +67,23 @@ private void aggregateParentNodes(List<GlossaryNode> nodes, String urn, QueryCon private GlossaryNode getTermParentNode(String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); if (termInfo.hasParentNode()) { Urn parentNodeUrn = termInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + 
parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); return mappedNode; @@ -95,27 +102,28 @@ public CompletableFuture<ParentNodesResult> get(DataFetchingEnvironment environm final String urn = ((Entity) environment.getSource()).getUrn(); final List<GlossaryNode> nodes = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - final String type = Urn.createFromString(urn).getEntityType(); + return CompletableFuture.supplyAsync( + () -> { + try { + final String type = Urn.createFromString(urn).getEntityType(); - if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { - final GlossaryNode parentNode = getTermParentNode(urn, context); - if (parentNode != null) { - nodes.add(parentNode); - aggregateParentNodes(nodes, parentNode.getUrn(), context); - } - } else { - aggregateParentNodes(nodes, urn, context); - } + if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { + final GlossaryNode parentNode = getTermParentNode(urn, context); + if (parentNode != null) { + nodes.add(parentNode); + aggregateParentNodes(nodes, parentNode.getUrn(), context); + } + } else { + aggregateParentNodes(nodes, urn, context); + } - final ParentNodesResult result = new ParentNodesResult(); - result.setCount(nodes.size()); - result.setNodes(nodes); - return result; - } catch (DataHubGraphQLException | URISyntaxException e) { - throw new RuntimeException(("Failed to load parent nodes")); - } - }); + final ParentNodesResult result = new ParentNodesResult(); + result.setCount(nodes.size()); + result.setNodes(nodes); + return result; + } catch (DataHubGraphQLException | URISyntaxException e) { + throw new RuntimeException("Failed to load parent nodes"); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 417ef4292d0f7..8c9b792b74e0d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,15 +17,11 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -34,57 +33,82 @@ public class RemoveRelatedTermsResolver implements DataFetcher<CompletableFuture public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final
RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List<Urn> termUrnsToRemove = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List<Urn> termUrnsToRemove = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); - } + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", + urn, urn)); + } - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - throw new RuntimeException(String.format("Related Terms for this Urn do not exist: %s", urn)); - } + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + throw new RuntimeException( + String.format("Related Terms for this Urn do not exist: %s", urn)); + } - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + 
Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to remove related terms from %s", input.getUrn()), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to removes related terms from %s", input.getUrn()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java index daff0962bc2e8..acfc2cd14f8d4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -17,13 +20,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver that adds a set of native members to a group, if the user and group both exist. - */ +/** Resolver that adds a set of native members to a group, if the user and group both exist.
*/ public class AddGroupMembersResolver implements DataFetcher<CompletableFuture<Boolean>> { private final GroupService _groupService; @@ -33,9 +30,11 @@ public AddGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { - final AddGroupMembersInput input = bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); + final AddGroupMembersInput input = + bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -52,30 +51,37 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) String.format("Failed to add members to group %s. Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership for group %s when adding group members", groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually added to it", - groupUrnStr)); - } + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership for group %s when adding group members", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually added to it", + groupUrnStr)); + } - try { - // Add each user to the group - final List<Urn> userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - userUrnList.forEach(userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add group members to group %s", groupUrnStr)); - } - }); + try { + // Add each user to the group + final List<Urn> userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + userUrnList.forEach( + userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add group members to group %s", groupUrnStr)); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index 75f2a61287ecc..e487ee00608d4 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,10 +14,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -// Currently, this resolver will override the group details, but not group membership, if a group with the same name already exists. +// Currently, this resolver will override the group details, but not group membership, if a group +// with the same name already exists. public class CreateGroupResolver implements DataFetcher<CompletableFuture<String>> { private final GroupService _groupService; @@ -33,19 +33,22 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - final CreateGroupInput input = bindArgument(environment.getArgument("input"), CreateGroupInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, check if the group already exists. - // Create the Group key. - final CorpGroupKey key = new CorpGroupKey(); - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". - return _groupService.createNativeGroup(key, input.getName(), input.getDescription(), authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create group", e); - } - }); + final CreateGroupInput input = + bindArgument(environment.getArgument("input"), CreateGroupInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, check if the group already exists. + // Create the Group key. + final CorpGroupKey key = new CorpGroupKey(); + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". 
+ return _groupService.createNativeGroup( + key, input.getName(), input.getDescription(), authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create group", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index d0874b21fb106..93582fb956bd8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class EntityCountsResolver implements DataFetcher<CompletableFuture<EntityCountResults>> { private final EntityClient _entityClient; @@ -27,31 +26,42 @@ public EntityCountsResolver(final EntityClient entityClient) { @Override @WithSpan - public CompletableFuture<EntityCountResults> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<EntityCountResults> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final EntityCountInput input = bindArgument(environment.getArgument("input"), EntityCountInput.class); - final EntityCountResults results = new EntityCountResults(); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all counts - Map<String, Long> gmsResult = _entityClient.batchGetTotalEntityCount( - input.getTypes().stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), context.getAuthentication()); - - // bind to a result. - List<EntityCountResult> resultList = gmsResult.entrySet().stream().map(entry -> { - EntityCountResult result = new EntityCountResult(); - result.setCount(Math.toIntExact(entry.getValue())); - result.setEntityType(EntityTypeMapper.getType(entry.getKey())); - return result; - }).collect(Collectors.toList()); - results.setCounts(resultList); - return results; - } catch (Exception e) { - throw new RuntimeException("Failed to get entity counts", e); - } - }); + final EntityCountInput input = + bindArgument(environment.getArgument("input"), EntityCountInput.class); + final EntityCountResults results = new EntityCountResults(); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all counts + Map<String, Long> gmsResult = + _entityClient.batchGetTotalEntityCount( + input.getTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + context.getAuthentication()); + + // bind to a result. 
+ List<EntityCountResult> resultList = + gmsResult.entrySet().stream() + .map( + entry -> { + EntityCountResult result = new EntityCountResult(); + result.setCount(Math.toIntExact(entry.getValue())); + result.setEntityType(EntityTypeMapper.getType(entry.getKey())); + return result; + }) + .collect(Collectors.toList()); + results.setCounts(resultList); + return results; + } catch (Exception e) { + throw new RuntimeException("Failed to get entity counts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index 67cc84a33a954..a6ad8698679f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -24,10 +27,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListGroupsResolver implements DataFetcher<CompletableFuture<ListGroupsResult>> { private static final Integer DEFAULT_START = 0; @@ -41,51 +40,68 @@ public ListGroupsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListGroupsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListGroupsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListGroupsInput input = bindArgument(environment.getArgument("input"), ListGroupsInput.class); + final ListGroupsInput input = + bindArgument(environment.getArgument("input"), ListGroupsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all group Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_GROUP_ENTITY_NAME, + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all group Urns. + final SearchResult gmsResult = + _entityClient.search( + CORP_GROUP_ENTITY_NAME, query, null, - new SortCriterion().setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, count, context.getAuthentication(), + new SortCriterion() + .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), new SearchFlags().setFulltext(true)); - // Then, get hydrate all groups. 
- final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), null, context.getAuthentication()); + // Then, get hydrate all groups. + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - // Now that we have entities we can bind this to a result. - final ListGroupsResult result = new ListGroupsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setGroups(mapUnresolvedGroups(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list groups", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListGroupsResult result = new ListGroupsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setGroups( + mapUnresolvedGroups( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list groups", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - // This method maps urns returned from the list endpoint into Partial Group objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Group objects which will be + // resolved be a separate Batch resolver. 
private List<CorpGroup> mapUnresolvedGroups(final List<Urn> entityUrns) { final List<CorpGroup> results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java index 287b4aa7b5dbd..9fb63b3eb463d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -17,10 +20,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class RemoveGroupMembersResolver implements DataFetcher<CompletableFuture<Boolean>> { private final GroupService _groupService; @@ -30,9 +29,11 @@ public RemoveGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { - final RemoveGroupMembersInput input = bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); + final RemoveGroupMembersInput input = + bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -43,37 +44,42 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) } final Urn groupUrn = Urn.createFromString(groupUrnStr); - final List<Urn> userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List<Urn> userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); if (!_groupService.groupExists(groupUrn)) { // The group doesn't exist. throw new DataHubGraphQLException( - String.format("Failed to add remove members from group %s. Group does not exist.", groupUrnStr), + String.format( + "Failed to remove members from group %s.
Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership when removing group members from group %s", - groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually removed from it", - groupUrnStr)); - } - try { - _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership when removing group members from group %s", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually removed from it", + groupUrnStr)); + } + try { + _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java index 99481868e30ce..e69d6b471f3c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java @@ -10,10 +10,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class RemoveGroupResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -24,30 +21,39 @@ public RemoveGroupResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String groupUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(groupUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { try { - 
_entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for group with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for group with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against group with urn %s", groupUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against group with urn %s", groupUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 6a4af7563a8d8..036780d446701 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,25 +1,30 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), + authorizer); } public static boolean canManageSecrets(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); } - private IngestionAuthUtils() { } + private IngestionAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index 1140c031f1d35..ffa9dcf42d176 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ -25,11 +25,11 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IngestionResolverUtils { - public static List<ExecutionRequest> mapExecutionRequests(final Collection<EntityResponse> requests) { + public static List<ExecutionRequest> mapExecutionRequests( + final Collection<EntityResponse> requests) { List<ExecutionRequest> result = new ArrayList<>(); for (final EntityResponse request : requests) { result.add(mapExecutionRequest(request)); @@ -46,10 +46,13 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe result.setId(entityUrn.getId()); // Map input aspect. Must be present. - final EnvelopedAspect envelopedInput = aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); + final EnvelopedAspect envelopedInput = + aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); if (envelopedInput != null) { - final ExecutionRequestInput executionRequestInput = new ExecutionRequestInput(envelopedInput.getValue().data()); - final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); + final ExecutionRequestInput executionRequestInput = + new ExecutionRequestInput(envelopedInput.getValue().data()); + final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = + new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); inputResult.setTask(executionRequestInput.getTask()); if (executionRequestInput.hasSource()) { @@ -63,23 +66,29 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe } // Map result aspect. Optional. - final EnvelopedAspect envelopedResult = aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + final EnvelopedAspect envelopedResult = + aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (envelopedResult != null) { - final ExecutionRequestResult executionRequestResult = new ExecutionRequestResult(envelopedResult.getValue().data()); + final ExecutionRequestResult executionRequestResult = + new ExecutionRequestResult(envelopedResult.getValue().data()); result.setResult(mapExecutionRequestResult(executionRequestResult)); } return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource + mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); result.setType(execRequestSource.getType()); return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult + mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = + new 
com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); result.setStatus(execRequestResult.getStatus()); result.setStartTimeMs(execRequestResult.getStartTimeMs()); result.setDurationMs(execRequestResult.getDurationMs()); @@ -90,7 +99,8 @@ public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapE return result; } - public static StructuredReport mapStructuredReport(final StructuredExecutionReport structuredReport) { + public static StructuredReport mapStructuredReport( + final StructuredExecutionReport structuredReport) { StructuredReport structuredReportResult = new StructuredReport(); structuredReportResult.setType(structuredReport.getType()); structuredReportResult.setSerializedValue(structuredReport.getSerializedValue()); @@ -98,7 +108,8 @@ public static StructuredReport mapStructuredReport(final StructuredExecutionRepo return structuredReportResult; } - public static List<IngestionSource> mapIngestionSources(final Collection<EntityResponse> entities) { + public static List<IngestionSource> mapIngestionSources( + final Collection<EntityResponse> entities) { final List<IngestionSource> results = new ArrayList<>(); for (EntityResponse response : entities) { try { @@ -118,16 +129,19 @@ public static IngestionSource mapIngestionSource(final EntityResponse ingestionS final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); if (envelopedInfo == null) { - throw new IllegalStateException("No ingestion source info aspect exists for urn: " + entityUrn); + throw new IllegalStateException( + "No ingestion source info aspect exists for urn: " + entityUrn); } // Bind into a strongly typed object. - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); return mapIngestionSourceInfo(entityUrn, ingestionSourceInfo); } - public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHubIngestionSourceInfo info) { + public static IngestionSource mapIngestionSourceInfo( + final Urn urn, final DataHubIngestionSourceInfo info) { final IngestionSource result = new IngestionSource(); result.setUrn(urn.toString()); result.setName(info.getName()); @@ -139,29 +153,30 @@ public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHu return result; } - public static IngestionConfig mapIngestionSourceConfig(final DataHubIngestionSourceConfig config) { + public static IngestionConfig mapIngestionSourceConfig( + final DataHubIngestionSourceConfig config) { final IngestionConfig result = new IngestionConfig(); result.setRecipe(config.getRecipe()); result.setVersion(config.getVersion()); result.setExecutorId(config.getExecutorId()); result.setDebugMode(config.isDebugMode()); if (config.getExtraArgs() != null) { - List<StringMapEntry> extraArgs = config.getExtraArgs() - .keySet() - .stream() - .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) - .collect(Collectors.toList()); + List<StringMapEntry> extraArgs = + config.getExtraArgs().keySet().stream() + .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) + .collect(Collectors.toList()); result.setExtraArgs(extraArgs); } return result; } - public static IngestionSchedule mapIngestionSourceSchedule(final DataHubIngestionSourceSchedule schedule) { + public static IngestionSchedule mapIngestionSourceSchedule( + final DataHubIngestionSourceSchedule schedule) { 
final IngestionSchedule result = new IngestionSchedule(); result.setInterval(schedule.getInterval()); result.setTimezone(schedule.getTimezone()); return result; } - private IngestionResolverUtils() { } + private IngestionResolverUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java index 7f9cb6176989f..e346f2b077c98 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -22,15 +26,9 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Cancels a requested ingestion execution by emitting a KILL signal. - */ -public class CancelIngestionExecutionRequestResolver implements DataFetcher<CompletableFuture<String>> { +/** Cancels a requested ingestion execution by emitting a KILL signal. 
*/ +public class CancelIngestionExecutionRequestResolver + implements DataFetcher<CompletableFuture<String>> { private static final String KILL_EXECUTION_REQUEST_SIGNAL = "KILL"; @@ -44,45 +42,58 @@ public CancelIngestionExecutionRequestResolver(final EntityClient entityClient) public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { - if (IngestionAuthUtils.canManageIngestion(context)) { + final CancelIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); - final CancelIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); + try { + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map<Urn, EntityResponse> response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); - try { - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map<Urn, EntityResponse> response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", ingestionSourceUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = - response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - // Build the arguments map. - final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); - execSignal.setSignal(KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. - execSignal.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execSignal.setCreatedAt(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(Urn.createFromString(context.getActorUrn())) - ); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn( - input.getExecutionRequestUrn()), EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, execSignal); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to submit cancel signal %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Build the arguments map. 
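// For orientation, a minimal sketch of the client call that reaches this code path.
// The mutation and field names are assumptions inferred from the input class bound
// above (CancelIngestionExecutionRequestInput), not quoted from the schema:
//
//   mutation {
//     cancelIngestionExecutionRequest(input: {
//       ingestionSourceUrn: "urn:li:dataHubIngestionSource:example-source"
//       executionRequestUrn: "urn:li:dataHubExecutionRequest:example-request"
//     })
//   }
//
// The resolver answers it by writing an ExecutionRequestSignal aspect with signal
// "KILL" onto the execution request urn, as built below.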
+ final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); + execSignal.setSignal( + KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. + execSignal.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execSignal.setCreatedAt( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(input.getExecutionRequestUrn()), + EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, + execSignal); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to submit cancel signal %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java index ea20b837e0a1f..8ef5447cd9433 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; @@ -30,15 +34,9 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ -public class CreateIngestionExecutionRequestResolver implements DataFetcher<CompletableFuture<String>> { +/** Creates an on-demand ingestion execution request. 
*/ +public class CreateIngestionExecutionRequestResolver + implements DataFetcher<CompletableFuture<String>> { private static final String RUN_INGEST_TASK_NAME = "RUN_INGEST"; private static final String MANUAL_EXECUTION_SOURCE_NAME = "MANUAL_INGESTION_SOURCE"; @@ -49,7 +47,8 @@ public class CreateIngestionExecutionRequestResolver implements DataFetcher<Comp private final EntityClient _entityClient; private final IngestionConfiguration _ingestionConfiguration; - public CreateIngestionExecutionRequestResolver(final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { + public CreateIngestionExecutionRequestResolver( + final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { _entityClient = entityClient; _ingestionConfiguration = ingestionConfiguration; } @@ -58,86 +57,108 @@ public CreateIngestionExecutionRequestResolver(final EntityClient entityClient, public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final CreateIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - // Fetch the original ingestion source - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map<Urn, EntityResponse> response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); - - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - - if (!ingestionSourceInfo.getConfig().hasRecipe()) { - throw new DataHubGraphQLException( - String.format("Failed to find valid ingestion source with urn %s. Missing recipe", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - // Build the arguments map. - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task - execInput.setSource( - new ExecutionRequestSource().setType(MANUAL_EXECUTION_SOURCE_NAME).setIngestionSource(ingestionSourceUrn)); - execInput.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map<String, String> arguments = new HashMap<>(); - String recipe = ingestionSourceInfo.getConfig().getRecipe(); - recipe = injectRunId(recipe, executionRequestUrn.toString()); - recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); - arguments.put(RECIPE_ARG_NAME, recipe); - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().hasVersion() - ? 
ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion() - ); - if (ingestionSourceInfo.getConfig().hasVersion()) { - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); - } - String debugMode = "false"; - if (ingestionSourceInfo.getConfig().hasDebugMode()) { - debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; - } - if (ingestionSourceInfo.getConfig().hasExtraArgs()) { - arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final CreateIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + // Fetch the original ingestion source + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map<Urn, EntityResponse> response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + + if (!ingestionSourceInfo.getConfig().hasRecipe()) { + throw new DataHubGraphQLException( + String.format( + "Failed to find valid ingestion source with urn %s. Missing recipe", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + // Build the arguments map. + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task + execInput.setSource( + new ExecutionRequestSource() + .setType(MANUAL_EXECUTION_SOURCE_NAME) + .setIngestionSource(ingestionSourceUrn)); + execInput.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map<String, String> arguments = new HashMap<>(); + String recipe = ingestionSourceInfo.getConfig().getRecipe(); + recipe = injectRunId(recipe, executionRequestUrn.toString()); + recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); + arguments.put(RECIPE_ARG_NAME, recipe); + arguments.put( + VERSION_ARG_NAME, + ingestionSourceInfo.getConfig().hasVersion() + ? ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); + if (ingestionSourceInfo.getConfig().hasVersion()) { + arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); + } + String debugMode = "false"; + if (ingestionSourceInfo.getConfig().hasDebugMode()) { + debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? 
"true" : "false"; + } + if (ingestionSourceInfo.getConfig().hasExtraArgs()) { + arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + } + arguments.put(DEBUG_MODE_ARG_NAME, debugMode); + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new ingestion execution request %s", input), e); + } } - arguments.put(DEBUG_MODE_ARG_NAME, debugMode); - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, - EXECUTION_REQUEST_ENTITY_NAME, EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new ingestion execution request %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Injects an override run id into a recipe for tracking purposes. Any existing run id will be overwritten. + * Injects an override run id into a recipe for tracking purposes. Any existing run id will be + * overwritten. * - * TODO: Determine if this should be handled in the executor itself. + * <p>TODO: Determine if this should be handled in the executor itself. * * @param runId the run id to place into the recipe * @return a modified recipe JSON string @@ -149,7 +170,8 @@ private String injectRunId(final String originalJson, final String runId) { return obj.toString(); } catch (JSONException e) { // This should ideally never be hit. 
- throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided."); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided."); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java index 1886db62ae450..2505ce28c5c2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -10,26 +13,19 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.execution.ExecutionRequestInput; import com.linkedin.execution.ExecutionRequestSource; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.IngestionUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ +/** Creates an on-demand ingestion execution request. */ public class CreateTestConnectionRequestResolver implements DataFetcher<CompletableFuture<String>> { private static final String TEST_CONNECTION_TASK_NAME = "TEST_CONNECTION"; @@ -41,7 +37,8 @@ public class CreateTestConnectionRequestResolver implements DataFetcher<Completa private final EntityClient _entityClient; private final IngestionConfiguration _ingestionConfiguration; - public CreateTestConnectionRequestResolver(final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { + public CreateTestConnectionRequestResolver( + final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { _entityClient = entityClient; _ingestionConfiguration = ingestionConfiguration; } @@ -50,41 +47,54 @@ public CreateTestConnectionRequestResolver(final EntityClient entityClient, fina public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - final CreateTestConnectionRequestInput input = - bindArgument(environment.getArgument("input"), CreateTestConnectionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(TEST_CONNECTION_TASK_NAME); - execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); - execInput.setExecutorId(DEFAULT_EXECUTOR_ID); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map<String, String> arguments = new HashMap<>(); - arguments.put(RECIPE_ARG_NAME, IngestionUtils.injectPipelineName(input.getRecipe(), executionRequestUrn.toString())); - if (input.getVersion() != null) { - arguments.put(VERSION_ARG_NAME, input.getVersion()); - } - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, EXECUTION_REQUEST_ENTITY_NAME, - EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new test ingestion connection request %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + final CreateTestConnectionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateTestConnectionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(TEST_CONNECTION_TASK_NAME); + execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); + execInput.setExecutorId(DEFAULT_EXECUTOR_ID); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map<String, String> arguments = new HashMap<>(); + arguments.put( + RECIPE_ARG_NAME, + IngestionUtils.injectPipelineName( + input.getRecipe(), executionRequestUrn.toString())); + if (input.getVersion() != null) { + arguments.put(VERSION_ARG_NAME, input.getVersion()); + } + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to create new test ingestion connection request %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 
8880330d63495..722ffe3aba6b8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -19,12 +19,10 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Retrieves an Ingestion Execution Request by primary key (urn). - */ +/** Retrieves an Ingestion Execution Request by primary key (urn). */ @Slf4j -public class GetIngestionExecutionRequestResolver implements DataFetcher<CompletableFuture<ExecutionRequest>> { +public class GetIngestionExecutionRequestResolver + implements DataFetcher<CompletableFuture<ExecutionRequest>> { private final EntityClient _entityClient; @@ -33,32 +31,40 @@ public GetIngestionExecutionRequestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ExecutionRequest> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ExecutionRequest> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch specific execution request - final Urn urn = Urn.createFromString(urnStr); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No execution request found - throw new DataHubGraphQLException(String.format("Failed to find Execution Request with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve execution request", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch specific execution request + final Urn urn = Urn.createFromString(urnStr); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No execution request found + throw new DataHubGraphQLException( + String.format("Failed to find Execution Request with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Execution request found + return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve execution request", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index c72f273a9027e..01100a24d6b15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -29,11 +29,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. */ @Slf4j -public class IngestionSourceExecutionRequestsResolver implements DataFetcher<CompletableFuture<IngestionSourceExecutionRequests>> { +public class IngestionSourceExecutionRequestsResolver + implements DataFetcher<CompletableFuture<IngestionSourceExecutionRequests>> { private static final String INGESTION_SOURCE_FIELD_NAME = "ingestionSource"; private static final String REQUEST_TIME_MS_FIELD_NAME = "requestTimeMs"; @@ -45,64 +44,77 @@ public IngestionSourceExecutionRequestsResolver(final EntityClient entityClient) } @Override - public CompletableFuture<IngestionSourceExecutionRequests> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<IngestionSourceExecutionRequests> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final String urn = ((IngestionSource) environment.getSource()).getUrn(); - final Integer start = environment.getArgument("start") != null ? environment.getArgument("start") : 0; - final Integer count = environment.getArgument("count") != null ? environment.getArgument("count") : 10; + final Integer start = + environment.getArgument("start") != null ? environment.getArgument("start") : 0; + final Integer count = + environment.getArgument("count") != null ? environment.getArgument("count") : 10; - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + try { - try { + // 1. Fetch the related edges + final Criterion filterCriterion = + new Criterion() + .setField(INGESTION_SOURCE_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(urn); - // 1. 
Fetch the related edges - final Criterion filterCriterion = new Criterion() - .setField(INGESTION_SOURCE_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(urn); + final SearchResult executionsSearchResult = + _entityClient.filter( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + new SortCriterion() + .setField(REQUEST_TIME_MS_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication()); - final SearchResult executionsSearchResult = _entityClient.filter( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - new SortCriterion().setField(REQUEST_TIME_MS_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication() - ); + // 2. Batch fetch the related ExecutionRequests + final Set<Urn> relatedExecRequests = + executionsSearchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); - // 2. Batch fetch the related ExecutionRequests - final Set<Urn> relatedExecRequests = executionsSearchResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + relatedExecRequests, + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - relatedExecRequests, - ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - - // 3. Map the GMS ExecutionRequests into GraphQL Execution Requests - final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); - result.setStart(executionsSearchResult.getFrom()); - result.setCount(executionsSearchResult.getPageSize()); - result.setTotal(executionsSearchResult.getNumEntities()); - result.setExecutionRequests(IngestionResolverUtils.mapExecutionRequests( - executionsSearchResult.getEntities() - .stream() - .map(searchResult -> entities.get(searchResult.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - )); - return result; - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve executions associated with ingestion source with urn %s", urn), e); - } - }); + // 3. 
Map the GMS ExecutionRequests into GraphQL Execution Requests + final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); + result.setStart(executionsSearchResult.getFrom()); + result.setCount(executionsSearchResult.getPageSize()); + result.setTotal(executionsSearchResult.getNumEntities()); + result.setExecutionRequests( + IngestionResolverUtils.mapExecutionRequests( + executionsSearchResult.getEntities().stream() + .map(searchResult -> entities.get(searchResult.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve executions associated with ingestion source with urn %s", + urn), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 05fcacf7c0946..0b909dee51374 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -7,11 +9,8 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class RollbackIngestionResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; @@ -20,33 +19,36 @@ public RollbackIngestionResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - final RollbackIngestionInput input = bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); - final String runId = input.getRunId(); + final RollbackIngestionInput input = + bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); + final String runId = input.getRunId(); - rollbackIngestion(runId, context); - return true; - }); + rollbackIngestion(runId, context); + return true; + }); } - public CompletableFuture<Boolean> rollbackIngestion(final String runId, final QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to rollback ingestion execution", e); - } - }); - + public CompletableFuture<Boolean> rollbackIngestion( + final String runId, final QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.rollbackIngestion(runId, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to rollback ingestion execution", e); + } + }); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index e1745031d9dae..577780e53ce86 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -17,23 +21,16 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the MANAGE_SECRETS privilege. + * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. 
*/ public class CreateSecretResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; private final SecretService _secretService; - public CreateSecretResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + public CreateSecretResolver(final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @@ -41,36 +38,46 @@ public CreateSecretResolver( @Override public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateSecretInput input = bindArgument(environment.getArgument("input"), CreateSecretInput.class); + final CreateSecretInput input = + bindArgument(environment.getArgument("input"), CreateSecretInput.class); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { - if (IngestionAuthUtils.canManageSecrets(context)) { + try { + // Create the Ingestion source key --> use the display name as a unique id to ensure + // it's not duplicated. + final DataHubSecretKey key = new DataHubSecretKey(); + key.setId(input.getName()); - try { - // Create the Ingestion source key --> use the display name as a unique id to ensure it's not duplicated. - final DataHubSecretKey key = new DataHubSecretKey(); - key.setId(input.getName()); + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Secret already exists!"); + } - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Secret already exists!"); - } - - // Create the secret value. - final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + // Create the secret value. + final DataHubSecretValue value = new DataHubSecretValue(); + value.setName(input.getName()); + value.setValue(_secretService.encrypt(input.getValue())); + value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + value.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, SECRETS_ENTITY_NAME, - SECRET_VALUE_ASPECT_NAME, value); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new secret with name %s", input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, SECRETS_ENTITY_NAME, SECRET_VALUE_ASPECT_NAME, value); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new secret with name %s", input.getName()), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java index b35931420c078..228d5a094cdef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. - */ +/** Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. */ public class DeleteSecretResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; @@ -27,15 +24,19 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageSecrets(context)) { final String secretUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(secretUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return secretUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against secret with urn %s", secretUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return secretUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against secret with urn %s", secretUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 85c6c6754470d..67564aa721bda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -23,11 +25,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / decryption. - * Requires the MANAGE_SECRETS privilege. + * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / + * decryption. Requires the MANAGE_SECRETS privilege. */ public class GetSecretValuesResolver implements DataFetcher<CompletableFuture<List<SecretValue>>> { @@ -35,60 +35,67 @@ public class GetSecretValuesResolver implements DataFetcher<CompletableFuture<Li private final SecretService _secretService; public GetSecretValuesResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @Override - public CompletableFuture<List<SecretValue>> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<List<SecretValue>> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final GetSecretValuesInput input = bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); + final GetSecretValuesInput input = + bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch secrets - final Set<Urn> urns = input.getSecrets() - .stream() - .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) - .collect(Collectors.toSet()); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch secrets + final Set<Urn> urns = + input.getSecrets().stream() + .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) + .collect(Collectors.toSet()); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(urns), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>(urns), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); - // Now for each secret, decrypt and return the value. If no secret was found, then we will simply omit it from the list. - // There is no ordering guarantee for the list. 
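// Sketch of the shape each entry takes in the mapping below (values illustrative):
//   aspect -> DataHubSecretValue{name: "MY_SECRET", value: "<Base64 AES ciphertext>"}
//   mapped -> SecretValue("MY_SECRET", "<decrypted plaintext>")
// Entities missing the value aspect map to null and are filtered out, so callers
// cannot assume a one-to-one correspondence with the requested urns. Decryption is
// delegated to decryptSecret(...), which presumably wraps _secretService.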
- return entities.values() - .stream() - .map(entity -> { - EnvelopedAspect aspect = entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); - if (aspect != null) { - // Aspect is present. - final DataHubSecretValue secretValue = new DataHubSecretValue(aspect.getValue().data()); - // Now decrypt the encrypted secret. - final String decryptedSecretValue = decryptSecret(secretValue.getValue()); - return new SecretValue(secretValue.getName(), decryptedSecretValue); - } else { - // No secret exists - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + // Now for each secret, decrypt and return the value. If no secret was found, then we + // will simply omit it from the list. + // There is no ordering guarantee for the list. + return entities.values().stream() + .map( + entity -> { + EnvelopedAspect aspect = + entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); + if (aspect != null) { + // Aspect is present. + final DataHubSecretValue secretValue = + new DataHubSecretValue(aspect.getValue().data()); + // Now decrypt the encrypted secret. + final String decryptedSecretValue = decryptSecret(secretValue.getValue()); + return new SecretValue(secretValue.getName(), decryptedSecretValue); + } else { + // No secret exists + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private String decryptSecret(final String encryptedSecret) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index b0d8c9fd34303..eb054295af09b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; @@ -31,13 +34,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. - */ +/** Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. 
*/ @Slf4j public class ListSecretsResolver implements DataFetcher<CompletableFuture<ListSecretsResult>> { @@ -52,55 +49,66 @@ public ListSecretsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListSecretsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListSecretsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final ListSecretsInput input = bindArgument(environment.getArgument("input"), ListSecretsInput.class); + final ListSecretsInput input = + bindArgument(environment.getArgument("input"), ListSecretsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all secrets - final SearchResult gmsResult = _entityClient.search( - Constants.SECRETS_ENTITY_NAME, - query, - null, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - // Then, resolve all secrets - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); - - // Now that we have entities we can bind this to a result. - final ListSecretsResult result = new ListSecretsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setSecrets(mapEntities(gmsResult.getEntities().stream() - .map(entity -> entities.get(entity.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()))); - return result; - - } catch (Exception e) { - throw new RuntimeException("Failed to list secrets", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all secrets + final SearchResult gmsResult = + _entityClient.search( + Constants.SECRETS_ENTITY_NAME, + query, + null, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + + // Then, resolve all secrets + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); + + // Now that we have entities we can bind this to a result. 
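// The two-phase pattern used here (and by the other list resolvers) is: search
// returns urns in ranked order, batchGetV2 hydrates their aspects keyed by urn, and
// the mapping re-walks the search order so paging stays stable. Condensed sketch,
// reusing only the _entityClient calls visible above; urnsOf(...) is a hypothetical
// helper standing in for the stream over SearchEntity::getEntity:
//
//   SearchResult page = _entityClient.search(SECRETS_ENTITY_NAME, query, null, sort,
//       start, count, auth, new SearchFlags().setFulltext(true));
//   Map<Urn, EntityResponse> hydrated = _entityClient.batchGetV2(
//       SECRETS_ENTITY_NAME, urnsOf(page), ImmutableSet.of(SECRET_VALUE_ASPECT_NAME), auth);
//   page.getEntities().forEach(e -> mapEntity(hydrated.get(e.getEntity()))); // mapEntity: hypothetical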
+ final ListSecretsResult result = new ListSecretsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setSecrets( + mapEntities( + gmsResult.getEntities().stream() + .map(entity -> entities.get(entity.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + + } catch (Exception e) { + throw new RuntimeException("Failed to list secrets", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private List<Secret> mapEntities(final List<EntityResponse> entities) { @@ -113,7 +121,8 @@ private List<Secret> mapEntities(final List<EntityResponse> entities) { final EnvelopedAspect envelopedInfo = aspects.get(Constants.SECRET_VALUE_ASPECT_NAME); // Bind into a strongly typed object. - final DataHubSecretValue secretValue = new DataHubSecretValue(envelopedInfo.getValue().data()); + final DataHubSecretValue secretValue = + new DataHubSecretValue(envelopedInfo.getValue().data()); // Map using the strongly typed object. results.add(mapSecretValue(entityUrn, secretValue)); @@ -128,4 +137,4 @@ private Secret mapSecretValue(final Urn urn, final DataHubSecretValue value) { result.setDescription(value.getDescription(GetMode.NULL)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java index e510a9fff80aa..225a5801adec9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java @@ -8,10 +8,7 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - -/** - * Utility methods to encrypt and decrypt DataHub secrets. - */ +/** Utility methods to encrypt and decrypt DataHub secrets. 
*/ public class SecretUtils { static String encrypt(String value, String secret) { @@ -30,7 +27,8 @@ static String encrypt(String value, String secret) { } Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding"); cipher.init(Cipher.ENCRYPT_MODE, secretKey); - return Base64.getEncoder().encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); + return Base64.getEncoder() + .encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); } catch (Exception e) { throw new RuntimeException("Failed to encrypt value using provided secret!"); } @@ -59,6 +57,5 @@ static String decrypt(String encryptedValue, String secret) { return null; } - private SecretUtils() { - } + private SecretUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java index 38050331318ca..0666fab52dd4e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java @@ -9,10 +9,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - /** - * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires MANAGE_INGESTION - * privilege. + * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires + * MANAGE_INGESTION privilege. */ public class DeleteIngestionSourceResolver implements DataFetcher<CompletableFuture<String>> { @@ -28,15 +27,21 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageIngestion(context)) { final String ingestionSourceUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(ingestionSourceUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return ingestionSourceUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against ingestion source with urn %s", ingestionSourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return ingestionSourceUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against ingestion source with urn %s", + ingestionSourceUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java index 562d06b79d2c7..3b6790212ba23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java @@ -19,9 +19,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -/** - * Gets a particular Ingestion Source by urn. - */ +/** Gets a particular Ingestion Source by urn. */ @Slf4j public class GetIngestionSourceResolver implements DataFetcher<CompletableFuture<IngestionSource>> { @@ -32,31 +30,37 @@ public GetIngestionSourceResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<IngestionSource> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<IngestionSource> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = Urn.createFromString(urnStr); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No ingestion source found - throw new DataHubGraphQLException(String.format("Failed to find Ingestion Source with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Ingestion source found - return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve ingestion source", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = Urn.createFromString(urnStr); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No ingestion source found + throw new DataHubGraphQLException( + String.format("Failed to find Ingestion Source with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Ingestion source found + return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve ingestion source", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index d019473606e58..51c9e30aadcce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -26,12 +28,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. - */ -public class ListIngestionSourcesResolver implements DataFetcher<CompletableFuture<ListIngestionSourcesResult>> { +/** Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. */ +public class ListIngestionSourcesResolver + implements DataFetcher<CompletableFuture<ListIngestionSourcesResult>> { private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,57 +43,74 @@ public ListIngestionSourcesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListIngestionSourcesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListIngestionSourcesResult> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { - final ListIngestionSourcesInput input = bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); + final ListIngestionSourcesInput input = + bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List<FacetFilterInput> filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + final List<FacetFilterInput> filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all ingestion sources Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.INGESTION_SOURCE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - null, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all ingestion sources Urns. 
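
A few lines below, the fetched sources are ordered newest-first by negating the created-time sort key (Comparator.comparingLong(s -> -...getTime())). That works for ordinary epoch-millis timestamps, but Comparator.comparingLong(...).reversed() expresses the same ordering without the Long.MIN_VALUE negation edge case. A small sketch with a hypothetical Source record:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class DescendingSortSketch {
  record Source(String name, long createdAtMillis) {} // hypothetical stand-in entity

  public static void main(String[] args) {
    List<Source> sources = new ArrayList<>(List.of(
        new Source("a", 100L), new Source("b", 300L), new Source("c", 200L)));
    // Same newest-first ordering as the negated sort key, without the negation
    // (which misbehaves only at Long.MIN_VALUE, but reads less directly).
    sources.sort(Comparator.comparingLong(Source::createdAtMillis).reversed());
    sources.forEach(s -> System.out.println(s.name() + " " + s.createdAtMillis()));
  }
}
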
+ final SearchResult gmsResult = + _entityClient.search( + Constants.INGESTION_SOURCE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + null, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, resolve all ingestion sources - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), - context.getAuthentication()); + // Then, resolve all ingestion sources + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), + context.getAuthentication()); - final Collection<EntityResponse> sortedEntities = entities.values() - .stream() - .sorted(Comparator.comparingLong(s -> -s.getAspects().get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME).getCreated().getTime())) - .collect(Collectors.toList()); + final Collection<EntityResponse> sortedEntities = + entities.values().stream() + .sorted( + Comparator.comparingLong( + s -> + -s.getAspects() + .get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME) + .getCreated() + .getTime())) + .collect(Collectors.toList()); - // Now that we have entities we can bind this to a result. - final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setIngestionSources(IngestionResolverUtils.mapIngestionSources(sortedEntities)); - return result; + // Now that we have entities we can bind this to a result. + final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setIngestionSources( + IngestionResolverUtils.mapIngestionSources(sortedEntities)); + return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list ingestion sources", e); - } - }); + } catch (Exception e) { + throw new RuntimeException("Failed to list ingestion sources", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java index 68e334bd976f8..6194452e4b6fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -19,23 +23,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; +import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. - */ +/** Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. */ @Slf4j public class UpsertIngestionSourceResolver implements DataFetcher<CompletableFuture<String>> { @@ -49,46 +45,60 @@ public UpsertIngestionSourceResolver(final EntityClient entityClient) { public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final Optional<String> ingestionSourceUrn = Optional.ofNullable(environment.getArgument("urn")); - final UpdateIngestionSourceInput input = bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); - - // Create the policy info. - final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); - final MetadataChangeProposal proposal; - if (ingestionSourceUrn.isPresent()) { - // Update existing ingestion source - try { - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(ingestionSourceUrn.get()), INGESTION_INFO_ASPECT_NAME, info); - } catch (URISyntaxException e) { - throw new DataHubGraphQLException( - String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), - DataHubGraphQLErrorCode.BAD_REQUEST); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final Optional<String> ingestionSourceUrn = + Optional.ofNullable(environment.getArgument("urn")); + final UpdateIngestionSourceInput input = + bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); + + // Create the policy info. 
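
The branch that follows implements the upsert: an explicit urn means update-in-place, while no urn means create under a freshly generated UUID key. A compact sketch of that decision, with a hypothetical Proposal record standing in for MetadataChangeProposal and an illustrative urn prefix:

import java.util.Optional;
import java.util.UUID;

public class UpsertKeySketch {
  record Proposal(String urn, String payload) {} // hypothetical stand-in for MetadataChangeProposal

  // Mirrors the branch below: an explicit urn means "update in place";
  // no urn means "create", keyed by a freshly generated UUID.
  static Proposal buildProposal(Optional<String> existingUrn, String payload) {
    String urn = existingUrn.orElseGet(
        () -> "urn:li:dataHubIngestionSource:" + UUID.randomUUID()); // prefix shown is illustrative
    return new Proposal(urn, payload);
  }

  public static void main(String[] args) {
    System.out.println(buildProposal(Optional.empty(), "info")); // create path
    System.out.println(buildProposal(Optional.of("urn:li:dataHubIngestionSource:abc"), "info")); // update path
  }
}
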
+ final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); + final MetadataChangeProposal proposal; + if (ingestionSourceUrn.isPresent()) { + // Update existing ingestion source + try { + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(ingestionSourceUrn.get()), + INGESTION_INFO_ASPECT_NAME, + info); + } catch (URISyntaxException e) { + throw new DataHubGraphQLException( + String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } else { + // Create new ingestion source + // Since we are creating a new Ingestion Source, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); + key.setId(uuidStr); + proposal = + buildMetadataChangeProposalWithKey( + key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); + } + + try { + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform update against ingestion source with urn %s", + input.toString()), + e); + } } - } else { - // Create new ingestion source - // Since we are creating a new Ingestion Source, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); - key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); - } - - try { - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against ingestion source with urn %s", input.toString()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } - private DataHubIngestionSourceInfo mapIngestionSourceInfo(final UpdateIngestionSourceInput input) { + private DataHubIngestionSourceInfo mapIngestionSourceInfo( + final UpdateIngestionSourceInput input) { final DataHubIngestionSourceInfo result = new DataHubIngestionSourceInfo(); result.setType(input.getType()); result.setName(input.getName()); @@ -113,15 +123,17 @@ private DataHubIngestionSourceConfig mapConfig(final UpdateIngestionSourceConfig result.setDebugMode(input.getDebugMode()); } if (input.getExtraArgs() != null) { - Map<String, String> extraArgs = input.getExtraArgs() - .stream() - .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + Map<String, String> extraArgs = + input.getExtraArgs().stream() + .collect( + Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); result.setExtraArgs(new StringMap(extraArgs)); } return result; } - private DataHubIngestionSourceSchedule mapSchedule(final UpdateIngestionSourceScheduleInput input) { + private DataHubIngestionSourceSchedule mapSchedule( + final UpdateIngestionSourceScheduleInput input) { final DataHubIngestionSourceSchedule result = new DataHubIngestionSourceSchedule(); result.setInterval(input.getInterval()); result.setTimezone(input.getTimezone()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index ea61b5e258d8b..06bad27e27062 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -32,10 +32,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -/** - * GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job - */ -public class DataJobRunsResolver implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { +/** GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job */ +public class DataJobRunsResolver + implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { private static final String PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME = "parentTemplate"; private static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; @@ -48,74 +47,76 @@ public DataJobRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture<DataProcessInstanceResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
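
Every resolver in this patch wraps its blocking entity-client calls the same way: CompletableFuture.supplyAsync plus a try/catch that rethrows as a RuntimeException so GraphQL surfaces the failure. A self-contained sketch of that wrapper, with a hypothetical blocking Client interface:

import java.util.concurrent.CompletableFuture;

public class AsyncResolverSketch {
  interface Client { String fetch(String urn) throws Exception; } // hypothetical blocking call

  // Same shape as the resolvers in this patch: run the blocking call off-thread
  // and translate any checked exception into a RuntimeException for GraphQL.
  static CompletableFuture<String> resolve(Client client, String urn) {
    return CompletableFuture.supplyAsync(
        () -> {
          try {
            return client.fetch(urn);
          } catch (Exception e) {
            throw new RuntimeException(String.format("Failed to fetch %s", urn), e);
          }
        });
  }

  public static void main(String[] args) {
    resolve(urn -> "ok:" + urn, "urn:li:dataJob:example")
        .thenAccept(System.out::println)
        .join();
  }
}

Note that supplyAsync without an explicit executor runs on the common ForkJoinPool; for heavily blocking I/O a dedicated executor is the usual alternative.
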
+ final Filter filter = buildTaskRunsEntityFilter(entityUrn); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List<Urn> dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List<Urn> dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + // Step 2: Hydrate the incident entities + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 2: Hydrate the incident entities - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 3: Map GMS incident model to GraphQL model + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List<DataProcessInstance> dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 3: Map GMS incident model to GraphQL model - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List<DataProcessInstance> dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); - - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } private Filter buildTaskRunsEntityFilter(final String entityUrn) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + CriterionArray 
array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 3ecf396f808b3..d595b1e513d75 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -33,11 +33,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of task runs associated with a Dataset. - */ -public class EntityRunsResolver implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { +/** GraphQL Resolver used for fetching the list of task runs associated with a Dataset. */ +public class EntityRunsResolver + implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { private static final String INPUT_FIELD_NAME = "inputs.keyword"; private static final String OUTPUT_FIELD_NAME = "outputs.keyword"; @@ -51,76 +49,84 @@ public EntityRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture<DataProcessInstanceResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); - final RelationshipDirection direction = RelationshipDirection.valueOf(environment.getArgumentOrDefault("direction", - RelationshipDirection.INCOMING.toString())); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final RelationshipDirection direction = + RelationshipDirection.valueOf( + environment.getArgumentOrDefault( + "direction", RelationshipDirection.INCOMING.toString())); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List<Urn> dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! 
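
The buildTaskRunsEntityFilter helper called just below (like its counterpart in DataJobRunsResolver above) assembles its filter in disjunctive normal form: a Filter is an OR over groups, each group an AND over field/value criteria, even when there is only one criterion. A sketch of that shape with hypothetical stand-in records:

import java.util.List;

public class FilterShapeSketch {
  // Hypothetical stand-ins for Criterion / ConjunctiveCriterion / Filter:
  // a filter is an OR over groups, each group an AND over field == value criteria.
  record Criterion(String field, String value) {}
  record AndGroup(List<Criterion> and) {}
  record Filter(List<AndGroup> or) {}

  static Filter fieldEquals(String field, String value) {
    return new Filter(List.of(new AndGroup(List.of(new Criterion(field, value)))));
  }

  public static void main(String[] args) {
    // Same shape the helper builds: one OR branch holding a single criterion.
    System.out.println(fieldEquals("inputs.keyword", "urn:li:dataset:example"));
  }
}
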
+ // We use the search index so that we can easily sort by the last updated time. + final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List<Urn> dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the incident entities - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the incident entities + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS instance model to GraphQL model - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List<DataProcessInstance> dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS instance model to GraphQL model + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List<DataProcessInstance> dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } - private Filter buildTaskRunsEntityFilter(final String entityUrn, final RelationshipDirection direction) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(direction.equals(RelationshipDirection.INCOMING) ? INPUT_FIELD_NAME : OUTPUT_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + private Filter buildTaskRunsEntityFilter( + final String entityUrn, final RelationshipDirection direction) { + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField( + direction.equals(RelationshipDirection.INCOMING) + ? 
INPUT_FIELD_NAME + : OUTPUT_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index 8fc3a60900662..a0caef20a4755 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.lineage; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -16,10 +18,6 @@ import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -29,8 +27,9 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,11 +42,13 @@ public class UpdateLineageResolver implements DataFetcher<CompletableFuture<Bool public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - final UpdateLineageInput input = bindArgument(environment.getArgument("input"), UpdateLineageInput.class); + final UpdateLineageInput input = + bindArgument(environment.getArgument("input"), UpdateLineageInput.class); final List<LineageEdge> edgesToAdd = input.getEdgesToAdd(); final List<LineageEdge> edgesToRemove = input.getEdgesToRemove(); - // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage for each entity + // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage + // for each entity checkPrivileges(context, edgesToAdd, edgesToRemove); // organize data to make updating lineage cleaner @@ -57,77 +58,118 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw downstreamUrns.addAll(downstreamToUpstreamsToAdd.keySet()); downstreamUrns.addAll(downstreamToUpstreamsToRemove.keySet()); - return CompletableFuture.supplyAsync(() -> { - // build MCP for every downstreamUrn - for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); - } - - final List<Urn> upstreamUrnsToAdd = downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); - final 
List<Urn> upstreamUrnsToRemove = downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); - try { - switch (downstreamUrn.getEntityType()) { - case Constants.DATASET_ENTITY_NAME: - // need to filter out dataJobs since this is a valid lineage edge, but will be handled in the downstream direction for DataJobInputOutputs - final List<Urn> filteredUpstreamUrnsToAdd = filterOutDataJobUrns(upstreamUrnsToAdd); - final List<Urn> filteredUpstreamUrnsToRemove = filterOutDataJobUrns(upstreamUrnsToRemove); - - _lineageService.updateDatasetLineage(downstreamUrn, filteredUpstreamUrnsToAdd, filteredUpstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.CHART_ENTITY_NAME: - _lineageService.updateChartLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DASHBOARD_ENTITY_NAME: - _lineageService.updateDashboardLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DATA_JOB_ENTITY_NAME: - _lineageService.updateDataJobUpstreamLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - default: + return CompletableFuture.supplyAsync( + () -> { + // build MCP for every downstreamUrn + for (Urn downstreamUrn : downstreamUrns) { + if (!_entityService.exists(downstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); + } + + final List<Urn> upstreamUrnsToAdd = + downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); + final List<Urn> upstreamUrnsToRemove = + downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); + try { + switch (downstreamUrn.getEntityType()) { + case Constants.DATASET_ENTITY_NAME: + // need to filter out dataJobs since this is a valid lineage edge, but will be + // handled in the downstream direction for DataJobInputOutputs + final List<Urn> filteredUpstreamUrnsToAdd = + filterOutDataJobUrns(upstreamUrnsToAdd); + final List<Urn> filteredUpstreamUrnsToRemove = + filterOutDataJobUrns(upstreamUrnsToRemove); + + _lineageService.updateDatasetLineage( + downstreamUrn, + filteredUpstreamUrnsToAdd, + filteredUpstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.CHART_ENTITY_NAME: + _lineageService.updateChartLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DASHBOARD_ENTITY_NAME: + _lineageService.updateDashboardLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DATA_JOB_ENTITY_NAME: + _lineageService.updateDataJobUpstreamLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + default: + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", downstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", downstreamUrn), e); - } - } - - Map<Urn, List<Urn>> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); - Map<Urn, List<Urn>> upstreamToDownstreamsToRemove = getUpstreamToDownstreamMap(edgesToRemove); - Set<Urn> upstreamUrns = new HashSet<>(); - upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); - 
upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); - - // build MCP for upstreamUrn if necessary - for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); - } - - final List<Urn> downstreamUrnsToAdd = upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); - final List<Urn> downstreamUrnsToRemove = upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); - try { - if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - // need to filter out dataJobs since this is a valid lineage edge, but is handled in the upstream direction for DataJobs - final List<Urn> filteredDownstreamUrnsToAdd = filterOutDataJobUrns(downstreamUrnsToAdd); - final List<Urn> filteredDownstreamUrnsToRemove = filterOutDataJobUrns(downstreamUrnsToRemove); - - _lineageService.updateDataJobDownstreamLineage( - upstreamUrn, filteredDownstreamUrnsToAdd, filteredDownstreamUrnsToRemove, actor, context.getAuthentication() - ); + + Map<Urn, List<Urn>> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); + Map<Urn, List<Urn>> upstreamToDownstreamsToRemove = + getUpstreamToDownstreamMap(edgesToRemove); + Set<Urn> upstreamUrns = new HashSet<>(); + upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); + upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + + // build MCP for upstreamUrn if necessary + for (Urn upstreamUrn : upstreamUrns) { + if (!_entityService.exists(upstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); + } + + final List<Urn> downstreamUrnsToAdd = + upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); + final List<Urn> downstreamUrnsToRemove = + upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); + try { + if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + // need to filter out dataJobs since this is a valid lineage edge, but is handled in + // the upstream direction for DataJobs + final List<Urn> filteredDownstreamUrnsToAdd = + filterOutDataJobUrns(downstreamUrnsToAdd); + final List<Urn> filteredDownstreamUrnsToRemove = + filterOutDataJobUrns(downstreamUrnsToRemove); + + _lineageService.updateDataJobDownstreamLineage( + upstreamUrn, + filteredDownstreamUrnsToAdd, + filteredDownstreamUrnsToRemove, + actor, + context.getAuthentication()); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", upstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", upstreamUrn), e); - } - } - return true; - }); + return true; + }); } private List<Urn> filterOutDataJobUrns(@Nonnull final List<Urn> urns) { - return urns.stream().filter( - upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME) - ).collect(Collectors.toList()); + return urns.stream() + .filter(upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); } private Map<Urn, List<Urn>> getDownstreamToUpstreamsMap(@Nonnull final List<LineageEdge> edges) { @@ -156,7 +198,10 @@ private Map<Urn, List<Urn>> getUpstreamToDownstreamMap(@Nonnull final List<Linea return upstreamToDownstreams; } - private boolean isAuthorized(@Nonnull final QueryContext context, 
@Nonnull final Urn urn, @Nonnull final DisjunctivePrivilegeGroup orPrivilegesGroup) { + private boolean isAuthorized( + @Nonnull final QueryContext context, + @Nonnull final Urn urn, + @Nonnull final DisjunctivePrivilegeGroup orPrivilegesGroup) { return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -168,39 +213,42 @@ private boolean isAuthorized(@Nonnull final QueryContext context, @Nonnull final private void checkLineageEdgePrivileges( @Nonnull final QueryContext context, @Nonnull final LineageEdge lineageEdge, - @Nonnull final DisjunctivePrivilegeGroup editLineagePrivileges - ) { + @Nonnull final DisjunctivePrivilegeGroup editLineagePrivileges) { Urn upstreamUrn = UrnUtils.getUrn(lineageEdge.getUpstreamUrn()); if (!isAuthorized(context, upstreamUrn, editLineagePrivileges)) { throw new AuthorizationException( - String.format("Unauthorized to edit %s lineage. Please contact your DataHub administrator.", upstreamUrn.getEntityType()) - ); + String.format( + "Unauthorized to edit %s lineage. Please contact your DataHub administrator.", + upstreamUrn.getEntityType())); } Urn downstreamUrn = UrnUtils.getUrn(lineageEdge.getDownstreamUrn()); if (!isAuthorized(context, downstreamUrn, editLineagePrivileges)) { throw new AuthorizationException( - String.format("Unauthorized to edit %s lineage. Please contact your DataHub administrator.", downstreamUrn.getEntityType()) - ); + String.format( + "Unauthorized to edit %s lineage. Please contact your DataHub administrator.", + downstreamUrn.getEntityType())); } } /** - * Loop over each edge to add and each edge to remove and ensure that the user has edit lineage privilege or edit entity privilege - * for every upstream and downstream urn. Throws an AuthorizationException if the actor doesn't have permissions. + * Loop over each edge to add and each edge to remove and ensure that the user has edit lineage + * privilege or edit entity privilege for every upstream and downstream urn. Throws an + * AuthorizationException if the actor doesn't have permissions. 
*/ private void checkPrivileges( @Nonnull final QueryContext context, @Nonnull final List<LineageEdge> edgesToAdd, - @Nonnull final List<LineageEdge> edgesToRemove - ) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup editLineagePrivileges = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + @Nonnull final List<LineageEdge> edgesToRemove) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup editLineagePrivileges = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java index 023686b1d10c9..7f031cb481852 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java @@ -8,21 +8,19 @@ import java.util.concurrent.CompletableFuture; import org.dataloader.DataLoader; - /** * Generic GraphQL resolver responsible for * - * 1. Generating a single input AspectLoadKey. - * 2. Resolving a single {@link Aspect}. - * + * <p>1. Generating a single input AspectLoadKey. 2. Resolving a single {@link Aspect}. 
*/ public class AspectResolver implements DataFetcher<CompletableFuture<Aspect>> { - @Override - public CompletableFuture<Aspect> get(DataFetchingEnvironment environment) { - final DataLoader<VersionedAspectKey, Aspect> loader = environment.getDataLoaderRegistry().getDataLoader("Aspect"); - final String fieldName = environment.getField().getName(); - final Long version = environment.getArgument("version"); - final String urn = ((Entity) environment.getSource()).getUrn(); - return loader.load(new VersionedAspectKey(urn, fieldName, version)); - } + @Override + public CompletableFuture<Aspect> get(DataFetchingEnvironment environment) { + final DataLoader<VersionedAspectKey, Aspect> loader = + environment.getDataLoaderRegistry().getDataLoader("Aspect"); + final String fieldName = environment.getField().getName(); + final Long version = environment.getArgument("version"); + final String urn = ((Entity) environment.getSource()).getUrn(); + return loader.load(new VersionedAspectKey(urn, fieldName, version)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index 20e0e4ae1c22a..ecf36769dfa9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -5,7 +5,6 @@ import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -21,8 +20,7 @@ public class BatchGetEntitiesResolver implements DataFetcher<CompletableFuture<L public BatchGetEntitiesResolver( final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, - final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider - ) { + final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider) { _entityTypes = entityTypes; _entitiesProvider = entitiesProvider; } @@ -32,22 +30,28 @@ public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment) final List<Entity> entities = _entitiesProvider.apply(environment); Map<EntityType, List<Entity>> entityTypeToEntities = new HashMap<>(); - entities.forEach((entity) -> { - EntityType type = entity.getType(); - List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); - entitiesList.add(entity); - entityTypeToEntities.put(type, entitiesList); - }); + entities.forEach( + (entity) -> { + EntityType type = entity.getType(); + List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); + entitiesList.add(entity); + entityTypeToEntities.put(type, entitiesList); + }); List<CompletableFuture<List<Entity>>> entitiesFutures = new ArrayList<>(); for (Map.Entry<EntityType, List<Entity>> entry : entityTypeToEntities.entrySet()) { - CompletableFuture<List<Entity>> entitiesFuture = BatchLoadUtils - .batchLoadEntitiesOfSameType(entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); + CompletableFuture<List<Entity>> entitiesFuture = + BatchLoadUtils.batchLoadEntitiesOfSameType( + entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); entitiesFutures.add(entitiesFuture); } return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0])) - 
.thenApply(v -> entitiesFutures.stream().flatMap(future -> future.join().stream()).collect(Collectors.toList())); + .thenApply( + v -> + entitiesFutures.stream() + .flatMap(future -> future.join().stream()) + .collect(Collectors.toList())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index d44f2b77029f3..c63ec819e8f6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; @@ -17,15 +19,14 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub + * graph. Lineage relationship denotes whether an entity is directly upstream or downstream of + * another entity */ @Slf4j -public class EntityLineageResultResolver implements DataFetcher<CompletableFuture<EntityLineageResult>> { +public class EntityLineageResultResolver + implements DataFetcher<CompletableFuture<EntityLineageResult>> { private final SiblingGraphService _siblingGraphService; @@ -39,38 +40,34 @@ public CompletableFuture<EntityLineageResult> get(DataFetchingEnvironment enviro final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); final LineageDirection lineageDirection = input.getDirection(); - @Nullable - final Integer start = input.getStart(); // Optional! - @Nullable - final Integer count = input.getCount(); // Optional! - @Nullable - final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! - @Nullable - final Long startTimeMillis = input.getStartTimeMillis(); // Optional! - @Nullable - final Long endTimeMillis = input.getEndTimeMillis(); // Optional! + @Nullable final Integer start = input.getStart(); // Optional! + @Nullable final Integer count = input.getCount(); // Optional! + @Nullable final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! + @Nullable final Long startTimeMillis = input.getStartTimeMillis(); // Optional! + @Nullable final Long endTimeMillis = input.getEndTimeMillis(); // Optional! com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - return mapEntityRelationships( - _siblingGraphService.getLineage( - Urn.createFromString(urn), - resolvedDirection, - start != null ? start : 0, - count != null ? count : 100, - 1, - separateSiblings != null ? 
input.getSeparateSiblings() : false, - new HashSet<>(), - startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return mapEntityRelationships( + _siblingGraphService.getLineage( + Urn.createFromString(urn), + resolvedDirection, + start != null ? start : 0, + count != null ? count : 100, + 1, + separateSiblings != null ? input.getSeparateSiblings() : false, + new HashSet<>(), + startTimeMillis, + endTimeMillis)); + } catch (URISyntaxException e) { + log.error("Failed to fetch lineage for {}", urn); + throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); + } + }); } private EntityLineageResult mapEntityRelationships( @@ -80,10 +77,10 @@ private EntityLineageResult mapEntityRelationships( result.setCount(entityLineageResult.getCount()); result.setTotal(entityLineageResult.getTotal()); result.setFiltered(entityLineageResult.getFiltered()); - result.setRelationships(entityLineageResult.getRelationships() - .stream() - .map(this::mapEntityRelationship) - .collect(Collectors.toList())); + result.setRelationships( + entityLineageResult.getRelationships().stream() + .map(this::mapEntityRelationship) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 43b28ef85f78a..223548d5d6242 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.load; -import com.linkedin.common.EntityRelationship; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; @@ -17,13 +18,11 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * GraphQL Resolver responsible for fetching relationships between entities in the DataHub graph. 
*/ -public class EntityRelationshipsResultResolver implements DataFetcher<CompletableFuture<EntityRelationshipsResult>> { +public class EntityRelationshipsResultResolver + implements DataFetcher<CompletableFuture<EntityRelationshipsResult>> { private final GraphClient _graphClient; @@ -35,24 +34,22 @@ public EntityRelationshipsResultResolver(final GraphClient graphClient) { public CompletableFuture<EntityRelationshipsResult> get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - final RelationshipsInput input = bindArgument(environment.getArgument("input"), RelationshipsInput.class); + final RelationshipsInput input = + bindArgument(environment.getArgument("input"), RelationshipsInput.class); final List<String> relationshipTypes = input.getTypes(); - final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = input.getDirection(); + final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = + input.getDirection(); final Integer start = input.getStart(); // Optional! final Integer count = input.getCount(); // Optional! - final RelationshipDirection resolvedDirection = RelationshipDirection.valueOf(relationshipDirection.toString()); - return CompletableFuture.supplyAsync(() -> mapEntityRelationships( - fetchEntityRelationships( - urn, - relationshipTypes, - resolvedDirection, - start, - count, - context.getActorUrn() - ), - resolvedDirection - )); + final RelationshipDirection resolvedDirection = + RelationshipDirection.valueOf(relationshipDirection.toString()); + return CompletableFuture.supplyAsync( + () -> + mapEntityRelationships( + fetchEntityRelationships( + urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), + resolvedDirection)); } private EntityRelationships fetchEntityRelationships( @@ -68,23 +65,28 @@ private EntityRelationships fetchEntityRelationships( private EntityRelationshipsResult mapEntityRelationships( final EntityRelationships entityRelationships, - final RelationshipDirection relationshipDirection - ) { + final RelationshipDirection relationshipDirection) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); result.setStart(entityRelationships.getStart()); result.setCount(entityRelationships.getCount()); result.setTotal(entityRelationships.getTotal()); - result.setRelationships(entityRelationships.getRelationships().stream().map(entityRelationship -> mapEntityRelationship( - com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf(relationshipDirection.name()), - entityRelationship) - ).collect(Collectors.toList())); + result.setRelationships( + entityRelationships.getRelationships().stream() + .map( + entityRelationship -> + mapEntityRelationship( + com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( + relationshipDirection.name()), + entityRelationship)) + .collect(Collectors.toList())); return result; } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final EntityRelationship entityRelationship) { - final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); + final com.linkedin.datahub.graphql.generated.EntityRelationship result = + new com.linkedin.datahub.graphql.generated.EntityRelationship(); final Entity partialEntity = 
UrnToEntityMapper.map(entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java index 6a32e0b14e313..d298c344240c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java @@ -8,31 +8,27 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; - /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + * <p>1. Retrieving a batch of input urns. 2. Resolving a batch of Entities */ public class EntityTypeBatchResolver implements DataFetcher<CompletableFuture<List<Entity>>> { - private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; - private final Function<DataFetchingEnvironment, List<Entity>> _entitiesProvider; + private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; + private final Function<DataFetchingEnvironment, List<Entity>> _entitiesProvider; - public EntityTypeBatchResolver( - final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, - final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider - ) { - _entityTypes = entityTypes; - _entitiesProvider = entitiesProvider; - } + public EntityTypeBatchResolver( + final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, + final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider) { + _entityTypes = entityTypes; + _entitiesProvider = entitiesProvider; + } - @Override - public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment) { - final List<Entity> entities = _entitiesProvider.apply(environment); - return BatchLoadUtils.batchLoadEntitiesOfSameType(entities, _entityTypes, environment.getDataLoaderRegistry()); - } + @Override + public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment) { + final List<Entity> entities = _entitiesProvider.apply(environment); + return BatchLoadUtils.batchLoadEntitiesOfSameType( + entities, _entityTypes, environment.getDataLoaderRegistry()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java index 29d5d78e0ea96..3c285f30661bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java @@ -5,64 +5,65 @@ import com.linkedin.datahub.graphql.generated.Entity; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import org.dataloader.DataLoader; /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + * <p>1. Retrieving a single input urn. 2. 
Resolving a single Entity */ public class EntityTypeResolver implements DataFetcher<CompletableFuture<Entity>> { - private static final List<String> IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); - private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; - private final Function<DataFetchingEnvironment, Entity> _entityProvider; + private static final List<String> IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); + private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; + private final Function<DataFetchingEnvironment, Entity> _entityProvider; - public EntityTypeResolver( - final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, - final Function<DataFetchingEnvironment, Entity> entity - ) { - _entityTypes = entityTypes; - _entityProvider = entity; - } + public EntityTypeResolver( + final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, + final Function<DataFetchingEnvironment, Entity> entity) { + _entityTypes = entityTypes; + _entityProvider = entity; + } + private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { + return environment.getField().getSelectionSet().getSelections().stream() + .filter( + selection -> { + if (!(selection instanceof graphql.language.Field)) { + return true; + } + return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); + }) + .count() + == 0; + } - private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { - return environment.getField().getSelectionSet().getSelections().stream().filter(selection -> { - if (!(selection instanceof graphql.language.Field)) { - return true; - } - return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); - }).count() == 0; + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final Entity resolvedEntity = _entityProvider.apply(environment); + if (resolvedEntity == null) { + return CompletableFuture.completedFuture(null); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final Entity resolvedEntity = _entityProvider.apply(environment); - if (resolvedEntity == null) { - return CompletableFuture.completedFuture(null); - } - - final Object javaObject = _entityProvider.apply(environment); + final Object javaObject = _entityProvider.apply(environment); - if (isOnlySelectingIdentityFields(environment)) { - return CompletableFuture.completedFuture(javaObject); - } + if (isOnlySelectingIdentityFields(environment)) { + return CompletableFuture.completedFuture(javaObject); + } - final com.linkedin.datahub.graphql.types.EntityType filteredEntity = Iterables.getOnlyElement(_entityTypes.stream() + final com.linkedin.datahub.graphql.types.EntityType filteredEntity = + Iterables.getOnlyElement( + _entityTypes.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); - return loader.load(key); - } + return loader.load(key); + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java index 02a92544855a3..ee2f7c3abe97d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java @@ -3,41 +3,42 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a batch of urns. - * 2. Resolving a single {@link LoadableType}. + * <p>1. Retrieving a batch of urns. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + * <p>Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param <T> the generated GraphQL POJO corresponding to the resolved type. * @param <K> the key type for the DataLoader */ public class LoadableTypeBatchResolver<T, K> implements DataFetcher<CompletableFuture<List<T>>> { - private final LoadableType<T, K> _loadableType; - private final Function<DataFetchingEnvironment, List<K>> _keyProvider; + private final LoadableType<T, K> _loadableType; + private final Function<DataFetchingEnvironment, List<K>> _keyProvider; - public LoadableTypeBatchResolver(final LoadableType<T, K> loadableType, final Function<DataFetchingEnvironment, List<K>> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeBatchResolver( + final LoadableType<T, K> loadableType, + final Function<DataFetchingEnvironment, List<K>> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) { - final List<K> keys = _keyProvider.apply(environment); - if (keys == null) { - return null; - } - final DataLoader<K, T> loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.loadMany(keys); + @Override + public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) { + final List<K> keys = _keyProvider.apply(environment); + if (keys == null) { + return null; } + final DataLoader<K, T> loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.loadMany(keys); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java index 53702f9cafe8b..3868b1a35b64f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java @@ -3,40 +3,41 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + * <p>1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + * <p>Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param <T> the generated GraphQL POJO corresponding to the resolved type. * @param <K> the key type for the DataLoader */ public class LoadableTypeResolver<T, K> implements DataFetcher<CompletableFuture<T>> { - private final LoadableType<T, K> _loadableType; - private final Function<DataFetchingEnvironment, K> _keyProvider; + private final LoadableType<T, K> _loadableType; + private final Function<DataFetchingEnvironment, K> _keyProvider; - public LoadableTypeResolver(final LoadableType<T, K> loadableType, final Function<DataFetchingEnvironment, K> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeResolver( + final LoadableType<T, K> loadableType, + final Function<DataFetchingEnvironment, K> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture<T> get(DataFetchingEnvironment environment) { - final K key = _keyProvider.apply(environment); - if (key == null) { - return null; - } - final DataLoader<K, T> loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.load(key); + @Override + public CompletableFuture<T> get(DataFetchingEnvironment environment) { + final K key = _keyProvider.apply(environment); + if (key == null) { + return null; } + final DataLoader<K, T> loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java index a4867819a2401..e85eaca127d62 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.OwnerType; import com.linkedin.datahub.graphql.types.LoadableType; @@ -8,38 +9,41 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import org.dataloader.DataLoader; import java.util.stream.Collectors; -import com.google.common.collect.Iterables; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + * <p>1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. 
* - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + * <p>Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param <T> the generated GraphQL POJO corresponding to the resolved type. */ public class OwnerTypeResolver<T> implements DataFetcher<CompletableFuture<T>> { - private final List<LoadableType<?, ?>> _loadableTypes; - private final Function<DataFetchingEnvironment, OwnerType> _urnProvider; + private final List<LoadableType<?, ?>> _loadableTypes; + private final Function<DataFetchingEnvironment, OwnerType> _urnProvider; - public OwnerTypeResolver(final List<LoadableType<?, ?>> loadableTypes, final Function<DataFetchingEnvironment, OwnerType> urnProvider) { - _loadableTypes = loadableTypes; - _urnProvider = urnProvider; - } + public OwnerTypeResolver( + final List<LoadableType<?, ?>> loadableTypes, + final Function<DataFetchingEnvironment, OwnerType> urnProvider) { + _loadableTypes = loadableTypes; + _urnProvider = urnProvider; + } - @Override - public CompletableFuture<T> get(DataFetchingEnvironment environment) { - final OwnerType ownerType = _urnProvider.apply(environment); - final LoadableType<?, ?> filteredEntity = Iterables.getOnlyElement(_loadableTypes.stream() + @Override + public CompletableFuture<T> get(DataFetchingEnvironment environment) { + final OwnerType ownerType = _urnProvider.apply(environment); + final LoadableType<?, ?> filteredEntity = + Iterables.getOnlyElement( + _loadableTypes.stream() .filter(entity -> ownerType.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader<String, T> loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - return loader.load(((Entity) ownerType).getUrn()); - } + final DataLoader<String, T> loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + return loader.load(((Entity) ownerType).getUrn()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index f13ebf8373e91..0d00823697c25 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -27,24 +29,21 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. - * The purpose of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" API - * to a single place. - * - * It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and limit arguments - * used for filtering the specific TimeSeries Aspects to be fetched. 
+ * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. The purpose + * of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" + * API to a single place. * - * On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping - * a generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This wil - * be invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. + * <p>It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and + * limit arguments used for filtering the specific TimeSeries Aspects to be fetched. * + * <p>On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping a + * generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This will be + * invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. */ @Slf4j -public class TimeSeriesAspectResolver implements DataFetcher<CompletableFuture<List<TimeSeriesAspect>>> { +public class TimeSeriesAspectResolver + implements DataFetcher<CompletableFuture<List<TimeSeriesAspect>>> { private final EntityClient _client; private final String _entityName; @@ -73,13 +72,13 @@ public TimeSeriesAspectResolver( _sort = sort; } - /** - * Check whether the actor is authorized to fetch the timeseries aspect given the resource urn - */ + /** Check whether the actor is authorized to fetch the timeseries aspect given the resource urn */ private boolean isAuthorized(QueryContext context, String urn) { - if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals( - Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized(context, Optional.of(new EntitySpec(_entityName, urn)), + if (_entityName.equals(Constants.DATASET_ENTITY_NAME) + && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(_entityName, urn)), PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); } return true; @@ -87,46 +86,62 @@ private boolean isAuthorized(QueryContext context, String urn) { @Override public CompletableFuture<List<TimeSeriesAspect>> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); - // Fetch the urn, assuming the parent has an urn field. - // todo: what if the parent urn isn't projected? - final String urn = ((Entity) environment.getSource()).getUrn(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + // Fetch the urn, assuming the parent has an urn field. + // todo: what if the parent urn isn't projected? + final String urn = ((Entity) environment.getSource()).getUrn(); - if (!isAuthorized(context, urn)) { - return Collections.emptyList(); - } + if (!isAuthorized(context, urn)) { + return Collections.emptyList(); + } - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - // Max number of aspects to return. - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; - final SortCriterion maybeSort = _sort; + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + // Max number of aspects to return. + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; + final SortCriterion maybeSort = _sort; - try { - // Step 1: Get aspects. - List<EnvelopedAspect> aspects = - _client.getTimeseriesAspectValues(urn, _entityName, _aspectName, maybeStartTimeMillis, maybeEndTimeMillis, - maybeLimit, buildFilters(maybeFilters), maybeSort, context.getAuthentication()); + try { + // Step 1: Get aspects. + List<EnvelopedAspect> aspects = + _client.getTimeseriesAspectValues( + urn, + _entityName, + _aspectName, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilters(maybeFilters), + maybeSort, + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve aspects from GMS", e); - } - }); + // Step 2: Bind profiles into GraphQL strong types. + return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve aspects from GMS", e); + } + }); } private Filter buildFilters(@Nullable FilterInput maybeFilters) { if (maybeFilters == null) { return null; } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(maybeFilters.getAnd().stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + maybeFilters.getAnd().stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java index 619ca95e7d9ed..bee46f8a18cf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.linkedin.common.urn.CorpuserUrn; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddLinkResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -35,41 +33,42 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw String linkLabel = input.getLabel(); Urn 
targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) + && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { - log.debug("Adding Link. input: {}", input.toString()); + log.debug("Adding Link. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.addLink( - linkUrl, - linkLabel, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add link to resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to add link to resource with input %s", input.toString()), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.addLink(linkUrl, linkLabel, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to add link to resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to add link to resource with input %s", input.toString()), e); + } + }); } - // Returns whether this is a glossary entity and whether you can edit this glossary entity with the + // Returns whether this is a glossary entity and whether you can edit this glossary entity with + // the // Manage all children or Manage direct children privileges private boolean canUpdateGlossaryEntityLinks(Urn targetUrn, QueryContext context) { - final boolean isGlossaryEntity = targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); + final boolean isGlossaryEntity = + targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); if (!isGlossaryEntity) { return false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java index 3f2dab0a5ba71..9c0d009ff9b0e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnerResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -42,28 +41,32 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw OwnerInput ownerInput = ownerInputBuilder.build(); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); - try { + try { - log.debug("Adding Owner. input: {}", input); + log.debug("Adding Owner. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - ImmutableList.of(ownerInput), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owner to resource with input %s", input), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + ImmutableList.of(ownerInput), + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owner to resource with input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java index 4e5b5bdb2a651..c64b2403364c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnersResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,37 +27,37 @@ public class AddOwnersResolver implements DataFetcher<CompletableFuture<Boolean> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final AddOwnersInput input = bindArgument(environment.getArgument("input"), AddOwnersInput.class); + final AddOwnersInput input 
= + bindArgument(environment.getArgument("input"), AddOwnersInput.class); List<OwnerInput> owners = input.getOwners(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - OwnerUtils.validateAddOwnerInput( - owners, - targetUrn, - _entityService - ); - try { - - log.debug("Adding Owners. input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - owners, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owners to resource with input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + OwnerUtils.validateAddOwnerInput(owners, targetUrn, _entityService); + try { + + log.debug("Adding Owners. input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + owners, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owners to resource with input %s", input), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java index 78d2341492b39..f4e3f7ed49056 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -27,44 +25,54 @@ public class AddTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input = + 
bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { - if (!tagUrn.getEntityType().equals("tag")) { - log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals("tag")) { + log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.info("Adding Tag. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info("Adding Tag. 
input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java index 7174f3edffee6..4135e774172c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,9 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -32,40 +30,47 @@ public class AddTagsResolver implements DataFetcher<CompletableFuture<Boolean>> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final AddTagsInput input = bindArgument(environment.getArgument("input"), AddTagsInput.class); - List<Urn> tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List<Urn> tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - tagUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { - log.info("Adding Tags. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - tagUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + LabelUtils.validateResourceAndLabel( + tagUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { + log.info("Adding Tags. input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + tagUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java index 056b5db4324c3..a776fda558a42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,8 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -25,39 +25,49 @@ public class AddTermResolver implements DataFetcher<CompletableFuture<Boolean>> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java index 2f58b6b09e681..4fbe74a0349b4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -19,8 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,41 +29,48 @@ public class AddTermsResolver implements DataFetcher<CompletableFuture<Boolean>> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final AddTermsInput input = bindArgument(environment.getArgument("input"), AddTermsInput.class); - List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, 
input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - termUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + LabelUtils.validateResourceAndLabel( + termUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - termUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + termUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java index 5beaeecae673f..94182835de159 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddOwnersResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,26 +28,30 @@ public class BatchAddOwnersResolver implements DataFetcher<CompletableFuture<Boo @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final BatchAddOwnersInput input = bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); + final BatchAddOwnersInput input = + bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); final 
List<OwnerInput> owners = input.getOwners(); final List<ResourceRefInput> resources = input.getResources(); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateOwners(owners); - validateInputResources(resources, context); + // First, validate the batch + validateOwners(owners); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchAddOwners(owners, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + try { + // Then execute the bulk add + batchAddOwners(owners, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } private void validateOwners(List<OwnerInput> owners) { @@ -67,23 +70,32 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be applied to subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be applied to subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddOwners(List<OwnerInput> owners, List<ResourceRefInput> resources, QueryContext context) { + private void batchAddOwners( + List<OwnerInput> owners, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch adding owners. 
owners: {}, resources: {}", owners, resources); try { - OwnerUtils.addOwnersToResources(owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.addOwnersToResources( + owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - owners, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + owners, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java index 9c5cddb3c50bc..239ada1653695 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTagsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -36,62 +33,64 @@ public class BatchAddTagsResolver implements DataFetcher<CompletableFuture<Boole @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTagsInput input = bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); - final List<Urn> tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTagsInput input = + bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); + final List<Urn> tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTags(tagUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - return handleAddTagsToSingleSchemaField(context, resources, tagUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", 
input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // First, validate the batch + validateTags(tagUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTagsToSingleSchemaField(context, resources, tagUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding tags to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * tag to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the tag to one of its siblings. If that fails, keep trying all siblings until one passes or all + * fail. Then we throw if none succeed. */ private Boolean handleAddTagsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List<ResourceRefInput> resources, - @Nonnull final List<Urn> tagUrns - ) { + @Nonnull final List<Urn> tagUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, new HashSet<>(), siblingUrns); } /** - * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. - * Try adding until we attempt all siblings or one passes. Throw if none pass. + * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. Try + * adding until we attempt all siblings or one passes. Throw if none pass. 
 */
  private Boolean attemptBatchAddTagsWithSiblings(
      @Nonnull final List<Urn> tagUrns,
      @Nonnull final ResourceRefInput resource,
      @Nonnull final QueryContext context,
      @Nonnull final HashSet<Urn> attemptedUrns,
-      @Nonnull final List<Urn> siblingUrns
-  ) {
+      @Nonnull final List<Urn> siblingUrns) {
    attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn()));
    final List<ResourceRefInput> resources = new ArrayList<>();
    resources.add(resource);
@@ -106,13 +105,19 @@ private Boolean attemptBatchAddTagsWithSiblings(
    if (siblingUrn.isPresent()) {
      log.warn(
          "Failed to add tags for resourceUrn {} and subResource {}, trying sibling urn {} now.",
-          resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get()
-      );
+          resource.getResourceUrn(),
+          resource.getSubResource(),
+          siblingUrn.get());
      resource.setResourceUrn(siblingUrn.get().toString());
-      return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, attemptedUrns, siblingUrns);
+      return attemptBatchAddTagsWithSiblings(
+          tagUrns, resource, context, attemptedUrns, siblingUrns);
    } else {
-      log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage());
-      throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e);
+      log.error(
+          "Failed to perform update against resource {}, {}",
+          resource.toString(),
+          e.getMessage());
+      throw new RuntimeException(
+          String.format("Failed to perform update against resource %s", resource.toString()), e);
    }
  }
 }
@@ -132,20 +137,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte
  private void validateInputResource(ResourceRefInput resource, QueryContext context) {
    final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn());
    if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) {
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
    }
-    LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService);
+    LabelUtils.validateResource(
+        resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService);
  }

-  private void batchAddTags(List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) {
-    log.debug("Batch adding Tags. tags: {}, resources: {}", resources, tagUrns);
-    try {
-      LabelUtils.addTagsToResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService);
-    } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!",
-          tagUrns,
-          resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())),
+  private void batchAddTags(
+      List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) {
+    log.debug("Batch adding Tags. tags: {}, resources: {}", resources, tagUrns);
+    try {
+      LabelUtils.addTagsToResources(
+          tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService);
+    } catch (Exception e) {
+      throw new RuntimeException(
+          String.format(
+              "Failed to batch add Tags %s to resources with urns %s!",
+              tagUrns,
+              resources.stream()
+                  .map(ResourceRefInput::getResourceUrn)
+                  .collect(Collectors.toList())),
          e);
-    }
+    }
  }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java
index a46f37b110f4e..b6d799c13345d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -12,21 +14,16 @@
 import com.linkedin.metadata.entity.EntityService;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;

-import javax.annotation.Nonnull;
-
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class BatchAddTermsResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -36,49 +33,52 @@ public class BatchAddTermsResolver implements DataFetcher<CompletableFuture<Bool
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final BatchAddTermsInput input = bindArgument(environment.getArgument("input"), BatchAddTermsInput.class);
-    final List<Urn> termUrns = input.getTermUrns().stream()
-        .map(UrnUtils::getUrn)
-        .collect(Collectors.toList());
+    final BatchAddTermsInput input =
+        bindArgument(environment.getArgument("input"), BatchAddTermsInput.class);
+    final List<Urn> termUrns =
+        input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList());
     final List<ResourceRefInput> resources = input.getResources();

-    return CompletableFuture.supplyAsync(() -> {
-
-      // First, validate the batch
-      validateTerms(termUrns);
-
-      if (resources.size() == 1 && resources.get(0).getSubResource() != null) {
-        return handleAddTermsToSingleSchemaField(context, resources, termUrns);
-      }
-
-      validateInputResources(resources, context);
-
-      try {
-        // Then execute the bulk add
-        batchAddTerms(termUrns, resources, context);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+
+          // First, validate the batch
+          validateTerms(termUrns);
+
+          if (resources.size() == 1 && resources.get(0).getSubResource() != null) {
+            return
handleAddTermsToSingleSchemaField(context, resources, termUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding terms to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * term to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the term to one of its siblings. If that fails, keep trying all siblings until one passes or + * all fail. Then we throw if none succeed. */ private Boolean handleAddTermsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List<ResourceRefInput> resources, - @Nonnull final List<Urn> termUrns - ) { + @Nonnull final List<Urn> termUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, new HashSet<>(), siblingUrns); } /** @@ -90,8 +90,7 @@ private Boolean attemptBatchAddTermsWithSiblings( @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet<Urn> attemptedUrns, - @Nonnull final List<Urn> siblingUrns - ) { + @Nonnull final List<Urn> siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List<ResourceRefInput> resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTermsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add terms for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - 
throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTerms(List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { + private void batchAddTerms( + List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch adding Terms. terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.addTermsToResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.addTermsToResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java index debd68646910f..30e04ac36ee0f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveOwnersResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,27 +28,33 @@ public class BatchRemoveOwnersResolver implements DataFetcher<CompletableFuture< @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final BatchRemoveOwnersInput input = bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); + final BatchRemoveOwnersInput input = + bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); final List<String> owners = input.getOwnerUrns(); final List<ResourceRefInput> resources = input.getResources(); - final Optional<Urn> maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + final Optional<Urn> maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? 
Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk remove - batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk remove + batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -62,26 +67,40 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be removed from subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be removed from subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveOwners(List<String> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn, - List<ResourceRefInput> resources, QueryContext context) { + private void batchRemoveOwners( + List<String> ownerUrns, + Optional<Urn> maybeOwnershipTypeUrn, + List<ResourceRefInput> resources, + QueryContext context) { log.debug("Batch removing owners. 
owners: {}, resources: {}", ownerUrns, resources); try { - OwnerUtils.removeOwnersFromResources(ownerUrns.stream().map(UrnUtils::getUrn).collect( - Collectors.toList()), maybeOwnershipTypeUrn, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.removeOwnersFromResources( + ownerUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + maybeOwnershipTypeUrn, + resources, + UrnUtils.getUrn(context.getActorUrn()), + _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch remove Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove Owners %s to resources with urns %s!", + ownerUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java index ab432f0afcaec..7500f29a0c67f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTagsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -28,26 +27,29 @@ public class BatchRemoveTagsResolver implements DataFetcher<CompletableFuture<Bo @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTagsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); - final List<Urn> tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTagsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); + final List<Urn> tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + 
"Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTags(List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) { + private void batchRemoveTags( + List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch removing Tags. tags: {}, resources: {}", resources, tagUrns); try { - LabelUtils.removeTagsFromResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTagsFromResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java index c8870cc44bf9e..3706e4e911b17 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTermsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -28,26 +27,29 @@ public class BatchRemoveTermsResolver implements DataFetcher<CompletableFuture<B @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTermsInput input = 
bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); - final List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTermsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); + final List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTerms(List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { + private void batchRemoveTerms( + List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch removing Terms. 
terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.removeTermsFromResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTermsFromResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java index 9b6167c673d8d..551878371b489 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchSetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -30,25 +29,29 @@ public class BatchSetDomainResolver implements DataFetcher<CompletableFuture<Boo @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDomainInput input = bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); + final BatchSetDomainInput input = + bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); final String maybeDomainUrn = input.getDomainUrn(); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the domain - validateDomain(maybeDomainUrn); - validateInputResources(resources, context); + // First, validate the domain + validateDomain(maybeDomainUrn); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchSetDomains(maybeDomainUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchSetDomains(maybeDomainUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateDomain(@Nullable String maybeDomainUrn) { @@ -66,23 +69,31 @@ 
private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchSetDomains(String maybeDomainUrn, List<ResourceRefInput> resources, QueryContext context) { + private void batchSetDomains( + String maybeDomainUrn, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch adding Domains. domainUrn: {}, resources: {}", maybeDomainUrn, resources); try { - DomainUtils.setDomainForResources(maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), + DomainUtils.setDomainForResources( + maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - maybeDomainUrn, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + maybeDomainUrn, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java index 5961dc9087a63..e76617d119621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateDeprecationResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -30,23 +29,32 @@ public class BatchUpdateDeprecationResolver implements DataFetcher<CompletableFu @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateDeprecationInput input = bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); + final BatchUpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); final List<ResourceRefInput> resources 
= input.getResources();

-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {

-      // First, validate the resources
-      validateInputResources(resources, context);
+          // First, validate the resources
+          validateInputResources(resources, context);

-      try {
-        // Then execute the bulk update
-        batchUpdateDeprecation(input.getDeprecated(), input.getNote(), input.getDecommissionTime(), resources, context);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+          try {
+            // Then execute the bulk update
+            batchUpdateDeprecation(
+                input.getDeprecated(),
+                input.getNote(),
+                input.getDecommissionTime(),
+                resources,
+                context);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
  }

  private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) {
@@ -58,17 +66,25 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte
  private void validateInputResource(ResourceRefInput resource, QueryContext context) {
    final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn());
    if (!DeprecationUtils.isAuthorizedToUpdateDeprecationForEntity(context, resourceUrn)) {
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
    }
-    LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService);
+    LabelUtils.validateResource(
+        resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService);
  }

-  private void batchUpdateDeprecation(boolean deprecated,
+  private void batchUpdateDeprecation(
+      boolean deprecated,
      @Nullable String note,
      @Nullable Long decommissionTime,
      List<ResourceRefInput> resources,
      QueryContext context) {
-    log.debug("Batch updating deprecation. deprecated: {}, note: {}, decommissionTime: {}, resources: {}", deprecated, note, decommissionTime, resources);
+    log.debug(
+        "Batch updating deprecation. deprecated: {}, note: {}, decommissionTime: {}, resources: {}",
+        deprecated,
+        note,
+        decommissionTime,
+        resources);
    try {
      DeprecationUtils.updateDeprecationForResources(
          deprecated,
@@ -78,10 +94,14 @@ private void batchUpdateDeprecation(boolean deprecated,
          UrnUtils.getUrn(context.getActorUrn()),
          _entityService);
    } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to batch update deprecated to %s for resources with urns %s!",
-          deprecated,
-          resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())),
+      throw new RuntimeException(
+          String.format(
+              "Failed to batch update deprecated to %s for resources with urns %s!",
+              deprecated,
+              resources.stream()
+                  .map(ResourceRefInput::getResourceUrn)
+                  .collect(Collectors.toList())),
          e);
    }
  }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java
index 69b2b92fb9cca..5a25e6d83e648 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -14,9 +16,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class BatchUpdateSoftDeletedResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -26,24 +25,32 @@ public class BatchUpdateSoftDeletedResolver implements DataFetcher<CompletableFu
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final BatchUpdateSoftDeletedInput input = bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class);
+    final BatchUpdateSoftDeletedInput input =
+        bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class);
     final List<String> urns = input.getUrns();
     final boolean deleted = input.getDeleted();

-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {

-      // First, validate the entities exist
-      validateInputUrns(urns, context);
+          // First, validate the entities exist
+          validateInputUrns(urns, context);

-      try {
-        // Then execute the bulk soft delete
-        batchUpdateSoftDeleted(deleted, urns, context);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform batch soft delete against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform batch soft delete against input %s", input.toString()), e);
-      }
-    });
+          try {
+            // Then execute the bulk soft delete
+            batchUpdateSoftDeleted(deleted, urns, context);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform batch soft delete against input {}, {}",
+                input.toString(),
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to perform batch soft delete against input %s", input.toString()),
+                e);
+          }
+
}); } private void validateInputUrns(List<String> urnStrs, QueryContext context) { @@ -55,10 +62,12 @@ private void validateInputUrns(List<String> urnStrs, QueryContext context) { private void validateInputUrn(String urnStr, QueryContext context) { final Urn urn = UrnUtils.getUrn(urnStr); if (!DeleteUtils.isAuthorizedToDeleteEntity(context, urn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } if (!_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); } } @@ -66,14 +75,12 @@ private void batchUpdateSoftDeleted(boolean removed, List<String> urnStrs, Query log.debug("Batch soft deleting assets. urns: {}", urnStrs); try { DeleteUtils.updateStatusForResources( - removed, - urnStrs, - UrnUtils.getUrn(context.getActorUrn()), - _entityService); + removed, urnStrs, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to batch update soft deleted status entities with urns %s!", urnStrs), + String.format( + "Failed to batch update soft deleted status entities with urns %s!", urnStrs), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index 59d5d6939c04c..d0796389d2280 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -1,13 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.container.EditableContainerProperties; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.DomainProperties; @@ -30,148 +31,191 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DescriptionUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DescriptionUtils() { } + private DescriptionUtils() {} public static void 
updateFieldDescription( String newDescription, Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService - ) { - EditableSchemaMetadata editableSchemaMetadata = - (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); - - editableFieldInfo.setDescription(newDescription); - - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + EntityService entityService) { + EditableSchemaMetadata editableSchemaMetadata = + (EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); + + editableFieldInfo.setDescription(newDescription); + + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } public static void updateContainerDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = - (EditableContainerProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableContainerProperties()); + (EditableContainerProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableContainerProperties()); containerProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, containerProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + containerProperties, + actor, + entityService); } public static void updateDomainDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = - (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, entityService, null); + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + entityService, + null); if (domainProperties == null) { - // If there are no properties for the domain already, then we should throw since the properties model also requires a name. + // If there are no properties for the domain already, then we should throw since the + // properties model also requires a name. 
throw new IllegalArgumentException("Properties for this Domain do not yet exist!"); } domainProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + entityService); } public static void updateTagDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = - (TagProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); + (TagProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); if (tagProperties == null) { - // If there are no properties for the tag already, then we should throw since the properties model also requires a name. + // If there are no properties for the tag already, then we should throw since the properties + // model also requires a name. throw new IllegalArgumentException("Properties for this Tag do not yet exist!"); } tagProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); + persistAspect( + resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); } public static void updateCorpGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = - (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, entityService, new CorpGroupEditableInfo()); + (CorpGroupEditableInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + entityService, + new CorpGroupEditableInfo()); if (corpGroupEditableInfo != null) { corpGroupEditableInfo.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, corpGroupEditableInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + corpGroupEditableInfo, + actor, + entityService); } public static void updateGlossaryTermDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + entityService, + null); if (glossaryTermInfo == null) { - // If there are no properties for the term already, then we should throw since the properties model also requires a name. + // If there are no properties for the term already, then we should throw since the properties + // model also requires a name. 
throw new IllegalArgumentException("Properties for this Glossary Term do not yet exist!");
    }
-    glossaryTermInfo.setDefinition(newDescription); // We call description 'definition' for glossary terms. Not great, we know. :(
-    persistAspect(resourceUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, entityService);
+    glossaryTermInfo.setDefinition(
+        newDescription); // We call description 'definition' for glossary terms. Not great, we know.
+    // :(
+    persistAspect(
+        resourceUrn,
+        Constants.GLOSSARY_TERM_INFO_ASPECT_NAME,
+        glossaryTermInfo,
+        actor,
+        entityService);
  }

  public static void updateGlossaryNodeDescription(
-      String newDescription,
-      Urn resourceUrn,
-      Urn actor,
-      EntityService entityService
-  ) {
-    GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity(
-        resourceUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, entityService, null);
+      String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) {
+    GlossaryNodeInfo glossaryNodeInfo =
+        (GlossaryNodeInfo)
+            EntityUtils.getAspectFromEntity(
+                resourceUrn.toString(),
+                Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
+                entityService,
+                null);
    if (glossaryNodeInfo == null) {
      throw new IllegalArgumentException("Glossary Node does not exist");
    }
    glossaryNodeInfo.setDefinition(newDescription);
-    persistAspect(resourceUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, entityService);
+    persistAspect(
+        resourceUrn,
+        Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
+        glossaryNodeInfo,
+        actor,
+        entityService);
  }

  public static void updateNotebookDescription(
-      String newDescription,
-      Urn resourceUrn,
-      Urn actor,
-      EntityService entityService) {
-    EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity(
-        resourceUrn.toString(), Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, entityService, null);
+      String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) {
+    EditableNotebookProperties notebookProperties =
+        (EditableNotebookProperties)
+            EntityUtils.getAspectFromEntity(
+                resourceUrn.toString(),
+                Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME,
+                entityService,
+                null);
    if (notebookProperties != null) {
      notebookProperties.setDescription(newDescription);
    }
-    persistAspect(resourceUrn, Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, notebookProperties, actor, entityService);
+    persistAspect(
+        resourceUrn,
+        Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME,
+        notebookProperties,
+        actor,
+        entityService);
  }

  public static Boolean validateFieldDescriptionInput(
      Urn resourceUrn,
      String subResource,
      SubResourceType subResourceType,
-      EntityService entityService
-  ) {
+      EntityService entityService) {
    if (!entityService.exists(resourceUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
    }

    validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService);
@@ -179,51 +223,41 @@ public static Boolean validateFieldDescriptionInput(
    return true;
  }

-  public static Boolean validateDomainInput(
-      Urn resourceUrn,
-      EntityService entityService
-  ) {
+  public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) {
    if (!entityService.exists(resourceUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
    }

    return true;
  }

-  public static Boolean validateContainerInput(
-      Urn resourceUrn,
-      EntityService entityService
-  ) {
+  public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) {
    if (!entityService.exists(resourceUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
    }

    return true;
  }

-  public static Boolean validateLabelInput(
-      Urn resourceUrn,
-      EntityService entityService
-  ) {
+  public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) {
    if (!entityService.exists(resourceUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn));
    }

    return true;
  }

-  public static Boolean validateCorpGroupInput(
-      Urn corpUserUrn,
-      EntityService entityService
-  ) {
+  public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) {
    if (!entityService.exists(corpUserUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn));
    }

    return true;
  }

-  public static Boolean validateNotebookInput(
-      Urn notebookUrn,
-      EntityService entityService) {
+  public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) {
    if (!entityService.exists(notebookUrn)) {
      throw new IllegalArgumentException(
          String.format("Failed to update %s. 
%s does not exist.", notebookUrn, notebookUrn)); @@ -231,11 +265,15 @@ public static Boolean validateNotebookInput( return true; } - public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateFieldDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -245,11 +283,14 @@ public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -259,25 +300,31 @@ public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContex orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateContainerDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - targetUrn.getEntityType(), - targetUrn.toString(), - orPrivilegeGroups); - } + public static boolean isAuthorizedToUpdateContainerDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); + } - public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + 
ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -288,79 +335,122 @@ public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext cont } public static void updateMlModelDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelProperties editableProperties = + (EditableMLModelProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlModelGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelGroupProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelGroupProperties editableProperties = + (EditableMLModelGroupProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelGroupProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } + public static void updateMlFeatureDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureProperties editableProperties = + (EditableMLFeatureProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static 
void updateMlFeatureTableDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureTableProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureTableProperties editableProperties = + (EditableMLFeatureTableProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureTableProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlPrimaryKeyDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLPrimaryKeyProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLPrimaryKeyProperties editableProperties = + (EditableMLPrimaryKeyProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLPrimaryKeyProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateDataProductDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, entityService, new DataProductProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + DataProductProperties properties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + entityService, + new DataProductProperties()); if (properties != null) { properties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + properties, + actor, + entityService); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e5e3a5a0ee42e..e4c5c132be4f7 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -19,11 +19,10 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -33,57 +32,78 @@ public class MoveDomainResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final MoveDomainInput input = ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); + final MoveDomainInput input = + ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(input.getResourceUrn()); - final Urn newParentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Urn newParentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canManageDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canManageDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - try { - if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Resource is not a domain."); - } + try { + if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Resource is not a domain."); + } - DomainProperties properties = (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, - null - ); + DomainProperties properties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); - if (properties == null) { - throw new IllegalArgumentException("Domain properties do not exist."); - } + if (properties == null) { + throw new IllegalArgumentException("Domain properties do not exist."); + } - if (newParentDomainUrn != null) { - if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Parent entity is not a domain."); - } - if (!_entityService.exists(newParentDomainUrn)) { - throw new IllegalArgumentException("Parent entity does not exist."); - } - } + if (newParentDomainUrn != null) { + if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Parent entity is not a domain."); + } + if (!_entityService.exists(newParentDomainUrn)) { + throw new IllegalArgumentException("Parent entity does not exist."); + } + } - if (DomainUtils.hasNameConflict(properties.getName(), newParentDomainUrn, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in the destination domain. Please pick a unique name.", properties.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } + if (DomainUtils.hasNameConflict( + properties.getName(), newParentDomainUrn, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in the destination domain. 
Please pick a unique name.", + properties.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } - properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - MutationUtils.persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, properties, actor, _entityService); - return true; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to move domain {} to parent {} : {}", input.getResourceUrn(), input.getParentDomain(), e.getMessage()); - throw new RuntimeException(String.format("Failed to move domain %s to %s", input.getResourceUrn(), input.getParentDomain()), e); - } - }); + properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + MutationUtils.persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + properties, + actor, + _entityService); + return true; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to move domain {} to parent {} : {}", + input.getResourceUrn(), + input.getParentDomain(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to move domain %s to %s", + input.getResourceUrn(), input.getParentDomain()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java index 30bd940a7dfed..064b532a792c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java @@ -1,20 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.codahale.metrics.Timer; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Generic GraphQL resolver responsible for performing updates against particular types. 
* @@ -23,7 +21,8 @@ */ public class MutableTypeBatchResolver<I, B, T> implements DataFetcher<CompletableFuture<List<T>>> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); private final BatchMutableType<I, B, T> _batchMutableType; @@ -33,21 +32,23 @@ public MutableTypeBatchResolver(final BatchMutableType<I, B, T> batchMutableType @Override public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) throws Exception { - final B[] input = bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); - - return CompletableFuture.supplyAsync(() -> { - Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); - - try { - return _batchMutableType.batchUpdate(input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error("Failed to perform batchUpdate", e); - throw new IllegalArgumentException(e); - } finally { - timer.stop(); - } - }); + final B[] input = + bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); + + return CompletableFuture.supplyAsync( + () -> { + Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); + + try { + return _batchMutableType.batchUpdate(input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error("Failed to perform batchUpdate", e); + throw new IllegalArgumentException(e); + } finally { + timer.stop(); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java index 115a68e808de6..c62282c906597 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.MutableType; import graphql.schema.DataFetcher; @@ -8,8 +10,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** * Generic GraphQL resolver responsible for performing updates against particular types. 
* @@ -18,28 +18,34 @@ */ public class MutableTypeResolver<I, T> implements DataFetcher<CompletableFuture<T>> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeResolver.class.getName()); - private final MutableType<I, T> _mutableType; + private final MutableType<I, T> _mutableType; - public MutableTypeResolver(final MutableType<I, T> mutableType) { - _mutableType = mutableType; - } + public MutableTypeResolver(final MutableType<I, T> mutableType) { + _mutableType = mutableType; + } - @Override - public CompletableFuture<T> get(DataFetchingEnvironment environment) throws Exception { - final String urn = environment.getArgument("urn"); - final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug(String.format("Mutating entity. input: %s", input)); - return _mutableType.update(urn, input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error(String.format("Failed to perform update against input %s", input) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } + @Override + public CompletableFuture<T> get(DataFetchingEnvironment environment) throws Exception { + final String urn = environment.getArgument("urn"); + final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug(String.format("Mutating entity. input: %s", input)); + return _mutableType.update(urn, input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error( + String.format("Failed to perform update against input %s", input) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java index c862fcfa83594..4a915b2a477cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; @@ -19,49 +21,56 @@ import java.util.Optional; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class MutationUtils { - private MutationUtils() { } + private MutationUtils() {} - public static void persistAspect(Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); + public static void persistAspect( + Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); 
entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param urn * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithUrn(Urn urn, String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithUrn( + Urn urn, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); return setProposalProperties(proposal, urn.getEntityType(), aspectName, aspect); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param entityKey * @param entityType * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithKey(RecordTemplate entityKey, String entityType, - String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithKey( + RecordTemplate entityKey, String entityType, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(entityKey)); return setProposalProperties(proposal, entityType, aspectName, aspect); } - private static MetadataChangeProposal setProposalProperties(MetadataChangeProposal proposal, - String entityType, String aspectName, RecordTemplate aspect) { + private static MetadataChangeProposal setProposalProperties( + MetadataChangeProposal proposal, + String entityType, + String aspectName, + RecordTemplate aspect) { proposal.setEntityType(entityType); proposal.setAspectName(aspectName); proposal.setAspect(GenericRecordUtils.serializeAspect(aspect)); @@ -77,18 +86,16 @@ private static MetadataChangeProposal setProposalProperties(MetadataChangePropos } public static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional<EditableSchemaFieldInfo> fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); @@ -104,34 +111,37 @@ public static Boolean validateSubresourceExists( Urn targetUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (subResourceType.equals(SubResourceType.DATASET_FIELD)) { - SchemaMetadata schemaMetadata = (SchemaMetadata) entityService.getAspect(targetUrn, - Constants.SCHEMA_METADATA_ASPECT_NAME, 
0); + SchemaMetadata schemaMetadata = + (SchemaMetadata) + entityService.getAspect(targetUrn, Constants.SCHEMA_METADATA_ASPECT_NAME, 0); if (schemaMetadata == null) { throw new IllegalArgumentException( - String.format("Failed to update %s & field %s. %s has no schema.", targetUrn, subResource, targetUrn) - ); + String.format( + "Failed to update %s & field %s. %s has no schema.", + targetUrn, subResource, targetUrn)); } Optional<SchemaField> fieldMatch = - schemaMetadata.getFields().stream().filter(field -> field.getFieldPath().equals(subResource)).findFirst(); + schemaMetadata.getFields().stream() + .filter(field -> field.getFieldPath().equals(subResource)) + .findFirst(); if (!fieldMatch.isPresent()) { - throw new IllegalArgumentException(String.format( - "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", - targetUrn, subResource, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", + targetUrn, subResource, subResource)); } return true; } - throw new IllegalArgumentException(String.format( - "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", - targetUrn, subResource, SubResourceType.values() - )); + throw new IllegalArgumentException( + String.format( + "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", + targetUrn, subResource, SubResourceType.values())); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java index 23c08043af5d3..f84d1b3a66f6f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,9 +15,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveLinkResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -24,36 +23,38 @@ public class RemoveLinkResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final RemoveLinkInput input = bindArgument(environment.getArgument("input"), RemoveLinkInput.class); + final RemoveLinkInput input = + bindArgument(environment.getArgument("input"), RemoveLinkInput.class); String linkUrl = input.getLinkUrl(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { - log.debug("Removing Link input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.removeLink( - linkUrl, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove link from resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to remove link from resource with input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { + log.debug("Removing Link input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.removeLink(linkUrl, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to remove link from resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to remove link from resource with input %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java index 2d5faaab44458..9827aa0666d19 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveOwnerResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -27,36 +26,42 @@ public class RemoveOwnerResolver implements DataFetcher<CompletableFuture<Boolea @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final RemoveOwnerInput input = bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); + final RemoveOwnerInput input = + bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); Urn ownerUrn = Urn.createFromString(input.getOwnerUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - Optional<Urn> maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + Optional<Urn> maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateRemoveInput( - targetUrn, - _entityService - ); - try { - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.removeOwnersFromResources( - ImmutableList.of(ownerUrn), - maybeOwnershipTypeUrn, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove owner from resource with input {}", input); - throw new RuntimeException(String.format("Failed to remove owner from resource with input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateRemoveInput(targetUrn, _entityService); + try { + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.removeOwnersFromResources( + ImmutableList.of(ownerUrn), + maybeOwnershipTypeUrn, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to remove owner from resource with input {}", input); + throw new RuntimeException( + String.format( + "Failed to remove owner from resource with input %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java index 33a95c3576061..7e2919e0ca1f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,44 +25,54 @@ public class RemoveTagResolver implements DataFetcher<CompletableFuture<Boolean> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input = + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - true - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + true); + try { - if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { - log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { + log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.debug("Removing Tag. input: %s", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTagsFromResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.debug("Removing Tag. input: %s", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTagsFromResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java index 8f18b0ecd6198..ec38360df6d8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTermResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,45 +25,55 @@ public class RemoveTermResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), 
TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - true - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + true); - try { + try { - if (!termUrn.getEntityType().equals("glossaryTerm")) { - log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); - return false; - } + if (!termUrn.getEntityType().equals("glossaryTerm")) { + log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); + return false; + } - log.info(String.format("Removing Term. input: {}", input)); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTermsFromResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info(String.format("Removing Term. 
input: {}", input)); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTermsFromResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java index d6e6e5610da56..13a8427633cae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -12,19 +14,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class UpdateDescriptionResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -33,7 +30,8 @@ public class UpdateDescriptionResolver implements DataFetcher<CompletableFuture< @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final DescriptionUpdateInput input = bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); + final DescriptionUpdateInput input = + bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating description. input: {}", input.toString()); switch (targetUrn.getEntityType()) { @@ -67,380 +65,383 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw return updateDataProductDescription(targetUrn, input, environment.getContext()); default: throw new RuntimeException( - String.format("Failed to update description. Unsupported resource type %s provided.", targetUrn)); + String.format( + "Failed to update description. Unsupported resource type %s provided.", targetUrn)); } } - private CompletableFuture<Boolean> updateContainerDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - DescriptionUtils.validateContainerInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateContainerDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateContainerDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + DescriptionUtils.validateContainerInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateContainerDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateDomainDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateDomainInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDomainDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateDomainDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateDomainInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDomainDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - // If updating schema field description fails, try again on a sibling until there are no more siblings to try. Then throw if necessary. 
+ // If updating schema field description fails, try again on a sibling until there are no more + // siblings to try. Then throw if necessary. private Boolean attemptUpdateDatasetSchemaFieldDescription( @Nonnull final Urn targetUrn, @Nonnull final DescriptionUpdateInput input, @Nonnull final QueryContext context, @Nonnull final HashSet<Urn> attemptedUrns, - @Nonnull final List<Urn> siblingUrns - ) { + @Nonnull final List<Urn> siblingUrns) { attemptedUrns.add(targetUrn); try { - DescriptionUtils.validateFieldDescriptionInput(targetUrn, input.getSubResource(), input.getSubResourceType(), - _entityService); + DescriptionUtils.validateFieldDescriptionInput( + targetUrn, input.getSubResource(), input.getSubResourceType(), _entityService); final Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateFieldDescription(input.getDescription(), targetUrn, input.getSubResource(), actor, - _entityService); + DescriptionUtils.updateFieldDescription( + input.getDescription(), targetUrn, input.getSubResource(), actor, _entityService); return true; } catch (Exception e) { final Optional<Urn> siblingUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, attemptedUrns); if (siblingUrn.isPresent()) { - log.warn("Failed to update description for input {}, trying sibling urn {} now.", input.toString(), siblingUrn.get()); - return attemptUpdateDatasetSchemaFieldDescription(siblingUrn.get(), input, context, attemptedUrns, siblingUrns); + log.warn( + "Failed to update description for input {}, trying sibling urn {} now.", + input.toString(), + siblingUrn.get()); + return attemptUpdateDatasetSchemaFieldDescription( + siblingUrn.get(), input, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } - private CompletableFuture<Boolean> updateDatasetSchemaFieldDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + private CompletableFuture<Boolean> updateDatasetSchemaFieldDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - if (input.getSubResourceType() == null) { - throw new IllegalArgumentException("Update description without subresource is not currently supported"); - } + if (input.getSubResourceType() == null) { + throw new IllegalArgumentException( + "Update description without subresource is not currently supported"); + } - List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); + List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); - return attemptUpdateDatasetSchemaFieldDescription(targetUrn, input, context, new HashSet<>(), siblingUrns); - }); + return attemptUpdateDatasetSchemaFieldDescription( + targetUrn, input, context, new HashSet<>(), siblingUrns); + }); } - private CompletableFuture<Boolean> updateTagDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateTagDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateTagDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateTagDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateGlossaryTermDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryTermDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateGlossaryTermDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryTermDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateGlossaryNodeDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryNodeDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateGlossaryNodeDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryNodeDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateCorpGroupDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateCorpGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateCorpGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateCorpGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateNotebookDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateNotebookInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateNotebookDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateNotebookDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateNotebookInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateNotebookDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlModelDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlModelDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlModelGroupDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlModelGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlFeatureDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlFeatureDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlPrimaryKeyDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlPrimaryKeyDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlPrimaryKeyDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlPrimaryKeyDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlFeatureTableDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureTableDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlFeatureTableDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureTableDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateDataProductDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDataProductDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateDataProductDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDataProductDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 0e316ac1296ee..dd44c2718b3a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -16,22 +19,18 @@ import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -41,178 +40,232 @@ public class UpdateNameResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws 
Exception { - final UpdateNameInput input = bindArgument(environment.getArgument("input"), UpdateNameInput.class); + final UpdateNameInput input = + bindArgument(environment.getArgument("input"), UpdateNameInput.class); Urn targetUrn = Urn.createFromString(input.getUrn()); log.info("Updating name. input: {}", input); - return CompletableFuture.supplyAsync(() -> { - if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); - } + return CompletableFuture.supplyAsync( + () -> { + if (!_entityService.exists(targetUrn)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + } - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermName(targetUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeName(targetUrn, input, environment.getContext()); - case Constants.DOMAIN_ENTITY_NAME: - return updateDomainName(targetUrn, input, environment.getContext()); - case Constants.CORP_GROUP_ENTITY_NAME: - return updateGroupName(targetUrn, input, environment.getContext()); - case Constants.DATA_PRODUCT_ENTITY_NAME: - return updateDataProductName(targetUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update name. Unsupported resource type %s provided.", targetUrn)); - } - }); + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermName(targetUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeName(targetUrn, input, environment.getContext()); + case Constants.DOMAIN_ENTITY_NAME: + return updateDomainName(targetUrn, input, environment.getContext()); + case Constants.CORP_GROUP_ENTITY_NAME: + return updateGroupName(targetUrn, input, environment.getContext()); + case Constants.DATA_PRODUCT_ENTITY_NAME: + return updateDataProductName(targetUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update name. 
Unsupported resource type %s provided.", targetUrn)); + } + }); } private Boolean updateGlossaryTermName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { throw new IllegalArgumentException("Glossary Term does not exist"); } glossaryTermInfo.setName(input.getName()); Urn actor = UrnUtils.getUrn(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateGlossaryNodeName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateDomainName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateDomainName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageDomains(context)) { try { - DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, null); + DomainProperties domainProperties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (domainProperties == null) { throw new IllegalArgumentException("Domain does not exist"); } - if (DomainUtils.hasNameConflict(input.getName(), DomainUtils.getParentDomainSafely(domainProperties), context, _entityClient)) { + if (DomainUtils.hasNameConflict( + input.getName(), + DomainUtils.getParentDomainSafely(domainProperties), + context, + _entityClient)) { throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); + String.format( + "\"%s\" already exists in this domain. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); } domainProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + _entityService); return true; } catch (DataHubGraphQLException e) { throw e; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateGroupName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageUsersAndGroups(context)) { try { - CorpGroupInfo corpGroupInfo = (CorpGroupInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.CORP_GROUP_INFO_ASPECT_NAME, _entityService, null); + CorpGroupInfo corpGroupInfo = + (CorpGroupInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.CORP_GROUP_INFO_ASPECT_NAME, + _entityService, + null); if (corpGroupInfo == null) { throw new IllegalArgumentException("Group does not exist"); } corpGroupInfo.setDisplayName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); + persistAspect( + targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateDataProductName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { try { - DataProductProperties dataProductProperties = (DataProductProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, _entityService, null); + DataProductProperties dataProductProperties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (dataProductProperties == null) { throw new IllegalArgumentException("Data Product does not exist"); } - Domains dataProductDomains = (Domains) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); - if (dataProductDomains != null && dataProductDomains.hasDomains() && dataProductDomains.getDomains().size() > 0) { + Domains dataProductDomains = + (Domains) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); + if (dataProductDomains != null + && dataProductDomains.hasDomains() + && dataProductDomains.getDomains().size() > 0) { // get first domain since we only allow one domain right now Urn domainUrn = UrnUtils.getUrn(dataProductDomains.getDomains().get(0).toString()); - // if they can't edit a data product from either the parent domain permission or from permission on the data product itself, throw error + // if they can't edit a data product from either the parent domain permission or from + // permission on the data product itself, throw error if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn) && !DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } else { // should not happen since data products need to have a domain if (!DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } dataProductProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + dataProductProperties, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 5d78bc38eafe8..848118e6cc0f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -8,21 +11,17 @@ import com.linkedin.datahub.graphql.generated.UpdateParentNodeInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -32,54 +31,72 @@ public class UpdateParentNodeResolver implements DataFetcher<CompletableFuture<B @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final UpdateParentNodeInput input = bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); + final UpdateParentNodeInput input = + bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); 
final QueryContext context = environment.getContext(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryNode.", targetUrn, parentNodeUrn)); + if (!_entityService.exists(parentNodeUrn) + || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryNode.", + targetUrn, parentNodeUrn)); } } GlossaryNodeUrn finalParentNodeUrn = parentNodeUrn; - return CompletableFuture.supplyAsync(() -> { - Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - // need to be able to manage current parent node and new parent node - if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) - && GlossaryUtils.canManageChildrenEntities(context, finalParentNodeUrn, _entityClient)) { - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update parentNode. Unsupported resource type %s provided.", targetUrn)); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + // need to be able to manage current parent node and new parent node + if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) + && GlossaryUtils.canManageChildrenEntities( + context, finalParentNodeUrn, _entityClient)) { + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update parentNode. Unsupported resource type %s provided.", + targetUrn)); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private Boolean updateGlossaryTermParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { - // If there is no info aspect for the term already, then we should throw since the model also requires a name. + // If there is no info aspect for the term already, then we should throw since the model + // also requires a name. throw new IllegalArgumentException("Info for this Glossary Term does not yet exist!"); } @@ -89,12 +106,18 @@ private Boolean updateGlossaryTermParentNode( glossaryTermInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } @@ -102,11 +125,15 @@ private Boolean updateGlossaryNodeParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Info for this Glossary Node does not yet exist!"); } @@ -117,12 +144,18 @@ private Boolean updateGlossaryNodeParentNode( glossaryNodeInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java index 875bc43e7c100..53b215bce7746 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,15 +21,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} - * instead. - */ +/** Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} instead. */ @Slf4j @RequiredArgsConstructor public class UpdateUserSettingResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -35,35 +31,46 @@ public class UpdateUserSettingResolver implements DataFetcher<CompletableFuture< @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateUserSettingInput input = bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); + final UpdateUserSettingInput input = + bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); UserSetting name = input.getName(); final boolean value = input.getValue(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - return CompletableFuture.supplyAsync(() -> { - try { - // In the future with more settings, we'll need to do a read-modify-write - // for now though, we can just write since there is only 1 setting - CorpUserSettings newSettings = new CorpUserSettings(); - newSettings.setAppearance(new CorpUserAppearanceSettings()); - if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { - newSettings.setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); - } else { - log.error("User Setting name {} not currently supported", name); - throw new RuntimeException(String.format("User Setting name %s not currently supported", name)); - } + return CompletableFuture.supplyAsync( + () -> { + try { + // In the future with more settings, we'll need to do a read-modify-write + // for now though, we can just write since there is only 1 setting + CorpUserSettings newSettings = new CorpUserSettings(); + newSettings.setAppearance(new CorpUserAppearanceSettings()); + if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { + newSettings.setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); + } else { + log.error("User Setting name {} not currently supported", name); + throw new RuntimeException( + String.format("User Setting name %s not currently supported", name)); + } - MetadataChangeProposal proposal = - buildMetadataChangeProposalWithUrn(actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); - _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); + _entityService.ingestProposal(proposal, 
EntityUtils.getAuditStamp(actor), false); - return true; - } catch (Exception e) { - log.error("Failed to perform user settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform user settings update against input %s", input.toString()), e); - } - }); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform user settings update against input %s", input.toString()), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 7d4c5bee61e19..3fffe9fa019e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -19,22 +20,21 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DeleteUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeleteUtils() { } + private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -45,11 +45,7 @@ public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, } public static void updateStatusForResources( - boolean removed, - List<String> urnStrs, - Urn actor, - EntityService entityService - ) { + boolean removed, List<String> urnStrs, Urn actor, 
EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (String urnStr : urnStrs) { changes.add(buildSoftDeleteProposal(removed, urnStr, actor, entityService)); @@ -58,17 +54,13 @@ public static void updateStatusForResources( } private static MetadataChangeProposal buildSoftDeleteProposal( - boolean removed, - String urnStr, - Urn actor, - EntityService entityService - ) { - Status status = (Status) EntityUtils.getAspectFromEntity( - urnStr, - Constants.STATUS_ASPECT_NAME, - entityService, - new Status()); + boolean removed, String urnStr, Urn actor, EntityService entityService) { + Status status = + (Status) + EntityUtils.getAspectFromEntity( + urnStr, Constants.STATUS_ASPECT_NAME, entityService, new Status()); status.setRemoved(removed); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java index bd82bbb8e514f..3114e5241711c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; @@ -22,22 +23,22 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; - - @Slf4j public class DeprecationUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeprecationUtils() { } + private DeprecationUtils() {} - public static boolean isAuthorizedToUpdateDeprecationForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDeprecationForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final 
DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -53,11 +54,12 @@ public static void updateDeprecationForResources( @Nullable Long decommissionTime, List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildUpdateDeprecationProposal(deprecated, note, decommissionTime, resource, actor, entityService)); + changes.add( + buildUpdateDeprecationProposal( + deprecated, note, decommissionTime, resource, actor, entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } @@ -68,21 +70,11 @@ private static MetadataChangeProposal buildUpdateDeprecationProposal( @Nullable Long decommissionTime, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { String resourceUrn = resource.getResourceUrn(); - Deprecation deprecation = getDeprecation( - entityService, - resourceUrn, - actor, - note, - deprecated, - decommissionTime - ); + Deprecation deprecation = + getDeprecation(entityService, resourceUrn, actor, note, deprecated, decommissionTime); return MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn(resourceUrn), - Constants.DEPRECATION_ASPECT_NAME, - deprecation - ); + UrnUtils.getUrn(resourceUrn), Constants.DEPRECATION_ASPECT_NAME, deprecation); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 585fbdf53a2ba..fb88d6c29f662 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -1,15 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; @@ -30,7 +32,6 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; - import com.linkedin.r2.RemoteInvocationException; import java.util.ArrayList; import java.util.Collections; @@ -40,13 +41,8 @@ import java.util.stream.Collectors; 
import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - // TODO: Move to consuming from DomainService. @Slf4j public class DomainUtils { @@ -54,17 +50,20 @@ public class DomainUtils { private static final String HAS_PARENT_DOMAIN_INDEX_FIELD_NAME = "hasParentDomain"; private static final String NAME_INDEX_FIELD_NAME = "name"; - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DomainUtils() { } + private DomainUtils() {} - public static boolean isAuthorizedToUpdateDomainsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -78,8 +77,8 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildSetDomainProposal(domainUrn, resource, actor, entityService)); @@ -88,27 +87,27 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.DOMAINS_ASPECT_NAME, - entityService, - new Domains()); + @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.DOMAINS_ASPECT_NAME, + entityService, + new Domains()); final UrnArray newDomains = new UrnArray(); if (domainUrn != null) { newDomains.add(domainUrn); } domains.setDomains(newDomains); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } public static void validateDomain(Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate Domain with urn %s. 
Urn does not exist.", domainUrn)); } } @@ -119,14 +118,12 @@ private static List<Criterion> buildRootDomainCriteria() { new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("false") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("") - .setCondition(Condition.IS_NULL) - ); + .setCondition(Condition.IS_NULL)); return criteria; } @@ -138,14 +135,12 @@ private static List<Criterion> buildParentDomainCriteria(@Nonnull final Urn pare new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("true") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue(parentDomainUrn.toString()) - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); return criteria; } @@ -158,36 +153,38 @@ private static Criterion buildNameCriterion(@Nonnull final String name) { } /** - * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain criterion. - * The reason for the OR on root is elastic can have a null|false value to represent an root domain in the index. + * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain + * criterion. The reason for the OR on root is elastic can have a null|false value to represent an + * root domain in the index. + * * @param name an optional name to AND in to each condition of the filter * @param parentDomainUrn the parent domain (null means root). * @return the Filter */ - public static Filter buildNameAndParentDomainFilter(@Nullable final String name, @Nullable final Urn parentDomainUrn) { + public static Filter buildNameAndParentDomainFilter( + @Nullable final String name, @Nullable final Urn parentDomainUrn) { if (parentDomainUrn == null) { - return new Filter().setOr( - new ConjunctiveCriterionArray( - buildRootDomainCriteria().stream().map(parentCriterion -> { - final CriterionArray array = new CriterionArray(parentCriterion); - if (name != null) { - array.add(buildNameCriterion(name)); - } - return new ConjunctiveCriterion().setAnd(array); - }).collect(Collectors.toList()) - ) - ); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + buildRootDomainCriteria().stream() + .map( + parentCriterion -> { + final CriterionArray array = new CriterionArray(parentCriterion); + if (name != null) { + array.add(buildNameCriterion(name)); + } + return new ConjunctiveCriterion().setAnd(array); + }) + .collect(Collectors.toList()))); } final CriterionArray andArray = new CriterionArray(buildParentDomainCriteria(parentDomainUrn)); if (name != null) { andArray.add(buildNameCriterion(name)); } - return new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(andArray) - ) - ); + return new Filter() + .setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(andArray))); } public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn) { @@ -196,6 +193,7 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn /** * Check if a domain has any child domains + * * @param domainUrn the URN of the domain to check * @param context query context (includes authorization context to authorize the request) * @param entityClient client used to perform the check @@ -204,18 +202,14 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn public static 
boolean hasChildDomains( @Nonnull final Urn domainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) throws RemoteInvocationException { + @Nonnull final EntityClient entityClient) + throws RemoteInvocationException { Filter parentDomainFilter = buildParentDomainFilter(domainUrn); // Search for entities matching parent domain // Limit count to 1 for existence check - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - parentDomainFilter, - null, - 0, - 1, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication()); return (searchResult.getNumEntities() > 0); } @@ -223,23 +217,18 @@ private static Map<Urn, EntityResponse> getDomainsByNameAndParent( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { final Filter filter = buildNameAndParentDomainFilter(name, parentDomainUrn); - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final Set<Urn> domainUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Set<Urn> domainUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); return entityClient.batchGetV2( DOMAIN_ENTITY_NAME, @@ -255,51 +244,63 @@ public static boolean hasNameConflict( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { - final Map<Urn, EntityResponse> entities = getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); + @Nonnull final EntityClient entityClient) { + final Map<Urn, EntityResponse> entities = + getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); // Even though we searched by name, do one more pass to check the name is unique - return entities.values().stream().anyMatch(entityResponse -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data(); - DomainProperties domainProperties = new DomainProperties(dataMap); - return (domainProperties.hasName() && domainProperties.getName().equals(name)); - } - return false; - }); + return entities.values().stream() + .anyMatch( + entityResponse -> { + if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); + DomainProperties domainProperties = new DomainProperties(dataMap); + return (domainProperties.hasName() && domainProperties.getName().equals(name)); + } + return false; + }); } @Nullable public static Entity getParentDomain( @Nonnull final Urn urn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { - final EntityResponse entityResponse = entityClient.getV2( - DOMAIN_ENTITY_NAME, - urn, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - - if 
(entityResponse != null && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); + final EntityResponse entityResponse = + entityClient.getV2( + DOMAIN_ENTITY_NAME, + urn, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new DomainProperties( + entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve parent domain for entity %s", urn), e); + throw new RuntimeException( + String.format("Failed to retrieve parent domain for entity %s", urn), e); } return null; } /** - * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where moving a domain - * to the root leaves the parentDomain field set but makes hasParentDomain false. This helper makes sure that queries - * to elastic where hasParentDomain=false and parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where + * moving a domain to the root leaves the parentDomain field set but makes hasParentDomain false. + * This helper makes sure that queries to elastic where hasParentDomain=false and + * parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * * @param properties the domain properties aspect * @return the parentDomain or null */ @@ -307,4 +308,4 @@ public static Entity getParentDomain( public static Urn getParentDomainSafely(@Nonnull final DomainProperties properties) { return properties.hasParentDomain() ? 
properties.getParentDomain() : null; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java index 8aa4a8d756bea..15c93904fc3bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java @@ -3,7 +3,6 @@ import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -11,20 +10,22 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EmbedUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private EmbedUtils() { } + private EmbedUtils() {} - public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateEmbedForEntity( + @Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -33,4 +34,4 @@ public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn enti entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 655e5333cb34e..996bd3da120d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.glossary.GlossaryNodeInfo; @@ -15,32 +15,36 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class GlossaryUtils { - private GlossaryUtils() { } + private GlossaryUtils() {} /** - * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the user has global control - * of their Business Glossary to create, edit, move, and delete Terms and Nodes. + * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the + * user has global control of their Business Glossary to create, edit, move, and delete Terms and + * Nodes. */ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** - * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes under a parent Node. - * They can do this with either the global MANAGE_GLOSSARIES privilege, or if they have the MANAGE_GLOSSARY_CHILDREN privilege - * on the relevant parent node in the Glossary. + * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes + * under a parent Node. They can do this with either the global MANAGE_GLOSSARIES privilege, or if + * they have the MANAGE_GLOSSARY_CHILDREN privilege on the relevant parent node in the Glossary. */ - public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, @Nonnull EntityClient entityClient) { + public static boolean canManageChildrenEntities( + @Nonnull QueryContext context, + @Nullable Urn parentNodeUrn, + @Nonnull EntityClient entityClient) { if (canManageGlossaries(context)) { return true; } @@ -48,28 +52,31 @@ public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @ return false; // if no parent node, we must rely on the canManageGlossaries method above } - //Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege - if (hasManagePrivilege(context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { + // Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege + if (hasManagePrivilege( + context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } - //Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no parent associated. + // Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no + // parent associated. 
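// A minimal sketch of the ancestor walk the comment above describes, assuming the
// hasManagePrivilege and getParentUrn helpers that appear elsewhere in this diff;
// the wrapper class and method name below are hypothetical, added only for illustration.
//
// Assumed imports (all present in GlossaryUtils itself):
//   com.linkedin.common.urn.Urn, com.linkedin.datahub.graphql.QueryContext,
//   com.linkedin.entity.client.EntityClient, com.linkedin.metadata.authorization.PoliciesConfig
class AncestorPrivilegeWalkSketch {
  static boolean canManageViaAncestors(
      QueryContext context, Urn parentNodeUrn, EntityClient entityClient) {
    Urn current = parentNodeUrn;
    while (current != null) {
      // MANAGE_ALL_GLOSSARY_CHILDREN on any ancestor grants control of the whole
      // subtree, so the first hit short-circuits the walk.
      if (GlossaryUtils.hasManagePrivilege(
          context, current, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) {
        return true;
      }
      // Climb one level; getParentUrn returns null at the glossary root, ending the loop.
      current = GlossaryUtils.getParentUrn(current, context, entityClient);
    }
    return false;
  }
}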
Urn currentParentNodeUrn = parentNodeUrn; while (currentParentNodeUrn != null) { - if (hasManagePrivilege(context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { + if (hasManagePrivilege( + context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } currentParentNodeUrn = getParentUrn(currentParentNodeUrn, context, entityClient); } return false; - } - public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())) - )); + public static boolean hasManagePrivilege( + @Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -83,13 +90,24 @@ public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullabl * Returns the urn of the parent node for a given Glossary Term. Returns null if it doesn't exist. */ @Nullable - private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getTermParentUrn( + @Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_TERM_ENTITY_NAME, termUrn, - ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { - GlossaryTermInfo termInfo = new GlossaryTermInfo(response.getAspects() - .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + termUrn, + ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { + GlossaryTermInfo termInfo = + new GlossaryTermInfo( + response + .getAspects() + .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) + .getValue() + .data()); return termInfo.getParentNode(); } return null; @@ -102,13 +120,24 @@ private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext * Returns the urn of the parent node for a given Glossary Node. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getNodeParentUrn( + @Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_NODE_ENTITY_NAME, nodeUrn, - ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { - GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(response.getAspects() - .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + nodeUrn, + ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { + GlossaryNodeInfo nodeInfo = + new GlossaryNodeInfo( + response + .getAspects() + .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) + .getValue() + .data()); return nodeInfo.getParentNode(); } return null; @@ -118,17 +147,21 @@ private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext } /** - * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null otherwise. + * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null + * otherwise. */ @Nullable - public static Urn getParentUrn(@Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + public static Urn getParentUrn( + @Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { switch (urn.getEntityType()) { case Constants.GLOSSARY_TERM_ENTITY_NAME: return getTermParentUrn(urn, context, entityClient); case Constants.GLOSSARY_NODE_ENTITY_NAME: return getNodeParentUrn(urn, context, entityClient); default: - log.warn("Tried to get the parent node urn of a non-glossary entity type: {}", urn.getEntityType()); + log.warn( + "Tried to get the parent node urn of a non-glossary entity type: {}", + urn.getEntityType()); return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index a93c7d5b333da..8765b91f65d9d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -13,8 +17,6 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import 
com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.metadata.Constants; @@ -30,53 +32,56 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming GlossaryTermService, TagService. @Slf4j public class LabelUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LabelUtils() { } + private LabelUtils() {} public static void removeTermFromResource( - Urn labelUrn, - Urn resourceUrn, - String subResource, - Urn actor, - EntityService entityService - ) { + Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermIfExists(terms, labelUrn); persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermIfExists(editableFieldInfo.getGlossaryTerms(), labelUrn); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } public static void removeTagsFromResources( - List<Urn> tags, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> tags, List<ResourceRefInput> resources, Urn actor, EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildRemoveTagsProposal(tags, resource, actor, entityService)); @@ -85,11 +90,8 @@ public static void removeTagsFromResources( } public static void addTagsToResources( - List<Urn> tagUrns, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> tagUrns, List<ResourceRefInput> resources, Urn actor, EntityService 
entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildAddTagsProposal(tagUrns, resource, actor, entityService)); @@ -98,11 +100,8 @@ public static void addTagsToResources( } public static void removeTermsFromResources( - List<Urn> termUrns, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> termUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildRemoveTermsProposal(termUrns, resource, actor, entityService)); @@ -111,11 +110,8 @@ public static void removeTermsFromResources( } public static void addTermsToResources( - List<Urn> termUrns, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> termUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildAddTermsProposal(termUrns, resource, actor, entityService)); @@ -128,12 +124,16 @@ public static void addTermsToResource( Urn resourceUrn, String subResource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -144,10 +144,15 @@ public static void addTermsToResource( persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -155,7 +160,12 @@ public static void addTermsToResource( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), labelUrns); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } @@ -181,17 +191,22 @@ private static 
GlossaryTermAssociationArray removeTermIfExists(GlossaryTerms ter return termArray; } - public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTags( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -201,19 +216,23 @@ public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Ur orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateTerms(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTerms( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType() - )) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? 
PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -230,37 +249,56 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { for (Urn urn : labelUrns) { - validateResourceAndLabel(urn, resourceUrn, subResource, subResourceType, labelEntityType, entityService, isRemoving); + validateResourceAndLabel( + urn, + resourceUrn, + subResource, + subResourceType, + labelEntityType, + entityService, + isRemoving); } } - public static void validateLabel(Urn labelUrn, String labelEntityType, EntityService entityService) { + public static void validateLabel( + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn type does not match entity type %s..", - labelUrn, - labelEntityType)); + throw new IllegalArgumentException( + String.format( + "Failed to validate label with urn %s. Urn type does not match entity type %s..", + labelUrn, labelEntityType)); } if (!entityService.exists(labelUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); } } // TODO: Move this out into a separate utilities class. - public static void validateResource(Urn resourceUrn, String subResource, SubResourceType subResourceType, EntityService entityService) { + public static void validateResource( + Urn resourceUrn, + String subResource, + SubResourceType subResourceType, + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); } if ((subResource != null && subResource.length() > 0) || subResourceType != null) { if (subResource == null || subResource.length() == 0) { - throw new IllegalArgumentException(String.format( - "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", resourceUrn, subResourceType)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", + resourceUrn, subResourceType)); } if (subResourceType == null) { - throw new IllegalArgumentException(String.format( - "Failed to updates resource with urn %s. SubResource (%s) provided without a subResourceType.", resourceUrn, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to updates resource with urn %s. 
SubResource (%s) provided without a subResourceType.", + resourceUrn, subResource)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); } @@ -273,8 +311,7 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { if (!isRemoving) { validateLabel(labelUrn, labelEntityType, entityService); } @@ -282,11 +319,8 @@ public static void validateResourceAndLabel( } private static MetadataChangeProposal buildAddTagsProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildAddTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -297,11 +331,8 @@ private static MetadataChangeProposal buildAddTagsProposal( } private static MetadataChangeProposal buildRemoveTagsProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildRemoveTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -312,82 +343,90 @@ private static MetadataChangeProposal buildRemoveTagsProposal( } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } removeTagsIfExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - entityService, - new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + 
entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildAddTagsToEntityProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } addTagsIfNotExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } - private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throws URISyntaxException { + private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) + throws URISyntaxException { if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } @@ -396,7 +435,8 @@ 
private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throw List<Urn> tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -415,11 +455,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throw } private static MetadataChangeProposal buildAddTermsProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity return buildAddTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -430,11 +467,8 @@ private static MetadataChangeProposal buildAddTermsProposal( } private static MetadataChangeProposal buildRemoveTermsProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity return buildRemoveTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -445,14 +479,15 @@ private static MetadataChangeProposal buildRemoveTermsProposal( } private static MetadataChangeProposal buildAddTermsToEntityProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -460,20 +495,23 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal( } addTermsIfNotExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildAddTermsToSubResourceProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = 
getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -481,42 +519,48 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermsIfExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermsIfExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + 
Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static void addTermsIfNotExists(GlossaryTerms terms, List<Urn> termUrns) @@ -547,7 +591,8 @@ private static void addTermsIfNotExists(GlossaryTerms terms, List<Urn> termUrns) } } - private static GlossaryTermAssociationArray removeTermsIfExists(GlossaryTerms terms, List<Urn> termUrns) { + private static GlossaryTermAssociationArray removeTermsIfExists( + GlossaryTerms terms, List<Urn> termUrns) { if (!terms.hasTerms()) { terms.setTerms(new GlossaryTermAssociationArray()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index 9ec0f9b8e6070..b93c72edbcfc5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; @@ -9,59 +12,59 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.entity.EntityUtils; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class LinkUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LinkUtils() { } + private LinkUtils() {} public static void addLink( - String linkUrl, - String linkLabel, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); addLink(institutionalMemoryAspect, linkUrl, linkLabel, actor); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, 
entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } public static void removeLink( - String linkUrl, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); removeLink(institutionalMemoryAspect, linkUrl); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } - private static void addLink(InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { + private static void addLink( + InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { if (!institutionalMemoryAspect.hasElements()) { institutionalMemoryAspect.setElements(new InstitutionalMemoryMetadataArray()); } @@ -90,10 +93,12 @@ private static void removeLink(InstitutionalMemory institutionalMemoryAspect, St } public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -104,21 +109,22 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( - String linkUrl, - Urn resourceUrn, - EntityService entityService - ) { + String linkUrl, Urn resourceUrn, EntityService entityService) { try { new Url(linkUrl); } catch (Exception e) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Expected a corp group urn.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Expected a corp group urn.", + resourceUrn)); } if (!entityService.exists(resourceUrn)) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Resource does not exist.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. 
Resource does not exist.", + resourceUrn)); } return true; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 7233995804423..15c3c14c7b8f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -28,104 +30,124 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming from OwnerService @Slf4j public class OwnerUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static final String SYSTEM_ID = "__system__"; - private OwnerUtils() { } + private OwnerUtils() {} public static void addOwnersToResources( List<OwnerInput> owners, List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildAddOwnersProposal(owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); + changes.add( + buildAddOwnersProposal( + owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } public static void removeOwnersFromResources( - List<Urn> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn, List<ResourceRefInput> resources, + List<Urn> ownerUrns, + Optional<Urn> maybeOwnershipTypeUrn, + List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveOwnersProposal(ownerUrns, maybeOwnershipTypeUrn, UrnUtils.getUrn(resource.getResourceUrn()), - actor, entityService)); + changes.add( + buildRemoveOwnersProposal( + ownerUrns, + maybeOwnershipTypeUrn, + UrnUtils.getUrn(resource.getResourceUrn()), + actor, + entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } - - static MetadataChangeProposal buildAddOwnersProposal(List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, entityService, - new Ownership()); + static MetadataChangeProposal buildAddOwnersProposal( + List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new 
Ownership()); for (OwnerInput input : owners) { - addOwner(ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), UrnUtils.getUrn(input.getOwnershipTypeUrn())); + addOwner( + ownershipAspect, + UrnUtils.getUrn(input.getOwnerUrn()), + input.getType(), + UrnUtils.getUrn(input.getOwnershipTypeUrn())); } - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } public static MetadataChangeProposal buildRemoveOwnersProposal( - List<Urn> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn, Urn resourceUrn, + List<Urn> ownerUrns, + Optional<Urn> maybeOwnershipTypeUrn, + Urn resourceUrn, Urn actor, - EntityService entityService - ) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, - entityService, - new Ownership()); + EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); removeOwnersIfExists(ownershipAspect, ownerUrns, maybeOwnershipTypeUrn); - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } - private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { + private static void addOwner( + Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { if (!ownershipAspect.hasOwners()) { ownershipAspect.setOwners(new OwnerArray()); } final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); - ownerArray.removeIf(owner -> { - // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) + ownerArray.removeIf( + owner -> { + // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(ownershipUrn); - } + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(ownershipUrn); + } - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(ownershipUrn.toString()); - }); + // Fall back to mapping deprecated type to the new ownership entity, if it matches remove + return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(ownershipUrn.toString()); + }); Owner newOwner = new Owner(); // For backwards compatibility we have to always set the deprecated type. // If the type exists we assume it's an old ownership type that we can map to. // Else if it's a net new custom ownership type set old type to CUSTOM. - com.linkedin.common.OwnershipType gmsType = type != null ? 
com.linkedin.common.OwnershipType.valueOf(type.toString()) - : com.linkedin.common.OwnershipType.CUSTOM; + com.linkedin.common.OwnershipType gmsType = + type != null + ? com.linkedin.common.OwnershipType.valueOf(type.toString()) + : com.linkedin.common.OwnershipType.CUSTOM; newOwner.setType(gmsType); newOwner.setTypeUrn(ownershipUrn); @@ -135,8 +157,8 @@ private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipT ownershipAspect.setOwners(ownerArray); } - private static void removeOwnersIfExists(Ownership ownership, List<Urn> ownerUrns, - Optional<Urn> maybeOwnershipTypeUrn) { + private static void removeOwnersIfExists( + Ownership ownership, List<Urn> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn) { if (!ownership.hasOwners()) { ownership.setOwners(new OwnerArray()); } @@ -144,23 +166,26 @@ private static void removeOwnersIfExists(Ownership ownership, List<Urn> ownerUrn OwnerArray ownerArray = ownership.getOwners(); for (Urn ownerUrn : ownerUrns) { if (maybeOwnershipTypeUrn.isPresent()) { - ownerArray.removeIf(owner -> { - // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(maybeOwnershipTypeUrn.get().toString()); - }); + ownerArray.removeIf( + owner -> { + // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) + + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } + + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); + } + + // Fall back to mapping deprecated type to the new ownership entity, if it matches + // remove + return mapOwnershipTypeToEntity( + OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(maybeOwnershipTypeUrn.get().toString()); + }); } else { ownerArray.removeIf(owner -> owner.getOwner().equals(ownerUrn)); } @@ -168,10 +193,12 @@ private static void removeOwnersIfExists(Ownership ownership, List<Urn> ownerUrn } public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -182,10 +209,7 @@ public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, } public static Boolean validateAddOwnerInput( - List<OwnerInput> owners, - Urn resourceUrn, - EntityService entityService - ) { + List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { for (OwnerInput owner : owners) { boolean result = validateAddOwnerInput(owner, resourceUrn, entityService); if (!result) { @@ -196,13 +220,12 @@ 
public static Boolean validateAddOwnerInput( } public static Boolean validateAddOwnerInput( - OwnerInput owner, - Urn resourceUrn, - EntityService entityService - ) { + OwnerInput owner, Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); } validateOwner(owner, entityService); @@ -210,45 +233,55 @@ public static Boolean validateAddOwnerInput( return true; } - public static void validateOwner( - OwnerInput owner, - EntityService entityService - ) { + public static void validateOwner(OwnerInput owner, EntityService entityService) { OwnerEntityType ownerEntityType = owner.getOwnerEntityType(); Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn()); - if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) + && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp group urn, found %s", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp group urn, found %s", + ownerUrn)); } - if (OwnerEntityType.CORP_USER.equals(ownerEntityType) && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_USER.equals(ownerEntityType) + && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp user urn, found %s.", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp user urn, found %s.", + ownerUrn)); } if (!entityService.exists(ownerUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Owner with urn %s does not exist.", ownerUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Owner with urn %s does not exist.", + ownerUrn)); } - if (owner.getOwnershipTypeUrn() != null && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Custom Ownership type with " - + "urn %s does not exist.", owner.getOwnershipTypeUrn())); + if (owner.getOwnershipTypeUrn() != null + && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Custom Ownership type with " + + "urn %s does not exist.", + owner.getOwnershipTypeUrn())); } if (owner.getType() == null && owner.getOwnershipTypeUrn() == null) { - throw new IllegalArgumentException("Failed to change ownership for resource(s). Expected either " - + "type or ownershipTypeUrn to be specified."); + throw new IllegalArgumentException( + "Failed to change ownership for resource(s). 
Expected either " + + "type or ownershipTypeUrn to be specified."); } } - public static Boolean validateRemoveInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateRemoveInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); } return true; } @@ -264,15 +297,17 @@ public static void addCreatorAsOwner( String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name()); if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) { - throw new RuntimeException(String.format("Unknown ownership type urn %s", ownershipTypeUrn)); + throw new RuntimeException( + String.format("Unknown ownership type urn %s", ownershipTypeUrn)); } addOwnersToResources( - ImmutableList.of(new OwnerInput(actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), + ImmutableList.of( + new OwnerInput( + actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), ImmutableList.of(new ResourceRefInput(urn, null, null)), actorUrn, - entityService - ); + entityService); } catch (Exception e) { log.error(String.format("Failed to add creator as owner of tag %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java index f740836694dbe..0dd737d3b2292 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java @@ -1,32 +1,35 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; + import com.linkedin.common.Siblings; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import javax.annotation.Nonnull; public class SiblingsUtils { - private SiblingsUtils() { } + private SiblingsUtils() {} - public static List<Urn> getSiblingUrns(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { - final Siblings siblingAspectOfEntity = (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + public static List<Urn> getSiblingUrns( + @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { + final Siblings siblingAspectOfEntity = + (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { return siblingAspectOfEntity.getSiblings(); } return new ArrayList<>(); } - public static Optional<Urn> getNextSiblingUrn(@Nonnull final List<Urn> siblingUrns, @Nonnull final HashSet<Urn> usedUrns) { - final List<Urn> unusedSiblingUrns = siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); + public static Optional<Urn> getNextSiblingUrn( + @Nonnull final List<Urn> siblingUrns, 
@Nonnull final HashSet<Urn> usedUrns) { + final List<Urn> unusedSiblingUrns = + siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); return unusedSiblingUrns.stream().findFirst(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java index c0fe697c6654c..abc479ed18ebf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -10,8 +17,6 @@ import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -30,22 +35,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for reporting Asset Operations - */ +/** Resolver used for reporting Asset Operations */ @Slf4j @RequiredArgsConstructor public class ReportOperationResolver implements DataFetcher<CompletableFuture<Boolean>> { - private static final List<String> SUPPORTED_ENTITY_TYPES = ImmutableList.of( - DATASET_ENTITY_NAME - ); + private static final List<String> SUPPORTED_ENTITY_TYPES = ImmutableList.of(DATASET_ENTITY_NAME); private final EntityClient _entityClient; @@ -53,32 +48,36 @@ public class ReportOperationResolver implements DataFetcher<CompletableFuture<Bo public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final ReportOperationInput input = bindArgument(environment.getArgument("input"), ReportOperationInput.class); - - return CompletableFuture.supplyAsync(() -> { - - Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - - if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - validateInput(entityUrn, input); - - try { - // Create an MCP to emit the operation - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, OPERATION_ASPECT_NAME, - mapOperation(input, context)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to report operation. {}", e.getMessage()); - throw new RuntimeException("Failed to report operation", e); - } - }); + final ReportOperationInput input = + bindArgument(environment.getArgument("input"), ReportOperationInput.class); + + return CompletableFuture.supplyAsync( + () -> { + Urn entityUrn = UrnUtils.getUrn(input.getUrn()); + + if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + validateInput(entityUrn, input); + + try { + // Create an MCP to emit the operation + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + entityUrn, OPERATION_ASPECT_NAME, mapOperation(input, context)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error("Failed to report operation. {}", e.getMessage()); + throw new RuntimeException("Failed to report operation", e); + } + }); } - private Operation mapOperation(final ReportOperationInput input, final QueryContext context) throws URISyntaxException { + private Operation mapOperation(final ReportOperationInput input, final QueryContext context) + throws URISyntaxException { final Operation result = new Operation(); result.setActor(UrnUtils.getUrn(context.getActorUrn())); @@ -86,13 +85,17 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont result.setCustomOperationType(input.getCustomOperationType(), SetMode.IGNORE_NULL); result.setNumAffectedRows(input.getNumAffectedRows(), SetMode.IGNORE_NULL); - long timestampMillis = input.getTimestampMillis() != null ? input.getTimestampMillis() : System.currentTimeMillis(); + long timestampMillis = + input.getTimestampMillis() != null + ? 
input.getTimestampMillis() + : System.currentTimeMillis(); result.setLastUpdatedTimestamp(timestampMillis); result.setTimestampMillis(timestampMillis); result.setSourceType(OperationSourceType.valueOf(input.getSourceType().toString())); if (input.getPartition() != null) { - result.setPartitionSpec(new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); + result.setPartitionSpec( + new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); } if (input.getCustomProperties() != null) { @@ -102,7 +105,8 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont return result; } - private StringMap mapCustomProperties(final List<StringMapEntryInput> properties) throws URISyntaxException { + private StringMap mapCustomProperties(final List<StringMapEntryInput> properties) + throws URISyntaxException { final StringMap result = new StringMap(); for (StringMapEntryInput entry : properties) { result.put(entry.getKey(), entry.getValue()); @@ -113,16 +117,21 @@ private StringMap mapCustomProperties(final List<StringMapEntryInput> properties private void validateInput(final Urn entityUrn, final ReportOperationInput input) { if (!SUPPORTED_ENTITY_TYPES.contains(entityUrn.getEntityType())) { throw new DataHubGraphQLException( - String.format("Unable to report operation. Invalid entity type %s provided.", entityUrn.getEntityType()), + String.format( + "Unable to report operation. Invalid entity type %s provided.", + entityUrn.getEntityType()), DataHubGraphQLErrorCode.BAD_REQUEST); } } - private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToReportOperationForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -131,4 +140,4 @@ private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, resourceUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java index 4cfe58072aae9..a0cffa5eca44c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; +import 
com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.metadata.service.OwnershipTypeService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -16,17 +18,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class CreateOwnershipTypeResolver implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> { +public class CreateOwnershipTypeResolver + implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateOwnershipTypeInput input = bindArgument(environment.getArgument("input"), CreateOwnershipTypeInput.class); @@ -36,19 +37,25 @@ public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment enviro "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = _ownershipTypeService.createOwnershipType(input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - return createOwnershipType(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = + _ownershipTypeService.createOwnershipType( + input.getName(), + input.getDescription(), + context.getAuthentication(), + System.currentTimeMillis()); + return createOwnershipType(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } - private OwnershipTypeEntity createOwnershipType(@Nonnull final Urn urn, - @Nonnull final CreateOwnershipTypeInput input) { + private OwnershipTypeEntity createOwnershipType( + @Nonnull final Urn urn, @Nonnull final CreateOwnershipTypeInput input) { return OwnershipTypeEntity.builder() .setUrn(urn.toString()) .setType(EntityType.CUSTOM_OWNERSHIP_TYPE) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java index 87cf70193d7fd..c5bb58a7d4b2e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java @@ -12,7 +12,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteOwnershipTypeResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,21 +25,26 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw final Urn urn = UrnUtils.getUrn(ownershipTypeUrn); // By default, delete references final 
boolean deleteReferences = - environment.getArgument("deleteReferences") == null ? true : environment.getArgument("deleteReferences"); + environment.getArgument("deleteReferences") == null + ? true + : environment.getArgument("deleteReferences"); if (!AuthorizationUtils.canManageOwnershipTypes(context)) { throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.deleteOwnershipType(urn, deleteReferences, context.getAuthentication()); - log.info(String.format("Successfully deleted ownership type %s with urn", urn)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _ownershipTypeService.deleteOwnershipType( + urn, deleteReferences, context.getAuthentication()); + log.info(String.format("Successfully deleted ownership type %s with urn", urn)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 70441815f0a74..1c8f43a490173 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -24,18 +26,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListOwnershipTypesResolver implements - DataFetcher<CompletableFuture<ListOwnershipTypesResult>> { +public class ListOwnershipTypesResolver + implements DataFetcher<CompletableFuture<ListOwnershipTypesResult>> { private static final String CREATED_AT_FIELD = "createdAt"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,43 +42,47 @@ public class 
ListOwnershipTypesResolver implements private final EntityClient _entityClient; @Override - public CompletableFuture<ListOwnershipTypesResult> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListOwnershipTypesResult> get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListOwnershipTypesInput input = bindArgument(environment.getArgument("input"), - ListOwnershipTypesInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List<FacetFilterInput> filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - + final ListOwnershipTypesInput input = + bindArgument(environment.getArgument("input"), ListOwnershipTypesInput.class); - try { + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final List<FacetFilterInput> filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - final SearchResult gmsResult = _entityClient.search( - Constants.OWNERSHIP_TYPE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { - final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setOwnershipTypes(mapUnresolvedOwnershipTypes(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list custom ownership types", e); - } + final SearchResult gmsResult = + _entityClient.search( + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + DEFAULT_SORT_CRITERION, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - }); + final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setOwnershipTypes( + mapUnresolvedOwnershipTypes( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list custom ownership types", e); + } + }); } private List<OwnershipTypeEntity> mapUnresolvedOwnershipTypes(List<Urn> entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java index 43fd249304397..839121a295d9a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,17 +19,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class UpdateOwnershipTypeResolver implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> { +public class UpdateOwnershipTypeResolver + implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final UpdateOwnershipTypeInput input = @@ -39,27 +40,35 @@ public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment enviro "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.updateOwnershipType(urn, input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); - return getOwnershipType(urn, context.getAuthentication()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _ownershipTypeService.updateOwnershipType( + urn, + input.getName(), + input.getDescription(), + context.getAuthentication(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); + return getOwnershipType(urn, context.getAuthentication()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }); } - private OwnershipTypeEntity getOwnershipType(@Nonnull final Urn urn, - @Nonnull final Authentication authentication) { - final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); + private OwnershipTypeEntity getOwnershipType( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + final EntityResponse maybeResponse = + _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", + String.format( + "Failed to perform update to Ownership Type with urn %s. 
Failed to find Ownership Type in GMS.", urn)); } return OwnershipTypeMapper.map(maybeResponse); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java index 485d40e60547e..567745b894ca9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Resolver responsible for hard deleting a particular DataHub access control policy. - */ +/** Resolver responsible for hard deleting a particular DataHub access control policy. */ public class DeletePolicyResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; @@ -27,18 +24,24 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final String policyUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(policyUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return policyUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against policy with urn %s", policyUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return policyUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against policy with urn %s", policyUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 11f7793db82c8..3328eff2bdf45 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.EntitySpec; @@ -14,17 +16,15 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - /** - * Resolver to support the getGrantedPrivileges end point - * Fetches all privileges that are granted for the given actor for the given resource (optional) + * Resolver to support the getGrantedPrivileges end point Fetches all privileges that are granted + * for the given actor for the given resource (optional) */ public class GetGrantedPrivilegesResolver implements DataFetcher<CompletableFuture<Privileges>> { @Override - public CompletableFuture<Privileges> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Privileges> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final GetGrantedPrivilegesInput input = @@ -33,22 +33,27 @@ public CompletableFuture<Privileges> get(final DataFetchingEnvironment environme if (!isAuthorized(context, actor)) { throw new AuthorizationException("Unauthorized to get privileges for the given author."); } - final Optional<EntitySpec> resourceSpec = Optional.ofNullable(input.getResourceSpec()) - .map(spec -> new EntitySpec(EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); + final Optional<EntitySpec> resourceSpec = + Optional.ofNullable(input.getResourceSpec()) + .map( + spec -> + new EntitySpec( + EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); if (context.getAuthorizer() instanceof AuthorizerChain) { - DataHubAuthorizer dataHubAuthorizer = ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); + DataHubAuthorizer dataHubAuthorizer = + ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); List<String> privileges = dataHubAuthorizer.getGrantedPrivileges(actor, resourceSpec); - return CompletableFuture.supplyAsync(() -> Privileges.builder() - .setPrivileges(privileges) - .build()); + return CompletableFuture.supplyAsync( + () -> Privileges.builder().setPrivileges(privileges).build()); } throw new UnsupportedOperationException( - String.format("GetGrantedPrivileges function is not supported on authorizer of type %s", + String.format( + "GetGrantedPrivileges function is not supported on authorizer of type %s", context.getAuthorizer().getClass().getSimpleName())); } private boolean isAuthorized(final QueryContext context, final String actor) { return actor.equals(context.getActorUrn()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index b44da1c2f832c..87832b8c3aa40 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.PolicyFetcher; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - public class ListPoliciesResolver implements DataFetcher<CompletableFuture<ListPoliciesResult>> { private static final Integer DEFAULT_START = 0; @@ -30,18 +29,22 @@ public ListPoliciesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (PolicyAuthUtils.canManagePolicies(context)) { - final ListPoliciesInput input = bindArgument(environment.getArgument("input"), ListPoliciesInput.class); + final ListPoliciesInput input = + bindArgument(environment.getArgument("input"), ListPoliciesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return _policyFetcher.fetchPolicies(start, query, count, context.getAuthentication()) - .thenApply(policyFetchResult -> { + return _policyFetcher + .fetchPolicies(start, query, count, context.getAuthentication()) + .thenApply( + policyFetchResult -> { final ListPoliciesResult result = new ListPoliciesResult(); result.setStart(start); result.setCount(count); @@ -50,14 +53,18 @@ public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment e return result; }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private List<Policy> mapEntities(final List<PolicyFetcher.Policy> policies) { - return policies.stream().map(policy -> { - Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); - mappedPolicy.setUrn(policy.getUrn().toString()); - return mappedPolicy; - }).collect(Collectors.toList()); + return policies.stream() + .map( + policy -> { + Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); + mappedPolicy.setUrn(policy.getUrn().toString()); + return mappedPolicy; + }) + .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java index dcc5d1fd23302..d0446d218dac6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class PolicyAuthUtils { static boolean canManagePolicies(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), + authorizer); } - private PolicyAuthUtils() { } + private PolicyAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java index 6dcc143a1a3af..dcdf78ebc15bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.AuthorizerChain; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,10 +19,6 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - public class UpsertPolicyResolver implements DataFetcher<CompletableFuture<String>> { private static final String POLICY_ENTITY_NAME = "dataHubPolicy"; @@ -38,7 +37,8 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final Optional<String> policyUrn = Optional.ofNullable(environment.getArgument("urn")); - final PolicyUpdateInput input = 
bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); + final PolicyUpdateInput input = + bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); // Finally, create the MetadataChangeProposal. final MetadataChangeProposal proposal; @@ -48,7 +48,9 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (policyUrn.isPresent()) { // Update existing policy - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); } else { // Create new policy // Since we are creating a new Policy, we need to generate a unique UUID. @@ -58,21 +60,29 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) // Create the Policy key. final DataHubPolicyKey key = new DataHubPolicyKey(); key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithKey( + key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); } - return CompletableFuture.supplyAsync(() -> { - try { - String urn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return urn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String urn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return urn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java index b9a6bf07be8c8..a350fb91f9d3b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.Policy; import com.linkedin.datahub.graphql.generated.PolicyMatchCondition; import com.linkedin.datahub.graphql.generated.PolicyMatchCriterion; @@ -9,7 +10,6 @@ import com.linkedin.datahub.graphql.generated.PolicyMatchFilter; import com.linkedin.datahub.graphql.generated.PolicyState; import com.linkedin.datahub.graphql.generated.PolicyType; -import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.ResourceFilter; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -20,9 +20,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link com.linkedin.datahub.graphql.generated.Policy}. + * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link + * com.linkedin.datahub.graphql.generated.Policy}. */ public class PolicyInfoPolicyMapper implements ModelMapper<DataHubPolicyInfo, Policy> { @@ -56,16 +56,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { result.setResourceOwners(actorFilter.isResourceOwners()); UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -87,14 +91,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - 
.build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(this::mapValue) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -102,7 +112,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java index cb323b60dd465..d82d71295d41b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java @@ -19,11 +19,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -/** - * Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. - */ -public class PolicyUpdateInputInfoMapper implements ModelMapper<PolicyUpdateInput, DataHubPolicyInfo> { +/** Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. 
*/ +public class PolicyUpdateInputInfoMapper + implements ModelMapper<PolicyUpdateInput, DataHubPolicyInfo> { public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper(); @@ -52,13 +50,21 @@ private DataHubActorFilter mapActors(final ActorFilterInput actorInput) { result.setAllUsers(actorInput.getAllUsers()); result.setResourceOwners(actorInput.getResourceOwners()); if (actorInput.getResourceOwnersTypes() != null) { - result.setResourceOwnersTypes(new UrnArray(actorInput.getResourceOwnersTypes().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setResourceOwnersTypes( + new UrnArray( + actorInput.getResourceOwnersTypes().stream() + .map(this::createUrn) + .collect(Collectors.toList()))); } if (actorInput.getGroups() != null) { - result.setGroups(new UrnArray(actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setGroups( + new UrnArray( + actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); } if (actorInput.getUsers() != null) { - result.setUsers(new UrnArray(actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setUsers( + new UrnArray( + actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); } return result; } @@ -83,19 +89,26 @@ private DataHubResourceFilter mapResources(final ResourceFilterInput resourceInp } private PolicyMatchFilter mapFilter(final PolicyMatchFilterInput filter) { - return new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray(filter.getCriteria() - .stream() - .map(criterion -> new PolicyMatchCriterion().setField(criterion.getField()) - .setValues(new StringArray(criterion.getValues())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name()))) - .collect(Collectors.toList()))); + return new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + filter.getCriteria().stream() + .map( + criterion -> + new PolicyMatchCriterion() + .setField(criterion.getField()) + .setValues(new StringArray(criterion.getValues())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name()))) + .collect(Collectors.toList()))); } private Urn createUrn(String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urnStr %s into an URN object", urnStr), e); + throw new RuntimeException( + String.format("Failed to convert urnStr %s into an URN object", urnStr), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java index 524caf14e9afe..8e0ee335e09f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -18,16 +20,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class 
CreatePostResolver implements DataFetcher<CompletableFuture<Boolean>> { private final PostService _postService; @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { @@ -35,7 +35,8 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) "Unauthorized to create posts. Please contact your DataHub administrator if this needs corrective action."); } - final CreatePostInput input = bindArgument(environment.getArgument("input"), CreatePostInput.class); + final CreatePostInput input = + bindArgument(environment.getArgument("input"), CreatePostInput.class); final PostType type = input.getPostType(); final UpdatePostContentInput content = input.getContent(); final PostContentType contentType = content.getContentType(); @@ -45,16 +46,21 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) final UpdateMediaInput updateMediaInput = content.getMedia(); final Authentication authentication = context.getAuthentication(); - Media media = updateMediaInput == null ? null - : _postService.mapMedia(updateMediaInput.getType().toString(), updateMediaInput.getLocation()); - PostContent postContent = _postService.mapPostContent(contentType.toString(), title, description, link, media); - - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.createPost(type.toString(), postContent, authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create a new post", e); - } - }); + Media media = + updateMediaInput == null + ? 
null
+            : _postService.mapMedia(
+                updateMediaInput.getType().toString(), updateMediaInput.getLocation());
+    PostContent postContent =
+        _postService.mapPostContent(contentType.toString(), title, description, link, media);
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            return _postService.createPost(type.toString(), postContent, authentication);
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to create a new post", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java
index d3cd0126fb852..7ab5d1381a1b3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java
@@ -13,14 +13,14 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class DeletePostResolver implements DataFetcher<CompletableFuture<Boolean>> {
   private final PostService _postService;
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
 
     if (!AuthorizationUtils.canManageGlobalAnnouncements(context)) {
@@ -31,12 +31,13 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
     final Urn postUrn = UrnUtils.getUrn(environment.getArgument("urn"));
     final Authentication authentication = context.getAuthentication();
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        return _postService.deletePost(postUrn, authentication);
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to create a new post", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            return _postService.deletePost(postUrn, authentication);
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to delete post", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java
index 59f2b458fdc90..5292adbe3aac3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.post;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -22,10 +25,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class ListPostsResolver implements DataFetcher<CompletableFuture<ListPostsResult>> {
@@ -36,38 +35,58 @@ public class ListPostsResolver implements DataFetcher<CompletableFuture<ListPost
   private final EntityClient _entityClient;
 
   @Override
-  public CompletableFuture<ListPostsResult> 
get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListPostsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final ListPostsInput input = bindArgument(environment.getArgument("input"), ListPostsInput.class); + final ListPostsInput input = + bindArgument(environment.getArgument("input"), ListPostsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(LAST_MODIFIED_FIELD_NAME).setOrder(SortOrder.DESCENDING); + return CompletableFuture.supplyAsync( + () -> { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); - // First, get all Post Urns. - final SearchResult gmsResult = _entityClient.search(POST_ENTITY_NAME, query, null, sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + // First, get all Post Urns. + final SearchResult gmsResult = + _entityClient.search( + POST_ENTITY_NAME, + query, + null, + sortCriterion, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all Posts. - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(POST_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, authentication); + // Then, get and hydrate all Posts. 
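// A sketch (not part of this patch) of the two-step read path the list
// resolvers here share: search for matching URNs first, then hydrate them with
// a single batch call, as the comment above describes. JDK-only; `fetchBatch`
// is a hypothetical stand-in for EntityClient.batchGetV2, not a real API.
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

final class SearchThenHydrate {
  static <T> List<T> hydrateInOrder(
      List<String> searchHits, Function<Set<String>, Map<String, T>> fetchBatch) {
    // De-duplicate while preserving search order, then resolve in one round trip.
    Set<String> urns = new LinkedHashSet<>(searchHits);
    Map<String, T> hydrated = fetchBatch.apply(urns);
    // Missing URNs map to null here; the real resolver simply maps over values().
    return searchHits.stream().map(hydrated::get).collect(Collectors.toList());
  }
}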
+ final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + POST_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + authentication); - final ListPostsResult result = new ListPostsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setPosts(entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list posts", e); - } - }); + final ListPostsResult result = new ListPostsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setPosts( + entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list posts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java index 27de443bc100a..48f31fb75d371 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -7,8 +9,8 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateQueryInput; -import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.CreateQuerySubjectInput; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.types.query.QueryMapper; import com.linkedin.metadata.service.QueryService; import com.linkedin.query.QueryLanguage; @@ -22,9 +24,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateQueryResolver implements DataFetcher<CompletableFuture<QueryEntity>> { @@ -32,40 +31,49 @@ public class CreateQueryResolver implements DataFetcher<CompletableFuture<QueryE private final QueryService _queryService; @Override - public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateQueryInput input = bindArgument(environment.getArgument("input"), CreateQueryInput.class); + final CreateQueryInput input = + bindArgument(environment.getArgument("input"), CreateQueryInput.class); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!AuthorizationUtils.canCreateQuery(input.getSubjects() - .stream() - .map(CreateQuerySubjectInput::getDatasetUrn).map(UrnUtils::getUrn) - 
.collect(Collectors.toList()), context)) { - throw new AuthorizationException( - "Unauthorized to create Query. Please contact your DataHub administrator for more information."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateQuery( + input.getSubjects().stream() + .map(CreateQuerySubjectInput::getDatasetUrn) + .map(UrnUtils::getUrn) + .collect(Collectors.toList()), + context)) { + throw new AuthorizationException( + "Unauthorized to create Query. Please contact your DataHub administrator for more information."); + } - try { - final Urn queryUrn = _queryService.createQuery( - input.getProperties().getName(), - input.getProperties().getDescription(), - QuerySource.MANUAL, - new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())), - input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()), - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new Query from input %s", input), e); - } - }); + try { + final Urn queryUrn = + _queryService.createQuery( + input.getProperties().getName(), + input.getProperties().getDescription(), + QuerySource.MANUAL, + new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())), + input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()), + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java index 5c5bb288f32bf..4f5887c91b494 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java @@ -18,7 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteQueryResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,29 +25,34 @@ public class DeleteQueryResolver implements DataFetcher<CompletableFuture<Boolea private final QueryService _queryService; @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); - final List<Urn> subjectUrns = 
existingSubjects != null - ? existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()) - : Collections.emptyList(); - - if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); - } - - try { - _queryService.deleteQuery(queryUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Query", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); + final List<Urn> subjectUrns = + existingSubjects != null + ? existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()) + : Collections.emptyList(); + + if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); + } + + try { + _queryService.deleteQuery(queryUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Query", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index c7e70cac15bdb..fec5bb120eeba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -29,10 +32,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListQueriesResolver implements DataFetcher<CompletableFuture<ListQueriesResult>> { @@ -48,38 +47,52 @@ public class ListQueriesResolver implements DataFetcher<CompletableFuture<ListQu private final EntityClient _entityClient; @Override - public CompletableFuture<ListQueriesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListQueriesResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListQueriesInput input = bindArgument(environment.getArgument("input"), ListQueriesInput.class); + final ListQueriesInput input = + bindArgument(environment.getArgument("input"), ListQueriesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery();
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final SortCriterion sortCriterion =
-            new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING);
-
-        // First, get all Query Urns.
-        final SearchResult gmsResult = _entityClient.search(QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true).setSkipHighlighting(true));
-
-        final ListQueriesResult result = new ListQueriesResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setQueries(mapUnresolvedQueries(gmsResult.getEntities().stream()
-            .map(SearchEntity::getEntity)
-            .collect(Collectors.toList())));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list Queries", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final SortCriterion sortCriterion =
+                new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING);
+
+            // First, get all Query Urns.
+            final SearchResult gmsResult =
+                _entityClient.search(
+                    QUERY_ENTITY_NAME,
+                    query,
+                    buildFilters(input),
+                    sortCriterion,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true).setSkipHighlighting(true));
+
+            final ListQueriesResult result = new ListQueriesResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setQueries(
+                mapUnresolvedQueries(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list Queries", e);
+          }
+        });
   }
 
-  // This method maps urns returned from the list endpoint into Partial Query objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial Query objects which will be
+  // resolved by a separate Batch resolver.
   private List<QueryEntity> mapUnresolvedQueries(final List<Urn> queryUrns) {
     final List<QueryEntity> results = new ArrayList<>();
     for (final Urn urn : queryUrns) {
@@ -99,13 +112,23 @@ private Filter buildFilters(@Nonnull final ListQueriesInput input) {
     // Optionally add a source filter.
     if (input.getSource() != null) {
       andConditions.add(
-          new FacetFilterInput(QUERY_SOURCE_FIELD, null, ImmutableList.of(input.getSource().toString()), false, FilterOperator.EQUAL));
+          new FacetFilterInput(
+              QUERY_SOURCE_FIELD,
+              null,
+              ImmutableList.of(input.getSource().toString()),
+              false,
+              FilterOperator.EQUAL));
     }
 
     // Optionally add an entity type filter.
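// A sketch (not part of this patch) of the conditional AND-criteria pattern in
// buildFilters(...) above: each optional input contributes one equality
// criterion only when present. `Criterion` is an illustrative record standing
// in for FacetFilterInput; the field names here are assumptions.
import java.util.ArrayList;
import java.util.List;

final class ConditionalFilters {
  record Criterion(String field, String value) {}

  static List<Criterion> buildAndConditions(String source, String datasetUrn) {
    final List<Criterion> andConditions = new ArrayList<>();
    if (source != null) {
      andConditions.add(new Criterion("source", source));
    }
    if (datasetUrn != null) {
      andConditions.add(new Criterion("entities", datasetUrn));
    }
    return andConditions;
  }
}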
if (input.getDatasetUrn() != null) { andConditions.add( - new FacetFilterInput(QUERY_ENTITIES_FIELD, null, ImmutableList.of(input.getDatasetUrn()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_ENTITIES_FIELD, + null, + ImmutableList.of(input.getDatasetUrn()), + false, + FilterOperator.EQUAL)); } criteria.setAnd(andConditions); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index ef34e91d8fe77..cc284aaf7b563 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,9 +28,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class UpdateQueryResolver implements DataFetcher<CompletableFuture<QueryEntity>> { @@ -36,60 +35,72 @@ public class UpdateQueryResolver implements DataFetcher<CompletableFuture<QueryE private final QueryService _queryService; @Override - public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateQueryInput input = bindArgument(environment.getArgument("input"), UpdateQueryInput.class); + final UpdateQueryInput input = + bindArgument(environment.getArgument("input"), UpdateQueryInput.class); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); - if (existingSubjects == null) { - // No Query Found - throw new DataHubGraphQLException(String.format("Failed to find query with urn %s", queryUrn), DataHubGraphQLErrorCode.NOT_FOUND); - } + if (existingSubjects == null) { + // No Query Found + throw new DataHubGraphQLException( + String.format("Failed to find query with urn %s", queryUrn), + DataHubGraphQLErrorCode.NOT_FOUND); + } - final List<Urn> subjectUrns = existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()); - final List<Urn> newSubjectUrns = input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) - .collect(Collectors.toList()) - : Collections.emptyList(); - final List<Urn> impactedSubjectUrns = new ArrayList<>(); - impactedSubjectUrns.addAll(subjectUrns); - impactedSubjectUrns.addAll(newSubjectUrns); + final List<Urn> subjectUrns = + existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()); + final List<Urn> newSubjectUrns = + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) + .collect(Collectors.toList()) + : Collections.emptyList(); + final List<Urn> impactedSubjectUrns = new ArrayList<>(); + impactedSubjectUrns.addAll(subjectUrns); + impactedSubjectUrns.addAll(newSubjectUrns); - if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); - } + if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); + } - try { - _queryService.updateQuery( - queryUrn, - input.getProperties() != null ? input.getProperties().getName() : null, - input.getProperties() != null ? input.getProperties().getDescription() : null, - input.getProperties() != null && input.getProperties().getStatement() != null - ? new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())) - : null, - input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()) - : null, - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Query from input %s", input), e); - } - }); + try { + _queryService.updateQuery( + queryUrn, + input.getProperties() != null ? input.getProperties().getName() : null, + input.getProperties() != null ? input.getProperties().getDescription() : null, + input.getProperties() != null && input.getProperties().getStatement() != null + ? new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())) + : null, + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()) + : null, + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index df1a6d4d4b00d..ca1e01b45989d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.recommendation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ContentParams; @@ -31,12 +33,10 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListRecommendationsResolver implements DataFetcher<CompletableFuture<ListRecommendationsResult>> { +public class ListRecommendationsResolver + implements DataFetcher<CompletableFuture<ListRecommendationsResult>> { private static final ListRecommendationsResult EMPTY_RECOMMENDATIONS = new ListRecommendationsResult(Collections.emptyList()); @@ -49,24 +49,28 @@ public CompletableFuture<ListRecommendationsResult> get(DataFetchingEnvironment final ListRecommendationsInput input = bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Listing recommendations for input {}", input); - List<com.linkedin.metadata.recommendation.RecommendationModule> modules = - _recommendationsService.listRecommendations(Urn.createFromString(input.getUserUrn()), - mapRequestContext(input.getRequestContext()), input.getLimit()); - return ListRecommendationsResult.builder() - .setModules(modules.stream() - .map(this::mapRecommendationModule) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList())) - .build(); - } catch (Exception e) { - log.error("Failed to get recommendations for input {}", input, e); - return EMPTY_RECOMMENDATIONS; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug("Listing recommendations for input {}", input); + List<com.linkedin.metadata.recommendation.RecommendationModule> modules = + _recommendationsService.listRecommendations( + Urn.createFromString(input.getUserUrn()), + mapRequestContext(input.getRequestContext()), + input.getLimit()); + return ListRecommendationsResult.builder() + .setModules( + modules.stream() + .map(this::mapRecommendationModule) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList())) + .build(); + } catch (Exception e) { + log.error("Failed to get recommendations for input {}", input, e); + return EMPTY_RECOMMENDATIONS; + } + }); } private com.linkedin.metadata.recommendation.RecommendationRequestContext 
mapRequestContext( @@ -74,22 +78,24 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq com.linkedin.metadata.recommendation.ScenarioType mappedScenarioType; try { mappedScenarioType = - com.linkedin.metadata.recommendation.ScenarioType.valueOf(requestContext.getScenario().toString()); + com.linkedin.metadata.recommendation.ScenarioType.valueOf( + requestContext.getScenario().toString()); } catch (IllegalArgumentException e) { log.error("Failed to map scenario type: {}", requestContext.getScenario(), e); throw e; } com.linkedin.metadata.recommendation.RecommendationRequestContext mappedRequestContext = - new com.linkedin.metadata.recommendation.RecommendationRequestContext().setScenario(mappedScenarioType); + new com.linkedin.metadata.recommendation.RecommendationRequestContext() + .setScenario(mappedScenarioType); if (requestContext.getSearchRequestContext() != null) { SearchRequestContext searchRequestContext = new SearchRequestContext().setQuery(requestContext.getSearchRequestContext().getQuery()); if (requestContext.getSearchRequestContext().getFilters() != null) { - searchRequestContext.setFilters(new CriterionArray(requestContext.getSearchRequestContext() - .getFilters() - .stream() - .map(facetField -> criterionFromFilter(facetField)) - .collect(Collectors.toList()))); + searchRequestContext.setFilters( + new CriterionArray( + requestContext.getSearchRequestContext().getFilters().stream() + .map(facetField -> criterionFromFilter(facetField)) + .collect(Collectors.toList()))); } mappedRequestContext.setSearchRequestContext(searchRequestContext); } @@ -98,12 +104,17 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq try { entityUrn = Urn.createFromString(requestContext.getEntityRequestContext().getUrn()); } catch (URISyntaxException e) { - log.error("Malformed URN while mapping recommendations request: {}", - requestContext.getEntityRequestContext().getUrn(), e); + log.error( + "Malformed URN while mapping recommendations request: {}", + requestContext.getEntityRequestContext().getUrn(), + e); throw new IllegalArgumentException(e); } - EntityRequestContext entityRequestContext = new EntityRequestContext().setUrn(entityUrn) - .setType(EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); + EntityRequestContext entityRequestContext = + new EntityRequestContext() + .setUrn(entityUrn) + .setType( + EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); mappedRequestContext.setEntityRequestContext(entityRequestContext); } return mappedRequestContext; @@ -115,13 +126,16 @@ private Optional<RecommendationModule> mapRecommendationModule( mappedModule.setTitle(module.getTitle()); mappedModule.setModuleId(module.getModuleId()); try { - mappedModule.setRenderType(RecommendationRenderType.valueOf(module.getRenderType().toString())); + mappedModule.setRenderType( + RecommendationRenderType.valueOf(module.getRenderType().toString())); } catch (IllegalArgumentException e) { log.error("Failed to map render type: {}", module.getRenderType(), e); throw e; } mappedModule.setContent( - module.getContent().stream().map(this::mapRecommendationContent).collect(Collectors.toList())); + module.getContent().stream() + .map(this::mapRecommendationContent) + .collect(Collectors.toList())); return Optional.of(mappedModule); } @@ -145,26 +159,31 @@ private RecommendationParams mapRecommendationParams( SearchParams searchParams = new SearchParams(); 
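// A sketch (not part of this patch) of the enum bridging that
// mapRequestContext(...) and mapRecommendationModule(...) rely on: parallel
// GraphQL and metadata enums are converted by name, and valueOf is allowed to
// throw when the two drift apart. The enums below are stand-ins, not the real
// ScenarioType or RecommendationRenderType classes.
final class EnumBridge {
  enum GraphQlScenario { HOME, SEARCH_RESULTS, ENTITY_PROFILE }
  enum MetadataScenario { HOME, SEARCH_RESULTS, ENTITY_PROFILE }

  static MetadataScenario toMetadata(GraphQlScenario scenario) {
    // Throws IllegalArgumentException when no constant of the same name exists;
    // the resolvers log that error and rethrow rather than swallowing it.
    return MetadataScenario.valueOf(scenario.name());
  }
}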
searchParams.setQuery(params.getSearchParams().getQuery()); if (!params.getSearchParams().getFilters().isEmpty()) { - searchParams.setFilters(params.getSearchParams() - .getFilters() - .stream() - .map(criterion -> FacetFilter.builder().setField(criterion.getField()).setValues( - ImmutableList.of(criterion.getValue())).build()) - .collect(Collectors.toList())); + searchParams.setFilters( + params.getSearchParams().getFilters().stream() + .map( + criterion -> + FacetFilter.builder() + .setField(criterion.getField()) + .setValues(ImmutableList.of(criterion.getValue())) + .build()) + .collect(Collectors.toList())); } mappedParams.setSearchParams(searchParams); } if (params.hasEntityProfileParams()) { Urn profileUrn = params.getEntityProfileParams().getUrn(); - mappedParams.setEntityProfileParams(EntityProfileParams.builder() - .setUrn(profileUrn.toString()) - .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) - .build()); + mappedParams.setEntityProfileParams( + EntityProfileParams.builder() + .setUrn(profileUrn.toString()) + .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) + .build()); } if (params.hasContentParams()) { - mappedParams.setContentParams(ContentParams.builder().setCount(params.getContentParams().getCount()).build()); + mappedParams.setContentParams( + ContentParams.builder().setCount(params.getContentParams().getCount()).build()); } return mappedParams; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java index 43d975344ba25..a71da7821f09c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.datahub.authorization.role.RoleService; @@ -13,11 +15,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j - @RequiredArgsConstructor public class AcceptRoleResolver implements DataFetcher<CompletableFuture<Boolean>> { private final RoleService _roleService; @@ -27,25 +25,32 @@ public class AcceptRoleResolver implements DataFetcher<CompletableFuture<Boolean public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final AcceptRoleInput input = bindArgument(environment.getArgument("input"), AcceptRoleInput.class); + final AcceptRoleInput input = + bindArgument(environment.getArgument("input"), AcceptRoleInput.class); final String inviteTokenStr = input.getInviteToken(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { - throw new RuntimeException(String.format("Invite token %s is invalid", inviteTokenStr)); - } - - final Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); - 
_roleService.batchAssignRoleToActors(Collections.singletonList(authentication.getActor().toUrnStr()), roleUrn, - authentication); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to accept role using invite token %s", inviteTokenStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { + throw new RuntimeException( + String.format("Invite token %s is invalid", inviteTokenStr)); + } + + final Urn roleUrn = + _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); + _roleService.batchAssignRoleToActors( + Collections.singletonList(authentication.getActor().toUrnStr()), + roleUrn, + authentication); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to accept role using invite token %s", inviteTokenStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java index dc847069afae9..1997d0ac74601 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.linkedin.common.urn.Urn; @@ -13,10 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAssignRoleResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -30,19 +29,22 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw "Unauthorized to assign roles. Please contact your DataHub administrator if this needs corrective action."); } - final BatchAssignRoleInput input = bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); + final BatchAssignRoleInput input = + bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); final String roleUrnStr = input.getRoleUrn(); final List<String> actors = input.getActors(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn roleUrn = roleUrnStr == null ? null : Urn.createFromString(roleUrnStr); - _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn roleUrn = roleUrnStr == null ? 
null : Urn.createFromString(roleUrnStr); + _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 6bdf52e2f89f1..61ecf09fc91a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateInviteTokenResolver implements DataFetcher<CompletableFuture<InviteToken>> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to create invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final CreateInviteTokenInput input = bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); + final CreateInviteTokenInput input = + bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 0b0cbbb7ba473..066753c4f7559 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class GetInviteTokenResolver implements DataFetcher<CompletableFuture<InviteToken>> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to get invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final GetInviteTokenInput input = bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); + final GetInviteTokenInput input = + bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to get invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to get invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index 4746370d8603b..a1dd9219f6549 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListRolesResolver implements DataFetcher<CompletableFuture<ListRolesResult>> { @@ -38,36 +37,51 @@ public class ListRolesResolver implements DataFetcher<CompletableFuture<ListRole private final EntityClient _entityClient; @Override - public CompletableFuture<ListRolesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListRolesResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListRolesInput input = bindArgument(environment.getArgument("input"), ListRolesInput.class); + final ListRolesInput input = + bindArgument(environment.getArgument("input"), ListRolesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all role Urns. - final SearchResult gmsResult = - _entityClient.search(DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all role Urns. 
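// A sketch (not part of this patch) of the async idiom every resolver in this
// file follows: run the work via CompletableFuture.supplyAsync and, because
// Supplier.get() cannot throw checked exceptions, rewrap them as unchecked.
// `resolveAsync` is an illustrative helper, not a method in this codebase.
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;

final class AsyncResolve {
  static <T> CompletableFuture<T> resolveAsync(Callable<T> work, String errorMessage) {
    return CompletableFuture.supplyAsync(
        () -> {
          try {
            return work.call();
          } catch (Exception e) {
            // Mirrors the resolvers above: surface failures as RuntimeException.
            throw new RuntimeException(errorMessage, e);
          }
        });
  }
}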
+ final SearchResult gmsResult = + _entityClient.search( + DATAHUB_ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all users. - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, context.getAuthentication()); + // Then, get and hydrate all users. + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - final ListRolesResult result = new ListRolesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list roles", e); - } - }); + final ListRolesResult result = new ListRolesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setRoles(mapEntitiesToRoles(entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list roles", e); + } + }); } private List<DataHubRole> mapEntitiesToRoles(final Collection<EntityResponse> entities) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index e9140441999e2..6d23456b76b4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregateAcrossEntitiesInput; @@ -14,25 +19,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** - * Executes a search query only to get a provided list of aggregations back. 
- * Does not resolve any entities as results. + * Executes a search query only to get a provided list of aggregations back. Does not resolve any + * entities as results. */ @Slf4j @RequiredArgsConstructor -public class AggregateAcrossEntitiesResolver implements DataFetcher<CompletableFuture<AggregateResults>> { +public class AggregateAcrossEntitiesResolver + implements DataFetcher<CompletableFuture<AggregateResults>> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -48,47 +48,63 @@ public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environme // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - - final List<String> facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; - - try { - return mapAggregateResults(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - 0, - 0, // 0 entity count because we don't want resolved entities - searchFlags, - null, - ResolverUtils.getAuthentication(environment), - facets)); - } catch (Exception e) { - log.error( - "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", - input.getTypes(), input.getQuery(), input.getOrFilters()); - throw new RuntimeException( - "Failed to execute aggregate across entities: " + String.format("entity types %s, query %s, filters: %s", - input.getTypes(), input.getQuery(), input.getOrFilters()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + + final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + + final List<String> facets = + input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; + + try { + return mapAggregateResults( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + 0, + 0, // 0 entity count because we don't want resolved entities + searchFlags, + null, + ResolverUtils.getAuthentication(environment), + facets)); + } catch (Exception e) { + log.error( + "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters()); + throw new RuntimeException( + "Failed to execute aggregate across entities: " + + String.format( + "entity types %s, query %s, filters: %s", + input.getTypes(), input.getQuery(), input.getOrFilters()), + e); + } + }); } AggregateResults mapAggregateResults(SearchResult searchResult) { final AggregateResults results = new AggregateResults(); - results.setFacets(searchResult.getMetadata().getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + results.setFacets( + searchResult.getMetadata().getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return results; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 043ecf5eb97f1..c3e843cefd5c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; @@ -13,87 +17,90 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; -import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ -public class AutoCompleteForMultipleResolver implements DataFetcher<CompletableFuture<AutoCompleteMultipleResults>> { +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ +public class AutoCompleteForMultipleResolver + implements DataFetcher<CompletableFuture<AutoCompleteMultipleResults>> { - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); - private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; - private final ViewService 
_viewService; + private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; + private final ViewService _viewService; - public AutoCompleteForMultipleResolver(@Nonnull final List<SearchableEntityType<?, ?>> searchableEntities, @Nonnull final ViewService viewService) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - _viewService = viewService; - } + public AutoCompleteForMultipleResolver( + @Nonnull final List<SearchableEntityType<?, ?>> searchableEntities, + @Nonnull final ViewService viewService) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + _viewService = viewService; + } - @Override - public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final AutoCompleteMultipleInput input = bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); + @Override + public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final AutoCompleteMultipleInput input = + bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); - if (isBlank(input.getQuery())) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) + if (isBlank(input.getQuery())) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? 
resolveView( + _viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) : null; - List<EntityType> types = getEntityTypes(input.getTypes(), maybeResolvedView); - if (types != null && types.size() > 0) { - return AutocompleteUtils.batchGetAutocompleteResults( - types.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); - } - - // By default, autocomplete only against the Default Set of Autocomplete entities - return AutocompleteUtils.batchGetAutocompleteResults( - AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); + List<EntityType> types = getEntityTypes(input.getTypes(), maybeResolvedView); + if (types != null && types.size() > 0) { + return AutocompleteUtils.batchGetAutocompleteResults( + types.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); } - /** - * Gets the intersection of provided input types and types on the view applied (if any) - */ - @Nullable - List<EntityType> getEntityTypes(final @Nullable List<EntityType> inputTypes, final @Nullable DataHubViewInfo maybeResolvedView) { - List<EntityType> types = inputTypes; - if (maybeResolvedView != null) { - List<EntityType> inputEntityTypes = types != null ? types : new ArrayList<>(); - final List<String> inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - List<String> stringEntityTypes = SearchUtils.intersectEntityTypes(inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); + // By default, autocomplete only against the Default Set of Autocomplete entities + return AutocompleteUtils.batchGetAutocompleteResults( + AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); + } - types = stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); - } + /** Gets the intersection of provided input types and types on the view applied (if any) */ + @Nullable + List<EntityType> getEntityTypes( + final @Nullable List<EntityType> inputTypes, + final @Nullable DataHubViewInfo maybeResolvedView) { + List<EntityType> types = inputTypes; + if (maybeResolvedView != null) { + List<EntityType> inputEntityTypes = types != null ? 
types : new ArrayList<>(); + final List<String> inputEntityNames = + inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List<String> stringEntityTypes = + SearchUtils.intersectEntityTypes( + inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); - return types; + types = + stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); } + + return types; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java index e13545aadc516..235f5f8d27899 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java @@ -1,90 +1,94 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.linkedin.datahub.graphql.types.SearchableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AutoCompleteInput; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ public class AutoCompleteResolver implements DataFetcher<CompletableFuture<AutoCompleteResults>> { - private static final int DEFAULT_LIMIT = 5; + private static final int DEFAULT_LIMIT = 5; - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); - private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; + private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; - public AutoCompleteResolver(@Nonnull final List<SearchableEntityType<?, ?>> searchableEntities) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - } + public AutoCompleteResolver(@Nonnull final List<SearchableEntityType<?, ?>> searchableEntities) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture<AutoCompleteResults> get(DataFetchingEnvironment environment) { - final AutoCompleteInput input = bindArgument(environment.getArgument("input"), AutoCompleteInput.class); + @Override + public 
CompletableFuture<AutoCompleteResults> get(DataFetchingEnvironment environment) { + final AutoCompleteInput input = + bindArgument(environment.getArgument("input"), AutoCompleteInput.class); - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - if (isBlank(sanitizedQuery)) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + if (isBlank(sanitizedQuery)) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug("Executing autocomplete. " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + "Executing autocomplete. " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), input.getLimit())); - return _typeToEntity.get(input.getType()).autoComplete( - sanitizedQuery, - input.getField(), - filter, - limit, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + return _typeToEntity + .get(input.getType()) + .autoComplete( + sanitizedQuery, input.getField(), filter, limit, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), - input.getLimit()) + " " - + e.getMessage()); - throw new RuntimeException("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", - input.getType(), - input.getField(), - input.getQuery(), - input.getFilters(), - input.getLimit()), e); - } - }); - } + input.getLimit()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", + input.getType(), + input.getField(), + input.getQuery(), + input.getFilters(), + input.getLimit()), + e); + } + }); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java index 40722211de8d3..9cd860781c0d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java @@ -14,69 +14,81 @@ import java.util.List; import java.util.concurrent.CompletableFuture; 
import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; - - public class AutocompleteUtils { private static final Logger _logger = LoggerFactory.getLogger(AutocompleteUtils.class.getName()); private static final int DEFAULT_LIMIT = 5; - private AutocompleteUtils() { } + private AutocompleteUtils() {} public static CompletableFuture<AutoCompleteMultipleResults> batchGetAutocompleteResults( List<SearchableEntityType<?, ?>> entities, String sanitizedQuery, AutoCompleteMultipleInput input, DataFetchingEnvironment environment, - @Nullable DataHubViewInfo view - ) { + @Nullable DataHubViewInfo view) { final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - final List<CompletableFuture<AutoCompleteResultForEntity>> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> { - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = view != null - ? SearchUtils.combineFilters(filter, view.getDefinition().getFilter()) - : filter; + final List<CompletableFuture<AutoCompleteResultForEntity>> autoCompletesFuture = + entities.stream() + .map( + entity -> + CompletableFuture.supplyAsync( + () -> { + final Filter filter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = + view != null + ? SearchUtils.combineFilters( + filter, view.getDefinition().getFilter()) + : filter; - try { - final AutoCompleteResults searchResult = entity.autoComplete( - sanitizedQuery, - input.getField(), - finalFilter, - limit, - environment.getContext() - ); - return new AutoCompleteResultForEntity( - entity.type(), - searchResult.getSuggestions(), - searchResult.getEntities() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete all: " - + String.format("field %s, query %s, filters: %s, limit: %s", - input.getField(), - input.getQuery(), - filter, - input.getLimit()), e); - return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList()); - } - })).collect(Collectors.toList()); + try { + final AutoCompleteResults searchResult = + entity.autoComplete( + sanitizedQuery, + input.getField(), + finalFilter, + limit, + environment.getContext()); + return new AutoCompleteResultForEntity( + entity.type(), + searchResult.getSuggestions(), + searchResult.getEntities()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete all: " + + String.format( + "field %s, query %s, filters: %s, limit: %s", + input.getField(), + input.getQuery(), + filter, + input.getLimit()), + e); + return new AutoCompleteResultForEntity( + entity.type(), Collections.emptyList(), Collections.emptyList()); + } + })) + .collect(Collectors.toList()); return CompletableFuture.allOf(autoCompletesFuture.toArray(new CompletableFuture[0])) - .thenApplyAsync((res) -> { - AutoCompleteMultipleResults result = new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); - List<AutoCompleteResultForEntity> suggestions = autoCompletesFuture.stream() - .map(CompletableFuture::join) - .filter( + .thenApplyAsync( + (res) -> { + AutoCompleteMultipleResults result = + new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); + List<AutoCompleteResultForEntity> suggestions = + autoCompletesFuture.stream() + .map(CompletableFuture::join) + .filter( autoCompleteResultForEntity -> - 
autoCompleteResultForEntity.getSuggestions() != null && autoCompleteResultForEntity.getSuggestions().size() > 0 - ) - .collect(Collectors.toList()); - result.setSuggestions(suggestions); - return result; - }); + autoCompleteResultForEntity.getSuggestions() != null + && autoCompleteResultForEntity.getSuggestions().size() > 0) + .collect(Collectors.toList()); + result.setSuggestions(suggestions); + return result; + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 17058fd8d7cff..e54955e1857f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.Entity; @@ -18,26 +23,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor -public class GetQuickFiltersResolver implements DataFetcher<CompletableFuture<GetQuickFiltersResult>> { +public class GetQuickFiltersResolver + implements DataFetcher<CompletableFuture<GetQuickFiltersResult>> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -47,41 +46,51 @@ public class GetQuickFiltersResolver implements DataFetcher<CompletableFuture<Ge private static final int SOURCE_ENTITY_COUNT = 3; private static final int DATAHUB_ENTITY_COUNT = 2; - public CompletableFuture<GetQuickFiltersResult> get(final DataFetchingEnvironment environment) throws Exception { - final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); - - return CompletableFuture.supplyAsync(() -> { - final GetQuickFiltersResult result = new GetQuickFiltersResult(); - final List<QuickFilter> quickFilters = new ArrayList<>(); - - try { - final SearchResult searchResult = getSearchResults(ResolverUtils.getAuthentication(environment), input); - final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - - 
quickFilters.addAll(getPlatformQuickFilters(aggregations));
-        quickFilters.addAll(getEntityTypeQuickFilters(aggregations));
-      } catch (Exception e) {
-        log.error("Failed getting quick filters", e);
-        throw new RuntimeException("Failed to to get quick filters", e);
-      }
-
-      result.setQuickFilters(quickFilters);
-      return result;
-    });
+  public CompletableFuture<GetQuickFiltersResult> get(final DataFetchingEnvironment environment)
+      throws Exception {
+    final GetQuickFiltersInput input =
+        bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class);
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final GetQuickFiltersResult result = new GetQuickFiltersResult();
+          final List<QuickFilter> quickFilters = new ArrayList<>();
+
+          try {
+            final SearchResult searchResult =
+                getSearchResults(ResolverUtils.getAuthentication(environment), input);
+            final AggregationMetadataArray aggregations =
+                searchResult.getMetadata().getAggregations();
+
+            quickFilters.addAll(getPlatformQuickFilters(aggregations));
+            quickFilters.addAll(getEntityTypeQuickFilters(aggregations));
+          } catch (Exception e) {
+            log.error("Failed getting quick filters", e);
+            throw new RuntimeException("Failed to get quick filters", e);
+          }
+
+          result.setQuickFilters(quickFilters);
+          return result;
+        });
   }
 
-  /**
-   * Do a star search with view filter applied to get info about all data in this instance.
-   */
-  private SearchResult getSearchResults(@Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) throws Exception {
-    final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null)
-        ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication)
-        : null;
-    final List<String> entityNames = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
+  /** Do a star search with view filter applied to get info about all data in this instance. */
+  private SearchResult getSearchResults(
+      @Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input)
+      throws Exception {
+    final DataHubViewInfo maybeResolvedView =
+        (input.getViewUrn() != null)
+            ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication)
+            : null;
+    final List<String> entityNames =
+        SEARCHABLE_ENTITY_TYPES.stream()
+            .map(EntityTypeMapper::getName)
+            .collect(Collectors.toList());
 
     return _entityClient.searchAcrossEntities(
         maybeResolvedView != null
-            ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes())
+            ? 
SearchUtils.intersectEntityTypes(
+                entityNames, maybeResolvedView.getDefinition().getEntityTypes())
             : entityNames,
         "*",
         maybeResolvedView != null
@@ -95,67 +104,88 @@ private SearchResult getSearchResults(@Nonnull final Authentication authenticati
   }
 
   /**
-   * Get platforms and their count from an aggregations array, sorts by entity count, and map the top 5 to quick filters
+   * Get platforms and their count from an aggregations array, sort by entity count, and map the
+   * top 5 to quick filters
    */
-  private List<QuickFilter> getPlatformQuickFilters(@Nonnull final AggregationMetadataArray aggregations) {
+  private List<QuickFilter> getPlatformQuickFilters(
+      @Nonnull final AggregationMetadataArray aggregations) {
     final List<QuickFilter> platforms = new ArrayList<>();
-    final Optional<AggregationMetadata> platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst();
+    final Optional<AggregationMetadata> platformAggregations =
+        aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst();
     if (platformAggregations.isPresent()) {
       final List<FilterValue> sortedPlatforms =
-          platformAggregations.get().getFilterValues().stream().sorted(Comparator.comparingLong(val -> -val.getFacetCount())).collect(Collectors.toList());
-      sortedPlatforms.forEach(platformFilter -> {
-        if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) {
-          platforms.add(mapQuickFilter(PLATFORM, platformFilter));
-        }
-      });
+          platformAggregations.get().getFilterValues().stream()
+              .sorted(Comparator.comparingLong(val -> -val.getFacetCount()))
+              .collect(Collectors.toList());
+      sortedPlatforms.forEach(
+          platformFilter -> {
+            if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) {
+              platforms.add(mapQuickFilter(PLATFORM, platformFilter));
+            }
+          });
     }
 
     // return platforms sorted alphabetically by their name
-    return platforms.stream().sorted(Comparator.comparing(QuickFilter::getValue)).collect(Collectors.toList());
+    return platforms.stream()
+        .sorted(Comparator.comparing(QuickFilter::getValue))
+        .collect(Collectors.toList());
   }
 
   /**
-   * Gets entity type quick filters from search aggregations. First, get source entity type quick filters
-   * from a prioritized list. Do the same for datathub entity types.
+   * Gets entity type quick filters from search aggregations. First, get source entity type quick
+   * filters from a prioritized list. Do the same for DataHub entity types.
*/ - private List<QuickFilter> getEntityTypeQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List<QuickFilter> getEntityTypeQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List<QuickFilter> entityTypes = new ArrayList<>(); - final Optional<AggregationMetadata> entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); + final Optional<AggregationMetadata> entityAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); if (entityAggregations.isPresent()) { final List<QuickFilter> sourceEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, + SOURCE_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(sourceEntityTypeFilters); final List<QuickFilter> dataHubEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, + DATAHUB_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(dataHubEntityTypeFilters); } return entityTypes; } /** - * Create a quick filters list by looping over prioritized list and adding filters that exist until we reach the maxListSize defined + * Create a quick filters list by looping over prioritized list and adding filters that exist + * until we reach the maxListSize defined */ private List<QuickFilter> getQuickFiltersFromList( @Nonnull final List<String> prioritizedList, final int maxListSize, - @Nonnull final AggregationMetadata entityAggregations - ) { + @Nonnull final AggregationMetadata entityAggregations) { final List<QuickFilter> entityTypes = new ArrayList<>(); - prioritizedList.forEach(entityType -> { - if (entityTypes.size() < maxListSize) { - final Optional<FilterValue> entityFilter = entityAggregations.getFilterValues().stream().filter(val -> val.getValue().equals(entityType)).findFirst(); - if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); - } - } - }); + prioritizedList.forEach( + entityType -> { + if (entityTypes.size() < maxListSize) { + final Optional<FilterValue> entityFilter = + entityAggregations.getFilterValues().stream() + .filter(val -> val.getValue().equals(entityType)) + .findFirst(); + if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { + entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); + } + } + }); return entityTypes; } - private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final FilterValue filterValue) { + private QuickFilter mapQuickFilter( + @Nonnull final String field, @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); @@ -167,9 +197,7 @@ private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final F return quickFilter; } - /** - * If we're working with an entity type filter, we need to convert the value to an EntityType - */ + /** If we're working with an entity type filter, we need to convert the value to an EntityType */ public static String convertFilterValue(String filterValue, boolean isEntityType) { if (isEntityType) { return 
EntityTypeMapper.getType(filterValue).toString();
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java
index d576ffc8ca280..742d1d170de64 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.search;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
+
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.EntityType;
@@ -24,13 +27,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
-
-
-/**
- * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type
- */
+/** Resolver responsible for resolving 'scrollAcrossEntities' field of the Query type */
 @Slf4j
 @RequiredArgsConstructor
 public class ScrollAcrossEntitiesResolver implements DataFetcher<CompletableFuture<ScrollResults>> {
@@ -48,57 +45,80 @@ public CompletableFuture<ScrollResults> get(DataFetchingEnvironment environment)
         bindArgument(environment.getArgument("input"), ScrollAcrossEntitiesInput.class);
 
     final List<EntityType> entityTypes =
-        (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes();
-    final List<String> entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
+        (input.getTypes() == null || input.getTypes().isEmpty())
+            ? SEARCHABLE_ENTITY_TYPES
+            : input.getTypes();
+    final List<String> entityNames =
+        entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
 
-    // escape forward slash since it is a reserved character in Elasticsearch, default to * if blank/empty
-    final String sanitizedQuery = StringUtils.isNotBlank(input.getQuery())
-        ? ResolverUtils.escapeForwardSlash(input.getQuery()) : "*";
+    // escape forward slash since it is a reserved character in Elasticsearch, default to * if
+    // blank/empty
+    final String sanitizedQuery =
+        StringUtils.isNotBlank(input.getQuery())
+            ? ResolverUtils.escapeForwardSlash(input.getQuery())
+            : "*";
 
-    @Nullable
-    final String scrollId = input.getScrollId();
+    @Nullable final String scrollId = input.getScrollId();
     final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null)
-          ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication())
-          : null;
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final DataHubViewInfo maybeResolvedView =
+              (input.getViewUrn() != null)
                  ? 
resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; - return UrnScrollResultsMapper.map(_entityClient.scrollAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - scrollId, - keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), e); - } - }); + return UrnScrollResultsMapper.map( + _entityClient.scrollAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters(
+                            baseFilter, maybeResolvedView.getDefinition().getFilter())
+                        : baseFilter,
+                    scrollId,
+                    keepAlive,
+                    count,
+                    searchFlags,
+                    ResolverUtils.getAuthentication(environment)));
+          } catch (Exception e) {
+            log.error(
+                "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}",
+                input.getTypes(),
+                input.getQuery(),
+                input.getOrFilters(),
+                scrollId,
+                count);
+            throw new RuntimeException(
+                "Failed to execute search: "
+                    + String.format(
+                        "entity types %s, query %s, filters: %s, start: %s, count: %s",
+                        input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count),
+                e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java
index 78be1ac309690..adab62c22bb72 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.search;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.AndFilterInput;
@@ -25,13 +28,7 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
-
-
-/**
- * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type
- */
+/** Resolver responsible for resolving 'scrollAcrossLineage' field of the Query type */
 @Slf4j
 @RequiredArgsConstructor
 public class ScrollAcrossLineageResolver
@@ -53,55 +50,98 @@ public CompletableFuture<ScrollAcrossLineageResults> get(DataFetchingEnvironment
     final LineageDirection lineageDirection = input.getDirection();
 
     List<EntityType> entityTypes =
-        (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes();
-    List<String> entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
+        (input.getTypes() == null || input.getTypes().isEmpty())
+            ? SEARCHABLE_ENTITY_TYPES
+            : input.getTypes();
+    List<String> entityNames =
+        entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
 
     // escape forward slash since it is a reserved character in Elasticsearch
-    final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null;
+    final String sanitizedQuery =
+        input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null;
 
     final String scrollId = input.getScrollId() != null ? input.getScrollId() : null;
     final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
-    final List<AndFilterInput> filters = input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>();
-    final List<FacetFilterInput> facetFilters = filters.stream()
-        .map(AndFilterInput::getAnd)
-        .flatMap(List::stream)
-        .collect(Collectors.toList());
+    final List<AndFilterInput> filters =
+        input.getOrFilters() != null ? 
input.getOrFilters() : new ArrayList<>(); + final List<FacetFilterInput> facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); final Integer maxHops = getMaxHops(facetFilters); String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); - SearchFlags searchFlags = null; - final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = new SearchFlags() - .setSkipCache(inputFlags.getSkipCache()) - .setFulltext(inputFlags.getFulltext()) - .setMaxAggValues(inputFlags.getMaxAggValues()); - } - return UrnScrollAcrossLineageResultsMapper.map( - _entityClient.scrollAcrossLineage(urn, resolvedDirection, entityNames, sanitizedQuery, - maxHops, ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), null, scrollId, - keepAlive, count, startTimeMillis, endTimeMillis, searchFlags, ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); - throw new RuntimeException("Failed to execute scroll across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count), e); - } - }); + SearchFlags searchFlags = null; + final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = + input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = + new SearchFlags() + .setSkipCache(inputFlags.getSkipCache()) + .setFulltext(inputFlags.getFulltext()) + .setMaxAggValues(inputFlags.getMaxAggValues()); + } + return UrnScrollAcrossLineageResultsMapper.map( + _entityClient.scrollAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), + null, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + 
resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); + throw new RuntimeException( + "Failed to execute scroll across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 1022b25b3cd99..f8178e3b396cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; @@ -19,13 +22,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossEntitiesResolver implements DataFetcher<CompletableFuture<SearchResults>> { @@ -50,43 +47,65 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - SortCriterion sortCriterion = input.getSortInput() != null ? mapSortCriterion(input.getSortInput().getSortCriterion()) : null; - - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - start, - count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + + SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + SortCriterion sortCriterion = + input.getSortInput() != null + ? mapSortCriterion(input.getSortInput().getSortCriterion()) + : null; + + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + start, + count, + searchFlags, + sortCriterion, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 9f489183f4af7..0f5d2d90ba0c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for 
resolving 'searchAcrossEntities' field of the Query type
- */
+/** Resolver responsible for resolving 'searchAcrossLineage' field of the Query type */
 @Slf4j
 @RequiredArgsConstructor
 public class SearchAcrossLineageResolver
@@ -54,76 +51,95 @@ public CompletableFuture<SearchAcrossLineageResults> get(DataFetchingEnvironment
     final LineageDirection lineageDirection = input.getDirection();
 
     List<EntityType> entityTypes =
-        (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes();
-    List<String> entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
+        (input.getTypes() == null || input.getTypes().isEmpty())
+            ? SEARCHABLE_ENTITY_TYPES
+            : input.getTypes();
+    List<String> entityNames =
+        entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
 
     // escape forward slash since it is a reserved character in Elasticsearch
-    final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null;
+    final String sanitizedQuery =
+        input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null;
 
     final int start = input.getStart() != null ? input.getStart() : DEFAULT_START;
     final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
-    final List<FacetFilterInput> filters = input.getFilters() != null ? input.getFilters() : new ArrayList<>();
+    final List<FacetFilterInput> filters =
+        input.getFilters() != null ? input.getFilters() : new ArrayList<>();
     final Integer maxHops = getMaxHops(filters);
     @Nullable
-    final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis();
+    final Long startTimeMillis =
+        input.getStartTimeMillis() == null ? null : input.getStartTimeMillis();
     @Nullable
     final Long endTimeMillis = input.getEndTimeMillis() == null ? 
null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, - resolvedDirection, - input.getTypes(), - input.getQuery(), - filters, - start, - count); - - final Filter filter = - ResolverUtils.buildFilter( + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), filters, - input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - if (inputFlags.getSkipHighlighting() == null) { - searchFlags.setSkipHighlighting(true); - } - } else { - searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); - } - - return UrnSearchAcrossLineageResultsMapper.map( - _entityClient.searchAcrossLineage( + start, + count); + + final Filter filter = ResolverUtils.buildFilter(filters, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + if (inputFlags.getSkipHighlighting() == null) { + searchFlags.setSkipHighlighting(true); + } + } else { + searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); + } + + return UrnSearchAcrossLineageResultsMapper.map( + _entityClient.searchAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + filter, + null, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", urn, resolvedDirection, - entityNames, - sanitizedQuery, - maxHops, - filter, - null, + input.getTypes(), + input.getQuery(), + filters, start, - count, - startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count); - throw new RuntimeException("Failed to execute search across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count), e); - } finally { - log.debug("Returning from search across lineage resolver"); - } - }); + count); + throw new RuntimeException( + "Failed to execute search across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + start, + count), 
+ e); + } finally { + log.debug("Returning from search across lineage resolver"); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 0e66d6e601399..6821423887923 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; @@ -15,17 +18,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - -/** - * Resolver responsible for resolving the 'search' field of the Query type - */ +/** Resolver responsible for resolving the 'search' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchResolver implements DataFetcher<CompletableFuture<SearchResults>> { - private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = new SearchFlags() + private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = + new SearchFlags() .setFulltext(true) .setMaxAggValues(20) .setSkipCache(false) @@ -54,22 +52,52 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search. 
entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); - return UrnSearchResultsMapper.map( - _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), - input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment), + return UrnSearchResultsMapper.map( + _entityClient.search( + entityName, + sanitizedQuery, + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), + null, + start, + count, + ResolverUtils.getAuthentication(environment), searchFlags)); - } catch (Exception e) { - log.error("Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags), e); - } - }); + } catch (Exception e) { + log.error( + "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index fb146ef72877d..d04cb57e1a860 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -1,5 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import 
 import com.linkedin.common.urn.Urn;
@@ -28,31 +44,11 @@ import lombok.extern.slf4j.Slf4j;
 import org.codehaus.plexus.util.CollectionUtils;
 
-import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME;
-
-
 @Slf4j
 public class SearchUtils {
 
-  private SearchUtils() {
-  }
+  private SearchUtils() {}
 
-  /**
-   * Entities that are searched by default in Search Across Entities
-   */
+  /** Entities that are searched by default in Search Across Entities */
   public static final List<EntityType> SEARCHABLE_ENTITY_TYPES =
       ImmutableList.of(
           EntityType.DATASET,
@@ -76,10 +72,7 @@ private SearchUtils() {
           EntityType.DATA_PRODUCT,
           EntityType.NOTEBOOK);
 
-
-  /**
-   * Entities that are part of autocomplete by default in Auto Complete Across Entities
-   */
+  /** Entities that are part of autocomplete by default in Auto Complete Across Entities */
   public static final List<EntityType> AUTO_COMPLETE_ENTITY_TYPES =
       ImmutableList.of(
           EntityType.DATASET,
@@ -99,63 +92,64 @@ private SearchUtils() {
           EntityType.NOTEBOOK,
           EntityType.DATA_PRODUCT);
 
-  /**
-   * A prioritized list of source filter types used to generate quick filters
-   */
-  public static final List<String> PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of(
-      DATASET_ENTITY_NAME,
-      DASHBOARD_ENTITY_NAME,
-      DATA_FLOW_ENTITY_NAME,
-      DATA_JOB_ENTITY_NAME,
-      CHART_ENTITY_NAME,
-      CONTAINER_ENTITY_NAME,
-      ML_MODEL_ENTITY_NAME,
-      ML_MODEL_GROUP_ENTITY_NAME,
-      ML_FEATURE_ENTITY_NAME,
-      ML_FEATURE_TABLE_ENTITY_NAME,
-      ML_PRIMARY_KEY_ENTITY_NAME
-  ).map(String::toLowerCase).collect(Collectors.toList());
+  /** A prioritized list of source filter types used to generate quick filters */
+  public static final List<String> PRIORITIZED_SOURCE_ENTITY_TYPES =
+      Stream.of(
+              DATASET_ENTITY_NAME,
+              DASHBOARD_ENTITY_NAME,
+              DATA_FLOW_ENTITY_NAME,
+              DATA_JOB_ENTITY_NAME,
+              CHART_ENTITY_NAME,
+              CONTAINER_ENTITY_NAME,
+              ML_MODEL_ENTITY_NAME,
+              ML_MODEL_GROUP_ENTITY_NAME,
+              ML_FEATURE_ENTITY_NAME,
+              ML_FEATURE_TABLE_ENTITY_NAME,
+              ML_PRIMARY_KEY_ENTITY_NAME)
+          .map(String::toLowerCase)
+          .collect(Collectors.toList());
 
-  /**
-   * A prioritized list of DataHub filter types used to generate quick filters
-   */
-  public static final List<String> PRIORITIZED_DATAHUB_ENTITY_TYPES = Stream.of(
-      DOMAIN_ENTITY_NAME,
-      GLOSSARY_TERM_ENTITY_NAME,
-      CORP_GROUP_ENTITY_NAME,
-      CORP_USER_ENTITY_NAME
-  ).map(String::toLowerCase).collect(Collectors.toList());
+  /** A prioritized list of DataHub filter types used to generate quick filters */
+  public static final List<String> PRIORITIZED_DATAHUB_ENTITY_TYPES =
+      Stream.of(
+              DOMAIN_ENTITY_NAME,
+              GLOSSARY_TERM_ENTITY_NAME,
+              CORP_GROUP_ENTITY_NAME,
+              CORP_USER_ENTITY_NAME)
+          .map(String::toLowerCase)
+          .collect(Collectors.toList());
 
   /**
-   * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link Filter}
-   * in disjunctive normal form.
+   * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link
+   * Filter} in disjunctive normal form.
    *
    * @param baseFilter the filter to apply the view to
    * @param viewFilter the view filter, null if it doesn't exist
-   *
    * @return a new instance of {@link Filter} representing the applied view.
    */
   @Nonnull
-  public static Filter combineFilters(@Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) {
-    final Filter finalBaseFilter = baseFilter == null
-        ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList()))
-        : baseFilter;
+  public static Filter combineFilters(
+      @Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) {
+    final Filter finalBaseFilter =
+        baseFilter == null
+            ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList()))
+            : baseFilter;
 
     // Join the filter conditions in Disjunctive Normal Form.
     return combineFiltersInConjunction(finalBaseFilter, viewFilter);
   }
 
   /**
-   * Returns the intersection of two sets of entity types. (Really just string lists).
-   * If either is empty, consider the entity types list to mean "all" (take the other set).
+   * Returns the intersection of two sets of entity types. (Really just string lists). If either is
+   * empty, consider the entity types list to mean "all" (take the other set).
    *
    * @param baseEntityTypes the entity types to apply the view to
    * @param viewEntityTypes the view info, null if it doesn't exist
-   *
   * @return the intersection of the two input sets
    */
   @Nonnull
-  public static List<String> intersectEntityTypes(@Nonnull final List<String> baseEntityTypes, @Nonnull final List<String> viewEntityTypes) {
+  public static List<String> intersectEntityTypes(
+      @Nonnull final List<String> baseEntityTypes, @Nonnull final List<String> viewEntityTypes) {
     if (baseEntityTypes.isEmpty()) {
       return viewEntityTypes;
     }
@@ -171,126 +165,29 @@ public static List<String> intersectEntityTypes(@Nonnull final List<String> base
    *
    * @param filter1 the first filter in the pair
    * @param filter2 the second filter in the pair
-   *
-   * This method supports either Filter format, where the "or" field is used, instead
-   * of criteria. If the criteria filter is used, then it will be converted into an "OR" before
-   * returning the new filter.
-   *
+   *     <p>This method supports either Filter format, where the "or" field is used, instead of
+   *     criteria. If the criteria filter is used, then it will be converted into an "OR" before
+   *     returning the new filter.
   * @return the result of joining the 2 filters in a conjunction (AND)
-   *
-   * How does it work? It basically cross-products the conjunctions inside of each Filter clause.
-   *
-   * Example Inputs:
-   * filter1 ->
-   * {
-   *   or: [
-   *     {
-   *       and: [
-   *         {
-   *           field: tags,
-   *           condition: EQUAL,
-   *           values: ["urn:li:tag:tag"]
-   *         }
-   *       ]
-   *     },
-   *     {
-   *       and: [
-   *         {
-   *           field: glossaryTerms,
-   *           condition: EQUAL,
-   *           values: ["urn:li:glossaryTerm:term"]
-   *         }
-   *       ]
-   *     }
-   *   ]
-   * }
-   * filter2 ->
-   * {
-   *   or: [
-   *     {
-   *       and: [
-   *         {
-   *           field: domain,
-   *           condition: EQUAL,
-   *           values: ["urn:li:domain:domain"]
-   *         },
-   *       ]
-   *     },
-   *     {
-   *       and: [
-   *         {
-   *           field: glossaryTerms,
-   *           condition: EQUAL,
-   *           values: ["urn:li:glossaryTerm:term2"]
-   *         }
-   *       ]
-   *     }
-   *   ]
-   * }
-   * Example Output:
-   * {
-   *   or: [
-   *     {
-   *       and: [
-   *         {
-   *           field: tags,
-   *           condition: EQUAL,
-   *           values: ["urn:li:tag:tag"]
-   *         },
-   *         {
-   *           field: domain,
-   *           condition: EQUAL,
-   *           values: ["urn:li:domain:domain"]
-   *         }
-   *       ]
-   *     },
-   *     {
-   *       and: [
-   *         {
-   *           field: tags,
-   *           condition: EQUAL,
-   *           values: ["urn:li:tag:tag"]
-   *         },
-   *         {
-   *           field: glossaryTerms,
-   *           condition: EQUAL,
-   *           values: ["urn:li:glosaryTerm:term2"]
-   *         }
-   *       ]
-   *     },
-   *     {
-   *       and: [
-   *         {
-   *           field: glossaryTerm,
-   *           condition: EQUAL,
-   *           values: ["urn:li:glossaryTerm:term"]
-   *         },
-   *         {
-   *           field: domain,
-   *           condition: EQUAL,
-   *           values: ["urn:li:domain:domain"]
-   *         }
-   *       ]
-   *     },
-   *     {
-   *       and: [
-   *         {
-   *           field: glossaryTerm,
-   *           condition: EQUAL,
-   *           values: ["urn:li:glossaryTerm:term"]
-   *         },
-   *         {
-   *           field: glossaryTerms,
-   *           condition: EQUAL,
-   *           values: ["urn:li:glosaryTerm:term2"]
-   *         }
-   *       ]
-   *     },
-   *   ]
-   * }
+   *     <p>How does it work? It basically cross-products the conjunctions inside of each Filter
+   *     clause.
+   *     <p>Example Inputs: filter1 -> { or: [ { and: [ { field: tags, condition: EQUAL, values:
+   *     ["urn:li:tag:tag"] } ] }, { and: [ { field: glossaryTerms, condition: EQUAL, values:
+   *     ["urn:li:glossaryTerm:term"] } ] } ] } filter2 -> { or: [ { and: [ { field: domain,
+   *     condition: EQUAL, values: ["urn:li:domain:domain"] }, ] }, { and: [ { field: glossaryTerms,
+   *     condition: EQUAL, values: ["urn:li:glossaryTerm:term2"] } ] } ] } Example Output: { or: [ {
+   *     and: [ { field: tags, condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: domain,
+   *     condition: EQUAL, values: ["urn:li:domain:domain"] } ] }, { and: [ { field: tags,
+   *     condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: glossaryTerms, condition: EQUAL,
+   *     values: ["urn:li:glosaryTerm:term2"] } ] }, { and: [ { field: glossaryTerm, condition:
+   *     EQUAL, values: ["urn:li:glossaryTerm:term"] }, { field: domain, condition: EQUAL, values:
+   *     ["urn:li:domain:domain"] } ] }, { and: [ { field: glossaryTerm, condition: EQUAL, values:
+   *     ["urn:li:glossaryTerm:term"] }, { field: glossaryTerms, condition: EQUAL, values:
+   *     ["urn:li:glosaryTerm:term2"] } ] }, ] }
    */
   @Nonnull
-  private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, @Nonnull final Filter filter2) {
+  private static Filter combineFiltersInConjunction(
+      @Nonnull final Filter filter1, @Nonnull final Filter filter2) {
     final Filter finalFilter1 = convertToV2Filter(filter1);
     final Filter finalFilter2 = convertToV2Filter(filter2);
 
@@ -310,7 +207,8 @@ private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1,
     for (ConjunctiveCriterion conjunction2 : finalFilter2.getOr()) {
       final List<Criterion> joinedCriterion = new ArrayList<>(conjunction1.getAnd());
       joinedCriterion.addAll(conjunction2.getAnd());
-      ConjunctiveCriterion newConjunction = new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion));
+      ConjunctiveCriterion newConjunction =
+          new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion));
       newDisjunction.add(newConjunction);
     }
   }
@@ -325,38 +223,45 @@ private static Filter convertToV2Filter(@Nonnull Filter filter) {
     } else if (filter.hasCriteria()) {
       // Convert criteria to an OR
       return new Filter()
-          .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-              new ConjunctiveCriterion()
-                  .setAnd(filter.getCriteria())
-          )));
+          .setOr(
+              new ConjunctiveCriterionArray(
+                  ImmutableList.of(new ConjunctiveCriterion().setAnd(filter.getCriteria()))));
     }
     throw new IllegalArgumentException(
-        String.format("Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", filter));
+        String.format(
+            "Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s",
+            filter));
   }
 
   /**
-   * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the specified
-   * urn cannot be found.
+   * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the
+   * specified urn cannot be found.
    */
-  public static DataHubViewInfo resolveView(@Nonnull ViewService viewService, @Nonnull final Urn viewUrn,
+  public static DataHubViewInfo resolveView(
+      @Nonnull ViewService viewService,
+      @Nonnull final Urn viewUrn,
       @Nonnull final Authentication authentication) {
     try {
       DataHubViewInfo maybeViewInfo = viewService.getViewInfo(viewUrn, authentication);
       if (maybeViewInfo == null) {
-        log.warn(String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn));
+        log.warn(
+            String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn));
       }
       return maybeViewInfo;
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), e);
+      throw new RuntimeException(
+          String.format("Caught exception while attempting to resolve View with URN %s", viewUrn),
+          e);
     }
   }
 
   // Assumption is that filter values for degree are either null, 3+, 2, or 1.
   public static Integer getMaxHops(List<FacetFilterInput> filters) {
-    Set<String> degreeFilterValues = filters.stream()
-        .filter(filter -> filter.getField().equals("degree"))
-        .flatMap(filter -> filter.getValues().stream())
-        .collect(Collectors.toSet());
+    Set<String> degreeFilterValues =
+        filters.stream()
+            .filter(filter -> filter.getField().equals("degree"))
+            .flatMap(filter -> filter.getValues().stream())
+            .collect(Collectors.toSet());
     Integer maxHops = null;
     if (!degreeFilterValues.contains("3+")) {
       if (degreeFilterValues.contains("2")) {
@@ -368,7 +273,8 @@ public static Integer getMaxHops(List<FacetFilterInput> filters) {
     return maxHops;
   }
 
-  public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) {
+  public static SearchFlags mapInputFlags(
+      com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) {
     SearchFlags searchFlags = null;
     if (inputFlags != null) {
       searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags);
@@ -376,7 +282,8 @@ public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.S
     return searchFlags;
   }
 
-  public static SortCriterion mapSortCriterion(com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) {
+  public static SortCriterion mapSortCriterion(
+      com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) {
     SortCriterion result = new SortCriterion();
     result.setField(sortCriterion.getField());
     result.setOrder(SortOrder.valueOf(sortCriterion.getSortOrder().name()));
@@ -388,4 +295,4 @@ public static List<String> getEntityNames(List<EntityType> inputTypes) {
         (inputTypes == null || inputTypes.isEmpty()) ? SEARCHABLE_ENTITY_TYPES : inputTypes;
     return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java
index 8c21277b66a69..a7e0d93c7bd1e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.settings.user;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.SetMode;
@@ -16,58 +18,61 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-/**
- * Resolver responsible for updating the authenticated user's View-specific settings.
- */
+/** Resolver responsible for updating the authenticated user's View-specific settings. */
 @Slf4j
 @RequiredArgsConstructor
-public class UpdateCorpUserViewsSettingsResolver implements DataFetcher<CompletableFuture<Boolean>> {
+public class UpdateCorpUserViewsSettingsResolver
+    implements DataFetcher<CompletableFuture<Boolean>> {
 
   private final SettingsService _settingsService;
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final UpdateCorpUserViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class);
+    final UpdateCorpUserViewsSettingsInput input =
+        bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class);
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
 
-        final Urn userUrn = UrnUtils.getUrn(context.getActorUrn());
+            final Urn userUrn = UrnUtils.getUrn(context.getActorUrn());
 
-        final CorpUserSettings maybeSettings = _settingsService.getCorpUserSettings(
-            userUrn,
-            context.getAuthentication()
-        );
+            final CorpUserSettings maybeSettings =
+                _settingsService.getCorpUserSettings(userUrn, context.getAuthentication());
 
-        final CorpUserSettings newSettings = maybeSettings == null
-            ? new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false))
-            : maybeSettings;
+            final CorpUserSettings newSettings =
+                maybeSettings == null
+                    ? new CorpUserSettings()
+                        .setAppearance(
+                            new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false))
+                    : maybeSettings;
 
-        // Patch the new corp user settings. This does a R-M-F.
-        updateCorpUserSettings(newSettings, input);
+            // Patch the new corp user settings. This does a R-M-F.
+            updateCorpUserSettings(newSettings, input);
 
-        _settingsService.updateCorpUserSettings(
-            userUrn,
-            newSettings,
-            context.getAuthentication()
-        );
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform user view settings update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update to user view settings against input %s", input.toString()), e);
-      }
-    });
+            _settingsService.updateCorpUserSettings(
+                userUrn, newSettings, context.getAuthentication());
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform user view settings update against input {}, {}",
+                input.toString(),
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to perform update to user view settings against input %s",
+                    input.toString()),
+                e);
+          }
+        });
   }
 
   private static void updateCorpUserSettings(
       @Nonnull final CorpUserSettings settings,
       @Nonnull final UpdateCorpUserViewsSettingsInput input) {
-    final CorpUserViewsSettings newViewSettings = settings.hasViews()
-        ? settings.getViews()
-        : new CorpUserViewsSettings();
+    final CorpUserViewsSettings newViewSettings =
+        settings.hasViews() ? settings.getViews() : new CorpUserViewsSettings();
     updateCorpUserViewsSettings(newViewSettings, input);
     settings.setViews(newViewSettings);
   }
@@ -75,9 +80,8 @@ private static void updateCorpUserSettings(
   private static void updateCorpUserViewsSettings(
       @Nonnull final CorpUserViewsSettings settings,
       @Nonnull final UpdateCorpUserViewsSettingsInput input) {
-    settings.setDefaultView(input.getDefaultView() != null
-        ? UrnUtils.getUrn(input.getDefaultView())
-        : null,
+    settings.setDefaultView(
+        input.getDefaultView() != null ? UrnUtils.getUrn(input.getDefaultView()) : null,
         SetMode.REMOVE_IF_NULL);
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java
index f1aba3d9247c5..208e871743269 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java
@@ -14,11 +14,11 @@
 /**
  * Retrieves the Global Settings related to the Views feature.
  *
- * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege.
+ * <p>This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege.
  */
 @Slf4j
-public class GlobalViewsSettingsResolver implements
-    DataFetcher<CompletableFuture<GlobalViewsSettings>> {
+public class GlobalViewsSettingsResolver
+    implements DataFetcher<CompletableFuture<GlobalViewsSettings>> {
 
   private final SettingsService _settingsService;
 
@@ -27,25 +27,29 @@ public GlobalViewsSettingsResolver(final SettingsService settingsService) {
   }
 
   @Override
-  public CompletableFuture<GlobalViewsSettings> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<GlobalViewsSettings> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final GlobalSettingsInfo globalSettings = _settingsService.getGlobalSettings(context.getAuthentication());
-        return globalSettings != null && globalSettings.hasViews()
-            ? mapGlobalViewsSettings(globalSettings.getViews())
-            : new GlobalViewsSettings();
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to retrieve Global Views Settings", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final GlobalSettingsInfo globalSettings =
+                _settingsService.getGlobalSettings(context.getAuthentication());
+            return globalSettings != null && globalSettings.hasViews()
+                ? mapGlobalViewsSettings(globalSettings.getViews())
+                : new GlobalViewsSettings();
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to retrieve Global Views Settings", e);
+          }
+        });
   }
 
-  private static GlobalViewsSettings mapGlobalViewsSettings(@Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) {
+  private static GlobalViewsSettings mapGlobalViewsSettings(
+      @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) {
     final GlobalViewsSettings result = new GlobalViewsSettings();
     if (settings.hasDefaultView()) {
       result.setDefaultView(settings.getDefaultView().toString());
     }
     return result;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java
index c90ec04b3a2df..7d37683785fc2 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.settings.view;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.SetMode;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -15,13 +17,10 @@
 import java.util.concurrent.CompletableFuture;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 /**
  * Resolver responsible for updating the Global Views settings.
  *
- * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege.
+ * <p>This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege.
  */
 public class UpdateGlobalViewsSettingsResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -32,45 +31,50 @@ public UpdateGlobalViewsSettingsResolver(@Nonnull final SettingsService settings
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    final UpdateGlobalViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class);
+    final UpdateGlobalViewsSettingsInput input =
+        bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class);
 
-    return CompletableFuture.supplyAsync(() -> {
-      if (AuthorizationUtils.canManageGlobalViews(context)) {
-        try {
-          // First, fetch the existing global settings. This does a R-M-F.
-          final GlobalSettingsInfo maybeGlobalSettings = _settingsService.getGlobalSettings(context.getAuthentication());
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (AuthorizationUtils.canManageGlobalViews(context)) {
+            try {
+              // First, fetch the existing global settings. This does a R-M-F.
+              final GlobalSettingsInfo maybeGlobalSettings =
+                  _settingsService.getGlobalSettings(context.getAuthentication());
 
-          final GlobalSettingsInfo newGlobalSettings = maybeGlobalSettings != null
-              ? maybeGlobalSettings
-              : new GlobalSettingsInfo();
+              final GlobalSettingsInfo newGlobalSettings =
+                  maybeGlobalSettings != null ? maybeGlobalSettings : new GlobalSettingsInfo();
 
-          final GlobalViewsSettings newGlobalViewsSettings = newGlobalSettings.hasViews()
-              ? newGlobalSettings.getViews()
-              : new GlobalViewsSettings();
+              final GlobalViewsSettings newGlobalViewsSettings =
+                  newGlobalSettings.hasViews()
+                      ? newGlobalSettings.getViews()
+                      : new GlobalViewsSettings();
 
-          // Next, patch the global views settings.
-          updateViewsSettings(newGlobalViewsSettings, input);
-          newGlobalSettings.setViews(newGlobalViewsSettings);
+              // Next, patch the global views settings.
+              updateViewsSettings(newGlobalViewsSettings, input);
+              newGlobalSettings.setViews(newGlobalViewsSettings);
 
-          // Finally, write back to GMS.
-          _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication());
-          return true;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to update global view settings! %s", input), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+              // Finally, write back to GMS.
+              _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication());
+              return true;
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to update global view settings! %s", input), e);
+            }
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }
 
   private static void updateViewsSettings(
       @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings,
      @Nonnull final UpdateGlobalViewsSettingsInput input) {
-    settings.setDefaultView(input.getDefaultView() != null
-        ? UrnUtils.getUrn(input.getDefaultView())
-        : null,
+    settings.setDefaultView(
+        input.getDefaultView() != null ? UrnUtils.getUrn(input.getDefaultView()) : null,
         SetMode.REMOVE_IF_NULL);
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java
index 0e93cdfb231fa..9ea6cba0f211a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.step;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.utils.EntityKeyUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
@@ -27,59 +31,64 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.utils.EntityKeyUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
-public class BatchGetStepStatesResolver implements DataFetcher<CompletableFuture<BatchGetStepStatesResult>> {
+public class BatchGetStepStatesResolver
+    implements DataFetcher<CompletableFuture<BatchGetStepStatesResult>> {
 
   private final EntityClient _entityClient;
 
   @Override
-  public CompletableFuture<BatchGetStepStatesResult> get(@Nonnull final DataFetchingEnvironment environment)
-      throws Exception {
+  public CompletableFuture<BatchGetStepStatesResult> get(
+      @Nonnull final DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();
     final BatchGetStepStatesInput input =
         bindArgument(environment.getArgument("input"), BatchGetStepStatesInput.class);
 
-    return CompletableFuture.supplyAsync(() -> {
-      Map<Urn, String> urnsToIdsMap;
-      Set<Urn> urns;
-      Map<Urn, EntityResponse> entityResponseMap;
+    return CompletableFuture.supplyAsync(
+        () -> {
+          Map<Urn, String> urnsToIdsMap;
+          Set<Urn> urns;
+          Map<Urn, EntityResponse> entityResponseMap;
 
-      try {
-        urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication);
-        urns = urnsToIdsMap.keySet();
-        entityResponseMap = _entityClient.batchGetV2(DATAHUB_STEP_STATE_ENTITY_NAME, urns,
-            ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), authentication);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
+          try {
+            urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication);
+            urns = urnsToIdsMap.keySet();
+            entityResponseMap =
+                _entityClient.batchGetV2(
+                    DATAHUB_STEP_STATE_ENTITY_NAME,
+                    urns,
+                    ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME),
+                    authentication);
+          } catch (Exception e) {
+            throw new RuntimeException(e);
+          }
 
-      final Map<Urn, DataHubStepStateProperties> stepStatePropertiesMap = new HashMap<>();
-      for (Map.Entry<Urn, EntityResponse> entry : entityResponseMap.entrySet()) {
-        final Urn urn = entry.getKey();
-        final DataHubStepStateProperties stepStateProperties = getStepStateProperties(urn, entry.getValue());
-        if (stepStateProperties != null) {
-          stepStatePropertiesMap.put(urn, stepStateProperties);
-        }
-      }
+          final Map<Urn, DataHubStepStateProperties> stepStatePropertiesMap = new HashMap<>();
+          for (Map.Entry<Urn, EntityResponse> entry : entityResponseMap.entrySet()) {
+            final Urn urn = entry.getKey();
+            final DataHubStepStateProperties stepStateProperties =
+                getStepStateProperties(urn, entry.getValue());
+            if (stepStateProperties != null) {
+              stepStatePropertiesMap.put(urn, stepStateProperties);
+            }
+          }
 
-      final List<StepStateResult> results = stepStatePropertiesMap.entrySet()
-          .stream()
-          .map(entry -> buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue()))
-          .collect(Collectors.toList());
-      final BatchGetStepStatesResult result = new BatchGetStepStatesResult();
-      result.setResults(results);
-      return result;
-    });
+          final List<StepStateResult> results =
+              stepStatePropertiesMap.entrySet().stream()
+                  .map(
+                      entry ->
+                          buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue()))
+                  .collect(Collectors.toList());
+          final BatchGetStepStatesResult result = new BatchGetStepStatesResult();
+          result.setResults(results);
+          return result;
+        });
  }
 
   @Nonnull
-  private Map<Urn, String> buildUrnToIdMap(@Nonnull final List<String> ids, @Nonnull final Authentication authentication)
+  private Map<Urn, String> buildUrnToIdMap(
+      @Nonnull final List<String> ids, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
     final Map<Urn, String> urnToIdMap = new HashMap<>();
     for (final String id : ids) {
@@ -99,37 +108,37 @@ private Urn getStepStateUrn(@Nonnull final String id) {
   }
 
   @Nullable
-  private DataHubStepStateProperties getStepStateProperties(@Nonnull final Urn urn,
-      @Nonnull final EntityResponse entityResponse) {
+  private DataHubStepStateProperties getStepStateProperties(
+      @Nonnull final Urn urn, @Nonnull final EntityResponse entityResponse) {
     final EnvelopedAspectMap aspectMap = entityResponse.getAspects();
     // If aspect is not present, log the error and return null.
     if (!aspectMap.containsKey(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME)) {
       log.error("Failed to find step state properties for urn: " + urn);
       return null;
     }
-    return new DataHubStepStateProperties(aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data());
+    return new DataHubStepStateProperties(
+        aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data());
   }
 
   @Nonnull
-  private StepStateResult buildStepStateResult(@Nonnull final String id,
-      @Nonnull final DataHubStepStateProperties stepStateProperties) {
+  private StepStateResult buildStepStateResult(
+      @Nonnull final String id, @Nonnull final DataHubStepStateProperties stepStateProperties) {
     final StepStateResult result = new StepStateResult();
     result.setId(id);
-    final List<StringMapEntry> mappedProperties = stepStateProperties
-        .getProperties()
-        .entrySet()
-        .stream()
-        .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue()))
-        .collect(Collectors.toList());
+    final List<StringMapEntry> mappedProperties =
+        stepStateProperties.getProperties().entrySet().stream()
+            .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue()))
+            .collect(Collectors.toList());
     result.setProperties(mappedProperties);
     return result;
   }
 
   @Nonnull
-  private StringMapEntry buildStringMapEntry(@Nonnull final String key, @Nonnull final String value) {
+  private StringMapEntry buildStringMapEntry(
+      @Nonnull final String key, @Nonnull final String value) {
     final StringMapEntry entry = new StringMapEntry();
     entry.setKey(key);
     entry.setValue(value);
     return entry;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java
index e4c21207ddd34..23d77ebba7457 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.step;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.entity.AspectUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -25,19 +29,15 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.entity.AspectUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
-public class BatchUpdateStepStatesResolver implements DataFetcher<CompletableFuture<BatchUpdateStepStatesResult>> {
+public class BatchUpdateStepStatesResolver
+    implements DataFetcher<CompletableFuture<BatchUpdateStepStatesResult>> {
 
   private final EntityClient _entityClient;
 
   @Override
-  public CompletableFuture<BatchUpdateStepStatesResult> get(@Nonnull final DataFetchingEnvironment environment)
-      throws Exception {
+  public CompletableFuture<BatchUpdateStepStatesResult> get(
+      @Nonnull final DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();
 
@@ -46,20 +46,23 @@ public CompletableFuture<BatchUpdateStepStatesResult> get(@Nonnull final DataFet
     final List<StepStateInput> states = input.getStates();
     final String actorUrnStr = authentication.getActor().toUrnStr();
 
-    return CompletableFuture.supplyAsync(() -> {
-      final Urn actorUrn = UrnUtils.getUrn(actorUrnStr);
-      final AuditStamp auditStamp = new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis());
-      final List<UpdateStepStateResult> results = states
-          .stream()
-          .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication))
-          .collect(Collectors.toList());
-      final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult();
-      result.setResults(results);
-      return result;
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final Urn actorUrn = UrnUtils.getUrn(actorUrnStr);
+          final AuditStamp auditStamp =
+              new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis());
+          final List<UpdateStepStateResult> results =
+              states.stream()
+                  .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication))
+                  .collect(Collectors.toList());
+          final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult();
+          result.setResults(results);
+          return result;
+        });
   }
 
-  private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStateInput state,
+  private UpdateStepStateResult buildUpdateStepStateResult(
+      @Nonnull final StepStateInput state,
       @Nonnull final AuditStamp auditStamp,
       @Nonnull final Authentication authentication) {
     final String id = state.getId();
@@ -70,19 +73,27 @@ private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStat
     return updateStepStateResult;
   }
 
-  private boolean updateStepState(@Nonnull final String id,
-      @Nonnull final List<StringMapEntryInput> inputProperties, @Nonnull final AuditStamp auditStamp,
+  private boolean updateStepState(
+      @Nonnull final String id,
+      @Nonnull final List<StringMapEntryInput> inputProperties,
+      @Nonnull final AuditStamp auditStamp,
       @Nonnull final Authentication authentication) {
     final Map<String, String> properties =
-        inputProperties.stream().collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue));
+        inputProperties.stream()
+            .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue));
     try {
       final DataHubStepStateKey stepStateKey = new DataHubStepStateKey().setId(id);
       final DataHubStepStateProperties stepStateProperties =
-          new DataHubStepStateProperties().setProperties(new StringMap(properties)).setLastModified(auditStamp);
+          new DataHubStepStateProperties()
+              .setProperties(new StringMap(properties))
+              .setLastModified(auditStamp);
       final MetadataChangeProposal proposal =
-          buildMetadataChangeProposal(DATAHUB_STEP_STATE_ENTITY_NAME, stepStateKey,
-              DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, stepStateProperties);
+          buildMetadataChangeProposal(
+              DATAHUB_STEP_STATE_ENTITY_NAME,
+              stepStateKey,
+              DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
+              stepStateProperties);
       _entityClient.ingestProposal(proposal, authentication, false);
       return true;
     } catch (Exception e) {
@@ -90,4 +101,4 @@ private boolean updateStepState(@Nonnull final String id,
       return false;
     }
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java
index 9ee24e6941017..153c95c697a77 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.SetMode;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -22,14 +27,9 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS privilege.
+ * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS
+ * privilege.
  */
 @Slf4j
 @RequiredArgsConstructor
@@ -42,43 +42,58 @@ public class CreateTagResolver implements DataFetcher<CompletableFuture<String>>
   public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final CreateTagInput input = bindArgument(environment.getArgument("input"), CreateTagInput.class);
-
-    return CompletableFuture.supplyAsync(() -> {
+    final CreateTagInput input =
+        bindArgument(environment.getArgument("input"), CreateTagInput.class);
 
-      if (!AuthorizationUtils.canCreateTags(context)) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!AuthorizationUtils.canCreateTags(context)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
 
-      try {
-        // Create the Tag Key
-        final TagKey key = new TagKey();
+          try {
+            // Create the Tag Key
+            final TagKey key = new TagKey();
 
-        // Take user provided id OR generate a random UUID for the Tag.
-        final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString();
-        key.setName(id);
+            // Take user provided id OR generate a random UUID for the Tag.
+            final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString();
+            key.setName(id);
 
-        if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), context.getAuthentication())) {
-          throw new IllegalArgumentException("This Tag already exists!");
-        }
+            if (_entityClient.exists(
+                EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME),
+                context.getAuthentication())) {
+              throw new IllegalArgumentException("This Tag already exists!");
+            }
 
-        // Create the MCP
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME,
-            TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input));
-        String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER;
-        if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) {
-          log.warn("Technical owner does not exist, defaulting to None ownership.");
-          ownershipType = OwnershipType.NONE;
-        }
+            // Create the MCP
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithKey(
+                    key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input));
+            String tagUrn =
+                _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER;
+            if (!_entityService.exists(
+                UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) {
+              log.warn("Technical owner does not exist, defaulting to None ownership.");
+              ownershipType = OwnershipType.NONE;
+            }
 
-        OwnerUtils.addCreatorAsOwner(context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService);
-        return tagUrn;
-      } catch (Exception e) {
-        log.error("Failed to create Tag with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), e);
-      }
-    });
+            OwnerUtils.addCreatorAsOwner(
+                context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService);
+            return tagUrn;
+          } catch (Exception e) {
+            log.error(
+                "Failed to create Tag with id: {}, name: {}: {}",
+                input.getId(),
+                input.getName(),
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()),
+                e);
+          }
+        });
   }
 
   private TagProperties mapTagProperties(final CreateTagInput input) {
@@ -87,4 +102,4 @@ private TagProperties mapTagProperties(final CreateTagInput input) {
     result.setDescription(input.getDescription(), SetMode.IGNORE_NULL);
     return result;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java
index e6c3cf49df8db..c5b86b013103c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java
@@ -11,10 +11,7 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Resolver responsible for hard deleting a particular DataHub Corp Group
- */
+/** Resolver responsible for hard deleting a particular DataHub Corp Group */
 @Slf4j
 public class DeleteTagResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -25,33 +22,41 @@ public DeleteTagResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String tagUrn = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(tagUrn);
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (AuthorizationUtils.canManageTags(context) || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) {
-        try {
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-
-          // Asynchronously Delete all references to the entity (to return quickly)
-          CompletableFuture.runAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (AuthorizationUtils.canManageTags(context)
+              || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) {
             try {
-              _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+              _entityClient.deleteEntity(urn, context.getAuthentication());
+
+              // Asynchronously Delete all references to the entity (to return quickly)
+              CompletableFuture.runAsync(
+                  () -> {
+                    try {
+                      _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+                    } catch (Exception e) {
+                      log.error(
+                          String.format(
+                              "Caught exception while attempting to clear all entity references for Tag with urn %s",
+                              urn),
+                          e);
+                    }
+                  });
+
+              return true;
             } catch (Exception e) {
-              log.error(String.format(
-                  "Caught exception while attempting to clear all entity references for Tag with urn %s", urn), e);
+              throw new RuntimeException(
+                  String.format("Failed to perform delete against domain with urn %s", tagUrn), e);
             }
-          });
-
-          return true;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", tagUrn), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java
index e2aa5905be8bd..7b9290b4532b5 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java
@@ -1,11 +1,14 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
-import com.datahub.authorization.ConjunctivePrivilegeGroup;
-import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.resolvers.AuthUtils;
 import com.linkedin.entity.client.EntityClient;
@@ -21,19 +24,17 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset.
+ * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS
+ * privilege for a particular asset.
  */
 @Slf4j
 @RequiredArgsConstructor
 public class SetTagColorResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
   private final EntityClient _entityClient;
-  private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient
+  private final EntityService
+      _entityService; // TODO: Remove this when 'exists' added to EntityClient
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
@@ -42,48 +43,55 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw
     final Urn tagUrn = Urn.createFromString(environment.getArgument("urn"));
     final String colorHex = environment.getArgument("colorHex");
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      // If user is not authorized, then throw exception.
-      if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-
-      // If tag does not exist, then throw exception.
-      if (!_entityService.exists(tagUrn)) {
-        throw new IllegalArgumentException(
-            String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn));
-      }
-
-      try {
-        TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity(
-            tagUrn.toString(),
-            TAG_PROPERTIES_ASPECT_NAME,
-            _entityService,
-            null);
-
-        if (tagProperties == null) {
-          throw new IllegalArgumentException("Failed to set tag color. Tag properties does not yet exist!");
-        }
-
-        tagProperties.setColorHex(colorHex);
-
-        // Update the TagProperties aspect.
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties);
-        _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage());
-        throw new RuntimeException(String.format("Failed to set color for Tag with urn %s", tagUrn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+
+          // If user is not authorized, then throw exception.
+          if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+
+          // If tag does not exist, then throw exception.
+          if (!_entityService.exists(tagUrn)) {
+            throw new IllegalArgumentException(
+                String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn));
+          }
+
+          try {
+            TagProperties tagProperties =
+                (TagProperties)
+                    EntityUtils.getAspectFromEntity(
+                        tagUrn.toString(), TAG_PROPERTIES_ASPECT_NAME, _entityService, null);
+
+            if (tagProperties == null) {
+              throw new IllegalArgumentException(
+                  "Failed to set tag color. Tag properties does not yet exist!");
+            }
+
+            tagProperties.setColorHex(colorHex);
+
+            // Update the TagProperties aspect.
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithUrn(
+                    tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties);
+            _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            return true;
+          } catch (Exception e) {
+            log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to set color for Tag with urn %s", tagUrn), e);
+          }
+        });
   }
 
   public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, Urn entityUrn) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of(
-        AuthUtils.ALL_PRIVILEGES_GROUP,
-        new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType()))
-    ));
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                AuthUtils.ALL_PRIVILEGES_GROUP,
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType()))));
 
     return AuthorizationUtils.isAuthorized(
         context.getAuthorizer(),
@@ -92,4 +100,4 @@ public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, U
         entityUrn.toString(),
         orPrivilegeGroups);
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java
index 14ae9f96eb683..b12b345a7b211 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.data.template.SetMode;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -15,15 +20,7 @@
 import java.util.UUID;
 import java.util.concurrent.CompletableFuture;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Creates or updates a Test. Requires the MANAGE_TESTS privilege.
- */
+/** Creates or updates a Test. Requires the MANAGE_TESTS privilege. */
 public class CreateTestResolver implements DataFetcher<CompletableFuture<String>> {
 
   private final EntityClient _entityClient;
@@ -36,39 +33,44 @@ public CreateTestResolver(final EntityClient entityClient) {
   public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();
-    final CreateTestInput input = bindArgument(environment.getArgument("input"), CreateTestInput.class);
-
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (canManageTests(context)) {
-
-        try {
-
-          // Create new test
-          // Since we are creating a new Test, we need to generate a unique UUID.
-          final UUID uuid = UUID.randomUUID();
-          final String uuidStr = input.getId() == null ? uuid.toString() : input.getId();
-
-          // Create the Ingestion source key
-          final TestKey key = new TestKey();
-          key.setId(uuidStr);
-
-          if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME),
-              authentication)) {
-            throw new IllegalArgumentException("This Test already exists!");
+    final CreateTestInput input =
+        bindArgument(environment.getArgument("input"), CreateTestInput.class);
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (canManageTests(context)) {
+
+            try {
+
+              // Create new test
+              // Since we are creating a new Test, we need to generate a unique UUID.
+              final UUID uuid = UUID.randomUUID();
+              final String uuidStr = input.getId() == null ? uuid.toString() : input.getId();
+
+              // Create the Ingestion source key
+              final TestKey key = new TestKey();
+              key.setId(uuidStr);
+
+              if (_entityClient.exists(
+                  EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), authentication)) {
+                throw new IllegalArgumentException("This Test already exists!");
+              }
+
+              // Create the Test info.
+              final TestInfo info = mapCreateTestInput(input);
+
+              final MetadataChangeProposal proposal =
+                  buildMetadataChangeProposalWithKey(
+                      key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info);
+              return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to perform update against Test with urn %s", input), e);
+            }
           }
-
-          // Create the Test info.
-          final TestInfo info = mapCreateTestInput(input);
-
-          final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info);
-          return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform update against Test with urn %s", input), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }
 
   private static TestInfo mapCreateTestInput(final CreateTestInput input) {
@@ -79,5 +81,4 @@ private static TestInfo mapCreateTestInput(final CreateTestInput input) {
     result.setDefinition(mapDefinition(input.getDefinition()));
     return result;
   }
-
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java
index e0c878dc652bd..6bc7e479b305c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
@@ -7,8 +9,6 @@
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
 import java.util.concurrent.CompletableFuture;
-import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
-
 
 /**
  * Resolver responsible for hard deleting a particular DataHub Test. Requires MANAGE_TESTS
@@ -23,20 +23,24 @@ public DeleteTestResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String testUrn = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(testUrn);
-    return CompletableFuture.supplyAsync(() -> {
-      if (canManageTests(context)) {
-        try {
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-          return true;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform delete against Test with urn %s", testUrn), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (canManageTests(context)) {
+            try {
+              _entityClient.deleteEntity(urn, context.getAuthentication());
+              return true;
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to perform delete against Test with urn %s", testUrn), e);
+            }
+          }
+          throw new AuthorizationException(
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index a1e1e48aae847..f345d9ceb21e5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.datahub.graphql.generated.ListTestsResult; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -20,12 +23,9 @@ import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - - /** - * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform + * privilege. */ public class ListTestsResolver implements DataFetcher<CompletableFuture<ListTestsResult>> { @@ -39,45 +39,50 @@ public ListTestsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListTestsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListTestsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - final ListTestsInput input = bindArgument(environment.getArgument("input"), ListTestsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? "" : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + final ListTestsInput input = + bindArgument(environment.getArgument("input"), ListTestsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? "" : input.getQuery(); - try { - // First, get all group Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.TEST_ENTITY_NAME, - query, - Collections.emptyMap(), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all group Urns. 
+              final SearchResult gmsResult =
+                  _entityClient.search(
+                      Constants.TEST_ENTITY_NAME,
+                      query,
+                      Collections.emptyMap(),
+                      start,
+                      count,
+                      context.getAuthentication(),
+                      new SearchFlags().setFulltext(true));
 
-          // Now that we have entities we can bind this to a result.
-          final ListTestsResult result = new ListTestsResult();
-          result.setStart(gmsResult.getFrom());
-          result.setCount(gmsResult.getPageSize());
-          result.setTotal(gmsResult.getNumEntities());
-          result.setTests(mapUnresolvedTests(gmsResult.getEntities()));
-          return result;
-        } catch (Exception e) {
-          throw new RuntimeException("Failed to list tests", e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+              // Now that we have entities we can bind this to a result.
+              final ListTestsResult result = new ListTestsResult();
+              result.setStart(gmsResult.getFrom());
+              result.setCount(gmsResult.getPageSize());
+              result.setTotal(gmsResult.getNumEntities());
+              result.setTests(mapUnresolvedTests(gmsResult.getEntities()));
+              return result;
+            } catch (Exception e) {
+              throw new RuntimeException("Failed to list tests", e);
+            }
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }
 
-  // This method maps urns returned from the list endpoint into Partial Test objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial Test objects which will be
+  // resolved by a separate Batch resolver.
   private List<Test> mapUnresolvedTests(final SearchEntityArray entityArray) {
     final List<Test> results = new ArrayList<>();
     for (final SearchEntity entity : entityArray) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java
index 9c4b5a4d4e0fa..6cb55100ec08e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java
@@ -20,10 +20,7 @@
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * GraphQL Resolver used for fetching the list of tests for an entity
- */
+/** GraphQL Resolver used for fetching the list of tests for an entity */
 @Slf4j
 public class TestResultsResolver implements DataFetcher<CompletableFuture<TestResults>> {
 
@@ -38,42 +35,44 @@ public CompletableFuture<TestResults> get(DataFetchingEnvironment environment) t
     final QueryContext context = environment.getContext();
     final Urn entityUrn = Urn.createFromString(((Entity) environment.getSource()).getUrn());
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context);
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context);
 
-      if (gmsTestResults == null) {
-        return null;
-      }
+          if (gmsTestResults == null) {
+            return null;
+          }
 
-      TestResults testResults = new TestResults();
-      testResults.setPassing(mapTestResults(gmsTestResults.getPassing()));
-      testResults.setFailing(mapTestResults(gmsTestResults.getFailing()));
-      return testResults;
-    });
+          TestResults testResults = new TestResults();
+          testResults.setPassing(mapTestResults(gmsTestResults.getPassing()));
+
testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); + return testResults; + }); } @Nullable - private com.linkedin.test.TestResults getTestResults(final Urn entityUrn, final QueryContext context) { + private com.linkedin.test.TestResults getTestResults( + final Urn entityUrn, final QueryContext context) { try { - final EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), - context.getAuthentication()); - if (entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { + final EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { return new com.linkedin.test.TestResults( - entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME) - .getValue() - .data()); + entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME).getValue().data()); } return null; } catch (Exception e) { - throw new RuntimeException("Failed to get test results", e); + throw new RuntimeException("Failed to get test results", e); } } - private List<TestResult> mapTestResults(final @Nonnull List<com.linkedin.test.TestResult> gmsResults) { + private List<TestResult> mapTestResults( + final @Nonnull List<com.linkedin.test.TestResult> gmsResults) { final List<TestResult> results = new ArrayList<>(); for (com.linkedin.test.TestResult gmsResult : gmsResults) { results.add(mapTestResult(gmsResult)); @@ -89,4 +88,4 @@ private TestResult mapTestResult(final @Nonnull com.linkedin.test.TestResult gms testResult.setType(TestResultType.valueOf(gmsResult.getType().toString())); return testResult; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java index 248da3e58d8ae..922c28097f83c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,14 +17,9 @@ import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; - - public class TestUtils { - /** - * Returns true if the authenticated user is able to manage tests. - */ + /** Returns true if the authenticated user is able to manage tests. 
*/
   public static boolean canManageTests(@Nonnull QueryContext context) {
     return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE);
   }
@@ -38,11 +35,12 @@ public static EntityResponse buildEntityResponse(Map<String, RecordTemplate> asp
     final EntityResponse entityResponse = new EntityResponse();
     final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
     for (Map.Entry<String, RecordTemplate> entry : aspects.entrySet()) {
-      aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
+      aspectMap.put(
+          entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
     }
     entityResponse.setAspects(aspectMap);
     return entityResponse;
   }
 
-  private TestUtils() { }
+  private TestUtils() {}
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
index 1dd8518076796..b5d6e50fe0774 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.SetMode;
@@ -13,15 +18,7 @@
 import graphql.schema.DataFetchingEnvironment;
 import java.util.concurrent.CompletableFuture;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Updates or updates a Test. Requires the MANAGE_TESTS privilege.
- */
+/** Updates a Test. Requires the MANAGE_TESTS privilege. */
 public class UpdateTestResolver implements DataFetcher<CompletableFuture<String>> {
 
   private final EntityClient _entityClient;
@@ -35,26 +32,30 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (canManageTests(context)) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (canManageTests(context)) {
 
-        final String urn = environment.getArgument("urn");
-        final UpdateTestInput input = bindArgument(environment.getArgument("input"), UpdateTestInput.class);
+            final String urn = environment.getArgument("urn");
+            final UpdateTestInput input =
+                bindArgument(environment.getArgument("input"), UpdateTestInput.class);
 
-        // Update the Test info - currently this simply creates a new test with same urn.
-        final TestInfo info = mapUpdateTestInput(input);
+            // Update the Test info - currently this simply creates a new test with same urn.
+            final TestInfo info = mapUpdateTestInput(input);
 
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info);
-        try {
-          return _entityClient.ingestProposal(proposal, authentication, false);
-        } catch (Exception e) {
-          throw new RuntimeException(
-              String.format("Failed to perform update against Test with urn %s", input), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithUrn(
+                    UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info);
+            try {
+              return _entityClient.ingestProposal(proposal, authentication, false);
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to perform update against Test with urn %s", urn), e);
+            }
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }
 
   private static TestInfo mapUpdateTestInput(final UpdateTestInput input) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java
index 499e7c9ac177d..ea234280ed6c2 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.timeline;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.GetSchemaBlameInput;
 import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult;
@@ -16,15 +18,13 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 /*
 Returns the most recent changes made to each column in a dataset at each dataset version.
 TODO: Add tests for this resolver.
  */
 @Slf4j
-public class GetSchemaBlameResolver implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> {
+public class GetSchemaBlameResolver
+    implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> {
   private final TimelineService _timelineService;
 
   public GetSchemaBlameResolver(TimelineService timelineService) {
@@ -32,37 +32,37 @@ public GetSchemaBlameResolver(TimelineService timelineService) {
   }
 
   @Override
-  public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment) throws Exception {
-    final GetSchemaBlameInput input = bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class);
+  public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment)
+      throws Exception {
+    final GetSchemaBlameInput input =
+        bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class);
 
     final String datasetUrnString = input.getDatasetUrn();
     final long startTime = 0;
     final long endTime = 0;
     final String version = input.getVersion() == null ?
null : input.getVersion(); - return CompletableFuture.supplyAsync(() -> { - try { - final Set<ChangeCategory> changeCategorySet = Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); - final Urn datasetUrn = Urn.createFromString(datasetUrnString); - final List<ChangeTransaction> changeTransactionList = - _timelineService.getTimeline( - datasetUrn, - changeCategorySet, - startTime, - endTime, - null, - null, - false); - return SchemaBlameMapper.map(changeTransactionList, version); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set<ChangeCategory> changeCategorySet = + Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); + final Urn datasetUrn = Urn.createFromString(datasetUrnString); + final List<ChangeTransaction> changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaBlameMapper.map(changeTransactionList, version); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java index cfad1395a61a8..5063dbbf7ccf3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListInput; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; @@ -16,14 +18,12 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. 
*/ @Slf4j -public class GetSchemaVersionListResolver implements DataFetcher<CompletableFuture<GetSchemaVersionListResult>> { +public class GetSchemaVersionListResolver + implements DataFetcher<CompletableFuture<GetSchemaVersionListResult>> { private final TimelineService _timelineService; public GetSchemaVersionListResolver(TimelineService timelineService) { @@ -31,7 +31,8 @@ public GetSchemaVersionListResolver(TimelineService timelineService) { } @Override - public CompletableFuture<GetSchemaVersionListResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GetSchemaVersionListResult> get( + final DataFetchingEnvironment environment) throws Exception { final GetSchemaVersionListInput input = bindArgument(environment.getArgument("input"), GetSchemaVersionListInput.class); @@ -39,23 +40,27 @@ public CompletableFuture<GetSchemaVersionListResult> get(final DataFetchingEnvir final long startTime = 0; final long endTime = 0; - return CompletableFuture.supplyAsync(() -> { - try { - final Set<ChangeCategory> changeCategorySet = new HashSet<>(); - changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); - Urn datasetUrn = Urn.createFromString(datasetUrnString); - List<ChangeTransaction> changeTransactionList = - _timelineService.getTimeline(datasetUrn, changeCategorySet, startTime, endTime, null, null, false); - return SchemaVersionListMapper.map(changeTransactionList); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set<ChangeCategory> changeCategorySet = new HashSet<>(); + changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); + Urn datasetUrn = Urn.createFromString(datasetUrnString); + List<ChangeTransaction> changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaVersionListMapper.map(changeTransactionList); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java index 45998bdae45b0..14429696fefd4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java @@ -5,16 +5,18 @@ import graphql.schema.TypeResolver; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface + * type. */ public class AspectInterfaceTypeResolver implements TypeResolver { - public AspectInterfaceTypeResolver() { } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - // TODO(Gabe): Fill this out. This method is not called today. 
We will need to fill this - // out in the case we ever want to return fields of type Aspect in graphql. Right now - // we just use Aspect to define the shared `version` field. - return null; - } + public AspectInterfaceTypeResolver() {} + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this + // out in the case we ever want to return fields of type Aspect in graphql. Right now + // we just use Aspect to define the shared `version` field. + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java index 1a5f06da04014..52c20254332b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java @@ -6,27 +6,29 @@ import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - import java.util.List; import java.util.stream.Collectors; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface + * type. */ public class EntityInterfaceTypeResolver implements TypeResolver { - private final List<EntityType<?, ?>> _entities; + private final List<EntityType<?, ?>> _entities; - public EntityInterfaceTypeResolver(final List<EntityType<?, ?>> entities) { - _entities = entities; - } + public EntityInterfaceTypeResolver(final List<EntityType<?, ?>> entities) { + _entities = entities; + } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - Object javaObject = env.getObject(); - final LoadableType<?, ?> filteredEntity = Iterables.getOnlyElement(_entities.stream() + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + Object javaObject = env.getObject(); + final LoadableType<?, ?> filteredEntity = + Iterables.getOnlyElement( + _entities.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); - } + return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java index a69500f24ee24..aeeb9bafa1f4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java @@ -4,30 +4,30 @@ import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class HyperParameterValueTypeResolver implements TypeResolver { - public static final String 
STRING_BOX = "StringBox"; - public static final String INT_BOX = "IntBox"; - public static final String FLOAT_BOX = "FloatBox"; - public static final String BOOLEAN_BOX = "BooleanBox"; + public static final String STRING_BOX = "StringBox"; + public static final String INT_BOX = "IntBox"; + public static final String FLOAT_BOX = "FloatBox"; + public static final String BOOLEAN_BOX = "BooleanBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else if (env.getObject() instanceof IntBox) { - return env.getSchema().getObjectType(INT_BOX); - } else if (env.getObject() instanceof BooleanBox) { - return env.getSchema().getObjectType(BOOLEAN_BOX); - } else if (env.getObject() instanceof FloatBox) { - return env.getSchema().getObjectType(FLOAT_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else if (env.getObject() instanceof IntBox) { + return env.getSchema().getObjectType(INT_BOX); + } else if (env.getObject() instanceof BooleanBox) { + return env.getSchema().getObjectType(BOOLEAN_BOX); + } else if (env.getObject() instanceof FloatBox) { + return env.getSchema().getObjectType(FLOAT_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java index 25a9a540f51b1..ff190cff1339e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java @@ -8,17 +8,17 @@ public class PlatformSchemaUnionTypeResolver implements TypeResolver { - private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; - private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; + private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; + private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TableSchema) { - return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); - } else if (env.getObject() instanceof KeyValueSchema) { - return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TableSchema) { + return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); + } else if (env.getObject() instanceof KeyValueSchema) { + return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); + } else { + throw new RuntimeException("Unrecognized object type provided to type resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java index 0dc7b0485c51c..c5be5725f1d45 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.type; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class ResultsTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; + public static final String STRING_BOX = "StringBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java index 5263e6b9b7df6..c66588008b103 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java @@ -6,7 +6,8 @@ public class TimeSeriesAspectInterfaceTypeResolver implements TypeResolver { - public TimeSeriesAspectInterfaceTypeResolver() { } + public TimeSeriesAspectInterfaceTypeResolver() {} + @Override public GraphQLObjectType getType(TypeResolutionEnvironment env) { // TODO(John): Fill this out. 
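Aside from whitespace, all of the type resolvers reformatted above share one shape: inspect the runtime class of env.getObject() and look up the matching GraphQL object type by name on the schema, throwing on anything unrecognized. A minimal standalone sketch of that pattern follows; the ExampleBox payload class and the "ExampleBox" schema type are hypothetical placeholders, not part of this change:

import graphql.TypeResolutionEnvironment;
import graphql.schema.GraphQLObjectType;
import graphql.schema.TypeResolver;

public class ExampleBoxTypeResolver implements TypeResolver {

  // Hypothetical payload class, used only for illustration.
  public static class ExampleBox {}

  // Must match the name of an object type registered in the GraphQL schema.
  private static final String EXAMPLE_BOX = "ExampleBox";

  @Override
  public GraphQLObjectType getType(TypeResolutionEnvironment env) {
    // Dispatch on the runtime type of the source object, as the resolvers above do.
    if (env.getObject() instanceof ExampleBox) {
      return env.getSchema().getObjectType(EXAMPLE_BOX);
    }
    throw new RuntimeException(
        "Unrecognized object type provided to type resolver, Type: " + env.getObject().toString());
  }
}

The instanceof chain trades extensibility for simplicity: adding a new boxed type means adding a branch, but the schema lookup stays a plain name match.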
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java index d02f1a5f786a7..db26da05a2ba4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -10,14 +13,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Resolver responsible for creating a password reset token that Admins can share with native users to reset their - * credentials. + * Resolver responsible for creating a password reset token that Admins can share with native users + * to reset their credentials. */ -public class CreateNativeUserResetTokenResolver implements DataFetcher<CompletableFuture<ResetToken>> { +public class CreateNativeUserResetTokenResolver + implements DataFetcher<CompletableFuture<ResetToken>> { private final NativeUserService _nativeUserService; public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserService) { @@ -25,7 +26,8 @@ public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserServ } @Override - public CompletableFuture<ResetToken> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ResetToken> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateNativeUserResetTokenInput input = bindArgument(environment.getArgument("input"), CreateNativeUserResetTokenInput.class); @@ -38,15 +40,18 @@ public CompletableFuture<ResetToken> get(final DataFetchingEnvironment environme "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - String resetToken = - _nativeUserService.generateNativeUserPasswordResetToken(userUrnString, context.getAuthentication()); - return new ResetToken(resetToken); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to generate password reset token for user: %s", userUrnString)); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String resetToken = + _nativeUserService.generateNativeUserPasswordResetToken( + userUrnString, context.getAuthentication()); + return new ResetToken(resetToken); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to generate password reset token for user: %s", userUrnString)); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 69da642ad6bb1..215d53299c8ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,10 +26,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListUsersResolver implements DataFetcher<CompletableFuture<ListUsersResult>> { private static final Integer DEFAULT_START = 0; @@ -40,48 +39,60 @@ public ListUsersResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListUsersResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListUsersResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListUsersInput input = bindArgument(environment.getArgument("input"), ListUsersInput.class); + final ListUsersInput input = + bindArgument(environment.getArgument("input"), ListUsersInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all policy Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all policy Urns. + final SearchResult gmsResult = + _entityClient.search( + CORP_USER_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get hydrate all users. 
-      final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME,
-          new HashSet<>(gmsResult.getEntities().stream()
-              .map(SearchEntity::getEntity)
-              .collect(Collectors.toList())
-          ), null, context.getAuthentication());
+            // Then, hydrate all users.
+            final Map<Urn, EntityResponse> entities =
+                _entityClient.batchGetV2(
+                    CORP_USER_ENTITY_NAME,
+                    new HashSet<>(
+                        gmsResult.getEntities().stream()
+                            .map(SearchEntity::getEntity)
+                            .collect(Collectors.toList())),
+                    null,
+                    context.getAuthentication());
 
-      // Now that we have entities we can bind this to a result.
-      final ListUsersResult result = new ListUsersResult();
-      result.setStart(gmsResult.getFrom());
-      result.setCount(gmsResult.getPageSize());
-      result.setTotal(gmsResult.getNumEntities());
-      result.setUsers(mapEntities(entities.values()));
-      return result;
-    } catch (Exception e) {
-      throw new RuntimeException("Failed to list users", e);
-    }
-  });
+            // Now that we have entities we can bind this to a result.
+            final ListUsersResult result = new ListUsersResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setUsers(mapEntities(entities.values()));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list users", e);
+          }
+        });
     }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
   }
 
   private List<CorpUser> mapEntities(final Collection<EntityResponse> entities) {
-    return entities.stream()
-        .map(CorpUserMapper::map)
-        .collect(Collectors.toList());
+    return entities.stream().map(CorpUserMapper::map).collect(Collectors.toList());
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java
index 718810e4710e7..7131a9d2a9a26 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java
@@ -10,10 +10,7 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Resolver responsible for hard deleting a particular DataHub Corp User
- */
+/** Resolver responsible for hard deleting a particular DataHub Corp User */
 @Slf4j
 public class RemoveUserResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -24,30 +21,39 @@ public RemoveUserResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     if (AuthorizationUtils.canManageUsersAndGroups(context)) {
       final String userUrn = environment.getArgument("urn");
      final Urn urn = Urn.createFromString(userUrn);
-      return CompletableFuture.supplyAsync(() -> {
-        try {
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-
-          // Asynchronously Delete all references to the entity (to return quickly)
-          CompletableFuture.runAsync(() -> {
+      return CompletableFuture.supplyAsync(
+          () -> {
             try {
-              _entityClient.deleteEntityReferences(urn,
context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for user with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against user with urn %s", userUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against user with urn %s", userUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java index ab04d26fb5801..6a0e81a10f40b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,12 +16,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform privilege. + * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform + * privilege. 
*/
 public class UpdateUserStatusResolver implements DataFetcher<CompletableFuture<String>> {
 
@@ -37,20 +37,28 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
     final CorpUserStatus newStatus = CorpUserStatus.valueOf(environment.getArgument("status"));
 
     // Create the status aspect
-    final com.linkedin.identity.CorpUserStatus statusAspect = new com.linkedin.identity.CorpUserStatus();
+    final com.linkedin.identity.CorpUserStatus statusAspect =
+        new com.linkedin.identity.CorpUserStatus();
     statusAspect.setStatus(newStatus.toString());
-    statusAspect.setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(Urn.createFromString(context.getActorUrn())));
-
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(userUrn),
-            CORP_USER_STATUS_ASPECT_NAME, statusAspect);
-        return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to update user status for urn", userUrn), e);
-      }
-    });
+    statusAspect.setLastModified(
+        new AuditStamp()
+            .setTime(System.currentTimeMillis())
+            .setActor(Urn.createFromString(context.getActorUrn())));
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithUrn(
+                    UrnUtils.getUrn(userUrn), CORP_USER_STATUS_ASPECT_NAME, statusAspect);
+            return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to update user status for urn %s", userUrn), e);
+          }
+        });
   }
-  throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action.
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java index 6e39879dd56bc..830c9013835d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,12 +20,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class CreateViewResolver implements DataFetcher<CompletableFuture<DataHubView>> { @@ -34,29 +31,34 @@ public CreateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateViewInput input = bindArgument(environment.getArgument("input"), CreateViewInput.class); + final CreateViewInput input = + bindArgument(environment.getArgument("input"), CreateViewInput.class); - return CompletableFuture.supplyAsync(() -> { - if (ViewUtils.canCreateView( - DataHubViewType.valueOf(input.getViewType().toString()), - context)) { - try { - final Urn urn = _viewService.createView( - DataHubViewType.valueOf(input.getViewType().toString()), - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - return createView(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create View with input: %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + if (ViewUtils.canCreateView( + DataHubViewType.valueOf(input.getViewType().toString()), context)) { + try { + final Urn urn = + _viewService.createView( + DataHubViewType.valueOf(input.getViewType().toString()), + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition(input.getDefinition()), + context.getAuthentication(), + System.currentTimeMillis()); + return createView(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create View with input: %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator.");
+        });
   }
 
   private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateViewInput input) {
@@ -66,15 +68,20 @@ private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateView
         .setViewType(input.getViewType())
         .setName(input.getName())
         .setDescription(input.getDescription())
-        .setDefinition(new DataHubViewDefinition(
-            input.getDefinition().getEntityTypes(),
-            new DataHubViewFilter(
-                input.getDefinition().getFilter().getOperator(),
-                input.getDefinition().getFilter().getFilters().stream().map(filterInput ->
-                    new FacetFilter(filterInput.getField(), filterInput.getCondition(),
-                        filterInput.getValues(),
-                        filterInput.getNegated()))
-                    .collect(Collectors.toList()))))
+        .setDefinition(
+            new DataHubViewDefinition(
+                input.getDefinition().getEntityTypes(),
+                new DataHubViewFilter(
+                    input.getDefinition().getFilter().getOperator(),
+                    input.getDefinition().getFilter().getFilters().stream()
+                        .map(
+                            filterInput ->
+                                new FacetFilter(
+                                    filterInput.getField(),
+                                    filterInput.getCondition(),
+                                    filterInput.getValues(),
+                                    filterInput.getNegated()))
+                        .collect(Collectors.toList()))))
         .build();
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java
index 2b8c3b8640aa8..a3b21ad0c9681 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java
@@ -11,10 +11,7 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Resolver responsible for hard deleting a particular DataHub View
- */
+/** Resolver responsible for hard deleting a particular DataHub View */
 @Slf4j
 public class DeleteViewResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -25,24 +22,27 @@ public DeleteViewResolver(@Nonnull final ViewService viewService) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String urnStr = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(urnStr);
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        if (ViewUtils.canUpdateView(_viewService, urn, context)) {
-          _viewService.deleteView(urn, context.getAuthentication());
-          log.info(String.format("Successfully deleted View %s with urn", urn));
-          return true;
-        }
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      } catch (AuthorizationException e) {
-        throw e;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to perform delete against View with urn %s", urn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            if (ViewUtils.canUpdateView(_viewService, urn, context)) {
+              _viewService.deleteView(urn, context.getAuthentication());
+              log.info(String.format("Successfully deleted View with urn %s", urn));
+              return true;
+            }
+            throw new AuthorizationException(
+                "Unauthorized to perform this action.
Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against View with urn %s", urn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index 51bbcfcfa25ae..caa37f8264854 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -30,20 +32,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing global DataHub Views. - */ +/** Resolver used for listing global DataHub Views. */ @Slf4j public class ListGlobalViewsResolver implements DataFetcher<CompletableFuture<ListViewsResult>> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -55,43 +51,50 @@ public ListGlobalViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListGlobalViewsInput input = bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); + final ListGlobalViewsInput input = + bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery();
 
-      try {
-
-        final SearchResult gmsResult = _entityClient.search(
-            Constants.DATAHUB_VIEW_ENTITY_NAME,
-            query,
-            buildFilters(),
-            DEFAULT_SORT_CRITERION,
-            start,
-            count,
-            context.getAuthentication(),
-            new SearchFlags().setFulltext(true));
+          try {
+
+            final SearchResult gmsResult =
+                _entityClient.search(
+                    Constants.DATAHUB_VIEW_ENTITY_NAME,
+                    query,
+                    buildFilters(),
+                    DEFAULT_SORT_CRITERION,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true));
 
-        final ListViewsResult result = new ListViewsResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream()
-            .map(SearchEntity::getEntity)
-            .collect(Collectors.toList())));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list global Views", e);
-      }
-    });
+            final ListViewsResult result = new ListViewsResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setViews(
+                mapUnresolvedViews(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list global Views", e);
+          }
+        });
   }
 
-  // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial View objects which will be
+  // resolved by a separate Batch resolver.
   private List<DataHubView> mapUnresolvedViews(final List<Urn> entityUrns) {
     final List<DataHubView> results = new ArrayList<>();
     for (final Urn urn : entityUrns) {
@@ -107,7 +110,12 @@ private Filter buildFilters() {
     final AndFilterInput globalCriteria = new AndFilterInput();
     List<FacetFilterInput> andConditions = new ArrayList<>();
     andConditions.add(
-        new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(DataHubViewType.GLOBAL.name()), false, FilterOperator.EQUAL));
+        new FacetFilterInput(
+            VIEW_TYPE_FIELD,
+            null,
+            ImmutableList.of(DataHubViewType.GLOBAL.name()),
+            false,
+            FilterOperator.EQUAL));
     globalCriteria.setAnd(andConditions);
     return buildFilter(Collections.emptyList(), ImmutableList.of(globalCriteria));
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
index d8705e216503c..945d2d50bcc3e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.view;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -30,21 +32,15 @@
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver used for listing the current user's DataHub Views.
- */
+/** Resolver used for listing the current user's DataHub Views.
*/ @Slf4j public class ListMyViewsResolver implements DataFetcher<CompletableFuture<ListViewsResult>> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; private static final String CREATOR_URN_FIELD = "createdBy"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -56,44 +52,52 @@ public ListMyViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListMyViewsInput input = bindArgument(environment.getArgument("input"), ListMyViewsInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final String viewType = input.getViewType() == null ? null : input.getViewType().toString(); - - try { - - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(viewType, context.getActorUrn()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Views", e); - } - }); + final ListMyViewsInput input = + bindArgument(environment.getArgument("input"), ListMyViewsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final String viewType = + input.getViewType() == null ? 
null : input.getViewType().toString(); + + try { + + final SearchResult gmsResult = + _entityClient.search( + Constants.DATAHUB_VIEW_ENTITY_NAME, + query, + buildFilters(viewType, context.getActorUrn()), + DEFAULT_SORT_CRITERION, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + + final ListViewsResult result = new ListViewsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setViews( + mapUnresolvedViews( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Views", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial View objects which will be + // resolved be a separate Batch resolver. private List<DataHubView> mapUnresolvedViews(final List<Urn> entityUrns) { final List<DataHubView> results = new ArrayList<>(); for (final Urn urn : entityUrns) { @@ -110,14 +114,12 @@ private Filter buildFilters(@Nullable final String viewType, final String creato final AndFilterInput filterCriteria = new AndFilterInput(); final List<FacetFilterInput> andConditions = new ArrayList<>(); andConditions.add( - new FacetFilterInput(CREATOR_URN_FIELD, - null, - ImmutableList.of(creatorUrn), - false, - FilterOperator.EQUAL)); + new FacetFilterInput( + CREATOR_URN_FIELD, null, ImmutableList.of(creatorUrn), false, FilterOperator.EQUAL)); if (viewType != null) { andConditions.add( - new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL)); + new FacetFilterInput( + VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL)); } filterCriteria.setAnd(andConditions); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java index 61e22da3c9444..5a52a57d9c374 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,11 +18,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class UpdateViewResolver implements DataFetcher<CompletableFuture<DataHubView>> { @@ -31,40 +29,47 @@ public UpdateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = 
environment.getArgument("urn"); - final UpdateViewInput input = bindArgument(environment.getArgument("input"), UpdateViewInput.class); + final UpdateViewInput input = + bindArgument(environment.getArgument("input"), UpdateViewInput.class); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.updateView( - urn, - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - log.info(String.format("Successfully updated View %s with urn", urn)); - return getView(urn, context.getAuthentication()); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.updateView( + urn, + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition(input.getDefinition()), + context.getAuthentication(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated View %s with urn", urn)); + return getView(urn, context.getAuthentication()); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }); } - private DataHubView getView(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { + private DataHubView getView( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); + String.format( + "Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); } return DataHubViewMapper.map(maybeResponse); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index dda0c3bebc2eb..9da5f915ff31d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -26,39 +26,40 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class ViewUtils { /** * Returns true if the authenticated actor is allowed to create a view with the given parameters. * - * The user can create a View if it's a personal View specific to them, or - * if it's a Global view and they have the correct Platform privileges. + * <p>The user can create a View if it's a personal View specific to them, or if it's a Global + * view and they have the correct Platform privileges. 
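For context on how this check is consumed: a create-path resolver is expected to gate on it before persisting anything. A minimal caller-side sketch, assuming a resolver with an input object carrying the requested view type and a QueryContext named context (these names are illustrative, not part of this change):

    if (!ViewUtils.canCreateView(input.getViewType(), context)) {
      throw new AuthorizationException(
          "Unauthorized to perform this action. Please contact your DataHub administrator.");
    }
    // otherwise proceed to persist the view through ViewService
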
   *
   * @param type the type of the new View
   * @param context the current GraphQL {@link QueryContext}
-   * @return true if the authenticator actor is allowed to change or delete the view, false otherwise.
+   * @return true if the authenticated actor is allowed to change or delete the view, false
+   *     otherwise.
   */
  public static boolean canCreateView(
-      @Nonnull DataHubViewType type,
-      @Nonnull QueryContext context) {
+      @Nonnull DataHubViewType type, @Nonnull QueryContext context) {
    Objects.requireNonNull(type, "type must not be null");
    Objects.requireNonNull(context, "context must not be null");
    return DataHubViewType.PERSONAL.equals(type)
-        || (DataHubViewType.GLOBAL.equals(type) && AuthorizationUtils.canManageGlobalViews(context));
+        || (DataHubViewType.GLOBAL.equals(type)
+            && AuthorizationUtils.canManageGlobalViews(context));
  }
-  /**
-   * Returns true if the authenticated actor is allowed to update or delete
-   * the View with the specified urn.
+  /**
+   * Returns true if the authenticated actor is allowed to update or delete the View with the
+   * specified urn.
   *
   * @param viewService an instance of {@link ViewService}
   * @param viewUrn the urn of the View
   * @param context the current GraphQL {@link QueryContext}
-   * @return true if the authenticator actor is allowed to change or delete the view, false otherwise.
+   * @return true if the authenticated actor is allowed to change or delete the view, false
+   *     otherwise.
   */
-  public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) {
+  public static boolean canUpdateView(
+      @Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) {
    Objects.requireNonNull(viewService, "viewService must not be null");
    Objects.requireNonNull(viewUrn, "viewUrn must not be null");
    Objects.requireNonNull(context, "context must not be null");
@@ -67,16 +68,21 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U
    final DataHubViewInfo viewInfo = viewService.getViewInfo(viewUrn, context.getAuthentication());
    if (viewInfo == null) {
-      throw new IllegalArgumentException(String.format("Failed to modify View. View with urn %s does not exist.", viewUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to modify View. View with urn %s does not exist.", viewUrn));
    }
-    // If the View is Global, then the user must have ability to manage global views OR must be its owner
-    if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) && AuthorizationUtils.canManageGlobalViews(context)) {
+    // If the View is Global, then the user must have the ability to manage global views OR must
+    // be its owner
+    if (DataHubViewType.GLOBAL.equals(viewInfo.getType())
+        && AuthorizationUtils.canManageGlobalViews(context)) {
      return true;
    }
    // If the View is Personal, then the current actor must be the owner.
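The check therefore has exactly two allow paths: a GLOBAL view combined with the manage-global-views privilege, or a PERSONAL view whose creator is the current actor. A hedged JUnit/Mockito-style sketch of the PERSONAL path (mockViewService, viewUrn, and contextForActor are illustrative helpers, not part of this change):

    DataHubViewInfo personal =
        new DataHubViewInfo()
            .setType(DataHubViewType.PERSONAL)
            .setCreated(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:alice")));
    when(mockViewService.getViewInfo(eq(viewUrn), any())).thenReturn(personal);
    // The creator may edit; any other actor may not.
    assertTrue(ViewUtils.canUpdateView(
        mockViewService, viewUrn, contextForActor("urn:li:corpuser:alice")));
    assertFalse(ViewUtils.canUpdateView(
        mockViewService, viewUrn, contextForActor("urn:li:corpuser:bob")));
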
- return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner( + viewInfo.getCreated().getActor(), + UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); } /** @@ -86,28 +92,32 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U * @return the GMS model */ @Nonnull - public static DataHubViewDefinition mapDefinition(@Nonnull final DataHubViewDefinitionInput input) { + public static DataHubViewDefinition mapDefinition( + @Nonnull final DataHubViewDefinitionInput input) { Objects.requireNonNull(input, "input must not be null"); final DataHubViewDefinition result = new DataHubViewDefinition(); if (input.getFilter() != null) { result.setFilter(mapFilter(input.getFilter()), SetMode.IGNORE_NULL); } - result.setEntityTypes(new StringArray(input.getEntityTypes().stream().map(EntityTypeMapper::getName).collect( - Collectors.toList()))); + result.setEntityTypes( + new StringArray( + input.getEntityTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()))); return result; } /** - * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} object, - * which is then persisted to the backend in an aspect. + * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} + * object, which is then persisted to the backend in an aspect. * - * We intentionally convert from a more rigid model to something more flexible to hedge for the case - * in which the views feature evolves to require more advanced filter capabilities. + * <p>We intentionally convert from a more rigid model to something more flexible to hedge for the + * case in which the views feature evolves to require more advanced filter capabilities. * - * The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), which cannot be - * rendered in full by the UI. We account for this on the read path by logging a warning and returning an empty - * View in such cases. + * <p>The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), + * which cannot be rendered in full by the UI. We account for this on the read path by logging a + * warning and returning an empty View in such cases. 
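Concretely: an OR input with two facet filters maps to two single-criterion conjunctions, while an AND input maps to one conjunction holding every criterion. A sketch of the OR shape, assuming the standard pegasus collection constructors (field names and values are illustrative):

    // input:  operator = OR, filters = [platform == snowflake, origin == PROD]
    // output: or = [ and(platform == snowflake), and(origin == PROD) ]
    Filter result =
        new Filter()
            .setOr(
                new ConjunctiveCriterionArray(
                    ImmutableList.of(
                        new ConjunctiveCriterion()
                            .setAnd(new CriterionArray(ImmutableList.of(
                                new Criterion().setField("platform").setValue("snowflake")))),
                        new ConjunctiveCriterion()
                            .setAnd(new CriterionArray(ImmutableList.of(
                                new Criterion().setField("origin").setValue("PROD")))))));
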
*/ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { if (LogicalOperator.AND.equals(input.getOperator())) { @@ -121,19 +131,30 @@ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { private static Filter buildAndFilter(@Nonnull List<FacetFilterInput> input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(input.stream().map(ResolverUtils::criterionFromFilter).collect(Collectors.toList())))) - )); + result.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + input.stream() + .map(ResolverUtils::criterionFromFilter) + .collect(Collectors.toList())))))); return result; } private static Filter buildOrFilter(@Nonnull List<FacetFilterInput> input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(input.stream().map(filter -> - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(ResolverUtils.criterionFromFilter(filter)))) - ) - .collect(Collectors.toList()))); + result.setOr( + new ConjunctiveCriterionArray( + input.stream() + .map( + filter -> + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(ResolverUtils.criterionFromFilter(filter))))) + .collect(Collectors.toList()))); return result; } @@ -141,6 +162,5 @@ private static boolean isViewOwner(Urn creatorUrn, Urn actorUrn) { return creatorUrn.equals(actorUrn); } - private ViewUtils() { } - + private ViewUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java index 51fd503fff578..49c8c24c2b6be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java @@ -1,4 +1,3 @@ package com.linkedin.datahub.graphql.scalar; -public class LongScalarType { -} +public class LongScalarType {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java index 3bd8719a37abc..df7c729cb14c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java @@ -1,16 +1,18 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface BatchMutableType<I, B, T> extends MutableType<I, T> { - default Class<B[]> batchInputClass() throws UnsupportedOperationException { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchInputClass method"); - } + default Class<B[]> batchInputClass() throws UnsupportedOperationException { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchInputClass method"); + } - default List<T> batchUpdate(@Nonnull final B[] updateInput, QueryContext context) throws Exception { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchUpdate method"); - } + default List<T> batchUpdate(@Nonnull final B[] updateInput, 
QueryContext context) + throws Exception { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchUpdate method"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java index b50a229be0633..368c126131af2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java @@ -5,42 +5,46 @@ import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.FacetFilterInput; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** * Extension of {@link EntityType} containing methods required for 'browse' functionality. * - * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. * @param <K> the key type for the DataLoader */ public interface BrowsableEntityType<T extends Entity, K> extends EntityType<T, K> { - /** - * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count. - * - * @param path the path to find browse results under - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Nonnull - BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & count. - * - * @param urn the entity urn to fetch browse paths for - * @param context the {@link QueryContext} corresponding to the request. - */ - @Nonnull - List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception; + /** + * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count. + * + * @param path the path to find browse results under + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param start the initial offset of returned results + * @param count the number of results to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + @Nonnull + BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception; + /** + * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & + * count. + * + * @param urn the entity urn to fetch browse paths for + * @param context the {@link QueryContext} corresponding to the request. 
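As a usage sketch for the two browse methods, a resolver might issue the calls below; the path segments, paging values, and chart urn are illustrative, and chartType stands in for any implementation such as the ChartType shown later in this patch:

    BrowseResults page =
        chartType.browse(
            ImmutableList.of("prod", "looker"), /* filters */ null, /* start */ 0, /* count */ 10, context);
    List<BrowsePath> paths = chartType.browsePaths("urn:li:chart:(looker,baz1)", context);
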
+ */ + @Nonnull + List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java index 4185288776c06..43e4c1be55b71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java @@ -3,20 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import java.util.function.Function; - /** - * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, etc.). + * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, + * etc.). * * @param <T>: The GraphQL object type corresponding to the entity, must be of type {@link Entity} * @param <K> the key type for the DataLoader */ public interface EntityType<T extends Entity, K> extends LoadableType<T, K> { - /** - * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the Graph type, eg. 'DATASET' - */ - com.linkedin.datahub.graphql.generated.EntityType type(); - - Function<Entity, K> getKeyProvider(); + /** + * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the + * Graph type, eg. 'DATASET' + */ + com.linkedin.datahub.graphql.generated.EntityType type(); + Function<Entity, K> getKeyProvider(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java index a21fab09b79c3..9f9fe1f28994c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java @@ -2,10 +2,9 @@ import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; - import graphql.execution.DataFetcherResult; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * GQL graph type that can be loaded from a downstream service by primary key. @@ -15,35 +14,38 @@ */ public interface LoadableType<T, K> { - /** - * Returns generated GraphQL class associated with the type - */ - Class<T> objectClass(); - - /** - * Returns the name of the type, to be used in creating a corresponding GraphQL {@link org.dataloader.DataLoader} - */ - default String name() { - return objectClass().getSimpleName(); - } - - /** - * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity cannot be found. - * - * @param key to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - default DataFetcherResult<T> load(@Nonnull final K key, @Nonnull final QueryContext context) throws Exception { - return batchLoad(ImmutableList.of(key), context).get(0); - }; - - /** - * Retrieves an list of entities given a list of urn strings. The list returned is expected to - * be of same length of the list of urns, where nulls are provided in place of an entity object if an entity cannot be found. - * - * @param keys to retrieve - * @param context the {@link QueryContext} corresponding to the request. 
-   */
-    List<DataFetcherResult<T>> batchLoad(@Nonnull final List<K> keys, @Nonnull final QueryContext context) throws Exception;
-
+  /** Returns generated GraphQL class associated with the type */
+  Class<T> objectClass();
+
+  /**
+   * Returns the name of the type, to be used in creating a corresponding GraphQL {@link
+   * org.dataloader.DataLoader}
+   */
+  default String name() {
+    return objectClass().getSimpleName();
+  }
+
+  /**
+   * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity
+   * cannot be found.
+   *
+   * @param key to retrieve
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  default DataFetcherResult<T> load(@Nonnull final K key, @Nonnull final QueryContext context)
+      throws Exception {
+    return batchLoad(ImmutableList.of(key), context).get(0);
+  }
+  ;
+
+  /**
+   * Retrieves a list of entities given a list of urn strings. The list returned is expected to be
+   * of the same length as the list of urns, with nulls in place of an entity object if an
+   * entity cannot be found.
+   *
+   * @param keys to retrieve
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  List<DataFetcherResult<T>> batchLoad(
+      @Nonnull final List<K> keys, @Nonnull final QueryContext context) throws Exception;
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
index 94f1200d3a783..fa24192913324 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
@@ -1,7 +1,6 @@
 package com.linkedin.datahub.graphql.types;
 
 import com.linkedin.datahub.graphql.QueryContext;
-
 import javax.annotation.Nonnull;
 
 /**
@@ -10,18 +9,16 @@
 * @param <I>: The input type corresponding to the write.
 */
 public interface MutableType<I, T> {
-  /**
-   * Returns generated GraphQL class associated with the input type
-   */
-
-  Class<I> inputClass();
+  /** Returns generated GraphQL class associated with the input type */
+  Class<I> inputClass();
 
-  /**
-   * Update an entity by urn
-   *
-   * @param urn
-   * @param input input type
-   * @param context the {@link QueryContext} corresponding to the request.
-   */
-  T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) throws Exception;
+  /**
+   * Update an entity by urn
+   *
+   * @param urn the urn of the entity to update
+   * @param input input type
+   * @param context the {@link QueryContext} corresponding to the request.
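Implementations later in this patch (for example ChartType) satisfy this contract; a caller-side sketch, with the urn and input values illustrative:

    Chart updated =
        chartType.update(
            "urn:li:chart:(looker,baz1)",
            chartUpdateInput, // a ChartUpdateInput assembled from the mutation arguments
            context);
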
+ */ + T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java index 96875956d22c1..a5ade054e71eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java @@ -6,52 +6,61 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.query.filter.Filter; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. * - * Extension of {@link EntityType} containing methods required for 'search' functionality. + * <p>Extension of {@link EntityType} containing methods required for 'search' functionality. * - * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. */ @Deprecated public interface SearchableEntityType<T extends Entity, K> extends EntityType<T, K> { - /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. - * - * Retrieves {@link SearchResults} corresponding to a given query string, list of filters, start index, & count. - * - * @param query query text - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Deprecated - SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of filters, & limit. - * - * @param query query text - * @param field the name of the field to autocomplete against, null if one was not provided - * @param filters list of filters that should be applied to search results, null if non were provided - * @param limit the maximum number of autocomplete suggestions to be returned - * @param context the {@link QueryContext} corresponding to the request. - */ - AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception; + /** + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. + * + * <p>Retrieves {@link SearchResults} corresponding to a given query string, list of filters, + * start index, & count. 
+ * + * @param query query text + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param start the initial offset of returned results + * @param count the number of results to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + @Deprecated + SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception; + /** + * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of + * filters, & limit. + * + * @param query query text + * @param field the name of the field to autocomplete against, null if one was not provided + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param limit the maximum number of autocomplete suggestions to be returned + * @param context the {@link QueryContext} corresponding to the request. + */ + AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java index c9e2c322ace8d..00e9badf5e345 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - public class AspectMapper { public static final AspectMapper INSTANCE = new AspectMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java index f3fdfdaa86f9e..45e80822b12c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java @@ -2,8 +2,8 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.VersionedAspectKey; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.VersionedAspectKey; import com.linkedin.datahub.graphql.generated.Aspect; import com.linkedin.datahub.graphql.types.LoadableType; import com.linkedin.entity.EntityResponse; @@ -35,45 +35,55 @@ public String name() { } /** - * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list returned is expected to - * be of same length of the list of keys, where nulls are provided in place of an aspect object if an entity cannot be found. + * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list + * returned is expected to be of same length of the list of keys, where nulls are provided in + * place of an aspect object if an entity cannot be found. * * @param keys to retrieve * @param context the {@link QueryContext} corresponding to the request. 
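Types like this are normally fronted by an org.dataloader.DataLoader so that field resolvers batch their aspect fetches into a single call. A minimal wiring sketch, assuming an aspectType instance and a request-scoped context are in hand (registry wiring omitted; newer java-dataloader versions expose the same factory as DataLoaderFactory.newDataLoader):

    DataLoader<VersionedAspectKey, DataFetcherResult<Aspect>> aspectLoader =
        DataLoader.newDataLoader(
            keys ->
                CompletableFuture.supplyAsync(
                    () -> {
                      try {
                        return aspectType.batchLoad(keys, context);
                      } catch (Exception e) {
                        throw new RuntimeException(e);
                      }
                    }));
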
*/ - public List<DataFetcherResult<Aspect>> batchLoad(@Nonnull List<VersionedAspectKey> keys, @Nonnull QueryContext context) throws Exception { + public List<DataFetcherResult<Aspect>> batchLoad( + @Nonnull List<VersionedAspectKey> keys, @Nonnull QueryContext context) throws Exception { try { - return keys.stream().map(key -> { - try { - Urn entityUrn = Urn.createFromString(key.getUrn()); + return keys.stream() + .map( + key -> { + try { + Urn entityUrn = Urn.createFromString(key.getUrn()); - Map<Urn, EntityResponse> response = _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication() - ); + Map<Urn, EntityResponse> response = + _entityClient.batchGetV2( + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName()), + context.getAuthentication()); - EntityResponse entityResponse = response.get(entityUrn); + EntityResponse entityResponse = response.get(entityUrn); - if (entityResponse == null || entityResponse.getAspects().get(key.getAspectName()) == null) { - // The aspect was not found. Return null. - return DataFetcherResult.<Aspect>newResult().data(null).build(); - } - final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); - return DataFetcherResult.<Aspect>newResult().data(AspectMapper.map(aspect, entityUrn)).build(); - } catch (Exception e) { - if (e instanceof RestLiResponseException) { - // if no aspect is found, restli will return a 404 rather than null - // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls - if (((RestLiResponseException) e).getStatus() == 404) { - return DataFetcherResult.<Aspect>newResult().data(null).build(); - } - } - throw new RuntimeException(String.format("Failed to load Aspect for entity %s", key.getUrn()), e); - } - }).collect(Collectors.toList()); + if (entityResponse == null + || entityResponse.getAspects().get(key.getAspectName()) == null) { + // The aspect was not found. Return null. 
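                            // Summary of the absent-vs-failed contract this lambda implements
                            // (editorial sketch, not added by this change):
                            //   aspect present       -> newResult().data(mapped).build()
                            //   aspect absent        -> newResult().data(null).build()
                            //   RestLi 404 exception -> also mapped to the null result below
                            //   any other exception  -> wrapped in RuntimeException and rethrown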
+ return DataFetcherResult.<Aspect>newResult().data(null).build(); + } + final EnvelopedAspect aspect = + entityResponse.getAspects().get(key.getAspectName()); + return DataFetcherResult.<Aspect>newResult() + .data(AspectMapper.map(aspect, entityUrn)) + .build(); + } catch (Exception e) { + if (e instanceof RestLiResponseException) { + // if no aspect is found, restli will return a 404 rather than null + // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls + if (((RestLiResponseException) e).getStatus() == 404) { + return DataFetcherResult.<Aspect>newResult().data(null).build(); + } + } + throw new RuntimeException( + String.format("Failed to load Aspect for entity %s", key.getUrn()), e); + } + }) + .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Aspects", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index e1d81bb31f471..2536f4d2521ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -25,7 +25,6 @@ import java.util.Collections; import java.util.stream.Collectors; - public class AssertionMapper { public static Assertion map(final EntityResponse entityResponse) { @@ -36,15 +35,18 @@ public static Assertion map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.ASSERTION); - final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); + final EnvelopedAspect envelopedAssertionInfo = + aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); } - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); @@ -60,7 +62,8 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion new com.linkedin.datahub.graphql.generated.AssertionInfo(); assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); if (gmsAssertionInfo.hasDatasetAssertion()) { - DatasetAssertionInfo datasetAssertion = mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + DatasetAssertionInfo datasetAssertion = + mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } return assertionInfo; @@ -69,25 +72,25 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion private static DatasetAssertionInfo mapDatasetAssertionInfo( final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { 
DatasetAssertionInfo datasetAssertion = new DatasetAssertionInfo(); - datasetAssertion.setDatasetUrn( - gmsDatasetAssertion.getDataset().toString()); - datasetAssertion.setScope( - DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); + datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); + datasetAssertion.setScope(DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); if (gmsDatasetAssertion.hasFields()) { - datasetAssertion.setFields(gmsDatasetAssertion.getFields() - .stream() - .map(AssertionMapper::mapDatasetSchemaField) - .collect(Collectors.toList())); + datasetAssertion.setFields( + gmsDatasetAssertion.getFields().stream() + .map(AssertionMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } else { datasetAssertion.setFields(Collections.emptyList()); } // Agg if (gmsDatasetAssertion.hasAggregation()) { - datasetAssertion.setAggregation(AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); + datasetAssertion.setAggregation( + AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); } // Op - datasetAssertion.setOperator(AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); + datasetAssertion.setOperator( + AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); // Params if (gmsDatasetAssertion.hasParameters()) { @@ -98,7 +101,8 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( datasetAssertion.setNativeType(gmsDatasetAssertion.getNativeType()); } if (gmsDatasetAssertion.hasNativeParameters()) { - datasetAssertion.setNativeParameters(StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + datasetAssertion.setNativeParameters( + StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } @@ -119,7 +123,8 @@ private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { return new SchemaFieldRef(schemaFieldUrn.toString(), schemaFieldUrn.getEntityKey().get(1)); } - private static AssertionStdParameters mapParameters(final com.linkedin.assertion.AssertionStdParameters params) { + private static AssertionStdParameters mapParameters( + final com.linkedin.assertion.AssertionStdParameters params) { final AssertionStdParameters result = new AssertionStdParameters(); if (params.hasValue()) { result.setValue(mapParameter(params.getValue())); @@ -133,13 +138,13 @@ private static AssertionStdParameters mapParameters(final com.linkedin.assertion return result; } - private static AssertionStdParameter mapParameter(final com.linkedin.assertion.AssertionStdParameter param) { + private static AssertionStdParameter mapParameter( + final com.linkedin.assertion.AssertionStdParameter param) { final AssertionStdParameter result = new AssertionStdParameter(); result.setType(AssertionStdParameterType.valueOf(param.getType().name())); result.setValue(param.getValue()); return result; } - private AssertionMapper() { - } + private AssertionMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index 3493afdd8bd84..ac5cce1191e5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java @@ -20,69 +20,71 @@ import 
java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AssertionType + implements com.linkedin.datahub.graphql.types.EntityType<Assertion, String> { -public class AssertionType implements com.linkedin.datahub.graphql.types.EntityType<Assertion, String> { + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ASSERTION_KEY_ASPECT_NAME, + Constants.ASSERTION_INFO_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + private final EntityClient _entityClient; - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ASSERTION_KEY_ASPECT_NAME, - Constants.ASSERTION_INFO_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME - ); - private final EntityClient _entityClient; + public AssertionType(final EntityClient entityClient) { + _entityClient = entityClient; + } - public AssertionType(final EntityClient entityClient) { - _entityClient = entityClient; - } + @Override + public EntityType type() { + return EntityType.ASSERTION; + } - @Override - public EntityType type() { - return EntityType.ASSERTION; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<Assertion> objectClass() { - return Assertion.class; - } + @Override + public Class<Assertion> objectClass() { + return Assertion.class; + } - @Override - public List<DataFetcherResult<Assertion>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> assertionUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<Assertion>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> assertionUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Assertion>newResult() - .data(AssertionMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Assertions", e); - } + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<Assertion>newResult() + .data(AssertionMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Assertions", e); } + } - private Urn getUrn(final String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); - } + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } -} \ No newline at end of file + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index d9f25a7cec8e1..bfe2ccbe34166 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -4,9 +4,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.types.auth.mappers.AccessTokenMetadataMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -21,7 +21,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class AccessTokenMetadataType implements com.linkedin.datahub.graphql.types.EntityType<AccessTokenMetadata, String> { @@ -48,13 +47,17 @@ public Class<AccessTokenMetadata> objectClass() { } @Override - public List<DataFetcherResult<AccessTokenMetadata>> batchLoad(@Nonnull List<String> keys, - @Nonnull QueryContext context) throws Exception { - final List<Urn> tokenInfoUrns = keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List<DataFetcherResult<AccessTokenMetadata>> batchLoad( + @Nonnull List<String> keys, @Nonnull QueryContext context) throws Exception { + final List<Urn> tokenInfoUrns = + keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(Constants.ACCESS_TOKEN_ENTITY_NAME, new HashSet<>(tokenInfoUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + new HashSet<>(tokenInfoUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -62,9 +65,13 @@ public List<DataFetcherResult<AccessTokenMetadata>> batchLoad(@Nonnull List<Stri gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null : DataFetcherResult.<AccessTokenMetadata>newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<AccessTokenMetadata>newResult() + .data(AccessTokenMetadataMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Access Token Info", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index 9b38757879896..a519a65e5cb6b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,8 +2,8 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -11,7 +11,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - public class AccessTokenMetadataMapper implements ModelMapper<EntityResponse, AccessTokenMetadata> { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); @@ -29,13 +28,15 @@ public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { metadata.setType(EntityType.ACCESS_TOKEN); final EnvelopedAspectMap aspectMap = input.getAspects(); - final MappingHelper<AccessTokenMetadata> mappingHelper = new MappingHelper<>(aspectMap, metadata); + final MappingHelper<AccessTokenMetadata> mappingHelper = + new MappingHelper<>(aspectMap, metadata); mappingHelper.mapToResult(Constants.ACCESS_TOKEN_INFO_NAME, this::mapTokenInfo); return mappingHelper.getResult(); } - private void mapTokenInfo(@Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { + private void mapTokenInfo( + @Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(dataMap); accessTokenMetadata.setName(tokenInfo.getName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index fa0e3cd856803..ba8e96159b0bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.chart; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.ChartUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import 
com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -36,8 +39,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,203 +57,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - +public class ChartType + implements SearchableEntityType<Chart, String>, + BrowsableEntityType<Chart, String>, + MutableType<ChartUpdateInput, Chart> { -public class ChartType implements SearchableEntityType<Chart, String>, BrowsableEntityType<Chart, String>, MutableType<ChartUpdateInput, Chart> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + CHART_KEY_ASPECT_NAME, + CHART_INFO_ASPECT_NAME, + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, + CHART_QUERY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = + ImmutableSet.of("access", "queryType", "tool", "type"); - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - CHART_KEY_ASPECT_NAME, - CHART_INFO_ASPECT_NAME, - EDITABLE_CHART_PROPERTIES_ASPECT_NAME, - CHART_QUERY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); - - private final EntityClient _entityClient; - - public ChartType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class<ChartUpdateInput> inputClass() { - return ChartUpdateInput.class; - } + public ChartType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.CHART; - } + @Override + public Class<ChartUpdateInput> inputClass() { + return ChartUpdateInput.class; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.CHART; + } - @Override - public Class<Chart> objectClass() { - return Chart.class; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public List<DataFetcherResult<Chart>> batchLoad(@Nonnull 
List<String> urnStrs, @Nonnull QueryContext context) throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> chartMap = - _entityClient.batchGetV2( - CHART_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); + @Override + public Class<Chart> objectClass() { + return Chart.class; + } - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(chartMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsChart -> gmsChart == null ? null : DataFetcherResult.<Chart>newResult() - .data(ChartMapper.map(gmsChart)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Charts", e); - } - } + @Override + public List<DataFetcherResult<Chart>> batchLoad( + @Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> chartMap = + _entityClient.batchGetV2( + CHART_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "chart", - query, - facetFilters, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(chartMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsChart -> + gmsChart == null + ? null + : DataFetcherResult.<Chart>newResult() + .data(ChartMapper.map(gmsChart)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Charts", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "chart", query, - filters, - limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "chart", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("chart", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "chart", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - private ChartUrn getChartUrn(String urnStr) { - try { - return ChartUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); - } - } + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - @Override - public Chart update(@Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = ChartUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + private ChartUrn getChartUrn(String urnStr) { + try { + return ChartUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); + } + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public Chart update( + @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = ChartUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator.");
-  }
 
+      try {
+        _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false);
+      } catch (RemoteInvocationException e) {
+        throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e);
+      }
 
-  private boolean isAuthorized(@Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) {
-    // Decide whether the current principal should be allowed to update the Dataset.
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
-    return AuthorizationUtils.isAuthorized(
-        context.getAuthorizer(),
-        context.getAuthentication().getActor().toUrnStr(),
-        PoliciesConfig.CHART_PRIVILEGES.getResourceType(),
-        urn,
-        orPrivilegeGroups);
+      return load(urn, context).getData();
     }
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
+  }
 
-  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) {
+  private boolean isAuthorized(
+      @Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) {
+    // Decide whether the current principal should be allowed to update the Chart.
+    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
+    return AuthorizationUtils.isAuthorized(
+        context.getAuthorizer(),
+        context.getAuthentication().getActor().toUrnStr(),
+        PoliciesConfig.CHART_PRIVILEGES.getResourceType(),
+        urn,
+        orPrivilegeGroups);
+  }
 
-    final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(
-        PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-    ));
+  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) {
 
-    List<String> specificPrivileges = new ArrayList<>();
-    if (updateInput.getOwnership() != null) {
-      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
-    }
-    if (updateInput.getEditableProperties() != null) {
-      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
-    }
-    if (updateInput.getGlobalTags() != null) {
-      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
-    }
-    final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges);
+    final ConjunctivePrivilegeGroup allPrivilegesGroup =
+        new ConjunctivePrivilegeGroup(
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()));
 
-    // If you either have all entity privileges, or have the specific privileges required, you are authorized.
-    return new DisjunctivePrivilegeGroup(ImmutableList.of(
-        allPrivilegesGroup,
-        specificPrivilegeGroup
-    ));
+    List<String> specificPrivileges = new ArrayList<>();
+    if (updateInput.getOwnership() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
     }
+    if (updateInput.getEditableProperties() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
+    }
+    if (updateInput.getGlobalTags() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
+    }
+    final ConjunctivePrivilegeGroup specificPrivilegeGroup =
+        new ConjunctivePrivilegeGroup(specificPrivileges);
+
+    // If you either have all entity privileges, or have the specific privileges required, you are
+    // authorized.
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index e0ffc57ddf519..0ef52c9f45716 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -28,13 +30,13 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -51,184 +53,211 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartMapper implements ModelMapper<EntityResponse, Chart> { + public static final ChartMapper INSTANCE = new ChartMapper(); -public class ChartMapper implements ModelMapper<EntityResponse, Chart> { + public static Chart map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - public static final ChartMapper INSTANCE = new ChartMapper(); + @Override + public Chart apply(@Nonnull final EntityResponse entityResponse) { + final Chart result = new Chart(); + Urn entityUrn = entityResponse.getUrn(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CHART); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { - final Chart result = new Chart(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CHART); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<Chart> mappingHelper = new MappingHelper<>(aspectMap, result); - 
mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); - mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); - mappingHelper.mapToResult(EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> + MappingHelper<Chart> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); + mappingHelper.mapToResult( + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (chart, dataMap) -> chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (chart, dataMap) -> - chart.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> - chart.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (chart, dataMap) -> chart.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (chart, dataMap) -> + chart.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (chart, dataMap) -> chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (chart, dataMap) -> - chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (chart, dataMap) -> chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (chart, dataMap) -> chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (chart, dataMap) -> - chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - 
mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } - private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final ChartKey gmsKey = new ChartKey(dataMap); - chart.setChartId(gmsKey.getChartId()); - chart.setTool(gmsKey.getDashboardTool()); - chart.setPlatform(DataPlatform.builder() + private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final ChartKey gmsKey = new ChartKey(dataMap); + chart.setChartId(gmsKey.getChartId()); + chart.setTool(gmsKey.getDashboardTool()); + chart.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } - private void mapChartInfo(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); - } + private void mapChartInfo( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); + chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); + } - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} - */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartInfo result = new ChartInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasInputs()) { - result.setInputs(info.getInputs().stream().map(input -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(input.getDatasetUrn().toString()); - return dataset; - }).collect(Collectors.toList())); - } - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External 
URL field for consistency. - result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; - } + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ + private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartInfo result = new ChartInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} - */ - private ChartProperties mapChartInfoToProperties(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartProperties result = new ChartProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasInputs()) { + result.setInputs( + info.getInputs().stream() + .map( + input -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(input.getDatasetUrn().toString()); + return dataset; + }) + .collect(Collectors.toList())); } - private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); - chart.setQuery(mapQuery(gmsChartQuery)); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { - final ChartQuery result = new ChartQuery(); - result.setRawQuery(query.getRawQuery()); - result.setType(ChartQueryType.valueOf(query.getType().toString())); - return result; + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); } - - private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); - final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); - chartEditableProperties.setDescription(editableChartProperties.getDescription()); - chart.setEditableProperties(chartEditableProperties); + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if 
(info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ + private ChartProperties mapChartInfoToProperties( + final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartProperties result = new ChartProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - private void mapGlobalTags(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - chart.setGlobalTags(globalTags); - chart.setTags(globalTags); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); + chart.setQuery(mapQuery(gmsChartQuery)); + } + + private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { + final ChartQuery result = new ChartQuery(); + result.setRawQuery(query.getRawQuery()); + result.setType(ChartQueryType.valueOf(query.getType().toString())); + return result; + } + + private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); + final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); + chartEditableProperties.setDescription(editableChartProperties.getDescription()); + chart.setEditableProperties(chartEditableProperties); + } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - chart.setContainer(Container - .builder() + private void mapGlobalTags( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + chart.setGlobalTags(globalTags); + chart.setTags(globalTags); + } + + private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + chart.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); - } + private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index b52ddad0b0071..f2a434b58686c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -17,68 +19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartUpdateInputMapper + implements InputModelMapper<ChartUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); -public class ChartUpdateInputMapper implements 
InputModelMapper<ChartUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static Collection<MetadataChangeProposal> map( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(chartUpdateInput, actor); + } - public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); + @Override + public Collection<MetadataChangeProposal> apply( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - public static Collection<MetadataChangeProposal> map(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + if (chartUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } - @Override - public Collection<MetadataChangeProposal> apply(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - - if (chartUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper - .aspectToProposal(OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); - } - - if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (chartUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - // Tags overrides global tags if provided - if (chartUpdateInput.getTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } - - if (chartUpdateInput.getEditableProperties() != null) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(); - editableChartProperties.setDescription(chartUpdateInput.getEditableProperties().getDescription()); - if (!editableChartProperties.hasCreated()) { - editableChartProperties.setCreated(auditStamp); - } - editableChartProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); - } + if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (chartUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + 
.collect(Collectors.toList())));
+      }
+      // The tags field overrides global tags if provided
+      if (chartUpdateInput.getTags() != null) {
+        globalTags.setTags(
+            new TagAssociationArray(
+                chartUpdateInput.getTags().getTags().stream()
+                    .map(element -> TagAssociationUpdateMapper.map(element))
+                    .collect(Collectors.toList())));
+      }
+      proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME));
+    }
 
-        return proposals;
+    if (chartUpdateInput.getEditableProperties() != null) {
+      final EditableChartProperties editableChartProperties = new EditableChartProperties();
+      editableChartProperties.setDescription(
+          chartUpdateInput.getEditableProperties().getDescription());
+      if (!editableChartProperties.hasCreated()) {
+        editableChartProperties.setCreated(auditStamp);
+      }
+      editableChartProperties.setLastModified(auditStamp);
+      proposals.add(
+          updateMappingHelper.aspectToProposal(
+              editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME));
     }
+    return proposals;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java
index d6ef713f3ade6..4da18403f95cc 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java
@@ -7,29 +7,36 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
 public class InputFieldsMapper {
 
-    public static final InputFieldsMapper INSTANCE = new InputFieldsMapper();
-
-    public static com.linkedin.datahub.graphql.generated.InputFields map(@Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) {
-        return INSTANCE.apply(metadata, entityUrn);
-    }
-
-    public com.linkedin.datahub.graphql.generated.InputFields apply(@Nonnull final InputFields input, @Nonnull final Urn entityUrn) {
-        final com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields();
-        result.setFields(input.getFields().stream().map(field -> {
-            InputField fieldResult = new InputField();
-
-            if (field.hasSchemaField()) {
-                fieldResult.setSchemaField(SchemaFieldMapper.map(field.getSchemaField(), entityUrn));
-            }
-            if (field.hasSchemaFieldUrn()) {
-                fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString());
-            }
-            return fieldResult;
-        }).collect(Collectors.toList()));
-
-        return result;
-    }
+  public static final InputFieldsMapper INSTANCE = new InputFieldsMapper();
+
+  public static com.linkedin.datahub.graphql.generated.InputFields map(
+      @Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) {
+    return INSTANCE.apply(metadata, entityUrn);
+  }
+
+  public com.linkedin.datahub.graphql.generated.InputFields apply(
+      @Nonnull final InputFields input, @Nonnull final Urn entityUrn) {
+    final com.linkedin.datahub.graphql.generated.InputFields result =
+        new com.linkedin.datahub.graphql.generated.InputFields();
+    result.setFields(
+        input.getFields().stream()
+            .map(
+                field -> {
+                  InputField fieldResult = new InputField();
+
+                  if (field.hasSchemaField()) {
+                    fieldResult.setSchemaField(
+                        SchemaFieldMapper.map(field.getSchemaField(), entityUrn));
+                  }
+                  if (field.hasSchemaFieldUrn()) {
+                    fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString());
+                  }
+                  return fieldResult;
+                })
+            .collect(Collectors.toList()));
+
+    return result;
+  }
 }
diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java
index beb2b64e1dd7d..1f952bb6a2bd1 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java
@@ -1,29 +1,27 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
-
 import com.linkedin.datahub.graphql.generated.AuditStamp;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mapper implementations
 */
 public class AuditStampMapper implements ModelMapper<com.linkedin.common.AuditStamp, AuditStamp> {
 
-    public static final AuditStampMapper INSTANCE = new AuditStampMapper();
+  public static final AuditStampMapper INSTANCE = new AuditStampMapper();
 
-    public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) {
-        return INSTANCE.apply(auditStamp);
-    }
+  public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) {
+    return INSTANCE.apply(auditStamp);
+  }
 
-    @Override
-    public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) {
-        final AuditStamp result = new AuditStamp();
-        result.setActor(auditStamp.getActor().toString());
-        result.setTime(auditStamp.getTime());
-        return result;
-    }
+  @Override
+  public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) {
+    final AuditStamp result = new AuditStamp();
+    result.setActor(auditStamp.getActor().toString());
+    result.setTime(auditStamp.getTime());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java
index 41ee99fa412ad..79b7cf8e050d3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java
@@ -4,10 +4,9 @@
 import com.linkedin.datahub.graphql.generated.BrowsePathEntry;
 import com.linkedin.datahub.graphql.generated.BrowsePathV2;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
-import javax.annotation.Nonnull;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 
 public class BrowsePathsV2Mapper implements ModelMapper<BrowsePathsV2, BrowsePathV2> {
 
@@ -20,7 +19,8 @@ public static BrowsePathV2 map(@Nonnull final BrowsePathsV2 metadata) {
   @Override
   public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) {
     final BrowsePathV2 result = new BrowsePathV2();
-    final List<BrowsePathEntry> path = input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList());
+    final List<BrowsePathEntry> path =
+        input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList());
     result.setPath(path);
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index 7144730ba9337..e3a09bc8926a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -3,8 +3,8 @@ import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -public class ChangeAuditStampsMapper implements ModelMapper<com.linkedin.common.ChangeAuditStamps, ChangeAuditStamps> { +public class ChangeAuditStampsMapper + implements ModelMapper<com.linkedin.common.ChangeAuditStamps, ChangeAuditStamps> { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 6c8bdada17b24..806e8e6aadc5b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,26 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nonnull; import lombok.NonNull; public class CostMapper implements ModelMapper<com.linkedin.common.Cost, Cost> { - public static final CostMapper INSTANCE = new CostMapper(); + public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); - } + public static Cost map(@NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(cost); + } - @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { - final Cost result = new Cost(); - result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); - return result; - } + @Override + public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { + final Cost result = new Cost(); + result.setCostType(CostType.valueOf(cost.getCostType().name())); + result.setCostValue(CostValueMapper.map(cost.getCost())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 3f41c92cd1715..56c107f7ec059 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -2,25 +2,24 @@ import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class CostValueMapper implements ModelMapper<com.linkedin.common.CostValue, CostValue> { - public static final CostValueMapper INSTANCE = new CostValueMapper(); + public 
static final CostValueMapper INSTANCE = new CostValueMapper(); - public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); - } + public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(costValue); + } - @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { - final CostValue result = new CostValue(); - if (costValue.isCostCode()) { - result.setCostCode(costValue.getCostCode()); - } - if (costValue.isCostId()) { - result.setCostId(costValue.getCostId().floatValue()); - } - return result; + @Override + public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { + final CostValue result = new CostValue(); + if (costValue.isCostCode()) { + result.setCostCode(costValue.getCostCode()); + } + if (costValue.isCostId()) { + result.setCostId(costValue.getCostId().floatValue()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java index 50e4846611a9b..b09678ddeb42e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java @@ -1,36 +1,36 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.CustomPropertiesEntry; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mapper implementations
 */
 public class CustomPropertiesMapper {
 
-    public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper();
+  public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper();
 
-    public static List<CustomPropertiesEntry> map(@Nonnull final Map<String, String> input, @Nonnull Urn urn) {
-        return INSTANCE.apply(input, urn);
-    }
+  public static List<CustomPropertiesEntry> map(
+      @Nonnull final Map<String, String> input, @Nonnull Urn urn) {
+    return INSTANCE.apply(input, urn);
+  }
 
-    public List<CustomPropertiesEntry> apply(@Nonnull final Map<String, String> input, @Nonnull Urn urn) {
-        List<CustomPropertiesEntry> results = new ArrayList<>();
-        for (String key : input.keySet()) {
-            final CustomPropertiesEntry entry = new CustomPropertiesEntry();
-            entry.setKey(key);
-            entry.setValue(input.get(key));
-            entry.setAssociatedUrn(urn.toString());
-            results.add(entry);
-        }
-        return results;
+  public List<CustomPropertiesEntry> apply(
+      @Nonnull final Map<String, String> input, @Nonnull Urn urn) {
+    List<CustomPropertiesEntry> results = new ArrayList<>();
+    for (String key : input.keySet()) {
+      final CustomPropertiesEntry entry = new CustomPropertiesEntry();
+      entry.setKey(key);
+      entry.setValue(input.get(key));
+      entry.setAssociatedUrn(urn.toString());
+      results.add(entry);
     }
+    return results;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java
index 1f10cd6ee3658..a2236f7e8586d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java
@@ -3,14 +3,16 @@
 import com.linkedin.datahub.graphql.generated.DataPlatformInstance;
 import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
-public class DataPlatformInstanceAspectMapper implements ModelMapper<com.linkedin.common.DataPlatformInstance, DataPlatformInstance> {
+public class DataPlatformInstanceAspectMapper
+    implements ModelMapper<com.linkedin.common.DataPlatformInstance, DataPlatformInstance> {
 
-    public static final DataPlatformInstanceAspectMapper INSTANCE = new DataPlatformInstanceAspectMapper();
+  public static final DataPlatformInstanceAspectMapper INSTANCE =
+      new DataPlatformInstanceAspectMapper();
 
-    public static DataPlatformInstance map(@Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) {
+  public static DataPlatformInstance map(
+      @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) {
     return INSTANCE.apply(dataPlatformInstance);
   }
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java
index 4bbf50bb72362..7a88474166915 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java
+++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,24 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class DeprecationMapper implements ModelMapper<com.linkedin.common.Deprecation, Deprecation> { - public static final DeprecationMapper INSTANCE = new DeprecationMapper(); +public class DeprecationMapper + implements ModelMapper<com.linkedin.common.Deprecation, Deprecation> { + public static final DeprecationMapper INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 478d256df66a4..339c6a848d9f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class EmbedMapper implements ModelMapper<com.linkedin.common.Embed, Embed> { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java index 9f4517c89a6dc..830cbb0e79d79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java @@ -1,44 +1,49 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.FineGrainedLineage; import com.linkedin.datahub.graphql.generated.SchemaFieldRef; import com.linkedin.dataset.FineGrainedLineageArray; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import 
javax.annotation.Nonnull; public class FineGrainedLineagesMapper { public static final FineGrainedLineagesMapper INSTANCE = new FineGrainedLineagesMapper(); - public static List<FineGrainedLineage> map(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { + public static List<FineGrainedLineage> map( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { return INSTANCE.apply(fineGrainedLineages); } - public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { - final List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> result = new ArrayList<>(); + public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { + final List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> result = + new ArrayList<>(); if (fineGrainedLineages.size() == 0) { return result; } for (com.linkedin.dataset.FineGrainedLineage fineGrainedLineage : fineGrainedLineages) { - com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); + com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = + new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); if (fineGrainedLineage.hasUpstreams()) { - resultEntry.setUpstreams(fineGrainedLineage.getUpstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setUpstreams( + fineGrainedLineage.getUpstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } if (fineGrainedLineage.hasDownstreams()) { - resultEntry.setDownstreams(fineGrainedLineage.getDownstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setDownstreams( + fineGrainedLineage.getDownstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } result.add(resultEntry); } @@ -46,8 +51,7 @@ public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(@No } private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { - return new SchemaFieldRef(schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); + return new SchemaFieldRef( + schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 8bcfe7eb3b6d0..4546e0e4d8dc0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -2,22 +2,25 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.InstitutionalMemory; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import 
javax.annotation.Nonnull;
 
 public class InstitutionalMemoryMapper {
 
-    public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper();
+  public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper();
 
-    public static InstitutionalMemory map(@Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) {
-        return INSTANCE.apply(memory, entityUrn);
-    }
+  public static InstitutionalMemory map(
+      @Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) {
+    return INSTANCE.apply(memory, entityUrn);
+  }
 
-    public InstitutionalMemory apply(@Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) {
-        final InstitutionalMemory result = new InstitutionalMemory();
-        result.setElements(input.getElements().stream().map(metadata ->
-            InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)).collect(Collectors.toList()));
-        return result;
-    }
+  public InstitutionalMemory apply(
+      @Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) {
+    final InstitutionalMemory result = new InstitutionalMemory();
+    result.setElements(
+        input.getElements().stream()
+            .map(metadata -> InstitutionalMemoryMetadataMapper.map(metadata, entityUrn))
+            .collect(Collectors.toList()));
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java
index ba4d37173abb8..49a4618507086 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java
@@ -1,33 +1,37 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
 import com.linkedin.common.urn.Urn;
-import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata;
 import com.linkedin.datahub.graphql.generated.CorpUser;
-
+import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata;
 import javax.annotation.Nonnull;
 
 public class InstitutionalMemoryMetadataMapper {
 
-    public static final InstitutionalMemoryMetadataMapper INSTANCE = new InstitutionalMemoryMetadataMapper();
+  public static final InstitutionalMemoryMetadataMapper INSTANCE =
+      new InstitutionalMemoryMetadataMapper();
 
-    public static InstitutionalMemoryMetadata map(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) {
-        return INSTANCE.apply(metadata, entityUrn);
-    }
+  public static InstitutionalMemoryMetadata map(
+      @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata,
+      @Nonnull final Urn entityUrn) {
+    return INSTANCE.apply(metadata, entityUrn);
+  }
 
-    public InstitutionalMemoryMetadata apply(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) {
-        final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata();
-        result.setUrl(input.getUrl().toString());
-        result.setDescription(input.getDescription()); // deprecated field
-        result.setLabel(input.getDescription());
-        result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString()));
-        result.setCreated(AuditStampMapper.map(input.getCreateStamp()));
-        result.setAssociatedUrn(entityUrn.toString());
-        return result;
-    }
+  public InstitutionalMemoryMetadata apply(
+      @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input,
+      @Nonnull final Urn entityUrn) {
+    final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata();
+    result.setUrl(input.getUrl().toString());
+    result.setDescription(input.getDescription()); // deprecated field
+    result.setLabel(input.getDescription());
+    result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString()));
+    result.setCreated(AuditStampMapper.map(input.getCreateStamp()));
+    result.setAssociatedUrn(entityUrn.toString());
+    return result;
+  }
 
-    private CorpUser getAuthor(String actor) {
-        CorpUser partialUser = new CorpUser();
-        partialUser.setUrn(actor);
-        return partialUser;
-    }
+  private CorpUser getAuthor(String actor) {
+    CorpUser partialUser = new CorpUser();
+    partialUser.setUrn(actor);
+    return partialUser;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java
index 28986dcae5725..87d865471708e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java
@@ -1,31 +1,34 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
-import javax.annotation.Nonnull;
-
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.InstitutionalMemoryMetadata;
 import com.linkedin.common.url.Url;
 import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate;
 import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
+import javax.annotation.Nonnull;
 
-public class InstitutionalMemoryMetadataUpdateMapper implements ModelMapper<InstitutionalMemoryMetadataUpdate, InstitutionalMemoryMetadata> {
+public class InstitutionalMemoryMetadataUpdateMapper
+    implements ModelMapper<InstitutionalMemoryMetadataUpdate, InstitutionalMemoryMetadata> {
 
-    private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = new InstitutionalMemoryMetadataUpdateMapper();
+  private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE =
+      new InstitutionalMemoryMetadataUpdateMapper();
 
-    public static InstitutionalMemoryMetadata map(@Nonnull final InstitutionalMemoryMetadataUpdate input) {
-        return INSTANCE.apply(input);
-    }
+  public static InstitutionalMemoryMetadata map(
+      @Nonnull final InstitutionalMemoryMetadataUpdate input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) {
-        final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata();
-        metadata.setDescription(input.getDescription());
-        metadata.setUrl(new Url(input.getUrl()));
-        metadata.setCreateStamp(new AuditStamp()
+  @Override
+  public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) {
+    final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata();
+    metadata.setDescription(input.getDescription());
+    metadata.setUrl(new Url(input.getUrl()));
+    metadata.setCreateStamp(
+        new AuditStamp()
            .setActor(CorpUserUtils.getCorpUserUrn(input.getAuthor()))
-            .setTime(input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt())
-        );
-        return metadata;
-    }
+            .setTime(
+                input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt()));
+    return metadata;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java
index bf063896290eb..d8b451458e72c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java
@@ -1,30 +1,30 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
-import java.util.stream.Collectors;
-
-import javax.annotation.Nonnull;
-
 import com.linkedin.common.InstitutionalMemory;
 import com.linkedin.common.InstitutionalMemoryMetadataArray;
 import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
+import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 
-public class InstitutionalMemoryUpdateMapper implements ModelMapper<InstitutionalMemoryUpdate, InstitutionalMemory> {
+public class InstitutionalMemoryUpdateMapper
+    implements ModelMapper<InstitutionalMemoryUpdate, InstitutionalMemory> {
 
-    private static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper();
+  private static final InstitutionalMemoryUpdateMapper INSTANCE =
+      new InstitutionalMemoryUpdateMapper();
 
-    public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) {
-        return INSTANCE.apply(input);
-    }
+  public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) {
-        final InstitutionalMemory institutionalMemory = new InstitutionalMemory();
-        institutionalMemory.setElements(new InstitutionalMemoryMetadataArray(
-            input.getElements()
-                .stream()
+  @Override
+  public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) {
+    final InstitutionalMemory institutionalMemory = new InstitutionalMemory();
+    institutionalMemory.setElements(
+        new InstitutionalMemoryMetadataArray(
+            input.getElements().stream()
                .map(InstitutionalMemoryMetadataUpdateMapper::map)
                .collect(Collectors.toList())));
-        return institutionalMemory;
-    }
+    return institutionalMemory;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java
index 986954fab87db..37b625715edd5 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java
@@ -1,59 +1,66 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
 import com.linkedin.common.Operation;
+import com.linkedin.common.urn.Urn;
 import com.linkedin.data.template.GetMode;
 import com.linkedin.datahub.graphql.generated.OperationSourceType;
 import com.linkedin.datahub.graphql.generated.OperationType;
-import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper;
 import com.linkedin.metadata.aspect.EnvelopedAspect;
 import com.linkedin.metadata.utils.GenericRecordUtils;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-public class OperationMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.Operation> {
+public class OperationMapper
+    implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.Operation> {
 
-    public static final OperationMapper INSTANCE = new OperationMapper();
+  public static final OperationMapper INSTANCE = new OperationMapper();
 
-    public static com.linkedin.datahub.graphql.generated.Operation map(@Nonnull final EnvelopedAspect envelopedAspect) {
-        return INSTANCE.apply(envelopedAspect);
-    }
+  public static com.linkedin.datahub.graphql.generated.Operation map(
+      @Nonnull final EnvelopedAspect envelopedAspect) {
+    return INSTANCE.apply(envelopedAspect);
+  }
+
+  @Override
+  public com.linkedin.datahub.graphql.generated.Operation apply(
+      @Nonnull final EnvelopedAspect envelopedAspect) {
+
+    Operation gmsProfile =
+        GenericRecordUtils.deserializeAspect(
+            envelopedAspect.getAspect().getValue(),
+            envelopedAspect.getAspect().getContentType(),
+            Operation.class);
+
+    final com.linkedin.datahub.graphql.generated.Operation result =
+        new com.linkedin.datahub.graphql.generated.Operation();
 
-    @Override
-    public com.linkedin.datahub.graphql.generated.Operation apply(@Nonnull final EnvelopedAspect envelopedAspect) {
-
-        Operation gmsProfile = GenericRecordUtils
-            .deserializeAspect(
-                envelopedAspect.getAspect().getValue(),
-                envelopedAspect.getAspect().getContentType(),
-                Operation.class);
-
-        final com.linkedin.datahub.graphql.generated.Operation result =
-            new com.linkedin.datahub.graphql.generated.Operation();
-
-        result.setTimestampMillis(gmsProfile.getTimestampMillis());
-        result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp());
-        if (gmsProfile.hasActor()) {
-            result.setActor(gmsProfile.getActor().toString());
-        }
-        result.setOperationType(OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString()));
-        result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL));
-        if (gmsProfile.hasSourceType()) {
-            result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString()));
-        }
-        if (gmsProfile.hasPartitionSpec()) {
-            result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL));
-        }
-        if (gmsProfile.hasCustomProperties()) {
-            result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties()));
-        }
-        if (gmsProfile.hasNumAffectedRows()) {
-            result.setNumAffectedRows(gmsProfile.getNumAffectedRows());
-        }
-        if (gmsProfile.hasAffectedDatasets()) {
-            result.setAffectedDatasets(gmsProfile.getAffectedDatasets().stream().map(Urn::toString).collect(Collectors.toList()));
-        }
-
-        return result;
+    result.setTimestampMillis(gmsProfile.getTimestampMillis());
+    result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp());
+    if (gmsProfile.hasActor()) {
+      result.setActor(gmsProfile.getActor().toString());
     }
+    result.setOperationType(
+        OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString()));
+    result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL));
+    if (gmsProfile.hasSourceType()) {
+      result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString()));
+    }
+    if (gmsProfile.hasPartitionSpec()) {
+      result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL));
+    }
+    if (gmsProfile.hasCustomProperties()) {
+      result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties()));
+    }
+    if (gmsProfile.hasNumAffectedRows()) {
+      result.setNumAffectedRows(gmsProfile.getNumAffectedRows());
+    }
+    if (gmsProfile.hasAffectedDatasets()) {
+      result.setAffectedDatasets(
+          gmsProfile.getAffectedDatasets().stream()
+              .map(Urn::toString)
+              .collect(Collectors.toList()));
+    }
+
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java
index 181bdc176fb94..ea15aefdad3b7 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
+import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.generated.CorpGroup;
@@ -10,51 +12,49 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
-
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
 public class OwnerMapper {
 
-    public static final OwnerMapper INSTANCE = new OwnerMapper();
+  public static final OwnerMapper INSTANCE = new OwnerMapper();
 
-    public static Owner map(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) {
-        return INSTANCE.apply(owner, entityUrn);
+  public static Owner map(
+      @Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) {
+    return INSTANCE.apply(owner, entityUrn);
+  }
+
+  public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) {
+    final Owner result = new Owner();
+    // Deprecated
+    result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString()));
+
+    if (owner.getTypeUrn() == null) {
+      OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString());
+      owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())));
    }
 
-    public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) {
-        final Owner result = new Owner();
-        // Deprecated
-        result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString()));
-
-        if (owner.getTypeUrn() == null) {
-            OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString());
-            owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())));
-        }
-
-        if (owner.getTypeUrn() != null) {
-            OwnershipTypeEntity entity = new OwnershipTypeEntity();
-            entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE);
-            entity.setUrn(owner.getTypeUrn().toString());
-            result.setOwnershipType(entity);
-        }
-        if (owner.getOwner().getEntityType().equals("corpuser")) {
-            CorpUser partialOwner = new CorpUser();
-            partialOwner.setUrn(owner.getOwner().toString());
-            result.setOwner(partialOwner);
-        } else {
-            CorpGroup partialOwner = new CorpGroup();
-            partialOwner.setUrn(owner.getOwner().toString());
-            result.setOwner(partialOwner);
-        }
-        if (owner.hasSource()) {
-            result.setSource(OwnershipSourceMapper.map(owner.getSource()));
-        }
-        result.setAssociatedUrn(entityUrn.toString());
-        return result;
+    if (owner.getTypeUrn() != null) {
+      OwnershipTypeEntity entity = new OwnershipTypeEntity();
+      entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE);
+      entity.setUrn(owner.getTypeUrn().toString());
+      result.setOwnershipType(entity);
+    }
+    if (owner.getOwner().getEntityType().equals("corpuser")) {
+      CorpUser partialOwner = new CorpUser();
+      partialOwner.setUrn(owner.getOwner().toString());
+      result.setOwner(partialOwner);
+    } else {
+      CorpGroup partialOwner = new CorpGroup();
+      partialOwner.setUrn(owner.getOwner().toString());
+      result.setOwner(partialOwner);
+    }
+    if (owner.hasSource()) {
+      result.setSource(OwnershipSourceMapper.map(owner.getSource()));
    }
+    result.setAssociatedUrn(entityUrn.toString());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java
index d978abee5bdfc..a38c16d02f121 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java
@@ -1,56 +1,56 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
-import com.linkedin.common.urn.UrnUtils;
-import javax.annotation.Nonnull;
-
 import com.linkedin.common.Owner;
 import com.linkedin.common.OwnershipSource;
 import com.linkedin.common.OwnershipSourceType;
 import com.linkedin.common.OwnershipType;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.generated.OwnerUpdate;
-import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils;
 import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils;
+import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-import com.linkedin.common.urn.Urn;
-
 import java.net.URISyntaxException;
+import javax.annotation.Nonnull;
 
 public class OwnerUpdateMapper implements ModelMapper<OwnerUpdate, Owner> {
 
-    private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper();
-
-    public static Owner map(@Nonnull final OwnerUpdate input) {
-        return INSTANCE.apply(input);
+  private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper();
+
+  public static Owner map(@Nonnull final OwnerUpdate input) {
+    return INSTANCE.apply(input);
+  }
+
+  @Override
+  public Owner apply(@Nonnull final OwnerUpdate input) {
+    final Owner owner = new Owner();
+    try {
+      if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) {
+        owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner()));
+      } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) {
+        owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner()));
+      }
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
    }
-
-    @Override
-    public Owner apply(@Nonnull final OwnerUpdate input) {
-        final Owner owner = new Owner();
-        try {
-            if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) {
-                owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner()));
-            } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) {
-                owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner()));
-            }
-        } catch (URISyntaxException e) {
-            e.printStackTrace();
-        }
-        if (input.getOwnershipTypeUrn() != null) {
-            owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn()));
-        }
-        // For backwards compatibility we have to always set the deprecated type.
-        // If the type exists we assume it's an old ownership type that we can map to.
-        // Else if it's a net new custom ownership type set old type to CUSTOM.
-        OwnershipType type = input.getType() != null ? OwnershipType.valueOf(input.getType().toString())
+    if (input.getOwnershipTypeUrn() != null) {
+      owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn()));
+    }
+    // For backwards compatibility we have to always set the deprecated type.
+    // If the type exists we assume it's an old ownership type that we can map to.
+    // Else if it's a net new custom ownership type set old type to CUSTOM.
+    OwnershipType type =
+        input.getType() != null
+            ? OwnershipType.valueOf(input.getType().toString())
            : OwnershipType.CUSTOM;
-        owner.setType(type);
-
-        if (input.getOwnershipTypeUrn() != null) {
-            owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn()));
-            owner.setType(OwnershipType.CUSTOM);
-        }
+    owner.setType(type);
 
-        owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE));
-        return owner;
+    if (input.getOwnershipTypeUrn() != null) {
+      owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn()));
+      owner.setType(OwnershipType.CUSTOM);
    }
+
+    owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE));
+    return owner;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java
index 6614cfb28a478..31f637a047798 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java
@@ -2,30 +2,31 @@ import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.Ownership;
-
-import javax.annotation.Nonnull;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
 public class OwnershipMapper {
 
-    public static final OwnershipMapper INSTANCE = new OwnershipMapper();
+  public static final OwnershipMapper INSTANCE = new OwnershipMapper();
 
-    public static Ownership map(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) {
-        return INSTANCE.apply(ownership, entityUrn);
-    }
+  public static Ownership map(
+      @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) {
+    return INSTANCE.apply(ownership, entityUrn);
+  }
 
-    public Ownership apply(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) {
-        final Ownership result = new Ownership();
-        result.setLastModified(AuditStampMapper.map(ownership.getLastModified()));
-        result.setOwners(ownership.getOwners()
-            .stream()
-            .map(owner -> OwnerMapper.map(owner, entityUrn))
-            .collect(Collectors.toList()));
-        return result;
-    }
+  public Ownership apply(
+      @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) {
+    final Ownership result = new Ownership();
+    result.setLastModified(AuditStampMapper.map(ownership.getLastModified()));
+    result.setOwners(
+        ownership.getOwners().stream()
+            .map(owner -> OwnerMapper.map(owner, entityUrn))
+            .collect(Collectors.toList()));
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java
index abcc67c35f92a..75eaffb850a8b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java
@@ -3,28 +3,28 @@ import com.linkedin.datahub.graphql.generated.OwnershipSource;
 import com.linkedin.datahub.graphql.generated.OwnershipSourceType;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
-public class OwnershipSourceMapper implements ModelMapper<com.linkedin.common.OwnershipSource, OwnershipSource> {
+public class OwnershipSourceMapper
+    implements ModelMapper<com.linkedin.common.OwnershipSource, OwnershipSource> {
 
-    public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper();
+  public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper();
 
-    public static OwnershipSource map(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) {
-        return INSTANCE.apply(ownershipSource);
-    }
+  public static OwnershipSource map(
+      @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) {
+    return INSTANCE.apply(ownershipSource);
+  }
 
-    @Override
-    public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) {
-        final OwnershipSource result = new OwnershipSource();
-        result.setUrl(ownershipSource.getUrl());
-        result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString()));
-        return result;
-    }
+  @Override
+  public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) {
+    final OwnershipSource result = new OwnershipSource();
+    result.setUrl(ownershipSource.getUrl());
+    result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString()));
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java
index 1162c69d74938..97afbc7ddf855 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java
@@ -1,7 +1,5 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
-import java.util.stream.Collectors;
-
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.OwnerArray;
 import com.linkedin.common.Ownership;
@@ -9,31 +7,30 @@ import com.linkedin.data.template.SetMode;
 import com.linkedin.datahub.graphql.generated.OwnershipUpdate;
 import com.linkedin.datahub.graphql.types.mappers.InputModelMapper;
-
+import java.util.stream.Collectors;
 import lombok.NonNull;
 
 public class OwnershipUpdateMapper implements InputModelMapper<OwnershipUpdate, Ownership, Urn> {
 
-    private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper();
+  private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper();
 
-    public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) {
-        return INSTANCE.apply(input, actor);
-    }
+  public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) {
+    return INSTANCE.apply(input, actor);
+  }
 
-    @Override
-    public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) {
-        final Ownership ownership = new Ownership();
+  @Override
+  public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) {
+    final Ownership ownership = new Ownership();
 
-        ownership.setOwners(new OwnerArray(input.getOwners()
-            .stream()
-            .map(OwnerUpdateMapper::map)
-            .collect(Collectors.toList())));
+    ownership.setOwners(
+        new OwnerArray(
+            input.getOwners().stream().map(OwnerUpdateMapper::map).collect(Collectors.toList())));
 
-        final AuditStamp auditStamp = new AuditStamp();
-        auditStamp.setActor(actor, SetMode.IGNORE_NULL);
-        auditStamp.setTime(System.currentTimeMillis());
-        ownership.setLastModified(auditStamp);
+    final AuditStamp auditStamp = new AuditStamp();
+    auditStamp.setActor(actor, SetMode.IGNORE_NULL);
+    auditStamp.setTime(System.currentTimeMillis());
+    ownership.setLastModified(auditStamp);
 
-        return ownership;
-    }
+    return ownership;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java
index f3ac008734339..e2d29d0297449 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java
@@ -1,21 +1,21 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
-
 import com.linkedin.datahub.graphql.generated.SearchFlags;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 /**
  * Maps GraphQL SearchFlags to Pegasus
 *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
-public class SearchFlagsInputMapper implements ModelMapper<SearchFlags, com.linkedin.metadata.query.SearchFlags> {
+public class SearchFlagsInputMapper
+    implements ModelMapper<SearchFlags, com.linkedin.metadata.query.SearchFlags> {
 
   public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper();
 
-  public static com.linkedin.metadata.query.SearchFlags map(@Nonnull final SearchFlags searchFlags) {
+  public static com.linkedin.metadata.query.SearchFlags map(
+      @Nonnull final SearchFlags searchFlags) {
     return INSTANCE.apply(searchFlags);
   }
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java
index 942171017cea4..0758daf5df2e7 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java
@@ -5,13 +5,13 @@ import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
-public class SiblingsMapper implements ModelMapper<com.linkedin.common.Siblings, SiblingProperties> {
+public class SiblingsMapper
+    implements ModelMapper<com.linkedin.common.Siblings, SiblingProperties> {
 
   public static final SiblingsMapper INSTANCE = new SiblingsMapper();
 
@@ -23,10 +23,8 @@ public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings
   public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) {
     final SiblingProperties result = new SiblingProperties();
     result.setIsPrimary(siblings.isPrimary());
-    result.setSiblings(siblings.getSiblings()
-        .stream()
-        .map(UrnToEntityMapper::map)
-        .collect(Collectors.toList()));
+    result.setSiblings(
+        siblings.getSiblings().stream().map(UrnToEntityMapper::map).collect(Collectors.toList()));
     return result;
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java
index 25d01d8de0e4c..2d1efdffc496c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java
@@ -2,21 +2,20 @@ import com.linkedin.datahub.graphql.generated.Status;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 public class StatusMapper implements ModelMapper<com.linkedin.common.Status, Status> {
 
-    public static final StatusMapper INSTANCE = new StatusMapper();
+  public static final StatusMapper INSTANCE = new StatusMapper();
 
-    public static Status map(@Nonnull final com.linkedin.common.Status metadata) {
-        return INSTANCE.apply(metadata);
-    }
+  public static Status map(@Nonnull final com.linkedin.common.Status metadata) {
+    return INSTANCE.apply(metadata);
+  }
 
-    @Override
-    public Status apply(@Nonnull final com.linkedin.common.Status input) {
-        final Status result = new Status();
-        result.setRemoved(input.isRemoved());
-        return result;
-    }
+  @Override
+  public Status apply(@Nonnull final com.linkedin.common.Status input) {
+    final Status result = new Status();
+    result.setRemoved(input.isRemoved());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java
index 32c49a2010414..0e8d6822b7d09 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java
@@ -7,29 +7,28 @@ import java.util.Map;
 import javax.annotation.Nonnull;
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
 public class StringMapMapper implements ModelMapper<Map<String, String>, List<StringMapEntry>> {
 
-    public static final StringMapMapper INSTANCE = new StringMapMapper();
+  public static final StringMapMapper INSTANCE = new StringMapMapper();
 
-    public static List<StringMapEntry> map(@Nonnull final Map<String, String> input) {
-        return INSTANCE.apply(input);
-    }
+  public static List<StringMapEntry> map(@Nonnull final Map<String, String> input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public List<StringMapEntry> apply(@Nonnull final Map<String, String> input) {
-        List<StringMapEntry> results = new ArrayList<>();
-        for (String key : input.keySet()) {
-            final StringMapEntry entry = new StringMapEntry();
-            entry.setKey(key);
-            entry.setValue(input.get(key));
-            results.add(entry);
-        }
-        return results;
+  @Override
+  public List<StringMapEntry> apply(@Nonnull final Map<String, String> input) {
+    List<StringMapEntry> results = new ArrayList<>();
+    for (String key : input.keySet()) {
+      final StringMapEntry entry = new StringMapEntry();
+      entry.setKey(key);
+      entry.setValue(input.get(key));
+      results.add(entry);
    }
+    return results;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java
index 9aa94eae62999..55294e4b46822 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java
@@ -5,17 +5,20 @@ import java.util.ArrayList;
 import javax.annotation.Nonnull;
 
-public class SubTypesMapper implements ModelMapper<SubTypes, com.linkedin.datahub.graphql.generated.SubTypes> {
+public class SubTypesMapper
+    implements ModelMapper<SubTypes, com.linkedin.datahub.graphql.generated.SubTypes> {
 
   public static final SubTypesMapper INSTANCE = new SubTypesMapper();
 
-  public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) {
+  public static com.linkedin.datahub.graphql.generated.SubTypes map(
+      @Nonnull final SubTypes metadata) {
     return INSTANCE.apply(metadata);
   }
 
   @Override
   public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) {
-    final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes();
+    final com.linkedin.datahub.graphql.generated.SubTypes result =
+        new com.linkedin.datahub.graphql.generated.SubTypes();
     result.setTypeNames(new ArrayList<>(input.getTypeNames()));
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java
index 8359f1ec86f34..4fdf7edea07d9 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java
@@ -4,22 +4,24 @@ import java.util.List;
 import javax.annotation.Nonnull;
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
 */
 public class UpstreamLineagesMapper {
 
   public static final UpstreamLineagesMapper INSTANCE = new UpstreamLineagesMapper();
 
-  public static List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> map(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) {
+  public static List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> map(
+      @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) {
     return INSTANCE.apply(upstreamLineage);
   }
 
-  public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) {
-    if (!upstreamLineage.hasFineGrainedLineages() || upstreamLineage.getFineGrainedLineages() == null) {
+  public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(
+      @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) {
+    if (!upstreamLineage.hasFineGrainedLineages()
+        || upstreamLineage.getFineGrainedLineages() == null) {
       return new ArrayList<>();
     }
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java
index 34bf56a396b62..4c452af126201 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.common.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.Assertion;
 import com.linkedin.datahub.graphql.generated.Chart;
@@ -35,10 +37,7 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
-public class UrnToEntityMapper implements ModelMapper<com.linkedin.common.urn.Urn, Entity> { 
+public class UrnToEntityMapper implements ModelMapper<com.linkedin.common.urn.Urn, Entity> {
   public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper();
 
   public static Entity map(@Nonnull final com.linkedin.common.urn.Urn urn) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java
index 1e284efdb610f..0b156f11e8834 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java
@@ -7,14 +7,10 @@ import lombok.AllArgsConstructor;
 import lombok.Getter;
 
-
 @AllArgsConstructor
 public class MappingHelper<O> {
-  @Nonnull
-  private final EnvelopedAspectMap _aspectMap;
-  @Getter
-  @Nonnull
-  private final O result;
+  @Nonnull private final EnvelopedAspectMap _aspectMap;
+  @Getter @Nonnull private final O result;
 
   public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer<O, DataMap> consumer) {
     if (_aspectMap.containsKey(aspectName)) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java
index 7d1b374e1f9b6..00e339a0320ef 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java
@@ -5,7 +5,6 @@ import lombok.Getter;
 import lombok.Setter;
 
-
 @Data
 @Setter
 @Getter
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java
index d08300d648c32..46df032cbffbf 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java
@@ -1,19 +1,17 @@
 package com.linkedin.datahub.graphql.types.common.mappers.util;
 
+import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID;
+
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.mxe.SystemMetadata;
-
 import java.util.ArrayList;
 import java.util.List;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID;
-
 public class SystemMetadataUtils {
 
-  private SystemMetadataUtils() {
-  }
+  private SystemMetadataUtils() {}
 
   @Nullable
   public static Long getLastIngestedTime(@Nonnull EnvelopedAspectMap aspectMap) {
@@ -28,7 +26,8 @@ public static String getLastIngestedRunId(@Nonnull EnvelopedAspectMap aspectMap)
   }
 
   /**
-   * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects present for the entity.
+   * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects
+   * present for the entity.
   */
   @Nonnull
   public static List<RunInfo> getLastIngestionRuns(@Nonnull EnvelopedAspectMap aspectMap) {
@@ -36,12 +35,16 @@ public static List<RunInfo> getLastIngestionRuns(@Nonnull EnvelopedAspectMap asp
     for (String aspect : aspectMap.keySet()) {
       if (aspectMap.get(aspect).hasSystemMetadata()) {
         SystemMetadata systemMetadata = aspectMap.get(aspect).getSystemMetadata();
-        if (systemMetadata.hasLastRunId() && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) {
+        if (systemMetadata.hasLastRunId()
+            && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID)
+            && systemMetadata.hasLastObserved()) {
           Long lastObserved = systemMetadata.getLastObserved();
           String runId = systemMetadata.getLastRunId();
           RunInfo run = new RunInfo(runId, lastObserved);
           runs.add(run);
-        } else if (systemMetadata.hasRunId() && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) {
+        } else if (systemMetadata.hasRunId()
+            && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID)
+            && systemMetadata.hasLastObserved()) {
           // Handle the legacy case: Check original run ids.
Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getRunId(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java index 108aa7ed5b0c9..606cebba0880f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java @@ -6,7 +6,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import lombok.AllArgsConstructor; - @AllArgsConstructor public class UpdateMappingHelper { private final String entityName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 20cfe6ac46127..1200493666a59 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -18,8 +18,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.net.URISyntaxException; @@ -33,31 +33,31 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -public class ContainerType implements SearchableEntityType<Container, String>, +public class ContainerType + implements SearchableEntityType<Container, String>, com.linkedin.datahub.graphql.types.EntityType<Container, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME, - Constants.SUB_TYPES_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - Constants.CONTAINER_ASPECT_NAME, - Constants.DOMAINS_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.DATA_PRODUCTS_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + Constants.CONTAINER_PROPERTIES_ASPECT_NAME, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME, + Constants.SUB_TYPES_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + Constants.CONTAINER_ASPECT_NAME, + Constants.DOMAINS_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.DATA_PRODUCTS_ASPECT_NAME); private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "container"; private final EntityClient _entityClient; - public ContainerType(final EntityClient entityClient) { + public ContainerType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -77,28 +77,30 
@@ public Class<Container> objectClass() { } @Override - public List<DataFetcherResult<Container>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> containerUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Container>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> containerUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.CONTAINER_ENTITY_NAME, - new HashSet<>(containerUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.CONTAINER_ENTITY_NAME, + new HashSet<>(containerUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Container>newResult() - .data(ContainerMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<Container>newResult() + .data(ContainerMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Container", e); @@ -114,24 +116,36 @@ private Urn getUrn(final String urnStr) { } @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index b81259e78be3e..07594c53c6831 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.container.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -15,11 +17,11 @@ import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -33,9 +35,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class ContainerMapper { @Nullable @@ -49,46 +48,61 @@ public static Container map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.CONTAINER); - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } - final EnvelopedAspect envelopedContainerProperties = aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedContainerProperties = + aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); if (envelopedContainerProperties != null) { - result.setProperties(mapContainerProperties(new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); + result.setProperties( + mapContainerProperties( + new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedEditableContainerProperties = aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedEditableContainerProperties = + aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); if (envelopedEditableContainerProperties != null) { - result.setEditableProperties(mapContainerEditableProperties(new EditableContainerProperties(envelopedEditableContainerProperties.getValue().data()))); + result.setEditableProperties( + 
mapContainerEditableProperties( + new EditableContainerProperties( + envelopedEditableContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); @@ -103,12 +117,13 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); if (envelopedContainer != null) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(envelopedContainer.getValue().data()); - result.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(envelopedContainer.getValue().data()); + result.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } final EnvelopedAspect envelopedDomains = aspects.get(Constants.DOMAINS_ASPECT_NAME); @@ -120,21 +135,25 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + result.setDeprecation( + DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); } return result; } - private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties(final ContainerProperties gmsProperties, Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = new com.linkedin.datahub.graphql.generated.ContainerProperties(); + private static 
com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties( + final ContainerProperties gmsProperties, Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.ContainerProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { propertiesResult.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - propertiesResult.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + propertiesResult.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } if (gmsProperties.hasQualifiedName()) { propertiesResult.setQualifiedName(gmsProperties.getQualifiedName().toString()); @@ -143,10 +162,11 @@ private static com.linkedin.datahub.graphql.generated.ContainerProperties mapCon return propertiesResult; } - private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties mapContainerEditableProperties( - final EditableContainerProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.ContainerEditableProperties editableContainerProperties = - new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties + mapContainerEditableProperties(final EditableContainerProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.ContainerEditableProperties + editableContainerProperties = + new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); editableContainerProperties.setDescription(gmsProperties.getDescription()); return editableContainerProperties; } @@ -158,5 +178,5 @@ private static DataPlatform mapPlatform(final DataPlatformInstance platformInsta return dummyPlatform; } - private ContainerMapper() { } + private ContainerMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 285a119be0d43..371cf6b280c20 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpgroup; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -7,8 +12,6 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -27,8 +30,8 @@ import 
com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -42,155 +45,193 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - -public class CorpGroupType implements SearchableEntityType<CorpGroup, String>, MutableType<CorpGroupUpdateInput, CorpGroup> { - - private final EntityClient _entityClient; - - public CorpGroupType(final EntityClient entityClient) { - _entityClient = entityClient; +public class CorpGroupType + implements SearchableEntityType<CorpGroup, String>, + MutableType<CorpGroupUpdateInput, CorpGroup> { + + private final EntityClient _entityClient; + + public CorpGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<CorpGroup> objectClass() { + return CorpGroup.class; + } + + public Class<CorpGroupUpdateInput> inputClass() { + return CorpGroupUpdateInput.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_GROUP; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<CorpGroup>> batchLoad( + final List<String> urns, final QueryContext context) { + try { + final List<Urn> corpGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map<Urn, EntityResponse> corpGroupMap = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>(corpGroupUrns), + null, + context.getAuthentication()); + + final List<EntityResponse> results = new ArrayList<>(); + for (Urn urn : corpGroupUrns) { + results.add(corpGroupMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpGroup -> + gmsCorpGroup == null + ? 
null + : DataFetcherResult.<CorpGroup>newResult() + .data(CorpGroupMapper.map(gmsCorpGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load CorpGroup", e); } - - @Override - public Class<CorpGroup> objectClass() { - return CorpGroup.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpGroup", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpGroup", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public CorpGroup update( + @Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Urn groupUrn = Urn.createFromString(urn); + Map<Urn, EntityResponse> gmsResponse = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + ImmutableSet.of(groupUrn), + ImmutableSet.of(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), + context.getAuthentication()); + + CorpGroupEditableInfo existingCorpGroupEditableInfo = null; + if (gmsResponse.containsKey(groupUrn) + && gmsResponse + .get(groupUrn) + .getAspects() + .containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { + existingCorpGroupEditableInfo = + new CorpGroupEditableInfo( + gmsResponse + .get(groupUrn) + .getAspects() + .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME) + .getValue() + .data()); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); } - - public Class<CorpGroupUpdateInput> inputClass() { - return CorpGroupUpdateInput.class; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpGroupUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final CorpGroupUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getDescription() != null) { + // Requires the Update Docs privilege. 
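+      // (Slack/email-only updates instead fall through to the Update Contact Info branch below.)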
+ specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { + // Requires the Update Contact info privilege. + specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); } - @Override - public EntityType type() { - return EntityType.CORP_GROUP; - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public List<DataFetcherResult<CorpGroup>> batchLoad(final List<String> urns, final QueryContext context) { - try { - final List<Urn> corpGroupUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map<Urn, EntityResponse> corpGroupMap = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(corpGroupUrns), null, context.getAuthentication()); - - final List<EntityResponse> results = new ArrayList<>(); - for (Urn urn : corpGroupUrns) { - results.add(corpGroupMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpGroup -> gmsCorpGroup == null ? null - : DataFetcherResult.<CorpGroup>newResult().data(CorpGroupMapper.map(gmsCorpGroup)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load CorpGroup", e); - } - } + private RecordTemplate mapCorpGroupEditableInfo( + CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { + CorpGroupEditableInfo result = existing != null ? 
existing : new CorpGroupEditableInfo(); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult - searchResult = _entityClient.search("corpGroup", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (input.getDescription() != null) { + result.setDescription(input.getDescription()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpGroup", query, filters, limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - @Override - public CorpGroup update(@Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Urn groupUrn = Urn.createFromString(urn); - Map<Urn, EntityResponse> gmsResponse = - _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, ImmutableSet.of(groupUrn), ImmutableSet.of( - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), - context.getAuthentication()); - - CorpGroupEditableInfo existingCorpGroupEditableInfo = null; - if (gmsResponse.containsKey(groupUrn) && gmsResponse.get(groupUrn).getAspects().containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { - existingCorpGroupEditableInfo = new CorpGroupEditableInfo(gmsResponse.get(groupUrn).getAspects() - .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME).getValue().data()); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorizedToUpdate(String urn, CorpGroupUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpGroupUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getDescription() != null) { - // Requires the Update Docs privilege. - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { - // Requires the Update Contact info privilege. 
- specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } - - private RecordTemplate mapCorpGroupEditableInfo(CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { - CorpGroupEditableInfo result = existing != null ? existing : new CorpGroupEditableInfo(); - - if (input.getDescription() != null) { - result.setDescription(input.getDescription()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - return result; + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java index c1cd33b0077f6..318506d9d61fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpgroup; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpGroupUrn; +import java.net.URISyntaxException; public class CorpGroupUtils { - private CorpGroupUtils() { } + private CorpGroupUtils() {} - public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpGroupUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); - } + public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpGroupUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java index f476794bc545e..a6e14535cf0b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java @@ -3,28 +3,32 @@ import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class CorpGroupEditablePropertiesMapper implements ModelMapper<com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> { +public class CorpGroupEditablePropertiesMapper + implements ModelMapper< + com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> { - public static final CorpGroupEditablePropertiesMapper INSTANCE = new CorpGroupEditablePropertiesMapper(); + public static final CorpGroupEditablePropertiesMapper INSTANCE = + new CorpGroupEditablePropertiesMapper(); - public static CorpGroupEditableProperties map(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public static CorpGroupEditableProperties map( + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { return INSTANCE.apply(corpGroupEditableInfo); } @Override - public CorpGroupEditableProperties apply(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public CorpGroupEditableProperties apply( + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { final CorpGroupEditableProperties result = new CorpGroupEditableProperties(); result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT)); result.setSlack(corpGroupEditableInfo.getSlack(GetMode.DEFAULT)); result.setEmail(corpGroupEditableInfo.getEmail(GetMode.DEFAULT)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java index 3d2d4aea2b001..04d0cc8ce94e6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java @@ -1,48 +1,58 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; -import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class CorpGroupInfoMapper implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupInfo> { +public class CorpGroupInfoMapper + implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupInfo> { - public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); + public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); - public static CorpGroupInfo map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); - } + public static CorpGroupInfo map( + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + return INSTANCE.apply(corpGroupInfo); + } - @Override - public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { - final CorpGroupInfo result = new CorpGroupInfo(); - result.setEmail(info.getEmail()); - result.setDescription(info.getDescription()); - result.setDisplayName(info.getDisplayName()); - if (info.hasAdmins()) { - result.setAdmins(info.getAdmins().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasMembers()) { - result.setMembers(info.getMembers().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasGroups()) { - result.setGroups(info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); - } - return result; + @Override + public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + final CorpGroupInfo result = new CorpGroupInfo(); + result.setEmail(info.getEmail()); + result.setDescription(info.getDescription()); + result.setDisplayName(info.getDisplayName()); + if (info.hasAdmins()) { + result.setAdmins( + info.getAdmins().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasMembers()) { + result.setMembers( + info.getMembers().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasGroups()) { + result.setGroups( + info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 0fb1b66c644d7..52e200d19923a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Origin; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; @@ -16,78 +18,79 @@ import com.linkedin.metadata.key.CorpGroupKey; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects 
to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class CorpGroupMapper implements ModelMapper<EntityResponse, CorpGroup> { - public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); + public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); - public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { - final CorpGroup result = new CorpGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { + final CorpGroup result = new CorpGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<CorpGroup> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); - mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); - mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); - if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { - mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); - } else { - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - result.setOrigin(mappedGroupOrigin); - } - return mappingHelper.getResult(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<CorpGroup> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); + mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); + mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); + if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { + mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); + } else { + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + result.setOrigin(mappedGroupOrigin); } + return mappingHelper.getResult(); + } - private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); - corpGroup.setName(corpGroupKey.getName()); - } + private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); + corpGroup.setName(corpGroupKey.getName()); + } - private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull 
DataMap dataMap) { - CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); - corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); - corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); - } + private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); + corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); + corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); + } - private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - corpGroup.setEditableProperties(CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); - } + private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + corpGroup.setEditableProperties( + CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); + } - private void mapOwnership(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); - } + private void mapOwnership( + @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); + } - private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - Origin groupOrigin = new Origin(dataMap); - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - if (groupOrigin.hasType()) { - mappedGroupOrigin.setType( - com.linkedin.datahub.graphql.generated.OriginType.valueOf(groupOrigin.getType().toString())); - } else { - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - } - if (groupOrigin.hasExternalType()) { - mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); - } - corpGroup.setOrigin(mappedGroupOrigin); + private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + Origin groupOrigin = new Origin(dataMap); + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + if (groupOrigin.hasType()) { + mappedGroupOrigin.setType( + com.linkedin.datahub.graphql.generated.OriginType.valueOf( + groupOrigin.getType().toString())); + } else { + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + } + if (groupOrigin.hasExternalType()) { + mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); } + corpGroup.setOrigin(mappedGroupOrigin); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java index 266d8be67cb06..29d0482863971 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java @@ -3,19 +3,20 @@ import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.CorpGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class CorpGroupPropertiesMapper implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupProperties> { +public class CorpGroupPropertiesMapper + implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupProperties> { public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper(); - public static CorpGroupProperties map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + public static CorpGroupProperties map( + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { return INSTANCE.apply(corpGroupInfo); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index db2b49c790f57..5749eef970fce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.url.Url; import com.linkedin.common.urn.Urn; @@ -8,8 +13,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; @@ -29,8 +32,8 @@ import com.linkedin.identity.CorpUserEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -45,176 +48,206 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class CorpUserType implements SearchableEntityType<CorpUser, String>, MutableType<CorpUserUpdateInput, CorpUser> { +public class CorpUserType + implements SearchableEntityType<CorpUser, String>, MutableType<CorpUserUpdateInput, CorpUser> { + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; + + public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { + _entityClient = entityClient; + _featureFlags = featureFlags; + } + + @Override + public Class<CorpUser> objectClass() { + return CorpUser.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_USER; + } + + @Override + public Function<Entity, String> getKeyProvider() { 
+ return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<CorpUser>> batchLoad( + final List<String> urns, final QueryContext context) { + try { + final List<Urn> corpUserUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map<Urn, EntityResponse> corpUserMap = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + new HashSet<>(corpUserUrns), + null, + context.getAuthentication()); + + final List<EntityResponse> results = new ArrayList<>(); + for (Urn urn : corpUserUrns) { + results.add(corpUserMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpUser -> + gmsCorpUser == null + ? null + : DataFetcherResult.<CorpUser>newResult() + .data(CorpUserMapper.map(gmsCorpUser, _featureFlags)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); + } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpuser", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + public Class<CorpUserUpdateInput> inputClass() { + return CorpUserUpdateInput.class; + } + + @Override + public CorpUser update( + @Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Optional<CorpUserEditableInfo> existingCorpUserEditableInfo = + _entityClient.getVersionedAspect( + urn, + CORP_USER_EDITABLE_INFO_NAME, + 0L, + CorpUserEditableInfo.class, + context.getAuthentication()); + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_USER_EDITABLE_INFO_NAME, + mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpUserUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + + // Either the updating actor is the user, or the actor has privileges to update the user + // information. 
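+    // Self-updates therefore short-circuit the privilege check below.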
+ return context.getActorUrn().equals(urn) + || AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getSlack() != null + || updateInput.getEmail() != null + || updateInput.getPhone() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); + } else if (updateInput.getAboutMe() != null + || updateInput.getDisplayName() != null + || updateInput.getPictureLink() != null + || updateInput.getTeams() != null + || updateInput.getTitle() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); + } - private final EntityClient _entityClient; - private final FeatureFlags _featureFlags; + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { - _entityClient = entityClient; - _featureFlags = featureFlags; - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public Class<CorpUser> objectClass() { - return CorpUser.class; + private RecordTemplate mapCorpUserEditableInfo( + CorpUserUpdateInput input, Optional<CorpUserEditableInfo> existing) { + CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public EntityType type() { - return EntityType.CORP_USER; + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; + if (input.getPictureLink() != null) { + result.setPictureLink(new Url(input.getPictureLink())); } - - @Override - public List<DataFetcherResult<CorpUser>> batchLoad(final List<String> urns, final QueryContext context) { - try { - final List<Urn> corpUserUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map<Urn, EntityResponse> corpUserMap = _entityClient - .batchGetV2(CORP_USER_ENTITY_NAME, new HashSet<>(corpUserUrns), null, - context.getAuthentication()); - - final List<EntityResponse> results = new ArrayList<>(); - for (Urn urn : corpUserUrns) { - results.add(corpUserMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpUser -> gmsCorpUser == null ? 
null - : DataFetcherResult.<CorpUser>newResult().data(CorpUserMapper.map(gmsCorpUser, _featureFlags)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search("corpuser", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (input.getSkills() != null) { + result.setSkills(new StringArray(input.getSkills())); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (input.getTeams() != null) { + result.setTeams(new StringArray(input.getTeams())); } - - public Class<CorpUserUpdateInput> inputClass() { - return CorpUserUpdateInput.class; + if (input.getTitle() != null) { + result.setTitle(input.getTitle()); } - - @Override - public CorpUser update(@Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Optional<CorpUserEditableInfo> existingCorpUserEditableInfo = - _entityClient.getVersionedAspect(urn, CORP_USER_EDITABLE_INFO_NAME, 0L, CorpUserEditableInfo.class, - context.getAuthentication()); - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_USER_EDITABLE_INFO_NAME, mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (input.getPhone() != null) { + result.setPhone(input.getPhone()); } - - private boolean isAuthorizedToUpdate(String urn, CorpUserUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - - // Either the updating actor is the user, or the actor has privileges to update the user information. 
- return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getSlack() != null - || updateInput.getEmail() != null - || updateInput.getPhone() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } else if (updateInput.getAboutMe() != null - || updateInput.getDisplayName() != null - || updateInput.getPictureLink() != null - || updateInput.getTeams() != null - || updateInput.getTitle() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } - private RecordTemplate mapCorpUserEditableInfo(CorpUserUpdateInput input, Optional<CorpUserEditableInfo> existing) { - CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getPictureLink() != null) { - result.setPictureLink(new Url(input.getPictureLink())); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getSkills() != null) { - result.setSkills(new StringArray(input.getSkills())); - } - if (input.getTeams() != null) { - result.setTeams(new StringArray(input.getTeams())); - } - if (input.getTitle() != null) { - result.setTitle(input.getTitle()); - } - if (input.getPhone() != null) { - result.setPhone(input.getPhone()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java index 0b5b40c3117e0..9cf8da69281a9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpuser; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpuserUrn; +import java.net.URISyntaxException; public class CorpUserUtils { - private CorpUserUtils() { } + private CorpUserUtils() {} - public static CorpuserUrn getCorpUserUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpuserUrn.createFromString(urnStr); - } catch (URISyntaxException 
e) { - throw new RuntimeException(String.format("Failed to create CorpUserUrn from string %s", urnStr), e); - } + public static CorpuserUrn getCorpUserUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpuserUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpUserUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java index 2a9f0efd69bcc..3ee353293393e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java @@ -2,36 +2,38 @@ import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class CorpUserEditableInfoMapper implements ModelMapper<com.linkedin.identity.CorpUserEditableInfo, CorpUserEditableProperties> { +public class CorpUserEditableInfoMapper + implements ModelMapper<com.linkedin.identity.CorpUserEditableInfo, CorpUserEditableProperties> { - public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); + public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); - public static CorpUserEditableProperties map(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - return INSTANCE.apply(info); - } + public static CorpUserEditableProperties map( + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + return INSTANCE.apply(info); + } - @Override - public CorpUserEditableProperties apply(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - final CorpUserEditableProperties result = new CorpUserEditableProperties(); - result.setDisplayName(info.getDisplayName()); - result.setTitle(info.getTitle()); - result.setAboutMe(info.getAboutMe()); - result.setSkills(info.getSkills()); - result.setTeams(info.getTeams()); - result.setEmail(info.getEmail()); - result.setPhone(info.getPhone()); - result.setSlack(info.getSlack()); - if (info.hasPictureLink()) { - result.setPictureLink(info.getPictureLink().toString()); - } - return result; + @Override + public CorpUserEditableProperties apply( + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + final CorpUserEditableProperties result = new CorpUserEditableProperties(); + result.setDisplayName(info.getDisplayName()); + result.setTitle(info.getTitle()); + result.setAboutMe(info.getAboutMe()); + result.setSkills(info.getSkills()); + result.setTeams(info.getTeams()); + result.setEmail(info.getEmail()); + result.setPhone(info.getPhone()); + result.setSlack(info.getSlack()); + if (info.hasPictureLink()) { + result.setPictureLink(info.getPictureLink().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java index 96f60c08cd7c2..9044f4d510bcf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java @@ -3,38 +3,38 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class CorpUserInfoMapper implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserInfo> { +public class CorpUserInfoMapper + implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserInfo> { - public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); + public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); - public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); - } + public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(corpUserInfo); + } - @Override - public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { - final CorpUserInfo result = new CorpUserInfo(); - result.setActive(info.isActive()); - result.setCountryCode(info.getCountryCode()); - result.setDepartmentId(info.getDepartmentId()); - result.setDepartmentName(info.getDepartmentName()); - result.setEmail(info.getEmail()); - result.setDisplayName(info.getDisplayName()); - result.setFirstName(info.getFirstName()); - result.setLastName(info.getLastName()); - result.setFullName(info.getFullName()); - result.setTitle(info.getTitle()); - if (info.hasManagerUrn()) { - result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); - } - return result; + @Override + public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + final CorpUserInfo result = new CorpUserInfo(); + result.setActive(info.isActive()); + result.setCountryCode(info.getCountryCode()); + result.setDepartmentId(info.getDepartmentId()); + result.setDepartmentName(info.getDepartmentName()); + result.setEmail(info.getEmail()); + result.setDisplayName(info.getDisplayName()); + result.setFirstName(info.getFirstName()); + result.setLastName(info.getLastName()); + result.setFullName(info.getFullName()); + result.setTitle(info.getTitle()); + if (info.hasManagerUrn()) { + result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index adcfb91c9cdf2..98783131a2d52 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import static 
com.linkedin.metadata.Constants.*; + import com.linkedin.common.GlobalTags; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -26,120 +28,134 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class CorpUserMapper { - public static final CorpUserMapper INSTANCE = new CorpUserMapper(); - - public static CorpUser map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse, null); - } - - public static CorpUser map(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - return INSTANCE.apply(entityResponse, featureFlags); - } - - public CorpUser apply(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - final CorpUser result = new CorpUser(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_USER); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<CorpUser> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); - mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); - mappingHelper.mapToResult(CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setEditableProperties(CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> + public static final CorpUserMapper INSTANCE = new CorpUserMapper(); + + public static CorpUser map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse, null); + } + + public static CorpUser map( + @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + return INSTANCE.apply(entityResponse, featureFlags); + } + + public CorpUser apply( + @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + final CorpUser result = new CorpUser(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_USER); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<CorpUser> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); + mappingHelper.mapToResult( + CORP_USER_INFO_ASPECT_NAME, + (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); + mappingHelper.mapToResult( + CORP_USER_EDITABLE_INFO_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setEditableProperties( + CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (corpUser, dataMap) -> corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME, - (corpUser, dataMap) -> corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); - mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); - - mapCorpUserSettings(result, 
aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); - - return mappingHelper.getResult(); + mappingHelper.mapToResult( + CORP_USER_STATUS_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); + + mapCorpUserSettings( + result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); + + return mappingHelper.getResult(); + } + + private void mapCorpUserSettings( + @Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { + CorpUserSettings corpUserSettings = new CorpUserSettings(); + if (envelopedAspect != null) { + corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); } + com.linkedin.datahub.graphql.generated.CorpUserSettings result = + new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - private void mapCorpUserSettings(@Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { - CorpUserSettings corpUserSettings = new CorpUserSettings(); - if (envelopedAspect != null) { - corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); - } - com.linkedin.datahub.graphql.generated.CorpUserSettings result = - new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - - // Map Appearance Settings -- Appearance settings always exist. - result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); + // Map Appearance Settings -- Appearance settings always exist. + result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); - // Map Views Settings. - if (corpUserSettings.hasViews()) { - result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); - } - - corpUser.setSettings(result); + // Map Views Settings. 
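+    // Views settings are optional, so they are only mapped when present.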
+ if (corpUserSettings.hasViews()) { + result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); } - @Nonnull - private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( - @Nonnull final CorpUserSettings corpUserSettings, - @Nullable final FeatureFlags featureFlags - ) { - CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); - if (featureFlags != null) { - appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); - } else { - appearanceResult.setShowSimplifiedHomepage(false); - } - - if (corpUserSettings.hasAppearance()) { - appearanceResult.setShowSimplifiedHomepage(corpUserSettings.getAppearance().isShowSimplifiedHomepage()); - } - return appearanceResult; + corpUser.setSettings(result); + } + + @Nonnull + private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( + @Nonnull final CorpUserSettings corpUserSettings, @Nullable final FeatureFlags featureFlags) { + CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); + if (featureFlags != null) { + appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); + } else { + appearanceResult.setShowSimplifiedHomepage(false); } - @Nonnull - private CorpUserViewsSettings mapCorpUserViewsSettings(@Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { - CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); - - if (viewsSettings.hasDefaultView()) { - final DataHubView unresolvedView = new DataHubView(); - unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); - unresolvedView.setType(EntityType.DATAHUB_VIEW); - viewsResult.setDefaultView(unresolvedView); - } - - return viewsResult; - } - - private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserKey corpUserKey = new CorpUserKey(dataMap); - corpUser.setUsername(corpUserKey.getUsername()); + if (corpUserSettings.hasAppearance()) { + appearanceResult.setShowSimplifiedHomepage( + corpUserSettings.getAppearance().isShowSimplifiedHomepage()); } - - private void mapCorpUserInfo(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); - corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); - corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); - CorpUserProperties corpUserProperties = corpUser.getProperties(); - if (corpUserInfo.hasCustomProperties()) { - corpUserProperties.setCustomProperties(CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); - } - corpUser.setProperties(corpUserProperties); + return appearanceResult; + } + + @Nonnull + private CorpUserViewsSettings mapCorpUserViewsSettings( + @Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { + CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); + + if (viewsSettings.hasDefaultView()) { + final DataHubView unresolvedView = new DataHubView(); + unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); + unresolvedView.setType(EntityType.DATAHUB_VIEW); + viewsResult.setDefaultView(unresolvedView); } - private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); - boolean isNativeUser = - corpUserCredentials != null && corpUserCredentials.hasSalt() && corpUserCredentials.hasHashedPassword(); - corpUser.setIsNativeUser(isNativeUser); + return viewsResult; + } + + 
private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserKey corpUserKey = new CorpUserKey(dataMap); + corpUser.setUsername(corpUserKey.getUsername()); + } + + private void mapCorpUserInfo( + @Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); + corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); + corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); + CorpUserProperties corpUserProperties = corpUser.getProperties(); + if (corpUserInfo.hasCustomProperties()) { + corpUserProperties.setCustomProperties( + CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); } + corpUser.setProperties(corpUserProperties); + } + + private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); + boolean isNativeUser = + corpUserCredentials != null + && corpUserCredentials.hasSalt() + && corpUserCredentials.hasHashedPassword(); + corpUser.setIsNativeUser(isNativeUser); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java index c64406a74733b..106e3de661201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java @@ -3,18 +3,16 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ -public class CorpUserPropertiesMapper implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserProperties> { +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ +public class CorpUserPropertiesMapper + implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserProperties> { public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper(); - public static CorpUserProperties map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + public static CorpUserProperties map( + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { return INSTANCE.apply(corpUserInfo); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java index d0644fbfdacec..dd9e465a2d4ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java @@ -2,14 +2,15 @@ import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class CorpUserStatusMapper implements ModelMapper<com.linkedin.identity.CorpUserStatus, CorpUserStatus> { +public class CorpUserStatusMapper + implements ModelMapper<com.linkedin.identity.CorpUserStatus, CorpUserStatus> { public static final CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper(); - public static CorpUserStatus map(@Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { + public static CorpUserStatus map( + @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { return INSTANCE.apply(corpUserStatus); } @@ -18,4 +19,4 @@ public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus // Warning- if the backend provides an unexpected value this will fail. 
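  // (Editorial aside, not part of this patch.) The warning above exists because
  // Enum.valueOf throws IllegalArgumentException when the string does not name a
  // constant of the GraphQL CorpUserStatus enum. A defensive variant -- shown only as
  // a hypothetical sketch, not what the code does -- would be:
  //
  //   try {
  //     return CorpUserStatus.valueOf(status.getStatus());
  //   } catch (IllegalArgumentException e) {
  //     return null; // or log and fall back, if the schema had an UNKNOWN value
  //   }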
return CorpUserStatus.valueOf(status.getStatus()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 104c7c004cb66..d01f9b3945dc3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dashboard; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -55,191 +58,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DashboardType + implements SearchableEntityType<Dashboard, String>, + BrowsableEntityType<Dashboard, String>, + MutableType<DashboardUpdateInput, Dashboard> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DASHBOARD_KEY_ASPECT_NAME, + DASHBOARD_INFO_ASPECT_NAME, + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "tool"); -public class DashboardType implements SearchableEntityType<Dashboard, String>, BrowsableEntityType<Dashboard, String>, - MutableType<DashboardUpdateInput, Dashboard> { - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DASHBOARD_KEY_ASPECT_NAME, - DASHBOARD_INFO_ASPECT_NAME, - EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - 
DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "tool"); - - private final EntityClient _entityClient; - - public DashboardType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class<DashboardUpdateInput> inputClass() { - return DashboardUpdateInput.class; - } + public DashboardType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DASHBOARD; - } + @Override + public Class<DashboardUpdateInput> inputClass() { + return DashboardUpdateInput.class; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.DASHBOARD; + } - @Override - public Class<Dashboard> objectClass() { - return Dashboard.class; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public List<DataFetcherResult<Dashboard>> batchLoad(@Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dashboardMap = - _entityClient.batchGetV2( - Constants.DASHBOARD_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dashboardMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDashboard -> gmsDashboard == null ? 
null : DataFetcherResult.<Dashboard>newResult() - .data(DashboardMapper.map(gmsDashboard)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Dashboards", e); - } - } + @Override + public Class<Dashboard> objectClass() { + return Dashboard.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dashboard", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List<DataFetcherResult<Dashboard>> batchLoad( + @Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> dashboardMap = + _entityClient.batchGetV2( + Constants.DASHBOARD_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dashboardMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDashboard -> + gmsDashboard == null + ? null + : DataFetcherResult.<Dashboard>newResult() + .data(DashboardMapper.map(gmsDashboard)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Dashboards", e); } + } - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dashboard", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { - try { - return DashboardUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); - } - } + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dashboard", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - @Override - public Dashboard update(@Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DashboardUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { + try { + return DashboardUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); } + } + + @Override + public Dashboard update( + @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DashboardUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final DashboardUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DashboardUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
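  // (Editorial aside, not part of this patch.) Assuming the usual OR-of-ANDs reading
  // of these types, the group built below authorizes a caller who holds either:
  //   EDIT_ENTITY                                       (allPrivilegesGroup), or
  //   every privilege accumulated in specificPrivileges (specificPrivilegeGroup).
  // For example, an update touching ownership, docs, and tags passes with EDIT_ENTITY
  // alone, or with EDIT_ENTITY_OWNERS, EDIT_ENTITY_DOCS, and EDIT_ENTITY_TAGS together.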
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 432624ac4699f..704d2ae308c1a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -26,13 +28,13 @@ import com.linkedin.datahub.graphql.types.chart.mappers.InputFieldsMapper; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -49,161 +51,202 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DashboardMapper implements ModelMapper<EntityResponse, Dashboard> { - public static final DashboardMapper INSTANCE = new DashboardMapper(); - - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { - final Dashboard result = new Dashboard(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DASHBOARD); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<Dashboard> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> + public static final DashboardMapper INSTANCE = new DashboardMapper(); + + public static Dashboard map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public 
Dashboard apply(@Nonnull final EntityResponse entityResponse) { + final Dashboard result = new Dashboard(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DASHBOARD); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<Dashboard> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - 
dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final DashboardKey gmsKey = new DashboardKey(dataMap); - dashboard.setDashboardId(gmsKey.getDashboardId()); - dashboard.setTool(gmsKey.getDashboardTool()); - dashboard.setPlatform(DataPlatform.builder() + return mappingHelper.getResult(); + } + + private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final DashboardKey gmsKey = new DashboardKey(dataMap); + dashboard.setDashboardId(gmsKey.getDashboardId()); + dashboard.setTool(gmsKey.getDashboardTool()); + dashboard.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapDashboardInfo( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = + new com.linkedin.dashboard.DashboardInfo(dataMap); + dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); + dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link + * DashboardInfo} + */ + private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardInfo result = new DashboardInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + result.setCharts( + info.getCharts().stream() + .map( + urn -> { + final Chart chart = new Chart(); + chart.setUrn(urn.toString()); + return chart; + }) + .collect(Collectors.toList())); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapDashboardInfo(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link DashboardInfo} - */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardInfo result = new DashboardInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - result.setCharts(info.getCharts().stream().map(urn -> { - final Chart chart = new Chart(); - chart.setUrn(urn.toString()); - return chart; - }).collect(Collectors.toList())); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link DashboardProperties} - */ - private DashboardProperties mapDashboardInfoToProperties(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardProperties result = new DashboardProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); } - - private void mapEditableDashboardProperties(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(dataMap); - final DashboardEditableProperties dashboardEditableProperties = new DashboardEditableProperties(); - dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); - dashboard.setEditableProperties(dashboardEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link + * DashboardProperties} + */ + private DashboardProperties mapDashboardInfoToProperties( + final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardProperties result = new DashboardProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapGlobalTags(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dashboard.setGlobalTags(globalTags); - dashboard.setTags(globalTags); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dashboard.setContainer(Container - .builder() + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + return result; + } + + private void mapEditableDashboardProperties( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(dataMap); + final DashboardEditableProperties dashboardEditableProperties = + new DashboardEditableProperties(); + dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); + dashboard.setEditableProperties(dashboardEditableProperties); + } + + private void mapGlobalTags( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dashboard.setGlobalTags(globalTags); + dashboard.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dashboard.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); - } + private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index f084dbc0bc09f..6212663ee87e4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,67 
+19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class DashboardUpdateInputMapper + implements InputModelMapper<DashboardUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); + public static Collection<MetadataChangeProposal> map( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(dashboardUpdateInput, actor); + } -public class DashboardUpdateInputMapper implements - InputModelMapper<DashboardUpdateInput, Collection<MetadataChangeProposal>, Urn> { - public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); - - public static Collection<MetadataChangeProposal> map(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); - } + @Override + public Collection<MetadataChangeProposal> apply( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { - @Override - public Collection<MetadataChangeProposal> apply(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { + final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - - if (dashboardUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } - - if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dashboardUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } else { - // Tags override global tags - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dashboardUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dashboardUpdateInput.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); - editableDashboardProperties.setDescription(dashboardUpdateInput.getEditableProperties().getDescription()); - if (!editableDashboardProperties.hasCreated()) { - editableDashboardProperties.setCreated(auditStamp); - } - 
editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); - } + if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (dashboardUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } else { + // Tags override global tags + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dashboardUpdateInput.getEditableProperties() != null) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); + editableDashboardProperties.setDescription( + dashboardUpdateInput.getEditableProperties().getDescription()); + if (!editableDashboardProperties.hasCreated()) { + editableDashboardProperties.setCreated(auditStamp); + } + editableDashboardProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index d257aef4be565..782ec3d3a6c07 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper<DashboardUsageMetrics> { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); @@ -18,8 +17,10 @@ public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect enveloped @Override public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), com.linkedin.dashboard.DashboardUsageStatistics.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + com.linkedin.dashboard.DashboardUsageStatistics.class); final com.linkedin.datahub.graphql.generated.DashboardUsageMetrics dashboardUsageMetrics = new com.linkedin.datahub.graphql.generated.DashboardUsageMetrics(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 54f7660064c05..6ec1979cd090d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataflow; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataFlowType + implements SearchableEntityType<DataFlow, String>, + BrowsableEntityType<DataFlow, String>, + MutableType<DataFlowUpdateInput, DataFlow> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_FLOW_KEY_ASPECT_NAME, + DATA_FLOW_INFO_ASPECT_NAME, + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); + private final EntityClient _entityClient; -public class DataFlowType implements SearchableEntityType<DataFlow, String>, BrowsableEntityType<DataFlow, String>, - MutableType<DataFlowUpdateInput, DataFlow> { - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_FLOW_KEY_ASPECT_NAME, - DATA_FLOW_INFO_ASPECT_NAME, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); - private final EntityClient _entityClient; - - public DataFlowType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataFlowType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_FLOW; - } + @Override + public 
EntityType type() { + return EntityType.DATA_FLOW; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<DataFlow> objectClass() { - return DataFlow.class; - } + @Override + public Class<DataFlow> objectClass() { + return DataFlow.class; + } - @Override - public Class<DataFlowUpdateInput> inputClass() { - return DataFlowUpdateInput.class; - } - - @Override - public List<DataFetcherResult<DataFlow>> batchLoad(final List<String> urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dataFlowMap = - _entityClient.batchGetV2( - Constants.DATA_FLOW_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataFlowMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataFlow -> gmsDataFlow == null ? null : DataFetcherResult.<DataFlow>newResult() - .data(DataFlowMapper.map(gmsDataFlow)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Flows", e); - } - } + @Override + public Class<DataFlowUpdateInput> inputClass() { + return DataFlowUpdateInput.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dataFlow", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List<DataFetcherResult<DataFlow>> batchLoad( + final List<String> urnStrs, @Nonnull final QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> dataFlowMap = + _entityClient.batchGetV2( + Constants.DATA_FLOW_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataFlowMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataFlow -> + gmsDataFlow == null + ? 
null + : DataFetcherResult.<DataFlow>newResult() + .data(DataFlowMapper.map(gmsDataFlow)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Flows", e); } + } - @Override - public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataFlow", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataFlow update(@Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) throws Exception { + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataFlow", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DataFlowUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + DataFlowUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public DataFlow update( + @Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) + throws Exception { - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DataFlowUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
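+    // Editor's note, an illustrative aside rather than part of the original change:
+    // DisjunctivePrivilegeGroup is an OR across ConjunctivePrivilegeGroups, and each
+    // conjunctive group is an AND across its privilege types. With hypothetical privileges
+    // A and B, the group
+    //   new DisjunctivePrivilegeGroup(ImmutableList.of(
+    //       new ConjunctivePrivilegeGroup(ImmutableList.of(A)),
+    //       new ConjunctivePrivilegeGroup(ImmutableList.of(A, B))))
+    // is satisfied by an actor holding A alone, or holding both A and B.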
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 719fa9f0b2bf0..165fae81527ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -17,12 +19,12 @@ import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -38,120 +40,147 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataFlowMapper implements ModelMapper<EntityResponse, DataFlow> { - public static final DataFlowMapper INSTANCE = new DataFlowMapper(); - - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { - final DataFlow result = new DataFlow(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_FLOW); - Urn entityUrn = entityResponse.getUrn(); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<DataFlow> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> + public static final DataFlowMapper INSTANCE = new DataFlowMapper(); + + public static DataFlow map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataFlow apply(@Nonnull final EntityResponse entityResponse) { + final DataFlow result = new DataFlow(); + 
result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_FLOW); + Urn entityUrn = entityResponse.getUrn(); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<DataFlow> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); + mappingHelper.mapToResult( + DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } + return mappingHelper.getResult(); + } - private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final DataFlowKey gmsKey = new DataFlowKey(dataMap); - dataFlow.setOrchestrator(gmsKey.getOrchestrator()); - dataFlow.setFlowId(gmsKey.getFlowId()); - dataFlow.setCluster(gmsKey.getCluster()); - dataFlow.setPlatform(DataPlatform.builder() + 
private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final DataFlowKey gmsKey = new DataFlowKey(dataMap); + dataFlow.setOrchestrator(gmsKey.getOrchestrator()); + dataFlow.setFlowId(gmsKey.getFlowId()); + dataFlow.setCluster(gmsKey.getCluster()); + dataFlow.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getOrchestrator()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } - - private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = new com.linkedin.datajob.DataFlowInfo(dataMap); - dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); - dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); - } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} - */ - private DataFlowInfo mapDataFlowInfo(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowInfo result = new DataFlowInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getOrchestrator()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = + new com.linkedin.datajob.DataFlowInfo(dataMap); + dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); + dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} + */ + private DataFlowInfo mapDataFlowInfo( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowInfo result = new DataFlowInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} - */ - private DataFlowProperties mapDataFlowInfoToProperties(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowProperties result = new DataFlowProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(dataMap); - final 
DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); - dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); - dataFlow.setEditableProperties(dataFlowEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} + */ + private DataFlowProperties mapDataFlowInfoToProperties( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowProperties result = new DataFlowProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - private void mapGlobalTags(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataFlow.setGlobalTags(globalTags); - dataFlow.setTags(globalTags); - } - - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } + return result; + } + + private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(dataMap); + final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); + dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); + dataFlow.setEditableProperties(dataFlowEditableProperties); + } + + private void mapGlobalTags( + @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataFlow.setGlobalTags(globalTags); + dataFlow.setTags(globalTags); + } + + private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
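+    // Editor's note, illustrative only: the domains aspect may carry several domain urns,
+    // e.g. [urn:li:domain:marketing, urn:li:domain:engineering] (hypothetical values), but
+    // the GraphQL DataFlow exposes a single association, so only the first entry is mapped.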
+ dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java index c966fc8338ed4..87579a15d586e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,22 +19,18 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowUpdateInputMapper implements InputModelMapper<DataFlowUpdateInput, - Collection<MetadataChangeProposal>, Urn> { +public class DataFlowUpdateInputMapper + implements InputModelMapper<DataFlowUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper(); - public static Collection<MetadataChangeProposal> map(@Nonnull final DataFlowUpdateInput dataFlowUpdateInput, - @Nonnull final Urn actor) { + public static Collection<MetadataChangeProposal> map( + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(dataFlowUpdateInput, actor); } @Override public Collection<MetadataChangeProposal> apply( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -41,7 +39,8 @@ public Collection<MetadataChangeProposal> apply( if (dataFlowUpdateInput.getOwnership() != null) { proposals.add( - updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -50,28 +49,29 @@ public Collection<MetadataChangeProposal> apply( if (dataFlowUpdateInput.getGlobalTags() != null) { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getGlobalTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (dataFlowUpdateInput.getEditableProperties() != null) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(); - 
editableDataFlowProperties.setDescription(dataFlowUpdateInput.getEditableProperties().getDescription()); + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(); + editableDataFlowProperties.setDescription( + dataFlowUpdateInput.getEditableProperties().getDescription()); editableDataFlowProperties.setCreated(auditStamp); editableDataFlowProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataFlowProperties, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataFlowProperties, EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index f6f37978bb36a..6e71584007504 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.datajob; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataJobType + implements SearchableEntityType<DataJob, String>, + BrowsableEntityType<DataJob, String>, + MutableType<DataJobUpdateInput, DataJob> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_JOB_KEY_ASPECT_NAME, + DATA_JOB_INFO_ASPECT_NAME, + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow"); + private final 
EntityClient _entityClient; -public class DataJobType implements SearchableEntityType<DataJob, String>, BrowsableEntityType<DataJob, String>, - MutableType<DataJobUpdateInput, DataJob> { - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_JOB_KEY_ASPECT_NAME, - DATA_JOB_INFO_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow"); - private final EntityClient _entityClient; - - public DataJobType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataJobType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_JOB; - } + @Override + public EntityType type() { + return EntityType.DATA_JOB; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<DataJob> objectClass() { - return DataJob.class; - } + @Override + public Class<DataJob> objectClass() { + return DataJob.class; + } - @Override - public Class<DataJobUpdateInput> inputClass() { - return DataJobUpdateInput.class; - } + @Override + public Class<DataJobUpdateInput> inputClass() { + return DataJobUpdateInput.class; + } - @Override - public List<DataFetcherResult<DataJob>> batchLoad(final List<String> urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dataJobMap = _entityClient.batchGetV2( - Constants.DATA_JOB_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataJobMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataJob -> gmsDataJob == null ? 
null : DataFetcherResult.<DataJob>newResult() - .data(DataJobMapper.map(gmsDataJob)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Jobs", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "dataJob", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List<DataFetcherResult<DataJob>> batchLoad( + final List<String> urnStrs, @Nonnull final QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> dataJobMap = + _entityClient.batchGetV2( + Constants.DATA_JOB_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataJobMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataJob -> + gmsDataJob == null + ? null + : DataFetcherResult.<DataJob>newResult() + .data(DataJobMapper.map(gmsDataJob)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Jobs", e); } + } - @Override - public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataJob", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataJob update(@Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DataJobUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataJob", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public DataJob update( + @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DataJobUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
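+    // Editor's note, illustrative only: when this group reaches
+    // AuthorizationUtils.isAuthorized above, EDIT_ENTITY alone is sufficient; otherwise the
+    // actor must hold every specific privilege accumulated for the fields being updated.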
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 61802ad9cfe5c..0d0e7a613c8d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -21,13 +23,13 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.FineGrainedLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -42,143 +44,164 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataJobMapper implements ModelMapper<EntityResponse, DataJob> { - public static final DataJobMapper INSTANCE = new DataJobMapper(); + public static final DataJobMapper INSTANCE = new DataJobMapper(); - public static DataJob map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static DataJob map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public DataJob apply(@Nonnull final EntityResponse entityResponse) { - final DataJob result = new DataJob(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public DataJob apply(@Nonnull final EntityResponse entityResponse) { + final DataJob result = new DataJob(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_JOB); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_JOB); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - entityResponse.getAspects().forEach((name, aspect) -> { - DataMap data = aspect.getValue().data(); - if 
(DATA_JOB_KEY_ASPECT_NAME.equals(name)) { + entityResponse + .getAspects() + .forEach( + (name, aspect) -> { + DataMap data = aspect.getValue().data(); + if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow(new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); result.setJobId(gmsKey.getJobId()); - } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = new com.linkedin.datajob.DataJobInfo(data); + } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = + new com.linkedin.datajob.DataJobInfo(data); result.setInfo(mapDataJobInfo(gmsDataJobInfo, entityUrn)); result.setProperties(mapDataJobInfoToProperties(gmsDataJobInfo, entityUrn)); - } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = new com.linkedin.datajob.DataJobInputOutput(data); + } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = + new com.linkedin.datajob.DataJobInputOutput(data); result.setInputOutput(mapDataJobInputOutput(gmsDataJobInputOutput)); - } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(data); - final DataJobEditableProperties dataJobEditableProperties = new DataJobEditableProperties(); - dataJobEditableProperties.setDescription(editableDataJobProperties.getDescription()); + } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { + final EditableDataJobProperties editableDataJobProperties = + new EditableDataJobProperties(data); + final DataJobEditableProperties dataJobEditableProperties = + new DataJobEditableProperties(); + dataJobEditableProperties.setDescription( + editableDataJobProperties.getDescription()); result.setEditableProperties(dataJobEditableProperties); - } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { + } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); - } else if (STATUS_ASPECT_NAME.equals(name)) { + } else if (STATUS_ASPECT_NAME.equals(name)) { result.setStatus(StatusMapper.map(new Status(data))); - } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); - } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); - } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); - } else if (DOMAINS_ASPECT_NAME.equals(name)) { + } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); + } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); + } else 
if (DOMAINS_ASPECT_NAME.equals(name)) { final Domains domains = new Domains(data); // Currently we only take the first domain if it exists. result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); - } else if (DEPRECATION_ASPECT_NAME.equals(name)) { + } else if (DEPRECATION_ASPECT_NAME.equals(name)) { result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); - } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); - } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { + } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); - } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); - } - }); - - return result; + } + }); + + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ + private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobInfo result = new DataJobInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} - */ - private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobInfo result = new DataJobInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} - */ - private DataJobProperties mapDataJobInfoToProperties(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobProperties result = new DataJobProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} */ + private DataJobProperties mapDataJobInfoToProperties( + final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobProperties result = new DataJobProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private 
DataJobInputOutput mapDataJobInputOutput( + final com.linkedin.datajob.DataJobInputOutput inputOutput) { + final DataJobInputOutput result = new DataJobInputOutput(); + if (inputOutput.hasInputDatasets()) { + result.setInputDatasets( + inputOutput.getInputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatasets(ImmutableList.of()); + } + if (inputOutput.hasOutputDatasets()) { + result.setOutputDatasets( + inputOutput.getOutputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setOutputDatasets(ImmutableList.of()); + } + if (inputOutput.hasInputDatajobs()) { + result.setInputDatajobs( + inputOutput.getInputDatajobs().stream() + .map( + urn -> { + final DataJob dataJob = new DataJob(); + dataJob.setUrn(urn.toString()); + return dataJob; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatajobs(ImmutableList.of()); } - private DataJobInputOutput mapDataJobInputOutput(final com.linkedin.datajob.DataJobInputOutput inputOutput) { - final DataJobInputOutput result = new DataJobInputOutput(); - if (inputOutput.hasInputDatasets()) { - result.setInputDatasets(inputOutput.getInputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setInputDatasets(ImmutableList.of()); - } - if (inputOutput.hasOutputDatasets()) { - result.setOutputDatasets(inputOutput.getOutputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setOutputDatasets(ImmutableList.of()); - } - if (inputOutput.hasInputDatajobs()) { - result.setInputDatajobs(inputOutput.getInputDatajobs().stream().map(urn -> { - final DataJob dataJob = new DataJob(); - dataJob.setUrn(urn.toString()); - return dataJob; - }).collect(Collectors.toList())); - } else { - result.setInputDatajobs(ImmutableList.of()); - } - - if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { - result.setFineGrainedLineages(FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); - } - - return result; + if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { + result.setFineGrainedLineages( + FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b075c42d411fb..b0f299e00b4ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,63 +19,61 @@ import java.util.stream.Collectors; import 
javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobUpdateInputMapper implements InputModelMapper<DataJobUpdateInput, Collection<MetadataChangeProposal>, Urn> { - public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); +public class DataJobUpdateInputMapper + implements InputModelMapper<DataJobUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); - public static Collection<MetadataChangeProposal> map( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dataJobUpdateInput, actor); - } - - @Override - public Collection<MetadataChangeProposal> apply( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); + public static Collection<MetadataChangeProposal> map( + @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(dataJobUpdateInput, actor); + } - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); + @Override + public Collection<MetadataChangeProposal> apply( + @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); - if (dataJobUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dataJobUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } else { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dataJobUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dataJobUpdateInput.getEditableProperties() != null) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); - editableDataJobProperties.setDescription(dataJobUpdateInput.getEditableProperties().getDescription()); - editableDataJobProperties.setCreated(auditStamp); - editableDataJobProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataJobProperties, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); - } + if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { + final GlobalTags 
globalTags = new GlobalTags(); + if (dataJobUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getGlobalTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); + } else { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dataJobUpdateInput.getEditableProperties() != null) { + final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); + editableDataJobProperties.setDescription( + dataJobUpdateInput.getEditableProperties().getDescription()); + editableDataJobProperties.setCreated(auditStamp); + editableDataJobProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataJobProperties, EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); } + + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java index 57a035d136645..567d275dbee0a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatform; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,56 +19,60 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformType implements EntityType<DataPlatform, String> { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public DataPlatformType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataPlatformType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public Class<DataPlatform> objectClass() { - return DataPlatform.class; - } + @Override + public Class<DataPlatform> objectClass() { + return DataPlatform.class; + } - @Override - public List<DataFetcherResult<DataPlatform>> batchLoad(final List<String> urns, final QueryContext context) { + @Override + public List<DataFetcherResult<DataPlatform>> batchLoad( + final List<String> urns, final QueryContext context) { - final List<Urn> dataPlatformUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final List<Urn> dataPlatformUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dataPlatformMap = _entityClient.batchGetV2( - DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), null, context.getAuthentication()); + try { + final Map<Urn, EntityResponse> dataPlatformMap = + _entityClient.batchGetV2( + DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + null, + context.getAuthentication()); - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformUrns) { - gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); - } + final List<EntityResponse> 
gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformUrns) { + gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); + } - return gmsResults.stream() - .map(gmsPlatform -> gmsPlatform == null ? null - : DataFetcherResult.<DataPlatform>newResult() - .data(DataPlatformMapper.map(gmsPlatform)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Platforms", e); - } + return gmsResults.stream() + .map( + gmsPlatform -> + gmsPlatform == null + ? null + : DataFetcherResult.<DataPlatform>newResult() + .data(DataPlatformMapper.map(gmsPlatform)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Platforms", e); } + } - @Override - public com.linkedin.datahub.graphql.generated.EntityType type() { - return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; - } + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java index 011fb83cddb33..c2dc3bfabd07c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java @@ -6,25 +6,27 @@ import javax.annotation.Nonnull; @Deprecated -public class DataPlatformInfoMapper implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformInfo> { +public class DataPlatformInfoMapper + implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformInfo> { - public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); + public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); - public static DataPlatformInfo map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); - } + public static DataPlatformInfo map( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(platform); + } - @Override - public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformInfo result = new DataPlatformInfo(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.hasDisplayName()) { - result.setDisplayName(input.getDisplayName()); - } - if (input.hasLogoUrl()) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + @Override + public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformInfo result = new DataPlatformInfo(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.hasDisplayName()) { + result.setDisplayName(input.getDisplayName()); + } + if (input.hasLogoUrl()) { + 
result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java index 8df44e8f6e9e9..f7078f9f37d7c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; @@ -13,36 +15,40 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformMapper implements ModelMapper<EntityResponse, DataPlatform> { - public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); - - public static DataPlatform map(@Nonnull final EntityResponse platform) { - return INSTANCE.apply(platform); - } - - @Override - public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { - final DataPlatform result = new DataPlatform(); - final DataPlatformKey dataPlatformKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKeyInternal(entityResponse.getUrn(), - new DataPlatformKey().schema()); - result.setType(EntityType.DATA_PLATFORM); - Urn urn = entityResponse.getUrn(); - result.setUrn(urn.toString()); - result.setName(dataPlatformKey.getPlatformName()); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<DataPlatform> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PLATFORM_KEY_ASPECT_NAME, (dataPlatform, dataMap) -> + public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); + + public static DataPlatform map(@Nonnull final EntityResponse platform) { + return INSTANCE.apply(platform); + } + + @Override + public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { + final DataPlatform result = new DataPlatform(); + final DataPlatformKey dataPlatformKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + entityResponse.getUrn(), new DataPlatformKey().schema()); + result.setType(EntityType.DATA_PLATFORM); + Urn urn = entityResponse.getUrn(); + result.setUrn(urn.toString()); + result.setName(dataPlatformKey.getPlatformName()); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<DataPlatform> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PLATFORM_KEY_ASPECT_NAME, + (dataPlatform, dataMap) -> dataPlatform.setName(new DataPlatformKey(dataMap).getPlatformName())); - mappingHelper.mapToResult(DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) -> - dataPlatform.setProperties(DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + DATA_PLATFORM_INFO_ASPECT_NAME, + 
(dataPlatform, dataMap) -> + dataPlatform.setProperties( + DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); + return mappingHelper.getResult(); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java index c0a236dc1a402..ad6de5505bed6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java @@ -5,27 +5,28 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +public class DataPlatformPropertiesMapper + implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformProperties> { -public class DataPlatformPropertiesMapper implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformProperties> { + public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper(); - public static final DataPlatformPropertiesMapper - INSTANCE = new DataPlatformPropertiesMapper(); + public static DataPlatformProperties map( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(platform); + } - public static DataPlatformProperties map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + @Override + public DataPlatformProperties apply( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformProperties result = new DataPlatformProperties(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public DataPlatformProperties apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformProperties result = new DataPlatformProperties(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getLogoUrl() != null) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + if (input.getLogoUrl() != null) { + result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index 87614e1332528..6519a493f3991 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,19 +12,15 @@ import 
com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.datahub.graphql.types.dataplatforminstance.mappers.DataPlatformInstanceMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; -import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,90 +28,100 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; -import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; - -public class DataPlatformInstanceType implements SearchableEntityType<DataPlatformInstance, String>, +public class DataPlatformInstanceType + implements SearchableEntityType<DataPlatformInstance, String>, com.linkedin.datahub.graphql.types.EntityType<DataPlatformInstance, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME - ); - private final EntityClient _entityClient; - - public DataPlatformInstanceType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.DATA_PLATFORM_INSTANCE; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<DataPlatformInstance> objectClass() { - return DataPlatformInstance.class; + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME); + private final EntityClient _entityClient; + + public DataPlatformInstanceType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.DATA_PLATFORM_INSTANCE; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<DataPlatformInstance> objectClass() { + return DataPlatformInstance.class; + } + + @Override + public List<DataFetcherResult<DataPlatformInstance>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> dataPlatformInstanceUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + 
Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + new HashSet<>(dataPlatformInstanceUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<DataPlatformInstance>newResult() + .data(DataPlatformInstanceMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + + } catch (Exception e) { + throw new RuntimeException("Failed to batch load DataPlatformInstance", e); } - - @Override - public List<DataFetcherResult<DataPlatformInstance>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> dataPlatformInstanceUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, - new HashSet<>(dataPlatformInstanceUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<DataPlatformInstance>newResult() - .data(DataPlatformInstanceMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - - } catch (Exception e) { - throw new RuntimeException("Failed to batch load DataPlatformInstance", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, - filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java index ba49f23133f9e..1a2bd0488c4bd 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance.mappers; -import com.linkedin.common.Ownership; +import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; +import com.linkedin.common.Ownership; import com.linkedin.common.Status; -import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; -import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; -import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; +import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataPlatformInstanceKey; - import javax.annotation.Nonnull; public class DataPlatformInstanceMapper { @@ -41,65 +40,75 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) final EnvelopedAspectMap aspects = entityResponse.getAspects(); MappingHelper<DataPlatformInstance> mappingHelper = new MappingHelper<>(aspects, result); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - this::mapDataPlatformInstanceKey - ); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.OWNERSHIP_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.GLOBAL_TAGS_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.STATUS_ASPECT_NAME, 
- (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap))) - ); - mappingHelper.mapToResult(Constants.DEPRECATION_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))) - ); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, this::mapDataPlatformInstanceKey); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.OWNERSHIP_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setOwnership( + OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.GLOBAL_TAGS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.STATUS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + Constants.DEPRECATION_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } - private void mapDataPlatformInstanceKey(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { + private void mapDataPlatformInstanceKey( + @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { final DataPlatformInstanceKey gmsKey = new DataPlatformInstanceKey(dataMap); - dataPlatformInstance.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()) - .build()); + dataPlatformInstance.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); dataPlatformInstance.setInstanceId(gmsKey.getInstance()); } private void mapDataPlatformInstanceProperties( - @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn - ) { - final DataPlatformInstanceProperties gmsProperties = new DataPlatformInstanceProperties(dataMap); + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + final DataPlatformInstanceProperties gmsProperties = + new DataPlatformInstanceProperties(dataMap); final com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties properties = - new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); + new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); properties.setName(gmsProperties.getName()); properties.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } 
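The aspect-mapping convention visible in this hunk — a MappingHelper that applies a per-aspect lambda only when that aspect is present in the envelope — reduces to a small dispatch table. Below is a minimal, self-contained sketch of that shape; every type, field, and aspect name in it is an illustrative placeholder, not the actual DataHub class.

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.function.BiConsumer;

    // Minimal stand-in for the aspect-dispatch pattern: each aspect name maps to a
    // lambda that copies fields from the raw payload onto the result object.
    // All names here are simplified placeholders, not the real DataHub types.
    final class AspectMappingSketch {
      static final class Result {
        String name;
        String description;
      }

      public static void main(String[] args) {
        // Raw aspects as they might arrive in an envelope, flattened to strings.
        Map<String, String> aspects = new LinkedHashMap<>();
        aspects.put("platformInstanceKey", "instance-1");
        aspects.put("platformInstanceProperties", "primary warehouse");

        // Registry of per-aspect mappers, mirroring the mapToResult(...) calls above.
        Map<String, BiConsumer<Result, String>> mappers = new LinkedHashMap<>();
        mappers.put("platformInstanceKey", (r, v) -> r.name = v);
        mappers.put("platformInstanceProperties", (r, v) -> r.description = v);

        Result result = new Result();
        // Apply a mapper only when its aspect is actually present, as MappingHelper does.
        mappers.forEach((aspect, fn) -> {
          String value = aspects.get(aspect);
          if (value != null) {
            fn.accept(result, value);
          }
        });
        System.out.println(result.name + " / " + result.description);
      }
    }

Absent aspects simply leave their target fields untouched, which is why the mappers above never need null checks of their own.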
dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index ee014f9f66571..48a0cb984862d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.DataProcessInstance; @@ -12,43 +14,43 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class DataProcessInstanceMapper implements ModelMapper<EntityResponse, DataProcessInstance> { - public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); - - public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); + + public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { + final DataProcessInstance result = new DataProcessInstance(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_PROCESS_INSTANCE); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<DataProcessInstance> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + + return mappingHelper.getResult(); + } + + private void mapDataProcessProperties( + @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + DataProcessInstanceProperties dataProcessInstanceProperties = + new DataProcessInstanceProperties(dataMap); + dpi.setName(dataProcessInstanceProperties.getName()); + if (dataProcessInstanceProperties.hasCreated()) { + dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); } - - @Override - public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { - final DataProcessInstance result = new 
DataProcessInstance(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_PROCESS_INSTANCE); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<DataProcessInstance> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); - - return mappingHelper.getResult(); - } - - private void mapDataProcessProperties(@Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { - DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); - dpi.setName(dataProcessInstanceProperties.getName()); - if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); - } - if (dataProcessInstanceProperties.hasExternalUrl()) { - dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); - } + if (dataProcessInstanceProperties.hasExternalUrl()) { + dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index ca9a77f7e45cb..fd60711e8c569 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -6,36 +6,41 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - -public class DataProcessInstanceRunEventMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DataProcessRunEvent> { - - public static final DataProcessInstanceRunEventMapper INSTANCE = new DataProcessInstanceRunEventMapper(); - - public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); +public class DataProcessInstanceRunEventMapper + implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DataProcessRunEvent> { + + public static final DataProcessInstanceRunEventMapper INSTANCE = + new DataProcessInstanceRunEventMapper(); + + public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + DataProcessInstanceRunEvent runEvent = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + DataProcessInstanceRunEvent.class); + + final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = + new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); + + result.setTimestampMillis(runEvent.getTimestampMillis()); + result.setAttempt(runEvent.getAttempt()); + if (runEvent.hasStatus()) { + result.setStatus( + com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf( + runEvent.getStatus().toString())); } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessRunEvent 
apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - DataProcessInstanceRunEvent runEvent = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - DataProcessInstanceRunEvent.class); - - final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = - new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); - - result.setTimestampMillis(runEvent.getTimestampMillis()); - result.setAttempt(runEvent.getAttempt()); - if (runEvent.hasStatus()) { - result.setStatus(com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf(runEvent.getStatus().toString())); - } - if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); - } - - return result; + if (runEvent.hasResult()) { + result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 91b03eea2745f..422bea73925a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -5,30 +5,34 @@ import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +public class DataProcessInstanceRunResultMapper + implements ModelMapper< + DataProcessInstanceRunResult, + com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { -public class DataProcessInstanceRunResultMapper implements ModelMapper< - DataProcessInstanceRunResult, com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { + public static final DataProcessInstanceRunResultMapper INSTANCE = + new DataProcessInstanceRunResultMapper(); - public static final DataProcessInstanceRunResultMapper INSTANCE = new DataProcessInstanceRunResultMapper(); + public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( + @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(input); + } - public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map(@Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); - } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply(@Nonnull final DataProcessInstanceRunResult input) { - - final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = - new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); + @Override + public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( + @Nonnull final DataProcessInstanceRunResult input) { - if (input.hasType()) { - result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); - } + final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = + new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); - if (input.hasNativeResultType()) { - result.setNativeResultType(input.getNativeResultType()); - } + if (input.hasType()) { + 
result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); + } - return result; + if (input.hasNativeResultType()) { + result.setNativeResultType(input.getNativeResultType()); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index eb8ca23f00b37..766f6937ce3e2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -1,5 +1,13 @@ package com.linkedin.datahub.graphql.types.dataproduct; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,11 +26,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import lombok.RequiredArgsConstructor; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,26 +33,23 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import org.apache.commons.lang3.NotImplementedException; @RequiredArgsConstructor -public class DataProductType implements SearchableEntityType<DataProduct, String>, - com.linkedin.datahub.graphql.types.EntityType<DataProduct, String> { - public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME - ); +public class DataProductType + implements SearchableEntityType<DataProduct, String>, + com.linkedin.datahub.graphql.types.EntityType<DataProduct, String> { + public static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; @Override 
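Every entity type touched by this change shares the same batch-load shape: resolve the requested URNs, batch-fetch a map of responses, then re-emit one result per input key in request order, with nulls for misses, because GraphQL data loaders require positional alignment with the request. A minimal sketch of that contract, with plain strings standing in for the real Urn and EntityResponse types:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch of the order-preserving batch-load shape used by these types.
    // One result per requested key, in request order; misses become nulls
    // rather than being dropped. All names here are illustrative.
    final class BatchLoadSketch {
      static List<String> batchLoad(List<String> urns, Map<String, String> fetched) {
        List<String> results = new ArrayList<>(urns.size());
        for (String urn : urns) {
          // getOrDefault(urn, null) keeps each slot aligned with the input list.
          results.add(fetched.getOrDefault(urn, null));
        }
        return results;
      }

      public static void main(String[] args) {
        Map<String, String> fetched = new HashMap<>();
        fetched.put("urn:li:dataProduct:a", "Product A");
        // "urn:li:dataProduct:b" is missing, so its slot comes back null.
        System.out.println(
            batchLoad(List.of("urn:li:dataProduct:a", "urn:li:dataProduct:b"), fetched));
      }
    }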
@@ -68,13 +68,17 @@ public Class<DataProduct> objectClass() { } @Override - public List<DataFetcherResult<DataProduct>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { - final List<Urn> dataProductUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List<DataFetcherResult<DataProduct>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> dataProductUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(DATA_PRODUCT_ENTITY_NAME, new HashSet<>(dataProductUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATA_PRODUCT_ENTITY_NAME, + new HashSet<>(dataProductUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -82,8 +86,13 @@ public List<DataFetcherResult<DataProduct>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataProduct>newResult().data(DataProductMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<DataProduct>newResult() + .data(DataProductMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); @@ -91,22 +100,28 @@ public List<DataFetcherResult<DataProduct>> batchLoad(@Nonnull List<String> urns } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List<FacetFilterInput> filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Data Product entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Data Product entity type"); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 254b43ecb96cc..8039ea08dc722 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import 
static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -21,17 +28,8 @@ import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - - public class DataProductMapper implements ModelMapper<EntityResponse, DataProduct> { public static final DataProductMapper INSTANCE = new DataProductMapper(); @@ -50,27 +48,44 @@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<DataProduct> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, (dataProduct, dataMap) -> - mapDataProductProperties(dataProduct, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setDomain(DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + (dataProduct, dataMap) -> mapDataProductProperties(dataProduct, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + DOMAINS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setDomain( + DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); 
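The mapper classes in this change all follow the same stateless-singleton convention: a shared INSTANCE plus a static map(...) entry point that delegates to apply(...). A minimal sketch of that convention, with placeholder Input/Output records standing in for the Pegasus and GraphQL types:

    import java.util.function.Function;

    // Sketch of the stateless-singleton mapper convention these classes follow:
    // a shared INSTANCE plus a static map(...) entry point delegating to apply(...).
    // Input and Output are placeholders, not the real Pegasus/GraphQL types.
    final class ModelMapperSketch {
      record Input(String raw) {}

      record Output(String pretty) {}

      static final class InputToOutputMapper implements Function<Input, Output> {
        static final InputToOutputMapper INSTANCE = new InputToOutputMapper();

        static Output map(Input input) {
          return INSTANCE.apply(input);
        }

        @Override
        public Output apply(Input input) {
          return new Output(input.raw().toUpperCase());
        }
      }

      public static void main(String[] args) {
        System.out.println(InputToOutputMapper.map(new Input("data product")).pretty());
      }
    }

Because the mappers hold no state, a single shared instance is safe to call from any thread, and the static map(...) keeps call sites terse.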
return result; } - private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { + private void mapDataProductProperties( + @Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { DataProductProperties dataProductProperties = new DataProductProperties(dataMap); - com.linkedin.datahub.graphql.generated.DataProductProperties properties = new com.linkedin.datahub.graphql.generated.DataProductProperties(); + com.linkedin.datahub.graphql.generated.DataProductProperties properties = + new com.linkedin.datahub.graphql.generated.DataProductProperties(); - final String name = dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); + final String name = + dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); properties.setName(name); properties.setDescription(dataProductProperties.getDescription()); if (dataProductProperties.hasExternalUrl()) { @@ -81,7 +96,9 @@ private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull } else { properties.setNumAssets(0); } - properties.setCustomProperties(CustomPropertiesMapper.map(dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); + properties.setCustomProperties( + CustomPropertiesMapper.map( + dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); dataProduct.setProperties(properties); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 6f339d3985133..badb24810c82b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -8,19 +13,17 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; -import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; -import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import 
com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.BrowsableEntityType; @@ -37,13 +40,12 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -56,235 +58,266 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetType implements SearchableEntityType<Dataset, String>, BrowsableEntityType<Dataset, String>, +public class DatasetType + implements SearchableEntityType<Dataset, String>, + BrowsableEntityType<Dataset, String>, BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> { - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - - private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private static final String ENTITY_NAME = "dataset"; - - private final EntityClient _entityClient; - - public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class<Dataset> objectClass() { - return Dataset.class; + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + + private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private static final String ENTITY_NAME = "dataset"; + + private final EntityClient _entityClient; + + public DatasetType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<Dataset> objectClass() { + return Dataset.class; + } + + @Override + public Class<DatasetUpdateInput> inputClass() { + return DatasetUpdateInput.class; + } + + @Override + public Class<BatchDatasetUpdateInput[]> batchInputClass() { + return BatchDatasetUpdateInput[].class; + } + + @Override + public EntityType type() { + return EntityType.DATASET; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<Dataset>> batchLoad( + @Nonnull final List<String> urnStrs, @Nonnull final QueryContext context) { + try { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map<Urn, EntityResponse> datasetMap = + _entityClient.batchGetV2( + Constants.DATASET_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(datasetMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.<Dataset>newResult() + .data(DatasetMapper.map(gmsDataset)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); } - - @Override - public Class<DatasetUpdateInput> inputClass() { - return DatasetUpdateInput.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataset", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public List<Dataset> batchUpdate( + @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { + final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); + + final Collection<MetadataChangeProposal> proposals = + Arrays.stream(input) + .map( + updateInput -> { + if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { + Collection<MetadataChangeProposal> datasetProposals = + DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); + datasetProposals.forEach( + proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); + return datasetProposals; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + + final List<String> urns = + Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } - @Override - public Class<BatchDatasetUpdateInput[]> batchInputClass() { - return BatchDatasetUpdateInput[].class; + return batchLoad(urns, context).stream() + .map(DataFetcherResult::getData) + .collect(Collectors.toList()); + } + + @Override + public Dataset update( + @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DatasetUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - @Override - public EntityType type() { - return EntityType.DATASET; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getInstitutionalMemory() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - @Override - public List<DataFetcherResult<Dataset>> batchLoad(@Nonnull final List<String> urnStrs, - @Nonnull final QueryContext context) { - try { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map<Urn, EntityResponse> datasetMap = - _entityClient.batchGetV2( - Constants.DATASET_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(datasetMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? 
null : DataFetcherResult.<Dataset>newResult() - .data(DatasetMapper.map(gmsDataset)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (updateInput.getDeprecation() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "dataset", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); + if (updateInput.getEditableSchemaMetadata() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public List<Dataset> batchUpdate(@Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); - - final Collection<MetadataChangeProposal> proposals = Arrays.stream(input).map(updateInput -> { - if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { - Collection<MetadataChangeProposal> datasetProposals = DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); - datasetProposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); - return datasetProposals; - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }).flatMap(Collection::stream).collect(Collectors.toList()); - - final List<String> urns = Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); - } - - return batchLoad(urns, context).stream().map(DataFetcherResult::getData).collect(Collectors.toList()); - } - - @Override - public Dataset update(@Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DatasetUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorized(@Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getInstitutionalMemory() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); - } - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDeprecation() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - if (updateInput.getEditableSchemaMetadata() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
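The comment above captures the rule that both the old and the reformatted code implement: the caller is authorized if it holds either the blanket EDIT_ENTITY privilege or every privilege specific to the aspects being updated. A minimal sketch of that OR-of-ANDs evaluation, using plain collections in place of the real ConjunctivePrivilegeGroup/DisjunctivePrivilegeGroup and AuthorizationUtils types (the class and privilege names below are illustrative, not DataHub API):

import java.util.List;
import java.util.Set;

class PrivilegeCheckSketch {
  // Authorized when ANY conjunctive group has ALL of its privileges granted.
  static boolean isAuthorized(List<List<String>> orOfAnds, Set<String> granted) {
    return orOfAnds.stream().anyMatch(granted::containsAll);
  }

  public static void main(String[] args) {
    List<List<String>> groups =
        List.of(
            List.of("EDIT_ENTITY"),                             // all-entity privileges group
            List.of("EDIT_ENTITY_OWNERS", "EDIT_ENTITY_DOCS")); // specific privileges group
    System.out.println(isAuthorized(groups, Set.of("EDIT_ENTITY")));        // true
    System.out.println(isAuthorized(groups, Set.of("EDIT_ENTITY_OWNERS"))); // false
  }
}

Holding EDIT_ENTITY alone passes via the first group; holding only one of the two field-specific privileges fails both groups, matching the behavior of the check in getAuthorizedPrivileges above.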
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java index e1aa580276a50..676617bfa2f90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.types.dataset; import com.linkedin.common.urn.DatasetUrn; - import java.net.URISyntaxException; public class DatasetUtils { - private DatasetUtils() { } + private DatasetUtils() {} - static DatasetUrn getDatasetUrn(String urnStr) { - try { - return DatasetUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); - } + static DatasetUrn getDatasetUrn(String urnStr) { + try { + return DatasetUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index e620bfb30b6b7..df019cc5df8fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -22,32 +24,30 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class VersionedDatasetType implements com.linkedin.datahub.graphql.types.EntityType<VersionedDataset, VersionedUrn> { +public class VersionedDatasetType + implements com.linkedin.datahub.graphql.types.EntityType<VersionedDataset, VersionedUrn> { - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
- DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME - ); + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. + DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME); private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; @@ -74,8 +74,8 @@ public Function<Entity, VersionedUrn> getKeyProvider() { } @Override - public List<DataFetcherResult<VersionedDataset>> batchLoad(@Nonnull final List<VersionedUrn> versionedUrns, - @Nonnull final QueryContext context) { + public List<DataFetcherResult<VersionedDataset>> batchLoad( + @Nonnull final List<VersionedUrn> versionedUrns, @Nonnull final QueryContext context) { try { final Map<Urn, EntityResponse> datasetMap = _entityClient.batchGetVersionedV2( @@ -89,9 +89,13 @@ public List<DataFetcherResult<VersionedDataset>> batchLoad(@Nonnull final List<V gmsResults.add(datasetMap.getOrDefault(versionedUrn.getUrn(), null)); } return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? null : DataFetcherResult.<VersionedDataset>newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) - .build()) + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.<VersionedDataset>newResult() + .data(VersionedDatasetMapper.map(gmsDataset)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Datasets", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 0ec9bed0c8511..5fe7815ea2f8d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class AssertionRunEventMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.AssertionRunEvent> { @@ -29,8 +28,10 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( @Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), AssertionRunEvent.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + AssertionRunEvent.class); final com.linkedin.datahub.graphql.generated.AssertionRunEvent assertionRunEvent = new com.linkedin.datahub.graphql.generated.AssertionRunEvent(); @@ -39,7 +40,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setAssertionUrn(gmsAssertionRunEvent.getAssertionUrn().toString()); assertionRunEvent.setAsserteeUrn(gmsAssertionRunEvent.getAsserteeUrn().toString()); assertionRunEvent.setRunId(gmsAssertionRunEvent.getRunId()); - assertionRunEvent.setStatus(AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); + assertionRunEvent.setStatus( + AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); } @@ -50,7 +52,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); } if (gmsAssertionRunEvent.hasRuntimeContext()) { - assertionRunEvent.setRuntimeContext(StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + assertionRunEvent.setRuntimeContext( + StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1adcea7e53dc2..1644e0243a181 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -2,24 +2,25 @@ import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class DatasetDeprecationMapper implements 
ModelMapper<com.linkedin.dataset.DatasetDeprecation, Deprecation> { +public class DatasetDeprecationMapper + implements ModelMapper<com.linkedin.dataset.DatasetDeprecation, Deprecation> { - public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); + public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map( + @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { + return INSTANCE.apply(deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 3e39c14c29ede..8296bc8244995 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Access; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -22,6 +24,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; @@ -29,15 +32,14 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,155 +55,196 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ @Slf4j public class DatasetMapper implements ModelMapper<EntityResponse, Dataset> { - public static final DatasetMapper INSTANCE = new DatasetMapper(); - - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); - } - - public Dataset apply(@Nonnull final EntityResponse entityResponse) { - Dataset result = new Dataset(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATASET); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<Dataset> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + public static final DatasetMapper INSTANCE = new DatasetMapper(); + + public static Dataset map(@Nonnull final EntityResponse dataset) { + return INSTANCE.apply(dataset); + } + + public Dataset apply(@Nonnull final EntityResponse entityResponse) { + Dataset result = new Dataset(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATASET); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<Dataset> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); - mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, 
this::mapEditableDatasetProperties); + mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(SIBLINGS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); - mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + SIBLINGS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + mappingHelper.mapToResult( + UPSTREAM_LINEAGE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setFineGrainedLineages( + UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); + mappingHelper.mapToResult( + 
EMBED_ASPECT_NAME, + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataset, dataMap) -> dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> - dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final DatasetKey gmsKey = new DatasetKey(dataMap); - dataset.setName(gmsKey.getName()); - dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() + mappingHelper.mapToResult( + ACCESS_DATASET_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } + + private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final DatasetKey gmsKey = new DatasetKey(dataMap); + dataset.setName(gmsKey.getName()); + dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); + dataset.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + .setUrn(gmsKey.getPlatform().toString()) + .build()); + } + + private void mapDatasetProperties( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final DatasetProperties gmsProperties = new DatasetProperties(dataMap); + final com.linkedin.datahub.graphql.generated.DatasetProperties properties = + new com.linkedin.datahub.graphql.generated.DatasetProperties(); + properties.setDescription(gmsProperties.getDescription()); + dataset.setDescription(gmsProperties.getDescription()); + properties.setOrigin(dataset.getOrigin()); + if (gmsProperties.getExternalUrl() != null) { + properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - - private void mapDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final DatasetProperties gmsProperties = new DatasetProperties(dataMap); - final com.linkedin.datahub.graphql.generated.DatasetProperties properties = - new com.linkedin.datahub.graphql.generated.DatasetProperties(); - properties.setDescription(gmsProperties.getDescription()); - dataset.setDescription(gmsProperties.getDescription()); - properties.setOrigin(dataset.getOrigin()); - if (gmsProperties.getExternalUrl() != null) { - properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); - } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); - if (gmsProperties.getName() != null) { - properties.setName(gmsProperties.getName()); - } else { - properties.setName(dataset.getName()); - } - properties.setQualifiedName(gmsProperties.getQualifiedName()); - dataset.setProperties(properties); - dataset.setDescription(properties.getDescription()); - if (gmsProperties.getUri() != null) { - dataset.setUri(gmsProperties.getUri().toString()); - } - TimeStamp created = gmsProperties.getCreated(); - if (created != null) { - properties.setCreated(created.getTime()); - if 
(created.hasActor()) { - properties.setCreatedActor(created.getActor().toString()); - } - } - TimeStamp lastModified = gmsProperties.getLastModified(); - if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } - } + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + if (gmsProperties.getName() != null) { + properties.setName(gmsProperties.getName()); + } else { + properties.setName(dataset.getName()); } - - private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); - final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); - editableProperties.setDescription(editableDatasetProperties.getDescription()); - dataset.setEditableProperties(editableProperties); + properties.setQualifiedName(gmsProperties.getQualifiedName()); + dataset.setProperties(properties); + dataset.setDescription(properties.getDescription()); + if (gmsProperties.getUri() != null) { + dataset.setUri(gmsProperties.getUri().toString()); } - - private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final ViewProperties properties = new ViewProperties(dataMap); - final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = - new com.linkedin.datahub.graphql.generated.ViewProperties(); - graphqlProperties.setMaterialized(properties.isMaterialized()); - graphqlProperties.setLanguage(properties.getViewLanguage()); - graphqlProperties.setLogic(properties.getViewLogic()); - dataset.setViewProperties(graphqlProperties); + TimeStamp created = gmsProperties.getCreated(); + if (created != null) { + properties.setCreated(created.getTime()); + if (created.hasActor()) { + properties.setCreatedActor(created.getActor().toString()); + } } - - private void mapGlobalTags(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataset.setGlobalTags(globalTags); - dataset.setTags(globalTags); + TimeStamp lastModified = gmsProperties.getLastModified(); + if (lastModified != null) { + properties.setLastModified(lastModified.getTime()); + if (lastModified.hasActor()) { + properties.setLastModifiedActor(lastModified.getActor().toString()); + } } - - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() + } + + private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); + final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); + editableProperties.setDescription(editableDatasetProperties.getDescription()); + dataset.setEditableProperties(editableProperties); + } + + private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final ViewProperties properties = new ViewProperties(dataMap); + final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = + new com.linkedin.datahub.graphql.generated.ViewProperties(); + 
graphqlProperties.setMaterialized(properties.isMaterialized()); + graphqlProperties.setLanguage(properties.getViewLanguage()); + graphqlProperties.setLogic(properties.getViewLogic()); + dataset.setViewProperties(graphqlProperties); + } + + private void mapGlobalTags( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataset.setGlobalTags(globalTags); + dataset.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); - } + private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index dbaaf27a3f2bc..25639e431fac1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -8,20 +8,22 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class DatasetProfileMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DatasetProfile> { +public class DatasetProfileMapper + implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DatasetProfile> { public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); - public static com.linkedin.datahub.graphql.generated.DatasetProfile map(@Nonnull final EnvelopedAspect envelopedAspect) { + public static com.linkedin.datahub.graphql.generated.DatasetProfile map( + @Nonnull final EnvelopedAspect envelopedAspect) { return INSTANCE.apply(envelopedAspect); } @Override - public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull final EnvelopedAspect envelopedAspect) { + public com.linkedin.datahub.graphql.generated.DatasetProfile apply( + @Nonnull final EnvelopedAspect envelopedAspect) { - DatasetProfile gmsProfile = GenericRecordUtils - .deserializeAspect( + DatasetProfile gmsProfile = + GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), envelopedAspect.getAspect().getContentType(), DatasetProfile.class); @@ -35,13 +37,16 @@ public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull fina result.setTimestampMillis(gmsProfile.getTimestampMillis()); if (gmsProfile.hasFieldProfiles()) { result.setFieldProfiles( - gmsProfile.getFieldProfiles().stream().map(DatasetProfileMapper::mapFieldProfile).collect(Collectors.toList())); + gmsProfile.getFieldProfiles().stream() + .map(DatasetProfileMapper::mapFieldProfile) + .collect(Collectors.toList())); } return result; } - private static 
com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile(DatasetFieldProfile gmsProfile) { + private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile( + DatasetFieldProfile gmsProfile) { final com.linkedin.datahub.graphql.generated.DatasetFieldProfile result = new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(); result.setFieldPath(gmsProfile.getFieldPath()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 78c1299ed9bd9..0b05d420030b5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -22,23 +24,19 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetUpdateInputMapper implements InputModelMapper<DatasetUpdateInput, Collection<MetadataChangeProposal>, Urn> { +public class DatasetUpdateInputMapper + implements InputModelMapper<DatasetUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection<MetadataChangeProposal> map( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(datasetUpdateInput, actor); } @Override public Collection<MetadataChangeProposal> apply( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { final Collection<MetadataChangeProposal> proposals = new ArrayList<>(6); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATASET_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -46,8 +44,10 @@ public Collection<MetadataChangeProposal> apply( auditStamp.setTime(System.currentTimeMillis()); if (datasetUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (datasetUpdateInput.getDeprecation() != null) { @@ -58,29 +58,32 @@ public Collection<MetadataChangeProposal> apply( } deprecation.setNote(datasetUpdateInput.getDeprecation().getNote()); deprecation.setActor(actor, SetMode.IGNORE_NULL); - proposals.add(updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); } if (datasetUpdateInput.getInstitutionalMemory() != null) { - proposals.add(updateMappingHelper.aspectToProposal(InstitutionalMemoryUpdateMapper - .map(datasetUpdateInput.getInstitutionalMemory()), 
INSTITUTIONAL_MEMORY_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + InstitutionalMemoryUpdateMapper.map(datasetUpdateInput.getInstitutionalMemory()), + INSTITUTIONAL_MEMORY_ASPECT_NAME)); } if (datasetUpdateInput.getTags() != null || datasetUpdateInput.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); if (datasetUpdateInput.getGlobalTags() != null) { - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getGlobalTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } @@ -89,28 +92,32 @@ public Collection<MetadataChangeProposal> apply( final EditableSchemaMetadata editableSchemaMetadata = new EditableSchemaMetadata(); editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( - datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream().map( - element -> mapSchemaFieldInfo(element) - ).collect(Collectors.toList()))); + datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() + .map(element -> mapSchemaFieldInfo(element)) + .collect(Collectors.toList()))); editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); } if (datasetUpdateInput.getEditableProperties() != null) { final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(); - editableDatasetProperties.setDescription(datasetUpdateInput.getEditableProperties().getDescription()); + editableDatasetProperties.setDescription( + datasetUpdateInput.getEditableProperties().getDescription()); editableDatasetProperties.setLastModified(auditStamp); editableDatasetProperties.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); } return proposals; } private EditableSchemaFieldInfo mapSchemaFieldInfo( - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo - ) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); if (schemaFieldInfo.getDescription() != null) { @@ -120,11 +127,14 @@ private EditableSchemaFieldInfo mapSchemaFieldInfo( if (schemaFieldInfo.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); - 
globalTags.setTags(new TagAssociationArray(schemaFieldInfo.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element)).collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + schemaFieldInfo.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } return output; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index 922574d5051d3..f54adbe8ba26c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -4,39 +4,34 @@ import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; - import javax.annotation.Nonnull; - public class EditableSchemaFieldInfoMapper { - public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); + public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); - public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final EditableSchemaFieldInfo fieldInfo, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(fieldInfo, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( + @Nonnull final EditableSchemaFieldInfo fieldInfo, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(fieldInfo, entityUrn); + } - public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); - if (input.hasDescription()) { - result.setDescription((input.getDescription())); - } - if (input.hasFieldPath()) { - result.setFieldPath((input.getFieldPath())); - } - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - return result; + public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( + @Nonnull final EditableSchemaFieldInfo input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = + new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); + if (input.hasDescription()) { + result.setDescription((input.getDescription())); + } + if (input.hasFieldPath()) { + result.setFieldPath((input.getFieldPath())); + } + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + 
result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 376558d2fd18c..3cf012a523d54 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; -import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.common.urn.Urn; - -import javax.annotation.Nonnull; +import com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class EditableSchemaMetadataMapper { - public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - - public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(metadata, entityUrn); - } + public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply(@Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); - result.setEditableSchemaFieldInfo(input.getEditableSchemaFieldInfo().stream().map(schemaField -> - EditableSchemaFieldInfoMapper.map(schemaField, entityUrn) - ).collect(Collectors.toList())); - return result; - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( + @Nonnull final EditableSchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( + @Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = + new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); + result.setEditableSchemaFieldInfo( + input.getEditableSchemaFieldInfo().stream() + .map(schemaField -> EditableSchemaFieldInfoMapper.map(schemaField, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b76767fa5d045..b99b243da5b94 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -5,14 +5,12 @@ import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import lombok.extern.slf4j.Slf4j; - import java.util.stream.Collectors; - 
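The mappers reformatted throughout this patch share one stateless-singleton shape: a public static INSTANCE, a static map(...) convenience entry point, and an apply(...) that does the field-by-field copy. A minimal sketch of that pattern with hypothetical SourceRecord/TargetRecord types (not actual DataHub classes), assuming the underlying ModelMapper contract is simply apply(input) -> output:

class SourceRecord {
  final String name;
  SourceRecord(String name) { this.name = name; }
}

class TargetRecord {
  String name;
}

class TargetRecordMapper {
  public static final TargetRecordMapper INSTANCE = new TargetRecordMapper();

  // Static entry point mirrors the map(...)/apply(...) split used by the mappers above.
  public static TargetRecord map(final SourceRecord input) {
    return INSTANCE.apply(input);
  }

  public TargetRecord apply(final SourceRecord input) {
    final TargetRecord result = new TargetRecord();
    result.name = input.name; // field-by-field copy, no state kept on the mapper
    return result;
  }
}

Because the mappers hold no state of their own, the shared INSTANCE is safe to reuse across threads, which is why the static map(...) helpers can delegate to a single object.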
+import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { - private ForeignKeyConstraintMapper() { } + private ForeignKeyConstraintMapper() {} public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); @@ -22,15 +20,15 @@ public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint } if (constraint.hasSourceFields()) { result.setSourceFields( - constraint.getSourceFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getSourceFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( - constraint.getForeignFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getForeignFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index 515cba5e99c74..dd345bebf657f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -5,63 +5,66 @@ import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.schema.SchemaMetadata; - import javax.annotation.Nonnull; -public class PlatformSchemaMapper implements ModelMapper<SchemaMetadata.PlatformSchema, PlatformSchema> { +public class PlatformSchemaMapper + implements ModelMapper<SchemaMetadata.PlatformSchema, PlatformSchema> { - public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); + public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); - } + public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(metadata); + } - @Override - public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { - Object result; - if (input.isSchemaless()) { - return null; - } else if (input.isPrestoDDL()) { - final TableSchema prestoSchema = new TableSchema(); - prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); - result = prestoSchema; - } else if (input.isOracleDDL()) { - final TableSchema oracleSchema = new TableSchema(); - oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); - result = oracleSchema; - } else if (input.isMySqlDDL()) { - final TableSchema mySqlSchema = new TableSchema(); - mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); - result = mySqlSchema; - } else if (input.isKafkaSchema()) { - final TableSchema kafkaSchema = new TableSchema(); - kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); - result = kafkaSchema; - } else if (input.isOrcSchema()) { - final TableSchema orcSchema = new TableSchema(); - orcSchema.setSchema(input.getOrcSchema().getSchema()); - result = orcSchema; - } else if 
(input.isBinaryJsonSchema()) { - final TableSchema binaryJsonSchema = new TableSchema(); - binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); - result = binaryJsonSchema; - } else if (input.isEspressoSchema()) { - final KeyValueSchema espressoSchema = new KeyValueSchema(); - espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); - espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); - result = espressoSchema; - } else if (input.isKeyValueSchema()) { - final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); - otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); - otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); - result = otherKeyValueSchema; - } else if (input.isOtherSchema()) { - final TableSchema otherTableSchema = new TableSchema(); - otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); - result = otherTableSchema; - } else { - throw new RuntimeException(String.format("Unrecognized platform schema type %s provided", input.memberType().getType().name())); - } - return (PlatformSchema) result; + @Override + public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { + Object result; + if (input.isSchemaless()) { + return null; + } else if (input.isPrestoDDL()) { + final TableSchema prestoSchema = new TableSchema(); + prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); + result = prestoSchema; + } else if (input.isOracleDDL()) { + final TableSchema oracleSchema = new TableSchema(); + oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); + result = oracleSchema; + } else if (input.isMySqlDDL()) { + final TableSchema mySqlSchema = new TableSchema(); + mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); + result = mySqlSchema; + } else if (input.isKafkaSchema()) { + final TableSchema kafkaSchema = new TableSchema(); + kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); + result = kafkaSchema; + } else if (input.isOrcSchema()) { + final TableSchema orcSchema = new TableSchema(); + orcSchema.setSchema(input.getOrcSchema().getSchema()); + result = orcSchema; + } else if (input.isBinaryJsonSchema()) { + final TableSchema binaryJsonSchema = new TableSchema(); + binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); + result = binaryJsonSchema; + } else if (input.isEspressoSchema()) { + final KeyValueSchema espressoSchema = new KeyValueSchema(); + espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); + espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); + result = espressoSchema; + } else if (input.isKeyValueSchema()) { + final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); + otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); + otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); + result = otherKeyValueSchema; + } else if (input.isOtherSchema()) { + final TableSchema otherTableSchema = new TableSchema(); + otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); + result = otherTableSchema; + } else { + throw new RuntimeException( + String.format( + "Unrecognized platform schema type %s provided", + input.memberType().getType().name())); } + return (PlatformSchema) result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index f05a1adb6b443..f53803ce5be85 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -3,72 +3,75 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; - +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import javax.annotation.Nonnull; public class SchemaFieldMapper { - public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); - public static SchemaField map(@Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static SchemaField map( + @Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public SchemaField apply(@Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { - final SchemaField result = new SchemaField(); - result.setDescription(input.getDescription()); - result.setFieldPath(input.getFieldPath()); - result.setJsonPath(input.getJsonPath()); - result.setRecursive(input.isRecursive()); - result.setNullable(input.isNullable()); - result.setNativeDataType(input.getNativeDataType()); - result.setType(mapSchemaFieldDataType(input.getType())); - result.setLabel(input.getLabel()); - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - result.setIsPartOfKey(input.isIsPartOfKey()); - result.setIsPartitioningKey(input.isIsPartitioningKey()); - return result; + public SchemaField apply( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + final SchemaField result = new SchemaField(); + result.setDescription(input.getDescription()); + result.setFieldPath(input.getFieldPath()); + result.setJsonPath(input.getJsonPath()); + result.setRecursive(input.isRecursive()); + result.setNullable(input.isNullable()); + result.setNativeDataType(input.getNativeDataType()); + result.setType(mapSchemaFieldDataType(input.getType())); + result.setLabel(input.getLabel()); + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + result.setIsPartOfKey(input.isIsPartOfKey()); + result.setIsPartitioningKey(input.isIsPartitioningKey()); + return result; + } - private SchemaFieldDataType mapSchemaFieldDataType(@Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { - final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); - if (type.isBytesType()) { - return 
SchemaFieldDataType.BYTES; - } else if (type.isFixedType()) { - return SchemaFieldDataType.FIXED; - } else if (type.isBooleanType()) { - return SchemaFieldDataType.BOOLEAN; - } else if (type.isStringType()) { - return SchemaFieldDataType.STRING; - } else if (type.isNumberType()) { - return SchemaFieldDataType.NUMBER; - } else if (type.isDateType()) { - return SchemaFieldDataType.DATE; - } else if (type.isTimeType()) { - return SchemaFieldDataType.TIME; - } else if (type.isEnumType()) { - return SchemaFieldDataType.ENUM; - } else if (type.isNullType()) { - return SchemaFieldDataType.NULL; - } else if (type.isArrayType()) { - return SchemaFieldDataType.ARRAY; - } else if (type.isMapType()) { - return SchemaFieldDataType.MAP; - } else if (type.isRecordType()) { - return SchemaFieldDataType.STRUCT; - } else if (type.isUnionType()) { - return SchemaFieldDataType.UNION; - } else { - throw new RuntimeException(String.format("Unrecognized SchemaFieldDataType provided %s", - type.memberType().toString())); - } + private SchemaFieldDataType mapSchemaFieldDataType( + @Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { + final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); + if (type.isBytesType()) { + return SchemaFieldDataType.BYTES; + } else if (type.isFixedType()) { + return SchemaFieldDataType.FIXED; + } else if (type.isBooleanType()) { + return SchemaFieldDataType.BOOLEAN; + } else if (type.isStringType()) { + return SchemaFieldDataType.STRING; + } else if (type.isNumberType()) { + return SchemaFieldDataType.NUMBER; + } else if (type.isDateType()) { + return SchemaFieldDataType.DATE; + } else if (type.isTimeType()) { + return SchemaFieldDataType.TIME; + } else if (type.isEnumType()) { + return SchemaFieldDataType.ENUM; + } else if (type.isNullType()) { + return SchemaFieldDataType.NULL; + } else if (type.isArrayType()) { + return SchemaFieldDataType.ARRAY; + } else if (type.isMapType()) { + return SchemaFieldDataType.MAP; + } else if (type.isRecordType()) { + return SchemaFieldDataType.STRUCT; + } else if (type.isUnionType()) { + return SchemaFieldDataType.UNION; + } else { + throw new RuntimeException( + String.format( + "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index eb793cc17efb6..d0424ba89eca1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -4,44 +4,53 @@ import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; - +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.stream.Collectors; public class SchemaMapper { - public static final SchemaMapper INSTANCE = new SchemaMapper(); + public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); - } + public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, null, entityUrn); + } - public 
static Schema map(@Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); - } + public static Schema map( + @Nonnull final SchemaMetadata metadata, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, systemMetadata, entityUrn); + } - public Schema apply(@Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - final Schema result = new Schema(); - if (input.getDataset() != null) { - result.setDatasetUrn(input.getDataset().toString()); - } - if (systemMetadata != null) { - result.setLastObserved(systemMetadata.getLastObserved()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - if (input.getForeignKeys() != null) { - result.setForeignKeys(input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) - .collect(Collectors.toList())); - } - return result; + public Schema apply( + @Nonnull final com.linkedin.schema.SchemaMetadata input, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + final Schema result = new Schema(); + if (input.getDataset() != null) { + result.setDatasetUrn(input.getDataset().toString()); + } + if (systemMetadata != null) { + result.setLastObserved(systemMetadata.getLastObserved()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + if (input.getForeignKeys() != null) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(ForeignKeyConstraintMapper::map) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 00cb91bed8abb..31381073a16dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -6,43 +6,42 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class SchemaMetadataMapper { - public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); + public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); - public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(aspect, 
entityUrn); - } + public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(aspect, entityUrn); + } - public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); - final com.linkedin.datahub.graphql.generated.SchemaMetadata result = - new com.linkedin.datahub.graphql.generated.SchemaMetadata(); + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + final com.linkedin.datahub.graphql.generated.SchemaMetadata result = + new com.linkedin.datahub.graphql.generated.SchemaMetadata(); - if (input.hasDataset()) { - result.setDatasetUrn(input.getDataset().toString()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); - if (input.hasForeignKeys()) { - result.setForeignKeys(input.getForeignKeys().stream().map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map( - foreignKeyConstraint - )).collect(Collectors.toList())); - } - return result; + if (input.hasDataset()) { + result.setDatasetUrn(input.getDataset().toString()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setAspectVersion(aspect.getVersion()); + if (input.hasForeignKeys()) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map(foreignKeyConstraint)) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 241c4872b1caa..727e8629f74b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -14,11 +16,11 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import 
com.linkedin.datahub.graphql.generated.VersionedDataset; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -38,13 +40,10 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ @Slf4j public class VersionedDatasetMapper implements ModelMapper<EntityResponse, VersionedDataset> { @@ -67,28 +66,52 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { SystemMetadata schemaSystemMetadata = getSystemMetadata(aspectMap, SCHEMA_METADATA_ASPECT_NAME); mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSchema( + SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new 
EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } @@ -104,12 +127,15 @@ private void mapDatasetKey(@Nonnull VersionedDataset dataset, @Nonnull DataMap d final DatasetKey gmsKey = new DatasetKey(dataMap); dataset.setName(gmsKey.getName()); dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + dataset.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); } - private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { + private void mapDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { final DatasetProperties gmsProperties = new DatasetProperties(dataMap); final com.linkedin.datahub.graphql.generated.DatasetProperties properties = new com.linkedin.datahub.graphql.generated.DatasetProperties(); @@ -118,7 +144,8 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da if (gmsProperties.getExternalUrl() != null) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); if (gmsProperties.getName() != null) { properties.setName(gmsProperties.getName()); } else { @@ -128,8 +155,10 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da dataset.setProperties(properties); } - private void mapEditableDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new 
EditableDatasetProperties(dataMap); + private void mapEditableDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); editableProperties.setDescription(editableDatasetProperties.getDescription()); dataset.setEditableProperties(editableProperties); @@ -145,18 +174,21 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataset.setTags(globalTags); } private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index df8de87ff69ff..51ef254f52225 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -5,32 +5,32 @@ import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class DomainAssociationMapper { - public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); + public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); - public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, - @Nonnull final String entityUrn - ) { - return INSTANCE.apply(domains, entityUrn); - } + public static DomainAssociation map( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + return INSTANCE.apply(domains, entityUrn); + } - public DomainAssociation apply(@Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { - DomainAssociation association = new DomainAssociation(); - association.setDomain(Domain.builder() - .setType(EntityType.DOMAIN) - .setUrn(domains.getDomains().get(0).toString()).build()); - association.setAssociatedUrn(entityUrn); - return association; - } - return null; + public DomainAssociation apply( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0) { + DomainAssociation association = new DomainAssociation(); + association.setDomain( + Domain.builder() + .setType(EntityType.DOMAIN) + .setUrn(domains.getDomains().get(0).toString()) + .build()); + association.setAssociatedUrn(entityUrn); + return association; } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index fe52b5eff718f..7ff1f70311b22 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; - public class DomainMapper { public static Domain map(final EntityResponse entityResponse) { @@ -33,30 +32,38 @@ public static Domain map(final EntityResponse entityResponse) { return null; } - final EnvelopedAspect envelopedDomainProperties = aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedDomainProperties = + aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (envelopedDomainProperties != null) { - result.setProperties(mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); + result.setProperties( + mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new 
InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } return result; } - private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties(final DomainProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = new com.linkedin.datahub.graphql.generated.DomainProperties(); + private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( + final DomainProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.DomainProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); return propertiesResult; } - private DomainMapper() { } + private DomainMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index 4879c339d99fa..06d5df9354380 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -17,8 +17,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashSet; @@ -29,19 +27,21 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; +public class DomainType + implements SearchableEntityType<Domain, String>, + com.linkedin.datahub.graphql.types.EntityType<Domain, String> { -public class DomainType implements SearchableEntityType<Domain, String>, com.linkedin.datahub.graphql.types.EntityType<Domain, String> { - - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; - public DomainType(final EntityClient entityClient) { + public DomainType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -61,28 +61,30 @@ public Class<Domain> objectClass() { } @Override - public List<DataFetcherResult<Domain>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> domainUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Domain>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> domainUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DOMAIN_ENTITY_NAME, - new HashSet<>(domainUrns), - 
ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.DOMAIN_ENTITY_NAME, + new HashSet<>(domainUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); for (Urn urn : domainUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Domain>newResult() - .data(DomainMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<Domain>newResult() + .data(DomainMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Domains", e); @@ -90,25 +92,31 @@ public List<DataFetcherResult<Domain>> batchLoad(@Nonnull List<String> urns, @No } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List<FacetFilterInput> filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Domain entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Domain entity type"); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } - private Urn getUrn(final String urnStr) { try { return Urn.createFromString(urnStr); @@ -116,4 +124,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index f2c9e962811b9..9a27a1fba853f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -1,17 +1,21 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.datahub.graphql.generated.GlossaryNode; import 
com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryNodeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -20,18 +24,12 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - -public class GlossaryNodeType implements com.linkedin.datahub.graphql.types.EntityType<GlossaryNode, String> { +public class GlossaryNodeType + implements com.linkedin.datahub.graphql.types.EntityType<GlossaryNode, String> { - static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_NODE_KEY_ASPECT_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_NODE_KEY_ASPECT_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); private final EntityClient _entityClient; @@ -55,25 +53,31 @@ public Function<Entity, String> getKeyProvider() { } @Override - public List<DataFetcherResult<GlossaryNode>> batchLoad(final List<String> urns, final QueryContext context) { - final List<Urn> glossaryNodeUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<GlossaryNode>> batchLoad( + final List<String> urns, final QueryContext context) { + final List<Urn> glossaryNodeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> glossaryNodeMap = _entityClient.batchGetV2(GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(glossaryNodeUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map<Urn, EntityResponse> glossaryNodeMap = + _entityClient.batchGetV2( + GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(glossaryNodeUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsGlossaryNode -> - gmsGlossaryNode == null ? null - : DataFetcherResult.<GlossaryNode>newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) - .build()) + .map( + gmsGlossaryNode -> + gmsGlossaryNode == null + ? 
null + : DataFetcherResult.<GlossaryNode>newResult() + .data(GlossaryNodeMapper.map(gmsGlossaryNode)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load GlossaryNodes", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index 3574c17a50923..c40740238f61e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.ArrayList; @@ -39,118 +42,135 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class GlossaryTermType implements SearchableEntityType<GlossaryTerm, String>, - BrowsableEntityType<GlossaryTerm, String> { - - private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_TERM_KEY_ASPECT_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, - GLOSSARY_RELATED_TERM_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - BROWSE_PATHS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME - ); - - private final EntityClient _entityClient; - - public GlossaryTermType(final EntityClient entityClient) { - _entityClient = entityClient; +public class GlossaryTermType + implements SearchableEntityType<GlossaryTerm, String>, + BrowsableEntityType<GlossaryTerm, String> { + + private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); + + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_TERM_KEY_ASPECT_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + GLOSSARY_RELATED_TERM_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + BROWSE_PATHS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME); + + private final EntityClient _entityClient; + + public GlossaryTermType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<GlossaryTerm> objectClass() { + return GlossaryTerm.class; + } + + @Override + public EntityType type() { + return EntityType.GLOSSARY_TERM; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<GlossaryTerm>> batchLoad( + final List<String> urns, final QueryContext context) { + final List<Urn> glossaryTermUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + 
final Map<Urn, EntityResponse> glossaryTermMap = + _entityClient.batchGetV2( + GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(glossaryTermUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : glossaryTermUrns) { + gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsGlossaryTerm -> + gmsGlossaryTerm == null + ? null + : DataFetcherResult.<GlossaryTerm>newResult() + .data(GlossaryTermMapper.map(gmsGlossaryTerm)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load GlossaryTerms", e); } - - @Override - public Class<GlossaryTerm> objectClass() { - return GlossaryTerm.class; - } - - @Override - public EntityType type() { - return EntityType.GLOSSARY_TERM; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public List<DataFetcherResult<GlossaryTerm>> batchLoad(final List<String> urns, final QueryContext context) { - final List<Urn> glossaryTermUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> glossaryTermMap = _entityClient.batchGetV2(GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(glossaryTermUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : glossaryTermUrns) { - gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsGlossaryTerm -> - gmsGlossaryTerm == null ? null - : DataFetcherResult.<GlossaryTerm>newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load GlossaryTerms", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "glossaryTerm", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "glossaryTerm", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( "glossaryTerm", query, filters, limit, 
context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "glossaryTerm", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } - + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "glossaryTerm", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java index 93b6ab53d5a3a..59f7cc8a9c828 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java @@ -1,27 +1,27 @@ package com.linkedin.datahub.graphql.types.glossary; import com.linkedin.common.urn.GlossaryTermUrn; - import java.net.URISyntaxException; import java.util.regex.Pattern; public class GlossaryTermUtils { - private GlossaryTermUtils() { } + private GlossaryTermUtils() {} - static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { - try { - return GlossaryTermUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); - } + static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { + try { + return GlossaryTermUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); } + } - public static String getGlossaryTermName(String hierarchicalName) { - if (hierarchicalName.contains(".")) { - String[] nodes = hierarchicalName.split(Pattern.quote(".")); - return nodes[nodes.length - 1]; - } - return hierarchicalName; + public static String 
getGlossaryTermName(String hierarchicalName) { + if (hierarchicalName.contains(".")) { + String[] nodes = hierarchicalName.split(Pattern.quote(".")); + return nodes[nodes.length - 1]; } + return hierarchicalName; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 6a1d849dd23bf..901361eb0b2be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -13,11 +15,8 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.key.GlossaryNodeKey; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class GlossaryNodeMapper implements ModelMapper<EntityResponse, GlossaryNode> { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); @@ -35,11 +34,14 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<GlossaryNode> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_NODE_INFO_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + mappingHelper.mapToResult( + GLOSSARY_NODE_INFO_ASPECT_NAME, + (glossaryNode, dataMap) -> glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryNode, dataMap) -> + glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java index 2f99700bc30a1..12ba8c1e088f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java @@ -1,41 +1,44 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.GlossaryTermInfo; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mapper implementations
  */
 public class GlossaryTermInfoMapper {
 
-    public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper();
+  public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper();
 
-    public static GlossaryTermInfo map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
-        return INSTANCE.apply(glossaryTermInfo, entityUrn);
-    }
+  public static GlossaryTermInfo map(
+      @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
+    return INSTANCE.apply(glossaryTermInfo, entityUrn);
+  }
 
-    public GlossaryTermInfo apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
-        com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = new com.linkedin.datahub.graphql.generated.GlossaryTermInfo();
-        glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition());
-        glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition());
-        glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource());
-        if (glossaryTermInfo.hasName()) {
-            glossaryTermInfoResult.setName(glossaryTermInfo.getName());
-        }
-        if (glossaryTermInfo.hasSourceRef()) {
-            glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef());
-        }
-        if (glossaryTermInfo.hasSourceUrl()) {
-            glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString());
-        }
-        if (glossaryTermInfo.hasCustomProperties()) {
-            glossaryTermInfoResult.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn));
-        }
-        return glossaryTermInfoResult;
+  public GlossaryTermInfo apply(
+      @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
+    com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult =
+        new com.linkedin.datahub.graphql.generated.GlossaryTermInfo();
+    glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition());
+    glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition());
+    glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource());
+    if (glossaryTermInfo.hasName()) {
+      glossaryTermInfoResult.setName(glossaryTermInfo.getName());
+    }
+    if (glossaryTermInfo.hasSourceRef()) {
+      glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef());
+    }
+    if (glossaryTermInfo.hasSourceUrl()) {
+      glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString());
+    }
+    if (glossaryTermInfo.hasCustomProperties()) {
+      glossaryTermInfoResult.setCustomProperties(
+          CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn));
     }
+    return glossaryTermInfoResult;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java
index c98177b458dea..a02f79535399f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.glossary.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.Deprecation;
 import com.linkedin.common.InstitutionalMemory;
 import com.linkedin.common.Ownership;
@@ -15,71 +17,82 @@
 import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper;
 import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
+import com.linkedin.domain.Domains;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.glossary.GlossaryTermInfo;
 import com.linkedin.metadata.key.GlossaryTermKey;
-import com.linkedin.domain.Domains;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mapper implementations
  */
 public class GlossaryTermMapper implements ModelMapper<EntityResponse, GlossaryTerm> {
 
-    public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper();
+  public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper();
 
-    public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse);
-    }
+  public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse);
+  }
 
-    @Override
-    public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) {
-        GlossaryTerm result = new GlossaryTerm();
-        Urn entityUrn = entityResponse.getUrn();
+  @Override
+  public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) {
+    GlossaryTerm result = new GlossaryTerm();
+    Urn entityUrn = entityResponse.getUrn();
 
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.GLOSSARY_TERM);
-        final String legacyName = GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId());
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.GLOSSARY_TERM);
+    final String legacyName =
+        GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId());
 
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        MappingHelper<GlossaryTerm> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey);
-        mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) ->
-            glossaryTerm.setGlossaryTermInfo(GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn)));
-        mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) ->
-            glossaryTerm.setProperties(GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn)));
-        mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryTerm, dataMap) ->
-            glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)));
-        mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
-        mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) ->
-            glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))));
-        mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) ->
-            dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)));
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    MappingHelper<GlossaryTerm> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey);
+    mappingHelper.mapToResult(
+        GLOSSARY_TERM_INFO_ASPECT_NAME,
+        (glossaryTerm, dataMap) ->
+            glossaryTerm.setGlossaryTermInfo(
+                GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn)));
+    mappingHelper.mapToResult(
+        GLOSSARY_TERM_INFO_ASPECT_NAME,
+        (glossaryTerm, dataMap) ->
+            glossaryTerm.setProperties(
+                GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn)));
+    mappingHelper.mapToResult(
+        OWNERSHIP_ASPECT_NAME,
+        (glossaryTerm, dataMap) ->
+            glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)));
+    mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
+    mappingHelper.mapToResult(
+        DEPRECATION_ASPECT_NAME,
+        (glossaryTerm, dataMap) ->
+            glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))));
+    mappingHelper.mapToResult(
+        INSTITUTIONAL_MEMORY_ASPECT_NAME,
+        (dataset, dataMap) ->
+            dataset.setInstitutionalMemory(
+                InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)));
 
-        // If there's no name property, resort to the legacy name computation.
-        if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) {
-            result.getGlossaryTermInfo().setName(legacyName);
-        }
-        if (result.getProperties() != null && result.getProperties().getName() == null) {
-            result.getProperties().setName(legacyName);
-        }
-        return mappingHelper.getResult();
+    // If there's no name property, resort to the legacy name computation.
+    if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) {
+      result.getGlossaryTermInfo().setName(legacyName);
     }
-
-    private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) {
-        GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap);
-        glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName()));
-        glossaryTerm.setHierarchicalName(glossaryTermKey.getName());
+    if (result.getProperties() != null && result.getProperties().getName() == null) {
+      result.getProperties().setName(legacyName);
     }
+    return mappingHelper.getResult();
+  }
 
-    private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) {
-        final Domains domains = new Domains(dataMap);
-        glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn()));
-    }
+  private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) {
+    GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap);
+    glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName()));
+    glossaryTerm.setHierarchicalName(glossaryTermKey.getName());
+  }
+
+  private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) {
+    final Domains domains = new Domains(dataMap);
+    glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn()));
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java
index 6b35833183393..94edfcbd31455 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java
@@ -2,25 +2,27 @@
 
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.GlossaryTermProperties;
-import javax.annotation.Nonnull;
-
 import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
+import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mapper implementations
  */
 public class GlossaryTermPropertiesMapper {
 
   public static final GlossaryTermPropertiesMapper INSTANCE = new GlossaryTermPropertiesMapper();
 
-  public static GlossaryTermProperties map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
+  public static GlossaryTermProperties map(
+      @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
     return INSTANCE.apply(glossaryTermInfo, entityUrn);
   }
 
-  public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
-    com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = new com.linkedin.datahub.graphql.generated.GlossaryTermProperties();
+  public GlossaryTermProperties apply(
+      @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) {
+    com.linkedin.datahub.graphql.generated.GlossaryTermProperties result =
+        new com.linkedin.datahub.graphql.generated.GlossaryTermProperties();
     result.setDefinition(glossaryTermInfo.getDefinition());
     result.setDescription(glossaryTermInfo.getDefinition());
     result.setTermSource(glossaryTermInfo.getTermSource());
@@ -34,7 +36,8 @@ public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.Glossar
       result.setSourceUrl(glossaryTermInfo.getSourceUrl().toString());
     }
     if (glossaryTermInfo.hasCustomProperties()) {
-      result.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn));
+      result.setCustomProperties(
+          CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn));
     }
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java
index a64b0f7dc64fb..8494eace22244 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java
@@ -1,51 +1,52 @@
 package com.linkedin.datahub.graphql.types.glossary.mappers;
 
+import com.linkedin.common.GlossaryTermAssociation;
 import com.linkedin.common.urn.Urn;
-import javax.annotation.Nonnull;
-import java.util.stream.Collectors;
-
 import com.linkedin.datahub.graphql.generated.EntityType;
-import com.linkedin.datahub.graphql.generated.GlossaryTerms;
-import com.linkedin.common.GlossaryTermAssociation;
 import com.linkedin.datahub.graphql.generated.GlossaryTerm;
+import com.linkedin.datahub.graphql.generated.GlossaryTerms;
 import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils;
+import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mapper implementations
  */
 public class GlossaryTermsMapper {
 
-    public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper();
-
-    public static GlossaryTerms map(
-        @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms,
-        @Nonnull final Urn entityUrn
-    ) {
-        return INSTANCE.apply(glossaryTerms, entityUrn);
-    }
-
-    public GlossaryTerms apply(@Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) {
-        com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms();
-        result.setTerms(glossaryTerms.getTerms().stream().map(
-            association -> this.mapGlossaryTermAssociation(association, entityUrn)
-        ).collect(Collectors.toList()));
-        return result;
-    }
-
-    private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation(
-        @Nonnull final GlossaryTermAssociation input,
-        @Nonnull final Urn entityUrn
-    ) {
-        final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation();
-        final GlossaryTerm resultGlossaryTerm = new GlossaryTerm();
-        resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM);
-        resultGlossaryTerm.setUrn(input.getUrn().toString());
-        resultGlossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity()));
-        result.setTerm(resultGlossaryTerm);
-        result.setAssociatedUrn(entityUrn.toString());
-        return result;
-    }
-
+  public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper();
+
+  public static GlossaryTerms map(
+      @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms,
+      @Nonnull final Urn entityUrn) {
+    return INSTANCE.apply(glossaryTerms, entityUrn);
+  }
+
+  public GlossaryTerms apply(
+      @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms,
+      @Nonnull final Urn entityUrn) {
+    com.linkedin.datahub.graphql.generated.GlossaryTerms result =
+        new com.linkedin.datahub.graphql.generated.GlossaryTerms();
+    result.setTerms(
+        glossaryTerms.getTerms().stream()
+            .map(association -> this.mapGlossaryTermAssociation(association, entityUrn))
+            .collect(Collectors.toList()));
+    return result;
+  }
+
+  private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation(
+      @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) {
+    final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result =
+        new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation();
+    final GlossaryTerm resultGlossaryTerm = new GlossaryTerm();
+    resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM);
+    resultGlossaryTerm.setUrn(input.getUrn().toString());
+    resultGlossaryTerm.setName(
+        GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity()));
+    result.setTerm(resultGlossaryTerm);
+    result.setAssociatedUrn(entityUrn.toString());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java
index d575a81f4ae03..621fcf5f04140 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java
@@ -3,26 +3,27 @@
 import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
 import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
 import com.linkedin.metadata.query.AutoCompleteResult;
-
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
+public class AutoCompleteResultsMapper
+    implements ModelMapper<AutoCompleteResult, AutoCompleteResults> {
 
-public class AutoCompleteResultsMapper implements ModelMapper<AutoCompleteResult, AutoCompleteResults> {
-
-    public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper();
+  public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper();
 
-    public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) {
-        return INSTANCE.apply(results);
-    }
+  public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) {
+    return INSTANCE.apply(results);
+  }
 
-    @Override
-    public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) {
-        final AutoCompleteResults result = new AutoCompleteResults();
-        result.setQuery(input.getQuery());
-        result.setSuggestions(input.getSuggestions());
-        result.setEntities(input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(
-            Collectors.toList()));
-        return result;
-    }
+  @Override
+  public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) {
+    final AutoCompleteResults result = new AutoCompleteResults();
+    result.setQuery(input.getQuery());
+    result.setSuggestions(input.getSuggestions());
+    result.setEntities(
+        input.getEntities().stream()
+            .map(entity -> UrnToEntityMapper.map(entity.getUrn()))
+            .collect(Collectors.toList()));
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java
index ea44c4409b709..689ff82147e15 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java
@@ -2,27 +2,27 @@
 
 import com.linkedin.datahub.graphql.Constants;
 import com.linkedin.datahub.graphql.generated.BrowsePath;
-
-import javax.annotation.Nonnull;
 import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 
 public class BrowsePathMapper implements ModelMapper<String, BrowsePath> {
 
-    public static final BrowsePathMapper INSTANCE = new BrowsePathMapper();
+  public static final BrowsePathMapper INSTANCE = new BrowsePathMapper();
 
-    public static BrowsePath map(@Nonnull final String input) {
-        return INSTANCE.apply(input);
-    }
+  public static BrowsePath map(@Nonnull final String input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public BrowsePath apply(@Nonnull final String input) {
-        final BrowsePath browsePath = new BrowsePath();
-        final List<String> path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER))
-            .filter(pathComponent -> !"".equals(pathComponent))
-            .collect(Collectors.toList());
-        browsePath.setPath(path);
-        return browsePath;
-    }
+  @Override
+  public BrowsePath apply(@Nonnull final String input) {
+    final BrowsePath browsePath = new BrowsePath();
+    final List<String> path =
+        Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER))
+            .filter(pathComponent -> !"".equals(pathComponent))
+            .collect(Collectors.toList());
+    browsePath.setPath(path);
+    return browsePath;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java
index 4dac4468a80d5..ae70823d675d8 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java
@@ -1,25 +1,24 @@
 package com.linkedin.datahub.graphql.types.mappers;
 
 import com.linkedin.datahub.graphql.generated.BrowsePath;
-
-import javax.annotation.Nonnull;
 import java.util.ArrayList;
 import java.util.List;
+import javax.annotation.Nonnull;
 
 public class BrowsePathsMapper implements ModelMapper<List<String>, List<BrowsePath>> {
 
-    public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper();
+  public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper();
 
-    public static List<BrowsePath> map(@Nonnull final List<String> input) {
-        return INSTANCE.apply(input);
-    }
+  public static List<BrowsePath> map(@Nonnull final List<String> input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public List<BrowsePath> apply(@Nonnull final List<String> input) {
-        List<BrowsePath> results = new ArrayList<>();
-        for (String pathStr : input) {
-            results.add(BrowsePathMapper.map(pathStr));
-        }
-        return results;
+  @Override
+  public List<BrowsePath> apply(@Nonnull final List<String> input) {
+    List<BrowsePath> results = new ArrayList<>();
+    for (String pathStr : input) {
+      results.add(BrowsePathMapper.map(pathStr));
     }
+    return results;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java
index c3e74c28fe59d..5cac03b19a74c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java
@@ -9,10 +9,8 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
 public class BrowseResultMapper {
-  private BrowseResultMapper() {
-  }
+  private BrowseResultMapper() {}
 
   public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) {
     final BrowseResults result = new BrowseResults();
@@ -31,7 +29,9 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input)
     result.setMetadata(browseResultMetadata);
 
     List<Entity> entities =
-        input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(Collectors.toList());
+        input.getEntities().stream()
+            .map(entity -> UrnToEntityMapper.map(entity.getUrn()))
+            .collect(Collectors.toList());
     result.setEntities(entities);
 
     List<BrowseResultGroup> groups =
@@ -41,7 +41,8 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input)
     return result;
   }
 
-  private static BrowseResultGroup mapGroup(@Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) {
+  private static BrowseResultGroup mapGroup(
+      @Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) {
     final BrowseResultGroup result = new BrowseResultGroup();
     result.setName(group.getName());
     result.setCount(group.getCount());
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java
index e6172debb439e..c58341f994d4f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java
@@ -1,8 +1,6 @@
 package com.linkedin.datahub.graphql.types.mappers;
 
-/**
- * Maps an input of type I to an output of type O with actor context.
- */
+/** Maps an input of type I to an output of type O with actor context. */
 public interface InputModelMapper<I, O, A> {
-    O apply(final I input, final A actor);
-}
\ No newline at end of file
+  O apply(final I input, final A actor);
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java
index 2a615b24eaac2..7c7dab2e02472 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mappers;
 
+import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*;
+import static com.linkedin.metadata.utils.SearchUtil.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.AggregationMetadata;
 import com.linkedin.datahub.graphql.generated.FacetMetadata;
@@ -10,75 +13,87 @@
 import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
 import com.linkedin.metadata.search.SearchEntity;
 import com.linkedin.metadata.search.utils.SearchUtils;
-import lombok.extern.slf4j.Slf4j;
-
 import java.net.URISyntaxException;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
-
-import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*;
-import static com.linkedin.metadata.utils.SearchUtil.*;
-
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class MapperUtils {
 
-  private MapperUtils() {
-
-  }
+  private MapperUtils() {}
 
   public static SearchResult mapResult(SearchEntity searchEntity) {
-    return new SearchResult(UrnToEntityMapper.map(searchEntity.getEntity()),
+    return new SearchResult(
+        UrnToEntityMapper.map(searchEntity.getEntity()),
         getInsightsFromFeatures(searchEntity.getFeatures()),
         getMatchedFieldEntry(searchEntity.getMatchedFields()));
   }
 
-  public static FacetMetadata mapFacet(com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) {
+  public static FacetMetadata mapFacet(
+      com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) {
     final FacetMetadata facetMetadata = new FacetMetadata();
-    List<String> aggregationFacets = List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR));
-    List<Boolean> isEntityTypeFilter = aggregationFacets.stream().map(
-        facet -> facet.equals("entity") || facet.contains("_entityType")).collect(Collectors.toList());
+    List<String> aggregationFacets =
+        List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR));
+    List<Boolean> isEntityTypeFilter =
+        aggregationFacets.stream()
+            .map(facet -> facet.equals("entity") || facet.contains("_entityType"))
+            .collect(Collectors.toList());
     facetMetadata.setField(aggregationMetadata.getName());
     facetMetadata.setDisplayName(
-        Optional.ofNullable(aggregationMetadata.getDisplayName()).orElse(aggregationMetadata.getName()));
-    facetMetadata.setAggregations(aggregationMetadata.getFilterValues()
-        .stream()
-        .map(filterValue -> new AggregationMetadata(convertFilterValue(filterValue.getValue(), isEntityTypeFilter),
-            filterValue.getFacetCount(),
-            filterValue.getEntity() == null ? null : UrnToEntityMapper.map(filterValue.getEntity())))
-        .collect(Collectors.toList()));
+        Optional.ofNullable(aggregationMetadata.getDisplayName())
+            .orElse(aggregationMetadata.getName()));
+    facetMetadata.setAggregations(
+        aggregationMetadata.getFilterValues().stream()
+            .map(
+                filterValue ->
+                    new AggregationMetadata(
+                        convertFilterValue(filterValue.getValue(), isEntityTypeFilter),
+                        filterValue.getFacetCount(),
+                        filterValue.getEntity() == null
+                            ? null
+                            : UrnToEntityMapper.map(filterValue.getEntity())))
+            .collect(Collectors.toList()));
     return facetMetadata;
   }
 
   public static String convertFilterValue(String filterValue, List<Boolean> isEntityTypeFilter) {
     String[] aggregations = filterValue.split(AGGREGATION_SEPARATOR_CHAR);
-    return IntStream.range(0, aggregations.length).mapToObj(
-        idx -> idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) ? EntityTypeMapper.getType(aggregations[idx]).toString() : aggregations[idx])
+    return IntStream.range(0, aggregations.length)
+        .mapToObj(
+            idx ->
+                idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx)
+                    ? EntityTypeMapper.getType(aggregations[idx]).toString()
+                    : aggregations[idx])
         .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR));
   }
 
-  public static List<MatchedField> getMatchedFieldEntry(List<com.linkedin.metadata.search.MatchedField> highlightMetadata) {
+  public static List<MatchedField> getMatchedFieldEntry(
+      List<com.linkedin.metadata.search.MatchedField> highlightMetadata) {
     return highlightMetadata.stream()
-        .map(field -> {
-          MatchedField matchedField = new MatchedField();
-          matchedField.setName(field.getName());
-          matchedField.setValue(field.getValue());
-          if (SearchUtils.isUrn(field.getValue())) {
-            try {
+        .map(
+            field -> {
+              MatchedField matchedField = new MatchedField();
+              matchedField.setName(field.getName());
+              matchedField.setValue(field.getValue());
+              if (SearchUtils.isUrn(field.getValue())) {
+                try {
                   Urn urn = Urn.createFromString(field.getValue());
                   matchedField.setEntity(UrnToEntityMapper.map(urn));
-            } catch (URISyntaxException e) {
+                } catch (URISyntaxException e) {
                   log.debug("Failed to create urn from MatchedField value: {}", field.getValue());
+                }
               }
-          }
-          return matchedField;
-        })
+              return matchedField;
+            })
         .collect(Collectors.toList());
   }
 
-  public static SearchSuggestion mapSearchSuggestion(com.linkedin.metadata.search.SearchSuggestion suggestion) {
-    return new SearchSuggestion(suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency()));
+  public static SearchSuggestion mapSearchSuggestion(
+      com.linkedin.metadata.search.SearchSuggestion suggestion) {
+    return new SearchSuggestion(
+        suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency()));
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java
index 08afbd510b98f..2167be9f27ca8 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java
@@ -1,9 +1,6 @@
 package com.linkedin.datahub.graphql.types.mappers;
 
-/**
- * Simple interface for classes capable of mapping an input of type I to
- * an output of type O.
- */
+/** Simple interface for classes capable of mapping an input of type I to an output of type O. */
 public interface ModelMapper<I, O> {
-    O apply(final I input);
+  O apply(final I input);
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java
index 903e962524734..e0ac0336c8715 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java
@@ -3,7 +3,5 @@
 import com.linkedin.datahub.graphql.generated.TimeSeriesAspect;
 import com.linkedin.metadata.aspect.EnvelopedAspect;
 
-
-public interface TimeSeriesAspectMapper<T extends TimeSeriesAspect> extends ModelMapper<EnvelopedAspect, T> {
-
-}
+public interface TimeSeriesAspectMapper<T extends TimeSeriesAspect>
+    extends ModelMapper<EnvelopedAspect, T> {}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java
index dd00727fc2845..baf632ae8bdf4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mappers;
 
+import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*;
+import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*;
+
 import com.linkedin.common.UrnArray;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.datahub.graphql.generated.Entity;
@@ -12,10 +15,6 @@
 import com.linkedin.metadata.search.SearchResultMetadata;
 import java.util.stream.Collectors;
 
-import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*;
-import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*;
-
-
 public class UrnScrollAcrossLineageResultsMapper<T extends RecordTemplate, E extends Entity> {
   public static <T extends RecordTemplate, E extends Entity> ScrollAcrossLineageResults map(
       LineageScrollResult searchResult) {
@@ -30,8 +29,12 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) {
     result.setTotal(input.getNumEntities());
 
     final SearchResultMetadata searchResultMetadata = input.getMetadata();
-    result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList()));
-    result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList()));
+    result.setSearchResults(
+        input.getEntities().stream().map(this::mapResult).collect(Collectors.toList()));
+    result.setFacets(
+        searchResultMetadata.getAggregations().stream()
+            .map(MapperUtils::mapFacet)
+            .collect(Collectors.toList()));
 
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java
index fd774d73f3df7..72eb71cd095bb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java
@@ -6,7 +6,6 @@
 import com.linkedin.metadata.search.SearchResultMetadata;
 import java.util.stream.Collectors;
 
-
 public class UrnScrollResultsMapper<T extends RecordTemplate, E extends Entity> {
   public static <T extends RecordTemplate, E extends Entity> ScrollResults map(
       com.linkedin.metadata.search.ScrollResult scrollResult) {
@@ -25,8 +24,12 @@ public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) {
     result.setTotal(input.getNumEntities());
 
     final SearchResultMetadata searchResultMetadata = input.getMetadata();
-    result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList()));
-    result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList()));
+    result.setSearchResults(
+        input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList()));
+    result.setFacets(
+        searchResultMetadata.getAggregations().stream()
+            .map(MapperUtils::mapFacet)
+            .collect(Collectors.toList()));
 
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java
index ae87d0269c188..642fe90cf2aed 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java
@@ -1,23 +1,22 @@
 package com.linkedin.datahub.graphql.types.mappers;
 
+import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*;
+import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*;
+
 import com.linkedin.common.UrnArray;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.datahub.graphql.generated.Entity;
 import com.linkedin.datahub.graphql.generated.EntityPath;
 import com.linkedin.datahub.graphql.generated.FreshnessStats;
-import com.linkedin.datahub.graphql.generated.SystemFreshness;
 import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult;
 import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults;
+import com.linkedin.datahub.graphql.generated.SystemFreshness;
 import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
 import com.linkedin.metadata.search.LineageSearchEntity;
 import com.linkedin.metadata.search.LineageSearchResult;
 import com.linkedin.metadata.search.SearchResultMetadata;
 import java.util.stream.Collectors;
 
-import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*;
-import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*;
-
-
 public class UrnSearchAcrossLineageResultsMapper<T extends RecordTemplate, E extends Entity> {
   public static <T extends RecordTemplate, E extends Entity> SearchAcrossLineageResults map(
       LineageSearchResult searchResult) {
@@ -32,17 +31,25 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) {
     result.setTotal(input.getNumEntities());
 
     final SearchResultMetadata searchResultMetadata = input.getMetadata();
-    result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList()));
-    result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList()));
+    result.setSearchResults(
+        input.getEntities().stream().map(this::mapResult).collect(Collectors.toList()));
+    result.setFacets(
+        searchResultMetadata.getAggregations().stream()
+            .map(MapperUtils::mapFacet)
+            .collect(Collectors.toList()));
 
     if (input.hasFreshness()) {
       FreshnessStats outputFreshness = new FreshnessStats();
       outputFreshness.setCached(input.getFreshness().isCached());
-      outputFreshness.setSystemFreshness(input.getFreshness().getSystemFreshness().entrySet().stream().map(x ->
-          SystemFreshness.builder()
-              .setSystemName(x.getKey())
-              .setFreshnessMillis(x.getValue())
-              .build()).collect(Collectors.toList()));
+      outputFreshness.setSystemFreshness(
+          input.getFreshness().getSystemFreshness().entrySet().stream()
+              .map(
+                  x ->
+                      SystemFreshness.builder()
+                          .setSystemName(x.getKey())
+                          .setFreshnessMillis(x.getValue())
+                          .build())
+              .collect(Collectors.toList()));
       result.setFreshness(outputFreshness);
     }
     return result;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java
index b16e2f10d1df7..d814c44e469bc 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java
@@ -6,7 +6,6 @@
 import com.linkedin.metadata.search.SearchResultMetadata;
 import java.util.stream.Collectors;
 
-
 public class UrnSearchResultsMapper<T extends RecordTemplate, E extends Entity> {
   public static <T extends RecordTemplate, E extends Entity> SearchResults map(
       com.linkedin.metadata.search.SearchResult searchResult) {
@@ -25,9 +24,16 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) {
     result.setTotal(input.getNumEntities());
 
     final SearchResultMetadata searchResultMetadata = input.getMetadata();
-    result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList()));
-    result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList()));
-    result.setSuggestions(searchResultMetadata.getSuggestions().stream().map(MapperUtils::mapSearchSuggestion).collect(Collectors.toList()));
+    result.setSearchResults(
+        input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList()));
+    result.setFacets(
+        searchResultMetadata.getAggregations().stream()
+            .map(MapperUtils::mapFacet)
+            .collect(Collectors.toList()));
+    result.setSuggestions(
+        searchResultMetadata.getSuggestions().stream()
+            .map(MapperUtils::mapSearchSuggestion)
+            .collect(Collectors.toList()));
 
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java
index f5594afc1a5b5..da3ddd1115437 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -25,8 +28,8 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -38,103 +41,122 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-public class MLFeatureTableType implements SearchableEntityType<MLFeatureTable, String>,
-    BrowsableEntityType<MLFeatureTable, String> {
-
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("platform", "name");
-    private final EntityClient _entityClient;
-
-    public MLFeatureTableType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.MLFEATURE_TABLE;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<MLFeatureTable> objectClass() {
-        return MLFeatureTable.class;
-    }
-
-    @Override
-    public List<DataFetcherResult<MLFeatureTable>> batchLoad(final List<String> urns, final QueryContext context) throws Exception {
-        final List<Urn> mlFeatureTableUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> mlFeatureTableMap = _entityClient.batchGetV2(ML_FEATURE_TABLE_ENTITY_NAME,
-                new HashSet<>(mlFeatureTableUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = mlFeatureTableUrns.stream()
-                .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null))
-                .collect(Collectors.toList());
-
-            return gmsResults.stream()
-                .map(gmsMlFeatureTable -> gmsMlFeatureTable == null ? null
-                    : DataFetcherResult.<MLFeatureTable>newResult()
-                        .data(MLFeatureTableMapper.map(gmsMlFeatureTable))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLFeatureTables", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlFeatureTable", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlFeatureTable", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
-            "mlFeatureTable",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
-
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
+public class MLFeatureTableType
+    implements SearchableEntityType<MLFeatureTable, String>,
+        BrowsableEntityType<MLFeatureTable, String> {
+
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("platform", "name");
+  private final EntityClient _entityClient;
+
+  public MLFeatureTableType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.MLFEATURE_TABLE;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<MLFeatureTable> objectClass() {
+    return MLFeatureTable.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<MLFeatureTable>> batchLoad(
+      final List<String> urns, final QueryContext context) throws Exception {
+    final List<Urn> mlFeatureTableUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> mlFeatureTableMap =
+          _entityClient.batchGetV2(
+              ML_FEATURE_TABLE_ENTITY_NAME,
+              new HashSet<>(mlFeatureTableUrns),
+              null,
+              context.getAuthentication());
+
+      final List<EntityResponse> gmsResults =
+          mlFeatureTableUrns.stream()
+              .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null))
+              .collect(Collectors.toList());
+
+      return gmsResults.stream()
+          .map(
+              gmsMlFeatureTable ->
+                  gmsMlFeatureTable == null
+                      ? null
+                      : DataFetcherResult.<MLFeatureTable>newResult()
+                          .data(MLFeatureTableMapper.map(gmsMlFeatureTable))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLFeatureTables", e);
     }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlFeatureTable",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete(
+            "mlFeatureTable", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "mlFeatureTable", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java
index f5e0d80948bcc..6f94ea44cd476 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -18,8 +20,8 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -31,78 +33,94 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class MLFeatureType implements SearchableEntityType<MLFeature, String> {
 
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("");
-    private final EntityClient _entityClient;
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("");
+  private final EntityClient _entityClient;
 
-    public MLFeatureType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
+  public MLFeatureType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
 
-    @Override
-    public EntityType type() {
-        return EntityType.MLFEATURE;
-    }
+  @Override
+  public EntityType type() {
+    return EntityType.MLFEATURE;
+  }
 
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
 
-    @Override
-    public Class<MLFeature> objectClass() {
-        return MLFeature.class;
-    }
+  @Override
+  public Class<MLFeature> objectClass() {
+    return MLFeature.class;
+  }
 
-    @Override
-    public List<DataFetcherResult<MLFeature>> batchLoad(final List<String> urns, @Nonnull final QueryContext context)
-        throws Exception {
-        final List<Urn> mlFeatureUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
+  @Override
+  public List<DataFetcherResult<MLFeature>> batchLoad(
+      final List<String> urns, @Nonnull final QueryContext context) throws Exception {
+    final List<Urn> mlFeatureUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
 
-        try {
-            final Map<Urn, EntityResponse> mlFeatureMap = _entityClient.batchGetV2(ML_FEATURE_ENTITY_NAME,
-                new HashSet<>(mlFeatureUrns), null, context.getAuthentication());
+    try {
+      final Map<Urn, EntityResponse> mlFeatureMap =
+          _entityClient.batchGetV2(
+              ML_FEATURE_ENTITY_NAME,
+              new HashSet<>(mlFeatureUrns),
+              null,
+              context.getAuthentication());
 
-            final List<EntityResponse> gmsResults = mlFeatureUrns.stream()
-                .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null))
-                .collect(Collectors.toList());
+      final List<EntityResponse> gmsResults =
+          mlFeatureUrns.stream()
+              .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null))
+              .collect(Collectors.toList());
 
-            return gmsResults.stream()
-                .map(gmsMlFeature -> gmsMlFeature == null ? null
-                    : DataFetcherResult.<MLFeature>newResult()
-                        .data(MLFeatureMapper.map(gmsMlFeature))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLFeatures", e);
-        }
+      return gmsResults.stream()
+          .map(
+              gmsMlFeature ->
+                  gmsMlFeature == null
+                      ? null
+                      : DataFetcherResult.<MLFeature>newResult()
+                          .data(MLFeatureMapper.map(gmsMlFeature))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLFeatures", e);
     }
+  }
 
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlFeature", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlFeature",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
 
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java
index 05b70c15bafc6..d505b70effdd4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -25,8 +28,8 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -38,104 +41,123 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-public class MLModelGroupType implements SearchableEntityType<MLModelGroup, String>,
-    BrowsableEntityType<MLModelGroup, String> {
-
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
-    private final EntityClient _entityClient;
-
-    public MLModelGroupType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.MLMODEL_GROUP;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<MLModelGroup> objectClass() {
-        return MLModelGroup.class;
-    }
-
-    @Override
-    public List<DataFetcherResult<MLModelGroup>> batchLoad(final List<String> urns, @Nonnull final QueryContext context)
-        throws Exception {
-        final List<Urn> mlModelGroupUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> mlModelMap = _entityClient.batchGetV2(ML_MODEL_GROUP_ENTITY_NAME,
-                new HashSet<>(mlModelGroupUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = mlModelGroupUrns.stream()
-                .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
-                .collect(Collectors.toList());
-
-            return gmsResults.stream()
-                .map(gmsMlModelGroup -> gmsMlModelGroup == null ? null
-                    : DataFetcherResult.<MLModelGroup>newResult()
-                        .data(MLModelGroupMapper.map(gmsMlModelGroup))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLModelGroups", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlModelGroup", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlModelGroup", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
-            "mlModelGroup",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
-
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
+public class MLModelGroupType
+    implements SearchableEntityType<MLModelGroup, String>,
+        BrowsableEntityType<MLModelGroup, String> {
+
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
+  private final EntityClient _entityClient;
+
+  public MLModelGroupType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.MLMODEL_GROUP;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<MLModelGroup> objectClass() {
+    return MLModelGroup.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<MLModelGroup>> batchLoad(
+      final List<String> urns, @Nonnull final QueryContext context) throws Exception {
+    final List<Urn> mlModelGroupUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> mlModelMap =
+          _entityClient.batchGetV2(
+              ML_MODEL_GROUP_ENTITY_NAME,
+              new HashSet<>(mlModelGroupUrns),
+              null,
+              context.getAuthentication());
+
+      final List<EntityResponse> gmsResults =
+          mlModelGroupUrns.stream()
+              .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
+              .collect(Collectors.toList());
+
+      return gmsResults.stream()
+          .map(
+              gmsMlModelGroup ->
+                  gmsMlModelGroup == null
+                      ? null
+                      : DataFetcherResult.<MLModelGroup>newResult()
+                          .data(MLModelGroupMapper.map(gmsMlModelGroup))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLModelGroups", e);
    }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlModelGroup",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete(
+            "mlModelGroup", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "mlModelGroup", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(
+            MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java
index ef4be247a246b..27b791d78e78e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -25,8 +28,8 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -38,102 +41,116 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-public class MLModelType implements SearchableEntityType<MLModel, String>, BrowsableEntityType<MLModel, String> {
-
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
-    private final EntityClient _entityClient;
-
-    public MLModelType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.MLMODEL;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<MLModel> objectClass() {
-        return MLModel.class;
-    }
-
-    @Override
-    public List<DataFetcherResult<MLModel>> batchLoad(final List<String> urns, final QueryContext context) throws Exception {
-        final List<Urn> mlModelUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> mlModelMap = _entityClient.batchGetV2(ML_MODEL_ENTITY_NAME,
-                new HashSet<>(mlModelUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = mlModelUrns.stream()
-                .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
-                .collect(Collectors.toList());
-
-            return gmsResults.stream()
-                .map(gmsMlModel -> gmsMlModel == null ? null
-                    : DataFetcherResult.<MLModel>newResult()
-                        .data(MLModelMapper.map(gmsMlModel))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLModels", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlModel", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
-            "mlModel",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
-
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
+public class MLModelType
+    implements SearchableEntityType<MLModel, String>, BrowsableEntityType<MLModel, String> {
+
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
+  private final EntityClient _entityClient;
+
+  public MLModelType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.MLMODEL;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<MLModel> objectClass() {
+    return MLModel.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<MLModel>> batchLoad(
+      final List<String> urns, final QueryContext context) throws Exception {
+    final List<Urn> mlModelUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> mlModelMap =
+          _entityClient.batchGetV2(
+              ML_MODEL_ENTITY_NAME, new HashSet<>(mlModelUrns), null, context.getAuthentication());
+
+      final List<EntityResponse> gmsResults =
+          mlModelUrns.stream()
+              .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
+              .collect(Collectors.toList());
+
+      return gmsResults.stream()
+          .map(
+              gmsMlModel ->
+                  gmsMlModel == null
+                      ? null
+                      : DataFetcherResult.<MLModel>newResult()
+                          .data(MLModelMapper.map(gmsMlModel))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLModels", e);
    }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlModel",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "mlModel", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java
index ff51bab6c114e..ccecb0ae6406f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java
@@ -1,44 +1,47 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
-import java.net.URISyntaxException;
-
 import com.linkedin.common.urn.MLFeatureUrn;
 import com.linkedin.common.urn.MLModelUrn;
 import com.linkedin.common.urn.Urn;
+import java.net.URISyntaxException;
 
 public class MLModelUtils {
 
-    private MLModelUtils() { }
+  private MLModelUtils() {}
 
-    static MLModelUrn getMLModelUrn(String modelUrn) {
-        try {
-            return MLModelUrn.createFromString(modelUrn);
-        } catch (URISyntaxException uriSyntaxException) {
-            throw new RuntimeException(String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn));
-        }
+  static MLModelUrn getMLModelUrn(String modelUrn) {
+    try {
+      return MLModelUrn.createFromString(modelUrn);
+    } catch (URISyntaxException uriSyntaxException) {
+      throw new RuntimeException(
+          String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn));
    }
-
-    static Urn getMLModelGroupUrn(String modelUrn) {
-        try {
-            return Urn.createFromString(modelUrn);
-        } catch (URISyntaxException
uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getMLModelGroupUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); } - - static MLFeatureUrn getMLFeatureUrn(String modelUrn) { - try { - return MLFeatureUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); - } + } + + static MLFeatureUrn getMLFeatureUrn(String modelUrn) { + try { + return MLFeatureUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); } - - static Urn getUrn(String modelUrn) { - try { - return Urn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index a6963e6b20abd..10cfe181dd292 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,8 +20,8 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +33,95 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLPrimaryKeyType implements SearchableEntityType<MLPrimaryKey, String> { - private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); - private final EntityClient _entityClient; + private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLPrimaryKeyType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLPrimaryKeyType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLPRIMARY_KEY; - } + @Override + public EntityType type() { + return EntityType.MLPRIMARY_KEY; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + 
@Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<MLPrimaryKey> objectClass() { - return MLPrimaryKey.class; - } + @Override + public Class<MLPrimaryKey> objectClass() { + return MLPrimaryKey.class; + } - @Override - public List<DataFetcherResult<MLPrimaryKey>> batchLoad(final List<String> urns, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> mlPrimaryKeyUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<MLPrimaryKey>> batchLoad( + final List<String> urns, @Nonnull final QueryContext context) throws Exception { + final List<Urn> mlPrimaryKeyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> mlPrimaryKeyMap = _entityClient.batchGetV2(ML_PRIMARY_KEY_ENTITY_NAME, - new HashSet<>(mlPrimaryKeyUrns), null, context.getAuthentication()); + try { + final Map<Urn, EntityResponse> mlPrimaryKeyMap = + _entityClient.batchGetV2( + ML_PRIMARY_KEY_ENTITY_NAME, + new HashSet<>(mlPrimaryKeyUrns), + null, + context.getAuthentication()); - final List<EntityResponse> gmsResults = mlPrimaryKeyUrns.stream() - .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) - .collect(Collectors.toList()); + final List<EntityResponse> gmsResults = + mlPrimaryKeyUrns.stream() + .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) + .collect(Collectors.toList()); - return gmsResults.stream() - .map(gmsMlPrimaryKey -> gmsMlPrimaryKey == null ? null - : DataFetcherResult.<MLPrimaryKey>newResult() - .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); - } + return gmsResults.stream() + .map( + gmsMlPrimaryKey -> + gmsMlPrimaryKey == null + ? 
null + : DataFetcherResult.<MLPrimaryKey>newResult() + .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlPrimaryKey", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlPrimaryKey", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlPrimaryKey", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlPrimaryKey", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java index c82909d49acbf..7db1216e1390d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.BaseData; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class BaseDataMapper implements ModelMapper<com.linkedin.ml.metadata.BaseData, BaseData> { - public static final BaseDataMapper INSTANCE = new BaseDataMapper(); + public static final BaseDataMapper INSTANCE = new BaseDataMapper(); - public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { - return INSTANCE.apply(input); - } + public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { + return INSTANCE.apply(input); + } - @Override - public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { - final BaseData result = new BaseData(); - result.setDataset(input.getDataset().toString()); - result.setMotivation(input.getMotivation()); - result.setPreProcessing(input.getPreProcessing()); - return result; - } + 
@Override + public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { + final BaseData result = new BaseData(); + result.setDataset(input.getDataset().toString()); + result.setMotivation(input.getMotivation()); + result.setPreProcessing(input.getPreProcessing()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java index c19cb7bae2aff..108717f325f68 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java @@ -2,29 +2,34 @@ import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class CaveatsAndRecommendationsMapper implements ModelMapper<com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> { +public class CaveatsAndRecommendationsMapper + implements ModelMapper< + com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> { - public static final CaveatsAndRecommendationsMapper INSTANCE = new CaveatsAndRecommendationsMapper(); + public static final CaveatsAndRecommendationsMapper INSTANCE = + new CaveatsAndRecommendationsMapper(); - public static CaveatsAndRecommendations map(@NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - return INSTANCE.apply(caveatsAndRecommendations); - } + public static CaveatsAndRecommendations map( + @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { + return INSTANCE.apply(caveatsAndRecommendations); + } - @Override - public CaveatsAndRecommendations apply(com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); - if (caveatsAndRecommendations.getCaveats() != null) { - result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); - } - if (caveatsAndRecommendations.getRecommendations() != null) { - result.setRecommendations(caveatsAndRecommendations.getRecommendations()); - } - if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) { - result.setIdealDatasetCharacteristics(caveatsAndRecommendations.getIdealDatasetCharacteristics()); - } - return result; + @Override + public CaveatsAndRecommendations apply( + com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { + final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); + if (caveatsAndRecommendations.getCaveats() != null) { + result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); + } + if (caveatsAndRecommendations.getRecommendations() != null) { + result.setRecommendations(caveatsAndRecommendations.getRecommendations()); + } + if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) { + result.setIdealDatasetCharacteristics( + caveatsAndRecommendations.getIdealDatasetCharacteristics()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java index 22617a8bc03e7..2226197e673f5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java @@ -2,24 +2,24 @@ import com.linkedin.datahub.graphql.generated.CaveatDetails; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class CaveatsDetailsMapper implements ModelMapper<com.linkedin.ml.metadata.CaveatDetails, CaveatDetails> { +public class CaveatsDetailsMapper + implements ModelMapper<com.linkedin.ml.metadata.CaveatDetails, CaveatDetails> { - public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); + public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); - public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - return INSTANCE.apply(input); - } + public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + return INSTANCE.apply(input); + } - @Override - public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - final CaveatDetails result = new CaveatDetails(); + @Override + public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + final CaveatDetails result = new CaveatDetails(); - result.setCaveatDescription(input.getCaveatDescription()); - result.setGroupsNotRepresented(input.getGroupsNotRepresented()); - result.setNeedsFurtherTesting(input.isNeedsFurtherTesting()); - return result; - } + result.setCaveatDescription(input.getCaveatDescription()); + result.setGroupsNotRepresented(input.getGroupsNotRepresented()); + result.setNeedsFurtherTesting(input.isNeedsFurtherTesting()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java index 1d967619d43cb..8959e59265e14 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java @@ -2,25 +2,27 @@ import com.linkedin.datahub.graphql.generated.EthicalConsiderations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class EthicalConsiderationsMapper implements ModelMapper<com.linkedin.ml.metadata.EthicalConsiderations, EthicalConsiderations> { +public class EthicalConsiderationsMapper + implements ModelMapper<com.linkedin.ml.metadata.EthicalConsiderations, EthicalConsiderations> { - public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); + public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); - public static EthicalConsiderations map(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - return INSTANCE.apply(ethicalConsiderations); - } + public static EthicalConsiderations map( + @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { + return INSTANCE.apply(ethicalConsiderations); + } - @Override - public 
EthicalConsiderations apply(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - final EthicalConsiderations result = new EthicalConsiderations(); - result.setData(ethicalConsiderations.getData()); - result.setHumanLife(ethicalConsiderations.getHumanLife()); - result.setMitigations(ethicalConsiderations.getMitigations()); - result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms()); - result.setUseCases(ethicalConsiderations.getUseCases()); - return result; - } + @Override + public EthicalConsiderations apply( + @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { + final EthicalConsiderations result = new EthicalConsiderations(); + result.setData(ethicalConsiderations.getData()); + result.setHumanLife(ethicalConsiderations.getHumanLife()); + result.setMitigations(ethicalConsiderations.getMitigations()); + result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms()); + result.setUseCases(ethicalConsiderations.getUseCases()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java index 73aa8db362a54..212db94081371 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java @@ -3,26 +3,26 @@ import com.linkedin.datahub.graphql.generated.HyperParameterMap; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.ml.metadata.HyperParameterValueTypeMap; - import lombok.NonNull; -public class HyperParameterMapMapper implements ModelMapper<HyperParameterValueTypeMap, HyperParameterMap> { - - public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); +public class HyperParameterMapMapper + implements ModelMapper<HyperParameterValueTypeMap, HyperParameterMap> { - public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { - return INSTANCE.apply(input); - } + public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); - @Override - public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { - final HyperParameterMap result = new HyperParameterMap(); + public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { + return INSTANCE.apply(input); + } - for (String key: input.keySet()) { - result.setKey(key); - result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); - } + @Override + public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { + final HyperParameterMap result = new HyperParameterMap(); - return result; + for (String key : input.keySet()) { + result.setKey(key); + result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java index 6509b0e6cfa84..f60f34dd7a085 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java @@ -6,34 +6,37 @@ import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class HyperParameterValueTypeMapper implements ModelMapper<com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> { +public class HyperParameterValueTypeMapper + implements ModelMapper< + com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> { - public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); + public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); - public static HyperParameterValueType map(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - return INSTANCE.apply(input); - } + public static HyperParameterValueType map( + @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { + return INSTANCE.apply(input); + } - @Override - public HyperParameterValueType apply(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - HyperParameterValueType result = null; + @Override + public HyperParameterValueType apply( + @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { + HyperParameterValueType result = null; - if (input.isString()) { - result = new StringBox(input.getString()); - } else if (input.isBoolean()) { - result = new BooleanBox(input.getBoolean()); - } else if (input.isInt()) { - result = new IntBox(input.getInt()); - } else if (input.isDouble()) { - result = new FloatBox(input.getDouble()); - } else if (input.isFloat()) { - result = new FloatBox(new Double(input.getFloat())); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); - } - return result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else if (input.isBoolean()) { + result = new BooleanBox(input.getBoolean()); + } else if (input.isInt()) { + result = new IntBox(input.getInt()); + } else if (input.isDouble()) { + result = new FloatBox(input.getDouble()); + } else if (input.isFloat()) { + result = new FloatBox(new Double(input.getFloat())); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java index 47598bc2a3e4c..9f724ae71a55e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java @@ -1,29 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.IntendedUse; import com.linkedin.datahub.graphql.generated.IntendedUserType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; -public class IntendedUseMapper implements ModelMapper<com.linkedin.ml.metadata.IntendedUse, IntendedUse> { +public class IntendedUseMapper + implements 
ModelMapper<com.linkedin.ml.metadata.IntendedUse, IntendedUse> { - public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); + public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); - public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - return INSTANCE.apply(intendedUse); - } + public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + return INSTANCE.apply(intendedUse); + } - @Override - public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - final IntendedUse result = new IntendedUse(); - result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); - result.setPrimaryUses(intendedUse.getPrimaryUses()); - if (intendedUse.getPrimaryUsers() != null) { - result.setPrimaryUsers(intendedUse.getPrimaryUsers().stream().map(v -> IntendedUserType.valueOf(v.toString())).collect(Collectors.toList())); - } - return result; + @Override + public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + final IntendedUse result = new IntendedUse(); + result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); + result.setPrimaryUses(intendedUse.getPrimaryUses()); + if (intendedUse.getPrimaryUsers() != null) { + result.setPrimaryUsers( + intendedUse.getPrimaryUsers().stream() + .map(v -> IntendedUserType.valueOf(v.toString())) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java index 010ae477251f3..58e59edfa2e38 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - - import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -37,91 +37,105 @@ import com.linkedin.ml.metadata.MLFeatureProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLFeatureMapper implements ModelMapper<EntityResponse, MLFeature> { - public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); + public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); - public static MLFeature map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLFeature map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLFeature apply(@Nonnull final EntityResponse entityResponse) { - final MLFeature result = new MLFeature(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLFeature apply(@Nonnull final EntityResponse entityResponse) { + final MLFeature result = new MLFeature(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLFeature> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) -> + MappingHelper<MLFeature> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) -> + mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, 
(dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + return mappingHelper.getResult(); + } + + private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); + mlFeature.setName(mlFeatureKey.getName()); + mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); + } + + private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); + mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setDescription(featureProperties.getDescription()); + if (featureProperties.getDataType() != null) { + mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); } - - private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); - mlFeature.setName(mlFeatureKey.getName()); - mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); - } - - private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setDescription(featureProperties.getDescription()); - if (featureProperties.getDataType() != null) { - mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); - } - } - - private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeature entity, DataMap dataMap) { - EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); - MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeature entity, DataMap dataMap) { + EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); + MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 9d647a38d2153..7bcefbc305192 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,44 +1,46 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; -import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; -public class MLFeaturePropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> { +public class MLFeaturePropertiesMapper + implements ModelMapper<com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> { - public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); + public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); - public static MLFeatureProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(mlFeatureProperties); - } + public static MLFeatureProperties map( + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + return INSTANCE.apply(mlFeatureProperties); + } - @Override - public MLFeatureProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - final MLFeatureProperties result = new MLFeatureProperties(); + @Override + public MLFeatureProperties apply( + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties 
mlFeatureProperties) { + final MLFeatureProperties result = new MLFeatureProperties(); - result.setDescription(mlFeatureProperties.getDescription()); - if (mlFeatureProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); - } - if (mlFeatureProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); - } - if (mlFeatureProperties.getSources() != null) { - result.setSources(mlFeatureProperties - .getSources() - .stream() - .map(urn -> { + result.setDescription(mlFeatureProperties.getDescription()); + if (mlFeatureProperties.getDataType() != null) { + result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); + } + if (mlFeatureProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); + } + if (mlFeatureProperties.getSources() != null) { + result.setSources( + mlFeatureProperties.getSources().stream() + .map( + urn -> { final Dataset dataset = new Dataset(); dataset.setUrn(urn.toString()); return dataset; - }) - .collect(Collectors.toList())); - } - - return result; + }) + .collect(Collectors.toList())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index 3ba9a76c4bdde..d074e14f95c82 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -1,9 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -36,90 +37,111 @@ import com.linkedin.ml.metadata.MLFeatureTableProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLFeatureTableMapper implements ModelMapper<EntityResponse, MLFeatureTable> { - public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); + public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); - public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { - final MLFeatureTable result = new MLFeatureTable(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { + final MLFeatureTable result = new MLFeatureTable(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE_TABLE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE_TABLE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) -> + MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); - mappingHelper.mapToResult(ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) -> + mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + mlFeatureTable.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) -> + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - 
mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLFeatureTableKey(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { - MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); - mlFeatureTable.setName(mlFeatureTableKey.getName()); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); - mlFeatureTable.setPlatform(partialPlatform); - } - - private void mapMLFeatureTableProperties(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { - MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); - mlFeatureTable.setFeatureTableProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setDescription(featureTableProperties.getDescription()); - } - - private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { - EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); - MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + return mappingHelper.getResult(); + } + + private void mapMLFeatureTableKey( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { + MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); + mlFeatureTable.setName(mlFeatureTableKey.getName()); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); + mlFeatureTable.setPlatform(partialPlatform); + } + + private void mapMLFeatureTableProperties( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { + MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); + mlFeatureTable.setFeatureTableProperties( + MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + mlFeatureTable.setProperties( + MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + mlFeatureTable.setDescription(featureTableProperties.getDescription()); + } + + private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { + EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); + MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index 13e3c79599725..fff504d43c81a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -5,46 +5,55 @@ import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; public class MLFeatureTablePropertiesMapper { - public static final MLFeatureTablePropertiesMapper INSTANCE = new MLFeatureTablePropertiesMapper(); - - public static MLFeatureTableProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + public static final MLFeatureTablePropertiesMapper INSTANCE = + new MLFeatureTablePropertiesMapper(); + + public static MLFeatureTableProperties map( + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + } + + public MLFeatureTableProperties apply( + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + final MLFeatureTableProperties result = new MLFeatureTableProperties(); + + result.setDescription(mlFeatureTableProperties.getDescription()); + if (mlFeatureTableProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlFeatureTableProperties.getMlFeatures().stream() + .map( + urn -> { + final MLFeature mlFeature = new MLFeature(); + mlFeature.setUrn(urn.toString()); + return mlFeature; + }) + .collect(Collectors.toList())); } - public MLFeatureTableProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - final MLFeatureTableProperties result = new MLFeatureTableProperties(); - - result.setDescription(mlFeatureTableProperties.getDescription()); - if (mlFeatureTableProperties.getMlFeatures() != null) { - result.setMlFeatures(mlFeatureTableProperties.getMlFeatures().stream().map(urn -> { - final MLFeature mlFeature = new MLFeature(); - mlFeature.setUrn(urn.toString()); - return mlFeature; - }).collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { - result.setMlPrimaryKeys(mlFeatureTableProperties - .getMlPrimaryKeys() - .stream() - .map(urn -> { + if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { + result.setMlPrimaryKeys( + mlFeatureTableProperties.getMlPrimaryKeys().stream() + 
.map( + urn -> { final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey(); mlPrimaryKey.setUrn(urn.toString()); return mlPrimaryKey; - }) - .collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); - } + }) + .collect(Collectors.toList())); + } - return result; + if (mlFeatureTableProperties.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java index 5cc242d0b19f2..bb3c85e411e71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java @@ -2,25 +2,25 @@ import com.linkedin.datahub.graphql.generated.MLHyperParam; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class MLHyperParamMapper implements ModelMapper<com.linkedin.ml.metadata.MLHyperParam, MLHyperParam> { +public class MLHyperParamMapper + implements ModelMapper<com.linkedin.ml.metadata.MLHyperParam, MLHyperParam> { - public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); + public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); - public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - return INSTANCE.apply(input); - } + public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + return INSTANCE.apply(input); + } - @Override - public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - final MLHyperParam result = new MLHyperParam(); + @Override + public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + final MLHyperParam result = new MLHyperParam(); - result.setDescription(input.getDescription()); - result.setValue(input.getValue()); - result.setCreatedAt(input.getCreatedAt()); - result.setName(input.getName()); - return result; - } + result.setDescription(input.getDescription()); + result.setValue(input.getValue()); + result.setCreatedAt(input.getCreatedAt()); + result.setName(input.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java index 2545bd5f8a848..765a44d218567 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java @@ -4,22 +4,21 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import lombok.NonNull; - public class MLMetricMapper implements ModelMapper<com.linkedin.ml.metadata.MLMetric, MLMetric> { - public static final MLMetricMapper INSTANCE = new MLMetricMapper(); + public static final MLMetricMapper INSTANCE = new MLMetricMapper(); - public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - return 
INSTANCE.apply(metric); - } + public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + return INSTANCE.apply(metric); + } - @Override - public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - final MLMetric result = new MLMetric(); - result.setDescription(metric.getDescription()); - result.setValue(metric.getValue()); - result.setCreatedAt(metric.getCreatedAt()); - result.setName(metric.getName()); - return result; - } + @Override + public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + final MLMetric result = new MLMetric(); + result.setDescription(metric.getDescription()); + result.setValue(metric.getValue()); + result.setCreatedAt(metric.getCreatedAt()); + result.setName(metric.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java index 0d32f7275e5fe..e86072ce3848e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java @@ -1,29 +1,36 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; -public class MLModelFactorPromptsMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelFactorPrompts, MLModelFactorPrompts> { +public class MLModelFactorPromptsMapper + implements ModelMapper<com.linkedin.ml.metadata.MLModelFactorPrompts, MLModelFactorPrompts> { - public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); + public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); - public static MLModelFactorPrompts map(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - return INSTANCE.apply(input); - } + public static MLModelFactorPrompts map( + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + return INSTANCE.apply(input); + } - @Override - public MLModelFactorPrompts apply(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); - if (input.getEvaluationFactors() != null) { - mlModelFactorPrompts.setEvaluationFactors(input.getEvaluationFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - if (input.getRelevantFactors() != null) { - mlModelFactorPrompts.setRelevantFactors(input.getRelevantFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - return mlModelFactorPrompts; + @Override + public MLModelFactorPrompts apply( + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); + if (input.getEvaluationFactors() != null) { + mlModelFactorPrompts.setEvaluationFactors( + input.getEvaluationFactors().stream() + .map(MLModelFactorsMapper::map) + .collect(Collectors.toList())); + } + if (input.getRelevantFactors() != null) { + mlModelFactorPrompts.setRelevantFactors( + 
input.getRelevantFactors().stream() + .map(MLModelFactorsMapper::map) + .collect(Collectors.toList())); } + return mlModelFactorPrompts; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java index aa4737dfd229c..3b212eca52801 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java @@ -1,32 +1,33 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.ArrayList; - import com.linkedin.datahub.graphql.generated.MLModelFactors; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.ArrayList; import lombok.NonNull; -public class MLModelFactorsMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelFactors, MLModelFactors> { +public class MLModelFactorsMapper + implements ModelMapper<com.linkedin.ml.metadata.MLModelFactors, MLModelFactors> { - public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); + public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); - public static MLModelFactors map(@NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { - return INSTANCE.apply(modelFactors); - } + public static MLModelFactors map( + @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { + return INSTANCE.apply(modelFactors); + } - @Override - public MLModelFactors apply(@NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { - final MLModelFactors result = new MLModelFactors(); - if (mlModelFactors.getEnvironment() != null) { - result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); - } - if (mlModelFactors.getGroups() != null) { - result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); - } - if (mlModelFactors.getInstrumentation() != null) { - result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); - } - return result; + @Override + public MLModelFactors apply( + @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { + final MLModelFactors result = new MLModelFactors(); + if (mlModelFactors.getEnvironment() != null) { + result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); + } + if (mlModelFactors.getGroups() != null) { + result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); + } + if (mlModelFactors.getInstrumentation() != null) { + result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index 311ee121bcaf9..cc9baaa33a660 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import 
com.linkedin.common.Deprecation; @@ -34,90 +36,102 @@ import com.linkedin.ml.metadata.MLModelGroupProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelGroupMapper implements ModelMapper<EntityResponse, MLModelGroup> { - public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); + public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); - public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { - final MLModelGroup result = new MLModelGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { + final MLModelGroup result = new MLModelGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLModelGroup> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) -> + MappingHelper<MLModelGroup> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); - mappingHelper.mapToResult(ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) -> + mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); + mappingHelper.mapToResult( + ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - 
mappingHelper.mapToResult(ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); - mlModelGroup.setName(mlModelGroupKey.getName()); - mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); - mlModelGroup.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); + mlModelGroup.setName(mlModelGroupKey.getName()); + mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); + mlModelGroup.setPlatform(partialPlatform); + } + + private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); + mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); + if (modelGroupProperties.getDescription() != null) { + mlModelGroup.setDescription(modelGroupProperties.getDescription()); } - - private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); - if (modelGroupProperties.getDescription() != null) { - mlModelGroup.setDescription(modelGroupProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { - EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); - MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { + EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); + MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index 9a12d7917e648..bae60a026b49a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,28 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import lombok.NonNull; -public class MLModelGroupPropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { - - public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); +public class MLModelGroupPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { - public static MLModelGroupProperties map(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(mlModelGroupProperties); - } + public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); - @Override - public MLModelGroupProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - final MLModelGroupProperties result = new MLModelGroupProperties(); + public static MLModelGroupProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + return INSTANCE.apply(mlModelGroupProperties); + } - result.setDescription(mlModelGroupProperties.getDescription()); - if (mlModelGroupProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); - } - 
result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + @Override + public MLModelGroupProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + final MLModelGroupProperties result = new MLModelGroupProperties(); - return result; + result.setDescription(mlModelGroupProperties.getDescription()); + if (mlModelGroupProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); } + result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 0c2eeabe5701d..827b35c282237 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.Cost; import com.linkedin.common.DataPlatformInstance; @@ -48,124 +50,165 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelMapper implements ModelMapper<EntityResponse, MLModel> { - public static final MLModelMapper INSTANCE = new MLModelMapper(); + public static final MLModelMapper INSTANCE = new MLModelMapper(); - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModel map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { - final MLModel result = new MLModel(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModel apply(@Nonnull final EntityResponse entityResponse) { + final MLModel result = new MLModel(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLModel> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModel, dataMap) -> + MappingHelper<MLModel> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - 
mappingHelper.mapToResult(ML_MODEL_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); - mappingHelper.mapToResult(INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); + mappingHelper.mapToResult( + INTENDED_USE_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); - mappingHelper.mapToResult(ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setFactorPrompts(MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); - mappingHelper.mapToResult(METRICS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); - mappingHelper.mapToResult(EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEvaluationData(new EvaluationData(dataMap).getEvaluationData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(TRAINING_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setTrainingData(new TrainingData(dataMap).getTrainingData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setQuantitativeAnalyses(QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); - mappingHelper.mapToResult(ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEthicalConsiderations(EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); - mappingHelper.mapToResult(CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCaveatsAndRecommendations(CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(COST_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCost(CostMapper.map(new Cost(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setFactorPrompts( + MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); + mappingHelper.mapToResult( + METRICS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); + mappingHelper.mapToResult( + EVALUATION_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEvaluationData( + new EvaluationData(dataMap) + .getEvaluationData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + TRAINING_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setTrainingData( + new TrainingData(dataMap) + .getTrainingData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + 
QUANTITATIVE_ANALYSES_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setQuantitativeAnalyses( + QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); + mappingHelper.mapToResult( + ETHICAL_CONSIDERATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEthicalConsiderations( + EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); + mappingHelper.mapToResult( + CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setCaveatsAndRecommendations( + CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + COST_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(new Cost(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { - MLModelKey mlModelKey = new MLModelKey(dataMap); - mlModel.setName(mlModelKey.getName()); - mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelKey.getPlatform().toString()); - mlModel.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + MLModelKey mlModelKey = new MLModelKey(dataMap); + mlModel.setName(mlModelKey.getName()); + mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelKey.getPlatform().toString()); + mlModel.setPlatform(partialPlatform); + } + + private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + 
MLModelProperties modelProperties = new MLModelProperties(dataMap); + mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); + if (modelProperties.getDescription() != null) { + mlModel.setDescription(modelProperties.getDescription()); } - - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); - if (modelProperties.getDescription() != null) { - mlModel.setDescription(modelProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - mlModel.setGlobalTags(graphQlGlobalTags); - mlModel.setTags(graphQlGlobalTags); - } - - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { - SourceCode sourceCode = new SourceCode(dataMap); - com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = - new com.linkedin.datahub.graphql.generated.SourceCode(); - graphQlSourceCode.setSourceCode(sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map).collect(Collectors.toList())); - mlModel.setSourceCode(graphQlSourceCode); - } - - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModel entity, DataMap dataMap) { - EditableMLModelProperties input = new EditableMLModelProperties(dataMap); - MLModelEditableProperties editableProperties = new MLModelEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + mlModel.setGlobalTags(graphQlGlobalTags); + mlModel.setTags(graphQlGlobalTags); + } + + private void mapSourceCode(MLModel mlModel, DataMap dataMap) { + SourceCode sourceCode = new SourceCode(dataMap); + com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = + new com.linkedin.datahub.graphql.generated.SourceCode(); + graphQlSourceCode.setSourceCode( + sourceCode.getSourceCode().stream() + .map(SourceCodeUrlMapper::map) + .collect(Collectors.toList())); + mlModel.setSourceCode(graphQlSourceCode); + } + + private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModel entity, DataMap dataMap) { + EditableMLModelProperties input = new EditableMLModelProperties(dataMap); + MLModelEditableProperties editableProperties = new MLModelEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index 554c14e9a4a56..f2781f5bca5c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,65 +1,71 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.MLModelGroup; +import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; - -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.MLModelProperties; - import lombok.NonNull; public class MLModelPropertiesMapper { - public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); + public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); - public static MLModelProperties map(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return INSTANCE.apply(mlModelProperties, entityUrn); - } + public static MLModelProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + return INSTANCE.apply(mlModelProperties, entityUrn); + } - public MLModelProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - final MLModelProperties result = new MLModelProperties(); + public MLModelProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + final MLModelProperties result = new MLModelProperties(); - result.setDate(mlModelProperties.getDate()); - result.setDescription(mlModelProperties.getDescription()); - if (mlModelProperties.getExternalUrl() != null) { - result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); - } - if (mlModelProperties.getVersion() != null) { - result.setVersion(mlModelProperties.getVersion().getVersionTag()); - } - result.setType(mlModelProperties.getType()); - if (mlModelProperties.getHyperParams() != null) { - result.setHyperParams(mlModelProperties.getHyperParams().stream().map( - param -> MLHyperParamMapper.map(param)).collect(Collectors.toList())); - } - - result.setCustomProperties(CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); + result.setDate(mlModelProperties.getDate()); + result.setDescription(mlModelProperties.getDescription()); + if (mlModelProperties.getExternalUrl() != null) { + result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); + } + if (mlModelProperties.getVersion() != null) { + result.setVersion(mlModelProperties.getVersion().getVersionTag()); + } + 
result.setType(mlModelProperties.getType()); + if (mlModelProperties.getHyperParams() != null) { + result.setHyperParams( + mlModelProperties.getHyperParams().stream() + .map(param -> MLHyperParamMapper.map(param)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getTrainingMetrics() != null) { - result.setTrainingMetrics(mlModelProperties.getTrainingMetrics().stream().map(metric -> - MLMetricMapper.map(metric) - ).collect(Collectors.toList())); - } + result.setCustomProperties( + CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); - if (mlModelProperties.getGroups() != null) { - result.setGroups(mlModelProperties.getGroups().stream().map(group -> { - final MLModelGroup subgroup = new MLModelGroup(); - subgroup.setUrn(group.toString()); - return subgroup; - }).collect(Collectors.toList())); - } + if (mlModelProperties.getTrainingMetrics() != null) { + result.setTrainingMetrics( + mlModelProperties.getTrainingMetrics().stream() + .map(metric -> MLMetricMapper.map(metric)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getMlFeatures() != null) { - result.setMlFeatures(mlModelProperties - .getMlFeatures() - .stream() - .map(Urn::toString) - .collect(Collectors.toList())); - } - result.setTags(mlModelProperties.getTags()); + if (mlModelProperties.getGroups() != null) { + result.setGroups( + mlModelProperties.getGroups().stream() + .map( + group -> { + final MLModelGroup subgroup = new MLModelGroup(); + subgroup.setUrn(group.toString()); + return subgroup; + }) + .collect(Collectors.toList())); + } - return result; + if (mlModelProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlModelProperties.getMlFeatures().stream() + .map(Urn::toString) + .collect(Collectors.toList())); } + result.setTags(mlModelProperties.getTags()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index 0bd5db4d884ae..a8efd748401f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -33,88 +35,102 @@ import com.linkedin.ml.metadata.MLPrimaryKeyProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLPrimaryKeyMapper implements ModelMapper<EntityResponse, MLPrimaryKey> { - public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); + public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { - final MLPrimaryKey result = new MLPrimaryKey(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { + final MLPrimaryKey result = new MLPrimaryKey(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLPRIMARY_KEY); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLPRIMARY_KEY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLPrimaryKey> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + MappingHelper<MLPrimaryKey> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); - mappingHelper.mapToResult(ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - 
mappingHelper.mapToResult(ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + return mappingHelper.getResult(); + } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); - mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); - mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); - } + private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); + mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); + mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); + } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); - if (primaryKeyProperties.getDataType() != null) { - mlPrimaryKey.setDataType(MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); - } + private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); + mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); + if (primaryKeyProperties.getDataType() != null) { + mlPrimaryKey.setDataType( + MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); } + } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } + private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if 
it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } + private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { - EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); - MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); + MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 39ecd96af182f..16d6120cd9dff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -4,39 +4,43 @@ import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; -public class MLPrimaryKeyPropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { +public class MLPrimaryKeyPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { - public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); + public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); - public static MLPrimaryKeyProperties map(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); - } + public static MLPrimaryKeyProperties map( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + return INSTANCE.apply(mlPrimaryKeyProperties); + } - @Override - public MLPrimaryKeyProperties apply(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); + @Override + public MLPrimaryKeyProperties apply( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); - result.setDescription(mlPrimaryKeyProperties.getDescription()); - if (mlPrimaryKeyProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); - } - if 
(mlPrimaryKeyProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); - } - result.setSources(mlPrimaryKeyProperties - .getSources() - .stream() - .map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }) + result.setDescription(mlPrimaryKeyProperties.getDescription()); + if (mlPrimaryKeyProperties.getDataType() != null) { + result.setDataType( + MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); + } + if (mlPrimaryKeyProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); + } + result.setSources( + mlPrimaryKeyProperties.getSources().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) .collect(Collectors.toList())); - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 05b34ba3acb9c..76fa8c84e9571 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class MetricsMapper implements ModelMapper<com.linkedin.ml.metadata.Metrics, Metrics> { - public static final MetricsMapper INSTANCE = new MetricsMapper(); + public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); - } + public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(metrics); + } - @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - final Metrics result = new Metrics(); - result.setDecisionThreshold(metrics.getDecisionThreshold()); - result.setPerformanceMeasures(metrics.getPerformanceMeasures()); - return result; - } + @Override + public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + final Metrics result = new Metrics(); + result.setDecisionThreshold(metrics.getDecisionThreshold()); + result.setPerformanceMeasures(metrics.getPerformanceMeasures()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index 8bd25a4474579..e46cb0a074bd7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -2,22 +2,25 @@ import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class QuantitativeAnalysesMapper implements ModelMapper<com.linkedin.ml.metadata.QuantitativeAnalyses, QuantitativeAnalyses> { +public class QuantitativeAnalysesMapper + implements 
ModelMapper<com.linkedin.ml.metadata.QuantitativeAnalyses, QuantitativeAnalyses> { - public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); + public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); - public static QuantitativeAnalyses map(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); - } + public static QuantitativeAnalyses map( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + return INSTANCE.apply(quantitativeAnalyses); + } - @Override - public QuantitativeAnalyses apply(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - final QuantitativeAnalyses result = new QuantitativeAnalyses(); - result.setIntersectionalResults(ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); - return result; - } + @Override + public QuantitativeAnalyses apply( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + final QuantitativeAnalyses result = new QuantitativeAnalyses(); + result.setIntersectionalResults( + ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 78292f08f8cad..4b6529c59db3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -3,25 +3,25 @@ import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class ResultsTypeMapper implements ModelMapper<com.linkedin.ml.metadata.ResultsType, ResultsType> { +public class ResultsTypeMapper + implements ModelMapper<com.linkedin.ml.metadata.ResultsType, ResultsType> { - public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); + public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); - } + public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(input); + } - @Override - public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - final ResultsType result; - if (input.isString()) { - result = new StringBox(input.getString()); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); - } - return result; + @Override + public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + final ResultsType result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); } + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index 79dbd2cded4c2..b6bd5efdc4217 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class SourceCodeUrlMapper implements ModelMapper<com.linkedin.ml.metadata.SourceCodeUrl, SourceCodeUrl> { - public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); +public class SourceCodeUrlMapper + implements ModelMapper<com.linkedin.ml.metadata.SourceCodeUrl, SourceCodeUrl> { + public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); - } + public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(input); + } - @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - final SourceCodeUrl results = new SourceCodeUrl(); - results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); - results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); - return results; - } + @Override + public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + final SourceCodeUrl results = new SourceCodeUrl(); + results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); + results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 6ad0945b0621f..5758a52538c1e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -2,20 +2,22 @@ import com.linkedin.common.VersionTag; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class VersionTagMapper implements ModelMapper<VersionTag, com.linkedin.datahub.graphql.generated.VersionTag> { - public static final VersionTagMapper INSTANCE = new VersionTagMapper(); +public class VersionTagMapper + implements ModelMapper<VersionTag, com.linkedin.datahub.graphql.generated.VersionTag> { + public static final VersionTagMapper INSTANCE = new VersionTagMapper(); - public static com.linkedin.datahub.graphql.generated.VersionTag map(@Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); - } + public static com.linkedin.datahub.graphql.generated.VersionTag map( + @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(versionTag); + } - @Override - public com.linkedin.datahub.graphql.generated.VersionTag 
apply(@Nonnull final VersionTag input) { - final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); - result.setVersionTag(input.getVersionTag()); - return result; - } + @Override + public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { + final com.linkedin.datahub.graphql.generated.VersionTag result = + new com.linkedin.datahub.graphql.generated.VersionTag(); + result.setVersionTag(input.getVersionTag()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index 080cdeba09f19..b6990c3816b53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.notebook; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -18,25 +21,25 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; +import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.BrowsableEntityType; import com.linkedin.datahub.graphql.types.MutableType; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookUpdateInputMapper; -import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; 
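Aside: the mapper hunks above (MetricsMapper, QuantitativeAnalysesMapper, ResultsTypeMapper, SourceCodeUrlMapper, VersionTagMapper) are formatting-only, and every one of these classes follows the same stateless singleton pattern. A minimal, self-contained sketch of that pattern, assuming ModelMapper is the one-method interface the signatures imply (the stand-in interface and the String-to-Integer mapping below are illustrative, not DataHub code):

    // Stand-in for com.linkedin.datahub.graphql.types.mappers.ModelMapper,
    // mirroring the one-method shape implied by the diffs above.
    interface ModelMapper<I, O> {
      O apply(I input);
    }

    class LengthMapper implements ModelMapper<String, Integer> {
      // One shared stateless instance; the static map() is sugar over it.
      static final LengthMapper INSTANCE = new LengthMapper();

      static Integer map(final String input) {
        return INSTANCE.apply(input);
      }

      @Override
      public Integer apply(final String input) {
        return input.length(); // hypothetical mapping logic
      }
    }

Call sites then look exactly like the mappers above: LengthMapper.map("notebook") returns 8 with no instance management at the call site.
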
import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -53,25 +56,25 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class NotebookType implements SearchableEntityType<Notebook, String>, BrowsableEntityType<Notebook, String>, - MutableType<NotebookUpdateInput, Notebook> { - static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - NOTEBOOK_KEY_ASPECT_NAME, - NOTEBOOK_INFO_ASPECT_NAME, - NOTEBOOK_CONTENT_ASPECT_NAME, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); +public class NotebookType + implements SearchableEntityType<Notebook, String>, + BrowsableEntityType<Notebook, String>, + MutableType<NotebookUpdateInput, Notebook> { + static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + NOTEBOOK_KEY_ASPECT_NAME, + NOTEBOOK_INFO_ASPECT_NAME, + NOTEBOOK_CONTENT_ASPECT_NAME, + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); private final EntityClient _entityClient; @@ -80,44 +83,68 @@ public NotebookType(EntityClient entityClient) { } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List<FacetFilterInput> filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { + @Nonnull final QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map<String, String> facetFilters = Collections.emptyMap(); - final SearchResult searchResult = _entityClient.search(NOTEBOOK_ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + NOTEBOOK_ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start, - int count, @Nonnull QueryContext context) throws Exception { + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext 
context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map<String, String> facetFilters = Collections.emptyMap(); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse(NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); return BrowseResultMapper.map(result); } @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(NotebookUrn.createFromString(urn), context.getAuthentication()); + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + NotebookUrn.createFromString(urn), context.getAuthentication()); return BrowsePathsMapper.map(result); } @@ -137,22 +164,26 @@ public Class<Notebook> objectClass() { } @Override - public List<DataFetcherResult<Notebook>> batchLoad(@Nonnull List<String> urnStrs, @Nonnull QueryContext context) - throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Notebook>> batchLoad( + @Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> notebookMap = _entityClient.batchGetV2(NOTEBOOK_ENTITY_NAME, new HashSet<>(urns), - ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map<Urn, EntityResponse> notebookMap = + _entityClient.batchGetV2( + NOTEBOOK_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); return urns.stream() .map(urn -> notebookMap.getOrDefault(urn, null)) - .map(entityResponse -> entityResponse == null - ? null - : DataFetcherResult.<Notebook>newResult() - .data(NotebookMapper.map(entityResponse)) - .build()) + .map( + entityResponse -> + entityResponse == null + ? null + : DataFetcherResult.<Notebook>newResult() + .data(NotebookMapper.map(entityResponse)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Notebook", e); @@ -165,13 +196,16 @@ public Class<NotebookUpdateInput> inputClass() { } @Override - public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) + public Notebook update( + @Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) throws Exception { if (!isAuthorized(urn, input, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); Collection<MetadataChangeProposal> proposals = NotebookUpdateInputMapper.map(input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); @@ -184,7 +218,8 @@ public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, return load(urn, context).getData(); } - private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { // Decide whether the current principal should be allowed to update the Dataset. final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( @@ -197,9 +232,9 @@ private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput u private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); List<String> specificPrivileges = new ArrayList<>(); if (updateInput.getOwnership() != null) { @@ -211,12 +246,12 @@ private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateIn if (updateInput.getTags() != null) { specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
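The comment reflowed just above describes OR-of-ANDs semantics: the DisjunctivePrivilegeGroup returned below authorizes when any one of its ConjunctivePrivilegeGroups has every privilege satisfied. A toy sketch of those semantics with simplified types (plain strings and lists, not the actual com.datahub.authorization classes):

    import java.util.List;
    import java.util.Set;

    class PrivilegeCheck {
      // Conjunctive: every privilege in the group must be held (AND).
      static boolean conjunctive(List<String> required, Set<String> held) {
        return held.containsAll(required);
      }

      // Disjunctive: any one satisfied conjunctive group suffices (OR of ANDs).
      static boolean disjunctive(List<List<String>> groups, Set<String> held) {
        return groups.stream().anyMatch(group -> conjunctive(group, held));
      }

      public static void main(String[] args) {
        Set<String> held = Set.of("EDIT_ENTITY_TAGS");
        // Passes via the tag-specific group even though EDIT_ENTITY is missing.
        System.out.println(disjunctive(
            List.of(List.of("EDIT_ENTITY"), List.of("EDIT_ENTITY_TAGS")), held));
      }
    }
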
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index 2b937c86c9779..a263e31b26faf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.GlobalTags; @@ -26,11 +28,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; import com.linkedin.datahub.graphql.types.common.mappers.ChangeAuditStampsMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -45,8 +47,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class NotebookMapper implements ModelMapper<EntityResponse, Notebook> { public static final NotebookMapper INSTANCE = new NotebookMapper(); @@ -64,41 +64,59 @@ public Notebook apply(EntityResponse response) { EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper<Notebook> mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); - mappingHelper.mapToResult(NOTEBOOK_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + NOTEBOOK_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); - mappingHelper.mapToResult(EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> notebook.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn) - )); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> - notebook.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + 
EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); return mappingHelper.getResult(); } private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); - notebook.setPlatform(DataPlatform - .builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(dataPlatformInstance.getPlatform().toString()) - .build()); - notebook.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); + notebook.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(dataPlatformInstance.getPlatform().toString()) + .build()); + notebook.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); } private void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); + com.linkedin.datahub.graphql.generated.SubTypes subTypes = + new com.linkedin.datahub.graphql.generated.SubTypes(); subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); notebook.setSubTypes(subTypes); } @@ -110,11 +128,14 @@ private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); + private void mapNotebookInfo( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = + new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); 
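The mapToResult reflows above are behavior-neutral, but the pattern they wrap is worth spelling out: each aspect name is bound to a setter lambda that runs only if that aspect came back in the entity response. MappingHelper itself is not shown in this patch, so the following is a hypothetical reimplementation of the idea, not the real class:

    import java.util.Map;
    import java.util.function.BiConsumer;

    class MiniMappingHelper<T> {
      private final Map<String, Object> aspects; // aspect name -> raw data
      private final T result;

      MiniMappingHelper(Map<String, Object> aspects, T result) {
        this.aspects = aspects;
        this.result = result;
      }

      // Invokes the setter only when the named aspect is present, so absent
      // aspects are skipped silently rather than throwing.
      void mapToResult(String aspectName, BiConsumer<T, Object> setter) {
        Object data = aspects.get(aspectName);
        if (data != null) {
          setter.accept(result, data);
        }
      }

      T getResult() {
        return result;
      }
    }
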
notebookInfo.setTitle(gmsNotebookInfo.getTitle()); - notebookInfo.setChangeAuditStamps(ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + notebookInfo.setChangeAuditStamps( + ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -122,40 +143,46 @@ private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMa } if (gmsNotebookInfo.hasCustomProperties()) { - notebookInfo.setCustomProperties(CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); + notebookInfo.setCustomProperties( + CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); } notebook.setInfo(notebookInfo); } private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - com.linkedin.notebook.NotebookContent pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); + com.linkedin.notebook.NotebookContent pegasusNotebookContent = + new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List<NotebookCell> mapNotebookCells(com.linkedin.notebook.NotebookCellArray pegasusCells) { + private List<NotebookCell> mapNotebookCells( + com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() - .map(pegasusCell -> { - NotebookCell notebookCell = new NotebookCell(); - NotebookCellType cellType = NotebookCellType.valueOf(pegasusCell.getType().toString()); - notebookCell.setType(cellType); - switch (cellType) { - case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); - break; - case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); - break; - case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); - break; - default: - throw new DataHubGraphQLException(String.format("Un-supported NotebookCellType: %s", cellType), - DataHubGraphQLErrorCode.SERVER_ERROR); - } - return notebookCell; - }) + .map( + pegasusCell -> { + NotebookCell notebookCell = new NotebookCell(); + NotebookCellType cellType = + NotebookCellType.valueOf(pegasusCell.getType().toString()); + notebookCell.setType(cellType); + switch (cellType) { + case CHART_CELL: + notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); + break; + case TEXT_CELL: + notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); + break; + case QUERY_CELL: + notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); + break; + default: + throw new DataHubGraphQLException( + String.format("Un-supported NotebookCellType: %s", cellType), + DataHubGraphQLErrorCode.SERVER_ERROR); + } + return notebookCell; + }) .collect(Collectors.toList()); } @@ -163,7 +190,8 @@ private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); - chartCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + chartCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); return chartCell; } @@ -171,7 +199,8 @@ private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = 
new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); - textCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + textCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } @@ -180,7 +209,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); - queryCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + queryCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); @@ -189,7 +219,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) } private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); + final EditableNotebookProperties editableNotebookProperties = + new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); notebookEditableProperties.setDescription(editableNotebookProperties.getDescription()); notebook.setEditableProperties(notebookEditableProperties); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0c3787d630500..0d6c70e07053f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,16 +19,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class NotebookUpdateInputMapper implements InputModelMapper<NotebookUpdateInput, Collection<MetadataChangeProposal>, - Urn> { +public class NotebookUpdateInputMapper + implements InputModelMapper<NotebookUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); - public static Collection<MetadataChangeProposal> map(@Nonnull final NotebookUpdateInput notebookUpdateInput, - @Nonnull final Urn actor) { + public static Collection<MetadataChangeProposal> map( + @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(notebookUpdateInput, actor); } @@ -39,27 +38,32 @@ public Collection<MetadataChangeProposal> apply(NotebookUpdateInput input, Urn a auditStamp.setTime(System.currentTimeMillis()); if (input.getOwnership() != null) { - 
proposals.add(updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(input.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(input.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + input.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (input.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); editableDashboardProperties.setDescription(input.getEditableProperties().getDescription()); if (!editableDashboardProperties.hasCreated()) { editableDashboardProperties.setCreated(auditStamp); } editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index 79f95ac8439a5..f7ed4c59a805a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; @@ -20,14 +22,12 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class OwnershipType implements com.linkedin.datahub.graphql.types.EntityType<OwnershipTypeEntity, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +46,17 @@ public Class<OwnershipTypeEntity> objectClass() { } @Override - public List<DataFetcherResult<OwnershipTypeEntity>> batchLoad(@Nonnull List<String> urns, - @Nonnull QueryContext context) throws Exception { - final List<Urn> ownershipTypeUrns = 
urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List<DataFetcherResult<OwnershipTypeEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> ownershipTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(OWNERSHIP_TYPE_ENTITY_NAME, new HashSet<>(ownershipTypeUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + OWNERSHIP_TYPE_ENTITY_NAME, + new HashSet<>(ownershipTypeUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -60,12 +64,16 @@ public List<DataFetcherResult<OwnershipTypeEntity>> batchLoad(@Nonnull List<Stri gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null : DataFetcherResult.<OwnershipTypeEntity>newResult() - .data(OwnershipTypeMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<OwnershipTypeEntity>newResult() + .data(OwnershipTypeMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Custom Ownership Types", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 37b59b679e3ac..9eebe95df8d8c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -14,9 +16,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipTypeMapper implements ModelMapper<EntityResponse, OwnershipTypeEntity> { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); @@ -34,12 +33,14 @@ public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { EnvelopedAspectMap aspectMap = input.getAspects(); MappingHelper<OwnershipTypeEntity> mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); return mappingHelper.getResult(); } - private void mapOwnershipTypeInfo(@Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap 
dataMap) { + private void mapOwnershipTypeInfo( + @Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { final com.linkedin.ownership.OwnershipTypeInfo gmsOwnershipTypeInfo = new com.linkedin.ownership.OwnershipTypeInfo(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 167e1615fc4cc..318818b8a2140 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -25,9 +27,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubPolicyMapper implements ModelMapper<EntityResponse, DataHubPolicy> { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); @@ -71,16 +70,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { // Change here is not executed at the moment - leaving it for the future UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -102,14 +105,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(this::mapValue) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -117,7 +126,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, 
set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 4cec59009af3f..3dea9046dcf36 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubPolicyType implements com.linkedin.datahub.graphql.types.EntityType<DataHubPolicy, String> { +public class DataHubPolicyType + implements com.linkedin.datahub.graphql.types.EntityType<DataHubPolicy, String> { static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_POLICY_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class<DataHubPolicy> objectClass() { } @Override - public List<DataFetcherResult<DataHubPolicy>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<DataHubPolicy>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List<DataFetcherResult<DataHubPolicy>> batchLoad(@Nonnull List<String> ur gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataHubPolicy>newResult().data(DataHubPolicyMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<DataHubPolicy>newResult() + .data(DataHubPolicyMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index 791197c7d47e4..f35111f78a694 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.post; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,9 +18,6 @@ import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class PostMapper implements ModelMapper<EntityResponse, Post> { public static final PostMapper INSTANCE = new PostMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index cf77821b1a280..2bdcda3592608 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; @@ -21,9 +23,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class QueryMapper implements ModelMapper<EntityResponse, QueryEntity> { public static final QueryMapper INSTANCE = new QueryMapper(); @@ -47,13 +46,15 @@ public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); - com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); + com.linkedin.datahub.graphql.generated.QueryProperties res = + new com.linkedin.datahub.graphql.generated.QueryProperties(); // Query Source must be kept in sync. 
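The "must be kept in sync" comment above is load-bearing for the QuerySource.valueOf(queryProperties.getSource().toString()) call that follows: mirroring a GMS enum into a GraphQL enum by name only works while both declare identical constants. A toy demonstration of the mechanism and its failure mode (hypothetical enums, not the real QuerySource types):

    enum UpstreamSource { MANUAL, SYSTEM }
    enum GeneratedSource { MANUAL, SYSTEM }

    class EnumMirror {
      static GeneratedSource map(UpstreamSource source) {
        // Maps by constant name; throws IllegalArgumentException the moment
        // the two enums drift apart.
        return GeneratedSource.valueOf(source.toString());
      }

      public static void main(String[] args) {
        System.out.println(map(UpstreamSource.MANUAL)); // prints MANUAL
      }
    }
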
res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())); - res.setStatement(new QueryStatement( - queryProperties.getStatement().getValue(), - QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); + res.setStatement( + new QueryStatement( + queryProperties.getStatement().getValue(), + QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); @@ -73,10 +74,10 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat @Nonnull private void mapQuerySubjects(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QuerySubjects querySubjects = new QuerySubjects(dataMap); - List<QuerySubject> res = querySubjects.getSubjects() - .stream() - .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) - .collect(Collectors.toList()); + List<QuerySubject> res = + querySubjects.getSubjects().stream() + .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) + .collect(Collectors.toList()); query.setSubjects(res); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index c138cd56f20b3..0c1fd33e38110 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -20,14 +22,11 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class QueryType implements com.linkedin.datahub.graphql.types.EntityType<QueryEntity, String> { - public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - QUERY_PROPERTIES_ASPECT_NAME, - QUERY_SUBJECTS_ASPECT_NAME); +public class QueryType + implements com.linkedin.datahub.graphql.types.EntityType<QueryEntity, String> { + public static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +45,16 @@ public Class<QueryEntity> objectClass() { } @Override - public List<DataFetcherResult<QueryEntity>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<QueryEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(QUERY_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + QUERY_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -60,11 +62,16 @@ public List<DataFetcherResult<QueryEntity>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? 
null - : DataFetcherResult.<QueryEntity>newResult().data(QueryMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<QueryEntity>newResult() + .data(QueryMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index e1762022f4bcb..db086e682d57c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DataFlowDataJobsRelationshipsMapper implements - ModelMapper<com.linkedin.common.EntityRelationships, DataFlowDataJobsRelationships> { +public class DataFlowDataJobsRelationshipsMapper + implements ModelMapper<com.linkedin.common.EntityRelationships, DataFlowDataJobsRelationships> { - public static final DataFlowDataJobsRelationshipsMapper INSTANCE = new DataFlowDataJobsRelationshipsMapper(); + public static final DataFlowDataJobsRelationshipsMapper INSTANCE = + new DataFlowDataJobsRelationshipsMapper(); - public static DataFlowDataJobsRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DataFlowDataJobsRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DataFlowDataJobsRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DataFlowDataJobsRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 824e1181c5871..4df64c7ecb85e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DownstreamEntityRelationshipsMapper implements - ModelMapper<com.linkedin.common.EntityRelationships, DownstreamEntityRelationships> { +public class DownstreamEntityRelationshipsMapper + implements ModelMapper<com.linkedin.common.EntityRelationships, DownstreamEntityRelationships> { - public static final DownstreamEntityRelationshipsMapper INSTANCE = new DownstreamEntityRelationshipsMapper(); + public static final DownstreamEntityRelationshipsMapper INSTANCE = + new DownstreamEntityRelationshipsMapper(); - public static DownstreamEntityRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DownstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DownstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DownstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index 58f4f477bc7e6..e3743804b4908 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -5,28 +5,32 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class EntityRelationshipLegacyMapper implements ModelMapper<com.linkedin.common.EntityRelationship, EntityRelationshipLegacy> { +public class EntityRelationshipLegacyMapper + implements ModelMapper<com.linkedin.common.EntityRelationship, EntityRelationshipLegacy> { - public static final EntityRelationshipLegacyMapper INSTANCE = new EntityRelationshipLegacyMapper(); + public static final EntityRelationshipLegacyMapper INSTANCE = + new EntityRelationshipLegacyMapper(); - public static EntityRelationshipLegacy map(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); - } + public static EntityRelationshipLegacy map( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + return INSTANCE.apply(relationship); + } - @Override - public EntityRelationshipLegacy apply(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - final EntityRelationshipLegacy 
result = new EntityRelationshipLegacy(); + @Override + public EntityRelationshipLegacy apply( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); - EntityWithRelationships partialLineageEntity = (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); - if (partialLineageEntity != null) { - result.setEntity(partialLineageEntity); - } - if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); - } - return result; + EntityWithRelationships partialLineageEntity = + (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); + if (partialLineageEntity != null) { + result.setEntity(partialLineageEntity); + } + if (relationship.hasCreated()) { + result.setCreated(AuditStampMapper.map(relationship.getCreated())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 7db5e08c73fc6..832e1bb396b3b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -2,24 +2,28 @@ import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class UpstreamEntityRelationshipsMapper implements ModelMapper<com.linkedin.common.EntityRelationships, UpstreamEntityRelationships> { +public class UpstreamEntityRelationshipsMapper + implements ModelMapper<com.linkedin.common.EntityRelationships, UpstreamEntityRelationships> { - public static final UpstreamEntityRelationshipsMapper INSTANCE = new UpstreamEntityRelationshipsMapper(); + public static final UpstreamEntityRelationshipsMapper INSTANCE = + new UpstreamEntityRelationshipsMapper(); - public static UpstreamEntityRelationships map(@Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static UpstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public UpstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public UpstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 
8c6496390943b..9521945770195 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -21,11 +23,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubRoleType implements com.linkedin.datahub.graphql.types.EntityType<DataHubRole, String> { +public class DataHubRoleType + implements com.linkedin.datahub.graphql.types.EntityType<DataHubRole, String> { static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_ROLE_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -45,13 +45,16 @@ public Class<DataHubRole> objectClass() { } @Override - public List<DataFetcherResult<DataHubRole>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<DataHubRole>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -59,8 +62,13 @@ public List<DataFetcherResult<DataHubRole>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataHubRole>newResult().data(DataHubRoleMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<DataHubRole>newResult() + .data(DataHubRoleMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 5ba31a1602780..7a467886fc084 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; @@ -10,9 +12,6 @@ import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubRoleMapper implements ModelMapper<EntityResponse, DataHubRole> { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index 084c4d5033ad0..d51e0d06c0fda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -11,9 +11,9 @@ import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.SearchableEntityType; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -22,9 +22,6 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -33,88 +30,101 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class RoleType implements SearchableEntityType<Role, String>, +public class RoleType + implements SearchableEntityType<Role, String>, com.linkedin.datahub.graphql.types.EntityType<Role, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ROLE_KEY, - Constants.ROLE_PROPERTIES_ASPECT_NAME, - Constants.ROLE_ACTORS_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ROLE_KEY, + Constants.ROLE_PROPERTIES_ASPECT_NAME, + Constants.ROLE_ACTORS_ASPECT_NAME); - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public RoleType(final EntityClient 
entityClient) { - _entityClient = entityClient; - } + public RoleType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.ROLE; - } + @Override + public EntityType type() { + return EntityType.ROLE; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<Role> objectClass() { - return Role.class; - } + @Override + public Class<Role> objectClass() { + return Role.class; + } - @Override - public List<DataFetcherResult<Role>> batchLoad(@Nonnull List<String> urns, - @Nonnull QueryContext context) throws Exception { - final List<Urn> externalRolesUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<Role>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> externalRolesUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.ROLE_ENTITY_NAME, - new HashSet<>(externalRolesUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.ROLE_ENTITY_NAME, + new HashSet<>(externalRolesUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : externalRolesUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Role>newResult() - .data(RoleMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Role", e); - } + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : externalRolesUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<Role>newResult().data(RoleMapper.map(gmsResult)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Role", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search(Constants.ROLE_ENTITY_NAME, - query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.ROLE_ENTITY_NAME, - query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + Constants.ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.ROLE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java index cabace1a52441..3eb090e452439 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java @@ -1,41 +1,39 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.RoleAssociation; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class AccessMapper { - public static final AccessMapper INSTANCE = new AccessMapper(); - - public static com.linkedin.datahub.graphql.generated.Access map( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - return INSTANCE.apply(access, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.Access apply( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.Access result = new com.linkedin.datahub.graphql.generated.Access(); - result.setRoles(access.getRoles().stream().map( - association -> this.mapRoleAssociation(association, entityUrn) - ).collect(Collectors.toList())); - 
return result; - } - - private RoleAssociation mapRoleAssociation(com.linkedin.common.RoleAssociation association, Urn entityUrn) { - RoleAssociation roleAssociation = new RoleAssociation(); - Role role = new Role(); - role.setType(EntityType.ROLE); - role.setUrn(association.getUrn().toString()); - roleAssociation.setRole(role); - roleAssociation.setAssociatedUrn(entityUrn.toString()); - return roleAssociation; - } - + public static final AccessMapper INSTANCE = new AccessMapper(); + + public static com.linkedin.datahub.graphql.generated.Access map( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(access, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Access apply( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.Access result = + new com.linkedin.datahub.graphql.generated.Access(); + result.setRoles( + access.getRoles().stream() + .map(association -> this.mapRoleAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private RoleAssociation mapRoleAssociation( + com.linkedin.common.RoleAssociation association, Urn entityUrn) { + RoleAssociation roleAssociation = new RoleAssociation(); + Role role = new Role(); + role.setType(EntityType.ROLE); + role.setUrn(association.getUrn().toString()); + roleAssociation.setRole(role); + roleAssociation.setAssociatedUrn(entityUrn.toString()); + return roleAssociation; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index 3cb0ec942a457..df18b7c89fafc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -15,79 +15,77 @@ import com.linkedin.metadata.key.RoleKey; import com.linkedin.role.Actors; import com.linkedin.role.RoleUserArray; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class RoleMapper implements ModelMapper<EntityResponse, Role> { - public static final RoleMapper INSTANCE = new RoleMapper(); - - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final RoleMapper INSTANCE = new RoleMapper(); + + public static Role map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { + final RoleProperties propertiesResult = new RoleProperties(); + propertiesResult.setName(e.getName()); + propertiesResult.setDescription(e.getDescription()); + propertiesResult.setType(e.getType()); + propertiesResult.setRequestUrl(e.getRequestUrl()); + + return propertiesResult; + } + + private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { + RoleUser result = new RoleUser(); + CorpUser corpUser = new CorpUser(); + corpUser.setUrn(provisionedUser.getUser().toString()); + result.setUser(corpUser); + return result; + } + + private static Actor mapActor(Actors actors) { + Actor actor = new Actor(); + actor.setUsers(mapRoleUsers(actors.getUsers())); + return actor; + } + + private static 
List<RoleUser> mapRoleUsers(RoleUserArray users) { + if (users == null) { + return null; } + return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + } - private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { - final RoleProperties propertiesResult = new RoleProperties(); - propertiesResult.setName(e.getName()); - propertiesResult.setDescription(e.getDescription()); - propertiesResult.setType(e.getType()); - propertiesResult.setRequestUrl(e.getRequestUrl()); + @Override + public Role apply(EntityResponse input) { - return propertiesResult; - } + final Role result = new Role(); + final Urn entityUrn = input.getUrn(); - private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { - RoleUser result = new RoleUser(); - CorpUser corpUser = new CorpUser(); - corpUser.setUrn(provisionedUser.getUser().toString()); - result.setUser(corpUser); - return result; - } + result.setUrn(entityUrn.toString()); + result.setType(EntityType.ROLE); - private static Actor mapActor(Actors actors) { - Actor actor = new Actor(); - actor.setUsers(mapRoleUsers(actors.getUsers())); - return actor; - } + final EnvelopedAspectMap aspects = input.getAspects(); - private static List<RoleUser> mapRoleUsers(RoleUserArray users) { - if (users == null) { - return null; - } - return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); + if (roleKeyAspect != null) { + result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); + } + final EnvelopedAspect envelopedPropertiesAspect = + aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); + if (envelopedPropertiesAspect != null) { + result.setProperties( + mapRoleProperties( + new com.linkedin.role.RoleProperties(envelopedPropertiesAspect.getValue().data()))); } - @Override - public Role apply(EntityResponse input) { - - - final Role result = new Role(); - final Urn entityUrn = input.getUrn(); - - result.setUrn(entityUrn.toString()); - result.setType(EntityType.ROLE); - - final EnvelopedAspectMap aspects = input.getAspects(); - - final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); - if (roleKeyAspect != null) { - result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); - } - final EnvelopedAspect envelopedPropertiesAspect = aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); - if (envelopedPropertiesAspect != null) { - result.setProperties(mapRoleProperties( - new com.linkedin.role.RoleProperties( - envelopedPropertiesAspect.getValue().data())) - ); - } - - final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); - if (envelopedUsers != null) { - result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); - } - - return result; + final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); + if (envelopedUsers != null) { + result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index 748753c4e22b1..b543a40cbac41 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ 
-8,15 +8,15 @@ import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType<SchemaFieldEntity, String> { +public class SchemaFieldType + implements com.linkedin.datahub.graphql.types.EntityType<SchemaFieldEntity, String> { - public SchemaFieldType() { } + public SchemaFieldType() {} @Override public EntityType type() { @@ -34,18 +34,17 @@ public Class<SchemaFieldEntity> objectClass() { } @Override - public List<DataFetcherResult<SchemaFieldEntity>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> schemaFieldUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<SchemaFieldEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> schemaFieldUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { return schemaFieldUrns.stream() .map(this::mapSchemaFieldUrn) - .map(schemaFieldEntity -> DataFetcherResult.<SchemaFieldEntity>newResult() - .data(schemaFieldEntity) - .build() - ) + .map( + schemaFieldEntity -> + DataFetcherResult.<SchemaFieldEntity>newResult().data(schemaFieldEntity).build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -66,6 +65,4 @@ private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { throw new RuntimeException("Failed to load schemaField entity", e); } } - } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index f79b23033c995..c56833cc817eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.types.tag; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.Entity; @@ -26,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -44,136 +46,150 @@ import 
javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class TagType implements com.linkedin.datahub.graphql.types.SearchableEntityType<Tag, String>, - MutableType<TagUpdateInput, Tag> { - - private static final Set<String> FACET_FIELDS = Collections.emptySet(); - - private final EntityClient _entityClient; - - public TagType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class<Tag> objectClass() { - return Tag.class; +public class TagType + implements com.linkedin.datahub.graphql.types.SearchableEntityType<Tag, String>, + MutableType<TagUpdateInput, Tag> { + + private static final Set<String> FACET_FIELDS = Collections.emptySet(); + + private final EntityClient _entityClient; + + public TagType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<Tag> objectClass() { + return Tag.class; + } + + @Override + public EntityType type() { + return EntityType.TAG; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<TagUpdateInput> inputClass() { + return TagUpdateInput.class; + } + + @Override + public List<DataFetcherResult<Tag>> batchLoad( + final List<String> urns, final QueryContext context) { + + final List<Urn> tagUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> tagMap = + _entityClient.batchGetV2( + TAG_ENTITY_NAME, new HashSet<>(tagUrns), null, context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : tagUrns) { + gmsResults.add(tagMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsTag -> + gmsTag == null + ? null + : DataFetcherResult.<Tag>newResult().data(TagMapper.map(gmsTag)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Tags", e); } - - @Override - public EntityType type() { - return EntityType.TAG; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<TagUpdateInput> inputClass() { - return TagUpdateInput.class; - } - - @Override - public List<DataFetcherResult<Tag>> batchLoad(final List<String> urns, final QueryContext context) { - - final List<Urn> tagUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> tagMap = _entityClient.batchGetV2(TAG_ENTITY_NAME, new HashSet<>(tagUrns), - null, context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : tagUrns) { - gmsResults.add(tagMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsTag -> gmsTag == null ? 
null - : DataFetcherResult.<Tag>newResult() - .data(TagMapper.map(gmsTag)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Tags", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("tag", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - - @Override - public Tag update(@Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = TagUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "tag", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public Tag update( + @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = TagUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.TAG_PRIVILEGES.getResourceType(), - update.getUrn(), - orPrivilegeGroups); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.TAG_PRIVILEGES.getResourceType(), + update.getUrn(), + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDescription() != null || updateInput.getName() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (updateInput.getDescription() != null || updateInput.getName() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index f4d5f0a549a0e..72665535e5980 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -4,35 +4,36 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Tag; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class GlobalTagsMapper { - public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); + public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); - public static com.linkedin.datahub.graphql.generated.GlobalTags map( - @Nonnull final GlobalTags standardTags, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(standardTags, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.GlobalTags map( + @Nonnull final GlobalTags standardTags, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(standardTags, entityUrn); + } - public com.linkedin.datahub.graphql.generated.GlobalTags apply(@Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); - result.setTags(input.getTags().stream().map(tag -> this.mapTagAssociation(tag, entityUrn)).collect(Collectors.toList())); - return result; - } + public com.linkedin.datahub.graphql.generated.GlobalTags apply( + @Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlobalTags result = + new com.linkedin.datahub.graphql.generated.GlobalTags(); + result.setTags( + input.getTags().stream() + .map(tag -> this.mapTagAssociation(tag, entityUrn)) + .collect(Collectors.toList())); + return result; + } - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); - final Tag resultTag = new Tag(); - resultTag.setUrn(input.getTag().toString()); - result.setTag(resultTag); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( + @Nonnull final TagAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.TagAssociation result = + new com.linkedin.datahub.graphql.generated.TagAssociation(); + final Tag resultTag = new Tag(); + resultTag.setUrn(input.getTag().toString()); + result.setTag(resultTag); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 775c123070a80..3792a42376004 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -4,27 +4,28 @@ import com.linkedin.common.urn.TagUrn; import com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; -public class TagAssociationUpdateMapper implements ModelMapper<TagAssociationUpdate, TagAssociation> { +public class TagAssociationUpdateMapper + implements ModelMapper<TagAssociationUpdate, TagAssociation> { - public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); + public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); - } + public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(tagAssociationUpdate); + } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { - final TagAssociation output = new TagAssociation(); - try { - output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to update tag with urn %s, invalid urn", - tagAssociationUpdate.getTag().getUrn())); - } - return output; + public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { + final TagAssociation output = new TagAssociation(); + try { + output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to update tag with urn %s, invalid urn", + tagAssociationUpdate.getTag().getUrn())); } - + return output; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index 43736b412b004..d6ce24582678d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -16,63 +18,61 @@ import com.linkedin.tag.TagProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class TagMapper implements ModelMapper<EntityResponse, Tag> { - public static final TagMapper INSTANCE = new TagMapper(); + public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { - final Tag result = new Tag(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.TAG); + public static Tag map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - final String legacyName = entityResponse.getUrn().getId(); - result.setName(legacyName); + @Override + public Tag apply(@Nonnull final EntityResponse entityResponse) { + final Tag result = new Tag(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.TAG); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<Tag> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (tag, dataMap) -> - tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + final String legacyName = entityResponse.getUrn().getId(); + result.setName(legacyName); - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<Tag> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); + mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (tag, dataMap) -> tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - return mappingHelper.getResult(); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - TagKey tagKey = new TagKey(dataMap); - tag.setName(tagKey.getName()); - } + return mappingHelper.getResult(); + } + + private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + TagKey tagKey = new TagKey(dataMap); + tag.setName(tagKey.getName()); + } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - final TagProperties properties = new TagProperties(dataMap); - final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = - new com.linkedin.datahub.graphql.generated.TagProperties.Builder() - .setColorHex(properties.getColorHex(GetMode.DEFAULT)) - .setName(properties.getName(GetMode.DEFAULT)) - .setDescription(properties.getDescription(GetMode.DEFAULT)) - .build(); - tag.setProperties(graphQlProperties); - // Set deprecated top-level description field. 
- if (properties.hasDescription()) { - tag.setDescription(properties.getDescription()); - } + private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + final TagProperties properties = new TagProperties(dataMap); + final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = + new com.linkedin.datahub.graphql.generated.TagProperties.Builder() + .setColorHex(properties.getColorHex(GetMode.DEFAULT)) + .setName(properties.getName(GetMode.DEFAULT)) + .setDescription(properties.getDescription(GetMode.DEFAULT)) + .build(); + tag.setProperties(graphQlProperties); + // Set deprecated top-level description field. + if (properties.hasDescription()) { + tag.setDescription(properties.getDescription()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 505dd0d36954b..316994881ccfe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; @@ -19,24 +22,19 @@ import java.util.Collection; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class TagUpdateInputMapper implements InputModelMapper<TagUpdateInput, Collection<MetadataChangeProposal>, Urn> { +public class TagUpdateInputMapper + implements InputModelMapper<TagUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection<MetadataChangeProposal> map( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { return INSTANCE.apply(tagUpdate, actor); } @Override public Collection<MetadataChangeProposal> apply( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { final Collection<MetadataChangeProposal> proposals = new ArrayList<>(2); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(TAG_ENTITY_NAME); @@ -59,9 +57,10 @@ public Collection<MetadataChangeProposal> apply( TagProperties tagProperties = new TagProperties(); tagProperties.setName(tagUpdate.getName()); tagProperties.setDescription(tagUpdate.getDescription()); - proposals.add(updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); } return proposals; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java index ddc9f33b25516..be67d17421917 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java @@ -1,15 +1,14 @@ package com.linkedin.datahub.graphql.types.test; -import com.linkedin.datahub.graphql.generated.TestDefinition; -import com.linkedin.test.TestInfo; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; +import com.linkedin.datahub.graphql.generated.TestDefinition; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; - +import com.linkedin.test.TestInfo; public class TestMapper { @@ -29,12 +28,11 @@ public static Test map(final EntityResponse entityResponse) { result.setName(testInfo.getName()); result.setDescription(testInfo.getDescription()); result.setDefinition(new TestDefinition(testInfo.getDefinition().getJson())); - } else { + } else { return null; } return result; } - private TestMapper() { - } -} \ No newline at end of file + private TestMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index 4b7df8a0d23d3..eefcc356c22a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -3,9 +3,9 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -20,15 +20,12 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class TestType implements com.linkedin.datahub.graphql.types.EntityType<Test, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.TEST_INFO_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(Constants.TEST_INFO_ASPECT_NAME); private final EntityClient _entityClient; - public TestType(final EntityClient entityClient) { + public TestType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -48,28 +45,28 @@ public Class<Test> objectClass() { } @Override - public List<DataFetcherResult<Test>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> testUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Test>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> testUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.TEST_ENTITY_NAME, - new HashSet<>(testUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.TEST_ENTITY_NAME, + new HashSet<>(testUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List<EntityResponse> gmsResults = 
new ArrayList<>();
      for (Urn urn : testUrns) {
        gmsResults.add(entities.getOrDefault(urn, null));
      }
      return gmsResults.stream()
-          .map(gmsResult ->
-              gmsResult == null ? null : DataFetcherResult.<Test>newResult()
-                  .data(TestMapper.map(gmsResult))
-                  .build()
-          )
+          .map(
+              gmsResult ->
+                  gmsResult == null
+                      ? null
+                      : DataFetcherResult.<Test>newResult().data(TestMapper.map(gmsResult)).build())
           .collect(Collectors.toList());
     } catch (Exception e) {
       throw new RuntimeException("Failed to batch load Tests", e);
@@ -83,4 +80,4 @@ private Urn getUrn(final String urnStr) {
       throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr));
     }
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java
index 7812282d0c1e5..02de39ffc644c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.timeline.mappers;
 
+import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.ChangeOperationType;
 import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult;
@@ -25,15 +27,14 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.maven.artifact.versioning.ComparableVersion;
 
-import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*;
-
-
-// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame structs for every schema
+// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame
+// structs for every schema
 // at every semantic version.
 @Slf4j
 public class SchemaBlameMapper {
 
-  public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> changeTransactions,
+  public static GetSchemaBlameResult map(
+      @Nonnull final List<ChangeTransaction> changeTransactions,
       @Nullable final String versionCutoff) {
     final GetSchemaBlameResult result = new GetSchemaBlameResult();
     if (changeTransactions.isEmpty()) {
@@ -46,7 +47,8 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch
     final String latestSemanticVersionString =
         truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer());
 
-    final String semanticVersionFilterString = versionCutoff == null ? latestSemanticVersionString : versionCutoff;
+    final String semanticVersionFilterString =
+        versionCutoff == null ? latestSemanticVersionString : versionCutoff;
 
     final Optional<ComparableVersion> semanticVersionFilterOptional =
         createSemanticVersion(semanticVersionFilterString);
     if (semanticVersionFilterOptional.isEmpty()) {
@@ -55,25 +57,30 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch
 
     final ComparableVersion semanticVersionFilter = semanticVersionFilterOptional.get();
 
-    final List<ChangeTransaction> reversedChangeTransactions = changeTransactions.stream()
-        .map(TimelineUtils::semanticVersionChangeTransactionPair)
-        .filter(Optional::isPresent)
-        .map(Optional::get)
-        .filter(semanticVersionChangeTransactionPair ->
-            semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) <= 0)
-        .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst)))
-        .map(Pair::getSecond)
-        .collect(Collectors.toList());
+    final List<ChangeTransaction> reversedChangeTransactions =
+        changeTransactions.stream()
+            .map(TimelineUtils::semanticVersionChangeTransactionPair)
+            .filter(Optional::isPresent)
+            .map(Optional::get)
+            .filter(
+                semanticVersionChangeTransactionPair ->
+                    semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter)
+                        <= 0)
+            .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst)))
+            .map(Pair::getSecond)
+            .collect(Collectors.toList());
 
     if (reversedChangeTransactions.isEmpty()) {
       return result;
     }
 
-    final String selectedSemanticVersion = truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer());
+    final String selectedSemanticVersion =
+        truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer());
     final long selectedSemanticVersionTimestamp = reversedChangeTransactions.get(0).getTimestamp();
     final String selectedVersionStamp = reversedChangeTransactions.get(0).getVersionStamp();
     result.setVersion(
-        new SemanticVersionStruct(selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp));
+        new SemanticVersionStruct(
+            selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp));
 
     for (ChangeTransaction changeTransaction : reversedChangeTransactions) {
       for (ChangeEvent changeEvent : changeTransaction.getChangeEvents()) {
@@ -90,8 +97,10 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch
 
       SchemaFieldKey schemaFieldKey;
       try {
-        schemaFieldKey = (SchemaFieldKey) EntityKeyUtils.convertUrnToEntityKeyInternal(Urn.createFromString(schemaUrn),
-            new SchemaFieldKey().schema());
+        schemaFieldKey =
+            (SchemaFieldKey)
+                EntityKeyUtils.convertUrnToEntityKeyInternal(
+                    Urn.createFromString(schemaUrn), new SchemaFieldKey().schema());
       } catch (Exception e) {
         log.debug(String.format("Could not generate schema urn for %s", schemaUrn));
         continue;
@@ -101,7 +110,10 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch
       schemaFieldBlame.setFieldPath(fieldPath);
 
       final SchemaFieldChange schemaFieldChange =
-          getLastSchemaFieldChange(changeEvent, changeTransaction.getTimestamp(), changeTransaction.getSemVer(),
+          getLastSchemaFieldChange(
+              changeEvent,
+              changeTransaction.getTimestamp(),
+              changeTransaction.getSemVer(),
               changeTransaction.getVersionStamp());
       schemaFieldBlame.setSchemaFieldChange(schemaFieldChange);
 
@@ -109,15 +121,17 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch
       }
     }
 
-    result.setSchemaFieldBlameList(schemaBlameMap.values()
-        .stream()
-        .filter(schemaFieldBlame -> !schemaFieldBlame.getSchemaFieldChange()
-            .getChangeType()
-            .equals(ChangeOperationType.REMOVE))
-        .collect(Collectors.toList()));
+    result.setSchemaFieldBlameList(
+        schemaBlameMap.values().stream()
+            .filter(
+                schemaFieldBlame ->
+                    !schemaFieldBlame
+                        .getSchemaFieldChange()
+                        .getChangeType()
+                        .equals(ChangeOperationType.REMOVE))
+            .collect(Collectors.toList()));
     return result;
   }
 
-  private SchemaBlameMapper() {
-  }
-}
\ No newline at end of file
+  private SchemaBlameMapper() {}
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java
index 249957b1a1262..295ca0856821c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.timeline.mappers;
 
+import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*;
+
 import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult;
 import com.linkedin.datahub.graphql.generated.SemanticVersionStruct;
 import com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils;
@@ -12,10 +14,8 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*;
-
-
-// Class for converting ChangeTransactions received from the Timeline API to list of schema versions.
+// Class for converting ChangeTransactions received from the Timeline API to list of schema
+// versions.
 @Slf4j
 public class SchemaVersionListMapper {
 
@@ -29,28 +29,36 @@ public static GetSchemaVersionListResult map(List<ChangeTransaction> changeTrans
 
     String latestSemanticVersionString =
         truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer());
-    long latestSemanticVersionTimestamp = changeTransactions.get(changeTransactions.size() - 1).getTimestamp();
-    String latestVersionStamp = changeTransactions.get(changeTransactions.size() - 1).getVersionStamp();
+    long latestSemanticVersionTimestamp =
+        changeTransactions.get(changeTransactions.size() - 1).getTimestamp();
+    String latestVersionStamp =
+        changeTransactions.get(changeTransactions.size() - 1).getVersionStamp();
 
     result.setLatestVersion(
-        new SemanticVersionStruct(latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp));
+        new SemanticVersionStruct(
+            latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp));
 
-    List<ChangeTransaction> reversedChangeTransactions = changeTransactions.stream()
-        .map(TimelineUtils::semanticVersionChangeTransactionPair)
-        .filter(Optional::isPresent)
-        .map(Optional::get)
-        .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst)))
-        .map(Pair::getSecond)
-        .collect(Collectors.toList());
+    List<ChangeTransaction> reversedChangeTransactions =
+        changeTransactions.stream()
+            .map(TimelineUtils::semanticVersionChangeTransactionPair)
+            .filter(Optional::isPresent)
+            .map(Optional::get)
+            .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst)))
+            .map(Pair::getSecond)
+            .collect(Collectors.toList());
 
-    List<SemanticVersionStruct> semanticVersionStructList = reversedChangeTransactions.stream()
-        .map(changeTransaction -> new SemanticVersionStruct(truncateSemanticVersion(changeTransaction.getSemVer()),
-            changeTransaction.getTimestamp(), changeTransaction.getVersionStamp()))
-        .collect(Collectors.toList());
+    List<SemanticVersionStruct> semanticVersionStructList =
+        reversedChangeTransactions.stream()
+            .map(
+                changeTransaction ->
+                    new SemanticVersionStruct(
+                        truncateSemanticVersion(changeTransaction.getSemVer()),
+                        changeTransaction.getTimestamp(),
+                        changeTransaction.getVersionStamp()))
+            .collect(Collectors.toList());
 
     result.setSemanticVersionList(semanticVersionStructList);
     return result;
   }
 
-  private SchemaVersionListMapper() {
-  }
-}
\ No newline at end of file
+  private SchemaVersionListMapper() {}
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java
index 175cf678117f0..37acfe3da0f9f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java
@@ -9,13 +9,13 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.maven.artifact.versioning.ComparableVersion;
 
-
 @Slf4j
 public class TimelineUtils {
 
-  public static Optional<Pair<ComparableVersion, ChangeTransaction>> semanticVersionChangeTransactionPair(
-      ChangeTransaction changeTransaction) {
-    Optional<ComparableVersion> semanticVersion = createSemanticVersion(changeTransaction.getSemVer());
+  public static Optional<Pair<ComparableVersion, ChangeTransaction>>
+      semanticVersionChangeTransactionPair(ChangeTransaction changeTransaction) {
+    Optional<ComparableVersion> semanticVersion =
+        createSemanticVersion(changeTransaction.getSemVer());
     return semanticVersion.map(version -> Pair.of(version, changeTransaction));
   }
 
@@ -29,21 +29,24 @@ public static Optional<ComparableVersion> createSemanticVersion(String semanticV
     }
   }
 
-  // The SemanticVersion is currently returned from the ChangeTransactions in the format "x.y.z-computed". This function
+  // The SemanticVersion is currently returned from the ChangeTransactions in the format
+  // "x.y.z-computed". This function
   // removes the suffix "computed".
   public static String truncateSemanticVersion(String semanticVersion) {
     String suffix = "-computed";
-    return semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix))
+    return semanticVersion.endsWith(suffix)
+        ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix))
         : semanticVersion;
   }
 
-  public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent, long timestamp,
-      String semanticVersion, String versionStamp) {
+  public static SchemaFieldChange getLastSchemaFieldChange(
+      ChangeEvent changeEvent, long timestamp, String semanticVersion, String versionStamp) {
     SchemaFieldChange schemaFieldChange = new SchemaFieldChange();
     schemaFieldChange.setTimestampMillis(timestamp);
     schemaFieldChange.setLastSemanticVersion(truncateSemanticVersion(semanticVersion));
     schemaFieldChange.setChangeType(
-        ChangeOperationType.valueOf(ChangeOperationType.class, changeEvent.getOperation().toString()));
+        ChangeOperationType.valueOf(
+            ChangeOperationType.class, changeEvent.getOperation().toString()));
     schemaFieldChange.setVersionStamp(versionStamp);
 
     String translatedChangeOperationType;
@@ -65,15 +68,16 @@ public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent
 
     String suffix = "-computed";
     String translatedSemanticVersion =
-        semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix))
+        semanticVersion.endsWith(suffix)
+            ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix))
             : semanticVersion;
 
-    String lastSchemaFieldChange = String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion);
+    String lastSchemaFieldChange =
+        String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion);
     schemaFieldChange.setLastSchemaFieldChange(lastSchemaFieldChange);
 
     return schemaFieldChange;
   }
 
-  private TimelineUtils() {
-  }
+  private TimelineUtils() {}
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java
index 3bf84d21a3215..e4e67c86f1ae6 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java
@@ -4,12 +4,13 @@
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import javax.annotation.Nonnull;
 
-
-public class FieldUsageCountsMapper implements ModelMapper<com.linkedin.usage.FieldUsageCounts, FieldUsageCounts> {
+public class FieldUsageCountsMapper
+    implements ModelMapper<com.linkedin.usage.FieldUsageCounts, FieldUsageCounts> {
 
   public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper();
 
-  public static FieldUsageCounts map(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) {
+  public static FieldUsageCounts map(
+      @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) {
     return INSTANCE.apply(usageCounts);
   }
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java
index 453ae97d40306..3449c6782a46b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java
@@ -5,18 +5,19 @@
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import javax.annotation.Nonnull;
 
-
-public class UsageAggregationMapper implements
-    ModelMapper<com.linkedin.usage.UsageAggregation, UsageAggregation> {
+public class UsageAggregationMapper
+    implements ModelMapper<com.linkedin.usage.UsageAggregation, UsageAggregation> {
 
   public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper();
 
-  public static UsageAggregation map(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) {
+  public static UsageAggregation map(
+      @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) {
     return INSTANCE.apply(pdlUsageAggregation);
   }
 
   @Override
-  public UsageAggregation apply(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) {
+  public UsageAggregation apply(
+      @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) {
     UsageAggregation result = new UsageAggregation();
 
     result.setBucket(pdlUsageAggregation.getBucket());
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java
index 697b15d57e4e4..ff9f6fd5c4855 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java
@@ -5,31 +5,34 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
-public class UsageAggregationMetricsMapper implements
-    ModelMapper<com.linkedin.usage.UsageAggregationMetrics, UsageAggregationMetrics> {
+public class UsageAggregationMetricsMapper
+    implements ModelMapper<com.linkedin.usage.UsageAggregationMetrics, UsageAggregationMetrics> {
 
   public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper();
 
-  public static UsageAggregationMetrics map(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) {
+  public static UsageAggregationMetrics map(
+      @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) {
     return INSTANCE.apply(usageAggregationMetrics);
   }
 
   @Override
-  public UsageAggregationMetrics apply(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) {
+  public UsageAggregationMetrics apply(
+      @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) {
     UsageAggregationMetrics result = new UsageAggregationMetrics();
     result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries());
     result.setUniqueUserCount(usageAggregationMetrics.getUniqueUserCount());
     result.setTopSqlQueries(usageAggregationMetrics.getTopSqlQueries());
     if (usageAggregationMetrics.hasFields()) {
       result.setFields(
-          usageAggregationMetrics.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList()));
+          usageAggregationMetrics.getFields().stream()
+              .map(FieldUsageCountsMapper::map)
+              .collect(Collectors.toList()));
     }
     if (usageAggregationMetrics.hasUsers()) {
-      result.setUsers(usageAggregationMetrics.getUsers()
-          .stream()
-          .map(aggregation -> UserUsageCountsMapper.map(aggregation))
-          .collect(Collectors.toList()));
+      result.setUsers(
+          usageAggregationMetrics.getUsers().stream()
+              .map(aggregation -> UserUsageCountsMapper.map(aggregation))
+              .collect(Collectors.toList()));
     }
 
     return result;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java
index ba3b86b72af8b..63fe051b7ede9 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java
@@ -5,30 +5,35 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
+public class UsageQueryResultAggregationMapper
+    implements ModelMapper<
+        com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> {
 
-public class UsageQueryResultAggregationMapper implements
-    ModelMapper<com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> {
+  public static final UsageQueryResultAggregationMapper INSTANCE =
+      new UsageQueryResultAggregationMapper();
 
-  public static final UsageQueryResultAggregationMapper INSTANCE = new UsageQueryResultAggregationMapper();
-
-  public static UsageQueryResultAggregations map(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) {
+  public static UsageQueryResultAggregations map(
+      @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) {
     return INSTANCE.apply(pdlUsageResultAggregations);
   }
 
   @Override
-  public UsageQueryResultAggregations apply(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) {
+  public UsageQueryResultAggregations apply(
+      @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) {
     UsageQueryResultAggregations result = new UsageQueryResultAggregations();
     result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries());
     result.setUniqueUserCount(pdlUsageResultAggregations.getUniqueUserCount());
     if (pdlUsageResultAggregations.hasFields()) {
       result.setFields(
-          pdlUsageResultAggregations.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList()));
+          pdlUsageResultAggregations.getFields().stream()
+              .map(FieldUsageCountsMapper::map)
+              .collect(Collectors.toList()));
     }
     if (pdlUsageResultAggregations.hasUsers()) {
-      result.setUsers(pdlUsageResultAggregations.getUsers()
-          .stream()
-          .map(aggregation -> UserUsageCountsMapper.map(aggregation))
-          .collect(Collectors.toList()));
+      result.setUsers(
+          pdlUsageResultAggregations.getUsers().stream()
+              .map(aggregation -> UserUsageCountsMapper.map(aggregation))
+              .collect(Collectors.toList()));
     }
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java
index f54259180c739..444605cd99377 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java
@@ -1,17 +1,17 @@
 package com.linkedin.datahub.graphql.types.usage;
 
 import com.linkedin.datahub.graphql.generated.UsageQueryResult;
-
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
-public class UsageQueryResultMapper implements ModelMapper<com.linkedin.usage.UsageQueryResult, UsageQueryResult> {
+public class UsageQueryResultMapper
+    implements ModelMapper<com.linkedin.usage.UsageQueryResult, UsageQueryResult> {
 
   public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper();
 
-  public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) {
+  public static UsageQueryResult map(
+      @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) {
     return INSTANCE.apply(pdlUsageResult);
   }
 
@@ -19,11 +19,14 @@ public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryR
   public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) {
     UsageQueryResult result = new UsageQueryResult();
     if (pdlUsageResult.hasAggregations()) {
-      result.setAggregations(UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations()));
+      result.setAggregations(
+          UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations()));
     }
     if (pdlUsageResult.hasBuckets()) {
-      result.setBuckets(pdlUsageResult.getBuckets().stream().map(
-          bucket -> UsageAggregationMapper.map(bucket)).collect(Collectors.toList()));
+      result.setBuckets(
+          pdlUsageResult.getBuckets().stream()
+              .map(bucket -> UsageAggregationMapper.map(bucket))
+              .collect(Collectors.toList()));
     }
     return result;
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java
index b525a761841e3..014003dd86554 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java
@@ -1,23 +1,23 @@
 package com.linkedin.datahub.graphql.types.usage;
 
 import com.linkedin.datahub.graphql.generated.CorpUser;
-
 import com.linkedin.datahub.graphql.generated.UserUsageCounts;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import javax.annotation.Nonnull;
 
-
-public class UserUsageCountsMapper implements
-    ModelMapper<com.linkedin.usage.UserUsageCounts, UserUsageCounts> {
+public class UserUsageCountsMapper
+    implements ModelMapper<com.linkedin.usage.UserUsageCounts, UserUsageCounts> {
 
   public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper();
 
-  public static UserUsageCounts map(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) {
+  public static UserUsageCounts map(
+      @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) {
     return INSTANCE.apply(pdlUsageResultAggregations);
   }
 
   @Override
-  public UserUsageCounts apply(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) {
+  public UserUsageCounts apply(
+      @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) {
     UserUsageCounts result = new UserUsageCounts();
     if (pdlUsageResultAggregations.hasUser()) {
       CorpUser partialUser = new CorpUser();
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java
index f6c348937c7a5..8ea06f46d5133 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.view;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.data.DataMap;
 import com.linkedin.datahub.graphql.generated.DataHubView;
 import com.linkedin.datahub.graphql.generated.DataHubViewDefinition;
@@ -24,8 +26,6 @@
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-
 @Slf4j
 public class DataHubViewMapper implements ModelMapper<EntityResponse, DataHubView> {
 
@@ -57,20 +57,26 @@ private void mapDataHubViewInfo(@Nonnull final DataHubView view, @Nonnull final
   }
 
   @Nonnull
-  private DataHubViewDefinition mapViewDefinition(@Nonnull final com.linkedin.view.DataHubViewDefinition definition) {
+  private DataHubViewDefinition mapViewDefinition(
+      @Nonnull final com.linkedin.view.DataHubViewDefinition definition) {
     final DataHubViewDefinition result = new DataHubViewDefinition();
     result.setFilter(mapFilter(definition.getFilter()));
-    result.setEntityTypes(definition.getEntityTypes().stream().map(EntityTypeMapper::getType).collect(
-        Collectors.toList()));
+    result.setEntityTypes(
+        definition.getEntityTypes().stream()
+            .map(EntityTypeMapper::getType)
+            .collect(Collectors.toList()));
     return result;
   }
 
   @Nullable
-  private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.filter.Filter filter) {
-    // This assumes that people DO NOT emit Views on their own, since we expect that the Filter structure is within
+  private DataHubViewFilter mapFilter(
+      @Nonnull final com.linkedin.metadata.query.filter.Filter filter) {
+    // This assumes that people DO NOT emit Views on their own, since we expect that the Filter
+    // structure is within
     // a finite set of possibilities.
     //
-    // If we find a View that was ingested manually and malformed, then we log that and return a default.
+    // If we find a View that was ingested manually and malformed, then we log that and return a
+    // default.
     final DataHubViewFilter result = new DataHubViewFilter();
     if (filter.hasOr() && filter.getOr().size() == 1) {
       // Then we are looking at an AND with multiple sub conditions.
@@ -84,9 +90,7 @@ private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.f
     return result;
   }
 
-  /**
-   * This simply converts a List of leaf criterion into the FacetFiler equivalent.
-   */
+  /** This simply converts a List of leaf criterion into the FacetFilter equivalent. */
   @Nonnull
   private List<FacetFilter> mapAndFilters(@Nullable final List<Criterion> ands) {
     // If the array is missing, return empty array.
@@ -98,9 +102,9 @@ private List<FacetFilter> mapAndFilters(@Nullable final List<Criterion> ands) {
   }
 
   /**
-   * This converts a list of Conjunctive Criterion into a flattened list
-   * of FacetFilters. This method makes the assumption that WE (our GraphQL API)
-   * has minted the View and that each or criterion contains at maximum one nested condition.
+   * This converts a list of Conjunctive Criterion into a flattened list of FacetFilters. This
+   * method makes the assumption that WE (our GraphQL API) has minted the View and that each or
+   * criterion contains at maximum one nested condition.
    */
   @Nonnull
   private List<FacetFilter> mapOrFilters(@Nullable final List<ConjunctiveCriterion> ors) {
@@ -109,8 +113,10 @@ private List<FacetFilter> mapOrFilters(@Nullable final List<ConjunctiveCriterion
       return Collections.emptyList();
     }
     if (ors.stream().anyMatch(or -> or.hasAnd() && or.getAnd().size() > 1)) {
-      log.warn(String.format(
-          "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", ors));
+      log.warn(
+          String.format(
+              "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s",
+              ors));
       return Collections.emptyList();
     }
     // It is assumed that in this case, the view is a flat list of ORs. Thus, we filter
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java
index 21a80e3f900d4..9b3680bde9b2b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.view;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -20,11 +22,9 @@
 import javax.annotation.Nonnull;
 import lombok.RequiredArgsConstructor;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @RequiredArgsConstructor
-public class DataHubViewType implements com.linkedin.datahub.graphql.types.EntityType<DataHubView, String> {
+public class DataHubViewType
+    implements com.linkedin.datahub.graphql.types.EntityType<DataHubView, String> {
 
   public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME);
   private final EntityClient _entityClient;
@@ -44,13 +44,16 @@ public Class<DataHubView> objectClass() {
   }
 
   @Override
-  public List<DataFetcherResult<DataHubView>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context)
-      throws Exception {
+  public List<DataFetcherResult<DataHubView>> batchLoad(
+      @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception {
     final List<Urn> viewUrns = urns.stream().map(this::getUrn).collect(Collectors.toList());
 
     try {
       final Map<Urn, EntityResponse> entities =
-          _entityClient.batchGetV2(DATAHUB_VIEW_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH,
+          _entityClient.batchGetV2(
+              DATAHUB_VIEW_ENTITY_NAME,
+              new HashSet<>(viewUrns),
+              ASPECTS_TO_FETCH,
               context.getAuthentication());
 
       final List<EntityResponse> gmsResults = new ArrayList<>();
@@ -58,8 +61,13 @@ public List<DataFetcherResult<DataHubView>> batchLoad(@Nonnull List<String> urns
         gmsResults.add(entities.getOrDefault(urn, null));
       }
       return gmsResults.stream()
-          .map(gmsResult -> gmsResult == null ? null
-              : DataFetcherResult.<DataHubView>newResult().data(DataHubViewMapper.map(gmsResult)).build())
+          .map(
+              gmsResult ->
+                  gmsResult == null
+                      ? null
+                      : DataFetcherResult.<DataHubView>newResult()
+                          .data(DataHubViewMapper.map(gmsResult))
+                          .build())
           .collect(Collectors.toList());
     } catch (Exception e) {
       throw new RuntimeException("Failed to batch load Views", e);
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java
index bb9de5fb96802..4b837605d4e31 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java
@@ -5,39 +5,30 @@
 import org.joda.time.DateTimeConstants;
 
 public class DateUtil {
 
-    public DateTime getNow() {
-        return DateTime.now();
-    }
+  public DateTime getNow() {
+    return DateTime.now();
+  }
 
-    public DateTime getStartOfNextWeek() {
-        return setTimeToZero(getNow()
-            .withDayOfWeek(DateTimeConstants.SUNDAY)
-            .plusDays(1));
-    }
+  public DateTime getStartOfNextWeek() {
+    return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1));
+  }
 
-    public DateTime getStartOfNextMonth() {
-        return setTimeToZero(getNow()
-            .withDayOfMonth(1)
-            .plusMonths(1));
-    }
+  public DateTime getStartOfNextMonth() {
+    return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1));
+  }
 
-    public DateTime setTimeToZero(DateTime input) {
-        return input.withHourOfDay(0)
-            .withMinuteOfHour(0)
-            .withSecondOfMinute(0)
-            .withMillisOfDay(0);
-    }
+  public DateTime setTimeToZero(DateTime input) {
+    return input.withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0).withMillisOfDay(0);
+  }
 
-    public DateTime getTomorrowStart() {
-        return setTimeToZero(getNow().plusDays(1));
-    }
+  public DateTime getTomorrowStart() {
+    return setTimeToZero(getNow().plusDays(1));
+  }
 
-    public DateRange getTrailingWeekDateRange() {
-        final DateTime todayEnd = getTomorrowStart().minusMillis(1);
-        final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1);
-        return new DateRange(
-            String.valueOf(aWeekAgoStart.getMillis()),
-            String.valueOf(todayEnd.getMillis())
-        );
-    }
+  public DateRange getTrailingWeekDateRange() {
+    final DateTime todayEnd = getTomorrowStart().minusMillis(1);
+    final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1);
+    return new DateRange(
+        String.valueOf(aWeekAgoStart.getMillis()), String.valueOf(todayEnd.getMillis()));
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java
index 7f90071c6770c..904db311d34d0 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java
@@ -6,7 +6,6 @@
 import java.util.List;
 import javax.annotation.Nullable;
 
-
 public class SearchInsightsUtil {
 
   public static List<SearchInsight> getInsightsFromFeatures(@Nullable final DoubleMap features) {
@@ -18,5 +17,5 @@ public static List<SearchInsight> getInsightsFromFeatures(@Nullable final Double
     return Collections.emptyList();
   }
 
-  private SearchInsightsUtil() { }
+  private SearchInsightsUtil() {}
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java
index 606123cac926d..69cd73ecd7d68 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java
@@ -14,17 +14,17 @@
 import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.mxe.MetadataChangeProposal;
-import org.mockito.Mockito;
-
 import java.util.List;
-
+import org.mockito.Mockito;
 
 public class TestUtils {
 
   public static EntityService getMockEntityService() {
-    PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader()
+    PathSpecBasedSchemaAnnotationVisitor.class
+        .getClassLoader()
         .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
-    EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml"));
+    EntityRegistry registry =
+        new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml"));
     EntityService mockEntityService = Mockito.mock(EntityService.class);
     Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry);
     return mockEntityService;
@@ -44,9 +44,10 @@ public static QueryContext getMockAllowContext(String actorUrn) {
     Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result);
 
     Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer);
-    Mockito.when(mockContext.getAuthentication()).thenReturn(
-        new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")
-    );
+    Mockito.when(mockContext.getAuthentication())
+        .thenReturn(
+            new Authentication(
+                new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"));
     return mockContext;
   }
 
@@ -60,9 +61,10 @@ public static QueryContext getMockAllowContext(String actorUrn, AuthorizationReq
     Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result);
 
     Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer);
-    Mockito.when(mockContext.getAuthentication()).thenReturn(
-        new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")
-    );
+    Mockito.when(mockContext.getAuthentication())
+        .thenReturn(
+            new Authentication(
+                new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"));
     return mockContext;
   }
 
@@ -80,9 +82,10 @@ public static QueryContext getMockDenyContext(String actorUrn) {
     Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result);
 
     Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer);
-    Mockito.when(mockContext.getAuthentication()).thenReturn(
-        new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")
-    );
+    Mockito.when(mockContext.getAuthentication())
+        .thenReturn(
+            new Authentication(
+                new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"));
     return mockContext;
   }
 
@@ -96,55 +99,54 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ
     Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result);
 
     Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer);
-    Mockito.when(mockContext.getAuthentication()).thenReturn(
-        new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")
-    );
+    Mockito.when(mockContext.getAuthentication())
+        .thenReturn(
+            new Authentication(
+                new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"));
     return mockContext;
   }
 
-  public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) {
+  public static void verifyIngestProposal(
+      EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) {
     verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal));
   }
 
-  public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, List<MetadataChangeProposal> proposals) {
-    AspectsBatchImpl batch = AspectsBatchImpl.builder()
-        .mcps(proposals, mockService.getEntityRegistry())
-        .build();
-    Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
-        Mockito.eq(batch),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );
+  public static void verifyIngestProposal(
+      EntityService mockService, int numberOfInvocations, List<MetadataChangeProposal> proposals) {
+    AspectsBatchImpl batch =
+        AspectsBatchImpl.builder().mcps(proposals, mockService.getEntityRegistry()).build();
+    Mockito.verify(mockService, Mockito.times(numberOfInvocations))
+        .ingestProposal(Mockito.eq(batch), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
-  public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) {
-    Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );
+  public static void verifySingleIngestProposal(
+      EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) {
+    Mockito.verify(mockService, Mockito.times(numberOfInvocations))
+        .ingestProposal(Mockito.eq(proposal), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
   public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) {
-    Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(numberOfInvocations))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
-  public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations) {
-    Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
+  public static void verifySingleIngestProposal(
+      EntityService mockService, int numberOfInvocations) {
+    Mockito.verify(mockService, Mockito.times(numberOfInvocations))
+        .ingestProposal(
             Mockito.any(MetadataChangeProposal.class),
             Mockito.any(AuditStamp.class),
-            Mockito.eq(false)
-    );
+            Mockito.eq(false));
   }
 
   public static void verifyNoIngestProposal(EntityService mockService) {
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
-  private TestUtils() { }
+  private TestUtils() {}
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java
index 7cd548a4790ba..57d85e5b204c2 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static org.testng.AssertJUnit.assertEquals;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -15,16 +18,11 @@
 import com.linkedin.metadata.query.filter.CriterionArray;
 import com.linkedin.metadata.query.filter.Filter;
 import graphql.schema.DataFetchingEnvironment;
-import org.testng.annotations.Test;
-import org.mockito.Mockito;
-
 import java.util.HashSet;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static org.testng.AssertJUnit.assertEquals;
-
+import org.mockito.Mockito;
+import org.testng.annotations.Test;
 
 public class ResolverUtilsTest {
 
@@ -35,46 +33,48 @@ public void testCriterionFromFilter() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockAllowContext);
 
     // this is the expected path
-    Criterion valuesCriterion = criterionFromFilter(
-        new FacetFilterInput(
-            "tags",
-            null,
-            ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"),
-            false,
-            FilterOperator.EQUAL
-        )
-    );
-    assertEquals(valuesCriterion, new Criterion().setValue("urn:li:tag:abc").setValues(
-        new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))
-    ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"));
+    Criterion valuesCriterion =
+        criterionFromFilter(
+            new FacetFilterInput(
+                "tags",
+                null,
+                ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"),
+                false,
+                FilterOperator.EQUAL));
+    assertEquals(
+        valuesCriterion,
+        new Criterion()
+            .setValue("urn:li:tag:abc")
+            .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")))
+            .setNegated(false)
+            .setCondition(Condition.EQUAL)
+            .setField("tags.keyword"));
 
     // this is the legacy pathway
-    Criterion valueCriterion = criterionFromFilter(
-        new FacetFilterInput(
-            "tags",
-            "urn:li:tag:abc",
-            null,
-            true,
-            FilterOperator.EQUAL
-        )
-    );
-    assertEquals(valueCriterion, new Criterion().setValue("urn:li:tag:abc").setValues(
-        new StringArray(ImmutableList.of("urn:li:tag:abc"))
-    ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword"));
+    Criterion valueCriterion =
+        criterionFromFilter(
+            new FacetFilterInput("tags", "urn:li:tag:abc", null, true, FilterOperator.EQUAL));
+    assertEquals(
+        valueCriterion,
+        new Criterion()
+            .setValue("urn:li:tag:abc")
+            .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc")))
+            .setNegated(true)
+            .setCondition(Condition.EQUAL)
+            .setField("tags.keyword"));
 
-    // check that both being null doesn't cause a NPE. this should never happen except via API interaction
-    Criterion doubleNullCriterion = criterionFromFilter(
-        new FacetFilterInput(
-            "tags",
-            null,
-            null,
-            true,
-            FilterOperator.EQUAL
-        )
-    );
-    assertEquals(doubleNullCriterion, new Criterion().setValue("").setValues(
-        new StringArray(ImmutableList.of())
-    ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword"));
+    // check that both being null doesn't cause an NPE. this should never happen except via API
+    // interaction
+    Criterion doubleNullCriterion =
+        criterionFromFilter(new FacetFilterInput("tags", null, null, true, FilterOperator.EQUAL));
+    assertEquals(
+        doubleNullCriterion,
+        new Criterion()
+            .setValue("")
+            .setValues(new StringArray(ImmutableList.of()))
+            .setNegated(true)
+            .setCondition(Condition.EQUAL)
+            .setField("tags.keyword"));
   }
 
   @Test
@@ -85,21 +85,25 @@ public void testBuildFilterWithUrns() throws Exception {
     urns.add(urn1);
    urns.add(urn2);
 
-    Criterion ownersCriterion = new Criterion()
-        .setField("owners")
-        .setValues(new StringArray("urn:li:corpuser:chris"))
-        .setCondition(Condition.EQUAL);
+    Criterion ownersCriterion =
+        new Criterion()
+            .setField("owners")
+            .setValues(new StringArray("urn:li:corpuser:chris"))
+            .setCondition(Condition.EQUAL);
     CriterionArray andCriterionArray = new CriterionArray(ImmutableList.of(ownersCriterion));
     final Filter filter = new Filter();
-    filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-        new ConjunctiveCriterion().setAnd(andCriterionArray)
-    )));
+    filter.setOr(
+        new ConjunctiveCriterionArray(
+            ImmutableList.of(new ConjunctiveCriterion().setAnd(andCriterionArray))));
 
     Filter finalFilter = buildFilterWithUrns(urns, filter);
 
-    Criterion urnsCriterion = new Criterion().setField("urn")
-        .setValue("")
-        .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList())));
+    Criterion urnsCriterion =
+        new Criterion()
+            .setField("urn")
+            .setValue("")
+            .setValues(
+                new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList())));
 
     for (ConjunctiveCriterion conjunctiveCriterion : finalFilter.getOr()) {
       assertEquals(conjunctiveCriterion.getAnd().contains(ownersCriterion), true);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
index c7424174255ce..0d87ce4b2e2ad 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers;
 
+import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
+import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext;
+import static org.testng.Assert.assertThrows;
+import static org.testng.Assert.assertTrue;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
@@ -9,35 +14,35 @@
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.service.LineageService;
 import graphql.schema.DataFetchingEnvironment;
-import org.joda.time.DateTimeUtils;
-import org.mockito.Mockito;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CompletionException;
-
-import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
-import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext;
-import static org.testng.Assert.assertThrows;
-import static org.testng.Assert.assertTrue;
+import org.joda.time.DateTimeUtils;
+import org.mockito.Mockito;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
 
 public class UpdateLineageResolverTest {
 
   private static EntityService _mockService = Mockito.mock(EntityService.class);
   private static LineageService _lineageService;
   private static DataFetchingEnvironment _mockEnv;
-  private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)";
-  private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)";
-  private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)";
-  private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)";
+  private static final String DATASET_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)";
+  private static final String DATASET_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)";
+  private static final String DATASET_URN_3 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)";
+  private static final String DATASET_URN_4 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)";
   private static final String CHART_URN = "urn:li:chart:(looker,baz)";
   private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)";
-  private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
-  private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
+  private static final String DATAJOB_URN_1 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
+  private static final String DATAJOB_URN_2 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
 
   @BeforeMethod
   public void setupTest() {
@@ -50,8 +55,12 @@ public void setupTest() {
   // Adds upstream for dataset1 to dataset2 and removes edge to dataset3
   @Test
   public void testUpdateDatasetLineage() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4));
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DATASET_URN_1, DATASET_URN_2),
+            createLineageEdge(DATASET_URN_3, DATASET_URN_4));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -65,7 +74,8 @@ public void testUpdateDatasetLineage() throws Exception {
 
   @Test
   public void testFailUpdateWithMissingDownstream() throws Exception {
-    List<LineageEdge> edgesToAdd = Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2));
+    List<LineageEdge> edgesToAdd =
+        Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2));
     mockInputAndContext(edgesToAdd, new ArrayList<>());
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -93,8 +103,12 @@ public void testUpdateChartLineage() throws Exception {
   // Adds upstream for dashboard to dataset2 and chart1 and removes edge to dataset1
   @Test
   public void testUpdateDashboardLineage() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_2), createLineageEdge(DASHBOARD_URN, CHART_URN));
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DASHBOARD_URN, DATASET_URN_2),
+            createLineageEdge(DASHBOARD_URN, CHART_URN));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1));
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -109,12 +123,13 @@ public void testUpdateDashboardLineage() throws Exception {
   // Adds upstream datajob and dataset and one downstream dataset
   @Test
   public void testUpdateDataJobLineage() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(
-        createLineageEdge(DATAJOB_URN_1, DATASET_URN_2),
-        createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2),
-        createLineageEdge(DATASET_URN_3, DATAJOB_URN_1)
-    );
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DATAJOB_URN_1, DATASET_URN_2),
+            createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2),
+            createLineageEdge(DATASET_URN_3, DATAJOB_URN_1));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1));
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -129,8 +144,12 @@ public void testUpdateDataJobLineage() throws Exception {
 
   @Test
   public void testFailUpdateLineageNoPermissions() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4));
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DATASET_URN_1, DATASET_URN_2),
+            createLineageEdge(DATASET_URN_3, DATASET_URN_4));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
 
     QueryContext mockContext = getMockDenyContext();
     UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove);
@@ -147,7 +166,6 @@ public void testFailUpdateLineageNoPermissions() throws Exception {
     assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join());
   }
 
-
   private void mockInputAndContext(List<LineageEdge> edgesToAdd, List<LineageEdge> edgesToRemove) {
     QueryContext mockContext = getMockAllowContext();
     UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java
index 6fdb1f2b70ce4..f590e71146eb4 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.assertion;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.assertion.AssertionResult;
@@ -19,9 +21,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class AssertionRunEventResolverTest {
   @Test
   public void testGetSuccess() throws Exception {
@@ -29,35 +28,36 @@ public void testGetSuccess() throws Exception {
     final Urn assertionUrn = Urn.createFromString("urn:li:assertion:guid-1");
     final Urn asserteeUrn = Urn.createFromString("urn:li:dataset:(test,test,test)");
 
-    final AssertionRunEvent gmsRunEvent = new AssertionRunEvent()
-        .setTimestampMillis(12L)
-        .setAssertionUrn(assertionUrn)
-        .setRunId("test-id")
-        .setAsserteeUrn(asserteeUrn)
-        .setStatus(AssertionRunStatus.COMPLETE)
-        .setResult(new AssertionResult()
-            .setActualAggValue(10)
-            .setMissingCount(0L)
-            .setRowCount(1L)
-            .setType(AssertionResultType.SUCCESS)
-            .setUnexpectedCount(2L)
-        );
+    final AssertionRunEvent gmsRunEvent =
+        new AssertionRunEvent()
+            .setTimestampMillis(12L)
+            .setAssertionUrn(assertionUrn)
+            .setRunId("test-id")
+            .setAsserteeUrn(asserteeUrn)
+            .setStatus(AssertionRunStatus.COMPLETE)
+            .setResult(
+                new AssertionResult()
+                    .setActualAggValue(10)
+                    .setMissingCount(0L)
+                    .setRowCount(1L)
+                    .setType(AssertionResultType.SUCCESS)
+                    .setUnexpectedCount(2L));
 
-    Mockito.when(mockClient.getTimeseriesAspectValues(
-        Mockito.eq(assertionUrn.toString()),
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
-        Mockito.eq(0L),
-        Mockito.eq(10L),
-        Mockito.eq(5),
-        Mockito.eq(AssertionRunEventResolver.buildFilter(null, AssertionRunStatus.COMPLETE.toString())),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        ImmutableList.of(
-            new EnvelopedAspect()
-                .setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent))
-        )
-    );
+    Mockito.when(
+            mockClient.getTimeseriesAspectValues(
+                Mockito.eq(assertionUrn.toString()),
+                Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+                Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
+                Mockito.eq(0L),
+                Mockito.eq(10L),
+                Mockito.eq(5),
+                Mockito.eq(
+                    AssertionRunEventResolver.buildFilter(
+                        null, AssertionRunStatus.COMPLETE.toString())),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableList.of(
+                new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent))));
 
     AssertionRunEventResolver resolver = new AssertionRunEventResolver(mockClient);
 
@@ -66,9 +66,12 @@ public void testGetSuccess() throws Exception {
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))).thenReturn("COMPLETE");
-    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))).thenReturn(0L);
-    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))).thenReturn(10L);
+    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null)))
+        .thenReturn("COMPLETE");
+    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null)))
+        .thenReturn(0L);
+    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null)))
+        .thenReturn(10L);
     Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("limit"), Mockito.eq(null))).thenReturn(5);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -78,32 +81,37 @@ public void testGetSuccess() throws Exception {
 
     AssertionRunEventsResult result = resolver.get(mockEnv).get();
 
-    Mockito.verify(mockClient, Mockito.times(1)).getTimeseriesAspectValues(
-        Mockito.eq(assertionUrn.toString()),
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
-        Mockito.eq(0L),
-        Mockito.eq(10L),
-        Mockito.eq(5),
-        Mockito.any(Filter.class),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .getTimeseriesAspectValues(
+            Mockito.eq(assertionUrn.toString()),
+            Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+            Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
+            Mockito.eq(0L),
+            Mockito.eq(10L),
+            Mockito.eq(5),
+            Mockito.any(Filter.class),
+            Mockito.any(Authentication.class));
 
     // Assert that GraphQL assertion run event matches expectations
     assertEquals(result.getTotal(), 1);
     assertEquals(result.getFailed(), 0);
     assertEquals(result.getSucceeded(), 1);
 
-    com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = resolver.get(mockEnv).get().getRunEvents().get(0);
+    com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent =
+        resolver.get(mockEnv).get().getRunEvents().get(0);
     assertEquals(graphqlRunEvent.getAssertionUrn(), assertionUrn.toString());
     assertEquals(graphqlRunEvent.getAsserteeUrn(), asserteeUrn.toString());
     assertEquals(graphqlRunEvent.getRunId(), "test-id");
-    assertEquals(graphqlRunEvent.getStatus(), com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE);
+    assertEquals(
+        graphqlRunEvent.getStatus(),
+        com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE);
     assertEquals((float) graphqlRunEvent.getTimestampMillis(), 12L);
     assertEquals((float) graphqlRunEvent.getResult().getActualAggValue(), 10);
     assertEquals((long) graphqlRunEvent.getResult().getMissingCount(), 0L);
     assertEquals((long) graphqlRunEvent.getResult().getRowCount(), 1L);
     assertEquals((long) graphqlRunEvent.getResult().getUnexpectedCount(), 2L);
-    assertEquals(graphqlRunEvent.getResult().getType(), com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS);
+    assertEquals(
+        graphqlRunEvent.getResult().getType(),
+        com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS);
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
index 8afec0a889577..019d254ffdaac 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.assertion;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.assertion.AssertionInfo;
@@ -18,10 +21,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class DeleteAssertionResolverTest {
 
   private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid";
@@ -33,20 +32,17 @@ public void testGetSuccess() throws Exception {
     EntityService mockService = getMockEntityService();
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(TEST_ASSERTION_URN),
-        Constants.ASSERTION_INFO_ASPECT_NAME,
-        0L
-    )).thenReturn(
-        new AssertionInfo()
-            .setType(AssertionType.DATASET)
-            .setDatasetAssertion(
-                new DatasetAssertionInfo()
-                    .setDataset(Urn.createFromString(TEST_DATASET_URN))
-                    .setScope(DatasetAssertionScope.DATASET_COLUMN)
-                    .setOperator(AssertionStdOperator.BETWEEN)
-            )
-    );
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
+        .thenReturn(
+            new AssertionInfo()
+                .setType(AssertionType.DATASET)
+                .setDatasetAssertion(
+                    new DatasetAssertionInfo()
+                        .setDataset(Urn.createFromString(TEST_DATASET_URN))
+                        .setScope(DatasetAssertionScope.DATASET_COLUMN)
+                        .setOperator(AssertionStdOperator.BETWEEN)));
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
@@ -58,20 +54,19 @@
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.any(Authentication.class));
 
-    Mockito.verify(mockService, Mockito.times(1)).getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
-        Mockito.eq(0L)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .getAspect(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
+            Mockito.eq(0L));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
   }
 
   @Test
@@ -80,11 +75,10 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception {
     EntityService mockService = getMockEntityService();
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(TEST_ASSERTION_URN),
-        Constants.ASSERTION_INFO_ASPECT_NAME,
-        0L
-    )).thenReturn(null);
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
+        .thenReturn(null);
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
@@ -96,20 +90,19 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.any(Authentication.class));
 
-    Mockito.verify(mockService, Mockito.times(1)).getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
-        Mockito.eq(0L)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .getAspect(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
+            Mockito.eq(0L));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
   }
 
   @Test
@@ -130,21 +123,20 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.any(Authentication.class)
-    );
-
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))),
-        Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))
-    );
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.any(Authentication.class));
+
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))),
+            Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
   }
 
   @Test
@@ -153,20 +145,17 @@ public void testGetUnauthorized() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(TEST_ASSERTION_URN),
-        Constants.ASSERTION_INFO_ASPECT_NAME,
-        0L
-    )).thenReturn(
-        new AssertionInfo()
-            .setType(AssertionType.DATASET)
-            .setDatasetAssertion(
-                new DatasetAssertionInfo()
-                    .setDataset(Urn.createFromString(TEST_DATASET_URN))
-                    .setScope(DatasetAssertionScope.DATASET_COLUMN)
-                    .setOperator(AssertionStdOperator.BETWEEN)
-            )
-    );
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
+        .thenReturn(
+            new AssertionInfo()
+                .setType(AssertionType.DATASET)
+                .setDatasetAssertion(
+                    new DatasetAssertionInfo()
+                        .setDataset(Urn.createFromString(TEST_DATASET_URN))
+                        .setScope(DatasetAssertionScope.DATASET_COLUMN)
+                        .setOperator(AssertionStdOperator.BETWEEN)));
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
@@ -177,17 +166,16 @@
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
 
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
@@ -202,4 +190,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java index c5b5725f23b7a..19152a7a11877 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -37,9 +39,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityAssertionsResolverTest { @Test public void testGetSuccess() throws Exception { @@ -49,73 +48,76 @@ public void testGetSuccess() throws Exception { Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); Urn assertionUrn = Urn.createFromString("urn:li:assertion:test-guid"); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(datasetUrn.toString()), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(assertionUrn) - .setType("Asserts")) - )) - ); - + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(datasetUrn.toString()), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship().setEntity(assertionUrn).setType("Asserts"))))); Map<String, com.linkedin.entity.EnvelopedAspect> assertionAspects = new HashMap<>(); assertionAspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionKey().setAssertionId("test-guid").data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new AssertionKey().setAssertionId("test-guid").data()))); assertionAspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(new DatasetAssertionInfo() - .setDataset(datasetUrn) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setAggregation(AssertionStdAggregation.MAX) - .setOperator(AssertionStdOperator.EQUAL_TO) - .setFields(new UrnArray(ImmutableList.of( - Urn.createFromString("urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)") - ))) - .setParameters(new AssertionStdParameters().setValue(new AssertionStdParameter() - .setValue("10") - .setType( - AssertionStdParameterType.NUMBER))) - ).data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(datasetUrn) + .setScope(DatasetAssertionScope.DATASET_COLUMN) +
.setAggregation(AssertionStdAggregation.MAX) + .setOperator(AssertionStdOperator.EQUAL_TO) + .setFields( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)")))) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setValue("10") + .setType(AssertionStdParameterType.NUMBER)))) + .data()))); assertionAspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) - .data() - )) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn)), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(ImmutableMap.of( - assertionUrn, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn) - .setAspects(new EnvelopedAspectMap(assertionAspects)))); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) + .data()))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn)), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + assertionUrn, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn) + .setAspects(new EnvelopedAspectMap(assertionAspects)))); EntityAssertionsResolver resolver = new EntityAssertionsResolver(mockClient, graphClient); @@ -134,38 +136,45 @@ public void testGetSuccess() throws Exception { EntityAssertionsResult result = resolver.get(mockEnv).get(); - Mockito.verify(graphClient, Mockito.times(1)).getRelatedEntities( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(graphClient, Mockito.times(1)) + .getRelatedEntities( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getStart(), 0); assertEquals(result.getCount(), 1); assertEquals(result.getTotal(), 1); - com.linkedin.datahub.graphql.generated.Assertion assertion = resolver.get(mockEnv).get().getAssertions().get(0); + com.linkedin.datahub.graphql.generated.Assertion assertion = + resolver.get(mockEnv).get().getAssertions().get(0); assertEquals(assertion.getUrn(), assertionUrn.toString()); assertEquals(assertion.getType(), EntityType.ASSERTION); assertEquals(assertion.getPlatform().getUrn(), "urn:li:dataPlatform:hive"); - assertEquals(assertion.getInfo().getType(), com.linkedin.datahub.graphql.generated.AssertionType.DATASET); + assertEquals( + assertion.getInfo().getType(), + com.linkedin.datahub.graphql.generated.AssertionType.DATASET); assertEquals(assertion.getInfo().getDatasetAssertion().getDatasetUrn(), datasetUrn.toString()); - assertEquals(assertion.getInfo().getDatasetAssertion().getScope(), com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); -
assertEquals(assertion.getInfo().getDatasetAssertion().getAggregation(), com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); - assertEquals(assertion.getInfo().getDatasetAssertion().getOperator(), com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), + assertEquals( + assertion.getInfo().getDatasetAssertion().getScope(), + com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); + assertEquals( + assertion.getInfo().getDatasetAssertion().getAggregation(), + com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); + assertEquals( + assertion.getInfo().getDatasetAssertion().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 52d06f73dcfab..419eb71d5e143 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class ListAccessTokensResolverTest { @Test @@ -42,16 +41,22 @@ public void testGetSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); final Authentication testAuth = getAuthentication(mockEnv); - Mockito.when(mockClient.search( - Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(buildFilter(filters, Collections.emptyList())), - Mockito.any(SortCriterion.class), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.eq(testAuth), - Mockito.any(SearchFlags.class))) - .thenReturn(new SearchResult().setFrom(0).setNumEntities(0).setPageSize(0).setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(buildFilter(filters, Collections.emptyList())), + Mockito.any(SortCriterion.class), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.eq(testAuth), + Mockito.any(SearchFlags.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setNumEntities(0) + .setPageSize(0) + .setEntities(new SearchEntityArray())); final ListAccessTokensResolver resolver = new ListAccessTokensResolver(mockClient); final ListAccessTokenResult listAccessTokenResult = resolver.get(mockEnv).get(); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 4a948537ab4fe..bffc2b31af2b9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -29,15 +31,12 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.List; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.List; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class BrowseV2ResolverTest { private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @@ -46,23 +45,30 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - null, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + null, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); @@ -92,23 +98,30 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { orFilters.add(andFilterInput); Filter filter = ResolverUtils.buildFilter(null, orFilters); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "test", - filter, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "test", + filter, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") +
.setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); @@ -132,23 +145,30 @@ public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - viewInfo.getDefinition().getFilter(), - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + viewInfo.getDefinition().getFilter(), + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); @@ -166,16 +186,25 @@ public static void testBrowseV2SuccessWithView() throws Exception { compareResultToExpectedData(result, getExpectedResult()); } - private static void compareResultToExpectedData(BrowseResultsV2 result, BrowseResultsV2 expected) { + private static void compareResultToExpectedData( + BrowseResultsV2 result, BrowseResultsV2 expected) { Assert.assertEquals(result.getCount(), expected.getCount()); Assert.assertEquals(result.getStart(), expected.getStart()); Assert.assertEquals(result.getTotal(), expected.getTotal()); Assert.assertEquals(result.getGroups().size(), expected.getGroups().size()); - result.getGroups().forEach(group -> { - Assert.assertTrue(expected.getGroups().stream().filter(g -> g.getName().equals(group.getName())).count() > 0); - }); + result + .getGroups() + .forEach( + group -> { + Assert.assertTrue( + expected.getGroups().stream() + .filter(g -> g.getName().equals(group.getName())) + .count() + > 0); + }); Assert.assertEquals(result.getMetadata().getPath(), expected.getMetadata().getPath()); - Assert.assertEquals(result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); + Assert.assertEquals( + result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); } private static BrowseResultsV2 getExpectedResult() { @@ -185,19 +214,22 @@ private static BrowseResultsV2 getExpectedResult() { results.setCount(10); List<com.linkedin.datahub.graphql.generated.BrowseResultGroupV2> groups = new ArrayList<>(); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup1.setName("first group");
browseGroup1.setCount(5L); browseGroup1.setHasSubGroups(true); groups.add(browseGroup1); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup2.setName("second group"); browseGroup2.setCount(4L); browseGroup2.setHasSubGroups(false); groups.add(browseGroup2); results.setGroups(groups); - com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); + com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = + new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); resultMetadata.setPath(ImmutableList.of("test", "path")); resultMetadata.setTotalNumEntities(100L); results.setMetadata(resultMetadata); @@ -212,60 +244,52 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - BrowseResultV2 result - ) throws Exception { + BrowseResultV2 result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.browseV2( - Mockito.eq(entityName), - Mockito.eq(path), - Mockito.eq(filter), - Mockito.eq(query), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.browseV2( + Mockito.eq(entityName), + Mockito.eq(path), + Mockito.eq(filter), + Mockito.eq(query), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } + private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } - private BrowseV2ResolverTest() { } - + private BrowseV2ResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java index 659e6aea740ec..75abf1d48a15c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -16,12 +18,10 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityBrowsePathsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { @@ -30,9 +30,7 @@ public void testGetSuccess() throws Exception { List<String> path = ImmutableList.of("prod", "mysql"); Mockito.when(mockType.browsePaths(Mockito.eq(TEST_ENTITY_URN), Mockito.any())) - .thenReturn(ImmutableList.of( - new BrowsePath(path)) - ); + .thenReturn(ImmutableList.of(new BrowsePath(path))); // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -55,9 +53,9 @@ public void testGetSuccess() throws Exception { @Test public void testGetBrowsePathsException() throws Exception { BrowsableEntityType mockType = Mockito.mock(BrowsableEntityType.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockType).browsePaths( - Mockito.any(), - Mockito.any()); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockType) + .browsePaths(Mockito.any(), Mockito.any()); EntityBrowsePathsResolver resolver = new EntityBrowsePathsResolver(mockType); @@ -75,4 +73,4 @@ public void testGetBrowsePathsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index 39a08ca26167d..1203f4e22bdc2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -24,17 +26,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class ContainerEntitiesResolverTest { - private static final ContainerEntitiesInput TEST_INPUT = new ContainerEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final ContainerEntitiesInput TEST_INPUT = + new ContainerEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -44,35 +39,39 @@ public void testGetSuccess() throws Exception { final String
childUrn = "urn:li:dataset:(test,test,test)"; final String containerUrn = "urn:li:container:test-container"; - final Criterion filterCriterion = new Criterion() - .setField("container.keyword") - .setCondition(Condition.EQUAL) - .setValue(containerUrn); + final Criterion filterCriterion = + new Criterion() + .setField("container.keyword") + .setCondition(Condition.EQUAL) + .setValue(containerUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); ContainerEntitiesResolver resolver = new ContainerEntitiesResolver(mockClient); @@ -92,6 +91,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java index 92f8dfc4e1d67..b4c58ca182b2f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.container.Container; @@ -14,18 +19,11 @@ import com.linkedin.entity.EnvelopedAspectMap; import 
com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentContainersResolverTest { @Test @@ -42,77 +40,88 @@ public void testGetSuccess() throws Exception { datasetEntity.setType(EntityType.DATASET); Mockito.when(mockEnv.getSource()).thenReturn(datasetEntity); - final Container parentContainer1 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); - final Container parentContainer2 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); + final Container parentContainer1 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); + final Container parentContainer2 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); Map<String, EnvelopedAspect> datasetAspects = new HashMap<>(); - datasetAspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); + datasetAspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); Map<String, EnvelopedAspect> parentContainer1Aspects = new HashMap<>(); - parentContainer1Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_schema").data() - ))); - parentContainer1Aspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - parentContainer2.data() - ))); + parentContainer1Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_schema").data()))); + parentContainer1Aspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer2.data()))); Map<String, EnvelopedAspect> parentContainer2Aspects = new HashMap<>(); - parentContainer2Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_database").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(datasetUrn.getEntityType()), - Mockito.eq(datasetUrn), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer1.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( -
Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer2.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + parentContainer2Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_database").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(datasetUrn.getEntityType()), + Mockito.eq(datasetUrn), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer1.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer2.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); ParentContainersResolver resolver = new ParentContainersResolver(mockClient); ParentContainersResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); - assertEquals(result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); - assertEquals(result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); + assertEquals( + result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); + assertEquals( + result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); } } diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java index 6a9617ea41b44..2abfa39b35149 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; @@ -28,9 +30,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - public class DashboardStatsSummaryTest { private static final Dashboard TEST_SOURCE = new Dashboard(); @@ -65,31 +64,35 @@ public void testGetSuccess() throws Exception { Assert.assertEquals((int) result.getUniqueUserCountLast30Days(), 2); // Validate the cache. -- First return a new result. - DashboardUsageStatistics newUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(40); - EnvelopedAspect newResult = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); + DashboardUsageStatistics newUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(40); + EnvelopedAspect newResult = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn(ImmutableList.of(newResult)); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(newResult)); // Then verify that the new result is _not_ returned (cache hit) DashboardStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getViewCount(), 20); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 2); } @@ -97,28 +100,27 @@ public void
testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DASHBOARD_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DASHBOARD_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -140,48 +142,46 @@ private TimeseriesAspectService initTestAspectService() { TimeseriesAspectService mockClient = Mockito.mock(TimeseriesAspectService.class); // Mock fetching the latest absolute (snapshot) statistics - DashboardUsageStatistics latestUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(20); - EnvelopedAspect envelopedLatestStats = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); + DashboardUsageStatistics latestUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(20); + EnvelopedAspect envelopedLatestStats = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn( - ImmutableList.of(envelopedLatestStats) - ); - - Mockito.when(mockClient.getAggregatedStats( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.any(), - Mockito.any(Filter.class), - Mockito.any() - )).thenReturn( - new GenericTable().setRows(new StringArrayArray( - new StringArray(ImmutableList.of( - TEST_USER_URN_1, "10", "20", "30", "1", "1", "1" - )), - new StringArray(ImmutableList.of( - TEST_USER_URN_2, "20", "30", "40", "1", "1", "1" - )) - )) - .setColumnNames(new StringArray()) - .setColumnTypes(new StringArray()) - ); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), +
Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(envelopedLatestStats)); + + Mockito.when( + mockClient.getAggregatedStats( + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.any(), + Mockito.any(Filter.class), + Mockito.any())) + .thenReturn( + new GenericTable() + .setRows( + new StringArrayArray( + new StringArray( + ImmutableList.of(TEST_USER_URN_1, "10", "20", "30", "1", "1", "1")), + new StringArray( + ImmutableList.of(TEST_USER_URN_2, "20", "30", "40", "1", "1", "1")))) + .setColumnNames(new StringArray()) + .setColumnTypes(new StringArray())); return mockClient; } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java index ea9ab2a1b768b..3ff0120448e54 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataset; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; @@ -23,60 +25,54 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class DatasetHealthResolverTest { private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:test-guid-2"; - @Test public void testGetSuccessHealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts")) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new
EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -103,20 +99,20 @@ public void testGetSuccessNullHealth() throws Exception { TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); // 0 associated assertions, meaning we don't report any health. - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(0) - .setRelationships(new EntityRelationshipArray(Collections.emptyList())) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -134,13 +130,9 @@ public void testGetSuccessNullHealth() throws Exception { List<Health> result = resolver.get(mockEnv).get(); assertEquals(result.size(), 0); - Mockito.verify(mockAspectService, Mockito.times(0)).getAggregatedStats( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockAspectService, Mockito.times(0)) + .getAggregatedStats( + Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); } @Test @@ -148,52 +140,47 @@ public void testGetSuccessUnhealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(2) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts"), - new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) - .setType("Asserts") - ) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string",
"string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )), - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN_2, "FAILURE", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(2) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"), + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray(ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")), + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN_2, "FAILURE", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java index 013e23b779c51..52516295f97ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java @@ -19,11 +19,11 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class DatasetStatsSummaryResolverTest { private static final Dataset TEST_SOURCE = new Dataset(); - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; private static final String TEST_USER_URN_1 = "urn:li:corpuser:test1"; private static final String TEST_USER_URN_2 = "urn:li:corpuser:test2"; @@ -35,28 +35,27 @@ public class DatasetStatsSummaryResolverTest { public void testGetSuccess() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + 
.setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(testResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(testResult); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -84,17 +83,19 @@ public void testGetSuccess() throws Exception { // Validate the cache. -- First return a new result. UsageQueryResult newResult = new UsageQueryResult(); newResult.setAggregations(new UsageQueryResultAggregations()); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(newResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(newResult); // Then verify that the new result is _not_ returned (cache hit) DatasetStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getQueryCountLast30Days(), 10); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 5); } @@ -102,28 +103,27 @@ public void testGetSuccess() throws Exception { public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index bae6f27a854bc..49ccc751d35f6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.delete; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; @@ -14,39 +18,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateSoftDeletedResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -55,17 +56,21 @@ public void testGetSuccessNoExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(true); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - 
STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -76,16 +81,18 @@ public void testGetSuccessExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -96,17 +103,21 @@ public void testGetSuccessExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(false); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -115,15 +126,17 @@ public void testGetSuccessExistingStatus() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -134,7 +147,9 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,7 +166,9 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); @@ -165,20 +182,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index ce5a02bb573e1..8c3620fa978a9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; @@ -15,39 +19,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingDeprecation() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -56,46 +57,57 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 0L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 0L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(0L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(0L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = 
MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - final Deprecation originalDeprecation = new Deprecation() - .setDeprecated(false) - .setNote("") - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setNote("") + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -106,23 +118,31 @@ public void testGetSuccessExistingDeprecation() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(1L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(1L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final 
MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -131,15 +151,17 @@ public void testGetSuccessExistingDeprecation() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -150,9 +172,14 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -168,9 +195,14 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -183,21 +215,29 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); 
BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index 5d30ae08d6dea..e4be330f5ba2a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -26,20 +30,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = new UpdateDeprecationInput( - TEST_ENTITY_URN, - true, - 0L, - "Test note" - ); + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = + new UpdateDeprecationInput(TEST_ENTITY_URN, true, 0L, "Test note"); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); @Test @@ -47,16 +43,19 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new 
HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -71,41 +70,53 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation().setDeprecated(true).setDecommissionTime(0L).setNote("Test note").setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - Deprecation originalDeprecation = new Deprecation().setDeprecated(false).setDecommissionTime(1L).setActor(TEST_ACTOR_URN).setNote(""); + Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setDecommissionTime(1L) + .setActor(TEST_ACTOR_URN) + .setNote(""); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDeprecation.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + 
Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDeprecation.data()))))))); EntityService mockService = Mockito.mock(EntityService.class); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -120,24 +131,21 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setDecommissionTime(0L) - .setNote("Test note") - .setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -145,16 +153,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DEPRECATION_ASPECT_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DEPRECATION_ASPECT_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = Mockito.mock(EntityService.class); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -169,9 +180,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -188,18 +198,17 @@ public void testGetUnauthorized() 
throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); // Execute resolver @@ -210,4 +219,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index 8cd3c71a21555..d5ba88066e846 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.UrnArray; @@ -18,21 +22,17 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchSetDomainResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_DOMAIN_1_URN = "urn:li:domain:test-id-1"; private static final String TEST_DOMAIN_2_URN = "urn:li:domain:test-id-2"; @@ -40,19 +40,20 @@ public class BatchSetDomainResolverTest { public void testGetSuccessNoExistingDomains() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + 
Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -64,46 +65,53 @@ public void testGetSuccessNoExistingDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + 
Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -117,51 +125,58 @@ public void testGetSuccessExistingDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); proposal1.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_1)); proposal1.setEntityType(Constants.DATASET_ENTITY_NAME); proposal1.setAspectName(Constants.DOMAINS_ASPECT_NAME); proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); } @Test public void testGetSuccessUnsetDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -175,19 +190,24 @@ public void testGetSuccessUnsetDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -196,10 +216,11 @@ public void testGetSuccessUnsetDomains() throws Exception { public void testGetFailureDomainDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -210,9 +231,12 @@ public void testGetFailureDomainDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -224,15 +248,17 @@ public void testGetFailureDomainDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - 
Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -244,9 +270,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -262,9 +291,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -277,21 +309,27 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java index 1ea84b99cfec3..8f86e33158ad5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java @@ -6,13 +6,12 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.mockito.ArgumentMatcher; - public class CreateDomainProposalMatcher implements ArgumentMatcher<MetadataChangeProposal> { private MetadataChangeProposal left; public CreateDomainProposalMatcher(MetadataChangeProposal left) { - this.left = left; + this.left = left; } @Override @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } private boolean domainPropertiesMatch(GenericAspect left, GenericAspect right) { - DomainProperties leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DomainProperties.class - ); - - DomainProperties rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - DomainProperties.class - ); + DomainProperties leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DomainProperties.class); + + DomainProperties rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DomainProperties.class); // Omit timestamp comparison. return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 560a3865ce9e1..6184760abfabd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -15,49 +19,35 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.DomainKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; -import static org.testng.Assert.*; 
- - public class CreateDomainResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id"); - private static final CreateDomainInput TEST_INPUT = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - TEST_PARENT_DOMAIN_URN.toString() - ); + private static final CreateDomainInput TEST_INPUT = + new CreateDomainInput( + "test-id", "test-name", "test-description", TEST_PARENT_DOMAIN_URN.toString()); - private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = + new CreateDomainInput("test-id", "test-name", "test-description", null); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public void testGetSuccess() throws Exception { // Create resolver @@ -65,15 +55,13 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(true); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -81,14 +69,17 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -107,11 +98,11 @@ public void testGetSuccess() throws Exception { proposal.setChangeType(ChangeType.UPSERT); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat(new CreateDomainProposalMatcher(proposal)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -120,24 +111,23 @@ public void testGetSuccessNoParentDomain() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = 
new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_NO_PARENT_DOMAIN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -154,11 +144,11 @@ public void testGetSuccessNoParentDomain() throws Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(props)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat(new CreateDomainProposalMatcher(proposal)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -167,15 +157,13 @@ public void testGetInvalidParent() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -191,31 +179,32 @@ public void testGetNameConflict() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(true); QueryContext mockContext = getMockAllowContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)) - )); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)))); DomainProperties domainProperties = new DomainProperties(); domainProperties.setDescription(TEST_INPUT.getDescription()); @@ -225,18 +214,21 @@ public void testGetNameConflict() throws Exception { EntityResponse entityResponse = new EntityResponse(); EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(domainProperties.data()))); + envelopedAspectMap.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(domainProperties.data()))); entityResponse.setAspects(envelopedAspectMap); Map<Urn, EntityResponse> entityResponseMap = new HashMap<>(); entityResponseMap.put(TEST_DOMAIN_URN, entityResponse); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class) - )).thenReturn(entityResponseMap); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class))) + .thenReturn(entityResponseMap); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } @@ -255,9 +247,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -265,9 +256,9 @@ public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver @@ -278,4 +269,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff 
--git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java index 9bcdbe6d2a0e0..5632654a26ad9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -10,10 +13,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteDomainResolverTest { private static final String TEST_URN = "urn:li:domain:test-id"; @@ -30,15 +29,21 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has 0 child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1), + Mockito.any())) .thenReturn(new SearchResult().setNumEntities(0)); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -53,14 +58,20 @@ public void testDeleteWithChildDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1), + Mockito.any())) .thenReturn(new SearchResult().setNumEntities(1)); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -76,8 +87,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 93fe3d0017160..9596abf55d04f 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -26,18 +29,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.testng.Assert.*; - - public class DomainEntitiesResolverTest { - private static final DomainEntitiesInput TEST_INPUT = new DomainEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final DomainEntitiesInput TEST_INPUT = + new DomainEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -47,35 +42,42 @@ public void testGetSuccess() throws Exception { final String childUrn = "urn:li:dataset:(test,test,test)"; final String domainUrn = "urn:li:domain:test-domain"; - final Criterion filterCriterion = new Criterion() - .setField("domains.keyword") - .setCondition(Condition.EQUAL) - .setValue(domainUrn); + final Criterion filterCriterion = + new Criterion() + .setField("domains.keyword") + .setCondition(Condition.EQUAL) + .setValue(domainUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList())), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.eq( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList())), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); DomainEntitiesResolver resolver = new DomainEntitiesResolver(mockClient); @@ -95,6 +97,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), 
childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index bd8a8f98de497..ffc3e823d8351 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -20,46 +25,43 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class ListDomainsResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id"); - private static final ListDomainsInput TEST_INPUT = new ListDomainsInput( - 0, 20, null, TEST_PARENT_DOMAIN_URN.toString() - ); + private static final ListDomainsInput TEST_INPUT = + new ListDomainsInput(0, 20, null, TEST_PARENT_DOMAIN_URN.toString()); - private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = new ListDomainsInput( - 0, 20, null, null - ); + private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = + new ListDomainsInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), + Mockito.eq( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -74,7 +76,8 @@ public void testGetSuccess() throws Exception { 
assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -82,22 +85,27 @@ public void testGetSuccessNoParentDomain() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(null)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(null)), + Mockito.eq( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -112,7 +120,8 @@ public void testGetSuccessNoParentDomain() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -124,33 +133,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - 
Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); // Execute resolver @@ -161,4 +172,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java index 4059c180b0eb0..a0eff5d0574db 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.CorpuserUrn; @@ -17,52 +22,51 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class MoveDomainResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; private static final String PARENT_DOMAIN_URN = "urn:li:domain:00005397daf94708a8822b8106cfd451"; private static final String DOMAIN_URN = "urn:li:domain:11115397daf94708a8822b8106cfd451"; private static final MoveDomainInput INPUT = new MoveDomainInput(PARENT_DOMAIN_URN, DOMAIN_URN); - private static final MoveDomainInput INVALID_INPUT = new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); + private static final MoveDomainInput INVALID_INPUT = + new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - 
Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(name, Urn.createFromString(PARENT_DOMAIN_URN))), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + name, Urn.createFromString(PARENT_DOMAIN_URN))), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(name); properties.setParentDomain(Urn.createFromString(PARENT_DOMAIN_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); } @Test @@ -77,11 +81,11 @@ public void testGetSuccess() throws Exception { setupTests(mockEnv, mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -97,10 +101,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(null); MoveDomainResolver resolver = new MoveDomainResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java index 7bd7c3afac001..4c8ceff9c4f80 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,15 +16,11 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import 
java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentDomainsResolverTest { @Test @@ -38,58 +37,68 @@ public void testGetSuccessForDomain() throws Exception { domainEntity.setType(EntityType.DOMAIN); Mockito.when(mockEnv.getSource()).thenReturn(domainEntity); - final DomainProperties parentDomain1 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:11115397daf94708a8822b8106cfd451") - ).setName("test def"); - final DomainProperties parentDomain2 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:22225397daf94708a8822b8106cfd451") - ).setName("test def 2"); + final DomainProperties parentDomain1 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:11115397daf94708a8822b8106cfd451")) + .setName("test def"); + final DomainProperties parentDomain2 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:22225397daf94708a8822b8106cfd451")) + .setName("test def 2"); Map<String, EnvelopedAspect> domainAspects = new HashMap<>(); - domainAspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); + domainAspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); Map<String, EnvelopedAspect> parentDomain1Aspects = new HashMap<>(); - parentDomain1Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 1").setParentDomain(parentDomain2.getParentDomain()).data() - ))); + parentDomain1Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new DomainProperties() + .setName("domain parent 1") + .setParentDomain(parentDomain2.getParentDomain()) + .data()))); Map<String, EnvelopedAspect> parentDomain2Aspects = new HashMap<>(); - parentDomain2Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 2").data() - ))); + parentDomain2Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new DomainProperties().setName("domain parent 2").data()))); - Mockito.when(mockClient.getV2( - Mockito.eq(domainUrn.getEntityType()), - Mockito.eq(domainUrn), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(domainUrn.getEntityType()), + Mockito.eq(domainUrn), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain1.getParentDomain().getEntityType()), - Mockito.eq(parentDomain1.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(parentDomain1.getParentDomain().getEntityType()), + Mockito.eq(parentDomain1.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), 
+ Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain2.getParentDomain().getEntityType()), - Mockito.eq(parentDomain2.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(parentDomain2.getParentDomain().getEntityType()), + Mockito.eq(parentDomain2.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); ParentDomainsResolver resolver = new ParentDomainsResolver(mockClient); ParentDomainsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(3)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(3)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getDomains().get(0).getUrn(), parentDomain1.getParentDomain().toString()); assertEquals(result.getDomains().get(1).getUrn(), parentDomain2.getParentDomain().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index 92fb26288aa1d..ad5ad2315ce43 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; private static final String TEST_NEW_DOMAIN_URN = "urn:li:domain:test-id-2"; @@ -43,16 +43,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - 
.setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -68,47 +71,52 @@ public void testGetSuccessNoExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + 
Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -124,23 +132,21 @@ public void testGetSuccessExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test @@ -149,16 +155,19 @@ public void testGetFailureDomainDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -174,9 +183,8 @@ public void 
testGetFailureDomainDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -185,16 +193,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -210,9 +221,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -230,18 +240,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetDomainResolver resolver = new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetDomainResolver resolver = + new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -252,4 +262,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index decda39943dde..7e6e258168898 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UnsetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; @Test @@ -42,16 +42,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -66,43 +69,46 @@ public void testGetSuccessNoExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - 
); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -117,18 +123,15 @@ public void testGetSuccessExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -137,16 +140,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - 
Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -160,9 +166,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -179,18 +184,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + UnsetDomainResolver resolver = + new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -200,4 +205,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index f1d44fcb47255..45a17744a2697 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static 
com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -25,29 +29,24 @@ import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class UpdateEmbedResolverTest {
 
   private static final String TEST_ENTITY_URN = "urn:li:dashboard:(looker,1)";
   private static final String TEST_RENDER_URL = "https://www.google.com";
-  private static final UpdateEmbedInput TEST_EMBED_INPUT = new UpdateEmbedInput(
-      TEST_ENTITY_URN,
-      TEST_RENDER_URL
-  );
+  private static final UpdateEmbedInput TEST_EMBED_INPUT =
+      new UpdateEmbedInput(TEST_ENTITY_URN, TEST_RENDER_URL);
   private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test");
 
   @Test
   public void testGetSuccessNoExistingEmbed() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
-        Mockito.eq(EMBED_ASPECT_NAME),
-        Mockito.eq(0L))).thenReturn(null);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
+                Mockito.eq(EMBED_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
 
@@ -62,14 +61,14 @@ public void testGetSuccessNoExistingEmbed() throws Exception {
     resolver.get(mockEnv).get();
 
     final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN),
-        EMBED_ASPECT_NAME, newEmbed);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed);
 
-    verifySingleIngestProposal(mockService, 1, proposal);;
+    verifySingleIngestProposal(mockService, 1, proposal);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
@@ -79,10 +78,12 @@ public void testGetSuccessExistingEmbed() throws Exception {
     // Create resolver
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
-        Mockito.eq(EMBED_ASPECT_NAME),
-        Mockito.eq(0L))).thenReturn(originalEmbed);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
+                Mockito.eq(EMBED_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(originalEmbed);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
 
@@ -97,14 +98,14 @@ public void testGetSuccessExistingEmbed() throws Exception {
     resolver.get(mockEnv).get();
 
     final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN),
-        EMBED_ASPECT_NAME, newEmbed);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed);
 
     verifySingleIngestProposal(mockService, 1, proposal);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
@@ -112,16 +113,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-        Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN),
-            new EntityResponse()
-                .setEntityName(Constants.DASHBOARD_ENTITY_NAME)
-                .setUrn(Urn.createFromString(TEST_ENTITY_URN))
-                .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                Urn.createFromString(TEST_ENTITY_URN),
+                new EntityResponse()
+                    .setEntityName(Constants.DASHBOARD_ENTITY_NAME)
+                    .setUrn(Urn.createFromString(TEST_ENTITY_URN))
+                    .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
@@ -136,11 +140,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );;
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
   @Test
@@ -156,20 +158,18 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
     UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService);
 
     // Execute resolver
@@ -180,4 +180,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java index cde2739b2bcc6..fa8b1d6a747ca 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java @@ -1,14 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EntityExistsResolverTest { private static final String ENTITY_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java index 913ea4602faf0..d9d5e643057ce 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Chart; @@ -14,14 +17,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class EntityPrivilegesResolverTest { final String glossaryTermUrn = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; @@ -29,7 +28,8 @@ public class EntityPrivilegesResolverTest { final String datasetUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"; final String chartUrn = "urn:li:chart:(looker,baz1)"; final String dashboardUrn = "urn:li:dashboard:(looker,dashboards.1)"; - final String dataJobUrn = "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; + final String dataJobUrn = + "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; private DataFetchingEnvironment setUpTestWithPermissions(Entity entity) { QueryContext mockContext = getMockAllowContext(); @@ -115,11 +115,13 @@ public void testGetFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); DataFetchingEnvironment mockEnv = setUpTestWithoutPermissions(glossaryNode); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.any(), + Mockito.any(), + 
Mockito.any(Authentication.class)); EntityPrivilegesResolver resolver = new EntityPrivilegesResolver(mockClient); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 26c13186c4a81..287d270ab569c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,15 +14,9 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.ExecutionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.*; - - public class AddRelatedTermsResolverTest { private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; @@ -28,10 +26,11 @@ public class AddRelatedTermsResolverTest { private EntityService setUpService() { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); return mockService; } @@ -48,24 +47,22 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); 
} @Test @@ -80,24 +77,22 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test @@ -110,9 +105,9 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_ENTITY_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -130,9 +125,9 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - DATASET_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,9 +146,9 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -172,9 +167,9 @@ public void testGetFailAddToNonExistentUrn() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new 
RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -193,9 +188,9 @@ public void testGetFailAddToNonTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(DATASET_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -215,15 +210,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 3b47514d87181..2a36d77716ab7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,39 +11,27 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; -import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static com.linkedin.metadata.Constants.*; - - public class CreateGlossaryNodeResolverTest { - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - 
"test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -47,8 +39,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_NODE_ENTITY_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, props); } @Test @@ -72,16 +64,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -89,16 +79,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - 
Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,15 +94,13 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 2dbe637d16057..6653b19d6ef2b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -1,61 +1,53 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import 
static org.testng.Assert.assertThrows; -import static com.linkedin.metadata.Constants.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class CreateGlossaryTermResolverTest { private static final String EXISTING_TERM_URN = "urn:li:glossaryTerm:testing12345"; - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -80,8 +72,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_TERM_ENTITY_NAME, GLOSSARY_TERM_INFO_ASPECT_NAME, props); } @Test @@ -89,16 +81,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,16 +96,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -123,16 +111,14 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -140,73 +126,71 @@ public void testGetFailureExistingTermSameName() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))) - )); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))))); Map<Urn, EntityResponse> result = new HashMap<>(); EnvelopedAspectMap map = new EnvelopedAspectMap(); GlossaryTermInfo termInfo = new GlossaryTermInfo().setName("Duplicated Name"); - map.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); + map.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); result.put(UrnUtils.getUrn(EXISTING_TERM_URN), new EntityResponse().setAspects(map)); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(result); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(result); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - CreateGlossaryEntityInput input = new CreateGlossaryEntityInput( - "test-id", - "Duplicated Name", - "test-description", - 
"urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); + CreateGlossaryEntityInput input = + new CreateGlossaryEntityInput( + "test-id", + "Duplicated Name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); setupTest(mockEnv, input, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private EntityClient initMockClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(new HashMap<>()); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(new HashMap<>()); return mockClient; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index 94f0d0b7a1143..7229d2acf763d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,19 +12,14 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class DeleteGlossaryEntityResolverTest { - private static final String TEST_TERM_URN = "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; + private static final String TEST_TERM_URN = + 
"urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; @Test public void testGetSuccess() throws Exception { @@ -33,26 +33,27 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_TERM_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_TERM_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_TERM_URN)), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index 677516e9404e8..b879baf1e65dc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,16 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryNodesResolverTest { - final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput( - 0, 100 - ); + final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryNodeUrn1 = "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451"; final String glossaryNodeUrn2 = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -42,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - 
Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryNodesResolver resolver = new GetRootGlossaryNodesResolver(mockClient); GetRootGlossaryNodesResult result = resolver.get(mockEnv).get(); @@ -64,24 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); - assertEquals(result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); + assertEquals( + result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); + assertEquals( + result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 5aba32108b7db..201bea752d53f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,14 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryTermsResolverTest { final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryTermUrn1 = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; final String glossaryTermUrn2 = "urn:li:glossaryTerm:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() 
throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -40,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryTermsResolver resolver = new GetRootGlossaryTermsResolver(mockClient); GetRootGlossaryTermsResult result = resolver.get(mockEnv).get(); @@ -62,23 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); - assertEquals(result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); + assertEquals( + result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); + assertEquals( + result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java index 8bfc32e1999ae..969fda541d6a6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java @@ -1,33 +1,32 @@ package com.linkedin.datahub.graphql.resolvers.glossary; -import com.google.common.collect.ImmutableSet; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationRequest; import 
com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.Aspect; -import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.Constants; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.Optional; -import java.util.Map; -import java.util.HashMap; - -import static org.testng.Assert.*; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; - public class GlossaryUtilsTest { private final String userUrn = "urn:li:corpuser:authorized"; @@ -44,67 +43,87 @@ private void setUpTests() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(userUrn); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node2") - ); - GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node3") - ); - + GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node2")); + GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node3")); + GlossaryNodeInfo parentNode3 = new GlossaryNodeInfo(); - + Map<String, EnvelopedAspect> parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode1.getParentNode()).data() - ))); - + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode1.getParentNode()) + .data()))); + Map<String, EnvelopedAspect> parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 2") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map<String, EnvelopedAspect> parentNode3Aspects = new HashMap<>(); - parentNode3Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 3").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn1), - 
Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn2), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn3), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); - - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + parentNode3Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 3").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn1), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn2), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn3), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); + + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); } - private void mockAuthRequest(String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { - final AuthorizationRequest authorizationRequest = new AuthorizationRequest( - userUrn, - privilege, - resourceSpec != null ? Optional.of(resourceSpec) : Optional.empty() - ); + private void mockAuthRequest( + String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { + final AuthorizationRequest authorizationRequest = + new AuthorizationRequest( + userUrn, + privilege, + resourceSpec != null ? 
Optional.of(resourceSpec) : Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(allowOrDeny); Mockito.when(mockAuthorizer.authorize(Mockito.eq(authorizationRequest))).thenReturn(result); @@ -150,7 +169,8 @@ public void testCanManageChildrenEntitiesAuthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn, mockClient)); @@ -162,7 +182,8 @@ public void testCanManageChildrenEntitiesUnauthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); @@ -175,13 +196,16 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorized() throws Exceptio // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -193,13 +217,16 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorized() throws Except // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), 
parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -211,10 +238,12 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorizedLevel2() throws Ex // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -226,10 +255,12 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorizedLevel2() throws // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn2, mockClient)); @@ -241,7 +272,8 @@ public void testCanManageChildrenRecursivelyEntitiesNoLevel2() throws Exception // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn3, mockClient)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 06dff7611fac8..446f58bec73aa 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -16,17 +21,11 @@ import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentNodesResolverTest { @Test @@ -43,76 +42,94 @@ public void testGetSuccessForTerm() throws Exception { termEntity.setType(EntityType.GLOSSARY_TERM); Mockito.when(mockEnv.getSource()).thenReturn(termEntity); - final GlossaryTermInfo parentNode1 = new GlossaryTermInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryTermInfo parentNode1 = + new GlossaryTermInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map<String, EnvelopedAspect> glossaryTermAspects = new HashMap<>(); - glossaryTermAspects.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryTermAspects.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map<String, EnvelopedAspect> parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map<String, EnvelopedAspect> parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(termUrn.getEntityType()), - Mockito.eq(termUrn), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - 
)).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(termUrn.getEntityType()), + Mockito.eq(termUrn), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - 
Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); @@ -132,78 +149,96 @@ public void testGetSuccessForNode() throws Exception { nodeEntity.setType(EntityType.GLOSSARY_NODE); Mockito.when(mockEnv.getSource()).thenReturn(nodeEntity); - final GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map<String, EnvelopedAspect> glossaryNodeAspects = new HashMap<>(); - glossaryNodeAspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryNodeAspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map<String, EnvelopedAspect> parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map<String, EnvelopedAspect> parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(nodeUrn.getEntityType()), - Mockito.eq(nodeUrn), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - 
Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(nodeUrn.getEntityType()), + Mockito.eq(nodeUrn), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 3906d1188cb17..47de668b2c9dc 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; @@ -12,15 +16,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Arrays; import java.util.concurrent.ExecutionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RemoveRelatedTermsResolverTest { @@ -35,10 +34,11 @@ public void testGetSuccessIsA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -47,17 +47,16 @@ public void testGetSuccessIsA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -67,10 +66,11 @@ public void testGetSuccessHasA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -79,26 +79,26 @@ public void testGetSuccessHasA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testFailAspectDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -107,9 +107,9 @@ public void testFailAspectDoesNotExist() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -124,10 +124,11 @@ public void testFailNoPermissions() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -136,16 +137,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); - Mockito.verify(mockService, Mockito.times(0)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(0)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index eee9cfbae8fcb..3972715fcefb1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; @@ -19,16 +23,10 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateNameResolverTest { private static final String NEW_NAME = "New Name"; @@ -40,23 +38,23 @@ public class UpdateNameResolverTest { private static final UpdateNameInput INPUT_FOR_DOMAIN = new UpdateNameInput(NEW_NAME, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(NEW_NAME); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -88,16 +86,16 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - 
Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); @@ -118,25 +116,27 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index a78c28890fecf..74a59b10a40b0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -15,45 +20,43 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import 
graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateParentNodeResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; - private static final String PARENT_NODE_URN = "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; + private static final String PARENT_NODE_URN = + "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; private static final String TERM_URN = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; private static final String NODE_URN = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - private static final UpdateParentNodeInput INPUT = new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); - private static final UpdateParentNodeInput INPUT_WITH_NODE = new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); - private static final UpdateParentNodeInput INVALID_INPUT = new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT = + new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT_WITH_NODE = + new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); + private static final UpdateParentNodeInput INVALID_INPUT = + new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -61,7 +64,8 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); 
Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -77,7 +81,8 @@ public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -87,17 +92,17 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService, mockClient); @@ -110,7 +115,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -126,7 +132,8 @@ public void testGetFailureNodeDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(false); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -142,7 +149,8 @@ public void testGetFailureParentIsNotNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); 
Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java index a20c84d11ba9f..19d9dd20d3f80 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AddGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java index 876de633bd656..a29680a6de52d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateGroupResolverTest { private static final String GROUP_ID = "id"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java index 73b0be96fce17..601d5e08a4233 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static 
com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class RemoveGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index dae0758f6a2f6..e5cb43c4dab61 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -21,15 +23,14 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.Mockito; -import static org.testng.Assert.*; - - public class IngestTestUtils { - public static final Urn TEST_INGESTION_SOURCE_URN = Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); - public static final Urn TEST_SECRET_URN = Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); - public static final Urn TEST_EXECUTION_REQUEST_URN = Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); - + public static final Urn TEST_INGESTION_SOURCE_URN = + Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); + public static final Urn TEST_SECRET_URN = + Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); + public static final Urn TEST_EXECUTION_REQUEST_URN = + Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); public static QueryContext getMockAllowContext() { QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -63,8 +64,13 @@ public static DataHubIngestionSourceInfo getTestIngestionSourceInfo() { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setName("My Test Source"); info.setType("mysql"); - info.setSchedule(new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); - info.setConfig(new DataHubIngestionSourceConfig().setVersion("0.8.18").setRecipe("{}").setExecutorId("executor id")); + info.setSchedule( + new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); + info.setConfig( + new DataHubIngestionSourceConfig() + .setVersion("0.8.18") + .setRecipe("{}") + .setExecutorId("executor id")); return info; } @@ -78,15 +84,18 @@ public static DataHubSecretValue getTestSecretValue() { public static ExecutionRequestInput getTestExecutionRequestInput() { ExecutionRequestInput input = new ExecutionRequestInput(); - input.setArgs(new StringMap( - ImmutableMap.of( - "recipe", "my-custom-recipe", - "version", "0.8.18") - )); + input.setArgs( + new StringMap( + ImmutableMap.of( + "recipe", "my-custom-recipe", + "version", "0.8.18"))); input.setTask("RUN_INGEST"); input.setExecutorId("default"); input.setRequestedAt(0L); - input.setSource(new ExecutionRequestSource().setIngestionSource(TEST_INGESTION_SOURCE_URN).setType("SCHEDULED_INGESTION")); + input.setSource( + new ExecutionRequestSource() + .setIngestionSource(TEST_INGESTION_SOURCE_URN) + .setType("SCHEDULED_INGESTION")); return input; } @@ -99,7 
+108,8 @@ public static ExecutionRequestResult getTestExecutionRequestResult() { return result; } - public static void verifyTestIngestionSourceGraphQL(IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { + public static void verifyTestIngestionSourceGraphQL( + IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { assertEquals(ingestionSource.getUrn(), TEST_INGESTION_SOURCE_URN.toString()); assertEquals(ingestionSource.getName(), info.getName()); assertEquals(ingestionSource.getType(), info.getType()); @@ -134,5 +144,5 @@ public static void verifyTestExecutionRequest( assertEquals(executionRequest.getResult().getStartTimeMs(), result.getStartTimeMs()); } - private IngestTestUtils() { } + private IngestTestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 12045b9361469..3de88333b959d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -7,7 +9,6 @@ import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; public class IngestionAuthUtilsTest { @@ -16,11 +17,9 @@ public void testCanManageIngestionAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:authorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -37,11 +36,9 @@ public void testCanManageIngestionUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -58,11 +55,8 @@ public void testCanManageSecretsAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest("urn:li:corpuser:authorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); 
Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -79,11 +73,9 @@ public void testCanManageSecretsUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java index e7226c6e4db08..3d0c24b9aa022 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -18,35 +21,36 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CancelIngestionExecutionRequestResolverTest { - private static final CancelIngestionExecutionRequestInput TEST_INPUT = new CancelIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString(), - TEST_EXECUTION_REQUEST_URN.toString() - ); + private static final CancelIngestionExecutionRequestInput TEST_INPUT = + new CancelIngestionExecutionRequestInput( + TEST_INGESTION_SOURCE_URN.toString(), TEST_EXECUTION_REQUEST_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new 
EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -57,18 +61,19 @@ Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( resolver.get(mockEnv).get(); // Verify ingest proposal has been called to create a Signal request. - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -77,19 +82,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java index 7973e49c6efdf..18ce1d8c27955 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; 
-import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -12,6 +14,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -19,35 +22,37 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateIngestionExecutionRequestResolverTest { - private static final CreateIngestionExecutionRequestInput TEST_INPUT = new CreateIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString() - ); + private static final CreateIngestionExecutionRequestInput TEST_INPUT = + new CreateIngestionExecutionRequestInput(TEST_INGESTION_SOURCE_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_INGESTION_SOURCE_URN, - new EntityResponse().setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -58,11 +63,11 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); 
   }
 
   @Test
@@ -71,7 +76,8 @@ public void testGetUnauthorized() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     IngestionConfiguration ingestionConfiguration = new IngestionConfiguration();
     ingestionConfiguration.setDefaultCliVersion("default");
-    CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration);
+    CreateIngestionExecutionRequestResolver resolver =
+        new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -80,21 +86,21 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
     IngestionConfiguration ingestionConfiguration = new IngestionConfiguration();
     ingestionConfiguration.setDefaultCliVersion("default");
-    CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration);
+    CreateIngestionExecutionRequestResolver resolver =
+        new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -105,4 +111,3 @@ public void testGetEntityClientException() throws Exception {
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
   }
 }
-
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java
index 75df240441965..eaf3186524721 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java
@@ -1,25 +1,22 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.execution;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.config.IngestionConfiguration;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput;
 import com.linkedin.entity.client.EntityClient;
+import com.linkedin.metadata.config.IngestionConfiguration;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class CreateTestConnectionRequestResolverTest {
 
-  private static final CreateTestConnectionRequestInput TEST_INPUT = new CreateTestConnectionRequestInput(
-      "{}",
-      "0.8.44"
-  );
+  private static final CreateTestConnectionRequestInput TEST_INPUT =
+      new CreateTestConnectionRequestInput("{}", "0.8.44");
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -27,7 +24,8 @@ public void testGetSuccess() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     IngestionConfiguration ingestionConfiguration = new IngestionConfiguration();
     ingestionConfiguration.setDefaultCliVersion("default");
-    CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration);
+    CreateTestConnectionRequestResolver resolver =
+        new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
@@ -37,11 +35,11 @@ public void testGetSuccess() throws Exception {
 
     resolver.get(mockEnv).get();
 
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
@@ -50,7 +48,8 @@ public void testGetUnauthorized() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     IngestionConfiguration ingestionConfiguration = new IngestionConfiguration();
     ingestionConfiguration.setDefaultCliVersion("default");
-    CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration);
+    CreateTestConnectionRequestResolver resolver =
+        new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -59,9 +58,7 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 }
-
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java
index 532b9b89f3a99..268f8b8927b67 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.execution;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -20,9 +23,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-
 public class GetIngestionExecutionRequestResolverTest {
 
   @Test
@@ -33,32 +33,48 @@ public void testGetSuccess() throws Exception {
     ExecutionRequestInput returnedInput = getTestExecutionRequestInput();
     ExecutionRequestResult returnedResult = getTestExecutionRequestResult();
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))),
-        Mockito.eq(ImmutableSet.of(
-            Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
-            Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN,
-            new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME)
-                .setUrn(TEST_EXECUTION_REQUEST_URN)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedInput.data()))
-                        .setCreated(new AuditStamp()
-                            .setTime(0L)
-                            .setActor(Urn.createFromString("urn:li:corpuser:test"))),
-                    Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedResult.data()))
-                        .setCreated(new AuditStamp()
-                            .setTime(0L)
-                            .setActor(Urn.createFromString("urn:li:corpuser:test")))
-                )))));
-    GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient);
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))),
+                Mockito.eq(
+                    ImmutableSet.of(
+                        Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
+                        Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                TEST_EXECUTION_REQUEST_URN,
+                new EntityResponse()
+                    .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME)
+                    .setUrn(TEST_EXECUTION_REQUEST_URN)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedInput.data()))
+                                    .setCreated(
+                                        new AuditStamp()
+                                            .setTime(0L)
+                                            .setActor(
+                                                Urn.createFromString("urn:li:corpuser:test"))),
+                                Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedResult.data()))
+                                    .setCreated(
+                                        new AuditStamp()
+                                            .setTime(0L)
+                                            .setActor(
+                                                Urn.createFromString("urn:li:corpuser:test")))))))));
+    GetIngestionExecutionRequestResolver resolver =
+        new GetIngestionExecutionRequestResolver(mockClient);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_EXECUTION_REQUEST_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     // Data Assertions
@@ -69,7 +85,8 @@ public void testGetUnauthorized() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient);
+    GetIngestionExecutionRequestResolver resolver =
+        new GetIngestionExecutionRequestResolver(mockClient);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -78,7 +95,9 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -87,13 +106,16 @@ public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     Mockito.doThrow(RemoteInvocationException.class)
         .when(mockClient)
-        .batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
-    GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient);
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    GetIngestionExecutionRequestResolver resolver =
+        new GetIngestionExecutionRequestResolver(mockClient);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_EXECUTION_REQUEST_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java
index 25f3ccbd47cd6..fdb150e692441 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.execution;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -28,10 +31,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class IngestionSourceExecutionRequestsResolverTest {
 
   @Test
@@ -40,49 +39,65 @@ public void testGetSuccess() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
     // Mock filter response
-    Mockito.when(mockClient.filter(
-        Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME),
-        Mockito.any(Filter.class),
-        Mockito.any(SortCriterion.class),
-        Mockito.eq(0),
-        Mockito.eq(10),
-        Mockito.any(Authentication.class)))
-        .thenReturn(new SearchResult()
-            .setFrom(0)
-            .setPageSize(10)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableList.of(
-                new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN))))
-        );
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME),
+                Mockito.any(Filter.class),
+                Mockito.any(SortCriterion.class),
+                Mockito.eq(0),
+                Mockito.eq(10),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(10)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableList.of(
+                            new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN)))));
 
     // Mock batch get response
     ExecutionRequestInput returnedInput = getTestExecutionRequestInput();
     ExecutionRequestResult returnedResult = getTestExecutionRequestResult();
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))),
-        Mockito.eq(ImmutableSet.of(
-            Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
-            Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN,
-            new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME)
-                .setUrn(TEST_EXECUTION_REQUEST_URN)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedInput.data()))
-                        .setCreated(new AuditStamp()
-                            .setTime(0L)
-                            .setActor(Urn.createFromString("urn:li:corpuser:test"))),
-                    Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedResult.data()))
-                        .setCreated(new AuditStamp()
-                            .setTime(0L)
-                            .setActor(Urn.createFromString("urn:li:corpuser:test")))
-                )))));
-
-    IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient);
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))),
+                Mockito.eq(
+                    ImmutableSet.of(
+                        Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
+                        Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                TEST_EXECUTION_REQUEST_URN,
+                new EntityResponse()
+                    .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME)
+                    .setUrn(TEST_EXECUTION_REQUEST_URN)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedInput.data()))
+                                    .setCreated(
+                                        new AuditStamp()
+                                            .setTime(0L)
+                                            .setActor(
+                                                Urn.createFromString("urn:li:corpuser:test"))),
+                                Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedResult.data()))
+                                    .setCreated(
+                                        new AuditStamp()
+                                            .setTime(0L)
+                                            .setActor(
+                                                Urn.createFromString("urn:li:corpuser:test")))))))));
+
+    IngestionSourceExecutionRequestsResolver resolver =
+        new IngestionSourceExecutionRequestsResolver(mockClient);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
@@ -99,14 +114,16 @@ public void testGetSuccess() throws Exception {
     assertEquals((int) executionRequests.getStart(), 0);
     assertEquals((int) executionRequests.getCount(), 10);
     assertEquals((int) executionRequests.getTotal(), 1);
-    verifyTestExecutionRequest(executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult);
+    verifyTestExecutionRequest(
+        executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult);
   }
 
   @Test
   public void testGetUnauthorized() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient);
+    IngestionSourceExecutionRequestsResolver resolver =
+        new IngestionSourceExecutionRequestsResolver(mockClient);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -119,29 +136,28 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getSource()).thenReturn(parentSource);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    Mockito.verify(mockClient, Mockito.times(0)).list(
-        Mockito.any(),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .list(
+            Mockito.any(),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient);
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    IngestionSourceExecutionRequestsResolver resolver =
+        new IngestionSourceExecutionRequestsResolver(mockClient);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java
index c7a72e475f7ab..bec141bddf260 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.execution;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.RollbackIngestionInput;
@@ -8,10 +11,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class RollbackIngestionResolverTest {
 
   private static final String RUN_ID = "testRunId";
@@ -46,9 +45,8 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).rollbackIngestion(
-        Mockito.eq(RUN_ID),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -59,24 +57,22 @@ public void testRollbackIngestionMethod() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     resolver.rollbackIngestion(RUN_ID, mockContext).get();
 
-    Mockito.verify(mockClient, Mockito.times(1)).rollbackIngestion(
-        Mockito.eq(RUN_ID),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RuntimeException.class).when(mockClient).rollbackIngestion(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockClient)
+        .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class));
 
     RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient);
     QueryContext mockContext = getMockAllowContext();
 
-    assertThrows(RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join());
+    assertThrows(
+        RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join());
   }
 }
-
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java
index 2d64d4ec56ba1..85ef304d28533 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java
@@ -6,7 +6,6 @@
 import com.linkedin.secret.DataHubSecretValue;
 import org.mockito.ArgumentMatcher;
 
-
 public class CreateSecretResolverMatcherTest implements ArgumentMatcher<MetadataChangeProposal> {
 
   private MetadataChangeProposal left;
@@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) {
   }
 
   private boolean secretPropertiesMatch(GenericAspect left, GenericAspect right) {
-    DataHubSecretValue leftProps = GenericRecordUtils.deserializeAspect(
-        left.getValue(),
-        "application/json",
-        DataHubSecretValue.class
-    );
-
-    DataHubSecretValue rightProps = GenericRecordUtils.deserializeAspect(
-        right.getValue(),
-        "application/json",
-        DataHubSecretValue.class
-    );
+    DataHubSecretValue leftProps =
+        GenericRecordUtils.deserializeAspect(
+            left.getValue(), "application/json", DataHubSecretValue.class);
+
+    DataHubSecretValue rightProps =
+        GenericRecordUtils.deserializeAspect(
+            right.getValue(), "application/json", DataHubSecretValue.class);
 
     // Omit timestamp comparison.
     return leftProps.getName().equals(rightProps.getName())
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java
index 18ae71661318e..eafdfde364947 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.secret;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
 
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.AuditStamp;
@@ -20,24 +22,18 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class CreateSecretResolverTest {
 
-  private static final CreateSecretInput TEST_INPUT = new CreateSecretInput(
-      "MY_SECRET",
-      "mysecretvalue",
-      "none"
-  );
+  private static final CreateSecretInput TEST_INPUT =
+      new CreateSecretInput("MY_SECRET", "mysecretvalue", "none");
 
   @Test
   public void testGetSuccess() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     SecretService mockSecretService = Mockito.mock(SecretService.class);
-    Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))).thenReturn("encryptedvalue");
+    Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue())))
+        .thenReturn("encryptedvalue");
     CreateSecretResolver resolver = new CreateSecretResolver(mockClient, mockSecretService);
 
     // Execute resolver
@@ -57,18 +53,21 @@ public void testGetSuccess() throws Exception {
     value.setValue("encryptedvalue");
     value.setName(TEST_INPUT.getName());
     value.setDescription(TEST_INPUT.getDescription());
-    value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L));
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.argThat(new CreateSecretResolverMatcherTest(new MetadataChangeProposal()
-            .setChangeType(ChangeType.UPSERT)
-            .setEntityType(Constants.SECRETS_ENTITY_NAME)
-            .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME)
-            .setAspect(GenericRecordUtils.serializeAspect(value))
-            .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    value.setCreated(
+        new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L));
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.argThat(
+                new CreateSecretResolverMatcherTest(
+                    new MetadataChangeProposal()
+                        .setChangeType(ChangeType.UPSERT)
+                        .setEntityType(Constants.SECRETS_ENTITY_NAME)
+                        .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME)
+                        .setAspect(GenericRecordUtils.serializeAspect(value))
+                        .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
@@ -80,23 +79,21 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
     UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient);
 
     // Execute resolver
@@ -108,4 +105,3 @@ public void testGetEntityClientException() throws Exception {
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
   }
 }
-
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java
index 679425afbf2e7..7cfe33feb58fc 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.secret;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.entity.client.EntityClient;
@@ -8,9 +11,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-
 public class DeleteSecretResolverTest {
 
   @Test
@@ -26,7 +26,8 @@ public void testGetSuccess() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertEquals(resolver.get(mockEnv).get(), TEST_SECRET_URN.toString());
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication());
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication());
   }
 
   @Test
@@ -42,14 +43,17 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication());
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication());
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class));
     DeleteSecretResolver resolver = new DeleteSecretResolver(mockClient);
 
     // Execute Resolver
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java
index 0042d34e602cc..495adb27dbd5d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.secret;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -22,14 +25,10 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-
 public class GetSecretValuesResolverTest {
 
-  private static final GetSecretValuesInput TEST_INPUT = new GetSecretValuesInput(
-      ImmutableList.of(getTestSecretValue().getName())
-  );
+  private static final GetSecretValuesInput TEST_INPUT =
+      new GetSecretValuesInput(ImmutableList.of(getTestSecretValue().getName()));
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -39,27 +38,29 @@ public void testGetSuccess() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     SecretService mockSecretService = Mockito.mock(SecretService.class);
-    Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))).thenReturn(decryptedSecretValue);
+    Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue())))
+        .thenReturn(decryptedSecretValue);
 
     DataHubSecretValue returnedValue = getTestSecretValue();
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.SECRETS_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))),
-        Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        ImmutableMap.of(
-            TEST_SECRET_URN,
-            new EntityResponse()
-                .setEntityName(Constants.SECRETS_ENTITY_NAME)
-                .setUrn(TEST_SECRET_URN)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.SECRET_VALUE_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedValue.data()))
-                )))
-        )
-    );
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.SECRETS_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))),
+                Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                TEST_SECRET_URN,
+                new EntityResponse()
+                    .setEntityName(Constants.SECRETS_ENTITY_NAME)
+                    .setUrn(TEST_SECRET_URN)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.SECRET_VALUE_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedValue.data()))))))));
 
     GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService);
 
@@ -90,22 +91,19 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
     SecretService mockSecretService = Mockito.mock(SecretService.class);
     GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java
index ad91c214db28f..7d89f4aafa01a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.secret;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -23,15 +26,9 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class ListSecretsResolverTest {
 
-  private static final ListSecretsInput TEST_INPUT = new ListSecretsInput(
-      0, 20, null
-  );
+  private static final ListSecretsInput TEST_INPUT = new ListSecretsInput(0, 20, null);
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -40,40 +37,43 @@ public void testGetSuccess() throws Exception {
 
     DataHubSecretValue returnedValue = getTestSecretValue();
 
-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.SECRETS_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.eq(null),
-        Mockito.any(SortCriterion.class),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true))
-    )).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN))))
-    );
-
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.SECRETS_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))),
-        Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        ImmutableMap.of(
-            TEST_SECRET_URN,
-            new EntityResponse()
-                .setEntityName(Constants.SECRETS_ENTITY_NAME)
-                .setUrn(TEST_SECRET_URN)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.SECRET_VALUE_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedValue.data()))
-                )))
-        )
-    );
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.SECRETS_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.eq(null),
+                Mockito.any(SortCriterion.class),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.any(Authentication.class),
+                Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN)))));
+
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.SECRETS_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))),
+                Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                TEST_SECRET_URN,
+                new EntityResponse()
+                    .setEntityName(Constants.SECRETS_ENTITY_NAME)
+                    .setUrn(TEST_SECRET_URN)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.SECRET_VALUE_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedValue.data()))))))));
     ListSecretsResolver resolver = new ListSecretsResolver(mockClient);
 
     // Execute resolver
@@ -99,36 +99,33 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    Mockito.verify(mockClient, Mockito.times(0)).search(
-        Mockito.any(),
-        Mockito.eq(""),
-        Mockito.eq(null),
-        Mockito.any(SortCriterion.class),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .search(
+            Mockito.any(),
+            Mockito.eq(""),
+            Mockito.eq(null),
+            Mockito.any(SortCriterion.class),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
     ListSecretsResolver resolver = new ListSecretsResolver(mockClient);
 
     // Execute resolver
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java
index c898ae7280710..5172ef01c25eb 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.source;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.entity.client.EntityClient;
@@ -8,9 +11,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-
 public class DeleteIngestionSourceResolverTest {
 
   @Test
@@ -22,11 +22,13 @@ public void testGetSuccess() throws Exception {
     // execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_INGESTION_SOURCE_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertEquals(resolver.get(mockEnv).get(), TEST_INGESTION_SOURCE_URN.toString());
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication());
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication());
   }
 
   @Test
@@ -38,24 +40,29 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockDenyContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_INGESTION_SOURCE_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication());
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication());
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class));
 
     // Execute Resolver
     QueryContext mockContext = getMockAllowContext();
     DeleteIngestionSourceResolver resolver = new DeleteIngestionSourceResolver(mockClient);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_INGESTION_SOURCE_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java
index ebafd1782e000..bda18961d3890 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.source;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.assertThrows;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -14,13 +17,9 @@
 import com.linkedin.r2.RemoteInvocationException;
 import graphql.schema.DataFetchingEnvironment;
 import java.util.HashSet;
-
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.assertThrows;
-
 public class GetIngestionSourceResolverTest {
 
   @Test
@@ -30,29 +29,31 @@ public void testGetSuccess() throws Exception {
 
     DataHubIngestionSourceInfo returnedInfo = getTestIngestionSourceInfo();
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))),
-        Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        ImmutableMap.of(
-            TEST_INGESTION_SOURCE_URN,
-            new EntityResponse()
-                .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME)
-                .setUrn(TEST_INGESTION_SOURCE_URN)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.INGESTION_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedInfo.data()))
-                )))
-        )
-    );
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))),
+                Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                TEST_INGESTION_SOURCE_URN,
+                new EntityResponse()
+                    .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME)
+                    .setUrn(TEST_INGESTION_SOURCE_URN)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.INGESTION_INFO_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(returnedInfo.data()))))))));
     GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_INGESTION_SOURCE_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     // Data Assertions
@@ -72,28 +73,26 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
     GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_INGESTION_SOURCE_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java
index 8e2453ce06a39..a86d67fcd15c1 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.source;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -23,13 +26,10 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class ListIngestionSourceResolverTest {
 
-  private static final ListIngestionSourcesInput TEST_INPUT = new ListIngestionSourcesInput(0, 20, null, null);
+  private static final ListIngestionSourcesInput TEST_INPUT =
+      new ListIngestionSourcesInput(0, 20, null, null);
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -40,41 +40,47 @@ public void testGetSuccess() throws Exception {
     final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey();
     key.setId("test");
 
-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN))))
-    );
-
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))),
-        Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        ImmutableMap.of(
-            TEST_INGESTION_SOURCE_URN,
-            new EntityResponse()
-                .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME)
-                .setUrn(TEST_INGESTION_SOURCE_URN)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.INGESTION_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())),
-                    Constants.INGESTION_SOURCE_KEY_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(key.data()))
-                )))
-        )
-    );
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.any(),
+                Mockito.any(),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.any(Authentication.class),
+                Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN))))));
+
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))),
+                Mockito.eq(
+                    ImmutableSet.of(
+                        Constants.INGESTION_INFO_ASPECT_NAME,
+                        Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                TEST_INGESTION_SOURCE_URN,
+                new EntityResponse()
+                    .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME)
+                    .setUrn(TEST_INGESTION_SOURCE_URN)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.INGESTION_INFO_ASPECT_NAME,
+                                new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())),
+                                Constants.INGESTION_SOURCE_KEY_ASPECT_NAME,
+                                new EnvelopedAspect().setValue(new Aspect(key.data()))))))));
     ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient);
 
     // Execute resolver
@@ -88,7 +94,8 @@ public void testGetSuccess() throws Exception {
     assertEquals(resolver.get(mockEnv).get().getCount(), 1);
     assertEquals(resolver.get(mockEnv).get().getTotal(), 1);
     assertEquals(resolver.get(mockEnv).get().getIngestionSources().size(), 1);
-    verifyTestIngestionSourceGraphQL(resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo);
+    verifyTestIngestionSourceGraphQL(
+        resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo);
   }
 
   @Test
@@ -100,35 +107,32 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    Mockito.verify(mockClient, Mockito.times(0)).search(
-        Mockito.any(),
-        Mockito.eq(""),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .search(
+            Mockito.any(),
+            Mockito.eq(""),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
  }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
     ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient);
 
     // Execute resolver
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java
index 16d8da9169a8f..8213a5fb61a55 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.ingest.source;
 
+import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceConfigInput;
@@ -15,19 +19,16 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class UpsertIngestionSourceResolverTest {
 
-  private static final UpdateIngestionSourceInput TEST_INPUT = new UpdateIngestionSourceInput(
-      "Test source",
-      "mysql", "Test source description",
-      new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"),
-      new UpdateIngestionSourceConfigInput("my test recipe", "0.8.18", "executor id", false, null)
-  );
+  private static final UpdateIngestionSourceInput TEST_INPUT =
+      new UpdateIngestionSourceInput(
+          "Test source",
+          "mysql",
+          "Test source description",
+          new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"),
+          new UpdateIngestionSourceConfigInput(
+              "my test recipe", "0.8.18", "executor id", false, null));
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -38,7 +39,8 @@ public void testGetSuccess() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString());
+    Mockito.when(mockEnv.getArgument(Mockito.eq("urn")))
+        .thenReturn(TEST_INGESTION_SOURCE_URN.toString());
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -48,24 +50,24 @@ public void testGetSuccess() throws Exception {
     DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo();
     info.setType(TEST_INPUT.getType());
     info.setName(TEST_INPUT.getName());
-    info.setSchedule(new DataHubIngestionSourceSchedule()
-        .setInterval(TEST_INPUT.getSchedule().getInterval())
-        .setTimezone(TEST_INPUT.getSchedule().getTimezone())
-    );
-    info.setConfig(new DataHubIngestionSourceConfig()
-        .setRecipe(TEST_INPUT.getConfig().getRecipe())
-        .setVersion(TEST_INPUT.getConfig().getVersion())
-        .setExecutorId(TEST_INPUT.getConfig().getExecutorId())
-        .setDebugMode(TEST_INPUT.getConfig().getDebugMode())
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(MutationUtils.buildMetadataChangeProposalWithUrn(TEST_INGESTION_SOURCE_URN,
-            INGESTION_INFO_ASPECT_NAME, info)
-        ),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    info.setSchedule(
+        new DataHubIngestionSourceSchedule()
+            .setInterval(TEST_INPUT.getSchedule().getInterval())
+            .setTimezone(TEST_INPUT.getSchedule().getTimezone()));
+    info.setConfig(
+        new DataHubIngestionSourceConfig()
+            .setRecipe(TEST_INPUT.getConfig().getRecipe())
+            .setVersion(TEST_INPUT.getConfig().getVersion())
+            .setExecutorId(TEST_INPUT.getConfig().getExecutorId())
+            .setDebugMode(TEST_INPUT.getConfig().getDebugMode()));
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                MutationUtils.buildMetadataChangeProposalWithUrn(
+                    TEST_INGESTION_SOURCE_URN, INGESTION_INFO_ASPECT_NAME, info)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
@@ -77,24 +79,21 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false));
     UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient);
 
     // Execute resolver
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java
index 61dd6c678e6e0..8fc5ab6ebb828 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -14,6 +17,7 @@
 import com.linkedin.datahub.graphql.generated.DatasetUpdateInput;
 import com.linkedin.datahub.graphql.types.BatchMutableType;
 import com.linkedin.datahub.graphql.types.dataset.DatasetType;
+import com.linkedin.entity.Aspect;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
@@ -22,153 +26,158 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-import org.testng.annotations.Test;
-import com.linkedin.entity.Aspect;
-
 import java.net.URISyntaxException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.concurrent.CompletionException;
-
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+import org.testng.annotations.Test;
 
 public class MutableTypeBatchResolverTest {
 
-    private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1";
-    private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2";
-    private static final boolean TEST_DATASET_1_IS_DEPRECATED = true;
-    private static final boolean TEST_DATASET_2_IS_DEPRECATED = false;
-    private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note";
-    private static final String TEST_DATASET_2_DEPRECATION_NOTE = "";
-    private static final Deprecation TEST_DATASET_1_DEPRECATION;
-
-    static {
-        try {
-            TEST_DATASET_1_DEPRECATION = new Deprecation()
-                .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
-                .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
-                .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(e);
-        }
+  private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1";
+  private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2";
+  private static final boolean TEST_DATASET_1_IS_DEPRECATED = true;
+  private static final boolean TEST_DATASET_2_IS_DEPRECATED = false;
+  private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note";
+  private static final String TEST_DATASET_2_DEPRECATION_NOTE = "";
+  private static final Deprecation TEST_DATASET_1_DEPRECATION;
+
+  static {
+    try {
+      TEST_DATASET_1_DEPRECATION =
+          new Deprecation()
+              .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
+              .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
+              .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
     }
+  }
 
-    private static final Deprecation TEST_DATASET_2_DEPRECATION;
-
-    static {
-        try {
-            TEST_DATASET_2_DEPRECATION = new Deprecation()
-                .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
-                .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
-                .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(e);
-        }
+  private static final Deprecation TEST_DATASET_2_DEPRECATION;
+
+  static {
+    try {
+      TEST_DATASET_2_DEPRECATION =
+          new Deprecation()
+              .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
+              .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
+              .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
     }
+  }
 
-    @Test
-    public void testGetSuccess() throws Exception {
-        EntityClient mockClient = Mockito.mock(RestliEntityClient.class);
-        BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType = new DatasetType(mockClient);
+  @Test
+  public void testGetSuccess() throws Exception {
+    EntityClient mockClient = Mockito.mock(RestliEntityClient.class);
+    BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType =
+        new DatasetType(mockClient);
 
-        MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = new MutableTypeBatchResolver<>(batchMutableType);
+    MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver =
+        new MutableTypeBatchResolver<>(batchMutableType);
 
-        List<BatchDatasetUpdateInput> mockInputs = Arrays.asList(
+    List<BatchDatasetUpdateInput> mockInputs =
+        Arrays.asList(
             new BatchDatasetUpdateInput.Builder()
-                .setUrn(TEST_DATASET_1_URN)
-                .setUpdate(
-                    new DatasetUpdateInput.Builder()
-                        .setDeprecation(
-                            new DatasetDeprecationUpdate.Builder()
-                                .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
-                                .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
-                                .build()
-                        )
-                        .build()
-                )
-                .build(),
+                .setUrn(TEST_DATASET_1_URN)
+                .setUpdate(
+                    new DatasetUpdateInput.Builder()
+                        .setDeprecation(
+                            new DatasetDeprecationUpdate.Builder()
+                                .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
+                                .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
+                                .build())
+                        .build())
+                .build(),
             new BatchDatasetUpdateInput.Builder()
-                .setUrn(TEST_DATASET_2_URN)
-                .setUpdate(
-                    new DatasetUpdateInput.Builder()
-                        .setDeprecation(
-                            new DatasetDeprecationUpdate.Builder()
-                                .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
-                                .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
-                                .build()
-                        )
-                        .build()
-                )
-                .build()
-        );
-
-        DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-        Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs);
-        QueryContext mockContext = getMockAllowContext();
-        Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
-        Authentication mockAuth = Mockito.mock(Authentication.class);
-        Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth);
-        Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub"));
-
-        Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN);
-        Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN);
-
-        Mockito.when(mockClient.batchGetV2(Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                .setUrn(TEST_DATASET_2_URN)
+                .setUpdate(
+                    new DatasetUpdateInput.Builder()
+                        .setDeprecation(
+                            new DatasetDeprecationUpdate.Builder()
+                                .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
+                                .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
+                                .build())
+                        .build())
+                .build());
+
+    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
+    Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs);
+    QueryContext mockContext = getMockAllowContext();
+    Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
+    Authentication mockAuth = Mockito.mock(Authentication.class);
+    Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth);
+    Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub"));
+
+    Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN);
+    Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN);
+
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
                 Mockito.eq(new HashSet<>(ImmutableSet.of(datasetUrn1, datasetUrn2))),
                 Mockito.any(),
                 Mockito.any(Authentication.class)))
-                .thenReturn(ImmutableMap.of(
-                    datasetUrn1,
-                    new EntityResponse()
-                        .setEntityName(Constants.DATASET_ENTITY_NAME)
-                        .setUrn(datasetUrn1)
-                        .setAspects(new
EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data())) - ))), - datasetUrn2, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn2) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data())) - ))) - )); - - List<Dataset> result = resolver.get(mockEnv).join(); - - ArgumentCaptor<Collection<MetadataChangeProposal>> changeProposalCaptor = ArgumentCaptor.forClass((Class) Collection.class); - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), - // Dataset aspects to fetch are private, but aren't important for this test - Mockito.any(), - Mockito.any(Authentication.class) - ); - Collection<MetadataChangeProposal> changeProposals = changeProposalCaptor.getValue(); - - assertEquals(changeProposals.size(), 2); - assertEquals(result.size(), 2); - } - - @Test - public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType = new DatasetType(mockClient); - - MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = new MutableTypeBatchResolver<>(batchMutableType); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - } + .thenReturn( + ImmutableMap.of( + datasetUrn1, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data()))))), + datasetUrn2, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn2) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data()))))))); + + List<Dataset> result = resolver.get(mockEnv).join(); + + ArgumentCaptor<Collection<MetadataChangeProposal>> changeProposalCaptor = + ArgumentCaptor.forClass((Class) Collection.class); + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), + // Dataset aspects to fetch are private, but aren't important for this test + Mockito.any(), + Mockito.any(Authentication.class)); + Collection<MetadataChangeProposal> changeProposals = changeProposalCaptor.getValue(); + + assertEquals(changeProposals.size(), 2); + assertEquals(result.size(), 2); + } + + @Test + public void testGetFailureUnauthorized() throws Exception { + EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + BatchMutableType<DatasetUpdateInput, 
BatchDatasetUpdateInput, Dataset> batchMutableType = + new DatasetType(mockClient); + + MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = + new MutableTypeBatchResolver<>(batchMutableType); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java index 1adf7b1200574..bdadfc98f6d85 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java @@ -1,60 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.Siblings; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.SiblingsUtils; import com.linkedin.metadata.entity.EntityService; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashSet; import java.util.Optional; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; -import static org.testng.AssertJUnit.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class SiblingsUtilsTest { - private static final String TEST_DATASET_URN1 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; - private static final String TEST_DATASET_URN2 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; - private static final String TEST_DATASET_URN3 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; + private static final String TEST_DATASET_URN1 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; + private static final String TEST_DATASET_URN2 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; + private static final String TEST_DATASET_URN3 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; @Test public void testGetSiblingUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings().setSiblings(siblingUrns) - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings().setSiblings(siblingUrns)); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); } @Test public void testGetSiblingUrnsWithoutSiblings() { EntityService mockService = 
Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings() - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings()); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetSiblingUrnsWithSiblingsAspect() { EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - null - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(null); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetNextSiblingUrn() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); Optional<Urn> nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, new HashSet<>()); assertEquals(nextUrn, Optional.of(UrnUtils.getUrn(TEST_DATASET_URN2))); @@ -62,7 +71,8 @@ public void testGetNextSiblingUrn() { @Test public void testGetNextSiblingUrnWithUsedUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); HashSet<Urn> usedUrns = new HashSet<>(); usedUrns.add(UrnUtils.getUrn(TEST_DATASET_URN2)); Optional<Urn> nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, usedUrns); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index 9bd44e9ab0906..3fee28bc31725 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput; @@ -12,13 +15,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class UpdateUserSettingResolverTest { private static final String TEST_USER_URN = "urn:li:corpuser:test"; + @Test public void testWriteCorpUserSettings() throws Exception { EntityService mockService = getMockEntityService(); @@ -36,9 +36,12 @@ public void testWriteCorpUserSettings() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - CorpUserSettings 
newSettings = new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_USER_URN), - CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + CorpUserSettings newSettings = + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_USER_URN), CORP_USER_SETTINGS_ASPECT_NAME, newSettings); verifySingleIngestProposal(mockService, 1, proposal); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java index e2661841fe8f7..abc1a5786f363 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -16,37 +20,35 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class ReportOperationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Operation expectedOperation = new Operation() - .setTimestampMillis(0L) - .setLastUpdatedTimestamp(0L) - .setOperationType(OperationType.INSERT) - .setSourceType(OperationSourceType.DATA_PLATFORM) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) - .setCustomOperationType(null, SetMode.IGNORE_NULL) - .setNumAffectedRows(1L); + Operation expectedOperation = + new Operation() + .setTimestampMillis(0L) + .setLastUpdatedTimestamp(0L) + .setOperationType(OperationType.INSERT) + .setSourceType(OperationSourceType.DATA_PLATFORM) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setCustomOperationType(null, SetMode.IGNORE_NULL) + .setNumAffectedRows(1L); - MetadataChangeProposal expectedProposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - OPERATION_ASPECT_NAME, expectedOperation); + MetadataChangeProposal expectedProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), OPERATION_ASPECT_NAME, expectedOperation); // Test setting the domain - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class))) - .thenReturn(TEST_ENTITY_URN); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class))) + .thenReturn(TEST_ENTITY_URN); ReportOperationResolver resolver = new 
ReportOperationResolver(mockClient); @@ -57,11 +59,9 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -77,9 +77,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private ReportOperationInput getTestInput() { @@ -91,4 +90,4 @@ private ReportOperationInput getTestInput() { input.setSourceType(com.linkedin.datahub.graphql.generated.OperationSourceType.DATA_PLATFORM); return input; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 329d71ec125db..74f88f95fc171 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -24,13 +27,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddOwnersResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_OWNER_1_URN = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_2_URN = "urn:li:corpuser:test-id-2"; private static final String TEST_OWNER_3_URN = "urn:li:corpGroup:test-id-3"; @@ -39,18 +39,23 @@ public class AddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - 
OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -58,12 +63,20 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), - new OwnerInput(TEST_OWNER_2_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) - ), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), + new OwnerInput( + TEST_OWNER_2_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -71,38 +84,45 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); } @Test public void testGetSuccessExistingOwnerNewType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -110,13 +130,16 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -124,34 +147,42 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + 
com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -159,12 +190,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -172,39 +207,51 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessMultipleOwnerTypes() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + 
.name())))) + .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -212,22 +259,28 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_2_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_3_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_GROUP) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -235,27 +288,25 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN))); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -266,9 +317,15 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = 
new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -280,10 +337,11 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -294,9 +352,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -312,9 +376,15 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -327,21 +397,30 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); 
AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 79fc62742f444..92a789530d6e4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -23,14 +26,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @@ -38,16 +39,18 @@ public class BatchAddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); 
@@ -56,8 +59,12 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -65,52 +72,64 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); } @Test public void testGetSuccessExistingOwners() throws Exception { - final Ownership originalOwnership = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); + final Ownership originalOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + 
.setType(OwnershipType.TECHNICAL_OWNER)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -119,12 +138,20 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -132,44 +159,49 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of( - new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + 
new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -180,20 +212,27 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -205,15 +244,17 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -225,20 +266,27 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -254,20 +302,27 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + 
com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -280,32 +335,42 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 9dc2ec8127806..7cef90ffee512 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @@ -35,15 +36,17 @@ public class BatchRemoveOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -57,12 +60,13 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of( - TEST_OWNER_URN_1, - TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -74,24 +78,36 @@ public void testGetSuccessNoExistingOwners() throws Exception { public void testGetSuccessExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - final Ownership oldOwners1 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners1 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldOwners1); - final Ownership oldOwners2 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_2)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners2 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_2)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldOwners2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -105,10 +121,13 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -120,15 +139,17 @@ public void testGetSuccessExistingOwners() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -140,10 +161,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new 
BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -159,10 +183,13 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -175,22 +202,28 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java index 0643ead444c94..ff11d971b52e8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.service.OwnershipTypeService; @@ -15,15 +18,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateOwnershipTypeResolverTest { - private static final CreateOwnershipTypeInput TEST_INPUT = new CreateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final CreateOwnershipTypeInput TEST_INPUT = + new CreateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); @@ -45,10 +44,12 @@ public void testCreateSuccess() throws Exception { assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); assertEquals(ownershipType.getType(), EntityType.CUSTOM_OWNERSHIP_TYPE); - Mockito.verify(mockService, Mockito.times(1)).createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createOwnershipType( + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -65,20 +66,18 @@ public void testCreateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testCreateOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createOwnershipType( - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createOwnershipType( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class), Mockito.anyLong()); CreateOwnershipTypeResolver resolver = new CreateOwnershipTypeResolver(mockService); @@ -93,12 +92,13 @@ public void testCreateOwnershipTypeServiceException() throws Exception { private OwnershipTypeService initMockService() { OwnershipTypeService service = Mockito.mock(OwnershipTypeService.class); - 
Mockito.when(service.createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_OWNERSHIP_TYPE_URN); + Mockito.when( + service.createOwnershipType( + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_OWNERSHIP_TYPE_URN); return service; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java index 9f526e4008236..ae97164a2787e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -14,11 +18,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.Assert.*; - - public class DeleteOwnershipTypeResolverTest { private static final Urn TEST_URN = @@ -41,11 +40,8 @@ public void testGetSuccessOwnershipTypeCanManage() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class)); } @Test @@ -62,21 +58,17 @@ public void testGetFailureOwnershipTypeCanNotManager() throws Exception { assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class)); } @Test public void testGetOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteOwnershipType( - Mockito.any(), - anyBoolean(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteOwnershipType(Mockito.any(), anyBoolean(), Mockito.any(Authentication.class)); DeleteOwnershipTypeResolver resolver = new DeleteOwnershipTypeResolver(mockService); @@ -93,17 +85,18 @@ public void testGetOwnershipTypeServiceException() throws Exception { private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - 
.setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getOwnershipTypeInfo( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java index ceab13167246c..fd7baf6af7469 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,16 +21,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListOwnershipTypesResolverTest { private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); - private static final ListOwnershipTypesInput TEST_INPUT = new ListOwnershipTypesInput(0, 20, "", null); + private static final ListOwnershipTypesInput TEST_INPUT = + new ListOwnershipTypesInput(0, 20, "", null); @Test public void testGetSuccess() throws Exception { @@ -38,21 +38,24 @@ public void testGetSuccess() throws Exception { final OwnershipTypeKey key = new OwnershipTypeKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN))))); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); @@ -78,35 +81,32 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); // Execute resolver @@ -124,4 +124,4 @@ public static OwnershipTypeInfo getOwnershipTypeInfo() { info.setDescription("some description"); return info; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java index f35b8f98cc1ac..6e428842201d5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.linkedin.common.AuditStamp; @@ -7,8 +10,8 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.UpdateOwnershipTypeInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; @@ -24,19 +27,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateOwnershipTypeResolverTest { private static final Urn TEST_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, 
"test"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateOwnershipTypeInput TEST_INPUT = new UpdateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final UpdateOwnershipTypeInput TEST_INPUT = + new UpdateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); @Test public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { @@ -55,23 +54,27 @@ public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { assertEquals(ownershipType.getInfo().getName(), TEST_INPUT.getName()); assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); - Mockito.verify(mockService, Mockito.times(1)).updateOwnershipType( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateOwnershipType( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testUpdateOwnershipTypeServiceException() throws Exception { // Update resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateOwnershipType( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateOwnershipType( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateOwnershipTypeResolver resolver = new UpdateOwnershipTypeResolver(mockService); @@ -100,39 +103,41 @@ public void testUpdateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new 
AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when( + mockService.getOwnershipTypeInfo( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getOwnershipTypeEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getOwnershipTypeEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java index b56d897a468ba..2827e3602e379 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreatePostResolverTest { private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; private static final String POST_MEDIA_LOCATION = @@ -59,9 +58,12 @@ public void testCreatePost() throws Exception { UpdateMediaInput media = new UpdateMediaInput(); media.setType(POST_MEDIA_TYPE); media.setLocation(POST_MEDIA_LOCATION); - Media mediaObj = new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new Url(POST_MEDIA_LOCATION)); - when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))).thenReturn(mediaObj); + Media mediaObj = + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))) + .thenReturn(mediaObj); UpdatePostContentInput content = new UpdatePostContentInput(); content.setTitle(POST_TITLE); @@ -69,22 +71,33 @@ public void testCreatePost() throws Exception { content.setLink(POST_LINK); content.setContentType(POST_CONTENT_TYPE); content.setMedia(media); - com.linkedin.post.PostContent postContentObj = new com.linkedin.post.PostContent().setType( - com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new 
Url(POST_MEDIA_LOCATION))); - when(_postService.mapPostContent(eq(POST_CONTENT_TYPE.toString()), eq(POST_TITLE), eq(POST_DESCRIPTION), - eq(POST_LINK), any(Media.class))).thenReturn(postContentObj); + com.linkedin.post.PostContent postContentObj = + new com.linkedin.post.PostContent() + .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia( + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(_postService.mapPostContent( + eq(POST_CONTENT_TYPE.toString()), + eq(POST_TITLE), + eq(POST_DESCRIPTION), + eq(POST_LINK), + any(Media.class))) + .thenReturn(postContentObj); CreatePostInput input = new CreatePostInput(); input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); input.setContent(content); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - when(_postService.createPost(eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj), - eq(_authentication))).thenReturn(true); + when(_postService.createPost( + eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), + eq(postContentObj), + eq(_authentication))) + .thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java index b8a7488a824fd..085cfd0569781 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.urn.Urn; @@ -9,11 +13,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class DeletePostResolverTest { private static final String POST_URN_STRING = "urn:li:post:123"; private PostService _postService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java index c22d6bf39640d..6c475cdc7f5a8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,17 +33,10 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; 
import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListPostsResolverTest { private static Map<Urn, EntityResponse> _entityResponseMap; private static final String POST_URN_STRING = "urn:li:post:examplePost"; @@ -49,12 +47,15 @@ public class ListPostsResolverTest { private static final String POST_TITLE = "title"; private static final String POST_DESCRIPTION = "description"; private static final String POST_LINK = "https://datahubproject.io"; - private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); - private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(MEDIA); + private static final Media MEDIA = + new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = + new PostContent() + .setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; private EntityClient _entityClient; @@ -72,8 +73,11 @@ private Map<Urn, EntityResponse> getMockPostsEntityResponse() throws URISyntaxEx DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(postUrn.toString()); dataHubRoleInfo.setName(postUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return ImmutableMap.of(postUrn, entityResponse); } @@ -106,13 +110,27 @@ public void testListPosts() throws Exception { ListPostsInput input = new ListPostsInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(1); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(1); roleSearchResult.setEntities( - new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); - - when(_entityClient.search(eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt(), - eq(_authentication), Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); + + when(_entityClient.search( + eq(POST_ENTITY_NAME), + any(), + eq(null), + any(), + anyInt(), + anyInt(), + eq(_authentication), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())) + .thenReturn(_entityResponseMap); ListPostsResult result = 
_resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java index 9c04c67dd3a3b..eebe0034fce61 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -24,10 +26,10 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.service.QueryService; -import com.linkedin.entity.client.EntityClient; import com.linkedin.query.QueryProperties; import com.linkedin.query.QuerySource; import com.linkedin.query.QueryStatement; @@ -40,21 +42,19 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class CreateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final CreateQueryInput TEST_INPUT = new CreateQueryInput( - new CreateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString())) - ); + private static final CreateQueryInput TEST_INPUT = + new CreateQueryInput( + new CreateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString()))); @Test public void testGetSuccess() throws Exception { @@ -70,25 +70,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + 
TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -105,23 +115,24 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateQueryResolver resolver = new CreateQueryResolver(mockService); @@ -136,58 +147,68 @@ public void testGetQueryServiceException() throws Exception { private QueryService initMockService() { QueryService service = Mockito.mock(QueryService.class); - Mockito.when(service.createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_QUERY_URN); - - final QueryProperties 
queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects querySubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(querySubjects.data())) - ))) - ); + Mockito.when( + service.createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT + .getProperties() + .getStatement() + .getLanguage() + .toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_QUERY_URN); + + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects querySubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(querySubjects.data())))))); return 
service; } @@ -197,36 +218,40 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java index 78c894f27cbc3..96ddc632562ee 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -22,14 +25,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteQueryResolverTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @Test @@ -45,10 +45,8 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -62,10 +60,8 @@ public void testGetSuccessCanEditQueries() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -79,19 +75,17 @@ public void testGetFailureActorUnauthorized() { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteQuery( - 
Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteQuery(Mockito.any(), Mockito.any(Authentication.class)); DeleteQueryResolver resolver = new DeleteQueryResolver(mockService); @@ -108,14 +102,13 @@ private static QueryService initMockService() { QueryService mockService = Mockito.mock(QueryService.class); QuerySubjects existingQuerySubjects = new QuerySubjects(); - existingQuerySubjects.setSubjects(new QuerySubjectArray( - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)) - )); + existingQuerySubjects.setSubjects( + new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(mockService.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) - .thenReturn(existingQuerySubjects); + Mockito.when( + mockService.getQuerySubjects( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn(existingQuerySubjects); return mockService; } @@ -126,40 +119,47 @@ private QueryContext getMockAllowEditQueriesOnQueryContext() { private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getActorUrn()) + .thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), - DeleteQueryResolverTest.TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec( + DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), + DeleteQueryResolverTest.TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? 
AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 877a4d2b27f6a..8a56b142e5b5e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -32,28 +35,24 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListQueriesResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = Urn.createFromTuple("query", "test-id"); - private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = new ListQueriesInput( - 0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString() - ); - private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = new ListQueriesInput( - 0, 30, null, QuerySource.MANUAL, null - ); - private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = new ListQueriesInput( - 0, 40, null, null, TEST_DATASET_URN.toString() - ); + private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = + new ListQueriesInput(0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString()); + private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = + new ListQueriesInput(0, 30, null, QuerySource.MANUAL, null); + private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = + new ListQueriesInput(0, 40, null, null, TEST_DATASET_URN.toString()); @DataProvider(name = "inputs") public static Object[][] inputs() { - return new Object[][] {{ TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER}}; + return new Object[][] { + {TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER} + }; } @Test(dataProvider = "inputs") @@ -61,22 +60,30 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(input.getQuery() == null ? 
ListQueriesResolver.DEFAULT_QUERY : input.getQuery()), - Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), - Mockito.eq(new SortCriterion().setField(ListQueriesResolver.CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq( + input.getQuery() == null + ? ListQueriesResolver.DEFAULT_QUERY + : input.getQuery()), + Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), + Mockito.eq( + new SortCriterion() + .setField(ListQueriesResolver.CREATED_AT_FIELD) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN))))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); @@ -90,7 +97,8 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getQueries().size(), 1); - assertEquals(resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); } @Test @@ -102,33 +110,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT_FULL_FILTERS); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_FULL_FILTERS); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + 
Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver @@ -146,7 +156,8 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity if (source != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_SOURCE_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_SOURCE_FIELD, null, ImmutableList.of(source.toString()), false, @@ -154,14 +165,14 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity } if (entityUrn != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_ENTITIES_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_ENTITIES_FIELD, null, ImmutableList.of(entityUrn), false, FilterOperator.EQUAL)); - } criteria.setAnd(andConditions); return ResolverUtils.buildFilter(Collections.emptyList(), ImmutableList.of(criteria)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java index 9b500b5fb3936..766d8a2ccb136 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -13,12 +15,12 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateQueryInput; -import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; -import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.QueryLanguage; import com.linkedin.datahub.graphql.generated.QueryStatementInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; +import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; import com.linkedin.entity.EntityResponse; @@ -40,22 +42,21 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class UpdateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = 
UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateQueryInput TEST_INPUT = new UpdateQueryInput( - new UpdateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString())) - ); + private static final UpdateQueryInput TEST_INPUT = + new UpdateQueryInput( + new UpdateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString()))); @Test public void testGetSuccess() throws Exception { @@ -72,25 +73,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).updateQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN_2) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateQuery( + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -108,23 +119,24 @@ public void testGetUnauthorizedNoEditQueriesRights() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, 
Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Update resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateQueryResolver resolver = new UpdateQueryResolver(mockService); @@ -143,56 +155,59 @@ private QueryService initMockService() { // Pre-Update QueryService service = Mockito.mock(QueryService.class); - final QuerySubjects existingSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); + final QuerySubjects existingSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(service.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) + Mockito.when( + service.getQuerySubjects(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) .thenReturn(existingSubjects); // Post-Update - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects newSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN_2) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(newSubjects.data())) - ))) - ); + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + 
.setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects newSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(newSubjects.data())))))); return service; } @@ -202,62 +217,71 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editQueriesRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); - - AuthorizationRequest editAllRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); + AuthorizationRequest editQueriesRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editQueriesRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); + + AuthorizationRequest editAllRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); AuthorizationResult editQueriesResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult1.getType()).thenReturn(allowEditEntityQueries ? 
AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))).thenReturn(editQueriesResult1); + Mockito.when(editQueriesResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))) + .thenReturn(editQueriesResult1); AuthorizationResult editAllResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest1))).thenReturn(editAllResult1); AuthorizationResult editQueriesResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))).thenReturn(editQueriesResult2); + Mockito.when(editQueriesResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))) + .thenReturn(editQueriesResult2); AuthorizationResult editAllResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest2))).thenReturn(editAllResult2); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java index 3cde81d7a7f31..fe032d0bf4859 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AcceptRoleResolverTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -54,7 +53,8 @@ public void testInvalidInviteToken() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(false); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(false); AcceptRoleInput input = new AcceptRoleInput(); input.setInviteToken(INVITE_TOKEN_STRING); @@ -69,8 +69,10 @@ public void testNoRoleUrn() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(null); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(null); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); @@ -89,8 +91,10 @@ public void testAssignRolePasses() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); 
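    // The invite token service stubs below make the token resolve to a URN, report as
    // valid, and map to a role, so the accept-role flow can proceed for the mocked actor.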
when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(roleUrn); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(roleUrn); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java index 85891dbd96fb0..6411728552a1e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.google.common.collect.ImmutableList; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchAssignRoleResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 8d8faf5c3f12e..9197d1b18c0c9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - 
when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java index ef426979953d0..8e761454cb06c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class GetInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index 4a0b062c67ffd..d956295faa180 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static 
org.mockito.Mockito.anyInt; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -24,14 +31,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListRolesResolverTest { private static final String ADMIN_ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String EDITOR_ROLE_URN_STRING = "urn:li:dataHubRole:Editor"; @@ -47,8 +46,11 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(roleUrn.toString()); dataHubRoleInfo.setName(roleUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return entityResponse; } @@ -57,8 +59,12 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { public void setupTest() throws Exception { Urn adminRoleUrn = Urn.createFromString(ADMIN_ROLE_URN_STRING); Urn editorRoleUrn = Urn.createFromString(EDITOR_ROLE_URN_STRING); - _entityResponseMap = ImmutableMap.of(adminRoleUrn, getMockRoleEntityResponse(adminRoleUrn), editorRoleUrn, - getMockRoleEntityResponse(editorRoleUrn)); + _entityResponseMap = + ImmutableMap.of( + adminRoleUrn, + getMockRoleEntityResponse(adminRoleUrn), + editorRoleUrn, + getMockRoleEntityResponse(editorRoleUrn)); _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); @@ -84,14 +90,28 @@ public void testListRoles() throws Exception { ListRolesInput input = new ListRolesInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(2); - roleSearchResult.setEntities(new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), - new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); - - when(_entityClient.search(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt(), any(), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(2); + roleSearchResult.setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), + new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); + + when(_entityClient.search( + eq(DATAHUB_ROLE_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + any(), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), 
any(), any())) + .thenReturn(_entityResponseMap); ListRolesResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c161a66d3ee93..c7d397c5a4a73 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -30,61 +33,49 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class AggregateAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); List<String> facets = ImmutableList.of("platform", "domains"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), "", facets, null, TEST_VIEW_URN.toString(), 
null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -94,18 +85,16 @@ public static void testApplyViewNullBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", viewFilter, // Verify that view filter was used. 0, 0, facets // Verify called with facets we provide - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -113,42 +102,44 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -158,18 +149,15 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 
0, 0, - null - ); + null); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -178,36 +166,28 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List<String> facets = ImmutableList.of("platform"); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - null, - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -218,18 +198,17 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 
0, 0, facets // Verify facets passed in were used - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -238,36 +217,29 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List<String> facets = ImmutableList.of(); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -278,55 +250,50 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, null // Verify that an empty list for facets in input sends null - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
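    // A minimal sketch of the guard this test exercises (assumed shape, not code from
    // this patch): the resolver asks the ViewService for the view's info and, on null,
    // warns and searches without a view filter:
    //   DataHubViewInfo info = viewService.getViewInfo(viewUrn, authentication);
    //   if (info == null) {
    //     log.warn("View {} does not exist, ignoring it", viewUrn);
    //   }
    // The mock below returns null view info, so the client should be called with the
    // default searchable entity types and a null filter.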
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List<String> searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List<String> searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -334,45 +301,31 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 0, - null - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 0, null); } @Test public static void testErrorFetchingResults() throws Exception { - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); 
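    // searchAcrossEntities is stubbed above to throw RemoteInvocationException, so the
    // resolver's future is expected to surface the failure rather than a search result.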
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -383,17 +336,18 @@ public static void testErrorFetchingResults() throws Exception { private static Filter createFilter(String field, String value) { return new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField(field) - .setValue(value) - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of(value))) - )) - ))); + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(field) + .setValue(value) + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues(new StringArray(ImmutableList.of(value)))))))); } private static DataHubViewInfo getViewInfo(Filter viewFilter) { @@ -402,24 +356,20 @@ private static DataHubViewInfo getViewInfo(Filter viewFilter) { info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); return info; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -430,22 +380,21 @@ private static EntityClient initMockEntityClient( int start, int limit, List<String> facets, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), - Mockito.eq(facets) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class), + Mockito.eq(facets))) + .thenReturn(result); return client; } @@ -456,8 +405,8 @@ private static void verifyMockEntityClient( Filter filter, int start, int limit, - List<String> facets - ) throws Exception { + List<String> facets) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -468,21 +417,13 @@ private static void verifyMockEntityClient( Mockito.eq(null), Mockito.eq(null), Mockito.any(Authentication.class), - Mockito.eq(facets) - ); + Mockito.eq(facets)); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void 
verifyMockViewService(ViewService mockService, Urn viewUrn) {
     Mockito.verify(mockService, Mockito.times(1))
-        .getViewInfo(
-            Mockito.eq(viewUrn),
-            Mockito.any(Authentication.class)
-        );
+        .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class));
   }
 
-  private AggregateAcrossEntitiesResolverTest() { }
-
+  private AggregateAcrossEntitiesResolverTest() {}
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java
index 7397ea8fa21cf..3b69337acfbd0 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.search;
 
+import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
@@ -32,14 +34,12 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
-
 public class AutoCompleteForMultipleResolverTest {
 
   private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test");
   private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test");
 
-  private AutoCompleteForMultipleResolverTest() { }
+  private AutoCompleteForMultipleResolverTest() {}
 
   public static void testAutoCompleteResolverSuccess(
       EntityClient mockClient,
@@ -48,9 +48,10 @@ public static void testAutoCompleteResolverSuccess(
       EntityType entityType,
       SearchableEntityType<?, ?> entity,
       Urn viewUrn,
-      Filter filter
-  ) throws Exception {
-    final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService);
+      Filter filter)
+      throws Exception {
+    final AutoCompleteForMultipleResolver resolver =
+        new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService);
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
@@ -65,13 +66,7 @@ public static void testAutoCompleteResolverSuccess(
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     resolver.get(mockEnv).get();
-    verifyMockEntityClient(
-        mockClient,
-        entityName,
-        "test",
-        filter,
-        10
-    );
+    verifyMockEntityClient(mockClient, entityName, "test", filter, 10);
   }
 
   // test our main entity types
@@ -79,43 +74,64 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception {
     ViewService viewService = initMockViewService(null, null);
 
     // Datasets
-    EntityClient mockClient = initMockEntityClient(
-        Constants.DATASET_ENTITY_NAME,
-        "test",
-        null,
-        10,
-        new AutoCompleteResult()
-            .setQuery("test")
-            .setEntities(new AutoCompleteEntityArray())
-            .setSuggestions(new StringArray())
-    );
-    testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATASET_ENTITY_NAME, EntityType.DATASET, new DatasetType(mockClient), null, null);
+    EntityClient mockClient =
+        initMockEntityClient(
+            Constants.DATASET_ENTITY_NAME,
+            "test",
+            null,
+            10,
+            new AutoCompleteResult()
+                .setQuery("test")
+                .setEntities(new AutoCompleteEntityArray())
+
.setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATASET_ENTITY_NAME, + EntityType.DATASET, + new DatasetType(mockClient), + null, + null); // Dashboards - mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DASHBOARD_ENTITY_NAME, EntityType.DASHBOARD, new DashboardType(mockClient), null, null); + mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DASHBOARD_ENTITY_NAME, + EntityType.DASHBOARD, + new DashboardType(mockClient), + null, + null); - //DataFlows - mockClient = initMockEntityClient( - Constants.DATA_FLOW_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATA_FLOW_ENTITY_NAME, EntityType.DATA_FLOW, new DataFlowType(mockClient), null, null); + // DataFlows + mockClient = + initMockEntityClient( + Constants.DATA_FLOW_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATA_FLOW_ENTITY_NAME, + EntityType.DATA_FLOW, + new DataFlowType(mockClient), + null, + null); } // test filters with a given view @@ -123,16 +139,16 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); testAutoCompleteResolverSuccess( mockClient, viewService, @@ -140,8 +156,7 @@ public static void testAutoCompleteResolverWithViewFilter() throws Exception { EntityType.DATASET, new DatasetType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); } // test entity type filters with a given view @@ -152,16 +167,16 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti entityNames.add(Constants.DASHBOARD_ENTITY_NAME); DataHubViewInfo viewInfo = createViewInfo(entityNames); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + 
EntityClient mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); // ensure we do hit the entity client for dashboards since dashboards are in our view testAutoCompleteResolverSuccess( @@ -171,25 +186,26 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti EntityType.DASHBOARD, new DashboardType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); - // if the view has only dashboards, we should not make an auto-complete request on other entity types + // if the view has only dashboards, we should not make an auto-complete request on other entity + // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), Mockito.eq(10), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } @Test public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(new DatasetType(mockClient)), viewService); + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver( + ImmutableList.of(new DatasetType(mockClient)), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -204,75 +220,60 @@ public static void testAutoCompleteResolverFailNoQuery() throws Exception { } private static EntityClient initMockEntityClient( - String entityName, - String query, - Filter filters, - int limit, - AutoCompleteResult result - ) throws Exception { + String entityName, String query, Filter filters, int limit, AutoCompleteResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.autoComplete( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn(result); + Mockito.when( + client.autoComplete( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filters), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } - + private static void verifyMockEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filters, - int limit - ) throws Exception { + EntityClient mockClient, String entityName, String query, Filter filters, int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), Mockito.eq(limit), - Mockito.any(Authentication.class) - ); + 
Mockito.any(Authentication.class)); } private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index a599117c3e165..29a2b3081aefe 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,20 +23,16 @@ import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class GetQuickFiltersResolverTest { @@ -41,19 +40,21 @@ public class GetQuickFiltersResolverTest { public static void testGetQuickFiltersHappyPathSuccess() throws Exception { SearchResultMetadata mockData = getHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + 
SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -72,19 +73,21 @@ public static void testGetQuickFiltersHappyPathSuccess() throws Exception { public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { SearchResultMetadata mockData = getUnHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -103,16 +106,17 @@ public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { public static void testGetQuickFiltersFailure() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -124,26 +128,36 @@ public static void testGetQuickFiltersFailure() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static void compareResultToExpectedData(GetQuickFiltersResult result, GetQuickFiltersResult expected) { - IntStream.range(0, result.getQuickFilters().size()).forEach(index -> { - QuickFilter resultFilter = result.getQuickFilters().get(index); - QuickFilter expectedFilter = expected.getQuickFilters().get(index); - Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); - Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); - if (resultFilter.getEntity() != null) { - Assert.assertEquals(resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); - } - }); + private static void compareResultToExpectedData( + GetQuickFiltersResult result, GetQuickFiltersResult expected) { + IntStream.range(0, result.getQuickFilters().size()) + .forEach( + index -> { + QuickFilter resultFilter = result.getQuickFilters().get(index); + QuickFilter expectedFilter = expected.getQuickFilters().get(index); + Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); + 
Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); + if (resultFilter.getEntity() != null) { + Assert.assertEquals( + resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); + } + }); } private static SearchResultMetadata getHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); FilterValueArray entityTypeFilters = new FilterValueArray(); entityTypeFilters.add(createFilterValue("dataset", 100, null)); @@ -168,11 +182,18 @@ private static GetQuickFiltersResult getHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List<QuickFilter> quickFilters = new ArrayList<>(); // platforms should be in alphabetical order - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DASHBOARD", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_FLOW", null)); @@ -186,9 +207,12 @@ private static GetQuickFiltersResult getHappyPathResultData() { private static SearchResultMetadata 
getUnHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); // only 3 platforms available - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); FilterValueArray entityTypeFilters = new FilterValueArray(); // no dashboard, data flows, or glossary terms @@ -210,10 +234,15 @@ private static SearchResultMetadata getUnHappyPathTestData() { private static GetQuickFiltersResult getUnHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List<QuickFilter> quickFilters = new ArrayList<>(); - // in correct order by count for platforms (alphabetical). In correct order by priority for entity types - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + // in correct order by count for platforms (alphabetical). In correct order by priority for + // entity types + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_JOB", null)); quickFilters.add(createQuickFilter("_entityType", "CHART", null)); @@ -224,7 +253,8 @@ private static GetQuickFiltersResult getUnHappyPathResultData() { return result; } - private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { + private static QuickFilter createQuickFilter( + @Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); quickFilter.setValue(value); @@ -234,7 +264,8 @@ private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnu return quickFilter; } - private static FilterValue createFilterValue(@Nonnull final String value, final int count, @Nullable final String entity) { + private static FilterValue createFilterValue( + @Nonnull final String value, final int count, @Nullable final String entity) { FilterValue filterValue = new FilterValue(); filterValue.setValue(value); filterValue.setFacetCount(count); @@ -244,7 +275,8 @@ private static FilterValue createFilterValue(@Nonnull final String value, final return filterValue; } - private static AggregationMetadata createAggregationMetadata(@Nonnull final String name, @Nonnull final FilterValueArray filterValues) { + private static AggregationMetadata 
createAggregationMetadata( + @Nonnull final String name, @Nonnull final FilterValueArray filterValues) { AggregationMetadata aggregationMetadata = new AggregationMetadata(); aggregationMetadata.setName(name); aggregationMetadata.setFilterValues(filterValues); @@ -257,24 +289,22 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private GetQuickFiltersResolverTest() { } - + private GetQuickFiltersResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index b0a681c9b2342..d0bbfd126b9b9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -38,167 +41,172 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - public class SearchAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); 
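    // [Editorial aside -- not part of the original patch.] Every test in this file
    // builds the same two-part view fixture: a DataHubViewInfo whose definition names
    // the entity types the view is restricted to and carries a Filter. Condensed
    // (audit-stamp boilerplate omitted; purely for orientation):
    //
    //   DataHubViewInfo view = new DataHubViewInfo();
    //   view.setDefinition(
    //       new DataHubViewDefinition()
    //           .setEntityTypes(new StringArray(ImmutableList.of(
    //               Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))
    //           .setFilter(viewFilter));
    //
    // The entity-type list and the filter are each exercised by the tests below.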
- - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockEntityClient( + mockClient, + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", - viewFilter, + viewFilter, // Verify that view filter was used. 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockEntityClient( - mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. - "", - viewFilter, // Verify that view filter was used. 
- 0, - 10 - ); - - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + 10); + + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("baseField.keyword") - .setValue("baseTest") - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("baseTest"))) - )) - ))); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("baseField.keyword") + .setValue("baseTest") + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues( + new StringArray(ImmutableList.of("baseTest")))))))); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); 
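    // [Editorial aside -- not part of the original patch.] Note that the stub above is
    // keyed on SearchUtils.combineFilters(baseFilter, viewFilter) rather than on the
    // raw base filter: the resolver is expected to AND the view's criteria into each
    // OR-clause of the caller's filter before hitting the client. Schematically
    // (shape inferred from the SearchUtilsTest changes later in this patch):
    //
    //   base = (A AND B) OR (C AND D);  view = (V)
    //   combineFilters(base, view) == (A AND B AND V) OR (C AND D AND V)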
+ + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -208,74 +216,66 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewNullBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - null, - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver 
resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + null, "", 0, 10, null, null, TEST_VIEW_URN.toString(), null, null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -285,74 +285,75 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewEmptyBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); 
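    // [Editorial aside -- not part of the original patch.] This test and the previous
    // one pin down the same rule from opposite ends: a null entityTypes list and an
    // empty one are treated alike and defer to the view's entity types. Informally
    // (a sketch of the rule under test, not the resolver's actual code):
    //
    //   if (inputTypes == null || inputTypes.isEmpty()) {
    //     entityNames = view.getDefinition().getEntityTypes(); // dataset, dashboard
    //   } else {
    //     entityNames = intersection(inputTypes, view.getDefinition().getEntityTypes());
    //   }
    //
    // hence both tests expect the client to see DATASET and DASHBOARD.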
+
+    final SearchAcrossEntitiesInput testInput =
+        new SearchAcrossEntitiesInput(
+            Collections.emptyList(), // Empty Entity Types
+            "",
+            0,
+            10,
+            null,
+            null,
+            TEST_VIEW_URN.toString(),
+            null,
+            null);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput);
@@ -362,56 +363,55 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception {
 
     verifyMockEntityClient(
         mockClient,
-        ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored.
+        ImmutableList.of(
+            Constants.DATASET_ENTITY_NAME,
+            Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored.
         "",
         viewFilter, // Verify that merged filters were used.
         0,
-        10
-    );
+        10);
 
-    verifyMockViewService(
-        mockService,
-        TEST_VIEW_URN
-    );
+    verifyMockViewService(mockService, TEST_VIEW_URN);
   }
 
   @Test
   public static void testApplyViewViewDoesNotExist() throws Exception {
     // When a view does not exist, the endpoint should WARN and not apply the view.
-    ViewService mockService = initMockViewService(
-        TEST_VIEW_URN,
-        null
-    );
-
-    List<String> searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
-
-    EntityClient mockClient = initMockEntityClient(
-        searchEntityTypes,
-        "",
-        null,
-        0,
-        10,
-        new SearchResult()
-            .setEntities(new SearchEntityArray())
-            .setNumEntities(0)
-            .setFrom(0)
-            .setPageSize(0)
-            .setMetadata(new SearchResultMetadata())
-    );
-
-    final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService);
-    final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput(
-        Collections.emptyList(), // Empty Entity Types
-        "",
-        0,
-        10,
-        null,
-        null,
-        TEST_VIEW_URN.toString(),
-        null,
-        null
-    );
+    ViewService mockService = initMockViewService(TEST_VIEW_URN, null);
+
+    List<String> searchEntityTypes =
+        SEARCHABLE_ENTITY_TYPES.stream()
+            .map(EntityTypeMapper::getName)
+            .collect(Collectors.toList());
+
+    EntityClient mockClient =
+        initMockEntityClient(
+            searchEntityTypes,
+            "",
+            null,
+            0,
+            10,
+            new SearchResult()
+                .setEntities(new SearchEntityArray())
+                .setNumEntities(0)
+                .setFrom(0)
+                .setPageSize(0)
+                .setMetadata(new SearchResultMetadata()));
+
+    final SearchAcrossEntitiesResolver resolver =
+        new SearchAcrossEntitiesResolver(mockClient, mockService);
+    final SearchAcrossEntitiesInput testInput =
+        new SearchAcrossEntitiesInput(
+            Collections.emptyList(), // Empty Entity Types
+            "",
+            0,
+            10,
+            null,
+            null,
+            TEST_VIEW_URN.toString(),
+            null,
+            null);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput);
@@ -419,49 +419,41 @@ public static void testApplyViewViewDoesNotExist() throws Exception {
 
     resolver.get(mockEnv).get();
 
-    verifyMockEntityClient(
-        mockClient,
-        searchEntityTypes,
-        "",
-        null,
-        0,
-        10
-    );
+    verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 10);
   }
 
   @Test
   public static void testApplyViewErrorFetchingView() throws Exception {
     // When a view cannot be successfully resolved, the endpoint should THROW.
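    // [Editorial aside -- not part of the original patch.] Contrast with
    // testApplyViewViewDoesNotExist above: a view that is merely missing degrades
    // gracefully (warn, then search without the view filter), while a failed fetch
    // must propagate, surfacing from the future as a CompletionException:
    //
    //   Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());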
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -470,17 +462,10 @@ public static void testApplyViewErrorFetchingView() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -490,21 +475,20 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } @@ -514,8 +498,8 @@ private static void verifyMockEntityClient( String query, Filter filter, int start, - int limit - ) throws Exception { + int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -525,21 +509,13 @@ private static void verifyMockEntityClient( Mockito.eq(limit), Mockito.eq(null), Mockito.eq(null), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } - 
private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private SearchAcrossEntitiesResolverTest() { } - + private SearchAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index c68b621e6921f..273f7156c12a8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.UrnUtils; @@ -23,15 +27,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - // Initialize this class in the style of SearchAcrossEntitiesResolverTest.java public class SearchAcrossLineageResolverTest { - private static final String SOURCE_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; - private static final String TARGET_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; + private static final String SOURCE_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; + private static final String TARGET_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; private static final String QUERY = ""; private static final int START = 0; private static final int COUNT = 10; @@ -87,19 +88,20 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); when(_entityClient.searchAcrossLineage( - eq(UrnUtils.getUrn(SOURCE_URN_STRING)), - eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), - anyList(), - eq(QUERY), - eq(null), - any(), - eq(null), - eq(START), - eq(COUNT), - eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))).thenReturn(lineageSearchResult); + eq(UrnUtils.getUrn(SOURCE_URN_STRING)), + eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), + anyList(), + eq(QUERY), + eq(null), + any(), + eq(null), + eq(START), + eq(COUNT), + eq(START_TIMESTAMP_MILLIS), + eq(END_TIMESTAMP_MILLIS), + eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), + eq(_authentication))) + .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(results.getCount(), 10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 
6ba8b3cefe504..24724cb8e23ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,134 +18,107 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - - public class SearchResolverTest { - @Test - public void testDefaultSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "", - null, - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - @Test - public void testOverrideSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchFlags inputSearchFlags = new SearchFlags(); - inputSearchFlags.setFulltext(false); - inputSearchFlags.setSkipAggregates(true); - inputSearchFlags.setSkipHighlighting(true); - inputSearchFlags.setMaxAggValues(10); - inputSearchFlags.setSkipCache(true); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 1, - 11, - null, - null, - inputSearchFlags - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
- "", - null, - null, - 1, - 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true) - ); - } - - @Test - public void testNonWildCardSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "not a wildcard", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "not a wildcard", - null, // Verify that view filter was used. - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - private EntityClient initMockSearchEntityClient() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.search( + @Test + public void testDefaultSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = new SearchInput(EntityType.DATASET, "", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "", + null, + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + @Test + public void testOverrideSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchFlags inputSearchFlags = new SearchFlags(); + inputSearchFlags.setFulltext(false); + inputSearchFlags.setSkipAggregates(true); + inputSearchFlags.setSkipHighlighting(true); + inputSearchFlags.setMaxAggValues(10); + inputSearchFlags.setSkipCache(true); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "", 1, 11, null, null, inputSearchFlags); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + null, + 1, + 11, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true)); + } + + @Test + public void testNonWildCardSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "not a wildcard", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "not a wildcard", + null, // Verify that view filter was used. + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + private EntityClient initMockSearchEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + Mockito.when( + client.search( Mockito.anyString(), Mockito.anyString(), Mockito.any(), @@ -151,40 +126,38 @@ private EntityClient initMockSearchEntityClient() throws Exception { Mockito.anyInt(), Mockito.anyInt(), Mockito.any(Authentication.class), - Mockito.any() - )).thenReturn( - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - return client; - } - - private void verifyMockSearchEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filter, - SortCriterion sortCriterion, - int start, - int limit, - com.linkedin.metadata.query.SearchFlags searchFlags - ) throws Exception { - Mockito.verify(mockClient, Mockito.times(1)).search( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(sortCriterion), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags) - ); - } - - private SearchResolverTest() { - } + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + return client; + } + + private void verifyMockSearchEntityClient( + EntityClient mockClient, + String entityName, + String query, + Filter filter, + SortCriterion sortCriterion, + int start, + int limit, + com.linkedin.metadata.query.SearchFlags searchFlags) + throws Exception { + Mockito.verify(mockClient, Mockito.times(1)) + .search( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(sortCriterion), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class), + Mockito.eq(searchFlags)); + } + + private SearchResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java index b35f7a77b209c..8f23f0a624576 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java 
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java @@ -17,16 +17,18 @@ public class SearchUtilsTest { @Test public static void testApplyViewToFilterNullBaseFilter() { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(null, viewFilter); Assert.assertEquals(viewFilter, result); @@ -34,275 +36,272 @@ public static void testApplyViewToFilterNullBaseFilter() { @Test public static void testApplyViewToFilterComplexBaseFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new 
StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterComplexViewFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - 
.setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - 
.setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray(ImmutableList.of("viewTest4")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterV1Filter() { - Filter baseFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ); - - Filter viewFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ); - - Filter result 
= SearchUtils.combineFilters(baseFilter, viewFilter); - - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( + Filter baseFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("field1") .setValue("test1") @@ -310,7 +309,13 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("field2") .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), + .setValues(new StringArray(ImmutableList.of("test2")))))); + + Filter viewFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("viewField1") .setValue("viewTest1") @@ -318,10 +323,38 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("viewField2") .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ) - ))); + .setValues(new StringArray(ImmutableList.of("viewTest2")))))); + + Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); + + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray( + ImmutableList.of("viewTest2"))))))))); Assert.assertEquals(expectedResult, result); } @@ -329,24 +362,17 @@ public static void testApplyViewToFilterV1Filter() { @Test public static void testApplyViewToEntityTypes() { - List<String> baseEntityTypes = ImmutableList.of( - Constants.CHART_ENTITY_NAME, - Constants.DATASET_ENTITY_NAME - ); + List<String> baseEntityTypes = + ImmutableList.of(Constants.CHART_ENTITY_NAME, Constants.DATASET_ENTITY_NAME); - List<String> viewEntityTypes = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME - ); + List<String> viewEntityTypes = + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME); final List<String> result = SearchUtils.intersectEntityTypes(baseEntityTypes, viewEntityTypes); - final List<String> expectedResult = ImmutableList.of( - Constants.DATASET_ENTITY_NAME - ); + final List<String> expectedResult = ImmutableList.of(Constants.DATASET_ENTITY_NAME); Assert.assertEquals(expectedResult, result); } - private SearchUtilsTest() { } - + private SearchUtilsTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java index 905e913fba909..553a2c85a7ae2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package 
com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,29 +18,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateCorpUserViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = new UpdateCorpUserViewsSettingsInput( - TEST_URN.toString() - ); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = new UpdateCorpUserViewsSettingsInput( - null - ); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = + new UpdateCorpUserViewsSettingsInput(TEST_URN.toString()); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = + new UpdateCorpUserViewsSettingsInput(null); @Test public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -46,25 +44,28 @@ public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessViewSettingsExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + 
.setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -73,26 +74,28 @@ public void testGetSuccessViewSettingsExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } - @Test public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -101,22 +104,26 @@ public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), + Mockito.any(Authentication.class)); } @Test public void testGetCorpUserSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getCorpUserSettings(Mockito.eq(TEST_USER_URN), Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + 
UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -126,19 +133,18 @@ public void testGetCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateCorpUserSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - null - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(CorpUserSettings.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(TEST_USER_URN, null); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.any(CorpUserSettings.class), + Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -148,17 +154,13 @@ public void testUpdateCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - Urn user, - CorpUserSettings existingSettings - ) { + private static SettingsService initSettingsService(Urn user, CorpUserSettings existingSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getCorpUserSettings( - Mockito.eq(user), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getCorpUserSettings(Mockito.eq(user), Mockito.any(Authentication.class))) .thenReturn(existingSettings); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java index 4e2283735b8c9..8f96eae9480f8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,10 +17,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class GlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -25,9 +24,7 @@ public class GlobalViewsSettingsResolverTest { @Test public void testGetSuccessNullSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); 
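    // For reference: initSettingsService(...) is the private helper defined at the
    // bottom of this class; passing null models the case where no global views
    // settings have been saved yet. A minimal sketch of what the helper stubs,
    // using only the names visible in this file:
    //
    //   SettingsService mockService = Mockito.mock(SettingsService.class);
    //   Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class)))
    //       .thenReturn(new GlobalSettingsInfo()
    //           .setViews(existingViewSettings, SetMode.IGNORE_NULL));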
GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -42,9 +39,7 @@ public void testGetSuccessNullSettings() throws Exception { @Test public void testGetSuccessEmptySettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -53,16 +48,13 @@ public void testGetSuccessEmptySettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertNull( - result.getDefaultView() - ); + Assert.assertNull(result.getDefaultView()); } @Test public void testGetSuccessExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(TEST_URN) - ); + SettingsService mockService = + initSettingsService(new GlobalViewsSettings().setDefaultView(TEST_URN)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -71,17 +63,15 @@ public void testGetSuccessExistingSettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertEquals( - result.getDefaultView(), - TEST_URN.toString() - ); + Assert.assertEquals(result.getDefaultView(), TEST_URN.toString()); } @Test public void testGetException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); @@ -94,9 +84,7 @@ public void testGetException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -107,15 +95,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - Mockito.any(Authentication.class))) + Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class))) .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java index 
9ea3c223559cd..c0cc09052176d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,22 +17,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateGlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); - private static final UpdateGlobalViewsSettingsInput TEST_INPUT = new UpdateGlobalViewsSettingsInput( - TEST_URN.toString() - ); + private static final UpdateGlobalViewsSettingsInput TEST_INPUT = + new UpdateGlobalViewsSettingsInput(TEST_URN.toString()); @Test public void testGetSuccessNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -39,16 +35,17 @@ public void testGetSuccessNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessNoDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -58,18 +55,20 @@ public void testGetSuccessNoDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessExistingDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - )) - ); + SettingsService mockService = + initSettingsService( + new GlobalViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView"))); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -79,16 
+78,20 @@ public void testGetSuccessExistingDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetGlobalViewsSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -100,15 +103,13 @@ public void testGetGlobalViewsSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateGlobalViewsSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateGlobalSettings( - Mockito.any(GlobalSettingsInfo.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateGlobalSettings( + Mockito.any(GlobalSettingsInfo.class), Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -122,11 +123,13 @@ public void testUpdateGlobalViewsSettingsException() throws Exception { @Test public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - null // Should never be null. - ); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + SettingsService mockService = + initSettingsService( + null // Should never be null. 
+ ); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -140,9 +143,7 @@ public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -154,15 +155,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - Mockito.any(Authentication.class))) - .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); + Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class))) + .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java index 8c4445452c564..db3e9afab7249 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -21,12 +26,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchGetStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final long TIME = 123L; @@ -35,7 +34,8 @@ public class BatchGetStepStatesResolverTest { private static final String SECOND_STEP_STATE_ID = "2"; private static final Urn FIRST_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:1"); private static final Urn SECOND_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:2"); - private static final Set<String> ASPECTS = ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME); + private static final Set<String> ASPECTS = + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME); private EntityClient _entityClient; private BatchGetStepStatesResolver _resolver; private DataFetchingEnvironment _dataFetchingEnvironment; @@ -68,15 +68,17 @@ public 
void testBatchGetStepStatesFirstStepCompleted() throws Exception { new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); final Set<Urn> urns = ImmutableSet.of(FIRST_STEP_STATE_URN); - final Map<String, RecordTemplate> firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - firstStepStateProperties); - final Map<Urn, EntityResponse> entityResponseMap = ImmutableMap.of(FIRST_STEP_STATE_URN, - TestUtils.buildEntityResponse(firstAspectMap)); + final Map<String, RecordTemplate> firstAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties); + final Map<Urn, EntityResponse> entityResponseMap = + ImmutableMap.of(FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap)); - when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) + when(_entityClient.batchGetV2( + eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) .thenReturn(entityResponseMap); - final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchGetStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, actualBatchResult.getResults().size()); } @@ -100,18 +102,21 @@ public void testBatchGetStepStatesBothStepsCompleted() throws Exception { new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); final Set<Urn> urns = ImmutableSet.of(FIRST_STEP_STATE_URN, SECOND_STEP_STATE_URN); - final Map<String, RecordTemplate> firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - firstStepStateProperties); - final Map<String, RecordTemplate> secondAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - secondStepStateProperties); - final Map<Urn, EntityResponse> entityResponseMap = ImmutableMap.of( - FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap), - SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap)); - - when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) + final Map<String, RecordTemplate> firstAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties); + final Map<String, RecordTemplate> secondAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, secondStepStateProperties); + final Map<Urn, EntityResponse> entityResponseMap = + ImmutableMap.of( + FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap), + SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap)); + + when(_entityClient.batchGetV2( + eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) .thenReturn(entityResponseMap); - final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchGetStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(2, actualBatchResult.getResults().size()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java index 5f20a11f15ac6..b457498cc547a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -16,11 +20,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchUpdateStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final String FIRST_STEP_STATE_ID = "1"; @@ -52,7 +51,8 @@ public void testBatchUpdateStepStatesFirstStepCompleted() throws Exception { input.setStates(ImmutableList.of(firstInput)); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); - final BatchUpdateStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchUpdateStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, actualBatchResult.getResults().size()); verify(_entityClient, times(1)).ingestProposal(any(), eq(_authentication), eq(false)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 268d6a6bc4268..340802cde467b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -20,14 +24,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class AddTagsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -35,11 +35,12 @@ public class AddTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); 
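    // The resolver checks existence of the target entity and of every tag URN
    // before ingesting, so each lookup is stubbed to true here; the failure-path
    // tests further down (testGetFailureTagDoesNotExist,
    // testGetFailureResourceDoesNotExist) exercise the same flow with one of
    // these stubs returning false instead.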
Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); @@ -50,46 +51,51 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(originalTags); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalTags); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); @@ -100,41 +106,43 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, 
null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -145,9 +153,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -159,10 +166,11 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -173,9 +181,8 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, 
null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -191,9 +198,8 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -206,21 +212,21 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index 651b89359c83f..71354627b1145 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -18,21 +22,17 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchAddTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String 
TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -40,19 +40,20 @@ public class BatchAddTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -64,55 +65,63 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); 
verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -126,45 +135,49 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + 
MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -175,33 +188,36 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -213,19 +229,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -236,42 +254,47 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index f302540eba904..8cd10afee293e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -26,15 +30,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchRemoveTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -42,15 +43,17 @@ public class BatchRemoveTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -64,22 +67,25 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); proposal2.setEntityType(Constants.DATASET_ENTITY_NAME); proposal2.setAspectName(Constants.GLOBAL_TAGS_ASPECT_NAME); @@ -93,25 +99,33 @@ public void testGetSuccessNoExistingTags() throws Exception { public void testGetSuccessExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - final GlobalTags oldTags1 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags1 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags1); - final GlobalTags oldTags2 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags2 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -125,22 +139,25 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new 
BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -149,15 +166,17 @@ public void testGetSuccessExistingTags() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -169,19 +188,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + 
Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -192,44 +213,49 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java index f801daf4f2a3f..dac7104ca2930 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java @@ -1,39 +1,36 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.CreateTagInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.tag.TagProperties; import com.linkedin.metadata.key.TagKey; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.tag.TagProperties; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class CreateTagResolverTest { - private static final CreateTagInput TEST_INPUT = new CreateTagInput( - "test-id", - "test-name", - "test-description" - ); + private static final CreateTagInput TEST_INPUT = + new CreateTagInput("test-id", "test-name", "test-description"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal(Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) .thenReturn(String.format("urn:li:tag:%s", TEST_INPUT.getId())); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -50,15 +47,13 @@ public void testGetSuccess() throws Exception { TagProperties props = new TagProperties(); props.setDescription("test-description"); props.setName("test-name"); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, props); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, props); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -75,9 +70,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -85,10 +79,9 @@ public void testGetEntityClientException() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); // Execute resolver @@ -99,4 +92,4 @@ public void testGetEntityClientException() 
throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java index b01ac1a9b14ae..11dfad43d5731 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTagResolverTest { private static final String TEST_URN = "urn:li:tag:test-id"; @@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index b5bbf0775a8ba..6ae72fcbb7268 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,11 +27,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetTagColorResolverTest { private static final String TEST_ENTITY_URN = "urn:li:tag:test-tag"; @@ -41,10 +40,11 @@ public void testGetSuccessExistingProperties() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - Mockito.when(mockService.getAspect( - 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTagProperties); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -59,19 +59,17 @@ public void testGetSuccessExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final TagProperties newTagProperties = new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - TAG_PROPERTIES_ASPECT_NAME, newTagProperties); + final TagProperties newTagProperties = + new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), TAG_PROPERTIES_ASPECT_NAME, newTagProperties); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -81,10 +79,11 @@ public void testGetFailureNoExistingProperties() throws Exception { EntityService mockService = getMockEntityService(); // Test setting the domain - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0))) + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -99,9 +98,8 @@ public void testGetFailureNoExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -111,21 +109,26 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - final EnvelopedAspect oldTagPropertiesAspect = new EnvelopedAspect() - .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) - .setValue(new Aspect(oldTagProperties.data())); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.TAG_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.TAG_ENTITY_NAME) - 
.setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.TAG_PROPERTIES_ASPECT_NAME, - oldTagPropertiesAspect))))); + final EnvelopedAspect oldTagPropertiesAspect = + new EnvelopedAspect() + .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) + .setValue(new Aspect(oldTagProperties.data())); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.TAG_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.TAG_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -139,9 +142,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("colorHex"))).thenReturn(TEST_COLOR_HEX); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -159,18 +161,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetTagColorResolver resolver = new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetTagColorResolver resolver = + new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -181,4 +183,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 213d21fd35dc1..cb827a42333b2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; 
+import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -19,13 +22,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddTermsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -33,11 +33,12 @@ public class AddTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); @@ -48,41 +49,42 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))) - ); + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); 
EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -94,37 +96,34 @@ public void testGetSuccessExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test public void testGetFailureTermDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -135,26 +134,28 @@ public void testGetFailureTermDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public 
void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -165,16 +166,17 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -185,38 +187,41 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 8887bb452b478..7df19fad52689 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_GLOSSARY_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_GLOSSARY_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -35,123 +36,134 @@ public class BatchAddTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new 
BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN)))) - ); + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of( - TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new 
BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -163,31 +175,35 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -203,11 +219,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -220,21 +237,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index 
995a4acb8a467..659ce40542a9c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -35,15 +36,17 @@ public class BatchRemoveTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -57,12 +60,12 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -74,25 +77,36 @@ public void testGetSuccessNoExistingTerms() throws Exception { public void testGetSuccessExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - final GlossaryTerms oldTerms1 = new GlossaryTerms().setTerms(new 
GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms1 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTerms1); - final GlossaryTerms oldTerms2 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms2 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTerms2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -106,12 +120,12 @@ public void testGetSuccessExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -123,15 +137,17 @@ public void testGetSuccessExistingTerms() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) 
.thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -143,12 +159,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -164,12 +180,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -182,24 +198,27 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java index 911152d8c97c1..adf4b1c29ad0d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestInput; @@ -19,19 +22,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestResolverTest { - private static final CreateTestInput TEST_INPUT = new CreateTestInput( - "test-id", - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final CreateTestInput TEST_INPUT = + new CreateTestInput( + "test-id", + "test-name", + "test-category", + "test-description", + new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -50,16 +49,21 @@ public void testGetSuccess() throws Exception { final TestKey key = new TestKey(); key.setId("test-id"); - ArgumentCaptor<MetadataChangeProposal> proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor<MetadataChangeProposal> proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal( + proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityKeyAspect(), GenericRecordUtils.serializeAspect(key)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -80,19 +84,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - 
Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTestResolver resolver = new CreateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java index 6a449e3c4c4c4..1c4973871af09 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; @@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java index 5026e015039e1..6075425d09c05 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,37 +21,34 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListTestsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("test", "test-id"); - private static final ListTestsInput TEST_INPUT = new ListTestsInput( - 0, 20, null - ); + private static final ListTestsInput TEST_INPUT = new ListTestsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { // Create 
resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.TEST_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.TEST_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListTestsResolver resolver = new ListTestsResolver(mockClient); @@ -75,33 +75,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); ListTestsResolver resolver = new ListTestsResolver(mockClient); // Execute resolver @@ -112,4 +114,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java index ae24232bce17c..45e0126367578 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.test; 
+import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.datahub.graphql.generated.TestDefinitionInput; +import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; @@ -19,19 +22,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; - private static final UpdateTestInput TEST_INPUT = new UpdateTestInput( - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final UpdateTestInput TEST_INPUT = + new UpdateTestInput( + "test-name", "test-category", "test-description", new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -48,16 +44,21 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - ArgumentCaptor<MetadataChangeProposal> proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor<MetadataChangeProposal> proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal( + proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityUrn(), UrnUtils.getUrn(TEST_URN)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -79,18 +80,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Update resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateTestResolver 
resolver = new UpdateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java index 2164d4160634c..742e162963ea3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateNativeUserResetTokenResolverTest { private static final String RESET_TOKEN = "resetToken"; @@ -47,7 +46,8 @@ public void testFailsNullUserUrn() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(null); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); } @@ -59,7 +59,8 @@ public void testPasses() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(USER_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertEquals(RESET_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getResetToken()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java index 0957acf0cbbb3..15864dc3ac925 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -15,6 +18,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import 
com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -23,34 +28,35 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateViewResolverTest { - private static final CreateViewInput TEST_INPUT = new CreateViewInput( - DataHubViewType.PERSONAL, - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final CreateViewInput TEST_INPUT = + new CreateViewInput( + DataHubViewType.PERSONAL, + "test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -71,37 +77,59 @@ public void testGetSuccess() throws Exception { assertEquals(view.getDescription(), TEST_INPUT.getDescription()); assertEquals(view.getViewType(), TEST_INPUT.getViewType()); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - assertEquals(view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); - assertEquals(view.getDefinition().getFilter().getOperator(), TEST_INPUT.getDefinition().getFilter().getOperator()); - assertEquals(view.getDefinition().getFilter().getFilters().size(), TEST_INPUT.getDefinition().getFilter().getFilters().size()); - - Mockito.verify(mockService, Mockito.times(1)).createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + assertEquals( + view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); + assertEquals( + view.getDefinition().getFilter().getOperator(), + TEST_INPUT.getDefinition().getFilter().getOperator()); + assertEquals( + view.getDefinition().getFilter().getFilters().size(), + TEST_INPUT.getDefinition().getFilter().getFilters().size()); + + Mockito.verify(mockService, Mockito.times(1)) + .createView( + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -118,22 +146,23 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createView( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createView( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateViewResolver resolver = new CreateViewResolver(mockService); @@ -148,14 +177,15 @@ public void testGetViewServiceException() throws Exception { private ViewService initMockService() { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_VIEW_URN); + Mockito.when( + service.createView( + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_VIEW_URN); return service; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java index afb4c16767f47..357f2119187d6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -17,10 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -40,10 +39,8 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -60,10 +57,8 @@ public void testGetSuccessGlobalViewCanManager() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -79,13 +74,10 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } - @Test public void testGetSuccessPersonalViewIsCreator() throws Exception { ViewService mockService = initViewService(DataHubViewType.PERSONAL); @@ -99,10 +91,8 @@ public void testGetSuccessPersonalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -118,19 +108,17 @@ public void testGetFailurePersonalViewIsNotCreator() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteView( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + 
.deleteView(Mockito.any(), Mockito.any(Authentication.class)); DeleteViewResolver resolver = new DeleteViewResolver(mockService); @@ -146,19 +134,21 @@ public void testGetViewServiceException() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 9a25c9eb1d25c..8c30c17201bc6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -28,53 +31,53 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListGlobalViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput( - 0, 20, "" - ); + private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput(0, 20, ""); @Test public void testGetSuccessInput() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new 
StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListGlobalViewsResolver resolver = new ListGlobalViewsResolver(mockClient); @@ -107,7 +110,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -121,7 +125,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -139,4 +145,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 4c43584144825..85e20cd656fcd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -27,63 +30,65 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListMyViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListMyViewsInput TEST_INPUT_1 = new ListMyViewsInput( - 0, 20, "", DataHubViewType.GLOBAL - ); + private static final ListMyViewsInput TEST_INPUT_1 = + new ListMyViewsInput(0, 20, "", DataHubViewType.GLOBAL); - private static final 
ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput( - 0, 20, "", null - ); + private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput(0, 20, "", null); @Test public void testGetSuccessInput1() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false), + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false), new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -106,35 +111,41 @@ public void testGetSuccessInput2() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new 
SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -165,7 +176,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -179,7 +191,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -197,4 +211,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java index b4895982ae780..1917e55705828 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -38,30 +41,33 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateViewInput TEST_INPUT = new UpdateViewInput( - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final UpdateViewInput TEST_INPUT = + new UpdateViewInput( + 
"test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); @Test public void testGetSuccessGlobalViewIsCreator() throws Exception { @@ -81,33 +87,50 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -128,46 +151,65 @@ public void testGetSuccessGlobalViewManageGlobalViews() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testGetViewServiceException() throws Exception { // Update resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateView( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateView( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateViewResolver resolver = new UpdateViewResolver(mockService); @@ -196,43 +238,46 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getViewEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getViewEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java index 9578ff201ca19..3ad3f0786e987 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -25,12 +28,8 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.Assert; -import org.testng.annotations.Test; import org.mockito.Mockito; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - +import org.testng.annotations.Test; public class ViewUtilsTest { @@ -39,10 +38,10 @@ public class ViewUtilsTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); - @Test public static void testCanCreatePersonalViewAllowed() { - boolean res = ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); + boolean res = + ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); Assert.assertTrue(res); } @@ -67,10 +66,8 @@ public void testCanUpdateViewSuccessGlobalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -80,10 +77,8 @@ public void testCanUpdateViewSuccessGlobalViewCanManageGlobalViews() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -93,10 +88,8 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -106,10 +99,8 @@ public void testGetSuccessPersonalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -119,50 +110,69 @@ public void testGetFailurePersonalViewIsNotCreator() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, 
Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test public void testMapDefinition() throws Exception { - DataHubViewDefinitionInput input = new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.IN), - new FacetFilterInput("test2", null, ImmutableList.of("value3", "value4"), true, FilterOperator.CONTAIN) - ) - ) - ); - - DataHubViewDefinition expectedResult = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion() - .setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setValue("value1") // Disgraceful - .setField("test1.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.IN), - new Criterion() - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("value3", "value4"))) - .setValue("value3") // Disgraceful - .setField("test2.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.CONTAIN) - )) - ) - ) - )) - ); + DataHubViewDefinitionInput input = + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.IN), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value3", "value4"), + true, + FilterOperator.CONTAIN)))); + + DataHubViewDefinition expectedResult = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setNegated(false) + .setValues( + new StringArray( + ImmutableList.of("value1", "value2"))) + .setValue("value1") // Disgraceful + .setField( + "test1.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. + .setCondition(Condition.IN), + new Criterion() + .setNegated(true) + .setValues( + new StringArray( + ImmutableList.of("value3", "value4"))) + .setValue("value3") // Disgraceful + .setField( + "test2.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. 
+ .setCondition(Condition.CONTAIN)))))))); assertEquals(ViewUtils.mapDefinition(input), expectedResult); } @@ -170,17 +180,20 @@ public void testMapDefinition() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when( + mockService.getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java index c4465c7d3cb65..c975c7ebb0507 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,27 +30,25 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class AssertionTypeTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:guid-1"; - private static final AssertionKey TEST_ASSERTION_KEY = new AssertionKey() - .setAssertionId("guid-1"); - private static final AssertionInfo TEST_ASSERTION_INFO = new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(null, SetMode.IGNORE_NULL) - .setCustomProperties(new StringMap()); - private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("snowflake")) - .setInstance(null, SetMode.IGNORE_NULL); + private static final AssertionKey TEST_ASSERTION_KEY = + new AssertionKey().setAssertionId("guid-1"); + private static final AssertionInfo TEST_ASSERTION_INFO = + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion(null, SetMode.IGNORE_NULL) + .setCustomProperties(new StringMap()); + private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance() + .setPlatform(new DataPlatformUrn("snowflake")) + .setInstance(null, SetMode.IGNORE_NULL); private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:guid-2"; - @Test public void testBatchLoad() throws Exception { @@ -60,41 +60,43 @@ public void 
testBatchLoad() throws Exception { Map<String, EnvelopedAspect> assertion1Aspects = new HashMap<>(); assertion1Aspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data()))); assertion1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))); assertion1Aspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - assertionUrn1, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn1) - .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); - - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + assertionUrn1, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn1) + .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); + + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<Assertion>> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); + List<DataFetcherResult<Assertion>> result = + type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -112,17 +114,21 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.assertion.AssertionType type 
= new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java index 3ff4e43ca112c..1e2acd0db455c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java @@ -1,6 +1,7 @@ - package com.linkedin.datahub.graphql.types.container; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,12 +27,12 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.container.ContainerProperties; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; @@ -46,46 +47,55 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class ContainerTypeTest { private static final String TEST_CONTAINER_1_URN = "urn:li:container:guid-1"; - private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey() - .setGuid("guid-1"); - private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = new ContainerProperties() - .setDescription("test description") - .setName("Test Container"); - private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = new EditableContainerProperties() - .setDescription("test editable description"); - private static final Ownership TEST_CONTAINER_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new 
AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); - private static final Status TEST_CONTAINER_1_STATUS = new Status() - .setRemoved(false); - private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = new SubTypes() - .setTypeNames(new StringArray(ImmutableList.of("Database"))); - private static final GlobalTags TEST_CONTAINER_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); - private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = new com.linkedin.container.Container() - .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); + private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey().setGuid("guid-1"); + private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = + new ContainerProperties().setDescription("test description").setName("Test Container"); + private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = + new EditableContainerProperties().setDescription("test editable description"); + private static final Ownership TEST_CONTAINER_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); + private static final Status TEST_CONTAINER_1_STATUS = new Status().setRemoved(false); + private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("Database"))); + private static final GlobalTags TEST_CONTAINER_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = + new com.linkedin.container.Container() + .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); private static final String TEST_CONTAINER_2_URN = "urn:li:container:guid-2"; @@ -100,73 +110,65 @@ public void testBatchLoad() throws Exception { Map<String, EnvelopedAspect> container1Aspects = new HashMap<>(); container1Aspects.put( Constants.CONTAINER_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new 
Aspect(TEST_CONTAINER_1_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data()))); container1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data()))); container1Aspects.put( Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data()))); container1Aspects.put( Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data()))); container1Aspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data()))); container1Aspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data()))); container1Aspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data()))); container1Aspects.put( Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data()))); container1Aspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data()))); container1Aspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data()))); container1Aspects.put( Constants.CONTAINER_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - containerUrn1, - new EntityResponse() - .setEntityName(Constants.CONTAINER_ENTITY_NAME) - .setUrn(containerUrn1) - .setAspects(new EnvelopedAspectMap(container1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + containerUrn1, + new EntityResponse() + .setEntityName(Constants.CONTAINER_ENTITY_NAME) + .setUrn(containerUrn1) + .setAspects(new EnvelopedAspectMap(container1Aspects)))); ContainerType type = new ContainerType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); 
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<Container>> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); + List<DataFetcherResult<Container>> result = + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -177,8 +179,12 @@ public void testBatchLoad() throws Exception { assertEquals(container1.getProperties().getDescription(), "test description"); assertEquals(container1.getProperties().getName(), "Test Container"); assertEquals(container1.getInstitutionalMemory().getElements().size(), 1); - assertEquals(container1.getSubTypes().getTypeNames().get(0), TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0)); - assertEquals(container1.getEditableProperties().getDescription(), TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription()); + assertEquals( + container1.getSubTypes().getTypeNames().get(0), + TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0)); + assertEquals( + container1.getEditableProperties().getDescription(), + TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription()); assertEquals( container1.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_CONTAINER_1_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); @@ -186,8 +192,7 @@ public void testBatchLoad() throws Exception { container1.getTags().getTags().get(0).getTag().getUrn(), TEST_CONTAINER_1_TAGS.getTags().get(0).getTag().toString()); assertEquals( - container1.getContainer().getUrn(), - TEST_CONTAINER_1_CONTAINER.getContainer().toString()); + container1.getContainer().getUrn(), TEST_CONTAINER_1_CONTAINER.getContainer().toString()); // Assert second element is null. 
assertNull(result.get(1)); @@ -196,17 +201,21 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java index 9b6e11fd0b3a4..667d943b1095d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java @@ -1,22 +1,24 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; -import com.linkedin.common.Ownership; -import com.linkedin.common.OwnerArray; -import com.linkedin.common.Owner; -import com.linkedin.common.OwnershipType; +import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; -import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlobalTags; +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.OwnershipType; +import com.linkedin.common.Status; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; -import com.linkedin.common.Status; import com.linkedin.common.url.Url; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; @@ -33,185 +35,181 @@ import com.linkedin.metadata.key.DataPlatformInstanceKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class DataPlatformInstanceTest { - private static final Urn TEST_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); - - private static 
final String TEST_DATAPLATFORMINSTANCE_1_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; - - private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY - = new DataPlatformInstanceKey() - .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) - .setInstance("I1"); - - private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES - = new DataPlatformInstanceProperties() - .setDescription("test description") - .setName("Test Data Platform Instance"); - - private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = new Deprecation() - .setDeprecated(true) - .setActor(TEST_ACTOR_URN) - .setNote("legacy"); - - private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(TEST_ACTOR_URN)))); - - private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); - - private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - - private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status() - .setRemoved(false); - - private static final String TEST_DATAPLATFORMINSTANCE_2_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; - - @Test - public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - - Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); - Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); - - Map<String, EnvelopedAspect> dataPlatformInstance1Aspects = new HashMap<>(); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), - 
Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - dataPlatformInstance1Urn, - new EntityResponse() - .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) - .setUrn(dataPlatformInstance1Urn) - .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); - - DataPlatformInstanceType type = new DataPlatformInstanceType(client); - - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); - List<DataFetcherResult<DataPlatformInstance>> result = type.batchLoad( - ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), mockContext); - - // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( + private static final Urn TEST_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); + + private static final String TEST_DATAPLATFORMINSTANCE_1_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; + + private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY = + new DataPlatformInstanceKey() + .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) + .setInstance("I1"); + + private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES = + new DataPlatformInstanceProperties() + .setDescription("test description") + .setName("Test Data Platform Instance"); + + private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = + new Deprecation().setDeprecated(true).setActor(TEST_ACTOR_URN).setNote("legacy"); + + private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setType(OwnershipType.DATAOWNER).setOwner(TEST_ACTOR_URN)))); + + private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); + + private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + + private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status().setRemoved(false); + + private static final String TEST_DATAPLATFORMINSTANCE_2_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; + + @Test + public void testBatchLoad() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + + Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); + Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); + + Map<String, EnvelopedAspect> dataPlatformInstance1Aspects = new HashMap<>(); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data()))); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data()))); + 
dataPlatformInstance1Aspects.put( + Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data()))); + dataPlatformInstance1Aspects.put( + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data()))); + dataPlatformInstance1Aspects.put( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data()))); + dataPlatformInstance1Aspects.put( + Constants.GLOBAL_TAGS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data()))); + dataPlatformInstance1Aspects.put( + Constants.STATUS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data()))); + Mockito.when( + client.batchGetV2( Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq( + new HashSet<>( + ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); - - assertEquals(result.size(), 2); - - DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); - assertEquals( - dataPlatformInstance1.getUrn(), - TEST_DATAPLATFORMINSTANCE_1_URN - ); - assertEquals( - dataPlatformInstance1.getType(), - EntityType.DATA_PLATFORM_INSTANCE - ); - assertEquals( - dataPlatformInstance1.getProperties().getDescription(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription() - ); - assertEquals( - dataPlatformInstance1.getProperties().getName(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getDeprecated(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getNote(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getActor(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString() - ); - assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); - assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); - assertEquals( - dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), - TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString() - ); - assertEquals( - dataPlatformInstance1.getStatus().getRemoved(), - TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue() - ); - - // Assert second element is null. 
- assertNull(result.get(1)); - } - - @Test - public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type - = new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType(mockClient); - - // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of( - TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), context)); - } + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + dataPlatformInstance1Urn, + new EntityResponse() + .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) + .setUrn(dataPlatformInstance1Urn) + .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); + + DataPlatformInstanceType type = new DataPlatformInstanceType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); + List<DataFetcherResult<DataPlatformInstance>> result = + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); + + assertEquals(result.size(), 2); + + DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); + assertEquals(dataPlatformInstance1.getUrn(), TEST_DATAPLATFORMINSTANCE_1_URN); + assertEquals(dataPlatformInstance1.getType(), EntityType.DATA_PLATFORM_INSTANCE); + assertEquals( + dataPlatformInstance1.getProperties().getDescription(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription()); + assertEquals( + dataPlatformInstance1.getProperties().getName(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName()); + assertEquals( + dataPlatformInstance1.getDeprecation().getDeprecated(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue()); + assertEquals( + dataPlatformInstance1.getDeprecation().getNote(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote()); + assertEquals( + dataPlatformInstance1.getDeprecation().getActor(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString()); + assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); + assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); + assertEquals( + dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), + TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString()); + assertEquals( + dataPlatformInstance1.getStatus().getRemoved(), + TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue()); + + // Assert second element is null. 
+ assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type = + new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType( + mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + context)); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index 3d22f1c429fd6..1959ae6d43208 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -8,146 +8,165 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.Map; +import org.testng.Assert; +import org.testng.annotations.Test; public class DatasetMapperTest { - private static final Urn TEST_DATASET_URN = Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); - private static final Urn TEST_CREATED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); - private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); - - @Test - public void testDatasetPropertiesMapperWithCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - input.setQualifiedName("Test QualifiedName"); - - final TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - final TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setQualifiedName("Test 
QualifiedName"); - expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); - expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); - expectedDatasetProperties.setLastModified(20L); - expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - Assert.assertEquals(actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(null); - expectedDatasetProperties.setCreated(null); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutTimestampActors() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new 
Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(20L); - expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } + private static final Urn TEST_DATASET_URN = + Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); + private static final Urn TEST_CREATED_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); + private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); + + @Test + public void testDatasetPropertiesMapperWithCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + input.setQualifiedName("Test QualifiedName"); + + final TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); + createdTimestamp.setTime(10L); + input.setCreated(createdTimestamp); + + final TimeStamp lastModifiedTimestamp = new TimeStamp(); + lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setQualifiedName("Test QualifiedName"); + expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); + expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); + expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + 
Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + Assert.assertEquals( + actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); + + Assert.assertEquals( + actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(null); + expectedDatasetProperties.setCreated(null); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutTimestampActors() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setTime(10L); + input.setCreated(createdTimestamp); + + TimeStamp lastModifiedTimestamp = new TimeStamp(); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final 
DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java index 78cdaa0a276da..612136d1f9164 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java @@ -19,90 +19,128 @@ public void testMapperFullProfile() { input.setRowCount(10L); input.setColumnCount(45L); input.setSizeInBytes(15L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setMax("1") - .setMean("2") - .setStdev("3") - .setMedian("4") - .setMin("5") - .setNullCount(20L) - .setNullProportion(20.5f) - .setUniqueCount(30L) - .setUniqueProportion(30.5f) - .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setNullCount(30L) - .setNullProportion(30.5f) - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - .setSampleValues(new StringArray(ImmutableList.of("val3", "val4"))) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setMax("1") + .setMean("2") + .setStdev("3") + .setMedian("4") + .setMin("5") + .setNullCount(20L) + .setNullProportion(20.5f) + .setUniqueCount(30L) + .setUniqueProportion(30.5f) + .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setNullCount(30L) + .setNullProportion(30.5f) + .setUniqueCount(40L) + .setUniqueProportion(40.5f) + .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); expected.setSizeInBytes(15L); - expected.setFieldProfiles(new 
ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - 20L, - 20.5f, - "5", - "1", - "2", - "4", - "3", - new ArrayList<>(ImmutableList.of("val1", "val2"))), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - 30L, - 30.5f, - "6", - "2", - "3", - "5", - "4", - new ArrayList<>(ImmutableList.of("val3", "val4"))) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", + 30L, + 30.5f, + 20L, + 20.5f, + "5", + "1", + "2", + "4", + "3", + new ArrayList<>(ImmutableList.of("val1", "val2"))), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", + 40L, + 40.5f, + 30L, + 30.5f, + "6", + "2", + "3", + "5", + "4", + new ArrayList<>(ImmutableList.of("val3", "val4")))))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + 
expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } @Test @@ -111,77 +149,95 @@ public void testMapperPartialProfile() { input.setTimestampMillis(1L); input.setRowCount(10L); input.setColumnCount(45L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setUniqueCount(30L) - .setUniqueProportion(30.5f), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setUniqueCount(30L) + .setUniqueProportion(30.5f), + new DatasetFieldProfile() + 
.setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setUniqueCount(40L) + .setUniqueProportion(40.5f)))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - null, - null, - null, - null, - null, - null, - null, - null), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - null, - null, - "6", - "2", - "3", - "5", - "4", - null) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", 30L, 30.5f, null, null, null, null, null, null, null, null), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", 40L, 40.5f, null, null, "6", "2", "3", "5", "4", null)))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + 
actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java index 48c23f436f875..32735ad7874a0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.domain; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -30,33 +33,34 @@ import java.util.HashSet; import java.util.List; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class DomainTypeTest { private static final String TEST_DOMAIN_1_URN = "urn:li:domain:id-1"; - private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey() - .setId("id-1"); - private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = new DomainProperties() - .setDescription("test description") - .setName("Test Domain"); - private static final Ownership TEST_DOMAIN_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey().setId("id-1"); + private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = + new DomainProperties().setDescription("test description").setName("Test Domain"); + private static final Ownership TEST_DOMAIN_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); private static final String TEST_DOMAIN_2_URN = "urn:li:domain:id-2"; @@ -68,39 +72,48 @@ public void testBatchLoad() throws Exception { Urn domainUrn1 = Urn.createFromString(TEST_DOMAIN_1_URN); Urn domainUrn2 = Urn.createFromString(TEST_DOMAIN_2_URN); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - domainUrn1, - new EntityResponse() - .setEntityName(Constants.DOMAIN_ENTITY_NAME) - .setUrn(domainUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data())) - ))))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + 
Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + domainUrn1, + new EntityResponse() + .setEntityName(Constants.DOMAIN_ENTITY_NAME) + .setUrn(domainUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data()))))))); DomainType type = new DomainType(client); QueryContext mockContext = getMockAllowContext(); - List<DataFetcherResult<Domain>> result = type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); + List<DataFetcherResult<Domain>> result = + type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -120,17 +133,20 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); DomainType type = new DomainType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index 918616a2705b7..f88c8285e20df 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -31,16 +33,9 @@ import com.linkedin.common.urn.Urn; 
import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; -import com.linkedin.notebook.NotebookCell; -import com.linkedin.notebook.NotebookCellArray; -import com.linkedin.notebook.NotebookCellType; -import com.linkedin.notebook.NotebookContent; -import com.linkedin.notebook.NotebookInfo; -import com.linkedin.notebook.EditableNotebookProperties; -import com.linkedin.notebook.TextCell; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.types.container.ContainerType; import com.linkedin.domain.Domains; import com.linkedin.entity.Aspect; @@ -50,6 +45,13 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.notebook.EditableNotebookProperties; +import com.linkedin.notebook.NotebookCell; +import com.linkedin.notebook.NotebookCellArray; +import com.linkedin.notebook.NotebookCellType; +import com.linkedin.notebook.NotebookContent; +import com.linkedin.notebook.NotebookInfo; +import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -60,58 +62,75 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class NotebookTypeTest { private static final String TEST_NOTEBOOK = "urn:li:notebook:(querybook,123)"; - private static final NotebookKey NOTEBOOK_KEY = new NotebookKey() - .setNotebookId("123") - .setNotebookTool("querybook"); - private static final NotebookContent NOTEBOOK_CONTENT = new NotebookContent() - .setCells(new NotebookCellArray(ImmutableList.of(new NotebookCell() - .setType(NotebookCellType.TEXT_CELL) - .setTextCell(new TextCell() - .setCellId("1234") - .setCellTitle("test cell") - .setText("test text") - .setChangeAuditStamps(new ChangeAuditStamps()))))); - private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = new EditableNotebookProperties() - .setDescription("test editable description"); - private static final Ownership OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - - private static final SubTypes SUB_TYPES = new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); - - private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("test_platform")); - - private static final NotebookInfo NOTEBOOK_INFO = new NotebookInfo() - .setTitle("title") - .setExternalUrl(new Url("https://querybook.com/notebook/123")) - .setChangeAuditStamps(new ChangeAuditStamps()) - .setDescription("test doc"); - - private static final Status STATUS = new Status() - .setRemoved(false); - - private static final Domains DOMAINS = new Domains() - .setDomains(new 
UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); - private static final GlobalTags GLOBAL_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final NotebookKey NOTEBOOK_KEY = + new NotebookKey().setNotebookId("123").setNotebookTool("querybook"); + private static final NotebookContent NOTEBOOK_CONTENT = + new NotebookContent() + .setCells( + new NotebookCellArray( + ImmutableList.of( + new NotebookCell() + .setType(NotebookCellType.TEXT_CELL) + .setTextCell( + new TextCell() + .setCellId("1234") + .setCellTitle("test cell") + .setText("test text") + .setChangeAuditStamps(new ChangeAuditStamps()))))); + private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = + new EditableNotebookProperties().setDescription("test editable description"); + private static final Ownership OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + + private static final SubTypes SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); + + private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(new DataPlatformUrn("test_platform")); + + private static final NotebookInfo NOTEBOOK_INFO = + new NotebookInfo() + .setTitle("title") + .setExternalUrl(new Url("https://querybook.com/notebook/123")) + .setChangeAuditStamps(new ChangeAuditStamps()) + .setDescription("test doc"); + + private static final Status STATUS = new Status().setRemoved(false); + + private static final Domains DOMAINS = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); + private static final GlobalTags GLOBAL_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); @Test public void testBatchLoad() throws Exception { @@ -121,79 +140,69 @@ public void testBatchLoad() throws Exception { Map<String, EnvelopedAspect> notebookAspects = new HashMap<>(); notebookAspects.put( Constants.NOTEBOOK_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data()))); notebookAspects.put( Constants.NOTEBOOK_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data()))); notebookAspects.put( Constants.NOTEBOOK_CONTENT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data())) - ); 
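+        // serialize the aspect record and wrap it for the mocked entity response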
+ new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data()))); notebookAspects.put( Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data()))); notebookAspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data()))); notebookAspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data()))); notebookAspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(STATUS.data())) - ); + Constants.STATUS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(STATUS.data()))); notebookAspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data()))); notebookAspects.put( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(DOMAINS.data())) - ); + Constants.DOMAINS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DOMAINS.data()))); notebookAspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data()))); notebookAspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data())) - ); - notebookAspects.put(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data()))); + notebookAspects.put( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DATA_PLATFORM_INSTANCE.data()))); Urn notebookUrn = new NotebookUrn("querybook", "123"); Urn dummyNotebookUrn = new NotebookUrn("querybook", "dummy"); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - notebookUrn, - new EntityResponse() - .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) - .setUrn(notebookUrn) - .setAspects(new EnvelopedAspectMap(notebookAspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + notebookUrn, + new EntityResponse() + .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) + .setUrn(notebookUrn) + .setAspects(new EnvelopedAspectMap(notebookAspects)))); NotebookType type = new NotebookType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<Notebook>> - result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); + List<DataFetcherResult<Notebook>> result = + type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - 
Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -201,13 +210,17 @@ public void testBatchLoad() throws Exception { Notebook notebook = result.get(0).getData(); assertEquals(notebook.getContent().getCells().size(), NOTEBOOK_CONTENT.getCells().size()); - assertEquals(notebook.getContent().getCells().get(0).getType().toString(), + assertEquals( + notebook.getContent().getCells().get(0).getType().toString(), NOTEBOOK_CONTENT.getCells().get(0).getType().toString()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellId(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellId(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellId()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellTitle()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getText(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getText(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getText()); assertEquals(notebook.getInfo().getDescription(), NOTEBOOK_INFO.getDescription()); assertEquals(notebook.getInfo().getExternalUrl(), NOTEBOOK_INFO.getExternalUrl().toString()); @@ -217,11 +230,17 @@ public void testBatchLoad() throws Exception { assertEquals(notebook.getType(), EntityType.NOTEBOOK); assertEquals(notebook.getOwnership().getOwners().size(), 1); assertEquals(notebook.getInstitutionalMemory().getElements().size(), 1); - assertEquals(notebook.getEditableProperties().getDescription(), TEST_EDITABLE_DESCRIPTION.getDescription()); - assertEquals(notebook.getTags().getTags().get(0).getTag().getUrn(), + assertEquals( + notebook.getEditableProperties().getDescription(), + TEST_EDITABLE_DESCRIPTION.getDescription()); + assertEquals( + notebook.getTags().getTags().get(0).getTag().getUrn(), GLOBAL_TAGS.getTags().get(0).getTag().toString()); - assertEquals(notebook.getSubTypes().getTypeNames(), SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); - assertEquals(notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), + assertEquals( + notebook.getSubTypes().getTypeNames(), + SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); + assertEquals( + notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); assertEquals(notebook.getPlatform().getUrn(), DATA_PLATFORM_INSTANCE.getPlatform().toString()); @@ -232,17 +251,19 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new 
ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), - context)); + assertThrows( + RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index a3c089b91de87..c8f694320d88a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,28 +1,30 @@ package com.linkedin.datahub.graphql.types.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.datahub.graphql.generated.QueryEntity; -import com.linkedin.query.QueryLanguage; -import com.linkedin.query.QueryProperties; -import com.linkedin.query.QuerySource; -import com.linkedin.query.QueryStatement; -import com.linkedin.query.QuerySubject; -import com.linkedin.query.QuerySubjectArray; -import com.linkedin.query.QuerySubjects; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryProperties; +import com.linkedin.query.QuerySource; +import com.linkedin.query.QueryStatement; +import com.linkedin.query.QuerySubject; +import com.linkedin.query.QuerySubjectArray; +import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -30,53 +32,50 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class QueryTypeTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); private static final Urn TEST_QUERY_2_URN = UrnUtils.getUrn("urn:li:query:test-2"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); - private static final Urn TEST_DATASET_2_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_2_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final QueryProperties TEST_QUERY_PROPERTIES_1 = new QueryProperties() - .setName("Query 
Name") - .setDescription("Query Description") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ) - )); - private static final QueryProperties TEST_QUERY_PROPERTIES_2 = new QueryProperties() - .setName("Query Name 2") - .setDescription("Query Description 2") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable2") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_2_URN) - ) - )); + private static final QueryProperties TEST_QUERY_PROPERTIES_1 = + new QueryProperties() + .setName("Query Name") + .setDescription("Query Description") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + private static final QueryProperties TEST_QUERY_PROPERTIES_2 = + new QueryProperties() + .setName("Query Name 2") + .setDescription("Query Description 2") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable2")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_2_URN)))); @Test public void testBatchLoad() throws Exception { @@ -87,38 +86,54 @@ public void testBatchLoad() throws Exception { Urn queryUrn2 = TEST_QUERY_2_URN; Map<String, EnvelopedAspect> query1Aspects = new HashMap<>(); - query1Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); - query1Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); Map<String, EnvelopedAspect> query2Aspects = new HashMap<>(); - query2Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_2.data()))); - query2Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_2.data()))); - 
Mockito.when(client.batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(queryUrn1, new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)), queryUrn2, - new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn2) - .setAspects(new EnvelopedAspectMap(query2Aspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new EnvelopedAspectMap(query1Aspects)), + queryUrn2, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn2) + .setAspects(new EnvelopedAspectMap(query2Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); List<DataFetcherResult<QueryEntity>> result = - type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response Mockito.verify(client, Mockito.times(1)) - .batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), Mockito.eq(QueryType.ASPECTS_TO_FETCH), + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -141,40 +156,39 @@ public void testBatchLoadNullEntity() throws Exception { Map<String, EnvelopedAspect> query1Aspects = new HashMap<>(); query1Aspects.put( Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); query1Aspects.put( Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - queryUrn1, - new EntityResponse() - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new 
EnvelopedAspectMap(query1Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<QueryEntity>> result = type.batchLoad(ImmutableList.of( - TEST_QUERY_URN.toString(), - TEST_QUERY_2_URN.toString()), - mockContext); + List<DataFetcherResult<QueryEntity>> result = + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), - Mockito.eq(QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -188,18 +202,23 @@ public void testBatchLoadNullEntity() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); QueryType type = new QueryType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), context)); } private void verifyQuery1(QueryEntity query) { @@ -207,14 +226,30 @@ private void verifyQuery1(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_1.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_1.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_1.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_1.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_1.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), 
+ TEST_QUERY_PROPERTIES_1.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_1.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_1.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); } private void verifyQuery2(QueryEntity query) { @@ -222,13 +257,29 @@ private void verifyQuery2(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_2.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_2.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_2.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_2.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_2.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_2.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_2.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_2.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java 
index 7f3c8f99f6593..f02fd38e2ca7c 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java
@@ -1,114 +1,149 @@
 package com.linkedin.datahub.graphql.types.view;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.AuditStamp;
+import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.StringArray;
+import com.linkedin.datahub.graphql.QueryContext;
+import com.linkedin.datahub.graphql.generated.DataHubView;
+import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.generated.FilterOperator;
 import com.linkedin.datahub.graphql.generated.LogicalOperator;
+import com.linkedin.entity.Aspect;
+import com.linkedin.entity.EntityResponse;
+import com.linkedin.entity.EnvelopedAspect;
+import com.linkedin.entity.EnvelopedAspectMap;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.query.filter.Condition;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
 import com.linkedin.metadata.query.filter.Criterion;
 import com.linkedin.metadata.query.filter.CriterionArray;
 import com.linkedin.metadata.query.filter.Filter;
+import com.linkedin.r2.RemoteInvocationException;
 import com.linkedin.view.DataHubViewDefinition;
 import com.linkedin.view.DataHubViewInfo;
 import com.linkedin.view.DataHubViewType;
-import com.linkedin.common.urn.Urn;
-import com.linkedin.datahub.graphql.QueryContext;
-import com.linkedin.datahub.graphql.generated.DataHubView;
-import com.linkedin.datahub.graphql.generated.EntityType;
-import com.linkedin.entity.Aspect;
-import com.linkedin.entity.EntityResponse;
-import com.linkedin.entity.EnvelopedAspect;
-import com.linkedin.entity.EnvelopedAspectMap;
-import com.linkedin.entity.client.EntityClient;
-import com.linkedin.metadata.Constants;
-import com.linkedin.r2.RemoteInvocationException;
 import graphql.execution.DataFetcherResult;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import org.mockito.Mockito;
-
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
 public class DataHubViewTypeTest {
 
   private static final String TEST_VIEW_URN = "urn:li:dataHubView:test";
   private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test");
+
   /**
    * A Valid View is one which is minted by the createView or updateView GraphQL resolvers.
    *
-   * View Definitions currently support a limited Filter structure, which includes a single Logical filter set.
-   * Either a set of OR criteria with 1 value in each nested "and", or a single OR criteria with a set of nested ANDs.
+   * <p>View Definitions currently support a limited Filter structure, which includes a single
+   * Logical filter set. Either a set of OR criteria with 1 value in each nested "and", or a single
+   * OR criteria with a set of nested ANDs.
    *
-   * This enables us to easily support merging more complex View predicates in the future without a data migration,
-   * should the need arise.
+   * <p>This enables us to easily support merging more complex View predicates in the future without
+   * a data migration, should the need arise.
    */
-  private static final DataHubViewInfo TEST_VALID_VIEW_INFO = new DataHubViewInfo()
-      .setType(DataHubViewType.PERSONAL)
-      .setName("test")
-      .setDescription("test description")
-      .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
-      .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
-      .setDefinition(new DataHubViewDefinition()
-        .setFilter(new Filter()
-            .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-                new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(
-                    new Criterion()
-                        .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                        .setField("test")
-                        .setCondition(Condition.EQUAL)
-                )))
-            )))
-        )
-        .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
+  private static final DataHubViewInfo TEST_VALID_VIEW_INFO =
+      new DataHubViewInfo()
+          .setType(DataHubViewType.PERSONAL)
+          .setName("test")
+          .setDescription("test description")
+          .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+          .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+          .setDefinition(
+              new DataHubViewDefinition()
+                  .setFilter(
+                      new Filter()
+                          .setOr(
+                              new ConjunctiveCriterionArray(
+                                  ImmutableList.of(
+                                      new ConjunctiveCriterion()
+                                          .setAnd(
+                                              new CriterionArray(
+                                                  ImmutableList.of(
+                                                      new Criterion()
+                                                          .setValues(
+                                                              new StringArray(
+                                                                  ImmutableList.of(
+                                                                      "value1", "value2")))
+                                                          .setField("test")
+                                                          .setCondition(Condition.EQUAL))))))))
+                  .setEntityTypes(
+                      new StringArray(
+                          ImmutableList.of(
+                              Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
 
   /**
-   * An Invalid View is on which has been ingested manually, which should not occur under normal operation of DataHub.
+   * An Invalid View is one which has been ingested manually, which should not occur under normal
+   * operation of DataHub.
    *
-   * This would be a complex view with multiple OR and nested AND predicates.
+   * <p>This would be a complex view with multiple OR and nested AND predicates.
    */
-  private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = new DataHubViewInfo()
-      .setType(DataHubViewType.PERSONAL)
-      .setName("test")
-      .setDescription("test description")
-      .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
-      .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
-      .setDefinition(new DataHubViewDefinition()
-          .setFilter(new Filter()
-              .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-                  new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(
-                      new Criterion()
-                          .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                          .setField("test")
-                          .setCondition(Condition.EQUAL),
-                      new Criterion()
-                          .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                          .setField("test2")
-                          .setCondition(Condition.EQUAL)
-                  ))),
-                  new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(
-                      new Criterion()
-                          .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                          .setField("test2")
-                          .setCondition(Condition.EQUAL),
-                      new Criterion()
-                          .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                          .setField("test2")
-                          .setCondition(Condition.EQUAL)
-                  )))
-              )))
-          )
-          .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
+  private static final DataHubViewInfo TEST_INVALID_VIEW_INFO =
+      new DataHubViewInfo()
+          .setType(DataHubViewType.PERSONAL)
+          .setName("test")
+          .setDescription("test description")
+          .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+          .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+          .setDefinition(
+              new DataHubViewDefinition()
+                  .setFilter(
+                      new Filter()
+                          .setOr(
+                              new ConjunctiveCriterionArray(
+                                  ImmutableList.of(
+                                      new ConjunctiveCriterion()
+                                          .setAnd(
+                                              new CriterionArray(
+                                                  ImmutableList.of(
+                                                      new Criterion()
+                                                          .setValues(
+                                                              new StringArray(
+                                                                  ImmutableList.of(
+                                                                      "value1", "value2")))
+                                                          .setField("test")
+                                                          .setCondition(Condition.EQUAL),
+                                                      new Criterion()
+                                                          .setValues(
+                                                              new StringArray(
+                                                                  ImmutableList.of(
+                                                                      "value1", "value2")))
+                                                          .setField("test2")
+                                                          .setCondition(Condition.EQUAL)))),
+                                      new ConjunctiveCriterion()
+                                          .setAnd(
+                                              new CriterionArray(
+                                                  ImmutableList.of(
+                                                      new Criterion()
+                                                          .setValues(
+                                                              new StringArray(
+                                                                  ImmutableList.of(
+                                                                      "value1", "value2")))
+                                                          .setField("test2")
+                                                          .setCondition(Condition.EQUAL),
+                                                      new Criterion()
+                                                          .setValues(
+                                                              new StringArray(
+                                                                  ImmutableList.of(
+                                                                      "value1", "value2")))
+                                                          .setField("test2")
+                                                          .setCondition(Condition.EQUAL))))))))
+                  .setEntityTypes(
+                      new StringArray(
+                          ImmutableList.of(
+                              Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
 
   private static final String TEST_VIEW_URN_2 = "urn:li:dataHubView:test2";
 
@@ -123,33 +158,37 @@ public void testBatchLoadValidView() throws Exception {
     Map<String, EnvelopedAspect> view1Aspects = new HashMap<>();
     view1Aspects.put(
         Constants.DATAHUB_VIEW_INFO_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data()))
-    );
-    Mockito.when(client.batchGetV2(
-        Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))),
-        Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(
-            viewUrn1,
-            new EntityResponse()
-                .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
-                .setUrn(viewUrn1)
-                .setAspects(new EnvelopedAspectMap(view1Aspects))));
-
-    com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
+        new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data())));
+    Mockito.when(
+            client.batchGetV2(
+                Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))),
+                Mockito.eq(
+                    com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                viewUrn1,
+                new EntityResponse()
+                    .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
+                    .setUrn(viewUrn1)
+                    .setAspects(new EnvelopedAspectMap(view1Aspects))));
+
+    com.linkedin.datahub.graphql.types.view.DataHubViewType type =
+        new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
 
     QueryContext mockContext = Mockito.mock(QueryContext.class);
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
 
-    List<DataFetcherResult<DataHubView>> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext);
+    List<DataFetcherResult<DataHubView>> result =
+        type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext);
 
     // Verify response
-    Mockito.verify(client, Mockito.times(1)).batchGetV2(
-        Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)),
-        Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(client, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)),
+            Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+            Mockito.any(Authentication.class));
 
     assertEquals(result.size(), 2);
@@ -164,9 +203,12 @@ public void testBatchLoadValidView() throws Exception {
     assertEquals(view.getDefinition().getEntityTypes().get(1), EntityType.DASHBOARD);
     assertEquals(view.getDefinition().getFilter().getOperator(), LogicalOperator.AND);
     assertEquals(view.getDefinition().getFilter().getFilters().size(), 1);
-    assertEquals(view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL);
+    assertEquals(
+        view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL);
     assertEquals(view.getDefinition().getFilter().getFilters().get(0).getField(), "test");
-    assertEquals(view.getDefinition().getFilter().getFilters().get(0).getValues(), ImmutableList.of("value1", "value2"));
+    assertEquals(
+        view.getDefinition().getFilter().getFilters().get(0).getValues(),
+        ImmutableList.of("value1", "value2"));
 
     // Assert second element is null.
     assertNull(result.get(1));
@@ -174,40 +216,45 @@ public void testBatchLoadValidView() throws Exception {
 
   @Test
   public void testBatchLoadInvalidView() throws Exception {
-    // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log a warning).
+    // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log
+    // a warning).
     EntityClient client = Mockito.mock(EntityClient.class);
     Urn invalidViewUrn = Urn.createFromString(TEST_VIEW_URN);
 
     Map<String, EnvelopedAspect> view1Aspects = new HashMap<>();
     view1Aspects.put(
         Constants.DATAHUB_VIEW_INFO_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data()))
-    );
-    Mockito.when(client.batchGetV2(
-        Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))),
-        Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(
-            invalidViewUrn,
-            new EntityResponse()
-                .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
-                .setUrn(invalidViewUrn)
-                .setAspects(new EnvelopedAspectMap(view1Aspects))));
-
-    com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
+        new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data())));
+    Mockito.when(
+            client.batchGetV2(
+                Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))),
+                Mockito.eq(
+                    com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                invalidViewUrn,
+                new EntityResponse()
+                    .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
+                    .setUrn(invalidViewUrn)
+                    .setAspects(new EnvelopedAspectMap(view1Aspects))));
+
+    com.linkedin.datahub.graphql.types.view.DataHubViewType type =
+        new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
 
     QueryContext mockContext = Mockito.mock(QueryContext.class);
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
 
-    List<DataFetcherResult<DataHubView>> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext);
+    List<DataFetcherResult<DataHubView>> result =
+        type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext);
 
     // Verify response
-    Mockito.verify(client, Mockito.times(1)).batchGetV2(
-        Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(invalidViewUrn)),
-        Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(client, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(invalidViewUrn)),
+            Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+            Mockito.any(Authentication.class));
 
     assertEquals(result.size(), 1);
@@ -227,17 +274,21 @@ public void testBatchLoadInvalidView() throws Exception {
   @Test
   public void testBatchLoadClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.anyString(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient);
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.anyString(),
+            Mockito.anySet(),
+            Mockito.anySet(),
+            Mockito.any(Authentication.class));
+    com.linkedin.datahub.graphql.types.view.DataHubViewType type =
+        new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient);
 
     // Execute Batch load
     QueryContext context = Mockito.mock(QueryContext.class);
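    // The assertThrows below relies on batchLoad translating the stubbed checked
    // RemoteInvocationException into an unchecked exception. A minimal sketch of that
    // wrapping convention, written as a comment so it does not alter the patch content;
    // the helper name and message text are illustrative assumptions, not the exact
    // implementation:
    //
    //   try {
    //     return batchLoadFromClient(urns, context);
    //   } catch (Exception e) {
    //     throw new RuntimeException("Failed to batch load DataHub Views", e);
    //   }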
    Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-    assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2),
-        context));
+    assertThrows(
+        RuntimeException.class,
+        () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), context));
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java
index 0a58ff88586c6..6ecbc8d015b29 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java
@@ -1,57 +1,50 @@
 package com.linkedin.datahub.graphql.utils;
 
+import static org.testng.AssertJUnit.assertEquals;
+
 import com.linkedin.datahub.graphql.util.DateUtil;
 import org.joda.time.DateTime;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.AssertJUnit.assertEquals;
-
 public class DateUtilTest {
 
-    private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) {
-        DateTime result = new DateTime()
-                .withDate(2023, 1, dayOfMonth);
-        if (zeroTime) {
-            return new DateUtil().setTimeToZero(result);
-        }
-        return result
-                .withHourOfDay(1)
-                .withMinuteOfHour(2)
-                .withSecondOfMinute(3)
-                .withMillisOfSecond(4);
+  private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) {
+    DateTime result = new DateTime().withDate(2023, 1, dayOfMonth);
+    if (zeroTime) {
+      return new DateUtil().setTimeToZero(result);
     }
+    return result.withHourOfDay(1).withMinuteOfHour(2).withSecondOfMinute(3).withMillisOfSecond(4);
+  }
 
-    private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) {
-        assertEquals(
-                setTimeParts(dayOfMonth, true).getMillis(),
-                dateUtil.getStartOfNextWeek().getMillis()
-        );
-    }
+  private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) {
+    assertEquals(
+        setTimeParts(dayOfMonth, true).getMillis(), dateUtil.getStartOfNextWeek().getMillis());
+  }
 
-    @Test
-    public void testStartOfNextWeek() {
-        DateUtil dateUtil = Mockito.spy(DateUtil.class);
+  @Test
+  public void testStartOfNextWeek() {
+    DateUtil dateUtil = Mockito.spy(DateUtil.class);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
 
-        Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false));
-        assertEqualStartOfNextWeek(dateUtil, 9);
-    }
+    Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false));
+    assertEqualStartOfNextWeek(dateUtil, 9);
+  }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java
index 48ce2ddb6dde4..0419fe0b5254d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.utils;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.AssertJUnit.*;
+
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils;
 import com.linkedin.identity.CorpUserInfo;
@@ -7,19 +10,24 @@
 import com.linkedin.mxe.MetadataChangeProposal;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.AssertJUnit.*;
-
-
 public class MutationsUtilsTest {
 
   @Test
   public void testBuildMetadataChangeProposal() {
-    MetadataChangeProposal metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithUrn(
-        UrnUtils.getUrn("urn:li:corpuser:datahub"), CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true));
-    assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE));
-    metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithKey(new CorpUserKey().setUsername("datahub"),
-        CORP_USER_ENTITY_NAME, CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true));
-    assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE));
+    MetadataChangeProposal metadataChangeProposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            UrnUtils.getUrn("urn:li:corpuser:datahub"),
+            CORP_USER_INFO_ASPECT_NAME,
+            new CorpUserInfo().setActive(true));
+    assertEquals(
+        UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE));
+    metadataChangeProposal =
+        MutationUtils.buildMetadataChangeProposalWithKey(
+            new CorpUserKey().setUsername("datahub"),
+            CORP_USER_ENTITY_NAME,
+            CORP_USER_INFO_ASPECT_NAME,
+            new CorpUserInfo().setActive(true));
+    assertEquals(
+        UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE));
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java
index adbc6808b5ab9..005b47df56982 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.utils;
 
+import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID;
+import static org.testng.Assert.*;
+
 import com.linkedin.datahub.graphql.types.common.mappers.util.RunInfo;
 import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
 import com.linkedin.entity.EnvelopedAspect;
@@ -8,10 +11,6 @@
 import java.util.List;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID;
-
 public class SystemMetadataUtilsTest {
 
   private final Long recentLastObserved = 1660056070640L;
@@ -21,15 +20,21 @@ public class SystemMetadataUtilsTest {
   @Test
   public void testGetLastIngestedTime() {
     EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)
-    ));
-    aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)
-    ));
-    aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)
-    ));
+    aspectMap.put(
+        "default-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)));
+    aspectMap.put(
+        "real-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)));
+    aspectMap.put(
+        "real-run-id2",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)));
 
     Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap);
     assertEquals(lastObserved, mediumLastObserved);
@@ -38,15 +43,21 @@ public void testGetLastIngestedTime() {
   @Test
   public void testGetLastIngestedRunId() {
     EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)
-    ));
-    aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)
-    ));
-    aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)
-    ));
+    aspectMap.put(
+        "default-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)));
+    aspectMap.put(
+        "real-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)));
+    aspectMap.put(
+        "real-run-id2",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)));
 
     String lastRunId = SystemMetadataUtils.getLastIngestedRunId(aspectMap);
     assertEquals(lastRunId, "real-id-1");
@@ -55,15 +66,21 @@ public void testGetLastIngestedRunId() {
   @Test
   public void testGetLastIngestedRuns() {
     EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)
-    ));
-    aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)
-    ));
-    aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)
-    ));
+    aspectMap.put(
+        "default-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)));
+    aspectMap.put(
+        "real-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)));
+    aspectMap.put(
+        "real-run-id2",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)));
 
     List<RunInfo> runs = SystemMetadataUtils.getLastIngestionRuns(aspectMap);
 
@@ -75,15 +92,23 @@ public void testGetLastIngestedRuns() {
   @Test
   public void testGetLastIngestedTimeAllDefaultRunIds() {
     EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)
-    ));
-    aspectMap.put("default-run-id2", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved)
-    ));
-    aspectMap.put("default-run-id3", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(distantLastObserved)
-    ));
+    aspectMap.put(
+        "default-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)));
+    aspectMap.put(
+        "default-run-id2",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved)));
+    aspectMap.put(
+        "default-run-id3",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata()
+                    .setRunId(DEFAULT_RUN_ID)
+                    .setLastObserved(distantLastObserved)));
 
     Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap);
     assertNull(lastObserved, null);
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java
index c42e1bb7f92e0..d3aea2a3dac12 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java
@@ -1,30 +1,19 @@
 package com.linkedin.datahub.upgrade;
 
 import com.google.common.collect.ImmutableList;
-
 import java.util.List;
 
-
-/**
- * Specification of an upgrade to be performed to the DataHub platform.
- */
+/** Specification of an upgrade to be performed to the DataHub platform. */
 public interface Upgrade {
 
-  /**
-   * String identifier for the upgrade.
-   */
+  /** String identifier for the upgrade. */
   String id();
 
-  /**
-   * Returns a set of steps to perform during the upgrade.
-   */
+  /** Returns a set of steps to perform during the upgrade. */
  List<UpgradeStep> steps();
 
-  /**
-   * Returns a set of steps to perform on upgrade success, failure, or abort.
-   */
+  /** Returns a set of steps to perform on upgrade success, failure, or abort. */
   default List<UpgradeCleanupStep> cleanupSteps() {
     return ImmutableList.of();
   }
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java
index bf356c60a21a4..6da656020edf8 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java
@@ -2,21 +2,15 @@
 
 import java.util.function.BiConsumer;
 
-
 /**
  * Step executed on finish of an {@link Upgrade}.
 *
- * Note that this step is not retried, even in case of failures.
+ * <p>Note that this step is not retried, even in case of failures.
 */
 public interface UpgradeCleanupStep {
 
-  /**
-   * Returns an identifier for the upgrade step.
-   */
+  /** Returns an identifier for the upgrade step. */
   String id();
 
-  /**
-   * Returns a function representing the cleanup step's logic.
-   */
+  /** Returns a function representing the cleanup step's logic. */
   BiConsumer<UpgradeContext, UpgradeResult> executable();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java
index e6be6905accee..eee27096e2238 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java
@@ -1,14 +1,14 @@
 package com.linkedin.datahub.upgrade;
 
-import com.linkedin.datahub.upgrade.system.SystemUpdate;
-import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices;
 import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager;
-import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices;
 import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade;
 import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade;
 import com.linkedin.datahub.upgrade.removeunknownaspects.RemoveUnknownAspects;
 import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup;
 import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices;
+import com.linkedin.datahub.upgrade.system.SystemUpdate;
+import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices;
+import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices;
 import java.util.List;
 import javax.inject.Inject;
 import javax.inject.Named;
@@ -17,7 +17,6 @@
 import org.springframework.stereotype.Component;
 import picocli.CommandLine;
 
-
 @Slf4j
 @Component
 public class UpgradeCli implements CommandLineRunner {
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java
index 53a5c0758f318..909ceeb8f3bab 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java
@@ -8,18 +8,23 @@
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.FilterType;
 
-
 @SuppressWarnings("checkstyle:HideUtilityClassConstructor")
 @SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class})
-@ComponentScan(basePackages = {
-    "com.linkedin.gms.factory",
-    "com.linkedin.datahub.upgrade.config",
-    "com.linkedin.metadata.dao.producer"
-}, excludeFilters = {
-    @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class)
-})
+@ComponentScan(
+    basePackages = {
+      "com.linkedin.gms.factory",
+      "com.linkedin.datahub.upgrade.config",
+      "com.linkedin.metadata.dao.producer"
+    },
+    excludeFilters = {
+      @ComponentScan.Filter(
+          type = FilterType.ASSIGNABLE_TYPE,
+          classes = ScheduledAnalyticsFactory.class)
+    })
 public class UpgradeCliApplication {
   public static void main(String[] args) {
-    new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class).web(WebApplicationType.NONE).run(args);
+    new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class)
+        .web(WebApplicationType.NONE)
+        .run(args);
   }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java
index 76cfc6321adfd..25a3d44b6e9da 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java
@@ -4,35 +4,21 @@
 import java.util.Map;
 import java.util.Optional;
 
-
-/**
- * Context about a currently running upgrade.
- */
+/** Context about a currently running upgrade. */
 public interface UpgradeContext {
 
-  /**
-   * Returns the currently running upgrade.
-   */
+  /** Returns the currently running upgrade. */
   Upgrade upgrade();
 
-  /**
-   * Returns the results from steps that have been completed.
-   */
+  /** Returns the results from steps that have been completed. */
   List<UpgradeStepResult> stepResults();
 
-  /**
-   * Returns a report object where human-readable messages can be logged.
-   */
+  /** Returns a report object where human-readable messages can be logged. */
   UpgradeReport report();
 
-  /**
-   * Returns a list of raw arguments that have been provided as input to the upgrade.
-   */
+  /** Returns a list of raw arguments that have been provided as input to the upgrade. */
   List<String> args();
 
-  /**
-   * Returns a map of argument to <>optional</> value, as delimited by an '=' character.
-   */
+  /** Returns a map of argument to optional value, as delimited by an '=' character. */
   Map<String, Optional<String>> parsedArgs();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java
index 927ccc0578308..c01aca12254a3 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java
@@ -2,20 +2,12 @@
 
 import java.util.List;
 
-
-/**
- * Responsible for managing the execution of an {@link Upgrade}.
- */
+/** Responsible for managing the execution of an {@link Upgrade}. */
 public interface UpgradeManager {
 
-  /**
-   * Register an {@link Upgrade} with the manaager.
-   */
+  /** Register an {@link Upgrade} with the manager. */
   void register(Upgrade upgrade);
 
-  /**
-   * Kick off an {@link Upgrade} by identifier.
-   */
+  /** Kick off an {@link Upgrade} by identifier. */
   UpgradeResult execute(String upgradeId, List<String> args);
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java
index 2ed3f105a4eda..1c677f6fe8578 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java
@@ -2,25 +2,15 @@
 
 import java.util.List;
 
-
-/**
- * A human-readable record of upgrade progress + status.
- */
+/** A human-readable record of upgrade progress + status. */
 public interface UpgradeReport {
 
-  /**
-   * Adds a new line to the upgrade report.
-   */
+  /** Adds a new line to the upgrade report. */
   void addLine(String line);
 
-  /**
-   * Adds a new line to the upgrade report with exception
-   */
+  /** Adds a new line to the upgrade report with exception */
   void addLine(String line, Exception e);
 
-  /**
-   * Retrieves the lines in the report.
-   */
+  /** Retrieves the lines in the report. */
   List<String> lines();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java
index cdb94f0c0bba1..25dc758575fd1 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java
@@ -1,36 +1,21 @@
 package com.linkedin.datahub.upgrade;
 
-/**
- * Represents the result of executing an {@link Upgrade}
- */
+/** Represents the result of executing an {@link Upgrade} */
 public interface UpgradeResult {
 
-  /**
-   * The execution result.
-   */
+  /** The execution result. */
   enum Result {
-    /**
-     * Upgrade succeeded.
-     */
+    /** Upgrade succeeded. */
     SUCCEEDED,
-    /**
-     * Upgrade failed.
-     */
+    /** Upgrade failed. */
     FAILED,
-    /**
-     * Upgrade was aborted.
-     */
+    /** Upgrade was aborted. */
     ABORTED
   }
 
-  /**
-   * Returns the {@link Result} of executing an {@link Upgrade}
-   */
+  /** Returns the {@link Result} of executing an {@link Upgrade} */
   Result result();
 
-  /**
-   * Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}.
-   */
+  /** Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. */
   UpgradeReport report();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java
index b85bd7a51e3dd..3f90dcb33a005 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java
@@ -2,39 +2,29 @@
 
 import java.util.function.Function;
 
-
-/**
- * Represents a single executable step in an {@link Upgrade}.
- */
+/** Represents a single executable step in an {@link Upgrade}. */
 public interface UpgradeStep {
 
-  /**
-   * Returns an identifier for the upgrade step.
-   */
+  /** Returns an identifier for the upgrade step. */
   String id();
 
-  /**
-   * Returns a function representing the step's execution logic.
-   */
+  /** Returns a function representing the step's execution logic. */
   Function<UpgradeContext, UpgradeStepResult> executable();
 
-  /**
-   * Returns the number of times the step should be retried.
-   */
+  /** Returns the number of times the step should be retried. */
   default int retryCount() {
     return 0;
   }
 
   /**
-   * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries.
+   * Returns whether the upgrade should proceed if the step fails after exceeding the maximum
+   * retries.
    */
   default boolean isOptional() {
     return false;
   }
 
-  /**
-   * Returns whether or not to skip the step based on the UpgradeContext
-   */
+  /** Returns whether or not to skip the step based on the UpgradeContext */
   default boolean skip(UpgradeContext context) {
     return false;
   }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java
index 60d51f9ba476c..04b3d4b8559e6 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java
@@ -2,52 +2,33 @@
 
 public interface UpgradeStepResult {
 
-  /**
-   * Returns a string identifier associated with the step.
-   */
+  /** Returns a string identifier associated with the step. */
   String stepId();
 
-  /**
-   * The outcome of the step execution.
-   */
+  /** The outcome of the step execution. */
   enum Result {
-    /**
-     * The step succeeded.
-     */
+    /** The step succeeded. */
     SUCCEEDED,
-    /**
-     * The step failed.
-     */
+    /** The step failed. */
    FAILED
   }
 
-  /**
-   * A control-flow action to perform as a result of the step execution.
-   */
+  /** A control-flow action to perform as a result of the step execution. */
   enum Action {
-    /**
-     * Continue attempting the upgrade.
-     */
+    /** Continue attempting the upgrade. */
     CONTINUE,
-    /**
-     * Immediately fail the upgrade, without retry.
-     */
+    /** Immediately fail the upgrade, without retry. */
     FAIL,
-    /**
-     * Immediately abort the upgrade, without retry.
-     */
+    /** Immediately abort the upgrade, without retry. */
     ABORT
   }
 
-  /**
-   * Returns the result of executing the step, either success or failure.
-   */
+  /** Returns the result of executing the step, either success or failure. */
   Result result();
 
-  /**
-   * Returns the action to perform after executing the step, either continue or abort.
-   */
+  /** Returns the action to perform after executing the step, either continue or abort. */
   default Action action() {
     return Action.CONTINUE;
-  };
+  }
+  ;
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java
index a6f3ef5560442..8d5f1118433fc 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java
@@ -19,10 +19,12 @@ public static Map<String, Optional<String>> parseArgs(final List<String> args) {
 
     for (final String arg : args) {
       List<String> parsedArg = Arrays.asList(arg.split(KEY_VALUE_DELIMITER, 2));
-      parsedArgs.put(parsedArg.get(0), parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty());
+      parsedArgs.put(
+          parsedArg.get(0),
+          parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty());
     }
 
     return parsedArgs;
   }
 
-  private UpgradeUtils() { }
+  private UpgradeUtils() {}
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java
index 4f980b11b888a..393b5411599ad 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java
@@ -8,7 +8,6 @@
 import com.linkedin.metadata.graph.GraphService;
 import java.util.function.Function;
 
-
 public class ClearGraphServiceStep implements UpgradeStep {
 
   private final String deletePattern = ".*";
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java
index fca8f60aefd95..230f5a60cb9ff 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java
@@ -8,13 +8,13 @@
 import com.linkedin.metadata.search.EntitySearchService;
 import java.util.function.Function;
 
-
 public class ClearSearchServiceStep implements UpgradeStep {
 
   private final EntitySearchService _entitySearchService;
   private final boolean _alwaysRun;
 
-  public ClearSearchServiceStep(final EntitySearchService entitySearchService, final boolean alwaysRun) {
+  public ClearSearchServiceStep(
+      final EntitySearchService entitySearchService, final boolean alwaysRun) {
     _entitySearchService = entitySearchService;
     _alwaysRun = alwaysRun;
   }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java
index 270aa11c7b070..dd6c3fd1e44aa 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java
@@ -8,7 +8,6 @@
 import java.util.function.Function;
 import lombok.RequiredArgsConstructor;
 
-
 @RequiredArgsConstructor
 public class GMSDisableWriteModeStep implements UpgradeStep {
 
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java
index 8df02123983e8..8a0d374d6ee3e 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java
@@ -8,7 +8,6 @@
 import java.util.function.Function;
 import lombok.RequiredArgsConstructor;
 
-
 @RequiredArgsConstructor
 public class GMSEnableWriteModeStep implements UpgradeStep {
   private final SystemRestliEntityClient _entityClient;
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java
index 1391ef685c335..4e7447cb1e2cb 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.upgrade.common.steps;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -18,9 +20,6 @@
 import java.util.function.Function;
 import lombok.RequiredArgsConstructor;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @RequiredArgsConstructor
 public class GMSQualificationStep implements UpgradeStep {
 
@@ -70,9 +69,16 @@ private boolean isEligible(ObjectNode configJson) {
   @Override
   public Function<UpgradeContext, UpgradeStepResult> executable() {
     return (context) -> {
-      String gmsHost = System.getenv("DATAHUB_GMS_HOST") == null ? "localhost" : System.getenv("DATAHUB_GMS_HOST");
-      String gmsPort = System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT");
-      String gmsProtocol = System.getenv("DATAHUB_GMS_PROTOCOL") == null ? "http" : System.getenv("DATAHUB_GMS_PROTOCOL");
+      String gmsHost =
+          System.getenv("DATAHUB_GMS_HOST") == null
+              ? "localhost"
+              : System.getenv("DATAHUB_GMS_HOST");
+      String gmsPort =
+          System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT");
+      String gmsProtocol =
+          System.getenv("DATAHUB_GMS_PROTOCOL") == null
+              ? "http"
+              : System.getenv("DATAHUB_GMS_PROTOCOL");
 
       try {
         String spec = String.format("%s://%s:%s/config", gmsProtocol, gmsHost, gmsPort);
@@ -81,33 +87,37 @@ public Function<UpgradeContext, UpgradeStepResult> executable() {
         String responseString = convertStreamToString(response);
 
         ObjectMapper mapper = new ObjectMapper();
-        int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH,
-            MAX_JACKSON_STRING_SIZE));
-        mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder()
-            .maxStringLength(maxSize).build());
+        int maxSize =
+            Integer.parseInt(
+                System.getenv()
+                    .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+        mapper
+            .getFactory()
+            .setStreamReadConstraints(
+                StreamReadConstraints.builder().maxStringLength(maxSize).build());
         JsonNode configJson = mapper.readTree(responseString);
         if (isEligible((ObjectNode) configJson)) {
-          return new DefaultUpgradeStepResult(
-              id(),
-              UpgradeStepResult.Result.SUCCEEDED);
+          return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED);
         } else {
-          context.report().addLine(String.format("Failed to qualify GMS. It is not running on the latest version."
-              + "Re-run GMS on the latest datahub release"));
-          return new DefaultUpgradeStepResult(
-              id(),
-              UpgradeStepResult.Result.FAILED);
+          context
+              .report()
+              .addLine(
+                  String.format(
+                      "Failed to qualify GMS. It is not running on the latest version. "
+                          + "Re-run GMS on the latest datahub release"));
+          return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED);
         }
       } catch (Exception e) {
         e.printStackTrace();
-        context.report().addLine(String.format("ERROR: Cannot connect to GMS"
-            + "at %s://host %s port %s. Make sure GMS is on the latest version "
-            + "and is running at that host before starting the migration.",
-            gmsProtocol,
-            gmsHost,
-            gmsPort));
-        return new DefaultUpgradeStepResult(
-            id(),
-            UpgradeStepResult.Result.FAILED);
+        context
+            .report()
+            .addLine(
+                String.format(
+                    "ERROR: Cannot connect to GMS "
+                        + "at %s://host %s port %s. Make sure GMS is on the latest version "
+                        + "and is running at that host before starting the migration.",
+                    gmsProtocol, gmsHost, gmsPort));
+        return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED);
       }
     };
   }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java
index 16e5e4247267f..abd144bf453ed 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java
@@ -6,12 +6,12 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-
 @Configuration
 public class BackfillBrowsePathsV2Config {
 
   @Bean
-  public BackfillBrowsePathsV2 backfillBrowsePathsV2(EntityService entityService, SearchService searchService) {
+  public BackfillBrowsePathsV2 backfillBrowsePathsV2(
+      EntityService entityService, SearchService searchService) {
     return new BackfillBrowsePathsV2(entityService, searchService);
   }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java
index e98f0dc2093f6..1e9298bc60612 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java
@@ -10,16 +10,24 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-
 @Configuration
 public class BuildIndicesConfig {
   @Bean(name = "buildIndices")
-  public BuildIndices buildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService,
-      final EntitySearchService entitySearchService, final GraphService graphService,
-      final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents,
-      final ConfigurationProvider configurationProvider) {
+  public BuildIndices buildIndices(
+      final SystemMetadataService systemMetadataService,
+      final TimeseriesAspectService timeseriesAspectService,
+      final EntitySearchService entitySearchService,
+      final GraphService graphService,
+      final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents
+          baseElasticSearchComponents,
+      final ConfigurationProvider configurationProvider) {
 
-    return new BuildIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService,
-        baseElasticSearchComponents, configurationProvider);
+    return new BuildIndices(
+        systemMetadataService,
+        timeseriesAspectService,
+        entitySearchService,
+        graphService,
+        baseElasticSearchComponents,
+        configurationProvider);
   }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java
index 558c9780911ac..5bd7244a92e45 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java
@@ -10,16 +10,24 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-
 @Configuration
 public class CleanIndicesConfig {
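  // These upgrade @Configuration classes all follow the same wiring pattern: each upgrade is
  // exposed as a named Spring bean, and UpgradeCli resolves it by name via javax.inject
  // (its imports of @Inject and @Named are visible in the UpgradeCli diff above). A minimal
  // sketch of the consuming side, written as a comment so it does not alter the patch; the
  // field placement inside UpgradeCli is an assumption:
  //
  //   @Inject
  //   @Named("cleanIndices")
  //   private CleanIndices cleanIndices;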
@Bean(name = "cleanIndices") - public CleanIndices cleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public CleanIndices cleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new CleanIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new CleanIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 23ea81009fa1d..24bcec5852b4f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.config; +import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; + import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -12,17 +14,18 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; - - @Configuration public class NoCodeCleanupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeCleanup") - @DependsOn({"ebeanServer", "graphService", "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN}) + @DependsOn({ + "ebeanServer", + "graphService", + "elasticSearchRestHighLevelClient", + INDEX_CONVENTION_BEAN + }) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index cd264e529e9a5..68009d7ed1718 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -12,12 +12,10 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class NoCodeUpgradeConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) @@ -25,7 +23,8 @@ 
public class NoCodeUpgradeConfig { public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index cdc739efc416d..0b46133209382 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -5,7 +5,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 97a08800534de..743e4ffe84b0e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -14,25 +14,30 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class RestoreBackupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreBackup") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "graphService", - "searchService", "entityRegistry"}) + @DependsOn({ + "ebeanServer", + "entityService", + "systemRestliEntityClient", + "graphService", + "searchService", + "entityRegistry" + }) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final GraphService graphClient = applicationContext.getBean(GraphService.class); final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreBackup(ebeanServer, entityService, entityRegistry, entityClient, - graphClient, searchClient); + return new RestoreBackup( + ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 663cad4a4bff6..d258c4a4d1a52 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -13,11 +13,9 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class RestoreIndicesConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) @@ -25,11 +23,12 @@ public class RestoreIndicesConfig { public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); + final EntitySearchService entitySearchService = + applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices(ebeanServer, entityService, entityRegistry, entitySearchService, - graphService); + return new RestoreIndices( + ebeanServer, entityService, entityRegistry, entitySearchService, graphService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 9848fc7a0008f..3b63d81486eb4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -24,18 +24,21 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Slf4j @Configuration public class SystemUpdateConfig { @Bean(name = "systemUpdate") - public SystemUpdate systemUpdate(final BuildIndices buildIndices, final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + public SystemUpdate systemUpdate( + final BuildIndices buildIndices, + final CleanIndices cleanIndices, + @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate(buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + return new SystemUpdate( + buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -50,16 +53,18 @@ public String getRevision() { @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; - @Autowired - private KafkaHealthChecker kafkaHealthChecker; + @Autowired private KafkaHealthChecker kafkaHealthChecker; @Bean(name = "duheKafkaEventProducer") - protected KafkaEventProducer duheKafkaEventProducer(@Qualifier("configurationProvider") ConfigurationProvider provider, - 
KafkaProperties properties, - @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { + protected KafkaEventProducer duheKafkaEventProducer( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties properties, + @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { KafkaConfiguration kafkaConfiguration = provider.getKafka(); - Producer<String, IndexedRecord> producer = new KafkaProducer<>( - DataHubKafkaProducerFactory.buildProducerProperties(duheSchemaRegistryConfig, kafkaConfiguration, properties)); + Producer<String, IndexedRecord> producer = + new KafkaProducer<>( + DataHubKafkaProducerFactory.buildProducerProperties( + duheSchemaRegistryConfig, kafkaConfiguration, properties)); return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java index 972b55f2001f1..6cc94fbed5bf3 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java @@ -9,7 +9,6 @@ import java.util.Map; import java.util.Optional; - public class DefaultUpgradeContext implements UpgradeContext { private final Upgrade _upgrade; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java index a642ee3fb0a90..623c8a71e861d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java @@ -17,7 +17,6 @@ import java.util.Map; import javax.annotation.Nonnull; - public class DefaultUpgradeManager implements UpgradeManager { private final Map<String, Upgrade> _upgrades = new HashMap<>(); @@ -32,16 +31,19 @@ public UpgradeResult execute(String upgradeId, List<String> args) { if (_upgrades.containsKey(upgradeId)) { return executeInternal(_upgrades.get(upgradeId), args); } - throw new IllegalArgumentException(String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); + throw new IllegalArgumentException( + String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); } private UpgradeResult executeInternal(Upgrade upgrade, List<String> args) { final UpgradeReport upgradeReport = new DefaultUpgradeReport(); - final UpgradeContext context = new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); + final UpgradeContext context = + new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); upgradeReport.addLine(String.format("Starting upgrade with id %s...", upgrade.id())); UpgradeResult result = executeInternal(context); upgradeReport.addLine( - String.format("Upgrade %s completed with result %s. Exiting...", upgrade.id(), result.result())); + String.format( + "Upgrade %s completed with result %s. 
Exiting...", upgrade.id(), result.result())); executeCleanupInternal(context, result); return result; } @@ -58,12 +60,16 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (step.skip(context)) { upgradeReport.addLine( - String.format(String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); continue; } upgradeReport.addLine( - String.format(String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); final UpgradeStepResult stepResult = executeStepInternal(context, step); stepResults.add(stepResult); @@ -71,7 +77,8 @@ private UpgradeResult executeInternal(UpgradeContext context) { // Apply Actions if (UpgradeStepResult.Action.ABORT.equals(stepResult.action())) { upgradeReport.addLine( - String.format("Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", + String.format( + "Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", step.id())); return new DefaultUpgradeResult(UpgradeResult.Result.ABORTED, upgradeReport); } @@ -80,23 +87,27 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (UpgradeStepResult.Result.FAILED.equals(stepResult.result())) { if (step.isOptional()) { upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", i + 1, - steps.size(), step.id())); + String.format( + "Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", + i + 1, steps.size(), step.id())); continue; } // Required step failed. Fail the entire upgrade process. upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Failed after %s retries.", i + 1, steps.size(), step.id(), - step.retryCount())); + String.format( + "Failed Step %s/%s: %s. Failed after %s retries.", + i + 1, steps.size(), step.id(), step.retryCount())); upgradeReport.addLine(String.format("Exiting upgrade %s with failure.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.FAILED, upgradeReport); } - upgradeReport.addLine(String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); + upgradeReport.addLine( + String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); } - upgradeReport.addLine(String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); + upgradeReport.addLine( + String.format("Success! 
Completed upgrade with id %s successfully.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.SUCCEEDED, upgradeReport); } @@ -105,15 +116,19 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte UpgradeStepResult result = null; int maxAttempts = retryCount + 1; for (int i = 0; i < maxAttempts; i++) { - try (Timer.Context completionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { - try (Timer.Context executionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { + try (Timer.Context completionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { + try (Timer.Context executionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { result = step.executable().apply(context); } if (result == null) { // Failed to even retrieve a result. Create a default failure result. result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); - context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); + context + .report() + .addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); MetricUtils.counter(MetricRegistry.name(step.id(), "retry")).inc(); } @@ -122,9 +137,11 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); + String.format( + "Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); MetricUtils.counter(MetricRegistry.name(step.id(), "failed")).inc(); result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); @@ -139,7 +156,11 @@ private void executeCleanupInternal(UpgradeContext context, UpgradeResult result try { step.executable().accept(context, result); } catch (Exception e) { - context.report().addLine(String.format("Caught exception while executing cleanup step with id %s", step.id())); + context + .report() + .addLine( + String.format( + "Caught exception while executing cleanup step with id %s", step.id())); } } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java index 19706937e20ca..913b0ff20e6ff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java @@ -1,10 +1,9 @@ package com.linkedin.datahub.upgrade.impl; import com.linkedin.datahub.upgrade.UpgradeReport; -import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.List; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class DefaultUpgradeReport implements UpgradeReport { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java index 6ecb522848291..cf0e7221b406b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java @@ -3,7 +3,6 @@ import 
com.linkedin.datahub.upgrade.UpgradeReport; import com.linkedin.datahub.upgrade.UpgradeResult; - public class DefaultUpgradeResult implements UpgradeResult { private final Result _result; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java index d0c086f607edd..e11eaf89bfc8d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; - public class DefaultUpgradeStepResult implements UpgradeStepResult { private final String _stepId; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java index 7ed7169bf20bc..3b3098f43c473 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import io.ebean.Database; import java.util.function.Function; @@ -36,40 +36,42 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - - DbType targetDbType = context.parsedArgs().containsKey(DB_TYPE_ARG) - ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) - : DbType.MYSQL; + DbType targetDbType = + context.parsedArgs().containsKey(DB_TYPE_ARG) + ? 
DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) + : DbType.MYSQL; String sqlUpdateStr; switch (targetDbType) { case POSTGRES: - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint not null,\n" - + " metadata text not null,\n" - + " systemmetadata text,\n" - + " createdon timestamp not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint not null,\n" + + " metadata text not null,\n" + + " systemmetadata text,\n" + + " createdon timestamp not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; default: // both mysql and maria - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint(20) not null,\n" - + " metadata longtext not null,\n" - + " systemmetadata longtext,\n" - + " createdon datetime(6) not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint(20) not null,\n" + + " metadata longtext not null,\n" + + " systemmetadata longtext,\n" + + " createdon datetime(6) not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; } @@ -77,9 +79,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { _server.execute(_server.createSqlUpdate(sqlUpdateStr)); } catch (Exception e) { context.report().addLine("Failed to create table metadata_aspect_v2", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index 1b5770a11ff62..ac56e5e91c72b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -1,22 +1,22 @@ package com.linkedin.datahub.upgrade.nocode; +import com.datahub.util.RecordUtils; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import 
com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.utils.PegasusUtils; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.util.Pair; import io.ebean.Database; import io.ebean.PagedList; @@ -29,13 +29,13 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; - public class DataMigrationStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; private static final long DEFAULT_BATCH_DELAY_MS = 250; - private static final String BROWSE_PATHS_ASPECT_NAME = PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); + private static final String BROWSE_PATHS_ASPECT_NAME = + PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); private final Database _server; private final EntityService _entityService; @@ -64,7 +64,6 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - context.report().addLine("Starting data migration..."); final int rowCount = _server.find(EbeanAspectV1.class).findCount(); context.report().addLine(String.format("Found %s rows in legacy aspects table", rowCount)); @@ -74,7 +73,11 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { int count = getBatchSize(context.parsedArgs()); while (start < rowCount) { - context.report().addLine(String.format("Reading rows %s through %s from legacy aspects table.", start, start + count)); + context + .report() + .addLine( + String.format( + "Reading rows %s through %s from legacy aspects table.", start, start + count)); PagedList<EbeanAspectV1> rows = getPagedAspects(start, count); for (EbeanAspectV1 oldAspect : rows.getList()) { @@ -84,11 +87,18 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { // 1. Instantiate the RecordTemplate class associated with the aspect. final RecordTemplate aspectRecord; try { - aspectRecord = RecordUtils.toRecordTemplate( - Class.forName(oldAspectName).asSubclass(RecordTemplate.class), - oldAspect.getMetadata()); + aspectRecord = + RecordUtils.toRecordTemplate( + Class.forName(oldAspectName).asSubclass(RecordTemplate.class), + oldAspect.getMetadata()); } catch (Exception e) { - context.report().addLine(String.format("Failed to convert aspect with name %s into a RecordTemplate class", oldAspectName), e); + context + .report() + .addLine( + String.format( + "Failed to convert aspect with name %s into a RecordTemplate class", + oldAspectName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -97,7 +107,11 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { urn = Urn.createFromString(oldAspect.getKey().getUrn()); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to bind Urn with value %s into Urn object", oldAspect.getKey().getUrn()), e); + throw new RuntimeException( + String.format( + "Failed to bind Urn with value %s into Urn object", + oldAspect.getKey().getUrn()), + e); } // 3. Verify that the entity associated with the aspect is found in the registry. 
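// A condensed sketch of steps 1-2 above, assuming `rawUrn` and `metadataJson` stand in
// for the legacy row's urn and metadata columns; the real step reports failures through
// the upgrade context instead of rethrowing.
try {
  RecordTemplate aspectRecord =
      RecordUtils.toRecordTemplate(
          Class.forName(oldAspectName).asSubclass(RecordTemplate.class), metadataJson);
  Urn urn = Urn.createFromString(rawUrn);
} catch (ClassNotFoundException | java.net.URISyntaxException e) {
  throw new RuntimeException("Failed to rehydrate aspect " + oldAspectName, e);
}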
@@ -106,7 +120,12 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find Entity with name %s in Entity Registry", entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -115,9 +134,13 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { newAspectName = PegasusUtils.getAspectNameFromSchema(aspectRecord.schema()); } catch (Exception e) { - context.report().addLine(String.format("Failed to retrieve @Aspect name from schema %s, urn %s", - aspectRecord.schema().getFullName(), - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to retrieve @Aspect name from schema %s, urn %s", + aspectRecord.schema().getFullName(), entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -126,23 +149,24 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { aspectSpec = entitySpec.getAspectSpec(newAspectName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - newAspectName, - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + newAspectName, entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } // 6. Write the row back using the EntityService boolean emitMae = oldAspect.getKey().getVersion() == 0L; _entityService.ingestAspects( - urn, - List.of(Pair.of(newAspectName, aspectRecord)), - toAuditStamp(oldAspect), - null - ); + urn, List.of(Pair.of(newAspectName, aspectRecord)), toAuditStamp(oldAspect), null); // 7. If necessary, emit a browse path aspect. - if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) && !urnsWithBrowsePath.contains(urn)) { + if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) + && !urnsWithBrowsePath.contains(urn)) { // Emit a browse path aspect. 
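// (Aside, an illustrative sketch only: a BrowsePaths aspect is just a list of path
// strings; the migration derives the real path from the entity's key aspect.)
//   BrowsePaths examplePaths = new BrowsePaths();
//   examplePaths.setPaths(new StringArray(Arrays.asList("/prod/datasets/example")));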
final BrowsePaths browsePaths; try { @@ -152,7 +176,11 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); browsePathsStamp.setTime(System.currentTimeMillis()); - _entityService.ingestAspects(urn, List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), browsePathsStamp, null); + _entityService.ingestAspects( + urn, + List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), + browsePathsStamp, + null); urnsWithBrowsePath.add(urn); } catch (URISyntaxException e) { @@ -167,13 +195,17 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { TimeUnit.MILLISECONDS.sleep(getBatchDelayMs(context.parsedArgs())); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } } if (totalRowsMigrated != rowCount) { - context.report().addLine(String.format("Number of rows migrated %s does not equal the number of input rows %s...", - totalRowsMigrated, - rowCount)); + context + .report() + .addLine( + String.format( + "Number of rows migrated %s does not equal the number of input rows %s...", + totalRowsMigrated, rowCount)); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -195,9 +227,9 @@ private AuditStamp toAuditStamp(final EbeanAspectV1 aspect) { return auditStamp; } - private PagedList<EbeanAspectV1> getPagedAspects(final int start, final int pageSize) { - return _server.find(EbeanAspectV1.class) + return _server + .find(EbeanAspectV1.class) .select(EbeanAspectV1.ALL_COLUMNS) .setFirstRow(start) .setMaxRows(pageSize) @@ -219,7 +251,8 @@ private long getBatchDelayMs(final Map<String, Optional<String>> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (parsedArgs.containsKey(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME) && parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).isPresent()) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index a299deb874721..6753d309b9f50 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -30,10 +30,7 @@ public NoCodeUpgrade( final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps( - server, entityService, - entityRegistry, - entityClient); + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); _cleanupSteps = buildCleanupSteps(); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java index cf8e848762f14..6180573d902d2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java @@ -7,10 +7,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. */ public class RemoveAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java index 0fe9afa8cc6f8..d22af9d292400 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; import io.ebean.Database; import java.util.function.Function; @@ -29,7 +29,6 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - if (context.parsedArgs().containsKey(NoCodeUpgrade.FORCE_UPGRADE_ARG_NAME)) { context.report().addLine("Forced upgrade detected. Proceeding with upgrade..."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -43,7 +42,8 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { } // Unqualified (Table already exists) context.report().addLine("Failed to qualify upgrade candidate. Aborting the upgrade..."); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); + return new DefaultUpgradeStepResult( + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); @@ -67,8 +67,13 @@ private boolean isQualified(Database server, UpgradeContext context) { return true; } context.report().addLine(String.format("-- V2 table has %d rows", v2TableRowCount)); - context.report().addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); - context.report().addLine("-- If V2 table has significantly less rows, consider running the forced upgrade. "); + context + .report() + .addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); + context + .report() + .addLine( + "-- If V2 table has significantly less rows, consider running the forced upgrade. 
"); return false; } context.report().addLine("-- V2 table does not exist"); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java index 8005e31e01c67..ba0a0124545e9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java @@ -7,7 +7,6 @@ import io.ebean.Database; import java.util.function.Function; - // Do we need SQL-tech specific migration paths? public class DeleteAspectTableStep implements UpgradeStep { @@ -34,9 +33,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); } catch (Exception e) { context.report().addLine("Failed to delete data from legacy table metadata_aspect", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java index 12ff125a05127..5066e05f8bf5a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java @@ -6,10 +6,8 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; // Do we need SQL-tech specific migration paths? @Slf4j @@ -44,9 +42,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { } } catch (Exception e) { context.report().addLine("Failed to delete legacy data from graph", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java index 9a64d5fe1810c..05656373377b9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java @@ -11,7 +11,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; - // Do we need SQL-tech specific migration paths? 
@RequiredArgsConstructor public class DeleteLegacySearchIndicesStep implements UpgradeStep { @@ -20,7 +19,8 @@ public class DeleteLegacySearchIndicesStep implements UpgradeStep { private final RestHighLevelClient _searchClient; - public DeleteLegacySearchIndicesStep(final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public DeleteLegacySearchIndicesStep( + final RestHighLevelClient searchClient, final IndexConvention indexConvention) { _searchClient = searchClient; deletePattern = indexConvention.getPrefix().map(p -> p + "_").orElse("") + "*document*"; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index a5d8d6ce9b666..8a267be6ad808 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -11,15 +11,17 @@ import java.util.List; import org.opensearch.client.RestHighLevelClient; - public class NoCodeCleanupUpgrade implements Upgrade { private final List<UpgradeStep> _steps; private final List<UpgradeCleanupStep> _cleanupSteps; // Upgrade requires the Database. - public NoCodeCleanupUpgrade(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public NoCodeCleanupUpgrade( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); _cleanupSteps = buildCleanupSteps(); } @@ -43,8 +45,11 @@ private List<UpgradeCleanupStep> buildCleanupSteps() { return Collections.emptyList(); } - private List<UpgradeStep> buildUpgradeSteps(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + private List<UpgradeStep> buildUpgradeSteps( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { final List<UpgradeStep> steps = new ArrayList<>(); steps.add(new NoCodeUpgradeQualificationStep(server)); steps.add(new DeleteAspectTableStep(server)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java index 67a226f8f0676..15c7584532e2c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java @@ -8,7 +8,6 @@ import io.ebean.Database; import java.util.function.Function; - public class NoCodeUpgradeQualificationStep implements UpgradeStep { private final Database _server; @@ -33,23 +32,19 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { // Unqualified (V2 Table does not exist) - context.report().addLine("You have not successfully migrated yet. Aborting the cleanup..."); + context + .report() + .addLine("You have not successfully migrated yet. 
Aborting the cleanup..."); return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED, - UpgradeStepResult.Action.ABORT); + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } else { // Qualified. context.report().addLine("Found qualified upgrade candidate. Proceeding with upgrade..."); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists: %s", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index b55d439745e69..7e55dcddc639f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -11,7 +11,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements UpgradeStep { @@ -33,9 +32,10 @@ public boolean skip(UpgradeContext context) { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return upgradeContext -> { - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, - new HashMap<>(), true); - return (UpgradeStepResult) new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, new HashMap<>(), true); + return (UpgradeStepResult) + new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index f8af69dba0865..dc95b7605ef88 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -8,7 +8,6 @@ import java.util.ArrayList; import java.util.List; - public class RemoveUnknownAspects implements Upgrade { private final List<UpgradeStep> _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java index 0303739e62afe..addf6dcb89c1a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java @@ -8,10 +8,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. 
*/ public class ClearAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index 9175ad606e3c8..b11abb2d6bc23 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -17,7 +17,6 @@ import java.util.ArrayList; import java.util.List; - public class RestoreBackup implements Upgrade { private final List<UpgradeStep> _steps; @@ -29,7 +28,8 @@ public RestoreBackup( final SystemRestliEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + _steps = + buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 42f7f0073e59b..5c4567c856d0e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; @@ -35,7 +34,6 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class RestoreStorageStep implements UpgradeStep { private static final int REPORT_BATCH_SIZE = 1000; @@ -43,11 +41,13 @@ public class RestoreStorageStep implements UpgradeStep { private final EntityService _entityService; private final EntityRegistry _entityRegistry; - private final Map<String, Class<? extends BackupReader<? extends ReaderWrapper<?>>>> _backupReaders; + private final Map<String, Class<? extends BackupReader<? extends ReaderWrapper<?>>>> + _backupReaders; private final ExecutorService _fileReaderThreadPool; private final ExecutorService _gmsThreadPool; - public RestoreStorageStep(final EntityService entityService, final EntityRegistry entityRegistry) { + public RestoreStorageStep( + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); @@ -82,7 +82,6 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - context.report().addLine("Starting backup restore..."); int numRows = 0; Optional<String> backupReaderName = context.parsedArgs().get("BACKUP_READER"); @@ -93,19 +92,32 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } - Class<? extends BackupReader<? extends ReaderWrapper>> clazz = _backupReaders.get(backupReaderName.get()); + Class<? extends BackupReader<? 
extends ReaderWrapper>> clazz = + _backupReaders.get(backupReaderName.get()); List<String> argNames = BackupReaderArgs.getArgNames(clazz); - List<Optional<String>> args = argNames.stream().map(argName -> context.parsedArgs().get(argName)).collect( - Collectors.toList()); + List<Optional<String>> args = + argNames.stream() + .map(argName -> context.parsedArgs().get(argName)) + .collect(Collectors.toList()); BackupReader<? extends ReaderWrapper> backupReader; try { backupReader = clazz.getConstructor(List.class).newInstance(args); - } catch (InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { + } catch (InstantiationException + | InvocationTargetException + | IllegalAccessException + | NoSuchMethodException e) { e.printStackTrace(); - context.report().addLine("Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); - throw new IllegalArgumentException("Invalid BackupReader: " + clazz.getSimpleName() + ", need to implement proper constructor."); + context + .report() + .addLine( + "Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); + throw new IllegalArgumentException( + "Invalid BackupReader: " + + clazz.getSimpleName() + + ", need to implement proper constructor."); } - EbeanAspectBackupIterator<? extends ReaderWrapper> iterator = backupReader.getBackupIterator(context); + EbeanAspectBackupIterator<? extends ReaderWrapper> iterator = + backupReader.getBackupIterator(context); ReaderWrapper reader; List<Future<?>> futureList = new ArrayList<>(); while ((reader = iterator.getNextReader()) != null) { @@ -138,9 +150,12 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), e); + String.format( + "Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), + e); continue; } @@ -150,8 +165,11 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format("Failed to find Entity with name %s in Entity Registry", entityName), + e); continue; } final String aspectName = aspect.getKey().getAspect(); @@ -160,11 +178,16 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final RecordTemplate aspectRecord; try { aspectRecord = - EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to create aspect record with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to create aspect record with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } @@ -173,17 +196,27 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { aspectSpec = entitySpec.getAspectSpec(aspectName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find aspect spec with name %s 
associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } // 5. Write the row back using the EntityService final long version = aspect.getKey().getVersion(); final AuditStamp auditStamp = toAuditStamp(aspect); - futureList.add(_gmsThreadPool.submit(() -> - _entityService.ingestAspects(urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null).get(0).getNewValue())); + futureList.add( + _gmsThreadPool.submit( + () -> + _entityService + .ingestAspects( + urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null) + .get(0) + .getNewValue())); if (numRows % REPORT_BATCH_SIZE == 0) { for (Future<?> future : futureList) { try { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 7ea1811adfdd8..212f0da9f592d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -3,10 +3,10 @@ import com.linkedin.datahub.upgrade.UpgradeContext; import javax.annotation.Nonnull; - /** - * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 object to be - * ingested back into GMS. Must have a constructor that takes a List of Optional Strings + * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 + * object to be ingested back into GMS. Must have a constructor that takes a List of Optional + * Strings */ public interface BackupReader<T extends ReaderWrapper> { String getName(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java index 20f43b5414ddd..6176d56fbec95 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java @@ -4,14 +4,9 @@ import java.util.List; import java.util.Map; - -/** - * Retains a map of what arguments are passed in to a backup reader - */ +/** Retains a map of what arguments are passed in to a backup reader */ public final class BackupReaderArgs { - private BackupReaderArgs() { - - } + private BackupReaderArgs() {} private static final Map<Class<? 
extends BackupReader>, List<String>> ARGS_MAP; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java index 3a2505311e245..cce5928277a20 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java @@ -7,10 +7,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * Base interface for iterators that retrieves EbeanAspectV2 objects - * This allows us to restore from backups of various format + * Base interface for iterators that retrieve EbeanAspectV2 objects. This allows us to restore from + * backups of various formats. */ @Slf4j @RequiredArgsConstructor @@ -35,12 +34,13 @@ public T getNextReader() { @Override public void close() { - _readers.forEach(reader -> { - try { - reader.close(); - } catch (IOException e) { - log.error("Error while closing parquet reader", e); - } - }); + _readers.forEach( + reader -> { + try { + reader.close(); + } catch (IOException e) { + log.error("Error while closing parquet reader", e); + } + }); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java index 9b8a3133ac04c..9f0f81f466cfa 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java @@ -14,10 +14,7 @@ import org.apache.parquet.avro.AvroParquetReader; import org.apache.parquet.hadoop.ParquetReader; - -/** - * BackupReader for retrieving EbeanAspectV2 objects from a local parquet file - */ +/** BackupReader for retrieving EbeanAspectV2 objects from a local parquet file */ @Slf4j public class LocalParquetReader implements BackupReader<ParquetReaderWrapper> { @@ -46,16 +43,20 @@ public String getName() { public EbeanAspectBackupIterator<ParquetReaderWrapper> getBackupIterator(UpgradeContext context) { Optional<String> path = context.parsedArgs().get("BACKUP_FILE_PATH"); if (!path.isPresent()) { - context.report().addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); + context + .report() + .addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); throw new IllegalArgumentException( "BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); } try { - ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(new Path(path.get())).build(); - return new EbeanAspectBackupIterator<>(ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); + ParquetReader<GenericRecord> reader = + AvroParquetReader.<GenericRecord>builder(new Path(path.get())).build(); + return new EbeanAspectBackupIterator<>( + ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); } catch (IOException e) { throw new RuntimeException(String.format("Failed to build ParquetReader: %s", e)); } } -} \ No newline at end of file +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java index 2b7cacff65249..01c502221f77f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java @@ -15,9 +15,9 @@ @Slf4j public class ParquetReaderWrapper extends ReaderWrapper<GenericRecord> { - private final static long NANOS_PER_MILLISECOND = 1000000; - private final static long MILLIS_IN_DAY = 86400000; - private final static long JULIAN_EPOCH_OFFSET_DAYS = 2440588; + private static final long NANOS_PER_MILLISECOND = 1000000; + private static final long MILLIS_IN_DAY = 86400000; + private static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588; private final ParquetReader<GenericRecord> _parquetReader; @@ -45,22 +45,30 @@ EbeanAspectV2 convertRecord(GenericRecord record) { ts = (Long) record.get("createdon"); } - return new EbeanAspectV2(record.get("urn").toString(), record.get("aspect").toString(), - (Long) record.get("version"), record.get("metadata").toString(), - Timestamp.from(Instant.ofEpochMilli(ts / 1000)), record.get("createdby").toString(), + return new EbeanAspectV2( + record.get("urn").toString(), + record.get("aspect").toString(), + (Long) record.get("version"), + record.get("metadata").toString(), + Timestamp.from(Instant.ofEpochMilli(ts / 1000)), + record.get("createdby").toString(), Optional.ofNullable(record.get("createdfor")).map(Object::toString).orElse(null), Optional.ofNullable(record.get("systemmetadata")).map(Object::toString).orElse(null)); } private long convertFixed96IntToTs(GenericFixed createdon) { // From https://github.com/apache/parquet-format/pull/49/filesParquetTimestampUtils.java - // and ParquetTimestampUtils.java from https://github.com/kube-reporting/presto/blob/master/presto-parquet/ + // and ParquetTimestampUtils.java from + // https://github.com/kube-reporting/presto/blob/master/presto-parquet/ // src/main/java/io/prestosql/parquet/ParquetTimestampUtils.java byte[] bytes = createdon.bytes(); // little endian encoding - need to invert byte order - long timeOfDayNanos = Longs.fromBytes(bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); + long timeOfDayNanos = + Longs.fromBytes( + bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); int julianDay = Ints.fromBytes(bytes[11], bytes[10], bytes[9], bytes[8]); - return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + (timeOfDayNanos / NANOS_PER_MILLISECOND); + return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + + (timeOfDayNanos / NANOS_PER_MILLISECOND); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java index d0db42e678eea..48d0fa2fda04c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java @@ -5,9 +5,10 @@ import java.io.IOException; import lombok.extern.slf4j.Slf4j; - /** - * Abstract class that reads entries from a given source and transforms then into {@link EbeanAspectV2} instances. 
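// ---------------------------------------------------------------------------
// A minimal standalone sketch of the INT96 decoding performed by
// ParquetReaderWrapper.convertFixed96IntToTs above. Parquet INT96 timestamps
// pack a little-endian time-of-day in nanoseconds (bytes 0-7) followed by a
// little-endian Julian day (bytes 8-11); epoch millis follow from subtracting
// the Julian day of 1970-01-01 (2440588). This sketch uses java.nio.ByteBuffer
// instead of Guava; the class and method names here are illustrative only.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class Int96Timestamps {
  private static final long NANOS_PER_MILLISECOND = 1_000_000L;
  private static final long MILLIS_IN_DAY = 86_400_000L;
  private static final long JULIAN_EPOCH_OFFSET_DAYS = 2_440_588L;

  private Int96Timestamps() {}

  /** Decodes a 12-byte Parquet INT96 timestamp into epoch milliseconds. */
  static long toEpochMillis(byte[] int96) {
    ByteBuffer buf = ByteBuffer.wrap(int96).order(ByteOrder.LITTLE_ENDIAN);
    long timeOfDayNanos = buf.getLong(); // bytes 0-7, little endian
    int julianDay = buf.getInt(); // bytes 8-11, little endian
    return (julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY
        + timeOfDayNanos / NANOS_PER_MILLISECOND;
  }
}
// ---------------------------------------------------------------------------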
+ * Abstract class that reads entries from a given source and transforms them into {@link + * EbeanAspectV2} instances. + * * @param <T> The object type to read from a reader source. */ @Slf4j @@ -69,9 +70,15 @@ record = read(); abstract EbeanAspectV2 convertRecord(T record); private void printStat(String prefix) { - log.info("{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," - + " records failed: {}, Total millis in convert: {}", prefix, _fileName, - recordsProcessed, totalTimeSpentInRead / 1000 / 1000, recordsSkipped, recordsFailed, + log.info( + "{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," + + " records failed: {}, Total millis in convert: {}", + prefix, + _fileName, + recordsProcessed, + totalTimeSpentInRead / 1000 / 1000, + recordsSkipped, + recordsFailed, totalTimeSpentInConvert / 1000 / 1000); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 3c0a9762a28c9..8bb3b0073710a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -14,7 +14,6 @@ import java.util.ArrayList; import java.util.List; - public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; public static final String BATCH_DELAY_MS_ARG_NAME = "batchDelayMs"; @@ -29,8 +28,11 @@ public class RestoreIndices implements Upgrade { private final List<UpgradeStep> _steps; - public RestoreIndices(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + public RestoreIndices( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); } @@ -45,8 +47,11 @@ public List<UpgradeStep> steps() { return _steps; } - private List<UpgradeStep> buildSteps(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + private List<UpgradeStep> buildSteps( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { final List<UpgradeStep> steps = new ArrayList<>(); steps.add(new ClearSearchServiceStep(entitySearchService, false)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index 2ac4fea2e653a..ce59cf2edb84e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.restoreindices; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -11,7 +13,6 @@ import
com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import io.ebean.ExpressionList; - import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -23,9 +24,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; @@ -38,19 +36,24 @@ public class SendMAEStep implements UpgradeStep { private final EntityService _entityService; public class KafkaJob implements Callable<RestoreIndicesResult> { - UpgradeContext context; - RestoreIndicesArgs args; - public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { - this.context = context; - this.args = args; - } - @Override - public RestoreIndicesResult call() { - return _entityService.restoreIndices(args, context.report()::addLine); - } + UpgradeContext context; + RestoreIndicesArgs args; + + public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { + this.context = context; + this.args = args; + } + + @Override + public RestoreIndicesResult call() { + return _entityService.restoreIndices(args, context.report()::addLine); + } } - public SendMAEStep(final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { + public SendMAEStep( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry) { _server = server; _entityService = entityService; } @@ -67,7 +70,7 @@ public int retryCount() { private List<RestoreIndicesResult> iterateFutures(List<Future<RestoreIndicesResult>> futures) { List<RestoreIndicesResult> result = new ArrayList<>(); - for (Future<RestoreIndicesResult> future: new ArrayList<>(futures)) { + for (Future<RestoreIndicesResult> future : new ArrayList<>(futures)) { if (future.isDone()) { try { result.add(future.get()); @@ -100,9 +103,10 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) { private int getRowCount(RestoreIndicesArgs args) { ExpressionList<EbeanAspectV2> countExp = - _server.find(EbeanAspectV2.class) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); + _server + .find(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); if (args.aspectName != null) { countExp = countExp.eq(EbeanAspectV2.ASPECT_COLUMN, args.aspectName); } @@ -120,13 +124,18 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { RestoreIndicesResult finalJobResult = new RestoreIndicesResult(); RestoreIndicesArgs args = getArgs(context); - ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); + ThreadPoolExecutor executor = + (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); context.report().addLine("Sending MAE from local DB"); long startTime = System.currentTimeMillis(); final int rowCount = getRowCount(args); - context.report().addLine(String.format("Found %s latest aspects in aspects table in %.2f minutes.", - rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); + context + .report() + .addLine( + String.format( + "Found %s latest aspects in aspects table in %.2f minutes.", + rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); int start = args.start; List<Future<RestoreIndicesResult>> futures = new ArrayList<>(); @@ -139,7 +148,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { } while 
(futures.size() > 0) { List<RestoreIndicesResult> tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult: tmpResults) { + for (RestoreIndicesResult tmpResult : tmpResults) { reportStats(context, finalJobResult, tmpResult, rowCount, startTime); } } @@ -149,16 +158,23 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { if (rowCount > 0) { percentFailed = (float) (rowCount - finalJobResult.rowsMigrated) * 100 / rowCount; } - context.report().addLine(String.format( - "Failed to send MAEs for %d rows (%.2f%% of total).", - rowCount - finalJobResult.rowsMigrated, percentFailed)); + context + .report() + .addLine( + String.format( + "Failed to send MAEs for %d rows (%.2f%% of total).", + rowCount - finalJobResult.rowsMigrated, percentFailed)); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } - private static void reportStats(UpgradeContext context, RestoreIndicesResult finalResult, RestoreIndicesResult tmpResult, - int rowCount, long startTime) { + private static void reportStats( + UpgradeContext context, + RestoreIndicesResult finalResult, + RestoreIndicesResult tmpResult, + int rowCount, + long startTime) { finalResult.ignored += tmpResult.ignored; finalResult.rowsMigrated += tmpResult.rowsMigrated; finalResult.timeSqlQueryMs += tmpResult.timeSqlQueryMs; @@ -178,11 +194,22 @@ private static void reportStats(UpgradeContext context, RestoreIndicesResult fin estimatedTimeMinutesComplete = timeSoFarMinutes * (100 - percentSent) / percentSent; } float totalTimeComplete = timeSoFarMinutes + estimatedTimeMinutesComplete; - context.report().addLine(String.format( - "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", - finalResult.rowsMigrated, rowCount, percentSent, finalResult.ignored, percentIgnored)); - context.report().addLine(String.format("%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", - timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); + context + .report() + .addLine( + String.format( + "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", + finalResult.rowsMigrated, + rowCount, + percentSent, + finalResult.ignored, + percentIgnored)); + context + .report() + .addLine( + String.format( + "%.2f mins taken. %.2f est. mins to completion. Total mins est. 
= %.2f.", + timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); } private int getBatchSize(final Map<String, Optional<String>> parsedArgs) { @@ -196,7 +223,8 @@ private int getStartingOffset(final Map<String, Optional<String>> parsedArgs) { private long getBatchDelayMs(final Map<String, Optional<String>> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (containsKey(parsedArgs, RestoreIndices.BATCH_DELAY_MS_ARG_NAME)) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } @@ -205,7 +233,8 @@ private int getThreadCount(final Map<String, Optional<String>> parsedArgs) { return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME); } - private int getInt(final Map<String, Optional<String>> parsedArgs, int defaultVal, String argKey) { + private int getInt( + final Map<String, Optional<String>> parsedArgs, int defaultVal, String argKey) { int result = defaultVal; if (containsKey(parsedArgs, argKey)) { result = Integer.parseInt(parsedArgs.get(argKey).get()); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index 4a8211f2cd4ac..aba751bff8177 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -8,47 +8,48 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; import com.linkedin.metadata.dao.producer.KafkaEventProducer; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class SystemUpdate implements Upgrade { - private final List<Upgrade> _preStartupUpgrades; - private final List<Upgrade> _postStartupUpgrades; - private final List<UpgradeStep> _steps; - - public SystemUpdate(final BuildIndices buildIndicesJob, final CleanIndices cleanIndicesJob, - final KafkaEventProducer kafkaEventProducer, final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { - - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); - } - - @Override - public String id() { - return "SystemUpdate"; - } - - @Override - public List<UpgradeStep> steps() { - return Stream.concat(Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), - _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); - } - - @Override - public List<UpgradeCleanupStep> cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); - } + private final List<Upgrade> _preStartupUpgrades; + private final List<Upgrade> _postStartupUpgrades; + private final List<UpgradeStep> _steps; + + public SystemUpdate( + final BuildIndices buildIndicesJob, + final CleanIndices cleanIndicesJob, + final KafkaEventProducer 
kafkaEventProducer, + final String version, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + + _preStartupUpgrades = List.of(buildIndicesJob); + _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); + _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); + } + + @Override + public String id() { + return "SystemUpdate"; + } + + @Override + public List<UpgradeStep> steps() { + return Stream.concat( + Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), _steps.stream()), + _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) + .collect(Collectors.toList()); + } + + @Override + public List<UpgradeCleanupStep> cleanupSteps() { + return Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), + _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) + .collect(Collectors.toList()); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index 1da5b6d6a25ce..eb76a72fba71a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -2,9 +2,9 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; import com.linkedin.metadata.graph.GraphService; @@ -17,49 +17,54 @@ import java.util.stream.Collectors; import java.util.stream.Stream; - public class BuildIndices implements Upgrade { - private final List<UpgradeStep> _steps; - - public BuildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - - final ConfigurationProvider configurationProvider) { - + private final List<UpgradeStep> _steps; - List<ElasticSearchIndexed> indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); + public BuildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); - } + List<ElasticSearchIndexed> indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof 
ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); - @Override - public String id() { - return "BuildIndices"; - } + _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); + } - @Override - public List<UpgradeStep> steps() { - return _steps; - } + @Override + public String id() { + return "BuildIndices"; + } - private List<UpgradeStep> buildSteps(final List<ElasticSearchIndexed> indexedServices, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + @Override + public List<UpgradeStep> steps() { + return _steps; + } - final List<UpgradeStep> steps = new ArrayList<>(); - // Disable ES write mode/change refresh rate and clone indices - steps.add(new BuildIndicesPreStep(baseElasticSearchComponents, indexedServices, configurationProvider)); - // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService - steps.add(new BuildIndicesStep(indexedServices)); - // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in pre-configure step if it already exists? - steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); - return steps; - } + private List<UpgradeStep> buildSteps( + final List<ElasticSearchIndexed> indexedServices, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + final List<UpgradeStep> steps = new ArrayList<>(); + // Disable ES write mode/change refresh rate and clone indices + steps.add( + new BuildIndicesPreStep( + baseElasticSearchComponents, indexedServices, configurationProvider)); + // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService + steps.add(new BuildIndicesStep(indexedServices)); + // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in + // pre-configure step if it already exists? 
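// ---------------------------------------------------------------------------
// The service wiring above uses a common stream idiom: filter a heterogeneous
// set of beans down to those implementing a marker interface, then cast. A
// minimal sketch with a hypothetical Indexed marker (names invented for
// illustration); Class::isInstance and Class::cast express the same
// filter-then-cast used by BuildIndices and CleanIndices without an explicit
// lambda cast.
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

final class IndexedServiceFilter {
  /** Hypothetical stand-in for ElasticSearchIndexed. */
  interface Indexed {}

  private IndexedServiceFilter() {}

  static List<Indexed> indexedOnly(Object... services) {
    return Stream.of(services)
        .filter(Indexed.class::isInstance) // keep only services that are Indexed
        .map(Indexed.class::cast) // safe after the isInstance filter
        .collect(Collectors.toList());
  }
}
// ---------------------------------------------------------------------------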
+ steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); + return steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index 1fb9c8526ad3b..ad68386622b21 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -10,41 +10,45 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CleanIndices implements Upgrade { - private final List<UpgradeStep> _steps; - - public CleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { - - List<ElasticSearchIndexed> indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); - - _steps = List.of(new CleanIndicesStep( + private final List<UpgradeStep> _steps; + + public CleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + + List<ElasticSearchIndexed> indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); + + _steps = + List.of( + new CleanIndicesStep( baseElasticSearchComponents.getSearchClient(), configurationProvider.getElasticSearch(), indexedServices)); - } + } - @Override - public String id() { - return "CleanIndices"; - } + @Override + public String id() { + return "CleanIndices"; + } - @Override - public List<UpgradeStep> steps() { - return _steps; - } + @Override + public List<UpgradeStep> steps() { + return _steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java index 2feca1f27e625..a44f6d6487067 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static 
com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -13,16 +16,11 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.client.RequestOptions; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPostStep implements UpgradeStep { @@ -45,8 +43,9 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { try { - List<ReindexConfig> indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List<ReindexConfig> indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); // Reset write blocking @@ -56,12 +55,26 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { request.settings(indexSettings); boolean ack = - _esComponents.getSearchClient().indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); if (ack) { - ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexConfig.name(), false); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + ack = + IndexUtils.validateWriteBlock( + _esComponents.getSearchClient(), indexConfig.name(), false); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); } if (!ack) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java index 82b9428c89fb8..c25888be07f89 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -8,15 +11,13 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; - +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.OpenSearchStatusException; @@ -24,10 +25,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.indices.ResizeRequest; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPreStep implements UpgradeStep { @@ -50,16 +47,19 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { try { // Get indices to update - List<ReindexConfig> indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List<ReindexConfig> indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); for (ReindexConfig indexConfig : indexConfigs) { - String indexName = IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); + String indexName = + IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); boolean ack = blockWrites(indexName); if (!ack) { - log.error("Partial index settings update, some indices may still be blocking writes." + log.error( + "Partial index settings update, some indices may still be blocking writes." 
+ " Please fix the error and re-run the BuildIndices upgrade job."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -69,10 +69,16 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { String clonedName = indexConfig.name() + "_clone_" + System.currentTimeMillis(); ResizeRequest resizeRequest = new ResizeRequest(clonedName, indexName); boolean cloneAck = - _esComponents.getSearchClient().indices().clone(resizeRequest, RequestOptions.DEFAULT).isAcknowledged(); + _esComponents + .getSearchClient() + .indices() + .clone(resizeRequest, RequestOptions.DEFAULT) + .isAcknowledged(); log.info("Cloned index {} into {}, Acknowledged: {}", indexName, clonedName, cloneAck); if (!cloneAck) { - log.error("Partial index settings update, cloned indices may need to be cleaned up: {}", clonedName); + log.error( + "Partial index settings update, cloned indices may need to be cleaned up: {}", + clonedName); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } @@ -85,8 +91,6 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { }; } - - private boolean blockWrites(String indexName) throws InterruptedException, IOException { UpdateSettingsRequest request = new UpdateSettingsRequest(indexName); Map<String, Object> indexSettings = ImmutableMap.of(INDEX_BLOCKS_WRITE_SETTING, "true"); @@ -94,13 +98,23 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc request.settings(indexSettings); boolean ack; try { - ack = _esComponents.getSearchClient().indices() - .putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + ack = + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } catch (OpenSearchStatusException | IOException ese) { - // Cover first run case, indices won't exist so settings updates won't work nor will the rest of the preConfigure steps. + // Cover first run case, indices won't exist so settings updates won't work nor will the rest + // of the preConfigure steps. // Since no data are in there they are skippable. - // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way to extract it :( + // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way + // to extract it :( if (ese.getMessage().contains("index_not_found")) { return true; } else { @@ -110,7 +124,11 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc if (ack) { ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexName, true); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } return ack; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java index ef59f2998929e..d37ee173bd9af 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java @@ -5,13 +5,11 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.shared.ElasticSearchIndexed; - import java.util.List; import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class BuildIndicesStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java index bb042bac6df95..c3a4d8ab89c07 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java @@ -1,54 +1,55 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.shared.ElasticSearchIndexed; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; - import java.util.List; import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; +import org.opensearch.client.RestHighLevelClient; @Slf4j public class CleanIndicesStep implements UpgradeStep { - private final RestHighLevelClient searchClient; - private final ElasticSearchConfiguration esConfig; - private final List<ElasticSearchIndexed> indexedServices; - - public CleanIndicesStep(final RestHighLevelClient searchClient, final ElasticSearchConfiguration esConfig, - final List<ElasticSearchIndexed> indexedServices) { - this.searchClient = searchClient; - this.esConfig = esConfig; - this.indexedServices = indexedServices; - } - - @Override - public String id() { - return "CleanUpIndicesStep"; - } - - @Override - public int retryCount() { - return 0; - } - - @Override - public Function<UpgradeContext, UpgradeStepResult> executable() { - return (context) -> { - try { - IndexUtils.getAllReindexConfigs(indexedServices) - .forEach(reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); - } catch (Exception e) { - log.error("CleanUpIndicesStep failed.", e); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); - } - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } + private final 
RestHighLevelClient searchClient; + private final ElasticSearchConfiguration esConfig; + private final List<ElasticSearchIndexed> indexedServices; + + public CleanIndicesStep( + final RestHighLevelClient searchClient, + final ElasticSearchConfiguration esConfig, + final List<ElasticSearchIndexed> indexedServices) { + this.searchClient = searchClient; + this.esConfig = esConfig; + this.indexedServices = indexedServices; + } + + @Override + public String id() { + return "CleanUpIndicesStep"; + } + + @Override + public int retryCount() { + return 0; + } + + @Override + public Function<UpgradeContext, UpgradeStepResult> executable() { + return (context) -> { + try { + IndexUtils.getAllReindexConfigs(indexedServices) + .forEach( + reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); + } catch (Exception e) { + log.error("CleanUpIndicesStep failed.", e); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index 1e568f1e9a9fe..b4a506c3f5c63 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -6,12 +6,10 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; +import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.function.Function; - - @RequiredArgsConstructor @Slf4j public class DataHubStartupStep implements UpgradeStep { @@ -32,8 +30,8 @@ public int retryCount() { public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { try { - DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent() - .setVersion(_version); + DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = + new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); log.info("Initiating startup for version: {}", _version); } catch (Exception e) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java index d9788448444ed..b3de7c503fb3e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java @@ -2,6 +2,10 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -11,22 +15,18 @@ import org.opensearch.client.RequestOptions; import 
org.opensearch.client.RestHighLevelClient; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - - @Slf4j public class IndexUtils { public static final String INDEX_BLOCKS_WRITE_SETTING = "index.blocks.write"; public static final int INDEX_BLOCKS_WRITE_RETRY = 4; public static final int INDEX_BLOCKS_WRITE_WAIT_SECONDS = 10; - private IndexUtils() { } + + private IndexUtils() {} private static List<ReindexConfig> _reindexConfigs = new ArrayList<>(); - public static List<ReindexConfig> getAllReindexConfigs(List<ElasticSearchIndexed> elasticSearchIndexedList) throws IOException { + public static List<ReindexConfig> getAllReindexConfigs( + List<ElasticSearchIndexed> elasticSearchIndexedList) throws IOException { // Avoid locking & reprocessing List<ReindexConfig> reindexConfigs = new ArrayList<>(_reindexConfigs); if (reindexConfigs.isEmpty()) { @@ -39,19 +39,24 @@ public static List<ReindexConfig> getAllReindexConfigs(List<ElasticSearchIndexed return reindexConfigs; } - public static boolean validateWriteBlock(RestHighLevelClient esClient, String indexName, boolean expectedState) - throws IOException, InterruptedException { + public static boolean validateWriteBlock( + RestHighLevelClient esClient, String indexName, boolean expectedState) + throws IOException, InterruptedException { final String finalIndexName = resolveAlias(esClient, indexName); - GetSettingsRequest request = new GetSettingsRequest() + GetSettingsRequest request = + new GetSettingsRequest() .indices(finalIndexName) .names(INDEX_BLOCKS_WRITE_SETTING) .includeDefaults(true); int count = INDEX_BLOCKS_WRITE_RETRY; while (count > 0) { - GetSettingsResponse response = esClient.indices().getSettings(request, RequestOptions.DEFAULT); - if (response.getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING).equals(String.valueOf(expectedState))) { + GetSettingsResponse response = + esClient.indices().getSettings(request, RequestOptions.DEFAULT); + if (response + .getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING) + .equals(String.valueOf(expectedState))) { return true; } count = count - 1; @@ -64,20 +69,20 @@ public static boolean validateWriteBlock(RestHighLevelClient esClient, String in return false; } - public static String resolveAlias(RestHighLevelClient esClient, String indexName) throws IOException { + public static String resolveAlias(RestHighLevelClient esClient, String indexName) + throws IOException { String finalIndexName = indexName; - GetAliasesResponse aliasResponse = esClient.indices() - .getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); + GetAliasesResponse aliasResponse = + esClient.indices().getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); if (!aliasResponse.getAliases().isEmpty()) { Set<String> indices = aliasResponse.getAliases().keySet(); if (indices.size() != 1) { throw new NotImplementedException( - String.format("Clone not supported for %s indices in alias %s. Indices: %s", - indices.size(), - indexName, - String.join(",", indices))); + String.format( + "Clone not supported for %s indices in alias %s. 
Indices: %s", + indices.size(), indexName, String.join(",", indices))); } finalIndexName = indices.stream().findFirst().get(); log.info("Alias {} resolved to index {}", indexName, finalIndexName); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java index e213c0b2fd4de..03f0b0b7f2ec2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.search.SearchService; import java.util.List; - public class BackfillBrowsePathsV2 implements Upgrade { private final List<UpgradeStep> _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 08a752d9597f4..610d9069337a5 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.system.entity.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -27,32 +29,29 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.util.Set; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class BackfillBrowsePathsV2Step implements UpgradeStep { public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = + "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; - private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); + private static final Set<String> ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); private static final Integer BATCH_SIZE = 5000; private final EntityService _entityService; @@ -67,14 +66,18 @@ public BackfillBrowsePathsV2Step(EntityService entityService, SearchService sear public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { final AuditStamp auditStamp = - new 
AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", migratedCount, - migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -88,22 +91,26 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final Filter filter; if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) - && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { filter = backfillDefaultBrowsePathsV2Filter(); - } else { + } else { filter = backfillBrowsePathsV2Filter(); } - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - null, - BATCH_SIZE, - new SearchFlags().setFulltext(true).setSkipCache(true).setSkipHighlighting(true).setSkipAggregates(true) - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), + "*", + filter, + null, + scrollId, + null, + BATCH_SIZE, + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -113,7 +120,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -177,13 +188,10 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - true - ); + _entityService.ingestProposal(proposal, auditStamp, true); } @Override @@ -192,7 +200,8 @@ public String id() { } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. 
*/ @Override public boolean isOptional() { @@ -204,4 +213,3 @@ public boolean skip(UpgradeContext context) { return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); } } - diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index db697a40d0c6c..83b8e028727ce 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -1,70 +1,73 @@ package com.linkedin.datahub.upgrade; +import static org.testng.AssertJUnit.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.datahub.upgrade.system.SystemUpdate; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, - properties = { - "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + "kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" + }) public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("systemUpdate") - private SystemUpdate systemUpdate; - - @Test - public void testSystemUpdateInit() { - assertNotNull(systemUpdate); - } + @Autowired + @Named("systemUpdate") + private SystemUpdate systemUpdate; - @Test - public void testSystemUpdateSend() { - UpgradeStepResult.Result result = systemUpdate.steps().stream() - .filter(s -> s.id().equals("DataHubStartupStep")) - .findFirst().get() - .executable().apply(new UpgradeContext() { - @Override - public Upgrade upgrade() { - return null; - } + @Test + public void testSystemUpdateInit() { + assertNotNull(systemUpdate); + } - @Override - public List<UpgradeStepResult> stepResults() { - return null; - } + @Test + public void testSystemUpdateSend() { + UpgradeStepResult.Result result = + systemUpdate.steps().stream() + .filter(s -> s.id().equals("DataHubStartupStep")) + .findFirst() + .get() + .executable() + .apply( + new UpgradeContext() { + @Override + public Upgrade upgrade() { + return null; + } - @Override - public UpgradeReport report() { - return null; - } + @Override + public List<UpgradeStepResult> stepResults() { + return null; + } - @Override - public List<String> args() { - return null; - } + @Override + public UpgradeReport report() { + return null; + } - @Override - public Map<String, Optional<String>> parsedArgs() { - return null; - } - }).result(); - assertEquals("SUCCEEDED", result.toString()); - } + @Override + public List<String> args() { + return null; + } + @Override + public Map<String, 
Optional<String>> parsedArgs() { + return null; + } + }) + .result(); + assertEquals("SUCCEEDED", result.toString()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 74cde414adc2f..3e655be900bf2 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -1,49 +1,48 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import static org.testng.AssertJUnit.*; + import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import static org.testng.AssertJUnit.*; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("restoreIndices") - private RestoreIndices restoreIndices; - - @Autowired - @Named("buildIndices") - private BuildIndices buildIndices; - - @Autowired - private ESIndexBuilder esIndexBuilder; - - @Test - public void testRestoreIndicesInit() { - /* - This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean - */ - assertTrue(restoreIndices.steps().size() >= 3); - } - - @Test - public void testBuildIndicesInit() { - assertEquals("BuildIndices", buildIndices.id()); - assertTrue(buildIndices.steps().size() >= 3); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); - assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); - assertFalse(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); - } - + @Autowired + @Named("restoreIndices") + private RestoreIndices restoreIndices; + + @Autowired + @Named("buildIndices") + private BuildIndices buildIndices; + + @Autowired private ESIndexBuilder esIndexBuilder; + + @Test + public void testRestoreIndicesInit() { + /* + This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean + */ + assertTrue(restoreIndices.steps().size() >= 3); + } + + @Test + public void testBuildIndicesInit() { + assertEquals("BuildIndices", buildIndices.id()); + assertTrue(buildIndices.steps().size() >= 3); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); + assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); + assertFalse( + esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); + 
} } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index 6cc853b2c7c4d..0e7bf5ddd5250 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -16,27 +16,19 @@ @Import(value = {SystemAuthenticationFactory.class}) public class UpgradeCliApplicationTestConfiguration { - @MockBean - private UpgradeCli upgradeCli; + @MockBean private UpgradeCli upgradeCli; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityService _entityService; + @MockBean private EntityService _entityService; - @MockBean - private SearchService searchService; + @MockBean private SearchService searchService; - @MockBean - private GraphService graphService; + @MockBean private GraphService graphService; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - ConfigEntityRegistry configEntityRegistry; + @MockBean ConfigEntityRegistry configEntityRegistry; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/docker/build.gradle b/docker/build.gradle index c7f783af6c997..bc79be501b395 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -158,4 +158,4 @@ task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { ] + pg_compose_args commandLine 'bash', '-c', cmd.join(" ") -} +} \ No newline at end of file diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java index 015a0a9a0f14a..9cf8b4174ecfb 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java @@ -27,12 +27,11 @@ public class AspectSpec { // Classpath & Pegasus-specific: Temporary. 
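  // [Editor's aside -- illustrative sketch, not part of the patch under review.]
  // The AspectSpec constructor reformatted just below keys each field-spec map by the
  // field's PathSpec string and resolves duplicate paths with a first-wins merge function.
  // A minimal sketch of the same idiom, assuming a simple Spec type exposing getPath():
  //
  //   import java.util.Map;
  //   import java.util.stream.Collectors;
  //
  //   Map<String, Spec> byPath =
  //       specs.stream()
  //           .collect(
  //               Collectors.toMap(
  //                   s -> s.getPath().toString(), s -> s, (first, second) -> first));
  //
  // The three-argument toMap matters here: with (first, second) -> first, a spec declared
  // at an already-seen path is quietly ignored, whereas the two-argument overload would
  // throw IllegalStateException on the duplicate key.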
private final RecordDataSchema _schema; private final Class<RecordTemplate> _aspectClass; - @Setter @Getter - private String registryName = "unknownRegistry"; - @Setter @Getter - private ComparableVersion registryVersion = new ComparableVersion("0.0.0.0-dev"); + @Setter @Getter private String registryName = "unknownRegistry"; + @Setter @Getter private ComparableVersion registryVersion = new ComparableVersion("0.0.0.0-dev"); - public AspectSpec(@Nonnull final AspectAnnotation aspectAnnotation, + public AspectSpec( + @Nonnull final AspectAnnotation aspectAnnotation, @Nonnull final List<SearchableFieldSpec> searchableFieldSpecs, @Nonnull final List<SearchScoreFieldSpec> searchScoreFieldSpecs, @Nonnull final List<RelationshipFieldSpec> relationshipFieldSpecs, @@ -41,18 +40,35 @@ public AspectSpec(@Nonnull final AspectAnnotation aspectAnnotation, final RecordDataSchema schema, final Class<RecordTemplate> aspectClass) { _aspectAnnotation = aspectAnnotation; - _searchableFieldSpecs = searchableFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _searchScoreFieldSpecs = searchScoreFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _relationshipFieldSpecs = relationshipFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _timeseriesFieldSpecs = timeseriesFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getTimeseriesFieldAnnotation().getStatName(), spec -> spec, - (val1, val2) -> val1)); - _timeseriesFieldCollectionSpecs = timeseriesFieldCollectionSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getTimeseriesFieldCollectionAnnotation().getCollectionName(), spec -> spec, - (val1, val2) -> val1)); + _searchableFieldSpecs = + searchableFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _searchScoreFieldSpecs = + searchScoreFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _relationshipFieldSpecs = + relationshipFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _timeseriesFieldSpecs = + timeseriesFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getTimeseriesFieldAnnotation().getStatName(), + spec -> spec, + (val1, val2) -> val1)); + _timeseriesFieldCollectionSpecs = + timeseriesFieldCollectionSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getTimeseriesFieldCollectionAnnotation().getCollectionName(), + spec -> spec, + (val1, val2) -> val1)); _schema = schema; _aspectClass = aspectClass; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java index 766944e150390..b235e2adcae11 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java @@ -10,10 +10,8 @@ import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; - import lombok.ToString; - @ToString public class ConfigEntitySpec implements EntitySpec { @@ -26,7 +24,8 @@ public ConfigEntitySpec( @Nonnull final String entityName, @Nonnull final String keyAspect, @Nonnull final 
Collection<AspectSpec> aspectSpecs) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = new EntityAnnotation(entityName, keyAspect); } @@ -72,12 +71,14 @@ public AspectSpec getAspectSpec(final String name) { @Override public RecordDataSchema getSnapshotSchema() { - throw new UnsupportedOperationException("Failed to find Snapshot associated with Config-based Entity"); + throw new UnsupportedOperationException( + "Failed to find Snapshot associated with Config-based Entity"); } @Override public TyperefDataSchema getAspectTyperefSchema() { - throw new UnsupportedOperationException("Failed to find Typeref schema associated with Config-based Entity"); + throw new UnsupportedOperationException( + "Failed to find Typeref schema associated with Config-based Entity"); } @Override @@ -89,4 +90,3 @@ public List<SearchableFieldSpec> getSearchableFieldSpecs() { return _searchableFieldSpecs; } } - diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java index ddc87aacc72cf..b9766d0ca8640 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java @@ -23,7 +23,6 @@ import lombok.extern.slf4j.Slf4j; import org.reflections.Reflections; - /** * Factory class to get a map of all entity schemas and aspect schemas under com.linkedin package * This lets us fetch the PDL data schema of an arbitrary entity or aspect based on their names @@ -39,22 +38,25 @@ public class DataSchemaFactory { private static final String NAME_FIELD = "name"; private static final DataSchemaFactory INSTANCE = new DataSchemaFactory(); - private static final String[] DEFAULT_TOP_LEVEL_NAMESPACES = new String[]{"com", "org", "io", "datahub"}; + private static final String[] DEFAULT_TOP_LEVEL_NAMESPACES = + new String[] {"com", "org", "io", "datahub"}; public DataSchemaFactory() { - this(new String[]{"com.linkedin", "com.datahub"}); + this(new String[] {"com.linkedin", "com.datahub"}); } public DataSchemaFactory(String classPath) { - this(new String[]{classPath}); + this(new String[] {classPath}); } + public DataSchemaFactory(String[] classPaths) { this(classPaths, null); } /** - * Construct a DataSchemaFactory with classes and schemas found under a specific folder. - * This will only look for classes under the `com`, `org` or `datahub` top level namespaces. + * Construct a DataSchemaFactory with classes and schemas found under a specific folder. This will + * only look for classes under the `com`, `org` or `datahub` top level namespaces. + * * @param pluginLocation The location of the classes and schema files. */ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws IOException { @@ -66,15 +68,18 @@ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws File pluginDir = pluginLocation.toFile(); if (!pluginDir.exists()) { throw new RuntimeException( - "Failed to find plugin directory " + pluginDir.getAbsolutePath() + ". Current directory is " + new File( - ".").getAbsolutePath()); + "Failed to find plugin directory " + + pluginDir.getAbsolutePath() + + ". 
Current directory is " + + new File(".").getAbsolutePath()); } List<URL> urls = new ArrayList<URL>(); if (pluginDir.isDirectory()) { - List<Path> jarFiles = Files.walk(pluginLocation) - .filter(Files::isRegularFile) - .filter(p -> p.toString().endsWith(".jar")) - .collect(Collectors.toList()); + List<Path> jarFiles = + Files.walk(pluginLocation) + .filter(Files::isRegularFile) + .filter(p -> p.toString().endsWith(".jar")) + .collect(Collectors.toList()); for (Path f : jarFiles) { URL url = f.toUri().toURL(); if (url != null) { @@ -87,12 +92,14 @@ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws } URL[] urlsArray = new URL[urls.size()]; urls.toArray(urlsArray); - URLClassLoader classLoader = new URLClassLoader(urlsArray, Thread.currentThread().getContextClassLoader()); + URLClassLoader classLoader = + new URLClassLoader(urlsArray, Thread.currentThread().getContextClassLoader()); return new DataSchemaFactory(DEFAULT_TOP_LEVEL_NAMESPACES, classLoader); } /** - * Construct a DataSchemaFactory with a custom class loader and a list of class namespaces to look for entities and aspects. + * Construct a DataSchemaFactory with a custom class loader and a list of class namespaces to look + * for entities and aspects. */ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader) { entitySchemas = new HashMap<>(); @@ -120,7 +127,8 @@ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader Reflections reflections = new Reflections(namespace, standardClassLoader); stdClasses.addAll(reflections.getSubTypesOf(RecordTemplate.class)); } - log.debug("Standard ClassLoader found a total of {} RecordTemplate classes", stdClasses.size()); + log.debug( + "Standard ClassLoader found a total of {} RecordTemplate classes", stdClasses.size()); classes.removeAll(stdClasses); log.debug("Finally found a total of {} RecordTemplate classes to inspect", classes.size()); } @@ -135,15 +143,19 @@ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader if (schema != null) { DataSchema finalSchema = schema; - getName(schema, EntityAnnotation.ANNOTATION_NAME).ifPresent( - entityName -> entitySchemas.put(entityName, finalSchema)); - getName(schema, AspectAnnotation.ANNOTATION_NAME).ifPresent(aspectName -> { - aspectSchemas.put(aspectName, finalSchema); - aspectClasses.put(aspectName, recordClass); - }); - getName(schema, EventAnnotation.ANNOTATION_NAME).ifPresent(eventName -> { - eventSchemas.put(eventName, finalSchema); - }); + getName(schema, EntityAnnotation.ANNOTATION_NAME) + .ifPresent(entityName -> entitySchemas.put(entityName, finalSchema)); + getName(schema, AspectAnnotation.ANNOTATION_NAME) + .ifPresent( + aspectName -> { + aspectSchemas.put(aspectName, finalSchema); + aspectClasses.put(aspectName, recordClass); + }); + getName(schema, EventAnnotation.ANNOTATION_NAME) + .ifPresent( + eventName -> { + eventSchemas.put(eventName, finalSchema); + }); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java index 9a083660d1023..5db8ca264f69d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java @@ -11,7 +11,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.ToString; @ToString @@ -31,7 +30,8 @@ 
public DefaultEntitySpec( @Nonnull final EntityAnnotation entityAnnotation, @Nonnull final RecordDataSchema snapshotSchema, @Nullable final TyperefDataSchema aspectTyperefSchema) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = entityAnnotation; _snapshotSchema = snapshotSchema; _aspectTyperefSchema = aspectTyperefSchema; @@ -102,5 +102,4 @@ public List<SearchableFieldSpec> getSearchableFieldSpecs() { return _searchableFieldSpecs; } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java index 7f7c1004aeddb..31b73e6cc9e5e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor @Getter public class DefaultEventSpec implements EventSpec { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java index 4bdb8e37d565f..e4c9dd55a3b4a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java @@ -7,10 +7,7 @@ import java.util.Map; import java.util.stream.Collectors; - -/** - * A specification of a DataHub Entity - */ +/** A specification of a DataHub Entity */ public interface EntitySpec { String getName(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java index 37cb9eabc09da..580134f566871 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java @@ -28,7 +28,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EntitySpecBuilder { @@ -61,7 +60,8 @@ public EntitySpecBuilder(final AnnotationExtractionMode extractionMode) { public List<EntitySpec> buildEntitySpecs(@Nonnull final DataSchema snapshotSchema) { - final UnionDataSchema snapshotUnionSchema = (UnionDataSchema) snapshotSchema.getDereferencedDataSchema(); + final UnionDataSchema snapshotUnionSchema = + (UnionDataSchema) snapshotSchema.getDereferencedDataSchema(); final List<UnionDataSchema.Member> unionMembers = snapshotUnionSchema.getMembers(); final List<EntitySpec> entitySpecs = new ArrayList<>(); @@ -73,16 +73,19 @@ public List<EntitySpec> buildEntitySpecs(@Nonnull final DataSchema snapshotSchem } // Now validate that all relationships point to valid entities. - // TODO: Fix this so that aspects that are just in the entity registry don't fail because they aren't in the + // TODO: Fix this so that aspects that are just in the entity registry don't fail because they + // aren't in the // snapshot registry. -// for (final RelationshipFieldSpec spec : _relationshipFieldSpecs) { -// if (!_entityNames.containsAll( -// spec.getValidDestinationTypes().stream().map(String::toLowerCase).collect(Collectors.toList()))) { -// failValidation( -// String.format("Found invalid relationship with name %s at path %s. 
Invalid entityType(s) provided.", -// spec.getRelationshipName(), spec.getPath())); -// } -// } + // for (final RelationshipFieldSpec spec : _relationshipFieldSpecs) { + // if (!_entityNames.containsAll( + // + // spec.getValidDestinationTypes().stream().map(String::toLowerCase).collect(Collectors.toList()))) { + // failValidation( + // String.format("Found invalid relationship with name %s at path %s. Invalid + // entityType(s) provided.", + // spec.getRelationshipName(), spec.getPath())); + // } + // } return entitySpecs; } @@ -93,17 +96,21 @@ public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema final RecordDataSchema entitySnapshotRecordSchema = validateSnapshot(entitySnapshotSchema); // 1. Parse information about the entity from the "entity" annotation. - final Object entityAnnotationObj = entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { EntityAnnotation entityAnnotation = - EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); + EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); final ArrayDataSchema aspectArraySchema = - (ArrayDataSchema) entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME) - .getType() - .getDereferencedDataSchema(); + (ArrayDataSchema) + entitySnapshotRecordSchema + .getField(ASPECTS_FIELD_NAME) + .getType() + .getDereferencedDataSchema(); final UnionDataSchema aspectUnionSchema = (UnionDataSchema) aspectArraySchema.getItems().getDereferencedDataSchema(); @@ -113,111 +120,147 @@ public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema for (final UnionDataSchema.Member member : unionMembers) { NamedDataSchema namedDataSchema = (NamedDataSchema) member.getType(); try { - final AspectSpec spec = buildAspectSpec(member.getType(), - (Class<RecordTemplate>) Class.forName(namedDataSchema.getFullName()).asSubclass(RecordTemplate.class)); + final AspectSpec spec = + buildAspectSpec( + member.getType(), + (Class<RecordTemplate>) + Class.forName(namedDataSchema.getFullName()) + .asSubclass(RecordTemplate.class)); aspectSpecs.add(spec); } catch (ClassNotFoundException ce) { log.warn("Failed to find class for {}", member.getType(), ce); } } - final EntitySpec entitySpec = new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema, - (TyperefDataSchema) aspectArraySchema.getItems()); + final EntitySpec entitySpec = + new DefaultEntitySpec( + aspectSpecs, + entityAnnotation, + entitySnapshotRecordSchema, + (TyperefDataSchema) aspectArraySchema.getItems()); validateEntitySpec(entitySpec); return entitySpec; } - failValidation(String.format("Could not build entity spec for entity with name %s. Missing @%s annotation.", - entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); + failValidation( + String.format( + "Could not build entity spec for entity with name %s. Missing @%s annotation.", + entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); return null; } - public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema, - @Nonnull final List<AspectSpec> aspectSpecs) { + public EntitySpec buildEntitySpec( + @Nonnull final DataSchema entitySnapshotSchema, @Nonnull final List<AspectSpec> aspectSpecs) { // 0. 
Validate the Snapshot definition final RecordDataSchema entitySnapshotRecordSchema = validateSnapshot(entitySnapshotSchema); // 1. Parse information about the entity from the "entity" annotation. - final Object entityAnnotationObj = entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { EntityAnnotation entityAnnotation = - EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); + EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); - final EntitySpec entitySpec = new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema); + final EntitySpec entitySpec = + new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema); validateEntitySpec(entitySpec); return entitySpec; } - failValidation(String.format("Could not build entity spec for entity with name %s. Missing @%s annotation.", - entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); + failValidation( + String.format( + "Could not build entity spec for entity with name %s. Missing @%s annotation.", + entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); return null; } - /** - * Build a config-based {@link EntitySpec}, as opposed to a Snapshot-based {@link EntitySpec} - */ - public EntitySpec buildConfigEntitySpec(@Nonnull final String entityName, @Nonnull final String keyAspect, + /** Build a config-based {@link EntitySpec}, as opposed to a Snapshot-based {@link EntitySpec} */ + public EntitySpec buildConfigEntitySpec( + @Nonnull final String entityName, + @Nonnull final String keyAspect, @Nonnull final List<AspectSpec> aspectSpecs) { return new ConfigEntitySpec(entityName, keyAspect, aspectSpecs); } - public EntitySpec buildPartialEntitySpec(@Nonnull final String entityName, @Nullable final String keyAspectName, + public EntitySpec buildPartialEntitySpec( + @Nonnull final String entityName, + @Nullable final String keyAspectName, @Nonnull final List<AspectSpec> aspectSpecs) { - EntitySpec entitySpec = new PartialEntitySpec(aspectSpecs, new EntityAnnotation(entityName, keyAspectName)); + EntitySpec entitySpec = + new PartialEntitySpec(aspectSpecs, new EntityAnnotation(entityName, keyAspectName)); return entitySpec; } - public AspectSpec buildAspectSpec(@Nonnull final DataSchema aspectDataSchema, - final Class<RecordTemplate> aspectClass) { + public AspectSpec buildAspectSpec( + @Nonnull final DataSchema aspectDataSchema, final Class<RecordTemplate> aspectClass) { final RecordDataSchema aspectRecordSchema = validateAspect(aspectDataSchema); - final Object aspectAnnotationObj = aspectRecordSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectRecordSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { final AspectAnnotation aspectAnnotation = - AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectRecordSchema.getFullName()); + AspectAnnotation.fromSchemaProperty( + aspectAnnotationObj, aspectRecordSchema.getFullName()); if (AnnotationExtractionMode.IGNORE_ASPECT_FIELDS.equals(_extractionMode)) { // Short Circuit. 
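        // [Editor's aside -- illustrative sketch, not part of the patch under review.]
        // In IGNORE_ASPECT_FIELDS mode the builder returns an AspectSpec whose per-field
        // spec lists are all empty, so the four SchemaAnnotationProcessor traversals further
        // down in this method never run. A hedged sketch of the observable effect (assuming
        // AspectSpec exposes a getSearchableFieldSpecs() accessor, and with schema and
        // aspectClass standing in for real arguments):
        //
        //   EntitySpecBuilder fastBuilder =
        //       new EntitySpecBuilder(AnnotationExtractionMode.IGNORE_ASPECT_FIELDS);
        //   AspectSpec spec = fastBuilder.buildAspectSpec(schema, aspectClass);
        //   assert spec.getSearchableFieldSpecs().isEmpty(); // field annotations were skipped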
- return new AspectSpec(aspectAnnotation, Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), aspectRecordSchema, aspectClass); + return new AspectSpec( + aspectAnnotation, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + aspectRecordSchema, + aspectClass); } final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedSearchResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_searchHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_searchHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract Searchable Field Specs - final SearchableFieldSpecExtractor searchableFieldSpecExtractor = new SearchableFieldSpecExtractor(); + final SearchableFieldSpecExtractor searchableFieldSpecExtractor = + new SearchableFieldSpecExtractor(); final DataSchemaRichContextTraverser searchableFieldSpecTraverser = new DataSchemaRichContextTraverser(searchableFieldSpecExtractor); searchableFieldSpecTraverser.traverse(processedSearchResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedSearchScoreResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_searchScoreHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_searchScoreHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract SearchScore Field Specs - final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = new SearchScoreFieldSpecExtractor(); + final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = + new SearchScoreFieldSpecExtractor(); final DataSchemaRichContextTraverser searcScoreFieldSpecTraverser = new DataSchemaRichContextTraverser(searchScoreFieldSpecExtractor); searcScoreFieldSpecTraverser.traverse(processedSearchScoreResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedRelationshipResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_relationshipHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_relationshipHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract Relationship Field Specs - final RelationshipFieldSpecExtractor relationshipFieldSpecExtractor = new RelationshipFieldSpecExtractor(); + final RelationshipFieldSpecExtractor relationshipFieldSpecExtractor = + new RelationshipFieldSpecExtractor(); final DataSchemaRichContextTraverser relationshipFieldSpecTraverser = new DataSchemaRichContextTraverser(relationshipFieldSpecExtractor); relationshipFieldSpecTraverser.traverse(processedRelationshipResult.getResultSchema()); @@ -227,23 +270,33 @@ public AspectSpec buildAspectSpec(@Nonnull final DataSchema aspectDataSchema, final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedTimeseriesFieldResult = SchemaAnnotationProcessor.process( - ImmutableList.of(_timeseriesFiledAnnotationHandler, _timeseriesFieldCollectionHandler), - aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); + ImmutableList.of( + _timeseriesFiledAnnotationHandler, _timeseriesFieldCollectionHandler), + aspectRecordSchema, + new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract TimeseriesField/ TimeseriesFieldCollection Specs - final 
TimeseriesFieldSpecExtractor timeseriesFieldSpecExtractor = new TimeseriesFieldSpecExtractor(); + final TimeseriesFieldSpecExtractor timeseriesFieldSpecExtractor = + new TimeseriesFieldSpecExtractor(); final DataSchemaRichContextTraverser timeseriesFieldSpecTraverser = new DataSchemaRichContextTraverser(timeseriesFieldSpecExtractor); timeseriesFieldSpecTraverser.traverse(processedTimeseriesFieldResult.getResultSchema()); - return new AspectSpec(aspectAnnotation, searchableFieldSpecExtractor.getSpecs(), - searchScoreFieldSpecExtractor.getSpecs(), relationshipFieldSpecExtractor.getSpecs(), + return new AspectSpec( + aspectAnnotation, + searchableFieldSpecExtractor.getSpecs(), + searchScoreFieldSpecExtractor.getSpecs(), + relationshipFieldSpecExtractor.getSpecs(), timeseriesFieldSpecExtractor.getTimeseriesFieldSpecs(), - timeseriesFieldSpecExtractor.getTimeseriesFieldCollectionSpecs(), aspectRecordSchema, aspectClass); + timeseriesFieldSpecExtractor.getTimeseriesFieldCollectionSpecs(), + aspectRecordSchema, + aspectClass); } - failValidation(String.format("Could not build aspect spec for aspect with name %s. Missing @Aspect annotation.", - aspectRecordSchema.getName())); + failValidation( + String.format( + "Could not build aspect spec for aspect with name %s. Missing @Aspect annotation.", + aspectRecordSchema.getName())); return null; } @@ -252,7 +305,8 @@ private void validateEntitySpec(EntitySpec entitySpec) { if (entitySpec.getKeyAspectSpec() == null) { failValidation( - String.format("Did not find required Key Aspect with name %s in aspects for Entity %s in list of aspects.", + String.format( + "Did not find required Key Aspect with name %s in aspects for Entity %s in list of aspects.", entitySpec.getKeyAspectName(), entitySpec.getName())); } @@ -263,9 +317,11 @@ private void validateEntitySpec(EntitySpec entitySpec) { for (final AspectSpec aspectSpec : entitySpec.getAspectSpecs()) { validateAspect(aspectSpec); if (aspectNames.contains(aspectSpec.getName())) { - failValidation(String.format( - "Could not build entity spec for entity with name %s." + " Found multiple Aspects with the same name %s", - entitySpec.getName(), aspectSpec.getName())); + failValidation( + String.format( + "Could not build entity spec for entity with name %s." + + " Found multiple Aspects with the same name %s", + entitySpec.getName(), aspectSpec.getName())); } aspectNames.add(aspectSpec.getName()); } @@ -273,8 +329,11 @@ private void validateEntitySpec(EntitySpec entitySpec) { // Validate entity name if (_entityNames.contains(entitySpec.getName().toLowerCase())) { // Duplicate entity found. - failValidation(String.format("Could not build entity spec for entity with name %s." - + " Found multiple Entity Snapshots with the same name.", entitySpec.getName())); + failValidation( + String.format( + "Could not build entity spec for entity with name %s." 
+ + " Found multiple Entity Snapshots with the same name.", + entitySpec.getName())); } _entityNames.add(entitySpec.getName().toLowerCase()); @@ -283,13 +342,16 @@ private void validateEntitySpec(EntitySpec entitySpec) { private void validateAspect(final AspectSpec aspectSpec) { if (aspectSpec.isTimeseries()) { if (aspectSpec.getPegasusSchema().contains(TIMESTAMP_FIELD_NAME)) { - DataSchema timestamp = aspectSpec.getPegasusSchema().getField(TIMESTAMP_FIELD_NAME).getType(); + DataSchema timestamp = + aspectSpec.getPegasusSchema().getField(TIMESTAMP_FIELD_NAME).getType(); if (timestamp.getType() == DataSchema.Type.LONG) { return; } } - failValidation(String.format("Aspect %s is of type timeseries but does not include TimeseriesAspectBase", - aspectSpec.getName())); + failValidation( + String.format( + "Aspect %s is of type timeseries but does not include TimeseriesAspectBase", + aspectSpec.getName())); } } @@ -297,7 +359,8 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh // 0. Validate that schema is a Record if (entitySnapshotSchema.getType() != DataSchema.Type.RECORD) { failValidation( - String.format("Failed to validate entity snapshot schema of type %s. Schema must be of record type.", + String.format( + "Failed to validate entity snapshot schema of type %s. Schema must be of record type.", entitySnapshotSchema.getType().toString())); } @@ -306,30 +369,40 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh // 1. Validate Urn field if (entitySnapshotRecordSchema.getField(URN_FIELD_NAME) == null || entitySnapshotRecordSchema.getField(URN_FIELD_NAME).getType().getDereferencedType() - != DataSchema.Type.STRING) { - failValidation(String.format("Failed to validate entity snapshot schema with name %s. Invalid urn field.", - entitySnapshotRecordSchema.getName())); + != DataSchema.Type.STRING) { + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid urn field.", + entitySnapshotRecordSchema.getName())); } // 2. Validate Aspect Array if (entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME) == null || entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME).getType().getDereferencedType() - != DataSchema.Type.ARRAY) { + != DataSchema.Type.ARRAY) { - failValidation(String.format( - "Failed to validate entity snapshot schema with name %s. Invalid aspects field found. " - + "'aspects' should be an array of union type.", entitySnapshotRecordSchema.getName())); + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid aspects field found. " + + "'aspects' should be an array of union type.", + entitySnapshotRecordSchema.getName())); } // 3. Validate Aspect Union final ArrayDataSchema aspectArray = - (ArrayDataSchema) entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME).getType().getDereferencedDataSchema(); + (ArrayDataSchema) + entitySnapshotRecordSchema + .getField(ASPECTS_FIELD_NAME) + .getType() + .getDereferencedDataSchema(); if (aspectArray.getItems().getType() != DataSchema.Type.TYPEREF || aspectArray.getItems().getDereferencedType() != DataSchema.Type.UNION) { - failValidation(String.format( - "Failed to validate entity snapshot schema with name %s. Invalid aspects field field. " - + "'aspects' should be an array of union type.", entitySnapshotRecordSchema.getName())); + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid aspects field field. 
" + + "'aspects' should be an array of union type.", + entitySnapshotRecordSchema.getName())); } return entitySnapshotRecordSchema; @@ -338,8 +411,10 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh private RecordDataSchema validateAspect(@Nonnull final DataSchema aspectSchema) { // Validate that schema is a Record if (aspectSchema.getType() != DataSchema.Type.RECORD) { - failValidation(String.format("Failed to validate aspect schema of type %s. Schema must be of record type.", - aspectSchema.getType().toString())); + failValidation( + String.format( + "Failed to validate aspect schema of type %s. Schema must be of record type.", + aspectSchema.getType().toString())); } return (RecordDataSchema) aspectSchema; } @@ -349,11 +424,13 @@ private void validateKeyAspect(@Nonnull final AspectSpec keyAspect) { RecordDataSchema schema = keyAspect.getPegasusSchema(); // Validate that each field is a string or enum. for (RecordDataSchema.Field field : schema.getFields()) { - if (!DataSchema.Type.STRING.equals(field.getType().getDereferencedType()) && !DataSchema.Type.ENUM.equals( - field.getType().getDereferencedType())) { - failValidation(String.format("Failed to validate key aspect nameed %s. Key " - + "aspects must only contain fields of STRING or ENUM type. Found %s.", keyAspect.getName(), - field.getType().toString())); + if (!DataSchema.Type.STRING.equals(field.getType().getDereferencedType()) + && !DataSchema.Type.ENUM.equals(field.getType().getDereferencedType())) { + failValidation( + String.format( + "Failed to validate key aspect nameed %s. Key " + + "aspects must only contain fields of STRING or ENUM type. Found %s.", + keyAspect.getName(), field.getType().toString())); } } } @@ -363,14 +440,9 @@ private void failValidation(@Nonnull final String message) { } public enum AnnotationExtractionMode { - /** - * Extract all annotations types, the default. - */ + /** Extract all annotations types, the default. */ DEFAULT, - /** - * Skip annotations on aspect record fields, only - * parse entity + aspect annotations. - */ + /** Skip annotations on aspect record fields, only parse entity + aspect annotations. 
*/ IGNORE_ASPECT_FIELDS } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java index a25bf1c2dea62..0a265c46a5164 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java @@ -5,19 +5,17 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class EntitySpecUtils { - private EntitySpecUtils() { - } + private EntitySpecUtils() {} - public static List<String> getEntityTimeseriesAspectNames(@Nonnull EntityRegistry entityRegistry, - @Nonnull String entityName) { + public static List<String> getEntityTimeseriesAspectNames( + @Nonnull EntityRegistry entityRegistry, @Nonnull String entityName) { final EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - final List<String> timeseriesAspectNames = entitySpec.getAspectSpecs() - .stream() - .filter(x -> x.isTimeseries()) - .map(x -> x.getName()) - .collect(Collectors.toList()); + final List<String> timeseriesAspectNames = + entitySpec.getAspectSpecs().stream() + .filter(x -> x.isTimeseries()) + .map(x -> x.getName()) + .collect(Collectors.toList()); return timeseriesAspectNames; } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java index 20f0dfc70d465..09ec6641777f9 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java @@ -3,23 +3,14 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.metadata.models.annotation.EventAnnotation; - -/** - * A specification of a DataHub Platform Event - */ +/** A specification of a DataHub Platform Event */ public interface EventSpec { - /** - * Returns the name of an event - */ + /** Returns the name of an event */ String getName(); - /** - * Returns the raw event annotation - */ + /** Returns the raw event annotation */ EventAnnotation getEventAnnotation(); - /** - * Returns the PDL schema object for the Event - */ + /** Returns the PDL schema object for the Event */ RecordDataSchema getPegasusSchema(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java index 04322b3b550cb..ceb984cdbc5b4 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java @@ -9,33 +9,32 @@ @Slf4j public class EventSpecBuilder { - public EventSpecBuilder() { - } + public EventSpecBuilder() {} public EventSpec buildEventSpec( - @Nonnull final String eventName, - @Nonnull final DataSchema eventDataSchema) { + @Nonnull final String eventName, @Nonnull final DataSchema eventDataSchema) { final RecordDataSchema eventRecordSchema = validateEvent(eventDataSchema); - final Object eventAnnotationObj = eventDataSchema.getProperties().get(EventAnnotation.ANNOTATION_NAME); + final Object eventAnnotationObj = + eventDataSchema.getProperties().get(EventAnnotation.ANNOTATION_NAME); if (eventAnnotationObj != null) { final EventAnnotation eventAnnotation = - EventAnnotation.fromPegasusAnnotationObject(eventAnnotationObj, eventRecordSchema.getFullName()); + 
EventAnnotation.fromPegasusAnnotationObject( + eventAnnotationObj, eventRecordSchema.getFullName()); - return new DefaultEventSpec( - eventName, - eventAnnotation, - eventRecordSchema); + return new DefaultEventSpec(eventName, eventAnnotation, eventRecordSchema); } return null; } private RecordDataSchema validateEvent(@Nonnull final DataSchema eventSchema) { if (eventSchema.getType() != DataSchema.Type.RECORD) { - failValidation(String.format("Failed to validate event schema of type %s. Schema must be of record type.", - eventSchema.getType().toString())); + failValidation( + String.format( + "Failed to validate event schema of type %s. Schema must be of record type.", + eventSchema.getType().toString())); } return (RecordDataSchema) eventSchema; } @@ -43,4 +42,4 @@ private RecordDataSchema validateEvent(@Nonnull final DataSchema eventSchema) { private void failValidation(@Nonnull final String message) { throw new ModelValidationException(message); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java index 303fd06299356..b109f9498cba6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java @@ -3,19 +3,15 @@ import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.PathSpec; - /** - * Base interface for aspect field specs. Contains a) the path to the field and b) the schema of the field + * Base interface for aspect field specs. Contains a) the path to the field and b) the schema of the + * field */ public interface FieldSpec { - /** - * Returns the {@link PathSpec} corresponding to the field, relative to its parent aspect. - */ + /** Returns the {@link PathSpec} corresponding to the field, relative to its parent aspect. */ PathSpec getPath(); - /** - * Returns the {@link DataSchema} associated with the aspect field. - */ + /** Returns the {@link DataSchema} associated with the aspect field. */ DataSchema getPegasusSchema(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java index ac1e1dfc21590..53a689602f27c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java @@ -9,11 +9,9 @@ import java.util.Map; import java.util.Optional; - public class FieldSpecUtils { - private FieldSpecUtils() { - } + private FieldSpecUtils() {} public static String getSchemaFieldName(PathSpec pathSpec) { List<String> components = pathSpec.getPathComponents(); @@ -25,16 +23,25 @@ public static String getSchemaFieldName(PathSpec pathSpec) { } public static Map<String, Object> getResolvedProperties(final DataSchema schema) { - return !schema.getResolvedProperties().isEmpty() ? schema.getResolvedProperties() : schema.getProperties(); + return !schema.getResolvedProperties().isEmpty() + ? 
schema.getResolvedProperties() + : schema.getProperties(); } public static Optional<PathSpec> getPathSpecWithAspectName(TraverserContext context) { - Object aspectAnnotationObj = context.getTopLevelSchema().getProperties().get(AspectAnnotation.ANNOTATION_NAME); - if (aspectAnnotationObj == null || !Map.class.isAssignableFrom(aspectAnnotationObj.getClass()) + Object aspectAnnotationObj = + context.getTopLevelSchema().getProperties().get(AspectAnnotation.ANNOTATION_NAME); + if (aspectAnnotationObj == null + || !Map.class.isAssignableFrom(aspectAnnotationObj.getClass()) || !((Map) aspectAnnotationObj).containsKey(AspectAnnotation.NAME_FIELD)) { return Optional.empty(); } String aspectName = (((Map) aspectAnnotationObj).get(AspectAnnotation.NAME_FIELD)).toString(); - return Optional.of(new PathSpec(ImmutableList.<String>builder().add(aspectName).addAll(context.getSchemaPathSpec()).build())); + return Optional.of( + new PathSpec( + ImmutableList.<String>builder() + .add(aspectName) + .addAll(context.getSchemaPathSpec()) + .build())); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java index 7dfe596c8de4c..549c0a9ef7916 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.models; -/** - * Exception thrown when Entity, Aspect models fail to be validated. - */ +/** Exception thrown when Entity, Aspect models fail to be validated. */ public class ModelValidationException extends RuntimeException { public ModelValidationException(String message) { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java index 0124fc8ce7bb1..13678d29da730 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java @@ -12,13 +12,11 @@ import javax.annotation.Nonnull; import lombok.ToString; - /** - * A partially specified entity spec that can be used with a {@link com.linkedin.metadata.models.registry.PatchEntityRegistry}. - * Specifically, it does not require the following things compared to a {@link DefaultEntitySpec} - * - a key aspect - * - snapshot schemas for the entity - * - typeref schemas for aspect + * A partially specified entity spec that can be used with a {@link + * com.linkedin.metadata.models.registry.PatchEntityRegistry}. 
Specifically, it does not require the + * following things compared to a {@link DefaultEntitySpec} - a key aspect - snapshot schemas for + * the entity - typeref schemas for aspect */ @ToString public class PartialEntitySpec implements EntitySpec { @@ -26,8 +24,10 @@ public class PartialEntitySpec implements EntitySpec { private final EntityAnnotation _entityAnnotation; private final Map<String, AspectSpec> _aspectSpecs; - public PartialEntitySpec(@Nonnull final Collection<AspectSpec> aspectSpecs, final EntityAnnotation entityAnnotation) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + public PartialEntitySpec( + @Nonnull final Collection<AspectSpec> aspectSpecs, final EntityAnnotation entityAnnotation) { + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = entityAnnotation; } @@ -82,7 +82,7 @@ public RecordDataSchema getSnapshotSchema() { @Override public TyperefDataSchema getAspectTyperefSchema() { - throw new UnsupportedOperationException("Partial entity specs do not contain aspect typeref schemas"); + throw new UnsupportedOperationException( + "Partial entity specs do not contain aspect typeref schemas"); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java b/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java index 1a262731a48af..a4dabea0a3345 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java @@ -3,7 +3,6 @@ import java.util.Comparator; import org.apache.commons.lang3.tuple.Pair; - public class PropertyOverrideComparator implements Comparator<Pair<String, Object>> { public int compare(Pair<String, Object> o1, Pair<String, Object> o2) { return Integer.compare(o2.getKey().split("/").length, o1.getKey().split("/").length); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java index 76454850aa2f8..06d6994e7dc45 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java @@ -8,7 +8,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class RelationshipFieldSpec implements FieldSpec { @@ -16,17 +15,13 @@ public class RelationshipFieldSpec implements FieldSpec { @NonNull RelationshipAnnotation relationshipAnnotation; @NonNull DataSchema pegasusSchema; - /** - * Returns the name of the outbound relationship extending from the field. - */ + /** Returns the name of the outbound relationship extending from the field. */ @Nonnull public String getRelationshipName() { return relationshipAnnotation.getName(); } - /** - * Returns a list of entity names representing the destination node type of the relationship. - */ + /** Returns a list of entity names representing the destination node type of the relationship. 
*/ @Nonnull public List<String> getValidDestinationTypes() { return relationshipAnnotation.getValidDestinationTypes(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java index 99c0908abbd02..ad32b315f6b1a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java @@ -12,7 +12,6 @@ import java.util.List; import java.util.Map; - /** * Implementation of {@link SchemaVisitor} responsible for extracting {@link RelationshipFieldSpec}s * from an aspect schema. @@ -41,25 +40,31 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order final Object primaryAnnotationObj = properties.get(RelationshipAnnotation.ANNOTATION_NAME); if (primaryAnnotationObj != null) { - validatePropertiesAnnotation(currentSchema, primaryAnnotationObj, context.getTraversePath().toString()); + validatePropertiesAnnotation( + currentSchema, primaryAnnotationObj, context.getTraversePath().toString()); } // Next, check resolved properties for annotations on primitives. - final Map<String, Object> resolvedProperties = FieldSpecUtils.getResolvedProperties(currentSchema); - final Object resolvedAnnotationObj = resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME); + final Map<String, Object> resolvedProperties = + FieldSpecUtils.getResolvedProperties(currentSchema); + final Object resolvedAnnotationObj = + resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME); if (resolvedAnnotationObj != null) { - if (currentSchema.isPrimitive() && isValidPrimitiveType((PrimitiveDataSchema) currentSchema)) { + if (currentSchema.isPrimitive() + && isValidPrimitiveType((PrimitiveDataSchema) currentSchema)) { final PathSpec path = new PathSpec(context.getSchemaPathSpec()); - final RelationshipAnnotation annotation = RelationshipAnnotation.fromPegasusAnnotationObject( - resolvedAnnotationObj, - path.toString() - ); - final RelationshipFieldSpec fieldSpec = new RelationshipFieldSpec(path, annotation, currentSchema); + final RelationshipAnnotation annotation = + RelationshipAnnotation.fromPegasusAnnotationObject( + resolvedAnnotationObj, path.toString()); + final RelationshipFieldSpec fieldSpec = + new RelationshipFieldSpec(path, annotation, currentSchema); _specs.add(fieldSpec); return; } - throw new ModelValidationException(String.format("Invalid @Relationship Annotation at %s", context.getSchemaPathSpec().toString())); + throw new ModelValidationException( + String.format( + "Invalid @Relationship Annotation at %s", context.getSchemaPathSpec().toString())); } } } @@ -78,7 +83,8 @@ private Boolean isValidPrimitiveType(final PrimitiveDataSchema schema) { return DataSchema.Type.STRING.equals(schema.getDereferencedDataSchema().getDereferencedType()); } - private void validatePropertiesAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) { + private void validatePropertiesAnnotation( + DataSchema currentSchema, Object annotationObj, String pathStr) { // If primitive, assume the annotation is well formed until resolvedProperties reflects it. if (currentSchema.isPrimitive()) { @@ -87,20 +93,19 @@ private void validatePropertiesAnnotation(DataSchema currentSchema, Object annot // Required override case. If the annotation keys are not overrides, they are incorrect. 
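    // [Editor's aside -- illustrative sketch, not part of the patch under review.]
    // "Override" keys are paths, relative to the annotated field, that start with
    // PathSpec.SEPARATOR ('/') and point at the primitive leaf the relationship lives on.
    // A PDL-style example of the shape this validation accepts (adapted from DataHub's
    // lineage models; treat the exact field layout as illustrative):
    //
    //   @Relationship = {
    //     "/*/dataset": {
    //       "name": "DownstreamOf",
    //       "entityTypes": [ "dataset" ]
    //     }
    //   }
    //
    // The loop below only verifies that every key begins with '/'; binding an override to
    // a concrete field happens later, once resolvedProperties reflects the annotation.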
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
-          RelationshipAnnotation.ANNOTATION_NAME,
-          pathStr
-      ));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
+              RelationshipAnnotation.ANNOTATION_NAME, pathStr));
     }
 
     Map<String, Object> annotationMap = (Map<String, Object>) annotationObj;
     for (String key : annotationMap.keySet()) {
       if (!key.startsWith(Character.toString(PathSpec.SEPARATOR))) {
         throw new ModelValidationException(
-            String.format("Invalid @Relationship Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
-                pathStr,
-                currentSchema.getType()));
+            String.format(
+                "Invalid @Relationship Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
+                pathStr, currentSchema.getType()));
       }
     }
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java
index 2346923d70a48..bdd3546b75857 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java
@@ -6,10 +6,9 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
 @Value
 public class SearchScoreFieldSpec implements FieldSpec {
   @NonNull PathSpec path;
   @NonNull SearchScoreAnnotation searchScoreAnnotation;
   @NonNull DataSchema pegasusSchema;
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java
index c4f767c4a24bc..776d5ee7a20b7 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java
@@ -15,7 +15,6 @@
 import java.util.Optional;
 import java.util.Set;
 
-
 /**
  * Implementation of {@link SchemaVisitor} responsible for extracting {@link SearchScoreFieldSpec}s
  * from an aspect schema.
@@ -24,7 +23,8 @@ public class SearchScoreFieldSpecExtractor implements SchemaVisitor {
 
   private final List<SearchScoreFieldSpec> _specs = new ArrayList<>();
   private static final Set<DataSchema.Type> NUMERIC_TYPES =
-      ImmutableSet.of(DataSchema.Type.INT, DataSchema.Type.LONG, DataSchema.Type.FLOAT, DataSchema.Type.DOUBLE);
+      ImmutableSet.of(
+          DataSchema.Type.INT, DataSchema.Type.LONG, DataSchema.Type.FLOAT, DataSchema.Type.DOUBLE);
 
   public List<SearchScoreFieldSpec> getSpecs() {
     return _specs;
@@ -46,9 +46,10 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
       if (currentSchema.isPrimitive() && isNumericType((PrimitiveDataSchema) currentSchema)) {
         extractAnnotation(annotationObj, currentSchema, context);
       } else {
-        throw new ModelValidationException(String.format(
-            "Invalid @SearchScore Annotation at %s. This annotation can only be put in on a numeric singular (non-array) field",
-            context.getSchemaPathSpec().toString()));
+        throw new ModelValidationException(
+            String.format(
+                "Invalid @SearchScore Annotation at %s. This annotation can only be put in on a numeric singular (non-array) field",
+                context.getSchemaPathSpec().toString()));
       }
     }
   }
@@ -59,19 +60,21 @@ private Object getAnnotationObj(TraverserContext context) {
     return properties.get(SearchScoreAnnotation.ANNOTATION_NAME);
   }
 
-  private void extractAnnotation(final Object annotationObj, final DataSchema currentSchema,
-      final TraverserContext context) {
+  private void extractAnnotation(
+      final Object annotationObj, final DataSchema currentSchema, final TraverserContext context) {
     final PathSpec path = new PathSpec(context.getSchemaPathSpec());
     final Optional<PathSpec> fullPath = FieldSpecUtils.getPathSpecWithAspectName(context);
     if (context.getSchemaPathSpec().contains(PathSpec.WILDCARD)) {
       throw new ModelValidationException(
-          String.format("SearchScore annotation can only be put on singular fields (non-arrays): path %s",
+          String.format(
+              "SearchScore annotation can only be put on singular fields (non-arrays): path %s",
               fullPath.orElse(path)));
     }
     final SearchScoreAnnotation annotation =
-        SearchScoreAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path),
-            path.toString());
-    final SearchScoreFieldSpec fieldSpec = new SearchScoreFieldSpec(path, annotation, currentSchema);
+        SearchScoreAnnotation.fromPegasusAnnotationObject(
+            annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString());
+    final SearchScoreFieldSpec fieldSpec =
+        new SearchScoreFieldSpec(path, annotation, currentSchema);
     _specs.add(fieldSpec);
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java
index 9ebd7e991df48..217bd8e58340a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java
@@ -6,7 +6,6 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
 @Value
 public class SearchableFieldSpec implements FieldSpec {
 
@@ -17,4 +16,4 @@ public class SearchableFieldSpec implements FieldSpec {
   public boolean isArray() {
     return path.getPathComponents().contains("*");
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java
index 8f2f42cd69cae..add6a88369b13 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java
@@ -11,14 +11,13 @@
 import com.linkedin.data.schema.annotation.SchemaVisitorTraversalResult;
 import com.linkedin.data.schema.annotation.TraverserContext;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
-import lombok.extern.slf4j.Slf4j;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * Implementation of {@link SchemaVisitor} responsible for extracting {@link SearchableFieldSpec}s
@@ -33,6 +32,7 @@ public class SearchableFieldSpecExtractor implements SchemaVisitor {
   private static final String MAP = "map";
 
   public static final Map<String, Object> PRIMARY_URN_SEARCH_PROPERTIES;
+
   static {
     PRIMARY_URN_SEARCH_PROPERTIES = new DataMap();
     PRIMARY_URN_SEARCH_PROPERTIES.put("enableAutocomplete", "true");
@@ -41,10 +41,8 @@ public class SearchableFieldSpecExtractor implements SchemaVisitor {
   }
 
   private static final float SECONDARY_URN_FACTOR = 0.1f;
-  private static final Set<String> SECONDARY_URN_FIELD_TYPES = ImmutableSet.<String>builder()
-      .add("URN")
-      .add("URN_PARTIAL")
-      .build();
+  private static final Set<String> SECONDARY_URN_FIELD_TYPES =
+      ImmutableSet.<String>builder().add("URN").add("URN_PARTIAL").build();
 
   public List<SearchableFieldSpec> getSpecs() {
     return _specs;
@@ -72,7 +70,8 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
       extractSearchableAnnotation(annotationObj, currentSchema, context);
     } else {
       throw new ModelValidationException(
-          String.format("Invalid @Searchable Annotation at %s", context.getSchemaPathSpec().toString()));
+          String.format(
+              "Invalid @Searchable Annotation at %s", context.getSchemaPathSpec().toString()));
     }
   }
 }
@@ -86,34 +85,45 @@ private Object getAnnotationObj(TraverserContext context) {
     final Object primaryAnnotationObj = properties.get(SearchableAnnotation.ANNOTATION_NAME);
 
     if (primaryAnnotationObj != null) {
-      validatePropertiesAnnotation(currentSchema, primaryAnnotationObj, context.getTraversePath().toString());
-      // Unfortunately, annotations on collections always need to be a nested map (byproduct of making overrides work)
+      validatePropertiesAnnotation(
+          currentSchema, primaryAnnotationObj, context.getTraversePath().toString());
+      // Unfortunately, annotations on collections always need to be a nested map (byproduct of
+      // making overrides work)
       // As such, for annotation maps, we make it a single entry map, where the key has no meaning
-      if (currentSchema.getDereferencedType() == DataSchema.Type.MAP && primaryAnnotationObj instanceof Map
+      if (currentSchema.getDereferencedType() == DataSchema.Type.MAP
+          && primaryAnnotationObj instanceof Map
           && !((Map) primaryAnnotationObj).isEmpty()) {
         return ((Map<?, ?>) primaryAnnotationObj).entrySet().stream().findFirst().get().getValue();
       }
     }
 
-    // Check if the path has map in it. Individual values of the maps (actual maps are caught above) can be ignored
+    // Check if the path has map in it. Individual values of the maps (actual maps are caught above)
+    // can be ignored
    if (context.getTraversePath().contains(MAP)) {
      return null;
    }
 
-    final boolean isUrn = ((DataMap) context.getParentSchema().getProperties()
-        .getOrDefault("java", new DataMap()))
-        .getOrDefault("class", "").equals("com.linkedin.common.urn.Urn");
+    final boolean isUrn =
+        ((DataMap) context.getParentSchema().getProperties().getOrDefault("java", new DataMap()))
+            .getOrDefault("class", "")
+            .equals("com.linkedin.common.urn.Urn");
 
-    final Map<String, Object> resolvedProperties = FieldSpecUtils.getResolvedProperties(currentSchema);
+    final Map<String, Object> resolvedProperties =
+        FieldSpecUtils.getResolvedProperties(currentSchema);
 
     // if primary doesn't have an annotation, then ignore secondary urns
     if (isUrn && primaryAnnotationObj != null) {
-      DataMap annotationMap = (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME);
+      DataMap annotationMap =
+          (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME);
       Map<String, Object> result = new HashMap<>(annotationMap);
 
       // Override boostScore for secondary urn
-      if (SECONDARY_URN_FIELD_TYPES.contains(annotationMap.getOrDefault("fieldType", "URN").toString())) {
-        result.put("boostScore", Float.parseFloat(String.valueOf(annotationMap.getOrDefault("boostScore", "1.0"))) * SECONDARY_URN_FACTOR);
+      if (SECONDARY_URN_FIELD_TYPES.contains(
+          annotationMap.getOrDefault("fieldType", "URN").toString())) {
+        result.put(
+            "boostScore",
+            Float.parseFloat(String.valueOf(annotationMap.getOrDefault("boostScore", "1.0")))
+                * SECONDARY_URN_FACTOR);
       }
 
       return result;
@@ -123,40 +133,47 @@ private Object getAnnotationObj(TraverserContext context) {
     }
   }
 
-  private void extractSearchableAnnotation(final Object annotationObj, final DataSchema currentSchema,
-      final TraverserContext context) {
+  private void extractSearchableAnnotation(
+      final Object annotationObj, final DataSchema currentSchema, final TraverserContext context) {
     final PathSpec path = new PathSpec(context.getSchemaPathSpec());
     final Optional<PathSpec> fullPath = FieldSpecUtils.getPathSpecWithAspectName(context);
     SearchableAnnotation annotation =
-        SearchableAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path),
-            currentSchema.getDereferencedType(), path.toString());
+        SearchableAnnotation.fromPegasusAnnotationObject(
+            annotationObj,
+            FieldSpecUtils.getSchemaFieldName(path),
+            currentSchema.getDereferencedType(),
+            path.toString());
     String schemaPathSpec = context.getSchemaPathSpec().toString();
-    if (_searchFieldNamesToPatch.containsKey(annotation.getFieldName()) && !_searchFieldNamesToPatch.get(
-        annotation.getFieldName()).equals(schemaPathSpec)) {
+    if (_searchFieldNamesToPatch.containsKey(annotation.getFieldName())
+        && !_searchFieldNamesToPatch.get(annotation.getFieldName()).equals(schemaPathSpec)) {
       // Try to use path
       String pathName = path.toString().replace('/', '_').replace("*", "");
       if (pathName.startsWith("_")) {
         pathName = pathName.replaceFirst("_", "");
       }
-      if (_searchFieldNamesToPatch.containsKey(pathName) && !_searchFieldNamesToPatch.get(pathName).equals(schemaPathSpec)) {
+      if (_searchFieldNamesToPatch.containsKey(pathName)
+          && !_searchFieldNamesToPatch.get(pathName).equals(schemaPathSpec)) {
         throw new ModelValidationException(
-            String.format("Entity has multiple searchable fields with the same field name %s, path: %s", annotation.getFieldName(), fullPath.orElse(path)));
+            String.format(
+                "Entity has multiple searchable fields with the same field name %s, path: %s",
+                annotation.getFieldName(), fullPath.orElse(path)));
       } else {
-        annotation = new SearchableAnnotation(
-            pathName,
-            annotation.getFieldType(),
-            annotation.isQueryByDefault(),
-            annotation.isEnableAutocomplete(),
-            annotation.isAddToFilters(),
-            annotation.isAddHasValuesToFilters(),
-            annotation.getFilterNameOverride(),
-            annotation.getHasValuesFilterNameOverride(),
-            annotation.getBoostScore(),
-            annotation.getHasValuesFieldName(),
-            annotation.getNumValuesFieldName(),
-            annotation.getWeightsPerFieldValue(),
-            annotation.getFieldNameAliases());
+        annotation =
+            new SearchableAnnotation(
+                pathName,
+                annotation.getFieldType(),
+                annotation.isQueryByDefault(),
+                annotation.isEnableAutocomplete(),
+                annotation.isAddToFilters(),
+                annotation.isAddHasValuesToFilters(),
+                annotation.getFilterNameOverride(),
+                annotation.getHasValuesFilterNameOverride(),
+                annotation.getBoostScore(),
+                annotation.getHasValuesFieldName(),
+                annotation.getNumValuesFieldName(),
+                annotation.getWeightsPerFieldValue(),
+                annotation.getFieldNameAliases());
       }
     }
     log.debug("Searchable annotation for field: {} : {}", schemaPathSpec, annotation);
@@ -184,35 +201,39 @@ private Boolean isValidPrimitiveType(final PrimitiveDataSchema schema) {
     return true;
   }
 
-  private void validatePropertiesAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) {
+  private void validatePropertiesAnnotation(
+      DataSchema currentSchema, Object annotationObj, String pathStr) {
 
     // If primitive, assume the annotation is well formed until resolvedProperties reflects it.
-    if (currentSchema.isPrimitive() || currentSchema.getDereferencedType().equals(DataSchema.Type.ENUM) || currentSchema
-        .getDereferencedType()
-        .equals(DataSchema.Type.MAP)) {
+    if (currentSchema.isPrimitive()
+        || currentSchema.getDereferencedType().equals(DataSchema.Type.ENUM)
+        || currentSchema.getDereferencedType().equals(DataSchema.Type.MAP)) {
      return;
    }
 
     // Required override case. If the annotation keys are not overrides, they are incorrect.
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
-          SearchableAnnotation.ANNOTATION_NAME, pathStr));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
+              SearchableAnnotation.ANNOTATION_NAME, pathStr));
     }
 
     Map<String, Object> annotationMap = (Map<String, Object>) annotationObj;
     if (annotationMap.size() == 0) {
-      throw new ModelValidationException(String.format(
-          "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
-          pathStr, currentSchema.getType()));
+      throw new ModelValidationException(
+          String.format(
+              "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
+              pathStr, currentSchema.getType()));
     }
 
     for (String key : annotationMap.keySet()) {
       if (!key.startsWith(Character.toString(PathSpec.SEPARATOR))) {
-        throw new ModelValidationException(String.format(
-            "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
-            pathStr, currentSchema.getType()));
+        throw new ModelValidationException(
+            String.format(
+                "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
+                pathStr, currentSchema.getType()));
       }
     }
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java
index 5771144fd33c2..efdb8b876cbda 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java
@@ -7,7 +7,6 @@
 import lombok.Data;
 import lombok.NonNull;
 
-
 @Data
 public class TimeseriesFieldCollectionSpec implements FieldSpec {
   @NonNull PathSpec path;
@@ -24,4 +23,4 @@ public String getName() {
   public String getKeyPathFromAnnotation() {
     return path + "/" + timeseriesFieldCollectionAnnotation.getKey();
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java
index cbd0c0581600e..6ad7c1c9d34ca 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java
@@ -6,7 +6,6 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
 @Value
 public class TimeseriesFieldSpec implements FieldSpec {
   @NonNull PathSpec path;
@@ -16,4 +15,4 @@ public class TimeseriesFieldSpec implements FieldSpec {
   public String getName() {
     return timeseriesFieldAnnotation.getStatName();
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java
index 4391bd1497741..e29b1a88afca4 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java
@@ -15,16 +15,16 @@
 import java.util.Optional;
 import lombok.Getter;
 
-
 /**
- * Implementation of {@link SchemaVisitor} responsible for extracting {@link TimeseriesFieldSpec} and
- * {@link TimeseriesFieldCollectionSpec} from an aspect schema.
+ * Implementation of {@link SchemaVisitor} responsible for extracting {@link TimeseriesFieldSpec}
+ * and {@link TimeseriesFieldCollectionSpec} from an aspect schema.
  */
 @Getter
 public class TimeseriesFieldSpecExtractor implements SchemaVisitor {
 
   private final List<TimeseriesFieldSpec> timeseriesFieldSpecs = new ArrayList<>();
-  private final List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs = new ArrayList<>();
+  private final List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs =
+      new ArrayList<>();
   private final Map<String, String> namesToPath = new HashMap<>();
 
   @Override
@@ -40,16 +40,22 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
 
       // First, check for collection in primary properties
       final Map<String, Object> primaryProperties = context.getEnclosingField().getProperties();
-      final Object timeseriesFieldAnnotationObj = primaryProperties.get(TimeseriesFieldAnnotation.ANNOTATION_NAME);
+      final Object timeseriesFieldAnnotationObj =
+          primaryProperties.get(TimeseriesFieldAnnotation.ANNOTATION_NAME);
       final Object timeseriesFieldCollectionAnnotationObj =
           primaryProperties.get(TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME);
-      if (currentSchema.getType() == DataSchema.Type.RECORD && timeseriesFieldCollectionAnnotationObj != null) {
-        validateCollectionAnnotation(currentSchema, timeseriesFieldCollectionAnnotationObj,
+      if (currentSchema.getType() == DataSchema.Type.RECORD
+          && timeseriesFieldCollectionAnnotationObj != null) {
+        validateCollectionAnnotation(
+            currentSchema,
+            timeseriesFieldCollectionAnnotationObj,
             context.getTraversePath().toString());
-        addTimeseriesFieldCollectionSpec(currentSchema, path, timeseriesFieldCollectionAnnotationObj);
-      } else if (timeseriesFieldAnnotationObj != null && !path.getPathComponents()
-          .get(path.getPathComponents().size() - 1)
-          .equals("*")) { // For arrays make sure to add just the array form
+        addTimeseriesFieldCollectionSpec(
+            currentSchema, path, timeseriesFieldCollectionAnnotationObj);
+      } else if (timeseriesFieldAnnotationObj != null
+          && !path.getPathComponents()
+              .get(path.getPathComponents().size() - 1)
+              .equals("*")) { // For arrays make sure to add just the array form
         addTimeseriesFieldSpec(currentSchema, path, timeseriesFieldAnnotationObj);
       } else {
         addTimeseriesFieldCollectionKey(path);
@@ -57,7 +63,8 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
     }
   }
 
-  private void validateCollectionAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) {
+  private void validateCollectionAnnotation(
+      DataSchema currentSchema, Object annotationObj, String pathStr) {
 
     // If primitive, assume the annotation is well formed until resolvedProperties reflects it.
     if (currentSchema.isPrimitive()) {
@@ -66,21 +73,25 @@ private void validateCollectionAnnotation(DataSchema currentSchema, Object annot
 
     // Required override case. If the annotation keys are not overrides, they are incorrect.
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
-          TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME, pathStr));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
+              TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME, pathStr));
     }
   }
 
-  private void addTimeseriesFieldCollectionSpec(DataSchema currentSchema, PathSpec path, Object annotationObj) {
+  private void addTimeseriesFieldCollectionSpec(
+      DataSchema currentSchema, PathSpec path, Object annotationObj) {
     if (currentSchema.getType() == DataSchema.Type.RECORD) {
       TimeseriesFieldCollectionAnnotation annotation =
-          TimeseriesFieldCollectionAnnotation.fromPegasusAnnotationObject(annotationObj,
-              FieldSpecUtils.getSchemaFieldName(path), path.toString());
-      if (namesToPath.containsKey(annotation.getCollectionName()) && !namesToPath.get(annotation.getCollectionName())
-          .equals(path.toString())) {
+          TimeseriesFieldCollectionAnnotation.fromPegasusAnnotationObject(
+              annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString());
+      if (namesToPath.containsKey(annotation.getCollectionName())
+          && !namesToPath.get(annotation.getCollectionName()).equals(path.toString())) {
         throw new ModelValidationException(
-            String.format("There are multiple fields with the same name: %s", annotation.getCollectionName()));
+            String.format(
+                "There are multiple fields with the same name: %s",
+                annotation.getCollectionName()));
       }
       namesToPath.put(annotation.getCollectionName(), path.toString());
       timeseriesFieldCollectionSpecs.add(
@@ -88,25 +99,32 @@ private void addTimeseriesFieldCollectionSpec(DataSchema currentSchema, PathSpec
     }
   }
 
-  private void addTimeseriesFieldSpec(DataSchema currentSchema, PathSpec path, Object annotationObj) {
+  private void addTimeseriesFieldSpec(
+      DataSchema currentSchema, PathSpec path, Object annotationObj) {
     // First check whether the stat is part of a collection
     String pathStr = path.toString();
-    Optional<TimeseriesFieldCollectionSpec> fieldCollectionSpec = timeseriesFieldCollectionSpecs.stream()
-        .filter(spec -> pathStr.startsWith(spec.getPath().toString()))
-        .findFirst();
+    Optional<TimeseriesFieldCollectionSpec> fieldCollectionSpec =
+        timeseriesFieldCollectionSpecs.stream()
+            .filter(spec -> pathStr.startsWith(spec.getPath().toString()))
+            .findFirst();
     TimeseriesFieldAnnotation annotation =
-        TimeseriesFieldAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path),
-            path.toString());
+        TimeseriesFieldAnnotation.fromPegasusAnnotationObject(
+            annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString());
     if (fieldCollectionSpec.isPresent()) {
-      fieldCollectionSpec.get()
+      fieldCollectionSpec
+          .get()
           .getTimeseriesFieldSpecMap()
-          .put(annotation.getStatName(),
-              new TimeseriesFieldSpec(getRelativePath(path, fieldCollectionSpec.get().getPath()), annotation,
+          .put(
+              annotation.getStatName(),
+              new TimeseriesFieldSpec(
+                  getRelativePath(path, fieldCollectionSpec.get().getPath()),
+                  annotation,
                   currentSchema));
     } else {
       if (path.getPathComponents().contains("*")) {
         throw new ModelValidationException(
-            String.format("No matching collection found for the given timeseries field %s", pathStr));
+            String.format(
+                "No matching collection found for the given timeseries field %s", pathStr));
      }
      timeseriesFieldSpecs.add(new TimeseriesFieldSpec(path, annotation, currentSchema));
    }
@@ -123,7 +141,9 @@ private void addTimeseriesFieldCollectionKey(PathSpec path) {
 
   private PathSpec getRelativePath(PathSpec child, PathSpec parent) {
     return new PathSpec(
-        child.getPathComponents().subList(parent.getPathComponents().size(), child.getPathComponents().size()));
+        child
+            .getPathComponents()
+            .subList(parent.getPathComponents().size(), child.getPathComponents().size()));
   }
 
   @Override
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java
index 3d9e1cf04cd36..7aa5be69a0541 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java
@@ -4,11 +4,11 @@
 import java.util.Optional;
 import lombok.experimental.UtilityClass;
 
-
 @UtilityClass
 public class AnnotationUtils {
   <T> Optional<T> getField(final Map fieldMap, final String fieldName, final Class<T> fieldType) {
-    if (fieldMap.containsKey(fieldName) && fieldType.isAssignableFrom(fieldMap.get(fieldName).getClass())) {
+    if (fieldMap.containsKey(fieldName)
+        && fieldType.isAssignableFrom(fieldMap.get(fieldName).getClass())) {
       return Optional.of(fieldType.cast(fieldMap.get(fieldName)));
     }
     return Optional.empty();
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java
index d116170e10d22..56dca9ab3eaf9 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java
@@ -7,10 +7,7 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
-/**
- * Simple object representation of the @Aspect annotation metadata.
- */
+/** Simple object representation of the @Aspect annotation metadata. */
 @Value
 public class AspectAnnotation {
 
@@ -29,15 +26,12 @@ public class AspectAnnotation {
 
   @Nonnull
   public static AspectAnnotation fromSchemaProperty(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-              ANNOTATION_NAME,
-              context
-          ));
+              ANNOTATION_NAME, context));
     }
     final Map map = (Map) annotationObj;
     final Optional<String> name = AnnotationUtils.getField(map, NAME_FIELD, String.class);
@@ -45,10 +39,7 @@ public static AspectAnnotation fromSchemaProperty(
       throw new ModelValidationException(
           String.format(
               "Failed to validated @%s annotation declared at %s: missing '%s' property",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
 
     final Optional<String> type = AnnotationUtils.getField(map, TYPE_FIELD, String.class);
@@ -56,6 +47,10 @@ public static AspectAnnotation fromSchemaProperty(
     Optional<Boolean> autoRender = AnnotationUtils.getField(map, AUTO_RENDER_FIELD, Boolean.class);
     Optional<DataMap> renderSpec = AnnotationUtils.getField(map, RENDER_SPEC_FIELD, DataMap.class);
 
-    return new AspectAnnotation(name.get(), isTimeseries, autoRender.orElseGet(() -> false), renderSpec.orElseGet(() -> null));
+    return new AspectAnnotation(
+        name.get(),
+        isTimeseries,
+        autoRender.orElseGet(() -> false),
+        renderSpec.orElseGet(() -> null));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java
index e7174dcc9b176..94cdf130d1e88 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java
@@ -6,10 +6,7 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
-/**
- * Simple object representation of the @Entity annotation metadata.
- */
+/** Simple object representation of the @Entity annotation metadata. */
 @Value
 public class EntityAnnotation {
 
@@ -22,39 +19,31 @@ public class EntityAnnotation {
 
   @Nonnull
   public static EntityAnnotation fromSchemaProperty(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-              ANNOTATION_NAME,
-              context
-          ));
+              ANNOTATION_NAME, context));
     }
 
     Map map = (Map) annotationObj;
     final Optional<String> name = AnnotationUtils.getField(map, NAME_FIELD, String.class);
-    final Optional<String> keyAspect = AnnotationUtils.getField(map, KEY_ASPECT_FIELD, String.class);
+    final Optional<String> keyAspect =
+        AnnotationUtils.getField(map, KEY_ASPECT_FIELD, String.class);
 
     if (!name.isPresent()) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid field '%s'. Expected type String",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
 
     if (!keyAspect.isPresent()) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid field '%s'. Expected type String",
-              ANNOTATION_NAME,
-              context,
-              KEY_ASPECT_FIELD
-          ));
+              ANNOTATION_NAME, context, KEY_ASPECT_FIELD));
     }
 
     return new EntityAnnotation(name.get(), keyAspect.get());
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java
index ee0229dabfc37..ddfa23412955d 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java
@@ -6,10 +6,7 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
-/**
- * An annotation associated with a DataHub Event.
- */
+/** An annotation associated with a DataHub Event. */
 @Value
 public class EventAnnotation {
 
@@ -20,15 +17,12 @@ public class EventAnnotation {
 
   @Nonnull
   public static EventAnnotation fromPegasusAnnotationObject(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context
-  ) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-          ANNOTATION_NAME,
-          context
-      ));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+              ANNOTATION_NAME, context));
     }
 
     Map map = (Map) annotationObj;
@@ -37,10 +31,7 @@ public static EventAnnotation fromPegasusAnnotationObject(
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type String",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
     return new EventAnnotation(name.get());
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java
index 7631f95c3a5ff..a22ef56d60006 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java
@@ -8,10 +8,7 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
-/**
- * Simple object representation of the @Relationship annotation metadata.
- */
+/** Simple object representation of the @Relationship annotation metadata. */
 @Value
 public class RelationshipAnnotation {
 
@@ -38,15 +35,12 @@ public class RelationshipAnnotation {
 
   @Nonnull
   public static RelationshipAnnotation fromPegasusAnnotationObject(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context
-  ) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-          ANNOTATION_NAME,
-          context
-      ));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+              ANNOTATION_NAME, context));
     }
 
     Map map = (Map) annotationObj;
@@ -55,13 +49,11 @@ public static RelationshipAnnotation fromPegasusAnnotationObject(
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type String",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
 
-    final Optional<List> entityTypesList = AnnotationUtils.getField(map, ENTITY_TYPES_FIELD, List.class);
+    final Optional<List> entityTypesList =
+        AnnotationUtils.getField(map, ENTITY_TYPES_FIELD, List.class);
     final List<String> entityTypes = new ArrayList<>();
     if (entityTypesList.isPresent()) {
       for (Object entityTypeObj : entityTypesList.get()) {
@@ -69,21 +61,22 @@ public static RelationshipAnnotation fromPegasusAnnotationObject(
           throw new ModelValidationException(
               String.format(
                   "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type List<String>",
-                  ANNOTATION_NAME,
-                  context,
-                  ENTITY_TYPES_FIELD
-              ));
+                  ANNOTATION_NAME, context, ENTITY_TYPES_FIELD));
         }
         entityTypes.add((String) entityTypeObj);
       }
     }
 
-    final Optional<Boolean> isUpstream = AnnotationUtils.getField(map, IS_UPSTREAM_FIELD, Boolean.class);
-    final Optional<Boolean> isLineage = AnnotationUtils.getField(map, IS_LINEAGE_FIELD, Boolean.class);
+    final Optional<Boolean> isUpstream =
+        AnnotationUtils.getField(map, IS_UPSTREAM_FIELD, Boolean.class);
+    final Optional<Boolean> isLineage =
+        AnnotationUtils.getField(map, IS_LINEAGE_FIELD, Boolean.class);
     final Optional<String> createdOn = AnnotationUtils.getField(map, CREATED_ON, String.class);
-    final Optional<String> createdActor = AnnotationUtils.getField(map, CREATED_ACTOR, String.class);
+    final Optional<String> createdActor =
+        AnnotationUtils.getField(map, CREATED_ACTOR, String.class);
     final Optional<String> updatedOn = AnnotationUtils.getField(map, UPDATED_ON, String.class);
-    final Optional<String> updatedActor = AnnotationUtils.getField(map, UPDATED_ACTOR, String.class);
+    final Optional<String> updatedActor =
+        AnnotationUtils.getField(map, UPDATED_ACTOR, String.class);
     final Optional<String> properties = AnnotationUtils.getField(map, PROPERTIES, String.class);
 
     return new RelationshipAnnotation(
@@ -95,6 +88,6 @@ public static RelationshipAnnotation fromPegasusAnnotationObject(
         createdActor.orElse(null),
         updatedOn.orElse(null),
         updatedActor.orElse(null),
-        properties.orElse(null)
-    );
-  }
-}
\ No newline at end of file
+        properties.orElse(null));
+  }
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java
index 77c5920ca9ba8..2221650eac1c9 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java
@@ -8,7 +8,6 @@
 import lombok.Value;
 import org.apache.commons.lang3.EnumUtils;
 
-
 /**
  * Annotation indicating how the search results should be ranked by the underlying search service
  */
@@ -35,26 +34,31 @@ public enum Modifier {
   }
 
   @Nonnull
-  public static SearchScoreAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final String context) {
+  public static SearchScoreAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
               ANNOTATION_NAME, context));
     }
 
     Map map = (Map) annotationObj;
     final Optional<String> fieldName = AnnotationUtils.getField(map, "fieldName", String.class);
     final Optional<Double> weight = AnnotationUtils.getField(map, "weight", Double.class);
-    final Optional<Double> defaultValue = AnnotationUtils.getField(map, "defaultValue", Double.class);
+    final Optional<Double> defaultValue =
+        AnnotationUtils.getField(map, "defaultValue", Double.class);
     final Optional<String> modifierStr = AnnotationUtils.getField(map, "modifier", String.class);
     if (modifierStr.isPresent() && !EnumUtils.isValidEnum(Modifier.class, modifierStr.get())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid field 'modifier'. Invalid modifier provided. Valid modifiers are %s",
-          ANNOTATION_NAME, context, Arrays.toString(Modifier.values())));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid field 'modifier'. Invalid modifier provided. Valid modifiers are %s",
+              ANNOTATION_NAME, context, Arrays.toString(Modifier.values())));
     }
     final Optional<Modifier> modifier = modifierStr.map(Modifier::valueOf);
-    return new SearchScoreAnnotation(fieldName.orElse(schemaFieldName), weight.orElse(1.0), defaultValue.orElse(0.0),
-        modifier);
+    return new SearchScoreAnnotation(
+        fieldName.orElse(schemaFieldName), weight.orElse(1.0), defaultValue.orElse(0.0), modifier);
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
index efa30a948e237..d5eae2a2315fa 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
@@ -4,7 +4,6 @@
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.data.schema.DataSchema;
 import com.linkedin.metadata.models.ModelValidationException;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -15,17 +14,19 @@
 import lombok.Value;
 import org.apache.commons.lang3.EnumUtils;
 
-
-/**
- * Simple object representation of the @Searchable annotation metadata.
- */
+/** Simple object representation of the @Searchable annotation metadata. */
 @Value
 public class SearchableAnnotation {
 
   public static final String FIELD_NAME_ALIASES = "fieldNameAliases";
   public static final String ANNOTATION_NAME = "Searchable";
   private static final Set<FieldType> DEFAULT_QUERY_FIELD_TYPES =
-      ImmutableSet.of(FieldType.TEXT, FieldType.TEXT_PARTIAL, FieldType.WORD_GRAM, FieldType.URN, FieldType.URN_PARTIAL);
+      ImmutableSet.of(
+          FieldType.TEXT,
+          FieldType.TEXT_PARTIAL,
+          FieldType.WORD_GRAM,
+          FieldType.URN,
+          FieldType.URN_PARTIAL);
 
   // Name of the field in the search index. Defaults to the field name in the schema
   String fieldName;
@@ -71,12 +72,15 @@ public enum FieldType {
   }
 
   @Nonnull
-  public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final DataSchema.Type schemaDataType,
+  public static SearchableAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final DataSchema.Type schemaDataType,
       @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
               ANNOTATION_NAME, context));
     }
 
@@ -84,23 +88,32 @@ public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Ob
     final Optional<String> fieldName = AnnotationUtils.getField(map, "fieldName", String.class);
     final Optional<String> fieldType = AnnotationUtils.getField(map, "fieldType", String.class);
     if (fieldType.isPresent() && !EnumUtils.isValidEnum(FieldType.class, fieldType.get())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid field 'fieldType'. Invalid fieldType provided. Valid types are %s",
-          ANNOTATION_NAME, context, Arrays.toString(FieldType.values())));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid field 'fieldType'. Invalid fieldType provided. Valid types are %s",
+              ANNOTATION_NAME, context, Arrays.toString(FieldType.values())));
     }
 
-    final Optional<Boolean> queryByDefault = AnnotationUtils.getField(map, "queryByDefault", Boolean.class);
-    final Optional<Boolean> enableAutocomplete = AnnotationUtils.getField(map, "enableAutocomplete", Boolean.class);
-    final Optional<Boolean> addToFilters = AnnotationUtils.getField(map, "addToFilters", Boolean.class);
-    final Optional<Boolean> addHasValuesToFilters = AnnotationUtils.getField(map, "addHasValuesToFilters", Boolean.class);
-    final Optional<String> filterNameOverride = AnnotationUtils.getField(map, "filterNameOverride", String.class);
+    final Optional<Boolean> queryByDefault =
+        AnnotationUtils.getField(map, "queryByDefault", Boolean.class);
+    final Optional<Boolean> enableAutocomplete =
+        AnnotationUtils.getField(map, "enableAutocomplete", Boolean.class);
+    final Optional<Boolean> addToFilters =
+        AnnotationUtils.getField(map, "addToFilters", Boolean.class);
+    final Optional<Boolean> addHasValuesToFilters =
+        AnnotationUtils.getField(map, "addHasValuesToFilters", Boolean.class);
+    final Optional<String> filterNameOverride =
+        AnnotationUtils.getField(map, "filterNameOverride", String.class);
     final Optional<String> hasValuesFilterNameOverride =
         AnnotationUtils.getField(map, "hasValuesFilterNameOverride", String.class);
     final Optional<Double> boostScore = AnnotationUtils.getField(map, "boostScore", Double.class);
-    final Optional<String> hasValuesFieldName = AnnotationUtils.getField(map, "hasValuesFieldName", String.class);
-    final Optional<String> numValuesFieldName = AnnotationUtils.getField(map, "numValuesFieldName", String.class);
+    final Optional<String> hasValuesFieldName =
+        AnnotationUtils.getField(map, "hasValuesFieldName", String.class);
+    final Optional<String> numValuesFieldName =
+        AnnotationUtils.getField(map, "numValuesFieldName", String.class);
     final Optional<Map> weightsPerFieldValueMap =
-        AnnotationUtils.getField(map, "weightsPerFieldValue", Map.class).map(m -> (Map<Object, Double>) m);
+        AnnotationUtils.getField(map, "weightsPerFieldValue", Map.class)
+            .map(m -> (Map<Object, Double>) m);
     final List<String> fieldNameAliases = getFieldNameAliases(map);
 
     final FieldType resolvedFieldType = getFieldType(fieldType, schemaDataType);
@@ -120,7 +133,8 @@ public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Ob
         fieldNameAliases);
   }
 
-  private static FieldType getFieldType(Optional<String> maybeFieldType, DataSchema.Type schemaDataType) {
+  private static FieldType getFieldType(
+      Optional<String> maybeFieldType, DataSchema.Type schemaDataType) {
     if (!maybeFieldType.isPresent()) {
       return getDefaultFieldType(schemaDataType);
     }
@@ -139,7 +153,8 @@ private static FieldType getDefaultFieldType(DataSchema.Type schemaDataType) {
     }
   }
 
-  private static Boolean getQueryByDefault(Optional<Boolean> maybeQueryByDefault, FieldType fieldType) {
+  private static Boolean getQueryByDefault(
+      Optional<Boolean> maybeQueryByDefault, FieldType fieldType) {
     if (!maybeQueryByDefault.isPresent()) {
       if (DEFAULT_QUERY_FIELD_TYPES.contains(fieldType)) {
         return Boolean.TRUE;
@@ -168,7 +183,8 @@ private static String capitalizeFirstLetter(String str) {
 
   private static List<String> getFieldNameAliases(Map map) {
     final List<String> aliases = new ArrayList<>();
-    final Optional<List> fieldNameAliases = AnnotationUtils.getField(map, FIELD_NAME_ALIASES, List.class);
+    final Optional<List> fieldNameAliases =
+        AnnotationUtils.getField(map, FIELD_NAME_ALIASES, List.class);
     if (fieldNameAliases.isPresent()) {
       for (Object alias : fieldNameAliases.get()) {
         aliases.add((String) alias);
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java
index ca74c2df385f1..62ab073e41acd 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java
@@ -6,7 +6,6 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
 @Value
 public class TimeseriesFieldAnnotation {
 
@@ -16,23 +15,29 @@ public class TimeseriesFieldAnnotation {
   AggregationType aggregationType;
 
   @Nonnull
-  public static TimeseriesFieldAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final String context) {
+  public static TimeseriesFieldAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
              ANNOTATION_NAME, context));
     }
 
     Map map = (Map) annotationObj;
     final Optional<String> statName = AnnotationUtils.getField(map, "name", String.class);
-    final Optional<String> aggregationType = AnnotationUtils.getField(map, "aggregationType", String.class);
+    final Optional<String> aggregationType =
+        AnnotationUtils.getField(map, "aggregationType", String.class);
 
-    return new TimeseriesFieldAnnotation(statName.orElse(schemaFieldName),
+    return new TimeseriesFieldAnnotation(
+        statName.orElse(schemaFieldName),
         aggregationType.map(AggregationType::valueOf).orElse(AggregationType.LATEST));
   }
 
   public enum AggregationType {
-    LATEST, SUM
+    LATEST,
+    SUM
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java
index c507d88445cdf..d8816e0667316 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java
@@ -6,7 +6,6 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
 @Value
 public class TimeseriesFieldCollectionAnnotation {
   public static final String ANNOTATION_NAME = "TimeseriesFieldCollection";
@@ -15,11 +14,14 @@ public class TimeseriesFieldCollectionAnnotation {
   String key;
 
   @Nonnull
-  public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final String context) {
+  public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
              ANNOTATION_NAME, context));
     }
 
@@ -28,10 +30,12 @@ public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(@N
     final Optional<String> key = AnnotationUtils.getField(map, "key", String.class);
     if (!key.isPresent()) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: 'key' field is required", ANNOTATION_NAME,
-              context));
+          String.format(
+              "Failed to validate @%s annotation declared at %s: 'key' field is required",
+              ANNOTATION_NAME, context));
     }
-    return new TimeseriesFieldCollectionAnnotation(collectionName.orElse(schemaFieldName), key.get());
+    return new TimeseriesFieldCollectionAnnotation(
+        collectionName.orElse(schemaFieldName), key.get());
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java
index 720eb87ec5c0e..b0ff6459ffbee 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java
@@ -10,29 +10,31 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}.
- */
+/** Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. */
 @Slf4j
 public class AspectExtractor {
 
-  private AspectExtractor() {
-  }
+  private AspectExtractor() {}
 
   public static Map<String, RecordTemplate> extractAspectRecords(RecordTemplate snapshot) {
-    return ModelUtils.getAspectsFromSnapshot(snapshot)
-        .stream()
-        .collect(Collectors.toMap(record -> getAspectNameFromSchema(record.schema()), Function.identity()));
+    return ModelUtils.getAspectsFromSnapshot(snapshot).stream()
+        .collect(
+            Collectors.toMap(
+                record -> getAspectNameFromSchema(record.schema()), Function.identity()));
   }
 
   private static String getAspectNameFromSchema(final RecordDataSchema aspectSchema) {
-    final Object aspectAnnotationObj = aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME);
+    final Object aspectAnnotationObj =
+        aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME);
     if (aspectAnnotationObj != null) {
-      return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()).getName();
+      return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName())
+          .getName();
    }
-    log.error(String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
+    log.error(
+        String.format(
+            "Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
     throw new IllegalArgumentException(
-        String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
+        String.format(
+            "Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java
index 6cc4fa4cd362d..899f66e66ea5a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java
@@ -1,8 +1,8 @@
 package com.linkedin.metadata.models.extractor;
 
+import com.datahub.util.RecordUtils;
 import com.linkedin.data.schema.PathSpec;
 import com.linkedin.data.template.RecordTemplate;
-import com.datahub.util.RecordUtils;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.FieldSpec;
@@ -16,28 +16,26 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
-/**
- * Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}.
- */
+/** Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. */
 public class FieldExtractor {
 
   private static final String ARRAY_WILDCARD = "*";
   private static final int MAX_VALUE_LENGTH = 200;
 
-  private FieldExtractor() {
-  }
+  private FieldExtractor() {}
 
   private static long getNumArrayWildcards(PathSpec pathSpec) {
     return pathSpec.getPathComponents().stream().filter(ARRAY_WILDCARD::equals).count();
   }
 
   // Extract the value of each field in the field specs from the input record
-  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull RecordTemplate record, List<T> fieldSpecs) {
+  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(
+      @Nonnull RecordTemplate record, List<T> fieldSpecs) {
     return extractFields(record, fieldSpecs, MAX_VALUE_LENGTH);
   }
 
-  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull RecordTemplate record, List<T> fieldSpecs, int maxValueLength) {
+  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(
+      @Nonnull RecordTemplate record, List<T> fieldSpecs, int maxValueLength) {
     final Map<T, List<Object>> extractedFields = new HashMap<>();
     for (T fieldSpec : fieldSpecs) {
       Optional<Object> value = RecordUtils.getFieldValue(record, fieldSpec.getPath());
@@ -49,12 +47,16 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull
         if (numArrayWildcards == 0) {
           // For maps, convert it into a list of the form key=value (Filter out long values)
           if (value.get() instanceof Map) {
-            extractedFields.put(fieldSpec, ((Map<?, ?>) value.get()).entrySet()
-                .stream()
-                .map(entry -> new Pair<>(entry.getKey().toString(), entry.getValue().toString()))
-                .filter(entry -> entry.getValue().length() < maxValueLength)
-                .map(entry -> entry.getKey() + "=" + entry.getValue())
-                .collect(Collectors.toList()));
+            extractedFields.put(
+                fieldSpec,
+                ((Map<?, ?>) value.get())
+                    .entrySet().stream()
+                        .map(
+                            entry ->
+                                new Pair<>(entry.getKey().toString(), entry.getValue().toString()))
+                        .filter(entry -> entry.getValue().length() < maxValueLength)
+                        .map(entry -> entry.getKey() + "=" + entry.getValue())
+                        .collect(Collectors.toList()));
           } else {
             extractedFields.put(fieldSpec, Collections.singletonList(value.get()));
           }
@@ -62,7 +64,10 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull
           List<Object> valueList = (List<Object>) value.get();
           // If the field is a nested list of values, flatten it
           for (int i = 0; i < numArrayWildcards - 1; i++) {
-            valueList = valueList.stream().flatMap(v -> ((List<Object>) v).stream()).collect(Collectors.toList());
+            valueList =
+                valueList.stream()
+                    .flatMap(v -> ((List<Object>) v).stream())
+                    .collect(Collectors.toList());
          }
           extractedFields.put(fieldSpec, valueList);
         }
@@ -71,14 +76,20 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull
return extractedFields; } - public static <T extends FieldSpec> Map<T, List<Object>> extractFieldsFromSnapshot(RecordTemplate snapshot, - EntitySpec entitySpec, Function<AspectSpec, List<T>> getFieldSpecsFunc, int maxValueLength) { + public static <T extends FieldSpec> Map<T, List<Object>> extractFieldsFromSnapshot( + RecordTemplate snapshot, + EntitySpec entitySpec, + Function<AspectSpec, List<T>> getFieldSpecsFunc, + int maxValueLength) { final Map<String, RecordTemplate> aspects = AspectExtractor.extractAspectRecords(snapshot); final Map<T, List<Object>> extractedFields = new HashMap<>(); - aspects.keySet() - .stream() - .map(aspectName -> FieldExtractor.extractFields(aspects.get(aspectName), - getFieldSpecsFunc.apply(entitySpec.getAspectSpec(aspectName)), maxValueLength)) + aspects.keySet().stream() + .map( + aspectName -> + FieldExtractor.extractFields( + aspects.get(aspectName), + getFieldSpecsFunc.apply(entitySpec.getAspectSpec(aspectName)), + maxValueLength)) .forEach(extractedFields::putAll); return extractedFields; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index 95195620cf85a..fba916abd2430 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @@ -32,13 +35,9 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects - * from an entity registry config yaml file + * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from an + * entity registry config yaml file */ @Slf4j public class ConfigEntityRegistry implements EntityRegistry { @@ -51,37 +50,55 @@ public class ConfigEntityRegistry implements EntityRegistry { private final Map<String, AspectSpec> _aspectNameToSpec; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public ConfigEntityRegistry(Pair<Path, Path> configFileClassPathPair) throws IOException { - this(DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), configFileClassPathPair.getFirst()); + this( + DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), + configFileClassPathPair.getFirst()); } - public ConfigEntityRegistry(String 
entityRegistryRoot) throws EntityRegistryException, IOException { + public ConfigEntityRegistry(String entityRegistryRoot) + throws EntityRegistryException, IOException { this(getFileAndClassPath(entityRegistryRoot)); } - private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot) throws IOException, EntityRegistryException { + private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot) + throws IOException, EntityRegistryException { Path entityRegistryRootLoc = Paths.get(entityRegistryRoot); if (Files.isDirectory(entityRegistryRootLoc)) { // Look for entity_registry.yml or entity_registry.yaml in the root folder - List<Path> yamlFiles = Files.walk(entityRegistryRootLoc, 1) - .filter(Files::isRegularFile) - .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) - .collect(Collectors.toList()); + List<Path> yamlFiles = + Files.walk(entityRegistryRootLoc, 1) + .filter(Files::isRegularFile) + .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) + .collect(Collectors.toList()); if (yamlFiles.size() == 0) { throw new EntityRegistryException( - String.format("Did not find an entity registry (entity_registry.yaml/yml) under %s", entityRegistryRootLoc)); + String.format( + "Did not find an entity registry (entity_registry.yaml/yml) under %s", + entityRegistryRootLoc)); } if (yamlFiles.size() > 1) { - log.warn("Found more than one yaml file in the directory {}. Will pick the first {}", - entityRegistryRootLoc, yamlFiles.get(0)); + log.warn( + "Found more than one yaml file in the directory {}. Will pick the first {}", + entityRegistryRootLoc, + yamlFiles.get(0)); } Path entityRegistryFile = yamlFiles.get(0); - log.info("Loading custom config entity file: {}, dir: {}", entityRegistryFile, entityRegistryRootLoc); + log.info( + "Loading custom config entity file: {}, dir: {}", + entityRegistryFile, + entityRegistryRootLoc); return new Pair<>(entityRegistryFile, entityRegistryRootLoc); } else { // We assume that the file being passed in is a bare entity registry yaml file @@ -94,7 +111,8 @@ public ConfigEntityRegistry(InputStream configFileInputStream) { this(DataSchemaFactory.getInstance(), configFileInputStream); } - public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath) throws FileNotFoundException { + public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath) + throws FileNotFoundException { this(dataSchemaFactory, new FileInputStream(configFilePath.toString())); } @@ -106,7 +124,8 @@ public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con } catch (IOException e) { e.printStackTrace(); throw new IllegalArgumentException( - String.format("Error while reading config file in path %s: %s", configFileStream, e.getMessage())); + String.format( + "Error while reading config file in path %s: %s", configFileStream, e.getMessage())); } if (entities.getId() != null) { identifier = entities.getId(); @@ -120,12 +139,16 @@ public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con for (Entity entity : entities.getEntities()) { List<AspectSpec> aspectSpecs = new ArrayList<>(); aspectSpecs.add(buildAspectSpec(entity.getKeyAspect(), entitySpecBuilder)); - entity.getAspects().forEach(aspect -> aspectSpecs.add(buildAspectSpec(aspect, entitySpecBuilder))); + entity + .getAspects() + .forEach(aspect -> aspectSpecs.add(buildAspectSpec(aspect, entitySpecBuilder))); EntitySpec entitySpec; Optional<DataSchema> 
entitySchema = dataSchemaFactory.getEntitySchema(entity.getName()); if (!entitySchema.isPresent()) { - entitySpec = entitySpecBuilder.buildConfigEntitySpec(entity.getName(), entity.getKeyAspect(), aspectSpecs); + entitySpec = + entitySpecBuilder.buildConfigEntitySpec( + entity.getName(), entity.getKeyAspect(), aspectSpecs); } else { entitySpec = entitySpecBuilder.buildEntitySpec(entitySchema.get(), aspectSpecs); } @@ -210,7 +233,7 @@ public Map<String, EventSpec> getEventSpecs() { @Override public AspectTemplateEngine getAspectTemplateEngine() { - //TODO: add support for config based aspect templates + // TODO: add support for config based aspect templates return new AspectTemplateEngine(); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java index cf9ca68d0ee4f..8c415d56f0d5f 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java @@ -9,7 +9,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** * The Entity Registry provides a mechanism to retrieve metadata about entities modeled in GMA. * Metadata includes the entity's common name, the aspects that comprise it, and search index + @@ -23,8 +22,10 @@ default String getIdentifier() { /** * Given an entity name, returns an instance of {@link DefaultEntitySpec} + * * @param entityName the name of the entity to be retrieved - * @return an {@link DefaultEntitySpec} corresponding to the entity name provided, null if none exists. + * @return an {@link DefaultEntitySpec} corresponding to the entity name provided, null if none + * exists. */ @Nonnull EntitySpec getEntitySpec(@Nonnull final String entityName); @@ -33,34 +34,36 @@ default String getIdentifier() { * Given an event name, returns an instance of {@link DefaultEventSpec}. * * @param eventName the name of the event to be retrieved - * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none exists. + * @return an {@link DefaultEventSpec} corresponding to the event name provided, null if none + * exists. */ @Nullable EventSpec getEventSpec(@Nonnull final String eventName); /** * Returns all {@link DefaultEntitySpec}s that the registry is aware of. + * * @return a map of String to {@link DefaultEntitySpec}s, empty map if none exists. */ @Nonnull Map<String, EntitySpec> getEntitySpecs(); - /** * Returns all {@link AspectSpec}s that the registry is aware of. + * * @return a map of String to {@link AspectSpec}s, empty map if none exists. */ @Nonnull Map<String, AspectSpec> getAspectSpecs(); - /** - * Returns all {@link EventSpec}s that the registry is aware of. - */ + /** Returns all {@link EventSpec}s that the registry is aware of. 
*/ @Nonnull Map<String, EventSpec> getEventSpecs(); /** - * Returns an {@link AspectTemplateEngine} that is used for generating templates from {@link com.linkedin.metadata.models.AspectSpec}s + * Returns an {@link AspectTemplateEngine} that is used for generating templates from {@link + * com.linkedin.metadata.models.AspectSpec}s + * * @return a template engine instance associated with this registry */ @Nonnull diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java index d43782ce0f07f..8d108445e67be 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java @@ -8,17 +8,17 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class EntityRegistryUtils { - private EntityRegistryUtils() { - - } + private EntityRegistryUtils() {} public static Map<String, AspectSpec> populateAspectMap(List<EntitySpec> entitySpecs) { return entitySpecs.stream() .map(EntitySpec::getAspectSpecs) .flatMap(Collection::stream) - .collect(Collectors.toMap(AspectSpec::getName, Function.identity(), (aspectSpec1, aspectSpec2) -> aspectSpec1)); + .collect( + Collectors.toMap( + AspectSpec::getName, + Function.identity(), + (aspectSpec1, aspectSpec2) -> aspectSpec1)); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java index 345d5aa02f398..2a5d09db00396 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java @@ -19,10 +19,10 @@ import lombok.Value; import org.apache.commons.lang3.tuple.Triple; - /** - * The Lineage Registry provides a mechanism to retrieve metadata about the lineage relationships between different entities - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * The Lineage Registry provides a mechanism to retrieve metadata about the lineage relationships + * between different entities. Lineage relationship denotes whether an entity is directly upstream or + * downstream of another entity */ public class LineageRegistry { @@ -35,55 +35,73 @@ public LineageRegistry(EntityRegistry entityRegistry) { } private Map<String, LineageSpec> buildLineageSpecs(EntityRegistry entityRegistry) { - // 1. Flatten relationship annotations into a list of lineage edges (source, dest, type, isUpstream) - Collection<LineageEdge> lineageEdges = entityRegistry.getEntitySpecs() - .entrySet() - .stream() - .flatMap(entry -> entry.getValue() - .getRelationshipFieldSpecs() - .stream() + // 1. 
Flatten relationship annotations into a list of lineage edges (source, dest, type, + // isUpstream) + Collection<LineageEdge> lineageEdges = + entityRegistry.getEntitySpecs().entrySet().stream() .flatMap( - spec -> getLineageEdgesFromRelationshipAnnotation(entry.getKey(), spec.getRelationshipAnnotation()))) - // If there are multiple edges with the same source, dest, edge type, get one of them - .collect(Collectors.toMap(edge -> Triple.of(edge.getSourceEntity(), edge.getDestEntity(), edge.getType()), - Function.identity(), (x1, x2) -> x1)) - .values(); + entry -> + entry.getValue().getRelationshipFieldSpecs().stream() + .flatMap( + spec -> + getLineageEdgesFromRelationshipAnnotation( + entry.getKey(), spec.getRelationshipAnnotation()))) + // If there are multiple edges with the same source, dest, edge type, get one of them + .collect( + Collectors.toMap( + edge -> Triple.of(edge.getSourceEntity(), edge.getDestEntity(), edge.getType()), + Function.identity(), + (x1, x2) -> x1)) + .values(); // 2. Figure out the upstream and downstream edges of each entity type Map<String, Set<EdgeInfo>> upstreamPerEntity = new HashMap<>(); Map<String, Set<EdgeInfo>> downstreamPerEntity = new HashMap<>(); - // A downstreamOf B : A -> upstream (downstreamOf, OUTGOING), B -> downstream (downstreamOf, INCOMING) + // A downstreamOf B : A -> upstream (downstreamOf, OUTGOING), B -> downstream (downstreamOf, + // INCOMING) // A produces B : A -> downstream (produces, OUTGOING), B -> upstream (produces, INCOMING) for (LineageEdge edge : lineageEdges) { if (edge.isUpstream()) { - upstreamPerEntity.computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) + upstreamPerEntity + .computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.OUTGOING, edge.destEntity)); - downstreamPerEntity.computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) + downstreamPerEntity + .computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.INCOMING, edge.sourceEntity)); } else { - downstreamPerEntity.computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) + downstreamPerEntity + .computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.OUTGOING, edge.destEntity)); - upstreamPerEntity.computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) + upstreamPerEntity + .computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.INCOMING, edge.sourceEntity)); } } - return entityRegistry.getEntitySpecs() - .keySet() - .stream() - .collect(Collectors.toMap(String::toLowerCase, entityName -> new LineageSpec( - new ArrayList<>(upstreamPerEntity.getOrDefault(entityName.toLowerCase(), Collections.emptySet())), - new ArrayList<>(downstreamPerEntity.getOrDefault(entityName.toLowerCase(), Collections.emptySet()))))); + return entityRegistry.getEntitySpecs().keySet().stream() + .collect( + Collectors.toMap( + String::toLowerCase, + entityName -> + new LineageSpec( + new ArrayList<>( + upstreamPerEntity.getOrDefault( + entityName.toLowerCase(), Collections.emptySet())), + new ArrayList<>( + downstreamPerEntity.getOrDefault( + entityName.toLowerCase(), Collections.emptySet()))))); } - private Stream<LineageEdge> getLineageEdgesFromRelationshipAnnotation(String sourceEntity, - RelationshipAnnotation annotation) { + private Stream<LineageEdge> 
getLineageEdgesFromRelationshipAnnotation( + String sourceEntity, RelationshipAnnotation annotation) { if (!annotation.isLineage()) { return Stream.empty(); } - return annotation.getValidDestinationTypes() - .stream() - .map(destEntity -> new LineageEdge(sourceEntity, destEntity, annotation.getName(), annotation.isUpstream())); + return annotation.getValidDestinationTypes().stream() + .map( + destEntity -> + new LineageEdge( + sourceEntity, destEntity, annotation.getName(), annotation.isUpstream())); } public LineageSpec getLineageSpec(String entityName) { @@ -92,11 +110,13 @@ public LineageSpec getLineageSpec(String entityName) { public Set<String> getEntitiesWithLineageToEntityType(String entityType) { Map<String, EntitySpec> specs = _entityRegistry.getEntitySpecs(); - Set<String> result = Streams.concat(_lineageSpecMap.get(entityType.toLowerCase()).getDownstreamEdges().stream(), - _lineageSpecMap.get(entityType.toLowerCase()).getUpstreamEdges().stream()) - .map(EdgeInfo::getOpposingEntityType) - .map(entity -> specs.get(entity.toLowerCase()).getName()) - .collect(Collectors.toSet()); + Set<String> result = + Streams.concat( + _lineageSpecMap.get(entityType.toLowerCase()).getDownstreamEdges().stream(), + _lineageSpecMap.get(entityType.toLowerCase()).getUpstreamEdges().stream()) + .map(EdgeInfo::getOpposingEntityType) + .map(entity -> specs.get(entity.toLowerCase()).getName()) + .collect(Collectors.toSet()); result.add(entityType); return result; } @@ -120,9 +140,11 @@ public List<EdgeInfo> getLineageRelationships(String entityName, LineageDirectio private List<EdgeInfo> getSchemaFieldRelationships(LineageDirection direction) { List<EdgeInfo> schemaFieldEdges = new ArrayList<>(); if (direction == LineageDirection.UPSTREAM) { - schemaFieldEdges.add(new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "schemafield")); + schemaFieldEdges.add( + new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "schemafield")); } else { - schemaFieldEdges.add(new EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "schemafield")); + schemaFieldEdges.add( + new EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "schemafield")); } return schemaFieldEdges; } @@ -165,8 +187,9 @@ public boolean equals(Object o) { public int hashCode() { return ((this.type == null ? 0 : this.type.toLowerCase().hashCode()) ^ (this.direction == null ? 0 : this.direction.hashCode()) - ^ (this.opposingEntityType == null ? 0 : this.opposingEntityType.toLowerCase().hashCode())); + ^ (this.opposingEntityType == null + ? 
0 + : this.opposingEntityType.toLowerCase().hashCode())); } } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java index f0ec57b8d81c3..06aeefc2e5aa0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java @@ -19,10 +19,7 @@ import lombok.Setter; import lombok.extern.slf4j.Slf4j; - -/** - * Combines results from two entity registries, where the second takes precedence - */ +/** Combines results from two entity registries, where the second takes precedence */ @Slf4j public class MergedEntityRegistry implements EntityRegistry { @@ -34,8 +31,14 @@ public class MergedEntityRegistry implements EntityRegistry { public MergedEntityRegistry(EntityRegistry baseEntityRegistry) { // baseEntityRegistry.get*Specs() can return immutable Collections.emptyMap() which fails // when this class attempts .put* operations on it. - entityNameToSpec = baseEntityRegistry.getEntitySpecs() != null ? new HashMap<>(baseEntityRegistry.getEntitySpecs()) : new HashMap<>(); - eventNameToSpec = baseEntityRegistry.getEventSpecs() != null ? new HashMap<>(baseEntityRegistry.getEventSpecs()) : new HashMap<>(); + entityNameToSpec = + baseEntityRegistry.getEntitySpecs() != null + ? new HashMap<>(baseEntityRegistry.getEntitySpecs()) + : new HashMap<>(); + eventNameToSpec = + baseEntityRegistry.getEventSpecs() != null + ? new HashMap<>(baseEntityRegistry.getEventSpecs()) + : new HashMap<>(); baseEntityRegistry.getAspectTemplateEngine(); _aspectTemplateEngine = baseEntityRegistry.getAspectTemplateEngine(); _aspectNameToSpec = baseEntityRegistry.getAspectSpecs(); @@ -44,22 +47,28 @@ public MergedEntityRegistry(EntityRegistry baseEntityRegistry) { private void validateEntitySpec(EntitySpec entitySpec, final ValidationResult validationResult) { if (entitySpec.getKeyAspectSpec() == null) { validationResult.setValid(false); - validationResult.getValidationFailures().add(String.format("Key aspect is missing in entity {}", entitySpec.getName())); + validationResult + .getValidationFailures() + .add(String.format("Key aspect is missing in entity %s", entitySpec.getName())); } } - public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) throws EntityRegistryException { + public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) + throws EntityRegistryException { ValidationResult validationResult = validatePatch(patchEntityRegistry); if (!validationResult.isValid()) { - throw new EntityRegistryException(String.format("Failed to validate new registry with %s", validationResult.validationFailures.stream().collect( - Collectors.joining("\n")))); + throw new EntityRegistryException( + String.format( + "Failed to validate new registry with %s", + validationResult.validationFailures.stream().collect(Collectors.joining("\n")))); } // Merge Entity Specs for (Map.Entry<String, EntitySpec> e2Entry : patchEntityRegistry.getEntitySpecs().entrySet()) { if (entityNameToSpec.containsKey(e2Entry.getKey())) { - EntitySpec mergeEntitySpec = mergeEntitySpecs(entityNameToSpec.get(e2Entry.getKey()), e2Entry.getValue()); + EntitySpec mergeEntitySpec = + mergeEntitySpecs(entityNameToSpec.get(e2Entry.getKey()), e2Entry.getValue()); entityNameToSpec.put(e2Entry.getKey(), mergeEntitySpec); } else { // We are inserting a new entity into the 
registry @@ -71,41 +80,63 @@ public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) throws Ent if (patchEntityRegistry.getEventSpecs().size() > 0) { eventNameToSpec.putAll(patchEntityRegistry.getEventSpecs()); } - //TODO: Validate that the entity registries don't have conflicts among each other + // TODO: Validate that the entity registries don't have conflicts among each other return this; } private ValidationResult validatePatch(EntityRegistry patchEntityRegistry) { ValidationResult validationResult = new ValidationResult(); for (Map.Entry<String, EntitySpec> e2Entry : patchEntityRegistry.getEntitySpecs().entrySet()) { - checkMergeable(entityNameToSpec.getOrDefault(e2Entry.getKey(), null), e2Entry.getValue(), validationResult); + checkMergeable( + entityNameToSpec.getOrDefault(e2Entry.getKey(), null), + e2Entry.getValue(), + validationResult); } return validationResult; } - private void checkMergeable(EntitySpec existingEntitySpec, EntitySpec newEntitySpec, final ValidationResult validationResult) { + private void checkMergeable( + EntitySpec existingEntitySpec, + EntitySpec newEntitySpec, + final ValidationResult validationResult) { if (existingEntitySpec != null) { - existingEntitySpec.getAspectSpecMap().entrySet().forEach(aspectSpecEntry -> { - if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) { - CompatibilityResult result = CompatibilityChecker.checkCompatibility(aspectSpecEntry.getValue().getPegasusSchema(), newEntitySpec.getAspectSpec( - aspectSpecEntry.getKey()).getPegasusSchema(), new CompatibilityOptions()); - if (result.isError()) { - log.error("{} schema is not compatible with previous schema due to {}", aspectSpecEntry.getKey(), result.getMessages()); - // we want to continue processing all aspects to collect all failures - validationResult.setValid(false); - validationResult.getValidationFailures().add( - String.format("%s schema is not compatible with previous schema due to %s", aspectSpecEntry.getKey(), result.getMessages())); - } else { - log.info("{} schema is compatible with previous schema due to {}", aspectSpecEntry.getKey(), result.getMessages()); - } - } - }); + existingEntitySpec + .getAspectSpecMap() + .entrySet() + .forEach( + aspectSpecEntry -> { + if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) { + CompatibilityResult result = + CompatibilityChecker.checkCompatibility( + aspectSpecEntry.getValue().getPegasusSchema(), + newEntitySpec.getAspectSpec(aspectSpecEntry.getKey()).getPegasusSchema(), + new CompatibilityOptions()); + if (result.isError()) { + log.error( + "{} schema is not compatible with previous schema due to {}", + aspectSpecEntry.getKey(), + result.getMessages()); + // we want to continue processing all aspects to collect all failures + validationResult.setValid(false); + validationResult + .getValidationFailures() + .add( + String.format( + "%s schema is not compatible with previous schema due to %s", + aspectSpecEntry.getKey(), result.getMessages())); + } else { + log.info( + "{} schema is compatible with previous schema due to {}", + aspectSpecEntry.getKey(), + result.getMessages()); + } + } + }); } else { validateEntitySpec(newEntitySpec, validationResult); } } - private EntitySpec mergeEntitySpecs(EntitySpec existingEntitySpec, EntitySpec newEntitySpec) { Map<String, AspectSpec> aspectSpecMap = new HashMap<>(existingEntitySpec.getAspectSpecMap()); aspectSpecMap.putAll(newEntitySpec.getAspectSpecMap()); @@ -116,8 +147,11 @@ private EntitySpec mergeEntitySpecs(EntitySpec existingEntitySpec, EntitySpec ne 
existingEntitySpec.getEntityAnnotation().getKeyAspect(), aspectSpecMap.values()); } - return new DefaultEntitySpec(aspectSpecMap.values(), existingEntitySpec.getEntityAnnotation(), - existingEntitySpec.getSnapshotSchema(), existingEntitySpec.getAspectTyperefSchema()); + return new DefaultEntitySpec( + aspectSpecMap.values(), + existingEntitySpec.getEntityAnnotation(), + existingEntitySpec.getSnapshotSchema(), + existingEntitySpec.getAspectTyperefSchema()); } @Nonnull diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java index 76d9c8ceb089c..9eafbe05a4fc6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @@ -32,13 +35,10 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that is similar to {@link ConfigEntityRegistry} but different in one important way. - * It builds potentially partially specified {@link com.linkedin.metadata.models.PartialEntitySpec} objects from an entity registry config yaml file + * Implementation of {@link EntityRegistry} that is similar to {@link ConfigEntityRegistry} but + * different in one important way. 
It builds potentially partially specified {@link + * com.linkedin.metadata.models.PartialEntitySpec} objects from an entity registry config yaml file */ @Slf4j public class PatchEntityRegistry implements EntityRegistry { @@ -53,37 +53,50 @@ public class PatchEntityRegistry implements EntityRegistry { private final String identifier; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } @Override public String toString() { StringBuilder sb = new StringBuilder("PatchEntityRegistry[" + "identifier=" + identifier + ';'); - entityNameToSpec.entrySet() - .stream() - .forEach(entry -> sb.append("[entityName=") - .append(entry.getKey()) - .append(";aspects=[") - .append( - entry.getValue().getAspectSpecs().stream().map(spec -> spec.getName()).collect(Collectors.joining(","))) - .append("]]")); - eventNameToSpec.entrySet() - .stream() - .forEach(entry -> sb.append("[eventName=") - .append(entry.getKey()) - .append("]")); + entityNameToSpec.entrySet().stream() + .forEach( + entry -> + sb.append("[entityName=") + .append(entry.getKey()) + .append(";aspects=[") + .append( + entry.getValue().getAspectSpecs().stream() + .map(spec -> spec.getName()) + .collect(Collectors.joining(","))) + .append("]]")); + eventNameToSpec.entrySet().stream() + .forEach(entry -> sb.append("[eventName=").append(entry.getKey()).append("]")); return sb.toString(); } - public PatchEntityRegistry(Pair<Path, Path> configFileClassPathPair, String registryName, - ComparableVersion registryVersion) throws IOException, EntityRegistryException { - this(DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), configFileClassPathPair.getFirst(), - registryName, registryVersion); + public PatchEntityRegistry( + Pair<Path, Path> configFileClassPathPair, + String registryName, + ComparableVersion registryVersion) + throws IOException, EntityRegistryException { + this( + DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), + configFileClassPathPair.getFirst(), + registryName, + registryVersion); } - public PatchEntityRegistry(String entityRegistryRoot, String registryName, ComparableVersion registryVersion) + public PatchEntityRegistry( + String entityRegistryRoot, String registryName, ComparableVersion registryVersion) throws EntityRegistryException, IOException { this(getFileAndClassPath(entityRegistryRoot), registryName, registryVersion); } @@ -93,21 +106,28 @@ private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot) Path entityRegistryRootLoc = Paths.get(entityRegistryRoot); if (Files.isDirectory(entityRegistryRootLoc)) { // Look for entity-registry.yml or entity-registry.yaml in the root folder - List<Path> yamlFiles = Files.walk(entityRegistryRootLoc, 1) - .filter(Files::isRegularFile) - .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) - .collect(Collectors.toList()); + List<Path> yamlFiles = + Files.walk(entityRegistryRootLoc, 1) + .filter(Files::isRegularFile) + .filter(f -> 
f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) + .collect(Collectors.toList()); if (yamlFiles.size() == 0) { throw new EntityRegistryException( - String.format("Did not find an entity registry (entity-registry.yaml/yml) under %s", + String.format( + "Did not find an entity registry (entity-registry.yaml/yml) under %s", entityRegistryRootLoc)); } if (yamlFiles.size() > 1) { - log.warn("Found more than one yaml file in the directory {}. Will pick the first {}", entityRegistryRootLoc, + log.warn( + "Found more than one yaml file in the directory {}. Will pick the first {}", + entityRegistryRootLoc, yamlFiles.get(0)); } Path entityRegistryFile = yamlFiles.get(0); - log.info("Loading custom config entity file: {}, dir: {}", entityRegistryFile, entityRegistryRootLoc); + log.info( + "Loading custom config entity file: {}, dir: {}", + entityRegistryFile, + entityRegistryRootLoc); return new Pair<>(entityRegistryFile, entityRegistryRootLoc); } else { // We assume that the file being passed in is a bare entity registry yaml file @@ -116,13 +136,25 @@ private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot) } } - public PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath, String registryName, - ComparableVersion registryVersion) throws FileNotFoundException, EntityRegistryException { - this(dataSchemaFactory, new FileInputStream(configFilePath.toString()), registryName, registryVersion); + public PatchEntityRegistry( + DataSchemaFactory dataSchemaFactory, + Path configFilePath, + String registryName, + ComparableVersion registryVersion) + throws FileNotFoundException, EntityRegistryException { + this( + dataSchemaFactory, + new FileInputStream(configFilePath.toString()), + registryName, + registryVersion); } - private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream configFileStream, String registryName, - ComparableVersion registryVersion) throws EntityRegistryException { + private PatchEntityRegistry( + DataSchemaFactory dataSchemaFactory, + InputStream configFileStream, + String registryName, + ComparableVersion registryVersion) + throws EntityRegistryException { this.dataSchemaFactory = dataSchemaFactory; this.registryName = registryName; this.registryVersion = registryVersion; @@ -133,7 +165,8 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con } catch (IOException e) { e.printStackTrace(); throw new IllegalArgumentException( - String.format("Error while reading config file in path %s: %s", configFileStream, e.getMessage())); + String.format( + "Error while reading config file in path %s: %s", configFileStream, e.getMessage())); } if (entities.getId() != null) { identifier = entities.getId(); @@ -144,7 +177,9 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con // Build Entity Specs EntitySpecBuilder entitySpecBuilder = new EntitySpecBuilder(); for (Entity entity : entities.getEntities()) { - log.info("Discovered entity {} with aspects {}", entity.getName(), + log.info( + "Discovered entity {} with aspects {}", + entity.getName(), entity.getAspects().stream().collect(Collectors.joining())); List<AspectSpec> aspectSpecs = new ArrayList<>(); if (entity.getKeyAspect() != null) { @@ -152,16 +187,20 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con log.info("Adding key aspect {} with spec {}", entity.getKeyAspect(), keyAspectSpec); aspectSpecs.add(keyAspectSpec); } - entity.getAspects().forEach(aspect -> { - if 
(!aspect.equals(entity.getKeyAspect())) { - AspectSpec aspectSpec = buildAspectSpec(aspect, entitySpecBuilder); - log.info("Adding aspect {} with spec {}", aspect, aspectSpec); - aspectSpecs.add(aspectSpec); - } - }); + entity + .getAspects() + .forEach( + aspect -> { + if (!aspect.equals(entity.getKeyAspect())) { + AspectSpec aspectSpec = buildAspectSpec(aspect, entitySpecBuilder); + log.info("Adding aspect {} with spec {}", aspect, aspectSpec); + aspectSpecs.add(aspectSpec); + } + }); EntitySpec entitySpec = - entitySpecBuilder.buildPartialEntitySpec(entity.getName(), entity.getKeyAspect(), aspectSpecs); + entitySpecBuilder.buildPartialEntitySpec( + entity.getName(), entity.getKeyAspect(), aspectSpecs); entityNameToSpec.put(entity.getName().toLowerCase(), entitySpec); } @@ -225,7 +264,7 @@ public Map<String, EventSpec> getEventSpecs() { @Nonnull @Override public AspectTemplateEngine getAspectTemplateEngine() { - //TODO: support patch based templates + // TODO: support patch based templates return new AspectTemplateEngine(); } @@ -236,7 +275,8 @@ private AspectSpec buildAspectSpec(String aspectName, EntitySpecBuilder entitySp if (!aspectSchema.isPresent()) { throw new IllegalArgumentException(String.format("Aspect %s does not exist", aspectName)); } - AspectSpec aspectSpec = entitySpecBuilder.buildAspectSpec(aspectSchema.get(), aspectClass.get()); + AspectSpec aspectSpec = + entitySpecBuilder.buildAspectSpec(aspectSchema.get(), aspectClass.get()); aspectSpec.setRegistryName(this.registryName); aspectSpec.setRegistryVersion(this.registryVersion); return aspectSpec; @@ -249,5 +289,4 @@ private EventSpec buildEventSpec(String eventName) { } return new EventSpecBuilder().buildEventSpec(eventName, eventSchema.get()); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java index 4809b1f4d2f21..05c752a5c1575 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java @@ -23,14 +23,14 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; - @Slf4j public class PluginEntityRegistryLoader { private static int _MAXLOADFAILURES = 5; private final Boolean scanningEnabled; private final String pluginDirectory; // Registry Name -> Registry Version -> (Registry, LoadResult) - private final Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> patchRegistries; + private final Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> + patchRegistries; private MergedEntityRegistry mergedEntityRegistry; private boolean started = false; private final Lock lock = new ReentrantLock(); @@ -41,7 +41,9 @@ public class PluginEntityRegistryLoader { public PluginEntityRegistryLoader(String pluginDirectory) { File directory = new File(pluginDirectory); if (!directory.exists() || !directory.isDirectory()) { - log.warn("{} directory does not exist or is not a directory. Plugin scanning will be disabled.", directory); + log.warn( + "{} directory does not exist or is not a directory. 
Plugin scanning will be disabled.", + directory); scanningEnabled = false; } else { scanningEnabled = true; @@ -50,7 +52,8 @@ public PluginEntityRegistryLoader(String pluginDirectory) { this.patchRegistries = new HashMap<>(); } - public Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> getPatchRegistries() { + public Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> + getPatchRegistries() { return patchRegistries; } @@ -59,7 +62,8 @@ public PluginEntityRegistryLoader withBaseRegistry(MergedEntityRegistry baseEnti return this; } - public PluginEntityRegistryLoader start(boolean waitForInitialization) throws InterruptedException { + public PluginEntityRegistryLoader start(boolean waitForInitialization) + throws InterruptedException { if (started) { log.warn("Already started!. Skipping"); return this; @@ -68,45 +72,69 @@ public PluginEntityRegistryLoader start(boolean waitForInitialization) throws In return this; } - executorService.scheduleAtFixedRate(() -> { - lock.lock(); - try { - Path rootPath = Paths.get(this.pluginDirectory); - int rootDepth = rootPath.getNameCount(); - List<Path> paths = - Files.walk(rootPath, 2).filter(x -> x.getNameCount() - rootDepth == 2).collect(Collectors.toList()); - log.debug("Size of list {}", paths.size()); - log.debug("Paths : {}", paths.stream().map(x -> x.toString() + ";").collect(Collectors.joining())); - List<Path> versionedPaths = paths.stream().filter(path -> { + executorService.scheduleAtFixedRate( + () -> { + lock.lock(); try { - ComparableVersion comparableVersion = new ComparableVersion(path.getName(rootDepth + 1).toString()); - return true; + Path rootPath = Paths.get(this.pluginDirectory); + int rootDepth = rootPath.getNameCount(); + List<Path> paths = + Files.walk(rootPath, 2) + .filter(x -> x.getNameCount() - rootDepth == 2) + .collect(Collectors.toList()); + log.debug("Size of list {}", paths.size()); + log.debug( + "Paths : {}", + paths.stream().map(x -> x.toString() + ";").collect(Collectors.joining())); + List<Path> versionedPaths = + paths.stream() + .filter( + path -> { + try { + ComparableVersion comparableVersion = + new ComparableVersion(path.getName(rootDepth + 1).toString()); + return true; + } catch (Exception e) { + log.warn( + String.format( + "Will skip %s since we weren't able to parse a legal version from it", + path.toString())); + return false; + } + }) + .sorted( + (path1, path2) -> { + if (path1.getName(rootDepth).equals(path2.getName(rootDepth))) { + return new ComparableVersion(path1.getName(rootDepth + 1).toString()) + .compareTo( + new ComparableVersion(path2.getName(rootDepth + 1).toString())); + } else { + return path1.getName(rootDepth).compareTo(path2.getName(rootDepth)); + } + }) + .collect(Collectors.toList()); + log.debug( + "Will be loading paths in this order {}", + versionedPaths.stream().map(p -> p.toString()).collect(Collectors.joining(";"))); + + versionedPaths.forEach( + x -> + loadOneRegistry( + this.mergedEntityRegistry, + x.getName(rootDepth).toString(), + x.getName(rootDepth + 1).toString(), + x.toString())); } catch (Exception e) { - log.warn( - String.format("Will skip %s since we weren't able to parse a legal version from it", path.toString())); - return false; - } - }).sorted((path1, path2) -> { - if (path1.getName(rootDepth).equals(path2.getName(rootDepth))) { - return new ComparableVersion(path1.getName(rootDepth + 1).toString()).compareTo( - new ComparableVersion(path2.getName(rootDepth + 1).toString())); - } else { - return 
path1.getName(rootDepth).compareTo(path2.getName(rootDepth)); + log.warn("Failed to walk directory with exception", e); + } finally { + booted = true; + initialized.signal(); + lock.unlock(); } - }).collect(Collectors.toList()); - log.debug("Will be loading paths in this order {}", - versionedPaths.stream().map(p -> p.toString()).collect(Collectors.joining(";"))); - - versionedPaths.forEach(x -> loadOneRegistry(this.mergedEntityRegistry, x.getName(rootDepth).toString(), - x.getName(rootDepth + 1).toString(), x.toString())); - } catch (Exception e) { - log.warn("Failed to walk directory with exception", e); - } finally { - booted = true; - initialized.signal(); - lock.unlock(); - } - }, 0, 5, TimeUnit.SECONDS); + }, + 0, + 5, + TimeUnit.SECONDS); started = true; if (waitForInitialization) { lock.lock(); @@ -121,7 +149,10 @@ public PluginEntityRegistryLoader start(boolean waitForInitialization) throws In return this; } - private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registryName, String registryVersionStr, + private void loadOneRegistry( + MergedEntityRegistry parentRegistry, + String registryName, + String registryVersionStr, String patchDirectory) { ComparableVersion registryVersion = new ComparableVersion("0.0.0-dev"); try { @@ -129,11 +160,15 @@ private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registr log.debug("{}: Found registry version {}", this, maybeVersion); registryVersion = maybeVersion; } catch (IllegalArgumentException ie) { - log.warn("Found un-parseable registry version {}, will default to {}", registryVersionStr, registryVersion); + log.warn( + "Found un-parseable registry version {}, will default to {}", + registryVersionStr, + registryVersion); } if (registryExists(registryName, registryVersion)) { - log.debug("Registry {}:{} already exists. Skipping loading...", registryName, registryVersion); + log.debug( + "Registry {}:{} already exists. Skipping loading...", registryName, registryVersion); return; } else { log.info("{}: Registry {}:{} discovered. 
Loading...", this, registryName, registryVersion); @@ -160,31 +195,39 @@ private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registr private boolean registryExists(String registryName, ComparableVersion registryVersion) { Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>> nameTree = patchRegistries.getOrDefault(registryName, new HashMap<>()); - if (nameTree.containsKey(registryVersion) && ( - (nameTree.get(registryVersion).getSecond().getLoadResult() == LoadStatus.SUCCESS) || ( - nameTree.get(registryVersion).getSecond().getFailureCount() == _MAXLOADFAILURES))) { + if (nameTree.containsKey(registryVersion) + && ((nameTree.get(registryVersion).getSecond().getLoadResult() == LoadStatus.SUCCESS) + || (nameTree.get(registryVersion).getSecond().getFailureCount() == _MAXLOADFAILURES))) { return true; } return false; } - private void addLoadResult(String registryName, ComparableVersion semanticVersion, - EntityRegistryLoadResult loadResult, EntityRegistry e) { + private void addLoadResult( + String registryName, + ComparableVersion semanticVersion, + EntityRegistryLoadResult loadResult, + EntityRegistry e) { Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>> nameTree = patchRegistries.getOrDefault(registryName, new HashMap<>()); if (nameTree.containsKey(semanticVersion)) { - if ((loadResult.getLoadResult() == LoadStatus.FAILURE) && ( - nameTree.get(semanticVersion).getSecond().getLoadResult() == LoadStatus.FAILURE)) { + if ((loadResult.getLoadResult() == LoadStatus.FAILURE) + && (nameTree.get(semanticVersion).getSecond().getLoadResult() == LoadStatus.FAILURE)) { // previous load and current loads are both failures loadResult.setFailureCount(nameTree.get(semanticVersion).getSecond().getFailureCount() + 1); if (loadResult.getFailureCount() == _MAXLOADFAILURES) { // Abandoning this registry version forever - log.error("Tried {} times. Failed to load registry {} with {}", loadResult.getFailureCount(), registryName, loadResult.getFailureReason()); + log.error( + "Tried {} times. 
Failed to load registry {} with {}", + loadResult.getFailureCount(), + registryName, + loadResult.getFailureReason()); } } log.warn( - String.format("Attempt %d to re-load registry %s: %s", loadResult.getFailureCount(), - registryName, semanticVersion)); + String.format( + "Attempt %d to re-load registry %s: %s", + loadResult.getFailureCount(), registryName, semanticVersion)); } nameTree.put(semanticVersion, new Pair<>(e, loadResult)); patchRegistries.put(registryName, nameTree); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index 32738d65573fd..cfc2c0901ce0d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; import com.linkedin.metadata.models.AspectSpec; @@ -27,13 +30,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects - * from the a {@link Snapshot} Record Template present on the classpath + * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from a + * {@link Snapshot} Record Template present on the classpath */ public class SnapshotEntityRegistry implements EntityRegistry { @@ -45,36 +44,41 @@ public class SnapshotEntityRegistry implements EntityRegistry { private static final SnapshotEntityRegistry INSTANCE = new SnapshotEntityRegistry(); public SnapshotEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder().buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder() + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); entitySpecs = new ArrayList<>(entityNameToSpec.values()); _aspectNameToSpec = populateAspectMap(entitySpecs); _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec); } public SnapshotEntityRegistry(UnionTemplate snapshot) { - entityNameToSpec = new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder() + .buildEntitySpecs(snapshot.schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); entitySpecs = new ArrayList<>(entityNameToSpec.values()); _aspectNameToSpec = populateAspectMap(entitySpecs); _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec); } private AspectTemplateEngine populateTemplateEngine(Map<String, AspectSpec> aspectSpecs) { - // TODO: This should be more dynamic ideally, "hardcoding" for now, passing in aspect spec map preemptively + // TODO: This should be more dynamic ideally, "hardcoding" for now, passing in aspect spec map + // preemptively Map<String, 
Template<? extends RecordTemplate>> aspectSpecTemplateMap = new HashMap<>(); aspectSpecTemplateMap.put(OWNERSHIP_ASPECT_NAME, new OwnershipTemplate()); aspectSpecTemplateMap.put(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesTemplate()); aspectSpecTemplateMap.put(UPSTREAM_LINEAGE_ASPECT_NAME, new UpstreamLineageTemplate()); aspectSpecTemplateMap.put(GLOBAL_TAGS_ASPECT_NAME, new GlobalTagsTemplate()); - aspectSpecTemplateMap.put(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataTemplate()); + aspectSpecTemplateMap.put( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataTemplate()); aspectSpecTemplateMap.put(GLOSSARY_TERMS_ASPECT_NAME, new GlossaryTermsTemplate()); aspectSpecTemplateMap.put(DATA_FLOW_INFO_ASPECT_NAME, new DataFlowInfoTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INFO_ASPECT_NAME, new DataJobInfoTemplate()); - aspectSpecTemplateMap.put(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate()); + aspectSpecTemplateMap.put( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate()); return new AspectTemplateEngine(aspectSpecTemplateMap); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java index f32aa1aa8bd47..e5d048d6ef647 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java @@ -1,15 +1,12 @@ package com.linkedin.metadata.models.registry.config; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import java.util.List; - +import javax.annotation.Nullable; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.NoArgsConstructor; import lombok.Value; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -import javax.annotation.Nullable; - @Value @NoArgsConstructor(force = true, access = AccessLevel.PRIVATE) @@ -21,6 +18,5 @@ public class Entity { String keyAspect; List<String> aspects; - @Nullable - String category; + @Nullable String category; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java index caec5fc69c148..f08fa5ba0a477 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java @@ -4,13 +4,11 @@ import lombok.Getter; import lombok.Setter; - @Builder @Getter public class EntityRegistryLoadResult { private LoadStatus loadResult; private String registryLocation; private String failureReason; - @Setter - private int failureCount; + @Setter private int failureCount; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java index 12c9f5ab36a09..4a868ed92e4a7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.models.registry.config; +import 
com.fasterxml.jackson.annotation.JsonIgnoreProperties; import lombok.AccessLevel; import lombok.NoArgsConstructor; import lombok.Value; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @Value @NoArgsConstructor(force = true, access = AccessLevel.PRIVATE) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java index cf63e87abf7f9..9cd8e74d952d6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -8,61 +10,68 @@ import java.util.Collections; import java.util.List; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - public interface ArrayMergingTemplate<T extends RecordTemplate> extends Template<T> { /** - * Takes an Array field on the {@link RecordTemplate} subtype along with a set of key fields to transform into a map - * Avoids producing side effects by copying nodes, use resulting node and not the original + * Takes an Array field on the {@link RecordTemplate} subtype along with a set of key fields to + * transform into a map Avoids producing side effects by copying nodes, use resulting node and not + * the original + * * @param baseNode the base unmodified node * @param arrayFieldName name of the array field to be transformed - * @param keyFields subfields of the array object to be used as keys, empty implies the list is just strings to be merged + * @param keyFields subfields of the array object to be used as keys, empty implies the list is + * just strings to be merged * @return the modified {@link JsonNode} with array fields transformed to maps */ - default JsonNode arrayFieldToMap(JsonNode baseNode, String arrayFieldName, List<String> keyFields) { + default JsonNode arrayFieldToMap( + JsonNode baseNode, String arrayFieldName, List<String> keyFields) { JsonNode transformedNode = baseNode.deepCopy(); JsonNode arrayNode = baseNode.get(arrayFieldName); ObjectNode mapNode = instance.objectNode(); if (arrayNode instanceof ArrayNode) { - ((ArrayNode) arrayNode).elements() - .forEachRemaining(node -> { - ObjectNode keyValue = mapNode; - // Creates nested object of keys with final value being the full value of the node - JsonNode nodeClone = node.deepCopy(); - if (!keyFields.isEmpty()) { - for (String keyField : keyFields) { - String key = node.get(keyField).asText(); - keyValue = keyValue.get(key) == null ? (ObjectNode) keyValue.set(key, instance.objectNode()).get(key) - : (ObjectNode) keyValue.get(key); + ((ArrayNode) arrayNode) + .elements() + .forEachRemaining( + node -> { + ObjectNode keyValue = mapNode; + // Creates nested object of keys with final value being the full value of the node + JsonNode nodeClone = node.deepCopy(); + if (!keyFields.isEmpty()) { + for (String keyField : keyFields) { + String key = node.get(keyField).asText(); + keyValue = + keyValue.get(key) == null + ? 
(ObjectNode) keyValue.set(key, instance.objectNode()).get(key) + : (ObjectNode) keyValue.get(key); + } + } else { + // No key fields, assume String array + nodeClone = instance.objectNode().set(((TextNode) node).asText(), node); } - } else { - // No key fields, assume String array - nodeClone = instance.objectNode().set(((TextNode) node).asText(), node); - } - keyValue.setAll((ObjectNode) nodeClone); - } - ); - + keyValue.setAll((ObjectNode) nodeClone); + }); } return ((ObjectNode) transformedNode).set(arrayFieldName, mapNode); } /** - * Takes a transformed map field on the {@link JsonNode} representation along with a set of key fields used to transform into a map - * and rebases it to the original defined format - * Avoids producing side effects by copying nodes, use resulting node and not the original + * Takes a transformed map field on the {@link JsonNode} representation along with a set of key + * fields used to transform into a map and rebases it to the original defined format Avoids + * producing side effects by copying nodes, use resulting node and not the original + * * @param transformedNode the transformed node * @param arrayFieldName name of the array field to be transformed - * @param keyFields subfields of the array object to be used as keys, empty implies the list is just strings to be merged + * @param keyFields subfields of the array object to be used as keys, empty implies the list is + * just strings to be merged * @return the modified {@link JsonNode} formatted consistent with the original schema */ - default JsonNode transformedMapToArray(JsonNode transformedNode, String arrayFieldName, List<String> keyFields) { + default JsonNode transformedMapToArray( + JsonNode transformedNode, String arrayFieldName, List<String> keyFields) { JsonNode fieldNode = transformedNode.get(arrayFieldName); if (fieldNode instanceof ArrayNode) { - // We already have an ArrayNode, no need to transform. This happens during `replace` operations + // We already have an ArrayNode, no need to transform. This happens during `replace` + // operations return transformedNode; } ObjectNode rebasedNode = transformedNode.deepCopy(); @@ -74,9 +83,7 @@ default JsonNode transformedMapToArray(JsonNode transformedNode, String arrayFie } else { // No keys, assume pure Strings arrayNode = instance.arrayNode(); - mapNode.fields().forEachRemaining(entry -> - arrayNode.add(entry.getValue()) - ); + mapNode.fields().forEachRemaining(entry -> arrayNode.add(entry.getValue())); } return rebasedNode.set(arrayFieldName, arrayNode); } @@ -86,9 +93,16 @@ default ArrayNode mergeToArray(JsonNode mapNode, List<String> keyFields) { return instance.arrayNode().add(mapNode); } else { ArrayNode mergingArray = instance.arrayNode(); - mapNode.elements().forEachRemaining(node -> - mergingArray.addAll(mergeToArray(node, keyFields.size() > 1 ? keyFields.subList(1, keyFields.size()) : Collections.emptyList())) - ); + mapNode + .elements() + .forEachRemaining( + node -> + mergingArray.addAll( + mergeToArray( + node, + keyFields.size() > 1 + ? 
keyFields.subList(1, keyFields.size()) : Collections.emptyList()))); return mergingArray; } }
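The arrayFieldToMap/transformedMapToArray pair above is what lets a JSON Patch address array entries by key instead of by array index. A minimal sketch of that round-trip follows, using GlobalTagsTemplate (which, per its diff further below, keys the "tags" array by each element's "tag" field); the sample JSON and the demo class name are illustrative assumptions, not part of this patch.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate;
import java.util.Collections;

// Hypothetical demo class, not part of this patch.
public class ArrayMergingTemplateDemo {
  public static void main(String[] args) throws Exception {
    GlobalTagsTemplate template = new GlobalTagsTemplate();
    JsonNode base = new ObjectMapper().readTree(
        "{\"tags\":[{\"tag\":\"urn:li:tag:pii\"},{\"tag\":\"urn:li:tag:legacy\"}]}");
    // Keyed by each element's "tag" field, the array becomes a map, e.g.
    // {"tags":{"urn:li:tag:pii":{"tag":...},"urn:li:tag:legacy":{"tag":...}}},
    // so a patch can target one tag by key.
    JsonNode keyed = template.arrayFieldToMap(base, "tags", Collections.singletonList("tag"));
    // transformedMapToArray inverts the transform once the patch has been applied.
    JsonNode restored =
        template.transformedMapToArray(keyed, "tags", Collections.singletonList("tag"));
    System.out.println(keyed);
    System.out.println(restored);
  }
}

diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java index 742dbd70d4503..95849a94bae29 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.github.fge.jsonpatch.JsonPatchException; import com.github.fge.jsonpatch.Patch; @@ -13,25 +15,25 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - /** - * Holds connection between aspect specs and their templates and drives the generation from templates + * Holds connection between aspect specs and their templates and drives the generation from + * templates */ public class AspectTemplateEngine { - public static final Set<String> SUPPORTED_TEMPLATES = Stream.of( - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - DATA_FLOW_INFO_ASPECT_NAME, - DATA_JOB_INFO_ASPECT_NAME, - DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).collect(Collectors.toSet()); + public static final Set<String> SUPPORTED_TEMPLATES = + Stream.of( + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + DATA_FLOW_INFO_ASPECT_NAME, + DATA_JOB_INFO_ASPECT_NAME, + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .collect(Collectors.toSet()); private final Map<String, Template<? extends RecordTemplate>> _aspectTemplateMap; @@ -45,11 +47,14 @@ public AspectTemplateEngine(Map<String, Template<? extends RecordTemplate>> aspe @Nullable public RecordTemplate getDefaultTemplate(String aspectSpecName) { - return _aspectTemplateMap.containsKey(aspectSpecName) ? _aspectTemplateMap.get(aspectSpecName).getDefault() : null; + return _aspectTemplateMap.containsKey(aspectSpecName) + ? 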
_aspectTemplateMap.get(aspectSpecName).getDefault() + : null; } /** * Applies a json patch to a record, optionally merging array fields as necessary + * * @param recordTemplate original template to be updated * @param jsonPatch patch to apply * @param aspectSpec aspectSpec of the template @@ -58,7 +63,8 @@ public RecordTemplate getDefaultTemplate(String aspectSpecName) { * @throws JsonPatchException if there is an issue with applying the json patch */ @Nonnull - public <T extends RecordTemplate> RecordTemplate applyPatch(RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec) + public <T extends RecordTemplate> RecordTemplate applyPatch( + RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec) throws JsonProcessingException, JsonPatchException { Template<T> template = getTemplate(aspectSpec); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java index cf2f5552fbb73..44090b3a6d05b 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry.template; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -9,14 +12,13 @@ import com.linkedin.data.template.RecordTemplate; import java.util.List; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - - -public abstract class CompoundKeyTemplate<T extends RecordTemplate> implements ArrayMergingTemplate<T> { +public abstract class CompoundKeyTemplate<T extends RecordTemplate> + implements ArrayMergingTemplate<T> { /** - * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent paths to be specified + * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent + * paths to be specified + * * @param transformedNode transformed node to have keys populated * @return transformed node that has top level keys populated */ @@ -25,7 +27,8 @@ public JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) List<String> paths = getPaths(jsonPatch); for (String path : paths) { String[] keys = path.split("/"); - // Skip first as it will always be blank due to path starting with /, skip last key as we only need to populate top level + // Skip first as it will always be blank due to path starting with /, skip last key as we only + // need to populate top level JsonNode parent = transformedNodeClone; for (int i = 1; i < keys.length - 1; i++) { if (parent.get(keys[i]) == null) { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java index 4310c84ded0e2..0793cacce780f 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java @@ -1,5 +1,7 @@ package 
com.linkedin.metadata.models.registry.template; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -8,26 +10,23 @@ import com.linkedin.data.template.RecordTemplate; import javax.annotation.Nonnull; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - - public interface Template<T extends RecordTemplate> { /** * Cast method to get subtype of {@link RecordTemplate} for applying templating methods + * @param recordTemplate generic record * @return specific type for this template * @throws {@link ClassCastException} when recordTemplate is not the correct type for the template */ T getSubtype(RecordTemplate recordTemplate) throws ClassCastException; - /** - * Get the template clas type - */ + /** Get the template class type */ Class<T> getTemplateType(); /** * Get a template aspect with defaults set + * @return subtype of {@link RecordTemplate} that lines up with a predefined AspectSpec */ @Nonnull @@ -35,6 +34,7 @@ public interface Template<T extends RecordTemplate> { /** * Applies a specified {@link Patch} to an aspect + * @param recordTemplate original {@link RecordTemplate} to be patched * @param jsonPatch patch to apply * @return patched value @@ -50,20 +50,24 @@ default T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) } /** - * Returns a json representation of the template, modified for template based operations to be compatible with patch - * semantics. + * Returns a json representation of the template, modified for template-based operations to be + * compatible with patch semantics. + * @param recordTemplate template to be transformed into json * @return a {@link JsonNode} representation of the template * @throws JsonProcessingException if there is an issue converting the input to JSON */ - default JsonNode preprocessTemplate(RecordTemplate recordTemplate) throws JsonProcessingException { + default JsonNode preprocessTemplate(RecordTemplate recordTemplate) + throws JsonProcessingException { T subtype = getSubtype(recordTemplate); JsonNode baseNode = OBJECT_MAPPER.readTree(RecordUtils.toJsonString(subtype)); return transformFields(baseNode); } /** - * Transforms fields from base json representation of RecordTemplate to definition specific to aspect per patch semantics + * Transforms fields from base json representation of RecordTemplate to definition specific to + * aspect per patch semantics + * @param baseNode the base node to be transformed * @return transformed {@link JsonNode} */ @@ -72,12 +76,10 @@ default JsonNode preprocessTemplate(RecordTemplate recordTemplate) throws JsonPr /** * Reserializes the patched {@link JsonNode} to the base {@link RecordTemplate} definition + * @param patched the deserialized patched json in custom format per aspect spec * @return A {@link JsonNode} that has been retranslated from patch semantics */ @Nonnull JsonNode rebaseFields(JsonNode patched); - - - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java index 0cd9a52c8fe60..a98e60c739749 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java @@ -8,7 +8,6 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class GlobalTagsTemplate implements ArrayMergingTemplate<GlobalTags> { private static final String TAGS_FIELD_NAME = "tags"; @@ -45,6 +44,7 @@ public JsonNode transformFields(JsonNode baseNode) { @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, TAGS_FIELD_NAME, Collections.singletonList(TAG_FIELD_NAME)); + return transformedMapToArray( + patched, TAGS_FIELD_NAME, Collections.singletonList(TAG_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java index e905404824022..7ce59916f2073 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry.template.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.AuditStamp; @@ -11,10 +14,6 @@ import java.util.Collections; import javax.annotation.Nonnull; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class GlossaryTermsTemplate implements ArrayMergingTemplate<GlossaryTerms> { private static final String TERMS_FIELD_NAME = "terms"; @@ -40,8 +39,12 @@ public Class<GlossaryTerms> getTemplateType() { @Override public GlossaryTerms getDefault() { GlossaryTerms glossaryTerms = new GlossaryTerms(); - glossaryTerms.setTerms(new GlossaryTermAssociationArray()) - .setAuditStamp(new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + glossaryTerms + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp( + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); return glossaryTerms; } @@ -52,8 +55,7 @@ public JsonNode transformFields(JsonNode baseNode) { // Set required deprecated field if (baseNode.get(AUDIT_STAMP_FIELD) == null) { ObjectNode auditStampNode = instance.objectNode(); - auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR) - .put(TIME_FIELD, System.currentTimeMillis()); + auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR).put(TIME_FIELD, System.currentTimeMillis()); ((ObjectNode) baseNode).set(AUDIT_STAMP_FIELD, auditStampNode); } return arrayFieldToMap(baseNode, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); @@ -65,10 +67,10 @@ public JsonNode rebaseFields(JsonNode patched) { // Set required deprecated field if (patched.get(AUDIT_STAMP_FIELD) == null) { ObjectNode auditStampNode = instance.objectNode(); - auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR) - .put(TIME_FIELD, System.currentTimeMillis()); + auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR).put(TIME_FIELD, System.currentTimeMillis()); ((ObjectNode) patched).set(AUDIT_STAMP_FIELD, auditStampNode); } - return transformedMapToArray(patched, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); + return 
transformedMapToArray( + patched, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java index 0a2cff4395b54..b850ae830b98c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.common; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.AuditStamp; import com.linkedin.common.OwnerArray; @@ -10,9 +12,6 @@ import java.util.Arrays; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipTemplate extends CompoundKeyTemplate<Ownership> { private static final String OWNERS_FIELD_NAME = "owners"; @@ -37,9 +36,10 @@ public Class<Ownership> getTemplateType() { public Ownership getDefault() { Ownership ownership = new Ownership(); ownership.setOwners(new OwnerArray()); - ownership.setLastModified(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))); + ownership.setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))); return ownership; } @@ -47,12 +47,14 @@ public Ownership getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap(baseNode, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); + return arrayFieldToMap( + baseNode, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); + return transformedMapToArray( + patched, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java index 5997bd8e7910d..73e837f368f0b 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.models.registry.template.Template; import javax.annotation.Nonnull; - public class DataFlowInfoTemplate implements Template<DataFlowInfo> { @Override diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java index 9d25fa71286d3..bdb306c2d32e4 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.models.registry.template.Template; import javax.annotation.Nonnull; - public class 
DataJobInfoTemplate implements Template<DataJobInfo> { @Override diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java index b4ddb4523c9a5..889297734e977 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java @@ -12,7 +12,6 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class DataJobInputOutputTemplate implements ArrayMergingTemplate<DataJobInputOutput> { private static final String INPUT_DATA_JOB_EDGES_FIELD_NAME = "inputDatajobEdges"; @@ -23,6 +22,7 @@ public class DataJobInputOutputTemplate implements ArrayMergingTemplate<DataJobI private static final String INPUT_DATASET_FIELDS_FIELD_NAME = "inputDatasetFields"; private static final String OUTPUT_DATASET_FIELDS_FIELD_NAME = "outputDatasetFields"; + // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key @Override @@ -60,17 +60,28 @@ public DataJobInputOutput getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - JsonNode transformedNode = arrayFieldToMap(baseNode, INPUT_DATA_JOB_EDGES_FIELD_NAME, - Collections.singletonList(DESTINATION_URN_FIELD_NAME)); - - transformedNode = arrayFieldToMap(transformedNode, INPUT_DATASET_EDGES_FIELD_NAME, - Collections.singletonList(DESTINATION_URN_FIELD_NAME)); - - transformedNode = arrayFieldToMap(transformedNode, OUTPUT_DATASET_EDGES_FIELD_NAME, - Collections.singletonList(DESTINATION_URN_FIELD_NAME)); - - transformedNode = arrayFieldToMap(transformedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); - transformedNode = arrayFieldToMap(transformedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); + JsonNode transformedNode = + arrayFieldToMap( + baseNode, + INPUT_DATA_JOB_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + transformedNode = + arrayFieldToMap( + transformedNode, + INPUT_DATASET_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + transformedNode = + arrayFieldToMap( + transformedNode, + OUTPUT_DATASET_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + transformedNode = + arrayFieldToMap(transformedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); + transformedNode = + arrayFieldToMap(transformedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); return transformedNode; } @@ -78,17 +89,30 @@ public JsonNode transformFields(JsonNode baseNode) { @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - JsonNode rebasedNode = transformedMapToArray(patched, INPUT_DATA_JOB_EDGES_FIELD_NAME, - Collections.singletonList(DESTINATION_URN_FIELD_NAME)); - - rebasedNode = transformedMapToArray(rebasedNode, INPUT_DATASET_EDGES_FIELD_NAME, - Collections.singletonList(DESTINATION_URN_FIELD_NAME)); - - rebasedNode = transformedMapToArray(rebasedNode, OUTPUT_DATASET_EDGES_FIELD_NAME, - Collections.singletonList(DESTINATION_URN_FIELD_NAME)); - - rebasedNode = transformedMapToArray(rebasedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); - rebasedNode = transformedMapToArray(rebasedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); + 
JsonNode rebasedNode = + transformedMapToArray( + patched, + INPUT_DATA_JOB_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + rebasedNode = + transformedMapToArray( + rebasedNode, + INPUT_DATASET_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + rebasedNode = + transformedMapToArray( + rebasedNode, + OUTPUT_DATASET_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + rebasedNode = + transformedMapToArray( + rebasedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); + rebasedNode = + transformedMapToArray( + rebasedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList()); return rebasedNode; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java index d835d5ae939ae..899c51a7c3d7e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java @@ -8,7 +8,6 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class DataProductPropertiesTemplate implements ArrayMergingTemplate<DataProductProperties> { private static final String ASSETS_FIELD_NAME = "assets"; @@ -44,6 +43,7 @@ public JsonNode transformFields(JsonNode baseNode) { @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, ASSETS_FIELD_NAME, Collections.singletonList(KEY_FIELD_NAME)); + return transformedMapToArray( + patched, ASSETS_FIELD_NAME, Collections.singletonList(KEY_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java index 3c1be1f7ecaad..991f7f3d4053a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java @@ -9,7 +9,6 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class DatasetPropertiesTemplate implements ArrayMergingTemplate<DatasetProperties> { private static final String TAGS_FIELD_NAME = "tags"; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java index 62888d117b3de..9712a9081d33a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.dataset; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.AuditStamp; @@ -13,9 +15,6 @@ import java.util.Collections; import javax.annotation.Nonnull; -import static 
com.linkedin.metadata.Constants.*; - - public class EditableSchemaMetadataTemplate extends CompoundKeyTemplate<EditableSchemaMetadata> { private static final String EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME = "editableSchemaFieldInfo"; @@ -24,7 +23,8 @@ public class EditableSchemaMetadataTemplate extends CompoundKeyTemplate<Editable private static final String GLOSSARY_TERMS_FIELD_NAME = "glossaryTerms"; @Override - public EditableSchemaMetadata getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + public EditableSchemaMetadata getSubtype(RecordTemplate recordTemplate) + throws ClassCastException { if (recordTemplate instanceof EditableSchemaMetadata) { return (EditableSchemaMetadata) recordTemplate; } @@ -39,7 +39,10 @@ public Class<EditableSchemaMetadata> getTemplateType() { @Nonnull @Override public EditableSchemaMetadata getDefault() { - AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); return new EditableSchemaMetadata() .setCreated(auditStamp) .setLastModified(auditStamp) @@ -49,47 +52,70 @@ public EditableSchemaMetadata getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - JsonNode transformedNode = arrayFieldToMap(baseNode, EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, - Collections.singletonList(FIELDPATH_FIELD_NAME)); + JsonNode transformedNode = + arrayFieldToMap( + baseNode, + EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, + Collections.singletonList(FIELDPATH_FIELD_NAME)); // Create temporary templates for array subfields GlobalTagsTemplate globalTagsTemplate = new GlobalTagsTemplate(); GlossaryTermsTemplate glossaryTermsTemplate = new GlossaryTermsTemplate(); // Apply template transforms to array subfields - transformedNode.get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME).elements().forEachRemaining(node -> { - JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); - JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); - if (globalTags != null) { - ((ObjectNode) node).set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.transformFields(node.get(GLOBAL_TAGS_FIELD_NAME))); - } - if (glossaryTerms != null) { - ((ObjectNode) node).set(GLOSSARY_TERMS_FIELD_NAME, glossaryTermsTemplate.transformFields(node.get(GLOSSARY_TERMS_FIELD_NAME))); - } - }); + transformedNode + .get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME) + .elements() + .forEachRemaining( + node -> { + JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); + JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); + if (globalTags != null) { + ((ObjectNode) node) + .set( + GLOBAL_TAGS_FIELD_NAME, + globalTagsTemplate.transformFields(node.get(GLOBAL_TAGS_FIELD_NAME))); + } + if (glossaryTerms != null) { + ((ObjectNode) node) + .set( + GLOSSARY_TERMS_FIELD_NAME, + glossaryTermsTemplate.transformFields(node.get(GLOSSARY_TERMS_FIELD_NAME))); + } + }); return transformedNode; } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - JsonNode rebasedNode = transformedMapToArray(patched, EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, - Collections.singletonList(FIELDPATH_FIELD_NAME)); + JsonNode rebasedNode = + transformedMapToArray( + patched, + EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, + Collections.singletonList(FIELDPATH_FIELD_NAME)); // Create temporary templates for array subfields GlobalTagsTemplate globalTagsTemplate = new GlobalTagsTemplate(); GlossaryTermsTemplate glossaryTermsTemplate = 
new GlossaryTermsTemplate(); // Apply template rebases to array subfields - rebasedNode.get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME).elements().forEachRemaining(node -> { - JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); - JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); - if (globalTags != null) { - ((ObjectNode) node).set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.rebaseFields(globalTags)); - } - if (glossaryTerms != null) { - ((ObjectNode) node).set(GLOSSARY_TERMS_FIELD_NAME, glossaryTermsTemplate.rebaseFields(glossaryTerms)); - } - }); - + rebasedNode + .get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME) + .elements() + .forEachRemaining( + node -> { + JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); + JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); + if (globalTags != null) { + ((ObjectNode) node) + .set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.rebaseFields(globalTags)); + } + if (glossaryTerms != null) { + ((ObjectNode) node) + .set( + GLOSSARY_TERMS_FIELD_NAME, + glossaryTermsTemplate.rebaseFields(glossaryTerms)); + } + }); return rebasedNode; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java index 9e87b8a385328..35816895669be 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java @@ -9,11 +9,11 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class UpstreamLineageTemplate implements ArrayMergingTemplate<UpstreamLineage> { private static final String UPSTREAMS_FIELD_NAME = "upstreams"; private static final String DATASET_FIELD_NAME = "dataset"; + // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key @Override @@ -42,12 +42,14 @@ public UpstreamLineage getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap(baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + return arrayFieldToMap( + baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + return transformedMapToArray( + patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java index 6496ac125d867..18d070ec3da45 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.util; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -7,27 +9,31 @@ import java.util.ArrayList; import java.util.List; -import static com.linkedin.metadata.Constants.*; - - 
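
A minimal end-to-end sketch of the template flow reformatted above, not part of this change: applyPatch() turns keyed arrays into maps (transformFields), applies the JSON Patch, then rebases back to arrays (rebaseFields). The path "/tags/urn:li:tag:pii" is an assumption mirroring how ArrayMergingTemplate keys GlobalTags entries by their "tag" urn.

import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.OBJECT_MAPPER;

import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonpatch.JsonPatch;
import com.linkedin.common.GlobalTags;
import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate;

class GlobalTagsPatchSketch {
  static GlobalTags addPiiTag() throws Exception {
    GlobalTagsTemplate template = new GlobalTagsTemplate();
    GlobalTags current = template.getDefault(); // starts with an empty "tags" array
    JsonNode patchJson =
        OBJECT_MAPPER.readTree(
            "[{\"op\": \"add\", \"path\": \"/tags/urn:li:tag:pii\","
                + " \"value\": {\"tag\": \"urn:li:tag:pii\"}}]");
    // transformFields -> JSON Patch apply -> rebaseFields, all inside applyPatch
    return template.applyPatch(current, JsonPatch.fromJson(patchJson));
  }
}
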
public class TemplateUtil { - private TemplateUtil() { - - } + private TemplateUtil() {} public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public static List<String> getPaths(Patch jsonPatch) { JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); List<String> paths = new ArrayList<>(); - patchNode.elements().forEachRemaining(node -> { - paths.add(node.get("path").asText()); - }); + patchNode + .elements() + .forEachRemaining( + node -> { + paths.add(node.get("path").asText()); + }); return paths; } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java index 0ce066b7a3433..ad16aec7f66d2 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java @@ -1,20 +1,24 @@ package com.linkedin.metadata.models; +import static org.testng.Assert.*; + import com.linkedin.data.schema.DataSchema; import com.linkedin.metadata.models.registry.TestConstants; import java.nio.file.Paths; import java.util.Optional; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class DataSchemaFactoryTest { @Test public void testCustomClassLoading() throws Exception { - DataSchemaFactory dsf = DataSchemaFactory.withCustomClasspath(Paths.get( - TestConstants.BASE_DIRECTORY + "/" + TestConstants.TEST_REGISTRY + "/" - + TestConstants.TEST_VERSION.toString())); + DataSchemaFactory dsf = + DataSchemaFactory.withCustomClasspath( + Paths.get( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString())); // Assert that normally found aspects from the core model are missing Optional<DataSchema> dataSchema = dsf.getAspectSchema("datasetProfile"); assertFalse(dataSchema.isPresent(), "datasetProfile"); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index b95cb1085283f..e1ea80e2bcad2 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -1,8 +1,12 @@ package com.linkedin.metadata.models; -import com.datahub.test.TestBrowsePaths; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.test.SearchFeatures; import com.datahub.test.Snapshot; +import com.datahub.test.TestBrowsePaths; import com.datahub.test.TestEntityInfo; import com.datahub.test.TestEntityKey; import com.datahub.test.invalid.DuplicateSearchableFields; @@ -18,67 +22,76 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static 
org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - - -/** - * Tests the capabilities of {@link EntitySpecBuilder} - */ +/** Tests the capabilities of {@link EntitySpecBuilder} */ public class EntitySpecBuilderTest { @Test public void testBuildAspectSpecValidationAspectMissingAnnotation() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new MissingAspectAnnotation().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new MissingAspectAnnotation().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationInvalidSearchableFieldType() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new InvalidSearchableFieldType().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new InvalidSearchableFieldType().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationDuplicateSearchableFields() { - AspectSpec aspectSpec = new EntitySpecBuilder() - .buildAspectSpec(new DuplicateSearchableFields().schema(), RecordTemplate.class); + AspectSpec aspectSpec = + new EntitySpecBuilder() + .buildAspectSpec(new DuplicateSearchableFields().schema(), RecordTemplate.class); - aspectSpec.getSearchableFieldSpecs().forEach(searchableFieldSpec -> { - String name = searchableFieldSpec.getSearchableAnnotation().getFieldName(); - assertTrue("textField".equals(name) || "textField2".equals(name)); - }); + aspectSpec + .getSearchableFieldSpecs() + .forEach( + searchableFieldSpec -> { + String name = searchableFieldSpec.getSearchableAnnotation().getFieldName(); + assertTrue("textField".equals(name) || "textField2".equals(name)); + }); } @Test public void testBuildAspectSpecValidationMissingRelationshipName() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new MissingRelationshipName().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new MissingRelationshipName().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationNonNumericSearchScoreField() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new NonNumericSearchScoreField().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new NonNumericSearchScoreField().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationNonSingularSearchScoreField() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new NonSingularSearchScoreField().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new NonSingularSearchScoreField().schema(), RecordTemplate.class)); } + @Test public void testBuildEntitySpecs() { // Instantiate the test Snapshot final Snapshot snapshot = new Snapshot(); - final List<EntitySpec> validEntitySpecs = new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()); + final List<EntitySpec> validEntitySpecs = + new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()); // Assert single entity. 
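
As an aside on the ArrayMergingTemplate transforms reformatted earlier, a self-contained Jackson sketch (illustrative only, with a hypothetical "tags" payload) of the keyed-map shape they produce, which lets a JSON Patch address an entry by key rather than by a brittle array index:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

class ArrayToMapSketch {
  static JsonNode keyTagsByUrn() throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode base =
        (ObjectNode)
            mapper.readTree(
                "{\"tags\": [{\"tag\": \"urn:li:tag:pii\"}, {\"tag\": \"urn:li:tag:legacy\"}]}");
    ObjectNode keyed = mapper.createObjectNode();
    base.get("tags").forEach(tag -> keyed.set(tag.get("tag").asText(), tag));
    // base becomes {"tags": {"urn:li:tag:pii": {...}, "urn:li:tag:legacy": {...}}}
    base.set("tags", keyed);
    return base;
  }
}
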
assertEquals(1, validEntitySpecs.size()); @@ -110,116 +123,265 @@ public void testBuildEntitySpecs() { private void validateTestEntityKey(final AspectSpec keyAspectSpec) { assertEquals("testEntityKey", keyAspectSpec.getName()); - assertEquals(new TestEntityKey().schema().getFullName(), keyAspectSpec.getPegasusSchema().getFullName()); + assertEquals( + new TestEntityKey().schema().getFullName(), keyAspectSpec.getPegasusSchema().getFullName()); // Assert on Searchable Fields assertEquals(2, keyAspectSpec.getSearchableFieldSpecs().size()); // keyPart1, keyPart3 - assertEquals("keyPart1", keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart1").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart1").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("keyPart3", keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart3").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.KEYWORD, keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart3").toString()) - .getSearchableAnnotation().getFieldType()); + assertEquals( + "keyPart1", + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart1").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart1").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "keyPart3", + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart3").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.KEYWORD, + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart3").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Field assertEquals(1, keyAspectSpec.getRelationshipFieldSpecs().size()); - assertEquals("keyForeignKey", keyAspectSpec.getRelationshipFieldSpecMap().get(new PathSpec("keyPart2").toString()).getRelationshipName()); + assertEquals( + "keyForeignKey", + keyAspectSpec + .getRelationshipFieldSpecMap() + .get(new PathSpec("keyPart2").toString()) + .getRelationshipName()); } - private void validateBrowsePaths(final AspectSpec browsePathAspectSpec) { assertEquals("testBrowsePaths", browsePathAspectSpec.getName()); - assertEquals(new TestBrowsePaths().schema().getFullName(), browsePathAspectSpec.getPegasusSchema().getFullName()); + assertEquals( + new TestBrowsePaths().schema().getFullName(), + browsePathAspectSpec.getPegasusSchema().getFullName()); assertEquals(1, browsePathAspectSpec.getSearchableFieldSpecs().size()); - assertEquals(SearchableAnnotation.FieldType.BROWSE_PATH, browsePathAspectSpec.getSearchableFieldSpecs().get(0) - .getSearchableAnnotation().getFieldType()); + assertEquals( + SearchableAnnotation.FieldType.BROWSE_PATH, + browsePathAspectSpec + .getSearchableFieldSpecs() + .get(0) + .getSearchableAnnotation() + .getFieldType()); } private void validateTestEntityInfo(final AspectSpec testEntityInfo) { assertEquals("testEntityInfo", testEntityInfo.getName()); - assertEquals(new TestEntityInfo().schema().getFullName(), testEntityInfo.getPegasusSchema().getFullName()); + assertEquals( + new TestEntityInfo().schema().getFullName(), + testEntityInfo.getPegasusSchema().getFullName()); // Assert on Searchable Fields 
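
The lookups below all key into getSearchableFieldSpecMap() by PathSpec string; a small sketch of that convention (the rendered form is an assumption based on these tests, with "*" standing in for an array wildcard):

import com.linkedin.data.schema.PathSpec;

class PathSpecKeySketch {
  static String nestedArrayKey() {
    // expected to render as "/nestedRecordArrayField/*/nestedArrayStringField"
    return new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString();
  }
}
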
assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11); - assertEquals("customProperties", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("customProperties").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.KEYWORD, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("customProperties").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("textFieldOverride", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("textArrayField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textArrayField", "*").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT_PARTIAL, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textArrayField", "*").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("wordGramField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("wordGramField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.WORD_GRAM, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("wordGramField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedIntegerField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedIntegerField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.COUNT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedIntegerField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedArrayStringField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedArrayArrayField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("esObjectField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("esObjectField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.OBJECT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("esObjectField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("foreignKey", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(true, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getSearchableAnnotation().isQueryByDefault()); - 
assertEquals("doubleField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.DOUBLE, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldType()); - + assertEquals( + "customProperties", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("customProperties").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.KEYWORD, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("customProperties").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "textFieldOverride", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "textArrayField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textArrayField", "*").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT_PARTIAL, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textArrayField", "*").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "wordGramField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("wordGramField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.WORD_GRAM, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("wordGramField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedIntegerField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedIntegerField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.COUNT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedIntegerField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedArrayStringField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedArrayArrayField", + testEntityInfo + .getSearchableFieldSpecMap() + .get( + new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*") + .toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get( + new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*") + .toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "esObjectField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("esObjectField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.OBJECT, + 
testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("esObjectField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "foreignKey", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + true, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getSearchableAnnotation() + .isQueryByDefault()); + assertEquals( + "doubleField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("doubleField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.DOUBLE, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("doubleField").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Fields assertEquals(4, testEntityInfo.getRelationshipFieldSpecs().size()); - assertEquals("foreignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getRelationshipName()); - assertEquals("foreignKeyArray", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("foreignKeyArray", "*").toString()).getRelationshipName()); - assertEquals("nestedForeignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedForeignKey").toString()).getRelationshipName()); - assertEquals("nestedArrayForeignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayForeignKey").toString()).getRelationshipName()); + assertEquals( + "foreignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getRelationshipName()); + assertEquals( + "foreignKeyArray", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("foreignKeyArray", "*").toString()) + .getRelationshipName()); + assertEquals( + "nestedForeignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedForeignKey").toString()) + .getRelationshipName()); + assertEquals( + "nestedArrayForeignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayForeignKey").toString()) + .getRelationshipName()); } private void validateSearchFeatures(final AspectSpec searchFeaturesAspectSpec) { assertEquals("searchFeatures", searchFeaturesAspectSpec.getName()); - assertEquals(new SearchFeatures().schema().getFullName(), + assertEquals( + new SearchFeatures().schema().getFullName(), searchFeaturesAspectSpec.getPegasusSchema().getFullName()); assertEquals(2, searchFeaturesAspectSpec.getSearchScoreFieldSpecs().size()); - assertEquals("feature1", searchFeaturesAspectSpec.getSearchScoreFieldSpecMap() - .get(new PathSpec("feature1").toString()) - .getSearchScoreAnnotation() - .getFieldName()); - assertEquals("feature2", searchFeaturesAspectSpec.getSearchScoreFieldSpecMap() - .get(new PathSpec("feature2").toString()) - .getSearchScoreAnnotation() - .getFieldName()); + assertEquals( + "feature1", + searchFeaturesAspectSpec + .getSearchScoreFieldSpecMap() + .get(new PathSpec("feature1").toString()) + .getSearchScoreAnnotation() + .getFieldName()); + assertEquals( + "feature2", + searchFeaturesAspectSpec + .getSearchScoreFieldSpecMap() + .get(new PathSpec("feature2").toString()) + .getSearchScoreAnnotation() + .getFieldName()); } - } diff --git 
a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java index 320dfc47f21e4..852e4f19bac12 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry; +import static org.testng.Assert.*; + import com.datahub.test.TestEntityProfile; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import com.linkedin.metadata.models.EntitySpec; @@ -9,21 +11,22 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class ConfigEntityRegistryTest { @BeforeTest public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @Test public void testEntityRegistry() throws FileNotFoundException { - ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + TestEntityProfile.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); Map<String, EntitySpec> entitySpecs = configEntityRegistry.getEntitySpecs(); Map<String, EventSpec> eventSpecs = configEntityRegistry.getEventSpecs(); @@ -54,9 +57,11 @@ public void testEntityRegistry() throws FileNotFoundException { @Test public void testEntityRegistryIdentifier() throws FileNotFoundException { - ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + TestEntityProfile.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); assertEquals(configEntityRegistry.getIdentifier(), "test-registry"); } } - diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java index 05d23eb4b455f..20a64f9af25c0 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java @@ -1,5 +1,11 @@ package com.linkedin.metadata.models.registry; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.RelationshipFieldSpec; @@ -11,13 +17,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - - public class LineageRegistryTest { @Test public void testRegistryWhenEmpty() { 
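
A minimal sketch of querying lineage edges the way the test below does; any EntityRegistry whose aspects declare relationship annotations with isLineage=true should yield edges like these:

import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.models.registry.LineageRegistry;

class LineageLookupSketch {
  static void printUpstreams(EntityRegistry entityRegistry) {
    LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry);
    LineageRegistry.LineageSpec spec = lineageRegistry.getLineageSpec("dataset");
    // e.g. ("DownstreamOf", OUTGOING, "dataset") and ("Produces", INCOMING, "dataJob")
    spec.getUpstreamEdges().forEach(System.out::println);
  }
}
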
@@ -33,14 +32,16 @@ public void testRegistry() { Map<String, EntitySpec> mockEntitySpecs = new HashMap<>(); EntitySpec mockDatasetSpec = mock(EntitySpec.class); List<RelationshipFieldSpec> datasetRelations = - ImmutableList.of(buildSpec("DownstreamOf", ImmutableList.of("dataset"), true, true), + ImmutableList.of( + buildSpec("DownstreamOf", ImmutableList.of("dataset"), true, true), buildSpec("AssociatedWith", ImmutableList.of("tag"), true, false), buildSpec("AssociatedWith", ImmutableList.of("glossaryTerm"), true, false)); when(mockDatasetSpec.getRelationshipFieldSpecs()).thenReturn(datasetRelations); mockEntitySpecs.put("dataset", mockDatasetSpec); EntitySpec mockJobSpec = mock(EntitySpec.class); List<RelationshipFieldSpec> jobRelations = - ImmutableList.of(buildSpec("Produces", ImmutableList.of("dataset"), false, true), + ImmutableList.of( + buildSpec("Produces", ImmutableList.of("dataset"), false, true), buildSpec("Consumes", ImmutableList.of("dataset"), true, true)); when(mockJobSpec.getRelationshipFieldSpecs()).thenReturn(jobRelations); mockEntitySpecs.put("dataJob", mockJobSpec); @@ -50,22 +51,51 @@ public void testRegistry() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); LineageRegistry.LineageSpec lineageSpec = lineageRegistry.getLineageSpec("dataset"); assertEquals(lineageSpec.getUpstreamEdges().size(), 2); - assertTrue(lineageSpec.getUpstreamEdges() - .contains(new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))); - assertTrue(lineageSpec.getUpstreamEdges() - .contains(new LineageRegistry.EdgeInfo("Produces", RelationshipDirection.INCOMING, "dataJob"))); + assertTrue( + lineageSpec + .getUpstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))); + assertTrue( + lineageSpec + .getUpstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "Produces", RelationshipDirection.INCOMING, "dataJob"))); assertEquals(lineageSpec.getDownstreamEdges().size(), 2); - assertTrue(lineageSpec.getDownstreamEdges() - .contains(new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "dataset"))); - assertTrue(lineageSpec.getDownstreamEdges() - .contains(new LineageRegistry.EdgeInfo("Consumes", RelationshipDirection.INCOMING, "dataJob"))); + assertTrue( + lineageSpec + .getDownstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.INCOMING, "dataset"))); + assertTrue( + lineageSpec + .getDownstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.INCOMING, "dataJob"))); } - private RelationshipFieldSpec buildSpec(String relationshipType, List<String> destinationEntityTypes, - boolean isUpstream, boolean isLineage) { + private RelationshipFieldSpec buildSpec( + String relationshipType, + List<String> destinationEntityTypes, + boolean isUpstream, + boolean isLineage) { RelationshipFieldSpec spec = mock(RelationshipFieldSpec.class); - when(spec.getRelationshipAnnotation()).thenReturn( - new RelationshipAnnotation(relationshipType, destinationEntityTypes, isUpstream, isLineage, null, null, null, null, null)); + when(spec.getRelationshipAnnotation()) + .thenReturn( + new RelationshipAnnotation( + relationshipType, + destinationEntityTypes, + isUpstream, + isLineage, + null, + null, + null, + null, + null)); return spec; } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java 
b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java index 38664fedb1570..1652a51290597 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry; +import static org.testng.Assert.*; + import com.linkedin.metadata.models.DataSchemaFactory; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; @@ -7,20 +9,19 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class PatchEntityRegistryTest { @Test public void testEntityRegistryLoad() throws Exception, EntityRegistryException { - PatchEntityRegistry patchEntityRegistry = new PatchEntityRegistry( - TestConstants.BASE_DIRECTORY - + "/" - + TestConstants.TEST_REGISTRY - + "/" - + TestConstants.TEST_VERSION.toString(), - TestConstants.TEST_REGISTRY, TestConstants.TEST_VERSION); + PatchEntityRegistry patchEntityRegistry = + new PatchEntityRegistry( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString(), + TestConstants.TEST_REGISTRY, + TestConstants.TEST_VERSION); Map<String, EntitySpec> entitySpecs = patchEntityRegistry.getEntitySpecs(); assertEquals(entitySpecs.values().size(), 1); @@ -40,21 +41,27 @@ public void testEntityRegistryLoad() throws Exception, EntityRegistryException { /** * Validate that patch entity registries can have key aspects + * * @throws Exception * @throws EntityRegistryException */ @Test public void testEntityRegistryWithKeyLoad() throws Exception, EntityRegistryException { - DataSchemaFactory dataSchemaFactory = DataSchemaFactory.withCustomClasspath( - Paths.get(TestConstants.BASE_DIRECTORY - + "/" - + TestConstants.TEST_REGISTRY - + "/" - + TestConstants.TEST_VERSION.toString())); + DataSchemaFactory dataSchemaFactory = + DataSchemaFactory.withCustomClasspath( + Paths.get( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString())); - PatchEntityRegistry patchEntityRegistry = new PatchEntityRegistry( - dataSchemaFactory, Paths.get("src/test_plugins/mycompany-full-model/0.0.1/entity-registry.yaml"), - TestConstants.TEST_REGISTRY, TestConstants.TEST_VERSION); + PatchEntityRegistry patchEntityRegistry = + new PatchEntityRegistry( + dataSchemaFactory, + Paths.get("src/test_plugins/mycompany-full-model/0.0.1/entity-registry.yaml"), + TestConstants.TEST_REGISTRY, + TestConstants.TEST_VERSION); Map<String, EntitySpec> entitySpecs = patchEntityRegistry.getEntitySpecs(); assertEquals(entitySpecs.values().size(), 1); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java index 06ed794ecc684..b3eb2af72708c 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.models.registry.TestConstants.*; +import static org.testng.Assert.*; + import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; 
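
A sketch of loading a patch registry directly, mirroring the constructor exercised above; the directory path is hypothetical and assumes the "<base>/<registry-name>/<version>" layout used by these tests:

import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.metadata.models.registry.PatchEntityRegistry;
import java.util.Map;
import org.apache.maven.artifact.versioning.ComparableVersion;

class PatchRegistryLoadSketch {
  static Map<String, EntitySpec> load() throws Exception {
    PatchEntityRegistry registry =
        new PatchEntityRegistry(
            "/etc/datahub/plugins/models/mycompany-dq-model/0.0.1", // hypothetical path
            "mycompany-dq-model",
            new ComparableVersion("0.0.1"));
    return registry.getEntitySpecs();
  }
}
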
import com.linkedin.data.schema.RecordDataSchema; @@ -28,122 +31,137 @@ import org.apache.maven.artifact.versioning.ComparableVersion; import org.testng.annotations.Test; -import static com.linkedin.metadata.models.registry.TestConstants.*; -import static org.testng.Assert.*; - - public class PluginEntityRegistryLoaderTest { @Test public void testEntityRegistry() throws FileNotFoundException, InterruptedException { - EntityRegistry baseEntityRegistry = new EntityRegistry() { - @Nonnull - @Override - public EntitySpec getEntitySpec(@Nonnull String entityName) { - return null; - } - - @Nonnull - @Override - public EventSpec getEventSpec(@Nonnull String eventName) { - return null; - } - - @Nonnull - @Override - public Map<String, EntitySpec> getEntitySpecs() { - return null; - } - - @Nonnull - @Override - public Map<String, AspectSpec> getAspectSpecs() { - return new HashMap<>(); - } - - @Nonnull - @Override - public Map<String, EventSpec> getEventSpecs() { - return null; - } - - @Nonnull - @Override - public AspectTemplateEngine getAspectTemplateEngine() { - return new AspectTemplateEngine(); - } - }; + EntityRegistry baseEntityRegistry = + new EntityRegistry() { + @Nonnull + @Override + public EntitySpec getEntitySpec(@Nonnull String entityName) { + return null; + } + + @Nonnull + @Override + public EventSpec getEventSpec(@Nonnull String eventName) { + return null; + } + + @Nonnull + @Override + public Map<String, EntitySpec> getEntitySpecs() { + return null; + } + + @Nonnull + @Override + public Map<String, AspectSpec> getAspectSpecs() { + return new HashMap<>(); + } + + @Nonnull + @Override + public Map<String, EventSpec> getEventSpecs() { + return null; + } + + @Nonnull + @Override + public AspectTemplateEngine getAspectTemplateEngine() { + return new AspectTemplateEngine(); + } + }; MergedEntityRegistry configEntityRegistry = new MergedEntityRegistry(baseEntityRegistry); PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(TestConstants.BASE_DIRECTORY).withBaseRegistry(configEntityRegistry).start(true); + new PluginEntityRegistryLoader(TestConstants.BASE_DIRECTORY) + .withBaseRegistry(configEntityRegistry) + .start(true); assertEquals(pluginEntityRegistryLoader.getPatchRegistries().size(), 1); EntityRegistryLoadResult loadResult = - pluginEntityRegistryLoader.getPatchRegistries().get(TestConstants.TEST_REGISTRY).get(TEST_VERSION).getSecond(); + pluginEntityRegistryLoader + .getPatchRegistries() + .get(TestConstants.TEST_REGISTRY) + .get(TEST_VERSION) + .getSecond(); assertNotNull(loadResult); assertEquals(loadResult.getLoadResult(), LoadStatus.FAILURE); } private EntityRegistry getBaseEntityRegistry() { final AspectSpec keyAspectSpec = - new AspectSpec(new AspectAnnotation("datasetKey", false, false, null), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), + new AspectSpec( + new AspectAnnotation("datasetKey", false, false, null), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), (RecordDataSchema) DataSchemaFactory.getInstance().getAspectSchema("datasetKey").get(), DataSchemaFactory.getInstance().getAspectClass("datasetKey").get()); final Map<String, EntitySpec> entitySpecMap = new HashMap<>(1); List<AspectSpec> aspectSpecList = new ArrayList<>(1); aspectSpecList.add(keyAspectSpec); - EntitySpec baseEntitySpec = new DefaultEntitySpec(aspectSpecList, new EntityAnnotation("dataset", 
"datasetKey"), - (RecordDataSchema) DataSchemaFactory.getInstance().getEntitySchema("dataset").get()); + EntitySpec baseEntitySpec = + new DefaultEntitySpec( + aspectSpecList, + new EntityAnnotation("dataset", "datasetKey"), + (RecordDataSchema) DataSchemaFactory.getInstance().getEntitySchema("dataset").get()); entitySpecMap.put("dataset", baseEntitySpec); final Map<String, EventSpec> eventSpecMap = new HashMap<>(1); - EventSpec baseEventSpec = new DefaultEventSpec("testEvent", new EventAnnotation("testEvent"), - (RecordDataSchema) DataSchemaFactory.getInstance().getEventSchema("testEvent").get()); + EventSpec baseEventSpec = + new DefaultEventSpec( + "testEvent", + new EventAnnotation("testEvent"), + (RecordDataSchema) DataSchemaFactory.getInstance().getEventSchema("testEvent").get()); eventSpecMap.put("testevent", baseEventSpec); - EntityRegistry baseEntityRegistry = new EntityRegistry() { - - @Nonnull - @Override - public EntitySpec getEntitySpec(@Nonnull String entityName) { - assertEquals(entityName, "dataset"); - return baseEntitySpec; - } - - @Nullable - @Override - public EventSpec getEventSpec(@Nonnull String eventName) { - assertEquals(eventName, "testEvent"); - return baseEventSpec; - } - - @Nonnull - @Override - public Map<String, EntitySpec> getEntitySpecs() { - return entitySpecMap; - } - - @Nonnull - @Override - public Map<String, AspectSpec> getAspectSpecs() { - return new HashMap<>(); - } - - @Nonnull - @Override - public Map<String, EventSpec> getEventSpecs() { - return eventSpecMap; - } - - @Nonnull - @Override - public AspectTemplateEngine getAspectTemplateEngine() { - return new AspectTemplateEngine(); - } - }; + EntityRegistry baseEntityRegistry = + new EntityRegistry() { + + @Nonnull + @Override + public EntitySpec getEntitySpec(@Nonnull String entityName) { + assertEquals(entityName, "dataset"); + return baseEntitySpec; + } + + @Nullable + @Override + public EventSpec getEventSpec(@Nonnull String eventName) { + assertEquals(eventName, "testEvent"); + return baseEventSpec; + } + + @Nonnull + @Override + public Map<String, EntitySpec> getEntitySpecs() { + return entitySpecMap; + } + + @Nonnull + @Override + public Map<String, AspectSpec> getAspectSpecs() { + return new HashMap<>(); + } + + @Nonnull + @Override + public Map<String, EventSpec> getEventSpecs() { + return eventSpecMap; + } + + @Nonnull + @Override + public AspectTemplateEngine getAspectTemplateEngine() { + return new AspectTemplateEngine(); + } + }; return baseEntityRegistry; } @@ -152,12 +170,21 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter MergedEntityRegistry mergedEntityRegistry = new MergedEntityRegistry(getBaseEntityRegistry()); PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(BASE_DIRECTORY).withBaseRegistry(mergedEntityRegistry).start(true); + new PluginEntityRegistryLoader(BASE_DIRECTORY) + .withBaseRegistry(mergedEntityRegistry) + .start(true); assertEquals(pluginEntityRegistryLoader.getPatchRegistries().size(), 1); EntityRegistryLoadResult loadResult = - pluginEntityRegistryLoader.getPatchRegistries().get(TEST_REGISTRY).get(TEST_VERSION).getSecond(); + pluginEntityRegistryLoader + .getPatchRegistries() + .get(TEST_REGISTRY) + .get(TEST_VERSION) + .getSecond(); assertNotNull(loadResult); - assertEquals(loadResult.getLoadResult(), LoadStatus.SUCCESS, "load failed with " + loadResult.getFailureReason()); + assertEquals( + loadResult.getLoadResult(), + LoadStatus.SUCCESS, + "load failed with " + 
loadResult.getFailureReason()); Map<String, EntitySpec> entitySpecs = mergedEntityRegistry.getEntitySpecs(); @@ -165,7 +192,8 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter assertEquals(entitySpec.getName(), "dataset"); assertEquals(entitySpec.getKeyAspectSpec().getName(), "datasetKey"); Optional<DataSchema> dataSchema = - Optional.ofNullable(entitySpecs.get("dataset").getAspectSpec("datasetKey").getPegasusSchema()); + Optional.ofNullable( + entitySpecs.get("dataset").getAspectSpec("datasetKey").getPegasusSchema()); assertTrue(dataSchema.isPresent(), "datasetKey"); assertNotNull(entitySpec.getAspectSpec("testDataQualityRules")); assertEquals(entitySpecs.values().size(), 1); @@ -179,37 +207,65 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter @Test /** - * Tests that we can load up entity registries that represent safe evolutions as well as decline to load registries that represent unsafe evolutions. - * - */ public void testEntityRegistryVersioning() throws InterruptedException { + * Tests that we can load up entity registries that represent safe evolutions as well as decline + * to load registries that represent unsafe evolutions. + */ + public void testEntityRegistryVersioning() throws InterruptedException { MergedEntityRegistry mergedEntityRegistry = new MergedEntityRegistry(getBaseEntityRegistry()); String multiversionPluginDir = "src/test_plugins/"; PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(multiversionPluginDir).withBaseRegistry(mergedEntityRegistry).start(true); - Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> loadedRegistries = - pluginEntityRegistryLoader.getPatchRegistries(); + new PluginEntityRegistryLoader(multiversionPluginDir) + .withBaseRegistry(mergedEntityRegistry) + .start(true); + Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> + loadedRegistries = pluginEntityRegistryLoader.getPatchRegistries(); String registryName = "mycompany-dq-model"; assertTrue(loadedRegistries.containsKey(registryName)); assertTrue(loadedRegistries.get(registryName).containsKey(new ComparableVersion("0.0.1"))); - System.out.println(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.1")).getSecond().getFailureReason()); - - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.1")).getSecond().getLoadResult(), + System.out.println( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.1")) + .getSecond() + .getFailureReason()); + + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.1")) + .getSecond() + .getLoadResult(), LoadStatus.SUCCESS); - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.2")).getSecond().getLoadResult(), + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.2")) + .getSecond() + .getLoadResult(), LoadStatus.SUCCESS); - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.3")).getSecond().getLoadResult(), + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.3")) + .getSecond() + .getLoadResult(), LoadStatus.FAILURE); - assertTrue(loadedRegistries.get(registryName) - .get(new ComparableVersion("0.0.3")) - .getSecond() - .getFailureReason() - .contains("new record removed required fields type")); + assertTrue( + loadedRegistries + .get(registryName) + .get(new 
ComparableVersion("0.0.3")) + .getSecond() + .getFailureReason() + .contains("new record removed required fields type")); assertTrue(mergedEntityRegistry.getEntitySpec("dataset").hasAspect("dataQualityRules")); RecordDataSchema dataSchema = - mergedEntityRegistry.getEntitySpec("dataset").getAspectSpec("dataQualityRules").getPegasusSchema(); + mergedEntityRegistry + .getEntitySpec("dataset") + .getAspectSpec("dataQualityRules") + .getPegasusSchema(); ArrayDataSchema arrayDataSchema = (ArrayDataSchema) dataSchema.getField("rules").getType().getDereferencedDataSchema(); // Aspect Schema should be the same as version 0.0.2, checking to see that all fields exist diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java index ae46f3796aa73..43ae86076ae8c 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java @@ -2,7 +2,6 @@ import org.apache.maven.artifact.versioning.ComparableVersion; - public class TestConstants { public static final String TEST_REGISTRY = "mycompany-dq-model"; public static final String BASE_DIRECTORY = "custom-test-model/build/plugins/models"; @@ -10,6 +9,5 @@ public class TestConstants { public static final String TEST_ASPECT_NAME = "testDataQualityRules"; public static final String TEST_EVENT_NAME = "dataQualityEvent"; - private TestConstants() { - } + private TestConstants() {} } diff --git a/gradle/checkstyle/checkstyle.xml b/gradle/checkstyle/checkstyle.xml deleted file mode 100644 index a9bffe839edad..0000000000000 --- a/gradle/checkstyle/checkstyle.xml +++ /dev/null @@ -1,198 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN" "http://www.puppycrawl.com/dtds/configuration_1_3.dtd"> - -<!-- - Checkstyle-Configuration: LinkedIn Style - Description: -LinkedIn Java style. ---> -<module name="Checker"> - <property name="severity" value="warning"/> - <property name="fileExtensions" value="java"/> - - <module name="TreeWalker"> - <property name="tabWidth" value="2"/> - <module name="SuppressWarningsHolder"/> - <module name="FileContentsHolder"/> - - <!-- ANNOTATIONS --> - - <!-- No trailing empty parenthesis or commas --> - <module name="AnnotationUseStyle"> - <property name="elementStyle" value="ignore"/> - </module> - <!-- Ensure @Override is present when {@inheritDoc} Javadoc tag is present --> - <module name="MissingOverride"/> - <!-- Package level annotations belong in package-info.java --> - <module name="PackageAnnotation"/> - - <!-- BLOCKS --> - - <!-- Block opening brace on same line --> - <module name="LeftCurly"> - <property name="option" value="eol"/> - </module> - <!-- Block closing brace for else, catch, finally on same line --> - <module name="RightCurly"> - <property name="option" value="same"/> - </module> - <!-- Always use braces even if optional --> - <module name="NeedBraces"/> - - <!-- CLASS DESIGN --> - - <!-- Classes containing only static methods should not have a public constructor --> - <module name="HideUtilityClassConstructor"/> - - <!-- CODING --> - - <!-- Use Java style array declarations (e.g. String[] names), not C style (e.g. 
String names[]) --> - <module name="ArrayTypeStyle"/> - <!-- If covariant equals defined, standard equals must also be defined --> - <module name="CovariantEquals"/> - <!-- Switch 'default' case must appear last --> - <module name="DefaultComesLast"/> - <!-- Override equals and hashCode together --> - <module name="EqualsHashCode"/> - <!-- No fall through in switch cases, even the last one --> - <module name="FallThrough"> - <property name="checkLastCaseGroup" value="true"/> - </module> - <!-- Do not perform assignments embedded within expressions --> - <module name="InnerAssignment"/> - <!-- Switch statements must have a 'default' case --> - <module name="MissingSwitchDefault"/> - <!-- Do not modify the 'for' loop control variable --> - <module name="ModifiedControlVariable"/> - <!-- Each variable delcaration must be on a separate line --> - <module name="MultipleVariableDeclarations"/> - <!-- Each statement (i.e. code terminated by a semicolon) must be on a separate line --> - <module name="OneStatementPerLine"/> - <!-- Classes must have an explicit package declaration --> - <module name="PackageDeclaration"/> - <!-- Do not test boolean expressions against the values true or false --> - <module name="SimplifyBooleanExpression"/> - <!-- Do not test for boolean conditions and return the values true or false --> - <module name="SimplifyBooleanReturn"/> - <!-- Do not use '==' to compare string against a literal; use 'equals' --> - <module name="StringLiteralEquality"/> - <!-- Use 'L' with long literals --> - <module name="UpperEll"/> - - <!-- IMPORTS --> - - <!-- No imports statements using '*' notation except static imports --> - <module name="AvoidStarImport"> - <property name="allowStaticMemberImports" value="true"/> - </module> - <!-- Do not import 'sun' packages --> - <module name="IllegalImport"/> - <!-- Do not duplicate import statements --> - <module name="RedundantImport"/> - <!-- Eliminate unused imports --> - <module name="UnusedImports"/> - - <!-- JAVADOC COMMENTS --> - - <!-- If you have a Javadoc comment, make sure it is properly formed --> - <module name="JavadocStyle"> - <property name="checkFirstSentence" value="false"/> - </module> - - <!-- NAMING CONVENTIONS --> - - <!-- Generic parameters for a class must be uppercase letters separated by underscores (e.g. <V>, <NEW>, <KEY_T>) --> - <module name="ClassTypeParameterName"> - <property name="format" value="^[A-Z]+(_[A-Z]+)*$"/> - </module> - <!-- Constants must be all uppercase letters separated by underscores --> - <module name="ConstantName"> - <property name="format" value="^(_?log)|([A-Z][A-Z0-9]*(_[A-Z0-9]+)*)$"/> - </module> - <!-- Local variables must be camel case starting with lowercase letter --> - <module name="LocalFinalVariableName"/> - <module name="LocalVariableName"/> - <!-- Member variables must be camel case starting with an underscore or lowercase letter --> - <module name="MemberName"> - <property name="format" value="^[_a-z][a-zA-Z0-9]*$"/> - </module> - <!-- Method name must be camel case starting with a lowercase letter --> - <module name="MethodName"/> - <!-- Generic parameters for a method must be uppercase letters separated by underscores (e.g. 
<V>, <NEW>, <KEY_T>) --> - <module name="MethodTypeParameterName"> - <property name="format" value="^[A-Z]+(_[A-Z]+)*$"/> - </module> - <!-- Package name must be all lowercase letters separated by periods --> - <module name="PackageName"> - <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/> - </module> - <!-- Parameters must be camel case starting with a lowercase letter --> - <module name="ParameterName"/> - <!-- Static variables must be camel case starting with an underscore or lowercase letter --> - <module name="StaticVariableName"> - <property name="format" value="^[_a-z][a-zA-Z0-9]*$"/> - </module> - <!-- Type names must be camel case starting with an uppercase letter --> - <module name="TypeName"/> - - <!-- LENGTHS --> - - <!-- Desired line length is 120 but allow some overrun beyond that --> - <module name="LineLength"> - <property name="max" value="160"/> - <message key="maxLineLen" value="Line is longer than {0,number,integer} characters (found {1,number,integer}). Try to keep lines under 120 characters."/> - </module> - - <!-- WHITESPACE --> - - <module name="GenericWhitespace"/> - <module name="MethodParamPad"/> - <module name="NoWhitespaceAfter"> - <property name="tokens" value="BNOT,DEC,DOT,INC,LNOT,UNARY_MINUS,UNARY_PLUS"/> - </module> - <module name="NoWhitespaceBefore"/> - <module name="OperatorWrap"/> - <module name="ParenPad"/> - <module name="TypecastParenPad"> - <property name="tokens" value="RPAREN,TYPECAST"/> - </module> - <module name="WhitespaceAfter"/> - <module name="WhitespaceAround"/> - - <!-- Do not allow meaningless, IDE generated parameter names --> - <module name="RegexpSinglelineJava"> - <property name="format" value="[\s]+arg[\d]+[,\)]"/> - <property name="message" value="Replace argN with a meaningful parameter name"/> - </module> - </module> - - <!-- Do not allow tab characters in source files --> - <module name="FileTabCharacter"/> - - <!-- Ensure parameter and exception names are present on @param and @throws tags --> - <module name="RegexpSingleline"> - <property name="format" value="\*[\s]*@(throws|param)[\s]*$"/> - <property name="message" value="Missing parameter or exception name"/> - </module> - <!-- IDE generated code must be reviewed by developer --> - <module name="RegexpSingleline"> - <property name="format" value="\/\/[\s]*TODO[\s]+Auto-generated"/> - <property name="message" value="Replace IDE generated code with real implementation"/> - </module> - <!-- Detect commonly misspelled Javadoc tags --> - <module name="RegexpSingleline"> - <property name="format" value="\*[\s]*@(params|throw|returns)[\s]+"/> - <property name="message" value="Correct misspelled Javadoc tag"/> - </module> - - <!-- Read checker suppressions from a file --> - <module name="SuppressionFilter"> - <property name="file" value="${config_loc}/suppressions.xml"/> - </module> - <!-- Allow Checkstyle warnings to be suppressed using trailing comments --> - <module name="SuppressWithNearbyCommentFilter"/> - <!-- Allow Checkstyle warnings to be suppressed using block comments --> - <module name="SuppressionCommentFilter"/> - <!-- Allow SuppressWarnings annotation to suppress Checkstyle issues --> - <module name="SuppressWarningsFilter"/> -</module> diff --git a/gradle/checkstyle/suppressions.xml b/gradle/checkstyle/suppressions.xml deleted file mode 100644 index 829689ba35611..0000000000000 --- a/gradle/checkstyle/suppressions.xml +++ /dev/null @@ -1,7 +0,0 @@ -<?xml version="1.0"?> -<!DOCTYPE suppressions PUBLIC - "-//Puppy Crawl//DTD Suppressions 1.1//EN" - 
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd"> -<suppressions> - <suppress checks=".*" files="src/mainGeneratedDataTemplate"/> -</suppressions> diff --git a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java index e71fe6266b955..02aeb047a4d3e 100644 --- a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java +++ b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java @@ -1,7 +1,6 @@ package com.datahub.metadata.ingestion; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,6 +17,7 @@ import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -45,29 +45,31 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.scheduling.support.CronSequenceGenerator; - /** - * This class serves as a stateful scheduler of Ingestion Runs for Ingestion Sources defined - * within DataHub. It manages storing and triggering ingestion sources on a pre-defined schedule - * based on the information present in the {@link DataHubIngestionSourceInfo} aspect. As such, this class + * This class serves as a stateful scheduler of Ingestion Runs for Ingestion Sources defined within + * DataHub. It manages storing and triggering ingestion sources on a pre-defined schedule based on + * the information present in the {@link DataHubIngestionSourceInfo} aspect. As such, this class * should never be instantiated more than once - it's a singleton. * - * When the scheduler is created, it will first batch load all "info" aspects associated with the DataHubIngestionSource entity. - * It then iterates through all the aspects and attempts to extract a Quartz-cron (* * * * *) formatted schedule string & timezone from each. - * Upon finding a schedule and timezone, the "next execution time" as a relative timestamp is computed and a task - * is scheduled at that time in the future. + * <p>When the scheduler is created, it will first batch load all "info" aspects associated with the + * DataHubIngestionSource entity. It then iterates through all the aspects and attempts to extract a + * Quartz-cron (* * * * *) formatted schedule string & timezone from each. Upon finding a schedule + * and timezone, the "next execution time" as a relative timestamp is computed and a task is + * scheduled at that time in the future. * - * The child task is scheduled on another thread via {@link ScheduledExecutorService} and is responsible for creating a - * new DataHubExecutionRequest entity instance using an {@link EntityClient}. The execution request includes the inputs required - * to execute an ingestion source: an Ingestion Recipe encoded as JSON. This in turn triggers the execution of a downstream - * "action" which actually executes the ingestion process and reports the status back. 
+ * <p>The child task is scheduled on another thread via {@link ScheduledExecutorService} and is + * responsible for creating a new DataHubExecutionRequest entity instance using an {@link + * EntityClient}. The execution request includes the inputs required to execute an ingestion source: + * an Ingestion Recipe encoded as JSON. This in turn triggers the execution of a downstream "action" + * which actually executes the ingestion process and reports the status back. * - * After initial load, this class will continuously listen to the MetadataChangeProposal stream and update its local cache based - * on changes performed against Ingestion Source entities. Specifically, if the schedule of an Ingestion Source is changed in any way, - * the next execution time of that source will be recomputed, with previously scheduled execution clear if necessary. + * <p>After initial load, this class will continuously listen to the MetadataChangeProposal stream + * and update its local cache based on changes performed against Ingestion Source entities. + * Specifically, if the schedule of an Ingestion Source is changed in any way, the next execution + * time of that source will be recomputed, with previously scheduled execution clear if necessary. * - * On top of that, the component can also refresh its entire cache periodically. By default, it batch loads all the latest - * schedules on a once-per-day cadence. + * <p>On top of that, the component can also refresh its entire cache periodically. By default, it + * batch loads all the latest schedules on a once-per-day cadence. */ @Slf4j @RequiredArgsConstructor @@ -76,32 +78,35 @@ public class IngestionScheduler { private final Authentication _systemAuthentication; private final EntityClient _entityClient; - // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the source + // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the + // source // Visible for testing final Map<Urn, ScheduledFuture<?>> _nextIngestionSourceExecutionCache = new HashMap<>(); // Shared executor service used for executing an ingestion source on a schedule - private final ScheduledExecutorService _sharedExecutorService = Executors.newScheduledThreadPool(1); + private final ScheduledExecutorService _sharedExecutorService = + Executors.newScheduledThreadPool(1); private final IngestionConfiguration _ingestionConfiguration; private final int _batchGetDelayIntervalSeconds; private final int _batchGetRefreshIntervalSeconds; public void init() { - final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = new BatchRefreshSchedulesRunnable( - _systemAuthentication, - _entityClient, - this::scheduleNextIngestionSourceExecution, - this::unscheduleAll); + final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = + new BatchRefreshSchedulesRunnable( + _systemAuthentication, + _entityClient, + this::scheduleNextIngestionSourceExecution, + this::unscheduleAll); // Schedule a recurring batch-reload task. _sharedExecutorService.scheduleAtFixedRate( - batchRefreshSchedulesRunnable, _batchGetDelayIntervalSeconds, _batchGetRefreshIntervalSeconds, + batchRefreshSchedulesRunnable, + _batchGetDelayIntervalSeconds, + _batchGetRefreshIntervalSeconds, TimeUnit.SECONDS); } - /** - * Removes the next scheduled execution of a particular ingestion source, if it exists. - */ + /** Removes the next scheduled execution of a particular ingestion source, if it exists. 
*/ public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) { log.info("Unscheduling ingestion source with urn {}", ingestionSourceUrn); // Deleting an ingestion source schedule. Un-schedule the next execution. @@ -113,20 +118,25 @@ public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) } /** - * Un-schedule all ingestion sources that are scheduled for execution. This is performed on refresh of ingestion sources. + * Un-schedule all ingestion sources that are scheduled for execution. This is performed on + * refresh of ingestion sources. */ public void unscheduleAll() { // Deleting an ingestion source schedule. Un-schedule the next execution. - Set<Urn> scheduledSources = new HashSet<>(_nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. + Set<Urn> scheduledSources = + new HashSet<>( + _nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. for (Urn urn : scheduledSources) { unscheduleNextIngestionSourceExecution(urn); } } /** - * Computes and schedules the next execution time for a particular Ingestion Source, if it has not already been scheduled. + * Computes and schedules the next execution time for a particular Ingestion Source, if it has not + * already been scheduled. */ - public void scheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn, final DataHubIngestionSourceInfo newInfo) { + public void scheduleNextIngestionSourceExecution( + final Urn ingestionSourceUrn, final DataHubIngestionSourceInfo newInfo) { // 1. Attempt to un-schedule any previous executions unscheduleNextIngestionSourceExecution(ingestionSourceUrn); @@ -137,50 +147,63 @@ public void scheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn, f // 2. Schedule the next run of the ingestion source log.info( - String.format("Scheduling next execution of Ingestion Source with urn %s. Schedule: %s", - ingestionSourceUrn, - schedule.getInterval(GetMode.NULL))); + String.format( + "Scheduling next execution of Ingestion Source with urn %s. Schedule: %s", + ingestionSourceUrn, schedule.getInterval(GetMode.NULL))); // Construct the new cron expression final String modifiedCronInterval = adjustCronInterval(schedule.getInterval()); if (CronSequenceGenerator.isValidExpression(modifiedCronInterval)) { final String timezone = schedule.hasTimezone() ? schedule.getTimezone() : "UTC"; - final CronSequenceGenerator generator = new CronSequenceGenerator(modifiedCronInterval, TimeZone.getTimeZone(timezone)); + final CronSequenceGenerator generator = + new CronSequenceGenerator(modifiedCronInterval, TimeZone.getTimeZone(timezone)); final Date currentDate = new Date(); final Date nextExecDate = generator.next(currentDate); final long scheduleTime = nextExecDate.getTime() - currentDate.getTime(); // Schedule the ingestion source to run some time in the future. 
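[A minimal sketch of the delay computation in the hunk above, assuming Spring's CronSequenceGenerator (deprecated in later Spring releases) is on the classpath; the wrapper class and method names here are illustrative, not part of the patch:

    import java.util.Date;
    import java.util.TimeZone;
    import org.springframework.scheduling.support.CronSequenceGenerator;

    class NextRunDelay {
      // Millis from now until the six-field cron expression next fires in the given zone.
      static long millisUntilNextRun(String sixFieldCron, String timezone) {
        CronSequenceGenerator generator =
            new CronSequenceGenerator(sixFieldCron, TimeZone.getTimeZone(timezone));
        Date now = new Date();
        return generator.next(now).getTime() - now.getTime();
      }
    }

The scheduler then hands this delay to ScheduledExecutorService.schedule, as the lines that follow show.]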
- final ExecutionRequestRunnable executionRequestRunnable = new ExecutionRequestRunnable( - _systemAuthentication, - _entityClient, - _ingestionConfiguration, - ingestionSourceUrn, - newInfo, - () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), - this::scheduleNextIngestionSourceExecution); + final ExecutionRequestRunnable executionRequestRunnable = + new ExecutionRequestRunnable( + _systemAuthentication, + _entityClient, + _ingestionConfiguration, + ingestionSourceUrn, + newInfo, + () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), + this::scheduleNextIngestionSourceExecution); // Schedule the next ingestion run - final ScheduledFuture<?> scheduledFuture = _sharedExecutorService.schedule(executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); + final ScheduledFuture<?> scheduledFuture = + _sharedExecutorService.schedule( + executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); _nextIngestionSourceExecutionCache.put(ingestionSourceUrn, scheduledFuture); - log.info(String.format("Scheduled next execution of Ingestion Source with urn %s in %sms.", ingestionSourceUrn, scheduleTime)); + log.info( + String.format( + "Scheduled next execution of Ingestion Source with urn %s in %sms.", + ingestionSourceUrn, scheduleTime)); } else { - log.error(String.format("Found malformed Ingestion Source schedule: %s for urn: %s. Skipping scheduling.", schedule.getInterval(), ingestionSourceUrn)); + log.error( + String.format( + "Found malformed Ingestion Source schedule: %s for urn: %s. Skipping scheduling.", + schedule.getInterval(), ingestionSourceUrn)); } } else { - log.info(String.format("Ingestion source with urn %s has no configured schedule. Not scheduling.", ingestionSourceUrn)); + log.info( + String.format( + "Ingestion source with urn %s has no configured schedule. Not scheduling.", + ingestionSourceUrn)); } } /** * A {@link Runnable} used to periodically re-populate the schedules cache. * - * Currently, the refresh logic is not very smart. When the cache is invalidated, we simply re-fetch the - * entire cache using schedules stored in the backend. + * <p>Currently, the refresh logic is not very smart. When the cache is invalidated, we simply + * re-fetch the entire cache using schedules stored in the backend. */ @VisibleForTesting static class BatchRefreshSchedulesRunnable implements Runnable { @@ -193,11 +216,13 @@ static class BatchRefreshSchedulesRunnable implements Runnable { public BatchRefreshSchedulesRunnable( @Nonnull final Authentication systemAuthentication, @Nonnull final EntityClient entityClient, - @Nonnull final BiConsumer<Urn, DataHubIngestionSourceInfo> scheduleNextIngestionSourceExecution, + @Nonnull + final BiConsumer<Urn, DataHubIngestionSourceInfo> scheduleNextIngestionSourceExecution, @Nonnull final Runnable unscheduleAll) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _entityClient = Objects.requireNonNull(entityClient); - _scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); + _scheduleNextIngestionSourceExecution = + Objects.requireNonNull(scheduleNextIngestionSourceExecution); _unscheduleAll = unscheduleAll; } @@ -214,25 +239,31 @@ public void run() { while (start < total) { try { - log.debug(String.format("Batch fetching ingestion source schedules. start: %s, count: %s ", start, count)); + log.debug( + String.format( + "Batch fetching ingestion source schedules. start: %s, count: %s ", + start, count)); // 1. List all ingestion source urns. 
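[A condensed sketch of the start/count paging loop that this hunk reformats; the list() call mirrors the one visible just below, and ListResult, Constants, and Collections are imported in this file, while the loop bookkeeping shown here is an assumption based on the surrounding context:

    // Page through all ingestion source urns, 'count' at a time, until 'total' is reached.
    int start = 0;
    final int count = 30;
    int total = Integer.MAX_VALUE; // assumed to be refined from the first page's result
    while (start < total) {
      final ListResult urns =
          entityClient.list(
              Constants.INGESTION_SOURCE_ENTITY_NAME,
              Collections.emptyMap(),
              start,
              count,
              systemAuthentication);
      total = urns.getTotal();
      start += count;
    }]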
- final ListResult ingestionSourceUrns = _entityClient.list( - Constants.INGESTION_SOURCE_ENTITY_NAME, - Collections.emptyMap(), - start, - count, - _systemAuthentication); + final ListResult ingestionSourceUrns = + _entityClient.list( + Constants.INGESTION_SOURCE_ENTITY_NAME, + Collections.emptyMap(), + start, + count, + _systemAuthentication); // 2. Fetch all ingestion sources, specifically the "info" aspect. - final Map<Urn, EntityResponse> ingestionSources = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ingestionSourceUrns.getEntities()), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - _systemAuthentication); + final Map<Urn, EntityResponse> ingestionSources = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ingestionSourceUrns.getEntities()), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + _systemAuthentication); // 3. Reschedule ingestion sources based on the fetched schedules (inside "info") - log.debug("Received batch of Ingestion Source Info aspects. Attempting to re-schedule execution requests."); + log.debug( + "Received batch of Ingestion Source Info aspects. Attempting to re-schedule execution requests."); // Then schedule the next ingestion runs scheduleNextIngestionRuns(new ArrayList<>(ingestionSources.values())); @@ -242,29 +273,33 @@ public void run() { } catch (RemoteInvocationException e) { log.error( - String.format("Failed to retrieve ingestion sources! Skipping updating schedule cache until next refresh. start: %s, count: %s", - start, - count), + String.format( + "Failed to retrieve ingestion sources! Skipping updating schedule cache until next refresh. start: %s, count: %s", + start, count), e); return; } } log.info(String.format("Successfully fetched %s ingestion sources.", total)); } catch (Exception e) { - log.error("Caught exception while loading Ingestion Sources. Will retry on next scheduled attempt.", e); + log.error( + "Caught exception while loading Ingestion Sources. Will retry on next scheduled attempt.", + e); } } /** - * Attempts to reschedule the next ingestion source run based on a batch of {@link EntityResponse} objects - * received from the Metadata Service. + * Attempts to reschedule the next ingestion source run based on a batch of {@link + * EntityResponse} objects received from the Metadata Service. */ - private void scheduleNextIngestionRuns(@Nonnull final List<EntityResponse> ingestionSourceEntities) { + private void scheduleNextIngestionRuns( + @Nonnull final List<EntityResponse> ingestionSourceEntities) { for (final EntityResponse response : ingestionSourceEntities) { final Urn entityUrn = response.getUrn(); final EnvelopedAspectMap aspects = response.getAspects(); final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); // Invoke the "scheduleNextIngestionSourceExecution" (passed from parent) _scheduleNextIngestionSourceExecution.accept(entityUrn, ingestionSourceInfo); @@ -275,7 +310,8 @@ private void scheduleNextIngestionRuns(@Nonnull final List<EntityResponse> inges /** * A {@link Runnable} used to create Ingestion Execution Requests. * - * The expectation is that there's a downstream action which is listening and executing new Execution Requests. 
+ * <p>The expectation is that there's a downstream action which is listening and executing new + * Execution Requests. */ @VisibleForTesting static class ExecutionRequestRunnable implements Runnable { @@ -294,7 +330,8 @@ static class ExecutionRequestRunnable implements Runnable { private final Urn _ingestionSourceUrn; private final DataHubIngestionSourceInfo _ingestionSourceInfo; - // Used for clearing the "next execution" cache once a corresponding execution request has been created. + // Used for clearing the "next execution" cache once a corresponding execution request has been + // created. private final Runnable _deleteNextIngestionSourceExecution; // Used for re-scheduling the ingestion source once it has executed! @@ -307,27 +344,33 @@ public ExecutionRequestRunnable( @Nonnull final Urn ingestionSourceUrn, @Nonnull final DataHubIngestionSourceInfo ingestionSourceInfo, @Nonnull final Runnable deleteNextIngestionSourceExecution, - @Nonnull final BiConsumer<Urn, DataHubIngestionSourceInfo> scheduleNextIngestionSourceExecution) { + @Nonnull + final BiConsumer<Urn, DataHubIngestionSourceInfo> + scheduleNextIngestionSourceExecution) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _entityClient = Objects.requireNonNull(entityClient); _ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); _ingestionSourceUrn = Objects.requireNonNull(ingestionSourceUrn); _ingestionSourceInfo = Objects.requireNonNull(ingestionSourceInfo); - _deleteNextIngestionSourceExecution = Objects.requireNonNull(deleteNextIngestionSourceExecution); - _scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); + _deleteNextIngestionSourceExecution = + Objects.requireNonNull(deleteNextIngestionSourceExecution); + _scheduleNextIngestionSourceExecution = + Objects.requireNonNull(scheduleNextIngestionSourceExecution); } @Override public void run() { - // Remove the next ingestion execution as we are going to execute it now. (no retry logic currently) + // Remove the next ingestion execution as we are going to execute it now. 
(no retry logic + // currently) _deleteNextIngestionSourceExecution.run(); try { - log.info(String.format( - "Creating Execution Request for scheduled Ingestion Source with urn %s", - _ingestionSourceUrn)); + log.info( + String.format( + "Creating Execution Request for scheduled Ingestion Source with urn %s", + _ingestionSourceUrn)); // Create a new Execution Request Proposal final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -341,18 +384,23 @@ public void run() { // Construct arguments (arguments) of the Execution Request final ExecutionRequestInput input = new ExecutionRequestInput(); input.setTask(RUN_INGEST_TASK_NAME); - input.setSource(new ExecutionRequestSource() - .setType(EXECUTION_REQUEST_SOURCE_NAME) - .setIngestionSource(_ingestionSourceUrn)); + input.setSource( + new ExecutionRequestSource() + .setType(EXECUTION_REQUEST_SOURCE_NAME) + .setIngestionSource(_ingestionSourceUrn)); input.setExecutorId(_ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); input.setRequestedAt(System.currentTimeMillis()); Map<String, String> arguments = new HashMap<>(); - String recipe = IngestionUtils.injectPipelineName(_ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); + String recipe = + IngestionUtils.injectPipelineName( + _ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); arguments.put(RECIPE_ARGUMENT_NAME, recipe); - arguments.put(VERSION_ARGUMENT_NAME, _ingestionSourceInfo.getConfig().hasVersion() - ? _ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion()); + arguments.put( + VERSION_ARGUMENT_NAME, + _ingestionSourceInfo.getConfig().hasVersion() + ? _ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); String debugMode = "false"; if (_ingestionSourceInfo.getConfig().hasDebugMode()) { debugMode = _ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; @@ -368,9 +416,11 @@ public void run() { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (Exception e) { // TODO: This type of thing should likely be proactively reported. - log.error(String.format( - "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", - _ingestionSourceUrn), e); + log.error( + String.format( + "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", + _ingestionSourceUrn), + e); } // 2. Re-Schedule the next execution request. @@ -380,11 +430,12 @@ public void run() { private String adjustCronInterval(final String origCronInterval) { Objects.requireNonNull(origCronInterval, "origCronInterval must not be null"); - // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make an adjustment here. + // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make + // an adjustment here. 
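[For example, prepending a literal seconds field turns the five-field Quartz-style string into the six-field form Spring expects, which is what the split-and-prepend below implements; the variable names here are illustrative only:

    String fiveField = "0 0 * * 1";     // minute hour day-of-month month day-of-week
    String sixField = "0 " + fiveField; // "0 0 0 * * 1": fires at second 0, midnight on Mondays]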
final String[] originalCronParts = origCronInterval.split(" "); if (originalCronParts.length == 5) { return String.format("0 %s", origCronInterval); } return origCronInterval; } -} \ No newline at end of file +} diff --git a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java index 51b7fe85f4922..4366ff64ae384 100644 --- a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java +++ b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java @@ -1,7 +1,8 @@ package com.datahub.metadata.ingestion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.UrnArray; @@ -10,11 +11,12 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.ingestion.DataHubIngestionSourceConfig; import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.query.ListResult; import java.util.Collections; import java.util.concurrent.Future; @@ -24,8 +26,6 @@ import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class IngestionSchedulerTest { private IngestionScheduler _ingestionScheduler; @@ -36,14 +36,17 @@ public void setupTest() throws Exception { // Init mocks. 
final Urn ingestionSourceUrn1 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo info1 = new DataHubIngestionSourceInfo(); - info1.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 0 * * 1").setTimezone("America/Los Angeles")); // Run every monday + info1.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 0 * * 1") + .setTimezone("America/Los Angeles")); // Run every monday info1.setType("mysql"); info1.setName("My Test Source"); - info1.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + info1.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); final EnvelopedAspect envelopedAspect1 = new EnvelopedAspect(); envelopedAspect1.setName(Constants.INGESTION_INFO_ASPECT_NAME); @@ -54,19 +57,23 @@ public void setupTest() throws Exception { final EntityResponse entityResponse1 = Mockito.mock(EntityResponse.class); Mockito.when(entityResponse1.getUrn()).thenReturn(ingestionSourceUrn1); - Mockito.when(entityResponse1.getEntityName()).thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); + Mockito.when(entityResponse1.getEntityName()) + .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse1.getAspects()).thenReturn(map1); final Urn ingestionSourceUrn2 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:1"); final DataHubIngestionSourceInfo info2 = new DataHubIngestionSourceInfo(); - info2.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 0 * * 1 BLUE GREEN").setTimezone("America/Los Angeles")); // Run every monday + info2.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 0 * * 1 BLUE GREEN") + .setTimezone("America/Los Angeles")); // Run every monday info2.setType("invalid"); info2.setName("My Invalid Source"); - info2.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + info2.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); final EnvelopedAspect envelopedAspect2 = new EnvelopedAspect(); envelopedAspect2.setName(Constants.INGESTION_INFO_ASPECT_NAME); @@ -77,35 +84,44 @@ public void setupTest() throws Exception { final EntityResponse entityResponse2 = Mockito.mock(EntityResponse.class); Mockito.when(entityResponse2.getUrn()).thenReturn(ingestionSourceUrn2); - Mockito.when(entityResponse2.getEntityName()).thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); + Mockito.when(entityResponse2.getEntityName()) + .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse2.getAspects()).thenReturn(map2); JavaEntityClient mockClient = Mockito.mock(JavaEntityClient.class); // Set up mocks for ingestion source batch fetching - Mockito.when(mockClient.list( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(30), - Mockito.any() - )).thenReturn(new ListResult().setCount(30).setTotal(2).setStart(0).setEntities( - new UrnArray(ingestionSourceUrn1, ingestionSourceUrn2))); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(ingestionSourceUrn1, ingestionSourceUrn2)), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any() - )).thenReturn(ImmutableMap.of( - ingestionSourceUrn1, entityResponse1, 
- ingestionSourceUrn2, entityResponse2)); - - _ingestionScheduler = new IngestionScheduler( - Mockito.mock(Authentication.class), - mockClient, - Mockito.mock(IngestionConfiguration.class), - 1, - 1200); + Mockito.when( + mockClient.list( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(30), + Mockito.any())) + .thenReturn( + new ListResult() + .setCount(30) + .setTotal(2) + .setStart(0) + .setEntities(new UrnArray(ingestionSourceUrn1, ingestionSourceUrn2))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(ingestionSourceUrn1, ingestionSourceUrn2)), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn( + ImmutableMap.of( + ingestionSourceUrn1, entityResponse1, + ingestionSourceUrn2, entityResponse2)); + + _ingestionScheduler = + new IngestionScheduler( + Mockito.mock(Authentication.class), + mockClient, + Mockito.mock(IngestionConfiguration.class), + 1, + 1200); _ingestionScheduler.init(); Thread.sleep(2000); // Sleep so the runnable can execute. (not ideal) } @@ -115,22 +131,27 @@ public void testInvokeUpdateExistingSchedule() throws Exception { assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); Urn ingestionSourceUrn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); - Future<?> beforeFuture = _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + Future<?> beforeFuture = + _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(ingestionSourceUrn, newInfo); assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); - Future<?> newFuture = _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + Future<?> newFuture = + _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); // Ensure that there is an overwritten future. 
Assert.assertNotSame(beforeFuture, newFuture); @@ -142,14 +163,17 @@ public void testInvokeNewSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -163,14 +187,17 @@ public void testInvokeInvalidSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); // Invalid schedule set. - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("NOT A SCHEDULE").setTimezone("America/Los Angeles")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("NOT A SCHEDULE") + .setTimezone("America/Los Angeles")); // Run every monday newInfo.setType("snowflake"); newInfo.setName("My Snowflake Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that no changes have been made to next execution cache. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -186,11 +213,11 @@ public void testInvokeMissingSchedule() throws Exception { // No schedule set. newInfo.setType("mysql"); newInfo.setName("My Test Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the schedule has been removed. 
_ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -218,19 +245,24 @@ public void testSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("* * * * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("* * * * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); ScheduledFuture<?> future = _ingestionScheduler._nextIngestionSourceExecutionCache.get(urn); - Assert.assertTrue(future.getDelay(TimeUnit.SECONDS) < 60); // Next execution must always be less than a minute away. + Assert.assertTrue( + future.getDelay(TimeUnit.SECONDS) + < 60); // Next execution must always be less than a minute away. } @Test @@ -239,14 +271,17 @@ public void testUnscheduleAll() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:3"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("* * * * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("* * * * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source 2"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 2); diff --git a/li-utils/src/main/java/com/datahub/util/ModelUtils.java b/li-utils/src/main/java/com/datahub/util/ModelUtils.java index 65379d353de86..538a0d2dfdeb0 100644 --- a/li-utils/src/main/java/com/datahub/util/ModelUtils.java +++ b/li-utils/src/main/java/com/datahub/util/ModelUtils.java @@ -29,7 +29,6 @@ import javax.annotation.Nonnull; import org.reflections.Reflections; - public class ModelUtils { private static final ClassLoader CLASS_LOADER = DummySnapshot.class.getClassLoader(); @@ -69,13 +68,15 @@ public static Class<? extends RecordTemplate> getAspectClass(@Nonnull String asp * @return a set of supported aspects */ @Nonnull - public static <ASPECT_UNION extends UnionTemplate> Set<Class<? extends RecordTemplate>> getValidAspectTypes( - @Nonnull Class<ASPECT_UNION> aspectUnionClass) { + public static <ASPECT_UNION extends UnionTemplate> + Set<Class<? extends RecordTemplate>> getValidAspectTypes( + @Nonnull Class<ASPECT_UNION> aspectUnionClass) { AspectValidator.validateAspectUnionSchema(aspectUnionClass); Set<Class<? 
extends RecordTemplate>> validTypes = new HashSet<>(); - for (UnionDataSchema.Member member : ValidationUtils.getUnionSchema(aspectUnionClass).getMembers()) { + for (UnionDataSchema.Member member : + ValidationUtils.getUnionSchema(aspectUnionClass).getMembers()) { if (member.getType().getType() == DataSchema.Type.RECORD) { String fqcn = ((RecordDataSchema) member.getType()).getBindingName(); try { @@ -89,11 +90,10 @@ public static <ASPECT_UNION extends UnionTemplate> Set<Class<? extends RecordTem return validTypes; } - /** - * Gets a {@link Class} from its FQCN. - */ + /** Gets a {@link Class} from its FQCN. */ @Nonnull - public static <T> Class<? extends T> getClassFromName(@Nonnull String className, @Nonnull Class<T> parentClass) { + public static <T> Class<? extends T> getClassFromName( + @Nonnull String className, @Nonnull Class<T> parentClass) { try { return CLASS_LOADER.loadClass(className).asSubclass(parentClass); } catch (ClassNotFoundException e) { @@ -108,8 +108,10 @@ public static <T> Class<? extends T> getClassFromName(@Nonnull String className, * @return snapshot class that extends {@link RecordTemplate}, associated with className */ @Nonnull - public static Class<? extends RecordTemplate> getMetadataSnapshotClassFromName(@Nonnull String className) { - Class<? extends RecordTemplate> snapshotClass = getClassFromName(className, RecordTemplate.class); + public static Class<? extends RecordTemplate> getMetadataSnapshotClassFromName( + @Nonnull String className) { + Class<? extends RecordTemplate> snapshotClass = + getClassFromName(className, RecordTemplate.class); SnapshotValidator.validateSnapshotSchema(snapshotClass); return snapshotClass; } @@ -122,13 +124,16 @@ public static Class<? extends RecordTemplate> getMetadataSnapshotClassFromName(@ * @return the extracted {@link Urn} */ @Nonnull - public static <SNAPSHOT extends RecordTemplate> Urn getUrnFromSnapshot(@Nonnull SNAPSHOT snapshot) { + public static <SNAPSHOT extends RecordTemplate> Urn getUrnFromSnapshot( + @Nonnull SNAPSHOT snapshot) { SnapshotValidator.validateSnapshotSchema(snapshot.getClass()); - return RecordUtils.getRecordTemplateField(snapshot, "urn", urnClassForSnapshot(snapshot.getClass())); + return RecordUtils.getRecordTemplateField( + snapshot, "urn", urnClassForSnapshot(snapshot.getClass())); } /** - * Similar to {@link #getUrnFromSnapshot(RecordTemplate)} but extracts from a Snapshot union instead. + * Similar to {@link #getUrnFromSnapshot(RecordTemplate)} but extracts from a Snapshot union + * instead. 
*/ @Nonnull public static Urn getUrnFromSnapshotUnion(@Nonnull UnionTemplate snapshotUnion) { @@ -164,9 +169,11 @@ public static Urn getUrnFromDeltaUnion(@Nonnull UnionTemplate deltaUnion) { * @return the extracted {@link Urn} */ @Nonnull - public static <DOCUMENT extends RecordTemplate> Urn getUrnFromDocument(@Nonnull DOCUMENT document) { + public static <DOCUMENT extends RecordTemplate> Urn getUrnFromDocument( + @Nonnull DOCUMENT document) { DocumentValidator.validateDocumentSchema(document.getClass()); - return RecordUtils.getRecordTemplateField(document, "urn", urnClassForDocument(document.getClass())); + return RecordUtils.getRecordTemplateField( + document, "urn", urnClassForDocument(document.getClass())); } /** @@ -179,37 +186,35 @@ public static <DOCUMENT extends RecordTemplate> Urn getUrnFromDocument(@Nonnull @Nonnull public static <ENTITY extends RecordTemplate> Urn getUrnFromEntity(@Nonnull ENTITY entity) { EntityValidator.validateEntitySchema(entity.getClass()); - return RecordUtils.getRecordTemplateField(entity, "urn", urnClassForDocument(entity.getClass())); + return RecordUtils.getRecordTemplateField( + entity, "urn", urnClassForDocument(entity.getClass())); } /** * Extracts the fields with type urn from a relationship. * * @param relationship the relationship to extract urn from - * @param <RELATIONSHIP> must be a valid relationship model defined in com.linkedin.metadata.relationship + * @param <RELATIONSHIP> must be a valid relationship model defined in + * com.linkedin.metadata.relationship * @param fieldName name of the field with type urn * @return the extracted {@link Urn} */ @Nonnull - private static <RELATIONSHIP extends RecordTemplate> Urn getUrnFromRelationship(@Nonnull RELATIONSHIP relationship, - @Nonnull String fieldName) { + private static <RELATIONSHIP extends RecordTemplate> Urn getUrnFromRelationship( + @Nonnull RELATIONSHIP relationship, @Nonnull String fieldName) { RelationshipValidator.validateRelationshipSchema(relationship.getClass()); - return RecordUtils.getRecordTemplateField(relationship, fieldName, - urnClassForRelationship(relationship.getClass(), fieldName)); + return RecordUtils.getRecordTemplateField( + relationship, fieldName, urnClassForRelationship(relationship.getClass(), fieldName)); } - /** - * Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. - */ + /** Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. */ @Nonnull public static <RELATIONSHIP extends RecordTemplate> Urn getSourceUrnFromRelationship( @Nonnull RELATIONSHIP relationship) { return getUrnFromRelationship(relationship, "source"); } - /** - * Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. - */ + /** Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. 
*/ @Nonnull public static <RELATIONSHIP extends RecordTemplate> Urn getDestinationUrnFromRelationship( @Nonnull RELATIONSHIP relationship) { @@ -240,8 +245,9 @@ public static <SNAPSHOT extends RecordTemplate> List<RecordTemplate> getAspectsF * @return the extracted aspect */ @Nonnull - public static <SNAPSHOT extends RecordTemplate, ASPECT extends DataTemplate> Optional<ASPECT> getAspectFromSnapshot( - @Nonnull SNAPSHOT snapshot, @Nonnull Class<ASPECT> aspectClass) { + public static <SNAPSHOT extends RecordTemplate, ASPECT extends DataTemplate> + Optional<ASPECT> getAspectFromSnapshot( + @Nonnull SNAPSHOT snapshot, @Nonnull Class<ASPECT> aspectClass) { return getAspectsFromSnapshot(snapshot).stream() .filter(aspect -> aspect.getClass().equals(aspectClass)) @@ -250,10 +256,12 @@ public static <SNAPSHOT extends RecordTemplate, ASPECT extends DataTemplate> Opt } /** - * Similar to {@link #getAspectsFromSnapshot(RecordTemplate)} but extracts from a snapshot union instead. + * Similar to {@link #getAspectsFromSnapshot(RecordTemplate)} but extracts from a snapshot union + * instead. */ @Nonnull - public static List<RecordTemplate> getAspectsFromSnapshotUnion(@Nonnull UnionTemplate snapshotUnion) { + public static List<RecordTemplate> getAspectsFromSnapshotUnion( + @Nonnull UnionTemplate snapshotUnion) { return getAspects(RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion)); } @@ -261,10 +269,12 @@ public static List<RecordTemplate> getAspectsFromSnapshotUnion(@Nonnull UnionTem private static List<RecordTemplate> getAspects(@Nonnull RecordTemplate snapshot) { final Class<? extends WrappingArrayTemplate> clazz = getAspectsArrayClass(snapshot.getClass()); - WrappingArrayTemplate aspectArray = RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); + WrappingArrayTemplate aspectArray = + RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); final List<RecordTemplate> aspects = new ArrayList<>(); - aspectArray.forEach(item -> aspects.add(RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item))); + aspectArray.forEach( + item -> aspects.add(RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item))); return aspects; } @@ -280,12 +290,17 @@ private static List<RecordTemplate> getAspects(@Nonnull RecordTemplate snapshot) * @return the created snapshot */ @Nonnull - public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> SNAPSHOT newSnapshot( - @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull URN urn, @Nonnull List<ASPECT_UNION> aspects) { + public static < + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> + SNAPSHOT newSnapshot( + @Nonnull Class<SNAPSHOT> snapshotClass, + @Nonnull URN urn, + @Nonnull List<ASPECT_UNION> aspects) { SnapshotValidator.validateSnapshotSchema(snapshotClass); - final Class<? extends WrappingArrayTemplate> aspectArrayClass = getAspectsArrayClass(snapshotClass); + final Class<? extends WrappingArrayTemplate> aspectArrayClass = + getAspectsArrayClass(snapshotClass); try { final SNAPSHOT snapshot = snapshotClass.newInstance(); @@ -300,11 +315,15 @@ public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTempla } @Nonnull - private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTemplate> getAspectsArrayClass( - @Nonnull Class<SNAPSHOT> snapshotClass) { + private static <SNAPSHOT extends RecordTemplate> + Class<? 
extends WrappingArrayTemplate> getAspectsArrayClass( + @Nonnull Class<SNAPSHOT> snapshotClass) { try { - return snapshotClass.getMethod("getAspects").getReturnType().asSubclass(WrappingArrayTemplate.class); + return snapshotClass + .getMethod("getAspects") + .getReturnType() + .asSubclass(WrappingArrayTemplate.class); } catch (NoSuchMethodException | ClassCastException e) { throw new RuntimeException((e)); } @@ -320,8 +339,9 @@ private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTe * @return the created aspect union */ @Nonnull - public static <ASPECT_UNION extends UnionTemplate, ASPECT extends RecordTemplate> ASPECT_UNION newAspectUnion( - @Nonnull Class<ASPECT_UNION> aspectUnionClass, @Nonnull ASPECT aspect) { + public static <ASPECT_UNION extends UnionTemplate, ASPECT extends RecordTemplate> + ASPECT_UNION newAspectUnion( + @Nonnull Class<ASPECT_UNION> aspectUnionClass, @Nonnull ASPECT aspect) { AspectValidator.validateAspectUnionSchema(aspectUnionClass); @@ -334,60 +354,57 @@ public static <ASPECT_UNION extends UnionTemplate, ASPECT extends RecordTemplate } } - /** - * Gets the expected aspect class for a specific kind of snapshot. - */ + /** Gets the expected aspect class for a specific kind of snapshot. */ @Nonnull public static Class<? extends UnionTemplate> aspectClassForSnapshot( @Nonnull Class<? extends RecordTemplate> snapshotClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); - String aspectClassName = ((TyperefDataSchema) ((ArrayDataSchema) ValidationUtils.getRecordSchema(snapshotClass) - .getField("aspects") - .getType()).getItems()).getBindingName(); + String aspectClassName = + ((TyperefDataSchema) + ((ArrayDataSchema) + ValidationUtils.getRecordSchema(snapshotClass) + .getField("aspects") + .getType()) + .getItems()) + .getBindingName(); return getClassFromName(aspectClassName, UnionTemplate.class); } - /** - * Gets the expected {@link Urn} class for a specific kind of entity. - */ + /** Gets the expected {@link Urn} class for a specific kind of entity. */ @Nonnull - public static Class<? extends Urn> urnClassForEntity(@Nonnull Class<? extends RecordTemplate> entityClass) { + public static Class<? extends Urn> urnClassForEntity( + @Nonnull Class<? extends RecordTemplate> entityClass) { EntityValidator.validateEntitySchema(entityClass); return urnClassForField(entityClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of snapshot. - */ + /** Gets the expected {@link Urn} class for a specific kind of snapshot. */ @Nonnull - public static Class<? extends Urn> urnClassForSnapshot(@Nonnull Class<? extends RecordTemplate> snapshotClass) { + public static Class<? extends Urn> urnClassForSnapshot( + @Nonnull Class<? extends RecordTemplate> snapshotClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); return urnClassForField(snapshotClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of delta. - */ + /** Gets the expected {@link Urn} class for a specific kind of delta. */ @Nonnull - public static Class<? extends Urn> urnClassForDelta(@Nonnull Class<? extends RecordTemplate> deltaClass) { + public static Class<? extends Urn> urnClassForDelta( + @Nonnull Class<? extends RecordTemplate> deltaClass) { DeltaValidator.validateDeltaSchema(deltaClass); return urnClassForField(deltaClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of search document. - */ + /** Gets the expected {@link Urn} class for a specific kind of search document. 
*/ @Nonnull - public static Class<? extends Urn> urnClassForDocument(@Nonnull Class<? extends RecordTemplate> documentClass) { + public static Class<? extends Urn> urnClassForDocument( + @Nonnull Class<? extends RecordTemplate> documentClass) { DocumentValidator.validateDocumentSchema(documentClass); return urnClassForField(documentClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of relationship. - */ + /** Gets the expected {@link Urn} class for a specific kind of relationship. */ @Nonnull private static Class<? extends Urn> urnClassForRelationship( @Nonnull Class<? extends RecordTemplate> relationshipClass, @Nonnull String fieldName) { @@ -405,7 +422,8 @@ public static Class<? extends Urn> sourceUrnClassForRelationship( } /** - * Gets the expected {@link Urn} class for the destination field of a specific kind of relationship. + * Gets the expected {@link Urn} class for the destination field of a specific kind of + * relationship. */ @Nonnull public static Class<? extends Urn> destinationUrnClassForRelationship( @@ -414,35 +432,37 @@ public static Class<? extends Urn> destinationUrnClassForRelationship( } @Nonnull - private static Class<? extends Urn> urnClassForField(@Nonnull Class<? extends RecordTemplate> recordClass, - @Nonnull String fieldName) { - String urnClassName = ((DataMap) ValidationUtils.getRecordSchema(recordClass) - .getField(fieldName) - .getType() - .getProperties() - .get("java")).getString("class"); + private static Class<? extends Urn> urnClassForField( + @Nonnull Class<? extends RecordTemplate> recordClass, @Nonnull String fieldName) { + String urnClassName = + ((DataMap) + ValidationUtils.getRecordSchema(recordClass) + .getField(fieldName) + .getType() + .getProperties() + .get("java")) + .getString("class"); return getClassFromName(urnClassName, Urn.class); } - /** - * Validates a specific snapshot-aspect combination. - */ - public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate> void validateSnapshotAspect( - @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<ASPECT_UNION> aspectUnionClass) { + /** Validates a specific snapshot-aspect combination. */ + public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate> + void validateSnapshotAspect( + @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<ASPECT_UNION> aspectUnionClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); AspectValidator.validateAspectUnionSchema(aspectUnionClass); // Make sure that SNAPSHOT's "aspects" array field contains ASPECT_UNION type. if (!aspectClassForSnapshot(snapshotClass).equals(aspectUnionClass)) { - throw new InvalidSchemaException(aspectUnionClass.getCanonicalName() + " is not a supported aspect class of " - + snapshotClass.getCanonicalName()); + throw new InvalidSchemaException( + aspectUnionClass.getCanonicalName() + + " is not a supported aspect class of " + + snapshotClass.getCanonicalName()); } } - /** - * Validates a specific snapshot-URN combination. - */ + /** Validates a specific snapshot-URN combination. 
*/ public static <SNAPSHOT extends RecordTemplate, URN extends Urn> void validateSnapshotUrn( @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<URN> urnClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); @@ -450,7 +470,9 @@ public static <SNAPSHOT extends RecordTemplate, URN extends Urn> void validateSn // Make sure that SNAPSHOT's "urn" field uses the correct class or subclasses if (!urnClassForSnapshot(snapshotClass).isAssignableFrom(urnClass)) { throw new InvalidSchemaException( - urnClass.getCanonicalName() + " is not a supported URN class of " + snapshotClass.getCanonicalName()); + urnClass.getCanonicalName() + + " is not a supported URN class of " + + snapshotClass.getCanonicalName()); } } @@ -459,13 +481,16 @@ public static <SNAPSHOT extends RecordTemplate, URN extends Urn> void validateSn * * @param relationshipUnionClass the type of relationship union to create * @param relationship the relationship to set - * @param <RELATIONSHIP_UNION> must be a valid relationship union defined in com.linkedin.metadata.relationship + * @param <RELATIONSHIP_UNION> must be a valid relationship union defined in + * com.linkedin.metadata.relationship * @param <RELATIONSHIP> must be a supported relationship type in ASPECT_UNION * @return the created relationship union */ @Nonnull - public static <RELATIONSHIP_UNION extends UnionTemplate, RELATIONSHIP extends RecordTemplate> RELATIONSHIP_UNION newRelationshipUnion( - @Nonnull Class<RELATIONSHIP_UNION> relationshipUnionClass, @Nonnull RELATIONSHIP relationship) { + public static <RELATIONSHIP_UNION extends UnionTemplate, RELATIONSHIP extends RecordTemplate> + RELATIONSHIP_UNION newRelationshipUnion( + @Nonnull Class<RELATIONSHIP_UNION> relationshipUnionClass, + @Nonnull RELATIONSHIP relationship) { RelationshipValidator.validateRelationshipUnionSchema(relationshipUnionClass); @@ -478,20 +503,16 @@ public static <RELATIONSHIP_UNION extends UnionTemplate, RELATIONSHIP extends Re } } - /** - * Returns all entity classes. - */ + /** Returns all entity classes. */ @Nonnull public static Set<Class<? extends RecordTemplate>> getAllEntities() { - return new Reflections("com.linkedin.metadata.entity").getSubTypesOf(RecordTemplate.class) - .stream() - .filter(EntityValidator::isValidEntitySchema) - .collect(Collectors.toSet()); + return new Reflections("com.linkedin.metadata.entity") + .getSubTypesOf(RecordTemplate.class).stream() + .filter(EntityValidator::isValidEntitySchema) + .collect(Collectors.toSet()); } - /** - * Get entity type from urn class. - */ + /** Get entity type from urn class. */ @Nonnull public static String getEntityTypeFromUrnClass(@Nonnull Class<? extends Urn> urnClass) { try { @@ -501,13 +522,14 @@ public static String getEntityTypeFromUrnClass(@Nonnull Class<? extends Urn> urn } } - /** - * Get aspect specific kafka topic name from urn and aspect classes. - */ + /** Get aspect specific kafka topic name from urn and aspect classes. 
*/ @Nonnull - public static <URN extends Urn, ASPECT extends RecordTemplate> String getAspectSpecificMAETopicName(@Nonnull URN urn, - @Nonnull ASPECT newValue) { - return String.format("%s_%s_%s", METADATA_AUDIT_EVENT_PREFIX, urn.getEntityType().toUpperCase(), + public static <URN extends Urn, ASPECT extends RecordTemplate> + String getAspectSpecificMAETopicName(@Nonnull URN urn, @Nonnull ASPECT newValue) { + return String.format( + "%s_%s_%s", + METADATA_AUDIT_EVENT_PREFIX, + urn.getEntityType().toUpperCase(), newValue.getClass().getSimpleName().toUpperCase()); } @@ -521,8 +543,9 @@ public static <URN extends Urn, ASPECT extends RecordTemplate> String getAspectS * @return the created entity union */ @Nonnull - public static <ENTITY_UNION extends UnionTemplate, ENTITY extends RecordTemplate> ENTITY_UNION newEntityUnion( - @Nonnull Class<ENTITY_UNION> entityUnionClass, @Nonnull ENTITY entity) { + public static <ENTITY_UNION extends UnionTemplate, ENTITY extends RecordTemplate> + ENTITY_UNION newEntityUnion( + @Nonnull Class<ENTITY_UNION> entityUnionClass, @Nonnull ENTITY entity) { EntityValidator.validateEntityUnionSchema(entityUnionClass); diff --git a/li-utils/src/main/java/com/datahub/util/RecordUtils.java b/li-utils/src/main/java/com/datahub/util/RecordUtils.java index a9f8a07742491..d57875f79de61 100644 --- a/li-utils/src/main/java/com/datahub/util/RecordUtils.java +++ b/li-utils/src/main/java/com/datahub/util/RecordUtils.java @@ -33,10 +33,10 @@ import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; - public class RecordUtils { - private static final JacksonDataTemplateCodec DATA_TEMPLATE_CODEC = new JacksonDataTemplateCodec(); + private static final JacksonDataTemplateCodec DATA_TEMPLATE_CODEC = + new JacksonDataTemplateCodec(); private static final String ARRAY_WILDCARD = "*"; private static final Pattern LEADING_SPACESLASH_PATTERN = Pattern.compile("^[/ ]+"); private static final Pattern TRAILING_SPACESLASH_PATTERN = Pattern.compile("[/ ]+$"); @@ -44,10 +44,11 @@ public class RecordUtils { /** * Using in-memory hash map to store the get/is methods of the schema fields of RecordTemplate. - * Here map has RecordTemplate class as key, value being another map of field name with the associated get/is method + * Here map has RecordTemplate class as key, value being another map of field name with the + * associated get/is method */ - private static final ConcurrentHashMap<Class<? extends RecordTemplate>, Map<String, Method>> METHOD_CACHE = - new ConcurrentHashMap<>(); + private static final ConcurrentHashMap<Class<? 
extends RecordTemplate>, Map<String, Method>> + METHOD_CACHE = new ConcurrentHashMap<>(); private RecordUtils() { // Util class @@ -72,7 +73,8 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { try { return DATA_TEMPLATE_CODEC.mapToString(recordTemplate.data()); } catch (IOException e) { - throw new ModelConversionException("Failed to serialize RecordTemplate: " + recordTemplate.toString()); + throw new ModelConversionException( + "Failed to serialize RecordTemplate: " + recordTemplate.toString()); } } @@ -85,7 +87,8 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { * @return the created {@link RecordTemplate} */ @Nonnull - public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> type, @Nonnull String jsonString) { + public static <T extends RecordTemplate> T toRecordTemplate( + @Nonnull Class<T> type, @Nonnull String jsonString) { DataMap dataMap; try { dataMap = DATA_TEMPLATE_CODEC.stringToMap(jsonString); @@ -105,18 +108,21 @@ public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> ty * @return the created {@link RecordTemplate} */ @Nonnull - public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> type, @Nonnull DataMap dataMap) { + public static <T extends RecordTemplate> T toRecordTemplate( + @Nonnull Class<T> type, @Nonnull DataMap dataMap) { Constructor<T> constructor; try { constructor = type.getConstructor(DataMap.class); } catch (NoSuchMethodException e) { - throw new ModelConversionException("Unable to find constructor for " + type.getCanonicalName(), e); + throw new ModelConversionException( + "Unable to find constructor for " + type.getCanonicalName(), e); } try { return constructor.newInstance(dataMap); } catch (Exception e) { - throw new ModelConversionException("Failed to invoke constructor for " + type.getCanonicalName(), e); + throw new ModelConversionException( + "Failed to invoke constructor for " + type.getCanonicalName(), e); } } @@ -128,7 +134,8 @@ public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> ty * @return the created {@link RecordTemplate} */ @Nonnull - public static RecordTemplate toRecordTemplate(@Nonnull String className, @Nonnull DataMap dataMap) { + public static RecordTemplate toRecordTemplate( + @Nonnull String className, @Nonnull DataMap dataMap) { Class<? extends RecordTemplate> clazz; try { clazz = Class.forName(className).asSubclass(RecordTemplate.class); @@ -145,34 +152,41 @@ public static RecordTemplate toRecordTemplate(@Nonnull String className, @Nonnul * @param entity the entity value. * @param aspectClass the aspect class. * @return the aspect which is included in the entity. 
- * */ + */ @Nonnull - public static <ASPECT extends RecordTemplate, ENTITY extends RecordTemplate> ASPECT extractAspectFromSingleAspectEntity( - @Nonnull ENTITY entity, @Nonnull Class<ASPECT> aspectClass) { + public static <ASPECT extends RecordTemplate, ENTITY extends RecordTemplate> + ASPECT extractAspectFromSingleAspectEntity( + @Nonnull ENTITY entity, @Nonnull Class<ASPECT> aspectClass) { // Create an empty aspect to extract it's field names final Constructor<ASPECT> constructor; try { @SuppressWarnings("rawtypes") - final Class[] constructorParamArray = new Class[]{}; + final Class[] constructorParamArray = new Class[] {}; constructor = aspectClass.getConstructor(constructorParamArray); } catch (NoSuchMethodException e) { - throw new RuntimeException("Exception occurred while trying to get the default constructor for the aspect. ", e); + throw new RuntimeException( + "Exception occurred while trying to get the default constructor for the aspect. ", e); } final ASPECT aspect; try { aspect = constructor.newInstance(); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException("Exception occurred while creating an instance of the aspect. ", e); + throw new RuntimeException( + "Exception occurred while creating an instance of the aspect. ", e); } final Set<String> aspectFields = - aspect.schema().getFields().stream().map(RecordDataSchema.Field::getName).collect(Collectors.toSet()); + aspect.schema().getFields().stream() + .map(RecordDataSchema.Field::getName) + .collect(Collectors.toSet()); // Get entity's field names and only keep fields which occur in the entity and not in the aspect final Set<String> entityFields = - entity.schema().getFields().stream().map(RecordDataSchema.Field::getName).collect(Collectors.toSet()); + entity.schema().getFields().stream() + .map(RecordDataSchema.Field::getName) + .collect(Collectors.toSet()); entityFields.removeAll(aspectFields); // remove non aspect fields from entity's cloned datamap and use it to create an aspect @@ -194,13 +208,15 @@ public static <ASPECT extends RecordTemplate, ENTITY extends RecordTemplate> ASP * @return the field */ @Nonnull - public static <T extends RecordTemplate> RecordDataSchema.Field getRecordDataSchemaField(@Nonnull T recordTemplate, - @Nonnull String fieldName) { + public static <T extends RecordTemplate> RecordDataSchema.Field getRecordDataSchemaField( + @Nonnull T recordTemplate, @Nonnull String fieldName) { RecordDataSchema.Field field = recordTemplate.schema().getField(fieldName); if (field == null) { throw new InvalidSchemaException( - String.format("Missing expected field '%s' in %s", fieldName, recordTemplate.getClass().getCanonicalName())); + String.format( + "Missing expected field '%s' in %s", + fieldName, recordTemplate.getClass().getCanonicalName())); } return field; } @@ -212,14 +228,20 @@ public static <T extends RecordTemplate> RecordDataSchema.Field getRecordDataSch * @param fieldName the name of the field to update * @param value the value to set */ - public static <T extends RecordTemplate, V> void setRecordTemplatePrimitiveField(@Nonnull T recordTemplate, - @Nonnull String fieldName, @Nonnull V value) { + public static <T extends RecordTemplate, V> void setRecordTemplatePrimitiveField( + @Nonnull T recordTemplate, @Nonnull String fieldName, @Nonnull V value) { final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method putDirect = - getProtectedMethod(RecordTemplate.class, "putDirect", 
RecordDataSchema.Field.class, Class.class, Object.class, + getProtectedMethod( + RecordTemplate.class, + "putDirect", + RecordDataSchema.Field.class, + Class.class, + Object.class, SetMode.class); - invokeProtectedMethod(recordTemplate, putDirect, field, value.getClass(), value, SetMode.DISALLOW_NULL); + invokeProtectedMethod( + recordTemplate, putDirect, field, value.getClass(), value, SetMode.DISALLOW_NULL); } /** @@ -234,9 +256,15 @@ public static <T extends RecordTemplate, V> void setRecordTemplateComplexField( final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method putWrapped = - getProtectedMethod(RecordTemplate.class, "putWrapped", RecordDataSchema.Field.class, Class.class, - DataTemplate.class, SetMode.class); - invokeProtectedMethod(recordTemplate, putWrapped, field, value.getClass(), value, SetMode.DISALLOW_NULL); + getProtectedMethod( + RecordTemplate.class, + "putWrapped", + RecordDataSchema.Field.class, + Class.class, + DataTemplate.class, + SetMode.class); + invokeProtectedMethod( + recordTemplate, putWrapped, field, value.getClass(), value, SetMode.DISALLOW_NULL); } /** @@ -248,14 +276,19 @@ public static <T extends RecordTemplate, V> void setRecordTemplateComplexField( * @return the value for the field */ @Nonnull - public static <T extends RecordTemplate, V> V getRecordTemplateField(@Nonnull T recordTemplate, - @Nonnull String fieldName, @Nonnull Class<V> valueClass) { + public static <T extends RecordTemplate, V> V getRecordTemplateField( + @Nonnull T recordTemplate, @Nonnull String fieldName, @Nonnull Class<V> valueClass) { final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method obtainCustomType = - getProtectedMethod(RecordTemplate.class, "obtainCustomType", RecordDataSchema.Field.class, Class.class, + getProtectedMethod( + RecordTemplate.class, + "obtainCustomType", + RecordDataSchema.Field.class, + Class.class, GetMode.class); - return (V) invokeProtectedMethod(recordTemplate, obtainCustomType, field, valueClass, GetMode.STRICT); + return (V) + invokeProtectedMethod(recordTemplate, obtainCustomType, field, valueClass, GetMode.STRICT); } /** @@ -272,9 +305,14 @@ public static <T extends RecordTemplate, V extends DataTemplate> V getRecordTemp final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method obtainWrapped = - getProtectedMethod(RecordTemplate.class, "obtainWrapped", RecordDataSchema.Field.class, Class.class, + getProtectedMethod( + RecordTemplate.class, + "obtainWrapped", + RecordDataSchema.Field.class, + Class.class, GetMode.class); - return (V) invokeProtectedMethod(recordTemplate, obtainWrapped, field, valueClass, GetMode.STRICT); + return (V) + invokeProtectedMethod(recordTemplate, obtainWrapped, field, valueClass, GetMode.STRICT); } /** @@ -290,22 +328,33 @@ public static <V extends RecordTemplate> RecordTemplate getSelectedRecordTemplat final DataSchema dataSchema = unionTemplate.memberType(); if (!(dataSchema instanceof RecordDataSchema)) { throw new InvalidSchemaException( - "The currently selected member isn't a RecordTemplate in " + unionTemplate.getClass().getCanonicalName()); + "The currently selected member isn't a RecordTemplate in " + + unionTemplate.getClass().getCanonicalName()); } final Class<? 
extends RecordTemplate> clazz = - ModelUtils.getClassFromName(((RecordDataSchema) dataSchema).getBindingName(), RecordTemplate.class); + ModelUtils.getClassFromName( + ((RecordDataSchema) dataSchema).getBindingName(), RecordTemplate.class); final Method obtainWrapped = - getProtectedMethod(UnionTemplate.class, "obtainWrapped", DataSchema.class, Class.class, String.class); - final List<UnionDataSchema.Member> members = ((UnionDataSchema) unionTemplate.schema()).getMembers(); + getProtectedMethod( + UnionTemplate.class, "obtainWrapped", DataSchema.class, Class.class, String.class); + final List<UnionDataSchema.Member> members = + ((UnionDataSchema) unionTemplate.schema()).getMembers(); for (UnionDataSchema.Member m : members) { - if (m.hasAlias() && m.getType().getDereferencedDataSchema().getUnionMemberKey().equals(clazz.getName())) { - return (V) invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, m.getAlias()); + if (m.hasAlias() + && m.getType().getDereferencedDataSchema().getUnionMemberKey().equals(clazz.getName())) { + return (V) + invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, m.getAlias()); } } - return (V) invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, - ((RecordDataSchema) dataSchema).getFullName()); + return (V) + invokeProtectedMethod( + unionTemplate, + obtainWrapped, + dataSchema, + clazz, + ((RecordDataSchema) dataSchema).getFullName()); } /** @@ -320,25 +369,44 @@ public static <V extends RecordTemplate> RecordTemplate setSelectedRecordTemplat @Nonnull UnionTemplate unionTemplate, @Nonnull RecordTemplate selectedMember) { final Method selectWrapped = - getProtectedMethod(UnionTemplate.class, "selectWrapped", DataSchema.class, Class.class, String.class, + getProtectedMethod( + UnionTemplate.class, + "selectWrapped", + DataSchema.class, + Class.class, + String.class, DataTemplate.class); - final List<UnionDataSchema.Member> members = ((UnionDataSchema) unionTemplate.schema()).getMembers(); + final List<UnionDataSchema.Member> members = + ((UnionDataSchema) unionTemplate.schema()).getMembers(); for (UnionDataSchema.Member m : members) { - if (m.hasAlias() && m.getType() - .getDereferencedDataSchema() - .getUnionMemberKey() - .equals(selectedMember.getClass().getName())) { - return (V) invokeProtectedMethod(unionTemplate, selectWrapped, selectedMember.schema(), - selectedMember.getClass(), m.getAlias(), selectedMember); + if (m.hasAlias() + && m.getType() + .getDereferencedDataSchema() + .getUnionMemberKey() + .equals(selectedMember.getClass().getName())) { + return (V) + invokeProtectedMethod( + unionTemplate, + selectWrapped, + selectedMember.schema(), + selectedMember.getClass(), + m.getAlias(), + selectedMember); } } - return (V) invokeProtectedMethod(unionTemplate, selectWrapped, selectedMember.schema(), selectedMember.getClass(), - selectedMember.schema().getUnionMemberKey(), selectedMember); + return (V) + invokeProtectedMethod( + unionTemplate, + selectWrapped, + selectedMember.schema(), + selectedMember.getClass(), + selectedMember.schema().getUnionMemberKey(), + selectedMember); } @Nonnull - private static Method getProtectedMethod(@Nonnull Class clazz, @Nonnull String methodName, - @Nonnull Class<?>... parameterTypes) { + private static Method getProtectedMethod( + @Nonnull Class clazz, @Nonnull String methodName, @Nonnull Class<?>... 
parameterTypes) { try { return clazz.getDeclaredMethod(methodName, parameterTypes); } catch (NoSuchMethodException e) {
@@ -359,26 +427,32 @@ private static <T> T invokeProtectedMethod(Object object, Method method, Object. } @Nonnull
- private static Map<String, Method> getMethodsFromRecordTemplate(@Nonnull RecordTemplate recordTemplate) {
+ private static Map<String, Method> getMethodsFromRecordTemplate(
+ @Nonnull RecordTemplate recordTemplate) {
final HashMap<String, Method> methodMap = new HashMap<>(); for (RecordDataSchema.Field field : recordTemplate.schema().getFields()) { final String capitalizedName = capitalizeFirst(field.getName()); final String getMethodName =
- (field.getType().getType().equals(RecordDataSchema.Type.BOOLEAN) ? "is" : "get") + capitalizedName;
+ (field.getType().getType().equals(RecordDataSchema.Type.BOOLEAN) ? "is" : "get")
+ + capitalizedName;
try { methodMap.put(field.getName(), recordTemplate.getClass().getMethod(getMethodName)); } catch (NoSuchMethodException e) {
- throw new RuntimeException(String.format("Failed to get method [%s], for class [%s], field [%s]", getMethodName,
- recordTemplate.getClass().getCanonicalName(), field.getName()), e);
+ throw new RuntimeException(
+ String.format(
+ "Failed to get method [%s], for class [%s], field [%s]",
+ getMethodName, recordTemplate.getClass().getCanonicalName(), field.getName()),
+ e);
} } return Collections.unmodifiableMap(methodMap); } /**
- * Given a {@link RecordTemplate} and field name, this will find and execute getFieldName/isFieldName and return the result
- * If neither getFieldName/isFieldName has been called for any of the fields of the RecordTemplate, then the get/is method
- * for all schema fields of the record will be found and subsequently cached.
+ * Given a {@link RecordTemplate} and field name, this will find and execute
+ * getFieldName/isFieldName and return the result. If neither getFieldName/isFieldName has been
+ * called for any of the fields of the RecordTemplate, then the get/is method for all schema
+ * fields of the record will be found and subsequently cached.
* * @param record {@link RecordTemplate} whose field has to be referenced * @param fieldName field name of the record that has to be referenced
@@ -391,8 +465,10 @@ private static Object invokeMethod(@Nonnull RecordTemplate record, @Nonnull Stri
return METHOD_CACHE.get(record.getClass()).get(fieldName).invoke(record); } catch (IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(
- String.format("Failed to execute method for class [%s], field [%s]", record.getClass().getCanonicalName(),
- fieldName), e);
+ String.format(
+ "Failed to execute method for class [%s], field [%s]",
+ record.getClass().getCanonicalName(), fieldName),
+ e);
} }
@@ -402,21 +478,24 @@ private static Object getUnionMember(@Nonnull UnionTemplate union, @Nonnull Stri
return ((DataMap) union.data()).get(memberName); } throw new RuntimeException(
- String.format("Failed to extract member from union [%s], member [%s]", union.getClass().getCanonicalName(),
- memberName));
+ String.format(
+ "Failed to extract member from union [%s], member [%s]",
+ union.getClass().getCanonicalName(), memberName));
} /**
- * Helper method for referencing array of RecordTemplate objects. Referencing a particular index or range of indices of an array is not supported.
+ * Helper method for referencing array of RecordTemplate objects. Referencing a particular index
+ * or range of indices of an array is not supported.
*
- * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} which needs to be referenced
+ * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate}
+ * which needs to be referenced
* @param ps {@link PathSpec} for the entire path inside the array that needs to be referenced * @return {@link List} of objects from the array, referenced using the PathSpec */ @Nonnull @SuppressWarnings("rawtypes")
- private static List<Object> getReferenceForAbstractArray(@Nonnull AbstractArrayTemplate<Object> reference,
- @Nonnull PathSpec ps) {
+ private static List<Object> getReferenceForAbstractArray(
+ @Nonnull AbstractArrayTemplate<Object> reference, @Nonnull PathSpec ps) {
if (!reference.isEmpty()) { return Arrays.stream((reference).toArray()) .map(x -> getFieldValue(x, ps))
@@ -427,17 +506,19 @@ private static List<Object> getReferenceForAbstractArray(@Nonnull AbstractArrayT
} /**
- * Nullable version of the method above. Allows us to get null values in a list in the correct oder.
- * Helper method for referencing array of RecordTemplate objects. Referencing a particular index or range of indices of an array is not supported.
+ * Nullable version of the method above. Allows us to get null values in a list in the correct
+ * order. Helper method for referencing array of RecordTemplate objects. Referencing a particular
+ * index or range of indices of an array is not supported.
*
- * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} which needs to be referenced
+ * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate}
+ * which needs to be referenced
* @param ps {@link PathSpec} for the entire path inside the array that needs to be referenced * @return {@link List} of objects from the array, referenced using the PathSpec */ @Nullable @SuppressWarnings("rawtypes")
- private static List<Object> getNullableReferenceForAbstractArray(@Nonnull AbstractArrayTemplate<Object> reference,
- @Nonnull PathSpec ps) {
+ private static List<Object> getNullableReferenceForAbstractArray(
+ @Nonnull AbstractArrayTemplate<Object> reference, @Nonnull PathSpec ps) {
if (!reference.isEmpty()) { return Arrays.stream((reference).toArray()) .map(x -> getNullableFieldValue(x, ps))
@@ -447,11 +528,12 @@ private static List<Object> getNullableReferenceForAbstractArray(@Nonnull Abstra
} /**
- * Similar to {@link #getFieldValue(Object, PathSpec)} but takes string representation of Pegasus PathSpec as
- * input.
+ * Similar to {@link #getFieldValue(Object, PathSpec)} but takes string representation of Pegasus
+ * PathSpec as input.
*/ @Nonnull
- public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull String pathSpecAsString) {
+ public static Optional<Object> getFieldValue(
+ @Nonnull Object record, @Nonnull String pathSpecAsString) {
pathSpecAsString = LEADING_SPACESLASH_PATTERN.matcher(pathSpecAsString).replaceAll(""); pathSpecAsString = TRAILING_SPACESLASH_PATTERN.matcher(pathSpecAsString).replaceAll("");
@@ -462,13 +544,16 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull St
} /**
- * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of the path from the record.
- * This handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of primitive types or array of records.
- * Fetching of values in a RecordTemplate where the field has a default value will return the field default value. - * Referencing field corresponding to a particular index or range of indices of an array is not supported. - * Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported. + * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of + * the path from the record. This handles only RecordTemplate, fields of which can be primitive + * types, typeRefs, arrays of primitive types or array of records. Fetching of values in a + * RecordTemplate where the field has a default value will return the field default value. + * Referencing field corresponding to a particular index or range of indices of an array is not + * supported. Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) + * FixedTemplate are currently not supported. * - * @param record {@link Object} Object to traverse the path. If record is of primitive type, and path is not empty, it will fail to traverse. + * @param record {@link Object} Object to traverse the path. If record is of primitive type, and + * path is not empty, it will fail to traverse. * @param ps {@link PathSpec} representing the path whose value needs to be returned * @return Referenced object of the RecordTemplate corresponding to the PathSpec */ @@ -484,7 +569,8 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull Pa } if (StringUtils.isNumeric(part)) { throw new UnsupportedOperationException( - String.format("Array indexing is not supported for %s (%s from %s)", part, ps, reference)); + String.format( + "Array indexing is not supported for %s (%s from %s)", part, ps, reference)); } if (reference instanceof RecordTemplate) { reference = invokeMethod((RecordTemplate) reference, part); @@ -497,8 +583,10 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull Pa return Optional.empty(); } } else if (reference instanceof AbstractArrayTemplate) { - return Optional.of(getReferenceForAbstractArray((AbstractArrayTemplate<Object>) reference, - new PathSpec(ps.getPathComponents().subList(i, pathSize)))); + return Optional.of( + getReferenceForAbstractArray( + (AbstractArrayTemplate<Object>) reference, + new PathSpec(ps.getPathComponents().subList(i, pathSize)))); } else { throw new UnsupportedOperationException( String.format("Failed at extracting %s (%s from %s)", part, ps, record)); @@ -508,16 +596,20 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull Pa } /** - * A nullable version of the getFieldValue method above. This is used when grabbing values from aspects based on field specs - * on Relationship annotations. This allows us to get null values for fields that don't have a value for a given path spec. - * Then we can map values correctly based on list indices creating graph edges. - * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of the path from the record. - * This handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of primitive types or array of records. - * Fetching of values in a RecordTemplate where the field has a default value will return the field default value. - * Referencing field corresponding to a particular index or range of indices of an array is not supported. 
- * Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported, return null. + * A nullable version of the getFieldValue method above. This is used when grabbing values from + * aspects based on field specs on Relationship annotations. This allows us to get null values for + * fields that don't have a value for a given path spec. Then we can map values correctly based on + * list indices creating graph edges. Given a {@link Object} and {@link + * com.linkedin.data.schema.PathSpec} this will return value of the path from the record. This + * handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of + * primitive types or array of records. Fetching of values in a RecordTemplate where the field has + * a default value will return the field default value. Referencing field corresponding to a + * particular index or range of indices of an array is not supported. Fields corresponding to 1) + * multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported, + * return null. * - * @param record {@link Object} Object to traverse the path. If record is of primitive type, and path is not empty, it will fail to traverse. + * @param record {@link Object} Object to traverse the path. If record is of primitive type, and + * path is not empty, it will fail to traverse. * @param ps {@link PathSpec} representing the path whose value needs to be returned * @return Referenced object of the RecordTemplate corresponding to the PathSpec */ @@ -533,14 +625,16 @@ public static Object getNullableFieldValue(@Nonnull Object record, @Nonnull Path } if (StringUtils.isNumeric(part)) { throw new UnsupportedOperationException( - String.format("Array indexing is not supported for %s (%s from %s)", part, ps, reference)); + String.format( + "Array indexing is not supported for %s (%s from %s)", part, ps, reference)); } if (reference instanceof RecordTemplate) { reference = invokeMethod((RecordTemplate) reference, part); } else if (reference instanceof UnionTemplate) { reference = getUnionMember((UnionTemplate) reference, part); } else if (reference instanceof AbstractArrayTemplate) { - return getNullableReferenceForAbstractArray((AbstractArrayTemplate<Object>) reference, + return getNullableReferenceForAbstractArray( + (AbstractArrayTemplate<Object>) reference, new PathSpec(ps.getPathComponents().subList(i, pathSize))); } else { return null; @@ -548,5 +642,4 @@ public static Object getNullableFieldValue(@Nonnull Object record, @Nonnull Path } return reference; } - } diff --git a/li-utils/src/main/java/com/datahub/util/Statement.java b/li-utils/src/main/java/com/datahub/util/Statement.java index c30a5e9b70c76..f2c56a409312c 100644 --- a/li-utils/src/main/java/com/datahub/util/Statement.java +++ b/li-utils/src/main/java/com/datahub/util/Statement.java @@ -5,7 +5,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class Statement { diff --git a/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java b/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java index 3df693e59adf7..56b97a3fb8233 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * An exception to be thrown when elastic search query fails. - */ +/** An exception to be thrown when elastic search query fails. 
*/
public class ESQueryException extends RuntimeException { public ESQueryException(String message) {
diff --git a/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java b/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java
index 019e6896eb006..b7e182df527bf 100644
--- a/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java
+++ b/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java
@@ -1,8 +1,6 @@ package com.datahub.util.exception;
-/**
- * Thrown when a schema didn't match the expectation.
- */
+/** Thrown when a schema didn't match the expectation. */
public class InvalidSchemaException extends RuntimeException { public InvalidSchemaException(String message) {
diff --git a/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java b/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java
index bab319812bed9..2a1784f6d7197 100644
--- a/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java
+++ b/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java
@@ -1,8 +1,6 @@ package com.datahub.util.exception;
-/**
- * An exception to be thrown when Model Conversion fails.
- */
+/** An exception to be thrown when Model Conversion fails. */
public class ModelConversionException extends RuntimeException { public ModelConversionException(String message) {
diff --git a/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java b/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java
index e0533cb2d2502..fc082abf22771 100644
--- a/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java
+++ b/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java
@@ -8,14 +8,12 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull;
-
-/**
- * Utility class to validate aspects are part of the union schemas.
- */
+/** Utility class to validate aspects are part of the union schemas. */
public final class AspectValidator { // A cache of validated classes
- private static final Set<Class<? extends UnionTemplate>> VALIDATED = ConcurrentHashMap.newKeySet();
+ private static final Set<Class<? extends UnionTemplate>> VALIDATED =
+ ConcurrentHashMap.newKeySet();
private AspectValidator() { // Util class
@@ -26,15 +24,18 @@ private AspectValidator() { * * @param schema schema for the model */
- public static void validateAspectUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String aspectClassName) {
+ public static void validateAspectUnionSchema(
+ @Nonnull UnionDataSchema schema, @Nonnull String aspectClassName) {
if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) {
- ValidationUtils.invalidSchema("Aspect '%s' must be a union containing only record type members", aspectClassName);
+ ValidationUtils.invalidSchema(
+ "Aspect '%s' must be a union containing only record type members", aspectClassName);
} } /**
- * Similar to {@link #validateAspectUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches results.
+ * Similar to {@link #validateAspectUnionSchema(UnionDataSchema, String)} but takes a {@link Class}
+ * instead and caches results.
*/ public static void validateAspectUnionSchema(@Nonnull Class<?
extends UnionTempl } private static boolean isValidMetadataField(RecordDataSchema.Field field) {
- return field.getName().equals("metadata") && !field.getOptional()
- && field.getType().getType() == DataSchema.Type.UNION && ValidationUtils.isUnionWithOnlyComplexMembers(
- (UnionDataSchema) field.getType());
+ return field.getName().equals("metadata")
+ && !field.getOptional()
+ && field.getType().getType() == DataSchema.Type.UNION
+ && ValidationUtils.isUnionWithOnlyComplexMembers((UnionDataSchema) field.getType());
} }
diff --git a/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java b/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java
index 3fbf348c5cb1c..034e3008b26ab 100644
--- a/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java
+++ b/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java
@@ -7,14 +7,12 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull;
-
-/**
- * Utility class to validate delta event schemas.
- */
+/** Utility class to validate delta event schemas. */
public final class DeltaValidator { // A cache of validated classes
- private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet();
+ private static final Set<Class<? extends RecordTemplate>> VALIDATED =
+ ConcurrentHashMap.newKeySet();
private DeltaValidator() { // Util class
@@ -30,17 +28,19 @@ public static void validateDeltaSchema(@Nonnull RecordDataSchema schema) {
final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) {
- ValidationUtils.invalidSchema("Delta '%s' must contain an non-optional 'urn' field of URN type", className);
+ ValidationUtils.invalidSchema(
+ "Delta '%s' must contain a non-optional 'urn' field of URN type", className);
} if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, DeltaValidator::isValidDeltaField)) {
- ValidationUtils.invalidSchema("Delta '%s' must contain an non-optional 'delta' field of UNION type",
- className);
+ ValidationUtils.invalidSchema(
+ "Delta '%s' must contain a non-optional 'delta' field of UNION type", className);
} } /**
- * Similar to {@link #validateDeltaSchema(RecordDataSchema)} but take a {@link Class} instead and caches results.
+ * Similar to {@link #validateDeltaSchema(RecordDataSchema)} but takes a {@link Class} instead and
+ * caches results.
*/ public static void validateDeltaSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { return;
@@ -52,7 +52,8 @@ public static void validateDeltaSchema(@Nonnull Class<? extends RecordTemplate>
} private static boolean isValidDeltaField(@Nonnull RecordDataSchema.Field field) {
- return field.getName().equals("delta") && !field.getOptional()
+ return field.getName().equals("delta")
+ && !field.getOptional()
&& field.getType().getType() == DataSchema.Type.UNION; }
-}
\ No newline at end of file
+}
diff --git a/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java b/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java
index 31898c01f4233..c8741d2ccea83 100644
--- a/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java
+++ b/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java
@@ -8,21 +8,21 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull;
-
-/**
- * Utility class to validate search document schemas.
- */
+/** Utility class to validate search document schemas. */
public final class DocumentValidator { // Allowed non-optional fields. All other fields must be optional.
- private static final Set<String> NON_OPTIONAL_FIELDS = Collections.unmodifiableSet(new HashSet<String>() {
- {
- add("urn");
- }
- });
+ private static final Set<String> NON_OPTIONAL_FIELDS =
+ Collections.unmodifiableSet(
+ new HashSet<String>() {
+ {
+ add("urn");
+ }
+ });
// A cache of validated classes
- private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet();
+ private static final Set<Class<? extends RecordTemplate>> VALIDATED =
+ ConcurrentHashMap.newKeySet();
private DocumentValidator() { // Util class
@@ -38,21 +38,29 @@ public static void validateDocumentSchema(@Nonnull RecordDataSchema schema) {
final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) {
- ValidationUtils.invalidSchema("Document '%s' must contain an non-optional 'urn' field of URN type", className);
+ ValidationUtils.invalidSchema(
+ "Document '%s' must contain a non-optional 'urn' field of URN type", className);
}
- ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> {
- ValidationUtils.invalidSchema("Document '%s' contains a field '%s' that makes use of a disallowed type '%s'.",
- className, field.getName(), field.getType().getType());
- });
+ ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES)
+ .forEach(
+ field -> {
+ ValidationUtils.invalidSchema(
+ "Document '%s' contains a field '%s' that makes use of a disallowed type '%s'.",
+ className, field.getName(), field.getType().getType());
+ });
- ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS).forEach(field -> {
- ValidationUtils.invalidSchema("Document '%s' must contain an optional '%s' field", className, field.getName());
- });
+ ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS)
+ .forEach(
+ field -> {
+ ValidationUtils.invalidSchema(
+ "Document '%s' must contain an optional '%s' field", className, field.getName());
+ });
} /**
- * Similar to {@link #validateDocumentSchema(RecordDataSchema)} but take a {@link Class} instead and caches results.
+ * Similar to {@link #validateDocumentSchema(RecordDataSchema)} but takes a {@link Class} instead
+ * and caches results.
*/ public static void validateDocumentSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) {
@@ -62,4 +70,4 @@ public static void validateDocumentSchema(@Nonnull Class<? extends RecordTemplat
validateDocumentSchema(ValidationUtils.getRecordSchema(clazz)); VALIDATED.add(clazz); }
-}
\ No newline at end of file
+}
diff --git a/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java b/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java
index ccb1c2751a802..726283a40f830 100644
--- a/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java
+++ b/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java
@@ -11,25 +11,25 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull;
-
-/**
- * Utility class to validate entity schemas.
- */
+/** Utility class to validate entity schemas. */
public final class EntityValidator { // Allowed non-optional fields. All other fields must be optional.
- private static final Set<String> NON_OPTIONAL_FIELDS = Collections.unmodifiableSet(new HashSet<String>() {
- {
- add("urn");
- }
- });
+ private static final Set<String> NON_OPTIONAL_FIELDS =
+ Collections.unmodifiableSet(
+ new HashSet<String>() {
+ {
+ add("urn");
+ }
+ });
// A cache of validated classes
- private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet();
+ private static final Set<Class<? extends RecordTemplate>> VALIDATED =
+ ConcurrentHashMap.newKeySet();
// A cache of validated classes
- private static final Set<Class<? extends UnionTemplate>> UNION_VALIDATED = ConcurrentHashMap.newKeySet();
-
+ private static final Set<Class<? extends UnionTemplate>> UNION_VALIDATED =
+ ConcurrentHashMap.newKeySet();
private EntityValidator() { // Util class
@@ -45,21 +45,29 @@ public static void validateEntitySchema(@Nonnull RecordDataSchema schema) {
final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) {
- ValidationUtils.invalidSchema("Entity '%s' must contain a non-optional 'urn' field of URN type", className);
+ ValidationUtils.invalidSchema(
+ "Entity '%s' must contain a non-optional 'urn' field of URN type", className);
}
- ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> {
- ValidationUtils.invalidSchema("Entity '%s' contains a field '%s' that makes use of a disallowed type '%s'.",
- className, field.getName(), field.getType().getType());
- });
-
- ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS).forEach(field -> {
- ValidationUtils.invalidSchema("Entity '%s' must contain an optional '%s' field", className, field.getName());
- });
+ ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES)
+ .forEach(
+ field -> {
+ ValidationUtils.invalidSchema(
+ "Entity '%s' contains a field '%s' that makes use of a disallowed type '%s'.",
+ className, field.getName(), field.getType().getType());
+ });
+
+ ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS)
+ .forEach(
+ field -> {
+ ValidationUtils.invalidSchema(
+ "Entity '%s' must contain an optional '%s' field", className, field.getName());
+ });
} /**
- * Similar to {@link #validateEntitySchema(RecordDataSchema)} but take a {@link Class} instead and caches results.
+ * Similar to {@link #validateEntitySchema(RecordDataSchema)} but takes a {@link Class} instead and
+ * caches results.
*/ public static void validateEntitySchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) {
@@ -71,8 +79,8 @@ public static void validateEntitySchema(@Nonnull Class<? extends RecordTemplate>
} /**
- * Similar to {@link #validateEntityUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches
- * results.
+ * Similar to {@link #validateEntityUnionSchema(UnionDataSchema, String)} but takes a {@link Class}
+ * instead and caches results.
*/ public static void validateEntityUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { if (UNION_VALIDATED.contains(clazz)) {
@@ -88,16 +96,16 @@ public static void validateEntityUnionSchema(@Nonnull Class<?
extends UnionTempl * * @param schema schema for the model */ - public static void validateEntityUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String entityClassName) { + public static void validateEntityUnionSchema( + @Nonnull UnionDataSchema schema, @Nonnull String entityClassName) { if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) { - ValidationUtils.invalidSchema("Entity '%s' must be a union containing only record type members", entityClassName); + ValidationUtils.invalidSchema( + "Entity '%s' must be a union containing only record type members", entityClassName); } } - /** - * Checks if an entity schema is valid. - */ + /** Checks if an entity schema is valid. */ public static boolean isValidEntitySchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (!VALIDATED.contains(clazz)) { try { @@ -109,4 +117,4 @@ public static boolean isValidEntitySchema(@Nonnull Class<? extends RecordTemplat return true; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java b/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java index f4627087149fc..c96f1c4fb7313 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java @@ -14,14 +14,15 @@ import javax.annotation.Nonnull; import lombok.Value; - public class RelationshipValidator { // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); // A cache of validated classes - private static final Set<Class<? extends UnionTemplate>> UNION_VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? 
extends UnionTemplate>> UNION_VALIDATED = + ConcurrentHashMap.newKeySet(); @Value private static class Pair { @@ -42,29 +43,33 @@ public static void validateRelationshipSchema(@Nonnull RecordDataSchema schema) final String className = schema.getBindingName(); - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, - field -> ValidationUtils.isValidUrnField(field, "source"))) { - ValidationUtils.invalidSchema("Relationship '%s' must contain an non-optional 'source' field of URN type", - className); + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, field -> ValidationUtils.isValidUrnField(field, "source"))) { + ValidationUtils.invalidSchema( + "Relationship '%s' must contain an non-optional 'source' field of URN type", className); } - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, - field -> ValidationUtils.isValidUrnField(field, "destination"))) { - ValidationUtils.invalidSchema("Relationship '%s' must contain an non-optional 'destination' field of URN type", + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, field -> ValidationUtils.isValidUrnField(field, "destination"))) { + ValidationUtils.invalidSchema( + "Relationship '%s' must contain an non-optional 'destination' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Relationship '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Relationship '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); validatePairings(schema); } - /** - * Similar to {@link #validateRelationshipSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateRelationshipSchema(RecordDataSchema)} but take a {@link Class} + * instead and caches results. */ public static void validateRelationshipSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -76,14 +81,17 @@ public static void validateRelationshipSchema(@Nonnull Class<? extends RecordTem } /** - * Similar to {@link #validateRelationshipUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateRelationshipUnionSchema(UnionDataSchema, String)} but take a {@link + * Class} instead and caches results. */ - public static void validateRelationshipUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { + public static void validateRelationshipUnionSchema( + @Nonnull Class<? extends UnionTemplate> clazz) { if (UNION_VALIDATED.contains(clazz)) { return; } - validateRelationshipUnionSchema(ValidationUtils.getUnionSchema(clazz), clazz.getCanonicalName()); + validateRelationshipUnionSchema( + ValidationUtils.getUnionSchema(clazz), clazz.getCanonicalName()); UNION_VALIDATED.add(clazz); } @@ -92,10 +100,13 @@ public static void validateRelationshipUnionSchema(@Nonnull Class<? 
extends Unio * * @param schema schema for the model */ - public static void validateRelationshipUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String relationshipClassName) { + public static void validateRelationshipUnionSchema( + @Nonnull UnionDataSchema schema, @Nonnull String relationshipClassName) { if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) { - ValidationUtils.invalidSchema("Relationship '%s' must be a union containing only record type members", relationshipClassName); + ValidationUtils.invalidSchema( + "Relationship '%s' must be a union containing only record type members", + relationshipClassName); } } @@ -105,39 +116,45 @@ private static void validatePairings(@Nonnull RecordDataSchema schema) { Map<String, Object> properties = schema.getProperties(); if (!properties.containsKey("pairings")) { - ValidationUtils.invalidSchema("Relationship '%s' must contain a 'pairings' property", className); + ValidationUtils.invalidSchema( + "Relationship '%s' must contain a 'pairings' property", className); } DataList pairings = (DataList) properties.get("pairings"); Set<Pair> registeredPairs = new HashSet<>(); - pairings.stream().forEach(obj -> { - DataMap map = (DataMap) obj; - if (!map.containsKey("source") || !map.containsKey("destination")) { - ValidationUtils.invalidSchema("Relationship '%s' contains an invalid 'pairings' item. " - + "Each item must contain a 'source' and 'destination' properties.", className); - } - - String sourceUrn = map.getString("source"); - if (!isValidUrnClass(sourceUrn)) { - ValidationUtils.invalidSchema( - "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", className, - sourceUrn); - } - - String destinationUrn = map.getString("destination"); - if (!isValidUrnClass(destinationUrn)) { - ValidationUtils.invalidSchema( - "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", className, - destinationUrn); - } - - Pair pair = new Pair(sourceUrn, destinationUrn); - if (registeredPairs.contains(pair)) { - ValidationUtils.invalidSchema("Relationship '%s' contains a repeated 'pairings' item (%s, %s)", className, - sourceUrn, destinationUrn); - } - registeredPairs.add(pair); - }); + pairings.stream() + .forEach( + obj -> { + DataMap map = (DataMap) obj; + if (!map.containsKey("source") || !map.containsKey("destination")) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains an invalid 'pairings' item. " + + "Each item must contain a 'source' and 'destination' properties.", + className); + } + + String sourceUrn = map.getString("source"); + if (!isValidUrnClass(sourceUrn)) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", + className, sourceUrn); + } + + String destinationUrn = map.getString("destination"); + if (!isValidUrnClass(destinationUrn)) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains an invalid item in 'pairings'. 
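
[As a reminder of the 'pairings' shape that validatePairings walks, here is a hand-built equivalent of that schema property; the URN class names are illustrative.]

import com.linkedin.data.DataList;
import com.linkedin.data.DataMap;

final class PairingsExample {
  static DataList examplePairings() {
    // Each pairing entry names the fully-qualified URN classes allowed on
    // either end of the relationship; repeated (source, destination) pairs
    // are rejected by validatePairings.
    DataMap pairing = new DataMap();
    pairing.put("source", "com.linkedin.common.urn.CorpuserUrn");
    pairing.put("destination", "com.linkedin.common.urn.DatasetUrn");

    DataList pairings = new DataList();
    pairings.add(pairing);
    return pairings;
  }
}
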
%s is not a valid URN class name.", + className, destinationUrn); + } + + Pair pair = new Pair(sourceUrn, destinationUrn); + if (registeredPairs.contains(pair)) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains a repeated 'pairings' item (%s, %s)", + className, sourceUrn, destinationUrn); + } + registeredPairs.add(pair); + }); } private static boolean isValidUrnClass(String className) { @@ -147,4 +164,4 @@ private static boolean isValidUrnClass(String className) { throw new RuntimeException(e); } } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java b/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java index 988fabe0411c8..08f349b146db6 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java @@ -13,11 +13,11 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - public class SnapshotValidator { // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); private SnapshotValidator() { // Util class @@ -33,19 +33,22 @@ public static void validateSnapshotSchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Snapshot '%s' must contain an non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Snapshot '%s' must contain an non-optional 'urn' field of URN type", className); } - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, SnapshotValidator::isValidAspectsField)) { - ValidationUtils.invalidSchema("Snapshot '%s' must contain an non-optional 'aspects' field of ARRAY type", - className); + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, SnapshotValidator::isValidAspectsField)) { + ValidationUtils.invalidSchema( + "Snapshot '%s' must contain an non-optional 'aspects' field of ARRAY type", className); } validateAspectsItemType(schema.getField("aspects"), className); } /** - * Similar to {@link #validateSnapshotSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateSnapshotSchema(RecordDataSchema)} but take a {@link Class} instead + * and caches results. */ public static void validateSnapshotSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -61,38 +64,47 @@ public static void validateSnapshotSchema(@Nonnull Class<? extends RecordTemplat * * @param snapshotClasses a collection of snapshot classes. */ - public static void validateUniqueUrn(@Nonnull Collection<Class<? extends RecordTemplate>> snapshotClasses) { + public static void validateUniqueUrn( + @Nonnull Collection<Class<? 
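
[A minimal sketch of the SnapshotValidator entry points, assuming Pegasus-generated snapshot templates; the helper below is illustrative.]

import com.datahub.util.validator.SnapshotValidator;
import com.linkedin.data.template.RecordTemplate;
import java.util.Collection;

final class SnapshotValidationExample {
  static void validateAll(Collection<Class<? extends RecordTemplate>> snapshotClasses) {
    // Per-class structural checks: a non-optional "urn" field plus an
    // "aspects" array of a union typeref.
    snapshotClasses.forEach(SnapshotValidator::validateSnapshotSchema);

    // Cross-class check: no two snapshot classes may claim the same URN class.
    SnapshotValidator.validateUniqueUrn(snapshotClasses);
  }
}
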
extends RecordTemplate>> snapshotClasses) { final Set<Class<Urn>> urnClasses = new HashSet<>(); - snapshotClasses.forEach(snapshotClass -> { - final Class<Urn> urnClass = - ValidationUtils.getUrnClass(ValidationUtils.getRecordSchema(snapshotClass).getField("urn")); - if (urnClasses.contains(urnClass)) { - ValidationUtils.invalidSchema("URN class %s in %s has already been claimed by another snapshot.", urnClass, - snapshotClass); - } - urnClasses.add(urnClass); - }); + snapshotClasses.forEach( + snapshotClass -> { + final Class<Urn> urnClass = + ValidationUtils.getUrnClass( + ValidationUtils.getRecordSchema(snapshotClass).getField("urn")); + if (urnClasses.contains(urnClass)) { + ValidationUtils.invalidSchema( + "URN class %s in %s has already been claimed by another snapshot.", + urnClass, snapshotClass); + } + urnClasses.add(urnClass); + }); } private static boolean isValidAspectsField(@Nonnull RecordDataSchema.Field field) { - return field.getName().equals("aspects") && !field.getOptional() + return field.getName().equals("aspects") + && !field.getOptional() && field.getType().getType() == DataSchema.Type.ARRAY; } - private static void validateAspectsItemType(@Nonnull RecordDataSchema.Field aspectsField, @Nonnull String className) { + private static void validateAspectsItemType( + @Nonnull RecordDataSchema.Field aspectsField, @Nonnull String className) { DataSchema itemSchema = ((ArrayDataSchema) aspectsField.getType()).getItems(); if (itemSchema.getType() != DataSchema.Type.TYPEREF) { - ValidationUtils.invalidSchema("Snapshot %s' 'aspects' field must be an array of aspect typeref", className); + ValidationUtils.invalidSchema( + "Snapshot %s' 'aspects' field must be an array of aspect typeref", className); } TyperefDataSchema typerefSchema = (TyperefDataSchema) itemSchema; DataSchema unionSchema = typerefSchema.getDereferencedDataSchema(); if (unionSchema.getType() != DataSchema.Type.UNION) { - ValidationUtils.invalidSchema("Snapshot '%s' 'aspects' field must be an array of union typeref", className); + ValidationUtils.invalidSchema( + "Snapshot '%s' 'aspects' field must be an array of union typeref", className); } - AspectValidator.validateAspectUnionSchema((UnionDataSchema) unionSchema, typerefSchema.getBindingName()); + AspectValidator.validateAspectUnionSchema( + (UnionDataSchema) unionSchema, typerefSchema.getBindingName()); } } diff --git a/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java b/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java index 1af6de8ff3940..5b38ff21e4b81 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java +++ b/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java @@ -1,7 +1,7 @@ package com.datahub.util.validator; -import com.linkedin.common.urn.Urn; import com.datahub.util.exception.InvalidSchemaException; +import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; @@ -18,24 +18,22 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -/** - * Utility class for schema validation classes. - */ +/** Utility class for schema validation classes. 
*/ public final class ValidationUtils { public static final Set<DataSchema.Type> PRIMITIVE_TYPES = - Collections.unmodifiableSet(new HashSet<DataSchema.Type>() { - { - add(DataSchema.Type.BOOLEAN); - add(DataSchema.Type.INT); - add(DataSchema.Type.LONG); - add(DataSchema.Type.FLOAT); - add(DataSchema.Type.DOUBLE); - add(DataSchema.Type.STRING); - add(DataSchema.Type.ENUM); - } - }); + Collections.unmodifiableSet( + new HashSet<DataSchema.Type>() { + { + add(DataSchema.Type.BOOLEAN); + add(DataSchema.Type.INT); + add(DataSchema.Type.LONG); + add(DataSchema.Type.FLOAT); + add(DataSchema.Type.DOUBLE); + add(DataSchema.Type.STRING); + add(DataSchema.Type.ENUM); + } + }); private ValidationUtils() { // Util class @@ -45,9 +43,7 @@ public static void invalidSchema(@Nonnull String format, Object... args) { throw new InvalidSchemaException(String.format(format, args)); } - /** - * Gets the {@link RecordDataSchema} of a {@link RecordTemplate} via reflection. - */ + /** Gets the {@link RecordDataSchema} of a {@link RecordTemplate} via reflection. */ @Nonnull public static RecordDataSchema getRecordSchema(@Nonnull Class<? extends RecordTemplate> clazz) { try { @@ -61,9 +57,7 @@ public static RecordDataSchema getRecordSchema(@Nonnull Class<? extends RecordTe } } - /** - * Gets the {@link UnionDataSchema} of a {@link UnionTemplate} via reflection. - */ + /** Gets the {@link UnionDataSchema} of a {@link UnionTemplate} via reflection. */ @Nonnull public static UnionDataSchema getUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { try { @@ -77,30 +71,29 @@ public static UnionDataSchema getUnionSchema(@Nonnull Class<? extends UnionTempl } } - /** - * Returns true if the supply schema has exactly one field matching the predicate. - */ - public static boolean schemaHasExactlyOneSuchField(@Nonnull RecordDataSchema schema, - @Nonnull Predicate<RecordDataSchema.Field> predicate) { + /** Returns true if the supply schema has exactly one field matching the predicate. */ + public static boolean schemaHasExactlyOneSuchField( + @Nonnull RecordDataSchema schema, @Nonnull Predicate<RecordDataSchema.Field> predicate) { return schema.getFields().stream().filter(predicate).count() == 1; } - /** - * Returns true if the non-optional field matches the field name and has a URN type. - */ - public static boolean isValidUrnField(@Nonnull RecordDataSchema.Field field, @Nonnull String fieldName) { - return field.getName().equals(fieldName) && !field.getOptional() - && field.getType().getType() == DataSchema.Type.TYPEREF && Urn.class.isAssignableFrom(getUrnClass(field)); + /** Returns true if the non-optional field matches the field name and has a URN type. */ + public static boolean isValidUrnField( + @Nonnull RecordDataSchema.Field field, @Nonnull String fieldName) { + return field.getName().equals(fieldName) + && !field.getOptional() + && field.getType().getType() == DataSchema.Type.TYPEREF + && Urn.class.isAssignableFrom(getUrnClass(field)); } - /** - * Returns the Java class for an URN typeref field. - */ + /** Returns the Java class for an URN typeref field. 
*/ public static Class<Urn> getUrnClass(@Nonnull RecordDataSchema.Field field) { try { @SuppressWarnings("unchecked") final Class<Urn> clazz = - (Class<Urn>) Class.forName(((DataMap) field.getType().getProperties().get("java")).getString("class")); + (Class<Urn>) + Class.forName( + ((DataMap) field.getType().getProperties().get("java")).getString("class")); return clazz; } catch (ClassNotFoundException e) { throw new RuntimeException(e); @@ -108,52 +101,55 @@ public static Class<Urn> getUrnClass(@Nonnull RecordDataSchema.Field field) { } /** - * Similar to {@link #isValidUrnField(RecordDataSchema.Field, String)} but with a fixed field "urn". + * Similar to {@link #isValidUrnField(RecordDataSchema.Field, String)} but with a fixed field + * "urn". */ public static boolean isValidUrnField(@Nonnull RecordDataSchema.Field field) { return isValidUrnField(field, "urn"); } - /** - * Returns all the non-whitelisted, non-optional fields in a {@link RecordDataSchema}. - */ + /** Returns all the non-whitelisted, non-optional fields in a {@link RecordDataSchema}. */ @Nonnull - public static List<RecordDataSchema.Field> nonOptionalFields(@Nonnull RecordDataSchema schema, - @Nonnull Set<String> whitelistedFields) { - return schema.getFields().stream().filter(field -> { - if (!whitelistedFields.contains(field.getName())) { - if (!field.getOptional()) { - return true; - } - } - return false; - }).collect(Collectors.toList()); + public static List<RecordDataSchema.Field> nonOptionalFields( + @Nonnull RecordDataSchema schema, @Nonnull Set<String> whitelistedFields) { + return schema.getFields().stream() + .filter( + field -> { + if (!whitelistedFields.contains(field.getName())) { + if (!field.getOptional()) { + return true; + } + } + return false; + }) + .collect(Collectors.toList()); } - /** - * Returns all the non-whitelisted, optional fields in a {@link RecordDataSchema}. - */ + /** Returns all the non-whitelisted, optional fields in a {@link RecordDataSchema}. */ @Nonnull - public static List<RecordDataSchema.Field> optionalFields(@Nonnull RecordDataSchema schema, - @Nonnull Set<String> whitelistedFields) { - return schema.getFields().stream().filter(field -> { - if (!whitelistedFields.contains(field.getName())) { - if (field.getOptional()) { - return true; - } - } - return false; - }).collect(Collectors.toList()); + public static List<RecordDataSchema.Field> optionalFields( + @Nonnull RecordDataSchema schema, @Nonnull Set<String> whitelistedFields) { + return schema.getFields().stream() + .filter( + field -> { + if (!whitelistedFields.contains(field.getName())) { + if (field.getOptional()) { + return true; + } + } + return false; + }) + .collect(Collectors.toList()); } /** - * Return all the fields in a {@link RecordDataSchema} that are not using one of the allowed types. + * Return all the fields in a {@link RecordDataSchema} that are not using one of the allowed + * types. 
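
[A short sketch tying the ValidationUtils helpers together, under the assumption that `clazz` is a Pegasus-generated RecordTemplate.]

import com.datahub.util.validator.ValidationUtils;
import com.linkedin.data.schema.RecordDataSchema;
import com.linkedin.data.template.RecordTemplate;
import java.util.Collections;

final class ValidationUtilsExample {
  static void inspect(Class<? extends RecordTemplate> clazz) {
    RecordDataSchema schema = ValidationUtils.getRecordSchema(clazz);

    // Exactly one non-optional "urn" field of URN typeref type?
    boolean hasUrn =
        ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField);

    // Non-optional fields other than "urn" (mirrors EntityValidator's whitelist).
    ValidationUtils.nonOptionalFields(schema, Collections.singleton("urn"))
        .forEach(field -> System.out.println("non-optional: " + field.getName()));

    System.out.println("has urn: " + hasUrn);
  }
}
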
*/ @Nonnull - public static List<RecordDataSchema.Field> fieldsUsingInvalidType(@Nonnull RecordDataSchema schema, - @Nonnull Set<DataSchema.Type> allowedTypes) { - return schema.getFields() - .stream() + public static List<RecordDataSchema.Field> fieldsUsingInvalidType( + @Nonnull RecordDataSchema schema, @Nonnull Set<DataSchema.Type> allowedTypes) { + return schema.getFields().stream() .filter(field -> !allowedTypes.contains(getFieldOrArrayItemType(field))) .collect(Collectors.toList()); } @@ -164,8 +160,10 @@ public static boolean isUnionWithOnlyComplexMembers(UnionDataSchema unionDataSch @Nonnull private static DataSchema.Type getFieldOrArrayItemType(@Nonnull RecordDataSchema.Field field) { - DataSchema type = field.getType().getType() == DataSchema.Type.ARRAY - ? ((ArrayDataSchema) field.getType()).getItems() : field.getType(); + DataSchema type = + field.getType().getType() == DataSchema.Type.ARRAY + ? ((ArrayDataSchema) field.getType()).getItems() + : field.getType(); if (type.getType() == DataSchema.Type.TYPEREF) { return type.getDereferencedType(); } diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index 972f52b8824ce..f5a3c9c12ff70 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -2,42 +2,42 @@ import com.linkedin.common.urn.Urn; - -/** - * Static class containing commonly-used constants across DataHub services. - */ +/** Static class containing commonly-used constants across DataHub services. */ public class Constants { public static final String INTERNAL_DELEGATED_FOR_ACTOR_HEADER_NAME = "X-DataHub-Delegated-For"; public static final String INTERNAL_DELEGATED_FOR_ACTOR_TYPE = "X-DataHub-Delegated-For-"; public static final String DATAHUB_ACTOR = "urn:li:corpuser:datahub"; // Super user. - public static final String SYSTEM_ACTOR = "urn:li:corpuser:__datahub_system"; // DataHub internal service principal. + public static final String SYSTEM_ACTOR = + "urn:li:corpuser:__datahub_system"; // DataHub internal service principal. public static final String UNKNOWN_ACTOR = "urn:li:corpuser:UNKNOWN"; // Unknown principal. public static final Long ASPECT_LATEST_VERSION = 0L; public static final String UNKNOWN_DATA_PLATFORM = "urn:li:dataPlatform:unknown"; // !!!!!!! IMPORTANT !!!!!!! - // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. Without this the limit is + // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. + // Without this the limit is // whatever Jackson is defaulting to (5 MB currently). public static final String MAX_JACKSON_STRING_SIZE = "16000000"; - public static final String INGESTION_MAX_SERIALIZED_STRING_LENGTH = "INGESTION_MAX_SERIALIZED_STRING_LENGTH"; + public static final String INGESTION_MAX_SERIALIZED_STRING_LENGTH = + "INGESTION_MAX_SERIALIZED_STRING_LENGTH"; - /** - * System Metadata - */ + /** System Metadata */ public static final String DEFAULT_RUN_ID = "no-run-id-provided"; - // Forces indexing for no-ops, enabled for restore indices calls. Only considered in the no-op case + + // Forces indexing for no-ops, enabled for restore indices calls. 
Only considered in the no-op + // case public static final String FORCE_INDEXING_KEY = "forceIndexing"; - // Indicates an event source from an application with hooks that have already been processed and should not be reprocessed + // Indicates an event source from an application with hooks that have already been processed and + // should not be reprocessed public static final String APP_SOURCE = "appSource"; // App sources public static final String UI_SOURCE = "ui"; - /** - * Entities - */ + /** Entities */ public static final String CORP_USER_ENTITY_NAME = "corpuser"; + public static final String CORP_GROUP_ENTITY_NAME = "corpGroup"; public static final String DATASET_ENTITY_NAME = "dataset"; public static final String CHART_ENTITY_NAME = "chart"; @@ -74,11 +74,10 @@ public class Constants { public static final String DATA_PRODUCT_ENTITY_NAME = "dataProduct"; public static final String OWNERSHIP_TYPE_ENTITY_NAME = "ownershipType"; - /** - * Aspects - */ + /** Aspects */ // Common public static final String OWNERSHIP_ASPECT_NAME = "ownership"; + public static final String INSTITUTIONAL_MEMORY_ASPECT_NAME = "institutionalMemory"; public static final String DATA_PLATFORM_INSTANCE_ASPECT_NAME = "dataPlatformInstance"; public static final String BROWSE_PATHS_ASPECT_NAME = "browsePaths"; @@ -136,19 +135,22 @@ public class Constants { // Dashboard public static final String DASHBOARD_KEY_ASPECT_NAME = "dashboardKey"; public static final String DASHBOARD_INFO_ASPECT_NAME = "dashboardInfo"; - public static final String EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME = "editableDashboardProperties"; + public static final String EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME = + "editableDashboardProperties"; public static final String DASHBOARD_USAGE_STATISTICS_ASPECT_NAME = "dashboardUsageStatistics"; // Notebook public static final String NOTEBOOK_KEY_ASPECT_NAME = "notebookKey"; public static final String NOTEBOOK_INFO_ASPECT_NAME = "notebookInfo"; public static final String NOTEBOOK_CONTENT_ASPECT_NAME = "notebookContent"; - public static final String EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME = "editableNotebookProperties"; + public static final String EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME = + "editableNotebookProperties"; // DataFlow public static final String DATA_FLOW_KEY_ASPECT_NAME = "dataFlowKey"; public static final String DATA_FLOW_INFO_ASPECT_NAME = "dataFlowInfo"; - public static final String EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME = "editableDataFlowProperties"; + public static final String EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME = + "editableDataFlowProperties"; // DataJob public static final String DATA_JOB_KEY_ASPECT_NAME = "dataJobKey"; @@ -162,19 +164,22 @@ public class Constants { // DataPlatformInstance public static final String DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME = "dataPlatformInstanceKey"; - public static final String DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME = "dataPlatformInstanceProperties"; + public static final String DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME = + "dataPlatformInstanceProperties"; // ML Feature public static final String ML_FEATURE_KEY_ASPECT_NAME = "mlFeatureKey"; public static final String ML_FEATURE_PROPERTIES_ASPECT_NAME = "mlFeatureProperties"; - public static final String ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlFeatureProperties"; + public static final String ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlFeatureProperties"; // ML Feature Table public static final String ML_FEATURE_TABLE_KEY_ASPECT_NAME = 
"mlFeatureTableKey"; public static final String ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME = "mlFeatureTableProperties"; - public static final String ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlFeatureTableProperties"; + public static final String ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlFeatureTableProperties"; - //ML Model + // ML Model public static final String ML_MODEL_KEY_ASPECT_NAME = "mlModelKey"; public static final String ML_MODEL_PROPERTIES_ASPECT_NAME = "mlModelProperties"; public static final String ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlModelProperties"; @@ -192,12 +197,14 @@ public class Constants { // ML Model Group public static final String ML_MODEL_GROUP_KEY_ASPECT_NAME = "mlModelGroupKey"; public static final String ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME = "mlModelGroupProperties"; - public static final String ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlModelGroupProperties"; + public static final String ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlModelGroupProperties"; // ML Primary Key public static final String ML_PRIMARY_KEY_KEY_ASPECT_NAME = "mlPrimaryKeyKey"; public static final String ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME = "mlPrimaryKeyProperties"; - public static final String ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlPrimaryKeyProperties"; + public static final String ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlPrimaryKeyProperties"; // Policy public static final String DATAHUB_POLICY_INFO_ASPECT_NAME = "dataHubPolicyInfo"; @@ -212,15 +219,16 @@ public class Constants { // Container public static final String CONTAINER_KEY_ASPECT_NAME = "containerKey"; public static final String CONTAINER_PROPERTIES_ASPECT_NAME = "containerProperties"; - public static final String CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME = "editableContainerProperties"; + public static final String CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableContainerProperties"; public static final String CONTAINER_ASPECT_NAME = "container"; // parent container - // Glossary term + // Glossary term public static final String GLOSSARY_TERM_KEY_ASPECT_NAME = "glossaryTermKey"; public static final String GLOSSARY_TERM_INFO_ASPECT_NAME = "glossaryTermInfo"; public static final String GLOSSARY_RELATED_TERM_ASPECT_NAME = "glossaryRelatedTerms"; - // Glossary node + // Glossary node public static final String GLOSSARY_NODE_KEY_ASPECT_NAME = "glossaryNodeKey"; public static final String GLOSSARY_NODE_INFO_ASPECT_NAME = "glossaryNodeInfo"; @@ -304,24 +312,24 @@ public class Constants { public static final String CHANGE_EVENT_PLATFORM_EVENT_NAME = "entityChangeEvent"; - /** - * Retention - */ + /** Retention */ public static final String DATAHUB_RETENTION_ENTITY = "dataHubRetention"; + public static final String DATAHUB_RETENTION_ASPECT = "dataHubRetentionConfig"; public static final String DATAHUB_RETENTION_KEY_ASPECT = "dataHubRetentionKey"; - /** - * User Status - */ + + /** User Status */ public static final String CORP_USER_STATUS_ACTIVE = "ACTIVE"; - /** - * Task Runs - */ + /** Task Runs */ public static final String DATA_PROCESS_INSTANCE_ENTITY_NAME = "dataProcessInstance"; - public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = "dataProcessInstanceProperties"; - public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = "dataProcessInstanceRunEvent"; - public static final String DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME = 
"dataProcessInstanceRelationships"; + + public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = + "dataProcessInstanceProperties"; + public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = + "dataProcessInstanceRunEvent"; + public static final String DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME = + "dataProcessInstanceRelationships"; // Posts public static final String POST_INFO_ASPECT_NAME = "postInfo"; @@ -332,8 +340,8 @@ public class Constants { public static final String CLIENT_ID_ASPECT = "telemetryClientId"; // Step - public static final String DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME = "dataHubStepStateProperties"; - + public static final String DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME = + "dataHubStepStateProperties"; // Authorization public static final String REST_API_AUTHORIZATION_ENABLED_ENV = "REST_API_AUTHORIZATION_ENABLED"; @@ -357,6 +365,5 @@ public class Constants { // DAO public static final long LATEST_VERSION = 0; - private Constants() { - } + private Constants() {} } diff --git a/li-utils/src/main/java/com/linkedin/util/Configuration.java b/li-utils/src/main/java/com/linkedin/util/Configuration.java index cf2085839aefa..e0a1f181b48aa 100644 --- a/li-utils/src/main/java/com/linkedin/util/Configuration.java +++ b/li-utils/src/main/java/com/linkedin/util/Configuration.java @@ -1,34 +1,34 @@ package com.linkedin.util; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.util.Optional; import java.util.Properties; +import javax.annotation.Nonnull; public class Configuration { - private Configuration() { - } + private Configuration() {} - @Nonnull - public static Properties loadProperties(@Nonnull String configFile) { - Properties configuration = new Properties(); - try (InputStream inputStream = Configuration.class.getClassLoader().getResourceAsStream(configFile)) { - configuration.load(inputStream); - } catch (IOException e) { - throw new RuntimeException("Can't read file: " + configFile); - } - return configuration; + @Nonnull + public static Properties loadProperties(@Nonnull String configFile) { + Properties configuration = new Properties(); + try (InputStream inputStream = + Configuration.class.getClassLoader().getResourceAsStream(configFile)) { + configuration.load(inputStream); + } catch (IOException e) { + throw new RuntimeException("Can't read file: " + configFile); } + return configuration; + } - @Nonnull - public static String getEnvironmentVariable(@Nonnull String envVar) { - return System.getenv(envVar); - } + @Nonnull + public static String getEnvironmentVariable(@Nonnull String envVar) { + return System.getenv(envVar); + } - @Nonnull - public static String getEnvironmentVariable(@Nonnull String envVar, @Nonnull String defaultVal) { - return Optional.ofNullable(System.getenv(envVar)).orElse(defaultVal); - } + @Nonnull + public static String getEnvironmentVariable(@Nonnull String envVar, @Nonnull String defaultVal) { + return Optional.ofNullable(System.getenv(envVar)).orElse(defaultVal); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java index ab90b3e054a3b..22d8065844a8c 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java @@ -4,35 +4,35 @@ import java.net.URISyntaxException; public class Uri { - private final String _uri; + private final String _uri; - public Uri(String url) { - if (url 
== null) { - throw new NullPointerException("URL must be non-null"); - } - _uri = url; + public Uri(String url) { + if (url == null) { + throw new NullPointerException("URL must be non-null"); } + _uri = url; + } - @Override - public String toString() { - return _uri; - } + @Override + public String toString() { + return _uri; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Uri)) { - return false; - } else { - return _uri.equals(((Uri) obj)._uri); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Uri)) { + return false; + } else { + return _uri.equals(((Uri) obj)._uri); } + } - @Override - public int hashCode() { - return _uri.hashCode(); - } + @Override + public int hashCode() { + return _uri.hashCode(); + } - public URI toURI() throws URISyntaxException { - return new URI(_uri); - } + public URI toURI() throws URISyntaxException { + return new URI(_uri); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java index a23d2b08752d1..6a30bb22a73a3 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java @@ -5,15 +5,16 @@ import com.linkedin.data.template.TemplateOutputCastException; public class UriCoercer implements DirectCoercer<Uri> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new UriCoercer(), Uri.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new UriCoercer(), Uri.class); - @Override - public Object coerceInput(Uri object) throws ClassCastException { - return object.toString(); - } + @Override + public Object coerceInput(Uri object) throws ClassCastException { + return object.toString(); + } - @Override - public Uri coerceOutput(Object object) throws TemplateOutputCastException { - return new Uri((String) object); - } + @Override + public Uri coerceOutput(Object object) throws TemplateOutputCastException { + return new Uri((String) object); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java b/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java index 3e1950160cca2..17abf09361e36 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java @@ -4,35 +4,35 @@ import java.net.URISyntaxException; public class Url { - private final String _url; + private final String _url; - public Url(String url) { - if (url == null) { - throw new NullPointerException("URL must be non-null"); - } - _url = url; + public Url(String url) { + if (url == null) { + throw new NullPointerException("URL must be non-null"); } + _url = url; + } - @Override - public String toString() { - return _url; - } + @Override + public String toString() { + return _url; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Url)) { - return false; - } else { - return _url.equals(((Url) obj)._url); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Url)) { + return false; + } else { + return _url.equals(((Url) obj)._url); } + } - @Override - public int hashCode() { - return _url.hashCode(); - } + @Override + public int hashCode() { + return _url.hashCode(); + } - public URI toURI() throws URISyntaxException { - return new URI(_url); - } + public URI toURI() throws URISyntaxException { + return new URI(_url); + } } diff --git 
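
[A small sketch of the Uri wrapper's value semantics, using only the methods shown in the diff above.]

import com.linkedin.common.uri.Uri;
import java.net.URI;
import java.net.URISyntaxException;

final class UriExample {
  public static void main(String[] args) throws URISyntaxException {
    Uri a = new Uri("https://datahubproject.io/docs");
    Uri b = new Uri("https://datahubproject.io/docs");

    // Value semantics: equals and hashCode delegate to the wrapped string.
    System.out.println(a.equals(b)); // true

    // Conversion to java.net.URI validates syntax lazily, at call time.
    URI uri = a.toURI();
    System.out.println(uri.getHost());
  }
}
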
a/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java index 9424fffdd2f68..3bae43ee0ca6a 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java @@ -5,15 +5,16 @@ import com.linkedin.data.template.TemplateOutputCastException; public class UrlCoercer implements DirectCoercer<Url> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new UrlCoercer(), Url.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new UrlCoercer(), Url.class); - @Override - public Object coerceInput(Url object) throws ClassCastException { - return object.toString(); - } + @Override + public Object coerceInput(Url object) throws ClassCastException { + return object.toString(); + } - @Override - public Url coerceOutput(Object object) throws TemplateOutputCastException { - return new Url((String) object); - } + @Override + public Url coerceOutput(Object object) throws TemplateOutputCastException { + return new Url((String) object); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java index feb7cacd7a48a..0110471c9cdfd 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class AzkabanFlowUrn extends Urn { public static final String ENTITY_TYPE = "azkabanFlow"; @@ -48,7 +47,9 @@ public static AzkabanFlowUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new AzkabanFlowUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new AzkabanFlowUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (String) key.getAs(2, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -62,18 +63,20 @@ public static AzkabanFlowUrn deserialize(String rawUrn) throws URISyntaxExceptio } static { - Custom.registerCoercer(new DirectCoercer<AzkabanFlowUrn>() { - public Object coerceInput(AzkabanFlowUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<AzkabanFlowUrn>() { + public Object coerceInput(AzkabanFlowUrn object) throws ClassCastException { + return object.toString(); + } - public AzkabanFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return AzkabanFlowUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, AzkabanFlowUrn.class); + public AzkabanFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return AzkabanFlowUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + AzkabanFlowUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java 
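
[The coercer registration pattern reformatted here recurs across UriCoercer, UrlCoercer, and every *Urn class below; a self-contained sketch of it with a hypothetical Ticket wrapper type.]

import com.linkedin.data.template.Custom;
import com.linkedin.data.template.DirectCoercer;
import com.linkedin.data.template.TemplateOutputCastException;

// Hypothetical wrapper type used only to illustrate the pattern.
final class Ticket {
  private final String value;

  Ticket(String value) {
    this.value = value;
  }

  @Override
  public String toString() {
    return value;
  }

  // Runs once on class load, registering the coercer so Pegasus can map
  // between Ticket and its String representation.
  static {
    Custom.registerCoercer(
        new DirectCoercer<Ticket>() {
          @Override
          public Object coerceInput(Ticket object) throws ClassCastException {
            return object.toString();
          }

          @Override
          public Ticket coerceOutput(Object object) throws TemplateOutputCastException {
            return new Ticket((String) object);
          }
        },
        Ticket.class);
  }
}
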
index 662c89b12139f..f264bccbc5056 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class AzkabanJobUrn extends Urn { public static final String ENTITY_TYPE = "azkabanJob"; @@ -42,7 +41,8 @@ public static AzkabanJobUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new AzkabanJobUrn((AzkabanFlowUrn) key.getAs(0, AzkabanFlowUrn.class), + return new AzkabanJobUrn( + (AzkabanFlowUrn) key.getAs(0, AzkabanFlowUrn.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -57,18 +57,20 @@ public static AzkabanJobUrn deserialize(String rawUrn) throws URISyntaxException static { Custom.initializeCustomClass(AzkabanFlowUrn.class); - Custom.registerCoercer(new DirectCoercer<AzkabanJobUrn>() { - public Object coerceInput(AzkabanJobUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<AzkabanJobUrn>() { + public Object coerceInput(AzkabanJobUrn object) throws ClassCastException { + return object.toString(); + } - public AzkabanJobUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return AzkabanJobUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, AzkabanJobUrn.class); + public AzkabanJobUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return AzkabanJobUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + AzkabanJobUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java index 4840a2bf7b1e8..8193bd05b527a 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class ChartUrn extends Urn { public static final String ENTITY_TYPE = "chart"; @@ -42,7 +41,8 @@ public static ChartUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new ChartUrn((String)key.getAs(0, String.class), (String)key.getAs(1, String.class)); + return new ChartUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -55,18 +55,20 @@ public static ChartUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<ChartUrn>() { - public Object coerceInput(ChartUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<ChartUrn>() { + public Object coerceInput(ChartUrn object) throws ClassCastException { + return object.toString(); + } - public ChartUrn 
coerceOutput(Object object) throws TemplateOutputCastException { - try { - return ChartUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, ChartUrn.class); + public ChartUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return ChartUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + ChartUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java index da33ed2a625f1..0ed5b3514e786 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class CorpGroupUrn extends Urn { public static final String ENTITY_TYPE = "corpGroup"; @@ -31,7 +30,7 @@ public static CorpGroupUrn createFromString(String rawUrn) throws URISyntaxExcep } private static CorpGroupUrn decodeUrn(String groupName) throws Exception { - return new CorpGroupUrn(TupleKey.create(new Object[]{groupName}), groupName); + return new CorpGroupUrn(TupleKey.create(new Object[] {groupName}), groupName); } public static CorpGroupUrn createFromUrn(Urn urn) throws URISyntaxException { @@ -45,9 +44,10 @@ public static CorpGroupUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return decodeUrn((String)key.getAs(0, String.class)); + return decodeUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -58,18 +58,20 @@ public static CorpGroupUrn deserialize(String rawUrn) throws URISyntaxException } static { - Custom.registerCoercer(new DirectCoercer<CorpGroupUrn>() { - public Object coerceInput(CorpGroupUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<CorpGroupUrn>() { + public Object coerceInput(CorpGroupUrn object) throws ClassCastException { + return object.toString(); + } - public CorpGroupUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return CorpGroupUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, CorpGroupUrn.class); + public CorpGroupUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return CorpGroupUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + CorpGroupUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java index da527254bbe2c..701e18a015753 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java @@ -1,12 +1,9 @@ package 
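
[A round-trip sketch for CorpuserUrn; the username is illustrative, and the serialized form follows the urn:li:corpuser:... convention used by Constants.DATAHUB_ACTOR.]

import com.linkedin.common.urn.CorpuserUrn;
import java.net.URISyntaxException;

final class CorpuserUrnExample {
  public static void main(String[] args) throws URISyntaxException {
    // Round trip between the typed URN and its serialized form.
    CorpuserUrn user = new CorpuserUrn("jdoe");
    System.out.println(user); // urn:li:corpuser:jdoe

    CorpuserUrn parsed = CorpuserUrn.createFromString("urn:li:corpuser:jdoe");
    System.out.println(parsed.equals(user)); // true
  }
}
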
com.linkedin.common.urn; -import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import java.util.regex.Pattern; - public final class CorpuserUrn extends Urn { @@ -40,7 +37,8 @@ public static CorpuserUrn createFromUrn(Urn urn) throws URISyntaxException { try { return new CorpuserUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -51,19 +49,20 @@ public static CorpuserUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<CorpuserUrn>() { - public Object coerceInput(CorpuserUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<CorpuserUrn>() { + public Object coerceInput(CorpuserUrn object) throws ClassCastException { + return object.toString(); + } - public CorpuserUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return CorpuserUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, CorpuserUrn.class); + public CorpuserUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return CorpuserUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + CorpuserUrn.class); } - } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java index ed4b38fe2f2be..ceb06986989b5 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DashboardUrn extends Urn { public static final String ENTITY_TYPE = "dashboard"; @@ -42,7 +41,8 @@ public static DashboardUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DashboardUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new DashboardUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -55,18 +55,20 @@ public static DashboardUrn deserialize(String rawUrn) throws URISyntaxException } static { - Custom.registerCoercer(new DirectCoercer<DashboardUrn>() { - public Object coerceInput(DashboardUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DashboardUrn>() { + public Object coerceInput(DashboardUrn object) throws ClassCastException { + return object.toString(); + } - public DashboardUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DashboardUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new 
TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DashboardUrn.class); + public DashboardUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DashboardUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DashboardUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java index 40e6d796d1882..2df70eed13343 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DataFlowUrn extends Urn { public static final String ENTITY_TYPE = "dataFlow"; @@ -48,7 +47,9 @@ public static DataFlowUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataFlowUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new DataFlowUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (String) key.getAs(2, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -62,18 +63,20 @@ public static DataFlowUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<DataFlowUrn>() { - public Object coerceInput(DataFlowUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataFlowUrn>() { + public Object coerceInput(DataFlowUrn object) throws ClassCastException { + return object.toString(); + } - public DataFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataFlowUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataFlowUrn.class); + public DataFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataFlowUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataFlowUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java index 46579a40897a3..6d0f37d1796b8 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DataJobUrn extends Urn { public static final String ENTITY_TYPE = "dataJob"; @@ -42,8 +41,8 @@ public static DataJobUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataJobUrn((DataFlowUrn) key.getAs(0, DataFlowUrn.class), - (String) key.getAs(1, String.class)); + return new DataJobUrn( + (DataFlowUrn) key.getAs(0, DataFlowUrn.class), (String) key.getAs(1, 
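
[A construction sketch for DataFlowUrn and DataJobUrn; the meanings of the three DataFlowUrn key parts (orchestrator, flow id, cluster) are inferred from common DataHub usage rather than stated in this patch.]

import com.linkedin.common.urn.DataFlowUrn;
import com.linkedin.common.urn.DataJobUrn;

final class DataJobUrnExample {
  public static void main(String[] args) {
    // Three-part key, matching the three String TupleKey entries unpacked by
    // DataFlowUrn.createFromUrn above.
    DataFlowUrn flow = new DataFlowUrn("airflow", "daily_metrics", "prod");

    // A DataJobUrn nests the flow URN plus a job id, mirroring the two-part
    // TupleKey in DataJobUrn.createFromUrn.
    DataJobUrn job = new DataJobUrn(flow, "compute_rollup");
    System.out.println(job);
  }
}
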
String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -57,18 +56,20 @@ public static DataJobUrn deserialize(String rawUrn) throws URISyntaxException { static { Custom.initializeCustomClass(DataFlowUrn.class); - Custom.registerCoercer(new DirectCoercer<DataJobUrn>() { - public Object coerceInput(DataJobUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataJobUrn>() { + public Object coerceInput(DataJobUrn object) throws ClassCastException { + return object.toString(); + } - public DataJobUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataJobUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataJobUrn.class); + public DataJobUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataJobUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataJobUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java index 25d219ef2c39e..910e6b9c98e96 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java @@ -3,10 +3,8 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class DataPlatformUrn extends Urn { public static final String ENTITY_TYPE = "dataPlatform"; @@ -50,18 +48,20 @@ public static DataPlatformUrn deserialize(String rawUrn) throws URISyntaxExcepti } static { - Custom.registerCoercer(new DirectCoercer<DataPlatformUrn>() { - public Object coerceInput(DataPlatformUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataPlatformUrn>() { + public Object coerceInput(DataPlatformUrn object) throws ClassCastException { + return object.toString(); + } - public DataPlatformUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataPlatformUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataPlatformUrn.class); + public DataPlatformUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataPlatformUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataPlatformUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java index 2edfdae251b01..513ffa6d8cf44 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java @@ -4,12 +4,8 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import 
com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; -import static com.linkedin.common.urn.UrnUtils.toFabricType; - - public class DataProcessUrn extends Urn { public static final String ENTITY_TYPE = "dataProcess"; @@ -55,10 +51,13 @@ public static DataProcessUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataProcessUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new DataProcessUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -67,18 +66,20 @@ public static DataProcessUrn createFromUrn(Urn urn) throws URISyntaxException { static { Custom.initializeCustomClass(DataProcessUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer<DataProcessUrn>() { - public Object coerceInput(DataProcessUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataProcessUrn>() { + public Object coerceInput(DataProcessUrn object) throws ClassCastException { + return object.toString(); + } - public DataProcessUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataProcessUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataProcessUrn.class); + public DataProcessUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataProcessUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataProcessUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java index 3d4b7d71566be..14cbfaf02fbae 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java @@ -5,53 +5,49 @@ import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Standardized dataset field information identifier - */ +/** Standardized dataset field information identifier */ public class DatasetFieldUrn extends Urn { // uniquely identifies urn's key type public static final String ENTITY_TYPE = "datasetField"; - /** - * Dataset urn of the datasetFieldUrn - */ + /** Dataset urn of the datasetFieldUrn */ private final DatasetUrn _dataset; - /** - * Field of datasetFieldUrn - */ + /** Field of datasetFieldUrn */ private final String _fieldPath; static { Custom.initializeCustomClass(DatasetUrn.class); - Custom.registerCoercer(new DirectCoercer<DatasetFieldUrn>() { + Custom.registerCoercer( + new DirectCoercer<DatasetFieldUrn>() { - @Override - public String coerceInput(DatasetFieldUrn object) throws ClassCastException { - return object.toString(); - } + @Override + public 
String coerceInput(DatasetFieldUrn object) throws ClassCastException { + return object.toString(); + } - @Override - public DatasetFieldUrn coerceOutput(Object object) throws TemplateOutputCastException { - if (object instanceof String) { - try { - return DatasetFieldUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + @Override + public DatasetFieldUrn coerceOutput(Object object) throws TemplateOutputCastException { + if (object instanceof String) { + try { + return DatasetFieldUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + throw new TemplateOutputCastException( + (("Output '" + object) + + ("' is not a String, and cannot be coerced to " + + DatasetFieldUrn.class.getName()))); } - } - throw new TemplateOutputCastException((("Output '" + object) + ("' is not a String, and cannot be coerced to " - + DatasetFieldUrn.class.getName()))); - } - }, DatasetFieldUrn.class); + }, + DatasetFieldUrn.class); } - public DatasetFieldUrn(String dataPlatform, String datasetName, FabricType fabricType, String fieldPath) { + public DatasetFieldUrn( + String dataPlatform, String datasetName, FabricType fabricType, String fieldPath) { this(new DatasetUrn(new DataPlatformUrn(dataPlatform), datasetName, fabricType), fieldPath); } @@ -86,9 +82,11 @@ public static DatasetFieldUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DatasetFieldUrn((DatasetUrn) key.getAs(0, DatasetUrn.class), (String) key.getAs(1, String.class)); + return new DatasetFieldUrn( + (DatasetUrn) key.getAs(0, DatasetUrn.class), (String) key.getAs(1, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java index 3be084d1daff9..5f18ce5f1abe7 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java @@ -6,7 +6,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DatasetUrn extends Urn { public static final String ENTITY_TYPE = "dataset"; @@ -49,10 +48,13 @@ public static DatasetUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DatasetUrn((DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), - (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); + return new DatasetUrn( + (DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), + (String) key.getAs(1, String.class), + (FabricType) key.getAs(2, FabricType.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -66,18 +68,20 @@ public static DatasetUrn deserialize(String rawUrn) throws URISyntaxException { 
Custom.initializeCustomClass(DataPlatformUrn.class); Custom.initializeCustomClass(DatasetUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer<DatasetUrn>() { - public Object coerceInput(DatasetUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DatasetUrn>() { + public Object coerceInput(DatasetUrn object) throws ClassCastException { + return object.toString(); + } - public DatasetUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DatasetUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DatasetUrn.class); + public DatasetUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DatasetUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DatasetUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java index 597ae3386fec1..24fd7f26bf977 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class FabricUrn extends Urn { public static final String ENTITY_TYPE = "fabric"; @@ -45,18 +44,20 @@ public static FabricUrn createFromUrn(Urn urn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<FabricUrn>() { - public Object coerceInput(FabricUrn object) throws ClassCastException { - return object.toString(); - } - - public FabricUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return FabricUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, FabricUrn.class); + Custom.registerCoercer( + new DirectCoercer<FabricUrn>() { + public Object coerceInput(FabricUrn object) throws ClassCastException { + return object.toString(); + } + + public FabricUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return FabricUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + FabricUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java index 29ff1aa5fcdb3..7820eac21755d 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java @@ -3,66 +3,66 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class GlossaryNodeUrn extends Urn { - public static final String ENTITY_TYPE = "glossaryNode"; + public static final String ENTITY_TYPE = "glossaryNode"; - private final String _name; + private final String _name; - public GlossaryNodeUrn(String name) { - 
super(ENTITY_TYPE, TupleKey.create(name)); - this._name = name; - } + public GlossaryNodeUrn(String name) { + super(ENTITY_TYPE, TupleKey.create(name)); + this._name = name; + } - public String getNameEntity() { - return _name; - } + public String getNameEntity() { + return _name; + } - public static GlossaryNodeUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static GlossaryNodeUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static GlossaryNodeUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be 'glossaryNode'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 1) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys."); - } else { - try { - return new GlossaryNodeUrn((String) key.getAs(0, String.class)); - } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); - } - } + public static GlossaryNodeUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException(urn.toString(), "Urn entity type should be 'glossaryNode'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 1) { + throw new URISyntaxException(urn.toString(), "Invalid number of keys."); + } else { + try { + return new GlossaryNodeUrn((String) key.getAs(0, String.class)); + } catch (Exception var3) { + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } + } } + } - public static GlossaryNodeUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static GlossaryNodeUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer<GlossaryNodeUrn>() { - public Object coerceInput(GlossaryNodeUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer<GlossaryNodeUrn>() { + public Object coerceInput(GlossaryNodeUrn object) throws ClassCastException { + return object.toString(); + } - public GlossaryNodeUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return GlossaryNodeUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public GlossaryNodeUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return GlossaryNodeUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, GlossaryNodeUrn.class); - } - + } + }, + GlossaryNodeUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java index bf8ec131d410e..f7e3496fbc582 100644 --- 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java @@ -1,14 +1,9 @@ package com.linkedin.common.urn; -import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - -import java.lang.reflect.Array; import java.net.URISyntaxException; -import java.util.regex.Pattern; - public final class GlossaryTermUrn extends Urn { @@ -42,7 +37,8 @@ public static GlossaryTermUrn createFromUrn(Urn urn) throws URISyntaxException { try { return new GlossaryTermUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -53,19 +49,20 @@ public static GlossaryTermUrn deserialize(String rawUrn) throws URISyntaxExcepti } static { - Custom.registerCoercer(new DirectCoercer<GlossaryTermUrn>() { - public Object coerceInput(GlossaryTermUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<GlossaryTermUrn>() { + public Object coerceInput(GlossaryTermUrn object) throws ClassCastException { + return object.toString(); + } - public GlossaryTermUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return GlossaryTermUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, GlossaryTermUrn.class); + public GlossaryTermUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return GlossaryTermUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + GlossaryTermUrn.class); } - } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java index 8774ba36d07b2..5c05b74cb0038 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class MLFeatureUrn extends Urn { public static final String ENTITY_TYPE = "mlFeature"; @@ -43,7 +42,8 @@ public static MLFeatureUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new MLFeatureUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new MLFeatureUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -52,18 +52,20 @@ public static MLFeatureUrn createFromUrn(Urn urn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<MLFeatureUrn>() { - public Object coerceInput(MLFeatureUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<MLFeatureUrn>() { + public Object coerceInput(MLFeatureUrn object) throws 
ClassCastException { + return object.toString(); + } - public MLFeatureUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return MLFeatureUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, MLFeatureUrn.class); + public MLFeatureUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return MLFeatureUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + MLFeatureUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java index ded7f90dcc112..85680f5a3922f 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java @@ -1,15 +1,11 @@ package com.linkedin.common.urn; +import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import com.linkedin.common.FabricType; - -import static com.linkedin.common.urn.UrnUtils.toFabricType; - - public final class MLModelUrn extends Urn { public static final String ENTITY_TYPE = "mlModel"; @@ -52,8 +48,10 @@ public static MLModelUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new MLModelUrn((DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), - (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); + return new MLModelUrn( + (DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), + (String) key.getAs(1, String.class), + (FabricType) key.getAs(2, FabricType.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -68,18 +66,20 @@ public static MLModelUrn deserialize(String rawUrn) throws URISyntaxException { static { Custom.initializeCustomClass(DataPlatformUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer<MLModelUrn>() { - public Object coerceInput(MLModelUrn object) throws ClassCastException { - return object.toString(); - } - - public MLModelUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return MLModelUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, MLModelUrn.class); + Custom.registerCoercer( + new DirectCoercer<MLModelUrn>() { + public Object coerceInput(MLModelUrn object) throws ClassCastException { + return object.toString(); + } + + public MLModelUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return MLModelUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + MLModelUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java index c9d6c203d2ed8..00a0660bbf49d 100644 --- 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public class NotebookUrn extends Urn { public static final String ENTITY_TYPE = "notebook"; @@ -41,7 +40,8 @@ public static NotebookUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new NotebookUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new NotebookUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -54,18 +54,20 @@ public static NotebookUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<NotebookUrn>() { - public Object coerceInput(NotebookUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<NotebookUrn>() { + public Object coerceInput(NotebookUrn object) throws ClassCastException { + return object.toString(); + } - public NotebookUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return NotebookUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, NotebookUrn.class); + public NotebookUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return NotebookUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + NotebookUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java index 1375cf345b084..60cf2d4e16819 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java @@ -3,65 +3,67 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class TagUrn extends Urn { - public static final String ENTITY_TYPE = "tag"; + public static final String ENTITY_TYPE = "tag"; - private final String _name; + private final String _name; - public TagUrn(String name) { - super(ENTITY_TYPE, TupleKey.create(name)); - this._name = name; - } + public TagUrn(String name) { + super(ENTITY_TYPE, TupleKey.create(name)); + this._name = name; + } - public String getName() { - return _name; - } + public String getName() { + return _name; + } - public static TagUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static TagUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static TagUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new 
URISyntaxException(urn.toString(), "Urn entity type should be '" + urn.getEntityType() + "'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 1) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys: found " + key.size() + " expected 1."); - } else { - try { - return new TagUrn((String) key.getAs(0, String.class)); - } catch (Exception e) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); - } - } + public static TagUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException( + urn.toString(), "Urn entity type should be '" + urn.getEntityType() + "'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 1) { + throw new URISyntaxException( + urn.toString(), "Invalid number of keys: found " + key.size() + " expected 1."); + } else { + try { + return new TagUrn((String) key.getAs(0, String.class)); + } catch (Exception e) { + throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } + } } + } - public static TagUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static TagUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer<TagUrn>() { - public Object coerceInput(TagUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer<TagUrn>() { + public Object coerceInput(TagUrn object) throws ClassCastException { + return object.toString(); + } - public TagUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return TagUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public TagUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return TagUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, TagUrn.class); - } + } + }, + TagUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java index 5b348b7d9b1a9..ecdd4f754c4ea 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java @@ -3,68 +3,69 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class TestEntityUrn extends Urn { - public static final String ENTITY_TYPE = "testEntity"; + public static final String ENTITY_TYPE = "testEntity"; - private final String _keyPart1; - private final String _keyPart2; - private final String _keyPart3; + private final String _keyPart1; + private final String _keyPart2; + private final String _keyPart3; - public TestEntityUrn(String keyPart1, String keyPart2, String keyPart3) { - super(ENTITY_TYPE, TupleKey.create(keyPart1, keyPart2, keyPart3)); - this._keyPart1 = keyPart1; - this._keyPart2 = keyPart2; - 
this._keyPart3 = keyPart3; - } + public TestEntityUrn(String keyPart1, String keyPart2, String keyPart3) { + super(ENTITY_TYPE, TupleKey.create(keyPart1, keyPart2, keyPart3)); + this._keyPart1 = keyPart1; + this._keyPart2 = keyPart2; + this._keyPart3 = keyPart3; + } - public static TestEntityUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static TestEntityUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static TestEntityUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be '" + ENTITY_TYPE + " got " + urn.getEntityType() + "'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 3) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys: found " + key.size() + " expected 3."); - } else { - try { - return new TestEntityUrn( - key.getAs(0, String.class), - key.getAs(1, String.class), - key.getAs(2, String.class)); - } catch (Exception e) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); - } - } + public static TestEntityUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException( + urn.toString(), + "Urn entity type should be '" + ENTITY_TYPE + " got " + urn.getEntityType() + "'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 3) { + throw new URISyntaxException( + urn.toString(), "Invalid number of keys: found " + key.size() + " expected 3."); + } else { + try { + return new TestEntityUrn( + key.getAs(0, String.class), key.getAs(1, String.class), key.getAs(2, String.class)); + } catch (Exception e) { + throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } + } } + } - public static TestEntityUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static TestEntityUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer<TestEntityUrn>() { - public Object coerceInput(TestEntityUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer<TestEntityUrn>() { + public Object coerceInput(TestEntityUrn object) throws ClassCastException { + return object.toString(); + } - public TestEntityUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return TestEntityUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public TestEntityUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return TestEntityUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, TestEntityUrn.class); - } + } + }, + TestEntityUrn.class); + } } diff --git 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java index c26e0d2571b33..f847252e28836 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java @@ -8,11 +8,10 @@ import java.util.Collections; import java.util.List; - /** - * Represents the entity key portion of a Urn, encoded as a tuple of Strings. - * A single-element tuple is encoded simply as the value of that element. A tuple with multiple - * elements is encoded as a parenthesized list of strings, comma-delimited. + * Represents the entity key portion of a Urn, encoded as a tuple of Strings. A single-element tuple + * is encoded simply as the value of that element. A tuple with multiple elements is encoded as a + * parenthesized list of strings, comma-delimited. */ public class TupleKey { public static final char START_TUPLE = '('; @@ -31,27 +30,26 @@ public TupleKey(List<String> tuple) { /** * Constructs a {@code TupleKey} given a list of tuple parts. - * <p> - * When {@code calledFromExternal} is {@code false}, it means the constructor - * was called from within this class, where we can ensure our implementation - * satisfies some constraints and skip some work. - * <p> - * The work we skip is checking that no tuple parts are null and wrapping the - * list with an unmodifiable view. - * <p> - * For context, an earlier performance optimization introduced from Guava the - * {@code ImmutableList}, which gives both of that for free. Since then, we - * have encountered complications with Guava (specifically, Hadoop at the time - * of this writing requires using Guava 11 -- see LIHADOOP-44200). In order to - * resolve that with minimal effect, we copy this behavior here. - * <p> - * Whether this optimization is meaningful can be examined later, if time is - * permitting, or {@code List#copyOf} from JDK 10 can be used to recover the - * benefits more elegantly when it is available for us to use. + * + * <p>When {@code calledFromExternal} is {@code false}, it means the constructor was called from + * within this class, where we can ensure our implementation satisfies some constraints and skip + * some work. + * + * <p>The work we skip is checking that no tuple parts are null and wrapping the list with an + * unmodifiable view. + * + * <p>For context, an earlier performance optimization introduced from Guava the {@code + * ImmutableList}, which gives both of that for free. Since then, we have encountered + * complications with Guava (specifically, Hadoop at the time of this writing requires using Guava + * 11 -- see LIHADOOP-44200). In order to resolve that with minimal effect, we copy this behavior + * here. + * + * <p>Whether this optimization is meaningful can be examined later, if time is permitting, or + * {@code List#copyOf} from JDK 10 can be used to recover the benefits more elegantly when it is + * available for us to use. * * @param tuple tuple parts - * @param calledFromExternal whether the constructions is invoked from outside - * of this class + * @param calledFromExternal whether the constructions is invoked from outside of this class */ private TupleKey(List<String> tuple, boolean calledFromExternal) { _tuple = calledFromExternal ? Collections.unmodifiableList(checkStringsNotNull(tuple)) : tuple; @@ -74,9 +72,8 @@ public static TupleKey createWithOneKeyPart(String input) { } /** - * Create a tuple key from a sequence of Objects. 
The resulting tuple - * consists of the sequence of String values resulting from calling .toString() on each - * object in the input sequence + * Create a tuple key from a sequence of Objects. The resulting tuple consists of the sequence of + * String values resulting from calling .toString() on each object in the input sequence * * @param tuple - a sequence of Objects to be represented in the tuple * @return - a TupleKey representation of the object sequence @@ -99,9 +96,8 @@ public static TupleKey create(Object... tuple) { } /** - * Create a tuple key from a sequence of Objects. The resulting tuple - * consists of the sequence of String values resulting from calling .toString() on each - * object in the input sequence + * Create a tuple key from a sequence of Objects. The resulting tuple consists of the sequence of + * String values resulting from calling .toString() on each object in the input sequence * * @param tuple - a sequence of Objects to be represented in the tuple * @return - a TupleKey representation of the object sequence @@ -130,7 +126,8 @@ public String get(int index) { * Return a tuple element coerced to a specific type * * @param index - the index of the tuple element to be returned - * @param clazz - the Class object for the return type. Must be String, Short, Boolean, Integer, Long, or an Enum subclass + * @param clazz - the Class object for the return type. Must be String, Short, Boolean, Integer, + * Long, or an Enum subclass * @param <T> - the desired type for the returned object. * @return The specified element of the tuple, coerced to the specified type T. */ @@ -166,9 +163,7 @@ public <T> T getAs(int index, Class<T> clazz) { return rv; } - /** - * Helper method to capture E. - */ + /** Helper method to capture E. */ private <E extends Enum<E>> Enum<E> getEnumValue(Class<?> clazz, String value) { @SuppressWarnings("unchecked") final Class<E> enumClazz = (Class<E>) clazz.asSubclass(Enum.class); @@ -228,6 +223,7 @@ public static TupleKey fromString(String s) throws URISyntaxException { /** * Create a tuple key from a string starting at the given index. + * * @param s raw urn string or urn type specific string. * @param startIndex index where urn type specific string starts. * @return entity tuple key. @@ -237,7 +233,8 @@ public static TupleKey fromString(String s, int startIndex) throws URISyntaxExce return new TupleKey(parseKeyParts(s, startIndex), false); } - private static List<String> parseKeyParts(String input, int startIndex) throws URISyntaxException { + private static List<String> parseKeyParts(String input, int startIndex) + throws URISyntaxException { if (startIndex >= input.length()) { return Collections.emptyList(); } @@ -270,7 +267,7 @@ private static List<String> parseKeyParts(String input, int startIndex) throws U List<String> parts = new ArrayList<>(3); int numStartedParenPairs = 1; // We know we have at least one starting paren - int partStart = startIndex + 1; // +1 to skip opening paren + int partStart = startIndex + 1; // +1 to skip opening paren for (int i = startIndex + 1; i < input.length(); i++) { char c = input.charAt(i); if (c == START_TUPLE) { @@ -302,7 +299,8 @@ private static List<String> parseKeyParts(String input, int startIndex) throws U throw new URISyntaxException(input, "mismatched paren nesting"); } - int lastPartEnd = input.charAt(input.length() - 1) == END_TUPLE ? input.length() - 1 : input.length(); + int lastPartEnd = + input.charAt(input.length() - 1) == END_TUPLE ? 
input.length() - 1 : input.length(); if (lastPartEnd - partStart <= 0) { throw new URISyntaxException(input, "empty part disallowed"); @@ -347,4 +345,4 @@ private static List<String> checkStringsNotNull(List<String> list) { } return list; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java index 84231fdf3be4a..e7ae51b57671f 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java @@ -8,34 +8,30 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nullable; - /** - * Represents a URN (Uniform Resource Name) for a Linkedin entity, in the spirit of RFC 2141. - * Our default URN format uses the non-standard namespace identifier "li", and hence default URNs - * begin with "urn:li:". Note that the namespace according to - * <a href="https://www.ietf.org/rfc/rfc2141.txt">RFC 2141</a> [Section 2.1] is case-insensitive and + * Represents a URN (Uniform Resource Name) for a Linkedin entity, in the spirit of RFC 2141. Our + * default URN format uses the non-standard namespace identifier "li", and hence default URNs begin + * with "urn:li:". Note that the namespace according to <a + * href="https://www.ietf.org/rfc/rfc2141.txt">RFC 2141</a> [Section 2.1] is case-insensitive and * for safety we only allow lower-case letters in our implementation. * - * <p>Our URNs all consist of an "entity type", which denotes an internal namespace for the resource, - * as well as an entity key, formatted as a tuple of parts. The full format of a URN is: + * <p>Our URNs all consist of an "entity type", which denotes an internal namespace for the + * resource, as well as an entity key, formatted as a tuple of parts. The full format of a URN is: * * <p><URN> ::= urn:<namespace>:<entityType>:<entityKey> * - * <p>The entity key is represented as a tuple of strings. If the tuple is of length 1, the - * key is encoded directly. If the tuple has multiple parts, the parts are enclosed in - * parenthesizes and comma-delimited, e.g., a URN whose key is the tuple [1, 2, 3] would be - * encoded as: + * <p>The entity key is represented as a tuple of strings. If the tuple is of length 1, the key is + * encoded directly. If the tuple has multiple parts, the parts are enclosed in parenthesizes and + * comma-delimited, e.g., a URN whose key is the tuple [1, 2, 3] would be encoded as: * * <p>urn:li:example:(1,2,3) */ public class Urn { /** - * - * @deprecated Don't create the Urn string manually, use Typed Urns or {@link #create(String entityType, Object... - * tupleParts)} + * @deprecated Don't create the Urn string manually, use Typed Urns or {@link #create(String + * entityType, Object... tupleParts)} */ - @Deprecated - public static final String URN_PREFIX = "urn:li:"; + @Deprecated public static final String URN_PREFIX = "urn:li:"; private static final String URN_START = "urn:"; private static final String DEFAULT_NAMESPACE = "li"; @@ -46,29 +42,28 @@ public class Urn { // Used to speed up toString() in the common case where the Urn is built up // from parsing an input string. - @Nullable - private String _cachedStringUrn; + @Nullable private String _cachedStringUrn; static { Custom.registerCoercer(new UrnCoercer(), Urn.class); } /** - * Customized interner for all strings that may be used for _entityType. 
- * Urn._entityType is by nature a pretty small set of values, such as "member", - * "company" etc. Due to this fact, when an app creates and keeps in memory a - * large number of Urn's, it may end up with a very big number of identical strings. - * Thus it's worth saving memory by interning _entityType when an Urn is instantiated. - * String.intern() would be a natural choice, but it takes a few microseconds, and - * thus may become too expensive when many (temporary) Urns are generated in very - * quick succession. Thus we use a faster CHM below. Compared to the internal table - * used by String.intern() it has a bigger memory overhead per each interned string, - * but for a small set of canonical strings it doesn't matter. + * Customized interner for all strings that may be used for _entityType. Urn._entityType is by + * nature a pretty small set of values, such as "member", "company" etc. Due to this fact, when an + * app creates and keeps in memory a large number of Urn's, it may end up with a very big number + * of identical strings. Thus it's worth saving memory by interning _entityType when an Urn is + * instantiated. String.intern() would be a natural choice, but it takes a few microseconds, and + * thus may become too expensive when many (temporary) Urns are generated in very quick + * succession. Thus we use a faster CHM below. Compared to the internal table used by + * String.intern() it has a bigger memory overhead per each interned string, but for a small set + * of canonical strings it doesn't matter. */ private static final Map<String, String> ENTITY_TYPE_INTERNER = new ConcurrentHashMap<>(); /** * Create a Urn given its raw String representation. + * * @param rawUrn - the String representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -77,10 +72,7 @@ public Urn(String rawUrn) throws URISyntaxException { _cachedStringUrn = rawUrn; if (!rawUrn.startsWith(URN_START)) { - throw new URISyntaxException( - rawUrn, - "Urn doesn't start with 'urn:'. Urn: " + rawUrn, - 0); + throw new URISyntaxException(rawUrn, "Urn doesn't start with 'urn:'. Urn: " + rawUrn, 0); } int secondColonIndex = rawUrn.indexOf(':', URN_START.length() + 1); @@ -89,9 +81,7 @@ public Urn(String rawUrn) throws URISyntaxException { // First char of entityType must be [a-z] if (!charIsLowerCaseAlphabet(rawUrn, secondColonIndex + 1)) { throw new URISyntaxException( - rawUrn, - "First char of entityType must be [a-z]! Urn: " + rawUrn, - secondColonIndex + 1); + rawUrn, "First char of entityType must be [a-z]! Urn: " + rawUrn, secondColonIndex + 1); } int thirdColonIndex = rawUrn.indexOf(':', secondColonIndex + 2); @@ -101,8 +91,7 @@ public Urn(String rawUrn) throws URISyntaxException { _entityType = rawUrn.substring(secondColonIndex + 1); if (!charsAreWordClass(_entityType)) { throw new URISyntaxException( - rawUrn, - "entityType must have only [a-zA-Z0-9] chars. Urn: " + rawUrn); + rawUrn, "entityType must have only [a-zA-Z0-9] chars. Urn: " + rawUrn); } _entityKey = new TupleKey(); return; @@ -111,15 +100,13 @@ public Urn(String rawUrn) throws URISyntaxException { String entityType = rawUrn.substring(secondColonIndex + 1, thirdColonIndex); if (!charsAreWordClass(entityType)) { throw new URISyntaxException( - rawUrn, - "entityType must have only [a-zA-Z_0-9] chars. Urn: " + rawUrn); + rawUrn, "entityType must have only [a-zA-Z_0-9] chars. 
Urn: " + rawUrn); } int numEntityKeyChars = rawUrn.length() - (thirdColonIndex + 1); if (numEntityKeyChars <= 0) { throw new URISyntaxException( - rawUrn, - "Urns with empty entityKey are not allowed. Urn: " + rawUrn); + rawUrn, "Urns with empty entityKey are not allowed. Urn: " + rawUrn); } _entityType = internEntityType(entityType); @@ -135,8 +122,8 @@ public Urn(String rawUrn) throws URISyntaxException { } /** - * Create a Urn from an entity type and an encoded String key. The key is converted to a - * Tuple by parsing using @see TupleKey#fromString + * Create a Urn from an entity type and an encoded String key. The key is converted to a Tuple by + * parsing using @see TupleKey#fromString * * @param entityType - the entity type for the Urn * @param typeSpecificString - the encoded string representation of a TupleKey @@ -158,9 +145,8 @@ public Urn(String namespace, String entityType, TupleKey entityKey) { } /** - * DEPRECATED - use {@link #createFromTuple(String, Object...)} - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * DEPRECATED - use {@link #createFromTuple(String, Object...)} Create a Urn from an entity type + * and a sequence of key parts. The key parts are converted to a tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -172,9 +158,9 @@ public static Urn create(String entityType, Object... tupleParts) { } /** - * DEPRECATED - use {@link #createFromTuple(String, java.util.Collection)} - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * DEPRECATED - use {@link #createFromTuple(String, java.util.Collection)} Create a Urn from an + * entity type and a sequence of key parts. The key parts are converted to a tuple using @see + * TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -186,8 +172,8 @@ public static Urn create(String entityType, Collection<?> tupleParts) { } /** - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an entity type and a sequence of key parts. The key parts are converted to a + * tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -198,21 +184,22 @@ public static Urn createFromTuple(String entityType, Object... tupleParts) { } /** - * Create a Urn from an namespace, entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an namespace, entity type and a sequence of key parts. The key parts are + * converted to a tuple using @see TupleKey#create * * @param namespace - The namespace of this urn. * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn * @return - a new Urn object */ - public static Urn createFromTupleWithNamespace(String namespace, String entityType, Object... tupleParts) { + public static Urn createFromTupleWithNamespace( + String namespace, String entityType, Object... 
tupleParts) { return new Urn(namespace, entityType, TupleKey.create(tupleParts)); } /** - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an entity type and a sequence of key parts. The key parts are converted to a + * tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -224,6 +211,7 @@ public static Urn createFromTuple(String entityType, Collection<?> tupleParts) { /** * Create a Urn given its raw String representation. + * * @param rawUrn - the String representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -233,6 +221,7 @@ public static Urn createFromString(String rawUrn) throws URISyntaxException { /** * Create a Urn given its raw CharSequence representation. + * * @param rawUrn - the Char Sequence representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -242,8 +231,8 @@ public static Urn createFromCharSequence(CharSequence rawUrn) throws URISyntaxEx } /** - * Create a Urn from an entity type and an encoded String key. The key is converted to a - * Tuple by parsing using @see TupleKey#fromString + * Create a Urn from an entity type and an encoded String key. The key is converted to a Tuple by + * parsing using @see TupleKey#fromString * * @param entityType - the entity type for the Urn * @param typeSpecificString - the encoded string representation of a TupleKey @@ -298,8 +287,8 @@ public Urn getIdAsUrn() { } /** - * Return the namespace-specific string portion of this URN, i.e., - * everything following the "urn:<namespace>:" prefix. + * Return the namespace-specific string portion of this URN, i.e., everything following the + * "urn:<namespace>:" prefix. * * @return The namespace-specific string portion of this URN */ @@ -344,28 +333,21 @@ public int hashCode() { return result; } - private static String validateAndExtractNamespace(String rawUrn, - int secondColonIndex) + private static String validateAndExtractNamespace(String rawUrn, int secondColonIndex) throws URISyntaxException { if (!charIsLowerCaseAlphabet(rawUrn, URN_START.length())) { throw new URISyntaxException( - rawUrn, - "First char of Urn namespace must be [a-z]! Urn: " + rawUrn, - URN_START.length()); + rawUrn, "First char of Urn namespace must be [a-z]! Urn: " + rawUrn, URN_START.length()); } if (secondColonIndex == -1) { - throw new URISyntaxException( - rawUrn, - "Missing second ':' char. Urn: " + rawUrn); + throw new URISyntaxException(rawUrn, "Missing second ':' char. Urn: " + rawUrn); } int namespaceLen = secondColonIndex - URN_START.length(); if (namespaceLen > 32) { throw new URISyntaxException( - rawUrn, - "Namespace length > 32 chars. Urn: " + rawUrn, - secondColonIndex); + rawUrn, "Namespace length > 32 chars. Urn: " + rawUrn, secondColonIndex); } if (namespaceLen == 2 @@ -377,9 +359,7 @@ private static String validateAndExtractNamespace(String rawUrn, String namespace = rawUrn.substring(URN_START.length(), secondColonIndex); if (!charsAreValidNamespace(namespace)) { - throw new URISyntaxException( - rawUrn, - "Chars in namespace must be [a-z0-9-]!. Urn: " + rawUrn); + throw new URISyntaxException(rawUrn, "Chars in namespace must be [a-z0-9-]!. 
Urn: " + rawUrn); } return namespace; } @@ -414,17 +394,17 @@ private static boolean charsAreWordClass(String input) { char c = input.charAt(index); // Not using Character.isLowerCase etc on purpose because that is // unicode-aware and we only need ASCII. Handling only ASCII is faster. - if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') - || (c >= '0' && c <= '9') || c == '_')) { + if (!((c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z') + || (c >= '0' && c <= '9') + || c == '_')) { return false; } } return true; } - /** - * Intern a string to be assigned to the _entityType field. - */ + /** Intern a string to be assigned to the _entityType field. */ private static String internEntityType(String et) { // Most of the times this method is called, the canonical string is already // in the table, so let's do a quick get() first. @@ -436,4 +416,4 @@ private static String internEntityType(String et) { canonicalET = ENTITY_TYPE_INTERNER.putIfAbsent(et, et); return canonicalET != null ? canonicalET : et; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java index a1bd54a995d65..e04796690db77 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java @@ -10,22 +10,21 @@ import java.net.URISyntaxException; public class UrnCoercer implements DirectCoercer<Urn> { - public UrnCoercer() { - } + public UrnCoercer() {} - public Object coerceInput(Urn object) throws ClassCastException { - return object.toString(); - } + public Object coerceInput(Urn object) throws ClassCastException { + return object.toString(); + } - public Urn coerceOutput(Object object) throws TemplateOutputCastException { - if (object.getClass() != String.class) { - throw new TemplateOutputCastException("Urn not backed by String"); - } else { - try { - return Urn.createFromString((String)object); - } catch (URISyntaxException use) { - throw new TemplateOutputCastException("Invalid URN syntax: " + use.getMessage(), use); - } - } + public Urn coerceOutput(Object object) throws TemplateOutputCastException { + if (object.getClass() != String.class) { + throw new TemplateOutputCastException("Urn not backed by String"); + } else { + try { + return Urn.createFromString((String) object); + } catch (URISyntaxException use) { + throw new TemplateOutputCastException("Invalid URN syntax: " + use.getMessage(), use); + } } + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java index b68e429a5202c..25cb5475d7299 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java @@ -2,75 +2,75 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.FabricType; - import java.net.URISyntaxException; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class UrnUtils { - private static final CorpuserUrn UNKNOWN_ACTOR_URN = new CorpuserUrn("unknown"); + private static final CorpuserUrn UNKNOWN_ACTOR_URN = new CorpuserUrn("unknown"); - private UrnUtils() { - } + private UrnUtils() {} - /** - * Convert platform + dataset + origin into DatasetUrn - * @param platformName String, e.g. hdfs, oracle - * @param datasetName String, e.g. 
/jobs/xxx, ABOOK.ADDRESS - * @param origin PROD, CORP, EI, DEV - * @return DatasetUrn - */ - @Nonnull - public static DatasetUrn toDatasetUrn(@Nonnull String platformName, @Nonnull String datasetName, - @Nonnull String origin) { - return new DatasetUrn(new DataPlatformUrn(platformName), datasetName, toFabricType(origin)); - } + /** + * Convert platform + dataset + origin into DatasetUrn + * + * @param platformName String, e.g. hdfs, oracle + * @param datasetName String, e.g. /jobs/xxx, ABOOK.ADDRESS + * @param origin PROD, CORP, EI, DEV + * @return DatasetUrn + */ + @Nonnull + public static DatasetUrn toDatasetUrn( + @Nonnull String platformName, @Nonnull String datasetName, @Nonnull String origin) { + return new DatasetUrn(new DataPlatformUrn(platformName), datasetName, toFabricType(origin)); + } - /** - * Convert fabric String to FabricType - * @param fabric PROD, CORP, EI, DEV, LIT, PRIME - * @return FabricType - */ - @Nonnull - public static FabricType toFabricType(@Nonnull String fabric) { - switch (fabric.toUpperCase()) { - case "PROD": - return FabricType.PROD; - case "CORP": - return FabricType.CORP; - case "EI": - return FabricType.EI; - case "DEV": - return FabricType.DEV; - default: - throw new IllegalArgumentException("Unsupported Fabric Type: " + fabric); - } + /** + * Convert fabric String to FabricType + * + * @param fabric PROD, CORP, EI, DEV, LIT, PRIME + * @return FabricType + */ + @Nonnull + public static FabricType toFabricType(@Nonnull String fabric) { + switch (fabric.toUpperCase()) { + case "PROD": + return FabricType.PROD; + case "CORP": + return FabricType.CORP; + case "EI": + return FabricType.EI; + case "DEV": + return FabricType.DEV; + default: + throw new IllegalArgumentException("Unsupported Fabric Type: " + fabric); } + } - public static Urn getUrn(String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve entity with urn %s, invalid urn", urnStr)); - } + public static Urn getUrn(String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve entity with urn %s, invalid urn", urnStr)); } + } - /** - * Get audit stamp without time. If actor is null, set as Unknown Application URN. - * @param actor Urn - * @return AuditStamp - */ - @Nonnull - public static AuditStamp getAuditStamp(@Nullable Urn actor) { - return new AuditStamp().setActor(getActorOrDefault(actor)); - } + /** + * Get audit stamp without time. If actor is null, set as Unknown Application URN. + * + * @param actor Urn + * @return AuditStamp + */ + @Nonnull + public static AuditStamp getAuditStamp(@Nullable Urn actor) { + return new AuditStamp().setActor(getActorOrDefault(actor)); + } - /** - * Return actor URN, if input actor is null, return Unknown Application URN. - */ - @Nonnull - public static Urn getActorOrDefault(@Nullable Urn actor) { - return actor != null ? actor : UNKNOWN_ACTOR_URN; - } + /** Return actor URN, if input actor is null, return Unknown Application URN. */ + @Nonnull + public static Urn getActorOrDefault(@Nullable Urn actor) { + return actor != null ? 
actor : UNKNOWN_ACTOR_URN; + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java index fb3d79964f71d..24026f0287b22 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java @@ -7,12 +7,11 @@ import com.linkedin.data.schema.validator.ValidatorContext; import java.net.URISyntaxException; - /** * Rest.li Validator responsible for ensuring that {@link Urn} objects are well-formed. * - * Note that this validator does not validate the integrity of strongly typed urns, - * or validate Urn objects against their associated key aspect. + * <p>Note that this validator does not validate the integrity of strongly typed urns, or validate + * Urn objects against their associated key aspect. */ public class UrnValidator implements Validator { @Override @@ -22,9 +21,13 @@ public void validate(ValidatorContext context) { try { Urn.createFromString((String) context.dataElement().getValue()); } catch (URISyntaxException e) { - context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid", context.dataElement().getValue())); + context.addResult( + new Message( + context.dataElement().path(), + "\"Provided urn %s\" is invalid", + context.dataElement().getValue())); context.setHasFix(false); } } } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java index d5b7a7da456a9..2742d13fb4dba 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java @@ -2,54 +2,53 @@ public class VersionedUrn { - private final String _urn; - private final String _versionStamp; - - public VersionedUrn(String urn, String versionStamp) { - _urn = urn; - _versionStamp = versionStamp; - } - - public String getUrn() { - return _urn; - } - - public String getVersionStamp() { - return _versionStamp; - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals(Object obj) { - if (obj instanceof VersionedUrn) { - VersionedUrn other = (VersionedUrn) obj; - return equals(_urn, other._urn) && equals(_versionStamp, other._versionStamp); - } - return false; - } - - @Override - public int hashCode() { - int h1 = _urn != null ? _urn.hashCode() : 0; - int h2 = _versionStamp != null ? 
_versionStamp.hashCode() : 0; - return 31 * h1 + h2; - } - - @Override - public String toString() { - return "(" + _urn + " , " + _versionStamp + ")"; - } - - private static boolean equals(Object o1, Object o2) { - if (o1 != null) { - return o1.equals(o2); - } - return o2 == null; - } - - /*convenient method*/ - public static VersionedUrn of(String urn, String versionStamp) { - return new VersionedUrn(urn, versionStamp); - } - + private final String _urn; + private final String _versionStamp; + + public VersionedUrn(String urn, String versionStamp) { + _urn = urn; + _versionStamp = versionStamp; + } + + public String getUrn() { + return _urn; + } + + public String getVersionStamp() { + return _versionStamp; + } + + @SuppressWarnings("unchecked") + @Override + public boolean equals(Object obj) { + if (obj instanceof VersionedUrn) { + VersionedUrn other = (VersionedUrn) obj; + return equals(_urn, other._urn) && equals(_versionStamp, other._versionStamp); + } + return false; + } + + @Override + public int hashCode() { + int h1 = _urn != null ? _urn.hashCode() : 0; + int h2 = _versionStamp != null ? _versionStamp.hashCode() : 0; + return 31 * h1 + h2; + } + + @Override + public String toString() { + return "(" + _urn + " , " + _versionStamp + ")"; + } + + private static boolean equals(Object o1, Object o2) { + if (o1 != null) { + return o1.equals(o2); + } + return o2 == null; + } + + /*convenient method*/ + public static VersionedUrn of(String urn, String versionStamp) { + return new VersionedUrn(urn, versionStamp); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java index f7e0b6c99e334..2bae15bd19354 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java @@ -5,11 +5,9 @@ import java.util.SortedMap; import org.apache.commons.lang3.StringUtils; - public class VersionedUrnUtils { - private VersionedUrnUtils() { - } + private VersionedUrnUtils() {} public static Map<String, Long> convertVersionStamp(String versionStamp) { Map<String, Long> aspectVersionMap = new HashMap<>(); @@ -20,7 +18,8 @@ public static Map<String, Long> convertVersionStamp(String versionStamp) { for (String pair : aspectNameVersionPairs) { String[] tokens = pair.split(":"); if (tokens.length != 2) { - throw new IllegalArgumentException("Invalid version stamp cannot be parsed: " + versionStamp); + throw new IllegalArgumentException( + "Invalid version stamp cannot be parsed: " + versionStamp); } try { aspectVersionMap.put(tokens[0], Long.valueOf(tokens[1])); @@ -33,10 +32,13 @@ public static Map<String, Long> convertVersionStamp(String versionStamp) { } public static String constructVersionStamp(SortedMap<String, Long> versionStampMap) { - StringBuilder versionStamp = versionStampMap.entrySet().stream() - .collect(StringBuilder::new, (builder, entry) -> builder.append(entry.getKey()) - .append(":") - .append(entry.getValue()).append(";"), StringBuilder::append); + StringBuilder versionStamp = + versionStampMap.entrySet().stream() + .collect( + StringBuilder::new, + (builder, entry) -> + builder.append(entry.getKey()).append(":").append(entry.getValue()).append(";"), + StringBuilder::append); // trim off last ; return versionStamp.substring(0, versionStamp.length() - 1); } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java 
b/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java index 14949d9c946d9..880fcc2843333 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - public class VersionedUrnCoercer implements DirectCoercer<VersionedUrn> { static { Custom.registerCoercer(new VersionedUrnCoercer(), VersionedUrn.class); diff --git a/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java b/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java index c0ddbb710e2ee..ea878c41936ae 100644 --- a/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java +++ b/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java @@ -5,7 +5,6 @@ import org.assertj.core.api.Assertions; import org.testng.annotations.Test; - public class DatasetFieldUrnTest { private static final String PLATFORM = "fooPlatform"; @@ -16,39 +15,45 @@ public class DatasetFieldUrnTest { @Test public void testSerialization() throws URISyntaxException { final String datasetFieldString = - String.format("urn:li:datasetField:(urn:li:dataset:(urn:li:dataPlatform:%s,%s,%s),%s)", PLATFORM, DATASET_NAME, - FABRIC_TYPE, FIELD_NAME); + String.format( + "urn:li:datasetField:(urn:li:dataset:(urn:li:dataPlatform:%s,%s,%s),%s)", + PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetFieldUrn datasetFieldUrn = DatasetFieldUrn.deserialize(datasetFieldString); final DatasetUrn datasetUrn = datasetFieldUrn.getDatasetEntity(); Assertions.assertThat(datasetFieldUrn.getFieldPathEntity()).isEqualTo(FIELD_NAME); Assertions.assertThat(datasetUrn.getDatasetNameEntity()).isEqualTo(DATASET_NAME); - Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()).isEqualTo(PLATFORM); + Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()) + .isEqualTo(PLATFORM); Assertions.assertThat(datasetUrn.getOriginEntity()).isEqualTo(FabricType.PROD); Assertions.assertThat(datasetFieldUrn.toString()) .isEqualTo(datasetFieldString) - .describedAs("serialization followed by deserialization should produce the same urn string"); + .describedAs( + "serialization followed by deserialization should produce the same urn string"); } @Test public void testCreateUrn() { - final DatasetFieldUrn datasetFieldUrn = new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); + final DatasetFieldUrn datasetFieldUrn = + new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetUrn datasetUrn = datasetFieldUrn.getDatasetEntity(); Assertions.assertThat(datasetFieldUrn.getFieldPathEntity()).isEqualTo(FIELD_NAME); Assertions.assertThat(datasetUrn.getDatasetNameEntity()).isEqualTo(DATASET_NAME); - Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()).isEqualTo(PLATFORM); + Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()) + .isEqualTo(PLATFORM); Assertions.assertThat(datasetUrn.getOriginEntity()).isEqualTo(FabricType.PROD); } @Test public void testUrnConstructors() { - final DatasetFieldUrn datasetFieldUrn1 = new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); + final DatasetFieldUrn datasetFieldUrn1 = + new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetUrn datasetUrn = datasetFieldUrn1.getDatasetEntity(); final 
DatasetFieldUrn datasetFieldUrn2 = new DatasetFieldUrn(datasetUrn, FIELD_NAME); Assertions.assertThat(datasetFieldUrn1).isEqualTo(datasetFieldUrn2); } -} \ No newline at end of file +} diff --git a/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java index 76668abf4e5ce..f2d58c80177fb 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.common.util; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.test.testing.AspectBar; import com.datahub.test.testing.AspectFoo; import com.datahub.test.testing.DeltaUnion; @@ -39,10 +42,6 @@ import org.testng.annotations.Test; import org.testng.collections.Lists; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class ModelUtilsTest { class ChildUrn extends Urn { @@ -71,7 +70,8 @@ public void testGetInvalidAspectClass() { @Test public void testGetValidAspectTypes() { - Set<Class<? extends RecordTemplate>> validTypes = ModelUtils.getValidAspectTypes(EntityAspectUnion.class); + Set<Class<? extends RecordTemplate>> validTypes = + ModelUtils.getValidAspectTypes(EntityAspectUnion.class); assertEquals(validTypes, ImmutableSet.of(AspectFoo.class, AspectBar.class)); } @@ -172,7 +172,8 @@ public void testGetUrnFromEntity() { public void testGetUrnFromRelationship() { FooUrn expectedSource = makeFooUrn(1); BarUrn expectedDestination = makeBarUrn(1); - RelationshipFoo relationship = new RelationshipFoo().setSource(expectedSource).setDestination(expectedDestination); + RelationshipFoo relationship = + new RelationshipFoo().setSource(expectedSource).setDestination(expectedDestination); Urn sourceUrn = ModelUtils.getSourceUrnFromRelationship(relationship); Urn destinationUrn = ModelUtils.getDestinationUrnFromRelationship(relationship); @@ -269,7 +270,8 @@ public void testNewSnapshot() { EntityAspectUnion aspectUnion = new EntityAspectUnion(); aspectUnion.setAspectFoo(foo); - EntitySnapshot snapshot = ModelUtils.newSnapshot(EntitySnapshot.class, urn, Lists.newArrayList(aspectUnion)); + EntitySnapshot snapshot = + ModelUtils.newSnapshot(EntitySnapshot.class, urn, Lists.newArrayList(aspectUnion)); assertEquals(snapshot.getUrn(), urn); assertEquals(snapshot.getAspects().size(), 1); @@ -289,7 +291,8 @@ public void testNewAspect() { public void testNewAspectAlias() { AspectFoo foo = new AspectFoo().setValue("foo"); - EntityAspectUnionAlias aspectUnion = ModelUtils.newAspectUnion(EntityAspectUnionAlias.class, foo); + EntityAspectUnionAlias aspectUnion = + ModelUtils.newAspectUnion(EntityAspectUnionAlias.class, foo); assertEquals(aspectUnion.getFoo(), foo); } @@ -337,18 +340,22 @@ public void testValidateCorrectUrnForSnapshot() { @Test public void testNewRelatioshipUnion() { - RelationshipFoo foo = new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); + RelationshipFoo foo = + new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); - RelationshipUnion relationshipUnion = ModelUtils.newRelationshipUnion(RelationshipUnion.class, foo); + RelationshipUnion relationshipUnion = + ModelUtils.newRelationshipUnion(RelationshipUnion.class, foo); assertEquals(relationshipUnion.getRelationshipFoo(), foo); } @Test public void testNewRelatioshipUnionAlias() { - RelationshipFoo foo = new 
RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); + RelationshipFoo foo = + new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); - RelationshipUnionAlias relationshipUnion = ModelUtils.newRelationshipUnion(RelationshipUnionAlias.class, foo); + RelationshipUnionAlias relationshipUnion = + ModelUtils.newRelationshipUnion(RelationshipUnionAlias.class, foo); assertEquals(relationshipUnion.getFoo(), foo); } @@ -358,11 +365,14 @@ public void testGetMAETopicName() throws URISyntaxException { FooUrn urn = new FooUrn(1); AspectFoo foo = new AspectFoo().setValue("foo"); - assertEquals(ModelUtils.getAspectSpecificMAETopicName(urn, foo), "METADATA_AUDIT_EVENT_FOO_ASPECTFOO"); + assertEquals( + ModelUtils.getAspectSpecificMAETopicName(urn, foo), "METADATA_AUDIT_EVENT_FOO_ASPECTFOO"); PizzaUrn pizza = new PizzaUrn(1); AspectBar bar = new AspectBar().setValue("bar"); - assertEquals(ModelUtils.getAspectSpecificMAETopicName(pizza, bar), "METADATA_AUDIT_EVENT_PIZZA_ASPECTBAR"); + assertEquals( + ModelUtils.getAspectSpecificMAETopicName(pizza, bar), + "METADATA_AUDIT_EVENT_PIZZA_ASPECTBAR"); } @Test diff --git a/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java index 90514a498c67a..145ab2322adb0 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.common.util; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.test.testing.AspectBar; import com.datahub.test.testing.AspectBaz; import com.datahub.test.testing.AspectFoo; @@ -29,17 +32,16 @@ import org.apache.commons.io.IOUtils; import org.testng.annotations.Test; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class RecordUtilsTest { @Test public void testToJsonString() throws IOException { AspectFoo foo = new AspectFoo().setValue("foo"); String expected = - loadJsonFromResource("foo.json").replaceAll("\\s+", "").replaceAll("\\n", "").replaceAll("\\r", ""); + loadJsonFromResource("foo.json") + .replaceAll("\\s+", "") + .replaceAll("\\n", "") + .replaceAll("\\r", ""); String actual = RecordUtils.toJsonString(foo); @@ -55,7 +57,8 @@ public void testToRecordTemplate() throws IOException { assertEquals(actual, expected); - RecordTemplate actual2 = RecordUtils.toRecordTemplate(AspectFoo.class.getCanonicalName(), expected.data()); + RecordTemplate actual2 = + RecordUtils.toRecordTemplate(AspectFoo.class.getCanonicalName(), expected.data()); assertEquals(actual2.getClass(), AspectFoo.class); assertEquals(actual2, expected); @@ -71,7 +74,8 @@ public void testGetValidRecordDataSchemaField() { RecordDataSchema schema = ValidationUtils.getRecordSchema(AspectFoo.class); RecordDataSchema.Field expected = schema.getField("value"); - assertEquals(RecordUtils.getRecordDataSchemaField(new AspectFoo().setValue("foo"), "value"), expected); + assertEquals( + RecordUtils.getRecordDataSchemaField(new AspectFoo().setValue("foo"), "value"), expected); } @Test(expectedExceptions = InvalidSchemaException.class) @@ -112,7 +116,8 @@ public void testGetRecordTemplatePrimitiveField() throws IOException { assertTrue(RecordUtils.getRecordTemplateField(baz, "boolField", Boolean.class)); assertEquals(RecordUtils.getRecordTemplateField(baz, "stringField", String.class), "baz"); - 
assertEquals(RecordUtils.getRecordTemplateField(baz, "longField", Long.class), Long.valueOf(1234L)); + assertEquals( + RecordUtils.getRecordTemplateField(baz, "longField", Long.class), Long.valueOf(1234L)); } @Test @@ -127,9 +132,10 @@ public void testGetRecordTemplateUrnField() { public void testGetRecordTemplateWrappedField() throws IOException { AspectBaz baz = loadAspectBaz("baz.json"); - StringArray stringArray = RecordUtils.getRecordTemplateWrappedField(baz, "arrayField", StringArray.class); + StringArray stringArray = + RecordUtils.getRecordTemplateWrappedField(baz, "arrayField", StringArray.class); - assertEquals(stringArray.toArray(), new String[]{"1", "2", "3"}); + assertEquals(stringArray.toArray(), new String[] {"1", "2", "3"}); } @Test @@ -241,7 +247,10 @@ public void testGetFieldValueRecordType() { MixedRecord mixedRecord1 = new MixedRecord().setRecordField(foo1); PathSpec ps1f1 = MixedRecord.fields().recordField().value(); PathSpec ps1f2 = - MixedRecord.fields().nestedRecordField().foo().value(); // referencing a nullable record template field + MixedRecord.fields() + .nestedRecordField() + .foo() + .value(); // referencing a nullable record template field Optional<Object> o1f1 = RecordUtils.getFieldValue(mixedRecord1, ps1f1); Optional<Object> o1f2 = RecordUtils.getFieldValue(mixedRecord1, ps1f2); @@ -253,7 +262,8 @@ public void testGetFieldValueRecordType() { // case 2: referencing a field inside a RecordTemplate, two levels deep i.e. nested field AspectFoo foo2 = new AspectFoo().setValue("fooVal2"); - com.datahub.test.testing.EntityValue entityValue = new com.datahub.test.testing.EntityValue().setFoo(foo2); + com.datahub.test.testing.EntityValue entityValue = + new com.datahub.test.testing.EntityValue().setFoo(foo2); MixedRecord mixedRecord2 = new MixedRecord().setNestedRecordField(entityValue); PathSpec ps2 = MixedRecord.fields().nestedRecordField().foo().value(); @@ -268,7 +278,8 @@ public void testGetFieldValueArray() { // case 1: array of strings final MixedRecord mixedRecord1 = - new MixedRecord().setStringArray(new StringArray(Arrays.asList("val1", "val2", "val3", "val4"))); + new MixedRecord() + .setStringArray(new StringArray(Arrays.asList("val1", "val2", "val3", "val4"))); PathSpec ps1 = MixedRecord.fields().stringArray(); Object o1 = RecordUtils.getFieldValue(mixedRecord1, ps1).get(); @@ -293,20 +304,25 @@ public void testGetFieldValueArray() { // case 3: array of records is empty final MixedRecord mixedRecord3 = new MixedRecord().setRecordArray(new AspectFooArray()); - Object o3 = RecordUtils.getFieldValue(mixedRecord3, MixedRecord.fields().recordArray().items().value()).get(); + Object o3 = + RecordUtils.getFieldValue(mixedRecord3, MixedRecord.fields().recordArray().items().value()) + .get(); assertEquals(o3, new StringArray()); // case 4: referencing an index of array is not supported final MixedRecord mixedRecord4 = new MixedRecord().setRecordArray(aspectFooArray); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> RecordUtils.getFieldValue(mixedRecord4, "/recordArray/0/value")); // case 5: referencing nested field inside array of records, field being 2 levels deep AspectFoo f1 = new AspectFoo().setValue("val1"); AspectFoo f2 = new AspectFoo().setValue("val2"); - com.datahub.test.testing.EntityValue val1 = new com.datahub.test.testing.EntityValue().setFoo(f1); - com.datahub.test.testing.EntityValue val2 = new com.datahub.test.testing.EntityValue().setFoo(f2); + com.datahub.test.testing.EntityValue 
val1 = + new com.datahub.test.testing.EntityValue().setFoo(f1); + com.datahub.test.testing.EntityValue val2 = + new com.datahub.test.testing.EntityValue().setFoo(f2); EntityValueArray entityValues = new EntityValueArray(Arrays.asList(val1, val2)); final MixedRecord mixedRecord5 = new MixedRecord().setNestedRecordArray(entityValues); @@ -333,17 +349,21 @@ public void testGetFieldValueArray() { assertFalse(o7.isPresent()); } - @Test(description = "Test getFieldValue() when RecordTemplate has field of type array of primitive unions") + @Test( + description = + "Test getFieldValue() when RecordTemplate has field of type array of primitive unions") public void testGetFieldValueArrayOfPrimitiveUnions() { // case 1: array of unions of strings final MixedRecord mixedRecord1 = - new MixedRecord().setUnionArray(new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + new MixedRecord() + .setUnionArray( + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); PathSpec ps1 = MixedRecord.fields().unionArray(); Object o1 = RecordUtils.getFieldValue(mixedRecord1, ps1).get(); @@ -351,20 +371,24 @@ public void testGetFieldValueArrayOfPrimitiveUnions() { PathSpec ps2 = MixedRecord.fields().unionArray().items(); Object o2 = RecordUtils.getFieldValue(mixedRecord1, ps2).get(); - assertEquals(o1, new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + assertEquals( + o1, + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); assertEquals(ps1.toString(), "/unionArray"); - assertEquals(o2, new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + assertEquals( + o2, + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); assertEquals(ps2.toString(), "/unionArray/*"); } @@ -381,8 +405,9 @@ public void testCapitalizeFirst() { } private AspectBaz loadAspectBaz(String resourceName) throws IOException { - return RecordUtils.toRecordTemplate(AspectBaz.class, - IOUtils.toString(ClassLoader.getSystemResourceAsStream(resourceName), StandardCharsets.UTF_8)); + return RecordUtils.toRecordTemplate( + AspectBaz.class, + IOUtils.toString( + ClassLoader.getSystemResourceAsStream(resourceName), StandardCharsets.UTF_8)); } - } diff --git a/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java index cb5ac62d71a1d..93875b0f06706 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.common.util; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.VersionedUrnUtils; import java.util.Comparator; import java.util.Map; @@ -7,9 +9,6 @@ import java.util.TreeMap; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class VersionedUrnUtilsTest { private static final String SCHEMA_METADATA = 
"schemaMetadata"; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java index b8b62782309b8..2ae9ee8ab14ea 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java @@ -5,28 +5,24 @@ import lombok.AllArgsConstructor; import lombok.Getter; - /** - * Represents a unique DataHub actor (i.e. principal). Defining characteristics of all DataHub Actors includes a + * Represents a unique DataHub actor (i.e. principal). Defining characteristics of all DataHub + * Actors includes a * - * a) Actor Type: A specific type of actor, e.g. CORP_USER or SERVICE_USER. - * b) Actor Id: A unique id for the actor. + * <p>a) Actor Type: A specific type of actor, e.g. CORP_USER or SERVICE_USER. b) Actor Id: A unique + * id for the actor. * - * These pieces of information are in turn used to construct an Entity Urn, which can be used as a primary key to fetch and update specific information - * about the actor. + * <p>These pieces of information are in turn used to construct an Entity Urn, which can be used as + * a primary key to fetch and update specific information about the actor. */ @Getter @AllArgsConstructor public class Actor { - /** - * The {@link ActorType} associated with a DataHub actor. - */ + /** The {@link ActorType} associated with a DataHub actor. */ private final ActorType type; - /** - * The unique id associated with a DataHub actor. - */ + /** The unique id associated with a DataHub actor. */ private final String id; /** @@ -37,6 +33,7 @@ public String toUrnStr() { if (Objects.requireNonNull(getType()) == ActorType.USER) { return String.format("urn:li:corpuser:%s", getId()); } - throw new IllegalArgumentException(String.format("Unrecognized ActorType %s provided", getType())); + throw new IllegalArgumentException( + String.format("Unrecognized ActorType %s provided", getType())); } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java index c41a30e57b2d6..4fc175cd4815e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java @@ -3,12 +3,10 @@ /** * A specific type of Actor on DataHub's platform. * - * Currently the only actor type officially supported, though in the future this may evolve - * to include service users. + * <p>Currently the only actor type officially supported, though in the future this may evolve to + * include service users. */ public enum ActorType { - /** - * A user actor, e.g. john smith - */ + /** A user actor, e.g. john smith */ USER, } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java index 71efedda56e5c..b53d868e6e878 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java @@ -5,21 +5,21 @@ import java.util.Objects; import javax.annotation.Nonnull; - -/** - * Class representing an authenticated actor accessing DataHub. - */ +/** Class representing an authenticated actor accessing DataHub. 
*/ public class Authentication { private final Actor authenticatedActor; private final String credentials; private final Map<String, Object> claims; - public Authentication(@Nonnull final Actor authenticatedActor, @Nonnull final String credentials) { + public Authentication( + @Nonnull final Actor authenticatedActor, @Nonnull final String credentials) { this(authenticatedActor, credentials, Collections.emptyMap()); } - public Authentication(@Nonnull final Actor authenticatedActor, @Nonnull final String credentials, + public Authentication( + @Nonnull final Actor authenticatedActor, + @Nonnull final String credentials, @Nonnull final Map<String, Object> claims) { this.authenticatedActor = Objects.requireNonNull(authenticatedActor); this.credentials = Objects.requireNonNull(credentials); @@ -34,7 +34,8 @@ public Actor getActor() { } /** - * @return Returns the credentials associated with the current request (e.g. the value of the "Authorization" header) + * @return Returns the credentials associated with the current request (e.g. the value of the + * "Authorization" header) */ public String getCredentials() { return this.credentials; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java index e95f891b853a5..3a59b23122e25 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java @@ -19,6 +19,5 @@ public static void remove() { AUTHENTICATION.remove(); } - private AuthenticationContext() { - } + private AuthenticationContext() {} } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java index 9fbac00d3aeb5..5928b258c5f80 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java @@ -2,7 +2,6 @@ import com.datahub.plugins.auth.authentication.Authenticator; - /** * An {@link Exception} thrown when an {@link Authenticator} is unable to resolve an instance of * {@link Authentication} for the current request. diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java index 0a8e3cba3d07b..500248d6c7b4e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java @@ -2,7 +2,6 @@ import com.datahub.plugins.auth.authentication.Authenticator; - /** * An {@link Exception} thrown when an {@link Authenticator} is unable to resolve an instance of * {@link Authentication} for the current request.
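Taken together, the authentication classes reformatted above compose in a straightforward way. The following is a minimal usage sketch for orientation only, not part of this patch; the class name AuthenticationSketch, the placeholder credential string, and the setAuthentication/getAuthentication accessors on AuthenticationContext (only remove() is visible in the hunk above) are assumptions.

    import com.datahub.authentication.Actor;
    import com.datahub.authentication.ActorType;
    import com.datahub.authentication.Authentication;
    import com.datahub.authentication.AuthenticationContext;

    public class AuthenticationSketch {
      public static void main(String[] args) {
        // USER is currently the only supported ActorType; toUrnStr() renders a corpuser urn.
        Actor actor = new Actor(ActorType.USER, "datahub");
        System.out.println(actor.toUrnStr()); // urn:li:corpuser:datahub

        // Pair the actor with the raw request credentials; "Basic abc123" is a
        // placeholder value, not a real header.
        Authentication auth = new Authentication(actor, "Basic abc123");

        // AuthenticationContext (assumed accessors) keeps the Authentication in a
        // thread-local for the duration of a request; remove() clears it afterwards.
        AuthenticationContext.setAuthentication(auth);
        System.out.println(AuthenticationContext.getAuthentication().getActor().getId());
        AuthenticationContext.remove();
      }
    }

Note that the two-argument Authentication constructor defaults the claims map to Collections.emptyMap(), so callers that carry no extra claims can stay on the short form.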
diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java index 5673bac5442b2..9aaf40df5a0f6 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java @@ -1,18 +1,16 @@ package com.datahub.authentication; import com.datahub.plugins.auth.authentication.Authenticator; -import lombok.Getter; - import java.util.Map; import java.util.Objects; import java.util.TreeMap; import javax.annotation.Nonnull; - +import lombok.Getter; /** * Request context provided to each {@link Authenticator} to perform Authentication. * - * Currently, this class only hold the inbound request's headers, but could certainly be extended + * <p>Currently, this class only holds the inbound request's headers, but could certainly be extended * to contain additional information like the request parameters, body, ip, etc as needed. */ @Getter @@ -27,7 +25,10 @@ public AuthenticationRequest(@Nonnull final Map<String, String> requestHeaders) this("", "", requestHeaders); } - public AuthenticationRequest(@Nonnull String servletInfo, @Nonnull String pathInfo, @Nonnull final Map<String, String> requestHeaders) { + public AuthenticationRequest( + @Nonnull String servletInfo, + @Nonnull String pathInfo, + @Nonnull final Map<String, String> requestHeaders) { Objects.requireNonNull(requestHeaders); caseInsensitiveHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); caseInsensitiveHeaders.putAll(requestHeaders); diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java index 4c6ee071e5ca1..06a70d55c0802 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java @@ -6,10 +6,10 @@ import java.util.Objects; import javax.annotation.Nonnull; - /** - * Context class to provide Authenticator implementations with concrete objects necessary for their correct workings. - * DataHub creates {@link AuthenticatorContext} instance and provides it as an argument to init method of {@link Authenticator} + * Context class to provide Authenticator implementations with concrete objects necessary for their + * correct workings.
DataHub creates {@link AuthenticatorContext} instance and provides it as an + * argument to init method of {@link Authenticator} */ public class AuthenticatorContext { private final Map<String, Object> contextMap; @@ -21,10 +21,9 @@ public AuthenticatorContext(@Nonnull final Map<String, Object> context) { } /** - * - * @return contextMap The contextMap contains below key and value - * {@link com.datahub.plugins.PluginConstant#PLUGIN_HOME PLUGIN_HOME}: Directory path where plugin is installed - * + * @return contextMap The contextMap contains below key and value {@link + * com.datahub.plugins.PluginConstant#PLUGIN_HOME PLUGIN_HOME}: Directory path where plugin is + * installed */ @Nonnull public Map<String, Object> data() { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java index e159993a8a243..f8d08c6adbd3a 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java @@ -5,22 +5,22 @@ import java.util.Optional; import javax.annotation.Nonnull; - public class AuthUtil { public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, @Nonnull Optional<EntitySpec> maybeResourceSpec, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : privilegeGroup.getAuthorizedPrivilegeGroups()) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { + for (ConjunctivePrivilegeGroup andPrivilegeGroup : + privilegeGroup.getAuthorizedPrivilegeGroups()) { // If any conjunctive privilege group is authorized, then the entire request is authorized. if (isAuthorized(authorizer, actor, andPrivilegeGroup, maybeResourceSpec)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not authorized. + // If none of the disjunctive privilege groups were authorized, then the entire request is not + // authorized. return false; } @@ -28,15 +28,16 @@ public static boolean isAuthorizedForResources( @Nonnull Authorizer authorizer, @Nonnull String actor, @Nonnull List<Optional<EntitySpec>> resourceSpecs, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : privilegeGroup.getAuthorizedPrivilegeGroups()) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { + for (ConjunctivePrivilegeGroup andPrivilegeGroup : + privilegeGroup.getAuthorizedPrivilegeGroups()) { // If any conjunctive privilege group is authorized, then the entire request is authorized. if (isAuthorizedForResources(authorizer, actor, andPrivilegeGroup, resourceSpecs)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not authorized. + // If none of the disjunctive privilege groups were authorized, then the entire request is not + // authorized. return false; } @@ -67,7 +68,8 @@ private static boolean isAuthorizedForResources( for (final String privilege : requiredPrivileges.getRequiredPrivileges()) { // Create and evaluate an Authorization request. 
for (Optional<EntitySpec> resourceSpec : resourceSpecs) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege, resourceSpec); + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege, resourceSpec); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { // Short circuit. @@ -78,5 +80,5 @@ private static boolean isAuthorizedForResources( return true; } - private AuthUtil() { } -} \ No newline at end of file + private AuthUtil() {} +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java index 9e75de3cbf44d..62889a50d2d96 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java @@ -3,23 +3,18 @@ import java.util.Optional; import lombok.Value; - -/** - * A request to authorize a user for a specific privilege. - */ +/** A request to authorize a user for a specific privilege. */ @Value public class AuthorizationRequest { - /** - * The urn of the actor (corpuser) making the request. - */ + /** The urn of the actor (corpuser) making the request. */ String actorUrn; - /** - * The privilege that the user is requesting - */ + + /** The privilege that the user is requesting */ String privilege; + /** - * The resource that the user is requesting for, if applicable. If the privilege is a platform privilege - * this optional will be empty. + * The resource that the user is requesting for, if applicable. If the privilege is a platform + * privilege this optional will be empty. */ Optional<EntitySpec> resourceSpec; } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java index 17d199be583e3..a8eea06dfab27 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java @@ -3,39 +3,24 @@ import lombok.AllArgsConstructor; import lombok.Data; - -/** - * A result returned after requesting authorization for a particular privilege. - */ +/** A result returned after requesting authorization for a particular privilege. */ @Data @AllArgsConstructor public class AuthorizationResult { - /** - * The original authorization request - */ + /** The original authorization request */ AuthorizationRequest request; - /** - * The result type. Allow or deny the authorization request for the actor. - */ + /** The result type. Allow or deny the authorization request for the actor. */ public enum Type { - /** - * Allow the request - the requested actor is privileged. - */ + /** Allow the request - the requested actor is privileged. */ ALLOW, - /** - * Deny the request - the requested actor is not privileged. - */ + /** Deny the request - the requested actor is not privileged. */ DENY } - /** - * The decision - whether to allow or deny the request. - */ + /** The decision - whether to allow or deny the request. */ public Type type; - /** - * Optional message associated with the decision. Useful for debugging. - */ + /** Optional message associated with the decision. Useful for debugging. 
*/ String message; } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java index 5a9990552bb34..0155c49fd9da7 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java @@ -7,7 +7,6 @@ import lombok.Builder; import lombok.Value; - @Value @AllArgsConstructor(access = AccessLevel.PUBLIC) @Builder diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java index b79a4fa20c7ea..50bc749cd9921 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java @@ -7,25 +7,21 @@ import lombok.AllArgsConstructor; import lombok.Data; - /** - * Context provided to an Authorizer on initialization. - * DataHub creates {@link AuthenticatorContext} instance and provides it as an argument to init method of {@link Authenticator} + * Context provided to an Authorizer on initialization. DataHub creates {@link AuthenticatorContext} + * instance and provides it as an argument to init method of {@link Authenticator} */ @Data @AllArgsConstructor public class AuthorizerContext { private final Map<String, Object> contextMap; - /** - * A utility for resolving an {@link EntitySpec} to resolved entity field values. - */ + /** A utility for resolving an {@link EntitySpec} to resolved entity field values. */ private EntitySpecResolver entitySpecResolver; /** - * - * @return contextMap The contextMap contains below key and value - * PLUGIN_DIRECTORY: Directory path where plugin is installed i.e. PLUGIN_HOME + * @return contextMap The contextMap contains below key and value PLUGIN_DIRECTORY: Directory path + * where plugin is installed i.e. PLUGIN_HOME */ @Nonnull public Map<String, Object> data() { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java index d47783268f70d..bc3a3c9f385a6 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java @@ -2,12 +2,10 @@ import java.util.List; - /** - * Represents a group of privileges that must <b>ALL</b> be required to - * authorize a request. + * Represents a group of privileges that must <b>ALL</b> be required to authorize a request. * - * That is, an AND of privileges. + * <p>That is, an AND of privileges. */ public class ConjunctivePrivilegeGroup { private final List<String> _requiredPrivileges; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java index 40bb22d036f0a..350476326da9f 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java @@ -5,7 +5,7 @@ /** * Represents a group of privilege groups, any of which must be authorized to authorize a request. 
* - * That is, an OR of privilege groups. + * <p>That is, an OR of privilege groups. */ public class DisjunctivePrivilegeGroup { private final List<ConjunctivePrivilegeGroup> _authorizedPrivilegeGroups; @@ -17,4 +17,4 @@ public DisjunctivePrivilegeGroup(List<ConjunctivePrivilegeGroup> authorizedPrivi public List<ConjunctivePrivilegeGroup> getAuthorizedPrivilegeGroups() { return _authorizedPrivilegeGroups; } -} \ No newline at end of file +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java index 1258d958f2092..6b08cdb00e9ab 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java @@ -1,44 +1,32 @@ package com.datahub.authorization; -/** - * List of entity field types to fetch for a given entity - */ +/** List of entity field types to fetch for a given entity */ public enum EntityFieldType { /** * Type of the entity (e.g. dataset, chart) + * * @deprecated */ @Deprecated RESOURCE_URN, /** * Urn of the entity + * * @deprecated */ @Deprecated RESOURCE_TYPE, - /** - * Type of the entity (e.g. dataset, chart) - */ + /** Type of the entity (e.g. dataset, chart) */ TYPE, - /** - * Urn of the entity - */ + /** Urn of the entity */ URN, - /** - * Owners of the entity - */ + /** Owners of the entity */ OWNER, - /** - * Domains of the entity - */ + /** Domains of the entity */ DOMAIN, - /** - * Groups of which the entity (only applies to corpUser) is a member - */ + /** Groups of which the entity (only applies to corpUser) is a member */ GROUP_MEMBERSHIP, - /** - * Data platform instance of resource - */ + /** Data platform instance of resource */ DATA_PLATFORM_INSTANCE } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java index 656bec0f44fc2..eb412cdeff14e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java @@ -3,21 +3,19 @@ import javax.annotation.Nonnull; import lombok.Value; - /** - * Details about the entities involved in the authorization process. It models the actor and the resource being acted - * upon. Resource types currently supported can be found inside of {@link com.linkedin.metadata.authorization.PoliciesConfig} + * Details about the entities involved in the authorization process. It models the actor and the + * resource being acted upon. Resource types currently supported can be found inside of {@link + * com.linkedin.metadata.authorization.PoliciesConfig} */ @Value public class EntitySpec { + /** The entity type. (dataset, chart, dashboard, corpGroup, etc). */ + @Nonnull String type; + /** - * The entity type. (dataset, chart, dashboard, corpGroup, etc). - */ - @Nonnull - String type; - /** - * The entity identity. Most often, this corresponds to the raw entity urn. (urn:li:corpGroup:groupId) + * The entity identity. Most often, this corresponds to the raw entity urn. 
+ * (urn:li:corpGroup:groupId) */ - @Nonnull - String entity; -} \ No newline at end of file + @Nonnull String entity; +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java index 67347fbf87a87..0d482f3816e28 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java @@ -1,11 +1,10 @@ package com.datahub.authorization; /** - * An Entity Spec Resolver is responsible for resolving a {@link EntitySpec} to a {@link ResolvedEntitySpec}. + * An Entity Spec Resolver is responsible for resolving a {@link EntitySpec} to a {@link + * ResolvedEntitySpec}. */ public interface EntitySpecResolver { - /** - Resolve a {@link EntitySpec} to a resolved entity spec. - **/ + /** Resolve a {@link EntitySpec} to a resolved entity spec. */ ResolvedEntitySpec resolve(EntitySpec entitySpec); } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java index 955a06fd54cb9..3e6287c335c97 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java @@ -10,32 +10,30 @@ import lombok.RequiredArgsConstructor; import lombok.Value; - /** - * Helper class for lazy resolution of fields - * Input resolveField function that is given as input will only be called when getFieldValuesFuture is called + * Helper class for lazy resolution of fields. The resolveField function that is given as input + * will only be called when getFieldValuesFuture is called */ @RequiredArgsConstructor public class FieldResolver { private final Supplier<CompletableFuture<FieldValue>> resolveField; + @Getter(lazy = true) private final CompletableFuture<FieldValue> fieldValuesFuture = resolveField.get(); private static final FieldValue EMPTY = new FieldValue(Collections.emptySet()); - /** - * Helper function that returns FieldResolver for precomputed values - */ + /** Helper function that returns FieldResolver for precomputed values */ public static FieldResolver getResolverFromValues(Set<String> values) { - return new FieldResolver(() -> CompletableFuture.completedFuture(FieldValue.builder().values(values).build())); + return new FieldResolver( + () -> CompletableFuture.completedFuture(FieldValue.builder().values(values).build())); } - /** - * Helper function that returns FieldResolver given a fetchFieldValue function - */ - public static FieldResolver getResolverFromFunction(EntitySpec entitySpec, - Function<EntitySpec, FieldValue> fetchFieldValue) { - return new FieldResolver(() -> CompletableFuture.supplyAsync(() -> fetchFieldValue.apply(entitySpec))); + /** Helper function that returns FieldResolver given a fetchFieldValue function */ + public static FieldResolver getResolverFromFunction( + EntitySpec entitySpec, Function<EntitySpec, FieldValue> fetchFieldValue) { + return new FieldResolver( + () -> CompletableFuture.supplyAsync(() -> fetchFieldValue.apply(entitySpec))); } public static FieldValue emptyFieldValue() { @@ -43,7 +41,8 @@ public static FieldValue emptyFieldValue() { } /** - * Container for storing the field value, in case
we need to extend this to have more types of + * field values */ @Value @Builder diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java index 7948766df5715..0a639bed1082b 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java @@ -8,15 +8,14 @@ import lombok.RequiredArgsConstructor; import lombok.ToString; - /** - * Wrapper around authorization request with field resolvers for lazily fetching the field values for each field type + * Wrapper around authorization request with field resolvers for lazily fetching the field values + * for each field type */ @RequiredArgsConstructor @ToString public class ResolvedEntitySpec { - @Getter - private final EntitySpec spec; + @Getter private final EntitySpec spec; private final Map<EntityFieldType, FieldResolver> fieldResolvers; public Set<String> getFieldValues(EntityFieldType entityFieldType) { @@ -28,6 +27,7 @@ public Set<String> getFieldValues(EntityFieldType entityFieldType) { /** * Fetch the owners for an entity. + * * @return a set of owner urns, or empty set if none exist. */ public Set<String> getOwners() { @@ -39,6 +39,7 @@ public Set<String> getOwners() { /** * Fetch the platform instance for a Resolved Resource Spec + * * @return a Platform Instance or null if one does not exist. */ @Nullable @@ -46,7 +47,12 @@ public String getDataPlatformInstance() { if (!fieldResolvers.containsKey(EntityFieldType.DATA_PLATFORM_INSTANCE)) { return null; } - Set<String> dataPlatformInstance = fieldResolvers.get(EntityFieldType.DATA_PLATFORM_INSTANCE).getFieldValuesFuture().join().getValues(); + Set<String> dataPlatformInstance = + fieldResolvers + .get(EntityFieldType.DATA_PLATFORM_INSTANCE) + .getFieldValuesFuture() + .join() + .getValues(); if (dataPlatformInstance.size() > 0) { return dataPlatformInstance.stream().findFirst().get(); } @@ -55,12 +61,17 @@ public String getDataPlatformInstance() { /** * Fetch the group membership for an entity. + * * @return a set of groups urns, or empty set if none exist. 
*/ public Set<String> getGroupMembership() { if (!fieldResolvers.containsKey(EntityFieldType.GROUP_MEMBERSHIP)) { return Collections.emptySet(); } - return fieldResolvers.get(EntityFieldType.GROUP_MEMBERSHIP).getFieldValuesFuture().join().getValues(); + return fieldResolvers + .get(EntityFieldType.GROUP_MEMBERSHIP) + .getFieldValuesFuture() + .join() + .getValues(); } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java index 474dd7363e495..3b8406ad5ed5a 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java @@ -1,7 +1,4 @@ package com.datahub.plugins; -/** - * A tag interface for plugin - */ -public interface Plugin { -} +/** A tag interface for plugin */ +public interface Plugin {} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java index 03afc06af7f3c..ac72fc9b8f816 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java @@ -3,6 +3,5 @@ public class PluginConstant { public static final String PLUGIN_HOME = "PLUGIN_HOME"; - private PluginConstant() { - } + private PluginConstant() {} } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java index b7cf80384564b..6485495608773 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java @@ -10,30 +10,36 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** - * An {@link Authenticator}'s job is to authenticate an inbound request by resolving the provided {@link AuthenticationRequest} - * to an instance of {@link Authentication}, which includes an authenticated {@link Actor} within. + * An {@link Authenticator}'s job is to authenticate an inbound request by resolving the provided + * {@link AuthenticationRequest} to an instance of {@link Authentication}, which includes an + * authenticated {@link Actor} within. * - * In the case that {@link Authentication} cannot be resolved, for example because the request is missing the required - * authentication information, an {@link AuthenticationException} may be thrown. + * <p>In the case that {@link Authentication} cannot be resolved, for example because the request is + * missing the required authentication information, an {@link AuthenticationException} may be + * thrown. */ public interface Authenticator extends Plugin { /** * Initialize the Authenticator. Invoked once at boot time. * - * @param authenticatorConfig config provided to the authenticator derived from the Metadata Service YAML config. This - * config comes from the "plugins[].params.configs" configuration. - * @param context nullable configuration objects that are potentially required by an Authenticator instance. + * @param authenticatorConfig config provided to the authenticator derived from the Metadata + * Service YAML config. This config comes from the "plugins[].params.configs" configuration. 
+ * @param context nullable configuration objects that are potentially required by an Authenticator + * instance. */ - void init(@Nonnull final Map<String, Object> authenticatorConfig, @Nullable final AuthenticatorContext context); + void init( + @Nonnull final Map<String, Object> authenticatorConfig, + @Nullable final AuthenticatorContext context); /** * Authenticates an inbound request given an instance of the {@link AuthenticationRequest}. - * @param authenticationRequest authentication request {@link AuthenticationRequest} that need to be authenticated - * If the request is authenticated successfully, an instance of {@link Authentication} is returned. - * If the request cannot be authenticated, returns "null" or throws an {@link AuthenticationException}. + * + * @param authenticationRequest authentication request {@link AuthenticationRequest} that needs to + * be authenticated. If the request is authenticated successfully, an instance of {@link + * Authentication} is returned. If the request cannot be authenticated, returns "null" or + * throws an {@link AuthenticationException}. */ @Nullable Authentication authenticate(@Nonnull final AuthenticationRequest authenticationRequest) diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java index c731a3ec987c1..a6baf0b5b282c 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java @@ -10,27 +10,27 @@ import java.util.Optional; import javax.annotation.Nonnull; - /** - * An Authorizer is responsible for determining whether an actor should be granted a specific privilege. + * An Authorizer is responsible for determining whether an actor should be granted a specific + * privilege. */ public interface Authorizer extends Plugin { /** * Initialize the Authorizer. Invoked once at boot time. * - * @param authorizerConfig config provided to the authenticator derived from the Metadata Service YAML config. This - * config comes from the "authorization.authorizers.config" configuration. + * @param authorizerConfig config provided to the authorizer derived from the Metadata Service + * YAML config. This config comes from the "authorization.authorizers.config" configuration. */ - void init(@Nonnull final Map<String, Object> authorizerConfig, @Nonnull final AuthorizerContext ctx); + void init( + @Nonnull final Map<String, Object> authorizerConfig, @Nonnull final AuthorizerContext ctx); - /** - * Authorizes an action based on the actor, the resource, and required privileges. - */ + /** Authorizes an action based on the actor, the resource, and required privileges.
*/ AuthorizationResult authorize(@Nonnull final AuthorizationRequest request); /** - * Retrieves the current list of actors authorized to for a particular privilege against - * an optional resource + * Retrieves the current list of actors authorized to for a particular privilege against an + * optional resource */ - AuthorizedActors authorizedActors(final String privilege, final Optional<EntitySpec> resourceSpec); + AuthorizedActors authorizedActors( + final String privilege, final Optional<EntitySpec> resourceSpec); } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java index d8d66ddeeb648..2ac16091128a2 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java @@ -1,25 +1,25 @@ package com.datahub.metadata.dao.producer; -import com.linkedin.common.urn.Urn; import com.datahub.util.ModelUtils; +import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** * A base class for all metadata event producers. * - *<p>See http://go/gma for more details. + * <p>See http://go/gma for more details. */ -public abstract class BaseMetadataEventProducer<SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> { +public abstract class BaseMetadataEventProducer< + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> { protected final Class<SNAPSHOT> _snapshotClass; protected final Class<ASPECT_UNION> _aspectUnionClass; - public BaseMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, - @Nonnull Class<ASPECT_UNION> aspectUnionClass) { + public BaseMetadataEventProducer( + @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<ASPECT_UNION> aspectUnionClass) { ModelUtils.validateSnapshotAspect(snapshotClass, aspectUnionClass); _snapshotClass = snapshotClass; _aspectUnionClass = aspectUnionClass; @@ -32,8 +32,8 @@ public BaseMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, * @param newValue the proposed new value for the metadata * @param <ASPECT> must be a supported aspect type in {@code ASPECT_UNION} */ - public abstract <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent(@Nonnull URN urn, - @Nonnull ASPECT newValue); + public abstract <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent( + @Nonnull URN urn, @Nonnull ASPECT newValue); /** * Produces a Metadata Audit Event (MAE) after a metadata aspect is updated for an entity. @@ -43,17 +43,17 @@ public abstract <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadat * @param newValue the value after the update * @param <ASPECT> must be a supported aspect type in {@code ASPECT_UNION} */ - public abstract <ASPECT extends RecordTemplate> void produceMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); + public abstract <ASPECT extends RecordTemplate> void produceMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); /** - * Produces an aspect specific Metadata Audit Event (MAE) after a metadata aspect is updated for an entity. 
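// ---- Illustrative sketch (editor's addition, not part of this patch) ----
// How the three type parameters of BaseMetadataEventProducer line up in a subclass.
// DatasetSnapshot, DatasetAspect, and DatasetUrn are stand-ins for concrete models; the
// abstract produce* methods are left to concrete subclasses.
public abstract class DatasetMetadataEventProducer
    extends BaseMetadataEventProducer<DatasetSnapshot, DatasetAspect, DatasetUrn> {
  public DatasetMetadataEventProducer() {
    // The super constructor runs ModelUtils.validateSnapshotAspect on this pairing.
    super(DatasetSnapshot.class, DatasetAspect.class);
  }
}
// ---- end sketch ----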
+ * Produces an aspect specific Metadata Audit Event (MAE) after a metadata aspect is updated for + * an entity. * * @param urn {@link Urn} of the entity * @param oldValue the value prior to the update, or null if there's none. * @param newValue the value after the update */ - public abstract <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); + public abstract <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); } - diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java index 00b5bb75d901b..26b48449c1c2f 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java @@ -24,8 +24,9 @@ import org.apache.kafka.clients.producer.ProducerRecord; /** - * <p>The topic names that this emits to can be controlled by constructing this with a {@link TopicConvention}. - * If none is given, defaults to a {@link TopicConventionImpl} with the default delimiter of an underscore (_). + * The topic names that this emits to can be controlled by constructing this with a {@link + * TopicConvention}. If none is given, defaults to a {@link TopicConventionImpl} with the default + * delimiter of an underscore (_). */ @Slf4j public class KafkaEventProducer implements EventProducer { @@ -41,8 +42,10 @@ public class KafkaEventProducer implements EventProducer { * @param topicConvention the convention to use to get kafka topic names * @param kafkaHealthChecker The {@link Callback} to invoke when the request is completed */ - public KafkaEventProducer(@Nonnull final Producer<String, ? extends IndexedRecord> producer, - @Nonnull final TopicConvention topicConvention, @Nonnull final KafkaHealthChecker kafkaHealthChecker) { + public KafkaEventProducer( + @Nonnull final Producer<String, ? extends IndexedRecord> producer, + @Nonnull final TopicConvention topicConvention, + @Nonnull final KafkaHealthChecker kafkaHealthChecker) { _producer = producer; _topicConvention = topicConvention; _kafkaHealthChecker = kafkaHealthChecker; @@ -50,13 +53,16 @@ public KafkaEventProducer(@Nonnull final Producer<String, ? 
extends IndexedRecor @Override @WithSpan - public Future<?> produceMetadataChangeLog(@Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, + public Future<?> produceMetadataChangeLog( + @Nonnull final Urn urn, + @Nonnull AspectSpec aspectSpec, @Nonnull final MetadataChangeLog metadataChangeLog) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeLog: %s", - urn, - metadataChangeLog)); + log.debug( + String.format( + "Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeLog: %s", + urn, metadataChangeLog)); record = EventUtils.pegasusToAvroMCL(metadataChangeLog); } catch (IOException e) { log.error(String.format("Failed to convert Pegasus MAE to Avro: %s", metadataChangeLog), e); @@ -67,38 +73,42 @@ record = EventUtils.pegasusToAvroMCL(metadataChangeLog); if (aspectSpec.isTimeseries()) { topic = _topicConvention.getMetadataChangeLogTimeseriesTopicName(); } - return _producer.send(new ProducerRecord(topic, urn.toString(), record), - _kafkaHealthChecker.getKafkaCallBack("MCL", urn.toString())); + return _producer.send( + new ProducerRecord(topic, urn.toString(), record), + _kafkaHealthChecker.getKafkaCallBack("MCL", urn.toString())); } @Override @WithSpan - public Future<?> produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull final MetadataChangeProposal metadataChangeProposal) { + public Future<?> produceMetadataChangeProposal( + @Nonnull final Urn urn, @Nonnull final MetadataChangeProposal metadataChangeProposal) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", - urn, - metadataChangeProposal)); + log.debug( + String.format( + "Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", + urn, metadataChangeProposal)); record = EventUtils.pegasusToAvroMCP(metadataChangeProposal); } catch (IOException e) { - log.error(String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); + log.error( + String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); throw new ModelConversionException("Failed to convert Pegasus MCP to Avro", e); } String topic = _topicConvention.getMetadataChangeProposalTopicName(); - return _producer.send(new ProducerRecord(topic, urn.toString(), record), - _kafkaHealthChecker.getKafkaCallBack("MCP", urn.toString())); + return _producer.send( + new ProducerRecord(topic, urn.toString(), record), + _kafkaHealthChecker.getKafkaCallBack("MCP", urn.toString())); } @Override - public Future<?> producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { + public Future<?> producePlatformEvent( + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus Event to Avro Event urn %s\nEvent: %s", - name, - event)); + log.debug( + String.format("Converting Pegasus Event to Avro Event urn %s\nEvent: %s", name, event)); record = EventUtils.pegasusToAvroPE(event); } catch (IOException e) { log.error(String.format("Failed to convert Pegasus Platform Event to Avro: %s", event), e); @@ -106,8 +116,9 @@ record = EventUtils.pegasusToAvroPE(event); } final String topic = _topicConvention.getPlatformEventTopicName(); - return _producer.send(new ProducerRecord(topic, key == null ? 
name : key, record), - _kafkaHealthChecker.getKafkaCallBack("Platform Event", name)); + return _producer.send( + new ProducerRecord(topic, key == null ? name : key, record), + _kafkaHealthChecker.getKafkaCallBack("Platform Event", name)); } @Override @@ -117,12 +128,17 @@ public void produceDataHubUpgradeHistoryEvent(@Nonnull DataHubUpgradeHistoryEven log.debug(String.format("Converting Pegasus Event to Avro Event\nEvent: %s", event)); record = EventUtils.pegasusToAvroDUHE(event); } catch (IOException e) { - log.error(String.format("Failed to convert Pegasus DataHub Upgrade History Event to Avro: %s", event), e); + log.error( + String.format( + "Failed to convert Pegasus DataHub Upgrade History Event to Avro: %s", event), + e); throw new ModelConversionException("Failed to convert Pegasus Platform Event to Avro", e); } final String topic = _topicConvention.getDataHubUpgradeHistoryTopicName(); - _producer.send(new ProducerRecord(topic, event.getVersion(), record), _kafkaHealthChecker - .getKafkaCallBack("History Event", "Event Version: " + event.getVersion())); + _producer.send( + new ProducerRecord(topic, event.getVersion(), record), + _kafkaHealthChecker.getKafkaCallBack( + "History Event", "Event Version: " + event.getVersion())); } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java index 8fc89a8ddd5ed..1bfd829617e09 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java @@ -2,6 +2,10 @@ import com.codahale.metrics.MetricRegistry; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.producer.Callback; @@ -10,107 +14,108 @@ import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - @Slf4j @EnableScheduling @Component public class KafkaHealthChecker { - @Value("${kafka.producer.deliveryTimeout}") - private long kafkaProducerDeliveryTimeout; - - private final Set<MessageLog> messagesInProgress = ConcurrentHashMap.newKeySet(); - - public Callback getKafkaCallBack(String eventType, String entityDesc) { - final MessageLog tracking = MessageLog.track(entityDesc, kafkaProducerDeliveryTimeout); - sendMessageStarted(tracking); - return (metadata, e) -> { - sendMessageEnded(tracking); - if (e != null) { - log.error(String.format("Failed to emit %s for entity %s", eventType, entityDesc), e); - MetricUtils.counter(this.getClass(), - MetricRegistry.name("producer_failed_count", eventType.replaceAll(" ", "_"))).inc(); - } else { - log.debug(String.format( - "Successfully emitted %s for entity %s at offset %s, partition %s, topic %s", - eventType, entityDesc, metadata.offset(), metadata.partition(), metadata.topic())); - } - }; + @Value("${kafka.producer.deliveryTimeout}") + private long kafkaProducerDeliveryTimeout; + + private final Set<MessageLog> messagesInProgress = ConcurrentHashMap.newKeySet(); + + public Callback getKafkaCallBack(String eventType, 
String entityDesc) { + final MessageLog tracking = MessageLog.track(entityDesc, kafkaProducerDeliveryTimeout); + sendMessageStarted(tracking); + return (metadata, e) -> { + sendMessageEnded(tracking); + if (e != null) { + log.error(String.format("Failed to emit %s for entity %s", eventType, entityDesc), e); + MetricUtils.counter( + this.getClass(), + MetricRegistry.name("producer_failed_count", eventType.replaceAll(" ", "_"))) + .inc(); + } else { + log.debug( + String.format( + "Successfully emitted %s for entity %s at offset %s, partition %s, topic %s", + eventType, entityDesc, metadata.offset(), metadata.partition(), metadata.topic())); + } + }; + } + + private void sendMessageStarted(MessageLog messageLog) { + messagesInProgress.add(messageLog); + } + + private void sendMessageEnded(MessageLog messageLog) { + messagesInProgress.remove(messageLog); + } + + @Scheduled(cron = "0/60 * * * * ?") + private synchronized void periodicKafkaHealthChecker() { + long moment = System.currentTimeMillis(); + Set<MessageLog> oldItems = + messagesInProgress.stream() + .filter(item -> item.expectedMilli < moment) + .collect(Collectors.toSet()); + + if (oldItems.size() > 0) { + Map<String, Long> itemCounts = + oldItems.stream() + .collect(Collectors.groupingBy(MessageLog::getEntityDesc, Collectors.counting())); + log.error( + String.format( + "Kafka Health Check Failed. Old message(s) were waiting to be sent: %s", itemCounts)); + messagesInProgress.removeAll(oldItems); } + } - private void sendMessageStarted(MessageLog messageLog) { - messagesInProgress.add(messageLog); + @Getter + static class MessageLog { + private final String entityDesc; + private final long uniqueMessageId; + private final long expectedMilli; + private static long lastMoment = 0L; + + public static MessageLog track(String entityDesc, long maxDelayMilli) { + return new MessageLog(entityDesc, maxDelayMilli); } - private void sendMessageEnded(MessageLog messageLog) { - messagesInProgress.remove(messageLog); + private MessageLog(String entityDesc, long maxDelayMilli) { + this.entityDesc = entityDesc; + this.uniqueMessageId = getNextUniqueMoment(); + this.expectedMilli = this.uniqueMessageId + maxDelayMilli; } - @Scheduled(cron = "0/60 * * * * ?") - private synchronized void periodicKafkaHealthChecker() { - long moment = System.currentTimeMillis(); - Set<MessageLog> oldItems = messagesInProgress.stream() - .filter(item -> item.expectedMilli < moment) - .collect(Collectors.toSet()); - - if (oldItems.size() > 0) { - Map<String, Long> itemCounts = oldItems.stream() - .collect(Collectors.groupingBy(MessageLog::getEntityDesc, Collectors.counting())); - log.error(String.format("Kafka Health Check Failed. Old message(s) were waiting to be sent: %s", itemCounts)); - messagesInProgress.removeAll(oldItems); - } + private synchronized long getNextUniqueMoment() { + long moment = System.currentTimeMillis(); + lastMoment = moment != lastMoment ? 
moment : ++lastMoment; + return lastMoment; } - @Getter - static class MessageLog { - private final String entityDesc; - private final long uniqueMessageId; - private final long expectedMilli; - private static long lastMoment = 0L; - - - public static MessageLog track(String entityDesc, long maxDelayMilli) { - return new MessageLog(entityDesc, maxDelayMilli); - } - private MessageLog(String entityDesc, long maxDelayMilli) { - this.entityDesc = entityDesc; - this.uniqueMessageId = getNextUniqueMoment(); - this.expectedMilli = this.uniqueMessageId + maxDelayMilli; - } - - private synchronized long getNextUniqueMoment() { - long moment = System.currentTimeMillis(); - lastMoment = moment != lastMoment ? moment : ++lastMoment; - return lastMoment; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - MessageLog that = (MessageLog) o; - - if (uniqueMessageId != that.uniqueMessageId) { - return false; - } - return entityDesc.equals(that.entityDesc); - } - - @Override - public int hashCode() { - int result = entityDesc.hashCode(); - result = 31 * result + (int) (uniqueMessageId ^ (uniqueMessageId >>> 32)); - return result; - } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + MessageLog that = (MessageLog) o; + + if (uniqueMessageId != that.uniqueMessageId) { + return false; + } + return entityDesc.equals(that.entityDesc); } + @Override + public int hashCode() { + int result = entityDesc.hashCode(); + result = 31 * result + (int) (uniqueMessageId ^ (uniqueMessageId >>> 32)); + return result; + } + } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java index 8b4db36ba27ff..765ee8c0736f2 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java @@ -29,15 +29,16 @@ import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; - /** * A Kafka implementation of {@link BaseMetadataEventProducer}. * - * <p>The topic names that this emits to can be controlled by constructing this with a {@link TopicConvention}. If - * none is given, defaults to a {@link TopicConventionImpl} with the default delimiter of an underscore (_). + * <p>The topic names that this emits to can be controlled by constructing this with a {@link + * TopicConvention}. If none is given, defaults to a {@link TopicConventionImpl} with the default + * delimiter of an underscore (_). */ @Slf4j -public class KafkaMetadataEventProducer<SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> +public class KafkaMetadataEventProducer< + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> extends BaseMetadataEventProducer<SNAPSHOT, ASPECT_UNION, URN> { private final Producer<String, ? 
extends IndexedRecord> _producer; @@ -52,10 +53,11 @@ public class KafkaMetadataEventProducer<SNAPSHOT extends RecordTemplate, ASPECT_ * @param producer The Kafka {@link Producer} to use * @param topicConvention the convention to use to get kafka topic names */ - public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, - @Nonnull Class<ASPECT_UNION> aspectUnionClass, - @Nonnull Producer<String, ? extends IndexedRecord> producer, - @Nonnull TopicConvention topicConvention) { + public KafkaMetadataEventProducer( + @Nonnull Class<SNAPSHOT> snapshotClass, + @Nonnull Class<ASPECT_UNION> aspectUnionClass, + @Nonnull Producer<String, ? extends IndexedRecord> producer, + @Nonnull TopicConvention topicConvention) { this(snapshotClass, aspectUnionClass, producer, topicConvention, null); } @@ -68,11 +70,12 @@ public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, * @param topicConvention the convention to use to get kafka topic names * @param callback The {@link Callback} to invoke when the request is completed */ - public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, - @Nonnull Class<ASPECT_UNION> aspectUnionClass, - @Nonnull Producer<String, ? extends IndexedRecord> producer, - @Nonnull TopicConvention topicConvention, - @Nullable Callback callback) { + public KafkaMetadataEventProducer( + @Nonnull Class<SNAPSHOT> snapshotClass, + @Nonnull Class<ASPECT_UNION> aspectUnionClass, + @Nonnull Producer<String, ? extends IndexedRecord> producer, + @Nonnull TopicConvention topicConvention, + @Nullable Callback callback) { super(snapshotClass, aspectUnionClass); _producer = producer; _callback = Optional.ofNullable(callback); @@ -80,8 +83,8 @@ public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, } @Override - public <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent(@Nonnull URN urn, - @Nonnull ASPECT newValue) { + public <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent( + @Nonnull URN urn, @Nonnull ASPECT newValue) { MetadataChangeEvent metadataChangeEvent = new MetadataChangeEvent(); metadataChangeEvent.setProposedSnapshot(makeSnapshot(urn, newValue)); @@ -93,16 +96,20 @@ record = EventUtils.pegasusToAvroMCE(metadataChangeEvent); } if (_callback.isPresent()) { - _producer.send(new ProducerRecord(_topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record), + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record), _callback.get()); } else { - _producer.send(new ProducerRecord(_topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record)); + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record)); } } @Override - public <ASPECT extends RecordTemplate> void produceMetadataAuditEvent(@Nonnull URN urn, @Nullable ASPECT oldValue, - @Nonnull ASPECT newValue) { + public <ASPECT extends RecordTemplate> void produceMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { MetadataAuditEvent metadataAuditEvent = new MetadataAuditEvent(); metadataAuditEvent.setNewSnapshot(makeSnapshot(urn, newValue)); @@ -118,16 +125,20 @@ record = EventUtils.pegasusToAvroMAE(metadataAuditEvent); } if (_callback.isPresent()) { - _producer.send(new ProducerRecord(_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record), + _producer.send( + new ProducerRecord( + 
_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record), _callback.get()); } else { - _producer.send(new ProducerRecord(_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record)); + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record)); } } @Override - public <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { + public <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { // Aspect Specific MAE not supported. // TODO: Remove references to this class. throw new UnsupportedOperationException(); @@ -139,7 +150,8 @@ private Snapshot makeSnapshot(@Nonnull URN urn, @Nonnull RecordTemplate value) { List<ASPECT_UNION> aspects = new ArrayList<>(); aspects.add(ModelUtils.newAspectUnion(_aspectUnionClass, value)); - RecordUtils.setSelectedRecordTemplateInUnion(snapshot, ModelUtils.newSnapshot(_snapshotClass, urn, aspects)); + RecordUtils.setSelectedRecordTemplateInUnion( + snapshot, ModelUtils.newSnapshot(_snapshotClass, urn, aspects)); return snapshot; } @@ -147,4 +159,4 @@ private Snapshot makeSnapshot(@Nonnull URN urn, @Nonnull RecordTemplate value) { static boolean isValidAspectSpecificTopic(@Nonnull String topic) { return Arrays.stream(Topics.class.getFields()).anyMatch(field -> field.getName().equals(topic)); } -} \ No newline at end of file +} diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java index 2622404d03939..ca17ed4aa12d0 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java @@ -4,7 +4,6 @@ import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.RecordMetadata; - @Slf4j public class KafkaProducerCallback implements Callback { @Override diff --git a/metadata-events/mxe-avro/build.gradle b/metadata-events/mxe-avro/build.gradle index 9d11eeb160ff0..3aebc6bb1004d 100644 --- a/metadata-events/mxe-avro/build.gradle +++ b/metadata-events/mxe-avro/build.gradle @@ -47,4 +47,4 @@ jar { clean { delete 'src' -} \ No newline at end of file +} diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java index df06d1bae28e0..5611e4356bb64 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java @@ -8,30 +8,32 @@ import java.util.Map; import org.apache.avro.Schema; - public class Configs { public static final Map<String, String> FABRIC_SCHEMA_REGISTRY_MAP = - Collections.unmodifiableMap(new HashMap<String, String>() { - { - put("ei", "http://1.schemaregistry.ei4.atd.int.linkedin.com:10252"); - put("corp", "http://1.schemaregistry.corp-lca1.atd.corp.linkedin.com:10252"); - } - }); + Collections.unmodifiableMap( + new HashMap<String, String>() { + { + put("ei", "http://1.schemaregistry.ei4.atd.int.linkedin.com:10252"); + put("corp", "http://1.schemaregistry.corp-lca1.atd.corp.linkedin.com:10252"); + } 
+ }); - public static final Map<String, Schema> TOPIC_SCHEMA_MAP = Collections.unmodifiableMap(new HashMap<String, Schema>() { - { - put(Topics.METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); - put(Topics.METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); - put(Topics.FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); + public static final Map<String, Schema> TOPIC_SCHEMA_MAP = + Collections.unmodifiableMap( + new HashMap<String, Schema>() { + { + put(Topics.METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); + put(Topics.METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); + put(Topics.FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); - put(Topics.DEV_METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); - put(Topics.DEV_METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); - put(Topics.DEV_FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); - } - }); + put(Topics.DEV_METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); + put(Topics.DEV_METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); + put(Topics.DEV_FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); + } + }); private Configs() { // Util class } -} \ No newline at end of file +} diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java index 463abfdeca845..c61330565bcbf 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java @@ -5,65 +5,55 @@ import javax.annotation.Nonnull; import org.apache.avro.specific.SpecificRecord; - /** * The convention for naming kafka topics. * - * <p>Different companies may have different naming conventions or styles for their kafka topics. Namely, companies - * should pick _ or . as a delimiter, but not both, as they collide in metric names. + * <p>Different companies may have different naming conventions or styles for their kafka topics. + * Namely, companies should pick _ or . as a delimiter, but not both, as they collide in metric + * names. */ public interface TopicConvention { /** - * The name of the metadata change event (v4) kafka topic. - * Note that MetadataChangeEvents are deprecated, replaced by {@link MetadataChangeProposal}. + * The name of the metadata change event (v4) kafka topic. Note that MetadataChangeEvents are + * deprecated, replaced by {@link MetadataChangeProposal}. */ @Nonnull @Deprecated String getMetadataChangeEventTopicName(); /** - * The name of the metadata audit event (v4) kafka topic. - * Note that MetadataAuditEvents are deprecated, replaced by {@link MetadataChangeLog}. + * The name of the metadata audit event (v4) kafka topic. Note that MetadataAuditEvents are + * deprecated, replaced by {@link MetadataChangeLog}. */ @Nonnull @Deprecated String getMetadataAuditEventTopicName(); /** - * The name of the failed metadata change event (v4) kafka topic. - * Note that FailedMetadataChangeEvents are deprecated, replaced by {@link FailedMetadataChangeProposal}. + * The name of the failed metadata change event (v4) kafka topic. Note that + * FailedMetadataChangeEvents are deprecated, replaced by {@link FailedMetadataChangeProposal}. */ @Nonnull @Deprecated String getFailedMetadataChangeEventTopicName(); - /** - * The name of the metadata change proposal kafka topic. - */ + /** The name of the metadata change proposal kafka topic. 
*/ @Nonnull String getMetadataChangeProposalTopicName(); - /** - * The name of the metadata change log kafka topic. - */ + /** The name of the metadata change log kafka topic. */ @Nonnull String getMetadataChangeLogVersionedTopicName(); - /** - * The name of the metadata change log kafka topic with limited retention. - */ + /** The name of the metadata change log kafka topic with limited retention. */ @Nonnull String getMetadataChangeLogTimeseriesTopicName(); - /** - * The name of the failed metadata change proposal kafka topic. - */ + /** The name of the failed metadata change proposal kafka topic. */ @Nonnull String getFailedMetadataChangeProposalTopicName(); - /** - * The name of the platform event topic. - */ + /** The name of the platform event topic. */ @Nonnull String getPlatformEventTopicName(); @@ -77,9 +67,7 @@ public interface TopicConvention { @Deprecated String getMetadataChangeEventTopicName(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); - /** - * The name of the DataHub Upgrade history topic. - */ + /** The name of the DataHub Upgrade history topic. */ String getDataHubUpgradeHistoryTopicName(); /** @@ -89,7 +77,8 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class<? extends SpecificRecord> getMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); + Class<? extends SpecificRecord> getMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); /** * Returns the name of the metadata audit event (v5) kafka topic. @@ -108,8 +97,8 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class<? extends SpecificRecord> getMetadataAuditEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); - + Class<? extends SpecificRecord> getMetadataAuditEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); /** * Returns the name of the failed metadata change event (v5) kafka topic. @@ -128,5 +117,6 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class<? extends SpecificRecord> getFailedMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); + Class<? extends SpecificRecord> getFailedMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); } diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java index 3143584bbdcaf..282a015319781 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java @@ -5,14 +5,14 @@ import javax.annotation.Nonnull; import org.apache.avro.specific.SpecificRecord; - /** * Default implementation of a {@link TopicConvention}, which is fully customizable for event names. * - * <p>The newer aspect-entity specific event names are based on a pattern that can also be configured. The pattern is a - * string, which can use {@link #EVENT_TYPE_PLACEHOLDER}, {@link #VERSION_PLACEHOLDER}, {@link #ENTITY_PLACEHOLDER}, and - * {@link #ASPECT_PLACEHOLDER} as placeholders for the event type (MCE, MAE, FMCE, etc), event version, entity name, - * and aspect name, respectively. + * <p>The newer aspect-entity specific event names are based on a pattern that can also be + * configured. 
The pattern is a string, which can use {@link #EVENT_TYPE_PLACEHOLDER}, {@link + * #VERSION_PLACEHOLDER}, {@link #ENTITY_PLACEHOLDER}, and {@link #ASPECT_PLACEHOLDER} as + * placeholders for the event type (MCE, MAE, FMCE, etc), event version, entity name, and aspect + * name, respectively. */ public final class TopicConventionImpl implements TopicConvention { // Placeholders @@ -45,11 +45,17 @@ public final class TopicConventionImpl implements TopicConvention { // v5 patterns private final String _eventPattern; - public TopicConventionImpl(@Nonnull String metadataChangeEventTopicName, @Nonnull String metadataAuditEventTopicName, - @Nonnull String failedMetadataChangeEventTopicName, @Nonnull String metadataChangeProposalTopicName, - @Nonnull String metadataChangeLogVersionedTopicName, @Nonnull String metadataChangeLogTimeseriesTopicName, - @Nonnull String failedMetadataChangeProposalTopicName, @Nonnull String platformEventTopicName, - @Nonnull String eventPattern, @Nonnull String dataHubUpgradeHistoryTopicName) { + public TopicConventionImpl( + @Nonnull String metadataChangeEventTopicName, + @Nonnull String metadataAuditEventTopicName, + @Nonnull String failedMetadataChangeEventTopicName, + @Nonnull String metadataChangeProposalTopicName, + @Nonnull String metadataChangeLogVersionedTopicName, + @Nonnull String metadataChangeLogTimeseriesTopicName, + @Nonnull String failedMetadataChangeProposalTopicName, + @Nonnull String platformEventTopicName, + @Nonnull String eventPattern, + @Nonnull String dataHubUpgradeHistoryTopicName) { _metadataChangeEventTopicName = metadataChangeEventTopicName; _metadataAuditEventTopicName = metadataAuditEventTopicName; _failedMetadataChangeEventTopicName = failedMetadataChangeEventTopicName; @@ -63,9 +69,17 @@ public TopicConventionImpl(@Nonnull String metadataChangeEventTopicName, @Nonnul } public TopicConventionImpl() { - this(Topics.METADATA_CHANGE_EVENT, Topics.METADATA_AUDIT_EVENT, Topics.FAILED_METADATA_CHANGE_EVENT, - Topics.METADATA_CHANGE_PROPOSAL, Topics.METADATA_CHANGE_LOG_VERSIONED, Topics.METADATA_CHANGE_LOG_TIMESERIES, - Topics.FAILED_METADATA_CHANGE_PROPOSAL, Topics.PLATFORM_EVENT, DEFAULT_EVENT_PATTERN, Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME); + this( + Topics.METADATA_CHANGE_EVENT, + Topics.METADATA_AUDIT_EVENT, + Topics.FAILED_METADATA_CHANGE_EVENT, + Topics.METADATA_CHANGE_PROPOSAL, + Topics.METADATA_CHANGE_LOG_VERSIONED, + Topics.METADATA_CHANGE_LOG_TIMESERIES, + Topics.FAILED_METADATA_CHANGE_PROPOSAL, + Topics.PLATFORM_EVENT, + DEFAULT_EVENT_PATTERN, + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME); } @Nonnull @@ -117,15 +131,20 @@ public String getPlatformEventTopicName() { } @Nonnull - private String buildEventName(@Nonnull String eventType, @Nonnull String entityName, @Nonnull String aspectName, + private String buildEventName( + @Nonnull String eventType, + @Nonnull String entityName, + @Nonnull String aspectName, int version) { - return _eventPattern.replace(EVENT_TYPE_PLACEHOLDER, eventType) + return _eventPattern + .replace(EVENT_TYPE_PLACEHOLDER, eventType) .replace(ENTITY_PLACEHOLDER, entityName) .replace(ASPECT_PLACEHOLDER, aspectName) .replace(VERSION_PLACEHOLDER, Integer.toString(version)); } - private String buildEventName(@Nonnull String eventType, @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + private String buildEventName( + @Nonnull String eventType, @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { final String urnName = urn.getClass().getSimpleName(); // Expect URN name to relate to the entity name. 
(EntityName) + "Urn" == (UrnName) final String entityType = urnName.substring(0, urnName.length() - "Urn".length()); @@ -147,7 +166,8 @@ public String getDataHubUpgradeHistoryTopicName() { } @Override - public Class<? extends SpecificRecord> getMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public Class<? extends SpecificRecord> getMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } @@ -159,20 +179,22 @@ public String getMetadataAuditEventTopicName(@Nonnull Urn urn, @Nonnull RecordTe } @Override - public Class<? extends SpecificRecord> getMetadataAuditEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public Class<? extends SpecificRecord> getMetadataAuditEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } @Nonnull @Override - public String getFailedMetadataChangeEventTopicName(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public String getFailedMetadataChangeEventTopicName( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { return buildEventName(FAILED_METADATA_CHANGE_EVENT_TYPE, urn, aspect); } @Override - public Class<? extends SpecificRecord> getFailedMetadataChangeEventType(@Nonnull Urn urn, - @Nonnull RecordTemplate aspect) { + public Class<? extends SpecificRecord> getFailedMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java index 3a9a0812e1031..45bc2364aaa42 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java @@ -19,10 +19,7 @@ public class Topics { public static final String DEV_METADATA_CHANGE_EVENT = "MetadataChangeEvent_v4_dev"; public static final String DEV_FAILED_METADATA_CHANGE_EVENT = "FailedMetadataChangeEvent_v4_dev"; - /** - * aspect-specific MAE topics. - * format : METADATA_AUDIT_EVENT_<URN>_<ASPECT> - */ + /** aspect-specific MAE topics. format : METADATA_AUDIT_EVENT_<URN>_<ASPECT> */ // MAE topics for CorpGroup entity. public static final String METADATA_AUDIT_EVENT_CORPGROUP_CORPGROUPINFO = "MetadataAuditEvent_CorpGroup_CorpGroupInfo_v1"; @@ -30,12 +27,10 @@ public class Topics { // MAE topics for CorpUser entity. public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSEREDITABLEINFO = "MetadataAuditEvent_CorpUser_CorpUserEditableInfo_v2"; - public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSERINFO = "MetadataAuditEvent_CorpUser_CorpUserInfo_v2"; + public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSERINFO = + "MetadataAuditEvent_CorpUser_CorpUserInfo_v2"; - /** - * aspect-specific MCE topics. - * format : METADATA_CHANGE_EVENT_<URN>_<ASPECT> - */ + /** aspect-specific MCE topics. format : METADATA_CHANGE_EVENT_<URN>_<ASPECT> */ // MCE topics for CorpGroup entity. 
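// ---- Illustrative sketch (editor's addition, not part of this patch) ----
// Standalone illustration of the placeholder substitution that buildEventName performs in
// TopicConventionImpl above. The pattern literal and placeholder syntax are hypothetical;
// only the .replace chain mirrors the real code.
static String exampleTopicName() {
  // Yields "MetadataChangeEvent_CorpUser_CorpUserInfo_v1", matching the constant below.
  String pattern = "%EVENT%_%ENTITY%_%ASPECT%_v%VERSION%"; // hypothetical pattern literal
  return pattern
      .replace("%EVENT%", "MetadataChangeEvent")
      .replace("%ENTITY%", "CorpUser")
      .replace("%ASPECT%", "CorpUserInfo")
      .replace("%VERSION%", Integer.toString(1));
}
// ---- end sketch ----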
public static final String METADATA_CHANGE_EVENT_CORPGROUP_CORPGROUPINFO = "MetadataChangeEvent_CorpGroup_CorpGroupInfo_v1"; @@ -46,10 +41,7 @@ public class Topics { public static final String METADATA_CHANGE_EVENT_CORPUSER_CORPUSERINFO = "MetadataChangeEvent_CorpUser_CorpUserInfo_v1"; - /** - * aspect-specific FMCE topics. - * format : FAILED_METADATA_CHANGE_EVENT_<URN>_<ASPECT> - */ + /** aspect-specific FMCE topics. format : FAILED_METADATA_CHANGE_EVENT_<URN>_<ASPECT> */ // FMCE topics for CorpGroup entity. public static final String FAILED_METADATA_CHANGE_EVENT_CORPGROUP_CORPGROUPINFO = "FailedMetadataChangeEvent_CorpGroup_CorpGroupInfo_v1"; @@ -63,4 +55,4 @@ public class Topics { private Topics() { // Util class } -} \ No newline at end of file +} diff --git a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java index 9c95d9f4aabdc..645c2fe210e09 100644 --- a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java +++ b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java @@ -8,10 +8,10 @@ import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.FailedMetadataChangeEvent; import com.linkedin.mxe.FailedMetadataChangeProposal; -import com.linkedin.mxe.MetadataChangeLog; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.MetadataAuditEvent; import com.linkedin.mxe.MetadataChangeEvent; +import com.linkedin.mxe.MetadataChangeLog; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -30,7 +30,6 @@ import org.apache.avro.io.EncoderFactory; import org.apache.avro.specific.SpecificRecord; - public class EventUtils { private static final RecordDataSchema MCE_PEGASUS_SCHEMA = new MetadataChangeEvent().schema(); @@ -43,7 +42,8 @@ public class EventUtils { private static final RecordDataSchema PE_PEGASUS_SCHEMA = new PlatformEvent().schema(); - private static final RecordDataSchema DUHE_PEGASUS_SCHEMA = new DataHubUpgradeHistoryEvent().schema(); + private static final RecordDataSchema DUHE_PEGASUS_SCHEMA = + new DataHubUpgradeHistoryEvent().schema(); private static final Schema ORIGINAL_MCE_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeEvent.avsc"); @@ -69,14 +69,17 @@ public class EventUtils { public static final Schema ORIGINAL_DUHE_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/DataHubUpgradeHistoryEvent.avsc"); - private static final Schema RENAMED_MCE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$; + private static final Schema RENAMED_MCE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$; - private static final Schema RENAMED_MAE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$; + private static final Schema RENAMED_MAE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$; private static final Schema RENAMED_FAILED_MCE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$; - private static final Schema RENAMED_PE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.PlatformEvent.SCHEMA$; + private static final Schema RENAMED_PE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.PlatformEvent.SCHEMA$; private static final Schema RENAMED_MCP_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataChangeProposal.SCHEMA$; @@ -107,79 
+110,102 @@ private static Schema getAvroSchemaFromResource(@Nonnull String resourcePath) { /** * Converts a {@link GenericRecord} MAE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MAE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MAE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataAuditEvent} model */ @Nonnull - public static MetadataAuditEvent avroToPegasusMAE(@Nonnull GenericRecord record) throws IOException { - return new MetadataAuditEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MAE_AVRO_SCHEMA, ORIGINAL_MAE_AVRO_SCHEMA), MAE_PEGASUS_SCHEMA, - ORIGINAL_MAE_AVRO_SCHEMA)); + public static MetadataAuditEvent avroToPegasusMAE(@Nonnull GenericRecord record) + throws IOException { + return new MetadataAuditEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MAE_AVRO_SCHEMA, ORIGINAL_MAE_AVRO_SCHEMA), + MAE_PEGASUS_SCHEMA, + ORIGINAL_MAE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MCE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeEvent} model */ @Nonnull - public static MetadataChangeEvent avroToPegasusMCE(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCE_AVRO_SCHEMA, ORIGINAL_MCE_AVRO_SCHEMA), MCE_PEGASUS_SCHEMA, - ORIGINAL_MCE_AVRO_SCHEMA)); + public static MetadataChangeEvent avroToPegasusMCE(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCE_AVRO_SCHEMA, ORIGINAL_MCE_AVRO_SCHEMA), + MCE_PEGASUS_SCHEMA, + ORIGINAL_MCE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCL into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MCL in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCL in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeLog} model */ @Nonnull - public static MetadataChangeLog avroToPegasusMCL(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeLog(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCL_AVRO_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA), - MCL_PEGASUS_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA)); + public static MetadataChangeLog avroToPegasusMCL(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeLog( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCL_AVRO_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA), + MCL_PEGASUS_SCHEMA, + ORIGINAL_MCL_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCP into the equivalent Pegasus model. 
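// ---- Illustrative sketch (editor's addition, not part of this patch) ----
// Round-tripping an MCP between the Pegasus and Avro models with the helpers in this class.
// The empty proposal stands in for a populated one; both calls throw IOException.
static void roundTripExample() throws IOException {
  MetadataChangeProposal proposal = new MetadataChangeProposal();
  GenericRecord avro = EventUtils.pegasusToAvroMCP(proposal); // Pegasus -> Avro
  MetadataChangeProposal back = EventUtils.avroToPegasusMCP(avro); // Avro -> Pegasus
}
// ---- end sketch ----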
* - * @param record the {@link GenericRecord} that contains the MCP in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCP in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeProposal} model */ @Nonnull - public static MetadataChangeProposal avroToPegasusMCP(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeProposal(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCP_AVRO_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA), - MCP_PEGASUS_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA)); + public static MetadataChangeProposal avroToPegasusMCP(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeProposal( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCP_AVRO_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA), + MCP_PEGASUS_SCHEMA, + ORIGINAL_MCP_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} PE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link PlatformEvent} model */ @Nonnull public static PlatformEvent avroToPegasusPE(@Nonnull GenericRecord record) throws IOException { - return new PlatformEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_PE_AVRO_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA), - PE_PEGASUS_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA)); + return new PlatformEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_PE_AVRO_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA), + PE_PEGASUS_SCHEMA, + ORIGINAL_PE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} PE into the equivalent Pegasus model. 
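// Editor's note (not part of this patch): the helpers in EventUtils pair up as
// avroToPegasus{MAE,MCE,MCL,MCP,PE,DUHE} and pegasusToAvro{MAE,MCE,MCL,MCP,PE,DUHE}, plus
// failed-event variants, one pair per event type, each translating between the Pegasus
// models and the Avro models in the com.linkedin.pegasus2avro namespace.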
* - * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link PlatformEvent} model */ @Nonnull - public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecord record) throws IOException { - return new DataHubUpgradeHistoryEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_DUHE_AVRO_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA), - DUHE_PEGASUS_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA)); + public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecord record) + throws IOException { + return new DataHubUpgradeHistoryEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_DUHE_AVRO_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA), + DUHE_PEGASUS_SCHEMA, + ORIGINAL_DUHE_AVRO_SCHEMA)); } /** @@ -190,9 +216,11 @@ public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecor * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) throws IOException { + public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_MAE_AVRO_SCHEMA); } @@ -204,9 +232,11 @@ public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) throws IOException { + public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_MCL_AVRO_SCHEMA); } @@ -218,9 +248,11 @@ public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) t * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event) throws IOException { + public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_MCP_AVRO_SCHEMA); } @@ -232,26 +264,30 @@ public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal eve * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event) throws IOException { + public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA); return 
renameSchemaNamespace(original, RENAMED_MCE_AVRO_SCHEMA); } /** - * Converts a Pegasus aspect specific MXE into the equivalent Avro model as a {@link GenericRecord}. + * Converts a Pegasus aspect specific MXE into the equivalent Avro model as a {@link + * GenericRecord}. * * @param event the Pegasus aspect specific MXE model * @return the Avro model with com.linkedin.pegasus2avro.mxe namespace * @throws IOException if the conversion fails */ @Nonnull - public static <MXE extends GenericRecord, T extends SpecificRecord> MXE pegasusToAvroAspectSpecificMXE( - @Nonnull Class<T> clazz, @Nonnull RecordTemplate event) - throws NoSuchFieldException, IOException, IllegalAccessException { + public static <MXE extends GenericRecord, T extends SpecificRecord> + MXE pegasusToAvroAspectSpecificMXE(@Nonnull Class<T> clazz, @Nonnull RecordTemplate event) + throws NoSuchFieldException, IOException, IllegalAccessException { final Schema newSchema = (Schema) clazz.getField("SCHEMA$").get(null); final Schema originalSchema = getAvroSchemaFromResource(getAvroResourcePath(clazz)); - final GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), originalSchema); + final GenericRecord original = + DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), originalSchema); return (MXE) renameSchemaNamespace(original, originalSchema, newSchema); } @@ -263,10 +299,12 @@ public static <MXE extends GenericRecord, T extends SpecificRecord> MXE pegasusT * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChangeEvent failedMetadataChangeEvent) - throws IOException { + public static GenericRecord pegasusToAvroFailedMCE( + @Nonnull FailedMetadataChangeEvent failedMetadataChangeEvent) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(failedMetadataChangeEvent.data(), failedMetadataChangeEvent.schema(), + DataTranslator.dataMapToGenericRecord( + failedMetadataChangeEvent.data(), + failedMetadataChangeEvent.schema(), ORIGINAL_FAILED_MCE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_FAILED_MCE_AVRO_SCHEMA); } @@ -282,7 +320,9 @@ public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChange public static GenericRecord pegasusToAvroFailedMCP( @Nonnull FailedMetadataChangeProposal failedMetadataChangeProposal) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(failedMetadataChangeProposal.data(), failedMetadataChangeProposal.schema(), + DataTranslator.dataMapToGenericRecord( + failedMetadataChangeProposal.data(), + failedMetadataChangeProposal.schema(), ORIGINAL_FMCL_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_FMCP_AVRO_SCHEMA); } @@ -297,33 +337,37 @@ public static GenericRecord pegasusToAvroFailedMCP( @Nonnull public static GenericRecord pegasusToAvroPE(@Nonnull PlatformEvent event) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_PE_AVRO_SCHEMA); } /** - * Converts a Pegasus DataHub Upgrade History Event into the equivalent Avro model as a {@link GenericRecord}. + * Converts a Pegasus DataHub Upgrade History Event into the equivalent Avro model as a {@link + * GenericRecord}. 
   *
   * @param event the Pegasus {@link com.linkedin.mxe.DataHubUpgradeHistoryEvent} model
   * @return the Avro model with com.linkedin.pegasus2avro.event namespace
   * @throws IOException if the conversion fails
   */
  @Nonnull
-  public static GenericRecord pegasusToAvroDUHE(@Nonnull DataHubUpgradeHistoryEvent event) throws IOException {
+  public static GenericRecord pegasusToAvroDUHE(@Nonnull DataHubUpgradeHistoryEvent event)
+      throws IOException {
     GenericRecord original =
-        DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_DUHE_AVRO_SCHEMA);
+        DataTranslator.dataMapToGenericRecord(
+            event.data(), event.schema(), ORIGINAL_DUHE_AVRO_SCHEMA);
     return renameSchemaNamespace(original, RENAMED_DUHE_AVRO_SCHEMA);
   }
 
   /**
-   * Converts original MXE into a renamed namespace
-   * Does a double convert that should not be necessary since we're already converting prior to calling this method
-   * in most spots
+   * Converts original MXE into a renamed namespace Does a double convert that should not be
+   * necessary since we're already converting prior to calling this method in most spots
    */
   @Nonnull
   @Deprecated
-  private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema originalSchema,
-      @Nonnull Schema newSchema) throws IOException {
+  private static GenericRecord renameSchemaNamespace(
+      @Nonnull GenericRecord original, @Nonnull Schema originalSchema, @Nonnull Schema newSchema)
+      throws IOException {
     // Step 1: Updates to the latest original schema
     final GenericRecord record = changeSchema(original, original.getSchema(), originalSchema);
 
@@ -332,12 +376,10 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin
     return changeSchema(record, newSchema, newSchema);
   }
 
-  /**
-   * Converts original MXE into a renamed namespace
-   */
+  /** Converts original MXE into a renamed namespace */
   @Nonnull
-  private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema newSchema)
-      throws IOException {
+  private static GenericRecord renameSchemaNamespace(
+      @Nonnull GenericRecord original, @Nonnull Schema newSchema) throws IOException {
     return changeSchema(original, newSchema, newSchema);
   }
 
@@ -345,7 +387,8 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin
   /**
    * Changes the schema of a {@link GenericRecord} to a compatible schema
    *
-   * Achieved by serializing the record using its embedded schema and deserializing it using the new compatible schema.
+   * <p>Achieved by serializing the record using its embedded schema and deserializing it using the
+   * new compatible schema.
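// ---- Illustrative sketch (editor's addition, not part of this patch) ----
// The serialize/re-deserialize trick the Javadoc above describes, in standalone form.
// record, writerSchema, and readerSchema are assumed inputs, as in changeSchema below;
// Avro imports (generic, io packages) are omitted.
static GenericRecord reRead(GenericRecord record, Schema writerSchema, Schema readerSchema)
    throws IOException {
  try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
    // Write with the record's embedded schema...
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, null);
    new GenericDatumWriter<GenericRecord>(record.getSchema()).write(record, encoder);
    encoder.flush();
    // ...then read back with the new, compatible reader schema.
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(os.toByteArray(), null);
    return new GenericDatumReader<GenericRecord>(writerSchema, readerSchema).read(null, decoder);
  }
}
// ---- end sketch ----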
* * @param record the record to update schema for * @param writerSchema the writer schema to use when deserializing @@ -354,8 +397,9 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin * @throws IOException */ @Nonnull - private static GenericRecord changeSchema(@Nonnull GenericRecord record, @Nonnull Schema writerSchema, - @Nonnull Schema readerSchema) throws IOException { + private static GenericRecord changeSchema( + @Nonnull GenericRecord record, @Nonnull Schema writerSchema, @Nonnull Schema readerSchema) + throws IOException { try (ByteArrayOutputStream os = new ByteArrayOutputStream()) { BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, null); DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema()); @@ -374,6 +418,7 @@ private static GenericRecord changeSchema(@Nonnull GenericRecord record, @Nonnul /** * Get Pegasus class from Avro class. + * * @param clazz the aspect specific MXE avro class * @return the Pegasus aspect specific MXE class * @throws Exception @@ -383,6 +428,7 @@ public static Class<?> getPegasusClass(@Nonnull Class<?> clazz) throws ClassNotF } private static String getAvroResourcePath(@Nonnull Class<?> clazz) { - return String.format("avro/%s.avsc", clazz.getCanonicalName().replace(".pegasus2avro", "").replace(".", "/")); + return String.format( + "avro/%s.avsc", clazz.getCanonicalName().replace(".pegasus2avro", "").replace(".", "/")); } } diff --git a/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java b/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java index 66759d4637c18..1318109d476d7 100644 --- a/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java +++ b/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java @@ -1,5 +1,8 @@ package com.linkedin.metadata; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.util.RecordUtils; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.data.template.RecordTemplate; @@ -16,39 +19,53 @@ import org.apache.avro.io.JsonDecoder; import org.testng.annotations.Test; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class EventUtilsTests { @Test public void testAvroToPegasusMAE() throws IOException { - GenericRecord record = genericRecordFromResource("test-avro2pegasus-mae.json", - com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$); + GenericRecord record = + genericRecordFromResource( + "test-avro2pegasus-mae.json", com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$); MetadataAuditEvent mae = EventUtils.avroToPegasusMAE(record); assertEquals( - mae.getNewSnapshot().getDatasetSnapshot().getAspects().get(0).getOwnership().getOwners().get(0).getOwner(), + mae.getNewSnapshot() + .getDatasetSnapshot() + .getAspects() + .get(0) + .getOwnership() + .getOwners() + .get(0) + .getOwner(), new CorpuserUrn("foobar")); } @Test public void testAvroToPegasusMCE() throws IOException { - GenericRecord record = genericRecordFromResource("test-avro2pegasus-mce.json", - com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$); + GenericRecord record = + genericRecordFromResource( + "test-avro2pegasus-mce.json", + com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$); MetadataChangeEvent mce = EventUtils.avroToPegasusMCE(record); assertEquals( - 
mce.getProposedSnapshot().getDatasetSnapshot().getAspects().get(0).getOwnership().getOwners().get(0).getOwner(), + mce.getProposedSnapshot() + .getDatasetSnapshot() + .getAspects() + .get(0) + .getOwnership() + .getOwners() + .get(0) + .getOwner(), new CorpuserUrn("foobar")); } @Test public void testPegasusToAvroMAE() throws IOException { - MetadataAuditEvent event = recordTemplateFromResource("test-pegasus2avro-mae.json", MetadataAuditEvent.class); + MetadataAuditEvent event = + recordTemplateFromResource("test-pegasus2avro-mae.json", MetadataAuditEvent.class); GenericRecord record = EventUtils.pegasusToAvroMAE(event); @@ -58,7 +75,8 @@ public void testPegasusToAvroMAE() throws IOException { @Test public void testPegasusToAvroMCE() throws IOException { - MetadataChangeEvent event = recordTemplateFromResource("test-pegasus2avro-mce.json", MetadataChangeEvent.class); + MetadataChangeEvent event = + recordTemplateFromResource("test-pegasus2avro-mce.json", MetadataChangeEvent.class); GenericRecord record = EventUtils.pegasusToAvroMCE(event); @@ -68,24 +86,27 @@ public void testPegasusToAvroMCE() throws IOException { @Test public void testPegasusToAvroFailedMCE() throws IOException { - FailedMetadataChangeEvent event = recordTemplateFromResource("test-pegasus2avro-fmce.json", FailedMetadataChangeEvent.class); + FailedMetadataChangeEvent event = + recordTemplateFromResource("test-pegasus2avro-fmce.json", FailedMetadataChangeEvent.class); GenericRecord record = EventUtils.pegasusToAvroFailedMCE(event); - assertEquals(record.getSchema(), com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$); + assertEquals( + record.getSchema(), com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$); assertNotNull(record.get("error")); assertNotNull(record.get("metadataChangeEvent")); } - private GenericRecord genericRecordFromResource(String resourcePath, Schema schema) throws IOException { + private GenericRecord genericRecordFromResource(String resourcePath, Schema schema) + throws IOException { InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath); JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, is); DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema); return reader.read(null, decoder); } - private <T extends RecordTemplate> T recordTemplateFromResource(String resourcePath, - Class<? extends RecordTemplate> clazz) throws IOException { + private <T extends RecordTemplate> T recordTemplateFromResource( + String resourcePath, Class<? 
 extends RecordTemplate> clazz) throws IOException { String json = loadJsonFromResource(resourcePath); return (T) RecordUtils.toRecordTemplate(clazz, json); } diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 0bf6b18fa5073..7ae01faaaabdd 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -239,8 +239,6 @@ processResources.dependsOn generateOpenApiPojos sourceSets.main.java.srcDir "${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" -checkstyleMain.exclude '**/generated/**' - clean { project.delete("$projectDir/generated") } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java index 84fe9cef0817c..a899f27a0cb2c 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java @@ -2,21 +2,20 @@ import javax.annotation.Nullable; - public interface Callback { /** - * Called when the client request has completed. - * Completion does not imply success. Inspect the response object to understand if - * this was a successfully processed request or not. + * Called when the client request has completed. Completion does not imply success. Inspect the + * response object to understand if this was a successfully processed request or not. + * * @param response */ void onCompletion(@Nullable MetadataWriteResponse response); /** * Called when the client request has thrown an exception before completion. + * * @param exception */ void onFailure(Throwable exception); - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java index 25bcba5f7d4c6..97c4558933b69 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java @@ -11,61 +11,74 @@ import javax.annotation.Nonnull; import javax.annotation.concurrent.ThreadSafe; - /** - * An interface implemented by all metadata emitters to DataHub. - * Typical usage: - * 1. Construct the emitter using the native constructor or builder for the Emitter. - * 2. Call `emitter.emit(mcpw, callback)` for each event you want to send - * 3. Wait for all events to be sent by inspecting the futures returned by each call or using callbacks - * 4. Call `emitter.close()` to finalize. + * An interface implemented by all metadata emitters to DataHub. Typical usage: 1. Construct the + * emitter using the native constructor or builder for the Emitter. 2. Call `emitter.emit(mcpw, + * callback)` for each event you want to send. 3. Wait for all events to be sent by inspecting the + * futures returned by each call or using callbacks. 4. Call `emitter.close()` to finalize. */ @ThreadSafe public interface Emitter extends Closeable { /** * Asynchronously emit a {@link MetadataChangeProposalWrapper} event. + * * @param mcpw * @param callback if not null, is called from the IO thread. Should be a quick operation.
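Since the callback runs on the IO thread, a minimal Callback implementation consistent with the contract above might look like this (a sketch, not code from this patch; the MetadataWriteResponse accessors are the Lombok-generated getters):

Callback callback =
    new Callback() {
      @Override
      public void onCompletion(MetadataWriteResponse response) {
        // Completion does not imply success: inspect the response itself.
        if (response != null && !response.isSuccess()) {
          System.err.println("Write failed: " + response.getResponseContent());
        }
      }

      @Override
      public void onFailure(Throwable exception) {
        // The request threw before completing.
        exception.printStackTrace();
      }
    };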
- * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw, Callback callback) throws IOException; + Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException; /** * Asynchronously emit a {@link MetadataChangeProposalWrapper} event. + * * @param mcpw - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw) throws IOException { + default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw) + throws IOException { return emit(mcpw, null); } /** * Asynchronously emit a {@link MetadataChangeProposal} event. Prefer using the sibling method - * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to construct. + * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to + * construct. + * * @param mcp * @param callback if not null, is called from the IO thread. Should be a quick operation. - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp, Callback callback) throws IOException; + Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp, Callback callback) + throws IOException; /** * Asynchronously emit a {@link MetadataChangeProposal} event. Prefer using the sibling method - * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to construct. + * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to + * construct. + * * @param mcp - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) throws IOException { + default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) + throws IOException { return emit(mcp, null); } /** * Test that the emitter can establish a valid connection to the DataHub platform - * @return true if a valid connection can be established, false or throws one of the exceptions otherwise + * + * @return true if a valid connection can be established, false or throws one of the exceptions + * otherwise * @throws IOException * @throws ExecutionException * @throws InterruptedException @@ -74,11 +87,12 @@ default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) /** * Asynchronously emit a {@link UpsertAspectRequest}. 
+ * * @param request request with with metadata aspect to upsert into DataHub - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException; - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java index 51126a1cdcbea..89db9738efda6 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java @@ -9,15 +9,16 @@ import lombok.SneakyThrows; import org.apache.http.HttpResponse; - public class MetadataResponseFuture implements Future<MetadataWriteResponse> { private final Future<HttpResponse> requestFuture; private final AtomicReference<MetadataWriteResponse> responseReference; private final CountDownLatch responseLatch; private final ResponseMapper mapper; - public MetadataResponseFuture(Future<HttpResponse> underlyingFuture, - AtomicReference<MetadataWriteResponse> responseAtomicReference, CountDownLatch responseLatch) { + public MetadataResponseFuture( + Future<HttpResponse> underlyingFuture, + AtomicReference<MetadataWriteResponse> responseAtomicReference, + CountDownLatch responseLatch) { this.requestFuture = underlyingFuture; this.responseReference = responseAtomicReference; this.responseLatch = responseLatch; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java index 969ef10c41a24..b6e77556980c1 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java @@ -3,27 +3,19 @@ import lombok.Builder; import lombok.Value; - @Value @Builder public class MetadataWriteResponse { - /** - * True if the client send succeeded and we got a successful response from the server - */ - @Builder.Default - boolean success = true; + /** True if the client send succeeded and we got a successful response from the server */ + @Builder.Default boolean success = true; /** - * If the write failed due to an exception thrown by the server - * and we have access to it, then we store the stack trace here + * If the write failed due to an exception thrown by the server and we have access to it, then we + * store the stack trace here */ String responseContent; - /** - * The underlying response object - * (typically an HTTPResponse or a kafka.ResponseMetadata) - */ + /** The underlying response object (typically an HTTPResponse or a kafka.ResponseMetadata) */ Object underlyingResponse; - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java index aae0e51b6736e..ab866f060b354 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java 
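Putting the four lifecycle steps from the Emitter javadoc together, a hedged sketch against the FileEmitter whose diff follows; `mcpw` is assumed to be a prebuilt MetadataChangeProposalWrapper, and the enclosing method is assumed to declare the checked exceptions:

try (FileEmitter emitter =
    new FileEmitter(FileEmitterConfig.builder().fileName("metadata.json").build())) {
  Future<MetadataWriteResponse> future = emitter.emit(mcpw); // step 2: emit each event
  MetadataWriteResponse response = future.get(); // step 3: block until the write lands
  if (!response.isSuccess()) {
    System.err.println(response.getResponseContent());
  }
} // step 4: close() runs via try-with-resources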
@@ -1,34 +1,31 @@ package datahub.client.file; -import com.fasterxml.jackson.core.StreamReadConstraints; -import java.io.BufferedWriter; -import java.io.FileWriter; -import java.io.IOException; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; +import static com.linkedin.metadata.Constants.*; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.util.DefaultIndenter; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.data.template.JacksonDataTemplateCodec; import com.linkedin.mxe.MetadataChangeProposal; - import datahub.client.Callback; import datahub.client.Emitter; import datahub.client.MetadataWriteResponse; import datahub.event.EventFormatter; import datahub.event.MetadataChangeProposalWrapper; import datahub.event.UpsertAspectRequest; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class FileEmitter implements Emitter { @@ -45,22 +42,27 @@ public class FileEmitter implements Emitter { /** * The default constructor - * + * * @param config */ public FileEmitter(FileEmitterConfig config) { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); this.config = config; this.eventFormatter = this.config.getEventFormatter(); - DefaultPrettyPrinter pp = new DefaultPrettyPrinter() - .withObjectIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)) - .withArrayIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)); + DefaultPrettyPrinter pp = + new DefaultPrettyPrinter() + .withObjectIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)) + .withArrayIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)); this.dataTemplateCodec.setPrettyPrinter(pp); try { @@ -75,33 +77,37 @@ public FileEmitter(FileEmitterConfig config) { this.wroteSomething = false; log.debug("Emitter created successfully for " + this.config.getFileName()); - this.cachedSuccessFuture = new Future<MetadataWriteResponse>() { - @Override - public boolean cancel(boolean mayInterruptIfRunning) { - return false; - } - - @Override - public MetadataWriteResponse get() throws InterruptedException, ExecutionException { - return MetadataWriteResponse.builder().success(true).responseContent("MCP witten to File").build(); - } - - @Override - public 
 MetadataWriteResponse get(long timeout, TimeUnit unit) - throws InterruptedException, ExecutionException, TimeoutException { - return this.get(); - } - - @Override - public boolean isCancelled() { - return false; - } - - @Override - public boolean isDone() { - return true; - } - }; + this.cachedSuccessFuture = + new Future<MetadataWriteResponse>() { + @Override + public boolean cancel(boolean mayInterruptIfRunning) { + return false; + } + + @Override + public MetadataWriteResponse get() throws InterruptedException, ExecutionException { + return MetadataWriteResponse.builder() + .success(true) + .responseContent("MCP written to File") + .build(); + } + + @Override + public MetadataWriteResponse get(long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException, TimeoutException { + return this.get(); + } + + @Override + public boolean isCancelled() { + return false; + } + + @Override + public boolean isDone() { + return true; + } + }; } @Override @@ -114,13 +120,15 @@ public void close() throws IOException { } @Override - public Future<MetadataWriteResponse> emit(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, - Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit( + @SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException { return emit(this.eventFormatter.convert(mcpw), callback); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback callback) + throws IOException { if (this.closed.get()) { String errorMsg = "File Emitter is already closed."; log.error(errorMsg); @@ -167,7 +175,8 @@ public boolean testConnection() throws IOException, ExecutionException, Interrup } @Override - public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) + throws IOException { throw new UnsupportedOperationException("UpsertAspectRequest not relevant for File Emitter"); } @@ -185,8 +194,8 @@ public MetadataWriteResponse get() throws InterruptedException, ExecutionExcepti } @Override - public MetadataWriteResponse get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, - TimeoutException { + public MetadataWriteResponse get(long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException, TimeoutException { return this.get(); } @@ -199,8 +208,6 @@ public boolean isCancelled() { public boolean isDone() { return true; } - }; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java index c89edef81ef5e..61ee12d88824d 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java @@ -7,10 +7,9 @@ @Value @Builder public class FileEmitterConfig { + @Builder.Default @lombok.NonNull private final String fileName = null; + @Builder.Default - @lombok.NonNull - private final String fileName = null; - @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); - + private final EventFormatter eventFormatter
= + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java index 6212e57470be4..0d0341562e7dd 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java @@ -1,16 +1,13 @@ package datahub.client.kafka; -import java.io.IOException; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericRecord; - import com.google.common.annotations.VisibleForTesting; import com.linkedin.mxe.MetadataChangeProposal; - import datahub.event.EventFormatter; import datahub.event.MetadataChangeProposalWrapper; +import java.io.IOException; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; class AvroSerializer { @@ -20,8 +17,12 @@ class AvroSerializer { private final EventFormatter _eventFormatter; public AvroSerializer() throws IOException { - _recordSchema = new Schema.Parser() - .parse(this.getClass().getClassLoader().getResourceAsStream("MetadataChangeProposal.avsc")); + _recordSchema = + new Schema.Parser() + .parse( + this.getClass() + .getClassLoader() + .getResourceAsStream("MetadataChangeProposal.avsc")); _genericAspectSchema = this._recordSchema.getField("aspect").schema().getTypes().get(1); _changeTypeEnumSchema = this._recordSchema.getField("changeType").schema(); _eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); @@ -32,7 +33,8 @@ Schema getRecordSchema() { return _recordSchema; } - public GenericRecord serialize(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw) throws IOException { + public GenericRecord serialize(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw) + throws IOException { return serialize(_eventFormatter.convert(mcpw)); } @@ -45,7 +47,8 @@ public GenericRecord serialize(MetadataChangeProposal mcp) throws IOException { genericRecord.put("aspect", genericAspect); genericRecord.put("aspectName", mcp.getAspectName()); genericRecord.put("entityType", mcp.getEntityType()); - genericRecord.put("changeType", new GenericData.EnumSymbol(_changeTypeEnumSchema, mcp.getChangeType())); + genericRecord.put( + "changeType", new GenericData.EnumSymbol(_changeTypeEnumSchema, mcp.getChangeType())); return genericRecord; } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java index 45528f79fad19..ba310de14813e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java @@ -1,5 +1,11 @@ package datahub.client.kafka; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.Emitter; +import datahub.client.MetadataWriteResponse; +import datahub.event.MetadataChangeProposalWrapper; +import datahub.event.UpsertAspectRequest; import java.io.IOException; import java.util.List; import java.util.Properties; @@ -7,7 +13,7 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.TimeoutException; - +import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.ListTopicsOptions; @@ -16,15 +22,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.Emitter; -import datahub.client.MetadataWriteResponse; -import datahub.event.MetadataChangeProposalWrapper; -import datahub.event.UpsertAspectRequest; -import lombok.extern.slf4j.Slf4j; - @Slf4j public class KafkaEmitter implements Emitter { @@ -45,9 +42,11 @@ public KafkaEmitter(KafkaEmitterConfig config) throws IOException { this.config = config; kafkaConfigProperties = new Properties(); kafkaConfigProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.config.getBootstrap()); - kafkaConfigProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + kafkaConfigProperties.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.class); - kafkaConfigProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + kafkaConfigProperties.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class); kafkaConfigProperties.put("schema.registry.url", this.config.getSchemaRegistryUrl()); kafkaConfigProperties.putAll(config.getSchemaRegistryConfig()); @@ -59,28 +58,31 @@ public KafkaEmitter(KafkaEmitterConfig config) throws IOException { @Override public void close() throws IOException { producer.close(); - } @Override - public Future<MetadataWriteResponse> emit(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, - Callback datahubCallback) throws IOException { + public Future<MetadataWriteResponse> emit( + @SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, Callback datahubCallback) + throws IOException { return emit(this.config.getEventFormatter().convert(mcpw), datahubCallback); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback datahubCallback) throws IOException { + public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback datahubCallback) + throws IOException { GenericRecord genricRecord = _avroSerializer.serialize(mcp); - ProducerRecord<Object, Object> record = new ProducerRecord<>(KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, - mcp.getEntityUrn().toString(), genricRecord); - org.apache.kafka.clients.producer.Callback callback = new org.apache.kafka.clients.producer.Callback() { - - @Override - public void onCompletion(RecordMetadata metadata, Exception exception) { - MetadataWriteResponse response = mapResponse(metadata, exception); - datahubCallback.onCompletion(response); - } - }; + ProducerRecord<Object, Object> record = + new ProducerRecord<>( + KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, mcp.getEntityUrn().toString(), genricRecord); + org.apache.kafka.clients.producer.Callback callback = + new org.apache.kafka.clients.producer.Callback() { + + @Override + public void onCompletion(RecordMetadata metadata, Exception exception) { + MetadataWriteResponse response = mapResponse(metadata, exception); + datahubCallback.onCompletion(response); + } + }; log.debug("Emit: topic: {} \n record: {}", KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, record); Future<RecordMetadata> future = this.producer.send(record, callback); return mapFuture(future); @@ -117,14 +119,17 @@ 
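For orientation, a construction sketch under local-development assumptions; the endpoints are simply the KafkaEmitterConfig defaults shown further below, and the caller is assumed to declare the checked exceptions:

KafkaEmitterConfig config =
    KafkaEmitterConfig.builder()
        .bootstrap("localhost:9092") // Kafka broker(s)
        .schemaRegistryUrl("http://localhost:8081") // Confluent schema registry
        .build();
KafkaEmitter emitter = new KafkaEmitter(config);
if (emitter.testConnection()) { // lists topics via AdminClient
  emitter.emit(mcpw); // mcpw assumed prebuilt
}
emitter.close();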
public boolean isDone() { return future.isDone(); } }; - } @Override public boolean testConnection() throws IOException, ExecutionException, InterruptedException { try (AdminClient client = AdminClient.create(this.kafkaConfigProperties)) { - log.info("Available topics:" - + client.listTopics(new ListTopicsOptions().timeoutMs(ADMIN_CLIENT_TIMEOUT_MS)).listings().get()); + log.info( + "Available topics:" + + client + .listTopics(new ListTopicsOptions().timeoutMs(ADMIN_CLIENT_TIMEOUT_MS)) + .listings() + .get()); } catch (ExecutionException ex) { log.error("Kafka is not available, timed out after {} ms", ADMIN_CLIENT_TIMEOUT_MS); return false; @@ -133,7 +138,8 @@ public boolean testConnection() throws IOException, ExecutionException, Interrup } @Override - public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) + throws IOException { throw new UnsupportedOperationException("UpsertAspectRequest cannot be sent over Kafka"); } @@ -156,5 +162,4 @@ private static MetadataWriteResponse mapResponse(RecordMetadata metadata, Except public Properties getKafkaConfgiProperties() { return kafkaConfigProperties; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java index 9452dd5686ac7..c0a5df3bddf37 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java @@ -1,12 +1,11 @@ package datahub.client.kafka; +import datahub.event.EventFormatter; import java.io.InputStream; import java.util.Collections; import java.util.Map; import java.util.Properties; import java.util.function.Consumer; - -import datahub.event.EventFormatter; import lombok.Builder; import lombok.Value; import lombok.extern.slf4j.Slf4j; @@ -18,24 +17,22 @@ public class KafkaEmitterConfig { public static final String CLIENT_VERSION_PROPERTY = "clientVersion"; + @Builder.Default private final String bootstrap = "localhost:9092"; + @Builder.Default private final String schemaRegistryUrl = "http://localhost:8081"; + + @Builder.Default private final Map<String, String> schemaRegistryConfig = Collections.emptyMap(); + @Builder.Default private final Map<String, String> producerConfig = Collections.emptyMap(); + @Builder.Default - private final String bootstrap = "localhost:9092"; - @Builder.Default - private final String schemaRegistryUrl = "http://localhost:8081"; - - @Builder.Default - private final Map<String, String> schemaRegistryConfig = Collections.emptyMap(); - @Builder.Default - private final Map<String, String> producerConfig = Collections.emptyMap(); - - @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); - + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); + public static class KafkaEmitterConfigBuilder { @SuppressWarnings("unused") private String getVersion() { - try (InputStream foo = this.getClass().getClassLoader().getResourceAsStream("client.properties")) { + try (InputStream foo = + this.getClass().getClassLoader().getResourceAsStream("client.properties")) { Properties properties = new Properties(); properties.load(foo); return 
properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); @@ -49,7 +46,5 @@ public KafkaEmitterConfigBuilder with(Consumer<KafkaEmitterConfigBuilder> builde builderFunction.accept(this); return this; } - } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java index bf40addef6505..943aaefec469b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java @@ -1,5 +1,7 @@ package datahub.client.patch; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.linkedin.common.urn.Urn; @@ -13,9 +15,6 @@ import org.apache.commons.lang3.tuple.ImmutableTriple; import org.apache.http.entity.ContentType; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - public abstract class AbstractMultiFieldPatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> { public static final String OP_KEY = "op"; @@ -27,6 +26,7 @@ public abstract class AbstractMultiFieldPatchBuilder<T extends AbstractMultiFiel /** * Builder method + * * @return a {@link MetadataChangeProposal} constructed from the builder's properties */ public MetadataChangeProposal build() { @@ -41,6 +41,7 @@ public MetadataChangeProposal build() { /** * Sets the target entity urn to be updated by this patch + * * @param urn The target entity whose aspect is to be patched by this update * @return this PatchBuilder subtype's instance */ @@ -52,18 +53,21 @@ public T urn(Urn urn) { /** * The aspect name associated with this builder + * * @return aspect name */ protected abstract String getAspectName(); /** * Returns the String representation of the Entity type associated with this aspect + * * @return entity type name */ protected abstract String getEntityType(); /** * Overrides basic behavior to construct multiple patches based on properties + * * @return a JsonPatch wrapped by GenericAspect */ protected GenericAspect buildPatch() { @@ -73,9 +77,14 @@ protected GenericAspect buildPatch() { ArrayNode patches = instance.arrayNode(); List<ImmutableTriple<String, String, JsonNode>> triples = getPathValues(); - triples.forEach(triple -> patches.add(instance.objectNode().put(OP_KEY, triple.left) - .put(PATH_KEY, triple.middle) - .set(VALUE_KEY, triple.right))); + triples.forEach( + triple -> + patches.add( + instance + .objectNode() + .put(OP_KEY, triple.left) + .put(PATH_KEY, triple.middle) + .set(VALUE_KEY, triple.right))); GenericAspect genericAspect = new GenericAspect(); genericAspect.setContentType(ContentType.APPLICATION_JSON.getMimeType()); @@ -85,7 +94,9 @@ protected GenericAspect buildPatch() { } /** - * Constructs a list of Op, Path, Value triples to create as patches. Not idempotent and should not be called more than once + * Constructs a list of Op, Path, Value triples to create as patches. 
Not idempotent and should + * not be called more than once + * * @return list of patch precursor triples */ protected List<ImmutableTriple<String, String, JsonNode>> getPathValues() { diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java index 8e8b5e324586f..ac93fd24fee02 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java @@ -2,16 +2,13 @@ import lombok.Getter; - public enum PatchOperationType { ADD("add"), REMOVE("remove"); - @Getter - private final String value; + @Getter private final String value; PatchOperationType(String value) { this.value = value; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java index 34618ddba7c5e..e621aaf57ff97 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java @@ -1,5 +1,7 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -10,10 +12,8 @@ import java.util.Map; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - -public class CustomPropertiesPatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> implements IntermediatePatchBuilder<T> { +public class CustomPropertiesPatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> + implements IntermediatePatchBuilder<T> { public static final String CUSTOM_PROPERTIES_BASE_PATH = "/customProperties"; @@ -26,35 +26,46 @@ public CustomPropertiesPatchBuilder(T parentBuilder) { /** * Add a property to a custom properties field + * * @param key * @param value * @return */ public CustomPropertiesPatchBuilder<T> addProperty(String key, String value) { - operations.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, - instance.textNode(value))); + operations.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + CUSTOM_PROPERTIES_BASE_PATH + "/" + key, + instance.textNode(value))); return this; } /** - * Remove a property from a custom properties field. If the property doesn't exist, this is a no-op. + * Remove a property from a custom properties field. If the property doesn't exist, this is a + * no-op. 
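To make the op/path/value mechanics concrete, a sketch of what these methods queue up, using the DataFlowInfoPatchBuilder introduced later in this patch as the parent builder; the JSON shown is the approximate shape buildPatch() serializes, not captured output:

CustomPropertiesPatchBuilder<DataFlowInfoPatchBuilder> props =
    new CustomPropertiesPatchBuilder<>(new DataFlowInfoPatchBuilder());
props.addProperty("team", "growth"); // queues: add /customProperties/team
props.removeProperty("tmp"); // queues: remove /customProperties/tmp
// Each triple becomes roughly:
//   { "op": "add",    "path": "/customProperties/team", "value": "growth" }
//   { "op": "remove", "path": "/customProperties/tmp",  "value": null }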
+ * * @param key * @return */ public CustomPropertiesPatchBuilder<T> removeProperty(String key) { - operations.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, null)); + operations.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, null)); return this; } /** * Fully replace the properties of the target aspect + * * @param properties * @return */ public CustomPropertiesPatchBuilder<T> setProperties(Map<String, String> properties) { ObjectNode propertiesNode = instance.objectNode(); properties.forEach((key, value) -> propertiesNode.set(key, instance.textNode(value))); - operations.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH, propertiesNode)); + operations.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH, propertiesNode)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java index 6cebee0ac1265..84db0ba307cf2 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TagUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -8,10 +11,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class GlobalTagsPatchBuilder extends AbstractMultiFieldPatchBuilder<GlobalTagsPatchBuilder> { private static final String BASE_PATH = "/tags/"; @@ -20,6 +19,7 @@ public class GlobalTagsPatchBuilder extends AbstractMultiFieldPatchBuilder<Globa /** * Adds a tag with an optional context string + * * @param urn required * @param context optional * @return @@ -49,7 +49,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java index 9f937503384fc..6f31025406b1b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import 
 com.linkedin.common.urn.GlossaryTermUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -8,11 +11,8 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class GlossaryTermsPatchBuilder extends AbstractMultiFieldPatchBuilder<GlossaryTermsPatchBuilder> { +public class GlossaryTermsPatchBuilder + extends AbstractMultiFieldPatchBuilder<GlossaryTermsPatchBuilder> { private static final String BASE_PATH = "/glossaryTerms/"; private static final String URN_KEY = "urn"; @@ -20,6 +20,7 @@ public class GlossaryTermsPatchBuilder extends AbstractMultiFieldPatchBuilder<Gl /** * Adds a term with an optional context string + * * @param urn required * @param context optional * @return @@ -49,7 +50,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java index 33fc8b68d9c26..20e0c930a8c95 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; @@ -8,10 +11,6 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipPatchBuilder extends AbstractMultiFieldPatchBuilder<OwnershipPatchBuilder> { private static final String BASE_PATH = "/owners/"; @@ -23,33 +22,39 @@ public OwnershipPatchBuilder addOwner(@Nonnull Urn owner, @Nonnull OwnershipType value.put(OWNER_KEY, owner.toString()); value.put(TYPE_KEY, type.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + owner + "/" + type, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + owner + "/" + type, value)); return this; } /** * Remove all ownership types for an owner + * * @param owner * @return */ public OwnershipPatchBuilder removeOwner(@Nonnull Urn owner) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + owner, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + owner, null)); return this; } /** - * Removes a specific ownership type for a particular owner, a single owner may have multiple ownership types + * Removes a specific ownership type for a particular owner; a single owner may have multiple + * ownership types + * * @param owner * @param type * @return */ - public
OwnershipPatchBuilder removeOwnershipType(@Nonnull Urn owner, @Nonnull OwnershipType type) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + owner + "/" + type, null)); + public OwnershipPatchBuilder removeOwnershipType( + @Nonnull Urn owner, @Nonnull OwnershipType type) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + owner + "/" + type, null)); return this; } @@ -61,7 +66,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java index 3161eb492dff5..9e55ab4fc6db4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java @@ -1,23 +1,23 @@ package datahub.client.patch.dataflow; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; import datahub.client.patch.common.CustomPropertiesPatchBuilder; +import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<DataFlowInfoPatchBuilder> +public class DataFlowInfoPatchBuilder + extends AbstractMultiFieldPatchBuilder<DataFlowInfoPatchBuilder> implements CustomPropertiesPatchBuilderSupport<DataFlowInfoPatchBuilder> { public static final String BASE_PATH = "/"; @@ -30,28 +30,41 @@ public class DataFlowInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<Dat public static final String TIME_KEY = "time"; public static final String ACTOR_KEY = "actor"; - private CustomPropertiesPatchBuilder<DataFlowInfoPatchBuilder> customPropertiesPatchBuilder = new CustomPropertiesPatchBuilder<>(this); + private CustomPropertiesPatchBuilder<DataFlowInfoPatchBuilder> customPropertiesPatchBuilder = + new CustomPropertiesPatchBuilder<>(this); public DataFlowInfoPatchBuilder setName(@Nonnull String name) { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); return this; } public DataFlowInfoPatchBuilder setDescription(@Nullable String description) { if 
(description == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } public DataFlowInfoPatchBuilder setProject(@Nullable String project) { if (project == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + PROJECT_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + PROJECT_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + PROJECT_KEY, instance.textNode(project))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + PROJECT_KEY, + instance.textNode(project))); } return this; } @@ -59,28 +72,35 @@ public DataFlowInfoPatchBuilder setProject(@Nullable String project) { public DataFlowInfoPatchBuilder setCreated(@Nullable TimeStamp created) { if (created == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); } else { ObjectNode createdNode = instance.objectNode(); createdNode.put(TIME_KEY, created.getTime()); if (created.getActor() != null) { createdNode.put(ACTOR_KEY, created.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); } return this; } public DataFlowInfoPatchBuilder setLastModified(@Nullable TimeStamp lastModified) { if (lastModified == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); } ObjectNode lastModifiedNode = instance.objectNode(); lastModifiedNode.put(TIME_KEY, lastModified.getTime()); if (lastModified.getActor() != null) { lastModifiedNode.put(ACTOR_KEY, lastModified.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java index 96e9c31288966..581616f54e9b9 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.datajob; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import 
com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; @@ -14,10 +17,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class DataJobInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<DataJobInfoPatchBuilder> implements CustomPropertiesPatchBuilderSupport<DataJobInfoPatchBuilder> { @@ -37,62 +36,80 @@ public class DataJobInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<Data new CustomPropertiesPatchBuilder<>(this); public DataJobInfoPatchBuilder setName(@Nonnull String name) { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); return this; } public DataJobInfoPatchBuilder setDescription(@Nullable String description) { if (description == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } - public DataJobInfoPatchBuilder setType(@Nonnull String type) { ObjectNode union = instance.objectNode(); union.set("string", instance.textNode(type)); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + TYPE_KEY, union)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + TYPE_KEY, union)); return this; } public DataJobInfoPatchBuilder setFlowUrn(@Nullable DataFlowUrn flowUrn) { if (flowUrn == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + FLOW_URN_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + FLOW_URN_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + FLOW_URN_KEY, - instance.textNode(flowUrn.toString()))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + FLOW_URN_KEY, + instance.textNode(flowUrn.toString()))); } return this; } public DataJobInfoPatchBuilder setCreated(@Nullable TimeStamp created) { if (created == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); } else { ObjectNode createdNode = instance.objectNode(); createdNode.put(TIME_KEY, created.getTime()); if (created.getActor() != null) { createdNode.put(ACTOR_KEY, created.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); } return this; } public DataJobInfoPatchBuilder setLastModified(@Nullable TimeStamp lastModified) { if (lastModified == null) { - 
pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); } else { ObjectNode lastModifiedNode = instance.objectNode(); lastModifiedNode.put(TIME_KEY, lastModified.getTime()); if (lastModified.getActor() != null) { lastModifiedNode.put(ACTOR_KEY, lastModified.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); } return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java index 1ff6e817e40cf..0fb0454533fc0 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.datajob; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; import com.linkedin.common.Edge; @@ -12,11 +15,8 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobInputOutputPatchBuilder extends AbstractMultiFieldPatchBuilder<DataJobInputOutputPatchBuilder> { +public class DataJobInputOutputPatchBuilder + extends AbstractMultiFieldPatchBuilder<DataJobInputOutputPatchBuilder> { private static final String INPUT_DATA_JOB_EDGES_PATH_START = "/inputDatajobEdges/"; private static final String INPUT_DATASET_EDGES_PATH_START = "/inputDatasetEdges/"; private static final String OUTPUT_DATASET_EDGES_PATH_START = "/outputDatasetEdges/"; @@ -39,65 +39,96 @@ public class DataJobInputOutputPatchBuilder extends AbstractMultiFieldPatchBuild public DataJobInputOutputPatchBuilder addInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { ObjectNode value = createEdgeValue(dataJobUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, + value)); return this; } public DataJobInputOutputPatchBuilder removeInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addInputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { ObjectNode value = createEdgeValue(datasetUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), 
INPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); return this; } public DataJobInputOutputPatchBuilder removeInputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + INPUT_DATASET_EDGES_PATH_START + datasetUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addOutputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { ObjectNode value = createEdgeValue(datasetUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, + value)); return this; } public DataJobInputOutputPatchBuilder removeOutputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addInputDatasetField(@Nonnull Urn urn) { TextNode textNode = instance.textNode(urn.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, textNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, textNode)); return this; } public DataJobInputOutputPatchBuilder removeInputDatasetField(@Nonnull Urn urn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, null)); return this; } public DataJobInputOutputPatchBuilder addOutputDatasetField(@Nonnull Urn urn) { TextNode textNode = instance.textNode(urn.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, textNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, textNode)); return this; } public DataJobInputOutputPatchBuilder removeOutputDatasetField(@Nonnull Urn urn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, null)); return this; } // Full Edge modification - public DataJobInputOutputPatchBuilder addEdge(@Nonnull Edge edge, @Nonnull LineageDirection direction) { + public DataJobInputOutputPatchBuilder addEdge( + @Nonnull Edge edge, @Nonnull LineageDirection direction) { ObjectNode value = createEdgeValue(edge); String path = getEdgePath(edge, direction); @@ -105,7 +136,8 @@ public DataJobInputOutputPatchBuilder addEdge(@Nonnull Edge edge, @Nonnull Linea return this; } - public DataJobInputOutputPatchBuilder removeEdge(@Nonnull Edge edge, @Nonnull LineageDirection direction) { + public DataJobInputOutputPatchBuilder removeEdge( + @Nonnull Edge edge, @Nonnull LineageDirection direction) { String path = getEdgePath(edge, direction); pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), path, null)); @@ -115,11 +147,9 @@ 
public DataJobInputOutputPatchBuilder removeEdge(@Nonnull Edge edge, @Nonnull Li private ObjectNode createEdgeValue(@Nonnull Urn urn) { ObjectNode value = instance.objectNode(); ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()) - .put(ACTOR_KEY, UNKNOWN_ACTOR); + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - value.put(DESTINATION_URN_KEY, urn.toString()) - .set(LAST_MODIFIED_KEY, auditStamp); + value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp); value.set(CREATED_KEY, auditStamp); return value; @@ -151,11 +181,11 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { lastModified .put(TIME_KEY, edge.getLastModified().getTime()) .put(ACTOR_KEY, edge.getLastModified().getActor().toString()); - if (edge.getLastModified() .getImpersonator() != null) { + if (edge.getLastModified().getImpersonator() != null) { lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString()); } - if (edge.getLastModified() .getMessage() != null) { - lastModified.put(MESSAGE_KEY, edge.getLastModified() .getMessage()); + if (edge.getLastModified().getMessage() != null) { + lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage()); } } value.set(LAST_MODIFIED_KEY, lastModified); @@ -171,12 +201,13 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString()); } - return value; } /** - * Determines Edge path based on supplied Urn, if not a valid entity type throws IllegalArgumentException + * Determines Edge path based on supplied Urn, if not a valid entity type throws + * IllegalArgumentException + * * @param edge * @return * @throws IllegalArgumentException if destinationUrn is an invalid entity type @@ -184,21 +215,25 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { private String getEdgePath(@Nonnull Edge edge, LineageDirection direction) { Urn destinationUrn = edge.getDestinationUrn(); - if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.UPSTREAM.equals(direction)) { + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.UPSTREAM.equals(direction)) { return INPUT_DATASET_EDGES_PATH_START + destinationUrn; } - if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.DOWNSTREAM.equals(direction)) { + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.DOWNSTREAM.equals(direction)) { return INPUT_DATASET_EDGES_PATH_START + destinationUrn; } - if (DATA_JOB_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.UPSTREAM.equals(direction)) { + if (DATA_JOB_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.UPSTREAM.equals(direction)) { return INPUT_DATA_JOB_EDGES_PATH_START + destinationUrn; } // TODO: Output Data Jobs not supported by aspect, add here if this changes - throw new IllegalArgumentException(String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); + throw new IllegalArgumentException( + String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); } @Override @@ -210,5 +245,4 @@ protected String getAspectName() { protected String getEntityType() { return DATA_JOB_ENTITY_NAME; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java 
b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java index d8c9b9308ae57..f4329c84f33ff 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; @@ -11,11 +14,8 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetPropertiesPatchBuilder extends AbstractMultiFieldPatchBuilder<DatasetPropertiesPatchBuilder> +public class DatasetPropertiesPatchBuilder + extends AbstractMultiFieldPatchBuilder<DatasetPropertiesPatchBuilder> implements CustomPropertiesPatchBuilderSupport<DatasetPropertiesPatchBuilder> { public static final String BASE_PATH = "/"; @@ -29,62 +29,78 @@ public class DatasetPropertiesPatchBuilder extends AbstractMultiFieldPatchBuilde private CustomPropertiesPatchBuilder<DatasetPropertiesPatchBuilder> customPropertiesPatchBuilder = new CustomPropertiesPatchBuilder<>(this); - public DatasetPropertiesPatchBuilder setExternalUrl(@Nullable String externalUrl) { if (externalUrl == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + EXTERNAL_URL_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + EXTERNAL_URL_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + EXTERNAL_URL_KEY, - instance.textNode(externalUrl))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + EXTERNAL_URL_KEY, + instance.textNode(externalUrl))); } return this; } public DatasetPropertiesPatchBuilder setName(@Nullable String name) { if (name == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + NAME_KEY, null)); + this.pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + NAME_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); } return this; } public DatasetPropertiesPatchBuilder setQualifiedName(@Nullable String qualifiedName) { if (qualifiedName == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + QUALIFIED_NAME_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + QUALIFIED_NAME_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + QUALIFIED_NAME_KEY, - instance.textNode(qualifiedName))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + QUALIFIED_NAME_KEY, + instance.textNode(qualifiedName))); } return this; } public 
DatasetPropertiesPatchBuilder setDescription(@Nullable String description) { if (description == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + DESCRIPTION_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } public DatasetPropertiesPatchBuilder setUri(@Nullable String uri) { if (uri == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + URI_KEY, null)); + this.pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + URI_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + URI_KEY, instance.textNode(uri))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + URI_KEY, instance.textNode(uri))); } return this; } @Override - public DatasetPropertiesPatchBuilder addCustomProperty(@Nonnull String key, @Nonnull String value) { + public DatasetPropertiesPatchBuilder addCustomProperty( + @Nonnull String key, @Nonnull String value) { this.customPropertiesPatchBuilder.addProperty(key, value); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java index 8b8dea275a3f4..6478b31d27ef0 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java @@ -1,21 +1,20 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.GlossaryTermAssociation; -import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.TagAssociation; +import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class EditableSchemaMetadataPatchBuilder extends - AbstractMultiFieldPatchBuilder<EditableSchemaMetadataPatchBuilder> { +public class EditableSchemaMetadataPatchBuilder + extends AbstractMultiFieldPatchBuilder<EditableSchemaMetadataPatchBuilder> { private static final String BASE_PATH = "/editableSchemaFieldInfo/"; private static final String TAGS_PATH_EXTENSION = "/globalTags/tags/"; @@ -24,39 +23,55 @@ public class EditableSchemaMetadataPatchBuilder extends private static final String URN_KEY = "urn"; private static final String CONTEXT_KEY = "context"; - public EditableSchemaMetadataPatchBuilder addTag(@Nonnull TagAssociation tag, @Nonnull String 
fieldPath) { + public EditableSchemaMetadataPatchBuilder addTag( + @Nonnull TagAssociation tag, @Nonnull String fieldPath) { ObjectNode value = instance.objectNode(); value.put(TAG_KEY, tag.getTag().toString()); if (tag.getContext() != null) { value.put(CONTEXT_KEY, tag.getContext()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag.getTag(), value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag.getTag(), + value)); return this; } - public EditableSchemaMetadataPatchBuilder removeTag(@Nonnull TagUrn tag, @Nonnull String fieldPath) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag, null)); + public EditableSchemaMetadataPatchBuilder removeTag( + @Nonnull TagUrn tag, @Nonnull String fieldPath) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag, + null)); return this; } - public EditableSchemaMetadataPatchBuilder addGlossaryTerm(@Nonnull GlossaryTermAssociation term, @Nonnull String fieldPath) { + public EditableSchemaMetadataPatchBuilder addGlossaryTerm( + @Nonnull GlossaryTermAssociation term, @Nonnull String fieldPath) { ObjectNode value = instance.objectNode(); value.put(URN_KEY, term.getUrn().toString()); if (term.getContext() != null) { value.put(CONTEXT_KEY, term.getContext()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term.getUrn(), value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term.getUrn(), + value)); return this; } - public EditableSchemaMetadataPatchBuilder removeGlossaryTerm(@Nonnull GlossaryTermUrn term, @Nonnull String fieldPath) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term, null)); + public EditableSchemaMetadataPatchBuilder removeGlossaryTerm( + @Nonnull GlossaryTermUrn term, @Nonnull String fieldPath) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term, + null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java index 29330bee01ef3..6ded8a25b4e22 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.dataset.DatasetLineageType; @@ -9,12 +12,9 @@ import lombok.ToString; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - @ToString -public class UpstreamLineagePatchBuilder extends 
AbstractMultiFieldPatchBuilder<UpstreamLineagePatchBuilder> { +public class UpstreamLineagePatchBuilder + extends AbstractMultiFieldPatchBuilder<UpstreamLineagePatchBuilder> { private static final String PATH_START = "/upstreams/"; private static final String DATASET_KEY = "dataset"; @@ -23,21 +23,24 @@ public class UpstreamLineagePatchBuilder extends AbstractMultiFieldPatchBuilder< private static final String ACTOR_KEY = "actor"; private static final String TYPE_KEY = "type"; - public UpstreamLineagePatchBuilder addUpstream(@Nonnull DatasetUrn datasetUrn, @Nonnull DatasetLineageType lineageType) { + public UpstreamLineagePatchBuilder addUpstream( + @Nonnull DatasetUrn datasetUrn, @Nonnull DatasetLineageType lineageType) { ObjectNode value = instance.objectNode(); ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()) - .put(ACTOR_KEY, UNKNOWN_ACTOR); - value.put(DATASET_KEY, datasetUrn.toString()) + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); + value + .put(DATASET_KEY, datasetUrn.toString()) .put(TYPE_KEY, lineageType.toString()) .set(AUDIT_STAMP_KEY, auditStamp); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); return this; } public UpstreamLineagePatchBuilder removeUpstream(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java index 562ab715848b1..9f221bac15be4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java @@ -4,14 +4,12 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * Interface to implement if an aspect supports custom properties changes - */ +/** Interface to implement if an aspect supports custom properties changes */ public interface CustomPropertiesPatchBuilderSupport<T extends AbstractMultiFieldPatchBuilder<T>> { /** * Adds a custom property + * * @param key * @param value * @return @@ -20,6 +18,7 @@ public interface CustomPropertiesPatchBuilderSupport<T extends AbstractMultiFiel /** * Removes a custom property + * * @param key * @return */ @@ -27,6 +26,7 @@ public interface CustomPropertiesPatchBuilderSupport<T extends AbstractMultiFiel /** * Fully replace the custom properties + * * @param properties * @return */ diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java index 660b6ff8fb84a..e3b14c0838ad6 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java +++ 
b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java @@ -5,21 +5,20 @@ import java.util.List; import org.apache.commons.lang3.tuple.ImmutableTriple; - /** - * Used for supporting intermediate subtypes when constructing a patch for an aspect that includes complex objects. + * Used for supporting intermediate subtypes when constructing a patch for an aspect that includes + * complex objects. + * * @param <T> The parent patch builder type */ public interface IntermediatePatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> { - /** - * Convenience method to return parent patch builder in functional callstack - */ + /** Convenience method to return parent patch builder in functional callstack */ T getParent(); /** - * Exposes subpath values to parent patch builder in Op, Path, Value triples. Should - * usually only be called by the parent patch builder class when constructing the path values. + * Exposes subpath values to parent patch builder in Op, Path, Value triples. Should usually only + * be called by the parent patch builder class when constructing the path values. */ List<ImmutableTriple<String, String, JsonNode>> getSubPaths(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java index 7396fa2d926d3..a2692c432513e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java @@ -1,77 +1,64 @@ package datahub.client.rest; +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.JacksonDataTemplateCodec; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.Emitter; +import datahub.client.MetadataResponseFuture; +import datahub.client.MetadataWriteResponse; +import datahub.event.EventFormatter; +import datahub.event.MetadataChangeProposalWrapper; +import datahub.event.UpsertAspectRequest; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; - import javax.annotation.concurrent.ThreadSafe; - +import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.concurrent.FutureCallback; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.entity.StringEntity; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; - -import 
com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.data.DataMap; -import com.linkedin.data.template.JacksonDataTemplateCodec; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.Emitter; -import datahub.client.MetadataResponseFuture; -import datahub.client.MetadataWriteResponse; -import datahub.event.EventFormatter; -import datahub.event.MetadataChangeProposalWrapper; -import datahub.event.UpsertAspectRequest; -import lombok.extern.slf4j.Slf4j; - -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.nio.client.HttpAsyncClient; import org.apache.http.ssl.SSLContextBuilder; -import java.security.KeyManagementException; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; - -import static com.linkedin.metadata.Constants.*; - - @ThreadSafe @Slf4j /** * The REST emitter is a thin wrapper on top of the Apache HttpClient - * (https://hc.apache.org/httpcomponents-client-4.5.x/index.html) library. It supports non-blocking emission of - * metadata and handles the details of JSON serialization of metadata aspects over the wire. + * (https://hc.apache.org/httpcomponents-client-4.5.x/index.html) library. It supports non-blocking + * emission of metadata and handles the details of JSON serialization of metadata aspects over the + * wire. * - * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` method. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * ); - * You can also customize the underlying - * http client by calling the `customizeHttpAsyncClient` method on the builder. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) - * ); + * <p>Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` + * method. e.g. RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") ); You can also customize + * the underlying http client by calling the `customizeHttpAsyncClient` method on the builder. e.g. + * RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") .customizeHttpAsyncClient(c + * :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) ); */ public class RestEmitter implements Emitter { @@ -87,29 +74,36 @@ public class RestEmitter implements Emitter { /** * The default constructor, prefer using the `create` factory method. 
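
[Editorial aside: since the Javadoc here points at the `create` factory method, this is a compilable rendering of the usage example given in the reflowed class-level Javadoc above. The Javadoc's `b :: b` is shorthand for the lambda `b -> b`; the server URL, header, and connection TTL are the Javadoc's own illustrative values, and java.util.Collections / java.util.concurrent.TimeUnit are assumed imported.]

    RestEmitter emitter =
        RestEmitter.create(
            b ->
                b.server("http://localhost:8080")
                    .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val"))
                    // optionally customize the underlying Apache HttpAsyncClient builder
                    .customizeHttpAsyncClient(
                        c -> c.setConnectionTimeToLive(30, TimeUnit.SECONDS)));
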
+ * * @param config */ public RestEmitter(RestEmitterConfig config) { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); this.config = config; // Override httpClient settings with RestEmitter configs if present if (config.getTimeoutSec() != null) { HttpAsyncClientBuilder httpClientBuilder = this.config.getAsyncHttpClientBuilder(); - httpClientBuilder.setDefaultRequestConfig(RequestConfig.custom() - .setConnectTimeout(config.getTimeoutSec() * 1000) - .setSocketTimeout(config.getTimeoutSec() * 1000) - .build()); + httpClientBuilder.setDefaultRequestConfig( + RequestConfig.custom() + .setConnectTimeout(config.getTimeoutSec() * 1000) + .setSocketTimeout(config.getTimeoutSec() * 1000) + .build()); } if (config.isDisableSslVerification()) { HttpAsyncClientBuilder httpClientBuilder = this.config.getAsyncHttpClientBuilder(); try { httpClientBuilder - .setSSLContext(new SSLContextBuilder().loadTrustMaterial(null, TrustAllStrategy.INSTANCE).build()) + .setSSLContext( + new SSLContextBuilder().loadTrustMaterial(null, TrustAllStrategy.INSTANCE).build()) .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE); } catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e) { throw new RuntimeException("Error while creating insecure http client", e); @@ -127,8 +121,10 @@ public RestEmitter(RestEmitterConfig config) { private static MetadataWriteResponse mapResponse(HttpResponse response) { MetadataWriteResponse.MetadataWriteResponseBuilder builder = MetadataWriteResponse.builder().underlyingResponse(response); - if ((response != null) && (response.getStatusLine() != null) && (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK - || response.getStatusLine().getStatusCode() == HttpStatus.SC_CREATED)) { + if ((response != null) + && (response.getStatusLine() != null) + && (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK + || response.getStatusLine().getStatusCode() == HttpStatus.SC_CREATED)) { builder.success(true); } else { builder.success(false); @@ -144,51 +140,49 @@ private static MetadataWriteResponse mapResponse(HttpResponse response) { length = contentStream.read(buffer); } builder.responseContent(result.toString("UTF-8")); - } catch (Exception e) { - // Catch all exceptions and still return a valid response object - log.warn("Wasn't able to convert response into a string", e); - } + } catch (Exception e) { + // Catch all exceptions and still return a valid response object + log.warn("Wasn't able to convert response into a string", e); + } return builder.build(); } - /** - * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` method. - * e.g. 
- * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") // coordinates of gms server - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * ); - * You can also customize the underlying http client by calling the `customizeHttpAsyncClient` method on the builder. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) - * ); + * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` + * method. e.g. RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * // coordinates of gms server .extraHeaders(Collections.singletonMap("Custom-Header", + * "custom-val") ); You can also customize the underlying http client by calling the + * `customizeHttpAsyncClient` method on the builder. e.g. RestEmitter emitter = + * RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") + * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) ); + * * @param builderSupplier - * @return a constructed RestEmitter. Call #testConnection to make sure this emitter has a valid connection to the server + * @return a constructed RestEmitter. Call #testConnection to make sure this emitter has a valid + * connection to the server */ - public static RestEmitter create(Consumer<RestEmitterConfig.RestEmitterConfigBuilder> builderSupplier) { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().with(builderSupplier).build()); + public static RestEmitter create( + Consumer<RestEmitterConfig.RestEmitterConfigBuilder> builderSupplier) { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().with(builderSupplier).build()); return restEmitter; } /** * Creates a RestEmitter with default settings. - * @return a constructed RestEmitter. - * Call #test_connection to validate that this emitter can communicate with the server. + * + * @return a constructed RestEmitter. Call #test_connection to validate that this emitter can + * communicate with the server. 
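
[Editorial aside: a minimal sketch of non-blocking emission with the default emitter described by the Javadoc above. `mcpw` is assumed to be a MetadataChangeProposalWrapper built elsewhere, and exception handling is elided.]

    RestEmitter emitter = RestEmitter.createWithDefaults();
    // emit() is asynchronous; a null Callback is tolerated (postGeneric guards on callback != null)
    Future<MetadataWriteResponse> future = emitter.emit(mcpw, null);
    MetadataWriteResponse response = future.get();
    // the response's success flag is derived from HTTP 200/201 in mapResponse above
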
*/ public static RestEmitter createWithDefaults() { // No-op creator -> creates RestEmitter using default settings - return create(b -> { - }); + return create(b -> {}); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposalWrapper mcpw, - Callback callback) throws IOException { - return emit(this.eventFormatter.convert(mcpw), callback); + public Future<MetadataWriteResponse> emit(MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException { + return emit(this.eventFormatter.convert(mcpw), callback); } @Override @@ -201,8 +195,9 @@ public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback c return this.postGeneric(this.ingestProposalUrl, serializedMCP, mcp, callback); } - private Future<MetadataWriteResponse> postGeneric(String urlStr, String payloadJson, Object originalRequest, - Callback callback) throws IOException { + private Future<MetadataWriteResponse> postGeneric( + String urlStr, String payloadJson, Object originalRequest, Callback callback) + throws IOException { HttpPost httpPost = new HttpPost(urlStr); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("X-RestLi-Protocol-Version", "2.0.0"); @@ -214,48 +209,49 @@ private Future<MetadataWriteResponse> postGeneric(String urlStr, String payloadJ httpPost.setEntity(new StringEntity(payloadJson)); AtomicReference<MetadataWriteResponse> responseAtomicReference = new AtomicReference<>(); CountDownLatch responseLatch = new CountDownLatch(1); - FutureCallback<HttpResponse> httpCallback = new FutureCallback<HttpResponse>() { - @Override - public void completed(HttpResponse response) { - MetadataWriteResponse writeResponse = null; - try { - writeResponse = mapResponse(response); - responseAtomicReference.set(writeResponse); - } catch (Exception e) { - // do nothing - } - responseLatch.countDown(); - if (callback != null) { - try { - callback.onCompletion(writeResponse); - } catch (Exception e) { - log.error("Error executing user callback on completion.", e); + FutureCallback<HttpResponse> httpCallback = + new FutureCallback<HttpResponse>() { + @Override + public void completed(HttpResponse response) { + MetadataWriteResponse writeResponse = null; + try { + writeResponse = mapResponse(response); + responseAtomicReference.set(writeResponse); + } catch (Exception e) { + // do nothing + } + responseLatch.countDown(); + if (callback != null) { + try { + callback.onCompletion(writeResponse); + } catch (Exception e) { + log.error("Error executing user callback on completion.", e); + } + } } - } - } - @Override - public void failed(Exception ex) { - if (callback != null) { - try { - callback.onFailure(ex); - } catch (Exception e) { - log.error("Error executing user callback on failure.", e); + @Override + public void failed(Exception ex) { + if (callback != null) { + try { + callback.onFailure(ex); + } catch (Exception e) { + log.error("Error executing user callback on failure.", e); + } + } } - } - } - @Override - public void cancelled() { - if (callback != null) { - try { - callback.onFailure(new RuntimeException("Cancelled")); - } catch (Exception e) { - log.error("Error executing user callback on failure due to cancellation.", e); + @Override + public void cancelled() { + if (callback != null) { + try { + callback.onFailure(new RuntimeException("Cancelled")); + } catch (Exception e) { + log.error("Error executing user callback on failure due to cancellation.", e); + } + } } - } - } - }; + }; Future<HttpResponse> requestFuture = httpClient.execute(httpPost, 
httpCallback); return new MetadataResponseFuture(requestFuture, responseAtomicReference, responseLatch); } @@ -286,8 +282,8 @@ public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Cal return this.postOpenAPI(request, callback); } - private Future<MetadataWriteResponse> postOpenAPI(List<UpsertAspectRequest> payload, Callback callback) - throws IOException { + private Future<MetadataWriteResponse> postOpenAPI( + List<UpsertAspectRequest> payload, Callback callback) throws IOException { HttpPost httpPost = new HttpPost(ingestOpenApiUrl); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("Accept", "application/json"); @@ -298,48 +294,49 @@ private Future<MetadataWriteResponse> postOpenAPI(List<UpsertAspectRequest> payl httpPost.setEntity(new StringEntity(objectMapper.writeValueAsString(payload))); AtomicReference<MetadataWriteResponse> responseAtomicReference = new AtomicReference<>(); CountDownLatch responseLatch = new CountDownLatch(1); - FutureCallback<HttpResponse> httpCallback = new FutureCallback<HttpResponse>() { - @Override - public void completed(HttpResponse response) { - MetadataWriteResponse writeResponse = null; - try { - writeResponse = mapResponse(response); - responseAtomicReference.set(writeResponse); - } catch (Exception e) { - // do nothing - } - responseLatch.countDown(); - if (callback != null) { - try { - callback.onCompletion(writeResponse); - } catch (Exception e) { - log.error("Error executing user callback on completion.", e); + FutureCallback<HttpResponse> httpCallback = + new FutureCallback<HttpResponse>() { + @Override + public void completed(HttpResponse response) { + MetadataWriteResponse writeResponse = null; + try { + writeResponse = mapResponse(response); + responseAtomicReference.set(writeResponse); + } catch (Exception e) { + // do nothing + } + responseLatch.countDown(); + if (callback != null) { + try { + callback.onCompletion(writeResponse); + } catch (Exception e) { + log.error("Error executing user callback on completion.", e); + } + } } - } - } - @Override - public void failed(Exception ex) { - if (callback != null) { - try { - callback.onFailure(ex); - } catch (Exception e) { - log.error("Error executing user callback on failure.", e); + @Override + public void failed(Exception ex) { + if (callback != null) { + try { + callback.onFailure(ex); + } catch (Exception e) { + log.error("Error executing user callback on failure.", e); + } + } } - } - } - @Override - public void cancelled() { - if (callback != null) { - try { - callback.onFailure(new RuntimeException("Cancelled")); - } catch (Exception e) { - log.error("Error executing user callback on failure due to cancellation.", e); + @Override + public void cancelled() { + if (callback != null) { + try { + callback.onFailure(new RuntimeException("Cancelled")); + } catch (Exception e) { + log.error("Error executing user callback on failure due to cancellation.", e); + } + } } - } - } - }; + }; Future<HttpResponse> requestFuture = httpClient.execute(httpPost, httpCallback); return new MetadataResponseFuture(requestFuture, responseAtomicReference, responseLatch); } @@ -348,5 +345,4 @@ public void cancelled() { HttpAsyncClient getHttpClient() { return this.httpClient; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java index f615c3ccb3e4f..7e24429213246 100644 --- 
a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java @@ -13,7 +13,6 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; - @Value @Builder @Slf4j @@ -24,46 +23,43 @@ public class RestEmitterConfig { public static final String DEFAULT_AUTH_TOKEN = null; public static final String CLIENT_VERSION_PROPERTY = "clientVersion"; - @Builder.Default - private final String server = "http://localhost:8080"; + @Builder.Default private final String server = "http://localhost:8080"; private final Integer timeoutSec; - @Builder.Default - private final boolean disableSslVerification = false; - - @Builder.Default - private final String token = DEFAULT_AUTH_TOKEN; + @Builder.Default private final boolean disableSslVerification = false; - @Builder.Default - @NonNull - private final Map<String, String> extraHeaders = Collections.EMPTY_MAP; + @Builder.Default private final String token = DEFAULT_AUTH_TOKEN; + + @Builder.Default @NonNull private final Map<String, String> extraHeaders = Collections.EMPTY_MAP; private final HttpAsyncClientBuilder asyncHttpClientBuilder; @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); public static class RestEmitterConfigBuilder { private String getVersion() { - try ( - InputStream foo = this.getClass().getClassLoader().getResourceAsStream("client.properties")) { - Properties properties = new Properties(); - properties.load(foo); - return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); + try (InputStream foo = + this.getClass().getClassLoader().getResourceAsStream("client.properties")) { + Properties properties = new Properties(); + properties.load(foo); + return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); } catch (Exception e) { log.warn("Unable to find a version for datahub-client. 
Will set to unknown", e); return "unknown"; } } - private HttpAsyncClientBuilder asyncHttpClientBuilder = HttpAsyncClientBuilder - .create() - .setDefaultRequestConfig(RequestConfig.custom() - .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_SEC * 1000) - .setSocketTimeout(DEFAULT_READ_TIMEOUT_SEC * 1000) - .build()) - .setUserAgent("DataHub-RestClient/" + getVersion()); + private HttpAsyncClientBuilder asyncHttpClientBuilder = + HttpAsyncClientBuilder.create() + .setDefaultRequestConfig( + RequestConfig.custom() + .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_SEC * 1000) + .setSocketTimeout(DEFAULT_READ_TIMEOUT_SEC * 1000) + .build()) + .setUserAgent("DataHub-RestClient/" + getVersion()); public RestEmitterConfigBuilder with(Consumer<RestEmitterConfigBuilder> builderFunction) { builderFunction.accept(this); @@ -76,4 +72,4 @@ public RestEmitterConfigBuilder customizeHttpAsyncClient( return this; } } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java index 5d42f814e1fe0..5238c19610601 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java @@ -1,5 +1,7 @@ package datahub.event; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -8,18 +10,12 @@ import com.linkedin.data.template.JacksonDataTemplateCodec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; - import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import lombok.SneakyThrows; -import static com.linkedin.metadata.Constants.*; - - -/** - * A class that helps to format Metadata events for transport - */ +/** A class that helps to format Metadata events for transport */ public class EventFormatter { private final ObjectMapper objectMapper; @@ -30,9 +26,13 @@ public class EventFormatter { public EventFormatter(Format serializationFormat) { this.serializationFormat = serializationFormat; objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); } @@ -42,21 +42,29 @@ public EventFormatter() { @SneakyThrows(URISyntaxException.class) public MetadataChangeProposal convert(MetadataChangeProposalWrapper mcpw) throws IOException { - - String serializedAspect = StringEscapeUtils.escapeJava(dataTemplateCodec.dataTemplateToString(mcpw.getAspect())); - MetadataChangeProposal mcp = new MetadataChangeProposal().setEntityType(mcpw.getEntityType()) - .setAspectName(mcpw.getAspectName()) - .setEntityUrn(Urn.createFromString(mcpw.getEntityUrn())) - .setChangeType(mcpw.getChangeType()); + + 
String serializedAspect = + StringEscapeUtils.escapeJava(dataTemplateCodec.dataTemplateToString(mcpw.getAspect())); + MetadataChangeProposal mcp = + new MetadataChangeProposal() + .setEntityType(mcpw.getEntityType()) + .setAspectName(mcpw.getAspectName()) + .setEntityUrn(Urn.createFromString(mcpw.getEntityUrn())) + .setChangeType(mcpw.getChangeType()); switch (this.serializationFormat) { - case PEGASUS_JSON: { - mcp.setAspect(new GenericAspect().setContentType("application/json") - .setValue(ByteString.unsafeWrap(serializedAspect.getBytes(StandardCharsets.UTF_8)))); - } - break; + case PEGASUS_JSON: + { + mcp.setAspect( + new GenericAspect() + .setContentType("application/json") + .setValue( + ByteString.unsafeWrap(serializedAspect.getBytes(StandardCharsets.UTF_8)))); + } + break; default: - throw new EventValidationException("Cannot handle serialization format " + this.serializationFormat); + throw new EventValidationException( + "Cannot handle serialization format " + this.serializationFormat); } return mcp; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java index 43778cb325971..dff3791a64ec9 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java @@ -4,6 +4,7 @@ public class EventValidationException extends RuntimeException { public EventValidationException(String message) { super(message); } + public EventValidationException(String message, Throwable t) { super(message, t); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java index 083a4cb40471b..4eb33015e33f4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java @@ -12,9 +12,9 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - /** * A class that makes it easy to create new {@link MetadataChangeProposal} events + * * @param <T> */ @Value @@ -53,7 +53,11 @@ public interface Build { } public static class MetadataChangeProposalWrapperBuilder - implements EntityUrnStepBuilder, EntityTypeStepBuilder, ChangeStepBuilder, AspectStepBuilder, Build { + implements EntityUrnStepBuilder, + EntityTypeStepBuilder, + ChangeStepBuilder, + AspectStepBuilder, + Build { private String entityUrn; private String entityType; @@ -116,9 +120,11 @@ public Build aspect(DataTemplate aspect) { @Override public MetadataChangeProposalWrapper build() { try { - Objects.requireNonNull(this.aspectName, + Objects.requireNonNull( + this.aspectName, "aspectName could not be inferred from provided aspect and was not explicitly provided as an override"); - return new MetadataChangeProposalWrapper(entityType, entityUrn, changeType, aspect, aspectName); + return new MetadataChangeProposalWrapper( + entityType, entityUrn, changeType, aspect, aspectName); } catch (Exception e) { throw new EventValidationException("Failed to create a metadata change proposal event", e); } @@ -131,7 +137,8 @@ public Build aspectName(String aspectName) { } } - public static MetadataChangeProposalWrapper 
create(Consumer<EntityTypeStepBuilder> builderConsumer) { + public static MetadataChangeProposalWrapper create( + Consumer<EntityTypeStepBuilder> builderConsumer) { MetadataChangeProposalWrapperBuilder builder = new MetadataChangeProposalWrapperBuilder(); builderConsumer.accept(builder); return builder.build(); diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java index 6fe07ac448a80..fa7c21fd41d9a 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java @@ -1,22 +1,17 @@ /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * <p>http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * <p>Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing permissions and + * limitations under the License. */ - package datahub.event; import java.io.IOException; @@ -26,21 +21,20 @@ public class StringEscapeUtils { - private StringEscapeUtils() { + private StringEscapeUtils() {} - } - /** * Worker method for the {@link #escapeJavaScript(String)} method. 
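
[Editorial aside: a short sketch of the escaping behavior this worker method implements, using the input/output pair from the escapeJava Javadoc further below. Note that escapeJava is declared to throw IOException even though it only writes to an in-memory StringWriter.]

    // double quotes are escaped; single quotes are left alone for Java-style escaping
    String escaped = StringEscapeUtils.escapeJava("He didn't say, \"Stop!\"");
    // escaped now contains: He didn't say, \"Stop!\"
    // control characters are escaped too, e.g. a tab becomes the two characters \ and t
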
- * + * * @param out write to receieve the escaped string * @param str String to escape values in, may be null * @param escapeSingleQuote escapes single quotes if <code>true</code> * @param escapeForwardSlash TODO * @throws IOException if an IOException occurs */ - private static void escapeJavaStyleString(Writer out, String str, boolean escapeSingleQuote, - boolean escapeForwardSlash) throws IOException { + private static void escapeJavaStyleString( + Writer out, String str, boolean escapeSingleQuote, boolean escapeForwardSlash) + throws IOException { if (out == null) { throw new IllegalArgumentException("The Writer must not be null"); } else if (str != null) { @@ -56,35 +50,35 @@ private static void escapeJavaStyleString(Writer out, String str, boolean escape out.write("\\u00" + hex(ch)); } else if (ch < ' ') { switch (ch) { - case '\b': - out.write(92); - out.write(98); - break; - case '\t': - out.write(92); - out.write(116); - break; - case '\n': - out.write(92); - out.write(110); - break; - case '\u000b': + case '\b': + out.write(92); + out.write(98); + break; + case '\t': + out.write(92); + out.write(116); + break; + case '\n': + out.write(92); + out.write(110); + break; + case '\u000b': - case '\f': - out.write(92); - out.write(102); - break; - case '\r': - out.write(92); - out.write(114); - break; - default: - if (ch > 15) { - out.write("\\u00" + hex(ch)); - } else { - out.write("\\u000" + hex(ch)); - } - break; + case '\f': + out.write(92); + out.write(102); + break; + case '\r': + out.write(92); + out.write(114); + break; + default: + if (ch > 15) { + out.write("\\u00" + hex(ch)); + } else { + out.write("\\u000" + hex(ch)); + } + break; } } else { @@ -95,8 +89,7 @@ private static void escapeJavaStyleString(Writer out, String str, boolean escape } /** - * Returns an upper case hexadecimal <code>String</code> for the given - * character. + * Returns an upper case hexadecimal <code>String</code> for the given character. * * @param ch The character to convert. * @return An upper case hexadecimal <code>String</code> @@ -113,34 +106,35 @@ private static String hex(char ch) { * @param escapeForwardSlash TODO * @return the escaped string */ - private static String escapeJavaStyleString(String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) throws IOException { + private static String escapeJavaStyleString( + String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) throws IOException { if (str == null) { return null; } else { StringWriter writer = new StringWriter(str.length() * 2); escapeJavaStyleString(writer, str, escapeSingleQuotes, escapeForwardSlash); return writer.toString(); - } } - + /** * Escapes the characters in a <code>String</code> using Java String rules. - * <p> - * Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) - * <p> - * So a tab becomes the characters <code>'\\'</code> and <code>'t'</code>. - * <p> - * The only difference between Java strings and JavaScript strings - * is that in JavaScript, a single quote must be escaped. - * <p> - * Example: + * + * <p>Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) + * + * <p>So a tab becomes the characters <code>'\\'</code> and <code>'t'</code>. + * + * <p>The only difference between Java strings and JavaScript strings is that in JavaScript, a + * single quote must be escaped. + * + * <p>Example: + * * <pre> * input string: He didn't say, "Stop!" 
* output string: He didn't say, \"Stop!\" * </pre> * - * @param str String to escape values in, may be null + * @param str String to escape values in, may be null * @return String with escaped values, <code>null</code> if null string input */ public static String escapeJava(String str) throws IOException { diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java index eb834ccea2b91..7dfb9d33f6948 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java @@ -9,7 +9,6 @@ import lombok.Builder; import lombok.Value; - @JsonInclude(JsonInclude.Include.NON_NULL) @Value @Builder @@ -17,15 +16,21 @@ public class UpsertAspectRequest { @JsonProperty("entityType") - @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry") + @Schema( + required = true, + description = "The name of the entity matching with its definition in the entity registry") String entityType; @JsonProperty("entityUrn") - @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") + @Schema( + description = + "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") String entityUrn; @JsonProperty("entityKeyAspect") - @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null") + @Schema( + description = + "A key aspect referencing the entity to be updated, required if entityUrn is null") OneOfGenericAspectValue entityKeyAspect; @JsonProperty("aspect") @@ -33,7 +38,5 @@ public class UpsertAspectRequest { OneOfGenericAspectValue aspect; @JsonPOJOBuilder(withPrefix = "") - public static class UpsertAspectRequestBuilder { - - } + public static class UpsertAspectRequestBuilder {} } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java index e591fee3f68a8..01b39f77913bc 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java @@ -1,7 +1,20 @@ package datahub.client.file; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DatabindException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.JacksonDataTemplateCodec; +import com.linkedin.dataset.DatasetProperties; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.MetadataWriteResponse; +import datahub.event.MetadataChangeProposalWrapper; import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -10,47 +23,34 @@ import java.util.Map; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; - import org.junit.Assert; import org.junit.Rule; import org.junit.Test; 
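
Before the reshuffled imports below, a quick orientation on what this test class exercises: the file emitter writes MetadataChangeProposals to a local JSON file. A minimal happy-path sketch, using only classes and signatures that appear in this diff (the output path is illustrative, and passing a null callback is assumed to be accepted here, as it is in the REST emitter tests):

import com.linkedin.dataset.DatasetProperties;
import datahub.client.file.FileEmitter;
import datahub.client.file.FileEmitterConfig;
import datahub.event.MetadataChangeProposalWrapper;

public class FileEmitterExample {
  public static void main(String[] args) throws Exception {
    FileEmitter emitter =
        new FileEmitter(FileEmitterConfig.builder().fileName("/tmp/mcps.json").build());
    MetadataChangeProposalWrapper<?> mcpw =
        MetadataChangeProposalWrapper.builder()
            .entityType("dataset")
            .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)")
            .upsert()
            .aspect(new DatasetProperties().setDescription("Example dataset"))
            .build();
    emitter.emit(mcpw, null).get(); // the Future resolves once the MCP is written
    emitter.close(); // close to flush and finalize the output file
  }
}
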
import org.junit.rules.TemporaryFolder; -import com.fasterxml.jackson.core.exc.StreamReadException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DatabindException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.data.DataMap; -import com.linkedin.data.template.JacksonDataTemplateCodec; -import com.linkedin.dataset.DatasetProperties; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.MetadataWriteResponse; -import datahub.event.MetadataChangeProposalWrapper; - -import static com.linkedin.metadata.Constants.*; - - public class FileEmitterTest { private final ObjectMapper objectMapper; private final JacksonDataTemplateCodec dataTemplateCodec; public FileEmitterTest() { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); } - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Test public void testFileEmitter() throws IOException { - InputStream goldenFileStream = ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); + InputStream goldenFileStream = + ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); String tempRoot = tempFolder.getRoot().toString(); String outputFile = tempRoot + "/test.json"; @@ -61,24 +61,22 @@ public void testFileEmitter() throws IOException { emitter.close(); goldenFileStream = ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); this.assertEqualJsonFile(goldenFileStream, outputFile); - } - private void assertEqualJsonFile(InputStream file1, String file2) throws StreamReadException, DatabindException, - IOException { - TypeReference<List<Map<String, Object>>> typeRef = new TypeReference<List<Map<String, Object>>>() { - }; + private void assertEqualJsonFile(InputStream file1, String file2) + throws StreamReadException, DatabindException, IOException { + TypeReference<List<Map<String, Object>>> typeRef = + new TypeReference<List<Map<String, Object>>>() {}; List<Map<String, Object>> map1 = this.objectMapper.readValue(file1, typeRef); File f2 = new File(file2); List<Map<String, Object>> map2 = this.objectMapper.readValue(f2, typeRef); Assert.assertEquals(map1, map2); } - private List<MetadataChangeProposal> getMCPs(InputStream fileStream) throws StreamReadException, DatabindException, - IOException { + private List<MetadataChangeProposal> getMCPs(InputStream fileStream) + throws StreamReadException, DatabindException, IOException { ArrayList<MetadataChangeProposal> mcps = new ArrayList<MetadataChangeProposal>(); - TypeReference<Map<String, Object>[]> typeRef = new TypeReference<Map<String, Object>[]>() { - }; + TypeReference<Map<String, Object>[]> typeRef = new TypeReference<Map<String, Object>[]>() {}; Map<String, Object>[] maps = this.objectMapper.readValue(fileStream, typeRef); for 
(Map<String, Object> map : maps) { String json = objectMapper.writeValueAsString(map); @@ -94,20 +92,24 @@ public void testSuccessCallback() throws Exception { String tempRoot = tempFolder.getRoot().toString(); String outputFile = tempRoot + "/testCallBack.json"; FileEmitter emitter = new FileEmitter(FileEmitterConfig.builder().fileName(outputFile).build()); - MetadataChangeProposalWrapper<?> mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + MetadataChangeProposalWrapper<?> mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); AtomicReference<MetadataWriteResponse> callbackResponse = new AtomicReference<>(); - Future<MetadataWriteResponse> future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - callbackResponse.set(response); - Assert.assertTrue(response.isSuccess()); - } - - @Override - public void onFailure(Throwable exception) { - Assert.fail("Should not be called"); - } - }); + Future<MetadataWriteResponse> future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + callbackResponse.set(response); + Assert.assertTrue(response.isSuccess()); + } + + @Override + public void onFailure(Throwable exception) { + Assert.fail("Should not be called"); + } + }); Assert.assertEquals(callbackResponse.get(), future.get()); } @@ -119,25 +121,27 @@ public void testFailCallback() throws Exception { String outputFile = tempRoot + "/testCallBack.json"; FileEmitter emitter = new FileEmitter(FileEmitterConfig.builder().fileName(outputFile).build()); emitter.close(); - MetadataChangeProposalWrapper<?> mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); - Future<MetadataWriteResponse> future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - - Assert.fail("Should not be called"); - } - - @Override - public void onFailure(Throwable exception) { - - } - }); + MetadataChangeProposalWrapper<?> mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + Future<MetadataWriteResponse> future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + + Assert.fail("Should not be called"); + } + + @Override + public void onFailure(Throwable exception) {} + }); Assert.assertFalse(future.get().isSuccess()); - } - private MetadataChangeProposalWrapper<?> getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper<?> getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -145,5 +149,4 @@ private MetadataChangeProposalWrapper<?> getMetadataChangeProposalWrapper(String .aspect(new DatasetProperties().setDescription(description)) .build(); } - } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java index 520594381426f..f61121adf1395 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java @@ -14,14 +14,12 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; - public class 
AvroSerializerTest { + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -35,12 +33,14 @@ public void avroFileWrite() throws Exception { AvroSerializer avroSerializer = new AvroSerializer(); File file = tempFolder.newFile("data.avro"); - DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(avroSerializer.getRecordSchema()); + DatumWriter<GenericRecord> writer = + new GenericDatumWriter<GenericRecord>(avroSerializer.getRecordSchema()); DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(writer); dataFileWriter.create(avroSerializer.getRecordSchema(), file); String entityUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,logging_events,PROD)"; for (int i = 0; i < 10; ++i) { - MetadataChangeProposalWrapper metadataChangeProposalWrapper = getMetadataChangeProposalWrapper("Test description - " + i, entityUrn); + MetadataChangeProposalWrapper metadataChangeProposalWrapper = + getMetadataChangeProposalWrapper("Test description - " + i, entityUrn); GenericRecord record = avroSerializer.serialize(metadataChangeProposalWrapper); dataFileWriter.append(record); } @@ -48,7 +48,8 @@ public void avroFileWrite() throws Exception { File readerFile = file; DatumReader<GenericRecord> reader = new GenericDatumReader<>(avroSerializer.getRecordSchema()); - DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(readerFile, reader); + DataFileReader<GenericRecord> dataFileReader = + new DataFileReader<GenericRecord>(readerFile, reader); while (dataFileReader.hasNext()) { GenericRecord record = dataFileReader.next(); System.out.println(record.get("entityUrn")); diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java index dff109cf1e455..5161e6460b8a1 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java @@ -3,13 +3,20 @@ import static datahub.client.kafka.KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC; import static java.util.Collections.singletonList; +import com.linkedin.dataset.DatasetProperties; +import datahub.client.MetadataWriteResponse; +import datahub.client.kafka.containers.KafkaContainer; +import datahub.client.kafka.containers.SchemaRegistryContainer; +import datahub.client.kafka.containers.ZookeeperContainer; +import datahub.event.MetadataChangeProposalWrapper; +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import java.io.IOException; import java.util.Objects; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.stream.Stream; - import org.apache.avro.Schema; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.KafkaAdminClient; @@ -20,16 +27,6 @@ import org.testcontainers.containers.Network; import 
org.testng.Assert; -import com.linkedin.dataset.DatasetProperties; - -import datahub.client.MetadataWriteResponse; -import datahub.client.kafka.containers.KafkaContainer; -import datahub.client.kafka.containers.SchemaRegistryContainer; -import datahub.client.kafka.containers.ZookeeperContainer; -import datahub.event.MetadataChangeProposalWrapper; -import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; - public class KafkaEmitterTest { private static final String TOPIC = DEFAULT_MCP_KAFKA_TOPIC; @@ -47,11 +44,13 @@ public class KafkaEmitterTest { public static void confluentSetup() throws Exception { network = Network.newNetwork(); zookeeperContainer = new ZookeeperContainer().withNetwork(network); - kafkaContainer = new KafkaContainer(zookeeperContainer.getInternalUrl()) + kafkaContainer = + new KafkaContainer(zookeeperContainer.getInternalUrl()) .withNetwork(network) .dependsOn(zookeeperContainer); - schemaRegistryContainer = new SchemaRegistryContainer(zookeeperContainer.getInternalUrl(), - kafkaContainer.getInternalBootstrapServers()) + schemaRegistryContainer = + new SchemaRegistryContainer( + zookeeperContainer.getInternalUrl(), kafkaContainer.getInternalBootstrapServers()) .withNetwork(network) .dependsOn(zookeeperContainer, kafkaContainer); schemaRegistryContainer.start(); @@ -78,8 +77,9 @@ public void testConnection() throws IOException, ExecutionException, Interrupted public void testSend() throws IOException, InterruptedException, ExecutionException { @SuppressWarnings("rawtypes") - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", - "urn:li:dataset:(urn:li:dataPlatform:spark,foo.bar,PROD)"); + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper( + "Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:spark,foo.bar,PROD)"); Future<MetadataWriteResponse> future = emitter.emit(mcpw); MetadataWriteResponse response = future.get(); System.out.println("Response: " + response); @@ -95,26 +95,41 @@ private static AdminClient createAdminClient(String bootstrap) { private static void registerSchemaRegistryTypes() throws IOException, RestClientException { Schema mcpSchema = new AvroSerializer().getRecordSchema(); - CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryContainer.getUrl(), 1000); + CachedSchemaRegistryClient schemaRegistryClient = + new CachedSchemaRegistryClient(schemaRegistryContainer.getUrl(), 1000); schemaRegistryClient.register(mcpSchema.getFullName(), mcpSchema); } private static String createTopics(Stream<String> bootstraps) { short replicationFactor = 1; int partitions = 1; - return bootstraps.parallel().map(bootstrap -> { - try { - createAdminClient(bootstrap).createTopics(singletonList(new NewTopic(TOPIC, partitions, replicationFactor))).all().get(); - return bootstrap; - } catch (RuntimeException | InterruptedException | ExecutionException ex) { - return null; - } - }).filter(Objects::nonNull).findFirst().get(); + return bootstraps + .parallel() + .map( + bootstrap -> { + try { + createAdminClient(bootstrap) + .createTopics(singletonList(new NewTopic(TOPIC, partitions, replicationFactor))) + .all() + .get(); + return bootstrap; + } catch (RuntimeException | InterruptedException | ExecutionException ex) { + return null; + } + }) + .filter(Objects::nonNull) + .findFirst() + .get(); } @SuppressWarnings("rawtypes") - private MetadataChangeProposalWrapper 
getMetadataChangeProposalWrapper(String description, String entityUrn) { - return MetadataChangeProposalWrapper.builder().entityType("dataset").entityUrn(entityUrn).upsert() - .aspect(new DatasetProperties().setDescription(description)).build(); + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { + return MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn(entityUrn) + .upsert() + .aspect(new DatasetProperties().setDescription(description)) + .build(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java index 5f26748cb677c..86ced5ce620cd 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java @@ -1,21 +1,17 @@ package datahub.client.kafka.containers; +import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; + import com.github.dockerjava.api.command.InspectContainerResponse; +import java.nio.charset.StandardCharsets; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; import org.testcontainers.images.builder.Transferable; import org.testcontainers.utility.TestcontainersConfiguration; -import java.nio.charset.StandardCharsets; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; - -/** - * This container wraps Confluent Kafka. - * - */ +/** This container wraps Confluent Kafka. 
*/ public class KafkaContainer extends GenericContainer<KafkaContainer> { private static final String STARTER_SCRIPT = "/testcontainers_start.sh"; @@ -47,11 +43,17 @@ public KafkaContainer(String confluentPlatformVersion, String zookeeperConnect) // with itself via internal // listener when KAFKA_INTER_BROKER_LISTENER_NAME is set, otherwise Kafka will // try to use the advertised listener - withEnv("KAFKA_LISTENERS", - "PLAINTEXT://0.0.0.0:" + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT - + ",BROKER://0.0.0.0:" + KAFKA_INTERNAL_PORT - + ",BROKER_LOCAL://0.0.0.0:" + KAFKA_LOCAL_PORT); - withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,BROKER_LOCAL:PLAINTEXT"); + withEnv( + "KAFKA_LISTENERS", + "PLAINTEXT://0.0.0.0:" + + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT + + ",BROKER://0.0.0.0:" + + KAFKA_INTERNAL_PORT + + ",BROKER_LOCAL://0.0.0.0:" + + KAFKA_LOCAL_PORT); + withEnv( + "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", + "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,BROKER_LOCAL:PLAINTEXT"); withEnv("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER"); withEnv("KAFKA_BROKER_ID", "1"); @@ -68,8 +70,9 @@ public Stream<String> getBootstrapServers() { if (port == PORT_NOT_ASSIGNED) { throw new IllegalStateException("You should start Kafka container first"); } - return Stream.of(String.format("PLAINTEXT://%s:%s", getHost(), port), - String.format("PLAINTEXT://localhost:%s", getMappedPort(KAFKA_LOCAL_PORT))); + return Stream.of( + String.format("PLAINTEXT://%s:%s", getHost(), port), + String.format("PLAINTEXT://localhost:%s", getMappedPort(KAFKA_LOCAL_PORT))); } public String getInternalBootstrapServers() { @@ -78,7 +81,10 @@ public String getInternalBootstrapServers() { @Override protected void doStart() { - withCommand("sh", "-c", "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + "sh " + STARTER_SCRIPT); + withCommand( + "sh", + "-c", + "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + "sh " + STARTER_SCRIPT); super.doStart(); } @@ -100,22 +106,33 @@ protected void containerIsStarting(InspectContainerResponse containerInfo, boole String command = "#!/bin/bash \n"; command += "export KAFKA_ZOOKEEPER_CONNECT='" + zookeeperConnect + "'\n"; - command += "export KAFKA_ADVERTISED_LISTENERS='" + Stream - .concat(Stream.of("PLAINTEXT://" + networkAlias + ":" + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT, + command += + "export KAFKA_ADVERTISED_LISTENERS='" + + Stream.concat( + Stream.of( + "PLAINTEXT://" + + networkAlias + + ":" + + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT, "BROKER_LOCAL://localhost:" + getMappedPort(KAFKA_LOCAL_PORT)), - containerInfo.getNetworkSettings().getNetworks().values().stream() - .map(it -> "BROKER://" + it.getIpAddress() + ":" + KAFKA_INTERNAL_PORT)) - .collect(Collectors.joining(",")) + "'\n"; + containerInfo.getNetworkSettings().getNetworks().values().stream() + .map(it -> "BROKER://" + it.getIpAddress() + ":" + KAFKA_INTERNAL_PORT)) + .collect(Collectors.joining(",")) + + "'\n"; command += ". 
/etc/confluent/docker/bash-config \n"; command += "/etc/confluent/docker/configure \n"; command += "/etc/confluent/docker/launch \n"; - copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT); + copyFileToContainer( + Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT); } private static String getKafkaContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration.getInstance().getProperties().getOrDefault("kafka.container.image", - "confluentinc/cp-kafka:" + confluentPlatformVersion); + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "kafka.container.image", "confluentinc/cp-kafka:" + confluentPlatformVersion); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java index 907a4558b60d9..5c0223e580575 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java @@ -5,46 +5,48 @@ import java.io.IOException; import java.time.Duration; - import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; import org.testcontainers.utility.TestcontainersConfiguration; - public class SchemaRegistryContainer extends GenericContainer<SchemaRegistryContainer> { - private static final int SCHEMA_REGISTRY_INTERNAL_PORT = 8081; - - private final String networkAlias = "schema-registry"; - - public SchemaRegistryContainer(String zookeeperConnect, String kafkaBootstrap) throws IOException { - this(CONFLUENT_PLATFORM_VERSION, zookeeperConnect, kafkaBootstrap); - } - - public SchemaRegistryContainer(String confluentPlatformVersion, String zookeeperConnect, String kafkaBootstrap) throws IOException { - super(getSchemaRegistryContainerImage(confluentPlatformVersion)); - - addEnv("SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL", zookeeperConnect); - addEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost"); - addEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", kafkaBootstrap); - - withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT); - withNetworkAliases(networkAlias); - - waitingFor(new HttpWaitStrategy().forPath("/subjects").withStartupTimeout(Duration.ofMinutes(2))); - } - - public String getUrl() { - return format("http://%s:%d", this.getContainerIpAddress(), this.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT)); - } - - - private static String getSchemaRegistryContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration - .getInstance().getProperties().getOrDefault( - "schemaregistry.container.image", - "confluentinc/cp-schema-registry:" + confluentPlatformVersion - ); - } + private static final int SCHEMA_REGISTRY_INTERNAL_PORT = 8081; + + private final String networkAlias = "schema-registry"; + + public SchemaRegistryContainer(String zookeeperConnect, String kafkaBootstrap) + throws IOException { + this(CONFLUENT_PLATFORM_VERSION, zookeeperConnect, kafkaBootstrap); + } + + public SchemaRegistryContainer( + String confluentPlatformVersion, String zookeeperConnect, String kafkaBootstrap) + throws IOException { + 
super(getSchemaRegistryContainerImage(confluentPlatformVersion)); + + addEnv("SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL", zookeeperConnect); + addEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost"); + addEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", kafkaBootstrap); + + withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT); + withNetworkAliases(networkAlias); + + waitingFor( + new HttpWaitStrategy().forPath("/subjects").withStartupTimeout(Duration.ofMinutes(2))); + } + + public String getUrl() { + return format( + "http://%s:%d", + this.getContainerIpAddress(), this.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT)); + } + + private static String getSchemaRegistryContainerImage(String confluentPlatformVersion) { + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "schemaregistry.container.image", + "confluentinc/cp-schema-registry:" + confluentPlatformVersion); + } } - - diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java index efa79724f114b..93ba828096282 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java @@ -6,18 +6,17 @@ final class Utils { public static final String CONFLUENT_PLATFORM_VERSION = "7.4.0"; - private Utils() { - } + private Utils() {} /** - * Retrieves a random port that is currently not in use on this machine. - * - * @return a free port - * @throws IOException wraps the exceptions which may occur during this method call. - */ - static int getRandomFreePort() throws IOException { - @SuppressWarnings("resource") - ServerSocket serverSocket = new ServerSocket(0); - return serverSocket.getLocalPort(); - } -} \ No newline at end of file + * Retrieves a random port that is currently not in use on this machine. + * + * @return a free port + * @throws IOException wraps the exceptions which may occur during this method call. 
+ */ + static int getRandomFreePort() throws IOException { + @SuppressWarnings("resource") + ServerSocket serverSocket = new ServerSocket(0); + return serverSocket.getLocalPort(); + } +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java index 5bfc5055df68a..538092d810ce1 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java @@ -1,48 +1,48 @@ package datahub.client.kafka.containers; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; -import org.testcontainers.utility.TestcontainersConfiguration; +import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; +import static java.lang.String.format; import java.io.IOException; import java.util.HashMap; - -import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; -import static java.lang.String.format; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; +import org.testcontainers.utility.TestcontainersConfiguration; public class ZookeeperContainer extends GenericContainer<ZookeeperContainer> { - private static final int ZOOKEEPER_INTERNAL_PORT = 2181; - private static final int ZOOKEEPER_TICK_TIME = 2000; - - private final String networkAlias = "zookeeper"; - - public ZookeeperContainer() throws IOException { - this(CONFLUENT_PLATFORM_VERSION); - } - - public ZookeeperContainer(String confluentPlatformVersion) throws IOException { - super(getZookeeperContainerImage(confluentPlatformVersion)); - - HashMap<String, String> env = new HashMap<String, String>(); - env.put("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_INTERNAL_PORT)); - env.put("ZOOKEEPER_TICK_TIME", Integer.toString(ZOOKEEPER_TICK_TIME)); - withEnv(env); - - addExposedPort(ZOOKEEPER_INTERNAL_PORT); - withNetworkAliases(networkAlias); - waitingFor(new HostPortWaitStrategy()); - } - - public String getInternalUrl() { - return format("%s:%d", networkAlias, ZOOKEEPER_INTERNAL_PORT); - } - - private static String getZookeeperContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration - .getInstance().getProperties().getOrDefault( - "zookeeper.container.image", - "confluentinc/cp-zookeeper:" + confluentPlatformVersion - ); - } -} \ No newline at end of file + private static final int ZOOKEEPER_INTERNAL_PORT = 2181; + private static final int ZOOKEEPER_TICK_TIME = 2000; + + private final String networkAlias = "zookeeper"; + + public ZookeeperContainer() throws IOException { + this(CONFLUENT_PLATFORM_VERSION); + } + + public ZookeeperContainer(String confluentPlatformVersion) throws IOException { + super(getZookeeperContainerImage(confluentPlatformVersion)); + + HashMap<String, String> env = new HashMap<String, String>(); + env.put("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_INTERNAL_PORT)); + env.put("ZOOKEEPER_TICK_TIME", Integer.toString(ZOOKEEPER_TICK_TIME)); + withEnv(env); + + addExposedPort(ZOOKEEPER_INTERNAL_PORT); + withNetworkAliases(networkAlias); + waitingFor(new HostPortWaitStrategy()); + } + + public String getInternalUrl() { + return format("%s:%d", networkAlias, 
ZOOKEEPER_INTERNAL_PORT); + } + + private static String getZookeeperContainerImage(String confluentPlatformVersion) { + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "zookeeper.container.image", + "confluentinc/cp-zookeeper:" + confluentPlatformVersion); + } +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index c90d3f0d2179e..1d387acb0ce12 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -1,5 +1,7 @@ package datahub.client.patch; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Edge; import com.linkedin.common.FabricType; @@ -38,26 +40,25 @@ import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; -import static com.linkedin.metadata.Constants.*; - - @RunWith(MockitoJUnitRunner.class) public class PatchTest { - /** - * Examples for running patches, tests set to ignore as they target a GMS running on localhost - */ - + /** Examples for running patches, tests set to ignore as they target a GMS running on localhost */ @Test @Ignore public void testLocalUpstream() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addUpstream(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), - DatasetLineageType.TRANSFORMED) - .build(); + MetadataChangeProposal upstreamPatch = + new UpstreamLineagePatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addUpstream( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), + DatasetLineageType.TRANSFORMED) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(upstreamPatch); System.out.println(response.get().getResponseContent()); @@ -72,10 +73,15 @@ public void testLocalUpstream() { public void testLocalUpstreamRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeUpstream(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .build(); + MetadataChangeProposal upstreamPatch = + new UpstreamLineagePatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeUpstream( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(upstreamPatch); System.out.println(response.get().getResponseContent()); @@ -92,10 +98,13 @@ public void testLocalEditableSchemaMetadataTag() { try { TagAssociation tagAssociation = new TagAssociation(); tagAssociation.setTag(new TagUrn("Legacy")); - MetadataChangeProposal fieldTagPatch = new EditableSchemaMetadataPatchBuilder() - 
.urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addTag(tagAssociation, "field_foo") - .build(); + MetadataChangeProposal fieldTagPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addTag(tagAssociation, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTagPatch); System.out.println(response.get().getResponseContent()); @@ -111,10 +120,13 @@ public void testLocalEditableSchemaMetadataTagRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { TagUrn urn = new TagUrn("Legacy"); - MetadataChangeProposal fieldTagPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeTag(urn, "field_foo") - .build(); + MetadataChangeProposal fieldTagPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeTag(urn, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTagPatch); System.out.println(response.get().getResponseContent()); @@ -132,10 +144,13 @@ public void testLocalEditableSchemaMetadataTerm() { GlossaryTermAssociation termAssociation = new GlossaryTermAssociation(); termAssociation.setUrn(new GlossaryTermUrn("CustomerAccount")); - MetadataChangeProposal fieldTermPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addGlossaryTerm(termAssociation, "field_foo") - .build(); + MetadataChangeProposal fieldTermPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addGlossaryTerm(termAssociation, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTermPatch); System.out.println(response.get().getResponseContent()); @@ -152,10 +167,13 @@ public void testLocalEditableSchemaMetadataTermRemove() { try { GlossaryTermUrn urn = new GlossaryTermUrn("CustomerAccount"); - MetadataChangeProposal fieldTermPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeGlossaryTerm(urn, "field_foo") - .build(); + MetadataChangeProposal fieldTermPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeGlossaryTerm(urn, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTermPatch); System.out.println(response.get().getResponseContent()); @@ -168,16 +186,18 @@ public void testLocalEditableSchemaMetadataTermRemove() { @Test @Ignore public void testLocalOwnership() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .addOwner(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) - 
.build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder() + .urn(datasetUrn) + .addOwner(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) + .build(); System.out.println(ownershipPatch.toString()); Future<MetadataWriteResponse> response = fileEmitter.emit(ownershipPatch); response.get(); @@ -193,16 +213,15 @@ public void testLocalOwnership() { @Test @Ignore public void testLocalOwnershipRemove() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .removeOwner(new CorpuserUrn("gdoe")) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder().urn(datasetUrn).removeOwner(new CorpuserUrn("gdoe")).build(); System.out.println(ownershipPatch.toString()); Future<MetadataWriteResponse> response = fileEmitter.emit(ownershipPatch); response.get(); @@ -218,16 +237,18 @@ public void testLocalOwnershipRemove() { @Test @Ignore public void testLocalOwnershipRemoveType() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .removeOwnershipType(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder() + .urn(datasetUrn) + .removeOwnershipType(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) + .build(); System.out.println(ownershipPatch.toString()); Future<MetadataWriteResponse> response = fileEmitter.emit(ownershipPatch); response.get(); @@ -245,14 +266,17 @@ public void testLocalOwnershipRemoveType() { public void testLocalDataJobInfo() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal jobInfoToPatch = new DataJobInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .setDescription("something") - .setName("name") - .setType("type") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + MetadataChangeProposal jobInfoToPatch = + new DataJobInfoPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .setDescription("something") + .setName("name") + .setType("type") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future<MetadataWriteResponse> response = 
restEmitter.emit(jobInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -267,12 +291,15 @@ public void testLocalDataJobInfo() { public void testLocalDataJobInfoRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal jobInfoToPatch = new DataJobInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .setDescription(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + MetadataChangeProposal jobInfoToPatch = + new DataJobInfoPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .setDescription(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(jobInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -288,14 +315,16 @@ public void testLocalDatasetProperties() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal datasetPropertiesToPatch = new DatasetPropertiesPatchBuilder() - .urn(datasetUrn) - .setDescription("something") - .setName("name") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal datasetPropertiesToPatch = + new DatasetPropertiesPatchBuilder() + .urn(datasetUrn) + .setDescription("something") + .setName("name") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(datasetPropertiesToPatch); System.out.println(response.get().getResponseContent()); @@ -311,14 +340,16 @@ public void testLocalDatasetPropertiesRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal datasetPropertiesToPatch = new DatasetPropertiesPatchBuilder() - .urn(datasetUrn) - .setDescription(null) - .setName(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal datasetPropertiesToPatch = + new DatasetPropertiesPatchBuilder() + .urn(datasetUrn) + .setDescription(null) + .setName(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(datasetPropertiesToPatch); System.out.println(response.get().getResponseContent()); @@ -333,14 +364,15 @@ public void testLocalDatasetPropertiesRemove() { public void testLocalDataFlowInfo() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal flowInfoToPatch = new DataFlowInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) - .setDescription("something") - .setName("name") - .setProject("project") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + MetadataChangeProposal flowInfoToPatch = + new DataFlowInfoPatchBuilder() + 
.urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) + .setDescription("something") + .setName("name") + .setProject("project") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(flowInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -355,13 +387,14 @@ public void testLocalDataFlowInfo() { public void testLocalDataFlowInfoRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal flowInfoToPatch = new DataFlowInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) - .setDescription(null) - .setProject(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + MetadataChangeProposal flowInfoToPatch = + new DataFlowInfoPatchBuilder() + .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) + .setDescription(null) + .setProject(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(flowInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -376,14 +409,27 @@ public void testLocalDataFlowInfoRemove() { public void testLocalDataJobInputAdd() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .addInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .addOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .addInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .addInputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .addOutputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .addInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .addOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .addInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .addInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .addOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -398,14 +444,27 @@ public void testLocalDataJobInputAdd() { public void testLocalDataJobInputRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - 
.urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .removeInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .removeOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .removeInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .removeInputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .removeOutputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .removeInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .removeOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .removeInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .removeInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .removeOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -420,24 +479,54 @@ public void testLocalDataJobInputRemove() { public void testLocalDataJobInputAddEdge() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - Edge inputDataset = new Edge() - .setDestinationUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - Edge outputDataset = new Edge() - .setDestinationUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - Edge inputDataJob = new Edge() - .setDestinationUrn(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .addEdge(inputDataset, LineageDirection.UPSTREAM) - .addEdge(outputDataset, LineageDirection.DOWNSTREAM) - .addEdge(inputDataJob, LineageDirection.UPSTREAM) - .build(); + Edge inputDataset = + new Edge() + .setDestinationUrn( + DatasetUrn.createFromString( + 
"urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + Edge outputDataset = + new Edge() + .setDestinationUrn( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + Edge inputDataJob = + new Edge() + .setDestinationUrn( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .addEdge(inputDataset, LineageDirection.UPSTREAM) + .addEdge(outputDataset, LineageDirection.DOWNSTREAM) + .addEdge(inputDataJob, LineageDirection.UPSTREAM) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -446,5 +535,4 @@ public void testLocalDataJobInputAddEdge() { System.out.println(Arrays.asList(e.getStackTrace())); } } - } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java index 190ca8a8313c2..657669d19439c 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java @@ -1,5 +1,8 @@ package datahub.client.rest; +import static com.linkedin.metadata.Constants.*; +import static org.mockserver.model.HttpRequest.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.dataset.DatasetProperties; @@ -28,9 +31,7 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; - import javax.net.ssl.SSLHandshakeException; - import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; @@ -50,24 +51,16 @@ import org.mockserver.model.HttpRequest; import org.mockserver.model.RequestDefinition; -import static com.linkedin.metadata.Constants.*; -import static org.mockserver.model.HttpRequest.*; - - @RunWith(MockitoJUnitRunner.class) public class RestEmitterTest { - @Mock - HttpAsyncClientBuilder mockHttpClientFactory; + @Mock HttpAsyncClientBuilder mockHttpClientFactory; - @Mock - CloseableHttpAsyncClient mockClient; + @Mock CloseableHttpAsyncClient mockClient; - @Captor - ArgumentCaptor<HttpPost> postArgumentCaptor; + @Captor ArgumentCaptor<HttpPost> postArgumentCaptor; - @Captor - ArgumentCaptor<FutureCallback> callbackCaptor; + @Captor ArgumentCaptor<FutureCallback> callbackCaptor; @Before public void 
setupMocks() { @@ -79,7 +72,8 @@ public void testPost() throws URISyntaxException, IOException { RestEmitter emitter = RestEmitter.create(b -> b.asyncHttpClientBuilder(mockHttpClientFactory)); MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)"); + getMetadataChangeProposalWrapper( + "Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)"); emitter.emit(mcp, null); Mockito.verify(mockClient).execute(postArgumentCaptor.capture(), callbackCaptor.capture()); FutureCallback callback = callbackCaptor.getValue(); @@ -90,26 +84,32 @@ public void testPost() throws URISyntaxException, IOException { byte[] contentBytes = new byte[(int) testPost.getEntity().getContentLength()]; is.read(contentBytes); String contentString = new String(contentBytes, StandardCharsets.UTF_8); - String expectedContent = "{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset\\\"}\"}}}"; + String expectedContent = + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset\\\"}\"}}}"; Assert.assertEquals(expectedContent, contentString); } - + @Test - public void testExceptions() throws URISyntaxException, IOException, ExecutionException, InterruptedException { + public void testExceptions() + throws URISyntaxException, IOException, ExecutionException, InterruptedException { RestEmitter emitter = RestEmitter.create($ -> $.asyncHttpClientBuilder(mockHttpClientFactory)); - MetadataChangeProposalWrapper mcp = MetadataChangeProposalWrapper.create(b -> b.entityType("dataset") - .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)") - .upsert() - .aspect(new DatasetProperties().setDescription("Test Dataset"))); + MetadataChangeProposalWrapper mcp = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)") + .upsert() + .aspect(new DatasetProperties().setDescription("Test Dataset"))); Future<HttpResponse> mockFuture = Mockito.mock(Future.class); Mockito.when(mockClient.execute(Mockito.any(), Mockito.any())).thenReturn(mockFuture); - Mockito.when(mockFuture.get()).thenThrow(new ExecutionException("Test execution exception", null)); + Mockito.when(mockFuture.get()) + .thenThrow(new ExecutionException("Test execution exception", null)); try { emitter.emit(mcp, null).get(); Assert.fail("should not be here"); @@ -120,10 +120,18 @@ public void testExceptions() throws URISyntaxException, IOException, ExecutionEx @Test public void testExtraHeaders() throws Exception { - RestEmitter emitter = RestEmitter.create(b -> b.asyncHttpClientBuilder(mockHttpClientFactory) - .extraHeaders(Collections.singletonMap("Test-Header", "Test-Value"))); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create( - b -> b.entityType("dataset").entityUrn("urn:li:dataset:foo").upsert().aspect(new DatasetProperties())); + RestEmitter emitter = + RestEmitter.create( + b -> + b.asyncHttpClientBuilder(mockHttpClientFactory) + 
.extraHeaders(Collections.singletonMap("Test-Header", "Test-Value"))); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:foo") + .upsert() + .aspect(new DatasetProperties())); Future<HttpResponse> mockFuture = Mockito.mock(Future.class); Mockito.when(mockClient.execute(Mockito.any(), Mockito.any())).thenReturn(mockFuture); emitter.emit(mcpw, null); @@ -151,11 +159,15 @@ public void multithreadedTestExecutors() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) .respond(org.mockserver.model.HttpResponse.response().withStatusCode(200)); ExecutorService executor = Executors.newFixedThreadPool(10); ArrayList<Future> results = new ArrayList(); @@ -164,59 +176,82 @@ public void multithreadedTestExecutors() throws Exception { int numRequests = 100; for (int i = 0; i < numRequests; ++i) { int finalI = i; - results.add(executor.submit(() -> { - try { - Thread.sleep(random.nextInt(100)); - MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper(String.format("Test Dataset %d", testIteration), - String.format("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", finalI)); - Future<MetadataWriteResponse> future = emitter.emit(mcp, null); - MetadataWriteResponse response = future.get(); - Assert.assertTrue(response.isSuccess()); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - })); + results.add( + executor.submit( + () -> { + try { + Thread.sleep(random.nextInt(100)); + MetadataChangeProposalWrapper mcp = + getMetadataChangeProposalWrapper( + String.format("Test Dataset %d", testIteration), + String.format( + "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", finalI)); + Future<MetadataWriteResponse> future = emitter.emit(mcp, null); + MetadataWriteResponse response = future.get(); + Assert.assertTrue(response.isSuccess()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + })); } - results.forEach(x -> { - try { - x.get(); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - }); + results.forEach( + x -> { + try { + x.get(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + }); RequestDefinition[] recordedRequests = - testDataHubServer.getMockServer().retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); + testDataHubServer + .getMockServer() + .retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); Assert.assertEquals(100, recordedRequests.length); - List<HttpRequest> requests = Arrays.stream(recordedRequests) - .sequential() - .filter(x -> x instanceof HttpRequest) - .map(x -> (HttpRequest) x) - .collect(Collectors.toList()); + List<HttpRequest> requests = + Arrays.stream(recordedRequests) + .sequential() + .filter(x -> x instanceof HttpRequest) + .map(x -> (HttpRequest) x) + .collect(Collectors.toList()); ObjectMapper mapper = new ObjectMapper(); - int maxSize = 
Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); for (int i = 0; i < numRequests; ++i) { - String expectedContent = String.format("{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", i, testIteration); - - Assert.assertEquals(requests.stream().filter(x -> { - String bodyString = ""; - try { - bodyString = mapper.writeValueAsString( - mapper.readValue(x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); - } catch (IOException ioException) { - return false; - } - return bodyString.equals(expectedContent); - }).count(), 1); + String expectedContent = + String.format( + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", + i, testIteration); + + Assert.assertEquals( + requests.stream() + .filter( + x -> { + String bodyString = ""; + try { + bodyString = + mapper.writeValueAsString( + mapper.readValue( + x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); + } catch (IOException ioException) { + return false; + } + return bodyString.equals(expectedContent); + }) + .count(), + 1); } } - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -231,11 +266,15 @@ public void multithreadedTestSingleThreadCaller() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) .respond(org.mockserver.model.HttpResponse.response().withStatusCode(200)); ArrayList<Future> results = new ArrayList(); Random random = new Random(); @@ -243,46 +282,65 @@ public void multithreadedTestSingleThreadCaller() throws Exception { int numRequests = 100; for (int i = 0; i < numRequests; ++i) { MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper(String.format("Test Dataset %d", testIteration), + getMetadataChangeProposalWrapper( + String.format("Test Dataset %d", testIteration), 
String.format("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", i)); Future<MetadataWriteResponse> future = emitter.emit(mcp, null); results.add(future); } - results.forEach(x -> { - try { - x.get(); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - }); + results.forEach( + x -> { + try { + x.get(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + }); RequestDefinition[] recordedRequests = - testDataHubServer.getMockServer().retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); + testDataHubServer + .getMockServer() + .retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); Assert.assertEquals(numRequests, recordedRequests.length); - List<HttpRequest> requests = Arrays.stream(recordedRequests) - .sequential() - .filter(x -> x instanceof HttpRequest) - .map(x -> (HttpRequest) x) - .collect(Collectors.toList()); + List<HttpRequest> requests = + Arrays.stream(recordedRequests) + .sequential() + .filter(x -> x instanceof HttpRequest) + .map(x -> (HttpRequest) x) + .collect(Collectors.toList()); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); for (int i = 0; i < numRequests; ++i) { - String expectedContent = String.format("{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", i, testIteration); - - Assert.assertEquals(requests.stream().filter(x -> { - String bodyString = ""; - try { - bodyString = mapper.writeValueAsString( - mapper.readValue(x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); - } catch (IOException ioException) { - return false; - } - return bodyString.equals(expectedContent); - }).count(), 1); + String expectedContent = + String.format( + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", + i, testIteration); + + Assert.assertEquals( + requests.stream() + .filter( + x -> { + String bodyString = ""; + try { + bodyString = + mapper.writeValueAsString( + mapper.readValue( + x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); + } catch (IOException ioException) { + return false; + } + return bodyString.equals(expectedContent); + }) + .count(), + 1); } } @@ -292,30 +350,39 @@ public void testCallback() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), 
Times.unlimited()) - .respond(org.mockserver.model.HttpResponse.response().withStatusCode(500).withBody("exception")); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond( + org.mockserver.model.HttpResponse.response().withStatusCode(500).withBody("exception")); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); AtomicReference<MetadataWriteResponse> callbackResponse = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - Future<MetadataWriteResponse> future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - callbackResponse.set(response); - Assert.assertFalse(response.isSuccess()); - latch.countDown(); - } - - @Override - public void onFailure(Throwable exception) { - Assert.fail("Should not be called"); - latch.countDown(); - } - }); + Future<MetadataWriteResponse> future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + callbackResponse.set(response); + Assert.assertFalse(response.isSuccess()); + latch.countDown(); + } + + @Override + public void onFailure(Throwable exception) { + Assert.fail("Should not be called"); + latch.countDown(); + } + }); latch.await(); Assert.assertEquals(callbackResponse.get(), future.get()); @@ -328,16 +395,22 @@ public void testTimeoutOnGet() { RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); testDataHubServer.getMockServer().reset(); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.once()) - .respond(org.mockserver.model.HttpResponse.response() - .withStatusCode(200) - .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.once()) + .respond( + org.mockserver.model.HttpResponse.response() + .withStatusCode(200) + .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); try { long startTime = System.currentTimeMillis(); MetadataWriteResponse response = emitter.emit(mcpw, null).get(); @@ -356,20 +429,28 @@ public void testTimeoutOnGetWithTimeout() { RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); testDataHubServer.getMockServer().reset(); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.once()) - .respond(org.mockserver.model.HttpResponse.response() - .withStatusCode(200) - .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); - - 
MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.once()) + .respond( + org.mockserver.model.HttpResponse.response() + .withStatusCode(200) + .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); try { long startTime = System.currentTimeMillis(); MetadataWriteResponse response = - emitter.emit(mcpw, null).get(RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC - 3, TimeUnit.SECONDS); + emitter + .emit(mcpw, null) + .get(RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC - 3, TimeUnit.SECONDS); long duration = (long) ((System.currentTimeMillis() - startTime) / 1000.0); Assert.fail("Should not succeed with duration " + duration); } catch (Exception ioe) { @@ -388,14 +469,16 @@ public void testUserAgentHeader() throws IOException, ExecutionException, Interr properties.load(emitter.getClass().getClassLoader().getResourceAsStream("client.properties")); Assert.assertNotNull(properties.getProperty("clientVersion")); String version = properties.getProperty("clientVersion"); - testDataHubServer.getMockServer().verify( - request("/config") - .withHeader("User-Agent", "DataHub-RestClient/" + version)); + testDataHubServer + .getMockServer() + .verify(request("/config").withHeader("User-Agent", "DataHub-RestClient/" + version)); } - + @Test - public void testDisableSslVerification() throws IOException, InterruptedException, ExecutionException { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().disableSslVerification(true).build()); + public void testDisableSslVerification() + throws IOException, InterruptedException, ExecutionException { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().disableSslVerification(true).build()); final String hostWithSsl = "https://self-signed.badssl.com"; final HttpGet request = new HttpGet(hostWithSsl); @@ -403,10 +486,12 @@ public void testDisableSslVerification() throws IOException, InterruptedExceptio restEmitter.close(); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } - + @Test - public void testSslVerificationException() throws IOException, InterruptedException, ExecutionException { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().disableSslVerification(false).build()); + public void testSslVerificationException() + throws IOException, InterruptedException, ExecutionException { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().disableSslVerification(false).build()); final String hostWithSsl = "https://self-signed.badssl.com"; final HttpGet request = new HttpGet(hostWithSsl); try { @@ -418,4 +503,4 @@ public void testSslVerificationException() throws IOException, InterruptedExcept } restEmitter.close(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java index 70efcd240a0ef..0b2a4500e019d 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java +++ 
b/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java @@ -1,49 +1,53 @@ package datahub.event; +import com.linkedin.dataset.DatasetProperties; +import com.linkedin.mxe.MetadataChangeProposal; import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; - import org.junit.Test; import org.testng.Assert; -import com.linkedin.dataset.DatasetProperties; -import com.linkedin.mxe.MetadataChangeProposal; - - public class EventFormatterTest { @Test public void testPartialMCPW() throws URISyntaxException, IOException, EventValidationException { - MetadataChangeProposalWrapper metadataChangeProposalWrapper = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn("urn:li:foo") + MetadataChangeProposalWrapper metadataChangeProposalWrapper = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn("urn:li:foo") .upsert() - .aspect(new DatasetProperties().setDescription("A test dataset")) - .build(); + .aspect(new DatasetProperties().setDescription("A test dataset")) + .build(); EventFormatter eventFormatter = new EventFormatter(); MetadataChangeProposal mcp = eventFormatter.convert(metadataChangeProposalWrapper); Assert.assertEquals(mcp.getAspect().getContentType(), "application/json"); String content = mcp.getAspect().getValue().asString(StandardCharsets.UTF_8); Assert.assertEquals(content, "{\"description\":\"A test dataset\"}"); } - + @Test public void testUtf8Encoding() throws URISyntaxException, IOException { - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-project.my-dataset.user-table,PROD)") - .upsert() - .aspect(new DatasetProperties().setDescription("This is the canonical User profile dataset œ∑´´†¥¨ˆˆπ“‘åß∂ƒ©˙∆˚¬…æΩ≈ç√∫˜˜≤≥ç")) - .build(); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-project.my-dataset.user-table,PROD)") + .upsert() + .aspect( + new DatasetProperties() + .setDescription( + "This is the canonical User profile dataset œ∑´´†¥¨ˆˆπ“‘åß∂ƒ©˙∆˚¬…æΩ≈ç√∫˜˜≤≥ç")) + .build(); EventFormatter eventFormatter = new EventFormatter(); MetadataChangeProposal mcp = eventFormatter.convert(mcpw); Assert.assertEquals(mcp.getAspect().getContentType(), "application/json"); String content = mcp.getAspect().getValue().asString(StandardCharsets.UTF_8); - String expectedContent = "{\"description\":\"This is the canonical User profile dataset \\u0153\\u2211\\u00B4\\u00B4" - + "\\u2020\\u00A5\\u00A8\\u02C6\\u02C6\\u03C0\\u201C\\u2018\\u00E5\\u00DF\\u2202\\u0192\\u00A9\\u02D9\\u2206" - + "\\u02DA\\u00AC\\u2026\\u00E6\\u03A9\\u2248\\u00E7\\u221A\\u222B\\u02DC\\u02DC\\u2264\\u2265\\u00E7\"}"; + String expectedContent = + "{\"description\":\"This is the canonical User profile dataset \\u0153\\u2211\\u00B4\\u00B4" + + "\\u2020\\u00A5\\u00A8\\u02C6\\u02C6\\u03C0\\u201C\\u2018\\u00E5\\u00DF\\u2202\\u0192\\u00A9\\u02D9\\u2206" + + "\\u02DA\\u00AC\\u2026\\u00E6\\u03A9\\u2248\\u00E7\\u221A\\u222B\\u02DC\\u02DC\\u2264\\u2265\\u00E7\"}"; Assert.assertEquals(content, expectedContent); } } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java index 3d371954c0f37..3a333abc5cb10 
100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java @@ -3,75 +3,74 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.dataset.DatasetProperties; - import java.net.URISyntaxException; - import org.junit.Assert; import org.junit.Test; - public class MetadataChangeProposalWrapperTest { - /** - * We should throw errors on validation as exceptions - */ - @Test - public void testBuilderExceptions() { - try { - MetadataChangeProposalWrapper.create(b -> b - .entityType("dataset") - .entityUrn("foo") // bad urn should throw exception - ); - Assert.fail("Should throw an exception"); - } catch (EventValidationException e) { - Assert.assertTrue("Underlying exception should be a URI syntax issue", e.getCause() instanceof URISyntaxException); - } catch (Exception e) { - Assert.fail("Should not throw any other exception"); - } - } - - @Test - public void testAspectInferenceSuccess() throws EventValidationException { - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create(b -> b - .entityType("dataset") - .entityUrn("urn:li:dataset:(foo,bar,PROD)") - .upsert() - .aspect(new DatasetProperties())); - Assert.assertEquals(mcpw.getAspectName(), "datasetProperties"); - } - - /** - * We throw exceptions on using the regular builder pattern - * - * @throws URISyntaxException - * @throws EventValidationException - */ - @Test(expected = EventValidationException.class) - public void testAspectInferenceFailure() throws URISyntaxException, EventValidationException { - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn("urn:li:dataset:(foo,bar,PROD)") - .upsert() - .aspect(new AuditStamp().setActor(Urn.createFromString("urn:li:corpUser:jdoe"))) - .build(); + /** We should throw errors on validation as exceptions */ + @Test + public void testBuilderExceptions() { + try { + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataset").entityUrn("foo") // bad urn should throw exception + ); + Assert.fail("Should throw an exception"); + } catch (EventValidationException e) { + Assert.assertTrue( + "Underlying exception should be a URI syntax issue", + e.getCause() instanceof URISyntaxException); + } catch (Exception e) { + Assert.fail("Should not throw any other exception"); } + } - /** - * We throw exceptions on using the lambda builder pattern - * - * @throws URISyntaxException - * @throws EventValidationException - */ - @Test(expected = EventValidationException.class) - public void testAspectInferenceFailureLambda() throws URISyntaxException, EventValidationException { - Urn actorUrn = Urn.createFromString("urn:li:corpUser:jdoe"); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create(b -> b - .entityType("dataset") - .entityUrn("urn:li:dataset:(foo,bar,PROD)") - .upsert() - .aspect(new AuditStamp().setActor(actorUrn)) - ); - } + @Test + public void testAspectInferenceSuccess() throws EventValidationException { + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(foo,bar,PROD)") + .upsert() + .aspect(new DatasetProperties())); + Assert.assertEquals(mcpw.getAspectName(), "datasetProperties"); + } + /** + * We throw exceptions on using the regular builder pattern + * + * @throws URISyntaxException + 
* @throws EventValidationException + */ + @Test(expected = EventValidationException.class) + public void testAspectInferenceFailure() throws URISyntaxException, EventValidationException { + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn("urn:li:dataset:(foo,bar,PROD)") + .upsert() + .aspect(new AuditStamp().setActor(Urn.createFromString("urn:li:corpUser:jdoe"))) + .build(); + } + /** + * We throw exceptions on using the lambda builder pattern + * + * @throws URISyntaxException + * @throws EventValidationException + */ + @Test(expected = EventValidationException.class) + public void testAspectInferenceFailureLambda() + throws URISyntaxException, EventValidationException { + Urn actorUrn = Urn.createFromString("urn:li:corpUser:jdoe"); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(foo,bar,PROD)") + .upsert() + .aspect(new AuditStamp().setActor(actorUrn))); + } } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java index e7cdee3f369e1..44e60a4bde783 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java @@ -1,11 +1,10 @@ package datahub.server; -import org.mockserver.integration.ClientAndServer; -import org.mockserver.matchers.Times; - import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.*; +import org.mockserver.integration.ClientAndServer; +import org.mockserver.matchers.Times; public class TestDataHubServer { @@ -26,17 +25,12 @@ public TestDataHubServer() { public void init() { mockServer - .when( - request() - .withMethod("GET") - .withPath("/config") - .withHeader("Content-type", "application/json"), - Times.unlimited() - ).respond( - org.mockserver.model.HttpResponse.response() - .withBody("{\"noCode\": true }") - ); + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); } - - } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java b/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java index e6f93eb1a4f0c..12bbb9e59ab95 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java @@ -32,7 +32,6 @@ import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.FieldDescriptor; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -89,300 +88,296 @@ * @author kenton@google.com Kenton Varda */ public class ExtensionRegistry extends ExtensionRegistryLite { - /** Construct a new, empty instance. */ - public static ExtensionRegistry newInstance() { - return new ExtensionRegistry(); - } - - /** Get the unmodifiable singleton empty instance. */ - public static ExtensionRegistry getEmptyRegistry() { - return EMPTY_REGISTRY; - } - - - /** Returns an unmodifiable view of the registry. 
*/ - @Override - public ExtensionRegistry getUnmodifiable() { - return new ExtensionRegistry(this); - } - - /** A (Descriptor, Message) pair, returned by lookup methods. */ - public static final class ExtensionInfo { - /** The extension's descriptor. */ - public final FieldDescriptor descriptor; - - /** - * A default instance of the extension's type, if it has a message type. Otherwise, {@code - * null}. - */ - public final Message defaultInstance; - - private ExtensionInfo(final FieldDescriptor descriptor) { - this.descriptor = descriptor; - defaultInstance = null; - } - - private ExtensionInfo(final FieldDescriptor descriptor, final Message defaultInstance) { - this.descriptor = descriptor; - this.defaultInstance = defaultInstance; - } - } - - /** Deprecated. Use {@link #findImmutableExtensionByName(String)} instead. */ - @Deprecated - public ExtensionInfo findExtensionByName(final String fullName) { - return findImmutableExtensionByName(fullName); - } + /** Construct a new, empty instance. */ + public static ExtensionRegistry newInstance() { + return new ExtensionRegistry(); + } + + /** Get the unmodifiable singleton empty instance. */ + public static ExtensionRegistry getEmptyRegistry() { + return EMPTY_REGISTRY; + } + + /** Returns an unmodifiable view of the registry. */ + @Override + public ExtensionRegistry getUnmodifiable() { + return new ExtensionRegistry(this); + } + + /** A (Descriptor, Message) pair, returned by lookup methods. */ + public static final class ExtensionInfo { + /** The extension's descriptor. */ + public final FieldDescriptor descriptor; /** - * Find an extension for immutable APIs by fully-qualified field name, in the proto namespace. - * i.e. {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. - * - * @return Information about the extension if found, or {@code null} otherwise. + * A default instance of the extension's type, if it has a message type. Otherwise, {@code + * null}. */ - public ExtensionInfo findImmutableExtensionByName(final String fullName) { - return immutableExtensionsByName.get(fullName); - } + public final Message defaultInstance; - /** - * Find an extension for mutable APIs by fully-qualified field name, in the proto namespace. i.e. - * {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. - * - * @return Information about the extension if found, or {@code null} otherwise. - */ - public ExtensionInfo findMutableExtensionByName(final String fullName) { - return mutableExtensionsByName.get(fullName); + private ExtensionInfo(final FieldDescriptor descriptor) { + this.descriptor = descriptor; + defaultInstance = null; } - /** Deprecated. Use {@link #findImmutableExtensionByNumber( Descriptors.Descriptor, int)} */ - @Deprecated - public ExtensionInfo findExtensionByNumber( - final Descriptor containingType, final int fieldNumber) { - return findImmutableExtensionByNumber(containingType, fieldNumber); + private ExtensionInfo(final FieldDescriptor descriptor, final Message defaultInstance) { + this.descriptor = descriptor; + this.defaultInstance = defaultInstance; } - - /** - * Find an extension by containing type and field number for immutable APIs. - * - * @return Information about the extension if found, or {@code null} otherwise. - */ - public ExtensionInfo findImmutableExtensionByNumber( - final Descriptor containingType, final int fieldNumber) { - return immutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + } + + /** Deprecated. 
Use {@link #findImmutableExtensionByName(String)} instead. */ + @Deprecated + public ExtensionInfo findExtensionByName(final String fullName) { + return findImmutableExtensionByName(fullName); + } + + /** + * Find an extension for immutable APIs by fully-qualified field name, in the proto namespace. + * i.e. {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findImmutableExtensionByName(final String fullName) { + return immutableExtensionsByName.get(fullName); + } + + /** + * Find an extension for mutable APIs by fully-qualified field name, in the proto namespace. i.e. + * {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findMutableExtensionByName(final String fullName) { + return mutableExtensionsByName.get(fullName); + } + + /** Deprecated. Use {@link #findImmutableExtensionByNumber( Descriptors.Descriptor, int)} */ + @Deprecated + public ExtensionInfo findExtensionByNumber( + final Descriptor containingType, final int fieldNumber) { + return findImmutableExtensionByNumber(containingType, fieldNumber); + } + + /** + * Find an extension by containing type and field number for immutable APIs. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findImmutableExtensionByNumber( + final Descriptor containingType, final int fieldNumber) { + return immutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + } + + /** + * Find an extension by containing type and field number for mutable APIs. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findMutableExtensionByNumber( + final Descriptor containingType, final int fieldNumber) { + return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + } + + /** + * Find all extensions for mutable APIs by fully-qualified name of extended class. Note that this + * method is more computationally expensive than getting a single extension by name or number. + * + * @return Information about the extensions found, or {@code null} if there are none. + */ + public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) { + HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); + for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) { + if (pair.descriptor.getFullName().equals(fullName)) { + extensions.add(mutableExtensionsByNumber.get(pair)); + } } - - /** - * Find an extension by containing type and field number for mutable APIs. - * - * @return Information about the extension if found, or {@code null} otherwise. - */ - public ExtensionInfo findMutableExtensionByNumber( - final Descriptor containingType, final int fieldNumber) { - return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + return extensions; + } + + /** + * Find all extensions for immutable APIs by fully-qualified name of extended class. Note that + * this method is more computationally expensive than getting a single extension by name or + * number. + * + * @return Information about the extensions found, or {@code null} if there are none. 
+ */ + public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) { + HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); + for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) { + if (pair.descriptor.getFullName().equals(fullName)) { + extensions.add(immutableExtensionsByNumber.get(pair)); + } } - - /** - * Find all extensions for mutable APIs by fully-qualified name of extended class. Note that this - * method is more computationally expensive than getting a single extension by name or number. - * - * @return Information about the extensions found, or {@code null} if there are none. - */ - public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) { - HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); - for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) { - if (pair.descriptor.getFullName().equals(fullName)) { - extensions.add(mutableExtensionsByNumber.get(pair)); - } - } - return extensions; + return extensions; + } + + /** Add an extension from a generated file to the registry. */ + public void add(final Extension<?, ?> extension) { + if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE + && extension.getExtensionType() != Extension.ExtensionType.MUTABLE) { + // do not support other extension types. ignore + return; } - - /** - * Find all extensions for immutable APIs by fully-qualified name of extended class. Note that - * this method is more computationally expensive than getting a single extension by name or - * number. - * - * @return Information about the extensions found, or {@code null} if there are none. - */ - public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) { - HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); - for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) { - if (pair.descriptor.getFullName().equals(fullName)) { - extensions.add(immutableExtensionsByNumber.get(pair)); - } - } - return extensions; + add(newExtensionInfo(extension), extension.getExtensionType()); + } + + /** Add an extension from a generated file to the registry. */ + public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) { + add((Extension<?, ?>) extension); + } + + static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) { + if (extension.getDescriptor().getJavaType() == FieldDescriptor.JavaType.MESSAGE) { + if (extension.getMessageDefaultInstance() == null) { + throw new IllegalStateException( + "Registered message-type extension had null default instance: " + + extension.getDescriptor().getFullName()); + } + return new ExtensionInfo( + extension.getDescriptor(), (Message) extension.getMessageDefaultInstance()); + } else { + return new ExtensionInfo(extension.getDescriptor(), null); } - - /** Add an extension from a generated file to the registry. */ - public void add(final Extension<?, ?> extension) { - if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE - && extension.getExtensionType() != Extension.ExtensionType.MUTABLE) { - // do not support other extension types. ignore - return; - } - add(newExtensionInfo(extension), extension.getExtensionType()); + } + + /** Add a non-message-type extension to the registry by descriptor. 
*/ + public void add(final FieldDescriptor type) { + if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) { + throw new IllegalArgumentException( + "ExtensionRegistry.add() must be provided a default instance when " + + "adding an embedded message extension."); } - - /** Add an extension from a generated file to the registry. */ - public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) { - add((Extension<?, ?>) extension); + ExtensionInfo info = new ExtensionInfo(type, null); + add(info, Extension.ExtensionType.IMMUTABLE); + add(info, Extension.ExtensionType.MUTABLE); + } + + /** Add a message-type extension to the registry by descriptor. */ + public void add(final FieldDescriptor type, final Message defaultInstance) { + if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) { + throw new IllegalArgumentException( + "ExtensionRegistry.add() provided a default instance for a non-message extension."); } - - static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) { - if (extension.getDescriptor().getJavaType() == FieldDescriptor.JavaType.MESSAGE) { - if (extension.getMessageDefaultInstance() == null) { - throw new IllegalStateException( - "Registered message-type extension had null default instance: " - + extension.getDescriptor().getFullName()); - } - return new ExtensionInfo( - extension.getDescriptor(), (Message) extension.getMessageDefaultInstance()); - } else { - return new ExtensionInfo(extension.getDescriptor(), null); - } + add(new ExtensionInfo(type, defaultInstance), Extension.ExtensionType.IMMUTABLE); + } + + // ================================================================= + // Private stuff. + + private ExtensionRegistry() { + this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>(); + this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>(); + this.immutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); + this.mutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); + } + + private ExtensionRegistry(ExtensionRegistry other) { + super(other); + this.immutableExtensionsByName = Collections.unmodifiableMap(other.immutableExtensionsByName); + this.mutableExtensionsByName = Collections.unmodifiableMap(other.mutableExtensionsByName); + this.immutableExtensionsByNumber = + Collections.unmodifiableMap(other.immutableExtensionsByNumber); + this.mutableExtensionsByNumber = Collections.unmodifiableMap(other.mutableExtensionsByNumber); + } + + private final Map<String, ExtensionInfo> immutableExtensionsByName; + private final Map<String, ExtensionInfo> mutableExtensionsByName; + private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber; + private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber; + + ExtensionRegistry(boolean empty) { + super(EMPTY_REGISTRY_LITE); + this.immutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); + this.mutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); + this.immutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); + this.mutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); + } + + static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true); + + private void add(final ExtensionInfo extension, final Extension.ExtensionType extensionType) { + if (!extension.descriptor.isExtension()) { + throw new IllegalArgumentException( + "ExtensionRegistry.add() was given a FieldDescriptor for a 
regular " + + "(non-extension) field."); } - /** Add a non-message-type extension to the registry by descriptor. */ - public void add(final FieldDescriptor type) { - if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) { - throw new IllegalArgumentException( - "ExtensionRegistry.add() must be provided a default instance when " - + "adding an embedded message extension."); - } - ExtensionInfo info = new ExtensionInfo(type, null); - add(info, Extension.ExtensionType.IMMUTABLE); - add(info, Extension.ExtensionType.MUTABLE); + Map<String, ExtensionInfo> extensionsByName; + Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber; + switch (extensionType) { + case IMMUTABLE: + extensionsByName = immutableExtensionsByName; + extensionsByNumber = immutableExtensionsByNumber; + break; + case MUTABLE: + extensionsByName = mutableExtensionsByName; + extensionsByNumber = mutableExtensionsByNumber; + break; + default: + // Ignore the unknown supported type. + return; } - /** Add a message-type extension to the registry by descriptor. */ - public void add(final FieldDescriptor type, final Message defaultInstance) { - if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) { - throw new IllegalArgumentException( - "ExtensionRegistry.add() provided a default instance for a non-message extension."); - } - add(new ExtensionInfo(type, defaultInstance), Extension.ExtensionType.IMMUTABLE); + extensionsByName.put(extension.descriptor.getFullName(), extension); + extensionsByNumber.put( + new DescriptorIntPair( + extension.descriptor.getContainingType(), extension.descriptor.getNumber()), + extension); + + final FieldDescriptor field = extension.descriptor; + if (field.getContainingType().getOptions().getMessageSetWireFormat() + && field.getType() == FieldDescriptor.Type.MESSAGE + && field.isOptional() + && field.getExtensionScope() == field.getMessageType()) { + // This is an extension of a MessageSet type defined within the extension + // type's own scope. For backwards-compatibility, allow it to be looked + // up by type name. + extensionsByName.put(field.getMessageType().getFullName(), extension); } - - // ================================================================= - // Private stuff. - - private ExtensionRegistry() { - this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>(); - this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>(); - this.immutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); - this.mutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); + } + + /** + * DataHub modification of hashcode/equals based on full name. The upstream project uses the + * descriptor and in our use of the registry results in objects that are practically identical + * except for the `jsonName` field. This is a difference generated by internal components and is + * not under our control. + * + * <p>A (GenericDescriptor, int) pair, used as a map key. 
+ */ + private static final class DescriptorIntPair { + private final String fullName; + private final Descriptor descriptor; + private final int number; + + DescriptorIntPair(final Descriptor descriptor, final int number) { + this.descriptor = descriptor; + this.fullName = descriptor.getFullName(); + this.number = number; } - private ExtensionRegistry(ExtensionRegistry other) { - super(other); - this.immutableExtensionsByName = Collections.unmodifiableMap(other.immutableExtensionsByName); - this.mutableExtensionsByName = Collections.unmodifiableMap(other.mutableExtensionsByName); - this.immutableExtensionsByNumber = - Collections.unmodifiableMap(other.immutableExtensionsByNumber); - this.mutableExtensionsByNumber = Collections.unmodifiableMap(other.mutableExtensionsByNumber); - } - - private final Map<String, ExtensionInfo> immutableExtensionsByName; - private final Map<String, ExtensionInfo> mutableExtensionsByName; - private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber; - private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber; - - ExtensionRegistry(boolean empty) { - super(EMPTY_REGISTRY_LITE); - this.immutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); - this.mutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); - this.immutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); - this.mutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); - } - - static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true); - - private void add(final ExtensionInfo extension, final Extension.ExtensionType extensionType) { - if (!extension.descriptor.isExtension()) { - throw new IllegalArgumentException( - "ExtensionRegistry.add() was given a FieldDescriptor for a regular " - + "(non-extension) field."); - } - - Map<String, ExtensionInfo> extensionsByName; - Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber; - switch (extensionType) { - case IMMUTABLE: - extensionsByName = immutableExtensionsByName; - extensionsByNumber = immutableExtensionsByNumber; - break; - case MUTABLE: - extensionsByName = mutableExtensionsByName; - extensionsByNumber = mutableExtensionsByNumber; - break; - default: - // Ignore the unknown supported type. - return; - } - - extensionsByName.put(extension.descriptor.getFullName(), extension); - extensionsByNumber.put( - new DescriptorIntPair( - extension.descriptor.getContainingType(), extension.descriptor.getNumber()), - extension); - - final FieldDescriptor field = extension.descriptor; - if (field.getContainingType().getOptions().getMessageSetWireFormat() - && field.getType() == FieldDescriptor.Type.MESSAGE - && field.isOptional() - && field.getExtensionScope() == field.getMessageType()) { - // This is an extension of a MessageSet type defined within the extension - // type's own scope. For backwards-compatibility, allow it to be looked - // up by type name. - extensionsByName.put(field.getMessageType().getFullName(), extension); - } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DescriptorIntPair that = (DescriptorIntPair) o; + + if (number != that.number) { + return false; + } + return fullName.equals(that.fullName); } - /** - * - * DataHub modification of hashcode/equals based on full name. 
The upstream - * project uses the descriptor and in our use of the registry results - * in objects that are practically identical except for the `jsonName` field. - * This is a difference generated by internal components and is not under - * our control. - * - * A (GenericDescriptor, int) pair, used as a map key. - * - * */ - private static final class DescriptorIntPair { - private final String fullName; - private final Descriptor descriptor; - private final int number; - - DescriptorIntPair(final Descriptor descriptor, final int number) { - this.descriptor = descriptor; - this.fullName = descriptor.getFullName(); - this.number = number; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - DescriptorIntPair that = (DescriptorIntPair) o; - - if (number != that.number) { - return false; - } - return fullName.equals(that.fullName); - } - - @Override - public int hashCode() { - int result = fullName.hashCode(); - result = 31 * result + number; - return result; - } + @Override + public int hashCode() { + int result = fullName.hashCode(); + result = 31 * result + number; + return result; } + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java index c0a6a2eaa410c..e4030e12574f0 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java @@ -25,51 +25,52 @@ public DirectoryWalker(String directory, String[] excludePatterns) { this.excludeMatchers.add(FileSystems.getDefault().getPathMatcher("glob:" + excludePattern)); } } - } public Stream<Path> walkFiles() throws IOException { final Path baseDir = this.rootDirectory; final ArrayList<Path> files = new ArrayList<>(); - Files.walkFileTree(this.rootDirectory, new FileVisitor<Path>() { - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - return FileVisitResult.CONTINUE; - } + Files.walkFileTree( + this.rootDirectory, + new FileVisitor<Path>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) + throws IOException { + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - boolean excluded = false; - Path relativePath = baseDir.relativize(file); - if (!includeMatcher.matches(relativePath)) { - excluded = true; - } else { - for (PathMatcher matcher : excludeMatchers) { - if (matcher.matches(relativePath)) { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) + throws IOException { + boolean excluded = false; + Path relativePath = baseDir.relativize(file); + if (!includeMatcher.matches(relativePath)) { excluded = true; + } else { + for (PathMatcher matcher : excludeMatchers) { + if (matcher.matches(relativePath)) { + excluded = true; + } + } } - } - } - if (!excluded) { - files.add(file); - } - return FileVisitResult.CONTINUE; - } + if (!excluded) { + files.add(file); + } + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } + @Override + public FileVisitResult visitFileFailed(Path file, IOException 
exc) throws IOException { + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } - }); + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + return FileVisitResult.CONTINUE; + } + }); return files.stream(); } - - } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java index dc49457e3e6e1..dcc95222fabf2 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java @@ -1,14 +1,13 @@ package datahub.protobuf; -import com.linkedin.common.FabricType; import com.linkedin.common.AuditStamp; +import com.linkedin.common.FabricType; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.DataPlatformUrn; import datahub.client.Emitter; import datahub.client.file.FileEmitter; import datahub.client.file.FileEmitterConfig; import datahub.client.rest.RestEmitter; - import java.io.FileInputStream; import java.io.InputStream; import java.nio.file.Files; @@ -25,350 +24,396 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; - -/** - * Rudimentary application - */ +/** Rudimentary application */ public class Proto2DataHub { - private static final Option OPTION_DATAHUB_PLATFORM = - Option.builder() - .longOpt("platform") - .hasArg() - .desc("[Optional] The data platform to produce schemas for. e.g. kafka, snowflake, etc. (defaults to kafka)") - .build(); - - private static final Option OPTION_DATAHUB_API = Option.builder() - .longOpt("datahub_api") - .hasArg() - .desc("[Optional] The API endpoint for DataHub GMS. (defaults to https://localhost:8080)") - .build(); - - private static final Option OPTION_DATAHUB_TOKEN = Option.builder() - .longOpt("datahub_token") - .hasArg() - .desc("[Optional] The authentication token for DataHub API access. (defaults to empty)") - .build(); - - private static final Option OPTION_DESCRIPTOR = Option.builder() - .longOpt("descriptor") - .hasArg() - .desc("[Required] The generated protobuf descriptor file. " - + "Typically a single .dsc file for the repo or a .protoc file (1:1 with each src file)") - .required() - .build(); - - private static final Option OPTION_FILE = Option.builder() - .longOpt("file") - .hasArg() - .desc("[Optional if using --directory] The protobuf source file. Typically a .proto file.") - .build(); - - private static final Option OPTION_DIR = Option.builder() - .longOpt("directory") - .hasArg() - .desc("[Optional if using --file] The root directory containing protobuf source files.") - .build(); - - private static final Option OPTION_EXCLUDE_PATTERN = Option.builder() - .longOpt("exclude") - .valueSeparator(',') - .hasArgs() - .desc("[Optional] Exclude patterns to avoid processing all source files, separated by ,. Typically used with --directory option. " - + "Follows glob patterns: e.g. --exclude \"build/**,generated/**\" will exclude all files in the build " - + "and generated directories under the rootDirectory given by the --directory option") - .build(); - - private static final Option OPTION_DATAHUB_USER = Option.builder() - .longOpt("datahub_user") - .hasArg() - .desc("[Optional] The datahub user to attribute this ingestion to. 
(defaults to ..)") - .build(); - - private static final Option OPTION_ENV = Option.builder() - .longOpt("env") - .hasArg() - .desc("[Optional] The environment to attach all entities to. Typically, DEV, PROD etc. (defaults to DEV)") - .build(); - - private static final Option OPTION_GITHUB_ORG = Option.builder() - .longOpt("github_org") - .hasArg() - .desc("[Optional] The GitHub organization that this schema repository belongs to. " - + "We will translate comments in your protoc files like @datahub-project/data-team " - + "to GitHub team urls like: https://github.com/orgs/datahub-project/teams/data-team") - .build(); - - private static final Option OPTION_SLACK_ID = Option.builder() - .longOpt("slack_id") - .hasArg() - .desc("[Optional] The Slack team id if your protobuf files contain comments with references to channel names. " - + "We will translate comments like #data-eng in your protobuf file to slack urls like: " - + "https://slack.com/app_redirect?channel=data-eng&team=T1234 following the " - + "documentation at (https://api.slack.com/reference/deep-linking#deep-linking-into-your-slack-app__opening-a-channel-by-name-or-id) " - + "The easiest way to find your Slack team id is to open your workspace in your browser. It should look " - + "something like: https://app.slack.com/client/TUMKD5EGJ/... In this case, the team-id is TUMKD5EGJ.") - .build(); - - private static final Option OPTION_TRANSPORT = Option.builder() - .longOpt("transport") - .hasArg() - .desc("[Optional] What transport to use to communicate with DataHub. Options are: rest (default), kafka and file.") - .build(); - - private static final Option OPTION_FILENAME = Option.builder() - .longOpt("filename") - .hasArg() - .desc("[Required if using transport file] Filename to write output to.") - .build(); - - private static final Option OPTION_HELP = Option.builder() - .longOpt("help") - .desc("Print this help message") - .build(); - - private static final Option OPTION_SUBTYPE = Option.builder() - .longOpt("subtype") - .desc("[Optional] A custom subtype to attach to all entities produced. e.g. event, schema, topic etc." - + "(Default is schema)") - .build(); - - enum TransportOptions { - REST, - KAFKA, - FILE + private static final Option OPTION_DATAHUB_PLATFORM = + Option.builder() + .longOpt("platform") + .hasArg() + .desc( + "[Optional] The data platform to produce schemas for. e.g. kafka, snowflake, etc. (defaults to kafka)") + .build(); + + private static final Option OPTION_DATAHUB_API = + Option.builder() + .longOpt("datahub_api") + .hasArg() + .desc("[Optional] The API endpoint for DataHub GMS. (defaults to https://localhost:8080)") + .build(); + + private static final Option OPTION_DATAHUB_TOKEN = + Option.builder() + .longOpt("datahub_token") + .hasArg() + .desc("[Optional] The authentication token for DataHub API access. (defaults to empty)") + .build(); + + private static final Option OPTION_DESCRIPTOR = + Option.builder() + .longOpt("descriptor") + .hasArg() + .desc( + "[Required] The generated protobuf descriptor file. " + + "Typically a single .dsc file for the repo or a .protoc file (1:1 with each src file)") + .required() + .build(); + + private static final Option OPTION_FILE = + Option.builder() + .longOpt("file") + .hasArg() + .desc( + "[Optional if using --directory] The protobuf source file. 
Typically a .proto file.") + .build(); + + private static final Option OPTION_DIR = + Option.builder() + .longOpt("directory") + .hasArg() + .desc("[Optional if using --file] The root directory containing protobuf source files.") + .build(); + + private static final Option OPTION_EXCLUDE_PATTERN = + Option.builder() + .longOpt("exclude") + .valueSeparator(',') + .hasArgs() + .desc( + "[Optional] Exclude patterns to avoid processing all source files, separated by ,. Typically used with --directory option. " + + "Follows glob patterns: e.g. --exclude \"build/**,generated/**\" will exclude all files in the build " + + "and generated directories under the rootDirectory given by the --directory option") + .build(); + + private static final Option OPTION_DATAHUB_USER = + Option.builder() + .longOpt("datahub_user") + .hasArg() + .desc("[Optional] The datahub user to attribute this ingestion to. (defaults to ..)") + .build(); + + private static final Option OPTION_ENV = + Option.builder() + .longOpt("env") + .hasArg() + .desc( + "[Optional] The environment to attach all entities to. Typically, DEV, PROD etc. (defaults to DEV)") + .build(); + + private static final Option OPTION_GITHUB_ORG = + Option.builder() + .longOpt("github_org") + .hasArg() + .desc( + "[Optional] The GitHub organization that this schema repository belongs to. " + + "We will translate comments in your protoc files like @datahub-project/data-team " + + "to GitHub team urls like: https://github.com/orgs/datahub-project/teams/data-team") + .build(); + + private static final Option OPTION_SLACK_ID = + Option.builder() + .longOpt("slack_id") + .hasArg() + .desc( + "[Optional] The Slack team id if your protobuf files contain comments with references to channel names. " + + "We will translate comments like #data-eng in your protobuf file to slack urls like: " + + "https://slack.com/app_redirect?channel=data-eng&team=T1234 following the " + + "documentation at (https://api.slack.com/reference/deep-linking#deep-linking-into-your-slack-app__opening-a-channel-by-name-or-id) " + + "The easiest way to find your Slack team id is to open your workspace in your browser. It should look " + + "something like: https://app.slack.com/client/TUMKD5EGJ/... In this case, the team-id is TUMKD5EGJ.") + .build(); + + private static final Option OPTION_TRANSPORT = + Option.builder() + .longOpt("transport") + .hasArg() + .desc( + "[Optional] What transport to use to communicate with DataHub. Options are: rest (default), kafka and file.") + .build(); + + private static final Option OPTION_FILENAME = + Option.builder() + .longOpt("filename") + .hasArg() + .desc("[Required if using transport file] Filename to write output to.") + .build(); + + private static final Option OPTION_HELP = + Option.builder().longOpt("help").desc("Print this help message").build(); + + private static final Option OPTION_SUBTYPE = + Option.builder() + .longOpt("subtype") + .desc( + "[Optional] A custom subtype to attach to all entities produced. e.g. event, schema, topic etc." 
+ + "(Default is schema)") + .build(); + + enum TransportOptions { + REST, + KAFKA, + FILE + } + + static class AppConfig { + + private final String datahubUser; + private final FabricType fabricType; + private final String datahubAPI; + private final String datahubToken; + private final String githubOrg; + private final String slackId; + private final String dataPlatform; + private final String protoc; + private final String inputFile; + private final String inputDir; + private final TransportOptions transport; + private final String filename; + private final String subType; + private final String[] excludePatterns; + + AppConfig(CommandLine cli) { + Map<String, String> env = System.getenv(); + datahubAPI = + cli.getOptionValue( + OPTION_DATAHUB_API, env.getOrDefault("DATAHUB_API", "http://localhost:8080")); + datahubToken = + cli.getOptionValue(OPTION_DATAHUB_TOKEN, env.getOrDefault("DATAHUB_TOKEN", "")); + datahubUser = + cli.getOptionValue(OPTION_DATAHUB_USER, env.getOrDefault("DATAHUB_USER", "datahub")); + fabricType = + FabricType.valueOf( + cli.getOptionValue(OPTION_ENV, env.getOrDefault("DATAHUB_ENV", "DEV")) + .toUpperCase(Locale.ROOT)); + githubOrg = + cli.getOptionValue(OPTION_GITHUB_ORG, env.getOrDefault("DATAHUB_GITHUBORG", null)); + slackId = cli.getOptionValue(OPTION_SLACK_ID, env.getOrDefault("DATAHUB_SLACKID", null)); + dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT); + protoc = cli.getOptionValue(OPTION_DESCRIPTOR); + inputFile = cli.getOptionValue(OPTION_FILE, null); + transport = + TransportOptions.valueOf( + cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT)); + filename = cli.getOptionValue(OPTION_FILENAME, null); + subType = cli.getOptionValue(OPTION_SUBTYPE, "schema").toLowerCase(Locale.ROOT); + inputDir = cli.getOptionValue(OPTION_DIR, null); + excludePatterns = cli.getOptionValues(OPTION_EXCLUDE_PATTERN); } - static class AppConfig { - - private final String datahubUser; - private final FabricType fabricType; - private final String datahubAPI; - private final String datahubToken; - private final String githubOrg; - private final String slackId; - private final String dataPlatform; - private final String protoc; - private final String inputFile; - private final String inputDir; - private final TransportOptions transport; - private final String filename; - private final String subType; - private final String[] excludePatterns; - - - AppConfig(CommandLine cli) { - Map<String, String> env = System.getenv(); - datahubAPI = cli.getOptionValue(OPTION_DATAHUB_API, env.getOrDefault("DATAHUB_API", "http://localhost:8080")); - datahubToken = cli.getOptionValue(OPTION_DATAHUB_TOKEN, env.getOrDefault("DATAHUB_TOKEN", "")); - datahubUser = cli.getOptionValue(OPTION_DATAHUB_USER, env.getOrDefault("DATAHUB_USER", "datahub")); - fabricType = FabricType.valueOf( - cli.getOptionValue(OPTION_ENV, env.getOrDefault("DATAHUB_ENV", "DEV")).toUpperCase(Locale.ROOT)); - githubOrg = cli.getOptionValue(OPTION_GITHUB_ORG, env.getOrDefault("DATAHUB_GITHUBORG", null)); - slackId = cli.getOptionValue(OPTION_SLACK_ID, env.getOrDefault("DATAHUB_SLACKID", null)); - dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT); - protoc = cli.getOptionValue(OPTION_DESCRIPTOR); - inputFile = cli.getOptionValue(OPTION_FILE, null); - transport = TransportOptions.valueOf(cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT)); - filename = cli.getOptionValue(OPTION_FILENAME, null); - subType = 
cli.getOptionValue(OPTION_SUBTYPE, "schema").toLowerCase(Locale.ROOT); - inputDir = cli.getOptionValue(OPTION_DIR, null); - excludePatterns = cli.getOptionValues(OPTION_EXCLUDE_PATTERN); + private AppConfig validate() throws Exception { + switch (transport) { + case FILE: + if (filename == null) { + throw new Exception("Transport file is being used, but a filename was not provided"); + } + break; + default: + // do nothing + } + if (this.protoc != null) { + Path path = Path.of(this.protoc); + if (!Files.exists(path)) { + throw new Exception( + String.format("Proto-descriptor file %s does not exist", this.protoc)); } - - private AppConfig validate() throws Exception { - switch (transport) { - case FILE: - if (filename == null) { - throw new Exception("Transport file is being used, but a filename was not provided"); - } - break; - default: - // do nothing - } - if (this.protoc != null) { - Path path = Path.of(this.protoc); - if (!Files.exists(path)) { - throw new Exception(String.format("Proto-descriptor file %s does not exist", this.protoc)); - } - if (!Files.isRegularFile(path)) { - throw new Exception(String.format("Proto-descriptor file %s is not a regular file", this.protoc)); - } - } - if ((this.inputFile == null) && (this.inputDir == null)) { - throw new Exception("Must provide either an input file or an input directory to read from"); - } - if (this.slackId != null) { - if (!this.slackId.startsWith("T")) { - throw new Exception(String.format("Slack team id %s should start with the letter T. " - + "The easiest way to find your Slack team id is to open your workspace in your browser. " - + "It should look something like: https://app.slack.com/client/TUMKD5EGJ/... " - + "In this case, the team-id is TUMKD5EGJ.", this.slackId)); - } - } - return this; + if (!Files.isRegularFile(path)) { + throw new Exception( + String.format("Proto-descriptor file %s is not a regular file", this.protoc)); } - + } + if ((this.inputFile == null) && (this.inputDir == null)) { + throw new Exception("Must provide either an input file or an input directory to read from"); + } + if (this.slackId != null) { + if (!this.slackId.startsWith("T")) { + throw new Exception( + String.format( + "Slack team id %s should start with the letter T. " + + "The easiest way to find your Slack team id is to open your workspace in your browser. " + + "It should look something like: https://app.slack.com/client/TUMKD5EGJ/... 
" + + "In this case, the team-id is TUMKD5EGJ.", + this.slackId)); + } + } + return this; } - - private Proto2DataHub() { - + } + + private Proto2DataHub() {} + + public static void main(String[] args) throws Exception { + Options options = new Options(); + + options + .addOption(OPTION_DATAHUB_PLATFORM) + .addOption(OPTION_DATAHUB_API) + .addOption(OPTION_DATAHUB_TOKEN) + .addOption(OPTION_DESCRIPTOR) + .addOption(OPTION_FILE) + .addOption(OPTION_DIR) + .addOption(OPTION_EXCLUDE_PATTERN) + .addOption(OPTION_DATAHUB_USER) + .addOption(OPTION_GITHUB_ORG) + .addOption(OPTION_ENV) + .addOption(OPTION_SLACK_ID) + .addOption(OPTION_TRANSPORT) + .addOption(OPTION_FILENAME) + .addOption(OPTION_SUBTYPE) + .addOption(OPTION_HELP); + + Options firstPassOptions = new Options().addOption(OPTION_HELP); + + // create the parser + CommandLineParser parser = new DefaultParser(); + CommandLine cli = null; + cli = parser.parse(firstPassOptions, args, true); + if (cli.hasOption(OPTION_HELP)) { + printUsageAndExit(options, 0); } - public static void main(String[] args) throws Exception { - Options options = new Options(); - - options.addOption(OPTION_DATAHUB_PLATFORM) - .addOption(OPTION_DATAHUB_API) - .addOption(OPTION_DATAHUB_TOKEN) - .addOption(OPTION_DESCRIPTOR) - .addOption(OPTION_FILE) - .addOption(OPTION_DIR) - .addOption(OPTION_EXCLUDE_PATTERN) - .addOption(OPTION_DATAHUB_USER) - .addOption(OPTION_GITHUB_ORG) - .addOption(OPTION_ENV) - .addOption(OPTION_SLACK_ID) - .addOption(OPTION_TRANSPORT) - .addOption(OPTION_FILENAME) - .addOption(OPTION_SUBTYPE) - .addOption(OPTION_HELP); - - Options firstPassOptions = new Options() - .addOption(OPTION_HELP); - - // create the parser - CommandLineParser parser = new DefaultParser(); - CommandLine cli = null; - cli = parser.parse(firstPassOptions, args, true); - if (cli.hasOption(OPTION_HELP)) { - printUsageAndExit(options, 0); + try { + // parse the real command line arguments + cli = parser.parse(options, args); + } catch (Exception exp) { + // oops, something went wrong + // we try old-style format before giving up + try { + String[] translatedArgs = convertOldStyleArgsIfPossible(args); + if (translatedArgs != null) { + cli = parser.parse(options, translatedArgs); + } else { + System.err.println("Parsing failed. Reason: " + exp.getMessage()); + printUsageAndExit(options, 1); } + } catch (Exception secondExp) { + System.err.println("Parsing failed. Reason: " + secondExp.getMessage()); + printUsageAndExit(options, 1); + } + } - try { - // parse the real command line arguments - cli = parser.parse(options, args); - } catch (Exception exp) { - // oops, something went wrong - // we try old-style format before giving up - try { - String[] translatedArgs = convertOldStyleArgsIfPossible(args); - if (translatedArgs != null) { - cli = parser.parse(options, translatedArgs); - } else { - System.err.println("Parsing failed. Reason: " + exp.getMessage()); - printUsageAndExit(options, 1); - } - } catch (Exception secondExp) { - System.err.println("Parsing failed. 
Reason: " + secondExp.getMessage()); - printUsageAndExit(options, 1); - } - } + AppConfig config = new AppConfig(cli).validate(); + Emitter emitter = null; + AtomicInteger totalEvents = new AtomicInteger(); - AppConfig config = new AppConfig(cli).validate(); - Emitter emitter = null; - AtomicInteger totalEvents = new AtomicInteger(); - - switch (config.transport) { - case REST: { - emitter = RestEmitter - .create(b -> b.server(config.datahubAPI).token(config.datahubToken)); - } break; - case KAFKA: { - throw new UnsupportedOperationException("Kafka transport is not supported yet."); - } - case FILE: { - emitter = new FileEmitter(FileEmitterConfig.builder().fileName(config.filename).build()); - } - break; - default: { - throw new UnsupportedOperationException(String - .format("%s transport is not supported yet.", config.transport)); - } + switch (config.transport) { + case REST: + { + emitter = RestEmitter.create(b -> b.server(config.datahubAPI).token(config.datahubToken)); + } + break; + case KAFKA: + { + throw new UnsupportedOperationException("Kafka transport is not supported yet."); + } + case FILE: + { + emitter = new FileEmitter(FileEmitterConfig.builder().fileName(config.filename).build()); + } + break; + default: + { + throw new UnsupportedOperationException( + String.format("%s transport is not supported yet.", config.transport)); } + } - AuditStamp auditStamp = new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(new CorpuserUrn(config.datahubUser)); + AuditStamp auditStamp = + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(new CorpuserUrn(config.datahubUser)); - InputStream protocStream = new FileInputStream(config.protoc); + InputStream protocStream = new FileInputStream(config.protoc); - Stream<Path> filePathStream = Stream.empty(); - if (config.inputFile != null) { - filePathStream = Stream.of(Path.of(config.inputFile)); - } else { - DirectoryWalker walker = new DirectoryWalker(config.inputDir, config.excludePatterns); - filePathStream = walker.walkFiles(); - } + Stream<Path> filePathStream = Stream.empty(); + if (config.inputFile != null) { + filePathStream = Stream.of(Path.of(config.inputFile)); + } else { + DirectoryWalker walker = new DirectoryWalker(config.inputDir, config.excludePatterns); + filePathStream = walker.walkFiles(); + } - Emitter finalEmitter = emitter; - AtomicInteger exitCode = new AtomicInteger(0); - AtomicInteger totalFiles = new AtomicInteger(0); - - try { - filePathStream.forEach(filePath -> { - totalFiles.incrementAndGet(); - try { - String textSchema = Files.readString(filePath); - - ProtobufDataset dataset = ProtobufDataset.builder() - .setDataPlatformUrn(new DataPlatformUrn(config.dataPlatform)) - .setProtocIn(new FileInputStream(config.protoc)) - .setFilename(filePath.toString()) - .setSchema(textSchema) - .setAuditStamp(auditStamp) - .setFabricType(config.fabricType) - .setGithubOrganization(config.githubOrg) - .setSlackTeamId(config.slackId) - .setSubType(config.subType) - .build(); - - dataset.getAllMetadataChangeProposals().flatMap(Collection::stream).forEach(mcpw -> { + Emitter finalEmitter = emitter; + AtomicInteger exitCode = new AtomicInteger(0); + AtomicInteger totalFiles = new AtomicInteger(0); + + try { + filePathStream.forEach( + filePath -> { + totalFiles.incrementAndGet(); + try { + String textSchema = Files.readString(filePath); + + ProtobufDataset dataset = + ProtobufDataset.builder() + .setDataPlatformUrn(new DataPlatformUrn(config.dataPlatform)) + .setProtocIn(new FileInputStream(config.protoc)) 
+ .setFilename(filePath.toString()) + .setSchema(textSchema) + .setAuditStamp(auditStamp) + .setFabricType(config.fabricType) + .setGithubOrganization(config.githubOrg) + .setSlackTeamId(config.slackId) + .setSubType(config.subType) + .build(); + + dataset + .getAllMetadataChangeProposals() + .flatMap(Collection::stream) + .forEach( + mcpw -> { try { - finalEmitter.emit(mcpw, null).get(); - totalEvents.getAndIncrement(); + finalEmitter.emit(mcpw, null).get(); + totalEvents.getAndIncrement(); } catch (Exception e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); - } catch (Exception e) { - if (e.getMessage() != null && e.getMessage().equals("Cannot autodetect protobuf Message.")) { - System.err.printf("WARN: Top-level schema not found in %s, no dataset emitted%n", args[1]); - } else { - e.printStackTrace(); - System.err.println(String.format("‼️ Failed to emit to DataHub over %s. Num events emitted so far %d", - config.transport, totalEvents.get())); - exitCode.set(1); - } - } - }); - } finally { - if (emitter != null) { - emitter.close(); - } + }); + } catch (Exception e) { + if (e.getMessage() != null + && e.getMessage().equals("Cannot autodetect protobuf Message.")) { + System.err.printf( + "WARN: Top-level schema not found in %s, no dataset emitted%n", args[1]); + } else { + e.printStackTrace(); + System.err.println( + String.format( + "‼️ Failed to emit to DataHub over %s. Num events emitted so far %d", + config.transport, totalEvents.get())); + exitCode.set(1); + } + } + }); + } finally { + if (emitter != null) { + emitter.close(); + } } if (exitCode.get() == 0) { - System.out.println( - String.format("✅ Successfully emitted %d events for %d files to DataHub %s", totalEvents.get(), totalFiles.get(), config.transport)); + System.out.println( + String.format( + "✅ Successfully emitted %d events for %d files to DataHub %s", + totalEvents.get(), totalFiles.get(), config.transport)); } else { - System.out.println( - String.format("‼️ Emitted %d events for %d files to DataHub %s", totalEvents.get(), totalFiles.get(), config.transport)); + System.out.println( + String.format( + "‼️ Emitted %d events for %d files to DataHub %s", + totalEvents.get(), totalFiles.get(), config.transport)); } System.exit(exitCode.get()); -} + } - private static String[] convertOldStyleArgsIfPossible(String[] args) { - if (args.length == 2) { - String[] translatedArgs = {"--descriptor", args[0], "--file", args[1]}; - return translatedArgs; - } else { - return null; - } + private static String[] convertOldStyleArgsIfPossible(String[] args) { + if (args.length == 2) { + String[] translatedArgs = {"--descriptor", args[0], "--file", args[1]}; + return translatedArgs; + } else { + return null; } + } - private static void printUsageAndExit(Options options, int exitCode) { - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(Proto2DataHub.class.getSimpleName(), options); - System.exit(exitCode); - } + private static void printUsageAndExit(Options options, int exitCode) { + HelpFormatter helpFormatter = new HelpFormatter(); + helpFormatter.printHelp(Proto2DataHub.class.getSimpleName(), options); + System.exit(exitCode); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java index 312b3785ac791..e0c27ebea18bc 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java 
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java @@ -15,6 +15,7 @@ import com.linkedin.schema.SchemaFieldArray; import com.linkedin.schema.SchemaMetadata; import com.linkedin.util.Pair; +import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; @@ -26,270 +27,282 @@ import datahub.protobuf.visitors.dataset.PropertyVisitor; import datahub.protobuf.visitors.dataset.TagAssociationVisitor; import datahub.protobuf.visitors.dataset.TermAssociationVisitor; -import datahub.protobuf.visitors.field.SchemaFieldVisitor; -import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.visitors.field.ProtobufExtensionFieldVisitor; +import datahub.protobuf.visitors.field.SchemaFieldVisitor; import datahub.protobuf.visitors.tags.TagVisitor; - -import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.util.Base64; import java.util.Collection; import java.util.Comparator; -import java.util.Optional; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - +import javax.annotation.Nullable; public class ProtobufDataset { - public static ProtobufDataset.Builder builder() { - return new Builder(); - } - - public static class Builder { - private DataPlatformUrn dataPlatformUrn; - private DatasetUrn datasetUrn; - private FabricType fabricType; - private AuditStamp auditStamp; - private byte[] protocBytes; - private String messageName; - private String filename; - private String schema; - private String githubOrganization; - private String slackTeamId; - private String subType; - - public Builder setGithubOrganization(@Nullable String githubOrganization) { - this.githubOrganization = githubOrganization; - return this; - } - - public Builder setSlackTeamId(@Nullable String slackTeamId) { - this.slackTeamId = slackTeamId; - return this; - } - - public Builder setProtocIn(InputStream protocIn) throws IOException { - return setProtocBytes(protocIn.readAllBytes()); - } - - public Builder setDataPlatformUrn(@Nullable DataPlatformUrn dataPlatformUrn) { - this.dataPlatformUrn = dataPlatformUrn; - return this; - } - - public Builder setDatasetUrn(@Nullable DatasetUrn datasetUrn) { - this.datasetUrn = datasetUrn; - return this; - } - - public Builder setProtocBytes(byte[] protocBytes) { - this.protocBytes = protocBytes; - return this; - } - - public Builder setFabricType(FabricType fabricType) { - this.fabricType = fabricType; - return this; - } - - public Builder setAuditStamp(AuditStamp auditStamp) { - this.auditStamp = auditStamp; - return this; - } - - public Builder setMessageName(@Nullable String messageName) { - this.messageName = messageName; - return this; - } - public Builder setFilename(@Nullable String filename) { - this.filename = filename; - return this; - } - - public Builder setSchema(@Nullable String schema) { - this.schema = schema; - return this; - } - - public Builder setSubType(@Nullable String subType) { - this.subType = subType; - return this; - } - - public ProtobufDataset build() throws IOException { - FileDescriptorSet fileSet = FileDescriptorSet.parseFrom(protocBytes); - - return new ProtobufDataset( - this, - Optional.ofNullable(dataPlatformUrn).orElse(new DataPlatformUrn("kafka")), - datasetUrn, - new ProtobufGraph(fileSet, messageName, filename), schema, auditStamp, fabricType) - 
.setMetadataChangeProposalVisitors( - List.of( - new TagVisitor() - ) - ) - .setFieldVisitor(new ProtobufExtensionFieldVisitor()) - .setDatasetVisitor(DatasetVisitor.builder() - .protocBase64(Base64.getEncoder().encodeToString(protocBytes)) - .datasetPropertyVisitors( - List.of( - new KafkaTopicPropertyVisitor(), - new PropertyVisitor() - ) - ) - .institutionalMemoryMetadataVisitors( - List.of( - new InstitutionalMemoryVisitor(slackTeamId, githubOrganization) - ) - ) - .tagAssociationVisitors( - List.of( - new TagAssociationVisitor() - ) - ) - .termAssociationVisitors( - List.of( - new TermAssociationVisitor() - ) - ) - .ownershipVisitors( - List.of( - new OwnershipVisitor() - ) - ) - .domainVisitors( - List.of( - new DomainVisitor() - ) - ) - .build() - ) - .setSubType(subType); - } + public static ProtobufDataset.Builder builder() { + return new Builder(); + } + + public static class Builder { + private DataPlatformUrn dataPlatformUrn; + private DatasetUrn datasetUrn; + private FabricType fabricType; + private AuditStamp auditStamp; + private byte[] protocBytes; + private String messageName; + private String filename; + private String schema; + private String githubOrganization; + private String slackTeamId; + private String subType; + + public Builder setGithubOrganization(@Nullable String githubOrganization) { + this.githubOrganization = githubOrganization; + return this; } - private final DatasetUrn datasetUrn; - private final Optional<String> schemaSource; - private final ProtobufGraph graph; - private final AuditStamp auditStamp; - private Optional<String> subType; - private final VisitContext.VisitContextBuilder contextBuilder; - private final ProtobufDataset.Builder builder; - - private DatasetVisitor datasetVisitor; - private ProtobufModelVisitor<Pair<SchemaField, Double>> fieldVisitor; - private List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>>> mcpwVisitors; - - public ProtobufDataset(DataPlatformUrn dataPlatformUrn, DatasetUrn datasetUrn, ProtobufGraph graph, String schema, - AuditStamp auditStamp, FabricType fabricType) { - this(null, dataPlatformUrn, datasetUrn, graph, schema, auditStamp, fabricType); + public Builder setSlackTeamId(@Nullable String slackTeamId) { + this.slackTeamId = slackTeamId; + return this; } - public ProtobufDataset(ProtobufDataset.Builder builder, DataPlatformUrn dataPlatformUrn, DatasetUrn datasetUrn, ProtobufGraph graph, - String schema, AuditStamp auditStamp, FabricType fabricType) { - this.builder = builder; - this.schemaSource = Optional.ofNullable(schema); - this.auditStamp = auditStamp; - this.graph = graph; - this.subType = Optional.empty(); - - // Default - non-protobuf extension - fieldVisitor = new SchemaFieldVisitor(); - mcpwVisitors = List.of(); - - this.datasetUrn = datasetUrn != null ? datasetUrn : new DatasetUrn(dataPlatformUrn, this.graph.getFullName(), fabricType); - this.contextBuilder = VisitContext.builder().datasetUrn(this.datasetUrn).auditStamp(this.auditStamp); + public Builder setProtocIn(InputStream protocIn) throws IOException { + return setProtocBytes(protocIn.readAllBytes()); } - public ProtobufDataset setMetadataChangeProposalVisitors(List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? 
extends RecordTemplate>>> visitors) { - this.mcpwVisitors = visitors; - return this; + public Builder setDataPlatformUrn(@Nullable DataPlatformUrn dataPlatformUrn) { + this.dataPlatformUrn = dataPlatformUrn; + return this; } - public ProtobufDataset setDatasetVisitor(DatasetVisitor datasetVisitor) { - this.datasetVisitor = datasetVisitor; - return this; + public Builder setDatasetUrn(@Nullable DatasetUrn datasetUrn) { + this.datasetUrn = datasetUrn; + return this; } - public ProtobufDataset setFieldVisitor(ProtobufModelVisitor<Pair<SchemaField, Double>> visitor) { - this.fieldVisitor = visitor; - return this; + public Builder setProtocBytes(byte[] protocBytes) { + this.protocBytes = protocBytes; + return this; } - public ProtobufDataset setSubType(String subType) { - this.subType = Optional.ofNullable(subType); - return this; + public Builder setFabricType(FabricType fabricType) { + this.fabricType = fabricType; + return this; } - public ProtobufDataset.Builder toBuilder() { - return builder; + public Builder setAuditStamp(AuditStamp auditStamp) { + this.auditStamp = auditStamp; + return this; } - public ProtobufGraph getGraph() { - return graph; + public Builder setMessageName(@Nullable String messageName) { + this.messageName = messageName; + return this; } - public AuditStamp getAuditStamp() { - return auditStamp; + public Builder setFilename(@Nullable String filename) { + this.filename = filename; + return this; } - public DatasetUrn getDatasetUrn() { - return datasetUrn; + public Builder setSchema(@Nullable String schema) { + this.schema = schema; + return this; } - public Stream<Collection<MetadataChangeProposalWrapper<? extends RecordTemplate>>> getAllMetadataChangeProposals() { - return Stream.of(getVisitorMCPs(), getDatasetMCPs()); + public Builder setSubType(@Nullable String subType) { + this.subType = subType; + return this; } - public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getVisitorMCPs() { - return graph.accept(contextBuilder, mcpwVisitors).collect(Collectors.toList()); + public ProtobufDataset build() throws IOException { + FileDescriptorSet fileSet = FileDescriptorSet.parseFrom(protocBytes); + + return new ProtobufDataset( + this, + Optional.ofNullable(dataPlatformUrn).orElse(new DataPlatformUrn("kafka")), + datasetUrn, + new ProtobufGraph(fileSet, messageName, filename), + schema, + auditStamp, + fabricType) + .setMetadataChangeProposalVisitors(List.of(new TagVisitor())) + .setFieldVisitor(new ProtobufExtensionFieldVisitor()) + .setDatasetVisitor( + DatasetVisitor.builder() + .protocBase64(Base64.getEncoder().encodeToString(protocBytes)) + .datasetPropertyVisitors( + List.of(new KafkaTopicPropertyVisitor(), new PropertyVisitor())) + .institutionalMemoryMetadataVisitors( + List.of(new InstitutionalMemoryVisitor(slackTeamId, githubOrganization))) + .tagAssociationVisitors(List.of(new TagAssociationVisitor())) + .termAssociationVisitors(List.of(new TermAssociationVisitor())) + .ownershipVisitors(List.of(new OwnershipVisitor())) + .domainVisitors(List.of(new DomainVisitor())) + .build()) + .setSubType(subType); } - - public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getDatasetMCPs() { - Stream<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> mcpStream = - Stream.concat(this.graph.accept(contextBuilder, List.of(datasetVisitor)), + } + + private final DatasetUrn datasetUrn; + private final Optional<String> schemaSource; + private final ProtobufGraph graph; + private final AuditStamp auditStamp; + private Optional<String> subType; + private final VisitContext.VisitContextBuilder contextBuilder; + private final ProtobufDataset.Builder builder; + + private DatasetVisitor datasetVisitor; + private ProtobufModelVisitor<Pair<SchemaField, Double>> fieldVisitor; + private List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>>> + mcpwVisitors; + + public ProtobufDataset( + DataPlatformUrn dataPlatformUrn, + DatasetUrn datasetUrn, + ProtobufGraph graph, + String schema, + AuditStamp auditStamp, + FabricType fabricType) { + this(null, dataPlatformUrn, datasetUrn, graph, schema, auditStamp, fabricType); + } + + public ProtobufDataset( + ProtobufDataset.Builder builder, + DataPlatformUrn dataPlatformUrn, + DatasetUrn datasetUrn, + ProtobufGraph graph, + String schema, + AuditStamp auditStamp, + FabricType fabricType) { + this.builder = builder; + this.schemaSource = Optional.ofNullable(schema); + this.auditStamp = auditStamp; + this.graph = graph; + this.subType = Optional.empty(); + + // Default - non-protobuf extension + fieldVisitor = new SchemaFieldVisitor(); + mcpwVisitors = List.of(); + + this.datasetUrn = + datasetUrn != null + ? datasetUrn + : new DatasetUrn(dataPlatformUrn, this.graph.getFullName(), fabricType); + this.contextBuilder = + VisitContext.builder().datasetUrn(this.datasetUrn).auditStamp(this.auditStamp); + } + + public ProtobufDataset setMetadataChangeProposalVisitors( + List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>>> + visitors) { + this.mcpwVisitors = visitors; + return this; + } + + public ProtobufDataset setDatasetVisitor(DatasetVisitor datasetVisitor) { + this.datasetVisitor = datasetVisitor; + return this; + } + + public ProtobufDataset setFieldVisitor(ProtobufModelVisitor<Pair<SchemaField, Double>> visitor) { + this.fieldVisitor = visitor; + return this; + } + + public ProtobufDataset setSubType(String subType) { + this.subType = Optional.ofNullable(subType); + return this; + } + + public ProtobufDataset.Builder toBuilder() { + return builder; + } + + public ProtobufGraph getGraph() { + return graph; + } + + public AuditStamp getAuditStamp() { + return auditStamp; + } + + public DatasetUrn getDatasetUrn() { + return datasetUrn; + } + + public Stream<Collection<MetadataChangeProposalWrapper<? extends RecordTemplate>>> + getAllMetadataChangeProposals() { + return Stream.of(getVisitorMCPs(), getDatasetMCPs()); + } + + public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getVisitorMCPs() { + return graph.accept(contextBuilder, mcpwVisitors).collect(Collectors.toList()); + } + + public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getDatasetMCPs() { + Stream<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> mcpStream = + Stream.concat( + this.graph.accept(contextBuilder, List.of(datasetVisitor)), Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - getSchemaMetadata(), "schemaMetadata"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - new Status().setRemoved(false), "status"))); - - if (this.subType.isPresent()) { - SubTypes subTypes = new SubTypes().setTypeNames(new StringArray(this.subType.get())); - mcpStream = Stream.concat(mcpStream, - Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - subTypes, "subTypes"))); - } - return mcpStream.collect(Collectors.toList()); - } - - public SchemaMetadata getSchemaMetadata() { - SchemaMetadata.PlatformSchema platformSchema = new SchemaMetadata.PlatformSchema(); - schemaSource.ifPresent(schemaStr -> platformSchema.setKafkaSchema(new KafkaSchema().setDocumentSchema(schemaStr))); - - List<SchemaField> schemaFields = graph.accept(contextBuilder, List.of(fieldVisitor)) - .sorted(COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList()); - - return new SchemaMetadata() - .setSchemaName(graph.getFullName()) - .setPlatform(datasetUrn.getPlatformEntity()) - .setCreated(auditStamp) - .setLastModified(auditStamp) - .setVersion(graph.getMajorVersion()) - .setHash(graph.getHash()) - .setPlatformSchema(platformSchema) - .setFields(new SchemaFieldArray(schemaFields)); + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + getSchemaMetadata(), + "schemaMetadata"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + new Status().setRemoved(false), + "status"))); + + if (this.subType.isPresent()) { + SubTypes subTypes = new SubTypes().setTypeNames(new StringArray(this.subType.get())); + mcpStream = + Stream.concat( + mcpStream, + Stream.of( + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + subTypes, + "subTypes"))); } - - public static final Comparator<Pair<SchemaField, Double>> COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT = Comparator.comparing(Pair::getSecond); - public static final Comparator<Pair<SchemaField, Double>> COMPARE_BY_FIELD_PATH = Comparator - .comparing(p -> p.getFirst().getFieldPath()); + return mcpStream.collect(Collectors.toList()); + } + + public SchemaMetadata getSchemaMetadata() { + SchemaMetadata.PlatformSchema platformSchema = new SchemaMetadata.PlatformSchema(); + schemaSource.ifPresent( + schemaStr -> platformSchema.setKafkaSchema(new KafkaSchema().setDocumentSchema(schemaStr))); + + List<SchemaField> schemaFields = + graph + .accept(contextBuilder, List.of(fieldVisitor)) + .sorted(COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); + + return new SchemaMetadata() + .setSchemaName(graph.getFullName()) + .setPlatform(datasetUrn.getPlatformEntity()) + .setCreated(auditStamp) + .setLastModified(auditStamp) + .setVersion(graph.getMajorVersion()) + .setHash(graph.getHash()) + .setPlatformSchema(platformSchema) + .setFields(new SchemaFieldArray(schemaFields)); + } + + public static final Comparator<Pair<SchemaField, Double>> COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT = + Comparator.comparing(Pair::getSecond); + public static 
final Comparator<Pair<SchemaField, Double>> COMPARE_BY_FIELD_PATH = + Comparator.comparing(p -> p.getFirst().getFieldPath()); } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java index 5f5cfaa15cf41..ef5bc52aaee7a 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java @@ -5,7 +5,6 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.ExtensionRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; @@ -20,176 +19,211 @@ import java.util.stream.Stream; public class ProtobufUtils { - private ProtobufUtils() { } - - public static String collapseLocationComments(DescriptorProtos.SourceCodeInfo.Location location) { - String orig = Stream.concat(location.getLeadingDetachedCommentsList().stream(), - Stream.of(location.getLeadingComments(), location.getTrailingComments())) - .filter(Objects::nonNull) - .flatMap(line -> Arrays.stream(line.split("\n"))) - .map(line -> line.replaceFirst("^[*/ ]+", "")) - .collect(Collectors.joining("\n")) - .trim(); - - /* - * Sometimes DataHub doesn't like these strings. Not sure if its DataHub - * or protobuf issue: https://github.com/protocolbuffers/protobuf/issues/4691 - * - * We essentially smash utf8 chars to ascii here - */ - return new String(orig.getBytes(StandardCharsets.ISO_8859_1)); - } + private ProtobufUtils() {} + + public static String collapseLocationComments(DescriptorProtos.SourceCodeInfo.Location location) { + String orig = + Stream.concat( + location.getLeadingDetachedCommentsList().stream(), + Stream.of(location.getLeadingComments(), location.getTrailingComments())) + .filter(Objects::nonNull) + .flatMap(line -> Arrays.stream(line.split("\n"))) + .map(line -> line.replaceFirst("^[*/ ]+", "")) + .collect(Collectors.joining("\n")) + .trim(); /* - * Reflection used to prevent an exception deep inside the protobuf library due to a getter method - * mutating the json name field and causing an equality check to fail between an instance that has and has not - * had the getter called. - * - * https://github.com/protocolbuffers/protobuf/blob/main/java/core/src/main/java/com/google/protobuf/Descriptors.java#L1105 - * - * java.lang.IllegalArgumentException: FieldDescriptors can only be compared to other FieldDescriptors for fields of the same message type. - * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1344) - * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1057) - * at java.base/java.util.TreeMap.put(TreeMap.java:566) - * at java.base/java.util.AbstractMap.putAll(AbstractMap.java:281) - * at java.base/java.util.TreeMap.putAll(TreeMap.java:325) - * at com.google.protobuf.GeneratedMessageV3$ExtendableMessage.getAllFields(GeneratedMessageV3.java:1240) + * Sometimes DataHub doesn't like these strings. 
Not sure if its DataHub + * or protobuf issue: https://github.com/protocolbuffers/protobuf/issues/4691 * + * We essentially smash utf8 chars to ascii here */ - private static final Method FIELD_OPT_EXT_FIELDS_METHOD; - private static final Method FIELD_OPT_ALL_FIELD_METHOD; - private static final Method MSG_OPT_EXT_FIELDS_METHOD; - private static final Method MSG_OPT_ALL_FIELD_METHOD; - static { - try { - FIELD_OPT_EXT_FIELDS_METHOD = DescriptorProtos.FieldOptions.class.getSuperclass() - .getDeclaredMethod("getExtensionFields"); - FIELD_OPT_EXT_FIELDS_METHOD.setAccessible(true); - - FIELD_OPT_ALL_FIELD_METHOD = DescriptorProtos.FieldOptions.class.getSuperclass().getSuperclass() - .getDeclaredMethod("getAllFieldsMutable", boolean.class); - FIELD_OPT_ALL_FIELD_METHOD.setAccessible(true); - - MSG_OPT_EXT_FIELDS_METHOD = DescriptorProtos.MessageOptions.class.getSuperclass() - .getDeclaredMethod("getExtensionFields"); - MSG_OPT_EXT_FIELDS_METHOD.setAccessible(true); - - MSG_OPT_ALL_FIELD_METHOD = DescriptorProtos.MessageOptions.class.getSuperclass().getSuperclass() - .getDeclaredMethod("getAllFieldsMutable", boolean.class); - MSG_OPT_ALL_FIELD_METHOD.setAccessible(true); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } + return new String(orig.getBytes(StandardCharsets.ISO_8859_1)); + } + + /* + * Reflection used to prevent an exception deep inside the protobuf library due to a getter method + * mutating the json name field and causing an equality check to fail between an instance that has and has not + * had the getter called. + * + * https://github.com/protocolbuffers/protobuf/blob/main/java/core/src/main/java/com/google/protobuf/Descriptors.java#L1105 + * + * java.lang.IllegalArgumentException: FieldDescriptors can only be compared to other FieldDescriptors for fields of the same message type. 
+ * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1344) + * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1057) + * at java.base/java.util.TreeMap.put(TreeMap.java:566) + * at java.base/java.util.AbstractMap.putAll(AbstractMap.java:281) + * at java.base/java.util.TreeMap.putAll(TreeMap.java:325) + * at com.google.protobuf.GeneratedMessageV3$ExtendableMessage.getAllFields(GeneratedMessageV3.java:1240) + * + */ + private static final Method FIELD_OPT_EXT_FIELDS_METHOD; + private static final Method FIELD_OPT_ALL_FIELD_METHOD; + private static final Method MSG_OPT_EXT_FIELDS_METHOD; + private static final Method MSG_OPT_ALL_FIELD_METHOD; + + static { + try { + FIELD_OPT_EXT_FIELDS_METHOD = + DescriptorProtos.FieldOptions.class + .getSuperclass() + .getDeclaredMethod("getExtensionFields"); + FIELD_OPT_EXT_FIELDS_METHOD.setAccessible(true); + + FIELD_OPT_ALL_FIELD_METHOD = + DescriptorProtos.FieldOptions.class + .getSuperclass() + .getSuperclass() + .getDeclaredMethod("getAllFieldsMutable", boolean.class); + FIELD_OPT_ALL_FIELD_METHOD.setAccessible(true); + + MSG_OPT_EXT_FIELDS_METHOD = + DescriptorProtos.MessageOptions.class + .getSuperclass() + .getDeclaredMethod("getExtensionFields"); + MSG_OPT_EXT_FIELDS_METHOD.setAccessible(true); + + MSG_OPT_ALL_FIELD_METHOD = + DescriptorProtos.MessageOptions.class + .getSuperclass() + .getSuperclass() + .getDeclaredMethod("getAllFieldsMutable", boolean.class); + MSG_OPT_ALL_FIELD_METHOD.setAccessible(true); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); } - - public static List<Pair<Descriptors.FieldDescriptor, Object>> getFieldOptions(DescriptorProtos.FieldDescriptorProto fieldProto) { - try { - LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) FIELD_OPT_EXT_FIELDS_METHOD.invoke(fieldProto.getOptions())) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) FIELD_OPT_ALL_FIELD_METHOD.invoke(fieldProto.getOptions(), false)) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - return options; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } + } + + public static List<Pair<Descriptors.FieldDescriptor, Object>> getFieldOptions( + DescriptorProtos.FieldDescriptorProto fieldProto) { + try { + LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + FIELD_OPT_EXT_FIELDS_METHOD.invoke(fieldProto.getOptions())) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + FIELD_OPT_ALL_FIELD_METHOD.invoke(fieldProto.getOptions(), false)) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + return options; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - - public static List<Pair<Descriptors.FieldDescriptor, Object>> getMessageOptions(DescriptorProtos.DescriptorProto messageProto) { - try { - LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) 
MSG_OPT_EXT_FIELDS_METHOD.invoke(messageProto.getOptions())) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) MSG_OPT_ALL_FIELD_METHOD.invoke(messageProto.getOptions(), - false)) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - return options; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } + } + + public static List<Pair<Descriptors.FieldDescriptor, Object>> getMessageOptions( + DescriptorProtos.DescriptorProto messageProto) { + try { + LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + MSG_OPT_EXT_FIELDS_METHOD.invoke(messageProto.getOptions())) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + MSG_OPT_ALL_FIELD_METHOD.invoke(messageProto.getOptions(), false)) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + return options; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - - public static ExtensionRegistry buildRegistry(DescriptorProtos.FileDescriptorSet fileSet) { - ExtensionRegistry registry = ExtensionRegistry.newInstance(); - Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoMap = fileSet.getFileList().stream() - .collect(Collectors.toMap(DescriptorProtos.FileDescriptorProto::getName, Function.identity())); - Map<String, Descriptors.FileDescriptor> descriptorCache = new HashMap<>(); - - fileSet.getFileList().forEach(fdp -> { - try { - Descriptors.FileDescriptor file = descriptorFromProto(fdp, descriptorProtoMap, descriptorCache); - Stream.concat(file.getExtensions().stream(), file.getMessageTypes().stream().flatMap(msg -> msg.getExtensions().stream())) - .forEach(ext -> addToRegistry(fdp, ext, registry)); - } catch (Descriptors.DescriptorValidationException e) { + } + + public static ExtensionRegistry buildRegistry(DescriptorProtos.FileDescriptorSet fileSet) { + ExtensionRegistry registry = ExtensionRegistry.newInstance(); + Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoMap = + fileSet.getFileList().stream() + .collect( + Collectors.toMap( + DescriptorProtos.FileDescriptorProto::getName, Function.identity())); + Map<String, Descriptors.FileDescriptor> descriptorCache = new HashMap<>(); + + fileSet + .getFileList() + .forEach( + fdp -> { + try { + Descriptors.FileDescriptor file = + descriptorFromProto(fdp, descriptorProtoMap, descriptorCache); + Stream.concat( + file.getExtensions().stream(), + file.getMessageTypes().stream() + .flatMap(msg -> msg.getExtensions().stream())) + .forEach(ext -> addToRegistry(fdp, ext, registry)); + } catch (Descriptors.DescriptorValidationException e) { e.printStackTrace(); - } - }); - return registry; + } + }); + return registry; + } + + private static void addToRegistry( + DescriptorProtos.FileDescriptorProto fileDescriptorProto, + Descriptors.FieldDescriptor fieldDescriptor, + ExtensionRegistry registry) { + if (fieldDescriptor.getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { + registry.add(fieldDescriptor); + } else { + fileDescriptorProto.getMessageTypeList().stream() + .filter(typ -> 
typ.getName().equals(fieldDescriptor.getMessageType().getName())) + .findFirst() + .ifPresent( + messageType -> + registry.add(fieldDescriptor, messageType.getDefaultInstanceForType())); + fieldDescriptor.getMessageType().getFields().stream() + .filter(Descriptors.FieldDescriptor::isExtension) + .forEach(f -> addToRegistry(fileDescriptorProto, f, registry)); } - - private static void addToRegistry(DescriptorProtos.FileDescriptorProto fileDescriptorProto, - Descriptors.FieldDescriptor fieldDescriptor, ExtensionRegistry registry) { - if (fieldDescriptor.getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { - registry.add(fieldDescriptor); - } else { - fileDescriptorProto.getMessageTypeList().stream() - .filter(typ -> typ.getName().equals(fieldDescriptor.getMessageType().getName())) - .findFirst().ifPresent(messageType -> registry.add(fieldDescriptor, messageType.getDefaultInstanceForType())); - fieldDescriptor.getMessageType().getFields() - .stream().filter(Descriptors.FieldDescriptor::isExtension) - .forEach(f -> addToRegistry(fileDescriptorProto, f, registry)); - } + } + + /** + * Recursively constructs file descriptors for all dependencies of the supplied proto and returns + * a {@link Descriptors.FileDescriptor} for the supplied proto itself. For maximal efficiency, + * reuse the descriptorCache argument across calls. + */ + private static Descriptors.FileDescriptor descriptorFromProto( + DescriptorProtos.FileDescriptorProto descriptorProto, + Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoIndex, + Map<String, Descriptors.FileDescriptor> descriptorCache) + throws Descriptors.DescriptorValidationException { + // First, check the cache. + String descriptorName = descriptorProto.getName(); + if (descriptorCache.containsKey(descriptorName)) { + return descriptorCache.get(descriptorName); } - /** - * Recursively constructs file descriptors for all dependencies of the supplied proto and returns - * a {@link Descriptors.FileDescriptor} for the supplied proto itself. For maximal efficiency, reuse the - * descriptorCache argument across calls. - */ - private static Descriptors.FileDescriptor descriptorFromProto( - DescriptorProtos.FileDescriptorProto descriptorProto, - Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoIndex, - Map<String, Descriptors.FileDescriptor> descriptorCache) throws Descriptors.DescriptorValidationException { - // First, check the cache. - String descriptorName = descriptorProto.getName(); - if (descriptorCache.containsKey(descriptorName)) { - return descriptorCache.get(descriptorName); - } - - // Then, fetch all the required dependencies recursively. - ImmutableList.Builder<Descriptors.FileDescriptor> dependencies = ImmutableList.builder(); - for (String dependencyName : descriptorProto.getDependencyList()) { - if (!descriptorProtoIndex.containsKey(dependencyName)) { - throw new IllegalArgumentException("Could not find dependency: " + dependencyName); - } - DescriptorProtos.FileDescriptorProto dependencyProto = descriptorProtoIndex.get(dependencyName); - dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache)); - } - - // Finally, construct the actual descriptor. 
- Descriptors.FileDescriptor[] empty = new Descriptors.FileDescriptor[0]; - Descriptors.FileDescriptor descript = Descriptors.FileDescriptor.buildFrom(descriptorProto, dependencies.build().toArray(empty), false); - descriptorCache.put(descript.getName(), descript); - return descript; + // Then, fetch all the required dependencies recursively. + ImmutableList.Builder<Descriptors.FileDescriptor> dependencies = ImmutableList.builder(); + for (String dependencyName : descriptorProto.getDependencyList()) { + if (!descriptorProtoIndex.containsKey(dependencyName)) { + throw new IllegalArgumentException("Could not find dependency: " + dependencyName); + } + DescriptorProtos.FileDescriptorProto dependencyProto = + descriptorProtoIndex.get(dependencyName); + dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache)); } + // Finally, construct the actual descriptor. + Descriptors.FileDescriptor[] empty = new Descriptors.FileDescriptor[0]; + Descriptors.FileDescriptor descript = + Descriptors.FileDescriptor.buildFrom( + descriptorProto, dependencies.build().toArray(empty), false); + descriptorCache.put(descript.getName(), descript); + return descript; + } } - diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java index 7926ba0702762..49ecb7ec2aedf 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java @@ -5,51 +5,48 @@ import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; - @Builder @Getter public class FieldTypeEdge extends DefaultEdge { - @Builder.Default - private final String type = ""; - @Builder.Default - private final boolean isMessageType = false; - private final transient ProtobufElement edgeSource; - private final transient ProtobufElement edgeTarget; - - public FieldTypeEdge inGraph(DefaultDirectedGraph<ProtobufElement, FieldTypeEdge> g) { - g.addEdge(edgeSource, edgeTarget, this); - return this; + @Builder.Default private final String type = ""; + @Builder.Default private final boolean isMessageType = false; + private final transient ProtobufElement edgeSource; + private final transient ProtobufElement edgeTarget; + + public FieldTypeEdge inGraph(DefaultDirectedGraph<ProtobufElement, FieldTypeEdge> g) { + g.addEdge(edgeSource, edgeTarget, this); + return this; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - FieldTypeEdge that = (FieldTypeEdge) o; + FieldTypeEdge that = (FieldTypeEdge) o; - if (isMessageType() != that.isMessageType()) { - return false; - } - if (!getType().equals(that.getType())) { - return false; - } - if (!getEdgeSource().equals(that.getEdgeSource())) { - return false; - } - return getEdgeTarget().equals(that.getEdgeTarget()); + if (isMessageType() != that.isMessageType()) { + return false; } - - @Override - public int hashCode() { - int result = getType().hashCode(); - result = 31 * result + (isMessageType() ? 
1 : 0); - result = 31 * result + getEdgeSource().hashCode(); - result = 31 * result + getEdgeTarget().hashCode(); - return result; + if (!getType().equals(that.getType())) { + return false; + } + if (!getEdgeSource().equals(that.getEdgeSource())) { + return false; } + return getEdgeTarget().equals(that.getEdgeTarget()); + } + + @Override + public int hashCode() { + int result = getType().hashCode(); + result = 31 * result + (isMessageType() ? 1 : 0); + result = 31 * result + getEdgeSource().hashCode(); + result = 31 * result + getEdgeTarget().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java index 91c76fe16b73f..e47e804763ecf 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java @@ -5,28 +5,33 @@ import com.google.protobuf.DescriptorProtos.SourceCodeInfo; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.List; import java.util.stream.Stream; - public interface ProtobufElement { - String name(); - String fullName(); - String nativeType(); - String comment(); - String fieldPathType(); - - FileDescriptorProto fileProto(); - DescriptorProto messageProto(); - - default Stream<SourceCodeInfo.Location> messageLocations() { - List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); - return fileLocations.stream() - .filter(loc -> loc.getPathCount() > 1 - && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER - && messageProto() == fileProto().getMessageType(loc.getPath(1))); - } - - <T> Stream<T> accept(ProtobufModelVisitor<T> v, VisitContext context); + String name(); + + String fullName(); + + String nativeType(); + + String comment(); + + String fieldPathType(); + + FileDescriptorProto fileProto(); + + DescriptorProto messageProto(); + + default Stream<SourceCodeInfo.Location> messageLocations() { + List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); + return fileLocations.stream() + .filter( + loc -> + loc.getPathCount() > 1 + && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER + && messageProto() == fileProto().getMessageType(loc.getPath(1))); + } + + <T> Stream<T> accept(ProtobufModelVisitor<T> v, VisitContext context); } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java index ff894112d0d51..3d4e170939455 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java @@ -6,82 +6,81 @@ import com.linkedin.schema.EnumType; import com.linkedin.schema.SchemaFieldDataType; import datahub.protobuf.ProtobufUtils; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; -import java.util.stream.Collectors; - - @Getter public class ProtobufEnum extends ProtobufMessage { - private final EnumDescriptorProto enumProto; - - @Builder(builderMethodName = "enumBuilder") - public ProtobufEnum(FileDescriptorProto fileProto, - 
DescriptorProto messageProto, - EnumDescriptorProto enumProto) { - super(messageProto, null, fileProto); - this.enumProto = enumProto; - } - - @Override - public String name() { - return enumProto.getName(); - } - - @Override - public String fieldPathType() { - return "[type=enum]"; + private final EnumDescriptorProto enumProto; + + @Builder(builderMethodName = "enumBuilder") + public ProtobufEnum( + FileDescriptorProto fileProto, DescriptorProto messageProto, EnumDescriptorProto enumProto) { + super(messageProto, null, fileProto); + this.enumProto = enumProto; + } + + @Override + public String name() { + return enumProto.getName(); + } + + @Override + public String fieldPathType() { + return "[type=enum]"; + } + + @Override + public String nativeType() { + return "enum"; + } + + @Override + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())); + } + + @Override + public String comment() { + return messageLocations() + .filter( + loc -> + loc.getPathCount() > 3 + && loc.getPath(2) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER + && enumProto == messageProto().getEnumType(loc.getPath(3))) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } + + @Override + public String toString() { + return String.format("ProtobufEnum[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String nativeType() { - return "enum"; + if (o == null || getClass() != o.getClass()) { + return false; } - - @Override - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())); + if (!super.equals(o)) { + return false; } - @Override - public String comment() { - return messageLocations() - .filter(loc -> loc.getPathCount() > 3 - && loc.getPath(2) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER - && enumProto == messageProto().getEnumType(loc.getPath(3))) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); - } - - @Override - public String toString() { - return String.format("ProtobufEnum[%s]", fullName()); - } + ProtobufEnum that = (ProtobufEnum) o; - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } + return getEnumProto().equals(that.getEnumProto()); + } - ProtobufEnum that = (ProtobufEnum) o; - - return getEnumProto().equals(that.getEnumProto()); - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + getEnumProto().hashCode(); - return result; - } + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + getEnumProto().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java index d890c373f1299..5bb41017488f3 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java @@ -18,152 +18,159 @@ import datahub.protobuf.ProtobufUtils; import 
datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; - import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; @Builder(toBuilder = true) @Getter @AllArgsConstructor public class ProtobufField implements ProtobufElement { - private final ProtobufMessage protobufMessage; - private final FieldDescriptorProto fieldProto; - private final String nativeType; - private final String fieldPathType; - private final Boolean isMessageType; - private final SchemaFieldDataType schemaFieldDataType; - private final Boolean isNestedType; - - public OneofDescriptorProto oneOfProto() { - if (fieldProto.hasOneofIndex()) { - return protobufMessage.messageProto().getOneofDecl(fieldProto.getOneofIndex()); - } - return null; - } - - @Override - public FileDescriptorProto fileProto() { - return protobufMessage.fileProto(); - } - - @Override - public DescriptorProto messageProto() { - return protobufMessage.messageProto(); - } - - public String parentMessageName() { - return protobufMessage.fullName(); - } - - @Override - public String name() { - return fieldProto.getName(); - } - - @Override - public String fullName() { - return String.join(".", parentMessageName(), name()); - } - - public String getNativeType() { - return nativeType(); - } - - public int getNumber() { - return fieldProto.getNumber(); + private final ProtobufMessage protobufMessage; + private final FieldDescriptorProto fieldProto; + private final String nativeType; + private final String fieldPathType; + private final Boolean isMessageType; + private final SchemaFieldDataType schemaFieldDataType; + private final Boolean isNestedType; + + public OneofDescriptorProto oneOfProto() { + if (fieldProto.hasOneofIndex()) { + return protobufMessage.messageProto().getOneofDecl(fieldProto.getOneofIndex()); } - - @Override - public String nativeType() { - return Optional.ofNullable(nativeType).orElseGet(() -> { - if (fieldProto.getTypeName().isEmpty()) { + return null; + } + + @Override + public FileDescriptorProto fileProto() { + return protobufMessage.fileProto(); + } + + @Override + public DescriptorProto messageProto() { + return protobufMessage.messageProto(); + } + + public String parentMessageName() { + return protobufMessage.fullName(); + } + + @Override + public String name() { + return fieldProto.getName(); + } + + @Override + public String fullName() { + return String.join(".", parentMessageName(), name()); + } + + public String getNativeType() { + return nativeType(); + } + + public int getNumber() { + return fieldProto.getNumber(); + } + + @Override + public String nativeType() { + return Optional.ofNullable(nativeType) + .orElseGet( + () -> { + if (fieldProto.getTypeName().isEmpty()) { return fieldProto.getType().name().split("_")[1].toLowerCase(); - } else { + } else { return fieldProto.getTypeName().replaceFirst("^[.]", ""); - } - }); - } - - @Override - public String fieldPathType() { - return Optional.ofNullable(fieldPathType).orElseGet(() -> { - final String pathType; - - switch (fieldProto.getType()) { + } + }); + } + + @Override + public String fieldPathType() { + return Optional.ofNullable(fieldPathType) + .orElseGet( + () -> { + final String pathType; + + switch (fieldProto.getType()) { case TYPE_DOUBLE: - pathType = "double"; - break; + 
pathType = "double"; + break; case TYPE_FLOAT: - pathType = "float"; - break; + pathType = "float"; + break; case TYPE_SFIXED64: case TYPE_FIXED64: case TYPE_UINT64: case TYPE_INT64: case TYPE_SINT64: - pathType = "long"; - break; + pathType = "long"; + break; case TYPE_FIXED32: case TYPE_SFIXED32: case TYPE_INT32: case TYPE_UINT32: case TYPE_SINT32: - pathType = "int"; - break; + pathType = "int"; + break; case TYPE_BYTES: - pathType = "bytes"; - break; + pathType = "bytes"; + break; case TYPE_ENUM: - pathType = "enum"; - break; + pathType = "enum"; + break; case TYPE_BOOL: - pathType = "boolean"; - break; + pathType = "boolean"; + break; case TYPE_STRING: - pathType = "string"; - break; + pathType = "string"; + break; case TYPE_GROUP: case TYPE_MESSAGE: - pathType = nativeType().replace(".", "_"); - break; + pathType = nativeType().replace(".", "_"); + break; default: - throw new IllegalStateException(String.format("Unexpected FieldDescriptorProto => FieldPathType %s", fieldProto.getType())); - } + throw new IllegalStateException( + String.format( + "Unexpected FieldDescriptorProto => FieldPathType %s", + fieldProto.getType())); + } - StringArray fieldPath = new StringArray(); + StringArray fieldPath = new StringArray(); - if (schemaFieldDataType().getType().isArrayType()) { + if (schemaFieldDataType().getType().isArrayType()) { fieldPath.add("[type=array]"); - } + } - fieldPath.add(String.format("[type=%s]", pathType)); + fieldPath.add(String.format("[type=%s]", pathType)); - return String.join(".", fieldPath); - }); - } + return String.join(".", fieldPath); + }); + } - public boolean isMessage() { - return Optional.ofNullable(isMessageType).orElseGet(() -> - fieldProto.getType().equals(FieldDescriptorProto.Type.TYPE_MESSAGE)); - } + public boolean isMessage() { + return Optional.ofNullable(isMessageType) + .orElseGet(() -> fieldProto.getType().equals(FieldDescriptorProto.Type.TYPE_MESSAGE)); + } - public int sortWeight() { - return messageProto().getFieldList().indexOf(fieldProto) + 1; - } + public int sortWeight() { + return messageProto().getFieldList().indexOf(fieldProto) + 1; + } - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return Optional.ofNullable(schemaFieldDataType).orElseGet(() -> { - final SchemaFieldDataType.Type fieldType; + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return Optional.ofNullable(schemaFieldDataType) + .orElseGet( + () -> { + final SchemaFieldDataType.Type fieldType; - switch (fieldProto.getType()) { + switch (fieldProto.getType()) { case TYPE_DOUBLE: case TYPE_FLOAT: case TYPE_INT64: @@ -172,139 +179,150 @@ public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { case TYPE_UINT32: case TYPE_SINT32: case TYPE_SINT64: - fieldType = SchemaFieldDataType.Type.create(new NumberType()); - break; + fieldType = SchemaFieldDataType.Type.create(new NumberType()); + break; case TYPE_GROUP: case TYPE_MESSAGE: - fieldType = SchemaFieldDataType.Type.create(new RecordType()); - break; + fieldType = SchemaFieldDataType.Type.create(new RecordType()); + break; case TYPE_BYTES: - fieldType = SchemaFieldDataType.Type.create(new BytesType()); - break; + fieldType = SchemaFieldDataType.Type.create(new BytesType()); + break; case TYPE_ENUM: - fieldType = SchemaFieldDataType.Type.create(new EnumType()); - break; + fieldType = SchemaFieldDataType.Type.create(new EnumType()); + break; case TYPE_BOOL: - fieldType = SchemaFieldDataType.Type.create(new BooleanType()); - break; + fieldType 
= SchemaFieldDataType.Type.create(new BooleanType()); + break; case TYPE_STRING: - fieldType = SchemaFieldDataType.Type.create(new StringType()); - break; + fieldType = SchemaFieldDataType.Type.create(new StringType()); + break; case TYPE_FIXED64: case TYPE_FIXED32: case TYPE_SFIXED32: case TYPE_SFIXED64: - fieldType = SchemaFieldDataType.Type.create(new FixedType()); - break; + fieldType = SchemaFieldDataType.Type.create(new FixedType()); + break; default: - throw new IllegalStateException(String.format("Unexpected FieldDescriptorProto => SchemaFieldDataType: %s", fieldProto.getType())); - } - - if (fieldProto.getLabel().equals(FieldDescriptorProto.Label.LABEL_REPEATED)) { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType() - .setNestedType(new StringArray()))); - } - - return new SchemaFieldDataType().setType(fieldType); - }); - } - - @Override - public Stream<SourceCodeInfo.Location> messageLocations() { - List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); - return fileLocations.stream() - .filter(loc -> loc.getPathCount() > 1 - && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER); + throw new IllegalStateException( + String.format( + "Unexpected FieldDescriptorProto => SchemaFieldDataType: %s", + fieldProto.getType())); + } + + if (fieldProto.getLabel().equals(FieldDescriptorProto.Label.LABEL_REPEATED)) { + return new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray()))); + } + + return new SchemaFieldDataType().setType(fieldType); + }); + } + + @Override + public Stream<SourceCodeInfo.Location> messageLocations() { + List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); + return fileLocations.stream() + .filter( + loc -> + loc.getPathCount() > 1 + && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER); + } + + @Override + public String comment() { + return messageLocations() + .filter(location -> location.getPathCount() > 3) + .filter( + location -> + !ProtobufUtils.collapseLocationComments(location).isEmpty() + && !isEnumType(location.getPathList())) + .filter( + location -> { + List<Integer> pathList = location.getPathList(); + DescriptorProto messageType = fileProto().getMessageType(pathList.get(1)); + + if (!isNestedType + && location.getPath(2) == DescriptorProto.FIELD_FIELD_NUMBER + && fieldProto == messageType.getField(location.getPath(3))) { + return true; + } else if (isNestedType + && location.getPath(2) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER + && fieldProto == getNestedTypeFields(pathList, messageType)) { + return true; + } + return false; + }) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } + + private FieldDescriptorProto getNestedTypeFields( + List<Integer> pathList, DescriptorProto messageType) { + int pathSize = pathList.size(); + List<Integer> nestedValues = new ArrayList<>(pathSize); + + for (int index = 0; index < pathSize; index++) { + if (index > 1 + && index % 2 == 0 + && pathList.get(index) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER) { + nestedValues.add(pathList.get(index + 1)); + } } - @Override - public String comment() { - return messageLocations() - .filter(location -> location.getPathCount() > 3) - .filter(location -> !ProtobufUtils.collapseLocationComments(location).isEmpty() - && !isEnumType(location.getPathList())) - .filter(location -> { - List<Integer> pathList = 
location.getPathList(); - DescriptorProto messageType = fileProto().getMessageType(pathList.get(1)); - - if (!isNestedType - && location.getPath(2) == DescriptorProto.FIELD_FIELD_NUMBER - && fieldProto == messageType.getField(location.getPath(3))) { - return true; - } else if (isNestedType - && location.getPath(2) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER - && fieldProto == getNestedTypeFields(pathList, messageType)) { - return true; - } - return false; - }) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); + for (Integer value : nestedValues) { + messageType = messageType.getNestedType(value); } - private FieldDescriptorProto getNestedTypeFields(List<Integer> pathList, DescriptorProto messageType) { - int pathSize = pathList.size(); - List<Integer> nestedValues = new ArrayList<>(pathSize); - - for (int index = 0; index < pathSize; index++) { - if (index > 1 - && index % 2 == 0 - && pathList.get(index) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER) { - nestedValues.add(pathList.get(index + 1)); - } - } - - for (Integer value : nestedValues) { - messageType = messageType.getNestedType(value); - } - - if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER - && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER - && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) { - return messageType.getField(pathList.get(pathSize - 1)); - } else { - return null; - } + if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER + && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER + && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) { + return messageType.getField(pathList.get(pathSize - 1)); + } else { + return null; } - - private boolean isEnumType(List<Integer> pathList) { - for (int index = 0; index < pathList.size(); index++) { - if (index > 1 - && index % 2 == 0 - && pathList.get(index) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER) { - return true; - } - } - return false; + } + + private boolean isEnumType(List<Integer> pathList) { + for (int index = 0; index < pathList.size(); index++) { + if (index > 1 + && index % 2 == 0 + && pathList.get(index) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER) { + return true; + } } - - @Override - public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { - return visitor.visitField(this, context); + return false; + } + + @Override + public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { + return visitor.visitField(this, context); + } + + @Override + public String toString() { + return String.format("ProtobufField[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return String.format("ProtobufField[%s]", fullName()); + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } + ProtobufElement that = (ProtobufElement) o; - ProtobufElement that = (ProtobufElement) o; + return fullName().equals(that.fullName()); + } - return fullName().equals(that.fullName()); - } - - @Override - public int hashCode() { - return fullName().hashCode(); - } + @Override + public int hashCode() { + return fullName().hashCode(); + } } diff --git 
a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java index ae2319af85988..2f8c885de0e96 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java @@ -6,10 +6,6 @@ import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import org.jgrapht.GraphPath; -import org.jgrapht.alg.shortestpath.AllDirectedPaths; -import org.jgrapht.graph.DefaultDirectedGraph; - import java.util.Collection; import java.util.HashSet; import java.util.List; @@ -18,374 +14,476 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - +import org.jgrapht.GraphPath; +import org.jgrapht.alg.shortestpath.AllDirectedPaths; +import org.jgrapht.graph.DefaultDirectedGraph; public class ProtobufGraph extends DefaultDirectedGraph<ProtobufElement, FieldTypeEdge> { - private final transient ProtobufMessage rootProtobufMessage; - private final transient AllDirectedPaths<ProtobufElement, FieldTypeEdge> directedPaths; - private final transient ExtensionRegistry registry; - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) throws InvalidProtocolBufferException { - this(fileSet, null, null, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName) throws InvalidProtocolBufferException { - this(fileSet, messageName, null, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName, String relativeFilename) throws InvalidProtocolBufferException { - this(fileSet, messageName, relativeFilename, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName, String filename, - boolean flattenGoogleWrapped) throws InvalidProtocolBufferException { - super(FieldTypeEdge.class); - this.registry = ProtobufUtils.buildRegistry(fileSet); - DescriptorProtos.FileDescriptorSet fileSetExtended = DescriptorProtos.FileDescriptorSet - .parseFrom(fileSet.toByteArray(), this.registry); - buildProtobufGraph(fileSetExtended); - if (flattenGoogleWrapped) { - flattenGoogleWrapped(); - } - - if (messageName != null) { - this.rootProtobufMessage = findMessage(messageName); - } else { - DescriptorProtos.FileDescriptorProto lastFile = fileSetExtended.getFileList() - .stream().filter(f -> filename != null && filename.endsWith(f.getName())) - .findFirst().orElse(fileSetExtended.getFile(fileSetExtended.getFileCount() - 1)); - - if (filename != null) { - this.rootProtobufMessage = autodetectRootMessage(lastFile) - .orElse(autodetectSingleMessage(lastFile) - .orElse(autodetectLocalFileRootMessage(lastFile) - .orElseThrow(() -> new IllegalArgumentException("Cannot autodetect protobuf Message.")))); - } else { - this.rootProtobufMessage = autodetectRootMessage(lastFile) - .orElseThrow(() -> new IllegalArgumentException("Cannot autodetect root protobuf Message.")); - } - } - - this.directedPaths = new AllDirectedPaths<>(this); - } - - public List<GraphPath<ProtobufElement, FieldTypeEdge>> getAllPaths(ProtobufElement a, ProtobufElement b) { - return directedPaths.getAllPaths(a, b, true, null); - } - - public ExtensionRegistry getRegistry() { - return registry; - } - - public String getFullName() { - return 
rootProtobufMessage.fullName(); - } - - public int getMajorVersion() { - return rootProtobufMessage.majorVersion(); - } - - public String getComment() { - return rootProtobufMessage.comment(); - } - - public ProtobufMessage root() { - return rootProtobufMessage; - } - - - public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept(VisitContext.VisitContextBuilder contextBuilder, Collection<V> visitors) { - VisitContext context = Optional.ofNullable(contextBuilder).orElse(VisitContext.builder()).graph(this).build(); - return accept(context, visitors); - } - - public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept(VisitContext context, Collection<V> visitors) { - return Stream.concat( - visitors.stream().flatMap(visitor -> visitor.visitGraph(context)), - vertexSet().stream().flatMap(vertex -> visitors.stream().flatMap(visitor -> vertex.accept(visitor, context))) - ); - } - - protected Optional<ProtobufMessage> autodetectRootMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && incomingEdgesOf(v).isEmpty() - && outgoingEdgesOf(v).stream() - .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) - .allMatch(e -> e.getEdgeSource().equals(v))) // all the incoming edges on the child vertices should be self - .map(v -> (ProtobufMessage) v) - .findFirst(); + private final transient ProtobufMessage rootProtobufMessage; + private final transient AllDirectedPaths<ProtobufElement, FieldTypeEdge> directedPaths; + private final transient ExtensionRegistry registry; + + public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) + throws InvalidProtocolBufferException { + this(fileSet, null, null, true); + } + + public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName) + throws InvalidProtocolBufferException { + this(fileSet, messageName, null, true); + } + + public ProtobufGraph( + DescriptorProtos.FileDescriptorSet fileSet, String messageName, String relativeFilename) + throws InvalidProtocolBufferException { + this(fileSet, messageName, relativeFilename, true); + } + + public ProtobufGraph( + DescriptorProtos.FileDescriptorSet fileSet, + String messageName, + String filename, + boolean flattenGoogleWrapped) + throws InvalidProtocolBufferException { + super(FieldTypeEdge.class); + this.registry = ProtobufUtils.buildRegistry(fileSet); + DescriptorProtos.FileDescriptorSet fileSetExtended = + DescriptorProtos.FileDescriptorSet.parseFrom(fileSet.toByteArray(), this.registry); + buildProtobufGraph(fileSetExtended); + if (flattenGoogleWrapped) { + flattenGoogleWrapped(); } - protected Optional<ProtobufMessage> autodetectSingleMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && targetFile.getMessageTypeCount() == 1) - .map(v -> (ProtobufMessage) v) - .findFirst(); + if (messageName != null) { + this.rootProtobufMessage = findMessage(messageName); + } else { + DescriptorProtos.FileDescriptorProto lastFile = + fileSetExtended.getFileList().stream() + .filter(f -> filename != null && filename.endsWith(f.getName())) + .findFirst() + .orElse(fileSetExtended.getFile(fileSetExtended.getFileCount() - 1)); + + if (filename != null) { + this.rootProtobufMessage = + autodetectRootMessage(lastFile) + .orElse( + 
autodetectSingleMessage(lastFile) + .orElse( + autodetectLocalFileRootMessage(lastFile) + .orElseThrow( + () -> + new IllegalArgumentException( + "Cannot autodetect protobuf Message.")))); + } else { + this.rootProtobufMessage = + autodetectRootMessage(lastFile) + .orElseThrow( + () -> new IllegalArgumentException("Cannot autodetect root protobuf Message.")); + } } - protected Optional<ProtobufMessage> autodetectLocalFileRootMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && incomingEdgesOf(v).stream().noneMatch(e -> e.getEdgeSource().fileProto().equals(targetFile)) - && outgoingEdgesOf(v).stream() // all the incoming edges on the child vertices should be self within target file - .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) - .allMatch(e -> !e.getEdgeSource().fileProto().equals(targetFile) || e.getEdgeSource().equals(v))) - .map(v -> (ProtobufMessage) v) - .findFirst(); - } + this.directedPaths = new AllDirectedPaths<>(this); + } + + public List<GraphPath<ProtobufElement, FieldTypeEdge>> getAllPaths( + ProtobufElement a, ProtobufElement b) { + return directedPaths.getAllPaths(a, b, true, null); + } + + public ExtensionRegistry getRegistry() { + return registry; + } + + public String getFullName() { + return rootProtobufMessage.fullName(); + } + + public int getMajorVersion() { + return rootProtobufMessage.majorVersion(); + } + + public String getComment() { + return rootProtobufMessage.comment(); + } + + public ProtobufMessage root() { + return rootProtobufMessage; + } + + public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept( + VisitContext.VisitContextBuilder contextBuilder, Collection<V> visitors) { + VisitContext context = + Optional.ofNullable(contextBuilder).orElse(VisitContext.builder()).graph(this).build(); + return accept(context, visitors); + } + + public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept( + VisitContext context, Collection<V> visitors) { + return Stream.concat( + visitors.stream().flatMap(visitor -> visitor.visitGraph(context)), + vertexSet().stream() + .flatMap( + vertex -> visitors.stream().flatMap(visitor -> vertex.accept(visitor, context)))); + } + + protected Optional<ProtobufMessage> autodetectRootMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && incomingEdgesOf(v).isEmpty() + && outgoingEdgesOf(v).stream() + .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) + .allMatch( + e -> + e.getEdgeSource() + .equals( + v))) // all the incoming edges on the child vertices should + // be self + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + protected Optional<ProtobufMessage> autodetectSingleMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && targetFile.getMessageTypeCount() == 1) + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + protected Optional<ProtobufMessage> autodetectLocalFileRootMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of 
fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && incomingEdgesOf(v).stream() + .noneMatch(e -> e.getEdgeSource().fileProto().equals(targetFile)) + && outgoingEdgesOf(v) + .stream() // all the incoming edges on the child vertices should be self + // within target file + .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) + .allMatch( + e -> + !e.getEdgeSource().fileProto().equals(targetFile) + || e.getEdgeSource().equals(v))) + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + public ProtobufMessage findMessage(String messageName) throws IllegalArgumentException { + return (ProtobufMessage) + vertexSet().stream() + .filter(v -> v instanceof ProtobufMessage && messageName.equals(v.fullName())) + .findFirst() + .orElseThrow( + () -> + new IllegalArgumentException( + String.format("Cannot find protobuf Message %s", messageName))); + } + + private void buildProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) { + // Attach non-nested fields to messages + fileSet + .getFileList() + .forEach( + fileProto -> + fileProto + .getMessageTypeList() + .forEach( + messageProto -> { + ProtobufMessage messageVertex = + ProtobufMessage.builder() + .fileProto(fileProto) + .messageProto(messageProto) + .build(); + addVertex(messageVertex); + + // Handle nested fields + addNestedMessage(fileProto, messageProto); + + // Add enum types + addEnum(fileProto, messageProto); + + // handle normal fields and oneofs + messageProto + .getFieldList() + .forEach( + fieldProto -> { + ProtobufField fieldVertex = + ProtobufField.builder() + .protobufMessage(messageVertex) + .fieldProto(fieldProto) + .isNestedType(false) + .build(); + + // Add field vertex + addVertex(fieldVertex); + + if (fieldVertex.oneOfProto() != null) { + // Handle oneOf + addOneOf(messageVertex, fieldVertex); + } else { + // Add schema to field edge + linkMessageToField(messageVertex, fieldVertex); + } + }); + })); + + // attach field paths to root message + Map<String, List<ProtobufField>> fieldMap = + vertexSet().stream() + .filter( + v -> + v instanceof ProtobufField + && incomingEdgesOf(v).stream() + .noneMatch(e -> e.getEdgeSource() instanceof ProtobufOneOfField)) + .map(v -> (ProtobufField) v) + .collect(Collectors.groupingBy(ProtobufField::parentMessageName)); + + edgeSet().stream().filter(FieldTypeEdge::isMessageType).collect(Collectors.toSet()).stream() + .map(e -> (ProtobufField) e.getEdgeTarget()) + .forEach(f -> attachNestedMessageFields(fieldMap, f)); + } + + private void addEnum( + DescriptorProtos.FileDescriptorProto fileProto, + DescriptorProtos.DescriptorProto messageProto) { + messageProto + .getEnumTypeList() + .forEach( + enumProto -> { + ProtobufEnum enumVertex = + ProtobufEnum.enumBuilder() + .fileProto(fileProto) + .messageProto(messageProto) + .enumProto(enumProto) + .build(); + addVertex(enumVertex); + }); + } - public ProtobufMessage findMessage(String messageName) throws IllegalArgumentException { - return (ProtobufMessage) vertexSet().stream() - .filter(v -> v instanceof ProtobufMessage && messageName.equals(v.fullName())) - .findFirst().orElseThrow(() -> new IllegalArgumentException(String.format("Cannot find protobuf Message %s", messageName))); + private void addNestedMessage( + DescriptorProtos.FileDescriptorProto fileProto, + DescriptorProtos.DescriptorProto messageProto) { + if (messageProto.getNestedTypeCount() < 1) { + return; } - private void buildProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) { - // Attach non-nested fields to messages - 
fileSet.getFileList().forEach(fileProto -> - fileProto.getMessageTypeList().forEach(messageProto -> { - - ProtobufMessage messageVertex = ProtobufMessage.builder() - .fileProto(fileProto) - .messageProto(messageProto) - .build(); - addVertex(messageVertex); - - // Handle nested fields - addNestedMessage(fileProto, messageProto); - - // Add enum types - addEnum(fileProto, messageProto); - - // handle normal fields and oneofs - messageProto.getFieldList().forEach(fieldProto -> { - ProtobufField fieldVertex = ProtobufField.builder() - .protobufMessage(messageVertex) - .fieldProto(fieldProto) - .isNestedType(false) + messageProto + .getNestedTypeList() + .forEach( + nestedMessageProto -> { + ProtobufMessage nestedMessageVertex = + ProtobufMessage.builder() + .fileProto(fileProto) + .parentMessageProto(messageProto) + .messageProto(nestedMessageProto) + .build(); + addVertex(nestedMessageVertex); + + nestedMessageProto + .getFieldList() + .forEach( + nestedFieldProto -> { + ProtobufField field = + ProtobufField.builder() + .protobufMessage(nestedMessageVertex) + .fieldProto(nestedFieldProto) + .isNestedType(true) .build(); // Add field vertex - addVertex(fieldVertex); - - if (fieldVertex.oneOfProto() != null) { - // Handle oneOf - addOneOf(messageVertex, fieldVertex); - } else { - // Add schema to field edge - linkMessageToField(messageVertex, fieldVertex); + addVertex(field); + + // Add schema to field edge + if (!field.isMessage()) { + FieldTypeEdge.builder() + .edgeSource(nestedMessageVertex) + .edgeTarget(field) + .type(field.fieldPathType()) + .build() + .inGraph(this); } - }); - }) - ); - - // attach field paths to root message - Map<String, List<ProtobufField>> fieldMap = vertexSet().stream() - .filter(v -> v instanceof ProtobufField && incomingEdgesOf(v).stream().noneMatch(e -> e.getEdgeSource() instanceof ProtobufOneOfField)) - .map(v -> (ProtobufField) v) - .collect(Collectors.groupingBy(ProtobufField::parentMessageName)); - - edgeSet().stream().filter(FieldTypeEdge::isMessageType).collect(Collectors.toSet()) - .stream().map(e -> (ProtobufField) e.getEdgeTarget()) - .forEach(f -> attachNestedMessageFields(fieldMap, f)); - } - + }); - private void addEnum(DescriptorProtos.FileDescriptorProto fileProto, DescriptorProtos.DescriptorProto messageProto) { - messageProto.getEnumTypeList().forEach(enumProto -> { - ProtobufEnum enumVertex = ProtobufEnum.enumBuilder() - .fileProto(fileProto) - .messageProto(messageProto) - .enumProto(enumProto) - .build(); - addVertex(enumVertex); - }); - } - - private void addNestedMessage(DescriptorProtos.FileDescriptorProto fileProto, DescriptorProtos.DescriptorProto messageProto) { - if (messageProto.getNestedTypeCount() < 1) { - return; - } - - messageProto.getNestedTypeList().forEach(nestedMessageProto -> { - ProtobufMessage nestedMessageVertex = ProtobufMessage.builder() - .fileProto(fileProto) - .parentMessageProto(messageProto) - .messageProto(nestedMessageProto) - .build(); - addVertex(nestedMessageVertex); - - nestedMessageProto.getFieldList().forEach(nestedFieldProto -> { - ProtobufField field = ProtobufField.builder() - .protobufMessage(nestedMessageVertex) - .fieldProto(nestedFieldProto) - .isNestedType(true) - .build(); - - // Add field vertex - addVertex(field); - - // Add schema to field edge - if (!field.isMessage()) { - FieldTypeEdge.builder() - .edgeSource(nestedMessageVertex) - .edgeTarget(field) - .type(field.fieldPathType()) - .build().inGraph(this); - } + addNestedMessage(fileProto, nestedMessageProto); + }); + } + + private 
Stream<ProtobufField> addOneOf(ProtobufMessage messageVertex, ProtobufField fieldVertex) { + // Handle oneOf + ProtobufField oneOfVertex = + ProtobufOneOfField.oneOfBuilder() + .protobufMessage(messageVertex) + .fieldProto(fieldVertex.getFieldProto()) + .build(); + addVertex(oneOfVertex); + + FieldTypeEdge.builder() + .edgeSource(messageVertex) + .edgeTarget(oneOfVertex) + .type(oneOfVertex.fieldPathType()) + .build() + .inGraph(this); + + // Add oneOf field to field edge + FieldTypeEdge.builder() + .edgeSource(oneOfVertex) + .edgeTarget(fieldVertex) + .type(fieldVertex.fieldPathType()) + .isMessageType(fieldVertex.isMessage()) + .build() + .inGraph(this); + + return Stream.of(oneOfVertex); + } + + private Stream<ProtobufField> linkMessageToField( + ProtobufMessage messageVertex, ProtobufField fieldVertex) { + FieldTypeEdge.builder() + .edgeSource(messageVertex) + .edgeTarget(fieldVertex) + .type(fieldVertex.fieldPathType()) + .isMessageType(fieldVertex.isMessage()) + .build() + .inGraph(this); + + return Stream.of(fieldVertex); + } + + private void attachNestedMessageFields( + Map<String, List<ProtobufField>> fieldMap, ProtobufField messageField) { + fieldMap + .getOrDefault(messageField.nativeType(), List.of()) + .forEach( + target -> { + FieldTypeEdge.builder() + .edgeSource(messageField) + .edgeTarget(target) + .type(target.fieldPathType()) + .isMessageType(target.isMessage()) + .build() + .inGraph(this); + }); + } + + private static final Set<String> GOOGLE_WRAPPERS = + Set.of("google/protobuf/wrappers.proto", "google/protobuf/timestamp.proto"); + + private void flattenGoogleWrapped() { + HashSet<ProtobufElement> removeVertices = new HashSet<>(); + HashSet<FieldTypeEdge> removeEdges = new HashSet<>(); + HashSet<ProtobufElement> addVertices = new HashSet<>(); + HashSet<FieldTypeEdge> addEdges = new HashSet<>(); + + Set<ProtobufElement> googleWrapped = + vertexSet().stream() + .filter( + v -> + v instanceof ProtobufMessage + && GOOGLE_WRAPPERS.contains(v.fileProto().getName())) + .collect(Collectors.toSet()); + removeVertices.addAll(googleWrapped); + + Set<ProtobufField> wrappedPrimitiveFields = + googleWrapped.stream() + .flatMap(wrapped -> outgoingEdgesOf(wrapped).stream()) + .map(FieldTypeEdge::getEdgeTarget) + .map(ProtobufField.class::cast) + .collect(Collectors.toSet()); + removeVertices.addAll(wrappedPrimitiveFields); + + wrappedPrimitiveFields.stream() + .filter(fld -> fld.getNumber() == 1) + .forEach( + primitiveField -> { + // remove incoming old edges to primitive + removeEdges.addAll(incomingEdgesOf(primitiveField)); + + Set<ProtobufField> originatingFields = + incomingEdgesOf(primitiveField).stream() + .map(FieldTypeEdge::getEdgeSource) + .filter(edgeSource -> !googleWrapped.contains(edgeSource)) + .map(ProtobufField.class::cast) + .collect(Collectors.toSet()); + removeVertices.addAll(originatingFields); + + originatingFields.forEach( + originatingField -> { + // Replacement Field + ProtobufElement fieldVertex = + originatingField.toBuilder() + .fieldPathType(primitiveField.fieldPathType()) + .schemaFieldDataType(primitiveField.schemaFieldDataType()) + .isMessageType(false) + .build(); + addVertices.add(fieldVertex); + + // link source field parent directly to primitive + Set<FieldTypeEdge> incomingEdges = incomingEdgesOf(originatingField); + removeEdges.addAll(incomingEdgesOf(originatingField)); + addEdges.addAll( + incomingEdges.stream() + .map( + oldEdge -> + // Replace old edge with new edge to primitive + FieldTypeEdge.builder() + .edgeSource(oldEdge.getEdgeSource()) + 
.edgeTarget(fieldVertex) + .type(primitiveField.fieldPathType()) + .isMessageType(false) // known primitive + .build()) + .collect(Collectors.toSet())); + }); + + // remove old fields + removeVertices.addAll(originatingFields); }); - addNestedMessage(fileProto, nestedMessageProto); - }); - } - - private Stream<ProtobufField> addOneOf(ProtobufMessage messageVertex, ProtobufField fieldVertex) { - // Handle oneOf - ProtobufField oneOfVertex = ProtobufOneOfField.oneOfBuilder() - .protobufMessage(messageVertex) - .fieldProto(fieldVertex.getFieldProto()) - .build(); - addVertex(oneOfVertex); - - FieldTypeEdge.builder() - .edgeSource(messageVertex) - .edgeTarget(oneOfVertex) - .type(oneOfVertex.fieldPathType()) - .build().inGraph(this); - - // Add oneOf field to field edge - FieldTypeEdge.builder() - .edgeSource(oneOfVertex) - .edgeTarget(fieldVertex) - .type(fieldVertex.fieldPathType()) - .isMessageType(fieldVertex.isMessage()) - .build().inGraph(this); - - return Stream.of(oneOfVertex); + // Remove edges + removeAllEdges(removeEdges); + // Remove vertices + removeAllVertices(removeVertices); + // Add vertices + addVertices.forEach(this::addVertex); + // Add edges + addEdges.forEach(e -> e.inGraph(this)); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - private Stream<ProtobufField> linkMessageToField(ProtobufMessage messageVertex, ProtobufField fieldVertex) { - FieldTypeEdge.builder() - .edgeSource(messageVertex) - .edgeTarget(fieldVertex) - .type(fieldVertex.fieldPathType()) - .isMessageType(fieldVertex.isMessage()) - .build().inGraph(this); - - return Stream.of(fieldVertex); + if (o == null || getClass() != o.getClass()) { + return false; } - - private void attachNestedMessageFields(Map<String, List<ProtobufField>> fieldMap, ProtobufField messageField) { - fieldMap.getOrDefault(messageField.nativeType(), List.of()).forEach(target -> { - FieldTypeEdge.builder() - .edgeSource(messageField) - .edgeTarget(target) - .type(target.fieldPathType()) - .isMessageType(target.isMessage()) - .build().inGraph(this); - }); + if (!super.equals(o)) { + return false; } - private static final Set<String> GOOGLE_WRAPPERS = Set.of("google/protobuf/wrappers.proto", "google/protobuf/timestamp.proto"); - private void flattenGoogleWrapped() { - HashSet<ProtobufElement> removeVertices = new HashSet<>(); - HashSet<FieldTypeEdge> removeEdges = new HashSet<>(); - HashSet<ProtobufElement> addVertices = new HashSet<>(); - HashSet<FieldTypeEdge> addEdges = new HashSet<>(); + ProtobufGraph that = (ProtobufGraph) o; - Set<ProtobufElement> googleWrapped = vertexSet().stream() - .filter(v -> v instanceof ProtobufMessage - && GOOGLE_WRAPPERS.contains(v.fileProto().getName())) - .collect(Collectors.toSet()); - removeVertices.addAll(googleWrapped); - - Set<ProtobufField> wrappedPrimitiveFields = googleWrapped.stream() - .flatMap(wrapped -> outgoingEdgesOf(wrapped).stream()) - .map(FieldTypeEdge::getEdgeTarget) - .map(ProtobufField.class::cast) - .collect(Collectors.toSet()); - removeVertices.addAll(wrappedPrimitiveFields); - - wrappedPrimitiveFields.stream().filter(fld -> fld.getNumber() == 1).forEach(primitiveField -> { - // remove incoming old edges to primitive - removeEdges.addAll(incomingEdgesOf(primitiveField)); - - Set<ProtobufField> originatingFields = incomingEdgesOf(primitiveField).stream() - .map(FieldTypeEdge::getEdgeSource) - .filter(edgeSource -> !googleWrapped.contains(edgeSource)) - .map(ProtobufField.class::cast) - .collect(Collectors.toSet()); - 
removeVertices.addAll(originatingFields); - - originatingFields.forEach(originatingField -> { - // Replacement Field - ProtobufElement fieldVertex = originatingField.toBuilder() - .fieldPathType(primitiveField.fieldPathType()) - .schemaFieldDataType(primitiveField.schemaFieldDataType()) - .isMessageType(false) - .build(); - addVertices.add(fieldVertex); - - // link source field parent directly to primitive - Set<FieldTypeEdge> incomingEdges = incomingEdgesOf(originatingField); - removeEdges.addAll(incomingEdgesOf(originatingField)); - addEdges.addAll(incomingEdges.stream().map(oldEdge -> - // Replace old edge with new edge to primitive - FieldTypeEdge.builder() - .edgeSource(oldEdge.getEdgeSource()) - .edgeTarget(fieldVertex) - .type(primitiveField.fieldPathType()) - .isMessageType(false) // known primitive - .build()).collect(Collectors.toSet())); - }); - - // remove old fields - removeVertices.addAll(originatingFields); - }); - - // Remove edges - removeAllEdges(removeEdges); - // Remove vertices - removeAllVertices(removeVertices); - // Add vertices - addVertices.forEach(this::addVertex); - // Add edges - addEdges.forEach(e -> e.inGraph(this)); - } + return rootProtobufMessage.equals(that.rootProtobufMessage); + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } - - ProtobufGraph that = (ProtobufGraph) o; - - return rootProtobufMessage.equals(that.rootProtobufMessage); - } + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + rootProtobufMessage.hashCode(); + return result; + } - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + rootProtobufMessage.hashCode(); - return result; - } - - public String getHash() { - return String.valueOf(super.hashCode()); - } + public String getHash() { + return String.valueOf(super.hashCode()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java index 6b46b11231623..62f02a47a6c86 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java @@ -4,119 +4,117 @@ import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.MapType; import com.linkedin.schema.RecordType; -import lombok.AllArgsConstructor; -import lombok.Builder; -import java.util.Arrays; -import java.util.stream.Stream; import com.linkedin.schema.SchemaFieldDataType; - import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - - +import java.util.Arrays; +import java.util.stream.Stream; +import lombok.AllArgsConstructor; +import lombok.Builder; @Builder @AllArgsConstructor public class ProtobufMessage implements ProtobufElement { - private final DescriptorProto messageProto; - private final DescriptorProto parentMessageProto; - private final FileDescriptorProto fileProto; - - @Override - public String name() { - return messageProto.getName(); - } - - @Override - public String fullName() { - if (parentMessageProto != null) { - return String.join(".", fileProto.getPackage(), parentMessageProto.getName(), name()); - } - 
return String.join(".", fileProto.getPackage(), name()); + private final DescriptorProto messageProto; + private final DescriptorProto parentMessageProto; + private final FileDescriptorProto fileProto; + + @Override + public String name() { + return messageProto.getName(); + } + + @Override + public String fullName() { + if (parentMessageProto != null) { + return String.join(".", fileProto.getPackage(), parentMessageProto.getName(), name()); } - - @Override - public String nativeType() { - return fullName(); - } - - @Override - public String fieldPathType() { - return String.format("[type=%s]", nativeType().replace(".", "_")); - } - - @Override - public FileDescriptorProto fileProto() { - return fileProto; - } - - @Override - public DescriptorProto messageProto() { - return messageProto; + return String.join(".", fileProto.getPackage(), name()); + } + + @Override + public String nativeType() { + return fullName(); + } + + @Override + public String fieldPathType() { + return String.format("[type=%s]", nativeType().replace(".", "_")); + } + + @Override + public FileDescriptorProto fileProto() { + return fileProto; + } + + @Override + public DescriptorProto messageProto() { + return messageProto; + } + + public SchemaFieldDataType schemaFieldDataType() { + if (parentMessageProto != null && messageProto.getName().equals("MapFieldEntry")) { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())); } - - public SchemaFieldDataType schemaFieldDataType() { - if (parentMessageProto != null && messageProto.getName().equals("MapFieldEntry")) { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())); - } - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())); + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())); + } + + public int majorVersion() { + return Integer.parseInt( + Arrays.stream(fileProto.getName().split("/")) + .filter(p -> p.matches("^v[0-9]+$")) + .findFirst() + .map(p -> p.replace("v", "")) + .orElse("1")); + } + + @Override + public String comment() { + return messageLocations().map(ProtobufUtils::collapseLocationComments).findFirst().orElse(""); + } + + @Override + public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { + return visitor.visitMessage(this, context); + } + + @Override + public String toString() { + return String.format("ProtobufMessage[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - public int majorVersion() { - return Integer.parseInt(Arrays.stream(fileProto.getName().split("/")) - .filter(p -> p.matches("^v[0-9]+$")) - .findFirst() - .map(p -> p.replace("v", "")) - .orElse("1")); + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public String comment() { - return messageLocations() - .map(ProtobufUtils::collapseLocationComments) - .findFirst().orElse(""); - } + ProtobufMessage that = (ProtobufMessage) o; - @Override - public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { - return visitor.visitMessage(this, context); + if (!fullName().equals(that.fullName())) { + return false; } - - @Override - public String toString() { - return String.format("ProtobufMessage[%s]", fullName()); + if (!messageProto.equals(that.messageProto)) { + return false; } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != 
o.getClass()) { - return false; - } - - ProtobufMessage that = (ProtobufMessage) o; - - if (!fullName().equals(that.fullName())) { - return false; - } - if (!messageProto.equals(that.messageProto)) { - return false; - } - if (parentMessageProto != null ? !parentMessageProto.equals(that.parentMessageProto) : that.parentMessageProto != null) { - return false; - } - return fileProto.equals(that.fileProto); - } - - @Override - public int hashCode() { - int result = messageProto.hashCode(); - result = 31 * result + (parentMessageProto != null ? parentMessageProto.hashCode() : 0); - result = 31 * result + fileProto.hashCode(); - result = 31 * result + fullName().hashCode(); - return result; + if (parentMessageProto != null + ? !parentMessageProto.equals(that.parentMessageProto) + : that.parentMessageProto != null) { + return false; } + return fileProto.equals(that.fileProto); + } + + @Override + public int hashCode() { + int result = messageProto.hashCode(); + result = 31 * result + (parentMessageProto != null ? parentMessageProto.hashCode() : 0); + result = 31 * result + fileProto.hashCode(); + result = 31 * result + fullName().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java index 514d84b1cff2a..08c157f4c9c71 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java @@ -5,61 +5,60 @@ import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.UnionType; import datahub.protobuf.ProtobufUtils; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; -import java.util.stream.Collectors; - - @Getter public class ProtobufOneOfField extends ProtobufField { - public static final String NATIVE_TYPE = "oneof"; - public static final String FIELD_PATH_TYPE = "[type=union]"; + public static final String NATIVE_TYPE = "oneof"; + public static final String FIELD_PATH_TYPE = "[type=union]"; - @Builder(builderMethodName = "oneOfBuilder") - public ProtobufOneOfField(ProtobufMessage protobufMessage, - FieldDescriptorProto fieldProto) { - super(protobufMessage, fieldProto, null, null, null, null, null); - } + @Builder(builderMethodName = "oneOfBuilder") + public ProtobufOneOfField(ProtobufMessage protobufMessage, FieldDescriptorProto fieldProto) { + super(protobufMessage, fieldProto, null, null, null, null, null); + } - @Override - public String name() { - return oneOfProto().getName(); - } + @Override + public String name() { + return oneOfProto().getName(); + } - @Override - public String fieldPathType() { - return FIELD_PATH_TYPE; - } + @Override + public String fieldPathType() { + return FIELD_PATH_TYPE; + } - @Override - public String nativeType() { - return NATIVE_TYPE; - } + @Override + public String nativeType() { + return NATIVE_TYPE; + } - @Override - public boolean isMessage() { - return false; - } + @Override + public boolean isMessage() { + return false; + } - @Override - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())); - } + @Override + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return new 
SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())); + } - @Override - public String comment() { - return messageLocations() - .filter(loc -> loc.getPathCount() > 3 - && loc.getPath(2) == DescriptorProto.ONEOF_DECL_FIELD_NUMBER - && oneOfProto() == messageProto().getOneofDecl(loc.getPath(3))) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); - } + @Override + public String comment() { + return messageLocations() + .filter( + loc -> + loc.getPathCount() > 3 + && loc.getPath(2) == DescriptorProto.ONEOF_DECL_FIELD_NUMBER + && oneOfProto() == messageProto().getOneofDecl(loc.getPath(3))) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } - @Override - public String toString() { - return String.format("ProtobufOneOf[%s]", fullName()); - } + @Override + public String toString() { + return String.format("ProtobufOneOf[%s]", fullName()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java index c14217fb9add2..085516a025e0c 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java @@ -9,7 +9,6 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.tag.TagProperties; import com.linkedin.util.Pair; - import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -21,140 +20,200 @@ public class ProtobufExtensionUtil { - private ProtobufExtensionUtil() { } + private ProtobufExtensionUtil() {} - public static DescriptorProtos.FieldDescriptorProto extendProto(DescriptorProtos.FieldDescriptorProto proto, ExtensionRegistry registry) { - try { - return DescriptorProtos.FieldDescriptorProto.parseFrom(proto.toByteArray(), registry); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } + public static DescriptorProtos.FieldDescriptorProto extendProto( + DescriptorProtos.FieldDescriptorProto proto, ExtensionRegistry registry) { + try { + return DescriptorProtos.FieldDescriptorProto.parseFrom(proto.toByteArray(), registry); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); } + } - public enum DataHubMetadataType { - PROPERTY, TAG, TAG_LIST, TERM, OWNER, DOMAIN, DEPRECATION; + public enum DataHubMetadataType { + PROPERTY, + TAG, + TAG_LIST, + TERM, + OWNER, + DOMAIN, + DEPRECATION; - public static final String PROTOBUF_TYPE = "DataHubMetadataType"; - } + public static final String PROTOBUF_TYPE = "DataHubMetadataType"; + } - public static List<Pair<Descriptors.FieldDescriptor, Object>> filterByDataHubType(List<Pair<Descriptors.FieldDescriptor, Object>> options, - ExtensionRegistry registry, DataHubMetadataType filterType) { - return options.stream() - .filter(entry -> { - DescriptorProtos.FieldDescriptorProto extendedProtoOptions = extendProto(entry.getKey().toProto(), registry); - Optional<DataHubMetadataType> dataHubMetadataType = extendedProtoOptions.getOptions().getAllFields().entrySet().stream() - .filter(extEntry -> extEntry.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.ENUM) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<Descriptors.EnumValueDescriptor>) 
extEntry.getValue()).stream(); - } else { - return Stream.of((Descriptors.EnumValueDescriptor) extEntry.getValue()); - } - }) - .filter(enumDesc -> enumDesc.getType().getFullName().endsWith("." + DataHubMetadataType.PROTOBUF_TYPE)) - .map(enumDesc -> DataHubMetadataType.valueOf(enumDesc.getName())) - .filter(dhmt -> dhmt.equals(filterType)) - .findFirst(); + public static List<Pair<Descriptors.FieldDescriptor, Object>> filterByDataHubType( + List<Pair<Descriptors.FieldDescriptor, Object>> options, + ExtensionRegistry registry, + DataHubMetadataType filterType) { + return options.stream() + .filter( + entry -> { + DescriptorProtos.FieldDescriptorProto extendedProtoOptions = + extendProto(entry.getKey().toProto(), registry); + Optional<DataHubMetadataType> dataHubMetadataType = + extendedProtoOptions.getOptions().getAllFields().entrySet().stream() + .filter( + extEntry -> + extEntry.getKey().getJavaType() + == Descriptors.FieldDescriptor.JavaType.ENUM) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<Descriptors.EnumValueDescriptor>) + extEntry.getValue()) + .stream(); + } else { + return Stream.of( + (Descriptors.EnumValueDescriptor) extEntry.getValue()); + } + }) + .filter( + enumDesc -> + enumDesc + .getType() + .getFullName() + .endsWith("." + DataHubMetadataType.PROTOBUF_TYPE)) + .map(enumDesc -> DataHubMetadataType.valueOf(enumDesc.getName())) + .filter(dhmt -> dhmt.equals(filterType)) + .findFirst(); - return filterType.equals(dataHubMetadataType.orElse(DataHubMetadataType.PROPERTY)); - }).collect(Collectors.toList()); - } + return filterType.equals(dataHubMetadataType.orElse(DataHubMetadataType.PROPERTY)); + }) + .collect(Collectors.toList()); + } - public static Stream<Map.Entry<String, String>> getProperties(Descriptors.FieldDescriptor field, DescriptorProtos.DescriptorProto value) { - return value.getUnknownFields().asMap().entrySet().stream().map(unknown -> { - Descriptors.FieldDescriptor fieldDesc = field.getMessageType().findFieldByNumber(unknown.getKey()); - String fieldValue = unknown.getValue().getLengthDelimitedList().stream().map(ByteString::toStringUtf8).collect(Collectors.joining("")); - return Map.entry(String.join(".", field.getFullName(), fieldDesc.getName()), fieldValue); - }); - } + public static Stream<Map.Entry<String, String>> getProperties( + Descriptors.FieldDescriptor field, DescriptorProtos.DescriptorProto value) { + return value.getUnknownFields().asMap().entrySet().stream() + .map( + unknown -> { + Descriptors.FieldDescriptor fieldDesc = + field.getMessageType().findFieldByNumber(unknown.getKey()); + String fieldValue = + unknown.getValue().getLengthDelimitedList().stream() + .map(ByteString::toStringUtf8) + .collect(Collectors.joining("")); + return Map.entry( + String.join(".", field.getFullName(), fieldDesc.getName()), fieldValue); + }); + } - public static Stream<TagProperties> extractTagPropertiesFromOptions(List<Pair<Descriptors.FieldDescriptor, Object>> options, ExtensionRegistry registry) { - Stream<TagProperties> tags = filterByDataHubType(options, registry, DataHubMetadataType.TAG).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<?>) extEntry.getValue()).stream().map(v -> Pair.of(extEntry.getKey(), v)); - } else { - return Stream.of(extEntry); - } + public static Stream<TagProperties> extractTagPropertiesFromOptions( + List<Pair<Descriptors.FieldDescriptor, Object>> options, ExtensionRegistry registry) { + 
Stream<TagProperties> tags = + filterByDataHubType(options, registry, DataHubMetadataType.TAG).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<?>) extEntry.getValue()) + .stream().map(v -> Pair.of(extEntry.getKey(), v)); + } else { + return Stream.of(extEntry); + } }) - .map(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return new TagProperties() - .setName(String.format("%s.%s", entry.getKey().getName(), entry.getValue().toString())) - .setDescription(entry.getKey().getFullName()); - case BOOLEAN: - if ((boolean) entry.getValue()) { - return new TagProperties() - .setName(entry.getKey().getName()) - .setDescription(String.format("%s is true.", entry.getKey().getFullName())); - } - return null; - case ENUM: - Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) entry.getValue(); - String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); - String others = entry.getKey().getEnumType().getValues().stream() - .map(Descriptors.EnumValueDescriptor::getName).collect(Collectors.joining(", ")); - return new TagProperties() - .setName(name) - .setDescription(String.format("Enum %s of {%s}", name, others)); - default: - return null; - } - }).filter(Objects::nonNull); - - Stream<TagProperties> tagListTags = filterByDataHubType(options, registry, DataHubMetadataType.TAG_LIST).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return Arrays.stream(entry.getValue().toString().split(",")) - .map(t -> new TagProperties() - .setName(t.trim()) - .setDescription(entry.getKey().getFullName())); - default: - return Stream.empty(); - } - }).filter(Objects::nonNull); + .map( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return new TagProperties() + .setName( + String.format( + "%s.%s", entry.getKey().getName(), entry.getValue().toString())) + .setDescription(entry.getKey().getFullName()); + case BOOLEAN: + if ((boolean) entry.getValue()) { + return new TagProperties() + .setName(entry.getKey().getName()) + .setDescription( + String.format("%s is true.", entry.getKey().getFullName())); + } + return null; + case ENUM: + Descriptors.EnumValueDescriptor desc = + (Descriptors.EnumValueDescriptor) entry.getValue(); + String name = + String.format("%s.%s", desc.getType().getName(), desc.getName()); + String others = + entry.getKey().getEnumType().getValues().stream() + .map(Descriptors.EnumValueDescriptor::getName) + .collect(Collectors.joining(", ")); + return new TagProperties() + .setName(name) + .setDescription(String.format("Enum %s of {%s}", name, others)); + default: + return null; + } + }) + .filter(Objects::nonNull); - Stream<TagProperties> deprecationTag; - if (options.stream().anyMatch(opt -> opt.getKey().getFullName().endsWith(".deprecated") - && opt.getKey().getFullName().startsWith("google.protobuf.") - && opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.BOOLEAN - && (Boolean) opt.getValue())) { - deprecationTag = Stream.of(new TagProperties().setName("deprecated").setColorHex("#FF0000")); - } else { - deprecationTag = Stream.empty(); - } + Stream<TagProperties> tagListTags = + filterByDataHubType(options, registry, DataHubMetadataType.TAG_LIST).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return 
Arrays.stream(entry.getValue().toString().split(",")) + .map( + t -> + new TagProperties() + .setName(t.trim()) + .setDescription(entry.getKey().getFullName())); + default: + return Stream.empty(); + } + }) + .filter(Objects::nonNull); - return Stream.of(tags, tagListTags, deprecationTag).reduce(Stream::concat).orElse(Stream.empty()); + Stream<TagProperties> deprecationTag; + if (options.stream() + .anyMatch( + opt -> + opt.getKey().getFullName().endsWith(".deprecated") + && opt.getKey().getFullName().startsWith("google.protobuf.") + && opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.BOOLEAN + && (Boolean) opt.getValue())) { + deprecationTag = Stream.of(new TagProperties().setName("deprecated").setColorHex("#FF0000")); + } else { + deprecationTag = Stream.empty(); } - public static Stream<GlossaryTermAssociation> extractTermAssociationsFromOptions(List<Pair<Descriptors.FieldDescriptor, Object>> fieldOptions, - ExtensionRegistry registry) { - return filterByDataHubType(fieldOptions, registry, DataHubMetadataType.TERM).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<?>) extEntry.getValue()).stream().map(v -> Pair.of(extEntry.getKey(), v)); - } else { - return Stream.of(extEntry); - } - }) - .map(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return new GlossaryTermAssociation() - .setUrn(new GlossaryTermUrn(entry.getValue().toString())); - case ENUM: - Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) entry.getValue(); - String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); - return new GlossaryTermAssociation() - .setUrn(new GlossaryTermUrn(name)); - default: - return null; - } - }).filter(Objects::nonNull); - } + return Stream.of(tags, tagListTags, deprecationTag) + .reduce(Stream::concat) + .orElse(Stream.empty()); + } + + public static Stream<GlossaryTermAssociation> extractTermAssociationsFromOptions( + List<Pair<Descriptors.FieldDescriptor, Object>> fieldOptions, ExtensionRegistry registry) { + return filterByDataHubType(fieldOptions, registry, DataHubMetadataType.TERM).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<?>) extEntry.getValue()) + .stream().map(v -> Pair.of(extEntry.getKey(), v)); + } else { + return Stream.of(extEntry); + } + }) + .map( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return new GlossaryTermAssociation() + .setUrn(new GlossaryTermUrn(entry.getValue().toString())); + case ENUM: + Descriptors.EnumValueDescriptor desc = + (Descriptors.EnumValueDescriptor) entry.getValue(); + String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); + return new GlossaryTermAssociation().setUrn(new GlossaryTermUrn(name)); + default: + return null; + } + }) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java index b5c630302d946..336de520a96bd 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java @@ -1,24 +1,24 @@ package datahub.protobuf.visitors; - import 
datahub.protobuf.model.ProtobufElement; import datahub.protobuf.model.ProtobufField; import datahub.protobuf.model.ProtobufMessage; - import java.util.stream.Stream; public interface ProtobufModelVisitor<T> { - default Stream<T> visitField(ProtobufField field, VisitContext context) { - return visitElement(field, context); - } - default Stream<T> visitMessage(ProtobufMessage message, VisitContext context) { - return visitElement(message, context); - } + default Stream<T> visitField(ProtobufField field, VisitContext context) { + return visitElement(field, context); + } + + default Stream<T> visitMessage(ProtobufMessage message, VisitContext context) { + return visitElement(message, context); + } + + default Stream<T> visitElement(ProtobufElement element, VisitContext context) { + return Stream.of(); + } - default Stream<T> visitElement(ProtobufElement element, VisitContext context) { - return Stream.of(); - } - default Stream<T> visitGraph(VisitContext context) { - return Stream.of(); - } + default Stream<T> visitGraph(VisitContext context) { + return Stream.of(); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java index 51c92332d98a0..5718b0a8a2ae6 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java @@ -7,55 +7,56 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.model.ProtobufMessage; -import lombok.Builder; -import lombok.Getter; -import org.jgrapht.GraphPath; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; +import lombok.Builder; +import lombok.Getter; +import org.jgrapht.GraphPath; @Builder @Getter public class VisitContext { - public static final String FIELD_PATH_VERSION = "[version=2.0]"; - - private final DatasetUrn datasetUrn; - private final ProtobufGraph graph; - private final AuditStamp auditStamp; - - public ProtobufMessage root() { - return graph.root(); - } - - public Stream<GraphPath<ProtobufElement, FieldTypeEdge>> streamAllPaths(ProtobufField field) { - return graph.getAllPaths(root(), field).stream(); - } - - public String getFieldPath(GraphPath<ProtobufElement, FieldTypeEdge> path) { - String fieldPathString = path.getEdgeList().stream() - .flatMap(e -> Stream.of(e.getType(), e.getEdgeTarget().name())) - .collect(Collectors.joining(".")); - return String.join(".", FIELD_PATH_VERSION, root().fieldPathType(), fieldPathString); - } - - // This is because order matters for the frontend. 
Both for matching the protobuf field order
-    // and also the nested struct's fieldPaths
-    public Double calculateSortOrder(GraphPath<ProtobufElement, FieldTypeEdge> path, ProtobufField field) {
-        List<Integer> weights = path.getEdgeList().stream()
-                .map(FieldTypeEdge::getEdgeTarget)
-                .filter(f -> f instanceof ProtobufField)
-                .map(f -> ((ProtobufField) f).sortWeight())
-                .collect(Collectors.toList());
-
-        return IntStream.range(0, weights.size())
-                .mapToDouble(i -> weights.get(i) * (1.0 / (i + 1)))
-                .reduce(Double::sum)
-                .orElse(0);
-    }
-
-    public static class VisitContextBuilder {
-
-    };
+  public static final String FIELD_PATH_VERSION = "[version=2.0]";
+
+  private final DatasetUrn datasetUrn;
+  private final ProtobufGraph graph;
+  private final AuditStamp auditStamp;
+
+  public ProtobufMessage root() {
+    return graph.root();
+  }
+
+  public Stream<GraphPath<ProtobufElement, FieldTypeEdge>> streamAllPaths(ProtobufField field) {
+    return graph.getAllPaths(root(), field).stream();
+  }
+
+  public String getFieldPath(GraphPath<ProtobufElement, FieldTypeEdge> path) {
+    String fieldPathString =
+        path.getEdgeList().stream()
+            .flatMap(e -> Stream.of(e.getType(), e.getEdgeTarget().name()))
+            .collect(Collectors.joining("."));
+    return String.join(".", FIELD_PATH_VERSION, root().fieldPathType(), fieldPathString);
+  }
+
+  // Order matters for the frontend, both for matching the protobuf field order
+  // and for the nested struct's fieldPaths.
+  public Double calculateSortOrder(
+      GraphPath<ProtobufElement, FieldTypeEdge> path, ProtobufField field) {
+    List<Integer> weights =
+        path.getEdgeList().stream()
+            .map(FieldTypeEdge::getEdgeTarget)
+            .filter(f -> f instanceof ProtobufField)
+            .map(f -> ((ProtobufField) f).sortWeight())
+            .collect(Collectors.toList());
+
+    return IntStream.range(0, weights.size())
+        .mapToDouble(i -> weights.get(i) * (1.0 / (i + 1)))
+        .reduce(Double::sum)
+        .orElse(0);
+  }
+
+  public static class VisitContextBuilder {}
+
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java
index 80dc05d33e17d..1b03e13705910 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java
@@ -21,13 +21,10 @@
 import com.linkedin.dataset.DatasetProperties;
 import com.linkedin.domain.Domains;
 import com.linkedin.events.metadata.ChangeType;
+import datahub.event.MetadataChangeProposalWrapper;
 import datahub.protobuf.model.ProtobufGraph;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-import datahub.event.MetadataChangeProposalWrapper;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -35,76 +32,145 @@
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
 
 @Builder
 @AllArgsConstructor
-public class DatasetVisitor implements ProtobufModelVisitor<MetadataChangeProposalWrapper<?
extends RecordTemplate>> { - @Builder.Default - private final List<ProtobufModelVisitor<InstitutionalMemoryMetadata>> institutionalMemoryMetadataVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<DatasetProperties>> datasetPropertyVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<TagAssociation>> tagAssociationVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<GlossaryTermAssociation>> termAssociationVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<Owner>> ownershipVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<com.linkedin.common.urn.Urn>> domainVisitors = List.of(); - @Builder.Default - private final String protocBase64 = ""; - @Builder.Default - private final ProtobufModelVisitor<String> descriptionVisitor = new DescriptionVisitor(); - @Builder.Default - private final ProtobufModelVisitor<Deprecation> deprecationVisitor = new DeprecationVisitor(); +public class DatasetVisitor + implements ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>> { + @Builder.Default + private final List<ProtobufModelVisitor<InstitutionalMemoryMetadata>> + institutionalMemoryMetadataVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<DatasetProperties>> datasetPropertyVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<TagAssociation>> tagAssociationVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<GlossaryTermAssociation>> termAssociationVisitors = + List.of(); + + @Builder.Default private final List<ProtobufModelVisitor<Owner>> ownershipVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<com.linkedin.common.urn.Urn>> domainVisitors = List.of(); + + @Builder.Default private final String protocBase64 = ""; + + @Builder.Default + private final ProtobufModelVisitor<String> descriptionVisitor = new DescriptionVisitor(); + + @Builder.Default + private final ProtobufModelVisitor<Deprecation> deprecationVisitor = new DeprecationVisitor(); - @Override - public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitGraph(VisitContext context) { - final String datasetUrn = context.getDatasetUrn().toString(); - final ProtobufGraph g = context.getGraph(); + @Override + public Stream<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> visitGraph( + VisitContext context) { + final String datasetUrn = context.getDatasetUrn().toString(); + final ProtobufGraph g = context.getGraph(); - return Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new DatasetProperties() - .setName(context.getDatasetUrn() + return Stream.of( + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new DatasetProperties() + .setName( + context + .getDatasetUrn() .getDatasetNameEntity() - .substring(context.getDatasetUrn() - .getDatasetNameEntity() - .lastIndexOf(".") + 1)) - .setQualifiedName(context.getDatasetUrn().getDatasetNameEntity()) - .setDescription(g.accept(context, List.of(descriptionVisitor)).collect(Collectors.joining("\n"))) - .setCustomProperties(new StringMap( - Stream.concat( - Stream.of(Map.entry("protoc", protocBase64)), - g.accept(context, datasetPropertyVisitors).flatMap(props -> props.getCustomProperties().entrySet().stream())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - )), "datasetProperties"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, new InstitutionalMemory().setElements( + .substring( + context.getDatasetUrn().getDatasetNameEntity().lastIndexOf(".") + + 1)) + .setQualifiedName(context.getDatasetUrn().getDatasetNameEntity()) + .setDescription( + g.accept(context, List.of(descriptionVisitor)) + .collect(Collectors.joining("\n"))) + .setCustomProperties( + new StringMap( + Stream.concat( + Stream.of(Map.entry("protoc", protocBase64)), + g.accept(context, datasetPropertyVisitors) + .flatMap( + props -> + props.getCustomProperties().entrySet().stream())) + .collect( + Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))), + "datasetProperties"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new InstitutionalMemory() + .setElements( new InstitutionalMemoryMetadataArray( - g.accept(context, institutionalMemoryMetadataVisitors) - .map(inst -> inst.setCreateStamp(context.getAuditStamp())) - .collect(Collectors.toMap(InstitutionalMemoryMetadata::getUrl, Function.identity(), - (a1, a2) -> a1, LinkedHashMap::new)) - .values() - )), "institutionalMemory"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new GlobalTags().setTags(new TagAssociationArray( - g.accept(context, tagAssociationVisitors).collect(Collectors.toList()) - )), "globalTags"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - g.accept(context, termAssociationVisitors).collect(Collectors.toList()) - )).setAuditStamp(context.getAuditStamp()), "glossaryTerms"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new Ownership().setOwners(new OwnerArray( - g.accept(context, ownershipVisitors).collect(Collectors.toList()) - )).setLastModified(context.getAuditStamp()), "ownership"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new Domains(new DataMap(Map.of("domains", - new UrnArray(g.accept(context, domainVisitors).collect(Collectors.toList())).data()))), "domains"), - g.accept(context, List.of(deprecationVisitor)).findFirst() - .map(dep -> new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - dep, "deprecation")).orElse(null) 
- ).filter(Objects::nonNull); - } + g.accept(context, institutionalMemoryMetadataVisitors) + .map(inst -> inst.setCreateStamp(context.getAuditStamp())) + .collect( + Collectors.toMap( + InstitutionalMemoryMetadata::getUrl, + Function.identity(), + (a1, a2) -> a1, + LinkedHashMap::new)) + .values())), + "institutionalMemory"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new GlobalTags() + .setTags( + new TagAssociationArray( + g.accept(context, tagAssociationVisitors) + .collect(Collectors.toList()))), + "globalTags"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + g.accept(context, termAssociationVisitors) + .collect(Collectors.toList()))) + .setAuditStamp(context.getAuditStamp()), + "glossaryTerms"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new Ownership() + .setOwners( + new OwnerArray( + g.accept(context, ownershipVisitors).collect(Collectors.toList()))) + .setLastModified(context.getAuditStamp()), + "ownership"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new Domains( + new DataMap( + Map.of( + "domains", + new UrnArray( + g.accept(context, domainVisitors).collect(Collectors.toList())) + .data()))), + "domains"), + g.accept(context, List.of(deprecationVisitor)) + .findFirst() + .map( + dep -> + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + dep, + "deprecation")) + .orElse(null)) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java index 612082e6a521b..46d17205e4219 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java @@ -1,53 +1,60 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.google.protobuf.Descriptors; import com.linkedin.common.Deprecation; import com.linkedin.util.Pair; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class DeprecationVisitor implements ProtobufModelVisitor<Deprecation> { - @Override - public Stream<Deprecation> visitGraph(VisitContext context) { - if (context.root().messageProto().getOptions().getDeprecated()) { - List<Pair<Descriptors.FieldDescriptor, Object>> deprecationOptions = ProtobufExtensionUtil - .filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.DEPRECATION); - - String decommissionNote = deprecationOptions.stream() - .filter(opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.STRING) - .flatMap(opt -> { - if (opt.getKey().isRepeated()) { - return 
((Collection<String>) opt.getValue()).stream(); - } else { - return Stream.of(opt.getValue()); - } - }) - .map(Object::toString) - .collect(Collectors.joining("\n")); - - Optional<Long> decommissionTime = deprecationOptions.stream() - .filter(opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.LONG) - .map(opt -> (Long) opt.getValue()) - .findFirst(); - - return Stream.of(new Deprecation() - .setDeprecated(true) - .setNote(decommissionNote) - .setDecommissionTime(decommissionTime.orElse(0L)) - .setActor(context.getAuditStamp().getActor())); - } else { - return Stream.empty(); - } + @Override + public Stream<Deprecation> visitGraph(VisitContext context) { + if (context.root().messageProto().getOptions().getDeprecated()) { + List<Pair<Descriptors.FieldDescriptor, Object>> deprecationOptions = + ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.DEPRECATION); + + String decommissionNote = + deprecationOptions.stream() + .filter( + opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.STRING) + .flatMap( + opt -> { + if (opt.getKey().isRepeated()) { + return ((Collection<String>) opt.getValue()).stream(); + } else { + return Stream.of(opt.getValue()); + } + }) + .map(Object::toString) + .collect(Collectors.joining("\n")); + + Optional<Long> decommissionTime = + deprecationOptions.stream() + .filter( + opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.LONG) + .map(opt -> (Long) opt.getValue()) + .findFirst(); + + return Stream.of( + new Deprecation() + .setDeprecated(true) + .setNote(decommissionNote) + .setDecommissionTime(decommissionTime.orElse(0L)) + .setActor(context.getAuditStamp().getActor())); + } else { + return Stream.empty(); } + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java index 802c7e0c05408..4bd7dd96d0db9 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java @@ -2,13 +2,12 @@ import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; public class DescriptionVisitor implements ProtobufModelVisitor<String> { - @Override - public Stream<String> visitGraph(VisitContext context) { - return Stream.of(context.root().comment()); - } + @Override + public Stream<String> visitGraph(VisitContext context) { + return Stream.of(context.root().comment()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java index ac9d092c2392e..01908bb8c3b6d 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java @@ -1,23 +1,24 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.urn.Urn; import 
com.linkedin.util.Pair; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class DomainVisitor implements ProtobufModelVisitor<Urn> { - @Override - public Stream<Urn> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.DOMAIN) - .stream().map(Pair::getValue).map(o -> - Urn.createFromTuple("domain", ((String) o).toLowerCase()) - ); - } + @Override + public Stream<Urn> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.DOMAIN) + .stream() + .map(Pair::getValue) + .map(o -> Urn.createFromTuple("domain", ((String) o).toLowerCase())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java index b6f52fe01c109..c4a29b1b70f61 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java @@ -5,8 +5,6 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - -import javax.annotation.Nullable; import java.util.LinkedList; import java.util.List; import java.util.Optional; @@ -18,123 +16,153 @@ import java.util.regex.Pattern; import java.util.stream.Stream; import java.util.stream.StreamSupport; +import javax.annotation.Nullable; -public class InstitutionalMemoryVisitor implements ProtobufModelVisitor<InstitutionalMemoryMetadata> { - public static final String TEAM_DESC = "Github Team"; - public static final String SLACK_CHAN_DESC = "Slack Channel"; - - private static final Pattern SLACK_CHANNEL_REGEX = Pattern.compile("(?si).*#([a-z0-9-]+).*"); - private static final Pattern LINK_REGEX = Pattern.compile("(?s)(\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|])"); - private final String githubOrganization; - private final Pattern githubTeamRegex; - private final String slackTeamId; - - public InstitutionalMemoryVisitor(@Nullable String slackTeamId, @Nullable String githubOrganization) { - this.slackTeamId = slackTeamId; - this.githubOrganization = githubOrganization; - if (githubOrganization != null) { - this.githubTeamRegex = Pattern.compile(String.format("(?si).*@%s/([a-z-]+).*", githubOrganization)); - } else { - this.githubTeamRegex = null; - } +public class InstitutionalMemoryVisitor + implements ProtobufModelVisitor<InstitutionalMemoryMetadata> { + public static final String TEAM_DESC = "Github Team"; + public static final String SLACK_CHAN_DESC = "Slack Channel"; + + private static final Pattern SLACK_CHANNEL_REGEX = Pattern.compile("(?si).*#([a-z0-9-]+).*"); + private static final Pattern LINK_REGEX = + Pattern.compile( + "(?s)(\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|])"); + private final String 
githubOrganization; + private final Pattern githubTeamRegex; + private final String slackTeamId; + + public InstitutionalMemoryVisitor( + @Nullable String slackTeamId, @Nullable String githubOrganization) { + this.slackTeamId = slackTeamId; + this.githubOrganization = githubOrganization; + if (githubOrganization != null) { + this.githubTeamRegex = + Pattern.compile(String.format("(?si).*@%s/([a-z-]+).*", githubOrganization)); + } else { + this.githubTeamRegex = null; } - - // https://slack.com/app_redirect?channel=fdn-analytics-data-catalog&team=T024F4EL1 - private Optional<Url> slackLink(String text) { - return Optional.ofNullable(slackTeamId).map(teamId -> { - Matcher m = SLACK_CHANNEL_REGEX.matcher(text); - if (m.matches()) { - return new Url(String.format("https://slack.com/app_redirect?channel=%s&team=%s", m.group(1), slackTeamId)); - } else { + } + + // https://slack.com/app_redirect?channel=fdn-analytics-data-catalog&team=T024F4EL1 + private Optional<Url> slackLink(String text) { + return Optional.ofNullable(slackTeamId) + .map( + teamId -> { + Matcher m = SLACK_CHANNEL_REGEX.matcher(text); + if (m.matches()) { + return new Url( + String.format( + "https://slack.com/app_redirect?channel=%s&team=%s", + m.group(1), slackTeamId)); + } else { return null; - } - }); - } - - private Optional<Url> teamLink(String text) { - return Optional.ofNullable(githubTeamRegex).map(regex -> { - Matcher m = regex.matcher(text); - if (m.matches()) { - return new Url(String.format("https://github.com/orgs/%s/teams/%s", githubOrganization, m.group(1))); - } else { + } + }); + } + + private Optional<Url> teamLink(String text) { + return Optional.ofNullable(githubTeamRegex) + .map( + regex -> { + Matcher m = regex.matcher(text); + if (m.matches()) { + return new Url( + String.format( + "https://github.com/orgs/%s/teams/%s", githubOrganization, m.group(1))); + } else { return null; - } - }); - } + } + }); + } - @Override - public Stream<InstitutionalMemoryMetadata> visitGraph(VisitContext context) { - List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); + @Override + public Stream<InstitutionalMemoryMetadata> visitGraph(VisitContext context) { + List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); - teamLink(context.root().comment()).ifPresent(url -> - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() + teamLink(context.root().comment()) + .ifPresent( + url -> + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() .setCreateStamp(context.getAuditStamp()) .setDescription(TEAM_DESC) .setUrl(url))); - - slackLink(context.root().comment()).ifPresent(url -> - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() + slackLink(context.root().comment()) + .ifPresent( + url -> + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() .setCreateStamp(context.getAuditStamp()) .setDescription(SLACK_CHAN_DESC) .setUrl(url))); - final int[] cnt = {0}; - MatcherStream.findMatches(LINK_REGEX, context.root().comment()).forEach(match -> { - cnt[0] += 1; - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() - .setCreateStamp(context.getAuditStamp()) - .setDescription(String.format("%s Reference %d", context.root().name(), cnt[0])) - .setUrl(new Url(match.group(1)))); - }); + final int[] cnt = {0}; + MatcherStream.findMatches(LINK_REGEX, context.root().comment()) + .forEach( + match -> { + cnt[0] += 1; + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() + 
.setCreateStamp(context.getAuditStamp()) + .setDescription( + String.format("%s Reference %d", context.root().name(), cnt[0])) + .setUrl(new Url(match.group(1)))); + }); - return institutionalMemoryMetadata.stream(); - } + return institutionalMemoryMetadata.stream(); + } - @Override - public Stream<InstitutionalMemoryMetadata> visitField(ProtobufField field, VisitContext context) { - List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); + @Override + public Stream<InstitutionalMemoryMetadata> visitField(ProtobufField field, VisitContext context) { + List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); - if (field.messageProto().equals(context.getGraph().root().messageProto())) { - final int[] cnt = {0}; - MatcherStream.findMatches(LINK_REGEX, field.comment()).forEach(match -> { + if (field.messageProto().equals(context.getGraph().root().messageProto())) { + final int[] cnt = {0}; + MatcherStream.findMatches(LINK_REGEX, field.comment()) + .forEach( + match -> { cnt[0] += 1; - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() .setCreateStamp(context.getAuditStamp()) - .setDescription(String.format("%s.%s Reference %d", + .setDescription( + String.format( + "%s.%s Reference %d", field.getProtobufMessage().name(), field.getFieldProto().getName(), cnt[0])) .setUrl(new Url(match.group(1)))); - }); - } + }); + } + + return institutionalMemoryMetadata.stream(); + } + + private static class MatcherStream { + private MatcherStream() {} - return institutionalMemoryMetadata.stream(); + public static Stream<String> find(Pattern pattern, CharSequence input) { + return findMatches(pattern, input).map(MatchResult::group); } - private static class MatcherStream { - private MatcherStream() { } - - public static Stream<String> find(Pattern pattern, CharSequence input) { - return findMatches(pattern, input).map(MatchResult::group); - } - - public static Stream<MatchResult> findMatches( - Pattern pattern, CharSequence input) { - Matcher matcher = pattern.matcher(input); - - Spliterator<MatchResult> spliterator = new Spliterators.AbstractSpliterator<MatchResult>( - Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.NONNULL) { - @Override - public boolean tryAdvance(Consumer<? super MatchResult> action) { - if (!matcher.find()) { - return false; - } - action.accept(matcher.toMatchResult()); - return true; - } }; - - return StreamSupport.stream(spliterator, false); - } + public static Stream<MatchResult> findMatches(Pattern pattern, CharSequence input) { + Matcher matcher = pattern.matcher(input); + + Spliterator<MatchResult> spliterator = + new Spliterators.AbstractSpliterator<MatchResult>( + Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.NONNULL) { + @Override + public boolean tryAdvance(Consumer<? 
super MatchResult> action) { + if (!matcher.find()) { + return false; + } + action.accept(matcher.toMatchResult()); + return true; + } + }; + + return StreamSupport.stream(spliterator, false); } + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java index 57ec38611d47f..d2132316fdef3 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java @@ -4,28 +4,28 @@ import com.linkedin.dataset.DatasetProperties; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Map; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; - public class KafkaTopicPropertyVisitor implements ProtobufModelVisitor<DatasetProperties> { - @Override - public Stream<DatasetProperties> visitGraph(VisitContext context) { - return getKafkaTopic(context.root().comment()).stream().map(kafkaTopic -> - new DatasetProperties() - .setCustomProperties(new StringMap(Map.of("kafka_topic", kafkaTopic))) - ); - } + @Override + public Stream<DatasetProperties> visitGraph(VisitContext context) { + return getKafkaTopic(context.root().comment()).stream() + .map( + kafkaTopic -> + new DatasetProperties() + .setCustomProperties(new StringMap(Map.of("kafka_topic", kafkaTopic)))); + } - private static final Pattern TOPIC_NAME_REGEX = Pattern.compile("(?si).*kafka.+topic.+[`]([a-z._-]+)[`].*"); + private static final Pattern TOPIC_NAME_REGEX = + Pattern.compile("(?si).*kafka.+topic.+[`]([a-z._-]+)[`].*"); - private static Optional<String> getKafkaTopic(String text) { - Matcher m = TOPIC_NAME_REGEX.matcher(text); - return m.matches() ? Optional.of(m.group(1)) : Optional.empty(); - } + private static Optional<String> getKafkaTopic(String text) { + Matcher m = TOPIC_NAME_REGEX.matcher(text); + return m.matches() ? 
Optional.of(m.group(1)) : Optional.empty(); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java index 7bb4d9860f72c..0a7081a35fa86 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java @@ -1,5 +1,7 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; @@ -8,47 +10,55 @@ import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.net.URISyntaxException; import java.util.Collection; import java.util.Map; import java.util.Objects; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class OwnershipVisitor implements ProtobufModelVisitor<Owner> { - @Override - public Stream<Owner> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry(), - ProtobufExtensionUtil.DataHubMetadataType.OWNER) - .stream() - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<String>) extEntry.getValue()).stream().map(v -> Map.entry(extEntry.getKey(), v)); - } else { - return Stream.of(Map.entry(extEntry.getKey(), (String) extEntry.getValue())); - } - }) - .map(entry -> { - try { - OwnershipType ownershipType; - try { - ownershipType = OwnershipType.valueOf(entry.getKey().getName().toUpperCase()); - } catch (IllegalArgumentException e) { - ownershipType = OwnershipType.TECHNICAL_OWNER; - } + @Override + public Stream<Owner> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.OWNER) + .stream() + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<String>) extEntry.getValue()) + .stream().map(v -> Map.entry(extEntry.getKey(), v)); + } else { + return Stream.of(Map.entry(extEntry.getKey(), (String) extEntry.getValue())); + } + }) + .map( + entry -> { + try { + OwnershipType ownershipType; + try { + ownershipType = OwnershipType.valueOf(entry.getKey().getName().toUpperCase()); + } catch (IllegalArgumentException e) { + ownershipType = OwnershipType.TECHNICAL_OWNER; + } - String[] id = entry.getValue().toLowerCase().split(":", 2); - return new Owner() - .setType(ownershipType) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(new Urn(id.length > 1 ? id[0].replaceFirst("corpgroup", "corpGroup") : "corpGroup", id[id.length - 1])); - } catch (URISyntaxException e) { - System.err.println(e.getMessage()); - return null; - } - }).filter(Objects::nonNull); - } + String[] id = entry.getValue().toLowerCase().split(":", 2); + return new Owner() + .setType(ownershipType) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner( + new Urn( + id.length > 1 + ? 
id[0].replaceFirst("corpgroup", "corpGroup") + : "corpGroup", + id[id.length - 1])); + } catch (URISyntaxException e) { + System.err.println(e.getMessage()); + return null; + } + }) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java index 9abd903f242aa..113cf6f1a548f 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java @@ -1,45 +1,56 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; +import static datahub.protobuf.visitors.ProtobufExtensionUtil.getProperties; + import com.google.gson.Gson; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Collection; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; -import static datahub.protobuf.visitors.ProtobufExtensionUtil.getProperties; - - public class PropertyVisitor implements ProtobufModelVisitor<DatasetProperties> { - private static final Gson GSON = new Gson(); + private static final Gson GSON = new Gson(); - @Override - public Stream<DatasetProperties> visitGraph(VisitContext context) { - Map<String, String> properties = ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.PROPERTY) - .stream().flatMap(fd -> { - if (fd.getKey().getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { - if (fd.getKey().isRepeated()) { - return Stream.of(Map.entry(fd.getKey().getName(), GSON.toJson( - ((Collection<?>) fd.getValue()).stream().map(Object::toString).collect(Collectors.toList())))); - } else { - return Stream.of(Map.entry(fd.getKey().getName(), fd.getValue().toString())); - } + @Override + public Stream<DatasetProperties> visitGraph(VisitContext context) { + Map<String, String> properties = + ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.PROPERTY) + .stream() + .flatMap( + fd -> { + if (fd.getKey().getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { + if (fd.getKey().isRepeated()) { + return Stream.of( + Map.entry( + fd.getKey().getName(), + GSON.toJson( + ((Collection<?>) fd.getValue()) + .stream() + .map(Object::toString) + .collect(Collectors.toList())))); } else { - Descriptors.FieldDescriptor field = fd.getKey(); - DescriptorProtos.DescriptorProto value = (DescriptorProtos.DescriptorProto) fd.getValue(); - return getProperties(field, value); + return Stream.of(Map.entry(fd.getKey().getName(), fd.getValue().toString())); } - }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } else { + Descriptors.FieldDescriptor field = fd.getKey(); + 
DescriptorProtos.DescriptorProto value = + (DescriptorProtos.DescriptorProto) fd.getValue(); + return getProperties(field, value); + } + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - return Stream.of(new DatasetProperties().setCustomProperties(new StringMap(properties))); - } + return Stream.of(new DatasetProperties().setCustomProperties(new StringMap(properties))); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java index f0ca32fbbc2f8..6874044215241 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java @@ -1,22 +1,20 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.TagUrn; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - - public class TagAssociationVisitor implements ProtobufModelVisitor<TagAssociation> { - @Override - public Stream<TagAssociation> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry()) - .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))); - } + @Override + public Stream<TagAssociation> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry()) + .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java index 7656bb5236825..b13bc0eed1152 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java @@ -1,19 +1,18 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.GlossaryTermAssociation; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class TermAssociationVisitor implements ProtobufModelVisitor<GlossaryTermAssociation> { - @Override - public Stream<GlossaryTermAssociation> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.extractTermAssociationsFromOptions(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry()); - } + 
@Override + public Stream<GlossaryTermAssociation> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.extractTermAssociationsFromOptions( + getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java index c67c7414e521b..240cf7b6d168b 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java @@ -1,5 +1,8 @@ package datahub.protobuf.visitors.field; +import static datahub.protobuf.ProtobufUtils.getFieldOptions; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -13,41 +16,45 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.VisitContext; - import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getFieldOptions; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class ProtobufExtensionFieldVisitor extends SchemaFieldVisitor { - @Override - public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { - boolean isPrimaryKey = getFieldOptions(field.getFieldProto()).stream().map(Pair::getKey) - .anyMatch(fieldDesc -> fieldDesc.getName().matches("(?i).*primary_?key")); + @Override + public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { + boolean isPrimaryKey = + getFieldOptions(field.getFieldProto()).stream() + .map(Pair::getKey) + .anyMatch(fieldDesc -> fieldDesc.getName().matches("(?i).*primary_?key")); - List<TagAssociation> tags = Stream.concat( + List<TagAssociation> tags = + Stream.concat( ProtobufExtensionUtil.extractTagPropertiesFromOptions( - getFieldOptions(field.getFieldProto()), - context.getGraph().getRegistry()), - promotedTags(field, context)) - .distinct().map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))) - .sorted(Comparator.comparing(t -> t.getTag().getName())) - .collect(Collectors.toList()); + getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()), + promotedTags(field, context)) + .distinct() + .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))) + .sorted(Comparator.comparing(t -> t.getTag().getName())) + .collect(Collectors.toList()); - List<GlossaryTermAssociation> terms = Stream.concat( + List<GlossaryTermAssociation> terms = + Stream.concat( ProtobufExtensionUtil.extractTermAssociationsFromOptions( - getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()), + getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()), promotedTerms(field, context)) - .distinct() - .sorted(Comparator.comparing(a -> a.getUrn().getNameEntity())) - .collect(Collectors.toList()); + .distinct() + .sorted(Comparator.comparing(a -> a.getUrn().getNameEntity())) + .collect(Collectors.toList()); - return context.streamAllPaths(field).map(path -> 
Pair.of( - new SchemaField() + return context + .streamAllPaths(field) + .map( + path -> + Pair.of( + new SchemaField() .setFieldPath(context.getFieldPath(path)) .setNullable(!isPrimaryKey) .setIsPartOfKey(isPrimaryKey) @@ -55,40 +62,48 @@ public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitCo .setNativeDataType(field.nativeType()) .setType(field.schemaFieldDataType()) .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray(tags))) - .setGlossaryTerms(new GlossaryTerms() + .setGlossaryTerms( + new GlossaryTerms() .setTerms(new GlossaryTermAssociationArray(terms)) .setAuditStamp(context.getAuditStamp())), - context.calculateSortOrder(path, field))); - } + context.calculateSortOrder(path, field))); + } - /** - * Promote tags from nested message to field. - * @return tags - */ - private Stream<TagProperties> promotedTags(ProtobufField field, VisitContext context) { - if (field.isMessage()) { - return context.getGraph().outgoingEdgesOf(field).stream().flatMap(e -> - ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(e.getEdgeTarget().messageProto()), - context.getGraph().getRegistry()) - ).distinct(); - } else { - return Stream.of(); - } + /** + * Promote tags from nested message to field. + * + * @return tags + */ + private Stream<TagProperties> promotedTags(ProtobufField field, VisitContext context) { + if (field.isMessage()) { + return context.getGraph().outgoingEdgesOf(field).stream() + .flatMap( + e -> + ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getMessageOptions(e.getEdgeTarget().messageProto()), + context.getGraph().getRegistry())) + .distinct(); + } else { + return Stream.of(); } + } - /** - * Promote terms from nested message to field. - * @return terms - */ - private Stream<GlossaryTermAssociation> promotedTerms(ProtobufField field, VisitContext context) { - if (field.isMessage()) { - return context.getGraph().outgoingEdgesOf(field).stream().flatMap(e -> - ProtobufExtensionUtil.extractTermAssociationsFromOptions(getMessageOptions(e.getEdgeTarget().messageProto()), - context.getGraph().getRegistry()) - ).distinct(); - } else { - return Stream.of(); - } + /** + * Promote terms from nested message to field. 
+ * + * @return terms + */ + private Stream<GlossaryTermAssociation> promotedTerms(ProtobufField field, VisitContext context) { + if (field.isMessage()) { + return context.getGraph().outgoingEdgesOf(field).stream() + .flatMap( + e -> + ProtobufExtensionUtil.extractTermAssociationsFromOptions( + getMessageOptions(e.getEdgeTarget().messageProto()), + context.getGraph().getRegistry())) + .distinct(); + } else { + return Stream.of(); } - + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java index 8f8da1970967d..46f9bc5f2f90c 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java @@ -5,21 +5,23 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; public class SchemaFieldVisitor implements ProtobufModelVisitor<Pair<SchemaField, Double>> { - @Override - public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { - return context.streamAllPaths(field).map(path -> + @Override + public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { + return context + .streamAllPaths(field) + .map( + path -> Pair.of( - new SchemaField() - .setFieldPath(context.getFieldPath(path)) - .setNullable(true) - .setDescription(field.comment()) - .setNativeDataType(field.nativeType()) - .setType(field.schemaFieldDataType()), - context.calculateSortOrder(path, field))); - } + new SchemaField() + .setFieldPath(context.getFieldPath(path)) + .setNullable(true) + .setDescription(field.comment()) + .setNativeDataType(field.nativeType()) + .setType(field.schemaFieldDataType()), + context.calculateSortOrder(path, field))); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java index eb416653232a1..ad6a3344e5b1e 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java @@ -1,43 +1,46 @@ package datahub.protobuf.visitors.tags; +import static datahub.protobuf.ProtobufUtils.getFieldOptions; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.urn.TagUrn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.tag.TagProperties; +import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufField; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import datahub.event.MetadataChangeProposalWrapper; - -import static datahub.protobuf.ProtobufUtils.getFieldOptions; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - import java.util.stream.Stream; -public class TagVisitor implements 
ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>> { - private static final String TAG_PROPERTIES_ASPECT = "tagProperties"; +public class TagVisitor + implements ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>> { + private static final String TAG_PROPERTIES_ASPECT = "tagProperties"; - @Override - public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry()) - .map(TagVisitor::wrapTagProperty); - } + @Override + public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitGraph( + VisitContext context) { + return ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry()) + .map(TagVisitor::wrapTagProperty); + } - @Override - public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitField(ProtobufField field, VisitContext context) { - return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getFieldOptions(field.getFieldProto()), - context.getGraph().getRegistry()) - .map(TagVisitor::wrapTagProperty); - } + @Override + public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitField( + ProtobufField field, VisitContext context) { + return ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()) + .map(TagVisitor::wrapTagProperty); + } - private static MetadataChangeProposalWrapper<TagProperties> wrapTagProperty(TagProperties tagProperty) { - return new MetadataChangeProposalWrapper<>( - "tag", - new TagUrn(tagProperty.getName()).toString(), - ChangeType.UPSERT, - tagProperty, - TAG_PROPERTIES_ASPECT); - } + private static MetadataChangeProposalWrapper<TagProperties> wrapTagProperty( + TagProperties tagProperty) { + return new MetadataChangeProposalWrapper<>( + "tag", + new TagUrn(tagProperty.getName()).toString(), + ChangeType.UPSERT, + tagProperty, + TAG_PROPERTIES_ASPECT); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java index bbb8e532f1033..e96bb63220b04 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java @@ -1,5 +1,10 @@ package datahub.protobuf; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import com.linkedin.common.FabricType; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -9,9 +14,8 @@ import com.linkedin.common.Status; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.url.Url; -import com.linkedin.data.template.StringArray; import com.linkedin.common.urn.DataPlatformUrn; -import com.linkedin.common.FabricType; +import com.linkedin.data.template.StringArray; import com.linkedin.schema.ArrayType; import com.linkedin.schema.BooleanType; import com.linkedin.schema.BytesType; @@ -26,430 +30,701 @@ import datahub.protobuf.model.ProtobufField; import 
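
For readers skimming the reformatted TagVisitor above: the shape of the proposals it emits is easy to check in isolation. The following standalone sketch mirrors its wrapTagProperty logic using only constructors and calls visible in the diff; the class name TagWrapSketch and the example tag name "pii" are illustrative, not part of the codebase.

    import com.linkedin.common.urn.TagUrn;
    import com.linkedin.events.metadata.ChangeType;
    import com.linkedin.tag.TagProperties;
    import datahub.event.MetadataChangeProposalWrapper;

    public class TagWrapSketch {
      public static void main(String[] args) {
        // One TagProperties aspect discovered on a message or field option
        // becomes one UPSERT proposal against the tag entity, keyed by TagUrn.
        TagProperties tagProperty = new TagProperties().setName("pii");
        MetadataChangeProposalWrapper<TagProperties> mcp =
            new MetadataChangeProposalWrapper<>(
                "tag",
                new TagUrn(tagProperty.getName()).toString(), // urn:li:tag:pii
                ChangeType.UPSERT,
                tagProperty,
                "tagProperties");
        // mcp is exactly what visitGraph/visitField stream back to the caller.
      }
    }
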
datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; - +import org.junit.jupiter.api.Test; public class ProtobufDatasetTest { - @Test - public void noSchemaTest() throws IOException { - ProtobufDataset dataset = ProtobufDataset.builder() - .setDataPlatformUrn(new DataPlatformUrn("kafka")) - .setProtocIn(getTestProtoc("protobuf", "messageA")) - .setAuditStamp(TEST_AUDIT_STAMP) - .setFabricType(FabricType.DEV) - .build(); - - assertNotNull(dataset); - assertEquals(2, dataset.getAllMetadataChangeProposals().count()); - assertEquals(8, dataset.getDatasetMCPs().size()); - assertEquals(0, dataset.getVisitorMCPs().size()); - } - - @Test - public void platformSchemaTest() throws IOException { - assertEquals(getTestProtoSource("protobuf", "messageA"), - extractDocumentSchema(getTestProtobufDataset("protobuf", "messageA"))); - } - - @Test - public void messageA() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(9, testMetadata.getFields().size()); - - assertEquals("MessageA", extractAspect(test.getDatasetMCPs().get(0), "name")); - assertEquals("protobuf.MessageA", extractAspect(test.getDatasetMCPs().get(0), "qualifiedName")); - - assertEquals("platform.topic", extractCustomProperty(test.getDatasetMCPs().get(0), "kafka_topic")); - - assertEquals(new InstitutionalMemory().setElements(new InstitutionalMemoryMetadataArray( - new InstitutionalMemoryMetadata() - .setDescription("Github Team") - .setCreateStamp(TEST_AUDIT_STAMP) - .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), - new InstitutionalMemoryMetadata() - .setDescription("Slack Channel") - .setCreateStamp(TEST_AUDIT_STAMP) - .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 1") - .setUrl(new Url("https://some/link")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 2") - .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 3") - .setUrl(new Url("https://github.com/apache/kafka")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA.map_field Reference 1") - .setUrl(new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps")))).data(), - test.getDatasetMCPs().get(1).getAspect().data()); - - assertEquals(new Status().setRemoved(false).data(), test.getDatasetMCPs().get(test.getDatasetMCPs().size() - 1).getAspect().data()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))) - .setNativeDataType("bytes") - .setNullable(true) - 
.setIsPartOfKey(false) - .setDescription("Leading single line comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].position") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("uint32") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Leading multiline comment\nSecond line of leading multiline comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=int].position")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].total") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("uint32") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Detached comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=int].total")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("uint64") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Test repeated and trailing comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("string") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str")).findFirst().orElseThrow()); - - } - - @Test - public void messageB() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - 
assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(24, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].id") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("google.protobuf.Int64Value") - .setDescription("wrapped int64") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=long].id")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BooleanType()))) - .setNativeDataType("google.protobuf.BoolValue") - .setDescription("Indicator") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot")).findFirst().orElseThrow()); - - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=string].value") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("message value") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=string].value")).findFirst().orElseThrow()); - } - - @Test - public void messageC() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC"); - - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(4, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) - .setNativeDataType("oneof") - .setDescription("one of field comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - 
.setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("one of string comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("int32") - .setDescription("one of int comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")).findFirst().orElseThrow()); - } - - @Test - @SuppressWarnings("LineLength") - public void messageC2NestedOneOf() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC2"); - - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC1,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(6, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageC2") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("protobuf.MessageC3") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - 
.setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) - .setNativeDataType("oneof") - .setDescription("one of field comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("one of string comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("int32") - .setDescription("one of int comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int")).findFirst().orElseThrow()); - } - - @Test - public void customFieldVisitors() throws 
IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); - - test.setFieldVisitor(new ProtobufModelVisitor<Pair<SchemaField, Double>>() { - @Override - public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { - if (field.fullName().equals("protobuf.MessageA.sequence_id")) { - return Stream.of(Pair.of( - new SchemaField() - .setDescription("my comment") - .setNativeDataType("my type") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))), - 0d)); - } else { - return Stream.of(); - } + @Test + public void noSchemaTest() throws IOException { + ProtobufDataset dataset = + ProtobufDataset.builder() + .setDataPlatformUrn(new DataPlatformUrn("kafka")) + .setProtocIn(getTestProtoc("protobuf", "messageA")) + .setAuditStamp(TEST_AUDIT_STAMP) + .setFabricType(FabricType.DEV) + .build(); + + assertNotNull(dataset); + assertEquals(2, dataset.getAllMetadataChangeProposals().count()); + assertEquals(8, dataset.getDatasetMCPs().size()); + assertEquals(0, dataset.getVisitorMCPs().size()); + } + + @Test + public void platformSchemaTest() throws IOException { + assertEquals( + getTestProtoSource("protobuf", "messageA"), + extractDocumentSchema(getTestProtobufDataset("protobuf", "messageA"))); + } + + @Test + public void messageA() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(9, testMetadata.getFields().size()); + + assertEquals("MessageA", extractAspect(test.getDatasetMCPs().get(0), "name")); + assertEquals("protobuf.MessageA", extractAspect(test.getDatasetMCPs().get(0), "qualifiedName")); + + assertEquals( + "platform.topic", extractCustomProperty(test.getDatasetMCPs().get(0), "kafka_topic")); + + assertEquals( + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + new InstitutionalMemoryMetadata() + .setDescription("Github Team") + .setCreateStamp(TEST_AUDIT_STAMP) + .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), + new InstitutionalMemoryMetadata() + .setDescription("Slack Channel") + .setCreateStamp(TEST_AUDIT_STAMP) + .setUrl( + new Url( + "https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 1") + .setUrl(new Url("https://some/link")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 2") + .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 3") + .setUrl(new Url("https://github.com/apache/kafka")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA.map_field Reference 1") + .setUrl( + new Url( + "https://developers.google.com/protocol-buffers/docs/proto3#maps")))) + .data(), + test.getDatasetMCPs().get(1).getAspect().data()); + + assertEquals( + new Status().setRemoved(false).data(), + test.getDatasetMCPs().get(test.getDatasetMCPs().size() - 1).getAspect().data()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id") + .setType( + 
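
The rewrapped noSchemaTest above doubles as the canonical builder recipe for ProtobufDataset. As a hedged, self-contained sketch: the descriptor-set path below is a placeholder (any output of `protoc --descriptor_set_out` works), while the builder calls and accessors are taken from the test code itself.

    import com.linkedin.common.AuditStamp;
    import com.linkedin.common.FabricType;
    import com.linkedin.common.urn.CorpuserUrn;
    import com.linkedin.common.urn.DataPlatformUrn;
    import datahub.protobuf.ProtobufDataset;
    import java.io.FileInputStream;

    public class DatasetBuilderSketch {
      public static void main(String[] args) throws Exception {
        ProtobufDataset dataset =
            ProtobufDataset.builder()
                .setDataPlatformUrn(new DataPlatformUrn("kafka"))
                // Placeholder path to a compiled protobuf descriptor set.
                .setProtocIn(new FileInputStream("build/messageA.protoc"))
                .setAuditStamp(
                    new AuditStamp()
                        .setTime(System.currentTimeMillis())
                        .setActor(new CorpuserUrn("datahub")))
                .setFabricType(FabricType.DEV)
                .build();

        // The same accessors the tests assert against.
        System.out.println(dataset.getDatasetUrn());
        System.out.println(dataset.getDatasetMCPs().size());
      }
    }
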
new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))) + .setNativeDataType("bytes") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Leading single line comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].position") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("uint32") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Leading multiline comment\nSecond line of leading multiline comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=int].position")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].total") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("uint32") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Detached comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=int].total")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num") + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("uint64") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Test repeated and trailing comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str") + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("string") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + 
"[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str")) + .findFirst() + .orElseThrow()); + } + + @Test + public void messageB() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(24, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].id") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("google.protobuf.Int64Value") + .setDescription("wrapped int64") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=long].id")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BooleanType()))) + .setNativeDataType("google.protobuf.BoolValue") + .setDescription("Indicator") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=string].value") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("message value") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=string].value")) + .findFirst() + .orElseThrow()); + } + + @Test + public void messageC() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(4, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) + .setNativeDataType("oneof") + .setDescription("one of field comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new 
GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("one of string comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("int32") + .setDescription("one of int comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")) + .findFirst() + .orElseThrow()); + } + + @Test + @SuppressWarnings("LineLength") + public void messageC2NestedOneOf() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC2"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC1,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(6, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageC2") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("protobuf.MessageC3") + .setDescription("") + .setGlobalTags(new 
GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) + .setNativeDataType("oneof") + .setDescription("one of field comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("one of string comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + 
.setNativeDataType("int32") + .setDescription("one of int comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int")) + .findFirst() + .orElseThrow()); + } + + @Test + public void customFieldVisitors() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); + + test.setFieldVisitor( + new ProtobufModelVisitor<Pair<SchemaField, Double>>() { + @Override + public Stream<Pair<SchemaField, Double>> visitField( + ProtobufField field, VisitContext context) { + if (field.fullName().equals("protobuf.MessageA.sequence_id")) { + return Stream.of( + Pair.of( + new SchemaField() + .setDescription("my comment") + .setNativeDataType("my type") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType()))), + 0d)); + } else { + return Stream.of(); } + } }); - assertEquals(1, test.getSchemaMetadata().getFields().size()); - assertEquals(new SchemaField() - .setDescription("my comment") - .setNativeDataType("my type") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))), - test.getSchemaMetadata().getFields().get(0)); - } - - @Test - public void duplicateNested() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageA") - .setDescription("nested message a") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageA") - .setDescription("nested message a second time") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested")).findFirst().orElseThrow()); - - Set<String> firstNested = testMetadata.getFields().stream().map(SchemaField::getFieldPath) - .filter(f -> f.contains(".nested")) - .collect(Collectors.toSet()); - 
Set<String> secondNested = testMetadata.getFields().stream().map(SchemaField::getFieldPath) - .filter(f -> f.contains(".secondary_nested")) - .collect(Collectors.toSet()); - - assertEquals(firstNested.size(), secondNested.size()); - assertEquals(firstNested.stream().map(s -> s.replace(".nested", ".secondary_nested")).collect(Collectors.toSet()), secondNested); - } - - @Test - public void googleTimestamp() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].time") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("google.protobuf.Timestamp") - .setDescription("google timestamp") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=long].time")).findFirst().orElseThrow()); - } + assertEquals(1, test.getSchemaMetadata().getFields().size()); + assertEquals( + new SchemaField() + .setDescription("my comment") + .setNativeDataType("my type") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType()))), + test.getSchemaMetadata().getFields().get(0)); + } + + @Test + public void duplicateNested() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageA") + .setDescription("nested message a") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageA") + .setDescription("nested message a second time") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + 
"[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested")) + .findFirst() + .orElseThrow()); + + Set<String> firstNested = + testMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .filter(f -> f.contains(".nested")) + .collect(Collectors.toSet()); + Set<String> secondNested = + testMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .filter(f -> f.contains(".secondary_nested")) + .collect(Collectors.toSet()); + + assertEquals(firstNested.size(), secondNested.size()); + assertEquals( + firstNested.stream() + .map(s -> s.replace(".nested", ".secondary_nested")) + .collect(Collectors.toSet()), + secondNested); + } + + @Test + public void googleTimestamp() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].time") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("google.protobuf.Timestamp") + .setDescription("google timestamp") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=long].time")) + .findFirst() + .orElseThrow()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java index 3a00edca8284a..e2599cb4c3f68 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java @@ -1,45 +1,47 @@ package datahub.protobuf; -import com.google.protobuf.DescriptorProtos; -import com.google.protobuf.ExtensionRegistry; -import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - -import java.io.IOException; - import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtoc; import static org.junit.jupiter.api.Assertions.*; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.ExtensionRegistry; +import datahub.protobuf.model.ProtobufGraph; +import java.io.IOException; +import org.junit.jupiter.api.Test; public class ProtobufUtilsTest { - @Test - public void registryTest() throws IOException, IllegalArgumentException { - byte[] protocBytes = getTestProtoc("extended_protobuf", "messageA").readAllBytes(); - DescriptorProtos.FileDescriptorSet fileSet = getTestProtobufFileSet("extended_protobuf", "messageA"); - ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet); - DescriptorProtos.FileDescriptorSet fileSetWithRegistry = DescriptorProtos.FileDescriptorSet.parseFrom(protocBytes, registry); - - assertNotEquals(fileSet, fileSetWithRegistry); - - /* - * - * Without the ExtensionRegistry we get field numbers instead of the names. 
- */ - ProtobufGraph graph = new ProtobufGraph(fileSet, null); - assertEquals("[meta.msg.classification_enum]: HighlyConfidential\n" - + "[meta.msg.team]: \"corpGroup:TeamB\"\n" - + "[meta.msg.team]: \"corpUser:datahub\"\n" - + "[meta.msg.technical_owner]: \"corpGroup:TechnicalOwner\"\n" - + "[meta.msg.domain]: \"Engineering\"\n" - + "[meta.msg.type]: ENTITY\n" - + "[meta.msg.bool_feature]: true\n" - + "[meta.msg.alert_channel]: \"#alerts\"\n" - + "[meta.msg.tag_list]: \"a, b, c\"\n" - + "[meta.msg.repeat_string]: \"a\"\n" - + "[meta.msg.repeat_string]: \"b\"\n" - + "[meta.msg.repeat_enum]: ENTITY\n" - + "[meta.msg.repeat_enum]: EVENT\n", graph.root().messageProto().getOptions().toString()); - } + @Test + public void registryTest() throws IOException, IllegalArgumentException { + byte[] protocBytes = getTestProtoc("extended_protobuf", "messageA").readAllBytes(); + DescriptorProtos.FileDescriptorSet fileSet = + getTestProtobufFileSet("extended_protobuf", "messageA"); + ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet); + DescriptorProtos.FileDescriptorSet fileSetWithRegistry = + DescriptorProtos.FileDescriptorSet.parseFrom(protocBytes, registry); + + assertNotEquals(fileSet, fileSetWithRegistry); + + /* + * + * Without the ExtensionRegistry we get field numbers instead of the names. + */ + ProtobufGraph graph = new ProtobufGraph(fileSet, null); + assertEquals( + "[meta.msg.classification_enum]: HighlyConfidential\n" + + "[meta.msg.team]: \"corpGroup:TeamB\"\n" + + "[meta.msg.team]: \"corpUser:datahub\"\n" + + "[meta.msg.technical_owner]: \"corpGroup:TechnicalOwner\"\n" + + "[meta.msg.domain]: \"Engineering\"\n" + + "[meta.msg.type]: ENTITY\n" + + "[meta.msg.bool_feature]: true\n" + + "[meta.msg.alert_channel]: \"#alerts\"\n" + + "[meta.msg.tag_list]: \"a, b, c\"\n" + + "[meta.msg.repeat_string]: \"a\"\n" + + "[meta.msg.repeat_string]: \"b\"\n" + + "[meta.msg.repeat_enum]: ENTITY\n" + + "[meta.msg.repeat_enum]: EVENT\n", + graph.root().messageProto().getOptions().toString()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java index 6859e7fee9a60..7ee69149cf9dd 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java @@ -11,72 +11,85 @@ import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.VisitContext; - import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.Objects; public class TestFixtures { - private TestFixtures() { } + private TestFixtures() {} - public static final DataPlatformUrn TEST_DATA_PLATFORM = new DataPlatformUrn("kafka"); - public static final AuditStamp TEST_AUDIT_STAMP = new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(new CorpuserUrn("datahub")); + public static final DataPlatformUrn TEST_DATA_PLATFORM = new DataPlatformUrn("kafka"); + public static final AuditStamp TEST_AUDIT_STAMP = + new AuditStamp().setTime(System.currentTimeMillis()).setActor(new CorpuserUrn("datahub")); - public static InputStream getTestProtoc(String protoPackage, String filename) { - return Objects.requireNonNull(TestFixtures.class.getClassLoader() - .getResourceAsStream(String.format("%s/%s.protoc", protoPackage, filename))); - } + 
public static InputStream getTestProtoc(String protoPackage, String filename) { + return Objects.requireNonNull( + TestFixtures.class + .getClassLoader() + .getResourceAsStream(String.format("%s/%s.protoc", protoPackage, filename))); + } - public static String getTestProtoSource(String protoPackage, String filename) throws IOException { - return new String(Objects.requireNonNull(TestFixtures.class.getClassLoader() - .getResourceAsStream(String.format("%s/%s.proto", protoPackage, filename))).readAllBytes(), - StandardCharsets.UTF_8); - } + public static String getTestProtoSource(String protoPackage, String filename) throws IOException { + return new String( + Objects.requireNonNull( + TestFixtures.class + .getClassLoader() + .getResourceAsStream(String.format("%s/%s.proto", protoPackage, filename))) + .readAllBytes(), + StandardCharsets.UTF_8); + } - public static ProtobufDataset getTestProtobufDataset(String protoPackage, String filename) throws IOException { - return ProtobufDataset.builder() - .setDataPlatformUrn(TEST_DATA_PLATFORM) - .setSchema(getTestProtoSource(protoPackage, filename)) - .setProtocIn(getTestProtoc(protoPackage, filename)) - .setAuditStamp(TEST_AUDIT_STAMP) - .setFabricType(FabricType.TEST) - .setGithubOrganization("myOrg") - .setSlackTeamId("SLACK123") - .build(); - } + public static ProtobufDataset getTestProtobufDataset(String protoPackage, String filename) + throws IOException { + return ProtobufDataset.builder() + .setDataPlatformUrn(TEST_DATA_PLATFORM) + .setSchema(getTestProtoSource(protoPackage, filename)) + .setProtocIn(getTestProtoc(protoPackage, filename)) + .setAuditStamp(TEST_AUDIT_STAMP) + .setFabricType(FabricType.TEST) + .setGithubOrganization("myOrg") + .setSlackTeamId("SLACK123") + .build(); + } - public static DescriptorProtos.FileDescriptorSet getTestProtobufFileSet(String protoPackage, String filename) throws IOException { - return DescriptorProtos.FileDescriptorSet - .parseFrom(getTestProtoc(protoPackage, filename).readAllBytes()); - } + public static DescriptorProtos.FileDescriptorSet getTestProtobufFileSet( + String protoPackage, String filename) throws IOException { + return DescriptorProtos.FileDescriptorSet.parseFrom( + getTestProtoc(protoPackage, filename).readAllBytes()); + } - public static VisitContext.VisitContextBuilder getVisitContextBuilder(String message) { - return VisitContext.builder() - .datasetUrn(new DatasetUrn(TEST_DATA_PLATFORM, message, FabricType.TEST)) - .auditStamp(TEST_AUDIT_STAMP); - } + public static VisitContext.VisitContextBuilder getVisitContextBuilder(String message) { + return VisitContext.builder() + .datasetUrn(new DatasetUrn(TEST_DATA_PLATFORM, message, FabricType.TEST)) + .auditStamp(TEST_AUDIT_STAMP); + } - public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename) throws IOException { - return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename)); - } + public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename) + throws IOException { + return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename)); + } - public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename, String messageName) throws IOException { - return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename), messageName); - } + public static ProtobufGraph getTestProtobufGraph( + String protoPackage, String filename, String messageName) throws IOException { + return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename), 
messageName); + } - public static Object extractAspect(MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String aspect) { - return mcp.getAspect().data().get(aspect); - } + public static Object extractAspect( + MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String aspect) { + return mcp.getAspect().data().get(aspect); + } - public static Object extractCustomProperty(MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String key) { - return ((DataMap) extractAspect(mcp, "customProperties")).get(key); - } + public static Object extractCustomProperty( + MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String key) { + return ((DataMap) extractAspect(mcp, "customProperties")).get(key); + } - public static String extractDocumentSchema(ProtobufDataset protobufDataset) { - return String.valueOf(((DataMap) ((DataMap) protobufDataset.getSchemaMetadata().getPlatformSchema().data()) - .get("com.linkedin.schema.KafkaSchema")).get("documentSchema")); - } + public static String extractDocumentSchema(ProtobufDataset protobufDataset) { + return String.valueOf( + ((DataMap) + ((DataMap) protobufDataset.getSchemaMetadata().getPlatformSchema().data()) + .get("com.linkedin.schema.KafkaSchema")) + .get("documentSchema")); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java index 7c98077690d66..fed9f250b359f 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java @@ -1,80 +1,87 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.EnumDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.EnumType; import com.linkedin.schema.SchemaFieldDataType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufEnumTest { - @Test - public void enumTest() { - EnumDescriptorProto expectedEnum = EnumDescriptorProto.newBuilder() - .setName("enum1") - .build(); - DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .addEnumType(expectedEnum) - .build(); - - ProtobufEnum test = ProtobufEnum.enumBuilder() - .enumProto(expectedEnum) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); + @Test + public void enumTest() { + EnumDescriptorProto expectedEnum = EnumDescriptorProto.newBuilder().setName("enum1").build(); + DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .addEnumType(expectedEnum) + .build(); - assertEquals("enum1", test.name()); - assertEquals("protobuf.enum1", test.fullName()); - assertEquals("[type=enum]", test.fieldPathType()); - assertEquals("enum", test.nativeType()); - assertEquals(expectedMessage, test.messageProto()); - 
assertEquals(expectedFile, test.fileProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), test.schemaFieldDataType()); - assertEquals("ProtobufEnum[protobuf.enum1]", test.toString()); - assertEquals("", test.comment()); - } + ProtobufEnum test = + ProtobufEnum.enumBuilder() + .enumProto(expectedEnum) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); - @Test - public void enumEqualityTest() { - EnumDescriptorProto enum1 = EnumDescriptorProto.newBuilder().setName("enum1").build(); - EnumDescriptorProto enum2 = EnumDescriptorProto.newBuilder().setName("enum2").build(); - EnumDescriptorProto enum1Dup = EnumDescriptorProto.newBuilder().setName("enum1").build(); + assertEquals("enum1", test.name()); + assertEquals("protobuf.enum1", test.fullName()); + assertEquals("[type=enum]", test.fieldPathType()); + assertEquals("enum", test.nativeType()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), + test.schemaFieldDataType()); + assertEquals("ProtobufEnum[protobuf.enum1]", test.toString()); + assertEquals("", test.comment()); + } - DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .addAllEnumType(List.of(enum1, enum2, enum1Dup)) - .build(); + @Test + public void enumEqualityTest() { + EnumDescriptorProto enum1 = EnumDescriptorProto.newBuilder().setName("enum1").build(); + EnumDescriptorProto enum2 = EnumDescriptorProto.newBuilder().setName("enum2").build(); + EnumDescriptorProto enum1Dup = EnumDescriptorProto.newBuilder().setName("enum1").build(); - ProtobufEnum test1 = ProtobufEnum.enumBuilder().enumProto(enum1) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); - ProtobufEnum test2 = ProtobufEnum.enumBuilder().enumProto(enum2) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); - ProtobufEnum test1Dup = ProtobufEnum.enumBuilder().enumProto(enum1Dup) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); + DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .addAllEnumType(List.of(enum1, enum2, enum1Dup)) + .build(); - assertEquals(test1, test1Dup); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); - } + ProtobufEnum test1 = + ProtobufEnum.enumBuilder() + .enumProto(enum1) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + ProtobufEnum test2 = + ProtobufEnum.enumBuilder() + .enumProto(enum2) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + ProtobufEnum test1Dup = + ProtobufEnum.enumBuilder() + .enumProto(enum1Dup) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + assertEquals(test1, test1Dup); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java index 
543b815f7f72b..6d4dc8bc4d585 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java @@ -1,10 +1,12 @@ package datahub.protobuf.model; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.data.template.StringArray; - import com.linkedin.schema.ArrayType; import com.linkedin.schema.BooleanType; import com.linkedin.schema.BytesType; @@ -12,257 +14,313 @@ import com.linkedin.schema.FixedType; import com.linkedin.schema.NumberType; import com.linkedin.schema.RecordType; -import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.SchemaMetadata; import com.linkedin.schema.StringType; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.Arrays; import java.util.Set; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufFieldTest { - private static final DescriptorProto EXPECTED_MESSAGE_PROTO = DescriptorProto.newBuilder() - .setName("message1") + private static final DescriptorProto EXPECTED_MESSAGE_PROTO = + DescriptorProto.newBuilder().setName("message1").build(); + private static final FileDescriptorProto EXPECTED_FILE_PROTO = + FileDescriptorProto.newBuilder() + .addMessageType(EXPECTED_MESSAGE_PROTO) + .setPackage("protobuf") + .build(); + private static final ProtobufMessage EXPECTED_MESSAGE = + ProtobufMessage.builder() + .messageProto(EXPECTED_MESSAGE_PROTO) + .fileProto(EXPECTED_FILE_PROTO) + .build(); + + @Test + public void fieldTest() { + FieldDescriptorProto expectedField = + FieldDescriptorProto.newBuilder() + .setName("field1") + .setNumber(1) + .setType(FieldDescriptorProto.Type.TYPE_BYTES) .build(); - private static final FileDescriptorProto EXPECTED_FILE_PROTO = FileDescriptorProto.newBuilder() - .addMessageType(EXPECTED_MESSAGE_PROTO) + DescriptorProto expectedMessage1 = + DescriptorProto.newBuilder().setName("message1").addField(expectedField).build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage1) .setPackage("protobuf") .build(); - private static final ProtobufMessage EXPECTED_MESSAGE = ProtobufMessage.builder() - .messageProto(EXPECTED_MESSAGE_PROTO) - .fileProto(EXPECTED_FILE_PROTO) - .build(); - - - @Test - public void fieldTest() { - FieldDescriptorProto expectedField = FieldDescriptorProto.newBuilder() - .setName("field1") - .setNumber(1) - .setType(FieldDescriptorProto.Type.TYPE_BYTES) - .build(); - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .addField(expectedField) - .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage1) - .setPackage("protobuf") - .build(); - ProtobufMessage expectedMessage = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile) - .build(); + ProtobufMessage expectedMessage = + 
ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile).build(); - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(expectedMessage) - .build(); + ProtobufField test = + ProtobufField.builder().fieldProto(expectedField).protobufMessage(expectedMessage).build(); - assertEquals("field1", test.name()); - assertEquals("protobuf.message1.field1", test.fullName()); - assertEquals("[type=bytes]", test.fieldPathType()); - assertEquals("protobuf.message1", test.parentMessageName()); - assertEquals(expectedMessage1, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertNull(test.oneOfProto()); - assertEquals("bytes", test.nativeType()); - assertFalse(test.isMessage()); - assertEquals(1, test.sortWeight()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), test.schemaFieldDataType()); - assertEquals("ProtobufField[protobuf.message1.field1]", test.toString()); - } + assertEquals("field1", test.name()); + assertEquals("protobuf.message1.field1", test.fullName()); + assertEquals("[type=bytes]", test.fieldPathType()); + assertEquals("protobuf.message1", test.parentMessageName()); + assertEquals(expectedMessage1, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertNull(test.oneOfProto()); + assertEquals("bytes", test.nativeType()); + assertFalse(test.isMessage()); + assertEquals(1, test.sortWeight()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), + test.schemaFieldDataType()); + assertEquals("ProtobufField[protobuf.message1.field1]", test.toString()); + } - @Test - public void fieldPathTypeTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void fieldPathTypeTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { + if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { assertEquals("[type=protobuf_message1]", test.fieldPathType()); - } else if (type.name().endsWith("64")) { + } else if (type.name().endsWith("64")) { assertEquals("[type=long]", test.fieldPathType()); - } else if (type.name().endsWith("32")) { + } else if (type.name().endsWith("32")) { assertEquals("[type=int]", test.fieldPathType()); - } else if (type.name().endsWith("BOOL")) { + } else if (type.name().endsWith("BOOL")) { assertEquals("[type=boolean]", test.fieldPathType()); - } else { - assertEquals(String.format("[type=%s]", type.name().split("_")[1].toLowerCase()), test.fieldPathType()); - } - }); - } + } else { + assertEquals( + 
String.format("[type=%s]", type.name().split("_")[1].toLowerCase()), + test.fieldPathType()); + } + }); + } - @Test - public void fieldPathTypeArrayTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; + @Test + public void fieldPathTypeArrayTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { + if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { assertEquals("[type=array].[type=protobuf_message1]", test.fieldPathType()); - } else if (type.name().endsWith("64")) { + } else if (type.name().endsWith("64")) { assertEquals("[type=array].[type=long]", test.fieldPathType()); - } else if (type.name().endsWith("32")) { + } else if (type.name().endsWith("32")) { assertEquals("[type=array].[type=int]", test.fieldPathType()); - } else if (type.name().endsWith("BOOL")) { + } else if (type.name().endsWith("BOOL")) { assertEquals("[type=array].[type=boolean]", test.fieldPathType()); - } else { - assertEquals(String.format("[type=array].[type=%s]", type.name().split("_")[1].toLowerCase()), test.fieldPathType()); - } - }); - } + } else { + assertEquals( + String.format( + "[type=array].[type=%s]", type.name().split("_")[1].toLowerCase()), + test.fieldPathType()); + } + }); + } - @Test - public void schemaFieldTypeTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void schemaFieldTypeTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), test.schemaFieldDataType()); - } 
else if (type.name().contains("FIXED")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new FixedType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("64") || type.name().endsWith("32") || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("BOOL")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BooleanType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("STRING")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("ENUM")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("BYTES")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), test.schemaFieldDataType()); - } else { + if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType())), + test.schemaFieldDataType()); + } else if (type.name().contains("FIXED")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new FixedType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("64") + || type.name().endsWith("32") + || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("BOOL")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BooleanType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("STRING")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("ENUM")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new EnumType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("BYTES")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType())), + test.schemaFieldDataType()); + } else { fail(String.format("Add test case for %s", type)); - } - }); - } + } + }); + } - @Test - public void schemaFieldTypeArrayTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void schemaFieldTypeArrayTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) 
.setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType() - .setNestedType(new StringArray()))), test.schemaFieldDataType()); - }); - } + assertEquals( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray()))), + test.schemaFieldDataType()); + }); + } - @Test - public void nestedTypeFieldTest() throws IOException { - ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageC"); - SchemaMetadata testMetadata = test.getSchemaMetadata(); + @Test + public void nestedTypeFieldTest() throws IOException { + ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageC"); + SchemaMetadata testMetadata = test.getSchemaMetadata(); - SchemaField nicknameField = testMetadata.getFields() - .stream() - .filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].nickname")) - .findFirst() - .orElseThrow(); + SchemaField nicknameField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].nickname")) + .findFirst() + .orElseThrow(); - assertEquals("nickname info", nicknameField.getDescription()); + assertEquals("nickname info", nicknameField.getDescription()); - SchemaField profileUrlField = testMetadata.getFields() - .stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].profile_url")) - .findFirst() - .orElseThrow(); + SchemaField profileUrlField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].profile_url")) + .findFirst() + .orElseThrow(); - assertEquals("profile url info", profileUrlField.getDescription()); + assertEquals("profile url info", profileUrlField.getDescription()); - SchemaField addressField = testMetadata.getFields() - .stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg]." + SchemaField addressField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg]." 
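
schemaFieldTypeTest above pins the protobuf-type-to-SchemaFieldDataType mapping with the same branch structure; a compact sketch, assuming a hypothetical helper dataTypeFor (branch order matters: the FIXED check must precede the 64/32 suffix checks, exactly as the test's assertions do):

import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
import com.linkedin.schema.*;
import java.util.Set;

public final class SchemaFieldTypes {
  private SchemaFieldTypes() {}

  // Each protobuf scalar family collapses onto one DataHub type wrapper,
  // following the branch order asserted in schemaFieldTypeTest.
  static SchemaFieldDataType dataTypeFor(FieldDescriptorProto.Type type) {
    final SchemaFieldDataType.Type mapped;
    if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) {
      mapped = SchemaFieldDataType.Type.create(new RecordType());
    } else if (type.name().contains("FIXED")) {
      mapped = SchemaFieldDataType.Type.create(new FixedType());
    } else if (type.name().endsWith("64")
        || type.name().endsWith("32")
        || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) {
      mapped = SchemaFieldDataType.Type.create(new NumberType());
    } else if (type.name().endsWith("BOOL")) {
      mapped = SchemaFieldDataType.Type.create(new BooleanType());
    } else if (type.name().endsWith("STRING")) {
      mapped = SchemaFieldDataType.Type.create(new StringType());
    } else if (type.name().endsWith("ENUM")) {
      mapped = SchemaFieldDataType.Type.create(new EnumType());
    } else { // TYPE_BYTES is the remaining case the test covers
      mapped = SchemaFieldDataType.Type.create(new BytesType());
    }
    return new SchemaFieldDataType().setType(mapped);
  }
}

For repeated fields, schemaFieldTypeArrayTest expects an ArrayType (carrying a StringArray nested-type holder) for every element type rather than the scalar mapping above.
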
+ "[type=extended_protobuf_UserMsg_AddressMsg].address.[type=google_protobuf_StringValue].zipcode")) - .findFirst() - .orElseThrow(); + .findFirst() + .orElseThrow(); - assertEquals("Zip code, alphanumeric", addressField.getDescription()); - } + assertEquals("Zip code, alphanumeric", addressField.getDescription()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java index 80ffafff3f451..488222b87766d 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java @@ -1,84 +1,99 @@ package datahub.protobuf.model; -import com.google.protobuf.DescriptorProtos.FileDescriptorSet; -import org.junit.jupiter.api.Test; +import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static org.junit.jupiter.api.Assertions.*; +import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; public class ProtobufGraphTest { - @Test - public void autodetectRootMessageTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - assertEquals("MessageB", test.autodetectRootMessage( - fileset.getFileList().stream().filter(f -> f.getName().equals("protobuf/messageB.proto")).findFirst().get()).get().messageProto().getName()); - - assertEquals("MessageA", test.autodetectRootMessage( - fileset.getFileList().stream().filter(f -> f.getName().equals("protobuf/messageA.proto")).findFirst().get()).get().messageProto().getName()); - } - - @Test - public void autodetectRootMessageFailureTest() throws IOException { - FileDescriptorSet empty = getTestProtobufFileSet("protobuf", "messageEmpty"); - assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(empty)); - } - - @Test - public void findMessageTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - assertEquals("MessageA", - test.findMessage("protobuf.MessageA").messageProto().getName()); - assertEquals("MessageB", - test.findMessage("protobuf.MessageB").messageProto().getName()); - - assertThrows(IllegalArgumentException.class, () -> test.findMessage("not found")); - assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(fileset, "not found")); - assertEquals(test, new ProtobufGraph(fileset, "protobuf.MessageB")); - } - - @Test - public void commentTest() throws IOException { - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageC"); - assertEquals("Test for one of", test.getComment()); - } - - @Test - public void equalityHashCodeTest() throws IOException { - ProtobufGraph testA = getTestProtobufGraph("protobuf", "messageA"); - ProtobufGraph testB = getTestProtobufGraph("protobuf", "messageB"); - FileDescriptorSet filesetB = 
getTestProtobufFileSet("protobuf", "messageB"); - - assertEquals(testB, new ProtobufGraph(filesetB)); - assertNotEquals(testA, new ProtobufGraph(filesetB)); - assertEquals(testA, testA); - assertNotEquals(testA, testB); - - HashSet<ProtobufGraph> graphs = new HashSet<>(); - graphs.add(testA); - graphs.add(testB); - graphs.add(new ProtobufGraph(filesetB)); - assertEquals(2, graphs.size()); - } - - @Test - public void duplicateNestedTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - List<ProtobufElement> nestedMessages = test.vertexSet().stream().filter(f -> f.name().endsWith("nested")) - .collect(Collectors.toList()); - - assertEquals(2, nestedMessages.size(), "Expected 2 nested fields"); - } + @Test + public void autodetectRootMessageTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); + + assertEquals( + "MessageB", + test.autodetectRootMessage( + fileset.getFileList().stream() + .filter(f -> f.getName().equals("protobuf/messageB.proto")) + .findFirst() + .get()) + .get() + .messageProto() + .getName()); + + assertEquals( + "MessageA", + test.autodetectRootMessage( + fileset.getFileList().stream() + .filter(f -> f.getName().equals("protobuf/messageA.proto")) + .findFirst() + .get()) + .get() + .messageProto() + .getName()); + } + + @Test + public void autodetectRootMessageFailureTest() throws IOException { + FileDescriptorSet empty = getTestProtobufFileSet("protobuf", "messageEmpty"); + assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(empty)); + } + + @Test + public void findMessageTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); + + assertEquals("MessageA", test.findMessage("protobuf.MessageA").messageProto().getName()); + assertEquals("MessageB", test.findMessage("protobuf.MessageB").messageProto().getName()); + + assertThrows(IllegalArgumentException.class, () -> test.findMessage("not found")); + assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(fileset, "not found")); + assertEquals(test, new ProtobufGraph(fileset, "protobuf.MessageB")); + } + + @Test + public void commentTest() throws IOException { + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageC"); + assertEquals("Test for one of", test.getComment()); + } + + @Test + public void equalityHashCodeTest() throws IOException { + ProtobufGraph testA = getTestProtobufGraph("protobuf", "messageA"); + ProtobufGraph testB = getTestProtobufGraph("protobuf", "messageB"); + FileDescriptorSet filesetB = getTestProtobufFileSet("protobuf", "messageB"); + + assertEquals(testB, new ProtobufGraph(filesetB)); + assertNotEquals(testA, new ProtobufGraph(filesetB)); + assertEquals(testA, testA); + assertNotEquals(testA, testB); + + HashSet<ProtobufGraph> graphs = new HashSet<>(); + graphs.add(testA); + graphs.add(testB); + graphs.add(new ProtobufGraph(filesetB)); + assertEquals(2, graphs.size()); + } + + @Test + public void duplicateNestedTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); + + List<ProtobufElement> nestedMessages = + test.vertexSet().stream() + .filter(f -> f.name().endsWith("nested")) + 
.collect(Collectors.toList()); + + assertEquals(2, nestedMessages.size(), "Expected 2 nested fields"); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java index e961b6ffd2d61..1d6b3907d76d9 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java @@ -1,180 +1,168 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.MapType; import com.linkedin.schema.RecordType; import com.linkedin.schema.SchemaFieldDataType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufMessageTest { - @Test - public void messageTest() { - DescriptorProto expectedMessage = DescriptorProto.newBuilder() - .setName("message1") - .build(); - DescriptorProto expectedParentMessage1 = DescriptorProto.newBuilder() - .setName("messageParent1") - .addNestedType(expectedMessage) - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .build(); - - ProtobufMessage testParent = ProtobufMessage.builder() - .messageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage test = ProtobufMessage.builder() - .messageProto(expectedMessage) - .parentMessageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - - assertEquals("messageParent1", testParent.name()); - assertEquals("protobuf.messageParent1", testParent.fullName()); - assertEquals("protobuf.messageParent1", testParent.nativeType()); - assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); - assertEquals(expectedFile, testParent.fileProto()); - assertEquals(expectedParentMessage1, testParent.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), testParent.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); - - assertEquals("message1", test.name()); - assertEquals("protobuf.messageParent1.message1", test.fullName()); - assertEquals("protobuf.messageParent1.message1", test.nativeType()); - assertEquals("[type=protobuf_messageParent1_message1]", test.fieldPathType()); - assertEquals(expectedFile, test.fileProto()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), test.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1.message1]", test.toString()); - } - - @Test - public void mapTest() { - DescriptorProto expectedMap = DescriptorProto.newBuilder() - .setName("MapFieldEntry") - .build(); - DescriptorProto expectedParentMessage1 = DescriptorProto.newBuilder() - .setName("messageParent1") - .addNestedType(expectedMap) - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMap) - 
.setPackage("protobuf") - .build(); - - ProtobufMessage testParent = ProtobufMessage.builder() - .messageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage testMap = ProtobufMessage.builder() - .messageProto(expectedMap) - .parentMessageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - - assertEquals("messageParent1", testParent.name()); - assertEquals("protobuf.messageParent1", testParent.fullName()); - assertEquals("protobuf.messageParent1", testParent.nativeType()); - assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); - assertEquals(expectedFile, testParent.fileProto()); - assertEquals(expectedParentMessage1, testParent.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), testParent.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); - - assertEquals("MapFieldEntry", testMap.name()); - assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.fullName()); - assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.nativeType()); - assertEquals("[type=protobuf_messageParent1_MapFieldEntry]", testMap.fieldPathType()); - assertEquals(expectedFile, testMap.fileProto()); - assertEquals(expectedMap, testMap.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())), testMap.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1.MapFieldEntry]", testMap.toString()); - } - - @Test - public void messageEqualityTest() { - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .build(); - DescriptorProto expectedMessage2 = DescriptorProto.newBuilder() - .setName("message2") - .build(); - DescriptorProto expectedMessage1Dup = DescriptorProto.newBuilder() - .setName("message1") - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addAllMessageType(List.of(expectedMessage1, expectedMessage2, expectedMessage1Dup)) - .setPackage("protobuf") - .build(); - - - ProtobufMessage test1 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage test2 = ProtobufMessage.builder() - .messageProto(expectedMessage2) - .fileProto(expectedFile) - .build(); - ProtobufMessage test1Dup = ProtobufMessage.builder() - .messageProto(expectedMessage1Dup) - .fileProto(expectedFile) - .build(); - - assertEquals(test1, test1Dup); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); - } - - @Test - public void majorVersionTest() { - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .build(); - - FileDescriptorProto expectedFile1 = FileDescriptorProto.newBuilder() - .setName("zendesk/v1/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test1 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile1) - .build(); - assertEquals(1, test1.majorVersion()); - - FileDescriptorProto expectedFile2 = FileDescriptorProto.newBuilder() - .setName("zendesk/v2/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test2 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile2) - .build(); - assertEquals(2, test2.majorVersion()); - - FileDescriptorProto expectedFile3 = FileDescriptorProto.newBuilder() - 
.setName("zendesk/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test3 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile3) - .build(); - assertEquals(1, test3.majorVersion()); - } + @Test + public void messageTest() { + DescriptorProto expectedMessage = DescriptorProto.newBuilder().setName("message1").build(); + DescriptorProto expectedParentMessage1 = + DescriptorProto.newBuilder() + .setName("messageParent1") + .addNestedType(expectedMessage) + .build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .build(); + + ProtobufMessage testParent = + ProtobufMessage.builder() + .messageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + ProtobufMessage test = + ProtobufMessage.builder() + .messageProto(expectedMessage) + .parentMessageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + + assertEquals("messageParent1", testParent.name()); + assertEquals("protobuf.messageParent1", testParent.fullName()); + assertEquals("protobuf.messageParent1", testParent.nativeType()); + assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); + assertEquals(expectedFile, testParent.fileProto()); + assertEquals(expectedParentMessage1, testParent.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + testParent.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); + + assertEquals("message1", test.name()); + assertEquals("protobuf.messageParent1.message1", test.fullName()); + assertEquals("protobuf.messageParent1.message1", test.nativeType()); + assertEquals("[type=protobuf_messageParent1_message1]", test.fieldPathType()); + assertEquals(expectedFile, test.fileProto()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + test.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1.message1]", test.toString()); + } + + @Test + public void mapTest() { + DescriptorProto expectedMap = DescriptorProto.newBuilder().setName("MapFieldEntry").build(); + DescriptorProto expectedParentMessage1 = + DescriptorProto.newBuilder().setName("messageParent1").addNestedType(expectedMap).build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder().addMessageType(expectedMap).setPackage("protobuf").build(); + + ProtobufMessage testParent = + ProtobufMessage.builder() + .messageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + ProtobufMessage testMap = + ProtobufMessage.builder() + .messageProto(expectedMap) + .parentMessageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + + assertEquals("messageParent1", testParent.name()); + assertEquals("protobuf.messageParent1", testParent.fullName()); + assertEquals("protobuf.messageParent1", testParent.nativeType()); + assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); + assertEquals(expectedFile, testParent.fileProto()); + assertEquals(expectedParentMessage1, testParent.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + testParent.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); + + assertEquals("MapFieldEntry", 
testMap.name()); + assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.fullName()); + assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.nativeType()); + assertEquals("[type=protobuf_messageParent1_MapFieldEntry]", testMap.fieldPathType()); + assertEquals(expectedFile, testMap.fileProto()); + assertEquals(expectedMap, testMap.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())), + testMap.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1.MapFieldEntry]", testMap.toString()); + } + + @Test + public void messageEqualityTest() { + DescriptorProto expectedMessage1 = DescriptorProto.newBuilder().setName("message1").build(); + DescriptorProto expectedMessage2 = DescriptorProto.newBuilder().setName("message2").build(); + DescriptorProto expectedMessage1Dup = DescriptorProto.newBuilder().setName("message1").build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addAllMessageType(List.of(expectedMessage1, expectedMessage2, expectedMessage1Dup)) + .setPackage("protobuf") + .build(); + + ProtobufMessage test1 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile).build(); + ProtobufMessage test2 = + ProtobufMessage.builder().messageProto(expectedMessage2).fileProto(expectedFile).build(); + ProtobufMessage test1Dup = + ProtobufMessage.builder().messageProto(expectedMessage1Dup).fileProto(expectedFile).build(); + + assertEquals(test1, test1Dup); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); + } + + @Test + public void majorVersionTest() { + DescriptorProto expectedMessage1 = DescriptorProto.newBuilder().setName("message1").build(); + + FileDescriptorProto expectedFile1 = + FileDescriptorProto.newBuilder() + .setName("zendesk/v1/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test1 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile1).build(); + assertEquals(1, test1.majorVersion()); + + FileDescriptorProto expectedFile2 = + FileDescriptorProto.newBuilder() + .setName("zendesk/v2/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test2 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile2).build(); + assertEquals(2, test2.majorVersion()); + + FileDescriptorProto expectedFile3 = + FileDescriptorProto.newBuilder() + .setName("zendesk/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test3 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile3).build(); + assertEquals(1, test3.majorVersion()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java index 438e0a79206bd..c8bd8a322aad5 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java @@ -1,121 +1,146 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import 
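
majorVersionTest above fixes the observable contract: a file named zendesk/v1/platform/test.proto reports major version 1, zendesk/v2/... reports 2, and a path with no version segment defaults to 1. The patch does not show the implementation, so the following is only an inferred sketch of logic consistent with those three cases:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class MajorVersion {
  private MajorVersion() {}

  // Assumed behavior, inferred solely from majorVersionTest: a /v<N>/ path
  // segment in the .proto file name selects the major version; files
  // without one default to version 1.
  private static final Pattern VERSION_SEGMENT = Pattern.compile("/v(\\d+)/");

  static int majorVersionOf(String protoFileName) {
    Matcher m = VERSION_SEGMENT.matcher(protoFileName);
    return m.find() ? Integer.parseInt(m.group(1)) : 1;
  }
}
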
com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.google.protobuf.DescriptorProtos.OneofDescriptorProto; import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.UnionType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufOneOfFieldTest { - @Test - public void oneOfTest() { - OneofDescriptorProto expectedOneOf = OneofDescriptorProto.newBuilder() - .setName("oneof1") - .build(); - FieldDescriptorProto expectedField = FieldDescriptorProto.newBuilder() - .setName("field1") - .setOneofIndex(0) - .build(); - DescriptorProto expectedMessage = DescriptorProto.newBuilder() - .setName("message1") - .addOneofDecl(expectedOneOf) - .addField(expectedField) - .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .build(); + @Test + public void oneOfTest() { + OneofDescriptorProto expectedOneOf = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + FieldDescriptorProto expectedField = + FieldDescriptorProto.newBuilder().setName("field1").setOneofIndex(0).build(); + DescriptorProto expectedMessage = + DescriptorProto.newBuilder() + .setName("message1") + .addOneofDecl(expectedOneOf) + .addField(expectedField) + .build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .build(); - ProtobufOneOfField test = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage).build()) - .build(); + ProtobufOneOfField test = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage) + .build()) + .build(); - assertEquals("oneof1", test.name()); - assertEquals("protobuf.message1.oneof1", test.fullName()); - assertEquals("[type=union]", test.fieldPathType()); - assertEquals("oneof", test.nativeType()); - assertEquals(expectedOneOf, test.oneOfProto()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertFalse(test.isMessage()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())), test.schemaFieldDataType()); - assertEquals("ProtobufOneOf[protobuf.message1.oneof1]", test.toString()); - } + assertEquals("oneof1", test.name()); + assertEquals("protobuf.message1.oneof1", test.fullName()); + assertEquals("[type=union]", test.fieldPathType()); + assertEquals("oneof", test.nativeType()); + assertEquals(expectedOneOf, test.oneOfProto()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertFalse(test.isMessage()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())), + test.schemaFieldDataType()); + assertEquals("ProtobufOneOf[protobuf.message1.oneof1]", test.toString()); + } - @Test - public void oneOfEqualityTest() { - OneofDescriptorProto oneof1Message1 = OneofDescriptorProto.newBuilder().setName("oneof1").build(); - OneofDescriptorProto oneof2Message1 = OneofDescriptorProto.newBuilder().setName("oneof2").build(); - OneofDescriptorProto oneof1Message2 = 
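
oneOfTest above establishes that a member field carrying a oneof index is surfaced as its enclosing oneof: the name and full name come from the OneofDescriptorProto ("oneof1", "protobuf.message1.oneof1"), the native type is the constant "oneof", the field-path token is "[type=union]", and the schema type is a union. The fixed schema type, exactly as the test asserts it:

import com.linkedin.schema.SchemaFieldDataType;
import com.linkedin.schema.UnionType;

public final class OneofTypes {
  private OneofTypes() {}

  // Every protobuf oneof collapses to this single union-typed schema type,
  // independent of the member fields' own scalar types.
  static final SchemaFieldDataType ONEOF_DATA_TYPE =
      new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()));
}
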
OneofDescriptorProto.newBuilder().setName("oneof1").build(); - OneofDescriptorProto oneof1Message1Dup = OneofDescriptorProto.newBuilder().setName("oneof1").build(); + @Test + public void oneOfEqualityTest() { + OneofDescriptorProto oneof1Message1 = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + OneofDescriptorProto oneof2Message1 = + OneofDescriptorProto.newBuilder().setName("oneof2").build(); + OneofDescriptorProto oneof1Message2 = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + OneofDescriptorProto oneof1Message1Dup = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); - FieldDescriptorProto expectedField1 = FieldDescriptorProto.newBuilder() - .setName("field1") - .setOneofIndex(0) - .build(); - FieldDescriptorProto expectedField2 = FieldDescriptorProto.newBuilder() - .setName("field2") - .setOneofIndex(1) - .build(); - FieldDescriptorProto expectedField1Dup = FieldDescriptorProto.newBuilder() - .setName("field3") - .setOneofIndex(3) - .build(); - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .addAllOneofDecl(List.of(oneof1Message1, oneof2Message1, oneof1Message1Dup)) - .addField(expectedField1) - .addField(expectedField2) - .addField(expectedField1Dup) - .build(); + FieldDescriptorProto expectedField1 = + FieldDescriptorProto.newBuilder().setName("field1").setOneofIndex(0).build(); + FieldDescriptorProto expectedField2 = + FieldDescriptorProto.newBuilder().setName("field2").setOneofIndex(1).build(); + FieldDescriptorProto expectedField1Dup = + FieldDescriptorProto.newBuilder().setName("field3").setOneofIndex(3).build(); + DescriptorProto expectedMessage1 = + DescriptorProto.newBuilder() + .setName("message1") + .addAllOneofDecl(List.of(oneof1Message1, oneof2Message1, oneof1Message1Dup)) + .addField(expectedField1) + .addField(expectedField2) + .addField(expectedField1Dup) + .build(); - FieldDescriptorProto expectedField3 = FieldDescriptorProto.newBuilder() - .setName("field3") - .setOneofIndex(0) - .build(); - DescriptorProto expectedMessage2 = DescriptorProto.newBuilder() - .setName("message2") - .addAllOneofDecl(List.of(oneof1Message2)) - .addField(expectedField3) - .build(); + FieldDescriptorProto expectedField3 = + FieldDescriptorProto.newBuilder().setName("field3").setOneofIndex(0).build(); + DescriptorProto expectedMessage2 = + DescriptorProto.newBuilder() + .setName("message2") + .addAllOneofDecl(List.of(oneof1Message2)) + .addField(expectedField3) + .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addAllMessageType(List.of(expectedMessage1, expectedMessage2)) - .setPackage("protobuf") - .build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addAllMessageType(List.of(expectedMessage1, expectedMessage2)) + .setPackage("protobuf") + .build(); - ProtobufOneOfField test1 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField1) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test1Dup = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField1) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test2 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField2) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test3 = 
ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField3) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage2).build()) - .build(); + ProtobufOneOfField test1 = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField1) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage1) + .build()) + .build(); + ProtobufOneOfField test1Dup = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField1) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage1) + .build()) + .build(); + ProtobufOneOfField test2 = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField2) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage1) + .build()) + .build(); + ProtobufOneOfField test3 = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField3) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage2) + .build()) + .build(); - assertEquals(test1, test1Dup); - assertNotEquals(test1, test3); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2, test3), Stream.of(test1, test2, test3, test1Dup).collect(Collectors.toSet())); - } + assertEquals(test1, test1Dup); + assertNotEquals(test1, test3); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2, test3), + Stream.of(test1, test2, test3, test1Dup).collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java index ceebefb3a207e..2fc5f3834a749 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java @@ -1,38 +1,43 @@ package datahub.protobuf.visitors; +import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import datahub.protobuf.model.FieldTypeEdge; import datahub.protobuf.model.ProtobufElement; import datahub.protobuf.model.ProtobufGraph; -import org.jgrapht.GraphPath; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.assertNotEquals; +import org.jgrapht.GraphPath; +import org.junit.jupiter.api.Test; public class VisitContextTest { - @Test - public void duplicateNestedTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageB"); - VisitContext test = VisitContext.builder().graph(graph).build(); - - List<ProtobufElement> nestedMessages = graph.vertexSet().stream().filter(f -> f.name().endsWith("nested")) - .collect(Collectors.toList()); - - List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsA = graph.getAllPaths(graph.root(), nestedMessages.get(0)); - List<GraphPath<ProtobufElement, 
FieldTypeEdge>> nestedPathsB = graph.getAllPaths(graph.root(), nestedMessages.get(1)); - assertNotEquals(nestedPathsA, nestedPathsB); - - Set<String> fieldPathsA = nestedPathsA.stream().map(test::getFieldPath).collect(Collectors.toSet()); - Set<String> fieldPathsB = nestedPathsB.stream().map(test::getFieldPath).collect(Collectors.toSet()); - assertNotEquals(fieldPathsA, fieldPathsB); - } + @Test + public void duplicateNestedTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageB"); + VisitContext test = VisitContext.builder().graph(graph).build(); + + List<ProtobufElement> nestedMessages = + graph.vertexSet().stream() + .filter(f -> f.name().endsWith("nested")) + .collect(Collectors.toList()); + + List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsA = + graph.getAllPaths(graph.root(), nestedMessages.get(0)); + List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsB = + graph.getAllPaths(graph.root(), nestedMessages.get(1)); + assertNotEquals(nestedPathsA, nestedPathsB); + + Set<String> fieldPathsA = + nestedPathsA.stream().map(test::getFieldPath).collect(Collectors.toSet()); + Set<String> fieldPathsB = + nestedPathsB.stream().map(test::getFieldPath).collect(Collectors.toSet()); + assertNotEquals(fieldPathsA, fieldPathsB); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java index fb51f42a6c759..de9a0f5ec4abe 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java @@ -1,56 +1,59 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.urn.DatasetUrn; import com.linkedin.data.template.RecordTemplate; -import org.junit.jupiter.api.Test; - +import datahub.event.MetadataChangeProposalWrapper; +import datahub.protobuf.ProtobufDataset; +import datahub.protobuf.visitors.ProtobufModelVisitor; +import datahub.protobuf.visitors.VisitContext; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - -import datahub.protobuf.ProtobufDataset; -import datahub.protobuf.visitors.ProtobufModelVisitor; -import datahub.protobuf.visitors.VisitContext; -import datahub.event.MetadataChangeProposalWrapper; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DatasetVisitorTest { - @Test - public void protocBase64Test() throws URISyntaxException, IOException { - String expected = "23454345452345233455"; - DatasetVisitor test = DatasetVisitor.builder().protocBase64(expected).build(); - - List<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> changes = - test.visitGraph( - VisitContext.builder() - .auditStamp(TEST_AUDIT_STAMP) - .datasetUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)")) - .graph(getTestProtobufGraph("protobuf", "messageA")).build() - ).collect(Collectors.toList()); - - assertEquals(expected, extractCustomProperty(changes.get(0), "protoc")); - } - - @Test - public void customDescriptionVisitors() throws IOException { - ProtobufDataset testDataset = getTestProtobufDataset("protobuf", "messageA"); - - DatasetVisitor test = DatasetVisitor.builder() - .descriptionVisitor(new ProtobufModelVisitor<String>() { - @Override - public Stream<String> visitGraph(VisitContext context) { - return Stream.of("Test Description"); - } + @Test + public void protocBase64Test() throws URISyntaxException, IOException { + String expected = "23454345452345233455"; + DatasetVisitor test = DatasetVisitor.builder().protocBase64(expected).build(); + + List<MetadataChangeProposalWrapper<? extends RecordTemplate>> changes = + test.visitGraph( + VisitContext.builder() + .auditStamp(TEST_AUDIT_STAMP) + .datasetUrn( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)")) + .graph(getTestProtobufGraph("protobuf", "messageA")) + .build()) + .collect(Collectors.toList()); + + assertEquals(expected, extractCustomProperty(changes.get(0), "protoc")); + } + + @Test + public void customDescriptionVisitors() throws IOException { + ProtobufDataset testDataset = getTestProtobufDataset("protobuf", "messageA"); + + DatasetVisitor test = + DatasetVisitor.builder() + .descriptionVisitor( + new ProtobufModelVisitor<String>() { + @Override + public Stream<String> visitGraph(VisitContext context) { + return Stream.of("Test Description"); + } }) - .build(); - testDataset.setDatasetVisitor(test); + .build(); + testDataset.setDatasetVisitor(test); - assertEquals("Test Description", extractAspect(testDataset.getDatasetMCPs().get(0), "description")); - } + assertEquals( + "Test Description", extractAspect(testDataset.getDatasetMCPs().get(0), "description")); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java index 4edc65b29d663..679048fb48a53 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java @@ -1,26 +1,27 @@ package datahub.protobuf.visitors.dataset; -import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import datahub.protobuf.model.ProtobufGraph; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DescriptionVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageC2", "protobuf.MessageC2"); + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("protobuf", 
"messageC2", "protobuf.MessageC2"); - DescriptionVisitor test = new DescriptionVisitor(); + DescriptionVisitor test = new DescriptionVisitor(); - assertEquals(Set.of("This contains nested type\n\nDescription for MessageC2"), - graph.accept(getVisitContextBuilder("protobuf.MessageC2"), List.of(test)).collect(Collectors.toSet())); - } + assertEquals( + Set.of("This contains nested type\n\nDescription for MessageC2"), + graph + .accept(getVisitContextBuilder("protobuf.MessageC2"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java index b3fa2c8fd081b..c24fc30766f0e 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java @@ -1,28 +1,29 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DomainVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); - DomainVisitor test = new DomainVisitor(); + DomainVisitor test = new DomainVisitor(); - assertEquals(Set.of(Urn.createFromTuple("domain", "engineering")), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)).collect(Collectors.toSet())); - } + assertEquals( + Set.of(Urn.createFromTuple("domain", "engineering")), + graph + .accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java index 09fc0a3765436..a57916441bfcb 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java @@ -1,68 +1,70 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import 
java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class InstitutionalMemoryVisitorTest { - @Test - public void messageATest() throws IOException { - InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); - assertEquals(Set.of(new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("Slack Channel") - .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("Github Team") - .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 1") - .setUrl(new Url("https://some/link")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 2") - .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 3") - .setUrl(new Url("https://github.com/apache/kafka")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA.map_field Reference 1") - .setUrl(new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps")) - ), - - getTestProtobufGraph("protobuf", "messageA") - .accept(getVisitContextBuilder("protobuf.MessageA"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void messageATest() throws IOException { + InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); + assertEquals( + Set.of( + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("Slack Channel") + .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("Github Team") + .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 1") + .setUrl(new Url("https://some/link")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 2") + .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 3") + .setUrl(new Url("https://github.com/apache/kafka")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA.map_field Reference 1") + .setUrl( + new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps"))), + getTestProtobufGraph("protobuf", "messageA") + .accept(getVisitContextBuilder("protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } - @Test - public void messageBTest() throws IOException { - InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); - assertEquals(Set.of(), - getTestProtobufGraph("protobuf", "messageB") - .accept(getVisitContextBuilder("protobuf.MessageB"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void messageBTest() throws IOException { + InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); + assertEquals( + Set.of(), 
+ getTestProtobufGraph("protobuf", "messageB") + .accept(getVisitContextBuilder("protobuf.MessageB"), List.of(test)) + .collect(Collectors.toSet())); + } - @Test - public void messageCTest() throws IOException { - InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); - assertEquals(Set.of(), getTestProtobufGraph("protobuf", "messageC") - .accept(getVisitContextBuilder("protobuf.MessageC"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void messageCTest() throws IOException { + InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); + assertEquals( + Set.of(), + getTestProtobufGraph("protobuf", "messageC") + .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java index 971500b5f43a2..5f8572cf6ddd8 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java @@ -1,36 +1,39 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class KafkaTopicPropertyVisitorTest { - @Test - public void visitorTest() throws IOException { - KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); - assertEquals(List.of(new DatasetProperties() - .setCustomProperties(new StringMap(Map.of("kafka_topic", "platform.topic")))), - getTestProtobufGraph("protobuf", "messageA") - .accept(getVisitContextBuilder("MessageB"), - List.of(test)).collect(Collectors.toList())); - } + @Test + public void visitorTest() throws IOException { + KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); + assertEquals( + List.of( + new DatasetProperties() + .setCustomProperties(new StringMap(Map.of("kafka_topic", "platform.topic")))), + getTestProtobufGraph("protobuf", "messageA") + .accept(getVisitContextBuilder("MessageB"), List.of(test)) + .collect(Collectors.toList())); + } - @Test - public void visitorEmptyTest() throws IOException { - KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); - assertEquals(Set.of(), getTestProtobufGraph("protobuf", "messageB") - .accept(getVisitContextBuilder("MessageB"), List.of(test)).collect(Collectors.toSet())); - } + @Test + public void visitorEmptyTest() throws IOException { + KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); + assertEquals( + Set.of(), + getTestProtobufGraph("protobuf", "messageB") + .accept(getVisitContextBuilder("MessageB"), List.of(test)) + 
.collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java index b087c683f9ffe..1b0aff28eb517 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java @@ -1,58 +1,62 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class OwnershipVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); - - OwnershipVisitor test = new OwnershipVisitor(); - - assertEquals(Set.of(new Owner() - .setType(OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpGroup", "teamb")), - new Owner() - .setType(OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpuser", "datahub")), - new Owner() - .setType(OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpGroup", "technicalowner")) - ), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)).collect(Collectors.toSet())); - } - - @Test - public void visitorSingleOwnerTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageB"); - - OwnershipVisitor test = new OwnershipVisitor(); - - assertEquals(Set.of(new Owner() - .setType(OwnershipType.DATA_STEWARD) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpuser", "datahub")) - ), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageB"), List.of(test)).collect(Collectors.toSet())); - } + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); + + OwnershipVisitor test = new OwnershipVisitor(); + + assertEquals( + Set.of( + new Owner() + .setType(OwnershipType.TECHNICAL_OWNER) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpGroup", "teamb")), + new Owner() + .setType(OwnershipType.TECHNICAL_OWNER) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpuser", "datahub")), + new Owner() + 
.setType(OwnershipType.TECHNICAL_OWNER) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpGroup", "technicalowner"))), + graph + .accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } + + @Test + public void visitorSingleOwnerTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageB"); + + OwnershipVisitor test = new OwnershipVisitor(); + + assertEquals( + Set.of( + new Owner() + .setType(OwnershipType.DATA_STEWARD) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpuser", "datahub"))), + graph + .accept(getVisitContextBuilder("extended_protobuf.MessageB"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java index dc3647cdf34c8..13912100f28a5 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java @@ -1,58 +1,68 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static java.util.Map.entry; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static java.util.Map.entry; -import static org.junit.jupiter.api.Assertions.assertEquals; +public class PropertyVisitorTest { + @Test + public void extendedMessageTest() throws IOException { + PropertyVisitor test = new PropertyVisitor(); -public class PropertyVisitorTest { + List<DatasetProperties> actual = + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .collect(Collectors.toList()); + + assertEquals( + List.of( + new DatasetProperties() + .setCustomProperties( + new StringMap( + Map.ofEntries( + entry("classification_enum", "HighlyConfidential"), + entry("bool_feature", "true"), + entry("alert_channel", "#alerts"), + entry("repeat_enum", "[\"ENTITY\",\"EVENT\"]"), + entry("team", "[\"corpGroup:TeamB\",\"corpUser:datahub\"]"), + entry("technical_owner", "[\"corpGroup:TechnicalOwner\"]"), + entry("tag_list", "a, b, c"), + entry("domain", "Engineering"), + entry("repeat_string", "[\"a\",\"b\"]"), + entry("type", "ENTITY"))))), + actual); + } + + @Test + public void extendedFieldTest() throws IOException { + PropertyVisitor test = new PropertyVisitor(); + List<DatasetProperties> actual = + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .collect(Collectors.toList()); - @Test - public void extendedMessageTest() throws IOException { - PropertyVisitor test = new 
PropertyVisitor(); - - List<DatasetProperties> actual = getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), - List.of(test)).collect(Collectors.toList()); - - assertEquals(List.of( - new DatasetProperties().setCustomProperties(new StringMap(Map.ofEntries( - entry("classification_enum", "HighlyConfidential"), - entry("bool_feature", "true"), - entry("alert_channel", "#alerts"), - entry("repeat_enum", "[\"ENTITY\",\"EVENT\"]"), - entry("team", "[\"corpGroup:TeamB\",\"corpUser:datahub\"]"), - entry("technical_owner", "[\"corpGroup:TechnicalOwner\"]"), - entry("tag_list", "a, b, c"), - entry("domain", "Engineering"), - entry("repeat_string", "[\"a\",\"b\"]"), - entry("type", "ENTITY"))))), - actual); - } - - @Test - public void extendedFieldTest() throws IOException { - PropertyVisitor test = new PropertyVisitor(); - List<DatasetProperties> actual = getTestProtobufGraph("extended_protobuf", "messageB") - .accept(getVisitContextBuilder("extended_protobuf.Person"), - List.of(test)).collect(Collectors.toList()); - - assertEquals(List.of(new DatasetProperties() - .setCustomProperties(new StringMap(Map.ofEntries( - entry("data_steward", "corpUser:datahub"), - entry("deprecated", "true"), - entry("deprecation_note", "[\"Deprecated for this other message.\",\"Drop in replacement.\"]"), - entry("deprecation_time", "1649689387") - )))), actual); - } + assertEquals( + List.of( + new DatasetProperties() + .setCustomProperties( + new StringMap( + Map.ofEntries( + entry("data_steward", "corpUser:datahub"), + entry("deprecated", "true"), + entry( + "deprecation_note", + "[\"Deprecated for this other message.\",\"Drop in replacement.\"]"), + entry("deprecation_time", "1649689387"))))), + actual); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java index c140a798ef6e6..f734c00bb76e0 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java @@ -1,42 +1,42 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.GlossaryTermUrn; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class TermAssociationVisitorTest { - @Test - public void extendedMessageTest() throws IOException { - TermAssociationVisitor test = new TermAssociationVisitor(); - assertEquals(Set.of( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("a")), - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("b")), - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.ENTITY")), - new GlossaryTermAssociation().setUrn(new 
GlossaryTermUrn("MetaEnumExample.EVENT")), - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - ), - getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void extendedMessageTest() throws IOException { + TermAssociationVisitor test = new TermAssociationVisitor(); + assertEquals( + Set.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("a")), + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("b")), + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.ENTITY")), + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.EVENT")), + new GlossaryTermAssociation() + .setUrn(new GlossaryTermUrn("Classification.HighlyConfidential"))), + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .collect(Collectors.toSet())); + } - @Test - public void extendedFieldTest() throws IOException { - TermAssociationVisitor test = new TermAssociationVisitor(); - assertEquals( - Set.of(), - getTestProtobufGraph("extended_protobuf", "messageB"). - accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)).collect(Collectors.toSet())); - } + @Test + public void extendedFieldTest() throws IOException { + TermAssociationVisitor test = new TermAssociationVisitor(); + assertEquals( + Set.of(), + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java index 57a8cf1d63cd2..eec397011a4ce 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java @@ -1,5 +1,8 @@ package datahub.protobuf.visitors.field; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -15,207 +18,303 @@ import com.linkedin.schema.StringType; import com.linkedin.util.Pair; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class ProtobufExtensionFieldVisitorTest { - @Test - public void extendedMessageTest() throws IOException, URISyntaxException { - ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); - List<SchemaField> actual = getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - 
.map(Pair::getFirst) - .collect(Collectors.toList()); + @Test + public void extendedMessageTest() throws IOException, URISyntaxException { + ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); + List<SchemaField> actual = + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - List<SchemaField> expected = Stream.of( + List<SchemaField> expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 2), + new SchemaField() + .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].email") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 3), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].email") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + 
.setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 3), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("extended_protobuf.Department") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - new TagAssociation().setTag(new TagUrn("MetaEnumExample.ENTITY")) - ))) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.Sensitive")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("extended_protobuf.Department") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + new TagAssociation() + .setTag(new TagUrn("MetaEnumExample.ENTITY"))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn("Classification.Sensitive")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4) - ).map(Pair::getFirst).collect(Collectors.toList()); - + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + 
.setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - assertEquals(expected, actual); - } + assertEquals(expected, actual); + } - @Test - public void extendedFieldTest() throws IOException { - ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); - List<SchemaField> actual = getTestProtobufGraph("extended_protobuf", "messageB") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList()); + @Test + public void extendedFieldTest() throws IOException { + ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); + List<SchemaField> actual = + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - List<SchemaField> expected = Stream.of( + List<SchemaField> expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("person name") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("person name") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn( + "Classification.HighlyConfidential")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") - .setNullable(false) - .setIsPartOfKey(true) - .setDescription("unique identifier for a given person") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 2), + new SchemaField() + .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") + .setNullable(false) + .setIsPartOfKey(true) + .setDescription("unique identifier for a given person") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + 
.setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].email") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("official email address") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 3), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].email") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("official email address") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn( + "Classification.HighlyConfidential")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 3), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("department name of the person") - .setNativeDataType("extended_protobuf.Department") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("department name of the person") + .setNativeDataType("extended_protobuf.Department") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") - .setNullable(false) - .setIsPartOfKey(true) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") + .setNullable(false) + .setIsPartOfKey(true) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new 
GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].test_coverage") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - new TagAssociation().setTag(new TagUrn("MetaEnumExample.EVENT")), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].test_coverage") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + new TagAssociation() + .setTag(new TagUrn("MetaEnumExample.EVENT")), new TagAssociation().setTag(new TagUrn("d")), new TagAssociation().setTag(new TagUrn("deprecated")), new TagAssociation().setTag(new TagUrn("e")), new TagAssociation().setTag(new TagUrn("f")), - new TagAssociation().setTag(new TagUrn("product_type.my type")), - new TagAssociation().setTag(new TagUrn("product_type_bool")) - ))) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 5) - ).map(Pair::getFirst).collect(Collectors.toList()); + new TagAssociation() + .setTag(new TagUrn("product_type.my type")), + new TagAssociation() + .setTag(new TagUrn("product_type_bool"))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 5)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - assertEquals(expected, actual); - } + assertEquals(expected, actual); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java index 1da29b5320637..af31a80d3b53a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java +++ 
b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java @@ -1,5 +1,9 @@ package datahub.protobuf.visitors.field; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.schema.NumberType; import com.linkedin.schema.SchemaField; import com.linkedin.schema.SchemaFieldDataType; @@ -7,62 +11,73 @@ import com.linkedin.schema.UnionType; import com.linkedin.util.Pair; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class SchemaFieldVisitorTest { - @Test - public void visitorTest() throws IOException { - List<SchemaField> expected = Stream.of( + @Test + public void visitorTest() throws IOException { + List<SchemaField> expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") - .setNullable(true) - .setDescription("one of field comment") - .setNativeDataType("oneof") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") + .setNullable(true) + .setDescription("one of field comment") + .setNativeDataType("oneof") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new UnionType()))), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") - .setNullable(true) - .setDescription("one of string comment") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") + .setNullable(true) + .setDescription("one of string comment") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") - .setNullable(true) - .setDescription("one of int comment") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))), - 2), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") + .setNullable(true) + .setDescription("one of int comment") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=string].normal") - .setNullable(true) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))), - 4) - 
).map(Pair::getFirst).collect(Collectors.toList()); + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=string].normal") + .setNullable(true) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))), + 4)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - SchemaFieldVisitor test = new SchemaFieldVisitor(); - assertEquals(expected, getTestProtobufGraph("protobuf", "messageC") - .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test)) - .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList())); - } + SchemaFieldVisitor test = new SchemaFieldVisitor(); + assertEquals( + expected, + getTestProtobufGraph("protobuf", "messageC") + .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java index 84ab1312a7d8a..258d816d9d1da 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java @@ -1,89 +1,69 @@ package datahub.protobuf.visitors.tag; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.tag.TagProperties; -import datahub.protobuf.visitors.tags.TagVisitor; import datahub.event.MetadataChangeProposalWrapper; -import org.junit.jupiter.api.Test; - +import datahub.protobuf.visitors.tags.TagVisitor; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class TagVisitorTest { - @Test - public void extendedMessageTest() throws IOException { - TagVisitor test = new TagVisitor(); - assertEquals(Set.of( - new TagProperties() - .setName("bool_feature") - .setDescription("meta.msg.bool_feature is true."), - new TagProperties() - .setName("MetaEnumExample.ENTITY") - .setDescription("Enum MetaEnumExample.ENTITY of {UNKNOWN, ENTITY, EVENT}"), - new TagProperties() - .setName("MetaEnumExample.EVENT") - .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), - new TagProperties() - .setName("a") - .setDescription("meta.msg.tag_list"), - new TagProperties() - .setName("b") - .setDescription("meta.msg.tag_list"), - new TagProperties() - .setName("c") - .setDescription("meta.msg.tag_list"), - new TagProperties() - .setName("repeat_string.a") - .setDescription("meta.msg.repeat_string"), - new TagProperties() - .setName("repeat_string.b") - .setDescription("meta.msg.repeat_string"), - new TagProperties() - .setName("deprecated") - .setColorHex("#FF0000") - ), 
getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - .map(MetadataChangeProposalWrapper::getAspect) - .collect(Collectors.toSet())); - } + @Test + public void extendedMessageTest() throws IOException { + TagVisitor test = new TagVisitor(); + assertEquals( + Set.of( + new TagProperties() + .setName("bool_feature") + .setDescription("meta.msg.bool_feature is true."), + new TagProperties() + .setName("MetaEnumExample.ENTITY") + .setDescription("Enum MetaEnumExample.ENTITY of {UNKNOWN, ENTITY, EVENT}"), + new TagProperties() + .setName("MetaEnumExample.EVENT") + .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), + new TagProperties().setName("a").setDescription("meta.msg.tag_list"), + new TagProperties().setName("b").setDescription("meta.msg.tag_list"), + new TagProperties().setName("c").setDescription("meta.msg.tag_list"), + new TagProperties().setName("repeat_string.a").setDescription("meta.msg.repeat_string"), + new TagProperties().setName("repeat_string.b").setDescription("meta.msg.repeat_string"), + new TagProperties().setName("deprecated").setColorHex("#FF0000")), + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .map(MetadataChangeProposalWrapper::getAspect) + .collect(Collectors.toSet())); + } - @Test - public void extendedFieldTest() throws IOException { - Set<TagProperties> expectedTagProperties = Set.of( - new TagProperties() - .setName("product_type_bool") - .setDescription("meta.fld.product_type_bool is true."), - new TagProperties() - .setName("product_type.my type") - .setDescription("meta.fld.product_type"), - new TagProperties() - .setName("MetaEnumExample.EVENT") - .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), - new TagProperties() - .setName("d") - .setDescription("meta.fld.tag_list"), - new TagProperties() - .setName("e") - .setDescription("meta.fld.tag_list"), - new TagProperties() - .setName("f") - .setDescription("meta.fld.tag_list"), - new TagProperties() - .setName("deprecated") - .setColorHex("#FF0000") - ); + @Test + public void extendedFieldTest() throws IOException { + Set<TagProperties> expectedTagProperties = + Set.of( + new TagProperties() + .setName("product_type_bool") + .setDescription("meta.fld.product_type_bool is true."), + new TagProperties() + .setName("product_type.my type") + .setDescription("meta.fld.product_type"), + new TagProperties() + .setName("MetaEnumExample.EVENT") + .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), + new TagProperties().setName("d").setDescription("meta.fld.tag_list"), + new TagProperties().setName("e").setDescription("meta.fld.tag_list"), + new TagProperties().setName("f").setDescription("meta.fld.tag_list"), + new TagProperties().setName("deprecated").setColorHex("#FF0000")); - assertEquals(expectedTagProperties, - getTestProtobufGraph("extended_protobuf", "messageB") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(new TagVisitor())) - .map(MetadataChangeProposalWrapper::getAspect) - .collect(Collectors.toSet())); - } -} \ No newline at end of file + assertEquals( + expectedTagProperties, + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(new TagVisitor())) + .map(MetadataChangeProposalWrapper::getAspect) + .collect(Collectors.toSet())); + } +} diff --git 
a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java index 4fd5c771caeba..4cff55afc92de 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java @@ -3,46 +3,54 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; -import com.linkedin.mxe.MetadataChangeProposal; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DataJobLineageAdd { - private DataJobLineageAdd() { - - } + private DataJobLineageAdd() {} /** * Adds lineage to an existing DataJob without affecting any lineage + * * @param args * @throws IOException * @throws ExecutionException * @throws InterruptedException */ - public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { String token = ""; - try (RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - )) { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder().urn(UrnUtils - .getUrn("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_456)")) - .addInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .addOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .addInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_123)")) - .addInputDatasetField(UrnUtils.getUrn( - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .addOutputDatasetField(UrnUtils.getUrn( - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + try (RestEmitter emitter = + RestEmitter.create(b -> b.server("http://localhost:8080").token(token))) { + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_456)")) + .addInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .addOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .addInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_123)")) + .addInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .addOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future<MetadataWriteResponse> response = emitter.emit(dataJobIOPatch); @@ -51,9 +59,5 @@ public static void main(String[] args) throws 
IOException, ExecutionException, I log.error("Failed to emit metadata to DataHub", e); throw new RuntimeException(e); } - } - } - - diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java index ac368972e8dc9..342fbddde8223 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java @@ -15,70 +15,79 @@ import datahub.client.MetadataWriteResponse; import datahub.client.rest.RestEmitter; import datahub.event.MetadataChangeProposalWrapper; - import java.io.IOException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; public class DatasetAdd { - - private DatasetAdd() { - - } - public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { - DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"); - CorpuserUrn userUrn = new CorpuserUrn("ingestion"); - AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn); + private DatasetAdd() {} - SchemaMetadata schemaMetadata = new SchemaMetadata() - .setSchemaName("customer") - .setPlatform(new DataPlatformUrn("hive")) - .setVersion(0L) - .setHash("") - .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new OtherSchema().setRawSchema("__insert raw schema here__"))) - .setLastModified(lastModified); + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"); + CorpuserUrn userUrn = new CorpuserUrn("ingestion"); + AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn); - SchemaFieldArray fields = new SchemaFieldArray(); + SchemaMetadata schemaMetadata = + new SchemaMetadata() + .setSchemaName("customer") + .setPlatform(new DataPlatformUrn("hive")) + .setVersion(0L) + .setHash("") + .setPlatformSchema( + SchemaMetadata.PlatformSchema.create( + new OtherSchema().setRawSchema("__insert raw schema here__"))) + .setLastModified(lastModified); - SchemaField field1 = new SchemaField() - .setFieldPath("address.zipcode") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("VARCHAR(50)") - .setDescription("This is the zipcode of the address. Specified using extended form and limited to addresses in the United States") - .setLastModified(lastModified); - fields.add(field1); + SchemaFieldArray fields = new SchemaFieldArray(); - SchemaField field2 = new SchemaField().setFieldPath("address.street") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("VARCHAR(100)") - .setDescription("Street corresponding to the address") - .setLastModified(lastModified); - fields.add(field2); + SchemaField field1 = + new SchemaField() + .setFieldPath("address.zipcode") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("VARCHAR(50)") + .setDescription( + "This is the zipcode of the address. 
Specified using extended form and limited to addresses in the United States") + .setLastModified(lastModified); + fields.add(field1); - SchemaField field3 = new SchemaField().setFieldPath("last_sold_date") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType()))) - .setNativeDataType("Date") - .setDescription("Date of the last sale date for this property") - .setLastModified(lastModified); - fields.add(field3); + SchemaField field2 = + new SchemaField() + .setFieldPath("address.street") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("VARCHAR(100)") + .setDescription("Street corresponding to the address") + .setLastModified(lastModified); + fields.add(field2); - schemaMetadata.setFields(fields); + SchemaField field3 = + new SchemaField() + .setFieldPath("last_sold_date") + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType()))) + .setNativeDataType("Date") + .setDescription("Date of the last sale date for this property") + .setLastModified(lastModified); + fields.add(field3); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn(datasetUrn) - .upsert() - .aspect(schemaMetadata) - .build(); + schemaMetadata.setFields(fields); - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - Future<MetadataWriteResponse> response = emitter.emit(mcpw, null); - System.out.println(response.get().getResponseContent()); - } + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn(datasetUrn) + .upsert() + .aspect(schemaMetadata) + .build(); -} \ No newline at end of file + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + Future<MetadataWriteResponse> response = emitter.emit(mcpw, null); + System.out.println(response.get().getResponseContent()); + } +} diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java index 5d1698556cac5..b30cb5166df70 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java @@ -1,55 +1,49 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; -import com.linkedin.mxe.MetadataChangeProposal; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DatasetCustomPropertiesAdd { - private DatasetCustomPropertiesAdd() { - - } + private DatasetCustomPropertiesAdd() {} /** - * Adds properties to an existing custom properties aspect without affecting any existing properties + * Adds properties to an existing custom properties aspect without affecting any existing + * properties + * * @param args * @throws IOException * @throws ExecutionException * @throws InterruptedException */ - public static void 
main(String[] args) throws IOException, ExecutionException, InterruptedException { - MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder() - .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) - .addCustomProperty("cluster_name", "datahubproject.acryl.io") - .addCustomProperty("retention_time", "2 years") - .build(); - - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - try { - Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal); - - System.out.println(response.get().getResponseContent()); - } catch (Exception e) { - log.error("Failed to emit metadata to DataHub", e); - throw e; - } finally { - emitter.close(); - } - + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + MetadataChangeProposal datasetPropertiesProposal = + new DatasetPropertiesPatchBuilder() + .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) + .addCustomProperty("cluster_name", "datahubproject.acryl.io") + .addCustomProperty("retention_time", "2 years") + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + try { + Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal); + + System.out.println(response.get().getResponseContent()); + } catch (Exception e) { + log.error("Failed to emit metadata to DataHub", e); + throw e; + } finally { + emitter.close(); } - + } } - - diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java index 9a0ec2030be48..0a89e87060698 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java @@ -10,47 +10,40 @@ import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DatasetCustomPropertiesAddRemove { - private DatasetCustomPropertiesAddRemove() { - - } + private DatasetCustomPropertiesAddRemove() {} /** * Applies Add and Remove property operations on an existing custom properties aspect without * affecting any other properties + * * @param args * @throws IOException * @throws ExecutionException * @throws InterruptedException */ - public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { - MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder() - .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) - .addCustomProperty("cluster_name", "datahubproject.acryl.io") - .removeCustomProperty("retention_time") - .build(); - - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - try { - Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal); - - System.out.println(response.get().getResponseContent()); - } catch (Exception e) { - log.error("Failed to emit metadata to DataHub", e); - throw e; - } finally { - emitter.close(); - } - + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + MetadataChangeProposal datasetPropertiesProposal = + new 
DatasetPropertiesPatchBuilder() + .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) + .addCustomProperty("cluster_name", "datahubproject.acryl.io") + .removeCustomProperty("retention_time") + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + try { + Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal); + + System.out.println(response.get().getResponseContent()); + } catch (Exception e) { + log.error("Failed to emit metadata to DataHub", e); + throw e; + } finally { + emitter.close(); } - + } } - - diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java index 1d4c937e2f6a0..053c1f068e048 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java @@ -11,17 +11,15 @@ import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DatasetCustomPropertiesReplace { - private DatasetCustomPropertiesReplace() { - - } + private DatasetCustomPropertiesReplace() {} /** - * Replaces the existing custom properties map with a new map. - * Fields like dataset name, description etc remain unchanged. + * Replaces the existing custom properties map with a new map. Fields like dataset name, + * description etc remain unchanged. + * * @param args * @throws IOException */ @@ -29,16 +27,14 @@ public static void main(String[] args) throws IOException { Map<String, String> customPropsMap = new HashMap<>(); customPropsMap.put("cluster_name", "datahubproject.acryl.io"); customPropsMap.put("retention_time", "2 years"); - MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder() - .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) - .setCustomProperties(customPropsMap) - .build(); + MetadataChangeProposal datasetPropertiesProposal = + new DatasetPropertiesPatchBuilder() + .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) + .setCustomProperties(customPropsMap) + .build(); String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); try { Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal); @@ -48,9 +44,5 @@ public static void main(String[] args) throws IOException { } finally { emitter.close(); } - } - } - - diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java index 077489a9e02d9..233434ccf7002 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java @@ -4,37 +4,32 @@ import datahub.client.MetadataWriteResponse; import datahub.client.rest.RestEmitter; import datahub.event.MetadataChangeProposalWrapper; - import java.io.IOException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; public class TagCreate { - - private TagCreate() { - - } - - public static 
void main(String[] args) throws IOException, ExecutionException, InterruptedException { - TagProperties tagProperties = new TagProperties() - .setName("Deprecated") - .setDescription("Having this tag means this column or table is deprecated."); - - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("tag") - .entityUrn("urn:li:tag:deprecated") - .upsert() - .aspect(tagProperties) - .build(); - - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - Future<MetadataWriteResponse> response = emitter.emit(mcpw, null); - System.out.println(response.get().getResponseContent()); - - } + private TagCreate() {} + + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + TagProperties tagProperties = + new TagProperties() + .setName("Deprecated") + .setDescription("Having this tag means this column or table is deprecated."); + + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("tag") + .entityUrn("urn:li:tag:deprecated") + .upsert() + .aspect(tagProperties) + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + Future<MetadataWriteResponse> response = emitter.emit(mcpw, null); + System.out.println(response.get().getResponseContent()); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java index 2b9d20009eeb7..3dc5cfc919c16 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java @@ -7,25 +7,27 @@ public class HdfsIn2HdfsOut1 { - private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut1.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut1.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { + public static void main(String[] args) { - System.out.println("Inside main"); - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + System.out.println("Inside main"); + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); + Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - Dataset<Row> df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + Dataset<Row> df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); - // InsertIntoHadoopFsRelationCommand - 
df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); + // InsertIntoHadoopFsRelationCommand + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); - spark.stop(); - } + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java index ed7dd95431a34..34a5e5dfaef97 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java @@ -1,6 +1,5 @@ package test.spark.lineage; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -8,30 +7,31 @@ public class HdfsIn2HdfsOut2 { - private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut2.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut2.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - Dataset<Row> df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - // InsertIntoHadoopFsRelationCommand - df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); + Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); + Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - Dataset<Row> dfO = spark - .sql("select v1.c1 as a1, v1.c2 as b1, v2.c1 as c1, v2.c2 as d1 from v1 join v2 on v1.id = v2.id"); + Dataset<Row> df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); - // InsertIntoHadoopFsRelationCommand - dfO.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); - spark.stop(); + // InsertIntoHadoopFsRelationCommand + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); - } + Dataset<Row> dfO = + spark.sql( + "select v1.c1 as a1, v1.c2 as b1, v2.c1 as c1, v2.c2 as d1 from v1 join v2 on v1.id = v2.id"); + // InsertIntoHadoopFsRelationCommand + dfO.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java index b2bafcfade35d..1fc6d0374d2ed 100644 --- 
a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java @@ -7,29 +7,44 @@ public class HdfsIn2HiveCreateInsertTable { - private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateInsertTable.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME,"foo4")); - - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); - - Dataset<Row> df = df1.join(df2, "id").drop("id"); - - df.write().mode(SaveMode.Overwrite).saveAsTable(Utils.tbl(TEST_NAME,"foo4")); // CreateDataSourceTableAsSelectCommand - df.write().mode(SaveMode.Append).saveAsTable(Utils.tbl(TEST_NAME,"foo4")); // CreateDataSourceTableAsSelectCommand - df.write().insertInto(Utils.tbl(TEST_NAME,"foo4")); // InsertIntoHadoopFsRelationCommand - - spark.stop(); - } - + private static final String TEST_NAME = + "Java" + HdfsIn2HiveCreateInsertTable.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; + + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo4")); + + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); + + Dataset<Row> df = df1.join(df2, "id").drop("id"); + + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(Utils.tbl(TEST_NAME, "foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Append) + .saveAsTable(Utils.tbl(TEST_NAME, "foo4")); // CreateDataSourceTableAsSelectCommand + df.write().insertInto(Utils.tbl(TEST_NAME, "foo4")); // InsertIntoHadoopFsRelationCommand + + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java index ca15bfee111fe..6d9cc032f7e9d 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java @@ -7,27 +7,39 @@ public class HdfsIn2HiveCreateTable { - private static final String TEST_NAME = "Java" + 
HdfsIn2HiveCreateTable.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME,"foo3")); - - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); - - Dataset<Row> df = df1.join(df2, "id").drop("id"); - - df.write().mode(SaveMode.Overwrite).saveAsTable(Utils.tbl(TEST_NAME,"foo3")); // CreateDataSourceTableAsSelectCommand - - spark.stop(); - } - + private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateTable.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; + + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo3")); + + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); + + Dataset<Row> df = df1.join(df2, "id").drop("id"); + + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(Utils.tbl(TEST_NAME, "foo3")); // CreateDataSourceTableAsSelectCommand + + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java index 6b8de329ba05a..7d71136e27f24 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java @@ -6,44 +6,66 @@ public class HiveInHiveOut { - private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); + spark.sql("CREATE DATABASE IF NOT EXISTS " + 
TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "foo5") + " as " - + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "hivetab") + " as " + "(select * from " - + Utils.tbl(TEST_NAME, "foo5") + ")"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "hivetab") + + " as " + + "(select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - // InsertIntoHiveTable - spark.sql( - "insert into " + Utils.tbl(TEST_NAME, "hivetab") + " (select * from " + Utils.tbl(TEST_NAME, "foo5") + ")"); + // InsertIntoHiveTable + spark.sql( + "insert into " + + Utils.tbl(TEST_NAME, "hivetab") + + " (select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); + Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); - // InsertIntoHiveTable - df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); - - spark.stop(); - } + // InsertIntoHiveTable + df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java index 2d31b72998637..598b347cd2064 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java @@ -6,44 +6,65 @@ public class HiveInHiveOut_test1 { - private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().enableHiveSupport().getOrCreate(); + public static void main(String[] args) { + SparkSession spark = SparkSession.builder().enableHiveSupport().getOrCreate(); - spark.sql("DROP DATABASE IF EXISTS " + 
TEST_NAME + " CASCADE"); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "foo5") + " as " - + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "hivetab") + " as " + "(select * from " - + Utils.tbl(TEST_NAME, "foo5") + ")"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "hivetab") + + " as " + + "(select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - // InsertIntoHiveTable - spark.sql( - "insert into " + Utils.tbl(TEST_NAME, "hivetab") + " (select * from " + Utils.tbl(TEST_NAME, "foo5") + ")"); + // InsertIntoHiveTable + spark.sql( + "insert into " + + Utils.tbl(TEST_NAME, "hivetab") + + " (select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); + Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); - // InsertIntoHiveTable - df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); - - spark.stop(); - } + // InsertIntoHiveTable + df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java index 22007a8d41e90..278d7068f20bc 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java @@ -1,7 +1,7 @@ package test.spark.lineage; public class Utils { - public static String tbl(String testDb ,String tbl) { + public static String tbl(String testDb, String tbl) { return testDb + "." 
+ tbl; } } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java index 90410332c3d7a..1dda979bfcefd 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java @@ -1,6 +1,17 @@ package datahub.spark; +import com.google.common.base.Splitter; +import com.typesafe.config.Config; import datahub.spark.consumer.impl.CoalesceJobsEmitter; +import datahub.spark.consumer.impl.McpEmitter; +import datahub.spark.model.AppEndEvent; +import datahub.spark.model.AppStartEvent; +import datahub.spark.model.DatasetLineage; +import datahub.spark.model.LineageConsumer; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.SQLQueryExecEndEvent; +import datahub.spark.model.SQLQueryExecStartEvent; +import datahub.spark.model.dataset.SparkDataset; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; @@ -15,7 +26,7 @@ import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - +import lombok.extern.slf4j.Slf4j; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.SparkEnv; @@ -30,27 +41,12 @@ import org.apache.spark.sql.execution.SQLExecution; import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionEnd; import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart; - -import com.google.common.base.Splitter; -import com.typesafe.config.Config; - -import datahub.spark.consumer.impl.McpEmitter; -import datahub.spark.model.AppEndEvent; -import datahub.spark.model.AppStartEvent; -import datahub.spark.model.DatasetLineage; -import datahub.spark.model.LineageConsumer; -import datahub.spark.model.LineageUtils; -import datahub.spark.model.SQLQueryExecEndEvent; -import datahub.spark.model.SQLQueryExecStartEvent; -import datahub.spark.model.dataset.SparkDataset; -import lombok.extern.slf4j.Slf4j; import org.apache.spark.util.JsonProtocol; import org.json4s.jackson.JsonMethods$; import scala.collection.JavaConversions; import scala.runtime.AbstractFunction1; import scala.runtime.AbstractPartialFunction; - @Slf4j public class DatahubSparkListener extends SparkListener { @@ -63,7 +59,8 @@ public class DatahubSparkListener extends SparkListener { public static final String COALESCE_KEY = "coalesce_jobs"; private final Map<String, AppStartEvent> appDetails = new ConcurrentHashMap<>(); - private final Map<String, Map<Long, SQLQueryExecStartEvent>> appSqlDetails = new ConcurrentHashMap<>(); + private final Map<String, Map<Long, SQLQueryExecStartEvent>> appSqlDetails = + new ConcurrentHashMap<>(); private final Map<String, McpEmitter> appEmitters = new ConcurrentHashMap<>(); private final Map<String, Config> appConfig = new ConcurrentHashMap<>(); @@ -77,15 +74,22 @@ private class SqlStartTask { private final SparkContext ctx; private final LogicalPlan plan; - public SqlStartTask(SparkListenerSQLExecutionStart sqlStart, LogicalPlan plan, SparkContext ctx) { + public SqlStartTask( + SparkListenerSQLExecutionStart sqlStart, LogicalPlan plan, SparkContext ctx) { this.sqlStart = sqlStart; this.plan = plan; this.ctx = ctx; String jsonPlan = (plan != null) ? plan.toJSON() : null; String sqlStartJson = - (sqlStart != null) ? 
JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) : null; - log.debug("SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", sqlStartJson, jsonPlan, ctx); + (sqlStart != null) + ? JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) + : null; + log.debug( + "SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", + sqlStartJson, + jsonPlan, + ctx); } public void run() { @@ -104,40 +108,55 @@ public void run() { return; } - appSqlDetails.get(ctx.applicationId()) - .put(sqlStart.executionId(), - new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(), - sqlStart.time(), sqlStart.executionId(), null)); - log.debug("PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n"); + appSqlDetails + .get(ctx.applicationId()) + .put( + sqlStart.executionId(), + new SQLQueryExecStartEvent( + ctx.conf().get("spark.master"), + getPipelineName(ctx), + ctx.applicationId(), + sqlStart.time(), + sqlStart.executionId(), + null)); + log.debug( + "PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n"); log.debug(plan.toString()); - Optional<? extends Collection<SparkDataset>> outputDS = DatasetExtractor.asDataset(plan, ctx, true); + Optional<? extends Collection<SparkDataset>> outputDS = + DatasetExtractor.asDataset(plan, ctx, true); if (!outputDS.isPresent() || outputDS.get().isEmpty()) { - log.debug("Skipping execution as no output dataset present for execution id: " + ctx.applicationId() + ":" - + sqlStart.executionId()); + log.debug( + "Skipping execution as no output dataset present for execution id: " + + ctx.applicationId() + + ":" + + sqlStart.executionId()); return; } // Here assumption is that there will be only single target for single sql query DatasetLineage lineage = - new DatasetLineage(sqlStart.description(), plan.toString(), outputDS.get().iterator().next()); + new DatasetLineage( + sqlStart.description(), plan.toString(), outputDS.get().iterator().next()); Collection<QueryPlan<?>> allInners = new ArrayList<>(); - plan.collect(new AbstractPartialFunction<LogicalPlan, Void>() { - - @Override - public Void apply(LogicalPlan plan) { - log.debug("CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); - Optional<? extends Collection<SparkDataset>> inputDS = DatasetExtractor.asDataset(plan, ctx, false); - inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); - allInners.addAll(JavaConversions.asJavaCollection(plan.innerChildren())); - return null; - } + plan.collect( + new AbstractPartialFunction<LogicalPlan, Void>() { + + @Override + public Void apply(LogicalPlan plan) { + log.debug("CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); + Optional<? 
extends Collection<SparkDataset>> inputDS = + DatasetExtractor.asDataset(plan, ctx, false); + inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); + allInners.addAll(JavaConversions.asJavaCollection(plan.innerChildren())); + return null; + } - @Override - public boolean isDefinedAt(LogicalPlan x) { - return true; - } - }); + @Override + public boolean isDefinedAt(LogicalPlan x) { + return true; + } + }); for (QueryPlan<?> qp : allInners) { if (!(qp instanceof LogicalPlan)) { @@ -145,28 +164,42 @@ public boolean isDefinedAt(LogicalPlan x) { } LogicalPlan nestedPlan = (LogicalPlan) qp; - nestedPlan.collect(new AbstractPartialFunction<LogicalPlan, Void>() { - - @Override - public Void apply(LogicalPlan plan) { - log.debug("INNER CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); - Optional<? extends Collection<SparkDataset>> inputDS = DatasetExtractor.asDataset(plan, ctx, false); - inputDS.ifPresent( - x -> log.debug("source added for " + ctx.appName() + "/" + sqlStart.executionId() + ": " + x)); - inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); - return null; - } - - @Override - public boolean isDefinedAt(LogicalPlan x) { - return true; - } - }); + nestedPlan.collect( + new AbstractPartialFunction<LogicalPlan, Void>() { + + @Override + public Void apply(LogicalPlan plan) { + log.debug("INNER CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); + Optional<? extends Collection<SparkDataset>> inputDS = + DatasetExtractor.asDataset(plan, ctx, false); + inputDS.ifPresent( + x -> + log.debug( + "source added for " + + ctx.appName() + + "/" + + sqlStart.executionId() + + ": " + + x)); + inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); + return null; + } + + @Override + public boolean isDefinedAt(LogicalPlan x) { + return true; + } + }); } SQLQueryExecStartEvent evt = - new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(), - sqlStart.time(), sqlStart.executionId(), lineage); + new SQLQueryExecStartEvent( + ctx.conf().get("spark.master"), + getPipelineName(ctx), + ctx.applicationId(), + sqlStart.time(), + sqlStart.executionId(), + lineage); appSqlDetails.get(ctx.applicationId()).put(sqlStart.executionId(), evt); @@ -185,14 +218,16 @@ public boolean isDefinedAt(LogicalPlan x) { public void onApplicationStart(SparkListenerApplicationStart applicationStart) { try { log.info("Application started: " + applicationStart); - LineageUtils.findSparkCtx().foreach(new AbstractFunction1<SparkContext, Void>() { - - @Override - public Void apply(SparkContext sc) { - checkOrCreateApplicationSetup(sc); - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1<SparkContext, Void>() { + + @Override + public Void apply(SparkContext sc) { + checkOrCreateApplicationSetup(sc); + return null; + } + }); super.onApplicationStart(applicationStart); } catch (Exception e) { // log error, but don't impact thread @@ -207,41 +242,52 @@ public Void apply(SparkContext sc) { @Override public void onApplicationEnd(SparkListenerApplicationEnd applicationEnd) { try { - LineageUtils.findSparkCtx().foreach(new AbstractFunction1<SparkContext, Void>() { - - @Override - public Void apply(SparkContext sc) { - log.info("Application ended : {} {}", sc.appName(), sc.applicationId()); - AppStartEvent start = appDetails.remove(sc.applicationId()); - appSqlDetails.remove(sc.applicationId()); - if (start == null) { - log.error("Application end event received, but start event missing for appId " + 
sc.applicationId()); - } else { - AppEndEvent evt = new AppEndEvent(LineageUtils.getMaster(sc), getPipelineName(sc), sc.applicationId(), - applicationEnd.time(), start); - - McpEmitter emitter = appEmitters.get(sc.applicationId()); - if (emitter != null) { - emitter.accept(evt); - try { - emitter.close(); - appEmitters.remove(sc.applicationId()); - } catch (Exception e) { - log.warn("Failed to close underlying emitter due to {}", e.getMessage()); - } - } - consumers().forEach(x -> { - x.accept(evt); - try { - x.close(); - } catch (IOException e) { - log.warn("Failed to close lineage consumer", e); - } - }); - } - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1<SparkContext, Void>() { + + @Override + public Void apply(SparkContext sc) { + log.info("Application ended : {} {}", sc.appName(), sc.applicationId()); + AppStartEvent start = appDetails.remove(sc.applicationId()); + appSqlDetails.remove(sc.applicationId()); + if (start == null) { + log.error( + "Application end event received, but start event missing for appId " + + sc.applicationId()); + } else { + AppEndEvent evt = + new AppEndEvent( + LineageUtils.getMaster(sc), + getPipelineName(sc), + sc.applicationId(), + applicationEnd.time(), + start); + + McpEmitter emitter = appEmitters.get(sc.applicationId()); + if (emitter != null) { + emitter.accept(evt); + try { + emitter.close(); + appEmitters.remove(sc.applicationId()); + } catch (Exception e) { + log.warn("Failed to close underlying emitter due to {}", e.getMessage()); + } + } + consumers() + .forEach( + x -> { + x.accept(evt); + try { + x.close(); + } catch (IOException e) { + log.warn("Failed to close lineage consumer", e); + } + }); + } + return null; + } + }); super.onApplicationEnd(applicationEnd); } catch (Exception e) { // log error, but don't impact thread @@ -276,27 +322,37 @@ public void onOtherEvent(SparkListenerEvent event) { } public void processExecutionEnd(SparkListenerSQLExecutionEnd sqlEnd) { - LineageUtils.findSparkCtx().foreach(new AbstractFunction1<SparkContext, Void>() { - - @Override - public Void apply(SparkContext sc) { - SQLQueryExecStartEvent start = appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId()); - if (start == null) { - log.error( - "Execution end event received, but start event missing for appId/sql exec Id " + sc.applicationId() + ":" - + sqlEnd.executionId()); - } else if (start.getDatasetLineage() != null) { - SQLQueryExecEndEvent evt = - new SQLQueryExecEndEvent(LineageUtils.getMaster(sc), sc.appName(), sc.applicationId(), sqlEnd.time(), - sqlEnd.executionId(), start); - McpEmitter emitter = appEmitters.get(sc.applicationId()); - if (emitter != null) { - emitter.accept(evt); - } - } - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1<SparkContext, Void>() { + + @Override + public Void apply(SparkContext sc) { + SQLQueryExecStartEvent start = + appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId()); + if (start == null) { + log.error( + "Execution end event received, but start event missing for appId/sql exec Id " + + sc.applicationId() + + ":" + + sqlEnd.executionId()); + } else if (start.getDatasetLineage() != null) { + SQLQueryExecEndEvent evt = + new SQLQueryExecEndEvent( + LineageUtils.getMaster(sc), + sc.appName(), + sc.applicationId(), + sqlEnd.time(), + sqlEnd.executionId(), + start); + McpEmitter emitter = appEmitters.get(sc.applicationId()); + if (emitter != null) { + emitter.accept(evt); + } + } + return null; + } + }); } private 
synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { @@ -306,15 +362,27 @@ private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { if (datahubConfig == null) { Config datahubConf = LineageUtils.parseSparkConfig(); appConfig.put(appId, datahubConf); - Config pipelineConfig = datahubConf.hasPath(PIPELINE_KEY) ? datahubConf.getConfig(PIPELINE_KEY) - : com.typesafe.config.ConfigFactory.empty(); + Config pipelineConfig = + datahubConf.hasPath(PIPELINE_KEY) + ? datahubConf.getConfig(PIPELINE_KEY) + : com.typesafe.config.ConfigFactory.empty(); AppStartEvent evt = - new AppStartEvent(LineageUtils.getMaster(ctx), getPipelineName(ctx), appId, ctx.startTime(), ctx.sparkUser(), + new AppStartEvent( + LineageUtils.getMaster(ctx), + getPipelineName(ctx), + appId, + ctx.startTime(), + ctx.sparkUser(), pipelineConfig); - appEmitters.computeIfAbsent(appId, - s -> datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) ? new CoalesceJobsEmitter( - datahubConf) : new McpEmitter(datahubConf)).accept(evt); + appEmitters + .computeIfAbsent( + appId, + s -> + datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) + ? new CoalesceJobsEmitter(datahubConf) + : new McpEmitter(datahubConf)) + .accept(evt); consumers().forEach(c -> c.accept(evt)); appDetails.put(appId, evt); appSqlDetails.put(appId, new ConcurrentHashMap<>()); @@ -322,7 +390,8 @@ private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { } private String getPipelineName(SparkContext cx) { - Config datahubConfig = appConfig.computeIfAbsent(cx.applicationId(), s -> LineageUtils.parseSparkConfig()); + Config datahubConfig = + appConfig.computeIfAbsent(cx.applicationId(), s -> LineageUtils.parseSparkConfig()); String name = ""; if (datahubConfig.hasPath(DATABRICKS_CLUSTER_KEY)) { name = datahubConfig.getString(DATABRICKS_CLUSTER_KEY) + "_" + cx.applicationId(); @@ -339,8 +408,10 @@ private String getPipelineName(SparkContext cx) { private void processExecution(SparkListenerSQLExecutionStart sqlStart) { QueryExecution queryExec = SQLExecution.getQueryExecution(sqlStart.executionId()); if (queryExec == null) { - log.error("Skipping processing for sql exec Id" + sqlStart.executionId() - + " as Query execution context could not be read from current spark state"); + log.error( + "Skipping processing for sql exec Id" + + sqlStart.executionId() + + " as Query execution context could not be read from current spark state"); return; } LogicalPlan plan = queryExec.optimizedPlan(); @@ -354,7 +425,8 @@ private List<LineageConsumer> consumers() { SparkConf conf = SparkEnv.get().conf(); if (conf.contains(CONSUMER_TYPE_KEY)) { String consumerTypes = conf.get(CONSUMER_TYPE_KEY); - return StreamSupport.stream(Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false) + return StreamSupport.stream( + Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false) .map(x -> LineageUtils.getConsumer(x)) .filter(Objects::nonNull) .collect(Collectors.toList()); diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java index 51f5d561b26ae..ec8177bbc0e5c 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java @@ -1,5 +1,13 @@ package datahub.spark; +import 
com.google.common.collect.ImmutableSet; +import com.linkedin.common.FabricType; +import com.typesafe.config.Config; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.dataset.CatalogTableDataset; +import datahub.spark.model.dataset.HdfsPathDataset; +import datahub.spark.model.dataset.JdbcDataset; +import datahub.spark.model.dataset.SparkDataset; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -10,7 +18,7 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - +import lombok.extern.slf4j.Slf4j; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.spark.SparkContext; @@ -32,17 +40,6 @@ import org.apache.spark.sql.hive.execution.HiveTableScanExec; import org.apache.spark.sql.hive.execution.InsertIntoHiveTable; import org.apache.spark.sql.sources.BaseRelation; - -import com.google.common.collect.ImmutableSet; -import com.linkedin.common.FabricType; -import com.typesafe.config.Config; - -import datahub.spark.model.LineageUtils; -import datahub.spark.model.dataset.CatalogTableDataset; -import datahub.spark.model.dataset.HdfsPathDataset; -import datahub.spark.model.dataset.JdbcDataset; -import datahub.spark.model.dataset.SparkDataset; -import lombok.extern.slf4j.Slf4j; import scala.Option; import scala.collection.JavaConversions; import scala.runtime.AbstractFunction1; @@ -50,196 +47,287 @@ @Slf4j public class DatasetExtractor { - private static final Map<Class<? extends LogicalPlan>, PlanToDataset> PLAN_TO_DATASET = new HashMap<>(); - private static final Map<Class<? extends SparkPlan>, SparkPlanToDataset> SPARKPLAN_TO_DATASET = new HashMap<>(); - private static final Map<Class<? extends BaseRelation>, RelationToDataset> REL_TO_DATASET = new HashMap<>(); - private static final Set<Class<? extends LogicalPlan>> OUTPUT_CMD = ImmutableSet.of( - InsertIntoHadoopFsRelationCommand.class, SaveIntoDataSourceCommand.class, - CreateDataSourceTableAsSelectCommand.class, CreateHiveTableAsSelectCommand.class, InsertIntoHiveTable.class); + private static final Map<Class<? extends LogicalPlan>, PlanToDataset> PLAN_TO_DATASET = + new HashMap<>(); + private static final Map<Class<? extends SparkPlan>, SparkPlanToDataset> SPARKPLAN_TO_DATASET = + new HashMap<>(); + private static final Map<Class<? extends BaseRelation>, RelationToDataset> REL_TO_DATASET = + new HashMap<>(); + private static final Set<Class<? extends LogicalPlan>> OUTPUT_CMD = + ImmutableSet.of( + InsertIntoHadoopFsRelationCommand.class, + SaveIntoDataSourceCommand.class, + CreateDataSourceTableAsSelectCommand.class, + CreateHiveTableAsSelectCommand.class, + InsertIntoHiveTable.class); private static final String DATASET_ENV_KEY = "metadata.dataset.env"; private static final String DATASET_PLATFORM_INSTANCE_KEY = "metadata.dataset.platformInstance"; private static final String TABLE_HIVE_PLATFORM_ALIAS = "metadata.table.hive_platform_alias"; private static final String INCLUDE_SCHEME_KEY = "metadata.include_scheme"; private static final String REMOVE_PARTITION_PATTERN = "metadata.remove_partition_pattern"; - // TODO InsertIntoHiveDirCommand, InsertIntoDataSourceDirCommand - private DatasetExtractor() { + // TODO InsertIntoHiveDirCommand, InsertIntoDataSourceDirCommand - } + private DatasetExtractor() {} private static interface PlanToDataset { - Optional<? extends Collection<SparkDataset>> fromPlanNode(LogicalPlan plan, SparkContext ctx, Config datahubConfig); + Optional<? 
extends Collection<SparkDataset>> fromPlanNode( + LogicalPlan plan, SparkContext ctx, Config datahubConfig); } private static interface RelationToDataset { - Optional<? extends Collection<SparkDataset>> fromRelation(BaseRelation rel, SparkContext ctx, Config datahubConfig); + Optional<? extends Collection<SparkDataset>> fromRelation( + BaseRelation rel, SparkContext ctx, Config datahubConfig); } private static interface SparkPlanToDataset { - Optional<? extends Collection<SparkDataset>> fromSparkPlanNode(SparkPlan plan, SparkContext ctx, - Config datahubConfig); + Optional<? extends Collection<SparkDataset>> fromSparkPlanNode( + SparkPlan plan, SparkContext ctx, Config datahubConfig); } static { - - SPARKPLAN_TO_DATASET.put(FileSourceScanExec.class, (p, ctx, datahubConfig) -> { - - BaseRelation baseRel = ((FileSourceScanExec) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - - }); - - SPARKPLAN_TO_DATASET.put(HiveTableScanExec.class, (p, ctx, datahubConfig) -> { - - HiveTableRelation baseRel = ((HiveTableScanExec) p).relation(); - if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); - - }); - - SPARKPLAN_TO_DATASET.put(RowDataSourceScanExec.class, (p, ctx, datahubConfig) -> { - BaseRelation baseRel = ((RowDataSourceScanExec) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - }); - - SPARKPLAN_TO_DATASET.put(InMemoryTableScanExec.class, (p, ctx, datahubConfig) -> { - InMemoryRelation baseRel = ((InMemoryTableScanExec) p).relation(); - if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); - - }); - - PLAN_TO_DATASET.put(InsertIntoHadoopFsRelationCommand.class, (p, ctx, datahubConfig) -> { - InsertIntoHadoopFsRelationCommand cmd = (InsertIntoHadoopFsRelationCommand) p; - if (cmd.catalogTable().isDefined()) { - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.catalogTable().get(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - } - return Optional.of(Collections.singletonList(new HdfsPathDataset(cmd.outputPath(), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(LogicalRelation.class, (p, ctx, datahubConfig) -> { - BaseRelation baseRel = ((LogicalRelation) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - }); - - PLAN_TO_DATASET.put(SaveIntoDataSourceCommand.class, (p, ctx, datahubConfig) -> { - - SaveIntoDataSourceCommand cmd = (SaveIntoDataSourceCommand) p; - - Map<String, String> options = JavaConversions.mapAsJavaMap(cmd.options()); - String url = options.getOrDefault("url", ""); // e.g. 
jdbc:postgresql://localhost:5432/sparktestdb - if (url.contains("jdbc")) { - String tbl = options.get("dbtable"); - return Optional.of(Collections.singletonList( - new JdbcDataset(url, tbl, getCommonPlatformInstance(datahubConfig), getCommonFabricType(datahubConfig)))); - } else if (options.containsKey("path")) { - return Optional.of(Collections.singletonList(new HdfsPathDataset(new Path(options.get("path")), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - } else { - return Optional.empty(); - } - }); - - PLAN_TO_DATASET.put(CreateDataSourceTableAsSelectCommand.class, (p, ctx, datahubConfig) -> { - CreateDataSourceTableAsSelectCommand cmd = (CreateDataSourceTableAsSelectCommand) p; - // TODO what of cmd.mode() - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.table(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - PLAN_TO_DATASET.put(CreateHiveTableAsSelectCommand.class, (p, ctx, datahubConfig) -> { - CreateHiveTableAsSelectCommand cmd = (CreateHiveTableAsSelectCommand) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.tableDesc(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - PLAN_TO_DATASET.put(InsertIntoHiveTable.class, (p, ctx, datahubConfig) -> { - InsertIntoHiveTable cmd = (InsertIntoHiveTable) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.table(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(HiveTableRelation.class, (p, ctx, datahubConfig) -> { - HiveTableRelation cmd = (HiveTableRelation) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.tableMeta(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - - REL_TO_DATASET.put(HadoopFsRelation.class, (r, ctx, datahubConfig) -> { - List<Path> res = JavaConversions.asJavaCollection(((HadoopFsRelation) r).location().rootPaths()).stream() - .map(p -> getDirectoryPath(p, ctx.hadoopConfiguration())).distinct().collect(Collectors.toList()); - - // TODO mapping to URN TBD - return Optional.of(Collections.singletonList(new HdfsPathDataset(res.get(0), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - }); - REL_TO_DATASET.put(JDBCRelation.class, (r, ctx, datahubConfig) -> { - JDBCRelation rel = (JDBCRelation) r; - Option<String> tbl = rel.jdbcOptions().parameters().get(JDBCOptions.JDBC_TABLE_NAME()); - if (tbl.isEmpty()) { - return Optional.empty(); - } - - return Optional.of(Collections.singletonList(new JdbcDataset(rel.jdbcOptions().url(), tbl.get(), - getCommonPlatformInstance(datahubConfig), getCommonFabricType(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(InMemoryRelation.class, (plan, ctx, datahubConfig) -> { - SparkPlan cachedPlan = ((InMemoryRelation) plan).cachedPlan(); - ArrayList<SparkDataset> datasets = new ArrayList<>(); - cachedPlan.collectLeaves().toList().foreach(new AbstractFunction1<SparkPlan, Void>() { - - @Override - public Void apply(SparkPlan leafPlan) { - - if (SPARKPLAN_TO_DATASET.containsKey(leafPlan.getClass())) { - 
Optional<? extends Collection<SparkDataset>> dataset = SPARKPLAN_TO_DATASET.get(leafPlan.getClass()) - .fromSparkPlanNode(leafPlan, ctx, datahubConfig); - dataset.ifPresent(x -> datasets.addAll(x)); + SPARKPLAN_TO_DATASET.put( + FileSourceScanExec.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((FileSourceScanExec) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + HiveTableScanExec.class, + (p, ctx, datahubConfig) -> { + HiveTableRelation baseRel = ((HiveTableScanExec) p).relation(); + if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + RowDataSourceScanExec.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((RowDataSourceScanExec) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + InMemoryTableScanExec.class, + (p, ctx, datahubConfig) -> { + InMemoryRelation baseRel = ((InMemoryTableScanExec) p).relation(); + if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); + }); + + PLAN_TO_DATASET.put( + InsertIntoHadoopFsRelationCommand.class, + (p, ctx, datahubConfig) -> { + InsertIntoHadoopFsRelationCommand cmd = (InsertIntoHadoopFsRelationCommand) p; + if (cmd.catalogTable().isDefined()) { + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.catalogTable().get(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + } + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + cmd.outputPath(), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + LogicalRelation.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((LogicalRelation) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + PLAN_TO_DATASET.put( + SaveIntoDataSourceCommand.class, + (p, ctx, datahubConfig) -> { + SaveIntoDataSourceCommand cmd = (SaveIntoDataSourceCommand) p; + + Map<String, String> options = JavaConversions.mapAsJavaMap(cmd.options()); + String url = + options.getOrDefault("url", ""); // e.g. 
jdbc:postgresql://localhost:5432/sparktestdb + if (url.contains("jdbc")) { + String tbl = options.get("dbtable"); + return Optional.of( + Collections.singletonList( + new JdbcDataset( + url, + tbl, + getCommonPlatformInstance(datahubConfig), + getCommonFabricType(datahubConfig)))); + } else if (options.containsKey("path")) { + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + new Path(options.get("path")), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); } else { - log.error(leafPlan.getClass() + " is not yet supported. Please contact datahub team for further support."); + return Optional.empty(); + } + }); + + PLAN_TO_DATASET.put( + CreateDataSourceTableAsSelectCommand.class, + (p, ctx, datahubConfig) -> { + CreateDataSourceTableAsSelectCommand cmd = (CreateDataSourceTableAsSelectCommand) p; + // TODO what of cmd.mode() + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.table(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + PLAN_TO_DATASET.put( + CreateHiveTableAsSelectCommand.class, + (p, ctx, datahubConfig) -> { + CreateHiveTableAsSelectCommand cmd = (CreateHiveTableAsSelectCommand) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.tableDesc(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + PLAN_TO_DATASET.put( + InsertIntoHiveTable.class, + (p, ctx, datahubConfig) -> { + InsertIntoHiveTable cmd = (InsertIntoHiveTable) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.table(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + HiveTableRelation.class, + (p, ctx, datahubConfig) -> { + HiveTableRelation cmd = (HiveTableRelation) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.tableMeta(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + REL_TO_DATASET.put( + HadoopFsRelation.class, + (r, ctx, datahubConfig) -> { + List<Path> res = + JavaConversions.asJavaCollection(((HadoopFsRelation) r).location().rootPaths()) + .stream() + .map(p -> getDirectoryPath(p, ctx.hadoopConfiguration())) + .distinct() + .collect(Collectors.toList()); + + // TODO mapping to URN TBD + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + res.get(0), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); + }); + REL_TO_DATASET.put( + JDBCRelation.class, + (r, ctx, datahubConfig) -> { + JDBCRelation rel = (JDBCRelation) r; + Option<String> tbl = rel.jdbcOptions().parameters().get(JDBCOptions.JDBC_TABLE_NAME()); + if (tbl.isEmpty()) { + return Optional.empty(); } - return null; - } - }); - return datasets.isEmpty() ? 
Optional.empty() : Optional.of(datasets); - }); + + return Optional.of( + Collections.singletonList( + new JdbcDataset( + rel.jdbcOptions().url(), + tbl.get(), + getCommonPlatformInstance(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + InMemoryRelation.class, + (plan, ctx, datahubConfig) -> { + SparkPlan cachedPlan = ((InMemoryRelation) plan).cachedPlan(); + ArrayList<SparkDataset> datasets = new ArrayList<>(); + cachedPlan + .collectLeaves() + .toList() + .foreach( + new AbstractFunction1<SparkPlan, Void>() { + + @Override + public Void apply(SparkPlan leafPlan) { + + if (SPARKPLAN_TO_DATASET.containsKey(leafPlan.getClass())) { + Optional<? extends Collection<SparkDataset>> dataset = + SPARKPLAN_TO_DATASET + .get(leafPlan.getClass()) + .fromSparkPlanNode(leafPlan, ctx, datahubConfig); + dataset.ifPresent(x -> datasets.addAll(x)); + } else { + log.error( + leafPlan.getClass() + + " is not yet supported. Please contact datahub team for further support."); + } + return null; + } + }); + return datasets.isEmpty() ? Optional.empty() : Optional.of(datasets); + }); } - static Optional<? extends Collection<SparkDataset>> asDataset(LogicalPlan logicalPlan, SparkContext ctx, - boolean outputNode) { + static Optional<? extends Collection<SparkDataset>> asDataset( + LogicalPlan logicalPlan, SparkContext ctx, boolean outputNode) { if (!outputNode && OUTPUT_CMD.contains(logicalPlan.getClass())) { return Optional.empty(); } if (!PLAN_TO_DATASET.containsKey(logicalPlan.getClass())) { - log.error(logicalPlan.getClass() + " is not supported yet. Please contact datahub team for further support. "); + log.error( + logicalPlan.getClass() + + " is not supported yet. Please contact datahub team for further support. "); return Optional.empty(); } Config datahubconfig = LineageUtils.parseSparkConfig(); - return PLAN_TO_DATASET.get(logicalPlan.getClass()).fromPlanNode(logicalPlan, ctx, datahubconfig); + return PLAN_TO_DATASET + .get(logicalPlan.getClass()) + .fromPlanNode(logicalPlan, ctx, datahubconfig); } private static Path getDirectoryPath(Path p, Configuration hadoopConf) { @@ -255,9 +343,10 @@ private static Path getDirectoryPath(Path p, Configuration hadoopConf) { } private static FabricType getCommonFabricType(Config datahubConfig) { - String fabricTypeString = datahubConfig.hasPath(DATASET_ENV_KEY) - ? datahubConfig.getString(DATASET_ENV_KEY).toUpperCase() - : "PROD"; + String fabricTypeString = + datahubConfig.hasPath(DATASET_ENV_KEY) + ? datahubConfig.getString(DATASET_ENV_KEY).toUpperCase() + : "PROD"; FabricType fabricType = null; try { fabricType = FabricType.valueOf(fabricTypeString); @@ -269,22 +358,26 @@ private static FabricType getCommonFabricType(Config datahubConfig) { } private static String getCommonPlatformInstance(Config datahubConfig) { - return datahubConfig.hasPath(DATASET_PLATFORM_INSTANCE_KEY) ? datahubConfig.getString(DATASET_PLATFORM_INSTANCE_KEY) + return datahubConfig.hasPath(DATASET_PLATFORM_INSTANCE_KEY) + ? datahubConfig.getString(DATASET_PLATFORM_INSTANCE_KEY) : null; } private static String getTableHivePlatformAlias(Config datahubConfig) { - return datahubConfig.hasPath(TABLE_HIVE_PLATFORM_ALIAS) ? datahubConfig.getString(TABLE_HIVE_PLATFORM_ALIAS) - : "hive"; + return datahubConfig.hasPath(TABLE_HIVE_PLATFORM_ALIAS) + ? datahubConfig.getString(TABLE_HIVE_PLATFORM_ALIAS) + : "hive"; } private static boolean getIncludeScheme(Config datahubConfig) { - return datahubConfig.hasPath(INCLUDE_SCHEME_KEY) ? 
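// All of the accessors in this file share one hasPath-or-default shape over
// the Config built from spark.datahub.* properties. A hypothetical generic
// helper (not in this patch) that captures the pattern:
private static <T> T configOrDefault(
    Config conf, String key, T fallback, java.util.function.Function<String, T> reader) {
  return conf.hasPath(key) ? reader.apply(key) : fallback;
}
// e.g. getTableHivePlatformAlias(conf) would reduce to
//   configOrDefault(conf, TABLE_HIVE_PLATFORM_ALIAS, "hive", conf::getString);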
datahubConfig.getBoolean(INCLUDE_SCHEME_KEY) + return datahubConfig.hasPath(INCLUDE_SCHEME_KEY) + ? datahubConfig.getBoolean(INCLUDE_SCHEME_KEY) : true; } private static String getRemovePartitionPattern(Config datahubConfig) { - return datahubConfig.hasPath(REMOVE_PARTITION_PATTERN) ? datahubConfig.getString(REMOVE_PARTITION_PATTERN) + return datahubConfig.hasPath(REMOVE_PARTITION_PATTERN) + ? datahubConfig.getString(REMOVE_PARTITION_PATTERN) : null; } } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java index 6ddc5729d88f6..4e6eadc61bae0 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java @@ -1,30 +1,27 @@ package datahub.spark.consumer.impl; -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - -import com.linkedin.data.template.StringMap; - import com.linkedin.common.DataJobUrnArray; import com.linkedin.common.DatasetUrnArray; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataJobInfo; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.datajob.JobStatus; import com.typesafe.config.Config; - import datahub.event.MetadataChangeProposalWrapper; import datahub.spark.model.AppEndEvent; import datahub.spark.model.AppStartEvent; import datahub.spark.model.LineageEvent; import datahub.spark.model.SQLQueryExecStartEvent; +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -37,7 +34,8 @@ public class CoalesceJobsEmitter extends McpEmitter { public CoalesceJobsEmitter(Config datahubConf) { super(datahubConf); - parentJobUrn = datahubConf.hasPath(PARENT_JOB_KEY) ? datahubConf.getString(PARENT_JOB_KEY) : null; + parentJobUrn = + datahubConf.hasPath(PARENT_JOB_KEY) ? datahubConf.getString(PARENT_JOB_KEY) : null; log.info("CoalesceJobsEmitter initialised with " + PARENT_JOB_KEY + ":" + parentJobUrn); } @@ -50,13 +48,21 @@ public void accept(LineageEvent evt) { } else if (evt instanceof SQLQueryExecStartEvent) { SQLQueryExecStartEvent sqlQueryExecStartEvent = (SQLQueryExecStartEvent) evt; sqlQueryExecStartEvents.add(sqlQueryExecStartEvent); - log.debug("SQLQueryExecStartEvent received for processing. for app: " + sqlQueryExecStartEvent.getAppId() + ":" - + sqlQueryExecStartEvent.getAppName() + "sqlID: " + sqlQueryExecStartEvent.getSqlQueryExecId()); + log.debug( + "SQLQueryExecStartEvent received for processing. 
for app: " + + sqlQueryExecStartEvent.getAppId() + + ":" + + sqlQueryExecStartEvent.getAppName() + + "sqlID: " + + sqlQueryExecStartEvent.getSqlQueryExecId()); } else if (evt instanceof AppEndEvent) { AppEndEvent appEndEvent = (AppEndEvent) evt; if (appStartEvent == null) { - log.error("Application End event received for processing but start event is not received for processing for " - + appEndEvent.getAppId() + "-" + appEndEvent.getAppName()); + log.error( + "Application End event received for processing but start event is not received for processing for " + + appEndEvent.getAppId() + + "-" + + appEndEvent.getAppName()); return; } log.debug("AppEndEvent received for processing. for app start :" + appEndEvent.getAppId()); @@ -65,7 +71,8 @@ public void accept(LineageEvent evt) { } } - private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents(AppEndEvent appEndEvent) { + private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents( + AppEndEvent appEndEvent) { DataJobUrn jobUrn = new DataJobUrn(appStartEvent.getFlowUrn(), appStartEvent.getAppName()); @@ -85,11 +92,15 @@ private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents(AppEnd log.warn(PARENT_JOB_KEY + " is not a valid Datajob URN. Skipping setting up upstream job."); } - DataJobInputOutput jobio = new DataJobInputOutput().setInputDatasets(new DatasetUrnArray(inSet)) - .setOutputDatasets(new DatasetUrnArray(outSet)).setInputDatajobs(upStreamjobs); + DataJobInputOutput jobio = + new DataJobInputOutput() + .setInputDatasets(new DatasetUrnArray(inSet)) + .setOutputDatasets(new DatasetUrnArray(outSet)) + .setInputDatajobs(upStreamjobs); - MetadataChangeProposalWrapper<?> mcpJobIO = MetadataChangeProposalWrapper - .create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobio)); + MetadataChangeProposalWrapper<?> mcpJobIO = + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobio)); StringMap customProps = new StringMap(); customProps.put("startedAt", appStartEvent.timeStr()); @@ -97,15 +108,17 @@ private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents(AppEnd customProps.put("appName", appStartEvent.getAppName()); customProps.put("completedAt", appEndEvent.timeStr()); - DataJobInfo jobInfo = new DataJobInfo().setName(appStartEvent.getAppName()) - .setType(DataJobInfo.Type.create("sparkJob")); + DataJobInfo jobInfo = + new DataJobInfo() + .setName(appStartEvent.getAppName()) + .setType(DataJobInfo.Type.create("sparkJob")); jobInfo.setCustomProperties(customProps); jobInfo.setStatus(JobStatus.COMPLETED); - MetadataChangeProposalWrapper<?> mcpJobInfo = MetadataChangeProposalWrapper - .create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); + MetadataChangeProposalWrapper<?> mcpJobInfo = + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); return Arrays.asList(mcpJobIO, mcpJobInfo); - } @Override @@ -120,5 +133,4 @@ class DataSetUrnComparator implements Comparator<DatasetUrn> { public int compare(DatasetUrn urn1, DatasetUrn urn2) { return urn1.toString().compareTo(urn2.toString()); } - -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java index 336246fa9d3e8..918ce48d1cf42 100644 --- 
a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java @@ -1,23 +1,20 @@ package datahub.spark.consumer.impl; -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; - import com.typesafe.config.Config; - import datahub.client.Emitter; import datahub.client.rest.RestEmitter; import datahub.client.rest.RestEmitterConfig; import datahub.event.MetadataChangeProposalWrapper; import datahub.spark.model.LineageConsumer; import datahub.spark.model.LineageEvent; +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class McpEmitter implements LineageConsumer { @@ -27,19 +24,21 @@ public class McpEmitter implements LineageConsumer { private static final String GMS_URL_KEY = "rest.server"; private static final String GMS_AUTH_TOKEN = "rest.token"; private static final String DISABLE_SSL_VERIFICATION_KEY = "rest.disable_ssl_verification"; + private Optional<Emitter> getEmitter() { Optional<Emitter> emitter = Optional.empty(); switch (emitterType) { - case "rest": - if (restEmitterConfig.isPresent()) { - emitter = Optional.of(new RestEmitter(restEmitterConfig.get())); - } - break; - - default: - log.error("DataHub Transport {} not recognized. DataHub Lineage emission will not work", emitterType); - break; - + case "rest": + if (restEmitterConfig.isPresent()) { + emitter = Optional.of(new RestEmitter(restEmitterConfig.get())); + } + break; + + default: + log.error( + "DataHub Transport {} not recognized. DataHub Lineage emission will not work", + emitterType); + break; } return emitter; } @@ -47,22 +46,28 @@ private Optional<Emitter> getEmitter() { protected void emit(List<MetadataChangeProposalWrapper> mcpws) { Optional<Emitter> emitter = getEmitter(); if (emitter.isPresent()) { - mcpws.stream().map(mcpw -> { - try { - log.debug("emitting mcpw: " + mcpw); - return emitter.get().emit(mcpw); - } catch (IOException ioException) { - log.error("Failed to emit metadata to DataHub", ioException); - return null; - } - }).filter(Objects::nonNull).collect(Collectors.toList()).forEach(future -> { - try { - log.info(future.get().toString()); - } catch (InterruptedException | ExecutionException e) { - // log error, but don't impact thread - log.error("Failed to emit metadata to DataHub", e); - } - }); + mcpws.stream() + .map( + mcpw -> { + try { + log.debug("emitting mcpw: " + mcpw); + return emitter.get().emit(mcpw); + } catch (IOException ioException) { + log.error("Failed to emit metadata to DataHub", ioException); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()) + .forEach( + future -> { + try { + log.info(future.get().toString()); + } catch (InterruptedException | ExecutionException e) { + // log error, but don't impact thread + log.error("Failed to emit metadata to DataHub", e); + } + }); try { emitter.get().close(); } catch (IOException e) { @@ -72,31 +77,45 @@ protected void emit(List<MetadataChangeProposalWrapper> mcpws) { } public McpEmitter(Config datahubConf) { - emitterType = datahubConf.hasPath(TRANSPORT_KEY) ? 
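// Condensed sketch of the flow McpEmitter implements: build a
// RestEmitterConfig, wrap it in a RestEmitter, emit one
// MetadataChangeProposalWrapper, and block on the returned future. The server
// URL is a placeholder, `mcpw` is assumed to be built elsewhere with the same
// create(b -> ...) builder used throughout this patch, and checked exceptions
// (IOException, InterruptedException, ExecutionException) are elided.
Emitter emitter =
    new RestEmitter(
        RestEmitterConfig.builder()
            .server("http://localhost:8080")
            .token(null)
            .disableSslVerification(false)
            .build());
try {
  emitter.emit(mcpw).get(); // wait for the HTTP round trip, as emit() above does
} finally {
  emitter.close();
}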
datahubConf.getString(TRANSPORT_KEY) : "rest"; - switch (emitterType) { + emitterType = + datahubConf.hasPath(TRANSPORT_KEY) ? datahubConf.getString(TRANSPORT_KEY) : "rest"; + switch (emitterType) { case "rest": - String gmsUrl = datahubConf.hasPath(GMS_URL_KEY) ? datahubConf.getString(GMS_URL_KEY) - : "http://localhost:8080"; - String token = datahubConf.hasPath(GMS_AUTH_TOKEN) ? datahubConf.getString(GMS_AUTH_TOKEN) : null; - boolean disableSslVerification = datahubConf.hasPath(DISABLE_SSL_VERIFICATION_KEY) ? datahubConf.getBoolean( - DISABLE_SSL_VERIFICATION_KEY) : false; - log.info("REST Emitter Configuration: GMS url {}{}", gmsUrl, - (datahubConf.hasPath(GMS_URL_KEY) ? "" : "(default)")); - if (token != null) { - log.info("REST Emitter Configuration: Token {}", (token != null) ? "XXXXX" : "(empty)"); - } - if (disableSslVerification) { - log.warn("REST Emitter Configuration: ssl verification will be disabled."); - } - restEmitterConfig = Optional.of(RestEmitterConfig.builder() - .server(gmsUrl).token(token) - .disableSslVerification(disableSslVerification).build()); - - break; + String gmsUrl = + datahubConf.hasPath(GMS_URL_KEY) + ? datahubConf.getString(GMS_URL_KEY) + : "http://localhost:8080"; + String token = + datahubConf.hasPath(GMS_AUTH_TOKEN) ? datahubConf.getString(GMS_AUTH_TOKEN) : null; + boolean disableSslVerification = + datahubConf.hasPath(DISABLE_SSL_VERIFICATION_KEY) + ? datahubConf.getBoolean(DISABLE_SSL_VERIFICATION_KEY) + : false; + log.info( + "REST Emitter Configuration: GMS url {}{}", + gmsUrl, + (datahubConf.hasPath(GMS_URL_KEY) ? "" : "(default)")); + if (token != null) { + log.info("REST Emitter Configuration: Token {}", (token != null) ? "XXXXX" : "(empty)"); + } + if (disableSslVerification) { + log.warn("REST Emitter Configuration: ssl verification will be disabled."); + } + restEmitterConfig = + Optional.of( + RestEmitterConfig.builder() + .server(gmsUrl) + .token(token) + .disableSslVerification(disableSslVerification) + .build()); + + break; default: - log.error("DataHub Transport {} not recognized. DataHub Lineage emission will not work", emitterType); - break; - } + log.error( + "DataHub Transport {} not recognized. 
DataHub Lineage emission will not work", + emitterType); + break; + } } @Override @@ -107,8 +126,6 @@ public void accept(LineageEvent evt) { @Override public void close() throws IOException { // Nothing to close at this point - - } - -} \ No newline at end of file + } +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java index 64aef77ddce2f..ac4d3a96308f3 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.ToString; - @ToString @Getter public class AppEndEvent extends LineageEvent { @@ -28,9 +27,11 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() { StringMap customProps = start.customProps(); customProps.put("completedAt", timeStr()); - DataFlowInfo flowInfo = new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps); + DataFlowInfo flowInfo = + new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps); - return Collections.singletonList(MetadataChangeProposalWrapper.create( - b -> b.entityType("dataFlow").entityUrn(flowUrn).upsert().aspect(flowInfo))); + return Collections.singletonList( + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataFlow").entityUrn(flowUrn).upsert().aspect(flowInfo))); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java index 393de44164ac2..b7f9b462c409f 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java @@ -1,19 +1,17 @@ package datahub.spark.model; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; - import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataFlowInfo; import com.typesafe.config.Config; - import datahub.event.MetadataChangeProposalWrapper; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; import lombok.Getter; import lombok.ToString; import lombok.extern.slf4j.Slf4j; @@ -28,7 +26,12 @@ public class AppStartEvent extends LineageEvent { private final String sparkUser; private Config pipelineConfig; - public AppStartEvent(String master, String appName, String appId, long time, String sparkUser, + public AppStartEvent( + String master, + String appName, + String appId, + long time, + String sparkUser, Config pipelineConfig) { super(master, appName, appId, time); this.sparkUser = sparkUser; @@ -38,18 +41,22 @@ public AppStartEvent(String master, String appName, String appId, long time, Str public DataFlowUrn getFlowUrn() { return LineageUtils.flowUrn(getMaster(), getAppName()); } - + @Override public List<MetadataChangeProposalWrapper> asMetadataEvents() { ArrayList<MetadataChangeProposalWrapper> mcps = new ArrayList<MetadataChangeProposalWrapper>(); if 
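// Sketch of the aspect this branch emits when a pipeline platformInstance is
// configured: a DataPlatformInstance tying the Spark DataFlow to a named
// instance. "my_cluster" is a placeholder value; dataPlatformInstanceUrn can
// throw URISyntaxException, which the surrounding code logs and swallows.
DataPlatformInstance dpi =
    new DataPlatformInstance()
        .setPlatform(new DataPlatformUrn("spark"))
        .setInstance(LineageUtils.dataPlatformInstanceUrn("spark", "my_cluster"));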
(this.pipelineConfig.hasPath(PLATFORM_INSTANCE_KEY)) { try { - DataPlatformInstance dpi = new DataPlatformInstance().setPlatform(new DataPlatformUrn(PLATFORM_SPARK)) - .setInstance(LineageUtils.dataPlatformInstanceUrn(PLATFORM_SPARK, - this.pipelineConfig.getString(PLATFORM_INSTANCE_KEY))); - mcps.add(MetadataChangeProposalWrapper - .create(b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(dpi))); + DataPlatformInstance dpi = + new DataPlatformInstance() + .setPlatform(new DataPlatformUrn(PLATFORM_SPARK)) + .setInstance( + LineageUtils.dataPlatformInstanceUrn( + PLATFORM_SPARK, this.pipelineConfig.getString(PLATFORM_INSTANCE_KEY))); + mcps.add( + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(dpi))); } catch (URISyntaxException e) { // log error, but don't impact thread StringWriter s = new StringWriter(); @@ -59,9 +66,11 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() { p.close(); } } - DataFlowInfo flowInfo = new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps()); - mcps.add(MetadataChangeProposalWrapper - .create(b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(flowInfo))); + DataFlowInfo flowInfo = + new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps()); + mcps.add( + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(flowInfo))); return mcps; } @@ -73,4 +82,4 @@ StringMap customProps() { customProps.put("sparkUser", sparkUser); return customProps; } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java index 9583ab69a2d73..996a911ced9f9 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java @@ -1,11 +1,9 @@ package datahub.spark.model; +import datahub.spark.model.dataset.SparkDataset; import java.util.Collections; import java.util.HashSet; import java.util.Set; - -import datahub.spark.model.dataset.SparkDataset; - import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; @@ -16,14 +14,11 @@ public class DatasetLineage { private final Set<SparkDataset> sources = new HashSet<>(); - @Getter - private final String callSiteShort; - - @Getter - private final String plan; + @Getter private final String callSiteShort; + + @Getter private final String plan; - @Getter - private final SparkDataset sink; + @Getter private final SparkDataset sink; public void addSource(SparkDataset source) { sources.add(source); diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java index 890ed6329c47b..aa2d998ea5c99 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java @@ -3,5 +3,4 @@ import java.io.Closeable; import java.util.function.Consumer; -public interface LineageConsumer extends Consumer<LineageEvent>, Closeable { -} +public interface LineageConsumer extends Consumer<LineageEvent>, Closeable {} diff --git 
a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java index 37b949a454b0d..a88474650c510 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java @@ -1,9 +1,8 @@ package datahub.spark.model; +import datahub.event.MetadataChangeProposalWrapper; import java.util.Date; import java.util.List; - -import datahub.event.MetadataChangeProposalWrapper; import lombok.Data; @Data @@ -18,4 +17,4 @@ public abstract class LineageEvent { public String timeStr() { return new Date(getTime()).toInstant().toString(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java index ad837f034ad64..ad628666a263d 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java @@ -8,15 +8,12 @@ import com.linkedin.common.urn.Urn; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; - import java.net.URISyntaxException; import java.util.Arrays; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; - import lombok.extern.slf4j.Slf4j; - import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.SparkContext$; @@ -35,33 +32,38 @@ public class LineageUtils { /* This is for generating urn from a hash of the plan */ // private static Function<String, String> PATH_REPLACER = (x -> x); - private LineageUtils() { - - } + private LineageUtils() {} - public static Urn dataPlatformInstanceUrn(String platform, String instance) throws URISyntaxException { - return new Urn("dataPlatformInstance", + public static Urn dataPlatformInstanceUrn(String platform, String instance) + throws URISyntaxException { + return new Urn( + "dataPlatformInstance", new TupleKey(Arrays.asList(new DataPlatformUrn(platform).toString(), instance))); } public static DataFlowUrn flowUrn(String master, String appName) { - return new DataFlowUrn("spark", appName, master.replaceAll(":", "_").replaceAll("/", "_").replaceAll("[_]+", "_")); + return new DataFlowUrn( + "spark", appName, master.replaceAll(":", "_").replaceAll("/", "_").replaceAll("[_]+", "_")); } public static Option<SparkContext> findSparkCtx() { - return SparkSession.getActiveSession().map(new AbstractFunction1<SparkSession, SparkContext>() { - - @Override - public SparkContext apply(SparkSession sess) { - return sess.sparkContext(); - } - }).orElse(new AbstractFunction0<Option<SparkContext>>() { - - @Override - public Option<SparkContext> apply() { - return SparkContext$.MODULE$.getActive(); - } - }); + return SparkSession.getActiveSession() + .map( + new AbstractFunction1<SparkSession, SparkContext>() { + + @Override + public SparkContext apply(SparkSession sess) { + return sess.sparkContext(); + } + }) + .orElse( + new AbstractFunction0<Option<SparkContext>>() { + + @Override + public Option<SparkContext> apply() { + return SparkContext$.MODULE$.getActive(); + } + }); } public static String getMaster(SparkContext ctx) { @@ -79,14 +81,16 @@ public static LineageConsumer getConsumer(String 
consumerType) { public static Config parseSparkConfig() { SparkConf conf = SparkEnv.get().conf(); - String propertiesString = Arrays.stream(conf.getAllWithPrefix("spark.datahub.")) - .map(tup -> tup._1 + "= \"" + tup._2 + "\"").collect(Collectors.joining("\n")); + String propertiesString = + Arrays.stream(conf.getAllWithPrefix("spark.datahub.")) + .map(tup -> tup._1 + "= \"" + tup._2 + "\"") + .collect(Collectors.joining("\n")); return ConfigFactory.parseString(propertiesString); } // TODO: URN creation with platform instance needs to be inside DatasetUrn class - public static DatasetUrn createDatasetUrn(String platform, String platformInstance, String name, - FabricType fabricType) { + public static DatasetUrn createDatasetUrn( + String platform, String platformInstance, String name, FabricType fabricType) { String datasteName = platformInstance == null ? name : platformInstance + "." + name; return new DatasetUrn(new DataPlatformUrn(platform), datasteName, fabricType); } @@ -103,10 +107,10 @@ public static DatasetUrn createDatasetUrn(String platform, String platformInstan * ""); s = s.replaceAll("Statistics:[^\n]+\n", ""); s = * s.replaceAll("Table Properties:[^\n]+\n", ""); // * System.out.println("CLEAN: " + s); return s; } - * + * * public static void setPathReplacer(Function<String, String> replacer) { * PATH_REPLACER = replacer; } - * + * * public static String hash(String s) { s = PATH_REPLACER.apply(s); * log.debug("PATH REPLACED " + s); return Hashing.md5().hashString(s, * Charset.forName("US-ASCII")).toString(); } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java index 6505cd586b2b5..17d5b941bced2 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.ToString; - @ToString @Getter public class SQLQueryExecEndEvent extends LineageEvent { @@ -17,7 +16,12 @@ public class SQLQueryExecEndEvent extends LineageEvent { private final long sqlQueryExecId; private final SQLQueryExecStartEvent start; - public SQLQueryExecEndEvent(String master, String appName, String appId, long time, long sqlQueryExecId, + public SQLQueryExecEndEvent( + String master, + String appName, + String appId, + long time, + long sqlQueryExecId, SQLQueryExecStartEvent start) { super(master, appName, appId, time); this.sqlQueryExecId = sqlQueryExecId; @@ -33,6 +37,7 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() { DataJobInfo jobInfo = start.jobInfo().setCustomProperties(customProps); return Collections.singletonList( - MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo))); + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo))); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java index 0919f40c7e1c9..dbd56a59838bc 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java +++ 
b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java @@ -4,11 +4,11 @@ import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.data.template.StringMap; -import datahub.spark.model.dataset.SparkDataset; import com.linkedin.datajob.DataJobInfo; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.datajob.JobStatus; import datahub.event.MetadataChangeProposalWrapper; +import datahub.spark.model.dataset.SparkDataset; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -17,14 +17,18 @@ import lombok.Getter; import lombok.ToString; - @ToString @Getter public class SQLQueryExecStartEvent extends LineageEvent { private final long sqlQueryExecId; private final DatasetLineage datasetLineage; - public SQLQueryExecStartEvent(String master, String appName, String appId, long time, long sqlQueryExecId, + public SQLQueryExecStartEvent( + String master, + String appName, + String appId, + long time, + long sqlQueryExecId, DatasetLineage datasetLineage) { super(master, appName, appId, time); this.sqlQueryExecId = sqlQueryExecId; @@ -35,20 +39,24 @@ public SQLQueryExecStartEvent(String master, String appName, String appId, long public List<MetadataChangeProposalWrapper> asMetadataEvents() { DataJobUrn jobUrn = jobUrn(); MetadataChangeProposalWrapper mcpJobIO = - MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobIO())); + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobIO())); DataJobInfo jobInfo = jobInfo(); jobInfo.setCustomProperties(customProps()); jobInfo.setStatus(JobStatus.IN_PROGRESS); MetadataChangeProposalWrapper mcpJobInfo = - MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); return Arrays.asList(mcpJobIO, mcpJobInfo); } DataJobInfo jobInfo() { - return new DataJobInfo().setName(datasetLineage.getCallSiteShort()).setType(DataJobInfo.Type.create("sparkJob")); + return new DataJobInfo() + .setName(datasetLineage.getCallSiteShort()) + .setType(DataJobInfo.Type.create("sparkJob")); } DataJobUrn jobUrn() { @@ -91,12 +99,14 @@ public DatasetUrnArray getOuputDatasets() { public DatasetUrnArray getInputDatasets() { DatasetUrnArray in = new DatasetUrnArray(); - Set<SparkDataset> sources = new TreeSet<>(new Comparator<SparkDataset>() { - @Override - public int compare(SparkDataset x, SparkDataset y) { - return x.urn().toString().compareTo(y.urn().toString()); - } - }); + Set<SparkDataset> sources = + new TreeSet<>( + new Comparator<SparkDataset>() { + @Override + public int compare(SparkDataset x, SparkDataset y) { + return x.urn().toString().compareTo(y.urn().toString()); + } + }); sources.addAll(datasetLineage.getSources()); // maintain ordering for (SparkDataset source : sources) { in.add(source.urn()); @@ -106,8 +116,10 @@ public int compare(SparkDataset x, SparkDataset y) { } private DataJobInputOutput jobIO() { - DataJobInputOutput io = new DataJobInputOutput().setInputDatasets(getInputDatasets()) - .setOutputDatasets(getOuputDatasets()); + DataJobInputOutput io = + new DataJobInputOutput() + .setInputDatasets(getInputDatasets()) + .setOutputDatasets(getOuputDatasets()); return io; } -} \ No newline at end of file +} diff --git 
a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java index 47552c69d78c4..13f70392f5bf5 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java @@ -1,20 +1,19 @@ package datahub.spark.model.dataset; -import org.apache.spark.sql.catalyst.catalog.CatalogTable; - import com.linkedin.common.FabricType; - import lombok.ToString; +import org.apache.spark.sql.catalyst.catalog.CatalogTable; @ToString public class CatalogTableDataset extends SparkDataset { - public CatalogTableDataset(CatalogTable table, String platformInstance, String platform, FabricType fabricType) { + public CatalogTableDataset( + CatalogTable table, String platformInstance, String platform, FabricType fabricType) { this(table.qualifiedName(), platformInstance, platform, fabricType); } - public CatalogTableDataset(String dsName, String platformInstance, String platform, FabricType fabricType) { + public CatalogTableDataset( + String dsName, String platformInstance, String platform, FabricType fabricType) { super(platform, platformInstance, dsName, fabricType); } - } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java index 700aef5d6b15a..c9b05f6a1d22f 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java @@ -1,12 +1,9 @@ package datahub.spark.model.dataset; -import org.apache.hadoop.fs.Path; - import com.linkedin.common.FabricType; - -import lombok.ToString; - import java.net.URI; +import lombok.ToString; +import org.apache.hadoop.fs.Path; @ToString public class HdfsPathDataset extends SparkDataset { @@ -30,18 +27,22 @@ private static String getPlatform(Path path) { } public HdfsPathDataset( - Path path, - String platformInstance, - boolean includeScheme, - FabricType fabricType, - String removePartitionPattern) { + Path path, + String platformInstance, + boolean includeScheme, + FabricType fabricType, + String removePartitionPattern) { // TODO check static partitions? - this(getPath(path, includeScheme, removePartitionPattern), platformInstance, getPlatform(path), fabricType); + this( + getPath(path, includeScheme, removePartitionPattern), + platformInstance, + getPlatform(path), + fabricType); } - public HdfsPathDataset(String pathUri, String platformInstance, String platform, FabricType fabricType) { + public HdfsPathDataset( + String pathUri, String platformInstance, String platform, FabricType fabricType) { // TODO check static partitions? 
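// Illustrative constructions of the two dataset types above, using the
// constructor signatures shown in this patch. The paths, instance name, and
// FabricType.DEV are placeholders mirroring the tests later in this series.
// Behavior sketch: with includeScheme=false the "file:" prefix is dropped from
// the URN name, and removePartitionPattern (a regex, assumed here) strips
// partition directories before URN creation.
SparkDataset hdfs =
    new HdfsPathDataset(
        new Path("file:/tmp/data/out.csv"),
        "test_dev_dataset", // platformInstance
        true,               // includeScheme
        FabricType.DEV,
        null);              // removePartitionPattern
SparkDataset table =
    new CatalogTableDataset("sparktestdb.foo4", "test_dev_dataset", "hive", FabricType.DEV);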
super(platform, platformInstance, pathUri, fabricType); } - } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java index ea156b49fada7..1cdca6092bcb7 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java @@ -1,17 +1,16 @@ package datahub.spark.model.dataset; -import java.util.HashMap; -import java.util.Map; - import com.linkedin.common.FabricType; - import io.opentracing.contrib.jdbc.parser.URLParser; +import java.util.HashMap; +import java.util.Map; import lombok.ToString; @ToString public class JdbcDataset extends SparkDataset { - //TODO: Should map to the central location on datahub for platform names + // TODO: Should map to the central location on datahub for platform names private static final Map<String, String> PLATFORM_NAME_MAPPING = new HashMap<>(); + static { PLATFORM_NAME_MAPPING.put("postgresql", "postgres"); } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java index 546b737576e60..64e14f5e31542 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java @@ -2,16 +2,16 @@ import com.linkedin.common.FabricType; import com.linkedin.common.urn.DatasetUrn; - import datahub.spark.model.LineageUtils; import lombok.EqualsAndHashCode; @EqualsAndHashCode public abstract class SparkDataset { - + private DatasetUrn urn; - - public SparkDataset(String platform, String platformInstance, String name, FabricType fabricType) { + + public SparkDataset( + String platform, String platformInstance, String name, FabricType fabricType) { super(); this.urn = LineageUtils.createDatasetUrn(platform, platformInstance, name, fabricType); } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java index 447200d855a36..2df468fc03e74 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java @@ -3,13 +3,13 @@ import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; +import com.linkedin.common.FabricType; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.List; import java.util.Properties; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; @@ -29,8 +29,6 @@ import org.mockserver.socket.PortFactory; import org.mockserver.verify.VerificationTimes; -import com.linkedin.common.FabricType; - public class TestCoalesceJobLineage { private static final boolean MOCK_GMS = Boolean.valueOf("true"); // if false, MCPs get written to real GMS server (see GMS_PORT) @@ -59,29 +57,42 @@ public class TestCoalesceJobLineage { private static SparkSession spark; private static Properties 
jdbcConnnProperties; private static ClientAndServer mockServer; - @Rule - public TestRule mockServerWatcher = new TestWatcher() { - @Override - protected void finished(Description description) { - if (!VERIFY_EXPECTED) { - return; - } - verifyTestScenario(description.getMethodName()); - clear(); - super.finished(description); - } - }; + @Rule + public TestRule mockServerWatcher = + new TestWatcher() { + + @Override + protected void finished(Description description) { + if (!VERIFY_EXPECTED) { + return; + } + verifyTestScenario(description.getMethodName()); + clear(); + super.finished(description); + } + }; private static String addLocalPath(String s) { - return s.replaceAll("file:/" + RESOURCE_DIR, "file://" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); + return s.replaceAll( + "file:/" + RESOURCE_DIR, "file://" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); } public static void resetBaseExpectations() { - mockServer.when(request().withMethod("GET").withPath("/config").withHeader("Content-type", "application/json"), - Times.unlimited()).respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); mockServer - .when(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); + mockServer + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), Times.unlimited()) .respond(HttpResponse.response().withStatusCode(200)); } @@ -95,11 +106,16 @@ public static void initMockServer() { public static void verifyTestScenario(String testName) { String expectationFileName = testName + ".json"; try { - List<String> expected = Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName).toAbsolutePath()); + List<String> expected = + Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName).toAbsolutePath()); for (String content : expected) { String swappedContent = addLocalPath(content); - mockServer.verify(request().withMethod("POST").withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal").withBody(new JsonBody(swappedContent)), + mockServer.verify( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withBody(new JsonBody(swappedContent)), VerificationTimes.atLeast(1)); } } catch (IOException ioe) { @@ -112,23 +128,33 @@ public void setup() { resetBaseExpectations(); System.setProperty("user.dir", Paths.get("coalesce-test").toAbsolutePath().toString()); - spark = SparkSession.builder().appName(APP_NAME).config("spark.master", MASTER) - .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") - .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) - .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()).config("spark.datahub.coalesce_jobs", "true") - .config("spark.datahub.parent.datajob_urn", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") - .config("spark.sql.warehouse.dir", new 
File(WAREHOUSE_LOC).getAbsolutePath()).enableHiveSupport().getOrCreate(); + spark = + SparkSession.builder() + .appName(APP_NAME) + .config("spark.master", MASTER) + .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") + .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) + .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) + .config("spark.datahub.coalesce_jobs", "true") + .config( + "spark.datahub.parent.datajob_urn", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") + .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .enableHiveSupport() + .getOrCreate(); spark.sql("drop database if exists " + TEST_DB + " cascade"); spark.sql("create database " + TEST_DB); } private static void clear() { - mockServer - .clear(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal")); + mockServer.clear( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal")); } @After @@ -150,27 +176,44 @@ public static void verify(int numRequests) { return; } mockServer.verify( - request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), VerificationTimes.exactly(numRequests)); } @Test public void testHiveInHiveOutCoalesce() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(new File(DATA_DIR + "/in1.csv").getAbsolutePath()).withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(new File(DATA_DIR + "/in2.csv").getAbsolutePath()).withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(new File(DATA_DIR + "/in1.csv").getAbsolutePath()) + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(new File(DATA_DIR + "/in2.csv").getAbsolutePath()) + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); // CreateHiveTableAsSelectCommand spark.sql( - "create table " + tbl("foo_coalesce") + " as " + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + "create table " + + tbl("foo_coalesce") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); // CreateHiveTableAsSelectCommand - spark.sql("create table " + tbl("hivetab") + " as " + "(select * from " + tbl("foo_coalesce") + ")"); + spark.sql( + "create table " + tbl("hivetab") + " as " + "(select * from " + tbl("foo_coalesce") + ")"); // InsertIntoHiveTable spark.sql("insert into " + tbl("hivetab") + " (select * from " + tbl("foo_coalesce") + ")"); @@ -181,5 +224,4 @@ public void testHiveInHiveOutCoalesce() throws Exception { df.write().insertInto(tbl("hivetab")); Thread.sleep(5000); } - } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index 
b2280d171e378..3a70c10e0c1f9 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -5,6 +5,16 @@ import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; +import com.linkedin.common.FabricType; +import datahub.spark.model.DatasetLineage; +import datahub.spark.model.LineageConsumer; +import datahub.spark.model.LineageEvent; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.SQLQueryExecStartEvent; +import datahub.spark.model.dataset.CatalogTableDataset; +import datahub.spark.model.dataset.HdfsPathDataset; +import datahub.spark.model.dataset.JdbcDataset; +import datahub.spark.model.dataset.SparkDataset; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -18,7 +28,6 @@ import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -43,23 +52,11 @@ import org.mockserver.socket.PortFactory; import org.mockserver.verify.VerificationTimes; import org.testcontainers.containers.PostgreSQLContainer; - -import com.linkedin.common.FabricType; - -import datahub.spark.model.DatasetLineage; -import datahub.spark.model.LineageConsumer; -import datahub.spark.model.LineageEvent; -import datahub.spark.model.LineageUtils; -import datahub.spark.model.SQLQueryExecStartEvent; -import datahub.spark.model.dataset.CatalogTableDataset; -import datahub.spark.model.dataset.HdfsPathDataset; -import datahub.spark.model.dataset.JdbcDataset; -import datahub.spark.model.dataset.SparkDataset; import org.testcontainers.containers.wait.strategy.Wait; -//!!!! IMP !!!!!!!! -//Add the test number before naming the test. This will ensure that tests run in specified order. -//This is necessary to have fixed query execution numbers. Otherwise tests will fail. +// !!!! IMP !!!!!!!! +// Add the test number before naming the test. This will ensure that tests run in specified order. +// This is necessary to have fixed query execution numbers. Otherwise tests will fail. 
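// The listener wiring these tests exercise, condensed into a standalone
// session sketch. Every spark.datahub.* key shown here appears in the test
// setups in this patch; the GMS URL and instance names are placeholders.
SparkSession spark =
    SparkSession.builder()
        .appName("MyJob")
        .config("spark.master", "local")
        .config("spark.extraListeners", "datahub.spark.DatahubSparkListener")
        .config("spark.datahub.rest.server", "http://localhost:8080")
        .config("spark.datahub.metadata.dataset.platformInstance", "my_instance")
        .config("spark.datahub.metadata.dataset.env", "DEV")
        // Optional: collapse per-query jobs under one parent DataJob, as
        // TestCoalesceJobLineage does above.
        // .config("spark.datahub.coalesce_jobs", "true")
        // .config("spark.datahub.parent.datajob_urn", "urn:li:dataJob:(...)")
        .enableHiveSupport()
        .getOrCreate();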
@FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TestSparkJobsLineage { private static final boolean MOCK_GMS = Boolean.valueOf("true"); @@ -88,40 +85,53 @@ public class TestSparkJobsLineage { private static final String DATASET_PLATFORM_INSTANCE = "test_dev_dataset"; private static final String TABLE_PLATFORM = "hive"; - @ClassRule - public static PostgreSQLContainer<?> db; + @ClassRule public static PostgreSQLContainer<?> db; + static { - db = new PostgreSQLContainer<>("postgres:9.6.12") - .withDatabaseName("sparktestdb"); + db = new PostgreSQLContainer<>("postgres:9.6.12").withDatabaseName("sparktestdb"); db.waitingFor(Wait.forListeningPort()).withStartupTimeout(Duration.ofMinutes(15)).start(); } + private static SparkSession spark; private static Properties jdbcConnnProperties; private static DatasetLineageAccumulator acc; private static ClientAndServer mockServer; - @Rule - public TestRule mockServerWatcher = new TestWatcher() { - @Override - protected void finished(Description description) { - if (!VERIFY_EXPECTED) { - return; - } - verifyTestScenario(description.getMethodName()); - clear(); - super.finished(description); - } - }; + @Rule + public TestRule mockServerWatcher = + new TestWatcher() { + + @Override + protected void finished(Description description) { + if (!VERIFY_EXPECTED) { + return; + } + verifyTestScenario(description.getMethodName()); + clear(); + super.finished(description); + } + }; private static String addLocalPath(String s) { - return s.replaceAll("file:/" + RESOURCE_DIR, "file:" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); + return s.replaceAll( + "file:/" + RESOURCE_DIR, "file:" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); } public static void resetBaseExpectations() { - mockServer.when(request().withMethod("GET").withPath("/config").withHeader("Content-type", "application/json"), - Times.unlimited()).respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); mockServer - .when(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); + mockServer + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), Times.unlimited()) .respond(HttpResponse.response().withStatusCode(200)); } @@ -134,11 +144,16 @@ public static void init() { public static void verifyTestScenario(String testName) { String expectationFileName = testName + ".json"; try { - List<String> expected = Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName)); + List<String> expected = + Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName)); for (String content : expected) { String swappedContent = addLocalPath(content); - mockServer.verify(request().withMethod("POST").withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal").withBody(new JsonBody(swappedContent)), + mockServer.verify( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withBody(new JsonBody(swappedContent)), VerificationTimes.atLeast(1)); } } catch (IOException ioe) { @@ -151,7 +166,10 @@ public static void verify(int numRequests) { return; } mockServer.verify( - 
request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), VerificationTimes.exactly(numRequests)); } @@ -162,14 +180,19 @@ public static void setup() { LineageUtils.registerConsumer("accumulator", acc); init(); - spark = SparkSession.builder().appName(APP_NAME).config("spark.master", MASTER) - .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") - .config("spark.datahub.lineage.consumerTypes", "accumulator") - .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) - .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) - .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()).enableHiveSupport().getOrCreate(); + spark = + SparkSession.builder() + .appName(APP_NAME) + .config("spark.master", MASTER) + .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") + .config("spark.datahub.lineage.consumerTypes", "accumulator") + .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) + .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) + .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .enableHiveSupport() + .getOrCreate(); spark.sql("drop database if exists " + TEST_DB + " cascade"); spark.sql("create database " + TEST_DB); @@ -184,8 +207,11 @@ public static void setup() { } private static void clear() { - mockServer - .clear(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal")); + mockServer.clear( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal")); } @AfterClass @@ -222,7 +248,8 @@ private static DatasetLineage dsl(String callSite, SparkDataset sink, SparkDatas } private static HdfsPathDataset hdfsDs(String fileName) { - return new HdfsPathDataset("file:" + abs(DATA_DIR + "/" + fileName), DATASET_PLATFORM_INSTANCE, "hdfs", DATASET_ENV); + return new HdfsPathDataset( + "file:" + abs(DATA_DIR + "/" + fileName), DATASET_PLATFORM_INSTANCE, "hdfs", DATASET_ENV); } private static JdbcDataset pgDs(String tbl) { @@ -230,7 +257,8 @@ private static JdbcDataset pgDs(String tbl) { } private static CatalogTableDataset catTblDs(String tbl) { - return new CatalogTableDataset(tbl(tbl), DATASET_PLATFORM_INSTANCE, TABLE_PLATFORM, DATASET_ENV); + return new CatalogTableDataset( + tbl(tbl), DATASET_PLATFORM_INSTANCE, TABLE_PLATFORM, DATASET_ENV); } private static String tbl(String tbl) { @@ -259,8 +287,9 @@ public void test1HdfsInOut() throws Exception { df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); - Dataset<Row> df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + Dataset<Row> df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); // InsertIntoHadoopFsRelationCommand df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out.csv"); @@ -274,11 +303,21 @@ public void test1HdfsInOut() throws Exception { 
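// For reference, the setup() above also serves as a compact recipe for wiring
// the DataHub Spark listener in a real job. A minimal sketch, using only
// config keys that appear in setup(); the app name and server URL below are
// placeholders, not values from this patch:
//
//   SparkSession spark =
//       SparkSession.builder()
//           .appName("my-lineage-job")
//           .config("spark.extraListeners", "datahub.spark.DatahubSparkListener")
//           .config("spark.datahub.rest.server", "http://datahub-gms:8080")
//           .getOrCreate();
//
// Each Spark SQL query execution then emits lineage events; these tests route
// the events to the in-memory "accumulator" LineageConsumer registered via
// spark.datahub.lineage.consumerTypes, in addition to verifying the POSTs
// made to the mock GMS server.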
@Test public void test5HdfsInJdbcOut() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); Dataset<Row> df = df1.join(df2, "id").drop("id"); @@ -300,8 +339,13 @@ public void test3HdfsJdbcInJdbcOut() throws Exception { c.createStatement().execute("insert into foo2 values('a', 4);"); c.close(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo2", jdbcConnnProperties); @@ -320,16 +364,30 @@ public void test3HdfsJdbcInJdbcOut() throws Exception { @Test public void test2HdfsInHiveOut() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); Dataset<Row> df = df1.join(df2, "id").drop("id"); - df.write().mode(SaveMode.Overwrite).saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand - df.write().mode(SaveMode.Append).saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Append) + .saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand df.write().insertInto(tbl("foo4")); // InsertIntoHadoopFsRelationCommand Thread.sleep(5000); @@ -345,18 +403,31 @@ public void test2HdfsInHiveOut() throws Exception { @Test public void test4HiveInHiveOut() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); // CreateHiveTableAsSelectCommand spark.sql( - "create table " + tbl("foo5") + " as " + "(select v1.a, v1.b, v2.c, v2.d 
from v1 join v2 on v1.id = v2.id)"); + "create table " + + tbl("foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); check(dsl(catTblDs("foo5"), hdfsDs("in1.csv"), hdfsDs("in2.csv")), acc.getLineages().get(0)); @@ -388,13 +459,23 @@ public void test6HdfsJdbcInJdbcOutTwoLevel() throws Exception { c.createStatement().execute("insert into foo6 values('a', 4);"); c.close(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo6", jdbcConnnProperties); - Dataset<Row> df3 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b3"); + Dataset<Row> df3 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b3"); Dataset<Row> df = df1.join(df2, "a").drop("id").join(df3, "a"); @@ -402,7 +483,9 @@ public void test6HdfsJdbcInJdbcOutTwoLevel() throws Exception { // JDBCRelation input df.write().mode(SaveMode.Overwrite).jdbc(db.getJdbcUrl(), "foo7", jdbcConnnProperties); Thread.sleep(5000); - check(dsl(pgDs("foo7"), hdfsDs("in1.csv"), hdfsDs("in2.csv"), pgDs("foo6")), acc.getLineages().get(0)); + check( + dsl(pgDs("foo7"), hdfsDs("in1.csv"), hdfsDs("in2.csv"), pgDs("foo6")), + acc.getLineages().get(0)); if (VERIFY_EXPECTED) { verify(1 * N); } @@ -413,16 +496,26 @@ public void test7HdfsInPersistHdfsOut() throws Exception { Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in3.csv"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in4.csv").withColumnRenamed("c2", "d") - .withColumnRenamed("c1", "c").withColumnRenamed("id", "id2"); - Dataset<Row> df = df1.join(df2, df1.col("id").equalTo(df2.col("id2")), "inner") - .filter(df1.col("id").equalTo("id_filter")).persist(StorageLevel.MEMORY_ONLY()); + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in4.csv") + .withColumnRenamed("c2", "d") + .withColumnRenamed("c1", "c") + .withColumnRenamed("id", "id2"); + Dataset<Row> df = + df1.join(df2, df1.col("id").equalTo(df2.col("id2")), "inner") + .filter(df1.col("id").equalTo("id_filter")) + .persist(StorageLevel.MEMORY_ONLY()); df.show(); // InsertIntoHadoopFsRelationCommand df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out_persist.csv"); Thread.sleep(5000); - check(dsl(hdfsDs("out_persist.csv"), hdfsDs("in3.csv"), hdfsDs("in4.csv")), acc.getLineages().get(0)); + check( + dsl(hdfsDs("out_persist.csv"), hdfsDs("in3.csv"), hdfsDs("in4.csv")), + acc.getLineages().get(0)); if (VERIFY_EXPECTED) { verify(1 * N); } @@ -436,10 +529,19 @@ public void test8PersistHdfsJdbcInJdbcOut() throws Exception { c.createStatement().execute("insert into foo8 values('a', 4);"); c.close(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); - Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties).persist(StorageLevel.MEMORY_ONLY()); + Dataset<Row> df2 = + spark + 
.read() + .jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties) + .persist(StorageLevel.MEMORY_ONLY()); Dataset<Row> df = df1.join(df2, "a"); @@ -452,19 +554,24 @@ public void test8PersistHdfsJdbcInJdbcOut() throws Exception { verify(1 * N); } } - - // This test cannot be executed individually. It depends upon previous tests to create tables in the database. + + // This test cannot be executed individually. It depends upon previous tests to create tables in + // the database. @Test public void test9PersistJdbcInHdfsOut() throws Exception { Connection c = db.createConnection(""); - + Dataset<Row> df1 = spark.read().jdbc(db.getJdbcUrl(), "foo9", jdbcConnnProperties); df1 = df1.withColumnRenamed("b", "b1"); - Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties).persist(StorageLevel.DISK_ONLY_2()); + Dataset<Row> df2 = + spark + .read() + .jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties) + .persist(StorageLevel.DISK_ONLY_2()); Dataset<Row> df = df1.join(df2, "a"); - + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out_persist.csv"); Thread.sleep(5000); check(dsl(hdfsDs("out_persist.csv"), pgDs("foo2"), pgDs("foo3")), acc.getLineages().get(0)); @@ -472,7 +579,7 @@ public void test9PersistJdbcInHdfsOut() throws Exception { verify(1 * N); } } - + private static class DatasetLineageAccumulator implements LineageConsumer { boolean closed = false; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java index acd8bff8c8c47..d1c1110329ad8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java @@ -6,36 +6,33 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; - +import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Objects; public class DeprecationUtils { - private DeprecationUtils() { } + private DeprecationUtils() {} - @Nullable - public static Deprecation getDeprecation( - @Nonnull EntityService entityService, - @Nonnull String urn, - Urn actor, - @Nullable String note, - boolean deprecated, - @Nullable Long decommissionTime - ) { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - urn, - Constants.DEPRECATION_ASPECT_NAME, - entityService, - new Deprecation()); - if (deprecation == null) { - return null; - } - deprecation.setActor(actor); - deprecation.setDeprecated(deprecated); - deprecation.setDecommissionTime(decommissionTime, SetMode.REMOVE_IF_NULL); - deprecation.setNote(Objects.requireNonNullElse(note, "")); - return deprecation; + @Nullable + public static Deprecation getDeprecation( + @Nonnull EntityService entityService, + @Nonnull String urn, + Urn actor, + @Nullable String note, + boolean deprecated, + @Nullable Long decommissionTime) { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + urn, Constants.DEPRECATION_ASPECT_NAME, entityService, new Deprecation()); + if (deprecation == null) { + return null; } + deprecation.setActor(actor); + deprecation.setDeprecated(deprecated); + deprecation.setDecommissionTime(decommissionTime, SetMode.REMOVE_IF_NULL); + deprecation.setNote(Objects.requireNonNullElse(note, "")); + return deprecation; + } } diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index dff9a22de8efd..53b974b560e2a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.client; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static com.linkedin.metadata.search.utils.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; @@ -70,578 +73,741 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static com.linkedin.metadata.search.utils.SearchUtils.*; - - @Slf4j @RequiredArgsConstructor public class JavaEntityClient implements EntityClient { - private static final int DEFAULT_RETRY_INTERVAL = 2; - private static final int DEFAULT_RETRY_COUNT = 3; - - private final static Set<String> NON_RETRYABLE = Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); - - private final Clock _clock = Clock.systemUTC(); - - private final EntityService _entityService; - private final DeleteEntityService _deleteEntityService; - private final EntitySearchService _entitySearchService; - private final CachingEntitySearchService _cachingEntitySearchService; - private final SearchService _searchService; - private final LineageSearchService _lineageSearchService; - private final TimeseriesAspectService _timeseriesAspectService; - private final EventProducer _eventProducer; - private final RestliEntityClient _restliEntityClient; - - @Nullable - public EntityResponse getV2( - @Nonnull String entityName, - @Nonnull final Urn urn, - @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set<String> projectedAspects = aspectNames == null - ? _entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntityV2(entityName, urn, projectedAspects); - } - - @Nonnull - public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { - return _entityService.getEntity(urn, ImmutableSet.of()); - } - - @Nonnull - @Override - public Map<Urn, EntityResponse> batchGetV2( - @Nonnull String entityName, - @Nonnull Set<Urn> urns, - @Nullable Set<String> aspectNames, - @Nonnull Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set<String> projectedAspects = aspectNames == null - ? _entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntitiesV2(entityName, urns, projectedAspects); - } - - @Nonnull - public Map<Urn, EntityResponse> batchGetVersionedV2( - @Nonnull String entityName, - @Nonnull final Set<VersionedUrn> versionedUrns, - @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set<String> projectedAspects = aspectNames == null - ? 
_entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntitiesVersionedV2(versionedUrns, projectedAspects); - } - - @Nonnull - public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) { - return _entityService.getEntities(urns, ImmutableSet.of()); - } - - /** - * Gets autocomplete results - * - * @param entityType the type of entity to autocomplete against - * @param query search query - * @param field field of the dataset to autocomplete against - * @param requestFilters autocomplete filters - * @param limit max number of autocomplete results - * @throws RemoteInvocationException - */ - @Nonnull - public AutoCompleteResult autoComplete( - @Nonnull String entityType, - @Nonnull String query, - @Nullable Filter requestFilters, - @Nonnull int limit, - @Nullable String field, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _cachingEntitySearchService.autoComplete(entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); - } - - /** - * Gets autocomplete results - * - * @param entityType the type of entity to autocomplete against - * @param query search query - * @param requestFilters autocomplete filters - * @param limit max number of autocomplete results - * @throws RemoteInvocationException - */ - @Nonnull - public AutoCompleteResult autoComplete( - @Nonnull String entityType, - @Nonnull String query, - @Nullable Filter requestFilters, - @Nonnull int limit, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _cachingEntitySearchService.autoComplete(entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); - } - - /** - * Gets autocomplete results - * - * @param entityType entity type being browse - * @param path path being browsed - * @param requestFilters browse filters - * @param start start offset of first dataset - * @param limit max number of datasets - * @throws RemoteInvocationException - */ - @Nonnull - public BrowseResult browse( - @Nonnull String entityType, - @Nonnull String path, - @Nullable Map<String, String> requestFilters, - int start, - int limit, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return ValidationUtils.validateBrowseResult( - _cachingEntitySearchService.browse(entityType, path, newFilter(requestFilters), start, limit, null), _entityService); - } - - - /** - * Gets browse V2 snapshot of a given path - * - * @param entityName entity being browsed - * @param path path being browsed - * @param filter browse filter - * @param input search query - * @param start start offset of first group - * @param count max number of results requested - * @throws RemoteInvocationException - */ - @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) { - // TODO: cache browseV2 results - return _entitySearchService.browseV2(entityName, path, filter, input, start, count); - } - - @SneakyThrows - @Deprecated - public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - Objects.requireNonNull(authentication, "authentication must not be null"); - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - 
auditStamp.setTime(Clock.systemUTC().millis()); - _entityService.ingestEntity(entity, auditStamp); - } - - @SneakyThrows - @Deprecated - public void updateWithSystemMetadata( - @Nonnull final Entity entity, - @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - if (systemMetadata == null) { - update(entity, authentication); - return; - } - - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - - _entityService.ingestEntity(entity, auditStamp, systemMetadata); - tryIndexRunId(com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()), systemMetadata); - } - - @SneakyThrows - @Deprecated - public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - _entityService.ingestEntities(entities.stream().collect(Collectors.toList()), auditStamp, ImmutableList.of()); - } - - /** - * Searches for entities matching to a given query and filters - * - * @param input search query - * @param requestFilters search filters - * @param start start offset for search results - * @param count max number of search results requested - * @param searchFlags - * @return a set of search results - * @throws RemoteInvocationException - */ - @Nonnull - @WithSpan - @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - - return ValidationUtils.validateSearchResult(_entitySearchService.search(List.of(entity), input, newFilter(requestFilters), - null, start, count, searchFlags), _entityService); - } - - /** - * Deprecated! Use 'filter' or 'search' instead. 
- * - * Filters for entities matching to a given query and filters - * - * @param requestFilters search filters - * @param start start offset for search results - * @param count max number of search results requested - * @return a set of list results - * @throws RemoteInvocationException - */ - @Deprecated - @Nonnull - public ListResult list( - @Nonnull String entity, - @Nullable Map<String, String> requestFilters, - int start, - int count, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateListResult(toListResult( - _entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), _entityService); - } - - /** - * Searches for datasets matching to a given query and filters - * - * @param input search query - * @param filter search filters - * @param sortCriterion sort criterion - * @param start start offset for search results - * @param count max number of search results requested - * @return Snapshot key - * @throws RemoteInvocationException - */ - @Nonnull - @Override - public SearchResult search( - @Nonnull String entity, - @Nonnull String input, - @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, - int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - return ValidationUtils.validateSearchResult( - _entitySearchService.search(List.of(entity), input, filter, sortCriterion, start, count, searchFlags), _entityService); - } - - @Nonnull - public SearchResult searchAcrossEntities( - @Nonnull List<String> entities, - @Nonnull String input, - @Nullable Filter filter, - int start, - int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return searchAcrossEntities(entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); - } - - /** - * Searches for entities matching to a given query and filters across multiple entity types - * - * @param entities entity types to search (if empty, searches all entities) - * @param input search query - * @param filter search filters - * @param start start offset for search results - * @param count max number of search results requested - * @param facets list of facets we want aggregations for - * @param sortCriterion sorting criterion - * @return Snapshot key - * @throws RemoteInvocationException - */ - @Nonnull - public SearchResult searchAcrossEntities( - @Nonnull List<String> entities, - @Nonnull String input, - @Nullable Filter filter, - int start, - int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication, - @Nullable List<String> facets) throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return ValidationUtils.validateSearchResult( - _searchService.searchAcrossEntities(entities, input, filter, sortCriterion, start, count, finalFlags, facets), _entityService); - } - - @Nonnull - @Override - public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, - @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) - throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); - return ValidationUtils.validateScrollResult( - _searchService.scrollAcrossEntities(entities, input, filter, null, scrollId, keepAlive, count, - finalFlags), _entityService); - } - - @Nonnull - @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, start, count, null, null, searchFlags), _entityService); - } - - @Nonnull - @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, start, count, startTimeMillis, endTimeMillis, searchFlags), _entityService); - } - - @Nonnull - @Override - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); - return ValidationUtils.validateLineageScrollResult( - _lineageSearchService.scrollAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, scrollId, keepAlive, count, startTimeMillis, endTimeMillis, finalFlags), _entityService); - } - - /** - * Gets browse path(s) given dataset urn - * - * @param urn urn for the entity - * @return list of paths given urn - * @throws RemoteInvocationException - */ - @Nonnull - public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return new StringArray(_entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); - } - - public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) throws RemoteInvocationException { - _entityService.setWritable(canWrite); - } - - @Nonnull - public Map<String, Long> batchGetTotalEntityCount( - @Nonnull List<String> entityNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _searchService.docCountPerEntity(entityNames); - } - - /** - * List all urns existing for a particular Entity type. 
- */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return _entityService.listUrns(entityName, start, count); - } - - /** - * Hard delete an entity with a particular urn. - */ - public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - _entityService.deleteUrn(urn); - } - - @Override - public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) - throws RemoteInvocationException { - withRetry(() -> _deleteEntityService.deleteReferencesTo(urn, false), "deleteEntityReferences"); - } - - @Nonnull - @Override - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull final Authentication authentication) throws RemoteInvocationException { - return ValidationUtils.validateSearchResult(_entitySearchService.filter(entity, filter, sortCriterion, start, count), - _entityService); - } - - @Override - public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return _entityService.exists(urn); - } - - @SneakyThrows - @Override - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - } - - @SneakyThrows - @Override - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - } - - @SneakyThrows - @Override - public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); - response.setEntityName(entity); - response.setAspectName(aspect); - if (startTimeMillis != null) { - response.setStartTimeMillis(startTimeMillis); + private static final int DEFAULT_RETRY_INTERVAL = 2; + private static final int DEFAULT_RETRY_COUNT = 3; + + private static final Set<String> NON_RETRYABLE = + Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); + + private final Clock _clock = Clock.systemUTC(); + + private final EntityService _entityService; + private final DeleteEntityService _deleteEntityService; + private final EntitySearchService _entitySearchService; + private final CachingEntitySearchService _cachingEntitySearchService; + private final SearchService _searchService; + private final LineageSearchService _lineageSearchService; + private final TimeseriesAspectService _timeseriesAspectService; + private final EventProducer _eventProducer; + private final RestliEntityClient _restliEntityClient; + + @Nullable + public EntityResponse getV2( + @Nonnull String entityName, + @Nonnull final Urn urn, + @Nullable final Set<String> aspectNames, + @Nonnull final Authentication authentication) + throws 
RemoteInvocationException, URISyntaxException { + final Set<String> projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntityV2(entityName, urn, projectedAspects); + } + + @Nonnull + public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { + return _entityService.getEntity(urn, ImmutableSet.of()); + } + + @Nonnull + @Override + public Map<Urn, EntityResponse> batchGetV2( + @Nonnull String entityName, + @Nonnull Set<Urn> urns, + @Nullable Set<String> aspectNames, + @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final Set<String> projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntitiesV2(entityName, urns, projectedAspects); + } + + @Nonnull + public Map<Urn, EntityResponse> batchGetVersionedV2( + @Nonnull String entityName, + @Nonnull final Set<VersionedUrn> versionedUrns, + @Nullable final Set<String> aspectNames, + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final Set<String> projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntitiesVersionedV2(versionedUrns, projectedAspects); + } + + @Nonnull + public Map<Urn, Entity> batchGet( + @Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) { + return _entityService.getEntities(urns, ImmutableSet.of()); + } + + /** + * Gets autocomplete results + * + * @param entityType the type of entity to autocomplete against + * @param query search query + * @param field field of the dataset to autocomplete against + * @param requestFilters autocomplete filters + * @param limit max number of autocomplete results + * @throws RemoteInvocationException + */ + @Nonnull + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _cachingEntitySearchService.autoComplete( + entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); + } + + /** + * Gets autocomplete results + * + * @param entityType the type of entity to autocomplete against + * @param query search query + * @param requestFilters autocomplete filters + * @param limit max number of autocomplete results + * @throws RemoteInvocationException + */ + @Nonnull + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _cachingEntitySearchService.autoComplete( + entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); + } + + /** + * Gets browse snapshot of a given path + * + * @param entityType entity type being browsed + * @param path path being browsed + * @param requestFilters browse filters + * @param start start offset of first dataset + * @param limit max number of datasets + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map<String, String> requestFilters, + int start, + int limit, + @Nonnull final Authentication authentication) + throws
RemoteInvocationException { + return ValidationUtils.validateBrowseResult( + _cachingEntitySearchService.browse( + entityType, path, newFilter(requestFilters), start, limit, null), + _entityService); + } + + /** + * Gets browse V2 snapshot of a given path + * + * @param entityName entity being browsed + * @param path path being browsed + * @param filter browse filter + * @param input search query + * @param start start offset of first group + * @param count max number of results requested + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) { + // TODO: cache browseV2 results + return _entitySearchService.browseV2(entityName, path, filter, input, start, count); + } + + @SneakyThrows + @Deprecated + public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + Objects.requireNonNull(authentication, "authentication must not be null"); + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + _entityService.ingestEntity(entity, auditStamp); + } + + @SneakyThrows + @Deprecated + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (systemMetadata == null) { + update(entity, authentication); + return; + } + + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + + _entityService.ingestEntity(entity, auditStamp, systemMetadata); + tryIndexRunId( + com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()), systemMetadata); + } + + @SneakyThrows + @Deprecated + public void batchUpdate( + @Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + _entityService.ingestEntities( + entities.stream().collect(Collectors.toList()), auditStamp, ImmutableList.of()); + } + + /** + * Searches for entities matching to a given query and filters + * + * @param input search query + * @param requestFilters search filters + * @param start start offset for search results + * @param count max number of search results requested + * @param searchFlags + * @return a set of search results + * @throws RemoteInvocationException + */ + @Nonnull + @WithSpan + @Override + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + + return ValidationUtils.validateSearchResult( + _entitySearchService.search( + List.of(entity), input, newFilter(requestFilters), null, start, count, searchFlags), + _entityService); + } + + /** + * Deprecated! Use 'filter' or 'search' instead. 
+ * + * <p>Filters for entities matching to a given query and filters + * + * @param requestFilters search filters + * @param start start offset for search results + * @param count max number of search results requested + * @return a set of list results + * @throws RemoteInvocationException + */ + @Deprecated + @Nonnull + public ListResult list( + @Nonnull String entity, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateListResult( + toListResult( + _entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), + _entityService); + } + + /** + * Searches for datasets matching to a given query and filters + * + * @param input search query + * @param filter search filters + * @param sortCriterion sort criterion + * @param start start offset for search results + * @param count max number of search results requested + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + @Override + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + return ValidationUtils.validateSearchResult( + _entitySearchService.search( + List.of(entity), input, filter, sortCriterion, start, count, searchFlags), + _entityService); + } + + @Nonnull + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return searchAcrossEntities( + entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + } + + /** + * Searches for entities matching to a given query and filters across multiple entity types + * + * @param entities entity types to search (if empty, searches all entities) + * @param input search query + * @param filter search filters + * @param start start offset for search results + * @param count max number of search results requested + * @param facets list of facets we want aggregations for + * @param sortCriterion sorting criterion + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication, + @Nullable List<String> facets) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateSearchResult( + _searchService.searchAcrossEntities( + entities, input, filter, sortCriterion, start, count, finalFlags, facets), + _entityService); + } + + @Nonnull + @Override + public ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateScrollResult( + _searchService.scrollAcrossEntities( + entities, input, filter, null, scrollId, keepAlive, count, finalFlags), + _entityService); + } + + @Nonnull + @Override + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + start, + count, + null, + null, + searchFlags), + _entityService); + } + + @Nonnull + @Override + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags), + _entityService); + } + + @Nonnull + @Override + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); + return ValidationUtils.validateLineageScrollResult( + _lineageSearchService.scrollAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + finalFlags), + _entityService); + } + + /** + * Gets browse path(s) given dataset urn + * + * @param urn urn for the entity + * @return list of paths given urn + * @throws RemoteInvocationException + */ + @Nonnull + public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return new StringArray(_entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); + } + + public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + _entityService.setWritable(canWrite); + } + + @Nonnull + public Map<String, Long> batchGetTotalEntityCount( + @Nonnull List<String> entityNames, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _searchService.docCountPerEntity(entityNames); + } + + /** List all urns existing for a particular Entity type. */ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.listUrns(entityName, start, count); + } + + /** Hard delete an entity with a particular urn. */ + public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + _entityService.deleteUrn(urn); + } + + @Override + public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) + throws RemoteInvocationException { + withRetry(() -> _deleteEntityService.deleteReferencesTo(urn, false), "deleteEntityReferences"); + } + + @Nonnull + @Override + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateSearchResult( + _entitySearchService.filter(entity, filter, sortCriterion, start, count), _entityService); + } + + @Override + public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.exists(urn); + } + + @SneakyThrows + @Override + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + } + + @SneakyThrows + @Override + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + } + + @SneakyThrows + @Override + public List<EnvelopedAspect> getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull final Authentication 
authentication) + throws RemoteInvocationException { + GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); + response.setEntityName(entity); + response.setAspectName(aspect); + if (startTimeMillis != null) { + response.setStartTimeMillis(startTimeMillis); + } + if (endTimeMillis != null) { + response.setEndTimeMillis(endTimeMillis); + } + if (limit != null) { + response.setLimit(limit); + } + if (filter != null) { + response.setFilter(filter); + } + response.setValues( + new EnvelopedAspectArray( + _timeseriesAspectService.getAspectValues( + Urn.createFromString(urn), + entity, + aspect, + startTimeMillis, + endTimeMillis, + limit, + filter, + sort))); + return response.getValues(); + } + + // TODO: Factor out ingest logic into a util that can be accessed by the java client and the + // resource + @Override + public String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { + String actorUrnStr = + authentication.getActor() != null + ? authentication.getActor().toUrnStr() + : Constants.UNKNOWN_ACTOR; + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); + final List<MetadataChangeProposal> additionalChanges = + AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); + + Stream<MetadataChangeProposal> proposalStream = + Stream.concat(Stream.of(metadataChangeProposal), additionalChanges.stream()); + AspectsBatch batch = + AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) + .build(); + + IngestResult one = + _entityService.ingestProposal(batch, auditStamp, async).stream().findFirst().get(); + + Urn urn = one.getUrn(); + tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); + return urn.toString(); + } + + @SneakyThrows + @Override + public <T extends RecordTemplate> Optional<T> getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class<T> aspectClass, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + VersionedAspect entity = + _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + if (entity != null && entity.hasAspect()) { + DataMap rawAspect = ((DataMap) entity.data().get("aspect")); + if (rawAspect.containsKey(aspectClass.getCanonicalName())) { + DataMap aspectDataMap = rawAspect.getDataMap(aspectClass.getCanonicalName()); + return Optional.of(RecordUtils.toRecordTemplate(aspectClass, aspectDataMap)); } - if (endTimeMillis != null) { - response.setEndTimeMillis(endTimeMillis); - } - if (limit != null) { - response.setLimit(limit); - } - if (filter != null) { - response.setFilter(filter); - } - response.setValues(new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues(Urn.createFromString(urn), entity, aspect, startTimeMillis, - endTimeMillis, limit, filter, sort))); - return response.getValues(); - } - - // TODO: Factor out ingest logic into a util that can be accessed by the java client and the resource - @Override - public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException { - String actorUrnStr = authentication.getActor() != null ? 
authentication.getActor().toUrnStr() : Constants.UNKNOWN_ACTOR; - final AuditStamp auditStamp = - new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); - final List<MetadataChangeProposal> additionalChanges = - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); - - Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - additionalChanges.stream()); - AspectsBatch batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(); - - IngestResult one = _entityService.ingestProposal(batch, auditStamp, async).stream() - .findFirst().get(); - - Urn urn = one.getUrn(); - tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); - return urn.toString(); - } - - @SneakyThrows - @Override - public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull final Authentication authentication) throws RemoteInvocationException { - VersionedAspect entity = _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - if (entity != null && entity.hasAspect()) { - DataMap rawAspect = ((DataMap) entity.data().get("aspect")); - if (rawAspect.containsKey(aspectClass.getCanonicalName())) { - DataMap aspectDataMap = rawAspect.getDataMap(aspectClass.getCanonicalName()); - return Optional.of(RecordUtils.toRecordTemplate(aspectClass, aspectDataMap)); - } - } - return Optional.empty(); } - - @SneakyThrows - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException { - VersionedAspect entity = _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - if (entity == null) { - return null; + return Optional.empty(); + } + + @SneakyThrows + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + VersionedAspect entity = + _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + if (entity == null) { + return null; + } + + if (entity.hasAspect()) { + DataMap rawAspect = ((DataMap) entity.data().get("aspect")); + return rawAspect; + } + + return null; + } + + @Override + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull Authentication authentication) + throws Exception { + _eventProducer.producePlatformEvent(name, key, event); + } + + @Override + public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + throws Exception { + _restliEntityClient.rollbackIngestion(runId, authentication); + } + + private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { + if (systemMetadata != null && systemMetadata.hasRunId()) { + _entitySearchService.appendRunId( + entityUrn.getEntityType(), entityUrn, systemMetadata.getRunId()); + } + } + + protected <T> T withRetry(@Nonnull final Supplier<T> block, @Nullable String counterPrefix) { + final BackoffPolicy backoffPolicy = new ExponentialBackoff(DEFAULT_RETRY_INTERVAL); + int attemptCount = 0; + + while (attemptCount < DEFAULT_RETRY_COUNT + 1) { + try { + return block.get(); + } catch (Throwable ex) { + MetricUtils.counter(this.getClass(), buildMetricName(ex, 
counterPrefix)).inc(); + + final boolean skipRetry = + NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) + || (ex.getCause() != null + && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); + + if (attemptCount == DEFAULT_RETRY_COUNT || skipRetry) { + throw ex; + } else { + attemptCount = attemptCount + 1; + try { + Thread.sleep(backoffPolicy.nextBackoff(attemptCount, ex) * 1000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } } - - if (entity.hasAspect()) { - DataMap rawAspect = ((DataMap) entity.data().get("aspect")); - return rawAspect; - } - - return null; - } - - @Override - public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, - @Nonnull Authentication authentication) throws Exception { - _eventProducer.producePlatformEvent(name, key, event); - } - - @Override - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) throws Exception { - _restliEntityClient.rollbackIngestion(runId, authentication); + } } - private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { - if (systemMetadata != null && systemMetadata.hasRunId()) { - _entitySearchService.appendRunId(entityUrn.getEntityType(), entityUrn, systemMetadata.getRunId()); - } - } + // Should never hit this line. + throw new IllegalStateException("No JavaEntityClient call executed."); + } - protected <T> T withRetry(@Nonnull final Supplier<T> block, @Nullable String counterPrefix) { - final BackoffPolicy backoffPolicy = new ExponentialBackoff(DEFAULT_RETRY_INTERVAL); - int attemptCount = 0; - - while (attemptCount < DEFAULT_RETRY_COUNT + 1) { - try { - return block.get(); - } catch (Throwable ex) { - MetricUtils.counter(this.getClass(), buildMetricName(ex, counterPrefix)).inc(); - - final boolean skipRetry = NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) - || (ex.getCause() != null && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); - - if (attemptCount == DEFAULT_RETRY_COUNT || skipRetry) { - throw ex; - } else { - attemptCount = attemptCount + 1; - try { - Thread.sleep(backoffPolicy.nextBackoff(attemptCount, ex) * 1000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - } - } + private String buildMetricName(Throwable throwable, @Nullable String counterPrefix) { + StringBuilder builder = new StringBuilder(); - // Should never hit this line. 
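
The `withRetry` hunk above is mostly indentation churn, but the control flow is worth restating outside the diff. Below is a minimal, self-contained sketch of the same retry-with-exponential-backoff shape; the constants and class name are illustrative stand-ins, and the metrics counter and non-retryable-exception filtering from the real method are deliberately omitted:

```java
import java.util.function.Supplier;

public final class RetrySketch {
  private static final int MAX_RETRIES = 3; // assumed; stands in for DEFAULT_RETRY_COUNT
  private static final long BASE_INTERVAL_S = 2; // assumed; stands in for DEFAULT_RETRY_INTERVAL

  /** Runs the block, retrying failures with exponential backoff before rethrowing. */
  public static <T> T withRetry(Supplier<T> block) {
    int attempt = 0;
    while (attempt < MAX_RETRIES + 1) {
      try {
        return block.get();
      } catch (RuntimeException ex) {
        if (attempt == MAX_RETRIES) {
          throw ex; // out of attempts: surface the last failure
        }
        attempt++;
        try {
          // sleep 2^attempt * base seconds before the next try
          Thread.sleep((1L << attempt) * BASE_INTERVAL_S * 1000);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          throw new RuntimeException(e);
        }
      }
    }
    // Mirrors the "should never hit this line" guard in the diffed code.
    throw new IllegalStateException("block never executed");
  }
}
```
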
- throw new IllegalStateException("No JavaEntityClient call executed."); + // deleteEntityReferences_failures + if (counterPrefix != null) { + builder.append(counterPrefix).append(MetricUtils.DELIMITER); } - private String buildMetricName(Throwable throwable, @Nullable String counterPrefix) { - StringBuilder builder = new StringBuilder(); - - // deleteEntityReferences_failures - if (counterPrefix != null) { - builder.append(counterPrefix).append(MetricUtils.DELIMITER); - } - - return builder.append("exception") - .append(MetricUtils.DELIMITER) - .append(throwable.getClass().getName()) - .toString(); - } + return builder + .append("exception") + .append(MetricUtils.DELIMITER) + .append(throwable.getClass().getName()) + .toString(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java index 6b5a3d5bfb06e..0ac18b4aacc04 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java @@ -2,9 +2,9 @@ import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClientCache; -import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.event.EventProducer; @@ -15,25 +15,37 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import lombok.Getter; - -/** - * Java backed SystemEntityClient - */ +/** Java backed SystemEntityClient */ @Getter public class SystemJavaEntityClient extends JavaEntityClient implements SystemEntityClient { - private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final EntityClientCache entityClientCache; + private final Authentication systemAuthentication; - public SystemJavaEntityClient(EntityService entityService, DeleteEntityService deleteEntityService, - EntitySearchService entitySearchService, CachingEntitySearchService cachingEntitySearchService, - SearchService searchService, LineageSearchService lineageSearchService, - TimeseriesAspectService timeseriesAspectService, EventProducer eventProducer, - RestliEntityClient restliEntityClient, Authentication systemAuthentication, - EntityClientCacheConfig cacheConfig) { - super(entityService, deleteEntityService, entitySearchService, cachingEntitySearchService, searchService, - lineageSearchService, timeseriesAspectService, eventProducer, restliEntityClient); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); - } + public SystemJavaEntityClient( + EntityService entityService, + DeleteEntityService deleteEntityService, + EntitySearchService entitySearchService, + CachingEntitySearchService cachingEntitySearchService, + SearchService searchService, + LineageSearchService lineageSearchService, + TimeseriesAspectService timeseriesAspectService, + EventProducer eventProducer, + RestliEntityClient restliEntityClient, + Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + super( + entityService, + 
deleteEntityService,
+        entitySearchService,
+        cachingEntitySearchService,
+        searchService,
+        lineageSearchService,
+        timeseriesAspectService,
+        eventProducer,
+        restliEntityClient);
+    this.systemAuthentication = systemAuthentication;
+    this.entityClientCache =
+        buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig);
+  }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java
index 660c1291a5651..c740f8562d8fe 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java
@@ -5,19 +5,13 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
-/**
- * A value class that holds the components of a key for metadata retrieval.
- */
+/** A value class that holds the components of a key for metadata retrieval. */
 @Value
 public class AspectKey<URN extends Urn, ASPECT extends RecordTemplate> {
 
-  @NonNull
-  Class<ASPECT> aspectClass;
+  @NonNull Class<ASPECT> aspectClass;
 
-  @NonNull
-  URN urn;
+  @NonNull URN urn;
 
-  @NonNull
-  Long version;
+  @NonNull Long version;
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java
index 7acb9ca0cbd64..999140759b09b 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java
@@ -15,7 +15,6 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
 public abstract class BaseReadDAO<ASPECT_UNION extends UnionTemplate, URN extends Urn> {
 
   public static final long FIRST_VERSION = 0;
@@ -41,12 +40,10 @@ public BaseReadDAO(@Nonnull Set<Class<? extends RecordTemplate>> aspects) {
    * @return a mapping of given keys to the corresponding metadata aspect.
    */
   @Nonnull
-  public abstract Map<AspectKey<URN, ? extends RecordTemplate>, Optional<? extends RecordTemplate>> get(
-      @Nonnull Set<AspectKey<URN, ? extends RecordTemplate>> keys);
+  public abstract Map<AspectKey<URN, ? extends RecordTemplate>, Optional<? extends RecordTemplate>>
+      get(@Nonnull Set<AspectKey<URN, ? extends RecordTemplate>> keys);
 
-  /**
-   * Similar to {@link #get(Set)} but only using only one {@link AspectKey}.
-   */
+  /** Similar to {@link #get(Set)} but using only one {@link AspectKey}. */
   @Nonnull
   public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull AspectKey<URN, ASPECT> key) {
     return (Optional<ASPECT>) get(Collections.singleton(key)).get(key);
@@ -56,21 +53,21 @@ public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull AspectKey<U
    * Similar to {@link #get(AspectKey)} but with each component of the key broken out as arguments.
    */
   @Nonnull
-  public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn,
-      long version) {
+  public <ASPECT extends RecordTemplate> Optional<ASPECT> get(
+      @Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn, long version) {
     return get(new AspectKey<>(aspectClass, urn, version));
   }
 
-  /**
-   * Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version.
-   */
+  /** Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version. 
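
`AspectKey` plus `BaseReadDAO` above follow a common DAO shape: one abstract batch lookup keyed by an immutable value class, with thin single-key and latest-version overloads delegating down to it. A rough sketch of that shape, using a Java record where the real code uses Lombok's `@Value`, and `Object` where it uses Pegasus `RecordTemplate`s; names here are illustrative only:

```java
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

public abstract class ReadDaoSketch {
  /** Immutable lookup key; a record supplies equals/hashCode, much like Lombok's @Value. */
  public record Key(Class<?> aspectClass, String urn, long version) {}

  public static final long LATEST_VERSION = 0;

  /** The one method implementations must provide: a batch lookup. */
  public abstract Map<Key, Optional<Object>> get(Set<Key> keys);

  /** Single-key convenience overload, delegating to the batch form. */
  public Optional<Object> get(Key key) {
    return get(Collections.singleton(key)).get(key);
  }

  /** Latest-version convenience overload, delegating to the single-key form. */
  public Optional<Object> getLatest(Class<?> aspectClass, String urn) {
    return get(new Key(aspectClass, urn, LATEST_VERSION));
  }
}
```
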
*/ @Nonnull - public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn) { + public <ASPECT extends RecordTemplate> Optional<ASPECT> get( + @Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn) { return get(aspectClass, urn, LATEST_VERSION); } /** - * Similar to {@link #get(Class, Urn)} but retrieves multiple aspects latest versions associated with multiple URNs. + * Similar to {@link #get(Class, Urn)} but retrieves multiple aspects latest versions associated + * with multiple URNs. * * <p>The returned {@link Map} contains all the . */ @@ -85,20 +82,22 @@ public Map<URN, Map<Class<? extends RecordTemplate>, Optional<? extends RecordTe } } - final Map<URN, Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>>> results = new HashMap<>(); - get(keys).entrySet().forEach(entry -> { - final AspectKey<URN, ? extends RecordTemplate> key = entry.getKey(); - final URN urn = key.getUrn(); - results.putIfAbsent(urn, new HashMap<>()); - results.get(urn).put(key.getAspectClass(), entry.getValue()); - }); + final Map<URN, Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>>> + results = new HashMap<>(); + get(keys) + .entrySet() + .forEach( + entry -> { + final AspectKey<URN, ? extends RecordTemplate> key = entry.getKey(); + final URN urn = key.getUrn(); + results.putIfAbsent(urn, new HashMap<>()); + results.get(urn).put(key.getAspectClass(), entry.getValue()); + }); return results; } - /** - * Similar to {@link #get(Set, Set)} but only for one URN. - */ + /** Similar to {@link #get(Set, Set)} but only for one URN. */ @Nonnull public Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>> get( @Nonnull Set<Class<? extends RecordTemplate>> aspectClasses, @Nonnull URN urn) { @@ -112,16 +111,15 @@ public Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>> return results.get(urn); } - /** - * Similar to {@link #get(Set, Set)} but only for one aspect. - */ + /** Similar to {@link #get(Set, Set)} but only for one aspect. */ @Nonnull public <ASPECT extends RecordTemplate> Map<URN, Optional<ASPECT>> get( @Nonnull Class<ASPECT> aspectClass, @Nonnull Set<URN> urns) { - return get(Collections.singleton(aspectClass), urns).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> (Optional<ASPECT>) entry.getValue().get(aspectClass))); + return get(Collections.singleton(aspectClass), urns).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> (Optional<ASPECT>) entry.getValue().get(aspectClass))); } protected void checkValidAspect(@Nonnull Class<? 
extends RecordTemplate> aspectClass) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java index 42dd3f0405a6a..ae27f9f7e6f1a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java @@ -7,150 +7,162 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import io.ebean.PagedList; import io.ebean.Transaction; - -import java.util.stream.Stream; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.sql.Timestamp; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** - * An interface specifying create, update, and read operations against metadata entity aspects. - * This interface is meant to abstract away the storage concerns of these pieces of metadata, permitting any underlying - * storage system to be used. + * An interface specifying create, update, and read operations against metadata entity aspects. This + * interface is meant to abstract away the storage concerns of these pieces of metadata, permitting + * any underlying storage system to be used. * - * Requirements for any implementation: - * 1. Being able to map its internal storage representation to {@link EntityAspect}; - * 2. Honor the internal versioning semantics. The latest version of any aspect is set to 0 for efficient retrieval. - * In most cases only the latest state of an aspect will be fetched. See {@link EntityServiceImpl} for more details. + * <p>Requirements for any implementation: 1. Being able to map its internal storage representation + * to {@link EntityAspect}; 2. Honor the internal versioning semantics. The latest version of any + * aspect is set to 0 for efficient retrieval. In most cases only the latest state of an aspect will + * be fetched. See {@link EntityServiceImpl} for more details. * - * TODO: This interface exposes {@link #runInTransactionWithRetry(Supplier, int)} because {@link EntityServiceImpl} concerns - * itself with batching multiple commands into a single transaction. It exposes storage concerns somewhat and it'd be - * worth looking into ways to move this responsibility inside {@link AspectDao} implementations. + * <p>TODO: This interface exposes {@link #runInTransactionWithRetry(Supplier, int)} because {@link + * EntityServiceImpl} concerns itself with batching multiple commands into a single transaction. It + * exposes storage concerns somewhat and it'd be worth looking into ways to move this responsibility + * inside {@link AspectDao} implementations. 
*/ public interface AspectDao { - String ASPECT_WRITE_COUNT_METRIC_NAME = "aspectWriteCount"; - String ASPECT_WRITE_BYTES_METRIC_NAME = "aspectWriteBytes"; - - @Nullable - EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String aspectName, final long version); - - @Nullable - EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key); - - @Nonnull - Map<EntityAspectIdentifier, EntityAspect> batchGet(@Nonnull final Set<EntityAspectIdentifier> keys); - - @Nonnull - List<EntityAspect> getAspectsInRange(@Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis); - - @Nullable - default EntityAspect getLatestAspect(@Nonnull final String urn, @Nonnull final String aspectName) { - return getLatestAspects(Map.of(urn, Set.of(aspectName))).getOrDefault(urn, Map.of()) - .getOrDefault(aspectName, null); - } - - @Nonnull - Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects); - - void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert); - - void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert); - - long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion); - - void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect); - - @Nonnull - ListResult<String> listUrns( - @Nonnull final String entityName, - @Nonnull final String aspectName, - final int start, - final int pageSize); - - @Nonnull - Integer countAspect( - @Nonnull final String aspectName, - @Nullable String urnLike); - - @Nonnull - PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args); - - @Nonnull - Stream<EntityAspect> streamAspects(String entityName, String aspectName); - - int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn); - - @Nonnull - ListResult<String> listLatestAspectMetadata( - @Nonnull final String entityName, - @Nonnull final String aspectName, - final int start, - final int pageSize); - - @Nonnull - ListResult<String> listAspectMetadata( - @Nonnull final String entityName, - @Nonnull final String aspectName, - final long version, - final int start, - final int pageSize); - - Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<String>> urnAspectMap); - - default long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) { - return getNextVersions(urn, Set.of(aspectName)).get(aspectName); - } - - default Map<String, Long> getNextVersions(@Nonnull final String urn, @Nonnull final Set<String> aspectNames) { - return getNextVersions(Map.of(urn, aspectNames)).get(urn); - } - - long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName); - - void setWritable(boolean canWrite); - - @Nonnull - <T> T runInTransactionWithRetry(@Nonnull 
final Function<Transaction, T> block, final int maxTransactionRetry); - - @Nonnull - default <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, AspectsBatch batch, - final int maxTransactionRetry) { - return runInTransactionWithRetry(block, maxTransactionRetry); - } - - default void incrementWriteMetrics(String aspectName, long count, long bytes) { - MetricUtils.counter(this.getClass(), - String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_COUNT_METRIC_NAME, aspectName))).inc(count); - MetricUtils.counter(this.getClass(), - String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_BYTES_METRIC_NAME, aspectName))).inc(bytes); - } + String ASPECT_WRITE_COUNT_METRIC_NAME = "aspectWriteCount"; + String ASPECT_WRITE_BYTES_METRIC_NAME = "aspectWriteBytes"; + + @Nullable + EntityAspect getAspect( + @Nonnull final String urn, @Nonnull final String aspectName, final long version); + + @Nullable + EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key); + + @Nonnull + Map<EntityAspectIdentifier, EntityAspect> batchGet( + @Nonnull final Set<EntityAspectIdentifier> keys); + + @Nonnull + List<EntityAspect> getAspectsInRange( + @Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis); + + @Nullable + default EntityAspect getLatestAspect( + @Nonnull final String urn, @Nonnull final String aspectName) { + return getLatestAspects(Map.of(urn, Set.of(aspectName))) + .getOrDefault(urn, Map.of()) + .getOrDefault(aspectName, null); + } + + @Nonnull + Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects); + + void saveAspect( + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert); + + void saveAspect( + @Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert); + + long saveLatestAspect( + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion); + + void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect); + + @Nonnull + ListResult<String> listUrns( + @Nonnull final String entityName, + @Nonnull final String aspectName, + final int start, + final int pageSize); + + @Nonnull + Integer countAspect(@Nonnull final String aspectName, @Nullable String urnLike); + + @Nonnull + PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args); + + @Nonnull + Stream<EntityAspect> streamAspects(String entityName, String aspectName); + + int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn); + + @Nonnull + ListResult<String> listLatestAspectMetadata( + @Nonnull final String entityName, + @Nonnull final String aspectName, + final int start, + final int pageSize); + + @Nonnull + ListResult<String> listAspectMetadata( + @Nonnull final String entityName, + @Nonnull final String aspectName, + final long version, + final int start, + 
final int pageSize); + + Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<String>> urnAspectMap); + + default long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) { + return getNextVersions(urn, Set.of(aspectName)).get(aspectName); + } + + default Map<String, Long> getNextVersions( + @Nonnull final String urn, @Nonnull final Set<String> aspectNames) { + return getNextVersions(Map.of(urn, aspectNames)).get(urn); + } + + long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName); + + void setWritable(boolean canWrite); + + @Nonnull + <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, final int maxTransactionRetry); + + @Nonnull + default <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, + AspectsBatch batch, + final int maxTransactionRetry) { + return runInTransactionWithRetry(block, maxTransactionRetry); + } + + default void incrementWriteMetrics(String aspectName, long count, long bytes) { + MetricUtils.counter( + this.getClass(), + String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_COUNT_METRIC_NAME, aspectName))) + .inc(count); + MetricUtils.counter( + this.getClass(), + String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_BYTES_METRIC_NAME, aspectName))) + .inc(bytes); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java index c16a41cbaf84b..485eb2b1af943 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java @@ -3,14 +3,15 @@ import javax.annotation.Nonnull; /** - * This interface is a split-off from {@link AspectDao} to segregate the methods that are only called by data migration - * tasks. This separation is not technically necessary, but it felt dangerous to leave entire-table queries mixed - * with the rest. + * This interface is a split-off from {@link AspectDao} to segregate the methods that are only + * called by data migration tasks. This separation is not technically necessary, but it felt + * dangerous to leave entire-table queries mixed with the rest. */ public interface AspectMigrationsDao { /** * Return a paged list of _all_ URNs in the database. + * * @param start Start offset of a page. * @param pageSize Number of records in a page. * @return An iterable of {@code String} URNs. @@ -20,14 +21,17 @@ public interface AspectMigrationsDao { /** * Return the count of entities (unique URNs) in the database. + * * @return Count of entities. */ long countEntities(); /** * Check if any record of given {@param aspectName} exists in the database. + * * @param aspectName Name of an entity aspect to search for. - * @return {@code true} if at least one record of given {@param aspectName} is found. {@code false} otherwise. + * @return {@code true} if at least one record of given {@param aspectName} is found. {@code + * false} otherwise. 
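
The `listAllUrns(start, pageSize)` contract described in `AspectMigrationsDao` above implies the usual offset-pagination loop on the migration task's side. A hedged sketch of draining such an API, with a `BiFunction` standing in for the real DAO method:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;

public final class PagingSketch {
  /** Drains an offset-paged listing by advancing start until a short page comes back. */
  public static List<String> drain(
      BiFunction<Integer, Integer, List<String>> listAllUrns, int pageSize) {
    List<String> all = new ArrayList<>();
    int start = 0;
    while (true) {
      List<String> page = listAllUrns.apply(start, pageSize);
      all.addAll(page);
      if (page.size() < pageSize) {
        return all; // a short (or empty) page means the table is exhausted
      }
      start += pageSize;
    }
  }
}
```
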
*/
   boolean checkIfAspectExists(@Nonnull final String aspectName);
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java
index 8296edd615aad..eaf9b1a2cc415 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java
@@ -1,19 +1,18 @@
 package com.linkedin.metadata.entity;
 
+import java.sql.Timestamp;
+import javax.annotation.Nonnull;
 import lombok.AllArgsConstructor;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
 
-import javax.annotation.Nonnull;
-import java.sql.Timestamp;
-
 /**
- * This is an internal representation of an entity aspect record {@link EntityServiceImpl} and {@link AspectDao}
- * implementations are using. While {@link AspectDao} implementations have their own aspect record implementations,
- * they cary implementation details that should not leak outside. Therefore, this is the type to use in public
- * {@link AspectDao} methods.
+ * This is an internal representation of an entity aspect record that {@link EntityServiceImpl} and
+ * {@link AspectDao} implementations are using. While {@link AspectDao} implementations have their
+ * own aspect record implementations, they carry implementation details that should not leak outside.
+ * Therefore, this is the type to use in public {@link AspectDao} methods.
  */
 @Getter
 @Setter
@@ -22,25 +21,23 @@
 @EqualsAndHashCode
 public class EntityAspect {
 
-  @Nonnull
-  private String urn;
+  @Nonnull private String urn;
 
-  @Nonnull
-  private String aspect;
+  @Nonnull private String aspect;
 
-  private long version;
+  private long version;
 
-  private String metadata;
+  private String metadata;
 
-  private String systemMetadata;
+  private String systemMetadata;
 
-  private Timestamp createdOn;
+  private Timestamp createdOn;
 
-  private String createdBy;
+  private String createdBy;
 
-  private String createdFor;
+  private String createdFor;
 
-  public EntityAspectIdentifier toAspectIdentifier() {
-    return new EntityAspectIdentifier(getUrn(), getAspect(), getVersion());
-  }
+  public EntityAspectIdentifier toAspectIdentifier() {
+    return new EntityAspectIdentifier(getUrn(), getAspect(), getVersion());
+  }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java
index cb360192c0120..887bd3910310d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java
@@ -6,10 +6,10 @@
 import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 
-
 /**
- * This class holds values required to construct a unique key to identify an entity aspect record in a database.
- * Its existence started mainly for compatibility with {@link com.linkedin.metadata.entity.ebean.EbeanAspectV2.PrimaryKey}
+ * This class holds values required to construct a unique key to identify an entity aspect record in
+ * a database. 
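
`toAspectIdentifier()` above, and the `fromEbean`/`fromCassandra` factories just below, all funnel storage-specific rows into one storage-agnostic key. A small sketch of that adapter pattern with hypothetical row types; only the shape matches the real classes:

```java
public record IdSketch(String urn, String aspect, long version) {

  // Hypothetical storage-specific rows; the real ones live in the DAO implementations.
  public record EbeanRow(String urn, String aspect, long version) {}

  public record CassandraRow(String urn, String aspect, long version) {}

  // Static factories translate each storage row into the shared key, so only the
  // owning DAO implementation ever touches its own row type.
  public static IdSketch fromEbean(EbeanRow row) {
    return new IdSketch(row.urn(), row.aspect(), row.version());
  }

  public static IdSketch fromCassandra(CassandraRow row) {
    return new IdSketch(row.urn(), row.aspect(), row.version());
  }
}
```
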
Its existence started mainly for compatibility with {@link + * com.linkedin.metadata.entity.ebean.EbeanAspectV2.PrimaryKey} */ @Value @Slf4j @@ -19,10 +19,12 @@ public class EntityAspectIdentifier { long version; public static EntityAspectIdentifier fromEbean(EbeanAspectV2 ebeanAspectV2) { - return new EntityAspectIdentifier(ebeanAspectV2.getUrn(), ebeanAspectV2.getAspect(), ebeanAspectV2.getVersion()); + return new EntityAspectIdentifier( + ebeanAspectV2.getUrn(), ebeanAspectV2.getAspect(), ebeanAspectV2.getVersion()); } public static EntityAspectIdentifier fromCassandra(CassandraAspect cassandraAspect) { - return new EntityAspectIdentifier(cassandraAspect.getUrn(), cassandraAspect.getAspect(), cassandraAspect.getVersion()); + return new EntityAspectIdentifier( + cassandraAspect.getUrn(), cassandraAspect.getAspect(), cassandraAspect.getVersion()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index 57f88e31deea5..a333839416556 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -1,10 +1,10 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.BrowsePathUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.*; + import com.codahale.metrics.Timer; -import com.linkedin.data.template.GetMode; -import com.linkedin.data.template.SetMode; -import com.linkedin.entity.client.SystemEntityClient; -import com.linkedin.metadata.config.PreProcessHooks; import com.datahub.util.RecordUtils; import com.datahub.util.exception.ModelConversionException; import com.google.common.collect.ImmutableList; @@ -22,7 +22,9 @@ import com.linkedin.common.urn.VersionedUrnUtils; import com.linkedin.data.schema.TyperefDataSchema; import com.linkedin.data.template.DataTemplateUtil; +import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.SetMode; import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; import com.linkedin.data.template.UnionTemplate; @@ -32,19 +34,21 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.Aspect; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.metadata.entity.ebean.transactions.PatchBatchItem; import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; +import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.metadata.entity.transactions.AspectsBatch; import 
com.linkedin.metadata.event.EventProducer;
 import com.linkedin.metadata.models.AspectSpec;
@@ -67,7 +71,7 @@
 import com.linkedin.mxe.SystemMetadata;
 import com.linkedin.util.Pair;
 import io.ebean.PagedList;
-
+import io.ebean.Transaction;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
@@ -84,51 +88,45 @@
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Consumer;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import javax.persistence.EntityNotFoundException;
-
-import io.ebean.Transaction;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.search.utils.BrowsePathUtils.*;
-import static com.linkedin.metadata.utils.PegasusUtils.*;
-
-
 /**
- * A class specifying create, update, and read operations against metadata entities and aspects
- * by primary key (urn).
+ * A class specifying create, update, and read operations against metadata entities and aspects by
+ * primary key (urn).
 *
- * This interface is meant to abstract away the storage concerns of these pieces of metadata, permitting
- * any underlying storage system to be used in materializing GMS domain objects, which are implemented using Pegasus
- * {@link RecordTemplate}s.
+ * <p>This interface is meant to abstract away the storage concerns of these pieces of metadata,
+ * permitting any underlying storage system to be used in materializing GMS domain objects, which
+ * are implemented using Pegasus {@link RecordTemplate}s.
 *
- * Internal versioning semantics
- * =============================
+ * <p>Internal versioning semantics
 *
- * The latest version of any aspect is set to 0 for efficient retrieval; in most cases the latest state of an aspect
- * will be the only fetched.
+ * <p>The latest version of any aspect is set to 0 for efficient retrieval; in most cases the latest
+ * state of an aspect will be the only one fetched.
 *
- * As such, 0 is treated as a special number. Once an aspect is no longer the latest, versions will increment
- * monotonically, starting from 1. Thus, the second-to-last version of an aspect will be equal to total # versions
- * of the aspect - 1.
+ * <p>As such, 0 is treated as a special number. Once an aspect is no longer the latest, versions
+ * will increment monotonically, starting from 1. Thus, the second-to-last version of an aspect will
+ * be equal to total # versions of the aspect - 1.
 *
- * For example, if there are 5 instances of a single aspect, the latest will have version 0, and the second-to-last
- * will have version 4. The "true" latest version of an aspect is always equal to the highest stored version
- * of a given aspect + 1.
+ * <p>For example, if there are 5 instances of a single aspect, the latest will have version 0, and
+ * the second-to-last will have version 4. The "true" latest version of an aspect is always equal to
+ * the highest stored version of a given aspect + 1. 
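
The versioning rules in this javadoc are easiest to verify with concrete numbers. A tiny sketch of the documented convention (latest pinned at version 0, history counting up from 1); this illustrates the stated semantics only, not the actual `EntityServiceImpl` logic:

```java
import java.util.List;

public final class VersionSketch {
  public static void main(String[] args) {
    // Five stored instances of one aspect: the latest is pinned at version 0,
    // older states occupy versions 1..4 (oldest = 1).
    List<Long> storedVersions = List.of(0L, 1L, 2L, 3L, 4L);

    long maxStored = storedVersions.stream().mapToLong(Long::longValue).max().orElseThrow();
    long secondToLast = maxStored; // = total instances - 1 = 4
    long trueLatest = maxStored + 1; // the "true" version number of the row stored as 0

    System.out.printf("second-to-last=%d, true latest=%d%n", secondToLast, trueLatest);
  }
}
```
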
* - * Note that currently, implementations of this interface are responsible for producing Metadata Change Log on - * ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate, SystemMetadata, RecordTemplate, SystemMetadata, - * MetadataChangeProposal, Urn, AuditStamp, AspectSpec)}. + * <p>Note that currently, implementations of this interface are responsible for producing Metadata + * Change Log on ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate, + * SystemMetadata, RecordTemplate, SystemMetadata, MetadataChangeProposal, Urn, AuditStamp, + * AspectSpec)}. * - * TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this class. + * <p>TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this + * class. */ @Slf4j public class EntityServiceImpl implements EntityService { @@ -137,7 +135,6 @@ public class EntityServiceImpl implements EntityService { * As described above, the latest version of an aspect should <b>always</b> take the value 0, with * monotonically increasing version incrementing as usual once the latest version is replaced. */ - private static final int DEFAULT_MAX_TRANSACTION_RETRY = 3; protected final AspectDao _aspectDao; @@ -146,8 +143,7 @@ public class EntityServiceImpl implements EntityService { private final Map<String, Set<String>> _entityToValidAspects; private RetentionService _retentionService; private final Boolean _alwaysEmitChangeLog; - @Getter - private final UpdateIndicesService _updateIndicesService; + @Getter private final UpdateIndicesService _updateIndicesService; private final PreProcessHooks _preProcessHooks; protected static final int MAX_KEYS_PER_QUERY = 500; @@ -160,17 +156,24 @@ public EntityServiceImpl( final boolean alwaysEmitChangeLog, final UpdateIndicesService updateIndicesService, final PreProcessHooks preProcessHooks) { - this(aspectDao, producer, entityRegistry, alwaysEmitChangeLog, updateIndicesService, preProcessHooks, DEFAULT_MAX_TRANSACTION_RETRY); + this( + aspectDao, + producer, + entityRegistry, + alwaysEmitChangeLog, + updateIndicesService, + preProcessHooks, + DEFAULT_MAX_TRANSACTION_RETRY); } public EntityServiceImpl( - @Nonnull final AspectDao aspectDao, - @Nonnull final EventProducer producer, - @Nonnull final EntityRegistry entityRegistry, - final boolean alwaysEmitChangeLog, - final UpdateIndicesService updateIndicesService, - final PreProcessHooks preProcessHooks, - final Integer retry) { + @Nonnull final AspectDao aspectDao, + @Nonnull final EventProducer producer, + @Nonnull final EntityRegistry entityRegistry, + final boolean alwaysEmitChangeLog, + final UpdateIndicesService updateIndicesService, + final PreProcessHooks preProcessHooks, + final Integer retry) { _aspectDao = aspectDao; _producer = producer; @@ -188,8 +191,8 @@ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { } /** - * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided - * set of aspect names. + * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided set + * of aspect names. 
* * @param urns set of urns to fetch aspects for * @param aspectNames aspects to fetch for each urn in urns set @@ -197,8 +200,7 @@ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { */ @Override public Map<Urn, List<RecordTemplate>> getLatestAspects( - @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) { + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { Map<EntityAspectIdentifier, EntityAspect> batchGetResults = getLatestAspect(urns, aspectNames); @@ -211,69 +213,88 @@ public Map<Urn, List<RecordTemplate>> getLatestAspects( } // Add "key" aspects for each urn. TODO: Replace this with a materialized key aspect. - urnToAspects.keySet().forEach(key -> { - final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key); - urnToAspects.get(key).add(keyAspect); - }); - - batchGetResults.forEach((key, aspectEntry) -> { - final Urn urn = toUrn(key.getUrn()); - final String aspectName = key.getAspect(); - // for now, don't add the key aspect here- we have already added it above - if (aspectName.equals(getKeyAspectName(urn))) { - return; - } + urnToAspects + .keySet() + .forEach( + key -> { + final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key); + urnToAspects.get(key).add(keyAspect); + }); + + batchGetResults.forEach( + (key, aspectEntry) -> { + final Urn urn = toUrn(key.getUrn()); + final String aspectName = key.getAspect(); + // for now, don't add the key aspect here- we have already added it above + if (aspectName.equals(getKeyAspectName(urn))) { + return; + } - final RecordTemplate aspectRecord = - EntityUtils.toAspectRecord(urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); - urnToAspects.putIfAbsent(urn, new ArrayList<>()); - urnToAspects.get(urn).add(aspectRecord); - }); + final RecordTemplate aspectRecord = + EntityUtils.toAspectRecord( + urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); + urnToAspects.putIfAbsent(urn, new ArrayList<>()); + urnToAspects.get(urn).add(aspectRecord); + }); return urnToAspects; } @Nonnull @Override - public Map<String, RecordTemplate> getLatestAspectsForUrn(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) { - Map<EntityAspectIdentifier, EntityAspect> batchGetResults = getLatestAspect(new HashSet<>(Arrays.asList(urn)), aspectNames); + public Map<String, RecordTemplate> getLatestAspectsForUrn( + @Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) { + Map<EntityAspectIdentifier, EntityAspect> batchGetResults = + getLatestAspect(new HashSet<>(Arrays.asList(urn)), aspectNames); final Map<String, RecordTemplate> result = new HashMap<>(); - batchGetResults.forEach((key, aspectEntry) -> { - final String aspectName = key.getAspect(); - final RecordTemplate aspectRecord = EntityUtils.toAspectRecord(urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); - result.put(aspectName, aspectRecord); - }); + batchGetResults.forEach( + (key, aspectEntry) -> { + final String aspectName = key.getAspect(); + final RecordTemplate aspectRecord = + EntityUtils.toAspectRecord( + urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); + result.put(aspectName, aspectRecord); + }); return result; } /** * Retrieves an aspect having a specific {@link Urn}, name, & version. * - * Note that once we drop support for legacy aspect-specific resources, - * we should make this a protected method. Only visible for backwards compatibility. 
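
The `getAspect` refactor just below leans on the `Optional.ofNullable(...).map(...).orElse(null)` idiom for null-safe DAO reads. A minimal sketch of that idiom in isolation, with a plain map standing in for the DAO and `String::new` standing in for `EntityUtils.toAspectRecord`:

```java
import java.util.Map;
import java.util.Optional;

public final class NullSafeLookupSketch {
  /** Wraps a nullable lookup in Optional, transforms it, and unwraps back to nullable. */
  static String readAndDecode(Map<String, byte[]> store, String key) {
    return Optional.ofNullable(store.get(key)) // the store may return null
        .map(String::new) // placeholder for toAspectRecord(...)
        .orElse(null); // callers still expect null, not Optional
  }

  public static void main(String[] args) {
    Map<String, byte[]> store = Map.of("urn:li:x", "hello".getBytes());
    System.out.println(readAndDecode(store, "urn:li:x")); // hello
    System.out.println(readAndDecode(store, "missing")); // null
  }
}
```
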
+ * <p>Note that once we drop support for legacy aspect-specific resources, we should make this a + * protected method. Only visible for backwards compatibility. * * @param urn an urn associated with the requested aspect * @param aspectName name of the aspect requested * @param version specific version of the aspect being requests - * @return the {@link RecordTemplate} representation of the requested aspect object, or null if one cannot be found + * @return the {@link RecordTemplate} representation of the requested aspect object, or null if + * one cannot be found */ @Nullable @Override - public RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { + public RecordTemplate getAspect( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { - log.debug("Invoked getAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version); + log.debug( + "Invoked getAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version); version = calculateVersionNumber(urn, aspectName, version); - final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version); - final Optional<EntityAspect> maybeAspect = Optional.ofNullable(_aspectDao.getAspect(primaryKey)); - return maybeAspect.map( - aspect -> EntityUtils.toAspectRecord(urn, aspectName, aspect.getMetadata(), getEntityRegistry())).orElse(null); + final EntityAspectIdentifier primaryKey = + new EntityAspectIdentifier(urn.toString(), aspectName, version); + final Optional<EntityAspect> maybeAspect = + Optional.ofNullable(_aspectDao.getAspect(primaryKey)); + return maybeAspect + .map( + aspect -> + EntityUtils.toAspectRecord( + urn, aspectName, aspect.getMetadata(), getEntityRegistry())) + .orElse(null); } /** - * Retrieves the latest aspects for the given urn as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given urn as dynamic aspect objects (Without having to + * define union objects) * * @param entityName name of the entity to fetch * @param urn urn of entity to fetch @@ -285,13 +306,14 @@ public RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String as public EntityResponse getEntityV2( @Nonnull final String entityName, @Nonnull final Urn urn, - @Nonnull final Set<String> aspectNames) throws URISyntaxException { + @Nonnull final Set<String> aspectNames) + throws URISyntaxException { return getEntitiesV2(entityName, Collections.singleton(urn), aspectNames).get(urn); } /** - * Retrieves the latest aspects for the given set of urns as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given set of urns as dynamic aspect objects (Without + * having to define union objects) * * @param entityName name of the entity to fetch * @param urns set of urns to fetch @@ -302,29 +324,31 @@ public EntityResponse getEntityV2( public Map<Urn, EntityResponse> getEntitiesV2( @Nonnull final String entityName, @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) throws URISyntaxException { - return getLatestEnvelopedAspects(entityName, urns, aspectNames) - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); + @Nonnull final Set<String> aspectNames) + throws URISyntaxException { + return getLatestEnvelopedAspects(entityName, urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + 
Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); } /** - * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects (Without + * having to define union objects) * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link Entity} object */ @Override public Map<Urn, EntityResponse> getEntitiesVersionedV2( - @Nonnull final Set<VersionedUrn> versionedUrns, - @Nonnull final Set<String> aspectNames) throws URISyntaxException { - return getVersionedEnvelopedAspects(versionedUrns, aspectNames) - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); + @Nonnull final Set<VersionedUrn> versionedUrns, @Nonnull final Set<String> aspectNames) + throws URISyntaxException { + return getVersionedEnvelopedAspects(versionedUrns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); } /** @@ -338,16 +362,21 @@ public Map<Urn, EntityResponse> getEntitiesVersionedV2( @Override public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects( // TODO: entityName is unused, can we remove this as a param? - @Nonnull String entityName, - @Nonnull Set<Urn> urns, - @Nonnull Set<String> aspectNames) throws URISyntaxException { - - final Set<EntityAspectIdentifier> dbKeys = urns.stream() - .map(urn -> aspectNames.stream() - .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList())) - .flatMap(List::stream) - .collect(Collectors.toSet()); + @Nonnull String entityName, @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws URISyntaxException { + + final Set<EntityAspectIdentifier> dbKeys = + urns.stream() + .map( + urn -> + aspectNames.stream() + .map( + aspectName -> + new EntityAspectIdentifier( + urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList())) + .flatMap(List::stream) + .collect(Collectors.toSet()); return getCorrespondingAspects(dbKeys, urns); } @@ -355,61 +384,86 @@ public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects( /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link EnvelopedAspect} object */ @Override public Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( - @Nonnull Set<VersionedUrn> versionedUrns, - @Nonnull Set<String> aspectNames) throws URISyntaxException { + @Nonnull Set<VersionedUrn> versionedUrns, @Nonnull Set<String> aspectNames) + throws URISyntaxException { - Map<String, Map<String, Long>> urnAspectVersionMap = versionedUrns.stream() - .collect(Collectors.toMap(versionedUrn -> versionedUrn.getUrn().toString(), - versionedUrn -> VersionedUrnUtils.convertVersionStamp(versionedUrn.getVersionStamp()))); + Map<String, 
Map<String, Long>> urnAspectVersionMap = + versionedUrns.stream() + .collect( + Collectors.toMap( + versionedUrn -> versionedUrn.getUrn().toString(), + versionedUrn -> + VersionedUrnUtils.convertVersionStamp(versionedUrn.getVersionStamp()))); // Cover full/partial versionStamp - final Set<EntityAspectIdentifier> dbKeys = urnAspectVersionMap.entrySet().stream() - .filter(entry -> !entry.getValue().isEmpty()) - .map(entry -> aspectNames.stream() - .filter(aspectName -> entry.getValue().containsKey(aspectName)) - .map(aspectName -> new EntityAspectIdentifier(entry.getKey(), aspectName, - entry.getValue().get(aspectName))) - .collect(Collectors.toList())) - .flatMap(List::stream) - .collect(Collectors.toSet()); + final Set<EntityAspectIdentifier> dbKeys = + urnAspectVersionMap.entrySet().stream() + .filter(entry -> !entry.getValue().isEmpty()) + .map( + entry -> + aspectNames.stream() + .filter(aspectName -> entry.getValue().containsKey(aspectName)) + .map( + aspectName -> + new EntityAspectIdentifier( + entry.getKey(), aspectName, entry.getValue().get(aspectName))) + .collect(Collectors.toList())) + .flatMap(List::stream) + .collect(Collectors.toSet()); // Cover empty versionStamp - dbKeys.addAll(urnAspectVersionMap.entrySet().stream() - .filter(entry -> entry.getValue().isEmpty()) - .map(entry -> aspectNames.stream() - .map(aspectName -> new EntityAspectIdentifier(entry.getKey(), aspectName, 0L)) - .collect(Collectors.toList())) - .flatMap(List::stream) - .collect(Collectors.toSet())); - - return getCorrespondingAspects(dbKeys, versionedUrns.stream() - .map(versionedUrn -> versionedUrn.getUrn().toString()) - .map(UrnUtils::getUrn).collect(Collectors.toSet())); - } - - private Map<Urn, List<EnvelopedAspect>> getCorrespondingAspects(Set<EntityAspectIdentifier> dbKeys, Set<Urn> urns) { - - final Map<EntityAspectIdentifier, EnvelopedAspect> envelopedAspectMap = getEnvelopedAspects(dbKeys); + dbKeys.addAll( + urnAspectVersionMap.entrySet().stream() + .filter(entry -> entry.getValue().isEmpty()) + .map( + entry -> + aspectNames.stream() + .map( + aspectName -> + new EntityAspectIdentifier(entry.getKey(), aspectName, 0L)) + .collect(Collectors.toList())) + .flatMap(List::stream) + .collect(Collectors.toSet())); + + return getCorrespondingAspects( + dbKeys, + versionedUrns.stream() + .map(versionedUrn -> versionedUrn.getUrn().toString()) + .map(UrnUtils::getUrn) + .collect(Collectors.toSet())); + } + + private Map<Urn, List<EnvelopedAspect>> getCorrespondingAspects( + Set<EntityAspectIdentifier> dbKeys, Set<Urn> urns) { + + final Map<EntityAspectIdentifier, EnvelopedAspect> envelopedAspectMap = + getEnvelopedAspects(dbKeys); // Group result by Urn - final Map<String, List<EnvelopedAspect>> urnToAspects = envelopedAspectMap.entrySet() - .stream() - .collect(Collectors.groupingBy(entry -> entry.getKey().getUrn(), - Collectors.mapping(Map.Entry::getValue, Collectors.toList()))); + final Map<String, List<EnvelopedAspect>> urnToAspects = + envelopedAspectMap.entrySet().stream() + .collect( + Collectors.groupingBy( + entry -> entry.getKey().getUrn(), + Collectors.mapping(Map.Entry::getValue, Collectors.toList()))); final Map<Urn, List<EnvelopedAspect>> result = new HashMap<>(); for (Urn urn : urns) { - List<EnvelopedAspect> aspects = urnToAspects.getOrDefault(urn.toString(), Collections.emptyList()); + List<EnvelopedAspect> aspects = + urnToAspects.getOrDefault(urn.toString(), Collections.emptyList()); EnvelopedAspect keyAspect = getKeyEnvelopedAspect(urn); // Add key aspect if it does not exist in the 
returned aspects - if (aspects.isEmpty() || aspects.stream().noneMatch(aspect -> keyAspect.getName().equals(aspect.getName()))) { - result.put(urn, ImmutableList.<EnvelopedAspect>builder().addAll(aspects).add(keyAspect).build()); + if (aspects.isEmpty() + || aspects.stream().noneMatch(aspect -> keyAspect.getName().equals(aspect.getName()))) { + result.put( + urn, ImmutableList.<EnvelopedAspect>builder().addAll(aspects).add(keyAspect).build()); } else { result.put(urn, aspects); } @@ -427,33 +481,42 @@ private Map<Urn, List<EnvelopedAspect>> getCorrespondingAspects(Set<EntityAspect */ @Override public EnvelopedAspect getLatestEnvelopedAspect( - @Nonnull final String entityName, - @Nonnull final Urn urn, - @Nonnull final String aspectName) throws Exception { - return getLatestEnvelopedAspects(entityName, ImmutableSet.of(urn), ImmutableSet.of(aspectName)).getOrDefault(urn, Collections.emptyList()) + @Nonnull final String entityName, @Nonnull final Urn urn, @Nonnull final String aspectName) + throws Exception { + return getLatestEnvelopedAspects(entityName, ImmutableSet.of(urn), ImmutableSet.of(aspectName)) + .getOrDefault(urn, Collections.emptyList()) .stream() .filter(envelopedAspect -> envelopedAspect.getName().equals(aspectName)) .findFirst() .orElse(null); } - /** - * Retrieves an {@link VersionedAspect}, or null if one cannot be found. - */ + /** Retrieves an {@link VersionedAspect}, or null if one cannot be found. */ @Nullable @Override - public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) { + public VersionedAspect getVersionedAspect( + @Nonnull Urn urn, @Nonnull String aspectName, long version) { - log.debug("Invoked getVersionedAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version); + log.debug( + "Invoked getVersionedAspect with urn: {}, aspectName: {}, version: {}", + urn, + aspectName, + version); VersionedAspect result = new VersionedAspect(); version = calculateVersionNumber(urn, aspectName, version); - final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version); - final Optional<EntityAspect> maybeAspect = Optional.ofNullable(_aspectDao.getAspect(primaryKey)); + final EntityAspectIdentifier primaryKey = + new EntityAspectIdentifier(urn.toString(), aspectName, version); + final Optional<EntityAspect> maybeAspect = + Optional.ofNullable(_aspectDao.getAspect(primaryKey)); RecordTemplate aspectRecord = - maybeAspect.map(aspect -> EntityUtils.toAspectRecord(urn, aspectName, aspect.getMetadata(), getEntityRegistry())) + maybeAspect + .map( + aspect -> + EntityUtils.toAspectRecord( + urn, aspectName, aspect.getMetadata(), getEntityRegistry())) .orElse(null); if (aspectRecord == null) { @@ -472,8 +535,8 @@ public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspe /** * Retrieves a list of all aspects belonging to an entity of a particular type, sorted by urn. * - * Note that once we drop support for legacy 'getAllDataPlatforms' endpoint, - * we can drop support for this unless otherwise required. Only visible for backwards compatibility. + * <p>Note that once we drop support for legacy 'getAllDataPlatforms' endpoint, we can drop + * support for this unless otherwise required. Only visible for backwards compatibility. * * @param entityName name of the entity type the aspect belongs to, e.g. 'dataset' * @param aspectName name of the aspect requested, e.g. 
'ownership' @@ -489,25 +552,39 @@ public ListResult<RecordTemplate> listLatestAspects( final int start, final int count) { - log.debug("Invoked listLatestAspects with entityName: {}, aspectName: {}, start: {}, count: {}", entityName, - aspectName, start, count); + log.debug( + "Invoked listLatestAspects with entityName: {}, aspectName: {}, start: {}, count: {}", + entityName, + aspectName, + start, + count); final ListResult<String> aspectMetadataList = _aspectDao.listLatestAspectMetadata(entityName, aspectName, start, count); final List<RecordTemplate> aspects = new ArrayList<>(); for (int i = 0; i < aspectMetadataList.getValues().size(); i++) { - aspects.add(EntityUtils.toAspectRecord(aspectMetadataList.getMetadata().getExtraInfos().get(i).getUrn(), aspectName, - aspectMetadataList.getValues().get(i), getEntityRegistry())); + aspects.add( + EntityUtils.toAspectRecord( + aspectMetadataList.getMetadata().getExtraInfos().get(i).getUrn(), + aspectName, + aspectMetadataList.getValues().get(i), + getEntityRegistry())); } - return new ListResult<>(aspects, aspectMetadataList.getMetadata(), aspectMetadataList.getNextStart(), - aspectMetadataList.isHasNext(), aspectMetadataList.getTotalCount(), aspectMetadataList.getTotalPageCount(), + return new ListResult<>( + aspects, + aspectMetadataList.getMetadata(), + aspectMetadataList.getNextStart(), + aspectMetadataList.isHasNext(), + aspectMetadataList.getTotalCount(), + aspectMetadataList.getTotalPageCount(), aspectMetadataList.getPageSize()); } /** * Common batch-like pattern used primarily in tests. + * * @param entityUrn the entity urn * @param pairList list of aspects in pairs of aspect name and record template * @param auditStamp audit stamp @@ -515,38 +592,46 @@ public ListResult<RecordTemplate> listLatestAspects( * @return update result */ @Override - public List<UpdateAspectResult> ingestAspects(@Nonnull Urn entityUrn, - List<Pair<String, RecordTemplate>> pairList, - @Nonnull final AuditStamp auditStamp, - SystemMetadata systemMetadata) { - List<? extends AbstractBatchItem> items = pairList.stream() - .map(pair -> UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(pair.getKey()) - .aspect(pair.getValue()) - .systemMetadata(systemMetadata) - .build(_entityRegistry)) + public List<UpdateAspectResult> ingestAspects( + @Nonnull Urn entityUrn, + List<Pair<String, RecordTemplate>> pairList, + @Nonnull final AuditStamp auditStamp, + SystemMetadata systemMetadata) { + List<? extends AbstractBatchItem> items = + pairList.stream() + .map( + pair -> + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) .collect(Collectors.toList()); return ingestAspects(AspectsBatchImpl.builder().items(items).build(), auditStamp, true, true); } /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataChangeLog}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataChangeLog}. 
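
`ingestAspects(entityUrn, pairList, ...)` above turns (aspect name, value) pairs into batch items with a stream map. A hedged sketch of that conversion using `Map.Entry` as the pair and a record as the item; the real `UpsertBatchItem.builder()` additionally wires in system metadata and entity-registry validation:

```java
import java.util.AbstractMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public final class BatchSketch {
  /** Minimal stand-in for an upsert batch item. */
  public record Item(String urn, String aspectName, Object aspect) {}

  /** Maps (aspectName, value) pairs for one urn into batch items, as the stream above does. */
  public static List<Item> toItems(String entityUrn, List<Map.Entry<String, Object>> pairs) {
    return pairs.stream()
        .map(pair -> new Item(entityUrn, pair.getKey(), pair.getValue()))
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<Map.Entry<String, Object>> pairs =
        List.of(new AbstractMap.SimpleEntry<>("ownership", new Object()));
    System.out.println(toItems("urn:li:dataset:x", pairs));
  }
}
```
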
* * @param aspectsBatch aspects to write * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time - * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in correspondence upon - * successful update + * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in + * correspondence upon successful update * @return the {@link RecordTemplate} representation of the written aspect object */ @Override - public List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch, - @Nonnull final AuditStamp auditStamp, - boolean emitMCL, - boolean overwrite) { - - Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); - List<UpdateAspectResult> ingestResults = ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite); + public List<UpdateAspectResult> ingestAspects( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean emitMCL, + boolean overwrite) { + + Timer.Context ingestToLocalDBTimer = + MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); + List<UpdateAspectResult> ingestResults = + ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite); List<UpdateAspectResult> mclResults = emitMCL(ingestResults, emitMCL); ingestToLocalDBTimer.stop(); @@ -554,135 +639,197 @@ public List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspect } /** - * Checks whether there is an actual update to the aspect by applying the updateLambda - * If there is an update, push the new version into the local DB. - * Otherwise, do not push the new version, but just update the system metadata. + * Checks whether there is an actual update to the aspect by applying the updateLambda If there is + * an update, push the new version into the local DB. Otherwise, do not push the new version, but + * just update the system metadata. 
* - * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of the aspect being - * inserted, and a function to apply to the latest version of the aspect to get the updated version + * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of + * the aspect being inserted, and a function to apply to the latest version of the aspect to + * get the updated version * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time * @return Details about the new and old version of the aspect */ @Nonnull - private List<UpdateAspectResult> ingestAspectsToLocalDB(@Nonnull final AspectsBatch aspectsBatch, - @Nonnull final AuditStamp auditStamp, - boolean overwrite) { + private List<UpdateAspectResult> ingestAspectsToLocalDB( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean overwrite) { if (aspectsBatch.containsDuplicateAspects()) { log.warn(String.format("Batch contains duplicates: %s", aspectsBatch)); } - return _aspectDao.runInTransactionWithRetry((tx) -> { - // Read before write is unfortunate, however batch it - Map<String, Set<String>> urnAspects = aspectsBatch.getUrnAspectsMap(); - // read #1 - Map<String, Map<String, EntityAspect>> latestAspects = _aspectDao.getLatestAspects(urnAspects); - // read #2 - Map<String, Map<String, Long>> nextVersions = _aspectDao.getNextVersions(urnAspects); - - List<UpsertBatchItem> items = aspectsBatch.getItems().stream() - .map(item -> { - if (item instanceof UpsertBatchItem) { - return (UpsertBatchItem) item; - } else { - // patch to upsert - PatchBatchItem patchBatchItem = (PatchBatchItem) item; - final String urnStr = patchBatchItem.getUrn().toString(); - final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(patchBatchItem.getAspectName()); - final RecordTemplate currentValue = latest != null - ? 
EntityUtils.toAspectRecord(patchBatchItem.getUrn(), patchBatchItem.getAspectName(), latest.getMetadata(), _entityRegistry) : null; - return patchBatchItem.applyPatch(_entityRegistry, currentValue); - } - }) - .collect(Collectors.toList()); - - // Database Upsert results - List<UpdateAspectResult> upsertResults = items.stream() - .map(item -> { - final String urnStr = item.getUrn().toString(); - final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName()); - final long nextVersion = nextVersions.getOrDefault(urnStr, Map.of()).getOrDefault(item.getAspectName(), 0L); - - final UpdateAspectResult result; - if (overwrite || latest == null) { - result = ingestAspectToLocalDB(tx, item.getUrn(), item.getAspectName(), item.getAspect(), - auditStamp, item.getSystemMetadata(), latest, nextVersion).toBuilder().request(item).build(); - - // support inner-batch upserts - latestAspects.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), item.toLatestEntityAspect(auditStamp)); - nextVersions.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), nextVersion + 1); - } else { - RecordTemplate oldValue = EntityUtils.toAspectRecord(item.getUrn().getEntityType(), item.getAspectName(), - latest.getMetadata(), getEntityRegistry()); - SystemMetadata oldMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); - result = UpdateAspectResult.builder() - .urn(item.getUrn()) - .request(item) - .oldValue(oldValue) - .newValue(oldValue) - .oldSystemMetadata(oldMetadata) - .newSystemMetadata(oldMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(auditStamp) - .maxVersion(latest.getVersion()) - .build(); - } - - return result; - }).collect(Collectors.toList()); - - // commit upserts prior to retention or kafka send, if supported by impl - if (tx != null) { - tx.commitAndContinue(); - } + return _aspectDao.runInTransactionWithRetry( + (tx) -> { + // Read before write is unfortunate, however batch it + Map<String, Set<String>> urnAspects = aspectsBatch.getUrnAspectsMap(); + // read #1 + Map<String, Map<String, EntityAspect>> latestAspects = + _aspectDao.getLatestAspects(urnAspects); + // read #2 + Map<String, Map<String, Long>> nextVersions = _aspectDao.getNextVersions(urnAspects); + + List<UpsertBatchItem> items = + aspectsBatch.getItems().stream() + .map( + item -> { + if (item instanceof UpsertBatchItem) { + return (UpsertBatchItem) item; + } else { + // patch to upsert + PatchBatchItem patchBatchItem = (PatchBatchItem) item; + final String urnStr = patchBatchItem.getUrn().toString(); + final EntityAspect latest = + latestAspects + .getOrDefault(urnStr, Map.of()) + .get(patchBatchItem.getAspectName()); + final RecordTemplate currentValue = + latest != null + ? 
EntityUtils.toAspectRecord( + patchBatchItem.getUrn(), + patchBatchItem.getAspectName(), + latest.getMetadata(), + _entityRegistry) + : null; + return patchBatchItem.applyPatch(_entityRegistry, currentValue); + } + }) + .collect(Collectors.toList()); + + // Database Upsert results + List<UpdateAspectResult> upsertResults = + items.stream() + .map( + item -> { + final String urnStr = item.getUrn().toString(); + final EntityAspect latest = + latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName()); + final long nextVersion = + nextVersions + .getOrDefault(urnStr, Map.of()) + .getOrDefault(item.getAspectName(), 0L); + + final UpdateAspectResult result; + if (overwrite || latest == null) { + result = + ingestAspectToLocalDB( + tx, + item.getUrn(), + item.getAspectName(), + item.getAspect(), + auditStamp, + item.getSystemMetadata(), + latest, + nextVersion) + .toBuilder() + .request(item) + .build(); + + // support inner-batch upserts + latestAspects + .computeIfAbsent(urnStr, key -> new HashMap<>()) + .put(item.getAspectName(), item.toLatestEntityAspect(auditStamp)); + nextVersions + .computeIfAbsent(urnStr, key -> new HashMap<>()) + .put(item.getAspectName(), nextVersion + 1); + } else { + RecordTemplate oldValue = + EntityUtils.toAspectRecord( + item.getUrn().getEntityType(), + item.getAspectName(), + latest.getMetadata(), + getEntityRegistry()); + SystemMetadata oldMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + result = + UpdateAspectResult.builder() + .urn(item.getUrn()) + .request(item) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(oldMetadata) + .newSystemMetadata(oldMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(latest.getVersion()) + .build(); + } + + return result; + }) + .collect(Collectors.toList()); + + // commit upserts prior to retention or kafka send, if supported by impl + if (tx != null) { + tx.commitAndContinue(); + } - // Retention optimization and tx - if (_retentionService != null) { - List<RetentionService.RetentionContext> retentionBatch = upsertResults.stream() - // Only consider retention when there was a previous version - .filter(result -> latestAspects.containsKey(result.getUrn().toString()) - && latestAspects.get(result.getUrn().toString()).containsKey(result.getRequest().getAspectName())) - .filter(result -> { - RecordTemplate oldAspect = result.getOldValue(); - RecordTemplate newAspect = result.getNewValue(); - // Apply retention policies if there was an update to existing aspect value - return oldAspect != newAspect && oldAspect != null && _retentionService != null; - }) - .map(result -> RetentionService.RetentionContext.builder() - .urn(result.getUrn()) - .aspectName(result.getRequest().getAspectName()) - .maxVersion(Optional.of(result.getMaxVersion())) - .build()) - .collect(Collectors.toList()); - _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); - } else { - log.warn("Retention service is missing!"); - } + // Retention optimization and tx + if (_retentionService != null) { + List<RetentionService.RetentionContext> retentionBatch = + upsertResults.stream() + // Only consider retention when there was a previous version + .filter( + result -> + latestAspects.containsKey(result.getUrn().toString()) + && latestAspects + .get(result.getUrn().toString()) + .containsKey(result.getRequest().getAspectName())) + .filter( + result -> { + RecordTemplate oldAspect = result.getOldValue(); + RecordTemplate newAspect = result.getNewValue(); + // 
Apply retention policies if there was an update to existing aspect + // value + return oldAspect != newAspect + && oldAspect != null + && _retentionService != null; + }) + .map( + result -> + RetentionService.RetentionContext.builder() + .urn(result.getUrn()) + .aspectName(result.getRequest().getAspectName()) + .maxVersion(Optional.of(result.getMaxVersion())) + .build()) + .collect(Collectors.toList()); + _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); + } else { + log.warn("Retention service is missing!"); + } - return upsertResults; - }, aspectsBatch, DEFAULT_MAX_TRANSACTION_RETRY); + return upsertResults; + }, + aspectsBatch, + DEFAULT_MAX_TRANSACTION_RETRY); } @Nonnull private List<UpdateAspectResult> emitMCL(List<UpdateAspectResult> sqlResults, boolean emitMCL) { - List<UpdateAspectResult> withEmitMCL = sqlResults.stream() + List<UpdateAspectResult> withEmitMCL = + sqlResults.stream() .map(result -> emitMCL ? conditionallyProduceMCLAsync(result) : result) .collect(Collectors.toList()); // join futures messages, capture error state - List<Pair<Boolean, UpdateAspectResult>> statusPairs = withEmitMCL.stream() + List<Pair<Boolean, UpdateAspectResult>> statusPairs = + withEmitMCL.stream() .filter(result -> result.getMclFuture() != null) - .map(result -> { - try { - result.getMclFuture().get(); - return Pair.of(true, result); - } catch (InterruptedException | ExecutionException e) { - return Pair.of(false, result); - } - }).collect(Collectors.toList()); + .map( + result -> { + try { + result.getMclFuture().get(); + return Pair.of(true, result); + } catch (InterruptedException | ExecutionException e) { + return Pair.of(false, result); + } + }) + .collect(Collectors.toList()); if (statusPairs.stream().anyMatch(p -> !p.getFirst())) { - log.error("Failed to produce MCLs: {}", statusPairs.stream() + log.error( + "Failed to produce MCLs: {}", + statusPairs.stream() .filter(p -> !p.getFirst()) .map(Pair::getValue) .map(v -> v.getRequest().toString()) @@ -695,12 +842,14 @@ private List<UpdateAspectResult> emitMCL(List<UpdateAspectResult> sqlResults, bo } /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataAuditEvent}. * - * This method runs a read -> write atomically in a single transaction, this is to prevent multiple IDs from being created. + * <p>This method runs a read -> write atomically in a single transaction, this is to prevent + * multiple IDs from being created. * - * Note that in general, this should not be used externally. It is currently serving upgrade scripts and - * is as such public. + * <p>Note that in general, this should not be used externally. It is currently serving upgrade + * scripts and is as such public. 
* * @param urn an urn associated with the new aspect * @param aspectName name of the aspect being inserted @@ -711,15 +860,22 @@ private List<UpdateAspectResult> emitMCL(List<UpdateAspectResult> sqlResults, bo */ @Nullable @Override - public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, - @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, - @Nonnull AuditStamp auditStamp, - @Nonnull SystemMetadata systemMetadata) { - log.debug("Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", urn, aspectName, newValue); - - AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() - .one(UpsertBatchItem.builder() + public RecordTemplate ingestAspectIfNotPresent( + @Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { + log.debug( + "Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", + urn, + aspectName, + newValue); + + AspectsBatchImpl aspectsBatch = + AspectsBatchImpl.builder() + .one( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(newValue) @@ -733,150 +889,208 @@ public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, /** * Wrapper around batch method for single item + * * @param proposal the proposal * @param auditStamp an audit stamp representing the time and actor proposing the change - * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @param async a flag to control whether we commit to primary store or just write to proposal log + * before returning * @return an {@link IngestResult} containing the results */ @Override - public IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { - return ingestProposal(AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), auditStamp, - async).stream().findFirst().get(); + public IngestResult ingestProposal( + MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { + return ingestProposal( + AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), + auditStamp, + async) + .stream() + .findFirst() + .get(); } /** - * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any additional aspects or do any - * enrichment, instead it changes only those which are provided inside the metadata change proposal. + * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any + * additional aspects or do any enrichment, instead it changes only those which are provided + * inside the metadata change proposal. * - * Do not use this method directly for creating new entities, as it DOES NOT create an Entity Key aspect in the DB. Instead, - * use an Entity Client. + * <p>Do not use this method directly for creating new entities, as it DOES NOT create an Entity + * Key aspect in the DB. Instead, use an Entity Client. 
* * @param aspectsBatch the proposals to ingest * @param auditStamp an audit stamp representing the time and actor proposing the change - * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @param async a flag to control whether we commit to primary store or just write to proposal log + * before returning * @return an {@link IngestResult} containing the results */ @Override - public Set<IngestResult> ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) { + public Set<IngestResult> ingestProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) { - Stream<IngestResult> timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch, auditStamp); - Stream<IngestResult> nonTimeseriesIngestResults = async ? ingestProposalAsync(aspectsBatch) - : ingestProposalSync(aspectsBatch, auditStamp); + Stream<IngestResult> timeseriesIngestResults = + ingestTimeseriesProposal(aspectsBatch, auditStamp); + Stream<IngestResult> nonTimeseriesIngestResults = + async ? ingestProposalAsync(aspectsBatch) : ingestProposalSync(aspectsBatch, auditStamp); - return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults).collect(Collectors.toSet()); + return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults) + .collect(Collectors.toSet()); } /** * Timeseries is pass through to MCL, no MCP + * * @param aspectsBatch timeseries upserts batch * @param auditStamp provided audit information * @return returns ingest proposal result, however was never in the MCP topic */ - private Stream<IngestResult> ingestTimeseriesProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp) { - List<? extends AbstractBatchItem> unsupported = aspectsBatch.getItems().stream() - .filter(item -> item.getAspectSpec().isTimeseries() && item.getChangeType() != ChangeType.UPSERT) + private Stream<IngestResult> ingestTimeseriesProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp) { + List<? 
extends AbstractBatchItem> unsupported = + aspectsBatch.getItems().stream() + .filter( + item -> + item.getAspectSpec().isTimeseries() + && item.getChangeType() != ChangeType.UPSERT) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { - throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() - .map(AbstractBatchItem::getChangeType).collect(Collectors.toSet())); + throw new UnsupportedOperationException( + "ChangeType not supported: " + + unsupported.stream() + .map(AbstractBatchItem::getChangeType) + .collect(Collectors.toSet())); } - List<Pair<UpsertBatchItem, Optional<Pair<Future<?>, Boolean>>>> timeseriesResults = aspectsBatch.getItems().stream() + List<Pair<UpsertBatchItem, Optional<Pair<Future<?>, Boolean>>>> timeseriesResults = + aspectsBatch.getItems().stream() .filter(item -> item.getAspectSpec().isTimeseries()) .map(item -> (UpsertBatchItem) item) - .map(item -> Pair.of(item, conditionallyProduceMCLAsync(null, null, item.getAspect(), item.getSystemMetadata(), - item.getMetadataChangeProposal(), item.getUrn(), auditStamp, item.getAspectSpec()))) + .map( + item -> + Pair.of( + item, + conditionallyProduceMCLAsync( + null, + null, + item.getAspect(), + item.getSystemMetadata(), + item.getMetadataChangeProposal(), + item.getUrn(), + auditStamp, + item.getAspectSpec()))) .collect(Collectors.toList()); - return timeseriesResults.stream().map(result -> { - Optional<Pair<Future<?>, Boolean>> emissionStatus = result.getSecond(); - - emissionStatus.ifPresent(status -> { - try { - status.getFirst().get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); - - UpsertBatchItem request = result.getFirst(); - return IngestResult.builder() - .urn(request.getUrn()) - .request(request) - .publishedMCL(emissionStatus.map(status -> status.getFirst() != null).orElse(false)) - .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false)) - .build(); - }); + return timeseriesResults.stream() + .map( + result -> { + Optional<Pair<Future<?>, Boolean>> emissionStatus = result.getSecond(); + + emissionStatus.ifPresent( + status -> { + try { + status.getFirst().get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + + UpsertBatchItem request = result.getFirst(); + return IngestResult.builder() + .urn(request.getUrn()) + .request(request) + .publishedMCL( + emissionStatus.map(status -> status.getFirst() != null).orElse(false)) + .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false)) + .build(); + }); } /** * For async ingestion of non-timeseries, any change type + * * @param aspectsBatch non-timeseries ingest aspects * @return produced items to the MCP topic */ private Stream<IngestResult> ingestProposalAsync(AspectsBatch aspectsBatch) { - List<? extends AbstractBatchItem> nonTimeseries = aspectsBatch.getItems().stream() + List<? 
extends AbstractBatchItem> nonTimeseries = + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList()); - List<Future<?>> futures = nonTimeseries.stream().map(item -> - // When async is turned on, we write to proposal log and return without waiting - _producer.produceMetadataChangeProposal(item.getUrn(), item.getMetadataChangeProposal())) + List<Future<?>> futures = + nonTimeseries.stream() + .map( + item -> + // When async is turned on, we write to proposal log and return without waiting + _producer.produceMetadataChangeProposal( + item.getUrn(), item.getMetadataChangeProposal())) .filter(Objects::nonNull) .collect(Collectors.toList()); try { - return nonTimeseries.stream().map(item -> - IngestResult.builder() + return nonTimeseries.stream() + .map( + item -> + IngestResult.builder() .urn(item.getUrn()) .request(item) .publishedMCP(true) .build()); } finally { - futures.forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } } - private Stream<IngestResult> ingestProposalSync(AspectsBatch aspectsBatch, AuditStamp auditStamp) { - AspectsBatchImpl nonTimeseries = AspectsBatchImpl.builder() - .items(aspectsBatch.getItems().stream() + private Stream<IngestResult> ingestProposalSync( + AspectsBatch aspectsBatch, AuditStamp auditStamp) { + AspectsBatchImpl nonTimeseries = + AspectsBatchImpl.builder() + .items( + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList())) .build(); - List<? extends AbstractBatchItem> unsupported = nonTimeseries.getItems().stream() - .filter(item -> item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH - && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) + List<? 
extends AbstractBatchItem> unsupported = + nonTimeseries.getItems().stream() + .filter( + item -> + item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH + && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { - throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() - .map(item -> item.getMetadataChangeProposal().getChangeType()).collect(Collectors.toSet())); + throw new UnsupportedOperationException( + "ChangeType not supported: " + + unsupported.stream() + .map(item -> item.getMetadataChangeProposal().getChangeType()) + .collect(Collectors.toSet())); } List<UpdateAspectResult> upsertResults = ingestAspects(nonTimeseries, auditStamp, true, true); - return upsertResults.stream().map(result -> { - AbstractBatchItem item = result.getRequest(); + return upsertResults.stream() + .map( + result -> { + AbstractBatchItem item = result.getRequest(); - return IngestResult.builder() - .urn(item.getUrn()) - .request(item) - .publishedMCL(result.getMclFuture() != null) - .sqlCommitted(true) - .isUpdate(result.getOldValue() != null) - .build(); - }); + return IngestResult.builder() + .urn(item.getUrn()) + .request(item) + .publishedMCL(result.getMclFuture() != null) + .sqlCommitted(true) + .isUpdate(result.getOldValue() != null) + .build(); + }); } @Override - public String batchApplyRetention(Integer start, Integer count, Integer attemptWithVersion, String aspectName, - String urn) { + public String batchApplyRetention( + Integer start, Integer count, Integer attemptWithVersion, String aspectName, String urn) { BulkApplyRetentionArgs args = new BulkApplyRetentionArgs(); if (start == null) { start = 0; @@ -900,7 +1114,8 @@ private boolean preprocessEvent(MetadataChangeLog metadataChangeLog) { if (_preProcessHooks.isUiEnabled()) { if (metadataChangeLog.getSystemMetadata() != null) { if (metadataChangeLog.getSystemMetadata().getProperties() != null) { - if (UI_SOURCE.equals(metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { + if (UI_SOURCE.equals( + metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { // Pre-process the update indices hook for UI updates to avoid perceived lag from Kafka _updateIndicesService.handleChangeEvent(metadataChangeLog); return true; @@ -918,19 +1133,24 @@ public Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLi @Nonnull @Override - public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger) { + public RestoreIndicesResult restoreIndices( + @Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger) { RestoreIndicesResult result = new RestoreIndicesResult(); int ignored = 0; int rowsMigrated = 0; logger.accept(String.format("Args are %s", args)); - logger.accept(String.format( - "Reading rows %s through %s from the aspects table started.", args.start, args.start + args.batchSize)); + logger.accept( + String.format( + "Reading rows %s through %s from the aspects table started.", + args.start, args.start + args.batchSize)); long startTime = System.currentTimeMillis(); PagedList<EbeanAspectV2> rows = _aspectDao.getPagedAspects(args); result.timeSqlQueryMs = System.currentTimeMillis() - startTime; startTime = System.currentTimeMillis(); - logger.accept(String.format( - "Reading rows %s through %s from the aspects table completed.", args.start, args.start + args.batchSize)); + logger.accept( + String.format( + "Reading rows %s 
through %s from the aspects table completed.", + args.start, args.start + args.batchSize)); LinkedList<Future<?>> futures = new LinkedList<>(); @@ -942,8 +1162,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - logger.accept(String.format("Failed to bind Urn with value %s into Urn object: %s. Ignoring row.", - aspect.getKey().getUrn(), e)); + logger.accept( + String.format( + "Failed to bind Urn with value %s into Urn object: %s. Ignoring row.", + aspect.getKey().getUrn(), e)); ignored = ignored + 1; continue; } @@ -956,8 +1178,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - logger.accept(String.format("Failed to find entity with name %s in Entity Registry: %s. Ignoring row.", - entityName, e)); + logger.accept( + String.format( + "Failed to find entity with name %s in Entity Registry: %s. Ignoring row.", + entityName, e)); ignored = ignored + 1; continue; } @@ -968,8 +1192,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No // 3. Verify that the aspect is a valid aspect associated with the entity AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - logger.accept(String.format("Failed to find aspect with name %s associated with entity named %s", aspectName, - entityName)); + logger.accept( + String.format( + "Failed to find aspect with name %s associated with entity named %s", + aspectName, entityName)); ignored = ignored + 1; continue; } @@ -979,10 +1205,14 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No // 4. Create record from json aspect final RecordTemplate aspectRecord; try { - aspectRecord = EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + aspectRecord = + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - logger.accept(String.format("Failed to deserialize row %s for entity %s, aspect %s: %s. Ignoring row.", - aspect.getMetadata(), entityName, aspectName, e)); + logger.accept( + String.format( + "Failed to deserialize row %s for entity %s, aspect %s: %s. Ignoring row.", + aspect.getMetadata(), entityName, aspectName, e)); ignored = ignored + 1; continue; } @@ -990,32 +1220,50 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No startTime = System.currentTimeMillis(); // Force indexing to skip diff mode and fix error states - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(aspect.getSystemMetadata()); - StringMap properties = latestSystemMetadata.getProperties() != null ? latestSystemMetadata.getProperties() - : new StringMap(); + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(aspect.getSystemMetadata()); + StringMap properties = + latestSystemMetadata.getProperties() != null + ? latestSystemMetadata.getProperties() + : new StringMap(); properties.put(FORCE_INDEXING_KEY, Boolean.TRUE.toString()); latestSystemMetadata.setProperties(properties); // 5. 
Produce MAE events for the aspect record - futures.add(alwaysProduceMCLAsync(urn, entityName, aspectName, aspectSpec, null, aspectRecord, null, - latestSystemMetadata, - new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - ChangeType.RESTATE).getFirst()); + futures.add( + alwaysProduceMCLAsync( + urn, + entityName, + aspectName, + aspectSpec, + null, + aspectRecord, + null, + latestSystemMetadata, + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + ChangeType.RESTATE) + .getFirst()); result.sendMessageMs += System.currentTimeMillis() - startTime; rowsMigrated++; } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); try { TimeUnit.MILLISECONDS.sleep(args.batchDelayMs); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } result.ignored = ignored; result.rowsMigrated = rowsMigrated; @@ -1030,12 +1278,16 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No * @param count the count */ @Override - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count) { - log.debug("Invoked listUrns with entityName: {}, start: {}, count: {}", entityName, start, count); + public ListUrnsResult listUrns( + @Nonnull final String entityName, final int start, final int count) { + log.debug( + "Invoked listUrns with entityName: {}, start: {}, count: {}", entityName, start, count); // If a keyAspect exists, the entity exists. - final String keyAspectName = getEntityRegistry().getEntitySpec(entityName).getKeyAspectSpec().getName(); - final ListResult<String> keyAspectList = _aspectDao.listUrns(entityName, keyAspectName, start, count); + final String keyAspectName = + getEntityRegistry().getEntitySpec(entityName).getKeyAspectSpec().getName(); + final ListResult<String> keyAspectList = + _aspectDao.listUrns(entityName, keyAspectName, start, count); final ListUrnsResult result = new ListUrnsResult(); result.setStart(start); @@ -1048,8 +1300,8 @@ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start try { entityUrns.add(Urn.createFromString(urn)); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to convert urn %s found in db to Urn object.", urn), - e); + throw new IllegalArgumentException( + String.format("Failed to convert urn %s found in db to Urn object.", urn), e); } } result.setEntities(entityUrns); @@ -1057,17 +1309,20 @@ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start } /** - * Default implementations. Subclasses should feel free to override if it's more efficient to do so. + * Default implementations. Subclasses should feel free to override if it's more efficient to do + * so. 
*/ @Override public Entity getEntity(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) { - return getEntities(Collections.singleton(urn), aspectNames).values().stream().findFirst().orElse(null); + return getEntities(Collections.singleton(urn), aspectNames).values().stream() + .findFirst() + .orElse(null); } /** * Deprecated! Use getEntitiesV2 instead. * - * Retrieves multiple entities. + * <p>Retrieves multiple entities. * * @param urns set of urns to fetch * @param aspectNames set of aspects to fetch @@ -1075,70 +1330,115 @@ public Entity getEntity(@Nonnull final Urn urn, @Nonnull final Set<String> aspec */ @Deprecated @Override - public Map<Urn, Entity> getEntities(@Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames) { + public Map<Urn, Entity> getEntities( + @Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames) { log.debug("Invoked getEntities with urns {}, aspects {}", urns, aspectNames); if (urns.isEmpty()) { return Collections.emptyMap(); } - return getSnapshotUnions(urns, aspectNames).entrySet() - .stream() + return getSnapshotUnions(urns, aspectNames).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntity(entry.getValue()))); } @Override - public Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog) { + public Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull final AspectSpec aspectSpec, + @Nonnull final MetadataChangeLog metadataChangeLog) { Future<?> future = _producer.produceMetadataChangeLog(urn, aspectSpec, metadataChangeLog); return Pair.of(future, preprocessEvent(metadataChangeLog)); } @Override - public Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, - @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, - @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, - @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, - @Nonnull final ChangeType changeType) { - final MetadataChangeLog metadataChangeLog = constructMCL(null, entityName, urn, changeType, aspectName, auditStamp, - newAspectValue, newSystemMetadata, oldAspectValue, oldSystemMetadata); + public Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, + @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, + @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, + @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType) { + final MetadataChangeLog metadataChangeLog = + constructMCL( + null, + entityName, + urn, + changeType, + aspectName, + auditStamp, + newAspectValue, + newSystemMetadata, + oldAspectValue, + oldSystemMetadata); return alwaysProduceMCLAsync(urn, aspectSpec, metadataChangeLog); } - public Optional<Pair<Future<?>, Boolean>> conditionallyProduceMCLAsync(@Nullable RecordTemplate oldAspect, - @Nullable SystemMetadata oldSystemMetadata, - RecordTemplate newAspect, SystemMetadata newSystemMetadata, - @Nullable MetadataChangeProposal mcp, Urn entityUrn, - AuditStamp auditStamp, AspectSpec aspectSpec) { + public Optional<Pair<Future<?>, Boolean>> conditionallyProduceMCLAsync( + @Nullable RecordTemplate 
oldAspect, + @Nullable SystemMetadata oldSystemMetadata, + RecordTemplate newAspect, + SystemMetadata newSystemMetadata, + @Nullable MetadataChangeProposal mcp, + Urn entityUrn, + AuditStamp auditStamp, + AspectSpec aspectSpec) { boolean isNoOp = oldAspect == newAspect; if (!isNoOp || _alwaysEmitChangeLog || shouldAspectEmitChangeLog(aspectSpec)) { - log.debug("Producing MetadataChangeLog for ingested aspect {}, urn {}", aspectSpec.getName(), entityUrn); - - final MetadataChangeLog metadataChangeLog = constructMCL(mcp, urnToEntityName(entityUrn), entityUrn, - isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, aspectSpec.getName(), auditStamp, newAspect, newSystemMetadata, - oldAspect, oldSystemMetadata); + log.debug( + "Producing MetadataChangeLog for ingested aspect {}, urn {}", + aspectSpec.getName(), + entityUrn); + + final MetadataChangeLog metadataChangeLog = + constructMCL( + mcp, + urnToEntityName(entityUrn), + entityUrn, + isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, + aspectSpec.getName(), + auditStamp, + newAspect, + newSystemMetadata, + oldAspect, + oldSystemMetadata); log.debug("Serialized MCL event: {}", metadataChangeLog); - Pair<Future<?>, Boolean> emissionStatus = alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog); + Pair<Future<?>, Boolean> emissionStatus = + alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog); return emissionStatus.getFirst() != null ? Optional.of(emissionStatus) : Optional.empty(); } else { log.debug( - "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.", - aspectSpec.getName(), entityUrn); + "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.", + aspectSpec.getName(), + entityUrn); return Optional.empty(); } } private UpdateAspectResult conditionallyProduceMCLAsync(UpdateAspectResult result) { AbstractBatchItem request = result.getRequest(); - Optional<Pair<Future<?>, Boolean>> emissionStatus = conditionallyProduceMCLAsync(result.getOldValue(), result.getOldSystemMetadata(), - result.getNewValue(), result.getNewSystemMetadata(), - request.getMetadataChangeProposal(), result.getUrn(), result.getAuditStamp(), request.getAspectSpec()); - - return emissionStatus.map(status -> - result.toBuilder() - .mclFuture(status.getFirst()) - .processedMCL(status.getSecond()) - .build() - ).orElse(result); + Optional<Pair<Future<?>, Boolean>> emissionStatus = + conditionallyProduceMCLAsync( + result.getOldValue(), + result.getOldSystemMetadata(), + result.getNewValue(), + result.getNewSystemMetadata(), + request.getMetadataChangeProposal(), + result.getUrn(), + result.getAuditStamp(), + request.getAspectSpec()); + + return emissionStatus + .map( + status -> + result.toBuilder() + .mclFuture(status.getFirst()) + .processedMCL(status.getSecond()) + .build()) + .orElse(result); } @Override @@ -1148,10 +1448,15 @@ public RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final Str } @Override - public void ingestEntities(@Nonnull final List<Entity> entities, @Nonnull final AuditStamp auditStamp, + public void ingestEntities( + @Nonnull final List<Entity> entities, + @Nonnull final AuditStamp auditStamp, @Nonnull final List<SystemMetadata> systemMetadata) { log.debug("Invoked ingestEntities with entities {}, audit stamp {}", entities, auditStamp); - Streams.zip(entities.stream(), systemMetadata.stream(), (a, b) -> new Pair<Entity, SystemMetadata>(a, b)) + Streams.zip( + entities.stream(), + systemMetadata.stream(), + (a, b) -> new Pair<Entity, 
SystemMetadata>(a, b)) .forEach(pair -> ingestEntity(pair.getFirst(), auditStamp, pair.getSecond())); } @@ -1166,42 +1471,50 @@ public SystemMetadata ingestEntity(Entity entity, AuditStamp auditStamp) { } @Override - public void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp, - @Nonnull SystemMetadata systemMetadata) { - log.debug("Invoked ingestEntity with entity {}, audit stamp {} systemMetadata {}", entity, auditStamp, systemMetadata.toString()); + public void ingestEntity( + @Nonnull Entity entity, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { + log.debug( + "Invoked ingestEntity with entity {}, audit stamp {} systemMetadata {}", + entity, + auditStamp, + systemMetadata.toString()); ingestSnapshotUnion(entity.getValue(), auditStamp, systemMetadata); } @Nonnull - protected Map<Urn, Snapshot> getSnapshotUnions(@Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { - return getSnapshotRecords(urns, aspectNames).entrySet() - .stream() + protected Map<Urn, Snapshot> getSnapshotUnions( + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + return getSnapshotRecords(urns, aspectNames).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toSnapshotUnion(entry.getValue()))); } @Nonnull - protected Map<Urn, RecordTemplate> getSnapshotRecords(@Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) { - return getLatestAspectUnions(urns, aspectNames).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toSnapshotRecord(entry.getKey(), entry.getValue()))); + protected Map<Urn, RecordTemplate> getSnapshotRecords( + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + return getLatestAspectUnions(urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> toSnapshotRecord(entry.getKey(), entry.getValue()))); } @Nonnull protected Map<Urn, List<UnionTemplate>> getLatestAspectUnions( - @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) { - return getLatestAspects(urns, aspectNames).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue() - .stream() - .map(aspectRecord -> toAspectUnion(entry.getKey(), aspectRecord)) - .collect(Collectors.toList()))); + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + return getLatestAspects(urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> + entry.getValue().stream() + .map(aspectRecord -> toAspectUnion(entry.getKey(), aspectRecord)) + .collect(Collectors.toList()))); } /** - Returns true if entityType should have some aspect as per its definition - but aspects given does not have that aspect + * Returns true if entityType should have some aspect as per its definition but aspects given does + * not have that aspect */ private boolean isAspectMissing(String entityType, String aspectName, Set<String> aspects) { return _entityRegistry.getEntitySpec(entityType).getAspectSpecMap().containsKey(aspectName) @@ -1209,32 +1522,37 @@ private boolean isAspectMissing(String entityType, String aspectName, Set<String } @Override - public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects) { + public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite( + @Nonnull final Urn urn, Map<String, RecordTemplate> 
includedAspects) { List<Pair<String, RecordTemplate>> returnAspects = new ArrayList<>(); final String keyAspectName = getKeyAspectName(urn); - final Map<String, RecordTemplate> latestAspects = new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); + final Map<String, RecordTemplate> latestAspects = + new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); // key aspect: does not exist in database && is being written - boolean generateDefaults = !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); + boolean generateDefaults = + !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); // conditionally generate defaults if (generateDefaults) { String entityType = urnToEntityName(urn); Set<String> aspectsToGet = new HashSet<>(); - boolean shouldCheckBrowsePath = isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckBrowsePath = + isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckBrowsePath) { aspectsToGet.add(BROWSE_PATHS_ASPECT_NAME); } - boolean shouldCheckBrowsePathV2 = isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckBrowsePathV2 = + isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckBrowsePathV2) { aspectsToGet.add(BROWSE_PATHS_V2_ASPECT_NAME); } - boolean shouldCheckDataPlatform = isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckDataPlatform = + isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckDataPlatform) { aspectsToGet.add(DATA_PLATFORM_INSTANCE_ASPECT_NAME); } @@ -1242,8 +1560,9 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO // fetch additional aspects latestAspects.putAll(getLatestAspectsForUrn(urn, aspectsToGet)); - if (shouldCheckBrowsePath && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { + if (shouldCheckBrowsePath + && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { try { BrowsePaths generatedBrowsePath = buildDefaultBrowsePath(urn); returnAspects.add(Pair.of(BROWSE_PATHS_ASPECT_NAME, generatedBrowsePath)); @@ -1252,8 +1571,9 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO } } - if (shouldCheckBrowsePathV2 && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { + if (shouldCheckBrowsePathV2 + && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { try { BrowsePathsV2 generatedBrowsePathV2 = buildDefaultBrowsePathV2(urn, false); returnAspects.add(Pair.of(BROWSE_PATHS_V2_ASPECT_NAME, generatedBrowsePathV2)); @@ -1262,11 +1582,13 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO } } - if (shouldCheckDataPlatform && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null - && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { + if (shouldCheckDataPlatform + && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null + && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { RecordTemplate keyAspect = includedAspects.get(keyAspectName); 
DataPlatformInstanceUtils.buildDataPlatformInstance(entityType, keyAspect) - .ifPresent(aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); + .ifPresent( + aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); } } @@ -1274,8 +1596,8 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO } @Override - public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects) { + public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing( + @Nonnull final Urn urn, Map<String, RecordTemplate> includedAspects) { final String keyAspectName = getKeyAspectName(urn); @@ -1284,10 +1606,12 @@ public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnu } else { // No key aspect being written, generate it and potentially suggest writing it later HashMap<String, RecordTemplate> includedWithKeyAspect = new HashMap<>(includedAspects); - Pair<String, RecordTemplate> keyAspect = Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); + Pair<String, RecordTemplate> keyAspect = + Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); includedWithKeyAspect.put(keyAspect.getKey(), keyAspect.getValue()); - Pair<Boolean, List<Pair<String, RecordTemplate>>> returnAspects = generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); + Pair<Boolean, List<Pair<String, RecordTemplate>>> returnAspects = + generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); // missing key aspect in database, add it if (!returnAspects.getFirst()) { @@ -1298,24 +1622,36 @@ public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnu } } - private void ingestSnapshotUnion(@Nonnull final Snapshot snapshotUnion, @Nonnull final AuditStamp auditStamp, + private void ingestSnapshotUnion( + @Nonnull final Snapshot snapshotUnion, + @Nonnull final AuditStamp auditStamp, SystemMetadata systemMetadata) { - final RecordTemplate snapshotRecord = RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion); + final RecordTemplate snapshotRecord = + RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion); final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshot(snapshotRecord); final List<Pair<String, RecordTemplate>> aspectRecordsToIngest = NewModelUtils.getAspectsFromSnapshot(snapshotRecord); log.info("INGEST urn {} with system metadata {}", urn.toString(), systemMetadata.toString()); - aspectRecordsToIngest.addAll(generateDefaultAspectsIfMissing(urn, - aspectRecordsToIngest.stream().collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); - - AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() - .items(aspectRecordsToIngest.stream().map(pair -> UpsertBatchItem.builder() - .urn(urn) - .aspectName(pair.getKey()) - .aspect(pair.getValue()) - .systemMetadata(systemMetadata) - .build(_entityRegistry)).collect(Collectors.toList())) + aspectRecordsToIngest.addAll( + generateDefaultAspectsIfMissing( + urn, + aspectRecordsToIngest.stream() + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); + + AspectsBatchImpl aspectsBatch = + AspectsBatchImpl.builder() + .items( + aspectRecordsToIngest.stream() + .map( + pair -> + UpsertBatchItem.builder() + .urn(urn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) + .collect(Collectors.toList())) .build(); ingestAspects(aspectsBatch, auditStamp, true, true); 
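
For orientation between hunks: a minimal sketch of how a caller drives the batch-ingest path that the reflow above touches. It is not part of the patch; the builder and method names (UpsertBatchItem, AspectsBatchImpl, ingestAspects) are taken from the hunks themselves, while the Status aspect value, the run id, and the helper method signature are illustrative assumptions.

    // Sketch only: assembles a one-item batch the same way ingestSnapshotUnion does above.
    private static List<UpdateAspectResult> ingestStatusExample(
        EntityService entityService, EntityRegistry entityRegistry, Urn urn) {
      // Audit stamp construction mirrors the restoreIndices hunk above.
      AuditStamp auditStamp =
          new AuditStamp()
              .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
              .setTime(System.currentTimeMillis());
      // Hypothetical run id; SystemMetadata fields follow common usage, not this patch.
      SystemMetadata systemMetadata = new SystemMetadata().setRunId("example-run");

      UpsertBatchItem item =
          UpsertBatchItem.builder()
              .urn(urn)
              .aspectName(STATUS_ASPECT_NAME) // assumed constant for the "status" aspect
              .aspect(new Status().setRemoved(false)) // hypothetical aspect value
              .systemMetadata(systemMetadata)
              .build(entityRegistry);

      // emitMCL = true, overwrite = true: the flags the single-item wrappers above pass through.
      return entityService.ingestAspects(
          AspectsBatchImpl.builder().items(List.of(item)).build(), auditStamp, true, true);
    }

Batching matters here because ingestAspectsToLocalDB amortizes its two up-front reads (getLatestAspects and getNextVersions, marked "read #1" and "read #2" in the hunk above) across every item in the batch before the single retried transaction writes them.
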
@@ -1333,7 +1669,8 @@ public AspectSpec getKeyAspectSpec(@Nonnull final String entityName) { } @Override - public Optional<AspectSpec> getAspectSpec(@Nonnull final String entityName, @Nonnull final String aspectName) { + public Optional<AspectSpec> getAspectSpec( + @Nonnull final String entityName, @Nonnull final String aspectName) { final EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); return Optional.ofNullable(entitySpec.getAspectSpec(aspectName)); } @@ -1355,25 +1692,29 @@ protected Snapshot toSnapshotUnion(@Nonnull final RecordTemplate snapshotRecord) return snapshot; } - protected RecordTemplate toSnapshotRecord(@Nonnull final Urn urn, - @Nonnull final List<UnionTemplate> aspectUnionTemplates) { + protected RecordTemplate toSnapshotRecord( + @Nonnull final Urn urn, @Nonnull final List<UnionTemplate> aspectUnionTemplates) { final String entityName = urnToEntityName(urn); final EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); return com.datahub.util.ModelUtils.newSnapshot( - getDataTemplateClassFromSchema(entitySpec.getSnapshotSchema(), RecordTemplate.class), urn, + getDataTemplateClassFromSchema(entitySpec.getSnapshotSchema(), RecordTemplate.class), + urn, aspectUnionTemplates); } - protected UnionTemplate toAspectUnion(@Nonnull final Urn urn, @Nonnull final RecordTemplate aspectRecord) { + protected UnionTemplate toAspectUnion( + @Nonnull final Urn urn, @Nonnull final RecordTemplate aspectRecord) { final EntitySpec entitySpec = _entityRegistry.getEntitySpec(urnToEntityName(urn)); final TyperefDataSchema aspectSchema = entitySpec.getAspectTyperefSchema(); if (aspectSchema == null) { throw new RuntimeException( - String.format("Aspect schema for %s is null: v4 operation is not supported on this entity registry", + String.format( + "Aspect schema for %s is null: v4 operation is not supported on this entity registry", entitySpec.getName())); } return com.datahub.util.ModelUtils.newAspectUnion( - getDataTemplateClassFromSchema(entitySpec.getAspectTyperefSchema(), UnionTemplate.class), aspectRecord); + getDataTemplateClassFromSchema(entitySpec.getAspectTyperefSchema(), UnionTemplate.class), + aspectRecord); } protected Urn toUrn(final String urnStr) { @@ -1381,26 +1722,32 @@ protected Urn toUrn(final String urnStr) { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { log.error("Failed to convert urn string {} into Urn object", urnStr); - throw new ModelConversionException(String.format("Failed to convert urn string %s into Urn object ", urnStr), e); + throw new ModelConversionException( + String.format("Failed to convert urn string %s into Urn object ", urnStr), e); } } - private EntityResponse toEntityResponse(final Urn urn, final List<EnvelopedAspect> envelopedAspects) { + private EntityResponse toEntityResponse( + final Urn urn, final List<EnvelopedAspect> envelopedAspects) { final EntityResponse response = new EntityResponse(); response.setUrn(urn); response.setEntityName(urnToEntityName(urn)); - response.setAspects(new EnvelopedAspectMap( - envelopedAspects.stream().collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)) - )); + response.setAspects( + new EnvelopedAspectMap( + envelopedAspects.stream() + .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)))); return response; } private Map<String, Set<String>> buildEntityToValidAspects(final EntityRegistry entityRegistry) { - return entityRegistry.getEntitySpecs() - .values() - .stream() - .collect(Collectors.toMap(EntitySpec::getName, - entry -> 
entry.getAspectSpecs().stream().map(AspectSpec::getName).collect(Collectors.toSet()))); + return entityRegistry.getEntitySpecs().values().stream() + .collect( + Collectors.toMap( + EntitySpec::getName, + entry -> + entry.getAspectSpecs().stream() + .map(AspectSpec::getName) + .collect(Collectors.toSet()))); } @Override @@ -1429,44 +1776,68 @@ public void setWritable(boolean canWrite) { } @Override - public RollbackRunResult rollbackRun(List<AspectRowSummary> aspectRows, String runId, boolean hardDelete) { + public RollbackRunResult rollbackRun( + List<AspectRowSummary> aspectRows, String runId, boolean hardDelete) { return rollbackWithConditions(aspectRows, Collections.singletonMap("runId", runId), hardDelete); } @Override - public RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete) { + public RollbackRunResult rollbackWithConditions( + List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete) { List<AspectRowSummary> removedAspects = new ArrayList<>(); AtomicInteger rowsDeletedFromEntityDeletion = new AtomicInteger(0); - List<Future<?>> futures = aspectRows.stream().map(aspectToRemove -> { - RollbackResult result = deleteAspect(aspectToRemove.getUrn(), aspectToRemove.getAspectName(), - conditions, hardDelete); - if (result != null) { - Optional<AspectSpec> aspectSpec = getAspectSpec(result.entityName, result.aspectName); - if (!aspectSpec.isPresent()) { - log.error("Issue while rolling back: unknown aspect {} for entity {}", result.entityName, result.aspectName); - return null; - } - - rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); - removedAspects.add(aspectToRemove); - return alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), aspectSpec.get(), - result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), - // TODO: use properly attributed audit stamp. - createSystemAuditStamp(), - result.getChangeType()).getFirst(); - } - - return null; - }).filter(Objects::nonNull).collect(Collectors.toList()); + List<Future<?>> futures = + aspectRows.stream() + .map( + aspectToRemove -> { + RollbackResult result = + deleteAspect( + aspectToRemove.getUrn(), + aspectToRemove.getAspectName(), + conditions, + hardDelete); + if (result != null) { + Optional<AspectSpec> aspectSpec = + getAspectSpec(result.entityName, result.aspectName); + if (!aspectSpec.isPresent()) { + log.error( + "Issue while rolling back: unknown aspect {} for entity {}", + result.entityName, + result.aspectName); + return null; + } + + rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); + removedAspects.add(aspectToRemove); + return alwaysProduceMCLAsync( + result.getUrn(), + result.getEntityName(), + result.getAspectName(), + aspectSpec.get(), + result.getOldValue(), + result.getNewValue(), + result.getOldSystemMetadata(), + result.getNewSystemMetadata(), + // TODO: use properly attributed audit stamp. 
+ createSystemAuditStamp(), + result.getChangeType()) + .getFirst(); + } + + return null; + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); - futures.forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion.get()); } @@ -1490,8 +1861,14 @@ public RollbackRunResult deleteUrn(Urn urn) { return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion); } - SystemMetadata latestKeySystemMetadata = EntityUtils.parseSystemMetadata(latestKey.getSystemMetadata()); - RollbackResult result = deleteAspect(urn.toString(), keyAspectName, Collections.singletonMap("runId", latestKeySystemMetadata.getRunId()), true); + SystemMetadata latestKeySystemMetadata = + EntityUtils.parseSystemMetadata(latestKey.getSystemMetadata()); + RollbackResult result = + deleteAspect( + urn.toString(), + keyAspectName, + Collections.singletonMap("runId", latestKeySystemMetadata.getRunId()), + true); if (result != null) { AspectRowSummary summary = new AspectRowSummary(); @@ -1503,11 +1880,20 @@ public RollbackRunResult deleteUrn(Urn urn) { rowsDeletedFromEntityDeletion = result.additionalRowsAffected; removedAspects.add(summary); - Future<?> future = alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), keySpec, - result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), - // TODO: Use a proper inferred audit stamp - createSystemAuditStamp(), - result.getChangeType()).getFirst(); + Future<?> future = + alwaysProduceMCLAsync( + result.getUrn(), + result.getEntityName(), + result.getAspectName(), + keySpec, + result.getOldValue(), + result.getNewValue(), + result.getOldSystemMetadata(), + result.getNewSystemMetadata(), + // TODO: Use a proper inferred audit stamp + createSystemAuditStamp(), + result.getChangeType()) + .getFirst(); if (future != null) { try { @@ -1530,9 +1916,12 @@ public RollbackRunResult deleteUrn(Urn urn) { @Override public Boolean exists(Urn urn) { final Set<String> aspectsToFetch = getEntityAspectNames(urn); - final List<EntityAspectIdentifier> dbKeys = aspectsToFetch.stream() - .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList()); + final List<EntityAspectIdentifier> dbKeys = + aspectsToFetch.stream() + .map( + aspectName -> + new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList()); Map<EntityAspectIdentifier, EntityAspect> aspects = _aspectDao.batchGet(new HashSet(dbKeys)); return aspects.values().stream().anyMatch(aspect -> aspect != null); @@ -1553,14 +1942,16 @@ public Boolean isSoftDeleted(@Nonnull final Urn urn) { @Override public Boolean exists(Urn urn, String aspectName) { - EntityAspectIdentifier dbKey = new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION); + EntityAspectIdentifier dbKey = + new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION); Map<EntityAspectIdentifier, EntityAspect> aspects = _aspectDao.batchGet(Set.of(dbKey)); return aspects.values().stream().anyMatch(Objects::nonNull); } @Nullable @Override - public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map<String, String> 
conditions, boolean hardDelete) { + public RollbackResult deleteAspect( + String urn, String aspectName, @Nonnull Map<String, String> conditions, boolean hardDelete) { // Validate pre-conditions before running queries Urn entityUrn; EntitySpec entitySpec; @@ -1573,120 +1964,153 @@ public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map<S throw new RuntimeException(String.format("Failed to extract urn from %s", urn)); } - final RollbackResult result = _aspectDao.runInTransactionWithRetry((tx) -> { - Integer additionalRowsDeleted = 0; + final RollbackResult result = + _aspectDao.runInTransactionWithRetry( + (tx) -> { + Integer additionalRowsDeleted = 0; - // 1. Fetch the latest existing version of the aspect. - final EntityAspect latest = _aspectDao.getLatestAspect(urn, aspectName); + // 1. Fetch the latest existing version of the aspect. + final EntityAspect latest = _aspectDao.getLatestAspect(urn, aspectName); - // 1.1 If no latest exists, skip this aspect - if (latest == null) { - return null; - } + // 1.1 If no latest exists, skip this aspect + if (latest == null) { + return null; + } - // 2. Compare the match conditions, if they don't match, ignore. - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); - if (!filterMatch(latestSystemMetadata, conditions)) { - return null; - } - String latestMetadata = latest.getMetadata(); + // 2. Compare the match conditions, if they don't match, ignore. + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + if (!filterMatch(latestSystemMetadata, conditions)) { + return null; + } + String latestMetadata = latest.getMetadata(); - // 3. Check if this is a key aspect - Boolean isKeyAspect = false; - try { - isKeyAspect = getKeyAspectName(Urn.createFromString(urn)).equals(aspectName); - } catch (URISyntaxException e) { - log.error("Error occurred while parsing urn: {}", urn, e); - } + // 3. Check if this is a key aspect + Boolean isKeyAspect = false; + try { + isKeyAspect = getKeyAspectName(Urn.createFromString(urn)).equals(aspectName); + } catch (URISyntaxException e) { + log.error("Error occurred while parsing urn: {}", urn, e); + } - // 4. Fetch all preceding aspects, that match - List<EntityAspect> aspectsToDelete = new ArrayList<>(); - long maxVersion = _aspectDao.getMaxVersion(urn, aspectName); - EntityAspect survivingAspect = null; - String previousMetadata = null; - boolean filterMatch = true; - while (maxVersion > 0 && filterMatch) { - EntityAspect candidateAspect = _aspectDao.getAspect(urn, aspectName, maxVersion); - SystemMetadata previousSysMetadata = EntityUtils.parseSystemMetadata(candidateAspect.getSystemMetadata()); - filterMatch = filterMatch(previousSysMetadata, conditions); - if (filterMatch) { - aspectsToDelete.add(candidateAspect); - maxVersion = maxVersion - 1; - } else { - survivingAspect = candidateAspect; - previousMetadata = survivingAspect.getMetadata(); - } - } + // 4. 
Fetch all preceding aspects, that match + List<EntityAspect> aspectsToDelete = new ArrayList<>(); + long maxVersion = _aspectDao.getMaxVersion(urn, aspectName); + EntityAspect survivingAspect = null; + String previousMetadata = null; + boolean filterMatch = true; + while (maxVersion > 0 && filterMatch) { + EntityAspect candidateAspect = _aspectDao.getAspect(urn, aspectName, maxVersion); + SystemMetadata previousSysMetadata = + EntityUtils.parseSystemMetadata(candidateAspect.getSystemMetadata()); + filterMatch = filterMatch(previousSysMetadata, conditions); + if (filterMatch) { + aspectsToDelete.add(candidateAspect); + maxVersion = maxVersion - 1; + } else { + survivingAspect = candidateAspect; + previousMetadata = survivingAspect.getMetadata(); + } + } - // 5. Apply deletes and fix up latest row - - aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect)); - - if (survivingAspect != null) { - // if there was a surviving aspect, copy its information into the latest row - // eBean does not like us updating a pkey column (version) for the surviving aspect - // as a result we copy information from survivingAspect to latest and delete survivingAspect - latest.setMetadata(survivingAspect.getMetadata()); - latest.setSystemMetadata(survivingAspect.getSystemMetadata()); - latest.setCreatedOn(survivingAspect.getCreatedOn()); - latest.setCreatedBy(survivingAspect.getCreatedBy()); - latest.setCreatedFor(survivingAspect.getCreatedFor()); - _aspectDao.saveAspect(tx, latest, false); - // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); - _aspectDao.deleteAspect(tx, survivingAspect); - } else { - if (isKeyAspect) { - if (hardDelete) { - // If this is the key aspect, delete the entity entirely. - additionalRowsDeleted = _aspectDao.deleteUrn(tx, urn); - } else if (entitySpec.hasAspect(Constants.STATUS_ASPECT_NAME)) { - // soft delete by setting status.removed=true (if applicable) - final Status statusAspect = new Status(); - statusAspect.setRemoved(true); - - final MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType(entityUrn.getEntityType()); - gmce.setAspectName(Constants.STATUS_ASPECT_NAME); - gmce.setAspect(GenericRecordUtils.serializeAspect(statusAspect)); - final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - this.ingestProposal(gmce, auditStamp, false); - } - } else { - // Else, only delete the specific aspect. - _aspectDao.deleteAspect(tx, latest); - } - } + // 5. 
Apply deletes and fix up latest row + + aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect)); + + if (survivingAspect != null) { + // if there was a surviving aspect, copy its information into the latest row + // eBean does not like us updating a pkey column (version) for the surviving aspect + // as a result we copy information from survivingAspect to latest and delete + // survivingAspect + latest.setMetadata(survivingAspect.getMetadata()); + latest.setSystemMetadata(survivingAspect.getSystemMetadata()); + latest.setCreatedOn(survivingAspect.getCreatedOn()); + latest.setCreatedBy(survivingAspect.getCreatedBy()); + latest.setCreatedFor(survivingAspect.getCreatedFor()); + _aspectDao.saveAspect(tx, latest, false); + // metrics + _aspectDao.incrementWriteMetrics( + aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); + _aspectDao.deleteAspect(tx, survivingAspect); + } else { + if (isKeyAspect) { + if (hardDelete) { + // If this is the key aspect, delete the entity entirely. + additionalRowsDeleted = _aspectDao.deleteUrn(tx, urn); + } else if (entitySpec.hasAspect(Constants.STATUS_ASPECT_NAME)) { + // soft delete by setting status.removed=true (if applicable) + final Status statusAspect = new Status(); + statusAspect.setRemoved(true); + + final MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType(entityUrn.getEntityType()); + gmce.setAspectName(Constants.STATUS_ASPECT_NAME); + gmce.setAspect(GenericRecordUtils.serializeAspect(statusAspect)); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + this.ingestProposal(gmce, auditStamp, false); + } + } else { + // Else, only delete the specific aspect. + _aspectDao.deleteAspect(tx, latest); + } + } - // 6. Emit the Update - try { - final RecordTemplate latestValue = latest == null ? null - : EntityUtils.toAspectRecord(Urn.createFromString(latest.getUrn()), latest.getAspect(), - latestMetadata, getEntityRegistry()); - - final RecordTemplate previousValue = survivingAspect == null ? null - : EntityUtils.toAspectRecord(Urn.createFromString(survivingAspect.getUrn()), - survivingAspect.getAspect(), previousMetadata, getEntityRegistry()); - - final Urn urnObj = Urn.createFromString(urn); - // We are not deleting key aspect if hardDelete has not been set so do not return a rollback result - if (isKeyAspect && !hardDelete) { - return null; - } - return new RollbackResult(urnObj, urnObj.getEntityType(), latest.getAspect(), latestValue, - previousValue, latestSystemMetadata, - previousValue == null ? null : EntityUtils.parseSystemMetadata(survivingAspect.getSystemMetadata()), - survivingAspect == null ? ChangeType.DELETE : ChangeType.UPSERT, isKeyAspect, additionalRowsDeleted); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to emit the update for urn %s", urn)); - } catch (IllegalStateException e) { - log.warn("Unable to find aspect, rollback result will not be sent. Error: {}", e.getMessage()); - return null; - } - }, DEFAULT_MAX_TRANSACTION_RETRY); + // 6. Emit the Update + try { + final RecordTemplate latestValue = + latest == null + ? null + : EntityUtils.toAspectRecord( + Urn.createFromString(latest.getUrn()), + latest.getAspect(), + latestMetadata, + getEntityRegistry()); + + final RecordTemplate previousValue = + survivingAspect == null + ? 
null + : EntityUtils.toAspectRecord( + Urn.createFromString(survivingAspect.getUrn()), + survivingAspect.getAspect(), + previousMetadata, + getEntityRegistry()); + + final Urn urnObj = Urn.createFromString(urn); + // We are not deleting key aspect if hardDelete has not been set so do not return a + // rollback result + if (isKeyAspect && !hardDelete) { + return null; + } + return new RollbackResult( + urnObj, + urnObj.getEntityType(), + latest.getAspect(), + latestValue, + previousValue, + latestSystemMetadata, + previousValue == null + ? null + : EntityUtils.parseSystemMetadata(survivingAspect.getSystemMetadata()), + survivingAspect == null ? ChangeType.DELETE : ChangeType.UPSERT, + isKeyAspect, + additionalRowsDeleted); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to emit the update for urn %s", urn)); + } catch (IllegalStateException e) { + log.warn( + "Unable to find aspect, rollback result will not be sent. Error: {}", + e.getMessage()); + return null; + } + }, + DEFAULT_MAX_TRANSACTION_RETRY); return result; } @@ -1720,21 +2144,32 @@ protected AuditStamp createSystemAuditStamp() { } @Nonnull - private Map<EntityAspectIdentifier, EntityAspect> getLatestAspect(@Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + private Map<EntityAspectIdentifier, EntityAspect> getLatestAspect( + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { log.debug("Invoked getLatestAspects with urns: {}, aspectNames: {}", urns, aspectNames); // Create DB keys - final Set<EntityAspectIdentifier> dbKeys = urns.stream().map(urn -> { - final Set<String> aspectsToFetch = aspectNames.isEmpty() ? getEntityAspectNames(urn) : aspectNames; - return aspectsToFetch.stream() - .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList()); - }).flatMap(List::stream).collect(Collectors.toSet()); + final Set<EntityAspectIdentifier> dbKeys = + urns.stream() + .map( + urn -> { + final Set<String> aspectsToFetch = + aspectNames.isEmpty() ? getEntityAspectNames(urn) : aspectNames; + return aspectsToFetch.stream() + .map( + aspectName -> + new EntityAspectIdentifier( + urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList()); + }) + .flatMap(List::stream) + .collect(Collectors.toSet()); Map<EntityAspectIdentifier, EntityAspect> batchGetResults = new HashMap<>(); Iterators.partition(dbKeys.iterator(), MAX_KEYS_PER_QUERY) - .forEachRemaining(batch -> batchGetResults.putAll(_aspectDao.batchGet(ImmutableSet.copyOf(batch)))); + .forEachRemaining( + batch -> batchGetResults.putAll(_aspectDao.batchGet(ImmutableSet.copyOf(batch)))); return batchGetResults; } @@ -1743,14 +2178,16 @@ private Map<EntityAspectIdentifier, EntityAspect> getLatestAspect(@Nonnull final * To do this, we want to fetch the maximum version and subtract the negative version from that. Since -1 represents * the maximum version, we need to add 1 to the final result. 
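   * A worked example with illustrative numbers (not taken from this changeset): if the maximum
   * stored version of an aspect is 7, requesting version -1 resolves to 7 + (-1) + 1 = 7 (the
   * latest), version -2 resolves to 6, and any non-negative version is returned unchanged.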
*/ - private long calculateVersionNumber(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { + private long calculateVersionNumber( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { if (version < 0) { return _aspectDao.getMaxVersion(urn.toString(), aspectName) + version + 1; } return version; } - private Map<EntityAspectIdentifier, EnvelopedAspect> getEnvelopedAspects(final Set<EntityAspectIdentifier> dbKeys) { + private Map<EntityAspectIdentifier, EnvelopedAspect> getEnvelopedAspects( + final Set<EntityAspectIdentifier> dbKeys) { final Map<EntityAspectIdentifier, EnvelopedAspect> result = new HashMap<>(); final Map<EntityAspectIdentifier, EntityAspect> dbEntries = _aspectDao.batchGet(dbKeys); @@ -1764,29 +2201,36 @@ private Map<EntityAspectIdentifier, EnvelopedAspect> getEnvelopedAspects(final S } // Aspect found. Now turn it into an EnvelopedAspect - final com.linkedin.entity.Aspect aspect = RecordUtils.toRecordTemplate(com.linkedin.entity.Aspect.class, currAspectEntry - .getMetadata()); + final com.linkedin.entity.Aspect aspect = + RecordUtils.toRecordTemplate( + com.linkedin.entity.Aspect.class, currAspectEntry.getMetadata()); final EnvelopedAspect envelopedAspect = new EnvelopedAspect(); envelopedAspect.setName(currAspectEntry.getAspect()); envelopedAspect.setVersion(currAspectEntry.getVersion()); - // TODO: I think we can assume this here, adding as it's a required field so object mapping barfs when trying to access it, + // TODO: I think we can assume this here, adding as it's a required field so object mapping + // barfs when trying to access it, // since nowhere else is using it should be safe for now at least envelopedAspect.setType(AspectType.VERSIONED); envelopedAspect.setValue(aspect); try { if (currAspectEntry.getSystemMetadata() != null) { - final SystemMetadata systemMetadata = RecordUtils.toRecordTemplate(SystemMetadata.class, currAspectEntry.getSystemMetadata()); + final SystemMetadata systemMetadata = + RecordUtils.toRecordTemplate( + SystemMetadata.class, currAspectEntry.getSystemMetadata()); envelopedAspect.setSystemMetadata(systemMetadata); } } catch (Exception e) { - log.warn("Exception encountered when setting system metadata on enveloped aspect {}. Error: {}", envelopedAspect.getName(), e); + log.warn( + "Exception encountered when setting system metadata on enveloped aspect {}. 
Error: {}", + envelopedAspect.getName(), + e); } - envelopedAspect.setCreated(new AuditStamp() - .setActor(UrnUtils.getUrn(currAspectEntry.getCreatedBy())) - .setTime(currAspectEntry.getCreatedOn().getTime()) - ); + envelopedAspect.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(currAspectEntry.getCreatedBy())) + .setTime(currAspectEntry.getCreatedOn().getTime())); result.put(currKey, envelopedAspect); } return result; @@ -1802,40 +2246,50 @@ private EnvelopedAspect getKeyEnvelopedAspect(final Urn urn) { envelopedAspect.setName(keySpec.getName()); envelopedAspect.setVersion(ASPECT_LATEST_VERSION); envelopedAspect.setValue(aspect); - // TODO: I think we can assume this here, adding as it's a required field so object mapping barfs when trying to access it, + // TODO: I think we can assume this here, adding as it's a required field so object mapping + // barfs when trying to access it, // since nowhere else is using it should be safe for now at least envelopedAspect.setType(AspectType.VERSIONED); envelopedAspect.setCreated( - new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); return envelopedAspect; } @Nonnull private UpdateAspectResult ingestAspectToLocalDB( - @Nullable Transaction tx, - @Nonnull final Urn urn, - @Nonnull final String aspectName, - @Nonnull final RecordTemplate newValue, - @Nonnull final AuditStamp auditStamp, - @Nonnull final SystemMetadata providedSystemMetadata, - @Nullable final EntityAspect latest, - @Nonnull final Long nextVersion) { - - // Set the "last run id" to be the run id provided with the new system metadata. This will be stored in index + @Nullable Transaction tx, + @Nonnull final Urn urn, + @Nonnull final String aspectName, + @Nonnull final RecordTemplate newValue, + @Nonnull final AuditStamp auditStamp, + @Nonnull final SystemMetadata providedSystemMetadata, + @Nullable final EntityAspect latest, + @Nonnull final Long nextVersion) { + + // Set the "last run id" to be the run id provided with the new system metadata. This will be + // stored in index // for all aspects that have a run id, regardless of whether they change. - providedSystemMetadata.setLastRunId(providedSystemMetadata.getRunId(GetMode.NULL), SetMode.IGNORE_NULL); + providedSystemMetadata.setLastRunId( + providedSystemMetadata.getRunId(GetMode.NULL), SetMode.IGNORE_NULL); // 2. Compare the latest existing and new. final RecordTemplate oldValue = - latest == null ? null : EntityUtils.toAspectRecord(urn, aspectName, latest.getMetadata(), getEntityRegistry()); + latest == null + ? null + : EntityUtils.toAspectRecord( + urn, aspectName, latest.getMetadata(), getEntityRegistry()); // 3. If there is no difference between existing and new, we just update // the lastObserved in system metadata. 
RunId should stay as the original runId if (oldValue != null && DataTemplateUtil.areEqual(oldValue, newValue)) { - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); latestSystemMetadata.setLastObserved(providedSystemMetadata.getLastObserved()); - latestSystemMetadata.setLastRunId(providedSystemMetadata.getLastRunId(GetMode.NULL), SetMode.IGNORE_NULL); + latestSystemMetadata.setLastRunId( + providedSystemMetadata.getLastRunId(GetMode.NULL), SetMode.IGNORE_NULL); latest.setSystemMetadata(RecordUtils.toJsonString(latestSystemMetadata)); @@ -1843,55 +2297,70 @@ private UpdateAspectResult ingestAspectToLocalDB( _aspectDao.saveAspect(tx, latest, false); // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); + _aspectDao.incrementWriteMetrics( + aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); return UpdateAspectResult.builder() - .urn(urn) - .oldValue(oldValue) - .newValue(oldValue) - .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) - .newSystemMetadata(latestSystemMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(auditStamp) - .maxVersion(0) - .build(); + .urn(urn) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) + .newSystemMetadata(latestSystemMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(0) + .build(); } // 4. Save the newValue as the latest version log.debug("Ingesting aspect with name {}, urn {}", aspectName, urn); String newValueStr = EntityUtils.toJsonAspect(newValue); - long versionOfOld = _aspectDao.saveLatestAspect(tx, urn.toString(), aspectName, latest == null ? null : EntityUtils.toJsonAspect(oldValue), - latest == null ? null : latest.getCreatedBy(), latest == null ? null : latest.getCreatedFor(), - latest == null ? null : latest.getCreatedOn(), latest == null ? null : latest.getSystemMetadata(), - newValueStr, auditStamp.getActor().toString(), - auditStamp.hasImpersonator() ? auditStamp.getImpersonator().toString() : null, - new Timestamp(auditStamp.getTime()), EntityUtils.toJsonAspect(providedSystemMetadata), nextVersion); + long versionOfOld = + _aspectDao.saveLatestAspect( + tx, + urn.toString(), + aspectName, + latest == null ? null : EntityUtils.toJsonAspect(oldValue), + latest == null ? null : latest.getCreatedBy(), + latest == null ? null : latest.getCreatedFor(), + latest == null ? null : latest.getCreatedOn(), + latest == null ? null : latest.getSystemMetadata(), + newValueStr, + auditStamp.getActor().toString(), + auditStamp.hasImpersonator() ? auditStamp.getImpersonator().toString() : null, + new Timestamp(auditStamp.getTime()), + EntityUtils.toJsonAspect(providedSystemMetadata), + nextVersion); // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length); + _aspectDao.incrementWriteMetrics( + aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length); return UpdateAspectResult.builder() - .urn(urn) - .oldValue(oldValue) - .newValue(newValue) - .oldSystemMetadata(latest == null ? 
null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) - .newSystemMetadata(providedSystemMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(auditStamp) - .maxVersion(versionOfOld) - .build(); + .urn(urn) + .oldValue(oldValue) + .newValue(newValue) + .oldSystemMetadata( + latest == null ? null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) + .newSystemMetadata(providedSystemMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(versionOfOld) + .build(); } /** * Builds the default browse path aspects for a subset of well-supported entities. * - * This method currently supports datasets, charts, dashboards, data flows, data jobs, and glossary terms. + * <p>This method currently supports datasets, charts, dashboards, data flows, data jobs, and + * glossary terms. */ @Nonnull @Override public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISyntaxException { Character dataPlatformDelimiter = getDataPlatformDelimiter(urn); - String defaultBrowsePath = getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter); + String defaultBrowsePath = + getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter); StringArray browsePaths = new StringArray(); browsePaths.add(defaultBrowsePath); BrowsePaths browsePathAspect = new BrowsePaths(); @@ -1902,19 +2371,19 @@ public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISynt /** * Builds the default browse path V2 aspects for all entities. * - * This method currently supports datasets, charts, dashboards, and data jobs best. Everything else - * will have a basic "Default" folder added to their browsePathV2. + * <p>This method currently supports datasets, charts, dashboards, and data jobs best. Everything + * else will have a basic "Default" folder added to their browsePathV2. */ @Nonnull @Override - public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException { + public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) + throws URISyntaxException { Character dataPlatformDelimiter = getDataPlatformDelimiter(urn); - return BrowsePathV2Utils.getDefaultBrowsePathV2(urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths); + return BrowsePathV2Utils.getDefaultBrowsePathV2( + urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths); } - /** - * Returns a delimiter on which the name of an asset may be split. - */ + /** Returns a delimiter on which the name of an asset may be split. 
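+   * <p>For example, under a hypothetical platform configuration (not taken from this changeset):
+   * a dataset named "warehouse.db.orders" on a platform whose dataset name delimiter is '.' would
+   * split into the path segments warehouse / db / orders.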
*/ private Character getDataPlatformDelimiter(Urn urn) { // Attempt to construct the appropriate Data Platform URN Urn dataPlatformUrn = buildDataPlatformUrn(urn, this.getEntityRegistry()); @@ -1932,15 +2401,20 @@ private Character getDataPlatformDelimiter(Urn urn) { @Nullable private DataPlatformInfo getDataPlatformInfo(Urn urn) { try { - final EntityResponse entityResponse = getEntityV2( - Constants.DATA_PLATFORM_ENTITY_NAME, - urn, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) - ); - if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects() - .containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) { + final EntityResponse entityResponse = + getEntityV2( + Constants.DATA_PLATFORM_ENTITY_NAME, + urn, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) { return new DataPlatformInfo( - entityResponse.getAspects().get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); + entityResponse + .getAspects() + .get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) + .getValue() + .data()); } } catch (Exception e) { log.warn(String.format("Failed to find Data Platform Info for urn %s", urn)); @@ -1949,7 +2423,8 @@ private DataPlatformInfo getDataPlatformInfo(Urn urn) { } private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspectSpec) { - final List<RelationshipFieldSpec> relationshipFieldSpecs = aspectSpec.getRelationshipFieldSpecs(); + final List<RelationshipFieldSpec> relationshipFieldSpecs = + aspectSpec.getRelationshipFieldSpecs(); return relationshipFieldSpecs.stream().anyMatch(RelationshipFieldSpec::isLineageRelationship); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java index ffd63479589bc..c2a0a211f9e76 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.datahub.util.RecordUtils; import com.google.common.base.Preconditions; import com.linkedin.common.AuditStamp; @@ -18,24 +21,17 @@ import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.net.URLEncoder; import java.util.List; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityUtils { - private EntityUtils() { - } + private EntityUtils() {} public static final int URN_NUM_BYTES_LIMIT = 512; public static final String URN_DELIMITER_SEPARATOR = "␟"; @@ -63,17 +59,19 @@ public static AuditStamp getAuditStamp(Urn actor) { } public static void ingestChangeProposals( - @Nonnull List<MetadataChangeProposal> changes, - @Nonnull EntityService entityService, - @Nonnull Urn actor, - @Nonnull Boolean async - ) { - entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(changes, 
entityService.getEntityRegistry()).build(), getAuditStamp(actor), async); + @Nonnull List<MetadataChangeProposal> changes, + @Nonnull EntityService entityService, + @Nonnull Urn actor, + @Nonnull Boolean async) { + entityService.ingestProposal( + AspectsBatchImpl.builder().mcps(changes, entityService.getEntityRegistry()).build(), + getAuditStamp(actor), + async); } /** * Get aspect from entity + * * @param entityUrn URN of the entity * @param aspectName aspect name string * @param entityService EntityService obj @@ -82,11 +80,10 @@ public static void ingestChangeProposals( */ @Nullable public static RecordTemplate getAspectFromEntity( - String entityUrn, - String aspectName, - EntityService entityService, - RecordTemplate defaultValue - ) { + String entityUrn, + String aspectName, + EntityService entityService, + RecordTemplate defaultValue) { Urn urn = getUrnFromString(entityUrn); if (urn == null) { return defaultValue; @@ -99,11 +96,10 @@ public static RecordTemplate getAspectFromEntity( return aspect; } catch (Exception e) { log.error( - "Error constructing aspect from entity. Entity: {} aspect: {}. Error: {}", - entityUrn, - aspectName, - e.toString() - ); + "Error constructing aspect from entity. Entity: {} aspect: {}. Error: {}", + entityUrn, + aspectName, + e.toString()); return null; } } @@ -114,7 +110,8 @@ public static RecordTemplate toAspectRecord( @Nonnull final String aspectName, @Nonnull final String jsonAspect, @Nonnull final EntityRegistry entityRegistry) { - return toAspectRecord(PegasusUtils.urnToEntityName(entityUrn), aspectName, jsonAspect, entityRegistry); + return toAspectRecord( + PegasusUtils.urnToEntityName(entityUrn), aspectName, jsonAspect, entityRegistry); } /** @@ -131,13 +128,17 @@ public static RecordTemplate toAspectRecord( @Nonnull final EntityRegistry entityRegistry) { final EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); final AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); - //TODO: aspectSpec can be null here - Preconditions.checkState(aspectSpec != null, String.format("Aspect %s could not be found", aspectName)); + // TODO: aspectSpec can be null here + Preconditions.checkState( + aspectSpec != null, String.format("Aspect %s could not be found", aspectName)); final RecordDataSchema aspectSchema = aspectSpec.getPegasusSchema(); - RecordTemplate aspectRecord = RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), jsonAspect); - RecordTemplateValidator.validate(aspectRecord, validationFailure -> { - log.warn(String.format("Failed to validate record %s against its schema.", aspectRecord)); - }); + RecordTemplate aspectRecord = + RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), jsonAspect); + RecordTemplateValidator.validate( + aspectRecord, + validationFailure -> { + log.warn(String.format("Failed to validate record %s against its schema.", aspectRecord)); + }); return aspectRecord; } @@ -151,16 +152,14 @@ public static SystemMetadata parseSystemMetadata(String jsonSystemMetadata) { return RecordUtils.toRecordTemplate(SystemMetadata.class, jsonSystemMetadata); } - /** - * Check if entity is removed (removed=true in Status aspect) and exists - */ + /** Check if entity is removed (removed=true in Status aspect) and exists */ public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) { try { - + if (!entityService.exists(entityUrn)) { return false; } - + EnvelopedAspect statusAspect = entityService.getLatestEnvelopedAspect(entityUrn.getEntityType(), entityUrn, "status"); if 
(statusAspect == null) { @@ -174,7 +173,8 @@ public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) } } - public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { + public static RecordTemplate buildKeyAspect( + @Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { final EntitySpec spec = entityRegistry.getEntitySpec(urnToEntityName(urn)); final AspectSpec keySpec = spec.getKeyAspectSpec(); return EntityKeyUtils.convertUrnToEntityKey(urn, keySpec); @@ -183,18 +183,27 @@ public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegist public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); validator.setCurrentEntitySpec(entityRegistry.getEntitySpec(urn.getEntityType())); - RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), validationResult -> { - throw new IllegalArgumentException("Invalid urn: " + urn + "\n Cause: " - + validationResult.getMessages()); }, validator); + RecordTemplateValidator.validate( + EntityUtils.buildKeyAspect(entityRegistry, urn), + validationResult -> { + throw new IllegalArgumentException( + "Invalid urn: " + urn + "\n Cause: " + validationResult.getMessages()); + }, + validator); if (urn.toString().trim().length() != urn.toString().length()) { - throw new IllegalArgumentException("Error: cannot provide an URN with leading or trailing whitespace"); + throw new IllegalArgumentException( + "Error: cannot provide an URN with leading or trailing whitespace"); } if (URLEncoder.encode(urn.toString()).length() > URN_NUM_BYTES_LIMIT) { - throw new IllegalArgumentException("Error: cannot provide an URN longer than " + Integer.toString(URN_NUM_BYTES_LIMIT) + " bytes (when URL encoded)"); + throw new IllegalArgumentException( + "Error: cannot provide an URN longer than " + + Integer.toString(URN_NUM_BYTES_LIMIT) + + " bytes (when URL encoded)"); } if (urn.toString().contains(URN_DELIMITER_SEPARATOR)) { - throw new IllegalArgumentException("Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character"); + throw new IllegalArgumentException( + "Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character"); } try { Urn.createFromString(urn.toString()); @@ -202,5 +211,4 @@ public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull throw new IllegalArgumentException(e); } } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java index 81eb5d4eb947c..c0ee01abe0a84 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java @@ -20,12 +20,10 @@ import org.reflections.Reflections; import org.reflections.scanners.Scanner; - public class NewModelUtils { private static final ClassLoader CLASS_LOADER = DummySnapshot.class.getClassLoader(); - private NewModelUtils() { - } + private NewModelUtils() {} public static <T extends DataTemplate> String getAspectName(@Nonnull Class<T> aspectClass) { return aspectClass.getCanonicalName(); @@ -36,9 +34,9 @@ public static Class<? extends RecordTemplate> getAspectClass(@Nonnull String asp return getClassFromName(aspectName, RecordTemplate.class); } - @Nonnull - public static <T> Class<? 
extends T> getClassFromName(@Nonnull String className, @Nonnull Class<T> parentClass) { + public static <T> Class<? extends T> getClassFromName( + @Nonnull String className, @Nonnull Class<T> parentClass) { try { return CLASS_LOADER.loadClass(className).asSubclass(parentClass); } catch (ClassNotFoundException var3) { @@ -47,8 +45,8 @@ public static <T> Class<? extends T> getClassFromName(@Nonnull String className, } @Nonnull - public static <SNAPSHOT extends RecordTemplate> List<Pair<String, RecordTemplate>> getAspectsFromSnapshot( - @Nonnull SNAPSHOT snapshot) { + public static <SNAPSHOT extends RecordTemplate> + List<Pair<String, RecordTemplate>> getAspectsFromSnapshot(@Nonnull SNAPSHOT snapshot) { SnapshotValidator.validateSnapshotSchema(snapshot.getClass()); return getAspects(snapshot); } @@ -57,28 +55,34 @@ public static <SNAPSHOT extends RecordTemplate> List<Pair<String, RecordTemplate private static List<Pair<String, RecordTemplate>> getAspects(@Nonnull RecordTemplate snapshot) { Class<? extends WrappingArrayTemplate> clazz = getAspectsArrayClass(snapshot.getClass()); WrappingArrayTemplate aspectArray = - (WrappingArrayTemplate) RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); + (WrappingArrayTemplate) + RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); List<Pair<String, RecordTemplate>> aspects = new ArrayList(); - aspectArray.forEach((item) -> { - try { - RecordTemplate aspect = RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item); - String name = PegasusUtils.getAspectNameFromSchema(aspect.schema()); - aspects.add(Pair.of(name, aspect)); - } catch (InvalidSchemaException e) { - // ignore fields that are not part of the union - } catch (TemplateOutputCastException e) { - // ignore fields that are not part of the union - } - }); + aspectArray.forEach( + (item) -> { + try { + RecordTemplate aspect = + RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item); + String name = PegasusUtils.getAspectNameFromSchema(aspect.schema()); + aspects.add(Pair.of(name, aspect)); + } catch (InvalidSchemaException e) { + // ignore fields that are not part of the union + } catch (TemplateOutputCastException e) { + // ignore fields that are not part of the union + } + }); return aspects; } - @Nonnull - private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTemplate> getAspectsArrayClass( - @Nonnull Class<SNAPSHOT> snapshotClass) { + private static <SNAPSHOT extends RecordTemplate> + Class<? extends WrappingArrayTemplate> getAspectsArrayClass( + @Nonnull Class<SNAPSHOT> snapshotClass) { try { - return snapshotClass.getMethod("getAspects").getReturnType().asSubclass(WrappingArrayTemplate.class); + return snapshotClass + .getMethod("getAspects") + .getReturnType() + .asSubclass(WrappingArrayTemplate.class); } catch (ClassCastException | NoSuchMethodException var2) { throw new RuntimeException(var2); } @@ -86,10 +90,10 @@ private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTe @Nonnull public static Set<Class<? 
extends RecordTemplate>> getAllEntities() { - return (Set) (new Reflections("com.linkedin.metadata.entity", new Scanner[0])).getSubTypesOf(RecordTemplate.class) - .stream() - .filter(EntityValidator::isValidEntitySchema) - .collect(Collectors.toSet()); + return (Set) + (new Reflections("com.linkedin.metadata.entity", new Scanner[0])) + .getSubTypesOf(RecordTemplate.class).stream() + .filter(EntityValidator::isValidEntitySchema) + .collect(Collectors.toSet()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java index 7804aa2067088..43df42713cc4d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java @@ -2,23 +2,24 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.ResultSet; - import javax.annotation.Nonnull; public class AspectStorageValidationUtil { - private AspectStorageValidationUtil() { - } + private AspectStorageValidationUtil() {} /** * Check if entity aspect table exists in the database. + * * @param session * @return {@code true} if table exists. */ public static boolean checkTableExists(@Nonnull CqlSession session) { - String query = String.format("SELECT table_name \n " - + "FROM system_schema.tables where table_name = '%s' allow filtering;", - CassandraAspect.TABLE_NAME); + String query = + String.format( + "SELECT table_name \n " + + "FROM system_schema.tables where table_name = '%s' allow filtering;", + CassandraAspect.TABLE_NAME); ResultSet rs = session.execute(query); return rs.all().size() > 0; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java index 891a47130fe25..d68386291acb3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java @@ -1,23 +1,22 @@ package com.linkedin.metadata.entity.cassandra; import com.datastax.oss.driver.api.core.cql.Row; -import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.EntityAspectIdentifier; +import java.sql.Timestamp; +import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -import javax.annotation.Nonnull; -import java.sql.Timestamp; - /** - * This class represents entity aspect records stored in Cassandra database. - * It's also aware of {@link EntityAspect} which is a shared in-memory representation of an aspect record and knows - * how to translate itself to it. + * This class represents entity aspect records stored in Cassandra database. It's also aware of + * {@link EntityAspect} which is a shared in-memory representation of an aspect record and knows how + * to translate itself to it. 
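+ * <p>(A sketch of the TODO below, assuming the driver's object mapper and not code in this repo:
+ * the class would be annotated with @Entity and @CqlName(TABLE_NAME), its urn/aspect/version
+ * fields marked with @PartitionKey/@ClusteringColumn, letting the mapper replace the hand-written
+ * rowToEntityAspect translation below.)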
* - * TODO: Consider using datastax java driver `@Entity` - * (see: https://docs.datastax.com/en/developer/java-driver/4.13/manual/mapper/entities/) + * <p>TODO: Consider using datastax java driver `@Entity` (see: + * https://docs.datastax.com/en/developer/java-driver/4.13/manual/mapper/entities/) */ @Getter @Setter @@ -61,7 +60,9 @@ public static EntityAspect rowToEntityAspect(@Nonnull Row row) { row.getLong(CassandraAspect.VERSION_COLUMN), row.getString(CassandraAspect.METADATA_COLUMN), row.getString(CassandraAspect.SYSTEM_METADATA_COLUMN), - row.getInstant(CassandraAspect.CREATED_ON_COLUMN) == null ? null : Timestamp.from(row.getInstant(CassandraAspect.CREATED_ON_COLUMN)), + row.getInstant(CassandraAspect.CREATED_ON_COLUMN) == null + ? null + : Timestamp.from(row.getInstant(CassandraAspect.CREATED_ON_COLUMN)), row.getString(CassandraAspect.CREATED_BY_COLUMN), row.getString(CassandraAspect.CREATED_FOR_COLUMN)); } @@ -73,5 +74,4 @@ public static EntityAspectIdentifier rowToAspectIdentifier(@Nonnull Row row) { row.getString(CassandraAspect.ASPECT_COLUMN), row.getLong(CassandraAspect.VERSION_COLUMN)); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java index 9f4a36efb4501..3293bc6178e43 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity.cassandra; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.exception.ModelConversionException; import com.datahub.util.exception.RetryLimitReached; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,6 +34,8 @@ import com.linkedin.metadata.query.ExtraInfo; import com.linkedin.metadata.query.ExtraInfoArray; import com.linkedin.metadata.query.ListResultMetadata; +import io.ebean.PagedList; +import io.ebean.Transaction; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; @@ -44,14 +49,8 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import io.ebean.PagedList; -import io.ebean.Transaction; import lombok.extern.slf4j.Slf4j; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*; -import static com.linkedin.metadata.Constants.*; - @Slf4j public class CassandraAspectDao implements AspectDao, AspectMigrationsDao { @@ -88,16 +87,22 @@ public EntityAspect getLatestAspect(@Nonnull String urn, @Nonnull String aspectN } @Override - public Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects) { + public Map<String, Map<String, EntityAspect>> getLatestAspects( + Map<String, Set<String>> urnAspects) { return urnAspects.entrySet().stream() - .map(entry -> Map.entry(entry.getKey(), entry.getValue().stream() - .map(aspectName -> { - EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName); - return aspect != null ? 
Map.entry(aspectName, aspect) : null;
-            })
-            .filter(Objects::nonNull)
-            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))))
-        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        .map(
+            entry ->
+                Map.entry(
+                    entry.getKey(),
+                    entry.getValue().stream()
+                        .map(
+                            aspectName -> {
+                              EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName);
+                              return aspect != null ? Map.entry(aspectName, aspect) : null;
+                            })
+                        .filter(Objects::nonNull)
+                        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
   }

   @Override
@@ -110,48 +115,63 @@ public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspec
   @Override
   public long countEntities() {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .distinct()
-        .column(CassandraAspect.URN_COLUMN)
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .distinct()
+            .column(CassandraAspect.URN_COLUMN)
+            .build();

     ResultSet rs = _cqlSession.execute(ss);
     // TODO: make sure it doesn't blow up on a large database
-    // Getting a count of distinct values in a Cassandra query doesn't seem to be feasible, but counting them in the app is dangerous
-    // The saving grace here is that the only place where this method is used should only run once, while the database is still young
+    // Getting a count of distinct values in a Cassandra query doesn't seem to be feasible, but
+    // counting them in the app is dangerous
+    // The saving grace here is that the only place where this method is used should only run once,
+    // while the database is still young
     return rs.all().size();
   }

   @Override
   public boolean checkIfAspectExists(@Nonnull String aspectName) {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .column(CassandraAspect.URN_COLUMN)
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .limit(1)
-        .allowFiltering()
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .column(CassandraAspect.URN_COLUMN)
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .limit(1)
+            .allowFiltering()
+            .build();
     ResultSet rs = _cqlSession.execute(ss);
     return rs.one() != null;
   }

-  private Map<String, Long> getMaxVersions(@Nonnull final String urn, @Nonnull final Set<String> aspectNames) {
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .selectors(
-            Selector.column(CassandraAspect.URN_COLUMN),
-            Selector.column(CassandraAspect.ASPECT_COLUMN),
-            Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)).as(CassandraAspect.VERSION_COLUMN))
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).in(aspectNamesToLiterals(aspectNames))
-        .groupBy(ImmutableList.of(Selector.column(CassandraAspect.URN_COLUMN), Selector.column(CassandraAspect.ASPECT_COLUMN)))
-        .build();
+  private Map<String, Long> getMaxVersions(
+      @Nonnull final String urn, @Nonnull final Set<String> aspectNames) {
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .selectors(
+                Selector.column(CassandraAspect.URN_COLUMN),
+                Selector.column(CassandraAspect.ASPECT_COLUMN),
+                Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN))
+                    .as(CassandraAspect.VERSION_COLUMN))
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+
.in(aspectNamesToLiterals(aspectNames)) + .groupBy( + ImmutableList.of( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN))) + .build(); ResultSet rs = _cqlSession.execute(ss); - Map<String, Long> aspectVersions = rs.all().stream() - .collect(Collectors.toMap( - row -> row.getString(CassandraAspect.ASPECT_COLUMN), - row -> row.getLong(CassandraAspect.VERSION_COLUMN))); + Map<String, Long> aspectVersions = + rs.all().stream() + .collect( + Collectors.toMap( + row -> row.getString(CassandraAspect.ASPECT_COLUMN), + row -> row.getLong(CassandraAspect.VERSION_COLUMN))); // For each requested aspect that didn't come back from DB, add a version -1 for (String aspect : aspectNames) { @@ -164,7 +184,8 @@ private Map<String, Long> getMaxVersions(@Nonnull final String urn, @Nonnull fin } @Override - public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) { + public void saveAspect( + @Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) { validateConnection(); SimpleStatement statement = generateSaveStatement(aspect, insert); _cqlSession.execute(statement); @@ -174,7 +195,8 @@ public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, f // TODO: look into supporting pagination @Override @Nonnull - public Map<EntityAspectIdentifier, EntityAspect> batchGet(@Nonnull final Set<EntityAspectIdentifier> keys) { + public Map<EntityAspectIdentifier, EntityAspect> batchGet( + @Nonnull final Set<EntityAspectIdentifier> keys) { validateConnection(); return keys.stream() .map(this::getAspect) @@ -210,13 +232,17 @@ public ListResult<String> listAspectMetadata( final int pageSize) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version)) - .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(version)) + .whereColumn(CassandraAspect.ENTITY_COLUMN) + .isEqualTo(literal(entityName)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); @@ -224,53 +250,58 @@ public ListResult<String> listAspectMetadata( OffsetPager offsetPager = new OffsetPager(pageSize); Page<Row> page = offsetPager.getPage(rs, pageNumber); - final List<EntityAspect> aspects = page - .getElements() - .stream().map(CassandraAspect::rowToEntityAspect) - .collect(Collectors.toList()); + final List<EntityAspect> aspects = + page.getElements().stream() + .map(CassandraAspect::rowToEntityAspect) + .collect(Collectors.toList()); // TODO: address performance issue for getting total count // https://www.datastax.com/blog/running-count-expensive-cassandra - SimpleStatement ssCount = selectFrom(CassandraAspect.TABLE_NAME) - .countAll() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version)) - .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)) - .allowFiltering() - .build(); + SimpleStatement ssCount = + selectFrom(CassandraAspect.TABLE_NAME) + .countAll() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + 
.whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(version)) + .whereColumn(CassandraAspect.ENTITY_COLUMN) + .isEqualTo(literal(entityName)) + .allowFiltering() + .build(); long totalCount = _cqlSession.execute(ssCount).one().getLong(0); - final List<String> aspectMetadatas = aspects - .stream() - .map(EntityAspect::getMetadata) - .collect(Collectors.toList()); + final List<String> aspectMetadatas = + aspects.stream().map(EntityAspect::getMetadata).collect(Collectors.toList()); - final ListResultMetadata listResultMetadata = toListResultMetadata(aspects - .stream() - .map(CassandraAspectDao::toExtraInfo) - .collect(Collectors.toList())); + final ListResultMetadata listResultMetadata = + toListResultMetadata( + aspects.stream().map(CassandraAspectDao::toExtraInfo).collect(Collectors.toList())); - return toListResult(aspectMetadatas, listResultMetadata, start, pageNumber, pageSize, totalCount); + return toListResult( + aspectMetadatas, listResultMetadata, start, pageNumber, pageSize, totalCount); } @Override @Nonnull - public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) { + public <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) { validateConnection(); int retryCount = 0; Exception lastException; do { try { - // TODO: Try to bend this code to make use of Cassandra batches. This method is called from single-urn operations, so perf should not suffer much + // TODO: Try to bend this code to make use of Cassandra batches. This method is called from + // single-urn operations, so perf should not suffer much return block.apply(null); } catch (DriverException exception) { lastException = exception; } } while (++retryCount <= maxTransactionRetry); - throw new RetryLimitReached("Failed to add after " + maxTransactionRetry + " retries", lastException); + throw new RetryLimitReached( + "Failed to add after " + maxTransactionRetry + " retries", lastException); } private <T> ListResult<T> toListResult( @@ -283,17 +314,18 @@ private <T> ListResult<T> toListResult( final int numPages = (int) (totalCount / pageSize + (totalCount % pageSize == 0 ? 0 : 1)); final boolean hasNext = pageNumber < numPages; - final int nextStart = (start != null && hasNext) ? (pageNumber * pageSize) : ListResult.INVALID_NEXT_START; + final int nextStart = + (start != null && hasNext) ? 
(pageNumber * pageSize) : ListResult.INVALID_NEXT_START; return ListResult.<T>builder() - .values(values) - .metadata(listResultMetadata) - .nextStart(nextStart) - .hasNext(hasNext) - .totalCount((int) totalCount) - .totalPageCount(numPages) - .pageSize(pageSize) - .build(); + .values(values) + .metadata(listResultMetadata) + .nextStart(nextStart) + .hasNext(hasNext) + .totalCount((int) totalCount) + .totalPageCount(numPages) + .pageSize(pageSize) + .build(); } @Nonnull @@ -336,12 +368,16 @@ private static AuditStamp toAuditStamp(@Nonnull final EntityAspect aspect) { @Override public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect) { validateConnection(); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspect.getAspect())) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(aspect.getVersion())) - .ifExists() - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(aspect.getUrn())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspect.getAspect())) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(aspect.getVersion())) + .ifExists() + .build(); _cqlSession.execute(ss); } @@ -349,9 +385,11 @@ public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect a @Override public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { validateConnection(); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn)) + .build(); ResultSet rs = _cqlSession.execute(ss); // TODO: look into how to get around this for counts in Cassandra // https://stackoverflow.com/questions/28611459/how-to-know-affected-rows-in-cassandracql @@ -359,11 +397,14 @@ public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { } public List<EntityAspect> getAllAspects(String urn, String aspectName) { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn)) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .build(); ResultSet rs = _cqlSession.execute(ss); return rs.all().stream().map(CassandraAspect::rowToEntityAspect).collect(Collectors.toList()); @@ -373,13 +414,17 @@ public List<EntityAspect> getAllAspects(String urn, String aspectName) { @Nullable public EntityAspect getAspect(@Nonnull String urn, @Nonnull String aspectName, long version) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version)) - .limit(1) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn)) + 
.whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(version)) + .limit(1) + .build(); ResultSet rs = _cqlSession.execute(ss); Row row = rs.one(); @@ -395,17 +440,20 @@ public ListResult<String> listUrns( final int pageSize) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .columns( - CassandraAspect.URN_COLUMN, - CassandraAspect.ASPECT_COLUMN, - CassandraAspect.VERSION_COLUMN - ) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(ASPECT_LATEST_VERSION)) - .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .columns( + CassandraAspect.URN_COLUMN, + CassandraAspect.ASPECT_COLUMN, + CassandraAspect.VERSION_COLUMN) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.ENTITY_COLUMN) + .isEqualTo(literal(entityName)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); @@ -414,17 +462,20 @@ public ListResult<String> listUrns( Page<Row> page = offsetPager.getPage(rs, pageNumber); - final List<String> urns = page - .getElements() - .stream().map(row -> CassandraAspect.rowToAspectIdentifier(row).getUrn()) - .collect(Collectors.toList()); + final List<String> urns = + page.getElements().stream() + .map(row -> CassandraAspect.rowToAspectIdentifier(row).getUrn()) + .collect(Collectors.toList()); // TODO: address performance issue for getting total count // https://www.datastax.com/blog/running-count-expensive-cassandra - SimpleStatement ssCount = selectFrom(CassandraAspect.TABLE_NAME) + SimpleStatement ssCount = + selectFrom(CassandraAspect.TABLE_NAME) .countAll() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(ASPECT_LATEST_VERSION)) .allowFiltering() .build(); @@ -457,9 +508,8 @@ public Stream<EntityAspect> streamAspects(String entityName, String aspectName) @Nonnull public Iterable<String> listAllUrns(int start, int pageSize) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .column(CassandraAspect.URN_COLUMN) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME).column(CassandraAspect.URN_COLUMN).build(); ResultSet rs = _cqlSession.execute(ss); @@ -467,9 +517,8 @@ public Iterable<String> listAllUrns(int start, int pageSize) { OffsetPager offsetPager = new OffsetPager(pageSize); Page<Row> page = offsetPager.getPage(rs, pageNumber); - return page - .getElements() - .stream().map(row -> row.getString(CassandraAspect.URN_COLUMN)) + return page.getElements().stream() + .map(row -> row.getString(CassandraAspect.URN_COLUMN)) .collect(Collectors.toList()); } @@ -496,21 +545,20 @@ public Map<String, Map<String, Long>> getNextVersions(Map<String, Set<String>> u @Override public long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable 
final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion - ) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion) { validateConnection(); if (!_canWrite) { @@ -521,7 +569,8 @@ public long saveLatestAspect( BatchStatement batch = BatchStatement.newInstance(BatchType.UNLOGGED); if (oldAspectMetadata != null && oldTime != null) { largestVersion = nextVersion; - final EntityAspect aspect = new EntityAspect( + final EntityAspect aspect = + new EntityAspect( urn, aspectName, largestVersion, @@ -529,13 +578,13 @@ public long saveLatestAspect( oldSystemMetadata, oldTime, oldActor, - oldImpersonator - ); + oldImpersonator); batch = batch.add(generateSaveStatement(aspect, true)); } // Save newValue as the latest version (v0) - final EntityAspect aspect = new EntityAspect( + final EntityAspect aspect = + new EntityAspect( urn, aspectName, ASPECT_LATEST_VERSION, @@ -543,8 +592,7 @@ public long saveLatestAspect( newSystemMetadata, newTime, newActor, - newImpersonator - ); + newImpersonator); batch = batch.add(generateSaveStatement(aspect, oldAspectMetadata == null)); _cqlSession.execute(batch); return largestVersion; @@ -558,7 +606,8 @@ private SimpleStatement generateSaveStatement(EntityAspect aspect, boolean inser throw new RuntimeException(e); } if (insert) { - Insert ri = insertInto(CassandraAspect.TABLE_NAME) + Insert ri = + insertInto(CassandraAspect.TABLE_NAME) .value(CassandraAspect.URN_COLUMN, literal(aspect.getUrn())) .value(CassandraAspect.ASPECT_COLUMN, literal(aspect.getAspect())) .value(CassandraAspect.VERSION_COLUMN, literal(aspect.getVersion())) @@ -572,16 +621,23 @@ private SimpleStatement generateSaveStatement(EntityAspect aspect, boolean inser return ri.build(); } else { - UpdateWithAssignments uwa = update(CassandraAspect.TABLE_NAME) + UpdateWithAssignments uwa = + update(CassandraAspect.TABLE_NAME) .setColumn(CassandraAspect.METADATA_COLUMN, literal(aspect.getMetadata())) - .setColumn(CassandraAspect.SYSTEM_METADATA_COLUMN, literal(aspect.getSystemMetadata())) - .setColumn(CassandraAspect.CREATED_ON_COLUMN, literal(aspect.getCreatedOn().getTime())) + .setColumn( + CassandraAspect.SYSTEM_METADATA_COLUMN, literal(aspect.getSystemMetadata())) + .setColumn( + CassandraAspect.CREATED_ON_COLUMN, literal(aspect.getCreatedOn().getTime())) .setColumn(CassandraAspect.CREATED_BY_COLUMN, literal(aspect.getCreatedBy())) .setColumn(CassandraAspect.CREATED_FOR_COLUMN, literal(aspect.getCreatedFor())); - Update u = uwa.whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspect.getAspect())) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(aspect.getVersion())) + Update u = + uwa.whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(aspect.getUrn())) + 
.whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspect.getAspect())) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(aspect.getVersion())) .ifExists(); return u.build(); @@ -595,28 +651,28 @@ public void setWritable(boolean canWrite) { @Override public void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); - final EntityAspect aspect = new EntityAspect( - urn, - aspectName, - version, - aspectMetadata, - systemMetadata, - timestamp, - actor, - impersonator - ); + final EntityAspect aspect = + new EntityAspect( + urn, + aspectName, + version, + aspectMetadata, + systemMetadata, + timestamp, + actor, + impersonator); saveAspect(tx, aspect, insert); @@ -626,16 +682,22 @@ public void saveAspect( @Override @Nonnull - public List<EntityAspect> getAspectsInRange(@Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) { + public List<EntityAspect> getAspectsInRange( + @Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).in(aspectNamesToLiterals(aspectNames)) - .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isLessThanOrEqualTo(literal(startTimeMillis)) - .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isGreaterThan(literal(endTimeMillis)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .in(aspectNamesToLiterals(aspectNames)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isLessThanOrEqualTo(literal(startTimeMillis)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isGreaterThan(literal(endTimeMillis)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java index 9ebb6b26fc43d..6a1ba72c37676 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.entity.cassandra; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; + import com.datahub.util.RecordUtils; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.ResultSet; @@ 
-10,10 +14,11 @@ import com.datastax.oss.driver.api.querybuilder.select.Selector; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; -import com.linkedin.metadata.entity.EntityAspectIdentifier; -import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; @@ -23,13 +28,7 @@ import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import com.linkedin.retention.VersionBasedRetention; -import com.linkedin.metadata.Constants; import io.opentelemetry.extension.annotations.WithSpan; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.sql.Timestamp; import java.time.Clock; import java.util.List; @@ -37,10 +36,10 @@ import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -58,31 +57,38 @@ public EntityService getEntityService() { @Override protected AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps) { - return AspectsBatchImpl.builder() - .mcps(mcps, _entityService.getEntityRegistry()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, _entityService.getEntityRegistry()).build(); } @Override @WithSpan protected void applyRetention(List<RetentionContext> retentionContexts) { - List<RetentionContext> nonEmptyContexts = retentionContexts.stream() - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()) + List<RetentionContext> nonEmptyContexts = + retentionContexts.stream() + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) .collect(Collectors.toList()); - nonEmptyContexts.forEach(context -> { - if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { - Retention retentionPolicy = context.getRetentionPolicy().get(); - applyVersionBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getVersion(), context.getMaxVersion()); - } + nonEmptyContexts.forEach( + context -> { + if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyVersionBasedRetention( + context.getUrn(), + context.getAspectName(), + retentionPolicy.getVersion(), + context.getMaxVersion()); + } - if (context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { - Retention retentionPolicy = context.getRetentionPolicy().get(); - applyTimeBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); - } - }); + if 
(context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyTimeBasedRetention( + context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); + } + }); } @Override @@ -111,18 +117,22 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } final String aspectNameFromRecord = id.getAspect(); // Get the retention policies to apply from the local retention policy map - Optional<Retention> retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - retentionPolicy.ifPresent(retention -> - applyRetention(List.of(RetentionContext.builder() - .urn(urn) - .aspectName(aspectNameFromRecord) - .retentionPolicy(retentionPolicy) - .maxVersion(Optional.of(id.getVersion())) - .build()))); + Optional<Retention> retentionPolicy = + getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + retentionPolicy.ifPresent( + retention -> + applyRetention( + List.of( + RetentionContext.builder() + .urn(urn) + .aspectName(aspectNameFromRecord) + .retentionPolicy(retentionPolicy) + .maxVersion(Optional.of(id.getVersion())) + .build()))); i += 1; if (i % _batchSize == 0) { @@ -134,7 +144,8 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } @Override - public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args) { + public BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args) { log.error("batchApplyRetentionEntities not implemented for cassandra"); return null; } @@ -147,23 +158,31 @@ private void applyVersionBasedRetention( long largestVersion = maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn, aspectName)); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isLessThanOrEqualTo(literal(largestVersion - retention.getMaxVersions() + 1L)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isLessThanOrEqualTo(literal(largestVersion - retention.getMaxVersions() + 1L)) + .build(); _cqlSession.execute(ss); } private long getMaxVersion(@Nonnull final Urn urn, @Nonnull final String aspectName) { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .orderBy(CassandraAspect.VERSION_COLUMN, ClusteringOrder.DESC) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + 
.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .orderBy(CassandraAspect.VERSION_COLUMN, ClusteringOrder.DESC) + .build(); ResultSet rs = _cqlSession.execute(ss); Row row = rs.one(); return row.getLong(CassandraAspect.VERSION_COLUMN); @@ -174,47 +193,69 @@ private void applyTimeBasedRetention( @Nonnull final String aspectName, @Nonnull final TimeBasedRetention retention) { Timestamp threshold = new Timestamp(_clock.millis() - retention.getMaxAgeInSeconds() * 1000); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isLessThanOrEqualTo(literal(threshold)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isLessThanOrEqualTo(literal(threshold)) + .build(); _cqlSession.execute(ss); } - private List<EntityAspectIdentifier> queryCandidates(@Nullable String entityName, @Nullable String aspectName) { - Select select = selectFrom(CassandraAspect.TABLE_NAME) - .selectors( - Selector.column(CassandraAspect.URN_COLUMN), - Selector.column(CassandraAspect.ASPECT_COLUMN), - Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)).as(CassandraAspect.VERSION_COLUMN)) - .allowFiltering(); + private List<EntityAspectIdentifier> queryCandidates( + @Nullable String entityName, @Nullable String aspectName) { + Select select = + selectFrom(CassandraAspect.TABLE_NAME) + .selectors( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN), + Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) + .as(CassandraAspect.VERSION_COLUMN)) + .allowFiltering(); if (aspectName != null) { select = select.whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)); } - select = select.whereColumn(CassandraAspect.VERSION_COLUMN).isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)); + select = + select + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)); if (entityName != null) { select = select.whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)); } - select = select.groupBy(ImmutableList.of(Selector.column(CassandraAspect.URN_COLUMN), Selector.column(CassandraAspect.ASPECT_COLUMN))); + select = + select.groupBy( + ImmutableList.of( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN))); SimpleStatement ss = select.build(); ResultSet rs = _cqlSession.execute(ss); - return rs.all().stream().map(CassandraAspect::rowToAspectIdentifier).collect(Collectors.toList()); + return rs.all().stream() + .map(CassandraAspect::rowToAspectIdentifier) + .collect(Collectors.toList()); } private Map<String, DataHubRetentionConfig> getAllRetentionPolicies() { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(Constants.DATAHUB_RETENTION_ASPECT)) - 
.whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(Constants.ASPECT_LATEST_VERSION)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(Constants.DATAHUB_RETENTION_ASPECT)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(Constants.ASPECT_LATEST_VERSION)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); return rs.all().stream() .map(CassandraAspect::rowToEntityAspect) - .collect(Collectors.toMap( - EntityAspect::getUrn, - aspect -> RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, aspect.getMetadata()))); + .collect( + Collectors.toMap( + EntityAspect::getUrn, + aspect -> + RecordUtils.toRecordTemplate( + DataHubRetentionConfig.class, aspect.getMetadata()))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java index c0aef268e14c9..b02ee0170354e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java @@ -1,30 +1,30 @@ package com.linkedin.metadata.entity.ebean; +import static io.ebean.Expr.ne; + import com.linkedin.metadata.Constants; import io.ebean.Database; import io.ebean.SqlQuery; import io.ebean.SqlRow; - import java.util.List; -import static io.ebean.Expr.ne; - - public class AspectStorageValidationUtil { - private AspectStorageValidationUtil() { - - } + private AspectStorageValidationUtil() {} public static long getV1RowCount(Database server) { return server.find(EbeanAspectV1.class).findCount(); } /** - * Get the number of rows created not by the DataHub system actor (urn:li:corpuser:__datahub_system) + * Get the number of rows created not by the DataHub system actor + * (urn:li:corpuser:__datahub_system) */ public static long getV2NonSystemRowCount(Database server) { - return server.find(EbeanAspectV2.class).where(ne("createdby", Constants.SYSTEM_ACTOR)).findCount(); + return server + .find(EbeanAspectV2.class) + .where(ne("createdby", Constants.SYSTEM_ACTOR)) + .findCount(); } public static boolean checkV2TableExists(Database server) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java index c16c98b34f3eb..b2b47c1d5ba32 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.codahale.metrics.MetricRegistry; import com.datahub.util.exception.ModelConversionException; import com.datahub.util.exception.RetryLimitReached; @@ -19,8 +21,8 @@ import com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; -import io.ebean.DuplicateKeyException; import io.ebean.Database; +import io.ebean.DuplicateKeyException; import io.ebean.ExpressionList; import io.ebean.Junction; import io.ebean.PagedList; @@ -45,14 +47,10 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import 
javax.persistence.PersistenceException; import javax.persistence.Table; - import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - @Slf4j public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { @@ -64,8 +62,10 @@ public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { // while its storage is being migrated private boolean _canWrite = true; - // Why 375? From tuning, this seems to be about the largest size we can get without having ebean batch issues. - // This may be able to be moved up, 375 is a bit conservative. However, we should be careful to tweak this without + // Why 375? From tuning, this seems to be about the largest size we can get without having ebean + // batch issues. + // This may be able to be moved up, 375 is a bit conservative. However, we should be careful to + // tweak this without // more testing. private int _queryKeysCount = 375; // 0 means no pagination on keys @@ -79,8 +79,7 @@ public void setWritable(boolean canWrite) { } /** - * Return the {@link Database} server instance used for customized queries. - * Only used in tests. + * Return the {@link Database} server instance used for customized queries. Only used in tests. */ public Database getServer() { return _server; @@ -96,8 +95,9 @@ private boolean validateConnection() { return true; } if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { - log.error("GMS is on a newer version than your storage layer. Please refer to " - + "https://datahubproject.io/docs/advanced/no-code-upgrade to view the upgrade guide."); + log.error( + "GMS is on a newer version than your storage layer. Please refer to " + + "https://datahubproject.io/docs/advanced/no-code-upgrade to view the upgrade guide."); _canWrite = false; return false; } else { @@ -106,24 +106,22 @@ private boolean validateConnection() { } } - @Override public long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion - ) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion) { validateConnection(); if (!_canWrite) { @@ -133,27 +131,47 @@ public long saveLatestAspect( long largestVersion = ASPECT_LATEST_VERSION; if (oldAspectMetadata != null && oldTime != null) { largestVersion = nextVersion; - saveAspect(tx, urn, aspectName, oldAspectMetadata, oldActor, oldImpersonator, oldTime, oldSystemMetadata, largestVersion, true); + saveAspect( + tx, + urn, + aspectName, + oldAspectMetadata, + oldActor, + oldImpersonator, + oldTime, + oldSystemMetadata, + largestVersion, + true); } // Save newValue as the latest version (v0) - saveAspect(tx, urn, 
aspectName, newAspectMetadata, newActor, newImpersonator, newTime, newSystemMetadata, ASPECT_LATEST_VERSION, oldAspectMetadata == null); + saveAspect( + tx, + urn, + aspectName, + newAspectMetadata, + newActor, + newImpersonator, + newTime, + newSystemMetadata, + ASPECT_LATEST_VERSION, + oldAspectMetadata == null); return largestVersion; } @Override public void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); @@ -171,12 +189,14 @@ public void saveAspect( } @Override - public void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { + public void saveAspect( + @Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { EbeanAspectV2 ebeanAspect = EbeanAspectV2.fromEntityAspect(aspect); saveEbeanAspect(tx, ebeanAspect, insert); } - private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { + private void saveEbeanAspect( + @Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { validateConnection(); if (insert) { _server.insert(ebeanAspect, tx); @@ -186,17 +206,22 @@ private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspec } @Override - public Map<String, Map<String, EntityAspect>> getLatestAspects(@Nonnull Map<String, Set<String>> urnAspects) { + public Map<String, Map<String, EntityAspect>> getLatestAspects( + @Nonnull Map<String, Set<String>> urnAspects) { validateConnection(); - List<EbeanAspectV2.PrimaryKey> keys = urnAspects.entrySet().stream() - .flatMap(entry -> entry.getValue().stream() - .map(aspect -> new EbeanAspectV2.PrimaryKey(entry.getKey(), aspect, ASPECT_LATEST_VERSION)) - ).collect(Collectors.toList()); + List<EbeanAspectV2.PrimaryKey> keys = + urnAspects.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream() + .map( + aspect -> + new EbeanAspectV2.PrimaryKey( + entry.getKey(), aspect, ASPECT_LATEST_VERSION))) + .collect(Collectors.toList()); - List<EbeanAspectV2> results = _server.find(EbeanAspectV2.class) - .where().idIn(keys) - .findList(); + List<EbeanAspectV2> results = _server.find(EbeanAspectV2.class).where().idIn(keys).findList(); return toUrnAspectMap(results); } @@ -204,7 +229,8 @@ public Map<String, Map<String, EntityAspect>> getLatestAspects(@Nonnull Map<Stri @Override public long countEntities() { validateConnection(); - return _server.find(EbeanAspectV2.class) + return _server + .find(EbeanAspectV2.class) .setDistinct(true) .select(EbeanAspectV2.URN_COLUMN) .findCount(); @@ -213,7 +239,8 @@ public long countEntities() { @Override public boolean checkIfAspectExists(@Nonnull String aspectName) { validateConnection(); - return _server.find(EbeanAspectV2.class) + return _server + .find(EbeanAspectV2.class) .where() .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) .exists(); @@ -221,7 +248,8 @@ public boolean 
checkIfAspectExists(@Nonnull String aspectName) { @Override @Nullable - public EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String aspectName, final long version) { + public EntityAspect getAspect( + @Nonnull final String urn, @Nonnull final String aspectName, final long version) { return getAspect(new EntityAspectIdentifier(urn, aspectName, version)); } @@ -229,7 +257,8 @@ public EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String a @Nullable public EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key) { validateConnection(); - EbeanAspectV2.PrimaryKey primaryKey = new EbeanAspectV2.PrimaryKey(key.getUrn(), key.getAspect(), key.getVersion()); + EbeanAspectV2.PrimaryKey primaryKey = + new EbeanAspectV2.PrimaryKey(key.getUrn(), key.getAspect(), key.getVersion()); EbeanAspectV2 ebeanAspect = _server.find(EbeanAspectV2.class, primaryKey); return ebeanAspect == null ? null : ebeanAspect.toEntityAspect(); } @@ -244,46 +273,60 @@ public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect a @Override public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { validateConnection(); - return _server.createQuery(EbeanAspectV2.class).where().eq(EbeanAspectV2.URN_COLUMN, urn).delete(tx); + return _server + .createQuery(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.URN_COLUMN, urn) + .delete(tx); } @Override @Nonnull - public Map<EntityAspectIdentifier, EntityAspect> batchGet(@Nonnull final Set<EntityAspectIdentifier> keys) { + public Map<EntityAspectIdentifier, EntityAspect> batchGet( + @Nonnull final Set<EntityAspectIdentifier> keys) { validateConnection(); if (keys.isEmpty()) { return Collections.emptyMap(); } - final Set<EbeanAspectV2.PrimaryKey> ebeanKeys = keys.stream().map(EbeanAspectV2.PrimaryKey::fromAspectIdentifier).collect(Collectors.toSet()); + final Set<EbeanAspectV2.PrimaryKey> ebeanKeys = + keys.stream() + .map(EbeanAspectV2.PrimaryKey::fromAspectIdentifier) + .collect(Collectors.toSet()); final List<EbeanAspectV2> records; if (_queryKeysCount == 0) { records = batchGet(ebeanKeys, ebeanKeys.size()); } else { records = batchGet(ebeanKeys, _queryKeysCount); } - return records.stream().collect(Collectors.toMap(record -> record.getKey().toAspectIdentifier(), EbeanAspectV2::toEntityAspect)); + return records.stream() + .collect( + Collectors.toMap( + record -> record.getKey().toAspectIdentifier(), EbeanAspectV2::toEntityAspect)); } /** - * BatchGet that allows pagination on keys to avoid large queries. - * TODO: can further improve by running the sub queries in parallel + * BatchGet that allows pagination on keys to avoid large queries. 
TODO: can further improve by + * running the sub queries in parallel * * @param keys a set of keys with urn, aspect and version * @param keysCount the max number of keys for each sub query */ @Nonnull - private List<EbeanAspectV2> batchGet(@Nonnull final Set<EbeanAspectV2.PrimaryKey> keys, final int keysCount) { + private List<EbeanAspectV2> batchGet( + @Nonnull final Set<EbeanAspectV2.PrimaryKey> keys, final int keysCount) { validateConnection(); int position = 0; final int totalPageCount = QueryUtils.getTotalPageCount(keys.size(), keysCount); - final List<EbeanAspectV2> finalResult = batchGetUnion(new ArrayList<>(keys), keysCount, position); + final List<EbeanAspectV2> finalResult = + batchGetUnion(new ArrayList<>(keys), keysCount, position); while (QueryUtils.hasMore(position, keysCount, totalPageCount)) { position += keysCount; - final List<EbeanAspectV2> oneStatementResult = batchGetUnion(new ArrayList<>(keys), keysCount, position); + final List<EbeanAspectV2> oneStatementResult = + batchGetUnion(new ArrayList<>(keys), keysCount, position); finalResult.addAll(oneStatementResult); } @@ -291,8 +334,8 @@ private List<EbeanAspectV2> batchGet(@Nonnull final Set<EbeanAspectV2.PrimaryKey } /** - * Builds a single SELECT statement for batch get, which selects one entity, and then can be UNION'd with other SELECT - * statements. + * Builds a single SELECT statement for batch get, which selects one entity, and then can be + * UNION'd with other SELECT statements. */ private String batchGetSelect( final int selectId, @@ -310,48 +353,52 @@ private String batchGetSelect( outputParamsToValues.put(aspectArg, aspect); outputParamsToValues.put(versionArg, version); - return String.format("SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor " + return String.format( + "SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor " + "FROM %s WHERE urn = :%s AND aspect = :%s AND version = :%s", EbeanAspectV2.class.getAnnotation(Table.class).name(), urnArg, aspectArg, versionArg); } @Nonnull private List<EbeanAspectV2> batchGetUnion( - @Nonnull final List<EbeanAspectV2.PrimaryKey> keys, - final int keysCount, - final int position) { + @Nonnull final List<EbeanAspectV2.PrimaryKey> keys, final int keysCount, final int position) { validateConnection(); - // Build one SELECT per key and then UNION ALL the results. This can be much more performant than OR'ing the + // Build one SELECT per key and then UNION ALL the results. This can be much more performant + // than OR'ing the // conditions together. Our query will look like: // SELECT * FROM metadata_aspect WHERE urn = 'urn0' AND aspect = 'aspect0' AND version = 0 // UNION ALL // SELECT * FROM metadata_aspect WHERE urn = 'urn0' AND aspect = 'aspect1' AND version = 0 // ... - // Note: UNION ALL should be safe and more performant than UNION. We're selecting the entire entity key (as well + // Note: UNION ALL should be safe and more performant than UNION. We're selecting the entire + // entity key (as well // as data), so each result should be unique. No need to deduplicate. - // Another note: ebean doesn't support UNION ALL, so we need to manually build the SQL statement ourselves. + // Another note: ebean doesn't support UNION ALL, so we need to manually build the SQL statement + // ourselves. 
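    // Illustrative shape of the statement assembled below for a page of two keys
    // (a sketch only: the :urn0/:aspect0/:version0 parameter names are assumed from
    // batchGetSelect above, and the table name comes from the @Table annotation on
    // EbeanAspectV2, so both are hypothetical here):
    //
    //   SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor
    //   FROM metadata_aspect_v2 WHERE urn = :urn0 AND aspect = :aspect0 AND version = :version0
    //   UNION ALL
    //   SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor
    //   FROM metadata_aspect_v2 WHERE urn = :urn1 AND aspect = :aspect1 AND version = :version1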
final StringBuilder sb = new StringBuilder(); final int end = Math.min(keys.size(), position + keysCount); final Map<String, Object> params = new HashMap<>(); for (int index = position; index < end; index++) { - sb.append(batchGetSelect( - index - position, - keys.get(index).getUrn(), - keys.get(index).getAspect(), - keys.get(index).getVersion(), - params)); + sb.append( + batchGetSelect( + index - position, + keys.get(index).getUrn(), + keys.get(index).getAspect(), + keys.get(index).getVersion(), + params)); if (index != end - 1) { sb.append(" UNION ALL "); } } - final RawSql rawSql = RawSqlBuilder.parse(sb.toString()) - .columnMapping(EbeanAspectV2.URN_COLUMN, "key.urn") - .columnMapping(EbeanAspectV2.ASPECT_COLUMN, "key.aspect") - .columnMapping(EbeanAspectV2.VERSION_COLUMN, "key.version") - .create(); + final RawSql rawSql = + RawSqlBuilder.parse(sb.toString()) + .columnMapping(EbeanAspectV2.URN_COLUMN, "key.urn") + .columnMapping(EbeanAspectV2.ASPECT_COLUMN, "key.aspect") + .columnMapping(EbeanAspectV2.VERSION_COLUMN, "key.version") + .create(); final Query<EbeanAspectV2> query = _server.find(EbeanAspectV2.class).setRawSql(rawSql); @@ -373,23 +420,24 @@ public ListResult<String> listUrns( validateConnection(); final String urnPrefixMatcher = "urn:li:" + entityName + ":%"; - final PagedList<EbeanAspectV2> pagedList = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.KEY_ID) - .where() - .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) - .setFirstRow(start) - .setMaxRows(pageSize) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .findPagedList(); + final PagedList<EbeanAspectV2> pagedList = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.KEY_ID) + .where() + .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) + .setFirstRow(start) + .setMaxRows(pageSize) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .findPagedList(); - final List<String> urns = pagedList - .getList() - .stream() - .map(entry -> entry.getKey().getUrn()) - .collect(Collectors.toList()); + final List<String> urns = + pagedList.getList().stream() + .map(entry -> entry.getKey().getUrn()) + .collect(Collectors.toList()); return toListResult(urns, null, pagedList, start); } @@ -397,7 +445,9 @@ public ListResult<String> listUrns( @Nonnull @Override public Integer countAspect(@Nonnull String aspectName, @Nullable String urnLike) { - ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class) + ExpressionList<EbeanAspectV2> exp = + _server + .find(EbeanAspectV2.class) .select(EbeanAspectV2.KEY_ID) .where() .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) @@ -412,7 +462,9 @@ public Integer countAspect(@Nonnull String aspectName, @Nullable String urnLike) @Nonnull @Override public PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args) { - ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class) + ExpressionList<EbeanAspectV2> exp = + _server + .find(EbeanAspectV2.class) .select(EbeanAspectV2.ALL_COLUMNS) .where() .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); @@ -425,24 +477,26 @@ public PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args) { if (args.urnLike != null) { exp = exp.like(EbeanAspectV2.URN_COLUMN, args.urnLike); } - return exp.orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .orderBy() - 
.asc(EbeanAspectV2.ASPECT_COLUMN) - .setFirstRow(args.start) - .setMaxRows(args.batchSize) - .findPagedList(); + return exp.orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .orderBy() + .asc(EbeanAspectV2.ASPECT_COLUMN) + .setFirstRow(args.start) + .setMaxRows(args.batchSize) + .findPagedList(); } @Override @Nonnull public Stream<EntityAspect> streamAspects(String entityName, String aspectName) { - ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%"); + ExpressionList<EbeanAspectV2> exp = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%"); return exp.query().findStream().map(EbeanAspectV2::toEntityAspect); } @@ -450,14 +504,16 @@ public Stream<EntityAspect> streamAspects(String entityName, String aspectName) @Nonnull public Iterable<String> listAllUrns(int start, int pageSize) { validateConnection(); - PagedList<EbeanAspectV2> ebeanAspects = _server.find(EbeanAspectV2.class) - .setDistinct(true) - .select(EbeanAspectV2.URN_COLUMN) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .setFirstRow(start) - .setMaxRows(pageSize) - .findPagedList(); + PagedList<EbeanAspectV2> ebeanAspects = + _server + .find(EbeanAspectV2.class) + .setDistinct(true) + .select(EbeanAspectV2.URN_COLUMN) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .setFirstRow(start) + .setMaxRows(pageSize) + .findPagedList(); return ebeanAspects.getList().stream().map(EbeanAspectV2::getUrn).collect(Collectors.toList()); } @@ -473,21 +529,27 @@ public ListResult<String> listAspectMetadata( validateConnection(); final String urnPrefixMatcher = "urn:li:" + entityName + ":%"; - final PagedList<EbeanAspectV2> pagedList = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .eq(EbeanAspectV2.VERSION_COLUMN, version) - .setFirstRow(start) - .setMaxRows(pageSize) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .findPagedList(); + final PagedList<EbeanAspectV2> pagedList = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .eq(EbeanAspectV2.VERSION_COLUMN, version) + .setFirstRow(start) + .setMaxRows(pageSize) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .findPagedList(); - final List<String> aspects = pagedList.getList().stream().map(EbeanAspectV2::getMetadata).collect(Collectors.toList()); - final ListResultMetadata listResultMetadata = toListResultMetadata(pagedList.getList().stream().map( - EbeanAspectDao::toExtraInfo).collect(Collectors.toList())); + final List<String> aspects = + pagedList.getList().stream().map(EbeanAspectV2::getMetadata).collect(Collectors.toList()); + final ListResultMetadata listResultMetadata = + toListResultMetadata( + pagedList.getList().stream() + .map(EbeanAspectDao::toExtraInfo) + .collect(Collectors.toList())); return toListResult(aspects, listResultMetadata, pagedList, start); } @@ -504,21 +566,26 @@ public ListResult<String> listLatestAspectMetadata( @Override @Nonnull - public <T> T 
runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) { + public <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) { return runInTransactionWithRetry(block, null, maxTransactionRetry); } @Override @Nonnull - public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, @Nullable AspectsBatch batch, - final int maxTransactionRetry) { + public <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, + @Nullable AspectsBatch batch, + final int maxTransactionRetry) { validateConnection(); int retryCount = 0; Exception lastException = null; T result = null; do { - try (Transaction transaction = _server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction = + _server.beginTransaction( + TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); result = block.apply(transaction); transaction.commit(); @@ -526,8 +593,15 @@ public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> b break; } catch (PersistenceException exception) { if (exception instanceof DuplicateKeyException) { - if (batch != null && batch.getItems().stream().allMatch(a -> a.getAspectName().equals(a.getEntitySpec().getKeyAspectSpec().getName()))) { - log.warn("Skipping DuplicateKeyException retry since aspect is the key aspect. {}", batch.getUrnAspectsMap().keySet()); + if (batch != null + && batch.getItems().stream() + .allMatch( + a -> + a.getAspectName() + .equals(a.getEntitySpec().getKeyAspectSpec().getName()))) { + log.warn( + "Skipping DuplicateKeyException retry since aspect is the key aspect. {}", + batch.getUrnAspectsMap().keySet()); continue; } } @@ -540,7 +614,8 @@ public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> b if (lastException != null) { MetricUtils.counter(MetricRegistry.name(this.getClass(), "txFailedAfterRetries")).inc(); - throw new RetryLimitReached("Failed to add after " + maxTransactionRetry + " retries", lastException); + throw new RetryLimitReached( + "Failed to add after " + maxTransactionRetry + " retries", lastException); } return result; @@ -549,7 +624,9 @@ public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> b @Override public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { validateConnection(); - final List<EbeanAspectV2.PrimaryKey> result = _server.find(EbeanAspectV2.class) + final List<EbeanAspectV2.PrimaryKey> result = + _server + .find(EbeanAspectV2.class) .where() .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) @@ -561,37 +638,35 @@ public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspec return result.isEmpty() ? 
-1 : result.get(0).getVersion(); } - public Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<String>> urnAspects) { + public Map<String, Map<String, Long>> getNextVersions( + @Nonnull Map<String, Set<String>> urnAspects) { validateConnection(); - Junction<EbeanAspectV2> queryJunction = _server.find(EbeanAspectV2.class) + Junction<EbeanAspectV2> queryJunction = + _server + .find(EbeanAspectV2.class) .select("urn, aspect, max(version)") .where() .in("urn", urnAspects.keySet()) .or(); ExpressionList<EbeanAspectV2> exp = null; - for (Map.Entry<String, Set<String>> entry: urnAspects.entrySet()) { + for (Map.Entry<String, Set<String>> entry : urnAspects.entrySet()) { if (exp == null) { - exp = queryJunction.and() - .eq("urn", entry.getKey()) - .in("aspect", entry.getValue()) - .endAnd(); + exp = queryJunction.and().eq("urn", entry.getKey()).in("aspect", entry.getValue()).endAnd(); } else { - exp = exp.and() - .eq("urn", entry.getKey()) - .in("aspect", entry.getValue()) - .endAnd(); + exp = exp.and().eq("urn", entry.getKey()).in("aspect", entry.getValue()).endAnd(); } } Map<String, Map<String, Long>> result = new HashMap<>(); // Default next version 0 - urnAspects.forEach((key, value) -> { - Map<String, Long> defaultNextVersion = new HashMap<>(); - value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); - result.put(key, defaultNextVersion); - }); + urnAspects.forEach( + (key, value) -> { + Map<String, Long> defaultNextVersion = new HashMap<>(); + value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); + result.put(key, defaultNextVersion); + }); if (exp == null) { return result; @@ -599,7 +674,7 @@ public Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<S List<EbeanAspectV2.PrimaryKey> dbResults = exp.endOr().findIds(); - for (EbeanAspectV2.PrimaryKey key: dbResults) { + for (EbeanAspectV2.PrimaryKey key : dbResults) { if (result.get(key.getUrn()).get(key.getAspect()) <= key.getVersion()) { result.get(key.getUrn()).put(key.getAspect(), key.getVersion() + 1L); } @@ -615,7 +690,9 @@ private <T> ListResult<T> toListResult( @Nonnull final PagedList<?> pagedList, @Nullable final Integer start) { final int nextStart = - (start != null && pagedList.hasNext()) ? start + pagedList.getList().size() : ListResult.INVALID_NEXT_START; + (start != null && pagedList.hasNext()) + ? 
start + pagedList.getList().size() + : ListResult.INVALID_NEXT_START; return ListResult.<T>builder() // Format .values(values) @@ -667,32 +744,44 @@ private ListResultMetadata toListResultMetadata(@Nonnull final List<ExtraInfo> e @Override @Nonnull - public List<EntityAspect> getAspectsInRange(@Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) { + public List<EntityAspect> getAspectsInRange( + @Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) { validateConnection(); - List<EbeanAspectV2> ebeanAspects = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) - .in(EbeanAspectV2.ASPECT_COLUMN, aspectNames) - .inRange(EbeanAspectV2.CREATED_ON_COLUMN, new Timestamp(startTimeMillis), new Timestamp(endTimeMillis)) - .findList(); + List<EbeanAspectV2> ebeanAspects = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) + .in(EbeanAspectV2.ASPECT_COLUMN, aspectNames) + .inRange( + EbeanAspectV2.CREATED_ON_COLUMN, + new Timestamp(startTimeMillis), + new Timestamp(endTimeMillis)) + .findList(); return ebeanAspects.stream().map(EbeanAspectV2::toEntityAspect).collect(Collectors.toList()); } private static Map<String, EntityAspect> toAspectMap(Set<EbeanAspectV2> beans) { - return beans.stream().map(bean -> Map.entry(bean.getAspect(), bean)) - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); + return beans.stream() + .map(bean -> Map.entry(bean.getAspect(), bean)) + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); } - private static Map<String, Map<String, EntityAspect>> toUrnAspectMap(Collection<EbeanAspectV2> beans) { + private static Map<String, Map<String, EntityAspect>> toUrnAspectMap( + Collection<EbeanAspectV2> beans) { return beans.stream() - .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) - .entrySet().stream() - .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) + .entrySet() + .stream() + .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } - private static String buildMetricName(EntitySpec entitySpec, AspectSpec aspectSpec, String status) { - return String.join(MetricUtils.DELIMITER, List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); + private static String buildMetricName( + EntitySpec entitySpec, AspectSpec aspectSpec, String status) { + return String.join( + MetricUtils.DELIMITER, + List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java index 3d2a4a5ae051c..648b7cd6a65b0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java @@ -16,10 +16,7 @@ import lombok.NonNull; import lombok.Setter; - -/** - * Schema definition for the legacy aspect table. - */ +/** Schema definition for the legacy aspect table. 
*/ @Getter @Setter @Entity @@ -38,9 +35,7 @@ public class EbeanAspectV1 extends Model { public static final String CREATED_BY_COLUMN = "createdBy"; public static final String CREATED_FOR_COLUMN = "createdFor"; - /** - * Key for an aspect in the table. - */ + /** Key for an aspect in the table. */ @Embeddable @Getter @AllArgsConstructor @@ -65,10 +60,7 @@ public static class PrimaryKey { private long version; } - @NonNull - @EmbeddedId - @Index - protected PrimaryKey key; + @NonNull @EmbeddedId @Index protected PrimaryKey key; @NonNull @Lob diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java index 3215542ffd347..71e52ed403b9b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java @@ -19,10 +19,7 @@ import lombok.NonNull; import lombok.Setter; - -/** - * Schema definition for the new aspect table. - */ +/** Schema definition for the new aspect table. */ @Getter @Setter @NoArgsConstructor @@ -45,9 +42,7 @@ public class EbeanAspectV2 extends Model { public static final String SYSTEM_METADATA_COLUMN = "systemmetadata"; - /** - * Key for an aspect in the table. - */ + /** Key for an aspect in the table. */ @Embeddable @Getter @AllArgsConstructor @@ -80,10 +75,7 @@ public EntityAspectIdentifier toAspectIdentifier() { } } - @NonNull - @EmbeddedId - @Index - protected PrimaryKey key; + @NonNull @EmbeddedId @Index protected PrimaryKey key; @NonNull @Column(name = URN_COLUMN, length = 500, nullable = false) @@ -115,9 +107,24 @@ public EntityAspectIdentifier toAspectIdentifier() { @Column(name = SYSTEM_METADATA_COLUMN, nullable = true) protected String systemMetadata; - public EbeanAspectV2(String urn, String aspect, long version, String metadata, Timestamp createdOn, String createdBy, - String createdFor, String systemMetadata) { - this(new PrimaryKey(urn, aspect, version), urn, aspect, version, metadata, createdOn, createdBy, createdFor, + public EbeanAspectV2( + String urn, + String aspect, + long version, + String metadata, + Timestamp createdOn, + String createdBy, + String createdFor, + String systemMetadata) { + this( + new PrimaryKey(urn, aspect, version), + urn, + aspect, + version, + metadata, + createdOn, + createdBy, + createdFor, systemMetadata); } @@ -131,8 +138,7 @@ public EntityAspect toEntityAspect() { getSystemMetadata(), getCreatedOn(), getCreatedBy(), - getCreatedFor() - ); + getCreatedFor()); } public static EbeanAspectV2 fromEntityAspect(EntityAspect aspect) { @@ -144,7 +150,6 @@ public static EbeanAspectV2 fromEntityAspect(EntityAspect aspect) { aspect.getCreatedOn(), aspect.getCreatedBy(), aspect.getCreatedFor(), - aspect.getSystemMetadata() - ); + aspect.getSystemMetadata()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java index d94ec1fa7ae2b..e12f0f8f1b5d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java @@ -1,7 +1,8 @@ package com.linkedin.metadata.entity.ebean; -import com.linkedin.common.urn.Urn; import com.datahub.util.RecordUtils; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; @@ -13,7 +14,6 @@ import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import com.linkedin.retention.VersionBasedRetention; -import com.linkedin.metadata.Constants; import io.ebean.Database; import io.ebean.Expression; import io.ebean.ExpressionList; @@ -36,7 +36,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class EbeanRetentionService extends RetentionService { @@ -53,22 +52,26 @@ public EntityService getEntityService() { @Override protected AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps) { - return AspectsBatchImpl.builder() - .mcps(mcps, _entityService.getEntityRegistry()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, _entityService.getEntityRegistry()).build(); } @Override @WithSpan protected void applyRetention(List<RetentionContext> retentionContexts) { - List<RetentionContext> nonEmptyContexts = retentionContexts.stream() - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()).collect(Collectors.toList()); + List<RetentionContext> nonEmptyContexts = + retentionContexts.stream() + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) + .collect(Collectors.toList()); // Only run delete if at least one of the retention policies are applicable if (!nonEmptyContexts.isEmpty()) { - ExpressionList<EbeanAspectV2> deleteQuery = _server.find(EbeanAspectV2.class) + ExpressionList<EbeanAspectV2> deleteQuery = + _server + .find(EbeanAspectV2.class) .where() .ne(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) .or(); @@ -78,25 +81,32 @@ protected void applyRetention(List<RetentionContext> retentionContexts) { Retention retentionPolicy = context.getRetentionPolicy().get(); if (retentionPolicy.hasVersion()) { - boolean appliedVersion = getVersionBasedRetentionQuery(context.getUrn(), context.getAspectName(), - retentionPolicy.getVersion(), context.getMaxVersion()) - .map(expr -> - deleteQuery.and() - .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) - .add(expr) - .endAnd() - ).isPresent(); + boolean appliedVersion = + getVersionBasedRetentionQuery( + context.getUrn(), + context.getAspectName(), + retentionPolicy.getVersion(), + context.getMaxVersion()) + .map( + expr -> + deleteQuery + .and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(expr) + .endAnd()) + .isPresent(); applied = appliedVersion || applied; } if (retentionPolicy.hasTime()) { - deleteQuery.and() - .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) - .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) - .endAnd(); + deleteQuery + .and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) + .endAnd(); applied = true; } } @@ -108,13 +118,15 @@ protected void applyRetention(List<RetentionContext> retentionContexts) { } private long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { - List<EbeanAspectV2> 
result = _server.find(EbeanAspectV2.class) - .where() - .eq("urn", urn) - .eq("aspect", aspectName) - .orderBy() - .desc("version") - .findList(); + List<EbeanAspectV2> result = + _server + .find(EbeanAspectV2.class) + .where() + .eq("urn", urn) + .eq("aspect", aspectName) + .orderBy() + .desc("version") + .findList(); if (result.size() == 0) { return -1; } @@ -126,57 +138,63 @@ private Optional<Expression> getVersionBasedRetentionQuery( @Nonnull String aspectName, @Nonnull final VersionBasedRetention retention, @Nonnull final Optional<Long> maxVersionFromUpdate) { - long largestVersion = maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn.toString(), aspectName)); + long largestVersion = + maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn.toString(), aspectName)); if (largestVersion < retention.getMaxVersions()) { return Optional.empty(); } return Optional.of( - new SimpleExpression(EbeanAspectV2.VERSION_COLUMN, Op.LT, largestVersion - retention.getMaxVersions() + 1)); + new SimpleExpression( + EbeanAspectV2.VERSION_COLUMN, Op.LT, largestVersion - retention.getMaxVersions() + 1)); } private Expression getTimeBasedRetentionQuery(@Nonnull final TimeBasedRetention retention) { - return new SimpleExpression(EbeanAspectV2.CREATED_ON_COLUMN, Op.LT, + return new SimpleExpression( + EbeanAspectV2.CREATED_ON_COLUMN, + Op.LT, new Timestamp(_clock.millis() - retention.getMaxAgeInSeconds() * 1000)); } private void applyRetention( - PagedList<EbeanAspectV2> rows, - Map<String, DataHubRetentionConfig> retentionPolicyMap, - BulkApplyRetentionResult applyRetentionResult - ) { + PagedList<EbeanAspectV2> rows, + Map<String, DataHubRetentionConfig> retentionPolicyMap, + BulkApplyRetentionResult applyRetentionResult) { try (Transaction transaction = _server.beginTransaction(TxScope.required())) { transaction.setBatchMode(true); transaction.setBatchSize(_batchSize); - List<RetentionContext> retentionContexts = rows.getList().stream() + List<RetentionContext> retentionContexts = + rows.getList().stream() .filter(row -> row.getVersion() != 0) - .map(row -> { - // 1. Extract an Entity type from the entity Urn - Urn urn; - try { - urn = Urn.createFromString(row.getUrn()); - } catch (Exception e) { - log.error("Failed to serialize urn {}", row.getUrn(), e); - return null; - } - - final String aspectNameFromRecord = row.getAspect(); - log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); - // Get the retention policies to apply from the local retention policy map - Optional<Retention> retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - - return RetentionService.RetentionContext.builder() + .map( + row -> { + // 1. 
Extract an Entity type from the entity Urn + Urn urn; + try { + urn = Urn.createFromString(row.getUrn()); + } catch (Exception e) { + log.error("Failed to serialize urn {}", row.getUrn(), e); + return null; + } + + final String aspectNameFromRecord = row.getAspect(); + log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); + // Get the retention policies to apply from the local retention policy map + Optional<Retention> retentionPolicy = + getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + + return RetentionService.RetentionContext.builder() .urn(urn) .aspectName(aspectNameFromRecord) .retentionPolicy(retentionPolicy) .maxVersion(Optional.of(row.getVersion())) .build(); - }) + }) .filter(Objects::nonNull) .collect(Collectors.toList()); @@ -209,7 +227,8 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } @Override - public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args) { + public BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args) { long startTime = System.currentTimeMillis(); BulkApplyRetentionResult result = new BulkApplyRetentionResult(); @@ -223,13 +242,18 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe result.timeRetentionPolicyMapMs = System.currentTimeMillis() - startTime; startTime = System.currentTimeMillis(); - //only supports version based retention for batch apply - //find urn, aspect pair where distinct versions > 20 to apply retention policy - Query<EbeanAspectV2> query = _server.find(EbeanAspectV2.class) + // only supports version based retention for batch apply + // find urn, aspect pair where distinct versions > 20 to apply retention policy + Query<EbeanAspectV2> query = + _server + .find(EbeanAspectV2.class) .setDistinct(true) - .select(String.format( - "%s, %s, count(%s)", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, EbeanAspectV2.VERSION_COLUMN) - ); + .select( + String.format( + "%s, %s, count(%s)", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.VERSION_COLUMN)); ExpressionList<EbeanAspectV2> exp = null; if (args.urn != null || args.aspectName != null) { exp = query.where(); @@ -246,8 +270,8 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe exp = exp.having(); } - PagedList<EbeanAspectV2> rows = exp - .gt(String.format("count(%s)", EbeanAspectV2.VERSION_COLUMN), args.attemptWithVersion) + PagedList<EbeanAspectV2> rows = + exp.gt(String.format("count(%s)", EbeanAspectV2.VERSION_COLUMN), args.attemptWithVersion) .setFirstRow(args.start) .setMaxRows(args.count) .findPagedList(); @@ -262,7 +286,8 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe log.error("Failed to serialize urn {}", row.getUrn(), e); continue; } - PagedList<EbeanAspectV2> rowsToChange = queryCandidates(row.getUrn(), null, row.getAspect()) + PagedList<EbeanAspectV2> rowsToChange = + queryCandidates(row.getUrn(), null, row.getAspect()) .setFirstRow(args.start) .setMaxRows(args.count) .findPagedList(); @@ -275,25 +300,39 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe } private Map<String, DataHubRetentionConfig> getAllRetentionPolicies() { - return _server.find(EbeanAspectV2.class) - .select(String.format("%s, %s, %s", 
EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, - EbeanAspectV2.METADATA_COLUMN)) + return _server + .find(EbeanAspectV2.class) + .select( + String.format( + "%s, %s, %s", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.METADATA_COLUMN)) .where() .eq(EbeanAspectV2.ASPECT_COLUMN, Constants.DATAHUB_RETENTION_ASPECT) .eq(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) .findList() .stream() - .collect(Collectors.toMap(EbeanAspectV2::getUrn, - row -> RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, row.getMetadata()))); + .collect( + Collectors.toMap( + EbeanAspectV2::getUrn, + row -> + RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, row.getMetadata()))); } - private ExpressionList<EbeanAspectV2> queryCandidates(@Nullable String urn, - @Nullable String entityName, @Nullable String aspectName) { - ExpressionList<EbeanAspectV2> query = _server.find(EbeanAspectV2.class) - .setDistinct(true) - .select(String.format("%s, %s, max(%s)", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, - EbeanAspectV2.VERSION_COLUMN)) - .where(); + private ExpressionList<EbeanAspectV2> queryCandidates( + @Nullable String urn, @Nullable String entityName, @Nullable String aspectName) { + ExpressionList<EbeanAspectV2> query = + _server + .find(EbeanAspectV2.class) + .setDistinct(true) + .select( + String.format( + "%s, %s, max(%s)", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.VERSION_COLUMN)) + .where(); if (urn != null) { query.eq(EbeanAspectV2.URN_COLUMN, urn); } @@ -306,10 +345,13 @@ private ExpressionList<EbeanAspectV2> queryCandidates(@Nullable String urn, return query; } - private PagedList<EbeanAspectV2> getPagedAspects(@Nullable String entityName, @Nullable String aspectName, - final int start, final int pageSize) { - return queryCandidates(null, entityName, aspectName).orderBy( - EbeanAspectV2.URN_COLUMN + ", " + EbeanAspectV2.ASPECT_COLUMN) + private PagedList<EbeanAspectV2> getPagedAspects( + @Nullable String entityName, + @Nullable String aspectName, + final int start, + final int pageSize) { + return queryCandidates(null, entityName, aspectName) + .orderBy(EbeanAspectV2.URN_COLUMN + ", " + EbeanAspectV2.ASPECT_COLUMN) .setFirstRow(start) .setMaxRows(pageSize) .findPagedList(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java index ca5e070bc5ca7..11261afdaa0b2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java @@ -5,63 +5,67 @@ import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Getter @Builder(toBuilder = true) public class AspectsBatchImpl implements AspectsBatch { - private final List<? 
extends AbstractBatchItem> items; - - public static class AspectsBatchImplBuilder { - /** - * Just one aspect record template - * @param data aspect data - * @return builder - */ - public AspectsBatchImplBuilder one(AbstractBatchItem data) { - this.items = List.of(data); - return this; - } + private final List<? extends AbstractBatchItem> items; - public AspectsBatchImplBuilder mcps(List<MetadataChangeProposal> mcps, EntityRegistry entityRegistry) { - this.items = mcps.stream().map(mcp -> { - if (mcp.getChangeType().equals(ChangeType.PATCH)) { - return PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); - } else { - return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); - } - }).collect(Collectors.toList()); - return this; - } + public static class AspectsBatchImplBuilder { + /** + * Just one aspect record template + * + * @param data aspect data + * @return builder + */ + public AspectsBatchImplBuilder one(AbstractBatchItem data) { + this.items = List.of(data); + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - AspectsBatchImpl that = (AspectsBatchImpl) o; - return Objects.equals(items, that.items); + public AspectsBatchImplBuilder mcps( + List<MetadataChangeProposal> mcps, EntityRegistry entityRegistry) { + this.items = + mcps.stream() + .map( + mcp -> { + if (mcp.getChangeType().equals(ChangeType.PATCH)) { + return PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); + } else { + return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); + } + }) + .collect(Collectors.toList()); + return this; } + } - @Override - public int hashCode() { - return Objects.hash(items); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "AspectsBatchImpl{" + "items=" + items + '}'; + if (o == null || getClass() != o.getClass()) { + return false; } + AspectsBatchImpl that = (AspectsBatchImpl) o; + return Objects.equals(items, that.items); + } + + @Override + public int hashCode() { + return Objects.hash(items); + } + + @Override + public String toString() { + return "AspectsBatchImpl{" + "items=" + items + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java index cc0b3d915b407..f9b1e340d5541 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean.transactions; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -20,169 +22,195 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Objects; - -import static com.linkedin.metadata.Constants.*; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Getter @Builder(toBuilder = true) 
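
For orientation before the PatchBatchItem class below: a minimal sketch of how these batch items are created in practice, using the AspectsBatchImpl builder shown above. The builder, its mcps(...) method, and the PATCH/UPSERT dispatch are from this module; the BatchExample wrapper class is hypothetical.

import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
import com.linkedin.metadata.entity.transactions.AspectsBatch;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.mxe.MetadataChangeProposal;
import java.util.List;

// Hypothetical caller illustrating the builder wiring above.
class BatchExample {
  static AspectsBatch toBatch(List<MetadataChangeProposal> mcps, EntityRegistry registry) {
    // mcps(...) dispatches on ChangeType: PATCH -> PatchBatchItem, otherwise UpsertBatchItem.
    return AspectsBatchImpl.builder().mcps(mcps, registry).build();
  }
}
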
public class PatchBatchItem extends AbstractBatchItem { - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final Patch patch; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.PATCH; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { + UpsertBatchItem.UpsertBatchItemBuilder builder = + UpsertBatchItem.builder() + .urn(getUrn()) + .aspectName(getAspectName()) + .metadataChangeProposal(getMetadataChangeProposal()) + .systemMetadata(getSystemMetadata()); + + AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + + RecordTemplate currentValue = + recordTemplate != null + ? recordTemplate + : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + + if (currentValue == null) { + // Attempting to patch a value to an aspect which has no default value and no existing value. + throw new UnsupportedOperationException( + String.format( + "Patch not supported for aspect with name %s. 
" + + "Default aspect is required because no aspect currently exists for urn %s.", + getAspectName(), getUrn())); } - // urn an urn associated with the new aspect - private final Urn urn; - // aspectName name of the aspect being inserted - private final String aspectName; - private final SystemMetadata systemMetadata; - - private final Patch patch; - - private final MetadataChangeProposal metadataChangeProposal; - - // derived - private final EntitySpec entitySpec; - private final AspectSpec aspectSpec; - - @Override - public ChangeType getChangeType() { - return ChangeType.PATCH; + try { + builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); + } catch (JsonProcessingException | JsonPatchException e) { + throw new RuntimeException(e); } - @Override - public void validateUrn(EntityRegistry entityRegistry, Urn urn) { - EntityUtils.validateUrn(entityRegistry, urn); - } + return builder.build(entityRegistry); + } + + public static class PatchBatchItemBuilder { - public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { - UpsertBatchItem.UpsertBatchItemBuilder builder = UpsertBatchItem.builder() - .urn(getUrn()) - .aspectName(getAspectName()) - .metadataChangeProposal(getMetadataChangeProposal()) - .systemMetadata(getSystemMetadata()); + public PatchBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); - AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); - RecordTemplate currentValue = recordTemplate != null ? recordTemplate - : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); - if (currentValue == null) { - // Attempting to patch a value to an aspect which has no default value and no existing value. - throw new UnsupportedOperationException(String.format("Patch not supported for aspect with name %s. " - + "Default aspect is required because no aspect currently exists for urn %s.", getAspectName(), getUrn())); - } + if (this.patch == null) { + throw new IllegalArgumentException( + String.format("Missing patch to apply. 
Aspect: %s", this.aspectSpec.getName())); + } - try { - builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); - } catch (JsonProcessingException | JsonPatchException e) { - throw new RuntimeException(e); - } + return new PatchBatchItem( + this.urn, + this.aspectName, + generateSystemMetadataIfEmpty(this.systemMetadata), + this.patch, + this.metadataChangeProposal, + this.entitySpec, + this.aspectSpec); + } - return builder.build(entityRegistry); + public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { + throw new UnsupportedOperationException( + "ChangeType not supported: " + + mcp.getChangeType() + + " for aspect " + + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + PatchBatchItemBuilder builder = + PatchBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .patch(convertToJsonPatch(mcp)); + + return builder.build(entityRegistry); } - public static class PatchBatchItemBuilder { - - public PatchBatchItem build(EntityRegistry entityRegistry) { - EntityUtils.validateUrn(entityRegistry, this.urn); - log.debug("entity type = {}", this.urn.getEntityType()); - - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); - log.debug("entity spec = {}", this.entitySpec); - - aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); - log.debug("aspect spec = {}", this.aspectSpec); - - if (this.patch == null) { - throw new IllegalArgumentException(String.format("Missing patch to apply. 
Aspect: %s", - this.aspectSpec.getName())); - } - - return new PatchBatchItem(this.urn, this.aspectName, generateSystemMetadataIfEmpty(this.systemMetadata), - this.patch, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); - } - - public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { - log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); - AspectSpec aspectSpec = validateAspect(mcp, entitySpec); - - if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { - throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() - + " for aspect " + mcp.getAspectName()); - } - - Urn urn = mcp.getEntityUrn(); - if (urn == null) { - urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); - } - - PatchBatchItemBuilder builder = PatchBatchItem.builder() - .urn(urn) - .aspectName(mcp.getAspectName()) - .systemMetadata(mcp.getSystemMetadata()) - .metadataChangeProposal(mcp) - .patch(convertToJsonPatch(mcp)); - - return builder.build(entityRegistry); - } - - private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { - this.entitySpec = entitySpec; - return this; - } - - private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { - this.aspectSpec = aspectSpec; - return this; - } - - private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { - JsonNode json; - try { - json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); - return JsonPatch.fromJson(json); - } catch (IOException e) { - throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); - } - } + private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - PatchBatchItem that = (PatchBatchItem) o; - return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && patch.equals(that.patch); + private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; } - @Override - public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, patch); + private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { + JsonNode json; + try { + json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); + return JsonPatch.fromJson(json); + } catch (IOException e) { + throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); + } } + } - @Override - public String toString() { - return "PatchBatchItem{" - + "urn=" + urn - + ", aspectName='" + aspectName - + '\'' - + ", systemMetadata=" + systemMetadata - + ", patch=" + patch - + '}'; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } + PatchBatchItem that = (PatchBatchItem) o; + return urn.equals(that.urn) + && aspectName.equals(that.aspectName) + && Objects.equals(systemMetadata, that.systemMetadata) + && patch.equals(that.patch); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, patch); + } + + @Override + public String toString() { + return "PatchBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + 
aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", patch=" + + patch + + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java index bd58d267a8308..c232e4846f7d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean.transactions; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.datahub.util.exception.ModelConversionException; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -16,158 +18,172 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.sql.Timestamp; +import java.util.Objects; import lombok.Builder; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import java.sql.Timestamp; -import java.util.Objects; - -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - @Slf4j @Getter @Builder(toBuilder = true) public class UpsertBatchItem extends AbstractBatchItem { - // urn an urn associated with the new aspect - private final Urn urn; - // aspectName name of the aspect being inserted - private final String aspectName; - private final SystemMetadata systemMetadata; - - private final RecordTemplate aspect; - - private final MetadataChangeProposal metadataChangeProposal; - - // derived - private final EntitySpec entitySpec; - private final AspectSpec aspectSpec; - - @Override - public ChangeType getChangeType() { - return ChangeType.UPSERT; + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final RecordTemplate aspect; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.UPSERT; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { + EntityAspect latest = new EntityAspect(); + latest.setAspect(getAspectName()); + latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); + latest.setUrn(getUrn().toString()); + latest.setVersion(ASPECT_LATEST_VERSION); + latest.setCreatedOn(new Timestamp(auditStamp.getTime())); + latest.setCreatedBy(auditStamp.getActor().toString()); + return latest; + } + + public static class UpsertBatchItemBuilder { + + public UpsertBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); + + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); + + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); + + ValidationUtils.validateRecordTemplate( + entityRegistry, this.entitySpec, this.urn, this.aspect); + + return new UpsertBatchItem( + this.urn, + this.aspectName, + 
AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), + this.aspect, + this.metadataChangeProposal, + this.entitySpec, + this.aspectSpec); } - @Override - public void validateUrn(EntityRegistry entityRegistry, Urn urn) { - EntityUtils.validateUrn(entityRegistry, urn); + public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { + throw new IllegalArgumentException( + "Invalid MCP, this class only supports change type of UPSERT."); + } + + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { + throw new UnsupportedOperationException( + "ChangeType not supported: " + + mcp.getChangeType() + + " for aspect " + + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + UpsertBatchItemBuilder builder = + UpsertBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .aspect(convertToRecordTemplate(mcp, aspectSpec)); + + return builder.build(entityRegistry); } - public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { - EntityAspect latest = new EntityAspect(); - latest.setAspect(getAspectName()); - latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); - latest.setUrn(getUrn().toString()); - latest.setVersion(ASPECT_LATEST_VERSION); - latest.setCreatedOn(new Timestamp(auditStamp.getTime())); - latest.setCreatedBy(auditStamp.getActor().toString()); - return latest; + private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; } - public static class UpsertBatchItemBuilder { - - public UpsertBatchItem build(EntityRegistry entityRegistry) { - EntityUtils.validateUrn(entityRegistry, this.urn); - log.debug("entity type = {}", this.urn.getEntityType()); - - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); - log.debug("entity spec = {}", this.entitySpec); - - aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); - log.debug("aspect spec = {}", this.aspectSpec); - - ValidationUtils.validateRecordTemplate(entityRegistry, this.entitySpec, this.urn, this.aspect); - - return new UpsertBatchItem(this.urn, this.aspectName, AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), - this.aspect, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); - } - - public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { - if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { - throw new IllegalArgumentException("Invalid MCP, this class only supports change type of UPSERT."); - } - - log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); - AspectSpec aspectSpec = validateAspect(mcp, entitySpec); - - if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { - throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() - + " for aspect " + mcp.getAspectName()); - } - - Urn urn = mcp.getEntityUrn(); - if (urn == null) { - urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); - } - - UpsertBatchItemBuilder builder = UpsertBatchItem.builder() 
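
As a usage note for the builder above: a short sketch of turning a single UPSERT proposal into the aspect row stored at the latest version. UpsertBatchItemBuilder.build(...) and toLatestEntityAspect(...) are the methods in this file; the UpsertExample wrapper is hypothetical.

import com.linkedin.common.AuditStamp;
import com.linkedin.metadata.entity.EntityAspect;
import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.mxe.MetadataChangeProposal;

// Hypothetical caller; build(...) validates the urn and aspect against the registry.
class UpsertExample {
  static EntityAspect toLatestRow(
      MetadataChangeProposal mcp, EntityRegistry registry, AuditStamp stamp) {
    UpsertBatchItem item = UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, registry);
    return item.toLatestEntityAspect(stamp); // row stored at ASPECT_LATEST_VERSION
  }
}
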
- .urn(urn) - .aspectName(mcp.getAspectName()) - .systemMetadata(mcp.getSystemMetadata()) - .metadataChangeProposal(mcp) - .aspect(convertToRecordTemplate(mcp, aspectSpec)); - - return builder.build(entityRegistry); - } - - private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { - this.entitySpec = entitySpec; - return this; - } - - private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { - this.aspectSpec = aspectSpec; - return this; - } - - private static RecordTemplate convertToRecordTemplate(MetadataChangeProposal mcp, AspectSpec aspectSpec) { - RecordTemplate aspect; - try { - aspect = GenericRecordUtils.deserializeAspect(mcp.getAspect().getValue(), - mcp.getAspect().getContentType(), aspectSpec); - ValidationUtils.validateOrThrow(aspect); - } catch (ModelConversionException e) { - throw new RuntimeException( - String.format("Could not deserialize %s for aspect %s", mcp.getAspect().getValue(), - mcp.getAspectName())); - } - log.debug("aspect = {}", aspect); - return aspect; - } + private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - UpsertBatchItem that = (UpsertBatchItem) o; - return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && aspect.equals(that.aspect); + private static RecordTemplate convertToRecordTemplate( + MetadataChangeProposal mcp, AspectSpec aspectSpec) { + RecordTemplate aspect; + try { + aspect = + GenericRecordUtils.deserializeAspect( + mcp.getAspect().getValue(), mcp.getAspect().getContentType(), aspectSpec); + ValidationUtils.validateOrThrow(aspect); + } catch (ModelConversionException e) { + throw new RuntimeException( + String.format( + "Could not deserialize %s for aspect %s", + mcp.getAspect().getValue(), mcp.getAspectName())); + } + log.debug("aspect = {}", aspect); + return aspect; } + } - @Override - public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, aspect); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "UpsertBatchItem{" - + "urn=" - + urn - + ", aspectName='" - + aspectName - + '\'' - + ", systemMetadata=" - + systemMetadata - + ", aspect=" - + aspect - + '}'; + if (o == null || getClass() != o.getClass()) { + return false; } + UpsertBatchItem that = (UpsertBatchItem) o; + return urn.equals(that.urn) + && aspectName.equals(that.aspectName) + && Objects.equals(systemMetadata, that.systemMetadata) + && aspect.equals(that.aspect); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, aspect); + } + + @Override + public String toString() { + return "UpsertBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", aspect=" + + aspect + + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java index 4c4bfb41867ef..ad8fbfdf2eddd 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java @@ -24,12 +24,10 @@ import 
java.util.stream.Collectors; import lombok.Setter; - public class EntityRegistryUrnValidator implements Validator { private final EntityRegistry _entityRegistry; - @Setter - private EntitySpec currentEntitySpec = null; + @Setter private EntitySpec currentEntitySpec = null; public EntityRegistryUrnValidator(EntityRegistry entityRegistry) { _entityRegistry = entityRegistry; @@ -43,45 +41,61 @@ public void validate(ValidatorContext context) { } protected void validateUrnField(ValidatorContext context) { - if (Type.TYPEREF.equals(context.dataElement().getSchema().getType()) && ((NamedDataSchema) context.dataElement() - .getSchema()).getName().endsWith("Urn")) { + if (Type.TYPEREF.equals(context.dataElement().getSchema().getType()) + && ((NamedDataSchema) context.dataElement().getSchema()).getName().endsWith("Urn")) { try { // Validate Urn matches field type and that it generates a valid key String urnStr = (String) context.dataElement().getValue(); Urn urn = Urn.createFromString(urnStr); EntitySpec entitySpec = _entityRegistry.getEntitySpec(urn.getEntityType()); - RecordTemplate entityKey = EntityKeyUtils.convertUrnToEntityKey(urn, - entitySpec.getKeyAspectSpec()); + RecordTemplate entityKey = + EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); NamedDataSchema namedDataSchema = ((NamedDataSchema) context.dataElement().getSchema()); Class<? extends Urn> urnClass; try { - String schemaName = ((Map<String, String>) namedDataSchema.getProperties().get("java")).get("class"); + String schemaName = + ((Map<String, String>) namedDataSchema.getProperties().get("java")).get("class"); urnClass = (Class<? extends Urn>) Class.forName(schemaName); urnClass.getDeclaredMethod("createFromString", String.class).invoke(null, urnStr); } catch (ClassNotFoundException | ClassCastException | NoSuchMethodException e) { - throw new IllegalArgumentException("Unrecognized Urn class: " + namedDataSchema.getName(), e); + throw new IllegalArgumentException( + "Unrecognized Urn class: " + namedDataSchema.getName(), e); } catch (InvocationTargetException | IllegalAccessException e) { - throw new IllegalArgumentException("Unable to instantiate urn type: " + namedDataSchema.getName() + " with urn: " + urnStr, e); + throw new IllegalArgumentException( + "Unable to instantiate urn type: " + + namedDataSchema.getName() + + " with urn: " + + urnStr, + e); } // Validate generic Urn is valid entity type for relationship destination PathSpec fieldPath = context.dataElement().getSchemaPathSpec(); - List<RelationshipFieldSpec> relationshipSpecs = currentEntitySpec.getRelationshipFieldSpecs().stream().filter(relationshipFieldSpec -> - relationshipFieldSpec.getPath().equals(fieldPath)) - .collect(Collectors.toList()); + List<RelationshipFieldSpec> relationshipSpecs = + currentEntitySpec.getRelationshipFieldSpecs().stream() + .filter(relationshipFieldSpec -> relationshipFieldSpec.getPath().equals(fieldPath)) + .collect(Collectors.toList()); if (!relationshipSpecs.isEmpty()) { for (RelationshipFieldSpec relationshipFieldSpec : relationshipSpecs) { - boolean isValidDestination = relationshipFieldSpec.getValidDestinationTypes().stream() - .anyMatch(destinationType -> destinationType.equals(urn.getEntityType())); + boolean isValidDestination = + relationshipFieldSpec.getValidDestinationTypes().stream() + .anyMatch(destinationType -> destinationType.equals(urn.getEntityType())); if (!isValidDestination) { throw new IllegalArgumentException( - "Entity type for urn: " + urn + " is not a valid destination for field path: " 
+ fieldPath);
+                "Entity type for urn: "
+                    + urn
+                    + " is not a valid destination for field path: "
+                    + fieldPath);
           }
         }
       }
     } catch (URISyntaxException | IllegalArgumentException e) {
-      context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid: %s",
-          context.dataElement().getValue(), e.getMessage()));
+      context.addResult(
+          new Message(
+              context.dataElement().path(),
+              "\"Provided urn %s\" is invalid: %s",
+              context.dataElement().getValue(),
+              e.getMessage()));
       context.setHasFix(false);
     }
   }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java
index e7934bc47be3f..12e39f0349143 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java
@@ -15,45 +15,41 @@
 @Slf4j
 public class RecordTemplateValidator {
 
-  private static final ValidationOptions DEFAULT_VALIDATION_OPTIONS = new ValidationOptions(
-      RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT,
-      CoercionMode.NORMAL,
-      UnrecognizedFieldMode.DISALLOW
-  );
-
-  private static final UrnValidator URN_VALIDATOR = new UrnValidator();
-
-  /**
-   * Validates a {@link RecordTemplate} and applies a function if validation fails
-   *
-   * @param record record to be validated.
-   */
-  public static void validate(RecordTemplate record, Consumer<ValidationResult> onValidationFailure) {
-    final ValidationResult result = ValidateDataAgainstSchema.validate(
-        record,
-        DEFAULT_VALIDATION_OPTIONS,
-        URN_VALIDATOR);
-    if (!result.isValid()) {
-      onValidationFailure.accept(result);
-    }
+  private static final ValidationOptions DEFAULT_VALIDATION_OPTIONS =
+      new ValidationOptions(
+          RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT,
+          CoercionMode.NORMAL,
+          UnrecognizedFieldMode.DISALLOW);
+
+  private static final UrnValidator URN_VALIDATOR = new UrnValidator();
+
+  /**
+   * Validates a {@link RecordTemplate} and applies a function if validation fails
+   *
+   * @param record record to be validated.
+   */
+  public static void validate(
+      RecordTemplate record, Consumer<ValidationResult> onValidationFailure) {
+    final ValidationResult result =
+        ValidateDataAgainstSchema.validate(record, DEFAULT_VALIDATION_OPTIONS, URN_VALIDATOR);
+    if (!result.isValid()) {
+      onValidationFailure.accept(result);
     }
-
-  /**
-   * Validates a {@link RecordTemplate} and applies a function if validation fails
-   *
-   * @param record record to be validated.
-   */
-  public static void validate(RecordTemplate record, Consumer<ValidationResult> onValidationFailure, Validator validator) {
-    final ValidationResult result = ValidateDataAgainstSchema.validate(
-        record,
-        DEFAULT_VALIDATION_OPTIONS,
-        validator);
-    if (!result.isValid()) {
-      onValidationFailure.accept(result);
-    }
+  }
+
+  /**
+   * Validates a {@link RecordTemplate} and applies a function if validation fails
+   *
+   * @param record record to be validated.
+   */
+  public static void validate(
+      RecordTemplate record, Consumer<ValidationResult> onValidationFailure, Validator validator) {
+    final ValidationResult result =
+        ValidateDataAgainstSchema.validate(record, DEFAULT_VALIDATION_OPTIONS, validator);
+    if (!result.isValid()) {
+      onValidationFailure.accept(result);
     }
+  }
 
-  private RecordTemplateValidator() {
-
-  }
+  private RecordTemplateValidator() {}
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java
index e0b026fa84d18..6a86a02a94449 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java
@@ -1,8 +1,6 @@
 package com.linkedin.metadata.entity.validation;
 
-/**
- * Exception thrown when a metadata record cannot be validated against its schema.
- */
+/** Exception thrown when a metadata record cannot be validated against its schema. */
 public class ValidationException extends RuntimeException {
   public ValidationException(final String message) {
     super(message);
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java
index 6182b27333cbb..7f23bacdc4758 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java
@@ -7,27 +7,27 @@
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
-import lombok.extern.slf4j.Slf4j;
-
 import java.util.function.Consumer;
-
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class ValidationUtils {
 
   /**
-   * Validates a {@link RecordTemplate} and throws {@link com.linkedin.restli.server.RestLiServiceException}
-   * if validation fails.
+   * Validates a {@link RecordTemplate} and throws {@link
+   * com.linkedin.restli.server.RestLiServiceException} if validation fails.
    *
    * @param record record to be validated.
    */
   public static void validateOrThrow(RecordTemplate record) {
-    RecordTemplateValidator.validate(record, validationResult -> {
-      throw new ValidationException(
-          String.format("Failed to validate record with class %s: %s",
-              record.getClass().getName(),
-              validationResult.getMessages().toString()));
-    });
+    RecordTemplateValidator.validate(
+        record,
+        validationResult -> {
+          throw new ValidationException(
+              String.format(
+                  "Failed to validate record with class %s: %s",
+                  record.getClass().getName(), validationResult.getMessages().toString()));
+        });
   }
 
   /**
@@ -36,41 +36,51 @@ public static void validateOrThrow(RecordTemplate record) {
   * @param record record to be validated. 
*/ public static void validateOrWarn(RecordTemplate record) { - RecordTemplateValidator.validate(record, validationResult -> { - log.warn(String.format("Failed to validate record %s against its schema.", record)); - }); + RecordTemplateValidator.validate( + record, + validationResult -> { + log.warn(String.format("Failed to validate record %s against its schema.", record)); + }); } public static AspectSpec validate(EntitySpec entitySpec, String aspectName) { if (aspectName == null || aspectName.isEmpty()) { - throw new UnsupportedOperationException("Aspect name is required for create and update operations"); + throw new UnsupportedOperationException( + "Aspect name is required for create and update operations"); } AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { throw new RuntimeException( - String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); + String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); } return aspectSpec; } - public static void validateRecordTemplate(EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { + public static void validateRecordTemplate( + EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); validator.setCurrentEntitySpec(entitySpec); - Consumer<ValidationResult> resultFunction = validationResult -> { - throw new IllegalArgumentException("Invalid format for aspect: " + entitySpec.getName() + "\n Cause: " - + validationResult.getMessages()); }; - RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); + Consumer<ValidationResult> resultFunction = + validationResult -> { + throw new IllegalArgumentException( + "Invalid format for aspect: " + + entitySpec.getName() + + "\n Cause: " + + validationResult.getMessages()); + }; + RecordTemplateValidator.validate( + EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); RecordTemplateValidator.validate(aspect, resultFunction, validator); } - public static void validateRecordTemplate(EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { + public static void validateRecordTemplate( + EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { EntitySpec entitySpec = entityRegistry.getEntitySpec(urn.getEntityType()); validateRecordTemplate(entityRegistry, entitySpec, urn, aspect); } - private ValidationUtils() { - } -} \ No newline at end of file + private ValidationUtils() {} +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java index 90e171d3c357e..becf86cdbe92f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java @@ -4,22 +4,20 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.snapshot.Snapshot; -import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataAuditOperation; +import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. 
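
To make the failure-callback contract of the validators above concrete: a brief sketch of a strict caller that escalates any schema violation. The two-argument validate(...) and ValidationException are from this package; the ValidateExample wrapper is hypothetical.

import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.entity.validation.RecordTemplateValidator;
import com.linkedin.metadata.entity.validation.ValidationException;

// Hypothetical caller; the consumer runs only when schema validation fails.
class ValidateExample {
  static void validateStrict(RecordTemplate record) {
    RecordTemplateValidator.validate(
        record,
        result -> {
          throw new ValidationException("Invalid record: " + result.getMessages());
        });
  }
}
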
- */ +/** Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. */ public interface EntityEventProducer { /** - * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a - * new & previous Entity {@link Snapshot}. - * @param urn the urn associated with the entity changed + * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a new & previous Entity {@link + * Snapshot}. + * + * @param urn the urn associated with the entity changed * @param oldSnapshot a {@link RecordTemplate} corresponding to the old snapshot. * @param newSnapshot a {@link RecordTemplate} corresponding to the new snapshot. * @param oldSystemMetadata @@ -31,12 +29,10 @@ void produceMetadataAuditEvent( @Nonnull final Snapshot newSnapshot, @Nullable SystemMetadata oldSystemMetadata, @Nullable SystemMetadata newSystemMetadata, - MetadataAuditOperation operation - ); + MetadataAuditOperation operation); /** - * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a - * new & previous aspect. + * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a new & previous aspect. * * @param urn the urn associated with the entity changed * @param aspectSpec aspect spec of the aspect being updated @@ -45,6 +41,5 @@ void produceMetadataAuditEvent( void produceMetadataChangeLog( @Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog - ); + @Nonnull final MetadataChangeLog metadataChangeLog); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java index ffadc07124727..a809c7f9a3e31 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java @@ -12,61 +12,52 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. - */ +/** Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. */ public interface EventProducer { /** - * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a - * new & previous aspect. + * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a new & previous aspect. * * @param urn the urn associated with the entity changed * @param aspectSpec aspect spec of the aspect being updated * @param metadataChangeLog metadata change log to push into MCL kafka topic - * * @return A {@link Future} object that reports when the message has been produced. */ Future<?> produceMetadataChangeLog( @Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog - ); + @Nonnull final MetadataChangeLog metadataChangeLog); /** * Produces a {@link com.linkedin.mxe.MetadataChangeProposal} as an async update to an entity * * @param urn the urn associated with the change proposal. * @param metadataChangeProposal metadata change proposal to push into MCP kafka topic. - * * @return A {@link Future} object that reports when the message has been produced. */ @WithSpan - Future<?> produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull MetadataChangeProposal metadataChangeProposal); + Future<?> produceMetadataChangeProposal( + @Nonnull final Urn urn, @Nonnull MetadataChangeProposal metadataChangeProposal); /** * Produces a generic platform "event". 
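
Concretely, a caller of this interface might produce a platform event as in the sketch below. The producePlatformEvent(name, key, payload) signature is the one declared in this file; the event name and the blocking get() are illustrative assumptions, and PlatformEventExample is hypothetical.

import com.linkedin.metadata.event.EventProducer;
import com.linkedin.mxe.PlatformEvent;
import java.util.concurrent.Future;

// Hypothetical caller; a null key falls back to the event name.
class PlatformEventExample {
  static void emit(EventProducer producer, PlatformEvent payload) throws Exception {
    Future<?> ack = producer.producePlatformEvent("entityChangeEvent", null, payload);
    ack.get(); // block until the produce completes
  }
}
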
* - * @param name the name, or type, of the event to produce, as defined in the {@link EntityRegistry}. - * @param key an optional partitioning key for the event. If not provided, the name of the event will be used. - * @param payload the event payload itself. This will be serialized to JSON and produced as a system event. - * + * @param name the name, or type, of the event to produce, as defined in the {@link + * EntityRegistry}. + * @param key an optional partitioning key for the event. If not provided, the name of the event + * will be used. + * @param payload the event payload itself. This will be serialized to JSON and produced as a + * system event. * @return A {@link Future} object that reports when the message has been produced. */ Future<?> producePlatformEvent( - @Nonnull String name, - @Nullable String key, - @Nonnull PlatformEvent payload - ); + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent payload); /** - * Creates an entry on the history log of when the indices were last rebuilt with the latest configuration. + * Creates an entry on the history log of when the indices were last rebuilt with the latest + * configuration. * * @param event the history event to send to the DataHub Upgrade history topic */ - void produceDataHubUpgradeHistoryEvent( - @Nonnull DataHubUpgradeHistoryEvent event - ); + void produceDataHubUpgradeHistoryEvent(@Nonnull DataHubUpgradeHistoryEvent event); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java index 891844045b016..c54ba4a222b73 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; + import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; import com.linkedin.common.EntityRelationships; @@ -14,54 +16,60 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; - - @Slf4j public class JavaGraphClient implements GraphClient { GraphService _graphService; + public JavaGraphClient(@Nonnull GraphService graphService) { this._graphService = graphService; } /** - * Returns a list of related entities for a given entity, set of edge types, and direction relative to the - * source node + * Returns a list of related entities for a given entity, set of edge types, and direction + * relative to the source node */ @Nonnull @Override - public EntityRelationships getRelatedEntities(String rawUrn, List<String> relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count, String actor) { + public EntityRelationships getRelatedEntities( + String rawUrn, + List<String> relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count, + String actor) { start = start == null ? 0 : start; count = count == null ? 
DEFAULT_PAGE_SIZE : count; RelatedEntitiesResult relatedEntitiesResult = - _graphService.findRelatedEntities(null, + _graphService.findRelatedEntities( + null, QueryUtils.newFilter("urn", rawUrn), null, EMPTY_FILTER, relationshipTypes, QueryUtils.newRelationshipFilter(EMPTY_FILTER, direction), start, - count - ); + count); - final EntityRelationshipArray entityArray = new EntityRelationshipArray( - relatedEntitiesResult.getEntities().stream().map( - entity -> { - try { - return new EntityRelationship() - .setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType()); - } catch (URISyntaxException e) { - throw new RuntimeException( - String.format("Failed to convert urnStr %s found in the Graph to an Urn object", entity.getUrn())); - } - } - ).collect(Collectors.toList()) - ); + final EntityRelationshipArray entityArray = + new EntityRelationshipArray( + relatedEntitiesResult.getEntities().stream() + .map( + entity -> { + try { + return new EntityRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to convert urnStr %s found in the Graph to an Urn object", + entity.getUrn())); + } + }) + .collect(Collectors.toList())); return new EntityRelationships() .setStart(relatedEntitiesResult.getStart()) @@ -71,14 +79,23 @@ public EntityRelationships getRelatedEntities(String rawUrn, List<String> relati } /** - * Returns lineage relationships for given entity in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * Returns lineage relationships for given entity in the DataHub graph. Lineage relationship + * denotes whether an entity is directly upstream or downstream of another entity */ @Nonnull @Override - public EntityLineageResult getLineageEntities(String rawUrn, LineageDirection direction, @Nullable Integer start, - @Nullable Integer count, int maxHops, String actor) { - return _graphService.getLineage(UrnUtils.getUrn(rawUrn), direction, start != null ? start : 0, - count != null ? count : 100, maxHops); + public EntityLineageResult getLineageEntities( + String rawUrn, + LineageDirection direction, + @Nullable Integer start, + @Nullable Integer count, + int maxHops, + String actor) { + return _graphService.getLineage( + UrnUtils.getUrn(rawUrn), + direction, + start != null ? start : 0, + count != null ? 
count : 100, + maxHops); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java index 7a2f0825b31cc..bdf405fe36c07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.Siblings; @@ -20,9 +22,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class SiblingGraphService { @@ -31,58 +30,55 @@ public class SiblingGraphService { private final GraphService _graphService; @Nonnull - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops) { - return ValidationUtils.validateEntityLineageResult(getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - false, - new HashSet<>(), - null, - null), - _entityService); + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops) { + return ValidationUtils.validateEntityLineageResult( + getLineage( + entityUrn, direction, offset, count, maxHops, false, new HashSet<>(), null, null), + _entityService); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - int offset, int count, int maxHops, boolean separateSiblings, @Nonnull Set<Urn> visitedUrns, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops, + boolean separateSiblings, + @Nonnull Set<Urn> visitedUrns, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { if (separateSiblings) { - return ValidationUtils.validateEntityLineageResult(_graphService.getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - startTimeMillis, - endTimeMillis), _entityService); + return ValidationUtils.validateEntityLineageResult( + _graphService.getLineage( + entityUrn, direction, offset, count, maxHops, startTimeMillis, endTimeMillis), + _entityService); } if (maxHops > 1) { throw new UnsupportedOperationException( - String.format("More than 1 hop is not supported for %s", this.getClass().getSimpleName())); + String.format( + "More than 1 hop is not supported for %s", this.getClass().getSimpleName())); } EntityLineageResult entityLineage = _graphService.getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - startTimeMillis, - endTimeMillis); + entityUrn, direction, offset, count, maxHops, startTimeMillis, endTimeMillis); - 
Siblings siblingAspectOfEntity = (Siblings) _entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + Siblings siblingAspectOfEntity = + (Siblings) _entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); // if you have siblings, we want to fetch their lineage too and merge it in if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { @@ -104,19 +100,23 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi if (visitedUrns.contains(siblingUrn)) { continue; } - // need to call siblingGraphService to get sibling results for this sibling entity in case there is more than one sibling - EntityLineageResult nextEntityLineage = filterLineageResultFromSiblings(siblingUrn, allSiblingsInGroup, - getLineage( + // need to call siblingGraphService to get sibling results for this sibling entity in case + // there is more than one sibling + EntityLineageResult nextEntityLineage = + filterLineageResultFromSiblings( siblingUrn, - direction, - offset, - count, - maxHops, - false, - visitedUrns, - startTimeMillis, - endTimeMillis), - entityLineage); + allSiblingsInGroup, + getLineage( + siblingUrn, + direction, + offset, + count, + maxHops, + false, + visitedUrns, + startTimeMillis, + endTimeMillis), + entityLineage); // Update offset and count to fetch the correct number of edges from the next sibling node offset = Math.max(0, offset - nextEntityLineage.getTotal()); @@ -124,86 +124,116 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi entityLineage.setFiltered(getFiltered(entityLineage) + getFiltered(nextEntityLineage)); entityLineage = nextEntityLineage; - }; + } + ; } return ValidationUtils.validateEntityLineageResult(entityLineage, _entityService); } private int getFiltered(@Nullable EntityLineageResult entityLineageResult) { - return (entityLineageResult != null && entityLineageResult.getFiltered() != null ? entityLineageResult.getFiltered() : 0); + return (entityLineageResult != null && entityLineageResult.getFiltered() != null + ? 
entityLineageResult.getFiltered() + : 0); } - // takes a lineage result and removes any nodes that are siblings of some other node already in the result + // takes a lineage result and removes any nodes that are siblings of some other node already in + // the result private EntityLineageResult filterLineageResultFromSiblings( @Nonnull final Urn urn, @Nonnull final Set<Urn> allSiblingsInGroup, @Nonnull final EntityLineageResult entityLineageResult, - @Nullable final EntityLineageResult existingResult - ) { + @Nullable final EntityLineageResult existingResult) { int numFiltered = 0; // 1) remove the source entities siblings from this entity's downstreams - final Map<Boolean, List<LineageRelationship>> partitionedFilteredRelationships = entityLineageResult.getRelationships() - .stream().collect(Collectors.partitioningBy( - lineageRelationship -> !allSiblingsInGroup.contains(lineageRelationship.getEntity()) - || lineageRelationship.getEntity().equals(urn))); + final Map<Boolean, List<LineageRelationship>> partitionedFilteredRelationships = + entityLineageResult.getRelationships().stream() + .collect( + Collectors.partitioningBy( + lineageRelationship -> + !allSiblingsInGroup.contains(lineageRelationship.getEntity()) + || lineageRelationship.getEntity().equals(urn))); numFiltered += partitionedFilteredRelationships.get(Boolean.FALSE).size(); - final List<LineageRelationship> filteredRelationships = partitionedFilteredRelationships.get(Boolean.TRUE); + final List<LineageRelationship> filteredRelationships = + partitionedFilteredRelationships.get(Boolean.TRUE); // 2) filter out existing lineage to avoid duplicates in our combined result - final Set<Urn> existingUrns = existingResult != null - ? existingResult.getRelationships().stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()) - : new HashSet<>(); - - Map<Boolean, List<LineageRelationship>> partitionedUniqueFilteredRelationships = filteredRelationships.stream().collect( - Collectors.partitioningBy(lineageRelationship -> !existingUrns.contains(lineageRelationship.getEntity()))); + final Set<Urn> existingUrns = + existingResult != null + ? existingResult.getRelationships().stream() + .map(LineageRelationship::getEntity) + .collect(Collectors.toSet()) + : new HashSet<>(); + + Map<Boolean, List<LineageRelationship>> partitionedUniqueFilteredRelationships = + filteredRelationships.stream() + .collect( + Collectors.partitioningBy( + lineageRelationship -> + !existingUrns.contains(lineageRelationship.getEntity()))); numFiltered += partitionedUniqueFilteredRelationships.get(Boolean.FALSE).size(); - List<LineageRelationship> uniqueFilteredRelationships = partitionedUniqueFilteredRelationships.get(Boolean.TRUE); + List<LineageRelationship> uniqueFilteredRelationships = + partitionedUniqueFilteredRelationships.get(Boolean.TRUE); // 3) combine this entity's lineage with the lineage we've already seen - final List<LineageRelationship> combinedResults = Stream.concat( - uniqueFilteredRelationships.stream(), - existingResult != null ? existingResult.getRelationships().stream() : ImmutableList.<LineageRelationship>of().stream()) - .collect(Collectors.toList()); + final List<LineageRelationship> combinedResults = + Stream.concat( + uniqueFilteredRelationships.stream(), + existingResult != null + ? 
existingResult.getRelationships().stream() + : ImmutableList.<LineageRelationship>of().stream()) + .collect(Collectors.toList()); // 4) fetch the siblings of each lineage result - final Set<Urn> combinedResultUrns = combinedResults.stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()); + final Set<Urn> combinedResultUrns = + combinedResults.stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()); final Map<Urn, List<RecordTemplate>> siblingAspects = _entityService.getLatestAspects(combinedResultUrns, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); - // 5) if you are not primary & your sibling is in the results, filter yourself out of the return set - Map<Boolean, List<LineageRelationship>> partitionedFilteredSiblings = combinedResults.stream().collect(Collectors.partitioningBy(result -> { - Optional<RecordTemplate> optionalSiblingsAspect = siblingAspects.get(result.getEntity()).stream().filter( - aspect -> aspect instanceof Siblings - ).findAny(); - - if (optionalSiblingsAspect.isEmpty()) { - return true; - } - - final Siblings siblingsAspect = (Siblings) optionalSiblingsAspect.get(); - - if (siblingsAspect.isPrimary()) { - return true; - } - - // if you are not primary and your sibling exists in the result set, filter yourself out - return siblingsAspect.getSiblings().stream().noneMatch(combinedResultUrns::contains); - })); + // 5) if you are not primary & your sibling is in the results, filter yourself out of the return + // set + Map<Boolean, List<LineageRelationship>> partitionedFilteredSiblings = + combinedResults.stream() + .collect( + Collectors.partitioningBy( + result -> { + Optional<RecordTemplate> optionalSiblingsAspect = + siblingAspects.get(result.getEntity()).stream() + .filter(aspect -> aspect instanceof Siblings) + .findAny(); + + if (optionalSiblingsAspect.isEmpty()) { + return true; + } + + final Siblings siblingsAspect = (Siblings) optionalSiblingsAspect.get(); + + if (siblingsAspect.isPrimary()) { + return true; + } + + // if you are not primary and your sibling exists in the result set, filter + // yourself out + return siblingsAspect.getSiblings().stream() + .noneMatch(combinedResultUrns::contains); + })); numFiltered += partitionedFilteredSiblings.get(Boolean.FALSE).size(); uniqueFilteredRelationships = partitionedFilteredSiblings.get(Boolean.TRUE); EntityLineageResult combinedLineageResult = new EntityLineageResult(); combinedLineageResult.setStart(entityLineageResult.getStart()); - combinedLineageResult.setRelationships(new LineageRelationshipArray(uniqueFilteredRelationships)); - combinedLineageResult.setTotal(entityLineageResult.getTotal() + (existingResult != null ? existingResult.getTotal() : 0)); + combinedLineageResult.setRelationships( + new LineageRelationshipArray(uniqueFilteredRelationships)); + combinedLineageResult.setTotal( + entityLineageResult.getTotal() + (existingResult != null ? 
existingResult.getTotal() : 0)); combinedLineageResult.setCount(uniqueFilteredRelationships.size()); - combinedLineageResult.setFiltered(numFiltered + getFiltered(existingResult) + getFiltered(entityLineageResult)); + combinedLineageResult.setFiltered( + numFiltered + getFiltered(existingResult) + getFiltered(entityLineageResult)); return ValidationUtils.validateEntityLineageResult(combinedLineageResult, _entityService); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java index dcef0f9f192ed..393297b64e0d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java @@ -6,93 +6,96 @@ import io.github.resilience4j.retry.Retry; import io.github.resilience4j.retry.RetryConfig; import io.grpc.StatusRuntimeException; -import lombok.extern.slf4j.Slf4j; - import java.time.Duration; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DgraphExecutor { - // requests are retried with an exponential randomized backoff - // wait 0.01s, 0.02s, 0.04s, 0.08s, ..., 10s, all ±50% - private static final Duration INITIAL_DURATION = Duration.ofMillis(10); - private static final Duration MAX_DURATION = Duration.ofSeconds(10); - private static final double BACKOFF_MULTIPLIER = 2.0; - private static final double RANDOMIZATION_FACTOR = 0.5; + // requests are retried with an exponential randomized backoff + // wait 0.01s, 0.02s, 0.04s, 0.08s, ..., 10s, all ±50% + private static final Duration INITIAL_DURATION = Duration.ofMillis(10); + private static final Duration MAX_DURATION = Duration.ofSeconds(10); + private static final double BACKOFF_MULTIPLIER = 2.0; + private static final double RANDOMIZATION_FACTOR = 0.5; - private final DgraphClient _client; - private final Retry _retry; + private final DgraphClient _client; + private final Retry _retry; - public DgraphExecutor(DgraphClient client, int maxAttempts) { - this._client = client; + public DgraphExecutor(DgraphClient client, int maxAttempts) { + this._client = client; - RetryConfig config = RetryConfig.custom() - .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(INITIAL_DURATION, BACKOFF_MULTIPLIER, RANDOMIZATION_FACTOR, MAX_DURATION)) - .retryOnException(DgraphExecutor::isRetryableException) - .failAfterMaxAttempts(true) - .maxAttempts(maxAttempts) - .build(); - this._retry = Retry.of("DgraphExecutor", config); - } + RetryConfig config = + RetryConfig.custom() + .intervalFunction( + IntervalFunction.ofExponentialRandomBackoff( + INITIAL_DURATION, BACKOFF_MULTIPLIER, RANDOMIZATION_FACTOR, MAX_DURATION)) + .retryOnException(DgraphExecutor::isRetryableException) + .failAfterMaxAttempts(true) + .maxAttempts(maxAttempts) + .build(); + this._retry = Retry.of("DgraphExecutor", config); + } - /** - * Executes the given DgraphClient call and retries retry-able exceptions. - * Subsequent executions will experience an exponential randomized backoff. 
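
For context, the retry wiring in DgraphExecutor composes directly from resilience4j primitives; below is a standalone sketch using the same constants that appear in this patch (10ms initial interval, 2.0 multiplier, +/-50% jitter, 10s cap, 160 attempts). The retried supplier and the simplified retry predicate are illustrative:

import io.github.resilience4j.core.IntervalFunction;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import java.time.Duration;

public class RetryBackoffSketch {
  public static void main(String[] args) {
    // Exponential randomized backoff: waits 0.01s, 0.02s, 0.04s, ..., capped at 10s, each +/-50%.
    RetryConfig config =
        RetryConfig.custom()
            .intervalFunction(
                IntervalFunction.ofExponentialRandomBackoff(
                    Duration.ofMillis(10), 2.0, 0.5, Duration.ofSeconds(10)))
            // Simplified stand-in for DgraphExecutor::isRetryableException.
            .retryOnException(t -> t instanceof RuntimeException)
            .failAfterMaxAttempts(true)
            .maxAttempts(160)
            .build();
    Retry retry = Retry.of("sketch", config);
    // Each thrown RuntimeException triggers another attempt with the backoff above.
    String result = retry.executeSupplier(() -> "ok");
    System.out.println(result);
  }
}
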
- * - * @param func call on the provided DgraphClient - * @param <T> return type of the function - * @return return value of the function - * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded - */ - public <T> T executeFunction(Function<DgraphClient, T> func) { - return Retry.decorateFunction(this._retry, func).apply(_client); - } + /** + * Executes the given DgraphClient call and retries retry-able exceptions. Subsequent executions + * will experience an exponential randomized backoff. + * + * @param func call on the provided DgraphClient + * @param <T> return type of the function + * @return return value of the function + * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded + */ + public <T> T executeFunction(Function<DgraphClient, T> func) { + return Retry.decorateFunction(this._retry, func).apply(_client); + } - /** - * Executes the given DgraphClient call and retries retry-able exceptions. - * Subsequent executions will experience an exponential randomized backoff. - * - * @param func call on the provided DgraphClient - * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded - */ - public void executeConsumer(Consumer<DgraphClient> func) { - this._retry.executeSupplier(() -> { - func.accept(_client); - return null; + /** + * Executes the given DgraphClient call and retries retry-able exceptions. Subsequent executions + * will experience an exponential randomized backoff. + * + * @param func call on the provided DgraphClient + * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded + */ + public void executeConsumer(Consumer<DgraphClient> func) { + this._retry.executeSupplier( + () -> { + func.accept(_client); + return null; }); - } + } - /** - * Defines which DgraphClient exceptions are being retried. - * - * @param t exception from DgraphClient - * @return true if this exception can be retried - */ - private static boolean isRetryableException(Throwable t) { - // unwrap RuntimeException and ExecutionException - while (true) { - if ((t instanceof RuntimeException || t instanceof ExecutionException) && t.getCause() != null) { - t = t.getCause(); - continue; - } - break; - } + /** + * Defines which DgraphClient exceptions are being retried. 
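
In practice every Dgraph round trip goes through this executor; a minimal usage sketch, mirroring the read-only schema query that appears later in this patch (client and channel construction are assumed):

import com.linkedin.metadata.graph.dgraph.DgraphExecutor;
import io.dgraph.DgraphClient;
import io.dgraph.DgraphProto.Request;
import io.dgraph.DgraphProto.Response;

public class DgraphExecutorUsageSketch {
  public static String fetchSchema(DgraphClient client) {
    // 160 attempts matches the MAX_ATTEMPTS constant used by DgraphGraphService.
    DgraphExecutor executor = new DgraphExecutor(client, 160);
    // Read-only query, transparently retried on transient Dgraph/gRPC failures.
    Response response =
        executor.executeFunction(
            c ->
                c.newReadOnlyTransaction()
                    .doRequest(Request.newBuilder().setQuery("schema { predicate }").build()));
    return response.getJson().toStringUtf8();
  }
}
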
+ * + * @param t exception from DgraphClient + * @return true if this exception can be retried + */ + private static boolean isRetryableException(Throwable t) { + // unwrap RuntimeException and ExecutionException + while (true) { + if ((t instanceof RuntimeException || t instanceof ExecutionException) + && t.getCause() != null) { + t = t.getCause(); + continue; + } + break; + } - // retry-able exceptions - if (t instanceof TxnConflictException - || t instanceof StatusRuntimeException && ( - t.getMessage().contains("operation opIndexing is already running") - || t.getMessage().contains("Please retry") - || t.getMessage().contains("DEADLINE_EXCEEDED:") - || t.getMessage().contains("context deadline exceeded") - || t.getMessage().contains("Only leader can decide to commit or abort") - )) { - log.debug("retrying request due to {}", t.getMessage()); - return true; - } - return false; + // retry-able exceptions + if (t instanceof TxnConflictException + || t instanceof StatusRuntimeException + && (t.getMessage().contains("operation opIndexing is already running") + || t.getMessage().contains("Please retry") + || t.getMessage().contains("DEADLINE_EXCEEDED:") + || t.getMessage().contains("context deadline exceeded") + || t.getMessage().contains("Only leader can decide to commit or abort"))) { + log.debug("retrying request due to {}", t.getMessage()); + return true; } + return false; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java index 14a9a17401702..0d8b7655fddeb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph.dgraph; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; @@ -41,665 +43,740 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DgraphGraphService implements GraphService { - // calls to Dgraph cluster will be retried if they throw retry-able exceptions - // with a max number of attempts of 160 a call will finally fail after around 15 minutes - private static final int MAX_ATTEMPTS = 160; - - private final @Nonnull DgraphExecutor _dgraph; - private final @Nonnull LineageRegistry _lineageRegistry; - - private static final String URN_RELATIONSHIP_TYPE = "urn"; - private static final String TYPE_RELATIONSHIP_TYPE = "type"; - private static final String KEY_RELATIONSHIP_TYPE = "key"; - - - @Getter(lazy = true) - // we want to defer initialization of schema (accessing Dgraph server) to the first time accessing _schema - private final DgraphSchema _schema = getSchema(); - - public DgraphGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull DgraphClient client) { - _lineageRegistry = lineageRegistry; - this._dgraph = new DgraphExecutor(client, MAX_ATTEMPTS); - } - - protected @Nonnull DgraphSchema getSchema() { - Response response = _dgraph.executeFunction(dgraphClient -> - dgraphClient.newReadOnlyTransaction().doRequest( - Request.newBuilder().setQuery("schema { predicate }").build() - ) - ); - DgraphSchema schema = getSchema(response.getJson().toStringUtf8()).withDgraph(_dgraph); - - if 
(schema.isEmpty()) { - Operation setSchema = Operation.newBuilder() - .setSchema("" - + "<urn>: string @index(hash) @upsert .\n" - + "<type>: string @index(hash) .\n" - + "<key>: string @index(hash) .\n" - ) - .build(); - _dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); - } - - return schema; + // calls to Dgraph cluster will be retried if they throw retry-able exceptions + // with a max number of attempts of 160 a call will finally fail after around 15 minutes + private static final int MAX_ATTEMPTS = 160; + + private final @Nonnull DgraphExecutor _dgraph; + private final @Nonnull LineageRegistry _lineageRegistry; + + private static final String URN_RELATIONSHIP_TYPE = "urn"; + private static final String TYPE_RELATIONSHIP_TYPE = "type"; + private static final String KEY_RELATIONSHIP_TYPE = "key"; + + @Getter(lazy = true) + // we want to defer initialization of schema (accessing Dgraph server) to the first time accessing + // _schema + private final DgraphSchema _schema = getSchema(); + + public DgraphGraphService( + @Nonnull LineageRegistry lineageRegistry, @Nonnull DgraphClient client) { + _lineageRegistry = lineageRegistry; + this._dgraph = new DgraphExecutor(client, MAX_ATTEMPTS); + } + + protected @Nonnull DgraphSchema getSchema() { + Response response = + _dgraph.executeFunction( + dgraphClient -> + dgraphClient + .newReadOnlyTransaction() + .doRequest(Request.newBuilder().setQuery("schema { predicate }").build())); + DgraphSchema schema = getSchema(response.getJson().toStringUtf8()).withDgraph(_dgraph); + + if (schema.isEmpty()) { + Operation setSchema = + Operation.newBuilder() + .setSchema( + "" + + "<urn>: string @index(hash) @upsert .\n" + + "<type>: string @index(hash) .\n" + + "<key>: string @index(hash) .\n") + .build(); + _dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); } - protected static @Nonnull DgraphSchema getSchema(@Nonnull String json) { - Map<String, Object> data = getDataFromResponseJson(json); - - Object schemaObj = data.get("schema"); - if (!(schemaObj instanceof List<?>)) { - log.info("The result from Dgraph did not contain a 'schema' field, or that field is not a List"); - return DgraphSchema.empty(); - } - - List<?> schemaList = (List<?>) schemaObj; - Set<String> fieldNames = schemaList.stream().flatMap(fieldObj -> { - if (!(fieldObj instanceof Map)) { - return Stream.empty(); - } - - Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; - if (!(fieldMap.containsKey("predicate") && fieldMap.get("predicate") instanceof String)) { - return Stream.empty(); - } - - String fieldName = (String) fieldMap.get("predicate"); - return Stream.of(fieldName); - }).filter(f -> !f.startsWith("dgraph.")).collect(Collectors.toSet()); + return schema; + } - Object typesObj = data.get("types"); - if (!(typesObj instanceof List<?>)) { - log.info("The result from Dgraph did not contain a 'types' field, or that field is not a List"); - return DgraphSchema.empty(); - } + protected static @Nonnull DgraphSchema getSchema(@Nonnull String json) { + Map<String, Object> data = getDataFromResponseJson(json); - List<?> types = (List<?>) typesObj; - Map<String, Set<String>> typeFields = types.stream().flatMap(typeObj -> { - if (!(typeObj instanceof Map)) { - return Stream.empty(); - } + Object schemaObj = data.get("schema"); + if (!(schemaObj instanceof List<?>)) { + log.info( + "The result from Dgraph did not contain a 'schema' field, or that field is not a List"); + return DgraphSchema.empty(); + } - Map<?, ?> typeMap = (Map<?, ?>) typeObj; - if 
(!(typeMap.containsKey("fields") - && typeMap.containsKey("name") - && typeMap.get("fields") instanceof List<?> - && typeMap.get("name") instanceof String)) { - return Stream.empty(); - } + List<?> schemaList = (List<?>) schemaObj; + Set<String> fieldNames = + schemaList.stream() + .flatMap( + fieldObj -> { + if (!(fieldObj instanceof Map)) { + return Stream.empty(); + } - String typeName = (String) typeMap.get("name"); - List<?> fieldsList = (List<?>) typeMap.get("fields"); + Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; + if (!(fieldMap.containsKey("predicate") + && fieldMap.get("predicate") instanceof String)) { + return Stream.empty(); + } + + String fieldName = (String) fieldMap.get("predicate"); + return Stream.of(fieldName); + }) + .filter(f -> !f.startsWith("dgraph.")) + .collect(Collectors.toSet()); + + Object typesObj = data.get("types"); + if (!(typesObj instanceof List<?>)) { + log.info( + "The result from Dgraph did not contain a 'types' field, or that field is not a List"); + return DgraphSchema.empty(); + } - Set<String> fields = fieldsList.stream().flatMap(fieldObj -> { - if (!(fieldObj instanceof Map<?, ?>)) { + List<?> types = (List<?>) typesObj; + Map<String, Set<String>> typeFields = + types.stream() + .flatMap( + typeObj -> { + if (!(typeObj instanceof Map)) { return Stream.empty(); - } + } - Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; - if (!(fieldMap.containsKey("name") && fieldMap.get("name") instanceof String)) { + Map<?, ?> typeMap = (Map<?, ?>) typeObj; + if (!(typeMap.containsKey("fields") + && typeMap.containsKey("name") + && typeMap.get("fields") instanceof List<?> + && typeMap.get("name") instanceof String)) { return Stream.empty(); - } + } + + String typeName = (String) typeMap.get("name"); + List<?> fieldsList = (List<?>) typeMap.get("fields"); + + Set<String> fields = + fieldsList.stream() + .flatMap( + fieldObj -> { + if (!(fieldObj instanceof Map<?, ?>)) { + return Stream.empty(); + } + + Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; + if (!(fieldMap.containsKey("name") + && fieldMap.get("name") instanceof String)) { + return Stream.empty(); + } + + String fieldName = (String) fieldMap.get("name"); + return Stream.of(fieldName); + }) + .filter(f -> !f.startsWith("dgraph.")) + .collect(Collectors.toSet()); + return Stream.of(Pair.of(typeName, fields)); + }) + .filter(t -> !t.getKey().startsWith("dgraph.")) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + + return new DgraphSchema(fieldNames, typeFields); + } + + @Override + public LineageRegistry getLineageRegistry() { + return _lineageRegistry; + } + + @Override + public void addEdge(Edge edge) { + log.debug( + String.format( + "Adding Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); + + // add the relationship type to the schema + // TODO: translate edge name to allowed dgraph uris + String sourceEntityType = getDgraphType(edge.getSource()); + String relationshipType = edge.getRelationshipType(); + get_schema() + .ensureField( + sourceEntityType, + relationshipType, + URN_RELATIONSHIP_TYPE, + TYPE_RELATIONSHIP_TYPE, + KEY_RELATIONSHIP_TYPE); + + // lookup the source and destination nodes + // TODO: add escape for string values + String query = + String.format( + "query {\n" + + " src as var(func: eq(urn, \"%s\"))\n" + + " dst as var(func: eq(urn, \"%s\"))\n" + + "}", + edge.getSource(), edge.getDestination()); + String srcVar = "uid(src)"; + String dstVar = "uid(dst)"; + + // edge case: source and destination are same node + if 
(edge.getSource().equals(edge.getDestination())) { + query = + String.format( + "query {\n" + " node as var(func: eq(urn, \"%s\"))\n" + "}", edge.getSource()); + srcVar = "uid(node)"; + dstVar = "uid(node)"; + } + + // create source and destination nodes if they do not exist + // and create the new edge between them + // TODO: add escape for string values + // TODO: translate edge name to allowed dgraph uris + StringJoiner mutations = new StringJoiner("\n"); + mutations.add( + String.format("%s <dgraph.type> \"%s\" .", srcVar, getDgraphType(edge.getSource()))); + mutations.add(String.format("%s <urn> \"%s\" .", srcVar, edge.getSource())); + mutations.add(String.format("%s <type> \"%s\" .", srcVar, edge.getSource().getEntityType())); + mutations.add(String.format("%s <key> \"%s\" .", srcVar, edge.getSource().getEntityKey())); + if (!edge.getSource().equals(edge.getDestination())) { + mutations.add( + String.format("%s <dgraph.type> \"%s\" .", dstVar, getDgraphType(edge.getDestination()))); + mutations.add(String.format("%s <urn> \"%s\" .", dstVar, edge.getDestination())); + mutations.add( + String.format("%s <type> \"%s\" .", dstVar, edge.getDestination().getEntityType())); + mutations.add( + String.format("%s <key> \"%s\" .", dstVar, edge.getDestination().getEntityKey())); + } + mutations.add(String.format("%s <%s> %s .", srcVar, edge.getRelationshipType(), dstVar)); + + log.debug("Query: " + query); + log.debug("Mutations: " + mutations); + + // construct the upsert + Mutation mutation = + Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(mutations.toString())).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + // run the request + _dgraph.executeFunction(client -> client.newTransaction().doRequest(request)); + } + + private static @Nonnull String getDgraphType(@Nonnull Urn urn) { + return urn.getNamespace() + ":" + urn.getEntityType(); + } + + // Returns reversed and directed relationship types: + // <rel> returns <~rel> on outgoing and <rel> on incoming and both on undirected + private static List<String> getDirectedRelationshipTypes( + List<String> relationships, RelationshipDirection direction) { + + if (direction == RelationshipDirection.OUTGOING + || direction == RelationshipDirection.UNDIRECTED) { + List<String> outgoingRelationships = + relationships.stream().map(type -> "~" + type).collect(Collectors.toList()); + + if (direction == RelationshipDirection.OUTGOING) { + return outgoingRelationships; + } else { + relationships = new ArrayList<>(relationships); + relationships.addAll(outgoingRelationships); + } + } - String fieldName = (String) fieldMap.get("name"); - return Stream.of(fieldName); - }).filter(f -> !f.startsWith("dgraph.")).collect(Collectors.toSet()); - return Stream.of(Pair.of(typeName, fields)); - }).filter(t -> !t.getKey().startsWith("dgraph.")).collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + // we need to remove duplicates in order to not cause invalid queries in dgraph + return new ArrayList<>(new LinkedHashSet(relationships)); + } + + protected static String getQueryForRelatedEntities( + @Nullable List<String> sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List<String> destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + int offset, + int count) { + if (relationshipTypes.isEmpty()) { + // we would have to construct a query that never returns any results + 
// just do not call this method in the first place + throw new IllegalArgumentException("The relationship types must not be empty"); + } - return new DgraphSchema(fieldNames, typeFields); + if (sourceEntityFilter.hasCriteria() || destinationEntityFilter.hasCriteria()) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support criteria in source or destination entity filter"); } - @Override - public LineageRegistry getLineageRegistry() { - return _lineageRegistry; + //noinspection ConstantConditions + if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() > 1 + || destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() > 1) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support multiple OR criteria in source or destination entity filter"); } - @Override - public void addEdge(Edge edge) { - log.debug(String.format("Adding Edge source: %s, destination: %s, type: %s", - edge.getSource(), - edge.getDestination(), - edge.getRelationshipType())); - - // add the relationship type to the schema - // TODO: translate edge name to allowed dgraph uris - String sourceEntityType = getDgraphType(edge.getSource()); - String relationshipType = edge.getRelationshipType(); - get_schema().ensureField(sourceEntityType, relationshipType, URN_RELATIONSHIP_TYPE, TYPE_RELATIONSHIP_TYPE, KEY_RELATIONSHIP_TYPE); - - // lookup the source and destination nodes - // TODO: add escape for string values - String query = String.format("query {\n" - + " src as var(func: eq(urn, \"%s\"))\n" - + " dst as var(func: eq(urn, \"%s\"))\n" - + "}", edge.getSource(), edge.getDestination()); - String srcVar = "uid(src)"; - String dstVar = "uid(dst)"; - - // edge case: source and destination are same node - if (edge.getSource().equals(edge.getDestination())) { - query = String.format("query {\n" - + " node as var(func: eq(urn, \"%s\"))\n" - + "}", edge.getSource()); - srcVar = "uid(node)"; - dstVar = "uid(node)"; - } - - // create source and destination nodes if they do not exist - // and create the new edge between them - // TODO: add escape for string values - // TODO: translate edge name to allowed dgraph uris - StringJoiner mutations = new StringJoiner("\n"); - mutations.add(String.format("%s <dgraph.type> \"%s\" .", srcVar, getDgraphType(edge.getSource()))); - mutations.add(String.format("%s <urn> \"%s\" .", srcVar, edge.getSource())); - mutations.add(String.format("%s <type> \"%s\" .", srcVar, edge.getSource().getEntityType())); - mutations.add(String.format("%s <key> \"%s\" .", srcVar, edge.getSource().getEntityKey())); - if (!edge.getSource().equals(edge.getDestination())) { - mutations.add(String.format("%s <dgraph.type> \"%s\" .", dstVar, getDgraphType(edge.getDestination()))); - mutations.add(String.format("%s <urn> \"%s\" .", dstVar, edge.getDestination())); - mutations.add(String.format("%s <type> \"%s\" .", dstVar, edge.getDestination().getEntityType())); - mutations.add(String.format("%s <key> \"%s\" .", dstVar, edge.getDestination().getEntityKey())); - } - mutations.add(String.format("%s <%s> %s .", srcVar, edge.getRelationshipType(), dstVar)); - - log.debug("Query: " + query); - log.debug("Mutations: " + mutations); - - // construct the upsert - Mutation mutation = Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(mutations.toString())) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - // run the request - _dgraph.executeFunction(client -> 
client.newTransaction().doRequest(request)); + //noinspection ConstantConditions + if (relationshipFilter.hasCriteria() + || relationshipFilter.hasOr() && relationshipFilter.getOr().size() > 0) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support any criteria for the relationship filter"); } - private static @Nonnull String getDgraphType(@Nonnull Urn urn) { - return urn.getNamespace() + ":" + urn.getEntityType(); + // We are not querying for <src> <relationship> <dest> and return <dest> + // but we reverse the relationship and query for <dest> <~relationship> <src> + // this guarantees there are no duplicates among the returned <dest>s + final List<String> directedRelationshipTypes = + getDirectedRelationshipTypes(relationshipTypes, relationshipFilter.getDirection()); + + List<String> filters = new ArrayList<>(); + + Set<String> destinationNodeFilterNames = new HashSet<>(); + String sourceTypeFilterName = null; + String destinationTypeFilterName = null; + List<String> sourceFilterNames = new ArrayList<>(); + List<String> destinationFilterNames = new ArrayList<>(); + List<String> relationshipTypeFilterNames = new ArrayList<>(); + + if (sourceTypes != null && sourceTypes.size() > 0) { + sourceTypeFilterName = "sourceType"; + // TODO: escape string value + final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); + sourceTypes.forEach(type -> joiner.add(type)); + filters.add( + String.format( + "%s as var(func: eq(<type>, %s))", sourceTypeFilterName, joiner.toString())); } - // Returns reversed and directed relationship types: - // <rel> returns <~rel> on outgoing and <rel> on incoming and both on undirected - private static List<String> getDirectedRelationshipTypes(List<String> relationships, - RelationshipDirection direction) { - - if (direction == RelationshipDirection.OUTGOING || direction == RelationshipDirection.UNDIRECTED) { - List<String> outgoingRelationships = relationships.stream() - .map(type -> "~" + type).collect(Collectors.toList()); - - if (direction == RelationshipDirection.OUTGOING) { - return outgoingRelationships; - } else { - relationships = new ArrayList<>(relationships); - relationships.addAll(outgoingRelationships); - } - } - - // we need to remove duplicates in order to not cause invalid queries in dgraph - return new ArrayList<>(new LinkedHashSet(relationships)); + if (destinationTypes != null && destinationTypes.size() > 0) { + destinationTypeFilterName = "destinationType"; + final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); + destinationTypes.forEach(type -> joiner.add(type)); + // TODO: escape string value + filters.add( + String.format( + "%s as var(func: eq(<type>, %s))", destinationTypeFilterName, joiner.toString())); } - protected static String getQueryForRelatedEntities(@Nullable List<String> sourceTypes, - @Nonnull Filter sourceEntityFilter, - @Nullable List<String> destinationTypes, - @Nonnull Filter destinationEntityFilter, - @Nonnull List<String> relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter, - int offset, - int count) { - if (relationshipTypes.isEmpty()) { - // we would have to construct a query that never returns any results - // just do not call this method in the first place - throw new IllegalArgumentException("The relationship types must not be empty"); - } - - - if (sourceEntityFilter.hasCriteria() || destinationEntityFilter.hasCriteria()) { - throw new IllegalArgumentException("The DgraphGraphService does not support criteria in source or destination entity filter"); 
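
To make the query construction concrete, here is a sketch of rendering the generated query for a single relationship type; it must live in the same package because getQueryForRelatedEntities is protected, and the urn and relationship name are illustrative:

package com.linkedin.metadata.graph.dgraph;

import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER;

import com.linkedin.metadata.query.filter.RelationshipDirection;
import com.linkedin.metadata.search.utils.QueryUtils;
import java.util.Collections;

public class RelatedEntitiesQuerySketch {
  public static void main(String[] args) {
    // Downstreams of one dataset, first page of 100 results.
    String query =
        DgraphGraphService.getQueryForRelatedEntities(
            null, // no source type restriction
            QueryUtils.newFilter("urn", "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)"),
            null, // no destination type restriction
            EMPTY_FILTER,
            Collections.singletonList("DownstreamOf"),
            QueryUtils.newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING),
            0,
            100);
    System.out.println(query);
  }
}
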
- } - - //noinspection ConstantConditions - if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() > 1 - || destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() > 1) { - throw new IllegalArgumentException("The DgraphGraphService does not support multiple OR criteria in source or destination entity filter"); - } - - //noinspection ConstantConditions - if (relationshipFilter.hasCriteria() || relationshipFilter.hasOr() && relationshipFilter.getOr().size() > 0) { - throw new IllegalArgumentException("The DgraphGraphService does not support any criteria for the relationship filter"); - } - - // We are not querying for <src> <relationship> <dest> and return <dest> - // but we reverse the relationship and query for <dest> <~relationship> <src> - // this guarantees there are no duplicates among the returned <dest>s - final List<String> directedRelationshipTypes = getDirectedRelationshipTypes( - relationshipTypes, relationshipFilter.getDirection() - ); - - List<String> filters = new ArrayList<>(); - - Set<String> destinationNodeFilterNames = new HashSet<>(); - String sourceTypeFilterName = null; - String destinationTypeFilterName = null; - List<String> sourceFilterNames = new ArrayList<>(); - List<String> destinationFilterNames = new ArrayList<>(); - List<String> relationshipTypeFilterNames = new ArrayList<>(); - - if (sourceTypes != null && sourceTypes.size() > 0) { - sourceTypeFilterName = "sourceType"; - // TODO: escape string value - final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); - sourceTypes.forEach(type -> joiner.add(type)); - filters.add(String.format("%s as var(func: eq(<type>, %s))", sourceTypeFilterName, joiner.toString())); - } - - if (destinationTypes != null && destinationTypes.size() > 0) { - destinationTypeFilterName = "destinationType"; - final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); - destinationTypes.forEach(type -> joiner.add(type)); - // TODO: escape string value - filters.add(String.format("%s as var(func: eq(<type>, %s))", destinationTypeFilterName, joiner.toString())); - } - - //noinspection ConstantConditions - if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() == 1) { - CriterionArray sourceCriteria = sourceEntityFilter.getOr().get(0).getAnd(); - IntStream.range(0, sourceCriteria.size()) - .forEach(idx -> { - String sourceFilterName = "sourceFilter" + (idx + 1); - sourceFilterNames.add(sourceFilterName); - Criterion criterion = sourceCriteria.get(idx); - // TODO: escape field name and string value - filters.add(String.format("%s as var(func: eq(<%s>, \"%s\"))", sourceFilterName, criterion.getField(), criterion.getValue())); - }); - } - - //noinspection ConstantConditions - if (destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() == 1) { - CriterionArray destinationCriteria = destinationEntityFilter.getOr().get(0).getAnd(); - IntStream.range(0, destinationCriteria.size()) - .forEach(idx -> { - String sourceFilterName = "destinationFilter" + (idx + 1); - destinationFilterNames.add(sourceFilterName); - Criterion criterion = destinationCriteria.get(idx); - // TODO: escape field name and string value - filters.add(String.format("%s as var(func: eq(<%s>, \"%s\"))", sourceFilterName, criterion.getField(), criterion.getValue())); - }); - } - - IntStream.range(0, directedRelationshipTypes.size()) - .forEach(idx -> { - String relationshipTypeFilterName = "relationshipType" + (idx + 1); - relationshipTypeFilterNames.add(relationshipTypeFilterName); - // TODO: escape 
string value - filters.add(String.format("%s as var(func: has(<%s>))", relationshipTypeFilterName, directedRelationshipTypes.get(idx))); - }); - - // the destination node filter is the first filter that is being applied on the destination node - // we can add multiple filters, they will combine as OR - if (destinationTypeFilterName != null) { - destinationNodeFilterNames.add(destinationTypeFilterName); - } - destinationNodeFilterNames.addAll(destinationFilterNames); - destinationNodeFilterNames.addAll(relationshipTypeFilterNames); - - StringJoiner destinationNodeFilterJoiner = new StringJoiner(", "); - destinationNodeFilterNames.stream().sorted().forEach(destinationNodeFilterJoiner::add); - String destinationNodeFilter = destinationNodeFilterJoiner.toString(); - - String filterConditions = getFilterConditions( - sourceTypeFilterName, destinationTypeFilterName, - sourceFilterNames, destinationFilterNames, - relationshipTypeFilterNames, directedRelationshipTypes - ); - - StringJoiner relationshipsJoiner = new StringJoiner("\n "); - getRelationships(sourceTypeFilterName, sourceFilterNames, directedRelationshipTypes) - .forEach(relationshipsJoiner::add); - String relationships = relationshipsJoiner.toString(); - - StringJoiner filterJoiner = new StringJoiner("\n "); - filters.forEach(filterJoiner::add); - String filterExpressions = filterJoiner.toString(); - - return String.format("query {\n" - + " %s\n" - + "\n" - + " result (func: uid(%s), first: %d, offset: %d) %s {\n" - + " <urn>\n" - + " %s\n" - + " }\n" - + "}", - filterExpressions, - destinationNodeFilter, - count, offset, - filterConditions, - relationships); + //noinspection ConstantConditions + if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() == 1) { + CriterionArray sourceCriteria = sourceEntityFilter.getOr().get(0).getAnd(); + IntStream.range(0, sourceCriteria.size()) + .forEach( + idx -> { + String sourceFilterName = "sourceFilter" + (idx + 1); + sourceFilterNames.add(sourceFilterName); + Criterion criterion = sourceCriteria.get(idx); + // TODO: escape field name and string value + filters.add( + String.format( + "%s as var(func: eq(<%s>, \"%s\"))", + sourceFilterName, criterion.getField(), criterion.getValue())); + }); } - @Override - public void upsertEdge(final Edge edge) { - throw new UnsupportedOperationException("Upsert edge not supported by Neo4JGraphService at this time."); + //noinspection ConstantConditions + if (destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() == 1) { + CriterionArray destinationCriteria = destinationEntityFilter.getOr().get(0).getAnd(); + IntStream.range(0, destinationCriteria.size()) + .forEach( + idx -> { + String sourceFilterName = "destinationFilter" + (idx + 1); + destinationFilterNames.add(sourceFilterName); + Criterion criterion = destinationCriteria.get(idx); + // TODO: escape field name and string value + filters.add( + String.format( + "%s as var(func: eq(<%s>, \"%s\"))", + sourceFilterName, criterion.getField(), criterion.getValue())); + }); } - @Override - public void removeEdge(final Edge edge) { - throw new UnsupportedOperationException("Remove edge not supported by DgraphGraphService at this time."); + IntStream.range(0, directedRelationshipTypes.size()) + .forEach( + idx -> { + String relationshipTypeFilterName = "relationshipType" + (idx + 1); + relationshipTypeFilterNames.add(relationshipTypeFilterName); + // TODO: escape string value + filters.add( + String.format( + "%s as var(func: has(<%s>))", + relationshipTypeFilterName, 
directedRelationshipTypes.get(idx))); + }); + + // the destination node filter is the first filter that is being applied on the destination node + // we can add multiple filters, they will combine as OR + if (destinationTypeFilterName != null) { + destinationNodeFilterNames.add(destinationTypeFilterName); + } + destinationNodeFilterNames.addAll(destinationFilterNames); + destinationNodeFilterNames.addAll(relationshipTypeFilterNames); + + StringJoiner destinationNodeFilterJoiner = new StringJoiner(", "); + destinationNodeFilterNames.stream().sorted().forEach(destinationNodeFilterJoiner::add); + String destinationNodeFilter = destinationNodeFilterJoiner.toString(); + + String filterConditions = + getFilterConditions( + sourceTypeFilterName, destinationTypeFilterName, + sourceFilterNames, destinationFilterNames, + relationshipTypeFilterNames, directedRelationshipTypes); + + StringJoiner relationshipsJoiner = new StringJoiner("\n "); + getRelationships(sourceTypeFilterName, sourceFilterNames, directedRelationshipTypes) + .forEach(relationshipsJoiner::add); + String relationships = relationshipsJoiner.toString(); + + StringJoiner filterJoiner = new StringJoiner("\n "); + filters.forEach(filterJoiner::add); + String filterExpressions = filterJoiner.toString(); + + return String.format( + "query {\n" + + " %s\n" + + "\n" + + " result (func: uid(%s), first: %d, offset: %d) %s {\n" + + " <urn>\n" + + " %s\n" + + " }\n" + + "}", + filterExpressions, destinationNodeFilter, count, offset, filterConditions, relationships); + } + + @Override + public void upsertEdge(final Edge edge) { + throw new UnsupportedOperationException( + "Upsert edge not supported by Neo4JGraphService at this time."); + } + + @Override + public void removeEdge(final Edge edge) { + throw new UnsupportedOperationException( + "Remove edge not supported by DgraphGraphService at this time."); + } + + @Nonnull + @Override + public RelatedEntitiesResult findRelatedEntities( + @Nullable List<String> sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List<String> destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + int offset, + int count) { + + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { + return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); + } + if (relationshipTypes.isEmpty() + || relationshipTypes.stream() + .noneMatch(relationship -> get_schema().hasField(relationship))) { + return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } - @Nonnull - @Override - public RelatedEntitiesResult findRelatedEntities(@Nullable List<String> sourceTypes, - @Nonnull Filter sourceEntityFilter, - @Nullable List<String> destinationTypes, - @Nonnull Filter destinationEntityFilter, - @Nonnull List<String> relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter, - int offset, - int count) { - - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { - return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); - } - if (relationshipTypes.isEmpty() || relationshipTypes.stream().noneMatch(relationship -> get_schema().hasField(relationship))) { - return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); - } - - String query = getQueryForRelatedEntities( - sourceTypes, sourceEntityFilter, - destinationTypes, destinationEntityFilter, - 
relationshipTypes.stream().filter(get_schema()::hasField).collect(Collectors.toList()), - relationshipFilter, - offset, count - ); - - Request request = Request.newBuilder() - .setQuery(query) - .build(); - - log.debug("Query: " + query); - Response response = _dgraph.executeFunction(client -> client.newReadOnlyTransaction().doRequest(request)); - String json = response.getJson().toStringUtf8(); - Map<String, Object> data = getDataFromResponseJson(json); - - List<RelatedEntity> entities = getRelatedEntitiesFromResponseData(data); - int total = offset + entities.size(); - if (entities.size() == count) { - // indicate that there might be more results - total++; - } - return new RelatedEntitiesResult(offset, entities.size(), total, entities); + String query = + getQueryForRelatedEntities( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes.stream().filter(get_schema()::hasField).collect(Collectors.toList()), + relationshipFilter, + offset, + count); + + Request request = Request.newBuilder().setQuery(query).build(); + + log.debug("Query: " + query); + Response response = + _dgraph.executeFunction(client -> client.newReadOnlyTransaction().doRequest(request)); + String json = response.getJson().toStringUtf8(); + Map<String, Object> data = getDataFromResponseJson(json); + + List<RelatedEntity> entities = getRelatedEntitiesFromResponseData(data); + int total = offset + entities.size(); + if (entities.size() == count) { + // indicate that there might be more results + total++; + } + return new RelatedEntitiesResult(offset, entities.size(), total, entities); + } + + // Creates filter conditions from destination to source nodes + protected static @Nonnull String getFilterConditions( + @Nullable String sourceTypeFilterName, + @Nullable String destinationTypeFilterName, + @Nonnull List<String> sourceFilterNames, + @Nonnull List<String> destinationFilterNames, + @Nonnull List<String> relationshipTypeFilterNames, + @Nonnull List<String> relationshipTypes) { + if (relationshipTypes.size() != relationshipTypeFilterNames.size()) { + throw new IllegalArgumentException( + "relationshipTypeFilterNames and relationshipTypes " + + "must have same size: " + + relationshipTypeFilterNames + + " vs. " + + relationshipTypes); } - // Creates filter conditions from destination to source nodes - protected static @Nonnull String getFilterConditions(@Nullable String sourceTypeFilterName, - @Nullable String destinationTypeFilterName, - @Nonnull List<String> sourceFilterNames, - @Nonnull List<String> destinationFilterNames, - @Nonnull List<String> relationshipTypeFilterNames, - @Nonnull List<String> relationshipTypes) { - if (relationshipTypes.size() != relationshipTypeFilterNames.size()) { - throw new IllegalArgumentException("relationshipTypeFilterNames and relationshipTypes " - + "must have same size: " + relationshipTypeFilterNames + " vs. 
" + relationshipTypes); - } - - if (sourceTypeFilterName == null - && destinationTypeFilterName == null - && sourceFilterNames.isEmpty() - && destinationFilterNames.isEmpty() - && relationshipTypeFilterNames.isEmpty()) { - return ""; - } - - StringJoiner andJoiner = new StringJoiner(" AND\n "); - if (destinationTypeFilterName != null) { - andJoiner.add(String.format("uid(%s)", destinationTypeFilterName)); - } - - destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid(%s)", filter))); - - if (!relationshipTypes.isEmpty()) { - StringJoiner orJoiner = new StringJoiner(" OR\n "); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> orJoiner.add(getRelationshipCondition( - relationshipTypes.get(idx), relationshipTypeFilterNames.get(idx), - sourceTypeFilterName, sourceFilterNames - ))); - String relationshipCondition = orJoiner.toString(); - andJoiner.add(String.format("(\n %s\n )", relationshipCondition)); - } - - String conditions = andJoiner.toString(); - return String.format("@filter(\n %s\n )", conditions); + if (sourceTypeFilterName == null + && destinationTypeFilterName == null + && sourceFilterNames.isEmpty() + && destinationFilterNames.isEmpty() + && relationshipTypeFilterNames.isEmpty()) { + return ""; } - protected static String getRelationshipCondition(@Nonnull String relationshipType, - @Nonnull String relationshipTypeFilterName, - @Nullable String objectFilterName, - @Nonnull List<String> destinationFilterNames) { - StringJoiner andJoiner = new StringJoiner(" AND "); - andJoiner.add(String.format("uid(%s)", relationshipTypeFilterName)); - if (objectFilterName != null) { - andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, objectFilterName)); - } - destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, filter))); - return andJoiner.toString(); + StringJoiner andJoiner = new StringJoiner(" AND\n "); + if (destinationTypeFilterName != null) { + andJoiner.add(String.format("uid(%s)", destinationTypeFilterName)); } + destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid(%s)", filter))); + + if (!relationshipTypes.isEmpty()) { + StringJoiner orJoiner = new StringJoiner(" OR\n "); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + orJoiner.add( + getRelationshipCondition( + relationshipTypes.get(idx), + relationshipTypeFilterNames.get(idx), + sourceTypeFilterName, + sourceFilterNames))); + String relationshipCondition = orJoiner.toString(); + andJoiner.add(String.format("(\n %s\n )", relationshipCondition)); + } - // Creates filter conditions from destination to source nodes - protected static @Nonnull List<String> getRelationships(@Nullable String sourceTypeFilterName, - @Nonnull List<String> sourceFilterNames, - @Nonnull List<String> relationshipTypes) { - return relationshipTypes.stream().map(relationshipType -> { - StringJoiner andJoiner = new StringJoiner(" AND "); - if (sourceTypeFilterName != null) { + String conditions = andJoiner.toString(); + return String.format("@filter(\n %s\n )", conditions); + } + + protected static String getRelationshipCondition( + @Nonnull String relationshipType, + @Nonnull String relationshipTypeFilterName, + @Nullable String objectFilterName, + @Nonnull List<String> destinationFilterNames) { + StringJoiner andJoiner = new StringJoiner(" AND "); + andJoiner.add(String.format("uid(%s)", relationshipTypeFilterName)); + if (objectFilterName != null) { + andJoiner.add(String.format("uid_in(<%s>, uid(%s))", 
relationshipType, objectFilterName)); + } + destinationFilterNames.forEach( + filter -> andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, filter))); + return andJoiner.toString(); + } + + // Creates filter conditions from destination to source nodes + protected static @Nonnull List<String> getRelationships( + @Nullable String sourceTypeFilterName, + @Nonnull List<String> sourceFilterNames, + @Nonnull List<String> relationshipTypes) { + return relationshipTypes.stream() + .map( + relationshipType -> { + StringJoiner andJoiner = new StringJoiner(" AND "); + if (sourceTypeFilterName != null) { andJoiner.add(String.format("uid(%s)", sourceTypeFilterName)); - } - sourceFilterNames.forEach(filterName -> andJoiner.add(String.format("uid(%s)", filterName))); + } + sourceFilterNames.forEach( + filterName -> andJoiner.add(String.format("uid(%s)", filterName))); - if (andJoiner.length() > 0) { + if (andJoiner.length() > 0) { return String.format("<%s> @filter( %s ) { <uid> }", relationshipType, andJoiner); - } else { + } else { return String.format("<%s> { <uid> }", relationshipType); - } - }).collect(Collectors.toList()); + } + }) + .collect(Collectors.toList()); + } + + protected static Map<String, Object> getDataFromResponseJson(String json) { + ObjectMapper mapper = new ObjectMapper(); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + TypeReference<HashMap<String, Object>> typeRef = + new TypeReference<HashMap<String, Object>>() {}; + try { + return mapper.readValue(json, typeRef); + } catch (IOException e) { + throw new RuntimeException("Failed to parse response json: " + json.substring(0, 1000), e); } - - protected static Map<String, Object> getDataFromResponseJson(String json) { - ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() { }; - try { - return mapper.readValue(json, typeRef); - } catch (IOException e) { - throw new RuntimeException("Failed to parse response json: " + json.substring(0, 1000), e); - } + } + + protected static List<RelatedEntity> getRelatedEntitiesFromResponseData( + Map<String, Object> data) { + Object obj = data.get("result"); + if (!(obj instanceof List<?>)) { + throw new IllegalArgumentException( + "The result from Dgraph did not contain a 'result' field, or that field is not a List"); } - protected static List<RelatedEntity> getRelatedEntitiesFromResponseData(Map<String, Object> data) { - Object obj = data.get("result"); - if (!(obj instanceof List<?>)) { - throw new IllegalArgumentException( - "The result from Dgraph did not contain a 'result' field, or that field is not a List" - ); - } - - List<?> results = (List<?>) obj; - return results.stream().flatMap(destinationObj -> { - if (!(destinationObj instanceof Map)) { + List<?> results = (List<?>) obj; + return results.stream() + .flatMap( + destinationObj -> { + if (!(destinationObj instanceof Map)) { return Stream.empty(); - } + } - Map<?, ?> destination = (Map<?, ?>) destinationObj; - if (destination.containsKey("urn") && destination.get("urn") instanceof 
String) { + Map<?, ?> destination = (Map<?, ?>) destinationObj; + if (destination.containsKey("urn") && destination.get("urn") instanceof String) { String urn = (String) destination.get("urn"); return destination.entrySet().stream() - .filter(entry -> !entry.getKey().equals("urn")) - .flatMap(entry -> { - Object relationshipObj = entry.getKey(); - Object sourcesObj = entry.getValue(); - if (!(relationshipObj instanceof String && sourcesObj instanceof List)) { - return Stream.empty(); - } - - String relationship = (String) relationshipObj; - List<?> sources = (List<?>) sourcesObj; - - if (sources.size() == 0) { - return Stream.empty(); - } - - if (relationship.startsWith("~")) { - relationship = relationship.substring(1); - } - - return Stream.of(relationship); + .filter(entry -> !entry.getKey().equals("urn")) + .flatMap( + entry -> { + Object relationshipObj = entry.getKey(); + Object sourcesObj = entry.getValue(); + if (!(relationshipObj instanceof String && sourcesObj instanceof List)) { + return Stream.empty(); + } + + String relationship = (String) relationshipObj; + List<?> sources = (List<?>) sourcesObj; + + if (sources.size() == 0) { + return Stream.empty(); + } + + if (relationship.startsWith("~")) { + relationship = relationship.substring(1); + } + + return Stream.of(relationship); }) - // for undirected we get duplicate relationships - .distinct() - .map(relationship -> new RelatedEntity(relationship, urn)); - } - - return Stream.empty(); - }).collect(Collectors.toList()); - } - - @Override - public void removeNode(@Nonnull Urn urn) { - String query = String.format("query {\n" - + " node as var(func: eq(urn, \"%s\"))\n" - + "}", urn); - String deletion = "uid(node) * * ."; - - log.debug("Query: " + query); - log.debug("Delete: " + deletion); - - Mutation mutation = Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(deletion)) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + // for undirected we get duplicate relationships + .distinct() + .map(relationship -> new RelatedEntity(relationship, urn)); + } + + return Stream.empty(); + }) + .collect(Collectors.toList()); + } + + @Override + public void removeNode(@Nonnull Urn urn) { + String query = String.format("query {\n" + " node as var(func: eq(urn, \"%s\"))\n" + "}", urn); + String deletion = "uid(node) * * ."; + + log.debug("Query: " + query); + log.debug("Delete: " + deletion); + + Mutation mutation = + Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(deletion)).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } + + @Override + public void removeEdgesFromNode( + @Nonnull Urn urn, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter) { + if (relationshipTypes.isEmpty()) { + return; } - @Override - public void removeEdgesFromNode(@Nonnull Urn urn, - @Nonnull List<String> relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter) { - if (relationshipTypes.isEmpty()) { - return; - } - - RelationshipDirection direction = relationshipFilter.getDirection(); + RelationshipDirection direction = relationshipFilter.getDirection(); - if (direction == RelationshipDirection.OUTGOING || direction == RelationshipDirection.UNDIRECTED) { - 
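      // UNDIRECTED is the union of the two branches below: both the node's own outgoing
      // predicates and the reverse (~predicate) edges pointing at it are removed.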
removeOutgoingEdgesFromNode(urn, relationshipTypes); - } - - if (direction == RelationshipDirection.INCOMING || direction == RelationshipDirection.UNDIRECTED) { - removeIncomingEdgesFromNode(urn, relationshipTypes); - } + if (direction == RelationshipDirection.OUTGOING + || direction == RelationshipDirection.UNDIRECTED) { + removeOutgoingEdgesFromNode(urn, relationshipTypes); } - private void removeOutgoingEdgesFromNode(@Nonnull Urn urn, - @Nonnull List<String> relationshipTypes) { - // TODO: add escape for string values - String query = String.format("query {\n" - + " node as var(func: eq(<urn>, \"%s\"))\n" - + "}", urn); - - Value star = Value.newBuilder().setDefaultVal("_STAR_ALL").build(); - List<NQuad> deletions = relationshipTypes.stream().map(relationshipType -> - NQuad.newBuilder() + if (direction == RelationshipDirection.INCOMING + || direction == RelationshipDirection.UNDIRECTED) { + removeIncomingEdgesFromNode(urn, relationshipTypes); + } + } + + private void removeOutgoingEdgesFromNode( + @Nonnull Urn urn, @Nonnull List<String> relationshipTypes) { + // TODO: add escape for string values + String query = + String.format("query {\n" + " node as var(func: eq(<urn>, \"%s\"))\n" + "}", urn); + + Value star = Value.newBuilder().setDefaultVal("_STAR_ALL").build(); + List<NQuad> deletions = + relationshipTypes.stream() + .map( + relationshipType -> + NQuad.newBuilder() .setSubject("uid(node)") .setPredicate(relationshipType) .setObjectValue(star) - .build() - ).collect(Collectors.toList()); - - log.debug("Query: " + query); - log.debug("Deletions: " + deletions); - - Mutation mutation = Mutation.newBuilder() - .addAllDel(deletions) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); - } - - private void removeIncomingEdgesFromNode(@Nonnull Urn urn, - @Nonnull List<String> relationshipTypes) { - // TODO: add escape for string values - StringJoiner reverseEdges = new StringJoiner("\n "); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> - reverseEdges.add("<~" + relationshipTypes.get(idx) + "> { uids" + (idx + 1) + " as uid }") - ); - String query = String.format("query {\n" + .build()) + .collect(Collectors.toList()); + + log.debug("Query: " + query); + log.debug("Deletions: " + deletions); + + Mutation mutation = Mutation.newBuilder().addAllDel(deletions).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } + + private void removeIncomingEdgesFromNode( + @Nonnull Urn urn, @Nonnull List<String> relationshipTypes) { + // TODO: add escape for string values + StringJoiner reverseEdges = new StringJoiner("\n "); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + reverseEdges.add( + "<~" + relationshipTypes.get(idx) + "> { uids" + (idx + 1) + " as uid }")); + String query = + String.format( + "query {\n" + " node as var(func: eq(<urn>, \"%s\"))\n" + "\n" + " var(func: uid(node)) @normalize {\n" + " %s\n" + " }\n" - + "}", urn, reverseEdges); - - StringJoiner deletions = new StringJoiner("\n"); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> - deletions.add("uid(uids" + (idx + 1) + ") <" + relationshipTypes.get(idx) + "> uid(node) .") - ); - - log.debug("Query: " + query); - log.debug("Deletions: " + deletions); - - Mutation mutation = 
Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(deletions.toString())) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); - } + + "}", + urn, reverseEdges); - @Override - public void configure() { } + StringJoiner deletions = new StringJoiner("\n"); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + deletions.add( + "uid(uids" + (idx + 1) + ") <" + relationshipTypes.get(idx) + "> uid(node) .")); - @Override - public void clear() { - log.debug("dropping Dgraph data"); + log.debug("Query: " + query); + log.debug("Deletions: " + deletions); - Operation dropAll = Operation.newBuilder().setDropOp(Operation.DropOp.ALL).build(); - _dgraph.executeConsumer(client -> client.alter(dropAll)); + Mutation mutation = + Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(deletions.toString())).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); - // drop schema cache - get_schema().clear(); + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } - // setup urn, type and key relationships - getSchema(); - } + @Override + public void configure() {} + + @Override + public void clear() { + log.debug("dropping Dgraph data"); + + Operation dropAll = Operation.newBuilder().setDropOp(Operation.DropOp.ALL).build(); + _dgraph.executeConsumer(client -> client.alter(dropAll)); + + // drop schema cache + get_schema().clear(); + + // setup urn, type and key relationships + getSchema(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java index fc1c64ea3cc03..8c4b37716e798 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java @@ -1,9 +1,6 @@ package com.linkedin.metadata.graph.dgraph; import io.dgraph.DgraphProto; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -11,118 +8,125 @@ import java.util.Set; import java.util.StringJoiner; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; -/** - * Provides a thread-safe Dgraph schema. Returned data structures are immutable. - */ +/** Provides a thread-safe Dgraph schema. Returned data structures are immutable. */ @Slf4j public class DgraphSchema { - private final @Nonnull Set<String> fields; - private final @Nonnull Map<String, Set<String>> types; - private final DgraphExecutor dgraph; - - public static DgraphSchema empty() { - return new DgraphSchema(Collections.emptySet(), Collections.emptyMap(), null); - } - - public DgraphSchema(@Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types) { - this(fields, types, null); - } - - public DgraphSchema(@Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types, DgraphExecutor dgraph) { - this.fields = fields; - this.types = types; - this.dgraph = dgraph; - } - - /** - * Adds the given DgraphExecutor to this schema returning a new instance. - * Be aware this and the new instance share the underlying fields and types datastructures. 
- * - * @param dgraph dgraph executor to add - * @return new instance - */ - public DgraphSchema withDgraph(DgraphExecutor dgraph) { - return new DgraphSchema(this.fields, this.types, dgraph); + private final @Nonnull Set<String> fields; + private final @Nonnull Map<String, Set<String>> types; + private final DgraphExecutor dgraph; + + public static DgraphSchema empty() { + return new DgraphSchema(Collections.emptySet(), Collections.emptyMap(), null); + } + + public DgraphSchema(@Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types) { + this(fields, types, null); + } + + public DgraphSchema( + @Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types, DgraphExecutor dgraph) { + this.fields = fields; + this.types = types; + this.dgraph = dgraph; + } + + /** + * Adds the given DgraphExecutor to this schema returning a new instance. Be aware this and the + * new instance share the underlying fields and types datastructures. + * + * @param dgraph dgraph executor to add + * @return new instance + */ + public DgraphSchema withDgraph(DgraphExecutor dgraph) { + return new DgraphSchema(this.fields, this.types, dgraph); + } + + public synchronized boolean isEmpty() { + return fields.isEmpty(); + } + + public synchronized Set<String> getFields() { + // Provide an unmodifiable copy + return Collections.unmodifiableSet(new HashSet<>(fields)); + } + + public synchronized Set<String> getFields(String typeName) { + // Provide an unmodifiable copy + return Collections.unmodifiableSet( + new HashSet<>(types.getOrDefault(typeName, Collections.emptySet()))); + } + + public synchronized Map<String, Set<String>> getTypes() { + // Provide an unmodifiable copy of the map and contained sets + return Collections.unmodifiableMap( + new HashSet<>(types.entrySet()) + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> Collections.unmodifiableSet(new HashSet<>(e.getValue()))))); + } + + public synchronized boolean hasType(String typeName) { + return types.containsKey(typeName); + } + + public synchronized boolean hasField(String fieldName) { + return fields.contains(fieldName); + } + + public synchronized boolean hasField(String typeName, String fieldName) { + return types.getOrDefault(typeName, Collections.emptySet()).contains(fieldName); + } + + public synchronized void ensureField( + String typeName, String fieldName, String... 
existingFieldNames) { + // quickly check if the field is known for this type + if (hasField(typeName, fieldName)) { + return; } - synchronized public boolean isEmpty() { - return fields.isEmpty(); - } - - synchronized public Set<String> getFields() { - // Provide an unmodifiable copy - return Collections.unmodifiableSet(new HashSet<>(fields)); - } - - synchronized public Set<String> getFields(String typeName) { - // Provide an unmodifiable copy - return Collections.unmodifiableSet(new HashSet<>(types.getOrDefault(typeName, Collections.emptySet()))); - } - - synchronized public Map<String, Set<String>> getTypes() { - // Provide an unmodifiable copy of the map and contained sets - return Collections.unmodifiableMap( - new HashSet<>(types.entrySet()).stream() - .collect(Collectors.toMap( - Map.Entry::getKey, - e -> Collections.unmodifiableSet(new HashSet<>(e.getValue())) - )) - ); - } - - synchronized public boolean hasType(String typeName) { - return types.containsKey(typeName); - } - - synchronized public boolean hasField(String fieldName) { - return fields.contains(fieldName); - } + // add type and field to schema + StringJoiner schema = new StringJoiner("\n"); - synchronized public boolean hasField(String typeName, String fieldName) { - return types.getOrDefault(typeName, Collections.emptySet()).contains(fieldName); + if (!fields.contains(fieldName)) { + schema.add(String.format("<%s>: [uid] @reverse .", fieldName)); } - synchronized public void ensureField(String typeName, String fieldName, String... existingFieldNames) { - // quickly check if the field is known for this type - if (hasField(typeName, fieldName)) { - return; - } - - // add type and field to schema - StringJoiner schema = new StringJoiner("\n"); - - if (!fields.contains(fieldName)) { - schema.add(String.format("<%s>: [uid] @reverse .", fieldName)); - } - - // update the schema on the Dgraph cluster - Set<String> allTypesFields = new HashSet<>(Arrays.asList(existingFieldNames)); - allTypesFields.addAll(types.getOrDefault(typeName, Collections.emptySet())); - allTypesFields.add(fieldName); - - if (dgraph != null) { - log.info("Adding predicate {} for type {} to schema", fieldName, typeName); - - StringJoiner type = new StringJoiner("\n "); - allTypesFields.stream().map(t -> "<" + t + ">").forEach(type::add); - schema.add(String.format("type <%s> {\n %s\n}", typeName, type)); - log.debug("Adding to schema: " + schema); - DgraphProto.Operation setSchema = DgraphProto.Operation.newBuilder().setSchema(schema.toString()).setRunInBackground(true).build(); - dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); - } - - // now that the schema has been updated on dgraph we can cache this new type / field - // ensure type and fields of type exist - if (!types.containsKey(typeName)) { - types.put(typeName, new HashSet<>()); - } - types.get(typeName).add(fieldName); - fields.add(fieldName); + // update the schema on the Dgraph cluster + Set<String> allTypesFields = new HashSet<>(Arrays.asList(existingFieldNames)); + allTypesFields.addAll(types.getOrDefault(typeName, Collections.emptySet())); + allTypesFields.add(fieldName); + + if (dgraph != null) { + log.info("Adding predicate {} for type {} to schema", fieldName, typeName); + + StringJoiner type = new StringJoiner("\n "); + allTypesFields.stream().map(t -> "<" + t + ">").forEach(type::add); + schema.add(String.format("type <%s> {\n %s\n}", typeName, type)); + log.debug("Adding to schema: " + schema); + DgraphProto.Operation setSchema = + 
DgraphProto.Operation.newBuilder() + .setSchema(schema.toString()) + .setRunInBackground(true) + .build(); + dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); } - synchronized public void clear() { - types.clear(); - fields.clear(); + // now that the schema has been updated on dgraph we can cache this new type / field + // ensure type and fields of type exist + if (!types.containsKey(typeName)) { + types.put(typeName, new HashSet<>()); } + types.get(typeName).add(fieldName); + fields.add(fieldName); + } + + public synchronized void clear() { + types.clear(); + fields.clear(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index 946931a54f4ec..92960bc9222ab 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -1,7 +1,8 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; + import com.codahale.metrics.Timer; -import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.datahub.util.exception.ESQueryException; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; @@ -10,6 +11,7 @@ import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.graph.GraphFilters; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.graph.LineageRelationship; @@ -55,12 +57,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; - - -/** - * A search DAO for Elasticsearch backend. - */ +/** A search DAO for Elasticsearch backend. */ @Slf4j @RequiredArgsConstructor public class ESGraphQueryDAO { @@ -83,22 +80,29 @@ public class ESGraphQueryDAO { static final String UI = "UI"; @Nonnull - public static void addFilterToQueryBuilder(@Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { + public static void addFilterToQueryBuilder( + @Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { BoolQueryBuilder orQuery = new BoolQueryBuilder(); for (ConjunctiveCriterion conjunction : filter.getOr()) { final BoolQueryBuilder andQuery = new BoolQueryBuilder(); final List<Criterion> criterionArray = conjunction.getAnd(); - if (!criterionArray.stream().allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { - throw new RuntimeException("Currently Elastic query filter only supports EQUAL condition " + criterionArray); + if (!criterionArray.stream() + .allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { + throw new RuntimeException( + "Currently Elastic query filter only supports EQUAL condition " + criterionArray); } criterionArray.forEach( - criterion -> andQuery.must(QueryBuilders.termQuery(node + "." + criterion.getField(), criterion.getValue()))); + criterion -> + andQuery.must( + QueryBuilders.termQuery( + node + "." 
+ criterion.getField(), criterion.getValue()))); orQuery.should(andQuery); } rootQuery.must(orQuery); } - private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, final int offset, final int count) { + private SearchResponse executeSearchQuery( + @Nonnull final QueryBuilder query, final int offset, final int count) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -121,8 +125,12 @@ private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, fin } } - private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, @Nullable Object[] sort, @Nullable String pitId, - @Nonnull String keepAlive, final int count) { + private SearchResponse executeSearchQuery( + @Nonnull final QueryBuilder query, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + final int count) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -141,36 +149,51 @@ private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, @Nu log.error("Search query failed", e); throw new ESQueryException("Search query failed:", e); } - } - public SearchResponse getSearchResponse(@Nullable final List<String> sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List<String> destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - final int offset, final int count) { + public SearchResponse getSearchResponse( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count) { BoolQueryBuilder finalQuery = - buildQuery(sourceTypes, sourceEntityFilter, destinationTypes, destinationEntityFilter, relationshipTypes, + buildQuery( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, relationshipFilter); return executeSearchQuery(finalQuery, offset, count); } - public static BoolQueryBuilder buildQuery(@Nullable final List<String> sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List<String> destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + public static BoolQueryBuilder buildQuery( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); // set source filter - String sourceNode = relationshipDirection == RelationshipDirection.OUTGOING ? SOURCE : DESTINATION; + String sourceNode = + relationshipDirection == RelationshipDirection.OUTGOING ? 
SOURCE : DESTINATION; if (sourceTypes != null && sourceTypes.size() > 0) { finalQuery.must(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes)); } addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery); // set destination filter - String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE; + String destinationNode = + relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE; if (destinationTypes != null && destinationTypes.size() > 0) { finalQuery.must(QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes)); } @@ -180,16 +203,24 @@ public static BoolQueryBuilder buildQuery(@Nullable final List<String> sourceTyp if (relationshipTypes.size() > 0) { BoolQueryBuilder relationshipQuery = QueryBuilders.boolQuery(); relationshipTypes.forEach( - relationshipType -> relationshipQuery.should(QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); + relationshipType -> + relationshipQuery.should( + QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); finalQuery.must(relationshipQuery); } return finalQuery; } @WithSpan - public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, - int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + public LineageResponse getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { List<LineageRelationship> result = new ArrayList<>(); long currentTime = System.currentTimeMillis(); long remainingTime = graphQueryConfiguration.getTimeoutSeconds() * 1000; @@ -207,8 +238,11 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect } if (remainingTime < 0) { - log.info("Timed out while fetching lineage for {} with direction {}, maxHops {}. Returning results so far", - entityUrn, direction, maxHops); + log.info( + "Timed out while fetching lineage for {} with direction {}, maxHops {}. 
Returning results so far", + entityUrn, + direction, + maxHops); break; } @@ -225,7 +259,10 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect startTimeMillis, endTimeMillis); result.addAll(oneHopRelationships); - currentLevel = oneHopRelationships.stream().map(LineageRelationship::getEntity).collect(Collectors.toList()); + currentLevel = + oneHopRelationships.stream() + .map(LineageRelationship::getEntity) + .collect(Collectors.toList()); currentTime = System.currentTimeMillis(); remainingTime = timeoutTime - currentTime; } @@ -235,7 +272,10 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect if (offset >= response.getTotal()) { subList = Collections.emptyList(); } else { - subList = response.getLineageRelationships().subList(offset, Math.min(offset + count, response.getTotal())); + subList = + response + .getLineageRelationships() + .subList(offset, Math.min(offset + count, response.getTotal())); } return new LineageResponse(response.getTotal(), subList); @@ -243,23 +283,35 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect // Get 1-hop lineage relationships asynchronously in batches with timeout @WithSpan - public List<LineageRelationship> getLineageRelationshipsInBatches(@Nonnull List<Urn> entityUrns, - @Nonnull LineageDirection direction, GraphFilters graphFilters, Set<Urn> visitedEntities, int numHops, - long remainingTime, Map<Urn, UrnArrayArray> existingPaths, @Nullable Long startTimeMillis, + public List<LineageRelationship> getLineageRelationshipsInBatches( + @Nonnull List<Urn> entityUrns, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + Set<Urn> visitedEntities, + int numHops, + long remainingTime, + Map<Urn, UrnArrayArray> existingPaths, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { List<List<Urn>> batches = Lists.partition(entityUrns, graphQueryConfiguration.getBatchSize()); - return ConcurrencyUtils.getAllCompleted(batches.stream() - .map(batchUrns -> CompletableFuture.supplyAsync( - () -> getLineageRelationships( - batchUrns, - direction, - graphFilters, - visitedEntities, - numHops, - existingPaths, - startTimeMillis, - endTimeMillis))) - .collect(Collectors.toList()), remainingTime, TimeUnit.MILLISECONDS) + return ConcurrencyUtils.getAllCompleted( + batches.stream() + .map( + batchUrns -> + CompletableFuture.supplyAsync( + () -> + getLineageRelationships( + batchUrns, + direction, + graphFilters, + visitedEntities, + numHops, + existingPaths, + startTimeMillis, + endTimeMillis))) + .collect(Collectors.toList()), + remainingTime, + TimeUnit.MILLISECONDS) .stream() .flatMap(List::stream) .collect(Collectors.toList()); @@ -267,42 +319,56 @@ public List<LineageRelationship> getLineageRelationshipsInBatches(@Nonnull List< // Get 1-hop lineage relationships @WithSpan - private List<LineageRelationship> getLineageRelationships(@Nonnull List<Urn> entityUrns, - @Nonnull LineageDirection direction, GraphFilters graphFilters, Set<Urn> visitedEntities, int numHops, - Map<Urn, UrnArrayArray> existingPaths, @Nullable Long startTimeMillis, + private List<LineageRelationship> getLineageRelationships( + @Nonnull List<Urn> entityUrns, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + Set<Urn> visitedEntities, + int numHops, + Map<Urn, UrnArrayArray> existingPaths, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - Map<String, List<Urn>> urnsPerEntityType = 
entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); - Map<String, List<EdgeInfo>> edgesPerEntityType = urnsPerEntityType.keySet() - .stream() - .collect(Collectors.toMap(Function.identity(), - entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); + Map<String, List<Urn>> urnsPerEntityType = + entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); + Map<String, List<EdgeInfo>> edgesPerEntityType = + urnsPerEntityType.keySet().stream() + .collect( + Collectors.toMap( + Function.identity(), + entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Get all relation types relevant to the set of urns to hop from - urnsPerEntityType.forEach((entityType, urns) -> finalQuery.should( - getQueryForLineage( - urns, - edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), - graphFilters, - startTimeMillis, - endTimeMillis))); - SearchResponse response = executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); + urnsPerEntityType.forEach( + (entityType, urns) -> + finalQuery.should( + getQueryForLineage( + urns, + edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), + graphFilters, + startTimeMillis, + endTimeMillis))); + SearchResponse response = + executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); Set<Urn> entityUrnSet = new HashSet<>(entityUrns); // Get all valid edges given the set of urns to hop from - Set<Pair<String, EdgeInfo>> validEdges = edgesPerEntityType.entrySet() - .stream() - .flatMap(entry -> entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) - .collect(Collectors.toSet()); - return extractRelationships(entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); + Set<Pair<String, EdgeInfo>> validEdges = + edgesPerEntityType.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) + .collect(Collectors.toSet()); + return extractRelationships( + entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); } // Get search query for given list of edges and source urns @VisibleForTesting public static QueryBuilder getQueryForLineage( - @Nonnull List<Urn> urns, - @Nonnull List<EdgeInfo> lineageEdges, - @Nonnull GraphFilters graphFilters, - @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis) { + @Nonnull List<Urn> urns, + @Nonnull List<EdgeInfo> lineageEdges, + @Nonnull GraphFilters graphFilters, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { BoolQueryBuilder query = QueryBuilders.boolQuery(); if (lineageEdges.isEmpty()) { return query; @@ -328,43 +394,46 @@ public static QueryBuilder getQueryForLineage( if (startTimeMillis != null && endTimeMillis != null) { query.must(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis)); } else { - log.debug(String.format( - "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", - startTimeMillis, - endTimeMillis)); + log.debug( + String.format( + "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", + startTimeMillis, endTimeMillis)); } return query; } /** - * Adds an individual relationship edge to a running set of unique paths to each node in the graph. + * Adds an individual relationship edge to a running set of unique paths to each node in the + * graph. 
* - * Specifically, this method updates 'existingPaths', which is a map of an entity urn representing a node in the - * lineage graph to the full paths that can be traversed to reach it from a the origin node for which lineage - * was requested. + * <p>Specifically, this method updates 'existingPaths', which is a map of an entity urn + * representing a node in the lineage graph to the full paths that can be traversed to reach it + * from a the origin node for which lineage was requested. * - * This method strictly assumes that edges are being added IN ORDER, level-by-level working outwards from the originally - * requested source node. If edges are added to the path set in an out of order manner, then the paths to a given node - * may be partial / incomplete. + * <p>This method strictly assumes that edges are being added IN ORDER, level-by-level working + * outwards from the originally requested source node. If edges are added to the path set in an + * out of order manner, then the paths to a given node may be partial / incomplete. * - * Note that calling this method twice with the same edge is not safe. It will result in duplicate paths being appended - * into the list of paths to the provided child urn. + * <p>Note that calling this method twice with the same edge is not safe. It will result in + * duplicate paths being appended into the list of paths to the provided child urn. * - * @param existingPaths a running set of unique, uni-directional paths to each node in the graph starting from the original root node - * for which lineage was requested. - * @param parentUrn the "parent" node (or source node) in the edge to add. This is a logical source node in a uni-directional path from the source - * to the destination node. Note that this is NOT always the URN corresponding to the "source" field that is physically stored - * inside the Graph Store. - * @param childUrn the "child" node (or dest node) in the edge to add. This is a logical dest node in a uni-directional path from the - * source to the destination node. Note that this is NOT always the URN corresponding to the "destination" field that is - * physically stored inside the Graph Store. + * @param existingPaths a running set of unique, uni-directional paths to each node in the graph + * starting from the original root node for which lineage was requested. + * @param parentUrn the "parent" node (or source node) in the edge to add. This is a logical + * source node in a uni-directional path from the source to the destination node. Note that + * this is NOT always the URN corresponding to the "source" field that is physically stored + * inside the Graph Store. + * @param childUrn the "child" node (or dest node) in the edge to add. This is a logical dest node + * in a uni-directional path from the source to the destination node. Note that this is NOT + * always the URN corresponding to the "destination" field that is physically stored inside + * the Graph Store. */ @VisibleForTesting public static void addEdgeToPaths( - @Nonnull final Map<Urn, UrnArrayArray> existingPaths, - @Nonnull final Urn parentUrn, - @Nonnull final Urn childUrn) { + @Nonnull final Map<Urn, UrnArrayArray> existingPaths, + @Nonnull final Urn parentUrn, + @Nonnull final Urn childUrn) { // Collect all full-paths to this child node. This is what will be returned. 
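    // A small worked example with hypothetical urns A, B, C (a sketch of the contract
    // described in the javadoc above): if lineage was requested for urn A and
    //   existingPaths = { B -> [[A, B]] },
    // then addEdgeToPaths(existingPaths, B, C) extends every stored path to the parent B
    // with the child, giving
    //   existingPaths = { B -> [[A, B]], C -> [[A, B, C]] }.
    // Re-adding the same (B, C) edge would append a duplicate [A, B, C] path, which is
    // why calling this method twice with one edge is unsafe.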
UrnArrayArray pathsToParent = existingPaths.get(parentUrn); if (pathsToParent != null && pathsToParent.size() > 0) { @@ -388,16 +457,22 @@ public static void addEdgeToPaths( } } - // Given set of edges and the search response, extract all valid edges that originate from the input entityUrns + // Given set of edges and the search response, extract all valid edges that originate from the + // input entityUrns @WithSpan - private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> entityUrns, - @Nonnull SearchResponse searchResponse, Set<Pair<String, EdgeInfo>> validEdges, Set<Urn> visitedEntities, - int numHops, Map<Urn, UrnArrayArray> existingPaths) { + private static List<LineageRelationship> extractRelationships( + @Nonnull Set<Urn> entityUrns, + @Nonnull SearchResponse searchResponse, + Set<Pair<String, EdgeInfo>> validEdges, + Set<Urn> visitedEntities, + int numHops, + Map<Urn, UrnArrayArray> existingPaths) { final List<LineageRelationship> result = new LinkedList<>(); final SearchHit[] hits = searchResponse.getHits().getHits(); for (SearchHit hit : hits) { final Map<String, Object> document = hit.getSourceAsMap(); - final Urn sourceUrn = UrnUtils.getUrn(((Map<String, Object>) document.get(SOURCE)).get("urn").toString()); + final Urn sourceUrn = + UrnUtils.getUrn(((Map<String, Object>) document.get(SOURCE)).get("urn").toString()); final Urn destinationUrn = UrnUtils.getUrn(((Map<String, Object>) document.get(DESTINATION)).get("urn").toString()); final String type = document.get(RELATIONSHIP_TYPE).toString(); @@ -406,9 +481,11 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> final Number updatedOnNumber = (Number) document.getOrDefault(UPDATED_ON, null); final Long updatedOn = updatedOnNumber != null ? updatedOnNumber.longValue() : null; final String createdActorString = (String) document.getOrDefault(CREATED_ACTOR, null); - final Urn createdActor = createdActorString == null ? null : UrnUtils.getUrn(createdActorString); + final Urn createdActor = + createdActorString == null ? null : UrnUtils.getUrn(createdActorString); final String updatedActorString = (String) document.getOrDefault(UPDATED_ACTOR, null); - final Urn updatedActor = updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); + final Urn updatedActor = + updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); final Map<String, Object> properties; if (document.containsKey(PROPERTIES) && document.get(PROPERTIES) instanceof Map) { properties = (Map<String, Object>) document.get(PROPERTIES); @@ -422,9 +499,14 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> // Skip if already visited // Skip if edge is not a valid outgoing edge // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(destinationUrn) && validEdges.contains( - Pair.of(sourceUrn.getEntityType(), - new EdgeInfo(type, RelationshipDirection.OUTGOING, destinationUrn.getEntityType().toLowerCase())))) { + if (!visitedEntities.contains(destinationUrn) + && validEdges.contains( + Pair.of( + sourceUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.OUTGOING, + destinationUrn.getEntityType().toLowerCase())))) { visitedEntities.add(destinationUrn); // Append the edge to a set of unique graph paths. 
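          // (The visitedEntities guard above means each destination urn is handled at
          // most once here, so the unsafe "same edge twice" case called out in the
          // addEdgeToPaths javadoc cannot occur.)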
addEdgeToPaths(existingPaths, sourceUrn, destinationUrn); @@ -433,7 +515,9 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> type, destinationUrn, numHops, - existingPaths.getOrDefault(destinationUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. + existingPaths.getOrDefault( + destinationUrn, + new UrnArrayArray()), // Fetch the paths to the next level entity. createdOn, createdActor, updatedOn, @@ -448,21 +532,29 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> // Skip if already visited // Skip if edge is not a valid outgoing edge // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(sourceUrn) && validEdges.contains( - Pair.of(destinationUrn.getEntityType(), new EdgeInfo(type, RelationshipDirection.INCOMING, sourceUrn.getEntityType().toLowerCase())))) { + if (!visitedEntities.contains(sourceUrn) + && validEdges.contains( + Pair.of( + destinationUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.INCOMING, + sourceUrn.getEntityType().toLowerCase())))) { visitedEntities.add(sourceUrn); // Append the edge to a set of unique graph paths. addEdgeToPaths(existingPaths, destinationUrn, sourceUrn); - final LineageRelationship relationship = createLineageRelationship( - type, - sourceUrn, - numHops, - existingPaths.getOrDefault(sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. - createdOn, - createdActor, - updatedOn, - updatedActor, - isManual); + final LineageRelationship relationship = + createLineageRelationship( + type, + sourceUrn, + numHops, + existingPaths.getOrDefault( + sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); result.add(relationship); } } @@ -479,10 +571,13 @@ private static LineageRelationship createLineageRelationship( @Nullable final Urn createdActor, @Nullable final Long updatedOn, @Nullable final Urn updatedActor, - final boolean isManual - ) { + final boolean isManual) { final LineageRelationship relationship = - new LineageRelationship().setType(type).setEntity(entityUrn).setDegree(numHops).setPaths(paths); + new LineageRelationship() + .setType(type) + .setEntity(entityUrn) + .setDegree(numHops) + .setPaths(paths); if (createdOn != null) { relationship.setCreatedOn(createdOn); } @@ -507,18 +602,19 @@ private static BoolQueryBuilder getOutGoingEdgeQuery( outgoingEdgeQuery.must(buildUrnFilters(urns, SOURCE)); outgoingEdgeQuery.must(buildEdgeFilters(outgoingEdges)); outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + outgoingEdgeQuery.must( + buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return outgoingEdgeQuery; } private static BoolQueryBuilder getIncomingEdgeQuery( - @Nonnull List<Urn> urns, List<EdgeInfo> incomingEdges, - @Nonnull GraphFilters graphFilters) { + @Nonnull List<Urn> urns, List<EdgeInfo> incomingEdges, @Nonnull GraphFilters graphFilters) { BoolQueryBuilder incomingEdgeQuery = QueryBuilders.boolQuery(); incomingEdgeQuery.must(buildUrnFilters(urns, DESTINATION)); incomingEdgeQuery.must(buildEdgeFilters(incomingEdges)); incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + 
incomingEdgeQuery.must( + buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return incomingEdgeQuery; } @@ -530,16 +626,21 @@ private static UrnArray clonePath(final UrnArray basePath) { } } - private static QueryBuilder buildEntityTypesFilter(@Nonnull List<String> entityTypes, @Nonnull String prefix) { - return QueryBuilders.termsQuery(prefix + ".entityType", entityTypes.stream().map(Object::toString).collect(Collectors.toList())); + private static QueryBuilder buildEntityTypesFilter( + @Nonnull List<String> entityTypes, @Nonnull String prefix) { + return QueryBuilders.termsQuery( + prefix + ".entityType", + entityTypes.stream().map(Object::toString).collect(Collectors.toList())); } private static QueryBuilder buildUrnFilters(@Nonnull List<Urn> urns, @Nonnull String prefix) { - return QueryBuilders.termsQuery(prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); + return QueryBuilders.termsQuery( + prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); } private static QueryBuilder buildEdgeFilters(@Nonnull List<EdgeInfo> edgeInfos) { - return QueryBuilders.termsQuery("relationshipType", + return QueryBuilders.termsQuery( + "relationshipType", edgeInfos.stream().map(EdgeInfo::getType).distinct().collect(Collectors.toList())); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java index f8b0e8a291e7a..5d722a034fafc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.buildQuery; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipFilter; @@ -16,10 +19,6 @@ import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.reindex.BulkByScrollResponse; -import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.buildQuery; -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; - - @Slf4j @RequiredArgsConstructor public class ESGraphWriteDAO { @@ -36,8 +35,8 @@ public class ESGraphWriteDAO { * @param docId the ID of the document */ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { - final UpdateRequest updateRequest = new UpdateRequest( - indexConvention.getIndexName(INDEX_NAME), docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexConvention.getIndexName(INDEX_NAME), docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -56,15 +55,24 @@ public void deleteDocument(@Nonnull String docId) { bulkProcessor.add(deleteRequest); } - public BulkByScrollResponse deleteByQuery(@Nullable final String sourceType, @Nonnull final Filter sourceEntityFilter, - @Nullable final String destinationType, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + public BulkByScrollResponse deleteByQuery( + @Nullable final String sourceType, + @Nonnull final Filter sourceEntityFilter, + @Nullable final String destinationType, + @Nonnull final 
Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = - buildQuery(sourceType == null ? ImmutableList.of() : ImmutableList.of(sourceType), sourceEntityFilter, - destinationType == null ? ImmutableList.of() : ImmutableList.of(destinationType), destinationEntityFilter, - relationshipTypes, relationshipFilter); + buildQuery( + sourceType == null ? ImmutableList.of() : ImmutableList.of(sourceType), + sourceEntityFilter, + destinationType == null ? ImmutableList.of() : ImmutableList.of(destinationType), + destinationEntityFilter, + relationshipTypes, + relationshipFilter); - return bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) - .orElse(null); + return bulkProcessor + .deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) + .orElse(null); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index 5fdf4d45ffa3b..6c828c0e7c6ae 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -48,7 +48,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.index.query.QueryBuilders; - @Slf4j @RequiredArgsConstructor public class ElasticSearchGraphService implements GraphService, ElasticSearchIndexed { @@ -99,10 +98,7 @@ private String toDocument(@Nonnull final Edge edge) { throw new UnsupportedOperationException( String.format( "Tried setting properties on graph edge but property value type is not supported. Key: %s, Value: %s ", - entry.getKey(), - entry.getValue() - ) - ); + entry.getKey(), entry.getValue())); } } searchDocument.set("properties", propertiesObject); @@ -113,8 +109,11 @@ private String toDocument(@Nonnull final Edge edge) { private String toDocId(@Nonnull final Edge edge) { String rawDocId = - edge.getSource().toString() + DOC_DELIMETER + edge.getRelationshipType() + DOC_DELIMETER + edge.getDestination() - .toString(); + edge.getSource().toString() + + DOC_DELIMETER + + edge.getRelationshipType() + + DOC_DELIMETER + + edge.getDestination().toString(); try { byte[] bytesOfRawDocID = rawDocId.getBytes(StandardCharsets.UTF_8); @@ -160,48 +159,55 @@ public RelatedEntitiesResult findRelatedEntities( @Nonnull final RelationshipFilter relationshipFilter, final int offset, final int count) { - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); - String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? "destination" : "source"; - - SearchResponse response = _graphReadDAO.getSearchResponse( - sourceTypes, - sourceEntityFilter, - destinationTypes, - destinationEntityFilter, - relationshipTypes, - relationshipFilter, - offset, - count - ); + String destinationNode = + relationshipDirection == RelationshipDirection.OUTGOING ? 
"destination" : "source"; + + SearchResponse response = + _graphReadDAO.getSearchResponse( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + offset, + count); if (response == null) { return new RelatedEntitiesResult(offset, 0, 0, ImmutableList.of()); } int totalCount = (int) response.getHits().getTotalHits().value; - final List<RelatedEntity> relationships = Arrays.stream(response.getHits().getHits()) - .map(hit -> { - final String urnStr = - ((HashMap<String, String>) hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)).getOrDefault( - "urn", null); - final String relationshipType = (String) hit.getSourceAsMap().get("relationshipType"); - - if (urnStr == null || relationshipType == null) { - log.error(String.format( - "Found null urn string, relationship type, aspect name or path spec in Elastic index. " - + "urnStr: %s, relationshipType: %s", - urnStr, relationshipType)); - return null; - } - - return new RelatedEntity(relationshipType, urnStr); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + final List<RelatedEntity> relationships = + Arrays.stream(response.getHits().getHits()) + .map( + hit -> { + final String urnStr = + ((HashMap<String, String>) + hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)) + .getOrDefault("urn", null); + final String relationshipType = + (String) hit.getSourceAsMap().get("relationshipType"); + + if (urnStr == null || relationshipType == null) { + log.error( + String.format( + "Found null urn string, relationship type, aspect name or path spec in Elastic index. " + + "urnStr: %s, relationshipType: %s", + urnStr, relationshipType)); + return null; + } + + return new RelatedEntity(relationshipType, urnStr); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); return new RelatedEntitiesResult(offset, relationships.size(), totalCount, relationships); } @@ -209,22 +215,18 @@ public RelatedEntitiesResult findRelatedEntities( @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, GraphFilters graphFilters, int offset, - int count, int maxHops) { + int count, + int maxHops) { ESGraphQueryDAO.LineageResponse lineageResponse = _graphReadDAO.getLineage( - entityUrn, - direction, - graphFilters, - offset, - count, - maxHops, - null, - null); - return new EntityLineageResult().setRelationships( - new LineageRelationshipArray(lineageResponse.getLineageRelationships())) + entityUrn, direction, graphFilters, offset, count, maxHops, null, null); + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageResponse.getLineageRelationships())) .setStart(offset) .setCount(count) .setTotal(lineageResponse.getTotal()); @@ -233,10 +235,15 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, GraphFilters graphFilters, int offset, - int count, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { ESGraphQueryDAO.LineageResponse lineageResponse = 
_graphReadDAO.getLineage( entityUrn, @@ -247,8 +254,8 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi maxHops, startTimeMillis, endTimeMillis); - return new EntityLineageResult().setRelationships( - new LineageRelationshipArray(lineageResponse.getLineageRelationships())) + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageResponse.getLineageRelationships())) .setStart(offset) .setCount(count) .setTotal(lineageResponse.getTotal()); @@ -262,7 +269,9 @@ private Filter createUrnFilter(@Nonnull final Urn urn) { criterion.setField("urn"); criterion.setValue(urn.toString()); criterionArray.add(criterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criterionArray)))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criterionArray)))); return filter; } @@ -272,26 +281,16 @@ public void removeNode(@Nonnull final Urn urn) { Filter emptyFilter = new Filter().setOr(new ConjunctiveCriterionArray()); List<String> relationshipTypes = new ArrayList<>(); - RelationshipFilter outgoingFilter = new RelationshipFilter().setDirection(RelationshipDirection.OUTGOING); - RelationshipFilter incomingFilter = new RelationshipFilter().setDirection(RelationshipDirection.INCOMING); + RelationshipFilter outgoingFilter = + new RelationshipFilter().setDirection(RelationshipDirection.OUTGOING); + RelationshipFilter incomingFilter = + new RelationshipFilter().setDirection(RelationshipDirection.INCOMING); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - outgoingFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, outgoingFilter); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - incomingFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, incomingFilter); return; } @@ -305,13 +304,7 @@ public void removeEdgesFromNode( Filter emptyFilter = new Filter().setOr(new ConjunctiveCriterionArray()); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - relationshipFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, relationshipFilter); } @Override @@ -328,8 +321,11 @@ public void configure() { @Override public List<ReindexConfig> buildReindexConfigs() throws IOException { - return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME), - GraphRelationshipMappingsBuilder.getMappings(), Collections.emptyMap())); + return List.of( + _indexBuilder.buildReindexState( + _indexConvention.getIndexName(INDEX_NAME), + GraphRelationshipMappingsBuilder.getMappings(), + Collections.emptyMap())); } @Override @@ -340,7 +336,8 @@ public void reindexAll() { @VisibleForTesting @Override public void clear() { - _esBulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); + _esBulkProcessor.deleteByQuery( + QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java index cf97cf56023ad..21f2bf6c89204 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java @@ -5,11 +5,10 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; - @Slf4j public class GraphRelationshipMappingsBuilder { - private GraphRelationshipMappingsBuilder() { } + private GraphRelationshipMappingsBuilder() {} public static Map<String, Object> getMappings() { Map<String, Object> mappings = new HashMap<>(); @@ -27,19 +26,19 @@ private static Map<String, Object> getMappingsForKeyword() { private static Map<String, Object> getMappingsForEntity() { - Map<String, Object> mappings = ImmutableMap.<String, Object>builder() - .put("urn", getMappingsForKeyword()) - .put("entityType", getMappingsForKeyword()) - .build(); + Map<String, Object> mappings = + ImmutableMap.<String, Object>builder() + .put("urn", getMappingsForKeyword()) + .put("entityType", getMappingsForKeyword()) + .build(); return ImmutableMap.of("properties", mappings); } private static Map<String, Object> getMappingsForEdgeProperties() { - Map<String, Object> propertyMappings = ImmutableMap.<String, Object>builder() - .put("source", getMappingsForKeyword()) - .build(); + Map<String, Object> propertyMappings = + ImmutableMap.<String, Object>builder().put("source", getMappingsForKeyword()).build(); return ImmutableMap.of("properties", propertyMappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java index 1df938f902e0f..7ee84ce834cfa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java @@ -1,28 +1,33 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.*; + import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; -import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.*; - @Slf4j public class TimeFilterUtils { /** - * In order to filter for edges that fall into a specific filter window, we perform a range-overlap query. - * Note that both a start time and an end time must be provided in order to add the filters. + * In order to filter for edges that fall into a specific filter window, we perform a + * range-overlap query. Note that both a start time and an end time must be provided in order to + * add the filters. * - * A range overlap query compares 2 time windows for ANY overlap. This essentially equates to a union operation. - * Each window is characterized by 2 points in time: a start time (e.g. created time of the edge) and an end time - * (e.g. last updated time of an edge). + * <p>A range overlap query compares 2 time windows for ANY overlap. This essentially equates to a + * union operation. Each window is characterized by 2 points in time: a start time (e.g. created + * time of the edge) and an end time (e.g. last updated time of an edge). 
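For intuition: the range-overlap test this javadoc describes is the standard closed-interval overlap predicate. A minimal sketch in plain Java (names are illustrative, not part of this patch):

    // Two closed intervals [aStart, aEnd] and [bStart, bEnd] overlap exactly
    // when neither one ends before the other begins. The bool/range clauses
    // built further down encode this same condition against the index.
    static boolean windowsOverlap(long aStart, long aEnd, long bStart, long bEnd) {
      return aStart <= bEnd && bStart <= aEnd;
    }

For example, a filter window [10, 20] overlaps an edge window [15, 30], but not an edge window [25, 30].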
* * @param startTimeMillis the start of the time filter window * @param endTimeMillis the end of the time filter window */ - public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, final long endTimeMillis) { - log.debug(String.format("Adding edge time filters for start time: %s, end time: %s", startTimeMillis, endTimeMillis)); + public static QueryBuilder getEdgeTimeFilterQuery( + final long startTimeMillis, final long endTimeMillis) { + log.debug( + String.format( + "Adding edge time filters for start time: %s, end time: %s", + startTimeMillis, endTimeMillis)); /* * One of the following must be true in order for the edge to be returned (should = OR) * @@ -30,7 +35,7 @@ public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, fi * 2. The createdOn and updatedOn window does not exist on the edge at all (support legacy cases) * 3. Special lineage case: The edge is marked as a "manual" edge, meaning that the time filters should NOT be applied. */ - BoolQueryBuilder timeFilterQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder timeFilterQuery = QueryBuilders.boolQuery(); timeFilterQuery.should(buildTimeWindowFilter(startTimeMillis, endTimeMillis)); timeFilterQuery.should(buildTimestampsMissingFilter()); timeFilterQuery.should(buildManualLineageFilter()); @@ -38,61 +43,54 @@ public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, fi } /** - * Builds a filter that compares 2 windows on a timeline and returns true for any overlap. This logic - * is a bit tricky so change with caution. - * - * The first window comes from start time and end time provided by the user. - * The second window comes from the createdOn and updatedOn timestamps present on graph edges. + * Builds a filter that compares 2 windows on a timeline and returns true for any overlap. This + * logic is a bit tricky so change with caution. * - * Also accounts for the case where createdOn or updatedOn is MISSING, and in such cases performs - * a point overlap instead of a range overlap. + * <p>The first window comes from start time and end time provided by the user. The second window + * comes from the createdOn and updatedOn timestamps present on graph edges. * - * Range Examples: + * <p>Also accounts for the case where createdOn or updatedOn is MISSING, and in such cases + * performs a point overlap instead of a range overlap. * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + * <p>Range Examples: * - * = true + * <p>start time -> end time |-----| createdOn -> updatedOn |-----| * - * start time -> end time |------| - * createdOn -> updatedOn |--| + * <p>= true * - * = true + * <p>start time -> end time |------| createdOn -> updatedOn |--| * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + * <p>= true * - * = true + * <p>start time -> end time |-----| createdOn -> updatedOn |-----| * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + * <p>= true * - * = false + * <p>start time -> end time |-----| createdOn -> updatedOn |-----| * + * <p>= false * - * Point Examples: + * <p>Point Examples: * - * start time -> end time |-----| - * updatedOn | + * <p>start time -> end time |-----| updatedOn | * - * = true + * <p>= true * - * start time -> end time |-----| - * updatedOn | + * <p>start time -> end time |-----| updatedOn | * - * = false + * <p>= false * - * and same for createdOn. + * <p>and same for createdOn. 
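Since the three clauses attached to timeFilterQuery above are all `should` clauses on a single bool query (with no `must`/`filter` clauses), the edge is returned when any one of them matches. A condensed sketch of the resulting predicate, with hypothetical helper booleans standing in for the three sub-queries:

    // A bool query containing only `should` clauses matches when at least
    // one clause matches, i.e. the three cases combine as a logical OR.
    static boolean edgeVisible(
        boolean overlapsWindow, boolean timestampsMissing, boolean manualEdge) {
      return overlapsWindow || timestampsMissing || manualEdge;
    }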
* - * Assumptions are that startTimeMillis is always before or equal to endTimeMillis, - * and createdOn is always before or equal to updatedOn. + * <p>Assumptions are that startTimeMillis is always before or equal to endTimeMillis, and + * createdOn is always before or equal to updatedOn. * * @param startTimeMillis the start time of the window in milliseconds * @param endTimeMillis the end time of the window in milliseconds - * * @return Query Builder with time window filters appended. */ - private static QueryBuilder buildTimeWindowFilter(final long startTimeMillis, final long endTimeMillis) { + private static QueryBuilder buildTimeWindowFilter( + final long startTimeMillis, final long endTimeMillis) { final BoolQueryBuilder timeWindowQuery = QueryBuilders.boolQuery(); /* @@ -107,12 +105,14 @@ private static QueryBuilder buildTimeWindowFilter(final long startTimeMillis, fi // Build filter comparing createdOn time to startTime->endTime window. BoolQueryBuilder createdOnFilter = QueryBuilders.boolQuery(); createdOnFilter.must(QueryBuilders.existsQuery(CREATED_ON)); - createdOnFilter.must(QueryBuilders.rangeQuery(CREATED_ON).gte(startTimeMillis).lte(endTimeMillis)); + createdOnFilter.must( + QueryBuilders.rangeQuery(CREATED_ON).gte(startTimeMillis).lte(endTimeMillis)); // Build filter comparing updatedOn time to startTime->endTime window. BoolQueryBuilder updatedOnFilter = QueryBuilders.boolQuery(); updatedOnFilter.must(QueryBuilders.existsQuery(UPDATED_ON)); - updatedOnFilter.must(QueryBuilders.rangeQuery(UPDATED_ON).gte(startTimeMillis).lte(endTimeMillis)); + updatedOnFilter.must( + QueryBuilders.rangeQuery(UPDATED_ON).gte(startTimeMillis).lte(endTimeMillis)); // Now - OR the 2 point comparison conditions together. timeWindowQuery.should(createdOnFilter); @@ -141,5 +141,5 @@ private static QueryBuilder buildManualLineageFilter() { return QueryBuilders.termQuery(String.format("%s.%s", PROPERTIES, SOURCE), UI); } - private TimeFilterUtils() { } + private TimeFilterUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index ac57fb7db2b78..217d54c5c0b0f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -56,7 +56,6 @@ import org.neo4j.driver.exceptions.Neo4jException; import org.neo4j.driver.types.Relationship; - @Slf4j public class Neo4jGraphService implements GraphService { @@ -69,7 +68,10 @@ public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driv this(lineageRegistry, driver, SessionConfig.defaultConfig()); } - public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driver driver, @Nonnull SessionConfig sessionConfig) { + public Neo4jGraphService( + @Nonnull LineageRegistry lineageRegistry, + @Nonnull Driver driver, + @Nonnull SessionConfig sessionConfig) { this._lineageRegistry = lineageRegistry; this._driver = driver; this._sessionConfig = sessionConfig; @@ -83,22 +85,24 @@ public LineageRegistry getLineageRegistry() { @Override public void addEdge(@Nonnull final Edge edge) { - log.debug(String.format("Adding Edge source: %s, destination: %s, type: %s", - edge.getSource(), - edge.getDestination(), - edge.getRelationshipType())); + log.debug( + String.format( + "Adding Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), 
edge.getRelationshipType())); final String sourceType = edge.getSource().getEntityType(); final String destinationType = edge.getDestination().getEntityType(); final String sourceUrn = edge.getSource().toString(); final String destinationUrn = edge.getDestination().toString(); - // Introduce startUrn, endUrn for real source node and destination node without consider direct or indirect pattern match + // Introduce startUrn, endUrn for real source node and destination node without consider direct + // or indirect pattern match String endUrn = destinationUrn; String startUrn = sourceUrn; String endType = destinationType; String startType = sourceType; - // Extra relationship typename start with r_ for direct-outgoing-downstream/indirect-incoming-upstream relationships + // Extra relationship typename start with r_ for + // direct-outgoing-downstream/indirect-incoming-upstream relationships String reverseRelationshipType = "r_" + edge.getRelationshipType(); if (isSourceDestReversed(sourceType, edge.getRelationshipType())) { @@ -117,10 +121,23 @@ public void addEdge(@Nonnull final Edge edge) { // Add/Update relationship final String mergeRelationshipTemplate = "MATCH (source:%s {urn: '%s'}),(destination:%s {urn: '%s'}) MERGE (source)-[r:%s]->(destination) "; - String statement = String.format(mergeRelationshipTemplate, sourceType, sourceUrn, destinationType, destinationUrn, - edge.getRelationshipType()); - - String statementR = String.format(mergeRelationshipTemplate, startType, startUrn, endType, endUrn, reverseRelationshipType); + String statement = + String.format( + mergeRelationshipTemplate, + sourceType, + sourceUrn, + destinationType, + destinationUrn, + edge.getRelationshipType()); + + String statementR = + String.format( + mergeRelationshipTemplate, + startType, + startUrn, + endType, + endUrn, + reverseRelationshipType); // Add/Update relationship properties String setCreatedOnTemplate; @@ -152,20 +169,23 @@ public void addEdge(@Nonnull final Edge edge) { Set.of("createdOn", "createdActor", "updatedOn", "updatedActor", "startUrn", "endUrn"); if (preservedKeySet.contains(entry.getKey())) { throw new UnsupportedOperationException( - String.format("Tried setting properties on graph edge but property key is preserved. Key: %s", + String.format( + "Tried setting properties on graph edge but property key is preserved. Key: %s", entry.getKey())); } if (entry.getValue() instanceof String) { setPropertyTemplate = String.format("r.%s = '%s'", entry.getKey(), entry.getValue()); propertiesTemplateJoiner.add(setPropertyTemplate); } else { - throw new UnsupportedOperationException(String.format( - "Tried setting properties on graph edge but property value type is not supported. Key: %s, Value: %s ", - entry.getKey(), entry.getValue())); + throw new UnsupportedOperationException( + String.format( + "Tried setting properties on graph edge but property value type is not supported. 
Key: %s, Value: %s ", + entry.getKey(), entry.getValue())); } } } - final String setStartEndUrnTemplate = String.format("r.startUrn = '%s', r.endUrn = '%s'", startUrn, endUrn); + final String setStartEndUrnTemplate = + String.format("r.startUrn = '%s', r.endUrn = '%s'", startUrn, endUrn); propertiesTemplateJoiner.add(setStartEndUrnTemplate); if (!StringUtils.isEmpty(propertiesTemplateJoiner.toString())) { statementR = String.format("%s SET %s", statementR, propertiesTemplateJoiner); @@ -184,8 +204,9 @@ public void upsertEdge(final Edge edge) { @Override public void removeEdge(final Edge edge) { log.debug( - String.format("Deleting Edge source: %s, destination: %s, type: %s", edge.getSource(), edge.getDestination(), - edge.getRelationshipType())); + String.format( + "Deleting Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); final String sourceType = edge.getSource().getEntityType(); final String destinationType = edge.getDestination().getEntityType(); @@ -208,11 +229,24 @@ public void removeEdge(final Edge edge) { final List<Statement> statements = new ArrayList<>(); // DELETE relationship - final String mergeRelationshipTemplate = "MATCH (source:%s {urn: '%s'})-[r:%s]->(destination:%s {urn: '%s'}) DELETE r"; + final String mergeRelationshipTemplate = + "MATCH (source:%s {urn: '%s'})-[r:%s]->(destination:%s {urn: '%s'}) DELETE r"; final String statement = - String.format(mergeRelationshipTemplate, sourceType, sourceUrn, edge.getRelationshipType(), destinationType, + String.format( + mergeRelationshipTemplate, + sourceType, + sourceUrn, + edge.getRelationshipType(), + destinationType, destinationUrn); - final String statementR = String.format(mergeRelationshipTemplate, startType, startUrn, reverseRelationshipType, endType, endUrn); + final String statementR = + String.format( + mergeRelationshipTemplate, + startType, + startUrn, + reverseRelationshipType, + endType, + endUrn); statements.add(buildStatement(statement, new HashMap<>())); statements.add(buildStatement(statementR, new HashMap<>())); @@ -222,49 +256,74 @@ public void removeEdge(final Edge edge) { @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops) { + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops) { return getLineage(entityUrn, direction, graphFilters, offset, count, maxHops, null, null); } @Nonnull @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops, @Nullable Long startTimeMillis, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { log.debug(String.format("Neo4j getLineage maxHops = %d", maxHops)); final var statementAndParams = - generateLineageStatementAndParameters(entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); + generateLineageStatementAndParameters( + entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); final var statement = statementAndParams.getFirst(); final var parameters = statementAndParams.getSecond(); List<Record> 
neo4jResult = - statement != null ? runQuery(buildStatement(statement, parameters)).list() : new ArrayList<>(); + statement != null + ? runQuery(buildStatement(statement, parameters)).list() + : new ArrayList<>(); LineageRelationshipArray relations = new LineageRelationshipArray(); - neo4jResult.stream().skip(offset).limit(count).forEach(item -> { - String urn = item.values().get(2).asNode().get("urn").asString(); - try { - final var path = item.get(1).asPath(); - final List<Urn> nodeListAsPath = StreamSupport.stream( - path.nodes().spliterator(), false) - .map(node -> createFromString(node.get("urn").asString())) - .collect(Collectors.toList()); - - final var firstRelationship = Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); - - relations.add(new LineageRelationship().setEntity(Urn.createFromString(urn)) - // although firstRelationship should never be absent, provide "" as fallback value - .setType(firstRelationship.map(Relationship::type).orElse("")) - .setDegree(path.length()) - .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); - } catch (URISyntaxException ignored) { - log.warn(String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); - } - }); - EntityLineageResult result = new EntityLineageResult().setStart(offset) + neo4jResult.stream() + .skip(offset) + .limit(count) + .forEach( + item -> { + String urn = item.values().get(2).asNode().get("urn").asString(); + try { + final var path = item.get(1).asPath(); + final List<Urn> nodeListAsPath = + StreamSupport.stream(path.nodes().spliterator(), false) + .map(node -> createFromString(node.get("urn").asString())) + .collect(Collectors.toList()); + + final var firstRelationship = + Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); + + relations.add( + new LineageRelationship() + .setEntity(Urn.createFromString(urn)) + // although firstRelationship should never be absent, provide "" as fallback + // value + .setType(firstRelationship.map(Relationship::type).orElse("")) + .setDegree(path.length()) + .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); + } catch (URISyntaxException ignored) { + log.warn( + String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); + } + }); + EntityLineageResult result = + new EntityLineageResult() + .setStart(offset) .setCount(relations.size()) .setRelationships(relations) .setTotal(neo4jResult.size()); @@ -277,7 +336,8 @@ private String getPathFindingLabelFilter(List<String> entityNames) { return entityNames.stream().map(x -> String.format("+%s", x)).collect(Collectors.joining("|")); } - private String getPathFindingRelationshipFilter(@Nonnull List<String> entityNames, @Nullable LineageDirection direction) { + private String getPathFindingRelationshipFilter( + @Nonnull List<String> entityNames, @Nullable LineageDirection direction) { // relationshipFilter supports mixing different directions for various relation types, // so simply transform entries lineage registry into format of filter final var filterComponents = new HashSet<String>(); @@ -293,8 +353,10 @@ private String getPathFindingRelationshipFilter(@Nonnull List<String> entityName } } else { // return disjunctive combination of edge types regardless of direction - for (final var direction1 : List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) { - for (final var edgeInfo : _lineageRegistry.getLineageRelationships(entityName, direction1)) { + for (final var direction1 : + List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) 
{ + for (final var edgeInfo : + _lineageRegistry.getLineageRelationships(entityName, direction1)) { filterComponents.add(edgeInfo.getType()); } } @@ -304,87 +366,111 @@ private String getPathFindingRelationshipFilter(@Nonnull List<String> entityName } private Pair<String, Map<String, Object>> generateLineageStatementAndParameters( - @Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int maxHops, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { - final var parameterMap = new HashMap<String, Object>(Map.of( - "urn", entityUrn.toString(), - "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), - "relationshipFilter", getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), direction), - "maxHops", maxHops - )); + final var parameterMap = + new HashMap<String, Object>( + Map.of( + "urn", entityUrn.toString(), + "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), + "relationshipFilter", + getPathFindingRelationshipFilter( + graphFilters.getAllowedEntityTypes(), direction), + "maxHops", maxHops)); if (startTimeMillis == null && endTimeMillis == null) { // if no time filtering required, simply find all expansion paths to other nodes - final var statement = "MATCH (a {urn: $urn}) " - + "CALL apoc.path.spanningTree(a, { " - + " relationshipFilter: $relationshipFilter, " - + " labelFilter: $labelFilter, " - + " minLevel: 1, " - + " maxLevel: $maxHops " - + "}) " - + "YIELD path " - + "WITH a, path AS path " - + "RETURN a, path, last(nodes(path));"; + final var statement = + "MATCH (a {urn: $urn}) " + + "CALL apoc.path.spanningTree(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD path " + + "WITH a, path AS path " + + "RETURN a, path, last(nodes(path));"; return Pair.of(statement, parameterMap); } else { // when needing time filtering, possibility on multiple paths between two // nodes must be considered, and we need to construct more complex query // use r_ edges until they are no longer useful - final var relationFilter = getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) - .replaceAll("(\\w+)", "r_$1"); + final var relationFilter = + getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) + .replaceAll("(\\w+)", "r_$1"); final var relationshipPattern = String.format( (direction == LineageDirection.UPSTREAM ? "<-[:%s*1..%d]-" : "-[:%s*1..%d]->"), - relationFilter, maxHops); + relationFilter, + maxHops); // two steps: // 1. find list of nodes reachable within maxHops // 2. 
find the shortest paths from start node to every other node in these nodes - // (note: according to the docs of shortestPath, WHERE conditions are applied during path exploration, not + // (note: according to the docs of shortestPath, WHERE conditions are applied during path + // exploration, not // after path exploration is done) - final var statement = "MATCH (a {urn: $urn}) " - + "CALL apoc.path.subgraphNodes(a, { " - + " relationshipFilter: $relationshipFilter, " - + " labelFilter: $labelFilter, " - + " minLevel: 1, " - + " maxLevel: $maxHops " - + "}) " - + "YIELD node AS b " - + "WITH a, b " - + "MATCH path = shortestPath((a)" + relationshipPattern + "(b)) " - + "WHERE a <> b " - + " AND ALL(rt IN relationships(path) WHERE " - + " (EXISTS(rt.source) AND rt.source = 'UI') OR " - + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " - + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " - + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " - + " ) " - + "RETURN a, path, b;"; + final var statement = + "MATCH (a {urn: $urn}) " + + "CALL apoc.path.subgraphNodes(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD node AS b " + + "WITH a, b " + + "MATCH path = shortestPath((a)" + + relationshipPattern + + "(b)) " + + "WHERE a <> b " + + " AND ALL(rt IN relationships(path) WHERE " + + " (EXISTS(rt.source) AND rt.source = 'UI') OR " + + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " + + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " + + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " + + " ) " + + "RETURN a, path, b;"; // provide dummy start/end time when not provided, so no need to // format clause differently if either of them is missing parameterMap.put("startTimeMillis", startTimeMillis == null ? 0 : startTimeMillis); - parameterMap.put("endTimeMillis", endTimeMillis == null ? System.currentTimeMillis() : endTimeMillis); + parameterMap.put( + "endTimeMillis", endTimeMillis == null ? 
System.currentTimeMillis() : endTimeMillis); return Pair.of(statement, parameterMap); } } @Nonnull - public RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> sourceTypes, - @Nonnull final Filter sourceEntityFilter, @Nullable final List<String> destinationTypes, - @Nonnull final Filter destinationEntityFilter, @Nonnull final List<String> relationshipTypes, - @Nonnull final RelationshipFilter relationshipFilter, final int offset, final int count) { - - log.debug(String.format("Finding related Neo4j nodes sourceType: %s, sourceEntityFilter: %s, destinationType: %s, ", - sourceTypes, sourceEntityFilter, destinationTypes) + String.format( - "destinationEntityFilter: %s, relationshipTypes: %s, relationshipFilter: %s, ", destinationEntityFilter, - relationshipTypes, relationshipFilter) + String.format("offset: %s, count: %s", offset, count)); + public RelatedEntitiesResult findRelatedEntities( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count) { - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { + log.debug( + String.format( + "Finding related Neo4j nodes sourceType: %s, sourceEntityFilter: %s, destinationType: %s, ", + sourceTypes, sourceEntityFilter, destinationTypes) + + String.format( + "destinationEntityFilter: %s, relationshipTypes: %s, relationshipFilter: %s, ", + destinationEntityFilter, relationshipTypes, relationshipFilter) + + String.format("offset: %s, count: %s", offset, count)); + + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } @@ -401,7 +487,9 @@ public RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> so matchTemplate = "MATCH (src %s)-[r%s %s]->(dest %s)%s"; } - final String returnNodes = String.format("RETURN dest, type(r)"); // Return both related entity and the relationship type. + final String returnNodes = + String.format( + "RETURN dest, type(r)"); // Return both related entity and the relationship type. final String returnCount = "RETURN count(*)"; // For getting the total results. 
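When only one time bound is supplied, the parameter defaulting above substitutes a dummy value for the missing one so the Cypher WHERE clause never needs a structural variant. In effect (a sketch restating the defaulting, not patch code):

    // Missing bounds default to "everything up to now", so the statement can
    // always use $startTimeMillis <= rt.createdOn <= $endTimeMillis, etc.
    long start = (startTimeMillis == null) ? 0L : startTimeMillis;
    long end = (endTimeMillis == null) ? System.currentTimeMillis() : endTimeMillis;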
String relationshipTypeFilter = ""; @@ -411,44 +499,70 @@ public RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> so String whereClause = computeEntityTypeWhereClause(sourceTypes, destinationTypes); - // Build Statement strings + // Build Statement strings String baseStatementString = - String.format(matchTemplate, srcCriteria, relationshipTypeFilter, edgeCriteria, destCriteria, whereClause); + String.format( + matchTemplate, + srcCriteria, + relationshipTypeFilter, + edgeCriteria, + destCriteria, + whereClause); log.info(baseStatementString); - final String resultStatementString = String.format("%s %s SKIP $offset LIMIT $count", baseStatementString, returnNodes); + final String resultStatementString = + String.format("%s %s SKIP $offset LIMIT $count", baseStatementString, returnNodes); final String countStatementString = String.format("%s %s", baseStatementString, returnCount); // Build Statements - final Statement resultStatement = new Statement(resultStatementString, ImmutableMap.of("offset", offset, "count", count)); - final Statement countStatement = new Statement(countStatementString, Collections.emptyMap()); + final Statement resultStatement = + new Statement(resultStatementString, ImmutableMap.of("offset", offset, "count", count)); + final Statement countStatement = new Statement(countStatementString, Collections.emptyMap()); // Execute Queries - final List<RelatedEntity> relatedEntities = runQuery(resultStatement).list(record -> - new RelatedEntity( - record.values().get(1).asString(), // Relationship Type - record.values().get(0).asNode().get("urn").asString())); // Urn TODO: Validate this works against Neo4j. + final List<RelatedEntity> relatedEntities = + runQuery(resultStatement) + .list( + record -> + new RelatedEntity( + record.values().get(1).asString(), // Relationship Type + record + .values() + .get(0) + .asNode() + .get("urn") + .asString())); // Urn TODO: Validate this works against Neo4j. 
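Put together, for a hypothetical outgoing DownstreamOf lookup with a single source-urn criterion, the paired statements built above render roughly as follows. All concrete values here are illustrative; the exact criteria text comes from filterToCriteria/toCriterionString:

    // Same MATCH pattern, two projections: one paginated page of results,
    // one total row count for RelatedEntitiesResult.total.
    String srcCriteria = "{urn:'urn:li:dataset:abc'}"; // hypothetical rendering
    String base = String.format(
        "MATCH (src %s)-[r%s %s]->(dest %s)%s",
        srcCriteria, ":DownstreamOf", "", "", " WHERE left(type(r), 2)<>'r_' ");
    String page = String.format("%s %s SKIP $offset LIMIT $count", base, "RETURN dest, type(r)");
    String total = String.format("%s %s", base, "RETURN count(*)");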
final int totalCount = runQuery(countStatement).single().get(0).asInt(); return new RelatedEntitiesResult(offset, relatedEntities.size(), totalCount, relatedEntities); } - private String computeEntityTypeWhereClause(@Nonnull final List<String> sourceTypes, - @Nonnull final List<String> destinationTypes) { + private String computeEntityTypeWhereClause( + @Nonnull final List<String> sourceTypes, @Nonnull final List<String> destinationTypes) { String whereClause = " WHERE left(type(r), 2)<>'r_' "; Boolean hasSourceTypes = sourceTypes != null && !sourceTypes.isEmpty(); Boolean hasDestTypes = destinationTypes != null && !destinationTypes.isEmpty(); if (hasSourceTypes && hasDestTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s AND %s", - sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR ")), - destinationTypes.stream().map(type -> "dest:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s AND %s", + sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR ")), + destinationTypes.stream() + .map(type -> "dest:" + type) + .collect(Collectors.joining(" OR "))); } else if (hasSourceTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s", - sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s", + sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR "))); } else if (hasDestTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s", - destinationTypes.stream().map(type -> "dest:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s", + destinationTypes.stream() + .map(type -> "dest:" + type) + .collect(Collectors.joining(" OR "))); } return whereClause; } @@ -468,28 +582,25 @@ public void removeNode(@Nonnull final Urn urn) { } /** - * Remove relationships and reverse relationships by check incoming/outgoing relationships. - * for example: - * a-[consumes]->b, a<-[r_consumes]-b - * a-[produces]->b, a-[r_produces]->b - * should not remove a<-[r_downstreamOf]-b when relationshipDirection equal incoming. - * should remove a-[consumes]->b, a<-[r_consumes]-b, a-[produces]->b, a-[r_produces]->b - * when relationshipDirection equal outgoing. + * Remove relationships and reverse relationships by check incoming/outgoing relationships. for + * example: a-[consumes]->b, a<-[r_consumes]-b a-[produces]->b, a-[r_produces]->b should not + * remove a<-[r_downstreamOf]-b when relationshipDirection equal incoming. should remove + * a-[consumes]->b, a<-[r_consumes]-b, a-[produces]->b, a-[r_produces]->b when + * relationshipDirection equal outgoing. 
* * @param urn Entity relationship type * @param relationshipTypes Entity relationship type * @param relationshipFilter Query relationship filter - * */ public void removeEdgesFromNode( @Nonnull final Urn urn, @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { - log.debug(String.format("Removing Neo4j edge types from node with urn: %s, types: %s, filter: %s", - urn, - relationshipTypes, - relationshipFilter)); + log.debug( + String.format( + "Removing Neo4j edge types from node with urn: %s, types: %s, filter: %s", + urn, relationshipTypes, relationshipFilter)); // also delete any relationship going to or from it final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); @@ -518,9 +629,13 @@ public void removeEdgesFromNode( relationshipTypeFilter = ""; if (!relationshipTypes.isEmpty()) { relationshipTypeFilter = - ":" + StringUtils.join(relationshipTypes, "|") + "|r_" + StringUtils.join(relationshipTypes, "|r_"); + ":" + + StringUtils.join(relationshipTypes, "|") + + "|r_" + + StringUtils.join(relationshipTypes, "|r_"); } - final String statementNoDirection = String.format(matchDeleteTemplate, relationshipTypeFilter); + final String statementNoDirection = + String.format(matchDeleteTemplate, relationshipTypeFilter); runQuery(buildStatement(statementNoDirection, params)).consume(); } else { for (Record typeDest : neo4jResult) { @@ -602,12 +717,13 @@ private synchronized ExecutionResult executeStatements(@Nonnull List<Statement> try (final Session session = _driver.session(_sessionConfig)) { do { try { - session.writeTransaction(tx -> { - for (Statement statement : statements) { - tx.run(statement.getCommandText(), statement.getParams()); - } - return 0; - }); + session.writeTransaction( + tx -> { + for (Statement statement : statements) { + tx.run(statement.getCommandText(), statement.getParams()); + } + return 0; + }); lastException = null; break; } catch (Neo4jException e) { @@ -618,7 +734,8 @@ private synchronized ExecutionResult executeStatements(@Nonnull List<Statement> if (lastException != null) { throw new RetryLimitReached( - "Failed to execute Neo4j write transaction after " + MAX_TRANSACTION_RETRY + " retries", lastException); + "Failed to execute Neo4j write transaction after " + MAX_TRANSACTION_RETRY + " retries", + lastException); } stopWatch.stop(); @@ -650,7 +767,8 @@ private static String toCriterionString(@Nonnull String key, @Nonnull Object val } /** - * Converts {@link RelationshipFilter} to neo4j query criteria, filter criterion condition requires to be EQUAL. + * Converts {@link RelationshipFilter} to neo4j query criteria, filter criterion condition + * requires to be EQUAL. * * @param filter Query relationship filter * @return Neo4j criteria string @@ -661,7 +779,8 @@ private static String relationshipFilterToCriteria(@Nonnull RelationshipFilter f } /** - * Converts {@link Filter} to neo4j query criteria, filter criterion condition requires to be EQUAL. + * Converts {@link Filter} to neo4j query criteria, filter criterion condition requires to be + * EQUAL. * * @param filter Query Filter * @return Neo4j criteria string @@ -674,9 +793,11 @@ private static String filterToCriteria(@Nonnull Filter filter) { private static String disjunctionToCriteria(final ConjunctiveCriterionArray disjunction) { if (disjunction.size() > 1) { // TODO: Support disjunctions (ORs). 
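For a single conjunction of EQUAL criteria, the conversion below produces a Neo4j inline property map. A hedged example of the expected shape, assuming toCriterionString (whose exact quoting is not fully shown here) renders entries as key:'value':

    // Filter: urn == 'urn:li:dataset:abc' AND origin == 'PROD'
    java.util.StringJoiner joiner = new java.util.StringJoiner(",", "{", "}");
    joiner.add("urn:'urn:li:dataset:abc'"); // one toCriterionString(key, value) entry
    joiner.add("origin:'PROD'");
    String criteria = joiner.length() <= 2 ? "" : joiner.toString();
    // -> {urn:'urn:li:dataset:abc',origin:'PROD'}

An empty criterion array leaves the joiner at "{}" (length 2), which renders as "", so the surrounding MATCH pattern stays syntactically valid with no criteria at all.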
- throw new UnsupportedOperationException("Neo4j query filter only supports 1 set of conjunction criteria"); + throw new UnsupportedOperationException( + "Neo4j query filter only supports 1 set of conjunction criteria"); } - final CriterionArray criterionArray = disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); + final CriterionArray criterionArray = + disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); return criterionToString(criterionArray); } @@ -688,20 +809,21 @@ private static String disjunctionToCriteria(final ConjunctiveCriterionArray disj */ @Nonnull private static String criterionToString(@Nonnull CriterionArray criterionArray) { - if (!criterionArray.stream().allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { - throw new RuntimeException("Neo4j query filter only support EQUAL condition " + criterionArray); + if (!criterionArray.stream() + .allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { + throw new RuntimeException( + "Neo4j query filter only support EQUAL condition " + criterionArray); } final StringJoiner joiner = new StringJoiner(",", "{", "}"); - criterionArray.forEach(criterion -> joiner.add(toCriterionString(criterion.getField(), criterion.getValue()))); + criterionArray.forEach( + criterion -> joiner.add(toCriterionString(criterion.getField(), criterion.getValue()))); return joiner.length() <= 2 ? "" : joiner.toString(); } - /** - * Gets Node based on Urn, if not exist, creates placeholder node. - */ + /** Gets Node based on Urn, if not exist, creates placeholder node. */ @Nonnull private Statement getOrInsertNode(@Nonnull Urn urn) { final String nodeType = urn.getEntityType(); @@ -721,32 +843,31 @@ public boolean supportsMultiHop() { } /** - * Reverse incoming/outgoing direction check by compare sourceType and relationshipType to LineageSpec. - * for example: - * sourceType: dataset, relationshipType: downstreamOf. - * downstreamOf relationship type and outgoing relationship direction for dataset from LineageSpec, - * is inside upstreamEdges. - * source(dataset) -[downstreamOf]-> dest means upstreamEdge for source(dataset) - * dest -[r_downstreamOf]-> source(dataset), need reverse source and dest - * * - * sourceType: datajob, relationshipType: produces. - * produces relationship type and outgoing relationship direction for datajob from LineageSpec, - * is inside downstreamEdges. - * source(datajob) -[produces]-> dest means downstreamEdge for source(datajob) - * source(dataset) -[r_produces]-> dest, do not need to reverse source and dest + * Reverse incoming/outgoing direction check by compare sourceType and relationshipType to + * LineageSpec. for example: sourceType: dataset, relationshipType: downstreamOf. downstreamOf + * relationship type and outgoing relationship direction for dataset from LineageSpec, is inside + * upstreamEdges. source(dataset) -[downstreamOf]-> dest means upstreamEdge for source(dataset) + * dest -[r_downstreamOf]-> source(dataset), need reverse source and dest * sourceType: datajob, + * relationshipType: produces. produces relationship type and outgoing relationship direction for + * datajob from LineageSpec, is inside downstreamEdges. 
source(datajob) -[produces]-> dest means + * downstreamEdge for source(datajob) source(dataset) -[r_produces]-> dest, do not need to reverse + * source and dest * * @param sourceType Entity type * @param relationshipType Entity relationship type - * */ - private boolean isSourceDestReversed(@Nonnull String sourceType, @Nonnull String relationshipType) { + private boolean isSourceDestReversed( + @Nonnull String sourceType, @Nonnull String relationshipType) { // Get real direction by check INCOMING/OUTGOING direction and RelationshipType LineageRegistry.LineageSpec sourceLineageSpec = getLineageRegistry().getLineageSpec(sourceType); if (sourceLineageSpec != null) { - List<LineageRegistry.EdgeInfo> upstreamCheck = sourceLineageSpec.getUpstreamEdges() - .stream() - .filter(t -> t.getDirection() == RelationshipDirection.OUTGOING && t.getType().equals(relationshipType)) - .collect(Collectors.toList()); + List<LineageRegistry.EdgeInfo> upstreamCheck = + sourceLineageSpec.getUpstreamEdges().stream() + .filter( + t -> + t.getDirection() == RelationshipDirection.OUTGOING + && t.getType().equals(relationshipType)) + .collect(Collectors.toList()); if (!upstreamCheck.isEmpty() || sourceType.equals("schemaField")) { return true; } @@ -754,8 +875,7 @@ private boolean isSourceDestReversed(@Nonnull String sourceType, @Nonnull String return false; } - protected static @Nullable - Urn createFromString(@Nonnull String rawUrn) { + protected static @Nullable Urn createFromString(@Nonnull String rawUrn) { try { return Urn.createFromString(rawUrn); } catch (URISyntaxException e) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java index 6985ceb00afd2..35d75de482007 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java @@ -40,24 +40,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class MostPopularSource implements RecommendationSource { - /** - * Entity Types that should be in scope for this type of recommendation. - */ - private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME - ); + /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set<String> SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -82,11 +81,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to determine whether DataHub usage index exists"); } @@ -95,15 +99,15 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getMostPopular").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) .map(Optional::get) @@ -122,13 +126,15 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for all entity view events query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); source.query(query); // Find the entities with the most views - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT * 2); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + .size(MAX_CONTENT * 2); source.aggregation(aggregation); source.size(0); @@ -139,12 +145,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) { Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) || 
!RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { + if (EntityUtils.checkIfRemoved(_entityService, entity) + || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { return Optional.empty(); } - return Optional.of(new RecommendationContent().setEntity(entity) - .setValue(entityUrn) - .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); + return Optional.of( + new RecommendationContent() + .setEntity(entity) + .setValue(entityUrn) + .setParams( + new RecommendationParams() + .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java index dc30d4c80abc0..0815ffadd05c1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java @@ -41,24 +41,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class RecentlyEditedSource implements RecommendationSource { - /** - * Entity Types that should be in scope for this type of recommendation. - */ - private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME - ); + /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set<String> SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -83,11 +82,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -96,18 +100,19 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyEdited").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) - .map(Optional::get).limit(MAX_CONTENT) + .map(Optional::get) + .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { log.error("Search query to get most recently edited entities failed", e); @@ -122,16 +127,19 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity action events query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_ACTION_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_ACTION_EVENT.getType())); source.query(query); // Find the entity with the largest last viewed timestamp String lastViewed = "last_viewed"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT) - .order(BucketOrder.aggregation(lastViewed, false)) - .subAggregation(AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + 
        .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false))
+            .size(MAX_CONTENT)
+            .order(BucketOrder.aggregation(lastViewed, false))
+            .subAggregation(
+                AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP));
     source.aggregation(aggregation);
     source.size(0);
 
@@ -142,13 +150,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
 
   private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) {
     Urn entity = UrnUtils.getUrn(entityUrn);
-    if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
+    if (EntityUtils.checkIfRemoved(_entityService, entity)
+        || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
       return Optional.empty();
     }
 
-    return Optional.of(new RecommendationContent()
-        .setEntity(entity)
-        .setValue(entityUrn)
-        .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
+    return Optional.of(
+        new RecommendationContent()
+            .setEntity(entity)
+            .setValue(entityUrn)
+            .setParams(
+                new RecommendationParams()
+                    .setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java
index 0836c569ed5d1..47ffebee2e947 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java
@@ -41,24 +41,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class RecentlyViewedSource implements RecommendationSource {
-  /**
-   * Entity Types that should be in scope for this type of recommendation.
-   */
-  private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME,
-      Constants.DATA_FLOW_ENTITY_NAME,
-      Constants.DATA_JOB_ENTITY_NAME,
-      Constants.CONTAINER_ENTITY_NAME,
-      Constants.DASHBOARD_ENTITY_NAME,
-      Constants.CHART_ENTITY_NAME,
-      Constants.ML_MODEL_ENTITY_NAME,
-      Constants.ML_FEATURE_ENTITY_NAME,
-      Constants.ML_MODEL_GROUP_ENTITY_NAME,
-      Constants.ML_FEATURE_TABLE_ENTITY_NAME
-  );
+  /** Entity Types that should be in scope for this type of recommendation. */
+  private static final Set<String> SUPPORTED_ENTITY_TYPES =
+      ImmutableSet.of(
+          Constants.DATASET_ENTITY_NAME,
+          Constants.DATA_FLOW_ENTITY_NAME,
+          Constants.DATA_JOB_ENTITY_NAME,
+          Constants.CONTAINER_ENTITY_NAME,
+          Constants.DASHBOARD_ENTITY_NAME,
+          Constants.CHART_ENTITY_NAME,
+          Constants.ML_MODEL_ENTITY_NAME,
+          Constants.ML_FEATURE_ENTITY_NAME,
+          Constants.ML_MODEL_GROUP_ENTITY_NAME,
+          Constants.ML_FEATURE_TABLE_ENTITY_NAME);
+
   private final RestHighLevelClient _searchClient;
   private final IndexConvention _indexConvention;
   private final EntityService _entityService;
@@ -83,11 +82,16 @@ public RecommendationRenderType getRenderType() {
   }
 
   @Override
-  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
+  public boolean isEligible(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     boolean analyticsEnabled = false;
     try {
-      analyticsEnabled = _searchClient.indices()
-          .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT);
+      analyticsEnabled =
+          _searchClient
+              .indices()
+              .exists(
+                  new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)),
+                  RequestOptions.DEFAULT);
     } catch (IOException e) {
       log.error("Failed to check whether DataHub usage index exists");
     }
@@ -96,18 +100,19 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo
 
   @Override
   @WithSpan
-  public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn,
-      @Nonnull RecommendationRequestContext requestContext) {
+  public List<RecommendationContent> getRecommendations(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     SearchRequest searchRequest = buildSearchRequest(userUrn);
     try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyViewed").time()) {
-      final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT);
+      final SearchResponse searchResponse =
+          _searchClient.search(searchRequest, RequestOptions.DEFAULT);
       // extract results
       ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME);
-      return parsedTerms.getBuckets()
-          .stream()
+      return parsedTerms.getBuckets().stream()
           .map(bucket -> buildContent(bucket.getKeyAsString()))
           .filter(Optional::isPresent)
-          .map(Optional::get).limit(MAX_CONTENT)
+          .map(Optional::get)
+          .limit(MAX_CONTENT)
           .collect(Collectors.toList());
     } catch (Exception e) {
       log.error("Search query to get most recently viewed entities failed", e);
@@ -122,18 +127,23 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
     BoolQueryBuilder query = QueryBuilders.boolQuery();
     // Filter for the entity view events of the user requesting recommendation
     query.must(
-        QueryBuilders.termQuery(ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false), userUrn.toString()));
+        QueryBuilders.termQuery(
+            ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false),
+            userUrn.toString()));
     query.must(
-        QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType()));
+        QueryBuilders.termQuery(
+            DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType()));
     source.query(query);
 
     // Find the entity with the largest last viewed timestamp
     String lastViewed = "last_viewed";
-    AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME)
-        .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false))
-        .size(MAX_CONTENT)
-        .order(BucketOrder.aggregation(lastViewed, false))
-        .subAggregation(AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP));
+    AggregationBuilder aggregation =
+        AggregationBuilders.terms(ENTITY_AGG_NAME)
+            .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false))
+            .size(MAX_CONTENT)
+            .order(BucketOrder.aggregation(lastViewed, false))
+            .subAggregation(
+                AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP));
     source.aggregation(aggregation);
     source.size(0);
 
@@ -144,12 +154,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
 
   private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) {
     Urn entity = UrnUtils.getUrn(entityUrn);
-    if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
+    if (EntityUtils.checkIfRemoved(_entityService, entity)
+        || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
       return Optional.empty();
    }
-    return Optional.of(new RecommendationContent().setEntity(entity)
-        .setValue(entityUrn)
-        .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
+    return Optional.of(
+        new RecommendationContent()
+            .setEntity(entity)
+            .setValue(entityUrn)
+            .setParams(
+                new RecommendationParams()
+                    .setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java
index 75375df77ed6f..b862de320db36 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java
@@ -6,7 +6,6 @@ import java.time.temporal.TemporalUnit;
 import lombok.Data;
 
-
 @Data
 public class EntityLineageResultCacheKey {
   private final Urn sourceUrn;
@@ -15,17 +14,27 @@ public class EntityLineageResultCacheKey {
   private final Long endTimeMillis;
   private final Integer maxHops;
 
-  public EntityLineageResultCacheKey(Urn sourceUrn, LineageDirection direction, Long startTimeMillis,
-      Long endTimeMillis, Integer maxHops, TemporalUnit resolution) {
+  public EntityLineageResultCacheKey(
+      Urn sourceUrn,
+      LineageDirection direction,
+      Long startTimeMillis,
+      Long endTimeMillis,
+      Integer maxHops,
+      TemporalUnit resolution) {
     this.sourceUrn = sourceUrn;
     this.direction = direction;
     this.maxHops = maxHops;
     long endOffset = resolution.getDuration().getSeconds() * 1000;
     this.startTimeMillis =
-        startTimeMillis == null ? null : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli();
-    this.endTimeMillis = endTimeMillis == null ? null
-        : Instant.ofEpochMilli(endTimeMillis + endOffset).truncatedTo(resolution).toEpochMilli();
-
+        startTimeMillis == null
+            ? null
+            : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli();
+    this.endTimeMillis =
+        endTimeMillis == null
+            ? null
+            : Instant.ofEpochMilli(endTimeMillis + endOffset)
+                .truncatedTo(resolution)
+                .toEpochMilli();
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java
index 9b8e9bce7e670..f6358e4aeb207 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java
@@ -1,6 +1,8 @@
 package com.linkedin.metadata.search;
 
-import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration;
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags;
+
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -10,6 +12,7 @@
 import com.linkedin.data.template.LongMap;
 import com.linkedin.data.template.StringArray;
 import com.linkedin.metadata.Constants;
+import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration;
 import com.linkedin.metadata.graph.EntityLineageResult;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.graph.LineageDirection;
@@ -27,7 +30,6 @@
 import com.linkedin.metadata.search.utils.QueryUtils;
 import com.linkedin.metadata.search.utils.SearchUtils;
 import io.opentelemetry.extension.annotations.WithSpan;
-
 import java.net.URISyntaxException;
 import java.time.temporal.ChronoUnit;
 import java.util.Collections;
@@ -44,21 +46,17 @@
 import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-
 import lombok.RequiredArgsConstructor;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections.CollectionUtils;
 import org.springframework.cache.Cache;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags;
-
-
 @RequiredArgsConstructor
 @Slf4j
 public class LineageSearchService {
-  private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags()
+  private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS =
+      new SearchFlags()
           .setFulltext(false)
           .setMaxAggValues(20)
           .setSkipCache(false)
@@ -66,8 +64,7 @@ public class LineageSearchService {
           .setSkipHighlighting(true);
   private final SearchService _searchService;
   private final GraphService _graphService;
-  @Nullable
-  private final Cache cache;
+  @Nullable private final Cache cache;
   private final boolean cacheEnabled;
   private final SearchLineageCacheConfiguration cacheConfiguration;
 
@@ -75,20 +72,27 @@ public class LineageSearchService {
 
   private static final String DEGREE_FILTER = "degree";
   private static final String DEGREE_FILTER_INPUT = "degree.keyword";
-  private static final AggregationMetadata DEGREE_FILTER_GROUP = new AggregationMetadata().setName(DEGREE_FILTER)
-      .setDisplayName("Degree of Dependencies")
-      .setAggregations(new LongMap())
-      .setFilterValues(new FilterValueArray(ImmutableList.of(new FilterValue().setValue("1").setFacetCount(0),
-          new FilterValue().setValue("2").setFacetCount(0), new FilterValue().setValue("3+").setFacetCount(0))));
+  private static final AggregationMetadata DEGREE_FILTER_GROUP =
+      new AggregationMetadata()
+          .setName(DEGREE_FILTER)
+          .setDisplayName("Degree of Dependencies")
+          .setAggregations(new LongMap())
+          .setFilterValues(
+              new FilterValueArray(
+                  ImmutableList.of(
+                      new FilterValue().setValue("1").setFacetCount(0),
+                      new FilterValue().setValue("2").setFacetCount(0),
+                      new FilterValue().setValue("3+").setFacetCount(0))));
   private static final int MAX_RELATIONSHIPS = 1000000;
   private static final int MAX_TERMS = 50000;
 
-  private static final Set<String> PLATFORM_ENTITY_TYPES = ImmutableSet.of(
-      DATASET_ENTITY_NAME,
-      CHART_ENTITY_NAME,
-      DASHBOARD_ENTITY_NAME,
-      DATA_FLOW_ENTITY_NAME,
-      DATA_JOB_ENTITY_NAME);
+  private static final Set<String> PLATFORM_ENTITY_TYPES =
+      ImmutableSet.of(
+          DATASET_ENTITY_NAME,
+          CHART_ENTITY_NAME,
+          DASHBOARD_ENTITY_NAME,
+          DATA_FLOW_ENTITY_NAME,
+          DATA_JOB_ENTITY_NAME);
 
   /**
    * Gets a list of documents that match given search request that is related to the input entity
@@ -98,20 +102,32 @@ public class LineageSearchService {
    * @param entities list of entities to search (If empty, searches across all entities)
    * @param input the search input text
    * @param maxHops the maximum number of hops away to search for. If null, defaults to 1000
-   * @param inputFilters the request map with fields and values as filters to be applied to search hits
+   * @param inputFilters the request map with fields and values as filters to be applied to search
+   *     hits
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param from index to start the search from
    * @param size the number of search hits to return
-   * @return a {@link LineageSearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link LineageSearchResult} that contains a list of matched documents and related
+   *     search result metadata
    */
  @Nonnull
  @WithSpan
-  public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters,
-      @Nullable SortCriterion sortCriterion, int from, int size, @Nullable Long startTimeMillis,
-      @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags) {
-
-    final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS);
+  public LineageSearchResult searchAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nullable String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter inputFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
+      @Nullable SearchFlags searchFlags) {
+
+    final SearchFlags finalFlags =
+        applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS);
     long startTime = System.nanoTime();
     log.debug("Cache enabled {}, Input :{}:", cacheEnabled, input);
@@ -123,8 +139,9 @@
     }
 
     // Cache multihop result for faster performance
-    final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey(sourceUrn, direction, startTimeMillis,
-        endTimeMillis, maxHops, ChronoUnit.DAYS);
+    final EntityLineageResultCacheKey cacheKey =
+        new EntityLineageResultCacheKey(
+            sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS);
     CachedEntityLineageResult cachedLineageResult = null;
 
     if (cacheEnabled) {
@@ -139,12 +156,12 @@
     FreshnessStats freshnessStats = new FreshnessStats().setCached(Boolean.FALSE);
     if (cachedLineageResult == null || finalFlags.isSkipCache()) {
       lineageResult =
-          _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis,
-              endTimeMillis);
+          _graphService.getLineage(
+              sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis);
       if (cacheEnabled) {
         try {
-          cache.put(cacheKey,
-              new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()));
+          cache.put(
+              cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()));
         } catch (Exception e) {
           log.warn("Failed to add cacheKey {}", cacheKey, e);
         }
@@ -156,22 +173,36 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
       systemFreshness.put("LineageGraphCache", cachedLineageResult.getTimestamp());
       freshnessStats.setSystemFreshness(systemFreshness);
       // set up cache refill if needed
-      if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()) {
+      if (System.currentTimeMillis() - cachedLineageResult.getTimestamp()
+          > cacheConfiguration.getTTLMillis()) {
         log.info("Cached lineage entry for: {} is older than one day. Will refill.", sourceUrn);
         Integer finalMaxHops = maxHops;
-        this.cacheRefillExecutor.submit(() -> {
-          log.debug("Cache refill started.");
-          CachedEntityLineageResult reFetchLineageResult = cache.get(cacheKey, CachedEntityLineageResult.class);
-          if (reFetchLineageResult == null || System.currentTimeMillis() - reFetchLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()
-          ) {
-            // we have to refetch
-            EntityLineageResult result = _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, finalMaxHops, startTimeMillis, endTimeMillis);
-            cache.put(cacheKey, result);
-            log.debug("Refilled Cached lineage entry for: {}.", sourceUrn);
-          } else {
-            log.debug("Cache refill not needed. {}", System.currentTimeMillis() - reFetchLineageResult.getTimestamp());
-          }
-        });
+        this.cacheRefillExecutor.submit(
+            () -> {
+              log.debug("Cache refill started.");
+              CachedEntityLineageResult reFetchLineageResult =
+                  cache.get(cacheKey, CachedEntityLineageResult.class);
+              if (reFetchLineageResult == null
+                  || System.currentTimeMillis() - reFetchLineageResult.getTimestamp()
+                      > cacheConfiguration.getTTLMillis()) {
+                // we have to refetch
+                EntityLineageResult result =
+                    _graphService.getLineage(
+                        sourceUrn,
+                        direction,
+                        0,
+                        MAX_RELATIONSHIPS,
+                        finalMaxHops,
+                        startTimeMillis,
+                        endTimeMillis);
+                cache.put(cacheKey, result);
+                log.debug("Refilled Cached lineage entry for: {}.", sourceUrn);
+              } else {
+                log.debug(
+                    "Cache refill not needed. {}",
+                    System.currentTimeMillis() - reFetchLineageResult.getTimestamp());
+              }
+            });
       }
     }
 
@@ -179,68 +210,92 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
     LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult);
     lineageResult.setRelationships(updatedRelationships);
 
-    // Filter hopped result based on the set of entities to return and inputFilters before sending to search
+    // Filter hopped result based on the set of entities to return and inputFilters before sending
+    // to search
     List<LineageRelationship> lineageRelationships =
         filterRelationships(lineageResult, new HashSet<>(entities), inputFilters);
 
-    String lineageGraphInfo = String.format("Lineage Graph = time(ms):%s size:%s",
+    String lineageGraphInfo =
+        String.format(
+            "Lineage Graph = time(ms):%s size:%s",
             (System.nanoTime() - startTime) / (1000.0 * 1000.0), lineageRelationships.size());
     startTime = System.nanoTime();
     long numEntities = 0;
     String codePath = null;
     try {
       Filter reducedFilters =
-          SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT));
+          SearchUtils.removeCriteria(
+              inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT));
 
       if (canDoLightning(lineageRelationships, input, reducedFilters, sortCriterion)) {
         codePath = "lightning";
         // use lightning approach to return lineage search results
-        LineageSearchResult lineageSearchResult = getLightningSearchResult(lineageRelationships,
-            reducedFilters, from, size, new HashSet<>(entities));
+        LineageSearchResult lineageSearchResult =
+            getLightningSearchResult(
+                lineageRelationships, reducedFilters, from, size, new HashSet<>(entities));
         if (!lineageSearchResult.getEntities().isEmpty()) {
-          log.debug("Lightning Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString());
+          log.debug(
+              "Lightning Lineage entity result: {}",
+              lineageSearchResult.getEntities().get(0).toString());
         }
         numEntities = lineageSearchResult.getNumEntities();
         return lineageSearchResult;
       } else {
         codePath = "tortoise";
-        LineageSearchResult lineageSearchResult = getSearchResultInBatches(lineageRelationships, input,
-            reducedFilters, sortCriterion, from, size, finalFlags);
+        LineageSearchResult lineageSearchResult =
+            getSearchResultInBatches(
+                lineageRelationships, input, reducedFilters, sortCriterion, from, size, finalFlags);
        if (!lineageSearchResult.getEntities().isEmpty()) {
-          log.debug("Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString());
+          log.debug(
+              "Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString());
        }
        numEntities = lineageSearchResult.getNumEntities();
        return lineageSearchResult;
      }
    } finally {
-      log.info("{}; Lineage Search({}) = time(ms):{} size:{}", lineageGraphInfo, codePath,
-          (System.nanoTime() - startTime) / (1000.0 * 1000.0), numEntities);
+      log.info(
+          "{}; Lineage Search({}) = time(ms):{} size:{}",
+          lineageGraphInfo,
+          codePath,
+          (System.nanoTime() - startTime) / (1000.0 * 1000.0),
+          numEntities);
     }
   }
 
-
   @VisibleForTesting
-  boolean canDoLightning(List<LineageRelationship> lineageRelationships, String input, Filter inputFilters,
+  boolean canDoLightning(
+      List<LineageRelationship> lineageRelationships,
+      String input,
+      Filter inputFilters,
       SortCriterion sortCriterion) {
-    boolean simpleFilters = inputFilters == null || inputFilters.getOr() == null
-        || inputFilters.getOr()
-        .stream()
-        .allMatch(criterion -> criterion.getAnd()
-            .stream()
-            .allMatch(criterion1 -> "platform".equals(criterion1.getField()) || "origin".equals(criterion1.getField())
-            )
-        );
+    boolean simpleFilters =
+        inputFilters == null
+            || inputFilters.getOr() == null
+            || inputFilters.getOr().stream()
+                .allMatch(
+                    criterion ->
+                        criterion.getAnd().stream()
+                            .allMatch(
+                                criterion1 ->
+                                    "platform".equals(criterion1.getField())
+                                        || "origin".equals(criterion1.getField())));
     return (lineageRelationships.size() > cacheConfiguration.getLightningThreshold())
-        && input.equals("*") && simpleFilters && sortCriterion == null;
+        && input.equals("*")
+        && simpleFilters
+        && sortCriterion == null;
   }
 
   @VisibleForTesting
-  LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRelationships,
-      Filter inputFilters, int from, int size, Set<String> entityNames) {
+  LineageSearchResult getLightningSearchResult(
+      List<LineageRelationship> lineageRelationships,
+      Filter inputFilters,
+      int from,
+      int size,
+      Set<String> entityNames) {
 
     // Construct result objects
-    LineageSearchResult finalResult = new LineageSearchResult()
-        .setMetadata(new SearchResultMetadata());
+    LineageSearchResult finalResult =
+        new LineageSearchResult().setMetadata(new SearchResultMetadata());
     LineageSearchEntityArray lineageSearchEntityArray = new LineageSearchEntityArray();
     AggregationMetadata entityTypeAgg = constructAggMetadata("Type", "entity");
     AggregationMetadata platformTypeAgg = constructAggMetadata("Platform", "platform");
@@ -258,7 +313,7 @@ LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRe
     // environment
     int start = 0;
     int numElements = 0;
-    for (LineageRelationship relnship: lineageRelationships) {
+    for (LineageRelationship relnship : lineageRelationships) {
       Urn entityUrn = relnship.getEntity();
       String entityType = entityUrn.getEntityType();
       // Apply platform, entity types, and environment filters
@@ -274,16 +329,27 @@ LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRe
       Set<String> platformCriteriaValues = null;
       Set<String> originCriteriaValues = null;
       if (inputFilters != null && inputFilters.getOr() != null) {
-        platformCriteriaValues = inputFilters.getOr().stream().map(ConjunctiveCriterion::getAnd).flatMap(
-            CriterionArray::stream).filter(criterion -> "platform".equals(criterion.getField())).map(Criterion::getValue)
-            .collect(Collectors.toSet());
-        originCriteriaValues = inputFilters.getOr().stream().map(ConjunctiveCriterion::getAnd).flatMap(
-            CriterionArray::stream).filter(criterion -> "origin".equals(criterion.getField())).map(Criterion::getValue)
-            .collect(Collectors.toSet());
+        platformCriteriaValues =
+            inputFilters.getOr().stream()
+                .map(ConjunctiveCriterion::getAnd)
+                .flatMap(CriterionArray::stream)
+                .filter(criterion -> "platform".equals(criterion.getField()))
+                .map(Criterion::getValue)
+                .collect(Collectors.toSet());
+        originCriteriaValues =
+            inputFilters.getOr().stream()
+                .map(ConjunctiveCriterion::getAnd)
+                .flatMap(CriterionArray::stream)
+                .filter(criterion -> "origin".equals(criterion.getField()))
+                .map(Criterion::getValue)
+                .collect(Collectors.toSet());
       }
-      boolean isNotFiltered = (entityNames.isEmpty() || entityNames.contains(entityUrn.getEntityType()))
-          && (CollectionUtils.isEmpty(platformCriteriaValues) || (platform != null && platformCriteriaValues.contains(platform)))
-          && (CollectionUtils.isEmpty(originCriteriaValues) || (environment != null && originCriteriaValues.contains(environment)));
+      boolean isNotFiltered =
+          (entityNames.isEmpty() || entityNames.contains(entityUrn.getEntityType()))
+              && (CollectionUtils.isEmpty(platformCriteriaValues)
+                  || (platform != null && platformCriteriaValues.contains(platform)))
+              && (CollectionUtils.isEmpty(originCriteriaValues)
+                  || (environment != null && originCriteriaValues.contains(environment)));
 
       if (isNotFiltered) {
         start++;
@@ -297,53 +363,59 @@ LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRe
         }
 
         // entityType
-        entityTypeAggregations.compute(entityType,
-            (key, value) -> value == null ? 1L : ++value);
+        entityTypeAggregations.compute(entityType, (key, value) -> value == null ? 1L : ++value);
 
         // platform
         if (platform != null) {
-          platformTypeAggregations.compute(platform,
-              (key, value) -> value == null ? 1L : ++value);
+          platformTypeAggregations.compute(platform, (key, value) -> value == null ? 1L : ++value);
         }
 
         // environment
         if (environment != null) {
-          environmentAggregations.compute(environment,
-              (key, value) -> value == null ? 1L : ++value);
+          environmentAggregations.compute(
+              environment, (key, value) -> value == null ? 1L : ++value);
        }
      }
    }
 
     aggregationMetadataArray.add(DEGREE_FILTER_GROUP);
     if (platformTypeAggregations.keySet().size() > 0) {
-      for (Map.Entry<String, Long> platformCount: platformTypeAggregations.entrySet()) {
+      for (Map.Entry<String, Long> platformCount : platformTypeAggregations.entrySet()) {
         try {
-          platformTypeAgg.getFilterValues().add(new FilterValue()
-              .setValue(platformCount.getKey())
-              .setFacetCount(platformCount.getValue())
-              .setEntity(Urn.createFromString(platformCount.getKey()))
-          );
+          platformTypeAgg
+              .getFilterValues()
+              .add(
+                  new FilterValue()
+                      .setValue(platformCount.getKey())
+                      .setFacetCount(platformCount.getValue())
+                      .setEntity(Urn.createFromString(platformCount.getKey())));
           platformTypeAgg.getAggregations().put(platformCount.getKey(), platformCount.getValue());
         } catch (URISyntaxException e) {
           log.warn("Unexpected exception: {}", e.getMessage());
         }
       }
-      aggregationMetadataArray.add(platformTypeAgg);
+      aggregationMetadataArray.add(platformTypeAgg);
     }
     if (entityTypeAggregations.keySet().size() > 0) {
-      for (Map.Entry<String, Long> entityCount: entityTypeAggregations.entrySet()) {
-        entityTypeAgg.getFilterValues().add(new FilterValue()
-            .setValue(entityCount.getKey())
-            .setFacetCount(entityCount.getValue()));
+      for (Map.Entry<String, Long> entityCount : entityTypeAggregations.entrySet()) {
+        entityTypeAgg
+            .getFilterValues()
+            .add(
+                new FilterValue()
+                    .setValue(entityCount.getKey())
+                    .setFacetCount(entityCount.getValue()));
         entityTypeAgg.getAggregations().put(entityCount.getKey(), entityCount.getValue());
       }
       aggregationMetadataArray.add(entityTypeAgg);
     }
     if (environmentAggregations.keySet().size() > 0) {
-      for (Map.Entry<String, Long> entityCount: environmentAggregations.entrySet()) {
-        environmentAgg.getFilterValues().add(new FilterValue()
-            .setValue(entityCount.getKey())
-            .setFacetCount(entityCount.getValue()));
+      for (Map.Entry<String, Long> entityCount : environmentAggregations.entrySet()) {
+        environmentAgg
+            .getFilterValues()
+            .add(
+                new FilterValue()
+                    .setValue(entityCount.getKey())
+                    .setFacetCount(entityCount.getValue()));
         environmentAgg.getAggregations().put(entityCount.getKey(), entityCount.getValue());
       }
       aggregationMetadataArray.add(environmentAgg);
     }
@@ -374,18 +446,24 @@ private String getPlatform(String entityType, Urn entityUrn) {
     return platform;
   }
 
-  // Necessary so we don't filter out schemaField entities and so that we search to get the parent reference entity
-  private LineageRelationshipArray convertSchemaFieldRelationships(EntityLineageResult lineageResult) {
-    return lineageResult.getRelationships().stream().map(relationship -> {
-      if (relationship.getEntity().getEntityType().equals("schemaField")) {
-        Urn entity = getSchemaFieldReferenceUrn(relationship.getEntity());
-        relationship.setEntity(entity);
-      }
-      return relationship;
-    }).collect(Collectors.toCollection(LineageRelationshipArray::new));
+  // Necessary so we don't filter out schemaField entities and so that we search to get the parent
+  // reference entity
+  private LineageRelationshipArray convertSchemaFieldRelationships(
+      EntityLineageResult lineageResult) {
+    return lineageResult.getRelationships().stream()
+        .map(
+            relationship -> {
+              if (relationship.getEntity().getEntityType().equals("schemaField")) {
+                Urn entity = getSchemaFieldReferenceUrn(relationship.getEntity());
+                relationship.setEntity(entity);
+              }
+              return relationship;
+            })
+        .collect(Collectors.toCollection(LineageRelationshipArray::new));
   }
 
-  private Map<Urn, LineageRelationship> generateUrnToRelationshipMap(List<LineageRelationship> lineageRelationships) {
+  private Map<Urn, LineageRelationship> generateUrnToRelationshipMap(
+      List<LineageRelationship> lineageRelationships) {
     Map<Urn, LineageRelationship> urnToRelationship = new HashMap<>();
     for (LineageRelationship relationship : lineageRelationships) {
       LineageRelationship existingRelationship = urnToRelationship.get(relationship.getEntity());
@@ -401,32 +479,49 @@ private Map<Urn, LineageRelationship> generateUrnToRelationshipMap(List<LineageR
   }
 
   // Search service can only take up to 50K term filter, so query search service in batches
-  private LineageSearchResult getSearchResultInBatches(List<LineageRelationship> lineageRelationships,
-      @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, int from, int size,
+  private LineageSearchResult getSearchResultInBatches(
+      List<LineageRelationship> lineageRelationships,
+      @Nonnull String input,
+      @Nullable Filter inputFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
       @Nonnull SearchFlags searchFlags) {
-    final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS);
+    final SearchFlags finalFlags =
+        applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS);
     LineageSearchResult finalResult =
-        new LineageSearchResult().setEntities(new LineageSearchEntityArray(Collections.emptyList()))
+        new LineageSearchResult()
+            .setEntities(new LineageSearchEntityArray(Collections.emptyList()))
             .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))
             .setFrom(from)
             .setPageSize(size)
             .setNumEntities(0);
-    List<List<LineageRelationship>> batchedRelationships = Lists.partition(lineageRelationships, MAX_TERMS);
+    List<List<LineageRelationship>> batchedRelationships =
+        Lists.partition(lineageRelationships, MAX_TERMS);
     int queryFrom = from;
     int querySize = size;
     for (List<LineageRelationship> batch : batchedRelationships) {
-      List<String> entitiesToQuery = batch.stream()
-          .map(relationship -> relationship.getEntity().getEntityType())
-          .distinct()
-          .collect(Collectors.toList());
+      List<String> entitiesToQuery =
+          batch.stream()
+              .map(relationship -> relationship.getEntity().getEntityType())
+              .distinct()
+              .collect(Collectors.toList());
       Map<Urn, LineageRelationship> urnToRelationship = generateUrnToRelationshipMap(batch);
       Filter finalFilter = buildFilter(urnToRelationship.keySet(), inputFilters);
 
-      LineageSearchResult resultForBatch = buildLineageSearchResult(
-          _searchService.searchAcrossEntities(entitiesToQuery, input, finalFilter, sortCriterion, queryFrom, querySize,
-              finalFlags), urnToRelationship);
+      LineageSearchResult resultForBatch =
+          buildLineageSearchResult(
+              _searchService.searchAcrossEntities(
+                  entitiesToQuery,
+                  input,
+                  finalFilter,
+                  sortCriterion,
+                  queryFrom,
+                  querySize,
+                  finalFlags),
+              urnToRelationship);
       queryFrom = Math.max(0, from - resultForBatch.getNumEntities());
       querySize = Math.max(0, size - resultForBatch.getEntities().size());
       finalResult = merge(finalResult, resultForBatch);
@@ -442,34 +537,44 @@ public static LineageSearchResult merge(LineageSearchResult one, LineageSearchRe
     finalResult.getEntities().addAll(two.getEntities());
     finalResult.setNumEntities(one.getNumEntities() + two.getNumEntities());
 
-    Map<String, AggregationMetadata> aggregations = one.getMetadata()
+    Map<String, AggregationMetadata> aggregations =
+        one.getMetadata().getAggregations().stream()
+            .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity()));
+    two.getMetadata()
         .getAggregations()
-        .stream()
-        .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity()));
-    two.getMetadata().getAggregations().forEach(metadata -> {
-      if (aggregations.containsKey(metadata.getName())) {
-        aggregations.put(metadata.getName(), SearchUtils.merge(aggregations.get(metadata.getName()), metadata));
-      } else {
-        aggregations.put(metadata.getName(), metadata);
-      }
-    });
-    finalResult.getMetadata().setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations)));
+        .forEach(
+            metadata -> {
+              if (aggregations.containsKey(metadata.getName())) {
+                aggregations.put(
+                    metadata.getName(),
+                    SearchUtils.merge(aggregations.get(metadata.getName()), metadata));
+              } else {
+                aggregations.put(metadata.getName(), metadata);
+              }
+            });
+    finalResult
+        .getMetadata()
+        .setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations)));
     return finalResult;
   }
 
   private Predicate<Integer> convertFilterToPredicate(List<String> degreeFilterValues) {
-    return degreeFilterValues.stream().map(value -> {
-      switch (value) {
-        case "1":
-          return (Predicate<Integer>) (Integer numHops) -> (numHops == 1);
-        case "2":
-          return (Predicate<Integer>) (Integer numHops) -> (numHops == 2);
-        case "3+":
-          return (Predicate<Integer>) (Integer numHops) -> (numHops > 2);
-        default:
-          throw new IllegalArgumentException(String.format("%s is not a valid filter value for degree filters", value));
-      }
-    }).reduce(x -> false, Predicate::or);
+    return degreeFilterValues.stream()
+        .map(
+            value -> {
+              switch (value) {
+                case "1":
+                  return (Predicate<Integer>) (Integer numHops) -> (numHops == 1);
+                case "2":
+                  return (Predicate<Integer>) (Integer numHops) -> (numHops == 2);
+                case "3+":
+                  return (Predicate<Integer>) (Integer numHops) -> (numHops > 2);
+                default:
+                  throw new IllegalArgumentException(
+                      String.format("%s is not a valid filter value for degree filters", value));
              }
            })
        .reduce(x -> false, Predicate::or);
   }
 
   private Urn getSchemaFieldReferenceUrn(Urn urn) {
@@ -484,24 +589,29 @@ private Urn getSchemaFieldReferenceUrn(Urn urn) {
     return urn;
   }
 
-  private List<LineageRelationship> filterRelationships(@Nonnull EntityLineageResult lineageResult,
-      @Nonnull Set<String> entities, @Nullable Filter inputFilters) {
-    Stream<LineageRelationship> relationshipsFilteredByEntities = lineageResult.getRelationships().stream();
+  private List<LineageRelationship> filterRelationships(
+      @Nonnull EntityLineageResult lineageResult,
+      @Nonnull Set<String> entities,
+      @Nullable Filter inputFilters) {
+    Stream<LineageRelationship> relationshipsFilteredByEntities =
+        lineageResult.getRelationships().stream();
     if (!entities.isEmpty()) {
-      relationshipsFilteredByEntities = relationshipsFilteredByEntities.filter(
-          relationship -> entities.contains(relationship.getEntity().getEntityType()));
+      relationshipsFilteredByEntities =
+          relationshipsFilteredByEntities.filter(
+              relationship -> entities.contains(relationship.getEntity().getEntityType()));
     }
     if (inputFilters != null && !CollectionUtils.isEmpty(inputFilters.getOr())) {
       ConjunctiveCriterion conjunctiveCriterion = inputFilters.getOr().get(0);
       if (conjunctiveCriterion.hasAnd()) {
-        List<String> degreeFilter = conjunctiveCriterion.getAnd()
-            .stream()
-            .filter(criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT))
-            .flatMap(c -> c.getValues().stream())
-            .collect(Collectors.toList());
+        List<String> degreeFilter =
+            conjunctiveCriterion.getAnd().stream()
+                .filter(criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT))
+                .flatMap(c -> c.getValues().stream())
+                .collect(Collectors.toList());
         if (!degreeFilter.isEmpty()) {
           Predicate<Integer> degreePredicate = convertFilterToPredicate(degreeFilter);
-          return relationshipsFilteredByEntities.filter(relationship -> degreePredicate.test(relationship.getDegree()))
+          return relationshipsFilteredByEntities
+              .filter(relationship -> degreePredicate.test(relationship.getDegree()))
               .collect(Collectors.toList());
         }
       }
@@ -510,9 +620,12 @@ private List<LineageRelationship> filterRelationships(@Nonnull EntityLineageResu
   }
 
   private Filter buildFilter(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) {
-    Criterion urnMatchCriterion = new Criterion().setField("urn")
-        .setValue("")
-        .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList())));
+    Criterion urnMatchCriterion =
+        new Criterion()
+            .setField("urn")
+            .setValue("")
+            .setValues(
+                new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList())));
     if (inputFilters == null) {
       return QueryUtils.newFilter(urnMatchCriterion);
     }
@@ -527,21 +640,27 @@ private Filter buildFilter(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters
     return QueryUtils.newFilter(urnMatchCriterion);
   }
 
-  private LineageSearchResult buildLineageSearchResult(@Nonnull SearchResult searchResult,
-      Map<Urn, LineageRelationship> urnToRelationship) {
-    AggregationMetadataArray aggregations = new AggregationMetadataArray(searchResult.getMetadata().getAggregations());
-    return new LineageSearchResult().setEntities(new LineageSearchEntityArray(searchResult.getEntities()
-        .stream()
-        .map(searchEntity -> buildLineageSearchEntity(searchEntity, urnToRelationship.get(searchEntity.getEntity())))
-        .collect(Collectors.toList())))
+  private LineageSearchResult buildLineageSearchResult(
+      @Nonnull SearchResult searchResult, Map<Urn, LineageRelationship> urnToRelationship) {
+    AggregationMetadataArray aggregations =
+        new AggregationMetadataArray(searchResult.getMetadata().getAggregations());
+    return new LineageSearchResult()
+        .setEntities(
+            new LineageSearchEntityArray(
+                searchResult.getEntities().stream()
+                    .map(
+                        searchEntity ->
+                            buildLineageSearchEntity(
+                                searchEntity, urnToRelationship.get(searchEntity.getEntity())))
+                    .collect(Collectors.toList())))
         .setMetadata(new SearchResultMetadata().setAggregations(aggregations))
        .setFrom(searchResult.getFrom())
        .setPageSize(searchResult.getPageSize())
        .setNumEntities(searchResult.getNumEntities());
   }
 
-  private LineageSearchEntity buildLineageSearchEntity(@Nonnull SearchEntity searchEntity,
-      @Nullable LineageRelationship lineageRelationship) {
+  private LineageSearchEntity buildLineageSearchEntity(
+      @Nonnull SearchEntity searchEntity, @Nullable LineageRelationship lineageRelationship) {
     LineageSearchEntity entity = new LineageSearchEntity(searchEntity.data());
     if (lineageRelationship != null) {
       entity.setPaths(lineageRelationship.getPaths());
@@ -558,34 +677,50 @@ private LineageSearchEntity buildLineageSearchEntity(@Nonnull SearchEntity searc
    * @param entities list of entities to search (If empty, searches across all entities)
    * @param input the search input text
    * @param maxHops the maximum number of hops away to search for. If null, defaults to 1000
-   * @param inputFilters the request map with fields and values as filters to be applied to search hits
+   * @param inputFilters the request map with fields and values as filters to be applied to search
+   *     hits
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param scrollId opaque scroll identifier to pass to search service
    * @param size the number of search hits to return
-   * @return a {@link LineageSearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link LineageSearchResult} that contains a list of matched documents and related
+   *     search result metadata
    */
   @Nonnull
   @WithSpan
-  public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters,
-      @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int size, @Nullable Long startTimeMillis,
-      @Nullable Long endTimeMillis, @Nonnull SearchFlags searchFlags) {
+  public LineageScrollResult scrollAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nullable String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter inputFilters,
+      @Nullable SortCriterion sortCriterion,
+      @Nullable String scrollId,
+      @Nonnull String keepAlive,
+      int size,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
+      @Nonnull SearchFlags searchFlags) {
     // Cache multihop result for faster performance
-    final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey(sourceUrn, direction, startTimeMillis,
-        endTimeMillis, maxHops, ChronoUnit.DAYS);
-    CachedEntityLineageResult cachedLineageResult = cacheEnabled
-        ? cache.get(cacheKey, CachedEntityLineageResult.class) : null;
+    final EntityLineageResultCacheKey cacheKey =
+        new EntityLineageResultCacheKey(
+            sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS);
+    CachedEntityLineageResult cachedLineageResult =
+        cacheEnabled ? cache.get(cacheKey, CachedEntityLineageResult.class) : null;
     EntityLineageResult lineageResult;
     if (cachedLineageResult == null) {
       maxHops = maxHops != null ? maxHops : 1000;
-      lineageResult = _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops,
-          startTimeMillis, endTimeMillis);
+      lineageResult =
+          _graphService.getLineage(
+              sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis);
       if (cacheEnabled) {
-        cache.put(cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()));
+        cache.put(
+            cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()));
      }
    } else {
      lineageResult = cachedLineageResult.getEntityLineageResult();
-      if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()) {
+      if (System.currentTimeMillis() - cachedLineageResult.getTimestamp()
+          > cacheConfiguration.getTTLMillis()) {
        log.warn("Cached lineage entry for: {} is older than one day.", sourceUrn);
      }
    }
@@ -594,39 +729,67 @@ public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
     LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult);
     lineageResult.setRelationships(updatedRelationships);
 
-    // Filter hopped result based on the set of entities to return and inputFilters before sending to search
+    // Filter hopped result based on the set of entities to return and inputFilters before sending
+    // to search
     List<LineageRelationship> lineageRelationships =
         filterRelationships(lineageResult, new HashSet<>(entities), inputFilters);
 
     Filter reducedFilters =
-        SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT));
-    return getScrollResultInBatches(lineageRelationships, input != null ? input : "*", reducedFilters, sortCriterion,
-        scrollId, keepAlive, size, searchFlags);
+        SearchUtils.removeCriteria(
+            inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT));
+    return getScrollResultInBatches(
+        lineageRelationships,
+        input != null ? input : "*",
+        reducedFilters,
+        sortCriterion,
+        scrollId,
+        keepAlive,
+        size,
+        searchFlags);
   }
 
   // Search service can only take up to 50K term filter, so query search service in batches
-  private LineageScrollResult getScrollResultInBatches(List<LineageRelationship> lineageRelationships,
-      @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId,
-      @Nonnull String keepAlive, int size, @Nonnull SearchFlags searchFlags) {
-    final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS);
+  private LineageScrollResult getScrollResultInBatches(
+      List<LineageRelationship> lineageRelationships,
+      @Nonnull String input,
+      @Nullable Filter inputFilters,
+      @Nullable SortCriterion sortCriterion,
+      @Nullable String scrollId,
+      @Nonnull String keepAlive,
+      int size,
+      @Nonnull SearchFlags searchFlags) {
+    final SearchFlags finalFlags =
+        applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS);
     LineageScrollResult finalResult =
-        new LineageScrollResult().setEntities(new LineageSearchEntityArray(Collections.emptyList()))
+        new LineageScrollResult()
+            .setEntities(new LineageSearchEntityArray(Collections.emptyList()))
            .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))
            .setPageSize(size)
            .setNumEntities(0);
-    List<List<LineageRelationship>> batchedRelationships = Lists.partition(lineageRelationships, MAX_TERMS);
+    List<List<LineageRelationship>> batchedRelationships =
+        Lists.partition(lineageRelationships, MAX_TERMS);
     int querySize = size;
     for (List<LineageRelationship> batch : batchedRelationships) {
-      List<String> entitiesToQuery = batch.stream()
-          .map(relationship -> relationship.getEntity().getEntityType())
-          .distinct()
-          .collect(Collectors.toList());
+      List<String> entitiesToQuery =
+          batch.stream()
+              .map(relationship -> relationship.getEntity().getEntityType())
+              .distinct()
+              .collect(Collectors.toList());
       Map<Urn, LineageRelationship> urnToRelationship = generateUrnToRelationshipMap(batch);
       Filter finalFilter = buildFilter(urnToRelationship.keySet(), inputFilters);
 
-      LineageScrollResult resultForBatch = buildLineageScrollResult(
-          _searchService.scrollAcrossEntities(entitiesToQuery, input, finalFilter, sortCriterion, scrollId, keepAlive, querySize,
-              finalFlags), urnToRelationship);
+      LineageScrollResult resultForBatch =
+          buildLineageScrollResult(
+              _searchService.scrollAcrossEntities(
+                  entitiesToQuery,
+                  input,
+                  finalFilter,
+                  sortCriterion,
+                  scrollId,
+                  keepAlive,
+                  querySize,
+                  finalFlags),
+              urnToRelationship);
       querySize = Math.max(0, size - resultForBatch.getEntities().size());
       finalResult = mergeScrollResult(finalResult, resultForBatch);
     }
@@ -635,16 +798,23 @@ private LineageScrollResult getScrollResultInBatches(List<LineageRelationship> l
     return finalResult.setPageSize(size);
   }
 
-  private LineageScrollResult buildLineageScrollResult(@Nonnull ScrollResult scrollResult,
-      Map<Urn, LineageRelationship> urnToRelationship) {
-    AggregationMetadataArray aggregations = new AggregationMetadataArray(scrollResult.getMetadata().getAggregations());
-    LineageScrollResult lineageScrollResult = new LineageScrollResult().setEntities(new LineageSearchEntityArray(scrollResult.getEntities()
-        .stream()
-        .map(searchEntity -> buildLineageSearchEntity(searchEntity, urnToRelationship.get(searchEntity.getEntity())))
-        .collect(Collectors.toList())))
-        .setMetadata(new SearchResultMetadata().setAggregations(aggregations))
-        .setPageSize(scrollResult.getPageSize())
-        .setNumEntities(scrollResult.getNumEntities());
+  private LineageScrollResult buildLineageScrollResult(
+      @Nonnull ScrollResult scrollResult, Map<Urn, LineageRelationship> urnToRelationship) {
+    AggregationMetadataArray aggregations =
+        new AggregationMetadataArray(scrollResult.getMetadata().getAggregations());
+    LineageScrollResult lineageScrollResult =
+        new LineageScrollResult()
+            .setEntities(
+                new LineageSearchEntityArray(
+                    scrollResult.getEntities().stream()
+                        .map(
+                            searchEntity ->
+                                buildLineageSearchEntity(
+                                    searchEntity, urnToRelationship.get(searchEntity.getEntity())))
+                        .collect(Collectors.toList())))
+            .setMetadata(new SearchResultMetadata().setAggregations(aggregations))
+            .setPageSize(scrollResult.getPageSize())
+            .setNumEntities(scrollResult.getNumEntities());
 
     if (scrollResult.getScrollId() != null) {
       lineageScrollResult.setScrollId(scrollResult.getScrollId());
@@ -653,23 +823,30 @@ private LineageScrollResult buildLineageScrollResult(@Nonnull ScrollResult scrol
   }
 
   @SneakyThrows
-  public static LineageScrollResult mergeScrollResult(LineageScrollResult one, LineageScrollResult two) {
+  public static LineageScrollResult mergeScrollResult(
+      LineageScrollResult one, LineageScrollResult two) {
     LineageScrollResult finalResult = one.clone();
     finalResult.getEntities().addAll(two.getEntities());
     finalResult.setNumEntities(one.getNumEntities() + two.getNumEntities());
 
-    Map<String, AggregationMetadata> aggregations = one.getMetadata()
+    Map<String, AggregationMetadata> aggregations =
+        one.getMetadata().getAggregations().stream()
+            .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity()));
+    two.getMetadata()
         .getAggregations()
-        .stream()
-        .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity()));
-    two.getMetadata().getAggregations().forEach(metadata -> {
-      if (aggregations.containsKey(metadata.getName())) {
-        aggregations.put(metadata.getName(), SearchUtils.merge(aggregations.get(metadata.getName()), metadata));
-      } else {
-        aggregations.put(metadata.getName(), metadata);
-      }
-    });
-    finalResult.getMetadata().setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations)));
+        .forEach(
+            metadata -> {
+              if (aggregations.containsKey(metadata.getName())) {
+                aggregations.put(
+                    metadata.getName(),
+                    SearchUtils.merge(aggregations.get(metadata.getName()), metadata));
+              } else {
+                aggregations.put(metadata.getName(), metadata);
+              }
+            });
+    finalResult
+        .getMetadata()
+        .setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations)));
     if (two.getScrollId() != null) {
       finalResult.setScrollId(two.getScrollId());
     }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java
index c99e4a94feb29..3bcc163613c5e 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.search;
 
+import static com.linkedin.metadata.utils.SearchUtil.*;
+
 import com.codahale.metrics.Timer;
 import com.linkedin.data.template.LongMap;
 import com.linkedin.metadata.query.SearchFlags;
@@ -21,9 +23,6 @@
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.utils.SearchUtil.*;
-
-
 @Slf4j
 public class SearchService {
   private final CachingEntitySearchService _cachingEntitySearchService;
@@ -41,36 +40,52 @@ public SearchService(
 
   public Map<String, Long> docCountPerEntity(@Nonnull List<String> entityNames) {
     return entityNames.stream()
-        .collect(Collectors.toMap(Function.identity(),
-            entityName -> _entityDocCountCache.getEntityDocCount().getOrDefault(entityName.toLowerCase(), 0L)));
+        .collect(
+            Collectors.toMap(
+                Function.identity(),
+                entityName ->
+                    _entityDocCountCache
+                        .getEntityDocCount()
+                        .getOrDefault(entityName.toLowerCase(), 0L)));
   }
 
   /**
-   * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the
-   * search hits and not the aggregation results.
+   * Gets a list of documents that match given search request. The results are aggregated and
+   * filters are applied to the search hits and not the aggregation results.
    *
    * @param entityNames names of the entities
    * @param input the search input text
-   * @param postFilters the request map with fields and values as filters to be applied to search hits
+   * @param postFilters the request map with fields and values as filters to be applied to search
+   *     hits
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param from index to start the search from
   * @param size the number of search hits to return
   * @param searchFlags optional set of flags to control search behavior
-   * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
    */
   @Nonnull
-  public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters,
-      @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags) {
+  public SearchResult search(
+      @Nonnull List<String> entityNames,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
+      @Nullable SearchFlags searchFlags) {
     List<String> entitiesToSearch = getEntitiesToSearch(entityNames);
     if (entitiesToSearch.isEmpty()) {
       // Optimization: If the indices are all empty, return empty result
       return getEmptySearchResult(from, size);
     }
     SearchResult result =
-        _cachingEntitySearchService.search(entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null);
+        _cachingEntitySearchService.search(
+            entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null);
 
     try {
-      return result.copy().setEntities(new SearchEntityArray(_searchRanker.rank(result.getEntities())));
+      return result
+          .copy()
+          .setEntities(new SearchEntityArray(_searchRanker.rank(result.getEntities())));
     } catch (Exception e) {
       log.error("Failed to rank: {}, exception - {}", result, e.toString());
       throw new RuntimeException("Failed to rank " + result.toString());
@@ -78,37 +93,55 @@ public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String in
   }
 
   @Nonnull
-  public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size,
+  public SearchResult searchAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
       @Nullable SearchFlags searchFlags) {
-    return searchAcrossEntities(entities, input, postFilters, sortCriterion, from, size, searchFlags, null);
+    return searchAcrossEntities(
+        entities, input, postFilters, sortCriterion, from, size, searchFlags, null);
   }
 
   /**
-   * Gets a list of documents that match given search request across multiple entities. The results are aggregated and filters are applied to the
-   * search hits and not the aggregation results.
+   * Gets a list of documents that match given search request across multiple entities. The results
+   * are aggregated and filters are applied to the search hits and not the aggregation results.
    *
   * @param entities list of entities to search (If empty, searches across all entities)
   * @param input the search input text
-   * @param postFilters the request map with fields and values as filters to be applied to search hits
+   * @param postFilters the request map with fields and values as filters to be applied to search
+   *     hits
   * @param sortCriterion {@link SortCriterion} to be applied to search results
   * @param from index to start the search from
   * @param size the number of search hits to return
   * @param searchFlags optional set of flags to control search behavior
   * @param facets list of facets we want aggregations for
-   * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
   */
  @Nonnull
-  public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size,
-      @Nullable SearchFlags searchFlags, @Nullable List<String> facets) {
-    log.debug(String.format(
-        "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s",
-        entities, input, postFilters, sortCriterion, from, size));
+  public SearchResult searchAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
+      @Nullable SearchFlags searchFlags,
+      @Nullable List<String> facets) {
+    log.debug(
+        String.format(
+            "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s",
+            entities, input, postFilters, sortCriterion, from, size));
     // DEPRECATED
-    // This is the legacy version of `_entityType`-- it operates as a special case and does not support ORs, Unions, etc.
-    // We will still provide it for backwards compatibility but when sending filters to the backend use the new
-    // filter name `_entityType` that we provide above. This is just provided to prevent a breaking change for old clients.
+    // This is the legacy version of `_entityType`-- it operates as a special case and does not
+    // support ORs, Unions, etc.
+    // We will still provide it for backwards compatibility but when sending filters to the backend
+    // use the new
+    // filter name `_entityType` that we provide above. This is just provided to prevent a breaking
+    // change for old clients.
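+    // Editor's illustration (not part of the upstream change): a legacy client that requests
+    // facets=["entity"] is served through the `_entityType` virtual field, and an "entity"-named
+    // aggregation is added back onto the result below so that old clients keep working unchanged.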
boolean aggregateByLegacyEntityFacet = facets != null && facets.contains("entity"); if (aggregateByLegacyEntityFacet) { facets = new ArrayList<>(facets); @@ -119,29 +152,49 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } - SearchResult result = _cachingEntitySearchService.search(nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); + SearchResult result = + _cachingEntitySearchService.search( + nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); if (facets == null || facets.contains("entity") || facets.contains("_entityType")) { - Optional<AggregationMetadata> entityTypeAgg = result.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals(INDEX_VIRTUAL_FIELD)).findFirst(); + Optional<AggregationMetadata> entityTypeAgg = + result.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals(INDEX_VIRTUAL_FIELD)) + .findFirst(); if (entityTypeAgg.isPresent()) { LongMap numResultsPerEntity = entityTypeAgg.get().getAggregations(); - result.getMetadata() + result + .getMetadata() .getAggregations() - .add(new AggregationMetadata().setName("entity") - .setDisplayName("Type") - .setAggregations(numResultsPerEntity) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(numResultsPerEntity, Collections.emptySet())))); + .add( + new AggregationMetadata() + .setName("entity") + .setDisplayName("Type") + .setAggregations(numResultsPerEntity) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + numResultsPerEntity, Collections.emptySet())))); } else { - // Should not happen due to the adding of the _entityType aggregation before, but if it does, best-effort count of entity types + // Should not happen due to the adding of the _entityType aggregation before, but if it + // does, best-effort count of entity types // Will not include entity types that had 0 results - Map<String, Long> numResultsPerEntity = result.getEntities().stream().collect(Collectors.groupingBy( - entity -> entity.getEntity().getEntityType(), Collectors.counting())); - result.getMetadata() + Map<String, Long> numResultsPerEntity = + result.getEntities().stream() + .collect( + Collectors.groupingBy( + entity -> entity.getEntity().getEntityType(), Collectors.counting())); + result + .getMetadata() .getAggregations() - .add(new AggregationMetadata().setName("entity") - .setDisplayName("Type") - .setAggregations(new LongMap(numResultsPerEntity)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(numResultsPerEntity, Collections.emptySet())))); + .add( + new AggregationMetadata() + .setName("entity") + .setDisplayName("Type") + .setAggregations(new LongMap(numResultsPerEntity)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + numResultsPerEntity, Collections.emptySet())))); } } return result; @@ -149,15 +202,18 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul /** * If no entities are provided, fallback to the list of non-empty entities + * * @param inputEntities the requested entities * @return some entities to search */ private List<String> getEntitiesToSearch(@Nonnull List<String> inputEntities) { List<String> nonEmptyEntities; - List<String> lowercaseEntities = inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); + List<String> 
lowercaseEntities = + inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); if (lowercaseEntities.isEmpty()) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(); } } else { @@ -168,35 +224,53 @@ private List<String> getEntitiesToSearch(@Nonnull List<String> inputEntities) { } /** - * Gets a list of documents that match given search request across multiple entities. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request across multiple entities. The results + * are aggregated and filters are applied to the search hits and not the aggregation results. * * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier for passing to search backend * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); List<String> entitiesToSearch = getEntitiesToSearch(entities); if (entitiesToSearch.isEmpty()) { // No indices with non-zero entries: skip querying and return empty result return getEmptyScrollResult(size); } - return _cachingEntitySearchService.scroll(entitiesToSearch, input, postFilters, sortCriterion, scrollId, keepAlive, size, searchFlags); + return _cachingEntitySearchService.scroll( + entitiesToSearch, + input, + postFilters, + sortCriterion, + scrollId, + keepAlive, + size, + searchFlags); } private static SearchResult getEmptySearchResult(int from, int size) { - return new SearchResult().setEntities(new SearchEntityArray()) + return new SearchResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setFrom(from) .setPageSize(size) @@ -204,7 +278,8 @@ private static SearchResult getEmptySearchResult(int 
from, int size) { } private static ScrollResult getEmptyScrollResult(int size) { - return new ScrollResult().setEntities(new SearchEntityArray()) + return new ScrollResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setPageSize(size) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java index cc7cd8ce28bae..0ecdb83ed20ee 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.cache; +import static com.datahub.util.RecordUtils.*; + import com.codahale.metrics.Timer; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; @@ -16,23 +18,17 @@ import lombok.Value; import org.springframework.cache.Cache; -import static com.datahub.util.RecordUtils.*; - - -/** - * Wrapper class to allow searching in batches and caching the results. - */ +/** Wrapper class to allow searching in batches and caching the results. */ @RequiredArgsConstructor public class CacheableSearcher<K> { - @Nonnull - private final Cache cache; + @Nonnull private final Cache cache; private final int batchSize; - // Function that executes search and retrieves the search result given the query batch (from, size) + // Function that executes search and retrieves the search result given the query batch (from, + // size) private final Function<QueryPagination, SearchResult> searcher; // Function that generates the cache key given the query batch (from, size) private final Function<QueryPagination, K> cacheKeyGenerator; - @Nullable - private final SearchFlags searchFlags; + @Nullable private final SearchFlags searchFlags; private final boolean enableCache; @Value @@ -42,9 +38,10 @@ public static class QueryPagination implements Serializable { } /** - * Get search results corresponding to the input "from" and "size" - * It goes through batches, starting from the beginning, until we get enough results to return - * This let's us have batches that return a variable number of results (we have no idea which batch the "from" "size" page corresponds to) + * Get search results corresponding to the input "from" and "size" It goes through batches, + * starting from the beginning, until we get enough results to return This let's us have batches + * that return a variable number of results (we have no idea which batch the "from" "size" page + * corresponds to) */ public SearchResult getSearchResults(int from, int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getSearchResults").time()) { @@ -67,14 +64,16 @@ public SearchResult getSearchResults(int from, int size) { resultEntities.addAll(batchedResult.getEntities().subList(startInBatch, endInBatch)); foundStart = true; } - // If current batch is smaller than the requested batch size, the next batch will return empty. + // If current batch is smaller than the requested batch size, the next batch will return + // empty. 
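// The paging loop above (continuing just below) walks fixed-size batches from offset zero and
// copies only the slice of each batch overlapping the requested [from, from + size) window; a
// short batch signals the end of results. A self-contained sketch of that window arithmetic,
// with an in-memory fetchBatch standing in for the cached searcher.

import java.util.ArrayList;
import java.util.List;
import java.util.stream.IntStream;

public class BatchPager {
  static final int BATCH_SIZE = 10;

  // Stand-in for CacheableSearcher#getBatch: returns up to BATCH_SIZE items.
  static List<Integer> fetchBatch(int batchId, int total) {
    int start = batchId * BATCH_SIZE;
    return IntStream.range(start, Math.min(start + BATCH_SIZE, total)).boxed().toList();
  }

  static List<Integer> page(int from, int size, int total) {
    List<Integer> page = new ArrayList<>();
    int resultsSoFar = 0;
    int batchId = 0;
    List<Integer> batch;
    do {
      batch = fetchBatch(batchId++, total);
      int batchStart = resultsSoFar; // global offset of this batch
      int startInBatch = Math.max(0, from - batchStart);
      int endInBatch = Math.min(batch.size(), from + size - batchStart);
      if (startInBatch < endInBatch) {
        page.addAll(batch.subList(startInBatch, endInBatch));
      }
      resultsSoFar += batch.size();
      if (batch.size() < BATCH_SIZE) {
        break; // current batch is short, so the next batch would be empty
      }
    } while (resultsSoFar < from + size);
    return page;
  }

  public static void main(String[] args) {
    System.out.println(page(15, 10, 23)); // items 15..22
  }
}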
if (currentBatchSize < batchSize) { break; } resultsSoFar += currentBatchSize; batchId++; } while (resultsSoFar < from + size); - return new SearchResult().setEntities(new SearchEntityArray(resultEntities)) + return new SearchResult() + .setEntities(new SearchEntityArray(resultEntities)) .setMetadata(batchedResult.getMetadata()) .setFrom(from) .setPageSize(size) @@ -93,13 +92,16 @@ private SearchResult getBatch(int batchId) { if (enableCache) { K cacheKey = cacheKeyGenerator.apply(batch); if ((searchFlags == null || !searchFlags.isSkipCache())) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "getBatch_cache_access").time(); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "getBatch_cache_access").time(); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(SearchResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time(); result = searcher.apply(batch); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java index 49fd3157437d1..9d4cb0c9ac613 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.search.cache; -import com.linkedin.metadata.graph.EntityLineageResult; -import java.io.Serializable; -import lombok.Data; - import static com.datahub.util.RecordUtils.*; import static com.linkedin.metadata.search.utils.GZIPUtil.*; +import com.linkedin.metadata.graph.EntityLineageResult; +import java.io.Serializable; +import lombok.Data; @Data public class CachedEntityLineageResult implements Serializable { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java index 95f208e185df1..2c99c71acf749 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.search.cache; -import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.google.common.base.Suppliers; +import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.utils.ConcurrencyUtils; @@ -13,24 +13,27 @@ import java.util.function.Supplier; import java.util.stream.Collectors; - public class EntityDocCountCache { private final EntityRegistry _entityRegistry; private final EntitySearchService _entitySearchService; private final Supplier<Map<String, Long>> entityDocCount; - public EntityDocCountCache(EntityRegistry entityRegistry, EntitySearchService entitySearchService, + public EntityDocCountCache( + 
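// getBatch above is a read-through cache: look up JSON by key, deserialize on a hit, and on a
// miss run the search and store the serialized result. A minimal sketch against Spring's Cache
// API; the Codec interface is a hypothetical stand-in for RecordUtils.toJsonString /
// toRecordTemplate.

import java.util.function.Supplier;
import org.springframework.cache.Cache;

public class ReadThrough {
  interface Codec<T> {
    String toJson(T value);

    T fromJson(String json);
  }

  static <T> T getOrCompute(Cache cache, Object key, Codec<T> codec, Supplier<T> loader) {
    String json = cache.get(key, String.class); // null on a cache miss
    if (json != null) {
      return codec.fromJson(json);
    }
    T result = loader.get(); // the expensive search
    cache.put(key, codec.toJson(result));
    return result;
  }
}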
EntityRegistry entityRegistry, + EntitySearchService entitySearchService, EntityDocCountCacheConfiguration config) { _entityRegistry = entityRegistry; _entitySearchService = entitySearchService; - entityDocCount = Suppliers.memoizeWithExpiration(this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); + entityDocCount = + Suppliers.memoizeWithExpiration( + this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); } private Map<String, Long> fetchEntityDocCount() { - return ConcurrencyUtils - .transformAndCollectAsync(_entityRegistry.getEntitySpecs().keySet(), - Function.identity(), - Collectors.toMap(Function.identity(), _entitySearchService::docCount)); + return ConcurrencyUtils.transformAndCollectAsync( + _entityRegistry.getEntitySpecs().keySet(), + Function.identity(), + Collectors.toMap(Function.identity(), _entitySearchService::docCount)); } @WithSpan @@ -39,8 +42,7 @@ public Map<String, Long> getEntityDocCount() { } public List<String> getNonEmptyEntities() { - return getEntityDocCount().entrySet() - .stream() + return getEntityDocCount().entrySet().stream() .filter(entry -> entry.getValue() > 0) .map(Map.Entry::getKey) .collect(Collectors.toList()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java index db414d70603dc..eaeae0cfc1556 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.client; +import static com.datahub.util.RecordUtils.toJsonString; +import static com.datahub.util.RecordUtils.toRecordTemplate; + import com.codahale.metrics.Timer; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; @@ -21,25 +24,23 @@ import org.springframework.cache.Cache; import org.springframework.cache.CacheManager; -import static com.datahub.util.RecordUtils.toJsonString; -import static com.datahub.util.RecordUtils.toRecordTemplate; - - @RequiredArgsConstructor public class CachingEntitySearchService { private static final String ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME = "entitySearchServiceSearch"; - private static final String ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME = "entitySearchServiceAutoComplete"; + private static final String ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME = + "entitySearchServiceAutoComplete"; private static final String ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME = "entitySearchServiceBrowse"; public static final String ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME = "entitySearchServiceScroll"; private final CacheManager cacheManager; - private final EntitySearchService entitySearchService; // This is a shared component, also used in search aggregation + private final EntitySearchService + entitySearchService; // This is a shared component, also used in search aggregation private final int batchSize; private final boolean enableCache; /** - * Retrieves cached search results. If the query has been cached, this will return quickly. If not, a full - * search request will be made. + * Retrieves cached search results. If the query has been cached, this will return quickly. If + * not, a full search request will be made. 
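// Suppliers.memoizeWithExpiration above puts a TTL on the doc-count map: the first call
// computes it, calls within the TTL reuse it, and the first call after expiry recomputes.
// A minimal sketch with Guava; the fetch body is illustrative.

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import java.util.Map;
import java.util.concurrent.TimeUnit;

public class DocCountCache {
  private final Supplier<Map<String, Long>> docCounts;

  public DocCountCache(long ttlSeconds) {
    docCounts = Suppliers.memoizeWithExpiration(this::fetch, ttlSeconds, TimeUnit.SECONDS);
  }

  private Map<String, Long> fetch() {
    // Stand-in for asking the search backend for per-entity document counts.
    return Map.of("dataset", 42L, "chart", 0L);
  }

  public Map<String, Long> get() {
    return docCounts.get();
  }
}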
* * @param entityName the name of the entity to search * @param query the search query @@ -49,7 +50,6 @@ public class CachingEntitySearchService { * @param size the count * @param flags additional search flags * @param facets list of facets we want aggregations for - * * @return a {@link SearchResult} containing the requested batch of search results */ public SearchResult search( @@ -61,7 +61,8 @@ public SearchResult search( int size, @Nullable SearchFlags flags, @Nullable List<String> facets) { - return getCachedSearchResults(entityNames, query, filters, sortCriterion, from, size, flags, facets); + return getCachedSearchResults( + entityNames, query, filters, sortCriterion, from, size, flags, facets); } /** @@ -72,7 +73,6 @@ public SearchResult search( * @param filters the filters to include * @param limit the max number of results to return * @param flags additional search flags - * * @return a {@link SearchResult} containing the requested batch of search results */ public AutoCompleteResult autoComplete( @@ -93,7 +93,6 @@ public AutoCompleteResult autoComplete( * @param filters the request map with fields and values as filters * @param from index of the first entity located in path * @param size the max number of entities contained in the response - * * @return a {@link SearchResult} containing the requested batch of search results */ public BrowseResult browse( @@ -107,8 +106,8 @@ public BrowseResult browse( } /** - * Retrieves cached scroll results. If the query has been cached, this will return quickly. If not, a full - * scroll request will be made. + * Retrieves cached scroll results. If the query has been cached, this will return quickly. If + * not, a full scroll request will be made. * * @param entities the names of the entities to search * @param query the search query @@ -118,7 +117,6 @@ public BrowseResult browse( * @param keepAlive the string representation of how long to keep point in time alive * @param size the count * @param flags additional search flags - * * @return a {@link ScrollResult} containing the requested batch of scroll results */ public ScrollResult scroll( @@ -130,15 +128,15 @@ public ScrollResult scroll( @Nullable String keepAlive, int size, @Nullable SearchFlags flags) { - return getCachedScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); + return getCachedScrollResults( + entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); } - - /** - * Get search results corresponding to the input "from" and "size" - * It goes through batches, starting from the beginning, until we get enough results to return - * This lets us have batches that return a variable number of results (we have no idea which batch the "from" "size" page corresponds to) + * Get search results corresponding to the input "from" and "size" It goes through batches, + * starting from the beginning, until we get enough results to return This lets us have batches + * that return a variable number of results (we have no idea which batch the "from" "size" page + * corresponds to) */ public SearchResult getCachedSearchResults( @Nonnull List<String> entityNames, @@ -150,19 +148,33 @@ public SearchResult getCachedSearchResults( @Nullable SearchFlags flags, @Nullable List<String> facets) { return new CacheableSearcher<>( - cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), - batchSize, - querySize -> getRawSearchResults(entityNames, query, filters, sortCriterion, querySize.getFrom(), - querySize.getSize(), flags, facets), - querySize 
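// The cache key for getCachedSearchResults, completed just below, is a javatuples Septet over
// every input that affects the result, with non-String members such as filters normalized to
// JSON so equal queries hash equally. A minimal sketch of the same key construction; parameter
// names and the pagination packing are illustrative.

import java.util.List;
import org.javatuples.Septet;

public class SearchCacheKey {
  static Object key(
      List<String> entityNames,
      String query,
      String filtersJson, // null when no filters were supplied
      String sortJson,
      String flagsJson,
      List<String> facets,
      String pagination) {
    // Tuples implement equals/hashCode over all members, so they behave well as cache keys.
    return Septet.with(entityNames, query, filtersJson, sortJson, flagsJson, facets, pagination);
  }
}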
-> Septet.with(entityNames, query, filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, flags != null ? toJsonString(flags) : null, - facets, querySize), flags, enableCache).getSearchResults(from, size); + cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), + batchSize, + querySize -> + getRawSearchResults( + entityNames, + query, + filters, + sortCriterion, + querySize.getFrom(), + querySize.getSize(), + flags, + facets), + querySize -> + Septet.with( + entityNames, + query, + filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, + facets, + querySize), + flags, + enableCache) + .getSearchResults(from, size); } - - /** - * Returns cached auto-complete results. - */ + /** Returns cached auto-complete results. */ public AutoCompleteResult getCachedAutoCompleteResults( @Nonnull String entityName, @Nonnull String input, @@ -170,19 +182,29 @@ public AutoCompleteResult getCachedAutoCompleteResults( @Nullable Filter filters, int limit, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME); AutoCompleteResult result; if (enableCache(flags)) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); - Object cacheKey = Sextet.with(entityName, input, field, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, limit); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); + Object cacheKey = + Sextet.with( + entityName, + input, + field, + filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, + limit); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(AutoCompleteResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); result = getRawAutoCompleteResults(entityName, input, field, filters, limit); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); @@ -196,9 +218,7 @@ public AutoCompleteResult getCachedAutoCompleteResults( } } - /** - * Returns cached browse results. - */ + /** Returns cached browse results. 
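// Each cached lookup above is instrumented twice: an access timer around the cache read and a
// miss timer around the recompute path, so hit latency and miss cost surface as separate
// metrics. A minimal sketch of the same pattern over a plain map; the registry and metric names
// are illustrative.

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

public class InstrumentedCache {
  private static final MetricRegistry REGISTRY = new MetricRegistry();
  private final ConcurrentMap<String, String> cache = new ConcurrentHashMap<>();

  String get(String key, Supplier<String> loader) {
    Timer.Context access = REGISTRY.timer("cache_access").time();
    String value = cache.get(key);
    access.stop();
    if (value == null) {
      Timer.Context miss = REGISTRY.timer("cache_miss").time();
      value = loader.get();
      cache.put(key, value);
      miss.stop();
      REGISTRY.counter("cache_miss_count").inc();
    }
    return value;
  }
}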
*/ public BrowseResult getCachedBrowseResults( @Nonnull String entityName, @Nonnull String path, @@ -206,19 +226,29 @@ public BrowseResult getCachedBrowseResults( int from, int size, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME); BrowseResult result; if (enableCache(flags)) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "browse_cache_access").time(); - Object cacheKey = Sextet.with(entityName, path, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, from, size); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "browse_cache_access").time(); + Object cacheKey = + Sextet.with( + entityName, + path, + filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, + from, + size); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(BrowseResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); result = getRawBrowseResults(entityName, path, filters, from, size); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); @@ -232,9 +262,7 @@ public BrowseResult getCachedBrowseResults( } } - /** - * Returns cached scroll results. - */ + /** Returns cached scroll results. */ public ScrollResult getCachedScrollResults( @Nonnull List<String> entities, @Nonnull String query, @@ -244,37 +272,62 @@ public ScrollResult getCachedScrollResults( @Nullable String keepAlive, int size, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { - boolean isFullText = Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { + boolean isFullText = + Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); ScrollResult result; if (enableCache(flags)) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); - Object cacheKey = Septet.with(entities, query, - filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, - flags != null ? toJsonString(flags) : null, - scrollId, size); + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); + Object cacheKey = + Septet.with( + entities, + query, + filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, + scrollId, + size); String json = cache.get(cacheKey, String.class); result = json != null ? 
toRecordTemplate(ScrollResult.class, json) : null; cacheAccess.stop(); if (result == null) { Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "scroll_cache_miss").time(); - result = getRawScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, isFullText, flags); + result = + getRawScrollResults( + entities, + query, + filters, + sortCriterion, + scrollId, + keepAlive, + size, + isFullText, + flags); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); MetricUtils.counter(this.getClass(), "scroll_cache_miss_count").inc(); } } else { - result = getRawScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, isFullText, flags); + result = + getRawScrollResults( + entities, + query, + filters, + sortCriterion, + scrollId, + keepAlive, + size, + isFullText, + flags); } return result; } } - /** - * Executes the expensive search query using the {@link EntitySearchService} - */ + /** Executes the expensive search query using the {@link EntitySearchService} */ private SearchResult getRawSearchResults( final List<String> entityNames, final String input, @@ -284,46 +337,31 @@ private SearchResult getRawSearchResults( final int count, @Nullable final SearchFlags searchFlags, @Nullable final List<String> facets) { - return entitySearchService.search(entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); + return entitySearchService.search( + entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); } - /** - * Executes the expensive autocomplete query using the {@link EntitySearchService} - */ + /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private AutoCompleteResult getRawAutoCompleteResults( final String entityName, final String input, final String field, final Filter filters, final int limit) { - return entitySearchService.autoComplete( - entityName, - input, - field, - filters, - limit); + return entitySearchService.autoComplete(entityName, input, field, filters, limit); } - /** - * Executes the expensive autocomplete query using the {@link EntitySearchService} - */ + /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private BrowseResult getRawBrowseResults( final String entityName, final String input, final Filter filters, final int start, final int count) { - return entitySearchService.browse( - entityName, - input, - filters, - start, - count); + return entitySearchService.browse(entityName, input, filters, start, count); } - /** - * Executes the expensive search query using the {@link EntitySearchService} - */ + /** Executes the expensive search query using the {@link EntitySearchService} */ private ScrollResult getRawScrollResults( final List<String> entities, final String input, @@ -336,31 +374,15 @@ private ScrollResult getRawScrollResults( @Nullable final SearchFlags searchFlags) { if (fulltext) { return entitySearchService.fullTextScroll( - entities, - input, - filters, - sortCriterion, - scrollId, - keepAlive, - count, - searchFlags); + entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); } else { - return entitySearchService.structuredScroll(entities, - input, - filters, - sortCriterion, - scrollId, - keepAlive, - count, - searchFlags); + return entitySearchService.structuredScroll( + entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); } } - /** - * Returns true if the cache should be used or skipped when fetching search results - */ + 
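// The isFullText computation above is doubly null-safe: a null SearchFlags falls back to a
// fresh instance via Optional, and Boolean.TRUE.equals(...) tolerates a null boxed flag. A
// minimal sketch of the idiom with a hypothetical Flags type.

import java.util.Optional;

public class FlagDefaulting {
  static class Flags {
    Boolean fulltext; // boxed on purpose: may be unset

    Boolean isFulltext() {
      return fulltext;
    }
  }

  static boolean isFullText(Flags flags) {
    // A null Flags object and a null fulltext field both resolve to false.
    return Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new Flags()).isFulltext());
  }

  public static void main(String[] args) {
    System.out.println(isFullText(null)); // false
  }
}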
/** Returns true if the cache should be used or skipped when fetching search results */ private boolean enableCache(final SearchFlags searchFlags) { return enableCache && (searchFlags == null || !searchFlags.isSkipCache()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 68a5483fa469c..f40da59a149fa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -17,19 +17,16 @@ import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.SearchUtils; - +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.util.List; import java.util.Map; import java.util.Optional; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.search.SearchResponse; - @Slf4j @RequiredArgsConstructor public class ElasticSearchService implements EntitySearchService, ElasticSearchIndexed { @@ -66,15 +63,19 @@ public long docCount(@Nonnull String entityName) { } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { - log.debug(String.format("Upserting Search document entityName: %s, document: %s, docId: %s", entityName, document, - docId)); + public void upsertDocument( + @Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { + log.debug( + String.format( + "Upserting Search document entityName: %s, document: %s, docId: %s", + entityName, document, docId)); esWriteDAO.upsertDocument(entityName, document, docId); } @Override public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { - log.debug(String.format("Deleting Search document entityName: %s, docId: %s", entityName, docId)); + log.debug( + String.format("Deleting Search document entityName: %s, docId: %s", entityName, docId)); esWriteDAO.deleteDocument(entityName, docId); } @@ -82,12 +83,15 @@ public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable String runId) { final Optional<String> maybeDocId = SearchUtils.getDocId(urn); if (!maybeDocId.isPresent()) { - log.warn(String.format("Failed to append run id, could not generate a doc id for urn %s", urn)); + log.warn( + String.format("Failed to append run id, could not generate a doc id for urn %s", urn)); return; } final String docId = maybeDocId.get(); log.debug(String.format("Appending run id for entityName: %s, docId: %s", entityName, docId)); - esWriteDAO.applyScriptUpdate(entityName, docId, + esWriteDAO.applyScriptUpdate( + entityName, + docId, /* Script used to apply updates to the runId field of the index. This script saves the past N run ids which touched a particular URN in the search index. 
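// appendRunId's script (continued just below) keeps a bounded history of run ids on each
// document. A simplified, illustrative reconstruction of such a script builder: the full
// script in the diff is only partially shown and also guards against duplicate ids, and the
// cap constant here is hypothetical (DataHub uses MAX_RUN_IDS_INDEXED).

public class RunIdScript {
  private static final int MAX_RUN_IDS = 100; // illustrative cap

  static String boundedAppendScript(String runId) {
    // Append the new run id, then evict the oldest entry once the list exceeds the cap;
    // initialize the list if the field does not exist yet.
    return String.format(
        "if (ctx._source.containsKey('runId')) { "
            + "ctx._source.runId.add('%s'); "
            + "if (ctx._source.runId.length > %s) { ctx._source.runId.remove(0) } "
            + "} else { ctx._source.runId = ['%s'] }",
        runId, MAX_RUN_IDS, runId);
  }
}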
@@ -99,102 +103,161 @@ public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable + "ctx._source.runId.add('%s'); " + "if (ctx._source.runId.length > %s) { ctx._source.runId.remove(0) } } " + "} else { ctx._source.runId = ['%s'] }", - runId, - runId, - MAX_RUN_IDS_INDEXED, - runId)); + runId, runId, MAX_RUN_IDS_INDEXED, runId)); } @Nonnull @Override - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags) { + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags) { return search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, null); } @Nonnull - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { - log.debug(String.format( - "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entityNames, input, postFilters, sortCriterion, from, size)); - return esSearchDAO.search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { + log.debug( + String.format( + "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entityNames, input, postFilters, sortCriterion, from, size)); + return esSearchDAO.search( + entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); } @Nonnull @Override - public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, int from, int size) { + public SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { log.debug( - String.format("Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", + String.format( + "Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", entityName, filters, sortCriterion, from, size)); return esSearchDAO.filter(entityName, filters, sortCriterion, from, size); } @Nonnull @Override - public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit) { - log.debug(String.format("Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", - entityName, query, field, requestParams, limit)); + public AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit) { + log.debug( + String.format( + "Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", + entityName, query, field, requestParams, limit)); return esSearchDAO.autoComplete(entityName, query, field, requestParams, limit); } 
@Nonnull @Override - public Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit) { - log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityNames != null ? entityNames.toString() : null, field, - requestParams, limit); + public Map<String, Long> aggregateByValue( + @Nullable List<String> entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit) { + log.debug( + "Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", + entityNames != null ? entityNames.toString() : null, + field, + requestParams, + limit); return esSearchDAO.aggregateByValue(entityNames, field, requestParams, limit); } @Nonnull @Override - public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, + public BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filters, + int from, int size) { log.debug( - String.format("Browsing entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", entityName, - path, filters, from, size)); + String.format( + "Browsing entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", + entityName, path, filters, from, size)); return esBrowseDAO.browse(entityName, path, filters, from, size); } @Nonnull @Override - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count) { return esBrowseDAO.browseV2(entityName, path, filter, input, start, count); } @Nonnull @Override public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) { - log.debug(String.format("Getting browse paths for entity entityName: %s, urn: %s", entityName, urn)); + log.debug( + String.format("Getting browse paths for entity entityName: %s, urn: %s", entityName, urn)); return esBrowseDAO.getBrowsePaths(entityName, urn); } @Nonnull @Override - public ScrollResult fullTextScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult fullTextScroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); flags.setFulltext(true); - return esSearchDAO.scroll(entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, - flags); + return esSearchDAO.scroll( + entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); } @Nonnull @Override - public ScrollResult 
structuredScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult structuredScroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); flags.setFulltext(false); - return esSearchDAO.scroll(entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); + return esSearchDAO.scroll( + entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); } public Optional<SearchResponse> raw(@Nonnull String indexName, @Nullable String jsonQuery) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java index 43431e93622f7..388dcea784cbb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java @@ -1,11 +1,14 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.google.common.collect.ImmutableMap; - +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.version.GitVersion; +import com.linkedin.util.Pair; +import io.github.resilience4j.retry.Retry; +import io.github.resilience4j.retry.RetryConfig; +import io.github.resilience4j.retry.RetryRegistry; import java.io.IOException; import java.time.Duration; import java.time.Instant; @@ -21,11 +24,6 @@ import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -import com.linkedin.util.Pair; -import io.github.resilience4j.retry.Retry; -import io.github.resilience4j.retry.RetryConfig; -import io.github.resilience4j.retry.RetryRegistry; import javax.annotation.Nullable; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -37,6 +35,7 @@ import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.GetAliasesResponse; @@ -54,55 +53,52 @@ import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import 
org.opensearch.index.reindex.ReindexRequest; -import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.tasks.TaskInfo; - @Slf4j public class ESIndexBuilder { private final RestHighLevelClient _searchClient; - @Getter - private final int numShards; + @Getter private final int numShards; - @Getter - private final int numReplicas; + @Getter private final int numReplicas; - @Getter - private final int numRetries; + @Getter private final int numRetries; - @Getter - private final int refreshIntervalSeconds; + @Getter private final int refreshIntervalSeconds; - @Getter - private final Map<String, Map<String, String>> indexSettingOverrides; + @Getter private final Map<String, Map<String, String>> indexSettingOverrides; - @Getter - private final boolean enableIndexSettingsReindex; + @Getter private final boolean enableIndexSettingsReindex; - @Getter - private final boolean enableIndexMappingsReindex; + @Getter private final boolean enableIndexMappingsReindex; - @Getter - private final ElasticSearchConfiguration elasticSearchConfiguration; + @Getter private final ElasticSearchConfiguration elasticSearchConfiguration; - @Getter - private final GitVersion gitVersion; + @Getter private final GitVersion gitVersion; - final private static RequestOptions REQUEST_OPTIONS = RequestOptions.DEFAULT.toBuilder() - .setRequestConfig(RequestConfig.custom() - .setSocketTimeout(180 * 1000).build()).build(); + private static final RequestOptions REQUEST_OPTIONS = + RequestOptions.DEFAULT.toBuilder() + .setRequestConfig(RequestConfig.custom().setSocketTimeout(180 * 1000).build()) + .build(); private final RetryRegistry retryRegistry; - public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numReplicas, int numRetries, - int refreshIntervalSeconds, Map<String, Map<String, String>> indexSettingOverrides, - boolean enableIndexSettingsReindex, boolean enableIndexMappingsReindex, - ElasticSearchConfiguration elasticSearchConfiguration, GitVersion gitVersion) { + public ESIndexBuilder( + RestHighLevelClient searchClient, + int numShards, + int numReplicas, + int numRetries, + int refreshIntervalSeconds, + Map<String, Map<String, String>> indexSettingOverrides, + boolean enableIndexSettingsReindex, + boolean enableIndexMappingsReindex, + ElasticSearchConfiguration elasticSearchConfiguration, + GitVersion gitVersion) { this._searchClient = searchClient; this.numShards = numShards; this.numReplicas = numReplicas; @@ -114,7 +110,8 @@ public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numRe this.elasticSearchConfiguration = elasticSearchConfiguration; this.gitVersion = gitVersion; - RetryConfig config = RetryConfig.custom() + RetryConfig config = + RetryConfig.custom() .maxAttempts(Math.max(1, numRetries)) .waitDuration(Duration.ofSeconds(10)) .retryOnException(e -> e instanceof OpenSearchException) @@ -125,8 +122,11 @@ public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numRe this.retryRegistry = RetryRegistry.of(config); } - public ReindexConfig buildReindexState(String indexName, Map<String, Object> mappings, Map<String, Object> settings) throws IOException { - ReindexConfig.ReindexConfigBuilder builder = ReindexConfig.builder() + public ReindexConfig buildReindexState( + String indexName, Map<String, Object> mappings, 
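// The constructor above installs a single RetryRegistry so every ES call can share one policy:
// at most numRetries attempts, a fixed 10-second wait, retrying only on OpenSearchException.
// A minimal sketch of the same resilience4j wiring around a count call; the supplier body is a
// stand-in for getCount(index).

import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import io.github.resilience4j.retry.RetryRegistry;
import java.time.Duration;
import org.opensearch.OpenSearchException;

public class RetryingCount {
  static long countWithRetry(int numRetries) throws Throwable {
    RetryConfig config =
        RetryConfig.custom()
            .maxAttempts(Math.max(1, numRetries))
            .waitDuration(Duration.ofSeconds(10))
            .retryOnException(e -> e instanceof OpenSearchException)
            .failAfterMaxAttempts(true)
            .build();
    Retry retry = RetryRegistry.of(config).retry("sourceIndexCount");
    // executeCheckedSupplier re-invokes the supplier under the policy configured above.
    return retry.executeCheckedSupplier(() -> 42L);
  }
}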
Map<String, Object> settings) + throws IOException { + ReindexConfig.ReindexConfigBuilder builder = + ReindexConfig.builder() .name(indexName) .enableIndexSettingsReindex(enableIndexSettingsReindex) .enableIndexMappingsReindex(enableIndexMappingsReindex) @@ -142,7 +142,8 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map builder.targetSettings(targetSetting); // Check if index exists - boolean exists = _searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); + boolean exists = + _searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); builder.exists(exists); // If index doesn't exist, no reindex @@ -150,7 +151,9 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map return builder.build(); } - Settings currentSettings = _searchClient.indices() + Settings currentSettings = + _searchClient + .indices() .getSettings(new GetSettingsRequest().indices(indexName), RequestOptions.DEFAULT) .getIndexToSettings() .values() @@ -158,7 +161,9 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map .next(); builder.currentSettings(currentSettings); - Map<String, Object> currentMappings = _searchClient.indices() + Map<String, Object> currentMappings = + _searchClient + .indices() .getMapping(new GetMappingsRequest().indices(indexName), RequestOptions.DEFAULT) .mappings() .values() @@ -172,16 +177,19 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map } /** - * Builds index with given name, mappings and settings - * Deprecated: Use the `buildIndex(ReindexConfig indexState) to enforce conventions via ReindexConfig class - * earlier in the process. + * Builds index with given name, mappings and settings Deprecated: Use the + * `buildIndex(ReindexConfig indexState) to enforce conventions via ReindexConfig class earlier in + * the process. + * * @param indexName index name * @param mappings ES mappings * @param settings ES settings * @throws IOException ES error */ @Deprecated - public void buildIndex(String indexName, Map<String, Object> mappings, Map<String, Object> settings) throws IOException { + public void buildIndex( + String indexName, Map<String, Object> mappings, Map<String, Object> settings) + throws IOException { buildIndex(buildReindexState(indexName, mappings, settings)); } @@ -210,15 +218,20 @@ public void buildIndex(ReindexConfig indexState) throws IOException { if (indexState.requiresApplySettings()) { UpdateSettingsRequest request = new UpdateSettingsRequest(indexState.name()); - Map<String, Object> indexSettings = ((Map<String, Object>) indexState.targetSettings().get("index")) + Map<String, Object> indexSettings = + ((Map<String, Object>) indexState.targetSettings().get("index")) .entrySet().stream() - .filter(e -> ReindexConfig.SETTINGS_DYNAMIC.contains(e.getKey())) - .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue)); + .filter(e -> ReindexConfig.SETTINGS_DYNAMIC.contains(e.getKey())) + .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue)); request.settings(indexSettings); - boolean ack = _searchClient.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. 
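// Only dynamic settings may be applied to a live index, so the update path above filters the
// target "index" settings down to ReindexConfig.SETTINGS_DYNAMIC and re-prefixes each key with
// "index." before issuing the UpdateSettingsRequest. A minimal sketch of that filtering step;
// the dynamic-key set here is an illustrative subset.

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class DynamicSettings {
  static final Set<String> DYNAMIC = Set.of("number_of_replicas", "refresh_interval");

  static Map<String, Object> applicable(Map<String, Object> indexSettings) {
    return indexSettings.entrySet().stream()
        .filter(e -> DYNAMIC.contains(e.getKey()))
        .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue));
  }

  public static void main(String[] args) {
    Map<String, Object> target = Map.of("number_of_replicas", 2, "number_of_shards", 5);
    System.out.println(applicable(target)); // {index.number_of_replicas=2}
  }
}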
Settings: {}, Acknowledged: {}", indexState.name(), - ReindexConfig.OBJECT_MAPPER.writeValueAsString(indexSettings), ack); + boolean ack = + _searchClient.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexState.name(), + ReindexConfig.OBJECT_MAPPER.writeValueAsString(indexSettings), + ack); } } else { try { @@ -231,30 +244,40 @@ public void buildIndex(ReindexConfig indexState) throws IOException { /** * Apply mappings changes if reindex is not required + * * @param indexState the state of the current and target index settings/mappings - * @param suppressError during reindex logic this is not an error, for structured properties it is an error + * @param suppressError during reindex logic this is not an error, for structured properties it is + * an error * @throws IOException communication issues with ES */ public void applyMappings(ReindexConfig indexState, boolean suppressError) throws IOException { if (indexState.isPureMappingsAddition()) { log.info("Updating index {} mappings in place.", indexState.name()); - PutMappingRequest request = new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); + PutMappingRequest request = + new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); _searchClient.indices().putMapping(request, RequestOptions.DEFAULT); log.info("Updated index {} with new mappings", indexState.name()); } else { if (!suppressError) { - log.error("Attempted to apply invalid mappings. Current: {} Target: {}", indexState.currentMappings(), - indexState.targetMappings()); + log.error( + "Attempted to apply invalid mappings. Current: {} Target: {}", + indexState.currentMappings(), + indexState.targetMappings()); } } } - public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options, ReindexConfig config) + public String reindexInPlaceAsync( + String indexAlias, + @Nullable QueryBuilder filterQuery, + BatchWriteOperationsOptions options, + ReindexConfig config) throws Exception { - GetAliasesResponse aliasesResponse = _searchClient.indices().getAlias( - new GetAliasesRequest(indexAlias), RequestOptions.DEFAULT); + GetAliasesResponse aliasesResponse = + _searchClient.indices().getAlias(new GetAliasesRequest(indexAlias), RequestOptions.DEFAULT); if (aliasesResponse.getAliases().isEmpty()) { - throw new IllegalArgumentException(String.format("Input to reindexInPlaceAsync should be an alias. %s is not", indexAlias)); + throw new IllegalArgumentException( + String.format("Input to reindexInPlaceAsync should be an alias. 
%s is not", indexAlias)); } // Point alias at new index @@ -262,9 +285,12 @@ public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filt createIndex(nextIndexName, config); renameReindexedIndices(_searchClient, indexAlias, null, nextIndexName, false); - return submitReindex(aliasesResponse.getAliases().keySet().toArray(new String[0]), - nextIndexName, options.getBatchSize(), - TimeValue.timeValueSeconds(options.getTimeoutSeconds()), filterQuery); + return submitReindex( + aliasesResponse.getAliases().keySet().toArray(new String[0]), + nextIndexName, + options.getBatchSize(), + TimeValue.timeValueSeconds(options.getTimeoutSeconds()), + filterQuery); } private static String getNextIndexName(String base, long startTime) { @@ -286,10 +312,14 @@ private void reindex(ReindexConfig indexState) throws Throwable { String parentTaskId; if (previousTaskInfo.isPresent()) { - log.info("Reindex task {} in progress with description {}. Attempting to continue task from breakpoint.", - previousTaskInfo.get().getTaskId(), previousTaskInfo.get().getDescription()); + log.info( + "Reindex task {} in progress with description {}. Attempting to continue task from breakpoint.", + previousTaskInfo.get().getTaskId(), + previousTaskInfo.get().getDescription()); parentTaskId = previousTaskInfo.get().getParentTaskId().toString(); - tempIndexName = ESUtils.extractTargetIndex(previousTaskInfo.get().getHeaders().get(ESUtils.OPAQUE_ID_HEADER)); + tempIndexName = + ESUtils.extractTargetIndex( + previousTaskInfo.get().getHeaders().get(ESUtils.OPAQUE_ID_HEADER)); } else { // Create new index createIndex(tempIndexName, indexState); @@ -304,7 +334,11 @@ private void reindex(ReindexConfig indexState) throws Throwable { long documentCountsLastUpdated = System.currentTimeMillis(); while (System.currentTimeMillis() < timeoutAt) { - log.info("Task: {} - Reindexing from {} to {} in progress...", parentTaskId, indexState.name(), tempIndexName); + log.info( + "Task: {} - Reindexing from {} to {} in progress...", + parentTaskId, + indexState.name(), + tempIndexName); Pair<Long, Long> tempDocumentsCount = getDocumentCounts(indexState.name(), tempIndexName); if (!tempDocumentsCount.equals(documentCounts)) { @@ -313,18 +347,28 @@ private void reindex(ReindexConfig indexState) throws Throwable { } if (documentCounts.getFirst().equals(documentCounts.getSecond())) { - log.info("Task: {} - Reindexing {} to {} task was successful", parentTaskId, indexState.name(), tempIndexName); + log.info( + "Task: {} - Reindexing {} to {} task was successful", + parentTaskId, + indexState.name(), + tempIndexName); reindexTaskCompleted = true; break; } else { - log.warn("Task: {} - Document counts do not match {} != {}. Complete: {}%", parentTaskId, documentCounts.getFirst(), - documentCounts.getSecond(), 100 * (1.0f * documentCounts.getSecond()) / documentCounts.getFirst()); + log.warn( + "Task: {} - Document counts do not match {} != {}. 
Complete: {}%", + parentTaskId, + documentCounts.getFirst(), + documentCounts.getSecond(), + 100 * (1.0f * documentCounts.getSecond()) / documentCounts.getFirst()); long lastUpdateDelta = System.currentTimeMillis() - documentCountsLastUpdated; if (lastUpdateDelta > (300 * 1000)) { - if (reindexCount <= numRetries) { - log.warn("No change in index count after 5 minutes, re-triggering reindex #{}.", reindexCount); + if (reindexCount <= numRetries) { + log.warn( + "No change in index count after 5 minutes, re-triggering reindex #{}.", + reindexCount); submitReindex(indexState.name(), tempIndexName); reindexCount = reindexCount + 1; documentCountsLastUpdated = System.currentTimeMillis(); // reset timer @@ -341,37 +385,63 @@ private void reindex(ReindexConfig indexState) throws Throwable { if (!reindexTaskCompleted) { if (elasticSearchConfiguration.getBuildIndices().isAllowDocCountMismatch() - && elasticSearchConfiguration.getBuildIndices().isCloneIndices()) { - log.warn("Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}\n" - + "This condition is explicitly ALLOWED, please refer to latest clone if original index is required.", - indexState.name(), documentCounts.getFirst(), documentCounts.getSecond()); + && elasticSearchConfiguration.getBuildIndices().isCloneIndices()) { + log.warn( + "Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}\n" + + "This condition is explicitly ALLOWED, please refer to latest clone if original index is required.", + indexState.name(), + documentCounts.getFirst(), + documentCounts.getSecond()); } else { - log.error("Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", - indexState.name(), documentCounts.getFirst(), documentCounts.getSecond()); - diff(indexState.name(), tempIndexName, Math.max(documentCounts.getFirst(), documentCounts.getSecond())); - throw new RuntimeException(String.format("Reindex from %s to %s failed. Document count %s != %s", indexState.name(), tempIndexName, - documentCounts.getFirst(), documentCounts.getSecond())); + log.error( + "Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", + indexState.name(), + documentCounts.getFirst(), + documentCounts.getSecond()); + diff( + indexState.name(), + tempIndexName, + Math.max(documentCounts.getFirst(), documentCounts.getSecond())); + throw new RuntimeException( + String.format( + "Reindex from %s to %s failed. 
Document count %s != %s", + indexState.name(), + tempIndexName, + documentCounts.getFirst(), + documentCounts.getSecond())); } } } catch (Throwable e) { - log.error("Failed to reindex {} to {}: Exception {}", indexState.name(), tempIndexName, e.toString()); - _searchClient.indices().delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT); + log.error( + "Failed to reindex {} to {}: Exception {}", + indexState.name(), + tempIndexName, + e.toString()); + _searchClient + .indices() + .delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT); throw e; } log.info("Reindex from {} to {} succeeded", indexState.name(), tempIndexName); - renameReindexedIndices(_searchClient, indexState.name(), indexState.indexPattern(), tempIndexName, true); + renameReindexedIndices( + _searchClient, indexState.name(), indexState.indexPattern(), tempIndexName, true); log.info("Finished setting up {}", indexState.name()); } - public static void renameReindexedIndices(RestHighLevelClient searchClient, String originalName, @Nullable String pattern, String newName, boolean deleteOld) + public static void renameReindexedIndices( + RestHighLevelClient searchClient, + String originalName, + @Nullable String pattern, + String newName, + boolean deleteOld) throws IOException { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(originalName); if (pattern != null) { getAliasesRequest.indices(pattern); } - GetAliasesResponse aliasesResponse = searchClient.indices().getAlias( - getAliasesRequest, RequestOptions.DEFAULT); + GetAliasesResponse aliasesResponse = + searchClient.indices().getAlias(getAliasesRequest, RequestOptions.DEFAULT); // If not aliased, delete the original index final Collection<String> aliasedIndexDelete; @@ -384,23 +454,31 @@ public static void renameReindexedIndices(RestHighLevelClient searchClient, Stri } // Add alias for the new index - AliasActions removeAction = deleteOld ? AliasActions.removeIndex() : AliasActions.remove().alias(originalName); + AliasActions removeAction = + deleteOld ? 
AliasActions.removeIndex() : AliasActions.remove().alias(originalName); removeAction.indices(aliasedIndexDelete.toArray(new String[0])); AliasActions addAction = AliasActions.add().alias(originalName).index(newName); - searchClient.indices() - .updateAliases(new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction), + searchClient + .indices() + .updateAliases( + new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction), RequestOptions.DEFAULT); } - private String submitReindex(String[] sourceIndices, String destinationIndex, - int batchSize, @Nullable TimeValue timeout, - @Nullable QueryBuilder sourceFilterQuery) throws IOException { - ReindexRequest reindexRequest = new ReindexRequest() - .setSourceIndices(sourceIndices) - .setDestIndex(destinationIndex) - .setMaxRetries(numRetries) - .setAbortOnVersionConflict(false) - .setSourceBatchSize(batchSize); + private String submitReindex( + String[] sourceIndices, + String destinationIndex, + int batchSize, + @Nullable TimeValue timeout, + @Nullable QueryBuilder sourceFilterQuery) + throws IOException { + ReindexRequest reindexRequest = + new ReindexRequest() + .setSourceIndices(sourceIndices) + .setDestIndex(destinationIndex) + .setMaxRetries(numRetries) + .setAbortOnVersionConflict(false) + .setSourceBatchSize(batchSize); if (timeout != null) { reindexRequest.setTimeout(timeout); } @@ -408,26 +486,34 @@ private String submitReindex(String[] sourceIndices, String destinationIndex, reindexRequest.setSourceQuery(sourceFilterQuery); } - RequestOptions requestOptions = ESUtils.buildReindexTaskRequestOptions(gitVersion.getVersion(), sourceIndices[0], - destinationIndex); - TaskSubmissionResponse reindexTask = _searchClient.submitReindexTask(reindexRequest, requestOptions); + RequestOptions requestOptions = + ESUtils.buildReindexTaskRequestOptions( + gitVersion.getVersion(), sourceIndices[0], destinationIndex); + TaskSubmissionResponse reindexTask = + _searchClient.submitReindexTask(reindexRequest, requestOptions); return reindexTask.getTask(); } private String submitReindex(String sourceIndex, String destinationIndex) throws IOException { - return submitReindex(new String[]{sourceIndex}, destinationIndex, 2500, null, null); + return submitReindex(new String[] {sourceIndex}, destinationIndex, 2500, null, null); } - private Pair<Long, Long> getDocumentCounts(String sourceIndex, String destinationIndex) throws Throwable { + private Pair<Long, Long> getDocumentCounts(String sourceIndex, String destinationIndex) + throws Throwable { // Check whether reindex succeeded by comparing document count - // There can be some delay between the reindex finishing and count being fully up to date, so try multiple times + // There can be some delay between the reindex finishing and count being fully up to date, so + // try multiple times long originalCount = 0; long reindexedCount = 0; for (int i = 0; i < this.numRetries; i++) { // Check if reindex succeeded by comparing document counts - originalCount = retryRegistry.retry("retrySourceIndexCount") + originalCount = + retryRegistry + .retry("retrySourceIndexCount") .executeCheckedSupplier(() -> getCount(sourceIndex)); - reindexedCount = retryRegistry.retry("retryDestinationIndexCount") + reindexedCount = + retryRegistry + .retry("retryDestinationIndexCount") .executeCheckedSupplier(() -> getCount(destinationIndex)); if (originalCount == reindexedCount) { break; @@ -445,13 +531,20 @@ private Pair<Long, Long> getDocumentCounts(String sourceIndex, String destinatio 
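// renameReindexedIndices above swaps the alias onto the freshly built index in one
// updateAliases call, so the two alias actions apply atomically and readers never observe a
// window without the alias; removeIndex() additionally drops the old indices when deleteOld is
// set. A minimal sketch of that flip with the OpenSearch high-level client (client
// construction omitted).

import java.io.IOException;
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;

public class AliasFlip {
  static void flip(
      RestHighLevelClient client,
      String alias,
      String[] oldIndices,
      String newIndex,
      boolean deleteOld)
      throws IOException {
    AliasActions remove =
        deleteOld ? AliasActions.removeIndex() : AliasActions.remove().alias(alias);
    remove.indices(oldIndices);
    AliasActions add = AliasActions.add().alias(alias).index(newIndex);
    client
        .indices()
        .updateAliases(
            new IndicesAliasesRequest().addAliasAction(remove).addAliasAction(add),
            RequestOptions.DEFAULT);
  }
}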
private Optional<TaskInfo> getTaskInfoByHeader(String indexName) throws Throwable { Retry retryWithDefaultConfig = retryRegistry.retry("getTaskInfoByHeader"); - return retryWithDefaultConfig.executeCheckedSupplier(() -> { - ListTasksRequest listTasksRequest = new ListTasksRequest().setDetailed(true); - List<TaskInfo> taskInfos = _searchClient.tasks().list(listTasksRequest, REQUEST_OPTIONS).getTasks(); - return taskInfos.stream() - .filter(info -> ESUtils.prefixMatch(info.getHeaders().get(ESUtils.OPAQUE_ID_HEADER), gitVersion.getVersion(), - indexName)).findFirst(); - }); + return retryWithDefaultConfig.executeCheckedSupplier( + () -> { + ListTasksRequest listTasksRequest = new ListTasksRequest().setDetailed(true); + List<TaskInfo> taskInfos = + _searchClient.tasks().list(listTasksRequest, REQUEST_OPTIONS).getTasks(); + return taskInfos.stream() + .filter( + info -> + ESUtils.prefixMatch( + info.getHeaders().get(ESUtils.OPAQUE_ID_HEADER), + gitVersion.getVersion(), + indexName)) + .findFirst(); + }); } private void diff(String indexA, String indexB, long maxDocs) { @@ -470,12 +563,17 @@ private void diff(String indexA, String indexB, long maxDocs) { SearchResponse responseA = _searchClient.search(indexARequest, RequestOptions.DEFAULT); SearchResponse responseB = _searchClient.search(indexBRequest, RequestOptions.DEFAULT); - Set<String> actual = Arrays.stream(responseB.getHits().getHits()) - .map(SearchHit::getId).collect(Collectors.toSet()); + Set<String> actual = + Arrays.stream(responseB.getHits().getHits()) + .map(SearchHit::getId) + .collect(Collectors.toSet()); - log.error("Missing {}", Arrays.stream(responseA.getHits().getHits()) + log.error( + "Missing {}", + Arrays.stream(responseA.getHits().getHits()) .filter(doc -> !actual.contains(doc.getId())) - .map(SearchHit::getSourceAsString).collect(Collectors.toSet())); + .map(SearchHit::getSourceAsString) + .collect(Collectors.toSet())); } catch (IOException e) { throw new RuntimeException(e); } @@ -483,7 +581,10 @@ private void diff(String indexA, String indexB, long maxDocs) { } private long getCount(@Nonnull String indexName) throws IOException { - return _searchClient.count(new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), RequestOptions.DEFAULT) + return _searchClient + .count( + new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), + RequestOptions.DEFAULT) .getCount(); } @@ -496,30 +597,48 @@ private void createIndex(String indexName, ReindexConfig state) throws IOExcepti log.info("Created index {}", indexName); } - public static void cleanIndex(RestHighLevelClient searchClient, ElasticSearchConfiguration esConfig, ReindexConfig indexState) { - log.info("Checking for orphan index pattern {} older than {} {}", indexState.indexPattern(), - esConfig.getBuildIndices().getRetentionValue(), - esConfig.getBuildIndices().getRetentionUnit()); - - getOrphanedIndices(searchClient, esConfig, indexState).forEach(orphanIndex -> { - log.warn("Deleting orphan index {}.", orphanIndex); - try { - searchClient.indices().delete(new DeleteIndexRequest().indices(orphanIndex), RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + public static void cleanIndex( + RestHighLevelClient searchClient, + ElasticSearchConfiguration esConfig, + ReindexConfig indexState) { + log.info( + "Checking for orphan index pattern {} older than {} {}", + indexState.indexPattern(), + esConfig.getBuildIndices().getRetentionValue(), + esConfig.getBuildIndices().getRetentionUnit()); + + 
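[Note: getTaskInfoByHeader above recovers an in-flight reindex task by matching the opaque ID header attached when the task was submitted (via ESUtils.buildReindexTaskRequestOptions). A sketch of the tagging side, assuming the standard X-Opaque-Id request header; the exact ID format used by ESUtils is an assumption here:

    import org.opensearch.client.RequestOptions;

    // Tag the reindex submission with an opaque ID built from the git version
    // and the source/destination indices, so ESUtils.prefixMatch can find the
    // task later. The "datahub-" prefix and "-" separator are illustrative only.
    RequestOptions withOpaqueId(String version, String sourceIndex, String destIndex) {
      return RequestOptions.DEFAULT.toBuilder()
          .addHeader("X-Opaque-Id", String.join("-", "datahub", version, sourceIndex, destIndex))
          .build();
    }
]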
getOrphanedIndices(searchClient, esConfig, indexState) + .forEach( + orphanIndex -> { + log.warn("Deleting orphan index {}.", orphanIndex); + try { + searchClient + .indices() + .delete(new DeleteIndexRequest().indices(orphanIndex), RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } - private static List<String> getOrphanedIndices(RestHighLevelClient searchClient, ElasticSearchConfiguration esConfig, - ReindexConfig indexState) { + private static List<String> getOrphanedIndices( + RestHighLevelClient searchClient, + ElasticSearchConfiguration esConfig, + ReindexConfig indexState) { List<String> orphanedIndices = new ArrayList<>(); try { - Date retentionDate = Date.from(Instant.now() - .minus(Duration.of(esConfig.getBuildIndices().getRetentionValue(), - ChronoUnit.valueOf(esConfig.getBuildIndices().getRetentionUnit())))); - - GetIndexResponse response = searchClient.indices().get(new GetIndexRequest(indexState.indexCleanPattern()), RequestOptions.DEFAULT); + Date retentionDate = + Date.from( + Instant.now() + .minus( + Duration.of( + esConfig.getBuildIndices().getRetentionValue(), + ChronoUnit.valueOf(esConfig.getBuildIndices().getRetentionUnit())))); + + GetIndexResponse response = + searchClient + .indices() + .get(new GetIndexRequest(indexState.indexCleanPattern()), RequestOptions.DEFAULT); for (String index : response.getIndices()) { var creationDateStr = response.getSetting(index, "index.creation_date"); @@ -530,7 +649,8 @@ private static List<String> getOrphanedIndices(RestHighLevelClient searchClient, continue; } - if (response.getAliases().containsKey(index) && response.getAliases().get(index).size() == 0) { + if (response.getAliases().containsKey(index) + && response.getAliases().get(index).size() == 0) { log.info("Index {} is orphaned", index); orphanedIndices.add(index); } @@ -539,7 +659,9 @@ private static List<String> getOrphanedIndices(RestHighLevelClient searchClient, if (e.getMessage().contains("index_not_found_exception")) { log.info("No orphaned indices found with pattern {}", indexState.indexCleanPattern()); } else { - log.error("An error occurred when trying to identify orphaned indices. Exception: {}", e.getMessage()); + log.error( + "An error occurred when trying to identify orphaned indices. 
Exception: {}", + e.getMessage()); } } return orphanedIndices; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java index 56cb26b09dc33..4489c661bb2ed 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java @@ -3,50 +3,50 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @RequiredArgsConstructor @Slf4j public class EntityIndexBuilders implements ElasticSearchIndexed { - private final ESIndexBuilder indexBuilder; - private final EntityRegistry entityRegistry; - private final IndexConvention indexConvention; - private final SettingsBuilder settingsBuilder; - - public ESIndexBuilder getIndexBuilder() { - return indexBuilder; + private final ESIndexBuilder indexBuilder; + private final EntityRegistry entityRegistry; + private final IndexConvention indexConvention; + private final SettingsBuilder settingsBuilder; + + public ESIndexBuilder getIndexBuilder() { + return indexBuilder; + } + + @Override + public void reindexAll() { + for (ReindexConfig config : buildReindexConfigs()) { + try { + indexBuilder.buildIndex(config); + } catch (IOException e) { + throw new RuntimeException(e); + } } - - @Override - public void reindexAll() { - for (ReindexConfig config : buildReindexConfigs()) { - try { - indexBuilder.buildIndex(config); - } catch (IOException e) { + } + + @Override + public List<ReindexConfig> buildReindexConfigs() { + Map<String, Object> settings = settingsBuilder.getSettings(); + return entityRegistry.getEntitySpecs().values().stream() + .map( + entitySpec -> { + try { + Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec); + return indexBuilder.buildReindexState( + indexConvention.getIndexName(entitySpec), mappings, settings); + } catch (IOException e) { throw new RuntimeException(e); - } - } - } - - @Override - public List<ReindexConfig> buildReindexConfigs() { - Map<String, Object> settings = settingsBuilder.getSettings(); - return entityRegistry.getEntitySpecs().values().stream().map(entitySpec -> { - try { - Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec); - return indexBuilder.buildReindexState(indexConvention.getIndexName(entitySpec), mappings, settings); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList()); - } + } + }) + .collect(Collectors.toList()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 13a0f57ccea99..f85a0dcb06a07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static 
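[Note: in getOrphanedIndices above, index.creation_date holds the index creation time as epoch milliseconds, so the retention check is a plain Instant comparison. A self-contained sketch, with a hardcoded 7-day window standing in for the configured retentionValue/retentionUnit:

    import java.time.Duration;
    import java.time.Instant;
    import java.time.temporal.ChronoUnit;

    // An index is an orphan candidate when it is older than the retention
    // window and no alias points at it (the empty-aliases check above).
    boolean isOrphanCandidate(String creationDateStr, boolean hasNoAliases) {
      Instant cutoff = Instant.now().minus(Duration.of(7, ChronoUnit.DAYS));
      Instant created = Instant.ofEpochMilli(Long.parseLong(creationDateStr));
      return created.isBefore(cutoff) && hasNoAliases;
    }
]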
com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchScoreFieldSpec; @@ -14,20 +16,19 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; - - @Slf4j public class MappingsBuilder { - private static final Map<String, String> PARTIAL_NGRAM_CONFIG = ImmutableMap.of( + private static final Map<String, String> PARTIAL_NGRAM_CONFIG = + ImmutableMap.of( TYPE, "search_as_you_type", MAX_SHINGLE_SIZE, "4", DOC_VALUES, "false"); - public static Map<String, String> getPartialNgramConfigWithOverrides(Map<String, String> overrides) { + public static Map<String, String> getPartialNgramConfigWithOverrides( + Map<String, String> overrides) { return Stream.concat(PARTIAL_NGRAM_CONFIG.entrySet().stream(), overrides.entrySet().stream()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } public static final Map<String, String> KEYWORD_TYPE_MAP = ImmutableMap.of(TYPE, KEYWORD); @@ -45,16 +46,19 @@ public static Map<String, String> getPartialNgramConfigWithOverrides(Map<String, public static final String PROPERTIES = "properties"; - private MappingsBuilder() { - } + private MappingsBuilder() {} public static Map<String, Object> getMappings(@Nonnull final EntitySpec entitySpec) { Map<String, Object> mappings = new HashMap<>(); - entitySpec.getSearchableFieldSpecs() + entitySpec + .getSearchableFieldSpecs() .forEach(searchableFieldSpec -> mappings.putAll(getMappingsForField(searchableFieldSpec))); - entitySpec.getSearchScoreFieldSpecs() - .forEach(searchScoreFieldSpec -> mappings.putAll(getMappingsForSearchScoreField(searchScoreFieldSpec))); + entitySpec + .getSearchScoreFieldSpecs() + .forEach( + searchScoreFieldSpec -> + mappings.putAll(getMappingsForSearchScoreField(searchScoreFieldSpec))); // Fixed fields mappings.put("urn", getMappingsForUrn()); @@ -65,64 +69,70 @@ public static Map<String, Object> getMappings(@Nonnull final EntitySpec entitySp private static Map<String, Object> getMappingsForUrn() { Map<String, Object> subFields = new HashMap<>(); - subFields.put(DELIMITED, ImmutableMap.of( + subFields.put( + DELIMITED, + ImmutableMap.of( TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, URN_ANALYZER, SEARCH_ANALYZER, URN_SEARCH_ANALYZER, - SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER) - ); - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - ImmutableMap.of( - ANALYZER, PARTIAL_URN_COMPONENT - ) - )); + SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER)); + subFields.put( + NGRAM, + getPartialNgramConfigWithOverrides(ImmutableMap.of(ANALYZER, PARTIAL_URN_COMPONENT))); return ImmutableMap.<String, Object>builder() - .put(TYPE, ESUtils.KEYWORD_FIELD_TYPE) - .put(FIELDS, subFields) - .build(); + .put(TYPE, ESUtils.KEYWORD_FIELD_TYPE) + .put(FIELDS, subFields) + .build(); } private static Map<String, Object> getMappingsForRunId() { return ImmutableMap.<String, Object>builder().put(TYPE, ESUtils.KEYWORD_FIELD_TYPE).build(); } - private static Map<String, Object> getMappingsForField(@Nonnull final SearchableFieldSpec searchableFieldSpec) { + private static Map<String, Object> getMappingsForField( + @Nonnull final SearchableFieldSpec searchableFieldSpec) { FieldType fieldType = searchableFieldSpec.getSearchableAnnotation().getFieldType(); Map<String, Object> mappings 
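[Note: MappingsBuilder assembles nested Java maps that serialize directly into the index-mapping JSON Elasticsearch expects. A trimmed illustration of the shape produced for a keyword field with subfields; field and analyzer details are elided and this is not the exact DataHub output:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Map;

    // Prints a minimal "properties" mapping in the same shape getMappingsForUrn
    // builds above; key order in the JSON output may vary.
    void printMappingShape() throws Exception {
      Map<String, Object> urn =
          Map.of("type", "keyword", "fields", Map.of("delimited", Map.of("type", "text")));
      System.out.println(
          new ObjectMapper().writeValueAsString(Map.of("properties", Map.of("urn", urn))));
    }
]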
= new HashMap<>(); Map<String, Object> mappingForField = new HashMap<>(); if (fieldType == FieldType.KEYWORD) { mappingForField.putAll(getMappingsForKeyword()); - } else if (fieldType == FieldType.TEXT || fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) { + } else if (fieldType == FieldType.TEXT + || fieldType == FieldType.TEXT_PARTIAL + || fieldType == FieldType.WORD_GRAM) { mappingForField.putAll(getMappingsForSearchText(fieldType)); } else if (fieldType == FieldType.BROWSE_PATH) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); - mappingForField.put(FIELDS, - ImmutableMap.of(LENGTH, ImmutableMap.of( - TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, - ANALYZER, SLASH_PATTERN_ANALYZER))); + mappingForField.put( + FIELDS, + ImmutableMap.of( + LENGTH, + ImmutableMap.of( + TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, ANALYZER, SLASH_PATTERN_ANALYZER))); mappingForField.put(ANALYZER, BROWSE_PATH_HIERARCHY_ANALYZER); mappingForField.put(FIELDDATA, true); } else if (fieldType == FieldType.BROWSE_PATH_V2) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); - mappingForField.put(FIELDS, - ImmutableMap.of(LENGTH, ImmutableMap.of( - TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, - ANALYZER, UNIT_SEPARATOR_PATTERN_ANALYZER))); + mappingForField.put( + FIELDS, + ImmutableMap.of( + LENGTH, + ImmutableMap.of( + TYPE, + ESUtils.TOKEN_COUNT_FIELD_TYPE, + ANALYZER, + UNIT_SEPARATOR_PATTERN_ANALYZER))); mappingForField.put(ANALYZER, BROWSE_PATH_V2_HIERARCHY_ANALYZER); mappingForField.put(FIELDDATA, true); - } else if (fieldType == FieldType.URN || fieldType == FieldType.URN_PARTIAL) { + } else if (fieldType == FieldType.URN || fieldType == FieldType.URN_PARTIAL) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); mappingForField.put(ANALYZER, URN_ANALYZER); mappingForField.put(SEARCH_ANALYZER, URN_SEARCH_ANALYZER); mappingForField.put(SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER); Map<String, Object> subFields = new HashMap<>(); if (fieldType == FieldType.URN_PARTIAL) { - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - Map.of( - ANALYZER, PARTIAL_URN_COMPONENT - ) - )); + subFields.put( + NGRAM, getPartialNgramConfigWithOverrides(Map.of(ANALYZER, PARTIAL_URN_COMPONENT))); } subFields.put(KEYWORD, KEYWORD_TYPE_MAP); mappingForField.put(FIELDS, subFields); @@ -141,12 +151,17 @@ private static Map<String, Object> getMappingsForField(@Nonnull final Searchable } mappings.put(searchableFieldSpec.getSearchableAnnotation().getFieldName(), mappingForField); - searchableFieldSpec.getSearchableAnnotation() + searchableFieldSpec + .getSearchableAnnotation() .getHasValuesFieldName() - .ifPresent(fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.BOOLEAN_FIELD_TYPE))); - searchableFieldSpec.getSearchableAnnotation() + .ifPresent( + fieldName -> + mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.BOOLEAN_FIELD_TYPE))); + searchableFieldSpec + .getSearchableAnnotation() .getNumValuesFieldName() - .ifPresent(fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.LONG_FIELD_TYPE))); + .ifPresent( + fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.LONG_FIELD_TYPE))); mappings.putAll(getMappingsForFieldNameAliases(searchableFieldSpec)); return mappings; @@ -167,26 +182,25 @@ private static Map<String, Object> getMappingsForSearchText(FieldType fieldType) mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER); Map<String, Object> subFields = new HashMap<>(); if (fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) { - subFields.put(NGRAM, 
getPartialNgramConfigWithOverrides( - ImmutableMap.of( - ANALYZER, PARTIAL_ANALYZER - ) - )); + subFields.put( + NGRAM, getPartialNgramConfigWithOverrides(ImmutableMap.of(ANALYZER, PARTIAL_ANALYZER))); if (fieldType == FieldType.WORD_GRAM) { - for (Map.Entry<String, String> entry : Map.of( - WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER, - WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER, - WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER).entrySet()) { + for (Map.Entry<String, String> entry : + Map.of( + WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER, + WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER, + WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER) + .entrySet()) { String fieldName = entry.getKey(); String analyzerName = entry.getValue(); - subFields.put(fieldName, ImmutableMap.of( - TYPE, ESUtils.TEXT_FIELD_TYPE, - ANALYZER, analyzerName - )); + subFields.put( + fieldName, ImmutableMap.of(TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, analyzerName)); } } } - subFields.put(DELIMITED, ImmutableMap.of( + subFields.put( + DELIMITED, + ImmutableMap.of( TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, TEXT_ANALYZER, SEARCH_ANALYZER, TEXT_SEARCH_ANALYZER, @@ -199,19 +213,23 @@ private static Map<String, Object> getMappingsForSearchText(FieldType fieldType) private static Map<String, Object> getMappingsForSearchScoreField( @Nonnull final SearchScoreFieldSpec searchScoreFieldSpec) { - return ImmutableMap.of(searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(), + return ImmutableMap.of( + searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(), ImmutableMap.of(TYPE, ESUtils.DOUBLE_FIELD_TYPE)); } - private static Map<String, Object> getMappingsForFieldNameAliases(@Nonnull final SearchableFieldSpec searchableFieldSpec) { + private static Map<String, Object> getMappingsForFieldNameAliases( + @Nonnull final SearchableFieldSpec searchableFieldSpec) { Map<String, Object> mappings = new HashMap<>(); - List<String> fieldNameAliases = searchableFieldSpec.getSearchableAnnotation().getFieldNameAliases(); - fieldNameAliases.forEach(alias -> { - Map<String, Object> aliasMappings = new HashMap<>(); - aliasMappings.put(TYPE, ALIAS); - aliasMappings.put(PATH, searchableFieldSpec.getSearchableAnnotation().getFieldName()); - mappings.put(alias, aliasMappings); - }); + List<String> fieldNameAliases = + searchableFieldSpec.getSearchableAnnotation().getFieldNameAliases(); + fieldNameAliases.forEach( + alias -> { + Map<String, Object> aliasMappings = new HashMap<>(); + aliasMappings.put(TYPE, ALIAS); + aliasMappings.put(PATH, searchableFieldSpec.getSearchableAnnotation().getFieldName()); + mappings.put(alias, aliasMappings); + }); return mappings; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java index 8b8a48f5d9cda..e3155c9f943cc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java @@ -1,256 +1,298 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; 
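[Note: ReindexConfig, rewritten below, decides between patching mappings in place and forcing a full reindex by diffing the current and target "properties" maps with Guava. The core decision rule, shown in isolation:

    import com.google.common.collect.MapDifference;
    import com.google.common.collect.Maps;
    import java.util.Map;

    // A mappings change is a pure addition (safe to apply without reindexing)
    // when no existing entry differs and at least one entry exists only on the
    // target side.
    static boolean isPureAddition(Map<String, Object> current, Map<String, Object> target) {
      MapDifference<String, Object> diff = Maps.difference(current, target);
      return diff.entriesDiffering().isEmpty() && !diff.entriesOnlyOnRight().isEmpty();
    }

    // isPureAddition(Map.of("urn", "keyword"), Map.of("urn", "keyword", "runId", "keyword")) -> true
    // isPureAddition(Map.of("urn", "keyword"), Map.of("urn", "text")) -> false
]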
-import lombok.Builder; -import lombok.Getter; -import lombok.experimental.Accessors; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.common.settings.Settings; - import java.util.List; import java.util.Map; import java.util.Objects; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static com.linkedin.metadata.Constants.*; - +import lombok.Builder; +import lombok.Getter; +import lombok.experimental.Accessors; +import lombok.extern.slf4j.Slf4j; +import org.opensearch.common.settings.Settings; @Slf4j @Builder @Getter @Accessors(fluent = true) public class ReindexConfig { - public final static ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - /* - Most index settings are default values and populated by Elastic. This list is an include list to determine which - settings we care about when a difference is present. - */ - public static final List<String> SETTINGS_DYNAMIC = ImmutableList.of("number_of_replicas", "refresh_interval"); - // These setting require reindex - public static final List<String> SETTINGS_STATIC = ImmutableList.of("number_of_shards"); - public static final List<String> SETTINGS = Stream.concat( - SETTINGS_DYNAMIC.stream(), SETTINGS_STATIC.stream()).collect(Collectors.toList()); + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - final private String name; - final private boolean exists; - final private Settings currentSettings; - final private Map<String, Object> targetSettings; - final private Map<String, Object> currentMappings; - final private Map<String, Object> targetMappings; - final private boolean enableIndexMappingsReindex; - final private boolean enableIndexSettingsReindex; - final private String version; + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } - /* Calculated */ - final private boolean requiresReindex; - final private boolean requiresApplySettings; - final private boolean requiresApplyMappings; - final private boolean isPureMappingsAddition; - final private boolean isSettingsReindex; + /* + Most index settings are default values and populated by Elastic. This list is an include list to determine which + settings we care about when a difference is present. 
+ */ + public static final List<String> SETTINGS_DYNAMIC = + ImmutableList.of("number_of_replicas", "refresh_interval"); + // These setting require reindex + public static final List<String> SETTINGS_STATIC = ImmutableList.of("number_of_shards"); + public static final List<String> SETTINGS = + Stream.concat(SETTINGS_DYNAMIC.stream(), SETTINGS_STATIC.stream()) + .collect(Collectors.toList()); - public static ReindexConfigBuilder builder() { - return new CalculatedBuilder(); - } + private final String name; + private final boolean exists; + private final Settings currentSettings; + private final Map<String, Object> targetSettings; + private final Map<String, Object> currentMappings; + private final Map<String, Object> targetMappings; + private final boolean enableIndexMappingsReindex; + private final boolean enableIndexSettingsReindex; + private final String version; - public static class ReindexConfigBuilder { - // hide calculated fields - private ReindexConfigBuilder requiresReindex(boolean ignored) { - return this; - } - private ReindexConfigBuilder requiresApplySettings(boolean ignored) { - return this; - } - private ReindexConfigBuilder requiresApplyMappings(boolean ignored) { - return this; - } - private ReindexConfigBuilder isPureMappingsAddition(boolean ignored) { - return this; - } - private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { - return this; - } + /* Calculated */ + private final boolean requiresReindex; + private final boolean requiresApplySettings; + private final boolean requiresApplyMappings; + private final boolean isPureMappingsAddition; + private final boolean isSettingsReindex; - // ensure sorted - public ReindexConfigBuilder currentMappings(Map<String, Object> currentMappings) { - this.currentMappings = sortMap(currentMappings); - return this; - } - public ReindexConfigBuilder targetMappings(Map<String, Object> targetMappings) { - this.targetMappings = sortMap(targetMappings); - return this; - } + public static ReindexConfigBuilder builder() { + return new CalculatedBuilder(); + } - private static TreeMap<String, Object> sortMap(Map<String, Object> input) { - return input.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> { - if (e.getValue() instanceof Map) { - return sortMap((Map<String, Object>) e.getValue()); - } else { - return String.valueOf(e.getValue()); - } - }, - (oldValue, newValue) -> newValue, TreeMap::new)); - } + public static class ReindexConfigBuilder { + // hide calculated fields + private ReindexConfigBuilder requiresReindex(boolean ignored) { + return this; } - /** - * Implement calculated fields - */ - public String indexPattern() { - return name + "*"; + private ReindexConfigBuilder requiresApplySettings(boolean ignored) { + return this; } - public String indexCleanPattern() { - return name + "_*"; + private ReindexConfigBuilder requiresApplyMappings(boolean ignored) { + return this; } - private static class CalculatedBuilder extends ReindexConfigBuilder { - @Override - public ReindexConfig build() { - if (super.exists) { - /* Consider mapping changes */ - MapDifference<String, Object> mappingsDiff = Maps.difference( - getOrDefault(super.currentMappings, List.of("properties")), - getOrDefault(super.targetMappings, List.of("properties"))); - super.requiresApplyMappings = !mappingsDiff.entriesDiffering().isEmpty() - || !mappingsDiff.entriesOnlyOnRight().isEmpty(); - super.isPureMappingsAddition = super.requiresApplyMappings - && mappingsDiff.entriesDiffering().isEmpty() - && 
!mappingsDiff.entriesOnlyOnRight().isEmpty(); + private ReindexConfigBuilder isPureMappingsAddition(boolean ignored) { + return this; + } - if (super.requiresApplyMappings && super.isPureMappingsAddition) { - log.info("Index: {} - New fields have been added to index. Adding: {}", - super.name, mappingsDiff.entriesOnlyOnRight()); - } else if (super.requiresApplyMappings) { - log.info("Index: {} - There's diff between new mappings (left) and old mappings (right): {}", - super.name, mappingsDiff.entriesDiffering()); - } + private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { + return this; + } - /* Consider analysis and settings changes */ - super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual(); - super.isSettingsReindex = isSettingsReindexRequired(); + // ensure sorted + public ReindexConfigBuilder currentMappings(Map<String, Object> currentMappings) { + this.currentMappings = sortMap(currentMappings); + return this; + } - /* Determine reindexing required - some settings and mappings do not require reindex, analysis always does */ - if (super.requiresApplyMappings && !super.isPureMappingsAddition) { - if (super.enableIndexMappingsReindex) { - super.requiresReindex = true; - } else { - log.warn("Index: {} - There's diff between new mappings, however reindexing is DISABLED.", super.name); - } - } - if (super.isSettingsReindex) { - try { - if (!isAnalysisEqual()) { - log.info("Index: {} - There's an update to `analysis` settings that requires reindexing. Target: {} Current: {}", - super.name, OBJECT_MAPPER.writeValueAsString(super.targetSettings), super.currentSettings); - } - if (!isSettingsEqual()) { - log.info("Index: {} - There's an update to settings that requires reindexing. Target: {} Current: {}", - super.name, OBJECT_MAPPER.writeValueAsString(super.targetSettings), super.currentSettings); - } - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - if (super.enableIndexSettingsReindex) { - super.requiresReindex = true; + public ReindexConfigBuilder targetMappings(Map<String, Object> targetMappings) { + this.targetMappings = sortMap(targetMappings); + return this; + } + + private static TreeMap<String, Object> sortMap(Map<String, Object> input) { + return input.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> { + if (e.getValue() instanceof Map) { + return sortMap((Map<String, Object>) e.getValue()); } else { - log.warn("Index: {} - There's an update to settings that requires reindexing, however reindexing is DISABLED", super.name); + return String.valueOf(e.getValue()); } - } - } - return super.build(); - } + }, + (oldValue, newValue) -> newValue, + TreeMap::new)); + } + } - private static TreeMap<String, Object> getOrDefault(Map<String, Object> map, List<String> path) { - if (map == null) { - return new TreeMap<>(); - } + /** Implement calculated fields */ + public String indexPattern() { + return name + "*"; + } - TreeMap<String, Object> item = (TreeMap<String, Object>) map.getOrDefault(path.get(0), new TreeMap()); - if (path.size() == 1) { - return item; - } else { - return getOrDefault(item, path.subList(1, path.size())); - } + public String indexCleanPattern() { + return name + "_*"; + } + + private static class CalculatedBuilder extends ReindexConfigBuilder { + @Override + public ReindexConfig build() { + if (super.exists) { + /* Consider mapping changes */ + MapDifference<String, Object> mappingsDiff = + Maps.difference( + getOrDefault(super.currentMappings, List.of("properties")), + 
getOrDefault(super.targetMappings, List.of("properties"))); + super.requiresApplyMappings = + !mappingsDiff.entriesDiffering().isEmpty() + || !mappingsDiff.entriesOnlyOnRight().isEmpty(); + super.isPureMappingsAddition = + super.requiresApplyMappings + && mappingsDiff.entriesDiffering().isEmpty() + && !mappingsDiff.entriesOnlyOnRight().isEmpty(); + + if (super.requiresApplyMappings && super.isPureMappingsAddition) { + log.info( + "Index: {} - New fields have been added to index. Adding: {}", + super.name, + mappingsDiff.entriesOnlyOnRight()); + } else if (super.requiresApplyMappings) { + log.info( + "Index: {} - There's diff between new mappings (left) and old mappings (right): {}", + super.name, + mappingsDiff.entriesDiffering()); } - private boolean isAnalysisEqual() { - if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { - return true; + /* Consider analysis and settings changes */ + super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual(); + super.isSettingsReindex = isSettingsReindexRequired(); + + /* Determine reindexing required - some settings and mappings do not require reindex, analysis always does */ + if (super.requiresApplyMappings && !super.isPureMappingsAddition) { + if (super.enableIndexMappingsReindex) { + super.requiresReindex = true; + } else { + log.warn( + "Index: {} - There's diff between new mappings, however reindexing is DISABLED.", + super.name); + } + } + if (super.isSettingsReindex) { + try { + if (!isAnalysisEqual()) { + log.info( + "Index: {} - There's an update to `analysis` settings that requires reindexing. Target: {} Current: {}", + super.name, + OBJECT_MAPPER.writeValueAsString(super.targetSettings), + super.currentSettings); } - Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); - if (!indexSettings.containsKey("analysis")) { - return true; + if (!isSettingsEqual()) { + log.info( + "Index: {} - There's an update to settings that requires reindexing. Target: {} Current: {}", + super.name, + OBJECT_MAPPER.writeValueAsString(super.targetSettings), + super.currentSettings); } - // Compare analysis section - Map<String, Object> newAnalysis = (Map<String, Object>) indexSettings.get("analysis"); - Settings oldAnalysis = super.currentSettings.getByPrefix("index.analysis."); - return equalsGroup(newAnalysis, oldAnalysis); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + if (super.enableIndexSettingsReindex) { + super.requiresReindex = true; + } else { + log.warn( + "Index: {} - There's an update to settings that requires reindexing, however reindexing is DISABLED", + super.name); + } } + } + return super.build(); + } - private boolean isSettingsEqual() { - if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { - return true; - } - Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); - return SETTINGS.stream() - .allMatch(settingKey -> Objects.equals(indexSettings.get(settingKey).toString(), - super.currentSettings.get("index." 
+ settingKey))); - } + private static TreeMap<String, Object> getOrDefault( + Map<String, Object> map, List<String> path) { + if (map == null) { + return new TreeMap<>(); + } - private boolean isSettingsReindexRequired() { - if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { - return false; - } - Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); + TreeMap<String, Object> item = + (TreeMap<String, Object>) map.getOrDefault(path.get(0), new TreeMap()); + if (path.size() == 1) { + return item; + } else { + return getOrDefault(item, path.subList(1, path.size())); + } + } - if (SETTINGS_STATIC.stream().anyMatch(settingKey -> - !Objects.equals(indexSettings.get(settingKey).toString(), super.currentSettings.get("index." + settingKey)))) { - return true; - } + private boolean isAnalysisEqual() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return true; + } + Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); + if (!indexSettings.containsKey("analysis")) { + return true; + } + // Compare analysis section + Map<String, Object> newAnalysis = (Map<String, Object>) indexSettings.get("analysis"); + Settings oldAnalysis = super.currentSettings.getByPrefix("index.analysis."); + return equalsGroup(newAnalysis, oldAnalysis); + } - return indexSettings.containsKey("analysis") - && !equalsGroup((Map<String, Object>) indexSettings.get("analysis"), - super.currentSettings.getByPrefix("index.analysis.")); - } + private boolean isSettingsEqual() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return true; + } + Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); + return SETTINGS.stream() + .allMatch( + settingKey -> + Objects.equals( + indexSettings.get(settingKey).toString(), + super.currentSettings.get("index." + settingKey))); } - private static boolean equalsGroup(Map<String, Object> newSettings, Settings oldSettings) { - if (!newSettings.keySet().equals(oldSettings.names())) { - return false; - } + private boolean isSettingsReindexRequired() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return false; + } + Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); - for (String key : newSettings.keySet()) { - // Skip urn stop filter, as adding new entities will cause this filter to change - // No need to reindex every time a new entity is added - if (key.equals("urn_stop_filter")) { - continue; - } - if (newSettings.get(key) instanceof Map) { - if (!equalsGroup((Map<String, Object>) newSettings.get(key), oldSettings.getByPrefix(key + "."))) { - return false; - } - } else if (newSettings.get(key) instanceof List) { - if (!newSettings.get(key).equals(oldSettings.getAsList(key))) { - return false; - } - } else { - if (!newSettings.get(key).toString().equals(oldSettings.get(key))) { - return false; - } - } - } + if (SETTINGS_STATIC.stream() + .anyMatch( + settingKey -> + !Objects.equals( + indexSettings.get(settingKey).toString(), + super.currentSettings.get("index." 
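[Note: the SETTINGS_DYNAMIC / SETTINGS_STATIC split mirrors Elasticsearch's own distinction: number_of_replicas and refresh_interval can be updated on a live index, while number_of_shards is fixed at creation, which is why a static-settings diff forces the reindex path above. A sketch of the in-place path for dynamic settings; the index name is a placeholder:

    import java.io.IOException;
    import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
    import org.opensearch.client.RequestOptions;
    import org.opensearch.client.RestHighLevelClient;
    import org.opensearch.common.settings.Settings;

    // Dynamic settings apply without rebuilding the index; attempting the same
    // change for index.number_of_shards would be rejected by the cluster.
    void applyDynamicSettings(RestHighLevelClient client) throws IOException {
      UpdateSettingsRequest request =
          new UpdateSettingsRequest("datasetindex_v2")
              .settings(Settings.builder().put("index.number_of_replicas", 1).build());
      client.indices().putSettings(request, RequestOptions.DEFAULT);
    }
]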
+ settingKey)))) { return true; + } + + return indexSettings.containsKey("analysis") + && !equalsGroup( + (Map<String, Object>) indexSettings.get("analysis"), + super.currentSettings.getByPrefix("index.analysis.")); + } + } + + private static boolean equalsGroup(Map<String, Object> newSettings, Settings oldSettings) { + if (!newSettings.keySet().equals(oldSettings.names())) { + return false; + } + + for (String key : newSettings.keySet()) { + // Skip urn stop filter, as adding new entities will cause this filter to change + // No need to reindex every time a new entity is added + if (key.equals("urn_stop_filter")) { + continue; + } + if (newSettings.get(key) instanceof Map) { + if (!equalsGroup( + (Map<String, Object>) newSettings.get(key), oldSettings.getByPrefix(key + "."))) { + return false; + } + } else if (newSettings.get(key) instanceof List) { + if (!newSettings.get(key).equals(oldSettings.getAsList(key))) { + return false; + } + } else { + if (!newSettings.get(key).toString().equals(oldSettings.get(key))) { + return false; + } + } } + return true; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java index e180c8296b48d..d1eedbbce0495 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java @@ -2,22 +2,18 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import java.util.List; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang3.StringUtils; -import org.springframework.core.io.Resource; -import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.lang3.StringUtils; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - -/** - * Builder for generating settings for elasticsearch indices - */ +/** Builder for generating settings for elasticsearch indices */ public class SettingsBuilder { // ElasticSearch Property Map Keys @@ -42,7 +38,7 @@ public class SettingsBuilder { public static final String REPLACEMENT = "replacement"; public static final String PRESERVE_ORIGINAL = "preserve_original"; public static final String SEARCH_ANALYZER = "search_analyzer"; - public static final String SEARCH_QUOTE_ANALYZER = "search_quote_analyzer"; + public static final String SEARCH_QUOTE_ANALYZER = "search_quote_analyzer"; public static final String CUSTOM_QUOTE_ANALYZER = "quote_analyzer"; public static final String SPLIT_ON_NUMERICS = "split_on_numerics"; public static final String SPLIT_ON_CASE_CHANGE = "split_on_case_change"; @@ -98,9 +94,10 @@ public class SettingsBuilder { public static final String TRIM = "trim"; // MultiFilters - public static final String MULTIFILTER_GRAPH_1 = String.join(",", LOWERCASE, STICKY_DELIMITER_GRAPH); - public static final String MULTIFILTER_GRAPH_2 = String.join(",", LOWERCASE, ALPHANUM_SPACE_ONLY, - DEFAULT_SYN_GRAPH); + public static final String MULTIFILTER_GRAPH_1 = + String.join(",", LOWERCASE, 
STICKY_DELIMITER_GRAPH); + public static final String MULTIFILTER_GRAPH_2 = + String.join(",", LOWERCASE, ALPHANUM_SPACE_ONLY, DEFAULT_SYN_GRAPH); public static final String MULTIFILTER_1 = String.join(",", MULTIFILTER_GRAPH_1, FLATTEN_GRAPH); public static final String MULTIFILTER_2 = String.join(",", MULTIFILTER_GRAPH_2, FLATTEN_GRAPH); @@ -117,20 +114,15 @@ public class SettingsBuilder { public static final String UNIT_SEPARATOR_TOKENIZER = "unit_separator_tokenizer"; public static final String WORD_GRAM_TOKENIZER = "word_gram_tokenizer"; // Do not remove the space, needed for multi-term synonyms - public static final List<String> ALPHANUM_SPACE_PATTERNS = ImmutableList.of( - "([a-z0-9 _-]{2,})", - "([a-z0-9 ]{2,})", - "\\\"([^\\\"]*)\\\"" - ); + public static final List<String> ALPHANUM_SPACE_PATTERNS = + ImmutableList.of("([a-z0-9 _-]{2,})", "([a-z0-9 ]{2,})", "\\\"([^\\\"]*)\\\""); public static final List<String> DATAHUB_STOP_WORDS_LIST = ImmutableList.of("urn", "li"); - public static final List<String> WORD_DELIMITER_TYPE_TABLE = ImmutableList.of( - ": => SUBWORD_DELIM", - "_ => ALPHANUM", - "- => ALPHA" - ); - public static final List<String> INDEX_TOKEN_FILTERS = ImmutableList.of( + public static final List<String> WORD_DELIMITER_TYPE_TABLE = + ImmutableList.of(": => SUBWORD_DELIM", "_ => ALPHANUM", "- => ALPHA"); + public static final List<String> INDEX_TOKEN_FILTERS = + ImmutableList.of( ASCII_FOLDING, MULTIFILTER, TRIM, @@ -143,7 +135,8 @@ public class SettingsBuilder { UNIQUE, MIN_LENGTH); - public static final List<String> SEARCH_TOKEN_FILTERS = ImmutableList.of( + public static final List<String> SEARCH_TOKEN_FILTERS = + ImmutableList.of( ASCII_FOLDING, MULTIFILTER_GRAPH, TRIM, @@ -156,25 +149,15 @@ public class SettingsBuilder { UNIQUE, MIN_LENGTH); - public static final List<String> QUOTED_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - LOWERCASE, - REMOVE_QUOTES, - DATAHUB_STOP_WORDS, - STOP, - MIN_LENGTH); + public static final List<String> QUOTED_TOKEN_FILTERS = + ImmutableList.of( + ASCII_FOLDING, LOWERCASE, REMOVE_QUOTES, DATAHUB_STOP_WORDS, STOP, MIN_LENGTH); - public static final List<String> PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - AUTOCOMPLETE_CUSTOM_DELIMITER, - LOWERCASE); + public static final List<String> PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS = + ImmutableList.of(ASCII_FOLDING, AUTOCOMPLETE_CUSTOM_DELIMITER, LOWERCASE); - public static final List<String> WORD_GRAM_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - LOWERCASE, - TRIM, - REMOVE_QUOTES - ); + public static final List<String> WORD_GRAM_TOKEN_FILTERS = + ImmutableList.of(ASCII_FOLDING, LOWERCASE, TRIM, REMOVE_QUOTES); public final Map<String, Object> settings; @@ -193,7 +176,9 @@ public Map<String, Object> getSettings() { private static Map<String, Object> buildSettings(String mainTokenizer) throws IOException { ImmutableMap.Builder<String, Object> settings = ImmutableMap.builder(); settings.put(MAX_NGRAM_DIFF, 17); - settings.put(ANALYSIS, ImmutableMap.<String, Object>builder() + settings.put( + ANALYSIS, + ImmutableMap.<String, Object>builder() .put(FILTER, buildFilters()) .put(TOKENIZER, buildTokenizers()) .put(NORMALIZER, buildNormalizers()) @@ -203,12 +188,15 @@ private static Map<String, Object> buildSettings(String mainTokenizer) throws IO } private static Map<String, Object> buildFilters() throws IOException { - PathMatchingResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver(); + PathMatchingResourcePatternResolver 
resourceResolver = + new PathMatchingResourcePatternResolver(); ImmutableMap.Builder<String, Object> filters = ImmutableMap.builder(); // Filter to split string into words - filters.put(AUTOCOMPLETE_CUSTOM_DELIMITER, ImmutableMap.<String, Object>builder() + filters.put( + AUTOCOMPLETE_CUSTOM_DELIMITER, + ImmutableMap.<String, Object>builder() .put(TYPE, WORD_DELIMITER) .put(SPLIT_ON_NUMERICS, false) .put(SPLIT_ON_CASE_CHANGE, false) @@ -216,7 +204,9 @@ private static Map<String, Object> buildFilters() throws IOException { .put(TYPE_TABLE, WORD_DELIMITER_TYPE_TABLE) .build()); - filters.put(STICKY_DELIMITER_GRAPH, ImmutableMap.<String, Object>builder() + filters.put( + STICKY_DELIMITER_GRAPH, + ImmutableMap.<String, Object>builder() .put(TYPE, WORD_DELIMITER_GRAPH) .put(SPLIT_ON_NUMERICS, false) .put(SPLIT_ON_CASE_CHANGE, false) @@ -225,22 +215,30 @@ private static Map<String, Object> buildFilters() throws IOException { .put(TYPE_TABLE, WORD_DELIMITER_TYPE_TABLE) .build()); - filters.put(DATAHUB_STOP_WORDS, ImmutableMap.<String, Object>builder() + filters.put( + DATAHUB_STOP_WORDS, + ImmutableMap.<String, Object>builder() .put(TYPE, STOP) .put(IGNORE_CASE, "true") .put(STOPWORDS, DATAHUB_STOP_WORDS_LIST) .build()); - filters.put(MIN_LENGTH, ImmutableMap.<String, Object>builder() - .put(TYPE, "length") - .put("min", "3") - .build()); + filters.put( + MIN_LENGTH, + ImmutableMap.<String, Object>builder().put(TYPE, "length").put("min", "3").build()); - Resource stemOverride = resourceResolver.getResource("classpath:elasticsearch/stem_override.txt"); - try (BufferedReader reader = new BufferedReader(new InputStreamReader(stemOverride.getInputStream()))) { - filters.put(STEM_OVERRIDE, ImmutableMap.<String, Object>builder() + Resource stemOverride = + resourceResolver.getResource("classpath:elasticsearch/stem_override.txt"); + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(stemOverride.getInputStream()))) { + filters.put( + STEM_OVERRIDE, + ImmutableMap.<String, Object>builder() .put(TYPE, "stemmer_override") - .put("rules", reader.lines() + .put( + "rules", + reader + .lines() .map(String::trim) .map(String::toLowerCase) .filter(line -> !line.isEmpty() && !line.startsWith("#")) @@ -248,42 +246,50 @@ private static Map<String, Object> buildFilters() throws IOException { .build()); } - filters.put(ALPHANUM_SPACE_ONLY, ImmutableMap.<String, Object>builder() + filters.put( + ALPHANUM_SPACE_ONLY, + ImmutableMap.<String, Object>builder() .put(TYPE, "pattern_capture") .put(PATTERNS, ALPHANUM_SPACE_PATTERNS) .build()); - filters.put(REMOVE_QUOTES, ImmutableMap.<String, Object>builder() + filters.put( + REMOVE_QUOTES, + ImmutableMap.<String, Object>builder() .put(TYPE, "pattern_replace") .put(PATTERN, "['\"]") .put(REPLACEMENT, "") .build()); // Index Time - filters.put(MULTIFILTER, ImmutableMap.<String, Object>builder() + filters.put( + MULTIFILTER, + ImmutableMap.<String, Object>builder() .put(TYPE, "multiplexer") - .put(FILTERS, ImmutableList.of( - MULTIFILTER_1, - MULTIFILTER_2 - )) + .put(FILTERS, ImmutableList.of(MULTIFILTER_1, MULTIFILTER_2)) .build()); // Search Time - filters.put(MULTIFILTER_GRAPH, ImmutableMap.<String, Object>builder() + filters.put( + MULTIFILTER_GRAPH, + ImmutableMap.<String, Object>builder() .put(TYPE, "multiplexer") - .put(FILTERS, ImmutableList.of( - MULTIFILTER_GRAPH_1, - MULTIFILTER_GRAPH_2 - )) + .put(FILTERS, ImmutableList.of(MULTIFILTER_GRAPH_1, MULTIFILTER_GRAPH_2)) .build()); Resource[] synonyms = 
resourceResolver.getResources("classpath:elasticsearch/synonyms/*.txt"); - for (Resource syn: synonyms) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(syn.getInputStream()))) { - filters.put(String.format("%s_syn_graph", FilenameUtils.getBaseName(syn.getFilename())), ImmutableMap.<String, Object>builder() + for (Resource syn : synonyms) { + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(syn.getInputStream()))) { + filters.put( + String.format("%s_syn_graph", FilenameUtils.getBaseName(syn.getFilename())), + ImmutableMap.<String, Object>builder() .put(TYPE, "synonym_graph") .put(LENIENT, "false") - .put(SYNONYMS, reader.lines() + .put( + SYNONYMS, + reader + .lines() .map(String::trim) .map(String::toLowerCase) .filter(line -> !line.isEmpty() && !line.startsWith("#")) @@ -291,15 +297,18 @@ private static Map<String, Object> buildFilters() throws IOException { .build()); } - for (Map.Entry<String, Integer> entry : Map.of(WORD_GRAM_2_FILTER, 2, WORD_GRAM_3_FILTER, 3, WORD_GRAM_4_FILTER, 4).entrySet()) { + for (Map.Entry<String, Integer> entry : + Map.of(WORD_GRAM_2_FILTER, 2, WORD_GRAM_3_FILTER, 3, WORD_GRAM_4_FILTER, 4).entrySet()) { String filterName = entry.getKey(); Integer gramSize = entry.getValue(); - filters.put(filterName, ImmutableMap.<String, Object>builder() - .put(TYPE, SHINGLE) - .put("min_shingle_size", gramSize) - .put("max_shingle_size", gramSize) - .put("output_unigrams", false) - .build()); + filters.put( + filterName, + ImmutableMap.<String, Object>builder() + .put(TYPE, SHINGLE) + .put("min_shingle_size", gramSize) + .put("max_shingle_size", gramSize) + .put("output_unigrams", false) + .build()); } } @@ -309,20 +318,16 @@ private static Map<String, Object> buildFilters() throws IOException { private static Map<String, Object> buildTokenizers() { ImmutableMap.Builder<String, Object> tokenizers = ImmutableMap.builder(); // Tokenize by slashes - tokenizers.put(SLASH_TOKENIZER, - ImmutableMap.<String, Object>builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[/]") - .build()); + tokenizers.put( + SLASH_TOKENIZER, + ImmutableMap.<String, Object>builder().put(TYPE, PATTERN).put(PATTERN, "[/]").build()); + tokenizers.put( + UNIT_SEPARATOR_TOKENIZER, + ImmutableMap.<String, Object>builder().put(TYPE, PATTERN).put(PATTERN, "[␟]").build()); - tokenizers.put(UNIT_SEPARATOR_TOKENIZER, - ImmutableMap.<String, Object>builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[␟]") - .build()); - - tokenizers.put(UNIT_SEPARATOR_PATH_TOKENIZER, + tokenizers.put( + UNIT_SEPARATOR_PATH_TOKENIZER, ImmutableMap.<String, Object>builder() .put(TYPE, PATH_HIERARCHY_TOKENIZER) .put(DELIMITER, "␟") @@ -331,16 +336,15 @@ private static Map<String, Object> buildTokenizers() { // Tokenize by most special chars // Do NOT tokenize by whitespace to keep multi-word synonyms in the same token // The split by whitespace is done later in the token filters phase - tokenizers.put(MAIN_TOKENIZER, - ImmutableMap.<String, Object>builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[(),./:]") - .build()); + tokenizers.put( + MAIN_TOKENIZER, + ImmutableMap.<String, Object>builder().put(TYPE, PATTERN).put(PATTERN, "[(),./:]").build()); // Tokenize by whitespace and most special chars for wordgrams // only split on - when not preceded by a whitespace to preserve exclusion functionality // i.e. 
"logging-events-bkcp" and "logging-events -bckp" should be handled differently - tokenizers.put(WORD_GRAM_TOKENIZER, + tokenizers.put( + WORD_GRAM_TOKENIZER, ImmutableMap.<String, Object>builder() .put(TYPE, PATTERN) .put(PATTERN, "[(),./:\\s_]|(?<=\\S)(-)") @@ -353,8 +357,11 @@ private static Map<String, Object> buildTokenizers() { private static Map<String, Object> buildNormalizers() { ImmutableMap.Builder<String, Object> normalizers = ImmutableMap.builder(); // Analyzer for partial matching (i.e. autocomplete) - Prefix matching of each token - normalizers.put(KEYWORD_NORMALIZER, - ImmutableMap.<String, Object>builder().put(FILTER, ImmutableList.of(LOWERCASE, ASCII_FOLDING)).build()); + normalizers.put( + KEYWORD_NORMALIZER, + ImmutableMap.<String, Object>builder() + .put(FILTER, ImmutableList.of(LOWERCASE, ASCII_FOLDING)) + .build()); return normalizers.build(); } @@ -364,90 +371,119 @@ private static Map<String, Object> buildAnalyzers(String mainTokenizer) { ImmutableMap.Builder<String, Object> analyzers = ImmutableMap.builder(); // Analyzer for splitting by slashes (used to get depth of browsePath) - analyzers.put(SLASH_PATTERN_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + SLASH_PATTERN_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, SLASH_TOKENIZER) .put(FILTER, ImmutableList.of(LOWERCASE)) .build()); // Analyzer for splitting by unit-separator (used to get depth of browsePathV2) - analyzers.put(UNIT_SEPARATOR_PATTERN_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + UNIT_SEPARATOR_PATTERN_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, UNIT_SEPARATOR_TOKENIZER) .put(FILTER, ImmutableList.of(LOWERCASE)) .build()); // Analyzer for matching browse path - analyzers.put(BROWSE_PATH_HIERARCHY_ANALYZER, ImmutableMap.<String, Object>builder() - .put(TOKENIZER, PATH_HIERARCHY_TOKENIZER) - .build()); + analyzers.put( + BROWSE_PATH_HIERARCHY_ANALYZER, + ImmutableMap.<String, Object>builder().put(TOKENIZER, PATH_HIERARCHY_TOKENIZER).build()); // Analyzer for matching browse path v2 - analyzers.put(BROWSE_PATH_V2_HIERARCHY_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + BROWSE_PATH_V2_HIERARCHY_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, UNIT_SEPARATOR_PATH_TOKENIZER) .build()); // Analyzer for case-insensitive exact matching - Only used when building queries - analyzers.put(KEYWORD_LOWERCASE_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + KEYWORD_LOWERCASE_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, KEYWORD_TOKENIZER) .put(FILTER, ImmutableList.of("trim", LOWERCASE, ASCII_FOLDING, SNOWBALL)) .build()); // Analyzer for quotes words - analyzers.put(CUSTOM_QUOTE_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + CUSTOM_QUOTE_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, KEYWORD_TOKENIZER) .put(FILTER, QUOTED_TOKEN_FILTERS) .build()); // Analyzer for text tokenized into words (split by spaces, periods, and slashes) - analyzers.put(TEXT_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + TEXT_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? mainTokenizer : MAIN_TOKENIZER) .put(FILTER, INDEX_TOKEN_FILTERS) .build()); - analyzers.put(TEXT_SEARCH_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + TEXT_SEARCH_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? 
mainTokenizer : MAIN_TOKENIZER) .put(FILTER, SEARCH_TOKEN_FILTERS) .build()); // Analyzer for getting urn components - analyzers.put(URN_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + URN_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, INDEX_TOKEN_FILTERS) .build()); - analyzers.put(URN_SEARCH_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + URN_SEARCH_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, SEARCH_TOKEN_FILTERS) .build()); // Support word grams - for (Map.Entry<String, String> entry : Map.of( - WORD_GRAM_2_ANALYZER, WORD_GRAM_2_FILTER, - WORD_GRAM_3_ANALYZER, WORD_GRAM_3_FILTER, - WORD_GRAM_4_ANALYZER, WORD_GRAM_4_FILTER).entrySet()) { + for (Map.Entry<String, String> entry : + Map.of( + WORD_GRAM_2_ANALYZER, WORD_GRAM_2_FILTER, + WORD_GRAM_3_ANALYZER, WORD_GRAM_3_FILTER, + WORD_GRAM_4_ANALYZER, WORD_GRAM_4_FILTER) + .entrySet()) { String analyzerName = entry.getKey(); String filterName = entry.getValue(); - analyzers.put(analyzerName, ImmutableMap.<String, Object>builder() - .put(TOKENIZER, WORD_GRAM_TOKENIZER) - .put(FILTER, ImmutableList.<Object>builder() - .addAll(WORD_GRAM_TOKEN_FILTERS) - .add(filterName).build()) - .build()); + analyzers.put( + analyzerName, + ImmutableMap.<String, Object>builder() + .put(TOKENIZER, WORD_GRAM_TOKENIZER) + .put( + FILTER, + ImmutableList.<Object>builder() + .addAll(WORD_GRAM_TOKEN_FILTERS) + .add(filterName) + .build()) + .build()); } - // For special analysis, the substitution can be read from the configuration (chinese tokenizer: ik_smart / smartCN) + // For special analysis, the substitution can be read from the configuration (chinese tokenizer: + // ik_smart / smartCN) // Analyzer for partial matching (i.e. autocomplete) - Prefix matching of each token - analyzers.put(PARTIAL_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + PARTIAL_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? 
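[Note: the WORD_GRAM_TOKENIZER pattern defined earlier splits on whitespace and most punctuation but treats '-' as a delimiter only when it directly follows a non-space character, which preserves leading '-' exclusion terms for the word-gram analyzers above. The behavior can be checked with plain java.util.regex via String.split:

    import java.util.Arrays;

    // Same pattern as WORD_GRAM_TOKENIZER above; '-' splits only when preceded
    // by a non-whitespace character.
    public static void main(String[] args) {
      String pattern = "[(),./:\\s_]|(?<=\\S)(-)";
      System.out.println(Arrays.toString("logging-events-bkcp".split(pattern)));
      // -> [logging, events, bkcp]
      System.out.println(Arrays.toString("logging-events -bckp".split(pattern)));
      // -> [logging, events, -bckp]
    }
]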
mainTokenizer : MAIN_TOKENIZER) .put(FILTER, PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS) .build()); // Analyzer for partial matching urn components - analyzers.put(PARTIAL_URN_COMPONENT, ImmutableMap.<String, Object>builder() + analyzers.put( + PARTIAL_URN_COMPONENT, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS) .build()); - return analyzers.build(); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index 5fd0a80d23c50..5ea60b24a577a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query; +import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; + import com.codahale.metrics.Timer; import com.datahub.util.exception.ESQueryException; import com.google.common.annotations.VisibleForTesting; @@ -54,9 +56,6 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; - - @Slf4j @RequiredArgsConstructor public class ESBrowseDAO { @@ -64,10 +63,8 @@ public class ESBrowseDAO { private final EntityRegistry entityRegistry; private final RestHighLevelClient client; private final IndexConvention indexConvention; - @Nonnull - private final SearchConfiguration searchConfiguration; - @Nullable - private final CustomSearchConfiguration customSearchConfiguration; + @Nonnull private final SearchConfiguration searchConfiguration; + @Nullable private final CustomSearchConfiguration customSearchConfiguration; private static final String BROWSE_PATH = "browsePaths"; private static final String BROWSE_PATH_DEPTH = "browsePaths.length"; @@ -107,19 +104,26 @@ private class BrowseGroupsResultV2 { * @return a {@link BrowseResult} that contains a list of groups/entities */ @Nonnull - public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, + public BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filters, + int from, int size) { final Map<String, String> requestMap = SearchUtils.getRequestMap(filters); try { - final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); + final String indexName = + indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { groupsResponse = - client.search(constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); + client.search( + constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); } - final BrowseGroupsResult browseGroupsResult = extractGroupsResponse(groupsResponse, path, from, size); + final BrowseGroupsResult browseGroupsResult = + extractGroupsResponse(groupsResponse, path, from, size); final int numGroups = browseGroupsResult.getTotalGroups(); // Based on the number of groups returned, compute the from and size to query for entities @@ -131,14 +135,19 @@ public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nu final SearchResponse 
entitiesResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esEntitiesSearch").time()) { entitiesResponse = - client.search(constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), + client.search( + constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), RequestOptions.DEFAULT); } final int numEntities = (int) entitiesResponse.getHits().getTotalHits().value; - final List<BrowseResultEntity> browseResultEntityList = extractEntitiesResponse(entitiesResponse, path); - - return new BrowseResult().setMetadata( - new BrowseResultMetadata().setTotalNumEntities(browseGroupsResult.getTotalNumEntities()).setPath(path)) + final List<BrowseResultEntity> browseResultEntityList = + extractEntitiesResponse(entitiesResponse, path); + + return new BrowseResult() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) .setEntities(new BrowseResultEntityArray(browseResultEntityList)) .setGroups(new BrowseResultGroupArray(browseGroupsResult.getGroups())) .setNumEntities(numEntities) @@ -176,8 +185,8 @@ private AggregationBuilder buildAggregations(@Nonnull String path) { * @return {@link SearchRequest} */ @Nonnull - protected SearchRequest constructGroupsSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map<String, String> requestMap) { + protected SearchRequest constructGroupsSearchRequest( + @Nonnull String indexName, @Nonnull String path, @Nonnull Map<String, String> requestMap) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); @@ -196,8 +205,8 @@ protected SearchRequest constructGroupsSearchRequest(@Nonnull String indexName, * @return {@link QueryBuilder} */ @Nonnull - private QueryBuilder buildQueryString(@Nonnull String path, @Nonnull Map<String, String> requestMap, - boolean isGroupQuery) { + private QueryBuilder buildQueryString( + @Nonnull String path, @Nonnull Map<String, String> requestMap, boolean isGroupQuery) { final int browseDepthVal = getPathDepth(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); @@ -229,13 +238,17 @@ private QueryBuilder buildQueryString(@Nonnull String path, @Nonnull Map<String, */ @VisibleForTesting @Nonnull - SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map<String, String> requestMap, int from, int size) { + SearchRequest constructEntitiesSearchRequest( + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map<String, String> requestMap, + int from, + int size) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.from(from); searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(new String[]{BROWSE_PATH, URN}, null); + searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); searchSourceBuilder.query(buildQueryString(path, requestMap, false)); searchRequest.source(searchSourceBuilder); @@ -254,8 +267,13 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull */ @VisibleForTesting @Nonnull - SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map<String, String> requestMap, @Nullable Object[] sort, @Nullable String pitId, @Nonnull 
String keepAlive, + SearchRequest constructEntitiesSearchRequest( + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map<String, String> requestMap, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, int size) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -263,7 +281,7 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(new String[]{BROWSE_PATH, URN}, null); + searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); searchSourceBuilder.query(buildQueryString(path, requestMap, false)); searchRequest.source(searchSourceBuilder); @@ -278,19 +296,24 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull * @return {@link BrowseResultMetadata} */ @Nonnull - private BrowseGroupsResult extractGroupsResponse(@Nonnull SearchResponse groupsResponse, @Nonnull String path, - int from, int size) { + private BrowseGroupsResult extractGroupsResponse( + @Nonnull SearchResponse groupsResponse, @Nonnull String path, int from, int size) { final ParsedTerms groups = groupsResponse.getAggregations().get(GROUP_AGG); - final List<BrowseResultGroup> groupsAgg = groups.getBuckets() - .stream() - .map(group -> new BrowseResultGroup().setName(getSimpleName(group.getKeyAsString())) - .setCount(group.getDocCount())) - .collect(Collectors.toList()); + final List<BrowseResultGroup> groupsAgg = + groups.getBuckets().stream() + .map( + group -> + new BrowseResultGroup() + .setName(getSimpleName(group.getKeyAsString())) + .setCount(group.getDocCount())) + .collect(Collectors.toList()); // Get the groups that are in the from to from + size range - final List<BrowseResultGroup> paginatedGroups = groupsAgg.size() <= from ? Collections.emptyList() - : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); - return new BrowseGroupsResult(paginatedGroups, groupsAgg.size(), - (int) groupsResponse.getHits().getTotalHits().value); + final List<BrowseResultGroup> paginatedGroups = + groupsAgg.size() <= from + ? 
Collections.emptyList() + : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); + return new BrowseGroupsResult( + paginatedGroups, groupsAgg.size(), (int) groupsResponse.getHits().getTotalHits().value); } /** @@ -301,18 +324,22 @@ private BrowseGroupsResult extractGroupsResponse(@Nonnull SearchResponse groupsR */ @VisibleForTesting @Nonnull - List<BrowseResultEntity> extractEntitiesResponse(@Nonnull SearchResponse entitiesResponse, - @Nonnull String currentPath) { + List<BrowseResultEntity> extractEntitiesResponse( + @Nonnull SearchResponse entitiesResponse, @Nonnull String currentPath) { final List<BrowseResultEntity> entityMetadataArray = new ArrayList<>(); - Arrays.stream(entitiesResponse.getHits().getHits()).forEach(hit -> { - try { - final List<String> allPaths = (List<String>) hit.getSourceAsMap().get(BROWSE_PATH); - entityMetadataArray.add(new BrowseResultEntity().setName((String) hit.getSourceAsMap().get(URN)) - .setUrn(Urn.createFromString((String) hit.getSourceAsMap().get(URN)))); - } catch (URISyntaxException e) { - log.error("URN is not valid: " + e.toString()); - } - }); + Arrays.stream(entitiesResponse.getHits().getHits()) + .forEach( + hit -> { + try { + final List<String> allPaths = (List<String>) hit.getSourceAsMap().get(BROWSE_PATH); + entityMetadataArray.add( + new BrowseResultEntity() + .setName((String) hit.getSourceAsMap().get(URN)) + .setUrn(Urn.createFromString((String) hit.getSourceAsMap().get(URN)))); + } catch (URISyntaxException e) { + log.error("URN is not valid: " + e.toString()); + } + }); return entityMetadataArray; } @@ -344,7 +371,8 @@ private static int getPathDepth(@Nonnull String path) { public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(new SearchSourceBuilder().query(QueryBuilders.termQuery(URN, urn.toString()))); + searchRequest.source( + new SearchSourceBuilder().query(QueryBuilders.termQuery(URN, urn.toString()))); final SearchHit[] searchHits; try { searchHits = client.search(searchRequest, RequestOptions.DEFAULT).getHits().getHits(); @@ -363,20 +391,32 @@ public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) return (List<String>) sourceMap.get(BROWSE_PATH); } - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count) { try { final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { final String finalInput = input.isEmpty() ? 
"*" : input; groupsResponse = - client.search(constructGroupsSearchRequestV2(entityName, path, filter, finalInput), RequestOptions.DEFAULT); + client.search( + constructGroupsSearchRequestV2(entityName, path, filter, finalInput), + RequestOptions.DEFAULT); } - final BrowseGroupsResultV2 browseGroupsResult = extractGroupsResponseV2(groupsResponse, path, start, count); + final BrowseGroupsResultV2 browseGroupsResult = + extractGroupsResponseV2(groupsResponse, path, start, count); final int numGroups = browseGroupsResult.getTotalGroups(); - return new BrowseResultV2().setMetadata( - new BrowseResultMetadata().setTotalNumEntities(browseGroupsResult.getTotalNumEntities()).setPath(path)) + return new BrowseResultV2() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) .setGroups(new BrowseResultGroupV2Array(browseGroupsResult.getGroups())) .setNumGroups(numGroups) .setFrom(start) @@ -388,12 +428,21 @@ public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, } @Nonnull - private SearchRequest constructGroupsSearchRequestV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input) { + private SearchRequest constructGroupsSearchRequestV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); - searchSourceBuilder.query(buildQueryStringV2(entityName, path, SearchUtil.transformFilterForEntities(filter, indexConvention), input)); + searchSourceBuilder.query( + buildQueryStringV2( + entityName, + path, + SearchUtil.transformFilterForEntities(filter, indexConvention), + input)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -412,21 +461,24 @@ private String getSimpleNameV2(@Nonnull String path) { return path.substring(path.lastIndexOf(BROWSE_V2_DELIMITER) + 1); } - private static int getPathDepthV2(@Nonnull String path) { return StringUtils.countMatches(path, BROWSE_V2_DELIMITER); } @Nonnull - private QueryBuilder buildQueryStringV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input) { + private QueryBuilder buildQueryStringV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - QueryBuilder query = SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getQuery(input, false); + QueryBuilder query = + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + .getQuery(input, false); queryBuilder.must(query); filterSoftDeletedByDefault(filter, queryBuilder); @@ -467,19 +519,19 @@ private AggregationBuilder buildAggregationsV2(@Nonnull String path) { * @return {@link BrowseResultMetadata} */ @Nonnull - private BrowseGroupsResultV2 extractGroupsResponseV2(@Nonnull SearchResponse groupsResponse, @Nonnull String path, - int from, int size) { + private BrowseGroupsResultV2 extractGroupsResponseV2( + @Nonnull 
SearchResponse groupsResponse, @Nonnull String path, int from, int size) { final ParsedTerms groups = groupsResponse.getAggregations().get(GROUP_AGG); - final List<BrowseResultGroupV2> groupsAgg = groups.getBuckets() - .stream() - .map(this::mapBrowseResultGroupV2) - .collect(Collectors.toList()); + final List<BrowseResultGroupV2> groupsAgg = + groups.getBuckets().stream().map(this::mapBrowseResultGroupV2).collect(Collectors.toList()); // Get the groups that are in the from to from + size range - final List<BrowseResultGroupV2> paginatedGroups = groupsAgg.size() <= from ? Collections.emptyList() - : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); - return new BrowseGroupsResultV2(paginatedGroups, groupsAgg.size(), - (int) groupsResponse.getHits().getTotalHits().value); + final List<BrowseResultGroupV2> paginatedGroups = + groupsAgg.size() <= from + ? Collections.emptyList() + : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); + return new BrowseGroupsResultV2( + paginatedGroups, groupsAgg.size(), (int) groupsResponse.getHits().getTotalHits().value); } private boolean hasSubGroups(Terms.Bucket group) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 960a5b38826b1..0718448a6453e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -1,12 +1,16 @@ package com.linkedin.metadata.search.elasticsearch.query; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.codahale.metrics.Timer; -import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.datahub.util.exception.ESQueryException; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.annotations.VisibleForTesting; import com.linkedin.data.template.LongMap; +import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.AutoCompleteResult; @@ -45,24 +49,18 @@ import org.opensearch.client.core.CountRequest; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.SearchModule; import org.opensearch.search.builder.SearchSourceBuilder; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - - -/** - * A search DAO for Elasticsearch backend. - */ +/** A search DAO for Elasticsearch backend. 
*/ @Slf4j @RequiredArgsConstructor public class ESSearchDAO { private static final NamedXContentRegistry X_CONTENT_REGISTRY; + static { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); X_CONTENT_REGISTRY = new NamedXContentRegistry(searchModule.getNamedXContents()); @@ -73,15 +71,14 @@ public class ESSearchDAO { private final IndexConvention indexConvention; private final boolean pointInTimeCreationEnabled; private final String elasticSearchImplementation; - @Nonnull - private final SearchConfiguration searchConfiguration; - @Nullable - private final CustomSearchConfiguration customSearchConfiguration; + @Nonnull private final SearchConfiguration searchConfiguration; + @Nullable private final CustomSearchConfiguration customSearchConfiguration; public long docCount(@Nonnull String entityName) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); CountRequest countRequest = - new CountRequest(indexConvention.getIndexName(entitySpec)).query(SearchRequestHandler.getFilterQuery(null)); + new CountRequest(indexConvention.getIndexName(entitySpec)) + .query(SearchRequestHandler.getFilterQuery(null)); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) { return client.count(countRequest, RequestOptions.DEFAULT).getCount(); } catch (IOException e) { @@ -92,15 +89,21 @@ public long docCount(@Nonnull String entityName) { @Nonnull @WithSpan - private SearchResult executeAndExtract(@Nonnull List<EntitySpec> entitySpec, @Nonnull SearchRequest searchRequest, - @Nullable Filter filter, int from, int size) { + private SearchResult executeAndExtract( + @Nonnull List<EntitySpec> entitySpec, + @Nonnull SearchRequest searchRequest, + @Nullable Filter filter, + int from, + int size) { long id = System.currentTimeMillis(); - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) { log.debug("Executing request {}: {}", id, searchRequest); final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return transformIndexIntoEntityName(SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + return transformIndexIntoEntityName( + SearchRequestHandler.getBuilder( + entitySpec, searchConfiguration, customSearchConfiguration) .extractResult(searchResponse, filter, from, size)); } catch (Exception e) { log.error("Search query failed", e); @@ -116,33 +119,47 @@ private String transformIndexToken(String name, int entityTypeIdx) { } String[] tokens = name.split(AGGREGATION_SEPARATOR_CHAR); if (entityTypeIdx < tokens.length) { - tokens[entityTypeIdx] = indexConvention.getEntityName(tokens[entityTypeIdx]).orElse(tokens[entityTypeIdx]); + tokens[entityTypeIdx] = + indexConvention.getEntityName(tokens[entityTypeIdx]).orElse(tokens[entityTypeIdx]); } return String.join(AGGREGATION_SEPARATOR_CHAR, tokens); } - private AggregationMetadata transformAggregationMetadata(@Nonnull AggregationMetadata aggMeta, int entityTypeIdx) { + private AggregationMetadata transformAggregationMetadata( + @Nonnull AggregationMetadata aggMeta, int entityTypeIdx) { if (entityTypeIdx >= 0) { - aggMeta.setAggregations(new LongMap( - aggMeta.getAggregations().entrySet().stream().collect( - Collectors.toMap(entry -> transformIndexToken(entry.getKey(), entityTypeIdx), Map.Entry::getValue)))); 
+ aggMeta.setAggregations( + new LongMap( + aggMeta.getAggregations().entrySet().stream() + .collect( + Collectors.toMap( + entry -> transformIndexToken(entry.getKey(), entityTypeIdx), + Map.Entry::getValue)))); aggMeta.setFilterValues( new FilterValueArray( - aggMeta.getFilterValues().stream().map( - filterValue -> filterValue.setValue(transformIndexToken(filterValue.getValue(), entityTypeIdx))) - .collect(Collectors.toList()) - )); - + aggMeta.getFilterValues().stream() + .map( + filterValue -> + filterValue.setValue( + transformIndexToken(filterValue.getValue(), entityTypeIdx))) + .collect(Collectors.toList()))); } return aggMeta; } @VisibleForTesting public SearchResult transformIndexIntoEntityName(SearchResult result) { - return result.setMetadata(result.getMetadata().setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); + return result.setMetadata( + result + .getMetadata() + .setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); } + private ScrollResult transformIndexIntoEntityName(ScrollResult result) { - return result.setMetadata(result.getMetadata().setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); + return result.setMetadata( + result + .getMetadata() + .setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); } private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadataArray aggArray) { @@ -157,15 +174,22 @@ private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadat @Nonnull @WithSpan - private ScrollResult executeAndExtract(@Nonnull List<EntitySpec> entitySpecs, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, - @Nullable String scrollId, @Nullable String keepAlive, int size) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { + private ScrollResult executeAndExtract( + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull SearchRequest searchRequest, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int size) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return transformIndexIntoEntityName(SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .extractScrollResult(searchResponse, - filter, scrollId, keepAlive, size, supportsPointInTime())); + return transformIndexIntoEntityName( + SearchRequestHandler.getBuilder( + entitySpecs, searchConfiguration, customSearchConfiguration) + .extractScrollResult( + searchResponse, filter, scrollId, keepAlive, size, supportsPointInTime())); } catch (Exception e) { log.error("Search query failed: {}", searchRequest, e); throw new ESQueryException("Search query failed:", e); @@ -173,32 +197,42 @@ private ScrollResult executeAndExtract(@Nonnull List<EntitySpec> entitySpecs, @N } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match the given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results.
* * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags Structured or full text search modes, plus other misc options * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { final String finalInput = input.isEmpty() ? "*" : input; Timer.Context searchRequestTimer = MetricUtils.timer(this.getClass(), "searchRequest").time(); - List<EntitySpec> entitySpecs = entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); + List<EntitySpec> entitySpecs = + entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); Filter transformedFilters = transformFilterForEntities(postFilters, indexConvention); // Step 1: construct the query - final SearchRequest searchRequest = SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getSearchRequest(finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); - searchRequest.indices(entityNames.stream() - .map(indexConvention::getEntityIndexName) - .toArray(String[]::new)); + final SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getSearchRequest( + finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); + searchRequest.indices( + entityNames.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new)); searchRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well return executeAndExtract(entitySpecs, searchRequest, transformedFilters, from, size); @@ -207,21 +241,26 @@ public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String in /** * Gets a list of documents after applying the input filters. 
* - * @param filters the request map with fields and values to be applied as filters to the search query + * @param filters the request map with fields and values to be applied as filters to the search + * query * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return - * @return a {@link SearchResult} that contains a list of filtered documents and related search result metadata + * @return a {@link SearchResult} that contains a list of filtered documents and related search + * result metadata */ @Nonnull - public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, int from, int size) { + public SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); Filter transformedFilters = transformFilterForEntities(filters, indexConvention); final SearchRequest searchRequest = - SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getFilterRequest(transformedFilters, sortCriterion, from, size); + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + .getFilterRequest(transformedFilters, sortCriterion, from, size); searchRequest.indices(indexConvention.getIndexName(entitySpec)); return executeAndExtract(List.of(entitySpec), searchRequest, transformedFilters, from, size); @@ -230,7 +269,8 @@ public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, /** * Returns a list of suggestions given type ahead query. * - * <p>The advanced auto complete can take filters and provides suggestions based on filtered context. + * <p>The advanced auto complete can take filters and provides suggestions based on filtered + * context. 
 * * @param query the type ahead query text * @param field the field name for the auto complete * @@ -239,12 +279,18 @@ public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, * @return A list of suggestions as string */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit) { + public AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit) { try { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); AutocompleteRequestHandler builder = AutocompleteRequestHandler.getBuilder(entitySpec); - SearchRequest req = builder.getSearchRequest(query, field, transformFilterForEntities(requestParams, indexConvention), limit); + SearchRequest req = + builder.getSearchRequest( + query, field, transformFilterForEntities(requestParams, indexConvention), limit); req.indices(indexConvention.getIndexName(entitySpec)); SearchResponse searchResponse = client.search(req, RequestOptions.DEFAULT); return builder.extractResult(searchResponse, query); @@ -264,18 +310,27 @@ public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull Stri * @return */ @Nonnull - public Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit) { - final SearchRequest searchRequest = SearchRequestHandler.getAggregationRequest(field, transformFilterForEntities(requestParams, indexConvention), limit); + public Map<String, Long> aggregateByValue( + @Nullable List<String> entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit) { + final SearchRequest searchRequest = + SearchRequestHandler.getAggregationRequest( + field, transformFilterForEntities(requestParams, indexConvention), limit); if (entityNames == null) { String indexName = indexConvention.getAllEntityIndicesPattern(); searchRequest.indices(indexName); } else { - Stream<String> stream = entityNames.stream().map(entityRegistry::getEntitySpec).map(indexConvention::getIndexName); + Stream<String> stream = + entityNames.stream() + .map(entityRegistry::getEntitySpec) + .map(indexConvention::getIndexName); searchRequest.indices(stream.toArray(String[]::new)); } - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well return SearchRequestHandler.extractTermAggregations(searchResponse, field); @@ -286,28 +341,35 @@ public Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @N } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match the given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results.
* * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll Id to convert to a PIT ID and Sort array to pass to ElasticSearch * @param keepAlive string representation of the time to keep a point in time alive * @param size the number of search hits to return - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public ScrollResult scroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, SearchFlags searchFlags) { + public ScrollResult scroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + SearchFlags searchFlags) { final String finalInput = input.isEmpty() ? "*" : input; - String[] indexArray = entities.stream() - .map(indexConvention::getEntityIndexName) - .toArray(String[]::new); + String[] indexArray = + entities.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new); Timer.Context scrollRequestTimer = MetricUtils.timer(this.getClass(), "scrollRequest").time(); - List<EntitySpec> entitySpecs = entities.stream() - .map(entityRegistry::getEntitySpec) - .collect(Collectors.toList()); + List<EntitySpec> entitySpecs = + entities.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); String pitId = null; Object[] sort = null; if (scrollId != null) { @@ -326,39 +388,55 @@ public ScrollResult scroll(@Nonnull List<String> entities, @Nonnull String input Filter transformedFilters = transformFilterForEntities(postFilters, indexConvention); // Step 1: construct the query - final SearchRequest searchRequest = SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getSearchRequest(finalInput, transformedFilters, sortCriterion, sort, pitId, keepAlive, size, searchFlags); + final SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getSearchRequest( + finalInput, + transformedFilters, + sortCriterion, + sort, + pitId, + keepAlive, + size, + searchFlags); - // PIT specifies indices in creation so it doesn't support specifying indices on the request, so we only specify if not using PIT + // PIT specifies indices in creation so it doesn't support specifying indices on the request, so + // we only specify if not using PIT if (!supportsPointInTime()) { searchRequest.indices(indexArray); } scrollRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well - return executeAndExtract(entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); + return executeAndExtract( + entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); } public Optional<SearchResponse> raw(@Nonnull String indexName, @Nullable String jsonQuery) { - return Optional.ofNullable(jsonQuery).map(json -> { - try { - XContentParser parser 
= XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, json); - SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); + return Optional.ofNullable(jsonQuery) + .map( + json -> { + try { + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, json); + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); - SearchRequest searchRequest = new SearchRequest(indexConvention.getIndexName(indexName)); - searchRequest.source(searchSourceBuilder); + SearchRequest searchRequest = + new SearchRequest(indexConvention.getIndexName(indexName)); + searchRequest.source(searchSourceBuilder); - return client.search(searchRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + return client.search(searchRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } private boolean supportsPointInTime() { - return pointInTimeCreationEnabled && ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH.equalsIgnoreCase(elasticSearchImplementation); + return pointInTimeCreationEnabled + && ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH.equalsIgnoreCase(elasticSearchImplementation); } private String createPointInTime(String[] indexArray, String keepAlive) { @@ -367,8 +445,8 @@ private String createPointInTime(String[] indexArray, String keepAlive) { request.addParameter("keep_alive", keepAlive); try { Response response = client.getLowLevelClient().performRequest(request); - Map<String, Object> mappedResponse = OBJECT_MAPPER.readValue(response.getEntity().getContent(), - new TypeReference<>() { }); + Map<String, Object> mappedResponse = + OBJECT_MAPPER.readValue(response.getEntity().getContent(), new TypeReference<>() {}); return (String) mappedResponse.get("id"); } catch (IOException e) { log.error("Failed to generate PointInTime Identifier.", e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index e2bdea84eda0e..7a8056c0b59d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.search.utils.ESUtils; @@ -14,9 +16,6 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class AggregationQueryBuilder { @@ -32,43 +31,51 @@ public AggregationQueryBuilder( this._allFacetFields = getAllFacetFields(annotations); } - /** - * Get the set of default aggregations, across all facets. - */ + /** Get the set of default aggregations, across all facets. 
*/ public List<AggregationBuilder> getAggregations() { return getAggregations(null); } /** - * Get aggregations for a search request for the given facets provided, and if none are provided, then get aggregations for all. + * Get aggregations for a search request for the given facets provided, and if none are provided, + * then get aggregations for all. */ public List<AggregationBuilder> getAggregations(@Nullable List<String> facets) { final Set<String> facetsToAggregate; if (facets != null) { - facets.stream().filter(f -> !isValidAggregate(f)).forEach(facet -> { - log.warn(String.format("Requested facet for search filter aggregations that isn't part of the default filters. Provided: %s; Available: %s", facet, - _defaultFacetFields)); - }); - facetsToAggregate = facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); + facets.stream() + .filter(f -> !isValidAggregate(f)) + .forEach( + facet -> { + log.warn( + String.format( + "Requested facet for search filter aggregations that isn't part of the default filters. Provided: %s; Available: %s", + facet, _defaultFacetFields)); + }); + facetsToAggregate = + facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); } else { facetsToAggregate = _defaultFacetFields; } - return facetsToAggregate.stream().map(this::facetToAggregationBuilder).collect(Collectors.toList()); + return facetsToAggregate.stream() + .map(this::facetToAggregationBuilder) + .collect(Collectors.toList()); } - private Set<String> getDefaultFacetFields(final List<SearchableAnnotation> annotations) { - Set<String> facets = annotations.stream() - .flatMap(annotation -> getDefaultFacetFieldsFromAnnotation(annotation).stream()) - .collect(Collectors.toSet()); + Set<String> facets = + annotations.stream() + .flatMap(annotation -> getDefaultFacetFieldsFromAnnotation(annotation).stream()) + .collect(Collectors.toSet()); facets.add(INDEX_VIRTUAL_FIELD); return facets; } private Set<String> getAllFacetFields(final List<SearchableAnnotation> annotations) { - Set<String> facets = annotations.stream() - .flatMap(annotation -> getAllFacetFieldsFromAnnotation(annotation).stream()) - .collect(Collectors.toSet()); + Set<String> facets = + annotations.stream() + .flatMap(annotation -> getAllFacetFieldsFromAnnotation(annotation).stream()) + .collect(Collectors.toSet()); facets.add(INDEX_VIRTUAL_FIELD); return facets; } @@ -129,4 +136,4 @@ List<String> getAllFacetFieldsFromAnnotation(final SearchableAnnotation annotati } return facetsFromAnnotation; } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index bba3a9fa4232d..cdcdae2f3d311 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; @@ -34,33 +36,32 @@ import org.opensearch.search.builder.SearchSourceBuilder; import 
org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; -import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; - - @Slf4j public class AutocompleteRequestHandler { private final List<String> _defaultAutocompleteFields; - private static final Map<EntitySpec, AutocompleteRequestHandler> AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = - new ConcurrentHashMap<>(); + private static final Map<EntitySpec, AutocompleteRequestHandler> + AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); public AutocompleteRequestHandler(@Nonnull EntitySpec entitySpec) { - _defaultAutocompleteFields = Stream.concat(entitySpec.getSearchableFieldSpecs() - .stream() - .map(SearchableFieldSpec::getSearchableAnnotation) - .filter(SearchableAnnotation::isEnableAutocomplete) - .map(SearchableAnnotation::getFieldName), - Stream.of("urn")) - .collect(Collectors.toList()); + _defaultAutocompleteFields = + Stream.concat( + entitySpec.getSearchableFieldSpecs().stream() + .map(SearchableFieldSpec::getSearchableAnnotation) + .filter(SearchableAnnotation::isEnableAutocomplete) + .map(SearchableAnnotation::getFieldName), + Stream.of("urn")) + .collect(Collectors.toList()); } public static AutocompleteRequestHandler getBuilder(@Nonnull EntitySpec entitySpec) { - return AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME.computeIfAbsent(entitySpec, - k -> new AutocompleteRequestHandler(entitySpec)); + return AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME.computeIfAbsent( + entitySpec, k -> new AutocompleteRequestHandler(entitySpec)); } - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable String field, @Nullable Filter filter, int limit) { + public SearchRequest getSearchRequest( + @Nonnull String input, @Nullable String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(limit); @@ -78,25 +79,27 @@ private QueryBuilder getQuery(@Nonnull String query, @Nullable String field) { public static QueryBuilder getQuery(List<String> autocompleteFields, @Nonnull String query) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Search for exact matches with higher boost and ngram matches - MultiMatchQueryBuilder autocompleteQueryBuilder = QueryBuilders.multiMatchQuery(query) - .type(MultiMatchQueryBuilder.Type.BOOL_PREFIX); - - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - autocompleteFields.forEach(fieldName -> { - if ("urn".equals(fieldName)) { - autocompleteQueryBuilder.field(fieldName + ".ngram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._2gram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._3gram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._4gram", urnBoost); - } else { - autocompleteQueryBuilder.field(fieldName + ".ngram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._2gram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._3gram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._4gram"); - } - - finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query)); - }); + MultiMatchQueryBuilder autocompleteQueryBuilder = + QueryBuilders.multiMatchQuery(query).type(MultiMatchQueryBuilder.Type.BOOL_PREFIX); + + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + autocompleteFields.forEach( + fieldName -> { + if 
("urn".equals(fieldName)) { + autocompleteQueryBuilder.field(fieldName + ".ngram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._2gram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._3gram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._4gram", urnBoost); + } else { + autocompleteQueryBuilder.field(fieldName + ".ngram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._2gram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._3gram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._4gram"); + } + + finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query)); + }); finalQuery.should(autocompleteQueryBuilder); @@ -111,11 +114,14 @@ private HighlightBuilder getHighlights(@Nullable String field) { highlightBuilder.preTags(""); highlightBuilder.postTags(""); // Check for each field name and any subfields - getAutocompleteFields(field).forEach(fieldName -> highlightBuilder - .field(fieldName) - .field(fieldName + ".*") - .field(fieldName + ".ngram") - .field(fieldName + ".delimited")); + getAutocompleteFields(field) + .forEach( + fieldName -> + highlightBuilder + .field(fieldName) + .field(fieldName + ".*") + .field(fieldName + ".ngram") + .field(fieldName + ".delimited")); return highlightBuilder; } @@ -126,19 +132,20 @@ private List<String> getAutocompleteFields(@Nullable String field) { return _defaultAutocompleteFields; } - public AutoCompleteResult extractResult(@Nonnull SearchResponse searchResponse, @Nonnull String input) { + public AutoCompleteResult extractResult( + @Nonnull SearchResponse searchResponse, @Nonnull String input) { Set<String> results = new LinkedHashSet<>(); Set<AutoCompleteEntity> entityResults = new HashSet<>(); for (SearchHit hit : searchResponse.getHits()) { - Optional<String> matchedFieldValue = hit.getHighlightFields() - .entrySet() - .stream() - .findFirst() - .map(entry -> entry.getValue().getFragments()[0].string()); + Optional<String> matchedFieldValue = + hit.getHighlightFields().entrySet().stream() + .findFirst() + .map(entry -> entry.getValue().getFragments()[0].string()); Optional<String> matchedUrn = Optional.ofNullable((String) hit.getSourceAsMap().get("urn")); try { if (matchedUrn.isPresent()) { - entityResults.add(new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); + entityResults.add( + new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); } } catch (URISyntaxException e) { throw new RuntimeException(String.format("Failed to create urn %s", matchedUrn.get()), e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java index 55a3474fd9f35..478d633fe3c55 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java @@ -2,42 +2,43 @@ import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nullable; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; - +import 
javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Builder(builderMethodName = "hiddenBuilder") @Getter public class CustomizedQueryHandler { - private CustomSearchConfiguration customSearchConfiguration; - @Builder.Default - private List<Map.Entry<Pattern, QueryConfiguration>> queryConfigurations = List.of(); + private CustomSearchConfiguration customSearchConfiguration; - public Optional<QueryConfiguration> lookupQueryConfig(String query) { - return queryConfigurations.stream() - .filter(e -> e.getKey().matcher(query).matches()) - .map(Map.Entry::getValue) - .findFirst(); - } + @Builder.Default + private List<Map.Entry<Pattern, QueryConfiguration>> queryConfigurations = List.of(); + + public Optional<QueryConfiguration> lookupQueryConfig(String query) { + return queryConfigurations.stream() + .filter(e -> e.getKey().matcher(query).matches()) + .map(Map.Entry::getValue) + .findFirst(); + } - public static CustomizedQueryHandlerBuilder builder(@Nullable CustomSearchConfiguration customSearchConfiguration) { - CustomizedQueryHandlerBuilder builder = hiddenBuilder() - .customSearchConfiguration(customSearchConfiguration); + public static CustomizedQueryHandlerBuilder builder( + @Nullable CustomSearchConfiguration customSearchConfiguration) { + CustomizedQueryHandlerBuilder builder = + hiddenBuilder().customSearchConfiguration(customSearchConfiguration); - if (customSearchConfiguration != null) { - builder.queryConfigurations(customSearchConfiguration.getQueryConfigurations().stream() - .map(cfg -> Map.entry(Pattern.compile(cfg.getQueryRegex()), cfg)) - .collect(Collectors.toList())); - } - return builder; + if (customSearchConfiguration != null) { + builder.queryConfigurations( + customSearchConfiguration.getQueryConfigurations().stream() + .map(cfg -> Map.entry(Pattern.compile(cfg.getQueryRegex()), cfg)) + .collect(Collectors.toList())); } + return builder; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java index 79c00fc7cdd20..3a7e72deed2fe 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java @@ -3,7 +3,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; - public class PITAwareSearchRequest extends SearchRequest { private IndicesOptions indicesOptions; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java index 3e4f3427e7658..1fe4a74968e42 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; + import java.io.IOException; import java.io.Serializable; import java.nio.charset.StandardCharsets; @@ -8,10 +10,6 @@ import lombok.Data; import lombok.NoArgsConstructor; - -import static 
com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - - @Data @AllArgsConstructor @NoArgsConstructor @@ -22,7 +20,9 @@ public class SearchAfterWrapper implements Serializable { public static SearchAfterWrapper fromScrollId(String scrollId) { try { - return OBJECT_MAPPER.readValue(Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), SearchAfterWrapper.class); + return OBJECT_MAPPER.readValue( + Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), + SearchAfterWrapper.class); } catch (IOException e) { throw new IllegalStateException("Invalid scroll Id cannot be mapped: " + scrollId, e); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java index a75ed40ffca52..7709ff16f7940 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java @@ -1,175 +1,162 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; + import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import java.util.Set; +import javax.annotation.Nonnull; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.experimental.Accessors; -import javax.annotation.Nonnull; - -import java.util.Set; - -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; - - @Builder @Getter @Accessors(fluent = true) @EqualsAndHashCode public class SearchFieldConfig { - public static final float DEFAULT_BOOST = 1.0f; - - public static final Set<String> KEYWORD_FIELDS = Set.of("urn", "runId", "_index"); - public static final Set<String> PATH_HIERARCHY_FIELDS = Set.of("browsePathV2"); - - // These should not be used directly since there is a specific - // order in which these rules need to be evaluated for exceptions to - // the rules. 
- private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_DELIMITED_SUBFIELD = - Set.of( - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.WORD_GRAM - // NOT URN_PARTIAL (urn field is special) - ); - // NOT comprehensive - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_KEYWORD_SUBFIELD = - Set.of( - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.URN_PARTIAL - ); - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH = - Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH - ); - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH_V2 = - Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH_V2 - ); - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BASE_KEYWORD = - Set.of( - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.WORD_GRAM, - // not analyzed - SearchableAnnotation.FieldType.BOOLEAN, - SearchableAnnotation.FieldType.COUNT, - SearchableAnnotation.FieldType.DATETIME, - SearchableAnnotation.FieldType.OBJECT - ); - // NOT true for `urn` - public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_URN_TEXT = - Set.of( - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.URN_PARTIAL - ); - - public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_WORD_GRAM = - Set.of( - SearchableAnnotation.FieldType.WORD_GRAM - ); - - @Nonnull - private final String fieldName; - @Nonnull - private final String shortName; - @Builder.Default - private final Float boost = DEFAULT_BOOST; - private final String analyzer; - private boolean hasKeywordSubfield; - private boolean hasDelimitedSubfield; - private boolean hasWordGramSubfields; - private boolean isQueryByDefault; - private boolean isDelimitedSubfield; - private boolean isKeywordSubfield; - private boolean isWordGramSubfield; - - public static SearchFieldConfig detectSubFieldType(@Nonnull SearchableFieldSpec fieldSpec) { - final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); - final String fieldName = searchableAnnotation.getFieldName(); - final float boost = (float) searchableAnnotation.getBoostScore(); - final SearchableAnnotation.FieldType fieldType = searchableAnnotation.getFieldType(); - return detectSubFieldType(fieldName, boost, fieldType, searchableAnnotation.isQueryByDefault()); - } - - public static SearchFieldConfig detectSubFieldType(String fieldName, - SearchableAnnotation.FieldType fieldType, - boolean isQueryByDefault) { - return detectSubFieldType(fieldName, DEFAULT_BOOST, fieldType, isQueryByDefault); + public static final float DEFAULT_BOOST = 1.0f; + + public static final Set<String> KEYWORD_FIELDS = Set.of("urn", "runId", "_index"); + public static final Set<String> PATH_HIERARCHY_FIELDS = Set.of("browsePathV2"); + + // These should not be used directly since there is a specific + // order in which these rules need to be evaluated for exceptions to + // the rules. 
+ private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_DELIMITED_SUBFIELD = + Set.of( + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.WORD_GRAM + // NOT URN_PARTIAL (urn field is special) + ); + // NOT comprehensive + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_KEYWORD_SUBFIELD = + Set.of( + SearchableAnnotation.FieldType.URN, + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.URN_PARTIAL); + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH = + Set.of(SearchableAnnotation.FieldType.BROWSE_PATH); + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH_V2 = + Set.of(SearchableAnnotation.FieldType.BROWSE_PATH_V2); + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BASE_KEYWORD = + Set.of( + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.WORD_GRAM, + // not analyzed + SearchableAnnotation.FieldType.BOOLEAN, + SearchableAnnotation.FieldType.COUNT, + SearchableAnnotation.FieldType.DATETIME, + SearchableAnnotation.FieldType.OBJECT); + // NOT true for `urn` + public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_URN_TEXT = + Set.of(SearchableAnnotation.FieldType.URN, SearchableAnnotation.FieldType.URN_PARTIAL); + + public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_WORD_GRAM = + Set.of(SearchableAnnotation.FieldType.WORD_GRAM); + + @Nonnull private final String fieldName; + @Nonnull private final String shortName; + @Builder.Default private final Float boost = DEFAULT_BOOST; + private final String analyzer; + private boolean hasKeywordSubfield; + private boolean hasDelimitedSubfield; + private boolean hasWordGramSubfields; + private boolean isQueryByDefault; + private boolean isDelimitedSubfield; + private boolean isKeywordSubfield; + private boolean isWordGramSubfield; + + public static SearchFieldConfig detectSubFieldType(@Nonnull SearchableFieldSpec fieldSpec) { + final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); + final String fieldName = searchableAnnotation.getFieldName(); + final float boost = (float) searchableAnnotation.getBoostScore(); + final SearchableAnnotation.FieldType fieldType = searchableAnnotation.getFieldType(); + return detectSubFieldType(fieldName, boost, fieldType, searchableAnnotation.isQueryByDefault()); + } + + public static SearchFieldConfig detectSubFieldType( + String fieldName, SearchableAnnotation.FieldType fieldType, boolean isQueryByDefault) { + return detectSubFieldType(fieldName, DEFAULT_BOOST, fieldType, isQueryByDefault); + } + + public static SearchFieldConfig detectSubFieldType( + String fieldName, + float boost, + SearchableAnnotation.FieldType fieldType, + boolean isQueryByDefault) { + return SearchFieldConfig.builder() + .fieldName(fieldName) + .boost(boost) + .analyzer(getAnalyzer(fieldName, fieldType)) + .hasKeywordSubfield(hasKeywordSubfield(fieldName, fieldType)) + .hasDelimitedSubfield(hasDelimitedSubfield(fieldName, fieldType)) + .hasWordGramSubfields(hasWordGramSubfields(fieldName, fieldType)) + .isQueryByDefault(isQueryByDefault) + .build(); + } + + public boolean isKeyword() { + return KEYWORD_ANALYZER.equals(analyzer()) || isKeyword(fieldName()); + } + + private static boolean hasDelimitedSubfield( + String fieldName, SearchableAnnotation.FieldType fieldType) { + 
return !fieldName.contains(".") + && ("urn".equals(fieldName) || TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)); + } + + private static boolean hasWordGramSubfields( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !fieldName.contains(".") && (TYPES_WITH_WORD_GRAM.contains(fieldType)); + } + + private static boolean hasKeywordSubfield( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !"urn".equals(fieldName) + && !fieldName.contains(".") + && (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType) // if delimited then also has keyword + || TYPES_WITH_KEYWORD_SUBFIELD.contains(fieldType)); + } + + private static boolean isKeyword(String fieldName) { + return fieldName.endsWith(".keyword") || KEYWORD_FIELDS.contains(fieldName); + } + + private static String getAnalyzer(String fieldName, SearchableAnnotation.FieldType fieldType) { + // order is important + if (TYPES_WITH_BROWSE_PATH.contains(fieldType)) { + return BROWSE_PATH_HIERARCHY_ANALYZER; + } else if (TYPES_WITH_BROWSE_PATH_V2.contains(fieldType)) { + return BROWSE_PATH_V2_HIERARCHY_ANALYZER; + // sub fields + } else if (isKeyword(fieldName)) { + return KEYWORD_ANALYZER; + } else if (fieldName.endsWith(".delimited")) { + return TEXT_SEARCH_ANALYZER; + // non-subfield cases below + } else if (TYPES_WITH_BASE_KEYWORD.contains(fieldType)) { + return KEYWORD_ANALYZER; + } else if (TYPES_WITH_URN_TEXT.contains(fieldType)) { + return URN_SEARCH_ANALYZER; + } else { + throw new IllegalStateException( + String.format("Unknown analyzer for fieldName: %s, fieldType: %s", fieldName, fieldType)); } - - public static SearchFieldConfig detectSubFieldType(String fieldName, - float boost, - SearchableAnnotation.FieldType fieldType, - boolean isQueryByDefault) { - return SearchFieldConfig.builder() - .fieldName(fieldName) - .boost(boost) - .analyzer(getAnalyzer(fieldName, fieldType)) - .hasKeywordSubfield(hasKeywordSubfield(fieldName, fieldType)) - .hasDelimitedSubfield(hasDelimitedSubfield(fieldName, fieldType)) - .hasWordGramSubfields(hasWordGramSubfields(fieldName, fieldType)) - .isQueryByDefault(isQueryByDefault) - .build(); - } - - public boolean isKeyword() { - return KEYWORD_ANALYZER.equals(analyzer()) || isKeyword(fieldName()); - } - - private static boolean hasDelimitedSubfield(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !fieldName.contains(".") - && ("urn".equals(fieldName) || TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)); - } - - private static boolean hasWordGramSubfields(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !fieldName.contains(".") - && (TYPES_WITH_WORD_GRAM.contains(fieldType)); - } - private static boolean hasKeywordSubfield(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !"urn".equals(fieldName) - && !fieldName.contains(".") - && (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType) // if delimited then also has keyword - || TYPES_WITH_KEYWORD_SUBFIELD.contains(fieldType)); - } - private static boolean isKeyword(String fieldName) { - return fieldName.endsWith(".keyword") - || KEYWORD_FIELDS.contains(fieldName); - } - - private static String getAnalyzer(String fieldName, SearchableAnnotation.FieldType fieldType) { - // order is important - if (TYPES_WITH_BROWSE_PATH.contains(fieldType)) { - return BROWSE_PATH_HIERARCHY_ANALYZER; - } else if (TYPES_WITH_BROWSE_PATH_V2.contains(fieldType)) { - return BROWSE_PATH_V2_HIERARCHY_ANALYZER; - // sub fields - } else if (isKeyword(fieldName)) { - return 
KEYWORD_ANALYZER; - } else if (fieldName.endsWith(".delimited")) { - return TEXT_SEARCH_ANALYZER; - // non-subfield cases below - } else if (TYPES_WITH_BASE_KEYWORD.contains(fieldType)) { - return KEYWORD_ANALYZER; - } else if (TYPES_WITH_URN_TEXT.contains(fieldType)) { - return URN_SEARCH_ANALYZER; - } else { - throw new IllegalStateException(String.format("Unknown analyzer for fieldName: %s, fieldType: %s", fieldName, fieldType)); - } - } - - public static class SearchFieldConfigBuilder { - public SearchFieldConfigBuilder fieldName(@Nonnull String fieldName) { - this.fieldName = fieldName; - isDelimitedSubfield(fieldName.endsWith(".delimited")); - isKeywordSubfield(fieldName.endsWith(".keyword")); - isWordGramSubfield(fieldName.contains("wordGrams")); - shortName(fieldName.split("[.]")[0]); - return this; - } + } + + public static class SearchFieldConfigBuilder { + public SearchFieldConfigBuilder fieldName(@Nonnull String fieldName) { + this.fieldName = fieldName; + isDelimitedSubfield(fieldName.endsWith(".delimited")); + isKeywordSubfield(fieldName.endsWith(".keyword")); + isWordGramSubfield(fieldName.contains("wordGrams")); + shortName(fieldName.split("[.]")[0]); + return this; } + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java index ce88f31449c35..7ddccb0d56724 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java @@ -1,6 +1,14 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.*; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; @@ -8,16 +16,12 @@ import com.linkedin.metadata.config.search.custom.BoolQueryConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.StreamReadConstraints; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.metadata.Constants; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchScoreFieldSpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchScoreAnnotation; import com.linkedin.metadata.models.annotation.SearchableAnnotation; - +import com.linkedin.metadata.search.utils.ESUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -32,16 +36,14 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import 
com.linkedin.metadata.search.utils.ESUtils; import lombok.extern.slf4j.Slf4j; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FieldValueFactorFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.Operator; @@ -54,20 +56,25 @@ import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.opensearch.search.SearchModule; -import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.*; - - @Slf4j public class SearchQueryBuilder { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { OBJECT_MAPPER.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH, Constants.MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault( + Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH, + Constants.MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final NamedXContentRegistry X_CONTENT_REGISTRY; + static { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); X_CONTENT_REGISTRY = new NamedXContentRegistry(searchModule.getNamedXContents()); @@ -80,49 +87,63 @@ public class SearchQueryBuilder { private final CustomizedQueryHandler customizedQueryHandler; - public SearchQueryBuilder(@Nonnull SearchConfiguration searchConfiguration, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public SearchQueryBuilder( + @Nonnull SearchConfiguration searchConfiguration, + @Nullable CustomSearchConfiguration customSearchConfiguration) { this.exactMatchConfiguration = searchConfiguration.getExactMatch(); this.partialConfiguration = searchConfiguration.getPartial(); this.wordGramConfiguration = searchConfiguration.getWordGram(); this.customizedQueryHandler = CustomizedQueryHandler.builder(customSearchConfiguration).build(); } - public QueryBuilder buildQuery(@Nonnull List<EntitySpec> entitySpecs, @Nonnull String query, boolean fulltext) { - QueryConfiguration customQueryConfig = customizedQueryHandler.lookupQueryConfig(query).orElse(null); + public QueryBuilder buildQuery( + @Nonnull List<EntitySpec> entitySpecs, @Nonnull String query, boolean fulltext) { + QueryConfiguration customQueryConfig = + customizedQueryHandler.lookupQueryConfig(query).orElse(null); - final QueryBuilder queryBuilder = buildInternalQuery(customQueryConfig, entitySpecs, query, fulltext); + final QueryBuilder queryBuilder = + buildInternalQuery(customQueryConfig, entitySpecs, query, fulltext); return buildScoreFunctions(customQueryConfig, 
entitySpecs, queryBuilder); } /** * Constructs the search query. + * * @param customQueryConfig custom configuration * @param entitySpecs entities being searched * @param query search string * @param fulltext use fulltext queries * @return query builder */ - private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQueryConfig, @Nonnull List<EntitySpec> entitySpecs, - @Nonnull String query, boolean fulltext) { + private QueryBuilder buildInternalQuery( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull String query, + boolean fulltext) { final String sanitizedQuery = query.replaceFirst("^:+", ""); - final BoolQueryBuilder finalQuery = Optional.ofNullable(customQueryConfig) + final BoolQueryBuilder finalQuery = + Optional.ofNullable(customQueryConfig) .flatMap(cqc -> boolQueryBuilder(cqc, sanitizedQuery)) .orElse(QueryBuilders.boolQuery()); if (fulltext && !query.startsWith(STRUCTURED_QUERY_PREFIX)) { getSimpleQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); - getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); + getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, sanitizedQuery) + .ifPresent(finalQuery::should); } else { - final String withoutQueryPrefix = query.startsWith(STRUCTURED_QUERY_PREFIX) ? query.substring(STRUCTURED_QUERY_PREFIX.length()) : query; + final String withoutQueryPrefix = + query.startsWith(STRUCTURED_QUERY_PREFIX) + ? query.substring(STRUCTURED_QUERY_PREFIX.length()) + : query; QueryStringQueryBuilder queryBuilder = QueryBuilders.queryStringQuery(withoutQueryPrefix); queryBuilder.defaultOperator(Operator.AND); - getStandardFields(entitySpecs).forEach(entitySpec -> - queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); + getStandardFields(entitySpecs) + .forEach(entitySpec -> queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); finalQuery.should(queryBuilder); if (exactMatchConfiguration.isEnableStructured()) { - getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix).ifPresent(finalQuery::should); + getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix) + .ifPresent(finalQuery::should); } } @@ -130,7 +151,9 @@ private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQuery } /** - * Gets searchable fields from all entities in the input collection. De-duplicates fields across entities. + * Gets searchable fields from all entities in the input collection. De-duplicates fields across + * entities. + * * @param entitySpecs: Entity specs to extract searchable fields from * @return A set of SearchFieldConfigs containing the searchable fields from the input entities. 
*/ @@ -138,31 +161,42 @@ private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQuery public Set<SearchFieldConfig> getStandardFields(@Nonnull Collection<EntitySpec> entitySpecs) { Set<SearchFieldConfig> fields = new HashSet<>(); // Always present - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - - fields.add(SearchFieldConfig.detectSubFieldType("urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); - fields.add(SearchFieldConfig.detectSubFieldType("urn.delimited", urnBoost * partialConfiguration.getUrnFactor(), - SearchableAnnotation.FieldType.URN, true)); + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn.delimited", + urnBoost * partialConfiguration.getUrnFactor(), + SearchableAnnotation.FieldType.URN, + true)); entitySpecs.stream() .map(this::getFieldsFromEntitySpec) .flatMap(Set::stream) - .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)).forEach((key, value) -> - fields.add( - new SearchFieldConfig( - key, - value.get(0).shortName(), - (float) value.stream().mapToDouble(SearchFieldConfig::boost).average().getAsDouble(), - value.get(0).analyzer(), - value.stream().anyMatch(SearchFieldConfig::hasKeywordSubfield), - value.stream().anyMatch(SearchFieldConfig::hasDelimitedSubfield), - value.stream().anyMatch(SearchFieldConfig::hasWordGramSubfields), - true, - value.stream().anyMatch(SearchFieldConfig::isDelimitedSubfield), - value.stream().anyMatch(SearchFieldConfig::isKeywordSubfield), - value.stream().anyMatch(SearchFieldConfig::isWordGramSubfield) - )) - ); + .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)) + .forEach( + (key, value) -> + fields.add( + new SearchFieldConfig( + key, + value.get(0).shortName(), + (float) + value.stream() + .mapToDouble(SearchFieldConfig::boost) + .average() + .getAsDouble(), + value.get(0).analyzer(), + value.stream().anyMatch(SearchFieldConfig::hasKeywordSubfield), + value.stream().anyMatch(SearchFieldConfig::hasDelimitedSubfield), + value.stream().anyMatch(SearchFieldConfig::hasWordGramSubfields), + true, + value.stream().anyMatch(SearchFieldConfig::isDelimitedSubfield), + value.stream().anyMatch(SearchFieldConfig::isKeywordSubfield), + value.stream().anyMatch(SearchFieldConfig::isWordGramSubfield)))); return fields; } @@ -182,38 +216,44 @@ public Set<SearchFieldConfig> getFieldsFromEntitySpec(EntitySpec entitySpec) { if (SearchFieldConfig.detectSubFieldType(fieldSpec).hasDelimitedSubfield()) { final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); - fields.add(SearchFieldConfig.detectSubFieldType(searchFieldConfig.fieldName() + ".delimited", - searchFieldConfig.boost() * partialConfiguration.getFactor(), - searchableAnnotation.getFieldType(), searchableAnnotation.isQueryByDefault())); + fields.add( + SearchFieldConfig.detectSubFieldType( + searchFieldConfig.fieldName() + ".delimited", + searchFieldConfig.boost() * partialConfiguration.getFactor(), + searchableAnnotation.getFieldType(), + searchableAnnotation.isQueryByDefault())); if (SearchFieldConfig.detectSubFieldType(fieldSpec).hasWordGramSubfields()) { - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams2") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getTwoGramFactor()) - 
.analyzer(WORD_GRAM_2_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams3") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getThreeGramFactor()) - .analyzer(WORD_GRAM_3_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams4") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getFourGramFactor()) - .analyzer(WORD_GRAM_4_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams2") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getTwoGramFactor()) + .analyzer(WORD_GRAM_2_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams3") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getThreeGramFactor()) + .analyzer(WORD_GRAM_3_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams4") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getFourGramFactor()) + .analyzer(WORD_GRAM_4_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); } } } @@ -224,11 +264,18 @@ private Set<SearchFieldConfig> getStandardFields(@Nonnull EntitySpec entitySpec) Set<SearchFieldConfig> fields = new HashSet<>(); // Always present - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - - fields.add(SearchFieldConfig.detectSubFieldType("urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); - fields.add(SearchFieldConfig.detectSubFieldType("urn.delimited", urnBoost * partialConfiguration.getUrnFactor(), - SearchableAnnotation.FieldType.URN, true)); + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn.delimited", + urnBoost * partialConfiguration.getUrnFactor(), + SearchableAnnotation.FieldType.URN, + true)); fields.addAll(getFieldsFromEntitySpec(entitySpec)); @@ -242,9 +289,11 @@ private static String unquote(String query) { private static boolean isQuoted(String query) { return Stream.of("\"", "'").anyMatch(query::contains); } - private Optional<QueryBuilder> getSimpleQuery(@Nullable QueryConfiguration customQueryConfig, - List<EntitySpec> entitySpecs, - String sanitizedQuery) { + + private Optional<QueryBuilder> getSimpleQuery( + @Nullable QueryConfiguration customQueryConfig, + List<EntitySpec> entitySpecs, + String sanitizedQuery) { Optional<QueryBuilder> result = Optional.empty(); final boolean executeSimpleQuery; @@ -263,25 +312,34 @@ private Optional<QueryBuilder> getSimpleQuery(@Nullable QueryConfiguration custo 
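The reflowed hunk below is dense, so here is the shape of what getSimpleQuery() does, in isolation: bucket the query-by-default fields by analyzer, then keep one configuration per field name inside each bucket. A standalone sketch with a hypothetical Field record in place of SearchFieldConfig:

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class AnalyzerGrouping {
      // Hypothetical stand-in for SearchFieldConfig.
      record Field(String fieldName, String analyzer, float boost) {}

      public static void main(String[] args) {
        List<Field> fields = List.of(
            new Field("name.delimited", "word_delimited", 4.0f),
            new Field("description", "keyword", 1.0f),
            new Field("name.delimited", "word_delimited", 2.0f)); // duplicate across indices

        // Group by analyzer, as the simple query string builder does...
        Map<String, List<Field>> byAnalyzer =
            fields.stream().collect(Collectors.groupingBy(Field::analyzer));

        byAnalyzer.forEach((analyzer, cfgs) -> {
          // ...then de-duplicate by field name within each analyzer bucket.
          Map<String, List<Field>> byName =
              cfgs.stream().collect(Collectors.groupingBy(Field::fieldName));
          byName.forEach((name, dupes) ->
              System.out.println(analyzer + ": " + name + " boost=" + dupes.get(0).boost()));
        });
      }
    }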
BoolQueryBuilder simplePerField = QueryBuilders.boolQuery(); // Simple query string does not use per field analyzers // Group the fields by analyzer - Map<String, List<SearchFieldConfig>> analyzerGroup = entitySpecs.stream() + Map<String, List<SearchFieldConfig>> analyzerGroup = + entitySpecs.stream() .map(this::getStandardFields) .flatMap(Set::stream) .filter(SearchFieldConfig::isQueryByDefault) .collect(Collectors.groupingBy(SearchFieldConfig::analyzer)); - analyzerGroup.keySet().stream().sorted().filter(str -> !str.contains("word_gram")).forEach(analyzer -> { - List<SearchFieldConfig> fieldConfigs = analyzerGroup.get(analyzer); - SimpleQueryStringBuilder simpleBuilder = QueryBuilders.simpleQueryStringQuery(sanitizedQuery); - simpleBuilder.analyzer(analyzer); - simpleBuilder.defaultOperator(Operator.AND); - Map<String, List<SearchFieldConfig>> fieldAnalyzers = fieldConfigs.stream().collect(Collectors.groupingBy(SearchFieldConfig::fieldName)); - // De-duplicate fields across different indices - for (Map.Entry<String, List<SearchFieldConfig>> fieldAnalyzer : fieldAnalyzers.entrySet()) { - SearchFieldConfig cfg = fieldAnalyzer.getValue().get(0); - simpleBuilder.field(cfg.fieldName(), cfg.boost()); - } - simplePerField.should(simpleBuilder); - }); + analyzerGroup.keySet().stream() + .sorted() + .filter(str -> !str.contains("word_gram")) + .forEach( + analyzer -> { + List<SearchFieldConfig> fieldConfigs = analyzerGroup.get(analyzer); + SimpleQueryStringBuilder simpleBuilder = + QueryBuilders.simpleQueryStringQuery(sanitizedQuery); + simpleBuilder.analyzer(analyzer); + simpleBuilder.defaultOperator(Operator.AND); + Map<String, List<SearchFieldConfig>> fieldAnalyzers = + fieldConfigs.stream() + .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)); + // De-duplicate fields across different indices + for (Map.Entry<String, List<SearchFieldConfig>> fieldAnalyzer : + fieldAnalyzers.entrySet()) { + SearchFieldConfig cfg = fieldAnalyzer.getValue().get(0); + simpleBuilder.field(cfg.fieldName(), cfg.boost()); + } + simplePerField.should(simpleBuilder); + }); result = Optional.of(simplePerField); } @@ -289,99 +347,133 @@ private Optional<QueryBuilder> getSimpleQuery(@Nullable QueryConfiguration custo return result; } - private Optional<QueryBuilder> getPrefixAndExactMatchQuery(@Nullable QueryConfiguration customQueryConfig, - @Nonnull List<EntitySpec> entitySpecs, - String query) { + private Optional<QueryBuilder> getPrefixAndExactMatchQuery( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List<EntitySpec> entitySpecs, + String query) { - final boolean isPrefixQuery = customQueryConfig == null ? exactMatchConfiguration.isWithPrefix() : customQueryConfig.isPrefixMatchQuery(); + final boolean isPrefixQuery = + customQueryConfig == null + ? 
exactMatchConfiguration.isWithPrefix() + : customQueryConfig.isPrefixMatchQuery(); final boolean isExactQuery = customQueryConfig == null || customQueryConfig.isExactMatchQuery(); - BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); String unquotedQuery = unquote(query); - getStandardFields(entitySpecs).forEach(searchFieldConfig -> { - if (searchFieldConfig.isDelimitedSubfield() && isPrefixQuery) { - finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(searchFieldConfig.fieldName(), query) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getPrefixFactor() - * exactMatchConfiguration.getCaseSensitivityFactor()) - .queryName(searchFieldConfig.shortName())); // less than exact - } - - if (searchFieldConfig.isKeyword() && isExactQuery) { - // It is important to use the subfield .keyword (it uses a different normalizer) - // The non-.keyword field removes case information - - // Exact match case-sensitive - finalQuery.should( - QueryBuilders.termQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), unquotedQuery) - .caseInsensitive(false) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor()) - .queryName(searchFieldConfig.shortName())); - - // Exact match case-insensitive - finalQuery.should( - QueryBuilders.termQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), unquotedQuery) - .caseInsensitive(true) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor() - * exactMatchConfiguration.getCaseSensitivityFactor()) - .queryName(searchFieldConfig.fieldName())); - } - - if (searchFieldConfig.isWordGramSubfield() && isPrefixQuery) { - finalQuery.should( - QueryBuilders.matchPhraseQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), - unquotedQuery) - .boost(searchFieldConfig.boost() * getWordGramFactor(searchFieldConfig.fieldName())) - .queryName(searchFieldConfig.shortName())); - } - }); + getStandardFields(entitySpecs) + .forEach( + searchFieldConfig -> { + if (searchFieldConfig.isDelimitedSubfield() && isPrefixQuery) { + finalQuery.should( + QueryBuilders.matchPhrasePrefixQuery(searchFieldConfig.fieldName(), query) + .boost( + searchFieldConfig.boost() + * exactMatchConfiguration.getPrefixFactor() + * exactMatchConfiguration.getCaseSensitivityFactor()) + .queryName(searchFieldConfig.shortName())); // less than exact + } + + if (searchFieldConfig.isKeyword() && isExactQuery) { + // It is important to use the subfield .keyword (it uses a different normalizer) + // The non-.keyword field removes case information + + // Exact match case-sensitive + finalQuery.should( + QueryBuilders.termQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .caseInsensitive(false) + .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor()) + .queryName(searchFieldConfig.shortName())); + + // Exact match case-insensitive + finalQuery.should( + QueryBuilders.termQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .caseInsensitive(true) + .boost( + searchFieldConfig.boost() + * exactMatchConfiguration.getExactFactor() + * exactMatchConfiguration.getCaseSensitivityFactor()) + .queryName(searchFieldConfig.fieldName())); + } + + if (searchFieldConfig.isWordGramSubfield() && isPrefixQuery) { + finalQuery.should( + QueryBuilders.matchPhraseQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .boost( + searchFieldConfig.boost() + * 
getWordGramFactor(searchFieldConfig.fieldName())) + .queryName(searchFieldConfig.shortName())); + } + }); return finalQuery.should().size() > 0 ? Optional.of(finalQuery) : Optional.empty(); } - private FunctionScoreQueryBuilder buildScoreFunctions(@Nullable QueryConfiguration customQueryConfig, - @Nonnull List<EntitySpec> entitySpecs, - @Nonnull QueryBuilder queryBuilder) { + private FunctionScoreQueryBuilder buildScoreFunctions( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull QueryBuilder queryBuilder) { if (customQueryConfig != null) { // Prefer configuration function scoring over annotation scoring return functionScoreQueryBuilder(customQueryConfig, queryBuilder); } else { - return QueryBuilders.functionScoreQuery(queryBuilder, buildAnnotationScoreFunctions(entitySpecs)) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions - .boostMode(CombineFunction.MULTIPLY); // Multiply score function with the score from query; + return QueryBuilders.functionScoreQuery( + queryBuilder, buildAnnotationScoreFunctions(entitySpecs)) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions + .boostMode( + CombineFunction.MULTIPLY); // Multiply score function with the score from query; } } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotationScoreFunctions(@Nonnull List<EntitySpec> entitySpecs) { + private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotationScoreFunctions( + @Nonnull List<EntitySpec> entitySpecs) { List<FunctionScoreQueryBuilder.FilterFunctionBuilder> finalScoreFunctions = new ArrayList<>(); // Add a default weight of 1.0 to make sure the score function is larger than 1 finalScoreFunctions.add( - new FunctionScoreQueryBuilder.FilterFunctionBuilder(ScoreFunctionBuilders.weightFactorFunction(1.0f))); - - Map<String, SearchableAnnotation> annotations = entitySpecs.stream() - .map(EntitySpec::getSearchableFieldSpecs) - .flatMap(List::stream) - .map(SearchableFieldSpec::getSearchableAnnotation) - .collect(Collectors.toMap(SearchableAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> annotation1)); + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1.0f))); + + Map<String, SearchableAnnotation> annotations = + entitySpecs.stream() + .map(EntitySpec::getSearchableFieldSpecs) + .flatMap(List::stream) + .map(SearchableFieldSpec::getSearchableAnnotation) + .collect( + Collectors.toMap( + SearchableAnnotation::getFieldName, + annotation -> annotation, + (annotation1, annotation2) -> annotation1)); for (Map.Entry<String, SearchableAnnotation> annotationEntry : annotations.entrySet()) { SearchableAnnotation annotation = annotationEntry.getValue(); - annotation - .getWeightsPerFieldValue() - .entrySet() - .stream() - .map(entry -> buildWeightFactorFunction(annotation.getFieldName(), entry.getKey(), - entry.getValue())).forEach(finalScoreFunctions::add); + annotation.getWeightsPerFieldValue().entrySet().stream() + .map( + entry -> + buildWeightFactorFunction( + annotation.getFieldName(), entry.getKey(), entry.getValue())) + .forEach(finalScoreFunctions::add); } - Map<String, SearchScoreAnnotation> searchScoreAnnotationMap = entitySpecs.stream() - .map(EntitySpec::getSearchScoreFieldSpecs) - .flatMap(List::stream) - .map(SearchScoreFieldSpec::getSearchScoreAnnotation) - .collect(Collectors.toMap(SearchScoreAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> 
annotation1)); - for (Map.Entry<String, SearchScoreAnnotation> searchScoreAnnotationEntry : searchScoreAnnotationMap.entrySet()) { + Map<String, SearchScoreAnnotation> searchScoreAnnotationMap = + entitySpecs.stream() + .map(EntitySpec::getSearchScoreFieldSpecs) + .flatMap(List::stream) + .map(SearchScoreFieldSpec::getSearchScoreAnnotation) + .collect( + Collectors.toMap( + SearchScoreAnnotation::getFieldName, + annotation -> annotation, + (annotation1, annotation2) -> annotation1)); + for (Map.Entry<String, SearchScoreAnnotation> searchScoreAnnotationEntry : + searchScoreAnnotationMap.entrySet()) { SearchScoreAnnotation annotation = searchScoreAnnotationEntry.getValue(); finalScoreFunctions.add(buildScoreFunctionFromSearchScoreAnnotation(annotation)); } @@ -389,14 +481,15 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotation return finalScoreFunctions.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]); } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction(@Nonnull String fieldName, - @Nonnull Object fieldValue, double weight) { - return new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery(fieldName, fieldValue), + private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction( + @Nonnull String fieldName, @Nonnull Object fieldValue, double weight) { + return new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery(fieldName, fieldValue), ScoreFunctionBuilders.weightFactorFunction((float) weight)); } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctionFromSearchScoreAnnotation( - @Nonnull SearchScoreAnnotation annotation) { + private static FunctionScoreQueryBuilder.FilterFunctionBuilder + buildScoreFunctionFromSearchScoreAnnotation(@Nonnull SearchScoreAnnotation annotation) { FieldValueFactorFunctionBuilder scoreFunction = ScoreFunctionBuilders.fieldValueFactorFunction(annotation.getFieldName()); scoreFunction.factor((float) annotation.getWeight()); @@ -405,7 +498,8 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctio return new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction); } - private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotation.Modifier modifier) { + private static FieldValueFactorFunction.Modifier mapModifier( + SearchScoreAnnotation.Modifier modifier) { switch (modifier) { case LOG: return FieldValueFactorFunction.Modifier.LOG1P; @@ -422,33 +516,43 @@ private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotati } } - public FunctionScoreQueryBuilder functionScoreQueryBuilder(QueryConfiguration customQueryConfiguration, - QueryBuilder queryBuilder) { + public FunctionScoreQueryBuilder functionScoreQueryBuilder( + QueryConfiguration customQueryConfiguration, QueryBuilder queryBuilder) { return toFunctionScoreQueryBuilder(queryBuilder, customQueryConfiguration.getFunctionScore()); } - public Optional<BoolQueryBuilder> boolQueryBuilder(QueryConfiguration customQueryConfiguration, String query) { + public Optional<BoolQueryBuilder> boolQueryBuilder( + QueryConfiguration customQueryConfiguration, String query) { if (customQueryConfiguration.getBoolQuery() != null) { - log.debug("Using custom query configuration queryRegex: {}", customQueryConfiguration.getQueryRegex()); + log.debug( + "Using custom query configuration queryRegex: {}", + customQueryConfiguration.getQueryRegex()); } - return 
Optional.ofNullable(customQueryConfiguration.getBoolQuery()).map(bq -> toBoolQueryBuilder(query, bq)); + return Optional.ofNullable(customQueryConfiguration.getBoolQuery()) + .map(bq -> toBoolQueryBuilder(query, bq)); } private BoolQueryBuilder toBoolQueryBuilder(String query, BoolQueryConfiguration boolQuery) { try { - String jsonFragment = OBJECT_MAPPER.writeValueAsString(boolQuery) - .replace("\"{{query_string}}\"", OBJECT_MAPPER.writeValueAsString(query)) - .replace("\"{{unquoted_query_string}}\"", OBJECT_MAPPER.writeValueAsString(unquote(query))); - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, jsonFragment); + String jsonFragment = + OBJECT_MAPPER + .writeValueAsString(boolQuery) + .replace("\"{{query_string}}\"", OBJECT_MAPPER.writeValueAsString(query)) + .replace( + "\"{{unquoted_query_string}}\"", + OBJECT_MAPPER.writeValueAsString(unquote(query))); + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, jsonFragment); return BoolQueryBuilder.fromXContent(parser); } catch (IOException e) { throw new RuntimeException(e); } } - private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder(QueryBuilder queryBuilder, - Map<String, Object> params) { + private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder( + QueryBuilder queryBuilder, Map<String, Object> params) { try { HashMap<String, Object> body = new HashMap<>(params); if (!body.isEmpty()) { @@ -457,11 +561,11 @@ private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder(QueryBuilder query body.put("query", OBJECT_MAPPER.readValue(queryBuilder.toString(), Map.class)); - String jsonFragment = OBJECT_MAPPER.writeValueAsString(Map.of( - "function_score", body - )); - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, jsonFragment); + String jsonFragment = OBJECT_MAPPER.writeValueAsString(Map.of("function_score", body)); + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, jsonFragment); return (FunctionScoreQueryBuilder) FunctionScoreQueryBuilder.parseInnerQueryBuilder(parser); } catch (IOException e) { throw new RuntimeException(e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 0df6afd49c373..80da30229b74c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -1,13 +1,18 @@ package com.linkedin.metadata.search.elasticsearch.query.request; -import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; +import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.urn.Urn; 
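Before the SearchRequestHandler hunks below: the getBuilder() overloads reformatted in this file memoize one handler per entity-spec list via ConcurrentHashMap.computeIfAbsent, keyed by an ImmutableList copy. A minimal sketch of that caching pattern, with a plain String key standing in for the spec list:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class HandlerCache {
      // Hypothetical stand-in for a handler that is expensive to construct.
      static class Handler {
        final String key;
        Handler(String key) {
          this.key = key;
          System.out.println("built handler for " + key); // runs once per key
        }
      }

      private static final Map<String, Handler> CACHE = new ConcurrentHashMap<>();

      static Handler getBuilder(String key) {
        // computeIfAbsent is atomic per key, so concurrent callers share one instance.
        return CACHE.computeIfAbsent(key, Handler::new);
      }

      public static void main(String[] args) {
        Handler a = getBuilder("dataset");
        Handler b = getBuilder("dataset"); // cache hit; constructor not re-run
        System.out.println(a == b); // true
      }
    }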
import com.linkedin.data.template.DoubleMap; import com.linkedin.data.template.LongMap; +import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; @@ -51,7 +56,6 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; import org.opensearch.action.search.SearchRequest; @@ -72,25 +76,21 @@ import org.opensearch.search.fetch.subphase.highlight.HighlightField; import org.opensearch.search.suggest.term.TermSuggestion; -import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; -import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class SearchRequestHandler { - private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags() + private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = + new SearchFlags() .setFulltext(false) .setMaxAggValues(20) .setSkipCache(false) .setSkipAggregates(false) .setSkipHighlighting(false); - private static final Map<List<EntitySpec>, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); + private static final Map<List<EntitySpec>, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = + new ConcurrentHashMap<>(); private static final String REMOVED = "removed"; private static final String URN_FILTER = "urn"; - private static final String[] FIELDS_TO_FETCH = new String[]{"urn", "usageCountLast30Days"}; - private static final String[] URN_FIELD = new String[]{"urn"}; + private static final String[] FIELDS_TO_FETCH = new String[] {"urn", "usageCountLast30Days"}; + private static final String[] URN_FIELD = new String[] {"urn"}; private final List<EntitySpec> _entitySpecs; private final Set<String> _defaultQueryFieldNames; @@ -100,19 +100,24 @@ public class SearchRequestHandler { private final SearchQueryBuilder _searchQueryBuilder; private final AggregationQueryBuilder _aggregationQueryBuilder; - private SearchRequestHandler(@Nonnull EntitySpec entitySpec, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + private SearchRequestHandler( + @Nonnull EntitySpec entitySpec, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { this(ImmutableList.of(entitySpec), configs, customSearchConfiguration); } - private SearchRequestHandler(@Nonnull List<EntitySpec> entitySpecs, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + private SearchRequestHandler( + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { _entitySpecs = entitySpecs; List<SearchableAnnotation> annotations = getSearchableAnnotations(); _defaultQueryFieldNames = getDefaultQueryFieldNames(annotations); - _filtersToDisplayName = annotations.stream() - .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); + _filtersToDisplayName = + 
annotations.stream() + .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); _filtersToDisplayName.put(INDEX_VIRTUAL_FIELD, "Type"); _highlights = getHighlights(); _searchQueryBuilder = new SearchQueryBuilder(configs, customSearchConfiguration); @@ -120,16 +125,22 @@ private SearchRequestHandler(@Nonnull List<EntitySpec> entitySpecs, @Nonnull Sea _configs = configs; } - public static SearchRequestHandler getBuilder(@Nonnull EntitySpec entitySpec, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public static SearchRequestHandler getBuilder( + @Nonnull EntitySpec entitySpec, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { return REQUEST_HANDLER_BY_ENTITY_NAME.computeIfAbsent( - ImmutableList.of(entitySpec), k -> new SearchRequestHandler(entitySpec, configs, customSearchConfiguration)); + ImmutableList.of(entitySpec), + k -> new SearchRequestHandler(entitySpec, configs, customSearchConfiguration)); } - public static SearchRequestHandler getBuilder(@Nonnull List<EntitySpec> entitySpecs, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public static SearchRequestHandler getBuilder( + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { return REQUEST_HANDLER_BY_ENTITY_NAME.computeIfAbsent( - ImmutableList.copyOf(entitySpecs), k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); + ImmutableList.copyOf(entitySpecs), + k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); } private List<SearchableAnnotation> getSearchableAnnotations() { @@ -142,21 +153,22 @@ private List<SearchableAnnotation> getSearchableAnnotations() { @VisibleForTesting private Set<String> getDefaultQueryFieldNames(List<SearchableAnnotation> annotations) { - return Stream.concat(annotations.stream() - .filter(SearchableAnnotation::isQueryByDefault) - .map(SearchableAnnotation::getFieldName), + return Stream.concat( + annotations.stream() + .filter(SearchableAnnotation::isQueryByDefault) + .map(SearchableAnnotation::getFieldName), Stream.of("urn")) - .collect(Collectors.toSet()); + .collect(Collectors.toSet()); } // If values are not equal, throw error private BinaryOperator<String> mapMerger() { return (s1, s2) -> { - if (!StringUtils.equals(s1, s2)) { - throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); - } - return s1; - }; + if (!StringUtils.equals(s1, s2)) { + throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); + } + return s1; + }; } public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { @@ -168,7 +180,8 @@ public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { /** * Constructs the search query based on the query request. 
* - * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or later + * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or + * later * * @param input the search input text * @param filter the search filter @@ -180,10 +193,16 @@ public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { */ @Nonnull @WithSpan - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { - SearchFlags finalSearchFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + public SearchRequest getSearchRequest( + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { + SearchFlags finalSearchFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -193,7 +212,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi searchSourceBuilder.fetchSource("urn", null); BoolQueryBuilder filterQuery = getFilterQuery(filter); - searchSourceBuilder.query(QueryBuilders.boolQuery() + searchSourceBuilder.query( + QueryBuilders.boolQuery() .must(getQuery(input, finalSearchFlags.isFulltext())) .filter(filterQuery)); if (!finalSearchFlags.isSkipAggregates()) { @@ -217,7 +237,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi /** * Constructs the search query based on the query request. 
* - * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or later + * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or + * later * * @param input the search input text * @param filter the search filter @@ -227,11 +248,18 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi */ @Nonnull @WithSpan - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable Object[] sort, @Nullable String pitId, @Nullable String keepAlive, - int size, SearchFlags searchFlags) { + public SearchRequest getSearchRequest( + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable Object[] sort, + @Nullable String pitId, + @Nullable String keepAlive, + int size, + SearchFlags searchFlags) { SearchRequest searchRequest = new PITAwareSearchRequest(); - SearchFlags finalSearchFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + SearchFlags finalSearchFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); @@ -240,7 +268,10 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi searchSourceBuilder.fetchSource("urn", null); BoolQueryBuilder filterQuery = getFilterQuery(filter); - searchSourceBuilder.query(QueryBuilders.boolQuery().must(getQuery(input, finalSearchFlags.isFulltext())).filter(filterQuery)); + searchSourceBuilder.query( + QueryBuilders.boolQuery() + .must(getQuery(input, finalSearchFlags.isFulltext())) + .filter(filterQuery)); if (!finalSearchFlags.isSkipAggregates()) { _aggregationQueryBuilder.getAggregations().forEach(searchSourceBuilder::aggregation); } @@ -256,8 +287,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi } /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to - * search results. + * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion + * to be applied to search results. * * @param filters {@link Filter} list of conditions with fields and values * @param sortCriterion {@link SortCriterion} to be applied to the search results @@ -266,8 +297,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi * @return {@link SearchRequest} that contains the filtered query */ @Nonnull - public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, - int size) { + public SearchRequest getFilterRequest( + @Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, int size) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filters); @@ -281,10 +312,10 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr } /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to - * search results. + * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion + * to be applied to search results. 
* - * TODO: Used in batch ingestion from ingestion scheduler + * <p>TODO: Used in batch ingestion from ingestion scheduler * * @param filters {@link Filter} list of conditions with fields and values * @param sortCriterion {@link SortCriterion} to be applied to the search results @@ -295,8 +326,13 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr * @return {@link SearchRequest} that contains the filtered query */ @Nonnull - public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCriterion sortCriterion, @Nullable Object[] sort, - @Nullable String pitId, @Nonnull String keepAlive, int size) { + public SearchRequest getFilterRequest( + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + int size) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filters); @@ -320,14 +356,16 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr * @return {@link SearchRequest} that contains the aggregation query */ @Nonnull - public static SearchRequest getAggregationRequest(@Nonnull String field, @Nullable Filter filter, int limit) { + public static SearchRequest getAggregationRequest( + @Nonnull String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filter); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQuery); searchSourceBuilder.size(0); - searchSourceBuilder.aggregation(AggregationBuilders.terms(field).field(ESUtils.toKeywordField(field, false)).size(limit)); + searchSourceBuilder.aggregation( + AggregationBuilders.terms(field).field(ESUtils.toKeywordField(field, false)).size(limit)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -347,19 +385,22 @@ public HighlightBuilder getHighlights() { // Check for each field name and any subfields _defaultQueryFieldNames.stream() - .flatMap(fieldName -> Stream.of(fieldName, fieldName + ".*")).distinct() - .forEach(highlightBuilder::field); + .flatMap(fieldName -> Stream.of(fieldName, fieldName + ".*")) + .distinct() + .forEach(highlightBuilder::field); return highlightBuilder; } @WithSpan - public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { + public SearchResult extractResult( + @Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; List<SearchEntity> resultList = getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); - return new SearchResult().setEntities(new SearchEntityArray(resultList)) + return new SearchResult() + .setEntities(new SearchEntityArray(resultList)) .setMetadata(searchResultMetadata) .setFrom(from) .setPageSize(size) @@ -367,8 +408,13 @@ public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter } @WithSpan - public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Filter filter, @Nullable String scrollId, - @Nullable String keepAlive, int size, boolean supportsPointInTime) { + public ScrollResult extractScrollResult( + @Nonnull SearchResponse searchResponse, + Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + boolean supportsPointInTime) { int 
totalCount = (int) searchResponse.getHits().getTotalHits().value; List<SearchEntity> resultList = getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); @@ -379,15 +425,21 @@ public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Object[] sort = searchHits[searchHits.length - 1].getSortValues(); long expirationTimeMs = 0L; if (keepAlive != null && supportsPointInTime) { - expirationTimeMs = TimeValue.parseTimeValue(keepAlive, "expirationTime").getMillis() + System.currentTimeMillis(); + expirationTimeMs = + TimeValue.parseTimeValue(keepAlive, "expirationTime").getMillis() + + System.currentTimeMillis(); } - nextScrollId = new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs).toScrollId(); + nextScrollId = + new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs) + .toScrollId(); } - ScrollResult scrollResult = new ScrollResult().setEntities(new SearchEntityArray(resultList)) - .setMetadata(searchResultMetadata) - .setPageSize(size) - .setNumEntities(totalCount); + ScrollResult scrollResult = + new ScrollResult() + .setEntities(new SearchEntityArray(resultList)) + .setMetadata(searchResultMetadata) + .setPageSize(size) + .setNumEntities(totalCount); if (nextScrollId != null) { scrollResult.setScrollId(nextScrollId); @@ -418,17 +470,20 @@ private List<MatchedField> extractMatchedFields(@Nonnull SearchHit hit) { if (!highlightedFieldNamesAndValues.containsKey(queryName)) { if (hit.getFields().containsKey(queryName)) { for (Object fieldValue : hit.getFields().get(queryName).getValues()) { - highlightedFieldNamesAndValues.computeIfAbsent(queryName, k -> new HashSet<>()).add(fieldValue.toString()); + highlightedFieldNamesAndValues + .computeIfAbsent(queryName, k -> new HashSet<>()) + .add(fieldValue.toString()); } } else { highlightedFieldNamesAndValues.put(queryName, Set.of("")); } } } - return highlightedFieldNamesAndValues.entrySet() - .stream() + return highlightedFieldNamesAndValues.entrySet().stream() .flatMap( - entry -> entry.getValue().stream().map(value -> new MatchedField().setName(entry.getKey()).setValue(value))) + entry -> + entry.getValue().stream() + .map(value -> new MatchedField().setName(entry.getKey()).setValue(value))) .collect(Collectors.toList()); } @@ -438,11 +493,13 @@ private Optional<String> getFieldName(String matchedField) { } private Map<String, Double> extractFeatures(@Nonnull SearchHit searchHit) { - return ImmutableMap.of(Features.Name.SEARCH_BACKEND_SCORE.toString(), (double) searchHit.getScore()); + return ImmutableMap.of( + Features.Name.SEARCH_BACKEND_SCORE.toString(), (double) searchHit.getScore()); } private SearchEntity getResult(@Nonnull SearchHit hit) { - return new SearchEntity().setEntity(getUrnFromSearchHit(hit)) + return new SearchEntity() + .setEntity(getUrnFromSearchHit(hit)) .setMatchedFields(new MatchedFieldArray(extractMatchedFields(hit))) .setScore(hit.getScore()) .setFeatures(new DoubleMap(extractFeatures(hit))); @@ -456,7 +513,9 @@ private SearchEntity getResult(@Nonnull SearchHit hit) { */ @Nonnull private List<SearchEntity> getResults(@Nonnull SearchResponse searchResponse) { - return Arrays.stream(searchResponse.getHits().getHits()).map(this::getResult).collect(Collectors.toList()); + return Arrays.stream(searchResponse.getHits().getHits()) + .map(this::getResult) + .collect(Collectors.toList()); } @Nonnull @@ -473,15 +532,17 @@ private Urn getUrnFromSearchHit(@Nonnull SearchHit hit) { * * @param 
searchResponse the raw {@link SearchResponse} as obtained from the search engine * @param filter the provided Filter to use with Elasticsearch - * - * @return {@link SearchResultMetadata} with aggregation and list of urns obtained from {@link SearchResponse} + * @return {@link SearchResultMetadata} with aggregation and list of urns obtained from {@link + * SearchResponse} */ @Nonnull - private SearchResultMetadata extractSearchResultMetadata(@Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + private SearchResultMetadata extractSearchResultMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { final SearchResultMetadata searchResultMetadata = new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - final List<AggregationMetadata> aggregationMetadataList = extractAggregationMetadata(searchResponse, filter); + final List<AggregationMetadata> aggregationMetadataList = + extractAggregationMetadata(searchResponse, filter); searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); final List<SearchSuggestion> searchSuggestions = extractSearchSuggestions(searchResponse); @@ -494,34 +555,43 @@ private String computeDisplayName(String name) { if (_filtersToDisplayName.containsKey(name)) { return _filtersToDisplayName.get(name); } else if (name.contains(AGGREGATION_SEPARATOR_CHAR)) { - return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)).map(_filtersToDisplayName::get).collect( - Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); + return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)) + .map(_filtersToDisplayName::get) + .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } return name; } - private List<AggregationMetadata> extractAggregationMetadata(@Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + private List<AggregationMetadata> extractAggregationMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { final List<AggregationMetadata> aggregationMetadataList = new ArrayList<>(); if (searchResponse.getAggregations() == null) { return addFiltersToAggregationMetadata(aggregationMetadataList, filter); } - for (Map.Entry<String, Aggregation> entry : searchResponse.getAggregations().getAsMap().entrySet()) { - final Map<String, Long> oneTermAggResult = extractTermAggregations((ParsedTerms) entry.getValue(), entry.getKey().equals("_entityType")); + for (Map.Entry<String, Aggregation> entry : + searchResponse.getAggregations().getAsMap().entrySet()) { + final Map<String, Long> oneTermAggResult = + extractTermAggregations( + (ParsedTerms) entry.getValue(), entry.getKey().equals("_entityType")); if (oneTermAggResult.isEmpty()) { continue; } - final AggregationMetadata aggregationMetadata = new AggregationMetadata().setName(entry.getKey()) - .setDisplayName(computeDisplayName(entry.getKey())) - .setAggregations(new LongMap(oneTermAggResult)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); + final AggregationMetadata aggregationMetadata = + new AggregationMetadata() + .setName(entry.getKey()) + .setDisplayName(computeDisplayName(entry.getKey())) + .setAggregations(new LongMap(oneTermAggResult)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); aggregationMetadataList.add(aggregationMetadata); } return addFiltersToAggregationMetadata(aggregationMetadataList, filter); - } + } @WithSpan - public static Map<String, Long> 
extractTermAggregations(@Nonnull SearchResponse searchResponse, - @Nonnull String aggregationName) { + public static Map<String, Long> extractTermAggregations( + @Nonnull SearchResponse searchResponse, @Nonnull String aggregationName) { if (searchResponse.getAggregations() == null) { return Collections.emptyMap(); } @@ -530,7 +600,8 @@ public static Map<String, Long> extractTermAggregations(@Nonnull SearchResponse if (aggregation == null) { return Collections.emptyMap(); } - return extractTermAggregations((ParsedTerms) aggregation, aggregationName.equals("_entityType")); + return extractTermAggregations( + (ParsedTerms) aggregation, aggregationName.equals("_entityType")); } private List<SearchSuggestion> extractSearchSuggestions(@Nonnull SearchResponse searchResponse) { @@ -538,13 +609,18 @@ private List<SearchSuggestion> extractSearchSuggestions(@Nonnull SearchResponse if (searchResponse.getSuggest() != null) { TermSuggestion termSuggestion = searchResponse.getSuggest().getSuggestion(NAME_SUGGESTION); if (termSuggestion != null && termSuggestion.getEntries().size() > 0) { - termSuggestion.getEntries().get(0).getOptions().forEach(suggestOption -> { - SearchSuggestion searchSuggestion = new SearchSuggestion(); - searchSuggestion.setText(String.valueOf(suggestOption.getText())); - searchSuggestion.setFrequency(suggestOption.getFreq()); - searchSuggestion.setScore(suggestOption.getScore()); - searchSuggestions.add(searchSuggestion); - }); + termSuggestion + .getEntries() + .get(0) + .getOptions() + .forEach( + suggestOption -> { + SearchSuggestion searchSuggestion = new SearchSuggestion(); + searchSuggestion.setText(String.valueOf(suggestOption.getText())); + searchSuggestion.setFrequency(suggestOption.getFreq()); + searchSuggestion.setScore(suggestOption.getScore()); + searchSuggestions.add(searchSuggestion); + }); } } return searchSuggestions; @@ -552,6 +628,7 @@ private List<SearchSuggestion> extractSearchSuggestions(@Nonnull SearchResponse /** * Adds nested sub-aggregation values to the aggregated results + * * @param aggs The aggregations to traverse. Could be null (base case) * @return A map from names to aggregation count values */ @@ -568,8 +645,9 @@ private static Map<String, Long> recursivelyAddNestedSubAggs(@Nullable Aggregati String key = bucket.getKeyAsString(); // Gets filtered sub aggregation doc count if exist Map<String, Long> subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); - for (Map.Entry<String, Long> subAggEntry: subAggs.entrySet()) { - aggResult.put(key + AGGREGATION_SEPARATOR_CHAR + subAggEntry.getKey(), subAggEntry.getValue()); + for (Map.Entry<String, Long> subAggEntry : subAggs.entrySet()) { + aggResult.put( + key + AGGREGATION_SEPARATOR_CHAR + subAggEntry.getKey(), subAggEntry.getValue()); } long docCount = bucket.getDocCount(); if (docCount > 0) { @@ -588,7 +666,8 @@ private static Map<String, Long> recursivelyAddNestedSubAggs(@Nullable Aggregati * @return a map with aggregation key and corresponding doc counts */ @Nonnull - private static Map<String, Long> extractTermAggregations(@Nonnull ParsedTerms terms, boolean includeZeroes) { + private static Map<String, Long> extractTermAggregations( + @Nonnull ParsedTerms terms, boolean includeZeroes) { final Map<String, Long> aggResult = new HashMap<>(); List<? 
extends Terms.Bucket> bucketList = terms.getBuckets(); @@ -598,7 +677,9 @@ private static Map<String, Long> extractTermAggregations(@Nonnull ParsedTerms te // Gets filtered sub aggregation doc count if exist Map<String, Long> subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); for (Map.Entry<String, Long> subAggEntry : subAggs.entrySet()) { - aggResult.put(String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, subAggEntry.getKey()), subAggEntry.getValue()); + aggResult.put( + String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, subAggEntry.getKey()), + subAggEntry.getValue()); } long docCount = bucket.getDocCount(); if (includeZeroes || docCount > 0) { @@ -609,11 +690,10 @@ private static Map<String, Long> extractTermAggregations(@Nonnull ParsedTerms te return aggResult; } - /** - * Injects the missing conjunctive filters into the aggregations list. - */ - public List<AggregationMetadata> addFiltersToAggregationMetadata(@Nonnull final List<AggregationMetadata> originalMetadata, @Nullable final Filter filter) { - if (filter == null) { + /** Injects the missing conjunctive filters into the aggregations list. */ + public List<AggregationMetadata> addFiltersToAggregationMetadata( + @Nonnull final List<AggregationMetadata> originalMetadata, @Nullable final Filter filter) { + if (filter == null) { return originalMetadata; } if (filter.hasOr()) { @@ -624,14 +704,18 @@ public List<AggregationMetadata> addFiltersToAggregationMetadata(@Nonnull final return originalMetadata; } - void addOrFiltersToAggregationMetadata(@Nonnull final ConjunctiveCriterionArray or, @Nonnull final List<AggregationMetadata> originalMetadata) { + void addOrFiltersToAggregationMetadata( + @Nonnull final ConjunctiveCriterionArray or, + @Nonnull final List<AggregationMetadata> originalMetadata) { for (ConjunctiveCriterion conjunction : or) { // For each item in the conjunction, inject an empty aggregation if necessary addCriteriaFiltersToAggregationMetadata(conjunction.getAnd(), originalMetadata); } } - private void addCriteriaFiltersToAggregationMetadata(@Nonnull final CriterionArray criteria, @Nonnull final List<AggregationMetadata> originalMetadata) { + private void addCriteriaFiltersToAggregationMetadata( + @Nonnull final CriterionArray criteria, + @Nonnull final List<AggregationMetadata> originalMetadata) { for (Criterion criterion : criteria) { addCriterionFiltersToAggregationMetadata(criterion, originalMetadata); } @@ -642,19 +726,25 @@ private void addCriterionFiltersToAggregationMetadata( @Nonnull final List<AggregationMetadata> aggregationMetadata) { // We should never see duplicate aggregation for the same field in aggregation metadata list. - final Map<String, AggregationMetadata> aggregationMetadataMap = aggregationMetadata.stream().collect(Collectors.toMap( - AggregationMetadata::getName, agg -> agg)); + final Map<String, AggregationMetadata> aggregationMetadataMap = + aggregationMetadata.stream() + .collect(Collectors.toMap(AggregationMetadata::getName, agg -> agg)); // Map a filter criterion to a facet field (e.g. domains.keyword -> domains) final String finalFacetField = toFacetField(criterion.getField()); if (finalFacetField == null) { - log.warn(String.format("Found invalid filter field for entity search. Invalid or unrecognized facet %s", criterion.getField())); + log.warn( + String.format( + "Found invalid filter field for entity search. 
Invalid or unrecognized facet %s",
+              criterion.getField()));
       return;
     }
 
-    // We don't want to add urn filters to the aggregations we return as a sidecar to search results.
-    // They are automatically added by searchAcrossLineage and we dont need them to show up in the filter panel.
+    // We don't want to add urn filters to the aggregations we return as a sidecar to search
+    // results.
+    // They are automatically added by searchAcrossLineage and we don't need them to show up in the
+    // filter panel.
     if (finalFacetField.equals(URN_FILTER)) {
       return;
     }
@@ -667,7 +757,10 @@ private void addCriterionFiltersToAggregationMetadata(
        */
       AggregationMetadata originalAggMetadata = aggregationMetadataMap.get(finalFacetField);
       if (criterion.hasValues()) {
-        criterion.getValues().stream().forEach(value -> addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata));
+        criterion.getValues().stream()
+            .forEach(
+                value ->
+                    addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata));
       } else {
         addMissingAggregationValueToAggregationMetadata(criterion.getValue(), originalAggMetadata);
       }
@@ -678,21 +771,25 @@ private void addCriterionFiltersToAggregationMetadata(
        * If there are no results for a particular facet, it will NOT be in the original aggregation set returned by
       * Elasticsearch.
       */
-      aggregationMetadata.add(buildAggregationMetadata(
-          finalFacetField,
-          _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField),
-          new LongMap(criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))),
-          new FilterValueArray(criterion.getValues().stream().map(value -> createFilterValue(value, 0L, true)).collect(
-              Collectors.toList())))
-      );
+      aggregationMetadata.add(
+          buildAggregationMetadata(
+              finalFacetField,
+              _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField),
+              new LongMap(
+                  criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))),
+              new FilterValueArray(
+                  criterion.getValues().stream()
+                      .map(value -> createFilterValue(value, 0L, true))
+                      .collect(Collectors.toList())))));
     }
   }
 
-  private void addMissingAggregationValueToAggregationMetadata(@Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) {
-    if (
-        originalMetadata.getAggregations().entrySet().stream().noneMatch(entry -> value.equals(entry.getKey()))
-            || originalMetadata.getFilterValues().stream().noneMatch(entry -> entry.getValue().equals(value))
-    ) {
+  private void addMissingAggregationValueToAggregationMetadata(
+      @Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) {
+    if (originalMetadata.getAggregations().entrySet().stream()
+            .noneMatch(entry -> value.equals(entry.getKey()))
+        || originalMetadata.getFilterValues().stream()
+            .noneMatch(entry -> entry.getValue().equals(value))) {
      // No aggregation found for filtered value -- inject one!
originalMetadata.getAggregations().put(value, 0L); originalMetadata.getFilterValues().add(createFilterValue(value, 0L, true)); @@ -712,8 +809,7 @@ private AggregationMetadata buildAggregationMetadata( } private List<Pair<String, String>> getFacetFieldDisplayNameFromAnnotation( - @Nonnull final SearchableAnnotation annotation - ) { + @Nonnull final SearchableAnnotation annotation) { final List<Pair<String, String>> facetsFromAnnotation = new ArrayList<>(); // Case 1: Default Keyword field if (annotation.isAddToFilters()) { @@ -721,9 +817,8 @@ private List<Pair<String, String>> getFacetFieldDisplayNameFromAnnotation( } // Case 2: HasX boolean field if (annotation.isAddHasValuesToFilters() && annotation.getHasValuesFieldName().isPresent()) { - facetsFromAnnotation.add(Pair.of( - annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName() - )); + facetsFromAnnotation.add( + Pair.of(annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName())); } return facetsFromAnnotation; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java index be64df3179a9d..b49218f4224a9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java @@ -1,6 +1,10 @@ package com.linkedin.metadata.search.elasticsearch.update; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.bulk.BulkProcessor; @@ -8,12 +12,6 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.support.WriteRequest; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - - @Slf4j public class BulkListener implements BulkProcessor.Listener { private static final Map<WriteRequest.RefreshPolicy, BulkListener> INSTANCES = new HashMap<>(); @@ -21,6 +19,7 @@ public class BulkListener implements BulkProcessor.Listener { public static BulkListener getInstance() { return INSTANCES.computeIfAbsent(null, BulkListener::new); } + public static BulkListener getInstance(WriteRequest.RefreshPolicy refreshPolicy) { return INSTANCES.computeIfAbsent(refreshPolicy, BulkListener::new); } @@ -41,10 +40,18 @@ public void beforeBulk(long executionId, BulkRequest request) { @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { if (response.hasFailures()) { - log.error("Failed to feed bulk request. Number of events: " + response.getItems().length + " Took time ms: " - + response.getIngestTookInMillis() + " Message: " + response.buildFailureMessage()); + log.error( + "Failed to feed bulk request. Number of events: " + + response.getItems().length + + " Took time ms: " + + response.getIngestTookInMillis() + + " Message: " + + response.buildFailureMessage()); } else { - log.info("Successfully fed bulk request. Number of events: " + response.getItems().length + " Took time ms: " + log.info( + "Successfully fed bulk request. 
Number of events: " + + response.getItems().length + + " Took time ms: " + response.getIngestTookInMillis()); } incrementMetrics(response); @@ -53,20 +60,24 @@ public void afterBulk(long executionId, BulkRequest request, BulkResponse respon @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) { // Exception raised outside this method - log.error("Error feeding bulk request. No retries left. Request: {}", buildBulkRequestSummary(request), failure); + log.error( + "Error feeding bulk request. No retries left. Request: {}", + buildBulkRequestSummary(request), + failure); incrementMetrics(request, failure); } private static void incrementMetrics(BulkResponse response) { Arrays.stream(response.getItems()) - .map(req -> buildMetricName(req.getOpType(), req.status().name())) - .forEach(metricName -> MetricUtils.counter(BulkListener.class, metricName).inc()); + .map(req -> buildMetricName(req.getOpType(), req.status().name())) + .forEach(metricName -> MetricUtils.counter(BulkListener.class, metricName).inc()); } private static void incrementMetrics(BulkRequest request, Throwable failure) { request.requests().stream() - .map(req -> buildMetricName(req.opType(), "exception")) - .forEach(metricName -> MetricUtils.exceptionCounter(BulkListener.class, metricName, failure)); + .map(req -> buildMetricName(req.opType(), "exception")) + .forEach( + metricName -> MetricUtils.exceptionCounter(BulkListener.class, metricName, failure)); } private static String buildMetricName(DocWriteRequest.OpType opType, String status) { @@ -74,9 +85,12 @@ private static String buildMetricName(DocWriteRequest.OpType opType, String stat } public static String buildBulkRequestSummary(BulkRequest request) { - return request.requests().stream().map(req -> String.format( - "Failed to perform bulk request: index [%s], optype: [%s], type [%s], id [%s]", - req.index(), req.opType(), req.opType(), req.id()) - ).collect(Collectors.joining(";")); + return request.requests().stream() + .map( + req -> + String.format( + "Failed to perform bulk request: index [%s], optype: [%s], type [%s], id [%s]", + req.index(), req.opType(), req.opType(), req.id())) + .collect(Collectors.joining(";")); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java index a1e5b363d8a78..a2b9292eac6e4 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java @@ -1,6 +1,9 @@ package com.linkedin.metadata.search.elasticsearch.update; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.io.Closeable; +import java.io.IOException; +import java.util.Optional; import lombok.AccessLevel; import lombok.Builder; import lombok.Getter; @@ -21,168 +24,181 @@ import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryRequest; -import java.io.Closeable; -import java.io.IOException; -import java.util.Optional; - - @Slf4j @Builder(builderMethodName = "hiddenBuilder") public class ESBulkProcessor implements Closeable { - private static final String ES_WRITES_METRIC = "num_elasticSearch_writes"; - private static final String ES_BATCHES_METRIC = "num_elasticSearch_batches_submitted"; - private static final String ES_DELETE_EXCEPTION_METRIC = "delete_by_query"; - private static 
final String ES_SUBMIT_DELETE_EXCEPTION_METRIC = "submit_delete_by_query_task"; - private static final String ES_SUBMIT_REINDEX_METRIC = "reindex_submit"; - private static final String ES_REINDEX_SUCCESS_METRIC = "reindex_success"; - private static final String ES_REINDEX_FAILED_METRIC = "reindex_failed"; - - public static ESBulkProcessor.ESBulkProcessorBuilder builder(RestHighLevelClient searchClient) { - return hiddenBuilder().searchClient(searchClient); - } - - @NonNull - private final RestHighLevelClient searchClient; - @Builder.Default - @NonNull - private Boolean async = false; - @Builder.Default - @NonNull - private Boolean batchDelete = false; - @Builder.Default - private Integer bulkRequestsLimit = 500; - @Builder.Default - private Integer bulkFlushPeriod = 1; - @Builder.Default - private Integer numRetries = 3; - @Builder.Default - private Long retryInterval = 1L; - @Builder.Default - private TimeValue defaultTimeout = TimeValue.timeValueMinutes(1); - @Getter - private final WriteRequest.RefreshPolicy writeRequestRefreshPolicy; - @Setter(AccessLevel.NONE) - @Getter(AccessLevel.NONE) - private final BulkProcessor bulkProcessor; - - private ESBulkProcessor(@NonNull RestHighLevelClient searchClient, @NonNull Boolean async, @NonNull Boolean batchDelete, - Integer bulkRequestsLimit, Integer bulkFlushPeriod, Integer numRetries, Long retryInterval, - TimeValue defaultTimeout, WriteRequest.RefreshPolicy writeRequestRefreshPolicy, - BulkProcessor ignored) { - this.searchClient = searchClient; - this.async = async; - this.batchDelete = batchDelete; - this.bulkRequestsLimit = bulkRequestsLimit; - this.bulkFlushPeriod = bulkFlushPeriod; - this.numRetries = numRetries; - this.retryInterval = retryInterval; - this.defaultTimeout = defaultTimeout; - this.writeRequestRefreshPolicy = writeRequestRefreshPolicy; - this.bulkProcessor = async ? toAsyncBulkProcessor() : toBulkProcessor(); - } - - public ESBulkProcessor add(DocWriteRequest<?> request) { - MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(); - bulkProcessor.add(request); - return this; - } - - public Optional<BulkByScrollResponse> deleteByQuery(QueryBuilder queryBuilder, String... indices) { - return deleteByQuery(queryBuilder, true, bulkRequestsLimit, defaultTimeout, indices); - } - - public Optional<BulkByScrollResponse> deleteByQuery(QueryBuilder queryBuilder, boolean refresh, String... indices) { - return deleteByQuery(queryBuilder, refresh, bulkRequestsLimit, defaultTimeout, indices); - } - - public Optional<BulkByScrollResponse> deleteByQuery(QueryBuilder queryBuilder, boolean refresh, - int limit, TimeValue timeout, String... indices) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest() - .setQuery(queryBuilder) - .setBatchSize(limit) - .setMaxRetries(numRetries) - .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) - .setTimeout(timeout) - .setRefresh(refresh); - deleteByQueryRequest.indices(indices); - - try { - if (!batchDelete) { - // flush pending writes - bulkProcessor.flush(); - } - // perform delete after local flush - final BulkByScrollResponse deleteResponse = searchClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT); - MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(deleteResponse.getTotal()); - return Optional.of(deleteResponse); - } catch (Exception e) { - log.error("ERROR: Failed to delete by query. 
See stacktrace for a more detailed error:", e); - MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_DELETE_EXCEPTION_METRIC, e); - } - - return Optional.empty(); - } - public Optional<TaskSubmissionResponse> deleteByQueryAsync(QueryBuilder queryBuilder, boolean refresh, - int limit, @Nullable TimeValue timeout, String... indices) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest() + private static final String ES_WRITES_METRIC = "num_elasticSearch_writes"; + private static final String ES_BATCHES_METRIC = "num_elasticSearch_batches_submitted"; + private static final String ES_DELETE_EXCEPTION_METRIC = "delete_by_query"; + private static final String ES_SUBMIT_DELETE_EXCEPTION_METRIC = "submit_delete_by_query_task"; + private static final String ES_SUBMIT_REINDEX_METRIC = "reindex_submit"; + private static final String ES_REINDEX_SUCCESS_METRIC = "reindex_success"; + private static final String ES_REINDEX_FAILED_METRIC = "reindex_failed"; + + public static ESBulkProcessor.ESBulkProcessorBuilder builder(RestHighLevelClient searchClient) { + return hiddenBuilder().searchClient(searchClient); + } + + @NonNull private final RestHighLevelClient searchClient; + @Builder.Default @NonNull private Boolean async = false; + @Builder.Default @NonNull private Boolean batchDelete = false; + @Builder.Default private Integer bulkRequestsLimit = 500; + @Builder.Default private Integer bulkFlushPeriod = 1; + @Builder.Default private Integer numRetries = 3; + @Builder.Default private Long retryInterval = 1L; + @Builder.Default private TimeValue defaultTimeout = TimeValue.timeValueMinutes(1); + @Getter private final WriteRequest.RefreshPolicy writeRequestRefreshPolicy; + + @Setter(AccessLevel.NONE) + @Getter(AccessLevel.NONE) + private final BulkProcessor bulkProcessor; + + private ESBulkProcessor( + @NonNull RestHighLevelClient searchClient, + @NonNull Boolean async, + @NonNull Boolean batchDelete, + Integer bulkRequestsLimit, + Integer bulkFlushPeriod, + Integer numRetries, + Long retryInterval, + TimeValue defaultTimeout, + WriteRequest.RefreshPolicy writeRequestRefreshPolicy, + BulkProcessor ignored) { + this.searchClient = searchClient; + this.async = async; + this.batchDelete = batchDelete; + this.bulkRequestsLimit = bulkRequestsLimit; + this.bulkFlushPeriod = bulkFlushPeriod; + this.numRetries = numRetries; + this.retryInterval = retryInterval; + this.defaultTimeout = defaultTimeout; + this.writeRequestRefreshPolicy = writeRequestRefreshPolicy; + this.bulkProcessor = async ? toAsyncBulkProcessor() : toBulkProcessor(); + } + + public ESBulkProcessor add(DocWriteRequest<?> request) { + MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(); + bulkProcessor.add(request); + return this; + } + + public Optional<BulkByScrollResponse> deleteByQuery( + QueryBuilder queryBuilder, String... indices) { + return deleteByQuery(queryBuilder, true, bulkRequestsLimit, defaultTimeout, indices); + } + + public Optional<BulkByScrollResponse> deleteByQuery( + QueryBuilder queryBuilder, boolean refresh, String... indices) { + return deleteByQuery(queryBuilder, refresh, bulkRequestsLimit, defaultTimeout, indices); + } + + public Optional<BulkByScrollResponse> deleteByQuery( + QueryBuilder queryBuilder, boolean refresh, int limit, TimeValue timeout, String... 
indices) { + DeleteByQueryRequest deleteByQueryRequest = + new DeleteByQueryRequest() .setQuery(queryBuilder) .setBatchSize(limit) .setMaxRetries(numRetries) .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) + .setTimeout(timeout) .setRefresh(refresh); - if (timeout != null) { - deleteByQueryRequest.setTimeout(timeout); - } - // count the number of conflicts, but do not abort the operation - deleteByQueryRequest.setConflicts("proceed"); - deleteByQueryRequest.indices(indices); - try { - // flush pending writes - bulkProcessor.flush(); - TaskSubmissionResponse resp = searchClient.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); - MetricUtils.counter(this.getClass(), ES_BATCHES_METRIC).inc(); - return Optional.of(resp); - } catch (Exception e) { - log.error("ERROR: Failed to submit a delete by query task. See stacktrace for a more detailed error:", e); - MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_SUBMIT_DELETE_EXCEPTION_METRIC, e); - } - return Optional.empty(); - } - - private BulkProcessor toBulkProcessor() { - return BulkProcessor.builder((request, bulkListener) -> { - try { - BulkResponse response = searchClient.bulk(request, RequestOptions.DEFAULT); - bulkListener.onResponse(response); - } catch (IOException e) { - bulkListener.onFailure(e); - throw new RuntimeException(e); - } - }, BulkListener.getInstance(writeRequestRefreshPolicy)) - .setBulkActions(bulkRequestsLimit) - .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) - // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other retry methods) - .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) - .build(); - } + deleteByQueryRequest.indices(indices); - private BulkProcessor toAsyncBulkProcessor() { - return BulkProcessor.builder((request, bulkListener) -> { - searchClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); - }, BulkListener.getInstance(writeRequestRefreshPolicy)) - .setBulkActions(bulkRequestsLimit) - .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) - // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other retry methods) - .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) - .build(); + try { + if (!batchDelete) { + // flush pending writes + bulkProcessor.flush(); + } + // perform delete after local flush + final BulkByScrollResponse deleteResponse = + searchClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT); + MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(deleteResponse.getTotal()); + return Optional.of(deleteResponse); + } catch (Exception e) { + log.error("ERROR: Failed to delete by query. See stacktrace for a more detailed error:", e); + MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_DELETE_EXCEPTION_METRIC, e); } - @Override - public void close() throws IOException { - bulkProcessor.close(); + return Optional.empty(); + } + + public Optional<TaskSubmissionResponse> deleteByQueryAsync( + QueryBuilder queryBuilder, + boolean refresh, + int limit, + @Nullable TimeValue timeout, + String... 
indices) { + DeleteByQueryRequest deleteByQueryRequest = + new DeleteByQueryRequest() + .setQuery(queryBuilder) + .setBatchSize(limit) + .setMaxRetries(numRetries) + .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) + .setRefresh(refresh); + if (timeout != null) { + deleteByQueryRequest.setTimeout(timeout); } - - public void flush() { - bulkProcessor.flush(); + // count the number of conflicts, but do not abort the operation + deleteByQueryRequest.setConflicts("proceed"); + deleteByQueryRequest.indices(indices); + try { + // flush pending writes + bulkProcessor.flush(); + TaskSubmissionResponse resp = + searchClient.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); + MetricUtils.counter(this.getClass(), ES_BATCHES_METRIC).inc(); + return Optional.of(resp); + } catch (Exception e) { + log.error( + "ERROR: Failed to submit a delete by query task. See stacktrace for a more detailed error:", + e); + MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_SUBMIT_DELETE_EXCEPTION_METRIC, e); } + return Optional.empty(); + } + + private BulkProcessor toBulkProcessor() { + return BulkProcessor.builder( + (request, bulkListener) -> { + try { + BulkResponse response = searchClient.bulk(request, RequestOptions.DEFAULT); + bulkListener.onResponse(response); + } catch (IOException e) { + bulkListener.onFailure(e); + throw new RuntimeException(e); + } + }, + BulkListener.getInstance(writeRequestRefreshPolicy)) + .setBulkActions(bulkRequestsLimit) + .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) + // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other + // retry methods) + .setBackoffPolicy( + BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) + .build(); + } + + private BulkProcessor toAsyncBulkProcessor() { + return BulkProcessor.builder( + (request, bulkListener) -> { + searchClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); + }, + BulkListener.getInstance(writeRequestRefreshPolicy)) + .setBulkActions(bulkRequestsLimit) + .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) + // This retry is ONLY for "resource constraints", i.e. 
429 errors (each request has other + // retry methods) + .setBackoffPolicy( + BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) + .build(); + } + + @Override + public void close() throws IOException { + bulkProcessor.close(); + } + + public void flush() { + bulkProcessor.flush(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java index edcdf5654028c..306352523118b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java @@ -16,7 +16,6 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.script.Script; - @Slf4j @RequiredArgsConstructor public class ESWriteDAO { @@ -34,10 +33,11 @@ public class ESWriteDAO { * @param document the document to update / insert * @param docId the ID of the document */ - public void upsertDocument(@Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { + public void upsertDocument( + @Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); - final UpdateRequest updateRequest = new UpdateRequest( - indexName, docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -57,12 +57,12 @@ public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { bulkProcessor.add(new DeleteRequest(indexName).id(docId)); } - /** - * Applies a script to a particular document - */ - public void applyScriptUpdate(@Nonnull String entityName, @Nonnull String docId, @Nonnull String script) { + /** Applies a script to a particular document */ + public void applyScriptUpdate( + @Nonnull String entityName, @Nonnull String docId, @Nonnull String script) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); - UpdateRequest updateRequest = new UpdateRequest(indexName, docId) + UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .scriptedUpsert(true) .retryOnConflict(numRetries) @@ -70,9 +70,7 @@ public void applyScriptUpdate(@Nonnull String entityName, @Nonnull String docId, bulkProcessor.add(updateRequest); } - /** - * Clear all documents in all the indices - */ + /** Clear all documents in all the indices */ public void clear() { String[] indices = getIndices(indexConvention.getAllEntityIndicesPattern()); bulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), indices); @@ -80,11 +78,12 @@ public void clear() { private String[] getIndices(String pattern) { try { - GetIndexResponse response = searchClient.indices().get(new GetIndexRequest(pattern), RequestOptions.DEFAULT); + GetIndexResponse response = + searchClient.indices().get(new GetIndexRequest(pattern), RequestOptions.DEFAULT); return response.getIndices(); } catch (IOException e) { log.error("Failed to get indices using pattern {}", pattern); - return new String[]{}; + return new String[] {}; } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java index fb146a9f4d8cc..d0bcec9b4ef40 100644 --- 
a/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java @@ -3,13 +3,8 @@ import com.linkedin.metadata.search.SearchEntity; import java.util.List; - -/** - * Interface for extractors that extract Features for each entity returned by search - */ +/** Interface for extractors that extract Features for each entity returned by search */ public interface FeatureExtractor { - /** - * Return the extracted features for each entity returned by search - */ + /** Return the extracted features for each entity returned by search */ List<Features> extractFeatures(List<SearchEntity> entities); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java index f1250ecd61021..2a9571b18b726 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java @@ -9,7 +9,6 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - @Slf4j @Value public class Features { @@ -54,11 +53,15 @@ public static Features merge(@Nonnull Features features1, @Nonnull Features feat } @Nonnull - public static List<Features> merge(@Nonnull List<Features> featureList1, @Nonnull List<Features> featureList2) { + public static List<Features> merge( + @Nonnull List<Features> featureList1, @Nonnull List<Features> featureList2) { if (featureList1.size() != featureList2.size()) { - throw new IllegalArgumentException(String.format("Expected both lists to have the same number of elements. %s != %s", + throw new IllegalArgumentException( + String.format( + "Expected both lists to have the same number of elements. %s != %s", featureList1.size(), featureList2.size())); } - return Streams.zip(featureList1.stream(), featureList2.stream(), Features::merge).collect(Collectors.toList()); + return Streams.zip(featureList1.stream(), featureList2.stream(), Features::merge) + .collect(Collectors.toList()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java index 4983cae3ddc27..555e90e189bc6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java @@ -13,10 +13,7 @@ import lombok.SneakyThrows; import lombok.Value; - -/** - * In memory ranker that re-ranks results returned by the search backend - */ +/** In memory ranker that re-ranks results returned by the search backend */ public abstract class SearchRanker<U extends Comparable<? super U>> { /** @@ -25,18 +22,19 @@ public abstract class SearchRanker<U extends Comparable<? super U>> { public abstract List<FeatureExtractor> getFeatureExtractors(); /** - * Return a comparable score for each entity returned by search backend. The ranker will rank based on this score + * Return a comparable score for each entity returned by search backend. 
The ranker will rank + * based on this score */ public abstract U score(SearchEntity searchEntity); - /** - * Rank the input list of entities - */ + /** Rank the input list of entities */ public List<SearchEntity> rank(List<SearchEntity> originalList) { List<SearchEntity> entitiesToRank = originalList; if (!getFeatureExtractors().isEmpty()) { - entitiesToRank = Streams.zip(originalList.stream(), fetchFeatures(originalList).stream(), this::updateFeatures) - .collect(Collectors.toList()); + entitiesToRank = + Streams.zip( + originalList.stream(), fetchFeatures(originalList).stream(), this::updateFeatures) + .collect(Collectors.toList()); } return entitiesToRank.stream() .map(entity -> new ScoredEntity<>(entity, score(entity))) @@ -45,26 +43,30 @@ public List<SearchEntity> rank(List<SearchEntity> originalList) { .collect(Collectors.toList()); } - /** - * Fetch features for each entity returned using the feature extractors - */ + /** Fetch features for each entity returned using the feature extractors */ private List<Features> fetchFeatures(List<SearchEntity> originalList) { List<Features> originalFeatures = - originalList.stream().map(SearchEntity::getFeatures).map(Features::from).collect(Collectors.toList()); - return ConcurrencyUtils.transformAndCollectAsync(getFeatureExtractors(), - extractor -> extractor.extractFeatures(originalList)).stream().reduce(originalFeatures, Features::merge); + originalList.stream() + .map(SearchEntity::getFeatures) + .map(Features::from) + .collect(Collectors.toList()); + return ConcurrencyUtils.transformAndCollectAsync( + getFeatureExtractors(), extractor -> extractor.extractFeatures(originalList)) + .stream() + .reduce(originalFeatures, Features::merge); } - /** - * Add the extracted features into each search entity to return the features in the response - */ + /** Add the extracted features into each search entity to return the features in the response */ @SneakyThrows private SearchEntity updateFeatures(SearchEntity originalEntity, Features features) { - return originalEntity.clone() - .setFeatures(new DoubleMap(features.getNumericFeatures() - .entrySet() - .stream() - .collect(Collectors.toMap(entry -> entry.getKey().toString(), Map.Entry::getValue)))); + return originalEntity + .clone() + .setFeatures( + new DoubleMap( + features.getNumericFeatures().entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), Map.Entry::getValue)))); } @Value diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java index 7d009495262cf..c3ab1b49f0e07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java @@ -6,9 +6,9 @@ import java.util.List; import java.util.Optional; - /** - * Simple ranker that diversifies the results between different entities. For the same entity, returns the same order from elasticsearch + * Simple ranker that diversifies the results between different entities. 
For the same entity, + * returns the same order from elasticsearch */ public class SimpleRanker extends SearchRanker<Double> { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java index 49809cf933936..bfeb993390571 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java @@ -14,20 +14,16 @@ import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType; import com.linkedin.metadata.models.extractor.FieldExtractor; - import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - - /** * Class that provides a utility function that transforms the snapshot object into a search document */ @@ -47,24 +43,33 @@ public class SearchDocumentTransformer { private SystemEntityClient entityClient; - private static final String BROWSE_PATH_V2_DELIMITER = "␟"; + private static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public Optional<String> transformSnapshot(final RecordTemplate snapshot, final EntitySpec entitySpec, - final Boolean forDelete) { + public Optional<String> transformSnapshot( + final RecordTemplate snapshot, final EntitySpec entitySpec, final Boolean forDelete) { final Map<SearchableFieldSpec, List<Object>> extractedSearchableFields = - FieldExtractor.extractFieldsFromSnapshot(snapshot, entitySpec, AspectSpec::getSearchableFieldSpecs, maxValueLength).entrySet() - // Delete expects urn to be preserved - .stream().filter(entry -> !forDelete || !"urn".equals(entry.getKey().getSearchableAnnotation().getFieldName())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + FieldExtractor.extractFieldsFromSnapshot( + snapshot, entitySpec, AspectSpec::getSearchableFieldSpecs, maxValueLength) + .entrySet() + // Delete expects urn to be preserved + .stream() + .filter( + entry -> + !forDelete + || !"urn".equals(entry.getKey().getSearchableAnnotation().getFieldName())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); final Map<SearchScoreFieldSpec, List<Object>> extractedSearchScoreFields = - FieldExtractor.extractFieldsFromSnapshot(snapshot, entitySpec, AspectSpec::getSearchScoreFieldSpecs, maxValueLength); + FieldExtractor.extractFieldsFromSnapshot( + snapshot, entitySpec, AspectSpec::getSearchScoreFieldSpecs, maxValueLength); if (extractedSearchableFields.isEmpty() && extractedSearchScoreFields.isEmpty()) { return Optional.empty(); } final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", snapshot.data().get("urn").toString()); - extractedSearchableFields.forEach((key, value) -> setSearchableValue(key, value, searchDocument, forDelete)); - extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); + extractedSearchableFields.forEach( + (key, value) -> setSearchableValue(key, value, searchDocument, forDelete)); + extractedSearchScoreFields.forEach( + (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); return 
Optional.of(searchDocument.toString()); } @@ -83,51 +88,71 @@ public Optional<String> transformAspect( if (!extractedSearchableFields.isEmpty() || !extractedSearchScoreFields.isEmpty()) { final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", urn.toString()); - extractedSearchableFields.forEach((key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); - extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); + extractedSearchableFields.forEach( + (key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); + extractedSearchScoreFields.forEach( + (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); result = Optional.of(searchDocument.toString()); } return result; } - public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List<Object> fieldValues, - final ObjectNode searchDocument, final Boolean forDelete) { + public void setSearchableValue( + final SearchableFieldSpec fieldSpec, + final List<Object> fieldValues, + final ObjectNode searchDocument, + final Boolean forDelete) { DataSchema.Type valueType = fieldSpec.getPegasusSchema().getType(); Optional<Object> firstValue = fieldValues.stream().findFirst(); boolean isArray = fieldSpec.isArray(); // Set hasValues field if exists - fieldSpec.getSearchableAnnotation().getHasValuesFieldName().ifPresent(fieldName -> { - if (forDelete) { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(false)); - return; - } - if (valueType == DataSchema.Type.BOOLEAN) { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode((Boolean) firstValue.orElse(false))); - } else { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(!fieldValues.isEmpty())); - } - }); + fieldSpec + .getSearchableAnnotation() + .getHasValuesFieldName() + .ifPresent( + fieldName -> { + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(false)); + return; + } + if (valueType == DataSchema.Type.BOOLEAN) { + searchDocument.set( + fieldName, + JsonNodeFactory.instance.booleanNode((Boolean) firstValue.orElse(false))); + } else { + searchDocument.set( + fieldName, JsonNodeFactory.instance.booleanNode(!fieldValues.isEmpty())); + } + }); // Set numValues field if exists - fieldSpec.getSearchableAnnotation().getNumValuesFieldName().ifPresent(fieldName -> { - if (forDelete) { - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) 0)); - return; - } - switch (valueType) { - case INT: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) firstValue.orElse(0))); - break; - case LONG: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Long) firstValue.orElse(0L))); - break; - default: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode(fieldValues.size())); - break; - } - }); + fieldSpec + .getSearchableAnnotation() + .getNumValuesFieldName() + .ifPresent( + fieldName -> { + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) 0)); + return; + } + switch (valueType) { + case INT: + searchDocument.set( + fieldName, + JsonNodeFactory.instance.numberNode((Integer) firstValue.orElse(0))); + break; + case LONG: + searchDocument.set( + fieldName, JsonNodeFactory.instance.numberNode((Long) firstValue.orElse(0L))); + break; + default: + searchDocument.set( + fieldName, JsonNodeFactory.instance.numberNode(fieldValues.size())); + 
break; + } + }); final String fieldName = fieldSpec.getSearchableAnnotation().getFieldName(); final FieldType fieldType = fieldSpec.getSearchableAnnotation().getFieldType(); @@ -143,26 +168,35 @@ public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List<O searchDocument.set(fieldName, JsonNodeFactory.instance.textNode(browsePathV2Value)); } else { ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxArrayLength)) - .forEach(value -> getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxArrayLength)) + .forEach( + value -> getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); searchDocument.set(fieldName, arrayNode); } } else if (valueType == DataSchema.Type.MAP) { ObjectNode dictDoc = JsonNodeFactory.instance.objectNode(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxObjectKeys)).forEach(fieldValue -> { - String[] keyValues = fieldValue.toString().split("="); - String key = keyValues[0]; - String value = keyValues[1]; - dictDoc.put(key, value); - }); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxObjectKeys)) + .forEach( + fieldValue -> { + String[] keyValues = fieldValue.toString().split("="); + String key = keyValues[0]; + String value = keyValues[1]; + dictDoc.put(key, value); + }); searchDocument.set(fieldName, dictDoc); } else if (!fieldValues.isEmpty()) { - getNodeForValue(valueType, fieldValues.get(0), fieldType).ifPresent(node -> searchDocument.set(fieldName, node)); + getNodeForValue(valueType, fieldValues.get(0), fieldType) + .ifPresent(node -> searchDocument.set(fieldName, node)); } } - public void setSearchScoreValue(final SearchScoreFieldSpec fieldSpec, final List<Object> fieldValues, - final ObjectNode searchDocument, final Boolean forDelete) { + public void setSearchScoreValue( + final SearchScoreFieldSpec fieldSpec, + final List<Object> fieldValues, + final ObjectNode searchDocument, + final Boolean forDelete) { DataSchema.Type valueType = fieldSpec.getPegasusSchema().getType(); final String fieldName = fieldSpec.getSearchScoreAnnotation().getFieldName(); @@ -193,12 +227,14 @@ public void setSearchScoreValue(final SearchScoreFieldSpec fieldSpec, final List default: // Only the above types are supported throw new IllegalArgumentException( - String.format("SearchScore fields must be a numeric type: field %s, value %s", fieldName, fieldValue)); + String.format( + "SearchScore fields must be a numeric type: field %s, value %s", + fieldName, fieldValue)); } } - private Optional<JsonNode> getNodeForValue(final DataSchema.Type schemaFieldType, final Object fieldValue, - final FieldType fieldType) { + private Optional<JsonNode> getNodeForValue( + final DataSchema.Type schemaFieldType, final Object fieldValue, final FieldType fieldType) { switch (schemaFieldType) { case BOOLEAN: return Optional.of(JsonNodeFactory.instance.booleanNode((Boolean) fieldValue)); @@ -206,30 +242,34 @@ private Optional<JsonNode> getNodeForValue(final DataSchema.Type schemaFieldType return Optional.of(JsonNodeFactory.instance.numberNode((Integer) fieldValue)); case LONG: return Optional.of(JsonNodeFactory.instance.numberNode((Long) fieldValue)); - // By default run toString + // By default run toString default: String value = fieldValue.toString(); // If index type is BROWSE_PATH, make sure the value starts with a slash if (fieldType == FieldType.BROWSE_PATH && !value.startsWith("/")) { value = "/" + 
value; } - return value.isEmpty() ? Optional.empty() + return value.isEmpty() + ? Optional.empty() : Optional.of(JsonNodeFactory.instance.textNode(fieldValue.toString())); } } /** - * The browsePathsV2 aspect is a list of objects and the @Searchable annotation specifies a - * list of strings that we receive. However, we want to aggregate those strings and store - * as a single string in ElasticSearch so we can do prefix matching against it. + * The browsePathsV2 aspect is a list of objects and the @Searchable annotation specifies a list + * of strings that we receive. However, we want to aggregate those strings and store as a single + * string in ElasticSearch so we can do prefix matching against it. */ private String getBrowsePathV2Value(@Nonnull final List<Object> fieldValues) { List<String> stringValues = new ArrayList<>(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxArrayLength)).forEach(value -> { - if (value instanceof String) { - stringValues.add((String) value); - } - }); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxArrayLength)) + .forEach( + value -> { + if (value instanceof String) { + stringValues.add((String) value); + } + }); String aggregatedValue = String.join(BROWSE_PATH_V2_DELIMITER, stringValues); // ensure browse path v2 starts with our delimiter if it's not empty if (!aggregatedValue.equals("") && !aggregatedValue.startsWith(BROWSE_PATH_V2_DELIMITER)) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java index a4b59c30607a3..af0f537de8629 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java @@ -23,39 +23,59 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class BrowsePathUtils { public static String getDefaultBrowsePath( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, - @Nonnull Character dataPlatformDelimiter) throws URISyntaxException { + @Nonnull Character dataPlatformDelimiter) + throws URISyntaxException { switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataPlatformKey dpKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKey( - dsKey.getPlatform(), - getKeyAspectSpec(dsKey.getPlatform().getEntityType(), - entityRegistry)); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataPlatformKey dpKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKey( + dsKey.getPlatform(), + getKeyAspectSpec(dsKey.getPlatform().getEntityType(), entityRegistry)); String datasetNamePath = getDatasetPath(dsKey.getName(), dataPlatformDelimiter); - return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + datasetNamePath).toLowerCase(); + return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + datasetNamePath) + .toLowerCase(); case Constants.CHART_ENTITY_NAME: - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return ("/" + chartKey.getDashboardTool()); case 
Constants.DASHBOARD_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return ("/" + dashboardKey.getDashboardTool()).toLowerCase(); case Constants.DATA_FLOW_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster()) - .toLowerCase(); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster()).toLowerCase(); case Constants.DATA_JOB_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getCluster()).toLowerCase(); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getCluster()) + .toLowerCase(); default: return ""; } @@ -65,60 +85,130 @@ public static String getDefaultBrowsePath( public static Urn buildDataPlatformUrn(Urn urn, EntityRegistry entityRegistry) { switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return dsKey.getPlatform(); case Constants.CHART_ENTITY_NAME: - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, chartKey.getDashboardTool())); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, chartKey.getDashboardTool())); case Constants.DASHBOARD_ENTITY_NAME: - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, dashboardKey.getDashboardTool())); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + 
"urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, dashboardKey.getDashboardTool())); case Constants.DATA_FLOW_ENTITY_NAME: - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, dataFlowKey.getOrchestrator())); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, dataFlowKey.getOrchestrator())); case Constants.DATA_JOB_ENTITY_NAME: - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, parentFlowKey.getOrchestrator())); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, parentFlowKey.getOrchestrator())); case Constants.NOTEBOOK_ENTITY_NAME: - NotebookKey notebookKey = (NotebookKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, notebookKey.getNotebookTool())); + NotebookKey notebookKey = + (NotebookKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, notebookKey.getNotebookTool())); default: // Could not resolve a data platform return null; } } - public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRegistry) throws URISyntaxException { + public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRegistry) + throws URISyntaxException { switch (urn.getEntityType()) { case "dataset": - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataPlatformKey dpKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKey( - dsKey.getPlatform(), - getKeyAspectSpec(dsKey.getPlatform().getEntityType(), - entityRegistry)); - return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + "/" - + dsKey.getName()).replace('.', '/').toLowerCase(); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataPlatformKey dpKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKey( + dsKey.getPlatform(), + getKeyAspectSpec(dsKey.getPlatform().getEntityType(), entityRegistry)); + return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + "/" + dsKey.getName()) + .replace('.', '/') + .toLowerCase(); case "chart": - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, 
getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + chartKey.getDashboardTool() + "/" + chartKey.getChartId()).toLowerCase(); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + chartKey.getDashboardTool() + "/" + chartKey.getChartId()).toLowerCase(); case "dashboard": - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dashboardKey.getDashboardTool() + "/" + dashboardKey.getDashboardId()).toLowerCase(); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + dashboardKey.getDashboardTool() + "/" + dashboardKey.getDashboardId()) + .toLowerCase(); case "dataFlow": - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster() + "/" + dataFlowKey.getFlowId()) + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + + dataFlowKey.getOrchestrator() + + "/" + + dataFlowKey.getCluster() + + "/" + + dataFlowKey.getFlowId()) .toLowerCase(); case "dataJob": - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getFlowId() + "/" - + dataJobKey.getJobId()).toLowerCase(); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return ("/" + + parentFlowKey.getOrchestrator() + + "/" + + parentFlowKey.getFlowId() + + "/" + + dataJobKey.getJobId()) + .toLowerCase(); case "glossaryTerm": // TODO: Is this the best way to represent glossary term key? - GlossaryTermKey glossaryTermKey = (GlossaryTermKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + GlossaryTermKey glossaryTermKey = + (GlossaryTermKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return "/" + glossaryTermKey.getName().replace('.', '/').toLowerCase(); default: return ""; @@ -126,26 +216,28 @@ public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRe } /** - * Attempts to convert a dataset name into a proper browse path by splitting it using the Data Platform delimiter. - * If there are not > 1 name parts, then an empty string will be returned. + * Attempts to convert a dataset name into a proper browse path by splitting it using the Data + * Platform delimiter. If there are not > 1 name parts, then an empty string will be returned. 
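
A minimal standalone sketch of the splitting behavior described above (the class and method
names below are illustrative only, not part of this patch): a dataset name such as
"warehouse.public.orders" with a '.' delimiter yields "/warehouse/public", dropping the leaf name.

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

class DatasetPathSketch {
  // Split on the platform delimiter, drop the leaf name, join the rest with '/'.
  static String datasetPath(String datasetName, Character delimiter) {
    if (!datasetName.contains(delimiter.toString())) {
      return ""; // fewer than two name parts -> empty path
    }
    List<String> parts = Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString())));
    String path = String.join("/", parts.subList(0, parts.size() - 1));
    return path.startsWith("/") ? path : "/" + path;
  }

  public static void main(String[] args) {
    System.out.println(datasetPath("warehouse.public.orders", '.')); // prints /warehouse/public
    System.out.println(datasetPath("orders", '.')); // prints an empty string
  }
}
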
*/ - private static String getDatasetPath(@Nonnull final String datasetName, @Nonnull final Character delimiter) { + private static String getDatasetPath( + @Nonnull final String datasetName, @Nonnull final Character delimiter) { if (datasetName.contains(delimiter.toString())) { - final List<String> datasetNamePathParts = Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString()))); + final List<String> datasetNamePathParts = + Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString()))); System.out.println(datasetNamePathParts); // Omit the name from the path. - final String datasetPath = String.join("/", datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1)); + final String datasetPath = + String.join("/", datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1)); return datasetPath.startsWith("/") ? datasetPath : String.format("/%s", datasetPath); } return ""; } protected static AspectSpec getKeyAspectSpec( - final String entityName, - final EntityRegistry registry) { + final String entityName, final EntityRegistry registry) { final EntitySpec spec = registry.getEntitySpec(entityName); return spec.getKeyAspectSpec(); } - private BrowsePathUtils() { } -} \ No newline at end of file + private BrowsePathUtils() {} +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java index a7f5ea7a51e29..961167663e11f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.BrowsePathEntry; import com.linkedin.common.BrowsePathEntryArray; import com.linkedin.common.BrowsePathsV2; @@ -14,10 +16,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; @@ -25,52 +23,63 @@ import java.util.List; import java.util.regex.Pattern; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class BrowsePathV2Utils { - final private static String DEFAULT_FOLDER_NAME = "Default"; + private static final String DEFAULT_FOLDER_NAME = "Default"; /** * Generates a default browsePathsV2 aspect for a given urn. * - * If the entity has containers, get its whole container path and set those urns in the path of browsePathsV2. - * If it's a dataset, generate the path from the dataset name like we do for default browsePaths V1. - * If it's a data job, set its parent data flow in the path. - * For everything else, place it in a "Default" folder so we can still navigate to it through browse in the UI. - * This default method should be unneeded once ingestion produces higher quality browsePathsV2 aspects. + * <p>If the entity has containers, get its whole container path and set those urns in the path of + * browsePathsV2. If it's a dataset, generate the path from the dataset name like we do for + * default browsePaths V1. 
If it's a data job, set its parent data flow in the path. For + * everything else, place it in a "Default" folder so we can still navigate to it through browse + * in the UI. This default method should be unneeded once ingestion produces higher quality + * browsePathsV2 aspects. */ public static BrowsePathsV2 getDefaultBrowsePathV2( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, @Nonnull Character dataPlatformDelimiter, @Nonnull EntityService entityService, - boolean useContainerPaths) throws URISyntaxException { + boolean useContainerPaths) + throws URISyntaxException { BrowsePathsV2 result = new BrowsePathsV2(); BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - BrowsePathEntryArray datasetContainerPathEntries = useContainerPaths ? getContainerPathEntries(urn, entityService) : null; + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + BrowsePathEntryArray datasetContainerPathEntries = + useContainerPaths ? getContainerPathEntries(urn, entityService) : null; if (useContainerPaths && datasetContainerPathEntries.size() > 0) { browsePathEntries.addAll(datasetContainerPathEntries); } else { - BrowsePathEntryArray defaultDatasetPathEntries = getDefaultDatasetPathEntries(dsKey.getName(), dataPlatformDelimiter); + BrowsePathEntryArray defaultDatasetPathEntries = + getDefaultDatasetPathEntries(dsKey.getName(), dataPlatformDelimiter); if (defaultDatasetPathEntries.size() > 0) { - browsePathEntries.addAll(getDefaultDatasetPathEntries(dsKey.getName().toLowerCase(), dataPlatformDelimiter)); + browsePathEntries.addAll( + getDefaultDatasetPathEntries(dsKey.getName().toLowerCase(), dataPlatformDelimiter)); } else { browsePathEntries.add(createBrowsePathEntry(DEFAULT_FOLDER_NAME, null)); } } break; - // Some sources produce charts and dashboards with containers. If we have containers, use them, otherwise use default folder + // Some sources produce charts and dashboards with containers. If we have containers, use + // them, otherwise use default folder case Constants.CHART_ENTITY_NAME: case Constants.DASHBOARD_ENTITY_NAME: - BrowsePathEntryArray containerPathEntries = useContainerPaths ? getContainerPathEntries(urn, entityService) : null; + BrowsePathEntryArray containerPathEntries = + useContainerPaths ? 
getContainerPathEntries(urn, entityService) : null; if (useContainerPaths && containerPathEntries.size() > 0) { browsePathEntries.addAll(containerPathEntries); } else { @@ -78,8 +87,12 @@ public static BrowsePathsV2 getDefaultBrowsePathV2( } break; case Constants.DATA_JOB_ENTITY_NAME: - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - browsePathEntries.add(createBrowsePathEntry(dataJobKey.getFlow().toString(), dataJobKey.getFlow())); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + browsePathEntries.add( + createBrowsePathEntry(dataJobKey.getFlow().toString(), dataJobKey.getFlow())); break; default: browsePathEntries.add(createBrowsePathEntry(DEFAULT_FOLDER_NAME, null)); @@ -99,15 +112,15 @@ private static BrowsePathEntry createBrowsePathEntry(@Nonnull String id, @Nullab return pathEntry; } - private static void aggregateParentContainers(List<Urn> containerUrns, Urn entityUrn, EntityService entityService) { + private static void aggregateParentContainers( + List<Urn> containerUrns, Urn entityUrn, EntityService entityService) { try { - EntityResponse entityResponse = entityService.getEntityV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME) - ); + EntityResponse entityResponse = + entityService.getEntityV2( + entityUrn.getEntityType(), entityUrn, Collections.singleton(CONTAINER_ASPECT_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); @@ -116,50 +129,58 @@ private static void aggregateParentContainers(List<Urn> containerUrns, Urn entit aggregateParentContainers(containerUrns, containerUrn, entityService); } } catch (Exception e) { - log.error(String.format("Error getting containers for entity with urn %s while adding default browsePathV2", entityUrn), e); + log.error( + String.format( + "Error getting containers for entity with urn %s while adding default browsePathV2", + entityUrn), + e); } } /** - * Gets the path of containers for a given entity to create a browsePathV2 off of. - * Recursively call aggregateParentContainers to get the full container path to be included in this path. + * Gets the path of containers for a given entity to create a browsePathV2 off of. Recursively + * call aggregateParentContainers to get the full container path to be included in this path. 
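
The recursion referenced here is easier to follow with a toy stand-in for the entity service
(a sketch under assumptions: the map below plays the role of the container-aspect lookup, and all
names are invented). Each parent container urn is prepended here so the outermost container ends
up first; the exact ordering in the real method is not visible in this hunk.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class ContainerChainSketch {
  // Stand-in for the container aspect lookup: child urn -> parent container urn.
  static final Map<String, String> PARENT =
      Map.of(
          "urn:li:dataset:orders", "urn:li:container:schema",
          "urn:li:container:schema", "urn:li:container:database");

  // Walk up the chain, prepending each parent so the outermost container comes first.
  static void aggregateParents(List<String> acc, String urn) {
    String parent = PARENT.get(urn);
    if (parent != null) {
      acc.add(0, parent);
      aggregateParents(acc, parent);
    }
  }

  public static void main(String[] args) {
    List<String> path = new ArrayList<>();
    aggregateParents(path, "urn:li:dataset:orders");
    System.out.println(path); // [urn:li:container:database, urn:li:container:schema]
  }
}
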
*/ - private static BrowsePathEntryArray getContainerPathEntries(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { + private static BrowsePathEntryArray getContainerPathEntries( + @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); final List<Urn> containerUrns = new ArrayList<>(); aggregateParentContainers(containerUrns, entityUrn, entityService); - containerUrns.forEach(urn -> { - browsePathEntries.add(createBrowsePathEntry(urn.toString(), urn)); - }); + containerUrns.forEach( + urn -> { + browsePathEntries.add(createBrowsePathEntry(urn.toString(), urn)); + }); return browsePathEntries; } /** - * Attempts to convert a dataset name into a proper browse path by splitting it using the Data Platform delimiter. - * If there are not > 1 name parts, then an empty string will be returned. + * Attempts to convert a dataset name into a proper browse path by splitting it using the Data + * Platform delimiter. If there are not > 1 name parts, then an empty string will be returned. */ - private static BrowsePathEntryArray getDefaultDatasetPathEntries(@Nonnull final String datasetName, @Nonnull final Character delimiter) { + private static BrowsePathEntryArray getDefaultDatasetPathEntries( + @Nonnull final String datasetName, @Nonnull final Character delimiter) { BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); if (datasetName.contains(delimiter.toString())) { - final List<String> datasetNamePathParts = Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString()))) + final List<String> datasetNamePathParts = + Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString()))) .filter((name) -> !name.isEmpty()) .collect(Collectors.toList()); // Omit the name from the path. 
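
Unlike the V1 helper shown earlier, this V2 variant filters out empty segments and returns
structured path entries rather than a slash-joined string. A rough sketch (plain strings stand in
for BrowsePathEntry objects; names are illustrative, not part of this change):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

class DatasetPathEntriesSketch {
  // "warehouse.public.orders" with '.' -> [warehouse, public]; the leaf name is omitted.
  static List<String> pathEntries(String datasetName, Character delimiter) {
    List<String> entries = new ArrayList<>();
    if (datasetName.contains(delimiter.toString())) {
      List<String> parts =
          Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString())))
              .filter(name -> !name.isEmpty())
              .collect(Collectors.toList());
      entries.addAll(parts.subList(0, parts.size() - 1));
    }
    return entries;
  }

  public static void main(String[] args) {
    System.out.println(pathEntries("warehouse.public.orders", '.')); // [warehouse, public]
    System.out.println(pathEntries(".hidden.table", '.')); // [hidden], empty segment dropped
  }
}
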
- datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1).forEach((part -> { - browsePathEntries.add(createBrowsePathEntry(part, null)); - })); + datasetNamePathParts + .subList(0, datasetNamePathParts.size() - 1) + .forEach( + (part -> { + browsePathEntries.add(createBrowsePathEntry(part, null)); + })); } return browsePathEntries; } protected static AspectSpec getKeyAspectSpec( - final String entityName, - final EntityRegistry registry) { + final String entityName, final EntityRegistry registry) { final EntitySpec spec = registry.getEntitySpec(entityName); return spec.getKeyAspectSpec(); } - private BrowsePathV2Utils() { - - } + private BrowsePathV2Utils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 53765acb8e29e..982b5c8d5f367 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; +import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.EntitySpec; @@ -19,8 +23,8 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RequestOptions; import org.apache.commons.lang.StringUtils; +import org.opensearch.client.RequestOptions; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -35,14 +39,7 @@ import org.opensearch.search.suggest.SuggestionBuilder; import org.opensearch.search.suggest.term.TermSuggestionBuilder; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; -import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; - - -/** - * TODO: Add more robust unit tests for this critical class. - */ +/** TODO: Add more robust unit tests for this critical class. 
*/ @Slf4j public class ESUtils { @@ -64,33 +61,43 @@ public class ESUtils { public static final String TOKEN_COUNT_FIELD_TYPE = "token_count"; // End of field types - public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_KEYWORD = Set.of( - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.WORD_GRAM); - public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_TEXT = Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH, - SearchableAnnotation.FieldType.BROWSE_PATH_V2, - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.URN_PARTIAL); + public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_KEYWORD = + Set.of( + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.WORD_GRAM); + public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_TEXT = + Set.of( + SearchableAnnotation.FieldType.BROWSE_PATH, + SearchableAnnotation.FieldType.BROWSE_PATH_V2, + SearchableAnnotation.FieldType.URN, + SearchableAnnotation.FieldType.URN_PARTIAL); public static final String ENTITY_NAME_FIELD = "_entityName"; public static final String NAME_SUGGESTION = "nameSuggestion"; - // we use this to make sure we filter for editable & non-editable fields. Also expands out top-level properties + // we use this to make sure we filter for editable & non-editable fields. Also expands out + // top-level properties // to field level properties - public static final Map<String, List<String>> FIELDS_TO_EXPANDED_FIELDS_LIST = new HashMap<String, List<String>>() {{ - put("tags", ImmutableList.of("tags", "fieldTags", "editedFieldTags")); - put("glossaryTerms", ImmutableList.of("glossaryTerms", "fieldGlossaryTerms", "editedFieldGlossaryTerms")); - put("fieldTags", ImmutableList.of("fieldTags", "editedFieldTags")); - put("fieldGlossaryTerms", ImmutableList.of("fieldGlossaryTerms", "editedFieldGlossaryTerms")); - put("fieldDescriptions", ImmutableList.of("fieldDescriptions", "editedFieldDescriptions")); - put("description", ImmutableList.of("description", "editedDescription")); - }}; - - public static final Set<String> BOOLEAN_FIELDS = ImmutableSet.of( - "removed" - ); + public static final Map<String, List<String>> FIELDS_TO_EXPANDED_FIELDS_LIST = + new HashMap<String, List<String>>() { + { + put("tags", ImmutableList.of("tags", "fieldTags", "editedFieldTags")); + put( + "glossaryTerms", + ImmutableList.of("glossaryTerms", "fieldGlossaryTerms", "editedFieldGlossaryTerms")); + put("fieldTags", ImmutableList.of("fieldTags", "editedFieldTags")); + put( + "fieldGlossaryTerms", + ImmutableList.of("fieldGlossaryTerms", "editedFieldGlossaryTerms")); + put( + "fieldDescriptions", + ImmutableList.of("fieldDescriptions", "editedFieldDescriptions")); + put("description", ImmutableList.of("description", "editedDescription")); + } + }; + + public static final Set<String> BOOLEAN_FIELDS = ImmutableSet.of("removed"); /* * Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/regexp-syntax.html for list of reserved @@ -98,17 +105,17 @@ public class ESUtils { */ private static final String ELASTICSEARCH_REGEXP_RESERVED_CHARACTERS = "?+*|{}[]()#@&<>~"; - private ESUtils() { - - } + private ESUtils() {} /** * Constructs the filter query given filter map. 
* - * <p>Multiple values can be selected for a filter, and it is currently modeled as string separated by comma + * <p>Multiple values can be selected for a filter, and it is currently modeled as string + * separated by comma * * @param filter the search filter - * @param isTimeseries whether filtering on timeseries index which has differing field type conventions + * @param isTimeseries whether filtering on timeseries index which has differing field type + * conventions * @return built filter query */ @Nonnull @@ -119,65 +126,82 @@ public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean } if (filter.getOr() != null) { // If caller is using the new Filters API, build boolean query from that. - filter.getOr().forEach(or -> finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); + filter + .getOr() + .forEach( + or -> + finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); } else if (filter.getCriteria() != null) { // Otherwise, build boolean query from the deprecated "criteria" field. log.warn("Received query Filter with a deprecated field 'criteria'. Use 'or' instead."); final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); - filter.getCriteria().forEach(criterion -> { - if (!criterion.getValue().trim().isEmpty() || criterion.hasValues() - || criterion.getCondition() == Condition.IS_NULL) { - andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } - }); + filter + .getCriteria() + .forEach( + criterion -> { + if (!criterion.getValue().trim().isEmpty() + || criterion.hasValues() + || criterion.getCondition() == Condition.IS_NULL) { + andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } + }); finalQueryBuilder.should(andQueryBuilder); } return finalQueryBuilder; } @Nonnull - public static BoolQueryBuilder buildConjunctiveFilterQuery(@Nonnull ConjunctiveCriterion conjunctiveCriterion, - boolean isTimeseries) { + public static BoolQueryBuilder buildConjunctiveFilterQuery( + @Nonnull ConjunctiveCriterion conjunctiveCriterion, boolean isTimeseries) { final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); - conjunctiveCriterion.getAnd().forEach(criterion -> { - if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) - || !criterion.getValue().trim().isEmpty() || criterion.hasValues()) { - if (!criterion.isNegated()) { - // `filter` instead of `must` (enables caching and bypasses scoring) - andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } else { - andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } - } - }); + conjunctiveCriterion + .getAnd() + .forEach( + criterion -> { + if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) + || !criterion.getValue().trim().isEmpty() + || criterion.hasValues()) { + if (!criterion.isNegated()) { + // `filter` instead of `must` (enables caching and bypasses scoring) + andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } else { + andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } + } + }); return andQueryBuilder; } /** - * Builds search query given a {@link Criterion}, containing field, value and association/condition between the two. + * Builds search query given a {@link Criterion}, containing field, value and + * association/condition between the two. 
   *
-   * <p>If the condition between a field and value (specified in {@link Criterion}) is EQUAL, we construct a Terms query.
-   * In this case, a field can take multiple values, specified using comma as a delimiter - this method will split
-   * tokens accordingly. This is done because currently there is no support of associating two different {@link Criterion}
-   * in a {@link Filter} with an OR operator - default operator is AND.
+   * <p>If the condition between a field and value (specified in {@link Criterion}) is EQUAL, we
+   * construct a Terms query. In this case, a field can take multiple values, specified using comma
+   * as a delimiter - this method will split tokens accordingly. This is done because currently
+   * there is no support of associating two different {@link Criterion} in a {@link Filter} with an
+   * OR operator - default operator is AND.
    *
-   * <p>This approach of supporting multiple values using comma as delimiter, prevents us from specifying a value that has comma
-   * as one of it's characters. This is particularly true when one of the values is an urn e.g. "urn:li:example:(1,2,3)".
-   * Hence we do not split the value (using comma as delimiter) if the value starts with "urn:li:".
-   * TODO(https://github.com/datahub-project/datahub-gma/issues/51): support multiple values a field can take without using
-   * delimiters like comma.
+   * <p>This approach of supporting multiple values using comma as a delimiter prevents us from
+   * specifying a value that has comma as one of its characters. This is particularly true when one
+   * of the values is an urn e.g. "urn:li:example:(1,2,3)". Hence we do not split the value (using
+   * comma as a delimiter) if the value starts with "urn:li:".
+   * TODO(https://github.com/datahub-project/datahub-gma/issues/51): support multiple values a field
+   * can take without using delimiters like comma.
    *
-   * <p>If the condition between a field and value is not the same as EQUAL, a Range query is constructed. This
-   * condition does not support multiple values for the same field.
+   * <p>If the condition between a field and value is not the same as EQUAL, a Range query is
+   * constructed. This condition does not support multiple values for the same field.
    *
-   * <p>When CONTAIN, START_WITH and END_WITH conditions are used, the underlying logic is using wildcard query which is
-   * not performant according to ES. For details, please refer to:
+   * <p>When CONTAIN, START_WITH and END_WITH conditions are used, the underlying logic is using
+   * wildcard query which is not performant according to ES. For details, please refer to:
    * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-wildcard-query.html#wildcard-query-field-params
    *
-   * @param criterion {@link Criterion} single criterion which contains field, value and a comparison operator
+   * @param criterion {@link Criterion} single criterion which contains field, value and a
+   *     comparison operator
    */
  @Nonnull
-  public static QueryBuilder getQueryBuilderFromCriterion(@Nonnull final Criterion criterion, boolean isTimeseries) {
+  public static QueryBuilder getQueryBuilderFromCriterion(
+      @Nonnull final Criterion criterion, boolean isTimeseries) {
    final String fieldName = toFacetField(criterion.getField());

    /*
     * First we handle this expansion, if required, otherwise we build the filter as usual
     * without expansion.
*/ - final Optional<List<String>> maybeFieldToExpand = Optional.ofNullable(FIELDS_TO_EXPANDED_FIELDS_LIST.get( - fieldName)); + final Optional<List<String>> maybeFieldToExpand = + Optional.ofNullable(FIELDS_TO_EXPANDED_FIELDS_LIST.get(fieldName)); if (maybeFieldToExpand.isPresent()) { - return getQueryBuilderFromCriterionForFieldToExpand(maybeFieldToExpand.get(), criterion, isTimeseries); + return getQueryBuilderFromCriterionForFieldToExpand( + maybeFieldToExpand.get(), criterion, isTimeseries); } return getQueryBuilderFromCriterionForSingleField(criterion, isTimeseries); @@ -220,19 +245,21 @@ public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType f /** * Populates source field of search query with the sort order as per the criterion provided. * - * <p> - * If no sort criterion is provided then the default sorting criterion is chosen which is descending order of score - * Furthermore to resolve conflicts, the results are further sorted by ascending order of urn - * If the input sort criterion is urn itself, then no additional sort criterion is applied as there will be no conflicts. - * When sorting, set the unmappedType param to arbitrary "keyword" so we essentially ignore sorting where indices do not - * have the field we are sorting on. - * </p> + * <p>If no sort criterion is provided then the default sorting criterion is chosen which is + * descending order of score Furthermore to resolve conflicts, the results are further sorted by + * ascending order of urn If the input sort criterion is urn itself, then no additional sort + * criterion is applied as there will be no conflicts. When sorting, set the unmappedType param to + * arbitrary "keyword" so we essentially ignore sorting where indices do not have the field we are + * sorting on. * - * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort order + * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort + * order * @param sortCriterion {@link SortCriterion} to be applied to the search results */ - public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuilder, - @Nullable SortCriterion sortCriterion, List<EntitySpec> entitySpecs) { + public static void buildSortOrder( + @Nonnull SearchSourceBuilder searchSourceBuilder, + @Nullable SortCriterion sortCriterion, + List<EntitySpec> entitySpecs) { if (sortCriterion == null) { searchSourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC)); } else { @@ -252,12 +279,17 @@ public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuild } } if (fieldTypeForDefault.isEmpty()) { - log.warn("Sort criterion field " + sortCriterion.getField() + " was not found in any entity spec to be searched"); + log.warn( + "Sort criterion field " + + sortCriterion.getField() + + " was not found in any entity spec to be searched"); } final SortOrder esSortOrder = - (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? SortOrder.ASC + (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + ? 
SortOrder.ASC : SortOrder.DESC; - FieldSortBuilder sortBuilder = new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); + FieldSortBuilder sortBuilder = + new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); if (fieldTypeForDefault.isPresent()) { String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get()); if (esFieldtype != null) { @@ -266,17 +298,22 @@ public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuild } searchSourceBuilder.sort(sortBuilder); } - if (sortCriterion == null || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { - searchSourceBuilder.sort(new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); + if (sortCriterion == null + || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { + searchSourceBuilder.sort( + new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); } } /** - * Populates source field of search query with the suggestions query so that we get search suggestions back. - * Right now we are only supporting suggestions based on the virtual _entityName field alias. + * Populates source field of search query with the suggestions query so that we get search + * suggestions back. Right now we are only supporting suggestions based on the virtual _entityName + * field alias. */ - public static void buildNameSuggestions(@Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable String textInput) { - SuggestionBuilder<TermSuggestionBuilder> builder = SuggestBuilders.termSuggestion(ENTITY_NAME_FIELD).text(textInput); + public static void buildNameSuggestions( + @Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable String textInput) { + SuggestionBuilder<TermSuggestionBuilder> builder = + SuggestBuilders.termSuggestion(ENTITY_NAME_FIELD).text(textInput); SuggestBuilder suggestBuilder = new SuggestBuilder(); suggestBuilder.addSuggestion(NAME_SUGGESTION, builder); searchSourceBuilder.suggest(suggestBuilder); @@ -302,34 +339,43 @@ public static String toFacetField(@Nonnull final String filterField) { } @Nonnull - public static String toKeywordField(@Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { + public static String toKeywordField( + @Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { return skipKeywordSuffix || KEYWORD_FIELDS.contains(filterField) || PATH_HIERARCHY_FIELDS.contains(filterField) - || filterField.contains(".") ? filterField : filterField + ESUtils.KEYWORD_SUFFIX; + || filterField.contains(".") + ? 
filterField + : filterField + ESUtils.KEYWORD_SUFFIX; } - public static RequestOptions buildReindexTaskRequestOptions(String version, String indexName, String tempIndexName) { + public static RequestOptions buildReindexTaskRequestOptions( + String version, String indexName, String tempIndexName) { return RequestOptions.DEFAULT.toBuilder() .addHeader(OPAQUE_ID_HEADER, getOpaqueIdHeaderValue(version, indexName, tempIndexName)) .build(); } - public static String getOpaqueIdHeaderValue(String version, String indexName, String tempIndexName) { + public static String getOpaqueIdHeaderValue( + String version, String indexName, String tempIndexName) { return String.join(HEADER_VALUE_DELIMITER, version, indexName, tempIndexName); } public static boolean prefixMatch(String id, String version, String indexName) { return Optional.ofNullable(id) - .map(t -> t.startsWith(String.join(HEADER_VALUE_DELIMITER, version, indexName))).orElse(false); + .map(t -> t.startsWith(String.join(HEADER_VALUE_DELIMITER, version, indexName))) + .orElse(false); } public static String extractTargetIndex(String id) { return id.split("[" + HEADER_VALUE_DELIMITER + "]", 3)[2]; } - public static void setSearchAfter(SearchSourceBuilder searchSourceBuilder, @Nullable Object[] sort, - @Nullable String pitId, @Nullable String keepAlive) { + public static void setSearchAfter( + SearchSourceBuilder searchSourceBuilder, + @Nullable Object[] sort, + @Nullable String pitId, + @Nullable String keepAlive) { if (sort != null && sort.length > 0) { searchSourceBuilder.searchAfter(sort); } @@ -357,41 +403,61 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( criterionToQuery.setValue(criterion.getValue()); } criterionToQuery.setField(toKeywordField(field, isTimeseries)); - orQueryBuilder.should(getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); + orQueryBuilder.should( + getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); } return orQueryBuilder; } @Nonnull - private static QueryBuilder getQueryBuilderFromCriterionForSingleField(@Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { + private static QueryBuilder getQueryBuilderFromCriterionForSingleField( + @Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { final Condition condition = criterion.getCondition(); final String fieldName = toFacetField(criterion.getField()); if (condition == Condition.IS_NULL) { - return QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(criterion.getField())).queryName(fieldName); + return QueryBuilders.boolQuery() + .mustNot(QueryBuilders.existsQuery(criterion.getField())) + .queryName(fieldName); } else if (condition == Condition.EXISTS) { - return QueryBuilders.boolQuery().must(QueryBuilders.existsQuery(criterion.getField())).queryName(fieldName); + return QueryBuilders.boolQuery() + .must(QueryBuilders.existsQuery(criterion.getField())) + .queryName(fieldName); } else if (criterion.hasValues() || criterion.hasValue()) { if (condition == Condition.EQUAL) { return buildEqualsConditionFromCriterion(fieldName, criterion, isTimeseries); // TODO: Support multi-match on the following operators (using new 'values' field) } else if (condition == Condition.GREATER_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()).gt(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .gt(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.GREATER_THAN_OR_EQUAL_TO) { - return 
QueryBuilders.rangeQuery(criterion.getField()).gte(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .gte(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.LESS_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()).lt(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .lt(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.LESS_THAN_OR_EQUAL_TO) { - return QueryBuilders.rangeQuery(criterion.getField()).lte(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .lte(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.CONTAIN) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*").queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*") + .queryName(fieldName); } else if (condition == Condition.START_WITH) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*").queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*") + .queryName(fieldName); } else if (condition == Condition.END_WITH) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim())).queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim())) + .queryName(fieldName); } } throw new UnsupportedOperationException("Unsupported condition: " + condition); @@ -416,8 +482,8 @@ private static QueryBuilder buildEqualsConditionFromCriterion( } /** - * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which - * was created using the new multi-match 'values' field of Criterion.pdl model. + * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created + * using the new multi-match 'values' field of Criterion.pdl model. */ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( @Nonnull final String fieldName, @@ -432,39 +498,47 @@ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( return QueryBuilders.termQuery(fieldName, Boolean.parseBoolean(criterion.getValues().get(0))) .queryName(fieldName); } - return QueryBuilders.termsQuery(toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) + return QueryBuilders.termsQuery( + toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) .queryName(fieldName); } /** - * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which - * was created using the deprecated 'value' field of Criterion.pdl model. + * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created + * using the deprecated 'value' field of Criterion.pdl model. 
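
The contrast between the two EQUALS paths described in these javadocs can be sketched with plain
strings standing in for the Criterion model (illustrative only, not part of this change): the
legacy 'value' field is comma-split unless it is a urn, while the newer 'values' field is passed
through as-is.

import java.util.Arrays;
import java.util.List;

class EqualsConditionSketch {
  // Legacy single-string path: urns are matched whole, anything else is comma-split.
  static List<String> legacyEqualsTokens(String value) {
    if (value.startsWith("urn:li:")) {
      return List.of(value.trim()); // never split a urn such as "urn:li:example:(1,2,3)"
    }
    return Arrays.asList(value.trim().split("\\s*,\\s*"));
  }

  public static void main(String[] args) {
    System.out.println(legacyEqualsTokens("PROD, DEV ,QA")); // [PROD, DEV, QA]
    System.out.println(legacyEqualsTokens("urn:li:example:(1,2,3)")); // kept whole
  }
}
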
   *
-   * Previously, we supported comma-separate values inside of a single string field,
-   * thus we have to account for splitting and matching against each value below.
+   * <p>Previously, we supported comma-separated values inside of a single string field, thus we have
+   * to account for splitting and matching against each value below.
    *
-   * For all new code, we should be using the new 'values' field for performing multi-match. This
+   * <p>For all new code, we should be using the new 'values' field for performing multi-match. This
    * is simply retained for backwards compatibility of the search API.
    */
  private static QueryBuilder buildEqualsFromCriterionWithValue(
      @Nonnull final String fieldName,
      @Nonnull final Criterion criterion,
      final boolean isTimeseries) {
-    // If the value is an URN style value, then we do not attempt to split it by comma (for obvious reasons)
+    // If the value is an URN style value, then we do not attempt to split it by comma (for obvious
+    // reasons)
    if (isUrn(criterion.getValue())) {
-      return QueryBuilders.matchQuery(toKeywordField(criterion.getField(), isTimeseries), criterion.getValue().trim())
+      return QueryBuilders.matchQuery(
+              toKeywordField(criterion.getField(), isTimeseries), criterion.getValue().trim())
          .queryName(fieldName)
          .analyzer(KEYWORD_ANALYZER);
    }
    final BoolQueryBuilder filters = new BoolQueryBuilder();
    // Cannot assume the existence of a .keyword or other subfield (unless contains `.`)
    // Cannot assume the type of the underlying field or subfield thus KEYWORD_ANALYZER is forced
-    List<String> fields = criterion.getField().contains(".") ? List.of(criterion.getField())
-        : List.of(criterion.getField(), criterion.getField() + ".*");
+    List<String> fields =
+        criterion.getField().contains(".")
+            ? List.of(criterion.getField())
+            : List.of(criterion.getField(), criterion.getField() + ".*");
    Arrays.stream(criterion.getValue().trim().split("\\s*,\\s*"))
-        .forEach(elem -> filters.should(QueryBuilders.multiMatchQuery(elem, fields.toArray(new String[0]))
-            .queryName(fieldName)
-            .analyzer(KEYWORD_ANALYZER)));
+        .forEach(
+            elem ->
+                filters.should(
+                    QueryBuilders.multiMatchQuery(elem, fields.toArray(new String[0]))
+                        .queryName(fieldName)
+                        .analyzer(KEYWORD_ANALYZER)));
    return filters;
  }
}
diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java index b026686f7abfd..9f1041eaaeca3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java @@ -7,9 +7,8 @@ import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; - public class GZIPUtil { - private GZIPUtil() { } + private GZIPUtil() {} public static String gzipDecompress(byte[] gzipped) { String unzipped; @@ -30,7 +29,8 @@ public static String gzipDecompress(byte[] gzipped) { public static byte[] gzipCompress(String unzipped) { byte[] gzipped; - try (ByteArrayInputStream bis = new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8)); + try (ByteArrayInputStream bis = + new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8)); ByteArrayOutputStream bos = new ByteArrayOutputStream(); GZIPOutputStream gzipOutputStream = new GZIPOutputStream(bos)) { byte[] buffer = new byte[1024]; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java index 8b56ae0beb3f1..b8cf0626b7251 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java @@ -32,13 +32,10 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; - @Slf4j public class SearchUtils { - private SearchUtils() { - - } + private SearchUtils() {} public static Optional<String> getDocId(@Nonnull Urn urn) { try { @@ -64,23 +61,31 @@ public static Map<String, String> getRequestMap(@Nullable Filter requestParams) ConjunctiveCriterionArray disjunction = requestParams.getOr(); if (disjunction.size() > 1) { - throw new UnsupportedOperationException("To build request map, there must be only one conjunction group."); + throw new UnsupportedOperationException( + "To build request map, there must be only one conjunction group."); } - CriterionArray criterionArray = disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); + CriterionArray criterionArray = + disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); - criterionArray.forEach(criterion -> { - if (!com.linkedin.metadata.query.filter.Condition.EQUAL.equals(criterion.getCondition())) { - throw new UnsupportedOperationException("Unsupported condition: " + criterion.getCondition()); - } - }); + criterionArray.forEach( + criterion -> { + if (!com.linkedin.metadata.query.filter.Condition.EQUAL.equals( + criterion.getCondition())) { + throw new UnsupportedOperationException( + "Unsupported condition: " + criterion.getCondition()); + } + }); - return criterionArray.stream().collect(Collectors.toMap(Criterion::getField, Criterion::getValue)); + return criterionArray.stream() + .collect(Collectors.toMap(Criterion::getField, Criterion::getValue)); } public static boolean isUrn(@Nonnull String value) { - // TODO(https://github.com/datahub-project/datahub-gma/issues/51): This method is a bit of a hack to support searching for - // URNs that have commas in them, while also using commas a delimiter for search. 
We should stop supporting commas
+    // TODO(https://github.com/datahub-project/datahub-gma/issues/51): This method is a bit of a
+    // hack to support searching for
+    // URNs that have commas in them, while also using commas as a delimiter for search. We should stop
+    // supporting commas
    // as delimiter, and then we can stop using this hack.
    return value.startsWith("urn:li:");
  }
@@ -104,40 +109,52 @@ public static String readResourceFile(@Nonnull Class clazz, @Nonnull String file
    }
  }

-  public static Filter removeCriteria(@Nullable Filter originalFilter, Predicate<Criterion> shouldRemove) {
+  public static Filter removeCriteria(
+      @Nullable Filter originalFilter, Predicate<Criterion> shouldRemove) {
    if (originalFilter != null && originalFilter.getOr() != null) {
-      return new Filter().setOr(new ConjunctiveCriterionArray(originalFilter.getOr()
-          .stream()
-          .map(criteria -> removeCriteria(criteria, shouldRemove))
-          .filter(criteria -> !criteria.getAnd().isEmpty())
-          .collect(Collectors.toList())));
+      return new Filter()
+          .setOr(
+              new ConjunctiveCriterionArray(
+                  originalFilter.getOr().stream()
+                      .map(criteria -> removeCriteria(criteria, shouldRemove))
+                      .filter(criteria -> !criteria.getAnd().isEmpty())
+                      .collect(Collectors.toList())));
    }
    return originalFilter;
  }

-  private static ConjunctiveCriterion removeCriteria(@Nonnull ConjunctiveCriterion conjunctiveCriterion,
-      Predicate<Criterion> shouldRemove) {
-    return new ConjunctiveCriterion().setAnd(new CriterionArray(conjunctiveCriterion.getAnd()
-        .stream()
-        .filter(criterion -> !shouldRemove.test(criterion))
-        .collect(Collectors.toList())));
+  private static ConjunctiveCriterion removeCriteria(
+      @Nonnull ConjunctiveCriterion conjunctiveCriterion, Predicate<Criterion> shouldRemove) {
+    return new ConjunctiveCriterion()
+        .setAnd(
+            new CriterionArray(
+                conjunctiveCriterion.getAnd().stream()
+                    .filter(criterion -> !shouldRemove.test(criterion))
+                    .collect(Collectors.toList())));
  }

  @SneakyThrows
  public static AggregationMetadata merge(AggregationMetadata one, AggregationMetadata two) {
    Map<String, Long> mergedMap =
-        Stream.concat(one.getAggregations().entrySet().stream(), two.getAggregations().entrySet().stream())
+        Stream.concat(
+                one.getAggregations().entrySet().stream(),
+                two.getAggregations().entrySet().stream())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, Long::sum));
-    // we want to make sure the values that were used in the filter are prioritized to appear in the response aggregation
-    Set<String> filteredValues = Stream.concat(one.getFilterValues().stream(), two.getFilterValues().stream()).filter(val -> val.isFiltered()).map(
-        val -> val.getValue()
-    ).collect(Collectors.toSet());
+    // we want to make sure the values that were used in the filter are prioritized to appear in the
+    // response aggregation
+    Set<String> filteredValues =
+        Stream.concat(one.getFilterValues().stream(), two.getFilterValues().stream())
+            .filter(val -> val.isFiltered())
+            .map(val -> val.getValue())
+            .collect(Collectors.toSet());
    return one.clone()
-        .setDisplayName(two.getDisplayName() != two.getName() ? two.getDisplayName() : one.getDisplayName())
+        .setDisplayName(
+            two.getDisplayName() != two.getName() ? 
two.getDisplayName() : one.getDisplayName()) .setAggregations(new LongMap(mergedMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(mergedMap, filteredValues))); + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(mergedMap, filteredValues))); } public static ListResult toListResult(final SearchResult searchResult) { @@ -149,13 +166,16 @@ public static ListResult toListResult(final SearchResult searchResult) { listResult.setCount(searchResult.getPageSize()); listResult.setTotal(searchResult.getNumEntities()); listResult.setEntities( - new UrnArray(searchResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()))); + new UrnArray( + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); return listResult; } @SneakyThrows - public static SearchFlags applyDefaultSearchFlags(@Nullable SearchFlags inputFlags, @Nullable String query, - @Nonnull SearchFlags defaultFlags) { + public static SearchFlags applyDefaultSearchFlags( + @Nullable SearchFlags inputFlags, @Nullable String query, @Nonnull SearchFlags defaultFlags) { SearchFlags finalSearchFlags = inputFlags != null ? inputFlags : defaultFlags.copy(); if (!finalSearchFlags.hasFulltext() || finalSearchFlags.isFulltext() == null) { finalSearchFlags.setFulltext(defaultFlags.isFulltext()); @@ -163,7 +183,8 @@ public static SearchFlags applyDefaultSearchFlags(@Nullable SearchFlags inputFla if (query == null || Set.of("*", "").contains(query)) { // No highlighting if no query string finalSearchFlags.setSkipHighlighting(true); - } else if (!finalSearchFlags.hasSkipHighlighting() || finalSearchFlags.isSkipHighlighting() == null) { + } else if (!finalSearchFlags.hasSkipHighlighting() + || finalSearchFlags.isSkipHighlighting() == null) { finalSearchFlags.setSkipHighlighting(defaultFlags.isSkipHighlighting()); } if (!finalSearchFlags.hasSkipAggregates() || finalSearchFlags.isSkipAggregates() == null) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index ea7286112f870..b2c615c1f47f5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; @@ -41,7 +44,6 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; - import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; @@ -59,9 +61,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - @Slf4j public class UpdateIndicesService { private static final String DOWNSTREAM_OF = "DownstreamOf"; @@ -76,13 +75,12 @@ public class UpdateIndicesService { @Value("${featureFlags.graphServiceDiffModeEnabled:true}") private boolean _graphDiffMode; + @Value("${featureFlags.searchServiceDiffModeEnabled:true}") private boolean _searchDiffMode; - private static final 
Set<ChangeType> UPDATE_CHANGE_TYPES = ImmutableSet.of( - ChangeType.UPSERT, - ChangeType.RESTATE, - ChangeType.PATCH); + private static final Set<ChangeType> UPDATE_CHANGE_TYPES = + ImmutableSet.of(ChangeType.UPSERT, ChangeType.RESTATE, ChangeType.PATCH); @VisibleForTesting public void setGraphDiffMode(boolean graphDiffMode) { @@ -95,13 +93,13 @@ public void setSearchDiffMode(boolean searchDiffMode) { } public UpdateIndicesService( - GraphService graphService, - EntitySearchService entitySearchService, - TimeseriesAspectService timeseriesAspectService, - SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, - SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { + GraphService graphService, + EntitySearchService entitySearchService, + TimeseriesAspectService timeseriesAspectService, + SystemMetadataService systemMetadataService, + EntityRegistry entityRegistry, + SearchDocumentTransformer searchDocumentTransformer, + EntityIndexBuilders entityIndexBuilders) { _graphService = graphService; _entitySearchService = entitySearchService; _timeseriesAspectService = timeseriesAspectService; @@ -123,14 +121,12 @@ public void handleChangeEvent(@Nonnull final MetadataChangeLog event) { } } - /** - * This very important method processes {@link MetadataChangeLog} events - * that represent changes to the Metadata Graph. + * This very important method processes {@link MetadataChangeLog} events that represent changes to + * the Metadata Graph. * - * In particular, it handles updating the Search, Graph, Timeseries, and - * System Metadata stores in response to a given change type to reflect - * the changes present in the new aspect. + * <p>In particular, it handles updating the Search, Graph, Timeseries, and System Metadata stores + * in response to a given change type to reflect the changes present in the new aspect. * * @param event the change event to be processed. */ @@ -147,23 +143,29 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro AspectSpec aspectSpec = entitySpec.getAspectSpec(event.getAspectName()); if (aspectSpec == null) { throw new RuntimeException( - String.format("Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", - event.getEntityType(), - event.getAspectName())); + String.format( + "Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", + event.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - aspectSpec); + RecordTemplate aspect = + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), event.getAspect().getContentType(), aspectSpec); GenericAspect previousAspectValue = event.getPreviousAspectValue(); - RecordTemplate previousAspect = previousAspectValue != null - ? GenericRecordUtils.deserializeAspect(previousAspectValue.getValue(), previousAspectValue.getContentType(), aspectSpec) - : null; + RecordTemplate previousAspect = + previousAspectValue != null + ? GenericRecordUtils.deserializeAspect( + previousAspectValue.getValue(), previousAspectValue.getContentType(), aspectSpec) + : null; // Step 0. If the aspect is timeseries, add to its timeseries index. 
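The hunk above only rewraps the old/new aspect deserialization, but the detail worth keeping in view is the null guard on the previous aspect value. A minimal, self-contained sketch of that guard, with a plain string decoder standing in for GenericRecordUtils.deserializeAspect (the class and values below are illustrative, not part of the patch):

    import java.nio.charset.StandardCharsets;

    public class PreviousValueGuard {
      // Stand-in decoder; the real code calls GenericRecordUtils.deserializeAspect(...).
      static String decode(byte[] value) {
        return new String(value, StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        byte[] newValue = "new-aspect".getBytes(StandardCharsets.UTF_8);
        byte[] previousValue = null; // the first write of an aspect has no previous value

        String aspect = decode(newValue);
        // Same shape as the reformatted ternary above: only decode when a value exists.
        String previousAspect = previousValue != null ? decode(previousValue) : null;

        System.out.println(aspect + " / " + previousAspect); // new-aspect / null
      }
    }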
     if (aspectSpec.isTimeseries()) {
-      updateTimeseriesFields(event.getEntityType(), event.getAspectName(), urn, aspect, aspectSpec,
+      updateTimeseriesFields(
+          event.getEntityType(),
+          event.getAspectName(),
+          urn,
+          aspect,
+          aspectSpec,
           event.getSystemMetadata());
     } else {
       // Inject into the System Metadata Index when an aspect is non-timeseries only.
@@ -173,13 +175,16 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro
     }
 
     // Step 1. For all aspects, attempt to update Search
-    updateSearchService(entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect);
+    updateSearchService(
+        entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect);
 
     // Step 2. For all aspects, attempt to update Graph
     SystemMetadata systemMetadata = event.getSystemMetadata();
-    if (_graphDiffMode && !(_graphService instanceof DgraphGraphService)
-        && (systemMetadata == null || systemMetadata.getProperties() == null
-        || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) {
+    if (_graphDiffMode
+        && !(_graphService instanceof DgraphGraphService)
+        && (systemMetadata == null
+            || systemMetadata.getProperties() == null
+            || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) {
       updateGraphServiceDiff(urn, aspectSpec, previousAspect, aspect, event);
     } else {
       updateGraphService(urn, aspectSpec, aspect, event);
@@ -187,14 +192,14 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro
   }
 
   /**
-   * This very important method processes {@link MetadataChangeLog} deletion events
-   * to cleanup the Metadata Graph when an aspect or entity is removed.
+   * This very important method processes {@link MetadataChangeLog} deletion events to clean up the
+   * Metadata Graph when an aspect or entity is removed.
    *
-   * In particular, it handles updating the Search, Graph, Timeseries, and
-   * System Metadata stores to reflect the deletion of a particular aspect.
+   * <p>In particular, it handles updating the Search, Graph, Timeseries, and System Metadata stores
+   * to reflect the deletion of a particular aspect.
    *
-   * Note that if an entity's key aspect is deleted, the entire entity will be purged
-   * from search, graph, timeseries, etc.
+   * <p>Note that if an entity's key aspect is deleted, the entire entity will be purged from
+   * search, graph, timeseries, etc.
    *
    * @param event the change event to be processed.
    */
@@ -211,19 +216,23 @@ public void handleDeleteChangeEvent(@Nonnull final MetadataChangeLog event) {
     AspectSpec aspectSpec = entitySpec.getAspectSpec(event.getAspectName());
     if (aspectSpec == null) {
       throw new RuntimeException(
-          String.format("Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.",
-              event.getEntityType(),
-              event.getAspectName()));
+          String.format(
+              "Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s.
Cannot update indices for MCL.", + event.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = GenericRecordUtils.deserializeAspect(event.getPreviousAspectValue().getValue(), - event.getPreviousAspectValue().getContentType(), aspectSpec); + RecordTemplate aspect = + GenericRecordUtils.deserializeAspect( + event.getPreviousAspectValue().getValue(), + event.getPreviousAspectValue().getContentType(), + aspectSpec); Boolean isDeletingKey = event.getAspectName().equals(entitySpec.getKeyAspectName()); if (!aspectSpec.isTimeseries()) { deleteSystemMetadata(urn, aspectSpec, isDeletingKey); deleteGraphData(urn, aspectSpec, aspect, isDeletingKey, event); - deleteSearchData(_entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey); + deleteSearchData( + _entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey); } } @@ -231,8 +240,7 @@ public void handleDeleteChangeEvent(@Nonnull final MetadataChangeLog event) { private void updateFineGrainedEdgesAndRelationships( RecordTemplate aspect, List<Edge> edgesToAdd, - HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded - ) { + HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded) { UpstreamLineage upstreamLineage = new UpstreamLineage(aspect.data()); if (upstreamLineage.getFineGrainedLineages() != null) { for (FineGrainedLineage fineGrainedLineage : upstreamLineage.getFineGrainedLineages()) { @@ -243,8 +251,10 @@ private void updateFineGrainedEdgesAndRelationships( for (Urn downstream : fineGrainedLineage.getDownstreams()) { for (Urn upstream : fineGrainedLineage.getUpstreams()) { // TODO: add edges uniformly across aspects - edgesToAdd.add(new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); - Set<String> relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); + edgesToAdd.add( + new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); + Set<String> relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); urnToRelationshipTypesBeingAdded.put(downstream, relationshipTypes); } @@ -253,10 +263,14 @@ private void updateFineGrainedEdgesAndRelationships( } } - private Urn generateSchemaFieldUrn(@Nonnull final String resourceUrn, @Nonnull final String fieldPath) { - // we rely on schemaField fieldPaths to be encoded since we do that with fineGrainedLineage on the ingestion side - final String encodedFieldPath = fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); - final SchemaFieldKey key = new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); + private Urn generateSchemaFieldUrn( + @Nonnull final String resourceUrn, @Nonnull final String fieldPath) { + // we rely on schemaField fieldPaths to be encoded since we do that with fineGrainedLineage on + // the ingestion side + final String encodedFieldPath = + fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); + final SchemaFieldKey key = + new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); return EntityKeyUtils.convertEntityKeyToUrn(key, Constants.SCHEMA_FIELD_ENTITY_NAME); } @@ -265,15 +279,27 @@ private void updateInputFieldEdgesAndRelationships( @Nonnull final Urn urn, @Nonnull final InputFields inputFields, @Nonnull final List<Edge> edgesToAdd, - @Nonnull final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded - ) { + 
@Nonnull final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded) { if (inputFields.hasFields()) { for (final InputField field : inputFields.getFields()) { - if (field.hasSchemaFieldUrn() && field.hasSchemaField() && field.getSchemaField().hasFieldPath()) { - final Urn sourceFieldUrn = generateSchemaFieldUrn(urn.toString(), field.getSchemaField().getFieldPath()); + if (field.hasSchemaFieldUrn() + && field.hasSchemaField() + && field.getSchemaField().hasFieldPath()) { + final Urn sourceFieldUrn = + generateSchemaFieldUrn(urn.toString(), field.getSchemaField().getFieldPath()); // TODO: add edges uniformly across aspects - edgesToAdd.add(new Edge(sourceFieldUrn, field.getSchemaFieldUrn(), DOWNSTREAM_OF, null, null, null, null, null)); - final Set<String> relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(sourceFieldUrn, new HashSet<>()); + edgesToAdd.add( + new Edge( + sourceFieldUrn, + field.getSchemaFieldUrn(), + DOWNSTREAM_OF, + null, + null, + null, + null, + null)); + final Set<String> relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(sourceFieldUrn, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); urnToRelationshipTypesBeingAdded.put(sourceFieldUrn, relationshipTypes); } @@ -286,54 +312,59 @@ private Pair<List<Edge>, HashMap<Urn, Set<String>>> getEdgesAndRelationshipTypes @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, @Nonnull final MetadataChangeLog event, - final boolean isNewAspectVersion - ) { + final boolean isNewAspectVersion) { final List<Edge> edgesToAdd = new ArrayList<>(); final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = new HashMap<>(); - // we need to manually set schemaField <-> schemaField edges for fineGrainedLineage and inputFields + // we need to manually set schemaField <-> schemaField edges for fineGrainedLineage and + // inputFields // since @Relationship only links between the parent entity urn and something else. 
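updateFineGrainedEdgesAndRelationships and updateInputFieldEdgesAndRelationships above both accumulate relationship types per URN with the same getOrDefault/add/put triple. A runnable sketch of that pattern on toy data (plain strings instead of Urn and Edge), alongside the computeIfAbsent form it could arguably be collapsed to:

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class RelationshipTypeAccumulator {
      public static void main(String[] args) {
        Map<String, Set<String>> typesByUrn = new HashMap<>();
        String urn = "urn:li:dataset:(example)"; // toy value, not from the patch

        // The pattern used in the hunks above: fetch-or-create, add, put back.
        Set<String> types = typesByUrn.getOrDefault(urn, new HashSet<>());
        types.add("DownstreamOf");
        typesByUrn.put(urn, types);

        // Equivalent single call; a possible follow-up cleanup, not in this patch:
        typesByUrn.computeIfAbsent(urn, k -> new HashSet<>()).add("DownstreamOf");

        System.out.println(typesByUrn); // {urn:li:dataset:(example)=[DownstreamOf]}
      }
    }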
if (aspectSpec.getName().equals(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { updateFineGrainedEdgesAndRelationships(aspect, edgesToAdd, urnToRelationshipTypesBeingAdded); } if (aspectSpec.getName().equals(Constants.INPUT_FIELDS_ASPECT_NAME)) { final InputFields inputFields = new InputFields(aspect.data()); - updateInputFieldEdgesAndRelationships(urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); + updateInputFieldEdgesAndRelationships( + urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); } Map<RelationshipFieldSpec, List<Object>> extractedFields = FieldExtractor.extractFields(aspect, aspectSpec.getRelationshipFieldSpecs()); for (Map.Entry<RelationshipFieldSpec, List<Object>> entry : extractedFields.entrySet()) { - Set<String> relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(urn, new HashSet<>()); + Set<String> relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(urn, new HashSet<>()); relationshipTypes.add(entry.getKey().getRelationshipName()); urnToRelationshipTypesBeingAdded.put(urn, relationshipTypes); - final List<Edge> newEdges = GraphIndexUtils.extractGraphEdges(entry, aspect, urn, event, isNewAspectVersion); + final List<Edge> newEdges = + GraphIndexUtils.extractGraphEdges(entry, aspect, urn, event, isNewAspectVersion); edgesToAdd.addAll(newEdges); } return Pair.of(edgesToAdd, urnToRelationshipTypesBeingAdded); } - /** - * Process snapshot and update graph index - */ + /** Process snapshot and update graph index */ private void updateGraphService( @Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { Pair<List<Edge>, HashMap<Urn, Set<String>>> edgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, aspect, event, true); final List<Edge> edgesToAdd = edgeAndRelationTypes.getFirst(); - final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = edgeAndRelationTypes.getSecond(); + final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = + edgeAndRelationTypes.getSecond(); log.debug("Here's the relationship types found {}", urnToRelationshipTypesBeingAdded); if (urnToRelationshipTypesBeingAdded.size() > 0) { for (Map.Entry<Urn, Set<String>> entry : urnToRelationshipTypesBeingAdded.entrySet()) { - _graphService.removeEdgesFromNode(entry.getKey(), new ArrayList<>(entry.getValue()), - newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)); + _graphService.removeEdgesFromNode( + entry.getKey(), + new ArrayList<>(entry.getValue()), + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING)); } edgesToAdd.forEach(_graphService::addEdge); } @@ -344,15 +375,17 @@ private void updateGraphServiceDiff( @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspect, @Nonnull final RecordTemplate newAspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { Pair<List<Edge>, HashMap<Urn, Set<String>>> oldEdgeAndRelationTypes = null; if (oldAspect != null) { - oldEdgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, oldAspect, event, false); + oldEdgeAndRelationTypes = + getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, oldAspect, event, false); } final List<Edge> oldEdges = - oldEdgeAndRelationTypes != null ? 
oldEdgeAndRelationTypes.getFirst() : Collections.emptyList(); + oldEdgeAndRelationTypes != null + ? oldEdgeAndRelationTypes.getFirst() + : Collections.emptyList(); final Set<Edge> oldEdgeSet = new HashSet<>(oldEdges); Pair<List<Edge>, HashMap<Urn, Set<String>>> newEdgeAndRelationTypes = @@ -362,14 +395,12 @@ private void updateGraphServiceDiff( final Set<Edge> newEdgeSet = new HashSet<>(newEdges); // Edges to add - final List<Edge> additiveDifference = newEdgeSet.stream() - .filter(edge -> !oldEdgeSet.contains(edge)) - .collect(Collectors.toList()); + final List<Edge> additiveDifference = + newEdgeSet.stream().filter(edge -> !oldEdgeSet.contains(edge)).collect(Collectors.toList()); // Edges to remove - final List<Edge> subtractiveDifference = oldEdgeSet.stream() - .filter(edge -> !newEdgeSet.contains(edge)) - .collect(Collectors.toList()); + final List<Edge> subtractiveDifference = + oldEdgeSet.stream().filter(edge -> !newEdgeSet.contains(edge)).collect(Collectors.toList()); // Edges to update final List<Edge> mergedEdges = getMergedEdges(oldEdgeSet, newEdgeSet); @@ -394,17 +425,18 @@ private void updateGraphServiceDiff( } private static List<Edge> getMergedEdges(final Set<Edge> oldEdgeSet, final Set<Edge> newEdgeSet) { - final Map<Integer, com.linkedin.metadata.graph.Edge> oldEdgesMap = oldEdgeSet - .stream() - .map(edge -> Pair.of(edge.hashCode(), edge)) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); + final Map<Integer, com.linkedin.metadata.graph.Edge> oldEdgesMap = + oldEdgeSet.stream() + .map(edge -> Pair.of(edge.hashCode(), edge)) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); final List<com.linkedin.metadata.graph.Edge> mergedEdges = new ArrayList<>(); if (!oldEdgesMap.isEmpty()) { for (com.linkedin.metadata.graph.Edge newEdge : newEdgeSet) { if (oldEdgesMap.containsKey(newEdge.hashCode())) { final com.linkedin.metadata.graph.Edge oldEdge = oldEdgesMap.get(newEdge.hashCode()); - final com.linkedin.metadata.graph.Edge mergedEdge = GraphIndexUtils.mergeEdges(oldEdge, newEdge); + final com.linkedin.metadata.graph.Edge mergedEdge = + GraphIndexUtils.mergeEdges(oldEdge, newEdge); mergedEdges.add(mergedEdge); } } @@ -413,18 +445,21 @@ private static List<Edge> getMergedEdges(final Set<Edge> oldEdgeSet, final Set<E return mergedEdges; } - /** - * Process snapshot and update search index - */ - private void updateSearchService(String entityName, Urn urn, - AspectSpec aspectSpec, RecordTemplate aspect, - @Nullable SystemMetadata systemMetadata, @Nullable RecordTemplate previousAspect) { + /** Process snapshot and update search index */ + private void updateSearchService( + String entityName, + Urn urn, + AspectSpec aspectSpec, + RecordTemplate aspect, + @Nullable SystemMetadata systemMetadata, + @Nullable RecordTemplate previousAspect) { Optional<String> searchDocument; Optional<String> previousSearchDocument = Optional.empty(); try { searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, false); } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -439,14 +474,18 @@ private void updateSearchService(String entityName, Urn urn, } String searchDocumentValue = searchDocument.get(); - if (_searchDiffMode && (systemMetadata == null || systemMetadata.getProperties() == null - || 
!Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { + if (_searchDiffMode + && (systemMetadata == null + || systemMetadata.getProperties() == null + || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { if (previousAspect != null) { try { - previousSearchDocument = _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); + previousSearchDocument = + _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); } catch (Exception e) { log.error( - "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", e, + "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", + e, aspectSpec.getName()); } } @@ -463,11 +502,14 @@ private void updateSearchService(String entityName, Urn urn, _entitySearchService.upsertDocument(entityName, searchDocument.get(), docId.get()); } - /** - * Process snapshot and update time-series index - */ - private void updateTimeseriesFields(String entityType, String aspectName, Urn urn, RecordTemplate aspect, - AspectSpec aspectSpec, SystemMetadata systemMetadata) { + /** Process snapshot and update time-series index */ + private void updateTimeseriesFields( + String entityType, + String aspectName, + Urn urn, + RecordTemplate aspect, + AspectSpec aspectSpec, + SystemMetadata systemMetadata) { Map<String, JsonNode> documents; try { documents = TimeseriesAspectTransformer.transform(urn, aspect, aspectSpec, systemMetadata); @@ -475,12 +517,17 @@ private void updateTimeseriesFields(String entityType, String aspectName, Urn ur log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; } - documents.entrySet().forEach(document -> { - _timeseriesAspectService.upsertDocument(entityType, aspectName, document.getKey(), document.getValue()); - }); + documents + .entrySet() + .forEach( + document -> { + _timeseriesAspectService.upsertDocument( + entityType, aspectName, document.getKey(), document.getValue()); + }); } - private void updateSystemMetadata(SystemMetadata systemMetadata, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect) { + private void updateSystemMetadata( + SystemMetadata systemMetadata, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect) { _systemMetadataService.insert(systemMetadata, urn.toString(), aspectSpec.getName()); // If processing status aspect update all aspects for this urn to removed @@ -496,7 +543,9 @@ private void deleteSystemMetadata(Urn urn, AspectSpec aspectSpec, Boolean isKeyA _systemMetadataService.deleteUrn(urn.toString()); } else { // Delete all aspects from system metadata service - log.debug(String.format("Deleting system metadata for urn: %s, aspect: %s", urn, aspectSpec.getName())); + log.debug( + String.format( + "Deleting system metadata for urn: %s, aspect: %s", urn, aspectSpec.getName())); _systemMetadataService.deleteAspect(urn.toString(), aspectSpec.getName()); } } @@ -506,8 +555,7 @@ private void deleteGraphData( @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, @Nonnull final Boolean isKeyAspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { if (isKeyAspect) { _graphService.removeNode(urn); return; @@ -516,17 +564,27 @@ private void deleteGraphData( Pair<List<Edge>, HashMap<Urn, Set<String>>> edgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, aspect, event, true); - final HashMap<Urn, Set<String>> 
urnToRelationshipTypesBeingAdded = edgeAndRelationTypes.getSecond(); + final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = + edgeAndRelationTypes.getSecond(); if (urnToRelationshipTypesBeingAdded.size() > 0) { for (Map.Entry<Urn, Set<String>> entry : urnToRelationshipTypesBeingAdded.entrySet()) { - _graphService.removeEdgesFromNode(entry.getKey(), new ArrayList<>(entry.getValue()), - createRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)); + _graphService.removeEdgesFromNode( + entry.getKey(), + new ArrayList<>(entry.getValue()), + createRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING)); } } } - private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, String entityName, - AspectSpec aspectSpec, RecordTemplate aspect, Boolean isKeyAspect) { + private void deleteSearchData( + EntitySearchService entitySearchService, + Urn urn, + String entityName, + AspectSpec aspectSpec, + RecordTemplate aspect, + Boolean isKeyAspect) { String docId; try { docId = URLEncoder.encode(urn.toString(), "UTF-8"); @@ -542,9 +600,11 @@ private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, Optional<String> searchDocument; try { - searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO + searchDocument = + _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -560,14 +620,16 @@ private EntitySpec getEventEntitySpec(@Nonnull final MetadataChangeLog event) { return _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { throw new RuntimeException( - String.format("Failed to retrieve Entity Spec for entity with name %s. Cannot update indices for MCL.", + String.format( + "Failed to retrieve Entity Spec for entity with name %s. Cannot update indices for MCL.", event.getEntityType())); } } /** - * Allow internal use of the system entity client. Solves recursive dependencies between the UpdateIndicesService - * and the SystemJavaEntityClient + * Allow internal use of the system entity client. Solves recursive dependencies between the + * UpdateIndicesService and the SystemJavaEntityClient + * * @param systemEntityClient system entity client */ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java index 64ad88c08a741..9aa0cdca99f68 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java @@ -1,22 +1,20 @@ package com.linkedin.metadata.shared; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; - import java.io.IOException; import java.util.List; - public interface ElasticSearchIndexed { - /** - * The index configurations for the given service. - * @return List of reindex configurations - */ - List<ReindexConfig> buildReindexConfigs() throws IOException; + /** + * The index configurations for the given service. 
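The diff-mode path in updateGraphServiceDiff earlier in this file reduces the old and new edge lists to two set differences before touching the graph store. A self-contained sketch of that computation, with strings standing in for Edge objects (toy data, not from the patch):

    import java.util.List;
    import java.util.Set;
    import java.util.stream.Collectors;

    public class EdgeDiffSketch {
      public static void main(String[] args) {
        Set<String> oldEdges = Set.of("a->b", "a->c");
        Set<String> newEdges = Set.of("a->b", "a->d");

        // Additive difference: edges present only in the new aspect.
        List<String> toAdd =
            newEdges.stream().filter(e -> !oldEdges.contains(e)).collect(Collectors.toList());
        // Subtractive difference: edges present only in the old aspect.
        List<String> toRemove =
            oldEdges.stream().filter(e -> !newEdges.contains(e)).collect(Collectors.toList());

        System.out.println("add=" + toAdd + " remove=" + toRemove); // add=[a->d] remove=[a->c]
      }
    }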
+ * + * @return List of reindex configurations + */ + List<ReindexConfig> buildReindexConfigs() throws IOException; - /** - * Mirrors the service's functions which - * are expected to build/reindex as needed based - * on the reindex configurations above - */ - void reindexAll() throws IOException; + /** + * Mirrors the service's functions which are expected to build/reindex as needed based on the + * reindex configurations above + */ + void reindexAll() throws IOException; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java index 5eb03eb23d01a..cf1674ac00480 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.systemmetadata; +import static com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService.INDEX_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import com.linkedin.metadata.search.utils.ESUtils; @@ -34,9 +36,6 @@ import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService.INDEX_NAME; - - @Slf4j @RequiredArgsConstructor public class ESSystemMetadataDAO { @@ -47,17 +46,18 @@ public class ESSystemMetadataDAO { /** * Gets the status of a Task running in ElasticSearch + * * @param taskId the task ID to get the status of */ public Optional<GetTaskResponse> getTaskStatus(@Nonnull String nodeId, long taskId) { - final GetTaskRequest taskRequest = new GetTaskRequest( - nodeId, - taskId - ); + final GetTaskRequest taskRequest = new GetTaskRequest(nodeId, taskId); try { return client.tasks().get(taskRequest, RequestOptions.DEFAULT); } catch (IOException e) { - log.error(String.format("ERROR: Failed to get task status for %s:%d. See stacktrace for a more detailed error:", nodeId, taskId)); + log.error( + String.format( + "ERROR: Failed to get task status for %s:%d. 
See stacktrace for a more detailed error:", + nodeId, taskId)); e.printStackTrace(); } return Optional.empty(); @@ -70,8 +70,8 @@ public Optional<GetTaskResponse> getTaskStatus(@Nonnull String nodeId, long task * @param docId the ID of the document */ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { - final UpdateRequest updateRequest = new UpdateRequest( - indexConvention.getIndexName(INDEX_NAME), docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexConvention.getIndexName(INDEX_NAME), docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -80,7 +80,8 @@ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { } public DeleteResponse deleteByDocId(@Nonnull final String docId) { - DeleteRequest deleteRequest = new DeleteRequest(indexConvention.getIndexName(INDEX_NAME), docId); + DeleteRequest deleteRequest = + new DeleteRequest(indexConvention.getIndexName(INDEX_NAME), docId); try { final DeleteResponse deleteResponse = client.delete(deleteRequest, RequestOptions.DEFAULT); @@ -96,24 +97,26 @@ public BulkByScrollResponse deleteByUrn(@Nonnull final String urn) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); finalQuery.must(QueryBuilders.termQuery("urn", urn)); - final Optional<BulkByScrollResponse> deleteResponse = bulkProcessor.deleteByQuery(finalQuery, - indexConvention.getIndexName(INDEX_NAME)); + final Optional<BulkByScrollResponse> deleteResponse = + bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)); return deleteResponse.orElse(null); } - public BulkByScrollResponse deleteByUrnAspect(@Nonnull final String urn, @Nonnull final String aspect) { + public BulkByScrollResponse deleteByUrnAspect( + @Nonnull final String urn, @Nonnull final String aspect) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); finalQuery.must(QueryBuilders.termQuery("urn", urn)); finalQuery.must(QueryBuilders.termQuery("aspect", aspect)); - final Optional<BulkByScrollResponse> deleteResponse = bulkProcessor.deleteByQuery(finalQuery, - indexConvention.getIndexName(INDEX_NAME)); + final Optional<BulkByScrollResponse> deleteResponse = + bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)); return deleteResponse.orElse(null); } - public SearchResponse findByParams(Map<String, String> searchParams, boolean includeSoftDeleted, int from, int size) { + public SearchResponse findByParams( + Map<String, String> searchParams, boolean includeSoftDeleted, int from, int size) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -147,8 +150,13 @@ public SearchResponse findByParams(Map<String, String> searchParams, boolean inc } // TODO: Scroll impl for searches bound by 10k limit - public SearchResponse findByParams(Map<String, String> searchParams, boolean includeSoftDeleted, @Nullable Object[] sort, - @Nullable String pitId, @Nonnull String keepAlive, int size) { + public SearchResponse findByParams( + Map<String, String> searchParams, + boolean includeSoftDeleted, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + int size) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -181,8 +189,8 @@ public SearchResponse findByParams(Map<String, String> searchParams, boolean inc return null; } - public SearchResponse findByRegistry(String registryName, String registryVersion, boolean 
includeSoftDeleted, - int from, int size) { + public SearchResponse findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size) { Map<String, String> params = new HashMap<>(); params.put("registryName", registryName); params.put("registryVersion", registryVersion); @@ -210,11 +218,13 @@ public SearchResponse findRuns(Integer pageOffset, Integer pageSize) { bucketSort.size(pageSize); bucketSort.from(pageOffset); - TermsAggregationBuilder aggregation = AggregationBuilders.terms("runId") - .field("runId") - .subAggregation(AggregationBuilders.max("maxTimestamp").field("lastUpdated")) - .subAggregation(bucketSort) - .subAggregation(AggregationBuilders.filter("removed", QueryBuilders.termQuery("removed", "true"))); + TermsAggregationBuilder aggregation = + AggregationBuilders.terms("runId") + .field("runId") + .subAggregation(AggregationBuilders.max("maxTimestamp").field("lastUpdated")) + .subAggregation(bucketSort) + .subAggregation( + AggregationBuilders.filter("removed", QueryBuilders.termQuery("removed", "true"))); searchSourceBuilder.aggregation(aggregation); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java index e9ee1d6ee78d5..6fbe7cfe882ce 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java @@ -40,10 +40,10 @@ import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.ParsedMax; - @Slf4j @RequiredArgsConstructor -public class ElasticSearchSystemMetadataService implements SystemMetadataService, ElasticSearchIndexed { +public class ElasticSearchSystemMetadataService + implements SystemMetadataService, ElasticSearchIndexed { private final ESBulkProcessor _esBulkProcessor; private final IndexConvention _indexConvention; @@ -58,9 +58,15 @@ public class ElasticSearchSystemMetadataService implements SystemMetadataService private static final String FIELD_LAST_UPDATED = "lastUpdated"; private static final String FIELD_REGISTRY_NAME = "registryName"; private static final String FIELD_REGISTRY_VERSION = "registryVersion"; - private static final Set<String> INDEX_FIELD_SET = new HashSet<>( - Arrays.asList(FIELD_URN, FIELD_ASPECT, FIELD_RUNID, FIELD_LAST_UPDATED, FIELD_REGISTRY_NAME, - FIELD_REGISTRY_VERSION)); + private static final Set<String> INDEX_FIELD_SET = + new HashSet<>( + Arrays.asList( + FIELD_URN, + FIELD_ASPECT, + FIELD_RUNID, + FIELD_LAST_UPDATED, + FIELD_REGISTRY_NAME, + FIELD_REGISTRY_VERSION)); private String toDocument(SystemMetadata systemMetadata, String urn, String aspect) { final ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -112,12 +118,13 @@ public void setDocStatus(String urn, boolean removed) { final List<AspectRowSummary> aspectList = findByParams(ImmutableMap.of("urn", urn), !removed, 0, ESUtils.MAX_RESULT_SIZE); // for each -> toDocId and set removed to true for all - aspectList.forEach(aspect -> { - final String docId = toDocId(aspect.getUrn(), aspect.getAspectName()); - final ObjectNode document = JsonNodeFactory.instance.objectNode(); - document.put("removed", removed); - _esDAO.upsertDocument(docId, document.toString()); - }); + aspectList.forEach( + aspect -> { + final String docId = toDocId(aspect.getUrn(), 
aspect.getAspectName()); + final ObjectNode document = JsonNodeFactory.instance.objectNode(); + document.put("removed", removed); + _esDAO.upsertDocument(docId, document.toString()); + }); } @Override @@ -133,36 +140,44 @@ public void insert(@Nullable SystemMetadata systemMetadata, String urn, String a } @Override - public List<AspectRowSummary> findByRunId(String runId, boolean includeSoftDeleted, int from, int size) { - return findByParams(Collections.singletonMap(FIELD_RUNID, runId), includeSoftDeleted, from, size); + public List<AspectRowSummary> findByRunId( + String runId, boolean includeSoftDeleted, int from, int size) { + return findByParams( + Collections.singletonMap(FIELD_RUNID, runId), includeSoftDeleted, from, size); } @Override - public List<AspectRowSummary> findByUrn(String urn, boolean includeSoftDeleted, int from, int size) { + public List<AspectRowSummary> findByUrn( + String urn, boolean includeSoftDeleted, int from, int size) { return findByParams(Collections.singletonMap(FIELD_URN, urn), includeSoftDeleted, from, size); } @Override - public List<AspectRowSummary> findByParams(Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, - int size) { - SearchResponse searchResponse = _esDAO.findByParams(systemMetaParams, includeSoftDeleted, from, size); + public List<AspectRowSummary> findByParams( + Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, int size) { + SearchResponse searchResponse = + _esDAO.findByParams(systemMetaParams, includeSoftDeleted, from, size); if (searchResponse != null) { SearchHits hits = searchResponse.getHits(); - List<AspectRowSummary> summaries = Arrays.stream(hits.getHits()).map(hit -> { - Map<String, Object> values = hit.getSourceAsMap(); - AspectRowSummary summary = new AspectRowSummary(); - summary.setRunId((String) values.get(FIELD_RUNID)); - summary.setAspectName((String) values.get(FIELD_ASPECT)); - summary.setUrn((String) values.get(FIELD_URN)); - Object timestamp = values.get(FIELD_LAST_UPDATED); - if (timestamp instanceof Long) { - summary.setTimestamp((Long) timestamp); - } else if (timestamp instanceof Integer) { - summary.setTimestamp(Long.valueOf((Integer) timestamp)); - } - summary.setKeyAspect(((String) values.get(FIELD_ASPECT)).endsWith("Key")); - return summary; - }).collect(Collectors.toList()); + List<AspectRowSummary> summaries = + Arrays.stream(hits.getHits()) + .map( + hit -> { + Map<String, Object> values = hit.getSourceAsMap(); + AspectRowSummary summary = new AspectRowSummary(); + summary.setRunId((String) values.get(FIELD_RUNID)); + summary.setAspectName((String) values.get(FIELD_ASPECT)); + summary.setUrn((String) values.get(FIELD_URN)); + Object timestamp = values.get(FIELD_LAST_UPDATED); + if (timestamp instanceof Long) { + summary.setTimestamp((Long) timestamp); + } else if (timestamp instanceof Integer) { + summary.setTimestamp(Long.valueOf((Integer) timestamp)); + } + summary.setKeyAspect(((String) values.get(FIELD_ASPECT)).endsWith("Key")); + return summary; + }) + .collect(Collectors.toList()); return summaries; } else { return Collections.emptyList(); @@ -170,8 +185,8 @@ public List<AspectRowSummary> findByParams(Map<String, String> systemMetaParams, } @Override - public List<AspectRowSummary> findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, - int from, int size) { + public List<AspectRowSummary> findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size) { Map<String, String> 
registryParams = new HashMap<>(); registryParams.put(FIELD_REGISTRY_NAME, registryName); registryParams.put(FIELD_REGISTRY_VERSION, registryVersion); @@ -179,26 +194,34 @@ public List<AspectRowSummary> findByRegistry(String registryName, String registr } @Override - public List<IngestionRunSummary> listRuns(Integer pageOffset, Integer pageSize, boolean includeSoftDeleted) { + public List<IngestionRunSummary> listRuns( + Integer pageOffset, Integer pageSize, boolean includeSoftDeleted) { SearchResponse response = _esDAO.findRuns(pageOffset, pageSize); - List<? extends Terms.Bucket> buckets = ((ParsedStringTerms) response.getAggregations().get("runId")).getBuckets(); + List<? extends Terms.Bucket> buckets = + ((ParsedStringTerms) response.getAggregations().get("runId")).getBuckets(); if (!includeSoftDeleted) { - buckets.removeIf(bucket -> { - long totalDocs = bucket.getDocCount(); - long softDeletedDocs = ((ParsedFilter) bucket.getAggregations().get("removed")).getDocCount(); - return totalDocs == softDeletedDocs; - }); + buckets.removeIf( + bucket -> { + long totalDocs = bucket.getDocCount(); + long softDeletedDocs = + ((ParsedFilter) bucket.getAggregations().get("removed")).getDocCount(); + return totalDocs == softDeletedDocs; + }); } // TODO(gabe-lyons): add sample urns - return buckets.stream().map(bucket -> { - IngestionRunSummary entry = new IngestionRunSummary(); - entry.setRunId(bucket.getKeyAsString()); - entry.setTimestamp((long) ((ParsedMax) bucket.getAggregations().get("maxTimestamp")).getValue()); - entry.setRows(bucket.getDocCount()); - return entry; - }).collect(Collectors.toList()); + return buckets.stream() + .map( + bucket -> { + IngestionRunSummary entry = new IngestionRunSummary(); + entry.setRunId(bucket.getKeyAsString()); + entry.setTimestamp( + (long) ((ParsedMax) bucket.getAggregations().get("maxTimestamp")).getValue()); + entry.setRows(bucket.getDocCount()); + return entry; + }) + .collect(Collectors.toList()); } @Override @@ -215,8 +238,11 @@ public void configure() { @Override public List<ReindexConfig> buildReindexConfigs() throws IOException { - return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME), - SystemMetadataMappingsBuilder.getMappings(), Collections.emptyMap())); + return List.of( + _indexBuilder.buildReindexState( + _indexConvention.getIndexName(INDEX_NAME), + SystemMetadataMappingsBuilder.getMappings(), + Collections.emptyMap())); } @Override @@ -227,6 +253,7 @@ public void reindexAll() { @VisibleForTesting @Override public void clear() { - _esBulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); + _esBulkProcessor.deleteByQuery( + QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java index c19283aa44ac1..6bce654fb1481 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @AllArgsConstructor @Data public class SystemMetadataEntry { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java 
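The listRuns change above keeps the same filtering rule while rewrapping it: a run is hidden when every one of its documents is soft-deleted. A runnable sketch of that predicate, with a small record standing in for the Elasticsearch runId terms bucket (the record and its values are illustrative only):

    import java.util.ArrayList;
    import java.util.List;

    public class SoftDeleteRunFilter {
      // Stand-in for the ES "runId" bucket consumed by listRuns above.
      record RunBucket(String runId, long docCount, long removedDocCount) {}

      public static void main(String[] args) {
        List<RunBucket> buckets = new ArrayList<>(List.of(
            new RunBucket("run-1", 10, 10), // every doc soft-deleted -> dropped
            new RunBucket("run-2", 10, 3))); // kept

        // Same predicate as buckets.removeIf(...) above: drop fully soft-deleted runs.
        buckets.removeIf(b -> b.docCount() == b.removedDocCount());

        System.out.println(buckets); // [RunBucket[runId=run-2, docCount=10, removedDocCount=3]]
      }
    }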
index 535610ffbf37f..6623580548706 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java @@ -5,11 +5,10 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; - @Slf4j public class SystemMetadataMappingsBuilder { - private SystemMetadataMappingsBuilder() { } + private SystemMetadataMappingsBuilder() {} public static Map<String, Object> getMappings() { Map<String, Object> mappings = new HashMap<>(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java index c0b1239ffa835..0105215565117 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java @@ -6,5 +6,4 @@ @Value @EqualsAndHashCode(callSuper = false) -public class MissingEntityAspect extends EntityAspect { -} +public class MissingEntityAspect extends EntityAspect {} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java index 9658d9d3ab036..a93d4880a7979 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline; +import static com.linkedin.common.urn.VersionedUrnUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -44,24 +47,30 @@ import javax.annotation.Nonnull; import org.apache.commons.collections.CollectionUtils; -import static com.linkedin.common.urn.VersionedUrnUtils.*; -import static com.linkedin.metadata.Constants.*; - public class TimelineServiceImpl implements TimelineService { - private static final long DEFAULT_LOOKBACK_TIME_WINDOW_MILLIS = 7 * 24 * 60 * 60 * 1000L; // 1 week lookback + private static final long DEFAULT_LOOKBACK_TIME_WINDOW_MILLIS = + 7 * 24 * 60 * 60 * 1000L; // 1 week lookback private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final long FIRST_TRANSACTION_ID = 0; private static final String BUILD_VALUE_COMPUTED = "computed"; private final AspectDao _aspectDao; private final EntityChangeEventGeneratorFactory _entityChangeEventGeneratorFactory; private final EntityRegistry _entityRegistry; - private final HashMap<String, HashMap<ChangeCategory, Set<String>>> entityTypeElementAspectRegistry = new HashMap<>(); + private final HashMap<String, HashMap<ChangeCategory, Set<String>>> + entityTypeElementAspectRegistry = new HashMap<>(); public TimelineServiceImpl(@Nonnull AspectDao 
aspectDao, @Nonnull EntityRegistry entityRegistry) { this._aspectDao = aspectDao; @@ -76,56 +85,97 @@ public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry for (ChangeCategory elementName : ChangeCategory.values()) { Set<String> aspects = new HashSet<>(); switch (elementName) { - case TAG: { - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - aspects.add(GLOBAL_TAGS_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, GLOBAL_TAGS_ASPECT_NAME, - new GlobalTagsChangeEventGenerator()); - } + case TAG: + { + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + aspects.add(GLOBAL_TAGS_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + GLOBAL_TAGS_ASPECT_NAME, + new GlobalTagsChangeEventGenerator()); + } break; - case OWNER: { - aspects.add(OWNERSHIP_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, OWNERSHIP_ASPECT_NAME, - new OwnershipChangeEventGenerator()); - } + case OWNER: + { + aspects.add(OWNERSHIP_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + OWNERSHIP_ASPECT_NAME, + new OwnershipChangeEventGenerator()); + } break; - case DOCUMENTATION: { - aspects.add(INSTITUTIONAL_MEMORY_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, INSTITUTIONAL_MEMORY_ASPECT_NAME, - new InstitutionalMemoryChangeEventGenerator()); - aspects.add(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - new EditableDatasetPropertiesChangeEventGenerator()); - aspects.add(DATASET_PROPERTIES_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, DATASET_PROPERTIES_ASPECT_NAME, - new DatasetPropertiesChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - } + case DOCUMENTATION: + { + aspects.add(INSTITUTIONAL_MEMORY_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + new InstitutionalMemoryChangeEventGenerator()); + aspects.add(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + new EditableDatasetPropertiesChangeEventGenerator()); + 
aspects.add(DATASET_PROPERTIES_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + DATASET_PROPERTIES_ASPECT_NAME, + new DatasetPropertiesChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + } break; - case GLOSSARY_TERM: { - aspects.add(GLOSSARY_TERMS_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, GLOSSARY_TERMS_ASPECT_NAME, - new GlossaryTermsChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - } + case GLOSSARY_TERM: + { + aspects.add(GLOSSARY_TERMS_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + GLOSSARY_TERMS_ASPECT_NAME, + new GlossaryTermsChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + } break; - case TECHNICAL_SCHEMA: { - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - } + case TECHNICAL_SCHEMA: + { + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + } break; default: break; @@ -139,25 +189,34 @@ public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry for (ChangeCategory elementName : ChangeCategory.values()) { Set<String> aspects = new HashSet<>(); switch (elementName) { - case OWNER: { - aspects.add(OWNERSHIP_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityTypeGlossaryTerm, elementName, OWNERSHIP_ASPECT_NAME, - new OwnershipChangeEventGenerator()); - } - break; - case DOCUMENTATION: { - aspects.add(GLOSSARY_TERM_INFO_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityTypeGlossaryTerm, elementName, GLOSSARY_TERM_INFO_ASPECT_NAME, - new GlossaryTermInfoChangeEventGenerator()); - } - break; + case OWNER: + { + aspects.add(OWNERSHIP_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityTypeGlossaryTerm, + elementName, + OWNERSHIP_ASPECT_NAME, + new OwnershipChangeEventGenerator()); + } + break; + case DOCUMENTATION: + { + aspects.add(GLOSSARY_TERM_INFO_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityTypeGlossaryTerm, + elementName, + GLOSSARY_TERM_INFO_ASPECT_NAME, + new GlossaryTermInfoChangeEventGenerator()); + } + break; default: break; } glossaryTermElementAspectRegistry.put(elementName, aspects); } entityTypeElementAspectRegistry.put(DATASET_ENTITY_NAME, datasetElementAspectRegistry); - entityTypeElementAspectRegistry.put(GLOSSARY_TERM_ENTITY_NAME, glossaryTermElementAspectRegistry); + entityTypeElementAspectRegistry.put( + GLOSSARY_TERM_ENTITY_NAME, 
glossaryTermElementAspectRegistry); } Set<String> getAspectsFromElements(String entityType, Set<ChangeCategory> elementNames) { @@ -173,15 +232,21 @@ Set<String> getAspectsFromElements(String entityType, Set<ChangeCategory> elemen @Nonnull @Override - public List<ChangeTransaction> getTimeline(@Nonnull final Urn urn, @Nonnull final Set<ChangeCategory> elementNames, - long startTimeMillis, long endTimeMillis, String startVersionStamp, String endVersionStamp, + public List<ChangeTransaction> getTimeline( + @Nonnull final Urn urn, + @Nonnull final Set<ChangeCategory> elementNames, + long startTimeMillis, + long endTimeMillis, + String startVersionStamp, + String endVersionStamp, boolean rawDiffRequested) { Set<String> aspectNames = getAspectsFromElements(urn.getEntityType(), elementNames); // TODO: Add more logic for defaults if (startVersionStamp != null && startTimeMillis != 0) { - throw new IllegalArgumentException("Cannot specify both VersionStamp start and timestamp start"); + throw new IllegalArgumentException( + "Cannot specify both VersionStamp start and timestamp start"); } if (endTimeMillis == 0) { @@ -195,58 +260,67 @@ public List<ChangeTransaction> getTimeline(@Nonnull final Urn urn, @Nonnull fina // query EntitySpec entitySpec = _entityRegistry.getEntitySpec(urn.getEntityType()); List<AspectSpec> aspectSpecs = entitySpec.getAspectSpecs(); - Set<String> fullAspectNames = aspectSpecs.stream() - .filter(aspectSpec -> !aspectSpec.isTimeseries()) - .map(AspectSpec::getName) - .collect(Collectors.toSet()); - List<EntityAspect> aspectsInRange = this._aspectDao.getAspectsInRange(urn, fullAspectNames, startTimeMillis, - endTimeMillis); + Set<String> fullAspectNames = + aspectSpecs.stream() + .filter(aspectSpec -> !aspectSpec.isTimeseries()) + .map(AspectSpec::getName) + .collect(Collectors.toSet()); + List<EntityAspect> aspectsInRange = + this._aspectDao.getAspectsInRange(urn, fullAspectNames, startTimeMillis, endTimeMillis); // Prepopulate with all versioned aspectNames -> ignore timeseries using // registry - Map<String, TreeSet<EntityAspect>> aspectRowSetMap = constructAspectRowSetMap(urn, fullAspectNames, aspectsInRange); + Map<String, TreeSet<EntityAspect>> aspectRowSetMap = + constructAspectRowSetMap(urn, fullAspectNames, aspectsInRange); - Map<Long, SortedMap<String, Long>> timestampVersionCache = constructTimestampVersionCache(aspectRowSetMap); + Map<Long, SortedMap<String, Long>> timestampVersionCache = + constructTimestampVersionCache(aspectRowSetMap); // TODO: There are some extra steps happening here, we need to clean up how // transactions get combined across differs - SortedMap<Long, List<ChangeTransaction>> semanticDiffs = aspectRowSetMap.entrySet() - .stream() - .filter(entry -> aspectNames.contains(entry.getKey())) - .map(Map.Entry::getValue) - .map(value -> computeDiffs(value, urn.getEntityType(), elementNames, rawDiffRequested)) - .collect(TreeMap::new, this::combineComputedDiffsPerTransactionId, this::combineComputedDiffsPerTransactionId); + SortedMap<Long, List<ChangeTransaction>> semanticDiffs = + aspectRowSetMap.entrySet().stream() + .filter(entry -> aspectNames.contains(entry.getKey())) + .map(Map.Entry::getValue) + .map(value -> computeDiffs(value, urn.getEntityType(), elementNames, rawDiffRequested)) + .collect( + TreeMap::new, + this::combineComputedDiffsPerTransactionId, + this::combineComputedDiffsPerTransactionId); // TODO:Move this down assignSemanticVersions(semanticDiffs); - List<ChangeTransaction> changeTransactions = 
semanticDiffs.values().stream().collect(ArrayList::new,
-        ArrayList::addAll, ArrayList::addAll);
-    List<ChangeTransaction> combinedChangeTransactions = combineTransactionsByTimestamp(changeTransactions,
-        timestampVersionCache);
+    List<ChangeTransaction> changeTransactions =
+        semanticDiffs.values().stream()
+            .collect(ArrayList::new, ArrayList::addAll, ArrayList::addAll);
+    List<ChangeTransaction> combinedChangeTransactions =
+        combineTransactionsByTimestamp(changeTransactions, timestampVersionCache);
 
     combinedChangeTransactions.sort(Comparator.comparing(ChangeTransaction::getTimestamp));
     return combinedChangeTransactions;
   }
 
   /**
-   * Constructs a map from aspect name to a sorted set of DB aspects by created
-   * timestamp. Set includes all aspects
-   * relevant to an entity and does a lookback by 1 for all aspects, creating
-   * sentinel values for when the oldest aspect
-   * possible has been retrieved or no value exists in the DB for an aspect
-   *
-   * @param urn urn of the entity
+   * Constructs a map from aspect name to a sorted set of DB aspects by created timestamp. Set
+   * includes all aspects relevant to an entity and does a lookback by 1 for all aspects, creating
+   * sentinel values for when the oldest aspect possible has been retrieved or no value exists in
+   * the DB for an aspect
+   *
+   * @param urn urn of the entity
    * @param fullAspectNames full list of aspects relevant to the entity
-   * @param aspectsInRange aspects returned by the range query by timestampm
+   * @param aspectsInRange aspects returned by the range query by timestamp
    * @return map constructed as described
    */
-  private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(Urn urn, Set<String> fullAspectNames,
-      List<EntityAspect> aspectsInRange) {
+  private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(
+      Urn urn, Set<String> fullAspectNames, List<EntityAspect> aspectsInRange) {
     Map<String, TreeSet<EntityAspect>> aspectRowSetMap = new HashMap<>();
     fullAspectNames.forEach(
-        aspectName -> aspectRowSetMap.put(aspectName, new TreeSet<>(Comparator.comparing(EntityAspect::getCreatedOn))));
-    aspectsInRange.forEach(row -> {
-      TreeSet<EntityAspect> rowList = aspectRowSetMap.get(row.getAspect());
-      rowList.add(row);
-    });
+        aspectName ->
+            aspectRowSetMap.put(
+                aspectName, new TreeSet<>(Comparator.comparing(EntityAspect::getCreatedOn))));
+    aspectsInRange.forEach(
+        row -> {
+          TreeSet<EntityAspect> rowList = aspectRowSetMap.get(row.getAspect());
+          rowList.add(row);
+        });
 
     // we need to pull previous versions of these aspects that are currently at a 0
     Map<String, Long> nextVersions = _aspectDao.getNextVersions(urn.toString(), fullAspectNames);
@@ -267,9 +341,11 @@ private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(Urn urn, Set
       // get the next version
       long versionToGet = 0;
       if (oldestAspect != null) {
-        versionToGet = (oldestAspect.getVersion() == 0L) ? nextVersion - 1 : oldestAspect.getVersion() - 1;
+        versionToGet =
+            (oldestAspect.getVersion() == 0L) ?
nextVersion - 1 : oldestAspect.getVersion() - 1; } - EntityAspect row = _aspectDao.getAspect(urn.toString(), aspectMinVersion.getKey(), versionToGet); + EntityAspect row = + _aspectDao.getAspect(urn.toString(), aspectMinVersion.getKey(), versionToGet); if (row != null) { aspectRowSetMap.get(row.getAspect()).add(row); } else { @@ -281,8 +357,7 @@ private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(Urn urn, Set } private boolean isOldestPossible(EntityAspect oldestAspect, long nextVersion) { - return (((oldestAspect.getVersion() == 0L) - && (nextVersion == 1L)) + return (((oldestAspect.getVersion() == 0L) && (nextVersion == 1L)) || (oldestAspect.getVersion() == 1L)); } @@ -295,19 +370,20 @@ private MissingEntityAspect createSentinel(String aspectName) { } /** - * Constructs a map from timestamp to a sorted map of aspect name -> version for - * use in constructing the version stamp - * - * @param aspectRowSetMap map constructed as described in - * {@link TimelineServiceImpl#constructAspectRowSetMap} + * Constructs a map from timestamp to a sorted map of aspect name -> version for use in + * constructing the version stamp + * + * @param aspectRowSetMap map constructed as described in {@link + * TimelineServiceImpl#constructAspectRowSetMap} * @return map as described */ private Map<Long, SortedMap<String, Long>> constructTimestampVersionCache( Map<String, TreeSet<EntityAspect>> aspectRowSetMap) { - Set<EntityAspect> aspects = aspectRowSetMap.values().stream() - .flatMap(TreeSet::stream) - .filter(aspect -> aspect.getVersion() != -1L) - .collect(Collectors.toSet()); + Set<EntityAspect> aspects = + aspectRowSetMap.values().stream() + .flatMap(TreeSet::stream) + .filter(aspect -> aspect.getVersion() != -1L) + .collect(Collectors.toSet()); Map<Long, SortedMap<String, Long>> timestampVersionCache = new HashMap<>(); for (EntityAspect aspect : aspects) { if (timestampVersionCache.containsKey(aspect.getCreatedOn().getTime())) { @@ -341,8 +417,11 @@ private Map<Long, SortedMap<String, Long>> constructTimestampVersionCache( return timestampVersionCache; } - private SortedMap<Long, List<ChangeTransaction>> computeDiffs(TreeSet<EntityAspect> aspectTimeline, - String entityType, Set<ChangeCategory> elementNames, boolean rawDiffsRequested) { + private SortedMap<Long, List<ChangeTransaction>> computeDiffs( + TreeSet<EntityAspect> aspectTimeline, + String entityType, + Set<ChangeCategory> elementNames, + boolean rawDiffsRequested) { EntityAspect previousValue = null; SortedMap<Long, List<ChangeTransaction>> changeTransactionsMap = new TreeMap<>(); long transactionId; @@ -350,7 +429,8 @@ private SortedMap<Long, List<ChangeTransaction>> computeDiffs(TreeSet<EntityAspe transactionId = currentValue.getCreatedOn().getTime(); if (previousValue != null) { // we skip the first element and only compare once we have two in hand - changeTransactionsMap.put(transactionId, + changeTransactionsMap.put( + transactionId, computeDiff(previousValue, currentValue, entityType, elementNames, rawDiffsRequested)); } previousValue = currentValue; @@ -358,30 +438,39 @@ private SortedMap<Long, List<ChangeTransaction>> computeDiffs(TreeSet<EntityAspe return changeTransactionsMap; } - private List<ChangeTransaction> computeDiff(@Nonnull EntityAspect previousValue, @Nonnull EntityAspect currentValue, - String entityType, Set<ChangeCategory> elementNames, boolean rawDiffsRequested) { + private List<ChangeTransaction> computeDiff( + @Nonnull EntityAspect previousValue, + @Nonnull EntityAspect currentValue, + String 
entityType, + Set<ChangeCategory> elementNames, + boolean rawDiffsRequested) { String aspectName = currentValue.getAspect(); List<ChangeTransaction> semanticChangeTransactions = new ArrayList<>(); JsonPatch rawDiff = getRawDiff(previousValue, currentValue); for (ChangeCategory element : elementNames) { EntityChangeEventGenerator entityChangeEventGenerator; - entityChangeEventGenerator = _entityChangeEventGeneratorFactory.getGenerator(entityType, element, aspectName); + entityChangeEventGenerator = + _entityChangeEventGeneratorFactory.getGenerator(entityType, element, aspectName); if (entityChangeEventGenerator != null) { try { ChangeTransaction changeTransaction = - entityChangeEventGenerator.getSemanticDiff(previousValue, currentValue, element, rawDiff, - rawDiffsRequested); + entityChangeEventGenerator.getSemanticDiff( + previousValue, currentValue, element, rawDiff, rawDiffsRequested); if (CollectionUtils.isNotEmpty(changeTransaction.getChangeEvents())) { semanticChangeTransactions.add(changeTransaction); } } catch (Exception e) { - semanticChangeTransactions.add(ChangeTransaction.builder() - .semVerChange(SemanticChangeType.EXCEPTIONAL) - .changeEvents(Collections.singletonList(ChangeEvent.builder() - .description(String.format("%s:%s", e.getClass().getName(), e.getMessage())) - .build())) - .build()); + semanticChangeTransactions.add( + ChangeTransaction.builder() + .semVerChange(SemanticChangeType.EXCEPTIONAL) + .changeEvents( + Collections.singletonList( + ChangeEvent.builder() + .description( + String.format("%s:%s", e.getClass().getName(), e.getMessage())) + .build())) + .build()); } } } @@ -401,7 +490,8 @@ private JsonPatch getRawDiff(EntityAspect previousValue, EntityAspect currentVal } } - private void combineComputedDiffsPerTransactionId(@Nonnull SortedMap<Long, List<ChangeTransaction>> semanticDiffs, + private void combineComputedDiffsPerTransactionId( + @Nonnull SortedMap<Long, List<ChangeTransaction>> semanticDiffs, @Nonnull SortedMap<Long, List<ChangeTransaction>> computedDiffs) { for (Map.Entry<Long, List<ChangeTransaction>> entry : computedDiffs.entrySet()) { if (!semanticDiffs.containsKey(entry.getKey())) { @@ -414,18 +504,22 @@ private void combineComputedDiffsPerTransactionId(@Nonnull SortedMap<Long, List< } } - private void assignSemanticVersions(SortedMap<Long, List<ChangeTransaction>> changeTransactionsMap) { + private void assignSemanticVersions( + SortedMap<Long, List<ChangeTransaction>> changeTransactionsMap) { SemanticVersion curGroupVersion = null; long transactionId = FIRST_TRANSACTION_ID - 1; for (Map.Entry<Long, List<ChangeTransaction>> entry : changeTransactionsMap.entrySet()) { if (transactionId >= entry.getKey()) { - throw new IllegalArgumentException(String.format("transactionId should be < previous. %s >= %s", - transactionId, entry.getKey())); + throw new IllegalArgumentException( + String.format( + "transactionId should be < previous. 
%s >= %s", transactionId, entry.getKey())); } transactionId = entry.getKey(); SemanticChangeType highestChangeInGroup = SemanticChangeType.NONE; - ChangeTransaction highestChangeTransaction = entry.getValue().stream() - .max(Comparator.comparing(ChangeTransaction::getSemVerChange)).orElse(null); + ChangeTransaction highestChangeTransaction = + entry.getValue().stream() + .max(Comparator.comparing(ChangeTransaction::getSemVerChange)) + .orElse(null); if (highestChangeTransaction != null) { highestChangeInGroup = highestChangeTransaction.getSemVerChange(); } @@ -436,8 +530,8 @@ private void assignSemanticVersions(SortedMap<Long, List<ChangeTransaction>> cha } } - private SemanticVersion getGroupSemanticVersion(SemanticChangeType highestChangeInGroup, - SemanticVersion previousVersion) { + private SemanticVersion getGroupSemanticVersion( + SemanticChangeType highestChangeInGroup, SemanticVersion previousVersion) { if (previousVersion == null) { // Start with all 0s if there is no previous version. return SemanticVersion.builder() @@ -477,10 +571,11 @@ private SemanticVersion getGroupSemanticVersion(SemanticChangeType highestChange return previousVersion; } - private List<ChangeTransaction> combineTransactionsByTimestamp(List<ChangeTransaction> changeTransactions, + private List<ChangeTransaction> combineTransactionsByTimestamp( + List<ChangeTransaction> changeTransactions, Map<Long, SortedMap<String, Long>> timestampVersionCache) { - Map<Long, List<ChangeTransaction>> transactionsByTimestamp = changeTransactions.stream() - .collect(Collectors.groupingBy(ChangeTransaction::getTimestamp)); + Map<Long, List<ChangeTransaction>> transactionsByTimestamp = + changeTransactions.stream().collect(Collectors.groupingBy(ChangeTransaction::getTimestamp)); List<ChangeTransaction> combinedChangeTransactions = new ArrayList<>(); for (List<ChangeTransaction> transactionList : transactionsByTimestamp.values()) { if (!transactionList.isEmpty()) { @@ -490,14 +585,17 @@ private List<ChangeTransaction> combineTransactionsByTimestamp(List<ChangeTransa for (int i = 1; i < transactionList.size(); i++) { ChangeTransaction element = transactionList.get(i); result.getChangeEvents().addAll(element.getChangeEvents()); - maxSemanticChangeType = maxSemanticChangeType.compareTo(element.getSemVerChange()) >= 0 - ? maxSemanticChangeType - : element.getSemVerChange(); - maxSemVer = maxSemVer.compareTo(element.getSemVer()) >= 0 ? maxSemVer : element.getSemVer(); + maxSemanticChangeType = + maxSemanticChangeType.compareTo(element.getSemVerChange()) >= 0 + ? maxSemanticChangeType + : element.getSemVerChange(); + maxSemVer = + maxSemVer.compareTo(element.getSemVer()) >= 0 ? 
maxSemVer : element.getSemVer(); } result.setSemVerChange(maxSemanticChangeType); result.setSemanticVersion(maxSemVer); - result.setVersionStamp(constructVersionStamp(timestampVersionCache.get(result.getTimestamp()))); + result.setVersionStamp( + constructVersionStamp(timestampVersionCache.get(result.getTimestamp()))); combinedChangeTransactions.add(result); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java index a9c5d56a7e445..84308d9b2311f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java @@ -27,8 +27,7 @@ public DatasetSchemaFieldChangeEvent( String description, String fieldPath, Urn fieldUrn, - boolean nullable - ) { + boolean nullable) { super( entityUrn, category, @@ -37,12 +36,9 @@ public DatasetSchemaFieldChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "fieldUrn", fieldUrn.toString(), - "nullable", nullable - ), + "nullable", nullable), auditStamp, semVerChange, - description - ); + description); } } - diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java index eddacf3714f61..f1d9862fb33a2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java @@ -4,8 +4,8 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeCategory; -import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.ChangeEvent; +import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.SemanticChangeType; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -27,8 +27,7 @@ public SchemaFieldGlossaryTermChangeEvent( String description, String fieldPath, Urn parentUrn, - Urn termUrn - ) { + Urn termUrn) { super( entityUrn, category, @@ -37,11 +36,9 @@ public SchemaFieldGlossaryTermChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "parentUrn", parentUrn.toString(), - "termUrn", termUrn.toString() - ), + "termUrn", termUrn.toString()), auditStamp, semVerChange, - description - ); + description); } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java index b4553f9048e3a..10e6ae6ca4af8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java @@ -4,8 +4,8 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeCategory; -import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.ChangeEvent; +import 
com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.SemanticChangeType; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -27,8 +27,7 @@ public SchemaFieldTagChangeEvent( String description, String fieldPath, Urn parentUrn, - Urn tagUrn - ) { + Urn tagUrn) { super( entityUrn, category, @@ -37,11 +36,9 @@ public SchemaFieldTagChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "parentUrn", parentUrn.toString(), - "tagUrn", tagUrn.toString() - ), + "tagUrn", tagUrn.toString()), auditStamp, semVerChange, - description - ); + description); } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java index 5a306635f6a81..33dfdb68cb9e6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java @@ -12,7 +12,6 @@ import lombok.Getter; import lombok.Value; - @EqualsAndHashCode(callSuper = true) @Value @Getter @@ -26,19 +25,15 @@ public DomainChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn domainUrn - ) { + Urn domainUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "domainUrn", domainUrn.toString() - ), + ImmutableMap.of("domainUrn", domainUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java index dfa659dad67b1..564dc63c1a678 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java @@ -13,7 +13,6 @@ import lombok.Value; import lombok.experimental.NonFinal; - @EqualsAndHashCode(callSuper = true) @Value @NonFinal @@ -28,19 +27,15 @@ public GlossaryTermChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn termUrn - ) { + Urn termUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "termUrn", termUrn.toString() - ), + ImmutableMap.of("termUrn", termUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java index b9efe7113bcfb..fc4f0327b7704 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java @@ -27,8 +27,7 @@ public OwnerChangeEvent( SemanticChangeType semVerChange, String description, Urn ownerUrn, - OwnershipType ownerType - ) { + OwnershipType ownerType) { super( entityUrn, category, @@ -36,11 +35,9 @@ public OwnerChangeEvent( modifier, ImmutableMap.of( "ownerUrn", ownerUrn.toString(), - "ownerType", ownerType.toString() - ), + "ownerType", ownerType.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java 
b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java index 09dba21ff3988..b19a4a1558ab6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java @@ -13,7 +13,6 @@ import lombok.Value; import lombok.experimental.NonFinal; - @EqualsAndHashCode(callSuper = true) @Value @NonFinal @@ -28,19 +27,15 @@ public TagChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn tagUrn - ) { + Urn tagUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "tagUrn", tagUrn.toString() - ), + ImmutableMap.of("tagUrn", tagUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java index 60008826afc61..f83eded55ff9c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java @@ -5,20 +5,13 @@ import lombok.AllArgsConstructor; import lombok.Value; - -/** - * Thin wrapper for an aspect value which is used within the Entity Change Event API. - */ +/** Thin wrapper for an aspect value which is used within the Entity Change Event API. */ @Value @AllArgsConstructor public class Aspect<T extends RecordTemplate> { - /** - * The aspect value itself. - */ + /** The aspect value itself. */ T value; - /** - * System metadata - */ + /** System metadata */ SystemMetadata systemMetadata; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java index b615189fe7314..1a8e54e5baf4a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSortedMap; import com.linkedin.assertion.AssertionResult; import com.linkedin.assertion.AssertionRunEvent; @@ -14,10 +16,8 @@ import java.util.Map; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class AssertionRunEventChangeEventGenerator extends EntityChangeEventGenerator<AssertionRunEvent> { +public class AssertionRunEventChangeEventGenerator + extends EntityChangeEventGenerator<AssertionRunEvent> { @Override public List<ChangeEvent> getChangeEvents( @Nonnull Urn urn, @@ -39,22 +39,22 @@ private List<ChangeEvent> computeDiffs( boolean isNewCompleted = isCompleted(newAspect); if (isNewCompleted && !isPreviousCompleted) { - return Collections.singletonList(ChangeEvent.builder() - .category(ChangeCategory.RUN) - .operation(ChangeOperation.COMPLETED) - .auditStamp(auditStamp) - .entityUrn(entityUrn) - .parameters(buildParameters(newAspect)) - .build()); + return Collections.singletonList( + ChangeEvent.builder() + .category(ChangeCategory.RUN) + .operation(ChangeOperation.COMPLETED) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + 
.parameters(buildParameters(newAspect)) + .build()); } return Collections.emptyList(); } private boolean isCompleted(final AssertionRunEvent assertionRunEvent) { - return assertionRunEvent != null && assertionRunEvent.getStatus() - .toString() - .equals(ASSERTION_RUN_EVENT_STATUS_COMPLETE); + return assertionRunEvent != null + && assertionRunEvent.getStatus().toString().equals(ASSERTION_RUN_EVENT_STATUS_COMPLETE); } @Nonnull diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java index e5237cc5abc39..f6192294e5701 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java @@ -13,25 +13,29 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class ChangeEventGeneratorUtils { - public static Urn getSchemaFieldUrn(@Nonnull String datasetUrnStr, @Nonnull String schemaFieldPath) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrnStr, schemaFieldPath)); + public static Urn getSchemaFieldUrn( + @Nonnull String datasetUrnStr, @Nonnull String schemaFieldPath) { + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrnStr, schemaFieldPath)); } public static Urn getSchemaFieldUrn(@Nonnull Urn datasetUrn, @Nonnull String schemaFieldPath) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrn.toString(), schemaFieldPath)); + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrn.toString(), schemaFieldPath)); } public static Urn getSchemaFieldUrn(@Nonnull Urn datasetUrn, @Nonnull SchemaField schemaField) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrn, getFieldPathV1(schemaField))); + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrn, getFieldPathV1(schemaField))); } public static String getFieldPathV1(@Nonnull SchemaField field) { - String[] v1PathTokens = Arrays.stream(field.getFieldPath().split("\\.")) - .filter(x -> !(x.startsWith("[") || x.endsWith("]"))) - .toArray(String[]::new); + String[] v1PathTokens = + Arrays.stream(field.getFieldPath().split("\\.")) + .filter(x -> !(x.startsWith("[") || x.endsWith("]"))) + .toArray(String[]::new); return String.join(".", v1PathTokens); } @@ -42,19 +46,22 @@ public static List<ChangeEvent> convertEntityTagChangeEvents( return entityTagChangeEvents.stream() .filter(entityTagChangeEvent -> entityTagChangeEvent instanceof TagChangeEvent) .map(entityTagChangeEvent -> (TagChangeEvent) entityTagChangeEvent) - .map(entityTagChangeEvent -> - SchemaFieldTagChangeEvent.schemaFieldTagChangeEventBuilder() - .modifier(entityTagChangeEvent.getModifier()) - .entityUrn(entityTagChangeEvent.getEntityUrn()) - .category(entityTagChangeEvent.getCategory()) - .operation(entityTagChangeEvent.getOperation()) - .semVerChange(entityTagChangeEvent.getSemVerChange()) - .description(entityTagChangeEvent.getDescription()) - .tagUrn(UrnUtils.getUrn((String) entityTagChangeEvent.getParameters().get("tagUrn"))) - .auditStamp(entityTagChangeEvent.getAuditStamp()) - .fieldPath(fieldPath) - .parentUrn(parentUrn) - .build()) + .map( + entityTagChangeEvent -> + SchemaFieldTagChangeEvent.schemaFieldTagChangeEventBuilder() + .modifier(entityTagChangeEvent.getModifier()) + 
.entityUrn(entityTagChangeEvent.getEntityUrn()) + .category(entityTagChangeEvent.getCategory()) + .operation(entityTagChangeEvent.getOperation()) + .semVerChange(entityTagChangeEvent.getSemVerChange()) + .description(entityTagChangeEvent.getDescription()) + .tagUrn( + UrnUtils.getUrn( + (String) entityTagChangeEvent.getParameters().get("tagUrn"))) + .auditStamp(entityTagChangeEvent.getAuditStamp()) + .fieldPath(fieldPath) + .parentUrn(parentUrn) + .build()) .collect(Collectors.toList()); } @@ -63,23 +70,30 @@ public static List<ChangeEvent> convertEntityGlossaryTermChangeEvents( @Nonnull Urn parentUrn, @Nonnull List<ChangeEvent> entityGlossaryTermChangeEvents) { return entityGlossaryTermChangeEvents.stream() - .filter(entityGlossaryTermChangeEvent -> entityGlossaryTermChangeEvent instanceof GlossaryTermChangeEvent) - .map(entityGlossaryTermChangeEvent -> (GlossaryTermChangeEvent) entityGlossaryTermChangeEvent) - .map(entityGlossaryTermChangeEvent -> - SchemaFieldGlossaryTermChangeEvent.schemaFieldGlossaryTermChangeEventBuilder() - .modifier(entityGlossaryTermChangeEvent.getModifier()) - .entityUrn(entityGlossaryTermChangeEvent.getEntityUrn()) - .category(entityGlossaryTermChangeEvent.getCategory()) - .operation(entityGlossaryTermChangeEvent.getOperation()) - .semVerChange(entityGlossaryTermChangeEvent.getSemVerChange()) - .description(entityGlossaryTermChangeEvent.getDescription()) - .termUrn(UrnUtils.getUrn((String) entityGlossaryTermChangeEvent.getParameters().get("termUrn"))) - .auditStamp(entityGlossaryTermChangeEvent.getAuditStamp()) - .fieldPath(fieldPath) - .parentUrn(parentUrn) - .build()) + .filter( + entityGlossaryTermChangeEvent -> + entityGlossaryTermChangeEvent instanceof GlossaryTermChangeEvent) + .map( + entityGlossaryTermChangeEvent -> + (GlossaryTermChangeEvent) entityGlossaryTermChangeEvent) + .map( + entityGlossaryTermChangeEvent -> + SchemaFieldGlossaryTermChangeEvent.schemaFieldGlossaryTermChangeEventBuilder() + .modifier(entityGlossaryTermChangeEvent.getModifier()) + .entityUrn(entityGlossaryTermChangeEvent.getEntityUrn()) + .category(entityGlossaryTermChangeEvent.getCategory()) + .operation(entityGlossaryTermChangeEvent.getOperation()) + .semVerChange(entityGlossaryTermChangeEvent.getSemVerChange()) + .description(entityGlossaryTermChangeEvent.getDescription()) + .termUrn( + UrnUtils.getUrn( + (String) entityGlossaryTermChangeEvent.getParameters().get("termUrn"))) + .auditStamp(entityGlossaryTermChangeEvent.getAuditStamp()) + .fieldPath(fieldPath) + .parentUrn(parentUrn) + .build()) .collect(Collectors.toList()); } - private ChangeEventGeneratorUtils() { } + private ChangeEventGeneratorUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java index a3e5a051a47e3..ca30060b5ed29 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.dataprocess.DataProcessInstanceRelationships; @@ -18,15 +20,13 @@ import javax.annotation.Nonnull; 
import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class DataProcessInstanceRunEventChangeEventGenerator extends EntityChangeEventGenerator<DataProcessInstanceRunEvent> { private static final String COMPLETED_STATUS = "COMPLETED"; private static final String STARTED_STATUS = "STARTED"; - public DataProcessInstanceRunEventChangeEventGenerator(@Nonnull final SystemEntityClient entityClient) { + public DataProcessInstanceRunEventChangeEventGenerator( + @Nonnull final SystemEntityClient entityClient) { super(entityClient); } @@ -50,15 +50,17 @@ private List<ChangeEvent> computeDiffs( final DataProcessRunStatus newStatus = getStatus(newAspect); if (newStatus != null && !newStatus.equals(previousStatus)) { - String operationType = newStatus.equals(DataProcessRunStatus.COMPLETE) ? COMPLETED_STATUS : STARTED_STATUS; - - return Collections.singletonList(ChangeEvent.builder() - .category(ChangeCategory.RUN) - .operation(ChangeOperation.valueOf(operationType)) - .auditStamp(auditStamp) - .entityUrn(entityUrn) - .parameters(buildParameters(newAspect, entityUrn)) - .build()); + String operationType = + newStatus.equals(DataProcessRunStatus.COMPLETE) ? COMPLETED_STATUS : STARTED_STATUS; + + return Collections.singletonList( + ChangeEvent.builder() + .category(ChangeCategory.RUN) + .operation(ChangeOperation.valueOf(operationType)) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + .parameters(buildParameters(newAspect, entityUrn)) + .build()); } return Collections.emptyList(); @@ -70,8 +72,8 @@ private DataProcessRunStatus getStatus(DataProcessInstanceRunEvent dataProcessIn } @Nonnull - private Map<String, Object> buildParameters(@Nonnull final DataProcessInstanceRunEvent runEvent, - @Nonnull final String entityUrnString) { + private Map<String, Object> buildParameters( + @Nonnull final DataProcessInstanceRunEvent runEvent, @Nonnull final String entityUrnString) { final Map<String, Object> parameters = new HashMap<>(); if (runEvent.hasAttempt()) { parameters.put(ATTEMPT_KEY, runEvent.getAttempt()); @@ -106,8 +108,9 @@ private DataProcessInstanceRelationships getRelationships(@Nonnull final String EntityResponse entityResponse; try { entityUrn = Urn.createFromString(entityUrnString); - entityResponse = _entityClient.getV2(entityUrn, - Collections.singleton(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME)); + entityResponse = + _entityClient.getV2( + entityUrn, Collections.singleton(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME)); } catch (Exception e) { return null; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java index ddfa6530c6999..850df81675b8c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -17,46 +20,55 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static 
com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; - - -public class DatasetPropertiesChangeEventGenerator extends EntityChangeEventGenerator<DatasetProperties> { - private static List<ChangeEvent> computeDiffs(DatasetProperties baseDatasetProperties, - @Nonnull DatasetProperties targetDatasetProperties, @Nonnull String entityUrn, AuditStamp auditStamp) { +public class DatasetPropertiesChangeEventGenerator + extends EntityChangeEventGenerator<DatasetProperties> { + private static List<ChangeEvent> computeDiffs( + DatasetProperties baseDatasetProperties, + @Nonnull DatasetProperties targetDatasetProperties, + @Nonnull String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; if (baseDescription == null && targetDescription != null) { // Description added - changeEvents.add(ChangeEvent.builder().entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) + .auditStamp(auditStamp) + .build()); } else if (baseDescription != null && targetDescription == null) { // Description removed. - changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { // Description has been modified. 
- changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .auditStamp(auditStamp) + .build()); } return changeEvents; } @@ -70,17 +82,23 @@ private static DatasetProperties getDatasetPropertiesFromAspect(EntityAspect ent } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME) || !currentValue.getAspect() - .equals(DATASET_PROPERTIES_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME) + || !currentValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + DATASET_PROPERTIES_ASPECT_NAME); } List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { DatasetProperties baseDatasetProperties = getDatasetPropertiesFromAspect(previousValue); DatasetProperties targetDatasetProperties = getDatasetPropertiesFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs( + baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java index 3ef6f51f99203..59516bfae0533 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java @@ -12,14 +12,16 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Differ responsible for determining whether an entity has been soft-deleted or soft-created. - */ +/** Differ responsible for determining whether an entity has been soft-deleted or soft-created. 
*/
 public class DeprecationChangeEventGenerator extends EntityChangeEventGenerator<Deprecation> {
   @Override
-  public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect,
-      @Nonnull Aspect<Deprecation> from, @Nonnull Aspect<Deprecation> to, @Nonnull AuditStamp auditStamp) {
+  public List<ChangeEvent> getChangeEvents(
+      @Nonnull Urn urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nonnull Aspect<Deprecation> from,
+      @Nonnull Aspect<Deprecation> to,
+      @Nonnull AuditStamp auditStamp) {
     return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp);
   }
 
@@ -31,19 +33,21 @@ private List<ChangeEvent> computeDiffs(
 
     // Ensure that it is the deprecation status which has actually been changed.
 
-    // If the entity was not previously deprecated, but is now deprecated, then return a deprecated event.
+    // If the entity was not previously deprecated, but is now deprecated, then return a deprecated
+    // event.
     if (!isDeprecated(baseDeprecation) && isDeprecated(targetDeprecation)) {
       return Collections.singletonList(
           ChangeEvent.builder()
-          .category(ChangeCategory.DEPRECATION)
-          .operation(ChangeOperation.MODIFY)
-          .entityUrn(entityUrn)
-          .auditStamp(auditStamp)
-          .parameters(ImmutableMap.of("status", "DEPRECATED"))
-          .build());
+              .category(ChangeCategory.DEPRECATION)
+              .operation(ChangeOperation.MODIFY)
+              .entityUrn(entityUrn)
+              .auditStamp(auditStamp)
+              .parameters(ImmutableMap.of("status", "DEPRECATED"))
+              .build());
     }
 
-    // If the entity was previously deprecated, but is not not deprecated, then return a un-deprecated event.
+    // If the entity was previously deprecated, but is no longer deprecated, then return an
+    // un-deprecated event.
     if (isDeprecated(baseDeprecation) && !isDeprecated(targetDeprecation)) {
       return Collections.singletonList(
           ChangeEvent.builder()
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java
index a10565a7c958b..1ffcd3cfc2ba4 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.timeline.eventgenerator;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.RecordUtils;
 import com.github.fge.jsonpatch.JsonPatch;
 import com.linkedin.common.AuditStamp;
@@ -16,30 +18,37 @@
 import java.util.List;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class EditableDatasetPropertiesChangeEventGenerator
     extends EntityChangeEventGenerator<EditableDatasetProperties> {
   public static final String DESCRIPTION_ADDED = "Documentation for '%s' has been added: '%s'.";
   public static final String DESCRIPTION_REMOVED = "Documentation for '%s' has been removed: '%s'.";
-  public static final String DESCRIPTION_CHANGED = "Documentation of '%s' has been changed from '%s' to '%s'.";
+  public static final String DESCRIPTION_CHANGED =
+      "Documentation of '%s' has been changed from '%s' to '%s'.";
 
-  private static List<ChangeEvent> computeDiffs(EditableDatasetProperties baseDatasetProperties,
-      EditableDatasetProperties targetDatasetProperties, String entityUrn, AuditStamp auditStamp) {
+  private static List<ChangeEvent>
computeDiffs( + EditableDatasetProperties baseDatasetProperties, + EditableDatasetProperties targetDatasetProperties, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); ChangeEvent descriptionChangeEvent = - getDescriptionChangeEvent(baseDatasetProperties, targetDatasetProperties, entityUrn, auditStamp); + getDescriptionChangeEvent( + baseDatasetProperties, targetDatasetProperties, entityUrn, auditStamp); if (descriptionChangeEvent != null) { changeEvents.add(descriptionChangeEvent); } return changeEvents; } - private static ChangeEvent getDescriptionChangeEvent(EditableDatasetProperties baseDatasetProperties, - EditableDatasetProperties targetDatasetProperties, String entityUrn, AuditStamp auditStamp) { - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; + private static ChangeEvent getDescriptionChangeEvent( + EditableDatasetProperties baseDatasetProperties, + EditableDatasetProperties targetDatasetProperties, + String entityUrn, + AuditStamp auditStamp) { + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; if (baseDescription == null && targetDescription != null) { // Description added return ChangeEvent.builder() @@ -60,45 +69,59 @@ private static ChangeEvent getDescriptionChangeEvent(EditableDatasetProperties b .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) .auditStamp(auditStamp) .build(); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { // Description has been modified. 
return ChangeEvent.builder() .entityUrn(entityUrn) .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.MODIFY) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) .auditStamp(auditStamp) .build(); } return null; } - private static EditableDatasetProperties getEditableDatasetPropertiesFromAspect(EntityAspect entityAspect) { + private static EditableDatasetProperties getEditableDatasetPropertiesFromAspect( + EntityAspect entityAspect) { if (entityAspect != null && entityAspect.getMetadata() != null) { - return RecordUtils.toRecordTemplate(EditableDatasetProperties.class, entityAspect.getMetadata()); + return RecordUtils.toRecordTemplate( + EditableDatasetProperties.class, entityAspect.getMetadata()); } return null; } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME) || !currentValue.getAspect() - .equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)) { - throw new IllegalArgumentException("Aspect is not " + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); + if (!previousValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME) + || !currentValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)) { + throw new IllegalArgumentException( + "Aspect is not " + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); } List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { - EditableDatasetProperties baseDatasetProperties = getEditableDatasetPropertiesFromAspect(previousValue); - EditableDatasetProperties targetDatasetProperties = getEditableDatasetPropertiesFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); + EditableDatasetProperties baseDatasetProperties = + getEditableDatasetPropertiesFromAspect(previousValue); + EditableDatasetProperties targetDatasetProperties = + getEditableDatasetPropertiesFromAspect(currentValue); + changeEvents.addAll( + computeDiffs( + baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
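Both description differs above reduce to the same add/remove/modify comparison over a nullable base and target string. The following is a minimal standalone sketch of that decision logic for readers skimming the reformatted generators; it is illustrative only and not part of this patch, and the class, enum, and method names are invented:

import javax.annotation.Nullable;

// Illustrative sketch of the shared description-diff decision; not part of the patch.
final class DescriptionDiffSketch {
  enum Op { ADD, REMOVE, MODIFY, NONE }

  static Op classify(@Nullable String base, @Nullable String target) {
    if (base == null && target != null) {
      return Op.ADD; // description added -> DOCUMENTATION event, MINOR semver change
    }
    if (base != null && target == null) {
      return Op.REMOVE; // description removed -> DOCUMENTATION event, MINOR semver change
    }
    if (base != null && !base.equals(target)) {
      return Op.MODIFY; // description edited -> DOCUMENTATION event, MINOR semver change
    }
    return Op.NONE; // no documentation change, no event emitted
  }
}

Each generator then wraps the non-NONE outcomes in a ChangeEvent.builder() call with the matching ChangeOperation, as shown in the hunks above.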
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java index 4a1de4c3421ed..1f094bb6ca989 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -16,7 +19,6 @@ import com.linkedin.schema.EditableSchemaFieldInfo; import com.linkedin.schema.EditableSchemaFieldInfoArray; import com.linkedin.schema.EditableSchemaMetadata; - import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -27,11 +29,8 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; - - -public class EditableSchemaMetadataChangeEventGenerator extends EntityChangeEventGenerator<EditableSchemaMetadata> { +public class EditableSchemaMetadataChangeEventGenerator + extends EntityChangeEventGenerator<EditableSchemaMetadata> { public static final String FIELD_DOCUMENTATION_ADDED_FORMAT = "Documentation for the field '%s' of '%s' has been added: '%s'"; public static final String FIELD_DOCUMENTATION_REMOVED_FORMAT = @@ -42,45 +41,59 @@ public class EditableSchemaMetadataChangeEventGenerator extends EntityChangeEven Stream.of(ChangeCategory.DOCUMENTATION, ChangeCategory.TAG, ChangeCategory.GLOSSARY_TERM) .collect(Collectors.toSet()); - private static void sortEditableSchemaMetadataByFieldPath(EditableSchemaMetadata editableSchemaMetadata) { + private static void sortEditableSchemaMetadataByFieldPath( + EditableSchemaMetadata editableSchemaMetadata) { if (editableSchemaMetadata == null) { return; } List<EditableSchemaFieldInfo> editableSchemaFieldInfos = new ArrayList<>(editableSchemaMetadata.getEditableSchemaFieldInfo()); editableSchemaFieldInfos.sort(Comparator.comparing(EditableSchemaFieldInfo::getFieldPath)); - editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray(editableSchemaFieldInfos)); + editableSchemaMetadata.setEditableSchemaFieldInfo( + new EditableSchemaFieldInfoArray(editableSchemaFieldInfos)); } - private static List<ChangeEvent> getAllChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, String entityUrn, ChangeCategory changeCategory, + private static List<ChangeEvent> getAllChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + String entityUrn, + ChangeCategory changeCategory, AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); Urn datasetFieldUrn = getDatasetFieldUrn(baseFieldInfo, targetFieldInfo, entityUrn); if (changeCategory == ChangeCategory.DOCUMENTATION) { - ChangeEvent documentationChangeEvent = getDocumentationChangeEvent(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp); + ChangeEvent documentationChangeEvent = + getDocumentationChangeEvent(baseFieldInfo, 
targetFieldInfo, datasetFieldUrn, auditStamp); if (documentationChangeEvent != null) { changeEvents.add(documentationChangeEvent); } } if (changeCategory == ChangeCategory.TAG) { - changeEvents.addAll(getTagChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); + changeEvents.addAll( + getTagChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); } if (changeCategory == ChangeCategory.GLOSSARY_TERM) { - changeEvents.addAll(getGlossaryTermChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); + changeEvents.addAll( + getGlossaryTermChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); } return changeEvents; } - private static List<ChangeEvent> computeDiffs(EditableSchemaMetadata baseEditableSchemaMetadata, - EditableSchemaMetadata targetEditableSchemaMetadata, String entityUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + EditableSchemaMetadata baseEditableSchemaMetadata, + EditableSchemaMetadata targetEditableSchemaMetadata, + String entityUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { sortEditableSchemaMetadataByFieldPath(baseEditableSchemaMetadata); sortEditableSchemaMetadataByFieldPath(targetEditableSchemaMetadata); List<ChangeEvent> changeEvents = new ArrayList<>(); EditableSchemaFieldInfoArray baseFieldInfos = - (baseEditableSchemaMetadata != null) ? baseEditableSchemaMetadata.getEditableSchemaFieldInfo() + (baseEditableSchemaMetadata != null) + ? baseEditableSchemaMetadata.getEditableSchemaFieldInfo() : new EditableSchemaFieldInfoArray(); - EditableSchemaFieldInfoArray targetFieldInfos = targetEditableSchemaMetadata.getEditableSchemaFieldInfo(); + EditableSchemaFieldInfoArray targetFieldInfos = + targetEditableSchemaMetadata.getEditableSchemaFieldInfo(); int baseIdx = 0; int targetIdx = 0; while (baseIdx < baseFieldInfos.size() && targetIdx < targetFieldInfos.size()) { @@ -88,16 +101,20 @@ private static List<ChangeEvent> computeDiffs(EditableSchemaMetadata baseEditabl EditableSchemaFieldInfo targetFieldInfo = targetFieldInfos.get(targetIdx); int comparison = baseFieldInfo.getFieldPath().compareTo(targetFieldInfo.getFieldPath()); if (comparison == 0) { - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents( + baseFieldInfo, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++baseIdx; ++targetIdx; } else if (comparison < 0) { // EditableFieldInfo got removed. - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); ++baseIdx; } else { // EditableFieldInfo got added. 
- changeEvents.addAll(getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++targetIdx; } } @@ -105,29 +122,36 @@ private static List<ChangeEvent> computeDiffs(EditableSchemaMetadata baseEditabl while (baseIdx < baseFieldInfos.size()) { // Handle removed baseFieldInfo EditableSchemaFieldInfo baseFieldInfo = baseFieldInfos.get(baseIdx); - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); ++baseIdx; } while (targetIdx < targetFieldInfos.size()) { // Handle newly added targetFieldInfo EditableSchemaFieldInfo targetFieldInfo = targetFieldInfos.get(targetIdx); - changeEvents.addAll(getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++targetIdx; } return changeEvents; } - private static EditableSchemaMetadata getEditableSchemaMetadataFromAspect(EntityAspect entityAspect) { + private static EditableSchemaMetadata getEditableSchemaMetadataFromAspect( + EntityAspect entityAspect) { if (entityAspect != null && entityAspect.getMetadata() != null) { return RecordUtils.toRecordTemplate(EditableSchemaMetadata.class, entityAspect.getMetadata()); } return null; } - private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { + private static ChangeEvent getDocumentationChangeEvent( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { String baseFieldDescription = (baseFieldInfo != null) ? baseFieldInfo.getDescription() : null; - String targetFieldDescription = (targetFieldInfo != null) ? targetFieldInfo.getDescription() : null; + String targetFieldDescription = + (targetFieldInfo != null) ? 
targetFieldInfo.getDescription() : null; if (baseFieldDescription == null && targetFieldDescription != null) { return ChangeEvent.builder() @@ -136,8 +160,12 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.ADD) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(FIELD_DOCUMENTATION_ADDED_FORMAT, targetFieldInfo.getFieldPath(), datasetFieldUrn, - targetFieldDescription)) + .description( + String.format( + FIELD_DOCUMENTATION_ADDED_FORMAT, + targetFieldInfo.getFieldPath(), + datasetFieldUrn, + targetFieldDescription)) .auditStamp(auditStamp) .build(); } @@ -149,23 +177,32 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.REMOVE) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(FIELD_DOCUMENTATION_REMOVED_FORMAT, + .description( + String.format( + FIELD_DOCUMENTATION_REMOVED_FORMAT, Optional.ofNullable(targetFieldInfo).map(EditableSchemaFieldInfo::getFieldPath), - datasetFieldUrn, baseFieldDescription)) + datasetFieldUrn, + baseFieldDescription)) .auditStamp(auditStamp) .build(); } - if (baseFieldDescription != null && targetFieldDescription != null && !baseFieldDescription.equals( - targetFieldDescription)) { + if (baseFieldDescription != null + && targetFieldDescription != null + && !baseFieldDescription.equals(targetFieldDescription)) { return ChangeEvent.builder() .modifier(targetFieldInfo.getFieldPath()) .entityUrn(datasetFieldUrn.toString()) .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.MODIFY) .semVerChange(SemanticChangeType.PATCH) - .description(String.format(FIELD_DOCUMENTATION_UPDATED_FORMAT, targetFieldInfo.getFieldPath(), datasetFieldUrn, - baseFieldDescription, targetFieldDescription)) + .description( + String.format( + FIELD_DOCUMENTATION_UPDATED_FORMAT, + targetFieldInfo.getFieldPath(), + datasetFieldUrn, + baseFieldDescription, + targetFieldDescription)) .auditStamp(auditStamp) .build(); } @@ -173,69 +210,86 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b return null; } - private static List<ChangeEvent> getGlossaryTermChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { - GlossaryTerms baseGlossaryTerms = (baseFieldInfo != null) ? baseFieldInfo.getGlossaryTerms() : null; - GlossaryTerms targetGlossaryTerms = (targetFieldInfo != null) ? targetFieldInfo.getGlossaryTerms() : null; + private static List<ChangeEvent> getGlossaryTermChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { + GlossaryTerms baseGlossaryTerms = + (baseFieldInfo != null) ? baseFieldInfo.getGlossaryTerms() : null; + GlossaryTerms targetGlossaryTerms = + (targetFieldInfo != null) ? targetFieldInfo.getGlossaryTerms() : null; // 1. Get EntityGlossaryTermChangeEvent, then rebind into a SchemaFieldGlossaryTermChangeEvent. List<ChangeEvent> entityGlossaryTermsChangeEvents = - GlossaryTermsChangeEventGenerator.computeDiffs(baseGlossaryTerms, targetGlossaryTerms, - datasetFieldUrn.toString(), auditStamp); + GlossaryTermsChangeEventGenerator.computeDiffs( + baseGlossaryTerms, targetGlossaryTerms, datasetFieldUrn.toString(), auditStamp); if (targetFieldInfo != null || baseFieldInfo != null) { - String fieldPath = targetFieldInfo != null ? 
targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); + String fieldPath = + targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); // 2. Convert EntityGlossaryTermChangeEvent into a SchemaFieldGlossaryTermChangeEvent. return convertEntityGlossaryTermChangeEvents( - fieldPath, - datasetFieldUrn, - entityGlossaryTermsChangeEvents); + fieldPath, datasetFieldUrn, entityGlossaryTermsChangeEvents); } return Collections.emptyList(); } - private static List<ChangeEvent> getTagChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> getTagChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { GlobalTags baseGlobalTags = (baseFieldInfo != null) ? baseFieldInfo.getGlobalTags() : null; - GlobalTags targetGlobalTags = (targetFieldInfo != null) ? targetFieldInfo.getGlobalTags() : null; + GlobalTags targetGlobalTags = + (targetFieldInfo != null) ? targetFieldInfo.getGlobalTags() : null; // 1. Get EntityTagChangeEvent, then rebind into a SchemaFieldTagChangeEvent. List<ChangeEvent> entityTagChangeEvents = - GlobalTagsChangeEventGenerator.computeDiffs(baseGlobalTags, targetGlobalTags, datasetFieldUrn.toString(), - auditStamp); + GlobalTagsChangeEventGenerator.computeDiffs( + baseGlobalTags, targetGlobalTags, datasetFieldUrn.toString(), auditStamp); if (targetFieldInfo != null || baseFieldInfo != null) { - String fieldPath = targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); + String fieldPath = + targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); // 2. Convert EntityTagChangeEvent into a SchemaFieldTagChangeEvent. 
- return convertEntityTagChangeEvents( - fieldPath, - datasetFieldUrn, - entityTagChangeEvents); + return convertEntityTagChangeEvents(fieldPath, datasetFieldUrn, entityTagChangeEvents); } return Collections.emptyList(); } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME) || !currentValue.getAspect() - .equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME) + || !currentValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - EditableSchemaMetadata baseEditableSchemaMetadata = getEditableSchemaMetadataFromAspect(previousValue); - EditableSchemaMetadata targetEditableSchemaMetadata = getEditableSchemaMetadataFromAspect(currentValue); + EditableSchemaMetadata baseEditableSchemaMetadata = + getEditableSchemaMetadataFromAspect(previousValue); + EditableSchemaMetadata targetEditableSchemaMetadata = + getEditableSchemaMetadataFromAspect(currentValue); List<ChangeEvent> changeEvents = new ArrayList<>(); if (SUPPORTED_CATEGORIES.contains(element)) { changeEvents.addAll( - computeDiffs(baseEditableSchemaMetadata, targetEditableSchemaMetadata, currentValue.getUrn(), element, null)); + computeDiffs( + baseEditableSchemaMetadata, + targetEditableSchemaMetadata, + currentValue.getUrn(), + element, + null)); } // Assess the highest change at the transaction(schema) level. @@ -264,14 +318,37 @@ public List<ChangeEvent> getChangeEvents( @Nonnull Aspect<EditableSchemaMetadata> to, @Nonnull AuditStamp auditStamp) { final List<ChangeEvent> changeEvents = new ArrayList<>(); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.DOCUMENTATION, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TAG, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.GLOSSARY_TERM, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.DOCUMENTATION, + auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TAG, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.TECHNICAL_SCHEMA, + auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.GLOSSARY_TERM, + auditStamp)); return changeEvents; } - private static Urn getDatasetFieldUrn(final EditableSchemaFieldInfo previous, final EditableSchemaFieldInfo latest, String entityUrn) { + private static Urn getDatasetFieldUrn( + final EditableSchemaFieldInfo previous, + final EditableSchemaFieldInfo latest, + String entityUrn) { return previous != null ? 
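
That transaction-level assessment is a plain max over the per-event semVer changes. A standalone sketch, assuming (as the Comparator-based max implies) that the severity enum's declaration order runs from least to most severe; Severity here is a stand-in for SemanticChangeType:

import java.util.Comparator;
import java.util.List;

public class HighestChangeSketch {
  // Stand-in for SemanticChangeType; declaration order encodes severity.
  enum Severity { NONE, PATCH, MINOR, MAJOR }

  public static void main(String[] args) {
    List<Severity> perEvent = List.of(Severity.PATCH, Severity.MINOR, Severity.PATCH);
    // Mirrors: changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange))
    Severity transactionLevel =
        perEvent.stream().max(Comparator.naturalOrder()).orElse(Severity.NONE);
    System.out.println(transactionLevel); // MINOR
  }
}
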
getSchemaFieldUrn(UrnUtils.getUrn(entityUrn), previous.getFieldPath()) : getSchemaFieldUrn(UrnUtils.getUrn(entityUrn), latest.getFieldPath()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java index d5539ec3d3822..0c98eefe90ef2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java @@ -13,38 +13,44 @@ import java.util.List; import javax.annotation.Nonnull; - -/** - * An abstract class to generate {@link ChangeEvent}s for a given entity aspect. - */ +/** An abstract class to generate {@link ChangeEvent}s for a given entity aspect. */ public abstract class EntityChangeEventGenerator<T extends RecordTemplate> { // TODO: Add a check for supported aspects protected SystemEntityClient _entityClient; protected Authentication _authentication; - public EntityChangeEventGenerator() { - } + public EntityChangeEventGenerator() {} public EntityChangeEventGenerator(@Nonnull final SystemEntityClient entityClient) { _entityClient = entityClient; } @Deprecated - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { // TODO: Migrate away from using getSemanticDiff. throw new UnsupportedOperationException(); } /** - * TODO: Migrate callers of the above API to below. The recommendation is to move timeline response creation into - * 2-stage. First stage generate change events, second stage derive semantic meaning + filter those change events. + * TODO: Migrate callers of the above API to below. The recommendation is to move timeline + * response creation into 2-stage. First stage generate change events, second stage derive + * semantic meaning + filter those change events. * - * Returns all {@link ChangeEvent}s computed from a raw aspect change. + * <p>Returns all {@link ChangeEvent}s computed from a raw aspect change. * - * Note that the {@link ChangeEvent} list can contain multiple {@link ChangeCategory} inside of it, - * it is expected that the caller will filter the set of events as required. + * <p>Note that the {@link ChangeEvent} list can contain multiple {@link ChangeCategory} inside of + * it, it is expected that the caller will filter the set of events as required. 
*/ - public abstract List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect, - @Nonnull Aspect<T> from, @Nonnull Aspect<T> to, @Nonnull AuditStamp auditStamp); + public abstract List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<T> from, + @Nonnull Aspect<T> to, + @Nonnull AuditStamp auditStamp); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java index 330be8560c72b..824dc10ab2732 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java @@ -4,20 +4,25 @@ import java.util.HashMap; import java.util.Map; - /** - * A cheap factory for generating EntityChangeEvents, keyed by entity-type, element-type, aspect-name + * A cheap factory for generating EntityChangeEvents, keyed by entity-type, element-type, + * aspect-name */ public class EntityChangeEventGeneratorFactory { - private final Map<String, EntityChangeEventGenerator> _entityChangeEventGeneratorMap = new HashMap<>(); + private final Map<String, EntityChangeEventGenerator> _entityChangeEventGeneratorMap = + new HashMap<>(); - public void addGenerator(String entityName, ChangeCategory elementName, String aspectName, + public void addGenerator( + String entityName, + ChangeCategory elementName, + String aspectName, EntityChangeEventGenerator differ) { _entityChangeEventGeneratorMap.put(entityName + elementName.name() + aspectName, differ); } - public EntityChangeEventGenerator getGenerator(String entityName, ChangeCategory category, String aspectName) { + public EntityChangeEventGenerator getGenerator( + String entityName, ChangeCategory category, String aspectName) { return _entityChangeEventGeneratorMap.get(entityName + category.name() + aspectName); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java index 84c4343dc63ee..3a94bedcbd0ce 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java @@ -7,18 +7,15 @@ import java.util.Set; import javax.annotation.Nonnull; - -/** - * A registry that maps an aspect name to one or more {@link EntityChangeEventGenerator}s. - */ +/** A registry that maps an aspect name to one or more {@link EntityChangeEventGenerator}s. */ public class EntityChangeEventGeneratorRegistry { - private final Map<String, Set<EntityChangeEventGenerator<?>>> entityChangeEventGenerators = new HashMap<>(); + private final Map<String, Set<EntityChangeEventGenerator<?>>> entityChangeEventGenerators = + new HashMap<>(); - /** - * Registers a new EntityChangeEventGenerator for the given aspect. - */ - public void register(@Nonnull final String aspectName, + /** Registers a new EntityChangeEventGenerator for the given aspect. 
*/ + public void register( + @Nonnull final String aspectName, @Nonnull final EntityChangeEventGenerator<?> entityChangeEventGenerator) { Objects.requireNonNull(aspectName); Objects.requireNonNull(entityChangeEventGenerator); @@ -26,10 +23,9 @@ public void register(@Nonnull final String aspectName, entityChangeEventGenerators.get(aspectName).add(entityChangeEventGenerator); } - /** - * Registers a new Enity Change Generator, or null if one does not exist. - */ - public Set<EntityChangeEventGenerator<?>> getEntityChangeEventGenerators(@Nonnull final String aspectName) { + /** Registers a new Enity Change Generator, or null if one does not exist. */ + public Set<EntityChangeEventGenerator<?>> getEntityChangeEventGenerators( + @Nonnull final String aspectName) { final String key = Objects.requireNonNull(aspectName); return this.entityChangeEventGenerators.getOrDefault(key, new HashSet<>()); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java index 7055c95a73eff..c90e96d3860fb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java @@ -10,11 +10,12 @@ import java.util.List; import javax.annotation.Nonnull; - /** - * A general purpose differ which simply determines whether an entity has been created or hard deleted. + * A general purpose differ which simply determines whether an entity has been created or hard + * deleted. */ -public class EntityKeyChangeEventGenerator<K extends RecordTemplate> extends EntityChangeEventGenerator<K> { +public class EntityKeyChangeEventGenerator<K extends RecordTemplate> + extends EntityChangeEventGenerator<K> { @Override public List<ChangeEvent> getChangeEvents( @Nonnull Urn urn, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java index 460a0ae399a5f..ef40c5dc81a3c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,19 +21,20 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GlobalTagsChangeEventGenerator extends EntityChangeEventGenerator<GlobalTags> { private static final String TAG_ADDED_FORMAT = "Tag '%s' added to entity '%s'."; private static final String TAG_REMOVED_FORMAT = "Tag '%s' removed from entity '%s'."; - public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTags targetGlobalTags, String entityUrn, + public static List<ChangeEvent> computeDiffs( + GlobalTags baseGlobalTags, + GlobalTags targetGlobalTags, + String entityUrn, AuditStamp auditStamp) { sortGlobalTagsByTagUrn(baseGlobalTags); sortGlobalTagsByTagUrn(targetGlobalTags); List<ChangeEvent> changeEvents = new ArrayList<>(); - TagAssociationArray 
baseTags = (baseGlobalTags != null) ? baseGlobalTags.getTags() : new TagAssociationArray(); + TagAssociationArray baseTags = + (baseGlobalTags != null) ? baseGlobalTags.getTags() : new TagAssociationArray(); TagAssociationArray targetTags = (targetGlobalTags != null) ? targetGlobalTags.getTags() : new TagAssociationArray(); int baseTagIdx = 0; @@ -39,36 +42,46 @@ public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTa while (baseTagIdx < baseTags.size() && targetTagIdx < targetTags.size()) { TagAssociation baseTagAssociation = baseTags.get(baseTagIdx); TagAssociation targetTagAssociation = targetTags.get(targetTagIdx); - int comparison = baseTagAssociation.getTag().toString().compareTo(targetTagAssociation.getTag().toString()); + int comparison = + baseTagAssociation + .getTag() + .toString() + .compareTo(targetTagAssociation.getTag().toString()); if (comparison == 0) { // No change to this tag. ++baseTagIdx; ++targetTagIdx; } else if (comparison < 0) { // Tag got removed. - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(baseTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(baseTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(baseTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(baseTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++baseTagIdx; } else { // Tag got added. - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(targetTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(targetTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(targetTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(targetTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++targetTagIdx; } } @@ -76,31 +89,35 @@ public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTa while (baseTagIdx < baseTags.size()) { // Handle removed tags. 
TagAssociation baseTagAssociation = baseTags.get(baseTagIdx); - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(baseTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(baseTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(baseTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(baseTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++baseTagIdx; } while (targetTagIdx < targetTags.size()) { // Handle newly added tags. TagAssociation targetTagAssociation = targetTags.get(targetTagIdx); - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(targetTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(targetTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(targetTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(targetTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++targetTagIdx; } return changeEvents; @@ -123,10 +140,14 @@ private static GlobalTags getGlobalTagsFromAspect(EntityAspect entityAspect) { } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOBAL_TAGS_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + GLOBAL_TAGS_ASPECT_NAME); } @@ -134,7 +155,8 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec GlobalTags targetGlobalTags = getGlobalTagsFromAspect(currentValue); List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.TAG) { - changeEvents.addAll(computeDiffs(baseGlobalTags, targetGlobalTags, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseGlobalTags, targetGlobalTags, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
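
computeDiffs above is the merge-style walk shared by every collection-valued generator in this patch: sort both sides by URN, advance two cursors in lockstep, and read the sign of the comparison as unchanged, REMOVE, or ADD, then drain whatever remains on either side. A self-contained sketch of that walk over plain URN strings, with simple string events standing in for the real ChangeEvent builders:

import java.util.ArrayList;
import java.util.List;

public class SortedDiffSketch {
  // Both lists must already be sorted; the real generators guarantee this by
  // sorting tags/terms/owners by URN string before the walk.
  static List<String> diff(List<String> base, List<String> target) {
    List<String> events = new ArrayList<>();
    int i = 0;
    int j = 0;
    while (i < base.size() && j < target.size()) {
      int cmp = base.get(i).compareTo(target.get(j));
      if (cmp == 0) { // present on both sides: no event
        i++;
        j++;
      } else if (cmp < 0) { // only in base: it was removed
        events.add("REMOVE " + base.get(i++));
      } else { // only in target: it was added
        events.add("ADD " + target.get(j++));
      }
    }
    while (i < base.size()) { // drain remaining removals
      events.add("REMOVE " + base.get(i++));
    }
    while (j < target.size()) { // drain remaining additions
      events.add("ADD " + target.get(j++));
    }
    return events;
  }

  public static void main(String[] args) {
    System.out.println(
        diff(List.of("urn:li:tag:pii"), List.of("urn:li:tag:gdpr", "urn:li:tag:pii")));
    // [ADD urn:li:tag:gdpr]
  }
}
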
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java index f8b7794df531f..eb002a9a83cea 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -17,97 +20,111 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; - +public class GlossaryTermInfoChangeEventGenerator + extends EntityChangeEventGenerator<GlossaryTermInfo> { + private static List<ChangeEvent> computeDiffs( + GlossaryTermInfo baseDatasetProperties, + @Nonnull GlossaryTermInfo targetDatasetProperties, + @Nonnull String entityUrn, + AuditStamp auditStamp) { + List<ChangeEvent> changeEvents = new ArrayList<>(); + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDefinition() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDefinition() : null; -public class GlossaryTermInfoChangeEventGenerator extends EntityChangeEventGenerator<GlossaryTermInfo> { - private static List<ChangeEvent> computeDiffs(GlossaryTermInfo baseDatasetProperties, - @Nonnull GlossaryTermInfo targetDatasetProperties, @Nonnull String entityUrn, AuditStamp auditStamp) { - List<ChangeEvent> changeEvents = new ArrayList<>(); - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDefinition() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDefinition() : null; - - if (baseDescription == null && targetDescription != null) { - // Description added - changeEvents.add(ChangeEvent.builder().entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription == null) { - // Description removed. - changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { - // Description has been modified. 
- changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) - .auditStamp(auditStamp) - .build()); - } - return changeEvents; + if (baseDescription == null && targetDescription != null) { + // Description added + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null && targetDescription == null) { + // Description removed. + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { + // Description has been modified. + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .auditStamp(auditStamp) + .build()); } + return changeEvents; + } - @Nullable - private static GlossaryTermInfo getGlossaryTermInfoFromAspect(EntityAspect entityAspect) { - if (entityAspect != null && entityAspect.getMetadata() != null) { - return RecordUtils.toRecordTemplate(GlossaryTermInfo.class, entityAspect.getMetadata()); - } - return null; + @Nullable + private static GlossaryTermInfo getGlossaryTermInfoFromAspect(EntityAspect entityAspect) { + if (entityAspect != null && entityAspect.getMetadata() != null) { + return RecordUtils.toRecordTemplate(GlossaryTermInfo.class, entityAspect.getMetadata()); } + return null; + } - @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERM_INFO_ASPECT_NAME); - } - List<ChangeEvent> changeEvents = new ArrayList<>(); - if (element == ChangeCategory.DOCUMENTATION) { - GlossaryTermInfo baseGlossaryTermInfo = getGlossaryTermInfoFromAspect(previousValue); - GlossaryTermInfo targetGlossaryTermInfo = getGlossaryTermInfoFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseGlossaryTermInfo, targetGlossaryTermInfo, currentValue.getUrn(), null)); - } - - // Assess the highest change at the transaction(schema) level. 
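
The definition diff follows the three-way convention these generators apply to every single-valued property: null to value is an ADD, value to null is a REMOVE, and value to a different value is a MODIFY. Condensed into a sketch:

public class SingleValueDiffSketch {
  // Returns the change operation for a single-valued property, or null when
  // nothing changed (both null, or equal).
  static String operation(String base, String target) {
    if (base == null && target != null) {
      return "ADD";
    }
    if (base != null && target == null) {
      return "REMOVE";
    }
    if (base != null && target != null && !base.equals(target)) {
      return "MODIFY";
    }
    return null;
  }

  public static void main(String[] args) {
    System.out.println(operation(null, "A customer."));        // ADD
    System.out.println(operation("A customer.", "A person.")); // MODIFY
    System.out.println(operation("A person.", null));          // REMOVE
  }
}
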
- SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; - ChangeEvent highestChangeEvent = - changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); - if (highestChangeEvent != null) { - highestSemanticChange = highestChangeEvent.getSemVerChange(); - } - - return ChangeTransaction.builder() - .semVerChange(highestSemanticChange) - .changeEvents(changeEvents) - .timestamp(currentValue.getCreatedOn().getTime()) - .rawDiff(rawDiffsRequested ? rawDiff : null) - .actor(currentValue.getCreatedBy()) - .build(); + @Override + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERM_INFO_ASPECT_NAME); + } + List<ChangeEvent> changeEvents = new ArrayList<>(); + if (element == ChangeCategory.DOCUMENTATION) { + GlossaryTermInfo baseGlossaryTermInfo = getGlossaryTermInfoFromAspect(previousValue); + GlossaryTermInfo targetGlossaryTermInfo = getGlossaryTermInfoFromAspect(currentValue); + changeEvents.addAll( + computeDiffs(baseGlossaryTermInfo, targetGlossaryTermInfo, currentValue.getUrn(), null)); } - @Override - public List<ChangeEvent> getChangeEvents( - @Nonnull Urn urn, - @Nonnull String entity, - @Nonnull String aspect, - @Nonnull Aspect<GlossaryTermInfo> from, - @Nonnull Aspect<GlossaryTermInfo> to, - @Nonnull AuditStamp auditStamp) { - return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); + // Assess the highest change at the transaction(schema) level. + SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; + ChangeEvent highestChangeEvent = + changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); + if (highestChangeEvent != null) { + highestSemanticChange = highestChangeEvent.getSemVerChange(); } + + return ChangeTransaction.builder() + .semVerChange(highestSemanticChange) + .changeEvents(changeEvents) + .timestamp(currentValue.getCreatedOn().getTime()) + .rawDiff(rawDiffsRequested ? 
rawDiff : null) + .actor(currentValue.getCreatedBy()) + .build(); + } + + @Override + public List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<GlossaryTermInfo> from, + @Nonnull Aspect<GlossaryTermInfo> to, + @Nonnull AuditStamp auditStamp) { + return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java index 22b2033ec52bc..6e56a7e7bbeb1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,62 +21,78 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GlossaryTermsChangeEventGenerator extends EntityChangeEventGenerator<GlossaryTerms> { private static final String GLOSSARY_TERM_ADDED_FORMAT = "Term '%s' added to entity '%s'."; private static final String GLOSSARY_TERM_REMOVED_FORMAT = "Term '%s' removed from entity '%s'."; - public static List<ChangeEvent> computeDiffs(GlossaryTerms baseGlossaryTerms, GlossaryTerms targetGlossaryTerms, - String entityUrn, AuditStamp auditStamp) { + public static List<ChangeEvent> computeDiffs( + GlossaryTerms baseGlossaryTerms, + GlossaryTerms targetGlossaryTerms, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); sortGlossaryTermsByGlossaryTermUrn(baseGlossaryTerms); sortGlossaryTermsByGlossaryTermUrn(targetGlossaryTerms); GlossaryTermAssociationArray baseTerms = - (baseGlossaryTerms != null) ? baseGlossaryTerms.getTerms() : new GlossaryTermAssociationArray(); + (baseGlossaryTerms != null) + ? baseGlossaryTerms.getTerms() + : new GlossaryTermAssociationArray(); GlossaryTermAssociationArray targetTerms = - (targetGlossaryTerms != null) ? targetGlossaryTerms.getTerms() : new GlossaryTermAssociationArray(); + (targetGlossaryTerms != null) + ? targetGlossaryTerms.getTerms() + : new GlossaryTermAssociationArray(); int baseGlossaryTermIdx = 0; int targetGlossaryTermIdx = 0; while (baseGlossaryTermIdx < baseTerms.size() && targetGlossaryTermIdx < targetTerms.size()) { GlossaryTermAssociation baseGlossaryTermAssociation = baseTerms.get(baseGlossaryTermIdx); - GlossaryTermAssociation targetGlossaryTermAssociation = targetTerms.get(targetGlossaryTermIdx); + GlossaryTermAssociation targetGlossaryTermAssociation = + targetTerms.get(targetGlossaryTermIdx); int comparison = - baseGlossaryTermAssociation.getUrn().toString().compareTo(targetGlossaryTermAssociation.getUrn().toString()); + baseGlossaryTermAssociation + .getUrn() + .toString() + .compareTo(targetGlossaryTermAssociation.getUrn().toString()); if (comparison == 0) { ++baseGlossaryTermIdx; ++targetGlossaryTermIdx; } else if (comparison < 0) { // GlossaryTerm got removed. 
- changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(baseGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_REMOVED_FORMAT, baseGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(baseGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(baseGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_REMOVED_FORMAT, + baseGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(baseGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++baseGlossaryTermIdx; } else { // GlossaryTerm got added. - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(targetGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_ADDED_FORMAT, targetGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(targetGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(targetGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_ADDED_FORMAT, + targetGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(targetGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++targetGlossaryTermIdx; } } @@ -82,33 +100,42 @@ public static List<ChangeEvent> computeDiffs(GlossaryTerms baseGlossaryTerms, Gl while (baseGlossaryTermIdx < baseTerms.size()) { // Handle removed glossary terms. GlossaryTermAssociation baseGlossaryTermAssociation = baseTerms.get(baseGlossaryTermIdx); - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(baseGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_REMOVED_FORMAT, baseGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(baseGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(baseGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_REMOVED_FORMAT, + baseGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(baseGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++baseGlossaryTermIdx; } while (targetGlossaryTermIdx < targetTerms.size()) { // Handle newly added glossary terms. 
- GlossaryTermAssociation targetGlossaryTermAssociation = targetTerms.get(targetGlossaryTermIdx); - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(targetGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_ADDED_FORMAT, targetGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(targetGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + GlossaryTermAssociation targetGlossaryTermAssociation = + targetTerms.get(targetGlossaryTermIdx); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(targetGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_ADDED_FORMAT, + targetGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(targetGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++targetGlossaryTermIdx; } return changeEvents; @@ -119,7 +146,8 @@ private static void sortGlossaryTermsByGlossaryTermUrn(GlossaryTerms globalGloss return; } List<GlossaryTermAssociation> glossaryTerms = new ArrayList<>(globalGlossaryTerms.getTerms()); - glossaryTerms.sort(Comparator.comparing(GlossaryTermAssociation::getUrn, Comparator.comparing(Urn::toString))); + glossaryTerms.sort( + Comparator.comparing(GlossaryTermAssociation::getUrn, Comparator.comparing(Urn::toString))); globalGlossaryTerms.setTerms(new GlossaryTermAssociationArray(glossaryTerms)); } @@ -131,15 +159,19 @@ private static GlossaryTerms getGlossaryTermsFromAspect(EntityAspect entityAspec } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOSSARY_TERMS_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERMS_ASPECT_NAME); } @@ -147,7 +179,8 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec GlossaryTerms targetGlossaryTerms = getGlossaryTermsFromAspect(currentValue); List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.GLOSSARY_TERM) { - changeEvents.addAll(computeDiffs(baseGlossaryTerms, targetGlossaryTerms, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseGlossaryTerms, targetGlossaryTerms, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
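
The two-cursor walk is only correct because both association arrays are pre-sorted on the same key, and since Urn is compared through its string form, the sort uses the two-argument Comparator.comparing with a nested comparator for the key. That shape in isolation, with stand-in types (FakeUrn and Association are illustrative, not the real pegasus models):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortByUrnSketch {
  // Stand-in for Urn: meaningful only through its string form.
  static class FakeUrn {
    private final String value;
    FakeUrn(String value) { this.value = value; }
    @Override public String toString() { return value; }
  }

  // Stand-in for GlossaryTermAssociation / TagAssociation.
  static class Association {
    private final FakeUrn urn;
    Association(String urn) { this.urn = new FakeUrn(urn); }
    FakeUrn getUrn() { return urn; }
  }

  public static void main(String[] args) {
    List<Association> terms = new ArrayList<>(List.of(
        new Association("urn:li:glossaryTerm:pii"),
        new Association("urn:li:glossaryTerm:address")));
    // Same shape as the sort above: key extractor plus a comparator for the key.
    terms.sort(Comparator.comparing(Association::getUrn, Comparator.comparing(FakeUrn::toString)));
    terms.forEach(a -> System.out.println(a.getUrn())); // address first, then pii
  }
}
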
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java index a23d76e47755c..bf3ff3293d2a2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,10 +21,8 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class InstitutionalMemoryChangeEventGenerator extends EntityChangeEventGenerator<InstitutionalMemory> { +public class InstitutionalMemoryChangeEventGenerator + extends EntityChangeEventGenerator<InstitutionalMemory> { private static final String INSTITUTIONAL_MEMORY_ADDED_FORMAT = "Institutional Memory '%s' with documentation of '%s' has been added: '%s'"; @@ -31,17 +31,22 @@ public class InstitutionalMemoryChangeEventGenerator extends EntityChangeEventGe private static final String INSTITUTIONAL_MEMORY_MODIFIED_FORMAT = "Documentation of Institutional Memory '%s' of '%s' has been changed from '%s' to '%s'."; - private static List<ChangeEvent> computeDiffs(InstitutionalMemory baseInstitutionalMemory, - InstitutionalMemory targetInstitutionalMemory, String entityUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + InstitutionalMemory baseInstitutionalMemory, + InstitutionalMemory targetInstitutionalMemory, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); sortElementsByUrl(baseInstitutionalMemory); sortElementsByUrl(targetInstitutionalMemory); InstitutionalMemoryMetadataArray baseElements = - (baseInstitutionalMemory != null) ? baseInstitutionalMemory.getElements() + (baseInstitutionalMemory != null) + ? baseInstitutionalMemory.getElements() : new InstitutionalMemoryMetadataArray(); InstitutionalMemoryMetadataArray targetElements = - (targetInstitutionalMemory != null) ? targetInstitutionalMemory.getElements() + (targetInstitutionalMemory != null) + ? targetInstitutionalMemory.getElements() : new InstitutionalMemoryMetadataArray(); int baseIdx = 0; @@ -53,46 +58,60 @@ private static List<ChangeEvent> computeDiffs(InstitutionalMemory baseInstitutio if (comparison == 0) { if (!baseElement.getDescription().equals(targetElement.getDescription())) { // InstitutionalMemory description has changed. 
- changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.PATCH) - .description(String.format(INSTITUTIONAL_MEMORY_MODIFIED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription(), targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.PATCH) + .description( + String.format( + INSTITUTIONAL_MEMORY_MODIFIED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription(), + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); } ++baseIdx; ++targetIdx; } else if (comparison < 0) { // InstitutionalMemory got removed. - changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_REMOVED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_REMOVED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++baseIdx; } else { // InstitutionalMemory got added.. - changeEvents.add(ChangeEvent.builder() - .modifier(targetElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_ADDED_FORMAT, targetElement.getUrl(), entityUrn, - targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(targetElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_ADDED_FORMAT, + targetElement.getUrl(), + entityUrn, + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++targetIdx; } } @@ -100,34 +119,42 @@ private static List<ChangeEvent> computeDiffs(InstitutionalMemory baseInstitutio while (baseIdx < baseElements.size()) { // InstitutionalMemory got removed. 
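
Institutional memory (and ownership below) extends the walk with a third outcome: when the sort keys match, the payloads are compared and a MODIFY event is emitted if they differ. The same idea condensed into a map-based sketch, keyed by URL with the description as payload; this is an equivalent formulation, not the two-cursor code the generators actually use:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class KeyedModifySketch {
  static List<String> diff(TreeMap<String, String> base, TreeMap<String, String> target) {
    List<String> events = new ArrayList<>();
    for (Map.Entry<String, String> e : base.entrySet()) {
      if (!target.containsKey(e.getKey())) {
        events.add("REMOVE " + e.getKey());
      } else if (!e.getValue().equals(target.get(e.getKey()))) {
        events.add("MODIFY " + e.getKey()); // same link, changed description
      }
    }
    for (String key : target.keySet()) {
      if (!base.containsKey(key)) {
        events.add("ADD " + key);
      }
    }
    return events;
  }

  public static void main(String[] args) {
    TreeMap<String, String> base = new TreeMap<>(Map.of("https://wiki/runbook", "Old runbook"));
    TreeMap<String, String> target = new TreeMap<>(Map.of("https://wiki/runbook", "New runbook"));
    System.out.println(diff(base, target)); // [MODIFY https://wiki/runbook]
  }
}
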
InstitutionalMemoryMetadata baseElement = baseElements.get(baseIdx); - changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_REMOVED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_REMOVED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++baseIdx; } while (targetIdx < targetElements.size()) { // Newly added owners. InstitutionalMemoryMetadata targetElement = targetElements.get(targetIdx); // InstitutionalMemory got added.. - changeEvents.add(ChangeEvent.builder() - .modifier(targetElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_ADDED_FORMAT, targetElement.getUrl(), entityUrn, - targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(targetElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_ADDED_FORMAT, + targetElement.getUrl(), + entityUrn, + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++targetIdx; } return changeEvents; @@ -145,20 +172,26 @@ private static void sortElementsByUrl(InstitutionalMemory institutionalMemory) { return; } List<InstitutionalMemoryMetadata> elements = new ArrayList<>(institutionalMemory.getElements()); - elements.sort(Comparator.comparing(InstitutionalMemoryMetadata::getUrl, Comparator.comparing(Url::toString))); + elements.sort( + Comparator.comparing( + InstitutionalMemoryMetadata::getUrl, Comparator.comparing(Url::toString))); institutionalMemory.setElements(new InstitutionalMemoryMetadataArray(elements)); } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME) || !currentValue.getAspect() - .equals(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME) + || !currentValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + INSTITUTIONAL_MEMORY_ASPECT_NAME); } @@ -166,7 +199,9 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec InstitutionalMemory targetInstitutionalMemory = getInstitutionalMemoryFromAspect(currentValue); List<ChangeEvent> 
changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { - changeEvents.addAll(computeDiffs(baseInstitutionalMemory, targetInstitutionalMemory, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs( + baseInstitutionalMemory, targetInstitutionalMemory, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java index f5697aea25b9a..b32958508cf24 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,23 +21,21 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipChangeEventGenerator extends EntityChangeEventGenerator<Ownership> { private static final String OWNER_ADDED_FORMAT = "'%s' added as a `%s` of '%s'."; private static final String OWNER_REMOVED_FORMAT = "'%s' removed as a `%s` of '%s'."; private static final String OWNERSHIP_TYPE_CHANGE_FORMAT = "'%s''s ownership type changed from '%s' to '%s' for '%s'."; - private static List<ChangeEvent> computeDiffs(Ownership baseOwnership, Ownership targetOwnership, String entityUrn, - AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + Ownership baseOwnership, Ownership targetOwnership, String entityUrn, AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); sortOwnersByUrn(baseOwnership); sortOwnersByUrn(targetOwnership); OwnerArray baseOwners = (baseOwnership != null) ? baseOwnership.getOwners() : new OwnerArray(); - OwnerArray targetOwners = (targetOwnership != null) ? targetOwnership.getOwners() : new OwnerArray(); + OwnerArray targetOwners = + (targetOwnership != null) ? targetOwnership.getOwners() : new OwnerArray(); int baseOwnerIdx = 0; int targetOwnerIdx = 0; @@ -46,49 +46,66 @@ private static List<ChangeEvent> computeDiffs(Ownership baseOwnership, Ownership if (comparison == 0) { if (!baseOwner.getType().equals(targetOwner.getType())) { // Ownership type has changed. 
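
Owner events are built through OwnerChangeEvent rather than the base ChangeEvent builder so that consumers get typed fields (ownerUrn, ownerType) instead of parsing the description string; tag and term events do the same with tagUrn and termUrn. The specialization pattern, sketched with plain stand-in classes rather than the real Lombok builders:

public class TypedEventSketch {
  // Base event: human-readable only.
  static class ChangeEvent {
    final String entityUrn;
    final String description;
    ChangeEvent(String entityUrn, String description) {
      this.entityUrn = entityUrn;
      this.description = description;
    }
  }

  // Specialized event: adds machine-readable fields so consumers need not
  // parse the description text.
  static class OwnerChangeEvent extends ChangeEvent {
    final String ownerUrn;
    final String ownerType;
    OwnerChangeEvent(String entityUrn, String description, String ownerUrn, String ownerType) {
      super(entityUrn, description);
      this.ownerUrn = ownerUrn;
      this.ownerType = ownerType;
    }
  }

  public static void main(String[] args) {
    OwnerChangeEvent event =
        new OwnerChangeEvent(
            "urn:li:dataset:(urn:li:dataPlatform:hive,db.users,PROD)",
            "'jdoe' added as a `TECHNICAL_OWNER` of the dataset.",
            "urn:li:corpuser:jdoe",
            "TECHNICAL_OWNER");
    System.out.println(event.ownerUrn + " / " + event.ownerType);
  }
}
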
- changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getType().name()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.PATCH) - .description( - String.format(OWNERSHIP_TYPE_CHANGE_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), - targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getType().name()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.PATCH) + .description( + String.format( + OWNERSHIP_TYPE_CHANGE_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); } ++baseOwnerIdx; ++targetOwnerIdx; } else if (comparison < 0) { // Owner got removed - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(baseOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_REMOVED_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), entityUrn)) - .ownerUrn(baseOwner.getOwner()) - .ownerType(baseOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(baseOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_REMOVED_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + entityUrn)) + .ownerUrn(baseOwner.getOwner()) + .ownerType(baseOwner.getType()) + .auditStamp(auditStamp) + .build()); ++baseOwnerIdx; } else { // Owner got added. - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_ADDED_FORMAT, targetOwner.getOwner().getId(), targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_ADDED_FORMAT, + targetOwner.getOwner().getId(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); ++targetOwnerIdx; } } @@ -96,33 +113,45 @@ private static List<ChangeEvent> computeDiffs(Ownership baseOwnership, Ownership while (baseOwnerIdx < baseOwners.size()) { // Handle removed owners. 
Owner baseOwner = baseOwners.get(baseOwnerIdx); - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(baseOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_REMOVED_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), entityUrn)) - .ownerUrn(baseOwner.getOwner()) - .ownerType(baseOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(baseOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_REMOVED_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + entityUrn)) + .ownerUrn(baseOwner.getOwner()) + .ownerType(baseOwner.getType()) + .auditStamp(auditStamp) + .build()); ++baseOwnerIdx; } while (targetOwnerIdx < targetOwners.size()) { // Newly added owners. Owner targetOwner = targetOwners.get(targetOwnerIdx); - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_ADDED_FORMAT, targetOwner.getOwner().getId(), targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_ADDED_FORMAT, + targetOwner.getOwner().getId(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); ++targetOwnerIdx; } return changeEvents; @@ -145,15 +174,19 @@ private static void sortOwnersByUrn(Ownership ownership) { } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(OWNERSHIP_ASPECT_NAME) || !currentValue.getAspect() - .equals(OWNERSHIP_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(OWNERSHIP_ASPECT_NAME) + || !currentValue.getAspect().equals(OWNERSHIP_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + OWNERSHIP_ASPECT_NAME); } @@ -162,11 +195,13 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.OWNER) { - changeEvents.addAll(computeDiffs(baseOwnership, targetOwnership, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseOwnership, targetOwnership, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
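
Every getSemanticDiff in this family opens with the same guard: currentValue must be non-null and both aspects must carry the expected name. Note that previousValue is dereferenced without its own null check, so a null previous aspect surfaces as a NullPointerException rather than the IllegalArgumentException. The guard as a hypothetical factored-out helper (the real generators inline it), written null-safely by putting the expected name first:

public class AspectGuardSketch {
  static void requireAspect(String expectedName, String previousAspect, String currentAspect) {
    if (currentAspect == null) {
      throw new IllegalArgumentException("EntityAspect currentValue should not be null");
    }
    if (!expectedName.equals(previousAspect) || !expectedName.equals(currentAspect)) {
      throw new IllegalArgumentException("Aspect is not " + expectedName);
    }
  }

  public static void main(String[] args) {
    requireAspect("ownership", "ownership", "ownership"); // passes silently
    try {
      requireAspect("ownership", "globalTags", "ownership");
    } catch (IllegalArgumentException expected) {
      System.out.println(expected.getMessage()); // Aspect is not ownership
    }
  }
}
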
- // Why isn't this done at changeevent level - what if transaction contains multiple category events? + // Why isn't this done at changeevent level - what if transaction contains multiple category + // events? SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; ChangeEvent highestChangeEvent = changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java index 2e0a8586cba60..1fd5d6e2c0f7a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -29,14 +31,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; -import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; - - @Slf4j public class SchemaMetadataChangeEventGenerator extends EntityChangeEventGenerator<SchemaMetadata> { private static final String SCHEMA_METADATA_ASPECT_NAME = "schemaMetadata"; - private static final String BACKWARDS_INCOMPATIBLE_DESC = "A backwards incompatible change due to"; - private static final String BACK_AND_FORWARD_COMPATIBLE_DESC = "A forwards & backwards compatible change due to "; + private static final String BACKWARDS_INCOMPATIBLE_DESC = + "A backwards incompatible change due to"; + private static final String BACK_AND_FORWARD_COMPATIBLE_DESC = + "A forwards & backwards compatible change due to "; private static final String FIELD_DESCRIPTION_ADDED_FORMAT = "The description '%s' for the field '%s' has been added."; private static final String FIELD_DESCRIPTION_REMOVED_FORMAT = @@ -44,8 +45,11 @@ public class SchemaMetadataChangeEventGenerator extends EntityChangeEventGenerat private static final String FIELD_DESCRIPTION_MODIFIED_FORMAT = "The description for the field '%s' has been changed from '%s' to '%s'."; - private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, @Nullable SchemaField targetField, - String datasetFieldUrn, AuditStamp auditStamp) { + private static ChangeEvent getDescriptionChange( + @Nullable SchemaField baseField, + @Nullable SchemaField targetField, + String datasetFieldUrn, + AuditStamp auditStamp) { String baseDescription = (baseField != null) ? baseField.getDescription() : null; String targetDescription = (targetField != null) ? 
targetField.getDescription() : null; if (baseDescription == null && targetDescription != null) { @@ -55,7 +59,9 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.MINOR) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_ADDED_FORMAT, targetDescription, targetField.getFieldPath())) + .description( + String.format( + FIELD_DESCRIPTION_ADDED_FORMAT, targetDescription, targetField.getFieldPath())) .auditStamp(auditStamp) .build(); } @@ -66,7 +72,9 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.MINOR) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_REMOVED_FORMAT, baseDescription, baseField.getFieldPath())) + .description( + String.format( + FIELD_DESCRIPTION_REMOVED_FORMAT, baseDescription, baseField.getFieldPath())) .auditStamp(auditStamp) .build(); } @@ -77,26 +85,36 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.PATCH) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_MODIFIED_FORMAT, baseField.getFieldPath(), baseDescription, - targetDescription)) + .description( + String.format( + FIELD_DESCRIPTION_MODIFIED_FORMAT, + baseField.getFieldPath(), + baseDescription, + targetDescription)) .auditStamp(auditStamp) .build(); } return null; } - private static List<ChangeEvent> getGlobalTagChangeEvents(SchemaField baseField, SchemaField targetField, + private static List<ChangeEvent> getGlobalTagChangeEvents( + SchemaField baseField, + SchemaField targetField, String parentUrnStr, String datasetFieldUrn, AuditStamp auditStamp) { // 1. Get EntityTagChangeEvent, then rebind into a SchemaFieldTagChangeEvent. List<ChangeEvent> entityTagChangeEvents = - GlobalTagsChangeEventGenerator.computeDiffs(baseField != null ? baseField.getGlobalTags() : null, - targetField != null ? targetField.getGlobalTags() : null, datasetFieldUrn, auditStamp); + GlobalTagsChangeEventGenerator.computeDiffs( + baseField != null ? baseField.getGlobalTags() : null, + targetField != null ? targetField.getGlobalTags() : null, + datasetFieldUrn, + auditStamp); if (baseField != null || targetField != null) { - String fieldPath = targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); + String fieldPath = + targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); // 2. Convert EntityTagChangeEvent into a SchemaFieldTagChangeEvent. final Urn parentUrn; try { @@ -106,27 +124,30 @@ private static List<ChangeEvent> getGlobalTagChangeEvents(SchemaField baseField, return Collections.emptyList(); } - return convertEntityTagChangeEvents( - fieldPath, - parentUrn, - entityTagChangeEvents); + return convertEntityTagChangeEvents(fieldPath, parentUrn, entityTagChangeEvents); } return Collections.emptyList(); } - private static List<ChangeEvent> getGlossaryTermsChangeEvents(SchemaField baseField, SchemaField targetField, + private static List<ChangeEvent> getGlossaryTermsChangeEvents( + SchemaField baseField, + SchemaField targetField, String parentUrnStr, String datasetFieldUrn, AuditStamp auditStamp) { // 1. Get EntityGlossaryTermChangeEvent, then rebind into a SchemaFieldGlossaryTermChangeEvent. 
List<ChangeEvent> entityGlossaryTermsChangeEvents = - GlossaryTermsChangeEventGenerator.computeDiffs(baseField != null ? baseField.getGlossaryTerms() : null, - targetField != null ? targetField.getGlossaryTerms() : null, datasetFieldUrn, auditStamp); + GlossaryTermsChangeEventGenerator.computeDiffs( + baseField != null ? baseField.getGlossaryTerms() : null, + targetField != null ? targetField.getGlossaryTerms() : null, + datasetFieldUrn, + auditStamp); if (targetField != null || baseField != null) { - String fieldPath = targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); + String fieldPath = + targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); // 2. Convert EntityGlossaryTermChangeEvent into a SchemaFieldGlossaryTermChangeEvent. final Urn parentUrn; try { @@ -137,16 +158,18 @@ private static List<ChangeEvent> getGlossaryTermsChangeEvents(SchemaField baseFi } return convertEntityGlossaryTermChangeEvents( - fieldPath, - parentUrn, - entityGlossaryTermsChangeEvents); + fieldPath, parentUrn, entityGlossaryTermsChangeEvents); } return Collections.emptyList(); } - private static List<ChangeEvent> getFieldPropertyChangeEvents(SchemaField baseField, SchemaField targetField, - Urn datasetUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List<ChangeEvent> getFieldPropertyChangeEvents( + SchemaField baseField, + SchemaField targetField, + Urn datasetUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { List<ChangeEvent> propChangeEvents = new ArrayList<>(); String datasetFieldUrn; if (targetField != null) { @@ -157,7 +180,8 @@ private static List<ChangeEvent> getFieldPropertyChangeEvents(SchemaField baseFi // Description Change. if (ChangeCategory.DOCUMENTATION.equals(changeCategory)) { - ChangeEvent descriptionChangeEvent = getDescriptionChange(baseField, targetField, datasetFieldUrn, auditStamp); + ChangeEvent descriptionChangeEvent = + getDescriptionChange(baseField, targetField, datasetFieldUrn, auditStamp); if (descriptionChangeEvent != null) { propChangeEvents.add(descriptionChangeEvent); } @@ -165,20 +189,28 @@ private static List<ChangeEvent> getFieldPropertyChangeEvents(SchemaField baseFi // Global Tags if (ChangeCategory.TAG.equals(changeCategory)) { - propChangeEvents.addAll(getGlobalTagChangeEvents(baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); + propChangeEvents.addAll( + getGlobalTagChangeEvents( + baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); } // Glossary terms. if (ChangeCategory.GLOSSARY_TERM.equals(changeCategory)) { - propChangeEvents.addAll(getGlossaryTermsChangeEvents(baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); + propChangeEvents.addAll( + getGlossaryTermsChangeEvents( + baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); } return propChangeEvents; } // TODO: This could use some cleanup, lots of repeated logic and tenuous conditionals - private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaMetadata targetSchema, - Urn datasetUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + SchemaMetadata baseSchema, + SchemaMetadata targetSchema, + Urn datasetUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { // Sort the fields by their field path. 
if (baseSchema != null) { sortFieldsByPath(baseSchema); @@ -188,8 +220,10 @@ private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaM } // Performs ordinal based diff, primarily based on fixed field ordinals and their types. - SchemaFieldArray baseFields = (baseSchema != null ? baseSchema.getFields() : new SchemaFieldArray()); - SchemaFieldArray targetFields = targetSchema != null ? targetSchema.getFields() : new SchemaFieldArray(); + SchemaFieldArray baseFields = + (baseSchema != null ? baseSchema.getFields() : new SchemaFieldArray()); + SchemaFieldArray targetFields = + targetSchema != null ? targetSchema.getFields() : new SchemaFieldArray(); int baseFieldIdx = 0; int targetFieldIdx = 0; List<ChangeEvent> changeEvents = new ArrayList<>(); @@ -197,7 +231,7 @@ private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaM while (baseFieldIdx < baseFields.size() && targetFieldIdx < targetFields.size()) { SchemaField curBaseField = baseFields.get(baseFieldIdx); SchemaField curTargetField = targetFields.get(targetFieldIdx); - //TODO: Re-evaluate ordinal processing? + // TODO: Re-evaluate ordinal processing? int comparison = curBaseField.getFieldPath().compareTo(curTargetField.getFieldPath()); if (renamedFields.contains(curBaseField)) { baseFieldIdx++; @@ -208,61 +242,78 @@ private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaM if (!curBaseField.getNativeDataType().equals(curTargetField.getNativeDataType())) { // Non-backward compatible change + Major version bump if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(String.format("%s native datatype of the field '%s' changed from '%s' to '%s'.", - BACKWARDS_INCOMPATIBLE_DESC, getFieldPathV1(curTargetField), curBaseField.getNativeDataType(), - curTargetField.getNativeDataType())) - .fieldPath(curBaseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) - .nullable(curBaseField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + String.format( + "%s native datatype of the field '%s' changed from '%s' to '%s'.", + BACKWARDS_INCOMPATIBLE_DESC, + getFieldPathV1(curTargetField), + curBaseField.getNativeDataType(), + curTargetField.getNativeDataType())) + .fieldPath(curBaseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) + .nullable(curBaseField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; ++targetFieldIdx; } List<ChangeEvent> propChangeEvents = - getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, changeCategory, 
auditStamp); + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; ++targetFieldIdx; } else if (comparison < 0) { - // Base Field was removed or was renamed. Non-backward compatible change + Major version bump + // Base Field was removed or was renamed. Non-backward compatible change + Major version + // bump // Check for rename, if rename coincides with other modifications we assume drop/add. - // Assumes that two different fields on the same schema would not have the same description, terms, + // Assumes that two different fields on the same schema would not have the same description, + // terms, // or tags and share the same type - SchemaField renamedField = findRenamedField(curBaseField, - targetFields.subList(targetFieldIdx, targetFields.size()), renamedFields); + SchemaField renamedField = + findRenamedField( + curBaseField, + targetFields.subList(targetFieldIdx, targetFields.size()), + renamedFields); if (renamedField == null) { processRemoval(changeCategory, changeEvents, datasetUrn, curBaseField, auditStamp); ++baseFieldIdx; } else { changeEvents.add(generateRenameEvent(datasetUrn, curBaseField, renamedField, auditStamp)); - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; renamedFields.add(renamedField); } } else { - // The targetField got added or a renaming occurred. Forward & backwards compatible change + minor version bump. - SchemaField renamedField = findRenamedField(curTargetField, - baseFields.subList(baseFieldIdx, baseFields.size()), renamedFields); + // The targetField got added or a renaming occurred. Forward & backwards compatible change + + // minor version bump. + SchemaField renamedField = + findRenamedField( + curTargetField, baseFields.subList(baseFieldIdx, baseFields.size()), renamedFields); if (renamedField == null) { processAdd(changeCategory, changeEvents, datasetUrn, curTargetField, auditStamp); ++targetFieldIdx; } else { - changeEvents.add(generateRenameEvent(datasetUrn, renamedField, curTargetField, auditStamp)); - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + changeEvents.add( + generateRenameEvent(datasetUrn, renamedField, curTargetField, auditStamp)); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++targetFieldIdx; renamedFields.add(renamedField); @@ -287,7 +338,8 @@ BACKWARDS_INCOMPATIBLE_DESC, getFieldPathV1(curTargetField), curBaseField.getNat } // Handle primary key constraint change events. - List<ChangeEvent> primaryKeyChangeEvents = getPrimaryKeyChangeEvents(baseSchema, targetSchema, datasetUrn, auditStamp); + List<ChangeEvent> primaryKeyChangeEvents = + getPrimaryKeyChangeEvents(baseSchema, targetSchema, datasetUrn, auditStamp); changeEvents.addAll(primaryKeyChangeEvents); // Handle foreign key constraint change events. 
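The rename detection reformatted in the hunks above rests on a deliberately narrow heuristic: a removed base field and an added target field are treated as one renamed field only when their native data types match, their parent paths match, and they share the same non-blank description. A minimal standalone sketch of that predicate follows; Field, parentPath, and the sample paths are illustrative stand-ins for this example, not DataHub's SchemaField API.

class RenameHeuristicSketch {
  // Stand-in for a schema field: dotted path, native type, and description.
  record Field(String path, String nativeType, String description) {}

  // Same native type, same parent path, and a shared non-blank description.
  static boolean isRenamed(Field base, Field candidate) {
    return base.nativeType().equals(candidate.nativeType())
        && parentPath(base.path()).equals(parentPath(candidate.path()))
        && base.description() != null
        && !base.description().isBlank()
        && base.description().equals(candidate.description());
  }

  // Everything before the last '.', or "" for a top-level field.
  static String parentPath(String fieldPath) {
    int idx = fieldPath.lastIndexOf('.');
    return idx < 0 ? "" : fieldPath.substring(0, idx);
  }

  public static void main(String[] args) {
    Field removed = new Field("user.first_name", "VARCHAR", "Given name");
    Field added = new Field("user.given_name", "VARCHAR", "Given name");
    System.out.println(isRenamed(removed, added)); // true: treated as a rename
  }
}

Each matched target is remembered (the renamedFields set above), so one added field can never satisfy two removals, and a candidate that differs in type or description still falls through to the drop/add path.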
@@ -306,16 +358,19 @@ private static void sortFieldsByPath(SchemaMetadata schemaMetadata) { schemaMetadata.setFields(new SchemaFieldArray(schemaFields)); } - private static SchemaField findRenamedField(SchemaField curField, List<SchemaField> targetFields, Set<SchemaField> renamedFields) { + private static SchemaField findRenamedField( + SchemaField curField, List<SchemaField> targetFields, Set<SchemaField> renamedFields) { return targetFields.stream() .filter(schemaField -> isRenamed(curField, schemaField)) .filter(field -> !renamedFields.contains(field)) - .findFirst().orElse(null); + .findFirst() + .orElse(null); } private static boolean isRenamed(SchemaField curField, SchemaField schemaField) { return curField.getNativeDataType().equals(schemaField.getNativeDataType()) - && parentFieldsMatch(curField, schemaField) && descriptionsMatch(curField, schemaField); + && parentFieldsMatch(curField, schemaField) + && descriptionsMatch(curField, schemaField); } private static boolean parentFieldsMatch(SchemaField curField, SchemaField schemaField) { @@ -324,73 +379,98 @@ private static boolean parentFieldsMatch(SchemaField curField, SchemaField schem if (curFieldIndex > 0 && schemaFieldIndex > 0) { String curFieldParentPath = curField.getFieldPath().substring(0, curFieldIndex); String schemaFieldParentPath = schemaField.getFieldPath().substring(0, schemaFieldIndex); - return StringUtils.isNotBlank(curFieldParentPath) && curFieldParentPath.equals(schemaFieldParentPath); + return StringUtils.isNotBlank(curFieldParentPath) + && curFieldParentPath.equals(schemaFieldParentPath); } // No parent field return curFieldIndex < 0 && schemaFieldIndex < 0; } private static boolean descriptionsMatch(SchemaField curField, SchemaField schemaField) { - return StringUtils.isNotBlank(curField.getDescription()) && curField.getDescription().equals(schemaField.getDescription()); + return StringUtils.isNotBlank(curField.getDescription()) + && curField.getDescription().equals(schemaField.getDescription()); } - private static void processRemoval(ChangeCategory changeCategory, List<ChangeEvent> changeEvents, Urn datasetUrn, - SchemaField baseField, AuditStamp auditStamp) { + private static void processRemoval( + ChangeCategory changeCategory, + List<ChangeEvent> changeEvents, + Urn datasetUrn, + SchemaField baseField, + AuditStamp auditStamp) { if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .modifier(getSchemaFieldUrn(datasetUrn, baseField).toString()) - .entityUrn(datasetUrn.toString()) - .category(ChangeCategory.TECHNICAL_SCHEMA) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " removal of field: '" + getFieldPathV1(baseField) + "'.") - .fieldPath(baseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, baseField)) - .nullable(baseField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .modifier(getSchemaFieldUrn(datasetUrn, baseField).toString()) + .entityUrn(datasetUrn.toString()) + .category(ChangeCategory.TECHNICAL_SCHEMA) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " removal of field: '" + + getFieldPathV1(baseField) + + "'.") + .fieldPath(baseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, baseField)) + .nullable(baseField.isNullable()) + 
.auditStamp(auditStamp) + .build()); } - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(baseField, null, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents(baseField, null, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); } - private static void processAdd(ChangeCategory changeCategory, List<ChangeEvent> changeEvents, Urn datasetUrn, - SchemaField targetField, AuditStamp auditStamp) { + private static void processAdd( + ChangeCategory changeCategory, + List<ChangeEvent> changeEvents, + Urn datasetUrn, + SchemaField targetField, + AuditStamp auditStamp) { if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .modifier(getSchemaFieldUrn(datasetUrn, targetField).toString()) - .entityUrn(datasetUrn.toString()) - .category(ChangeCategory.TECHNICAL_SCHEMA) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(BACK_AND_FORWARD_COMPATIBLE_DESC + "the newly added field '" + getFieldPathV1(targetField) + "'.") - .fieldPath(targetField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, targetField)) - .nullable(targetField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .modifier(getSchemaFieldUrn(datasetUrn, targetField).toString()) + .entityUrn(datasetUrn.toString()) + .category(ChangeCategory.TECHNICAL_SCHEMA) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + BACK_AND_FORWARD_COMPATIBLE_DESC + + "the newly added field '" + + getFieldPathV1(targetField) + + "'.") + .fieldPath(targetField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, targetField)) + .nullable(targetField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(null, targetField, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents(null, targetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); } - private static ChangeEvent generateRenameEvent(Urn datasetUrn, SchemaField curBaseField, SchemaField curTargetField, - AuditStamp auditStamp) { - return DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(BACK_AND_FORWARD_COMPATIBLE_DESC + "renaming of the field '" + getFieldPathV1(curBaseField) - + " to " + getFieldPathV1(curTargetField) + "'.") - .fieldPath(curBaseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) - .nullable(curBaseField.isNullable()) - .auditStamp(auditStamp) - .build(); + private static ChangeEvent generateRenameEvent( + Urn datasetUrn, SchemaField curBaseField, SchemaField curTargetField, AuditStamp auditStamp) { + return DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + BACK_AND_FORWARD_COMPATIBLE_DESC + + "renaming of the field '" + + 
getFieldPathV1(curBaseField) + + " to " + + getFieldPathV1(curTargetField) + + "'.") + .fieldPath(curBaseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) + .nullable(curBaseField.isNullable()) + .auditStamp(auditStamp) + .build(); } private static SchemaMetadata getSchemaMetadataFromAspect(EntityAspect entityAspect) { @@ -407,49 +487,73 @@ private static List<ChangeEvent> getForeignKeyChangeEvents() { return foreignKeyChangeEvents; } - private static List<ChangeEvent> getPrimaryKeyChangeEvents(SchemaMetadata baseSchema, SchemaMetadata targetSchema, - Urn datasetUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> getPrimaryKeyChangeEvents( + SchemaMetadata baseSchema, + SchemaMetadata targetSchema, + Urn datasetUrn, + AuditStamp auditStamp) { List<ChangeEvent> primaryKeyChangeEvents = new ArrayList<>(); Set<String> basePrimaryKeys = - (baseSchema != null && baseSchema.getPrimaryKeys() != null) ? new HashSet<>(baseSchema.getPrimaryKeys()) + (baseSchema != null && baseSchema.getPrimaryKeys() != null) + ? new HashSet<>(baseSchema.getPrimaryKeys()) : new HashSet<>(); Set<String> targetPrimaryKeys = - (targetSchema != null && targetSchema.getPrimaryKeys() != null) ? new HashSet<>(targetSchema.getPrimaryKeys()) : new HashSet<>(); + (targetSchema != null && targetSchema.getPrimaryKeys() != null) + ? new HashSet<>(targetSchema.getPrimaryKeys()) + : new HashSet<>(); Set<String> removedBaseKeys = - basePrimaryKeys.stream().filter(key -> !targetPrimaryKeys.contains(key)).collect(Collectors.toSet()); + basePrimaryKeys.stream() + .filter(key -> !targetPrimaryKeys.contains(key)) + .collect(Collectors.toSet()); for (String removedBaseKeyField : removedBaseKeys) { - primaryKeyChangeEvents.add(ChangeEvent.builder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn.toString(), removedBaseKeyField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " removal of the primary key field '" + removedBaseKeyField + "'") - .auditStamp(auditStamp) - .build()); + primaryKeyChangeEvents.add( + ChangeEvent.builder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn.toString(), removedBaseKeyField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " removal of the primary key field '" + + removedBaseKeyField + + "'") + .auditStamp(auditStamp) + .build()); } Set<String> addedTargetKeys = - targetPrimaryKeys.stream().filter(key -> !basePrimaryKeys.contains(key)).collect(Collectors.toSet()); + targetPrimaryKeys.stream() + .filter(key -> !basePrimaryKeys.contains(key)) + .collect(Collectors.toSet()); for (String addedTargetKeyField : addedTargetKeys) { - primaryKeyChangeEvents.add(ChangeEvent.builder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, addedTargetKeyField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " addition of the primary key field '" + addedTargetKeyField + "'") - .auditStamp(auditStamp) - .build()); + primaryKeyChangeEvents.add( + ChangeEvent.builder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, addedTargetKeyField).toString()) + 
.entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " addition of the primary key field '" + + addedTargetKeyField + + "'") + .auditStamp(auditStamp) + .build()); } return primaryKeyChangeEvents; } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory changeCategory, JsonPatch rawDiff, boolean rawDiffRequested) { - if (!previousValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME) || !currentValue.getAspect() - .equals(SCHEMA_METADATA_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory changeCategory, + JsonPatch rawDiff, + boolean rawDiffRequested) { + if (!previousValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME) + || !currentValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + SCHEMA_METADATA_ASPECT_NAME); } @@ -462,9 +566,14 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec List<ChangeEvent> changeEvents; try { - changeEvents = new ArrayList<>( - computeDiffs(baseSchema, targetSchema, DatasetUrn.createFromString(currentValue.getUrn()), changeCategory, - null)); + changeEvents = + new ArrayList<>( + computeDiffs( + baseSchema, + targetSchema, + DatasetUrn.createFromString(currentValue.getUrn()), + changeCategory, + null)); } catch (URISyntaxException e) { throw new IllegalArgumentException("Malformed DatasetUrn " + currentValue.getUrn()); } @@ -472,7 +581,9 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec // Assess the highest change at the transaction(schema) level. 
SemanticChangeType highestSematicChange = SemanticChangeType.NONE;
     changeEvents =
-        changeEvents.stream().filter(changeEvent -> changeEvent.getCategory() == changeCategory).collect(Collectors.toList());
+        changeEvents.stream()
+            .filter(changeEvent -> changeEvent.getCategory() == changeCategory)
+            .collect(Collectors.toList());
     ChangeEvent highestChangeEvent =
         changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null);
     if (highestChangeEvent != null) {
@@ -496,11 +607,17 @@ public List<ChangeEvent> getChangeEvents(
       @Nonnull Aspect<SchemaMetadata> to,
       @Nonnull AuditStamp auditStamp) {
     final List<ChangeEvent> changeEvents = new ArrayList<>();
-    changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.DOCUMENTATION, auditStamp));
-    changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TAG, auditStamp));
-    changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TECHNICAL_SCHEMA, auditStamp));
-    changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.GLOSSARY_TERM, auditStamp));
+    changeEvents.addAll(
+        computeDiffs(
+            from.getValue(), to.getValue(), urn, ChangeCategory.DOCUMENTATION, auditStamp));
+    changeEvents.addAll(
+        computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TAG, auditStamp));
+    changeEvents.addAll(
+        computeDiffs(
+            from.getValue(), to.getValue(), urn, ChangeCategory.TECHNICAL_SCHEMA, auditStamp));
+    changeEvents.addAll(
+        computeDiffs(
+            from.getValue(), to.getValue(), urn, ChangeCategory.GLOSSARY_TERM, auditStamp));
     return changeEvents;
   }
-
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java
index d97a3fa4f65dd..da8cf3e3b49c9 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java
@@ -13,22 +13,27 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 /**
- * This is a simple differ that compares to Domains aspects and assumes that each domain
- * will have a single domain (currently the semantic contract).
+ * This is a simple differ that compares two Domains aspects and assumes that each aspect will have
+ * a single domain (currently the semantic contract).
  */
 public class SingleDomainChangeEventGenerator extends EntityChangeEventGenerator<Domains> {
 
   @Override
-  public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect,
-      @Nonnull Aspect<Domains> from, @Nonnull Aspect<Domains> to, @Nonnull AuditStamp auditStamp) {
+  public List<ChangeEvent> getChangeEvents(
+      @Nonnull Urn urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nonnull Aspect<Domains> from,
+      @Nonnull Aspect<Domains> to,
+      @Nonnull AuditStamp auditStamp) {
     return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp);
   }
 
-  private List<ChangeEvent> computeDiffs(Domains baseDomains, Domains targetDomains, String entityUrn,
-      AuditStamp auditStamp) {
+  private List<ChangeEvent> computeDiffs(
+      Domains baseDomains, Domains targetDomains, String entityUrn, AuditStamp auditStamp) {
 
-    // Simply fetch the first element from each domains list and compare. If they are different, emit
+    // Simply fetch the first element from each domains list and compare. If they are different,
+    // emit
     // a domain ADD / REMOVE event.
     if (isDomainSet(baseDomains, targetDomains)) {
       return Collections.singletonList(
@@ -72,8 +77,7 @@ private List<ChangeEvent> computeDiffs(Domains baseDomain
             .modifier(targetDomains.getDomains().get(0).toString())
             .domainUrn(targetDomains.getDomains().get(0))
             .auditStamp(auditStamp)
-            .build()
-        );
+            .build());
     }
 
     return Collections.emptyList();
@@ -88,7 +92,9 @@ private boolean isDomainUnset(@Nullable final Domains from, @Nullable final Doma
   }
 
   private boolean isDomainChanged(@Nullable final Domains from, @Nullable final Domains to) {
-    return !isDomainEmpty(from) && !isDomainEmpty(to) && !from.getDomains().get(0).equals(to.getDomains().get(0));
+    return !isDomainEmpty(from)
+        && !isDomainEmpty(to)
+        && !from.getDomains().get(0).equals(to.getDomains().get(0));
   }
 
   private boolean isDomainEmpty(@Nullable final Domains domains) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
index 17f6798586417..df8aa4dd4ca71 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
@@ -11,28 +11,31 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
-/**
- * Differ responsible for determining whether an entity has been soft-deleted or soft-created.
- */
+/** Differ responsible for determining whether an entity has been soft-deleted or soft-created. */
 public class StatusChangeEventGenerator extends EntityChangeEventGenerator<Status> {
   @Override
-  public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect,
-      @Nonnull Aspect<Status> from, @Nonnull Aspect<Status> to, @Nonnull AuditStamp auditStamp) {
+  public List<ChangeEvent> getChangeEvents(
+      @Nonnull Urn urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nonnull Aspect<Status> from,
+      @Nonnull Aspect<Status> to,
+      @Nonnull AuditStamp auditStamp) {
     return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp);
   }
 
-  private List<ChangeEvent> computeDiffs(Status baseStatus, Status targetStatus, String entityUrn,
-      AuditStamp auditStamp) {
+  private List<ChangeEvent> computeDiffs(
+      Status baseStatus, Status targetStatus, String entityUrn, AuditStamp auditStamp) {
     // If the new status is "removed", then return a soft-deletion event.
     if (isRemoved(targetStatus)) {
       return Collections.singletonList(
          ChangeEvent.builder()
-              .category(ChangeCategory.LIFECYCLE)
-              .operation(ChangeOperation.SOFT_DELETE)
-              .auditStamp(auditStamp)
-              .entityUrn(entityUrn).build());
+              .category(ChangeCategory.LIFECYCLE)
+              .operation(ChangeOperation.SOFT_DELETE)
+              .auditStamp(auditStamp)
+              .entityUrn(entityUrn)
+              .build());
     }
 
     // If the new status is "unremoved", then return a reinstatement event.
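Condensed to its decision table, the status differ above emits a SOFT_DELETE event when the target aspect is marked removed, and a REINSTATE event when a previously removed aspect is no longer removed. A compact sketch under those assumptions; Status and Op are simplified stand-ins for this example, not the generated DataHub models.

class StatusDiffSketch {
  record Status(boolean removed) {}

  enum Op { SOFT_DELETE, REINSTATE, NONE }

  // Mirrors the branching above: a removed target wins first, then a
  // removed-to-unremoved transition, otherwise no lifecycle event at all.
  static Op diff(Status base, Status target) {
    if (target != null && target.removed()) {
      return Op.SOFT_DELETE;
    }
    if (base != null && base.removed() && target != null && !target.removed()) {
      return Op.REINSTATE;
    }
    return Op.NONE;
  }

  public static void main(String[] args) {
    System.out.println(diff(new Status(false), new Status(true))); // SOFT_DELETE
    System.out.println(diff(new Status(true), new Status(false))); // REINSTATE
  }
}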
@@ -42,7 +45,8 @@ private List<ChangeEvent> computeDiffs(Status baseStatus, Status targetStatus, S .category(ChangeCategory.LIFECYCLE) .operation(ChangeOperation.REINSTATE) .auditStamp(auditStamp) - .entityUrn(entityUrn).build()); + .entityUrn(entityUrn) + .build()); } return Collections.emptyList(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index 3e8f83a531b59..eec7680a56ecb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeseries.elastic; +import static com.linkedin.metadata.Constants.*; + import com.codahale.metrics.Timer; import com.datahub.util.RecordUtils; import com.datahub.util.exception.ESQueryException; @@ -35,7 +37,6 @@ import com.linkedin.timeseries.DeleteAspectValuesResult; import com.linkedin.timeseries.GenericTable; import com.linkedin.timeseries.GroupingBucket; - import com.linkedin.timeseries.TimeseriesIndexSizeResult; import com.linkedin.util.Pair; import java.io.IOException; @@ -70,16 +71,21 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.Constants.*; - - @Slf4j -public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectService, ElasticSearchIndexed { +public class ElasticSearchTimeseriesAspectService + implements TimeseriesAspectService, ElasticSearchIndexed { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String TIMESTAMP_FIELD = "timestampMillis"; private static final String EVENT_FIELD = "event"; private static final Integer DEFAULT_LIMIT = 10000; @@ -92,9 +98,13 @@ public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectSer private final ESAggregatedStatsDAO _esAggregatedStatsDAO; private final EntityRegistry _entityRegistry; - public ElasticSearchTimeseriesAspectService(@Nonnull RestHighLevelClient searchClient, - @Nonnull IndexConvention indexConvention, @Nonnull TimeseriesAspectIndexBuilders indexBuilders, - @Nonnull EntityRegistry entityRegistry, @Nonnull ESBulkProcessor bulkProcessor, int numRetries) { + public ElasticSearchTimeseriesAspectService( + @Nonnull RestHighLevelClient searchClient, + @Nonnull IndexConvention indexConvention, + @Nonnull TimeseriesAspectIndexBuilders indexBuilders, + @Nonnull EntityRegistry entityRegistry, + @Nonnull ESBulkProcessor bulkProcessor, + int numRetries) { _indexConvention = indexConvention; _indexBuilders = indexBuilders; _searchClient = searchClient; @@ -111,10 +121,14 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { Object event = docFields.get(EVENT_FIELD); GenericAspect genericAspect; try { 
- genericAspect = new GenericAspect().setValue( - ByteString.unsafeWrap(OBJECT_MAPPER.writeValueAsString(event).getBytes(StandardCharsets.UTF_8))); + genericAspect = + new GenericAspect() + .setValue( + ByteString.unsafeWrap( + OBJECT_MAPPER.writeValueAsString(event).getBytes(StandardCharsets.UTF_8))); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to deserialize event from the timeseries aspect index: " + e); + throw new RuntimeException( + "Failed to deserialize event from the timeseries aspect index: " + e); } genericAspect.setContentType("application/json"); envelopedAspect.setAspect(genericAspect); @@ -122,9 +136,11 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { if (systemMetadata != null) { try { envelopedAspect.setSystemMetadata( - RecordUtils.toRecordTemplate(SystemMetadata.class, OBJECT_MAPPER.writeValueAsString(systemMetadata))); + RecordUtils.toRecordTemplate( + SystemMetadata.class, OBJECT_MAPPER.writeValueAsString(systemMetadata))); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to deserialize system metadata from the timeseries aspect index: " + e); + throw new RuntimeException( + "Failed to deserialize system metadata from the timeseries aspect index: " + e); } } @@ -141,7 +157,8 @@ public List<ReindexConfig> buildReindexConfigs() { return _indexBuilders.buildReindexConfigs(); } - public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) + public String reindexAsync( + String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { return _indexBuilders.reindexAsync(index, filterQuery, options); } @@ -152,11 +169,14 @@ public void reindexAll() { } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull String docId, + public void upsertDocument( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull String docId, @Nonnull JsonNode document) { String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final UpdateRequest updateRequest = new UpdateRequest( - indexName, docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .docAsUpsert(true) .doc(document.toString(), XContentType.JSON) @@ -169,21 +189,29 @@ public List<TimeseriesIndexSizeResult> getIndexSizes() { List<TimeseriesIndexSizeResult> res = new ArrayList<>(); try { String indicesPattern = _indexConvention.getAllTimeseriesAspectIndicesPattern(); - Response r = _searchClient.getLowLevelClient().performRequest(new Request("GET", "/" + indicesPattern + "/_stats")); + Response r = + _searchClient + .getLowLevelClient() + .performRequest(new Request("GET", "/" + indicesPattern + "/_stats")); JsonNode body = new ObjectMapper().readTree(r.getEntity().getContent()); - body.get("indices").fields().forEachRemaining(entry -> { - TimeseriesIndexSizeResult elemResult = new TimeseriesIndexSizeResult(); - elemResult.setIndexName(entry.getKey()); - Optional<Pair<String, String>> indexEntityAndAspect = _indexConvention.getEntityAndAspectName(entry.getKey()); - if (indexEntityAndAspect.isPresent()) { - elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); - elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); - } - int sizeBytes = entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); - float sizeMb = (float) sizeBytes / 1000; - elemResult.setSizeMb(sizeMb); - 
res.add(elemResult); - }); + body.get("indices") + .fields() + .forEachRemaining( + entry -> { + TimeseriesIndexSizeResult elemResult = new TimeseriesIndexSizeResult(); + elemResult.setIndexName(entry.getKey()); + Optional<Pair<String, String>> indexEntityAndAspect = + _indexConvention.getEntityAndAspectName(entry.getKey()); + if (indexEntityAndAspect.isPresent()) { + elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); + elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); + } + int sizeBytes = + entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); + float sizeMb = (float) sizeBytes / 1000; + elemResult.setSizeMb(sizeMb); + res.add(elemResult); + }); return res; } catch (IOException e) { throw new RuntimeException(e); @@ -194,10 +222,10 @@ public List<TimeseriesIndexSizeResult> getIndexSizes() { public long countByFilter( @Nonnull final String entityName, @Nonnull final String aspectName, - @Nullable final Filter filter - ) { + @Nullable final Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final BoolQueryBuilder filterQueryBuilder = QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); CountRequest countRequest = new CountRequest(); countRequest.query(filterQueryBuilder); countRequest.indices(indexName); @@ -220,20 +248,25 @@ public List<EnvelopedAspect> getAspectValues( @Nullable final Integer limit, @Nullable final Filter filter, @Nullable final SortCriterion sort) { - final BoolQueryBuilder filterQueryBuilder = QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); filterQueryBuilder.must(QueryBuilders.matchQuery("urn", urn.toString())); // NOTE: We are interested only in the un-exploded rows as only they carry the `event` payload. filterQueryBuilder.mustNot(QueryBuilders.termQuery(MappingsBuilder.IS_EXPLODED_FIELD, true)); if (startTimeMillis != null) { - Criterion startTimeCriterion = new Criterion().setField(TIMESTAMP_FIELD) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTimeMillis.toString()); + Criterion startTimeCriterion = + new Criterion() + .setField(TIMESTAMP_FIELD) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(startTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); } if (endTimeMillis != null) { - Criterion endTimeCriterion = new Criterion().setField(TIMESTAMP_FIELD) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTimeMillis.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(TIMESTAMP_FIELD) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(endTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); } final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -242,7 +275,8 @@ public List<EnvelopedAspect> getAspectValues( if (sort != null) { final SortOrder esSortOrder = - (sort.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? SortOrder.ASC + (sort.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + ? 
SortOrder.ASC
               : SortOrder.DESC;
       searchSourceBuilder.sort(SortBuilders.fieldSort(sort.getField()).order(esSortOrder));
     } else {
@@ -258,8 +292,10 @@ public List<EnvelopedAspect> getAspectValues(
     log.debug("Search request is: " + searchRequest);
 
     SearchHits hits;
-    try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) {
-      final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT);
+    try (Timer.Context ignored =
+        MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) {
+      final SearchResponse searchResponse =
+          _searchClient.search(searchRequest, RequestOptions.DEFAULT);
       hits = searchResponse.getHits();
     } catch (Exception e) {
       log.error("Search query failed:", e);
@@ -272,17 +308,23 @@ public List<EnvelopedAspect> getAspectValues(
 
   @Override
   @Nonnull
-  public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter,
+  public GenericTable getAggregatedStats(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull AggregationSpec[] aggregationSpecs,
+      @Nullable Filter filter,
       @Nullable GroupingBucket[] groupingBuckets) {
-    return _esAggregatedStatsDAO.getAggregatedStats(entityName, aspectName, aggregationSpecs, filter, groupingBuckets);
+    return _esAggregatedStatsDAO.getAggregatedStats(
+        entityName, aspectName, aggregationSpecs, filter, groupingBuckets);
   }
 
   /**
-   * A generic delete by filter API which uses elasticsearch's deleteByQuery.
-   * NOTE: There is no need for the client to explicitly walk each scroll page with this approach. Elastic will synchronously
-   * delete all of the documents matching the query that is specified by the filter, and internally handles the batching logic
-   * by the scroll page size specified(i.e. the DEFAULT_LIMIT value of 10,000).
+   * A generic delete by filter API which uses elasticsearch's deleteByQuery. NOTE: There is no need
+   * for the client to explicitly walk each scroll page with this approach. Elastic will
+   * synchronously delete all of the documents matching the query that is specified by the filter,
+   * and internally handles the batching logic by the scroll page size specified (i.e. the
+   * DEFAULT_LIMIT value of 10,000).
+   *
    * @param entityName the name of the entity.
    * @param aspectName the name of the aspect.
    * @param filter the filter to be used for deletion of the documents on the index.
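In practice this means a caller hands the service a filter and receives a deletion count from a single synchronous call. A hedged usage sketch, assuming the service instance and the Filter are constructed elsewhere; the entity and aspect names are illustrative, not prescribed.

import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService;
import com.linkedin.timeseries.DeleteAspectValuesResult;

class DeleteByFilterUsageSketch {
  // No client-side scroll loop: one call deletes every matching document and
  // Elasticsearch pages through the matches internally.
  static long deleteMatching(ElasticSearchTimeseriesAspectService service, Filter filter) {
    DeleteAspectValuesResult result =
        service.deleteAspectValues("dataset", "datasetProfile", filter);
    return result.getNumDocsDeleted();
  }
}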
@@ -290,14 +332,18 @@ public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull Stri */ @Nonnull @Override - public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter) { + public DeleteAspectValuesResult deleteAspectValues( + @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); - final Optional<DeleteAspectValuesResult> result = _bulkProcessor - .deleteByQuery(filterQueryBuilder, false, DEFAULT_LIMIT, TimeValue.timeValueMinutes(10), indexName) - .map(response -> new DeleteAspectValuesResult().setNumDocsDeleted(response.getDeleted())); + final Optional<DeleteAspectValuesResult> result = + _bulkProcessor + .deleteByQuery( + filterQueryBuilder, false, DEFAULT_LIMIT, TimeValue.timeValueMinutes(10), indexName) + .map( + response -> + new DeleteAspectValuesResult().setNumDocsDeleted(response.getDeleted())); if (result.isPresent()) { return result.get(); @@ -309,14 +355,20 @@ public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @ @Nonnull @Override - public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String deleteAspectValuesAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); final int batchSize = options.getBatchSize() > 0 ? options.getBatchSize() : DEFAULT_LIMIT; - TimeValue timeout = options.getTimeoutSeconds() > 0 ? TimeValue.timeValueSeconds(options.getTimeoutSeconds()) : null; - final Optional<TaskSubmissionResponse> result = _bulkProcessor - .deleteByQueryAsync(filterQueryBuilder, false, batchSize, timeout, indexName); + TimeValue timeout = + options.getTimeoutSeconds() > 0 + ? 
TimeValue.timeValueSeconds(options.getTimeoutSeconds()) + : null; + final Optional<TaskSubmissionResponse> result = + _bulkProcessor.deleteByQueryAsync(filterQueryBuilder, false, batchSize, timeout, indexName); if (result.isPresent()) { return result.get().getTask(); @@ -327,7 +379,10 @@ public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull Strin } @Override - public String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); @@ -350,10 +405,16 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId) for (Map.Entry<String, EntitySpec> entry : _entityRegistry.getEntitySpecs().entrySet()) { for (AspectSpec aspectSpec : entry.getValue().getAspectSpecs()) { if (aspectSpec.isTimeseries()) { - DeleteAspectValuesResult result = this.deleteAspectValues(entry.getKey(), aspectSpec.getName(), filter); - rollbackResult.setNumDocsDeleted(rollbackResult.getNumDocsDeleted() + result.getNumDocsDeleted()); - log.info("Number of timeseries docs deleted for entity:{}, aspect:{}, runId:{}={}", entry.getKey(), - aspectSpec.getName(), runId, result.getNumDocsDeleted()); + DeleteAspectValuesResult result = + this.deleteAspectValues(entry.getKey(), aspectSpec.getName(), filter); + rollbackResult.setNumDocsDeleted( + rollbackResult.getNumDocsDeleted() + result.getNumDocsDeleted()); + log.info( + "Number of timeseries docs deleted for entity:{}, aspect:{}, runId:{}={}", + entry.getKey(), + aspectSpec.getName(), + runId, + result.getNumDocsDeleted()); } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java index 37a5dc304cf7f..5bb523c8a8c1e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java @@ -8,7 +8,6 @@ import java.util.Map; import javax.annotation.Nonnull; - public class MappingsBuilder { public static final String URN_FIELD = "urn"; @@ -24,13 +23,14 @@ public class MappingsBuilder { public static final String PARTITION_SPEC_TIME_PARTITION = "timePartition"; public static final String RUN_ID_FIELD = "runId"; - private MappingsBuilder() { - } + private MappingsBuilder() {} public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSpec) { if (!aspectSpec.isTimeseries()) { throw new IllegalArgumentException( - String.format("Cannot apply timeseries field indexing for a non-timeseries aspect %s", aspectSpec.getName())); + String.format( + "Cannot apply timeseries field indexing for a non-timeseries aspect %s", + aspectSpec.getName())); } Map<String, Object> mappings = new HashMap<>(); @@ -41,16 +41,24 @@ public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSp mappings.put(TIMESTAMP_FIELD, ImmutableMap.of("type", "date")); mappings.put(TIMESTAMP_MILLIS_FIELD, ImmutableMap.of("type", "date")); mappings.put(EVENT_GRANULARITY, ImmutableMap.of("type", "keyword")); - mappings.put(PARTITION_SPEC, 
ImmutableMap.of("properties", - ImmutableMap.of(PARTITION_SPEC_PARTITION, ImmutableMap.of("type", "keyword"), PARTITION_SPEC_TIME_PARTITION, - ImmutableMap.of("type", "keyword")))); + mappings.put( + PARTITION_SPEC, + ImmutableMap.of( + "properties", + ImmutableMap.of( + PARTITION_SPEC_PARTITION, + ImmutableMap.of("type", "keyword"), + PARTITION_SPEC_TIME_PARTITION, + ImmutableMap.of("type", "keyword")))); mappings.put(EVENT_FIELD, ImmutableMap.of("type", "object", "enabled", false)); mappings.put(SYSTEM_METADATA_FIELD, ImmutableMap.of("type", "object", "enabled", false)); mappings.put(IS_EXPLODED_FIELD, ImmutableMap.of("type", "boolean")); - aspectSpec.getTimeseriesFieldSpecs() + aspectSpec + .getTimeseriesFieldSpecs() .forEach(x -> mappings.put(x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); - aspectSpec.getTimeseriesFieldCollectionSpecs() + aspectSpec + .getTimeseriesFieldCollectionSpecs() .forEach(x -> mappings.put(x.getName(), getTimeseriesFieldCollectionSpecMapping(x))); return ImmutableMap.of("properties", mappings); @@ -59,11 +67,16 @@ public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSp private static Map<String, Object> getTimeseriesFieldCollectionSpecMapping( TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec) { Map<String, Object> collectionMappings = new HashMap<>(); - collectionMappings.put(timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(), + collectionMappings.put( + timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(), getFieldMapping(DataSchema.Type.STRING)); - timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap() + timeseriesFieldCollectionSpec + .getTimeseriesFieldSpecMap() .values() - .forEach(x -> collectionMappings.put(x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); + .forEach( + x -> + collectionMappings.put( + x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); return ImmutableMap.of("properties", collectionMappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java index e9518ed8c39fa..564bcb2a242cb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java @@ -7,19 +7,17 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.util.Pair; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import com.linkedin.util.Pair; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.QueryBuilder; - @Slf4j @RequiredArgsConstructor public class TimeseriesAspectIndexBuilders implements ElasticSearchIndexed { @@ -38,7 +36,8 @@ public void reindexAll() { } } - public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) + public String reindexAsync( + String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { 
Optional<Pair<String, String>> entityAndAspect = _indexConvention.getEntityAndAspectName(index); if (entityAndAspect.isEmpty()) { @@ -54,31 +53,42 @@ public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, Bat } } if (!entitySpec.hasAspect(aspectName)) { - throw new IllegalArgumentException(String.format("Could not find aspect %s of entity %s", aspectName, entityName)); + throw new IllegalArgumentException( + String.format("Could not find aspect %s of entity %s", aspectName, entityName)); } - ReindexConfig config = _indexBuilder.buildReindexState(index, - MappingsBuilder.getMappings(_entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName)), - Collections.emptyMap()); + ReindexConfig config = + _indexBuilder.buildReindexState( + index, + MappingsBuilder.getMappings( + _entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName)), + Collections.emptyMap()); return _indexBuilder.reindexInPlaceAsync(index, filterQuery, options, config); } @Override public List<ReindexConfig> buildReindexConfigs() { return _entityRegistry.getEntitySpecs().values().stream() - .flatMap(entitySpec -> entitySpec.getAspectSpecs().stream() + .flatMap( + entitySpec -> + entitySpec.getAspectSpecs().stream() .map(aspectSpec -> Pair.of(entitySpec, aspectSpec))) - .filter(pair -> pair.getSecond().isTimeseries()) - .map(pair -> { + .filter(pair -> pair.getSecond().isTimeseries()) + .map( + pair -> { try { return _indexBuilder.buildReindexState( - _indexConvention.getTimeseriesAspectIndexName(pair.getFirst().getName(), pair.getSecond().getName()), - MappingsBuilder.getMappings(pair.getSecond()), Collections.emptyMap()); + _indexConvention.getTimeseriesAspectIndexName( + pair.getFirst().getName(), pair.getSecond().getName()), + MappingsBuilder.getMappings(pair.getSecond()), + Collections.emptyMap()); } catch (IOException e) { - log.error("Issue while building timeseries field index for entity {} aspect {}", pair.getFirst().getName(), - pair.getSecond().getName()); + log.error( + "Issue while building timeseries field index for entity {} aspect {}", + pair.getFirst().getName(), + pair.getSecond().getName()); throw new RuntimeException(e); } - }).collect(Collectors.toList()); + }) + .collect(Collectors.toList()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java index 316d25d1f37f4..539e5dfbaa1d0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java @@ -49,7 +49,6 @@ import org.opensearch.search.aggregations.pipeline.ParsedBucketMetricValue; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j public class ESAggregatedStatsDAO { private static final String ES_AGGREGATION_PREFIX = "agg_"; @@ -66,7 +65,9 @@ public class ESAggregatedStatsDAO { private final RestHighLevelClient _searchClient; private final EntityRegistry _entityRegistry; - public ESAggregatedStatsDAO(@Nonnull IndexConvention indexConvention, @Nonnull RestHighLevelClient searchClient, + public ESAggregatedStatsDAO( + @Nonnull IndexConvention indexConvention, + @Nonnull RestHighLevelClient searchClient, @Nonnull EntityRegistry entityRegistry) { _indexConvention = indexConvention; _searchClient = searchClient; @@ -94,7 +95,8 @@ private static String getAggregationSpecAggDisplayName(final 
AggregationSpec agg prefix = "cardinality_"; break; default: - throw new IllegalArgumentException("Unknown AggregationSpec type" + aggregationSpec.getAggregationType()); + throw new IllegalArgumentException( + "Unknown AggregationSpec type" + aggregationSpec.getAggregationType()); } return prefix + aggregationSpec.getFieldPath(); } @@ -103,12 +105,19 @@ private static String getGroupingBucketAggName(final GroupingBucket groupingBuck if (groupingBucket.getType() == GroupingBucketType.DATE_GROUPING_BUCKET) { return toEsAggName(ES_AGGREGATION_PREFIX + groupingBucket.getKey()); } - return toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + groupingBucket.getKey()); + return toEsAggName( + ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + groupingBucket.getKey()); } - private static void rowGenHelper(final Aggregations lowestAggs, final int curLevel, final int lastLevel, - final List<StringArray> rows, final Stack<String> row, final ImmutableList<GroupingBucket> groupingBuckets, - final ImmutableList<AggregationSpec> aggregationSpecs, AspectSpec aspectSpec) { + private static void rowGenHelper( + final Aggregations lowestAggs, + final int curLevel, + final int lastLevel, + final List<StringArray> rows, + final Stack<String> row, + final ImmutableList<GroupingBucket> groupingBuckets, + final ImmutableList<AggregationSpec> aggregationSpecs, + AspectSpec aspectSpec) { if (curLevel == lastLevel) { // (Base-case): We are at the lowest level of nested bucket aggregations. // Append member aggregation values to the row and add the row to the output. @@ -123,7 +132,7 @@ private static void rowGenHelper(final Aggregations lowestAggs, final int curLev row.pop(); } } else if (curLevel < lastLevel) { - //(Recursive-case): We are still processing the nested group-by multi-bucket aggregations. + // (Recursive-case): We are still processing the nested group-by multi-bucket aggregations. // For each bucket, add the key to the row and recur down for full row construction. GroupingBucket curGroupingBucket = groupingBuckets.get(curLevel); String curGroupingBucketAggName = getGroupingBucketAggName(curGroupingBucket); @@ -136,7 +145,14 @@ private static void rowGenHelper(final Aggregations lowestAggs, final int curLev row.push(b.getKeyAsString()); } // Recur down - rowGenHelper(b.getAggregations(), curLevel + 1, lastLevel, rows, row, groupingBuckets, aggregationSpecs, + rowGenHelper( + b.getAggregations(), + curLevel + 1, + lastLevel, + rows, + row, + groupingBuckets, + aggregationSpecs, aspectSpec); // Remove the row value we have added for this level. row.pop(); @@ -179,11 +195,12 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str if (fieldPath.equals(MappingsBuilder.EVENT_GRANULARITY)) { return DataSchema.Type.RECORD; } - + String[] memberParts = fieldPath.split("\\."); if (memberParts.length == 1) { // Search in the timeseriesFieldSpecs. 
- TimeseriesFieldSpec timeseriesFieldSpec = aspectSpec.getTimeseriesFieldSpecMap().get(memberParts[0]); + TimeseriesFieldSpec timeseriesFieldSpec = + aspectSpec.getTimeseriesFieldSpecMap().get(memberParts[0]); if (timeseriesFieldSpec != null) { return timeseriesFieldSpec.getPegasusSchema().getType(); } @@ -196,8 +213,8 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str } else if (memberParts.length == 2) { // Check if partitionSpec if (memberParts[0].equals(MappingsBuilder.PARTITION_SPEC)) { - if (memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_PARTITION) || memberParts[1].equals( - MappingsBuilder.PARTITION_SPEC_TIME_PARTITION)) { + if (memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_PARTITION) + || memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION)) { return DataSchema.Type.STRING; } else { throw new IllegalArgumentException("Unknown partitionSpec member" + memberParts[1]); @@ -208,44 +225,53 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec = aspectSpec.getTimeseriesFieldCollectionSpecMap().get(memberParts[0]); if (timeseriesFieldCollectionSpec != null) { - if (timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey().equals(memberParts[1])) { + if (timeseriesFieldCollectionSpec + .getTimeseriesFieldCollectionAnnotation() + .getKey() + .equals(memberParts[1])) { // Matched against the collection stat key. return DataSchema.Type.STRING; } - TimeseriesFieldSpec tsFieldSpec = timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap().get(memberParts[1]); + TimeseriesFieldSpec tsFieldSpec = + timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap().get(memberParts[1]); if (tsFieldSpec != null) { // Matched against a collection stat field. 
return tsFieldSpec.getPegasusSchema().getType(); } } } - throw new IllegalArgumentException("Unknown TimeseriesField or TimeseriesFieldCollection: " + fieldPath); + throw new IllegalArgumentException( + "Unknown TimeseriesField or TimeseriesFieldCollection: " + fieldPath); } - private static DataSchema.Type getGroupingBucketKeyType(@Nonnull AspectSpec aspectSpec, - @Nonnull GroupingBucket groupingBucket) { + private static DataSchema.Type getGroupingBucketKeyType( + @Nonnull AspectSpec aspectSpec, @Nonnull GroupingBucket groupingBucket) { return getTimeseriesFieldType(aspectSpec, groupingBucket.getKey()); } - private static DataSchema.Type getAggregationSpecMemberType(@Nonnull AspectSpec aspectSpec, - @Nonnull AggregationSpec aggregationSpec) { + private static DataSchema.Type getAggregationSpecMemberType( + @Nonnull AspectSpec aspectSpec, @Nonnull AggregationSpec aggregationSpec) { return getTimeseriesFieldType(aspectSpec, aggregationSpec.getFieldPath()); } - private static List<String> genColumnNames(GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) { - List<String> groupingBucketNames = Arrays.stream(groupingBuckets).map(t -> t.getKey()).collect(Collectors.toList()); + private static List<String> genColumnNames( + GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) { + List<String> groupingBucketNames = + Arrays.stream(groupingBuckets).map(t -> t.getKey()).collect(Collectors.toList()); - List<String> aggregationNames = Arrays.stream(aggregationSpecs) - .map(ESAggregatedStatsDAO::getAggregationSpecAggDisplayName) - .collect(Collectors.toList()); + List<String> aggregationNames = + Arrays.stream(aggregationSpecs) + .map(ESAggregatedStatsDAO::getAggregationSpecAggDisplayName) + .collect(Collectors.toList()); List<String> columnNames = - Stream.concat(groupingBucketNames.stream(), aggregationNames.stream()).collect(Collectors.toList()); + Stream.concat(groupingBucketNames.stream(), aggregationNames.stream()) + .collect(Collectors.toList()); return columnNames; } - private static List<String> genColumnTypes(AspectSpec aspectSpec, GroupingBucket[] groupingBuckets, - AggregationSpec[] aggregationSpecs) { + private static List<String> genColumnTypes( + AspectSpec aspectSpec, GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) { List<String> columnTypes = new ArrayList<>(); for (GroupingBucket g : groupingBuckets) { DataSchema.Type type = getGroupingBucketKeyType(aspectSpec, g); @@ -282,14 +308,17 @@ private static List<String> genColumnTypes(AspectSpec aspectSpec, GroupingBucket break; default: throw new IllegalArgumentException( - "Type generation not yet supported for aggregation type: " + aggregationSpec.getAggregationType()); + "Type generation not yet supported for aggregation type: " + + aggregationSpec.getAggregationType()); } } return columnTypes; } - private static String extractAggregationValue(@Nonnull final Aggregations aggregations, - @Nonnull final AspectSpec aspectSpec, @Nonnull final AggregationSpec aggregationSpec) { + private static String extractAggregationValue( + @Nonnull final Aggregations aggregations, + @Nonnull final AspectSpec aspectSpec, + @Nonnull final AggregationSpec aggregationSpec) { String memberAggName = getAggregationSpecAggESName(aggregationSpec); Object memberAgg = aggregations.get(memberAggName); DataSchema.Type memberType = getAggregationSpecMemberType(aspectSpec, aggregationSpec); @@ -309,36 +338,42 @@ private static String extractAggregationValue(@Nonnull final Aggregations aggreg case FLOAT: return 
String.valueOf(((ParsedSum) memberAgg).getValue());
       default:
-        throw new IllegalArgumentException("Unexpected type encountered for sum aggregation: " + memberType);
+        throw new IllegalArgumentException(
+            "Unexpected type encountered for sum aggregation: " + memberType);
       }
     } else if (memberAgg instanceof ParsedCardinality) {
       // This will always be a long value as string.
       return String.valueOf(((ParsedCardinality) memberAgg).getValue());
     } else {
-      throw new UnsupportedOperationException("Member aggregations other than latest and sum not supported yet.");
+      throw new UnsupportedOperationException(
+          "Member aggregations other than latest and sum not supported yet.");
     }
     return defaultValue;
   }
 
-  private AspectSpec getTimeseriesAspectSpec(@Nonnull String entityName, @Nonnull String aspectName) {
+  private AspectSpec getTimeseriesAspectSpec(
+      @Nonnull String entityName, @Nonnull String aspectName) {
     EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName);
     AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName);
     if (aspectSpec == null) {
-      new IllegalArgumentException(String.format("Unrecognized aspect name {} for entity {}", aspectName, entityName));
+      throw new IllegalArgumentException(
+          String.format("Unrecognized aspect name %s for entity %s", aspectName, entityName));
     } else if (!aspectSpec.isTimeseries()) {
-      new IllegalArgumentException(
-          String.format("aspect name {} for entity {} is not a timeseries aspect", aspectName, entityName));
+      throw new IllegalArgumentException(
+          String.format(
+              "aspect name %s for entity %s is not a timeseries aspect", aspectName, entityName));
     }
     return aspectSpec;
   }
 
-  /**
-   * Get the aggregated metrics for the given dataset or column from a time series aspect.
-   */
+  /** Get the aggregated metrics for the given dataset or column from a time series aspect. */
   @Nonnull
-  public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter,
+  public GenericTable getAggregatedStats(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull AggregationSpec[] aggregationSpecs,
+      @Nullable Filter filter,
       @Nullable GroupingBucket[] groupingBuckets) {
 
     // Setup the filter query builder using the input filter provided.
@@ -371,51 +406,62 @@ public GenericTable getAggregatedStats(@Nonnull Stri
     log.debug("Search request is: " + searchRequest);
 
     try {
-      final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT);
-      return generateResponseFromElastic(searchResponse, groupingBuckets, aggregationSpecs, aspectSpec);
+      final SearchResponse searchResponse =
+          _searchClient.search(searchRequest, RequestOptions.DEFAULT);
+      return generateResponseFromElastic(
+          searchResponse, groupingBuckets, aggregationSpecs, aspectSpec);
     } catch (Exception e) {
       log.error("Search query failed: " + e.getMessage());
       throw new ESQueryException("Search query failed:", e);
     }
   }
 
-  private void addAggregationBuildersFromAggregationSpec(AspectSpec aspectSpec, AggregationBuilder baseAggregation,
-      AggregationSpec aggregationSpec) {
+  private void addAggregationBuildersFromAggregationSpec(
+      AspectSpec aspectSpec, AggregationBuilder baseAggregation, AggregationSpec aggregationSpec) {
     String fieldPath = aggregationSpec.getFieldPath();
     String esFieldName = fieldPath;
 
     switch (aggregationSpec.getAggregationType()) {
       case LATEST:
         // Construct the terms aggregation with a max timestamp sub-aggregation. 
- String termsAggName = toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + fieldPath); - AggregationBuilder termsAgg = AggregationBuilders.terms(termsAggName) - .field(esFieldName) - .size(MAX_TERM_BUCKETS) - .subAggregation(AggregationBuilders.max(ES_AGG_MAX_TIMESTAMP).field(ES_FIELD_TIMESTAMP)); + String termsAggName = + toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + fieldPath); + AggregationBuilder termsAgg = + AggregationBuilders.terms(termsAggName) + .field(esFieldName) + .size(MAX_TERM_BUCKETS) + .subAggregation( + AggregationBuilders.max(ES_AGG_MAX_TIMESTAMP).field(ES_FIELD_TIMESTAMP)); baseAggregation.subAggregation(termsAgg); // Construct the max_bucket pipeline aggregation MaxBucketPipelineAggregationBuilder maxBucketPipelineAgg = - PipelineAggregatorBuilders.maxBucket(getAggregationSpecAggESName(aggregationSpec), + PipelineAggregatorBuilders.maxBucket( + getAggregationSpecAggESName(aggregationSpec), termsAggName + ">" + ES_AGG_MAX_TIMESTAMP); baseAggregation.subAggregation(maxBucketPipelineAgg); break; case SUM: AggregationBuilder sumAgg = - AggregationBuilders.sum(getAggregationSpecAggESName(aggregationSpec)).field(esFieldName); + AggregationBuilders.sum(getAggregationSpecAggESName(aggregationSpec)) + .field(esFieldName); baseAggregation.subAggregation(sumAgg); break; case CARDINALITY: AggregationBuilder cardinalityAgg = - AggregationBuilders.cardinality(getAggregationSpecAggESName(aggregationSpec)).field(esFieldName); + AggregationBuilders.cardinality(getAggregationSpecAggESName(aggregationSpec)) + .field(esFieldName); baseAggregation.subAggregation(cardinalityAgg); break; default: - throw new IllegalStateException("Unexpected value: " + aggregationSpec.getAggregationType()); + throw new IllegalStateException( + "Unexpected value: " + aggregationSpec.getAggregationType()); } } - private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuilder(AspectSpec aspectSpec, - @Nullable AggregationBuilder baseAggregationBuilder, @Nullable GroupingBucket[] groupingBuckets) { + private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuilder( + AspectSpec aspectSpec, + @Nullable AggregationBuilder baseAggregationBuilder, + @Nullable GroupingBucket[] groupingBuckets) { AggregationBuilder firstAggregationBuilder = baseAggregationBuilder; AggregationBuilder lastAggregationBuilder = baseAggregationBuilder; @@ -427,18 +473,20 @@ private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuil if (!curGroupingBucket.getKey().equals(ES_FIELD_TIMESTAMP)) { throw new IllegalArgumentException("Date Grouping bucket is not:" + ES_FIELD_TIMESTAMP); } - curAggregationBuilder = AggregationBuilders.dateHistogram(ES_AGG_TIMESTAMP) - .field(ES_FIELD_TIMESTAMP) - .calendarInterval(getHistogramInterval(curGroupingBucket.getTimeWindowSize())); + curAggregationBuilder = + AggregationBuilders.dateHistogram(ES_AGG_TIMESTAMP) + .field(ES_FIELD_TIMESTAMP) + .calendarInterval(getHistogramInterval(curGroupingBucket.getTimeWindowSize())); } else if (curGroupingBucket.getType() == GroupingBucketType.STRING_GROUPING_BUCKET) { // Process the string grouping bucket using the 'terms' aggregation. // The field can be Keyword, Numeric, ip, boolean, or binary. 
String fieldName = ESUtils.toKeywordField(curGroupingBucket.getKey(), true); DataSchema.Type fieldType = getGroupingBucketKeyType(aspectSpec, curGroupingBucket); - curAggregationBuilder = AggregationBuilders.terms(getGroupingBucketAggName(curGroupingBucket)) - .field(fieldName) - .size(MAX_TERM_BUCKETS) - .order(BucketOrder.aggregation("_key", true)); + curAggregationBuilder = + AggregationBuilders.terms(getGroupingBucketAggName(curGroupingBucket)) + .field(fieldName) + .size(MAX_TERM_BUCKETS) + .order(BucketOrder.aggregation("_key", true)); } if (firstAggregationBuilder == null) { firstAggregationBuilder = curAggregationBuilder; @@ -453,8 +501,11 @@ private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuil return Pair.of(firstAggregationBuilder, lastAggregationBuilder); } - private GenericTable generateResponseFromElastic(SearchResponse searchResponse, GroupingBucket[] groupingBuckets, - AggregationSpec[] aggregationSpecs, AspectSpec aspectSpec) { + private GenericTable generateResponseFromElastic( + SearchResponse searchResponse, + GroupingBucket[] groupingBuckets, + AggregationSpec[] aggregationSpecs, + AspectSpec aspectSpec) { GenericTable resultTable = new GenericTable(); // 1. Generate the column names. @@ -470,8 +521,15 @@ private GenericTable generateResponseFromElastic(SearchResponse searchResponse, Aggregations aggregations = searchResponse.getAggregations(); Stack<String> rowAcc = new Stack<>(); - rowGenHelper(aggregations, 0, groupingBuckets.length, rows, rowAcc, - ImmutableList.copyOf(groupingBuckets), ImmutableList.copyOf(aggregationSpecs), aspectSpec); + rowGenHelper( + aggregations, + 0, + groupingBuckets.length, + rows, + rowAcc, + ImmutableList.copyOf(groupingBuckets), + ImmutableList.copyOf(aggregationSpecs), + aspectSpec); if (!rowAcc.isEmpty()) { throw new IllegalStateException("Expected stack to be empty."); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java index 4e41603ca1411..c0f66acaaca5a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeseries.transformer; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -31,25 +33,28 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; -import static com.linkedin.metadata.Constants.*; - - -/** - * Class that provides a utility function that transforms the timeseries aspect into a document - */ +/** Class that provides a utility function that transforms the timeseries aspect into a document */ @Slf4j public class TimeseriesAspectTransformer { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + 
.getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } - private TimeseriesAspectTransformer() { - } + private TimeseriesAspectTransformer() {} - public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull final RecordTemplate timeseriesAspect, - @Nonnull final AspectSpec aspectSpec, @Nullable final SystemMetadata systemMetadata) + public static Map<String, JsonNode> transform( + @Nonnull final Urn urn, + @Nonnull final RecordTemplate timeseriesAspect, + @Nonnull final AspectSpec aspectSpec, + @Nullable final SystemMetadata systemMetadata) throws JsonProcessingException { ObjectNode commonDocument = getCommonDocument(urn, timeseriesAspect, systemMetadata); Map<String, JsonNode> finalDocuments = new HashMap<>(); @@ -58,9 +63,12 @@ public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull f ObjectNode document = JsonNodeFactory.instance.objectNode(); document.setAll(commonDocument); document.set(MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(false)); - document.set(MappingsBuilder.EVENT_FIELD, OBJECT_MAPPER.readTree(RecordUtils.toJsonString(timeseriesAspect))); + document.set( + MappingsBuilder.EVENT_FIELD, + OBJECT_MAPPER.readTree(RecordUtils.toJsonString(timeseriesAspect))); if (systemMetadata != null) { - document.set(MappingsBuilder.SYSTEM_METADATA_FIELD, + document.set( + MappingsBuilder.SYSTEM_METADATA_FIELD, OBJECT_MAPPER.readTree(RecordUtils.toJsonString(systemMetadata))); } final Map<TimeseriesFieldSpec, List<Object>> timeseriesFieldValueMap = @@ -70,22 +78,30 @@ public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull f // Create new rows for the member collection fields. final Map<TimeseriesFieldCollectionSpec, List<Object>> timeseriesFieldCollectionValueMap = - FieldExtractor.extractFields(timeseriesAspect, aspectSpec.getTimeseriesFieldCollectionSpecs()); + FieldExtractor.extractFields( + timeseriesAspect, aspectSpec.getTimeseriesFieldCollectionSpecs()); timeseriesFieldCollectionValueMap.forEach( - (key, values) -> finalDocuments.putAll(getTimeseriesFieldCollectionDocuments(key, values, commonDocument))); + (key, values) -> + finalDocuments.putAll( + getTimeseriesFieldCollectionDocuments(key, values, commonDocument))); return finalDocuments; } - private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final RecordTemplate timeseriesAspect, + private static ObjectNode getCommonDocument( + @Nonnull final Urn urn, + final RecordTemplate timeseriesAspect, @Nullable final SystemMetadata systemMetadata) { if (!timeseriesAspect.data().containsKey(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)) { - throw new IllegalArgumentException("Input timeseries aspect does not contain a timestampMillis field"); + throw new IllegalArgumentException( + "Input timeseries aspect does not contain a timestampMillis field"); } ObjectNode document = JsonNodeFactory.instance.objectNode(); document.put(MappingsBuilder.URN_FIELD, urn.toString()); - document.put(MappingsBuilder.TIMESTAMP_FIELD, + document.put( + MappingsBuilder.TIMESTAMP_FIELD, (Long) timeseriesAspect.data().get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)); - document.put(MappingsBuilder.TIMESTAMP_MILLIS_FIELD, + document.put( + MappingsBuilder.TIMESTAMP_MILLIS_FIELD, (Long) timeseriesAspect.data().get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)); if (systemMetadata != null && systemMetadata.getRunId() != null) { // We need this as part of the common document for rollback support. 
@@ -94,7 +110,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
     Object eventGranularity = timeseriesAspect.data().get(MappingsBuilder.EVENT_GRANULARITY);
     if (eventGranularity != null) {
       try {
-        document.put(MappingsBuilder.EVENT_GRANULARITY, OBJECT_MAPPER.writeValueAsString(eventGranularity));
+        document.put(
+            MappingsBuilder.EVENT_GRANULARITY, OBJECT_MAPPER.writeValueAsString(eventGranularity));
       } catch (JsonProcessingException e) {
         throw new IllegalArgumentException("Failed to convert eventGranularity to Json string!", e);
       }
@@ -105,7 +122,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
       Object partition = partitionSpec.get(MappingsBuilder.PARTITION_SPEC_PARTITION);
       Object timePartition = partitionSpec.get(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION);
       if (partition != null && timePartition != null) {
-        throw new IllegalArgumentException("Both partition and timePartition cannot be specified in partitionSpec!");
+        throw new IllegalArgumentException(
+            "Both partition and timePartition cannot be specified in partitionSpec!");
       } else if (partition != null) {
         ObjectNode partitionDoc = JsonNodeFactory.instance.objectNode();
         partitionDoc.put(MappingsBuilder.PARTITION_SPEC_PARTITION, partition.toString());
@@ -113,14 +131,16 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
       } else if (timePartition != null) {
         ObjectNode timePartitionDoc = JsonNodeFactory.instance.objectNode();
         try {
-          timePartitionDoc.put(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION,
+          timePartitionDoc.put(
+              MappingsBuilder.PARTITION_SPEC_TIME_PARTITION,
               OBJECT_MAPPER.writeValueAsString(timePartition));
         } catch (JsonProcessingException e) {
           throw new IllegalArgumentException("Failed to convert timePartition to Json string!", e);
         }
         document.set(MappingsBuilder.PARTITION_SPEC, timePartitionDoc);
       } else {
-        throw new IllegalArgumentException("Both partition and timePartition cannot be null in partitionSpec.");
+        throw new IllegalArgumentException(
+            "Both partition and timePartition cannot be null in partitionSpec.");
       }
     }
     String messageId = (String) timeseriesAspect.data().get(MappingsBuilder.MESSAGE_ID_FIELD);
@@ -131,8 +151,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
     return document;
   }
 
-  private static void setTimeseriesField(final ObjectNode document, final TimeseriesFieldSpec fieldSpec,
-      List<Object> valueList) {
+  private static void setTimeseriesField(
+      final ObjectNode document, final TimeseriesFieldSpec fieldSpec, List<Object> valueList) {
     if (valueList.size() == 0) {
       return;
     }
@@ -154,21 +174,26 @@ private static void setTimeseriesField(final ObjectNode document, final Timeseri
       case ARRAY:
         ArrayDataSchema dataSchema = (ArrayDataSchema) fieldSpec.getPegasusSchema();
         if (valueList.get(0) instanceof List<?>) {
-          // This is the hack for non-stat-collection array fields. They will end up getting oddly serialized to a string otherwise.
+          // This is the hack for non-stat-collection array fields. They will end up getting oddly
+          // serialized to a string otherwise. 
valueList = (List<Object>) valueList.get(0); } ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(valueList.size()); - valueList.stream().map(x -> { - if (dataSchema.getItems().getType() == DataSchema.Type.RECORD) { - try { - return OBJECT_MAPPER.writeValueAsString(x); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to convert collection element to Json string!", e); - } - } else { - return x.toString(); - } - }).forEach(arrayNode::add); + valueList.stream() + .map( + x -> { + if (dataSchema.getItems().getType() == DataSchema.Type.RECORD) { + try { + return OBJECT_MAPPER.writeValueAsString(x); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException( + "Failed to convert collection element to Json string!", e); + } + } else { + return x.toString(); + } + }) + .forEach(arrayNode::add); valueNode = JsonNodeFactory.instance.textNode(arrayNode.toString()); break; case RECORD: @@ -189,15 +214,21 @@ private static void setTimeseriesField(final ObjectNode document, final Timeseri } private static Map<String, JsonNode> getTimeseriesFieldCollectionDocuments( - final TimeseriesFieldCollectionSpec fieldSpec, final List<Object> values, final ObjectNode commonDocument) { + final TimeseriesFieldCollectionSpec fieldSpec, + final List<Object> values, + final ObjectNode commonDocument) { return values.stream() .map(value -> getTimeseriesFieldCollectionDocument(fieldSpec, value, commonDocument)) .collect( - Collectors.toMap(keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), Pair::getSecond)); + Collectors.toMap( + keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), + Pair::getSecond)); } private static Pair<String, ObjectNode> getTimeseriesFieldCollectionDocument( - final TimeseriesFieldCollectionSpec fieldSpec, final Object value, final ObjectNode timeseriesInfoDocument) { + final TimeseriesFieldCollectionSpec fieldSpec, + final Object value, + final ObjectNode timeseriesInfoDocument) { ObjectNode finalDocument = JsonNodeFactory.instance.objectNode(); finalDocument.setAll(timeseriesInfoDocument); RecordTemplate collectionComponent = (RecordTemplate) value; @@ -205,18 +236,24 @@ private static Pair<String, ObjectNode> getTimeseriesFieldCollectionDocument( Optional<Object> key = RecordUtils.getFieldValue(collectionComponent, fieldSpec.getKeyPath()); if (!key.isPresent()) { throw new IllegalArgumentException( - String.format("Key %s for timeseries collection field %s is missing", fieldSpec.getKeyPath(), - fieldSpec.getName())); + String.format( + "Key %s for timeseries collection field %s is missing", + fieldSpec.getKeyPath(), fieldSpec.getName())); } - componentDocument.set(fieldSpec.getTimeseriesFieldCollectionAnnotation().getKey(), + componentDocument.set( + fieldSpec.getTimeseriesFieldCollectionAnnotation().getKey(), JsonNodeFactory.instance.textNode(key.get().toString())); - Map<TimeseriesFieldSpec, List<Object>> statFields = FieldExtractor.extractFields(collectionComponent, - new ArrayList<>(fieldSpec.getTimeseriesFieldSpecMap().values())); + Map<TimeseriesFieldSpec, List<Object>> statFields = + FieldExtractor.extractFields( + collectionComponent, new ArrayList<>(fieldSpec.getTimeseriesFieldSpecMap().values())); statFields.forEach((k, v) -> setTimeseriesField(componentDocument, k, v)); finalDocument.set(fieldSpec.getName(), componentDocument); - finalDocument.set(MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(true)); - // Return the pair of component key and the document. 
We use the key later to build the unique docId. - return new Pair<>(fieldSpec.getTimeseriesFieldCollectionAnnotation().getCollectionName() + key.get(), + finalDocument.set( + MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(true)); + // Return the pair of component key and the document. We use the key later to build the unique + // docId. + return new Pair<>( + fieldSpec.getTimeseriesFieldCollectionAnnotation().getCollectionName() + key.get(), finalDocument); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java b/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java index a482cdeb659ca..7914d82748fa5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java @@ -6,7 +6,6 @@ import javax.annotation.Nonnull; import lombok.Value; - @Value public class GitVersion { String version; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java index e9183738c786c..9a64e397c9184 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java @@ -15,11 +15,9 @@ import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; - public class AspectGenerationUtils { - private AspectGenerationUtils() { - } + private AspectGenerationUtils() {} @Nonnull public static AuditStamp createAuditStamp() { @@ -37,7 +35,8 @@ public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull St } @Nonnull - public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull String runId, @Nonnull String lastRunId) { + public static SystemMetadata createSystemMetadata( + long lastObserved, @Nonnull String runId, @Nonnull String lastRunId) { SystemMetadata metadata = new SystemMetadata(); metadata.setLastObserved(lastObserved); metadata.setRunId(runId); @@ -47,7 +46,8 @@ public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull St @Nonnull public static CorpUserKey createCorpUserKey(Urn urn) { - return (CorpUserKey) EntityKeyUtils.convertUrnToEntityKeyInternal(urn, new CorpUserKey().schema()); + return (CorpUserKey) + EntityKeyUtils.convertUrnToEntityKeyInternal(urn, new CorpUserKey().schema()); } @Nonnull diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java index e95378a616d97..2113e5a04f3a2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java @@ -14,19 +14,19 @@ import java.util.Map; import javax.annotation.Nonnull; - public class AspectIngestionUtils { - private AspectIngestionUtils() { - } + private AspectIngestionUtils() {} @Nonnull - public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects(EntityService entityService, int aspectCount) { + public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects( + EntityService entityService, int aspectCount) { return ingestCorpUserKeyAspects(entityService, aspectCount, 0); } @Nonnull - public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects(EntityService entityService, int aspectCount, int startIndex) { + public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects( + EntityService entityService, int aspectCount, 
int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserKey()); Map<Urn, CorpUserKey> aspects = new HashMap<>(); List<UpsertBatchItem> items = new LinkedList<>(); @@ -34,24 +34,31 @@ public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects(EntityService entit Urn urn = UrnUtils.getUrn(String.format("urn:li:corpuser:tester%d", i)); CorpUserKey aspect = AspectGenerationUtils.createCorpUserKey(urn); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() - .urn(urn) - .aspectName(aspectName) - .aspect(aspect) - .systemMetadata(AspectGenerationUtils.createSystemMetadata()) + items.add( + UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(aspect) + .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } @Nonnull - public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount) { + public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects( + @Nonnull final EntityService entityService, int aspectCount) { return ingestCorpUserInfoAspects(entityService, aspectCount, 0); } @Nonnull - public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { + public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects( + @Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); Map<Urn, CorpUserInfo> aspects = new HashMap<>(); List<UpsertBatchItem> items = new LinkedList<>(); @@ -60,24 +67,31 @@ public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects(@Nonnull final En String email = String.format("email%d@test.com", i); CorpUserInfo aspect = AspectGenerationUtils.createCorpUserInfo(email); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(aspect) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } @Nonnull - public static Map<Urn, ChartInfo> ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount) { + public static Map<Urn, ChartInfo> ingestChartInfoAspects( + @Nonnull final EntityService entityService, int aspectCount) { return ingestChartInfoAspects(entityService, aspectCount, 0); } @Nonnull - public static Map<Urn, ChartInfo> ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { + public static Map<Urn, ChartInfo> ingestChartInfoAspects( + @Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new ChartInfo()); Map<Urn, ChartInfo> aspects = new HashMap<>(); List<UpsertBatchItem> items = new LinkedList<>(); @@ -87,14 +101,19 @@ public static Map<Urn, ChartInfo> 
ingestChartInfoAspects(@Nonnull final EntitySe String description = String.format("Test description %d", i); ChartInfo aspect = AspectGenerationUtils.createChartInfo(title, description); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(aspect) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java index 54fb2bc8b1f65..258b40cac6371 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java @@ -1,11 +1,13 @@ package com.linkedin.metadata; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; + import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.dataset.DatasetProperties; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.TestEntityRegistry; @@ -23,19 +25,16 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class AspectUtilsTest { protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); protected final EntityRegistry _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - public AspectUtilsTest() throws EntityRegistryException { - } + public AspectUtilsTest() throws EntityRegistryException {} @Test public void testAdditionalChanges() { @@ -45,18 +44,21 @@ public void testAdditionalChanges() { EventProducer mockProducer = mock(EventProducer.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - EntityServiceImpl entityServiceImpl = new EntityServiceImpl(aspectDao, mockProducer, _testEntityRegistry, true, - null, preProcessHooks); + EntityServiceImpl entityServiceImpl = + new EntityServiceImpl( + aspectDao, mockProducer, _testEntityRegistry, true, null, preProcessHooks); MetadataChangeProposal proposal1 = new MetadataChangeProposal(); - proposal1.setEntityUrn(new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); + proposal1.setEntityUrn( + new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); proposal1.setAspectName("datasetProperties"); DatasetProperties datasetProperties = new DatasetProperties().setName("name"); proposal1.setAspect(GenericRecordUtils.serializeAspect(datasetProperties)); proposal1.setEntityType("dataset"); proposal1.setChangeType(ChangeType.PATCH); - List<MetadataChangeProposal> 
proposalList = AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); + List<MetadataChangeProposal> proposalList = + AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); // proposals for key aspect, browsePath, browsePathV2, dataPlatformInstance Assert.assertEquals(proposalList.size(), 4); Assert.assertEquals(proposalList.get(0).getChangeType(), ChangeType.UPSERT); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java index 5645573917f00..70b7f587f0c19 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java @@ -1,16 +1,13 @@ package com.linkedin.metadata; +import static org.testng.Assert.assertEquals; + import com.datastax.driver.core.KeyspaceMetadata; import com.datastax.driver.core.Row; import com.datastax.driver.core.Session; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.linkedin.metadata.entity.cassandra.CassandraAspect; -import org.testcontainers.containers.CassandraContainer; -import org.testcontainers.utility.DockerImageName; - -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; import java.net.InetSocketAddress; import java.time.Duration; import java.util.Arrays; @@ -18,32 +15,37 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; - -import static org.testng.Assert.assertEquals; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; +import org.testcontainers.containers.CassandraContainer; +import org.testcontainers.utility.DockerImageName; public class CassandraTestUtils { - private CassandraTestUtils() { - } + private CassandraTestUtils() {} private static final String KEYSPACE_NAME = "test"; private static final String IMAGE_NAME = "cassandra:3.11"; public static CassandraContainer setupContainer() { - final DockerImageName imageName = DockerImageName - .parse(IMAGE_NAME) - .asCompatibleSubstituteFor("cassandra"); + final DockerImageName imageName = + DockerImageName.parse(IMAGE_NAME).asCompatibleSubstituteFor("cassandra"); CassandraContainer container = new CassandraContainer(imageName); - container.withEnv("JVM_OPTS", "-Xms64M -Xmx96M") - .withStartupTimeout(Duration.ofMinutes(5)) // usually < 1min - .start(); + container + .withEnv("JVM_OPTS", "-Xms64M -Xmx96M") + .withStartupTimeout(Duration.ofMinutes(5)) // usually < 1min + .start(); try (Session session = container.getCluster().connect()) { - session.execute(String.format("CREATE KEYSPACE IF NOT EXISTS %s WITH replication = \n" - + "{'class':'SimpleStrategy','replication_factor':'1'};", KEYSPACE_NAME)); session.execute( - String.format("create table %s.%s (urn varchar, \n" + String.format( + "CREATE KEYSPACE IF NOT EXISTS %s WITH replication = \n" + + "{'class':'SimpleStrategy','replication_factor':'1'};", + KEYSPACE_NAME)); + session.execute( + String.format( + "create table %s.%s (urn varchar, \n" + "aspect varchar, \n" + "systemmetadata varchar, \n" + "version bigint, \n" @@ -54,14 +56,13 @@ public static CassandraContainer setupContainer() { + "entity varchar, \n" + "primary key ((urn), aspect, version)) \n" + "with clustering order by (aspect asc, version asc);", - KEYSPACE_NAME, - CassandraAspect.TABLE_NAME)); + KEYSPACE_NAME, CassandraAspect.TABLE_NAME)); List<KeyspaceMetadata> keyspaces = session.getCluster().getMetadata().getKeyspaces(); - 
List<KeyspaceMetadata> filteredKeyspaces = keyspaces - .stream() - .filter(km -> km.getName().equals(KEYSPACE_NAME)) - .collect(Collectors.toList()); + List<KeyspaceMetadata> filteredKeyspaces = + keyspaces.stream() + .filter(km -> km.getName().equals(KEYSPACE_NAME)) + .collect(Collectors.toList()); assertEquals(filteredKeyspaces.size(), 1); } @@ -72,20 +73,22 @@ public static CassandraContainer setupContainer() { public static CqlSession createTestSession(@Nonnull final CassandraContainer container) { Map<String, String> sessionConfig = createTestServerConfig(container); int port = Integer.parseInt(sessionConfig.get("port")); - List<InetSocketAddress> addresses = Arrays.stream(sessionConfig.get("hosts").split(",")) - .map(host -> new InetSocketAddress(host, port)) - .collect(Collectors.toList()); + List<InetSocketAddress> addresses = + Arrays.stream(sessionConfig.get("hosts").split(",")) + .map(host -> new InetSocketAddress(host, port)) + .collect(Collectors.toList()); String dc = sessionConfig.get("datacenter"); String ks = sessionConfig.get("keyspace"); String username = sessionConfig.get("username"); String password = sessionConfig.get("password"); - CqlSessionBuilder csb = CqlSession.builder() - .addContactPoints(addresses) - .withLocalDatacenter(dc) - .withKeyspace(ks) - .withAuthCredentials(username, password); + CqlSessionBuilder csb = + CqlSession.builder() + .addContactPoints(addresses) + .withLocalDatacenter(dc) + .withKeyspace(ks) + .withAuthCredentials(username, password); if (sessionConfig.containsKey("useSsl") && sessionConfig.get("useSsl").equals("true")) { try { @@ -99,22 +102,29 @@ public static CqlSession createTestSession(@Nonnull final CassandraContainer con } @Nonnull - private static Map<String, String> createTestServerConfig(@Nonnull final CassandraContainer container) { - return new HashMap<String, String>() {{ - put("keyspace", KEYSPACE_NAME); - put("username", container.getUsername()); - put("password", container.getPassword()); - put("hosts", container.getHost()); - put("port", container.getMappedPort(9042).toString()); - put("datacenter", "datacenter1"); - put("useSsl", "false"); - }}; + private static Map<String, String> createTestServerConfig( + @Nonnull final CassandraContainer container) { + return new HashMap<String, String>() { + { + put("keyspace", KEYSPACE_NAME); + put("username", container.getUsername()); + put("password", container.getPassword()); + put("hosts", container.getHost()); + put("port", container.getMappedPort(9042).toString()); + put("datacenter", "datacenter1"); + put("useSsl", "false"); + } + }; } public static void purgeData(CassandraContainer container) { try (Session session = container.getCluster().connect()) { session.execute(String.format("TRUNCATE %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)); - List<Row> rs = session.execute(String.format("SELECT * FROM %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)).all(); + List<Row> rs = + session + .execute( + String.format("SELECT * FROM %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)) + .all(); assertEquals(rs.size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java index 364ccd86d45fd..4ab421dab79dc 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java @@ -4,18 +4,20 @@ public class DockerTestUtils { - final private static int MIN_MEMORY_NEEDED_GB = 7; + private 
static final int MIN_MEMORY_NEEDED_GB = 7; - public static void checkContainerEngine(DockerClient dockerClient) { - final long dockerEngineMemoryBytes = dockerClient.infoCmd().exec().getMemTotal(); - final long dockerEngineMemoryGB = dockerEngineMemoryBytes / 1000 / 1000 / 1000; - if (dockerEngineMemoryGB < MIN_MEMORY_NEEDED_GB) { - final String error = String.format("Total Docker memory configured: %s GB (%d bytes) is below the minimum threshold " - + "of %d GB", dockerEngineMemoryGB, dockerEngineMemoryBytes, MIN_MEMORY_NEEDED_GB); - throw new IllegalStateException(error); - } + public static void checkContainerEngine(DockerClient dockerClient) { + final long dockerEngineMemoryBytes = dockerClient.infoCmd().exec().getMemTotal(); + final long dockerEngineMemoryGB = dockerEngineMemoryBytes / 1000 / 1000 / 1000; + if (dockerEngineMemoryGB < MIN_MEMORY_NEEDED_GB) { + final String error = + String.format( + "Total Docker memory configured: %s GB (%d bytes) is below the minimum threshold " + + "of %d GB", + dockerEngineMemoryGB, dockerEngineMemoryBytes, MIN_MEMORY_NEEDED_GB); + throw new IllegalStateException(error); } + } - private DockerTestUtils() { - } + private DockerTestUtils() {} } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java index c6eefede8a860..ed5c882ace23e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java @@ -4,13 +4,11 @@ import io.ebean.DatabaseFactory; import io.ebean.config.DatabaseConfig; import io.ebean.datasource.DataSourceConfig; - import javax.annotation.Nonnull; public class EbeanTestUtils { - private EbeanTestUtils() { - } + private EbeanTestUtils() {} @Nonnull public static Database createTestServer(String instanceId) { @@ -22,7 +20,8 @@ private static DatabaseConfig createTestingH2ServerConfig(String instanceId) { DataSourceConfig dataSourceConfig = new DataSourceConfig(); dataSourceConfig.setUsername("tester"); dataSourceConfig.setPassword(""); - dataSourceConfig.setUrl(String.format("jdbc:h2:mem:%s;IGNORECASE=TRUE;mode=mysql;", instanceId)); + dataSourceConfig.setUrl( + String.format("jdbc:h2:mem:%s;IGNORECASE=TRUE;mode=mysql;", instanceId)); dataSourceConfig.setDriver("org.h2.Driver"); DatabaseConfig serverConfig = new DatabaseConfig(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java index 57c14608a7881..fd218add2a945 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java @@ -4,11 +4,9 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; - public class TestEntitySpecBuilder { - private TestEntitySpecBuilder() { - } + private TestEntitySpecBuilder() {} public static EntitySpec getSpec() { return new EntitySpecBuilder().buildEntitySpec(new TestEntitySnapshot().schema()); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java b/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java index 4b1b8c89b030b..ea4bb69d942f9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java @@ -1,7 +1,5 @@ package com.linkedin.metadata; -import 
com.datahub.test.TestBrowsePaths; -import com.datahub.test.TestBrowsePathsV2; import com.datahub.test.BrowsePathEntry; import com.datahub.test.BrowsePathEntryArray; import com.datahub.test.KeyPartEnum; @@ -9,6 +7,8 @@ import com.datahub.test.SimpleNestedRecord1; import com.datahub.test.SimpleNestedRecord2; import com.datahub.test.SimpleNestedRecord2Array; +import com.datahub.test.TestBrowsePaths; +import com.datahub.test.TestBrowsePathsV2; import com.datahub.test.TestEntityAspect; import com.datahub.test.TestEntityAspectArray; import com.datahub.test.TestEntityInfo; @@ -21,10 +21,8 @@ import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; - public class TestEntityUtil { - private TestEntityUtil() { - } + private TestEntityUtil() {} public static Urn getTestEntityUrn() { return new TestEntityUrn("key", "urn", "VALUE_1"); @@ -38,15 +36,39 @@ public static TestEntityInfo getTestEntityInfo(Urn urn) { TestEntityInfo testEntityInfo = new TestEntityInfo(); testEntityInfo.setTextField("test"); testEntityInfo.setTextArrayField(new StringArray(ImmutableList.of("testArray1", "testArray2"))); - testEntityInfo.setNestedRecordField(new SimpleNestedRecord1().setNestedIntegerField(1).setNestedForeignKey(urn)); - testEntityInfo.setNestedRecordArrayField(new SimpleNestedRecord2Array( - ImmutableList.of(new SimpleNestedRecord2().setNestedArrayStringField("nestedArray1"), - new SimpleNestedRecord2().setNestedArrayStringField("nestedArray2") - .setNestedArrayArrayField(new StringArray(ImmutableList.of("testNestedArray1", "testNestedArray2")))))); - testEntityInfo.setCustomProperties(new StringMap(ImmutableMap.of("key1", "value1", "key2", "value2", - "shortValue", "123", "longValue", "0123456789"))); - testEntityInfo.setEsObjectField(new StringMap(ImmutableMap.of("key1", "value1", "key2", "value2", - "shortValue", "123", "longValue", "0123456789"))); + testEntityInfo.setNestedRecordField( + new SimpleNestedRecord1().setNestedIntegerField(1).setNestedForeignKey(urn)); + testEntityInfo.setNestedRecordArrayField( + new SimpleNestedRecord2Array( + ImmutableList.of( + new SimpleNestedRecord2().setNestedArrayStringField("nestedArray1"), + new SimpleNestedRecord2() + .setNestedArrayStringField("nestedArray2") + .setNestedArrayArrayField( + new StringArray( + ImmutableList.of("testNestedArray1", "testNestedArray2")))))); + testEntityInfo.setCustomProperties( + new StringMap( + ImmutableMap.of( + "key1", + "value1", + "key2", + "value2", + "shortValue", + "123", + "longValue", + "0123456789"))); + testEntityInfo.setEsObjectField( + new StringMap( + ImmutableMap.of( + "key1", + "value1", + "key2", + "value2", + "shortValue", + "123", + "longValue", + "0123456789"))); return testEntityInfo; } @@ -55,7 +77,8 @@ public static TestEntitySnapshot getSnapshot() { Urn urn = getTestEntityUrn(); snapshot.setUrn(urn); - TestBrowsePaths browsePaths = new TestBrowsePaths().setPaths(new StringArray(ImmutableList.of("/a/b/c", "d/e/f"))); + TestBrowsePaths browsePaths = + new TestBrowsePaths().setPaths(new StringArray(ImmutableList.of("/a/b/c", "d/e/f"))); BrowsePathEntryArray browsePathV2Entries = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("levelOne"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("levelTwo"); @@ -64,10 +87,14 @@ public static TestEntitySnapshot getSnapshot() { TestBrowsePathsV2 browsePathsV2 = new TestBrowsePathsV2().setPath(browsePathV2Entries); SearchFeatures searchFeatures = new SearchFeatures().setFeature1(2).setFeature2(1); - 
TestEntityAspectArray aspects = new TestEntityAspectArray( - ImmutableList.of(TestEntityAspect.create(getTestEntityKey(urn)), - TestEntityAspect.create(getTestEntityInfo(urn)), TestEntityAspect.create(browsePaths), - TestEntityAspect.create(searchFeatures), TestEntityAspect.create(browsePathsV2))); + TestEntityAspectArray aspects = + new TestEntityAspectArray( + ImmutableList.of( + TestEntityAspect.create(getTestEntityKey(urn)), + TestEntityAspect.create(getTestEntityInfo(urn)), + TestEntityAspect.create(browsePaths), + TestEntityAspect.create(searchFeatures), + TestEntityAspect.create(browsePathsV2))); snapshot.setAspects(aspects); return snapshot; } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java index f52bc26b5c538..fba11f24f4c44 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.client; +import static org.mockito.Mockito.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.codahale.metrics.Counter; import com.linkedin.data.template.RequiredFieldNotPresentException; import com.linkedin.entity.client.RestliEntityClient; @@ -12,124 +16,115 @@ import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.function.Supplier; import org.mockito.MockedStatic; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.util.function.Supplier; - -import static org.mockito.Mockito.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class JavaEntityClientTest { - private EntityService _entityService; - private DeleteEntityService _deleteEntityService; - private EntitySearchService _entitySearchService; - private CachingEntitySearchService _cachingEntitySearchService; - private SearchService _searchService; - private LineageSearchService _lineageSearchService; - private TimeseriesAspectService _timeseriesAspectService; - private EventProducer _eventProducer; - private RestliEntityClient _restliEntityClient; - private MockedStatic<MetricUtils> _metricUtils; - private Counter _counter; - - - @BeforeMethod - public void setupTest() { - _entityService = mock(EntityService.class); - _deleteEntityService = mock(DeleteEntityService.class); - _entitySearchService = mock(EntitySearchService.class); - _cachingEntitySearchService = mock(CachingEntitySearchService.class); - _searchService = mock(SearchService.class); - _lineageSearchService = mock(LineageSearchService.class); - _timeseriesAspectService = mock(TimeseriesAspectService.class); - _eventProducer = mock(EventProducer.class); - _restliEntityClient = mock(RestliEntityClient.class); - _metricUtils = mockStatic(MetricUtils.class); - _counter = mock(Counter.class); - when(MetricUtils.counter(any(), any())).thenReturn(_counter); - } - - @AfterMethod - public void closeTest() { - _metricUtils.close(); - } - - private JavaEntityClient getJavaEntityClient() { - return new JavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - 
_timeseriesAspectService, - _eventProducer, - _restliEntityClient); - } - - @Test - void testSuccessWithNoRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - - when(mockSupplier.get()).thenReturn(42); - - assertEquals(client.withRetry(mockSupplier, null), 42); - verify(mockSupplier, times(1)).get(); - _metricUtils.verify(() -> MetricUtils.counter(any(), any()), times(0)); - } - - @Test - void testSuccessAfterMultipleRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - Exception e = new IllegalArgumentException(); - - when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenReturn(42); - - assertEquals(client.withRetry(mockSupplier, "test"), 42); - verify(mockSupplier, times(4)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), - times(3) - ); - } - - @Test - void testThrowAfterMultipleRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - Exception e = new IllegalArgumentException(); - - when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenThrow(e); - - assertThrows(IllegalArgumentException.class, () -> client.withRetry(mockSupplier, "test")); - verify(mockSupplier, times(4)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), - times(4) - ); - } - - @Test - void testThrowAfterNonRetryableException() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - Exception e = new RequiredFieldNotPresentException("test"); - - when(mockSupplier.get()).thenThrow(e); - - assertThrows(RequiredFieldNotPresentException.class, () -> client.withRetry(mockSupplier, null)); - verify(mockSupplier, times(1)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "exception_" + e.getClass().getName()), - times(1) - ); - } -} \ No newline at end of file + private EntityService _entityService; + private DeleteEntityService _deleteEntityService; + private EntitySearchService _entitySearchService; + private CachingEntitySearchService _cachingEntitySearchService; + private SearchService _searchService; + private LineageSearchService _lineageSearchService; + private TimeseriesAspectService _timeseriesAspectService; + private EventProducer _eventProducer; + private RestliEntityClient _restliEntityClient; + private MockedStatic<MetricUtils> _metricUtils; + private Counter _counter; + + @BeforeMethod + public void setupTest() { + _entityService = mock(EntityService.class); + _deleteEntityService = mock(DeleteEntityService.class); + _entitySearchService = mock(EntitySearchService.class); + _cachingEntitySearchService = mock(CachingEntitySearchService.class); + _searchService = mock(SearchService.class); + _lineageSearchService = mock(LineageSearchService.class); + _timeseriesAspectService = mock(TimeseriesAspectService.class); + _eventProducer = mock(EventProducer.class); + _restliEntityClient = mock(RestliEntityClient.class); + _metricUtils = mockStatic(MetricUtils.class); + _counter = mock(Counter.class); + when(MetricUtils.counter(any(), any())).thenReturn(_counter); + } + + @AfterMethod + public void closeTest() { + _metricUtils.close(); + } + + private JavaEntityClient getJavaEntityClient() { + return new JavaEntityClient( + _entityService, + _deleteEntityService, + 
_entitySearchService, + _cachingEntitySearchService, + _searchService, + _lineageSearchService, + _timeseriesAspectService, + _eventProducer, + _restliEntityClient); + } + + @Test + void testSuccessWithNoRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + + when(mockSupplier.get()).thenReturn(42); + + assertEquals(client.withRetry(mockSupplier, null), 42); + verify(mockSupplier, times(1)).get(); + _metricUtils.verify(() -> MetricUtils.counter(any(), any()), times(0)); + } + + @Test + void testSuccessAfterMultipleRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + Exception e = new IllegalArgumentException(); + + when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenReturn(42); + + assertEquals(client.withRetry(mockSupplier, "test"), 42); + verify(mockSupplier, times(4)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), + times(3)); + } + + @Test + void testThrowAfterMultipleRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + Exception e = new IllegalArgumentException(); + + when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenThrow(e); + + assertThrows(IllegalArgumentException.class, () -> client.withRetry(mockSupplier, "test")); + verify(mockSupplier, times(4)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), + times(4)); + } + + @Test + void testThrowAfterNonRetryableException() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + Exception e = new RequiredFieldNotPresentException("test"); + + when(mockSupplier.get()).thenThrow(e); + + assertThrows( + RequiredFieldNotPresentException.class, () -> client.withRetry(mockSupplier, null)); + verify(mockSupplier, times(1)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "exception_" + e.getClass().getName()), + times(1)); + } +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java index 10a73cbe532a2..e13c2d9fd1005 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java @@ -1,39 +1,39 @@ package com.linkedin.metadata.elasticsearch.update; -import com.linkedin.metadata.search.elasticsearch.update.BulkListener; -import org.opensearch.action.bulk.BulkRequest; -import org.opensearch.action.support.WriteRequest; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.ArgumentMatchers.any; -import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.metadata.search.elasticsearch.update.BulkListener; +import org.mockito.Mockito; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.support.WriteRequest; +import org.testng.annotations.Test; public class
BulkListenerTest { - @Test - public void testConstructor() { - BulkListener test = BulkListener.getInstance(); - assertNotNull(test); - assertEquals(test, BulkListener.getInstance()); - assertNotEquals(test, BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE)); - } + @Test + public void testConstructor() { + BulkListener test = BulkListener.getInstance(); + assertNotNull(test); + assertEquals(test, BulkListener.getInstance()); + assertNotEquals(test, BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE)); + } - @Test - public void testDefaultPolicy() { - BulkListener test = BulkListener.getInstance(); + @Test + public void testDefaultPolicy() { + BulkListener test = BulkListener.getInstance(); - BulkRequest mockRequest1 = Mockito.mock(BulkRequest.class); - test.beforeBulk(0L, mockRequest1); - verify(mockRequest1, times(0)).setRefreshPolicy(any(WriteRequest.RefreshPolicy.class)); + BulkRequest mockRequest1 = Mockito.mock(BulkRequest.class); + test.beforeBulk(0L, mockRequest1); + verify(mockRequest1, times(0)).setRefreshPolicy(any(WriteRequest.RefreshPolicy.class)); - BulkRequest mockRequest2 = Mockito.mock(BulkRequest.class); - test = BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE); - test.beforeBulk(0L, mockRequest2); - verify(mockRequest2, times(1)).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - } + BulkRequest mockRequest2 = Mockito.mock(BulkRequest.class); + test = BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE); + test.beforeBulk(0L, mockRequest2); + verify(mockRequest2, times(1)).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java index 2d84c9f3444de..94e57b80d8113 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java @@ -1,18 +1,18 @@ package com.linkedin.metadata.elasticsearch.update; +import static org.testng.Assert.assertNotNull; + import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; -import org.opensearch.client.RestHighLevelClient; import org.mockito.Mockito; +import org.opensearch.client.RestHighLevelClient; import org.testng.annotations.Test; -import static org.testng.Assert.assertNotNull; - public class ESBulkProcessorTest { - @Test - public void testESBulkProcessorBuilder() { - RestHighLevelClient mock = Mockito.mock(RestHighLevelClient.class); - ESBulkProcessor test = ESBulkProcessor.builder(mock).build(); - assertNotNull(test); - } + @Test + public void testESBulkProcessorBuilder() { + RestHighLevelClient mock = Mockito.mock(RestHighLevelClient.class); + ESBulkProcessor test = ESBulkProcessor.builder(mock).build(); + assertNotNull(test); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java index 6a331647583d2..6d464d9cd9a10 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import
com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.AspectIngestionUtils; @@ -18,11 +21,7 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - -abstract public class AspectMigrationsDaoTest<T extends AspectMigrationsDao> { +public abstract class AspectMigrationsDaoTest<T extends AspectMigrationsDao> { protected T _migrationsDao; @@ -37,8 +36,11 @@ abstract public class AspectMigrationsDaoTest<T extends AspectMigrationsDao> { protected AspectMigrationsDaoTest() throws EntityRegistryException { _snapshotEntityRegistry = new TestEntityRegistry(); - _configEntityRegistry = new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); + _configEntityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + _testEntityRegistry = + new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); } @Test @@ -46,8 +48,10 @@ public void testListAllUrns() throws AssertionError { final int totalAspects = 30; final int pageSize = 25; final int lastPageSize = 5; - Map<Urn, CorpUserKey> ingestedAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); - List<String> ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); + Map<Urn, CorpUserKey> ingestedAspects = + AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); + List<String> ingestedUrns = + ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); List<String> seenUrns = new ArrayList<>(); Iterable<String> page1 = _migrationsDao.listAllUrns(0, pageSize); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java index 70161fe640707..d94de604bf44d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -15,15 +17,11 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class CassandraAspectMigrationsDaoTest extends AspectMigrationsDaoTest<CassandraAspectDao> { private CassandraContainer _cassandraContainer; - public CassandraAspectMigrationsDaoTest() throws EntityRegistryException { - } + public CassandraAspectMigrationsDaoTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -49,8 +47,14 @@ private void configureComponents() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); 
preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(dao, _mockProducer, _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + dao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); @@ -59,8 +63,8 @@ private void configureComponents() { /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java index 50e562b76c4e6..74c81ff2e8602 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java @@ -1,6 +1,8 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; @@ -8,6 +10,7 @@ import com.linkedin.metadata.AspectGenerationUtils; import com.linkedin.metadata.AspectIngestionUtils; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -28,22 +31,20 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - /** - * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a Cassandra database. + * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a + * Cassandra database. * - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. + * <p>This class also contains all the test methods where realities of an underlying storage leak + * into the {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. 
*/ -public class CassandraEntityServiceTest extends EntityServiceTest<CassandraAspectDao, CassandraRetentionService> { +public class CassandraEntityServiceTest + extends EntityServiceTest<CassandraAspectDao, CassandraRetentionService> { private CassandraContainer _cassandraContainer; - public CassandraEntityServiceTest() throws EntityRegistryException { - } + public CassandraEntityServiceTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -69,16 +70,22 @@ private void configureComponents() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { @@ -99,7 +106,8 @@ public void testIngestListLatestAspects() throws AssertionError { final int expectedTotalPages = 4; final int expectedEntitiesInLastPage = 10; - Map<Urn, CorpUserInfo> writtenAspects = AspectIngestionUtils.ingestCorpUserInfoAspects(_entityServiceImpl, totalEntities); + Map<Urn, CorpUserInfo> writtenAspects = + AspectIngestionUtils.ingestCorpUserInfoAspects(_entityServiceImpl, totalEntities); Set<Urn> writtenUrns = writtenAspects.keySet(); String entity = writtenUrns.stream().findFirst().get().getEntityType(); String aspect = AspectGenerationUtils.getAspectName(new CorpUserInfo()); @@ -111,7 +119,8 @@ public void testIngestListLatestAspects() throws AssertionError { int expectedEntityCount = isLastPage ? expectedEntitiesInLastPage : pageSize; int expectedNextStart = isLastPage ? -1 : pageStart + pageSize; - ListResult<RecordTemplate> page = _entityServiceImpl.listLatestAspects(entity, aspect, pageStart, pageSize); + ListResult<RecordTemplate> page = + _entityServiceImpl.listLatestAspects(entity, aspect, pageStart, pageSize); // Check paging metadata works as expected assertEquals(page.getNextStart(), expectedNextStart); @@ -121,15 +130,26 @@ public void testIngestListLatestAspects() throws AssertionError { assertEquals(page.getValues().size(), expectedEntityCount); // Remember all URNs we've seen returned for later assertions - readUrns.addAll(page.getMetadata().getExtraInfos().stream().map(ExtraInfo::getUrn).collect(Collectors.toList())); + readUrns.addAll( + page.getMetadata().getExtraInfos().stream() + .map(ExtraInfo::getUrn) + .collect(Collectors.toList())); } assertEquals(readUrns.size(), writtenUrns.size()); - // Check that all URNs we've created were seen in some page or other (also check that none were seen more than once) - // We can't be strict on exact order of items in the responses because Cassandra query limitations get in the way here. 
+ // Check that all URNs we've created were seen in some page or other (also check that none were + // seen more than once) + // We can't be strict on exact order of items in the responses because Cassandra query + // limitations get in the way here. for (Urn wUrn : writtenUrns) { - long matchingUrnCount = readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); - assertEquals(matchingUrnCount, 1L, String.format("Each URN should appear exactly once. %s appeared %d times.", wUrn, matchingUrnCount)); + long matchingUrnCount = + readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); + assertEquals( + matchingUrnCount, + 1L, + String.format( + "Each URN should appear exactly once. %s appeared %d times.", + wUrn, matchingUrnCount)); } } @@ -147,7 +167,8 @@ public void testIngestListUrns() throws AssertionError { final int expectedTotalPages = 4; final int expectedEntitiesInLastPage = 10; - Map<Urn, CorpUserKey> writtenAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalEntities); + Map<Urn, CorpUserKey> writtenAspects = + AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalEntities); Set<Urn> writtenUrns = writtenAspects.keySet(); String entity = writtenUrns.stream().findFirst().get().getEntityType(); @@ -169,11 +190,19 @@ public void testIngestListUrns() throws AssertionError { } assertEquals(readUrns.size(), writtenUrns.size()); - // Check that all URNs we've created were seen in some page or other (also check that none were seen more than once) - // We can't be strict on exact order of items in the responses because Cassandra query limitations get in the way here. + // Check that all URNs we've created were seen in some page or other (also check that none were + // seen more than once) + // We can't be strict on exact order of items in the responses because Cassandra query + // limitations get in the way here. for (Urn wUrn : writtenUrns) { - long matchingUrnCount = readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); - assertEquals(matchingUrnCount, 1L, String.format("Each URN should appear exactly once. %s appeared %d times.", wUrn, matchingUrnCount)); + long matchingUrnCount = + readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); + assertEquals( + matchingUrnCount, + 1L, + String.format( + "Each URN should appear exactly once. 
%s appeared %d times.", + wUrn, matchingUrnCount)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java index 98f9ce241b850..496744770dba8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java @@ -1,6 +1,9 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.AssertJUnit.*; + import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -10,6 +13,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.GraphService; @@ -28,11 +32,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.AssertJUnit.*; - - public class DeleteEntityServiceTest { protected EbeanAspectDao _aspectDao; @@ -46,14 +45,21 @@ public class DeleteEntityServiceTest { protected EntityRegistry _entityRegistry; public DeleteEntityServiceTest() { - _entityRegistry = new ConfigEntityRegistry(Snapshot.class.getClassLoader() - .getResourceAsStream("entity-registry.yml")); + _entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); _aspectDao = mock(EbeanAspectDao.class); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, mock(EventProducer.class), _entityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + mock(EventProducer.class), + _entityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _deleteEntityService = new DeleteEntityService(_entityServiceImpl, _graphService); } @@ -66,11 +72,19 @@ public void testDeleteUniqueRefGeneratesValidMCP() { final Urn container = UrnUtils.getUrn("urn:li:container:d1006cf3-3ff9-48e3-85cd-26eb23775ab2"); final RelatedEntitiesResult mockRelatedEntities = - new RelatedEntitiesResult(0, 1, 1, ImmutableList.of(new RelatedEntity("IsPartOf", dataset.toString()))); - - Mockito.when(_graphService.findRelatedEntities(null, newFilter("urn", container.toString()), - null, EMPTY_FILTER, ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000)) + new RelatedEntitiesResult( + 0, 1, 1, ImmutableList.of(new RelatedEntity("IsPartOf", dataset.toString()))); + + Mockito.when( + _graphService.findRelatedEntities( + null, + newFilter("urn", container.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000)) .thenReturn(mockRelatedEntities); final EntityResponse entityResponse = new EntityResponse(); @@ -78,14 +92,16 @@ public void testDeleteUniqueRefGeneratesValidMCP() { 
entityResponse.setEntityName(dataset.getEntityType()); final Container containerAspect = new Container(); containerAspect.setContainer(container); - final EntityAspectIdentifier dbKey = new EntityAspectIdentifier(dataset.toString(), Constants.CONTAINER_ASPECT_NAME, 0); + final EntityAspectIdentifier dbKey = + new EntityAspectIdentifier(dataset.toString(), Constants.CONTAINER_ASPECT_NAME, 0); final EntityAspect dbValue = new EntityAspect(); dbValue.setUrn(dataset.toString()); dbValue.setVersion(0); dbValue.setAspect(Constants.CONTAINER_ASPECT_NAME); dbValue.setMetadata(RecordUtils.toJsonString(containerAspect)); - dbValue.setSystemMetadata(RecordUtils.toJsonString(SystemMetadataUtils.createDefaultSystemMetadata())); + dbValue.setSystemMetadata( + RecordUtils.toJsonString(SystemMetadataUtils.createDefaultSystemMetadata())); final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp(); dbValue.setCreatedBy(auditStamp.getActor().toString()); dbValue.setCreatedOn(new Timestamp(auditStamp.getTime())); @@ -93,15 +109,25 @@ public void testDeleteUniqueRefGeneratesValidMCP() { final Map<EntityAspectIdentifier, EntityAspect> dbEntries = Map.of(dbKey, dbValue); Mockito.when(_aspectDao.batchGet(Mockito.any())).thenReturn(dbEntries); - RollbackResult result = new RollbackResult(container, Constants.DATASET_ENTITY_NAME, - Constants.CONTAINER_ASPECT_NAME, containerAspect, null, null, null, - ChangeType.DELETE, false, 1); + RollbackResult result = + new RollbackResult( + container, + Constants.DATASET_ENTITY_NAME, + Constants.CONTAINER_ASPECT_NAME, + containerAspect, + null, + null, + null, + ChangeType.DELETE, + false, + 1); Mockito.when(_aspectDao.runInTransactionWithRetry(Mockito.any(), Mockito.anyInt())) .thenReturn(result); - final DeleteReferencesResponse response = _deleteEntityService.deleteReferencesTo(container, false); + final DeleteReferencesResponse response = + _deleteEntityService.deleteReferencesTo(container, false); assertEquals(1, (int) response.getTotal()); assertFalse(response.getRelatedAspects().isEmpty()); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java index 67c9bd0a9e014..943ad2967de42 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java @@ -14,71 +14,62 @@ public class DeleteEntityUtilsTest extends TestCase { - /** - * Tests that Aspect Processor deletes the entire struct if it no longer has any fields - */ + /** Tests that Aspect Processor deletes the entire struct if it no longer has any fields */ @Test public void testEmptyStructRemoval() { final String value = "{\"key_a\": \"hello\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: optional string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a")); 
assertFalse(updatedAspect.data().containsKey("key_a")); assertTrue(updatedAspect.data().isEmpty()); } - /** - * Tests that Aspect Processor deletes & removes optional values from a struct. - */ + /** Tests that Aspect Processor deletes & removes optional values from a struct. */ @Test public void testOptionalFieldRemoval() { final String value = "{\"key_a\": \"hello\", \"key_b\": \"world\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a")); assertFalse(updatedAspect.data().containsKey("key_a")); assertTrue(updatedAspect.data().containsKey("key_b")); assertEquals("world", updatedAspect.data().get("key_b")); } - /** - * Tests that Aspect Processor does not delete a non-optional value from a struct. - */ + /** Tests that Aspect Processor does not delete a non-optional value from a struct. */ @Test public void testNonOptionalFieldRemoval() { final String value = "{\"key_a\": \"hello\", \"key_b\": \"world\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: string\n" + "key_b: string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - assertNull(DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a"))); + assertNull( + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a"))); } /** - * Tests that Aspect Processor deletes a required value from a record referenced by another record. + * Tests that Aspect Processor deletes a required value from a record referenced by another + * record. 
*/ @Test public void testNestedFieldRemoval() { @@ -86,24 +77,21 @@ public void testNestedFieldRemoval() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_a")); assertFalse(updatedAspect.data().containsKey("key_c")); } /** - * Tests that Aspect Processor is able to delete an optional sub-field while preserving nested structs. + * Tests that Aspect Processor is able to delete an optional sub-field while preserving nested + * structs. */ @Test public void testOptionalNestedFieldRemoval() { @@ -111,18 +99,15 @@ public void testOptionalNestedFieldRemoval() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertNotSame(aspect.data().get("key_c"), updatedAspect.data().get("key_c")); @@ -133,8 +118,8 @@ public void testOptionalNestedFieldRemoval() { } /** - * Tests that the Aspect Processor will delete an entire struct if after removal of a field, it becomes empty & - * is optional at some higher level. + * Tests that the Aspect Processor will delete an entire struct if after removal of a field, it + * becomes empty & is optional at some higher level. 
*/ @Test public void testRemovalOptionalFieldWithNonOptionalSubfield() { @@ -142,18 +127,15 @@ public void testRemovalOptionalFieldWithNonOptionalSubfield() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("world", aspect, schema, - new PathSpec("key_c", "key_b")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "world", aspect, schema, new PathSpec("key_c", "key_b")); assertFalse(updatedAspect.data().containsKey("key_c")); } @@ -164,15 +146,14 @@ public void testRemovalFromSingleArray() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: array[string]\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: array[string]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_a")).size()); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a", "*")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a", "*")); assertTrue(updatedAspect.data().containsKey("key_a")); assertTrue(((DataList) updatedAspect.data().get("key_a")).isEmpty()); @@ -184,15 +165,14 @@ public void testRemovalFromMultipleArray() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: array[string]\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: array[string]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_a")).size()); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a", "*")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a", "*")); assertTrue(updatedAspect.data().containsKey("key_a")); assertEquals(1, ((DataList) updatedAspect.data().get("key_a")).size()); @@ -200,28 +180,27 @@ public void testRemovalFromMultipleArray() { } /** - * Tests that Aspect Processor is able to remove sub-field from array field while preserving nested structs. + * Tests that Aspect Processor is able to remove sub-field from array field while preserving + * nested structs. 
*/ @Test public void testRemovalNestedFieldFromArray() { - final String value = "{\"key_c\": [{\"key_a\": \"hello\", \"key_b\": \"world\"}, {\"key_b\": \"extra info\"}]}"; + final String value = + "{\"key_c\": [{\"key_a\": \"hello\", \"key_b\": \"world\"}, {\"key_b\": \"extra info\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(2, ((DataList) updatedAspect.data().get("key_c")).size()); @@ -229,99 +208,95 @@ public void testRemovalNestedFieldFromArray() { assertNotSame(aspect.data().get("key_c"), updatedAspect.data().get("key_c")); // key_a field from first element from key_c should have been removed - assertFalse(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_b")); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); - assertEquals("world", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).get("key_b")); - assertEquals("extra info", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); + assertFalse( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_b")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); + assertEquals( + "world", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).get("key_b")); + assertEquals( + "extra info", + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); } - /** - * Tests that Aspect Processor is able to remove element from array field. - */ + /** Tests that Aspect Processor is able to remove element from array field. 
*/ @Test public void testRemovalElementFromArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}, {\"key_b\": \"extra info\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(2, ((DataList) updatedAspect.data().get("key_c")).size()); // First element from key_c should have been emptied - assertFalse(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); + assertFalse( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).isEmpty()); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); - assertEquals("extra info", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); + assertEquals( + "extra info", + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); } - /** - * Tests that Aspect Processor removes array if empty when removing underlying structs - */ + /** Tests that Aspect Processor removes array if empty when removing underlying structs */ @Test public void testRemovalEmptyArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(1, ((DataList) updatedAspect.data().get("key_c")).size()); 
assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).isEmpty()); } - /** - * Tests that Aspect Processor removes optional array field from struct when it is empty - */ + /** Tests that Aspect Processor removes optional array field from struct when it is empty */ @Test public void testRemovalOptionalEmptyArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional array[simple_record]\n" - + "}"); + pdlSchemaParser.parse( + "record complex_record {\n" + "key_c: optional array[simple_record]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); // contains an empty key_c assertTrue(updatedAspect.data().containsKey("key_c")); @@ -329,7 +304,8 @@ public void testRemovalOptionalEmptyArray() { } /** - * Tests that Aspect Processor removes nested structs more than 1 level deep from an optional field. + * Tests that Aspect Processor removes nested structs more than 1 level deep from an optional + * field. */ @Test public void testNestedNonOptionalSubFieldsOnOptionalField() { @@ -337,46 +313,50 @@ public void testNestedNonOptionalSubFieldsOnOptionalField() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record_1 {\n" - + "key_a: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record_1 {\n" + "key_a: string\n" + "}"); - pdlSchemaParser.parse("record simple_record_2 {\n" - + "key_b: simple_record_1\n" - + "}"); + pdlSchemaParser.parse("record simple_record_2 {\n" + "key_b: simple_record_1\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record_2\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record_2\n" + "}"); assertTrue(aspect.data().containsKey("key_c")); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_b", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_b", "key_a")); assertFalse(updatedAspect.data().containsKey("key_c")); } /** - * Tests that the aspect processor is able to remove fields that are deeply nested where the top-level field is - * optional. - * This example is based on the SchemaMetadata object. + * Tests that the aspect processor is able to remove fields that are deeply nested where the + * top-level field is optional. This example is based on the SchemaMetadata object. 
*/ @Test public void testSchemaMetadataDelete() { - final String value = "{\"fields\": [{\"globalTags\": {\"tags\": [{\"tag\": \"urn:li:tag:Dimension\"}]}}]}"; + final String value = + "{\"fields\": [{\"globalTags\": {\"tags\": [{\"tag\": \"urn:li:tag:Dimension\"}]}}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final Aspect updatedAspect = - DeleteEntityUtils.getAspectWithReferenceRemoved("urn:li:tag:Dimension", aspect, SchemaMetadata.dataSchema(), + DeleteEntityUtils.getAspectWithReferenceRemoved( + "urn:li:tag:Dimension", + aspect, + SchemaMetadata.dataSchema(), new PathSpec("fields", "*", "globalTags", "tags", "*", "tag")); assertFalse(updatedAspect.data().toString().contains("urn:li:tag:Dimension")); assertTrue(updatedAspect.data().containsKey("fields")); // tags must be empty, not field assertEquals(1, ((DataList) updatedAspect.data().get("fields")).size()); - assertEquals(0, ((DataList) ((DataMap) ((DataMap) ((DataList) updatedAspect.data().get("fields")).get(0)) - .get("globalTags")).get("tags")).size()); + assertEquals( + 0, + ((DataList) + ((DataMap) + ((DataMap) ((DataList) updatedAspect.data().get("fields")).get(0)) + .get("globalTags")) + .get("tags")) + .size()); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java index 30d821662d377..2430ebb1f94be 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java @@ -1,9 +1,13 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.AspectIngestionUtils; -import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -19,27 +23,28 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EbeanAspectMigrationsDaoTest extends AspectMigrationsDaoTest<EbeanAspectDao> { - public EbeanAspectMigrationsDaoTest() throws EntityRegistryException { - } + public EbeanAspectMigrationsDaoTest() throws EntityRegistryException {} @BeforeMethod public void setupTest() { - Database server = EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); + Database server = + EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); _mockProducer = mock(EventProducer.class); EbeanAspectDao dao = new EbeanAspectDao(server); dao.setConnectionValidated(true); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(dao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + dao, + _mockProducer, + _testEntityRegistry, + true, + 
_mockUpdateIndicesService, + preProcessHooks); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); @@ -51,12 +56,15 @@ public void testStreamAspects() throws AssertionError { final int totalAspects = 30; Map<Urn, CorpUserKey> ingestedAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); - List<String> ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); + List<String> ingestedUrns = + ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); - Stream<EntityAspect> aspectStream = _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME); + Stream<EntityAspect> aspectStream = + _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME); List<EntityAspect> aspectList = aspectStream.collect(Collectors.toList()); assertEquals(ingestedUrns.size(), aspectList.size()); - Set<String> urnsFetched = aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet()); + Set<String> urnsFetched = + aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet()); for (String urn : ingestedUrns) { assertTrue(urnsFetched.contains(urn)); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index e8a7d8740d328..eeb014f7afdc2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -1,15 +1,19 @@ package com.linkedin.metadata.entity; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.AuditStamp; -import com.linkedin.metadata.Constants; -import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.DataTemplateUtil; import com.linkedin.data.template.RecordTemplate; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.AspectGenerationUtils; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; @@ -27,11 +31,6 @@ import io.ebean.Transaction; import io.ebean.TxScope; import io.ebean.annotation.TxIsolation; -import org.apache.commons.lang3.tuple.Triple; -import org.testng.Assert; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collection; import java.util.List; @@ -41,23 +40,23 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static org.mockito.Mockito.mock; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - +import org.apache.commons.lang3.tuple.Triple; +import org.testng.Assert; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; /** - * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a relational database. 
+ * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a
+ * relational database.
  *
- * This class also contains all the test methods where realities of an underlying storage leak into the
- * {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be
- * great to address captured differences.
+ * <p>This class also contains all the test methods where realities of the underlying storage leak
+ * into the {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that
+ * should never happen, and it'd be great to address captured differences.
  */
-public class EbeanEntityServiceTest extends EntityServiceTest<EbeanAspectDao, EbeanRetentionService> {
+public class EbeanEntityServiceTest
+    extends EntityServiceTest<EbeanAspectDao, EbeanRetentionService> {

-  public EbeanEntityServiceTest() throws EntityRegistryException {
-  }
+  public EbeanEntityServiceTest() throws EntityRegistryException {}

   @BeforeMethod
   public void setupTest() {
@@ -69,16 +68,22 @@ public void setupTest() {
     _mockUpdateIndicesService = mock(UpdateIndicesService.class);
     PreProcessHooks preProcessHooks = new PreProcessHooks();
     preProcessHooks.setUiEnabled(true);
-    _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true,
-        _mockUpdateIndicesService, preProcessHooks);
+    _entityServiceImpl =
+        new EntityServiceImpl(
+            _aspectDao,
+            _mockProducer,
+            _testEntityRegistry,
+            true,
+            _mockUpdateIndicesService,
+            preProcessHooks);
     _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000);
     _entityServiceImpl.setRetentionService(_retentionService);
   }

   /**
    * Ideally, all tests would be in the base class, so they're reused between all implementations.
-   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test
-   * to make sure this class will always be discovered.
+   * When that's the case, the test runner will ignore this class (and its base!) so we keep this
+   * dummy test to make sure this class will always be discovered.
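The dummy-test idiom referenced in the javadoc above is worth a concrete illustration. What follows is a minimal sketch, separate from this patch, with illustrative class names and assuming TestNG: the runner does not instantiate an abstract class directly, so each concrete subclass keeps one trivial @Test to guarantee that the class, and every @Test it inherits, is discovered.

    import org.testng.annotations.Test;

    /** Abstract base: TestNG never runs this directly; its tests run only via subclasses. */
    abstract class AbstractStorageTest {
      @Test
      public void sharedBehaviorTest() {
        // assertions shared by every storage backend would live here
      }
    }

    /** Concrete subclass: the no-op test guarantees discovery of this class and its inherited tests. */
    class EbeanStorageTest extends AbstractStorageTest {
      @Test
      public void obligatoryTest() {}
    }
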
*/ @Test public void obligatoryTest() throws AssertionError { @@ -111,30 +116,32 @@ public void testIngestListLatestAspects() throws AssertionError { // Ingest CorpUserInfo Aspect #3 CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - List<UpsertBatchItem> items = List.of( + List<UpsertBatchItem> items = + List.of( UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects - ListResult<RecordTemplate> batch1 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); + ListResult<RecordTemplate> batch1 = + _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); assertEquals(batch1.getNextStart(), 2); assertEquals(batch1.getPageSize(), 2); @@ -144,7 +151,8 @@ public void testIngestListLatestAspects() throws AssertionError { assertTrue(DataTemplateUtil.areEqual(writeAspect1, batch1.getValues().get(0))); assertTrue(DataTemplateUtil.areEqual(writeAspect2, batch1.getValues().get(1))); - ListResult<RecordTemplate> batch2 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 2, 2); + ListResult<RecordTemplate> batch2 = + _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 2, 2); assertEquals(batch2.getValues().size(), 1); assertTrue(DataTemplateUtil.areEqual(writeAspect3, batch2.getValues().get(0))); } @@ -175,27 +183,28 @@ public void testIngestListUrns() throws AssertionError { // Ingest CorpUserInfo Aspect #3 RecordTemplate writeAspect3 = AspectGenerationUtils.createCorpUserKey(entityUrn3); - List<UpsertBatchItem> items = List.of( + List<UpsertBatchItem> items = + List.of( UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + .urn(entityUrn3) + 
.aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects urns ListUrnsResult batch1 = _entityServiceImpl.listUrns(entityUrn1.getEntityType(), 0, 2); @@ -221,12 +230,13 @@ public void testIngestListUrns() throws AssertionError { public void testNestedTransactions() throws AssertionError { Database server = _aspectDao.getServer(); - try (Transaction transaction = server.beginTransaction(TxScope.requiresNew() - .setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction = + server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); // Work 1 - try (Transaction transaction2 = server.beginTransaction(TxScope.requiresNew() - .setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction2 = + server.beginTransaction( + TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction2.setBatchMode(true); // Work 2 transaction2.commit(); @@ -240,20 +250,21 @@ public void testNestedTransactions() throws AssertionError { System.out.println("done"); } - @Test public void dataGeneratorThreadingTest() { DataGenerator dataGenerator = new DataGenerator(_entityServiceImpl); List<String> aspects = List.of("status", "globalTags", "glossaryTerms"); - List<List<MetadataChangeProposal>> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List<List<MetadataChangeProposal>> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); // Expected no duplicates aspects - List<String> duplicates = testData.stream() + List<String> duplicates = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.groupingBy(Triple::toString)) - .entrySet().stream() + .entrySet() + .stream() .filter(e -> e.getValue().size() > 1) .map(Map.Entry::getKey) .collect(Collectors.toList()); @@ -271,38 +282,48 @@ public void multiThreadingTest() { // Add data List<String> aspects = List.of("status", "globalTags", "glossaryTerms"); - List<List<MetadataChangeProposal>> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List<List<MetadataChangeProposal>> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); executeThreadingTest(_entityServiceImpl, testData, 15); // Expected aspects - Set<Triple<String, String, Long>> generatedAspectIds = testData.stream() + Set<Triple<String, String, Long>> generatedAspectIds = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.toSet()); // Actual inserts - Set<Triple<String, String, Long>> actualAspectIds = server.sqlQuery( - "select urn, aspect, version from metadata_aspect_v2").findList().stream() - .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + Set<Triple<String, String, Long>> actualAspectIds = + server.sqlQuery("select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map( + row -> + Triple.of( + row.getString("urn"), row.getString("aspect"), row.getLong("version"))) .collect(Collectors.toSet()); // Assert State - Set<Triple<String, String, Long>> additions = actualAspectIds.stream() + 
Set<Triple<String, String, Long>> additions = + actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + assertEquals( + additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); - Set<Triple<String, String, Long>> missing = generatedAspectIds.stream() + Set<Triple<String, String, Long>> missing = + generatedAspectIds.stream() .filter(id -> !actualAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + assertEquals( + missing.size(), + 0, + String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); } /** - * Don't blame multi-threading for what might not be a threading issue. - * Perform the multi-threading test with 1 thread. + * Don't blame multi-threading for what might not be a threading issue. Perform the + * multi-threading test with 1 thread. */ @Test public void singleThreadingTest() { @@ -311,85 +332,106 @@ public void singleThreadingTest() { // Add data List<String> aspects = List.of("status", "globalTags", "glossaryTerms"); - List<List<MetadataChangeProposal>> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List<List<MetadataChangeProposal>> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); executeThreadingTest(_entityServiceImpl, testData, 1); // Expected aspects - Set<Triple<String, String, Long>> generatedAspectIds = testData.stream() + Set<Triple<String, String, Long>> generatedAspectIds = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.toSet()); // Actual inserts - Set<Triple<String, String, Long>> actualAspectIds = server.sqlQuery( - "select urn, aspect, version from metadata_aspect_v2").findList().stream() - .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + Set<Triple<String, String, Long>> actualAspectIds = + server.sqlQuery("select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map( + row -> + Triple.of( + row.getString("urn"), row.getString("aspect"), row.getLong("version"))) .collect(Collectors.toSet()); // Assert State - Set<Triple<String, String, Long>> additions = actualAspectIds.stream() + Set<Triple<String, String, Long>> additions = + actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + assertEquals( + additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); - Set<Triple<String, String, Long>> missing = generatedAspectIds.stream() + Set<Triple<String, String, Long>> missing = + generatedAspectIds.stream() .filter(id -> !actualAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + assertEquals( + missing.size(), + 0, + String.format("Expected all generated aspects to be inserted. 
Missing: %s", missing)); } - private static void executeThreadingTest(EntityServiceImpl entityService, List<List<MetadataChangeProposal>> testData, - int threadCount) { + private static void executeThreadingTest( + EntityServiceImpl entityService, + List<List<MetadataChangeProposal>> testData, + int threadCount) { Database server = ((EbeanAspectDao) entityService._aspectDao).getServer(); server.sqlUpdate("truncate metadata_aspect_v2"); - int count = Objects.requireNonNull(server.sqlQuery( - "select count(*) as cnt from metadata_aspect_v2").findOne()).getInteger("cnt"); + int count = + Objects.requireNonNull( + server.sqlQuery("select count(*) as cnt from metadata_aspect_v2").findOne()) + .getInteger("cnt"); assertEquals(count, 0, "Expected exactly 0 rows at the start."); // Create ingest proposals in parallel, mimic the smoke-test ingestion - final LinkedBlockingQueue<List<MetadataChangeProposal>> queue = new LinkedBlockingQueue<>(threadCount * 2); + final LinkedBlockingQueue<List<MetadataChangeProposal>> queue = + new LinkedBlockingQueue<>(threadCount * 2); // Spin up workers - List<Thread> writeThreads = IntStream.range(0, threadCount) + List<Thread> writeThreads = + IntStream.range(0, threadCount) .mapToObj(threadId -> new Thread(new MultiThreadTestWorker(queue, entityService))) .collect(Collectors.toList()); writeThreads.forEach(Thread::start); - testData.forEach(mcps -> { - try { - queue.put(mcps); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + testData.forEach( + mcps -> { + try { + queue.put(mcps); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); // Terminate workers with empty mcp - IntStream.range(0, threadCount).forEach(threadId -> { - try { - queue.put(List.of()); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + IntStream.range(0, threadCount) + .forEach( + threadId -> { + try { + queue.put(List.of()); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); // Wait for threads to finish - writeThreads.forEach(thread -> { - try { - thread.join(10000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + writeThreads.forEach( + thread -> { + try { + thread.join(10000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); } private static class MultiThreadTestWorker implements Runnable { private final EntityServiceImpl entityService; private final LinkedBlockingQueue<List<MetadataChangeProposal>> queue; - public MultiThreadTestWorker(LinkedBlockingQueue<List<MetadataChangeProposal>> queue, EntityServiceImpl entityService) { + public MultiThreadTestWorker( + LinkedBlockingQueue<List<MetadataChangeProposal>> queue, EntityServiceImpl entityService) { this.queue = queue; this.entityService = entityService; } @@ -404,9 +446,8 @@ public void run() { final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)); auditStamp.setTime(System.currentTimeMillis()); - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(mcps, entityService.getEntityRegistry()) - .build(); + AspectsBatchImpl batch = + AspectsBatchImpl.builder().mcps(mcps, entityService.getEntityRegistry()).build(); entityService.ingestProposal(batch, auditStamp, false); } } catch (InterruptedException | URISyntaxException ie) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java 
index f205adc128ed2..f03811da35ea8 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.metadata.entity;

+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.core.StreamReadConstraints;
@@ -68,1417 +72,1586 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
 /**
  * A class to test {@link EntityServiceImpl}
  *
- * This class is generic to allow same integration tests to be reused to test all supported storage backends.
- * If you're adding another storage backend - you should create a new test class that extends this one providing
- * hard implementations of {@link AspectDao} and {@link RetentionService} and implements {@code @BeforeMethod} etc
- * to set up and tear down state.
+ * <p>This class is generic to allow the same integration tests to be reused to test all supported
+ * storage backends. If you're adding another storage backend, you should create a new test class
+ * that extends this one, providing concrete implementations of {@link AspectDao} and {@link
+ * RetentionService}, and implement {@code @BeforeMethod} etc. to set up and tear down state.
  *
- * If you realise that a feature you want to test, sadly, has divergent behaviours between different storage implementations,
- * that you can't rectify - you should make the test method abstract and implement it in all implementations of this class.
+ * <p>If you realise that a feature you want to test, sadly, has divergent behaviours between
+ * different storage implementations that you can't rectify, you should make the test method
+ * abstract and implement it in all implementations of this class.
  *
  * @param <T_AD> {@link AspectDao} implementation.
  * @param <T_RS> {@link RetentionService} implementation.
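The generics this javadoc describes pin each concrete test class to one storage backend. Below is a condensed sketch of that harness shape; the names are abbreviated stand-ins rather than code from this patch (DataHub's real base class carries many more members), and only AspectDao, RetentionService, and the TestNG annotation are taken from the surrounding diff.

    import com.linkedin.metadata.entity.AspectDao;
    import com.linkedin.metadata.entity.RetentionService;
    import org.testng.annotations.Test;

    public abstract class EntityServiceHarness<
        T_AD extends AspectDao, T_RS extends RetentionService> {
      // Bound by the concrete subclass to a real backend (e.g. Ebean or Cassandra),
      // typically in a @BeforeMethod that also constructs the service under test.
      protected T_AD _aspectDao;
      protected T_RS _retentionService;

      // Behaviors with backend-specific semantics (result ordering, nested
      // transactions) stay abstract and are implemented once per backend.
      @Test
      public abstract void testIngestListUrns() throws Exception;

      // Backend-agnostic behaviors are written once here and run for every subclass.
      @Test
      public void testSharedBehavior() {
        // shared assertions against _aspectDao and the service under test
      }
    }
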
*/ -abstract public class EntityServiceTest<T_AD extends AspectDao, T_RS extends RetentionService> { - - protected EntityServiceImpl _entityServiceImpl; - protected T_AD _aspectDao; - protected T_RS _retentionService; - - protected static final AuditStamp TEST_AUDIT_STAMP = AspectGenerationUtils.createAuditStamp(); - protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); - protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - protected final EntityRegistry _testEntityRegistry = - new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - protected EventProducer _mockProducer; - protected UpdateIndicesService _mockUpdateIndicesService; - - protected EntityServiceTest() throws EntityRegistryException { - } - - // This test had to be split out because Cassandra relational databases have different result ordering restrictions - @Test - abstract public void testIngestListLatestAspects() throws Exception; - - // This test had to be split out because Cassandra relational databases have different result ordering restrictions - @Test - abstract public void testIngestListUrns() throws Exception; - - // This test had to be split out because Cassandra doesn't support nested transactions - @Test - abstract public void testNestedTransactions() throws Exception; - - @Test - public void testIngestGetEntity() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - // 1. Ingest Entity - _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); - - // 2. Retrieve Entity - com.linkedin.entity.Entity readEntity = _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); - - // 3. Compare Entity Objects - assertEquals(readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. 
- assertTrue(DataTemplateUtil.areEqual(writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), +public abstract class EntityServiceTest<T_AD extends AspectDao, T_RS extends RetentionService> { + + protected EntityServiceImpl _entityServiceImpl; + protected T_AD _aspectDao; + protected T_RS _retentionService; + + protected static final AuditStamp TEST_AUDIT_STAMP = AspectGenerationUtils.createAuditStamp(); + protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); + protected final EntityRegistry _configEntityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + protected final EntityRegistry _testEntityRegistry = + new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); + protected EventProducer _mockProducer; + protected UpdateIndicesService _mockUpdateIndicesService; + + protected EntityServiceTest() throws EntityRegistryException {} + + // This test had to be split out because Cassandra relational databases have different result + // ordering restrictions + @Test + public abstract void testIngestListLatestAspects() throws Exception; + + // This test had to be split out because Cassandra relational databases have different result + // ordering restrictions + @Test + public abstract void testIngestListUrns() throws Exception; + + // This test had to be split out because Cassandra doesn't support nested transactions + @Test + public abstract void testNestedTransactions() throws Exception; + + @Test + public void testIngestGetEntity() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + // 1. Ingest Entity + _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); + + // 2. Retrieve Entity + com.linkedin.entity.Entity readEntity = + _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); + + // 3. Compare Entity Objects + assertEquals( + readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey = new CorpUserKey(); - expectedKey.setUsername("test"); - assertTrue(DataTemplateUtil.areEqual(expectedKey, - readEntity.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testAddKey() throws Exception { - // Test Writing a CorpUser Key - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - // 1. 
Ingest Entity - _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); - - // 2. Retrieve Entity - com.linkedin.entity.Entity readEntity = _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); - - // 3. Compare Entity Objects - assertEquals(readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. - assertTrue(DataTemplateUtil.areEqual(writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), + CorpUserKey expectedKey = new CorpUserKey(); + expectedKey.setUsername("test"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey, + readEntity + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testAddKey() throws Exception { + // Test Writing a CorpUser Key + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + // 1. Ingest Entity + _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); + + // 2. Retrieve Entity + com.linkedin.entity.Entity readEntity = + _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); + + // 3. Compare Entity Objects + assertEquals( + readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey = new CorpUserKey(); - expectedKey.setUsername("test"); - assertTrue(DataTemplateUtil.areEqual(expectedKey, - readEntity.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestGetEntities() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - // 1. 
Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map<Urn, Entity> readEntities = - _entityServiceImpl.getEntities(ImmutableSet.of(entityUrn1, entityUrn2), Collections.emptySet()); - - // 3. Compare Entity Objects - - // Entity 1 - com.linkedin.entity.Entity readEntity1 = readEntities.get(entityUrn1); - assertEquals(readEntity1.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. - assertTrue(DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0), + CorpUserKey expectedKey = new CorpUserKey(); + expectedKey.setUsername("test"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey, + readEntity + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntities() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map<Urn, Entity> readEntities = + _entityServiceImpl.getEntities( + ImmutableSet.of(entityUrn1, entityUrn2), Collections.emptySet()); + + // 3. Compare Entity Objects + + // Entity 1 + com.linkedin.entity.Entity readEntity1 = readEntities.get(entityUrn1); + assertEquals( + readEntity1.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity1.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, - readEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - // Entity 2 - com.linkedin.entity.Entity readEntity2 = readEntities.get(entityUrn2); - assertEquals(readEntity2.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. 
-    Optional<CorpUserAspect> writer2UserInfo = writeEntity2.getValue().getCorpUserSnapshot().getAspects()
-        .stream().filter(CorpUserAspect::isCorpUserInfo).findAny();
-    Optional<CorpUserAspect> reader2UserInfo = writeEntity2.getValue().getCorpUserSnapshot().getAspects()
-        .stream().filter(CorpUserAspect::isCorpUserInfo).findAny();
-
-    assertTrue(writer2UserInfo.isPresent(), "Writer2 user info exists");
-    assertTrue(reader2UserInfo.isPresent(), "Reader2 user info exists");
-    assertTrue(DataTemplateUtil.areEqual(writer2UserInfo.get(), reader2UserInfo.get()), "UserInfo's are the same");
-    CorpUserKey expectedKey2 = new CorpUserKey();
-    expectedKey2.setUsername("tester2");
-    assertTrue(DataTemplateUtil.areEqual(expectedKey2,
-        readEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect.
-
-    ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class);
-    verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(),
-        mclCaptor.capture());
-    MetadataChangeLog mcl = mclCaptor.getValue();
-    assertEquals(mcl.getEntityType(), "corpuser");
-    assertNull(mcl.getPreviousAspectValue());
-    assertNull(mcl.getPreviousSystemMetadata());
-    assertEquals(mcl.getChangeType(), ChangeType.UPSERT);
-
-    verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(),
-        mclCaptor.capture());
-    mcl = mclCaptor.getValue();
-    assertEquals(mcl.getEntityType(), "corpuser");
-    assertNull(mcl.getPreviousAspectValue());
-    assertNull(mcl.getPreviousSystemMetadata());
-    assertEquals(mcl.getChangeType(), ChangeType.UPSERT);
-
-    verifyNoMoreInteractions(_mockProducer);
-  }
+    CorpUserKey expectedKey1 = new CorpUserKey();
+    expectedKey1.setUsername("tester1");
+    assertTrue(
+        DataTemplateUtil.areEqual(
+            expectedKey1,
+            readEntity1
+                .getValue()
+                .getCorpUserSnapshot()
+                .getAspects()
+                .get(0)
+                .getCorpUserKey())); // Key + Info aspect.
+
+    // Entity 2
+    com.linkedin.entity.Entity readEntity2 = readEntities.get(entityUrn2);
+    assertEquals(
+        readEntity2.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect.
+    Optional<CorpUserAspect> writer2UserInfo =
+        writeEntity2.getValue().getCorpUserSnapshot().getAspects().stream()
+            .filter(CorpUserAspect::isCorpUserInfo)
+            .findAny();
+    // Read back from the retrieved entity, not the written one, so the
+    // round-trip comparison below is meaningful.
+    Optional<CorpUserAspect> reader2UserInfo =
+        readEntity2.getValue().getCorpUserSnapshot().getAspects().stream()
+            .filter(CorpUserAspect::isCorpUserInfo)
+            .findAny();
+
+    assertTrue(writer2UserInfo.isPresent(), "Writer2 user info exists");
+    assertTrue(reader2UserInfo.isPresent(), "Reader2 user info exists");
+    assertTrue(
+        DataTemplateUtil.areEqual(writer2UserInfo.get(), reader2UserInfo.get()),
+        "UserInfo's are the same");
+    CorpUserKey expectedKey2 = new CorpUserKey();
+    expectedKey2.setUsername("tester2");
+    assertTrue(
+        DataTemplateUtil.areEqual(
+            expectedKey2,
+            readEntity2
+                .getValue()
+                .getCorpUserSnapshot()
+                .getAspects()
+                .get(0)
+                .getCorpUserKey())); // Key + Info aspect.
+ + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), mclCaptor.capture()); + mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntitiesV2() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + String aspectName = "corpUserInfo"; + String keyName = "corpUserKey"; + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map<Urn, EntityResponse> readEntities = + _entityServiceImpl.getEntitiesV2( + "corpuser", ImmutableSet.of(entityUrn1, entityUrn2), ImmutableSet.of(aspectName)); + + // 3. Compare Entity Objects + + // Entity 1 + EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); + assertEquals(readEntityResponse1.getAspects().size(), 2); // Key + Info aspect. + EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); + assertEquals(envelopedAspect1.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect1.getValue().data()))); + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); + + // Entity 2 + EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); + assertEquals(readEntityResponse2.getAspects().size(), 2); // Key + Info aspect. 
+ EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); + assertEquals(envelopedAspect2.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect2.getValue().data()))); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntitiesVersionedV2() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + VersionedUrn versionedUrn1 = + new VersionedUrn().setUrn(entityUrn1).setVersionStamp("corpUserInfo:0"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + VersionedUrn versionedUrn2 = new VersionedUrn().setUrn(entityUrn2); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + String aspectName = "corpUserInfo"; + String keyName = "corpUserKey"; + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map<Urn, EntityResponse> readEntities = + _entityServiceImpl.getEntitiesVersionedV2( + ImmutableSet.of(versionedUrn1, versionedUrn2), ImmutableSet.of(aspectName)); + + // 3. Compare Entity Objects + + // Entity 1 + EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); + assertEquals(2, readEntityResponse1.getAspects().size()); // Key + Info aspect. + EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); + assertEquals(envelopedAspect1.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect1.getValue().data()))); + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); + + // Entity 2 + EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); + assertEquals(2, readEntityResponse2.getAspects().size()); // Key + Info aspect. 
+ EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); + assertEquals(envelopedAspect2.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect2.getValue().data()))); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestAspectsGetLatestAspects() throws Exception { + + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + Status writeAspect1 = new Status().setRemoved(false); + String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); + pairToIngest.add(getAspectRecordPair(writeAspect1, Status.class)); + + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName2 = AspectGenerationUtils.getAspectName(writeAspect2); + pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + Map<String, RecordTemplate> latestAspects = + _entityServiceImpl.getLatestAspectsForUrn( + entityUrn, new HashSet<>(Arrays.asList(aspectName1, aspectName2))); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testReingestAspectsGetLatestAspects() throws Exception { + + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + writeAspect1.setCustomProperties(new StringMap()); + String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); + pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); + + GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); + + initialChangeLog.setAspect(aspect); + initialChangeLog.setSystemMetadata(metadata1); - @Test - public void testIngestGetEntitiesV2() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = 
UrnUtils.getUrn("urn:li:corpuser:tester1"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - String aspectName = "corpUserInfo"; - String keyName = "corpUserKey"; - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map<Urn, EntityResponse> readEntities = - _entityServiceImpl.getEntitiesV2("corpuser", ImmutableSet.of(entityUrn1, entityUrn2), ImmutableSet.of(aspectName)); - - // 3. Compare Entity Objects - - // Entity 1 - EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); - assertEquals(readEntityResponse1.getAspects().size(), 2); // Key + Info aspect. - EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); - assertEquals(envelopedAspect1.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect1.getValue().data()))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); - - // Entity 2 - EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); - assertEquals(readEntityResponse2.getAspects().size(), 2); // Key + Info aspect. 
- EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); - assertEquals(envelopedAspect2.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect2.getValue().data()))); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), - Mockito.any(), Mockito.any()); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(aspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(aspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map<String, RecordTemplate> latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); - @Test - public void testIngestGetEntitiesVersionedV2() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - VersionedUrn versionedUrn1 = new VersionedUrn().setUrn(entityUrn1).setVersionStamp("corpUserInfo:0"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - VersionedUrn versionedUrn2 = new VersionedUrn().setUrn(entityUrn2); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - String aspectName = "corpUserInfo"; - String keyName = "corpUserKey"; - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map<Urn, EntityResponse> readEntities = - _entityServiceImpl.getEntitiesVersionedV2(ImmutableSet.of(versionedUrn1, versionedUrn2), ImmutableSet.of(aspectName)); - - // 3. Compare Entity Objects - - // Entity 1 - EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); - assertEquals(2, readEntityResponse1.getAspects().size()); // Key + Info aspect. 
- EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); - assertEquals(envelopedAspect1.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect1.getValue().data()))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); - - // Entity 2 - EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); - assertEquals(2, readEntityResponse2.getAspects().size()); // Key + Info aspect. - EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); - assertEquals(envelopedAspect2.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect2.getValue().data()))); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), - Mockito.any(), Mockito.any()); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestAspectsGetLatestAspects() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - - Status writeAspect1 = new Status().setRemoved(false); - String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); - pairToIngest.add(getAspectRecordPair(writeAspect1, Status.class)); - - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName2 = AspectGenerationUtils.getAspectName(writeAspect2); - pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(Arrays.asList(aspectName1, aspectName2)) - ); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); - assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testReingestAspectsGetLatestAspects() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, 
TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - writeAspect1.setCustomProperties(new StringMap()); - String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); - pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + verifyNoMoreInteractions(_mockProducer); + } - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); + @Test + public void testReingestLineageAspect() throws Exception { - GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - initialChangeLog.setAspect(aspect); - initialChangeLog.setSystemMetadata(metadata1); + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(aspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(aspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); + 
initialChangeLog.setAspect(aspect); + initialChangeLog.setSystemMetadata(metadata1); - - verifyNoMoreInteractions(_mockProducer); + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(aspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(aspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map<String, RecordTemplate> latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); + + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); + + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testReingestLineageProposal() throws Exception { + + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); + + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + MetadataChangeProposal mcp1 = new MetadataChangeProposal(); + mcp1.setEntityType(entityUrn.getEntityType()); + GenericAspect genericAspect = GenericRecordUtils.serializeAspect(upstreamLineage); + mcp1.setAspect(genericAspect); + mcp1.setEntityUrn(entityUrn); + mcp1.setChangeType(ChangeType.UPSERT); + mcp1.setSystemMetadata(metadata1); + mcp1.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); + + _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); + + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); + + initialChangeLog.setAspect(genericAspect); + initialChangeLog.setSystemMetadata(metadata1); + + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(genericAspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(genericAspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map<String, RecordTemplate> latestAspects = + 
_entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); + + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); + + _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestTimeseriesAspect() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProfile datasetProfile = new DatasetProfile(); + datasetProfile.setRowCount(1000); + datasetProfile.setColumnCount(15); + datasetProfile.setTimestampMillis(0L); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProfile"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + } + + @Test + public void testAsyncProposalVersioned() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProperties datasetProperties = new DatasetProperties(); + datasetProperties.setName("Foo Bar"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProperties"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); + verify(_mockProducer, times(0)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + verify(_mockProducer, times(1)) + .produceMetadataChangeProposal(Mockito.eq(entityUrn), Mockito.eq(gmce)); + } + + @Test + public void testAsyncProposalTimeseries() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProfile datasetProfile = new DatasetProfile(); + datasetProfile.setRowCount(1000); + datasetProfile.setColumnCount(15); + datasetProfile.setTimestampMillis(0L); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProfile"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetProfileSerialized = 
dataTemplateCodec.dataTemplateToBytes(datasetProfile); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + verify(_mockProducer, times(0)) + .produceMetadataChangeProposal(Mockito.eq(entityUrn), Mockito.eq(gmce)); + } + + @Test + public void testUpdateGetAspect() throws AssertionError { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + AspectSpec corpUserInfoSpec = + _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #1 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + // Ingest CorpUserInfo Aspect #2 + writeAspect.setEmail("newemail@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #2 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testGetAspectAtVersion() throws AssertionError { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + AspectSpec corpUserInfoSpec = + _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #1 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); + writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); + writtenVersionedAspect1.setVersion(0); + + VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + // Validate retrieval of CorpUserInfo Aspect #2 + 
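// (Sketch of the version semantics this test leans on, as far as the asserts show:
+    // version 0 always resolves to the latest value, so after the second ingest below
+    //   getVersionedAspect(entityUrn, aspectName, 0)  -> writeAspect2
+    //   getVersionedAspect(entityUrn, aspectName, -1) -> writeAspect2 as well,
+    // while the first write presumably shifts to version 1, as the direct
+    // _aspectDao reads in testIngestGetLatestAspect suggest.)
+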
_entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); + writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); + writtenVersionedAspect2.setVersion(0); + + VersionedAspect readAspectVersion2 = + _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testRollbackAspect() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); + Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Ingest CorpUserInfo Aspect #3 + CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since this run has been overwritten + AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); + rollbackOverwrittenAspect.setRunId("run-123"); + rollbackOverwrittenAspect.setAspectName(aspectName); + rollbackOverwrittenAspect.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-123", true); + + // assert nothing was deleted + RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); + + RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, 
readAspectOverwrite));
+
+    // this should delete the most recent aspect
+    AspectRowSummary rollbackRecentAspect = new AspectRowSummary();
+    rollbackRecentAspect.setRunId("run-456");
+    rollbackRecentAspect.setAspectName(aspectName);
+    rollbackRecentAspect.setUrn(entityUrn1.toString());
+
+    _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackRecentAspect), "run-456", true);
+
+    // assert the new most recent aspect is the original one
+    RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0);
+    assertTrue(DataTemplateUtil.areEqual(writeAspect1, readNewRecentAspect));
+  }
+
+  @Test
+  public void testRollbackKey() throws AssertionError {
+    Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1");
+
+    SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123");
+    SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456");
+
+    String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo());
+    String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1);
+
+    // Ingest CorpUserInfo Aspect #1
+    CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com");
+
+    RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1);
+
+    // Ingest CorpUserInfo Aspect #1 Overwrite
+    CorpUserInfo writeAspect1Overwrite =
+        AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com");
+
+    List<UpsertBatchItem> items =
+        List.of(
+            UpsertBatchItem.builder()
+                .urn(entityUrn1)
+                .aspectName(aspectName)
+                .aspect(writeAspect1)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn1)
+                .aspectName(keyAspectName)
+                .aspect(writeKey1)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn1)
+                .aspectName(aspectName)
+                .aspect(writeAspect1Overwrite)
+                .systemMetadata(metadata2)
+                .build(_testEntityRegistry));
+    _entityServiceImpl.ingestAspects(
+        AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true);
+
+    // this should no-op since the key should have been written in the first run
+    AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary();
+    rollbackKeyWithWrongRunId.setRunId("run-456");
+    rollbackKeyWithWrongRunId.setAspectName("corpUserKey");
+    rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString());
+
+    _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithWrongRunId), "run-456", true);
+
+    // assert nothing was deleted
+    RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1);
+    assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal));
+
+    RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0);
+    assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite));
+
+    // this should delete the most recent aspect
+    AspectRowSummary rollbackKeyWithCorrectRunId = new AspectRowSummary();
+    rollbackKeyWithCorrectRunId.setRunId("run-123");
+    rollbackKeyWithCorrectRunId.setAspectName("corpUserKey");
+    rollbackKeyWithCorrectRunId.setUrn(entityUrn1.toString());
+
+    _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithCorrectRunId), "run-123", true);
+
+    // assert the new most recent aspect is null
+    RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0);
+    assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect));
+  }
+
+  @Test
+  public
void testRollbackUrn() throws AssertionError {
+    Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1");
+    Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2");
+    Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3");
+
+    SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123");
+    SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456");
+
+    String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo());
+    String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1);
+
+    // Ingest CorpUserInfo Aspect #1
+    CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com");
+
+    RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1);
+
+    // Ingest CorpUserInfo Aspect #2
+    CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com");
+
+    // Ingest CorpUserInfo Aspect #3
+    CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com");
+
+    // Ingest CorpUserInfo Aspect #1 Overwrite
+    CorpUserInfo writeAspect1Overwrite =
+        AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com");
+
+    List<UpsertBatchItem> items =
+        List.of(
+            UpsertBatchItem.builder()
+                .urn(entityUrn1)
+                .aspectName(aspectName)
+                .aspect(writeAspect1)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn1)
+                .aspectName(keyAspectName)
+                .aspect(writeKey1)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn2)
+                .aspectName(aspectName)
+                .aspect(writeAspect2)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn3)
+                .aspectName(aspectName)
+                .aspect(writeAspect3)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn1)
+                .aspectName(aspectName)
+                .aspect(writeAspect1Overwrite)
+                .systemMetadata(metadata2)
+                .build(_testEntityRegistry));
+    _entityServiceImpl.ingestAspects(
+        AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true);
+
+    // this should no-op since the key should have been written in the first run
+    AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary();
+    rollbackKeyWithWrongRunId.setRunId("run-456");
+    rollbackKeyWithWrongRunId.setAspectName("CorpUserKey");
+    rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString());
+
+    // this should delete all related aspects
+    _entityServiceImpl.deleteUrn(UrnUtils.getUrn("urn:li:corpuser:test1"));
+
+    // assert the new most recent aspect is null
+    RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0);
+    assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect));
+
+    RecordTemplate deletedKeyAspect = _entityServiceImpl.getAspect(entityUrn1, "corpUserKey", 0);
+    assertTrue(DataTemplateUtil.areEqual(null, deletedKeyAspect));
+  }
+
+  @Test
+  public void testIngestGetLatestAspect() throws AssertionError {
+    Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test");
+
+    // Ingest CorpUserInfo Aspect #1
+    CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com");
+    String aspectName = AspectGenerationUtils.getAspectName(writeAspect1);
+
+    SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123");
+    SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456");
+
+    List<UpsertBatchItem> items =
+
List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + + reset(_mockProducer); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); + EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); + + assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNotNull(mcl.getPreviousAspectValue()); + assertNotNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetLatestEnvelopedAspect() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + EnvelopedAspect readAspect1 = + _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); + assertTrue( + 
DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + EnvelopedAspect readAspect2 = + _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); + EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); + EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); + + assertTrue( + DataTemplateUtil.areEqual(writeAspect2, new CorpUserInfo(readAspect2.getValue().data()))); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestSameAspect() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + SystemMetadata metadata3 = + AspectGenerationUtils.createSystemMetadata(1635792689, "run-123", "run-456"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + + reset(_mockProducer); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of 
CorpUserInfo Aspect #2 + RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + EntityAspect readAspectDao2 = + _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); + + assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); + assertFalse( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertFalse( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata1)); + + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata3)); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testRetention() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); + CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); + + String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); + // Ingest Status Aspect + Status writeAspect2 = new Status().setRemoved(true); + Status writeAspect2a = new Status().setRemoved(false); + Status writeAspect2b = new Status().setRemoved(true); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1b) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2b) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); + + _retentionService.setRetention( + null, + null, + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(2)))); + _retentionService.setRetention( + "corpuser", + "status", + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(4)))); + + // Ingest CorpUserInfo Aspect again + CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); + // Ingest Status Aspect again + Status writeAspect2c = 
new Status().setRemoved(false);
+
+    items =
+        List.of(
+            UpsertBatchItem.builder()
+                .urn(entityUrn)
+                .aspectName(aspectName)
+                .aspect(writeAspect1c)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry),
+            UpsertBatchItem.builder()
+                .urn(entityUrn)
+                .aspectName(aspectName2)
+                .aspect(writeAspect2c)
+                .systemMetadata(metadata1)
+                .build(_testEntityRegistry));
+    _entityServiceImpl.ingestAspects(
+        AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true);
+
+    assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1));
+    assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2);
+
+    // Reset retention policies
+    _retentionService.setRetention(
+        null,
+        null,
+        new DataHubRetentionConfig()
+            .setRetention(
+                new Retention().setVersion(new VersionBasedRetention().setMaxVersions(1))));
+    _retentionService.deleteRetention("corpuser", "status");
+    // Invoke batch apply
+    _retentionService.batchApplyRetention(null, null);
+    assertEquals(
+        _entityServiceImpl
+            .listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10)
+            .getTotalCount(),
+        1);
+    assertEquals(
+        _entityServiceImpl
+            .listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10)
+            .getTotalCount(),
+        1);
+  }
+
+  @Test
+  public void testIngestAspectIfNotPresent() throws AssertionError {
+    Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1");
+
+    SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata();
+
+    String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo());
+
+    // Ingest CorpUserInfo Aspect
+    CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com");
+    _entityServiceImpl.ingestAspectIfNotPresent(
+        entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1);
+    CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com");
+    _entityServiceImpl.ingestAspectIfNotPresent(
+        entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1);
+    CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com");
+    _entityServiceImpl.ingestAspectIfNotPresent(
+        entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1);
+
+    String aspectName2 = AspectGenerationUtils.getAspectName(new Status());
+    // Ingest Status Aspect
+    Status writeAspect2 = new Status().setRemoved(true);
+    _entityServiceImpl.ingestAspectIfNotPresent(
+        entityUrn, aspectName2, writeAspect2, TEST_AUDIT_STAMP, metadata1);
+    Status writeAspect2a = new Status().setRemoved(false);
+    _entityServiceImpl.ingestAspectIfNotPresent(
+        entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1);
+    Status writeAspect2b = new Status().setRemoved(true);
+    _entityServiceImpl.ingestAspectIfNotPresent(
+        entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1);
+
+    assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 0), writeAspect1);
+    assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 0), writeAspect2);
+
+    assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1));
+    assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1));
+
+    assertEquals(
+        _entityServiceImpl
+            .listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10)
+            .getTotalCount(),
+        1);
+    assertEquals(
+        _entityServiceImpl
+            .listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10)
+            .getTotalCount(),
+        1);
+  }
+
+  /**
+   * Equivalence checks on mocks fail when the object is compared directly, because converting a
+   * RecordTemplate to and from JSON reorders its fields. This simulates pulling the historical
+   * SystemMetadata from the previous call.
+   */
+  protected <T extends RecordTemplate> T simulatePullFromDB(T aspect, Class<T> clazz)
+      throws Exception {
+    final ObjectMapper objectMapper = new ObjectMapper();
+    objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    objectMapper
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    return RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect));
+  }
+
+  @Test
+  public void testRestoreIndices() throws Exception {
+    if (this instanceof EbeanEntityServiceTest) {
+      String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)";
+      Urn entityUrn = UrnUtils.getUrn(urnStr);
+      List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>();
+
+      final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage();
+      String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage);
+      pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class));
+
+      SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata();
+
+      _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1);
+
+      clearInvocations(_mockProducer);
+
+      RestoreIndicesArgs args = new RestoreIndicesArgs();
+      args.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME);
+      args.setBatchSize(1);
+      args.setStart(0);
+      args.setBatchDelayMs(1L);
+      args.setNumThreads(1);
+      args.setUrn(urnStr);
+      _entityServiceImpl.restoreIndices(args, obj -> {});
+
+      ArgumentCaptor<MetadataChangeLog> mclCaptor =
+          ArgumentCaptor.forClass(MetadataChangeLog.class);
+      verify(_mockProducer, times(1))
+          .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture());
+      MetadataChangeLog mcl = mclCaptor.getValue();
+      assertEquals(mcl.getEntityType(), "dataset");
+      assertNull(mcl.getPreviousAspectValue());
+      assertNull(mcl.getPreviousSystemMetadata());
+      assertEquals(mcl.getChangeType(), ChangeType.RESTATE);
+      assertEquals(mcl.getSystemMetadata().getProperties().get(FORCE_INDEXING_KEY), "true");
    }
-
-  @Test
-  public void testReingestLineageAspect() throws Exception {
-
-    Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)");
-
-    List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>();
-
-    final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage();
-    String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage);
-    pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class));
-
-    SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata();
-
-    _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1);
-
-    final MetadataChangeLog initialChangeLog = new MetadataChangeLog();
-    initialChangeLog.setEntityType(entityUrn.getEntityType());
-    initialChangeLog.setEntityUrn(entityUrn);
-    initialChangeLog.setChangeType(ChangeType.UPSERT);
-    initialChangeLog.setAspectName(aspectName1);
-    initialChangeLog.setCreated(TEST_AUDIT_STAMP);
-
-    GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond());
-
-    initialChangeLog.setAspect(aspect);
-    initialChangeLog.setSystemMetadata(metadata1);
-
-    final MetadataChangeLog restateChangeLog = new
MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(aspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(aspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); - - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); - - - verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testValidateUrn() throws Exception { + // Valid URN + Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); + EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); + + // URN with trailing whitespace + Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); + Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); } - @Test - public void testReingestLineageProposal() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + // Urn purely too long + String stringTooLong = "a".repeat(510); - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - MetadataChangeProposal mcp1 = new MetadataChangeProposal(); - mcp1.setEntityType(entityUrn.getEntityType()); - GenericAspect genericAspect = GenericRecordUtils.serializeAspect(upstreamLineage); - mcp1.setAspect(genericAspect); - mcp1.setEntityUrn(entityUrn); - mcp1.setChangeType(ChangeType.UPSERT); - mcp1.setSystemMetadata(metadata1); - mcp1.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); - - _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); - - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); - - initialChangeLog.setAspect(genericAspect); - initialChangeLog.setSystemMetadata(metadata1); - - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - 
restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(genericAspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(genericAspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); - - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); - - _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); - - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestTimeseriesAspect() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProfile datasetProfile = new DatasetProfile(); - datasetProfile.setRowCount(1000); - datasetProfile.setColumnCount(15); - datasetProfile.setTimestampMillis(0L); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProfile"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); - } - - @Test - public void testAsyncProposalVersioned() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProperties datasetProperties = new DatasetProperties(); - datasetProperties.setName("Foo Bar"); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProperties"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); - verify(_mockProducer, times(0)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - verify(_mockProducer, times(1)).produceMetadataChangeProposal(Mockito.eq(entityUrn), - Mockito.eq(gmce)); - } - - - @Test - public void testAsyncProposalTimeseries() throws Exception { - Urn entityUrn = 
UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProfile datasetProfile = new DatasetProfile(); - datasetProfile.setRowCount(1000); - datasetProfile.setColumnCount(15); - datasetProfile.setTimestampMillis(0L); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProfile"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - verify(_mockProducer, times(0)).produceMetadataChangeProposal(Mockito.eq(entityUrn), - Mockito.eq(gmce)); - } - - @Test - public void testUpdateGetAspect() throws AssertionError { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); - - RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - // Ingest CorpUserInfo Aspect #2 - writeAspect.setEmail("newemail@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); - - RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testGetAspectAtVersion() throws AssertionError { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); - - VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); - writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); - 
writtenVersionedAspect1.setVersion(0); - - VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - - // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); - - VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); - writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); - writtenVersionedAspect2.setVersion(0); - - VersionedAspect readAspectVersion2 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testRollbackAspect() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Ingest CorpUserInfo Aspect #3 - CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since this run has been overwritten - AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); - rollbackOverwrittenAspect.setRunId("run-123"); - 
rollbackOverwrittenAspect.setAspectName(aspectName); - rollbackOverwrittenAspect.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-123", true); - - // assert nothing was deleted - RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); - - RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); - - // this should delete the most recent aspect - AspectRowSummary rollbackRecentAspect = new AspectRowSummary(); - rollbackRecentAspect.setRunId("run-456"); - rollbackRecentAspect.setAspectName(aspectName); - rollbackRecentAspect.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-456", true); - - // assert the new most recent aspect is the original one - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readNewRecentAspect)); - } - - @Test - public void testRollbackKey() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(keyAspectName) - .aspect(writeKey1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since the key should have been written in the furst run - AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); - rollbackKeyWithWrongRunId.setRunId("run-456"); - rollbackKeyWithWrongRunId.setAspectName("corpUserKey"); - rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithWrongRunId), "run-456", true); - - // assert nothing was deleted - RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); - - RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); - - // this should delete the most recent aspect - 
AspectRowSummary rollbackKeyWithCorrectRunId = new AspectRowSummary(); - rollbackKeyWithCorrectRunId.setRunId("run-123"); - rollbackKeyWithCorrectRunId.setAspectName("corpUserKey"); - rollbackKeyWithCorrectRunId.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithCorrectRunId), "run-123", true); - - // assert the new most recent aspect is null - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); - } - - @Test - public void testRollbackUrn() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Ingest CorpUserInfo Aspect #3 - CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(keyAspectName) - .aspect(writeKey1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since the key should have been written in the furst run - AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); - rollbackKeyWithWrongRunId.setRunId("run-456"); - rollbackKeyWithWrongRunId.setAspectName("CorpUserKey"); - rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); - - // this should delete all related aspects - _entityServiceImpl.deleteUrn(UrnUtils.getUrn("urn:li:corpuser:test1")); - - // assert the new most recent aspect is null - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); - - RecordTemplate deletedKeyAspect = _entityServiceImpl.getAspect(entityUrn1, "corpUserKey", 0); - assertTrue(DataTemplateUtil.areEqual(null, 
deletedKeyAspect)); + Urn testUrnTooLong = new Urn("li", "corpuser", new TupleKey(stringTooLong)); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); + Assert.fail("Should have raised IllegalArgumentException for URN too long"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); } - @Test - public void testIngestGetLatestAspect() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - - reset(_mockProducer); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNotNull(mcl.getPreviousAspectValue()); - assertNotNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); + // Urn too long when URL encoded + StringBuilder buildStringTooLongWhenEncoded = new StringBuilder(); + 
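// (Worked example of the encoding arithmetic behind this pair of builders: '>'
+    // URL-encodes to the three characters "%3E", so 200 of them encode to 600 bytes,
+    // over the 512-byte encoded limit asserted above, while 200 plain 'a' characters
+    // stay at 200 bytes either way; roughly,
+    //   URLEncoder.encode(">".repeat(200), StandardCharsets.UTF_8).length() == 600
+    //   URLEncoder.encode("a".repeat(200), StandardCharsets.UTF_8).length() == 200)
+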
StringBuilder buildStringSameLengthWhenEncoded = new StringBuilder(); + for (int i = 0; i < 200; i++) { + buildStringTooLongWhenEncoded.append('>'); + buildStringSameLengthWhenEncoded.append('a'); } - - @Test - public void testIngestGetLatestEnvelopedAspect() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - EnvelopedAspect readAspect1 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - EnvelopedAspect readAspect2 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); - EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, new CorpUserInfo(readAspect2.getValue().data()))); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestSameAspect() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - SystemMetadata metadata3 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-123", "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - 
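// Editor's aside (not part of the patch): a minimal, self-contained sketch of the
// encoding arithmetic the new test above relies on. Each '>' URL-encodes to "%3E"
// (3 bytes), so 200 of them encode to 600 bytes and exceed the 512-byte limit named
// in the assertion messages, while 200 'a' characters are unchanged by encoding and
// stay well under the limit. URLEncoder below is an assumption used for illustration;
// it is not necessarily how EntityUtils.validateUrn measures the encoded length.
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

class UrnEncodedLengthSketch {
  public static void main(String[] args) {
    String growsWhenEncoded = ">".repeat(200);  // each '>' becomes "%3E" when encoded
    String stableWhenEncoded = "a".repeat(200); // 'a' is left as-is by URL encoding

    // Prints 600: over the 512-byte encoded limit, so validation should fail.
    System.out.println(URLEncoder.encode(growsWhenEncoded, StandardCharsets.UTF_8).length());
    // Prints 200: same length as the raw string, so validation should pass.
    System.out.println(URLEncoder.encode(stableWhenEncoded, StandardCharsets.UTF_8).length());
  }
}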
// Validate retrieval of CorpUserInfo Aspect #1 - RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - - reset(_mockProducer); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); - assertFalse(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertFalse(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata1)); - - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata3)); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testRetention() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - - String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); - // Ingest Status Aspect - Status writeAspect2 = new Status().setRemoved(true); - Status writeAspect2a = new Status().setRemoved(false); - Status writeAspect2b = new Status().setRemoved(true); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1a) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1b) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), 
- UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2a) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2b) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); - - _retentionService.setRetention(null, null, new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(2)))); - _retentionService.setRetention("corpuser", "status", new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(4)))); - - // Ingest CorpUserInfo Aspect again - CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); - // Ingest Status Aspect again - Status writeAspect2c = new Status().setRemoved(false); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1c) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2c) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); - - // Reset retention policies - _retentionService.setRetention(null, null, new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(1)))); - _retentionService.deleteRetention("corpuser", "status"); - // Invoke batch apply - _retentionService.batchApplyRetention(null, null); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10).getTotalCount(), 1); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10).getTotalCount(), 1); - } - - @Test - public void testIngestAspectIfNotPresent() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1); - - String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); - // Ingest Status Aspect - Status writeAspect2 = new Status().setRemoved(true); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, 
writeAspect2, TEST_AUDIT_STAMP, metadata1); - Status writeAspect2a = new Status().setRemoved(false); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1); - Status writeAspect2b = new Status().setRemoved(true); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1); - - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 0), writeAspect1); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 0), writeAspect2); - - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1)); - - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10).getTotalCount(), 1); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10).getTotalCount(), 1); + Urn testUrnTooLongWhenEncoded = + new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); + Urn testUrnSameLengthWhenEncoded = + new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); + // The string whose encoded length equals its raw length should be allowed; the one that grows when encoded should not be + EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); + Assert.fail("Should have raised IllegalArgumentException for URN too long"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); } - /** - * Equivalence for mocks fails when directly using the object as when converting from RecordTemplate from JSON it - * reorders the fields. This simulates pulling the historical SystemMetadata from the previous call. 
- */ - protected <T extends RecordTemplate> T simulatePullFromDB(T aspect, Class<T> clazz) throws Exception { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - return RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + // Urn containing disallowed character + Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); + Urn testUrnSpecialCharInvalid = new Urn("li", "corpUser", new TupleKey("bob␟")); + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); + Assert.fail( + "Should have raised IllegalArgumentException for URN containing the illegal char"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); } - - @Test - public void testRestoreIndices() throws Exception { - if (this instanceof EbeanEntityServiceTest) { - String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; - Urn entityUrn = UrnUtils.getUrn(urnStr); - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - clearInvocations(_mockProducer); - - RestoreIndicesArgs args = new RestoreIndicesArgs(); - args.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); - args.setBatchSize(1); - args.setStart(0); - args.setBatchDelayMs(1L); - args.setNumThreads(1); - args.setUrn(urnStr); - _entityServiceImpl.restoreIndices(args, obj -> { - }); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog( - Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "dataset"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.RESTATE); - assertEquals(mcl.getSystemMetadata().getProperties().get(FORCE_INDEXING_KEY), "true"); - } - } - - @Test - public void testValidateUrn() throws Exception { - // Valid URN - Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); - EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); - - // URN with trailing whitespace - Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); - Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); - } - - // Urn purely too long - String stringTooLong = "a".repeat(510); - - Urn testUrnTooLong = new Urn("li", 
"corpuser", new TupleKey(stringTooLong)); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); - Assert.fail("Should have raised IllegalArgumentException for URN too long"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); - } - - // Urn too long when URL encoded - StringBuilder buildStringTooLongWhenEncoded = new StringBuilder(); - StringBuilder buildStringSameLengthWhenEncoded = new StringBuilder(); - for (int i = 0; i < 200; i++) { - buildStringTooLongWhenEncoded.append('>'); - buildStringSameLengthWhenEncoded.append('a'); - } - Urn testUrnTooLongWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); - Urn testUrnSameLengthWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); - // Same length when encoded should be allowed, the encoded one should not be - EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); - Assert.fail("Should have raised IllegalArgumentException for URN too long"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); - } - - // Urn containing disallowed character - Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); - Urn testUrnSpecialCharInvalid = new Urn("li", "corpUser", new TupleKey("bob␟")); - EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); - Assert.fail("Should have raised IllegalArgumentException for URN containing the illegal char"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); - } - - Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); - try { - EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); - Assert.fail("Should have raised IllegalArgumentException for URN with mismatched parens"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("mismatched paren nesting")); - } - - Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); - try { - EntityUtils.validateUrn(_testEntityRegistry, invalidType); - Assert.fail("Should have raised IllegalArgumentException for URN with non-existent entity type"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); - } - - Urn validFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); - EntityUtils.validateUrn(_testEntityRegistry, validFabricType); - - Urn invalidFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); - try { - EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); - Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains(invalidFabricType.toString())); - } - - Urn urnEndingInComma = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); - try { - EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); - Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); - } catch 
(IllegalArgumentException e) { - assertTrue(e.getMessage().contains(urnEndingInComma.toString())); - } + Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); + try { + EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); + Assert.fail("Should have raised IllegalArgumentException for URN with mismatched parens"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("mismatched paren nesting")); } - @Test - public void testUIPreProcessedProposal() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - EditableDatasetProperties datasetProperties = new EditableDatasetProperties(); - datasetProperties.setDescription("Foo Bar"); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("editableDatasetProperties"); - SystemMetadata systemMetadata = new SystemMetadata(); - StringMap properties = new StringMap(); - properties.put(APP_SOURCE, UI_SOURCE); - systemMetadata.setProperties(properties); - gmce.setSystemMetadata(systemMetadata); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); - ArgumentCaptor<MetadataChangeLog> captor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), captor.capture()); - assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); + try { + EntityUtils.validateUrn(_testEntityRegistry, invalidType); + Assert.fail( + "Should have raised IllegalArgumentException for URN with non-existent entity type"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); } - @Nonnull - protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) throws Exception { - CorpuserUrn corpuserUrn = CorpuserUrn.createFromUrn(entityUrn); - com.linkedin.entity.Entity entity = new com.linkedin.entity.Entity(); - Snapshot snapshot = new Snapshot(); - CorpUserSnapshot corpUserSnapshot = new CorpUserSnapshot(); - List<CorpUserAspect> userAspects = new ArrayList<>(); - userAspects.add(CorpUserAspect.create(AspectGenerationUtils.createCorpUserInfo(email))); - corpUserSnapshot.setAspects(new CorpUserAspectArray(userAspects)); - corpUserSnapshot.setUrn(corpuserUrn); - snapshot.setCorpUserSnapshot(corpUserSnapshot); - entity.setValue(snapshot); - return entity; + Urn validFabricType = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); + EntityUtils.validateUrn(_testEntityRegistry, validFabricType); + + Urn invalidFabricType = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); + try { + EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); + Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); + } catch (IllegalArgumentException 
e) { + assertTrue(e.getMessage().contains(invalidFabricType.toString())); } - protected <T extends RecordTemplate> Pair<String, RecordTemplate> getAspectRecordPair(T aspect, Class<T> clazz) - throws Exception { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - RecordTemplate recordTemplate = RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); - return new Pair<>(AspectGenerationUtils.getAspectName(aspect), recordTemplate); + Urn urnEndingInComma = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); + try { + EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); + Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains(urnEndingInComma.toString())); } + } + + @Test + public void testUIPreProcessedProposal() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + EditableDatasetProperties datasetProperties = new EditableDatasetProperties(); + datasetProperties.setDescription("Foo Bar"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("editableDatasetProperties"); + SystemMetadata systemMetadata = new SystemMetadata(); + StringMap properties = new StringMap(); + properties.put(APP_SOURCE, UI_SOURCE); + systemMetadata.setProperties(properties); + gmce.setSystemMetadata(systemMetadata); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + ArgumentCaptor<MetadataChangeLog> captor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), captor.capture()); + assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + } + + @Nonnull + protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) + throws Exception { + CorpuserUrn corpuserUrn = CorpuserUrn.createFromUrn(entityUrn); + com.linkedin.entity.Entity entity = new com.linkedin.entity.Entity(); + Snapshot snapshot = new Snapshot(); + CorpUserSnapshot corpUserSnapshot = new CorpUserSnapshot(); + List<CorpUserAspect> userAspects = new ArrayList<>(); + userAspects.add(CorpUserAspect.create(AspectGenerationUtils.createCorpUserInfo(email))); + corpUserSnapshot.setAspects(new CorpUserAspectArray(userAspects)); + corpUserSnapshot.setUrn(corpuserUrn); + snapshot.setCorpUserSnapshot(corpUserSnapshot); + entity.setValue(snapshot); + return entity; + } + + protected <T extends RecordTemplate> Pair<String, RecordTemplate> getAspectRecordPair( + T aspect, Class<T> clazz) throws Exception { + final 
ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + RecordTemplate recordTemplate = + RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + return new Pair<>(AspectGenerationUtils.getAspectName(aspect), recordTemplate); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java index e90ffd8a4bcb7..680d4079851eb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java @@ -14,16 +14,15 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class TestEntityRegistry implements EntityRegistry { private final Map<String, EntitySpec> entityNameToSpec; public TestEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) - .buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); } @Nonnull diff --git a/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java index c7ab24e87a873..a98386f6f871b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.extractor; +import static org.testng.AssertJUnit.assertEquals; + import com.datahub.test.TestEntityAspect; import com.datahub.test.TestEntityAspectArray; import com.datahub.test.TestEntityInfo; @@ -12,9 +14,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - - public class AspectExtractorTest { @Test public void testExtractor() { @@ -23,7 +22,8 @@ public void testExtractor() { TestEntityKey testEntityKey = TestEntityUtil.getTestEntityKey(urn); TestEntityInfo testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); snapshot.setAspects( - new TestEntityAspectArray(TestEntityAspect.create(testEntityKey), TestEntityAspect.create(testEntityInfo))); + new TestEntityAspectArray( + TestEntityAspect.create(testEntityKey), TestEntityAspect.create(testEntityInfo))); Map<String, RecordTemplate> result = AspectExtractor.extractAspectRecords(snapshot); assertEquals(result.size(), 2); assertEquals(result.get("testEntityKey"), testEntityKey); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java index cbc4825a3b557..1adb5d1ab3952 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.extractor; +import static org.testng.Assert.assertEquals; + import com.datahub.test.TestEntityInfo; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntitySpecBuilder; @@ -15,65 +17,91 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; - - public class FieldExtractorTest { @Test public void testExtractor() { EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); AspectSpec testEntityInfoSpec = testEntitySpec.getAspectSpec("testEntityInfo"); - Map<String, SearchableFieldSpec> nameToSpec = testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); + Map<String, SearchableFieldSpec> nameToSpec = + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); TestEntityInfo testEntityInfo = new TestEntityInfo(); Map<SearchableFieldSpec, List<Object>> result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); - assertEquals(result, testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); + assertEquals( + result, + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); Urn urn = TestEntityUtil.getTestEntityUrn(); testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); - result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); + result = + FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); assertEquals(result.get(nameToSpec.get("textFieldOverride")), ImmutableList.of("test")); assertEquals(result.get(nameToSpec.get("foreignKey")), ImmutableList.of()); assertEquals(result.get(nameToSpec.get("nestedForeignKey")), ImmutableList.of(urn)); - assertEquals(result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); + assertEquals( + result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); assertEquals(result.get(nameToSpec.get("nestedIntegerField")), ImmutableList.of(1)); - assertEquals(result.get(nameToSpec.get("nestedArrayStringField")), ImmutableList.of("nestedArray1", "nestedArray2")); - assertEquals(result.get(nameToSpec.get("nestedArrayArrayField")), ImmutableList.of("testNestedArray1", "testNestedArray2")); - assertEquals(result.get(nameToSpec.get("customProperties")), ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); - assertEquals(result.get(nameToSpec.get("esObjectField")), ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); + assertEquals( + result.get(nameToSpec.get("nestedArrayStringField")), + ImmutableList.of("nestedArray1", "nestedArray2")); + assertEquals( + result.get(nameToSpec.get("nestedArrayArrayField")), + ImmutableList.of("testNestedArray1", "testNestedArray2")); + assertEquals( + result.get(nameToSpec.get("customProperties")), + ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); + assertEquals( + result.get(nameToSpec.get("esObjectField")), + 
ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); } @Test public void testExtractorMaxValueLength() { EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); AspectSpec testEntityInfoSpec = testEntitySpec.getAspectSpec("testEntityInfo"); - Map<String, SearchableFieldSpec> nameToSpec = testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); + Map<String, SearchableFieldSpec> nameToSpec = + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); TestEntityInfo testEntityInfo = new TestEntityInfo(); Map<SearchableFieldSpec, List<Object>> result = - FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); - assertEquals(result, testEntityInfoSpec.getSearchableFieldSpecs() - .stream() + FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); + assertEquals( + result, + testEntityInfoSpec.getSearchableFieldSpecs().stream() .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); Urn urn = TestEntityUtil.getTestEntityUrn(); testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); - result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs(), 1); + result = + FieldExtractor.extractFields( + testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs(), 1); assertEquals(result.get(nameToSpec.get("textFieldOverride")), ImmutableList.of("test")); assertEquals(result.get(nameToSpec.get("foreignKey")), ImmutableList.of()); assertEquals(result.get(nameToSpec.get("nestedForeignKey")), ImmutableList.of(urn)); - assertEquals(result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); + assertEquals( + result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); assertEquals(result.get(nameToSpec.get("nestedIntegerField")), ImmutableList.of(1)); - assertEquals(result.get(nameToSpec.get("nestedArrayStringField")), ImmutableList.of("nestedArray1", "nestedArray2")); - assertEquals(result.get(nameToSpec.get("nestedArrayArrayField")), ImmutableList.of("testNestedArray1", "testNestedArray2")); - assertEquals(result.get(nameToSpec.get("customProperties")), ImmutableList.of(), "Expected no matching values because of value limit of 1"); - assertEquals(result.get(nameToSpec.get("esObjectField")), ImmutableList.of(), "Expected no matching values because of value limit of 1"); + assertEquals( + result.get(nameToSpec.get("nestedArrayStringField")), + ImmutableList.of("nestedArray1", "nestedArray2")); + assertEquals( + result.get(nameToSpec.get("nestedArrayArrayField")), + ImmutableList.of("testNestedArray1", "testNestedArray2")); + assertEquals( + result.get(nameToSpec.get("customProperties")), + ImmutableList.of(), + "Expected no matching values because of value limit of 1"); + assertEquals( + result.get(nameToSpec.get("esObjectField")), + ImmutableList.of(), + "Expected no matching values because of value limit of 1"); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java index 38a20ef4b7a9b..2af1eeb46f2ba 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java @@ -1,17 
+1,20 @@ package com.linkedin.metadata.graph; +import static org.testng.Assert.*; + import com.linkedin.common.urn.UrnUtils; import java.util.Collections; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EdgeTest { - private static final String SOURCE_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:foo,source1,PROD)"; - private static final String SOURCE_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:foo,source2,PROD)"; - private static final String DESTINATION_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:foo,destination1,PROD)"; - private static final String DESTINATION_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:foo,destination2,PROD)"; + private static final String SOURCE_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:foo,source1,PROD)"; + private static final String SOURCE_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:foo,source2,PROD)"; + private static final String DESTINATION_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:foo,destination1,PROD)"; + private static final String DESTINATION_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:foo,destination2,PROD)"; private static final String DOWNSTREAM_RELATIONSHIP_TYPE = "DownstreamOf"; private static final Long TIMESTAMP_1 = 1L; private static final Long TIMESTAMP_2 = 2L; @@ -21,39 +24,43 @@ public class EdgeTest { @Test public void testEdgeEquals() { // First edge - final Edge edge1 = new Edge( - UrnUtils.getUrn(SOURCE_URN_1), - UrnUtils.getUrn(DESTINATION_URN_1), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_2), - Collections.emptyMap()); + final Edge edge1 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_1), + UrnUtils.getUrn(DESTINATION_URN_1), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_2), + Collections.emptyMap()); - // Second edge has same source, destination, and relationship type as edge1, and should be considered the same edge. + // Second edge has same source, destination, and relationship type as edge1, and should be + // considered the same edge. // All other fields are different. - final Edge edge2 = new Edge( - UrnUtils.getUrn(SOURCE_URN_1), - UrnUtils.getUrn(DESTINATION_URN_1), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_2, - UrnUtils.getUrn(ACTOR_URN_2), - TIMESTAMP_2, - UrnUtils.getUrn(ACTOR_URN_2), - Collections.emptyMap()); + final Edge edge2 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_1), + UrnUtils.getUrn(DESTINATION_URN_1), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_2, + UrnUtils.getUrn(ACTOR_URN_2), + TIMESTAMP_2, + UrnUtils.getUrn(ACTOR_URN_2), + Collections.emptyMap()); assertEquals(edge1, edge2); // Third edge has different source and destination as edge1, and thus is not the same edge. 
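// Editor's aside (not part of the patch): a hypothetical sketch of the equality
// contract that testEdgeEquals exercises -- two edges compare equal when source,
// destination, and relationship type match, and the timestamp/actor fields are
// deliberately excluded. EdgeIdentitySketch is an illustration only, not DataHub's
// actual Edge implementation.
import java.util.Objects;

final class EdgeIdentitySketch {
  final String source;
  final String destination;
  final String relationshipType;

  EdgeIdentitySketch(String source, String destination, String relationshipType) {
    this.source = source;
    this.destination = destination;
    this.relationshipType = relationshipType;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof EdgeIdentitySketch)) {
      return false;
    }
    EdgeIdentitySketch other = (EdgeIdentitySketch) o;
    // Only the identifying triple participates; audit fields would be ignored.
    return source.equals(other.source)
        && destination.equals(other.destination)
        && relationshipType.equals(other.relationshipType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(source, destination, relationshipType);
  }
}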
- final Edge edge3 = new Edge( - UrnUtils.getUrn(SOURCE_URN_2), - UrnUtils.getUrn(DESTINATION_URN_2), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - Collections.emptyMap()); + final Edge edge3 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_2), + UrnUtils.getUrn(DESTINATION_URN_2), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + Collections.emptyMap()); assertNotEquals(edge1, edge3); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java index 12cd24ae9986d..3a51344d5779d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java @@ -1,5 +1,14 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataJobUrn; @@ -27,64 +36,56 @@ import java.util.stream.IntStream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - - /** - * Base class for testing any GraphService implementation. - * Derive the test class from this base and get your GraphService implementation - * tested with all these tests. + * Base class for testing any GraphService implementation. Derive the test class from this base and + * get your GraphService implementation tested with all these tests. * - * You can add implementation specific tests in derived classes, or add general tests - * here and have all existing implementations tested in the same way. + * <p>You can add implementation specific tests in derived classes, or add general tests here and + * have all existing implementations tested in the same way. * - * The `getPopulatedGraphService` method calls `GraphService.addEdge` to provide a populated Graph. - * Feel free to add a test to your test implementation that calls `getPopulatedGraphService` and - * asserts the state of the graph in an implementation specific way. + * <p>The `getPopulatedGraphService` method calls `GraphService.addEdge` to provide a populated + * Graph. 
Feel free to add a test to your test implementation that calls `getPopulatedGraphService` + * and asserts the state of the graph in an implementation specific way. */ -abstract public class GraphServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class GraphServiceTestBase extends AbstractTestNGSpringContextTests { private static class RelatedEntityComparator implements Comparator<RelatedEntity> { @Override public int compare(RelatedEntity left, RelatedEntity right) { - int cmp = left.relationshipType.compareTo(right.relationshipType); - if (cmp != 0) { - return cmp; - } - return left.urn.compareTo(right.urn); + int cmp = left.relationshipType.compareTo(right.relationshipType); + if (cmp != 0) { + return cmp; + } + return left.urn.compareTo(right.urn); } } - protected static final RelatedEntityComparator RELATED_ENTITY_COMPARATOR = new RelatedEntityComparator(); + protected static final RelatedEntityComparator RELATED_ENTITY_COMPARATOR = + new RelatedEntityComparator(); - /** - * Some test URN types. - */ + /** Some test URN types. */ protected static String datasetType = "dataset"; + protected static String userType = "user"; - /** - * Some test datasets. - */ - protected static String datasetOneUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; - protected static String datasetTwoUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; - protected static String datasetThreeUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; - protected static String datasetFourUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; - protected static String datasetFiveUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; + /** Some test datasets. */ + protected static String datasetOneUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; + + protected static String datasetTwoUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; + protected static String datasetThreeUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; + protected static String datasetFourUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; + protected static String datasetFiveUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; protected static Urn datasetOneUrn = createFromString(datasetOneUrnString); protected static Urn datasetTwoUrn = createFromString(datasetTwoUrnString); @@ -94,73 +95,85 @@ public int compare(RelatedEntity left, RelatedEntity right) { protected static String unknownUrnString = "urn:li:unknown:(urn:li:unknown:Unknown)"; - /** - * Some dataset owners. - */ - protected static String userOneUrnString = "urn:li:" + userType + ":(urn:li:user:system,Ingress,PROD)"; - protected static String userTwoUrnString = "urn:li:" + userType + ":(urn:li:user:individual,UserA,DEV)"; + /** Some dataset owners. 
*/ + protected static String userOneUrnString = + "urn:li:" + userType + ":(urn:li:user:system,Ingress,PROD)"; + + protected static String userTwoUrnString = + "urn:li:" + userType + ":(urn:li:user:individual,UserA,DEV)"; protected static Urn userOneUrn = createFromString(userOneUrnString); protected static Urn userTwoUrn = createFromString(userTwoUrnString); protected static Urn unknownUrn = createFromString(unknownUrnString); - /** - * Some data jobs - */ - protected static Urn dataJobOneUrn = new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job1"); - protected static Urn dataJobTwoUrn = new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job2"); + /** Some data jobs */ + protected static Urn dataJobOneUrn = + new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job1"); - /** - * Some test relationships. - */ + protected static Urn dataJobTwoUrn = + new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job2"); + + /** Some test relationships. */ protected static String downstreamOf = "DownstreamOf"; + protected static String hasOwner = "HasOwner"; protected static String knowsUser = "KnowsUser"; protected static String produces = "Produces"; protected static String consumes = "Consumes"; - protected static Set<String> allRelationshipTypes = new HashSet<>(Arrays.asList(downstreamOf, hasOwner, knowsUser)); - - /** - * Some expected related entities. - */ - protected static RelatedEntity downstreamOfDatasetOneRelatedEntity = new RelatedEntity(downstreamOf, datasetOneUrnString); - protected static RelatedEntity downstreamOfDatasetTwoRelatedEntity = new RelatedEntity(downstreamOf, datasetTwoUrnString); - protected static RelatedEntity downstreamOfDatasetThreeRelatedEntity = new RelatedEntity(downstreamOf, datasetThreeUrnString); - protected static RelatedEntity downstreamOfDatasetFourRelatedEntity = new RelatedEntity(downstreamOf, datasetFourUrnString); - - protected static RelatedEntity hasOwnerDatasetOneRelatedEntity = new RelatedEntity(hasOwner, datasetOneUrnString); - protected static RelatedEntity hasOwnerDatasetTwoRelatedEntity = new RelatedEntity(hasOwner, datasetTwoUrnString); - protected static RelatedEntity hasOwnerDatasetThreeRelatedEntity = new RelatedEntity(hasOwner, datasetThreeUrnString); - protected static RelatedEntity hasOwnerDatasetFourRelatedEntity = new RelatedEntity(hasOwner, datasetFourUrnString); - protected static RelatedEntity hasOwnerUserOneRelatedEntity = new RelatedEntity(hasOwner, userOneUrnString); - protected static RelatedEntity hasOwnerUserTwoRelatedEntity = new RelatedEntity(hasOwner, userTwoUrnString); - - protected static RelatedEntity knowsUserOneRelatedEntity = new RelatedEntity(knowsUser, userOneUrnString); - protected static RelatedEntity knowsUserTwoRelatedEntity = new RelatedEntity(knowsUser, userTwoUrnString); - - /** - * Some relationship filters. - */ - protected static RelationshipFilter outgoingRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING); - protected static RelationshipFilter incomingRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING); - protected static RelationshipFilter undirectedRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.UNDIRECTED); - - /** - * Any source and destination type value. - */ + protected static Set<String> allRelationshipTypes = + new HashSet<>(Arrays.asList(downstreamOf, hasOwner, knowsUser)); + + /** Some expected related entities. 
*/ + protected static RelatedEntity downstreamOfDatasetOneRelatedEntity = + new RelatedEntity(downstreamOf, datasetOneUrnString); + + protected static RelatedEntity downstreamOfDatasetTwoRelatedEntity = + new RelatedEntity(downstreamOf, datasetTwoUrnString); + protected static RelatedEntity downstreamOfDatasetThreeRelatedEntity = + new RelatedEntity(downstreamOf, datasetThreeUrnString); + protected static RelatedEntity downstreamOfDatasetFourRelatedEntity = + new RelatedEntity(downstreamOf, datasetFourUrnString); + + protected static RelatedEntity hasOwnerDatasetOneRelatedEntity = + new RelatedEntity(hasOwner, datasetOneUrnString); + protected static RelatedEntity hasOwnerDatasetTwoRelatedEntity = + new RelatedEntity(hasOwner, datasetTwoUrnString); + protected static RelatedEntity hasOwnerDatasetThreeRelatedEntity = + new RelatedEntity(hasOwner, datasetThreeUrnString); + protected static RelatedEntity hasOwnerDatasetFourRelatedEntity = + new RelatedEntity(hasOwner, datasetFourUrnString); + protected static RelatedEntity hasOwnerUserOneRelatedEntity = + new RelatedEntity(hasOwner, userOneUrnString); + protected static RelatedEntity hasOwnerUserTwoRelatedEntity = + new RelatedEntity(hasOwner, userTwoUrnString); + + protected static RelatedEntity knowsUserOneRelatedEntity = + new RelatedEntity(knowsUser, userOneUrnString); + protected static RelatedEntity knowsUserTwoRelatedEntity = + new RelatedEntity(knowsUser, userTwoUrnString); + + /** Some relationship filters. */ + protected static RelationshipFilter outgoingRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING); + + protected static RelationshipFilter incomingRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING); + protected static RelationshipFilter undirectedRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.UNDIRECTED); + + /** Any source and destination type value. */ protected static @Nullable List<String> anyType = null; - /** - * Timeout used to test concurrent ops in doTestConcurrentOp. - */ + /** Timeout used to test concurrent ops in doTestConcurrentOp. */ protected Duration getTestConcurrentOpTimeout() { - return Duration.ofMinutes(1); + return Duration.ofMinutes(1); } @BeforeMethod public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -176,26 +189,26 @@ public void testStaticUrns() { } /** - * Provides the current GraphService instance to test. This is being called by the test method - * at most once. The serviced graph should be empty. + * Provides the current GraphService instance to test. This is being called by the test method at + * most once. The serviced graph should be empty. * * @return the GraphService instance to test * @throws Exception on failure */ @Nonnull - abstract protected GraphService getGraphService() throws Exception; + protected abstract GraphService getGraphService() throws Exception; /** - * Allows the specific GraphService test implementation to wait for GraphService writes to - * be synced / become available to reads. + * Allows the specific GraphService test implementation to wait for GraphService writes to be + * synced / become available to reads. 
* * @throws Exception on failure */ - abstract protected void syncAfterWrite() throws Exception; + protected abstract void syncAfterWrite() throws Exception; /** - * Calls getGraphService to retrieve the test GraphService and populates it - * with edges via `GraphService.addEdge`. + * Calls getGraphService to retrieve the test GraphService and populates it with edges via + * `GraphService.addEdge`. * * @return test GraphService * @throws Exception on failure @@ -203,19 +216,17 @@ public void testStaticUrns() { protected GraphService getPopulatedGraphService() throws Exception { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( + List<Edge> edges = + Arrays.asList( new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetTwoUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetThreeUrn, userTwoUrn, hasOwner, null, null, null, null, null), new Edge(datasetFourUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null), - new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null) - ); + new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null)); edges.forEach(service::addEdge); syncAfterWrite(); @@ -226,27 +237,24 @@ protected GraphService getPopulatedGraphService() throws Exception { protected GraphService getLineagePopulatedGraphService() throws Exception { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( + List<Edge> edges = + Arrays.asList( new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetTwoUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetThreeUrn, userTwoUrn, hasOwner, null, null, null, null, null), new Edge(datasetFourUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null), new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(dataJobOneUrn, datasetOneUrn, consumes, null, null, null, null, null), new Edge(dataJobOneUrn, datasetTwoUrn, consumes, null, null, null, null, null), new Edge(dataJobOneUrn, datasetThreeUrn, produces, null, null, null, null, null), new Edge(dataJobOneUrn, datasetFourUrn, produces, null, null, null, null, null), new Edge(dataJobTwoUrn, datasetOneUrn, consumes, null, null, null, null, null), new Edge(dataJobTwoUrn, datasetTwoUrn, consumes, null, null, null, null, null), - new Edge(dataJobTwoUrn, dataJobOneUrn, downstreamOf, null, null, null, null, null) - ); + new Edge(dataJobTwoUrn, dataJobOneUrn, downstreamOf, null, null, null, null, null)); edges.forEach(service::addEdge); syncAfterWrite(); @@ -254,8 +262,7 @@ protected GraphService getLineagePopulatedGraphService() throws Exception { return service; } - protected static @Nullable - Urn createFromString(@Nonnull String rawUrn) { + protected static @Nullable Urn createFromString(@Nonnull String rawUrn) { try { return 
Urn.createFromString(rawUrn); } catch (URISyntaxException e) { @@ -264,10 +271,12 @@ Urn createFromString(@Nonnull String rawUrn) { } protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, List<RelatedEntity> expected) { - assertEqualsAnyOrder(actual, new RelatedEntitiesResult(0, expected.size(), expected.size(), expected)); + assertEqualsAnyOrder( + actual, new RelatedEntitiesResult(0, expected.size(), expected.size(), expected)); } - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { assertEquals(actual.start, expected.start); assertEquals(actual.count, expected.count); assertEquals(actual.total, expected.total); @@ -276,141 +285,156 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected) { assertEquals( - actual.stream().sorted().collect(Collectors.toList()), - expected.stream().sorted().collect(Collectors.toList()) - ); + actual.stream().sorted().collect(Collectors.toList()), + expected.stream().sorted().collect(Collectors.toList())); } - protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected, Comparator<T> comparator) { + protected <T> void assertEqualsAnyOrder( + List<T> actual, List<T> expected, Comparator<T> comparator) { assertEquals( - actual.stream().sorted(comparator).collect(Collectors.toList()), - expected.stream().sorted(comparator).collect(Collectors.toList()) - ); + actual.stream().sorted(comparator).collect(Collectors.toList()), + expected.stream().sorted(comparator).collect(Collectors.toList())); } @DataProvider(name = "AddEdgeTests") public Object[][] getAddEdgeTests() { - return new Object[][]{ - new Object[]{ - Arrays.asList(), - Arrays.asList(), - Arrays.asList() - }, - new Object[]{ - Arrays.asList(new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)), - Arrays.asList(downstreamOfDatasetTwoRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity) - }, - new Object[]{ - Arrays.asList( - new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetTwoUrn, datasetThreeUrn, downstreamOf, null, null, null, null, null) - ), - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[]{ - Arrays.asList( - new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), - new Edge(datasetTwoUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null) - ), - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserTwoRelatedEntity - ), - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, - hasOwnerDatasetOneRelatedEntity, - hasOwnerDatasetTwoRelatedEntity, - knowsUserOneRelatedEntity - ) - }, - new Object[]{ - Arrays.asList( - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null) - ), - Arrays.asList(knowsUserOneRelatedEntity), - 
Arrays.asList(knowsUserOneRelatedEntity) - } + return new Object[][] { + new Object[] {Arrays.asList(), Arrays.asList(), Arrays.asList()}, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)), + Arrays.asList(downstreamOfDatasetTwoRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), + new Edge(datasetTwoUrn, datasetThreeUrn, downstreamOf, null, null, null, null, null)), + Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), + new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), + new Edge(datasetTwoUrn, userTwoUrn, hasOwner, null, null, null, null, null), + new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null)), + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, + hasOwnerUserTwoRelatedEntity, + knowsUserTwoRelatedEntity), + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserOneRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null)), + Arrays.asList(knowsUserOneRelatedEntity), + Arrays.asList(knowsUserOneRelatedEntity) + } }; } @Test(dataProvider = "AddEdgeTests") - public void testAddEdge(List<Edge> edges, List<RelatedEntity> expectedOutgoing, List<RelatedEntity> expectedIncoming) throws Exception { - GraphService service = getGraphService(); - - edges.forEach(service::addEdge); - syncAfterWrite(); - - RelatedEntitiesResult relatedOutgoing = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships, - 0, 100 - ); - assertEqualsAnyOrder(relatedOutgoing, expectedOutgoing); - - RelatedEntitiesResult relatedIncoming = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships, - 0, 100 - ); - assertEqualsAnyOrder(relatedIncoming, expectedIncoming); + public void testAddEdge( + List<Edge> edges, List<RelatedEntity> expectedOutgoing, List<RelatedEntity> expectedIncoming) + throws Exception { + GraphService service = getGraphService(); + + edges.forEach(service::addEdge); + syncAfterWrite(); + + RelatedEntitiesResult relatedOutgoing = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder(relatedOutgoing, expectedOutgoing); + + RelatedEntitiesResult relatedIncoming = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder(relatedIncoming, expectedIncoming); } @Test public void testPopulatedGraphService() throws Exception { - GraphService service = getPopulatedGraphService(); - - 
RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); - assertEqualsAnyOrder( - relatedOutgoingEntitiesBeforeRemove, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - RelatedEntitiesResult relatedIncomingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships, - 0, 100); - assertEqualsAnyOrder( - relatedIncomingEntitiesBeforeRemove, - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity, - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); + GraphService service = getPopulatedGraphService(); + + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedOutgoingEntitiesBeforeRemove, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, + knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity)); + RelatedEntitiesResult relatedIncomingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedIncomingEntitiesBeforeRemove, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, + hasOwnerDatasetFourRelatedEntity, + knowsUserOneRelatedEntity, + knowsUserTwoRelatedEntity)); } @Test public void testPopulatedGraphServiceGetLineage() throws Exception { GraphService service = getLineagePopulatedGraphService(); - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 0); assertEquals(upstreamLineage.getRelationships().size(), 0); - EntityLineageResult downstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); + EntityLineageResult downstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 3); assertEquals(downstreamLineage.getRelationships().size(), 3); - Map<Urn, LineageRelationship> relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + Map<Urn, LineageRelationship> relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); 
assertTrue(relationships.containsKey(datasetTwoUrn)); assertEquals(relationships.get(datasetTwoUrn).getType(), downstreamOf); assertTrue(relationships.containsKey(dataJobOneUrn)); @@ -421,22 +445,25 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 2); assertEquals(upstreamLineage.getRelationships().size(), 2); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetTwoUrn)); assertEquals(relationships.get(datasetTwoUrn).getType(), downstreamOf); assertTrue(relationships.containsKey(dataJobOneUrn)); assertEquals(relationships.get(dataJobOneUrn).getType(), produces); - downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); + downstreamLineage = + service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 0); assertEquals(downstreamLineage.getRelationships().size(), 0); upstreamLineage = service.getLineage(dataJobOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 2); assertEquals(upstreamLineage.getRelationships().size(), 2); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetOneUrn)); assertEquals(relationships.get(datasetOneUrn).getType(), consumes); assertTrue(relationships.containsKey(datasetTwoUrn)); @@ -445,8 +472,9 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { downstreamLineage = service.getLineage(dataJobOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 3); assertEquals(downstreamLineage.getRelationships().size(), 3); - relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetThreeUrn)); assertEquals(relationships.get(datasetThreeUrn).getType(), produces); assertTrue(relationships.containsKey(datasetFourUrn)); @@ -458,458 +486,425 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { @DataProvider(name = "FindRelatedEntitiesSourceEntityFilterTests") public Object[][] getFindRelatedEntitiesSourceEntityFilterTests() { return new Object[][] { - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - 
Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - }, - - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) - } + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesSourceEntityFilterTests") - public void testFindRelatedEntitiesSourceEntityFilter(Filter sourceEntityFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceEntityFilter( + Filter sourceEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - sourceEntityFilter, - EMPTY_FILTER, - relationshipTypes, - relationships, - expectedRelatedEntities - ); + sourceEntityFilter, + EMPTY_FILTER, + relationshipTypes, + relationships, + expectedRelatedEntities); } @DataProvider(name = 
"FindRelatedEntitiesDestinationEntityFilterTests") public Object[][] getFindRelatedEntitiesDestinationEntityFilterTests() { return new Object[][] { - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity) - }, - - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList() - }, - - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - } + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList(downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList(downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesDestinationEntityFilterTests") - public void testFindRelatedEntitiesDestinationEntityFilter(Filter destinationEntityFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationEntityFilter( + Filter destinationEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - EMPTY_FILTER, - destinationEntityFilter, - relationshipTypes, - relationships, - 
expectedRelatedEntities - ); + EMPTY_FILTER, + destinationEntityFilter, + relationshipTypes, + relationships, + expectedRelatedEntities); } private void doTestFindRelatedEntities( - final Filter sourceEntityFilter, - final Filter destinationEntityFilter, - List<String> relationshipTypes, - final RelationshipFilter relationshipFilter, - List<RelatedEntity> expectedRelatedEntities - ) throws Exception { + final Filter sourceEntityFilter, + final Filter destinationEntityFilter, + List<String> relationshipTypes, + final RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - anyType, sourceEntityFilter, - anyType, destinationEntityFilter, - relationshipTypes, relationshipFilter, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + anyType, + sourceEntityFilter, + anyType, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + 0, + 10); assertEqualsAnyOrder(relatedEntities, expectedRelatedEntities); } @DataProvider(name = "FindRelatedEntitiesSourceTypeTests") public Object[][] getFindRelatedEntitiesSourceTypeTests() { - return new Object[][]{ - new Object[] { - null, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - // "" used to be any type before v0.9.0, which is now encoded by null - new Object[] { - "", - Arrays.asList(downstreamOf), - outgoingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - incomingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - undirectedRelationships, - Collections.emptyList() - }, - - new Object[]{ - datasetType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[]{ - datasetType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[]{ - datasetType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[]{ - userType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList() - }, - new Object[]{ - userType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList() - }, - new Object[]{ - userType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList() - }, - - new Object[]{ - userType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[]{ - userType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList( - 
hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - new Object[]{ - userType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - } + return new Object[][] { + new Object[] { + null, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + + // "" used to be any type before v0.9.0, which is now encoded by null + new Object[] { + "", Arrays.asList(downstreamOf), outgoingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), incomingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), undirectedRelationships, Collections.emptyList() + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] {userType, Arrays.asList(downstreamOf), outgoingRelationships, Arrays.asList()}, + new Object[] {userType, Arrays.asList(downstreamOf), incomingRelationships, Arrays.asList()}, + new Object[] { + userType, Arrays.asList(downstreamOf), undirectedRelationships, Arrays.asList() + }, + new Object[] {userType, Arrays.asList(hasOwner), outgoingRelationships, Arrays.asList()}, + new Object[] { + userType, + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + }, + new Object[] { + userType, + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesSourceTypeTests") - public void testFindRelatedEntitiesSourceType(String entityTypeFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String entityTypeFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - entityTypeFilter != 
null ? ImmutableList.of(entityTypeFilter) : null, - anyType, - relationshipTypes, - relationships, - expectedRelatedEntities - ); + entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null, + anyType, + relationshipTypes, + relationships, + expectedRelatedEntities); } @DataProvider(name = "FindRelatedEntitiesDestinationTypeTests") public Object[][] getFindRelatedEntitiesDestinationTypeTests() { return new Object[][] { - new Object[] { - null, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[] { - "", - Arrays.asList(downstreamOf), - outgoingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - incomingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - undirectedRelationships, - Collections.emptyList() - }, - - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[] { - datasetType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - datasetType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - new Object[] { - datasetType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - - new Object[] { - userType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) - }, - new Object[] { - userType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - userType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) - } + new Object[] { + null, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { 
+ null, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + "", Arrays.asList(downstreamOf), outgoingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), incomingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), undirectedRelationships, Collections.emptyList() + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] {datasetType, Arrays.asList(hasOwner), outgoingRelationships, Arrays.asList()}, + new Object[] { + datasetType, + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + }, + new Object[] { + userType, + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) + }, + new Object[] {userType, Arrays.asList(hasOwner), incomingRelationships, Arrays.asList()}, + new Object[] { + userType, + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesDestinationTypeTests") - public void testFindRelatedEntitiesDestinationType(String entityTypeFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String entityTypeFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - anyType, - entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null, - relationshipTypes, - relationships, - expectedRelatedEntities - ); + anyType, + entityTypeFilter != null ? 
ImmutableList.of(entityTypeFilter) : null, + relationshipTypes, + relationships, + expectedRelatedEntities); } private void doTestFindRelatedEntities( - final List<String> sourceType, - final List<String> destinationType, - final List<String> relationshipTypes, - final RelationshipFilter relationshipFilter, - List<RelatedEntity> expectedRelatedEntities - ) throws Exception { + final List<String> sourceType, + final List<String> destinationType, + final List<String> relationshipTypes, + final RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - sourceType, EMPTY_FILTER, - destinationType, EMPTY_FILTER, - relationshipTypes, relationshipFilter, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + sourceType, + EMPTY_FILTER, + destinationType, + EMPTY_FILTER, + relationshipTypes, + relationshipFilter, + 0, + 10); assertEqualsAnyOrder(relatedEntities, expectedRelatedEntities); } - private void doTestFindRelatedEntitiesEntityType(@Nullable List<String> sourceType, - @Nullable List<String> destinationType, - @Nonnull String relationshipType, - @Nonnull RelationshipFilter relationshipFilter, - @Nonnull GraphService service, - @Nonnull RelatedEntity... expectedEntities) { - RelatedEntitiesResult actualEntities = service.findRelatedEntities( - sourceType, EMPTY_FILTER, - destinationType, EMPTY_FILTER, - Arrays.asList(relationshipType), relationshipFilter, - 0, 100 - ); + private void doTestFindRelatedEntitiesEntityType( + @Nullable List<String> sourceType, + @Nullable List<String> destinationType, + @Nonnull String relationshipType, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull GraphService service, + @Nonnull RelatedEntity... 
expectedEntities) { + RelatedEntitiesResult actualEntities = + service.findRelatedEntities( + sourceType, + EMPTY_FILTER, + destinationType, + EMPTY_FILTER, + Arrays.asList(relationshipType), + relationshipFilter, + 0, + 100); assertEqualsAnyOrder(actualEntities, Arrays.asList(expectedEntities)); } @@ -921,18 +916,41 @@ public void testFindRelatedEntitiesNullSourceType() throws Exception { assertNotNull(nullUrn); RelatedEntity nullRelatedEntity = new RelatedEntity(downstreamOf, nullUrn.toString()); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, null, downstreamOf, outgoingRelationships, service); - service.addEdge(new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null)); + service.addEdge( + new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + downstreamOfDatasetOneRelatedEntity); service.addEdge(new Edge(datasetOneUrn, nullUrn, downstreamOf, null, null, null, null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service, nullRelatedEntity); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, nullRelatedEntity, downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + ImmutableList.of("null"), + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity, + downstreamOfDatasetOneRelatedEntity); } @Test @@ -943,97 +961,143 @@ public void testFindRelatedEntitiesNullDestinationType() throws Exception { assertNotNull(nullUrn); RelatedEntity nullRelatedEntity = new RelatedEntity(downstreamOf, nullUrn.toString()); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, null, downstreamOf, outgoingRelationships, service); - service.addEdge(new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null)); + service.addEdge( + new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, 
downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + downstreamOfDatasetOneRelatedEntity); service.addEdge(new Edge(datasetOneUrn, nullUrn, downstreamOf, null, null, null, null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service, nullRelatedEntity); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, nullRelatedEntity, downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + ImmutableList.of("null"), + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity, + downstreamOfDatasetOneRelatedEntity); } @Test public void testFindRelatedEntitiesRelationshipTypes() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult allOutgoingRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100 - ); - assertEqualsAnyOrder( - allOutgoingRelatedEntities, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - - RelatedEntitiesResult allIncomingRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships, - 0, 100 - ); + RelatedEntitiesResult allOutgoingRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder( - allIncomingRelatedEntities, - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity, - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - - RelatedEntitiesResult allUnknownRelationshipTypeRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList("unknownRelationshipType", "unseenRelationshipType"), outgoingRelationships, - 0, 100 - ); + allOutgoingRelatedEntities, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, + knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity)); + + RelatedEntitiesResult allIncomingRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); assertEqualsAnyOrder( - allUnknownRelationshipTypeRelatedEntities, - Collections.emptyList() - ); - - RelatedEntitiesResult someUnknownRelationshipTypeRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList("unknownRelationshipType", downstreamOf), 
outgoingRelationships, - 0, 100 - ); + allIncomingRelatedEntities, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, + hasOwnerDatasetFourRelatedEntity, + knowsUserOneRelatedEntity, + knowsUserTwoRelatedEntity)); + + RelatedEntitiesResult allUnknownRelationshipTypeRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList("unknownRelationshipType", "unseenRelationshipType"), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder(allUnknownRelationshipTypeRelatedEntities, Collections.emptyList()); + + RelatedEntitiesResult someUnknownRelationshipTypeRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList("unknownRelationshipType", downstreamOf), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder( - someUnknownRelationshipTypeRelatedEntities, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - ); + someUnknownRelationshipTypeRelatedEntities, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)); } @Test public void testFindRelatedEntitiesNoRelationshipTypes() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Collections.emptyList(), outgoingRelationships, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Collections.emptyList(), + outgoingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Collections.emptyList()); - // does the test actually test something? is the Collections.emptyList() the only reason why we did not get any related urns? - RelatedEntitiesResult relatedEntitiesAll = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 10 - ); + // does the test actually test something? is the Collections.emptyList() the only reason why we + // did not get any related urns? 
+ RelatedEntitiesResult relatedEntitiesAll = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 10); assertNotEquals(relatedEntitiesAll.entities, Collections.emptyList()); } @@ -1042,21 +1106,29 @@ public void testFindRelatedEntitiesNoRelationshipTypes() throws Exception { public void testFindRelatedEntitiesAllFilters() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(userType), newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), outgoingRelationships, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(userType), + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Arrays.asList(hasOwnerUserOneRelatedEntity)); - relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(userType), newFilter("urn", userTwoUrnString), - Arrays.asList(hasOwner), incomingRelationships, - 0, 10 - ); + relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(userType), + newFilter("urn", userTwoUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Collections.emptyList()); } @@ -1065,21 +1137,29 @@ public void testFindRelatedEntitiesAllFilters() throws Exception { public void testFindRelatedEntitiesMultipleEntityTypes() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType, userType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(datasetType, userType), newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), outgoingRelationships, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType, userType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(datasetType, userType), + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Arrays.asList(hasOwnerUserOneRelatedEntity)); - relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType, userType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(datasetType, userType), newFilter("urn", userTwoUrnString), - Arrays.asList(hasOwner), incomingRelationships, - 0, 10 - ); + relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType, userType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(datasetType, userType), + newFilter("urn", userTwoUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Collections.emptyList()); } @@ -1089,161 +1169,227 @@ public void testFindRelatedEntitiesOffsetAndCount() throws Exception { GraphService service = getPopulatedGraphService(); // populated graph asserted in testPopulatedGraphService - RelatedEntitiesResult allRelatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType), 
EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100 - ); + RelatedEntitiesResult allRelatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType), + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); List<RelatedEntity> individualRelatedEntities = new ArrayList<>(); IntStream.range(0, allRelatedEntities.entities.size()) - .forEach(idx -> individualRelatedEntities.addAll( + .forEach( + idx -> + individualRelatedEntities.addAll( service.findRelatedEntities( - ImmutableList.of(datasetType), EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - idx, 1 - ).entities - )); + ImmutableList.of(datasetType), + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + idx, + 1) + .entities)); Assert.assertEquals(individualRelatedEntities, allRelatedEntities.entities); } @DataProvider(name = "RemoveEdgesFromNodeTests") public Object[][] getRemoveEdgesFromNodeTests() { return new Object[][] { - new Object[] { - datasetTwoUrn, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity), - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity), - Arrays.asList(), - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - datasetTwoUrn, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity), - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity), - Arrays.asList(), - }, - new Object[] { - datasetTwoUrn, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity), - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity), - Arrays.asList(), - Arrays.asList() - }, - - new Object[] { - userOneUrn, - Arrays.asList(hasOwner, knowsUser), - outgoingRelationships, - Arrays.asList(knowsUserTwoRelatedEntity), - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity), - Arrays.asList(), - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity) - }, - new Object[] { - userOneUrn, - Arrays.asList(hasOwner, knowsUser), - incomingRelationships, - Arrays.asList(knowsUserTwoRelatedEntity), - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity), - Arrays.asList(knowsUserTwoRelatedEntity), - Arrays.asList() - }, - new Object[] { - userOneUrn, - Arrays.asList(hasOwner, knowsUser), - undirectedRelationships, - Arrays.asList(knowsUserTwoRelatedEntity), - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity), - Arrays.asList(), - Arrays.asList() - } + new Object[] { + datasetTwoUrn, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity), + Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity), + Arrays.asList(), + Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + datasetTwoUrn, + Arrays.asList(downstreamOf), + 
incomingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity), + Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity), + Arrays.asList(), + }, + new Object[] { + datasetTwoUrn, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity), + Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity), + Arrays.asList(), + Arrays.asList() + }, + new Object[] { + userOneUrn, + Arrays.asList(hasOwner, knowsUser), + outgoingRelationships, + Arrays.asList(knowsUserTwoRelatedEntity), + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserTwoRelatedEntity), + Arrays.asList(), + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserTwoRelatedEntity) + }, + new Object[] { + userOneUrn, + Arrays.asList(hasOwner, knowsUser), + incomingRelationships, + Arrays.asList(knowsUserTwoRelatedEntity), + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserTwoRelatedEntity), + Arrays.asList(knowsUserTwoRelatedEntity), + Arrays.asList() + }, + new Object[] { + userOneUrn, + Arrays.asList(hasOwner, knowsUser), + undirectedRelationships, + Arrays.asList(knowsUserTwoRelatedEntity), + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserTwoRelatedEntity), + Arrays.asList(), + Arrays.asList() + } }; } @Test(dataProvider = "RemoveEdgesFromNodeTests") - public void testRemoveEdgesFromNode(@Nonnull Urn nodeToRemoveFrom, - @Nonnull List<String> relationTypes, - @Nonnull RelationshipFilter relationshipFilter, - List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove, - List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove, - List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove, - List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) throws Exception { + public void testRemoveEdgesFromNode( + @Nonnull Urn nodeToRemoveFrom, + @Nonnull List<String> relationTypes, + @Nonnull RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove, + List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove, + List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove, + List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) + throws Exception { GraphService service = getPopulatedGraphService(); List<String> allOtherRelationTypes = - allRelationshipTypes.stream() - .filter(relation -> !relationTypes.contains(relation)) - .collect(Collectors.toList()); + allRelationshipTypes.stream() + .filter(relation -> !relationTypes.contains(relation)) + .collect(Collectors.toList()); assertTrue(allOtherRelationTypes.size() > 0); - RelatedEntitiesResult actualOutgoingRelatedUrnsBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, outgoingRelationships, - 0, 100); - RelatedEntitiesResult actualIncomingRelatedUrnsBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, incomingRelationships, - 0, 100); - assertEqualsAnyOrder(actualOutgoingRelatedUrnsBeforeRemove, expectedOutgoingRelatedUrnsBeforeRemove); - assertEqualsAnyOrder(actualIncomingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove); + RelatedEntitiesResult 
actualOutgoingRelatedUrnsBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult actualIncomingRelatedUrnsBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + actualOutgoingRelatedUrnsBeforeRemove, expectedOutgoingRelatedUrnsBeforeRemove); + assertEqualsAnyOrder( + actualIncomingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove); // we expect these do not change - RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, outgoingRelationships, - 0, 100); - RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, incomingRelationships, - 0, 100); + RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + incomingRelationships, + 0, + 100); - service.removeEdgesFromNode( - nodeToRemoveFrom, - relationTypes, - relationshipFilter - ); + service.removeEdgesFromNode(nodeToRemoveFrom, relationTypes, relationshipFilter); syncAfterWrite(); - RelatedEntitiesResult actualOutgoingRelatedUrnsAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, outgoingRelationships, - 0, 100); - RelatedEntitiesResult actualIncomingRelatedUrnsAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, incomingRelationships, - 0, 100); - assertEqualsAnyOrder(actualOutgoingRelatedUrnsAfterRemove, expectedOutgoingRelatedUrnsAfterRemove); - assertEqualsAnyOrder(actualIncomingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); + RelatedEntitiesResult actualOutgoingRelatedUrnsAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult actualIncomingRelatedUrnsAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + actualOutgoingRelatedUrnsAfterRemove, expectedOutgoingRelatedUrnsAfterRemove); + assertEqualsAnyOrder( + actualIncomingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); // assert these did not change - RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, outgoingRelationships, - 0, 100); - 
RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, incomingRelationships, - 0, 100); - assertEqualsAnyOrder(relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove, relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove); - assertEqualsAnyOrder(relatedEntitiesOfOtherIncomingRelationTypesAfterRemove, relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove); + RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove, + relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove); + assertEqualsAnyOrder( + relatedEntitiesOfOtherIncomingRelationTypesAfterRemove, + relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove); } @Test @@ -1252,50 +1398,53 @@ public void testRemoveEdgesFromNodeNoRelationshipTypes() throws Exception { Urn nodeToRemoveFrom = datasetOneUrn; // populated graph asserted in testPopulatedGraphService - RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); - - // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported by all implementations - service.removeEdgesFromNode( - nodeToRemoveFrom, - Collections.emptyList(), - outgoingRelationships - ); - service.removeEdgesFromNode( - nodeToRemoveFrom, - Collections.emptyList(), - incomingRelationships - ); + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + + // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported + // by all implementations + service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), outgoingRelationships); + service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), incomingRelationships); syncAfterWrite(); - RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemove, relatedOutgoingEntitiesBeforeRemove); - // does the test actually test something? is the Collections.emptyList() the only reason why we did not see changes? + // does the test actually test something? 
is the Collections.emptyList() the only reason why we + // did not see changes? service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships); service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships); syncAfterWrite(); - RelatedEntitiesResult relatedOutgoingEntitiesAfterRemoveAll = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesAfterRemoveAll = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemoveAll, Collections.emptyList()); } @@ -1305,30 +1454,35 @@ public void testRemoveEdgesFromUnknownNode() throws Exception { Urn nodeToRemoveFrom = unknownUrn; // populated graph asserted in testPopulatedGraphService - RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); - // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported by all implementations + // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported + // by all implementations service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships); service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships); syncAfterWrite(); - RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemove, relatedOutgoingEntitiesBeforeRemove); } @@ -1341,17 +1495,18 @@ public void testRemoveNode() throws Exception { // assert the modified graph assertEqualsAnyOrder( - service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100 - ), - Arrays.asList( - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); + service.findRelatedEntities( + 
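Both removal tests here work around missing undirected support with two directed passes, as the inline comment notes. Once every GraphService implementation accepts an undirected filter, the pair presumably collapses into one call (a sketch; undirectedRelationships is the filter already defined for the data providers in this test base):

    // Current workaround: one pass per direction.
    service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), outgoingRelationships);
    service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), incomingRelationships);

    // Anticipated single-call form once undirected removal is supported everywhere:
    service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), undirectedRelationships);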
anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100), + Arrays.asList( + hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, + knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity)); } @Test @@ -1359,20 +1514,30 @@ public void testRemoveUnknownNode() throws Exception { GraphService service = getPopulatedGraphService(); // populated graph asserted in testPopulatedGraphService - RelatedEntitiesResult entitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult entitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); service.removeNode(unknownUrn); syncAfterWrite(); - RelatedEntitiesResult entitiesAfterRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult entitiesAfterRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(entitiesBeforeRemove, entitiesAfterRemove); } @@ -1385,87 +1550,113 @@ public void testClear() throws Exception { service.clear(); syncAfterWrite(); - // assert the modified graph: check all nodes related to upstreamOf and nextVersionOf edges again + // assert the modified graph: check all nodes related to upstreamOf and nextVersionOf edges + // again assertEqualsAnyOrder( - service.findRelatedEntities( - ImmutableList.of(datasetType), EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf), outgoingRelationships, - 0, 100 - ), - Collections.emptyList() - ); + service.findRelatedEntities( + ImmutableList.of(datasetType), + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf), + outgoingRelationships, + 0, + 100), + Collections.emptyList()); assertEqualsAnyOrder( - service.findRelatedEntities( - ImmutableList.of(userType), EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(hasOwner), outgoingRelationships, - 0, 100 - ), - Collections.emptyList() - ); + service.findRelatedEntities( + ImmutableList.of(userType), + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(hasOwner), + outgoingRelationships, + 0, + 100), + Collections.emptyList()); assertEqualsAnyOrder( - service.findRelatedEntities( - anyType, EMPTY_FILTER, - ImmutableList.of(userType), EMPTY_FILTER, - Arrays.asList(knowsUser), outgoingRelationships, - 0, 100 - ), - Collections.emptyList() - ); + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + ImmutableList.of(userType), + EMPTY_FILTER, + Arrays.asList(knowsUser), + outgoingRelationships, + 0, + 100), + Collections.emptyList()); } private List<Edge> getFullyConnectedGraph(int nodes, List<String> relationshipTypes) { - List<Edge> edges = new ArrayList<>(); - - for (int sourceNode = 1; sourceNode <= nodes; sourceNode++) { - for (int destinationNode = 1; destinationNode <= nodes; destinationNode++) { - for (String relationship : relationshipTypes) { - int sourceType = sourceNode % 3; - Urn source = createFromString("urn:li:type" + sourceType + ":(urn:li:node" + sourceNode + ")"); - int destinationType = destinationNode % 3; - Urn destination = 
createFromString("urn:li:type" + destinationType + ":(urn:li:node" + destinationNode + ")"); - - edges.add(new Edge(source, destination, relationship, null, null, null, null, null)); - } - } + List<Edge> edges = new ArrayList<>(); + + for (int sourceNode = 1; sourceNode <= nodes; sourceNode++) { + for (int destinationNode = 1; destinationNode <= nodes; destinationNode++) { + for (String relationship : relationshipTypes) { + int sourceType = sourceNode % 3; + Urn source = + createFromString("urn:li:type" + sourceType + ":(urn:li:node" + sourceNode + ")"); + int destinationType = destinationNode % 3; + Urn destination = + createFromString( + "urn:li:type" + destinationType + ":(urn:li:node" + destinationNode + ")"); + + edges.add(new Edge(source, destination, relationship, null, null, null, null, null)); + } } + } - return edges; + return edges; } @Test public void testConcurrentAddEdge() throws Exception { - final GraphService service = getGraphService(); - - // too many edges may cause too many threads throwing - // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked worker - int nodes = 5; - int relationshipTypes = 3; - List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList()); - List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships); - - List<Runnable> operations = edges.stream().map(edge -> new Runnable() { - @Override - public void run() { - service.addEdge(edge); - } - }).collect(Collectors.toList()); - - doTestConcurrentOp(operations); - syncAfterWrite(); - - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); - - Set<RelatedEntity> expectedRelatedEntities = edges.stream() - .map(edge -> new RelatedEntity(edge.getRelationshipType(), edge.getDestination().toString())) - .collect(Collectors.toSet()); - assertEquals(new HashSet<>(relatedEntities.entities), expectedRelatedEntities); + final GraphService service = getGraphService(); + + // too many edges may cause too many threads throwing + // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked + // worker + int nodes = 5; + int relationshipTypes = 3; + List<String> allRelationships = + IntStream.range(1, relationshipTypes + 1) + .mapToObj(id -> "relationship" + id) + .collect(Collectors.toList()); + List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships); + + List<Runnable> operations = + edges.stream() + .map( + edge -> + new Runnable() { + @Override + public void run() { + service.addEdge(edge); + } + }) + .collect(Collectors.toList()); + + doTestConcurrentOp(operations); + syncAfterWrite(); + + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); + + Set<RelatedEntity> expectedRelatedEntities = + edges.stream() + .map( + edge -> + new RelatedEntity(edge.getRelationshipType(), edge.getDestination().toString())) + .collect(Collectors.toSet()); + assertEquals(new HashSet<>(relatedEntities.entities), expectedRelatedEntities); } @Test @@ -1474,7 +1665,10 @@ public void testConcurrentRemoveEdgesFromNode() throws Exception { int nodes = 5; int relationshipTypes = 3; - List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + 
id).collect(Collectors.toList()); + List<String> allRelationships = + IntStream.range(1, relationshipTypes + 1) + .mapToObj(id -> "relationship" + id) + .collect(Collectors.toList()); List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships); // add fully connected graph @@ -1482,43 +1676,63 @@ public void testConcurrentRemoveEdgesFromNode() throws Exception { syncAfterWrite(); // assert the graph is there - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntities.entities.size(), nodes * relationshipTypes); // delete all edges concurrently - List<Runnable> operations = edges.stream().map(edge -> new Runnable() { - @Override - public void run() { - service.removeEdgesFromNode(edge.getSource(), Arrays.asList(edge.getRelationshipType()), outgoingRelationships); - } - }).collect(Collectors.toList()); + List<Runnable> operations = + edges.stream() + .map( + edge -> + new Runnable() { + @Override + public void run() { + service.removeEdgesFromNode( + edge.getSource(), + Arrays.asList(edge.getRelationshipType()), + outgoingRelationships); + } + }) + .collect(Collectors.toList()); doTestConcurrentOp(operations); syncAfterWrite(); // assert the graph is gone - RelatedEntitiesResult relatedEntitiesAfterDeletion = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntitiesAfterDeletion = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntitiesAfterDeletion.entities.size(), 0); - } + } @Test public void testConcurrentRemoveNodes() throws Exception { final GraphService service = getGraphService(); // too many edges may cause too many threads throwing - // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked worker + // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked + // worker int nodes = 5; int relationshipTypes = 3; - List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList()); + List<String> allRelationships = + IntStream.range(1, relationshipTypes + 1) + .mapToObj(id -> "relationship" + id) + .collect(Collectors.toList()); List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships); // add fully connected graph @@ -1526,106 +1740,131 @@ public void testConcurrentRemoveNodes() throws Exception { syncAfterWrite(); // assert the graph is there - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntities.entities.size(), nodes * relationshipTypes); // remove all nodes concurrently // nodes will be removed multiple times - List<Runnable> 
operations = edges.stream().map(edge -> new Runnable() { - @Override - public void run() { - service.removeNode(edge.getSource()); - } - }).collect(Collectors.toList()); + List<Runnable> operations = + edges.stream() + .map( + edge -> + new Runnable() { + @Override + public void run() { + service.removeNode(edge.getSource()); + } + }) + .collect(Collectors.toList()); doTestConcurrentOp(operations); syncAfterWrite(); // assert the graph is gone - RelatedEntitiesResult relatedEntitiesAfterDeletion = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntitiesAfterDeletion = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntitiesAfterDeletion.entities.size(), 0); } private void doTestConcurrentOp(List<Runnable> operations) throws Exception { - final Queue<Throwable> throwables = new ConcurrentLinkedQueue<>(); - final CountDownLatch started = new CountDownLatch(operations.size()); - final CountDownLatch finished = new CountDownLatch(operations.size()); - operations.forEach(operation -> new Thread(new Runnable() { - @Override - public void run() { - try { - started.countDown(); - - try { - if (!started.await(10, TimeUnit.SECONDS)) { - fail("Timed out waiting for all threads to start"); + final Queue<Throwable> throwables = new ConcurrentLinkedQueue<>(); + final CountDownLatch started = new CountDownLatch(operations.size()); + final CountDownLatch finished = new CountDownLatch(operations.size()); + operations.forEach( + operation -> + new Thread( + new Runnable() { + @Override + public void run() { + try { + started.countDown(); + + try { + if (!started.await(10, TimeUnit.SECONDS)) { + fail("Timed out waiting for all threads to start"); + } + } catch (InterruptedException e) { + fail("Got interrupted waiting for all threads to start"); + } + + operation.run(); + } catch (Throwable t) { + t.printStackTrace(); + throwables.add(t); + } + finished.countDown(); } - } catch (InterruptedException e) { - fail("Got interrupted waiting for all threads to start"); - } - - operation.run(); - } catch (Throwable t) { - t.printStackTrace(); - throwables.add(t); - } - finished.countDown(); - } - }).start()); - - assertTrue(finished.await(getTestConcurrentOpTimeout().toMillis(), TimeUnit.MILLISECONDS)); - throwables.forEach(throwable -> System.err.printf(System.currentTimeMillis() + ": exception occurred: %s%n", throwable)); - assertEquals(throwables.size(), 0); + }) + .start()); + + assertTrue(finished.await(getTestConcurrentOpTimeout().toMillis(), TimeUnit.MILLISECONDS)); + throwables.forEach( + throwable -> + System.err.printf( + System.currentTimeMillis() + ": exception occurred: %s%n", throwable)); + assertEquals(throwables.size(), 0); } @Test public void testPopulatedGraphServiceGetLineageMultihop() throws Exception { - GraphService service = getLineagePopulatedGraphService(); - - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2); - assertEquals(upstreamLineage.getTotal().intValue(), 0); - assertEquals(upstreamLineage.getRelationships().size(), 0); - - EntityLineageResult downstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); - - assertEquals(downstreamLineage.getTotal().intValue(), 5); - 
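doTestConcurrentOp above is a two-latch barrier: every worker counts down started and then waits for all peers to arrive before firing its operation, so the calls hit the GraphService as close to simultaneously as the scheduler allows, while finished plus the throwable queue gate the final assertions. The same idiom reduced to its essentials (an illustrative sketch; names are mine):

    import java.util.List;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    final class ConcurrentHarness {
      // Runs all operations as concurrently as possible; returns any failures.
      static Queue<Throwable> runAll(List<Runnable> operations) throws InterruptedException {
        Queue<Throwable> failures = new ConcurrentLinkedQueue<>();
        CountDownLatch started = new CountDownLatch(operations.size());
        CountDownLatch finished = new CountDownLatch(operations.size());
        for (Runnable op : operations) {
          new Thread(
                  () -> {
                    try {
                      started.countDown();
                      started.await(); // barrier: run only once every thread is ready
                      op.run();
                    } catch (Throwable t) {
                      failures.add(t);
                    } finally {
                      finished.countDown();
                    }
                  })
              .start();
        }
        finished.await(30, TimeUnit.SECONDS);
        return failures;
      }
    }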
assertEquals(downstreamLineage.getRelationships().size(), 5); - Map<Urn, LineageRelationship> relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); - assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(datasetThreeUrn)); - assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(datasetFourUrn)); - assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(dataJobTwoUrn)); - assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1); - - upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2); - assertEquals(upstreamLineage.getTotal().intValue(), 3); - assertEquals(upstreamLineage.getRelationships().size(), 3); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); - assertTrue(relationships.containsKey(datasetOneUrn)); - assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); - - downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); - assertEquals(downstreamLineage.getTotal().intValue(), 0); - assertEquals(downstreamLineage.getRelationships().size(), 0); + GraphService service = getLineagePopulatedGraphService(); + + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineage.getTotal().intValue(), 0); + assertEquals(upstreamLineage.getRelationships().size(), 0); + + EntityLineageResult downstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + + assertEquals(downstreamLineage.getTotal().intValue(), 5); + assertEquals(downstreamLineage.getRelationships().size(), 5); + Map<Urn, LineageRelationship> relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); + assertTrue(relationships.containsKey(datasetTwoUrn)); + assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(datasetThreeUrn)); + assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(datasetFourUrn)); + assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(dataJobOneUrn)); + assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(dataJobTwoUrn)); + assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1); + + upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineage.getTotal().intValue(), 3); + assertEquals(upstreamLineage.getRelationships().size(), 3); + relationships = + 
upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); + assertTrue(relationships.containsKey(datasetOneUrn)); + assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(datasetTwoUrn)); + assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(dataJobOneUrn)); + assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + + downstreamLineage = + service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + assertEquals(downstreamLineage.getTotal().intValue(), 0); + assertEquals(downstreamLineage.getRelationships().size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java index d8cd6ed05b2ec..481db53eafbbe 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java @@ -1,14 +1,9 @@ package com.linkedin.metadata.graph.dgraph; -import com.github.dockerjava.api.command.InspectContainerResponse; -import lombok.NonNull; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; -import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; -import org.testcontainers.containers.wait.strategy.WaitAllStrategy; -import org.testcontainers.containers.wait.strategy.WaitStrategy; -import org.testcontainers.utility.DockerImageName; +import static java.net.HttpURLConnection.HTTP_OK; +import static java.util.stream.Collectors.toSet; +import com.github.dockerjava.api.command.InspectContainerResponse; import java.time.Duration; import java.util.Arrays; import java.util.HashMap; @@ -16,223 +11,235 @@ import java.util.Set; import java.util.StringJoiner; import java.util.stream.Stream; - -import static java.net.HttpURLConnection.HTTP_OK; -import static java.util.stream.Collectors.toSet; +import lombok.NonNull; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.containers.wait.strategy.WaitAllStrategy; +import org.testcontainers.containers.wait.strategy.WaitStrategy; +import org.testcontainers.utility.DockerImageName; public class DgraphContainer extends GenericContainer<DgraphContainer> { - /** - * The image defaults to the official Dgraph image: <a href="https://hub.docker.com/_/dgraph/dgraph">Dgraph</a>. - */ - public static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("dgraph/dgraph"); + /** + * The image defaults to the official Dgraph image: <a + * href="https://hub.docker.com/_/dgraph/dgraph">Dgraph</a>. 
+ */ + public static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("dgraph/dgraph"); - private static final int HTTP_PORT = 8080; + private static final int HTTP_PORT = 8080; - private static final int GRPC_PORT = 9080; + private static final int GRPC_PORT = 9080; - private boolean started = false; + private boolean started = false; - @Override - protected void containerIsStarted(InspectContainerResponse containerInfo) { - super.containerIsStarted(containerInfo); - started = true; - } + @Override + protected void containerIsStarted(InspectContainerResponse containerInfo) { + super.containerIsStarted(containerInfo); + started = true; + } - @Override - protected void containerIsStopped(InspectContainerResponse containerInfo) { - super.containerIsStopped(containerInfo); - started = false; - } + @Override + protected void containerIsStopped(InspectContainerResponse containerInfo) { + super.containerIsStopped(containerInfo); + started = false; + } - private final Map<String, String> zeroArguments = new HashMap<>(); + private final Map<String, String> zeroArguments = new HashMap<>(); - private final Map<String, String> alphaArguments = new HashMap<>(); + private final Map<String, String> alphaArguments = new HashMap<>(); - /** - * Creates a DgraphContainer using a specific docker image. Connect the container - * to another DgraphContainer to form a cluster via `peerAlias`. - * - * @param dockerImageName The docker image to use. - */ - public DgraphContainer(@NonNull final DockerImageName dockerImageName) { - super(dockerImageName); + /** + * Creates a DgraphContainer using a specific docker image. Connect the container to another + * DgraphContainer to form a cluster via `peerAlias`. + * + * @param dockerImageName The docker image to use. + */ + public DgraphContainer(@NonNull final DockerImageName dockerImageName) { + super(dockerImageName); - dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); + dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); - WaitStrategy waitForLeader = new LogMessageWaitStrategy() - .withRegEx(".* Got Zero leader: .*\n"); - WaitStrategy waitForCluster = new LogMessageWaitStrategy() - .withRegEx(".* Server is ready\n"); - WaitStrategy waitForHttp = new HttpWaitStrategy() + WaitStrategy waitForLeader = new LogMessageWaitStrategy().withRegEx(".* Got Zero leader: .*\n"); + WaitStrategy waitForCluster = new LogMessageWaitStrategy().withRegEx(".* Server is ready\n"); + WaitStrategy waitForHttp = + new HttpWaitStrategy() .forPort(HTTP_PORT) .forStatusCodeMatching(response -> response == HTTP_OK); - this.waitStrategy = new WaitAllStrategy() + this.waitStrategy = + new WaitAllStrategy() .withStrategy(waitForLeader) .withStrategy(waitForCluster) .withStrategy(waitForHttp) .withStartupTimeout(Duration.ofMinutes(1)); - if (dockerImageName.getVersionPart().compareTo("v21.03.0") < 0) { - withAlphaArgument("whitelist", "0.0.0.0/0"); - } else { - withAlphaArgumentValues("security", "whitelist=0.0.0.0/0"); - } - - addExposedPorts(HTTP_PORT, GRPC_PORT); - } - - /** - * Adds an argument to the zero command. - * - * @param argument name of the argument - * @param value value, null if argument is a flag - * @return this - */ - public DgraphContainer withZeroArgument(@NonNull String argument, String value) { - addArgument(zeroArguments, argument, value); - return this; - } - - /** - * Adds a value to an argument list to the zero command. - * - * Some arguments of the zero command form a list of values, e.g. `audit` or `raft`. 
- * These values are separated by a ";". Setting multiple values for those arguments should - * be done via this method. - * - * @param argument name of the argument - * @param values values to add to the argument - * @return this - */ - public DgraphContainer withZeroArgumentValues(@NonNull String argument, @NonNull String... values) { - addArgumentValues(zeroArguments, argument, values); - return this; - } - - /** - * Adds an argument to the alpha command. - * - * @param argument name of the argument - * @param value value, null if argument is a flag - * @return this - */ - public DgraphContainer withAlphaArgument(@NonNull String argument, String value) { - addArgument(alphaArguments, argument, value); - return this; - } - - /** - * Adds a value to an argument list to the alpha command. - * - * Some arguments of the alpha command form a list of values, e.g. `audit` or `raft`. - * These values are separated by a ";". Setting multiple values for those arguments should - * be done via this method. - * - * @param argument name of the argument - * @param values values to add to the argument - * @return this - */ - public DgraphContainer withAlphaArgumentValues(@NonNull String argument, @NonNull String... values) { - addArgumentValues(alphaArguments, argument, values); - return this; - } - - private void addArgument(Map<String, String> arguments, @NonNull String argument, String value) { - if (started) { - throw new IllegalStateException("The container started already, cannot amend command arguments"); - } - - arguments.put(argument, value); - } - - private void addArgumentValues(Map<String, String> arguments, @NonNull String argument, @NonNull String... values) { - if (started) { - throw new IllegalStateException("The container started already, cannot amend command arguments"); - } - - StringJoiner joiner = new StringJoiner("; "); - Arrays.stream(values).forEach(joiner::add); - String value = joiner.toString(); - - if (arguments.containsKey(argument)) { - arguments.put(argument, arguments.get(argument) + "; " + value); - } else { - arguments.put(argument, value); - } - } - - /** - * Provides the command used to start the zero process. Command line arguments can be added - * by calling `withZeroArgument` and `withZeroArgumentValues` before calling this method. - * @return command string - */ - public @NonNull String getZeroCommand() { - return getCommand("dgraph zero", zeroArguments); - } - - /** - * Provides the command used to start the alpha process. Command line arguments can be added - * by calling `withAlphaArgument` and `withAlphaArgumentValues` before calling this method. 
- * @return command string - */ - public @NonNull String getAlphaCommand() { - return getCommand("dgraph alpha", alphaArguments); - } - - private @NonNull String getCommand(@NonNull String command, @NonNull Map<String, String> arguments) { - StringJoiner joiner = new StringJoiner(" --"); - - arguments.entrySet().stream() - .sorted(Map.Entry.comparingByKey()) - .map(argument -> { - if (argument.getValue() == null) { - return argument.getKey(); - } else { - return argument.getKey() + " \"" + argument.getValue() + "\""; - } - }).forEach(joiner::add); - - if (joiner.length() == 0) { - return command; - } else { - return command + " --" + joiner; - } - } - - @Override - public void start() { - String zeroCommand = this.getZeroCommand(); - String alhpaCommand = this.getAlphaCommand(); - this.setCommand("/bin/bash", "-c", zeroCommand + " & " + alhpaCommand); - super.start(); - } - - @Override - public Set<Integer> getLivenessCheckPortNumbers() { - return Stream.of(getHttpPort(), getGrpcPort()) - .map(this::getMappedPort) - .collect(toSet()); - } - - @Override - protected void configure() { } - - public int getHttpPort() { - return getMappedPort(HTTP_PORT); - } - - public int getGrpcPort() { - return getMappedPort(GRPC_PORT); - } - - public String getHttpUrl() { - return String.format("http://%s:%d", getHost(), getHttpPort()); - } - - public String getGrpcUrl() { - return String.format("%s:%d", getHost(), getGrpcPort()); - } - + if (dockerImageName.getVersionPart().compareTo("v21.03.0") < 0) { + withAlphaArgument("whitelist", "0.0.0.0/0"); + } else { + withAlphaArgumentValues("security", "whitelist=0.0.0.0/0"); + } + + addExposedPorts(HTTP_PORT, GRPC_PORT); + } + + /** + * Adds an argument to the zero command. + * + * @param argument name of the argument + * @param value value, null if argument is a flag + * @return this + */ + public DgraphContainer withZeroArgument(@NonNull String argument, String value) { + addArgument(zeroArguments, argument, value); + return this; + } + + /** + * Adds a value to an argument list to the zero command. + * + * <p>Some arguments of the zero command form a list of values, e.g. `audit` or `raft`. These + * values are separated by a ";". Setting multiple values for those arguments should be done via + * this method. + * + * @param argument name of the argument + * @param values values to add to the argument + * @return this + */ + public DgraphContainer withZeroArgumentValues( + @NonNull String argument, @NonNull String... values) { + addArgumentValues(zeroArguments, argument, values); + return this; + } + + /** + * Adds an argument to the alpha command. + * + * @param argument name of the argument + * @param value value, null if argument is a flag + * @return this + */ + public DgraphContainer withAlphaArgument(@NonNull String argument, String value) { + addArgument(alphaArguments, argument, value); + return this; + } + + /** + * Adds a value to an argument list to the alpha command. + * + * <p>Some arguments of the alpha command form a list of values, e.g. `audit` or `raft`. These + * values are separated by a ";". Setting multiple values for those arguments should be done via + * this method. + * + * @param argument name of the argument + * @param values values to add to the argument + * @return this + */ + public DgraphContainer withAlphaArgumentValues( + @NonNull String argument, @NonNull String... 
values) { + addArgumentValues(alphaArguments, argument, values); + return this; + } + + private void addArgument(Map<String, String> arguments, @NonNull String argument, String value) { + if (started) { + throw new IllegalStateException( + "The container started already, cannot amend command arguments"); + } + + arguments.put(argument, value); + } + + private void addArgumentValues( + Map<String, String> arguments, @NonNull String argument, @NonNull String... values) { + if (started) { + throw new IllegalStateException( + "The container started already, cannot amend command arguments"); + } + + StringJoiner joiner = new StringJoiner("; "); + Arrays.stream(values).forEach(joiner::add); + String value = joiner.toString(); + + if (arguments.containsKey(argument)) { + arguments.put(argument, arguments.get(argument) + "; " + value); + } else { + arguments.put(argument, value); + } + } + + /** + * Provides the command used to start the zero process. Command line arguments can be added by + * calling `withZeroArgument` and `withZeroArgumentValues` before calling this method. + * + * @return command string + */ + public @NonNull String getZeroCommand() { + return getCommand("dgraph zero", zeroArguments); + } + + /** + * Provides the command used to start the alpha process. Command line arguments can be added by + * calling `withAlphaArgument` and `withAlphaArgumentValues` before calling this method. + * + * @return command string + */ + public @NonNull String getAlphaCommand() { + return getCommand("dgraph alpha", alphaArguments); + } + + private @NonNull String getCommand( + @NonNull String command, @NonNull Map<String, String> arguments) { + StringJoiner joiner = new StringJoiner(" --"); + + arguments.entrySet().stream() + .sorted(Map.Entry.comparingByKey()) + .map( + argument -> { + if (argument.getValue() == null) { + return argument.getKey(); + } else { + return argument.getKey() + " \"" + argument.getValue() + "\""; + } + }) + .forEach(joiner::add); + + if (joiner.length() == 0) { + return command; + } else { + return command + " --" + joiner; + } + } + + @Override + public void start() { + String zeroCommand = this.getZeroCommand(); + String alhpaCommand = this.getAlphaCommand(); + this.setCommand("/bin/bash", "-c", zeroCommand + " & " + alhpaCommand); + super.start(); + } + + @Override + public Set<Integer> getLivenessCheckPortNumbers() { + return Stream.of(getHttpPort(), getGrpcPort()).map(this::getMappedPort).collect(toSet()); + } + + @Override + protected void configure() {} + + public int getHttpPort() { + return getMappedPort(HTTP_PORT); + } + + public int getGrpcPort() { + return getMappedPort(GRPC_PORT); + } + + public String getHttpUrl() { + return String.format("http://%s:%d", getHost(), getHttpPort()); + } + + public String getGrpcUrl() { + return String.format("%s:%d", getHost(), getGrpcPort()); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java index abf9bf532ddd8..40b8e83b56d03 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java @@ -1,5 +1,12 @@ package com.linkedin.metadata.graph.dgraph; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; +import static 
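getCommand above sorts arguments by key, renders value-less entries as bare flags, quotes values, and joins everything with " --", while addArgumentValues accumulates repeated values for one key with "; ". Concretely (a sketch; the second whitelist value is an arbitrary illustration, the first is the constructor's own default for v21.03.0 and later):

    DgraphContainer container =
        new DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0"));
    container.withAlphaArgumentValues("security", "whitelist=10.0.0.0/8");
    // Values for the same key accumulate with "; "; keys are sorted; values are quoted:
    assert container.getAlphaCommand()
        .equals("dgraph alpha --security \"whitelist=0.0.0.0/0; whitelist=10.0.0.0/8\"");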
com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.GraphServiceTestBase; @@ -16,15 +23,6 @@ import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; import io.grpc.MethodDescriptor; -import lombok.extern.slf4j.Slf4j; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testng.annotations.AfterClass; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.time.Duration; import java.util.Arrays; import java.util.Collections; @@ -32,89 +30,94 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testng.annotations.AfterClass; +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") @Slf4j public class DgraphGraphServiceTest extends GraphServiceTestBase { - private ManagedChannel _channel; - private DgraphGraphService _service; - private DgraphContainer _container; - - @Override - protected Duration getTestConcurrentOpTimeout() { - return Duration.ofMinutes(5); - } - - @BeforeClass - public void setup() { - _container = new DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0")) - .withTmpFs(Collections.singletonMap("/dgraph", "rw,noexec,nosuid,size=1g")) - .withStartupTimeout(Duration.ofMinutes(1)) - .withStartupAttempts(3); - checkContainerEngine(_container.getDockerClient()); - _container.start(); - Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(log); - _container.followOutput(logConsumer); - } - - @BeforeMethod - public void connect() { - LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); - _channel = ManagedChannelBuilder - .forAddress(_container.getHost(), _container.getGrpcPort()) - .usePlaintext() - .build(); - - // https://discuss.dgraph.io/t/dgraph-java-client-setting-deadlines-per-call/3056 - ClientInterceptor timeoutInterceptor = new ClientInterceptor() { - @Override - public <REQ, RESP> ClientCall<REQ, RESP> interceptCall( - MethodDescriptor<REQ, RESP> method, CallOptions callOptions, Channel next) { - return next.newCall(method, callOptions.withDeadlineAfter(30, TimeUnit.SECONDS)); - } + private ManagedChannel _channel; + private DgraphGraphService _service; + private DgraphContainer _container; + + @Override + protected Duration getTestConcurrentOpTimeout() { + return Duration.ofMinutes(5); + } + + @BeforeClass + public void setup() { + _container = + new 
DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0")) + .withTmpFs(Collections.singletonMap("/dgraph", "rw,noexec,nosuid,size=1g")) + .withStartupTimeout(Duration.ofMinutes(1)) + .withStartupAttempts(3); + checkContainerEngine(_container.getDockerClient()); + _container.start(); + Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(log); + _container.followOutput(logConsumer); + } + + @BeforeMethod + public void connect() { + LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); + _channel = + ManagedChannelBuilder.forAddress(_container.getHost(), _container.getGrpcPort()) + .usePlaintext() + .build(); + + // https://discuss.dgraph.io/t/dgraph-java-client-setting-deadlines-per-call/3056 + ClientInterceptor timeoutInterceptor = + new ClientInterceptor() { + @Override + public <REQ, RESP> ClientCall<REQ, RESP> interceptCall( + MethodDescriptor<REQ, RESP> method, CallOptions callOptions, Channel next) { + return next.newCall(method, callOptions.withDeadlineAfter(30, TimeUnit.SECONDS)); + } }; - DgraphGrpc.DgraphStub stub = DgraphGrpc.newStub(_channel).withInterceptors(timeoutInterceptor); - _service = new DgraphGraphService(lineageRegistry, new DgraphClient(stub)); - } - - @AfterMethod - public void disconnect() throws InterruptedException { - try { - _channel.shutdownNow(); - _channel.awaitTermination(10, TimeUnit.SECONDS); - } finally { - _channel = null; - _service = null; - } - } - - @AfterClass - public void tearDown() { - _container.stop(); - } - - @Nonnull - @Override - protected GraphService getGraphService() { - _service.clear(); - return _service; + DgraphGrpc.DgraphStub stub = DgraphGrpc.newStub(_channel).withInterceptors(timeoutInterceptor); + _service = new DgraphGraphService(lineageRegistry, new DgraphClient(stub)); + } + + @AfterMethod + public void disconnect() throws InterruptedException { + try { + _channel.shutdownNow(); + _channel.awaitTermination(10, TimeUnit.SECONDS); + } finally { + _channel = null; + _service = null; } - - @Override - protected void syncAfterWrite() { } - - @Test - public void testGetSchema() { - DgraphSchema schema = DgraphGraphService.getSchema("{\n" + } + + @AfterClass + public void tearDown() { + _container.stop(); + } + + @Nonnull + @Override + protected GraphService getGraphService() { + _service.clear(); + return _service; + } + + @Override + protected void syncAfterWrite() {} + + @Test + public void testGetSchema() { + DgraphSchema schema = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -156,45 +159,69 @@ public void testGetSchema() { + " }\n" + " ]\n" + " }"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); + assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); - assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); - }}); - - assertEquals(schema.getFields("ns:typeOne"), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); - assertEquals(schema.getFields("ns:typeTwo"), new HashSet<>(Arrays.asList("PredTwo"))); - assertEquals(schema.getFields("ns:unknown"), Collections.emptySet()); - - schema.ensureField("newType", "newField"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField"))); - 
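The timeout interceptor in connect() applies a fresh 30-second deadline to each RPC rather than a channel-wide deadline, per the linked Dgraph discussion: a deadline attached at channel-build time would start ticking immediately and eventually expire for long-lived stubs. The same grpc-java idiom as a standalone sketch (class name is mine):

    import io.grpc.CallOptions;
    import io.grpc.Channel;
    import io.grpc.ClientCall;
    import io.grpc.ClientInterceptor;
    import io.grpc.MethodDescriptor;
    import java.util.concurrent.TimeUnit;

    // Applies a per-call deadline, computed when each call starts rather than
    // when the channel is built.
    final class PerCallDeadlineInterceptor implements ClientInterceptor {
      private final long seconds;

      PerCallDeadlineInterceptor(long seconds) {
        this.seconds = seconds;
      }

      @Override
      public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
          MethodDescriptor<ReqT, RespT> method, CallOptions callOptions, Channel next) {
        return next.newCall(method, callOptions.withDeadlineAfter(seconds, TimeUnit.SECONDS));
      }
    }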
assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + } + }); + + assertEquals( + schema.getFields("ns:typeOne"), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); + assertEquals(schema.getFields("ns:typeTwo"), new HashSet<>(Arrays.asList("PredTwo"))); + assertEquals(schema.getFields("ns:unknown"), Collections.emptySet()); + + schema.ensureField("newType", "newField"); + assertEquals( + schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField"))); + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - - schema.ensureField("ns:typeOne", "otherField"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); - assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + } + }); + + schema.ensureField("ns:typeOne", "otherField"); + assertEquals( + schema.getFields(), + new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo", "otherField"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - - schema.ensureField("ns:typeTwo", "PredTwo"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); - assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + } + }); + + schema.ensureField("ns:typeTwo", "PredTwo"); + assertEquals( + schema.getFields(), + new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo", "otherField"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - } - - @Test - public void testGetSchemaIncomplete() { - DgraphSchema schemaWithNonListTypes = DgraphGraphService.getSchema("{\n" + } + }); + } + + @Test + public void testGetSchemaIncomplete() { + DgraphSchema schemaWithNonListTypes = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -208,9 +235,11 @@ public void testGetSchemaIncomplete() { + " ],\n" + " \"types\": \"not a list\"\n" + " }"); - assertTrue(schemaWithNonListTypes.isEmpty(), "Should be empty if type field is not a list"); + assertTrue(schemaWithNonListTypes.isEmpty(), "Should be empty if type field is not a list"); - DgraphSchema schemaWithoutTypes = DgraphGraphService.getSchema("{\n" + DgraphSchema schemaWithoutTypes = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -223,570 +252,575 @@ public void testGetSchemaIncomplete() { + " }\n" + " ]" + " }"); - assertTrue(schemaWithoutTypes.isEmpty(), "Should be empty if no type field exists"); - - DgraphSchema schemaWithNonListSchema = DgraphGraphService.getSchema("{\n" - + " \"schema\": \"not a list\"" - + " }"); - assertTrue(schemaWithNonListSchema.isEmpty(), "Should be empty if schema field is not a list"); - - DgraphSchema schemaWithoutSchema = DgraphGraphService.getSchema("{ }"); - 
assertTrue(schemaWithoutSchema.isEmpty(), "Should be empty if no schema field exists"); - } - - @Test - public void testGetSchemaDgraph() { - // TODO: test that dgraph schema gets altered - } - - @Test - public void testGetFilterConditions() { - // no filters - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList()), - "" - ); - - // source type not supported without restricting relationship types - // there must be as many relation type filter names as there are relationships - assertEquals( - DgraphGraphService.getFilterConditions( - "sourceTypeFilter", - null, - Collections.emptyList(), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceTypeFilter))\n" - + " )\n" - + " )" - ); - - // destination type - assertEquals( - DgraphGraphService.getFilterConditions( - null, - "destinationTypeFilter", - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationTypeFilter)\n" - + " )" - ); - - // source filter not supported without restricting relationship types - // there must be as many relation type filter names as there are relationships - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter))\n" - + " )\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter1)) AND " - + "uid_in(<relationship>, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter1", "RelationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceFilter1)) AND " - + "uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(RelationshipTypeFilter2) AND uid_in(<relationship2>, uid(sourceFilter1)) AND " - + "uid_in(<relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - - // destination filters - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Arrays.asList("destinationFilter"), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationFilter)\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Arrays.asList("destinationFilter1", "destinationFilter2"), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2)\n" - + " )" - ); - - // relationship type filters require relationship types - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - 
Collections.emptyList(), - Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " (\n" - + " uid(relationshipTypeFilter1) OR\n" - + " uid(relationshipTypeFilter2)\n" - + " )\n" - + " )" - ); - - // all filters at once - assertEquals( - DgraphGraphService.getFilterConditions( - "sourceTypeFilter", - "destinationTypeFilter", - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("destinationFilter1", "destinationFilter2"), - Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " uid(destinationTypeFilter) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceTypeFilter)) AND " - + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipTypeFilter2) AND uid_in(<relationship2>, uid(sourceTypeFilter)) AND " - + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - - // TODO: check getFilterConditions throws an exception when relationshipTypes and - // relationshipTypeFilterNames do not have the same size - } - - @Test - public void testGetRelationships() { - // no relationships - assertEquals( - DgraphGraphService.getRelationships( - null, - Collections.emptyList(), - Collections.emptyList()), - Collections.emptyList() - ); - - // one relationship but no filters - assertEquals( - DgraphGraphService.getRelationships( - null, - Collections.emptyList(), - Arrays.asList("relationship") - ), - Arrays.asList("<relationship> { <uid> }") - ); - - // more relationship and source type filter - assertEquals( - DgraphGraphService.getRelationships( - "sourceTypeFilter", - Collections.emptyList(), - Arrays.asList("relationship1", "~relationship2") - ), - Arrays.asList( - "<relationship1> @filter( uid(sourceTypeFilter) ) { <uid> }", - "<~relationship2> @filter( uid(sourceTypeFilter) ) { <uid> }" - ) - ); - - // more relationship, source type and source filters - assertEquals( - DgraphGraphService.getRelationships( - "sourceTypeFilter", - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("relationship1", "~relationship2") - ), - Arrays.asList( - "<relationship1> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", - "<~relationship2> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }" - ) - ); - - // more relationship and only source filters - assertEquals( - DgraphGraphService.getRelationships( - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("relationship1", "~relationship2", "relationship3") - ), - Arrays.asList( - "<relationship1> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", - "<~relationship2> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", - "<relationship3> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }" - ) - ); - - // two relationship and only one source filter - assertEquals( - DgraphGraphService.getRelationships( - null, - Arrays.asList("sourceFilter"), - Arrays.asList("~relationship1", "~relationship2") - ), - Arrays.asList( - "<~relationship1> @filter( uid(sourceFilter) ) { <uid> }", - "<~relationship2> @filter( uid(sourceFilter) ) { <uid> }" - ) - ); - } - - @Test - public void 
testGetRelationshipCondition() { - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - null, - Collections.emptyList()), - "uid(relationshipFilter)" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Collections.emptyList()), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Arrays.asList("destinationFilter")), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " - + "uid_in(<relationship>, uid(destinationFilter))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Arrays.asList("destinationFilter1", "destinationFilter2")), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " - + "uid_in(<relationship>, uid(destinationFilter1)) AND uid_in(<relationship>, uid(destinationFilter2))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - null, - Arrays.asList("destinationFilter1", "destinationFilter2")), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationFilter1)) AND " - + "uid_in(<relationship>, uid(destinationFilter2))" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesOutgoing() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.OUTGOING, - "query {\n" - + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<~relationship1>))\n" - + " relationshipType2 as var(func: has(<~relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " - + "first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<~relationship1>, uid(sourceType)) AND " - + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<~relationship2>, uid(sourceType)) AND " - + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " <urn>\n" - + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " }\n" - + "}" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesIncoming() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.INCOMING, - "query {\n" - + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(<key>, 
\"source-key\"))\n" - + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<relationship1>))\n" - + " relationshipType2 as var(func: has(<relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " - + "first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " - + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " - + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " <urn>\n" - + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " }\n" - + "}" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesUndirected() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.UNDIRECTED, - "query {\n" - + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<relationship1>))\n" - + " relationshipType2 as var(func: has(<relationship2>))\n" - + " relationshipType3 as var(func: has(<~relationship1>))\n" - + " relationshipType4 as var(func: has(<~relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, " - + "relationshipType1, relationshipType2, relationshipType3, relationshipType4), first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " - + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " - + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType3) AND uid_in(<~relationship1>, uid(sourceType)) AND " - + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType4) AND uid_in(<~relationship2>, uid(sourceType)) AND " - + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " <urn>\n" - + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND 
uid(sourceFilter2) ) { <uid> }\n" - + " }\n" - + "}" - ); - } - - private void doTestGetQueryForRelatedEntitiesDirection(@Nonnull RelationshipDirection direction, @Nonnull String expectedQuery) { - assertEquals( - DgraphGraphService.getQueryForRelatedEntities( - ImmutableList.of("sourceType"), - newFilter(new HashMap<String, String>() {{ - put("urn", "urn:ns:type:source-key"); - put("key", "source-key"); - }}), - ImmutableList.of("destinationType"), - newFilter(new HashMap<String, String>() {{ + assertTrue(schemaWithoutTypes.isEmpty(), "Should be empty if no type field exists"); + + DgraphSchema schemaWithNonListSchema = + DgraphGraphService.getSchema("{\n" + " \"schema\": \"not a list\"" + " }"); + assertTrue(schemaWithNonListSchema.isEmpty(), "Should be empty if schema field is not a list"); + + DgraphSchema schemaWithoutSchema = DgraphGraphService.getSchema("{ }"); + assertTrue(schemaWithoutSchema.isEmpty(), "Should be empty if no schema field exists"); + } + + @Test + public void testGetSchemaDgraph() { + // TODO: test that dgraph schema gets altered + } + + @Test + public void testGetFilterConditions() { + // no filters + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()), + ""); + + // source type not supported without restricting relationship types + // there must be as many relation type filter names as there are relationships + assertEquals( + DgraphGraphService.getFilterConditions( + "sourceTypeFilter", + null, + Collections.emptyList(), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceTypeFilter))\n" + + " )\n" + + " )"); + + // destination type + assertEquals( + DgraphGraphService.getFilterConditions( + null, + "destinationTypeFilter", + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + " uid(destinationTypeFilter)\n" + " )"); + + // source filter not supported without restricting relationship types + // there must be as many relation type filter names as there are relationships + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter))\n" + + " )\n" + + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter1", "sourceFilter2"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter1)) AND " + + "uid_in(<relationship>, uid(sourceFilter2))\n" + + " )\n" + + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter1", "sourceFilter2"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter1", "RelationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceFilter1)) AND " + + "uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(RelationshipTypeFilter2) AND uid_in(<relationship2>, 
uid(sourceFilter1)) AND " + + "uid_in(<relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " )"); + + // destination filters + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Arrays.asList("destinationFilter"), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + " uid(destinationFilter)\n" + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Arrays.asList("destinationFilter1", "destinationFilter2"), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2)\n" + + " )"); + + // relationship type filters require relationship types + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Collections.emptyList(), + Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " (\n" + + " uid(relationshipTypeFilter1) OR\n" + + " uid(relationshipTypeFilter2)\n" + + " )\n" + + " )"); + + // all filters at once + assertEquals( + DgraphGraphService.getFilterConditions( + "sourceTypeFilter", + "destinationTypeFilter", + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("destinationFilter1", "destinationFilter2"), + Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " uid(destinationTypeFilter) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceTypeFilter)) AND " + + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipTypeFilter2) AND uid_in(<relationship2>, uid(sourceTypeFilter)) AND " + + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " )"); + + // TODO: check getFilterConditions throws an exception when relationshipTypes and + // relationshipTypeFilterNames do not have the same size + } + + @Test + public void testGetRelationships() { + // no relationships + assertEquals( + DgraphGraphService.getRelationships(null, Collections.emptyList(), Collections.emptyList()), + Collections.emptyList()); + + // one relationship but no filters + assertEquals( + DgraphGraphService.getRelationships( + null, Collections.emptyList(), Arrays.asList("relationship")), + Arrays.asList("<relationship> { <uid> }")); + + // more relationships and source type filter + assertEquals( + DgraphGraphService.getRelationships( + "sourceTypeFilter", + Collections.emptyList(), + Arrays.asList("relationship1", "~relationship2")), + Arrays.asList( + "<relationship1> @filter( uid(sourceTypeFilter) ) { <uid> }", + "<~relationship2> @filter( uid(sourceTypeFilter) ) { <uid> }")); + + // more relationships, source type and source filters + assertEquals( + DgraphGraphService.getRelationships( + "sourceTypeFilter", + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("relationship1", "~relationship2")), + Arrays.asList( + "<relationship1> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", + "<~relationship2> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }")); + + // more relationships and only source filters + assertEquals( + DgraphGraphService.getRelationships( + null, + 
Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("relationship1", "~relationship2", "relationship3")), + Arrays.asList( + "<relationship1> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", + "<~relationship2> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", + "<relationship3> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }")); + + // two relationship and only one source filter + assertEquals( + DgraphGraphService.getRelationships( + null, Arrays.asList("sourceFilter"), Arrays.asList("~relationship1", "~relationship2")), + Arrays.asList( + "<~relationship1> @filter( uid(sourceFilter) ) { <uid> }", + "<~relationship2> @filter( uid(sourceFilter) ) { <uid> }")); + } + + @Test + public void testGetRelationshipCondition() { + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", "relationshipFilter", null, Collections.emptyList()), + "uid(relationshipFilter)"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", "relationshipFilter", "destinationTypeFilter", Collections.emptyList()), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + "destinationTypeFilter", + Arrays.asList("destinationFilter")), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " + + "uid_in(<relationship>, uid(destinationFilter))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + "destinationTypeFilter", + Arrays.asList("destinationFilter1", "destinationFilter2")), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " + + "uid_in(<relationship>, uid(destinationFilter1)) AND uid_in(<relationship>, uid(destinationFilter2))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + null, + Arrays.asList("destinationFilter1", "destinationFilter2")), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationFilter1)) AND " + + "uid_in(<relationship>, uid(destinationFilter2))"); + } + + @Test + public void testGetQueryForRelatedEntitiesOutgoing() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.OUTGOING, + "query {\n" + + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<~relationship1>))\n" + + " relationshipType2 as var(func: has(<~relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " + + "first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipType1) AND uid_in(<~relationship1>, uid(sourceType)) AND " + + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType2) AND uid_in(<~relationship2>, uid(sourceType)) AND " + + "uid_in(<~relationship2>, uid(sourceFilter1)) AND 
uid_in(<~relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " ) {\n" + + " <urn>\n" + + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " }\n" + + "}"); + } + + @Test + public void testGetQueryForRelatedEntitiesIncoming() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.INCOMING, + "query {\n" + + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<relationship1>))\n" + + " relationshipType2 as var(func: has(<relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " + + "first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " + + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " + + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " ) {\n" + + " <urn>\n" + + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " }\n" + + "}"); + } + + @Test + public void testGetQueryForRelatedEntitiesUndirected() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.UNDIRECTED, + "query {\n" + + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<relationship1>))\n" + + " relationshipType2 as var(func: has(<relationship2>))\n" + + " relationshipType3 as var(func: has(<~relationship1>))\n" + + " relationshipType4 as var(func: has(<~relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, " + + "relationshipType1, relationshipType2, relationshipType3, relationshipType4), first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " + + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " + + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType3) AND uid_in(<~relationship1>, 
uid(sourceType)) AND " + + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType4) AND uid_in(<~relationship2>, uid(sourceType)) AND " + + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " ) {\n" + + " <urn>\n" + + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " }\n" + + "}"); + } + + private void doTestGetQueryForRelatedEntitiesDirection( + @Nonnull RelationshipDirection direction, @Nonnull String expectedQuery) { + assertEquals( + DgraphGraphService.getQueryForRelatedEntities( + ImmutableList.of("sourceType"), + newFilter( + new HashMap<String, String>() { + { + put("urn", "urn:ns:type:source-key"); + put("key", "source-key"); + } + }), + ImmutableList.of("destinationType"), + newFilter( + new HashMap<String, String>() { + { + put("urn", "urn:ns:type:dest-key"); + put("key", "dest-key"); + } + }), + Arrays.asList("relationship1", "relationship2"), + newRelationshipFilter(EMPTY_FILTER, direction), + 0, + 100), + expectedQuery); + } + + @Test + public void testGetDestinationUrnsFromResponseData() { + // no results + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put("result", Collections.emptyList()); + } + }), + Collections.emptyList()); + + // one result and one relationship with two sources + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put( + "result", + Arrays.asList( + new HashMap<String, Object>() { + { put("urn", "urn:ns:type:dest-key"); - put("key", "dest-key"); - }}), - Arrays.asList("relationship1", "relationship2"), - newRelationshipFilter(EMPTY_FILTER, direction), - 0, 100 - ), - expectedQuery - ); - } - - @Test - public void testGetDestinationUrnsFromResponseData() { - // no results - assertEquals( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Collections.emptyList()); - }} - ), - Collections.emptyList() - ); - - // one result and one relationship with two sources - assertEquals( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Arrays.asList( - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key"); - put("~pred", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }} - )); - }} - ), - Arrays.asList(new RelatedEntity("pred", "urn:ns:type:dest-key")) - ); - - // multiple results and one relationship - assertEquals( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Arrays.asList( - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-1"); - put("~pred", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-2"); - put("~pred", Arrays.asList( - new HashMap<String, Object>() 
{{ - put("uid", "0x2"); - }} - )); - }} - )); - }} - ), - Arrays.asList( - new RelatedEntity("pred", "urn:ns:type:dest-key-1"), - new RelatedEntity("pred", "urn:ns:type:dest-key-2") - ) - ); - - // multiple results and relationships - assertEqualsAnyOrder( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Arrays.asList( - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-1"); - put("~pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-2"); - put("~pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-3"); - put("pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x3"); - }} - )); - put("~pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x4"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-4"); - put("pred2", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x5"); - }} - )); - }} - )); - }} - ), - Arrays.asList( - new RelatedEntity("pred1", "urn:ns:type:dest-key-1"), - new RelatedEntity("pred1", "urn:ns:type:dest-key-2"), - new RelatedEntity("pred1", "urn:ns:type:dest-key-3"), - new RelatedEntity("pred2", "urn:ns:type:dest-key-4") - ), - RELATED_ENTITY_COMPARATOR - ); - } - - @Override - public void testPopulatedGraphServiceGetLineageMultihop() { - // TODO: Remove this overridden method once the multihop for dGraph is implemented! - } + put( + "~pred", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + })); + } + }), + Arrays.asList(new RelatedEntity("pred", "urn:ns:type:dest-key"))); + + // multiple results and one relationship + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put( + "result", + Arrays.asList( + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-1"); + put( + "~pred", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-2"); + put( + "~pred", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + })); + } + }), + Arrays.asList( + new RelatedEntity("pred", "urn:ns:type:dest-key-1"), + new RelatedEntity("pred", "urn:ns:type:dest-key-2"))); + + // multiple results and relationships + assertEqualsAnyOrder( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put( + "result", + Arrays.asList( + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-1"); + put( + "~pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-2"); + put( + "~pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", 
"urn:ns:type:dest-key-3"); + put( + "pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x3"); + } + })); + put( + "~pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x4"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-4"); + put( + "pred2", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x5"); + } + })); + } + })); + } + }), + Arrays.asList( + new RelatedEntity("pred1", "urn:ns:type:dest-key-1"), + new RelatedEntity("pred1", "urn:ns:type:dest-key-2"), + new RelatedEntity("pred1", "urn:ns:type:dest-key-3"), + new RelatedEntity("pred2", "urn:ns:type:dest-key-4")), + RELATED_ENTITY_COMPARATOR); + } + + @Override + public void testPopulatedGraphServiceGetLineageMultihop() { + // TODO: Remove this overridden method once the multihop for dGraph is implemented! + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java index 6f63209f9c380..f1113368601c6 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.graph.neo4j; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.FabricType; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.DataPlatformUrn; @@ -18,7 +21,12 @@ import com.linkedin.metadata.query.filter.RelationshipFilter; import java.util.Arrays; import java.util.Collections; - +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.neo4j.driver.Driver; import org.neo4j.driver.GraphDatabase; import org.testng.SkipException; @@ -27,17 +35,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.testng.Assert.assertEquals; - - public class Neo4jGraphServiceTest extends GraphServiceTestBase { private Neo4jTestServerBuilder _serverBuilder; @@ -51,7 +48,8 @@ public void init() { _serverBuilder = new Neo4jTestServerBuilder(); _serverBuilder.newServer(); _driver = GraphDatabase.driver(_serverBuilder.boltURI()); - _client = new Neo4jGraphService(new LineageRegistry(SnapshotEntityRegistry.getInstance()), _driver); + _client = + new Neo4jGraphService(new LineageRegistry(SnapshotEntityRegistry.getInstance()), _driver); _client.clear(); } @@ -66,17 +64,16 @@ public void tearDown() { } @Override - protected @Nonnull - GraphService getGraphService() { + protected @Nonnull GraphService getGraphService() { return _client; } @Override - protected void syncAfterWrite() { - } + protected void syncAfterWrite() {} @Override - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { // https://github.com/datahub-project/datahub/issues/3118 // 
Neo4jGraphService produces duplicates, which are ignored here until fixed // actual.count and actual.total not tested due to duplicates @@ -85,20 +82,20 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie } @Override - protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected, Comparator<T> comparator) { + protected <T> void assertEqualsAnyOrder( + List<T> actual, List<T> expected, Comparator<T> comparator) { // https://github.com/datahub-project/datahub/issues/3118 // Neo4jGraphService produces duplicates, which are ignored here until fixed - assertEquals( - new HashSet<>(actual), - new HashSet<>(expected) - ); + assertEquals(new HashSet<>(actual), new HashSet<>(expected)); } @Override - public void testFindRelatedEntitiesSourceType(String datasetType, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3119 throw new SkipException("Neo4jGraphService does not support empty source type"); @@ -108,14 +105,17 @@ public void testFindRelatedEntitiesSourceType(String datasetType, // only test cases with "user" type fail due to this bug throw new SkipException("Neo4jGraphService does not apply source / destination types"); } - super.testFindRelatedEntitiesSourceType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesSourceType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationType(String datasetType, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3119 throw new SkipException("Neo4jGraphService does not support empty destination type"); @@ -125,7 +125,8 @@ public void testFindRelatedEntitiesDestinationType(String datasetType, // only test cases with "HasOwner" relationship fail due to this bug throw new SkipException("Neo4jGraphService does not apply source / destination types"); } - super.testFindRelatedEntitiesDestinationType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Test @@ -160,7 +161,8 @@ public void testRemoveEdgesFromNodeNoRelationshipTypes() { @Override public void testConcurrentAddEdge() { // https://github.com/datahub-project/datahub/issues/3141 - throw new SkipException("Neo4jGraphService does not manage to add all edges added concurrently"); + throw new SkipException( + "Neo4jGraphService does not manage to add all edges added concurrently"); } @Test @@ -179,28 +181,42 @@ public void testConcurrentRemoveNodes() { @Test public void testRemoveEdge() throws Exception { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("snowflake"), "test", 
FabricType.TEST); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); TagUrn tagUrn = new TagUrn("newTag"); Edge edge = new Edge(datasetUrn, tagUrn, TAG_RELATIONSHIP, null, null, null, null, null); getGraphService().addEdge(edge); - RelatedEntitiesResult result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + RelatedEntitiesResult result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 1); getGraphService().removeEdge(edge); - result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 0); } private Set<UrnArray> getPathUrnArraysFromLineageResult(EntityLineageResult result) { - return result.getRelationships() - .stream() + return result.getRelationships().stream() .map(x -> x.getPaths().get(0)) .collect(Collectors.toSet()); } @@ -209,22 +225,23 @@ private Set<UrnArray> getPathUrnArraysFromLineageResult(EntityLineageResult resu public void testGetLineage() { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), - - // another path between d2 and d5 which is shorter - // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 - new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, null, null), - new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null) - ); + List<Edge> edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), + + // another path between d2 and d5 which is shorter + // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 + new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, 
null, null), + new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null)); edges.forEach(service::addEdge); // simple path finding - final var upstreamLineageDataset3Hop3 = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + final var upstreamLineageDataset3Hop3 = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); assertEquals(upstreamLineageDataset3Hop3.getTotal().intValue(), 3); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDataset3Hop3), @@ -234,7 +251,8 @@ public void testGetLineage() { new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); // simple path finding - final var upstreamLineageDatasetFiveHop2 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + final var upstreamLineageDatasetFiveHop2 = + service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); assertEquals(upstreamLineageDatasetFiveHop2.getTotal().intValue(), 4); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDatasetFiveHop2), @@ -244,8 +262,10 @@ public void testGetLineage() { new UrnArray(datasetFiveUrn, datasetFourUrn), new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); - // there are two paths from p5 to p1, one longer and one shorter, and the longer one is discarded from result - final var upstreamLineageDataset5Hop5 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); + // there are two paths from p5 to p1, one longer and one shorter, and the longer one is + // discarded from result + final var upstreamLineageDataset5Hop5 = + service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); assertEquals(upstreamLineageDataset5Hop5.getTotal().intValue(), 5); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDataset5Hop5), @@ -257,7 +277,8 @@ public void testGetLineage() { new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); // downstream lookup - final var downstreamLineageDataset1Hop2 = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + final var downstreamLineageDataset1Hop2 = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); assertEquals(downstreamLineageDataset1Hop2.getTotal().intValue(), 4); assertEquals( getPathUrnArraysFromLineageResult(downstreamLineageDataset1Hop2), @@ -272,17 +293,18 @@ public void testGetLineage() { public void testGetLineageTimeFilterQuery() throws Exception { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - new Edge(datasetFourUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null) - ); + List<Edge> edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFourUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null)); edges.forEach(service::addEdge); // no time filtering - EntityLineageResult upstreamLineageTwoHops = 
service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + EntityLineageResult upstreamLineageTwoHops = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); assertEquals(upstreamLineageTwoHops.getTotal().intValue(), 2); assertEquals(upstreamLineageTwoHops.getRelationships().size(), 2); assertEquals( @@ -292,16 +314,17 @@ public void testGetLineageTimeFilterQuery() throws Exception { new UrnArray(datasetFourUrn, datasetThreeUrn, datasetTwoUrn))); // with time filtering - EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); + EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getTotal().intValue(), 1); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getRelationships().size(), 1); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageTwoHopsWithTimeFilter), - Set.of( - new UrnArray(datasetFourUrn, datasetThreeUrn))); + Set.of(new UrnArray(datasetFourUrn, datasetThreeUrn))); // with time filtering - EntityLineageResult upstreamLineageTimeFilter = service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); + EntityLineageResult upstreamLineageTimeFilter = + service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); assertEquals(upstreamLineageTimeFilter.getTotal().intValue(), 2); assertEquals(upstreamLineageTimeFilter.getRelationships().size(), 2); assertEquals( @@ -311,32 +334,33 @@ public void testGetLineageTimeFilterQuery() throws Exception { new UrnArray(datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); // with time filtering - EntityLineageResult downstreamLineageTimeFilter = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); + EntityLineageResult downstreamLineageTimeFilter = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); assertEquals(downstreamLineageTimeFilter.getTotal().intValue(), 1); assertEquals(downstreamLineageTimeFilter.getRelationships().size(), 1); assertEquals( getPathUrnArraysFromLineageResult(downstreamLineageTimeFilter), - Set.of( - new UrnArray(datasetOneUrn, dataJobOneUrn))); + Set.of(new UrnArray(datasetOneUrn, dataJobOneUrn))); } @Test public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + List<Edge> edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) - new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null) - ); + // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) + new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null)); edges.forEach(service::addEdge); // 
no time filtering, shorter path from d3 to d1 is returned - EntityLineageResult upstreamLineageNoTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + EntityLineageResult upstreamLineageNoTimeFiltering = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageNoTimeFiltering), Set.of( @@ -345,7 +369,8 @@ public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { new UrnArray(datasetThreeUrn, datasetOneUrn))); // with time filtering, shorter path from d3 to d1 is excluded so longer path is returned - EntityLineageResult upstreamLineageTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); + EntityLineageResult upstreamLineageTimeFiltering = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageTimeFiltering), Set.of( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java index ba4e4cec37914..fa04de340e12f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java @@ -1,9 +1,8 @@ package com.linkedin.metadata.graph.neo4j; +import apoc.path.PathExplorer; import java.io.File; import java.net.URI; - -import apoc.path.PathExplorer; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.harness.Neo4j; import org.neo4j.harness.Neo4jBuilder; @@ -19,9 +18,7 @@ private Neo4jTestServerBuilder(Neo4jBuilder builder) { } public Neo4jTestServerBuilder() { - this(new InProcessNeo4jBuilder() - .withProcedure(PathExplorer.class) - ); + this(new InProcessNeo4jBuilder().withProcedure(PathExplorer.class)); } public Neo4jTestServerBuilder(File workingDirectory) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java index baed3ade0d207..9fc9490bfd7ef 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java @@ -23,7 +23,8 @@ public class ESGraphQueryDAOTest { - private static final String TEST_QUERY_FILE = "elasticsearch/sample_filters/lineage_query_filters_1.json"; + private static final String TEST_QUERY_FILE = + "elasticsearch/sample_filters/lineage_query_filters_1.json"; @Test private static void testGetQueryForLineageFullArguments() throws Exception { @@ -32,20 +33,19 @@ private static void testGetQueryForLineageFullArguments() throws Exception { String expectedQuery = Resources.toString(url, StandardCharsets.UTF_8); List<Urn> urns = new ArrayList<>(); - List<LineageRegistry.EdgeInfo> edgeInfos = new ArrayList<>(ImmutableList.of( - new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, Constants.DATASET_ENTITY_NAME) - )); + List<LineageRegistry.EdgeInfo> edgeInfos = + new ArrayList<>( + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", + RelationshipDirection.INCOMING, + Constants.DATASET_ENTITY_NAME))); GraphFilters graphFilters = new GraphFilters(ImmutableList.of(Constants.DATASET_ENTITY_NAME)); Long startTime = 0L; Long endTime = 1L; - QueryBuilder 
builder = ESGraphQueryDAO.getQueryForLineage( - urns, - edgeInfos, - graphFilters, - startTime, - endTime - ); + QueryBuilder builder = + ESGraphQueryDAO.getQueryForLineage(urns, edgeInfos, graphFilters, startTime, endTime); Assert.assertEquals(builder.toString(), expectedQuery); } @@ -59,73 +59,51 @@ private static void testAddEdgeToPaths() { // Case 0: Add with no existing paths. Map<Urn, UrnArrayArray> nodePaths = new HashMap<>(); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - UrnArrayArray expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + UrnArrayArray expectedPathsToChild = + new UrnArrayArray(ImmutableList.of(new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 1: No paths to parent. nodePaths = new HashMap<>(); - nodePaths.put(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,Other,PROD)"), new UrnArrayArray()); + nodePaths.put( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,Other,PROD)"), + new UrnArrayArray()); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray(ImmutableList.of(new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 2: 1 Existing Path to Parent Node nodePaths = new HashMap<>(); - Urn testParentParent = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent,PROD)"); - UrnArrayArray existingPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )) - )); + Urn testParentParent = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent,PROD)"); + UrnArrayArray existingPathsToParent = + new UrnArrayArray( + ImmutableList.of(new UrnArray(ImmutableList.of(testParentParent, testParent)))); nodePaths.put(testParent, existingPathsToParent); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 3: > 1 Existing Paths to Parent Node nodePaths = new HashMap<>(); - Urn testParentParent2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent2,PROD)"); - UrnArrayArray existingPathsToParent2 = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + Urn testParentParent2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent2,PROD)"); + UrnArrayArray existingPathsToParent2 = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); nodePaths.put(testParent, existingPathsToParent2); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - 
testChild - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)), + new UrnArray(ImmutableList.of(testParentParent2, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 4: Build graph from empty by adding multiple edges @@ -139,34 +117,23 @@ private static void testAddEdgeToPaths() { Assert.assertNull(nodePaths.get(testParentParent2)); // Verify paths to testParent - UrnArrayArray expectedPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + UrnArrayArray expectedPathsToParent = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); Assert.assertEquals(nodePaths.get(testParent), expectedPathsToParent); // Verify paths to testChild - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)), + new UrnArray(ImmutableList.of(testParentParent2, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); - // Case 5: Mainly documentation: Verify that if you build the graph out of order bad things happen. + // Case 5: Mainly documentation: Verify that if you build the graph out of order bad things + // happen. // Also test duplicate edge addition nodePaths = new HashMap<>(); // Add edge to testChild first! Before path to testParent has been constructed. 
@@ -182,29 +149,19 @@ private static void testAddEdgeToPaths() { Assert.assertNull(nodePaths.get(testParentParent2)); // Verify paths to testParent - expectedPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + expectedPathsToParent = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); Assert.assertEquals(nodePaths.get(testParent), expectedPathsToParent); // Verify paths to testChild are INCORRECT: partial & duplicated - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParent, testChild)), + new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 0ce43c9d31571..2f8fba0083aa7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.graph.search; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; @@ -26,6 +30,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import io.datahubproject.test.search.SearchTestUtils; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import javax.annotation.Nonnull; import org.junit.Assert; import org.opensearch.client.RestHighLevelClient; import org.testng.SkipException; @@ -33,27 +43,16 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; - -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.testng.Assert.assertEquals; - -abstract public class SearchGraphServiceTestBase extends GraphServiceTestBase { +public abstract class SearchGraphServiceTestBase extends GraphServiceTestBase { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); private final IndexConvention _indexConvention = new 
IndexConventionImpl(null); private final String _indexName = _indexConvention.getIndexName(INDEX_NAME); @@ -76,9 +75,19 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchGraphService buildService() { LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); - ESGraphQueryDAO readDAO = new ESGraphQueryDAO(getSearchClient(), lineageRegistry, _indexConvention, GraphQueryConfiguration.testDefaults); + ESGraphQueryDAO readDAO = + new ESGraphQueryDAO( + getSearchClient(), + lineageRegistry, + _indexConvention, + GraphQueryConfiguration.testDefaults); ESGraphWriteDAO writeDAO = new ESGraphWriteDAO(_indexConvention, getBulkProcessor(), 1); - return new ElasticSearchGraphService(lineageRegistry, getBulkProcessor(), _indexConvention, writeDAO, readDAO, + return new ElasticSearchGraphService( + lineageRegistry, + getBulkProcessor(), + _indexConvention, + writeDAO, + readDAO, getIndexBuilder()); } @@ -94,7 +103,8 @@ protected void syncAfterWrite() throws Exception { } @Override - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { // https://github.com/datahub-project/datahub/issues/3115 // ElasticSearchGraphService produces duplicates, which is here ignored until fixed // actual.count and actual.total not tested due to duplicates @@ -103,112 +113,160 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie } @Override - protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected, Comparator<T> comparator) { + protected <T> void assertEqualsAnyOrder( + List<T> actual, List<T> expected, Comparator<T> comparator) { // https://github.com/datahub-project/datahub/issues/3115 // ElasticSearchGraphService produces duplicates, which is here ignored until fixed assertEquals(new HashSet<>(actual), new HashSet<>(expected)); } @Override - public void testFindRelatedEntitiesSourceEntityFilter(Filter sourceEntityFilter, List<String> relationshipTypes, - RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceEntityFilter( + Filter sourceEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testFindRelatedEntitiesSourceEntityFilter(sourceEntityFilter, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesSourceEntityFilter( + sourceEntityFilter, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationEntityFilter(Filter destinationEntityFilter, - List<String> relationshipTypes, RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) + public void testFindRelatedEntitiesDestinationEntityFilter( + Filter destinationEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) throws Exception { if (relationships.getDirection() == 
RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testFindRelatedEntitiesDestinationEntityFilter(destinationEntityFilter, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationEntityFilter( + destinationEntityFilter, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesSourceType(String datasetType, List<String> relationshipTypes, - RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3116 throw new SkipException("ElasticSearchGraphService does not support empty source type"); } - super.testFindRelatedEntitiesSourceType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesSourceType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationType(String datasetType, List<String> relationshipTypes, - RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3116 throw new SkipException("ElasticSearchGraphService does not support empty destination type"); } - super.testFindRelatedEntitiesDestinationType(datasetType, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Test @Override public void testFindRelatedEntitiesNoRelationshipTypes() { // https://github.com/datahub-project/datahub/issues/3117 - throw new SkipException("ElasticSearchGraphService does not support empty list of relationship types"); + throw new SkipException( + "ElasticSearchGraphService does not support empty list of relationship types"); } @Override - public void testRemoveEdgesFromNode(@Nonnull Urn nodeToRemoveFrom, @Nonnull List<String> relationTypes, - @Nonnull RelationshipFilter relationshipFilter, 
List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove, + public void testRemoveEdgesFromNode( + @Nonnull Urn nodeToRemoveFrom, + @Nonnull List<String> relationTypes, + @Nonnull RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove, List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove, List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove, - List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) throws Exception { + List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) + throws Exception { if (relationshipFilter.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testRemoveEdgesFromNode(nodeToRemoveFrom, relationTypes, relationshipFilter, - expectedOutgoingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove, - expectedOutgoingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); + super.testRemoveEdgesFromNode( + nodeToRemoveFrom, + relationTypes, + relationshipFilter, + expectedOutgoingRelatedUrnsBeforeRemove, + expectedIncomingRelatedUrnsBeforeRemove, + expectedOutgoingRelatedUrnsAfterRemove, + expectedIncomingRelatedUrnsAfterRemove); } @Test @Override public void testRemoveEdgesFromNodeNoRelationshipTypes() { // https://github.com/datahub-project/datahub/issues/3117 - throw new SkipException("ElasticSearchGraphService does not support empty list of relationship types"); + throw new SkipException( + "ElasticSearchGraphService does not support empty list of relationship types"); } @Test // TODO: Only in ES for now since unimplemented in other services public void testRemoveEdge() throws Exception { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); TagUrn tagUrn = new TagUrn("newTag"); Edge edge = new Edge(datasetUrn, tagUrn, TAG_RELATIONSHIP, null, null, null, null, null); getGraphService().addEdge(edge); syncAfterWrite(); - RelatedEntitiesResult result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + RelatedEntitiesResult result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 1); getGraphService().removeEdge(edge); syncAfterWrite(); - result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + 
newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 0); } @@ -239,15 +297,39 @@ public void testTimestampLineage() throws Exception { // Populate one upstream and two downstream edges at initialTime Long initialTime = 1000L; - List<Edge> edges = Arrays.asList( - // One upstream edge - new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, initialTime, null, initialTime, null, null), - // Two downstream - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, initialTime, null, initialTime, null, null), - new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, initialTime, null, initialTime, null, null), - // One with null values, should always be returned - new Edge(datasetFiveUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null) - ); + List<Edge> edges = + Arrays.asList( + // One upstream edge + new Edge( + datasetTwoUrn, + datasetOneUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + // Two downstream + new Edge( + datasetThreeUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + new Edge( + datasetFourUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + // One with null values, should always be returned + new Edge(datasetFiveUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)); edges.forEach(getGraphService()::addEdge); syncAfterWrite(); @@ -259,120 +341,103 @@ public void testTimestampLineage() throws Exception { Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Timestamp before - upstreamResult = getUpstreamLineage(datasetTwoUrn, - 0L, - initialTime - 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - 0L, - initialTime - 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, 0L, initialTime - 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, 0L, initialTime - 10); Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); // Timestamp after - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime + 10, - initialTime + 100); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime + 10, - initialTime + 100); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); // Timestamp included - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime - 10, - initialTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime - 10, - initialTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Update only one of the downstream edges Long updatedTime = 2000L; - edges = Arrays.asList( - new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, initialTime, null, updatedTime, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, initialTime, 
null, updatedTime, null, null) - ); + edges = + Arrays.asList( + new Edge( + datasetTwoUrn, + datasetOneUrn, + downstreamOf, + initialTime, + null, + updatedTime, + null, + null), + new Edge( + datasetThreeUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + updatedTime, + null, + null)); edges.forEach(getGraphService()::addEdge); syncAfterWrite(); // Without timestamps - upstreamResult = getUpstreamLineage(datasetTwoUrn, - null, - null); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - null, - null); + upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); + downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Window includes initial time and updated time - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime - 10, - updatedTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime - 10, - updatedTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Window includes updated time but not initial time - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime + 10, - updatedTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime + 10, - updatedTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(2), downstreamResult.getTotal()); - } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The Upstream lineage for urn from the window from startTime to endTime */ private EntityLineageResult getUpstreamLineage(Urn urn, Long startTime, Long endTime) { - return getLineage(urn, - LineageDirection.UPSTREAM, - startTime, - endTime); + return getLineage(urn, LineageDirection.UPSTREAM, startTime, endTime); } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The Downstream lineage for urn from the window from startTime to endTime */ private EntityLineageResult getDownstreamLineage(Urn urn, Long startTime, Long endTime) { - return getLineage(urn, - LineageDirection.DOWNSTREAM, - startTime, - endTime); + return getLineage(urn, LineageDirection.DOWNSTREAM, startTime, endTime); } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param direction Direction to query (upstream/downstream) * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The lineage for urn from the window from startTime to endTime in direction */ - private EntityLineageResult getLineage(Urn urn, LineageDirection direction, Long startTime, Long endTime) { - return getGraphService().getLineage(urn, - direction, - 0, - 0, - 3, - startTime, - endTime); + private 
EntityLineageResult getLineage( + Urn urn, LineageDirection direction, Long startTime, Long endTime) { + return getGraphService().getLineage(urn, direction, 0, 0, 3, startTime, endTime); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java index 989f9ae197239..3c892dddb70e1 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java @@ -1,17 +1,18 @@ package com.linkedin.metadata.graph.search; import com.google.common.io.Resources; +import com.linkedin.metadata.graph.elastic.TimeFilterUtils; import java.net.URL; import java.nio.charset.StandardCharsets; - -import com.linkedin.metadata.graph.elastic.TimeFilterUtils; import org.opensearch.index.query.QueryBuilder; import org.testng.Assert; import org.testng.annotations.Test; public class TimeFilterUtilsTest { - private static final String TEST_QUERY_FILE = "elasticsearch/sample_filters/lineage_time_query_filters_1.json"; + private static final String TEST_QUERY_FILE = + "elasticsearch/sample_filters/lineage_time_query_filters_1.json"; + @Test private static void testGetEdgeTimeFilterQuery() throws Exception { URL url = Resources.getResource(TEST_QUERY_FILE); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java index 7b550311bf823..b2c49857cb0b9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.search.elasticsearch.ElasticSearchSuite; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; - import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -16,12 +15,9 @@ @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class SearchGraphServiceElasticSearchTest extends SearchGraphServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override @@ -45,5 +41,4 @@ protected ESIndexBuilder getIndexBuilder() { public void initTest() { AssertJUnit.assertNotNull(_searchClient); } - } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java index eabfb523fb910..28b545f817539 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java @@ -15,12 +15,9 @@ 
@Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class SearchGraphServiceOpenSearchTest extends SearchGraphServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override @@ -44,5 +41,4 @@ protected ESIndexBuilder getIndexBuilder() { public void initTest() { AssertJUnit.assertNotNull(_searchClient); } - } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java index c6677c171b30e..df332cacaa751 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.graph.sibling; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.Siblings; @@ -24,27 +29,23 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class SiblingGraphServiceTest { - /** - * Some test URN types. - */ + /** Some test URN types. */ protected static String datasetType = "dataset"; - /** - * Some test datasets. - */ - protected static String datasetOneUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; - protected static String datasetTwoUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; - protected static String datasetThreeUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; - protected static String datasetFourUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; - protected static String datasetFiveUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; + /** Some test datasets. 
*/
+  protected static String datasetOneUrnString =
+      "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)";
+
+  protected static String datasetTwoUrnString =
+      "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)";
+  protected static String datasetThreeUrnString =
+      "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)";
+  protected static String datasetFourUrnString =
+      "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)";
+  protected static String datasetFiveUrnString =
+      "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)";

   protected static Urn datasetOneUrn = createFromString(datasetOneUrnString);
   protected static Urn datasetTwoUrn = createFromString(datasetTwoUrnString);
@@ -52,11 +53,9 @@ public class SiblingGraphServiceTest {
   protected static Urn datasetFourUrn = createFromString(datasetFourUrnString);
   protected static Urn datasetFiveUrn = createFromString(datasetFiveUrnString);

-
-  /**
-   * Some test relationships.
-   */
+  /** Some test relationships. */
   protected static String downstreamOf = "DownstreamOf";
+
   protected static String upstreamOf = "UpstreamOf";

   private GraphService _graphService;
@@ -100,15 +99,15 @@ public void testNoSiblingMetadata() {
     mockResult.setFiltered(0);
     mockResult.setRelationships(relationships);

-    when(_graphService.getLineage(
-        datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null
-    )).thenReturn(mockResult);
+    when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null))
+        .thenReturn(mockResult);

     when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(null);

     SiblingGraphService service = _client;

-    EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1);
+    EntityLineageResult upstreamLineage =
+        service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1);

     // assert sibling graph service is a pass through in the case that there is no sibling metadata
     assertEquals(upstreamLineage, mockResult);
@@ -145,24 +144,23 @@ public void testNoSiblingInResults() {
     mockResult.setFiltered(0);
     mockResult.setRelationships(relationships);

-    when(_graphService.getLineage(
-        datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null
-    )).thenReturn(mockResult);
+    when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null))
+        .thenReturn(mockResult);

     siblingMockResult.setStart(0);
     siblingMockResult.setTotal(0);
     siblingMockResult.setCount(0);
     siblingMockResult.setRelationships(new LineageRelationshipArray());

-    when(_graphService.getLineage(
-        datasetFiveUrn, LineageDirection.UPSTREAM, 0, 97, 1, null, null
-    )).thenReturn(siblingMockResult);
+    when(_graphService.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 97, 1, null, null))
+        .thenReturn(siblingMockResult);

     Siblings noRelevantSiblingsResponse = new Siblings();
     noRelevantSiblingsResponse.setPrimary(true);
     noRelevantSiblingsResponse.setSiblings(new UrnArray(ImmutableList.of(datasetFiveUrn)));

-    when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(noRelevantSiblingsResponse);
+    when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME))
+        .thenReturn(noRelevantSiblingsResponse);

     Siblings dataset1Siblings = new Siblings();
     dataset1Siblings.setPrimary(false);
@@ -176,17 +174,18 @@
dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert sibling graph service is a pass through in the case that your sibling has no lineage assertEquals(upstreamLineage, mockResult); @@ -227,20 +226,18 @@ public void testSiblingInResult() throws Exception { siblingMockResult.setCount(0); siblingMockResult.setRelationships(new LineageRelationshipArray()); - when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 98, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 98, 1, null, null)) + .thenReturn(siblingMockResult); - - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -254,11 +251,11 @@ public void testSiblingInResult() throws Exception { dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); @@ -270,7 +267,8 @@ public void testSiblingInResult() throws Exception { expectedResult.setFiltered(1); expectedResult.setRelationships(new LineageRelationshipArray(relationship1, relationship2)); - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your sibling will be filtered out of your lineage assertEquals(upstreamLineage, expectedResult); @@ -311,7 +309,8 @@ public void testCombineSiblingResult() { 
expectedRelationships.add(relationship2); expectedRelationships.add(relationship4); - expectedRelationships.add(relationship1); // expect just one relationship1 despite duplicates in sibling lineage + expectedRelationships.add( + relationship1); // expect just one relationship1 despite duplicates in sibling lineage expectedResult.setCount(3); expectedResult.setStart(0); @@ -326,27 +325,39 @@ public void testCombineSiblingResult() { siblingRelationships.add(relationship2); siblingRelationships.add(relationship4); - siblingRelationships.add(relationship1); // duplicate from sibling's lineage, we should not see duplicates in result + siblingRelationships.add( + relationship1); // duplicate from sibling's lineage, we should not see duplicates in result siblingMockResult.setStart(0); siblingMockResult.setTotal(3); siblingMockResult.setCount(2); siblingMockResult.setRelationships(siblingRelationships); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> siblingMockResult.clone()); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> siblingMockResult.clone()); when(_graphService.getLineage( - Mockito.eq(datasetFourUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockResult.clone()); + Mockito.eq(datasetFourUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockResult.clone()); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -360,18 +371,19 @@ public void testCombineSiblingResult() { dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFiveUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFiveUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will be combined with your siblings lineage assertEquals(upstreamLineage, expectedResult); @@ -430,20 +442,18 @@ public void testUpstreamOfSiblings() 
{ siblingMockResult.setCount(2); siblingMockResult.setRelationships(siblingRelationships); - when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null)) + .thenReturn(siblingMockResult); - - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -465,37 +475,37 @@ public void testUpstreamOfSiblings() { dataset5Siblings.setPrimary(true); dataset5Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings), - datasetFiveUrn, ImmutableList.of(dataset5Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings), + datasetFiveUrn, ImmutableList.of(dataset5Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will not contain two siblings assertEquals(upstreamLineage, expectedResult); when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(siblingMockResult); - + datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(siblingMockResult); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null)) + .thenReturn(mockResult); siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(false); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetFourUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1); @@ -510,7 +520,8 @@ public void testUpstreamOfSiblings() { } // we should be combining lineage of siblings of siblings - // ie. dataset1 has sibling dataset2. 
dataset 2 has siblings dataset1 and dataset3. dataset3 has sibling dataset2. dataset3 has upstream dataset4. + // ie. dataset1 has sibling dataset2. dataset 2 has siblings dataset1 and dataset3. dataset3 has + // sibling dataset2. dataset3 has upstream dataset4. // requesting upstream for dataset1 should give us dataset4 @Test public void testUpstreamOfSiblingSiblings() { @@ -547,57 +558,77 @@ public void testUpstreamOfSiblingSiblings() { emptyLineageResult.setCount(0); when(_graphService.getLineage( - Mockito.eq(datasetOneUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(emptyLineageResult); + Mockito.eq(datasetOneUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(emptyLineageResult); when(_graphService.getLineage( - Mockito.eq(datasetTwoUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(emptyLineageResult); + Mockito.eq(datasetTwoUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(emptyLineageResult); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(mockResult); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(mockResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(true); dataset1Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - when(_mockEntityService.getLatestAspect(datasetOneUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset1Siblings); + when(_mockEntityService.getLatestAspect(datasetOneUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset1Siblings); Siblings dataset2Siblings = new Siblings(); dataset2Siblings.setPrimary(true); dataset2Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetOneUrn, datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetTwoUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset2Siblings); + when(_mockEntityService.getLatestAspect(datasetTwoUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset2Siblings); Siblings dataset3Siblings = new Siblings(); dataset3Siblings.setPrimary(true); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset3Siblings); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset3Siblings); Siblings dataset4Siblings = new Siblings(); dataset4Siblings.setPrimary(true); dataset4Siblings.setSiblings(new UrnArray()); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset4Siblings); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset4Siblings); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings) - ); + Map<Urn, 
List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; for (Urn urn : List.of(datasetOneUrn, datasetTwoUrn, datasetThreeUrn)) { - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 100, 1); assertEquals(upstreamLineage, expectedResult); } @@ -659,26 +690,38 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { siblingMockResult.setRelationships(siblingRelationships); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> siblingMockResult.clone()); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> siblingMockResult.clone()); when(_graphService.getLineage( - Mockito.eq(datasetFourUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockResult.clone()); + Mockito.eq(datasetFourUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockResult.clone()); Siblings primarySibling = new Siblings(); primarySibling.setPrimary(true); primarySibling.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(primarySibling); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(primarySibling); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(false); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetFourUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -700,19 +743,20 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { dataset5Siblings.setPrimary(true); dataset5Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings), - datasetFiveUrn, ImmutableList.of(dataset5Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings), + datasetFiveUrn, ImmutableList.of(dataset5Siblings)); 
when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will not contain two siblings assertEquals(upstreamLineage, expectedResult); @@ -733,11 +777,19 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { @Test public void testSiblingCombinations() throws URISyntaxException { - Urn primarySiblingUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:dbt,PrimarySibling,PROD)"); - Urn alternateSiblingUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,SecondarySibling,PROD)"); - - Urn upstreamUrn1 = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream1,PROD)"); - Urn upstreamUrn2 = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream2,PROD)"); + Urn primarySiblingUrn = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:dbt,PrimarySibling,PROD)"); + Urn alternateSiblingUrn = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,SecondarySibling,PROD)"); + + Urn upstreamUrn1 = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream1,PROD)"); + Urn upstreamUrn2 = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream2,PROD)"); LineageRelationshipArray alternateDownstreamRelationships = new LineageRelationshipArray(); // Populate sibling service @@ -745,13 +797,15 @@ public void testSiblingCombinations() throws URISyntaxException { primarySiblings.setPrimary(true); primarySiblings.setSiblings(new UrnArray(ImmutableList.of(alternateSiblingUrn))); - when(_mockEntityService.getLatestAspect(primarySiblingUrn, SIBLINGS_ASPECT_NAME)).thenReturn(primarySiblings); + when(_mockEntityService.getLatestAspect(primarySiblingUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(primarySiblings); Siblings secondarySiblings = new Siblings(); secondarySiblings.setPrimary(false); secondarySiblings.setSiblings(new UrnArray(ImmutableList.of(primarySiblingUrn))); - when(_mockEntityService.getLatestAspect(alternateSiblingUrn, SIBLINGS_ASPECT_NAME)).thenReturn(secondarySiblings); + when(_mockEntityService.getLatestAspect(alternateSiblingUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(secondarySiblings); Map<Urn, List<RecordTemplate>> siblingsMap = new HashMap<>(); siblingsMap.put(primarySiblingUrn, ImmutableList.of(primarySiblings)); @@ -760,7 +814,13 @@ public void testSiblingCombinations() throws URISyntaxException { // Create many downstreams of the alternate URN string final int numDownstreams = 42; for (int i = 0; i < numDownstreams; i++) { - Urn downstreamUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Downstream" + i + ",PROD)"); + Urn downstreamUrn = + Urn.createFromString( + "urn:li:" + + datasetType + + ":(urn:li:dataPlatform:snowflake,Downstream" + + i + + ",PROD)"); LineageRelationship relationship = new LineageRelationship(); relationship.setDegree(0); relationship.setType(upstreamOf); @@ -785,9 +845,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockAlternateUpstreamResult.setCount(3); when(_graphService.getLineage( - Mockito.eq(alternateSiblingUrn), 
Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockAlternateUpstreamResult.clone()); + Mockito.eq(alternateSiblingUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockAlternateUpstreamResult.clone()); EntityLineageResult mockAlternateDownstreamResult = new EntityLineageResult(); mockAlternateDownstreamResult.setRelationships(alternateDownstreamRelationships); @@ -796,9 +861,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockAlternateDownstreamResult.setCount(numDownstreams); when(_graphService.getLineage( - Mockito.eq(alternateSiblingUrn), Mockito.eq(LineageDirection.DOWNSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockAlternateDownstreamResult.clone()); + Mockito.eq(alternateSiblingUrn), + Mockito.eq(LineageDirection.DOWNSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockAlternateDownstreamResult.clone()); // Set up mocks for primary sibling LineageRelationshipArray primaryUpstreamRelationships = new LineageRelationshipArray(); @@ -818,9 +888,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockPrimaryUpstreamResult.setCount(2); when(_graphService.getLineage( - Mockito.eq(primarySiblingUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockPrimaryUpstreamResult.clone()); + Mockito.eq(primarySiblingUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockPrimaryUpstreamResult.clone()); LineageRelationshipArray primaryDowntreamRelationships = new LineageRelationshipArray(); LineageRelationship relationship = new LineageRelationship(); @@ -836,26 +911,23 @@ public void testSiblingCombinations() throws URISyntaxException { mockPrimaryDownstreamResult.setCount(1); when(_graphService.getLineage( - Mockito.eq(primarySiblingUrn), Mockito.eq(LineageDirection.DOWNSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockPrimaryDownstreamResult.clone()); - + Mockito.eq(primarySiblingUrn), + Mockito.eq(LineageDirection.DOWNSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockPrimaryDownstreamResult.clone()); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; // Tests for separateSiblings = true: primary sibling - EntityLineageResult primaryDownstreamSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult primaryDownstreamSeparated = + service.getLineage( + primarySiblingUrn, LineageDirection.DOWNSTREAM, 0, 100, 1, true, Set.of(), null, null); LineageRelationshipArray expectedRelationships = new LineageRelationshipArray(); expectedRelationships.add(relationship); @@ -869,16 +941,9 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primaryDownstreamSeparated, expectedResultPrimarySeparated); - EntityLineageResult 
primaryUpstreamSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult primaryUpstreamSeparated = + service.getLineage( + primarySiblingUrn, LineageDirection.UPSTREAM, 0, 100, 1, true, Set.of(), null, null); EntityLineageResult expectedResultPrimaryUpstreamSeparated = new EntityLineageResult(); expectedResultPrimaryUpstreamSeparated.setCount(2); expectedResultPrimaryUpstreamSeparated.setStart(0); @@ -889,16 +954,17 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primaryUpstreamSeparated, expectedResultPrimaryUpstreamSeparated); // Test for separateSiblings = true, secondary sibling - EntityLineageResult secondarySiblingSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult secondarySiblingSeparated = + service.getLineage( + alternateSiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + true, + Set.of(), + null, + null); EntityLineageResult expectedResultSecondarySeparated = new EntityLineageResult(); expectedResultSecondarySeparated.setCount(numDownstreams); @@ -909,16 +975,9 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(secondarySiblingSeparated, expectedResultSecondarySeparated); - EntityLineageResult secondaryUpstreamSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult secondaryUpstreamSeparated = + service.getLineage( + alternateSiblingUrn, LineageDirection.UPSTREAM, 0, 100, 1, true, Set.of(), null, null); EntityLineageResult expectedResultSecondaryUpstreamSeparated = new EntityLineageResult(); expectedResultSecondaryUpstreamSeparated.setCount(3); expectedResultSecondaryUpstreamSeparated.setStart(0); @@ -929,16 +988,17 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(secondaryUpstreamSeparated, expectedResultSecondaryUpstreamSeparated); // Test for separateSiblings = false, primary sibling - EntityLineageResult primarySiblingNonSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null); + EntityLineageResult primarySiblingNonSeparated = + service.getLineage( + primarySiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); EntityLineageResult expectedResultPrimaryNonSeparated = new EntityLineageResult(); expectedResultPrimaryNonSeparated.setCount(numDownstreams); expectedResultPrimaryNonSeparated.setStart(0); @@ -947,17 +1007,17 @@ public void testSiblingCombinations() throws URISyntaxException { expectedResultPrimaryNonSeparated.setRelationships(alternateDownstreamRelationships); assertEquals(primarySiblingNonSeparated, expectedResultPrimaryNonSeparated); - EntityLineageResult primarySiblingNonSeparatedUpstream = service.getLineage( - primarySiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null - ); + EntityLineageResult primarySiblingNonSeparatedUpstream = + service.getLineage( + primarySiblingUrn, + LineageDirection.UPSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); EntityLineageResult expectedResultPrimaryUpstreamNonSeparated = new EntityLineageResult(); expectedResultPrimaryUpstreamNonSeparated.setCount(2); expectedResultPrimaryUpstreamNonSeparated.setStart(0); @@ 
-967,29 +1027,30 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primarySiblingNonSeparatedUpstream, expectedResultPrimaryUpstreamNonSeparated); // Test for separateSiblings = false, secondary sibling - EntityLineageResult secondarySiblingNonSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null); + EntityLineageResult secondarySiblingNonSeparated = + service.getLineage( + alternateSiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); assertEquals(secondarySiblingNonSeparated, expectedResultPrimaryNonSeparated); - EntityLineageResult secondarySiblingNonSeparatedUpstream = service.getLineage( - alternateSiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null - ); + EntityLineageResult secondarySiblingNonSeparatedUpstream = + service.getLineage( + alternateSiblingUrn, + LineageDirection.UPSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); assertEquals(secondarySiblingNonSeparatedUpstream, expectedResultPrimaryUpstreamNonSeparated); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java index 60e63ed001768..c0faf6fdfee6c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.recommendation; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntityUtil; @@ -11,34 +14,56 @@ import java.util.stream.Collectors; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class RecommendationsServiceTest { private final TestSource nonEligibleSource = - new TestSource("not eligible", "nonEligible", RecommendationRenderType.ENTITY_NAME_LIST, false, + new TestSource( + "not eligible", + "nonEligible", + RecommendationRenderType.ENTITY_NAME_LIST, + false, getContentFromString(ImmutableList.of("test"))); private final TestSource emptySource = - new TestSource("empty", "empty", RecommendationRenderType.ENTITY_NAME_LIST, true, ImmutableList.of()); + new TestSource( + "empty", "empty", RecommendationRenderType.ENTITY_NAME_LIST, true, ImmutableList.of()); private final TestSource valuesSource = - new TestSource("values", "values", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "values", + "values", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromString(ImmutableList.of("test"))); private final TestSource multiValuesSource = - new TestSource("multiValues", "multiValues", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "multiValues", + "multiValues", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromString(ImmutableList.of("test1", "test2", "test3", "test4"))); private final TestSource urnsSource = - new TestSource("urns", "urns", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "urns", + "urns", + RecommendationRenderType.ENTITY_NAME_LIST, + true, 
getContentFromUrns(ImmutableList.of(TestEntityUtil.getTestEntityUrn()))); private final TestSource multiUrnsSource = - new TestSource("multiUrns", "multiUrns", RecommendationRenderType.ENTITY_NAME_LIST, true, - getContentFromUrns(ImmutableList.of(TestEntityUtil.getTestEntityUrn(), TestEntityUtil.getTestEntityUrn(), - TestEntityUtil.getTestEntityUrn()))); + new TestSource( + "multiUrns", + "multiUrns", + RecommendationRenderType.ENTITY_NAME_LIST, + true, + getContentFromUrns( + ImmutableList.of( + TestEntityUtil.getTestEntityUrn(), + TestEntityUtil.getTestEntityUrn(), + TestEntityUtil.getTestEntityUrn()))); private final RecommendationModuleRanker ranker = new SimpleRecommendationRanker(); private List<RecommendationContent> getContentFromString(List<String> values) { - return values.stream().map(value -> new RecommendationContent().setValue(value)).collect(Collectors.toList()); + return values.stream() + .map(value -> new RecommendationContent().setValue(value)) + .collect(Collectors.toList()); } private List<RecommendationContent> getContentFromUrns(List<Urn> urns) { @@ -50,15 +75,24 @@ private List<RecommendationContent> getContentFromUrns(List<Urn> urns) { @Test public void testService() throws URISyntaxException { // Test non-eligible and empty - RecommendationsService service = new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); - List<RecommendationModule> result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + RecommendationsService service = + new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); + List<RecommendationModule> result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertTrue(result.isEmpty()); // Test empty with one valid source - service = new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + service = + new RecommendationsService( + ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertEquals(result.size(), 1); RecommendationModule module = result.get(0); assertEquals(module.getTitle(), "values"); @@ -67,10 +101,14 @@ public void testService() throws URISyntaxException { assertEquals(module.getContent(), valuesSource.getContents()); // Test multiple sources - service = new RecommendationsService(ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), - ranker); - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + service = + new RecommendationsService( + ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), ranker); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertEquals(result.size(), 4); module = result.get(0); assertEquals(module.getTitle(), "values"); @@ -94,8 +132,11 @@ public void testService() throws URISyntaxException { 
assertEquals(module.getContent(), multiUrnsSource.getContents()); // Test limit - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 2); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 2); assertEquals(result.size(), 2); module = result.get(0); assertEquals(module.getTitle(), "values"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java index 0dc517eaf0d1c..dcc59d0632954 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.recommendation.candidatesource; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; @@ -19,15 +27,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.eq; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - - public class EntitySearchAggregationCandidateSourceTest { private EntitySearchService _entitySearchService = Mockito.mock(EntitySearchService.class); private EntitySearchAggregationSource _valueBasedCandidateSource; @@ -44,7 +43,8 @@ public void setup() { _urnBasedCandidateSource = buildCandidateSource("testUrn", true); } - private EntitySearchAggregationSource buildCandidateSource(String identifier, boolean isValueUrn) { + private EntitySearchAggregationSource buildCandidateSource( + String identifier, boolean isValueUrn) { return new EntitySearchAggregationSource(_entitySearchService) { @Override protected String getSearchFieldName() { @@ -77,7 +77,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return true; } }; @@ -85,9 +86,11 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Test public void testWhenSearchServiceReturnsEmpty() { - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(Collections.emptyMap()); - List<RecommendationContent> candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + 
_valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertTrue(candidates.isEmpty()); assertFalse(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); } @@ -95,9 +98,11 @@ public void testWhenSearchServiceReturnsEmpty() { @Test public void testWhenSearchServiceReturnsValueResults() { // One result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L)); - List<RecommendationContent> candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), "value1"); @@ -107,14 +112,16 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value1")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); assertTrue(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); // Multiple result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L)); candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); @@ -126,7 +133,8 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value3")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 3L); @@ -138,7 +146,8 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value2")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); @@ -153,7 +162,8 @@ public void testWhenSearchServiceReturnsUrnResults() { Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3"); Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L)); - List<RecommendationContent> candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + 
_urnBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), testUrn1.toString()); @@ -163,7 +173,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn1.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); @@ -171,7 +182,9 @@ public void testWhenSearchServiceReturnsUrnResults() { // Multiple result Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) - .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); + .thenReturn( + ImmutableMap.of( + testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); @@ -182,7 +195,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn3.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 3L); @@ -194,7 +208,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn2.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java index f5c3569821e00..3998e45195b25 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java @@ -14,13 +14,12 @@ public class RecommendationUtilsTest { private void testIsSupportedEntityType() { Urn testUrn = UrnUtils.getUrn("urn:li:corpuser:john"); Assert.assertTrue( - RecommendationUtils.isSupportedEntityType(testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME)) - ); + RecommendationUtils.isSupportedEntityType( + testUrn, + ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME))); Assert.assertFalse( - RecommendationUtils.isSupportedEntityType(testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME)) - ); - Assert.assertFalse( - 
RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet()) - ); + RecommendationUtils.isSupportedEntityType( + testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME))); + Assert.assertFalse(RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet())); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java index 31672b6aa885f..666deb2c419d7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; - @Getter @RequiredArgsConstructor public class TestSource implements RecommendationSource { @@ -36,13 +35,14 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return eligible; } @Override - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return contents; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java index 1757883f1a5a9..57fa51ffbdd90 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java @@ -1,32 +1,34 @@ package com.linkedin.metadata.search; -import java.time.temporal.ChronoUnit; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotSame; +import java.time.temporal.ChronoUnit; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; public class LineageSearchResultCacheKeyTest extends AbstractTestNGSpringContextTests { @Test public void testNulls() { // ensure no NPE - assertEquals(new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); + assertEquals( + new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); } @Test public void testDateTruncation() { // expect start of day milli - assertEquals(new EntityLineageResultCacheKey(null, null, 1679529600000L, - 1679615999999L, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, 1679530293000L, - 1679530293001L, null, ChronoUnit.DAYS)); - assertNotSame(new EntityLineageResultCacheKey(null, null, 1679529600000L, - 1679616000000L, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, 1679530293000L, - 1679530293001L, null, ChronoUnit.DAYS)); + assertEquals( + new EntityLineageResultCacheKey( + null, null, 1679529600000L, 1679615999999L, 
null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey( + null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); + assertNotSame( + new EntityLineageResultCacheKey( + null, null, 1679529600000L, 1679616000000L, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey( + null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java index 696e3b62834bd..079ec08462515 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java @@ -1,5 +1,22 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -46,11 +63,21 @@ import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.opensearch.client.RestHighLevelClient; import org.opensearch.action.search.SearchRequest; +import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; @@ -58,50 +85,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anySet; -import static 
org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - -abstract public class LineageServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class LineageServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -116,11 +115,13 @@ abstract public class LineageServiceTestBase extends AbstractTestNGSpringContext private static final Urn TEST_URN = TestEntityUtil.getTestEntityUrn(); private static final String TEST = "test"; private static final String TEST1 = "test1"; - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); @BeforeClass public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -137,20 +138,29 @@ public void setup() { } private void resetService(boolean withCache, boolean withLightingCache) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); entityDocCountCacheConfiguration.setTtlSeconds(600L); - SearchLineageCacheConfiguration searchLineageCacheConfiguration = new SearchLineageCacheConfiguration(); + SearchLineageCacheConfiguration searchLineageCacheConfiguration = + new SearchLineageCacheConfiguration(); searchLineageCacheConfiguration.setTtlSeconds(600L); searchLineageCacheConfiguration.setLightningThreshold(withLightingCache ? 
-1 : 300); - _lineageSearchService = spy(new LineageSearchService( - new SearchService( - new EntityDocCountCache(_entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), - cachingEntitySearchService, - new SimpleRanker()), - _graphService, _cacheManager.getCache("test"), withCache, searchLineageCacheConfiguration)); + _lineageSearchService = + spy( + new LineageSearchService( + new SearchService( + new EntityDocCountCache( + _entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), + cachingEntitySearchService, + new SimpleRanker()), + _graphService, + _cacheManager.getCache("test"), + withCache, + searchLineageCacheConfiguration)); } @BeforeMethod @@ -163,13 +173,27 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildEntitySearchService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention, _settingsBuilder); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); _searchClientSpy = spy(getSearchClient()); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, _searchClientSpy, _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, _searchClientSpy, _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); - ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, _searchClientSpy, _indexConvention, getBulkProcessor(), 1); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + _searchClientSpy, + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + _searchClientSpy, + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); + ESWriteDAO writeDAO = + new ESWriteDAO(_entityRegistry, _searchClientSpy, _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); } @@ -179,7 +203,8 @@ private void clearCache(boolean withLightingCache) { } private EntityLineageResult mockResult(List<LineageRelationship> lineageRelationships) { - return new EntityLineageResult().setRelationships(new LineageRelationshipArray(lineageRelationships)) + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageRelationships)) .setStart(0) .setCount(10) .setTotal(lineageRelationships.size()); @@ -187,18 +212,34 @@ private EntityLineageResult mockResult(List<LineageRelationship> lineageRelation @Test public void testSearchService() throws Exception { - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageSearchResult searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new 
LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); - //just testing null input does not throw any exception + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + // just testing null input does not throw any exception searchAcrossLineage(null, null); searchResult = searchAcrossLineage(null, TEST); @@ -216,16 +257,32 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); @@ -255,15 +312,25 @@ public void testSearchService() throws Exception { assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); // Verify that highlighting was turned off in the query - ArgumentCaptor<SearchRequest> searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); + ArgumentCaptor<SearchRequest> searchRequestCaptor = + ArgumentCaptor.forClass(SearchRequest.class); Mockito.verify(_searchClientSpy, times(1)).search(searchRequestCaptor.capture(), any()); SearchRequest capturedRequest = searchRequestCaptor.getValue(); assertNull(capturedRequest.source().highlighter()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); @@ -273,55 +340,136 @@ public void testSearchService() throws Exception { Mockito.reset(_graphService); // Case 1: Use the maxHops in the cache. 
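Because this hunk is formatting-only, the caching contract it exercises is easy to lose in the noise: the first searchAcrossLineage call should miss the cache and reach the graph service, and a second call with the same urn, direction, and maxHops should be served from the cache. A minimal sketch of that stub-then-verify pattern, reusing the _graphService mock, _lineageSearchService spy, and mockResult helper defined in this test class:

    // Stub the graph service for maxHops == 1000.
    when(_graphService.getLineage(
            eq(TEST_URN), eq(LineageDirection.DOWNSTREAM),
            anyInt(), anyInt(), eq(1000), eq(null), eq(null)))
        .thenReturn(mockResult(ImmutableList.of(
            new LineageRelationship().setDegree(3).setType("type").setEntity(urn))));

    // Two identical queries: the first populates the cache, the second hits it,
    // so the underlying graph service must be invoked exactly once.
    for (int attempt = 0; attempt < 2; attempt++) {
      _lineageSearchService.searchAcrossLineage(
          TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME),
          "test1", 1000, null, null, 0, 10, null, null,
          new SearchFlags().setSkipCache(false));
    }
    verify(_graphService, times(1))
        .getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM),
            anyInt(), anyInt(), eq(1000), eq(null), eq(null));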
- when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", 1000, null, null, 0, 10, null, null, - new SearchFlags().setSkipCache(false)); + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + 1000, + null, + null, + 0, + 10, + null, + null, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); // Case 2: Use the start and end time in the cache. 
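Case 2 covers the time-windowed variant of the same cache. As the LineageSearchResultCacheKeyTest hunk earlier in this patch shows, the key truncates the start and end millis to the configured ChronoUnit, so any two windows that fall inside the same UTC day resolve to one cache entry. A short restatement of that invariant, using the same constructor and constants as that test:

    // 1679530293000L is ~00:11:33 into the UTC day that starts at 1679529600000L;
    // with ChronoUnit.DAYS both windows truncate to the same
    // [start-of-day, end-of-day] range, so the two keys are equal.
    assertEquals(
        new EntityLineageResultCacheKey(
            null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS),
        new EntityLineageResultCacheKey(
            null, null, 1679529600000L, 1679615999999L, null, ChronoUnit.DAYS));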
- when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), "test1", - null, null, null, 0, 10, 0L, 1L, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + "test1", + null, + null, + null, + 0, + 10, + 0L, + 1L, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", null, null, null, 0, 10, 0L, 1L, - new SearchFlags().setSkipCache(false)); + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + null, + null, + null, + 0, + 10, + 0L, + 1L, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); clearCache(false); @@ -330,19 +478,28 @@ public void testSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); - } @Test public void testScrollAcrossLineage() throws Exception { - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageScrollResult scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); assertNull(scrollResult.getScrollId()); @@ -351,9 +508,18 @@ public void 
testScrollAcrossLineage() throws Exception { assertNull(scrollResult.getScrollId()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); // just testing null input does not throw any exception scrollAcrossLineage(null, null); @@ -374,17 +540,33 @@ public void testScrollAcrossLineage() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); assertEquals(scrollResult.getEntities().size(), 0); assertNull(scrollResult.getScrollId()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 1); assertEquals(scrollResult.getEntities().get(0).getEntity(), urn); @@ -407,9 +589,12 @@ public void testScrollAcrossLineage() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); @@ -426,15 +611,31 @@ public void testLightningSearchService() throws Exception { // Enable lightning resetService(true, true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageSearchResult searchResult = searchAcrossLineage(null, testStar); 
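For context on the assertions that follow: resetService(true, true) sets the lightning threshold to -1 (versus 300 when lightning is disabled), which forces every query down the "lightning" path that answers from the in-memory graph relationships instead of querying the search index; that is why the spy counts getLightningSearchResult invocations rather than search requests. A hypothetical sketch of the gate, inferred from the threshold configuration above rather than taken from the service source (lineageResult and cacheConfiguration are illustrative names):

    // Illustrative only: when the graph returns more relationships than the
    // configured lightning threshold, skip Elasticsearch and build the result
    // directly from the graph edges.
    if (lineageResult.getRelationships().size() > cacheConfiguration.getLightningThreshold()) {
      return getLightningSearchResult(lineageRelationships, filter, from, size, entityNames);
    }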
assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); clearCache(true); @@ -448,32 +649,51 @@ public void testLightningSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(0).getDegree().intValue(), 1); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); searchResult = searchAcrossLineage(QueryUtils.newFilter("degree.keyword", "1"), testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(0).getDegree().intValue(), 1); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); searchResult = searchAcrossLineage(QueryUtils.newFilter("degree.keyword", "2"), testStar); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); // resets spy Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -488,16 +708,27 @@ 
public void testLightningSearchService() throws Exception { searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().size(), 1); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); // Test Cache Behavior @@ -505,59 +736,144 @@ public void testLightningSearchService() throws Exception { reset(_lineageSearchService); // Case 1: Use the maxHops in the cache. - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, - new SearchFlags().setSkipCache(false)); + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new 
SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); - + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Case 2: Use the start and end time in the cache. - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), "*", - null, null, null, 0, 10, 0L, 1L, - new SearchFlags().setSkipCache(false)); + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + "*", + null, + null, + null, + 0, + 10, + 0L, + 1L, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", null, null, null, 0, 10, 0L, 1L, + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + null, + null, + null, + 0, + 10, + 0L, + 1L, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(4)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(4)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); /* * Test filtering @@ -566,70 +882,163 @@ public void testLightningSearchService() throws Exception { // Entity searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + 
TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(DATASET_ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(DATASET_ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); // Platform ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue("urn:li:dataPlatform:kafka").setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion() + .setField("platform") + .setValue("urn:li:dataPlatform:kafka") + .setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion degreeCrit = new Criterion().setField("degree.keyword").setValue("2").setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(degreeCrit)))); + Criterion degreeCrit = + new Criterion().setField("degree.keyword").setValue("2").setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(degreeCrit)))); Filter filter = new Filter().setOr(conCritArr); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, filter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, filter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + 
LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(4)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(4)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); // Environment Filter originFilter = QueryUtils.newFilter("origin", "PROD"); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, originFilter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + originFilter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(5)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(5)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, originFilter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + originFilter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(6)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(6)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); @@ -640,13 +1049,15 @@ public void testLightningSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); - } @Test @@ -660,11 +1071,13 @@ public void testLightningEnvFiltering() throws Exception { 
platformCounts.put(kafkaPlatform, 200); platformCounts.put(hivePlatform, 50); platformCounts.put(bigQueryPlatform, 100); - List<LineageRelationship> prodLineageRelationships = constructGraph(platformCounts, FabricType.PROD); + List<LineageRelationship> prodLineageRelationships = + constructGraph(platformCounts, FabricType.PROD); // DEV platformCounts.put(kafkaPlatform, 300); - List<LineageRelationship> devLineageRelationships = constructGraph(platformCounts, FabricType.DEV); + List<LineageRelationship> devLineageRelationships = + constructGraph(platformCounts, FabricType.DEV); List<LineageRelationship> lineageRelationships = new ArrayList<>(); lineageRelationships.addAll(prodLineageRelationships); @@ -675,49 +1088,67 @@ public void testLightningEnvFiltering() throws Exception { int size = 10; Set<String> entityNames = Collections.emptySet(); - LineageSearchResult lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + LineageSearchResult lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); // assert that we have the right aggs per env - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("DEV")).findFirst().get(), Long.valueOf(300)); - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("PROD")).findFirst().get(), Long.valueOf(200)); + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("DEV")) + .findFirst() + .get(), + Long.valueOf(300)); + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("PROD")) + .findFirst() + .get(), + Long.valueOf(200)); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion originCrit = new Criterion().setField("origin").setValue("DEV").setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); + Criterion originCrit = + new Criterion().setField("origin").setValue("DEV").setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); from = 500; size = 10; filter = new 
Filter().setOr(conCritArr); - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); // assert that if the query has an env filter, it is applied correctly - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("DEV")).findFirst().get(), Long.valueOf(300)); - assertTrue(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") && x.getAggregations().containsKey("PROD")) - .collect(Collectors.toList()).isEmpty()); - - + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("DEV")) + .findFirst() + .get(), + Long.valueOf(300)); + assertTrue( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin") && x.getAggregations().containsKey("PROD")) + .collect(Collectors.toList()) + .isEmpty()); } - @Test public void testLightningPagination() throws Exception { Map<String, Integer> platformCounts = new HashMap<>(); @@ -731,35 +1162,41 @@ public void testLightningPagination() throws Exception { List<LineageRelationship> lineageRelationships = constructGraph(platformCounts); - Filter filter = QueryUtils.newFilter("platform", kafkaPlatform); int from = 0; int size = 10; Set<String> entityNames = Collections.emptySet(); - LineageSearchResult lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + LineageSearchResult lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); from = 50; size = 20; - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 20); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name50"); - + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name50"); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); - Criterion 
platform2Crit = new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform2Crit = + new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); critArr = new CriterionArray(ImmutableList.of(platform2Crit)); @@ -768,25 +1205,31 @@ public void testLightningPagination() throws Exception { from = 500; size = 10; filter = new Filter().setOr(conCritArr); - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(600)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), hivePlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); - + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), hivePlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); // Verify aggregations from = 0; size = 10; - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - null, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, null, from, size, entityNames); // Static Degree agg is the first element - LongMap platformAggs = lineageSearchResult.getMetadata().getAggregations().get(1).getAggregations(); - LongMap entityTypeAggs = lineageSearchResult.getMetadata().getAggregations().get(2).getAggregations(); - LongMap environmentAggs = lineageSearchResult.getMetadata().getAggregations().get(3).getAggregations(); + LongMap platformAggs = + lineageSearchResult.getMetadata().getAggregations().get(1).getAggregations(); + LongMap entityTypeAggs = + lineageSearchResult.getMetadata().getAggregations().get(2).getAggregations(); + LongMap environmentAggs = + lineageSearchResult.getMetadata().getAggregations().get(3).getAggregations(); assertEquals(platformAggs.get(kafkaPlatform), Long.valueOf(500)); assertEquals(platformAggs.get(hivePlatform), Long.valueOf(100)); assertEquals(platformAggs.get(bigQueryPlatform), Long.valueOf(200)); @@ -798,18 +1241,21 @@ private List<LineageRelationship> constructGraph(Map<String, Integer> platformCo return constructGraph(platformCounts, FabricType.PROD); } - private List<LineageRelationship> constructGraph(Map<String, Integer> platformCounts, final FabricType env) { + private List<LineageRelationship> constructGraph( + Map<String, Integer> platformCounts, final FabricType env) { List<LineageRelationship> lineageRelationships = new ArrayList<>(); - platformCounts.forEach((key, value) -> { - for (int i = 0; i < value; i++) { - try { - lineageRelationships.add( - constructLineageRelationship(new DatasetUrn(DataPlatformUrn.createFromString(key), "name" + i, env))); - } catch (URISyntaxException e) { - throw new RuntimeException(e); + platformCounts.forEach( + (key, value) -> { + for (int i = 0; i < value; i++) { + try { + lineageRelationships.add( + 
constructLineageRelationship( + new DatasetUrn(DataPlatformUrn.createFromString(key), "name" + i, env))); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } } - } - }); + }); return lineageRelationships; } @@ -820,19 +1266,40 @@ private LineageRelationship constructLineageRelationship(Urn urn) { .setType("DOWNSTREAM") .setDegree(1) .setPaths(new UrnArrayArray()); - } // Convenience method to reduce spots where we're sending the same params private LineageSearchResult searchAcrossLineage(@Nullable Filter filter, @Nullable String input) { - return _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, - null, filter, null, 0, 10, null, null, + return _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + input, + null, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(true)); } - private LineageScrollResult scrollAcrossLineage(@Nullable Filter filter, @Nullable String input, String scrollId, int size) { - return _lineageSearchService.scrollAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, - null, filter, null, scrollId, "5m", size, null, null, + private LineageScrollResult scrollAcrossLineage( + @Nullable Filter filter, @Nullable String input, String scrollId, int size) { + return _lineageSearchService.scrollAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + input, + null, + filter, + null, + scrollId, + "5m", + size, + null, + null, new SearchFlags().setSkipCache(true)); } @@ -851,29 +1318,39 @@ public void testCanDoLightning() throws Exception { platformCounts.put(hivePlatform, 100); platformCounts.put(bigQueryPlatform, 200); - List<LineageRelationship> lineageRelationships = constructGraph(platformCounts, FabricType.PROD); + List<LineageRelationship> lineageRelationships = + constructGraph(platformCounts, FabricType.PROD); Filter filter = QueryUtils.newFilter("platform", kafkaPlatform); int from = 0; int size = 10; Set<String> entityNames = Collections.emptySet(); - Assert.assertTrue(_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); + Assert.assertTrue( + _lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); - Criterion platform2Crit = new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform2Crit = + new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); critArr = new CriterionArray(ImmutableList.of(platform2Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion originCrit = new Criterion().setField("origin").setValue(FabricType.PROD.name()).setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); + Criterion originCrit = + new Criterion() + .setField("origin") + .setValue(FabricType.PROD.name()) + .setCondition(Condition.EQUAL); + conCritArr.add( + new 
ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); from = 500; size = 10; filter = new Filter().setOr(conCritArr); - Assert.assertTrue(_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); + Assert.assertTrue( + _lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java index c0144d36843f5..71f35adabce36 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -32,6 +36,7 @@ import com.linkedin.metadata.search.ranker.SimpleRanker; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; @@ -40,29 +45,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - - -abstract public class SearchServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class SearchServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -85,18 +83,18 @@ public void setup() { } private void resetSearchService() { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - _cacheManager, - _elasticSearchService, - 100, - true); + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); entityDocCountCacheConfiguration.setTtlSeconds(600L); - _searchService = new 
SearchService( - new EntityDocCountCache(_entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), - cachingEntitySearchService, - new SimpleRanker()); + _searchService = + new SearchService( + new EntityDocCountCache( + _entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), + cachingEntitySearchService, + new SimpleRanker()); } @BeforeMethod @@ -108,13 +106,26 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildEntitySearchService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention, _settingsBuilder); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, getSearchClient(), _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); - ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, - getBulkProcessor(), 1); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); + ESWriteDAO writeDAO = + new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); } @@ -126,11 +137,18 @@ private void clearCache() { @Test public void testSearchService() throws Exception { SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true).setSkipCache(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(true).setSkipCache(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -143,8 +161,9 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -158,8 +177,9 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "'test2'", null, - null, 
0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); clearCache(); @@ -170,37 +190,46 @@ public void testSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "'test2'", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); } @Test public void testAdvancedSearchOr() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Criterion subtypeCriterion = new Criterion() - .setField("subtypes") - .setCondition(Condition.EQUAL) - .setValue("") - .setValues(new StringArray(ImmutableList.of("view"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(subtypeCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Criterion subtypeCriterion = + new Criterion() + .setField("subtypes") + .setCondition(Condition.EQUAL) + .setValue("") + .setValues(new StringArray(ImmutableList.of("view"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))), + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(subtypeCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -237,8 +266,15 @@ public void testAdvancedSearchOr() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 2); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(1).getEntity(), urn2); @@ -247,28 +283,38 @@ public void testAdvancedSearchOr() throws Exception { @Test public void testAdvancedSearchSoftDelete() throws Exception { - final Criterion filterCriterion = new Criterion() - 
.setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Criterion removedCriterion = new Criterion() - .setField("removed") - .setCondition(Condition.EQUAL) - .setValue("") - .setValues(new StringArray(ImmutableList.of("true"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion, removedCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Criterion removedCriterion = + new Criterion() + .setField("removed") + .setCondition(Condition.EQUAL) + .setValue("") + .setValues(new StringArray(ImmutableList.of("true"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(filterCriterion, removedCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -308,8 +354,15 @@ public void testAdvancedSearchSoftDelete() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -317,23 +370,30 @@ public void testAdvancedSearchSoftDelete() throws Exception { @Test public void testAdvancedSearchNegated() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setNegated(true) + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -373,8 +433,15 @@ public void testAdvancedSearchNegated() throws 
Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn3); clearCache(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java index a4c359b3595c2..b544faa061f0e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -23,6 +27,8 @@ import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.util.List; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; @@ -30,29 +36,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.List; - -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - -abstract public class TestEntityTestBase extends AbstractTestNGSpringContextTests { +public abstract class TestEntityTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -72,7 +71,8 @@ public void setup() { @BeforeClass public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -84,10 +84,24 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildService() { EntityIndexBuilders 
indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, getSearchClient(), _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); @@ -95,12 +109,18 @@ private ElasticSearchService buildService() { @Test public void testElasticSearchServiceStructuredQuery() throws Exception { - SearchResult searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + SearchResult searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); BrowseResult browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -112,10 +132,20 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "foreignKey:Node", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), + "foreignKey:Node", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); @@ -125,7 +155,9 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); 
assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -137,7 +169,9 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); @@ -148,23 +182,33 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); } @Test public void testElasticSearchServiceFulltext() throws Exception { - SearchResult searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + SearchResult searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); @@ -177,13 +221,17 @@ public void testElasticSearchServiceFulltext() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new 
SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), - ImmutableMap.of("textFieldOverride", 1L)); + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); ObjectNode document2 = JsonNodeFactory.instance.objectNode(); @@ -194,21 +242,31 @@ public void testElasticSearchServiceFulltext() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), - ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java index 354b7dc5f609e..175c48e198185 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.cache; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.Streams; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; @@ -18,18 +21,19 @@ import org.springframework.cache.concurrent.ConcurrentMapCacheManager; import org.testng.annotations.Test; -import static 
org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class CacheableSearcherTest { private CacheManager cacheManager = new ConcurrentMapCacheManager(); @Test public void testCacheableSearcherWhenEmpty() { CacheableSearcher<Integer> emptySearcher = - new CacheableSearcher<>(cacheManager.getCache("emptySearcher"), 10, this::getEmptySearchResult, - CacheableSearcher.QueryPagination::getFrom, null, true); + new CacheableSearcher<>( + cacheManager.getCache("emptySearcher"), + 10, + this::getEmptySearchResult, + CacheableSearcher.QueryPagination::getFrom, + null, + true); assertTrue(emptySearcher.getSearchResults(0, 0).getEntities().isEmpty()); assertTrue(emptySearcher.getSearchResults(0, 10).getEntities().isEmpty()); assertTrue(emptySearcher.getSearchResults(5, 10).getEntities().isEmpty()); @@ -38,8 +42,13 @@ public void testCacheableSearcherWhenEmpty() { @Test public void testCacheableSearcherWithFixedNumResults() { CacheableSearcher<Integer> fixedBatchSearcher = - new CacheableSearcher<>(cacheManager.getCache("fixedBatchSearcher"), 10, qs -> getSearchResult(qs, 10), - CacheableSearcher.QueryPagination::getFrom, null, true); + new CacheableSearcher<>( + cacheManager.getCache("fixedBatchSearcher"), + 10, + qs -> getSearchResult(qs, 10), + CacheableSearcher.QueryPagination::getFrom, + null, + true); SearchResult result = fixedBatchSearcher.getSearchResults(0, 0); assertTrue(result.getEntities().isEmpty()); @@ -48,21 +57,28 @@ public void testCacheableSearcherWithFixedNumResults() { result = fixedBatchSearcher.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); result = fixedBatchSearcher.getSearchResults(5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) + .collect(Collectors.toList())); } @Test public void testCacheableSearcherWithVariableNumResults() { CacheableSearcher<Integer> variableBatchSearcher = - new CacheableSearcher<>(cacheManager.getCache("variableBatchSearcher"), 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, null, + new CacheableSearcher<>( + cacheManager.getCache("variableBatchSearcher"), + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, true); SearchResult result = variableBatchSearcher.getSearchResults(0, 0); @@ -72,21 +88,30 @@ public void testCacheableSearcherWithVariableNumResults() { result = variableBatchSearcher.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); result = 
variableBatchSearcher.getSearchResults(5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) + .collect(Collectors.toList())); result = variableBatchSearcher.getSearchResults(5, 100); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 100); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 20).stream(), getUrns(0, 30).stream(), - getUrns(0, 40).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat( + getUrns(5, 10).stream(), + getUrns(0, 20).stream(), + getUrns(0, 30).stream(), + getUrns(0, 40).stream(), + getUrns(0, 5).stream()) + .collect(Collectors.toList())); } @Test @@ -94,26 +119,36 @@ public void testCacheableSearcherEnabled() { // Verify cache is not interacted with when cache disabled Cache mockCache = Mockito.mock(Cache.class); CacheableSearcher<Integer> cacheDisabled = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, null, + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, false); SearchResult result = cacheDisabled.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verifyNoInteractions(mockCache); Mockito.reset(mockCache); // Verify cache is updated when cache enabled, but skip cache passed through CacheableSearcher<Integer> skipCache = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(true), true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + new SearchFlags().setSkipCache(true), + true); result = skipCache.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(0)).get(Mockito.any(), Mockito.any(Class.class)); @@ -121,13 +156,18 @@ public void testCacheableSearcherEnabled() { // Test cache hit when searchFlags is null CacheableSearcher<Integer> nullFlags = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, 
qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - null, true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, + true); result = nullFlags.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(1)).get(Mockito.any(), Mockito.any(Class.class)); @@ -135,20 +175,26 @@ public void testCacheableSearcherEnabled() { // Test cache hit when skipCache is false CacheableSearcher<Integer> useCache = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(false), true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + new SearchFlags().setSkipCache(false), + true); result = useCache.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(1)).get(Mockito.any(), Mockito.any(Class.class)); } private SearchResult getEmptySearchResult(CacheableSearcher.QueryPagination queryPagination) { - return new SearchResult().setEntities(new SearchEntityArray()) + return new SearchResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setFrom(queryPagination.getFrom()) .setPageSize(queryPagination.getSize()) @@ -161,11 +207,15 @@ private List<Urn> getUrns(int start, int end) { .collect(Collectors.toList()); } - private SearchResult getSearchResult(CacheableSearcher.QueryPagination queryPagination, int batchSize) { + private SearchResult getSearchResult( + CacheableSearcher.QueryPagination queryPagination, int batchSize) { assert (batchSize <= queryPagination.getSize()); List<SearchEntity> entities = - getUrns(0, batchSize).stream().map(urn -> new SearchEntity().setEntity(urn)).collect(Collectors.toList()); - return new SearchResult().setEntities(new SearchEntityArray(entities)) + getUrns(0, batchSize).stream() + .map(urn -> new SearchEntity().setEntity(urn)) + .collect(Collectors.toList()); + return new SearchResult() + .setEntities(new SearchEntityArray(entities)) .setNumEntities(1000) .setFrom(queryPagination.getFrom()) .setPageSize(queryPagination.getSize()) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java index 750423a024dcc..0810bbc9d19f8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java @@ -7,26 +7,26 @@ import 
org.testcontainers.containers.GenericContainer; import org.testng.annotations.AfterSuite; - @TestConfiguration public class ElasticSearchSuite extends AbstractTestNGSpringContextTests { - private static final ElasticsearchTestContainer ELASTICSEARCH_TEST_CONTAINER; - private static GenericContainer<?> container; - static { - ELASTICSEARCH_TEST_CONTAINER = new ElasticsearchTestContainer(); - } + private static final ElasticsearchTestContainer ELASTICSEARCH_TEST_CONTAINER; + private static GenericContainer<?> container; - @AfterSuite - public void after() { - ELASTICSEARCH_TEST_CONTAINER.stopContainer(); - } + static { + ELASTICSEARCH_TEST_CONTAINER = new ElasticsearchTestContainer(); + } + + @AfterSuite + public void after() { + ELASTICSEARCH_TEST_CONTAINER.stopContainer(); + } - @Bean(name = "testSearchContainer") - public GenericContainer<?> testSearchContainer() { - if (container == null) { - container = ELASTICSEARCH_TEST_CONTAINER.startContainer(); - } - return container; + @Bean(name = "testSearchContainer") + public GenericContainer<?> testSearchContainer() { + if (container == null) { + container = ELASTICSEARCH_TEST_CONTAINER.startContainer(); } + return container; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java index cfacd4c15409a..ea5b9a74b476e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java @@ -1,9 +1,11 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.search.fixtures.GoldenTestBase; -import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.fixtures.GoldenTestBase; +import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; @@ -11,34 +13,35 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class GoldenElasticSearchTest extends GoldenTestBase { - @Autowired - @Qualifier("longTailSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; - - - @NotNull - @Override - protected EntityRegistry getEntityRegistry() { - return entityRegistry; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - assertNotNull(searchService); - } + @Autowired + @Qualifier("longTailSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; + + @NotNull + @Override + protected EntityRegistry getEntityRegistry() { + return 
entityRegistry; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + assertNotNull(searchService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java index 20f4ee52f0e62..911a21767bdea 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.search.indexbuilder.IndexBuilderTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; @@ -8,23 +10,19 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class IndexBuilderElasticSearchTest extends IndexBuilderTestBase { - @Autowired - private RestHighLevelClient _searchClient; + @Autowired private RestHighLevelClient _searchClient; - @NotNull - @Override - protected RestHighLevelClient getSearchClient() { - return _searchClient; - } + @NotNull + @Override + protected RestHighLevelClient getSearchClient() { + return _searchClient; + } - @Test - public void initTest() { - assertNotNull(_searchClient); - } + @Test + public void initTest() { + assertNotNull(_searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java index 0cb49bc555421..1fed3380a342d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; -import io.datahubproject.test.fixtures.search.SearchLineageFixtureConfiguration; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; +import io.datahubproject.test.fixtures.search.SearchLineageFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; @@ -12,32 +12,35 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({ElasticSearchSuite.class, SearchLineageFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchLineageFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageDataFixtureElasticSearchTest extends LineageDataFixtureTestBase { - @Autowired - @Qualifier("searchLineageSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("searchLineageLineageSearchService") - protected 
LineageSearchService lineageService; - - @NotNull - @Override - protected LineageSearchService getLineageService() { - return lineageService; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - AssertJUnit.assertNotNull(lineageService); - } + @Autowired + @Qualifier("searchLineageSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("searchLineageLineageSearchService") + protected LineageSearchService lineageService; + + @NotNull + @Override + protected LineageSearchService getLineageService() { + return lineageService; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + AssertJUnit.assertNotNull(lineageService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java index 613ec5a26ff66..8c4195f9ff534 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.search.elasticsearch; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.search.LineageServiceTestBase; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import com.linkedin.metadata.search.LineageServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; @@ -14,20 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageServiceElasticSearchTest extends LineageServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java index 855f46d239118..eea352a866042 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java @@ -1,11 +1,12 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + 
import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.SampleDataFixtureTestBase; import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; - import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import lombok.Getter; import org.opensearch.client.RestHighLevelClient; @@ -14,32 +15,30 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - -/** - * Runs sample data fixture tests for Elasticsearch test container - */ +/** Runs sample data fixture tests for Elasticsearch test container */ @Getter -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SampleDataFixtureElasticSearchTest extends SampleDataFixtureTestBase { - @Autowired - private RestHighLevelClient searchClient; + @Autowired private RestHighLevelClient searchClient; - @Autowired - @Qualifier("sampleDataSearchService") - protected SearchService searchService; + @Autowired + @Qualifier("sampleDataSearchService") + protected SearchService searchService; - @Autowired - @Qualifier("sampleDataEntityClient") - protected EntityClient entityClient; + @Autowired + @Qualifier("sampleDataEntityClient") + protected EntityClient entityClient; - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; - @Test - public void initTest() { - assertNotNull(searchClient); - } + @Test + public void initTest() { + assertNotNull(searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java index 1a6a20cd9df9d..e5af1978be5d2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java @@ -1,29 +1,29 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.search.query.SearchDAOTestBase; -import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; - +import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import lombok.Getter; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Import; - import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Getter -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + 
SearchTestContainerConfiguration.class +}) public class SearchDAOElasticSearchTest extends SearchDAOTestBase { - @Autowired - private RestHighLevelClient searchClient; - @Autowired - private SearchConfiguration searchConfiguration; + @Autowired private RestHighLevelClient searchClient; + @Autowired private SearchConfiguration searchConfiguration; + @Autowired @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java index a9e9feac28007..7133971847f98 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.search.elasticsearch; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.search.SearchServiceTestBase; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import com.linkedin.metadata.search.SearchServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchServiceElasticSearchTest extends SearchServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java index 7365887fb9b2e..a23cd5b051ecb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.systemmetadata.SystemMetadataServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import 
com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.systemmetadata.SystemMetadataServiceTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -11,16 +11,12 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class SystemMetadataServiceElasticSearchTest extends SystemMetadataServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java index bec610b20dca1..843da17fbd132 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class TestEntityElasticSearchTest extends TestEntityTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java index 5b85904edc923..6ebe42d0181e4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.timeseries.search.TimeseriesAspectServiceTestBase; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.timeseries.search.TimeseriesAspectServiceTestBase; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import 
org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; @@ -14,12 +14,9 @@ @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class TimeseriesAspectServiceElasticSearchTest extends TimeseriesAspectServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index ed81f3cebd027..fba9d5359d29f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.fixtures; +import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities; +import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; +import static org.testng.Assert.assertTrue; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; @@ -8,151 +13,165 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; -import static org.testng.Assert.assertTrue; -import static org.testng.AssertJUnit.assertNotNull; +public abstract class GoldenTestBase extends AbstractTestNGSpringContextTests { + + private static final List<String> SEARCHABLE_LONGTAIL_ENTITIES = + Stream.of( + EntityType.CHART, + EntityType.CONTAINER, + EntityType.DASHBOARD, + EntityType.DATASET, + EntityType.DOMAIN, + EntityType.TAG) + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + @Nonnull + protected abstract EntityRegistry getEntityRegistry(); -abstract public class GoldenTestBase extends AbstractTestNGSpringContextTests { - - private static final List<String> SEARCHABLE_LONGTAIL_ENTITIES = Stream.of(EntityType.CHART, EntityType.CONTAINER, - EntityType.DASHBOARD, EntityType.DATASET, EntityType.DOMAIN, EntityType.TAG - ).map(EntityTypeMapper::getName) - .collect(Collectors.toList()); - - @Nonnull - abstract protected EntityRegistry getEntityRegistry(); - - @Nonnull - abstract protected SearchService getSearchService(); - - @Test - public void testNameMatchPetProfiles() { - /* - Searching for "pet profiles" should return "pet_profiles" as the first 2 search results - */ - assertNotNull(getSearchService()); - assertNotNull(getEntityRegistry()); - SearchResult 
searchResult = searchAcrossCustomEntities(getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - assertTrue(firstResultUrn.toString().contains("pet_profiles")); - assertTrue(secondResultUrn.toString().contains("pet_profiles")); - } - - @Test - public void testNameMatchPetProfile() { - /* - Searching for "pet profile" should return "pet_profiles" as the first 2 search results - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - assertTrue(firstResultUrn.toString().contains("pet_profiles")); - assertTrue(secondResultUrn.toString().contains("pet_profiles")); - } - - @Test - public void testGlossaryTerms() { - /* - Searching for "ReturnRate" should return all tables that have the glossary term applied before - anything else - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES); - SearchEntityArray entities = searchResult.getEntities(); - assertTrue(searchResult.getEntities().size() >= 4); - MatchedFieldArray firstResultMatchedFields = entities.get(0).getMatchedFields(); - MatchedFieldArray secondResultMatchedFields = entities.get(1).getMatchedFields(); - MatchedFieldArray thirdResultMatchedFields = entities.get(2).getMatchedFields(); - MatchedFieldArray fourthResultMatchedFields = entities.get(3).getMatchedFields(); - - assertTrue(firstResultMatchedFields.toString().contains("ReturnRate")); - assertTrue(secondResultMatchedFields.toString().contains("ReturnRate")); - assertTrue(thirdResultMatchedFields.toString().contains("ReturnRate")); - assertTrue(fourthResultMatchedFields.toString().contains("ReturnRate")); - } - - @Test - public void testNameMatchPartiallyQualified() { - /* - Searching for "analytics.pet_details" (partially qualified) should return the fully qualified table - name as the first search results before any others - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - assertTrue(firstResultUrn.toString().contains("snowflake,long_tail_companions.analytics.pet_details")); - assertTrue(secondResultUrn.toString().contains("dbt,long_tail_companions.analytics.pet_details")); - } - - @Test - public void testNameMatchCollaborativeActionitems() { - /* - Searching for "collaborative actionitems" should return "collaborative_actionitems" as the first search - result, followed by "collaborative_actionitems_old" - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - 
// Checks that the table name is not suffixed with anything - assertTrue(firstResultUrn.toString().contains("collaborative_actionitems,")); - assertTrue(secondResultUrn.toString().contains("collaborative_actionitems_old")); - - Double firstResultScore = searchResult.getEntities().get(0).getScore(); - Double secondResultScore = searchResult.getEntities().get(1).getScore(); - - // Checks that the scores aren't tied so that we are matching on table name more than column name - assertTrue(firstResultScore > secondResultScore); - } - - @Test - public void testNameMatchCustomerOrders() { - /* - Searching for "customer orders" should return "customer_orders" as the first search - result, not suffixed by anything - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - - // Checks that the table name is not suffixed with anything - assertTrue(firstResultUrn.toString().contains("customer_orders,"), - "Expected firstResultUrn to contain `customer_orders,` but results are " - + searchResult.getEntities().stream() - .map(e -> String.format("(Score: %s Urn: %s)", e.getScore(), e.getEntity().getId())) - .collect(Collectors.joining(", "))); - - Double firstResultScore = searchResult.getEntities().get(0).getScore(); - Double secondResultScore = searchResult.getEntities().get(1).getScore(); - - // Checks that the scores aren't tied so that we are matching on table name more than column name - assertTrue(firstResultScore > secondResultScore); - } + @Nonnull + protected abstract SearchService getSearchService(); + @Test + public void testNameMatchPetProfiles() { + /* + Searching for "pet profiles" should return "pet_profiles" as the first 2 search results + */ + assertNotNull(getSearchService()); + assertNotNull(getEntityRegistry()); + SearchResult searchResult = + searchAcrossCustomEntities( + getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + assertTrue(firstResultUrn.toString().contains("pet_profiles")); + assertTrue(secondResultUrn.toString().contains("pet_profiles")); + } + + @Test + public void testNameMatchPetProfile() { + /* + Searching for "pet profile" should return "pet_profiles" as the first 2 search results + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + assertTrue(firstResultUrn.toString().contains("pet_profiles")); + assertTrue(secondResultUrn.toString().contains("pet_profiles")); + } + + @Test + public void testGlossaryTerms() { + /* + Searching for "ReturnRate" should return all tables that have the glossary term applied before + anything else + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES); + SearchEntityArray entities = searchResult.getEntities(); + assertTrue(searchResult.getEntities().size() >= 4); + MatchedFieldArray 
firstResultMatchedFields = entities.get(0).getMatchedFields(); + MatchedFieldArray secondResultMatchedFields = entities.get(1).getMatchedFields(); + MatchedFieldArray thirdResultMatchedFields = entities.get(2).getMatchedFields(); + MatchedFieldArray fourthResultMatchedFields = entities.get(3).getMatchedFields(); + + assertTrue(firstResultMatchedFields.toString().contains("ReturnRate")); + assertTrue(secondResultMatchedFields.toString().contains("ReturnRate")); + assertTrue(thirdResultMatchedFields.toString().contains("ReturnRate")); + assertTrue(fourthResultMatchedFields.toString().contains("ReturnRate")); + } + + @Test + public void testNameMatchPartiallyQualified() { + /* + Searching for "analytics.pet_details" (partially qualified) should return the fully qualified table + name as the first search results before any others + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + assertTrue( + firstResultUrn.toString().contains("snowflake,long_tail_companions.analytics.pet_details")); + assertTrue( + secondResultUrn.toString().contains("dbt,long_tail_companions.analytics.pet_details")); + } + + @Test + public void testNameMatchCollaborativeActionitems() { + /* + Searching for "collaborative actionitems" should return "collaborative_actionitems" as the first search + result, followed by "collaborative_actionitems_old" + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + // Checks that the table name is not suffixed with anything + assertTrue(firstResultUrn.toString().contains("collaborative_actionitems,")); + assertTrue(secondResultUrn.toString().contains("collaborative_actionitems_old")); + + Double firstResultScore = searchResult.getEntities().get(0).getScore(); + Double secondResultScore = searchResult.getEntities().get(1).getScore(); + + // Checks that the scores aren't tied so that we are matching on table name more than column + // name + assertTrue(firstResultScore > secondResultScore); + } + + @Test + public void testNameMatchCustomerOrders() { /* - Tests that should pass but do not yet can be added below here, with the following annotation: - @Test(enabled = false) - */ + Searching for "customer orders" should return "customer_orders" as the first search + result, not suffixed by anything + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + + // Checks that the table name is not suffixed with anything + assertTrue( + firstResultUrn.toString().contains("customer_orders,"), + "Expected firstResultUrn to contain `customer_orders,` but results are " + + searchResult.getEntities().stream() + .map(e -> String.format("(Score: %s Urn: %s)", e.getScore(), e.getEntity().getId())) + .collect(Collectors.joining(", "))); + + Double 
firstResultScore = searchResult.getEntities().get(0).getScore(); + Double secondResultScore = searchResult.getEntities().get(1).getScore(); + + // Checks that the scores aren't tied so that we are matching on table name more than column + // name + assertTrue(firstResultScore > secondResultScore); + } + + /* + Tests that should pass but do not yet can be added below here, with the following annotation: + @Test(enabled = false) + */ } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java index eaf8feedeb6ed..44fe5ea8ac9ae 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java @@ -1,61 +1,64 @@ package com.linkedin.metadata.search.fixtures; +import static io.datahubproject.test.search.SearchTestUtils.lineage; +import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; +import java.net.URISyntaxException; +import javax.annotation.Nonnull; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.net.URISyntaxException; - -import static io.datahubproject.test.search.SearchTestUtils.lineage; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - -abstract public class LineageDataFixtureTestBase extends AbstractTestNGSpringContextTests { - - @Nonnull - abstract protected LineageSearchService getLineageService(); +public abstract class LineageDataFixtureTestBase extends AbstractTestNGSpringContextTests { - @Nonnull - abstract protected SearchService getSearchService(); + @Nonnull + protected abstract LineageSearchService getLineageService(); + @Nonnull + protected abstract SearchService getSearchService(); - @Test - public void testFixtureInitialization() { - assertNotNull(getSearchService()); - SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); - assertEquals(noResult.getEntities().size(), 0); + @Test + public void testFixtureInitialization() { + assertNotNull(getSearchService()); + SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); + assertEquals(noResult.getEntities().size(), 0); - SearchResult result = searchAcrossEntities(getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8"); - assertEquals(result.getEntities().size(), 1); + SearchResult result = + searchAcrossEntities( + getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8"); + assertEquals(result.getEntities().size(), 1); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + 
"urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); - LineageSearchResult lineageResult = lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1); - assertEquals(lineageResult.getEntities().size(), 10); - } + LineageSearchResult lineageResult = + lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1); + assertEquals(lineageResult.getEntities().size(), 10); + } - @Test - public void testDatasetLineage() throws URISyntaxException { - Urn testUrn = Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); + @Test + public void testDatasetLineage() throws URISyntaxException { + Urn testUrn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); - // 1 hops - LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1); - assertEquals(lineageResult.getEntities().size(), 10); + // 1 hops + LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1); + assertEquals(lineageResult.getEntities().size(), 10); - // 2 hops - lineageResult = lineage(getLineageService(), testUrn, 2); - assertEquals(lineageResult.getEntities().size(), 5); + // 2 hops + lineageResult = lineage(getLineageService(), testUrn, 2); + assertEquals(lineageResult.getEntities().size(), 5); - // 3 hops - lineageResult = lineage(getLineageService(), testUrn, 3); - assertEquals(lineageResult.getEntities().size(), 12); - } + // 3 hops + lineageResult = lineage(getLineageService(), testUrn, 3); + assertEquals(lineageResult.getEntities().size(), 12); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java index 69dd5c80bef1d..a1af2325ee0ed 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java @@ -1,5 +1,16 @@ package com.linkedin.metadata.search.fixtures; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder.STRUCTURED_QUERY_PREFIX; +import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; +import static io.datahubproject.test.search.SearchTestUtils.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertSame; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -32,6 +43,16 @@ import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.r2.RemoteInvocationException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import javax.annotation.Nonnull; import org.junit.Assert; import 
org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; @@ -45,1470 +66,1882 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; - -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder.STRUCTURED_QUERY_PREFIX; -import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; -import static io.datahubproject.test.search.SearchTestUtils.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertSame; -import static org.testng.Assert.assertTrue; - -abstract public class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests { - protected static final Authentication AUTHENTICATION = - new Authentication(new Actor(ActorType.USER, "test"), ""); - - @Nonnull - abstract protected EntityRegistry getEntityRegistry(); - - @Nonnull - abstract protected SearchService getSearchService(); - - @Nonnull - abstract protected EntityClient getEntityClient(); - - @Nonnull - abstract protected RestHighLevelClient getSearchClient(); - - @Test - public void testSearchFieldConfig() throws IOException { - /* - For every field in every entity fixture, ensure proper detection of field types and analyzers - */ - Map<EntitySpec, String> fixtureEntities = new HashMap<>(); - fixtureEntities.put(getEntityRegistry().getEntitySpec("dataset"), "smpldat_datasetindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("chart"), "smpldat_chartindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("container"), "smpldat_containerindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("corpgroup"), "smpldat_corpgroupindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("corpuser"), "smpldat_corpuserindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("dashboard"), "smpldat_dashboardindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("dataflow"), "smpldat_dataflowindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("datajob"), "smpldat_datajobindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("domain"), "smpldat_domainindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("glossarynode"), "smpldat_glossarynodeindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("glossaryterm"), "smpldat_glossarytermindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlfeature"), "smpldat_mlfeatureindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlfeaturetable"), "smpldat_mlfeaturetableindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodelgroup"), "smpldat_mlmodelgroupindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodel"), "smpldat_mlmodelindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlprimarykey"), "smpldat_mlprimarykeyindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("tag"), 
"smpldat_tagindex_v2"); - - for (Map.Entry<EntitySpec, String> entry : fixtureEntities.entrySet()) { - EntitySpec entitySpec = entry.getKey(); - GetMappingsRequest req = new GetMappingsRequest().indices(entry.getValue()); - - GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); - Map<String, Map<String, Object>> mappings = (Map<String, Map<String, Object>>) resp.mappings() - .get(entry.getValue()).sourceAsMap().get("properties"); - - // For every fieldSpec determine whether the SearchFieldConfig is accurate - for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) { - SearchFieldConfig test = SearchFieldConfig.detectSubFieldType(fieldSpec); - - if (!test.fieldName().contains(".")) { - Map<String, Object> actual = mappings.get(test.fieldName()); - - final String expectedAnalyzer; - if (actual.get("search_analyzer") != null) { - expectedAnalyzer = (String) actual.get("search_analyzer"); - } else if (actual.get("analyzer") != null) { - expectedAnalyzer = (String) actual.get("analyzer"); - } else { - expectedAnalyzer = "keyword"; - } - - assertEquals(test.analyzer(), expectedAnalyzer, - String.format("Expected search analyzer to match for entity: `%s`field: `%s`", - entitySpec.getName(), test.fieldName())); - - if (test.hasDelimitedSubfield()) { - assertTrue(((Map<String, Map<String, String>>) actual.get("fields")).containsKey("delimited"), - String.format("Expected entity: `%s` field to have .delimited subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } else { - boolean nosubfield = !actual.containsKey("fields") - || !((Map<String, Map<String, String>>) actual.get("fields")).containsKey("delimited"); - assertTrue(nosubfield, String.format("Expected entity: `%s` field to NOT have .delimited subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } - if (test.hasKeywordSubfield()) { - assertTrue(((Map<String, Map<String, String>>) actual.get("fields")).containsKey("keyword"), - String.format("Expected entity: `%s` field to have .keyword subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } else { - boolean nosubfield = !actual.containsKey("fields") - || !((Map<String, Map<String, String>>) actual.get("fields")).containsKey("keyword"); - assertTrue(nosubfield, String.format("Expected entity: `%s` field to NOT have .keyword subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } - } else { - // this is a subfield therefore cannot have a subfield - assertFalse(test.hasKeywordSubfield()); - assertFalse(test.hasDelimitedSubfield()); - assertFalse(test.hasWordGramSubfields()); - - String[] fieldAndSubfield = test.fieldName().split("[.]", 2); - - Map<String, Object> actualParent = mappings.get(fieldAndSubfield[0]); - Map<String, Object> actualSubfield = ((Map<String, Map<String, Object>>) actualParent.get("fields")).get(fieldAndSubfield[0]); - - String expectedAnalyzer = actualSubfield.get("search_analyzer") != null ? 
(String) actualSubfield.get("search_analyzer") - : "keyword"; - - assertEquals(test.analyzer(), expectedAnalyzer, - String.format("Expected search analyzer to match for field `%s`", test.fieldName())); - } - } +public abstract class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests { + protected static final Authentication AUTHENTICATION = + new Authentication(new Actor(ActorType.USER, "test"), ""); + + @Nonnull + protected abstract EntityRegistry getEntityRegistry(); + + @Nonnull + protected abstract SearchService getSearchService(); + + @Nonnull + protected abstract EntityClient getEntityClient(); + + @Nonnull + protected abstract RestHighLevelClient getSearchClient(); + + @Test + public void testSearchFieldConfig() throws IOException { + /* + For every field in every entity fixture, ensure proper detection of field types and analyzers + */ + Map<EntitySpec, String> fixtureEntities = new HashMap<>(); + fixtureEntities.put(getEntityRegistry().getEntitySpec("dataset"), "smpldat_datasetindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("chart"), "smpldat_chartindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("container"), "smpldat_containerindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("corpgroup"), "smpldat_corpgroupindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("corpuser"), "smpldat_corpuserindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("dashboard"), "smpldat_dashboardindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("dataflow"), "smpldat_dataflowindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("datajob"), "smpldat_datajobindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("domain"), "smpldat_domainindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("glossarynode"), "smpldat_glossarynodeindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("glossaryterm"), "smpldat_glossarytermindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlfeature"), "smpldat_mlfeatureindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlfeaturetable"), "smpldat_mlfeaturetableindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlmodelgroup"), "smpldat_mlmodelgroupindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodel"), "smpldat_mlmodelindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlprimarykey"), "smpldat_mlprimarykeyindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("tag"), "smpldat_tagindex_v2"); + + for (Map.Entry<EntitySpec, String> entry : fixtureEntities.entrySet()) { + EntitySpec entitySpec = entry.getKey(); + GetMappingsRequest req = new GetMappingsRequest().indices(entry.getValue()); + + GetMappingsResponse resp = + getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); + Map<String, Map<String, Object>> mappings = + (Map<String, Map<String, Object>>) + resp.mappings().get(entry.getValue()).sourceAsMap().get("properties"); + + // For every fieldSpec determine whether the SearchFieldConfig is accurate + for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) { + SearchFieldConfig test = SearchFieldConfig.detectSubFieldType(fieldSpec); + + if (!test.fieldName().contains(".")) { + Map<String, Object> actual = mappings.get(test.fieldName()); + + final String expectedAnalyzer; + if (actual.get("search_analyzer") != null) { + expectedAnalyzer = 
(String) actual.get("search_analyzer"); + } else if (actual.get("analyzer") != null) { + expectedAnalyzer = (String) actual.get("analyzer"); + } else { + expectedAnalyzer = "keyword"; + } + + assertEquals( + test.analyzer(), + expectedAnalyzer, + String.format( + "Expected search analyzer to match for entity: `%s`field: `%s`", + entitySpec.getName(), test.fieldName())); + + if (test.hasDelimitedSubfield()) { + assertTrue( + ((Map<String, Map<String, String>>) actual.get("fields")).containsKey("delimited"), + String.format( + "Expected entity: `%s` field to have .delimited subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } else { + boolean nosubfield = + !actual.containsKey("fields") + || !((Map<String, Map<String, String>>) actual.get("fields")) + .containsKey("delimited"); + assertTrue( + nosubfield, + String.format( + "Expected entity: `%s` field to NOT have .delimited subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } + if (test.hasKeywordSubfield()) { + assertTrue( + ((Map<String, Map<String, String>>) actual.get("fields")).containsKey("keyword"), + String.format( + "Expected entity: `%s` field to have .keyword subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } else { + boolean nosubfield = + !actual.containsKey("fields") + || !((Map<String, Map<String, String>>) actual.get("fields")) + .containsKey("keyword"); + assertTrue( + nosubfield, + String.format( + "Expected entity: `%s` field to NOT have .keyword subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } + } else { + // this is a subfield therefore cannot have a subfield + assertFalse(test.hasKeywordSubfield()); + assertFalse(test.hasDelimitedSubfield()); + assertFalse(test.hasWordGramSubfields()); + + String[] fieldAndSubfield = test.fieldName().split("[.]", 2); + + Map<String, Object> actualParent = mappings.get(fieldAndSubfield[0]); + Map<String, Object> actualSubfield = + ((Map<String, Map<String, Object>>) actualParent.get("fields")) + .get(fieldAndSubfield[0]); + + String expectedAnalyzer = + actualSubfield.get("search_analyzer") != null + ? 
(String) actualSubfield.get("search_analyzer") + : "keyword"; + + assertEquals( + test.analyzer(), + expectedAnalyzer, + String.format("Expected search analyzer to match for field `%s`", test.fieldName())); } + } } - - @Test - public void testGetSortOrder() { - String dateFieldName = "lastOperationTime"; - List<String> entityNamesToTestSearch = List.of("dataset", "chart", "corpgroup"); - List<EntitySpec> entitySpecs = entityNamesToTestSearch.stream().map( - name -> getEntityRegistry().getEntitySpec(name)) + } + + @Test + public void testGetSortOrder() { + String dateFieldName = "lastOperationTime"; + List<String> entityNamesToTestSearch = List.of("dataset", "chart", "corpgroup"); + List<EntitySpec> entitySpecs = + entityNamesToTestSearch.stream() + .map(name -> getEntityRegistry().getEntitySpec(name)) .collect(Collectors.toList()); - SearchSourceBuilder builder = new SearchSourceBuilder(); - SortCriterion sortCriterion = new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName); - ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs); - List<SortBuilder<?>> sorts = builder.sorts(); - assertEquals(sorts.size(), 2); // sort by last modified and then by urn - for (SortBuilder sort : sorts) { - assertTrue(sort instanceof FieldSortBuilder); - FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; - if (fieldSortBuilder.getFieldName().equals(dateFieldName)) { - assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.DESC); - assertEquals(fieldSortBuilder.unmappedType(), "date"); - } else { - assertEquals(fieldSortBuilder.getFieldName(), "urn"); - } - } - - // Test alias field - String entityNameField = "_entityName"; - SearchSourceBuilder nameBuilder = new SearchSourceBuilder(); - SortCriterion nameCriterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField); - ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs); - sorts = nameBuilder.sorts(); - assertEquals(sorts.size(), 2); - for (SortBuilder sort : sorts) { - assertTrue(sort instanceof FieldSortBuilder); - FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; - if (fieldSortBuilder.getFieldName().equals(entityNameField)) { - assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.ASC); - assertEquals(fieldSortBuilder.unmappedType(), "keyword"); - } else { - assertEquals(fieldSortBuilder.getFieldName(), "urn"); - } - } + SearchSourceBuilder builder = new SearchSourceBuilder(); + SortCriterion sortCriterion = + new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName); + ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs); + List<SortBuilder<?>> sorts = builder.sorts(); + assertEquals(sorts.size(), 2); // sort by last modified and then by urn + for (SortBuilder sort : sorts) { + assertTrue(sort instanceof FieldSortBuilder); + FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; + if (fieldSortBuilder.getFieldName().equals(dateFieldName)) { + assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.DESC); + assertEquals(fieldSortBuilder.unmappedType(), "date"); + } else { + assertEquals(fieldSortBuilder.getFieldName(), "urn"); + } } - @Test - public void testDatasetHasTags() throws IOException { - GetMappingsRequest req = new GetMappingsRequest() - .indices("smpldat_datasetindex_v2"); - GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); - Map<String, Map<String, String>> mappings = (Map<String, Map<String, String>>) resp.mappings() - 
.get("smpldat_datasetindex_v2").sourceAsMap().get("properties"); - assertTrue(mappings.containsKey("hasTags")); - assertEquals(mappings.get("hasTags"), Map.of("type", "boolean")); + // Test alias field + String entityNameField = "_entityName"; + SearchSourceBuilder nameBuilder = new SearchSourceBuilder(); + SortCriterion nameCriterion = + new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField); + ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs); + sorts = nameBuilder.sorts(); + assertEquals(sorts.size(), 2); + for (SortBuilder sort : sorts) { + assertTrue(sort instanceof FieldSortBuilder); + FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; + if (fieldSortBuilder.getFieldName().equals(entityNameField)) { + assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.ASC); + assertEquals(fieldSortBuilder.unmappedType(), "keyword"); + } else { + assertEquals(fieldSortBuilder.getFieldName(), "urn"); + } } - - @Test - public void testFixtureInitialization() { - assertNotNull(getSearchService()); - SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); - assertEquals(0, noResult.getEntities().size()); - - final SearchResult result = searchAcrossEntities(getSearchService(), "test"); - - Map<String, Integer> expectedTypes = Map.of( - "dataset", 13, - "chart", 0, - "container", 1, - "dashboard", 0, - "tag", 0, - "mlmodel", 0 - ); - - Map<String, List<Urn>> actualTypes = new HashMap<>(); - for (String key : expectedTypes.keySet()) { - actualTypes.put(key, result.getEntities().stream() - .map(SearchEntity::getEntity).filter(entity -> key.equals(entity.getEntityType())).collect(Collectors.toList())); - } - - expectedTypes.forEach((key, value) -> - assertEquals(actualTypes.get(key).size(), value.intValue(), - String.format("Expected entity `%s` matches for %s. 
Found %s", value, key, - result.getEntities().stream() - .filter(e -> e.getEntity().getEntityType().equals(key)) - .map(e -> e.getEntity().getEntityKey()) - .collect(Collectors.toList())))); + } + + @Test + public void testDatasetHasTags() throws IOException { + GetMappingsRequest req = new GetMappingsRequest().indices("smpldat_datasetindex_v2"); + GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); + Map<String, Map<String, String>> mappings = + (Map<String, Map<String, String>>) + resp.mappings().get("smpldat_datasetindex_v2").sourceAsMap().get("properties"); + assertTrue(mappings.containsKey("hasTags")); + assertEquals(mappings.get("hasTags"), Map.of("type", "boolean")); + } + + @Test + public void testFixtureInitialization() { + assertNotNull(getSearchService()); + SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); + assertEquals(0, noResult.getEntities().size()); + + final SearchResult result = searchAcrossEntities(getSearchService(), "test"); + + Map<String, Integer> expectedTypes = + Map.of( + "dataset", 13, + "chart", 0, + "container", 1, + "dashboard", 0, + "tag", 0, + "mlmodel", 0); + + Map<String, List<Urn>> actualTypes = new HashMap<>(); + for (String key : expectedTypes.keySet()) { + actualTypes.put( + key, + result.getEntities().stream() + .map(SearchEntity::getEntity) + .filter(entity -> key.equals(entity.getEntityType())) + .collect(Collectors.toList())); } - @Test - public void testDataPlatform() { - Map<String, Integer> expected = ImmutableMap.<String, Integer>builder() - .put("urn:li:dataPlatform:BigQuery", 8) - .put("urn:li:dataPlatform:hive", 3) - .put("urn:li:dataPlatform:mysql", 5) - .put("urn:li:dataPlatform:s3", 1) - .put("urn:li:dataPlatform:hdfs", 1) - .put("urn:li:dataPlatform:graph", 1) - .put("urn:li:dataPlatform:dbt", 9) - .put("urn:li:dataplatform:BigQuery", 8) - .put("urn:li:dataplatform:hive", 3) - .put("urn:li:dataplatform:mysql", 5) - .put("urn:li:dataplatform:s3", 1) - .put("urn:li:dataplatform:hdfs", 1) - .put("urn:li:dataplatform:graph", 1) - .put("urn:li:dataplatform:dbt", 9) - .build(); - - expected.forEach((key, value) -> { - SearchResult result = searchAcrossEntities(getSearchService(), key); - assertEquals(result.getEntities().size(), value.intValue(), - String.format("Unexpected data platform `%s` hits.", key)); // max is 100 without pagination + expectedTypes.forEach( + (key, value) -> + assertEquals( + actualTypes.get(key).size(), + value.intValue(), + String.format( + "Expected entity `%s` matches for %s. 
Found %s", + value, + key, + result.getEntities().stream() + .filter(e -> e.getEntity().getEntityType().equals(key)) + .map(e -> e.getEntity().getEntityKey()) + .collect(Collectors.toList())))); + } + + @Test + public void testDataPlatform() { + Map<String, Integer> expected = + ImmutableMap.<String, Integer>builder() + .put("urn:li:dataPlatform:BigQuery", 8) + .put("urn:li:dataPlatform:hive", 3) + .put("urn:li:dataPlatform:mysql", 5) + .put("urn:li:dataPlatform:s3", 1) + .put("urn:li:dataPlatform:hdfs", 1) + .put("urn:li:dataPlatform:graph", 1) + .put("urn:li:dataPlatform:dbt", 9) + .put("urn:li:dataplatform:BigQuery", 8) + .put("urn:li:dataplatform:hive", 3) + .put("urn:li:dataplatform:mysql", 5) + .put("urn:li:dataplatform:s3", 1) + .put("urn:li:dataplatform:hdfs", 1) + .put("urn:li:dataplatform:graph", 1) + .put("urn:li:dataplatform:dbt", 9) + .build(); + + expected.forEach( + (key, value) -> { + SearchResult result = searchAcrossEntities(getSearchService(), key); + assertEquals( + result.getEntities().size(), + value.intValue(), + String.format( + "Unexpected data platform `%s` hits.", key)); // max is 100 without pagination }); - } - - @Test - public void testUrn() { + } + + @Test + public void testUrn() { + List.of( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.austin311_derived,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:graph,graph-test,PROD)", + "urn:li:chart:(looker,baz1)", + "urn:li:dashboard:(looker,baz)", + "urn:li:mlFeature:(test_feature_table_all_feature_dtypes,test_BOOL_LIST_feature)", + "urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)") + .forEach( + query -> + assertTrue( + searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, + String.format("Unexpected >1 urn result for `%s`", query))); + } + + @Test + public void testExactTable() { + SearchResult results = searchAcrossEntities(getSearchService(), "stg_customers"); + assertEquals( + results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); + assertEquals( + results.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)"); + } + + @Test + public void testStemming() { + List<Set<String>> testSets = List.of( - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.austin311_derived,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:graph,graph-test,PROD)", - "urn:li:chart:(looker,baz1)", - "urn:li:dashboard:(looker,baz)", - "urn:li:mlFeature:(test_feature_table_all_feature_dtypes,test_BOOL_LIST_feature)", - "urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)" - ).forEach(query -> - assertTrue(searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, - String.format("Unexpected >1 urn result for `%s`", query)) - ); - } - - @Test - public void testExactTable() { - SearchResult results = searchAcrossEntities(getSearchService(), "stg_customers"); - assertEquals(results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); - assertEquals(results.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)"); - } - - @Test - public void testStemming() { - List<Set<String>> testSets = List.of( - Set.of("log", "logs", "logging"), - Set.of("border", "borders", "bordered", "bordering"), - Set.of("indicates", "indicate", "indicated") - ); - - testSets.forEach(testSet -> { - Integer expectedResults 
= null; - for (String testQuery : testSet) { - SearchResult results = searchAcrossEntities(getSearchService(), testQuery); - - assertTrue(results.hasEntities() && !results.getEntities().isEmpty(), - String.format("Expected search results for `%s`", testQuery)); - if (expectedResults == null) { - expectedResults = results.getNumEntities(); - } - assertEquals(expectedResults, results.getNumEntities(), - String.format("Expected all result counts to match after stemming. %s", testSet)); + Set.of("log", "logs", "logging"), + Set.of("border", "borders", "bordered", "bordering"), + Set.of("indicates", "indicate", "indicated")); + + testSets.forEach( + testSet -> { + Integer expectedResults = null; + for (String testQuery : testSet) { + SearchResult results = searchAcrossEntities(getSearchService(), testQuery); + + assertTrue( + results.hasEntities() && !results.getEntities().isEmpty(), + String.format("Expected search results for `%s`", testQuery)); + if (expectedResults == null) { + expectedResults = results.getNumEntities(); } - }); - } - - @Test - public void testStemmingOverride() throws IOException { - Set<String> testSet = Set.of("customer", "customers"); - - Set<SearchResult> results = testSet.stream() - .map(test -> searchAcrossEntities(getSearchService(), test)) - .collect(Collectors.toSet()); - - results.forEach(r -> assertTrue(r.hasEntities() && !r.getEntities().isEmpty(), "Expected search results")); - assertEquals(results.stream().map(r -> r.getEntities().size()).distinct().count(), 1, + assertEquals( + expectedResults, + results.getNumEntities(), String.format("Expected all result counts to match after stemming. %s", testSet)); - - // Additional inspect token - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "customers" - ); - - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("customer"), "Expected `customer` and not `custom`"); - } - - @Test - public void testDelimitedSynonym() throws IOException { - List<String> expectedTokens = List.of("cac"); - List<String> analyzers = List.of( - "urn_component", - "word_delimited", - "query_urn_component", - "query_word_delimited" - ); - List<String> testTexts = List.of( - "customer acquisition cost", - "cac", - "urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)" - ); - - for (String analyzer : analyzers) { - for (String text : testTexts) { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - analyzer, text - ); - List<String> tokens = getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedTokens.forEach(expected -> assertTrue(tokens.contains(expected), - String.format("Analyzer: `%s` Text: `%s` - Expected token `%s` in tokens: %s", - analyzer, text, expected, tokens))); - } - } - - // {"urn":"urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)","id":"cac_table",... 
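
Every analyzer assertion in these tests funnels through a getTokens(...) helper that is defined elsewhere in this test class and not visible in the hunk. For readers following the diff, a minimal sketch of what such a helper can look like against the Elasticsearch high-level REST client is shown below; the getSearchClient() accessor is an assumption for illustration, not the repository's actual field or method name:

    import java.io.IOException;
    import java.util.stream.Stream;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.indices.AnalyzeRequest;
    import org.elasticsearch.client.indices.AnalyzeResponse;

    // Sketch only: run the given analyze request against the cluster and
    // stream back the tokens the named analyzer produced.
    private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request)
        throws IOException {
      RestHighLevelClient client = getSearchClient(); // assumed accessor
      AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
      return response.getTokens().stream();
    }

Each test then maps AnalyzeResponse.AnalyzeToken::getTerm over this stream and compares the collected list with the tokens the index-time or query-time analyzer is expected to emit.
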
- List<String> testSet = List.of( - "cac", - "customer acquisition cost" - ); - List<Integer> resultCounts = testSet.stream().map(q -> { - SearchResult result = searchAcrossEntities(getSearchService(), q); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - "Expected search results for: " + q); - return result.getEntities().size(); - }).collect(Collectors.toList()); - } - - @Test - public void testNegateAnalysis() throws IOException { - String queryWithMinus = "logging_events -bckp"; - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("logging_events -bckp", "logging_ev", "-bckp", "log", "event", "bckp")); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_gram_3", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("logging events -bckp")); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_gram_4", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of()); - + } + }); + } + + @Test + public void testStemmingOverride() throws IOException { + Set<String> testSet = Set.of("customer", "customers"); + + Set<SearchResult> results = + testSet.stream() + .map(test -> searchAcrossEntities(getSearchService(), test)) + .collect(Collectors.toSet()); + + results.forEach( + r -> assertTrue(r.hasEntities() && !r.getEntities().isEmpty(), "Expected search results")); + assertEquals( + results.stream().map(r -> r.getEntities().size()).distinct().count(), + 1, + String.format("Expected all result counts to match after stemming. 
%s", testSet)); + + // Additional inspect token + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", "customers"); + + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("customer"), "Expected `customer` and not `custom`"); + } + + @Test + public void testDelimitedSynonym() throws IOException { + List<String> expectedTokens = List.of("cac"); + List<String> analyzers = + List.of("urn_component", "word_delimited", "query_urn_component", "query_word_delimited"); + List<String> testTexts = + List.of( + "customer acquisition cost", + "cac", + "urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)"); + + for (String analyzer : analyzers) { + for (String text : testTexts) { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, text); + List<String> tokens = + getTokens(request) + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()); + expectedTokens.forEach( + expected -> + assertTrue( + tokens.contains(expected), + String.format( + "Analyzer: `%s` Text: `%s` - Expected token `%s` in tokens: %s", + analyzer, text, expected, tokens))); + } } - @Test - public void testWordGram() throws IOException { - String text = "hello.cat_cool_customer"; - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat", "cat cool", "cool customer")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat cool", "cat cool customer")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat cool customer")); - - String testMoreSeparators = "quick.brown:fox jumped-LAZY_Dog"; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown", "brown fox", "fox jumped", "jumped lazy", "lazy dog")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown fox", "brown fox jumped", "fox jumped lazy", "jumped lazy dog")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown fox jumped", "brown fox jumped lazy", "fox jumped lazy dog")); - - String textWithQuotesAndDuplicateWord = "\"my_db.my_exact_table\""; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db", "db my", "my exact", "exact table")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", 
"word_gram_3", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db my", "db my exact", "my exact table")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db my exact", "db my exact table")); - - String textWithParens = "(hi) there"; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithParens); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hi there")); - - String oneWordText = "hello"; - for (String analyzer : List.of("word_gram_2", "word_gram_3", "word_gram_4")) { - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, oneWordText); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of()); - } + // {"urn":"urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)","id":"cac_table",... + List<String> testSet = List.of("cac", "customer acquisition cost"); + List<Integer> resultCounts = + testSet.stream() + .map( + q -> { + SearchResult result = searchAcrossEntities(getSearchService(), q); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + "Expected search results for: " + q); + return result.getEntities().size(); + }) + .collect(Collectors.toList()); + } + + @Test + public void testNegateAnalysis() throws IOException { + String queryWithMinus = "logging_events -bckp"; + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("logging_events -bckp", "logging_ev", "-bckp", "log", "event", "bckp")); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("logging events -bckp")); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of()); + } + + @Test + public void testWordGram() throws IOException { + String text = "hello.cat_cool_customer"; + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat", "cat cool", "cool customer")); + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat cool", "cat cool customer")); + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat cool customer")); + + String testMoreSeparators = "quick.brown:fox jumped-LAZY_Dog"; + request = + AnalyzeRequest.withIndexAnalyzer( + 
"smpldat_datasetindex_v2", "word_gram_2", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown", "brown fox", "fox jumped", "jumped lazy", "lazy dog")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_3", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown fox", "brown fox jumped", "fox jumped lazy", "jumped lazy dog")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_4", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown fox jumped", "brown fox jumped lazy", "fox jumped lazy dog")); + + String textWithQuotesAndDuplicateWord = "\"my_db.my_exact_table\""; + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_2", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db", "db my", "my exact", "exact table")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_3", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db my", "db my exact", "my exact table")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_4", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db my exact", "db my exact table")); + + String textWithParens = "(hi) there"; + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithParens); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hi there")); + + String oneWordText = "hello"; + for (String analyzer : List.of("word_gram_2", "word_gram_3", "word_gram_4")) { + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, oneWordText); + assertEquals( + getTokens(request) + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()), + List.of()); } - - @Test - public void testUrnSynonym() throws IOException { - List<String> expectedTokens = List.of("bigquery"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.bq_audit.cloudaudit_googleapis_com_activity,PROD)" - ); - List<String> indexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedTokens.forEach(expected -> assertTrue(indexTokens.contains(expected), + } + + @Test + public void testUrnSynonym() throws IOException { + List<String> expectedTokens = List.of("bigquery"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.bq_audit.cloudaudit_googleapis_com_activity,PROD)"); + List<String> indexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedTokens.forEach( + expected -> + assertTrue( + 
indexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, indexTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - "big query" - ); - List<String> queryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(queryTokens, List.of("big query", "big", "query", "bigquery")); - - List<String> testSet = List.of( - "bigquery", - "big query" - ); - List<SearchResult> results = testSet.stream().map(query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), "Expected search results for: " + query); - return result; - }).collect(Collectors.toList()); - - assertEquals(results.stream().map(r -> r.getEntities().size()).distinct().count(), 1, - String.format("Expected all result counts (%s) to match after synonyms. %s", results, testSet)); - Assert.assertArrayEquals(results.get(0).getEntities().stream().map(e -> e.getEntity().toString()).sorted().toArray(String[]::new), - results.get(1).getEntities().stream().map(e -> e.getEntity().toString()).sorted().toArray(String[]::new)); - } - - @Test - public void testTokenization() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "my_table" - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("my_tabl", "tabl"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "my_table" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("my_tabl", "tabl"), - String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationWithNumber() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "harshal-playground-306419.test_schema.austin311_derived" - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "austin311_deriv", "austin311", "deriv"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "harshal-playground-306419.test_schema.austin311_derived" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "austin311_deriv", "austin311", "deriv"), - String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationQuote() throws IOException { - String testQuery = "\"test2\""; - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - testQuery - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. 
Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationQuoteUnderscore() throws IOException { - String testQuery = "\"raw_orders\""; - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - testQuery - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders", "raw_ord", "raw", "order"), String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders", "raw_ord", "raw", "order"), String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders"), String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationDataPlatform() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.excess_deaths_derived,PROD)" - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", "bigquery", "big", "query", - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "excess_deaths_deriv", "excess", "death", "deriv", - "prod", "production"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset-ac611929-c3ac-4b92-aafb-f4603ddb408a,PROD)" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", "hive", - "samplehivedataset-ac611929-c3ac-4b92-aafb-f4603ddb408a", - "samplehivedataset", "ac611929", "c3ac", "4b92", "aafb", "f4603ddb408a", - "prod", "production"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:test_rollback,rollback_test_dataset,TEST)" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", - "test_rollback", "test", "rollback", "rollback_test_dataset"), - String.format("Unexpected tokens. 
Found %s", tokens)); - } - - @Test - public void testChartAutoComplete() throws InterruptedException, IOException { - // Two charts exist Baz Chart 1 & Baz Chart 2 - List.of("B", "Ba", "Baz", "Baz ", "Baz C", "Baz Ch", "Baz Cha", "Baz Char", "Baz Chart", "Baz Chart ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query); - assertTrue(result.getEntities().size() == 2, - String.format("Expected 2 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testDatasetAutoComplete() { - List.of("excess", "excess_", "excess_d", "excess_de", "excess_death", "excess_deaths", "excess_deaths_d", - "excess_deaths_de", "excess_deaths_der", "excess_deaths_derived") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new DatasetType(getEntityClient()), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected >= 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testContainerAutoComplete() { - List.of("cont", "container", "container-a", "container-auto", "container-autocomp", "container-autocomp-te", - "container-autocomp-test") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new ContainerType(getEntityClient()), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected >= 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testGroupAutoComplete() { - List.of("T", "Te", "Tes", "Test ", "Test G", "Test Gro", "Test Group ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new CorpGroupType(getEntityClient()), query); - assertTrue(result.getEntities().size() == 1, - String.format("Expected 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testUserAutoComplete() { - List.of("D", "Da", "Dat", "Data ", "Data H", "Data Hu", "Data Hub", "Data Hub ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new CorpUserType(getEntityClient(), null), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected at least 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", "big query"); + List<String> queryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(queryTokens, List.of("big query", "big", "query", "bigquery")); + + List<String> testSet = List.of("bigquery", "big query"); + List<SearchResult> results = + testSet.stream() + .map( + query -> { + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + "Expected search results for: " + query); + return result; + }) + .collect(Collectors.toList()); - @Test - public void testSmokeTestQueries() { - Map<String, Integer> expectedFulltextMinimums = Map.of( - "sample", 3, - "covid", 2, - "\"raw_orders\"", 6, - STRUCTURED_QUERY_PREFIX + "sample", 3, - STRUCTURED_QUERY_PREFIX + 
"\"sample\"", 2, - STRUCTURED_QUERY_PREFIX + "covid", 2, - STRUCTURED_QUERY_PREFIX + "\"raw_orders\"", 1 - ); - - Map<String, SearchResult> results = expectedFulltextMinimums.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedFulltextMinimums.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s fulltext results, expected %s results.", key, actualCount, - expectedCount)); + assertEquals( + results.stream().map(r -> r.getEntities().size()).distinct().count(), + 1, + String.format( + "Expected all result counts (%s) to match after synonyms. %s", results, testSet)); + Assert.assertArrayEquals( + results.get(0).getEntities().stream() + .map(e -> e.getEntity().toString()) + .sorted() + .toArray(String[]::new), + results.get(1).getEntities().stream() + .map(e -> e.getEntity().toString()) + .sorted() + .toArray(String[]::new)); + } + + @Test + public void testTokenization() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", "my_table"); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("my_tabl", "tabl"), String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", "my_table"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("my_tabl", "tabl"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationWithNumber() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "word_delimited", + "harshal-playground-306419.test_schema.austin311_derived"); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "austin311_deriv", + "austin311", + "deriv"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "harshal-playground-306419.test_schema.austin311_derived"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "austin311_deriv", + "austin311", + "deriv"), + String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationQuote() throws IOException { + String testQuery = "\"test2\""; + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", testQuery); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. 
Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", testQuery); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationQuoteUnderscore() throws IOException { + String testQuery = "\"raw_orders\""; + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", testQuery); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of("raw_orders", "raw_ord", "raw", "order"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", testQuery); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of("raw_orders", "raw_ord", "raw", "order"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", testQuery); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("raw_orders"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationDataPlatform() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.excess_deaths_derived,PROD)"); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "dataset", + "dataplatform", + "data platform", + "bigquery", + "big", + "query", + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "excess_deaths_deriv", + "excess", + "death", + "deriv", + "prod", + "production"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset-ac611929-c3ac-4b92-aafb-f4603ddb408a,PROD)"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "dataset", + "dataplatform", + "data platform", + "hive", + "samplehivedataset-ac611929-c3ac-4b92-aafb-f4603ddb408a", + "samplehivedataset", + "ac611929", + "c3ac", + "4b92", + "aafb", + "f4603ddb408a", + "prod", + "production"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:test_rollback,rollback_test_dataset,TEST)"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "dataset", + "dataplatform", + "data platform", + "test_rollback", + "test", + "rollback", + "rollback_test_dataset"), + String.format("Unexpected tokens. 
Found %s", tokens)); + } + + @Test + public void testChartAutoComplete() throws InterruptedException, IOException { + // Two charts exist Baz Chart 1 & Baz Chart 2 + List.of( + "B", + "Ba", + "Baz", + "Baz ", + "Baz C", + "Baz Ch", + "Baz Cha", + "Baz Char", + "Baz Chart", + "Baz Chart ") + .forEach( + query -> { + try { + AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query); + assertTrue( + result.getEntities().size() == 2, + String.format( + "Expected 2 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testDatasetAutoComplete() { + List.of( + "excess", + "excess_", + "excess_d", + "excess_de", + "excess_death", + "excess_deaths", + "excess_deaths_d", + "excess_deaths_de", + "excess_deaths_der", + "excess_deaths_derived") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new DatasetType(getEntityClient()), query); + assertTrue( + result.getEntities().size() >= 1, + String.format( + "Expected >= 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testContainerAutoComplete() { + List.of( + "cont", + "container", + "container-a", + "container-auto", + "container-autocomp", + "container-autocomp-te", + "container-autocomp-test") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new ContainerType(getEntityClient()), query); + assertTrue( + result.getEntities().size() >= 1, + String.format( + "Expected >= 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testGroupAutoComplete() { + List.of("T", "Te", "Tes", "Test ", "Test G", "Test Gro", "Test Group ") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new CorpGroupType(getEntityClient()), query); + assertTrue( + result.getEntities().size() == 1, + String.format( + "Expected 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testUserAutoComplete() { + List.of("D", "Da", "Dat", "Data ", "Data H", "Data Hu", "Data Hub", "Data Hub ") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new CorpUserType(getEntityClient(), null), query); + assertTrue( + result.getEntities().size() >= 1, + String.format( + "Expected at least 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testSmokeTestQueries() { + Map<String, Integer> expectedFulltextMinimums = + Map.of( + "sample", + 3, + "covid", + 2, + "\"raw_orders\"", + 6, + STRUCTURED_QUERY_PREFIX + "sample", + 3, + STRUCTURED_QUERY_PREFIX + "\"sample\"", + 2, + STRUCTURED_QUERY_PREFIX + "covid", + 2, + STRUCTURED_QUERY_PREFIX + "\"raw_orders\"", + 1); + + Map<String, SearchResult> results = + expectedFulltextMinimums.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedFulltextMinimums.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s 
fulltext results, expected %s results.", + key, actualCount, expectedCount)); }); - Map<String, Integer> expectedStructuredMinimums = Map.of( - "sample", 3, - "covid", 2, - "\"raw_orders\"", 1 - ); - - results = expectedStructuredMinimums.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchStructured(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedStructuredMinimums.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s structured results, expected %s results.", key, actualCount, - expectedCount)); + Map<String, Integer> expectedStructuredMinimums = + Map.of( + "sample", 3, + "covid", 2, + "\"raw_orders\"", 1); + + results = + expectedStructuredMinimums.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> searchStructured(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedStructuredMinimums.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s structured results, expected %s results.", + key, actualCount, expectedCount)); }); - } - - @Test - public void testMinNumberLengthLimit() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "data2022.data22" - ); - List<String> expected = List.of("data2022", "data22"); - List<String> actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(actual, expected, - String.format("Expected: %s Actual: %s", expected, actual)); - } - - @Test - public void testUnderscore() throws IOException { - String testQuery = "bad_fraud_id"; - List<String> expected = List.of("bad_fraud_id", "bad", "fraud"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - testQuery - ); - - List<String> actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(actual, expected, - String.format("Analayzer: query_word_delimited Expected: %s Actual: %s", expected, actual)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - testQuery - ); - actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(actual, expected, - String.format("Analyzer: word_delimited Expected: %s Actual: %s", expected, actual)); - - } - - @Test - public void testFacets() { - Set<String> expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags"); - SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress"); - expectedFacets.forEach(facet -> { - assertTrue(testResult.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + } + + @Test + public void testMinNumberLengthLimit() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_delimited", "data2022.data22"); + List<String> expected = List.of("data2022", "data22"); + List<String> actual = + 
getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(actual, expected, String.format("Expected: %s Actual: %s", expected, actual)); + } + + @Test + public void testUnderscore() throws IOException { + String testQuery = "bad_fraud_id"; + List<String> expected = List.of("bad_fraud_id", "bad", "fraud"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", testQuery); + + List<String> actual = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + actual, + expected, + String.format("Analayzer: query_word_delimited Expected: %s Actual: %s", expected, actual)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", testQuery); + actual = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + actual, + expected, + String.format("Analyzer: word_delimited Expected: %s Actual: %s", expected, actual)); + } + + @Test + public void testFacets() { + Set<String> expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags"); + SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress"); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityAggMeta = testResult.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("entity")).findFirst().get(); - Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); - expectedEntityTypeCounts.put("container", 0L); - expectedEntityTypeCounts.put("corpuser", 0L); - expectedEntityTypeCounts.put("corpgroup", 0L); - expectedEntityTypeCounts.put("mlmodel", 0L); - expectedEntityTypeCounts.put("mlfeaturetable", 1L); - expectedEntityTypeCounts.put("mlmodelgroup", 1L); - expectedEntityTypeCounts.put("dataflow", 1L); - expectedEntityTypeCounts.put("glossarynode", 1L); - expectedEntityTypeCounts.put("mlfeature", 0L); - expectedEntityTypeCounts.put("datajob", 2L); - expectedEntityTypeCounts.put("domain", 0L); - expectedEntityTypeCounts.put("tag", 0L); - expectedEntityTypeCounts.put("glossaryterm", 2L); - expectedEntityTypeCounts.put("mlprimarykey", 1L); - expectedEntityTypeCounts.put("dataset", 9L); - expectedEntityTypeCounts.put("chart", 0L); - expectedEntityTypeCounts.put("dashboard", 0L); - assertEquals(entityAggMeta.getAggregations(), expectedEntityTypeCounts); - } - - @Test - public void testNestedAggregation() { - Set<String> expectedFacets = Set.of("platform"); - SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult.getMetadata().getAggregations().size(), 1); - expectedFacets.forEach(facet -> { - assertTrue(testResult.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityAggMeta = + testResult.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + 
.get(); + Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); + expectedEntityTypeCounts.put("container", 0L); + expectedEntityTypeCounts.put("corpuser", 0L); + expectedEntityTypeCounts.put("corpgroup", 0L); + expectedEntityTypeCounts.put("mlmodel", 0L); + expectedEntityTypeCounts.put("mlfeaturetable", 1L); + expectedEntityTypeCounts.put("mlmodelgroup", 1L); + expectedEntityTypeCounts.put("dataflow", 1L); + expectedEntityTypeCounts.put("glossarynode", 1L); + expectedEntityTypeCounts.put("mlfeature", 0L); + expectedEntityTypeCounts.put("datajob", 2L); + expectedEntityTypeCounts.put("domain", 0L); + expectedEntityTypeCounts.put("tag", 0L); + expectedEntityTypeCounts.put("glossaryterm", 2L); + expectedEntityTypeCounts.put("mlprimarykey", 1L); + expectedEntityTypeCounts.put("dataset", 9L); + expectedEntityTypeCounts.put("chart", 0L); + expectedEntityTypeCounts.put("dashboard", 0L); + assertEquals(entityAggMeta.getAggregations(), expectedEntityTypeCounts); + } + + @Test + public void testNestedAggregation() { + Set<String> expectedFacets = Set.of("platform"); + SearchResult testResult = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult.getMetadata().getAggregations().size(), 1); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); - SearchResult testResult2 = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult2.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResult2.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult2.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); + SearchResult testResult2 = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult2.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult2.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult2.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityTypeAggMeta = testResult2.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("_entityType")).findFirst().get(); - AggregationMetadata entityAggMeta = testResult2.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("entity")).findFirst().get(); - assertEquals(entityTypeAggMeta.getAggregations(), entityAggMeta.getAggregations()); - Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); - expectedEntityTypeCounts.put("container", 0L); - expectedEntityTypeCounts.put("corpuser", 0L); - expectedEntityTypeCounts.put("corpgroup", 0L); - expectedEntityTypeCounts.put("mlmodel", 0L); - expectedEntityTypeCounts.put("mlfeaturetable", 1L); - 
expectedEntityTypeCounts.put("mlmodelgroup", 1L); - expectedEntityTypeCounts.put("dataflow", 1L); - expectedEntityTypeCounts.put("glossarynode", 1L); - expectedEntityTypeCounts.put("mlfeature", 0L); - expectedEntityTypeCounts.put("datajob", 2L); - expectedEntityTypeCounts.put("domain", 0L); - expectedEntityTypeCounts.put("tag", 0L); - expectedEntityTypeCounts.put("glossaryterm", 2L); - expectedEntityTypeCounts.put("mlprimarykey", 1L); - expectedEntityTypeCounts.put("dataset", 9L); - expectedEntityTypeCounts.put("chart", 0L); - expectedEntityTypeCounts.put("dashboard", 0L); - assertEquals(entityTypeAggMeta.getAggregations(), expectedEntityTypeCounts); - - expectedFacets = Set.of("platform", "typeNames", "entity"); - SearchResult testResult3 = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult3.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResult3.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult3.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityTypeAggMeta = + testResult2.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("_entityType")) + .findFirst() + .get(); + AggregationMetadata entityAggMeta = + testResult2.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + .get(); + assertEquals(entityTypeAggMeta.getAggregations(), entityAggMeta.getAggregations()); + Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); + expectedEntityTypeCounts.put("container", 0L); + expectedEntityTypeCounts.put("corpuser", 0L); + expectedEntityTypeCounts.put("corpgroup", 0L); + expectedEntityTypeCounts.put("mlmodel", 0L); + expectedEntityTypeCounts.put("mlfeaturetable", 1L); + expectedEntityTypeCounts.put("mlmodelgroup", 1L); + expectedEntityTypeCounts.put("dataflow", 1L); + expectedEntityTypeCounts.put("glossarynode", 1L); + expectedEntityTypeCounts.put("mlfeature", 0L); + expectedEntityTypeCounts.put("datajob", 2L); + expectedEntityTypeCounts.put("domain", 0L); + expectedEntityTypeCounts.put("tag", 0L); + expectedEntityTypeCounts.put("glossaryterm", 2L); + expectedEntityTypeCounts.put("mlprimarykey", 1L); + expectedEntityTypeCounts.put("dataset", 9L); + expectedEntityTypeCounts.put("chart", 0L); + expectedEntityTypeCounts.put("dashboard", 0L); + assertEquals(entityTypeAggMeta.getAggregations(), expectedEntityTypeCounts); + + expectedFacets = Set.of("platform", "typeNames", "entity"); + SearchResult testResult3 = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult3.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult3.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult3.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityTypeAggMeta3 = testResult3.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("_entityType")).findFirst().get(); - AggregationMetadata entityAggMeta3 = testResult3.getMetadata().getAggregations().stream().filter( - aggMeta -> 
aggMeta.getName().equals("entity")).findFirst().get(); - assertEquals(entityTypeAggMeta3.getAggregations(), entityAggMeta3.getAggregations()); - assertEquals(entityTypeAggMeta3.getAggregations(), expectedEntityTypeCounts); - - String singleNestedFacet = String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); - expectedFacets = Set.of(singleNestedFacet); - SearchResult testResultSingleNested = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); - Map<String, Long> expectedNestedFacetCounts = new HashMap<>(); - expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); - expectedNestedFacetCounts.put("glossarynode␞urn:li:corpuser:jdoe", 1L); - expectedNestedFacetCounts.put("dataflow␞urn:li:corpuser:datahub", 1L); - expectedNestedFacetCounts.put("mlfeaturetable", 1L); - expectedNestedFacetCounts.put("mlmodelgroup", 1L); - expectedNestedFacetCounts.put("glossarynode", 1L); - expectedNestedFacetCounts.put("dataflow", 1L); - expectedNestedFacetCounts.put("mlmodelgroup␞urn:li:corpuser:some-user", 1L); - expectedNestedFacetCounts.put("datajob", 2L); - expectedNestedFacetCounts.put("glossaryterm␞urn:li:corpuser:jdoe", 2L); - expectedNestedFacetCounts.put("glossaryterm", 2L); - expectedNestedFacetCounts.put("dataset", 9L); - expectedNestedFacetCounts.put("mlprimarykey", 1L); - assertEquals(testResultSingleNested.getMetadata().getAggregations().get(0).getAggregations(), expectedNestedFacetCounts); - - expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); - SearchResult testResultNested = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResultNested.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResultNested.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityTypeAggMeta3 = + testResult3.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("_entityType")) + .findFirst() + .get(); + AggregationMetadata entityAggMeta3 = + testResult3.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + .get(); + assertEquals(entityTypeAggMeta3.getAggregations(), entityAggMeta3.getAggregations()); + assertEquals(entityTypeAggMeta3.getAggregations(), expectedEntityTypeCounts); + + String singleNestedFacet = String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); + expectedFacets = Set.of(singleNestedFacet); + SearchResult testResultSingleNested = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); + Map<String, Long> expectedNestedFacetCounts = new HashMap<>(); + expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); + expectedNestedFacetCounts.put("glossarynode␞urn:li:corpuser:jdoe", 1L); + expectedNestedFacetCounts.put("dataflow␞urn:li:corpuser:datahub", 1L); + expectedNestedFacetCounts.put("mlfeaturetable", 1L); + expectedNestedFacetCounts.put("mlmodelgroup", 1L); + expectedNestedFacetCounts.put("glossarynode", 1L); + expectedNestedFacetCounts.put("dataflow", 1L); + 
expectedNestedFacetCounts.put("mlmodelgroup␞urn:li:corpuser:some-user", 1L); + expectedNestedFacetCounts.put("datajob", 2L); + expectedNestedFacetCounts.put("glossaryterm␞urn:li:corpuser:jdoe", 2L); + expectedNestedFacetCounts.put("glossaryterm", 2L); + expectedNestedFacetCounts.put("dataset", 9L); + expectedNestedFacetCounts.put("mlprimarykey", 1L); + assertEquals( + testResultSingleNested.getMetadata().getAggregations().get(0).getAggregations(), + expectedNestedFacetCounts); + + expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); + SearchResult testResultNested = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResultNested.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResultNested.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - List<AggregationMetadata> expectedNestedAgg = testResultNested.getMetadata().getAggregations().stream().filter( - agg -> agg.getName().equals(singleNestedFacet)).collect(Collectors.toList()); - assertEquals(expectedNestedAgg.size(), 1); - AggregationMetadata nestedAgg = expectedNestedAgg.get(0); - assertEquals(nestedAgg.getDisplayName(), String.format("Type%sOwned By", AGGREGATION_SEPARATOR_CHAR)); - } - - @Test - public void testPartialUrns() throws IOException { - Set<String> expectedQueryTokens = Set.of("dataplatform", "data platform", "samplehdfsdataset", "prod", "production"); - Set<String> expectedIndexTokens = Set.of("dataplatform", "data platform", "hdfs", "samplehdfsdataset", "prod", "production"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - List<String> searchQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedQueryTokens.forEach(expected -> assertTrue(searchQueryTokens.contains(expected), + List<AggregationMetadata> expectedNestedAgg = + testResultNested.getMetadata().getAggregations().stream() + .filter(agg -> agg.getName().equals(singleNestedFacet)) + .collect(Collectors.toList()); + assertEquals(expectedNestedAgg.size(), 1); + AggregationMetadata nestedAgg = expectedNestedAgg.get(0); + assertEquals( + nestedAgg.getDisplayName(), String.format("Type%sOwned By", AGGREGATION_SEPARATOR_CHAR)); + } + + @Test + public void testPartialUrns() throws IOException { + Set<String> expectedQueryTokens = + Set.of("dataplatform", "data platform", "samplehdfsdataset", "prod", "production"); + Set<String> expectedIndexTokens = + Set.of("dataplatform", "data platform", "hdfs", "samplehdfsdataset", "prod", "production"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "query_urn_component", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + List<String> searchQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedQueryTokens.forEach( + expected -> + assertTrue( + searchQueryTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchQueryTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - 
":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - List<String> searchIndexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedIndexTokens.forEach(expected -> assertTrue(searchIndexTokens.contains(expected), + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + List<String> searchIndexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedIndexTokens.forEach( + expected -> + assertTrue( + searchIndexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchIndexTokens))); - } - - @Test - public void testPartialUnderscoreUrns() throws IOException { - String testQuery = ":(urn:li:dataPlatform:hdfs,party_email,PROD)"; - Set<String> expectedQueryTokens = Set.of("dataplatform", "data platform", "hdfs", "party_email", "parti", - "email", "prod", "production"); - Set<String> expectedIndexTokens = Set.of("dataplatform", "data platform", "hdfs", "party_email", "parti", - "email", "prod", "production"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - testQuery - ); - List<String> searchQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedQueryTokens.forEach(expected -> assertTrue(searchQueryTokens.contains(expected), + } + + @Test + public void testPartialUnderscoreUrns() throws IOException { + String testQuery = ":(urn:li:dataPlatform:hdfs,party_email,PROD)"; + Set<String> expectedQueryTokens = + Set.of( + "dataplatform", + "data platform", + "hdfs", + "party_email", + "parti", + "email", + "prod", + "production"); + Set<String> expectedIndexTokens = + Set.of( + "dataplatform", + "data platform", + "hdfs", + "party_email", + "parti", + "email", + "prod", + "production"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", testQuery); + List<String> searchQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedQueryTokens.forEach( + expected -> + assertTrue( + searchQueryTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchQueryTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - testQuery - ); - List<String> searchIndexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedIndexTokens.forEach(expected -> assertTrue(searchIndexTokens.contains(expected), + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", testQuery); + List<String> searchIndexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedIndexTokens.forEach( + expected -> + assertTrue( + searchIndexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchIndexTokens))); - } - - @Test - public void testScrollAcrossEntities() throws IOException { - String query = "logging_events"; - final int batchSize = 1; - int totalResults = 0; - String scrollId = null; - do { - ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); - int numResults = result.hasEntities() ? 
result.getEntities().size() : 0; - assertTrue(numResults <= batchSize); - totalResults += numResults; - scrollId = result.getScrollId(); - } while (scrollId != null); - // expect 8 total matching results - assertEquals(totalResults, 8); - } - - @Test - public void testSearchAcrossMultipleEntities() { - String query = "logging_events"; - SearchResult result = search(getSearchService(), query); - assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 4); - result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 4); - } - - @Test - public void testQuotedAnalyzer() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"party_email\"" - ); - List<String> searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("party_email"), searchQuotedQueryTokens, String.format("Actual %s", searchQuotedQueryTokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test2\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test2"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"party_email\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("party_email"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test2\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test2"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test_BYTES_LIST_feature\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test_bytes_list_feature"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - "test_BYTES_LIST_feature" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertTrue(searchQuotedQueryTokens.contains("test_bytes_list_featur")); - } - - @Test - public void testFragmentUrns() { - List<String> testSet = List.of( - "hdfs,SampleHdfsDataset,PROD", - "hdfs,SampleHdfsDataset", - "SampleHdfsDataset", - "(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", - "urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD", - "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - - testSet.forEach(query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected partial urn search results", query)); - 
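// The scroll test above drains a cursor page by page until the service stops
// returning a scrollId. The same drain, sketched as a reusable count
// (countAllScrollHits is illustrative; scroll(...) is the existing test fixture):
private int countAllScrollHits(String query, int batchSize) {
  int total = 0;
  String scrollId = null;
  do {
    ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId);
    total += result.hasEntities() ? result.getEntities().size() : 0;
    scrollId = result.getScrollId();
  } while (scrollId != null);
  return total;
}
// Usage: assertEquals(countAllScrollHits("logging_events", 1), 8);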
assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); + } + + @Test + public void testScrollAcrossEntities() throws IOException { + String query = "logging_events"; + final int batchSize = 1; + int totalResults = 0; + String scrollId = null; + do { + ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); + int numResults = result.hasEntities() ? result.getEntities().size() : 0; + assertTrue(numResults <= batchSize); + totalResults += numResults; + scrollId = result.getScrollId(); + } while (scrollId != null); + // expect 8 total matching results + assertEquals(totalResults, 8); + } + + @Test + public void testSearchAcrossMultipleEntities() { + String query = "logging_events"; + SearchResult result = search(getSearchService(), query); + assertEquals((int) result.getNumEntities(), 8); + result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 8); + result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 4); + result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 4); + } + + @Test + public void testQuotedAnalyzer() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"party_email\""); + List<String> searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + List.of("party_email"), + searchQuotedQueryTokens, + String.format("Actual %s", searchQuotedQueryTokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", "\"test2\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test2"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"party_email\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("party_email"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", "\"test2\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test2"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"test_BYTES_LIST_feature\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test_bytes_list_feature"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", "test_BYTES_LIST_feature"); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertTrue(searchQuotedQueryTokens.contains("test_bytes_list_featur")); + } + + @Test + public void testFragmentUrns() { + List<String> testSet = + List.of( + "hdfs,SampleHdfsDataset,PROD", + "hdfs,SampleHdfsDataset", + "SampleHdfsDataset", + 
"(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", + "urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD", + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + + testSet.forEach( + query -> { + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected partial urn search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); }); - } - - @Test - public void testPlatformTest() { - List<String> testFields = List.of("platform.keyword", "platform"); - final String testPlatform = "urn:li:dataPlatform:dbt"; - - // Ensure backend code path works as expected - List<SearchResult> results = testFields.stream() - .map(fieldName -> { - final String query = String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); - SearchResult result = searchStructured(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - return result; + } + + @Test + public void testPlatformTest() { + List<String> testFields = List.of("platform.keyword", "platform"); + final String testPlatform = "urn:li:dataPlatform:dbt"; + + // Ensure backend code path works as expected + List<SearchResult> results = + testFields.stream() + .map( + fieldName -> { + final String query = + String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); + SearchResult result = searchStructured(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format( + "%s - Expected search results to include matched fields", query)); + return result; }) - .collect(Collectors.toList()); - - IntStream.range(0, testFields.size()).forEach(idx -> { - assertEquals(results.get(idx).getEntities().size(), 9, - String.format("Search results for fields `%s` != 9", testFields.get(idx))); - }); + .collect(Collectors.toList()); - // Construct problematic search entity query - List<Filter> testFilters = testFields.stream() - .map(fieldName -> { - Filter filter = new Filter(); - ArrayList<Criterion> criteria = new ArrayList<>(); - Criterion hasPlatformCriterion = new Criterion().setField(fieldName).setCondition(Condition.EQUAL).setValue(testPlatform); - criteria.add(hasPlatformCriterion); - filter.setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); - return filter; - }).collect(Collectors.toList()); - - // Test variations of fulltext flags - for (Boolean fulltextFlag : List.of(true, false)) { - - // Test field variations with/without .keyword - List<SearchResult> entityClientResults = testFilters.stream().map(filter -> { - try { - return getEntityClient().search("dataset", "*", filter, null, 0, 100, - AUTHENTICATION, new SearchFlags().setFulltext(fulltextFlag)); - } catch (RemoteInvocationException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toList()); - - IntStream.range(0, 
testFields.size()).forEach(idx -> { - assertEquals(entityClientResults.get(idx).getEntities().size(), 9, - String.format("Search results for entityClient fields (fulltextFlag: %s): `%s` != 9", fulltextFlag, testFields.get(idx))); + IntStream.range(0, testFields.size()) + .forEach( + idx -> { + assertEquals( + results.get(idx).getEntities().size(), + 9, + String.format("Search results for fields `%s` != 9", testFields.get(idx))); }); - } - } - - @Test - public void testStructQueryFieldMatch() { - String query = STRUCTURED_QUERY_PREFIX + "name: customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 1); - } - - @Test - public void testStructQueryFieldPrefixMatch() { - String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 2); - } - - @Test - public void testStructQueryCustomPropertiesKeyPrefix() { - String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 9); - } - - @Test - public void testStructQueryCustomPropertiesMatch() { - String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + // Construct problematic search entity query + List<Filter> testFilters = + testFields.stream() + .map( + fieldName -> { + Filter filter = new Filter(); + ArrayList<Criterion> criteria = new ArrayList<>(); + Criterion hasPlatformCriterion = + new Criterion() + .setField(fieldName) + .setCondition(Condition.EQUAL) + .setValue(testPlatform); + criteria.add(hasPlatformCriterion); + filter.setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + return filter; + }) + .collect(Collectors.toList()); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); + // Test variations of fulltext flags + for (Boolean fulltextFlag : List.of(true, false)) { - assertEquals(result.getEntities().size(), 5); + // Test field variations with/without .keyword + List<SearchResult> entityClientResults = + testFilters.stream() + .map( + filter -> { + try { + return getEntityClient() + .search( + "dataset", + "*", + filter, + null, + 
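// The filter built above is the smallest possible boolean tree: one EQUAL
// criterion inside a single AND conjunction inside the OR array. Sketched as a
// helper using only the classes already used in this test (singleFieldEqualsFilter
// is illustrative, not part of this patch):
private static Filter singleFieldEqualsFilter(String field, String value) {
  Criterion criterion =
      new Criterion().setField(field).setCondition(Condition.EQUAL).setValue(value);
  return new Filter()
      .setOr(
          new ConjunctiveCriterionArray(
              new ConjunctiveCriterion().setAnd(new CriterionArray(List.of(criterion)))));
}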
0, + 100, + AUTHENTICATION, + new SearchFlags().setFulltext(fulltextFlag)); + } catch (RemoteInvocationException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList()); + + IntStream.range(0, testFields.size()) + .forEach( + idx -> { + assertEquals( + entityClientResults.get(idx).getEntities().size(), + 9, + String.format( + "Search results for entityClient fields (fulltextFlag: %s): `%s` != 9", + fulltextFlag, testFields.get(idx))); + }); } - - @Test - public void testCustomPropertiesQuoted() { - Map<String, Integer> expectedResults = Map.of( - "\"materialization=view\"", 3, - STRUCTURED_QUERY_PREFIX + "customProperties:\"materialization=view\"", 3 - ); - - Map<String, SearchResult> results = expectedResults.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedResults.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s fulltext results, expected %s results.", key, actualCount, - expectedCount)); + } + + @Test + public void testStructQueryFieldMatch() { + String query = STRUCTURED_QUERY_PREFIX + "name: customers"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + } + + @Test + public void testStructQueryFieldPrefixMatch() { + String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 2); + } + + @Test + public void testStructQueryCustomPropertiesKeyPrefix() { + String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 9); + } + + @Test + public void testStructQueryCustomPropertiesMatch() { + String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 5); + } + + @Test + public void testCustomPropertiesQuoted() { + Map<String, Integer> expectedResults = 
+ Map.of( + "\"materialization=view\"", + 3, + STRUCTURED_QUERY_PREFIX + "customProperties:\"materialization=view\"", + 3); + + Map<String, SearchResult> results = + expectedResults.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedResults.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s fulltext results, expected %s results.", + key, actualCount, expectedCount)); }); + } + + @Test + public void testStructQueryFieldPaths() { + String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 3); + } + + @Test + public void testStructQueryBoolean() { + String query = + STRUCTURED_QUERY_PREFIX + + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 2); + + query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; + result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + + query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; + result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + } + + @Test + public void testStructQueryBrowsePaths() { + String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 9); + } + + @Test + public void testOr() { + String query = "stg_customers | logging_events"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + 
result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 9); + + query = "stg_customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 1); + + query = "logging_events"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testNegate() { + String query = "logging_events -bckp"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 7); + + query = "logging_events"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testPrefix() { + String query = "bigquery"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + + query = "big*"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testParens() { + String query = "dbt | (bigquery + covid19)"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results 
to include matched fields", query)); + assertEquals(result.getEntities().size(), 11); + + query = "dbt"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 9); + + query = "bigquery + covid19"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 2); + + query = "bigquery"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + + query = "covid19"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 2); + } + + @Test + public void testGram() { + String query = "jaffle shop customers"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", + "Expected exact match in 1st position"); + + query = "shop customers source"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers_source,PROD)", + "Expected ngram match in 1st position"); + + query = "jaffle shop stg customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)", + "Expected ngram match in 1st position"); + + query = "jaffle shop transformers customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + 
"urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.transformers_customers,PROD)", + "Expected ngram match in 1st position"); + + query = "shop raw customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.raw_customers,PROD)", + "Expected ngram match in 1st position"); + } + + @Test + public void testPrefixVsExact() { + String query = "\"customers\""; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 10); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", + "Expected exact match and 1st position"); + } + + // Note: This test can fail if not using .keyword subfields (check for possible query builder + // regression) + @Test + public void testPrefixVsExactCaseSensitivity() { + List<String> insensitiveExactMatches = + List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); + for (String query : insensitiveExactMatches) { + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), insensitiveExactMatches.size()); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", + "Expected exact match as first match with matching case"); } - - @Test - public void testStructQueryFieldPaths() { - String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 3); - } - - @Test - public void testStructQueryBoolean() { - String query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 2); - - query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; - result = 
searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 1); - - query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; - result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 1); - } - - @Test - public void testStructQueryBrowsePaths() { - String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 9); - } - - @Test - public void testOr() { - String query = "stg_customers | logging_events"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 9); - - query = "stg_customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 1); - - query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testNegate() { - String query = "logging_events -bckp"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 7); - - query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - 
assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testPrefix() { - String query = "bigquery"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - - query = "big*"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testParens() { - String query = "dbt | (bigquery + covid19)"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 11); - - query = "dbt"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 9); - - query = "bigquery + covid19"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 2); - - query = "bigquery"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - - query = "covid19"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 2); - } - @Test - public void testGram() { - String query = "jaffle shop customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - 
assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", - "Expected exact match in 1st position"); - - query = "shop customers source"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers_source,PROD)", - "Expected ngram match in 1st position"); - - query = "jaffle shop stg customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)", - "Expected ngram match in 1st position"); - - query = "jaffle shop transformers customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.transformers_customers,PROD)", - "Expected ngram match in 1st position"); - - query = "shop raw customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.raw_customers,PROD)", - "Expected ngram match in 1st position"); - } - - @Test - public void testPrefixVsExact() { - String query = "\"customers\""; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", - "Expected exact match and 1st position"); - } - - // Note: This test can fail if not using .keyword subfields (check for possible query builder regression) - @Test - public void testPrefixVsExactCaseSensitivity() { - List<String> insensitiveExactMatches = List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); - for (String query : insensitiveExactMatches) { - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 
insensitiveExactMatches.size()); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", - "Expected exact match as first match with matching case"); - } - } - - @Test - public void testColumnExactMatch() { - String query = "unit_data"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertTrue(result.getEntities().size() > 2, - String.format("%s - Expected search results to have at least two results", query)); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", - "Expected table name exact match first"); - - query = "special_column_only_present_here_info"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertTrue(result.getEntities().size() > 2, - String.format("%s - Expected search results to have at least two results", query)); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + "important_units" + ",PROD)", - "Expected table with column name exact match first"); - } - - @Test - public void testSortOrdering() { - String query = "unit_data"; - SortCriterion criterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField("lastOperationTime"); - SearchResult result = getSearchService().searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, criterion, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true), null); - assertTrue(result.getEntities().size() > 2, - String.format("%s - Expected search results to have at least two results", query)); - } - - private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request) throws IOException { - return getSearchClient().indices().analyze(request, RequestOptions.DEFAULT).getTokens().stream(); - } + } + + @Test + public void testColumnExactMatch() { + String query = "unit_data"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertTrue( + result.getEntities().size() > 2, + String.format("%s - Expected search results to have at least two results", query)); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", + "Expected table name exact match first"); + + query = "special_column_only_present_here_info"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + 
result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertTrue( + result.getEntities().size() > 2, + String.format("%s - Expected search results to have at least two results", query)); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + "important_units" + ",PROD)", + "Expected table with column name exact match first"); + } + + @Test + public void testSortOrdering() { + String query = "unit_data"; + SortCriterion criterion = + new SortCriterion().setOrder(SortOrder.ASCENDING).setField("lastOperationTime"); + SearchResult result = + getSearchService() + .searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + criterion, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true), + null); + assertTrue( + result.getEntities().size() > 2, + String.format("%s - Expected search results to have at least two results", query)); + } + + private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request) + throws IOException { + return getSearchClient() + .indices() + .analyze(request, RequestOptions.DEFAULT) + .getTokens() + .stream(); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java index 4472af339c074..2c395875a1d6b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java @@ -1,10 +1,19 @@ package com.linkedin.metadata.search.indexbuilder; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.systemmetadata.SystemMetadataMappingsBuilder; import com.linkedin.metadata.version.GitVersion; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.opensearch.OpenSearchException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; @@ -20,198 +29,270 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; +public abstract class IndexBuilderTestBase extends AbstractTestNGSpringContextTests { -import static org.testng.Assert.*; - -abstract public class IndexBuilderTestBase extends AbstractTestNGSpringContextTests { + @Nonnull + protected abstract RestHighLevelClient getSearchClient(); - @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + private static IndicesClient _indexClient; + private static final String TEST_INDEX_NAME = "esindex_builder_test"; + private static ESIndexBuilder testDefaultBuilder; - private static IndicesClient _indexClient; - private static final String TEST_INDEX_NAME = "esindex_builder_test"; - private static ESIndexBuilder testDefaultBuilder; + @BeforeClass + public void setup() { 
+ _indexClient = getSearchClient().indices(); + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + testDefaultBuilder = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion); + } + @BeforeMethod + public static void wipe() throws Exception { + try { + _indexClient + .getAlias(new GetAliasesRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT) + .getAliases() + .keySet() + .forEach( + index -> { + try { + _indexClient.delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); - @BeforeClass - public void setup() { - _indexClient = getSearchClient().indices(); - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - testDefaultBuilder = new ESIndexBuilder(getSearchClient(), 1, 0, 0, - 0, Map.of(), false, false, - new ElasticSearchConfiguration(), gitVersion); + _indexClient.delete(new DeleteIndexRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT); + } catch (OpenSearchException exception) { + if (exception.status() != RestStatus.NOT_FOUND) { + throw exception; + } } + } - @BeforeMethod - public static void wipe() throws Exception { - try { - _indexClient.getAlias(new GetAliasesRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT) - .getAliases().keySet().forEach(index -> { - try { - _indexClient.delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - - _indexClient.delete(new DeleteIndexRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT); - } catch (OpenSearchException exception) { - if (exception.status() != RestStatus.NOT_FOUND) { - throw exception; - } - } - } + public static GetIndexResponse getTestIndex() throws IOException { + return _indexClient.get( + new GetIndexRequest(TEST_INDEX_NAME).includeDefaults(true), RequestOptions.DEFAULT); + } - public static GetIndexResponse getTestIndex() throws IOException { - return _indexClient.get(new GetIndexRequest(TEST_INDEX_NAME).includeDefaults(true), RequestOptions.DEFAULT); - } + @Test + public void testESIndexBuilderCreation() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder customIndexBuilder = + new ESIndexBuilder( + getSearchClient(), + 2, + 0, + 1, + 0, + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion); + customIndexBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + GetIndexResponse resp = getTestIndex(); - @Test - public void testESIndexBuilderCreation() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder customIndexBuilder = new ESIndexBuilder(getSearchClient(), 2, 0, 1, - 0, Map.of(), false, false, - new ElasticSearchConfiguration(), gitVersion); - customIndexBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - GetIndexResponse resp = getTestIndex(); - - assertEquals("2", resp.getSetting(TEST_INDEX_NAME, "index.number_of_shards")); - assertEquals("0", resp.getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals("0s", resp.getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - } + assertEquals("2", resp.getSetting(TEST_INDEX_NAME, "index.number_of_shards")); + assertEquals("0", resp.getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals("0s", resp.getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + } - @Test - public void 
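// Reading the positional ESIndexBuilder arguments above: the names below are a
// sketch inferred from the builder's getters (getNumShards, getNumReplicas,
// getNumRetries, getRefreshIntervalSeconds) and from which builders trigger
// reindexing in the tests that follow; they are not the actual parameter names.
//
//   new ESIndexBuilder(
//       searchClient,
//       numShards,              // asserted via index.number_of_shards
//       numReplicas,            // asserted via index.number_of_replicas
//       numRetries,
//       refreshIntervalSeconds, // asserted via index.refresh_interval
//       indexSettingOverrides,  // Map.of() throughout these tests
//       enableSettingsReindex,  // true only in the shard-count reindex test
//       enableMappingsReindex,  // true only in testMappingReindex
//       elasticSearchConfiguration,
//       gitVersion);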
testMappingReindex() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder enabledMappingReindex = new ESIndexBuilder(getSearchClient(), 1, 0, 0, - 0, Map.of(), false, true, - new ElasticSearchConfiguration(), gitVersion); + @Test + public void testMappingReindex() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder enabledMappingReindex = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + true, + new ElasticSearchConfiguration(), + gitVersion); - // No mappings - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + // No mappings + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - // add new mappings - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + // add new mappings + enabledMappingReindex.buildIndex( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); - String afterAddedMappingCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - assertEquals(beforeCreationDate, afterAddedMappingCreationDate, "Expected no reindex on *adding* mappings"); + String afterAddedMappingCreationDate = + getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + assertEquals( + beforeCreationDate, + afterAddedMappingCreationDate, + "Expected no reindex on *adding* mappings"); - // change mappings - Map<String, Object> newProps = ((Map<String, Object>) SystemMetadataMappingsBuilder.getMappings().get("properties")) - .entrySet().stream() - .map(m -> !m.getKey().equals("urn") ? m - : Map.entry("urn", ImmutableMap.<String, Object>builder().put("type", "text").build())) + // change mappings + Map<String, Object> newProps = + ((Map<String, Object>) SystemMetadataMappingsBuilder.getMappings().get("properties")) + .entrySet().stream() + .map( + m -> + !m.getKey().equals("urn") + ? 
m + : Map.entry( + "urn", + ImmutableMap.<String, Object>builder().put("type", "text").build())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of("properties", newProps), Map.of()); + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of("properties", newProps), Map.of()); - assertTrue(Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), - "Expected original index to be replaced with alias"); + assertTrue( + Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), + "Expected original index to be replaced with alias"); - Map.Entry<String, List<AliasMetadata>> newIndex = getTestIndex().getAliases().entrySet().stream() - .filter(e -> e.getValue().stream().anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) - .findFirst().get(); - String afterChangedMappingCreationDate = getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); - assertNotEquals(beforeCreationDate, afterChangedMappingCreationDate, "Expected reindex on *changing* mappings"); - } + Map.Entry<String, List<AliasMetadata>> newIndex = + getTestIndex().getAliases().entrySet().stream() + .filter( + e -> + e.getValue().stream() + .anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) + .findFirst() + .get(); + String afterChangedMappingCreationDate = + getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); + assertNotEquals( + beforeCreationDate, + afterChangedMappingCreationDate, + "Expected reindex on *changing* mappings"); + } + + @Test + public void testSettingsNumberOfShardsReindex() throws Exception { + // Set test defaults + testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertEquals("1", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_shards")); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + + String expectedShards = "5"; + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder changedShardBuilder = + new ESIndexBuilder( + getSearchClient(), + Integer.parseInt(expectedShards), + testDefaultBuilder.getNumReplicas(), + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + + // add new shard setting + changedShardBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertTrue( + Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), + "Expected original index to be replaced with alias"); + + Map.Entry<String, List<AliasMetadata>> newIndex = + getTestIndex().getAliases().entrySet().stream() + .filter( + e -> + e.getValue().stream() + .anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) + .findFirst() + .get(); - @Test - public void testSettingsNumberOfShardsReindex() throws Exception { - // Set test defaults - testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertEquals("1", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_shards")); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - String expectedShards = "5"; - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder changedShardBuilder = new ESIndexBuilder(getSearchClient(), - Integer.parseInt(expectedShards), + String afterCreationDate = 
getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); + assertNotEquals( + beforeCreationDate, afterCreationDate, "Expected reindex to result in different timestamp"); + assertEquals( + expectedShards, + getTestIndex().getSetting(newIndex.getKey(), "index.number_of_shards"), + "Expected number of shards: " + expectedShards); + } + + @Test + public void testSettingsNoReindex() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + List<ESIndexBuilder> noReindexBuilders = + List.of( + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas() + 1, + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas(), + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds() + 10, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards() + 1, testDefaultBuilder.getNumReplicas(), testDefaultBuilder.getNumRetries(), testDefaultBuilder.getRefreshIntervalSeconds(), Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion); - - // add new shard setting - changedShardBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertTrue(Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), - "Expected original index to be replaced with alias"); - - Map.Entry<String, List<AliasMetadata>> newIndex = getTestIndex().getAliases().entrySet().stream() - .filter(e -> e.getValue().stream().anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) - .findFirst().get(); - - String afterCreationDate = getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); - assertNotEquals(beforeCreationDate, afterCreationDate, "Expected reindex to result in different timestamp"); - assertEquals(expectedShards, getTestIndex().getSetting(newIndex.getKey(), "index.number_of_shards"), - "Expected number of shards: " + expectedShards); - } + false, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas() + 1, + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion)); - @Test - public void testSettingsNoReindex() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - List<ESIndexBuilder> noReindexBuilders = List.of( - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas() + 1, - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas(), - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds() + 10, - Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards() + 1, - testDefaultBuilder.getNumReplicas(), - 
testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - false, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas() + 1, - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - false, false, - new ElasticSearchConfiguration(), gitVersion) - ); - - for (ESIndexBuilder builder : noReindexBuilders) { - // Set test defaults - testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertEquals("0", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals("0s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - // build index with builder - builder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertTrue(Arrays.asList(getTestIndex().getIndices()).contains(TEST_INDEX_NAME), - "Expected original index to remain"); - String afterCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - assertEquals(beforeCreationDate, afterCreationDate, "Expected no difference in index timestamp"); - assertEquals(String.valueOf(builder.getNumReplicas()), getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals(builder.getRefreshIntervalSeconds() + "s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - - wipe(); - } - } + for (ESIndexBuilder builder : noReindexBuilders) { + // Set test defaults + testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertEquals("0", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals("0s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + + // build index with builder + builder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertTrue( + Arrays.asList(getTestIndex().getIndices()).contains(TEST_INDEX_NAME), + "Expected original index to remain"); + String afterCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + assertEquals( + beforeCreationDate, afterCreationDate, "Expected no difference in index timestamp"); + assertEquals( + String.valueOf(builder.getNumReplicas()), + getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals( + builder.getRefreshIntervalSeconds() + "s", + getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + + wipe(); + } + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index d9f2f0e5aac94..02bd186ccc183 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -1,16 +1,14 @@ package com.linkedin.metadata.search.indexbuilder; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.TestEntitySpecBuilder; -import java.util.Map; - import com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder; +import java.util.Map; import 
org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class MappingsBuilderTest { @Test @@ -19,14 +17,33 @@ public void testMappingsBuilder() { assertEquals(result.size(), 1); Map<String, Object> properties = (Map<String, Object>) result.get("properties"); assertEquals(properties.size(), 20); - assertEquals(properties.get("urn"), ImmutableMap.of("type", "keyword", + assertEquals( + properties.get("urn"), + ImmutableMap.of( + "type", + "keyword", "fields", - ImmutableMap.of("delimited", - ImmutableMap.of("type", "text", "analyzer", "urn_component", "search_analyzer", "query_urn_component", - "search_quote_analyzer", "quote_analyzer"), - "ngram", - ImmutableMap.of("type", "search_as_you_type", "max_shingle_size", "4", "doc_values", "false", - "analyzer", "partial_urn_component")))); + ImmutableMap.of( + "delimited", + ImmutableMap.of( + "type", + "text", + "analyzer", + "urn_component", + "search_analyzer", + "query_urn_component", + "search_quote_analyzer", + "quote_analyzer"), + "ngram", + ImmutableMap.of( + "type", + "search_as_you_type", + "max_shingle_size", + "4", + "doc_values", + "false", + "analyzer", + "partial_urn_component")))); assertEquals(properties.get("runId"), ImmutableMap.of("type", "keyword")); assertTrue(properties.containsKey("browsePaths")); assertTrue(properties.containsKey("browsePathV2")); @@ -37,24 +54,30 @@ public void testMappingsBuilder() { Map<String, Object> keyPart3FieldSubfields = (Map<String, Object>) keyPart3Field.get("fields"); assertEquals(keyPart3FieldSubfields.size(), 1); assertTrue(keyPart3FieldSubfields.containsKey("keyword")); - Map<String, Object> customPropertiesField = (Map<String, Object>) properties.get("customProperties"); + Map<String, Object> customPropertiesField = + (Map<String, Object>) properties.get("customProperties"); assertEquals(customPropertiesField.get("type"), "keyword"); assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> customPropertiesFieldSubfields = (Map<String, Object>) customPropertiesField.get("fields"); + Map<String, Object> customPropertiesFieldSubfields = + (Map<String, Object>) customPropertiesField.get("fields"); assertEquals(customPropertiesFieldSubfields.size(), 1); assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT - Map<String, Object> nestedArrayStringField = (Map<String, Object>) properties.get("nestedArrayStringField"); + Map<String, Object> nestedArrayStringField = + (Map<String, Object>) properties.get("nestedArrayStringField"); assertEquals(nestedArrayStringField.get("type"), "keyword"); assertEquals(nestedArrayStringField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> nestedArrayStringFieldSubfields = (Map<String, Object>) nestedArrayStringField.get("fields"); + Map<String, Object> nestedArrayStringFieldSubfields = + (Map<String, Object>) nestedArrayStringField.get("fields"); assertEquals(nestedArrayStringFieldSubfields.size(), 2); assertTrue(nestedArrayStringFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayStringFieldSubfields.containsKey("keyword")); - Map<String, Object> nestedArrayArrayField = (Map<String, Object>) properties.get("nestedArrayArrayField"); + Map<String, Object> nestedArrayArrayField = + (Map<String, Object>) properties.get("nestedArrayArrayField"); assertEquals(nestedArrayArrayField.get("type"), "keyword"); assertEquals(nestedArrayArrayField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> 
nestedArrayArrayFieldSubfields = (Map<String, Object>) nestedArrayArrayField.get("fields"); + Map<String, Object> nestedArrayArrayFieldSubfields = + (Map<String, Object>) nestedArrayArrayField.get("fields"); assertEquals(nestedArrayArrayFieldSubfields.size(), 2); assertTrue(nestedArrayArrayFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayArrayFieldSubfields.containsKey("keyword")); @@ -77,7 +100,8 @@ public void testMappingsBuilder() { Map<String, Object> textArrayField = (Map<String, Object>) properties.get("textArrayField"); assertEquals(textArrayField.get("type"), "keyword"); assertEquals(textArrayField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> textArrayFieldSubfields = (Map<String, Object>) textArrayField.get("fields"); + Map<String, Object> textArrayFieldSubfields = + (Map<String, Object>) textArrayField.get("fields"); assertEquals(textArrayFieldSubfields.size(), 3); assertTrue(textArrayFieldSubfields.containsKey("delimited")); assertTrue(textArrayFieldSubfields.containsKey("ngram")); @@ -108,7 +132,8 @@ public void testMappingsBuilder() { Map<String, Object> nestedForeignKey = (Map<String, Object>) properties.get("nestedForeignKey"); assertEquals(nestedForeignKey.get("type"), "text"); assertEquals(nestedForeignKey.get("analyzer"), "urn_component"); - Map<String, Object> nestedForeignKeySubfields = (Map<String, Object>) nestedForeignKey.get("fields"); + Map<String, Object> nestedForeignKeySubfields = + (Map<String, Object>) nestedForeignKey.get("fields"); assertEquals(nestedForeignKeySubfields.size(), 2); assertTrue(nestedForeignKeySubfields.containsKey("keyword")); assertTrue(nestedForeignKeySubfields.containsKey("ngram")); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java index 3896ba749e85e..dd30010b08758 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.GoldenTestBase; @@ -11,34 +13,35 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class GoldenOpenSearchTest extends GoldenTestBase { - @Autowired - @Qualifier("longTailSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; - - - @NotNull - @Override - protected EntityRegistry getEntityRegistry() { - return entityRegistry; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - assertNotNull(searchService); - } + @Autowired + @Qualifier("longTailSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; + + @NotNull + @Override + protected 
EntityRegistry getEntityRegistry() { + return entityRegistry; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + assertNotNull(searchService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java index 312b56364bd91..ef1ed51eb4799 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.search.indexbuilder.IndexBuilderTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; @@ -8,23 +10,19 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class IndexBuilderOpenSearchTest extends IndexBuilderTestBase { - @Autowired - private RestHighLevelClient _searchClient; + @Autowired private RestHighLevelClient _searchClient; - @NotNull - @Override - protected RestHighLevelClient getSearchClient() { - return _searchClient; - } + @NotNull + @Override + protected RestHighLevelClient getSearchClient() { + return _searchClient; + } - @Test - public void initTest() { - assertNotNull(_searchClient); - } + @Test + public void initTest() { + assertNotNull(_searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java index 6fc0677ad6e39..cc17e3287544c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java @@ -12,32 +12,35 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({OpenSearchSuite.class, SearchLineageFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchLineageFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageDataFixtureOpenSearchTest extends LineageDataFixtureTestBase { - @Autowired - @Qualifier("searchLineageSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("searchLineageLineageSearchService") - protected LineageSearchService lineageService; - - @NotNull - @Override - protected LineageSearchService getLineageService() { - return lineageService; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - AssertJUnit.assertNotNull(lineageService); - } + @Autowired + @Qualifier("searchLineageSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("searchLineageLineageSearchService") + protected LineageSearchService lineageService; + + @NotNull + @Override + protected LineageSearchService getLineageService() { + return lineageService; + } + + @NotNull + @Override + protected 
SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + AssertJUnit.assertNotNull(lineageService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java index 1a6242c2211fd..26c2cf28cdeca 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageServiceOpenSearchTest extends LineageServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java index 559c623c97d5a..42a178893e837 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java @@ -10,22 +10,23 @@ @TestConfiguration public class OpenSearchSuite extends AbstractTestNGSpringContextTests { - private static final OpenSearchTestContainer OPENSEARCH_TEST_CONTAINER; - private static GenericContainer<?> container; - static { - OPENSEARCH_TEST_CONTAINER = new OpenSearchTestContainer(); - } + private static final OpenSearchTestContainer OPENSEARCH_TEST_CONTAINER; + private static GenericContainer<?> container; - @AfterSuite - public void after() { - OPENSEARCH_TEST_CONTAINER.stopContainer(); - } + static { + OPENSEARCH_TEST_CONTAINER = new OpenSearchTestContainer(); + } + + @AfterSuite + public void after() { + OPENSEARCH_TEST_CONTAINER.stopContainer(); + } - @Bean(name = "testSearchContainer") - public GenericContainer<?> testSearchContainer() { - if (container == null) { - container = OPENSEARCH_TEST_CONTAINER.startContainer(); - } - return container; + @Bean(name = "testSearchContainer") + public GenericContainer<?> testSearchContainer() { + if (container == null) { + container = OPENSEARCH_TEST_CONTAINER.startContainer(); } + return container; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java index 081eb5f70fc85..d358ba177f91d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; @@ -13,32 +15,30 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - -/** - * Runs sample data fixture tests for Opensearch test container - */ +/** Runs sample data fixture tests for Opensearch test container */ @Getter -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SampleDataFixtureOpenSearchTest extends SampleDataFixtureTestBase { - @Autowired - private RestHighLevelClient searchClient; + @Autowired private RestHighLevelClient searchClient; - @Autowired - @Qualifier("sampleDataSearchService") - protected SearchService searchService; + @Autowired + @Qualifier("sampleDataSearchService") + protected SearchService searchService; - @Autowired - @Qualifier("sampleDataEntityClient") - protected EntityClient entityClient; + @Autowired + @Qualifier("sampleDataEntityClient") + protected EntityClient entityClient; - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; - @Test - public void initTest() { - assertNotNull(searchClient); - } + @Test + public void initTest() { + assertNotNull(searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java index 0b166975da0d1..7f799d8eebf0a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.search.query.SearchDAOTestBase; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -12,16 +14,16 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Getter -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchDAOOpenSearchTest extends SearchDAOTestBase { - @Autowired - private RestHighLevelClient searchClient; - @Autowired - private SearchConfiguration searchConfiguration; + @Autowired private RestHighLevelClient searchClient; + @Autowired private SearchConfiguration searchConfiguration; + @Autowired @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java 
b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java index 8a55ba7b37ef9..1127ba2089a91 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchServiceOpenSearchTest extends SearchServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java index f0bb8e1c12479..7ba90319cf1d3 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java @@ -11,16 +11,12 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class SystemMetadataServiceOpenSearchTest extends SystemMetadataServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java index 467f7fb43be1b..80db8864014c3 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class TestEntityOpenSearchTest extends TestEntityTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired 
- private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java index 3333b9f0942f5..63dffa9c21004 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java @@ -14,12 +14,9 @@ @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class TimeseriesAspectServiceOpenSearchTest extends TimeseriesAspectServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java index 91e7747afb4a1..a261b53f25c60 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java @@ -1,12 +1,18 @@ package com.linkedin.metadata.search.query; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.entity.TestEntityRegistry; +import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -23,32 +29,24 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; - @Import(SearchCommonTestConfiguration.class) public class BrowseDAOTest extends AbstractTestNGSpringContextTests { private RestHighLevelClient _mockClient; private ESBrowseDAO _browseDAO; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; 
@BeforeMethod public void setup() { _mockClient = mock(RestHighLevelClient.class); - _browseDAO = new ESBrowseDAO( - new TestEntityRegistry(), - _mockClient, - new IndexConventionImpl("es_browse_dao_test"), - _searchConfiguration, - _customSearchConfiguration - ); + _browseDAO = + new ESBrowseDAO( + new TestEntityRegistry(), + _mockClient, + new IndexConventionImpl("es_browse_dao_test"), + _searchConfiguration, + _customSearchConfiguration); } public static Urn makeUrn(Object id) { @@ -76,7 +74,7 @@ public void testGetBrowsePath() throws Exception { // Test the case of single search hit & browsePaths field doesn't exist sourceMap.remove("browse_paths"); when(mockSearchHit.getSourceAsMap()).thenReturn(sourceMap); - when(mockSearchHits.getHits()).thenReturn(new SearchHit[]{mockSearchHit}); + when(mockSearchHits.getHits()).thenReturn(new SearchHit[] {mockSearchHit}); when(mockSearchResponse.getHits()).thenReturn(mockSearchHits); when(_mockClient.search(any(), eq(RequestOptions.DEFAULT))).thenReturn(mockSearchResponse); assertEquals(_browseDAO.getBrowsePaths("dataset", dummyUrn).size(), 0); @@ -84,11 +82,11 @@ public void testGetBrowsePath() throws Exception { // Test the case of single search hit & browsePaths field exists sourceMap.put("browsePaths", Collections.singletonList("foo")); when(mockSearchHit.getSourceAsMap()).thenReturn(sourceMap); - when(mockSearchHits.getHits()).thenReturn(new SearchHit[]{mockSearchHit}); + when(mockSearchHits.getHits()).thenReturn(new SearchHit[] {mockSearchHit}); when(mockSearchResponse.getHits()).thenReturn(mockSearchHits); when(_mockClient.search(any(), eq(RequestOptions.DEFAULT))).thenReturn(mockSearchResponse); List<String> browsePaths = _browseDAO.getBrowsePaths("dataset", dummyUrn); assertEquals(browsePaths.size(), 1); assertEquals(browsePaths.get(0), "foo"); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java index 2dbc142d45071..ba909dc3822c5 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java @@ -1,5 +1,12 @@ package com.linkedin.metadata.search.query; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.fail; + import com.datahub.test.Snapshot; import com.google.common.collect.ImmutableList; import com.linkedin.data.template.LongMap; @@ -22,286 +29,404 @@ import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; -import org.opensearch.client.RestHighLevelClient; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import org.opensearch.client.RestHighLevelClient; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; 
-import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -abstract public class SearchDAOTestBase extends AbstractTestNGSpringContextTests { - - abstract protected RestHighLevelClient getSearchClient(); - - abstract protected SearchConfiguration getSearchConfiguration(); - - abstract protected IndexConvention getIndexConvention(); - - EntityRegistry _entityRegistry = new SnapshotEntityRegistry(new Snapshot()); - - - @Test - public void testTransformFilterForEntitiesNoChange() { - Criterion c = new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertEquals(f, transformedFilter); +public abstract class SearchDAOTestBase extends AbstractTestNGSpringContextTests { + + protected abstract RestHighLevelClient getSearchClient(); + + protected abstract SearchConfiguration getSearchConfiguration(); + + protected abstract IndexConvention getIndexConvention(); + + EntityRegistry _entityRegistry = new SnapshotEntityRegistry(new Snapshot()); + + @Test + public void testTransformFilterForEntitiesNoChange() { + Criterion c = + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertEquals(f, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesNullFilter() { + Filter transformedFilter = SearchUtil.transformFilterForEntities(null, getIndexConvention()); + assertNotNull(getIndexConvention()); + assertEquals(null, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesWithChanges() { + + Criterion c = + new Criterion() + .setValue("dataset") + .setValues(new StringArray(ImmutableList.of("dataset"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesNullFilter() { - Filter transformedFilter = SearchUtil.transformFilterForEntities(null, getIndexConvention()); - assertNotNull(getIndexConvention()); - assertEquals(null, transformedFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datasetindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + 
Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); + + assertEquals(expectedNewFilter, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesWithUnderscore() { + + Criterion c = + new Criterion() + .setValue("data_job") + .setValues(new StringArray(ImmutableList.of("data_job"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithChanges() { - - Criterion c = new Criterion().setValue("dataset").setValues( - new StringArray(ImmutableList.of("dataset")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datasetindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datasetindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); - - assertEquals(expectedNewFilter, transformedFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datajobindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datajobindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); + + assertEquals(transformedFilter, expectedNewFilter); + } + + @Test + public void testTransformFilterForEntitiesWithSomeChanges() { + + Criterion criterionChanged = + new Criterion() + .setValue("dataset") + .setValues(new StringArray(ImmutableList.of("dataset"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + Criterion criterionUnchanged = + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(criterionChanged, criterionUnchanged)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithUnderscore() { - - Criterion c = new Criterion().setValue("data_job").setValues( - new 
StringArray(ImmutableList.of("data_job")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datajobindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datajobindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); - - assertEquals(transformedFilter, expectedNewFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datasetindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(expectedNewCriterion, criterionUnchanged)))); + + assertEquals(expectedNewFilter, transformedFilter); + } + + @Test + public void testTransformIndexIntoEntityNameSingle() { + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + getIndexConvention(), + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + // Empty aggregations + final SearchResultMetadata searchResultMetadata = + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); + SearchResult result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata(searchResultMetadata) + .setFrom(0) + .setPageSize(100) + .setNumEntities(30); + SearchResult expectedResult = null; + try { + expectedResult = result.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithSomeChanges() { - - Criterion criterionChanged = new Criterion().setValue("dataset").setValues( - new StringArray(ImmutableList.of("dataset")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - Criterion criterionUnchanged = new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criterionChanged, criterionUnchanged)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datasetindex_v2").setValues( - new 
StringArray(ImmutableList.of("smpldat_datasetindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion, criterionUnchanged)))); - - assertEquals(expectedNewFilter, transformedFilter); + assertEquals(expectedResult, searchDAO.transformIndexIntoEntityName(result)); + + // one facet, do not transform + Map<String, Long> aggMap = Map.of("urn:li:corpuser:datahub", Long.valueOf(3)); + + List<AggregationMetadata> aggregationMetadataList = new ArrayList<>(); + aggregationMetadataList.add( + new AggregationMetadata() + .setName("owners") + .setDisplayName("Owned by") + .setAggregations(new LongMap(aggMap)) + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(aggMap, Collections.emptySet())))); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + result.setMetadata(searchResultMetadata); + + try { + expectedResult = result.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - @Test - public void testTransformIndexIntoEntityNameSingle() { - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), getIndexConvention(), false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - // Empty aggregations - final SearchResultMetadata searchResultMetadata = - new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - SearchResult result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(searchResultMetadata) - .setFrom(0) - .setPageSize(100) - .setNumEntities(30); - SearchResult expectedResult = null; - try { - expectedResult = result.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(expectedResult, searchDAO.transformIndexIntoEntityName(result)); - - // one facet, do not transform - Map<String, Long> aggMap = Map.of("urn:li:corpuser:datahub", Long.valueOf(3)); - - List<AggregationMetadata> aggregationMetadataList = new ArrayList<>(); - aggregationMetadataList.add(new AggregationMetadata().setName("owners") - .setDisplayName("Owned by") - .setAggregations(new LongMap(aggMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(aggMap, Collections.emptySet()))) - ); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); - result.setMetadata(searchResultMetadata); - - try { - expectedResult = result.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - - // one facet, transform - Map<String, Long> entityTypeMap = Map.of("smpldat_datasetindex_v2", Long.valueOf(3)); - - aggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType") + // one facet, transform + Map<String, Long> entityTypeMap = Map.of("smpldat_datasetindex_v2", Long.valueOf(3)); + + aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType") .setDisplayName("Type") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); - 
result.setMetadata(searchResultMetadata); - - Map<String, Long> expectedEntityTypeMap = Map.of("dataset", Long.valueOf(3)); - - List<AggregationMetadata> expectedAggregationMetadataList = List.of( - new AggregationMetadata().setName("_entityType") - .setDisplayName("Type") - .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - expectedResult.setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - } - - @Test - public void testTransformIndexIntoEntityNameNested() { - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), getIndexConvention(), false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - // One nested facet - Map<String, Long> entityTypeMap = Map.of( - String.format("smpldat_datasetindex_v2%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("smpldat_datasetindex_v2%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "smpldat_datasetindex_v2", Long.valueOf(20) - ); - List<AggregationMetadata> aggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType␞owners") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + result.setMetadata(searchResultMetadata); + + Map<String, Long> expectedEntityTypeMap = Map.of("dataset", Long.valueOf(3)); + + List<AggregationMetadata> expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType") + .setDisplayName("Type") + .setAggregations(new LongMap(expectedEntityTypeMap)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + expectedResult.setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + } + + @Test + public void testTransformIndexIntoEntityNameNested() { + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + getIndexConvention(), + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + // One nested facet + Map<String, Long> entityTypeMap = + Map.of( + String.format( + "smpldat_datasetindex_v2%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format( + "smpldat_datasetindex_v2%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "smpldat_datasetindex_v2", + Long.valueOf(20)); + List<AggregationMetadata> aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType␞owners") .setDisplayName("Type␞Owned By") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - SearchResult result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(aggregationMetadataList) - )) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - - Map<String, Long> expectedEntityTypeMap = Map.of( 
- String.format("dataset%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("dataset%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "dataset", Long.valueOf(20) - ); - - List<AggregationMetadata> expectedAggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType␞owners") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + SearchResult result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(aggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + + Map<String, Long> expectedEntityTypeMap = + Map.of( + String.format("dataset%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format("dataset%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "dataset", + Long.valueOf(20)); + + List<AggregationMetadata> expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType␞owners") .setDisplayName("Type␞Owned By") .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - SearchResult expectedResult = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(expectedAggregationMetadataList))) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - - // One nested facet, opposite order - entityTypeMap = Map.of( - String.format("urn:li:corpuser:datahub%ssmpldat_datasetindex_v2", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("urn:li:corpuser:datahub%ssmpldat_chartindex_v2", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "urn:li:corpuser:datahub", Long.valueOf(20) - ); - aggregationMetadataList = List.of(new AggregationMetadata().setName("owners␞_entityType") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + SearchResult expectedResult = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + + // One nested facet, opposite order + entityTypeMap = + Map.of( + String.format( + "urn:li:corpuser:datahub%ssmpldat_datasetindex_v2", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format( + "urn:li:corpuser:datahub%ssmpldat_chartindex_v2", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "urn:li:corpuser:datahub", + Long.valueOf(20)); + aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("owners␞_entityType") .setDisplayName("Owned By␞Type") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(aggregationMetadataList) - )) - .setFrom(0) - 
.setPageSize(100) - .setNumEntities(50); - - expectedEntityTypeMap = Map.of( - String.format("urn:li:corpuser:datahub%sdataset", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("urn:li:corpuser:datahub%schart", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "urn:li:corpuser:datahub", Long.valueOf(20) - ); - - expectedAggregationMetadataList = List.of(new AggregationMetadata().setName("owners␞_entityType") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(aggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + + expectedEntityTypeMap = + Map.of( + String.format("urn:li:corpuser:datahub%sdataset", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format("urn:li:corpuser:datahub%schart", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "urn:li:corpuser:datahub", + Long.valueOf(20)); + + expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("owners␞_entityType") .setDisplayName("Owned By␞Type") .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - expectedResult = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(expectedAggregationMetadataList))) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - } + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + expectedResult = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java index 66e7b62741f4c..b52f4cd4e92f7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java @@ -1,47 +1,45 @@ package com.linkedin.metadata.search.query.request; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.google.common.collect.ImmutableList; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder; import org.opensearch.search.aggregations.AggregationBuilder; import org.testng.Assert; import org.testng.annotations.Test; - public class 
AggregationQueryBuilderTest { @Test public void testGetDefaultAggregationsHasFields() { - SearchableAnnotation annotation = new SearchableAnnotation( - "test", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - false, - true, - Optional.empty(), - Optional.of("Has Test"), - 1.0, - Optional.of("hasTest"), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); + SearchableAnnotation annotation = + new SearchableAnnotation( + "test", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + true, + Optional.empty(), + Optional.of("Has Test"), + 1.0, + Optional.of("hasTest"), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); SearchConfiguration config = new SearchConfiguration(); config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder( - config, ImmutableList.of(annotation)); + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation)); List<AggregationBuilder> aggs = builder.getAggregations(); @@ -51,27 +49,27 @@ public void testGetDefaultAggregationsHasFields() { @Test public void testGetDefaultAggregationsFields() { - SearchableAnnotation annotation = new SearchableAnnotation( - "test", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - true, - false, - Optional.of("Test Filter"), - Optional.empty(), - 1.0, - Optional.empty(), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); + SearchableAnnotation annotation = + new SearchableAnnotation( + "test", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + true, + false, + Optional.of("Test Filter"), + Optional.empty(), + 1.0, + Optional.empty(), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); SearchConfiguration config = new SearchConfiguration(); config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder( - config, ImmutableList.of(annotation)); + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation)); List<AggregationBuilder> aggs = builder.getAggregations(); @@ -81,56 +79,53 @@ public void testGetDefaultAggregationsFields() { @Test public void testGetSpecificAggregationsHasFields() { - SearchableAnnotation annotation1 = new SearchableAnnotation( - "test1", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - false, - false, - Optional.empty(), - Optional.of("Has Test"), - 1.0, - Optional.of("hasTest1"), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); - - SearchableAnnotation annotation2 = new SearchableAnnotation( - "test2", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - false, - false, - Optional.of("Test Filter"), - Optional.empty(), - 1.0, - Optional.empty(), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); + SearchableAnnotation annotation1 = + new SearchableAnnotation( + "test1", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.empty(), + Optional.of("Has Test"), + 1.0, + Optional.of("hasTest1"), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); + + SearchableAnnotation annotation2 = + new SearchableAnnotation( + "test2", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.of("Test Filter"), + Optional.empty(), + 1.0, + Optional.empty(), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); SearchConfiguration config = new SearchConfiguration(); 
config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder( - config, ImmutableList.of(annotation1, annotation2)); + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2)); // Case 1: Ask for fields that should exist. - List<AggregationBuilder> aggs = builder.getAggregations( - ImmutableList.of("test1", "test2", "hasTest1") - ); + List<AggregationBuilder> aggs = + builder.getAggregations(ImmutableList.of("test1", "test2", "hasTest1")); Assert.assertEquals(aggs.size(), 3); Set<String> facets = aggs.stream().map(AggregationBuilder::getName).collect(Collectors.toSet()); Assert.assertEquals(ImmutableSet.of("test1", "test2", "hasTest1"), facets); // Case 2: Ask for fields that should NOT exist. - aggs = builder.getAggregations( - ImmutableList.of("hasTest2") - ); + aggs = builder.getAggregations(ImmutableList.of("hasTest2")); Assert.assertEquals(aggs.size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java index 34b98f38254cd..ab832eb1ac24f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java @@ -1,10 +1,12 @@ package com.linkedin.metadata.search.query.request; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.linkedin.metadata.TestEntitySpecBuilder; +import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler; import java.util.List; import java.util.Map; - -import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler; import org.opensearch.action.search.SearchRequest; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; @@ -14,12 +16,9 @@ import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class AutocompleteRequestHandlerTest { - private AutocompleteRequestHandler handler = AutocompleteRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec()); + private AutocompleteRequestHandler handler = + AutocompleteRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec()); @Test public void testDefaultAutocompleteRequest() { @@ -38,7 +37,8 @@ public void testDefaultAutocompleteRequest() { assertTrue(queryFields.containsKey("keyPart1.ngram._4gram")); assertEquals(autocompleteQuery.type(), MultiMatchQueryBuilder.Type.BOOL_PREFIX); - MatchPhrasePrefixQueryBuilder prefixQuery = (MatchPhrasePrefixQueryBuilder) query.should().get(0); + MatchPhrasePrefixQueryBuilder prefixQuery = + (MatchPhrasePrefixQueryBuilder) query.should().get(0); assertEquals("keyPart1.delimited", prefixQuery.fieldName()); assertEquals(query.mustNot().size(), 1); @@ -75,7 +75,8 @@ public void testAutocompleteRequestWithField() { assertTrue(queryFields.containsKey("field.ngram._4gram")); assertEquals(autocompleteQuery.type(), MultiMatchQueryBuilder.Type.BOOL_PREFIX); - MatchPhrasePrefixQueryBuilder prefixQuery = (MatchPhrasePrefixQueryBuilder) query.should().get(0); + MatchPhrasePrefixQueryBuilder prefixQuery = + (MatchPhrasePrefixQueryBuilder) 
query.should().get(0); assertEquals("field.delimited", prefixQuery.fieldName()); MatchQueryBuilder removedFilter = (MatchQueryBuilder) query.mustNot().get(0); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java index 6b6664ffdf30e..105ee2652dc30 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java @@ -1,14 +1,21 @@ package com.linkedin.metadata.search.query.request; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.BoolQueryConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.search.elasticsearch.query.request.CustomizedQueryHandler; import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.index.query.MatchAllQueryBuilder; @@ -17,172 +24,192 @@ import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.testng.annotations.Test; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - public class CustomizedQueryHandlerTest { - public static final ObjectMapper TEST_MAPPER = new YAMLMapper(); - private static final CustomSearchConfiguration TEST_CONFIG; - static { - try { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_test.yml"); - TEST_CONFIG = customConfiguration.resolve(TEST_MAPPER); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - public static final SearchQueryBuilder SEARCH_QUERY_BUILDER; - static { - SEARCH_QUERY_BUILDER = new SearchQueryBuilder(new SearchConfiguration(), TEST_CONFIG); - } - private static final List<QueryConfiguration> EXPECTED_CONFIGURATION = List.of( - QueryConfiguration.builder() - .queryRegex("[*]|") - .simpleQuery(false) - .exactMatchQuery(false) - .prefixMatchQuery(false) - .functionScore(Map.of("score_mode", "avg", "boost_mode", "multiply", - "functions", List.of( - Map.of( - "weight", 1, - "filter", Map.<String, Object>of("match_all", Map.<String, Object>of())), - Map.of( - "weight", 0.5, - "filter", Map.<String, Object>of("term", Map.of( - "materialized", Map.of("value", true) - ))), - Map.of( - "weight", 0.5, - "filter", Map.<String, Object>of("term", Map.<String, Object>of( - "deprecated", Map.of("value", true) - ))) - ))) - .build(), - 
QueryConfiguration.builder() - .queryRegex(".*") - .simpleQuery(true) - .exactMatchQuery(true) - .prefixMatchQuery(true) - .boolQuery(BoolQueryConfiguration.builder() - .must(List.of( - Map.of("term", Map.of("name", "{{query_string}}")) - )) - .build()) - .functionScore(Map.of("score_mode", "avg", "boost_mode", "multiply", - "functions", List.of( - Map.of( - "weight", 1, - "filter", Map.<String, Object>of("match_all", Map.<String, Object>of())), - Map.of( - "weight", 0.5, - "filter", Map.<String, Object>of("term", Map.of( - "materialized", Map.of("value", true) - ))), - Map.of( - "weight", 1.5, - "filter", Map.<String, Object>of("term", Map.<String, Object>of( - "deprecated", Map.of("value", false) - ))) - ))) - .build() - ); - - - @Test - public void configParsingTest() { - assertNotNull(TEST_CONFIG); - assertEquals(TEST_CONFIG.getQueryConfigurations(), EXPECTED_CONFIGURATION); + public static final ObjectMapper TEST_MAPPER = new YAMLMapper(); + private static final CustomSearchConfiguration TEST_CONFIG; + + static { + try { + CustomConfiguration customConfiguration = new CustomConfiguration(); + customConfiguration.setEnabled(true); + customConfiguration.setFile("search_config_test.yml"); + TEST_CONFIG = customConfiguration.resolve(TEST_MAPPER); + } catch (IOException e) { + throw new RuntimeException(e); } - - @Test - public void customizedQueryHandlerInitTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - - assertEquals(test.getQueryConfigurations().stream().map(e -> e.getKey().toString()).collect(Collectors.toList()), - List.of("[*]|", ".*")); - - assertEquals(test.getQueryConfigurations().stream() - .map(e -> Map.entry(e.getKey().toString(), e.getValue())) - .collect(Collectors.toList()), - EXPECTED_CONFIGURATION.stream() - .map(cfg -> Map.entry(cfg.getQueryRegex(), cfg)) - .collect(Collectors.toList())); - } - - @Test - public void patternMatchTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - - for (String selectAllQuery: List.of("*", "")) { - QueryConfiguration actual = test.lookupQueryConfig(selectAllQuery).get(); - assertEquals(actual, EXPECTED_CONFIGURATION.get(0), String.format("Failed to match: `%s`", selectAllQuery)); - } - - for (String otherQuery: List.of("foo", "bar")) { - QueryConfiguration actual = test.lookupQueryConfig(otherQuery).get(); - assertEquals(actual, EXPECTED_CONFIGURATION.get(1)); - } + } + + public static final SearchQueryBuilder SEARCH_QUERY_BUILDER; + + static { + SEARCH_QUERY_BUILDER = new SearchQueryBuilder(new SearchConfiguration(), TEST_CONFIG); + } + + private static final List<QueryConfiguration> EXPECTED_CONFIGURATION = + List.of( + QueryConfiguration.builder() + .queryRegex("[*]|") + .simpleQuery(false) + .exactMatchQuery(false) + .prefixMatchQuery(false) + .functionScore( + Map.of( + "score_mode", + "avg", + "boost_mode", + "multiply", + "functions", + List.of( + Map.of( + "weight", + 1, + "filter", + Map.<String, Object>of("match_all", Map.<String, Object>of())), + Map.of( + "weight", + 0.5, + "filter", + Map.<String, Object>of( + "term", Map.of("materialized", Map.of("value", true)))), + Map.of( + "weight", + 0.5, + "filter", + Map.<String, Object>of( + "term", + Map.<String, Object>of("deprecated", Map.of("value", true))))))) + .build(), + QueryConfiguration.builder() + .queryRegex(".*") + .simpleQuery(true) + .exactMatchQuery(true) + .prefixMatchQuery(true) + .boolQuery( + BoolQueryConfiguration.builder() + .must(List.of(Map.of("term", Map.of("name", 
"{{query_string}}")))) + .build()) + .functionScore( + Map.of( + "score_mode", + "avg", + "boost_mode", + "multiply", + "functions", + List.of( + Map.of( + "weight", + 1, + "filter", + Map.<String, Object>of("match_all", Map.<String, Object>of())), + Map.of( + "weight", + 0.5, + "filter", + Map.<String, Object>of( + "term", Map.of("materialized", Map.of("value", true)))), + Map.of( + "weight", + 1.5, + "filter", + Map.<String, Object>of( + "term", + Map.<String, Object>of("deprecated", Map.of("value", false))))))) + .build()); + + @Test + public void configParsingTest() { + assertNotNull(TEST_CONFIG); + assertEquals(TEST_CONFIG.getQueryConfigurations(), EXPECTED_CONFIGURATION); + } + + @Test + public void customizedQueryHandlerInitTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + + assertEquals( + test.getQueryConfigurations().stream() + .map(e -> e.getKey().toString()) + .collect(Collectors.toList()), + List.of("[*]|", ".*")); + + assertEquals( + test.getQueryConfigurations().stream() + .map(e -> Map.entry(e.getKey().toString(), e.getValue())) + .collect(Collectors.toList()), + EXPECTED_CONFIGURATION.stream() + .map(cfg -> Map.entry(cfg.getQueryRegex(), cfg)) + .collect(Collectors.toList())); + } + + @Test + public void patternMatchTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + + for (String selectAllQuery : List.of("*", "")) { + QueryConfiguration actual = test.lookupQueryConfig(selectAllQuery).get(); + assertEquals( + actual, + EXPECTED_CONFIGURATION.get(0), + String.format("Failed to match: `%s`", selectAllQuery)); } - @Test - public void functionScoreQueryBuilderTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - MatchAllQueryBuilder inputQuery = QueryBuilders.matchAllQuery(); - - /* - * Test select star - */ - FunctionScoreQueryBuilder selectStarTest = SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(test.lookupQueryConfig("*").get(), - inputQuery); - - FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedSelectStarScoreFunctions = { - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - ScoreFunctionBuilders.weightFactorFunction(1f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("materialized", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("deprecated", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ) - }; - FunctionScoreQueryBuilder expectedSelectStar = new FunctionScoreQueryBuilder(expectedSelectStarScoreFunctions) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) - .boostMode(CombineFunction.MULTIPLY); - - assertEquals(selectStarTest, expectedSelectStar); - - /* - * Test default (non-select start) - */ - FunctionScoreQueryBuilder defaultTest = SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(test.lookupQueryConfig("foobar").get(), inputQuery); - - FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedDefaultScoreFunctions = { - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - ScoreFunctionBuilders.weightFactorFunction(1f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("materialized", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("deprecated", false), - ScoreFunctionBuilders.weightFactorFunction(1.5f) - ) - }; - FunctionScoreQueryBuilder expectedDefault 
= new FunctionScoreQueryBuilder(expectedDefaultScoreFunctions) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) - .boostMode(CombineFunction.MULTIPLY); - - assertEquals(defaultTest, expectedDefault); + for (String otherQuery : List.of("foo", "bar")) { + QueryConfiguration actual = test.lookupQueryConfig(otherQuery).get(); + assertEquals(actual, EXPECTED_CONFIGURATION.get(1)); } + } + + @Test + public void functionScoreQueryBuilderTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + MatchAllQueryBuilder inputQuery = QueryBuilders.matchAllQuery(); + + /* + * Test select star + */ + FunctionScoreQueryBuilder selectStarTest = + SEARCH_QUERY_BUILDER.functionScoreQueryBuilder( + test.lookupQueryConfig("*").get(), inputQuery); + + FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedSelectStarScoreFunctions = { + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("materialized", true), + ScoreFunctionBuilders.weightFactorFunction(0.5f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("deprecated", true), + ScoreFunctionBuilders.weightFactorFunction(0.5f)) + }; + FunctionScoreQueryBuilder expectedSelectStar = + new FunctionScoreQueryBuilder(expectedSelectStarScoreFunctions) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) + .boostMode(CombineFunction.MULTIPLY); + + assertEquals(selectStarTest, expectedSelectStar); + + /* + * Test default (non-select start) + */ + FunctionScoreQueryBuilder defaultTest = + SEARCH_QUERY_BUILDER.functionScoreQueryBuilder( + test.lookupQueryConfig("foobar").get(), inputQuery); + + FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedDefaultScoreFunctions = { + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("materialized", true), + ScoreFunctionBuilders.weightFactorFunction(0.5f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("deprecated", false), + ScoreFunctionBuilders.weightFactorFunction(1.5f)) + }; + FunctionScoreQueryBuilder expectedDefault = + new FunctionScoreQueryBuilder(expectedDefaultScoreFunctions) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) + .boostMode(CombineFunction.MULTIPLY); + + assertEquals(defaultTest, expectedDefault); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java index 9c0815efdc8b4..42f4f480bfbdd 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java @@ -1,23 +1,33 @@ package com.linkedin.metadata.search.query.request; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.URN_SEARCH_ANALYZER; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static 
org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.google.common.collect.ImmutableList; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.PathSpec; -import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; -import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.WordGramConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.TestEntitySpecBuilder; - import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; +import com.linkedin.util.Pair; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.io.IOException; import java.util.List; import java.util.Map; @@ -25,9 +35,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.util.Pair; +import org.mockito.Mockito; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; @@ -37,28 +45,18 @@ import org.opensearch.index.query.SimpleQueryStringBuilder; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; -import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Import; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.URN_SEARCH_ANALYZER; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - @Import(SearchCommonTestConfiguration.class) public class SearchQueryBuilderTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityRegistry entityRegistry; + @Autowired private EntityRegistry entityRegistry; public static SearchConfiguration testQueryConfig; + static { testQueryConfig = new SearchConfiguration(); 
testQueryConfig.setMaxTermBucketSize(20); @@ -84,25 +82,31 @@ public class SearchQueryBuilderTest extends AbstractTestNGSpringContextTests { testQueryConfig.setWordGram(wordGramConfiguration); testQueryConfig.setPartial(partialConfiguration); } - public static final SearchQueryBuilder TEST_BUILDER = new SearchQueryBuilder(testQueryConfig, null); + + public static final SearchQueryBuilder TEST_BUILDER = + new SearchQueryBuilder(testQueryConfig, null); @Test public void testQueryBuilderFulltext() { FunctionScoreQueryBuilder result = - (FunctionScoreQueryBuilder) TEST_BUILDER.buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", - true); + (FunctionScoreQueryBuilder) + TEST_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); assertEquals(shouldQueries.size(), 2); BoolQueryBuilder analyzerGroupQuery = (BoolQueryBuilder) shouldQueries.get(0); - SimpleQueryStringBuilder keywordQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(0); + SimpleQueryStringBuilder keywordQuery = + (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(0); assertEquals(keywordQuery.value(), "testQuery"); assertEquals(keywordQuery.analyzer(), "keyword"); Map<String, Float> keywordFields = keywordQuery.fields(); assertEquals(keywordFields.size(), 9); - assertEquals(keywordFields, Map.of( + assertEquals( + keywordFields, + Map.of( "urn", 10.f, "textArrayField", 1.0f, "customProperties", 1.0f, @@ -111,47 +115,55 @@ public void testQueryBuilderFulltext() { "textFieldOverride", 1.0f, "nestedArrayStringField", 1.0f, "keyPart1", 10.0f, - "esObjectField", 1.0f - )); + "esObjectField", 1.0f)); - SimpleQueryStringBuilder urnComponentQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(1); + SimpleQueryStringBuilder urnComponentQuery = + (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(1); assertEquals(urnComponentQuery.value(), "testQuery"); assertEquals(urnComponentQuery.analyzer(), URN_SEARCH_ANALYZER); - assertEquals(urnComponentQuery.fields(), Map.of( + assertEquals( + urnComponentQuery.fields(), + Map.of( "nestedForeignKey", 1.0f, - "foreignKey", 1.0f - )); + "foreignKey", 1.0f)); - SimpleQueryStringBuilder fulltextQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(2); + SimpleQueryStringBuilder fulltextQuery = + (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(2); assertEquals(fulltextQuery.value(), "testQuery"); assertEquals(fulltextQuery.analyzer(), TEXT_SEARCH_ANALYZER); - assertEquals(fulltextQuery.fields(), Map.of( + assertEquals( + fulltextQuery.fields(), + Map.of( "textFieldOverride.delimited", 0.4f, "keyPart1.delimited", 4.0f, "nestedArrayArrayField.delimited", 0.4f, "urn.delimited", 7.0f, "textArrayField.delimited", 0.4f, "nestedArrayStringField.delimited", 0.4f, - "wordGramField.delimited", 0.4f - )); + "wordGramField.delimited", 0.4f)); BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(1); assertTrue(boolPrefixQuery.should().size() > 0); - List<Pair<String, Float>> prefixFieldWeights = boolPrefixQuery.should().stream().map(prefixQuery -> { - if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { - MatchPhrasePrefixQueryBuilder builder = (MatchPhrasePrefixQueryBuilder) prefixQuery; - return Pair.of(builder.fieldName(), builder.boost()); - } else if (prefixQuery instanceof TermQueryBuilder) { - // exact - TermQueryBuilder builder = 
(TermQueryBuilder) prefixQuery; - return Pair.of(builder.fieldName(), builder.boost()); - } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { - // ngram - MatchPhraseQueryBuilder builder = (MatchPhraseQueryBuilder) prefixQuery; - return Pair.of(builder.fieldName(), builder.boost()); - } - }).collect(Collectors.toList()); + List<Pair<String, Float>> prefixFieldWeights = + boolPrefixQuery.should().stream() + .map( + prefixQuery -> { + if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { + MatchPhrasePrefixQueryBuilder builder = + (MatchPhrasePrefixQueryBuilder) prefixQuery; + return Pair.of(builder.fieldName(), builder.boost()); + } else if (prefixQuery instanceof TermQueryBuilder) { + // exact + TermQueryBuilder builder = (TermQueryBuilder) prefixQuery; + return Pair.of(builder.fieldName(), builder.boost()); + } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { + // ngram + MatchPhraseQueryBuilder builder = (MatchPhraseQueryBuilder) prefixQuery; + return Pair.of(builder.fieldName(), builder.boost()); + } + }) + .collect(Collectors.toList()); assertEquals(prefixFieldWeights.size(), 28); @@ -165,19 +177,21 @@ public void testQueryBuilderFulltext() { Pair.of("wordGramField.wordGrams3", 2.25f), Pair.of("wordGramField.wordGrams4", 3.2399998f), Pair.of("wordGramField.keyword", 10.0f), - Pair.of("wordGramField.keyword", 7.0f) - ).forEach(p -> assertTrue(prefixFieldWeights.contains(p), "Missing: " + p)); + Pair.of("wordGramField.keyword", 7.0f)) + .forEach(p -> assertTrue(prefixFieldWeights.contains(p), "Missing: " + p)); // Validate scorer - FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = result.filterFunctionBuilders(); + FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = + result.filterFunctionBuilders(); assertEquals(scoringFunctions.length, 3); } @Test public void testQueryBuilderStructured() { FunctionScoreQueryBuilder result = - (FunctionScoreQueryBuilder) TEST_BUILDER.buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), - "testQuery", false); + (FunctionScoreQueryBuilder) + TEST_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", false); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); assertEquals(shouldQueries.size(), 2); @@ -194,17 +208,20 @@ public void testQueryBuilderStructured() { assertEquals(keywordFields.get("esObjectField").floatValue(), 1.0f); // Validate scorer - FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = result.filterFunctionBuilders(); + FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = + result.filterFunctionBuilders(); assertEquals(scoringFunctions.length, 3); } private static final SearchQueryBuilder TEST_CUSTOM_BUILDER; + static { try { CustomConfiguration customConfiguration = new CustomConfiguration(); customConfiguration.setEnabled(true); customConfiguration.setFile("search_config_builder_test.yml"); - CustomSearchConfiguration customSearchConfiguration = customConfiguration.resolve(new YAMLMapper()); + CustomSearchConfiguration customSearchConfiguration = + customConfiguration.resolve(new YAMLMapper()); TEST_CUSTOM_BUILDER = new SearchQueryBuilder(testQueryConfig, customSearchConfiguration); } catch (IOException e) { throw new RuntimeException(e); @@ -214,8 +231,10 @@ public void testQueryBuilderStructured() { @Test public void testCustomSelectAll() { for (String triggerQuery : List.of("*", "")) { - FunctionScoreQueryBuilder result = 
(FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER - .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); + FunctionScoreQueryBuilder result = + (FunctionScoreQueryBuilder) + TEST_CUSTOM_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); @@ -226,8 +245,10 @@ public void testCustomSelectAll() { @Test public void testCustomExactMatch() { for (String triggerQuery : List.of("test_table", "'single quoted'", "\"double quoted\"")) { - FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER - .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); + FunctionScoreQueryBuilder result = + (FunctionScoreQueryBuilder) + TEST_CUSTOM_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); @@ -236,18 +257,22 @@ public void testCustomExactMatch() { BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(0); assertTrue(boolPrefixQuery.should().size() > 0); - List<QueryBuilder> queries = boolPrefixQuery.should().stream().map(prefixQuery -> { - if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { - // prefix - return (MatchPhrasePrefixQueryBuilder) prefixQuery; - } else if (prefixQuery instanceof TermQueryBuilder) { - // exact - return (TermQueryBuilder) prefixQuery; - } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { - // ngram - return (MatchPhraseQueryBuilder) prefixQuery; - } - }).collect(Collectors.toList()); + List<QueryBuilder> queries = + boolPrefixQuery.should().stream() + .map( + prefixQuery -> { + if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { + // prefix + return (MatchPhrasePrefixQueryBuilder) prefixQuery; + } else if (prefixQuery instanceof TermQueryBuilder) { + // exact + return (TermQueryBuilder) prefixQuery; + } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { + // ngram + return (MatchPhraseQueryBuilder) prefixQuery; + } + }) + .collect(Collectors.toList()); assertFalse(queries.isEmpty(), "Expected queries with specific types"); } @@ -256,24 +281,30 @@ public void testCustomExactMatch() { @Test public void testCustomDefault() { for (String triggerQuery : List.of("foo", "bar", "foo\"bar", "foo:bar")) { - FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER - .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); + FunctionScoreQueryBuilder result = + (FunctionScoreQueryBuilder) + TEST_CUSTOM_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); assertEquals(shouldQueries.size(), 3); - List<QueryBuilder> queries = mainQuery.should().stream().map(query -> { - if (query instanceof SimpleQueryStringBuilder) { - return (SimpleQueryStringBuilder) query; - } else if (query instanceof MatchAllQueryBuilder) { - // custom - return (MatchAllQueryBuilder) query; - } else { - // exact - return (BoolQueryBuilder) query; - } - }).collect(Collectors.toList()); + List<QueryBuilder> queries = + mainQuery.should().stream() + .map( + query -> { + if (query instanceof SimpleQueryStringBuilder) { + return (SimpleQueryStringBuilder) query; + 
} else if (query instanceof MatchAllQueryBuilder) { + // custom + return (MatchAllQueryBuilder) query; + } else { + // exact + return (BoolQueryBuilder) query; + } + }) + .collect(Collectors.toList()); assertEquals(queries.size(), 3, "Expected queries with specific types"); @@ -287,41 +318,52 @@ public void testCustomDefault() { } } - /** - * Tests to make sure that the fields are correctly combined across search-able entities - */ + /** Tests to make sure that the fields are correctly combined across search-able entities */ @Test public void testGetStandardFieldsEntitySpec() { - List<EntitySpec> entitySpecs = Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) + List<EntitySpec> entitySpecs = + Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) .map(entityRegistry::getEntitySpec) .collect(Collectors.toList()); assertTrue(entitySpecs.size() > 30, "Expected at least 30 searchable entities in the registry"); // Count of the distinct field names - Set<String> expectedFieldNames = Stream.concat( - // Standard urn fields plus entitySpec sourced fields - Stream.of("urn", "urn.delimited"), - entitySpecs.stream() - .flatMap(spec -> TEST_CUSTOM_BUILDER.getFieldsFromEntitySpec(spec).stream()) - .map(SearchFieldConfig::fieldName)) + Set<String> expectedFieldNames = + Stream.concat( + // Standard urn fields plus entitySpec sourced fields + Stream.of("urn", "urn.delimited"), + entitySpecs.stream() + .flatMap(spec -> TEST_CUSTOM_BUILDER.getFieldsFromEntitySpec(spec).stream()) + .map(SearchFieldConfig::fieldName)) .collect(Collectors.toSet()); - Set<String> actualFieldNames = TEST_CUSTOM_BUILDER.getStandardFields(entitySpecs).stream() + Set<String> actualFieldNames = + TEST_CUSTOM_BUILDER.getStandardFields(entitySpecs).stream() .map(SearchFieldConfig::fieldName) .collect(Collectors.toSet()); - assertEquals(actualFieldNames, expectedFieldNames, - String.format("Missing: %s Extra: %s", - expectedFieldNames.stream().filter(f -> !actualFieldNames.contains(f)).collect(Collectors.toSet()), - actualFieldNames.stream().filter(f -> !expectedFieldNames.contains(f)).collect(Collectors.toSet()))); + assertEquals( + actualFieldNames, + expectedFieldNames, + String.format( + "Missing: %s Extra: %s", + expectedFieldNames.stream() + .filter(f -> !actualFieldNames.contains(f)) + .collect(Collectors.toSet()), + actualFieldNames.stream() + .filter(f -> !expectedFieldNames.contains(f)) + .collect(Collectors.toSet()))); } @Test public void testGetStandardFields() { - Set<SearchFieldConfig> fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec())); + Set<SearchFieldConfig> fieldConfigs = + TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec())); assertEquals(fieldConfigs.size(), 21); - assertEquals(fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( + assertEquals( + fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), + Set.of( "nestedArrayArrayField", "esObjectField", "foreignKey", @@ -344,45 +386,90 @@ public void testGetStandardFields() { "urn", "wordGramField.wordGrams2")); - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("keyPart1")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 10.0F)); - assertEquals(fieldConfigs.stream().filter(field -> 
field.fieldName().equals("nestedForeignKey")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 1.0F)); - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("textFieldOverride")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 1.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("keyPart1")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(10.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("nestedForeignKey")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(1.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("textFieldOverride")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(1.0F)); EntitySpec mockEntitySpec = Mockito.mock(EntitySpec.class); - Mockito.when(mockEntitySpec.getSearchableFieldSpecs()).thenReturn(List.of( - new SearchableFieldSpec( + Mockito.when(mockEntitySpec.getSearchableFieldSpecs()) + .thenReturn( + List.of( + new SearchableFieldSpec( Mockito.mock(PathSpec.class), - new SearchableAnnotation("fieldDoesntExistInOriginal", - SearchableAnnotation.FieldType.TEXT, - true, true, false, false, - Optional.empty(), Optional.empty(), 13.0, - Optional.empty(), Optional.empty(), Map.of(), List.of()), + new SearchableAnnotation( + "fieldDoesntExistInOriginal", + SearchableAnnotation.FieldType.TEXT, + true, + true, + false, + false, + Optional.empty(), + Optional.empty(), + 13.0, + Optional.empty(), + Optional.empty(), + Map.of(), + List.of()), Mockito.mock(DataSchema.class)), - new SearchableFieldSpec( + new SearchableFieldSpec( Mockito.mock(PathSpec.class), - new SearchableAnnotation("keyPart1", - SearchableAnnotation.FieldType.KEYWORD, - true, true, false, false, - Optional.empty(), Optional.empty(), 20.0, - Optional.empty(), Optional.empty(), Map.of(), List.of()), + new SearchableAnnotation( + "keyPart1", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.empty(), + Optional.empty(), + 20.0, + Optional.empty(), + Optional.empty(), + Map.of(), + List.of()), Mockito.mock(DataSchema.class)), - new SearchableFieldSpec( + new SearchableFieldSpec( Mockito.mock(PathSpec.class), - new SearchableAnnotation("textFieldOverride", - SearchableAnnotation.FieldType.WORD_GRAM, - true, true, false, false, - Optional.empty(), Optional.empty(), 3.0, - Optional.empty(), Optional.empty(), Map.of(), List.of()), - Mockito.mock(DataSchema.class))) - ); - - fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec)); - // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the textFieldOverride + new SearchableAnnotation( + "textFieldOverride", + SearchableAnnotation.FieldType.WORD_GRAM, + true, + true, + false, + false, + Optional.empty(), + Optional.empty(), + 3.0, + Optional.empty(), + Optional.empty(), + Map.of(), + List.of()), + Mockito.mock(DataSchema.class)))); + + fieldConfigs = + TEST_CUSTOM_BUILDER.getStandardFields( + ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec)); + // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the + // textFieldOverride assertEquals(fieldConfigs.size(), 26); - assertEquals(fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( + assertEquals( + fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), + Set.of( 
"nestedArrayArrayField", "esObjectField", "foreignKey", @@ -411,13 +498,25 @@ public void testGetStandardFields() { "textFieldOverride.wordGrams4")); // Field which only exists in first one: Should be the same - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("nestedForeignKey")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 1.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("nestedForeignKey")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(1.0F)); // Average boost value: 10 vs. 20 -> 15 - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("keyPart1")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 15.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("keyPart1")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(15.0F)); // Field which added word gram fields: Original boost should be boost value averaged - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("textFieldOverride")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 2.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("textFieldOverride")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(2.0F)); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java index 0ea035a10f91d..3afb04afb917b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java @@ -1,14 +1,26 @@ package com.linkedin.metadata.search.query.request; +import static com.linkedin.metadata.utils.SearchUtil.*; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.google.common.collect.ImmutableList; -import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.config.search.WordGramConfiguration; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; -import com.linkedin.metadata.TestEntitySpecBuilder; -import com.linkedin.metadata.config.search.WordGramConfiguration; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -19,16 +31,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - -import 
com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.SearchFlags; -import com.linkedin.metadata.query.filter.Condition; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; -import com.linkedin.metadata.query.filter.Filter; import org.opensearch.action.search.SearchRequest; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.ExistsQueryBuilder; @@ -45,16 +47,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static com.linkedin.metadata.utils.SearchUtil.*; -import static org.testng.Assert.*; - - @Import(SearchCommonTestConfiguration.class) public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityRegistry entityRegistry; + @Autowired private EntityRegistry entityRegistry; public static SearchConfiguration testQueryConfig; + static { testQueryConfig = new SearchConfiguration(); testQueryConfig.setMaxTermBucketSize(20); @@ -84,29 +82,42 @@ public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests { @Test public void testDatasetFieldsAndHighlights() { EntitySpec entitySpec = entityRegistry.getEntitySpec("dataset"); - SearchRequestHandler datasetHandler = SearchRequestHandler.getBuilder(entitySpec, testQueryConfig, null); + SearchRequestHandler datasetHandler = + SearchRequestHandler.getBuilder(entitySpec, testQueryConfig, null); /* - Ensure efficient query performance, we do not expect upstream/downstream/fineGrained lineage - */ - List<String> highlightFields = datasetHandler.getHighlights().fields().stream() + Ensure efficient query performance, we do not expect upstream/downstream/fineGrained lineage + */ + List<String> highlightFields = + datasetHandler.getHighlights().fields().stream() .map(HighlightBuilder.Field::name) .collect(Collectors.toList()); - assertTrue(highlightFields.stream().noneMatch( - fieldName -> fieldName.contains("upstream") || fieldName.contains("downstream") - ), "unexpected lineage fields in highlights: " + highlightFields); + assertTrue( + highlightFields.stream() + .noneMatch( + fieldName -> fieldName.contains("upstream") || fieldName.contains("downstream")), + "unexpected lineage fields in highlights: " + highlightFields); } @Test public void testSearchRequestHandlerHighlightingTurnedOff() { - SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); - SearchRequest searchRequest = requestHandler.getSearchRequest("testQuery", null, null, 0, - 10, new SearchFlags().setFulltext(false).setSkipHighlighting(true), null); + SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + SearchRequest searchRequest = + requestHandler.getSearchRequest( + "testQuery", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(false).setSkipHighlighting(true), + null); SearchSourceBuilder sourceBuilder = searchRequest.source(); assertEquals(sourceBuilder.from(), 0); assertEquals(sourceBuilder.size(), 10); // Filters - Collection<AggregationBuilder> aggBuilders = sourceBuilder.aggregations().getAggregatorFactories(); + Collection<AggregationBuilder> aggBuilders = + 
sourceBuilder.aggregations().getAggregatorFactories(); // Expect 2 aggregations: textFieldOverride and _index assertEquals(aggBuilders.size(), 2); for (AggregationBuilder aggBuilder : aggBuilders) { @@ -123,44 +134,73 @@ public void testSearchRequestHandlerHighlightingTurnedOff() { @Test public void testSearchRequestHandler() { - SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); - SearchRequest searchRequest = requestHandler.getSearchRequest("testQuery", null, null, 0, - 10, new SearchFlags().setFulltext(false), null); + SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + SearchRequest searchRequest = + requestHandler.getSearchRequest( + "testQuery", null, null, 0, 10, new SearchFlags().setFulltext(false), null); SearchSourceBuilder sourceBuilder = searchRequest.source(); assertEquals(sourceBuilder.from(), 0); assertEquals(sourceBuilder.size(), 10); // Filters - Collection<AggregationBuilder> aggBuilders = sourceBuilder.aggregations().getAggregatorFactories(); + Collection<AggregationBuilder> aggBuilders = + sourceBuilder.aggregations().getAggregatorFactories(); // Expect 2 aggregations: textFieldOverride and _index - assertEquals(aggBuilders.size(), 2); - for (AggregationBuilder aggBuilder : aggBuilders) { - if (aggBuilder.getName().equals("textFieldOverride")) { - TermsAggregationBuilder filterPanelBuilder = (TermsAggregationBuilder) aggBuilder; - assertEquals(filterPanelBuilder.field(), "textFieldOverride.keyword"); - } else if (!aggBuilder.getName().equals("_entityType")) { - fail("Found unexepected aggregation: " + aggBuilder.getName()); - } - } + assertEquals(aggBuilders.size(), 2); + for (AggregationBuilder aggBuilder : aggBuilders) { + if (aggBuilder.getName().equals("textFieldOverride")) { + TermsAggregationBuilder filterPanelBuilder = (TermsAggregationBuilder) aggBuilder; + assertEquals(filterPanelBuilder.field(), "textFieldOverride.keyword"); + } else if (!aggBuilder.getName().equals("_entityType")) { + fail("Found unexepected aggregation: " + aggBuilder.getName()); + } + } // Highlights HighlightBuilder highlightBuilder = sourceBuilder.highlighter(); List<String> fields = - highlightBuilder.fields().stream().map(HighlightBuilder.Field::name).collect(Collectors.toList()); + highlightBuilder.fields().stream() + .map(HighlightBuilder.Field::name) + .collect(Collectors.toList()); assertEquals(fields.size(), 22); List<String> highlightableFields = - ImmutableList.of("keyPart1", "textArrayField", "textFieldOverride", "foreignKey", "nestedForeignKey", - "nestedArrayStringField", "nestedArrayArrayField", "customProperties", "esObjectField", "wordGramField"); - highlightableFields.forEach(field -> { - assertTrue(fields.contains(field), "Missing: " + field); - assertTrue(fields.contains(field + ".*"), "Missing: " + field + ".*"); - }); + ImmutableList.of( + "keyPart1", + "textArrayField", + "textFieldOverride", + "foreignKey", + "nestedForeignKey", + "nestedArrayStringField", + "nestedArrayArrayField", + "customProperties", + "esObjectField", + "wordGramField"); + highlightableFields.forEach( + field -> { + assertTrue(fields.contains(field), "Missing: " + field); + assertTrue(fields.contains(field + ".*"), "Missing: " + field + ".*"); + }); } @Test public void testAggregationsInSearch() { - SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); - final String 
nestedAggString = String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR); - SearchRequest searchRequest = requestHandler.getSearchRequest("*", null, null, 0, - 10, new SearchFlags().setFulltext(true), List.of("textFieldOverride", "_entityType", nestedAggString, AGGREGATION_SEPARATOR_CHAR, "not_a_facet")); + SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + final String nestedAggString = + String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR); + SearchRequest searchRequest = + requestHandler.getSearchRequest( + "*", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(true), + List.of( + "textFieldOverride", + "_entityType", + nestedAggString, + AGGREGATION_SEPARATOR_CHAR, + "not_a_facet")); SearchSourceBuilder sourceBuilder = searchRequest.source(); // Filters Collection<AggregationBuilder> aggregationBuilders = @@ -168,17 +208,28 @@ public void testAggregationsInSearch() { assertEquals(aggregationBuilders.size(), 3); // Expected aggregations - AggregationBuilder expectedTextFieldAggregationBuilder = AggregationBuilders.terms("textFieldOverride") - .field("textFieldOverride.keyword").size(testQueryConfig.getMaxTermBucketSize()); - AggregationBuilder expectedEntityTypeAggregationBuilder = AggregationBuilders.terms("_entityType") - .field("_index").size(testQueryConfig.getMaxTermBucketSize()).minDocCount(0); - AggregationBuilder expectedNestedAggregationBuilder = AggregationBuilders.terms(nestedAggString).field("_index") - .size(testQueryConfig.getMaxTermBucketSize()).minDocCount(0) - .subAggregation(AggregationBuilders.terms(nestedAggString) - .field("textFieldOverride.keyword").size(testQueryConfig.getMaxTermBucketSize())); + AggregationBuilder expectedTextFieldAggregationBuilder = + AggregationBuilders.terms("textFieldOverride") + .field("textFieldOverride.keyword") + .size(testQueryConfig.getMaxTermBucketSize()); + AggregationBuilder expectedEntityTypeAggregationBuilder = + AggregationBuilders.terms("_entityType") + .field("_index") + .size(testQueryConfig.getMaxTermBucketSize()) + .minDocCount(0); + AggregationBuilder expectedNestedAggregationBuilder = + AggregationBuilders.terms(nestedAggString) + .field("_index") + .size(testQueryConfig.getMaxTermBucketSize()) + .minDocCount(0) + .subAggregation( + AggregationBuilders.terms(nestedAggString) + .field("textFieldOverride.keyword") + .size(testQueryConfig.getMaxTermBucketSize())); for (AggregationBuilder builder : aggregationBuilders) { - if (builder.getName().equals("textFieldOverride") || builder.getName().equals("_entityType")) { + if (builder.getName().equals("textFieldOverride") + || builder.getName().equals("_entityType")) { assertTrue(builder.getSubAggregations().isEmpty()); if (builder.getName().equalsIgnoreCase("textFieldOverride")) { assertEquals(builder, expectedTextFieldAggregationBuilder); @@ -200,7 +251,8 @@ public void testAggregationsInSearch() { @Test public void testFilteredSearch() { - final SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + final SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); final BoolQueryBuilder testQuery = constructFilterQuery(requestHandler, false); @@ -210,7 +262,6 @@ public void testFilteredSearch() { testRemovedQuery(queryWithRemoved); - final BoolQueryBuilder testQueryScroll = 
constructFilterQuery(requestHandler, true); testFilterQuery(testQueryScroll); @@ -220,138 +271,190 @@ public void testFilteredSearch() { testRemovedQuery(queryWithRemovedScroll); } - private BoolQueryBuilder constructFilterQuery(SearchRequestHandler requestHandler, boolean scroll) { - final Criterion filterCriterion = new Criterion() - .setField("keyword") - .setCondition(Condition.EQUAL) - .setValue("some value"); + private BoolQueryBuilder constructFilterQuery( + SearchRequestHandler requestHandler, boolean scroll) { + final Criterion filterCriterion = + new Criterion().setField("keyword").setCondition(Condition.EQUAL).setValue("some value"); - final Filter filterWithoutRemovedCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); + final Filter filterWithoutRemovedCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); final BoolQueryBuilder testQuery; if (scroll) { - testQuery = (BoolQueryBuilder) requestHandler - .getSearchRequest("testQuery", filterWithoutRemovedCondition, null, null, null, - "5m", 10, new SearchFlags().setFulltext(false)) - .source() - .query(); + testQuery = + (BoolQueryBuilder) + requestHandler + .getSearchRequest( + "testQuery", + filterWithoutRemovedCondition, + null, + null, + null, + "5m", + 10, + new SearchFlags().setFulltext(false)) + .source() + .query(); } else { testQuery = - (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithoutRemovedCondition, null, - 0, 10, new SearchFlags().setFulltext(false), null).source().query(); + (BoolQueryBuilder) + requestHandler + .getSearchRequest( + "testQuery", + filterWithoutRemovedCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(false), + null) + .source() + .query(); } return testQuery; } private void testFilterQuery(BoolQueryBuilder testQuery) { - Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .map(or -> (BoolQueryBuilder) or) - .flatMap(or -> { - System.out.println("processing: " + or.mustNot()); - return or.mustNot().stream(); - }) - .filter(and -> and instanceof MatchQueryBuilder) - .map(and -> (MatchQueryBuilder) and) - .filter(match -> match.fieldName().equals("removed")) - .findAny(); - - assertTrue(mustNotHaveRemovedCondition.isPresent(), "Expected must not have removed condition to exist" - + " if filter does not have it"); + Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .map(or -> (BoolQueryBuilder) or) + .flatMap( + or -> { + System.out.println("processing: " + or.mustNot()); + return or.mustNot().stream(); + }) + .filter(and -> and instanceof MatchQueryBuilder) + .map(and -> (MatchQueryBuilder) and) + .filter(match -> match.fieldName().equals("removed")) + .findAny(); + + assertTrue( + mustNotHaveRemovedCondition.isPresent(), + "Expected must not have removed condition to exist" + " if filter does not have it"); } - private BoolQueryBuilder constructRemovedQuery(SearchRequestHandler requestHandler, boolean scroll) { - final Criterion filterCriterion = new Criterion() - .setField("keyword") - .setCondition(Condition.EQUAL) - .setValue("some value"); - - final Criterion removedCriterion = new Criterion() - .setField("removed") - .setCondition(Condition.EQUAL) - 
.setValue(String.valueOf(false)); - - final Filter filterWithRemovedCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion, removedCriterion))) - )); + private BoolQueryBuilder constructRemovedQuery( + SearchRequestHandler requestHandler, boolean scroll) { + final Criterion filterCriterion = + new Criterion().setField("keyword").setCondition(Condition.EQUAL).setValue("some value"); + + final Criterion removedCriterion = + new Criterion() + .setField("removed") + .setCondition(Condition.EQUAL) + .setValue(String.valueOf(false)); + + final Filter filterWithRemovedCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(filterCriterion, removedCriterion))))); final BoolQueryBuilder queryWithRemoved; if (scroll) { - queryWithRemoved = (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithRemovedCondition, - null, null, null, "5m", 10, new SearchFlags().setFulltext(false)).source().query(); + queryWithRemoved = + (BoolQueryBuilder) + requestHandler + .getSearchRequest( + "testQuery", + filterWithRemovedCondition, + null, + null, + null, + "5m", + 10, + new SearchFlags().setFulltext(false)) + .source() + .query(); } else { queryWithRemoved = - (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithRemovedCondition, - null, 0, 10, new SearchFlags().setFulltext(false), null).source().query(); + (BoolQueryBuilder) + requestHandler + .getSearchRequest( + "testQuery", + filterWithRemovedCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(false), + null) + .source() + .query(); } return queryWithRemoved; } private void testRemovedQuery(BoolQueryBuilder queryWithRemoved) { - Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = queryWithRemoved.must() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .map(or -> (BoolQueryBuilder) or) - .flatMap(or -> { - System.out.println("processing: " + or.mustNot()); - return or.mustNot().stream(); - }) - .filter(and -> and instanceof MatchQueryBuilder) - .map(and -> (MatchQueryBuilder) and) - .filter(match -> match.fieldName().equals("removed")) - .findAny(); - - assertFalse(mustNotHaveRemovedCondition.isPresent(), "Expect `must not have removed` condition to not" - + " exist because filter already has it a condition for the removed property"); + Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = + queryWithRemoved.must().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .map(or -> (BoolQueryBuilder) or) + .flatMap( + or -> { + System.out.println("processing: " + or.mustNot()); + return or.mustNot().stream(); + }) + .filter(and -> and instanceof MatchQueryBuilder) + .map(and -> (MatchQueryBuilder) and) + .filter(match -> match.fieldName().equals("removed")) + .findAny(); + + assertFalse( + mustNotHaveRemovedCondition.isPresent(), + "Expect `must not have removed` condition to not" + + " exist because filter already has a condition for the removed property"); } // For fields that are one of EDITABLE_FIELD_TO_QUERY_PAIRS, we want to make sure - // a filter that has a list of values like below will filter on all values by generating a terms query + // a filter that has a list of values like below will filter on all values by generating a terms + // query // field EQUAL [value1, value2, ...]
@Test public void testFilterFieldTagsByValues() { - final Criterion filterCriterion = new Criterion() - .setField("fieldTags") - .setCondition(Condition.EQUAL) - .setValue("v1") - .setValues(new StringArray("v1", "v2")); + final Criterion filterCriterion = + new Criterion() + .setField("fieldTags") + .setCondition(Condition.EQUAL) + .setValue("v1") + .setValues(new StringArray("v1", "v2")); final BoolQueryBuilder testQuery = getQuery(filterCriterion); // bool -> filter -> [bool] -> should -> [bool] -> filter -> [bool] -> should -> [terms] - List<TermsQueryBuilder> termsQueryBuilders = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof TermsQueryBuilder) - .map(should -> (TermsQueryBuilder) should) - .collect(Collectors.toList()); + List<TermsQueryBuilder> termsQueryBuilders = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) + .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof TermsQueryBuilder) + .map(should -> (TermsQueryBuilder) should) + .collect(Collectors.toList()); assertTrue(termsQueryBuilders.size() == 2, "Expected to find two terms queries"); Map<String, List<String>> termsMap = new HashMap<>(); - termsQueryBuilders.forEach(termsQueryBuilder -> { - String field = termsQueryBuilder.fieldName(); - List<Object> values = termsQueryBuilder.values(); - List<String> strValues = new ArrayList<>(); - for (Object value : values) { - assertTrue(value instanceof String, - "Expected value to be String, got: " + value.getClass()); - strValues.add((String) value); - } - Collections.sort(strValues); - termsMap.put(field, strValues); - }); + termsQueryBuilders.forEach( + termsQueryBuilder -> { + String field = termsQueryBuilder.fieldName(); + List<Object> values = termsQueryBuilder.values(); + List<String> strValues = new ArrayList<>(); + for (Object value : values) { + assertTrue( + value instanceof String, "Expected value to be String, got: " + value.getClass()); + strValues.add((String) value); + } + Collections.sort(strValues); + termsMap.put(field, strValues); + }); assertTrue(termsMap.containsKey("fieldTags.keyword")); assertTrue(termsMap.containsKey("editedFieldTags.keyword")); @@ -367,35 +470,35 @@ public void testFilterFieldTagsByValues() { // pair of fields @Test public void testFilterFieldTagsByValue() { - final Criterion filterCriterion = new Criterion() - .setField("fieldTags") - .setCondition(Condition.EQUAL) - .setValue("v1"); + final Criterion filterCriterion = + new Criterion().setField("fieldTags").setCondition(Condition.EQUAL).setValue("v1"); final BoolQueryBuilder testQuery = getQuery(filterCriterion); - // bool -> must -> [bool] -> should -> [bool] -> must -> [bool] -> should -> [bool] -> should -> [match] - List<MultiMatchQueryBuilder> matchQueryBuilders = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) - .filter(should 
-> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).should().stream()) - .filter(should -> should instanceof MultiMatchQueryBuilder) - .map(should -> (MultiMatchQueryBuilder) should) - .collect(Collectors.toList()); + // bool -> must -> [bool] -> should -> [bool] -> must -> [bool] -> should -> [bool] -> should -> + // [match] + List<MultiMatchQueryBuilder> matchQueryBuilders = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) + .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).should().stream()) + .filter(should -> should instanceof MultiMatchQueryBuilder) + .map(should -> (MultiMatchQueryBuilder) should) + .collect(Collectors.toList()); assertTrue(matchQueryBuilders.size() == 2, "Expected to find two match queries"); Map<String, String> matchMap = new HashMap<>(); - matchQueryBuilders.forEach(matchQueryBuilder -> { - Set<String> fields = matchQueryBuilder.fields().keySet(); - assertTrue(matchQueryBuilder.value() instanceof String); - fields.forEach(field -> matchMap.put(field, (String) matchQueryBuilder.value())); - }); + matchQueryBuilders.forEach( + matchQueryBuilder -> { + Set<String> fields = matchQueryBuilder.fields().keySet(); + assertTrue(matchQueryBuilder.value() instanceof String); + fields.forEach(field -> matchMap.put(field, (String) matchQueryBuilder.value())); + }); assertTrue(matchMap.containsKey("fieldTags.keyword")); assertTrue(matchMap.containsKey("editedFieldTags.keyword")); @@ -407,65 +510,68 @@ public void testFilterFieldTagsByValue() { // Test fields not in EDITABLE_FIELD_TO_QUERY_PAIRS with a single value @Test public void testFilterPlatformByValue() { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("mysql"); + final Criterion filterCriterion = + new Criterion().setField("platform").setCondition(Condition.EQUAL).setValue("mysql"); final BoolQueryBuilder testQuery = getQuery(filterCriterion); // bool -> filter -> [bool] -> should -> [bool] -> filter -> [bool] -> should -> [match] - List<MultiMatchQueryBuilder> matchQueryBuilders = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof MultiMatchQueryBuilder) - .map(should -> (MultiMatchQueryBuilder) should) - .collect(Collectors.toList()); + List<MultiMatchQueryBuilder> matchQueryBuilders = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) 
+ .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof MultiMatchQueryBuilder) + .map(should -> (MultiMatchQueryBuilder) should) + .collect(Collectors.toList()); assertTrue(matchQueryBuilders.size() == 1, "Expected to find one match query"); MultiMatchQueryBuilder matchQueryBuilder = matchQueryBuilders.get(0); - assertEquals(matchQueryBuilder.fields(), Map.of( + assertEquals( + matchQueryBuilder.fields(), + Map.of( "platform", 1.0f, - "platform.*", 1.0f) - ); + "platform.*", 1.0f)); assertEquals(matchQueryBuilder.value(), "mysql"); } // Test fields not in EDITABLE_FIELD_TO_QUERY_PAIRS with a list of values @Test public void testFilterPlatformByValues() { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("mysql") - .setValues(new StringArray("mysql", "bigquery")); + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("mysql") + .setValues(new StringArray("mysql", "bigquery")); final BoolQueryBuilder testQuery = getQuery(filterCriterion); // bool -> filter -> [bool] -> should -> [bool] -> filter -> [terms] - List<TermsQueryBuilder> termsQueryBuilders = testQuery.filter() - .stream() - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof TermsQueryBuilder) - .map(must -> (TermsQueryBuilder) must) - .collect(Collectors.toList()); + List<TermsQueryBuilder> termsQueryBuilders = + testQuery.filter().stream() + .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) + .filter(must -> must instanceof TermsQueryBuilder) + .map(must -> (TermsQueryBuilder) must) + .collect(Collectors.toList()); assertTrue(termsQueryBuilders.size() == 1, "Expected to find one terms query"); final TermsQueryBuilder termsQueryBuilder = termsQueryBuilders.get(0); assertEquals(termsQueryBuilder.fieldName(), "platform.keyword"); Set<String> values = new HashSet<>(); - termsQueryBuilder.values().forEach(value -> { - assertTrue(value instanceof String); - values.add((String) value); - }); + termsQueryBuilder + .values() + .forEach( + value -> { + assertTrue(value instanceof String); + values.add((String) value); + }); assertEquals(values.size(), 2, "Expected two platform filter values"); assertTrue(values.contains("mysql")); @@ -511,18 +617,20 @@ public void testBrowsePathQueryFilter() { } private BoolQueryBuilder getQuery(final Criterion filterCriterion) { - final Filter filter = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); - - final SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder( - TestEntitySpecBuilder.getSpec(), testQueryConfig, null); - - return (BoolQueryBuilder) requestHandler - .getSearchRequest("", filter, null, 0, 10, new SearchFlags().setFulltext(false), null) - .source() - .query(); + final Filter filter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); 
+ + final SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + + return (BoolQueryBuilder) + requestHandler + .getSearchRequest("", filter, null, 0, 10, new SearchFlags().setFulltext(false), null) + .source() + .query(); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java index 8888ef59ad7d2..6e2d90287d5d9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.transformer; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + import com.datahub.test.TestEntitySnapshot; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -13,28 +18,29 @@ import com.linkedin.metadata.models.EntitySpec; import java.io.IOException; import java.util.Optional; - import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertTrue; - - public class SearchDocumentTransformerTest { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } @Test public void testTransform() throws IOException { - SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000); + SearchDocumentTransformer searchDocumentTransformer = + new SearchDocumentTransformer(1000, 1000, 1000); TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot(); EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); - Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); + Optional<String> result = + searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); assertTrue(result.isPresent()); ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); assertEquals(parsedJson.get("urn").asText(), snapshot.getUrn().toString()); @@ -65,10 +71,12 @@ public void testTransform() throws IOException { @Test public void testTransformForDelete() throws IOException { - SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000); + SearchDocumentTransformer searchDocumentTransformer = + new SearchDocumentTransformer(1000, 1000, 1000); TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot(); EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); - Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, true); + 
Optional<String> result = + searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, true); assertTrue(result.isPresent()); ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); assertEquals(parsedJson.get("urn").asText(), snapshot.getUrn().toString()); @@ -86,14 +94,18 @@ public void testTransformForDelete() throws IOException { @Test public void testTransformMaxFieldValue() throws IOException { - SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 5); + SearchDocumentTransformer searchDocumentTransformer = + new SearchDocumentTransformer(1000, 1000, 5); TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot(); EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); - Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); + Optional<String> result = + searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); assertTrue(result.isPresent()); ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); - assertEquals(parsedJson.get("customProperties"), JsonNodeFactory.instance.arrayNode().add("shortValue=123")); + assertEquals( + parsedJson.get("customProperties"), + JsonNodeFactory.instance.arrayNode().add("shortValue=123")); assertEquals(parsedJson.get("esObjectField"), JsonNodeFactory.instance.arrayNode().add("123")); searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 20); @@ -103,10 +115,21 @@ public void testTransformMaxFieldValue() throws IOException { assertTrue(result.isPresent()); parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); - - assertEquals(parsedJson.get("customProperties"), JsonNodeFactory.instance.arrayNode() - .add("key1=value1").add("key2=value2").add("shortValue=123").add("longValue=0123456789")); - assertEquals(parsedJson.get("esObjectField"), JsonNodeFactory.instance.arrayNode() - .add("value1").add("value2").add("123").add("0123456789")); + assertEquals( + parsedJson.get("customProperties"), + JsonNodeFactory.instance + .arrayNode() + .add("key1=value1") + .add("key2=value2") + .add("shortValue=123") + .add("longValue=0123456789")); + assertEquals( + parsedJson.get("esObjectField"), + JsonNodeFactory.instance + .arrayNode() + .add("value1") + .add("value2") + .add("123") + .add("0123456789")); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java index 6127326db8ab9..e4e0d00391fa5 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java @@ -14,8 +14,6 @@ import org.testng.Assert; import org.testng.annotations.Test; - - public class BrowsePathUtilsTest { private final EntityRegistry registry = new TestEntityRegistry(); @@ -24,43 +22,40 @@ public class BrowsePathUtilsTest { public void testGetDefaultBrowsePath() throws URISyntaxException { // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); String datasetPath = 
BrowsePathUtils.getDefaultBrowsePath(datasetUrn, this.registry, '.'); Assert.assertEquals(datasetPath, "/prod/kafka/test/a"); // Charts - ChartKey chartKey = new ChartKey() - .setChartId("Test/A/B") - .setDashboardTool("looker"); + ChartKey chartKey = new ChartKey().setChartId("Test/A/B").setDashboardTool("looker"); Urn chartUrn = EntityKeyUtils.convertEntityKeyToUrn(chartKey, "chart"); String chartPath = BrowsePathUtils.getDefaultBrowsePath(chartUrn, this.registry, '/'); Assert.assertEquals(chartPath, "/looker"); // Dashboards - DashboardKey dashboardKey = new DashboardKey() - .setDashboardId("Test/A/B") - .setDashboardTool("looker"); + DashboardKey dashboardKey = + new DashboardKey().setDashboardId("Test/A/B").setDashboardTool("looker"); Urn dashboardUrn = EntityKeyUtils.convertEntityKeyToUrn(dashboardKey, "dashboard"); String dashboardPath = BrowsePathUtils.getDefaultBrowsePath(dashboardUrn, this.registry, '/'); Assert.assertEquals(dashboardPath, "/looker"); // Data Flows - DataFlowKey dataFlowKey = new DataFlowKey() - .setCluster("test") - .setFlowId("Test/A/B") - .setOrchestrator("airflow"); + DataFlowKey dataFlowKey = + new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow"); Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow"); String dataFlowPath = BrowsePathUtils.getDefaultBrowsePath(dataFlowUrn, this.registry, '/'); Assert.assertEquals(dataFlowPath, "/airflow/test"); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = + new DataJobKey() + .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) + .setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); String dataJobPath = BrowsePathUtils.getDefaultBrowsePath(dataJobUrn, this.registry, '/'); Assert.assertEquals(dataJobPath, "/airflow/test"); @@ -69,46 +64,42 @@ public void testGetDefaultBrowsePath() throws URISyntaxException { @Test public void testBuildDataPlatformUrn() throws URISyntaxException { // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); Urn dataPlatformUrn1 = BrowsePathUtils.buildDataPlatformUrn(datasetUrn, this.registry); Assert.assertEquals(dataPlatformUrn1, Urn.createFromString("urn:li:dataPlatform:kafka")); // Charts - ChartKey chartKey = new ChartKey() - .setChartId("Test/A/B") - .setDashboardTool("looker"); + ChartKey chartKey = new ChartKey().setChartId("Test/A/B").setDashboardTool("looker"); Urn chartUrn = EntityKeyUtils.convertEntityKeyToUrn(chartKey, "chart"); Urn dataPlatformUrn2 = BrowsePathUtils.buildDataPlatformUrn(chartUrn, this.registry); Assert.assertEquals(dataPlatformUrn2, Urn.createFromString("urn:li:dataPlatform:looker")); // Dashboards - DashboardKey dashboardKey = new DashboardKey() - .setDashboardId("Test/A/B") - .setDashboardTool("looker"); + DashboardKey dashboardKey = + new DashboardKey().setDashboardId("Test/A/B").setDashboardTool("looker"); Urn dashboardUrn = EntityKeyUtils.convertEntityKeyToUrn(dashboardKey, "dashboard"); Urn dataPlatformUrn3 = 
BrowsePathUtils.buildDataPlatformUrn(dashboardUrn, this.registry); Assert.assertEquals(dataPlatformUrn3, Urn.createFromString("urn:li:dataPlatform:looker")); // Data Flows - DataFlowKey dataFlowKey = new DataFlowKey() - .setCluster("test") - .setFlowId("Test/A/B") - .setOrchestrator("airflow"); + DataFlowKey dataFlowKey = + new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow"); Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow"); Urn dataPlatformUrn4 = BrowsePathUtils.buildDataPlatformUrn(dataFlowUrn, this.registry); Assert.assertEquals(dataPlatformUrn4, Urn.createFromString("urn:li:dataPlatform:airflow")); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = + new DataJobKey() + .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) + .setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); Urn dataPlatformUrn5 = BrowsePathUtils.buildDataPlatformUrn(dataJobUrn, this.registry); Assert.assertEquals(dataPlatformUrn5, Urn.createFromString("urn:li:dataPlatform:airflow")); - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java index 8a85ae0396ee1..3041b13839768 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.linkedin.common.BrowsePathEntry; import com.linkedin.common.BrowsePathEntryArray; import com.linkedin.common.BrowsePathsV2; @@ -17,22 +22,17 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.eq; +import org.testng.Assert; +import org.testng.annotations.Test; public class BrowsePathV2UtilsTest { - private static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test.a.b,DEV)"; + private static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test.a.b,DEV)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; private static final String DATA_FLOW_URN = "urn:li:dataFlow:(orchestrator,flowId,cluster)"; @@ -46,12 +46,16 @@ public void testGetDefaultDatasetBrowsePathV2WithContainers() throws URISyntaxEx Urn datasetUrn = UrnUtils.getUrn(DATASET_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); + EntityService 
mockService = + initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -62,9 +66,12 @@ public void testGetDefaultDatasetBrowsePathV2WithContainersFlagOff() throws URIS Urn datasetUrn = UrnUtils.getUrn(DATASET_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, false); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + datasetUrn, this.registry, '.', mockService, false); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("test"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("a"); @@ -78,12 +85,16 @@ public void testGetDefaultChartBrowsePathV2WithContainers() throws URISyntaxExce Urn chartUrn = UrnUtils.getUrn(CHART_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(chartUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(chartUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -94,12 +105,17 @@ public void testGetDefaultDashboardBrowsePathV2WithContainers() throws URISyntax Urn dashboardUrn = UrnUtils.getUrn(DASHBOARD_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(dashboardUrn, 
containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(dashboardUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dashboardUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dashboardUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -110,15 +126,19 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept EntityService mockService = mock(EntityService.class); // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); - when( - mockService.getEntityV2(eq(datasetUrn.getEntityType()), eq(datasetUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); + when(mockService.getEntityV2( + eq(datasetUrn.getEntityType()), + eq(datasetUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("test"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("a"); @@ -128,10 +148,13 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Charts Urn chartUrn = UrnUtils.getUrn(CHART_URN); - when( - mockService.getEntityV2(eq(chartUrn.getEntityType()), eq(chartUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(chartUrn.getEntityType()), + eq(chartUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); @@ -139,10 +162,14 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Dashboards Urn dashboardUrn = UrnUtils.getUrn(DASHBOARD_URN); - when( - 
mockService.getEntityV2(eq(dashboardUrn.getEntityType()), eq(dashboardUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dashboardUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(dashboardUrn.getEntityType()), + eq(dashboardUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dashboardUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); @@ -150,52 +177,64 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Data Flows Urn dataFlowUrn = UrnUtils.getUrn(DATA_FLOW_URN); - when( - mockService.getEntityV2(eq(dataFlowUrn.getEntityType()), eq(dataFlowUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dataFlowUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(dataFlowUrn.getEntityType()), + eq(dataFlowUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dataFlowUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(dataFlowUrn) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = new DataJobKey().setFlow(dataFlowUrn).setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dataJobUrn, this.registry, '/', mockService, true); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(dataJobUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId(dataFlowUrn.toString()).setUrn(dataFlowUrn); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); } - private EntityService initMockServiceWithContainerParents(Urn entityUrn, Urn containerUrn1, Urn containerUrn2) throws URISyntaxException { + private EntityService initMockServiceWithContainerParents( + Urn entityUrn, Urn containerUrn1, Urn containerUrn2) throws URISyntaxException { EntityService mockService = mock(EntityService.class); final Container container1 = new Container().setContainer(containerUrn1); final Map<String, EnvelopedAspect> aspectMap1 = new HashMap<>(); - aspectMap1.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container1.data()))); - final EntityResponse entityResponse1 = new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap1)); - when( - mockService.getEntityV2(eq(entityUrn.getEntityType()), eq(entityUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(entityResponse1); + aspectMap1.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container1.data()))); + final EntityResponse entityResponse1 = + new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap1)); + 
when(mockService.getEntityV2( + eq(entityUrn.getEntityType()), + eq(entityUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(entityResponse1); final Container container2 = new Container().setContainer(containerUrn2); final Map<String, EnvelopedAspect> aspectMap2 = new HashMap<>(); - aspectMap2.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container2.data()))); - final EntityResponse entityResponse2 = new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap2)); - when( - mockService.getEntityV2(eq(containerUrn1.getEntityType()), eq(containerUrn1), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(entityResponse2); - - when( - mockService.getEntityV2(eq(containerUrn2.getEntityType()), eq(containerUrn2), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + aspectMap2.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container2.data()))); + final EntityResponse entityResponse2 = + new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap2)); + when(mockService.getEntityV2( + eq(containerUrn1.getEntityType()), + eq(containerUrn1), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(entityResponse2); + + when(mockService.getEntityV2( + eq(containerUrn2.getEntityType()), + eq(containerUrn2), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); return mockService; - } } - diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java index ddd75a152c333..03abd9ffe29d7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java @@ -8,7 +8,6 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class ESUtilsTest { private static final String FIELD_TO_EXPAND = "fieldTags"; @@ -16,247 +15,241 @@ public class ESUtilsTest { @Test public void testGetQueryBuilderFromCriterionEqualsValues() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1" - ))); + final Criterion singleValueCriterion = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1"))); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"terms\" : {\n" + + " \"myTestField.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion multiValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion multiValueCriterion = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); result = ESUtils.getQueryBuilderFromCriterion(multiValueCriterion, false); expected = "{\n" - + 
" \"terms\" : {\n" - + " \"myTestField.keyword\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"terms\" : {\n" + + " \"myTestField.keyword\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion timeseriesField = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"terms\" : {\n" + + " \"myTestField\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - } @Test public void testGetQueryBuilderFromCriterionExists() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EXISTS); + final Criterion singleValueCriterion = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"bool\" : {\n" - + " \"must\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); // No diff in the timeseries field case for this condition. 
- final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.EXISTS); + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"bool\" : {\n" - + " \"must\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } @Test public void testGetQueryBuilderFromCriterionIsNull() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.IS_NULL); + final Criterion singleValueCriterion = + new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"bool\" : {\n" - + " \"must_not\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); // No diff in the timeseries case for this condition - final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.IS_NULL); + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"bool\" : {\n" - + " \"must_not\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } @Test public void testGetQueryBuilderFromCriterionFieldToExpand() { - final Criterion singleValueCriterion = new Criterion() - .setField(FIELD_TO_EXPAND) - .setCondition(Condition.EQUAL) - .setValue("") // Ignored - .setValues(new StringArray(ImmutableList.of( - "value1" - ))); + final Criterion singleValueCriterion = + new Criterion() + .setField(FIELD_TO_EXPAND) + .setCondition(Condition.EQUAL) + .setValue("") // Ignored + .setValues(new 
StringArray(ImmutableList.of("value1"))); // Ensure that the query is expanded! QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); - String expected = "{\n" - + " \"bool\" : {\n" - + " \"should\" : [\n" - + " {\n" - + " \"terms\" : {\n" - + " \"fieldTags.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"fieldTags\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"terms\" : {\n" - + " \"editedFieldTags.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"editedFieldTags\"\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + String expected = + "{\n" + + " \"bool\" : {\n" + + " \"should\" : [\n" + + " {\n" + + " \"terms\" : {\n" + + " \"fieldTags.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"fieldTags\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"terms\" : {\n" + + " \"editedFieldTags.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"editedFieldTags\"\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion timeseriesField = new Criterion() - .setField(FIELD_TO_EXPAND) - .setCondition(Condition.EQUAL) - .setValue("") // Ignored - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion timeseriesField = + new Criterion() + .setField(FIELD_TO_EXPAND) + .setCondition(Condition.EQUAL) + .setValue("") // Ignored + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); // Ensure that the query is expanded without keyword. 
result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); expected = "{\n" - + " \"bool\" : {\n" - + " \"should\" : [\n" - + " {\n" - + " \"terms\" : {\n" - + " \"fieldTags\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"fieldTags\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"terms\" : {\n" - + " \"editedFieldTags\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"editedFieldTags\"\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"should\" : [\n" + + " {\n" + + " \"terms\" : {\n" + + " \"fieldTags\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"fieldTags\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"terms\" : {\n" + + " \"editedFieldTags\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"editedFieldTags\"\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java index 7aa3bb19f0df6..5ea58e3416205 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java @@ -1,75 +1,164 @@ package com.linkedin.metadata.search.utils; -import com.linkedin.metadata.query.SearchFlags; -import org.testng.annotations.Test; +import static org.testng.Assert.assertEquals; +import com.linkedin.metadata.query.SearchFlags; import java.util.Set; - -import static org.testng.Assert.assertEquals; +import org.testng.annotations.Test; public class SearchUtilsTest { - @Test - public void testApplyDefaultSearchFlags() { - SearchFlags defaultFlags = new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipAggregates(true) - .setMaxAggValues(1) - .setSkipHighlighting(true); + @Test + public void testApplyDefaultSearchFlags() { + SearchFlags defaultFlags = + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipAggregates(true) + .setMaxAggValues(1) + .setSkipHighlighting(true); - assertEquals(SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), defaultFlags, - "Expected all default values"); + assertEquals( + SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), + defaultFlags, + "Expected all default values"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(false), - "Expected no default values"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "not empty", + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "Expected no default values"); - 
assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false)
-        .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), null, defaultFlags),
-        new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(true),
-        "Expected skip highlight due to query null query");
-    for (String query : Set.of("*", "")) {
-      assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false)
-          .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), query, defaultFlags),
-          new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(true),
-          String.format("Expected skip highlight due to query string `%s`", query));
-    }
-
-    assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false), "not empty", defaultFlags),
-        new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(true),
-        "Expected all default values except fulltext");
-    assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipCache(false), "not empty", defaultFlags),
-        new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(false).setMaxAggValues(1).setSkipHighlighting(true),
-        "Expected all default values except skipCache");
-    assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags),
-        new SearchFlags().setFulltext(true).setSkipAggregates(false).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(true),
-        "Expected all default values except skipAggregates");
-    assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags),
-        new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(2).setSkipHighlighting(true),
-        "Expected all default values except maxAggValues");
-    assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags),
-        new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(false),
-        "Expected all default values except skipHighlighting");
+    assertEquals(
+        SearchUtils.applyDefaultSearchFlags(
+            new SearchFlags()
+                .setFulltext(false)
+                .setSkipCache(false)
+                .setSkipAggregates(false)
+                .setMaxAggValues(2)
+                .setSkipHighlighting(false),
+            null,
+            defaultFlags),
+        new SearchFlags()
+            .setFulltext(false)
+            .setSkipAggregates(false)
+            .setSkipCache(false)
+            .setMaxAggValues(2)
+            .setSkipHighlighting(true),
+        "Expected skip highlight due to null query");
+    for (String query : Set.of("*", "")) {
+      assertEquals(
+          SearchUtils.applyDefaultSearchFlags(
+              new SearchFlags()
+                  .setFulltext(false)
+                  .setSkipCache(false)
+                  .setSkipAggregates(false)
+                  .setMaxAggValues(2)
+                  .setSkipHighlighting(false),
+              query,
+              defaultFlags),
+          new SearchFlags()
+              .setFulltext(false)
+              .setSkipAggregates(false)
+              .setSkipCache(false)
+              .setMaxAggValues(2)
+              .setSkipHighlighting(true),
+          String.format("Expected skip highlight due to query string `%s`", query));
     }
 
-  @Test
-  public void testImmutableDefaults() throws CloneNotSupportedException {
-    SearchFlags defaultFlags = new SearchFlags()
-        .setFulltext(true)
-        .setSkipCache(true)
-        .setSkipAggregates(true)
-        .setMaxAggValues(1)
-        .setSkipHighlighting(true);
-    SearchFlags copyFlags =
defaultFlags.copy(); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setFulltext(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except fulltext"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipCache(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(false) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except skipCache"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except skipAggregates"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(2) + .setSkipHighlighting(true), + "Expected all default values except maxAggValues"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(false), + "Expected all default values except skipHighlighting"); + } - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(false), - "Expected no default values"); + @Test + public void testImmutableDefaults() throws CloneNotSupportedException { + SearchFlags defaultFlags = + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipAggregates(true) + .setMaxAggValues(1) + .setSkipHighlighting(true); + SearchFlags copyFlags = defaultFlags.copy(); - assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); - } + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "not empty", + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "Expected no default values"); + assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java index e6a9bd7d198f7..8643855162fa7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.systemmetadata; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import 
com.linkedin.metadata.run.AspectRowSummary; import com.linkedin.metadata.run.IngestionRunSummary; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; @@ -8,30 +11,27 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.mxe.SystemMetadata; +import java.util.List; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.List; - -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - -abstract public class SystemMetadataServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class SystemMetadataServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); - private final IndexConvention _indexConvention = new IndexConventionImpl("es_system_metadata_service_test"); + private final IndexConvention _indexConvention = + new IndexConventionImpl("es_system_metadata_service_test"); private ElasticSearchSystemMetadataService _client; @@ -48,8 +48,10 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchSystemMetadataService buildService() { - ESSystemMetadataDAO dao = new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1); - return new ElasticSearchSystemMetadataService(getBulkProcessor(), _indexConvention, dao, getIndexBuilder()); + ESSystemMetadataDAO dao = + new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1); + return new ElasticSearchSystemMetadataService( + getBulkProcessor(), _indexConvention, dao, getIndexBuilder()); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java index 407d2ae684ede..921fbac12df85 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.timeline; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.mock; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.event.EventProducer; @@ -14,21 +16,19 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.mock; - /** - * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against a Cassandra database. 
+ * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against + * a Cassandra database. * - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. + * <p>This class also contains all the test methods where realities of an underlying storage leak + * into the {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. */ public class CassandraTimelineServiceTest extends TimelineServiceTest<CassandraAspectDao> { private CassandraContainer _cassandraContainer; - public CassandraTimelineServiceTest() throws EntityRegistryException { - } + public CassandraTimelineServiceTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -54,14 +54,20 @@ private void configureComponents() { _mockProducer = mock(EventProducer.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws Exception { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java index 9e89328715510..4e47e596dddc2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java @@ -1,7 +1,9 @@ package com.linkedin.metadata.timeline; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.mock; + import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.event.EventProducer; @@ -11,37 +13,42 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.mock; - /** - * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against a relational database. + * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against + * a relational database. * - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. 
+ * <p>This class also contains all the test methods where realities of an underlying storage leak + * into the {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. */ public class EbeanTimelineServiceTest extends TimelineServiceTest<EbeanAspectDao> { - public EbeanTimelineServiceTest() throws EntityRegistryException { - } + public EbeanTimelineServiceTest() throws EntityRegistryException {} @BeforeMethod public void setupTest() { - Database server = EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName()); + Database server = + EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName()); _aspectDao = new EbeanAspectDao(server); _aspectDao.setConnectionValidated(true); _entityTimelineService = new TimelineServiceImpl(_aspectDao, _testEntityRegistry); _mockProducer = mock(EventProducer.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws Exception { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java index b3e4b84a4962d..6cea5a78201b7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline; +import static org.mockito.Mockito.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; @@ -26,8 +28,6 @@ import com.linkedin.schema.SchemaMetadata; import com.linkedin.schema.StringType; import com.linkedin.util.Pair; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -35,29 +35,30 @@ import java.util.List; import java.util.Map; import java.util.Set; - -import static org.mockito.Mockito.*; - +import org.testng.annotations.Test; /** * A class to test {@link TimelineServiceImpl} * - * This class is generic to allow same integration tests to be reused to test all supported storage backends. - * If you're adding another storage backend - you should create a new test class that extends this one providing - * hard implementations of {@link AspectDao} and implements {@code @BeforeMethod} etc to set up and tear down state. + * <p>This class is generic to allow same integration tests to be reused to test all supported + * storage backends. 
If you're adding another storage backend - you should create a new test class + * that extends this one providing hard implementations of {@link AspectDao} and implements + * {@code @BeforeMethod} etc to set up and tear down state. * - * If you realise that a feature you want to test, sadly, has divergent behaviours between different storage implementations, - * that you can't rectify - you should make the test method abstract and implement it in all implementations of this class. + * <p>If you realise that a feature you want to test, sadly, has divergent behaviours between + * different storage implementations, that you can't rectify - you should make the test method + * abstract and implement it in all implementations of this class. * * @param <T_AD> {@link AspectDao} implementation. */ -abstract public class TimelineServiceTest<T_AD extends AspectDao> { +public abstract class TimelineServiceTest<T_AD extends AspectDao> { protected T_AD _aspectDao; protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); protected final EntityRegistry _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); protected TimelineServiceImpl _entityTimelineService; @@ -65,14 +66,16 @@ abstract public class TimelineServiceTest<T_AD extends AspectDao> { protected EventProducer _mockProducer; protected UpdateIndicesService _mockUpdateIndicesService = mock(UpdateIndicesService.class); - protected TimelineServiceTest() throws EntityRegistryException { - } + protected TimelineServiceTest() throws EntityRegistryException {} @Test public void testGetTimeline() throws Exception { - Urn entityUrn = Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:hive,fooDb.fooTable" + System.currentTimeMillis() + ",PROD)"); + Urn entityUrn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:hive,fooDb.fooTable" + + System.currentTimeMillis() + + ",PROD)"); String aspectName = "schemaMetadata"; ArrayList<AuditStamp> timestamps = new ArrayList(); @@ -82,39 +85,49 @@ public void testGetTimeline() throws Exception { SchemaMetadata schemaMetadata = getSchemaMetadata("This is the new description for day " + i); AuditStamp daysAgo = createTestAuditStamp(i); timestamps.add(daysAgo); - _entityServiceImpl.ingestAspects(entityUrn, Collections.singletonList(new Pair<>(aspectName, schemaMetadata)), - daysAgo, getSystemMetadata(daysAgo, "run-" + i)); + _entityServiceImpl.ingestAspects( + entityUrn, + Collections.singletonList(new Pair<>(aspectName, schemaMetadata)), + daysAgo, + getSystemMetadata(daysAgo, "run-" + i)); } Map<String, RecordTemplate> latestAspects = - _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(Arrays.asList(aspectName))); + _entityServiceImpl.getLatestAspectsForUrn( + entityUrn, new HashSet<>(Arrays.asList(aspectName))); Set<ChangeCategory> elements = new HashSet<>(); elements.add(ChangeCategory.TECHNICAL_SCHEMA); List<ChangeTransaction> changes = - _entityTimelineService.getTimeline(entityUrn, elements, createTestAuditStamp(10).getTime(), 0, null, null, - false); - //Assert.assertEquals(changes.size(), 7); - //Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), ChangeOperation.ADD); - 
//Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(0).getTime().longValue()); - //Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY); - //Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(1).getTime().longValue()); + _entityTimelineService.getTimeline( + entityUrn, elements, createTestAuditStamp(10).getTime(), 0, null, null, false); + // Assert.assertEquals(changes.size(), 7); + // Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.ADD); + // Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(0).getTime().longValue()); + // Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.MODIFY); + // Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(1).getTime().longValue()); changes = - _entityTimelineService.getTimeline(entityUrn, elements, timestamps.get(4).getTime() - 3000L, 0, null, null, - false); - //Assert.assertEquals(changes.size(), 3); - //Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY); - //Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(4).getTime().longValue()); - //Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY); - //Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(5).getTime().longValue()); + _entityTimelineService.getTimeline( + entityUrn, elements, timestamps.get(4).getTime() - 3000L, 0, null, null, false); + // Assert.assertEquals(changes.size(), 3); + // Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.MODIFY); + // Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(4).getTime().longValue()); + // Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.MODIFY); + // Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(5).getTime().longValue()); } private static AuditStamp createTestAuditStamp(int daysAgo) { try { Long timestamp = System.currentTimeMillis() - (daysAgo * 24 * 60 * 60 * 1000L); Long timestampRounded = 1000 * (timestamp / 1000); - return new AuditStamp().setTime(timestampRounded).setActor(Urn.createFromString("urn:li:principal:tester")); + return new AuditStamp() + .setTime(timestampRounded) + .setActor(Urn.createFromString("urn:li:principal:tester")); } catch (Exception e) { throw new RuntimeException("Failed to create urn"); } @@ -128,17 +141,22 @@ private SystemMetadata getSystemMetadata(AuditStamp twoDaysAgo, String s) { } private SchemaMetadata getSchemaMetadata(String s) { - SchemaField field1 = new SchemaField() - .setFieldPath("column1") - .setDescription(s) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string"); + SchemaField field1 = + new SchemaField() + .setFieldPath("column1") + .setDescription(s) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string"); SchemaFieldArray fieldArray = new SchemaFieldArray(); fieldArray.add(field1); - return new SchemaMetadata().setSchemaName("testSchema") - .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema("foo"))) + return new SchemaMetadata() + .setSchemaName("testSchema") + .setPlatformSchema( + SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema("foo"))) 
.setPlatform(new DataPlatformUrn("hive")) .setHash("") .setVersion(0L) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java index 75508320abdce..3e9f1cd0fe092 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java @@ -1,44 +1,50 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeEvent; import com.linkedin.mxe.SystemMetadata; import com.linkedin.restli.internal.server.util.DataMapUtils; import com.linkedin.schema.SchemaMetadata; -import org.apache.commons.io.IOUtils; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; - -import static org.testng.AssertJUnit.assertEquals; +import org.apache.commons.io.IOUtils; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; public class SchemaMetadataChangeEventGeneratorTest extends AbstractTestNGSpringContextTests { - @Test - public void testDelete() throws Exception { - SchemaMetadataChangeEventGenerator test = new SchemaMetadataChangeEventGenerator(); - - Urn urn = Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); - String entity = "dataset"; - String aspect = "schemaMetadata"; - AuditStamp auditStamp = new AuditStamp() - .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system")) - .setTime(1683829509553L); - Aspect<SchemaMetadata> from = new Aspect<>(DataMapUtils.read(IOUtils.toInputStream(TEST_OBJECT, StandardCharsets.UTF_8), - SchemaMetadata.class, Map.of()), new SystemMetadata()); - Aspect<SchemaMetadata> to = new Aspect<>(null, new SystemMetadata()); - - List<ChangeEvent> actual = test.getChangeEvents(urn, entity, aspect, from, to, auditStamp); - - assertEquals(14, actual.size()); - } - - //CHECKSTYLE:OFF - private static final String TEST_OBJECT = "{\"platformSchema\":{\"com.linkedin.schema.KafkaSchema\":{\"documentSchema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SampleHdfsSchema\\\",\\\"namespace\\\":\\\"com.linkedin.dataset\\\",\\\"doc\\\":\\\"Sample HDFS dataset\\\",\\\"fields\\\":[{\\\"name\\\":\\\"field_foo\\\",\\\"type\\\":[\\\"string\\\"]},{\\\"name\\\":\\\"field_bar\\\",\\\"type\\\":[\\\"boolean\\\"]}]}\"}},\"created\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"lastModified\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"fields\":[{\"nullable\":false,\"fieldPath\":\"shipment_info\",\"description\":\"Shipment info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.date\",\"description\":\"Shipment info date 
description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.DateType\":{}}},\"recursive\":false,\"nativeDataType\":\"Date\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.target\",\"description\":\"Shipment info target description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"text\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.destination\",\"description\":\"Shipment info destination description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info\",\"description\":\"Shipment info geo_info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lat\",\"description\":\"Shipment info geo_info lat\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lng\",\"description\":\"Shipment info geo_info lng\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"}],\"schemaName\":\"SampleHdfsSchema\",\"version\":0,\"hash\":\"\",\"platform\":\"urn:li:dataPlatform:hdfs\"}"; - //CHECKSTYLE:ON + @Test + public void testDelete() throws Exception { + SchemaMetadataChangeEventGenerator test = new SchemaMetadataChangeEventGenerator(); + + Urn urn = + Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + String entity = "dataset"; + String aspect = "schemaMetadata"; + AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system")) + .setTime(1683829509553L); + Aspect<SchemaMetadata> from = + new Aspect<>( + DataMapUtils.read( + IOUtils.toInputStream(TEST_OBJECT, StandardCharsets.UTF_8), + SchemaMetadata.class, + Map.of()), + new SystemMetadata()); + Aspect<SchemaMetadata> to = new Aspect<>(null, new SystemMetadata()); + + List<ChangeEvent> actual = test.getChangeEvents(urn, entity, aspect, from, to, auditStamp); + + assertEquals(14, actual.size()); + } + + // CHECKSTYLE:OFF + private static final String TEST_OBJECT = + "{\"platformSchema\":{\"com.linkedin.schema.KafkaSchema\":{\"documentSchema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SampleHdfsSchema\\\",\\\"namespace\\\":\\\"com.linkedin.dataset\\\",\\\"doc\\\":\\\"Sample HDFS dataset\\\",\\\"fields\\\":[{\\\"name\\\":\\\"field_foo\\\",\\\"type\\\":[\\\"string\\\"]},{\\\"name\\\":\\\"field_bar\\\",\\\"type\\\":[\\\"boolean\\\"]}]}\"}},\"created\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"lastModified\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"fields\":[{\"nullable\":false,\"fieldPath\":\"shipment_info\",\"description\":\"Shipment info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.date\",\"description\":\"Shipment info date description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.DateType\":{}}},\"recursive\":false,\"nativeDataType\":\"Date\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.target\",\"description\":\"Shipment info 
target description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"text\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.destination\",\"description\":\"Shipment info destination description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info\",\"description\":\"Shipment info geo_info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lat\",\"description\":\"Shipment info geo_info lat\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lng\",\"description\":\"Shipment info geo_info lng\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"}],\"schemaName\":\"SampleHdfsSchema\",\"version\":0,\"hash\":\"\",\"platform\":\"urn:li:dataPlatform:hdfs\"}"; + // CHECKSTYLE:ON } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java index 1362a0f69eff2..13236e302c259 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.timeseries.search; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + import com.datahub.test.BatchType; import com.datahub.test.ComplexNestedRecord; import com.datahub.test.TestEntityComponentProfile; @@ -46,37 +54,35 @@ import com.linkedin.timeseries.GroupingBucketType; import com.linkedin.timeseries.TimeWindowSize; import com.linkedin.timeseries.TimeseriesIndexSizeResult; -import org.opensearch.client.RestHighLevelClient; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.Calendar; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; +import org.opensearch.client.RestHighLevelClient; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; -import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; -import static 
org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class TimeseriesAspectServiceTestBase extends AbstractTestNGSpringContextTests { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } private static final String ENTITY_NAME = "testEntity"; private static final String ASPECT_NAME = "testEntityProfile"; - private static final Urn TEST_URN = new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table1"); + private static final Urn TEST_URN = + new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table1"); private static final int NUM_PROFILES = 100; private static final long TIME_INCREMENT = 3600000; // hour in ms. private static final String CONTENT_TYPE = "application/json"; @@ -85,13 +91,13 @@ abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpri private static final String ES_FIELD_STAT = "stat"; @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -107,8 +113,12 @@ abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpri @BeforeClass public void setup() { - _entityRegistry = new ConfigEntityRegistry(new DataSchemaFactory("com.datahub.test"), - TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + _entityRegistry = + new ConfigEntityRegistry( + new DataSchemaFactory("com.datahub.test"), + TestEntityProfile.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); _indexConvention = new IndexConventionImpl("es_timeseries_aspect_service_test"); _elasticSearchTimeseriesAspectService = buildService(); _elasticSearchTimeseriesAspectService.configure(); @@ -118,9 +128,13 @@ public void setup() { @Nonnull private ElasticSearchTimeseriesAspectService buildService() { - return new ElasticSearchTimeseriesAspectService(getSearchClient(), _indexConvention, - new TimeseriesAspectIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention), _entityRegistry, getBulkProcessor(), 1); + return new ElasticSearchTimeseriesAspectService( + getSearchClient(), + _indexConvention, + new TimeseriesAspectIndexBuilders(getIndexBuilder(), _entityRegistry, _indexConvention), + _entityRegistry, + getBulkProcessor(), + 1); } /* @@ -128,10 +142,13 @@ private ElasticSearchTimeseriesAspectService buildService() { */ private void upsertDocument(TestEntityProfile dp, Urn urn) throws JsonProcessingException { - Map<String, JsonNode> documents = 
TimeseriesAspectTransformer.transform(urn, dp, _aspectSpec, null); + Map<String, JsonNode> documents = + TimeseriesAspectTransformer.transform(urn, dp, _aspectSpec, null); assertEquals(documents.size(), 3); documents.forEach( - (key, value) -> _elasticSearchTimeseriesAspectService.upsertDocument(ENTITY_NAME, ASPECT_NAME, key, value)); + (key, value) -> + _elasticSearchTimeseriesAspectService.upsertDocument( + ENTITY_NAME, ASPECT_NAME, key, value)); } private TestEntityProfile makeTestProfile(long eventTime, long stat, String messageId) { @@ -140,7 +157,8 @@ private TestEntityProfile makeTestProfile(long eventTime, long stat, String mess testEntityProfile.setStat(stat); testEntityProfile.setStrStat(String.valueOf(stat)); testEntityProfile.setStrArray(new StringArray("sa_" + stat, "sa_" + (stat + 1))); - testEntityProfile.setEventGranularity(new TimeWindowSize().setUnit(CalendarInterval.DAY).setMultiple(1)); + testEntityProfile.setEventGranularity( + new TimeWindowSize().setUnit(CalendarInterval.DAY).setMultiple(1)); if (messageId != null) { testEntityProfile.setMessageId(messageId); } @@ -152,14 +170,17 @@ private TestEntityProfile makeTestProfile(long eventTime, long stat, String mess TestEntityComponentProfile componentProfile2 = new TestEntityComponentProfile(); componentProfile2.setKey("col2"); componentProfile2.setStat(stat + 2); - testEntityProfile.setComponentProfiles(new TestEntityComponentProfileArray(componentProfile1, componentProfile2)); + testEntityProfile.setComponentProfiles( + new TestEntityComponentProfileArray(componentProfile1, componentProfile2)); StringMap stringMap1 = new StringMap(); stringMap1.put("p_key1", "p_val1"); StringMap stringMap2 = new StringMap(); stringMap2.put("p_key2", "p_val2"); - ComplexNestedRecord nestedRecord = new ComplexNestedRecord().setType(BatchType.PARTITION_BATCH) - .setPartitions(new StringMapArray(stringMap1, stringMap2)); + ComplexNestedRecord nestedRecord = + new ComplexNestedRecord() + .setType(BatchType.PARTITION_BATCH) + .setPartitions(new StringMapArray(stringMap1, stringMap2)); testEntityProfile.setAComplexNestedRecord(nestedRecord); return testEntityProfile; @@ -172,57 +193,74 @@ public void testUpsertProfiles() throws Exception { _startTime = _startTime - _startTime % 86400000; // Create the testEntity profiles that we would like to use for testing. TestEntityProfile firstProfile = makeTestProfile(_startTime, 20, null); - Stream<TestEntityProfile> testEntityProfileStream = Stream.iterate(firstProfile, - (TestEntityProfile prev) -> makeTestProfile(prev.getTimestampMillis() + TIME_INCREMENT, prev.getStat() + 10, - null)); - - _testEntityProfiles = testEntityProfileStream.limit(NUM_PROFILES) - .collect(Collectors.toMap(TestEntityProfile::getTimestampMillis, Function.identity())); + Stream<TestEntityProfile> testEntityProfileStream = + Stream.iterate( + firstProfile, + (TestEntityProfile prev) -> + makeTestProfile( + prev.getTimestampMillis() + TIME_INCREMENT, prev.getStat() + 10, null)); + + _testEntityProfiles = + testEntityProfileStream + .limit(NUM_PROFILES) + .collect(Collectors.toMap(TestEntityProfile::getTimestampMillis, Function.identity())); Long endTime = _startTime + (NUM_PROFILES - 1) * TIME_INCREMENT; assertNotNull(_testEntityProfiles.get(_startTime)); assertNotNull(_testEntityProfiles.get(endTime)); // Upsert the documents into the index. 
- _testEntityProfiles.values().forEach(x -> { - try { - upsertDocument(x, TEST_URN); - } catch (JsonProcessingException jsonProcessingException) { - jsonProcessingException.printStackTrace(); - } - }); + _testEntityProfiles + .values() + .forEach( + x -> { + try { + upsertDocument(x, TEST_URN); + } catch (JsonProcessingException jsonProcessingException) { + jsonProcessingException.printStackTrace(); + } + }); syncAfterWrite(getBulkProcessor()); } @Test(groups = "upsertUniqueMessageId") public void testUpsertProfilesWithUniqueMessageIds() throws Exception { - // Create the testEntity profiles that have the same value for timestampMillis, but use unique message ids. + // Create the testEntity profiles that have the same value for timestampMillis, but use unique + // message ids. // We should preserve all the documents we are going to upsert in the index. final long curTimeMillis = Calendar.getInstance().getTimeInMillis(); final long startTime = curTimeMillis - curTimeMillis % 86400000; final TestEntityProfile firstProfile = makeTestProfile(startTime, 20, "20"); - Stream<TestEntityProfile> testEntityProfileStream = Stream.iterate(firstProfile, - (TestEntityProfile prev) -> makeTestProfile(prev.getTimestampMillis(), prev.getStat() + 10, - String.valueOf(prev.getStat() + 10))); - - final List<TestEntityProfile> testEntityProfiles = testEntityProfileStream.limit(3).collect(Collectors.toList()); + Stream<TestEntityProfile> testEntityProfileStream = + Stream.iterate( + firstProfile, + (TestEntityProfile prev) -> + makeTestProfile( + prev.getTimestampMillis(), + prev.getStat() + 10, + String.valueOf(prev.getStat() + 10))); + + final List<TestEntityProfile> testEntityProfiles = + testEntityProfileStream.limit(3).collect(Collectors.toList()); // Upsert the documents into the index. 
-    final Urn urn = new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table2");
-    testEntityProfiles.forEach(x -> {
-      try {
-        upsertDocument(x, urn);
-      } catch (JsonProcessingException jsonProcessingException) {
-        jsonProcessingException.printStackTrace();
-      }
-    });
+    final Urn urn =
+        new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table2");
+    testEntityProfiles.forEach(
+        x -> {
+          try {
+            upsertDocument(x, urn);
+          } catch (JsonProcessingException jsonProcessingException) {
+            jsonProcessingException.printStackTrace();
+          }
+        });
 
     syncAfterWrite(getBulkProcessor());
 
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(urn, ENTITY_NAME, ASPECT_NAME, null, null,
-            testEntityProfiles.size(), null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            urn, ENTITY_NAME, ASPECT_NAME, null, null, testEntityProfiles.size(), null);
 
     assertEquals(resultAspects.size(), testEntityProfiles.size());
   }
@@ -232,8 +270,9 @@ public void testUpsertProfilesWithUniqueMessageIds() throws Exception {
 
   private void validateAspectValue(EnvelopedAspect envelopedAspectResult) {
     TestEntityProfile actualProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(envelopedAspectResult.getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                envelopedAspectResult.getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile expectedProfile = _testEntityProfiles.get(actualProfile.getTimestampMillis());
     assertNotNull(expectedProfile);
     assertEquals(actualProfile.getStat(), expectedProfile.getStat());
@@ -248,18 +287,23 @@ private void validateAspectValues(List<EnvelopedAspect> aspects, long numResults
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesAll() {
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, null);
     validateAspectValues(resultAspects, NUM_PROFILES);
     TestEntityProfile firstProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(0).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(0).getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile lastProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
-
-    // Now verify that the first index is the one with the highest stat value, and the last the one with the lower.
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
+                CONTENT_TYPE,
+                _aspectSpec);
+
+    // Now verify that the first index is the one with the highest stat value, and the last the one
+    // with the lowest.
     assertEquals((long) firstProfile.getStat(), 20 + (NUM_PROFILES - 1) * 10);
     assertEquals((long) lastProfile.getStat(), 20);
   }
@@ -267,31 +311,43 @@ public void testGetAspectTimeseriesValuesAll() {
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesAllSorted() {
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, null, new SortCriterion().setField("stat").setOrder(SortOrder.ASCENDING));
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            null,
+            null,
+            NUM_PROFILES,
+            null,
+            new SortCriterion().setField("stat").setOrder(SortOrder.ASCENDING));
     validateAspectValues(resultAspects, NUM_PROFILES);
     TestEntityProfile firstProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(0).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(0).getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile lastProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
-
-    // Now verify that the first index is the one with the highest stat value, and the last the one with the lower.
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
+                CONTENT_TYPE,
+                _aspectSpec);
+
+    // Now verify that the first index is the one with the highest stat value, and the last the one
+    // with the lowest.
     assertEquals((long) firstProfile.getStat(), 20);
     assertEquals((long) lastProfile.getStat(), 20 + (NUM_PROFILES - 1) * 10);
-
   }
 
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesWithFilter() {
     Filter filter = new Filter();
-    Criterion hasStatEqualsTwenty = new Criterion().setField("stat").setCondition(Condition.EQUAL).setValue("20");
+    Criterion hasStatEqualsTwenty =
+        new Criterion().setField("stat").setCondition(Condition.EQUAL).setValue("20");
     filter.setCriteria(new CriterionArray(hasStatEqualsTwenty));
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, filter);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, filter);
     validateAspectValues(resultAspects, 1);
   }
 
@@ -299,8 +355,14 @@ public void testGetAspectTimeseriesValuesWithFilter() {
   public void testGetAspectTimeseriesValuesSubRangeInclusiveOverlap() {
     int expectedNumRows = 10;
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, _startTime,
-            _startTime + TIME_INCREMENT * (expectedNumRows - 1), expectedNumRows, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            _startTime,
+            _startTime + TIME_INCREMENT * (expectedNumRows - 1),
+            expectedNumRows,
+            null);
     validateAspectValues(resultAspects, expectedNumRows);
   }
 
@@ -308,9 +370,14 @@ public void testGetAspectTimeseriesValuesSubRangeInclusiveOverlap() {
   public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlap() {
     int expectedNumRows = 10;
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME,
ASPECT_NAME, - _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, - expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime + TIME_INCREMENT / 2, + _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, + expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } @@ -318,9 +385,14 @@ public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlap() { public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlapLatestValueOnly() { int expectedNumRows = 1; List<EnvelopedAspect> resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, - _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, - expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime + TIME_INCREMENT / 2, + _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, + expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } @@ -328,17 +400,25 @@ public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlapLatestValueOnly public void testGetAspectTimeseriesValuesExactlyOneResponse() { int expectedNumRows = 1; List<EnvelopedAspect> resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, - _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * 3 / 2, expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime + TIME_INCREMENT / 2, + _startTime + TIME_INCREMENT * 3 / 2, + expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } - @Test(groups = {"getAspectValues"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAspectValues"}, + dependsOnGroups = {"upsert"}) public void testGetAspectTimeseriesValueMissingUrn() { Urn nonExistingUrn = new TestEntityUrn("missing", "missing", "missing"); List<EnvelopedAspect> resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(nonExistingUrn, ENTITY_NAME, ASPECT_NAME, null, null, - NUM_PROFILES, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + nonExistingUrn, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, null); validateAspectValues(resultAspects, 0); } @@ -347,71 +427,109 @@ public void testGetAspectTimeseriesValueMissingUrn() { */ /* Latest Aggregation Tests */ - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + 
.setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
         new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat");
 
     // Grouping bucket is only timestamp field.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long"));
     // Validate rows
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 1);
-    assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(),
-        _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString())));
+    assertEquals(
+        resultTable.getRows(),
+        new StringArrayArray(
+            new StringArray(
+                _startTime.toString(),
+                _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString())));
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestAComplexNestedRecordForDay1() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
 
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("aComplexNestedRecord");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("aComplexNestedRecord");
 
     // Grouping bucket is only timestamp field.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_aComplexNestedRecord"));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_aComplexNestedRecord"));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "record"));
     // Validate rows
@@ -421,86 +539,121 @@ public void testGetAggregatedStatsLatestAComplexNestedRecordForDay1() {
     try {
       ComplexNestedRecord latestAComplexNestedRecord =
           OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), ComplexNestedRecord.class);
-      assertEquals(latestAComplexNestedRecord,
+      assertEquals(
+          latestAComplexNestedRecord,
          _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getAComplexNestedRecord());
     } catch (JsonProcessingException e) {
       fail("Unexpected exception thrown" + e);
     }
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStrArrayDay1() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
 
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("strArray");
 
     // Grouping bucket is only timestamp field.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + "strArray"));
+    assertEquals(
+        resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + "strArray"));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "array"));
     // Validate rows
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 1);
-    StringArray expectedStrArray = _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStrArray();
-    //assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(),
+    StringArray expectedStrArray =
+        _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStrArray();
+    // assertEquals(resultTable.getRows(), new StringArrayArray(new
+    // StringArray(_startTime.toString(),
     //    expectedStrArray.toString())));
 
     // Test array construction using object mapper as well
     try {
-      StringArray actualStrArray = OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), StringArray.class);
+      StringArray actualStrArray =
+          OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), StringArray.class);
       assertEquals(actualStrArray, expectedStrArray);
     } catch (JsonProcessingException e) {
       e.printStackTrace();
    }
  }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStatForTwoDays() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
 
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 47 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 47 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion,
endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows @@ -508,132 +661,223 @@ public void testGetAggregatedStatsLatestStatForTwoDays() { assertEquals(resultTable.getRows().size(), 2); Long latestDay1Ts = _startTime + 23 * TIME_INCREMENT; Long latestDay2Ts = _startTime + 47 * TIME_INCREMENT; - assertEquals(resultTable.getRows(), new StringArrayArray( - new StringArray(_startTime.toString(), _testEntityProfiles.get(latestDay1Ts).getStat().toString()), - new StringArray(String.valueOf(_startTime + 24 * TIME_INCREMENT), - _testEntityProfiles.get(latestDay2Ts).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), _testEntityProfiles.get(latestDay1Ts).getStat().toString()), + new StringArray( + String.valueOf(_startTime + 24 * TIME_INCREMENT), + _testEntityProfiles.get(latestDay2Ts).getStat().toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForFirst10HoursOfDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); Filter filter = - 
QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), - _testEntityProfiles.get(_startTime + 9 * TIME_INCREMENT).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), + _testEntityProfiles.get(_startTime + 9 * TIME_INCREMENT).getStat().toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForCol1Day1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(lastEntryTimeStamp)); Criterion hasCol1 = - new Criterion().setField("componentProfiles.key").setCondition(Condition.EQUAL).setValue("col1"); + new Criterion() + .setField("componentProfiles.key") + .setCondition(Condition.EQUAL) + .setValue("col1"); - Filter filter = QueryUtils.getFilterFromCriteria( - ImmutableList.of(hasUrnCriterion, 
hasCol1, startTimeCriterion, endTimeCriterion)); + Filter filter = + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, hasCol1, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp filed + componentProfiles.key. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "col1", - _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(0).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), + "col1", + _testEntityProfiles + .get(lastEntryTimeStamp) + .getComponentProfiles() + .get(0) + .getStat() + .toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForAllColumnsDay1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + 
.setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(lastEntryTimeStamp)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp filed + componentProfiles.key. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "long")); // Validate rows - StringArray expectedRow1 = new StringArray(_startTime.toString(), "col1", - _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(0).getStat().toString()); - StringArray expectedRow2 = new StringArray(_startTime.toString(), "col2", - _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(1).getStat().toString()); + StringArray expectedRow1 = + new StringArray( + _startTime.toString(), + "col1", + _testEntityProfiles + .get(lastEntryTimeStamp) + .getComponentProfiles() + .get(0) + .getStat() + .toString()); + StringArray expectedRow2 = + new StringArray( + _startTime.toString(), + "col2", + _testEntityProfiles + .get(lastEntryTimeStamp) + .getComponentProfiles() + .get(1) + .getStat() + .toString()); assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 2); @@ -641,33 +885,48 @@ public void testGetAggregatedStatsLatestStatForAllColumnsDay1() { } /* Sum Aggregation Tests */ - @Test(groups = {"getAggregatedStats"}, 
dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatForFirst10HoursOfDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate the sum of stat value AggregationSpec sumAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{sumAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {sumAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "sum_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "sum_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "double")); // Validate rows @@ -675,45 +934,68 @@ public void testGetAggregatedStatsSumStatForFirst10HoursOfDay1() { assertEquals(resultTable.getRows().size(), 1); // value is 20+30+40+... up to 10 terms = 650 // TODO: Compute this caching the documents. 
- assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), String.valueOf(650)))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatForCol2Day1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(lastEntryTimeStamp)); Criterion hasCol2 = - new Criterion().setField("componentProfiles.key").setCondition(Condition.EQUAL).setValue("col2"); + new Criterion() + .setField("componentProfiles.key") + .setCondition(Condition.EQUAL) + .setValue("col2"); - Filter filter = QueryUtils.getFilterFromCriteria( - ImmutableList.of(hasUrnCriterion, hasCol2, startTimeCriterion, endTimeCriterion)); + Filter filter = + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, hasCol2, startTimeCriterion, endTimeCriterion)); // Aggregate the sum of stat value AggregationSpec sumStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp field + componentProfiles.key.
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{sumStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {sumStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "sum_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "sum_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "double")); // Validate rows @@ -721,115 +1003,166 @@ public void testGetAggregatedStatsSumStatForCol2Day1() { assertEquals(resultTable.getRows().size(), 1); // value = 22+32+42+... 24 terms = 3288 // TODO: Compute this by caching the documents.
- assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "col2", String.valueOf(3288)))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsCardinalityAggStrStatDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec cardinalityStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("strStat"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("strStat"); // Grouping bucket is only timestamp field.
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{cardinalityStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {cardinalityStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "cardinality_" + "strStat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "cardinality_" + "strStat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "24"))); + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "24"))); } - @Test(groups = {"getAggregatedStats", "usageStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats", "usageStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatsCollectionDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec cardinalityStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is only timestamp field.
GroupingBucket profileStatBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{cardinalityStatAggregationSpec}, filter, new GroupingBucket[]{profileStatBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {cardinalityStatAggregationSpec}, + filter, + new GroupingBucket[] {profileStatBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), + assertEquals( + resultTable.getColumnNames(), new StringArray("componentProfiles.key", "sum_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("string", "double")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 2); - assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray("col1", "3264"), new StringArray("col2", "3288"))); } - @Test(groups = {"deleteAspectValues1"}, dependsOnGroups = {"getAggregatedStats", "getAspectValues", "testCountBeforeDelete"}) + @Test( + groups = {"deleteAspectValues1"}, + dependsOnGroups = {"getAggregatedStats", "getAspectValues", "testCountBeforeDelete"}) public void testDeleteAspectValuesByUrnAndTimeRangeDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); DeleteAspectValuesResult result = _elasticSearchTimeseriesAspectService.deleteAspectValues(ENTITY_NAME, ASPECT_NAME, filter); - // For day1, we expect 24 (number of hours) * 3 (each testEntityProfile aspect expands 3 elastic docs: + // For day1, we expect 24 (number of hours) * 3 (each testEntityProfile aspect expands 3 elastic + // docs: // 1 original + 2 for componentProfiles) = 72 total. 
assertEquals(result.getNumDocsDeleted(), Long.valueOf(72L)); } - @Test(groups = {"deleteAspectValues2"}, dependsOnGroups = {"deleteAspectValues1", "testCountAfterDelete"}) + @Test( + groups = {"deleteAspectValues2"}, + dependsOnGroups = {"deleteAspectValues1", "testCountAfterDelete"}) public void testDeleteAspectValuesByUrn() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); Filter filter = QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion)); DeleteAspectValuesResult result = _elasticSearchTimeseriesAspectService.deleteAspectValues(ENTITY_NAME, ASPECT_NAME, filter); - // Of the 300 elastic docs upserted for TEST_URN, 72 got deleted by deleteAspectValues1 test group leaving 228. + // Of the 300 elastic docs upserted for TEST_URN, 72 got deleted by deleteAspectValues1 test + // group leaving 228. assertEquals(result.getNumDocsDeleted(), Long.valueOf(228L)); } - @Test(groups = {"testCountBeforeDelete"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"testCountBeforeDelete"}, + dependsOnGroups = {"upsert"}) public void testCountByFilter() { // Test with filter Criterion hasUrnCriterion = @@ -840,17 +1173,23 @@ public void testCountByFilter() { assertEquals(count, 300L); // Test with filter with multiple criteria - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter urnAndTimeFilter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); count = - _elasticSearchTimeseriesAspectService.countByFilter(ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); + _elasticSearchTimeseriesAspectService.countByFilter( + ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); assertEquals(count, 72L); // test without filter @@ -860,7 +1199,9 @@ public void testCountByFilter() { assertTrue(count >= 300L); } - @Test(groups = {"testCountAfterDelete"}, dependsOnGroups = {"deleteAspectValues1"}) + @Test( + groups = {"testCountAfterDelete"}, + dependsOnGroups = {"deleteAspectValues1"}) public void testCountByFilterAfterDelete() throws InterruptedException { syncAfterWrite(getBulkProcessor()); // Test with filter @@ -872,24 +1213,32 @@ public void testCountByFilterAfterDelete() throws InterruptedException { assertEquals(count, 228L); // Test with filter with multiple criteria - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + 
.setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter urnAndTimeFilter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); count = - _elasticSearchTimeseriesAspectService.countByFilter(ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); + _elasticSearchTimeseriesAspectService.countByFilter( + ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); assertEquals(count, 0L); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetIndexSizes() { List<TimeseriesIndexSizeResult> result = _elasticSearchTimeseriesAspectService.getIndexSizes(); - //CHECKSTYLE:OFF + // CHECKSTYLE:OFF /* Example result: {aspectName=testentityprofile, sizeMb=52.234, @@ -897,11 +1246,17 @@ public void testGetIndexSizes() { {aspectName=testentityprofile, sizeMb=0.208, indexName=es_timeseries_aspect_service_test_testentitywithouttests_testentityprofileaspect_v1, entityName=testentitywithouttests} */ - // There may be other indices in there from other tests, so just make sure that index for entity + aspect is in there - //CHECKSTYLE:ON + // There may be other indices in there from other tests, so just make sure that index for entity + // + aspect is in there + // CHECKSTYLE:ON assertTrue(result.size() > 0); assertTrue( - result.stream().anyMatch(idxSizeResult -> idxSizeResult.getIndexName().equals( - "es_timeseries_aspect_service_test_testentity_testentityprofileaspect_v1"))); + result.stream() + .anyMatch( + idxSizeResult -> + idxSizeResult + .getIndexName() + .equals( + "es_timeseries_aspect_service_test_testentity_testentityprofileaspect_v1"))); } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java index 12a02f954e1bc..29c64abdc4d0d 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java +++ b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java @@ -1,5 +1,7 @@ package io.datahubproject.test; +import static org.mockito.Mockito.mock; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -24,13 +26,7 @@ import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; -import net.datafaker.Faker; import com.linkedin.mxe.MetadataChangeProposal; -import net.datafaker.providers.base.Animal; -import net.datafaker.providers.base.Cat; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URISyntaxException; @@ -46,72 +42,92 @@ import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; - -import static org.mockito.Mockito.mock; +import javax.annotation.Nonnull; +import net.datafaker.Faker; +import net.datafaker.providers.base.Animal; +import net.datafaker.providers.base.Cat; +import 
org.apache.commons.lang3.NotImplementedException; public class DataGenerator { - private final static Faker FAKER = new Faker(); - private final EntityRegistry entityRegistry; - private final EntityService entityService; - - public DataGenerator(EntityService entityService) { - this.entityService = entityService; - this.entityRegistry = entityService.getEntityRegistry(); - } - - public static DataGenerator build(EntityRegistry entityRegistry) { - EntityServiceImpl mockEntityServiceImpl = new EntityServiceImpl(mock(AspectDao.class), - mock(EventProducer.class), entityRegistry, false, - mock(UpdateIndicesService.class), mock(PreProcessHooks.class)); - return new DataGenerator(mockEntityServiceImpl); - } - - public Stream<List<MetadataChangeProposal>> generateDatasets() { - return generateMCPs("dataset", 10, List.of()); - } - - public List<MetadataChangeProposal> generateTags(long count) { - return generateMCPs("tag", count, List.of()).findFirst().get(); - } - - public Stream<List<MetadataChangeProposal>> generateMCPs(String entityName, long count, List<String> aspects) { - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - - // Prevent duplicate tags and terms generated as secondary entities - Set<Urn> secondaryUrns = new HashSet<>(); - - return LongStream.range(0, count).mapToObj(idx -> { - RecordTemplate key = randomKeyAspect(entitySpec); - MetadataChangeProposal mcp = new MetadataChangeProposal(); - mcp.setEntityType(entitySpec.getName()); - mcp.setAspectName(entitySpec.getKeyAspectName()); - mcp.setAspect(GenericRecordUtils.serializeAspect(key)); - mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); - mcp.setChangeType(ChangeType.UPSERT); - return mcp; - }).flatMap(mcp -> { - // Expand with additional random aspects - List<MetadataChangeProposal> additionalMCPs = new LinkedList<>(); - - for (String aspectName : aspects) { + private static final Faker FAKER = new Faker(); + private final EntityRegistry entityRegistry; + private final EntityService entityService; + + public DataGenerator(EntityService entityService) { + this.entityService = entityService; + this.entityRegistry = entityService.getEntityRegistry(); + } + + public static DataGenerator build(EntityRegistry entityRegistry) { + EntityServiceImpl mockEntityServiceImpl = + new EntityServiceImpl( + mock(AspectDao.class), + mock(EventProducer.class), + entityRegistry, + false, + mock(UpdateIndicesService.class), + mock(PreProcessHooks.class)); + return new DataGenerator(mockEntityServiceImpl); + } + + public Stream<List<MetadataChangeProposal>> generateDatasets() { + return generateMCPs("dataset", 10, List.of()); + } + + public List<MetadataChangeProposal> generateTags(long count) { + return generateMCPs("tag", count, List.of()).findFirst().get(); + } + + public Stream<List<MetadataChangeProposal>> generateMCPs( + String entityName, long count, List<String> aspects) { + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + // Prevent duplicate tags and terms generated as secondary entities + Set<Urn> secondaryUrns = new HashSet<>(); + + return LongStream.range(0, count) + .mapToObj( + idx -> { + RecordTemplate key = randomKeyAspect(entitySpec); + MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityType(entitySpec.getName()); + mcp.setAspectName(entitySpec.getKeyAspectName()); + mcp.setAspect(GenericRecordUtils.serializeAspect(key)); + mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); + mcp.setChangeType(ChangeType.UPSERT); + return 
mcp; + }) + .flatMap( + mcp -> { + // Expand with additional random aspects + List<MetadataChangeProposal> additionalMCPs = new LinkedList<>(); + + for (String aspectName : aspects) { AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - throw new IllegalStateException("Aspect " + aspectName + " not found for entity " + entityName); + throw new IllegalStateException( + "Aspect " + aspectName + " not found for entity " + entityName); } - RecordTemplate aspect = randomAspectGenerators.getOrDefault(aspectName, - DataGenerator::defaultRandomAspect).apply(entitySpec, aspectSpec); + RecordTemplate aspect = + randomAspectGenerators + .getOrDefault(aspectName, DataGenerator::defaultRandomAspect) + .apply(entitySpec, aspectSpec); // Maybe generate nested entities at the same time, like globalTags/glossaryTerms - List<MetadataChangeProposal> secondaryEntities = nestedRandomAspectGenerators.getOrDefault(aspectSpec.getName(), - (a, c) -> List.of()).apply(aspect, 5).stream() - .filter(secondaryMCP -> { - if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { + List<MetadataChangeProposal> secondaryEntities = + nestedRandomAspectGenerators + .getOrDefault(aspectSpec.getName(), (a, c) -> List.of()) + .apply(aspect, 5) + .stream() + .filter( + secondaryMCP -> { + if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { secondaryUrns.add(secondaryMCP.getEntityUrn()); return true; - } - return false; - }) + } + return false; + }) .collect(Collectors.toList()); additionalMCPs.addAll(secondaryEntities); @@ -123,254 +139,327 @@ public Stream<List<MetadataChangeProposal>> generateMCPs(String entityName, long additionalMCP.setChangeType(ChangeType.UPSERT); additionalMCPs.add(additionalMCP); - } - - return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); - }).map(mcp -> { - // Expand with default aspects per normal - return Stream.concat(Stream.of(mcp), - AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) - .collect(Collectors.toList()); - }); - } - - public static Map<String, BiFunction<EntitySpec, AspectSpec, ? extends RecordTemplate>> randomAspectGenerators = Map.of( - "glossaryTermInfo", (e, a) -> { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); + } + + return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); + }) + .map( + mcp -> { + // Expand with default aspects per normal + return Stream.concat( + Stream.of(mcp), + AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) + .collect(Collectors.toList()); + }); + } + + public static Map<String, BiFunction<EntitySpec, AspectSpec, ? 
extends RecordTemplate>> + randomAspectGenerators = + Map.of( + "glossaryTermInfo", + (e, a) -> { + GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); glossaryTermInfo.setName(normalize(FAKER.company().buzzword())); return glossaryTermInfo; - } - ); - - public Map<String, BiFunction<RecordTemplate, Integer, List<MetadataChangeProposal>>> nestedRandomAspectGenerators = Map.of( - "globalTags", (aspect, count) -> { - try { - List<MetadataChangeProposal> tags = generateTags(count); - Method setTagsMethod = aspect.getClass().getMethod("setTags", TagAssociationArray.class); - TagAssociationArray tagAssociations = new TagAssociationArray(); - tagAssociations.addAll(tags.stream().map( - tagMCP -> { - try { - return new TagAssociation().setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList())); - setTagsMethod.invoke(aspect, tagAssociations); - return tags; - } catch (Exception e) { - throw new RuntimeException(e); - } - }, - "glossaryTerms", (aspect, count) -> { - try { - List<MetadataChangeProposal> terms = generateMCPs("glossaryTerm", count, - List.of("glossaryTermInfo")) - .map(mcps -> mcps.get(0)) - .collect(Collectors.toList()); - Method setTermsMethod = aspect.getClass().getMethod("setTerms", GlossaryTermAssociationArray.class); - GlossaryTermAssociationArray termAssociations = new GlossaryTermAssociationArray(); - termAssociations.addAll(terms.stream().map( - termMCP -> { - try { - return new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(termMCP.getEntityUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList())); - setTermsMethod.invoke(aspect, termAssociations); - return terms; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - ); - - private static RecordTemplate defaultRandomAspect(@Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { - Class<RecordTemplate> aspectClass = aspectSpec.getDataTemplateClass(); - try { - Object aspect = aspectClass.getDeclaredConstructor().newInstance(); - - List<Method> booleanMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == Boolean.class) - .collect(Collectors.toList()); - - for (Method boolMethod : booleanMethods) { - boolMethod.invoke(aspect, FAKER.random().nextBoolean()); - } - - List<Method> stringMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == String.class) - .collect(Collectors.toList()); - - for (Method stringMethod : stringMethods) { - String value; - switch (aspectSpec.getName() + "_" + stringMethod.getName()) { - default: - value = FAKER.lorem().characters(8, 16, false); - break; - } - - // global - if (stringMethod.getName().toLowerCase().contains("description") - || stringMethod.getName().toLowerCase().contains("definition")) { - value = FAKER.lorem().paragraph(); - } + }); + + public Map<String, BiFunction<RecordTemplate, Integer, List<MetadataChangeProposal>>> + nestedRandomAspectGenerators = + Map.of( + "globalTags", + (aspect, count) -> { + try { + List<MetadataChangeProposal> tags = generateTags(count); + Method setTagsMethod = + aspect.getClass().getMethod("setTags", TagAssociationArray.class); + TagAssociationArray tagAssociations = new TagAssociationArray(); + 
tagAssociations.addAll( + tags.stream() + .map( + tagMCP -> { + try { + return new TagAssociation() + .setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList())); + setTagsMethod.invoke(aspect, tagAssociations); + return tags; + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "glossaryTerms", + (aspect, count) -> { + try { + List<MetadataChangeProposal> terms = + generateMCPs("glossaryTerm", count, List.of("glossaryTermInfo")) + .map(mcps -> mcps.get(0)) + .collect(Collectors.toList()); + Method setTermsMethod = + aspect + .getClass() + .getMethod("setTerms", GlossaryTermAssociationArray.class); + GlossaryTermAssociationArray termAssociations = + new GlossaryTermAssociationArray(); + termAssociations.addAll( + terms.stream() + .map( + termMCP -> { + try { + return new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + termMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList())); + setTermsMethod.invoke(aspect, termAssociations); + return terms; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + private static RecordTemplate defaultRandomAspect( + @Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { + Class<RecordTemplate> aspectClass = aspectSpec.getDataTemplateClass(); + try { + Object aspect = aspectClass.getDeclaredConstructor().newInstance(); + + List<Method> booleanMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Boolean.class) + .collect(Collectors.toList()); + + for (Method boolMethod : booleanMethods) { + boolMethod.invoke(aspect, FAKER.random().nextBoolean()); + } + + List<Method> stringMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + for (Method stringMethod : stringMethods) { + String value; + switch (aspectSpec.getName() + "_" + stringMethod.getName()) { + default: + value = FAKER.lorem().characters(8, 16, false); + break; + } - stringMethod.invoke(aspect, value); - } - - List<Method> enumMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0].isEnum()) - .collect(Collectors.toList()); - - for (Method enumMethod : enumMethods) { - Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); - // Excluding $UNKNOWNs - enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); - } - - // auditStamp - Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == AuditStamp.class) - .findFirst().ifPresent(auditStampMethod -> { - try { - AuditStamp auditStamp = new AuditStamp() - .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) - .setTime(System.currentTimeMillis()); - auditStampMethod.invoke(aspect, auditStamp); - } catch (URISyntaxException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } - }); - - return aspectClass.cast(aspect); - } catch (Exception e) { - throw new RuntimeException(e); + // global + if (stringMethod.getName().toLowerCase().contains("description") + 
|| stringMethod.getName().toLowerCase().contains("definition")) { + value = FAKER.lorem().paragraph(); } - } - private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { - Class<RecordTemplate> keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); - try { - Object key = keyClass.getDeclaredConstructor().newInstance(); - - List<Method> stringMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == String.class) - .collect(Collectors.toList()); - - switch (entitySpec.getName()) { - case "tag": - stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); - break; - case "glossaryTerm": - stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); - break; - case "container": - stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); - break; - default: - switch (stringMethods.size()) { - case 1: - stringMethods.get(0).invoke(key, String.join(".", multiName(3))); - break; - case 2: - Cat cat = FAKER.cat(); - stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); - stringMethods.get(1).invoke(key, cat.name().toLowerCase()); - break; - default: - Animal animal = FAKER.animal(); - stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); - stringMethods.get(1).invoke(key, animal.species().toLowerCase()); - stringMethods.get(2).invoke(key, animal.name().toLowerCase()); - break; - } - break; - } - - List<Method> urnMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == Urn.class) - .collect(Collectors.toList()); - - for (Method urnMethod : urnMethods) { - switch (entitySpec.getName()) { - case "dataset": - urnMethod.invoke(key, randomUrnLowerCase("dataPlatform", - List.of(randomDataPlatform()))); - break; - default: - throw new NotImplementedException(entitySpec.getName()); + stringMethod.invoke(aspect, value); + } + + List<Method> enumMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + // auditStamp + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == AuditStamp.class) + .findFirst() + .ifPresent( + auditStampMethod -> { + try { + AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) + .setTime(System.currentTimeMillis()); + auditStampMethod.invoke(aspect, auditStamp); + } catch (URISyntaxException + | IllegalAccessException + | InvocationTargetException e) { + throw new RuntimeException(e); } - } - - List<Method> enumMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0].isEnum()) - .collect(Collectors.toList()); - - for (Method enumMethod : enumMethods) { - Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); - // Excluding $UNKNOWNs - enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); - } - - return 
keyClass.cast(key); - } catch (Exception e) { - throw new RuntimeException(e); - } - } + }); - private static List<String> multiName(int size) { - switch (size) { + return aspectClass.cast(aspect); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { + Class<RecordTemplate> keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); + try { + Object key = keyClass.getDeclaredConstructor().newInstance(); + + List<Method> stringMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + switch (entitySpec.getName()) { + case "tag": + stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); + break; + case "glossaryTerm": + stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); + break; + case "container": + stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); + break; + default: + switch (stringMethods.size()) { case 1: - return Stream.of(FAKER.marketing().buzzwords()) - .map(String::toLowerCase).collect(Collectors.toList()); + stringMethods.get(0).invoke(key, String.join(".", multiName(3))); + break; case 2: - Cat cat = FAKER.cat(); - return Stream.of(cat.breed(), cat.name()) - .map(String::toLowerCase).collect(Collectors.toList()); - case 3: - Animal animal = FAKER.animal(); - return Stream.of(animal.genus(), animal.species(), animal.name()) - .map(String::toLowerCase).collect(Collectors.toList()); + Cat cat = FAKER.cat(); + stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); + stringMethods.get(1).invoke(key, cat.name().toLowerCase()); + break; default: - return IntStream.range(0, size).mapToObj(i -> FAKER.expression("#{numerify 'test####'}")).collect(Collectors.toList()); + Animal animal = FAKER.animal(); + stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); + stringMethods.get(1).invoke(key, animal.species().toLowerCase()); + stringMethods.get(2).invoke(key, animal.name().toLowerCase()); + break; + } + break; + } + + List<Method> urnMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Urn.class) + .collect(Collectors.toList()); + + for (Method urnMethod : urnMethods) { + switch (entitySpec.getName()) { + case "dataset": + urnMethod.invoke( + key, randomUrnLowerCase("dataPlatform", List.of(randomDataPlatform()))); + break; + default: + throw new NotImplementedException(entitySpec.getName()); } + } + + List<Method> enumMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + return keyClass.cast(key); + } catch (Exception e) { + throw new RuntimeException(e); } - - private static Urn randomUrnLowerCase(String entityType, List<String> tuple) { - return Urn.createFromTuple(entityType, - tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); - } - - private static String normalize(String input) { - return input.toLowerCase().replaceAll("\\W+", 
"_"); - } - - private static String randomDataPlatform() { - String[] platforms = { - "ambry", "bigquery", "couchbase", "druid", "external", "feast", "glue", "hdfs", "hive", "kafka", "kusto", - "looker", "mongodb", "mssql", "mysql", "oracle", "pinot", "postgres", "presto", "redshift", "s3", - "sagemaker", "snowflake", "teradata", "voldemort" - }; - - return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } + + private static List<String> multiName(int size) { + switch (size) { + case 1: + return Stream.of(FAKER.marketing().buzzwords()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + case 2: + Cat cat = FAKER.cat(); + return Stream.of(cat.breed(), cat.name()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + case 3: + Animal animal = FAKER.animal(); + return Stream.of(animal.genus(), animal.species(), animal.name()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + default: + return IntStream.range(0, size) + .mapToObj(i -> FAKER.expression("#{numerify 'test####'}")) + .collect(Collectors.toList()); } + } + + private static Urn randomUrnLowerCase(String entityType, List<String> tuple) { + return Urn.createFromTuple( + entityType, tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); + } + + private static String normalize(String input) { + return input.toLowerCase().replaceAll("\\W+", "_"); + } + + private static String randomDataPlatform() { + String[] platforms = { + "ambry", + "bigquery", + "couchbase", + "druid", + "external", + "feast", + "glue", + "hdfs", + "hive", + "kafka", + "kusto", + "looker", + "mongodb", + "mssql", + "mysql", + "oracle", + "pinot", + "postgres", + "presto", + "redshift", + "s3", + "sagemaker", + "snowflake", + "teradata", + "voldemort" + }; + + return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java index 18fbf86f8668d..ff14b91a72c7f 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java @@ -1,5 +1,10 @@ package io.datahubproject.test.fixtures.search; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; import lombok.Builder; import lombok.NonNull; import org.opensearch.action.search.SearchRequest; @@ -11,56 +16,50 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import java.io.IOException; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - @Builder public class EntityExporter { - @NonNull - private RestHighLevelClient client; - @Builder.Default - private int fetchSize = 3000; - @NonNull - private FixtureWriter writer; - @NonNull - private String fixtureName; - @Builder.Default - private String sourceIndexPrefix = ""; - @Builder.Default - private String sourceIndexSuffix = "index_v2"; - @Builder.Default - private Set<String> indexEntities = SEARCHABLE_ENTITY_TYPES.stream() - .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) - .collect(Collectors.toSet()); - + @NonNull private RestHighLevelClient client; + @Builder.Default private int 
fetchSize = 3000; + @NonNull private FixtureWriter writer; + @NonNull private String fixtureName; + @Builder.Default private String sourceIndexPrefix = ""; + @Builder.Default private String sourceIndexSuffix = "index_v2"; - public void export() throws IOException { - Set<String> searchIndexSuffixes = indexEntities.stream() - .map(entityName -> entityName + sourceIndexSuffix) - .collect(Collectors.toSet()); + @Builder.Default + private Set<String> indexEntities = + SEARCHABLE_ENTITY_TYPES.stream() + .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) + .collect(Collectors.toSet()); - // Fetch indices - GetMappingsResponse response = client.indices().getMapping(new GetMappingsRequest().indices("*"), - RequestOptions.DEFAULT); + public void export() throws IOException { + Set<String> searchIndexSuffixes = + indexEntities.stream() + .map(entityName -> entityName + sourceIndexSuffix) + .collect(Collectors.toSet()); - response.mappings().keySet().stream() - .filter(index -> searchIndexSuffixes.stream().anyMatch(index::contains) - && index.startsWith(sourceIndexPrefix)) - .map(index -> index.split(sourceIndexSuffix, 2)[0] + sourceIndexSuffix) - .forEach(indexName -> { + // Fetch indices + GetMappingsResponse response = + client.indices().getMapping(new GetMappingsRequest().indices("*"), RequestOptions.DEFAULT); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + response.mappings().keySet().stream() + .filter( + index -> + searchIndexSuffixes.stream().anyMatch(index::contains) + && index.startsWith(sourceIndexPrefix)) + .map(index -> index.split(sourceIndexSuffix, 2)[0] + sourceIndexSuffix) + .forEach( + indexName -> { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(searchSourceBuilder); + SearchRequest searchRequest = new SearchRequest(indexName); + searchRequest.source(searchSourceBuilder); - String outputPath = String.format("%s/%s.json", fixtureName, indexName.replaceFirst(sourceIndexPrefix, "")); - writer.write(searchRequest, outputPath, false); - }); - } + String outputPath = + String.format( + "%s/%s.json", fixtureName, indexName.replaceFirst(sourceIndexPrefix, "")); + writer.write(searchRequest, outputPath, false); + }); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java index 1b804a2346883..dff6b7ab5a898 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java @@ -1,13 +1,9 @@ package io.datahubproject.test.fixtures.search; +import static io.datahubproject.test.fixtures.search.SearchFixtureUtils.OBJECT_MAPPER; + import com.fasterxml.jackson.core.JsonProcessingException; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; -import lombok.Builder; -import lombok.NonNull; -import org.apache.commons.io.FilenameUtils; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.common.xcontent.XContentType; - import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.Closeable; @@ -22,105 
+18,113 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; - -import static io.datahubproject.test.fixtures.search.SearchFixtureUtils.OBJECT_MAPPER; +import lombok.Builder; +import lombok.NonNull; +import org.apache.commons.io.FilenameUtils; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.common.xcontent.XContentType; @Builder public class FixtureReader { - @Builder.Default - private String inputBase = SearchFixtureUtils.FIXTURE_BASE; - @NonNull - private ESBulkProcessor bulkProcessor; - @NonNull - private String fixtureName; - @Builder.Default - private String targetIndexPrefix = ""; + @Builder.Default private String inputBase = SearchFixtureUtils.FIXTURE_BASE; + @NonNull private ESBulkProcessor bulkProcessor; + @NonNull private String fixtureName; + @Builder.Default private String targetIndexPrefix = ""; - private long refreshIntervalSeconds; + private long refreshIntervalSeconds; - public Set<String> read() throws IOException { - try (Stream<Path> files = Files.list(Paths.get(String.format("%s/%s", inputBase, fixtureName)))) { - return files.map(file -> { + public Set<String> read() throws IOException { + try (Stream<Path> files = + Files.list(Paths.get(String.format("%s/%s", inputBase, fixtureName)))) { + return files + .map( + file -> { String absolutePath = file.toAbsolutePath().toString(); - String indexName = String.format("%s_%s", targetIndexPrefix, FilenameUtils.getBaseName(absolutePath).split("[.]", 2)[0]); + String indexName = + String.format( + "%s_%s", + targetIndexPrefix, + FilenameUtils.getBaseName(absolutePath).split("[.]", 2)[0]); try (Stream<String> lines = getLines(absolutePath)) { - lines.forEach(line -> { + lines.forEach( + line -> { try { - UrnDocument doc = OBJECT_MAPPER.readValue(line, UrnDocument.class); - IndexRequest request = new IndexRequest(indexName) - .id(doc.urn) - .source(line.getBytes(), XContentType.JSON); + UrnDocument doc = OBJECT_MAPPER.readValue(line, UrnDocument.class); + IndexRequest request = + new IndexRequest(indexName) + .id(doc.urn) + .source(line.getBytes(), XContentType.JSON); - bulkProcessor.add(request); + bulkProcessor.add(request); } catch (JsonProcessingException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); + }); } catch (IOException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } return indexName; - }).collect(Collectors.toSet()); - } finally { - bulkProcessor.flush(); - try { - Thread.sleep(1000 * refreshIntervalSeconds); - } catch (InterruptedException ignored) { - } - } + }) + .collect(Collectors.toSet()); + } finally { + bulkProcessor.flush(); + try { + Thread.sleep(1000 * refreshIntervalSeconds); + } catch (InterruptedException ignored) { + } } + } - private Stream<String> getLines(String path) throws IOException { - if (FilenameUtils.getExtension(path).equals("gz")) { - return GZIPFiles.lines(Paths.get(path)); - } else { - return Files.lines(Paths.get(path)); - } + private Stream<String> getLines(String path) throws IOException { + if (FilenameUtils.getExtension(path).equals("gz")) { + return GZIPFiles.lines(Paths.get(path)); + } else { + return Files.lines(Paths.get(path)); } + } - public static class GZIPFiles { - /** - * Get a lazily loaded stream of lines from a gzipped file, similar to - * {@link Files#lines(java.nio.file.Path)}. - * - * @param path - * The path to the gzipped file. - * @return stream with lines. 
- */ - public static Stream<String> lines(Path path) { - InputStream fileIs = null; - BufferedInputStream bufferedIs = null; - GZIPInputStream gzipIs = null; - try { - fileIs = Files.newInputStream(path); - // Even though GZIPInputStream has a buffer it reads individual bytes - // when processing the header, better add a buffer in-between - bufferedIs = new BufferedInputStream(fileIs, 65535); - gzipIs = new GZIPInputStream(bufferedIs); - } catch (IOException e) { - closeSafely(gzipIs); - closeSafely(bufferedIs); - closeSafely(fileIs); - throw new UncheckedIOException(e); - } - BufferedReader reader = new BufferedReader(new InputStreamReader(gzipIs)); - return reader.lines().onClose(() -> closeSafely(reader)); - } + public static class GZIPFiles { + /** + * Get a lazily loaded stream of lines from a gzipped file, similar to {@link + * Files#lines(java.nio.file.Path)}. + * + * @param path The path to the gzipped file. + * @return stream with lines. + */ + public static Stream<String> lines(Path path) { + InputStream fileIs = null; + BufferedInputStream bufferedIs = null; + GZIPInputStream gzipIs = null; + try { + fileIs = Files.newInputStream(path); + // Even though GZIPInputStream has a buffer it reads individual bytes + // when processing the header, better add a buffer in-between + bufferedIs = new BufferedInputStream(fileIs, 65535); + gzipIs = new GZIPInputStream(bufferedIs); + } catch (IOException e) { + closeSafely(gzipIs); + closeSafely(bufferedIs); + closeSafely(fileIs); + throw new UncheckedIOException(e); + } + BufferedReader reader = new BufferedReader(new InputStreamReader(gzipIs)); + return reader.lines().onClose(() -> closeSafely(reader)); + } - private static void closeSafely(Closeable closeable) { - if (closeable != null) { - try { - closeable.close(); - } catch (IOException e) { - // Ignore - } - } + private static void closeSafely(Closeable closeable) { + if (closeable != null) { + try { + closeable.close(); + } catch (IOException e) { + // Ignore } + } } + } - public static class UrnDocument { - public String urn; - } + public static class UrnDocument { + public String urn; + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java index 0aefa006421fc..8a11de6c513a3 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java @@ -1,6 +1,11 @@ package io.datahubproject.test.fixtures.search; import com.fasterxml.jackson.core.JsonProcessingException; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; import lombok.Builder; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; @@ -9,70 +14,72 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; -import javax.annotation.Nullable; -import java.io.BufferedWriter; -import java.io.FileWriter; -import java.io.IOException; -import java.util.function.BiConsumer; - -/** - * - */ +/** */ @Builder public class FixtureWriter { - private RestHighLevelClient client; + private RestHighLevelClient client; - @Builder.Default - private String outputBase = SearchFixtureUtils.FIXTURE_BASE; + @Builder.Default private String outputBase = SearchFixtureUtils.FIXTURE_BASE; - public void write(SearchRequest 
searchRequest, String relativeOutput, boolean append) { - write(searchRequest, relativeOutput, append, null, null, null); - } + public void write(SearchRequest searchRequest, String relativeOutput, boolean append) { + write(searchRequest, relativeOutput, append, null, null, null); + } - public <O, C> void write(SearchRequest searchRequest, String relativeOutput, boolean append, - @Nullable Class<O> outputType, Class<C> callbackType, BiConsumer<SearchHit, C> callback) { - try { - SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); - SearchHits hits = searchResponse.getHits(); - long remainingHits = hits.getTotalHits().value; + public <O, C> void write( + SearchRequest searchRequest, + String relativeOutput, + boolean append, + @Nullable Class<O> outputType, + Class<C> callbackType, + BiConsumer<SearchHit, C> callback) { + try { + SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); + SearchHits hits = searchResponse.getHits(); + long remainingHits = hits.getTotalHits().value; - if (remainingHits > 0) { - try (FileWriter writer = new FileWriter(String.format("%s/%s", outputBase, relativeOutput), append); - BufferedWriter bw = new BufferedWriter(writer)) { + if (remainingHits > 0) { + try (FileWriter writer = + new FileWriter(String.format("%s/%s", outputBase, relativeOutput), append); + BufferedWriter bw = new BufferedWriter(writer)) { - while (remainingHits > 0) { - SearchHit lastHit = null; - for (SearchHit hit : hits.getHits()) { - lastHit = hit; - remainingHits -= 1; + while (remainingHits > 0) { + SearchHit lastHit = null; + for (SearchHit hit : hits.getHits()) { + lastHit = hit; + remainingHits -= 1; - try { - if (outputType == null) { - bw.write(hit.getSourceAsString()); - } else { - O doc = SearchFixtureUtils.OBJECT_MAPPER.readValue(hit.getSourceAsString(), outputType); - bw.write(SearchFixtureUtils.OBJECT_MAPPER.writeValueAsString(doc)); - } - bw.newLine(); + try { + if (outputType == null) { + bw.write(hit.getSourceAsString()); + } else { + O doc = + SearchFixtureUtils.OBJECT_MAPPER.readValue( + hit.getSourceAsString(), outputType); + bw.write(SearchFixtureUtils.OBJECT_MAPPER.writeValueAsString(doc)); + } + bw.newLine(); - // Fire callback - if (callback != null) { - callback.accept(hit, SearchFixtureUtils.OBJECT_MAPPER.readValue(hit.getSourceAsString(), callbackType)); - } - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - if (lastHit != null) { - searchRequest.source().searchAfter(lastHit.getSortValues()); - hits = client.search(searchRequest, RequestOptions.DEFAULT).getHits(); - } - } + // Fire callback + if (callback != null) { + callback.accept( + hit, + SearchFixtureUtils.OBJECT_MAPPER.readValue( + hit.getSourceAsString(), callbackType)); } + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + if (lastHit != null) { + searchRequest.source().searchAfter(lastHit.getSortValues()); + hits = client.search(searchRequest, RequestOptions.DEFAULT).getHits(); } - } catch (IOException e) { - throw new RuntimeException(e); + } } + } + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java index 5db07ee6fb8bc..4b7d81aa04416 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java +++ 
b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java @@ -1,15 +1,6 @@ package io.datahubproject.test.fixtures.search; import com.google.common.collect.Lists; -import lombok.Builder; -import lombok.NonNull; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.search.sort.SortBuilders; -import org.opensearch.search.sort.SortOrder; - import java.net.URLDecoder; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; @@ -20,174 +11,210 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.Builder; +import lombok.NonNull; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.sort.SortBuilders; +import org.opensearch.search.sort.SortOrder; @Builder public class LineageExporter<O> { - @Builder.Default - private int fetchSize = 3000; - @Builder.Default - private int queryStatementSize = 32000; - @NonNull - private FixtureWriter writer; - private String entityIndexName; - - private String graphIndexName; - - private String entityOutputPath; - private String graphOutputPath; - - private Class<O> anonymizerClazz; - - - private static String idToUrn(String id) { - return URLDecoder.decode(id, StandardCharsets.UTF_8); + @Builder.Default private int fetchSize = 3000; + @Builder.Default private int queryStatementSize = 32000; + @NonNull private FixtureWriter writer; + private String entityIndexName; + + private String graphIndexName; + + private String entityOutputPath; + private String graphOutputPath; + + private Class<O> anonymizerClazz; + + private static String idToUrn(String id) { + return URLDecoder.decode(id, StandardCharsets.UTF_8); + } + + public <O> void export(Set<String> ids) { + if (entityIndexName != null) { + assert (entityOutputPath != null); + exportEntityIndex( + ids.stream() + .map(id -> URLEncoder.encode(id, StandardCharsets.UTF_8)) + .collect(Collectors.toSet()), + new HashSet<>(), + 0); } - - public <O> void export(Set<String> ids) { - if (entityIndexName != null) { - assert (entityOutputPath != null); - exportEntityIndex(ids.stream().map(id -> URLEncoder.encode(id, StandardCharsets.UTF_8)).collect(Collectors.toSet()), - new HashSet<>(), 0); - } - if (graphIndexName != null) { - assert (graphOutputPath != null); - exportGraphIndex(ids, new HashSet<>(), new HashSet<>(), 0); - } + if (graphIndexName != null) { + assert (graphOutputPath != null); + exportGraphIndex(ids, new HashSet<>(), new HashSet<>(), 0); } - - public void exportGraphIndex(Set<String> urns, Set<String> visitedUrns, Set<String> visitedIds, int hops) { - Set<String> nextIds = new HashSet<>(); - if (!urns.isEmpty()) { - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - - boolQueryBuilder.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf")); - - Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize).forEach(batch -> { - boolQueryBuilder.should(QueryBuilders.termsQuery("source.urn", batch.toArray(String[]::new))); - boolQueryBuilder.should(QueryBuilders.termsQuery("destination.urn", batch.toArray(String[]::new))); - }); - boolQueryBuilder.minimumShouldMatch(1); - - // Exclude visited - 
Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.mustNot(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.query(boolQueryBuilder); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - - SearchRequest searchRequest = new SearchRequest(graphIndexName); - searchRequest.source(searchSourceBuilder); - - Set<String> docIds = new HashSet<>(); - Set<GraphDocument> docs = new HashSet<>(); - - long startTime = System.currentTimeMillis(); - System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, urns.size(), visitedUrns.size()); - - writer.write(searchRequest, graphOutputPath, hops != 0, anonymizerClazz, - GraphDocument.class, (hit, doc) -> { - docIds.add(hit.getId()); - docs.add(doc); - }); - - long endTime = System.currentTimeMillis(); - System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); - - visitedIds.addAll(docIds); - visitedUrns.addAll(urns); - - Set<String> discoveredUrns = docs.stream().flatMap(d -> Stream.of(d.destination.urn, d.source.urn)) - .filter(Objects::nonNull) - .filter(urn -> !visitedUrns.contains(urn)) - .collect(Collectors.toSet()); - - nextIds.addAll(discoveredUrns); - } - - if (!nextIds.isEmpty()) { - exportGraphIndex(nextIds, visitedUrns, visitedIds, hops + 1); - } + } + + public void exportGraphIndex( + Set<String> urns, Set<String> visitedUrns, Set<String> visitedIds, int hops) { + Set<String> nextIds = new HashSet<>(); + if (!urns.isEmpty()) { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + boolQueryBuilder.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf")); + + Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> { + boolQueryBuilder.should( + QueryBuilders.termsQuery("source.urn", batch.toArray(String[]::new))); + boolQueryBuilder.should( + QueryBuilders.termsQuery("destination.urn", batch.toArray(String[]::new))); + }); + boolQueryBuilder.minimumShouldMatch(1); + + // Exclude visited + Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.mustNot( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.query(boolQueryBuilder); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + + SearchRequest searchRequest = new SearchRequest(graphIndexName); + searchRequest.source(searchSourceBuilder); + + Set<String> docIds = new HashSet<>(); + Set<GraphDocument> docs = new HashSet<>(); + + long startTime = System.currentTimeMillis(); + System.out.printf( + "Hops: %s (Ids: %s) [VisitedIds: %s]", hops, urns.size(), visitedUrns.size()); + + writer.write( + searchRequest, + graphOutputPath, + hops != 0, + anonymizerClazz, + GraphDocument.class, + (hit, doc) -> { + docIds.add(hit.getId()); + docs.add(doc); + }); + + long endTime = System.currentTimeMillis(); + System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); + + visitedIds.addAll(docIds); + visitedUrns.addAll(urns); + + Set<String> discoveredUrns = + docs.stream() + .flatMap(d -> Stream.of(d.destination.urn, d.source.urn)) + .filter(Objects::nonNull) + .filter(urn -> !visitedUrns.contains(urn)) + 
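+              // Frontier for the next BFS hop: every DownstreamOf edge endpoint not yet
+              // visited. Once this set comes back empty, the recursion terminates.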
.collect(Collectors.toSet()); + + nextIds.addAll(discoveredUrns); } - public void exportEntityIndex(Set<String> ids, Set<String> visitedIds, int hops) { - Set<String> nextIds = new HashSet<>(); - - if (!ids.isEmpty()) { - Set<String> urns = ids.stream().map(LineageExporter::idToUrn).collect(Collectors.toSet()); - - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - - Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.should(QueryBuilders.termsQuery("upstreams.keyword", batch.toArray(String[]::new))) - ); - Lists.partition(Arrays.asList(ids.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.should(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - boolQueryBuilder.minimumShouldMatch(1); - - // Exclude visited - Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.mustNot(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.query(boolQueryBuilder); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - - SearchRequest searchRequest = new SearchRequest(entityIndexName); - searchRequest.source(searchSourceBuilder); - - Set<String> docIds = new HashSet<>(); - Set<UrnDocument> docs = new HashSet<>(); - - long startTime = System.currentTimeMillis(); - System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, ids.size(), visitedIds.size()); - - writer.write(searchRequest, entityOutputPath, hops != 0, anonymizerClazz, - UrnDocument.class, (hit, doc) -> { - docIds.add(hit.getId()); - docs.add(doc); - }); - - long endTime = System.currentTimeMillis(); - System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); - - visitedIds.addAll(docIds); - - nextIds.addAll(docIds.stream() - .filter(Objects::nonNull) - .filter(docId -> !visitedIds.contains(docId)) - .collect(Collectors.toSet())); - nextIds.addAll(docs.stream() - .filter(doc -> doc.upstreams != null && !doc.upstreams.isEmpty()) - .flatMap(doc -> doc.upstreams.stream()) - .map(urn -> URLEncoder.encode(urn, StandardCharsets.UTF_8)) - .filter(docId -> !visitedIds.contains(docId)) - .collect(Collectors.toSet())); - } - - if (!nextIds.isEmpty()) { - exportEntityIndex(nextIds, visitedIds, hops + 1); - } + if (!nextIds.isEmpty()) { + exportGraphIndex(nextIds, visitedUrns, visitedIds, hops + 1); + } + } + + public void exportEntityIndex(Set<String> ids, Set<String> visitedIds, int hops) { + Set<String> nextIds = new HashSet<>(); + + if (!ids.isEmpty()) { + Set<String> urns = ids.stream().map(LineageExporter::idToUrn).collect(Collectors.toSet()); + + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.should( + QueryBuilders.termsQuery("upstreams.keyword", batch.toArray(String[]::new)))); + Lists.partition(Arrays.asList(ids.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.should( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + boolQueryBuilder.minimumShouldMatch(1); + + // Exclude visited + Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.mustNot( + 
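+                      // Rough shape of the bool query being assembled here (sketch):
+                      //   { "bool": { "should":   [ {"terms": {"upstreams.keyword": [...]}},
+                      //                             {"ids":   {"values": [...ids...]}} ],
+                      //               "must_not": [ {"ids":   {"values": [...visited...]}} ],
+                      //               "minimum_should_match": 1 } }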
QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.query(boolQueryBuilder); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + + SearchRequest searchRequest = new SearchRequest(entityIndexName); + searchRequest.source(searchSourceBuilder); + + Set<String> docIds = new HashSet<>(); + Set<UrnDocument> docs = new HashSet<>(); + + long startTime = System.currentTimeMillis(); + System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, ids.size(), visitedIds.size()); + + writer.write( + searchRequest, + entityOutputPath, + hops != 0, + anonymizerClazz, + UrnDocument.class, + (hit, doc) -> { + docIds.add(hit.getId()); + docs.add(doc); + }); + + long endTime = System.currentTimeMillis(); + System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); + + visitedIds.addAll(docIds); + + nextIds.addAll( + docIds.stream() + .filter(Objects::nonNull) + .filter(docId -> !visitedIds.contains(docId)) + .collect(Collectors.toSet())); + nextIds.addAll( + docs.stream() + .filter(doc -> doc.upstreams != null && !doc.upstreams.isEmpty()) + .flatMap(doc -> doc.upstreams.stream()) + .map(urn -> URLEncoder.encode(urn, StandardCharsets.UTF_8)) + .filter(docId -> !visitedIds.contains(docId)) + .collect(Collectors.toSet())); } - public static class UrnDocument { - public String urn; - public List<String> upstreams; + if (!nextIds.isEmpty()) { + exportEntityIndex(nextIds, visitedIds, hops + 1); } + } + + public static class UrnDocument { + public String urn; + public List<String> upstreams; + } - public static class GraphDocument { - public String relationshipType; - public GraphNode source; - public GraphNode destination; + public static class GraphDocument { + public String relationshipType; + public GraphNode source; + public GraphNode destination; - public static class GraphNode { - public String urn; - public String entityType; - } + public static class GraphNode { + public String urn; + public String entityType; } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 45bbd912bc794..14e5259f90097 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -1,15 +1,20 @@ package io.datahubproject.test.fixtures.search; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import static com.linkedin.metadata.Constants.*; +import static io.datahubproject.test.search.config.SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import 
com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.EntityAspectIdentifier; @@ -31,8 +36,11 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; - +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import java.io.IOException; +import java.util.Map; import java.util.Optional; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -42,244 +50,245 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static io.datahubproject.test.search.config.SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS; -import static org.mockito.ArgumentMatchers.anySet; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration @Import(SearchCommonTestConfiguration.class) public class SampleDataFixtureConfiguration { - /** - * Interested in adding more fixtures? Here's what you will need to update? - * 1. Create a new indexPrefix and FixtureName. Both are needed or else all fixtures will load on top of each other, - * overwriting each other - * 2. Create a new IndexConvention, IndexBuilder, and EntityClient. These are needed - * to index a different set of entities. 
- */ - - @Autowired - private ESBulkProcessor _bulkProcessor; - - @Autowired - private RestHighLevelClient _searchClient; - - @Autowired - private RestHighLevelClient _longTailSearchClient; - - @Autowired - private SearchConfiguration _searchConfiguration; - - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; - - @Bean(name = "sampleDataPrefix") - protected String sampleDataPrefix() { - return "smpldat"; - } - - @Bean(name = "longTailPrefix") - protected String longTailIndexPrefix() { - return "lngtl"; - } - - @Bean(name = "sampleDataIndexConvention") - protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "longTailIndexConvention") - protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "sampleDataFixtureName") - protected String sampleDataFixtureName() { - return "sample_data"; - } - - @Bean(name = "longTailFixtureName") - protected String longTailFixtureName() { - return "long_tail"; - } - - @Bean(name = "sampleDataEntityIndexBuilders") - protected EntityIndexBuilders entityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention - ) { - return entityIndexBuildersHelper(entityRegistry, indexConvention); - } - - @Bean(name = "longTailEntityIndexBuilders") - protected EntityIndexBuilders longTailEntityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailIndexConvention") IndexConvention indexConvention - ) { - return entityIndexBuildersHelper(longTailEntityRegistry, indexConvention); - } - - protected EntityIndexBuilders entityIndexBuildersHelper( - EntityRegistry entityRegistry, - IndexConvention indexConvention - ) { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder indexBuilder = new ESIndexBuilder(_searchClient, 1, 0, 1, - 1, Map.of(), true, false, - new ElasticSearchConfiguration(), gitVersion); - SettingsBuilder settingsBuilder = new SettingsBuilder(null); - return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); - } - - @Bean(name = "sampleDataEntitySearchService") - protected ElasticSearchService entitySearchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention - ) throws IOException { - return entitySearchServiceHelper(entityRegistry, indexBuilders, indexConvention); - } - - @Bean(name = "longTailEntitySearchService") - protected ElasticSearchService longTailEntitySearchService( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailEndexBuilders, - @Qualifier("longTailIndexConvention") IndexConvention longTailIndexConvention - ) throws IOException { - return entitySearchServiceHelper(longTailEntityRegistry, longTailEndexBuilders, longTailIndexConvention); - } - - protected ElasticSearchService entitySearchServiceHelper( - EntityRegistry entityRegistry, - EntityIndexBuilders indexBuilders, - IndexConvention indexConvention - ) throws IOException { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - 
customConfiguration.setFile("search_config_fixture_test.yml"); - CustomSearchConfiguration customSearchConfiguration = customConfiguration.resolve(new YAMLMapper()); - - ESSearchDAO searchDAO = new ESSearchDAO(entityRegistry, _searchClient, indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, _searchConfiguration, customSearchConfiguration); - ESBrowseDAO browseDAO = new ESBrowseDAO(entityRegistry, _searchClient, indexConvention, _searchConfiguration, _customSearchConfiguration); - ESWriteDAO writeDAO = new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); - return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); - } - - @Bean(name = "sampleDataSearchService") - @Nonnull - protected SearchService searchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("sampleDataPrefix") String prefix, - @Qualifier("sampleDataFixtureName") String sampleDataFixtureName - ) throws IOException { - return searchServiceHelper(entityRegistry, entitySearchService, indexBuilders, prefix, sampleDataFixtureName); - } - - @Bean(name = "longTailSearchService") - @Nonnull - protected SearchService longTailSearchService( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailEntitySearchService") ElasticSearchService longTailEntitySearchService, - @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, - @Qualifier("longTailPrefix") String longTailPrefix, - @Qualifier("longTailFixtureName") String longTailFixtureName - ) throws IOException { - return searchServiceHelper(longTailEntityRegistry, longTailEntitySearchService, longTailIndexBuilders, longTailPrefix, longTailFixtureName); - } - - public SearchService searchServiceHelper( - EntityRegistry entityRegistry, - ElasticSearchService entitySearchService, - EntityIndexBuilders indexBuilders, - String prefix, - String fixtureName - ) throws IOException { - int batchSize = 100; - SearchRanker<Double> ranker = new SimpleRanker(); - CacheManager cacheManager = new ConcurrentMapCacheManager(); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); - entityDocCountCacheConfiguration.setTtlSeconds(600L); - - SearchService service = new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, entityDocCountCacheConfiguration), - new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - false - ), - ranker - ); - - // Build indices & write fixture data - indexBuilders.reindexAll(); - - FixtureReader.builder() - .bulkProcessor(_bulkProcessor) - .fixtureName(fixtureName) - .targetIndexPrefix(prefix) - .refreshIntervalSeconds(REFRESH_INTERVAL_SECONDS) - .build() - .read(); - - return service; - } - - @Bean(name = "sampleDataEntityClient") - @Nonnull - protected EntityClient entityClient( - @Qualifier("sampleDataSearchService") SearchService searchService, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry entityRegistry - ) { - return entityClientHelper(searchService, entitySearchService, entityRegistry); - } - - @Bean(name = "longTailEntityClient") - @Nonnull - protected EntityClient longTailEntityClient( - @Qualifier("sampleDataSearchService") SearchService 
searchService, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry - ) { - return entityClientHelper(searchService, entitySearchService, longTailEntityRegistry); - } - - private EntityClient entityClientHelper( - SearchService searchService, - ElasticSearchService entitySearchService, - EntityRegistry entityRegistry - ) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - new ConcurrentMapCacheManager(), - entitySearchService, - 1, - false); - - AspectDao mockAspectDao = mock(AspectDao.class); - when(mockAspectDao.batchGet(anySet())).thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); - - PreProcessHooks preProcessHooks = new PreProcessHooks(); - preProcessHooks.setUiEnabled(true); - return new JavaEntityClient( - new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, - preProcessHooks), - null, - entitySearchService, - cachingEntitySearchService, - searchService, - null, - null, - null, - null); - } + /** + * Interested in adding more fixtures? Here's what you will need to update? 1. Create a new + * indexPrefix and FixtureName. Both are needed or else all fixtures will load on top of each + * other, overwriting each other 2. Create a new IndexConvention, IndexBuilder, and EntityClient. + * These are needed to index a different set of entities. + */ + @Autowired private ESBulkProcessor _bulkProcessor; + + @Autowired private RestHighLevelClient _searchClient; + + @Autowired private RestHighLevelClient _longTailSearchClient; + + @Autowired private SearchConfiguration _searchConfiguration; + + @Autowired private CustomSearchConfiguration _customSearchConfiguration; + + @Bean(name = "sampleDataPrefix") + protected String sampleDataPrefix() { + return "smpldat"; + } + + @Bean(name = "longTailPrefix") + protected String longTailIndexPrefix() { + return "lngtl"; + } + + @Bean(name = "sampleDataIndexConvention") + protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "longTailIndexConvention") + protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "sampleDataFixtureName") + protected String sampleDataFixtureName() { + return "sample_data"; + } + + @Bean(name = "longTailFixtureName") + protected String longTailFixtureName() { + return "long_tail"; + } + + @Bean(name = "sampleDataEntityIndexBuilders") + protected EntityIndexBuilders entityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention) { + return entityIndexBuildersHelper(entityRegistry, indexConvention); + } + + @Bean(name = "longTailEntityIndexBuilders") + protected EntityIndexBuilders longTailEntityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailIndexConvention") IndexConvention indexConvention) { + return entityIndexBuildersHelper(longTailEntityRegistry, indexConvention); + } + + protected EntityIndexBuilders entityIndexBuildersHelper( + EntityRegistry entityRegistry, IndexConvention indexConvention) { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder indexBuilder = + new ESIndexBuilder( + _searchClient, + 1, + 0, + 1, + 1, + Map.of(), + true, + 
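+                // The positional args are opaque here; most likely (unverified assumption):
+                //   1 shard, 0 replicas, 1 retry, 1s refresh interval, no setting overrides,
+                //   settings-reindex enabled (true) / mappings-reindex disabled (false).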
false, + new ElasticSearchConfiguration(), + gitVersion); + SettingsBuilder settingsBuilder = new SettingsBuilder(null); + return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); + } + + @Bean(name = "sampleDataEntitySearchService") + protected ElasticSearchService entitySearchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention) + throws IOException { + return entitySearchServiceHelper(entityRegistry, indexBuilders, indexConvention); + } + + @Bean(name = "longTailEntitySearchService") + protected ElasticSearchService longTailEntitySearchService( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailEndexBuilders, + @Qualifier("longTailIndexConvention") IndexConvention longTailIndexConvention) + throws IOException { + return entitySearchServiceHelper( + longTailEntityRegistry, longTailEndexBuilders, longTailIndexConvention); + } + + protected ElasticSearchService entitySearchServiceHelper( + EntityRegistry entityRegistry, + EntityIndexBuilders indexBuilders, + IndexConvention indexConvention) + throws IOException { + CustomConfiguration customConfiguration = new CustomConfiguration(); + customConfiguration.setEnabled(true); + customConfiguration.setFile("search_config_fixture_test.yml"); + CustomSearchConfiguration customSearchConfiguration = + customConfiguration.resolve(new YAMLMapper()); + + ESSearchDAO searchDAO = + new ESSearchDAO( + entityRegistry, + _searchClient, + indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + _searchConfiguration, + customSearchConfiguration); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + entityRegistry, + _searchClient, + indexConvention, + _searchConfiguration, + _customSearchConfiguration); + ESWriteDAO writeDAO = + new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); + } + + @Bean(name = "sampleDataSearchService") + @Nonnull + protected SearchService searchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("sampleDataPrefix") String prefix, + @Qualifier("sampleDataFixtureName") String sampleDataFixtureName) + throws IOException { + return searchServiceHelper( + entityRegistry, entitySearchService, indexBuilders, prefix, sampleDataFixtureName); + } + + @Bean(name = "longTailSearchService") + @Nonnull + protected SearchService longTailSearchService( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailEntitySearchService") ElasticSearchService longTailEntitySearchService, + @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, + @Qualifier("longTailPrefix") String longTailPrefix, + @Qualifier("longTailFixtureName") String longTailFixtureName) + throws IOException { + return searchServiceHelper( + longTailEntityRegistry, + longTailEntitySearchService, + longTailIndexBuilders, + longTailPrefix, + longTailFixtureName); + } + + public SearchService searchServiceHelper( + EntityRegistry entityRegistry, + ElasticSearchService entitySearchService, + EntityIndexBuilders 
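+      // Wires a SearchService from an EntityDocCountCache (600s TTL), a
+      // CachingEntitySearchService (batch size 100, ConcurrentMapCacheManager), and a
+      // SimpleRanker, then rebuilds the indices and loads the named fixture data.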
indexBuilders, + String prefix, + String fixtureName) + throws IOException { + int batchSize = 100; + SearchRanker<Double> ranker = new SimpleRanker(); + CacheManager cacheManager = new ConcurrentMapCacheManager(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); + entityDocCountCacheConfiguration.setTtlSeconds(600L); + + SearchService service = + new SearchService( + new EntityDocCountCache( + entityRegistry, entitySearchService, entityDocCountCacheConfiguration), + new CachingEntitySearchService(cacheManager, entitySearchService, batchSize, false), + ranker); + + // Build indices & write fixture data + indexBuilders.reindexAll(); + + FixtureReader.builder() + .bulkProcessor(_bulkProcessor) + .fixtureName(fixtureName) + .targetIndexPrefix(prefix) + .refreshIntervalSeconds(REFRESH_INTERVAL_SECONDS) + .build() + .read(); + + return service; + } + + @Bean(name = "sampleDataEntityClient") + @Nonnull + protected EntityClient entityClient( + @Qualifier("sampleDataSearchService") SearchService searchService, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry entityRegistry) { + return entityClientHelper(searchService, entitySearchService, entityRegistry); + } + + @Bean(name = "longTailEntityClient") + @Nonnull + protected EntityClient longTailEntityClient( + @Qualifier("sampleDataSearchService") SearchService searchService, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry) { + return entityClientHelper(searchService, entitySearchService, longTailEntityRegistry); + } + + private EntityClient entityClientHelper( + SearchService searchService, + ElasticSearchService entitySearchService, + EntityRegistry entityRegistry) { + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService( + new ConcurrentMapCacheManager(), entitySearchService, 1, false); + + AspectDao mockAspectDao = mock(AspectDao.class); + when(mockAspectDao.batchGet(anySet())) + .thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); + + PreProcessHooks preProcessHooks = new PreProcessHooks(); + preProcessHooks.setUiEnabled(true); + return new JavaEntityClient( + new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, preProcessHooks), + null, + entitySearchService, + cachingEntitySearchService, + searchService, + null, + null, + null, + null); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java index d74dd041f082e..d3b16b2beed3d 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java @@ -1,13 +1,18 @@ package io.datahubproject.test.fixtures.search; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; -import io.datahubproject.test.search.ElasticsearchTestContainer; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import 
com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import io.datahubproject.test.models.DatasetAnonymized; +import io.datahubproject.test.search.ElasticsearchTestContainer; import io.datahubproject.test.search.SearchTestUtils; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import java.io.IOException; +import java.util.Set; import org.opensearch.client.RestHighLevelClient; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; @@ -15,125 +20,127 @@ import org.testng.annotations.Ignore; import org.testng.annotations.Test; -import java.io.IOException; -import java.util.Set; - -import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; -import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; - -/** - * This class is used for extracting and moving search fixture data. - */ +/** This class is used for extracting and moving search fixture data. */ @TestConfiguration public class SearchFixtureUtils { - final public static String FIXTURE_BASE = "src/test/resources/elasticsearch"; - - final public static ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - - @Bean(name = "testSearchContainer") - public GenericContainer<?> testSearchContainer() { - return new ElasticsearchTestContainer().startContainer(); + public static final String FIXTURE_BASE = "src/test/resources/elasticsearch"; + + public static final ObjectMapper OBJECT_MAPPER = + new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + @Bean(name = "testSearchContainer") + public GenericContainer<?> testSearchContainer() { + return new ElasticsearchTestContainer().startContainer(); + } + + @Test + @Ignore("Fixture capture lineage") + /* + * Run this to capture test fixtures, repeat for graph & dataset + * 1. Configure anonymizer class (use builder or set to null) Do not commit non-anonymous data + * 2. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) + * 2. Update fixture name + * 3. Comment @Ignore + * 4. Create output directory + * 5. 
Run extraction + **/ + private void extractSearchLineageTestFixture() throws IOException { + String rootUrn = + "urn:li:dataset:(urn:li:dataPlatform:teradata,teradata.simba.pp_bi_tables.tmis_daily_metrics_final_agg,PROD)"; + + // Set.of("system_metadata_service_v1", "datasetindex_v2", "graph_service_v1") + try (RestHighLevelClient client = + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { + FixtureWriter fixtureWriter = FixtureWriter.builder().client(client).build(); + + /* + LineageExporter<GraphAnonymized> exporter = LineageExporter.<GraphAnonymized>builder() + .writer(fixtureWriter) + .anonymizerClazz(GraphAnonymized.class) + .graphIndexName("<namespace>_graph_service_v1-5shards") + .graphOutputPath(String.format("%s/%s.json", "search_lineage2", "graph_service_v1")) + .build(); + */ + + LineageExporter<DatasetAnonymized> exporter = + LineageExporter.<DatasetAnonymized>builder() + .writer(fixtureWriter) + .anonymizerClazz(DatasetAnonymized.class) + .entityIndexName("<namespace>_datasetindex_v2-5shards") + .entityOutputPath(String.format("%s/%s.json", "search_lineage2", "datasetindex_v2")) + .build(); + + exporter.export(Set.of(rootUrn)); } - - @Test - @Ignore("Fixture capture lineage") - /* - * Run this to capture test fixtures, repeat for graph & dataset - * 1. Configure anonymizer class (use builder or set to null) Do not commit non-anonymous data - * 2. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) - * 2. Update fixture name - * 3. Comment @Ignore - * 4. Create output directory - * 5. Run extraction - **/ - private void extractSearchLineageTestFixture() throws IOException { - String rootUrn = "urn:li:dataset:(urn:li:dataPlatform:teradata,teradata.simba.pp_bi_tables.tmis_daily_metrics_final_agg,PROD)"; - - // Set.of("system_metadata_service_v1", "datasetindex_v2", "graph_service_v1") - try (RestHighLevelClient client = new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { - FixtureWriter fixtureWriter = FixtureWriter.builder() - .client(client) - .build(); - - /* - LineageExporter<GraphAnonymized> exporter = LineageExporter.<GraphAnonymized>builder() - .writer(fixtureWriter) - .anonymizerClazz(GraphAnonymized.class) - .graphIndexName("<namespace>_graph_service_v1-5shards") - .graphOutputPath(String.format("%s/%s.json", "search_lineage2", "graph_service_v1")) - .build(); - */ - - LineageExporter<DatasetAnonymized> exporter = LineageExporter.<DatasetAnonymized>builder() - .writer(fixtureWriter) - .anonymizerClazz(DatasetAnonymized.class) - .entityIndexName("<namespace>_datasetindex_v2-5shards") - .entityOutputPath(String.format("%s/%s.json", "search_lineage2", "datasetindex_v2")) - .build(); - - exporter.export(Set.of(rootUrn)); - } - } - - @Test - @Ignore("Fixture capture logic") - /* - * Run this to capture test fixtures - * 1. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) - * 2. Update fixture name - * 3. Comment @Ignore - * 4. 
Run extraction - **/ - private void extractEntityTestFixture() throws IOException { - String fixtureName = "temp"; - String prefix = ""; - String commonSuffix = "index_v2"; - - try (RestHighLevelClient client = new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { - FixtureWriter fixtureWriter = FixtureWriter.builder() - .client(client) - .build(); - - EntityExporter exporter = EntityExporter.builder() - .client(client) - .writer(fixtureWriter) - .fixtureName(fixtureName) - .sourceIndexSuffix(commonSuffix) - .sourceIndexPrefix(prefix) - .build(); - - exporter.export(); - } - } - - @Test - @Ignore("Write capture logic to some external ES cluster for testing") - /* - * Can be used to write fixture data to external ES cluster - * 1. Set environment variables - * 2. Update fixture name and prefix - * 3. Uncomment and run test - */ - private void reindexTestFixtureData() throws IOException { - ESBulkProcessor bulkProcessor = ESBulkProcessor.builder(new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) - .async(true) - .bulkRequestsLimit(1000) - .retryInterval(1L) - .numRetries(2) - .build(); - - FixtureReader reader = FixtureReader.builder() - .bulkProcessor(bulkProcessor) - .fixtureName("long_tail") - .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) - .build(); - - reader.read(); + } + + @Test + @Ignore("Fixture capture logic") + /* + * Run this to capture test fixtures + * 1. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) + * 2. Update fixture name + * 3. Comment @Ignore + * 4. Run extraction + **/ + private void extractEntityTestFixture() throws IOException { + String fixtureName = "temp"; + String prefix = ""; + String commonSuffix = "index_v2"; + + try (RestHighLevelClient client = + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { + FixtureWriter fixtureWriter = FixtureWriter.builder().client(client).build(); + + EntityExporter exporter = + EntityExporter.builder() + .client(client) + .writer(fixtureWriter) + .fixtureName(fixtureName) + .sourceIndexSuffix(commonSuffix) + .sourceIndexPrefix(prefix) + .build(); + + exporter.export(); } + } + + @Test + @Ignore("Write capture logic to some external ES cluster for testing") + /* + * Can be used to write fixture data to external ES cluster + * 1. Set environment variables + * 2. Update fixture name and prefix + * 3. 
Uncomment and run test + */ + private void reindexTestFixtureData() throws IOException { + ESBulkProcessor bulkProcessor = + ESBulkProcessor.builder( + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) + .async(true) + .bulkRequestsLimit(1000) + .retryInterval(1L) + .numRetries(2) + .build(); + + FixtureReader reader = + FixtureReader.builder() + .bulkProcessor(bulkProcessor) + .fixtureName("long_tail") + .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) + .build(); + + reader.read(); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java index 93d3f108d9e47..978471b53faad 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java @@ -1,7 +1,9 @@ package io.datahubproject.test.fixtures.search; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; @@ -9,8 +11,6 @@ import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.graph.elastic.ESGraphQueryDAO; import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO; @@ -34,9 +34,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; - +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import java.io.IOException; +import java.util.Map; import java.util.Optional; - +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -46,180 +49,199 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Map; - -import static com.linkedin.metadata.Constants.*; - - @TestConfiguration @Import(SearchCommonTestConfiguration.class) public class SearchLineageFixtureConfiguration { - @Autowired - private ESBulkProcessor _bulkProcessor; - - @Autowired - private RestHighLevelClient _searchClient; - - @Autowired - private SearchConfiguration _searchConfiguration; - - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; - - @Bean(name = "searchLineagePrefix") - protected String indexPrefix() { - return "srchlin"; - } - - @Bean(name = 
"searchLineageIndexConvention") - protected IndexConvention indexConvention(@Qualifier("searchLineagePrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "searchLineageFixtureName") - protected String fixtureName() { - return "search_lineage"; - } - - @Bean(name = "lineageCacheConfiguration") - protected SearchLineageCacheConfiguration searchLineageCacheConfiguration() { - SearchLineageCacheConfiguration conf = new SearchLineageCacheConfiguration(); - conf.setLightningThreshold(300); - conf.setTtlSeconds(30); - return conf; - } - - @Bean(name = "searchLineageEntityIndexBuilders") - protected EntityIndexBuilders entityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder indexBuilder = new ESIndexBuilder(_searchClient, 1, 0, 1, - 1, Map.of(), true, false, - new ElasticSearchConfiguration(), gitVersion); - SettingsBuilder settingsBuilder = new SettingsBuilder(null); - return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); - } - - @Bean(name = "searchLineageEntitySearchService") - protected ElasticSearchService entitySearchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - ESSearchDAO searchDAO = new ESSearchDAO(entityRegistry, _searchClient, indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, _searchConfiguration, null); - ESBrowseDAO browseDAO = new ESBrowseDAO(entityRegistry, _searchClient, indexConvention, _searchConfiguration, _customSearchConfiguration); - ESWriteDAO writeDAO = new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); - return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); - } - - @Bean(name = "searchLineageESIndexBuilder") - @Nonnull - protected ESIndexBuilder esIndexBuilder() { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - return new ESIndexBuilder(_searchClient, 1, 1, 1, 1, Map.of(), - true, true, - new ElasticSearchConfiguration(), gitVersion); - } - - @Bean(name = "searchLineageGraphService") - @Nonnull - protected ElasticSearchGraphService graphService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageESIndexBuilder") ESIndexBuilder indexBuilder, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - ElasticSearchGraphService graphService = new ElasticSearchGraphService(lineageRegistry, _bulkProcessor, indexConvention, - new ESGraphWriteDAO(indexConvention, _bulkProcessor, 1), - new ESGraphQueryDAO(_searchClient, lineageRegistry, indexConvention, GraphQueryConfiguration.testDefaults), indexBuilder); - graphService.configure(); - return graphService; - } - - @Bean(name = "searchLineageLineageSearchService") - @Nonnull - protected LineageSearchService lineageSearchService( - @Qualifier("searchLineageSearchService") SearchService searchService, - @Qualifier("searchLineageGraphService") ElasticSearchGraphService graphService, - @Qualifier("searchLineagePrefix") String prefix, - @Qualifier("searchLineageFixtureName") String fixtureName, - 
@Qualifier("lineageCacheConfiguration") SearchLineageCacheConfiguration cacheConfiguration - ) throws IOException { - - // Load fixture data (after graphService mappings applied) - FixtureReader.builder() - .bulkProcessor(_bulkProcessor) - .fixtureName(fixtureName) - .targetIndexPrefix(prefix) - .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) - .build() - .read(); - - return new LineageSearchService(searchService, graphService, null, false, cacheConfiguration); - } - - @Bean(name = "searchLineageSearchService") - @Nonnull - protected SearchService searchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders - ) throws IOException { - - int batchSize = 100; - SearchRanker<Double> ranker = new SimpleRanker(); - CacheManager cacheManager = new ConcurrentMapCacheManager(); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); - entityDocCountCacheConfiguration.setTtlSeconds(600L); - - SearchService service = new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, entityDocCountCacheConfiguration), - new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - false - ), - ranker - ); - - // Build indices - indexBuilders.reindexAll(); - - return service; - } - - @Bean(name = "searchLineageEntityClient") - @Nonnull - protected EntityClient entityClient( - @Qualifier("searchLineageSearchService") SearchService searchService, - @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry entityRegistry - ) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - new ConcurrentMapCacheManager(), - entitySearchService, - 1, - false); - - PreProcessHooks preProcessHooks = new PreProcessHooks(); - preProcessHooks.setUiEnabled(true); - return new JavaEntityClient( - new EntityServiceImpl(null, null, entityRegistry, true, null, - preProcessHooks), - null, - entitySearchService, - cachingEntitySearchService, - searchService, - null, - null, - null, - null); - } + @Autowired private ESBulkProcessor _bulkProcessor; + + @Autowired private RestHighLevelClient _searchClient; + + @Autowired private SearchConfiguration _searchConfiguration; + + @Autowired private CustomSearchConfiguration _customSearchConfiguration; + + @Bean(name = "searchLineagePrefix") + protected String indexPrefix() { + return "srchlin"; + } + + @Bean(name = "searchLineageIndexConvention") + protected IndexConvention indexConvention(@Qualifier("searchLineagePrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "searchLineageFixtureName") + protected String fixtureName() { + return "search_lineage"; + } + + @Bean(name = "lineageCacheConfiguration") + protected SearchLineageCacheConfiguration searchLineageCacheConfiguration() { + SearchLineageCacheConfiguration conf = new SearchLineageCacheConfiguration(); + conf.setLightningThreshold(300); + conf.setTtlSeconds(30); + return conf; + } + + @Bean(name = "searchLineageEntityIndexBuilders") + protected EntityIndexBuilders entityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + GitVersion gitVersion = new 
GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder indexBuilder = + new ESIndexBuilder( + _searchClient, + 1, + 0, + 1, + 1, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + SettingsBuilder settingsBuilder = new SettingsBuilder(null); + return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); + } + + @Bean(name = "searchLineageEntitySearchService") + protected ElasticSearchService entitySearchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + ESSearchDAO searchDAO = + new ESSearchDAO( + entityRegistry, + _searchClient, + indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + _searchConfiguration, + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + entityRegistry, + _searchClient, + indexConvention, + _searchConfiguration, + _customSearchConfiguration); + ESWriteDAO writeDAO = + new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); + } + + @Bean(name = "searchLineageESIndexBuilder") + @Nonnull + protected ESIndexBuilder esIndexBuilder() { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + return new ESIndexBuilder( + _searchClient, + 1, + 1, + 1, + 1, + Map.of(), + true, + true, + new ElasticSearchConfiguration(), + gitVersion); + } + + @Bean(name = "searchLineageGraphService") + @Nonnull + protected ElasticSearchGraphService graphService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageESIndexBuilder") ESIndexBuilder indexBuilder, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); + ElasticSearchGraphService graphService = + new ElasticSearchGraphService( + lineageRegistry, + _bulkProcessor, + indexConvention, + new ESGraphWriteDAO(indexConvention, _bulkProcessor, 1), + new ESGraphQueryDAO( + _searchClient, + lineageRegistry, + indexConvention, + GraphQueryConfiguration.testDefaults), + indexBuilder); + graphService.configure(); + return graphService; + } + + @Bean(name = "searchLineageLineageSearchService") + @Nonnull + protected LineageSearchService lineageSearchService( + @Qualifier("searchLineageSearchService") SearchService searchService, + @Qualifier("searchLineageGraphService") ElasticSearchGraphService graphService, + @Qualifier("searchLineagePrefix") String prefix, + @Qualifier("searchLineageFixtureName") String fixtureName, + @Qualifier("lineageCacheConfiguration") SearchLineageCacheConfiguration cacheConfiguration) + throws IOException { + + // Load fixture data (after graphService mappings applied) + FixtureReader.builder() + .bulkProcessor(_bulkProcessor) + .fixtureName(fixtureName) + .targetIndexPrefix(prefix) + .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) + .build() + .read(); + + return new LineageSearchService(searchService, graphService, null, false, cacheConfiguration); + } + + @Bean(name = "searchLineageSearchService") + @Nonnull + protected SearchService searchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, + 
@Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders) + throws IOException { + + int batchSize = 100; + SearchRanker<Double> ranker = new SimpleRanker(); + CacheManager cacheManager = new ConcurrentMapCacheManager(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); + entityDocCountCacheConfiguration.setTtlSeconds(600L); + + SearchService service = + new SearchService( + new EntityDocCountCache( + entityRegistry, entitySearchService, entityDocCountCacheConfiguration), + new CachingEntitySearchService(cacheManager, entitySearchService, batchSize, false), + ranker); + + // Build indices + indexBuilders.reindexAll(); + + return service; + } + + @Bean(name = "searchLineageEntityClient") + @Nonnull + protected EntityClient entityClient( + @Qualifier("searchLineageSearchService") SearchService searchService, + @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry entityRegistry) { + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService( + new ConcurrentMapCacheManager(), entitySearchService, 1, false); + + PreProcessHooks preProcessHooks = new PreProcessHooks(); + preProcessHooks.setUiEnabled(true); + return new JavaEntityClient( + new EntityServiceImpl(null, null, entityRegistry, true, null, preProcessHooks), + null, + entitySearchService, + cachingEntitySearchService, + searchService, + null, + null, + null, + null); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java index 6036473063059..3b68ef50be18f 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java @@ -1,53 +1,48 @@ package io.datahubproject.test.models; import com.fasterxml.jackson.annotation.JsonSetter; - import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.regex.Matcher; import java.util.regex.Pattern; - - import org.apache.commons.codec.binary.Hex; public abstract class Anonymized { - public String urn; - - @JsonSetter - public void setUrn(String urn) { - this.urn = anonymizeUrn(urn); - } - - private static final Pattern URN_REGEX = Pattern.compile("^(.+)[(](.+),(.+),([A-Z]+)[)]$"); - - public static String anonymizeUrn(String urn) { - if (urn != null) { - Matcher m = URN_REGEX.matcher(urn); - if (m.find()) { - return String.format("%s(%s,%s,%s)", - m.group(1), - anonymizeLast(m.group(2), ":"), - hashFunction(m.group(3)), - m.group(4)); - } - } - return urn; + public String urn; + + @JsonSetter + public void setUrn(String urn) { + this.urn = anonymizeUrn(urn); + } + + private static final Pattern URN_REGEX = Pattern.compile("^(.+)[(](.+),(.+),([A-Z]+)[)]$"); + + public static String anonymizeUrn(String urn) { + if (urn != null) { + Matcher m = URN_REGEX.matcher(urn); + if (m.find()) { + return String.format( + "%s(%s,%s,%s)", + m.group(1), anonymizeLast(m.group(2), ":"), hashFunction(m.group(3)), m.group(4)); + } } - - protected static String anonymizeLast(String s, String sep) { - String[] splits = s.split(sep); - splits[splits.length - 1] = hashFunction(splits[splits.length - 1]); - return String.join(sep, splits); - } - - protected static String hashFunction(String s) { - try { - MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); - 
messageDigest.update(s.getBytes()); - char[] hex = Hex.encodeHex(messageDigest.digest()); - return new String(hex).substring(0, Math.min(s.length() - 1, hex.length - 1)); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } + return urn; + } + + protected static String anonymizeLast(String s, String sep) { + String[] splits = s.split(sep); + splits[splits.length - 1] = hashFunction(splits[splits.length - 1]); + return String.join(sep, splits); + } + + protected static String hashFunction(String s) { + try { + MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); + messageDigest.update(s.getBytes()); + char[] hex = Hex.encodeHex(messageDigest.digest()); + return new String(hex).substring(0, Math.min(s.length() - 1, hex.length - 1)); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java index 35813d22067a6..c870b4682a6b8 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java @@ -1,7 +1,6 @@ package io.datahubproject.test.models; import com.fasterxml.jackson.annotation.JsonGetter; - import java.util.Arrays; import java.util.Optional; import java.util.Set; @@ -9,35 +8,38 @@ public class DatasetAnonymized extends Anonymized { - public Set<String> upstreams; - public String id; - public String origin; - public String platform; - public boolean removed; - public Set<String> browsePaths; - - @JsonGetter("id") - public String getId() { - return Optional.ofNullable(id).map(Anonymized::hashFunction).orElse(null); - } - - @JsonGetter("platform") - public String getPlatform() { - return Optional.ofNullable(platform).map(p -> Anonymized.anonymizeLast(p, ":")).orElse(null); - } - - @JsonGetter("upstreams") - public Set<String> getUpstreams() { - return Optional.ofNullable(upstreams).orElse(Set.of()).stream() - .map(Anonymized::anonymizeUrn).collect(Collectors.toSet()); - } - - @JsonGetter("browsePaths") - public Set<String> getBrowsePaths() { - return Optional.ofNullable(browsePaths).orElse(Set.of()).stream() - .map(p -> Arrays.stream(p.split("/")) - .map(Anonymized::hashFunction) - .collect(Collectors.joining("/")) - ).collect(Collectors.toSet()); - } + public Set<String> upstreams; + public String id; + public String origin; + public String platform; + public boolean removed; + public Set<String> browsePaths; + + @JsonGetter("id") + public String getId() { + return Optional.ofNullable(id).map(Anonymized::hashFunction).orElse(null); + } + + @JsonGetter("platform") + public String getPlatform() { + return Optional.ofNullable(platform).map(p -> Anonymized.anonymizeLast(p, ":")).orElse(null); + } + + @JsonGetter("upstreams") + public Set<String> getUpstreams() { + return Optional.ofNullable(upstreams).orElse(Set.of()).stream() + .map(Anonymized::anonymizeUrn) + .collect(Collectors.toSet()); + } + + @JsonGetter("browsePaths") + public Set<String> getBrowsePaths() { + return Optional.ofNullable(browsePaths).orElse(Set.of()).stream() + .map( + p -> + Arrays.stream(p.split("/")) + .map(Anonymized::hashFunction) + .collect(Collectors.joining("/"))) + .collect(Collectors.toSet()); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java 
index 3d2360ae04228..bbd95671ee95a 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java @@ -3,17 +3,17 @@ import com.fasterxml.jackson.annotation.JsonSetter; public class GraphAnonymized { - public GraphNode source; - public GraphNode destination; - public String relationshipType; + public GraphNode source; + public GraphNode destination; + public String relationshipType; - public static class GraphNode extends Anonymized { - public String urn; - public String entityType; + public static class GraphNode extends Anonymized { + public String urn; + public String entityType; - @JsonSetter("urn") - public void setUrn(String urn) { - this.urn = anonymizeUrn(urn); - } + @JsonSetter("urn") + public void setUrn(String urn) { + this.urn = anonymizeUrn(urn); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java index 233a667d078dd..4129a2f997dc8 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java @@ -1,42 +1,46 @@ package io.datahubproject.test.search; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; + import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; - public class ElasticsearchTestContainer implements SearchTestContainer { - private static final String ELASTIC_VERSION = "7.10.1"; - private static final String ELASTIC_IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch"; - private static final String ENV_ELASTIC_IMAGE_FULL_NAME = System.getenv("ELASTIC_IMAGE_FULL_NAME"); - private static final String ELASTIC_IMAGE_FULL_NAME = ENV_ELASTIC_IMAGE_FULL_NAME != null - ? ENV_ELASTIC_IMAGE_FULL_NAME : ELASTIC_IMAGE_NAME + ":" + ELASTIC_VERSION; - private static final DockerImageName DOCKER_IMAGE_NAME = DockerImageName.parse(ELASTIC_IMAGE_FULL_NAME) - .asCompatibleSubstituteFor(ELASTIC_IMAGE_NAME); - - protected static final GenericContainer<?> ES_CONTAINER; - private boolean isStarted = false; - - // A helper method to create an ElasticsearchContainer defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. - static { - ES_CONTAINER = new org.testcontainers.elasticsearch.ElasticsearchContainer(DOCKER_IMAGE_NAME); - checkContainerEngine(ES_CONTAINER.getDockerClient()); - ES_CONTAINER.withEnv("ES_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT); + private static final String ELASTIC_VERSION = "7.10.1"; + private static final String ELASTIC_IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch"; + private static final String ENV_ELASTIC_IMAGE_FULL_NAME = + System.getenv("ELASTIC_IMAGE_FULL_NAME"); + private static final String ELASTIC_IMAGE_FULL_NAME = + ENV_ELASTIC_IMAGE_FULL_NAME != null + ? 
ENV_ELASTIC_IMAGE_FULL_NAME + : ELASTIC_IMAGE_NAME + ":" + ELASTIC_VERSION; + private static final DockerImageName DOCKER_IMAGE_NAME = + DockerImageName.parse(ELASTIC_IMAGE_FULL_NAME).asCompatibleSubstituteFor(ELASTIC_IMAGE_NAME); + + protected static final GenericContainer<?> ES_CONTAINER; + private boolean isStarted = false; + + // A helper method to create an ElasticsearchContainer defaulting to the current image and + // version, with the ability + // within firewalled environments to override with an environment variable to point to the offline + // repository. + static { + ES_CONTAINER = new org.testcontainers.elasticsearch.ElasticsearchContainer(DOCKER_IMAGE_NAME); + checkContainerEngine(ES_CONTAINER.getDockerClient()); + ES_CONTAINER.withEnv("ES_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT); + } + + @Override + public GenericContainer<?> startContainer() { + if (!isStarted) { + ElasticsearchTestContainer.ES_CONTAINER.start(); + isStarted = true; } + return ES_CONTAINER; + } - @Override - public GenericContainer<?> startContainer() { - if (!isStarted) { - ElasticsearchTestContainer.ES_CONTAINER.start(); - isStarted = true; - } - return ES_CONTAINER; - } - - @Override - public void stopContainer() { - ES_CONTAINER.stop(); - } + @Override + public void stopContainer() { + ES_CONTAINER.stop(); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java index d94b88b466f89..739169b834a57 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java @@ -1,43 +1,50 @@ package io.datahubproject.test.search; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; + import org.opensearch.testcontainers.OpensearchContainer; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; - public class OpenSearchTestContainer implements SearchTestContainer { - private static final String OPENSEARCH_VERSION = "2.9.0"; - private static final String OPENSEARCH_IMAGE_NAME = "opensearchproject/opensearch"; - private static final String ENV_OPENSEARCH_IMAGE_FULL_NAME = System.getenv("OPENSEARCH_IMAGE_FULL_NAME"); - private static final String OPENSEARCH_IMAGE_FULL_NAME = ENV_OPENSEARCH_IMAGE_FULL_NAME != null - ? ENV_OPENSEARCH_IMAGE_FULL_NAME : OPENSEARCH_IMAGE_NAME + ":" + OPENSEARCH_VERSION; - private static final DockerImageName DOCKER_IMAGE_NAME = DockerImageName.parse(OPENSEARCH_IMAGE_FULL_NAME) - .asCompatibleSubstituteFor(OPENSEARCH_IMAGE_NAME); - - protected static final GenericContainer<?> OS_CONTAINER; - private boolean isStarted = false; - - // A helper method to create an ElasticseachContainer defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. 
-  static {
-    OS_CONTAINER = new OpensearchContainer(DOCKER_IMAGE_NAME);
-    checkContainerEngine(OS_CONTAINER.getDockerClient());
-    OS_CONTAINER.withEnv("OPENSEARCH_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT);
+  private static final String OPENSEARCH_VERSION = "2.9.0";
+  private static final String OPENSEARCH_IMAGE_NAME = "opensearchproject/opensearch";
+  private static final String ENV_OPENSEARCH_IMAGE_FULL_NAME =
+      System.getenv("OPENSEARCH_IMAGE_FULL_NAME");
+  private static final String OPENSEARCH_IMAGE_FULL_NAME =
+      ENV_OPENSEARCH_IMAGE_FULL_NAME != null
+          ? ENV_OPENSEARCH_IMAGE_FULL_NAME
+          : OPENSEARCH_IMAGE_NAME + ":" + OPENSEARCH_VERSION;
+  private static final DockerImageName DOCKER_IMAGE_NAME =
+      DockerImageName.parse(OPENSEARCH_IMAGE_FULL_NAME)
+          .asCompatibleSubstituteFor(OPENSEARCH_IMAGE_NAME);
+
+  protected static final GenericContainer<?> OS_CONTAINER;
+  private boolean isStarted = false;
+
+  // A helper method to create an ElasticsearchContainer defaulting to the current image and version,
+  // with the ability
+  // within firewalled environments to override with an environment variable to point to the offline
+  // repository.
+  static {
+    OS_CONTAINER = new OpensearchContainer(DOCKER_IMAGE_NAME);
+    checkContainerEngine(OS_CONTAINER.getDockerClient());
+    OS_CONTAINER
+        .withEnv("OPENSEARCH_JAVA_OPTS", SEARCH_JAVA_OPTS)
+        .withStartupTimeout(STARTUP_TIMEOUT);
+  }
+
+  @Override
+  public GenericContainer<?> startContainer() {
+    if (!isStarted) {
+      OS_CONTAINER.start();
+      isStarted = true;
+    }
+    return OS_CONTAINER;
+  }
-  @Override
-  public GenericContainer<?> startContainer() {
-    if (!isStarted) {
-      OS_CONTAINER.start();
-      isStarted = true;
-    }
-    return OS_CONTAINER;
-  }
-
-  @Override
-  public void stopContainer() {
-    OS_CONTAINER.stop();
-  }
+  @Override
+  public void stopContainer() {
+    OS_CONTAINER.stop();
+  }
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
index 34aa6978f742f..cda6a4c179f48 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
@@ -1,16 +1,15 @@ package io.datahubproject.test.search;
-import org.testcontainers.containers.GenericContainer;
-
 import java.time.Duration;
+import org.testcontainers.containers.GenericContainer;

 public interface SearchTestContainer {
-    String SEARCH_JAVA_OPTS = "-Xms446m -Xmx446m -XX:MaxDirectMemorySize=368435456";
+  String SEARCH_JAVA_OPTS = "-Xms446m -Xmx446m -XX:MaxDirectMemorySize=368435456";

-    Duration STARTUP_TIMEOUT = Duration.ofMinutes(5); // usually < 1min
+  Duration STARTUP_TIMEOUT = Duration.ofMinutes(5); // usually < 1min

-    GenericContainer<?> startContainer();
+  GenericContainer<?> startContainer();

-    void stopContainer();
+  void stopContainer();
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
index 414b9f927fada..58ea020e42565 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
@@ -1,5 +1,8 @@ package io.datahubproject.test.search;

+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES;
+import static 
com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; @@ -18,6 +21,11 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; @@ -27,121 +35,174 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestClientBuilder; -import javax.annotation.Nullable; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - public class SearchTestUtils { - private SearchTestUtils() { - } - - public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws InterruptedException { - bulkProcessor.flush(); - Thread.sleep(1000); - } - - public final static List<String> SEARCHABLE_ENTITIES; - static { - SEARCHABLE_ENTITIES = Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) - .map(EntityTypeMapper::getName) - .distinct() - .collect(Collectors.toList()); - } - - public static SearchResult searchAcrossEntities(SearchService searchService, String query) { - return searchAcrossEntities(searchService, query, null); - } - - public static SearchResult searchAcrossEntities(SearchService searchService, String query, @Nullable List<String> facets) { - return searchService.searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true), facets); - } - - public static SearchResult searchAcrossCustomEntities(SearchService searchService, String query, List<String> searchableEntities) { - return searchService.searchAcrossEntities(searchableEntities, query, null, null, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static SearchResult search(SearchService searchService, String query) { - return search(searchService, SEARCHABLE_ENTITIES, query); - } - - public static SearchResult search(SearchService searchService, List<String> entities, String query) { - return searchService.search(entities, query, null, null, 0, 100, - new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static ScrollResult scroll(SearchService searchService, String query, int batchSize, @Nullable String scrollId) { - return searchService.scrollAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, - scrollId, "3m", batchSize, new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static SearchResult searchStructured(SearchService searchService, String query) { - return searchService.searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, 0, - 100, new SearchFlags().setFulltext(false).setSkipCache(true)); - } - - public static LineageSearchResult lineage(LineageSearchService lineageSearchService, Urn root, int hops) { - String degree = hops >= 3 ? 
"3+" : String.valueOf(hops); - List<FacetFilterInput> filters = List.of(FacetFilterInput.builder() + private SearchTestUtils() {} + + public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws InterruptedException { + bulkProcessor.flush(); + Thread.sleep(1000); + } + + public static final List<String> SEARCHABLE_ENTITIES; + + static { + SEARCHABLE_ENTITIES = + Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + } + + public static SearchResult searchAcrossEntities(SearchService searchService, String query) { + return searchAcrossEntities(searchService, query, null); + } + + public static SearchResult searchAcrossEntities( + SearchService searchService, String query, @Nullable List<String> facets) { + return searchService.searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true), + facets); + } + + public static SearchResult searchAcrossCustomEntities( + SearchService searchService, String query, List<String> searchableEntities) { + return searchService.searchAcrossEntities( + searchableEntities, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static SearchResult search(SearchService searchService, String query) { + return search(searchService, SEARCHABLE_ENTITIES, query); + } + + public static SearchResult search( + SearchService searchService, List<String> entities, String query) { + return searchService.search( + entities, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static ScrollResult scroll( + SearchService searchService, String query, int batchSize, @Nullable String scrollId) { + return searchService.scrollAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + scrollId, + "3m", + batchSize, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static SearchResult searchStructured(SearchService searchService, String query) { + return searchService.searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(false).setSkipCache(true)); + } + + public static LineageSearchResult lineage( + LineageSearchService lineageSearchService, Urn root, int hops) { + String degree = hops >= 3 ? 
"3+" : String.valueOf(hops); + List<FacetFilterInput> filters = + List.of( + FacetFilterInput.builder() .setField("degree") .setCondition(FilterOperator.EQUAL) .setValues(List.of(degree)) .setNegated(false) .build()); - return lineageSearchService.searchAcrossLineage(root, LineageDirection.DOWNSTREAM, - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", hops, ResolverUtils.buildFilter(filters, List.of()), null, 0, 100, null, - null, new SearchFlags().setSkipCache(true)); - } - - public static AutoCompleteResults autocomplete(SearchableEntityType<?, String> searchableEntityType, String query) throws Exception { - return searchableEntityType.autoComplete(query, null, null, 100, new QueryContext() { - @Override - public boolean isAuthenticated() { - return true; - } - - @Override - public Authentication getAuthentication() { - return null; - } - - @Override - public Authorizer getAuthorizer() { - return null; - } + return lineageSearchService.searchAcrossLineage( + root, + LineageDirection.DOWNSTREAM, + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + hops, + ResolverUtils.buildFilter(filters, List.of()), + null, + 0, + 100, + null, + null, + new SearchFlags().setSkipCache(true)); + } + + public static AutoCompleteResults autocomplete( + SearchableEntityType<?, String> searchableEntityType, String query) throws Exception { + return searchableEntityType.autoComplete( + query, + null, + null, + 100, + new QueryContext() { + @Override + public boolean isAuthenticated() { + return true; + } + + @Override + public Authentication getAuthentication() { + return null; + } + + @Override + public Authorizer getAuthorizer() { + return null; + } }); - } - - public static RestClientBuilder environmentRestClientBuilder() { - Integer port = Integer.parseInt(Optional.ofNullable(System.getenv("ELASTICSEARCH_PORT")).orElse("9200")); - return RestClient.builder( - new HttpHost(Optional.ofNullable(System.getenv("ELASTICSEARCH_HOST")).orElse("localhost"), - port, port.equals(443) ? "https" : "http")) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient( - HttpAsyncClientBuilder httpClientBuilder) { - httpClientBuilder.disableAuthCaching(); - - if (System.getenv("ELASTICSEARCH_USERNAME") != null) { - final CredentialsProvider credentialsProvider = - new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials(System.getenv("ELASTICSEARCH_USERNAME"), - System.getenv("ELASTICSEARCH_PASSWORD"))); - httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); - } - - return httpClientBuilder; - } - }); - } + } + + public static RestClientBuilder environmentRestClientBuilder() { + Integer port = + Integer.parseInt(Optional.ofNullable(System.getenv("ELASTICSEARCH_PORT")).orElse("9200")); + return RestClient.builder( + new HttpHost( + Optional.ofNullable(System.getenv("ELASTICSEARCH_HOST")).orElse("localhost"), + port, + port.equals(443) ? 
"https" : "http")) + .setHttpClientConfigCallback( + new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + httpClientBuilder.disableAuthCaching(); + + if (System.getenv("ELASTICSEARCH_USERNAME") != null) { + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + AuthScope.ANY, + new UsernamePasswordCredentials( + System.getenv("ELASTICSEARCH_USERNAME"), + System.getenv("ELASTICSEARCH_PASSWORD"))); + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + + return httpClientBuilder; + } + }); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java index 530d3f4d53625..17747d9ba1cc9 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java @@ -13,51 +13,50 @@ import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; -/** - * This is common configuration for search regardless of which - * test container implementation. - */ +/** This is common configuration for search regardless of which test container implementation. */ @TestConfiguration public class SearchCommonTestConfiguration { - @Bean - public SearchConfiguration searchConfiguration() { - SearchConfiguration searchConfiguration = new SearchConfiguration(); - searchConfiguration.setMaxTermBucketSize(20); - - ExactMatchConfiguration exactMatchConfiguration = new ExactMatchConfiguration(); - exactMatchConfiguration.setExclusive(false); - exactMatchConfiguration.setExactFactor(10.0f); - exactMatchConfiguration.setWithPrefix(true); - exactMatchConfiguration.setPrefixFactor(6.0f); - exactMatchConfiguration.setCaseSensitivityFactor(0.7f); - exactMatchConfiguration.setEnableStructured(true); - - WordGramConfiguration wordGramConfiguration = new WordGramConfiguration(); - wordGramConfiguration.setTwoGramFactor(1.2f); - wordGramConfiguration.setThreeGramFactor(1.5f); - wordGramConfiguration.setFourGramFactor(1.8f); - - PartialConfiguration partialConfiguration = new PartialConfiguration(); - partialConfiguration.setFactor(0.4f); - partialConfiguration.setUrnFactor(0.5f); - - searchConfiguration.setExactMatch(exactMatchConfiguration); - searchConfiguration.setWordGram(wordGramConfiguration); - searchConfiguration.setPartial(partialConfiguration); - return searchConfiguration; - } - - @Bean - public CustomSearchConfiguration customSearchConfiguration() throws Exception { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_builder_test.yml"); - return customConfiguration.resolve(new YAMLMapper()); - } - - @Bean(name = "entityRegistry") - public EntityRegistry entityRegistry() throws EntityRegistryException { - return new ConfigEntityRegistry( - SearchCommonTestConfiguration.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - } + @Bean + public SearchConfiguration searchConfiguration() { + SearchConfiguration searchConfiguration = new SearchConfiguration(); + searchConfiguration.setMaxTermBucketSize(20); + + ExactMatchConfiguration exactMatchConfiguration = new 
ExactMatchConfiguration();
+    exactMatchConfiguration.setExclusive(false);
+    exactMatchConfiguration.setExactFactor(10.0f);
+    exactMatchConfiguration.setWithPrefix(true);
+    exactMatchConfiguration.setPrefixFactor(6.0f);
+    exactMatchConfiguration.setCaseSensitivityFactor(0.7f);
+    exactMatchConfiguration.setEnableStructured(true);
+
+    WordGramConfiguration wordGramConfiguration = new WordGramConfiguration();
+    wordGramConfiguration.setTwoGramFactor(1.2f);
+    wordGramConfiguration.setThreeGramFactor(1.5f);
+    wordGramConfiguration.setFourGramFactor(1.8f);
+
+    PartialConfiguration partialConfiguration = new PartialConfiguration();
+    partialConfiguration.setFactor(0.4f);
+    partialConfiguration.setUrnFactor(0.5f);
+
+    searchConfiguration.setExactMatch(exactMatchConfiguration);
+    searchConfiguration.setWordGram(wordGramConfiguration);
+    searchConfiguration.setPartial(partialConfiguration);
+    return searchConfiguration;
+  }
+
+  @Bean
+  public CustomSearchConfiguration customSearchConfiguration() throws Exception {
+    CustomConfiguration customConfiguration = new CustomConfiguration();
+    customConfiguration.setEnabled(true);
+    customConfiguration.setFile("search_config_builder_test.yml");
+    return customConfiguration.resolve(new YAMLMapper());
+  }
+
+  @Bean(name = "entityRegistry")
+  public EntityRegistry entityRegistry() throws EntityRegistryException {
+    return new ConfigEntityRegistry(
+        SearchCommonTestConfiguration.class
+            .getClassLoader()
+            .getResourceAsStream("entity-registry.yml"));
+  }
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
index 2cfa9f9187825..0ddfd77399325 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
@@ -4,8 +4,9 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.version.GitVersion;
+import java.util.Map;
 import java.util.Optional;
-
+import javax.annotation.Nonnull;
 import org.apache.http.HttpHost;
 import org.apache.http.impl.nio.reactor.IOReactorConfig;
 import org.opensearch.action.support.WriteRequest;
@@ -18,71 +19,81 @@ import org.springframework.context.annotation.Primary;
 import org.testcontainers.containers.GenericContainer;

-import javax.annotation.Nonnull;
-
-import java.util.Map;
-
-
 /**
- * This configuration is for `test containers` it builds these objects tied to
- * the test container instantiated for tests. Could be ES or OpenSearch, etc.
+ * This configuration is for `test containers`; it builds these objects tied to the test container
+ * instantiated for tests. Could be ES or OpenSearch, etc.
  *
- * Does your test required a running instance? If no, {@link io.datahubproject.test.search.config.SearchCommonTestConfiguration} instead.
+ * <p>Does your test require a running instance? If not, use {@link
+ * io.datahubproject.test.search.config.SearchCommonTestConfiguration} instead. 
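+ *
+ * <p>Minimal usage sketch (the test class below is hypothetical; the bean names come from this
+ * configuration and from {@link io.datahubproject.test.fixtures.search.SearchFixtureUtils}, which
+ * supplies the {@code testSearchContainer} bean):
+ *
+ * <pre>{@code
+ * @SpringBootTest
+ * @Import({SearchFixtureUtils.class, SearchTestContainerConfiguration.class})
+ * public class MySearchContainerTest extends AbstractTestNGSpringContextTests {
+ *   // wired against the containerized cluster started for this test run
+ *   @Autowired
+ *   @Qualifier("searchRestHighLevelClient")
+ *   private RestHighLevelClient searchClient;
+ * }
+ * }</pre>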
*/ @TestConfiguration public class SearchTestContainerConfiguration { - // This port is overridden by the specific test container instance - private static final int HTTP_PORT = 9200; - public static final int REFRESH_INTERVAL_SECONDS = 5; + // This port is overridden by the specific test container instance + private static final int HTTP_PORT = 9200; + public static final int REFRESH_INTERVAL_SECONDS = 5; - @Primary - @Bean(name = "searchRestHighLevelClient") - @Nonnull - public RestHighLevelClient getElasticsearchClient(@Qualifier("testSearchContainer") GenericContainer<?> searchContainer) { - // A helper method to create a search test container defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. - // A helper method to construct a standard rest client for search. - final RestClientBuilder builder = - RestClient.builder(new HttpHost( - "localhost", - searchContainer.getMappedPort(HTTP_PORT), "http") - ).setHttpClientConfigCallback(httpAsyncClientBuilder -> - httpAsyncClientBuilder.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build())); + @Primary + @Bean(name = "searchRestHighLevelClient") + @Nonnull + public RestHighLevelClient getElasticsearchClient( + @Qualifier("testSearchContainer") GenericContainer<?> searchContainer) { + // A helper method to create a search test container defaulting to the current image and + // version, with the ability + // within firewalled environments to override with an environment variable to point to the + // offline repository. + // A helper method to construct a standard rest client for search. + final RestClientBuilder builder = + RestClient.builder( + new HttpHost("localhost", searchContainer.getMappedPort(HTTP_PORT), "http")) + .setHttpClientConfigCallback( + httpAsyncClientBuilder -> + httpAsyncClientBuilder.setDefaultIOReactorConfig( + IOReactorConfig.custom().setIoThreadCount(1).build())); - builder.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder. - setConnectionRequestTimeout(30000)); + builder.setRequestConfigCallback( + requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(30000)); - return new RestHighLevelClient(builder); - } + return new RestHighLevelClient(builder); + } - /* - Cannot use the factory class without circular dependencies - */ - @Primary - @Bean(name = "searchBulkProcessor") - @Nonnull - public ESBulkProcessor getBulkProcessor(@Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) { - return ESBulkProcessor.builder(searchClient) - .async(true) - /* - * Force a refresh as part of this request. This refresh policy does not scale for high indexing or search throughput but is useful - * to present a consistent view to for indices with very low traffic. And it is wonderful for tests! - */ - .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .bulkRequestsLimit(10000) - .bulkFlushPeriod(REFRESH_INTERVAL_SECONDS - 1) - .retryInterval(1L) - .numRetries(1) - .build(); - } + /* + Cannot use the factory class without circular dependencies + */ + @Primary + @Bean(name = "searchBulkProcessor") + @Nonnull + public ESBulkProcessor getBulkProcessor( + @Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) { + return ESBulkProcessor.builder(searchClient) + .async(true) + /* + * Force a refresh as part of this request. 
This refresh policy does not scale for high indexing or search throughput but is useful
+   * to present a consistent view for indices with very low traffic. And it is wonderful for tests!
+   */
+        .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+        .bulkRequestsLimit(10000)
+        .bulkFlushPeriod(REFRESH_INTERVAL_SECONDS - 1)
+        .retryInterval(1L)
+        .numRetries(1)
+        .build();
+  }

-  @Primary
-  @Bean(name = "searchIndexBuilder")
-  @Nonnull
-  protected ESIndexBuilder getIndexBuilder(@Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
-    GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
-    return new ESIndexBuilder(searchClient, 1, 1, 3, 1, Map.of(),
-        false, false,
-        new ElasticSearchConfiguration(), gitVersion);
-  }
+  @Primary
+  @Bean(name = "searchIndexBuilder")
+  @Nonnull
+  protected ESIndexBuilder getIndexBuilder(
+      @Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
+    GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
+    return new ESIndexBuilder(
+        searchClient,
+        1,
+        1,
+        3,
+        1,
+        Map.of(),
+        false,
+        false,
+        new ElasticSearchConfiguration(),
+        gitVersion);
+  }
 }
diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
index 0e47202a9d237..f9684871f39e2 100644
--- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
+++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
@@ -10,26 +10,35 @@ import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.FilterType;

-
 @SuppressWarnings("checkstyle:HideUtilityClassConstructor")
-@SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class, CassandraAutoConfiguration.class,
-    SolrHealthContributorAutoConfiguration.class})
-@ComponentScan(basePackages = {
-    //"com.linkedin.gms.factory.config",
-    //"com.linkedin.gms.factory.common",
-    "com.linkedin.gms.factory.kafka",
-    "com.linkedin.metadata.boot.kafka",
-    "com.linkedin.metadata.kafka",
-    "com.linkedin.metadata.dao.producer",
-    "com.linkedin.gms.factory.config",
-    "com.linkedin.gms.factory.entity.update.indices"
-},
-    excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class),
-        @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SiblingGraphServiceFactory.class)}
-    )
+@SpringBootApplication(
+    exclude = {
+      ElasticsearchRestClientAutoConfiguration.class,
+      CassandraAutoConfiguration.class,
+      SolrHealthContributorAutoConfiguration.class
+    })
+@ComponentScan(
+    basePackages = {
+      // "com.linkedin.gms.factory.config",
+      // "com.linkedin.gms.factory.common",
+      "com.linkedin.gms.factory.kafka",
+      "com.linkedin.metadata.boot.kafka",
+      "com.linkedin.metadata.kafka",
+      "com.linkedin.metadata.dao.producer",
+      "com.linkedin.gms.factory.config",
+      "com.linkedin.gms.factory.entity.update.indices"
+    },
+    excludeFilters = {
+      @ComponentScan.Filter(
+          type = FilterType.ASSIGNABLE_TYPE,
+          classes = ScheduledAnalyticsFactory.class),
+      @ComponentScan.Filter(
+          type = FilterType.ASSIGNABLE_TYPE,
+          classes = SiblingGraphServiceFactory.class)
+    })
 public class MaeConsumerApplication {
   public static void main(String[] args) {
     Class<?>[] primarySources = {MaeConsumerApplication.class, 
MclConsumerConfig.class}; SpringApplication.run(primarySources, args); } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java index d0190279930fe..69288cec8220a 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static org.testng.AssertJUnit.*; + import com.linkedin.metadata.entity.EntityService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -7,15 +9,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - @ActiveProfiles("test") -@SpringBootTest(classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) public class MaeConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; @Test public void testMaeConsumerAutoWiring() { diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index aa097a52c8fc6..7135e4e44d459 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -18,30 +18,21 @@ @Import(value = {SystemAuthenticationFactory.class}) public class MaeConsumerApplicationTestConfiguration { - @MockBean - private KafkaHealthChecker kafkaHealthChecker; + @MockBean private KafkaHealthChecker kafkaHealthChecker; - @MockBean - private EntityServiceImpl _entityServiceImpl; + @MockBean private EntityServiceImpl _entityServiceImpl; - @MockBean - private SystemRestliEntityClient restliEntityClient; + @MockBean private SystemRestliEntityClient restliEntityClient; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - private GraphService _graphService; + @MockBean private GraphService _graphService; - @MockBean - private ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService; + @MockBean private ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService; - @MockBean - private ConfigEntityRegistry _configEntityRegistry; + @MockBean private ConfigEntityRegistry _configEntityRegistry; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java index fd15d36b109dd..1c7aa4fa22dd5 
100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java
@@ -22,7 +22,6 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component;
- @Slf4j @Component @EnableKafka @@ -34,17 +33,22 @@ public class DataHubUsageEventsProcessor { private final DataHubUsageEventTransformer dataHubUsageEventTransformer; private final String indexName;
- private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag"));
- public DataHubUsageEventsProcessor(ElasticsearchConnector elasticSearchConnector, - DataHubUsageEventTransformer dataHubUsageEventTransformer, IndexConvention indexConvention) { + public DataHubUsageEventsProcessor( + ElasticsearchConnector elasticSearchConnector, + DataHubUsageEventTransformer dataHubUsageEventTransformer, + IndexConvention indexConvention) { this.elasticSearchConnector = elasticSearchConnector; this.dataHubUsageEventTransformer = dataHubUsageEventTransformer; this.indexName = indexConvention.getIndexName("datahub_usage_event"); }
- @KafkaListener(id = "${DATAHUB_USAGE_EVENT_KAFKA_CONSUMER_GROUP_ID:datahub-usage-event-consumer-job-client}", topics = - "${DATAHUB_USAGE_EVENT_NAME:" + Topics.DATAHUB_USAGE_EVENT + "}", containerFactory = "simpleKafkaConsumer") + @KafkaListener( + id = "${DATAHUB_USAGE_EVENT_KAFKA_CONSUMER_GROUP_ID:datahub-usage-event-consumer-job-client}", + topics = "${DATAHUB_USAGE_EVENT_NAME:" + Topics.DATAHUB_USAGE_EVENT + "}", + containerFactory = "simpleKafkaConsumer") public void consume(final ConsumerRecord<String, String> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final String record = consumerRecord.value(); @@ -64,16 +68,20 @@ public void consume(final ConsumerRecord<String, String> consumerRecord) { }
/** - * DataHub Usage Event is written to an append-only index called a data stream. Due to circumstances - * it is possible that the event's id, even though it contains an epoch millisecond, results in duplicate ids - * in the index. The collisions will stall processing of the topic. To prevent the collisions we append - * the last 5 digits, padded with zeros, of the kafka offset to prevent the collision. + * DataHub Usage Event is written to an append-only index called a data stream. Due to + * circumstances it is possible that the event's id, even though it contains an epoch millisecond, + * results in duplicate ids in the index. The collisions will stall processing of the topic. To + * prevent the collisions we append the last 5 digits of the kafka offset, padded with zeros, to + * the event's id, as sketched below.
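[Editor's note: the id scheme in the hunk above is compact enough to sketch standalone. The class name below is invented for illustration, and since the body of leastSignificant() is not shown in this hunk, the modulo implementation is an assumption; only the JDK is used.]

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

class UsageEventDocumentIdSketch {
  // Assumed reading of leastSignificant(): keep only the last `digits` decimal
  // digits of the offset, e.g. 9876543 -> 76543 when digits == 5.
  static int leastSignificant(long kafkaOffset, int digits) {
    long mod = 1;
    for (int i = 0; i < digits; i++) {
      mod *= 10;
    }
    return (int) (kafkaOffset % mod);
  }

  // "evt-123" consumed at offset 42 becomes "evt-123_00042" before URL-encoding,
  // so a duplicate event id at a different offset still indexes as a distinct document.
  static String generateDocumentId(String eventId, long kafkaOffset) {
    return URLEncoder.encode(
        String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)),
        StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    System.out.println(generateDocumentId("evt-123", 9876543L)); // evt-123_76543
  }
}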
+ * * @param eventId the event's id * @param kafkaOffset the kafka offset for the message * @return unique identifier for event */ private static String generateDocumentId(String eventId, long kafkaOffset) { - return URLEncoder.encode(String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)), StandardCharsets.UTF_8); + return URLEncoder.encode( + String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)), + StandardCharsets.UTF_8); } private static int leastSignificant(long kafkaOffset, int digits) { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java index d8cd49a736511..686e2a816ffb5 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -13,9 +15,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static com.linkedin.metadata.Constants.*; - - @Controller @Import(GitVersionFactory.class) public class MclConsumerConfig { @@ -24,10 +23,15 @@ public class MclConsumerConfig { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public MclConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java index 796f570a1732e..479617f0b6a82 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.Topics; - import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -29,47 +28,56 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(MetadataChangeLogProcessorCondition.class) @Import({ - UpdateIndicesHook.class, - IngestionSchedulerHook.class, - EntityChangeEventGeneratorHook.class, - KafkaEventConsumerFactory.class, - SiblingAssociationHook.class + UpdateIndicesHook.class, + IngestionSchedulerHook.class, + EntityChangeEventGeneratorHook.class, + KafkaEventConsumerFactory.class, + SiblingAssociationHook.class }) 
@EnableKafka public class MetadataChangeLogProcessor { - @Getter - private final List<MetadataChangeLogHook> hooks; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + @Getter private final List<MetadataChangeLogHook> hooks; + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); @Autowired public MetadataChangeLogProcessor(List<MetadataChangeLogHook> metadataChangeLogHooks) { - this.hooks = metadataChangeLogHooks.stream() + this.hooks = + metadataChangeLogHooks.stream() .filter(MetadataChangeLogHook::isEnabled) .sorted(Comparator.comparing(MetadataChangeLogHook::executionOrder)) .collect(Collectors.toList()); this.hooks.forEach(MetadataChangeLogHook::init); } - @KafkaListener(id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}", topics = { - "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}", - "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES - + "}"}, containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}", + topics = { + "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}", + "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}" + }, + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); - log.debug("Got Generic MCL on topic: {}, partition: {}, offset: {}", consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset()); + log.debug( + "Got Generic MCL on topic: {}, partition: {}, offset: {}", + consumerRecord.topic(), + consumerRecord.partition(), + consumerRecord.offset()); MetricUtils.counter(this.getClass(), "received_mcl_count").inc(); MetadataChangeLog event; try { event = EventUtils.avroToPegasusMCL(record); - log.debug("Successfully converted Avro MCL to Pegasus MCL. urn: {}, key: {}", event.getEntityUrn(), + log.debug( + "Successfully converted Avro MCL to Pegasus MCL. urn: {}, key: {}", + event.getEntityUrn(), event.getEntityKeyAspect()); } catch (Exception e) { MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc(); @@ -78,15 +86,18 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) return; } - log.debug("Invoking MCL hooks for urn: {}, key: {}", event.getEntityUrn(), event.getEntityKeyAspect()); + log.debug( + "Invoking MCL hooks for urn: {}, key: {}", + event.getEntityUrn(), + event.getEntityKeyAspect()); // Here - plug in additional "custom processor hooks" for (MetadataChangeLogHook hook : this.hooks) { if (!hook.isEnabled()) { continue; } - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") - .time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency").time()) { hook.invoke(event); } catch (Exception e) { // Just skip this hook and continue. - Note that this represents "at most once" processing. @@ -96,7 +107,9 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) } // TODO: Manually commit kafka offsets after full processing. 
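[Editor's note: the hook loop above packs two decisions into one idiom: per-hook latency is captured with a try-with-resources timer, and a failing hook is skipped without retry, which is precisely what makes processing "at most once". A stripped-down sketch of just that pattern, using Dropwizard Metrics directly; the Hook interface is a stand-in, not the project's MetadataChangeLogHook type.]

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import java.util.List;

class HookDispatchSketch {
  // Stand-in for MetadataChangeLogHook: a name plus one invocation method.
  interface Hook {
    String name();
    void invoke(Object event) throws Exception;
  }

  private final MetricRegistry metrics = new MetricRegistry();

  void dispatch(List<Hook> hooks, Object event) {
    for (Hook hook : hooks) {
      // Timer.Context is Closeable, so try-with-resources records the hook's
      // latency whether it succeeds or throws.
      try (Timer.Context ignored = metrics.timer(hook.name() + "_latency").time()) {
        hook.invoke(event);
      } catch (Exception e) {
        // Skipping the failed hook (no retry, no rethrow) is what makes this
        // "at most once" per message.
      }
    }
  }
}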
MetricUtils.counter(this.getClass(), "consumed_mcl_count").inc(); - log.debug("Successfully completed MCL hooks for urn: {}, key: {}", event.getEntityUrn(), + log.debug( + "Successfully completed MCL hooks for urn: {}, key: {}", + event.getEntityUrn(), event.getEntityKeyAspect()); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java index b8334cd7fac27..f70eaf6084a00 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.boot.BootstrapManager; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.kafka.config.MetadataChangeLogProcessorCondition; - import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; @@ -14,23 +13,22 @@ import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; - -/** - * Responsible for coordinating starting steps that happen before the application starts up. - */ +/** Responsible for coordinating starting steps that happen before the application starts up. */ @Slf4j @Component @Conditional(MetadataChangeLogProcessorCondition.class) public class ApplicationStartupListener implements ApplicationListener<ContextRefreshedEvent> { - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final DataHubUpgradeKafkaListener _dataHubUpgradeKafkaListener; private final ConfigurationProvider _configurationProvider; private final BootstrapManager _mclBootstrapManager; public ApplicationStartupListener( - @Qualifier("dataHubUpgradeKafkaListener") DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, + @Qualifier("dataHubUpgradeKafkaListener") + DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, ConfigurationProvider configurationProvider, @Qualifier("mclBootstrapManager") BootstrapManager bootstrapManager) { _dataHubUpgradeKafkaListener = dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java index 9235a1d98014c..8ad1638115dae 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; - @Configuration @Conditional(MetadataChangeLogProcessorCondition.class) public class MCLBootstrapManagerFactory { @@ -26,8 +25,7 @@ public class MCLBootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; 
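[Editor's note: the factory being modified here assembles a BootstrapManager whose only step waits for DataHub's system-update job before consumption begins. BootstrapManager's internals are not shown in this patch, so the following is a hedged sketch of the ordered-step idea with invented types.]

import java.util.List;

class BootstrapSketch {
  // Invented minimal step contract, mirroring how WaitForSystemUpdateStep is used above.
  interface BootstrapStep {
    String name();
    void execute() throws Exception;
  }

  // Steps run strictly in order and a failure aborts startup, so later code can
  // assume every earlier precondition (e.g. "system update finished") holds.
  static void runSteps(List<BootstrapStep> steps) throws Exception {
    for (BootstrapStep step : steps) {
      step.execute();
    }
  }
}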
@Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -36,8 +34,8 @@ public class MCLBootstrapManagerFactory { @Scope("singleton") @Nonnull protected BootstrapManager createInstance() { - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final List<BootstrapStep> finalSteps = ImmutableList.of(waitForSystemUpdateStep); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java index 0413cd09c36b7..90069f5a56c39 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java @@ -5,15 +5,12 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class DataHubUsageEventsProcessorCondition implements Condition { @Override - public boolean matches( - ConditionContext context, - AnnotatedTypeMetadata metadata) { + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) && ( - env.getProperty("DATAHUB_ANALYTICS_ENABLED") == null + return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) + && (env.getProperty("DATAHUB_ANALYTICS_ENABLED") == null || "true".equals(env.getProperty("DATAHUB_ANALYTICS_ENABLED"))); } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java index a9e54e5354b42..036968f9f6759 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java @@ -11,7 +11,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({RestliEntityClientFactory.class}) public class EntityHydratorConfig { @@ -20,13 +19,16 @@ public class EntityHydratorConfig { @Qualifier("systemRestliEntityClient") private SystemRestliEntityClient _entityClient; - @Autowired - private EntityRegistry _entityRegistry; + @Autowired private EntityRegistry _entityRegistry; - public final static ImmutableSet<String> EXCLUDED_ASPECTS = ImmutableSet.<String>builder() + public static final ImmutableSet<String> EXCLUDED_ASPECTS = + ImmutableSet.<String>builder() .add("datasetUpstreamLineage", "upstreamLineage") .add("dataJobInputOutput") - .add("dataProcessInstanceRelationships", "dataProcessInstanceInput", "dataProcessInstanceOutput") + .add( + "dataProcessInstanceRelationships", + "dataProcessInstanceInput", + "dataProcessInstanceOutput") .add("inputFields") .build(); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java 
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java index 4d7e60b74c858..db1c0b1a87541 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java @@ -5,12 +5,11 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class MetadataChangeLogProcessorCondition implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) || "true".equals( - env.getProperty("MCL_CONSUMER_ENABLED")); + return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) + || "true".equals(env.getProperty("MCL_CONSUMER_ENABLED")); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java index b0fade24e26ad..d757feef5aa95 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java @@ -15,4 +15,4 @@ public abstract class ElasticEvent { public XContentBuilder buildJson() { return null; } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java index bea75f7b282ee..5b5a4ab072109 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java @@ -1,9 +1,8 @@ package com.linkedin.metadata.kafka.elasticsearch; import com.linkedin.events.metadata.ChangeType; -import javax.annotation.Nonnull; - import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.delete.DeleteRequest; @@ -11,7 +10,6 @@ import org.opensearch.action.update.UpdateRequest; import org.opensearch.common.xcontent.XContentType; - @Slf4j public class ElasticsearchConnector { @@ -38,7 +36,8 @@ public void feedElasticEvent(@Nonnull ElasticEvent event) { @Nonnull private static IndexRequest createIndexRequest(@Nonnull ElasticEvent event) { - return new IndexRequest(event.getIndex()).id(event.getId()) + return new IndexRequest(event.getIndex()) + .id(event.getId()) .source(event.buildJson()) .opType(DocWriteRequest.OpType.CREATE); } @@ -50,12 +49,10 @@ private static DeleteRequest createDeleteRequest(@Nonnull ElasticEvent event) { @Nonnull private UpdateRequest createUpsertRequest(@Nonnull ElasticEvent event) { - return new UpdateRequest( - event.getIndex(), event.getId()) - .detectNoop(false) - .docAsUpsert(true) - .doc(event.buildJson(), XContentType.JSON) - .retryOnConflict(_numRetries); + return new UpdateRequest(event.getIndex(), event.getId()) + .detectNoop(false) + .docAsUpsert(true) + .doc(event.buildJson(), 
XContentType.JSON) + .retryOnConflict(_numRetries); } } - diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java index a3672975e42e6..884d74d3cd647 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.kafka.elasticsearch; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -8,9 +9,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - -import javax.annotation.Nonnull; - // TODO: Move this factory. @Slf4j @Configuration @@ -27,5 +25,4 @@ public class ElasticsearchConnectorFactory { public ElasticsearchConnector createInstance() { return new ElasticsearchConnector(bulkProcessor, numRetries); } - -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java index 230cd8433e6ff..d97290975ae26 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java @@ -1,14 +1,13 @@ package com.linkedin.metadata.kafka.elasticsearch; +import java.io.IOException; +import javax.annotation.Nullable; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; - -import java.io.IOException; -import javax.annotation.Nullable; public class JsonElasticEvent extends ElasticEvent { private final String _document; @@ -23,8 +22,12 @@ public XContentBuilder buildJson() { XContentBuilder builder = null; try { builder = XContentFactory.jsonBuilder().prettyPrint(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, _document); + XContentParser parser = + XContentFactory.xContent(XContentType.JSON) + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + _document); builder.copyCurrentStructure(parser); } catch (IOException e) { e.printStackTrace(); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java index a3d6dca75068b..83d44cf609a41 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java +++ 
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java
@@ -1,17 +1,15 @@ package com.linkedin.metadata.kafka.elasticsearch;
-import com.linkedin.data.template.RecordTemplate; import com.datahub.util.RecordUtils; +import com.linkedin.data.template.RecordTemplate; +import java.io.IOException; +import javax.annotation.Nullable; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; - -import java.io.IOException; -import javax.annotation.Nullable; - public class MCEElasticEvent extends ElasticEvent {
@@ -28,8 +26,12 @@ public XContentBuilder buildJson() { try { String jsonString = RecordUtils.toJsonString(this._doc); builder = XContentFactory.jsonBuilder().prettyPrint(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, jsonString); + XContentParser parser = + XContentFactory.xContent(XContentType.JSON) + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + jsonString); builder.copyCurrentStructure(parser); } catch (IOException e) { e.printStackTrace();
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java
index 39b47768a6dcf..f7e110f53a019 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java
@@ -3,35 +3,32 @@ import com.linkedin.mxe.MetadataChangeLog; import javax.annotation.Nonnull;
- /** * Custom hook which is invoked on receiving a new {@link MetadataChangeLog} event. * - * The semantics of this hook are currently "at most once". That is, the hook will not be called + * <p>The semantics of this hook are currently "at most once". That is, the hook will not be called more than once with the same message. In the future, we intend to migrate to "at least once" semantics, meaning * that the hook will be responsible for implementing idempotency. */ public interface MetadataChangeLogHook {
- /** - * Initialize the hook - */ - default void init() { } + /** Initialize the hook */ + default void init() {}
/** - * Return whether the hook is enabled or not. If not enabled, the below invoke method is not triggered + * Return whether the hook is enabled or not.
If not enabled, the below invoke method is not + * triggered */ default boolean isEnabled() { return true; } - /** - * Invoke the hook when a MetadataChangeLog is received - */ + /** Invoke the hook when a MetadataChangeLog is received */ void invoke(@Nonnull MetadataChangeLog log) throws Exception; /** * Controls hook execution ordering + * * @return order to execute */ default int executionOrder() { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java index 78c87ec8f4b3b..019d6b898ae6b 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.gms.factory.common.GraphServiceFactory; import com.linkedin.gms.factory.common.SystemMetadataServiceFactory; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; @@ -14,14 +16,17 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; -import static com.linkedin.metadata.Constants.*; - - // TODO: Backfill tests for this class in UpdateIndicesHookTest.java @Slf4j @Component -@Import({GraphServiceFactory.class, EntitySearchServiceFactory.class, TimeseriesAspectServiceFactory.class, - EntityRegistryFactory.class, SystemMetadataServiceFactory.class, SearchDocumentTransformerFactory.class}) +@Import({ + GraphServiceFactory.class, + EntitySearchServiceFactory.class, + TimeseriesAspectServiceFactory.class, + EntityRegistryFactory.class, + SystemMetadataServiceFactory.class, + SearchDocumentTransformerFactory.class +}) public class UpdateIndicesHook implements MetadataChangeLogHook { protected final UpdateIndicesService _updateIndicesService; @@ -44,7 +49,8 @@ public void invoke(@Nonnull final MetadataChangeLog event) { if (event.getSystemMetadata() != null) { if (event.getSystemMetadata().getProperties() != null) { if (UI_SOURCE.equals(event.getSystemMetadata().getProperties().get(APP_SOURCE))) { - // If coming from the UI, we pre-process the Update Indices hook as a fast path to avoid Kafka lag + // If coming from the UI, we pre-process the Update Indices hook as a fast path to avoid + // Kafka lag return; } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java index 3b65ecccad336..08790b1be3319 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java @@ -36,19 +36,20 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; - /** - * A {@link MetadataChangeLogHook} responsible for generating Entity Change Events - * to the Platform Events topic. + * A {@link MetadataChangeLogHook} responsible for generating Entity Change Events to the Platform + * Events topic. 
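[Editor's note: the MetadataChangeLogHook contract shown above is small enough that a complete, if toy, implementation fits in a few lines. Everything here is illustrative: the interface is re-declared locally so the snippet compiles standalone, and the event type is reduced to Object in place of the Pegasus MetadataChangeLog class.]

class CustomHookSketch {
  // Local re-declaration of the contract above, reduced to Object events.
  interface MetadataChangeLogHook {
    default void init() {}
    default boolean isEnabled() { return true; }
    void invoke(Object event) throws Exception;
    default int executionOrder() { return 100; }
  }

  // invoke() is the single abstract method, so a lambda satisfies the interface.
  static final MetadataChangeLogHook LOGGING_HOOK =
      event -> System.out.println("MCL received: " + event);

  public static void main(String[] args) throws Exception {
    if (LOGGING_HOOK.isEnabled()) {
      LOGGING_HOOK.invoke("example-event");
    }
  }
}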
*/ @Slf4j @Component -@Import({EntityChangeEventGeneratorRegistry.class, EntityRegistryFactory.class, RestliEntityClientFactory.class}) +@Import({ + EntityChangeEventGeneratorRegistry.class, + EntityRegistryFactory.class, + RestliEntityClientFactory.class +}) public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { - /** - * The list of aspects that are supported for generating semantic change events. - */ + /** The list of aspects that are supported for generating semantic change events. */ private static final Set<String> SUPPORTED_ASPECT_NAMES = ImmutableSet.of( Constants.GLOBAL_TAGS_ASPECT_NAME, @@ -74,10 +75,11 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { Constants.DOMAIN_KEY_ASPECT_NAME, Constants.TAG_KEY_ASPECT_NAME, Constants.STATUS_ASPECT_NAME); - /** - * The list of change types that are supported for generating semantic change events. - */ - private static final Set<String> SUPPORTED_OPERATIONS = ImmutableSet.of("CREATE", "UPSERT", "DELETE"); + + /** The list of change types that are supported for generating semantic change events. */ + private static final Set<String> SUPPORTED_OPERATIONS = + ImmutableSet.of("CREATE", "UPSERT", "DELETE"); + private final EntityChangeEventGeneratorRegistry _entityChangeEventGeneratorRegistry; private final SystemRestliEntityClient _entityClient; private final EntityRegistry _entityRegistry; @@ -89,7 +91,8 @@ public EntityChangeEventGeneratorHook( @Nonnull final SystemRestliEntityClient entityClient, @Nonnull final EntityRegistry entityRegistry, @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled) { - _entityChangeEventGeneratorRegistry = Objects.requireNonNull(entityChangeEventGeneratorRegistry); + _entityChangeEventGeneratorRegistry = + Objects.requireNonNull(entityChangeEventGeneratorRegistry); _entityClient = Objects.requireNonNull(entityClient); _entityRegistry = Objects.requireNonNull(entityRegistry); _isEnabled = isEnabled; @@ -108,41 +111,46 @@ public void invoke(@Nonnull final MetadataChangeLog logEvent) throws Exception { // 2. Find and invoke a EntityChangeEventGenerator. // 3. Sink the output of the EntityChangeEventGenerator to a specific PDL change event. final AspectSpec aspectSpec = - _entityRegistry.getEntitySpec(logEvent.getEntityType()).getAspectSpec(logEvent.getAspectName()); + _entityRegistry + .getEntitySpec(logEvent.getEntityType()) + .getAspectSpec(logEvent.getAspectName()); assert aspectSpec != null; - final RecordTemplate fromAspect = logEvent.getPreviousAspectValue() != null - ? GenericRecordUtils.deserializeAspect( - logEvent.getPreviousAspectValue().getValue(), - logEvent.getPreviousAspectValue().getContentType(), - aspectSpec) - : null; - - final RecordTemplate toAspect = logEvent.getAspect() != null - ? GenericRecordUtils.deserializeAspect( - logEvent.getAspect().getValue(), - logEvent.getAspect().getContentType(), - aspectSpec) - : null; - - final List<ChangeEvent> changeEvents = generateChangeEvents( - logEvent.getEntityUrn(), - logEvent.getEntityType(), - logEvent.getAspectName(), - createAspect(fromAspect, logEvent.getPreviousSystemMetadata()), - createAspect(toAspect, logEvent.getSystemMetadata()), - logEvent.getCreated() - ); + final RecordTemplate fromAspect = + logEvent.getPreviousAspectValue() != null + ? 
GenericRecordUtils.deserializeAspect( + logEvent.getPreviousAspectValue().getValue(), + logEvent.getPreviousAspectValue().getContentType(), + aspectSpec) + : null; + + final RecordTemplate toAspect = + logEvent.getAspect() != null + ? GenericRecordUtils.deserializeAspect( + logEvent.getAspect().getValue(), + logEvent.getAspect().getContentType(), + aspectSpec) + : null; + + final List<ChangeEvent> changeEvents = + generateChangeEvents( + logEvent.getEntityUrn(), + logEvent.getEntityType(), + logEvent.getAspectName(), + createAspect(fromAspect, logEvent.getPreviousSystemMetadata()), + createAspect(toAspect, logEvent.getSystemMetadata()), + logEvent.getCreated()); // Iterate through each transaction, emit change events as platform events. for (final ChangeEvent event : changeEvents) { PlatformEvent platformEvent = buildPlatformEvent(event); emitPlatformEvent( platformEvent, - String.format("%s-%s", Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, event.getEntityUrn()) - ); - log.debug("Successfully emitted change event. category: {}, operation: {}, entity urn: {}", + String.format( + "%s-%s", Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, event.getEntityUrn())); + log.debug( + "Successfully emitted change event. category: {}, operation: {}, entity urn: {}", event.getCategory(), event.getOperation(), event.getEntityUrn()); @@ -156,35 +164,30 @@ private <T extends RecordTemplate> List<ChangeEvent> generateChangeEvents( @Nonnull final String aspectName, @Nonnull final Aspect from, @Nonnull final Aspect to, - @Nonnull AuditStamp auditStamp - ) { + @Nonnull AuditStamp auditStamp) { final List<EntityChangeEventGenerator<T>> entityChangeEventGenerators = - _entityChangeEventGeneratorRegistry - .getEntityChangeEventGenerators(aspectName) - .stream() + _entityChangeEventGeneratorRegistry.getEntityChangeEventGenerators(aspectName).stream() // Note: Assumes that correct types have been registered for the aspect. .map(changeEventGenerator -> (EntityChangeEventGenerator<T>) changeEventGenerator) .collect(Collectors.toList()); final List<ChangeEvent> allChangeEvents = new ArrayList<>(); for (EntityChangeEventGenerator<T> entityChangeEventGenerator : entityChangeEventGenerators) { allChangeEvents.addAll( - entityChangeEventGenerator.getChangeEvents(urn, entityName, aspectName, from, to, auditStamp)); + entityChangeEventGenerator.getChangeEvents( + urn, entityName, aspectName, from, to, auditStamp)); } return allChangeEvents; } private boolean isEligibleForProcessing(final MetadataChangeLog log) { - return SUPPORTED_OPERATIONS.contains(log.getChangeType().toString()) && SUPPORTED_ASPECT_NAMES.contains( - log.getAspectName()); + return SUPPORTED_OPERATIONS.contains(log.getChangeType().toString()) + && SUPPORTED_ASPECT_NAMES.contains(log.getAspectName()); } - private void emitPlatformEvent(@Nonnull final PlatformEvent event, @Nonnull final String partitioningKey) - throws Exception { + private void emitPlatformEvent( + @Nonnull final PlatformEvent event, @Nonnull final String partitioningKey) throws Exception { _entityClient.producePlatformEvent( - Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, - partitioningKey, - event - ); + Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, partitioningKey, event); } private PlatformEvent buildPlatformEvent(final ChangeEvent rawChangeEvent) { @@ -193,14 +196,15 @@ private PlatformEvent buildPlatformEvent(final ChangeEvent rawChangeEvent) { // 2. 
Build platform event PlatformEvent platformEvent = new PlatformEvent(); platformEvent.setName(Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME); - platformEvent.setHeader(new PlatformEventHeader().setTimestampMillis(rawChangeEvent.getAuditStamp().getTime())); + platformEvent.setHeader( + new PlatformEventHeader().setTimestampMillis(rawChangeEvent.getAuditStamp().getTime())); platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent)); return platformEvent; } /** - * Thin mapping from internal Timeline API {@link ChangeEvent} to Kafka Platform Event {@link ChangeEvent}, which serves as a public - * API for outbound consumption. + * Thin mapping from internal Timeline API {@link ChangeEvent} to Kafka Platform Event {@link + * ChangeEvent}, which serves as a public API for outbound consumption. */ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeEvent) { com.linkedin.platform.event.v1.EntityChangeEvent changeEvent = @@ -216,7 +220,8 @@ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeE changeEvent.setAuditStamp(rawChangeEvent.getAuditStamp()); changeEvent.setVersion(0); if (rawChangeEvent.getParameters() != null) { - // This map should ideally contain only primitives at the leaves - integers, floats, booleans, strings. + // This map should ideally contain only primitives at the leaves - integers, floats, + // booleans, strings. changeEvent.setParameters(new Parameters(new DataMap(rawChangeEvent.getParameters()))); } return changeEvent; @@ -225,7 +230,8 @@ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeE } } - private Aspect createAspect(@Nullable final RecordTemplate value, @Nullable final SystemMetadata systemMetadata) { + private Aspect createAspect( + @Nullable final RecordTemplate value, @Nullable final SystemMetadata systemMetadata) { return new Aspect(value, systemMetadata); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java index 1a3febb623314..82f1de0a889bf 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java @@ -22,10 +22,9 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; - /** - * This hook updates a stateful {@link IngestionScheduler} of Ingestion Runs for Ingestion Sources defined - * within DataHub. + * This hook updates a stateful {@link IngestionScheduler} of Ingestion Runs for Ingestion Sources + * defined within DataHub. */ @Slf4j @Component @@ -41,8 +40,7 @@ public class IngestionSchedulerHook implements MetadataChangeLogHook { public IngestionSchedulerHook( @Nonnull final EntityRegistry entityRegistry, @Nonnull final IngestionScheduler scheduler, - @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled - ) { + @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled) { _entityRegistry = entityRegistry; _scheduler = scheduler; _isEnabled = isEnabled; @@ -62,7 +60,8 @@ public void init() { public void invoke(@Nonnull MetadataChangeLog event) { if (isEligibleForProcessing(event)) { - log.info("Received {} to Ingestion Source. Rescheduling the source (if applicable). 
urn: {}, key: {}.", + log.info( + "Received {} to Ingestion Source. Rescheduling the source (if applicable). urn: {}, key: {}.", event.getChangeType(), event.getEntityUrn(), event.getEntityKeyAspect()); @@ -80,8 +79,9 @@ public void invoke(@Nonnull MetadataChangeLog event) { } /** - * Returns true if the event should be processed, which is only true if the event represents a create, update, or delete - * of an Ingestion Source Info aspect, which in turn contains the schedule associated with the source. + * Returns true if the event should be processed, which is only true if the event represents a + * create, update, or delete of an Ingestion Source Info aspect, which in turn contains the + * schedule associated with the source. */ private boolean isEligibleForProcessing(final MetadataChangeLog event) { return isIngestionSourceUpdate(event) || isIngestionSourceDeleted(event); @@ -90,8 +90,8 @@ private boolean isEligibleForProcessing(final MetadataChangeLog event) { private boolean isIngestionSourceUpdate(final MetadataChangeLog event) { return Constants.INGESTION_INFO_ASPECT_NAME.equals(event.getAspectName()) && (ChangeType.UPSERT.equals(event.getChangeType()) - || ChangeType.CREATE.equals(event.getChangeType()) - || ChangeType.DELETE.equals(event.getChangeType())); + || ChangeType.CREATE.equals(event.getChangeType()) + || ChangeType.DELETE.equals(event.getChangeType())); } private boolean isIngestionSourceDeleted(final MetadataChangeLog event) { @@ -100,8 +100,8 @@ private boolean isIngestionSourceDeleted(final MetadataChangeLog event) { } /** - * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an entityUrn - * or entityKey field, depending on which is present. + * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an + * entityUrn or entityKey field, depending on which is present. */ private Urn getUrnFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -109,15 +109,17 @@ private Urn getUrnFromEvent(final MetadataChangeLog event) { entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get urn from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get urn from MetadataChangeLog event. Skipping processing.", e); } // Extract an URN from the Log Event. return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec()); } /** - * Deserializes and returns an instance of {@link DataHubIngestionSourceInfo} extracted from a {@link MetadataChangeLog} event. - * The incoming event is expected to have a populated "aspect" field. + * Deserializes and returns an instance of {@link DataHubIngestionSourceInfo} extracted from a + * {@link MetadataChangeLog} event. The incoming event is expected to have a populated "aspect" + * field. */ private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -125,12 +127,15 @@ private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog even entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get Ingestion Source info from MetadataChangeLog event. 
Skipping processing.", e); + throw new RuntimeException( + "Failed to get Ingestion Source info from MetadataChangeLog event. Skipping processing.", + e); } - return (DataHubIngestionSourceInfo) GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - entitySpec.getAspectSpec(Constants.INGESTION_INFO_ASPECT_NAME)); + return (DataHubIngestionSourceInfo) + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), + event.getAspect().getContentType(), + entitySpec.getAspectSpec(Constants.INGESTION_INFO_ASPECT_NAME)); } @VisibleForTesting diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 064f987ff1ba9..67198d13772a3 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook.siblings; +import static com.linkedin.metadata.Constants.*; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,6 +24,12 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.utils.EntityKeyUtils; @@ -42,26 +50,19 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; -import com.linkedin.metadata.query.filter.Condition; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; -import com.linkedin.metadata.query.filter.Filter; - -import static com.linkedin.metadata.Constants.*; - - -/** - * This hook associates dbt datasets with their sibling entities - */ +/** This hook associates dbt datasets with their sibling entities */ @Slf4j @Component @Singleton -@Import({EntityRegistryFactory.class, RestliEntityClientFactory.class, EntitySearchServiceFactory.class}) +@Import({ + EntityRegistryFactory.class, + RestliEntityClientFactory.class, + EntitySearchServiceFactory.class +}) public class SiblingAssociationHook implements MetadataChangeLogHook { - public static final String SIBLING_ASSOCIATION_SYSTEM_ACTOR = "urn:li:corpuser:__datahub_system_sibling_hook"; + public static final String SIBLING_ASSOCIATION_SYSTEM_ACTOR = + "urn:li:corpuser:__datahub_system_sibling_hook"; public static final String DBT_PLATFORM_NAME = "dbt"; // Older dbt sources produced lowercase subtypes, whereas we now @@ -80,8 +81,7 @@ public SiblingAssociationHook( @Nonnull final EntityRegistry entityRegistry, @Nonnull final 
SystemRestliEntityClient entityClient, @Nonnull final EntitySearchService searchService, - @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled - ) { + @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) { _entityRegistry = entityRegistry; _entityClient = entityClient; _searchService = searchService; @@ -97,8 +97,7 @@ void setEnabled(Boolean newValue) { } @Override - public void init() { - } + public void init() {} @Override public boolean isEnabled() { @@ -135,28 +134,38 @@ public void invoke(@Nonnull MetadataChangeLog event) { private void handleEntityKeyEvent(DatasetUrn datasetUrn) { Filter entitiesWithYouAsSiblingFilter = createFilterForEntitiesWithYouAsSibling(datasetUrn); - final SearchResult searchResult = _searchService.search( - List.of(DATASET_ENTITY_NAME), - "*", - entitiesWithYouAsSiblingFilter, - null, - 0, - 10, + final SearchResult searchResult = + _searchService.search( + List.of(DATASET_ENTITY_NAME), + "*", + entitiesWithYouAsSiblingFilter, + null, + 0, + 10, new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); // we have a match of an entity with you as a sibling, associate yourself back - searchResult.getEntities().forEach(entity -> { - if (!entity.getEntity().equals(datasetUrn)) { - if (datasetUrn.getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) { - setSiblingsAndSoftDeleteSibling(datasetUrn, searchResult.getEntities().get(0).getEntity()); - } else { - setSiblingsAndSoftDeleteSibling(searchResult.getEntities().get(0).getEntity(), datasetUrn); - } - } - }); + searchResult + .getEntities() + .forEach( + entity -> { + if (!entity.getEntity().equals(datasetUrn)) { + if (datasetUrn + .getPlatformEntity() + .getPlatformNameEntity() + .equals(DBT_PLATFORM_NAME)) { + setSiblingsAndSoftDeleteSibling( + datasetUrn, searchResult.getEntities().get(0).getEntity()); + } else { + setSiblingsAndSoftDeleteSibling( + searchResult.getEntities().get(0).getEntity(), datasetUrn); + } + } + }); } - // If the upstream is a single source system node & subtype is source, then associate the upstream as your sibling + // If the upstream is a single source system node & subtype is source, then associate the upstream + // as your sibling private void handleDbtDatasetEvent(MetadataChangeLog event, DatasetUrn datasetUrn) { // we need both UpstreamLineage & Subtypes to determine whether to associate UpstreamLineage upstreamLineage = null; @@ -172,41 +181,54 @@ private void handleDbtDatasetEvent(MetadataChangeLog event, DatasetUrn datasetUr upstreamLineage = getUpstreamLineageFromEntityClient(datasetUrn); } - if ( - upstreamLineage != null - && subTypesAspectOfEntity != null - && upstreamLineage.hasUpstreams() - && subTypesAspectOfEntity.hasTypeNames() - && (subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V1) - || subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V2)) - ) { + if (upstreamLineage != null + && subTypesAspectOfEntity != null + && upstreamLineage.hasUpstreams() + && subTypesAspectOfEntity.hasTypeNames() + && (subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V1) + || subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V2))) { UpstreamArray upstreams = upstreamLineage.getUpstreams(); - if ( - upstreams.size() == 1 - && !upstreams.get(0).getDataset().getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) { + if (upstreams.size() == 1 + && !upstreams + .get(0) + .getDataset() + .getPlatformEntity() + .getPlatformNameEntity() + .equals(DBT_PLATFORM_NAME)) 
{ setSiblingsAndSoftDeleteSibling(datasetUrn, upstreams.get(0).getDataset()); } } }
- // if the dataset is not dbt--- it may be produced by a dbt dataset. If so, associate them as siblings + // if the dataset is not dbt, it may be produced by a dbt dataset. If so, associate them as + // siblings private void handleSourceDatasetEvent(MetadataChangeLog event, DatasetUrn sourceUrn) { if (event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME)) { UpstreamLineage upstreamLineage = getUpstreamLineageFromEvent(event); if (upstreamLineage != null && upstreamLineage.hasUpstreams()) { UpstreamArray upstreams = upstreamLineage.getUpstreams();
- // an entity can have merged lineage (eg. dbt + snowflake), but by default siblings are only between dbt <> non-dbt - UpstreamArray dbtUpstreams = new UpstreamArray( - upstreams.stream() - .filter(obj -> obj.getDataset().getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) - .collect(Collectors.toList()) - ); - // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt model + // an entity can have merged lineage (eg. dbt + snowflake), but by default siblings are only + // between dbt <> non-dbt + UpstreamArray dbtUpstreams = + new UpstreamArray( + upstreams.stream() + .filter( + obj -> + obj.getDataset() + .getPlatformEntity() + .getPlatformNameEntity() + .equals(DBT_PLATFORM_NAME)) + .collect(Collectors.toList())); + // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt + // model if (dbtUpstreams.size() == 1) { setSiblingsAndSoftDeleteSibling(dbtUpstreams.get(0).getDataset(), sourceUrn); } else if (dbtUpstreams.size() > 1) { - log.error("{} has an unexpected number of dbt upstreams: {}. Not adding any as siblings.", sourceUrn.toString(), dbtUpstreams.size()); + log.error( + "{} has an unexpected number of dbt upstreams: {}. Not adding any as siblings.", + sourceUrn.toString(), + dbtUpstreams.size()); } } }
@@ -218,12 +240,10 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { log.info("Associating {} and {} as siblings.", dbtUrn.toString(), sourceUrn.toString());
- if ( - existingDbtSiblingAspect != null - && existingSourceSiblingAspect != null - && existingDbtSiblingAspect.getSiblings().contains(sourceUrn.toString()) - && existingSourceSiblingAspect.getSiblings().contains(dbtUrn.toString()) - ) { + if (existingDbtSiblingAspect != null + && existingSourceSiblingAspect != null + && existingDbtSiblingAspect.getSiblings().contains(sourceUrn.toString()) + && existingSourceSiblingAspect.getSiblings().contains(dbtUrn.toString())) { // we have already connected them; we can abort here return; }
@@ -266,20 +286,24 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { // clean up any references to stale siblings that have been deleted List<Urn> filteredNewSiblingsArray = - newSiblingsUrnArray.stream().filter(urn -> { - try { - return _entityClient.exists(urn); - } catch (RemoteInvocationException e) { - log.error("Error while checking existence of {}: {}", urn, e.toString()); - throw new RuntimeException("Error checking existence. Skipping processing.", e); - } - }).collect(Collectors.toList()); + newSiblingsUrnArray.stream() + .filter( + urn -> { + try { + return _entityClient.exists(urn); + } catch (RemoteInvocationException e) { + log.error("Error while checking existence of {}: {}", urn, e.toString()); + throw new RuntimeException("Error checking existence.
Skipping processing.", e); + } + }) + .collect(Collectors.toList()); sourceSiblingAspect.setSiblings(new UrnArray(filteredNewSiblingsArray)); sourceSiblingAspect.setPrimary(false); MetadataChangeProposal sourceSiblingProposal = new MetadataChangeProposal(); - GenericAspect sourceSiblingAspectSerialized = GenericRecordUtils.serializeAspect(sourceSiblingAspect); + GenericAspect sourceSiblingAspectSerialized = + GenericRecordUtils.serializeAspect(sourceSiblingAspect); sourceSiblingProposal.setAspect(sourceSiblingAspectSerialized); sourceSiblingProposal.setAspectName(SIBLINGS_ASPECT_NAME); @@ -295,23 +319,21 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { } } - /** - * Returns true if the event should be processed, which is only true if the event represents a dataset for now + * Returns true if the event should be processed, which is only true if the event represents a + * dataset for now */ private boolean isEligibleForProcessing(final MetadataChangeLog event) { return event.getEntityType().equals("dataset") && !event.getChangeType().equals(ChangeType.DELETE) - && ( - event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME) - || event.getAspectName().equals(SUB_TYPES_ASPECT_NAME) - || event.getAspectName().equals(DATASET_KEY_ASPECT_NAME) - ); + && (event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME) + || event.getAspectName().equals(SUB_TYPES_ASPECT_NAME) + || event.getAspectName().equals(DATASET_KEY_ASPECT_NAME)); } /** - * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an entityUrn - * or entityKey field, depending on which is present. + * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an + * entityUrn or entityKey field, depending on which is present. */ private Urn getUrnFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -319,14 +341,16 @@ private Urn getUrnFromEvent(final MetadataChangeLog event) { entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get urn from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get urn from MetadataChangeLog event. Skipping processing.", e); } // Extract an URN from the Log Event. return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec()); } /** - * Deserializes and returns an instance of {@link UpstreamLineage} extracted from a {@link MetadataChangeLog} event. + * Deserializes and returns an instance of {@link UpstreamLineage} extracted from a {@link + * MetadataChangeLog} event. */ private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -338,16 +362,19 @@ private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog even entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get UpstreamLineage from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get UpstreamLineage from MetadataChangeLog event. 
Skipping processing.", e); } - return (UpstreamLineage) GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - entitySpec.getAspectSpec(UPSTREAM_LINEAGE_ASPECT_NAME)); + return (UpstreamLineage) + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), + event.getAspect().getContentType(), + entitySpec.getAspectSpec(UPSTREAM_LINEAGE_ASPECT_NAME)); } /** - * Deserializes and returns an instance of {@link SubTypes} extracted from a {@link MetadataChangeLog} event. + * Deserializes and returns an instance of {@link SubTypes} extracted from a {@link + * MetadataChangeLog} event. */ private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -359,22 +386,24 @@ private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) { entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get SubTypes from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get SubTypes from MetadataChangeLog event. Skipping processing.", e); } - return (SubTypes) GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - entitySpec.getAspectSpec(SUB_TYPES_ASPECT_NAME)); + return (SubTypes) + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), + event.getAspect().getContentType(), + entitySpec.getAspectSpec(SUB_TYPES_ASPECT_NAME)); } @SneakyThrows private AuditStamp getAuditStamp() { - return new AuditStamp().setActor(Urn.createFromString(SIBLING_ASSOCIATION_SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + return new AuditStamp() + .setActor(Urn.createFromString(SIBLING_ASSOCIATION_SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); } - private Filter createFilterForEntitiesWithYouAsSibling( - final Urn entityUrn - ) { + private Filter createFilterForEntitiesWithYouAsSibling(final Urn entityUrn) { final Filter filter = new Filter(); final ConjunctiveCriterionArray disjunction = new ConjunctiveCriterionArray(); @@ -395,16 +424,16 @@ private Filter createFilterForEntitiesWithYouAsSibling( return filter; } - private SubTypes getSubtypesFromEntityClient( - final Urn urn - ) { + private SubTypes getSubtypesFromEntityClient(final Urn urn) { try { - EntityResponse entityResponse = _entityClient.getV2( - urn, - ImmutableSet.of(SUB_TYPES_ASPECT_NAME)); - - if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.SUB_TYPES_ASPECT_NAME)) { - return new SubTypes(entityResponse.getAspects().get(Constants.SUB_TYPES_ASPECT_NAME).getValue().data()); + EntityResponse entityResponse = + _entityClient.getV2(urn, ImmutableSet.of(SUB_TYPES_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.SUB_TYPES_ASPECT_NAME)) { + return new SubTypes( + entityResponse.getAspects().get(Constants.SUB_TYPES_ASPECT_NAME).getValue().data()); } else { return null; } @@ -413,17 +442,20 @@ private SubTypes getSubtypesFromEntityClient( } } - private UpstreamLineage getUpstreamLineageFromEntityClient( - final Urn urn - ) { + private UpstreamLineage getUpstreamLineageFromEntityClient(final Urn urn) { try { - EntityResponse entityResponse = _entityClient.getV2( - urn, - ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME) - ); - - 
if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { - return new UpstreamLineage(entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); + EntityResponse entityResponse = + _entityClient.getV2(urn, ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { + return new UpstreamLineage( + entityResponse + .getAspects() + .get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME) + .getValue() + .data()); } else { return null; } @@ -432,17 +464,16 @@ private UpstreamLineage getUpstreamLineageFromEntityClient( } } - private Siblings getSiblingsFromEntityClient( - final Urn urn - ) { + private Siblings getSiblingsFromEntityClient(final Urn urn) { try { - EntityResponse entityResponse = _entityClient.getV2( - urn, - ImmutableSet.of(SIBLINGS_ASPECT_NAME) - ); - - if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.SIBLINGS_ASPECT_NAME)) { - return new Siblings(entityResponse.getAspects().get(Constants.SIBLINGS_ASPECT_NAME).getValue().data()); + EntityResponse entityResponse = + _entityClient.getV2(urn, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.SIBLINGS_ASPECT_NAME)) { + return new Siblings( + entityResponse.getAspects().get(Constants.SIBLINGS_ASPECT_NAME).getValue().data()); } else { return null; } @@ -450,5 +481,4 @@ private Siblings getSiblingsFromEntityClient( throw new RuntimeException("Failed to retrieve UpstreamLineage", e); } } - } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java index 03303b7723b9c..5e0b10b3d5049 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java @@ -4,13 +4,10 @@ import com.linkedin.entity.EntityResponse; import lombok.extern.slf4j.Slf4j; - @Slf4j public abstract class BaseHydrator { - /** - * Use values in the entity response to hydrate the document - */ - protected abstract void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse); - + /** Use values in the entity response to hydrate the document */ + protected abstract void hydrateFromEntityResponse( + ObjectNode document, EntityResponse entityResponse); } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java index 493f7424758cc..9dfbdb280b215 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.chart.ChartInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.ChartKey; import 
lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class ChartHydrator extends BaseHydrator { @@ -21,9 +20,12 @@ public class ChartHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(TITLE, new ChartInfo(dataMap).getTitle())); - mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(DASHBOARD_TOOL, new ChartKey(dataMap).getDashboardTool())); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(TITLE, new ChartInfo(dataMap).getTitle())); + mappingHelper.mapToResult( + CHART_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> + jsonNodes.put(DASHBOARD_TOOL, new ChartKey(dataMap).getDashboardTool())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java index 0b8735533ed06..8b7b63f1f3240 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.entity.EntityResponse; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.CorpUserKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class CorpUserHydrator extends BaseHydrator { @@ -21,9 +20,11 @@ public class CorpUserHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(NAME, new CorpUserInfo(dataMap).getDisplayName())); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(USER_NAME, new CorpUserKey(dataMap).getUsername())); + mappingHelper.mapToResult( + CORP_USER_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(NAME, new CorpUserInfo(dataMap).getDisplayName())); + mappingHelper.mapToResult( + CORP_USER_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(USER_NAME, new CorpUserKey(dataMap).getUsername())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java index 8b376128b7d11..fcafb3aabc860 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import 
com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.dashboard.DashboardInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.DashboardKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DashboardHydrator extends BaseHydrator { private static final String DASHBOARD_TOOL = "dashboardTool"; @@ -20,9 +19,12 @@ public class DashboardHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(TITLE, new DashboardInfo(dataMap).getTitle())); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(DASHBOARD_TOOL, new DashboardKey(dataMap).getDashboardTool())); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(TITLE, new DashboardInfo(dataMap).getTitle())); + mappingHelper.mapToResult( + DASHBOARD_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> + jsonNodes.put(DASHBOARD_TOOL, new DashboardKey(dataMap).getDashboardTool())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java index d847168de7783..88efe53f5c53e 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datajob.DataFlowInfo; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.DataFlowKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DataFlowHydrator extends BaseHydrator { @@ -21,9 +20,12 @@ public class DataFlowHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(NAME, new DataFlowInfo(dataMap).getName())); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(ORCHESTRATOR, new DataFlowKey(dataMap).getOrchestrator())); + mappingHelper.mapToResult( + DATA_FLOW_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(NAME, new DataFlowInfo(dataMap).getName())); + mappingHelper.mapToResult( + CORP_USER_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> + jsonNodes.put(ORCHESTRATOR, new DataFlowKey(dataMap).getOrchestrator())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java index d9c99e8570e68..d8ea57a467277 100644 --- 
a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -11,9 +13,6 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DataJobHydrator extends BaseHydrator { @@ -24,8 +23,9 @@ public class DataJobHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(DATA_JOB_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(NAME, new DataJobInfo(dataMap).getName())); + mappingHelper.mapToResult( + DATA_JOB_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(NAME, new DataJobInfo(dataMap).getName())); try { mappingHelper.mapToResult(DATA_JOB_KEY_ASPECT_NAME, this::mapKey); } catch (Exception e) { @@ -35,8 +35,10 @@ protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse ent private void mapKey(ObjectNode jsonNodes, DataMap dataMap) { DataJobKey dataJobKey = new DataJobKey(dataMap); - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils - .convertUrnToEntityKeyInternal(dataJobKey.getFlow(), new DataFlowKey().schema()); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + dataJobKey.getFlow(), new DataFlowKey().schema()); jsonNodes.put(ORCHESTRATOR, dataFlowKey.getOrchestrator()); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java index 715b23e48b5b9..d95faf4373521 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.DatasetKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DatasetHydrator extends BaseHydrator { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java index 0a3b38517eaad..7a8fdd11fac43 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; +import static 
com.linkedin.metadata.kafka.config.EntityHydratorConfig.EXCLUDED_ASPECTS; + import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.Urn; @@ -13,14 +16,9 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.config.EntityHydratorConfig.EXCLUDED_ASPECTS; - - @Slf4j @RequiredArgsConstructor public class EntityHydrator { @@ -47,12 +45,17 @@ public Optional<ObjectNode> getHydratedEntity(String entityTypeName, String urn) // Hydrate fields from snapshot EntityResponse entityResponse; try { - Set<String> aspectNames = Optional.ofNullable(_entityRegistry.getEntitySpecs().get(urnObj.getEntityType())) - .map(spec -> spec.getAspectSpecs().stream().map(AspectSpec::getName) - .filter(aspectName -> !EXCLUDED_ASPECTS.contains(aspectName)) - .collect(Collectors.toSet())) + Set<String> aspectNames = + Optional.ofNullable(_entityRegistry.getEntitySpecs().get(urnObj.getEntityType())) + .map( + spec -> + spec.getAspectSpecs().stream() + .map(AspectSpec::getName) + .filter(aspectName -> !EXCLUDED_ASPECTS.contains(aspectName)) + .collect(Collectors.toSet())) .orElse(Set.of()); - entityResponse = _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); + entityResponse = + _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); } catch (RemoteInvocationException | URISyntaxException e) { log.error("Error while calling GMS to hydrate entity for urn {}", urn); return Optional.empty(); @@ -83,7 +86,10 @@ public Optional<ObjectNode> getHydratedEntity(String entityTypeName, String urn) _datasetHydrator.hydrateFromEntityResponse(document, entityResponse); break; default: - log.error("Unable to find valid hydrator for entity type: {} urn: {}", entityResponse.getEntityName(), urn); + log.error( + "Unable to find valid hydrator for entity type: {} urn: {}", + entityResponse.getEntityName(), + urn); return Optional.empty(); } return Optional.of(document); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java index 5beb6bdd765a2..30250f14e93e5 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.transformer; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.datahubusage.DataHubUsageEventConstants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -16,24 +19,28 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.datahubusage.DataHubUsageEventConstants.*; - - -/** - * Transformer that transforms usage event (schema defined HERE) into a search document - */ +/** Transformer that transforms usage event (schema defined HERE) into a search document */ @Slf4j @Component public class 
DataHubUsageEventTransformer { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final Set<DataHubUsageEventType> EVENTS_WITH_ENTITY_URN = - ImmutableSet.of(DataHubUsageEventType.SEARCH_RESULT_CLICK_EVENT, DataHubUsageEventType.BROWSE_RESULT_CLICK_EVENT, - DataHubUsageEventType.ENTITY_VIEW_EVENT, DataHubUsageEventType.ENTITY_SECTION_VIEW_EVENT, + ImmutableSet.of( + DataHubUsageEventType.SEARCH_RESULT_CLICK_EVENT, + DataHubUsageEventType.BROWSE_RESULT_CLICK_EVENT, + DataHubUsageEventType.ENTITY_VIEW_EVENT, + DataHubUsageEventType.ENTITY_SECTION_VIEW_EVENT, DataHubUsageEventType.ENTITY_ACTION_EVENT); private final EntityHydrator _entityHydrator; @@ -97,7 +104,8 @@ public Optional<TransformedDocument> transformDataHubUsageEvent(String dataHubUs try { return Optional.of( - new TransformedDocument(getId(eventDocument), OBJECT_MAPPER.writeValueAsString(eventDocument))); + new TransformedDocument( + getId(eventDocument), OBJECT_MAPPER.writeValueAsString(eventDocument))); } catch (JsonProcessingException e) { log.info("Failed to package document: {}", eventDocument); return Optional.empty(); @@ -128,13 +136,21 @@ private void setFieldsForEntity(EntityType entityType, String urn, ObjectNode se log.info("No matches for urn {}", urn); return; } - entityObject.get().fieldNames() + entityObject + .get() + .fieldNames() .forEachRemaining( - key -> searchObject.put(entityType.name().toLowerCase() + "_" + key, entityObject.get().get(key).asText())); + key -> + searchObject.put( + entityType.name().toLowerCase() + "_" + key, + entityObject.get().get(key).asText())); } private String getId(final ObjectNode eventDocument) { - return eventDocument.get(TYPE).asText() + "_" + eventDocument.get(ACTOR_URN).asText() + "_" + eventDocument.get( - TIMESTAMP).asText(); + return eventDocument.get(TYPE).asText() + + "_" + + eventDocument.get(ACTOR_URN).asText() + + "_" + + eventDocument.get(TIMESTAMP).asText(); } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java index a237e3e27f168..f2bb8a5fc9222 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java @@ -5,14 +5,18 @@ import com.linkedin.metadata.models.registry.EntityRegistry; public class EntityRegistryTestUtil { - private EntityRegistryTestUtil() { - } + private EntityRegistryTestUtil() {} - public static final EntityRegistry ENTITY_REGISTRY; + public static final EntityRegistry ENTITY_REGISTRY; - static { - EntityRegistryTestUtil.class.getClassLoader().setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - ENTITY_REGISTRY = new ConfigEntityRegistry( - EntityRegistryTestUtil.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); - 
} + static { + EntityRegistryTestUtil.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + ENTITY_REGISTRY = + new ConfigEntityRegistry( + EntityRegistryTestUtil.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); + } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java index 85b00e9ade6b8..a1a9bd4cd413a 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.graph.GraphIndexUtils.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; +import static org.testng.Assert.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; @@ -29,11 +33,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.graph.GraphIndexUtils.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; -import static org.testng.Assert.*; - - public class GraphIndexUtilsTest { private static final String UPSTREAM_RELATIONSHIP_PATH = "/upstreams/*/dataset"; @@ -54,7 +53,9 @@ public class GraphIndexUtilsTest { public void setupTest() { _createdActorUrn = UrnUtils.getUrn(CREATED_ACTOR_URN); _updatedActorUrn = UrnUtils.getUrn(UPDATED_ACTOR_URN); - _datasetUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"); + _datasetUrn = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"); _upstreamDataset1 = UrnUtils.toDatasetUrn("snowflake", "test", "DEV"); _upstreamDataset2 = UrnUtils.toDatasetUrn("snowflake", "test2", "DEV"); _mockRegistry = ENTITY_REGISTRY; @@ -74,29 +75,30 @@ public void testExtractGraphEdgesDefault() { for (Map.Entry<RelationshipFieldSpec, List<Object>> entry : extractedFields.entrySet()) { // check specifically for the upstreams relationship entry if (entry.getKey().getPath().toString().equals(UPSTREAM_RELATIONSHIP_PATH)) { - List<Edge> edgesToAdd = GraphIndexUtils.extractGraphEdges(entry, upstreamLineage, _datasetUrn, event, true); + List<Edge> edgesToAdd = + GraphIndexUtils.extractGraphEdges(entry, upstreamLineage, _datasetUrn, event, true); List<Edge> expectedEdgesToAdd = new ArrayList<>(); // edges contain default created event time and created actor from system metadata - Edge edge1 = new Edge( - _datasetUrn, - _upstreamDataset1, - entry.getKey().getRelationshipName(), - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_1, - _updatedActorUrn, - null - ); - Edge edge2 = new Edge( - _datasetUrn, - _upstreamDataset2, - entry.getKey().getRelationshipName(), - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - null - ); + Edge edge1 = + new Edge( + _datasetUrn, + _upstreamDataset1, + entry.getKey().getRelationshipName(), + CREATED_EVENT_TIME, + _createdActorUrn, + UPDATED_EVENT_TIME_1, + _updatedActorUrn, + null); + Edge edge2 = + new Edge( + _datasetUrn, + _upstreamDataset2, + entry.getKey().getRelationshipName(), + CREATED_EVENT_TIME, + _createdActorUrn, + 
UPDATED_EVENT_TIME_2, + _updatedActorUrn, + null); expectedEdgesToAdd.add(edge1); expectedEdgesToAdd.add(edge2); assertEquals(expectedEdgesToAdd.size(), edgesToAdd.size()); @@ -108,26 +110,26 @@ public void testExtractGraphEdgesDefault() { @Test public void testMergeEdges() { - final Edge edge1 = new Edge( - _datasetUrn, - _upstreamDataset1, - DOWNSTREAM_RELATIONSHIP_TYPE, - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_1, - _updatedActorUrn, - Collections.singletonMap("foo", "bar") - ); - final Edge edge2 = new Edge( - _datasetUrn, - _upstreamDataset1, - DOWNSTREAM_RELATIONSHIP_TYPE, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - Collections.singletonMap("foo", "baz") - ); + final Edge edge1 = + new Edge( + _datasetUrn, + _upstreamDataset1, + DOWNSTREAM_RELATIONSHIP_TYPE, + CREATED_EVENT_TIME, + _createdActorUrn, + UPDATED_EVENT_TIME_1, + _updatedActorUrn, + Collections.singletonMap("foo", "bar")); + final Edge edge2 = + new Edge( + _datasetUrn, + _upstreamDataset1, + DOWNSTREAM_RELATIONSHIP_TYPE, + UPDATED_EVENT_TIME_2, + _updatedActorUrn, + UPDATED_EVENT_TIME_2, + _updatedActorUrn, + Collections.singletonMap("foo", "baz")); final Edge edge3 = mergeEdges(edge1, edge2); assertEquals(edge3.getSource(), edge1.getSource()); assertEquals(edge3.getDestination(), edge1.getDestination()); @@ -144,11 +146,13 @@ private UpstreamLineage createUpstreamLineage() { UpstreamArray upstreams = new UpstreamArray(); Upstream upstream1 = new Upstream(); upstream1.setDataset(_upstreamDataset1); - upstream1.setAuditStamp(new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); + upstream1.setAuditStamp( + new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); upstream1.setType(DatasetLineageType.TRANSFORMED); Upstream upstream2 = new Upstream(); upstream2.setDataset(_upstreamDataset2); - upstream2.setAuditStamp(new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); + upstream2.setAuditStamp( + new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); upstream2.setType(DatasetLineageType.TRANSFORMED); upstreams.add(upstream1); upstreams.add(upstream2); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java index 0897cfa01084f..724b91edbf8a1 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.UpdateIndicesHookTest.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; @@ -18,15 +21,9 @@ import java.util.HashMap; import java.util.Map; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.UpdateIndicesHookTest.*; - - public class MCLProcessingTestDataGenerator { - private MCLProcessingTestDataGenerator() { - - } + private MCLProcessingTestDataGenerator() {} public static MetadataChangeLog createBaseChangeLog() throws URISyntaxException { MetadataChangeLog event = new MetadataChangeLog(); @@ -39,7 +36,8 @@ public static MetadataChangeLog 
createBaseChangeLog() throws URISyntaxException event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); - event.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(TEST_ACTOR_URN)).setTime(EVENT_TIME)); + event.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn(TEST_ACTOR_URN)).setTime(EVENT_TIME)); return event; } @@ -68,7 +66,8 @@ public static MetadataChangeLog setSystemMetadataWithForceIndexing(MetadataChang return changeLog.setSystemMetadata(systemMetadata); } - public static MetadataChangeLog setPreviousData(MetadataChangeLog changeLog, MetadataChangeLog previousState) { + public static MetadataChangeLog setPreviousData( + MetadataChangeLog changeLog, MetadataChangeLog previousState) { changeLog.setPreviousAspectValue(previousState.getAspect()); return changeLog.setPreviousSystemMetadata(previousState.getSystemMetadata()); } @@ -93,7 +92,8 @@ public static MetadataChangeLog modifySystemMetadata2(MetadataChangeLog changeLo return changeLog.setSystemMetadata(systemMetadata); } - public static MetadataChangeLog modifyAspect(MetadataChangeLog changeLog, UpstreamLineage upstreamLineage) { + public static MetadataChangeLog modifyAspect( + MetadataChangeLog changeLog, UpstreamLineage upstreamLineage) { return changeLog.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); } @@ -109,7 +109,8 @@ public static UpstreamLineage createBaseLineageAspect() throws URISyntaxExceptio return upstreamLineage; } - public static UpstreamLineage addLineageEdge(UpstreamLineage upstreamLineage) throws URISyntaxException { + public static UpstreamLineage addLineageEdge(UpstreamLineage upstreamLineage) + throws URISyntaxException { UpstreamArray upstreamArray = upstreamLineage.getUpstreams(); Upstream upstream = new Upstream(); upstream.setType(DatasetLineageType.TRANSFORMED); @@ -127,5 +128,4 @@ public static UpstreamLineage modifyNonSearchableField(UpstreamLineage upstreamL upstreamArray.set(0, upstream); return upstreamLineage.setUpstreams(upstreamArray); } - } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java index 90f8f208c4cb6..12c8ad7d0c69b 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java @@ -1,7 +1,10 @@ package com.linkedin.metadata.kafka.hook; -import com.linkedin.metadata.config.SystemUpdateConfiguration; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; +import static com.linkedin.metadata.kafka.hook.MCLProcessingTestDataGenerator.*; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; + import com.linkedin.common.AuditStamp; import com.linkedin.common.InputField; import com.linkedin.common.InputFieldArray; @@ -22,9 +25,11 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import com.linkedin.metadata.config.SystemUpdateConfiguration; +import 
com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.graph.Edge; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; import com.linkedin.metadata.key.ChartKey; import com.linkedin.metadata.models.AspectSpec; @@ -43,31 +48,28 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaField; - import java.net.URISyntaxException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Value; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Collections; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; -import static com.linkedin.metadata.kafka.hook.MCLProcessingTestDataGenerator.*; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; - public class UpdateIndicesHookTest { -// going to want a test where we have an upstreamLineage aspect with finegrained, check that we call _graphService.addEdge for each edge -// as well as _graphService.removeEdgesFromNode for each field and their relationships + // going to want a test where we have an upstreamLineage aspect with finegrained, check that we + // call _graphService.addEdge for each edge + // as well as _graphService.removeEdgesFromNode for each field and their relationships static final long EVENT_TIME = 123L; - static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)"; - static final String TEST_DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"; - static final String TEST_DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressKafkaDataset,PROD)"; + static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)"; + static final String TEST_DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"; + static final String TEST_DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressKafkaDataset,PROD)"; static final String TEST_CHART_URN = "urn:li:chart:(looker,dashboard_elements.1)"; static final String TEST_ACTOR_URN = "urn:li:corpuser:test"; static final String DOWNSTREAM_OF = "DownstreamOf"; @@ -107,88 +109,112 @@ public void setupTest() { ElasticSearchConfiguration elasticSearchConfiguration = new ElasticSearchConfiguration(); SystemUpdateConfiguration systemUpdateConfiguration = new SystemUpdateConfiguration(); systemUpdateConfiguration.setWaitForSystemUpdate(false); - Mockito.when(_mockConfigurationProvider.getElasticSearch()).thenReturn(elasticSearchConfiguration); - _updateIndicesService = new UpdateIndicesService( - _mockGraphService, - _mockEntitySearchService, - _mockTimeseriesAspectService, - _mockSystemMetadataService, - ENTITY_REGISTRY, - _searchDocumentTransformer, - _mockEntityIndexBuilders - ); - _updateIndicesHook = new UpdateIndicesHook( - _updateIndicesService, - true - ); + Mockito.when(_mockConfigurationProvider.getElasticSearch()) + .thenReturn(elasticSearchConfiguration); + _updateIndicesService = + new UpdateIndicesService( + 
_mockGraphService, + _mockEntitySearchService, + _mockTimeseriesAspectService, + _mockSystemMetadataService, + ENTITY_REGISTRY, + _searchDocumentTransformer, + _mockEntityIndexBuilders); + _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); } @Test public void testFineGrainedLineageEdgesAreAdded() throws Exception { _updateIndicesService.setGraphDiffMode(false); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); MetadataChangeLog event = createUpstreamLineageMCL(upstreamUrn, downstreamUrn); _updateIndicesHook.invoke(event); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); } @Test public void testFineGrainedLineageEdgesAreAddedRestate() throws Exception { _updateIndicesService.setGraphDiffMode(false); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); - MetadataChangeLog event = createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.RESTATE); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + MetadataChangeLog event = + createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.RESTATE); _updateIndicesHook.invoke(event); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new 
ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); Mockito.verify(_mockEntitySearchService, Mockito.times(1)) - .upsertDocument(Mockito.eq(DATASET_ENTITY_NAME), Mockito.any(), - Mockito.eq(URLEncoder.encode("urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)", - StandardCharsets.UTF_8))); + .upsertDocument( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.any(), + Mockito.eq( + URLEncoder.encode( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)", + StandardCharsets.UTF_8))); } @Test public void testInputFieldsEdgesAreAdded() throws Exception { - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,thelook.explore.order_items,PROD),users.count)"); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,thelook.explore.order_items,PROD),users.count)"); String downstreamFieldPath = "users.count"; MetadataChangeLog event = createInputFieldsMCL(upstreamUrn, downstreamFieldPath); EntityRegistry mockEntityRegistry = createMockEntityRegistry(); - _updateIndicesService = new UpdateIndicesService( - _mockGraphService, - _mockEntitySearchService, - _mockTimeseriesAspectService, - _mockSystemMetadataService, - mockEntityRegistry, - _searchDocumentTransformer, - _mockEntityIndexBuilders - ); + _updateIndicesService = + new UpdateIndicesService( + _mockGraphService, + _mockEntitySearchService, + _mockTimeseriesAspectService, + _mockSystemMetadataService, + mockEntityRegistry, + _searchDocumentTransformer, + _mockEntityIndexBuilders); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); _updateIndicesHook.invoke(event); - Urn downstreamUrn = UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", TEST_CHART_URN, downstreamFieldPath)); + Urn downstreamUrn = + UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", TEST_CHART_URN, downstreamFieldPath)); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); } @Test @@ -207,7 +233,9 @@ public void testMCLProcessExhaustive() throws URISyntaxException { Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.any()); // Update document Mockito.verify(_mockEntitySearchService, Mockito.times(1)) - .upsertDocument(Mockito.eq(DATASET_ENTITY_NAME), Mockito.any(), + .upsertDocument( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.any(), Mockito.eq(URLEncoder.encode(TEST_DATASET_URN, StandardCharsets.UTF_8))); /* @@ -261,7 +289,6 @@ public void testMCLProcessExhaustive() throws URISyntaxException { Mockito.verify(_mockEntitySearchService, Mockito.times(0)) .upsertDocument(Mockito.any(), Mockito.any(), Mockito.any()); - /* * noOpUpsert */ @@ -356,8 +383,8 @@ public void testMCLProcessExhaustive() throws URISyntaxException { _updateIndicesHook.invoke(changeLog); // 
Forced removal of all edges - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode(Mockito.any(), - Mockito.any(), Mockito.any()); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode(Mockito.any(), Mockito.any(), Mockito.any()); // Forced add of edges Mockito.verify(_mockGraphService, Mockito.times(2)).addEdge(Mockito.any()); // Forced document update @@ -369,45 +396,64 @@ public void testMCLProcessExhaustive() throws URISyntaxException { public void testMCLUIPreProcessed() throws Exception { _updateIndicesService.setGraphDiffMode(true); _updateIndicesService.setSearchDiffMode(true); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); - - MetadataChangeLog changeLog = createUpstreamLineageMCLUIPreProcessed(upstreamUrn, downstreamUrn, ChangeType.UPSERT); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + + MetadataChangeLog changeLog = + createUpstreamLineageMCLUIPreProcessed(upstreamUrn, downstreamUrn, ChangeType.UPSERT); _updateIndicesHook.invoke(changeLog); - Mockito.verifyNoInteractions(_mockEntitySearchService, _mockGraphService, _mockTimeseriesAspectService, _mockSystemMetadataService); + Mockito.verifyNoInteractions( + _mockEntitySearchService, + _mockGraphService, + _mockTimeseriesAspectService, + _mockSystemMetadataService); } private EntityRegistry createMockEntityRegistry() { - // need to mock this registry instead of using test-entity-registry.yml because inputFields does not work due to a known bug + // need to mock this registry instead of using test-entity-registry.yml because inputFields does + // not work due to a known bug EntityRegistry mockEntityRegistry = Mockito.mock(EntityRegistry.class); EntitySpec entitySpec = Mockito.mock(EntitySpec.class); AspectSpec aspectSpec = createMockAspectSpec(InputFields.class, InputFields.dataSchema()); - AspectSpec upstreamLineageAspectSpec = createMockAspectSpec(UpstreamLineage.class, UpstreamLineage.dataSchema()); - Mockito.when(mockEntityRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(mockEntityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); - Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)).thenReturn(upstreamLineageAspectSpec); + AspectSpec upstreamLineageAspectSpec = + createMockAspectSpec(UpstreamLineage.class, UpstreamLineage.dataSchema()); + Mockito.when(mockEntityRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(mockEntityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) + .thenReturn(upstreamLineageAspectSpec); Mockito.when(aspectSpec.isTimeseries()).thenReturn(false); Mockito.when(aspectSpec.getName()).thenReturn(Constants.INPUT_FIELDS_ASPECT_NAME); 
Mockito.when(upstreamLineageAspectSpec.isTimeseries()).thenReturn(false); - Mockito.when(upstreamLineageAspectSpec.getName()).thenReturn(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + Mockito.when(upstreamLineageAspectSpec.getName()) + .thenReturn(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); AspectSpec chartKeyAspectSpec = createMockAspectSpec(ChartKey.class, ChartKey.dataSchema()); Mockito.when(entitySpec.getKeyAspectSpec()).thenReturn(chartKeyAspectSpec); return mockEntityRegistry; } - private <T extends RecordTemplate> AspectSpec createMockAspectSpec(Class<T> clazz, RecordDataSchema schema) { + private <T extends RecordTemplate> AspectSpec createMockAspectSpec( + Class<T> clazz, RecordDataSchema schema) { AspectSpec mockSpec = Mockito.mock(AspectSpec.class); Mockito.when(mockSpec.getDataTemplateClass()).thenReturn((Class<RecordTemplate>) clazz); Mockito.when(mockSpec.getPegasusSchema()).thenReturn(schema); return mockSpec; } - private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn) throws Exception { + private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn) + throws Exception { return createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.UPSERT); } - private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { + private MetadataChangeLog createUpstreamLineageMCL( + Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(Constants.DATASET_ENTITY_NAME); event.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); @@ -427,7 +473,9 @@ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstre final UpstreamArray upstreamArray = new UpstreamArray(); final Upstream upstream = new Upstream(); upstream.setType(DatasetLineageType.TRANSFORMED); - upstream.setDataset(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)")); + upstream.setDataset( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)")); upstreamArray.add(upstream); upstreamLineage.setUpstreams(upstreamArray); @@ -438,8 +486,10 @@ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstre return event; } - private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { - final MetadataChangeLog metadataChangeLog = createUpstreamLineageMCL(upstreamUrn, downstreamUrn, changeType); + private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed( + Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { + final MetadataChangeLog metadataChangeLog = + createUpstreamLineageMCL(upstreamUrn, downstreamUrn, changeType); final StringMap properties = new StringMap(); properties.put(APP_SOURCE, UI_SOURCE); final SystemMetadata systemMetadata = new SystemMetadata().setProperties(properties); @@ -447,7 +497,8 @@ private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(Urn upstreamUrn return metadataChangeLog; } - private MetadataChangeLog createInputFieldsMCL(Urn upstreamUrn, String downstreamFieldPath) throws Exception { + private MetadataChangeLog createInputFieldsMCL(Urn upstreamUrn, String downstreamFieldPath) + throws Exception { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(Constants.CHART_ENTITY_NAME); 
     event.setAspectName(Constants.INPUT_FIELDS_ASPECT_NAME);
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java
index 7d9619f3e2d1c..8400e19ce49a3 100644
--- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java
+++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.metadata.kafka.hook.event;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.*;
+
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.assertion.AssertionResult;
@@ -64,30 +68,27 @@ import com.linkedin.platform.event.v1.Parameters;
 import java.net.URISyntaxException;
 import java.util.Map;
-
 import org.mockito.Mockito;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.ArgumentMatchers.*;
-import static org.mockito.Mockito.*;
-
-
 /**
  * Tests the {@link EntityChangeEventGeneratorHook}.
  *
- * TODO: Include Schema Field Tests, description update tests.
+ * <p>TODO: Include Schema Field Tests, description update tests.
 */
 public class EntityChangeEventGeneratorHookTest {
 
   private static final long EVENT_TIME = 123L;
-  private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleDataset,PROD)";
+  private static final String TEST_DATASET_URN =
+      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleDataset,PROD)";
   private static final String TEST_ACTOR_URN = "urn:li:corpuser:test";
   private static final String TEST_ASSERTION_URN = "urn:li:assertion:123";
   private static final String TEST_RUN_ID = "runId";
-  private static final String TEST_DATA_PROCESS_INSTANCE_URN = "urn:li:dataProcessInstance:instance";
-  private static final String TEST_DATA_PROCESS_INSTANCE_PARENT_URN = "urn:li:dataProcessInstance:parent";
+  private static final String TEST_DATA_PROCESS_INSTANCE_URN =
+      "urn:li:dataProcessInstance:instance";
+  private static final String TEST_DATA_PROCESS_INSTANCE_PARENT_URN =
+      "urn:li:dataProcessInstance:parent";
   private static final String TEST_DATA_FLOW_URN = "urn:li:dataFlow:flow";
   private static final String TEST_DATA_JOB_URN = "urn:li:dataJob:job";
   private Urn actorUrn;
@@ -101,9 +102,11 @@ public void setupTest() throws URISyntaxException {
     actorUrn = Urn.createFromString(TEST_ACTOR_URN);
     _mockClient = Mockito.mock(SystemRestliEntityClient.class);
     _mockEntityService = Mockito.mock(EntityService.class);
-    EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = createEntityChangeEventGeneratorRegistry();
+    EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry =
+        createEntityChangeEventGeneratorRegistry();
     _entityChangeEventHook =
-        new EntityChangeEventGeneratorHook(entityChangeEventGeneratorRegistry, _mockClient, createMockEntityRegistry(), true);
+        new EntityChangeEventGeneratorHook(
+            entityChangeEventGeneratorRegistry, _mockClient, createMockEntityRegistry(), true);
   }
 
   @Test
@@ -114,10 +117,8 @@ public void testInvokeEntityAddTagChange() throws Exception {
     event.setChangeType(ChangeType.UPSERT);
     final GlobalTags newTags = new GlobalTags();
     final TagUrn newTagUrn = new TagUrn("Test");
-    newTags.setTags(new TagAssociationArray(
-        ImmutableList.of(new TagAssociation()
-            .setTag(newTagUrn)
-        )));
+    newTags.setTags(
+        new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(newTagUrn))));
     event.setAspect(GenericRecordUtils.serializeAspect(newTags));
     event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN));
     event.setEntityType(DATASET_ENTITY_NAME);
@@ -128,8 +129,14 @@ public void testInvokeEntityAddTagChange() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.TAG,
-            ChangeOperation.ADD, newTagUrn.toString(), ImmutableMap.of("tagUrn", newTagUrn.toString()), actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.TAG,
+            ChangeOperation.ADD,
+            newTagUrn.toString(),
+            ImmutableMap.of("tagUrn", newTagUrn.toString()),
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -142,10 +149,8 @@ public void testInvokeEntityRemoveTagChange() throws Exception {
     event.setChangeType(ChangeType.UPSERT);
     final GlobalTags existingTags = new GlobalTags();
     final TagUrn newTagUrn = new TagUrn("Test");
-    existingTags.setTags(new TagAssociationArray(
-        ImmutableList.of(new TagAssociation()
-            .setTag(newTagUrn)
-        )));
+    existingTags.setTags(
+        new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(newTagUrn))));
     event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(existingTags));
     event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN));
     event.setEntityType(DATASET_ENTITY_NAME);
@@ -156,8 +161,14 @@ public void testInvokeEntityRemoveTagChange() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.TAG,
-            ChangeOperation.REMOVE, newTagUrn.toString(), ImmutableMap.of("tagUrn", newTagUrn.toString()), actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.TAG,
+            ChangeOperation.REMOVE,
+            newTagUrn.toString(),
+            ImmutableMap.of("tagUrn", newTagUrn.toString()),
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -170,11 +181,9 @@ public void testInvokeEntityAddTermChange() throws Exception {
     event.setChangeType(ChangeType.UPSERT);
     final GlossaryTerms newTerms = new GlossaryTerms();
     final GlossaryTermUrn glossaryTermUrn = new GlossaryTermUrn("TestTerm");
-    newTerms.setTerms(new GlossaryTermAssociationArray(
-        ImmutableList.of(new GlossaryTermAssociation()
-            .setUrn(glossaryTermUrn)
-        )
-    ));
+    newTerms.setTerms(
+        new GlossaryTermAssociationArray(
+            ImmutableList.of(new GlossaryTermAssociation().setUrn(glossaryTermUrn))));
     final GlossaryTerms previousTerms = new GlossaryTerms();
     previousTerms.setTerms(new GlossaryTermAssociationArray());
     event.setAspect(GenericRecordUtils.serializeAspect(newTerms));
@@ -188,8 +197,13 @@ public void testInvokeEntityAddTermChange() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.GLOSSARY_TERM,
-            ChangeOperation.ADD, glossaryTermUrn.toString(), ImmutableMap.of("termUrn", glossaryTermUrn.toString()),
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.GLOSSARY_TERM,
+            ChangeOperation.ADD,
+            glossaryTermUrn.toString(),
+            ImmutableMap.of("termUrn", glossaryTermUrn.toString()),
             actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
@@ -205,11 +219,9 @@ public void testInvokeEntityRemoveTermChange() throws Exception {
     newTerms.setTerms(new GlossaryTermAssociationArray());
     final GlossaryTerms previousTerms = new GlossaryTerms();
     final GlossaryTermUrn glossaryTermUrn = new GlossaryTermUrn("TestTerm");
-    previousTerms.setTerms(new GlossaryTermAssociationArray(
-        ImmutableList.of(new GlossaryTermAssociation()
-            .setUrn(glossaryTermUrn)
-        )
-    ));
+    previousTerms.setTerms(
+        new GlossaryTermAssociationArray(
+            ImmutableList.of(new GlossaryTermAssociation().setUrn(glossaryTermUrn))));
     event.setAspect(GenericRecordUtils.serializeAspect(newTerms));
     event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(previousTerms));
     event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN));
@@ -221,8 +233,13 @@ public void testInvokeEntityRemoveTermChange() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.GLOSSARY_TERM,
-            ChangeOperation.REMOVE, glossaryTermUrn.toString(), ImmutableMap.of("termUrn", glossaryTermUrn.toString()),
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.GLOSSARY_TERM,
+            ChangeOperation.REMOVE,
+            glossaryTermUrn.toString(),
+            ImmutableMap.of("termUrn", glossaryTermUrn.toString()),
             actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
@@ -236,8 +253,7 @@ public void testInvokeEntitySetDomain() throws Exception {
     event.setChangeType(ChangeType.UPSERT);
     final Domains newDomains = new Domains();
     final Urn domainUrn = Urn.createFromString("urn:li:domain:test");
-    newDomains.setDomains(new UrnArray(
-        ImmutableList.of(domainUrn)));
+    newDomains.setDomains(new UrnArray(ImmutableList.of(domainUrn)));
     event.setAspect(GenericRecordUtils.serializeAspect(newDomains));
     event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN));
     event.setEntityType(DATASET_ENTITY_NAME);
@@ -248,8 +264,14 @@ public void testInvokeEntitySetDomain() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DOMAIN,
-            ChangeOperation.ADD, domainUrn.toString(), ImmutableMap.of("domainUrn", domainUrn.toString()), actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.DOMAIN,
+            ChangeOperation.ADD,
+            domainUrn.toString(),
+            ImmutableMap.of("domainUrn", domainUrn.toString()),
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -262,8 +284,7 @@ public void testInvokeEntityUnsetDomain() throws Exception {
     event.setChangeType(ChangeType.UPSERT);
     final Domains previousDomains = new Domains();
     final Urn domainUrn = Urn.createFromString("urn:li:domain:test");
-    previousDomains.setDomains(new UrnArray(
-        ImmutableList.of(domainUrn)));
+    previousDomains.setDomains(new UrnArray(ImmutableList.of(domainUrn)));
    event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(previousDomains));
     event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN));
     event.setEntityType(DATASET_ENTITY_NAME);
@@ -274,8 +295,14 @@ public void testInvokeEntityUnsetDomain() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DOMAIN,
-            ChangeOperation.REMOVE, domainUrn.toString(), ImmutableMap.of("domainUrn", domainUrn.toString()), actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.DOMAIN,
+            ChangeOperation.REMOVE,
+            domainUrn.toString(),
+            ImmutableMap.of("domainUrn", domainUrn.toString()),
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -289,12 +316,11 @@ public void testInvokeEntityOwnerChange() throws Exception {
     final Ownership newOwners = new Ownership();
     final Urn ownerUrn1 = Urn.createFromString("urn:li:corpuser:test1");
     final Urn ownerUrn2 = Urn.createFromString("urn:li:corpuser:test2");
-    newOwners.setOwners(new OwnerArray(
-        ImmutableList.of(
-            new Owner().setOwner(ownerUrn1).setType(OwnershipType.TECHNICAL_OWNER),
-            new Owner().setOwner(ownerUrn2).setType(OwnershipType.BUSINESS_OWNER)
-        )
-    ));
+    newOwners.setOwners(
+        new OwnerArray(
+            ImmutableList.of(
+                new Owner().setOwner(ownerUrn1).setType(OwnershipType.TECHNICAL_OWNER),
+                new Owner().setOwner(ownerUrn2).setType(OwnershipType.BUSINESS_OWNER))));
     final Ownership prevOwners = new Ownership();
     prevOwners.setOwners(new OwnerArray());
     event.setAspect(GenericRecordUtils.serializeAspect(newOwners));
@@ -308,16 +334,32 @@ public void testInvokeEntityOwnerChange() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent1 =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.OWNER,
-            ChangeOperation.ADD, ownerUrn1.toString(),
-            ImmutableMap.of("ownerUrn", ownerUrn1.toString(), "ownerType", OwnershipType.TECHNICAL_OWNER.toString()),
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.OWNER,
+            ChangeOperation.ADD,
+            ownerUrn1.toString(),
+            ImmutableMap.of(
+                "ownerUrn",
+                ownerUrn1.toString(),
+                "ownerType",
+                OwnershipType.TECHNICAL_OWNER.toString()),
             actorUrn);
     verifyProducePlatformEvent(_mockClient, platformEvent1, false);
     PlatformEvent platformEvent2 =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.OWNER,
-            ChangeOperation.ADD, ownerUrn2.toString(),
-            ImmutableMap.of("ownerUrn", ownerUrn2.toString(), "ownerType", OwnershipType.BUSINESS_OWNER.toString()),
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.OWNER,
+            ChangeOperation.ADD,
+            ownerUrn2.toString(),
+            ImmutableMap.of(
+                "ownerUrn",
+                ownerUrn2.toString(),
+                "ownerType",
+                OwnershipType.BUSINESS_OWNER.toString()),
            actorUrn);
     verifyProducePlatformEvent(_mockClient, platformEvent2, true);
   }
@@ -344,8 +386,14 @@ public void testInvokeEntityTermDeprecation() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DEPRECATION,
-            ChangeOperation.MODIFY, null, ImmutableMap.of("status", "DEPRECATED"), actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.DEPRECATION,
+            ChangeOperation.MODIFY,
+            null,
+            ImmutableMap.of("status", "DEPRECATED"),
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -372,8 +420,14 @@ public void testInvokeEntityCreate() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE,
-            ChangeOperation.CREATE, null, null, actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.LIFECYCLE,
+            ChangeOperation.CREATE,
+            null,
+            null,
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -400,8 +454,14 @@ public void testInvokeEntityHardDelete() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE,
-            ChangeOperation.HARD_DELETE, null, null, actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.LIFECYCLE,
+            ChangeOperation.HARD_DELETE,
+            null,
+            null,
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -426,8 +486,14 @@ public void testInvokeEntitySoftDelete() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE,
-            ChangeOperation.SOFT_DELETE, null, null, actorUrn);
+        createChangeEvent(
+            DATASET_ENTITY_NAME,
+            Urn.createFromString(TEST_DATASET_URN),
+            ChangeCategory.LIFECYCLE,
+            ChangeOperation.SOFT_DELETE,
+            null,
+            null,
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -464,8 +530,14 @@ public void testInvokeAssertionRunEventCreate() throws Exception {
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(ASSERTION_ENTITY_NAME, assertionUrn, ChangeCategory.RUN, ChangeOperation.COMPLETED, null,
-            paramsMap, actorUrn);
+        createChangeEvent(
+            ASSERTION_ENTITY_NAME,
+            assertionUrn,
+            ChangeCategory.RUN,
+            ChangeOperation.COMPLETED,
+            null,
+            paramsMap,
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent);
   }
@@ -487,25 +559,37 @@ public void testInvokeDataProcessInstanceRunEventStart() throws Exception {
     event.setCreated(new AuditStamp().setActor(actorUrn).setTime(EVENT_TIME));
 
     DataProcessInstanceRelationships relationships =
-        new DataProcessInstanceRelationships().setParentInstance(
-            Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN))
+        new DataProcessInstanceRelationships()
+            .setParentInstance(Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN))
             .setParentTemplate(Urn.createFromString(TEST_DATA_JOB_URN));
     final EntityResponse entityResponse =
-        buildEntityResponse(ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships));
+        buildEntityResponse(
+            ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships));
 
     Mockito.when(_mockClient.getV2(eq(dataProcessInstanceUrn), any())).thenReturn(entityResponse);
 
     _entityChangeEventHook.invoke(event);
 
     Map<String, Object> parameters =
-        ImmutableMap.of(ATTEMPT_KEY, 1, PARENT_INSTANCE_URN_KEY, TEST_DATA_PROCESS_INSTANCE_PARENT_URN,
-            DATA_JOB_URN_KEY, TEST_DATA_JOB_URN);
+        ImmutableMap.of(
+            ATTEMPT_KEY,
+            1,
+            PARENT_INSTANCE_URN_KEY,
+            TEST_DATA_PROCESS_INSTANCE_PARENT_URN,
+            DATA_JOB_URN_KEY,
+            TEST_DATA_JOB_URN);
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATA_PROCESS_INSTANCE_ENTITY_NAME, dataProcessInstanceUrn, ChangeCategory.RUN,
-            ChangeOperation.STARTED, null, parameters, actorUrn);
+        createChangeEvent(
+            DATA_PROCESS_INSTANCE_ENTITY_NAME,
+            dataProcessInstanceUrn,
+            ChangeCategory.RUN,
+            ChangeOperation.STARTED,
+            null,
+            parameters,
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent, false);
   }
@@ -521,7 +605,8 @@ public void testInvokeDataProcessInstanceRunEventComplete() throws Exception {
     event.setChangeType(ChangeType.UPSERT);
     DataProcessInstanceRunEvent dataProcessInstanceRunEvent =
-        new DataProcessInstanceRunEvent().setStatus(DataProcessRunStatus.COMPLETE)
+        new DataProcessInstanceRunEvent()
+            .setStatus(DataProcessRunStatus.COMPLETE)
             .setAttempt(1)
             .setResult(new DataProcessInstanceRunResult().setType(RunResultType.SUCCESS));
@@ -529,24 +614,38 @@ public void testInvokeDataProcessInstanceRunEventComplete() throws Exception {
     event.setCreated(new AuditStamp().setActor(actorUrn).setTime(EVENT_TIME));
 
     DataProcessInstanceRelationships relationships =
-        new DataProcessInstanceRelationships().setParentInstance(
-            Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN))
+        new DataProcessInstanceRelationships()
+            .setParentInstance(Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN))
             .setParentTemplate(Urn.createFromString(TEST_DATA_FLOW_URN));
     final EntityResponse entityResponse =
-        buildEntityResponse(ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships));
+        buildEntityResponse(
+            ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships));
 
     Mockito.when(_mockClient.getV2(eq(dataProcessInstanceUrn), any())).thenReturn(entityResponse);
 
     _entityChangeEventHook.invoke(event);
 
     Map<String, Object> parameters =
-        ImmutableMap.of(ATTEMPT_KEY, 1, RUN_RESULT_KEY, RunResultType.SUCCESS.toString(), PARENT_INSTANCE_URN_KEY,
-            TEST_DATA_PROCESS_INSTANCE_PARENT_URN, DATA_FLOW_URN_KEY, TEST_DATA_FLOW_URN);
+        ImmutableMap.of(
+            ATTEMPT_KEY,
+            1,
+            RUN_RESULT_KEY,
+            RunResultType.SUCCESS.toString(),
+            PARENT_INSTANCE_URN_KEY,
+            TEST_DATA_PROCESS_INSTANCE_PARENT_URN,
+            DATA_FLOW_URN_KEY,
+            TEST_DATA_FLOW_URN);
 
     // Create Platform Event
     PlatformEvent platformEvent =
-        createChangeEvent(DATA_PROCESS_INSTANCE_ENTITY_NAME, dataProcessInstanceUrn, ChangeCategory.RUN,
-            ChangeOperation.COMPLETED, null, parameters, actorUrn);
+        createChangeEvent(
+            DATA_PROCESS_INSTANCE_ENTITY_NAME,
+            dataProcessInstanceUrn,
+            ChangeCategory.RUN,
+            ChangeOperation.COMPLETED,
+            null,
+            parameters,
+            actorUrn);
 
     verifyProducePlatformEvent(_mockClient, platformEvent, false);
   }
@@ -572,8 +671,14 @@ public void testInvokeIneligibleAspect() throws Exception {
     Mockito.verifyNoMoreInteractions(_mockClient);
   }
 
-  private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, ChangeCategory category,
-      ChangeOperation operation, String modifier, Map<String, Object> parameters, Urn actor) {
+  private PlatformEvent createChangeEvent(
+      String entityType,
+      Urn entityUrn,
+      ChangeCategory category,
+      ChangeOperation operation,
+      String modifier,
+      Map<String, Object> parameters,
+      Urn actor) {
     final EntityChangeEvent changeEvent = new EntityChangeEvent();
     changeEvent.setEntityType(entityType);
     changeEvent.setEntityUrn(entityUrn);
@@ -582,7 +687,8 @@ private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, Change
     if (modifier != null) {
       changeEvent.setModifier(modifier);
     }
-    changeEvent.setAuditStamp(new AuditStamp().setActor(actor).setTime(EntityChangeEventGeneratorHookTest.EVENT_TIME));
+    changeEvent.setAuditStamp(
+        new AuditStamp().setActor(actor).setTime(EntityChangeEventGeneratorHookTest.EVENT_TIME));
     changeEvent.setVersion(0);
     if (parameters != null) {
       changeEvent.setParameters(new Parameters(new DataMap(parameters)));
@@ -590,7 +696,8 @@ private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, Change
     final PlatformEvent platformEvent = new PlatformEvent();
     platformEvent.setName(CHANGE_EVENT_PLATFORM_EVENT_NAME);
     platformEvent.setHeader(
-        new PlatformEventHeader().setTimestampMillis(EntityChangeEventGeneratorHookTest.EVENT_TIME));
+        new PlatformEventHeader()
+            .setTimestampMillis(EntityChangeEventGeneratorHookTest.EVENT_TIME));
     platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent));
     return platformEvent;
   }
@@ -611,7 +718,8 @@ private EntityChangeEventGeneratorRegistry createEntityChangeEventGeneratorRegis
 
     // Run change event generators
     registry.register(ASSERTION_RUN_EVENT_ASPECT_NAME, new AssertionRunEventChangeEventGenerator());
-    registry.register(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME,
+    registry.register(
+        DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME,
         new DataProcessInstanceRunEventChangeEventGenerator(_mockClient));
     return registry;
   }
@@ -637,7 +745,8 @@ private EntityRegistry createMockEntityRegistry() {
     Mockito.when(datasetSpec.getAspectSpec(eq(DOMAINS_ASPECT_NAME))).thenReturn(mockDomains);
 
     AspectSpec mockDeprecation = createMockAspectSpec(Deprecation.class);
-    Mockito.when(datasetSpec.getAspectSpec(eq(DEPRECATION_ASPECT_NAME))).thenReturn(mockDeprecation);
+    Mockito.when(datasetSpec.getAspectSpec(eq(DEPRECATION_ASPECT_NAME)))
+        .thenReturn(mockDeprecation);
 
     AspectSpec mockDatasetKey = createMockAspectSpec(DatasetKey.class);
     Mockito.when(datasetSpec.getAspectSpec(eq(DATASET_KEY_ASPECT_NAME))).thenReturn(mockDatasetKey);
@@ -647,29 +756,39 @@ private EntityRegistry createMockEntityRegistry() {
     // Build Assertion Entity Spec
     EntitySpec assertionSpec = Mockito.mock(EntitySpec.class);
     AspectSpec mockAssertionRunEvent = createMockAspectSpec(AssertionRunEvent.class);
-    Mockito.when(assertionSpec.getAspectSpec(eq(ASSERTION_RUN_EVENT_ASPECT_NAME))).thenReturn(mockAssertionRunEvent);
+    Mockito.when(assertionSpec.getAspectSpec(eq(ASSERTION_RUN_EVENT_ASPECT_NAME)))
+        .thenReturn(mockAssertionRunEvent);
 
     Mockito.when(registry.getEntitySpec(eq(ASSERTION_ENTITY_NAME))).thenReturn(assertionSpec);
 
     // Build Data Process Instance Entity Spec
     EntitySpec dataProcessInstanceSpec = Mockito.mock(EntitySpec.class);
-    AspectSpec mockDataProcessInstanceRunEvent = createMockAspectSpec(DataProcessInstanceRunEvent.class);
-    Mockito.when(dataProcessInstanceSpec.getAspectSpec(eq(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME)))
+    AspectSpec mockDataProcessInstanceRunEvent =
+        createMockAspectSpec(DataProcessInstanceRunEvent.class);
+    Mockito.when(
+            dataProcessInstanceSpec.getAspectSpec(eq(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME)))
         .thenReturn(mockDataProcessInstanceRunEvent);
 
-    Mockito.when(registry.getEntitySpec(DATA_PROCESS_INSTANCE_ENTITY_NAME)).thenReturn(dataProcessInstanceSpec);
+    Mockito.when(registry.getEntitySpec(DATA_PROCESS_INSTANCE_ENTITY_NAME))
+        .thenReturn(dataProcessInstanceSpec);
 
     return registry;
   }
 
-  private void verifyProducePlatformEvent(SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception {
+  private void verifyProducePlatformEvent(
+      SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception {
     verifyProducePlatformEvent(mockClient, platformEvent, true);
   }
 
-  private void verifyProducePlatformEvent(SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) throws Exception {
+  private void verifyProducePlatformEvent(
+      SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions)
+      throws Exception {
     // Verify event has been emitted.
-    verify(mockClient, Mockito.times(1)).producePlatformEvent(eq(CHANGE_EVENT_PLATFORM_EVENT_NAME), Mockito.anyString(),
-        argThat(new PlatformEventMatcher(platformEvent)));
+    verify(mockClient, Mockito.times(1))
+        .producePlatformEvent(
+            eq(CHANGE_EVENT_PLATFORM_EVENT_NAME),
+            Mockito.anyString(),
+            argThat(new PlatformEventMatcher(platformEvent)));
 
     if (noMoreInteractions) {
       Mockito.verifyNoMoreInteractions(_mockClient);
@@ -686,9 +805,10 @@ private EntityResponse buildEntityResponse(Map<String, RecordTemplate> aspects)
     final EntityResponse entityResponse = new EntityResponse();
     final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
     for (Map.Entry<String, RecordTemplate> entry : aspects.entrySet()) {
-      aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
+      aspectMap.put(
+          entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
     }
     entityResponse.setAspects(aspectMap);
     return entityResponse;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java
index aafc87b2db5a2..8a3fb237e816f 100644
--- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java
+++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java
@@ -6,7 +6,6 @@
 import javax.annotation.Nonnull;
 import org.mockito.ArgumentMatcher;
-
 public class PlatformEventMatcher implements ArgumentMatcher<PlatformEvent> {
 
   private final PlatformEvent _expected;
@@ -16,26 +15,34 @@ public PlatformEventMatcher(@Nonnull final PlatformEvent expected) {
 
   @Override
   public boolean matches(@Nonnull final PlatformEvent actual) {
-    return _expected.getName().equals(actual.getName()) && _expected.getHeader()
-        .getTimestampMillis()
-        .equals(actual.getHeader().getTimestampMillis()) && payloadMatches(actual);
+    return _expected.getName().equals(actual.getName())
+        && _expected
+            .getHeader()
+            .getTimestampMillis()
+            .equals(actual.getHeader().getTimestampMillis())
+        && payloadMatches(actual);
   }
 
   public boolean payloadMatches(@Nonnull final PlatformEvent actual) {
     final EntityChangeEvent expectedChangeEvent =
-        GenericRecordUtils.deserializePayload(_expected.getPayload().getValue(), EntityChangeEvent.class);
+        GenericRecordUtils.deserializePayload(
+            _expected.getPayload().getValue(), EntityChangeEvent.class);
     final EntityChangeEvent actualChangeEvent =
-        GenericRecordUtils.deserializePayload(actual.getPayload().getValue(), EntityChangeEvent.class);
-    boolean requiredFieldsMatch = expectedChangeEvent.getEntityType().equals(actualChangeEvent.getEntityType())
-        && expectedChangeEvent.getEntityUrn().equals(actualChangeEvent.getEntityUrn())
-        && expectedChangeEvent.getCategory().equals(actualChangeEvent.getCategory())
-        && expectedChangeEvent.getOperation().equals(actualChangeEvent.getOperation())
-        && expectedChangeEvent.getAuditStamp().equals(actualChangeEvent.getAuditStamp())
-        && expectedChangeEvent.getVersion().equals(actualChangeEvent.getVersion());
+        GenericRecordUtils.deserializePayload(
+            actual.getPayload().getValue(), EntityChangeEvent.class);
+    boolean requiredFieldsMatch =
+        expectedChangeEvent.getEntityType().equals(actualChangeEvent.getEntityType())
+            && expectedChangeEvent.getEntityUrn().equals(actualChangeEvent.getEntityUrn())
+            && expectedChangeEvent.getCategory().equals(actualChangeEvent.getCategory())
+            && expectedChangeEvent.getOperation().equals(actualChangeEvent.getOperation())
+            && expectedChangeEvent.getAuditStamp().equals(actualChangeEvent.getAuditStamp())
+            && expectedChangeEvent.getVersion().equals(actualChangeEvent.getVersion());
     boolean modifierMatches =
-        !expectedChangeEvent.hasModifier() || expectedChangeEvent.getModifier().equals(actualChangeEvent.getModifier());
-    boolean parametersMatch = !expectedChangeEvent.hasParameters() || expectedChangeEvent.getParameters()
-        .equals(actualChangeEvent.getParameters());
+        !expectedChangeEvent.hasModifier()
+            || expectedChangeEvent.getModifier().equals(actualChangeEvent.getModifier());
+    boolean parametersMatch =
+        !expectedChangeEvent.hasParameters()
+            || expectedChangeEvent.getParameters().equals(actualChangeEvent.getParameters());
     return requiredFieldsMatch && modifierMatches && parametersMatch;
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java
index a4aa00e228725..843502b2eefad 100644
--- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java
+++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.kafka.hook.ingestion;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY;
+
 import com.datahub.metadata.ingestion.IngestionScheduler;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.events.metadata.ChangeType;
@@ -10,13 +13,8 @@
 import com.linkedin.mxe.MetadataChangeLog;
 import org.mockito.Mockito;
 import org.testng.annotations.BeforeMethod;
-
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY;
-
-
 public class IngestionSchedulerHookTest {
   private IngestionSchedulerHook _ingestionSchedulerHook;
 
@@ -33,18 +31,22 @@ public void testInvoke() throws Exception {
     event.setAspectName(INGESTION_INFO_ASPECT_NAME);
     event.setChangeType(ChangeType.UPSERT);
     final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo();
-    newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday
+    newInfo.setSchedule(
+        new DataHubIngestionSourceSchedule()
+            .setInterval("0 1 1 * *")
+            .setTimezone("UTC")); // Runs at 1:00 on the first day of each month
     newInfo.setType("redshift");
     newInfo.setName("My Redshift Source");
-    newInfo.setConfig(new DataHubIngestionSourceConfig()
-        .setExecutorId("default")
-        .setRecipe("{ type }")
-        .setVersion("0.8.18")
-    );
+    newInfo.setConfig(
+        new DataHubIngestionSourceConfig()
+            .setExecutorId("default")
+            .setRecipe("{ type }")
+            .setVersion("0.8.18"));
     event.setAspect(GenericRecordUtils.serializeAspect(newInfo));
     event.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"));
     _ingestionSchedulerHook.invoke(event);
-    Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).scheduleNextIngestionSourceExecution(Mockito.any(), Mockito.any());
+    Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1))
+        .scheduleNextIngestionSourceExecution(Mockito.any(), Mockito.any());
   }
 
   @Test
@@ -55,7 +57,8 @@ public void testInvokeDeleteKeyAspect() throws Exception {
     event2.setChangeType(ChangeType.DELETE);
     event2.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"));
     _ingestionSchedulerHook.invoke(event2);
-    Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).unscheduleNextIngestionSourceExecution(Mockito.any());
+    Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1))
+        .unscheduleNextIngestionSourceExecution(Mockito.any());
   }
 
   @Test
@@ -66,7 +69,8 @@ public void testInvokeDeleteInfoAspect() throws Exception {
     event2.setChangeType(ChangeType.DELETE);
     event2.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"));
     _ingestionSchedulerHook.invoke(event2);
-    Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).unscheduleNextIngestionSourceExecution(Mockito.any());
+    Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1))
+        .unscheduleNextIngestionSourceExecution(Mockito.any());
   }
 
   @Test
@@ -78,5 +82,3 @@ public void testInvokeWrongAspect() {
     Mockito.verifyNoInteractions(_ingestionSchedulerHook.scheduler());
   }
 }
-
-
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java
index 93e98b7343cd4..d4c6d122a6689 100644
--- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java
+++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.kafka.hook.siblings;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.*;
+
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.FabricType;
@@ -31,16 +34,11 @@
 import com.linkedin.metadata.utils.GenericRecordUtils;
 import com.linkedin.mxe.MetadataChangeLog;
 import com.linkedin.mxe.MetadataChangeProposal;
+import java.net.URISyntaxException;
 import org.mockito.Mockito;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import java.net.URISyntaxException;
-
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.ArgumentMatchers.*;
-
-
 public class SiblingAssociationHookTest {
   private SiblingAssociationHook _siblingAssociationHook;
   SystemRestliEntityClient _mockEntityClient;
@@ -48,11 +46,15 @@ public class SiblingAssociationHookTest {
 
   @BeforeMethod
   public void setupTest() {
-    EntityRegistry registry = new ConfigEntityRegistry(
-        SiblingAssociationHookTest.class.getClassLoader().getResourceAsStream("test-entity-registry-siblings.yml"));
+    EntityRegistry registry =
+        new ConfigEntityRegistry(
+            SiblingAssociationHookTest.class
+                .getClassLoader()
+                .getResourceAsStream("test-entity-registry-siblings.yml"));
     _mockEntityClient = Mockito.mock(SystemRestliEntityClient.class);
     _mockSearchService = Mockito.mock(EntitySearchService.class);
-    _siblingAssociationHook = new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true);
+    _siblingAssociationHook =
+        new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true);
     _siblingAssociationHook.setEnabled(true);
   }
 
@@ -61,23 +63,28 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception {
     SubTypes mockSourceSubtypesAspect = new SubTypes();
     mockSourceSubtypesAspect.setTypeNames(new StringArray(ImmutableList.of("source")));
     EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap();
-    mockResponseMap.put(SUB_TYPES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data())));
+    mockResponseMap.put(
+        SUB_TYPES_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data())));
     EntityResponse mockResponse = new EntityResponse();
     mockResponse.setAspects(mockResponseMap);
 
     Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true);
-
     Mockito.when(
-        _mockEntityClient.getV2(
-            Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"),
-            ImmutableSet.of(SUB_TYPES_ASPECT_NAME)
-        )).thenReturn(mockResponse);
-
-
-    MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
-
-    Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED);
+            _mockEntityClient.getV2(
+                Urn.createFromString(
+                    "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"),
+                ImmutableSet.of(SUB_TYPES_ASPECT_NAME)))
+        .thenReturn(mockResponse);
+
+    MetadataChangeLog event =
+        createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
+
+    Upstream upstream =
+        createUpstream(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)",
+            DatasetLineageType.TRANSFORMED);
 
     final UpstreamLineage upstreamLineage = new UpstreamLineage();
     final UpstreamArray upstreamArray = new UpstreamArray();
@@ -85,34 +92,52 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception {
     upstreamLineage.setUpstreams(upstreamArray);
 
     event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage));
-    event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
+    event.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     _siblingAssociationHook.invoke(event);
 
-    final Siblings dbtSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(true);
+    final Siblings dbtSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(true);
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
-    proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
+    proposal.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     proposal.setEntityType(DATASET_ENTITY_NAME);
     proposal.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect));
     proposal.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), eq(true));
 
-    final Siblings sourceSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(false);
+    final Siblings sourceSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(false);
 
     final MetadataChangeProposal proposal2 = new MetadataChangeProposal();
-    proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
+    proposal2.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
     proposal2.setEntityType(DATASET_ENTITY_NAME);
     proposal2.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect));
     proposal2.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal2), eq(true));
   }
 
   @Test
@@ -123,20 +148,27 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception {
 
     Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true);
     EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap();
-    mockResponseMap.put(SUB_TYPES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data())));
+    mockResponseMap.put(
+        SUB_TYPES_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data())));
     EntityResponse mockResponse = new EntityResponse();
     mockResponse.setAspects(mockResponseMap);
 
     Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true);
-
     Mockito.when(
-        _mockEntityClient.getV2(
-            Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"),
-            ImmutableSet.of(SUB_TYPES_ASPECT_NAME))).thenReturn(mockResponse);
-
-    MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
-    Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED);
+            _mockEntityClient.getV2(
+                Urn.createFromString(
+                    "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"),
+                ImmutableSet.of(SUB_TYPES_ASPECT_NAME)))
+        .thenReturn(mockResponse);
+
+    MetadataChangeLog event =
+        createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
+    Upstream upstream =
+        createUpstream(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)",
+            DatasetLineageType.TRANSFORMED);
 
     final UpstreamLineage upstreamLineage = new UpstreamLineage();
     final UpstreamArray upstreamArray = new UpstreamArray();
@@ -145,65 +177,96 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception {
     upstreamLineage.setUpstreams(upstreamArray);
 
     event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage));
-    event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
+    event.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     _siblingAssociationHook.invoke(event);
 
-    final Siblings dbtSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(true);
+    final Siblings dbtSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(true);
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
-    proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
+    proposal.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     proposal.setEntityType(DATASET_ENTITY_NAME);
     proposal.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect));
     proposal.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.eq(proposal), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(0))
+        .ingestProposal(Mockito.eq(proposal), eq(true));
   }
 
   @Test
   public void testInvokeWhenThereIsAPairWithBigqueryDownstreamNode() throws Exception {
     Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true);
 
-
-    MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
+    MetadataChangeLog event =
+        createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
 
     final UpstreamLineage upstreamLineage = new UpstreamLineage();
     final UpstreamArray upstreamArray = new UpstreamArray();
-    Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED);
+    Upstream upstream =
+        createUpstream(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)",
+            DatasetLineageType.TRANSFORMED);
     upstreamArray.add(upstream);
     upstreamLineage.setUpstreams(upstreamArray);
 
     event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage));
-    event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
+    event.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
     _siblingAssociationHook.invoke(event);
 
-    final Siblings dbtSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(true);
+    final Siblings dbtSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(true);
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
-    proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
+    proposal.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     proposal.setEntityType(DATASET_ENTITY_NAME);
     proposal.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect));
     proposal.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), eq(true));
 
-    final Siblings sourceSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(false);
+    final Siblings sourceSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(false);
 
     final MetadataChangeProposal proposal2 = new MetadataChangeProposal();
-    proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
+    proposal2.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
     proposal2.setEntityType(DATASET_ENTITY_NAME);
     proposal2.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect));
     proposal2.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal2), eq(true));
   }
 
   @Test
@@ -214,126 +277,176 @@ public void testInvokeWhenThereIsAKeyBeingReingested() throws Exception {
     SearchEntityArray returnEntityArray = new SearchEntityArray();
     SearchEntity returnArrayValue = new SearchEntity();
     returnArrayValue.setEntity(
-        Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")
-    );
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     returnEntityArray.add(returnArrayValue);
     returnSearchResult.setEntities(returnEntityArray);
 
     Mockito.when(
-        _mockSearchService.search(
-            any(), anyString(), any(), any(), anyInt(), anyInt(), eq(new SearchFlags().setFulltext(false)
-                .setSkipAggregates(true).setSkipHighlighting(true))
-        )).thenReturn(returnSearchResult);
-
-    MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, DATASET_KEY_ASPECT_NAME, ChangeType.UPSERT);
+            _mockSearchService.search(
+                any(),
+                anyString(),
+                any(),
+                any(),
+                anyInt(),
+                anyInt(),
+                eq(
+                    new SearchFlags()
+                        .setFulltext(false)
+                        .setSkipAggregates(true)
+                        .setSkipHighlighting(true))))
+        .thenReturn(returnSearchResult);
+
+    MetadataChangeLog event =
+        createEvent(DATASET_ENTITY_NAME, DATASET_KEY_ASPECT_NAME, ChangeType.UPSERT);
     final DatasetKey datasetKey = new DatasetKey();
     datasetKey.setName("my-proj.jaffle_shop.customers");
     datasetKey.setOrigin(FabricType.PROD);
     datasetKey.setPlatform(DataPlatformUrn.createFromString("urn:li:dataPlatform:bigquery"));
 
     event.setAspect(GenericRecordUtils.serializeAspect(datasetKey));
-    event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
+    event.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
     _siblingAssociationHook.invoke(event);
 
-    final Siblings dbtSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(true);
+    final Siblings dbtSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(true);
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
-    proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
+    proposal.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"));
     proposal.setEntityType(DATASET_ENTITY_NAME);
     proposal.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect));
     proposal.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), eq(true));
 
-    final Siblings sourceSiblingsAspect = new Siblings()
-        .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"))))
-        .setPrimary(false);
+    final Siblings sourceSiblingsAspect =
+        new Siblings()
+            .setSiblings(
+                new UrnArray(
+                    ImmutableList.of(
+                        Urn.createFromString(
+                            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"))))
+            .setPrimary(false);
 
     final MetadataChangeProposal proposal2 = new MetadataChangeProposal();
-    proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
+    proposal2.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
     proposal2.setEntityType(DATASET_ENTITY_NAME);
     proposal2.setAspectName(SIBLINGS_ASPECT_NAME);
     proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect));
     proposal2.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true));
+    Mockito.verify(_mockEntityClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal2), eq(true));
   }
+
   @Test
   public void testInvokeWhenSourceUrnHasTwoDbtUpstreams() throws Exception {
 
-    MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
+    MetadataChangeLog event =
+        createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT);
     final UpstreamLineage upstreamLineage = new UpstreamLineage();
     final UpstreamArray upstreamArray = new UpstreamArray();
 
-    Upstream dbtUpstream1 = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", DatasetLineageType.TRANSFORMED);
-    Upstream dbtUpstream2 = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity2,PROD)", DatasetLineageType.TRANSFORMED);
+    Upstream dbtUpstream1 =
+        createUpstream(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)",
+            DatasetLineageType.TRANSFORMED);
+    Upstream dbtUpstream2 =
+        createUpstream(
+            "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity2,PROD)",
+            DatasetLineageType.TRANSFORMED);
     upstreamArray.add(dbtUpstream1);
     upstreamArray.add(dbtUpstream2);
     upstreamLineage.setUpstreams(upstreamArray);
 
     event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage));
-    event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
+    event.setEntityUrn(
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"));
_siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); - - } @Test public void testInvokeWhenSourceUrnHasTwoUpstreamsOneDbt() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream dbtUpstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", + DatasetLineageType.TRANSFORMED); Upstream snowflakeUpstream = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(dbtUpstream); upstreamArray.add(snowflakeUpstream); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(2)).ingestProposal(Mockito.any(), eq(true)); } @Test public void testInvokeWhenSourceUrnHasTwoUpstreamsNoDbt() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream snowflakeUpstream1 = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream snowflakeUpstream1 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", + DatasetLineageType.TRANSFORMED); Upstream snowflakeUpstream2 = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", DatasetLineageType.TRANSFORMED); + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(snowflakeUpstream1); upstreamArray.add(snowflakeUpstream2); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); } - private MetadataChangeLog createEvent(String entityType, String aspectName, ChangeType changeType) { + private MetadataChangeLog 
createEvent( + String entityType, String aspectName, ChangeType changeType) { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(entityType); event.setAspectName(aspectName); event.setChangeType(changeType); return event; } + private Upstream createUpstream(String urn, DatasetLineageType upstreamType) { final Upstream upstream = new Upstream(); @@ -346,6 +459,4 @@ private Upstream createUpstream(String urn, DatasetLineageType upstreamType) { return upstream; } - - - } +} diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java index b46308873ca16..6d1bdca9c116f 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook.spring; +import static org.testng.AssertJUnit.*; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.kafka.MetadataChangeLogProcessor; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; @@ -13,29 +15,34 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - -@SpringBootTest(classes = { - MCLSpringTestConfiguration.class, ConfigurationProvider.class - }, +@SpringBootTest( + classes = {MCLSpringTestConfiguration.class, ConfigurationProvider.class}, properties = { "ingestionScheduler.enabled=false", "configEntityRegistry.path=../../metadata-jobs/mae-consumer/src/test/resources/test-entity-registry.yml", "kafka.schemaRegistry.type=INTERNAL" - }) -@TestPropertySource(locations = "classpath:/application.yml", properties = { - "MCL_CONSUMER_ENABLED=true" -}) + }) +@TestPropertySource( + locations = "classpath:/application.yml", + properties = {"MCL_CONSUMER_ENABLED=true"}) @EnableAutoConfiguration(exclude = {CassandraAutoConfiguration.class}) public class MCLSpringTest extends AbstractTestNGSpringContextTests { @Test public void testHooks() { - MetadataChangeLogProcessor metadataChangeLogProcessor = applicationContext.getBean(MetadataChangeLogProcessor.class); - assertTrue(metadataChangeLogProcessor.getHooks().stream().noneMatch(hook -> hook instanceof IngestionSchedulerHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof UpdateIndicesHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof SiblingAssociationHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); + MetadataChangeLogProcessor metadataChangeLogProcessor = + applicationContext.getBean(MetadataChangeLogProcessor.class); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .noneMatch(hook -> hook instanceof IngestionSchedulerHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof UpdateIndicesHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof SiblingAssociationHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); } } diff --git 
a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index 1d9c17c676990..9d646819932e9 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -19,40 +19,32 @@ import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; - @Configuration -@ComponentScan(basePackages = { - "com.linkedin.metadata.kafka", - "com.linkedin.gms.factory.entity.update.indices" -}) +@ComponentScan( + basePackages = { + "com.linkedin.metadata.kafka", + "com.linkedin.gms.factory.entity.update.indices" + }) public class MCLSpringTestConfiguration { - @MockBean - public EntityRegistry entityRegistry; + @MockBean public EntityRegistry entityRegistry; - @MockBean - public ElasticSearchGraphService graphService; + @MockBean public ElasticSearchGraphService graphService; - @MockBean - public TimeseriesAspectService timeseriesAspectService; + @MockBean public TimeseriesAspectService timeseriesAspectService; - @MockBean - public SystemMetadataService systemMetadataService; + @MockBean public SystemMetadataService systemMetadataService; - @MockBean - public SearchDocumentTransformer searchDocumentTransformer; + @MockBean public SearchDocumentTransformer searchDocumentTransformer; - @MockBean - public IngestionScheduler ingestionScheduler; + @MockBean public IngestionScheduler ingestionScheduler; @MockBean(name = "systemRestliEntityClient") public SystemRestliEntityClient entityClient; - @MockBean - public ElasticSearchService searchService; + @MockBean public ElasticSearchService searchService; - @MockBean - public Authentication systemAuthentication; + @MockBean public Authentication systemAuthentication; @MockBean(name = "dataHubUpgradeKafkaListener") public DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener; @@ -63,9 +55,7 @@ public class MCLSpringTestConfiguration { @MockBean(name = "duheKafkaConsumerFactory") public DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory; - @MockBean - public SchemaRegistryService schemaRegistryService; + @MockBean public SchemaRegistryService schemaRegistryService; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index f0c59240a9ba4..0d8192a823865 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -12,34 +12,35 @@ import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.PropertySource; - @SuppressWarnings("checkstyle:HideUtilityClassConstructor") -@SpringBootApplication(exclude = { - ElasticsearchRestClientAutoConfiguration.class, - CassandraAutoConfiguration.class, - SolrHealthContributorAutoConfiguration.class -}) -@ComponentScan(basePackages = { - "com.linkedin.metadata.boot.kafka", - "com.linkedin.gms.factory.auth", - 
"com.linkedin.gms.factory.common", - "com.linkedin.gms.factory.config", - "com.linkedin.gms.factory.entity", - "com.linkedin.gms.factory.entityregistry", - "com.linkedin.gms.factory.kafka", - "com.linkedin.gms.factory.search", - "com.linkedin.gms.factory.secret", - "com.linkedin.gms.factory.timeseries", - "com.linkedin.restli.server", - "com.linkedin.metadata.restli", - "com.linkedin.metadata.kafka", - "com.linkedin.metadata.dao.producer" -}, excludeFilters = { - @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = { - ScheduledAnalyticsFactory.class, - RestliEntityClientFactory.class - }) -}) +@SpringBootApplication( + exclude = { + ElasticsearchRestClientAutoConfiguration.class, + CassandraAutoConfiguration.class, + SolrHealthContributorAutoConfiguration.class + }) +@ComponentScan( + basePackages = { + "com.linkedin.metadata.boot.kafka", + "com.linkedin.gms.factory.auth", + "com.linkedin.gms.factory.common", + "com.linkedin.gms.factory.config", + "com.linkedin.gms.factory.entity", + "com.linkedin.gms.factory.entityregistry", + "com.linkedin.gms.factory.kafka", + "com.linkedin.gms.factory.search", + "com.linkedin.gms.factory.secret", + "com.linkedin.gms.factory.timeseries", + "com.linkedin.restli.server", + "com.linkedin.metadata.restli", + "com.linkedin.metadata.kafka", + "com.linkedin.metadata.dao.producer" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = {ScheduledAnalyticsFactory.class, RestliEntityClientFactory.class}) + }) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MceConsumerApplication { diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java index abd73d03a7b55..990e0df102d37 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java @@ -1,74 +1,71 @@ package com.linkedin.metadata.restli; +import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts; + import io.ebean.datasource.DataSourceConfig; +import java.util.HashMap; +import java.util.Map; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; -import java.util.HashMap; -import java.util.Map; - -import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts; - @Configuration public class EbeanServerConfig { - @Value("${ebean.username}") - private String ebeanDatasourceUsername; - - @Value("${ebean.password}") - private String ebeanDatasourcePassword; + @Value("${ebean.username}") + private String ebeanDatasourceUsername; - @Value("${ebean.driver}") - private String ebeanDatasourceDriver; + @Value("${ebean.password}") + private String ebeanDatasourcePassword; - @Value("${ebean.minConnections:1}") - private Integer ebeanMinConnections; + @Value("${ebean.driver}") + private String ebeanDatasourceDriver; - @Value("${ebean.maxInactiveTimeSeconds:120}") - private Integer ebeanMaxInactiveTimeSecs; + @Value("${ebean.minConnections:1}") + private Integer ebeanMinConnections; - 
@Value("${ebean.maxAgeMinutes:120}") - private Integer ebeanMaxAgeMinutes; + @Value("${ebean.maxInactiveTimeSeconds:120}") + private Integer ebeanMaxInactiveTimeSecs; - @Value("${ebean.leakTimeMinutes:15}") - private Integer ebeanLeakTimeMinutes; + @Value("${ebean.maxAgeMinutes:120}") + private Integer ebeanMaxAgeMinutes; - @Value("${ebean.waitTimeoutMillis:1000}") - private Integer ebeanWaitTimeoutMillis; + @Value("${ebean.leakTimeMinutes:15}") + private Integer ebeanLeakTimeMinutes; - @Value("${ebean.autoCreateDdl:false}") - private Boolean ebeanAutoCreate; + @Value("${ebean.waitTimeoutMillis:1000}") + private Integer ebeanWaitTimeoutMillis; - @Value("${ebean.postgresUseIamAuth:false}") - private Boolean postgresUseIamAuth; + @Value("${ebean.autoCreateDdl:false}") + private Boolean ebeanAutoCreate; + @Value("${ebean.postgresUseIamAuth:false}") + private Boolean postgresUseIamAuth; - @Bean("ebeanDataSourceConfig") - @Primary - public DataSourceConfig buildDataSourceConfig( - @Value("${ebean.url}") String dataSourceUrl, - @Qualifier("parseqEngineThreads") int ebeanMaxConnections - ) { - DataSourceConfig dataSourceConfig = new DataSourceConfig(); - dataSourceConfig.setUsername(ebeanDatasourceUsername); - dataSourceConfig.setPassword(ebeanDatasourcePassword); - dataSourceConfig.setUrl(dataSourceUrl); - dataSourceConfig.setDriver(ebeanDatasourceDriver); - dataSourceConfig.setMinConnections(ebeanMinConnections); - dataSourceConfig.setMaxConnections(ebeanMaxConnections); - dataSourceConfig.setMaxInactiveTimeSecs(ebeanMaxInactiveTimeSecs); - dataSourceConfig.setMaxAgeMinutes(ebeanMaxAgeMinutes); - dataSourceConfig.setLeakTimeMinutes(ebeanLeakTimeMinutes); - dataSourceConfig.setWaitTimeoutMillis(ebeanWaitTimeoutMillis); - dataSourceConfig.setListener(getListenerToTrackCounts("mce-consumer")); - // Adding IAM auth access for AWS Postgres - if (postgresUseIamAuth) { - Map<String, String> custom = new HashMap<>(); - custom.put("wrapperPlugins", "iam"); - dataSourceConfig.setCustomProperties(custom); - } - return dataSourceConfig; + @Bean("ebeanDataSourceConfig") + @Primary + public DataSourceConfig buildDataSourceConfig( + @Value("${ebean.url}") String dataSourceUrl, + @Qualifier("parseqEngineThreads") int ebeanMaxConnections) { + DataSourceConfig dataSourceConfig = new DataSourceConfig(); + dataSourceConfig.setUsername(ebeanDatasourceUsername); + dataSourceConfig.setPassword(ebeanDatasourcePassword); + dataSourceConfig.setUrl(dataSourceUrl); + dataSourceConfig.setDriver(ebeanDatasourceDriver); + dataSourceConfig.setMinConnections(ebeanMinConnections); + dataSourceConfig.setMaxConnections(ebeanMaxConnections); + dataSourceConfig.setMaxInactiveTimeSecs(ebeanMaxInactiveTimeSecs); + dataSourceConfig.setMaxAgeMinutes(ebeanMaxAgeMinutes); + dataSourceConfig.setLeakTimeMinutes(ebeanLeakTimeMinutes); + dataSourceConfig.setWaitTimeoutMillis(ebeanWaitTimeoutMillis); + dataSourceConfig.setListener(getListenerToTrackCounts("mce-consumer")); + // Adding IAM auth access for AWS Postgres + if (postgresUseIamAuth) { + Map<String, String> custom = new HashMap<>(); + custom.put("wrapperPlugins", "iam"); + dataSourceConfig.setCustomProperties(custom); } + return dataSourceConfig; + } } diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java index 563cc5ce04c66..4d7e10d694c4e 100644 --- 
a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java @@ -6,6 +6,7 @@ import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; import com.linkedin.restli.server.RestliHandlerServlet; +import java.net.URI; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.web.servlet.FilterRegistrationBean; @@ -15,54 +16,53 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; -import java.net.URI; - @Configuration @Import({SystemAuthenticationFactory.class}) public class RestliServletConfig { - @Value("${server.port}") - private int configuredPort; + @Value("${server.port}") + private int configuredPort; - @Value("${entityClient.retryInterval:2}") - private int retryInterval; + @Value("${entityClient.retryInterval:2}") + private int retryInterval; - @Value("${entityClient.numRetries:3}") - private int numRetries; + @Value("${entityClient.numRetries:3}") + private int numRetries; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = String.format("http://localhost:%s/gms/", configuredPort); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); - } + @Bean("restliEntityClient") + @Primary + public RestliEntityClient restliEntityClient() { + String selfUri = String.format("http://localhost:%s/gms/", configuredPort); + final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); + return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); + } - @Bean("restliServletRegistration") - public ServletRegistrationBean<RestliHandlerServlet> restliServletRegistration( - @Qualifier("restliHandlerServlet") RestliHandlerServlet servlet) { - return new ServletRegistrationBean<>(servlet, "/gms/*"); - } + @Bean("restliServletRegistration") + public ServletRegistrationBean<RestliHandlerServlet> restliServletRegistration( + @Qualifier("restliHandlerServlet") RestliHandlerServlet servlet) { + return new ServletRegistrationBean<>(servlet, "/gms/*"); + } - @Bean - public RestliHandlerServlet restliHandlerServlet() { - return new RestliHandlerServlet(); - } + @Bean + public RestliHandlerServlet restliHandlerServlet() { + return new RestliHandlerServlet(); + } - @Bean - public FilterRegistrationBean<AuthenticationFilter> authenticationFilterRegistrationBean( - @Qualifier("restliServletRegistration") ServletRegistrationBean<RestliHandlerServlet> servlet - ) { - FilterRegistrationBean<AuthenticationFilter> registrationBean = new FilterRegistrationBean<>(); - registrationBean.addServletRegistrationBeans(servlet); - registrationBean.setOrder(1); - return registrationBean; - } + @Bean + public FilterRegistrationBean<AuthenticationFilter> authenticationFilterRegistrationBean( + @Qualifier("restliServletRegistration") + ServletRegistrationBean<RestliHandlerServlet> servlet) { + FilterRegistrationBean<AuthenticationFilter> registrationBean = new FilterRegistrationBean<>(); + registrationBean.addServletRegistrationBeans(servlet); + registrationBean.setOrder(1); + return registrationBean; + } - @Bean - public 
AuthenticationFilter authenticationFilter(FilterRegistrationBean<AuthenticationFilter> filterReg) { - AuthenticationFilter filter = new AuthenticationFilter(); - filterReg.setFilter(filter); - return filter; - } + @Bean + public AuthenticationFilter authenticationFilter( + FilterRegistrationBean<AuthenticationFilter> filterReg) { + AuthenticationFilter filter = new AuthenticationFilter(); + filterReg.setFilter(filter); + return filter; + } } diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index c23cf1ea3d165..714c7b899ff49 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; +import static org.testng.AssertJUnit.assertTrue; + import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import org.springframework.beans.factory.annotation.Autowired; @@ -9,30 +13,25 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; -import static org.testng.AssertJUnit.assertTrue; - @ActiveProfiles("test") -@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, - classes = {MceConsumerApplication.class, MceConsumerApplicationTestConfiguration.class}) +@SpringBootTest( + webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, + classes = {MceConsumerApplication.class, MceConsumerApplicationTestConfiguration.class}) public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - private TestRestTemplate restTemplate; + @Autowired private TestRestTemplate restTemplate; - @Autowired - private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; - @Test - public void testRestliServletConfig() { - RestoreIndicesResult mockResult = new RestoreIndicesResult(); - mockResult.setRowsMigrated(100); - when(_mockEntityService.restoreIndices(any(), any())).thenReturn(mockResult); + @Test + public void testRestliServletConfig() { + RestoreIndicesResult mockResult = new RestoreIndicesResult(); + mockResult.setRowsMigrated(100); + when(_mockEntityService.restoreIndices(any(), any())).thenReturn(mockResult); - String response = this.restTemplate - .postForObject("/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); - assertTrue(response.contains(mockResult.toString())); - } + String response = + this.restTemplate.postForObject( + "/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); + assertTrue(response.contains(mockResult.toString())); + } } diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index bee1441b5aaf6..1a44265c7a92a 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ 
b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -13,6 +13,7 @@ import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; import io.ebean.Database; +import java.net.URI; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -21,44 +22,33 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; -import java.net.URI; - @TestConfiguration @Import(value = {SystemAuthenticationFactory.class}) public class MceConsumerApplicationTestConfiguration { - @Autowired - private TestRestTemplate restTemplate; + @Autowired private TestRestTemplate restTemplate; - @MockBean - public KafkaHealthChecker kafkaHealthChecker; + @MockBean public KafkaHealthChecker kafkaHealthChecker; - @MockBean - public EntityService _entityService; + @MockBean public EntityService _entityService; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = restTemplate.getRootUri(); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); - } + @Bean("restliEntityClient") + @Primary + public RestliEntityClient restliEntityClient() { + String selfUri = restTemplate.getRootUri(); + final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); + return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); + } - @MockBean - public Database ebeanServer; + @MockBean public Database ebeanServer; - @MockBean - protected TimeseriesAspectService timeseriesAspectService; + @MockBean protected TimeseriesAspectService timeseriesAspectService; - @MockBean - protected EntityRegistry entityRegistry; + @MockBean protected EntityRegistry entityRegistry; - @MockBean - protected ConfigEntityRegistry configEntityRegistry; + @MockBean protected ConfigEntityRegistry configEntityRegistry; - @MockBean - protected SiblingGraphService siblingGraphService; + @MockBean protected SiblingGraphService siblingGraphService; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java index 76e13d5e4da23..b04ecc7761eb6 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -13,9 +15,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static com.linkedin.metadata.Constants.*; - - @Controller @Import(GitVersionFactory.class) public class McpConsumerConfig { @@ -24,10 +23,15 @@ public class McpConsumerConfig { private static final ObjectMapper OBJECT_MAPPER = new 
ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public McpConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java index c30dd6e6f96dc..217b826689c7c 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java @@ -6,8 +6,8 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; import com.linkedin.metadata.snapshot.Snapshot; @@ -18,7 +18,6 @@ import com.linkedin.r2.RemoteInvocationException; import java.io.IOException; import javax.annotation.Nonnull; - import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -35,27 +34,38 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(MetadataChangeProposalProcessorCondition.class) -@Import({RestliEntityClientFactory.class, KafkaEventConsumerFactory.class, DataHubKafkaProducerFactory.class}) +@Import({ + RestliEntityClientFactory.class, + KafkaEventConsumerFactory.class, + DataHubKafkaProducerFactory.class +}) @EnableKafka @RequiredArgsConstructor public class MetadataChangeEventsProcessor { - @NonNull - private final Authentication systemAuthentication; + @NonNull private final Authentication systemAuthentication; private final SystemRestliEntityClient entityClient; private final Producer<String, IndexedRecord> kafkaProducer; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - @Value("${FAILED_METADATA_CHANGE_EVENT_NAME:${KAFKA_FMCE_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_EVENT + "}}") + @Value( + "${FAILED_METADATA_CHANGE_EVENT_NAME:${KAFKA_FMCE_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_EVENT + + "}}") private String fmceTopicName; - @KafkaListener(id = "${METADATA_CHANGE_EVENT_KAFKA_CONSUMER_GROUP_ID:mce-consumer-job-client}", topics = - "${METADATA_CHANGE_EVENT_NAME:${KAFKA_MCE_TOPIC_NAME:" + Topics.METADATA_CHANGE_EVENT + "}}", containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = 
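The static block in McpConsumerConfig above exists because recent Jackson versions enforce a default cap on string value length, which large serialized aspects can exceed. A standalone sketch of the same technique; the 50 MB figure is an illustrative override, not DataHub's default.

import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonLimitsSketch {

  public static ObjectMapper newLenientMapper() {
    ObjectMapper mapper = new ObjectMapper();
    // Jackson 2.15+ rejects string values over a default length; raise the cap
    // so oversized payloads still deserialize. 50 MB here is an arbitrary example.
    int maxStringLength = 50 * 1024 * 1024;
    mapper
        .getFactory()
        .setStreamReadConstraints(
            StreamReadConstraints.builder().maxStringLength(maxStringLength).build());
    return mapper;
  }
}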
"${METADATA_CHANGE_EVENT_KAFKA_CONSUMER_GROUP_ID:mce-consumer-job-client}", + topics = + "${METADATA_CHANGE_EVENT_NAME:${KAFKA_MCE_TOPIC_NAME:" + + Topics.METADATA_CHANGE_EVENT + + "}}", + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); @@ -77,21 +87,26 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) } private void sendFailedMCE(@Nonnull MetadataChangeEvent event, @Nonnull Throwable throwable) { - final FailedMetadataChangeEvent failedMetadataChangeEvent = createFailedMCEEvent(event, throwable); + final FailedMetadataChangeEvent failedMetadataChangeEvent = + createFailedMCEEvent(event, throwable); try { - final GenericRecord genericFailedMCERecord = EventUtils.pegasusToAvroFailedMCE(failedMetadataChangeEvent); + final GenericRecord genericFailedMCERecord = + EventUtils.pegasusToAvroFailedMCE(failedMetadataChangeEvent); log.debug("Sending FailedMessages to topic - {}", fmceTopicName); - log.info("Error while processing MCE: FailedMetadataChangeEvent - {}", failedMetadataChangeEvent); + log.info( + "Error while processing MCE: FailedMetadataChangeEvent - {}", failedMetadataChangeEvent); kafkaProducer.send(new ProducerRecord<>(fmceTopicName, genericFailedMCERecord)); } catch (IOException e) { - log.error("Error while sending FailedMetadataChangeEvent: Exception - {}, FailedMetadataChangeEvent - {}", - e.getStackTrace(), failedMetadataChangeEvent); + log.error( + "Error while sending FailedMetadataChangeEvent: Exception - {}, FailedMetadataChangeEvent - {}", + e.getStackTrace(), + failedMetadataChangeEvent); } } @Nonnull - private FailedMetadataChangeEvent createFailedMCEEvent(@Nonnull MetadataChangeEvent event, - @Nonnull Throwable throwable) { + private FailedMetadataChangeEvent createFailedMCEEvent( + @Nonnull MetadataChangeEvent event, @Nonnull Throwable throwable) { final FailedMetadataChangeEvent fmce = new FailedMetadataChangeEvent(); fmce.setError(ExceptionUtils.getStackTrace(throwable)); fmce.setMetadataChangeEvent(event); @@ -103,6 +118,7 @@ private void processProposedSnapshot(@Nonnull MetadataChangeEvent metadataChange final Snapshot snapshotUnion = metadataChangeEvent.getProposedSnapshot(); final Entity entity = new Entity().setValue(snapshotUnion); // TODO: GMS Auth Part 2: Get the actor identity from the event header itself. 
- entityClient.updateWithSystemMetadata(entity, metadataChangeEvent.getSystemMetadata(), this.systemAuthentication); + entityClient.updateWithSystemMetadata( + entity, metadataChangeEvent.getSystemMetadata(), this.systemAuthentication); } } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java index 79f8c90af8ec7..b487ded6a9439 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java @@ -4,8 +4,8 @@ import com.codahale.metrics.MetricRegistry; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -14,7 +14,6 @@ import com.linkedin.mxe.Topics; import java.io.IOException; import javax.annotation.Nonnull; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -30,10 +29,13 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component -@Import({RestliEntityClientFactory.class, KafkaEventConsumerFactory.class, DataHubKafkaProducerFactory.class}) +@Import({ + RestliEntityClientFactory.class, + KafkaEventConsumerFactory.class, + DataHubKafkaProducerFactory.class +}) @Conditional(MetadataChangeProposalProcessorCondition.class) @EnableKafka @RequiredArgsConstructor @@ -42,14 +44,19 @@ public class MetadataChangeProposalsProcessor { private final SystemRestliEntityClient entityClient; private final Producer<String, IndexedRecord> kafkaProducer; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - @Value("${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_PROPOSAL + "}") + @Value( + "${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_PROPOSAL + + "}") private String fmcpTopicName; - @KafkaListener(id = "${METADATA_CHANGE_PROPOSAL_KAFKA_CONSUMER_GROUP_ID:generic-mce-consumer-job-client}", topics = - "${METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.METADATA_CHANGE_PROPOSAL - + "}", containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = "${METADATA_CHANGE_PROPOSAL_KAFKA_CONSUMER_GROUP_ID:generic-mce-consumer-job-client}", + topics = "${METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.METADATA_CHANGE_PROPOSAL + "}", + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); @@ -69,21 +76,27 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) } private void sendFailedMCP(@Nonnull 
MetadataChangeProposal event, @Nonnull Throwable throwable) { - final FailedMetadataChangeProposal failedMetadataChangeProposal = createFailedMCPEvent(event, throwable); + final FailedMetadataChangeProposal failedMetadataChangeProposal = + createFailedMCPEvent(event, throwable); try { - final GenericRecord genericFailedMCERecord = EventUtils.pegasusToAvroFailedMCP(failedMetadataChangeProposal); + final GenericRecord genericFailedMCERecord = + EventUtils.pegasusToAvroFailedMCP(failedMetadataChangeProposal); log.debug("Sending FailedMessages to topic - {}", fmcpTopicName); - log.info("Error while processing FMCP: FailedMetadataChangeProposal - {}", failedMetadataChangeProposal); + log.info( + "Error while processing FMCP: FailedMetadataChangeProposal - {}", + failedMetadataChangeProposal); kafkaProducer.send(new ProducerRecord<>(fmcpTopicName, genericFailedMCERecord)); } catch (IOException e) { - log.error("Error while sending FailedMetadataChangeProposal: Exception - {}, FailedMetadataChangeProposal - {}", - e.getStackTrace(), failedMetadataChangeProposal); + log.error( + "Error while sending FailedMetadataChangeProposal: Exception - {}, FailedMetadataChangeProposal - {}", + e.getStackTrace(), + failedMetadataChangeProposal); } } @Nonnull - private FailedMetadataChangeProposal createFailedMCPEvent(@Nonnull MetadataChangeProposal event, - @Nonnull Throwable throwable) { + private FailedMetadataChangeProposal createFailedMCPEvent( + @Nonnull MetadataChangeProposal event, @Nonnull Throwable throwable) { final FailedMetadataChangeProposal fmcp = new FailedMetadataChangeProposal(); fmcp.setError(ExceptionUtils.getStackTrace(throwable)); fmcp.setMetadataChangeProposal(event); diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java index 1b69b1113bdb1..2bbc8304f2e27 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java @@ -4,6 +4,7 @@ import com.linkedin.metadata.boot.BootstrapManager; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationListener; @@ -12,25 +13,22 @@ import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; -import javax.annotation.Nonnull; - - -/** - * Responsible for coordinating starting steps that happen before the application starts up. - */ +/** Responsible for coordinating starting steps that happen before the application starts up. 
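MetadataChangeEventsProcessor and MetadataChangeProposalsProcessor above share one failure-handling shape: wrap the failed event in a Failed* record and produce it to a dead-letter topic so the listener keeps consuming. A generic, hypothetical sketch of that shape; the handler interface and topic wiring are placeholders, not DataHub APIs.

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class DeadLetterSketch<V> {

  private final Producer<String, V> producer;
  private final String failureTopic; // e.g. a failed-MCE/MCP topic name

  public DeadLetterSketch(Producer<String, V> producer, String failureTopic) {
    this.producer = producer;
    this.failureTopic = failureTopic;
  }

  // Runs the handler; on any failure, parks the event on the failure topic
  // for later replay instead of rethrowing and stalling the consumer.
  public void processOrDeadLetter(V event, EventHandler<V> handler) {
    try {
      handler.handle(event);
    } catch (Exception e) {
      producer.send(new ProducerRecord<>(failureTopic, event));
    }
  }

  public interface EventHandler<V> {
    void handle(V event) throws Exception;
  }
}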
*/ @Slf4j @Component @Conditional(MetadataChangeProposalProcessorCondition.class) public class ApplicationStartupListener implements ApplicationListener<ContextRefreshedEvent> { - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final DataHubUpgradeKafkaListener _dataHubUpgradeKafkaListener; private final ConfigurationProvider _configurationProvider; private final BootstrapManager _mcpBootstrapManager; public ApplicationStartupListener( - @Qualifier("dataHubUpgradeKafkaListener") DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, + @Qualifier("dataHubUpgradeKafkaListener") + DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, ConfigurationProvider configurationProvider, @Qualifier("mcpBootstrapManager") BootstrapManager bootstrapManager) { _dataHubUpgradeKafkaListener = dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java index 44e5c7cff8661..0220764cd99d6 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java @@ -7,6 +7,8 @@ import com.linkedin.metadata.boot.dependencies.BootstrapDependency; import com.linkedin.metadata.boot.steps.WaitForSystemUpdateStep; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; +import java.util.List; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -15,10 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; -import javax.annotation.Nonnull; -import java.util.List; - - @Configuration @Conditional(MetadataChangeProposalProcessorCondition.class) public class MCPBootstrapManagerFactory { @@ -27,8 +25,7 @@ public class MCPBootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -37,8 +34,8 @@ public class MCPBootstrapManagerFactory { @Scope("singleton") @Nonnull protected BootstrapManager createInstance() { - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final List<BootstrapStep> finalSteps = ImmutableList.of(waitForSystemUpdateStep); diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java index 9dd265736bfc2..1cdb05b04e0ac 100644 --- 
a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java @@ -5,12 +5,11 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class MetadataChangeProposalProcessorCondition implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MCE_CONSUMER_ENABLED")) || "true".equals( - env.getProperty("MCP_CONSUMER_ENABLED")); + return "true".equals(env.getProperty("MCE_CONSUMER_ENABLED")) + || "true".equals(env.getProperty("MCP_CONSUMER_ENABLED")); } } diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java index aa09679cb08a8..84d4f4ae4c095 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java @@ -21,7 +21,6 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(PlatformEventProcessorCondition.class) @@ -30,7 +29,8 @@ public class PlatformEventProcessor { private final List<PlatformEventHook> hooks; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); @Autowired public PlatformEventProcessor() { @@ -39,8 +39,9 @@ public PlatformEventProcessor() { this.hooks.forEach(PlatformEventHook::init); } - @KafkaListener(id = "${PLATFORM_EVENT_KAFKA_CONSUMER_GROUP_ID:generic-platform-event-job-client}", topics = { - "${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}" }, + @KafkaListener( + id = "${PLATFORM_EVENT_KAFKA_CONSUMER_GROUP_ID:generic-platform-event-job-client}", + topics = {"${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}"}, containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { @@ -48,14 +49,17 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); - log.debug("Got Generic PE on topic: {}, partition: {}, offset: {}", consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset()); + log.debug( + "Got Generic PE on topic: {}, partition: {}, offset: {}", + consumerRecord.topic(), + consumerRecord.partition(), + consumerRecord.offset()); MetricUtils.counter(this.getClass(), "received_pe_count").inc(); PlatformEvent event; try { event = EventUtils.avroToPegasusPE(record); - log.debug("Successfully converted Avro PE to Pegasus PE. name: {}", - event.getName()); + log.debug("Successfully converted Avro PE to Pegasus PE. 
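MetadataChangeProposalProcessorCondition above is a plain Spring Condition keyed off environment properties. A minimal sketch of the same gating pattern with a hypothetical property name; because Spring's Environment resolves both OS environment variables and application properties, the same condition covers container env vars and application.yml entries.

import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.Conditional;
import org.springframework.core.type.AnnotatedTypeMetadata;
import org.springframework.stereotype.Component;

class ExampleConsumerEnabledCondition implements Condition {
  @Override
  public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
    // getProperty resolves env vars and application properties alike.
    return "true".equals(context.getEnvironment().getProperty("EXAMPLE_CONSUMER_ENABLED"));
  }
}

@Component
@Conditional(ExampleConsumerEnabledCondition.class)
class ExampleConsumer {
  // Only instantiated when EXAMPLE_CONSUMER_ENABLED=true.
}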
name: {}", event.getName()); } catch (Exception e) { MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc(); log.error("Error deserializing message due to: ", e); @@ -66,8 +70,8 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) log.debug("Invoking PE hooks for event name {}", event.getName()); for (PlatformEventHook hook : this.hooks) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") - .time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency").time()) { hook.invoke(event); } catch (Exception e) { // Just skip this hook and continue. diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java index 878e4edd371bc..3083642c5bfb6 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java @@ -6,20 +6,15 @@ /** * Custom hook which is invoked on receiving a new {@link PlatformEvent} event. * - * The semantics of this hook are currently "at most once". That is, the hook will not be called + * <p>The semantics of this hook are currently "at most once". That is, the hook will not be called * with the same message. In the future, we intend to migrate to "at least once" semantics, meaning * that the hook will be responsible for implementing idempotency. */ public interface PlatformEventHook { - /** - * Initialize the hook - */ - default void init() { } + /** Initialize the hook */ + default void init() {} - /** - * Invoke the hook when a PlatformEvent is received - */ + /** Invoke the hook when a PlatformEvent is received */ void invoke(@Nonnull PlatformEvent event); - } diff --git a/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java b/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java index aa86568bed01f..56d909781fd51 100644 --- a/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java +++ b/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java @@ -4,25 +4,21 @@ import com.linkedin.pegasus.generator.DataSchemaParser; import java.io.IOException; - /** * Validates GMS PDL models by constructing a set of {@link EntitySpec}s from them. * - * The following validation rules are applied: - * - * 1. Each Entity Snapshot Model is annotated as @Entity with a common name - * 2. Each Aspect is annotated as @Aspect with a common name - * 3. Each @Searchable field is of primitive / list of primitive type - * 4. Each @Relationship field is of Urn / List of Urn type - * 5. Each Entity Snapshot includes a single Key Aspect + * <p>The following validation rules are applied: * + * <p>1. Each Entity Snapshot Model is annotated as @Entity with a common name 2. Each Aspect is + * annotated as @Aspect with a common name 3. Each @Searchable field is of primitive / list of + * primitive type 4. Each @Relationship field is of Urn / List of Urn type 5. 
Each Entity Snapshot + * includes a single Key Aspect */ public class ModelValidationTask { private static final String SNAPSHOT_SCHEMA_NAME = "com.linkedin.metadata.snapshot.Snapshot"; - private ModelValidationTask() { - } + private ModelValidationTask() {} public static void main(String[] args) throws IOException { if (args.length != 3) { @@ -34,21 +30,25 @@ public static void main(String[] args) throws IOException { final String modelPath = args[1]; final DataSchemaParser parser = new DataSchemaParser(resolverPath); - parser.parseSources(new String[]{modelPath}); + parser.parseSources(new String[] {modelPath}); - final DataSchema snapshotSchema = parser.getSchemaResolver().existingDataSchema(SNAPSHOT_SCHEMA_NAME); + final DataSchema snapshotSchema = + parser.getSchemaResolver().existingDataSchema(SNAPSHOT_SCHEMA_NAME); if (snapshotSchema == null) { throw new RuntimeException( - String.format("Failed to find Snapshot model with name %s in parsed schemas!", SNAPSHOT_SCHEMA_NAME)); + String.format( + "Failed to find Snapshot model with name %s in parsed schemas!", + SNAPSHOT_SCHEMA_NAME)); } - // TODO: Fix this so that aspects that are just in the entity registry don't fail because they aren't in the + // TODO: Fix this so that aspects that are just in the entity registry don't fail because they + // aren't in the // snapshot registry. -// try { -// new EntitySpecBuilder().buildEntitySpecs(snapshotSchema); -// } catch (Exception e) { -// throw new RuntimeException("Failed to validate DataHub PDL models", e); -// } + // try { + // new EntitySpecBuilder().buildEntitySpecs(snapshotSchema); + // } catch (Exception e) { + // throw new RuntimeException("Failed to validate DataHub PDL models", e); + // } } -} \ No newline at end of file +} diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index bd8052283e168..e90a4042c1921 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -75,11 +75,7 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: 'generateJsonSchema') } tasks.getByName("compileJava").dependsOn(openApiGenerate) -checkstyleMain.exclude '**/generated/**' - task cleanExtraDirs { delete "$projectDir/src/generatedJsonSchema" } clean.finalizedBy(cleanExtraDirs) - -checkstyleMain.exclude '**/generated/**' diff --git a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java index c3db318ece23e..29f58223a240a 100644 --- a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java +++ b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java @@ -1,5 +1,8 @@ package com.linkedin.metadata; +import static com.linkedin.metadata.ModelValidationConstants.*; +import static org.testng.AssertJUnit.*; + import com.datahub.util.validator.AspectValidator; import com.datahub.util.validator.DeltaValidator; import com.datahub.util.validator.SnapshotValidator; @@ -13,10 +16,6 @@ import javax.annotation.Nonnull; import org.testng.annotations.Test; -import static com.linkedin.metadata.ModelValidationConstants.*; -import static org.testng.AssertJUnit.*; - - public class ModelValidation { @Test @@ -39,23 +38,28 @@ public void validateSnapshots() throws Exception { @Test public void validateDeltas() throws Exception { - getRecordTemplatesInPackage("com.linkedin.metadata.delta", IGNORED_DELTA_CLASSES).forEach( - DeltaValidator::validateDeltaSchema); + getRecordTemplatesInPackage("com.linkedin.metadata.delta", IGNORED_DELTA_CLASSES) + 
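The PlatformEventHook javadoc above notes the planned move from at-most-once to at-least-once delivery, at which point hooks become responsible for idempotency. A hypothetical sketch of an idempotent hook; the in-memory dedupe set and key derivation are illustrative stand-ins for durable storage, and the imports assume the packages implied by the surrounding diffs.

import com.datahub.event.hook.PlatformEventHook;
import com.linkedin.mxe.PlatformEvent;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nonnull;

public class IdempotentPlatformEventHook implements PlatformEventHook {

  // In-memory dedupe for illustration only; a real hook would need durable storage.
  private final Set<String> seen = ConcurrentHashMap.newKeySet();

  @Override
  public void init() {
    // Load state, open connections, etc.
  }

  @Override
  public void invoke(@Nonnull PlatformEvent event) {
    // Illustrative key derivation so a redelivered event is processed once.
    String key = event.getName() + ":" + event.hashCode();
    if (seen.add(key)) {
      // First sighting: do the actual work here.
    }
  }
}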
.forEach(DeltaValidator::validateDeltaSchema); } - private List<? extends Class<? extends UnionTemplate>> getUnionTemplatesInPackage(@Nonnull String packageName, - @Nonnull Set<Class<? extends UnionTemplate>> ignoreClasses) throws IOException { + private List<? extends Class<? extends UnionTemplate>> getUnionTemplatesInPackage( + @Nonnull String packageName, @Nonnull Set<Class<? extends UnionTemplate>> ignoreClasses) + throws IOException { return getClassesInPackage(packageName, UnionTemplate.class, ignoreClasses); } - private List<? extends Class<? extends RecordTemplate>> getRecordTemplatesInPackage(@Nonnull String packageName, - @Nonnull Set<Class<? extends RecordTemplate>> ignoreClasses) throws IOException { + private List<? extends Class<? extends RecordTemplate>> getRecordTemplatesInPackage( + @Nonnull String packageName, @Nonnull Set<Class<? extends RecordTemplate>> ignoreClasses) + throws IOException { return getClassesInPackage(packageName, RecordTemplate.class, ignoreClasses); } @SuppressWarnings("unchecked") - private <T> List<? extends Class<? extends T>> getClassesInPackage(@Nonnull String packageName, - @Nonnull Class<T> parentClass, @Nonnull Set<Class<? extends T>> ignoreClasses) throws IOException { + private <T> List<? extends Class<? extends T>> getClassesInPackage( + @Nonnull String packageName, + @Nonnull Class<T> parentClass, + @Nonnull Set<Class<? extends T>> ignoreClasses) + throws IOException { return ClassPath.from(ClassLoader.getSystemClassLoader()) .getTopLevelClasses(packageName) .stream() diff --git a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java index 11fa8cdc965d4..f9e8fcc06bcbb 100644 --- a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java +++ b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.UnionTemplate; import java.util.Set; - public class ModelValidationConstants { private ModelValidationConstants() { diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java index d3c5ba822ac04..b8553235b3de7 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java @@ -3,31 +3,25 @@ import java.util.List; import lombok.Data; -/** - * POJO representing the "authentication" configuration block in application.yml. - */ +/** POJO representing the "authentication" configuration block in application.yml. 
*/ @Data public class AuthenticationConfiguration { - /** - * Whether authentication is enabled - */ + /** Whether authentication is enabled */ private boolean enabled; + /** - * List of configurations for {@link com.datahub.plugins.auth.authentication.Authenticator}s to be registered + * List of configurations for {@link com.datahub.plugins.auth.authentication.Authenticator}s to be + * registered */ private List<AuthenticatorConfiguration> authenticators; - /** - * Unique id to identify internal system callers - */ + + /** Unique id to identify internal system callers */ private String systemClientId; - /** - * Unique secret to authenticate internal system callers - */ + + /** Unique secret to authenticate internal system callers */ private String systemClientSecret; - /** - * The lifespan of a UI session token. - */ + /** The lifespan of a UI session token. */ private long sessionTokenDurationMs; private TokenServiceConfiguration tokenService; diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java index 96a3f1b8f56bd..31cfe1c057468 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java @@ -1,29 +1,21 @@ package com.datahub.authentication; -/** - * A set of shared constants related to Authentication. - */ +/** A set of shared constants related to Authentication. */ public class AuthenticationConstants { - /** - * Name of the header which carries authorization information - */ + /** Name of the header which carries authorization information */ public static final String AUTHORIZATION_HEADER_NAME = "Authorization"; /** - * A deprecated header that previously carried the urn of the authenticated actor. - * This has been replaced by the DELEGATED_FOR_ACTOR_ID and DELEGATED_FOR_ACTOR_TYPE headers. + * A deprecated header that previously carried the urn of the authenticated actor. This has been + * replaced by the DELEGATED_FOR_ACTOR_ID and DELEGATED_FOR_ACTOR_TYPE headers. */ public static final String LEGACY_X_DATAHUB_ACTOR_HEADER = "X-DataHub-Actor"; - /** - * A header capturing the unique Actor Id that is delegating a request. - */ + /** A header capturing the unique Actor Id that is delegating a request. */ public static final String DELEGATED_FOR_ACTOR_ID_HEADER_NAME = "X-DataHub-Delegated-For-Id"; - /** - * A header capturing the unique Actor Type that is delegating a request. - */ + /** A header capturing the unique Actor Type that is delegating a request. 
*/ public static final String DELEGATED_FOR_ACTOR_TYPE_HEADER_NAME = "X-DataHub-Delegated-For-Type"; public static final String SYSTEM_CLIENT_ID_CONFIG = "systemClientId"; @@ -32,6 +24,5 @@ public class AuthenticationConstants { public static final String ENTITY_SERVICE = "entityService"; public static final String TOKEN_SERVICE = "tokenService"; - private AuthenticationConstants() { - } + private AuthenticationConstants() {} } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java index 2d3cf5f588d7d..36814ee380e2f 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java @@ -3,18 +3,21 @@ import java.util.Map; import lombok.Data; - /** - * POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations provided in the application.yml. + * POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations + * provided in the application.yml. */ @Data public class AuthenticatorConfiguration { /** - * A fully-qualified class name for the {@link com.datahub.plugins.auth.authentication.Authenticator} implementation to be registered. + * A fully-qualified class name for the {@link + * com.datahub.plugins.auth.authentication.Authenticator} implementation to be registered. */ private String type; + /** - * A set of authenticator-specific configurations passed through during "init" of the authenticator. + * A set of authenticator-specific configurations passed through during "init" of the + * authenticator. */ private Map<String, Object> configs; } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java index 0a606f0f06d92..70b93544bebdf 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data -/** - * Configurations for DataHub token service - */ +/** Configurations for DataHub token service */ public class TokenServiceConfiguration { private String signingKey; private String salt; diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java index 2770fc5c41aa0..5ed69d3e2ff8c 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java @@ -4,18 +4,12 @@ import java.util.List; import lombok.Data; - -/** - * POJO representing the "authentication" configuration block in application.yml. - */ +/** POJO representing the "authentication" configuration block in application.yml. */ @Data public class AuthorizationConfiguration { - /** - * Configuration for the default DataHub Policies-based authorizer. - */ + /** Configuration for the default DataHub Policies-based authorizer. 
*/ private DefaultAuthorizerConfiguration defaultAuthorizer; - /** - * List of configurations for {@link Authorizer}s to be registered - */ + + /** List of configurations for {@link Authorizer}s to be registered */ private List<AuthorizerConfiguration> authorizers; -} \ No newline at end of file +} diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java index 65cd6c17c739c..c4a26a1cd6276 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java @@ -4,22 +4,15 @@ import java.util.Map; import lombok.Data; - -/** - * POJO representing {@link Authorizer} configurations provided in the application.yml. - */ +/** POJO representing {@link Authorizer} configurations provided in the application.yml. */ @Data public class AuthorizerConfiguration { - /** - * Whether to enable this authorizer - */ + /** Whether to enable this authorizer */ private boolean enabled; - /** - * A fully-qualified class name for the {@link Authorizer} implementation to be registered. - */ + + /** A fully-qualified class name for the {@link Authorizer} implementation to be registered. */ private String type; - /** - * A set of authorizer-specific configurations passed through during "init" of the authorizer. - */ + + /** A set of authorizer-specific configurations passed through during "init" of the authorizer. */ private Map<String, Object> configs; } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java index dfec06dedd147..c06e5b10b23f9 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java @@ -2,15 +2,11 @@ import lombok.Data; - @Data public class DefaultAuthorizerConfiguration { - /** - * Whether authorization via DataHub policies is enabled. - */ + /** Whether authorization via DataHub policies is enabled. */ private boolean enabled; - /** - * The duration between policies cache refreshes. - */ + + /** The duration between policies cache refreshes. 
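The @Data POJOs above bind the authorization block of application.yml by field name. A sketch of the equivalent programmatic construction, relying on Lombok-generated setters; the authorizer class name, refresh interval, and configs map are made-up values.

import com.datahub.authorization.AuthorizationConfiguration;
import com.datahub.authorization.AuthorizerConfiguration;
import com.datahub.authorization.DefaultAuthorizerConfiguration;
import java.util.List;
import java.util.Map;

public class AuthorizationConfigSketch {

  public static AuthorizationConfiguration build() {
    // Lombok's @Data generates the setters used below.
    DefaultAuthorizerConfiguration defaults = new DefaultAuthorizerConfiguration();
    defaults.setEnabled(true);
    defaults.setCacheRefreshIntervalSecs(120); // illustrative refresh interval

    AuthorizerConfiguration custom = new AuthorizerConfiguration();
    custom.setEnabled(true);
    custom.setType("com.example.MyAuthorizer"); // hypothetical plugin class
    custom.setConfigs(Map.<String, Object>of("endpoint", "https://example.internal"));

    AuthorizationConfiguration config = new AuthorizationConfiguration();
    config.setDefaultAuthorizer(defaults);
    config.setAuthorizers(List.of(custom));
    return config;
  }
}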
*/ private int cacheRefreshIntervalSecs; } diff --git a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java index 8c7b3ac8b98f0..335a30280c3be 100644 --- a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java +++ b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java @@ -1,19 +1,21 @@ package com.datahub.auth.authentication.filter; -import com.datahub.authentication.authenticator.AuthenticatorChain; -import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; -import com.datahub.authentication.authenticator.HealthStatusAuthenticator; -import com.datahub.authentication.authenticator.NoOpAuthenticator; -import com.datahub.authentication.token.StatefulTokenService; -import com.datahub.plugins.PluginConstant; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.AuthenticationContext; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorConfiguration; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.authentication.authenticator.AuthenticatorChain; +import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; +import com.datahub.authentication.authenticator.HealthStatusAuthenticator; +import com.datahub.authentication.authenticator.NoOpAuthenticator; +import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.plugins.PluginConstant; +import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.plugins.common.PluginConfig; import com.datahub.plugins.common.PluginPermissionManager; import com.datahub.plugins.common.PluginType; @@ -49,18 +51,14 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.web.context.support.SpringBeanAutowiringSupport; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * A servlet {@link Filter} for authenticating requests inbound to the Metadata Service. This filter is applied to the - * GraphQL Servlet, the Rest.li Servlet, and the Auth (token) Servlet. + * A servlet {@link Filter} for authenticating requests inbound to the Metadata Service. This filter + * is applied to the GraphQL Servlet, the Rest.li Servlet, and the Auth (token) Servlet. */ @Slf4j public class AuthenticationFilter implements Filter { - @Inject - private ConfigurationProvider configurationProvider; + @Inject private ConfigurationProvider configurationProvider; @Inject @Named("entityService") @@ -90,23 +88,28 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha authentication = this.authenticatorChain.authenticate(context, _logAuthenticatorExceptions); } catch (AuthenticationException e) { // For AuthenticationExpiredExceptions, terminate and provide that feedback to the user - log.debug("Failed to authenticate request. Received an AuthenticationExpiredException from authenticator chain.", + log.debug( + "Failed to authenticate request. 
Received an AuthenticationExpiredException from authenticator chain.",
           e);
-      ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage());
+      ((HttpServletResponse) response)
+          .sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage());
       return;
     }
     if (authentication != null) {
       // Successfully authenticated.
-      log.debug(String.format("Successfully authenticated request for Actor with type: %s, id: %s",
-          authentication.getActor().getType(), authentication.getActor().getId()));
+      log.debug(
+          String.format(
+              "Successfully authenticated request for Actor with type: %s, id: %s",
+              authentication.getActor().getType(), authentication.getActor().getId()));
       AuthenticationContext.setAuthentication(authentication);
       chain.doFilter(request, response);
     } else {
       // Reject request
-      log.debug("Failed to authenticate request. Received 'null' Authentication value from authenticator chain.");
-      ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED,
-          "Unauthorized to perform this action.");
+      log.debug(
+          "Failed to authenticate request. Received 'null' Authentication value from authenticator chain.");
+      ((HttpServletResponse) response)
+          .sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized to perform this action.");
       return;
     }
     AuthenticationContext.remove();
@@ -120,9 +123,10 @@ public void destroy() {
   /**
    * Constructs an {@link AuthenticatorChain} via the provided {@link AuthenticationConfiguration}.
    *
-   * The process is simple: For each configured {@link Authenticator}, attempt to instantiate the class using a default (zero-arg)
-   * constructor, then call it's initialize method passing in a freeform block of associated configurations as a {@link Map}. Finally,
-   * register the {@link Authenticator} in the authenticator chain.
+   * <p>The process is simple: For each configured {@link Authenticator}, attempt to instantiate the
+   * class using a default (zero-arg) constructor, then call its initialize method passing in a
+   * freeform block of associated configurations as a {@link Map}. Finally, register the {@link
+   * Authenticator} in the authenticator chain.
    */
   private void buildAuthenticatorChain() {
 
@@ -130,89 +134,123 @@ private void buildAuthenticatorChain() {
 
     boolean isAuthEnabled = this.configurationProvider.getAuthentication().isEnabled();
 
-    // Create authentication context object to pass to authenticator instances. They can use it as needed.
-    final AuthenticatorContext authenticatorContext = new AuthenticatorContext(
-        ImmutableMap.of(ENTITY_SERVICE, this._entityService, TOKEN_SERVICE, this._tokenService));
+    // Create authentication context object to pass to authenticator instances. They can use it as
+    // needed.
+    final AuthenticatorContext authenticatorContext =
+        new AuthenticatorContext(
+            ImmutableMap.of(
+                ENTITY_SERVICE, this._entityService, TOKEN_SERVICE, this._tokenService));
 
     if (isAuthEnabled) {
      log.info("Auth is enabled. Building authenticator chain...");
-      this.registerNativeAuthenticator(authenticatorChain, authenticatorContext); // Register native authenticators
+      this.registerNativeAuthenticator(
+          authenticatorChain, authenticatorContext); // Register native authenticators
       this.registerPlugins(authenticatorChain); // Register plugin authenticators
     } else {
-      // Authentication is not enabled. Populate authenticator chain with a purposely permissive Authenticator.
+      // Authentication is not enabled. Populate authenticator chain with a purposely permissive
+      // Authenticator.
      log.info("Auth is disabled. 
Building no-op authenticator chain..."); final NoOpAuthenticator noOpAuthenticator = new NoOpAuthenticator(); noOpAuthenticator.init( - ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, this.configurationProvider.getAuthentication().getSystemClientId()), + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId()), authenticatorContext); authenticatorChain.register(noOpAuthenticator); } } private AuthenticationRequest buildAuthContext(HttpServletRequest request) { - return new AuthenticationRequest(request.getServletPath(), request.getPathInfo(), Collections.list(request.getHeaderNames()) - .stream() - .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); + return new AuthenticationRequest( + request.getServletPath(), + request.getPathInfo(), + Collections.list(request.getHeaderNames()).stream() + .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); } private void registerPlugins(AuthenticatorChain authenticatorChain) { - // TODO: Introduce plugin factory to reduce duplicate code around authentication and authorization processing + // TODO: Introduce plugin factory to reduce duplicate code around authentication and + // authorization processing ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); - Path pluginBaseDirectory = Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); + Path pluginBaseDirectory = + Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); Optional<Config> optionalConfig = (new ConfigProvider(pluginBaseDirectory)).load(); - optionalConfig.ifPresent((config) -> { - log.info("Processing authenticator plugin from auth plugin directory {}", pluginBaseDirectory); - PluginConfigFactory authenticatorPluginPluginConfigFactory = - new PluginConfigFactory(config); + optionalConfig.ifPresent( + (config) -> { + log.info( + "Processing authenticator plugin from auth plugin directory {}", pluginBaseDirectory); + PluginConfigFactory authenticatorPluginPluginConfigFactory = + new PluginConfigFactory(config); - List<PluginConfig> authorizers = - authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - // Filter enabled authenticator plugins - List<PluginConfig> enabledAuthenticators = authorizers.stream().filter(pluginConfig -> { - if (!pluginConfig.getEnabled()) { - log.info(String.format("Authenticator %s is not enabled", pluginConfig.getName())); - } - return pluginConfig.getEnabled(); - }).collect(Collectors.toList()); + List<PluginConfig> authorizers = + authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); + // Filter enabled authenticator plugins + List<PluginConfig> enabledAuthenticators = + authorizers.stream() + .filter( + pluginConfig -> { + if (!pluginConfig.getEnabled()) { + log.info( + String.format( + "Authenticator %s is not enabled", pluginConfig.getName())); + } + return pluginConfig.getEnabled(); + }) + .collect(Collectors.toList()); - SecurityMode securityMode = - SecurityMode.valueOf(this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); - // Create permission manager with security mode - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); + SecurityMode securityMode = + SecurityMode.valueOf( + this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + // Create permission manager with security mode + PluginPermissionManager permissionManager = new 
PluginPermissionManagerImpl(securityMode); - // Initiate Authenticators - enabledAuthenticators.forEach((pluginConfig) -> { - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, pluginConfig); - // Create context - AuthenticatorContext context = new AuthenticatorContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString())); + // Initiate Authenticators + enabledAuthenticators.forEach( + (pluginConfig) -> { + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, pluginConfig); + // Create context + AuthenticatorContext context = + new AuthenticatorContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + pluginConfig.getPluginHomeDirectory().toString())); - try { - Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); - log.info("Initializing plugin {}", pluginConfig.getName()); - authenticator.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); - authenticatorChain.register(authenticator); - log.info("Plugin {} is initialized", pluginConfig.getName()); - } catch (ClassNotFoundException e) { - throw new RuntimeException(String.format("Plugin className %s not found", pluginConfig.getClassName()), e); - } finally { - Thread.currentThread().setContextClassLoader(contextClassLoader); - } - }); - }); + try { + Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + log.info("Initializing plugin {}", pluginConfig.getName()); + authenticator.init( + pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); + authenticatorChain.register(authenticator); + log.info("Plugin {} is initialized", pluginConfig.getName()); + } catch (ClassNotFoundException e) { + throw new RuntimeException( + String.format("Plugin className %s not found", pluginConfig.getClassName()), + e); + } finally { + Thread.currentThread().setContextClassLoader(contextClassLoader); + } + }); + }); } - private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, AuthenticatorContext authenticatorContext) { + private void registerNativeAuthenticator( + AuthenticatorChain authenticatorChain, AuthenticatorContext authenticatorContext) { log.info("Registering native authenticators"); // Register system authenticator DataHubSystemAuthenticator systemAuthenticator = new DataHubSystemAuthenticator(); systemAuthenticator.init( - ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, this.configurationProvider.getAuthentication().getSystemClientId(), - SYSTEM_CLIENT_SECRET_CONFIG, this.configurationProvider.getAuthentication().getSystemClientSecret()), + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId(), + SYSTEM_CLIENT_SECRET_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientSecret()), authenticatorContext); - authenticatorChain.register(systemAuthenticator); // Always register authenticator for internal system. + authenticatorChain.register( + systemAuthenticator); // Always register authenticator for internal system. 
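
Both registration paths in this filter expect the same contract from an Authenticator implementation: a zero-arg constructor, an init method that receives a freeform config map plus an AuthenticatorContext, and an authenticate method. A minimal sketch of a conforming implementation, built only from the interfaces visible in this patch; the class name, config key, and header name are hypothetical illustrations, not part of DataHub:

    package com.example.auth; // hypothetical package

    import com.datahub.authentication.Actor;
    import com.datahub.authentication.ActorType;
    import com.datahub.authentication.Authentication;
    import com.datahub.authentication.AuthenticationException;
    import com.datahub.authentication.AuthenticationRequest;
    import com.datahub.authentication.AuthenticatorContext;
    import com.datahub.plugins.auth.authentication.Authenticator;
    import java.util.Collections;
    import java.util.Map;
    import java.util.Objects;
    import javax.annotation.Nonnull;
    import javax.annotation.Nullable;

    public class SharedSecretAuthenticator implements Authenticator {

      private String sharedSecret;

      @Override
      public void init(
          @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) {
        // "sharedSecret" is a hypothetical key supplied via this authenticator's configs block.
        this.sharedSecret =
            Objects.requireNonNull((String) config.get("sharedSecret"), "sharedSecret is required");
      }

      @Override
      public Authentication authenticate(@Nonnull AuthenticationRequest context)
          throws AuthenticationException {
        // "X-Shared-Secret" is a hypothetical header name, used purely for illustration.
        final String header = context.getRequestHeaders().get("X-Shared-Secret");
        if (this.sharedSecret.equals(header)) {
          return new Authentication(
              new Actor(ActorType.USER, "shared-secret-user"), "", Collections.emptyMap());
        }
        throw new AuthenticationException("Shared secret header missing or incorrect.");
      }
    }

An AuthenticatorConfiguration whose type names such a class, with a configs map supplying the key, is exactly what the reflective loop below instantiates via Class.forName.
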
// Register authenticator define in application.yml final List<AuthenticatorConfiguration> authenticatorConfigurations = @@ -229,14 +267,16 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, clazz = (Class<? extends Authenticator>) Class.forName(type); } catch (ClassNotFoundException e) { throw new RuntimeException( - String.format("Failed to find Authenticator class with name %s on the classpath.", type)); + String.format( + "Failed to find Authenticator class with name %s on the classpath.", type)); } // Ensure class conforms to the correct type. if (!Authenticator.class.isAssignableFrom(clazz)) { - throw new IllegalArgumentException(String.format( - "Failed to instantiate invalid Authenticator with class name %s. Class does not implement the 'Authenticator' interface", - clazz.getCanonicalName())); + throw new IllegalArgumentException( + String.format( + "Failed to instantiate invalid Authenticator with class name %s. Class does not implement the 'Authenticator' interface", + clazz.getCanonicalName())); } // Else construct an instance of the class, each class should have an empty constructor. @@ -245,9 +285,14 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, // Successfully created authenticator. Now init and register it. log.debug(String.format("Initializing Authenticator with name %s", type)); if (authenticator instanceof HealthStatusAuthenticator) { - Map<String, Object> authenticatorConfig = new HashMap<>(Map.of(SYSTEM_CLIENT_ID_CONFIG, - this.configurationProvider.getAuthentication().getSystemClientId())); - authenticatorConfig.putAll(Optional.ofNullable(internalAuthenticatorConfig.getConfigs()).orElse(Collections.emptyMap())); + Map<String, Object> authenticatorConfig = + new HashMap<>( + Map.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId())); + authenticatorConfig.putAll( + Optional.ofNullable(internalAuthenticatorConfig.getConfigs()) + .orElse(Collections.emptyMap())); authenticator.init(authenticatorConfig, authenticatorContext); } else { authenticator.init(configs, authenticatorContext); @@ -256,8 +301,10 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, authenticatorChain.register(authenticator); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to instantiate Authenticator with class name %s", clazz.getCanonicalName()), e); + String.format( + "Failed to instantiate Authenticator with class name %s", clazz.getCanonicalName()), + e); } } } -} \ No newline at end of file +} diff --git a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java index 05ca428283a6c..471fdf8c36903 100644 --- a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java +++ b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java @@ -1,5 +1,7 @@ package com.datahub.auth.authentication; +import static org.mockito.Mockito.*; + import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.AuthenticatorConfiguration; @@ -17,27 +19,25 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static org.mockito.Mockito.*; - 
@Configuration public class AuthTestConfiguration { - @Bean public EntityService entityService() { return mock(EntityService.class); } @Bean("dataHubTokenService") - public StatefulTokenService statefulTokenService(ConfigurationProvider configurationProvider, EntityService entityService) { - TokenServiceConfiguration tokenServiceConfiguration = configurationProvider.getAuthentication().getTokenService(); + public StatefulTokenService statefulTokenService( + ConfigurationProvider configurationProvider, EntityService entityService) { + TokenServiceConfiguration tokenServiceConfiguration = + configurationProvider.getAuthentication().getTokenService(); return new StatefulTokenService( tokenServiceConfiguration.getSigningKey(), tokenServiceConfiguration.getSigningAlgorithm(), tokenServiceConfiguration.getIssuer(), entityService, - tokenServiceConfiguration.getSalt() - ); + tokenServiceConfiguration.getSalt()); } @Bean @@ -59,8 +59,12 @@ public ConfigurationProvider configurationProvider() { authenticationConfiguration.setTokenService(tokenServiceConfiguration); AuthenticatorConfiguration authenticator = new AuthenticatorConfiguration(); authenticator.setType("com.datahub.authentication.authenticator.DataHubTokenAuthenticator"); - authenticator.setConfigs(Map.of("signingKey", "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=", - "salt", "ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=")); + authenticator.setConfigs( + Map.of( + "signingKey", + "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=", + "salt", + "ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=")); List<AuthenticatorConfiguration> authenticators = List.of(authenticator); authenticationConfiguration.setAuthenticators(authenticators); authPluginConfiguration.setPath(""); diff --git a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java index 2ac65bf09c912..746138e4ee90f 100644 --- a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java +++ b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java @@ -1,5 +1,8 @@ package com.datahub.auth.authentication; +import static com.datahub.authentication.AuthenticationConstants.*; +import static org.mockito.Mockito.*; + import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; @@ -17,18 +20,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static com.datahub.authentication.AuthenticationConstants.*; -import static org.mockito.Mockito.*; - - -@ContextConfiguration(classes = { AuthTestConfiguration.class }) +@ContextConfiguration(classes = {AuthTestConfiguration.class}) public class AuthenticationFilterTest extends AbstractTestNGSpringContextTests { - @Autowired - AuthenticationFilter _authenticationFilter; + @Autowired AuthenticationFilter _authenticationFilter; - @Autowired - StatefulTokenService _statefulTokenService; + @Autowired StatefulTokenService _statefulTokenService; @Test public void testExpiredToken() throws ServletException, IOException, TokenException { @@ -37,17 +34,20 @@ public void testExpiredToken() throws ServletException, IOException, TokenExcept HttpServletResponse servletResponse = mock(HttpServletResponse.class); FilterChain filterChain = mock(FilterChain.class); 
Actor actor = new Actor(ActorType.USER, "datahub"); -// String token = _statefulTokenService.generateAccessToken(TokenType.SESSION, actor, 0L, System.currentTimeMillis(), "token", -// "token", actor.toUrnStr()); + // String token = _statefulTokenService.generateAccessToken(TokenType.SESSION, actor, 0L, + // System.currentTimeMillis(), "token", + // "token", actor.toUrnStr()); // Token generated 9/11/23, invalid for all future dates - String token = "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIZCI6ImRhdGFodWIiLCJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwian" - + "RpIjoiMmI0MzZkZDAtYjEwOS00N2UwLWJmYTEtMzM2ZmU4MTU4MDE1Iiwic3ViIjoiZGF0YWh1YiIsImV4cCI6MTY5NDU0NzA2OCwiaXNzIjoiZGF" - + "0YWh1Yi1tZXRhZGF0YS1zZXJ2aWNlIn0.giqx7J5a9mxuubG6rXdAMoaGlcII-fqY-W82Wm7OlLI"; - when(servletRequest.getHeaderNames()).thenReturn(Collections.enumeration(List.of(AUTHORIZATION_HEADER_NAME))); - when(servletRequest.getHeader(AUTHORIZATION_HEADER_NAME)) - .thenReturn("Bearer " + token); + String token = + "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIZCI6ImRhdGFodWIiLCJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwian" + + "RpIjoiMmI0MzZkZDAtYjEwOS00N2UwLWJmYTEtMzM2ZmU4MTU4MDE1Iiwic3ViIjoiZGF0YWh1YiIsImV4cCI6MTY5NDU0NzA2OCwiaXNzIjoiZGF" + + "0YWh1Yi1tZXRhZGF0YS1zZXJ2aWNlIn0.giqx7J5a9mxuubG6rXdAMoaGlcII-fqY-W82Wm7OlLI"; + when(servletRequest.getHeaderNames()) + .thenReturn(Collections.enumeration(List.of(AUTHORIZATION_HEADER_NAME))); + when(servletRequest.getHeader(AUTHORIZATION_HEADER_NAME)).thenReturn("Bearer " + token); _authenticationFilter.doFilter(servletRequest, servletResponse, filterChain); - verify(servletResponse, times(1)).sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString()); + verify(servletResponse, times(1)) + .sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString()); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java index e72225e6ee990..b69a8a7818485 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java @@ -1,27 +1,26 @@ package com.datahub.authentication.authenticator; import com.datahub.authentication.Authentication; - import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.util.Pair; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * A configurable chain of {@link Authenticator}s executed in series to attempt to authenticate an inbound request. + * A configurable chain of {@link Authenticator}s executed in series to attempt to authenticate an + * inbound request. * - * Individual {@link Authenticator}s are registered with the chain using {@link #register(Authenticator)}. - * The chain can be executed by invoking {@link #authenticate(AuthenticationRequest)} with an instance of {@link AuthenticationRequest}. 
+ * <p>Individual {@link Authenticator}s are registered with the chain using {@link
+ * #register(Authenticator)}. The chain can be executed by invoking {@link
+ * #authenticate(AuthenticationRequest)} with an instance of {@link AuthenticationRequest}.
  */
 @Slf4j
 public class AuthenticatorChain {
@@ -39,21 +38,30 @@ public void register(@Nonnull final Authenticator authenticator) {
   }
 
   /**
-   * Executes a set of {@link Authenticator}s and returns the first successful authentication result.
+   * Executes a set of {@link Authenticator}s and returns the first successful authentication
+   * result.
    *
-   * Returns an instance of {@link Authentication} if the incoming request is successfully authenticated.
-   * Returns null if {@link Authentication} cannot be resolved for the incoming request.
+   * <p>Returns an instance of {@link Authentication} if the incoming request is successfully
+   * authenticated. Returns null if {@link Authentication} cannot be resolved for the incoming
+   * request.
    */
   @Nullable
-  public Authentication authenticate(@Nonnull final AuthenticationRequest context, boolean logExceptions) throws AuthenticationException {
+  public Authentication authenticate(
+      @Nonnull final AuthenticationRequest context, boolean logExceptions)
+      throws AuthenticationException {
     Objects.requireNonNull(context);
     ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
     List<Pair<String, Exception>> authenticationFailures = new ArrayList<>();
     for (final Authenticator authenticator : this.authenticators) {
       try {
-        log.debug(String.format("Executing Authenticator with class name %s", authenticator.getClass().getCanonicalName()));
-        // The library came with plugin can use the contextClassLoader to load the classes. For example apache-ranger library does this.
-        // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class loading request from plugin's home directory,
+        log.debug(
+            String.format(
+                "Executing Authenticator with class name %s",
+                authenticator.getClass().getCanonicalName()));
+        // The library that comes with the plugin can use the contextClassLoader to load classes.
+        // For example, the apache-ranger library does this.
+        // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class
+        // loading requests from the plugin's home directory,
         // otherwise plugin's internal library wouldn't be able to find their dependent classes
         Thread.currentThread().setContextClassLoader(authenticator.getClass().getClassLoader());
         Authentication result = authenticator.authenticate(context);
@@ -65,13 +73,19 @@ public Authentication authenticate(@Nonnull final AuthenticationRequest context,
       } catch (AuthenticationExpiredException e) {
         // Throw if it's an AuthenticationException to propagate the error message to the end user
-        log.debug(String.format("Unable to authenticate request using Authenticator %s", authenticator.getClass().getCanonicalName()), e);
+        log.debug(
+            String.format(
+                "Unable to authenticate request using Authenticator %s",
+                authenticator.getClass().getCanonicalName()),
+            e);
         throw e;
       } catch (Exception e) {
         // Log as a normal error otherwise. 
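
The reflowed javadoc above is effectively the whole contract of this class: register one or more Authenticators, then invoke authenticate with an AuthenticationRequest and receive an Authentication, null, or a propagated expiry exception. A rough usage sketch against the signatures shown in this patch; the credential values and the plaintext Basic header format are illustrative assumptions:

    import static com.datahub.authentication.AuthenticationConstants.*;

    import com.datahub.authentication.Authentication;
    import com.datahub.authentication.AuthenticationException;
    import com.datahub.authentication.AuthenticationRequest;
    import com.datahub.authentication.authenticator.AuthenticatorChain;
    import com.datahub.authentication.authenticator.DataHubSystemAuthenticator;
    import com.google.common.collect.ImmutableMap;
    import java.util.Map;

    public class AuthenticatorChainSketch {
      public static void main(String[] args) throws AuthenticationException {
        // Initialize the system authenticator with placeholder credentials.
        DataHubSystemAuthenticator systemAuthenticator = new DataHubSystemAuthenticator();
        systemAuthenticator.init(
            ImmutableMap.of(
                SYSTEM_CLIENT_ID_CONFIG, "__datahub_system",
                SYSTEM_CLIENT_SECRET_CONFIG, "placeholder-secret"),
            null);

        AuthenticatorChain chain = new AuthenticatorChain();
        chain.register(systemAuthenticator);

        // Build a request the way AuthenticationFilter#buildAuthContext does:
        // servlet path, path info, and a map of request headers. The
        // "Basic id:secret" value below is an assumption for illustration.
        AuthenticationRequest request =
            new AuthenticationRequest(
                "/gms", "/entities",
                Map.of(AUTHORIZATION_HEADER_NAME, "Basic __datahub_system:placeholder-secret"));

        // Null means no authenticator in the chain resolved the request; expired-token
        // failures surface as exceptions instead.
        Authentication authentication = chain.authenticate(request, /* logExceptions */ true);
        System.out.println(authentication != null ? "authenticated" : "rejected");
      }
    }

The filter shown earlier layers servlet plumbing on this same call: it builds the request from headers, sends 401 on null or exception, and stores the result in AuthenticationContext otherwise.
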
- log.debug(String.format( + log.debug( + String.format( "Caught exception while attempting to authenticate request using Authenticator %s", - authenticator.getClass().getCanonicalName()), e); + authenticator.getClass().getCanonicalName()), + e); authenticationFailures.add(new Pair<>(authenticator.getClass().getCanonicalName(), e)); } finally { Thread.currentThread().setContextClassLoader(contextClassLoader); @@ -79,14 +93,19 @@ public Authentication authenticate(@Nonnull final AuthenticationRequest context, } // No authentication resolved. Return null. if (!authenticationFailures.isEmpty()) { - List<Pair<String, String>> shortMessage = authenticationFailures.stream() - .peek(p -> { - if (logExceptions) { - log.error("Error during {} authentication: ", p.getFirst(), p.getSecond()); - } - }) - .map(p -> Pair.of(p.getFirst(), p.getSecond().getMessage())).collect(Collectors.toList()); - log.warn("Authentication chain failed to resolve a valid authentication. Errors: {}", shortMessage); + List<Pair<String, String>> shortMessage = + authenticationFailures.stream() + .peek( + p -> { + if (logExceptions) { + log.error("Error during {} authentication: ", p.getFirst(), p.getSecond()); + } + }) + .map(p -> Pair.of(p.getFirst(), p.getSecond().getMessage())) + .collect(Collectors.toList()); + log.warn( + "Authentication chain failed to resolve a valid authentication. Errors: {}", + shortMessage); } return null; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java index 524c12c56c266..635a87dc84c11 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java @@ -1,29 +1,28 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorContext; import com.datahub.authentication.token.DataHubJwtSigningKeyResolver; +import com.datahub.plugins.auth.authentication.Authenticator; import io.jsonwebtoken.Claims; -import io.jsonwebtoken.Jwts; import io.jsonwebtoken.Jws; +import io.jsonwebtoken.Jwts; +import java.util.HashSet; import java.util.Map; import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.HashSet; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * This Authenticator verifies third party token and allows to pass claim for "id" part of resolved actor urn. - * Supported algorithm at this moment RSA + * This Authenticator verifies third party token and allows to pass claim for "id" part of resolved + * actor urn. 
Supported algorithm at this moment RSA */
 @Slf4j
 public class DataHubJwtTokenAuthenticator implements Authenticator {
@@ -33,57 +32,67 @@ public class DataHubJwtTokenAuthenticator implements Authenticator {
 
   static final String DEFAULT_SIGNING_ALG = "RSA";
 
   /**
-   * idUserClaim allows you to select which claim will be used as the "id" part of the resolved actor urn, e.g. "urn:li:corpuser:"
-   * **/
+   * userIdClaim allows you to select which claim will be used as the "id" part of the resolved
+   * actor urn, e.g. "urn:li:corpuser:" *
+   */
   private String userIdClaim;
 
-  /**
-   * List of trusted issuers
-   * **/
+  /** List of trusted issuers * */
   private HashSet<String> trustedIssuers;
 
   /**
-   * This public key is optional and should be used if token public key is not available online or will not change for signed token.
-   * **/
+   * This public key is optional and should be used if token public key is not available online or
+   * will not change for signed token. *
+   */
   private String publicKey;
 
   /**
-   * Algorithm used to sign your token.
-   * This is optional and can be skiped if public key is available online.
-   * **/
+   * Algorithm used to sign your token. This is optional and can be skipped if public key is
+   * available online. *
+   */
   private String algorithm;
 
   @Override
-  public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) {
+  public void init(
+      @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) {
     Objects.requireNonNull(config, "Config parameter cannot be null");
-    this.userIdClaim = config.get("userIdClaim") == null ? DEFAULT_USER_CLAIM : (String) config.get("userIdClaim");
+    this.userIdClaim =
+        config.get("userIdClaim") == null ? DEFAULT_USER_CLAIM : (String) config.get("userIdClaim");
 
-    Map<String, String> issuers = Objects.requireNonNull((Map<String, String>) config.get("trustedIssuers"),
-        "Missing required config trusted issuers");
+    Map<String, String> issuers =
+        Objects.requireNonNull(
+            (Map<String, String>) config.get("trustedIssuers"),
+            "Missing required config trusted issuers");
     this.trustedIssuers = new HashSet<String>(issuers.values());
 
     this.publicKey = (String) config.get("publicKey");
-    this.algorithm = config.get("algorithm") == null ? DEFAULT_SIGNING_ALG : (String) config.get("algorithm");
+    this.algorithm =
+        config.get("algorithm") == null ? 
DEFAULT_SIGNING_ALG : (String) config.get("algorithm"); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); try { String jwtToken = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); - if (jwtToken == null || (!jwtToken.startsWith("Bearer ") && !jwtToken.startsWith("bearer "))) { + if (jwtToken == null + || (!jwtToken.startsWith("Bearer ") && !jwtToken.startsWith("bearer "))) { throw new AuthenticationException("Invalid Authorization token"); } String token = getToken(jwtToken); - Jws<Claims> claims = Jwts.parserBuilder() - .setSigningKeyResolver(new DataHubJwtSigningKeyResolver(this.trustedIssuers, this.publicKey, this.algorithm)) - .build() - .parseClaimsJws(token); + Jws<Claims> claims = + Jwts.parserBuilder() + .setSigningKeyResolver( + new DataHubJwtSigningKeyResolver( + this.trustedIssuers, this.publicKey, this.algorithm)) + .build() + .parseClaimsJws(token); final String userClaim = claims.getBody().get(userIdClaim, String.class); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java index 70a4abc3fd18d..9a25a51b72622 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java @@ -1,34 +1,33 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; -import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; +import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; -import javax.annotation.Nonnull; +import com.datahub.plugins.auth.authentication.Authenticator; import java.util.Collections; import java.util.Map; import java.util.Objects; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * Authenticator that verifies system internal callers, such as the metadata-service itself OR datahub-frontend, - * using HTTP Basic Authentication. - * - * This makes use of a single "system client id" and "system shared secret" which each - * component in the system is configured to provide. + * Authenticator that verifies system internal callers, such as the metadata-service itself OR + * datahub-frontend, using HTTP Basic Authentication. * - * This authenticator requires the following configurations: + * <p>This makes use of a single "system client id" and "system shared secret" which each component + * in the system is configured to provide. * - * - systemClientId: an identifier for internal system callers, provided in the Authorization header via Basic Authentication. 
- * - systemClientSecret: a shared secret used to authenticate internal system callers + * <p>This authenticator requires the following configurations: * + * <p>- systemClientId: an identifier for internal system callers, provided in the Authorization + * header via Basic Authentication. - systemClientSecret: a shared secret used to authenticate + * internal system callers */ @Slf4j public class DataHubSystemAuthenticator implements Authenticator { @@ -37,16 +36,22 @@ public class DataHubSystemAuthenticator implements Authenticator { private String systemClientSecret; @Override - public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); - this.systemClientSecret = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_SECRET_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_SECRET_CONFIG)); + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + this.systemClientSecret = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_SECRET_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_SECRET_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); final String authorizationHeader = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); if (authorizationHeader != null) { @@ -57,16 +62,18 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw if (splitCredentials.length == 2 && this.systemClientId.equals(splitCredentials[0]) - && this.systemClientSecret.equals(splitCredentials[1]) - ) { + && this.systemClientSecret.equals(splitCredentials[1])) { // If this request was made internally, there may be a delegated id. return new Authentication( - new Actor(ActorType.USER, this.systemClientId), // todo: replace this with service actor type once they exist. + new Actor( + ActorType.USER, + this.systemClientId), // todo: replace this with service actor type once they + // exist. authorizationHeader, - Collections.emptyMap() - ); + Collections.emptyMap()); } else { - throw new AuthenticationException("Provided credentials do not match known system client id & client secret. Check your configuration values..."); + throw new AuthenticationException( + "Provided credentials do not match known system client id & client secret. 
Check your configuration values...");
       }
     } else {
       throw new AuthenticationException("Authorization header is missing 'Basic' prefix.");
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java
index e7e776999f34e..f1d1f5a80119c 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java
@@ -1,34 +1,33 @@
 package com.datahub.authentication.authenticator;
 
-import com.datahub.authentication.token.StatefulTokenService;
-import com.datahub.authentication.token.StatelessTokenService;
-import com.datahub.authentication.token.TokenClaims;
-import com.datahub.authentication.token.TokenExpiredException;
-import com.datahub.authentication.Actor;
+import static com.datahub.authentication.AuthenticationConstants.*;
 
+import com.datahub.authentication.Actor;
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationConstants;
+import com.datahub.authentication.AuthenticationException;
 import com.datahub.authentication.AuthenticationExpiredException;
 import com.datahub.authentication.AuthenticationRequest;
-import com.datahub.authentication.AuthenticationException;
-import com.datahub.plugins.auth.authentication.Authenticator;
 import com.datahub.authentication.AuthenticatorContext;
+import com.datahub.authentication.token.StatefulTokenService;
+import com.datahub.authentication.token.StatelessTokenService;
+import com.datahub.authentication.token.TokenClaims;
+import com.datahub.authentication.token.TokenExpiredException;
+import com.datahub.plugins.auth.authentication.Authenticator;
 import com.linkedin.metadata.entity.EntityService;
-import javax.annotation.Nonnull;
 import java.util.Map;
 import java.util.Objects;
+import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.datahub.authentication.AuthenticationConstants.*;
-
-
 /**
  * Authenticator that verifies DataHub-issued JSON web tokens.
  *
- * This authenticator requires the following configurations:
+ * <p>This authenticator requires the following configurations:
 *
- * - signingAlgorithm (optional): the algorithm used to verify JWT's. This should be THE SAME ONE used by the {@link StatelessTokenService}. Defaults to HS256.
- * - signingKey: a key used to sign all JWT tokens using the provided signingAlgorithm
+ * <p>- signingAlgorithm (optional): the algorithm used to verify JWTs. This should be THE SAME ONE
+ * used by the {@link StatelessTokenService}. Defaults to HS256. 
- signingKey: a key used to sign + * all JWT tokens using the provided signingAlgorithm */ @Slf4j public class DataHubTokenAuthenticator implements Authenticator { @@ -47,28 +46,35 @@ public void init(@Nonnull final Map<String, Object> config, final AuthenticatorC Objects.requireNonNull(config, "Config parameter cannot be null"); Objects.requireNonNull(context, "Context parameter cannot be null"); final String signingKey = - Objects.requireNonNull((String) config.get(SIGNING_KEY_CONFIG_NAME), "signingKey is a required config"); + Objects.requireNonNull( + (String) config.get(SIGNING_KEY_CONFIG_NAME), "signingKey is a required config"); final String salt = Objects.requireNonNull((String) config.get(SALT_CONFIG_NAME), "salt is a required config"); - final String signingAlgorithm = (String) config.getOrDefault(SIGNING_ALG_CONFIG_NAME, DEFAULT_SIGNING_ALG); + final String signingAlgorithm = + (String) config.getOrDefault(SIGNING_ALG_CONFIG_NAME, DEFAULT_SIGNING_ALG); log.debug(String.format("Creating TokenService using signing algorithm %s", signingAlgorithm)); if (!context.data().containsKey(AuthenticationConstants.ENTITY_SERVICE)) { - throw new IllegalArgumentException("Unable to initialize DataHubTokenAuthenticator, entity service reference not" - + " found."); + throw new IllegalArgumentException( + "Unable to initialize DataHubTokenAuthenticator, entity service reference not" + + " found."); } final Object entityService = context.data().get(ENTITY_SERVICE); if (!(entityService instanceof EntityService)) { throw new RuntimeException( "Unable to initialize DataHubTokenAuthenticator, entity service reference is not of type: " - + "EntityService.class, found: " + entityService.getClass()); + + "EntityService.class, found: " + + entityService.getClass()); } - this._statefulTokenService = (StatefulTokenService) Objects.requireNonNull(context.data().get(TOKEN_SERVICE)); + this._statefulTokenService = + (StatefulTokenService) Objects.requireNonNull(context.data().get(TOKEN_SERVICE)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); - final String authorizationHeader = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); // Case insensitive + final String authorizationHeader = + context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); // Case insensitive if (authorizationHeader != null) { if (authorizationHeader.startsWith("Bearer ") || authorizationHeader.startsWith("bearer ")) { return validateAndExtract(authorizationHeader); @@ -79,12 +85,14 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw throw new AuthenticationException("Request is missing 'Authorization' header."); } - private Authentication validateAndExtract(final String credentials) throws AuthenticationException { + private Authentication validateAndExtract(final String credentials) + throws AuthenticationException { log.debug("Found authentication token. 
Verifying..."); final String token = credentials.substring(7); try { final TokenClaims claims = this._statefulTokenService.validateAccessToken(token); - return new Authentication(new Actor(claims.getActorType(), claims.getActorId()), credentials, claims.asMap()); + return new Authentication( + new Actor(claims.getActorType(), claims.getActorId()), credentials, claims.asMap()); } catch (TokenExpiredException e) { throw new AuthenticationExpiredException(e.getMessage(), e); } catch (Exception e) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java index 5749eacf5d25d..65581f1d5b635 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java @@ -1,5 +1,7 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -7,48 +9,45 @@ import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; import com.datahub.plugins.auth.authentication.Authenticator; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Set; - -import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; /** * This Authenticator is used for allowing access for unauthenticated health check endpoints * - * It exists to support load balancers, liveness/readiness checks - * + * <p>It exists to support load balancers, liveness/readiness checks */ @Slf4j public class HealthStatusAuthenticator implements Authenticator { - private static final Set<String> HEALTH_ENDPOINTS = Set.of( - "/openapi/check/", - "/openapi/up/" - ); + private static final Set<String> HEALTH_ENDPOINTS = Set.of("/openapi/check/", "/openapi/up/"); private String systemClientId; @Override - public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); - if (HEALTH_ENDPOINTS.stream().anyMatch(prefix -> String.join("", context.getServletInfo(), context.getPathInfo()).startsWith(prefix))) { + if (HEALTH_ENDPOINTS.stream() + .anyMatch( + prefix -> + String.join("", context.getServletInfo(), context.getPathInfo()) 
+ .startsWith(prefix))) { return new Authentication( - new Actor(ActorType.USER, systemClientId), - "", - Collections.emptyMap() - ); + new Actor(ActorType.USER, systemClientId), "", Collections.emptyMap()); } throw new AuthenticationException("Authorization not allowed. Non-health check endpoint."); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java index 4e1b3cf7f73aa..19f135debdae4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java @@ -1,12 +1,14 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.Constants; import java.util.Collections; @@ -16,16 +18,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * This Authenticator is used as a no-op to simply convert the X-DataHub-Actor header into a valid Authentication, or fall - * back to resolving a system {@link Actor} by default. + * This Authenticator is used as a no-op to simply convert the X-DataHub-Actor header into a valid + * Authentication, or fall back to resolving a system {@link Actor} by default. * - * It exists to support deployments that do not have Metadata Service Authentication enabled. + * <p>It exists to support deployments that do not have Metadata Service Authentication enabled. * - * Notice that this authenticator should generally be avoided in production. + * <p>Notice that this authenticator should generally be avoided in production. 
*/ @Slf4j public class NoOpAuthenticator implements Authenticator { @@ -33,24 +32,29 @@ public class NoOpAuthenticator implements Authenticator { private String systemClientId; @Override - public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); String actorUrn = context.getRequestHeaders().get(LEGACY_X_DATAHUB_ACTOR_HEADER); // For backwards compatibility, support pulling actor context from the deprecated // X-DataHub-Actor header. if (actorUrn == null || "".equals(actorUrn)) { - log.debug(String.format("Found no X-DataHub-Actor header provided with the request. Falling back to system creds %s", Constants.UNKNOWN_ACTOR)); - return new Authentication( - new Actor(ActorType.USER, this.systemClientId), "" - ); + log.debug( + String.format( + "Found no X-DataHub-Actor header provided with the request. Falling back to system creds %s", + Constants.UNKNOWN_ACTOR)); + return new Authentication(new Actor(ActorType.USER, this.systemClientId), ""); } // If not provided, fallback to system caller identity. @@ -58,8 +62,7 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw // When authentication is disabled, assume everyone is a normal user. new Actor(ActorType.USER, getActorIdFromUrn(actorUrn)), "", // No Credentials provided. 
- Collections.emptyMap() - ); + Collections.emptyMap()); } private String getActorIdFromUrn(final String urnStr) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index 29ec2f73dc688..f33ae5de130da 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -1,5 +1,7 @@ package com.datahub.authentication.group; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -35,15 +37,14 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GroupService { private final EntityClient _entityClient; private final EntityService _entityService; private final GraphClient _graphClient; - public GroupService(@Nonnull EntityClient entityClient, @Nonnull EntityService entityService, + public GroupService( + @Nonnull EntityClient entityClient, + @Nonnull EntityService entityService, @Nonnull GraphClient graphClient) { Objects.requireNonNull(entityClient, "entityClient must not be null!"); Objects.requireNonNull(entityService, "entityService must not be null!"); @@ -64,7 +65,9 @@ public Origin getGroupOrigin(@Nonnull final Urn groupUrn) { return (Origin) _entityService.getLatestAspect(groupUrn, ORIGIN_ASPECT_NAME); } - public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn groupUrn, + public void addUserToNativeGroup( + @Nonnull final Urn userUrn, + @Nonnull final Urn groupUrn, final Authentication authentication) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(groupUrn, "groupUrn must not be null"); @@ -76,7 +79,8 @@ public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn try { // First, fetch user's group membership aspect. - NativeGroupMembership nativeGroupMembership = getExistingNativeGroupMembership(userUrn, authentication); + NativeGroupMembership nativeGroupMembership = + getExistingNativeGroupMembership(userUrn, authentication); // Handle the duplicate case. 
nativeGroupMembership.getNativeGroups().remove(groupUrn); nativeGroupMembership.getNativeGroups().add(groupUrn); @@ -94,13 +98,18 @@ public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn } } - public String createNativeGroup(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, - @Nonnull final String groupDescription, final Authentication authentication) throws Exception { + public String createNativeGroup( + @Nonnull final CorpGroupKey corpGroupKey, + @Nonnull final String groupName, + @Nonnull final String groupDescription, + final Authentication authentication) + throws Exception { Objects.requireNonNull(corpGroupKey, "corpGroupKey must not be null"); Objects.requireNonNull(groupName, "groupName must not be null"); Objects.requireNonNull(groupDescription, "groupDescription must not be null"); - Urn corpGroupUrn = EntityKeyUtils.convertEntityKeyToUrn(corpGroupKey, Constants.CORP_GROUP_ENTITY_NAME); + Urn corpGroupUrn = + EntityKeyUtils.convertEntityKeyToUrn(corpGroupKey, Constants.CORP_GROUP_ENTITY_NAME); if (groupExists(corpGroupUrn)) { throw new IllegalArgumentException("This Group already exists!"); } @@ -110,22 +119,34 @@ public String createNativeGroup(@Nonnull final CorpGroupKey corpGroupKey, @Nonnu return groupInfo; } - public void removeExistingNativeGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List<Urn> userUrnList, - final Authentication authentication) throws Exception { + public void removeExistingNativeGroupMembers( + @Nonnull final Urn groupUrn, + @Nonnull final List<Urn> userUrnList, + final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); Objects.requireNonNull(userUrnList, "userUrnList must not be null"); final Set<Urn> userUrns = new HashSet<>(userUrnList); for (Urn userUrn : userUrns) { - final Map<Urn, EntityResponse> entityResponseMap = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, userUrns, - Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication); + final Map<Urn, EntityResponse> entityResponseMap = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + userUrns, + Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + authentication); EntityResponse entityResponse = entityResponseMap.get(userUrn); if (entityResponse == null) { continue; } - final NativeGroupMembership nativeGroupMembership = new NativeGroupMembership( - entityResponse.getAspects().get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + final NativeGroupMembership nativeGroupMembership = + new NativeGroupMembership( + entityResponse + .getAspects() + .get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME) + .getValue() + .data()); if (nativeGroupMembership.getNativeGroups().remove(groupUrn)) { // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -139,8 +160,9 @@ public void removeExistingNativeGroupMembers(@Nonnull final Urn groupUrn, @Nonnu } } - public void migrateGroupMembershipToNativeGroupMembership(@Nonnull final Urn groupUrn, final String actorUrnStr, - final Authentication authentication) throws Exception { + public void migrateGroupMembershipToNativeGroupMembership( + @Nonnull final Urn groupUrn, final String actorUrnStr, final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Get the existing set of users @@ -153,26 +175,41 @@ public void migrateGroupMembershipToNativeGroupMembership(@Nonnull final Urn gro userUrnList.forEach(userUrn -> addUserToNativeGroup(userUrn, groupUrn, authentication)); } - NativeGroupMembership getExistingNativeGroupMembership(@Nonnull final Urn userUrn, - final Authentication authentication) throws Exception { + NativeGroupMembership getExistingNativeGroupMembership( + @Nonnull final Urn userUrn, final Authentication authentication) throws Exception { final EntityResponse entityResponse = - _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, Collections.singleton(userUrn), - Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication).get(userUrn); + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + authentication) + .get(userUrn); NativeGroupMembership nativeGroupMembership; - if (entityResponse == null || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { + if (entityResponse == null + || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { // If the user doesn't have the NativeGroupMembership aspect, create one. nativeGroupMembership = new NativeGroupMembership(); nativeGroupMembership.setNativeGroups(new UrnArray()); } else { - nativeGroupMembership = new NativeGroupMembership( - entityResponse.getAspects().get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + nativeGroupMembership = + new NativeGroupMembership( + entityResponse + .getAspects() + .get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME) + .getValue() + .data()); } return nativeGroupMembership; } - String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, - @Nonnull final String groupDescription, final Authentication authentication) throws Exception { + String createGroupInfo( + @Nonnull final CorpGroupKey corpGroupKey, + @Nonnull final String groupName, + @Nonnull final String groupDescription, + final Authentication authentication) + throws Exception { Objects.requireNonNull(corpGroupKey, "corpGroupKey must not be null"); Objects.requireNonNull(groupName, "groupName must not be null"); Objects.requireNonNull(groupDescription, "groupDescription must not be null"); @@ -184,7 +221,10 @@ String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final corpGroupInfo.setGroups(new CorpGroupUrnArray()); corpGroupInfo.setMembers(new CorpuserUrnArray()); corpGroupInfo.setAdmins(new CorpuserUrnArray()); - corpGroupInfo.setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + corpGroupInfo.setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -196,7 +236,8 @@ String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final return _entityClient.ingestProposal(proposal, authentication); } - void createNativeGroupOrigin(@Nonnull final Urn groupUrn, final Authentication authentication) throws Exception { + void createNativeGroupOrigin(@Nonnull final Urn groupUrn, final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Create the Group info. @@ -217,20 +258,33 @@ List<Urn> getExistingGroupMembers(@Nonnull final Urn groupUrn, final String acto Objects.requireNonNull(groupUrn, "groupUrn must not be null"); final EntityRelationships relationships = - _graphClient.getRelatedEntities(groupUrn.toString(), ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, 0, 500, actorUrnStr); - return relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + _graphClient.getRelatedEntities( + groupUrn.toString(), + ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + actorUrnStr); + return relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); } - void removeExistingGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List<Urn> userUrnList, - final Authentication authentication) throws Exception { + void removeExistingGroupMembers( + @Nonnull final Urn groupUrn, + @Nonnull final List<Urn> userUrnList, + final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); Objects.requireNonNull(userUrnList, "userUrnList must not be null"); final Set<Urn> userUrns = new HashSet<>(userUrnList); for (Urn userUrn : userUrns) { final Map<Urn, EntityResponse> entityResponseMap = - _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, userUrns, Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + userUrns, + Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), authentication); EntityResponse entityResponse = entityResponseMap.get(userUrn); if (entityResponse == null) { @@ -238,7 +292,8 @@ void removeExistingGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List } final GroupMembership groupMembership = - new GroupMembership(entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + new GroupMembership( + entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); if (groupMembership.getGroups().remove(groupUrn)) { // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java index 35052810236a0..73add48958f60 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java @@ -1,5 +1,8 @@ package com.datahub.authentication.invite; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class InviteTokenService { @@ -42,26 +41,33 @@ public Urn getInviteTokenUrn(@Nonnull final String inviteTokenStr) throws URISyn return Urn.createFromString(inviteTokenUrnStr); } - public boolean isInviteTokenValid(@Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + public boolean isInviteTokenValid( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) throws RemoteInvocationException { return _entityClient.exists(inviteTokenUrn, authentication); } @Nullable - public Urn getInviteTokenRole(@Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + public Urn getInviteTokenRole( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) throws URISyntaxException, RemoteInvocationException { - final com.linkedin.identity.InviteToken inviteToken = getInviteTokenEntity(inviteTokenUrn, authentication); + final com.linkedin.identity.InviteToken inviteToken = + getInviteTokenEntity(inviteTokenUrn, authentication); return inviteToken.hasRole() ? inviteToken.getRole() : null; } @Nonnull - public String getInviteToken(@Nullable final String roleUrnStr, boolean regenerate, - @Nonnull final Authentication authentication) throws Exception { + public String getInviteToken( + @Nullable final String roleUrnStr, + boolean regenerate, + @Nonnull final Authentication authentication) + throws Exception { final Filter inviteTokenFilter = roleUrnStr == null ? 
createInviteTokenFilter() : createInviteTokenFilter(roleUrnStr); final SearchResult searchResult = - _entityClient.filter(INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); + _entityClient.filter( + INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); final int numEntities = searchResult.getEntities().size(); // If there is more than one invite token, wipe all of them and generate a fresh one @@ -78,14 +84,19 @@ public String getInviteToken(@Nullable final String roleUrnStr, boolean regenera final SearchEntity searchEntity = searchResult.getEntities().get(0); final Urn inviteTokenUrn = searchEntity.getEntity(); - com.linkedin.identity.InviteToken inviteToken = getInviteTokenEntity(inviteTokenUrn, authentication); + com.linkedin.identity.InviteToken inviteToken = + getInviteTokenEntity(inviteTokenUrn, authentication); return _secretService.decrypt(inviteToken.getToken()); } - private com.linkedin.identity.InviteToken getInviteTokenEntity(@Nonnull final Urn inviteTokenUrn, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { + private com.linkedin.identity.InviteToken getInviteTokenEntity( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { final EntityResponse inviteTokenEntity = - _entityClient.getV2(INVITE_TOKEN_ENTITY_NAME, inviteTokenUrn, Collections.singleton(INVITE_TOKEN_ASPECT_NAME), + _entityClient.getV2( + INVITE_TOKEN_ENTITY_NAME, + inviteTokenUrn, + Collections.singleton(INVITE_TOKEN_ASPECT_NAME), authentication); if (inviteTokenEntity == null) { @@ -96,9 +107,12 @@ private com.linkedin.identity.InviteToken getInviteTokenEntity(@Nonnull final Ur // If invite token aspect is not present, create a new one. Otherwise, return existing one. 
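// Stepping back from the aspect handling here: a caller fetches (or lazily creates) an
// invite token via getInviteToken above, roughly as in this sketch. The role URN and the
// receiver variable are illustrative assumptions, not part of this change:
String inviteToken =
    inviteTokenService.getInviteToken("urn:li:dataHubRole:Editor", false, authentication);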
if (!aspectMap.containsKey(INVITE_TOKEN_ASPECT_NAME)) { throw new RuntimeException( - String.format("Invite token %s does not contain aspect %s", inviteTokenUrn, INVITE_TOKEN_ASPECT_NAME)); + String.format( + "Invite token %s does not contain aspect %s", + inviteTokenUrn, INVITE_TOKEN_ASPECT_NAME)); } - return new com.linkedin.identity.InviteToken(aspectMap.get(INVITE_TOKEN_ASPECT_NAME).getValue().data()); + return new com.linkedin.identity.InviteToken( + aspectMap.get(INVITE_TOKEN_ASPECT_NAME).getValue().data()); } private Filter createInviteTokenFilter() { @@ -140,7 +154,8 @@ private Filter createInviteTokenFilter(@Nonnull final String roleUrnStr) { } @Nonnull - private String createInviteToken(@Nullable final String roleUrnStr, @Nonnull final Authentication authentication) + private String createInviteToken( + @Nullable final String roleUrnStr, @Nonnull final Authentication authentication) throws Exception { String inviteTokenStr = _secretService.generateUrlSafeToken(INVITE_TOKEN_LENGTH); String hashedInviteTokenStr = _secretService.hashString(inviteTokenStr); @@ -155,21 +170,26 @@ private String createInviteToken(@Nullable final String roleUrnStr, @Nonnull fin // Ingest new InviteToken aspect final MetadataChangeProposal proposal = - buildMetadataChangeProposal(INVITE_TOKEN_ENTITY_NAME, inviteTokenKey, INVITE_TOKEN_ASPECT_NAME, - inviteTokenAspect); + buildMetadataChangeProposal( + INVITE_TOKEN_ENTITY_NAME, inviteTokenKey, INVITE_TOKEN_ASPECT_NAME, inviteTokenAspect); _entityClient.ingestProposal(proposal, authentication); return inviteTokenStr; } - private void deleteExistingInviteTokens(@Nonnull final SearchResult searchResult, - @Nonnull final Authentication authentication) { - searchResult.getEntities().forEach(entity -> { - try { - _entityClient.deleteEntity(entity.getEntity(), authentication); - } catch (RemoteInvocationException e) { - log.error(String.format("Failed to delete invite token entity %s", entity.getEntity()), e); - } - }); + private void deleteExistingInviteTokens( + @Nonnull final SearchResult searchResult, @Nonnull final Authentication authentication) { + searchResult + .getEntities() + .forEach( + entity -> { + try { + _entityClient.deleteEntity(entity.getEntity(), authentication); + } catch (RemoteInvocationException e) { + log.error( + String.format("Failed to delete invite token entity %s", entity.getEntity()), + e); + } + }); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java index c3b7c4bcf3be7..ec5d5f1e436b7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java @@ -1,5 +1,8 @@ package com.datahub.authentication.post; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Media; import com.linkedin.common.MediaType; @@ -20,10 +23,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class PostService { @@ -38,9 +37,14 @@ public Media mapMedia(@Nonnull String type, @Nonnull String location) { } @Nonnull - public PostContent mapPostContent(@Nonnull String contentType, 
@Nonnull String title, @Nullable String description, @Nullable String link, + public PostContent mapPostContent( + @Nonnull String contentType, + @Nonnull String title, + @Nullable String description, + @Nullable String link, @Nullable Media media) { - final PostContent postContent = new PostContent().setType(PostContentType.valueOf(contentType)).setTitle(title); + final PostContent postContent = + new PostContent().setType(PostContentType.valueOf(contentType)).setTitle(title); if (description != null) { postContent.setDescription(description); } @@ -53,15 +57,20 @@ public PostContent mapPostContent(@Nonnull String contentType, @Nonnull String t return postContent; } - public boolean createPost(@Nonnull String postType, @Nonnull PostContent postContent, - @Nonnull Authentication authentication) throws RemoteInvocationException { + public boolean createPost( + @Nonnull String postType, + @Nonnull PostContent postContent, + @Nonnull Authentication authentication) + throws RemoteInvocationException { final String uuid = UUID.randomUUID().toString(); final PostKey postKey = new PostKey().setId(uuid); final long currentTimeMillis = Instant.now().toEpochMilli(); - final PostInfo postInfo = new PostInfo().setType(PostType.valueOf(postType)) - .setContent(postContent) - .setCreated(currentTimeMillis) - .setLastModified(currentTimeMillis); + final PostInfo postInfo = + new PostInfo() + .setType(PostType.valueOf(postType)) + .setContent(postContent) + .setCreated(currentTimeMillis) + .setLastModified(currentTimeMillis); final MetadataChangeProposal proposal = buildMetadataChangeProposal(POST_ENTITY_NAME, postKey, POST_INFO_ASPECT_NAME, postInfo); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java index ea6de3fc7dca0..bb2d5f0efd2c7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java @@ -12,16 +12,15 @@ import java.security.KeyFactory; import java.security.NoSuchAlgorithmException; import java.security.PublicKey; +import java.security.interfaces.RSAPublicKey; import java.security.spec.InvalidKeySpecException; +import java.security.spec.RSAPublicKeySpec; import java.security.spec.X509EncodedKeySpec; import java.util.Base64; import java.util.HashSet; -import java.security.spec.RSAPublicKeySpec; -import java.security.interfaces.RSAPublicKey; import org.json.JSONArray; import org.json.JSONObject; - public class DataHubJwtSigningKeyResolver extends SigningKeyResolverAdapter { public HttpClient client; @@ -38,9 +37,7 @@ public DataHubJwtSigningKeyResolver(HashSet<String> list, String publicKey, Stri client = HttpClient.newHttpClient(); } - /** - * inspect the header or claims, lookup and return the signing key - **/ + /** inspect the header or claims, lookup and return the signing key */ @Override public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) { @@ -66,12 +63,11 @@ public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) { return key; } - /** - * Get public keys from issuer and filter public key for token signature based on token keyId. - **/ + /** Get public keys from issuer and filter public key for token signature based on token keyId. 
    */
   private PublicKey loadPublicKey(String issuer, String keyId) throws Exception {
 
-    HttpRequest request = HttpRequest.newBuilder().uri(URI.create(issuer + "/protocol/openid-connect/certs")).build();
+    HttpRequest request =
+        HttpRequest.newBuilder().uri(URI.create(issuer + "/protocol/openid-connect/certs")).build();
 
     HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
     var body = new JSONObject(response.body());
@@ -87,9 +83,9 @@ private PublicKey loadPublicKey(String issuer, String keyId) throws Exception {
   }
 
   /**
-   * Generate public key based on token algorithem and public token received from issuer.
-   * Supported algo RSA
-   **/
+   * Generate public key based on the token algorithm and the public key material received from
+   * the issuer. Supported algo: RSA
+   */
   private PublicKey getPublicKey(JSONObject token) throws Exception {
 
     PublicKey publicKey = null;
@@ -97,8 +93,10 @@ private PublicKey getPublicKey(JSONObject token) throws Exception {
       case "RSA":
         try {
           KeyFactory kf = KeyFactory.getInstance("RSA");
-          BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString()));
-          BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString()));
+          BigInteger modulus =
+              new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString()));
+          BigInteger exponent =
+              new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString()));
           publicKey = kf.generatePublic(new RSAPublicKeySpec(modulus, exponent));
         } catch (InvalidKeySpecException e) {
           throw new InvalidKeySpecException("Invalid public key", e);
@@ -113,10 +111,7 @@ private PublicKey getPublicKey(JSONObject token) throws Exception {
     return publicKey;
   }
 
-  /**
-   * Generate public Key based on algorithem and 64 encoded public key.
-   * Supported algo RSA
-   **/
+  /** Generate public key based on the algorithm and a Base64-encoded public key. Supported algo: RSA */
   private PublicKey generatePublicKey(String alg, String key) throws Exception {
 
     PublicKey publicKey = null;
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
index 125bba7ec3280..2879f15784370 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
@@ -32,10 +32,10 @@
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang.ArrayUtils;
 
-
 /**
- * Service responsible for generating JWT tokens & managing the associated metadata entities in GMS for use within
- * DataHub that are stored in the entity service so that we can list & revoke tokens as needed.
+ * Service responsible for generating JWT tokens & managing the associated metadata entities in GMS
+ * for use within DataHub that are stored in the entity service so that we can list & revoke tokens
+ * as needed.
*/ @Slf4j public class StatefulTokenService extends StatelessTokenService { @@ -44,47 +44,65 @@ public class StatefulTokenService extends StatelessTokenService { private final LoadingCache<String, Boolean> _revokedTokenCache; private final String salt; - public StatefulTokenService(@Nonnull final String signingKey, @Nonnull final String signingAlgorithm, - @Nullable final String iss, @Nonnull final EntityService entityService, @Nonnull final String salt) { + public StatefulTokenService( + @Nonnull final String signingKey, + @Nonnull final String signingAlgorithm, + @Nullable final String iss, + @Nonnull final EntityService entityService, + @Nonnull final String salt) { super(signingKey, signingAlgorithm, iss); this._entityService = entityService; - this._revokedTokenCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(5, TimeUnit.MINUTES) - .build(new CacheLoader<String, Boolean>() { - @Override - public Boolean load(final String key) { - final Urn accessUrn = Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); - return !_entityService.exists(accessUrn); - } - }); + this._revokedTokenCache = + CacheBuilder.newBuilder() + .maximumSize(10000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build( + new CacheLoader<String, Boolean>() { + @Override + public Boolean load(final String key) { + final Urn accessUrn = + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); + return !_entityService.exists(accessUrn); + } + }); this.salt = salt; } /** * Generates a JWT for an actor with a default expiration time. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + * <p>Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Override public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor) { - throw new UnsupportedOperationException("Please use generateToken(Token, Actor, String, String, String) endpoint " - + "instead. Reason: StatefulTokenService requires that all tokens have a name & ownerUrn specified."); + throw new UnsupportedOperationException( + "Please use generateToken(Token, Actor, String, String, String) endpoint " + + "instead. 
Reason: StatefulTokenService requires that all tokens have a name & ownerUrn specified."); } @Nonnull - public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor, - @Nonnull final String name, final String description, final String actorUrn) { + public String generateAccessToken( + @Nonnull final TokenType type, + @Nonnull final Actor actor, + @Nonnull final String name, + final String description, + final String actorUrn) { Date date = new Date(); long timeMilli = date.getTime(); - return generateAccessToken(type, actor, DEFAULT_EXPIRES_IN_MS, timeMilli, name, description, actorUrn); + return generateAccessToken( + type, actor, DEFAULT_EXPIRES_IN_MS, timeMilli, name, description, actorUrn); } @Nonnull - public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor, - @Nullable final Long expiresInMs, @Nonnull final long createdAtInMs, @Nonnull final String tokenName, - @Nullable final String tokenDescription, final String actorUrn) { + public String generateAccessToken( + @Nonnull final TokenType type, + @Nonnull final Actor actor, + @Nullable final Long expiresInMs, + @Nonnull final long createdAtInMs, + @Nonnull final String tokenName, + @Nullable final String tokenDescription, + final String actorUrn) { Objects.requireNonNull(type); Objects.requireNonNull(actor); @@ -101,7 +119,8 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final final MetadataChangeProposal proposal = new MetadataChangeProposal(); - // Create the access token key --> use a hashed access token value as a unique id to ensure it's not duplicated. + // Create the access token key --> use a hashed access token value as a unique id to ensure it's + // not duplicated. final DataHubAccessTokenKey key = new DataHubAccessTokenKey(); key.setId(tokenHash); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); @@ -124,14 +143,20 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final proposal.setChangeType(ChangeType.UPSERT); log.info("About to ingest access token metadata {}", proposal); - final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); + final AuditStamp auditStamp = + AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); - Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(proposal), + Stream<MetadataChangeProposal> proposalStream = + Stream.concat( + Stream.of(proposal), AspectUtils.getAdditionalChanges(proposal, _entityService).stream()); - _entityService.ingestProposal(AspectsBatchImpl.builder() + _entityService.ingestProposal( + AspectsBatchImpl.builder() .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(), auditStamp, false); + .build(), + auditStamp, + false); return accessToken; } @@ -153,7 +178,8 @@ public TokenClaims validateAccessToken(@Nonnull String accessToken) throws Token this.revokeAccessToken(hash(accessToken)); throw e; } catch (final ExecutionException e) { - throw new TokenException("Failed to validate DataHub token: Unable to load token information from store", e); + throw new TokenException( + "Failed to validate DataHub token: Unable to load token information from store", e); } } @@ -171,9 +197,7 @@ public void revokeAccessToken(@Nonnull String hashedToken) throws TokenException throw new TokenException("Access token no longer exists"); } - /** - * Hashes the input after salting it. 
-   */
+  /** Hashes the input after salting it. */
   public String hash(String input) {
     final byte[] saltingKeyBytes = this.salt.getBytes();
     final byte[] inputBytes = input.getBytes();
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java
index fa8ec8c818734..71f12477a33b2 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java
@@ -21,10 +21,9 @@
 import javax.annotation.Nullable;
 import javax.crypto.spec.SecretKeySpec;
 
-
 /**
- * Service responsible for generating JWT tokens for use within DataHub in stateless way.
- * This service is responsible only for generating tokens, it will not do anything else with them.
+ * Service responsible for generating JWT tokens for use within DataHub in a stateless way. This
+ * service is responsible only for generating tokens; it will not do anything else with them.
  */
 public class StatelessTokenService {
 
@@ -40,17 +39,14 @@ public class StatelessTokenService {
   private final String iss;
 
   public StatelessTokenService(
-      @Nonnull final String signingKey,
-      @Nonnull final String signingAlgorithm
-  ) {
+      @Nonnull final String signingKey, @Nonnull final String signingAlgorithm) {
     this(signingKey, signingAlgorithm, null);
   }
 
   public StatelessTokenService(
       @Nonnull final String signingKey,
       @Nonnull final String signingAlgorithm,
-      @Nullable final String iss
-  ) {
+      @Nullable final String iss) {
     this.signingKey = Objects.requireNonNull(signingKey);
     this.signingAlgorithm = validateAlgorithm(Objects.requireNonNull(signingAlgorithm));
     this.iss = iss;
@@ -59,8 +55,8 @@ public StatelessTokenService(
   /**
    * Generates a JWT for an actor with a default expiration time.
    *
-   * Note that the caller of this method is expected to authorize the action of generating a token.
-   *
+   * <p>Note that the caller of this method is expected to authorize the action of generating a
+   * token.
    */
   public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor) {
     return generateAccessToken(type, actor, DEFAULT_EXPIRES_IN_MS);
@@ -69,19 +65,19 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final
   /**
    * Generates a JWT for an actor with a specific duration in milliseconds.
    *
-   * Note that the caller of this method is expected to authorize the action of generating a token.
-   *
+   * <p>Note that the caller of this method is expected to authorize the action of generating a
+   * token.
    */
   @Nonnull
   public String generateAccessToken(
-      @Nonnull final TokenType type,
-      @Nonnull final Actor actor,
-      @Nullable final Long expiresInMs) {
+      @Nonnull final TokenType type, @Nonnull final Actor actor, @Nullable final Long expiresInMs) {
     Objects.requireNonNull(type);
     Objects.requireNonNull(actor);
     Map<String, Object> claims = new HashMap<>();
-    claims.put(TokenClaims.TOKEN_VERSION_CLAIM_NAME, String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now.
+    claims.put(
+        TokenClaims.TOKEN_VERSION_CLAIM_NAME,
+        String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now.
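// Taken together with validateAccessToken below, the API round-trips roughly as in this
// sketch; the signing key, the "HS256" algorithm, and the actor values are illustrative
// assumptions rather than values taken from this patch:
StatelessTokenService tokenService = new StatelessTokenService(signingKey, "HS256");
String jwt =
    tokenService.generateAccessToken(TokenType.SESSION, new Actor(ActorType.USER, "johnsmith"));
TokenClaims tokenClaims = tokenService.validateAccessToken(jwt); // throws TokenException if invalid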
claims.put(TokenClaims.TOKEN_TYPE_CLAIM_NAME, type.toString()); claims.put(TokenClaims.ACTOR_TYPE_CLAIM_NAME, actor.getType()); claims.put(TokenClaims.ACTOR_ID_CLAIM_NAME, actor.getId()); @@ -91,7 +87,8 @@ public String generateAccessToken( /** * Generates a JWT for a custom set of claims. * - * Note that the caller of this method is expected to authorize the action of generating a token. + * <p>Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Nonnull public String generateAccessToken( @@ -100,10 +97,8 @@ public String generateAccessToken( @Nullable final Long expiresInMs) { Objects.requireNonNull(sub); Objects.requireNonNull(claims); - final JwtBuilder builder = Jwts.builder() - .addClaims(claims) - .setId(UUID.randomUUID().toString()) - .setSubject(sub); + final JwtBuilder builder = + Jwts.builder().addClaims(claims).setId(UUID.randomUUID().toString()).setSubject(sub); if (expiresInMs != null) { builder.setExpiration(new Date(System.currentTimeMillis() + expiresInMs)); @@ -111,7 +106,7 @@ public String generateAccessToken( if (this.iss != null) { builder.setIssuer(this.iss); } - byte [] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); + byte[] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); final Key signingKey = new SecretKeySpec(apiKeySecretBytes, this.signingAlgorithm.getJcaName()); return builder.signWith(signingKey, this.signingAlgorithm).compact(); } @@ -119,18 +114,16 @@ public String generateAccessToken( /** * Validates a JWT issued by this service. * - * Throws an {@link TokenException} in the case that the token cannot be verified. + * <p>Throws an {@link TokenException} in the case that the token cannot be verified. */ @Nonnull public TokenClaims validateAccessToken(@Nonnull final String accessToken) throws TokenException { Objects.requireNonNull(accessToken); try { - byte [] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); + byte[] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); final String base64Key = Base64.getEncoder().encodeToString(apiKeySecretBytes); - final Jws<Claims> jws = Jwts.parserBuilder() - .setSigningKey(base64Key) - .build() - .parseClaimsJws(accessToken); + final Jws<Claims> jws = + Jwts.parserBuilder().setSigningKey(base64Key).build().parseClaimsJws(accessToken); validateTokenAlgorithm(jws.getHeader().getAlgorithm()); final Claims claims = jws.getBody(); final String tokenVersion = claims.get(TokenClaims.TOKEN_VERSION_CLAIM_NAME, String.class); @@ -138,33 +131,37 @@ public TokenClaims validateAccessToken(@Nonnull final String accessToken) throws final String actorId = claims.get(TokenClaims.ACTOR_ID_CLAIM_NAME, String.class); final String actorType = claims.get(TokenClaims.ACTOR_TYPE_CLAIM_NAME, String.class); if (tokenType != null && actorId != null && actorType != null) { - return new TokenClaims( - TokenVersion.fromNumericStringValue(tokenVersion), - TokenType.valueOf(tokenType), - ActorType.valueOf(actorType), - actorId, - claims.getExpiration() == null ? null : claims.getExpiration().getTime()); + return new TokenClaims( + TokenVersion.fromNumericStringValue(tokenVersion), + TokenType.valueOf(tokenType), + ActorType.valueOf(actorType), + actorId, + claims.getExpiration() == null ? null : claims.getExpiration().getTime()); } } catch (io.jsonwebtoken.ExpiredJwtException e) { throw new TokenExpiredException("Failed to validate DataHub token. 
Token has expired.", e); } catch (Exception e) { throw new TokenException("Failed to validate DataHub token", e); } - throw new TokenException("Failed to validate DataHub token: Found malformed or missing 'actor' claim."); + throw new TokenException( + "Failed to validate DataHub token: Found malformed or missing 'actor' claim."); } private void validateTokenAlgorithm(final String algorithm) throws TokenException { try { validateAlgorithm(algorithm); } catch (UnsupportedOperationException e) { - throw new TokenException(String.format("Failed to validate signing algorithm for provided JWT! Found %s", algorithm)); + throw new TokenException( + String.format( + "Failed to validate signing algorithm for provided JWT! Found %s", algorithm)); } } private SignatureAlgorithm validateAlgorithm(final String algorithm) { if (!SUPPORTED_ALGORITHMS.contains(algorithm)) { throw new UnsupportedOperationException( - String.format("Failed to create Token Service. Unsupported algorithm %s provided", algorithm)); + String.format( + "Failed to create Token Service. Unsupported algorithm %s provided", algorithm)); } return SignatureAlgorithm.valueOf(algorithm); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java index 05890cd2181ab..83e23a07918e7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java @@ -8,10 +8,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Contains strongly-typed claims that appear in all DataHub granted access tokens. - */ +/** Contains strongly-typed claims that appear in all DataHub granted access tokens. */ public class TokenClaims { public static final String TOKEN_VERSION_CLAIM_NAME = "version"; @@ -21,42 +18,40 @@ public class TokenClaims { public static final String EXPIRATION_CLAIM = "exp"; /** - * The type of the access token, e.g. a session token issued by the frontend or a personal access token - * generated for programmatic use. + * The type of the access token, e.g. a session token issued by the frontend or a personal access + * token generated for programmatic use. */ private final TokenVersion tokenVersion; /** - * The type of the access token, e.g. a session token issued by the frontend or a personal access token - * generated for programmatic use. + * The type of the access token, e.g. a session token issued by the frontend or a personal access + * token generated for programmatic use. */ private final TokenType tokenType; /** * The type of an authenticated DataHub actor. * - * E.g. "urn:li:corpuser:johnsmith" is of type USER. + * <p>E.g. "urn:li:corpuser:johnsmith" is of type USER. */ private final ActorType actorType; /** * A unique identifier for an actor of a particular type. * - * E.g. "johnsmith" inside urn:li:corpuser:johnsmith. + * <p>E.g. "johnsmith" inside urn:li:corpuser:johnsmith. */ private final String actorId; - /** - * The expiration time in milliseconds if one exists, null otherwise. - */ + /** The expiration time in milliseconds if one exists, null otherwise. 
*/ private final Long expirationInMs; public TokenClaims( - @Nonnull TokenVersion tokenVersion, - @Nonnull TokenType tokenType, - @Nonnull final ActorType actorType, - @Nonnull final String actorId, - @Nullable Long expirationInMs) { + @Nonnull TokenVersion tokenVersion, + @Nonnull TokenType tokenType, + @Nonnull final ActorType actorType, + @Nonnull final String actorId, + @Nullable Long expirationInMs) { Objects.requireNonNull(tokenVersion); Objects.requireNonNull(tokenType); Objects.requireNonNull(actorType); @@ -68,51 +63,38 @@ public TokenClaims( this.expirationInMs = expirationInMs; } - /** - * Returns the version of the access token - */ + /** Returns the version of the access token */ public TokenVersion getTokenVersion() { return this.tokenVersion; } - /** - * Returns the type of an authenticated DataHub actor. - */ + /** Returns the type of an authenticated DataHub actor. */ public TokenType getTokenType() { return this.tokenType; } - /** - * Returns the type of an authenticated DataHub actor. - */ + /** Returns the type of an authenticated DataHub actor. */ public ActorType getActorType() { return this.actorType; } - /** - * Returns the expiration time in milliseconds if one exists, null otherwise. - */ + /** Returns the expiration time in milliseconds if one exists, null otherwise. */ public Long getExpirationInMs() { return this.expirationInMs; } - /** - * Returns a unique id associated with a DataHub actor of a particular type. - */ + /** Returns a unique id associated with a DataHub actor of a particular type. */ public String getActorId() { return this.actorId; } - /** - * Returns the claims in the DataHub Access token as a map. - */ + /** Returns the claims in the DataHub Access token as a map. */ public Map<String, Object> asMap() { return ImmutableMap.of( TOKEN_VERSION_CLAIM_NAME, this.tokenVersion.numericValue, TOKEN_TYPE_CLAIM_NAME, this.tokenType.toString(), ACTOR_TYPE_CLAIM_NAME, this.actorType.toString(), ACTOR_ID_CLAIM_NAME, this.actorId, - EXPIRATION_CLAIM, Optional.ofNullable(this.expirationInMs) - ); + EXPIRATION_CLAIM, Optional.ofNullable(this.expirationInMs)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java index 24b6daa830f47..9d239482f85f8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java @@ -1,8 +1,6 @@ package com.datahub.authentication.token; -/** - * A checked exception that is thrown when a DataHub-issued access token cannot be verified. - */ +/** A checked exception that is thrown when a DataHub-issued access token cannot be verified. 
*/ public class TokenException extends Exception { public TokenException(final String message) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java index 6c4e5e037d4da..ae5d2daddcc0e 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java @@ -1,8 +1,6 @@ package com.datahub.authentication.token; -/** - * A checked exception that is thrown when a DataHub-issued access token cannot be verified. - */ +/** A checked exception that is thrown when a DataHub-issued access token cannot be verified. */ public class TokenExpiredException extends TokenException { public TokenExpiredException(final String message) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java index ca5de37b0fad4..475f79da3805c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java @@ -1,16 +1,10 @@ package com.datahub.authentication.token; -/** - * Represents a type of JWT access token granted by the {@link StatelessTokenService}. - */ +/** Represents a type of JWT access token granted by the {@link StatelessTokenService}. */ public enum TokenType { - /** - * A UI-initiated session token - */ + /** A UI-initiated session token */ SESSION, - /** - * A personal token for programmatic use - */ + /** A personal token for programmatic use */ PERSONAL; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java index 8f9189bf17b95..f1b362b71dfb4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java @@ -4,19 +4,14 @@ import java.util.Objects; import java.util.Optional; - -/** - * Represents a type of JWT access token granted by the {@link StatelessTokenService}. - */ +/** Represents a type of JWT access token granted by the {@link StatelessTokenService}. */ public enum TokenVersion { - /** - * The first version of the DataHub access token. - */ + /** The first version of the DataHub access token. */ ONE(1), /** - * The second version of the DataHub access token (latest). - * Used to represent tokens that are stateful and are stored within DataHub. + * The second version of the DataHub access token (latest). Used to represent tokens that are + * stateful and are stored within DataHub. */ TWO(2); @@ -26,37 +21,35 @@ public enum TokenVersion { this.numericValue = numericValue; } - /** - * Returns the numeric representation of the version - */ + /** Returns the numeric representation of the version */ public int getNumericValue() { return this.numericValue; } - /** - * Returns a {@link TokenVersion} provided a numeric token version. - */ + /** Returns a {@link TokenVersion} provided a numeric token version. 
*/ public static TokenVersion fromNumericValue(int num) { - Optional<TokenVersion> maybeVersion = Arrays.stream(TokenVersion.values()) - .filter(version -> num == version.getNumericValue()) - .findFirst(); + Optional<TokenVersion> maybeVersion = + Arrays.stream(TokenVersion.values()) + .filter(version -> num == version.getNumericValue()) + .findFirst(); if (maybeVersion.isPresent()) { return maybeVersion.get(); } - throw new IllegalArgumentException(String.format("Failed to find DataHubAccessTokenVersion %s", num)); + throw new IllegalArgumentException( + String.format("Failed to find DataHubAccessTokenVersion %s", num)); } - /** - * Returns a {@link TokenVersion} provided a stringified numeric token version. - */ + /** Returns a {@link TokenVersion} provided a stringified numeric token version. */ public static TokenVersion fromNumericStringValue(String num) { Objects.requireNonNull(num); - Optional<TokenVersion> maybeVersion = Arrays.stream(TokenVersion.values()) - .filter(version -> Integer.parseInt(num) == version.getNumericValue()) - .findFirst(); + Optional<TokenVersion> maybeVersion = + Arrays.stream(TokenVersion.values()) + .filter(version -> Integer.parseInt(num) == version.getNumericValue()) + .findFirst(); if (maybeVersion.isPresent()) { return maybeVersion.get(); } - throw new IllegalArgumentException(String.format("Failed to find DataHubAccessTokenVersion %s", num)); + throw new IllegalArgumentException( + String.format("Failed to find DataHubAccessTokenVersion %s", num)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java index bff675ddd9cb2..741d176f98c1b 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -1,5 +1,7 @@ package com.datahub.authentication.user; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConfiguration; import com.linkedin.common.AuditStamp; @@ -22,12 +24,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - -/** - * Service responsible for creating, updating and authenticating native DataHub users. - */ +/** Service responsible for creating, updating and authenticating native DataHub users. 
*/ @Slf4j @RequiredArgsConstructor public class NativeUserService { @@ -38,8 +35,14 @@ public class NativeUserService { private final SecretService _secretService; private final AuthenticationConfiguration _authConfig; - public void createNativeUser(@Nonnull String userUrnString, @Nonnull String fullName, @Nonnull String email, - @Nonnull String title, @Nonnull String password, @Nonnull Authentication authentication) throws Exception { + public void createNativeUser( + @Nonnull String userUrnString, + @Nonnull String fullName, + @Nonnull String email, + @Nonnull String title, + @Nonnull String password, + @Nonnull Authentication authentication) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnSting must not be null!"); Objects.requireNonNull(fullName, "fullName must not be null!"); Objects.requireNonNull(email, "email must not be null!"); @@ -49,7 +52,8 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full final Urn userUrn = Urn.createFromString(userUrnString); if (_entityService.exists(userUrn) - // Should never fail these due to Controller level check, but just in case more usages get put in + // Should never fail these due to Controller level check, but just in case more usages get + // put in || userUrn.toString().equals(SYSTEM_ACTOR) || userUrn.toString().equals(new CorpuserUrn(_authConfig.getSystemClientId()).toString()) || userUrn.toString().equals(DATAHUB_ACTOR) @@ -61,8 +65,13 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full updateCorpUserCredentials(userUrn, password, authentication); } - void updateCorpUserInfo(@Nonnull Urn userUrn, @Nonnull String fullName, @Nonnull String email, @Nonnull String title, - Authentication authentication) throws Exception { + void updateCorpUserInfo( + @Nonnull Urn userUrn, + @Nonnull String fullName, + @Nonnull String email, + @Nonnull String title, + Authentication authentication) + throws Exception { // Construct corpUserInfo final CorpUserInfo corpUserInfo = new CorpUserInfo(); corpUserInfo.setFullName(fullName); @@ -86,7 +95,9 @@ void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) t CorpUserStatus corpUserStatus = new CorpUserStatus(); corpUserStatus.setStatus(CORP_USER_STATUS_ACTIVE); corpUserStatus.setLastModified( - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); // Ingest corpUserStatus MCP final MetadataChangeProposal corpUserStatusProposal = new MetadataChangeProposal(); @@ -98,7 +109,8 @@ void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) t _entityClient.ingestProposal(corpUserStatusProposal, authentication); } - void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, @Nonnull Authentication authentication) + void updateCorpUserCredentials( + @Nonnull Urn userUrn, @Nonnull String password, @Nonnull Authentication authentication) throws Exception { // Construct corpUserCredentials CorpUserCredentials corpUserCredentials = new CorpUserCredentials(); @@ -118,15 +130,18 @@ void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, @ _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); } - public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString, Authentication authentication) - throws Exception { + public String generateNativeUserPasswordResetToken( + @Nonnull 
String userUrnString, Authentication authentication) throws Exception { Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { throw new RuntimeException("User does not exist or is a non-native user!"); } // Add reset token to CorpUserCredentials @@ -148,8 +163,12 @@ public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString return passwordResetToken; } - public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull String password, - @Nonnull String resetToken, Authentication authentication) throws Exception { + public void resetCorpUserCredentials( + @Nonnull String userUrnString, + @Nonnull String password, + @Nonnull String resetToken, + Authentication authentication) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); Objects.requireNonNull(password, "password must not be null!"); Objects.requireNonNull(resetToken, "resetToken must not be null!"); @@ -157,24 +176,30 @@ public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull Str Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { throw new RuntimeException("User does not exist!"); } - if (!corpUserCredentials.hasPasswordResetToken() || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() + if (!corpUserCredentials.hasPasswordResetToken() + || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() || corpUserCredentials.getPasswordResetTokenExpirationTimeMillis() == null) { throw new RuntimeException("User has not generated a password reset token!"); } if (!_secretService.decrypt(corpUserCredentials.getPasswordResetToken()).equals(resetToken)) { - throw new RuntimeException("Invalid reset token. Please ask your administrator to send you an updated link!"); + throw new RuntimeException( + "Invalid reset token. Please ask your administrator to send you an updated link!"); } long currentTimeMillis = Instant.now().toEpochMilli(); if (currentTimeMillis > corpUserCredentials.getPasswordResetTokenExpirationTimeMillis()) { - throw new RuntimeException("Reset token has expired! Please ask your administrator to create a new one"); + throw new RuntimeException( + "Reset token has expired! 
Please ask your administrator to create a new one"); } // Construct corpUserCredentials @@ -194,14 +219,18 @@ public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull Str _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); } - public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) throws Exception { + public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnSting must not be null!"); Objects.requireNonNull(password, "Password must not be null!"); Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { return false; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java index 7e7a1de176f06..9e8c1928c9de0 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java @@ -14,12 +14,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * A configurable chain of {@link Authorizer}s executed in series to attempt to authenticate an inbound request. + * A configurable chain of {@link Authorizer}s executed in series to attempt to authenticate an + * inbound request. * - * Individual {@link Authorizer}s are registered with the chain using {@link #register(Authorizer)}. - * The chain can be executed by invoking {@link #authorize(AuthorizationRequest)}. + * <p>Individual {@link Authorizer}s are registered with the chain using {@link + * #register(Authorizer)}. The chain can be executed by invoking {@link + * #authorize(AuthorizationRequest)}. */ @Slf4j public class AuthorizerChain implements Authorizer { @@ -41,7 +42,7 @@ public void init(@Nonnull Map<String, Object> authorizerConfig, @Nonnull Authori /** * Executes a set of {@link Authorizer}s and returns the first successful authentication result. * - * Returns an instance of {@link AuthorizationResult}. + * <p>Returns an instance of {@link AuthorizationResult}. */ @Nullable public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request) { @@ -51,10 +52,13 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request for (final Authorizer authorizer : this.authorizers) { try { - log.debug("Executing Authorizer with class name {}", authorizer.getClass().getCanonicalName()); + log.debug( + "Executing Authorizer with class name {}", authorizer.getClass().getCanonicalName()); log.debug("Authorization Request: {}", request.toString()); - // The library came with plugin can use the contextClassLoader to load the classes. For example apache-ranger library does this. - // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class loading request from plugin's home directory, + // The library came with plugin can use the contextClassLoader to load the classes. 
For + // example apache-ranger library does this. + // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class + // loading request from plugin's home directory, // otherwise plugin's internal library wouldn't be able to find their dependent classes Thread.currentThread().setContextClassLoader(authorizer.getClass().getClassLoader()); AuthorizationResult result = authorizer.authorize(request); @@ -67,12 +71,16 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request return result; } else { - log.debug("Received DENY result from Authorizer with class name {}. message: {}", - authorizer.getClass().getCanonicalName(), result.getMessage()); + log.debug( + "Received DENY result from Authorizer with class name {}. message: {}", + authorizer.getClass().getCanonicalName(), + result.getMessage()); } } catch (Exception e) { - log.error("Caught exception while attempting to authorize request using Authorizer {}. Skipping authorizer.", - authorizer.getClass().getCanonicalName(), e); + log.error( + "Caught exception while attempting to authorize request using Authorizer {}. Skipping authorizer.", + authorizer.getClass().getCanonicalName(), + e); } finally { Thread.currentThread().setContextClassLoader(contextClassLoader); } @@ -87,16 +95,19 @@ public AuthorizedActors authorizedActors(String privilege, Optional<EntitySpec> return null; } - AuthorizedActors finalAuthorizedActors = this.authorizers.get(0).authorizedActors(privilege, resourceSpec); + AuthorizedActors finalAuthorizedActors = + this.authorizers.get(0).authorizedActors(privilege, resourceSpec); for (int i = 1; i < this.authorizers.size(); i++) { - finalAuthorizedActors = mergeAuthorizedActors(finalAuthorizedActors, - this.authorizers.get(i).authorizedActors(privilege, resourceSpec)); + finalAuthorizedActors = + mergeAuthorizedActors( + finalAuthorizedActors, + this.authorizers.get(i).authorizedActors(privilege, resourceSpec)); } return finalAuthorizedActors; } - private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors original, - @Nullable AuthorizedActors other) { + private AuthorizedActors mergeAuthorizedActors( + @Nullable AuthorizedActors original, @Nullable AuthorizedActors other) { if (original == null) { return other; } @@ -139,10 +150,8 @@ private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors origin .build(); } - /** - * Returns an instance of default {@link DataHubAuthorizer} - */ + /** Returns an instance of default {@link DataHubAuthorizer} */ public DataHubAuthorizer getDefaultAuthorizer() { return (DataHubAuthorizer) defaultAuthorizer; } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index 956d635c7901a..9ae95bd4e92b6 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -8,7 +8,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.policy.DataHubPolicyInfo; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; @@ -26,26 +25,23 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * The Authorizer is a singleton class responsible for authorizing - * operations on the 
DataHub platform via DataHub Policies. + * The Authorizer is a singleton class responsible for authorizing operations on the DataHub + * platform via DataHub Policies. * - * Currently, the authorizer is implemented as a spring-instantiated Singleton - * which manages its own thread-pool used for resolving policy predicates. + * <p>Currently, the authorizer is implemented as a spring-instantiated Singleton which manages its + * own thread-pool used for resolving policy predicates. */ // TODO: Decouple this from all Rest.li objects if possible. @Slf4j public class DataHubAuthorizer implements Authorizer { public enum AuthorizationMode { - /** - * Default mode simply means that authorization is enforced, with a DENY result returned - */ + /** Default mode simply means that authorization is enforced, with a DENY result returned */ DEFAULT, /** - * Allow all means that the DataHubAuthorizer will allow all actions. This is used as an override to disable the - * policies feature. + * Allow all means that the DataHubAuthorizer will allow all actions. This is used as an + * override to disable the policies feature. */ ALLOW_ALL } @@ -55,11 +51,13 @@ public enum AuthorizationMode { // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. - private final Map<String, List<DataHubPolicyInfo>> _policyCache = new HashMap<>(); // Shared Policy Cache. + private final Map<String, List<DataHubPolicyInfo>> _policyCache = + new HashMap<>(); // Shared Policy Cache. private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); private final Lock readLock = readWriteLock.readLock(); - private final ScheduledExecutorService _refreshExecutorService = Executors.newScheduledThreadPool(1); + private final ScheduledExecutorService _refreshExecutorService = + Executors.newScheduledThreadPool(1); private final PolicyRefreshRunnable _policyRefreshRunnable; private final PolicyEngine _policyEngine; private EntitySpecResolver _entitySpecResolver; @@ -77,9 +75,15 @@ public DataHubAuthorizer( _systemAuthentication = Objects.requireNonNull(systemAuthentication); _mode = Objects.requireNonNull(mode); _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, - readWriteLock.writeLock(), policyFetchSize); - _refreshExecutorService.scheduleAtFixedRate(_policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); + _policyRefreshRunnable = + new PolicyRefreshRunnable( + systemAuthentication, + new PolicyFetcher(entityClient), + _policyCache, + readWriteLock.writeLock(), + policyFetchSize); + _refreshExecutorService.scheduleAtFixedRate( + _policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); } @Override @@ -95,41 +99,48 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, null); } - Optional<ResolvedEntitySpec> resolvedResourceSpec = request.getResourceSpec().map(_entitySpecResolver::resolve); + Optional<ResolvedEntitySpec> resolvedResourceSpec = + request.getResourceSpec().map(_entitySpecResolver::resolve); // 1. Fetch the policies relevant to the requested privilege. 
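// As a usage sketch of the authorize flow (the AuthorizationRequest argument order, the
// privilege string, and the datasetUrn variable are assumptions, not taken from this patch):
AuthorizationRequest authRequest =
    new AuthorizationRequest(
        "urn:li:corpuser:johnsmith",
        "EDIT_ENTITY_TAGS",
        Optional.of(new EntitySpec("dataset", datasetUrn)));
AuthorizationResult authResult = dataHubAuthorizer.authorize(authRequest);
boolean allowed = AuthorizationResult.Type.ALLOW.equals(authResult.getType());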
-    final List<DataHubPolicyInfo> policiesToEvaluate = getOrDefault(request.getPrivilege(), new ArrayList<>());
+    final List<DataHubPolicyInfo> policiesToEvaluate =
+        getOrDefault(request.getPrivilege(), new ArrayList<>());
 
     // 2. Evaluate each policy.
     for (DataHubPolicyInfo policy : policiesToEvaluate) {
       if (isRequestGranted(policy, request, resolvedResourceSpec)) {
         // Short circuit if policy has granted privileges to this actor.
-        return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW,
+        return new AuthorizationResult(
+            request,
+            AuthorizationResult.Type.ALLOW,
             String.format("Granted by policy with type: %s", policy.getType()));
       }
     }
-    return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null);
+    return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null);
   }
 
-  public List<String> getGrantedPrivileges(final String actor, final Optional<EntitySpec> resourceSpec) {
+  public List<String> getGrantedPrivileges(
+      final String actor, final Optional<EntitySpec> resourceSpec) {
     // 1. Fetch all policies
     final List<DataHubPolicyInfo> policiesToEvaluate = getOrDefault(ALL, new ArrayList<>());
 
     Urn actorUrn = UrnUtils.getUrn(actor);
-    final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor));
+    final ResolvedEntitySpec resolvedActorSpec =
+        _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor));
 
-    Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve);
+    Optional<ResolvedEntitySpec> resolvedResourceSpec =
+        resourceSpec.map(_entitySpecResolver::resolve);
 
-    return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec);
+    return _policyEngine.getGrantedPrivileges(
+        policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec);
   }
 
   /**
-   * Retrieves the current list of actors authorized to for a particular privilege against
-   * an optional resource
+   * Retrieves the current list of actors authorized for a particular privilege against an
+   * optional resource
    */
   public AuthorizedActors authorizedActors(
-      final String privilege,
-      final Optional<EntitySpec> resourceSpec) {
+      final String privilege, final Optional<EntitySpec> resourceSpec) {
     final List<Urn> authorizedUsers = new ArrayList<>();
     final List<Urn> authorizedGroups = new ArrayList<>();
@@ -140,7 +151,8 @@ public AuthorizedActors authorizedActors(
 
     // Step 1: Find policies granting the privilege.
     final List<DataHubPolicyInfo> policiesToEvaluate = getOrDefault(privilege, new ArrayList<>());
 
-    Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve);
+    Optional<ResolvedEntitySpec> resolvedResourceSpec =
+        resourceSpec.map(_entitySpecResolver::resolve);
 
     // Step 2: For each policy, determine whether the resource is a match.
     for (DataHubPolicyInfo policy : policiesToEvaluate) {
@@ -149,7 +161,8 @@ public AuthorizedActors authorizedActors(
         continue;
       }
 
-      final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec);
+      final PolicyEngine.PolicyActors matchingActors =
+          _policyEngine.getMatchingActors(policy, resolvedResourceSpec);
 
       // Step 3: For each matching policy, add actors that are authorized.
       authorizedUsers.addAll(matchingActors.getUsers());
@@ -164,12 +177,13 @@ public AuthorizedActors authorizedActors(
     }
 
     // Step 4: Return all authorized users and groups.
-    return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups);
+    return new AuthorizedActors(
+        privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups);
   }
 
   /**
-   * Invalidates the policy cache and fires off a refresh thread. Should be invoked
-   * when a policy is created, modified, or deleted.
+   * Invalidates the policy cache and fires off a refresh thread. Should be invoked when a policy is
+   * created, modified, or deleted.
    */
   public void invalidateCache() {
     _refreshExecutorService.execute(_policyRefreshRunnable);
@@ -184,17 +198,19 @@ public void setMode(final AuthorizationMode mode) {
   }
 
   /**
-   * Returns true if the request's is coming from the system itself, in which cases
-   * the action is always authorized.
+   * Returns true if the request is coming from the system itself, in which case the action is
+   * always authorized.
    */
-  private boolean isSystemRequest(final AuthorizationRequest request, final Authentication systemAuthentication) {
+  private boolean isSystemRequest(
+      final AuthorizationRequest request, final Authentication systemAuthentication) {
     return systemAuthentication.getActor().toUrnStr().equals(request.getActorUrn());
   }
 
-  /**
-   * Returns true if a policy grants the requested privilege for a given actor and resource.
-   */
-  private boolean isRequestGranted(final DataHubPolicyInfo policy, final AuthorizationRequest request, final Optional<ResolvedEntitySpec> resourceSpec) {
+  /** Returns true if a policy grants the requested privilege for a given actor and resource. */
+  private boolean isRequestGranted(
+      final DataHubPolicyInfo policy,
+      final AuthorizationRequest request,
+      final Optional<ResolvedEntitySpec> resourceSpec) {
     if (AuthorizationMode.ALLOW_ALL.equals(mode())) {
       return true;
     }
@@ -204,14 +220,12 @@ private boolean isRequestGranted(final DataHubPolicyInfo policy, final Authoriza
       return false;
     }
 
-    final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(
+    final ResolvedEntitySpec resolvedActorSpec =
+        _entitySpecResolver.resolve(
             new EntitySpec(actorUrn.get().getEntityType(), request.getActorUrn()));
-    final PolicyEngine.PolicyEvaluationResult result = _policyEngine.evaluatePolicy(
-        policy,
-        resolvedActorSpec,
-        request.getPrivilege(),
-        resourceSpec
-    );
+    final PolicyEngine.PolicyEvaluationResult result =
+        _policyEngine.evaluatePolicy(
+            policy, resolvedActorSpec, request.getPrivilege(), resourceSpec);
     return result.isGranted();
   }
 
@@ -219,7 +233,10 @@ private Optional<Urn> getUrnFromRequestActor(String actor) {
     try {
       return Optional.of(Urn.createFromString(actor));
     } catch (URISyntaxException e) {
-      log.error(String.format("Failed to bind actor %s to an URN. Actors must be URNs. Denying the authorization request", actor));
+      log.error(
+          String.format(
+              "Failed to bind actor %s to a URN. Actors must be URNs. Denying the authorization request",
+              actor));
       return Optional.empty();
     }
   }
@@ -237,8 +254,8 @@ private List<DataHubPolicyInfo> getOrDefault(String key, List<DataHubPolicyInfo>
 
   /**
    * A {@link Runnable} used to periodically fetch a new instance of the policies Cache.
    *
-   * Currently, the refresh logic is not very smart. When the cache is invalidated, we simply re-fetch the
-   * entire cache using Policies stored in the backend.
+   * <p>Currently, the refresh logic is not very smart. When the cache is invalidated, we simply
+   * re-fetch the entire cache using Policies stored in the backend.
*/ @VisibleForTesting @RequiredArgsConstructor @@ -260,8 +277,8 @@ public void run() { while (total == null || scrollId != null) { try { - final PolicyFetcher.PolicyFetchResult - policyFetchResult = _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); + final PolicyFetcher.PolicyFetchResult policyFetchResult = + _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); addPoliciesToCache(newCache, policyFetchResult.getPolicies()); @@ -269,7 +286,10 @@ public void run() { scrollId = policyFetchResult.getScrollId(); } catch (Exception e) { log.error( - "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", count, scrollId, e); + "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", + count, + scrollId, + e); return; } } @@ -285,23 +305,31 @@ public void run() { log.debug(String.format("Successfully fetched %s policies.", total)); } catch (Exception e) { - log.error("Caught exception while loading Policy cache. Will retry on next scheduled attempt.", e); + log.error( + "Caught exception while loading Policy cache. Will retry on next scheduled attempt.", + e); } } - private void addPoliciesToCache(final Map<String, List<DataHubPolicyInfo>> cache, + private void addPoliciesToCache( + final Map<String, List<DataHubPolicyInfo>> cache, final List<PolicyFetcher.Policy> policies) { policies.forEach(policy -> addPolicyToCache(cache, policy.getPolicyInfo())); } - private void addPolicyToCache(final Map<String, List<DataHubPolicyInfo>> cache, final DataHubPolicyInfo policy) { + private void addPolicyToCache( + final Map<String, List<DataHubPolicyInfo>> cache, final DataHubPolicyInfo policy) { final List<String> privileges = policy.getPrivileges(); for (String privilege : privileges) { - List<DataHubPolicyInfo> existingPolicies = cache.containsKey(privilege) ? new ArrayList<>(cache.get(privilege)) : new ArrayList<>(); + List<DataHubPolicyInfo> existingPolicies = + cache.containsKey(privilege) + ? new ArrayList<>(cache.get(privilege)) + : new ArrayList<>(); existingPolicies.add(policy); cache.put(privilege, existingPolicies); } - List<DataHubPolicyInfo> existingPolicies = cache.containsKey(ALL) ? new ArrayList<>(cache.get(ALL)) : new ArrayList<>(); + List<DataHubPolicyInfo> existingPolicies = + cache.containsKey(ALL) ? 
new ArrayList<>(cache.get(ALL)) : new ArrayList<>(); existingPolicies.add(policy); cache.put(ALL, existingPolicies); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java index 65b0329a9c4f2..c2d9c42693311 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java @@ -15,13 +15,14 @@ import java.util.Map; import java.util.stream.Collectors; - public class DefaultEntitySpecResolver implements EntitySpecResolver { private final List<EntityFieldResolverProvider> _entityFieldResolverProviders; public DefaultEntitySpecResolver(Authentication systemAuthentication, EntityClient entityClient) { _entityFieldResolverProviders = - ImmutableList.of(new EntityTypeFieldResolverProvider(), new EntityUrnFieldResolverProvider(), + ImmutableList.of( + new EntityTypeFieldResolverProvider(), + new EntityUrnFieldResolverProvider(), new DomainFieldResolverProvider(entityClient, systemAuthentication), new OwnerFieldResolverProvider(entityClient, systemAuthentication), new DataPlatformInstanceFieldResolverProvider(entityClient, systemAuthentication), @@ -35,7 +36,10 @@ public ResolvedEntitySpec resolve(EntitySpec entitySpec) { private Map<EntityFieldType, FieldResolver> getFieldResolvers(EntitySpec entitySpec) { return _entityFieldResolverProviders.stream() - .flatMap(resolver -> resolver.getFieldTypes().stream().map(fieldType -> Pair.of(fieldType, resolver))) - .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().getFieldResolver(entitySpec))); + .flatMap( + resolver -> + resolver.getFieldTypes().stream().map(fieldType -> Pair.of(fieldType, resolver))) + .collect( + Collectors.toMap(Pair::getKey, pair -> pair.getValue().getFieldResolver(entitySpec))); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java index 0dbb9cd132f8a..e4f6b483e09f8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java @@ -13,55 +13,56 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class FilterUtils { public static final PolicyMatchFilter EMPTY_FILTER = new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray()); - private FilterUtils() { - } + private FilterUtils() {} - /** - * Creates new PolicyMatchCriterion with field and value, using EQUAL PolicyMatchCondition. - */ + /** Creates new PolicyMatchCriterion with field and value, using EQUAL PolicyMatchCondition. */ @Nonnull - public static PolicyMatchCriterion newCriterion(@Nonnull EntityFieldType field, @Nonnull List<String> values) { + public static PolicyMatchCriterion newCriterion( + @Nonnull EntityFieldType field, @Nonnull List<String> values) { return newCriterion(field, values, PolicyMatchCondition.EQUALS); } - /** - * Creates new PolicyMatchCriterion with field, value and PolicyMatchCondition. - */ + /** Creates new PolicyMatchCriterion with field, value and PolicyMatchCondition. 
*/ @Nonnull - public static PolicyMatchCriterion newCriterion(@Nonnull EntityFieldType field, @Nonnull List<String> values, + public static PolicyMatchCriterion newCriterion( + @Nonnull EntityFieldType field, + @Nonnull List<String> values, @Nonnull PolicyMatchCondition policyMatchCondition) { - return new PolicyMatchCriterion().setField(field.name()) + return new PolicyMatchCriterion() + .setField(field.name()) .setValues(new StringArray(values)) .setCondition(policyMatchCondition); } /** - * Creates new PolicyMatchFilter from a map of Criteria by removing null-valued Criteria and using EQUAL PolicyMatchCondition (default). + * Creates new PolicyMatchFilter from a map of Criteria by removing null-valued Criteria and using + * EQUAL PolicyMatchCondition (default). */ @Nonnull public static PolicyMatchFilter newFilter(@Nullable Map<EntityFieldType, List<String>> params) { if (params == null) { return EMPTY_FILTER; } - PolicyMatchCriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue())) - .collect(Collectors.toCollection(PolicyMatchCriterionArray::new)); + PolicyMatchCriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue())) + .collect(Collectors.toCollection(PolicyMatchCriterionArray::new)); return new PolicyMatchFilter().setCriteria(criteria); } /** - * Creates new PolicyMatchFilter from a single PolicyMatchCriterion with EQUAL PolicyMatchCondition (default). + * Creates new PolicyMatchFilter from a single PolicyMatchCriterion with EQUAL + * PolicyMatchCondition (default). */ @Nonnull - public static PolicyMatchFilter newFilter(@Nonnull EntityFieldType field, @Nonnull List<String> values) { + public static PolicyMatchFilter newFilter( + @Nonnull EntityFieldType field, @Nonnull List<String> values) { return newFilter(Collections.singletonMap(field, values)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index da0ae26f2b1da..123e5f3c55932 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -1,5 +1,7 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Owner; import com.linkedin.common.Ownership; @@ -20,7 +22,6 @@ import com.linkedin.policy.PolicyMatchCriterion; import com.linkedin.policy.PolicyMatchCriterionArray; import com.linkedin.policy.PolicyMatchFilter; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -31,16 +32,12 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nullable; - import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class PolicyEngine { @@ -59,13 +56,19 @@ public PolicyEvaluationResult evaluatePolicy( // If the privilege is not in scope, deny the request. 
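(A hedged usage sketch for the FilterUtils helpers reformatted above: assembling a filter from a criteria map. The field values here are invented for illustration; the newFilter(Map) overload and the default EQUALS condition are the ones shown in this file's diff.)

import java.util.List;
import java.util.Map;

class FilterUsageSketch {
  // Builds a filter that matches entities of a given type and URN; values are hypothetical.
  static PolicyMatchFilter exampleFilter() {
    return FilterUtils.newFilter(
        Map.of(
            EntityFieldType.TYPE, List.of("dataset"),
            EntityFieldType.URN, List.of("urn:li:dataset:example")));
  }
}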
    if (!isPrivilegeMatch(privilege, policy.getPrivileges())) {
-      log.debug("Policy denied based on irrelevant privileges {} for {}", policy.getPrivileges(), privilege);
+      log.debug(
+          "Policy denied based on irrelevant privileges {} for {}",
+          policy.getPrivileges(),
+          privilege);
       return PolicyEvaluationResult.DENIED;
     }
 
     // If policy is not applicable, deny the request
     if (!isPolicyApplicable(policy, resolvedActorSpec, resource, context)) {
-      log.debug("Policy does not applicable for actor {} and resource {}", resolvedActorSpec.getSpec().getEntity(), resource);
+      log.debug(
+          "Policy is not applicable for actor {} and resource {}",
+          resolvedActorSpec.getSpec().getEntity(),
+          resource);
       return PolicyEvaluationResult.DENIED;
     }
 
@@ -74,8 +77,7 @@ public PolicyEvaluationResult evaluatePolicy(
   }
 
   public PolicyActors getMatchingActors(
-      final DataHubPolicyInfo policy,
-      final Optional<ResolvedEntitySpec> resource) {
+      final DataHubPolicyInfo policy, final Optional<ResolvedEntitySpec> resource) {
     final List<Urn> users = new ArrayList<>();
     final List<Urn> groups = new ArrayList<>();
     final List<Urn> roles = new ArrayList<>();
@@ -118,8 +120,7 @@ private boolean isPolicyApplicable(
       final DataHubPolicyInfo policy,
       final ResolvedEntitySpec resolvedActorSpec,
       final Optional<ResolvedEntitySpec> resource,
-      final PolicyEvaluationContext context
-  ) {
+      final PolicyEvaluationContext context) {
 
     // If policy is inactive, simply return DENY.
     if (PoliciesConfig.INACTIVE_POLICY_STATE.equals(policy.getState())) {
@@ -150,25 +151,27 @@ public List<String> getGrantedPrivileges(
   /**
    * Returns true if the policy matches the resource spec, false otherwise.
    *
-   * If the policy is of type "PLATFORM", the resource will always match (since there's no resource).
-   * If the policy is of type "METADATA", the resourceSpec parameter will be matched against the
-   * resource filter defined on the policy.
+   * <p>If the policy is of type "PLATFORM", the resource will always match (since there's no
+   * resource). If the policy is of type "METADATA", the resourceSpec parameter will be matched
+   * against the resource filter defined on the policy.
    */
-  public Boolean policyMatchesResource(final DataHubPolicyInfo policy, final Optional<ResolvedEntitySpec> resourceSpec) {
+  public Boolean policyMatchesResource(
+      final DataHubPolicyInfo policy, final Optional<ResolvedEntitySpec> resourceSpec) {
     return isResourceMatch(policy.getType(), policy.getResources(), resourceSpec);
   }
 
   /**
-   * Returns true if the privilege portion of a DataHub policy matches a the privilege being evaluated, false otherwise.
+   * Returns true if the privilege portion of a DataHub policy matches the privilege being
+   * evaluated, false otherwise.
   */
   private boolean isPrivilegeMatch(
-      final String requestPrivilege,
-      final List<String> policyPrivileges) {
+      final String requestPrivilege, final List<String> policyPrivileges) {
    return policyPrivileges.contains(requestPrivilege);
  }
 
  /**
-   * Returns true if the resource portion of a DataHub policy matches a the resource being evaluated, false otherwise.
+   * Returns true if the resource portion of a DataHub policy matches the resource being
+   * evaluated, false otherwise.
   */
  private boolean isResourceMatch(
      final String policyType,
@@ -192,8 +195,8 @@ private boolean isResourceMatch(
  }
 
  /**
-   * Get filter object from policy resource filter.
Make sure it is backward compatible by
+   * constructing PolicyMatchFilter object from other fields if the filter field is not set
   */
  private PolicyMatchFilter getFilter(DataHubResourceFilter policyResourceFilter) {
    if (policyResourceFilter.hasFilter()) {
@@ -201,13 +204,19 @@ private PolicyMatchFilter getFilter(DataHubResourceFilter policyResourceFilter)
    }
    PolicyMatchCriterionArray criteria = new PolicyMatchCriterionArray();
    if (policyResourceFilter.hasType()) {
-      criteria.add(new PolicyMatchCriterion().setField(EntityFieldType.TYPE.name())
-          .setValues(new StringArray(Collections.singletonList(policyResourceFilter.getType()))));
+      criteria.add(
+          new PolicyMatchCriterion()
+              .setField(EntityFieldType.TYPE.name())
+              .setValues(
+                  new StringArray(Collections.singletonList(policyResourceFilter.getType()))));
    }
-    if (policyResourceFilter.hasType() && policyResourceFilter.hasResources()
+    if (policyResourceFilter.hasType()
+        && policyResourceFilter.hasResources()
        && !policyResourceFilter.isAllResources()) {
      criteria.add(
-          new PolicyMatchCriterion().setField(EntityFieldType.URN.name()).setValues(policyResourceFilter.getResources()));
+          new PolicyMatchCriterion()
+              .setField(EntityFieldType.URN.name())
+              .setValues(policyResourceFilter.getResources()));
    }
    return new PolicyMatchFilter().setCriteria(criteria);
  }
@@ -216,7 +225,8 @@ private boolean checkFilter(final PolicyMatchFilter filter, final ResolvedEntity
    return filter.getCriteria().stream().allMatch(criterion -> checkCriterion(criterion, resource));
  }
 
-  private boolean checkCriterion(final PolicyMatchCriterion criterion, final ResolvedEntitySpec resource) {
+  private boolean checkCriterion(
+      final PolicyMatchCriterion criterion, final ResolvedEntitySpec resource) {
    EntityFieldType entityFieldType;
    try {
      entityFieldType = EntityFieldType.valueOf(criterion.getField().toUpperCase());
@@ -226,12 +236,13 @@ private boolean checkCriterion(final PolicyMatchCriterion criterion, final Resol
    }
 
    Set<String> fieldValues = resource.getFieldValues(entityFieldType);
-    return criterion.getValues()
-        .stream()
-        .anyMatch(filterValue -> checkCondition(fieldValues, filterValue, criterion.getCondition()));
+    return criterion.getValues().stream()
+        .anyMatch(
+            filterValue -> checkCondition(fieldValues, filterValue, criterion.getCondition()));
  }
 
-  private boolean checkCondition(Set<String> fieldValues, String filterValue, PolicyMatchCondition condition) {
+  private boolean checkCondition(
+      Set<String> fieldValues, String filterValue, PolicyMatchCondition condition) {
    if (condition == PolicyMatchCondition.EQUALS) {
      return fieldValues.contains(filterValue);
    }
@@ -240,8 +251,9 @@ private boolean checkCondition(Set<String> fieldValues, String filterValue, Poli
  }
 
  /**
-   * Returns true if the actor portion of a DataHub policy matches a the actor being evaluated, false otherwise.
-   * Returns true if the actor portion of a DataHub policy matches a the actor being evaluated, false otherwise.
+   * Returns true if the actor portion of a DataHub policy matches the actor being evaluated,
+   * false otherwise.
   */
  private boolean isActorMatch(
      final ResolvedEntitySpec resolvedActorSpec,
@@ -259,7 +271,8 @@ private boolean isActorMatch(
      return true;
    }
 
-    // 3. If the actor is the owner, either directly or indirectly via a group, return true immediately.
+    // 3. If the actor is the owner, either directly or indirectly via a group, return true
+    // immediately.
if (isOwnerMatch(resolvedActorSpec, actorFilter, resourceSpec, context)) { return true; } @@ -268,11 +281,14 @@ private boolean isActorMatch( return isRoleMatch(resolvedActorSpec, actorFilter, context); } - private boolean isUserMatch(final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter) { + private boolean isUserMatch( + final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter) { // If the actor is a matching "User" in the actor filter, return true immediately. - return actorFilter.isAllUsers() || (actorFilter.hasUsers() && Objects.requireNonNull(actorFilter.getUsers()) - .stream().map(Urn::toString) - .anyMatch(user -> user.equals(resolvedActorSpec.getSpec().getEntity()))); + return actorFilter.isAllUsers() + || (actorFilter.hasUsers() + && Objects.requireNonNull(actorFilter.getUsers()).stream() + .map(Urn::toString) + .anyMatch(user -> user.equals(resolvedActorSpec.getSpec().getEntity()))); } private boolean isGroupMatch( @@ -283,9 +299,10 @@ private boolean isGroupMatch( if (actorFilter.isAllGroups() || actorFilter.hasGroups()) { final Set<String> groups = resolveGroups(resolvedActorSpec, context); return (actorFilter.isAllGroups() && !groups.isEmpty()) - || (actorFilter.hasGroups() && Objects.requireNonNull(actorFilter.getGroups()) - .stream().map(Urn::toString) - .anyMatch(groups::contains)); + || (actorFilter.hasGroups() + && Objects.requireNonNull(actorFilter.getGroups()).stream() + .map(Urn::toString) + .anyMatch(groups::contains)); } // If there are no groups on the policy, return false for the group match. return false; @@ -296,7 +313,8 @@ private boolean isOwnerMatch( final DataHubActorFilter actorFilter, final Optional<ResolvedEntitySpec> requestResource, final PolicyEvaluationContext context) { - // If the policy does not apply to owners, or there is no resource to own, return false immediately. + // If the policy does not apply to owners, or there is no resource to own, return false + // immediately. if (!actorFilter.isResourceOwners() || requestResource.isEmpty()) { return false; } @@ -308,8 +326,12 @@ private Set<String> getOwnersForType(EntitySpec resourceSpec, List<Urn> ownershi Urn entityUrn = UrnUtils.getUrn(resourceSpec.getEntity()); EnvelopedAspect ownershipAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { return Collections.emptySet(); } @@ -328,7 +350,8 @@ private Set<String> getOwnersForType(EntitySpec resourceSpec, List<Urn> ownershi private boolean isActorOwner( final ResolvedEntitySpec resolvedActorSpec, - ResolvedEntitySpec resourceSpec, List<Urn> ownershipTypes, + ResolvedEntitySpec resourceSpec, + List<Urn> ownershipTypes, PolicyEvaluationContext context) { Set<String> owners = this.getOwnersForType(resourceSpec.getSpec(), ownershipTypes); if (isUserOwner(resolvedActorSpec, owners)) { @@ -357,12 +380,11 @@ private boolean isRoleMatch( } // If the actor has a matching "Role" in the actor filter, return true immediately. 
Set<Urn> actorRoles = resolveRoles(resolvedActorSpec, context); - return Objects.requireNonNull(actorFilter.getRoles()) - .stream() - .anyMatch(actorRoles::contains); + return Objects.requireNonNull(actorFilter.getRoles()).stream().anyMatch(actorRoles::contains); } - private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { + private Set<Urn> resolveRoles( + final ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.roles != null) { return context.roles; } @@ -374,14 +396,21 @@ private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy try { Urn actorUrn = Urn.createFromString(actor); - final EntityResponse corpUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, Collections.singleton(actorUrn), - Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), _systemAuthentication).get(actorUrn); + final EntityResponse corpUser = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(actorUrn), + Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), + _systemAuthentication) + .get(actorUrn); if (corpUser == null || !corpUser.hasAspects()) { return roles; } aspectMap = corpUser.getAspects(); } catch (Exception e) { - log.error(String.format("Failed to fetch %s for urn %s", ROLE_MEMBERSHIP_ASPECT_NAME, actor), e); + log.error( + String.format("Failed to fetch %s for urn %s", ROLE_MEMBERSHIP_ASPECT_NAME, actor), e); return roles; } @@ -389,7 +418,8 @@ private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy return roles; } - RoleMembership roleMembership = new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); + RoleMembership roleMembership = + new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); if (roleMembership.hasRoles()) { roles.addAll(roleMembership.getRoles()); context.setRoles(roles); @@ -397,7 +427,8 @@ private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy return roles; } - private Set<String> resolveGroups(ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { + private Set<String> resolveGroups( + ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.groups != null) { return context.groups; } @@ -408,9 +439,7 @@ private Set<String> resolveGroups(ResolvedEntitySpec resolvedActorSpec, PolicyEv return groups; } - /** - * Class used to store state across a single Policy evaluation. - */ + /** Class used to store state across a single Policy evaluation. */ static class PolicyEvaluationContext { private Set<String> groups; private Set<Urn> roles; @@ -424,9 +453,7 @@ public void setRoles(Set<Urn> roles) { } } - /** - * Class used to represent the result of a Policy evaluation - */ + /** Class used to represent the result of a Policy evaluation */ static class PolicyEvaluationResult { public static final PolicyEvaluationResult GRANTED = new PolicyEvaluationResult(true); public static final PolicyEvaluationResult DENIED = new PolicyEvaluationResult(false); @@ -442,9 +469,7 @@ public boolean isGranted() { } } - /** - * Class used to represent all valid users of a policy. - */ + /** Class used to represent all valid users of a policy. 
*/
  @Value
  @AllArgsConstructor(access = AccessLevel.PUBLIC)
  public static class PolicyActors {
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java
index c06da4d245f91..9c5950985eea4 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java
@@ -1,5 +1,8 @@
 package com.datahub.authorization;
 
+import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME;
+import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.entity.EntityResponse;
@@ -20,19 +23,12 @@ import java.util.Objects;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
+import javax.annotation.Nullable;
 import lombok.RequiredArgsConstructor;
 import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 
-import javax.annotation.Nullable;
-
-import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME;
-import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME;
-
-
-/**
- * Wrapper around entity client to fetch policies in a paged manner
- */
+/** Wrapper around entity client to fetch policies in a paged manner */
 @Slf4j
 @RequiredArgsConstructor
 public class PolicyFetcher {
@@ -42,49 +38,66 @@ public class PolicyFetcher {
       new SortCriterion().setField("lastUpdatedTimestamp").setOrder(SortOrder.DESCENDING);
 
   /**
-   * This is to provide a scroll implementation using the start/count api. It is not efficient
-   * and the scroll native functions should be used instead. This does fix a failure to fetch
-   * policies when deep pagination happens where there are >10k policies.
-   * Exists primarily to prevent breaking change to the graphql api.
+   * This is to provide a scroll implementation using the start/count api. It is not efficient, and
+   * the native scroll functions should be used instead. This does fix a failure to fetch policies
+   * when deep pagination happens where there are >10k policies. Exists primarily to prevent a
+   * breaking change to the GraphQL API.
   */
  @Deprecated
-  public CompletableFuture<PolicyFetchResult> fetchPolicies(int start, String query, int count, Authentication authentication) {
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        PolicyFetchResult result = PolicyFetchResult.EMPTY;
-        String scrollId = "";
-        int fetchedResults = 0;
-
-        while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) {
-          PolicyFetchResult tmpResult = fetchPolicies(query, count, scrollId.isEmpty() ? null : scrollId, authentication);
-          fetchedResults += tmpResult.getPolicies().size();
-          scrollId = tmpResult.getScrollId();
-          if (fetchedResults > start) {
-            result = tmpResult;
+  public CompletableFuture<PolicyFetchResult> fetchPolicies(
+      int start, String query, int count, Authentication authentication) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            PolicyFetchResult result = PolicyFetchResult.EMPTY;
+            String scrollId = "";
+            int fetchedResults = 0;
+
+            while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) {
+              PolicyFetchResult tmpResult =
+                  fetchPolicies(query, count, scrollId.isEmpty() ?
null : scrollId, authentication); + fetchedResults += tmpResult.getPolicies().size(); + scrollId = tmpResult.getScrollId(); + if (fetchedResults > start) { + result = tmpResult; + } + } + + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list policies", e); } - } - - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list policies", e); - } - }); + }); } - public PolicyFetchResult fetchPolicies(int count, @Nullable String scrollId, Authentication authentication) - throws RemoteInvocationException, URISyntaxException { + public PolicyFetchResult fetchPolicies( + int count, @Nullable String scrollId, Authentication authentication) + throws RemoteInvocationException, URISyntaxException { return fetchPolicies("", count, scrollId, authentication); } - public PolicyFetchResult fetchPolicies(String query, int count, @Nullable String scrollId, Authentication authentication) + public PolicyFetchResult fetchPolicies( + String query, int count, @Nullable String scrollId, Authentication authentication) throws RemoteInvocationException, URISyntaxException { log.debug(String.format("Batch fetching policies. count: %s, scroll: %s", count, scrollId)); // First fetch all policy urns - ScrollResult result = _entityClient.scrollAcrossEntities(List.of(POLICY_ENTITY_NAME), query, null, scrollId, - null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) - .setSkipHighlighting(true).setFulltext(true), authentication); - List<Urn> policyUrns = result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + ScrollResult result = + _entityClient.scrollAcrossEntities( + List.of(POLICY_ENTITY_NAME), + query, + null, + scrollId, + null, + count, + new SearchFlags() + .setSkipCache(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setFulltext(true), + authentication); + List<Urn> policyUrns = + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); if (policyUrns.isEmpty()) { return PolicyFetchResult.EMPTY; @@ -92,23 +105,29 @@ null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) // Fetch DataHubPolicyInfo aspects for each urn final Map<Urn, EntityResponse> policyEntities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); - return new PolicyFetchResult(policyUrns.stream() - .map(policyEntities::get) - .filter(Objects::nonNull) - .map(this::extractPolicy) - .filter(Objects::nonNull) - .collect(Collectors.toList()), result.getNumEntities(), result.getScrollId()); + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); + return new PolicyFetchResult( + policyUrns.stream() + .map(policyEntities::get) + .filter(Objects::nonNull) + .map(this::extractPolicy) + .filter(Objects::nonNull) + .collect(Collectors.toList()), + result.getNumEntities(), + result.getScrollId()); } private Policy extractPolicy(EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); if (!aspectMap.containsKey(DATAHUB_POLICY_INFO_ASPECT_NAME)) { - // Right after deleting the policy, there could be a small time frame where search and local db is not consistent. + // Right after deleting the policy, there could be a small time frame where search and local + // db is not consistent. 
// Simply return null in that case
      return null;
    }
-    return new Policy(entityResponse.getUrn(),
+    return new Policy(
+        entityResponse.getUrn(),
        new DataHubPolicyInfo(aspectMap.get(DATAHUB_POLICY_INFO_ASPECT_NAME).getValue().data()));
  }
 
@@ -116,10 +135,10 @@ private Policy extractPolicy(EntityResponse entityResponse) {
  public static class PolicyFetchResult {
    List<Policy> policies;
    int total;
-    @Nullable
-    String scrollId;
+    @Nullable String scrollId;
 
-    public static final PolicyFetchResult EMPTY = new PolicyFetchResult(Collections.emptyList(), 0, null);
+    public static final PolicyFetchResult EMPTY =
+        new PolicyFetchResult(Collections.emptyList(), 0, null);
  }
 
  @Value
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java
index cbb237654e969..c24c65725830f 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java
@@ -1,5 +1,7 @@
 package com.datahub.authorization.fieldresolverprovider;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authorization.EntityFieldType;
 import com.datahub.authorization.EntitySpec;
@@ -16,11 +18,7 @@ import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-
-/**
- * Provides field resolver for domain given resourceSpec
- */
+/** Provides field resolver for data platform instance given resourceSpec */
 @Slf4j
 @RequiredArgsConstructor
 public class DataPlatformInstanceFieldResolverProvider implements EntityFieldResolverProvider {
@@ -40,7 +38,8 @@ public FieldResolver getFieldResolver(EntitySpec entitySpec) {
 
   private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) {
     Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity());
-    // In the case that the entity is a platform instance, the associated platform instance entity is the instance itself
+    // In the case that the entity is a platform instance, the associated platform instance entity
+    // is the instance itself
     if (entityUrn.getEntityType().equals(DATA_PLATFORM_INSTANCE_ENTITY_NAME)) {
       return FieldResolver.FieldValue.builder()
           .values(Collections.singleton(entityUrn.toString()))
@@ -49,9 +48,14 @@ private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec)
 
     EnvelopedAspect dataPlatformInstanceAspect;
     try {
-      EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn,
-          Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME), _systemAuthentication);
-      if (response == null || !response.getAspects().containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) {
+      EntityResponse response =
+          _entityClient.getV2(
+              entityUrn.getEntityType(),
+              entityUrn,
+              Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME),
+              _systemAuthentication);
+      if (response == null
+          || !response.getAspects().containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) {
        return FieldResolver.emptyFieldValue();
      }
      dataPlatformInstanceAspect = response.getAspects().get(DATA_PLATFORM_INSTANCE_ASPECT_NAME);
@@ -59,12 +63,15 @@ private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec)
      log.error("Error while
retrieving platform instance aspect for urn {}", entityUrn, e); return FieldResolver.emptyFieldValue(); } - DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataPlatformInstanceAspect.getValue().data()); + DataPlatformInstance dataPlatformInstance = + new DataPlatformInstance(dataPlatformInstanceAspect.getValue().data()); if (dataPlatformInstance.getInstance() == null) { return FieldResolver.emptyFieldValue(); } return FieldResolver.FieldValue.builder() - .values(Collections.singleton(Objects.requireNonNull(dataPlatformInstance.getInstance()).toString())) + .values( + Collections.singleton( + Objects.requireNonNull(dataPlatformInstance.getInstance()).toString())) .build(); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java index 15d821b75c0bd..e99e13ce00145 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java @@ -1,9 +1,11 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.domain.DomainProperties; @@ -11,25 +13,17 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; - import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - - -/** - * Provides field resolver for domain given entitySpec - */ +/** Provides field resolver for domain given entitySpec */ @Slf4j @RequiredArgsConstructor public class DomainFieldResolverProvider implements EntityFieldResolverProvider { @@ -51,29 +45,35 @@ private Set<Urn> getBatchedParentDomains(@Nonnull final Set<Urn> urns) { final Set<Urn> parentUrns = new HashSet<>(); try { - final Map<Urn, EntityResponse> batchResponse = _entityClient.batchGetV2( - DOMAIN_ENTITY_NAME, - urns, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - _systemAuthentication - ); - - batchResponse.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); - if (properties.hasParentDomain()) { - parentUrns.add(properties.getParentDomain()); - } - } - }); + final Map<Urn, EntityResponse> batchResponse = + _entityClient.batchGetV2( + DOMAIN_ENTITY_NAME, + urns, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + _systemAuthentication); + + batchResponse.forEach( + (urn, entityResponse) -> { + if 
(entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new DomainProperties( + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data()); + if (properties.hasParentDomain()) { + parentUrns.add(properties.getParentDomain()); + } + } + }); } catch (Exception e) { log.error( "Error while retrieving parent domains for {} urns including \"{}\"", urns.size(), urns.stream().findFirst().map(Urn::toString).orElse(""), - e - ); + e); } return parentUrns; @@ -90,8 +90,12 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { final EnvelopedAspect domainsAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(DOMAINS_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(DOMAINS_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(DOMAINS_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } @@ -106,7 +110,8 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { * To avoid cycles we remove any parents we've already visited to prevent an infinite loop cycle. */ - final Set<Urn> domainUrns = new HashSet<>(new Domains(domainsAspect.getValue().data()).getDomains()); + final Set<Urn> domainUrns = + new HashSet<>(new Domains(domainsAspect.getValue().data()).getDomains()); Set<Urn> batchedParentUrns = getBatchedParentDomains(domainUrns); batchedParentUrns.removeAll(domainUrns); @@ -116,9 +121,8 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { batchedParentUrns.removeAll(domainUrns); } - return FieldResolver.FieldValue.builder().values(domainUrns - .stream() - .map(Object::toString) - .collect(Collectors.toSet())).build(); + return FieldResolver.FieldValue.builder() + .values(domainUrns.stream().map(Object::toString).collect(Collectors.toSet())) + .build(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java index 227d403a9cd1d..8cb612515e626 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java @@ -1,24 +1,20 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import java.util.List; - -/** - * Base class for defining a class that provides the field resolver for the given field type - */ +/** Base class for defining a class that provides the field resolver for the given field type */ public interface EntityFieldResolverProvider { /** * List of fields that this hydrator is hydrating. 
+ * * @return */ List<EntityFieldType> getFieldTypes(); - /** - * Return resolver for fetching the field values given the entity - */ + /** Return resolver for fetching the field values given the entity */ FieldResolver getFieldResolver(EntitySpec entitySpec); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java index addac84c68b18..d4dbf86172954 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java @@ -1,16 +1,13 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.List; - -/** - * Provides field resolver for entity type given entitySpec - */ +/** Provides field resolver for entity type given entitySpec */ public class EntityTypeFieldResolverProvider implements EntityFieldResolverProvider { @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java index 32960de687839..c4d27d959e023 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java @@ -1,16 +1,13 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.List; - -/** - * Provides field resolver for entity urn given entitySpec - */ +/** Provides field resolver for entity urn given entitySpec */ public class EntityUrnFieldResolverProvider implements EntityFieldResolverProvider { @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java index b1202d9f4bbd3..a64dc3a8b5db8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java @@ -1,33 +1,29 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME; + import com.datahub.authentication.Authentication; -import 
com.datahub.authorization.FieldResolver;
 import com.datahub.authorization.EntityFieldType;
 import com.datahub.authorization.EntitySpec;
+import com.datahub.authorization.FieldResolver;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.client.EntityClient;
+import com.linkedin.identity.GroupMembership;
 import com.linkedin.identity.NativeGroupMembership;
 import com.linkedin.metadata.Constants;
-import com.linkedin.identity.GroupMembership;
-import java.util.Collections;
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME;
-import static com.linkedin.metadata.Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME;
-
-
-/**
- * Provides field resolver for owners given entitySpec
- */
+/** Provides field resolver for group membership given entitySpec */
 @Slf4j
 @RequiredArgsConstructor
 public class GroupMembershipFieldResolverProvider implements EntityFieldResolverProvider {
@@ -51,21 +47,30 @@ private FieldResolver.FieldValue getGroupMembership(EntitySpec entitySpec) {
     EnvelopedAspect nativeGroupMembershipAspect;
     List<Urn> groups = new ArrayList<>();
     try {
-      EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn,
-          ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), _systemAuthentication);
+      EntityResponse response =
+          _entityClient.getV2(
+              entityUrn.getEntityType(),
+              entityUrn,
+              ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME),
+              _systemAuthentication);
       if (response == null
-          || !(response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME)
-          || response.getAspects().containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME))) {
+          || !(response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME)
+              || response
+                  .getAspects()
+                  .containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME))) {
         return FieldResolver.emptyFieldValue();
       }
       if (response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME)) {
         groupMembershipAspect = response.getAspects().get(Constants.GROUP_MEMBERSHIP_ASPECT_NAME);
-        GroupMembership groupMembership = new GroupMembership(groupMembershipAspect.getValue().data());
+        GroupMembership groupMembership =
+            new GroupMembership(groupMembershipAspect.getValue().data());
         groups.addAll(groupMembership.getGroups());
       }
       if (response.getAspects().containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) {
-        nativeGroupMembershipAspect = response.getAspects().get(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME);
-        NativeGroupMembership nativeGroupMembership = new NativeGroupMembership(nativeGroupMembershipAspect.getValue().data());
+        nativeGroupMembershipAspect =
+            response.getAspects().get(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME);
+        NativeGroupMembership nativeGroupMembership =
+            new NativeGroupMembership(nativeGroupMembershipAspect.getValue().data());
        groups.addAll(nativeGroupMembership.getNativeGroups());
      }
    } catch (Exception e) {
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java
b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java index 3c27f9e6ce8d7..d26082bab6d63 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java @@ -1,9 +1,9 @@ package com.datahub.authorization.fieldresolverprovider; import com.datahub.authentication.Authentication; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -17,10 +17,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - -/** - * Provides field resolver for owners given entitySpec - */ +/** Provides field resolver for owners given entitySpec */ @Slf4j @RequiredArgsConstructor public class OwnerFieldResolverProvider implements EntityFieldResolverProvider { @@ -42,8 +39,12 @@ private FieldResolver.FieldValue getOwners(EntitySpec entitySpec) { Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity()); EnvelopedAspect ownershipAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } @@ -54,7 +55,10 @@ private FieldResolver.FieldValue getOwners(EntitySpec entitySpec) { } Ownership ownership = new Ownership(ownershipAspect.getValue().data()); return FieldResolver.FieldValue.builder() - .values(ownership.getOwners().stream().map(owner -> owner.getOwner().toString()).collect(Collectors.toSet())) + .values( + ownership.getOwners().stream() + .map(owner -> owner.getOwner().toString()) + .collect(Collectors.toSet())) .build(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java index cd7ae5c3bffc4..51a700a935274 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java @@ -1,5 +1,8 @@ package com.datahub.authorization.role; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; @@ -14,35 +17,45 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class RoleService { private final EntityClient _entityClient; - public void batchAssignRoleToActors(@Nonnull final List<String> actors, @Nullable final Urn roleUrn, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public void batchAssignRoleToActors( + 
@Nonnull final List<String> actors, + @Nullable final Urn roleUrn, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { if (roleUrn != null && !_entityClient.exists(roleUrn, authentication)) { - throw new RuntimeException(String.format("Role %s does not exist. Skipping batch role assignment", roleUrn)); + throw new RuntimeException( + String.format("Role %s does not exist. Skipping batch role assignment", roleUrn)); } - actors.forEach(actor -> { - try { - assignRoleToActor(actor, roleUrn, authentication); - } catch (Exception e) { - log.warn(String.format("Failed to assign role %s to actor %s. Skipping actor assignment", roleUrn, actor), e); - } - }); + actors.forEach( + actor -> { + try { + assignRoleToActor(actor, roleUrn, authentication); + } catch (Exception e) { + log.warn( + String.format( + "Failed to assign role %s to actor %s. Skipping actor assignment", + roleUrn, actor), + e); + } + }); } - private void assignRoleToActor(@Nonnull final String actor, @Nullable final Urn roleUrn, - @Nonnull final Authentication authentication) throws URISyntaxException, RemoteInvocationException { + private void assignRoleToActor( + @Nonnull final String actor, + @Nullable final Urn roleUrn, + @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { final Urn actorUrn = Urn.createFromString(actor); if (!_entityClient.exists(actorUrn, authentication)) { - log.warn(String.format("Failed to assign role %s to actor %s, actor does not exist. Skipping actor assignment", - roleUrn, actor)); + log.warn( + String.format( + "Failed to assign role %s to actor %s, actor does not exist. Skipping actor assignment", + roleUrn, actor)); return; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java index ac27e1a16c8b7..dc63b5e4a2897 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java @@ -1,5 +1,7 @@ package com.datahub.telemetry; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; @@ -27,9 +29,6 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class TrackingService { @@ -56,11 +55,29 @@ public class TrackingService { private static final String INTERVAL_FIELD = "interval"; private static final String VIEW_TYPE_FIELD = "viewType"; - private static final Set<String> ALLOWED_EVENT_FIELDS = new HashSet<>( - ImmutableList.of(EVENT_TYPE_FIELD, ENTITY_TYPE_FIELD, ENTITY_TYPE_FILTER_FIELD, - PAGE_NUMBER_FIELD, PAGE_FIELD, TOTAL_FIELD, INDEX_FIELD, RESULT_TYPE_FIELD, RENDER_ID_FIELD, MODULE_ID_FIELD, - RENDER_TYPE_FIELD, SCENARIO_TYPE_FIELD, SECTION_FIELD, ACCESS_TOKEN_TYPE_FIELD, DURATION_FIELD, - ROLE_URN_FIELD, POLICY_URN_FIELD, SOURCE_TYPE_FIELD, INTERVAL_FIELD, VIEW_TYPE_FIELD)); + private static final Set<String> ALLOWED_EVENT_FIELDS = + new HashSet<>( + ImmutableList.of( + EVENT_TYPE_FIELD, + ENTITY_TYPE_FIELD, + ENTITY_TYPE_FILTER_FIELD, + PAGE_NUMBER_FIELD, + PAGE_FIELD, + TOTAL_FIELD, + INDEX_FIELD, + RESULT_TYPE_FIELD, + RENDER_ID_FIELD, + MODULE_ID_FIELD, + RENDER_TYPE_FIELD, + SCENARIO_TYPE_FIELD, + SECTION_FIELD, + 
ACCESS_TOKEN_TYPE_FIELD, + DURATION_FIELD, + ROLE_URN_FIELD, + POLICY_URN_FIELD, + SOURCE_TYPE_FIELD, + INTERVAL_FIELD, + VIEW_TYPE_FIELD)); private static final String ACTOR_URN_FIELD = "actorUrn"; private static final String ORIGIN_FIELD = "origin"; @@ -72,9 +89,20 @@ public class TrackingService { private static final String USER_URN_FIELD = "userUrn"; private static final String USER_URNS_FIELD = "userUrns"; private static final String PARENT_NODE_URN_FIELD = "parentNodeUrn"; - private static final Set<String> ALLOWED_OBFUSCATED_EVENT_FIELDS = new HashSet<>( - ImmutableList.of(ACTOR_URN_FIELD, ORIGIN_FIELD, ENTITY_URN_FIELD, ENTITY_URNS_FIELD, GROUP_NAME_FIELD, - SECTION_FIELD, ENTITY_PAGE_FILTER_FIELD, PATH_FIELD, USER_URN_FIELD, USER_URNS_FIELD, PARENT_NODE_URN_FIELD)); + private static final Set<String> ALLOWED_OBFUSCATED_EVENT_FIELDS = + new HashSet<>( + ImmutableList.of( + ACTOR_URN_FIELD, + ORIGIN_FIELD, + ENTITY_URN_FIELD, + ENTITY_URNS_FIELD, + GROUP_NAME_FIELD, + SECTION_FIELD, + ENTITY_PAGE_FILTER_FIELD, + PATH_FIELD, + USER_URN_FIELD, + USER_URNS_FIELD, + PARENT_NODE_URN_FIELD)); private final MixpanelAPI _mixpanelAPI; private final MessageBuilder _mixpanelMessageBuilder; @@ -100,9 +128,11 @@ public void emitAnalyticsEvent(@Nonnull final JsonNode event) { } try { - _mixpanelAPI.sendMessage(_mixpanelMessageBuilder.event(getClientId(), eventType, sanitizedEvent)); + _mixpanelAPI.sendMessage( + _mixpanelMessageBuilder.event(getClientId(), eventType, sanitizedEvent)); } catch (IOException e) { - log.info("Failed to send event to Mixpanel; this does not affect the functionality of the application"); + log.info( + "Failed to send event to Mixpanel; this does not affect the functionality of the application"); log.debug("Failed to send event to Mixpanel", e); } } @@ -134,7 +164,8 @@ JSONObject sanitizeEvent(@Nonnull final JsonNode event) { final JSONObject unsanitizedEventObj; try { - unsanitizedEventObj = new JSONObject(_objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(event)); + unsanitizedEventObj = + new JSONObject(_objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(event)); } catch (Exception e) { log.warn("Failed to serialize event", e); return createFailedEvent(); @@ -145,18 +176,25 @@ JSONObject sanitizeEvent(@Nonnull final JsonNode event) { return createFailedEvent(); } - unsanitizedEventObj.keys().forEachRemaining(key -> { - String keyString = (String) key; - try { - if (ALLOWED_EVENT_FIELDS.contains(keyString)) { - sanitizedEventObj.put(keyString, unsanitizedEventObj.get(keyString).toString()); - } else if (ALLOWED_OBFUSCATED_EVENT_FIELDS.contains(keyString)) { - sanitizedEventObj.put(keyString, _secretService.hashString(unsanitizedEventObj.get(keyString).toString())); - } - } catch (JSONException e) { - log.warn(String.format("Failed to sanitize field %s. Skipping this field.", keyString), e); - } - }); + unsanitizedEventObj + .keys() + .forEachRemaining( + key -> { + String keyString = (String) key; + try { + if (ALLOWED_EVENT_FIELDS.contains(keyString)) { + sanitizedEventObj.put(keyString, unsanitizedEventObj.get(keyString).toString()); + } else if (ALLOWED_OBFUSCATED_EVENT_FIELDS.contains(keyString)) { + sanitizedEventObj.put( + keyString, + _secretService.hashString(unsanitizedEventObj.get(keyString).toString())); + } + } catch (JSONException e) { + log.warn( + String.format("Failed to sanitize field %s. 
Skipping this field.", keyString), + e); + } + }); return transformObjectNodeToJSONObject(sanitizedEventObj); } @@ -189,8 +227,8 @@ private static String createClientIdIfNotPresent(@Nonnull final EntityService en final AuditStamp clientIdStamp = new AuditStamp(); clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); clientIdStamp.setTime(System.currentTimeMillis()); - entityService.ingestAspectIfNotPresent(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, - null); + entityService.ingestAspectIfNotPresent( + UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); return uuid; } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java index 2e25493133b43..5b5702de4381a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java @@ -1,18 +1,17 @@ package com.datahub.authentication.authenticator; -import com.datahub.authentication.Authentication; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; +import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; +import com.datahub.plugins.auth.authentication.Authenticator; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - public class AuthenticatorChainTest { @Test @@ -23,7 +22,8 @@ public void testAuthenticateSuccess() throws Exception { final Authentication mockAuthentication = Mockito.mock(Authentication.class); Mockito.when(mockAuthenticator1.authenticate(Mockito.any())).thenReturn(mockAuthentication); - Mockito.when(mockAuthenticator2.authenticate(Mockito.any())).thenThrow(new AuthenticationException("Failed to authenticate")); + Mockito.when(mockAuthenticator2.authenticate(Mockito.any())) + .thenThrow(new AuthenticationException("Failed to authenticate")); authenticatorChain.register(mockAuthenticator1); authenticatorChain.register(mockAuthenticator2); @@ -40,13 +40,13 @@ public void testAuthenticateSuccess() throws Exception { verify(mockAuthenticator2, times(0)).authenticate(any()); } - @Test public void testAuthenticateFailure() throws Exception { final AuthenticatorChain authenticatorChain = new AuthenticatorChain(); final Authenticator mockAuthenticator = Mockito.mock(Authenticator.class); final Authentication mockAuthentication = Mockito.mock(Authentication.class); - Mockito.when(mockAuthenticator.authenticate(Mockito.any())).thenThrow(new AuthenticationException("Failed to authenticate")); + Mockito.when(mockAuthenticator.authenticate(Mockito.any())) + .thenThrow(new AuthenticationException("Failed to authenticate")); authenticatorChain.register(mockAuthenticator); @@ -55,7 +55,8 @@ public void testAuthenticateFailure() throws Exception { Authentication result = authenticatorChain.authenticate(mockContext, false); - // If the authenticator throws, verify that null is returned to 
indicate failure to authenticate. + // If the authenticator throws, verify that null is returned to indicate failure to + // authenticate. assertNull(result); } @@ -64,13 +65,16 @@ public void testAuthenticateThrows() throws Exception { final AuthenticatorChain authenticatorChain = new AuthenticatorChain(); final Authenticator mockAuthenticator = Mockito.mock(Authenticator.class); final Authentication mockAuthentication = Mockito.mock(Authentication.class); - Mockito.when(mockAuthenticator.authenticate(Mockito.any())).thenThrow(new AuthenticationExpiredException("Failed to authenticate, token has expired")); + Mockito.when(mockAuthenticator.authenticate(Mockito.any())) + .thenThrow(new AuthenticationExpiredException("Failed to authenticate, token has expired")); authenticatorChain.register(mockAuthenticator); // Verify that the mock authentication is returned on Authenticate. final AuthenticationRequest mockContext = Mockito.mock(AuthenticationRequest.class); - assertThrows(AuthenticationExpiredException.class, () -> authenticatorChain.authenticate(mockContext, false)); + assertThrows( + AuthenticationExpiredException.class, + () -> authenticatorChain.authenticate(mockContext, false)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java index 759ecaa8f3a4d..62395c77e3847 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java @@ -1,9 +1,12 @@ package com.datahub.authentication.authenticator; +import static org.mockito.Mockito.*; +import static org.testng.AssertJUnit.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; -import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticationException; +import com.datahub.authentication.AuthenticationRequest; import com.google.common.collect.ImmutableMap; import java.util.HashMap; import java.util.HashSet; @@ -11,10 +14,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.AssertJUnit.*; - - public class DataHubJwtTokenAuthenticatorTest { @Test @@ -28,14 +27,16 @@ void testPublicAuthentication() throws Exception { HashSet<String> set = new HashSet<>(); set.add("https://test.com/realm/domain"); - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); Map<String, Object> config = new HashMap<>(); config.put("userIdClaim", "username"); config.put("trustedIssuers", getTrustedIssuer()); - config.put("publicKey", + config.put( + "publicKey", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu1SU1LfVLPHCozMxH2Mo4lgOEePzNm0tRgeLezV6ffAt0gunVTLw7onLRnrq0/" + "IzW7yWR7QkrmBL7jTKEn5u+qKhbwKfBstIs+bMY2Zkp18gnTxKLxoS2tFczGkPLPgizskuemMghRniWaoLcyehkd3qqGElvW/VDL5AaWTg0nLVkjRo9z+40RQzuVaE" + "8AkAFmxZzow3x+VJYKdjykkJ0iT9wCS0DRTXu269V264Vf/3jvredZiKRkgwlL9xNAwxXFg0x/XFw005UWVRIkdgcKWTjpBP2dPwVZ4WWC+9aGVd+Gyn1o0CLelf" @@ 
-59,7 +60,8 @@ void testInvalidToken() throws Exception { + "L5lrwEO-rTXYNamy8gJOBoM8n7gHDOo6JDd25go4MsLbjHbQ-WNq5SErgaNOMfZdkg2jqKVldZvjW33v8aupx08fzONnuzaYIJBQpONhGzDkYZKkk" + "rewdrYYVl_naNRWsKt8uSVu83G3mLhMPazkxNT5CWfNR7sdXfladz8U6ruLFOGUJJ5KDjEVAReRpEbxaKOIY6oFio1TeUQsi" + "6vppLXB0RupTBmE5dr7rxdL4j9eDY94M2uowBDuOsEGA"; - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); @@ -84,14 +86,16 @@ void testUserClaim() throws Exception { HashSet<String> set = new HashSet<>(); set.add("https://test.com/realm/domain"); - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); Map<String, Object> config = new HashMap<>(); config.put("userId", "username"); config.put("trustedIssuers", getTrustedIssuer()); - config.put("publicKey", + config.put( + "publicKey", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu1SU1LfVLPHCozMxH2Mo4lgOEePzNm0tRgeLezV6" + "ffAt0gunVTLw7onLRnrq0/IzW7yWR7QkrmBL7jTKEn5u+qKhbwKfBstIs+bMY2Zkp18gnTxKLxoS2tFczGkPLPgizskuemM" + "ghRniWaoLcyehkd3qqGElvW/VDL5AaWTg0nLVkjRo9z+40RQzuVaE8AkAFmxZzow3x+VJYKdjykkJ0iT9wCS0DRTXu269V26" diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java index 72b2fd5769715..819caa80d3417 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; @@ -8,9 +11,6 @@ import java.util.Collections; import org.testng.annotations.Test; -import static com.datahub.authentication.AuthenticationConstants.*; -import static org.testng.Assert.*; - public class DataHubSystemAuthenticatorTest { private static final String TEST_CLIENT_ID = "clientId"; @@ -21,17 +21,33 @@ public void testInit() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); assertThrows(() -> authenticator.init(null, null)); assertThrows(() -> authenticator.init(Collections.emptyMap(), null)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID), null)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null)); + assertThrows( + () -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID), null)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of(SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null)); // Correct configs provided. 
-    authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null);
+    authenticator.init(
+        ImmutableMap.of(
+            SYSTEM_CLIENT_ID_CONFIG,
+            TEST_CLIENT_ID,
+            SYSTEM_CLIENT_SECRET_CONFIG,
+            TEST_CLIENT_SECRET),
+        null);
   }

   @Test
   public void testAuthenticateFailureMissingAuthorizationHeader() {
     final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator();
-    authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null);
+    authenticator.init(
+        ImmutableMap.of(
+            SYSTEM_CLIENT_ID_CONFIG,
+            TEST_CLIENT_ID,
+            SYSTEM_CLIENT_SECRET_CONFIG,
+            TEST_CLIENT_SECRET),
+        null);

     final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap());
     assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
@@ -40,22 +56,39 @@ public void testAuthenticateFailureMissingAuthorizationHeader() {
   @Test
   public void testAuthenticateFailureMissingBasicCredentials() {
     final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator();
-    authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null);
-
-    final AuthenticationRequest context = new AuthenticationRequest(
-        ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer something") // Missing basic authentication.
-    );
+    authenticator.init(
+        ImmutableMap.of(
+            SYSTEM_CLIENT_ID_CONFIG,
+            TEST_CLIENT_ID,
+            SYSTEM_CLIENT_SECRET_CONFIG,
+            TEST_CLIENT_SECRET),
+        null);
+
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(
+                AUTHORIZATION_HEADER_NAME, "Bearer something") // Missing basic authentication.
+            );
     assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
   }

   @Test
   public void testAuthenticateFailureMismatchingCredentials() {
     final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator();
-    authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null);
-
-    final AuthenticationRequest context = new AuthenticationRequest(
-        ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic incorrectId:incorrectSecret") // Incorrect authentication
-    );
+    authenticator.init(
+        ImmutableMap.of(
+            SYSTEM_CLIENT_ID_CONFIG,
+            TEST_CLIENT_ID,
+            SYSTEM_CLIENT_SECRET_CONFIG,
+            TEST_CLIENT_SECRET),
+        null);
+
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(
+                AUTHORIZATION_HEADER_NAME,
+                "Basic incorrectId:incorrectSecret") // Incorrect authentication
+            );
     assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
   }

@@ -63,12 +96,19 @@ public void testAuthenticateFailureMismatchingCredentials() {
   public void testAuthenticateSuccessNoDelegatedActor() throws Exception {
     final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator();

-    authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null);
-
-    final String authorizationHeaderValue = String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET);
-    final AuthenticationRequest context = new AuthenticationRequest(
-        ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue)
-    );
+    authenticator.init(
+        ImmutableMap.of(
+            SYSTEM_CLIENT_ID_CONFIG,
+            TEST_CLIENT_ID,
+            SYSTEM_CLIENT_SECRET_CONFIG,
+            TEST_CLIENT_SECRET),
+        null);
+
+    final String authorizationHeaderValue =
+        String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET);
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue));

     final Authentication authentication = authenticator.authenticate(context);

@@ -84,13 +124,23 @@ public void testAuthenticateSuccessNoDelegatedActor() throws Exception {
   public void testAuthenticateSuccessDelegatedActor() throws Exception {
     final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator();

-    authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null);
-
-    final String authorizationHeaderValue = String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET);
-    final AuthenticationRequest context = new AuthenticationRequest(
+    authenticator.init(
         ImmutableMap.of(
-            AUTHORIZATION_HEADER_NAME, authorizationHeaderValue, LEGACY_X_DATAHUB_ACTOR_HEADER, "urn:li:corpuser:datahub")
-    );
+            SYSTEM_CLIENT_ID_CONFIG,
+            TEST_CLIENT_ID,
+            SYSTEM_CLIENT_SECRET_CONFIG,
+            TEST_CLIENT_SECRET),
+        null);
+
+    final String authorizationHeaderValue =
+        String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET);
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(
+                AUTHORIZATION_HEADER_NAME,
+                authorizationHeaderValue,
+                LEGACY_X_DATAHUB_ACTOR_HEADER,
+                "urn:li:corpuser:datahub"));

     final Authentication authentication = authenticator.authenticate(context);

@@ -101,4 +151,4 @@ public void testAuthenticateSuccessDelegatedActor() throws Exception {
     assertEquals(authentication.getCredentials(), authorizationHeaderValue);
     assertEquals(authentication.getClaims(), Collections.emptyMap());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java
index f5ce938c411c6..5bd273f3bacf8 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java
@@ -1,5 +1,17 @@
 package com.datahub.authentication.authenticator;

+import static com.datahub.authentication.AuthenticationConstants.*;
+import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SALT_CONFIG_NAME;
+import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_ALG_CONFIG_NAME;
+import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_KEY_CONFIG_NAME;
+import static com.datahub.authentication.token.TokenClaims.ACTOR_ID_CLAIM_NAME;
+import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME;
+import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME;
+import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertThrows;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -15,130 +27,167 @@
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
-import org.mockito.Mockito;
-import org.testng.annotations.Test;
 import java.util.Collections;
 import java.util.Map;
-
-import static com.datahub.authentication.AuthenticationConstants.*;
-import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SALT_CONFIG_NAME;
-import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_ALG_CONFIG_NAME;
-import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_KEY_CONFIG_NAME;
-import static com.datahub.authentication.token.TokenClaims.ACTOR_ID_CLAIM_NAME;
-import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME;
-import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME;
-import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertThrows;
-
+import org.mockito.Mockito;
+import org.testng.annotations.Test;

 public class DataHubTokenAuthenticatorTest {

-  private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=";
-  private static final String TEST_SALT = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI93=";
-
-  final EntityService mockService = Mockito.mock(EntityService.class);
-  final StatefulTokenService statefulTokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALT);
-
-  @Test
-  public void testInit() {
-    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
-    AuthenticatorContext authenticatorContext =
-        new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService));
-    assertThrows(() -> authenticator.init(null, authenticatorContext));
-    assertThrows(() -> authenticator.init(Collections.emptyMap(), authenticatorContext));
-    assertThrows(() -> authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY,
-        SIGNING_ALG_CONFIG_NAME, "UNSUPPORTED_ALG"), authenticatorContext));
-    assertThrows(() -> authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY,
-        SIGNING_ALG_CONFIG_NAME, "HS256"), null));
-
-    // Correct configs provided.
-    authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME,
-        TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), authenticatorContext);
-  }
-
-  @Test
-  public void testAuthenticateFailureMissingAuthorizationHeader() {
-    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
-
-    authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME,
-        TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"),
-        new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
-
-    final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap());
-    assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
-  }
-
-  @Test
-  public void testAuthenticateFailureMissingBearerCredentials() {
-    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
-    authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME,
-        TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"),
-        new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
-
-    final AuthenticationRequest context = new AuthenticationRequest(
-        ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic username:password")
-    );
-    assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
-  }
-
-  @Test
-  public void testAuthenticateFailureInvalidToken() {
-    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
-
-    authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME,
-        TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"),
-        new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
-
-    final AuthenticationRequest context = new AuthenticationRequest(
-        ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer someRandomToken")
-    );
-    assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
-  }
-
-  @Test
-  public void testAuthenticateSuccess() throws Exception {
-    PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader()
-        .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
-    final ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry(
-        DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml"));
-    final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec();
-    Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec);
-    Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true);
-    Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry);
-
-    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
-    authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME,
-        TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"),
-        new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
-
-    final Actor datahub = new Actor(ActorType.USER, "datahub");
-    final String validToken = authenticator._statefulTokenService.generateAccessToken(
-        TokenType.PERSONAL,
-        datahub,
-        "some token",
-        "A token description",
-        datahub.toUrnStr()
-    );
-
-    final String authorizationHeaderValue = String.format("Bearer %s", validToken);
-    final AuthenticationRequest context = new AuthenticationRequest(
-        ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue)
-    );
-
-    final Authentication authentication = authenticator.authenticate(context);
-
-    // Validate the resulting authentication object
-    assertNotNull(authentication);
-    assertEquals(authentication.getActor().getType(), ActorType.USER);
-    assertEquals(authentication.getActor().getId(), "datahub");
-    assertEquals(authentication.getCredentials(), authorizationHeaderValue);
-
-    Map<String, Object> claimsMap = authentication.getClaims();
-    assertEquals(claimsMap.get(TOKEN_VERSION_CLAIM_NAME), 2);
-    assertEquals(claimsMap.get(TOKEN_TYPE_CLAIM_NAME), "PERSONAL");
-    assertEquals(claimsMap.get(ACTOR_TYPE_CLAIM_NAME), "USER");
-    assertEquals(claimsMap.get(ACTOR_ID_CLAIM_NAME), "datahub");
-  }
+  private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=";
+  private static final String TEST_SALT = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI93=";
+
+  final EntityService mockService = Mockito.mock(EntityService.class);
+  final StatefulTokenService statefulTokenService =
+      new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALT);
+
+  @Test
+  public void testInit() {
+    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
+    AuthenticatorContext authenticatorContext =
+        new AuthenticatorContext(
+            ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService));
+    assertThrows(() -> authenticator.init(null, authenticatorContext));
+    assertThrows(() -> authenticator.init(Collections.emptyMap(), authenticatorContext));
+    assertThrows(
+        () ->
+            authenticator.init(
+                ImmutableMap.of(
+                    SIGNING_KEY_CONFIG_NAME,
+                    TEST_SIGNING_KEY,
+                    SIGNING_ALG_CONFIG_NAME,
+                    "UNSUPPORTED_ALG"),
+                authenticatorContext));
+    assertThrows(
+        () ->
+            authenticator.init(
+                ImmutableMap.of(
+                    SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SIGNING_ALG_CONFIG_NAME, "HS256"),
+                null));
+
+    // Correct configs provided.
+    authenticator.init(
+        ImmutableMap.of(
+            SIGNING_KEY_CONFIG_NAME,
+            TEST_SIGNING_KEY,
+            SALT_CONFIG_NAME,
+            TEST_SALT,
+            SIGNING_ALG_CONFIG_NAME,
+            "HS256"),
+        authenticatorContext);
+  }
+
+  @Test
+  public void testAuthenticateFailureMissingAuthorizationHeader() {
+    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
+
+    authenticator.init(
+        ImmutableMap.of(
+            SIGNING_KEY_CONFIG_NAME,
+            TEST_SIGNING_KEY,
+            SALT_CONFIG_NAME,
+            TEST_SALT,
+            SIGNING_ALG_CONFIG_NAME,
+            "HS256"),
+        new AuthenticatorContext(
+            ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
+
+    final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap());
+    assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
+  }
+
+  @Test
+  public void testAuthenticateFailureMissingBearerCredentials() {
+    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
+    authenticator.init(
+        ImmutableMap.of(
+            SIGNING_KEY_CONFIG_NAME,
+            TEST_SIGNING_KEY,
+            SALT_CONFIG_NAME,
+            TEST_SALT,
+            SIGNING_ALG_CONFIG_NAME,
+            "HS256"),
+        new AuthenticatorContext(
+            ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
+
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic username:password"));
+    assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
+  }
+
+  @Test
+  public void testAuthenticateFailureInvalidToken() {
+    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
+
+    authenticator.init(
+        ImmutableMap.of(
+            SIGNING_KEY_CONFIG_NAME,
+            TEST_SIGNING_KEY,
+            SALT_CONFIG_NAME,
+            TEST_SALT,
+            SIGNING_ALG_CONFIG_NAME,
+            "HS256"),
+        new AuthenticatorContext(
+            ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
+
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer someRandomToken"));
+    assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context));
+  }
+
+  @Test
+  public void testAuthenticateSuccess() throws Exception {
+    PathSpecBasedSchemaAnnotationVisitor.class
+        .getClassLoader()
+        .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
+    final ConfigEntityRegistry configEntityRegistry =
+        new ConfigEntityRegistry(
+            DataHubTokenAuthenticatorTest.class
+                .getClassLoader()
+                .getResourceAsStream("test-entity-registry.yaml"));
+    final AspectSpec keyAspectSpec =
+        configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec();
+    Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME)))
+        .thenReturn(keyAspectSpec);
+    Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true);
+    Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry);
+
+    final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
+    authenticator.init(
+        ImmutableMap.of(
+            SIGNING_KEY_CONFIG_NAME,
+            TEST_SIGNING_KEY,
+            SALT_CONFIG_NAME,
+            TEST_SALT,
+            SIGNING_ALG_CONFIG_NAME,
+            "HS256"),
+        new AuthenticatorContext(
+            ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)));
+
+    final Actor datahub = new Actor(ActorType.USER, "datahub");
+    final String validToken =
+        authenticator._statefulTokenService.generateAccessToken(
+            TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr());
+
+    final String authorizationHeaderValue = String.format("Bearer %s", validToken);
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue));
+
+    final Authentication authentication = authenticator.authenticate(context);
+
+    // Validate the resulting authentication object
+    assertNotNull(authentication);
+    assertEquals(authentication.getActor().getType(), ActorType.USER);
+    assertEquals(authentication.getActor().getId(), "datahub");
+    assertEquals(authentication.getCredentials(), authorizationHeaderValue);
+
+    Map<String, Object> claimsMap = authentication.getClaims();
+    assertEquals(claimsMap.get(TOKEN_VERSION_CLAIM_NAME), 2);
+    assertEquals(claimsMap.get(TOKEN_TYPE_CLAIM_NAME), "PERSONAL");
+    assertEquals(claimsMap.get(ACTOR_TYPE_CLAIM_NAME), "USER");
+    assertEquals(claimsMap.get(ACTOR_ID_CLAIM_NAME), "datahub");
+  }
 }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java
index 81cf94d3bfe02..6d0678d4f3558 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java
@@ -1,5 +1,9 @@
 package com.datahub.authentication.group;

+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -30,11 +34,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;

-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class GroupServiceTest {

   private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system";
@@ -67,20 +66,36 @@ public void setupTest() throws Exception {
     _groupKey.setName(GROUP_ID);

     NativeGroupMembership nativeGroupMembership = new NativeGroupMembership();
-    nativeGroupMembership.setNativeGroups(new UrnArray(Urn.createFromString(NATIVE_GROUP_URN_STRING)));
+    nativeGroupMembership.setNativeGroups(
+        new UrnArray(Urn.createFromString(NATIVE_GROUP_URN_STRING)));
     GroupMembership groupMembership = new GroupMembership();
     groupMembership.setGroups(new UrnArray(Urn.createFromString(EXTERNAL_GROUP_URN_STRING)));

-    _entityResponseMap = ImmutableMap.of(USER_URN, new EntityResponse().setEntityName(CORP_USER_ENTITY_NAME)
-        .setUrn(USER_URN)
-        .setAspects(new EnvelopedAspectMap(ImmutableMap.of(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data())), GROUP_MEMBERSHIP_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))))));
-
-    _entityRelationships = new EntityRelationships().setStart(0)
-        .setCount(1)
-        .setTotal(1)
-        .setRelationships(new EntityRelationshipArray(ImmutableList.of(
-            new EntityRelationship().setEntity(USER_URN).setType(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME))));
+    _entityResponseMap =
+        ImmutableMap.of(
+            USER_URN,
+            new EntityResponse()
+                .setEntityName(CORP_USER_ENTITY_NAME)
+                .setUrn(USER_URN)
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME,
+                            new EnvelopedAspect()
+                                .setValue(new Aspect(nativeGroupMembership.data())),
+                            GROUP_MEMBERSHIP_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))))));
+
+    _entityRelationships =
+        new EntityRelationships()
+            .setStart(0)
+            .setCount(1)
+            .setTotal(1)
+            .setRelationships(
+                new EntityRelationshipArray(
+                    ImmutableList.of(
+                        new EntityRelationship()
+                            .setEntity(USER_URN)
+                            .setType(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME))));

     _entityClient = mock(EntityClient.class);
     _entityService = mock(EntityService.class);
@@ -118,7 +133,8 @@ public void testGetGroupOriginNullArguments() {
   @Test
   public void testGetGroupOriginPasses() {
     Origin groupOrigin = mock(Origin.class);
-    when(_entityService.getLatestAspect(eq(_groupUrn), eq(ORIGIN_ASPECT_NAME))).thenReturn(groupOrigin);
+    when(_entityService.getLatestAspect(eq(_groupUrn), eq(ORIGIN_ASPECT_NAME)))
+        .thenReturn(groupOrigin);

     assertEquals(groupOrigin, _groupService.getGroupOrigin(_groupUrn));
   }
@@ -132,8 +148,9 @@ public void testAddUserToNativeGroupNullArguments() {
   @Test
   public void testAddUserToNativeGroupPasses() throws Exception {
     when(_entityService.exists(USER_URN)).thenReturn(true);
-    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn(
-        _entityResponseMap);
+    when(_entityClient.batchGetV2(
+            eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(_entityResponseMap);

     _groupService.addUserToNativeGroup(USER_URN, _groupUrn, SYSTEM_AUTHENTICATION);
     verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
@@ -141,68 +158,101 @@ public void testAddUserToNativeGroupPasses() throws Exception {

   @Test
   public void testCreateNativeGroupNullArguments() {
-    assertThrows(() -> _groupService.createNativeGroup(null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
-    assertThrows(() -> _groupService.createNativeGroup(_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
-    assertThrows(() -> _groupService.createNativeGroup(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.createNativeGroup(
+                null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.createNativeGroup(
+                _groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _groupService.createNativeGroup(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testCreateNativeGroupPasses() throws Exception {
-    _groupService.createNativeGroup(_groupKey, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION);
+    _groupService.createNativeGroup(
+        _groupKey, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION);
     verify(_entityClient, times(2)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testRemoveExistingNativeGroupMembersNullArguments() {
-    assertThrows(() -> _groupService.removeExistingNativeGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION));
-    assertThrows(() -> _groupService.removeExistingNativeGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.removeExistingNativeGroupMembers(
+                null, USER_URN_LIST, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.removeExistingNativeGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION));
   }

   @Test
-  public void testRemoveExistingNativeGroupMembersGroupNotInNativeGroupMembership() throws Exception {
-    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn(
-        _entityResponseMap);
-
-    _groupService.removeExistingNativeGroupMembers(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST,
-        SYSTEM_AUTHENTICATION);
+  public void testRemoveExistingNativeGroupMembersGroupNotInNativeGroupMembership()
+      throws Exception {
+    when(_entityClient.batchGetV2(
+            eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(_entityResponseMap);
+
+    _groupService.removeExistingNativeGroupMembers(
+        Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION);
     verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testRemoveExistingNativeGroupMembersPasses() throws Exception {
-    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn(
-        _entityResponseMap);
+    when(_entityClient.batchGetV2(
+            eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(_entityResponseMap);

-    _groupService.removeExistingNativeGroupMembers(Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST,
-        SYSTEM_AUTHENTICATION);
+    _groupService.removeExistingNativeGroupMembers(
+        Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION);
     verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testMigrateGroupMembershipToNativeGroupMembershipNullArguments() {
-    assertThrows(() -> _groupService.migrateGroupMembershipToNativeGroupMembership(null, USER_URN.toString(),
-        SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.migrateGroupMembershipToNativeGroupMembership(
+                null, USER_URN.toString(), SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testMigrateGroupMembershipToNativeGroupMembershipPasses() throws Exception {
-    when(_graphClient.getRelatedEntities(eq(EXTERNAL_GROUP_URN_STRING),
-        eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), eq(RelationshipDirection.INCOMING), anyInt(),
-        anyInt(), any())).thenReturn(_entityRelationships);
-    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn(
-        _entityResponseMap);
+    when(_graphClient.getRelatedEntities(
+            eq(EXTERNAL_GROUP_URN_STRING),
+            eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)),
+            eq(RelationshipDirection.INCOMING),
+            anyInt(),
+            anyInt(),
+            any()))
+        .thenReturn(_entityRelationships);
+    when(_entityClient.batchGetV2(
+            eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(_entityResponseMap);
     when(_entityService.exists(USER_URN)).thenReturn(true);

-    _groupService.migrateGroupMembershipToNativeGroupMembership(Urn.createFromString(EXTERNAL_GROUP_URN_STRING),
-        USER_URN.toString(), SYSTEM_AUTHENTICATION);
+    _groupService.migrateGroupMembershipToNativeGroupMembership(
+        Urn.createFromString(EXTERNAL_GROUP_URN_STRING),
+        USER_URN.toString(),
+        SYSTEM_AUTHENTICATION);
     verify(_entityClient, times(3)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testCreateGroupInfoNullArguments() {
-    assertThrows(() -> _groupService.createGroupInfo(null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
-    assertThrows(() -> _groupService.createGroupInfo(_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
-    assertThrows(() -> _groupService.createGroupInfo(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.createGroupInfo(
+                null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () ->
+            _groupService.createGroupInfo(
+                _groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _groupService.createGroupInfo(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION));
   }

   @Test
@@ -229,36 +279,46 @@ public void testGetExistingGroupMembersNullArguments() {

   @Test
   public void testGetExistingGroupMembersPasses() {
-    when(_graphClient.getRelatedEntities(eq(GROUP_URN_STRING),
-        eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), eq(RelationshipDirection.INCOMING), anyInt(),
-        anyInt(), any())).thenReturn(_entityRelationships);
-
-    assertEquals(USER_URN_LIST, _groupService.getExistingGroupMembers(_groupUrn, USER_URN.toString()));
+    when(_graphClient.getRelatedEntities(
+            eq(GROUP_URN_STRING),
+            eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)),
+            eq(RelationshipDirection.INCOMING),
+            anyInt(),
+            anyInt(),
+            any()))
+        .thenReturn(_entityRelationships);
+
+    assertEquals(
+        USER_URN_LIST, _groupService.getExistingGroupMembers(_groupUrn, USER_URN.toString()));
   }

   @Test
   public void testRemoveExistingGroupMembersNullArguments() {
-    assertThrows(() -> _groupService.removeExistingGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION));
-    assertThrows(() -> _groupService.removeExistingGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _groupService.removeExistingGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _groupService.removeExistingGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testRemoveExistingGroupMembersGroupNotInGroupMembership() throws Exception {
-    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn(
-        _entityResponseMap);
+    when(_entityClient.batchGetV2(
+            eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(_entityResponseMap);

-    _groupService.removeExistingGroupMembers(Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST,
-        SYSTEM_AUTHENTICATION);
+    _groupService.removeExistingGroupMembers(
+        Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION);
     verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testRemoveExistingGroupMembersPasses() throws Exception {
-    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn(
-        _entityResponseMap);
+    when(_entityClient.batchGetV2(
+            eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(_entityResponseMap);

-    _groupService.removeExistingGroupMembers(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST,
-        SYSTEM_AUTHENTICATION);
+    _groupService.removeExistingGroupMembers(
+        Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION);
     verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION));
   }
 }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java
index 2eed108b40300..cd9d5972103c1 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java
@@ -1,5 +1,9 @@
 package com.datahub.authentication.invite;

+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -17,11 +21,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;

-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class InviteTokenServiceTest {
   private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token";
   private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin";
@@ -68,20 +67,24 @@ public void testIsInviteTokenValidTrue() throws Exception {

   @Test
   public void testGetInviteTokenRoleNullEntity() throws Exception {
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(null);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(null);

-    assertThrows(() -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION));
   }

   @Test
   public void testGetInviteTokenRoleEmptyAspectMap() throws Exception {
     final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap());
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(entityResponse);

-    assertThrows(() -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION));
   }

   @Test
@@ -89,11 +92,14 @@ public void testGetInviteTokenRoleNoRole() throws Exception {
     final EntityResponse entityResponse = new EntityResponse();
     final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
     final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING);
-    aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data())));
+    aspectMap.put(
+        INVITE_TOKEN_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data())));
     entityResponse.setAspects(aspectMap);
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(entityResponse);

     Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION);
     assertNull(roleUrn);
@@ -103,12 +109,16 @@ public void testGetInviteTokenRoleNoRole() throws Exception {
   public void testGetInviteTokenRole() throws Exception {
     final EntityResponse entityResponse = new EntityResponse();
     final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn);
-    aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data())));
+    final InviteToken inviteTokenAspect =
+        new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn);
+    aspectMap.put(
+        INVITE_TOKEN_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data())));
     entityResponse.setAspects(aspectMap);
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(entityResponse);

     Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION);
     assertNotNull(roleUrn);
@@ -119,15 +129,22 @@ public void testGetInviteTokenRole() throws Exception {
   public void getInviteTokenRoleUrnDoesNotExist() throws Exception {
     when(_entityClient.exists(eq(roleUrn), eq(SYSTEM_AUTHENTICATION))).thenReturn(false);

-    assertThrows(() -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION));
+    assertThrows(
+        () -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION));
   }

   @Test
   public void getInviteTokenRegenerate() throws Exception {
     final SearchResult searchResult = new SearchResult();
     searchResult.setEntities(new SearchEntityArray());
-    when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult);
+    when(_entityClient.filter(
+            eq(INVITE_TOKEN_ENTITY_NAME),
+            any(),
+            any(),
+            anyInt(),
+            anyInt(),
+            eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(searchResult);
     when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING);
     when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING);
     when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING);
@@ -140,8 +157,14 @@ public void getInviteTokenRegenerate() throws Exception {
   public void getInviteTokenEmptySearchResult() throws Exception {
     final SearchResult searchResult = new SearchResult();
     searchResult.setEntities(new SearchEntityArray());
-    when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult);
+    when(_entityClient.filter(
+            eq(INVITE_TOKEN_ENTITY_NAME),
+            any(),
+            any(),
+            anyInt(),
+            anyInt(),
+            eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(searchResult);
     when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING);
     when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING);
     when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING);
@@ -157,10 +180,17 @@ public void getInviteTokenNullEntity() throws Exception {
     final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn);
     searchEntityArray.add(searchEntity);
     searchResult.setEntities(searchEntityArray);
-    when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult);
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(null);
+    when(_entityClient.filter(
+            eq(INVITE_TOKEN_ENTITY_NAME),
+            any(),
+            any(),
+            anyInt(),
+            anyInt(),
+            eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(searchResult);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(null);

     assertThrows(() -> _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION));
   }
@@ -172,12 +202,19 @@ public void getInviteTokenNoInviteTokenAspect() throws Exception {
     final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn);
     searchEntityArray.add(searchEntity);
     searchResult.setEntities(searchEntityArray);
-    when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult);
+    when(_entityClient.filter(
+            eq(INVITE_TOKEN_ENTITY_NAME),
+            any(),
+            any(),
+            anyInt(),
+            anyInt(),
+            eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(searchResult);

     final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap());
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(entityResponse);

     when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING);

@@ -191,19 +228,31 @@ public void getInviteToken() throws Exception {
     final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn);
     searchEntityArray.add(searchEntity);
     searchResult.setEntities(searchEntityArray);
-    when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult);
+    when(_entityClient.filter(
+            eq(INVITE_TOKEN_ENTITY_NAME),
+            any(),
+            any(),
+            anyInt(),
+            anyInt(),
+            eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(searchResult);

     final EntityResponse entityResponse = new EntityResponse();
     final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn);
-    aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data())));
+    final InviteToken inviteTokenAspect =
+        new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn);
+    aspectMap.put(
+        INVITE_TOKEN_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data())));
     entityResponse.setAspects(aspectMap);
-    when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(),
-        eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse);
+    when(_entityClient.getV2(
+            eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION)))
+        .thenReturn(entityResponse);
     when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN_STRING))).thenReturn(INVITE_TOKEN_STRING);

-    assertEquals(_inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION), INVITE_TOKEN_STRING);
+    assertEquals(
+        _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION),
+        INVITE_TOKEN_STRING);
   }
 }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java
index 4c78ab13c9cda..d8a0716937525 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java
@@ -1,5 +1,8 @@
 package com.datahub.authentication.post;

+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -16,10 +19,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;

-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class PostServiceTest {
   private static final Urn POST_URN = UrnUtils.getUrn("urn:li:post:123");
   private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE;
@@ -29,12 +28,15 @@ public class PostServiceTest {
   private static final String POST_TITLE = "title";
   private static final String POST_DESCRIPTION = "description";
   private static final String POST_LINK = "https://datahubproject.io";
-  private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION));
-  private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE)
-      .setTitle(POST_TITLE)
-      .setDescription(POST_DESCRIPTION)
-      .setLink(new Url(POST_LINK))
-      .setMedia(MEDIA);
+  private static final Media MEDIA =
+      new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION));
+  private static final PostContent POST_CONTENT =
+      new PostContent()
+          .setType(POST_CONTENT_TYPE)
+          .setTitle(POST_TITLE)
+          .setDescription(POST_DESCRIPTION)
+          .setLink(new Url(POST_LINK))
+          .setMedia(MEDIA);
   private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT;
   private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system";
   private static final Authentication SYSTEM_AUTHENTICATION =
@@ -57,7 +59,8 @@ public void testMapMedia() {
   @Test
   public void testMapPostContent() {
     PostContent postContent =
-        _postService.mapPostContent(POST_CONTENT_TYPE.toString(), POST_TITLE, POST_DESCRIPTION, POST_LINK, MEDIA);
+        _postService.mapPostContent(
+            POST_CONTENT_TYPE.toString(), POST_TITLE, POST_DESCRIPTION, POST_LINK, MEDIA);
     assertEquals(POST_CONTENT, postContent);
   }

diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java
index 811bdaaa0fcf8..155f1314a0190 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java
@@ -1,5 +1,7 @@
 package com.datahub.authentication.token;

+import static org.testng.AssertJUnit.*;
+
 import io.jsonwebtoken.Claims;
 import io.jsonwebtoken.JwsHeader;
 import java.math.BigInteger;
@@ -20,13 +22,9 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static org.testng.AssertJUnit.*;
-
-
 public class DataHubJwtSigningKeyResolverTest {

-  @InjectMocks
-  private DataHubJwtSigningKeyResolver resolver;
+  @InjectMocks private DataHubJwtSigningKeyResolver resolver;

   @Test
   public void testResolveSigningKeyWithPublicKey() throws Exception {
@@ -55,11 +53,12 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception {
     HttpResponse<String> httpResponse = Mockito.mock(HttpResponse.class);
     Mockito.when(httpResponse.statusCode()).thenReturn(200);

-    JSONObject token = new JSONObject(
-        "{\"kty\": \"RSA\", \"kid\": \"test_key\", \"n\": \"ueXyoaxgWhMTLwkowaskhiV85rbN9n_nLft8CxFUY3nbMpNybAWsWuhJ4SYLT4U-GbKdL-h-NYgBXKn"
-            + "GK1ieG6qSC25T3hWXTb3cNe73ZQUcZSivAV2tZouPYcb1XKSyKd-PsK8NsCpq1NHsJsrXSKq-7YCaf4MxIUaFXSZTE7ZNC0fPVqYH71jnyOU9FA_KJm0IC-x_Bs2g"
-            + "Ak3Eq1_6pZ_0VeYpczv82LACAUzi1vuU1gbbZLNHHl4DHwWb98eI1aCbWHNMux70Ba4aREOdKOWrxZ066W_NKUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH"
-            + "0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}");
+    JSONObject token =
+        new JSONObject(
+            "{\"kty\": \"RSA\", \"kid\": \"test_key\", \"n\":
\"ueXyoaxgWhMTLwkowaskhiV85rbN9n_nLft8CxFUY3nbMpNybAWsWuhJ4SYLT4U-GbKdL-h-NYgBXKn" + + "GK1ieG6qSC25T3hWXTb3cNe73ZQUcZSivAV2tZouPYcb1XKSyKd-PsK8NsCpq1NHsJsrXSKq-7YCaf4MxIUaFXSZTE7ZNC0fPVqYH71jnyOU9FA_KJm0IC-x_Bs2g" + + "Ak3Eq1_6pZ_0VeYpczv82LACAUzi1vuU1gbbZLNHHl4DHwWb98eI1aCbWHNMux70Ba4aREOdKOWrxZ066W_NKUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH" + + "0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}"); PublicKey expectedKey = getPublicKey(token); String responseJson = @@ -69,11 +68,14 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { + "KUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}]}"; Mockito.when(httpResponse.body()).thenReturn(responseJson); - Mockito.when(httpClient.send(Mockito.any(HttpRequest.class), Mockito.any(HttpResponse.BodyHandler.class))) + Mockito.when( + httpClient.send( + Mockito.any(HttpRequest.class), Mockito.any(HttpResponse.BodyHandler.class))) .thenReturn(httpResponse); HashSet<String> trustedIssuers = new HashSet<>(); trustedIssuers.add("https://example.com"); - DataHubJwtSigningKeyResolver resolver = new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); + DataHubJwtSigningKeyResolver resolver = + new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); resolver.client = httpClient; JwsHeader mockJwsHeader = Mockito.mock(JwsHeader.class); Mockito.when(mockJwsHeader.getKeyId()).thenReturn("test_key"); @@ -88,7 +90,8 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { void testInvalidIssuer() throws Exception { HashSet<String> trustedIssuers = new HashSet<>(); - DataHubJwtSigningKeyResolver resolver = new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); + DataHubJwtSigningKeyResolver resolver = + new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); JwsHeader mockJwsHeader = Mockito.mock(JwsHeader.class); Claims mockClaims = Mockito.mock(Claims.class); resolver.resolveSigningKey(mockJwsHeader, mockClaims); @@ -120,8 +123,10 @@ private PublicKey getPublicKey(JSONObject token) throws Exception { if (token.get("kty").toString().equals("RSA")) { try { KeyFactory kf = KeyFactory.getInstance("RSA"); - BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); - BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); + BigInteger modulus = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); + BigInteger exponent = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); publicKey = kf.generatePublic(new RSAPublicKeySpec(modulus, exponent)); } catch (InvalidKeySpecException e) { throw new InvalidKeySpecException("Invalid public key", e); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java index 1c46e864a559e..ed10022632a56 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.token; +import static com.datahub.authentication.token.TokenClaims.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.authenticator.DataHubTokenAuthenticatorTest; @@ -13,14 +16,9 @@ import 
com.linkedin.metadata.models.registry.ConfigEntityRegistry; import java.util.Date; import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.datahub.authentication.token.TokenClaims.*; -import static org.testng.Assert.*; - - public class StatefulTokenServiceTest { private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; @@ -32,7 +30,8 @@ public class StatefulTokenServiceTest { public void testConstructor() { assertThrows(() -> new StatefulTokenService(null, null, null, null, null)); assertThrows(() -> new StatefulTokenService(TEST_SIGNING_KEY, null, null, null, null)); - assertThrows(() -> new StatefulTokenService(TEST_SIGNING_KEY, "UNSUPPORTED_ALG", null, null, null)); + assertThrows( + () -> new StatefulTokenService(TEST_SIGNING_KEY, "UNSUPPORTED_ALG", null, null, null)); // Succeeds: new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); @@ -40,11 +39,12 @@ public void testConstructor() { @Test public void testGenerateAccessTokenPersonalToken() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); // Verify token claims @@ -65,10 +65,15 @@ public void testGenerateAccessTokenPersonalToken() throws Exception { @Test public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, - null, System.currentTimeMillis(), + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, + datahub, + null, + System.currentTimeMillis(), "some token", "A token description", datahub.toUrnStr()); @@ -92,11 +97,12 @@ public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { @Test public void testGenerateAccessTokenSessionToken() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.SESSION, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.SESSION, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); @@ -118,14 +124,21 @@ public void testGenerateAccessTokenSessionToken() throws Exception { @Test public void testValidateAccessTokenFailsDueToExpiration() { - StatefulTokenService - tokenService = new StatefulTokenService(TEST_SIGNING_KEY, 
"HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); // Generate token that expires immediately. Date date = new Date(); - //This method returns the time in millis + // This method returns the time in millis long createdAtInMs = date.getTime(); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L, - createdAtInMs, "token", "", "urn:li:corpuser:datahub"); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, + new Actor(ActorType.USER, "datahub"), + 0L, + createdAtInMs, + "token", + "", + "urn:li:corpuser:datahub"); assertNotNull(token); // Validation should fail. @@ -134,12 +147,13 @@ public void testValidateAccessTokenFailsDueToExpiration() { @Test public void testValidateAccessTokenFailsDueToManipulation() { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); // Change single character @@ -152,23 +166,30 @@ public void testValidateAccessTokenFailsDueToManipulation() { @Test public void generateRevokeToken() throws TokenException { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - final ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); - final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + final ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + DataHubTokenAuthenticatorTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + final AspectSpec keyAspectSpec = + configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); - Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); + Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) + .thenReturn(keyAspectSpec); Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0); Mockito.when(mockService.deleteUrn(Mockito.any(Urn.class))).thenReturn(result); - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = 
tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); // Revoke token tokenService.revokeAccessToken(tokenService.hash(token)); @@ -177,7 +198,5 @@ public void generateRevokeToken() throws TokenException { assertThrows(TokenException.class, () -> tokenService.validateAccessToken(token)); } - private void mockStateful() { - - } + private void mockStateful() {} } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java index 4268521a07c0c..841308441569d 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.token; +import static com.datahub.authentication.token.TokenClaims.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.authenticator.DataHubTokenAuthenticator; @@ -15,10 +18,6 @@ import javax.crypto.spec.SecretKeySpec; import org.testng.annotations.Test; -import static com.datahub.authentication.token.TokenClaims.*; -import static org.testng.Assert.*; - - public class StatelessTokenServiceTest { private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; @@ -37,8 +36,11 @@ public void testConstructor() { @Test public void testGenerateAccessTokenPersonalToken() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Verify token claims @@ -59,10 +61,11 @@ public void testGenerateAccessTokenPersonalToken() throws Exception { @Test public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, - new Actor(ActorType.USER, "datahub"), - null); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), null); assertNotNull(token); // Verify token claims @@ -83,8 +86,11 @@ public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { @Test public void testGenerateAccessTokenSessionToken() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.SESSION, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + 
statelessTokenService.generateAccessToken( + TokenType.SESSION, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Verify token claims @@ -105,26 +111,34 @@ public void testGenerateAccessTokenSessionToken() throws Exception { @Test public void testValidateAccessTokenFailsDueToExpiration() { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); // Generate token that expires immediately. - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L); assertNotNull(token); // Validation should fail. - assertThrows(TokenExpiredException.class, () -> statelessTokenService.validateAccessToken(token)); + assertThrows( + TokenExpiredException.class, () -> statelessTokenService.validateAccessToken(token)); } @Test public void testValidateAccessTokenFailsDueToManipulation() { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Change single character String changedToken = token.substring(1); // Validation should fail. - assertThrows(TokenException.class, () -> statelessTokenService.validateAccessToken(changedToken)); + assertThrows( + TokenException.class, () -> statelessTokenService.validateAccessToken(changedToken)); } @Test @@ -134,31 +148,37 @@ public void testValidateAccessTokenFailsDueToNoneAlgorithm() { "eyJhbGciOiJub25lIn0.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6Il9fZGF0YWh1Yl9zeXN0ZW0iL" + "CJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwianRpIjoiN2VmOTkzYjQtMjBiOC00Y2Y5LTljNm" + "YtMTE2NjNjZWVmOTQzIiwic3ViIjoiZGF0YWh1YiIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9."; - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); // Validation should fail. assertThrows(TokenException.class, () -> statelessTokenService.validateAccessToken(badToken)); } @Test public void testValidateAccessTokenFailsDueToUnsupportedSigningAlgorithm() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); Map<String, Object> claims = new HashMap<>(); - claims.put(TOKEN_VERSION_CLAIM_NAME, String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. + claims.put( + TOKEN_VERSION_CLAIM_NAME, + String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. 
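
// A minimal, standalone sketch of the failure mode this test exercises, assuming
// jjwt 0.11.x (the io.jsonwebtoken API already used above). A token signed with
// HS384 cannot be validated by a parser pinned to a different HS256 key: jjwt
// refuses to verify when the key/algorithm pair does not match, throwing a
// JwtException (a signature mismatch, or a weak-key error if the key is too short
// for HS384). The key literals are the arbitrary test secrets from the surrounding
// tests, not production material; the class name is hypothetical.
import io.jsonwebtoken.JwtException;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import java.nio.charset.StandardCharsets;
import java.security.Key;
import javax.crypto.spec.SecretKeySpec;

public class UnsupportedAlgorithmSketch {
  public static void main(String[] args) {
    Key hs384Key =
        new SecretKeySpec(
            "TLHLdPSivAwIjXP4MT4TtlitsEGkOKjQGNnqsprisfghpU8g".getBytes(StandardCharsets.UTF_8),
            SignatureAlgorithm.HS384.getJcaName());
    Key hs256Key =
        new SecretKeySpec(
            "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=".getBytes(StandardCharsets.UTF_8),
            SignatureAlgorithm.HS256.getJcaName());

    // Sign with HS384, mirroring the badToken built in this test.
    String badToken =
        Jwts.builder()
            .setSubject("datahub")
            .signWith(hs384Key, SignatureAlgorithm.HS384)
            .compact();

    try {
      // The verifier only knows the HS256 key, so validation is rejected.
      Jwts.parserBuilder().setSigningKey(hs256Key).build().parseClaimsJws(badToken);
    } catch (JwtException expected) {
      System.out.println("Rejected as expected: " + expected.getMessage());
    }
  }
}
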
claims.put(TOKEN_TYPE_CLAIM_NAME, "SESSION"); claims.put(ACTOR_TYPE_CLAIM_NAME, "USER"); claims.put(ACTOR_ID_CLAIM_NAME, "__datahub_system"); - final JwtBuilder builder = Jwts.builder() - .addClaims(claims) - .setId(UUID.randomUUID().toString()) - .setIssuer("datahub-metadata-service") - .setSubject("datahub"); - builder.setExpiration(new Date(System.currentTimeMillis() + 60)); + final JwtBuilder builder = + Jwts.builder() + .addClaims(claims) + .setId(UUID.randomUUID().toString()) + .setIssuer("datahub-metadata-service") + .setSubject("datahub"); + builder.setExpiration(new Date(System.currentTimeMillis() + 60)); final String testSigningKey = "TLHLdPSivAwIjXP4MT4TtlitsEGkOKjQGNnqsprisfghpU8g"; - byte [] apiKeySecretBytes = testSigningKey.getBytes(StandardCharsets.UTF_8); - final Key signingKey = new SecretKeySpec(apiKeySecretBytes, SignatureAlgorithm.HS384.getJcaName()); + byte[] apiKeySecretBytes = testSigningKey.getBytes(StandardCharsets.UTF_8); + final Key signingKey = + new SecretKeySpec(apiKeySecretBytes, SignatureAlgorithm.HS384.getJcaName()); final String badToken = builder.signWith(signingKey, SignatureAlgorithm.HS384).compact(); // Validation should fail. diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index 0102311ff3b61..9cb5d5cb697cc 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.authentication.user; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class NativeUserServiceTest { private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; @@ -52,39 +51,60 @@ public void setupTest() throws Exception { AuthenticationConfiguration authenticationConfiguration = new AuthenticationConfiguration(); authenticationConfiguration.setSystemClientId("someCustomId"); - _nativeUserService = new NativeUserService(_entityService, _entityClient, _secretService, authenticationConfiguration); + _nativeUserService = + new NativeUserService( + _entityService, _entityClient, _secretService, authenticationConfiguration); } @Test public void testCreateNativeUserNullArguments() { assertThrows( - () -> _nativeUserService.createNativeUser(null, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, - SYSTEM_AUTHENTICATION)); - } - - @Test(expectedExceptions = RuntimeException.class, 
expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + () -> + _nativeUserService.createNativeUser( + null, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, SYSTEM_AUTHENTICATION)); + } + + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserAlreadyExists() throws Exception { // The user already exists when(_entityService.exists(any())).thenReturn(true); - _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserDatahub() throws Exception { - _nativeUserService.createNativeUser(DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! 
Cannot create a new user.") public void testCreateNativeUserUserSystemUser() throws Exception { - _nativeUserService.createNativeUser(SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } @Test @@ -94,7 +114,8 @@ public void testCreateNativeUserPasses() throws Exception { when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); when(_secretService.getHashedPassword(any(), any())).thenReturn(HASHED_PASSWORD); - _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } @Test @@ -121,13 +142,17 @@ public void testUpdateCorpUserCredentialsPasses() throws Exception { @Test public void testGenerateNativeUserResetTokenNullArguments() { - assertThrows(() -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { // Nonexistent corpUserCredentials - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(null); _nativeUserService.generateNativeUserPasswordResetToken(USER_URN_STRING, SYSTEM_AUTHENTICATION); } @@ -135,8 +160,8 @@ public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { @Test public void testGenerateNativeUserResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); @@ -148,80 +173,101 @@ public void testGenerateNativeUserResetToken() throws Exception { @Test public void testResetCorpUserCredentialsNullArguments() { - assertThrows(() -> _nativeUserService.resetCorpUserCredentials(null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); assertThrows( - () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + () -> + _nativeUserService.resetCorpUserCredentials( + null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); assertThrows( - () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); + () -> + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User has not generated a password reset token!") + 
@Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "User has not generated a password reset token!") public void testResetCorpUserCredentialsNoPasswordResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); // No password reset token when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(false); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, - expectedExceptionsMessageRegExp = "Invalid reset token. Please ask your administrator to send you an updated link!") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = + "Invalid reset token. Please ask your administrator to send you an updated link!") public void testResetCorpUserCredentialsBadResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( - Instant.now().toEpochMilli()); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()) + .thenReturn(Instant.now().toEpochMilli()); // Reset token won't match when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn("badResetToken"); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, - expectedExceptionsMessageRegExp = "Reset token has expired! Please ask your administrator to create a new one") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = + "Reset token has expired! 
Please ask your administrator to create a new one") public void testResetCorpUserCredentialsExpiredResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - // Reset token expiration time will be before the system time when we run resetCorpUserCredentials + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + // Reset token expiration time will be before the system time when we run + // resetCorpUserCredentials when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn(0L); when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } @Test public void testResetCorpUserCredentialsPasses() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( - Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()) + .thenReturn(Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); when(_secretService.generateSalt(anyInt())).thenReturn(SALT); when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), any()); } @@ -233,7 +279,8 @@ public void testDoesPasswordMatchNullArguments() { @Test public void testDoesPasswordMatchNoCorpUserCredentials() throws Exception { - when(_entityService.getLatestAspect(any(), 
eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(null); assertFalse(_nativeUserService.doesPasswordMatch(USER_URN_STRING, PASSWORD)); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index b0b206001209c..ffee378a363c7 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -1,4 +1,19 @@ package com.datahub.authorization; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE; +import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE; +import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -30,7 +45,6 @@ import com.linkedin.policy.DataHubActorFilter; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.policy.DataHubResourceFilter; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -38,34 +52,18 @@ import java.util.Map; import java.util.Optional; import java.util.Set; - +import javax.annotation.Nullable; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nullable; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE; -import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE; -import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.assertFalse; - - public class DataHubAuthorizerTest { public static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; private static final Urn PARENT_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:parent"); private static final Urn CHILD_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:child"); - private static final Urn USER_WITH_ADMIN_ROLE = UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); + private static final Urn USER_WITH_ADMIN_ROLE = + UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); private EntityClient _entityClient; private DataHubAuthorizer _dataHubAuthorizer; @@ -76,102 +74,158 @@ public void setupTest() throws Exception { // Init mocks. 
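
// A small reference sketch of the two TestNG exception idioms being reformatted in
// the tests above. assertThrows takes a lambda (optionally with the expected type),
// while @Test(expectedExceptions = ..., expectedExceptionsMessageRegExp = ...)
// declares the expectation on the annotation; the message attribute is treated as a
// regular expression, which is why the literal messages above, dots included, still
// match. The UserService class here is hypothetical.
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertThrows;
import static org.testng.Assert.expectThrows;

import org.testng.annotations.Test;

public class ExceptionIdiomSketch {
  static final class UserService {
    void createUser(String urn) {
      throw new RuntimeException("This user already exists! Cannot create a new user.");
    }
  }

  @Test
  public void lambdaStyle() {
    UserService service = new UserService();
    // Assert the call throws; expectThrows also hands back the exception for inspection.
    assertThrows(RuntimeException.class, () -> service.createUser("urn:li:corpuser:datahub"));
    RuntimeException e =
        expectThrows(RuntimeException.class, () -> service.createUser("urn:li:corpuser:datahub"));
    assertEquals(e.getMessage(), "This user already exists! Cannot create a new user.");
  }

  @Test(
      expectedExceptions = RuntimeException.class,
      expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.")
  public void annotationStyle() {
    new UserService().createUser("urn:li:corpuser:datahub");
  }
}
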
final Urn activePolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:0"); - final DataHubPolicyInfo activePolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_TAGS"), null); + final DataHubPolicyInfo activePolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_TAGS"), null); final EnvelopedAspectMap activeAspectMap = new EnvelopedAspectMap(); - activeAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(activePolicy.data()))); + activeAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(activePolicy.data()))); final Urn inactivePolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:1"); - final DataHubPolicyInfo inactivePolicy = createDataHubPolicyInfo(false, ImmutableList.of("EDIT_ENTITY_OWNERS"), null); + final DataHubPolicyInfo inactivePolicy = + createDataHubPolicyInfo(false, ImmutableList.of("EDIT_ENTITY_OWNERS"), null); final EnvelopedAspectMap inactiveAspectMap = new EnvelopedAspectMap(); - inactiveAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inactivePolicy.data()))); + inactiveAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inactivePolicy.data()))); final Urn parentDomainPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:2"); - final DataHubPolicyInfo parentDomainPolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_DOCS"), PARENT_DOMAIN_URN); + final DataHubPolicyInfo parentDomainPolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_DOCS"), PARENT_DOMAIN_URN); final EnvelopedAspectMap parentDomainPolicyAspectMap = new EnvelopedAspectMap(); - parentDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomainPolicy.data()))); + parentDomainPolicyAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomainPolicy.data()))); final Urn childDomainPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:3"); - final DataHubPolicyInfo childDomainPolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_STATUS"), CHILD_DOMAIN_URN); + final DataHubPolicyInfo childDomainPolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_STATUS"), CHILD_DOMAIN_URN); final EnvelopedAspectMap childDomainPolicyAspectMap = new EnvelopedAspectMap(); - childDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); + childDomainPolicyAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); final Urn adminPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:4"); final DataHubActorFilter actorFilter = new DataHubActorFilter(); - actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); - final DataHubPolicyInfo adminPolicy = createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); + actorFilter.setRoles( + new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); + final DataHubPolicyInfo adminPolicy = + createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); final EnvelopedAspectMap adminPolicyAspectMap = new EnvelopedAspectMap(); - adminPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); + adminPolicyAspectMap.put( + 
DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); - final ScrollResult policySearchResult1 = new ScrollResult() + final ScrollResult policySearchResult1 = + new ScrollResult() .setScrollId("1") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); - final ScrollResult policySearchResult2 = new ScrollResult() + final ScrollResult policySearchResult2 = + new ScrollResult() .setScrollId("2") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); - final ScrollResult policySearchResult3 = new ScrollResult() + final ScrollResult policySearchResult3 = + new ScrollResult() .setScrollId("3") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); - final ScrollResult policySearchResult4 = new ScrollResult() - .setScrollId("4") - .setNumEntities(5) + final ScrollResult policySearchResult4 = + new ScrollResult() + .setScrollId("4") + .setNumEntities(5) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(childDomainPolicyUrn)))); + + final ScrollResult policySearchResult5 = + new ScrollResult() + .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(childDomainPolicyUrn)))); - - final ScrollResult policySearchResult5 = new ScrollResult() - .setNumEntities(5) - .setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(adminPolicyUrn)))); - - when(_entityClient.scrollAcrossEntities(eq(List.of("dataHubPolicy")), eq(""), isNull(), any(), isNull(), - anyInt(), eq(new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipHighlighting(true).setSkipCache(true)), any())) - .thenReturn(policySearchResult1) - .thenReturn(policySearchResult2) - .thenReturn(policySearchResult3) - .thenReturn(policySearchResult4) - .thenReturn(policySearchResult5); - - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())).thenAnswer(args -> { - Set<Urn> inputUrns = args.getArgument(1); - Urn urn = inputUrns.stream().findFirst().get(); - - switch (urn.toString()) { - case "urn:li:dataHubPolicy:0": - return Map.of(activePolicyUrn, new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); - case "urn:li:dataHubPolicy:1": - return Map.of(inactivePolicyUrn, new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); - case "urn:li:dataHubPolicy:2": - return Map.of(parentDomainPolicyUrn, new EntityResponse().setUrn(parentDomainPolicyUrn).setAspects(parentDomainPolicyAspectMap)); - case "urn:li:dataHubPolicy:3": - return Map.of(childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspects(childDomainPolicyAspectMap)); - case "urn:li:dataHubPolicy:4": - return Map.of(adminPolicyUrn, new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); - default: - throw new IllegalStateException(); - } - }); - - final List<Urn> userUrns = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user3"), Urn.createFromString("urn:li:corpuser:user4")); - final List<Urn> 
groupUrns = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group3"), Urn.createFromString("urn:li:corpGroup:group4")); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(adminPolicyUrn)))); + + when(_entityClient.scrollAcrossEntities( + eq(List.of("dataHubPolicy")), + eq(""), + isNull(), + any(), + isNull(), + anyInt(), + eq( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setSkipCache(true)), + any())) + .thenReturn(policySearchResult1) + .thenReturn(policySearchResult2) + .thenReturn(policySearchResult3) + .thenReturn(policySearchResult4) + .thenReturn(policySearchResult5); + + when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())) + .thenAnswer( + args -> { + Set<Urn> inputUrns = args.getArgument(1); + Urn urn = inputUrns.stream().findFirst().get(); + + switch (urn.toString()) { + case "urn:li:dataHubPolicy:0": + return Map.of( + activePolicyUrn, + new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); + case "urn:li:dataHubPolicy:1": + return Map.of( + inactivePolicyUrn, + new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); + case "urn:li:dataHubPolicy:2": + return Map.of( + parentDomainPolicyUrn, + new EntityResponse() + .setUrn(parentDomainPolicyUrn) + .setAspects(parentDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:3": + return Map.of( + childDomainPolicyUrn, + new EntityResponse() + .setUrn(childDomainPolicyUrn) + .setAspects(childDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:4": + return Map.of( + adminPolicyUrn, + new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); + default: + throw new IllegalStateException(); + } + }); + + final List<Urn> userUrns = + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user3"), + Urn.createFromString("urn:li:corpuser:user4")); + final List<Urn> groupUrns = + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group3"), + Urn.createFromString("urn:li:corpGroup:group4")); EntityResponse ownershipResponse = new EntityResponse(); EnvelopedAspectMap ownershipAspectMap = new EnvelopedAspectMap(); - ownershipAspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(userUrns, groupUrns).data()))); + ownershipAspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new com.linkedin.entity.Aspect(createOwnershipAspect(userUrns, groupUrns).data()))); ownershipResponse.setAspects(ownershipAspectMap); when(_entityClient.getV2(any(), any(), eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())) .thenReturn(ownershipResponse); @@ -181,31 +235,45 @@ public void setupTest() throws Exception { .thenReturn(createDomainsResponse(CHILD_DOMAIN_URN)); // Mocks to get parent domains on a domain - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(CHILD_DOMAIN_URN)), eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), any())) + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(CHILD_DOMAIN_URN)), + eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + any())) .thenReturn(createDomainPropertiesBatchResponse(PARENT_DOMAIN_URN)); // Mocks to reach the stopping point on domain parents - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(PARENT_DOMAIN_URN)), eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), any())) + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(PARENT_DOMAIN_URN)), + 
eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + any())) .thenReturn(createDomainPropertiesBatchResponse(null)); // Mocks to reach role membership for a user urn - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any()) - ).thenReturn(createUserRoleMembershipBatchResponse(USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); - - final Authentication systemAuthentication = new Authentication( - new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), - "" - ); - - _dataHubAuthorizer = new DataHubAuthorizer( - systemAuthentication, - _entityClient, - 10, - 10, - DataHubAuthorizer.AuthorizationMode.DEFAULT, - 1 // force pagination logic - ); - _dataHubAuthorizer.init(Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn( + createUserRoleMembershipBatchResponse( + USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + + final Authentication systemAuthentication = + new Authentication(new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), ""); + + _dataHubAuthorizer = + new DataHubAuthorizer( + systemAuthentication, + _entityClient, + 10, + 10, + DataHubAuthorizer.AuthorizationMode.DEFAULT, + 1 // force pagination logic + ); + _dataHubAuthorizer.init( + Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); _dataHubAuthorizer.invalidateCache(); Thread.sleep(500); // Sleep so the runnable can execute. (not ideal) } @@ -217,11 +285,11 @@ public void testSystemAuthentication() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID).toUrnStr(), - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID).toUrnStr(), + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -231,11 +299,9 @@ public void testAuthorizeGranted() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -246,11 +312,9 @@ public void testAuthorizeNotGranted() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); // Policy for this privilege is inactive. 
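
// A minimal sketch of the two Mockito stubbing patterns used in the setup above:
// chaining thenReturn(...) yields one value per successive call (how the authorizer
// is fed one page of policies per scroll request), while thenAnswer(...) computes
// the reply from the actual invocation arguments (how batchGetV2 returns a different
// policy response per urn). The PolicyClient interface is hypothetical, standing in
// for EntityClient.
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;

public class StubbingSketch {
  interface PolicyClient {
    List<String> scroll(String scrollId);

    String fetch(String urn);
  }

  public static void main(String[] args) {
    PolicyClient client = mock(PolicyClient.class);

    // Consecutive stubbing: each call advances to the next canned page.
    when(client.scroll(anyString()))
        .thenReturn(List.of("urn:li:dataHubPolicy:0"))
        .thenReturn(List.of("urn:li:dataHubPolicy:1"))
        .thenReturn(List.of()); // an empty page ends pagination

    // Answer-based stubbing: derive the result from the invocation's argument.
    when(client.fetch(anyString()))
        .thenAnswer(invocation -> "policyInfo for " + invocation.getArgument(0));

    System.out.println(client.scroll("1")); // [urn:li:dataHubPolicy:0]
    System.out.println(client.scroll("2")); // [urn:li:dataHubPolicy:1]
    System.out.println(client.fetch("urn:li:dataHubPolicy:0"));
  }
}
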
- AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_OWNERS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } @@ -263,11 +327,9 @@ public void testAllowAllMode() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); // Policy for this privilege is inactive. - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_OWNERS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -278,11 +340,9 @@ public void testInvalidateCache() throws Exception { // First make sure that the default policies are as expected. EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); @@ -291,64 +351,78 @@ public void testInvalidateCache() throws Exception { emptyResult.setNumEntities(0); emptyResult.setEntities(new SearchEntityArray()); - when(_entityClient.search(eq("dataHubPolicy"), eq(""), isNull(), any(), anyInt(), anyInt(), any(), - eq(new SearchFlags().setFulltext(true)))).thenReturn(emptyResult); - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())).thenReturn( - Collections.emptyMap()); + when(_entityClient.search( + eq("dataHubPolicy"), + eq(""), + isNull(), + any(), + anyInt(), + anyInt(), + any(), + eq(new SearchFlags().setFulltext(true)))) + .thenReturn(emptyResult); + when(_entityClient.batchGetV2( + eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())) + .thenReturn(Collections.emptyMap()); // Invalidate Cache. _dataHubAuthorizer.invalidateCache(); Thread.sleep(500); // Sleep so the runnable can execute. (not ideal) - // Now verify that invalidating the cache updates the policies by running the same authorization request. + // Now verify that invalidating the cache updates the policies by running the same authorization + // request. assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } @Test public void testAuthorizedActorsActivePolicy() throws Exception { final AuthorizedActors actors = - _dataHubAuthorizer.authorizedActors("EDIT_ENTITY_TAGS", // Should be inside the active policy. + _dataHubAuthorizer.authorizedActors( + "EDIT_ENTITY_TAGS", // Should be inside the active policy. 
Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); assertTrue(actors.isAllUsers()); assertTrue(actors.isAllGroups()); - assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of( - Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2"), - Urn.createFromString("urn:li:corpuser:user3"), - Urn.createFromString("urn:li:corpuser:user4") - )); - - assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of( - Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2"), - Urn.createFromString("urn:li:corpGroup:group3"), - Urn.createFromString("urn:li:corpGroup:group4") - )); + assertEquals( + new HashSet<>(actors.getUsers()), + ImmutableSet.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2"), + Urn.createFromString("urn:li:corpuser:user3"), + Urn.createFromString("urn:li:corpuser:user4"))); + + assertEquals( + new HashSet<>(actors.getGroups()), + ImmutableSet.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2"), + Urn.createFromString("urn:li:corpGroup:group3"), + Urn.createFromString("urn:li:corpGroup:group4"))); } @Test public void testAuthorizedRoleActivePolicy() throws Exception { final AuthorizedActors actors = - _dataHubAuthorizer.authorizedActors("EDIT_USER_PROFILE", // Should be inside the active policy. + _dataHubAuthorizer.authorizedActors( + "EDIT_USER_PROFILE", // Should be inside the active policy. Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); assertFalse(actors.isAllUsers()); assertFalse(actors.isAllGroups()); assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of()); assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of()); - assertEquals(new HashSet<>(actors.getRoles()), ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + assertEquals( + new HashSet<>(actors.getRoles()), + ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); } @Test public void testAuthorizationBasedOnRoleIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - USER_WITH_ADMIN_ROLE.toString(), - "EDIT_USER_PROFILE", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + USER_WITH_ADMIN_ROLE.toString(), "EDIT_USER_PROFILE", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -357,11 +431,9 @@ public void testAuthorizationBasedOnRoleIsAllowed() { public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_STATUS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_STATUS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -370,11 +442,9 @@ public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { public void testAuthorizationOnDomainWithParentPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_DOCS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new 
AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_DOCS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -383,19 +453,24 @@ public void testAuthorizationOnDomainWithParentPrivilegeIsAllowed() { public void testAuthorizationOnDomainWithoutPrivilegeIsDenied() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_DOC_LINKS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_DOC_LINKS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } - private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception { + private DataHubPolicyInfo createDataHubPolicyInfo( + boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception { - List<Urn> users = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2")); - List<Urn> groups = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), Urn.createFromString("urn:li:corpGroup:group2")); + List<Urn> users = + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")); + List<Urn> groups = + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")); final DataHubActorFilter actorFilter = new DataHubActorFilter(); actorFilter.setResourceOwners(true); @@ -407,8 +482,12 @@ private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List<String> p return createDataHubPolicyInfoFor(active, privileges, domain, actorFilter); } - private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String> privileges, - @Nullable final Urn domain, DataHubActorFilter actorFilter) throws Exception { + private DataHubPolicyInfo createDataHubPolicyInfoFor( + boolean active, + List<String> privileges, + @Nullable final Urn domain, + DataHubActorFilter actorFilter) + throws Exception { final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); dataHubPolicyInfo.setState(active ? 
ACTIVE_POLICY_STATE : INACTIVE_POLICY_STATE); @@ -424,7 +503,10 @@ private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String resourceFilter.setType("dataset"); if (domain != null) { - resourceFilter.setFilter(FilterUtils.newFilter(ImmutableMap.of(EntityFieldType.DOMAIN, Collections.singletonList(domain.toString())))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.DOMAIN, Collections.singletonList(domain.toString())))); } dataHubPolicyInfo.setResources(resourceFilter); @@ -432,31 +514,34 @@ private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String return dataHubPolicyInfo; } - private Ownership createOwnershipAspect(final List<Urn> userOwners, final List<Urn> groupOwners) throws Exception { + private Ownership createOwnershipAspect(final List<Urn> userOwners, final List<Urn> groupOwners) + throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); if (userOwners != null) { - userOwners.forEach(userUrn -> { - final Owner userOwner = new Owner(); - userOwner.setOwner(userUrn); - userOwner.setType(OwnershipType.DATAOWNER); - owners.add(userOwner); - } - ); + userOwners.forEach( + userUrn -> { + final Owner userOwner = new Owner(); + userOwner.setOwner(userUrn); + userOwner.setType(OwnershipType.DATAOWNER); + owners.add(userOwner); + }); } if (groupOwners != null) { - groupOwners.forEach(groupUrn -> { - final Owner groupOwner = new Owner(); - groupOwner.setOwner(groupUrn); - groupOwner.setType(OwnershipType.DATAOWNER); - owners.add(groupOwner); - }); + groupOwners.forEach( + groupUrn -> { + final Owner groupOwner = new Owner(); + groupOwner.setOwner(groupUrn); + groupOwner.setType(OwnershipType.DATAOWNER); + owners.add(groupOwner); + }); } ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -466,13 +551,15 @@ private EntityResponse createDomainsResponse(final Urn domainUrn) { EnvelopedAspectMap domainsAspectMap = new EnvelopedAspectMap(); final Domains domains = new Domains(); domains.setDomains(new UrnArray(domainUrns)); - domainsAspectMap.put(DOMAINS_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(domains.data()))); + domainsAspectMap.put( + DOMAINS_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(domains.data()))); domainsResponse.setAspects(domainsAspectMap); return domainsResponse; } - private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(@Nullable final Urn parentDomainUrn) { + private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse( + @Nullable final Urn parentDomainUrn) { final Map<Urn, EntityResponse> batchResponse = new HashMap<>(); final EntityResponse response = new EntityResponse(); EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); @@ -480,14 +567,16 @@ private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(@Nullable f if (parentDomainUrn != null) { properties.setParentDomain(parentDomainUrn); } - aspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(properties.data()))); + aspectMap.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(properties.data()))); 
response.setAspects(aspectMap); batchResponse.put(parentDomainUrn, response); return batchResponse; } - private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(final Urn userUrn, @Nullable final Urn roleUrn) { + private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse( + final Urn userUrn, @Nullable final Urn roleUrn) { final Map<Urn, EntityResponse> batchResponse = new HashMap<>(); final EntityResponse response = new EntityResponse(); EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); @@ -495,14 +584,17 @@ private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(final Urn if (roleUrn != null) { membership.setRoles(new UrnArray(roleUrn)); } - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(membership.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(membership.data()))); response.setAspects(aspectMap); batchResponse.put(userUrn, response); return batchResponse; } - private AuthorizerContext createAuthorizerContext(final Authentication systemAuthentication, final EntityClient entityClient) { - return new AuthorizerContext(Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); + private AuthorizerContext createAuthorizerContext( + final Authentication systemAuthentication, final EntityClient entityClient) { + return new AuthorizerContext( + Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java index 2790c16ba75e6..08ec91d5287dc 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java @@ -1,5 +1,10 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.PoliciesConfig.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -32,12 +37,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class PolicyEngineTest { private static final String AUTHORIZED_PRINCIPAL = "urn:li:corpuser:datahub"; @@ -46,7 +45,8 @@ public class PolicyEngineTest { private static final String RESOURCE_URN = "urn:li:dataset:test"; private static final String DOMAIN_URN = "urn:li:domain:domain1"; private static final String OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__technical_owner"; - private static final String OTHER_OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__data_steward"; + private static final String OTHER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__data_steward"; private EntityClient _entityClient; private PolicyEngine _policyEngine; @@ -63,10 +63,16 @@ public void setupTest() throws Exception { _policyEngine = new PolicyEngine(Mockito.mock(Authentication.class), _entityClient); authorizedUserUrn = 
Urn.createFromString(AUTHORIZED_PRINCIPAL); - resolvedAuthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, AUTHORIZED_PRINCIPAL, - Collections.emptySet(), Collections.emptySet(), Collections.singleton(AUTHORIZED_GROUP)); + resolvedAuthorizedUserSpec = + buildEntityResolvers( + CORP_USER_ENTITY_NAME, + AUTHORIZED_PRINCIPAL, + Collections.emptySet(), + Collections.emptySet(), + Collections.singleton(AUTHORIZED_GROUP)); unauthorizedUserUrn = Urn.createFromString(UNAUTHORIZED_PRINCIPAL); - resolvedUnauthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); + resolvedUnauthorizedUserSpec = + buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); resourceUrn = Urn.createFromString(RESOURCE_URN); // Init role membership mocks. @@ -74,25 +80,39 @@ public void setupTest() throws Exception { authorizedEntityResponse.setUrn(authorizedUserUrn); Map<Urn, EntityResponse> authorizedEntityResponseMap = Collections.singletonMap(authorizedUserUrn, authorizedEntityResponse); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any())).thenReturn(authorizedEntityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn(authorizedEntityResponseMap); EntityResponse unauthorizedEntityResponse = createUnauthorizedEntityResponse(); unauthorizedEntityResponse.setUrn(unauthorizedUserUrn); Map<Urn, EntityResponse> unauthorizedEntityResponseMap = Collections.singletonMap(unauthorizedUserUrn, unauthorizedEntityResponse); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(unauthorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any())).thenReturn(unauthorizedEntityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(unauthorizedUserUrn)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn(unauthorizedEntityResponseMap); // Init ownership type mocks. 
EntityResponse entityResponse = new EntityResponse(); EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data()))); + envelopedAspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data()))); entityResponse.setAspects(envelopedAspectMap); Map<Urn, EntityResponse> mockMap = mock(Map.class); - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(resourceUrn)), - eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())).thenReturn(mockMap); + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(resourceUrn)), + eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(mockMap); when(mockMap.get(eq(resourceUrn))).thenReturn(entityResponse); } @@ -121,7 +141,10 @@ public void testEvaluatePolicyInactivePolicyState() { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -150,7 +173,10 @@ public void testEvaluatePolicyPrivilegeFilterNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_OWNERS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -175,8 +201,8 @@ public void testEvaluatePlatformPolicyPrivilegeFilterMatch() throws Exception { dataHubPolicyInfo.setActors(actorFilter); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "MANAGE_POLICIES", - Optional.empty()); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, resolvedAuthorizedUserSpec, "MANAGE_POLICIES", Optional.empty()); assertTrue(result.isGranted()); // Verify no network calls @@ -211,7 +237,10 @@ public void testEvaluatePolicyActorFilterUserMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert Authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -248,7 +277,10 @@ public void testEvaluatePolicyActorFilterUserNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. 
PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, buildEntityResolvers(CORP_USER_ENTITY_NAME, "urn:li:corpuser:test"), "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + buildEntityResolvers(CORP_USER_ENTITY_NAME, "urn:li:corpuser:test"), + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -285,7 +317,10 @@ public void testEvaluatePolicyActorFilterGroupMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -321,7 +356,10 @@ public void testEvaluatePolicyActorFilterGroupNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -358,14 +396,18 @@ public void testEvaluatePolicyActorFilterRoleMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult authorizedResult = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(authorizedResult.isGranted()); // Verify we are only calling for roles during these requests. - verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), - eq(Collections.singleton(authorizedUserUrn)), any(), any()); + verify(_entityClient, times(1)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), any(), any()); } @Test @@ -397,14 +439,21 @@ public void testEvaluatePolicyActorFilterNoRoleMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult unauthorizedResult = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(unauthorizedResult.isGranted()); // Verify we are only calling for roles during these requests. 
- verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), - eq(Collections.singleton(unauthorizedUserUrn)), any(), any()); + verify(_entityClient, times(1)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(unauthorizedUserUrn)), + any(), + any()); } @Test @@ -432,13 +481,19 @@ public void testEvaluatePolicyActorFilterAllUsersMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result2.isGranted()); @@ -471,13 +526,19 @@ public void testEvaluatePolicyActorFilterAllGroupsMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -509,17 +570,30 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersMatch() throws Except final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. 
PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -542,7 +616,8 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setResourceOwnersTypes(new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); + actorFilter.setResourceOwnersTypes( + new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -552,18 +627,32 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspectWithTypeUrn(OWNERSHIP_TYPE_URN).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(createOwnershipAspectWithTypeUrn(OWNERSHIP_TYPE_URN).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), - Collections.emptySet()); - + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), + Collections.emptySet()); + PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec)); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Verify no network calls @@ -585,7 +674,8 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setResourceOwnersTypes(new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); + actorFilter.setResourceOwnersTypes( + new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -595,17 +685,33 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspectWithTypeUrn(OTHER_OWNERSHIP_TYPE_URN).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(createOwnershipAspectWithTypeUrn(OTHER_OWNERSHIP_TYPE_URN).data()))); 
entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), Collections.emptySet()); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), + Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec)); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertFalse(result1.isGranted()); // Verify no network calls @@ -636,17 +742,30 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersMatch() throws Excep final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(false, true).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(false, true).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_GROUP), Collections.emptySet(), + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_GROUP), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -678,7 +797,10 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersNoMatch() throws Exc ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -708,9 +830,13 @@ public void testEvaluatePolicyResourceFilterAllResourcesMatch() throws Exception dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_. 
+ buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -739,9 +865,13 @@ public void testEvaluatePolicyResourceFilterAllResourcesNoMatch() throws Excepti resourceFilter.setType("dataset"); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("chart", RESOURCE_URN); // Notice: Not a dataset. + ResolvedEntitySpec resourceSpec = + buildEntityResolvers("chart", RESOURCE_URN); // Notice: Not a dataset. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -776,7 +906,10 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchLegacy() throws ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -801,14 +934,21 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatch() throws Excep dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -833,15 +973,23 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatch() throws Exc dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
+ buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A resource not covered by the policy. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -866,15 +1014,27 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchDomain() throws dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), Collections.singleton(DOMAIN_URN), Collections.emptySet()); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -899,15 +1059,27 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatchDomain() thro dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton("urn:li:domain:domain2"), Collections.emptySet()); // Domain doesn't match + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet()); // Domain doesn't match PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -933,9 +1105,13 @@ public void testGetGrantedPrivileges() throws Exception { dataHubPolicyInfo1.setActors(actorFilter1); final DataHubResourceFilter resourceFilter1 = new DataHubResourceFilter(); - resourceFilter1.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter1.setFilter( + FilterUtils.newFilter( + 
ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo1.setResources(resourceFilter1); // Policy 2, match dataset type and resource @@ -954,9 +1130,13 @@ public void testGetGrantedPrivileges() throws Exception { dataHubPolicyInfo2.setActors(actorFilter2); final DataHubResourceFilter resourceFilter2 = new DataHubResourceFilter(); - resourceFilter2.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter2.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo2.setResources(resourceFilter2); // Policy 3, match dataset type and owner (legacy resource filter) @@ -982,43 +1162,80 @@ public void testGetGrantedPrivileges() throws Exception { final List<DataHubPolicyInfo> policies = ImmutableList.of(dataHubPolicyInfo1, dataHubPolicyInfo2, dataHubPolicyInfo3); - assertEquals(_policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.empty()), + assertEquals( + _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.empty()), Collections.emptyList()); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Everything matches + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Everything matches assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2")); - resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton("urn:li:domain:domain2"), Collections.emptySet()); // Domain doesn't match + resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet()); // Domain doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_2_1", "PRIVILEGE_2_2")); - resourceSpec = buildEntityResolvers("dataset", "urn:li:dataset:random", Collections.emptySet(), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Resource doesn't match + resourceSpec = + buildEntityResolvers( + "dataset", + "urn:li:dataset:random", + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Resource doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1")); final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new 
EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); - resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Is owner + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); + resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.singleton(AUTHORIZED_PRINCIPAL), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Is owner assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2", "PRIVILEGE_3")); - resourceSpec = buildEntityResolvers("chart", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Resource type doesn't match + resourceSpec = + buildEntityResolvers( + "chart", + RESOURCE_URN, + Collections.singleton(AUTHORIZED_PRINCIPAL), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Resource type doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), Collections.emptyList()); } @@ -1037,10 +1254,16 @@ public void testGetMatchingActorsResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(true); actorFilter.setAllGroups(true); - actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2")))); - actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setUsers( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")))); + actorFilter.setGroups( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")))); actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:role:Admin")))); dataHubPolicyInfo.setActors(actorFilter); @@ -1053,27 +1276,43 @@ public void testGetMatchingActorsResourceMatch() throws Exception { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP), - Collections.emptySet(), Collections.emptySet()); - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP), + Collections.emptySet(), + Collections.emptySet()); + 
PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertTrue(actors.getAllUsers()); assertTrue(actors.getAllGroups()); - assertEquals(actors.getUsers(), - ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2"), + assertEquals( + actors.getUsers(), + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2"), Urn.createFromString(AUTHORIZED_PRINCIPAL) // Resource Owner - )); + )); - assertEquals(actors.getGroups(), ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2"), Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner - )); + assertEquals( + actors.getGroups(), + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2"), + Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner + )); assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:role:Admin"))); // Verify aspect client called, entity client not called. - verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(null), any()); + verify(_entityClient, times(0)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(null), + any()); } @Test @@ -1091,10 +1330,16 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(true); actorFilter.setAllGroups(true); - actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2")))); - actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setUsers( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")))); + actorFilter.setGroups( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1106,14 +1351,16 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
+ PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertFalse(actors.getAllUsers()); assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), Collections.emptyList()); assertEquals(actors.getGroups(), Collections.emptyList()); - //assertEquals(actors.getRoles(), Collections.emptyList()); + // assertEquals(actors.getRoles(), Collections.emptyList()); // Verify no network calls verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any()); @@ -1133,7 +1380,8 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); + actorFilter.setRoles( + new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1144,24 +1392,36 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception { resourceFilter.setResources(resourceUrns); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(), - Collections.emptySet(), Collections.emptySet()); + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(), + Collections.emptySet(), + Collections.emptySet()); - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertFalse(actors.getAllUsers()); assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), ImmutableList.of()); assertEquals(actors.getGroups(), ImmutableList.of()); - assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); + assertEquals( + actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); // Verify aspect client called, entity client not called. 
- verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(null), any()); + verify(_entityClient, times(0)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(null), + any()); } - private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) throws Exception { + private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) + throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); @@ -1180,7 +1440,8 @@ private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolea } ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -1194,7 +1455,8 @@ private Ownership createOwnershipAspectWithTypeUrn(final String typeUrn) throws owners.add(userOwner); ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -1206,7 +1468,9 @@ private EntityResponse createAuthorizedEntityResponse() throws URISyntaxExceptio final UrnArray roles = new UrnArray(); roles.add(Urn.createFromString("urn:li:dataHubRole:admin")); rolesAspect.setRoles(roles); - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); entityResponse.setAspects(aspectMap); return entityResponse; @@ -1220,24 +1484,41 @@ private EntityResponse createUnauthorizedEntityResponse() throws URISyntaxExcept final UrnArray roles = new UrnArray(); roles.add(Urn.createFromString("urn:li:dataHubRole:reader")); rolesAspect.setRoles(roles); - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); entityResponse.setAspects(aspectMap); return entityResponse; } public static ResolvedEntitySpec buildEntityResolvers(String entityType, String entityUrn) { - return buildEntityResolvers(entityType, entityUrn, Collections.emptySet(), Collections.emptySet(), Collections.emptySet()); + return buildEntityResolvers( + entityType, + entityUrn, + Collections.emptySet(), + Collections.emptySet(), + Collections.emptySet()); } - public static ResolvedEntitySpec buildEntityResolvers(String entityType, String entityUrn, Set<String> owners, - Set<String> domains, Set<String> groups) { - return new ResolvedEntitySpec(new EntitySpec(entityType, entityUrn), - ImmutableMap.of(EntityFieldType.TYPE, - FieldResolver.getResolverFromValues(Collections.singleton(entityType)), EntityFieldType.URN, - FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)), EntityFieldType.OWNER, - FieldResolver.getResolverFromValues(owners), EntityFieldType.DOMAIN, - FieldResolver.getResolverFromValues(domains), EntityFieldType.GROUP_MEMBERSHIP, + public static ResolvedEntitySpec buildEntityResolvers( + String entityType, + String 
entityUrn, + Set<String> owners, + Set<String> domains, + Set<String> groups) { + return new ResolvedEntitySpec( + new EntitySpec(entityType, entityUrn), + ImmutableMap.of( + EntityFieldType.TYPE, + FieldResolver.getResolverFromValues(Collections.singleton(entityType)), + EntityFieldType.URN, + FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)), + EntityFieldType.OWNER, + FieldResolver.getResolverFromValues(owners), + EntityFieldType.DOMAIN, + FieldResolver.getResolverFromValues(domains), + EntityFieldType.GROUP_MEMBERSHIP, FieldResolver.getResolverFromValues(groups))); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java index 52a8d2454ffba..ca9ee92b77a5a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java @@ -1,5 +1,7 @@ package com.datahub.authorization; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -10,9 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class RoleServiceTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; @@ -35,21 +34,23 @@ public void setupTest() throws Exception { @Test public void testBatchAssignRoleNoActorExists() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - false); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(false); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING), - roleUrn, - SYSTEM_AUTHENTICATION); + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); } @Test public void testBatchAssignRoleSomeActorExists() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); @@ -57,12 +58,15 @@ public void testBatchAssignRoleSomeActorExists() throws Exception { @Test public void testBatchAssignRoleAllActorsExist() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); - when(_entityClient.exists(eq(Urn.createFromString(SECOND_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); + when(_entityClient.exists( + 
eq(Urn.createFromString(SECOND_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, times(2)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); @@ -70,10 +74,12 @@ public void testBatchAssignRoleAllActorsExist() throws Exception { @Test public void testAssignNullRoleToActorAllActorsExist() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING), null, SYSTEM_AUTHENTICATION); + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING), null, SYSTEM_AUTHENTICATION); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java index 5c7d87f1c05a9..d5d5b0c4e6c71 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java @@ -40,10 +40,8 @@ public class DataPlatformInstanceFieldResolverProviderTest { "urn:li:dataset:(urn:li:dataPlatform:s3,test-platform-instance.testDataset,PROD)"; private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); - @Mock - private EntityClient entityClientMock; - @Mock - private Authentication systemAuthenticationMock; + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; private DataPlatformInstanceFieldResolverProvider dataPlatformInstanceFieldResolverProvider; @@ -56,37 +54,42 @@ public void setup() { @Test public void shouldReturnDataPlatformInstanceType() { - assertEquals(EntityFieldType.DATA_PLATFORM_INSTANCE, dataPlatformInstanceFieldResolverProvider.getFieldTypes().get(0)); + assertEquals( + EntityFieldType.DATA_PLATFORM_INSTANCE, + dataPlatformInstanceFieldResolverProvider.getFieldTypes().get(0)); } @Test public void shouldReturnFieldValueWithResourceSpecIfTypeIsDataPlatformInstance() { - var resourceSpec = new EntitySpec(DATA_PLATFORM_INSTANCE_ENTITY_NAME, DATA_PLATFORM_INSTANCE_URN); + var resourceSpec = + new EntitySpec(DATA_PLATFORM_INSTANCE_ENTITY_NAME, DATA_PLATFORM_INSTANCE_URN); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(resourceSpec); - assertEquals(Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); + assertEquals( + Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); verifyZeroInteractions(entityClientMock); } @Test - public void shouldReturnEmptyFieldValueWhenResponseIsNull() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + 
throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(null); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test @@ -95,99 +98,104 @@ public void shouldReturnEmptyFieldValueWhenResourceHasNoDataPlatformInstance() var entityResponseMock = mock(EntityResponse.class); when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test - public void shouldReturnEmptyFieldValueWhenThereIsAnException() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenThrow(new RemoteInvocationException()); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnEmptyFieldValueWhenDataPlatformInstanceHasNoInstance() throws RemoteInvocationException, URISyntaxException { - var dataPlatform = new DataPlatformInstance() - 
.setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); + var dataPlatform = + new DataPlatformInstance().setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, + envelopedAspectMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(dataPlatform.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithDataPlatformInstanceOfTheResource() throws RemoteInvocationException, URISyntaxException { - var dataPlatformInstance = new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")) - .setInstance(Urn.createFromString(DATA_PLATFORM_INSTANCE_URN)); + var dataPlatformInstance = + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")) + .setInstance(Urn.createFromString(DATA_PLATFORM_INSTANCE_URN)); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, + envelopedAspectMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(dataPlatformInstance.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); - assertEquals(Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + assertEquals( + Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } } diff --git 
a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java index af547f14cd3fc..542f6c9f8bc79 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java @@ -1,5 +1,12 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; @@ -15,32 +22,23 @@ import com.linkedin.identity.GroupMembership; import com.linkedin.identity.NativeGroupMembership; import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.Set; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.net.URISyntaxException; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - public class GroupMembershipFieldResolverProviderTest { private static final String CORPGROUP_URN = "urn:li:corpGroup:groupname"; private static final String NATIVE_CORPGROUP_URN = "urn:li:corpGroup:nativegroupname"; - private static final String RESOURCE_URN = "urn:li:dataset:(urn:li:dataPlatform:testPlatform,testDataset,PROD)"; + private static final String RESOURCE_URN = + "urn:li:dataset:(urn:li:dataPlatform:testPlatform,testDataset,PROD)"; private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); - @Mock - private EntityClient entityClientMock; - @Mock - private Authentication systemAuthenticationMock; + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; private GroupMembershipFieldResolverProvider groupMembershipFieldResolverProvider; @@ -53,27 +51,30 @@ public void setup() { @Test public void shouldReturnGroupsMembershipType() { - assertEquals(EntityFieldType.GROUP_MEMBERSHIP, groupMembershipFieldResolverProvider.getFieldTypes().get(0)); + assertEquals( + EntityFieldType.GROUP_MEMBERSHIP, + groupMembershipFieldResolverProvider.getFieldTypes().get(0)); } @Test - public void shouldReturnEmptyFieldValueWhenResponseIsNull() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(null); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, 
NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test @@ -82,131 +83,144 @@ public void shouldReturnEmptyFieldValueWhenResourceDoesNotBelongToAnyGroup() var entityResponseMock = mock(EntityResponse.class); when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test - public void shouldReturnEmptyFieldValueWhenThereIsAnException() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenThrow(new RemoteInvocationException()); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithOnlyGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var groupMembership = new GroupMembership().setGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); + var groupMembership = + new GroupMembership() + .setGroups(new 
UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertEquals(Set.of(CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithOnlyNativeGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var nativeGroupMembership = new NativeGroupMembership().setNativeGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); + var nativeGroupMembership = + new NativeGroupMembership() + .setNativeGroups( + new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertEquals(Set.of(NATIVE_CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithGroupsAndNativeGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var groupMembership = new GroupMembership().setGroups( - new 
UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); - var nativeGroupMembership = new NativeGroupMembership().setNativeGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); + var groupMembership = + new GroupMembership() + .setGroups(new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); + var nativeGroupMembership = + new NativeGroupMembership() + .setNativeGroups( + new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))); - envelopedAspectMap.put(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); - assertEquals(Set.of(CORPGROUP_URN, NATIVE_CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + assertEquals( + Set.of(CORPGROUP_URN, NATIVE_CORPGROUP_URN), + result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java index 2e974d309f127..a0bbe69691db4 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.telemetry; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -19,16 +23,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class TrackingServiceTest { private static final String APP_VERSION_FIELD = "appVersion"; private static final String APP_VERSION = "1.0.0"; private static final String CLIENT_ID = "testClientId"; - private static final TelemetryClientId TELEMETRY_CLIENT_ID = new 
TelemetryClientId().setClientId(CLIENT_ID); + private static final TelemetryClientId TELEMETRY_CLIENT_ID = + new TelemetryClientId().setClientId(CLIENT_ID); private static final String NOT_ALLOWED_FIELD = "browserId"; private static final String NOT_ALLOWED_FIELD_VALUE = "testBrowserId"; private static final String EVENT_TYPE_FIELD = "type"; @@ -38,10 +38,17 @@ public class TrackingServiceTest { private static final String ACTOR_URN_STRING = "urn:li:corpuser:user"; private static final String HASHED_ACTOR_URN_STRING = "hashedActorUrn"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private Urn _clientIdUrn; private JSONObject _mixpanelMessage; private MixpanelAPI _mixpanelAPI; @@ -62,19 +69,28 @@ public void setupTest() { GitVersion gitVersion = new GitVersion(APP_VERSION, "", Optional.empty()); _trackingService = - new TrackingService(_mixpanelAPI, _mixpanelMessageBuilder, _secretService, _entityService, gitVersion); + new TrackingService( + _mixpanelAPI, _mixpanelMessageBuilder, _secretService, _entityService, gitVersion); } @Test public void testEmitAnalyticsEvent() throws IOException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); when(_entityService.exists(_clientIdUrn)).thenReturn(true); - when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))).thenReturn(TELEMETRY_CLIENT_ID); - when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())).thenReturn(_mixpanelMessage); + when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) + .thenReturn(TELEMETRY_CLIENT_ID); + when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())) + .thenReturn(_mixpanelMessage); final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, ACTOR_URN_FIELD, - ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, + EVENT_TYPE, + ACTOR_URN_FIELD, + ACTOR_URN_STRING, + NOT_ALLOWED_FIELD, + NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); _trackingService.emitAnalyticsEvent(event); @@ -84,7 +100,8 @@ public void testEmitAnalyticsEvent() throws IOException { @Test public void testGetClientIdAlreadyExists() { when(_entityService.exists(_clientIdUrn)).thenReturn(true); - when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))).thenReturn(TELEMETRY_CLIENT_ID); + when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) + .thenReturn(TELEMETRY_CLIENT_ID); assertEquals(CLIENT_ID, _trackingService.getClientId()); } @@ -94,15 +111,17 @@ public void testGetClientIdDoesNotExist() { when(_entityService.exists(_clientIdUrn)).thenReturn(false); assertNotNull(_trackingService.getClientId()); - verify(_entityService, times(1)).ingestAspectIfNotPresent(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT), - any(TelemetryClientId.class), any(), eq(null)); + 
verify(_entityService, times(1)) + .ingestAspectIfNotPresent( + eq(_clientIdUrn), eq(CLIENT_ID_ASPECT), any(TelemetryClientId.class), any(), eq(null)); } @Test public void testSanitizeEventNoEventType() throws JsonProcessingException, JSONException { final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\"}", ACTOR_URN_FIELD, ACTOR_URN_STRING, NOT_ALLOWED_FIELD, - NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\"}", + ACTOR_URN_FIELD, ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); @@ -116,8 +135,9 @@ public void testSanitizeEventNoEventType() throws JsonProcessingException, JSONE @Test public void testSanitizeEventNoActorUrn() throws JsonProcessingException, JSONException { final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, NOT_ALLOWED_FIELD, - NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, EVENT_TYPE, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); @@ -133,8 +153,14 @@ public void testSanitizeEvent() throws JsonProcessingException, JSONException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, ACTOR_URN_FIELD, - ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, + EVENT_TYPE, + ACTOR_URN_FIELD, + ACTOR_URN_STRING, + NOT_ALLOWED_FIELD, + NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index 34354a47b7f04..4e9fe3e335dc3 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -1,13 +1,15 @@ package com.datahub.auth.authentication; -import com.datahub.authentication.invite.InviteTokenService; -import com.datahub.authentication.token.StatelessTokenService; -import com.datahub.authentication.token.TokenType; -import com.datahub.authentication.user.NativeUserService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.token.StatelessTokenService; +import com.datahub.authentication.token.TokenType; +import com.datahub.authentication.user.NativeUserService; import com.datahub.telemetry.TrackingService; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -29,9 +31,6 @@ import org.springframework.web.bind.annotation.RestController; import 
org.springframework.web.client.HttpClientErrorException; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RestController public class AuthServiceController { @@ -46,55 +45,49 @@ public class AuthServiceController { private static final String INVITE_TOKEN_FIELD_NAME = "inviteToken"; private static final String RESET_TOKEN_FIELD_NAME = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD_NAME = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD_NAME = "doesPasswordMatch"; - @Inject - StatelessTokenService _statelessTokenService; + @Inject StatelessTokenService _statelessTokenService; - @Inject - Authentication _systemAuthentication; + @Inject Authentication _systemAuthentication; @Inject @Qualifier("configurationProvider") ConfigurationProvider _configProvider; - @Inject - NativeUserService _nativeUserService; + @Inject NativeUserService _nativeUserService; - @Inject - InviteTokenService _inviteTokenService; + @Inject InviteTokenService _inviteTokenService; - @Inject - @Nullable - TrackingService _trackingService; + @Inject @Nullable TrackingService _trackingService; /** - * Generates a JWT access token for as user UI session, provided a unique "user id" to generate the token for inside a JSON - * POST body. + * Generates a JWT access token for as user UI session, provided a unique "user id" to generate + * the token for inside a JSON POST body. * - * Example Request: + * <p>Example Request: * - * POST /generateSessionTokenForUser -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "userId": "datahub" - * } + * <p>POST /generateSessionTokenForUser -H "Authorization: Basic + * <system-client-id>:<system-client-secret>" { "userId": "datahub" } * - * Example Response: + * <p>Example Response: * - * { - * "accessToken": "<the access token>" - * } + * <p>{ "accessToken": "<the access token>" } */ @PostMapping(value = "/generateSessionTokenForUser", produces = "application/json;charset=utf-8") - CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser(final HttpEntity<String> httpEntity) { + CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser( + final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson = null; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to generate session token %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to generate session token %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -110,46 +103,45 @@ CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser(final Http log.debug(String.format("Attempting to generate session token for user %s", userId.asText())); final String actorId = AuthenticationContext.getAuthentication().getActor().getId(); - return CompletableFuture.supplyAsync(() -> { - // 1. Verify that only those authorized to generate a token (datahub system) are able to. - if (isAuthorizedToGenerateSessionToken(actorId)) { - try { - // 2. 
Generate a new DataHub JWT - final String token = _statelessTokenService.generateAccessToken( - TokenType.SESSION, - new Actor(ActorType.USER, userId.asText()), - _configProvider.getAuthentication().getSessionTokenDurationMs()); - return new ResponseEntity<>(buildTokenResponse(token), HttpStatus.OK); - } catch (Exception e) { - log.error("Failed to generate session token for user", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - throw HttpClientErrorException.create(HttpStatus.UNAUTHORIZED, "Unauthorized to perform this action.", new HttpHeaders(), null, null); - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. Verify that only those authorized to generate a token (datahub system) are able to. + if (isAuthorizedToGenerateSessionToken(actorId)) { + try { + // 2. Generate a new DataHub JWT + final String token = + _statelessTokenService.generateAccessToken( + TokenType.SESSION, + new Actor(ActorType.USER, userId.asText()), + _configProvider.getAuthentication().getSessionTokenDurationMs()); + return new ResponseEntity<>(buildTokenResponse(token), HttpStatus.OK); + } catch (Exception e) { + log.error("Failed to generate session token for user", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + } + throw HttpClientErrorException.create( + HttpStatus.UNAUTHORIZED, + "Unauthorized to perform this action.", + new HttpHeaders(), + null, + null); + }); } /** - * Creates a native DataHub user using the provided full name, email and password. The provided invite token must - * be current otherwise a new user will not be created. + * Creates a native DataHub user using the provided full name, email and password. The provided + * invite token must be current otherwise a new user will not be created. 
* - * Example Request: + * <p>Example Request: * - * POST /signUp -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "fullName": "Full Name" - * "userUrn": "urn:li:corpuser:test" - * "email": "email@test.com" - * "title": "Data Scientist" - * "password": "password123" - * "inviteToken": "abcd" - * } + * <p>POST /signUp -H "Authorization: Basic <system-client-id>:<system-client-secret>" { + * "fullName": "Full Name" "userUrn": "urn:li:corpuser:test" "email": "email@test.com" "title": + * "Data Scientist" "password": "password123" "inviteToken": "abcd" } * - * Example Response: + * <p>Example Response: * - * { - * "isNativeUserCreated": true - * } + * <p>{ "isNativeUserCreated": true } */ @PostMapping(value = "/signUp", produces = "application/json;charset=utf-8") CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEntity) { @@ -159,7 +151,8 @@ CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEn try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + log.error( + String.format("Failed to parse json while attempting to create native user %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -174,15 +167,22 @@ CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEn JsonNode title = bodyJson.get(TITLE_FIELD_NAME); JsonNode password = bodyJson.get(PASSWORD_FIELD_NAME); JsonNode inviteToken = bodyJson.get(INVITE_TOKEN_FIELD_NAME); - if (fullName == null || userUrn == null || email == null || title == null || password == null + if (fullName == null + || userUrn == null + || email == null + || title == null + || password == null || inviteToken == null) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } String userUrnString = userUrn.asText(); - String systemClientUser = new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); + String systemClientUser = + new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); - if (userUrnString.equals(systemClientUser) || userUrnString.equals(DATAHUB_ACTOR) || userUrnString.equals(UNKNOWN_ACTOR)) { + if (userUrnString.equals(systemClientUser) + || userUrnString.equals(DATAHUB_ACTOR) + || userUrnString.equals(UNKNOWN_ACTOR)) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } String fullNameString = fullName.asText(); @@ -192,53 +192,52 @@ CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEn String inviteTokenString = inviteToken.asText(); Authentication auth = AuthenticationContext.getAuthentication(); log.debug(String.format("Attempting to create native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, auth)) { - log.error(String.format("Invalid invite token %s", inviteTokenString)); - return new ResponseEntity<>(HttpStatus.BAD_REQUEST); - } + return CompletableFuture.supplyAsync( + () -> { + try { + Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, auth)) { + log.error(String.format("Invalid invite token %s", 
inviteTokenString)); + return new ResponseEntity<>(HttpStatus.BAD_REQUEST); + } - _nativeUserService.createNativeUser(userUrnString, fullNameString, emailString, titleString, passwordString, - auth); - String response = buildSignUpResponse(); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to create credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + _nativeUserService.createNativeUser( + userUrnString, fullNameString, emailString, titleString, passwordString, auth); + String response = buildSignUpResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to create credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } /** - * Resets the credentials for a native DataHub user using the provided email and new password. The provided reset - * token must be current otherwise the credentials will not be updated + * Resets the credentials for a native DataHub user using the provided email and new password. The + * provided reset token must be current otherwise the credentials will not be updated * - * Example Request: + * <p>Example Request: * - * POST /resetNativeUserCredentials -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "userUrn": "urn:li:corpuser:test" - * "password": "password123" - * "resetToken": "abcd" - * } + * <p>POST /resetNativeUserCredentials -H "Authorization: Basic + * <system-client-id>:<system-client-secret>" { "userUrn": "urn:li:corpuser:test" "password": + * "password123" "resetToken": "abcd" } * - * Example Response: + * <p>Example Response: * - * { - * "areNativeUserCredentialsReset": true - * } + * <p>{ "areNativeUserCredentialsReset": true } */ @PostMapping(value = "/resetNativeUserCredentials", produces = "application/json;charset=utf-8") - CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials(final HttpEntity<String> httpEntity) { + CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials( + final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + log.error( + String.format("Failed to parse json while attempting to create native user %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -259,45 +258,46 @@ CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials(final HttpE String resetTokenString = resetToken.asText(); Authentication auth = AuthenticationContext.getAuthentication(); log.debug(String.format("Attempting to reset credentials for native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - _nativeUserService.resetCorpUserCredentials(userUrnString, passwordString, resetTokenString, - auth); - String response = buildResetNativeUserCredentialsResponse(); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to reset credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return 
CompletableFuture.supplyAsync( + () -> { + try { + _nativeUserService.resetCorpUserCredentials( + userUrnString, passwordString, resetTokenString, auth); + String response = buildResetNativeUserCredentialsResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to reset credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } /** * Verifies the credentials for a native DataHub user. * - * Example Request: + * <p>Example Request: * - * POST /verifyNativeUserCredentials -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "userUrn": "urn:li:corpuser:test" - * "password": "password123" - * } + * <p>POST /verifyNativeUserCredentials -H "Authorization: Basic + * <system-client-id>:<system-client-secret>" { "userUrn": "urn:li:corpuser:test" "password": + * "password123" } * - * Example Response: + * <p>Example Response: * - * { - * "passwordMatches": true - * } + * <p>{ "passwordMatches": true } */ @PostMapping(value = "/verifyNativeUserCredentials", produces = "application/json;charset=utf-8") - CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials(final HttpEntity<String> httpEntity) { + CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials( + final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to verify native user password %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to verify native user password %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -315,21 +315,22 @@ CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials(final Http String userUrnString = userUrn.asText(); String passwordString = password.asText(); log.debug(String.format("Attempting to verify credentials for native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - boolean doesPasswordMatch = _nativeUserService.doesPasswordMatch(userUrnString, passwordString); - String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to verify credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + boolean doesPasswordMatch = + _nativeUserService.doesPasswordMatch(userUrnString, passwordString); + String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to verify credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } - /** - * Tracking endpoint - */ + /** Tracking endpoint */ @PostMapping(value = "/track", produces = "application/json;charset=utf-8") CompletableFuture<ResponseEntity<String>> track(final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); @@ -338,23 +339,26 @@ CompletableFuture<ResponseEntity<String>> track(final HttpEntity<String> 
httpEnt try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to track analytics event %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to track analytics event %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } - return CompletableFuture.supplyAsync(() -> { - try { - if (_trackingService != null) { - _trackingService.emitAnalyticsEvent(bodyJson); - } - return new ResponseEntity<>(HttpStatus.OK); - } catch (Exception e) { - log.error("Failed to track event", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (_trackingService != null) { + _trackingService.emitAnalyticsEvent(bodyJson); + } + return new ResponseEntity<>(HttpStatus.OK); + } catch (Exception e) { + log.error("Failed to track event", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } // Currently, only internal system is authorized to generate a token on behalf of a user! diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java index cf882f6ce6813..9f1aefd4cf17a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java @@ -2,12 +2,10 @@ import lombok.Data; - @Data public class AssetsConfiguration { - /** - * The url of the logo to render in the DataHub Application. - */ + /** The url of the logo to render in the DataHub Application. */ public String logoUrl; + public String faviconUrl; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java index 6a5c13970517a..eacbe7816f75c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data public class AuthPluginConfiguration { - /** - * Plugin base directory path, default to /etc/datahub/plugins/auth - */ + /** Plugin base directory path, default to /etc/datahub/plugins/auth */ String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java index 2374686b76d01..72dfbf84a4a00 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java @@ -1,15 +1,15 @@ package com.linkedin.metadata.config; import lombok.Data; -/** - * POJO representing the "datahub" configuration block in application.yml. - */ + +/** POJO representing the "datahub" configuration block in application.yml. 
*/ @Data public class DataHubConfiguration { /** - * Indicates the type of server that has been deployed: quickstart, prod, or a custom configuration + * Indicates the type of server that has been deployed: quickstart, prod, or a custom + * configuration */ public String serverType; private PluginConfiguration plugin; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java index 7c4394d07bf9c..8ebea29a32659 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java @@ -2,11 +2,11 @@ import lombok.Data; - @Data public class EntityProfileConfig { /** - * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not present. + * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not + * present. */ public String domainDefaultTab; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java index 4b00346a469c3..4fcbca3527d2a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java @@ -2,8 +2,7 @@ import lombok.Data; - @Data public class EntityRegistryPluginConfiguration { String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java index 5b10b59ff0c20..2f3c3dc3bd546 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java @@ -2,18 +2,12 @@ import lombok.Data; -/** - * POJO representing the "ingestion" configuration block in application.yml. - */ +/** POJO representing the "ingestion" configuration block in application.yml. 
*/ @Data public class IngestionConfiguration { - /** - * Whether managed ingestion is enabled - */ + /** Whether managed ingestion is enabled */ public boolean enabled; - /** - * The default CLI version to use in managed ingestion - */ + /** The default CLI version to use in managed ingestion */ public String defaultCliVersion; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java index 0645c1d7ea96c..5f34a6a5d4f05 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java @@ -2,28 +2,24 @@ import lombok.Data; - @Data public class PluginConfiguration { /** * Plugin security mode, either RESTRICTED or LENIENT * - * Note: Ideally the pluginSecurityMode should be of type com.datahub.plugin.common.SecurityMode from metadata-service/plugin, - * However avoiding to include metadata-service/plugin as dependency in this module (i.e. metadata-io) as some modules - * from metadata-service/ are dependent on metadata-io, so it might create a circular dependency + * <p>Note: Ideally the pluginSecurityMode should be of type + * com.datahub.plugin.common.SecurityMode from metadata-service/plugin. However, we avoid + * including metadata-service/plugin as a dependency of this module (i.e. metadata-io), since some + * modules from metadata-service/ depend on metadata-io, which might create a circular dependency. */ private String pluginSecurityMode; - /** - * Directory path of entity registry, default to /etc/datahub/plugins/models - */ + /** Directory path of entity registry, default to /etc/datahub/plugins/models */ private EntityRegistryPluginConfiguration entityRegistry; - /** - * The location where the Retention config files live - */ + + /** The location where the Retention config files live */ private RetentionPluginConfiguration retention; - /** - * Plugin framework's plugin base directory path, default to /etc/datahub/plugins/auth - */ + + /** Plugin framework's plugin base directory path, default to /etc/datahub/plugins/auth */ private AuthPluginConfiguration auth; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java index 912abf525631b..987df7f307d39 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PreProcessHooks { private boolean uiEnabled; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java index ef03206996823..cc80954afd27e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data public class QueriesTabConfig { - /** - * The number of queries to show on the queries tab.
- */ + /** The number of queries to show on the queries tab. */ public Integer queriesTabResultSize; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java index dde7ede34c659..d553f2e268509 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java @@ -2,8 +2,7 @@ import lombok.Data; - @Data public class RetentionPluginConfiguration { String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java index 7094bbd710f75..1901c433e82c8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java @@ -4,8 +4,6 @@ @Data public class SearchResultVisualConfig { - /** - * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not present. - */ - public Boolean enableNameHighlight; + /** Whether to enable name highlighting in search results. */ + public Boolean enableNameHighlight; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java index 96e4a1716974e..738a9684cc764 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java @@ -2,13 +2,9 @@ import lombok.Data; -/** - * POJO representing the "tests" configuration block in application.yml.on.yml - */ +/** POJO representing the "tests" configuration block in application.yml */ @Data public class TestsConfiguration { - /** - * Whether tests are enabled - */ + /** Whether tests are enabled */ public boolean enabled; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java index 89c7376dfd110..670a412ec285e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java @@ -2,13 +2,9 @@ import lombok.Data; -/** - * POJO representing the "views" configuration block in application.yml.on.yml - */ +/** POJO representing the "views" configuration block in application.yml */ @Data public class ViewsConfiguration { - /** - * Whether Views are enabled - */ + /** Whether Views are enabled */ public boolean enabled; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java index 14ac2406c2256..bc749a373c5b0 100644
--- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java @@ -2,29 +2,18 @@ import lombok.Data; - -/** - * POJO representing visualConfig block in the application.yml. - */ +/** POJO representing visualConfig block in the application.yml. */ @Data public class VisualConfiguration { - /** - * Asset related configurations - */ + /** Asset related configurations */ public AssetsConfiguration assets; - /** - * Queries tab related configurations - */ + /** Queries tab related configurations */ public QueriesTabConfig queriesTab; - /** - * Queries tab related configurations - */ + /** Entity profile related configurations */ public EntityProfileConfig entityProfile; - /** - * Search result related configurations - */ + /** Search result related configurations */ public SearchResultVisualConfig searchResult; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java index aff0e23e3b337..70601b8a69fe6 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.cache.client.ClientCacheConfiguration; import lombok.Data; - @Data public class CacheConfiguration { PrimaryCacheConfiguration primary; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java index 3bd7ea1758b2b..9a684ee92f3f8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class EntityDocCountCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java index 3e60c4bf587e1..be39e71bb1b52 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class HomepageCacheConfiguration { EntityDocCountCacheConfiguration entityCounts; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java index a1b08695e4089..001eb41f05cb7 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PrimaryCacheConfiguration { long ttlSeconds;
diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java index 290b566caf962..ab686cc266b9f 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchCacheConfiguration { SearchLineageCacheConfiguration lineage; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java index a121900435b1f..b8fb371dfa13c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchLineageCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java index 3cf7ef20797bb..32136929d4875 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.config.cache.client; - public interface ClientCacheConfig { - boolean isEnabled(); - boolean isStatsEnabled(); - int getStatsIntervalSeconds(); - int getDefaultTTLSeconds(); - int getMaxBytes(); + boolean isEnabled(); + + boolean isStatsEnabled(); + + int getStatsIntervalSeconds(); + + int getDefaultTTLSeconds(); + + int getMaxBytes(); } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java index d940bbe135e55..7564ee978e2bd 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java @@ -4,6 +4,6 @@ @Data public class ClientCacheConfiguration { - EntityClientCacheConfig entityClient; - UsageClientCacheConfig usageClient; + EntityClientCacheConfig entityClient; + UsageClientCacheConfig usageClient; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java index 595b614f2f599..3bb37373db0eb 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java @@ -1,17 +1,16 @@ package com.linkedin.metadata.config.cache.client; -import 
lombok.Data; - import java.util.Map; +import lombok.Data; @Data public class EntityClientCacheConfig implements ClientCacheConfig { - private boolean enabled; - private boolean statsEnabled; - private int statsIntervalSeconds; - private int defaultTTLSeconds; - private int maxBytes; + private boolean enabled; + private boolean statsEnabled; + private int statsIntervalSeconds; + private int defaultTTLSeconds; + private int maxBytes; - // entityName -> aspectName -> cache ttl override - private Map<String, Map<String, Integer>> entityAspectTTLSeconds; + // entityName -> aspectName -> cache ttl override + private Map<String, Map<String, Integer>> entityAspectTTLSeconds; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java index 3aebec9422ed8..f5a9c24c4b188 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java @@ -4,9 +4,9 @@ @Data public class UsageClientCacheConfig implements ClientCacheConfig { - private boolean enabled; - private boolean statsEnabled; - private int statsIntervalSeconds; - private int defaultTTLSeconds; - private int maxBytes; + private boolean enabled; + private boolean statsEnabled; + private int statsIntervalSeconds; + private int defaultTTLSeconds; + private int maxBytes; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index 7a93119226a2d..b505674f2ed9c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ConsumerConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java index 26a8c6b649133..960baa9cd9172 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ProducerConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java index 7a8594853e0d0..ac0c248f5e559 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SchemaRegistryConfiguration { private String type; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java 
b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java index 74db6fb9719d4..70ffa59ea40e2 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class BuildIndicesConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java index 7a0292c2adec1..82e3868fa3974 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java @@ -1,17 +1,15 @@ package com.linkedin.metadata.config.search; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; import lombok.Data; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; - - @Data @Slf4j public class CustomConfiguration { @@ -20,6 +18,7 @@ public class CustomConfiguration { /** * Materialize the search configuration from a location external to main application.yml + * * @param mapper yaml enabled jackson mapper * @return search configuration class * @throws IOException diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java index 30679bbaab9ce..130620a9ab918 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ElasticSearchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java index 89636ee3c47c5..b471116ebe349 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ExactMatchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java index 816a7e41470f5..6f3e1cb278f5f 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java +++ 
b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class GraphQueryConfiguration { @@ -11,6 +10,7 @@ public class GraphQueryConfiguration { private int maxResult; public static GraphQueryConfiguration testDefaults; + static { testDefaults = new GraphQueryConfiguration(); testDefaults.setBatchSize(1000); diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java index 5d24248be3650..5dbdcfb269a77 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PartialConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java index b2b5260dc5e70..befce024fbc1a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java index 624d2a4c63c4c..fcce110a56d9c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class WordGramConfiguration { private float twoGramFactor; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java index 460501cc91075..652aa0cc8842e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java @@ -7,21 +7,19 @@ import lombok.Getter; import lombok.ToString; - @Builder(toBuilder = true) @Getter @ToString @EqualsAndHashCode @JsonDeserialize(builder = BoolQueryConfiguration.BoolQueryConfigurationBuilder.class) public class BoolQueryConfiguration { - private Object must; - private Object should; - //CHECKSTYLE:OFF - private Object must_not; - //CHECKSTYLE:ON - private Object filter; + private Object must; + private Object should; + // CHECKSTYLE:OFF + private Object must_not; + // CHECKSTYLE:ON + private Object filter; - @JsonPOJOBuilder(withPrefix = "") - public static class BoolQueryConfigurationBuilder { - } + @JsonPOJOBuilder(withPrefix = "") + public static class BoolQueryConfigurationBuilder {} } diff --git 
a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java index 15deea7620e3d..e6756ca8f0da8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java @@ -2,13 +2,11 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import java.util.List; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - - @Builder(toBuilder = true) @Getter @EqualsAndHashCode @@ -18,6 +16,5 @@ public class CustomSearchConfiguration { private List<QueryConfiguration> queryConfigurations; @JsonPOJOBuilder(withPrefix = "") - public static class CustomSearchConfigurationBuilder { - } + public static class CustomSearchConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java index cd4364a64a0c5..901bf803d2bca 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java @@ -2,15 +2,13 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import java.util.Map; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; import lombok.extern.slf4j.Slf4j; -import java.util.Map; - - @Slf4j @Builder(toBuilder = true) @Getter @@ -19,17 +17,13 @@ @JsonDeserialize(builder = QueryConfiguration.QueryConfigurationBuilder.class) public class QueryConfiguration { - private String queryRegex; - @Builder.Default - private boolean simpleQuery = true; - @Builder.Default - private boolean exactMatchQuery = true; - @Builder.Default - private boolean prefixMatchQuery = true; - private BoolQueryConfiguration boolQuery; - private Map<String, Object> functionScore; + private String queryRegex; + @Builder.Default private boolean simpleQuery = true; + @Builder.Default private boolean exactMatchQuery = true; + @Builder.Default private boolean prefixMatchQuery = true; + private BoolQueryConfiguration boolQuery; + private Map<String, Object> functionScore; - @JsonPOJOBuilder(withPrefix = "") - public static class QueryConfigurationBuilder { - } + @JsonPOJOBuilder(withPrefix = "") + public static class QueryConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java index 3821cbbed83e8..aa6825360a2df 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java @@ -1,26 +1,19 @@ package com.linkedin.metadata.config.telemetry; import lombok.Data; -/** - * POJO 
representing the "telemetry" configuration block in application.yml. - */ + +/** POJO representing the "telemetry" configuration block in application.yml. */ @Data public class TelemetryConfiguration { - /** - * Whether cli telemetry is enabled - */ - public boolean enabledCli; - /** - * Whether reporting telemetry is enabled - */ - public boolean enabledIngestion; - /** - * Whether or not third party logging should be enabled for this instance - */ - public boolean enableThirdPartyLogging; + /** Whether cli telemetry is enabled */ + public boolean enabledCli; + + /** Whether reporting telemetry is enabled */ + public boolean enabledIngestion; + + /** Whether or not third party logging should be enabled for this instance */ + public boolean enableThirdPartyLogging; - /** - * Whether or not server telemetry should be enabled - */ - public boolean enabledServer; -} \ No newline at end of file + /** Whether or not server telemetry should be enabled */ + public boolean enabledServer; +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java index c10399c4f3e70..f84ac9ec8bfe3 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java @@ -1,18 +1,14 @@ package com.linkedin.metadata.spring; +import java.io.IOException; +import java.util.Properties; import org.springframework.beans.factory.config.YamlPropertiesFactoryBean; import org.springframework.core.env.PropertiesPropertySource; import org.springframework.core.env.PropertySource; import org.springframework.core.io.support.EncodedResource; import org.springframework.core.io.support.PropertySourceFactory; -import java.io.IOException; -import java.util.Properties; - - -/** - * Required for Spring to parse the application.yml provided by this module - */ +/** Required for Spring to parse the application.yml provided by this module */ public class YamlPropertySourceFactory implements PropertySourceFactory { @Override @@ -25,4 +21,4 @@ public PropertySource<?> createPropertySource(String name, EncodedResource encod return new PropertiesPropertySource(encodedResource.getResource().getFilename(), properties); } -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java index d506cf972c255..576969a1032dd 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class BatchWriteOperationsOptions { private int batchSize; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java index b90257870a8b2..ab6990dcf0603 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java @@ 
-1,13 +1,13 @@ package com.linkedin.gms.factory.auth; +import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizerChain; +import com.datahub.authorization.AuthorizerContext; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.DefaultEntitySpecResolver; +import com.datahub.authorization.EntitySpecResolver; import com.datahub.plugins.PluginConstant; -import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; -import com.datahub.authorization.AuthorizerContext; -import com.datahub.authorization.EntitySpecResolver; import com.datahub.plugins.common.PluginConfig; import com.datahub.plugins.common.PluginPermissionManager; import com.datahub.plugins.common.PluginType; @@ -19,8 +19,8 @@ import com.datahub.plugins.loader.PluginPermissionManagerImpl; import com.google.common.collect.ImmutableMap; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; @@ -38,7 +38,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -86,61 +85,75 @@ private EntitySpecResolver initResolver() { private List<Authorizer> initCustomAuthorizers(EntitySpecResolver resolver) { final List<Authorizer> customAuthorizers = new ArrayList<>(); - Path pluginBaseDirectory = Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); - ConfigProvider configProvider = - new ConfigProvider(pluginBaseDirectory); + Path pluginBaseDirectory = + Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); + ConfigProvider configProvider = new ConfigProvider(pluginBaseDirectory); Optional<Config> optionalConfig = configProvider.load(); // Register authorizer plugins if present - optionalConfig.ifPresent((config) -> { - registerAuthorizer(customAuthorizers, resolver, config); - }); + optionalConfig.ifPresent( + (config) -> { + registerAuthorizer(customAuthorizers, resolver, config); + }); return customAuthorizers; } - private void registerAuthorizer(List<Authorizer> customAuthorizers, EntitySpecResolver resolver, Config config) { + private void registerAuthorizer( + List<Authorizer> customAuthorizers, EntitySpecResolver resolver, Config config) { PluginConfigFactory authorizerPluginPluginConfigFactory = new PluginConfigFactory(config); // Load only Authorizer configuration from plugin config factory List<PluginConfig> authorizers = authorizerPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); // Select only enabled authorizer for instantiation - List<PluginConfig> enabledAuthorizers = authorizers.stream().filter(pluginConfig -> { - if (!pluginConfig.getEnabled()) { - log.info(String.format("Authorizer %s is not enabled", pluginConfig.getName())); - } - return pluginConfig.getEnabled(); - }).collect(Collectors.toList()); + List<PluginConfig> enabledAuthorizers = + authorizers.stream() + .filter( + pluginConfig -> { + if (!pluginConfig.getEnabled()) { + log.info(String.format("Authorizer %s is not enabled", pluginConfig.getName())); + } + return pluginConfig.getEnabled(); + }) + .collect(Collectors.toList()); // Get 
security mode set by user SecurityMode securityMode = - SecurityMode.valueOf(this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + SecurityMode.valueOf( + this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); // Create permission manager with security mode PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); - // Save ContextClassLoader. As some plugins are directly using context classloader from current thread to load libraries + // Save ContextClassLoader. As some plugins are directly using context classloader from current + // thread to load libraries // This will break plugin as their dependencies are inside plugin directory only ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); // Instantiate Authorizer plugins - enabledAuthorizers.forEach((pluginConfig) -> { - // Create context - AuthorizerContext context = new AuthorizerContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString()), resolver); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, pluginConfig); - try { - Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); - Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); - log.info("Initializing plugin {}", pluginConfig.getName()); - authorizer.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); - customAuthorizers.add(authorizer); - log.info("Plugin {} is initialized", pluginConfig.getName()); - } catch (ClassNotFoundException e) { - log.debug(String.format("Failed to init the plugin", pluginConfig.getName())); - throw new RuntimeException(e); - } finally { - Thread.currentThread().setContextClassLoader(contextClassLoader); - } - }); + enabledAuthorizers.forEach( + (pluginConfig) -> { + // Create context + AuthorizerContext context = + new AuthorizerContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString()), + resolver); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, pluginConfig); + try { + Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); + Authorizer authorizer = + (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); + log.info("Initializing plugin {}", pluginConfig.getName()); + authorizer.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); + customAuthorizers.add(authorizer); + log.info("Plugin {} is initialized", pluginConfig.getName()); + } catch (ClassNotFoundException e) { + log.debug(String.format("Failed to init the plugin %s", pluginConfig.getName())); + throw new RuntimeException(e); + } finally { + Thread.currentThread().setContextClassLoader(contextClassLoader); + } + }); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java index 363c657453b49..296aab95ae427 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java @@ -34,38 +34,30 @@ import software.amazon.awssdk.regions.Region; /** - * An {@link HttpRequestInterceptor} that signs
requests using any AWS {@link Signer} - * and {@link AwsCredentialsProvider}. + * An {@link HttpRequestInterceptor} that signs requests using any AWS {@link Signer} and {@link + * AwsCredentialsProvider}. */ public class AwsRequestSigningApacheInterceptor implements HttpRequestInterceptor { - /** - * The service that we're connecting to. - */ + /** The service that we're connecting to. */ private final String service; - /** - * The particular signer implementation. - */ + /** The particular signer implementation. */ private final Signer signer; - /** - * The source of AWS credentials for signing. - */ + /** The source of AWS credentials for signing. */ private final AwsCredentialsProvider awsCredentialsProvider; - /** - * The region signing region. - */ + /** The region signing region. */ private final Region region; /** - * * @param service service that we're connecting to * @param signer particular signer implementation * @param awsCredentialsProvider source of AWS credentials for signing * @param region signing region */ - public AwsRequestSigningApacheInterceptor(final String service, + public AwsRequestSigningApacheInterceptor( + final String service, final Signer signer, final AwsCredentialsProvider awsCredentialsProvider, final Region region) { @@ -76,22 +68,20 @@ public AwsRequestSigningApacheInterceptor(final String service, } /** - * * @param service service that we're connecting to * @param signer particular signer implementation * @param awsCredentialsProvider source of AWS credentials for signing * @param region signing region */ - public AwsRequestSigningApacheInterceptor(final String service, + public AwsRequestSigningApacheInterceptor( + final String service, final Signer signer, final AwsCredentialsProvider awsCredentialsProvider, final String region) { this(service, signer, awsCredentialsProvider, Region.of(region)); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public void process(final HttpRequest request, final HttpContext context) throws HttpException, IOException { @@ -103,13 +93,13 @@ public void process(final HttpRequest request, final HttpContext context) } // Copy Apache HttpRequest to AWS Request - SdkHttpFullRequest.Builder requestBuilder = SdkHttpFullRequest.builder() - .method(SdkHttpMethod.fromValue(request.getRequestLine().getMethod())) - .uri(buildUri(context, uriBuilder)); + SdkHttpFullRequest.Builder requestBuilder = + SdkHttpFullRequest.builder() + .method(SdkHttpMethod.fromValue(request.getRequestLine().getMethod())) + .uri(buildUri(context, uriBuilder)); if (request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest httpEntityEnclosingRequest = - (HttpEntityEnclosingRequest) request; + HttpEntityEnclosingRequest httpEntityEnclosingRequest = (HttpEntityEnclosingRequest) request; if (httpEntityEnclosingRequest.getEntity() != null) { InputStream content = httpEntityEnclosingRequest.getEntity().getContent(); requestBuilder.contentStreamProvider(() -> content); @@ -119,7 +109,8 @@ public void process(final HttpRequest request, final HttpContext context) requestBuilder.headers(headerArrayToMap(request.getAllHeaders())); ExecutionAttributes attributes = new ExecutionAttributes(); - attributes.putAttribute(AwsSignerExecutionAttribute.AWS_CREDENTIALS, awsCredentialsProvider.resolveCredentials()); + attributes.putAttribute( + AwsSignerExecutionAttribute.AWS_CREDENTIALS, awsCredentialsProvider.resolveCredentials()); attributes.putAttribute(AwsSignerExecutionAttribute.SERVICE_SIGNING_NAME, service); 
attributes.putAttribute(AwsSignerExecutionAttribute.SIGNING_REGION, region); @@ -129,13 +120,14 @@ public void process(final HttpRequest request, final HttpContext context) // Now copy everything back request.setHeaders(mapToHeaderArray(signedRequest.headers())); if (request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest httpEntityEnclosingRequest = - (HttpEntityEnclosingRequest) request; + HttpEntityEnclosingRequest httpEntityEnclosingRequest = (HttpEntityEnclosingRequest) request; if (httpEntityEnclosingRequest.getEntity() != null) { BasicHttpEntity basicHttpEntity = new BasicHttpEntity(); - basicHttpEntity.setContent(signedRequest.contentStreamProvider() - .orElseThrow(() -> new IllegalStateException("There must be content")) - .newStream()); + basicHttpEntity.setContent( + signedRequest + .contentStreamProvider() + .orElseThrow(() -> new IllegalStateException("There must be content")) + .newStream()); // wrap into repeatable entity to support retries httpEntityEnclosingRequest.setEntity(new BufferedHttpEntity(basicHttpEntity)); } @@ -159,15 +151,13 @@ private URI buildUri(final HttpContext context, URIBuilder uriBuilder) throws IO } /** - * * @param params list of HTTP query params as NameValuePairs * @return a multimap of HTTP query params */ private static Map<String, List<String>> nvpToMapParams(final List<NameValuePair> params) { Map<String, List<String>> parameterMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (NameValuePair nvp : params) { - List<String> argsList = - parameterMap.computeIfAbsent(nvp.getName(), k -> new ArrayList<>()); + List<String> argsList = parameterMap.computeIfAbsent(nvp.getName(), k -> new ArrayList<>()); argsList.add(nvp.getValue()); } return parameterMap; @@ -181,9 +171,10 @@ private static Map<String, List<String>> headerArrayToMap(final Header[] headers Map<String, List<String>> headersMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (Header header : headers) { if (!skipHeader(header)) { - headersMap.put(header.getName(), headersMap - .getOrDefault(header.getName(), - new LinkedList<>(Collections.singletonList(header.getValue())))); + headersMap.put( + header.getName(), + headersMap.getOrDefault( + header.getName(), new LinkedList<>(Collections.singletonList(header.getValue())))); } } return headersMap; @@ -195,7 +186,7 @@ private static Map<String, List<String>> headerArrayToMap(final Header[] headers */ private static boolean skipHeader(final Header header) { return ("content-length".equalsIgnoreCase(header.getName()) - && "0".equals(header.getValue())) // Strip Content-Length: 0 + && "0".equals(header.getValue())) // Strip Content-Length: 0 || "host".equalsIgnoreCase(header.getName()); // Host comes from endpoint } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 663234e2519fa..3b23243f76742 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -2,8 +2,8 @@ import com.datahub.authentication.Authentication; import com.datahub.authorization.DataHubAuthorizer; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.metadata.client.JavaEntityClient; import 
com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({RestliEntityClientFactory.class}) @@ -43,10 +42,17 @@ public class DataHubAuthorizerFactory { @Nonnull protected DataHubAuthorizer getInstance() { - final DataHubAuthorizer.AuthorizationMode mode = policiesEnabled ? DataHubAuthorizer.AuthorizationMode.DEFAULT - : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; - - return new DataHubAuthorizer(systemAuthentication, entityClient, 10, - policyCacheRefreshIntervalSeconds, mode, policyCacheFetchSize); + final DataHubAuthorizer.AuthorizationMode mode = + policiesEnabled + ? DataHubAuthorizer.AuthorizationMode.DEFAULT + : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; + + return new DataHubAuthorizer( + systemAuthentication, + entityClient, + 10, + policyCacheRefreshIntervalSeconds, + mode, + policyCacheFetchSize); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java index d47e1a0a73401..83544e4165ae3 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java @@ -2,9 +2,8 @@ import com.datahub.authentication.token.StatefulTokenService; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -29,12 +28,7 @@ public class DataHubTokenServiceFactory { @Value("${authentication.tokenService.issuer:datahub-metadata-service}") private String issuer; - /** - * + @Inject - * + @Named("entityService") - * + private EntityService _entityService; - * + - */ + /** + @Inject + @Named("entityService") + private EntityService _entityService; + */ @Autowired @Qualifier("entityService") private EntityService _entityService; @@ -44,11 +38,6 @@ public class DataHubTokenServiceFactory { @Nonnull protected StatefulTokenService getInstance() { return new StatefulTokenService( - this.signingKey, - this.signingAlgorithm, - this.issuer, - this._entityService, - this.saltingKey - ); + this.signingKey, this.signingAlgorithm, this.issuer, this._entityService, this.saltingKey); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java index 57598abf8095d..7c6c4384d7343 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java @@ -1,12 +1,10 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authentication.group.GroupService; import com.linkedin.metadata.client.JavaEntityClient; -import 
com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +13,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class GroupServiceFactory { @@ -37,4 +34,4 @@ public class GroupServiceFactory { protected GroupService getInstance() throws Exception { return new GroupService(this._javaEntityClient, this._entityService, this._graphClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java index 105f4c677a9e4..c44eada46794d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.invite.InviteTokenService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class InviteTokenServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index a0df661852935..844f3a094b6b7 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -1,13 +1,11 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authentication.user.NativeUserService; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -16,7 +14,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class NativeUserServiceFactory { @@ 
-32,14 +29,16 @@ public class NativeUserServiceFactory { @Qualifier("dataHubSecretService") private SecretService _secretService; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Bean(name = "nativeUserService") @Scope("singleton") @Nonnull protected NativeUserService getInstance() throws Exception { - return new NativeUserService(_entityService, _javaEntityClient, _secretService, + return new NativeUserService( + _entityService, + _javaEntityClient, + _secretService, _configurationProvider.getAuthentication()); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java index cc6f5c8272f9d..a6ae703576a3e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.post.PostService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -11,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PostServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java index 8a85f63cdd66d..7696d5201493a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java @@ -1,10 +1,8 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authorization.role.RoleService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -13,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RoleServiceFactory { @@ -28,4 +25,4 @@ public class RoleServiceFactory { protected RoleService getInstance() throws Exception { return new RoleService(this._javaEntityClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java index 5bdd8cbf83c65..52d13b05a654d 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java @@ -13,10 +13,9 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - /** - * Factory responsible for instantiating an instance of {@link Authentication} used to authenticate requests - * made by the internal system. + * Factory responsible for instantiating an instance of {@link Authentication} used to authenticate + * requests made by the internal system. */ @Configuration @ConfigurationProperties @@ -37,7 +36,6 @@ protected Authentication getInstance() { // TODO: Change to service final Actor systemActor = new Actor(ActorType.USER, this.systemClientId); return new Authentication( - systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret) - ); + systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret)); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java index 820b272bedb67..44f1669546e33 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class CacheConfig { @@ -50,25 +49,30 @@ private Caffeine<Object, Object> caffeineCacheBuilder() { @ConditionalOnProperty(name = "searchService.cacheImplementation", havingValue = "hazelcast") public CacheManager hazelcastCacheManager() { Config config = new Config(); - // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, containsKey etc. + // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, + // containsKey etc. // is this behavior what we actually desire? Should we change it now? 
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java
index 820b272bedb67..44f1669546e33 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java
@@ -17,7 +17,6 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

-
 @Configuration
 public class CacheConfig {

@@ -50,25 +49,30 @@ private Caffeine<Object, Object> caffeineCacheBuilder() {
   @ConditionalOnProperty(name = "searchService.cacheImplementation", havingValue = "hazelcast")
   public CacheManager hazelcastCacheManager() {
     Config config = new Config();
-    // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, containsKey etc.
+    // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put,
+    // containsKey etc.
     // is this behavior what we actually desire? Should we change it now?
     MapConfig mapConfig = new MapConfig().setMaxIdleSeconds(cacheTtlSeconds);

-    EvictionConfig evictionConfig = new EvictionConfig()
-        .setMaxSizePolicy(MaxSizePolicy.PER_NODE)
-        .setSize(cacheMaxSize)
-        .setEvictionPolicy(EvictionPolicy.LFU);
+    EvictionConfig evictionConfig =
+        new EvictionConfig()
+            .setMaxSizePolicy(MaxSizePolicy.PER_NODE)
+            .setSize(cacheMaxSize)
+            .setEvictionPolicy(EvictionPolicy.LFU);
     mapConfig.setEvictionConfig(evictionConfig);
     mapConfig.setName("default");
     config.addMapConfig(mapConfig);

     config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false);
-    config.getNetworkConfig().getJoin().getKubernetesConfig().setEnabled(true)
+    config
+        .getNetworkConfig()
+        .getJoin()
+        .getKubernetesConfig()
+        .setEnabled(true)
         .setProperty("service-dns", hazelcastServiceName);

-
     HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(config);
     return new HazelcastCacheManager(hazelcastInstance);
   }
-}
\ No newline at end of file
+}
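The Hazelcast branch above combines a max-idle TTL with per-node LFU eviction. A minimal standalone sketch of the same MapConfig wiring, with the TTL and size hardcoded for illustration (in the factory they come from cacheTtlSeconds and cacheMaxSize properties):

import com.hazelcast.config.Config;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MaxSizePolicy;

public class HazelcastMapConfigExample {
  public static Config build() {
    // Entries are dropped after 600s without a read/write (max-idle), and LFU
    // eviction caps each node at 10_000 entries.
    MapConfig mapConfig = new MapConfig().setMaxIdleSeconds(600);
    mapConfig.setEvictionConfig(
        new EvictionConfig()
            .setMaxSizePolicy(MaxSizePolicy.PER_NODE)
            .setSize(10_000)
            .setEvictionPolicy(EvictionPolicy.LFU));
    mapConfig.setName("default");
    return new Config().addMapConfig(mapConfig);
  }
}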
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java
index d80d57799ee4d..ddfce627b56cd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java
@@ -4,7 +4,6 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

-
 @Configuration
 public class DatasetUrnNameCasingFactory {
   @Nonnull
@@ -13,4 +12,4 @@ protected Boolean getInstance() {
     String datasetUrnNameCasingEnv = System.getenv("DATAHUB_DATASET_URN_TO_LOWER");
     return Boolean.parseBoolean(datasetUrnNameCasingEnv);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java
index 51c7db5e37366..aa47a35f3d38a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java
@@ -3,12 +3,12 @@
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
 import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
-import com.linkedin.metadata.models.registry.LineageRegistry;
 import com.linkedin.metadata.graph.elastic.ESGraphQueryDAO;
 import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO;
 import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.models.registry.LineageRegistry;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -17,7 +17,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Import({BaseElasticSearchComponentsFactory.class, EntityRegistryFactory.class})
@@ -30,17 +29,25 @@ public class ElasticSearchGraphServiceFactory {
   @Qualifier("entityRegistry")
   private EntityRegistry entityRegistry;

-  @Autowired
-  private ConfigurationProvider configurationProvider;
+  @Autowired private ConfigurationProvider configurationProvider;

   @Bean(name = "elasticSearchGraphService")
   @Nonnull
   protected ElasticSearchGraphService getInstance() {
     LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry);
-    return new ElasticSearchGraphService(lineageRegistry, components.getBulkProcessor(), components.getIndexConvention(),
-        new ESGraphWriteDAO(components.getIndexConvention(), components.getBulkProcessor(), components.getNumRetries()),
-        new ESGraphQueryDAO(components.getSearchClient(), lineageRegistry, components.getIndexConvention(),
-            configurationProvider.getElasticSearch().getSearch().getGraph()),
+    return new ElasticSearchGraphService(
+        lineageRegistry,
+        components.getBulkProcessor(),
+        components.getIndexConvention(),
+        new ESGraphWriteDAO(
+            components.getIndexConvention(),
+            components.getBulkProcessor(),
+            components.getNumRetries()),
+        new ESGraphQueryDAO(
+            components.getSearchClient(),
+            lineageRegistry,
+            components.getIndexConvention(),
+            configurationProvider.getElasticSearch().getSearch().getGraph()),
         components.getIndexBuilder());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java
index 504618ba9cc6a..20c3e92767ce4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Import({BaseElasticSearchComponentsFactory.class})
@@ -24,8 +23,14 @@ public class ElasticSearchSystemMetadataServiceFactory {
   @Bean(name = "elasticSearchSystemMetadataService")
   @Nonnull
   protected ElasticSearchSystemMetadataService getInstance() {
-    return new ElasticSearchSystemMetadataService(components.getBulkProcessor(), components.getIndexConvention(),
-        new ESSystemMetadataDAO(components.getSearchClient(), components.getIndexConvention(),
-            components.getBulkProcessor(), components.getNumRetries()), components.getIndexBuilder());
+    return new ElasticSearchSystemMetadataService(
+        components.getBulkProcessor(),
+        components.getIndexConvention(),
+        new ESSystemMetadataDAO(
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            components.getBulkProcessor(),
+            components.getNumRetries()),
+        components.getIndexBuilder());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java
index 0dce80b98964b..483251644b6c0 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java
@@ -1,103 +1,117 @@
 package com.linkedin.gms.factory.common;

 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
-import org.apache.http.ssl.SSLContextBuilder;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import javax.annotation.Nonnull;
-import javax.net.ssl.SSLContext;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.security.NoSuchAlgorithmException;
-import java.security.SecureRandom;
 import java.security.KeyManagementException;
 import java.security.KeyStore;
 import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.SecureRandom;
 import java.security.UnrecoverableKeyException;
 import java.security.cert.CertificateException;
+import javax.annotation.Nonnull;
+import javax.net.ssl.SSLContext;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class ElasticsearchSSLContextFactory {

-    @Value("${elasticsearch.sslContext.protocol}")
-    private String sslProtocol;
-
-    @Value("${elasticsearch.sslContext.secureRandomImplementation}")
-    private String sslSecureRandomImplementation;
+  @Value("${elasticsearch.sslContext.protocol}")
+  private String sslProtocol;

-    @Value("${elasticsearch.sslContext.trustStoreFile}")
-    private String sslTrustStoreFile;
+  @Value("${elasticsearch.sslContext.secureRandomImplementation}")
+  private String sslSecureRandomImplementation;

-    @Value("${elasticsearch.sslContext.trustStoreType}")
-    private String sslTrustStoreType;
+  @Value("${elasticsearch.sslContext.trustStoreFile}")
+  private String sslTrustStoreFile;

-    @Value("${elasticsearch.sslContext.trustStorePassword}")
-    private String sslTrustStorePassword;
+  @Value("${elasticsearch.sslContext.trustStoreType}")
+  private String sslTrustStoreType;

-    @Value("${elasticsearch.sslContext.keyStoreFile}")
-    private String sslKeyStoreFile;
+  @Value("${elasticsearch.sslContext.trustStorePassword}")
+  private String sslTrustStorePassword;

-    @Value("${elasticsearch.sslContext.keyStoreType}")
-    private String sslKeyStoreType;
+  @Value("${elasticsearch.sslContext.keyStoreFile}")
+  private String sslKeyStoreFile;

-    @Value("${elasticsearch.sslContext.keyStorePassword}")
-    private String sslKeyStorePassword;
+  @Value("${elasticsearch.sslContext.keyStoreType}")
+  private String sslKeyStoreType;

-    @Value("${elasticsearch.sslContext.keyPassword}")
-    private String sslKeyPassword;
+  @Value("${elasticsearch.sslContext.keyStorePassword}")
+  private String sslKeyStorePassword;

-    @Bean(name = "elasticSearchSSLContext")
-    public SSLContext createInstance() {
-        final SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
-        if (sslProtocol != null) {
-            sslContextBuilder.useProtocol(sslProtocol);
-        }
+  @Value("${elasticsearch.sslContext.keyPassword}")
+  private String sslKeyPassword;

-        if (sslTrustStoreFile != null && sslTrustStoreType != null && sslTrustStorePassword != null) {
-            loadTrustStore(sslContextBuilder, sslTrustStoreFile, sslTrustStoreType, sslTrustStorePassword);
-        }
-
-        if (sslKeyStoreFile != null && sslKeyStoreType != null && sslKeyStorePassword != null && sslKeyPassword != null) {
-            loadKeyStore(sslContextBuilder, sslKeyStoreFile, sslKeyStoreType, sslKeyStorePassword, sslKeyPassword);
-        }
-
-        final SSLContext sslContext;
-        try {
-            if (sslSecureRandomImplementation != null) {
-                sslContextBuilder.setSecureRandom(SecureRandom.getInstance(sslSecureRandomImplementation));
-            }
-            sslContext = sslContextBuilder.build();
-        } catch (NoSuchAlgorithmException | KeyManagementException e) {
-            throw new RuntimeException("Failed to build SSL Context", e);
-        }
-        return sslContext;
+  @Bean(name = "elasticSearchSSLContext")
+  public SSLContext createInstance() {
+    final SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
+    if (sslProtocol != null) {
+      sslContextBuilder.useProtocol(sslProtocol);
     }

-    private void loadKeyStore(@Nonnull SSLContextBuilder sslContextBuilder, @Nonnull String path,
-        @Nonnull String type, @Nonnull String password, @Nonnull String keyPassword) {
-        try (InputStream identityFile = new FileInputStream(path)) {
-            final KeyStore keystore = KeyStore.getInstance(type);
-            keystore.load(identityFile, password.toCharArray());
-            sslContextBuilder.loadKeyMaterial(keystore, keyPassword.toCharArray());
-        } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException | UnrecoverableKeyException e) {
-            throw new RuntimeException("Failed to load key store: " + path, e);
-        }
+    if (sslTrustStoreFile != null && sslTrustStoreType != null && sslTrustStorePassword != null) {
+      loadTrustStore(
+          sslContextBuilder, sslTrustStoreFile, sslTrustStoreType, sslTrustStorePassword);
     }

-    private void loadTrustStore(@Nonnull SSLContextBuilder sslContextBuilder, @Nonnull String path,
-        @Nonnull String type, @Nonnull String password) {
-        try (InputStream identityFile = new FileInputStream(path)) {
-            final KeyStore keystore = KeyStore.getInstance(type);
-            keystore.load(identityFile, password.toCharArray());
-            sslContextBuilder.loadTrustMaterial(keystore, null);
-        } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) {
-            throw new RuntimeException("Failed to load key store: " + path, e);
-        }
+    if (sslKeyStoreFile != null
+        && sslKeyStoreType != null
+        && sslKeyStorePassword != null
+        && sslKeyPassword != null) {
+      loadKeyStore(
+          sslContextBuilder, sslKeyStoreFile, sslKeyStoreType, sslKeyStorePassword, sslKeyPassword);
     }

+    final SSLContext sslContext;
+    try {
+      if (sslSecureRandomImplementation != null) {
+        sslContextBuilder.setSecureRandom(SecureRandom.getInstance(sslSecureRandomImplementation));
+      }
+      sslContext = sslContextBuilder.build();
+    } catch (NoSuchAlgorithmException | KeyManagementException e) {
+      throw new RuntimeException("Failed to build SSL Context", e);
+    }
+    return sslContext;
+  }
+
+  private void loadKeyStore(
+      @Nonnull SSLContextBuilder sslContextBuilder,
+      @Nonnull String path,
+      @Nonnull String type,
+      @Nonnull String password,
+      @Nonnull String keyPassword) {
+    try (InputStream identityFile = new FileInputStream(path)) {
+      final KeyStore keystore = KeyStore.getInstance(type);
+      keystore.load(identityFile, password.toCharArray());
+      sslContextBuilder.loadKeyMaterial(keystore, keyPassword.toCharArray());
+    } catch (IOException
+        | CertificateException
+        | NoSuchAlgorithmException
+        | KeyStoreException
+        | UnrecoverableKeyException e) {
+      throw new RuntimeException("Failed to load key store: " + path, e);
+    }
+  }
+
+  private void loadTrustStore(
+      @Nonnull SSLContextBuilder sslContextBuilder,
+      @Nonnull String path,
+      @Nonnull String type,
+      @Nonnull String password) {
+    try (InputStream identityFile = new FileInputStream(path)) {
+      final KeyStore keystore = KeyStore.getInstance(type);
+      keystore.load(identityFile, password.toCharArray());
+      sslContextBuilder.loadTrustMaterial(keystore, null);
+    } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) {
+      throw new RuntimeException("Failed to load key store: " + path, e);
+    }
+  }
 }
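For reference, the truststore-loading pattern this factory uses can be exercised standalone. A minimal sketch assuming a PKCS12 truststore; the path, type, and password are placeholder values (the factory reads them from elasticsearch.sslContext.* properties):

import java.io.FileInputStream;
import java.io.InputStream;
import java.security.KeyStore;
import javax.net.ssl.SSLContext;
import org.apache.http.ssl.SSLContextBuilder;

public class TrustStoreSslContextExample {
  public static SSLContext build() throws Exception {
    // Placeholder path and password for illustration only.
    try (InputStream trust = new FileInputStream("/etc/ssl/truststore.p12")) {
      KeyStore keystore = KeyStore.getInstance("PKCS12");
      keystore.load(trust, "changeit".toCharArray());
      // Passing a null TrustStrategy trusts exactly the certificates in the store.
      return new SSLContextBuilder().loadTrustMaterial(keystore, null).build();
    }
  }
}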
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java
index ba66b678d82b9..63a2e42de1d1a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java
@@ -7,7 +7,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @PropertySource("classpath:git.properties")
 public class GitVersionFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java
index 1e37c735b5bd4..db4928cfe3764 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;

-
 @Configuration
 @Import({GraphServiceFactory.class})
 public class GraphClientFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java
index 94593eb1fb84c..d98dfcb617f84 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.common;

-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.graph.GraphService;
-import com.linkedin.metadata.graph.neo4j.Neo4jGraphService;
 import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService;
+import com.linkedin.metadata.graph.neo4j.Neo4jGraphService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -15,7 +15,6 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Import({Neo4jGraphServiceFactory.class, ElasticSearchGraphServiceFactory.class})
@@ -42,7 +41,8 @@ protected GraphService createInstance() {
       return _elasticSearchGraphService;
     } else {
       throw new RuntimeException(
-          "Error: Failed to initialize graph service. Graph Service provided: " + graphServiceImpl
+          "Error: Failed to initialize graph service. Graph Service provided: "
+              + graphServiceImpl
               + ". Valid options: [neo4j, elasticsearch].");
     }
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java
index ada8466d302e6..b268bb0937035 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java
@@ -8,7 +8,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-
 /**
  * Creates a {@link IndexConvention} to generate search index names.
  *
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java
index 9805d554d5941..62d4beddd1ab1 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java
@@ -1,10 +1,8 @@
 package com.linkedin.gms.factory.common;

-import lombok.extern.slf4j.Slf4j;
-
 import java.util.HashMap;
 import java.util.Map;
-
+import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -36,14 +34,16 @@ public class LocalCassandraSessionConfigFactory {

   @Bean(name = "gmsCassandraServiceConfig")
   protected Map<String, String> createInstance() {
-    return new HashMap<String, String>() {{
-      put("username", datasourceUsername);
-      put("password", datasourcePassword);
-      put("hosts", hosts);
-      put("port", port);
-      put("datacenter", datacenter);
-      put("keyspace", keyspace);
-      put("useSsl", useSsl);
-    }};
+    return new HashMap<String, String>() {
+      {
+        put("username", datasourceUsername);
+        put("password", datasourcePassword);
+        put("hosts", hosts);
+        put("port", port);
+        put("datacenter", datacenter);
+        put("keyspace", keyspace);
+        put("useSsl", useSsl);
+      }
+    };
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java
index 6bf8ff123b221..08787cdb89aba 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java
@@ -15,7 +15,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -93,7 +92,8 @@ public DataSourceConfig buildDataSourceConfig(@Value("${ebean.url}") String data
   }

   @Bean(name = "gmsEbeanServiceConfig")
-  protected ServerConfig createInstance(@Qualifier("ebeanDataSourceConfig") DataSourceConfig config) {
+  protected ServerConfig createInstance(
+      @Qualifier("ebeanDataSourceConfig") DataSourceConfig config) {
     ServerConfig serverConfig = new ServerConfig();
     serverConfig.setName("gmsEbeanServiceConfig");
     serverConfig.setDataSourceConfig(config);
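The reformatter keeps the double-brace initialization in LocalCassandraSessionConfigFactory, which creates an anonymous HashMap subclass capturing the enclosing factory. If that map were ever rewritten, a plain mutable map avoids the hidden subclass; a rough sketch with hardcoded values standing in for the factory's @Value fields (not part of the patch):

import java.util.HashMap;
import java.util.Map;

public class CassandraConfigMapExample {
  // Hypothetical values; the real factory reads these from Spring properties.
  static Map<String, String> build() {
    Map<String, String> config = new HashMap<>();
    config.put("hosts", "localhost");
    config.put("port", "9042");
    config.put("datacenter", "datacenter1");
    config.put("keyspace", "datahub");
    config.put("useSsl", "false");
    return config;
  }
}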
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java
index 65b6115d6638e..04ed29407518d 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java
@@ -2,7 +2,6 @@
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import java.util.concurrent.TimeUnit;
-
 import org.neo4j.driver.AuthTokens;
 import org.neo4j.driver.Config;
 import org.neo4j.driver.Driver;
@@ -12,7 +11,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class Neo4jDriverFactory {
@@ -49,10 +47,12 @@ protected Driver createInstance() {
     Config.ConfigBuilder builder = Config.builder();
     builder.withMaxConnectionPoolSize(neo4jMaxConnectionPoolSize);
-    builder.withConnectionAcquisitionTimeout(neo4jMaxConnectionAcquisitionTimeout, TimeUnit.SECONDS);
+    builder.withConnectionAcquisitionTimeout(
+        neo4jMaxConnectionAcquisitionTimeout, TimeUnit.SECONDS);
     builder.withMaxConnectionLifetime(neo4jMaxConnectionLifetime(), TimeUnit.SECONDS);
     builder.withMaxTransactionRetryTime(neo4jMaxTransactionRetryTime, TimeUnit.SECONDS);
-    builder.withConnectionLivenessCheckTimeout(neo4jConnectionLivenessCheckTimeout, TimeUnit.SECONDS);
+    builder.withConnectionLivenessCheckTimeout(
+        neo4jConnectionLivenessCheckTimeout, TimeUnit.SECONDS);

     return GraphDatabase.driver(uri, AuthTokens.basic(username, password), builder.build());
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java
index 87670ce10f481..d3b0cd8aa6d92 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.common;

 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
-import com.linkedin.metadata.models.registry.LineageRegistry;
 import com.linkedin.metadata.graph.neo4j.Neo4jGraphService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.models.registry.LineageRegistry;
 import javax.annotation.Nonnull;
 import org.neo4j.driver.Driver;
 import org.neo4j.driver.SessionConfig;
@@ -14,7 +14,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;

-
 @Configuration
 @Import({Neo4jDriverFactory.class, EntityRegistryFactory.class})
 public class Neo4jGraphServiceFactory {
@@ -33,6 +32,7 @@ public class Neo4jGraphServiceFactory {
   @Nonnull
   protected Neo4jGraphService getInstance() {
     LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry);
-    return new Neo4jGraphService(lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase));
+    return new Neo4jGraphService(
+        lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase));
   }
 }
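The chained settings in Neo4jDriverFactory map directly onto the Neo4j Java driver's Config builder. A minimal standalone sketch with placeholder URI, credentials, and limits (the factory reads all of these from application.yml):

import java.util.concurrent.TimeUnit;
import org.neo4j.driver.AuthTokens;
import org.neo4j.driver.Config;
import org.neo4j.driver.Driver;
import org.neo4j.driver.GraphDatabase;

public class Neo4jDriverExample {
  public static Driver connect() {
    // Placeholder values for illustration only.
    Config config =
        Config.builder()
            .withMaxConnectionPoolSize(100)
            .withConnectionAcquisitionTimeout(60, TimeUnit.SECONDS)
            .build();
    return GraphDatabase.driver(
        "bolt://localhost:7687", AuthTokens.basic("neo4j", "password"), config);
  }
}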
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java
index 3c40b30bfc7d1..ddd31f2692934 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java
@@ -10,10 +10,15 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.http.HttpHost;
 import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
 import org.apache.http.config.RegistryBuilder;
 import org.apache.http.conn.ssl.DefaultHostnameVerifier;
 import org.apache.http.conn.ssl.NoopHostnameVerifier;
 import org.apache.http.conn.util.PublicSuffixMatcherLoader;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
 import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager;
 import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor;
 import org.apache.http.impl.nio.reactor.IOReactorConfig;
@@ -33,11 +38,6 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
-import org.apache.http.auth.UsernamePasswordCredentials;
-import org.apache.http.client.CredentialsProvider;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
-import org.apache.http.auth.AuthScope;
 import org.springframework.context.annotation.PropertySource;
 import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
 import software.amazon.awssdk.auth.signer.Aws4Signer;
@@ -45,7 +45,7 @@
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
-@Import({ ElasticsearchSSLContextFactory.class })
+@Import({ElasticsearchSSLContextFactory.class})
 public class RestHighLevelClientFactory {

   @Value("${elasticsearch.host}")
@@ -93,21 +93,26 @@ public RestHighLevelClient createInstance(RestClientBuilder restClientBuilder) {
   public RestClientBuilder loadRestClient() {
     final RestClientBuilder builder = createBuilder(useSSL ? "https" : "http");

-    builder.setHttpClientConfigCallback(httpAsyncClientBuilder -> {
-      if (useSSL) {
-        httpAsyncClientBuilder.setSSLContext(sslContext).setSSLHostnameVerifier(new NoopHostnameVerifier());
-      }
-      try {
-        httpAsyncClientBuilder.setConnectionManager(createConnectionManager());
-      } catch (IOReactorException e) {
-        throw new IllegalStateException("Unable to start ElasticSearch client. Please verify connection configuration.");
-      }
-      httpAsyncClientBuilder.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(threadCount).build());
-
-      setCredentials(httpAsyncClientBuilder);
-
-      return httpAsyncClientBuilder;
-    });
+    builder.setHttpClientConfigCallback(
+        httpAsyncClientBuilder -> {
+          if (useSSL) {
+            httpAsyncClientBuilder
+                .setSSLContext(sslContext)
+                .setSSLHostnameVerifier(new NoopHostnameVerifier());
+          }
+          try {
+            httpAsyncClientBuilder.setConnectionManager(createConnectionManager());
+          } catch (IOReactorException e) {
+            throw new IllegalStateException(
+                "Unable to start ElasticSearch client. Please verify connection configuration.");
+          }
+          httpAsyncClientBuilder.setDefaultIOReactorConfig(
+              IOReactorConfig.custom().setIoThreadCount(threadCount).build());
+
+          setCredentials(httpAsyncClientBuilder);
+
+          return httpAsyncClientBuilder;
+        });

     return builder;
   }
@@ -121,41 +126,47 @@ private RestClientBuilder createBuilder(String scheme) {
     }

     builder.setRequestConfigCallback(
-        requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(connectionRequestTimeout));
+        requestConfigBuilder ->
+            requestConfigBuilder.setConnectionRequestTimeout(connectionRequestTimeout));

     return builder;
   }

   /**
-   * Needed to override ExceptionHandler behavior for cases where IO error would have put client in unrecoverable state
-   * We don't utilize system properties in the client builder, so setting defaults pulled from
-   * {@link HttpAsyncClientBuilder#build()}.
+   * Needed to override ExceptionHandler behavior for cases where IO error would have put client in
+   * unrecoverable state. We don't utilize system properties in the client builder, so setting
+   * defaults pulled from {@link HttpAsyncClientBuilder#build()}.
+   *
    * @return
    */
   private NHttpClientConnectionManager createConnectionManager() throws IOReactorException {
     SSLContext sslContext = SSLContexts.createDefault();
-    HostnameVerifier hostnameVerifier = new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault());
+    HostnameVerifier hostnameVerifier =
+        new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault());
     SchemeIOSessionStrategy sslStrategy =
         new SSLIOSessionStrategy(sslContext, null, null, hostnameVerifier);

-    IOReactorConfig ioReactorConfig = IOReactorConfig.custom().setIoThreadCount(threadCount).build();
+    IOReactorConfig ioReactorConfig =
+        IOReactorConfig.custom().setIoThreadCount(threadCount).build();
     DefaultConnectingIOReactor ioReactor = new DefaultConnectingIOReactor(ioReactorConfig);

-    IOReactorExceptionHandler ioReactorExceptionHandler = new IOReactorExceptionHandler() {
-      @Override
-      public boolean handle(IOException ex) {
-        log.error("IO Exception caught during ElasticSearch connection.", ex);
-        return true;
-      }
-
-      @Override
-      public boolean handle(RuntimeException ex) {
-        log.error("Runtime Exception caught during ElasticSearch connection.", ex);
-        return true;
-      }
-    };
+    IOReactorExceptionHandler ioReactorExceptionHandler =
+        new IOReactorExceptionHandler() {
+          @Override
+          public boolean handle(IOException ex) {
+            log.error("IO Exception caught during ElasticSearch connection.", ex);
+            return true;
+          }
+
+          @Override
+          public boolean handle(RuntimeException ex) {
+            log.error("Runtime Exception caught during ElasticSearch connection.", ex);
+            return true;
+          }
+        };
     ioReactor.setExceptionHandler(ioReactorExceptionHandler);

-    return new PoolingNHttpClientConnectionManager(ioReactor,
+    return new PoolingNHttpClientConnectionManager(
+        ioReactor,
         RegistryBuilder.<SchemeIOSessionStrategy>create()
             .register("http", NoopIOSessionStrategy.INSTANCE)
             .register("https", sslStrategy)
@@ -165,7 +176,8 @@ public boolean handle(RuntimeException ex) {
   private void setCredentials(HttpAsyncClientBuilder httpAsyncClientBuilder) {
     if (username != null && password != null) {
       final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
-      credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
+      credentialsProvider.setCredentials(
+          AuthScope.ANY, new UsernamePasswordCredentials(username, password));
       httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
     }
     if (opensearchUseAwsIamAuth) {
@@ -177,11 +189,12 @@ private void setCredentials(HttpAsyncClientBuilder httpAsyncClientBuilder) {

   private HttpRequestInterceptor getAwsRequestSigningInterceptor(String region) {
     if (region == null) {
-      throw new IllegalArgumentException("Region must not be null when opensearchUseAwsIamAuth is enabled");
+      throw new IllegalArgumentException(
+          "Region must not be null when opensearchUseAwsIamAuth is enabled");
     }
     Aws4Signer signer = Aws4Signer.create();
     // Uses default AWS credentials
-    return new AwsRequestSigningApacheInterceptor("es", signer,
-        DefaultCredentialsProvider.create(), region);
+    return new AwsRequestSigningApacheInterceptor(
+        "es", signer, DefaultCredentialsProvider.create(), region);
   }
 }
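The setCredentials path above is the stock Apache HttpAsyncClient basic-auth pattern. A minimal sketch outside Spring, with username and password as parameters rather than injected fields:

import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.impl.nio.client.HttpAsyncClients;

public class BasicAuthClientExample {
  public static HttpAsyncClientBuilder withBasicAuth(String user, String pass) {
    BasicCredentialsProvider provider = new BasicCredentialsProvider();
    // AuthScope.ANY applies the credentials to every host, port, and realm.
    provider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(user, pass));
    return HttpAsyncClients.custom().setDefaultCredentialsProvider(provider);
  }
}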
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java
index 3ba6965577204..5663162186b83 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;

-
 @Configuration
 @Import({GraphServiceFactory.class, EntityServiceFactory.class})
 public class SiblingGraphServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java
index 241c93f438bf1..1c17e433d5507 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java
@@ -11,7 +11,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;

-
 @Configuration
 @Import({ElasticSearchSystemMetadataServiceFactory.class})
 public class SystemMetadataServiceFactory {
@@ -24,6 +23,6 @@ public class SystemMetadataServiceFactory {
   @Bean(name = "systemMetadataService")
   @Primary
   protected SystemMetadataService createInstance() {
-      return _elasticSearchSystemMetadataService;
+    return _elasticSearchSystemMetadataService;
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java
index c7df8b1cde6ec..fac0bf0c46685 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java
@@ -7,7 +7,6 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

-
 /**
  * Creates a {@link TopicConvention} to generate kafka metadata event topic names.
  *
@@ -32,10 +31,14 @@ public class TopicConventionFactory {
   @Value("${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}")
   private String metadataChangeLogVersionedTopicName;

-  @Value("${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}")
+  @Value(
+      "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}")
   private String metadataChangeLogTimeseriesTopicName;

-  @Value("${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_PROPOSAL + "}")
+  @Value(
+      "${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:"
+          + Topics.FAILED_METADATA_CHANGE_PROPOSAL
+          + "}")
   private String failedMetadataChangeProposalName;

   @Value("${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}")
@@ -46,10 +49,17 @@ public class TopicConventionFactory {

   @Bean(name = TOPIC_CONVENTION_BEAN)
   protected TopicConvention createInstance() {
-    return new TopicConventionImpl(metadataChangeEventName, metadataAuditEventName, failedMetadataChangeEventName,
-        metadataChangeProposalName, metadataChangeLogVersionedTopicName, metadataChangeLogTimeseriesTopicName,
-        failedMetadataChangeProposalName, platformEventTopicName,
+    return new TopicConventionImpl(
+        metadataChangeEventName,
+        metadataAuditEventName,
+        failedMetadataChangeEventName,
+        metadataChangeProposalName,
+        metadataChangeLogVersionedTopicName,
+        metadataChangeLogTimeseriesTopicName,
+        failedMetadataChangeProposalName,
+        platformEventTopicName,
         // TODO once we start rolling out v5 add support for changing the new event names.
-        TopicConventionImpl.DEFAULT_EVENT_PATTERN, dataHubUpgradeHistoryTopicName);
+        TopicConventionImpl.DEFAULT_EVENT_PATTERN,
+        dataHubUpgradeHistoryTopicName);
   }
 }
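The `${ENV_VAR:<default>}` expressions in TopicConventionFactory splice a Java constant into the property placeholder so the fallback topic name lives in one place. A stripped-down sketch of the same pattern (the constant value and property name here are illustrative, not taken from the patch):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

@Configuration
public class TopicNameExample {
  static final String DEFAULT_TOPIC = "ExampleTopic_v1";

  // Resolves the EXAMPLE_TOPIC_NAME env var/property, falling back to the constant.
  @Value("${EXAMPLE_TOPIC_NAME:" + DEFAULT_TOPIC + "}")
  private String exampleTopicName;
}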
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java
index 465480be344c7..5c7c2370ab337 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java
@@ -2,6 +2,7 @@
 import com.datahub.authentication.AuthenticationConfiguration;
 import com.datahub.authorization.AuthorizationConfiguration;
+import com.linkedin.datahub.graphql.featureflags.FeatureFlags;
 import com.linkedin.metadata.config.DataHubConfiguration;
 import com.linkedin.metadata.config.IngestionConfiguration;
 import com.linkedin.metadata.config.SystemUpdateConfiguration;
@@ -11,76 +12,57 @@
 import com.linkedin.metadata.config.cache.CacheConfiguration;
 import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
-import com.linkedin.datahub.graphql.featureflags.FeatureFlags;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.config.telemetry.TelemetryConfiguration;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import lombok.Data;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-
 @Configuration
 @ConfigurationProperties
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Data
 public class ConfigurationProvider {
-  /**
-   * Authentication related configs
-   */
+  /** Authentication related configs */
   private AuthenticationConfiguration authentication;
-  /**
-   * Authorizer related configs
-   */
+
+  /** Authorizer related configs */
   private AuthorizationConfiguration authorization;
-  /**
-   * Ingestion related configs
-   */
+
+  /** Ingestion related configs */
   private IngestionConfiguration ingestion;
-  /**
-   * Telemetry related configs
-   */
+
+  /** Telemetry related configs */
   private TelemetryConfiguration telemetry;
-  /**
-   * Viz related configs
-   */
+
+  /** Viz related configs */
   private VisualConfiguration visualConfig;
-  /**
-   * Tests related configs
-   */
+
+  /** Tests related configs */
   private TestsConfiguration metadataTests;
-  /**
-   * DataHub top-level server configurations
-   */
+
+  /** DataHub top-level server configurations */
   private DataHubConfiguration datahub;
-  /**
-   * Views feature related configs
-   */
+
+  /** Views feature related configs */
   private ViewsConfiguration views;
-  /**
-   * Feature flags indicating what is turned on vs turned off
-   */
+
+  /** Feature flags indicating what is turned on vs turned off */
   private FeatureFlags featureFlags;
-  /**
-   * Kafka related configs.
-   */
+
+  /** Kafka related configs. */
   private KafkaConfiguration kafka;
-  /**
-   * ElasticSearch configurations
-   */
+
+  /** ElasticSearch configurations */
   private ElasticSearchConfiguration elasticSearch;
-  /**
-   * System Update configurations
-   */
+
+  /** System Update configurations */
   private SystemUpdateConfiguration systemUpdate;

-  /**
-   * Configuration for caching
-   */
+  /** Configuration for caching */
   private CacheConfiguration cache;

-  /**
-   * Configuration for the health check server
-   */
+  /** Configuration for the health check server */
   private HealthCheckConfiguration healthCheck;
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java
index 6eadf06288d29..23b7ec9edd306 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java
@@ -2,7 +2,6 @@
 import lombok.Data;

-
 @Data
 public class HealthCheckConfiguration {
   private int cacheDurationSeconds;
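ConfigurationProvider binds the whole application.yml tree through @ConfigurationProperties plus Lombok's @Data, which generates the setters the binder requires. A stripped-down sketch of the same binding mechanism; the `demo` prefix and both fields are invented for illustration:

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

@Configuration
@ConfigurationProperties(prefix = "demo")
@Data
public class DemoConfiguration {
  // Bound from demo.enabled and demo.batchSize in application.yml.
  private boolean enabled;
  private int batchSize;
}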
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java
index 6eab711603c52..739211855cacd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java
@@ -1,9 +1,10 @@
 package com.linkedin.gms.factory.dataproduct;

-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
 import com.linkedin.metadata.graph.GraphClient;
 import com.linkedin.metadata.service.DataProductService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
@@ -11,8 +12,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;

-import javax.annotation.Nonnull;
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class DataProductServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java
index ae20f7e96ba40..326537ee07cbd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java
@@ -2,6 +2,13 @@
 import com.datastax.oss.driver.api.core.CqlSession;
 import com.datastax.oss.driver.api.core.CqlSessionBuilder;
+import java.net.InetSocketAddress;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.net.ssl.SSLContext;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
@@ -9,14 +16,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;

-import javax.annotation.Nonnull;
-import javax.net.ssl.SSLContext;
-import java.net.InetSocketAddress;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
 @Configuration
 public class CassandraSessionFactory {

@@ -30,20 +29,22 @@ public class CassandraSessionFactory {
   @Nonnull
   protected CqlSession createSession() {
     int port = Integer.parseInt(sessionConfig.get("port"));
-    List<InetSocketAddress> addresses = Arrays.stream(sessionConfig.get("hosts").split(","))
-        .map(host -> new InetSocketAddress(host, port))
-        .collect(Collectors.toList());
+    List<InetSocketAddress> addresses =
+        Arrays.stream(sessionConfig.get("hosts").split(","))
+            .map(host -> new InetSocketAddress(host, port))
+            .collect(Collectors.toList());

     String dc = sessionConfig.get("datacenter");
     String ks = sessionConfig.get("keyspace");
     String username = sessionConfig.get("username");
     String password = sessionConfig.get("password");

-    CqlSessionBuilder csb = CqlSession.builder()
-        .addContactPoints(addresses)
-        .withLocalDatacenter(dc)
-        .withKeyspace(ks)
-        .withAuthCredentials(username, password);
+    CqlSessionBuilder csb =
+        CqlSession.builder()
+            .addContactPoints(addresses)
+            .withLocalDatacenter(dc)
+            .withKeyspace(ks)
+            .withAuthCredentials(username, password);

     if (sessionConfig.containsKey("useSsl") && sessionConfig.get("useSsl").equals("true")) {
       try {
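The session wiring above follows the standard DataStax 4.x builder. A minimal standalone sketch with placeholder connection details (the factory derives these from the gmsCassandraServiceConfig map instead):

import com.datastax.oss.driver.api.core.CqlSession;
import java.net.InetSocketAddress;

public class CqlSessionExample {
  public static CqlSession connect() {
    // Placeholder host, datacenter, and credentials for illustration only.
    return CqlSession.builder()
        .addContactPoint(new InetSocketAddress("localhost", 9042))
        .withLocalDatacenter("datacenter1")
        .withAuthCredentials("cassandra", "cassandra")
        .build();
  }
}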
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java
index 9feb7e469d018..2bfe7bff1b45a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java
@@ -12,14 +12,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;

-
 @Configuration
 @Slf4j
 public class EbeanServerFactory {
   public static final String EBEAN_MODEL_PACKAGE = EbeanAspectV2.class.getPackage().getName();

-  @Autowired
-  ApplicationContext applicationContext;
+  @Autowired ApplicationContext applicationContext;

   @Bean(name = "ebeanServer")
   @DependsOn({"gmsEbeanServiceConfig"})
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java
index 925689c8609db..94aebb2a39efa 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java
@@ -5,13 +5,12 @@
 import com.linkedin.metadata.entity.cassandra.CassandraAspectDao;
 import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
 import io.ebean.Database;
+import javax.annotation.Nonnull;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;

-import javax.annotation.Nonnull;
-
 @Configuration
 public class EntityAspectDaoFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java
index 4000f7d6ed058..9123714de5bc8 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java
@@ -5,13 +5,12 @@
 import com.linkedin.metadata.entity.cassandra.CassandraAspectDao;
 import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
 import io.ebean.Database;
+import javax.annotation.Nonnull;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;

-import javax.annotation.Nonnull;
-
 @Configuration
 public class EntityAspectMigrationsDaoFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java
index f1c1a7b743714..e75ec0c0dc44a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java
@@ -11,6 +11,7 @@
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.service.UpdateIndicesService;
 import com.linkedin.mxe.TopicConvention;
+import javax.annotation.Nonnull;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.kafka.clients.producer.Producer;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -19,9 +20,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;

-import javax.annotation.Nonnull;
-
-
 @Configuration
 public class EntityServiceFactory {

@@ -29,22 +27,35 @@ public class EntityServiceFactory {
   private Integer _ebeanMaxTransactionRetry;

   @Bean(name = "entityService")
-  @DependsOn({"entityAspectDao", "kafkaEventProducer", "kafkaHealthChecker",
-      TopicConventionFactory.TOPIC_CONVENTION_BEAN, "entityRegistry"})
+  @DependsOn({
"entityAspectDao", + "kafkaEventProducer", + "kafkaHealthChecker", + TopicConventionFactory.TOPIC_CONVENTION_BEAN, + "entityRegistry" + }) @Nonnull protected EntityService createInstance( - Producer<String, ? extends IndexedRecord> producer, - TopicConvention convention, - KafkaHealthChecker kafkaHealthChecker, - @Qualifier("entityAspectDao") AspectDao aspectDao, - EntityRegistry entityRegistry, - ConfigurationProvider configurationProvider, - UpdateIndicesService updateIndicesService) { - - final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker); + Producer<String, ? extends IndexedRecord> producer, + TopicConvention convention, + KafkaHealthChecker kafkaHealthChecker, + @Qualifier("entityAspectDao") AspectDao aspectDao, + EntityRegistry entityRegistry, + ConfigurationProvider configurationProvider, + UpdateIndicesService updateIndicesService) { + + final KafkaEventProducer eventProducer = + new KafkaEventProducer(producer, convention, kafkaHealthChecker); FeatureFlags featureFlags = configurationProvider.getFeatureFlags(); - EntityService entityService = new EntityServiceImpl(aspectDao, eventProducer, entityRegistry, - featureFlags.isAlwaysEmitChangeLog(), updateIndicesService, featureFlags.getPreProcessHooks(), _ebeanMaxTransactionRetry); + EntityService entityService = + new EntityServiceImpl( + aspectDao, + eventProducer, + entityRegistry, + featureFlags.isAlwaysEmitChangeLog(), + updateIndicesService, + featureFlags.getPreProcessHooks(), + _ebeanMaxTransactionRetry); return entityService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java index 3f2388f4829e3..080845147766f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java @@ -1,10 +1,10 @@ package com.linkedin.gms.factory.entity; import com.datahub.authentication.Authentication; -import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.entity.client.RestliEntityClient; +import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; @@ -21,7 +21,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @ConditionalOnExpression("'${entityClient.preferredImpl:java}'.equals('java')") @Import({DataHubKafkaProducerFactory.class}) @@ -60,7 +59,8 @@ public class JavaEntityClientFactory { private EventProducer _eventProducer; @Bean("javaEntityClient") - public JavaEntityClient getJavaEntityClient(@Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { + public JavaEntityClient getJavaEntityClient( + @Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { return new JavaEntityClient( _entityService, _deleteEntityService, @@ -74,10 +74,12 @@ public JavaEntityClient getJavaEntityClient(@Qualifier("restliEntityClient") fin } @Bean("systemJavaEntityClient") - public 
-      SystemJavaEntityClient systemJavaEntityClient(@Qualifier("configurationProvider") final ConfigurationProvider configurationProvider,
-      @Qualifier("systemAuthentication") final Authentication systemAuthentication,
-      @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) {
-    SystemJavaEntityClient systemJavaEntityClient = new SystemJavaEntityClient(
+  public SystemJavaEntityClient systemJavaEntityClient(
+      @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider,
+      @Qualifier("systemAuthentication") final Authentication systemAuthentication,
+      @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) {
+    SystemJavaEntityClient systemJavaEntityClient =
+        new SystemJavaEntityClient(
             _entityService,
             _deleteEntityService,
             _entitySearchService,
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java
index dfc5e835392df..1dee8c4aa4d27 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java
@@ -4,19 +4,17 @@
 import com.linkedin.entity.client.RestliEntityClient;
 import com.linkedin.entity.client.SystemRestliEntityClient;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.restli.DefaultRestliClientFactory;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
 import com.linkedin.restli.client.Client;
+import java.net.URI;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;

-import java.net.URI;
-
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class RestliEntityClientFactory {
@@ -48,21 +46,28 @@ public RestliEntityClient getRestliEntityClient() {
     if (gmsUri != null) {
       restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol);
     } else {
-      restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol);
+      restClient =
+          DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol);
     }
     return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries);
   }

   @Bean("systemRestliEntityClient")
-  public SystemRestliEntityClient systemRestliEntityClient(@Qualifier("configurationProvider") final ConfigurationProvider configurationProvider,
-      @Qualifier("systemAuthentication") final Authentication systemAuthentication) {
+  public SystemRestliEntityClient systemRestliEntityClient(
+      @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider,
+      @Qualifier("systemAuthentication") final Authentication systemAuthentication) {
     final Client restClient;
     if (gmsUri != null) {
       restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol);
     } else {
-      restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol);
+      restClient =
+          DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol);
     }
-    return new SystemRestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries,
-        systemAuthentication, configurationProvider.getCache().getClient().getEntityClient());
+    return new SystemRestliEntityClient(
+        restClient,
+        new ExponentialBackoff(retryInterval),
+        numRetries,
+        systemAuthentication,
+        configurationProvider.getCache().getClient().getEntityClient());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java
index ff56f19e4f8fd..b02541586de49 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java
@@ -1,12 +1,13 @@
 package com.linkedin.gms.factory.entity;

 import com.datastax.oss.driver.api.core.CqlSession;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.entity.RetentionService;
 import com.linkedin.metadata.entity.cassandra.CassandraRetentionService;
 import com.linkedin.metadata.entity.ebean.EbeanRetentionService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import io.ebean.Database;
+import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
@@ -16,9 +17,6 @@
 import org.springframework.context.annotation.DependsOn;
 import org.springframework.context.annotation.PropertySource;

-import javax.annotation.Nonnull;
-
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class RetentionServiceFactory {
@@ -30,24 +28,24 @@ public class RetentionServiceFactory {
   @Value("${RETENTION_APPLICATION_BATCH_SIZE:1000}")
   private Integer _batchSize;

-
   @Bean(name = "retentionService")
   @DependsOn({"cassandraSession", "entityService"})
   @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra")
   @Nonnull
   protected RetentionService createCassandraInstance(CqlSession session) {
-    RetentionService retentionService = new CassandraRetentionService(_entityService, session, _batchSize);
+    RetentionService retentionService =
+        new CassandraRetentionService(_entityService, session, _batchSize);
     _entityService.setRetentionService(retentionService);
     return retentionService;
   }

-
   @Bean(name = "retentionService")
   @DependsOn({"ebeanServer", "entityService"})
   @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
   @Nonnull
   protected RetentionService createEbeanInstance(Database server) {
-    RetentionService retentionService = new EbeanRetentionService(_entityService, server, _batchSize);
+    RetentionService retentionService =
+        new EbeanRetentionService(_entityService, server, _batchSize);
     _entityService.setRetentionService(retentionService);
     return retentionService;
   }
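RetentionServiceFactory registers the same bean name twice and lets @ConditionalOnProperty pick the backend, with matchIfMissing making ebean the default. A reduced sketch of the pattern; the interface, classes, and property names are invented for illustration:

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class StorageBackendExample {
  interface Storage {}

  static class SqlStorage implements Storage {}

  static class CassandraStorage implements Storage {}

  // Selected when storage.impl=cassandra.
  @Bean(name = "storage")
  @ConditionalOnProperty(name = "storage.impl", havingValue = "cassandra")
  Storage cassandraStorage() {
    return new CassandraStorage();
  }

  // Default when storage.impl is absent or equals "sql".
  @Bean(name = "storage")
  @ConditionalOnProperty(name = "storage.impl", havingValue = "sql", matchIfMissing = true)
  Storage sqlStorage() {
    return new SqlStorage();
  }
}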
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java @@ -17,23 +17,32 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import(EntityIndexBuildersFactory.class) public class UpdateIndicesServiceFactory { - @Autowired - private ApplicationContext context; + @Autowired private ApplicationContext context; + @Value("${entityClient.preferredImpl:java}") private String entityClientImpl; @Bean - public UpdateIndicesService updateIndicesService(GraphService graphService, EntitySearchService entitySearchService, - TimeseriesAspectService timeseriesAspectService, - SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { - UpdateIndicesService updateIndicesService = new UpdateIndicesService(graphService, entitySearchService, timeseriesAspectService, - systemMetadataService, entityRegistry, searchDocumentTransformer, entityIndexBuilders); + public UpdateIndicesService updateIndicesService( + GraphService graphService, + EntitySearchService entitySearchService, + TimeseriesAspectService timeseriesAspectService, + SystemMetadataService systemMetadataService, + EntityRegistry entityRegistry, + SearchDocumentTransformer searchDocumentTransformer, + EntityIndexBuilders entityIndexBuilders) { + UpdateIndicesService updateIndicesService = + new UpdateIndicesService( + graphService, + entitySearchService, + timeseriesAspectService, + systemMetadataService, + entityRegistry, + searchDocumentTransformer, + entityIndexBuilders); if ("restli".equals(entityClientImpl)) { updateIndicesService.setSystemEntityClient(context.getBean(SystemRestliEntityClient.class)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java index cda21f8907867..356fb226937dd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.entityregistry; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistryException; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.io.IOException; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Value; @@ -11,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.core.io.Resource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ConfigEntityRegistryFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java index 962bab56cbbf5..2c65eeafe063b 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java @@ -32,7 +32,8 @@ public class EntityRegistryFactory { @Primary @Nonnull protected EntityRegistry getInstance() throws EntityRegistryException { - MergedEntityRegistry baseEntityRegistry = new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); + MergedEntityRegistry baseEntityRegistry = + new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); pluginEntityRegistryLoader.withBaseRegistry(baseEntityRegistry).start(true); return baseEntityRegistry; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java index 6dbb07309c7cc..8c6a4ad998aff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java @@ -1,7 +1,7 @@ package com.linkedin.gms.factory.entityregistry; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.PluginEntityRegistryLoader; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.io.FileNotFoundException; import java.net.MalformedURLException; import javax.annotation.Nonnull; @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PluginEntityRegistryFactory { @@ -20,7 +19,8 @@ public class PluginEntityRegistryFactory { @Bean(name = "pluginEntityRegistry") @Nonnull - protected PluginEntityRegistryLoader getInstance() throws FileNotFoundException, MalformedURLException { + protected PluginEntityRegistryLoader getInstance() + throws FileNotFoundException, MalformedURLException { return new PluginEntityRegistryLoader(pluginRegistryPath); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index c50b4c9088bc2..723715a13b1c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -2,24 +2,24 @@ import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.post.PostService; import com.datahub.authentication.token.StatefulTokenService; import com.datahub.authentication.user.NativeUserService; import com.datahub.authorization.role.RoleService; -import com.datahub.authentication.post.PostService; import com.linkedin.datahub.graphql.GmsGraphQLEngine; import com.linkedin.datahub.graphql.GmsGraphQLEngineArgs; import com.linkedin.datahub.graphql.GraphQLEngine; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; -import com.linkedin.metadata.client.JavaEntityClient; import 
com.linkedin.gms.factory.auth.DataHubTokenServiceFactory; import com.linkedin.gms.factory.common.GitVersionFactory; import com.linkedin.gms.factory.common.IndexConventionFactory; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; import com.linkedin.gms.factory.common.SiblingGraphServiceFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.recommendation.RecommendationServiceFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; @@ -29,11 +29,11 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; -import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -48,11 +48,17 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration -@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, RestliEntityClientFactory.class, - RecommendationServiceFactory.class, EntityRegistryFactory.class, DataHubTokenServiceFactory.class, - GitVersionFactory.class, SiblingGraphServiceFactory.class}) +@Import({ + RestHighLevelClientFactory.class, + IndexConventionFactory.class, + RestliEntityClientFactory.class, + RecommendationServiceFactory.class, + EntityRegistryFactory.class, + DataHubTokenServiceFactory.class, + GitVersionFactory.class, + SiblingGraphServiceFactory.class +}) public class GraphQLEngineFactory { @Autowired @Qualifier("elasticSearchRestHighLevelClient") @@ -169,7 +175,6 @@ public class GraphQLEngineFactory { @Value("${platformAnalytics.enabled}") // TODO: Migrate to DATAHUB_ANALYTICS_ENABLED private Boolean isAnalyticsEnabled; - @Bean(name = "graphQLEngine") @Nonnull protected GraphQLEngine getInstance() { @@ -211,8 +216,6 @@ protected GraphQLEngine getInstance() { args.setQueryService(_queryService); args.setFeatureFlags(_configProvider.getFeatureFlags()); args.setDataProductService(_dataProductService); - return new GmsGraphQLEngine( - args - ).builder().build(); + return new GmsGraphQLEngine(args).builder().build(); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java index 9beb617c4f6e8..78b9c5d52efdd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java @@ -16,7 +16,6 @@ import org.springframework.context.annotation.PropertySource; import 
org.springframework.context.annotation.Scope; - @Import({SystemAuthenticationFactory.class, RestliEntityClientFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class IngestionSchedulerFactory { @@ -33,17 +32,23 @@ public class IngestionSchedulerFactory { @Qualifier("configurationProvider") private ConfigurationProvider _configProvider; - @Value("${ingestion.scheduler.delayIntervalSeconds:45}") // Boot up ingestion source cache after waiting 45 seconds for startup. + @Value("${ingestion.scheduler.delayIntervalSeconds:45}") // Boot up ingestion source cache after + // waiting 45 seconds for startup. private Integer _delayIntervalSeconds; - @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}") // By default, refresh ingestion sources 2 times per day. + @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}") // By default, refresh ingestion + // sources 2 times per day. private Integer _refreshIntervalSeconds; @Bean(name = "ingestionScheduler") @Scope("singleton") @Nonnull protected IngestionScheduler getInstance() { - return new IngestionScheduler(_systemAuthentication, _entityClient, _configProvider.getIngestion(), - _delayIntervalSeconds, _refreshIntervalSeconds); + return new IngestionScheduler( + _systemAuthentication, + _entityClient, + _configProvider.getIngestion(), + _delayIntervalSeconds, + _refreshIntervalSeconds); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java index 675f015d9e378..41807d0daaa72 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.kafka; import com.linkedin.gms.factory.common.TopicConventionFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.mxe.TopicConvention; import org.apache.avro.generic.IndexedRecord; import org.apache.kafka.clients.producer.Producer; @@ -14,7 +14,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({DataHubKafkaProducerFactory.class, TopicConventionFactory.class, KafkaHealthChecker.class}) @@ -28,14 +27,10 @@ public class DataHubKafkaEventProducerFactory { @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; - @Autowired - private KafkaHealthChecker kafkaHealthChecker; + @Autowired private KafkaHealthChecker kafkaHealthChecker; @Bean(name = "kafkaEventProducer") protected KafkaEventProducer createInstance() { - return new KafkaEventProducer( - kafkaProducer, - topicConvention, - kafkaHealthChecker); + return new KafkaEventProducer(kafkaProducer, topicConvention, kafkaHealthChecker); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java index 78b3de501e0e5..0b331ffc40be4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java @@ -1,11 +1,11 @@ package com.linkedin.gms.factory.kafka; -import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.util.Arrays; import java.util.Map; @@ -23,11 +23,14 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @EnableConfigurationProperties({KafkaProperties.class}) -@Import({KafkaSchemaRegistryFactory.class, AwsGlueSchemaRegistryFactory.class, InternalSchemaRegistryFactory.class}) +@Import({ + KafkaSchemaRegistryFactory.class, + AwsGlueSchemaRegistryFactory.class, + InternalSchemaRegistryFactory.class +}) public class DataHubKafkaProducerFactory { @Autowired @@ -35,20 +38,26 @@ public class DataHubKafkaProducerFactory { private SchemaRegistryConfig _schemaRegistryConfig; @Bean(name = "kafkaProducer") - protected Producer<String, IndexedRecord> createInstance(@Qualifier("configurationProvider") ConfigurationProvider - provider, KafkaProperties properties) { + protected Producer<String, IndexedRecord> createInstance( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties properties) { KafkaConfiguration kafkaConfiguration = provider.getKafka(); - return new KafkaProducer<>(buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties)); + return new KafkaProducer<>( + buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties)); } - public static Map<String, Object> buildProducerProperties(SchemaRegistryConfig schemaRegistryConfig, - KafkaConfiguration kafkaConfiguration, KafkaProperties properties) { + public static Map<String, Object> buildProducerProperties( + SchemaRegistryConfig schemaRegistryConfig, + KafkaConfiguration kafkaConfiguration, + KafkaProperties properties) { KafkaProperties.Producer producerProps = properties.getProducer(); producerProps.setKeySerializer(StringSerializer.class); // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS - if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) { - producerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); + if (kafkaConfiguration.getBootstrapServers() != null + && kafkaConfiguration.getBootstrapServers().length() > 0) { + producerProps.setBootstrapServers( + Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); } // else we rely on KafkaProperties which defaults to localhost:9092 Map<String, Object> props = properties.buildProducerProperties(); @@ -56,18 
+65,27 @@ public static Map<String, Object> buildProducerProperties(SchemaRegistryConfig s props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getSerializer()); props.put(ProducerConfig.RETRIES_CONFIG, kafkaConfiguration.getProducer().getRetryCount()); - props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getDeliveryTimeout()); - props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getRequestTimeout()); - props.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, kafkaConfiguration.getProducer().getBackoffTimeout()); - props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, kafkaConfiguration.getProducer().getCompressionType()); - props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, kafkaConfiguration.getProducer().getMaxRequestSize()); + props.put( + ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, + kafkaConfiguration.getProducer().getDeliveryTimeout()); + props.put( + ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, + kafkaConfiguration.getProducer().getRequestTimeout()); + props.put( + ProducerConfig.RETRY_BACKOFF_MS_CONFIG, + kafkaConfiguration.getProducer().getBackoffTimeout()); + props.put( + ProducerConfig.COMPRESSION_TYPE_CONFIG, + kafkaConfiguration.getProducer().getCompressionType()); + props.put( + ProducerConfig.MAX_REQUEST_SIZE_CONFIG, + kafkaConfiguration.getProducer().getMaxRequestSize()); // Override KafkaProperties with SchemaRegistryConfig only for non-empty values - schemaRegistryConfig.getProperties().entrySet() - .stream() - .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty()) - .forEach(entry -> props.put(entry.getKey(), entry.getValue())); + schemaRegistryConfig.getProperties().entrySet().stream() + .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty()) + .forEach(entry -> props.put(entry.getKey(), entry.getValue())); return props; } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java index 7a9e80781d639..2a6338ac15e93 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java @@ -1,16 +1,14 @@ package com.linkedin.gms.factory.kafka; -import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; - +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import java.time.Duration; import java.util.Arrays; import java.util.Map; - import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.consumer.ConsumerConfig; @@ -26,98 +24,109 @@ @Slf4j @Configuration -@Import({KafkaSchemaRegistryFactory.class, AwsGlueSchemaRegistryFactory.class, InternalSchemaRegistryFactory.class}) +@Import({ + KafkaSchemaRegistryFactory.class, + AwsGlueSchemaRegistryFactory.class, + InternalSchemaRegistryFactory.class +}) public class KafkaEventConsumerFactory { - - private int 
kafkaEventConsumerConcurrency; - - @Bean(name = "kafkaConsumerFactory") - protected DefaultKafkaConsumerFactory<String, GenericRecord> createConsumerFactory( - @Qualifier("configurationProvider") ConfigurationProvider provider, - KafkaProperties baseKafkaProperties, - SchemaRegistryConfig schemaRegistryConfig) { - kafkaEventConsumerConcurrency = provider.getKafka().getListener().getConcurrency(); - - KafkaConfiguration kafkaConfiguration = provider.getKafka(); - Map<String, Object> customizedProperties = buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, - schemaRegistryConfig); - - return new DefaultKafkaConsumerFactory<>(customizedProperties); - } - - @Bean(name = "duheKafkaConsumerFactory") - protected DefaultKafkaConsumerFactory<String, GenericRecord> duheKafkaConsumerFactory( - @Qualifier("configurationProvider") ConfigurationProvider provider, - KafkaProperties baseKafkaProperties, - @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig schemaRegistryConfig) { - - KafkaConfiguration kafkaConfiguration = provider.getKafka(); - Map<String, Object> customizedProperties = buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, - schemaRegistryConfig); - - return new DefaultKafkaConsumerFactory<>(customizedProperties); - } - - private static Map<String, Object> buildCustomizedProperties(KafkaProperties baseKafkaProperties, - KafkaConfiguration kafkaConfiguration, - SchemaRegistryConfig schemaRegistryConfig) { - KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer(); - - // Specify (de)serializers for record keys and for record values. - consumerProps.setKeyDeserializer(StringDeserializer.class); - // Records will be flushed every 10 seconds. - consumerProps.setEnableAutoCommit(true); - consumerProps.setAutoCommitInterval(Duration.ofSeconds(10)); - - - // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS - if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) { - consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); - } // else we rely on KafkaProperties which defaults to localhost:9092 - - Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties(); - customizedProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer()); - - // Override KafkaProperties with SchemaRegistryConfig only for non-empty values - schemaRegistryConfig.getProperties().entrySet() - .stream() - .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty()) - .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue())); - - customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, - kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes()); - - return customizedProperties; - } - - @Bean(name = "kafkaEventConsumer") - protected KafkaListenerContainerFactory<?> createInstance( - @Qualifier("kafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) { - - ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = - new ConcurrentKafkaListenerContainerFactory<>(); - factory.setConsumerFactory(kafkaConsumerFactory); - factory.setContainerCustomizer(new ThreadPoolContainerCustomizer()); - factory.setConcurrency(kafkaEventConsumerConcurrency); - - log.info(String.format("Event-based KafkaListenerContainerFactory built successfully. 
Consumer concurrency = %s", - kafkaEventConsumerConcurrency)); - - return factory; - } - - @Bean(name = "duheKafkaEventConsumer") - protected KafkaListenerContainerFactory<?> duheKafkaEventConsumer( - @Qualifier("duheKafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) { - - ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = - new ConcurrentKafkaListenerContainerFactory<>(); - factory.setConsumerFactory(kafkaConsumerFactory); - factory.setContainerCustomizer(new ThreadPoolContainerCustomizer()); - factory.setConcurrency(1); - - log.info("Event-based DUHE KafkaListenerContainerFactory built successfully. Consumer concurrency = 1"); - return factory; - } -} \ No newline at end of file + private int kafkaEventConsumerConcurrency; + + @Bean(name = "kafkaConsumerFactory") + protected DefaultKafkaConsumerFactory<String, GenericRecord> createConsumerFactory( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties baseKafkaProperties, + SchemaRegistryConfig schemaRegistryConfig) { + kafkaEventConsumerConcurrency = provider.getKafka().getListener().getConcurrency(); + + KafkaConfiguration kafkaConfiguration = provider.getKafka(); + Map<String, Object> customizedProperties = + buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, schemaRegistryConfig); + + return new DefaultKafkaConsumerFactory<>(customizedProperties); + } + + @Bean(name = "duheKafkaConsumerFactory") + protected DefaultKafkaConsumerFactory<String, GenericRecord> duheKafkaConsumerFactory( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties baseKafkaProperties, + @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig schemaRegistryConfig) { + + KafkaConfiguration kafkaConfiguration = provider.getKafka(); + Map<String, Object> customizedProperties = + buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, schemaRegistryConfig); + + return new DefaultKafkaConsumerFactory<>(customizedProperties); + } + + private static Map<String, Object> buildCustomizedProperties( + KafkaProperties baseKafkaProperties, + KafkaConfiguration kafkaConfiguration, + SchemaRegistryConfig schemaRegistryConfig) { + KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer(); + + // Specify (de)serializers for record keys and for record values. + consumerProps.setKeyDeserializer(StringDeserializer.class); + // Records will be flushed every 10 seconds. 
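// [Editor's sketch — illustrative, not part of the patch. The "flushed every 10 seconds"
// comment above describes Kafka offset auto-commit, not record delivery: assuming the
// standard kafka-clients ConsumerConfig constants, the two Spring-level calls that follow
// it are roughly equivalent to setting these raw consumer properties:
//   props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);        // commit offsets automatically
//   props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 10_000); // every 10 seconds
// ]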
+ consumerProps.setEnableAutoCommit(true); + consumerProps.setAutoCommitInterval(Duration.ofSeconds(10)); + + // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS + if (kafkaConfiguration.getBootstrapServers() != null + && kafkaConfiguration.getBootstrapServers().length() > 0) { + consumerProps.setBootstrapServers( + Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); + } // else we rely on KafkaProperties which defaults to localhost:9092 + + Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties(); + customizedProperties.put( + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer()); + + // Override KafkaProperties with SchemaRegistryConfig only for non-empty values + schemaRegistryConfig.getProperties().entrySet().stream() + .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty()) + .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue())); + + customizedProperties.put( + ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, + kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes()); + + return customizedProperties; + } + + @Bean(name = "kafkaEventConsumer") + protected KafkaListenerContainerFactory<?> createInstance( + @Qualifier("kafkaConsumerFactory") + DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) { + + ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = + new ConcurrentKafkaListenerContainerFactory<>(); + factory.setConsumerFactory(kafkaConsumerFactory); + factory.setContainerCustomizer(new ThreadPoolContainerCustomizer()); + factory.setConcurrency(kafkaEventConsumerConcurrency); + + log.info( + String.format( + "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s", + kafkaEventConsumerConcurrency)); + + return factory; + } + + @Bean(name = "duheKafkaEventConsumer") + protected KafkaListenerContainerFactory<?> duheKafkaEventConsumer( + @Qualifier("duheKafkaConsumerFactory") + DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) { + + ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = + new ConcurrentKafkaListenerContainerFactory<>(); + factory.setConsumerFactory(kafkaConsumerFactory); + factory.setContainerCustomizer(new ThreadPoolContainerCustomizer()); + factory.setConcurrency(1); + + log.info( + "Event-based DUHE KafkaListenerContainerFactory built successfully. 
Consumer concurrency = 1"); + return factory; + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java index 14ffc01d75781..58cb311c526bc 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java @@ -1,11 +1,10 @@ package com.linkedin.gms.factory.kafka; -import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import java.time.Duration; import java.util.Arrays; import java.util.Map; - import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.consumer.ConsumerConfig; @@ -19,15 +18,15 @@ import org.springframework.kafka.config.KafkaListenerContainerFactory; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; - @Slf4j @Configuration @EnableConfigurationProperties({KafkaProperties.class}) public class SimpleKafkaConsumerFactory { @Bean(name = "simpleKafkaConsumer") - protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurationProvider") ConfigurationProvider - provider, KafkaProperties properties) { + protected KafkaListenerContainerFactory<?> createInstance( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties properties) { KafkaConfiguration kafkaConfiguration = provider.getKafka(); KafkaProperties.Consumer consumerProps = properties.getConsumer(); @@ -39,13 +38,16 @@ protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurat consumerProps.setAutoCommitInterval(Duration.ofSeconds(10)); // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS - if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) { - consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); + if (kafkaConfiguration.getBootstrapServers() != null + && kafkaConfiguration.getBootstrapServers().length() > 0) { + consumerProps.setBootstrapServers( + Arrays.asList(kafkaConfiguration.getBootstrapServers().split(","))); } // else we rely on KafkaProperties which defaults to localhost:9092 Map<String, Object> customizedProperties = properties.buildConsumerProperties(); - customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, - kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes()); + customizedProperties.put( + ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, + kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes()); ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = new ConcurrentKafkaListenerContainerFactory<>(); @@ -56,4 +58,4 @@ protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurat return factory; } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java index 07cbccd93c595..f79026c8ee337 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java +++ 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java @@ -5,13 +5,14 @@ import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; - public class ThreadPoolContainerCustomizer - implements ContainerCustomizer<String, GenericRecord, ConcurrentMessageListenerContainer<String, GenericRecord>> { + implements ContainerCustomizer< + String, GenericRecord, ConcurrentMessageListenerContainer<String, GenericRecord>> { @Override public void configure(ConcurrentMessageListenerContainer<String, GenericRecord> container) { ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor(); - // Default Queue Capacity is set to max, so we want to allow the thread pool to add concurrent threads up to configured value + // Default Queue Capacity is set to max, so we want to allow the thread pool to add concurrent + // threads up to configured value threadPoolTaskExecutor.setCorePoolSize(container.getConcurrency()); threadPoolTaskExecutor.setMaxPoolSize(container.getConcurrency()); threadPoolTaskExecutor.initialize(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java index ac1cbbc5cc5ff..a88e1d971973b 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java @@ -17,17 +17,19 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) -@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = AwsGlueSchemaRegistryFactory.TYPE) +@ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = AwsGlueSchemaRegistryFactory.TYPE) public class AwsGlueSchemaRegistryFactory { public static final String TYPE = "AWS_GLUE"; @Value("${kafka.schemaRegistry.awsGlue.region}") private String awsRegion; + @Value("${kafka.schemaRegistry.awsGlue.registryName}") private Optional<String> registryName; @@ -35,7 +37,8 @@ public class AwsGlueSchemaRegistryFactory { @Nonnull protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) { Map<String, Object> props = new HashMap<>(); - // FIXME: Properties for this factory should come from ConfigurationProvider object, specifically under the + // FIXME: Properties for this factory should come from ConfigurationProvider object, + // specifically under the // KafkaConfiguration class. See InternalSchemaRegistryFactory as an example. 
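// [Editor's sketch — illustrative, not part of the patch. The schema-registry factories in
// these hunks are mutually exclusive Spring beans keyed off a single property via
// @ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = ...). A plain-Java
// rendering of that selection, using the TYPE constants visible in the surrounding hunks;
// the "KAFKA" fallback and the *Config() helper names are hypothetical stand-ins for each
// factory's getInstance() bean:
//   switch (env.getOrDefault("kafka.schemaRegistry.type", "KAFKA")) {
//     case "AWS_GLUE": return awsGlueSchemaRegistryConfig();
//     case "INTERNAL": return internalSchemaRegistryConfig();
//     default:         return kafkaSchemaRegistryConfig();
//   }
// ]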
props.put(AWSSchemaRegistryConstants.AWS_REGION, awsRegion); props.put(AWSSchemaRegistryConstants.DATA_FORMAT, "AVRO"); @@ -43,7 +46,7 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr props.put(AWSSchemaRegistryConstants.AVRO_RECORD_TYPE, AvroRecordType.GENERIC_RECORD.getName()); registryName.ifPresent(s -> props.put(AWSSchemaRegistryConstants.REGISTRY_NAME, s)); log.info("Creating AWS Glue registry"); - return new SchemaRegistryConfig(GlueSchemaRegistryKafkaSerializer.class, GlueSchemaRegistryKafkaDeserializer.class, - props); + return new SchemaRegistryConfig( + GlueSchemaRegistryKafkaSerializer.class, GlueSchemaRegistryKafkaDeserializer.class, props); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java index aeef166a077c7..4819984307af9 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java @@ -1,42 +1,40 @@ package com.linkedin.gms.factory.kafka.schemaregistry; -import com.linkedin.metadata.config.kafka.KafkaConfiguration; +import static com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener.TOPIC_NAME; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.boot.kafka.MockDUHEDeserializer; import com.linkedin.metadata.boot.kafka.MockDUHESerializer; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; +import java.util.HashMap; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.util.HashMap; -import java.util.Map; - -import static com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener.TOPIC_NAME; - @Slf4j @Configuration public class DUHESchemaRegistryFactory { - public static final String DUHE_SCHEMA_REGISTRY_TOPIC_KEY = "duheTopicName"; + public static final String DUHE_SCHEMA_REGISTRY_TOPIC_KEY = "duheTopicName"; - @Value(TOPIC_NAME) - private String duheTopicName; + @Value(TOPIC_NAME) + private String duheTopicName; - /** - * Configure Kafka Producer/Consumer processes with a custom schema registry. - */ - @Bean("duheSchemaRegistryConfig") - protected SchemaRegistryConfig duheSchemaRegistryConfig(ConfigurationProvider provider) { - Map<String, Object> props = new HashMap<>(); - KafkaConfiguration kafkaConfiguration = provider.getKafka(); + /** Configure Kafka Producer/Consumer processes with a custom schema registry. 
*/ + @Bean("duheSchemaRegistryConfig") + protected SchemaRegistryConfig duheSchemaRegistryConfig(ConfigurationProvider provider) { + Map<String, Object> props = new HashMap<>(); + KafkaConfiguration kafkaConfiguration = provider.getKafka(); - props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaConfiguration - .getSchemaRegistry().getUrl()); - props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName); + props.put( + AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + kafkaConfiguration.getSchemaRegistry().getUrl()); + props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName); - log.info("DataHub System Update Registry"); - return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props); - } + log.info("DataHub System Update Registry"); + return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props); + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java index 217dc15bbc3e8..8c814e5054758 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.kafka.schemaregistry; -import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.registry.SchemaRegistryService; import com.linkedin.metadata.registry.SchemaRegistryServiceImpl; import com.linkedin.mxe.TopicConvention; @@ -19,27 +19,30 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Slf4j @Configuration -@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE) +@ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = InternalSchemaRegistryFactory.TYPE) public class InternalSchemaRegistryFactory { public static final String TYPE = "INTERNAL"; - /** - * Configure Kafka Producer/Consumer processes with a custom schema registry. - */ + /** Configure Kafka Producer/Consumer processes with a custom schema registry. 
*/ @Bean("schemaRegistryConfig") @Nonnull - protected SchemaRegistryConfig getInstance(@Qualifier("configurationProvider") ConfigurationProvider provider) { + protected SchemaRegistryConfig getInstance( + @Qualifier("configurationProvider") ConfigurationProvider provider) { Map<String, Object> props = new HashMap<>(); KafkaConfiguration kafkaConfiguration = provider.getKafka(); - props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaConfiguration - .getSchemaRegistry().getUrl()); + props.put( + AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + kafkaConfiguration.getSchemaRegistry().getUrl()); - log.info("Creating internal registry configuration for url {}", kafkaConfiguration.getSchemaRegistry().getUrl()); + log.info( + "Creating internal registry configuration for url {}", + kafkaConfiguration.getSchemaRegistry().getUrl()); return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java index 7b72ba3f3bb88..e6c255b99a9ff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java @@ -21,7 +21,9 @@ @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) -@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = KafkaSchemaRegistryFactory.TYPE) +@ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = KafkaSchemaRegistryFactory.TYPE) public class KafkaSchemaRegistryFactory { public static final String TYPE = "KAFKA"; @@ -48,7 +50,8 @@ public class KafkaSchemaRegistryFactory { @Nonnull protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) { Map<String, Object> props = new HashMap<>(); - // FIXME: Properties for this factory should come from ConfigurationProvider object, specifically under the + // FIXME: Properties for this factory should come from ConfigurationProvider object, + // specifically under the // KafkaConfiguration class. See InternalSchemaRegistryFactory as an example. 
props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaSchemaRegistryUrl); props.put(withNamespace(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), sslTruststoreLocation); @@ -60,8 +63,11 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr if (sslKeystoreLocation.isEmpty()) { log.info("creating schema registry config using url: {}", kafkaSchemaRegistryUrl); } else { - log.info("creating schema registry config using url: {}, keystore location: {} and truststore location: {}", - kafkaSchemaRegistryUrl, sslTruststoreLocation, sslKeystoreLocation); + log.info( + "creating schema registry config using url: {}, keystore location: {} and truststore location: {}", + kafkaSchemaRegistryUrl, + sslTruststoreLocation, + sslKeystoreLocation); } return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props); @@ -70,4 +76,4 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr private String withNamespace(String configKey) { return SchemaRegistryClientConfig.CLIENT_NAMESPACE + configKey; } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java index 1e2962bbda7c8..004a7abb88489 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java @@ -3,7 +3,6 @@ import java.util.Map; import lombok.Data; - @Data public class SchemaRegistryConfig { private final Class<?> serializer; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java index 8596a14b7fc24..1589b33862bfe 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.lineage; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; -import javax.annotation.Nonnull; - import com.linkedin.metadata.service.LineageService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; @@ -12,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class LineageServiceFactory { @@ -26,4 +24,4 @@ public class LineageServiceFactory { protected LineageService getInstance() throws Exception { return new LineageService(this._javaEntityClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java index 
3a1f18692fdc6..ff48a922adf22 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.ownership; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.OwnershipTypeService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class OwnershipTypeServiceFactory { @@ -30,4 +29,4 @@ public class OwnershipTypeServiceFactory { protected OwnershipTypeService getInstance() throws Exception { return new OwnershipTypeService(_javaEntityClient, _authentication); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java index f98c5bd50467d..cf81cbf70d5eb 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.query; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.QueryService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class QueryServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java index 36b203f677c9c..dc68451c6fce1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java @@ -10,9 +10,9 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.recommendation.candidatesource.DomainsCandidateSource; import com.linkedin.metadata.recommendation.candidatesource.MostPopularSource; +import com.linkedin.metadata.recommendation.candidatesource.RecentlyEditedSource; import com.linkedin.metadata.recommendation.candidatesource.RecentlySearchedSource; import 
com.linkedin.metadata.recommendation.candidatesource.RecentlyViewedSource;
-import com.linkedin.metadata.recommendation.candidatesource.RecentlyEditedSource;
 import com.linkedin.metadata.recommendation.candidatesource.RecommendationSource;
 import com.linkedin.metadata.recommendation.candidatesource.TopPlatformsSource;
 import com.linkedin.metadata.recommendation.candidatesource.TopTagsSource;
@@ -26,10 +26,15 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({TopPlatformsCandidateSourceFactory.class, RecentlyEditedCandidateSourceFactory.class,
-    MostPopularCandidateSourceFactory.class, TopTagsCandidateSourceFactory.class, TopTermsCandidateSourceFactory.class, DomainsCandidateSourceFactory.class})
+@Import({
+  TopPlatformsCandidateSourceFactory.class,
+  RecentlyEditedCandidateSourceFactory.class,
+  MostPopularCandidateSourceFactory.class,
+  TopTagsCandidateSourceFactory.class,
+  TopTermsCandidateSourceFactory.class,
+  DomainsCandidateSourceFactory.class
+})
 public class RecommendationServiceFactory {
 
   @Autowired
@@ -69,11 +74,16 @@ public class RecommendationServiceFactory {
   protected RecommendationsService getInstance() {
     // TODO: Make this class-name pluggable to minimize merge conflict potential.
     // This is where you can add new recommendation modules.
-    final List<RecommendationSource> candidateSources = ImmutableList.of(
-        topPlatformsCandidateSource,
-        domainsCandidateSource,
-        recentlyViewedCandidateSource, recentlyEditedCandidateSource, _mostPopularCandidateSource,
-        topTagsCandidateSource, topTermsCandidateSource, recentlySearchedCandidateSource);
+    final List<RecommendationSource> candidateSources =
+        ImmutableList.of(
+            topPlatformsCandidateSource,
+            domainsCandidateSource,
+            recentlyViewedCandidateSource,
+            recentlyEditedCandidateSource,
+            _mostPopularCandidateSource,
+            topTagsCandidateSource,
+            topTermsCandidateSource,
+            recentlySearchedCandidateSource);
     return new RecommendationsService(candidateSources, new SimpleRecommendationRanker());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
index c266b3635b16f..f3be4db147399 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class MostPopularCandidateSourceFactory {
 
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
index 109cc8dbc82d1..ac227faf06c4c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class RecentlyEditedCandidateSourceFactory {
 
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java
index 5209f65a2ec63..05b6f974eedca 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({RestHighLevelClientFactory.class, IndexConventionFactory.class})
 public class RecentlySearchedCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
index aea40b4d8eb46..6f17846efc1cd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class RecentlyViewedCandidateSourceFactory {
 
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
index fc04bbcce31ee..ad241e7717545 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({EntityServiceFactory.class, EntitySearchServiceFactory.class})
 public class TopPlatformsCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java
index 857a788454c34..fe5c2d03d1907 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({EntitySearchServiceFactory.class})
 public class TopTagsCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java
index b8d50169e49ab..36c53936094ff 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({EntitySearchServiceFactory.class})
 public class TopTermsCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java
index c99d429e986b6..e4e7d04e311da 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java
@@ -2,27 +2,28 @@
 
 import com.linkedin.gms.factory.common.IndexConventionFactory;
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Value;
 import org.opensearch.client.RestHighLevelClient;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
-/**
- * Factory for components required for any services using elasticsearch
- */
+/** Factory for components required for any services using elasticsearch */
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, ElasticSearchBulkProcessorFactory.class,
-    ElasticSearchIndexBuilderFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  ElasticSearchBulkProcessorFactory.class,
+  ElasticSearchIndexBuilderFactory.class
+})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class BaseElasticSearchComponentsFactory {
   @lombok.Value
@@ -56,6 +57,7 @@ public static class BaseElasticSearchComponents {
   @Bean(name = "baseElasticSearchComponents")
   @Nonnull
   protected BaseElasticSearchComponents getInstance() {
-    return new BaseElasticSearchComponents(searchClient, indexConvention, bulkProcessor, indexBuilder, numRetries);
+    return new BaseElasticSearchComponents(
+        searchClient, indexConvention, bulkProcessor, indexBuilder, numRetries);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java
index 845c63c32e0fd..d2292b215e62a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java
@@ -1,8 +1,8 @@
 package com.linkedin.gms.factory.search;
 
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.EntitySearchService;
 import com.linkedin.metadata.search.client.CachingEntitySearchService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -13,7 +13,6 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class CachingEntitySearchServiceFactory {
@@ -22,8 +21,7 @@ public class CachingEntitySearchServiceFactory {
   @Qualifier("entitySearchService")
   private EntitySearchService entitySearchService;
 
-  @Autowired
-  private CacheManager cacheManager;
+  @Autowired private CacheManager cacheManager;
 
   @Value("${searchService.resultBatchSize}")
   private Integer batchSize;
@@ -36,9 +34,6 @@ public class CachingEntitySearchServiceFactory {
   @Nonnull
   protected CachingEntitySearchService getInstance() {
     return new CachingEntitySearchService(
-        cacheManager,
-        entitySearchService,
-        batchSize,
-        enableCache);
+        cacheManager, entitySearchService, batchSize, enableCache);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java
index 5deffdb01d247..64b1fcc2f5695 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java
@@ -1,10 +1,9 @@
 package com.linkedin.gms.factory.search;
 
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
+import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-
-import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.action.support.WriteRequest;
 import org.opensearch.client.RestHighLevelClient;
@@ -16,7 +15,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Slf4j
 @Configuration
 @Import({RestHighLevelClientFactory.class})
@@ -51,13 +49,13 @@ public class ElasticSearchBulkProcessorFactory {
   @Nonnull
   protected ESBulkProcessor getInstance() {
     return ESBulkProcessor.builder(searchClient)
-    .async(async)
-    .bulkFlushPeriod(bulkFlushPeriod)
-    .bulkRequestsLimit(bulkRequestsLimit)
-    .retryInterval(retryInterval)
-    .numRetries(numRetries)
-    .batchDelete(enableBatchDelete)
-    .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.valueOf(refreshPolicy))
-    .build();
+        .async(async)
+        .bulkFlushPeriod(bulkFlushPeriod)
+        .bulkRequestsLimit(bulkRequestsLimit)
+        .retryInterval(retryInterval)
+        .numRetries(numRetries)
+        .batchDelete(enableBatchDelete)
+        .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.valueOf(refreshPolicy))
+        .build();
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java
index b619ee9516dce..7bf04b467d205 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java
@@ -1,18 +1,23 @@
 package com.linkedin.gms.factory.search;
 
+import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN;
+
 import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
 import com.linkedin.gms.factory.common.GitVersionFactory;
 import com.linkedin.gms.factory.common.IndexConventionFactory;
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import com.linkedin.metadata.version.GitVersion;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-
-import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import org.opensearch.client.RestHighLevelClient;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -22,14 +27,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN;
-
-
 @Configuration
 @Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, GitVersionFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -66,30 +63,41 @@ public class ElasticSearchIndexBuilderFactory {
   @Bean(name = "elasticSearchIndexSettingsOverrides")
   @Nonnull
   protected Map<String, Map<String, String>> getIndexSettingsOverrides(
-          @Qualifier(INDEX_CONVENTION_BEAN) IndexConvention indexConvention) {
+      @Qualifier(INDEX_CONVENTION_BEAN) IndexConvention indexConvention) {
     return Stream.concat(
             parseIndexSettingsMap(indexSettingOverrides).entrySet().stream()
-                    .map(e -> Map.entry(indexConvention.getIndexName(e.getKey()), e.getValue())),
-            parseIndexSettingsMap(entityIndexSettingOverrides).entrySet().stream()
-                    .map(e -> Map.entry(indexConvention.getEntityIndexName(e.getKey()), e.getValue())))
-            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+                .map(e -> Map.entry(indexConvention.getIndexName(e.getKey()), e.getValue())),
+            parseIndexSettingsMap(entityIndexSettingOverrides).entrySet().stream()
+                .map(e -> Map.entry(indexConvention.getEntityIndexName(e.getKey()), e.getValue())))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
   }
 
   @Bean(name = "elasticSearchIndexBuilder")
   @Nonnull
   protected ESIndexBuilder getInstance(
-      @Qualifier("elasticSearchIndexSettingsOverrides") Map<String, Map<String, String>> overrides,
-      final ConfigurationProvider configurationProvider, final GitVersion gitVersion) {
-    return new ESIndexBuilder(searchClient, numShards, numReplicas, numRetries, refreshIntervalSeconds, overrides,
-        enableSettingsReindex, enableMappingsReindex, configurationProvider.getElasticSearch(), gitVersion);
+      @Qualifier("elasticSearchIndexSettingsOverrides") Map<String, Map<String, String>> overrides,
+      final ConfigurationProvider configurationProvider,
+      final GitVersion gitVersion) {
+    return new ESIndexBuilder(
+        searchClient,
+        numShards,
+        numReplicas,
+        numRetries,
+        refreshIntervalSeconds,
+        overrides,
+        enableSettingsReindex,
+        enableMappingsReindex,
+        configurationProvider.getElasticSearch(),
+        gitVersion);
   }
 
   @Nonnull
   private static Map<String, Map<String, String>> parseIndexSettingsMap(@Nullable String json) {
-    Optional<Map<String, Map<String, String>>> parseOpt = Optional.ofNullable(
-        new Gson().fromJson(json,
-            new TypeToken<Map<String, Map<String, String>>>() { }.getType()));
+    Optional<Map<String, Map<String, String>>> parseOpt =
+        Optional.ofNullable(
+            new Gson()
+                .fromJson(json, new TypeToken<Map<String, Map<String, String>>>() {}.getType()));
     return parseOpt.orElse(Map.of());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
index 6d8a62ac1fd18..2b6d495e4fe33 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
@@ -1,13 +1,12 @@
 package com.linkedin.gms.factory.search;
 
-import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
-import com.linkedin.metadata.config.search.SearchConfiguration;
-import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
+import com.linkedin.metadata.config.search.SearchConfiguration;
+import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.elasticsearch.ElasticSearchService;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
@@ -15,8 +14,9 @@
 import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO;
 import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO;
 import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import java.io.IOException;
 import javax.annotation.Nonnull;
-
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -25,9 +25,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-import java.io.IOException;
-
-
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -47,30 +44,47 @@ public class ElasticSearchServiceFactory {
   @Qualifier("settingsBuilder")
   private SettingsBuilder settingsBuilder;
 
-  @Autowired
-  private EntityIndexBuilders entityIndexBuilders;
+  @Autowired private EntityIndexBuilders entityIndexBuilders;
 
-  @Autowired
-  private ConfigurationProvider configurationProvider;
+  @Autowired private ConfigurationProvider configurationProvider;
 
   @Bean(name = "elasticSearchService")
   @Nonnull
-  protected ElasticSearchService getInstance(ConfigurationProvider configurationProvider) throws IOException {
+  protected ElasticSearchService getInstance(ConfigurationProvider configurationProvider)
+      throws IOException {
     log.info("Search configuration: {}", configurationProvider.getElasticSearch().getSearch());
 
-    ElasticSearchConfiguration elasticSearchConfiguration = configurationProvider.getElasticSearch();
+    ElasticSearchConfiguration elasticSearchConfiguration =
+        configurationProvider.getElasticSearch();
     SearchConfiguration searchConfiguration = elasticSearchConfiguration.getSearch();
-    CustomSearchConfiguration customSearchConfiguration = searchConfiguration.getCustom() == null ? null
+    CustomSearchConfiguration customSearchConfiguration =
+        searchConfiguration.getCustom() == null
+            ? null
             : searchConfiguration.getCustom().resolve(YAML_MAPPER);
 
     ESSearchDAO esSearchDAO =
-        new ESSearchDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
-            configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(),
-            elasticSearchConfiguration.getImplementation(), searchConfiguration, customSearchConfiguration);
-    return new ElasticSearchService(entityIndexBuilders, esSearchDAO,
-        new ESBrowseDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
-            searchConfiguration, customSearchConfiguration),
-        new ESWriteDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
-            components.getBulkProcessor(), components.getNumRetries()));
+        new ESSearchDAO(
+            entityRegistry,
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(),
+            elasticSearchConfiguration.getImplementation(),
+            searchConfiguration,
+            customSearchConfiguration);
+    return new ElasticSearchService(
+        entityIndexBuilders,
+        esSearchDAO,
+        new ESBrowseDAO(
+            entityRegistry,
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            searchConfiguration,
+            customSearchConfiguration),
+        new ESWriteDAO(
+            entityRegistry,
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            components.getBulkProcessor(),
+            components.getNumRetries()));
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java
index 6bb206ee3ad61..334194b95c162 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java
@@ -10,26 +10,28 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class EntityIndexBuildersFactory {
-    @Autowired
-    @Qualifier("baseElasticSearchComponents")
-    private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components;
-
-    @Autowired
-    @Qualifier("entityRegistry")
-    private EntityRegistry entityRegistry;
-
-    @Autowired
-    @Qualifier("settingsBuilder")
-    private SettingsBuilder settingsBuilder;
-
-
-    @Bean
-    protected EntityIndexBuilders entityIndexBuilders() {
-      return new EntityIndexBuilders(components.getIndexBuilder(), entityRegistry, components.getIndexConvention(), settingsBuilder);
-    }
-}
\ No newline at end of file
+  @Autowired
+  @Qualifier("baseElasticSearchComponents")
+  private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components;
+
+  @Autowired
+  @Qualifier("entityRegistry")
+  private EntityRegistry entityRegistry;
+
+  @Autowired
+  @Qualifier("settingsBuilder")
+  private SettingsBuilder settingsBuilder;
+
+  @Bean
+  protected EntityIndexBuilders entityIndexBuilders() {
+    return new EntityIndexBuilders(
+        components.getIndexBuilder(),
+        entityRegistry,
+        components.getIndexConvention(),
+        settingsBuilder);
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java
index 49dab31cca1d0..38fd27fb44024 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;
 
-
 @Configuration
 @Import({ElasticSearchServiceFactory.class})
 public class EntitySearchServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
index e2eef83bc6e3f..17103240c938b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
@@ -2,10 +2,10 @@
 
 import com.linkedin.gms.factory.common.GraphServiceFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.search.LineageSearchService;
 import com.linkedin.metadata.search.SearchService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.cache.CacheManager;
 import org.springframework.context.annotation.Bean;
@@ -14,7 +14,6 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @Import({GraphServiceFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -23,11 +22,17 @@ public class LineageSearchServiceFactory {
   @Bean(name = "relationshipSearchService")
   @Primary
   @Nonnull
-  protected LineageSearchService getInstance(CacheManager cacheManager, GraphService graphService,
-      SearchService searchService, ConfigurationProvider configurationProvider) {
+  protected LineageSearchService getInstance(
+      CacheManager cacheManager,
+      GraphService graphService,
+      SearchService searchService,
+      ConfigurationProvider configurationProvider) {
     boolean cacheEnabled = configurationProvider.getFeatureFlags().isLineageSearchCacheEnabled();
-    return new LineageSearchService(searchService, graphService,
-        cacheEnabled ? cacheManager.getCache("relationshipSearchService") : null, cacheEnabled,
-        configurationProvider.getCache().getSearch().getLineage());
+    return new LineageSearchService(
+        searchService,
+        graphService,
+        cacheEnabled ? cacheManager.getCache("relationshipSearchService") : null,
+        cacheEnabled,
+        configurationProvider.getCache().getSearch().getLineage());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java
index a186d2de770f3..9d9018bd31f07 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java
@@ -1,13 +1,12 @@
 package com.linkedin.gms.factory.search;
 
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class SearchDocumentTransformerFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java
index 64bb0218a0d71..1cb905665e489 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java
@@ -1,13 +1,13 @@
 package com.linkedin.gms.factory.search;
 
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.EntitySearchService;
 import com.linkedin.metadata.search.SearchService;
 import com.linkedin.metadata.search.cache.EntityDocCountCache;
 import com.linkedin.metadata.search.client.CachingEntitySearchService;
 import com.linkedin.metadata.search.ranker.SearchRanker;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -16,7 +16,6 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class SearchServiceFactory {
@@ -42,8 +41,10 @@ public class SearchServiceFactory {
   @Nonnull
   protected SearchService getInstance(ConfigurationProvider configurationProvider) {
     return new SearchService(
-        new EntityDocCountCache(entityRegistry, entitySearchService, configurationProvider.getCache()
-            .getHomepage().getEntityCounts()),
+        new EntityDocCountCache(
+            entityRegistry,
+            entitySearchService,
+            configurationProvider.getCache().getHomepage().getEntityCounts()),
         cachingEntitySearchService,
         searchRanker);
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java
index 840a370957706..ce1d6f12c58b4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.search;
 
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @Import(EntityRegistryFactory.class)
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java
index 1040edca30bfb..b010358bad81c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java
@@ -7,7 +7,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Primary;
 
-
 @Configuration
 public class SearchRankerFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java
index 60bcd9ea22be6..32ad2175c9052 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.search.views;
 
 import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
 import com.linkedin.metadata.service.ViewService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class ViewServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java
index a1cac07e3fb03..64093c54d0410 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java
@@ -7,7 +7,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Primary;
 
-
 @Configuration
 public class SecretServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java
index 2e22d43913493..f0d09a815628d 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.settings;
 
 import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
 import com.linkedin.metadata.service.SettingsService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class SettingsServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java
index 2610ebd3528cd..b735e490f583e 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java
@@ -1,5 +1,7 @@
 package com.linkedin.gms.factory.telemetry;
 
+import static com.linkedin.gms.factory.telemetry.TelemetryUtils.*;
+
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.datahub.graphql.analytics.service.AnalyticsService;
 import com.linkedin.datahub.graphql.generated.DateRange;
@@ -12,13 +14,11 @@
 import java.io.IOException;
 import java.util.Optional;
 import lombok.extern.slf4j.Slf4j;
-import org.opensearch.client.RestHighLevelClient;
 import org.joda.time.DateTime;
 import org.json.JSONObject;
+import org.opensearch.client.RestHighLevelClient;
 import org.springframework.scheduling.annotation.Scheduled;
 
-import static com.linkedin.gms.factory.telemetry.TelemetryUtils.*;
-
 @Slf4j
 public class DailyReport {
 
@@ -32,8 +32,12 @@ public class DailyReport {
   private MixpanelAPI mixpanel;
   private MessageBuilder mixpanelBuilder;
 
-  public DailyReport(IndexConvention indexConvention, RestHighLevelClient elasticClient,
-      ConfigurationProvider configurationProvider, EntityService entityService, GitVersion gitVersion) {
+  public DailyReport(
+      IndexConvention indexConvention,
+      RestHighLevelClient elasticClient,
+      ConfigurationProvider configurationProvider,
+      EntityService entityService,
+      GitVersion gitVersion) {
     this._indexConvention = indexConvention;
     this._elasticClient = elasticClient;
     this._configurationProvider = configurationProvider;
@@ -43,7 +47,10 @@ public DailyReport(IndexConvention indexConvention, RestHighLevelClient elasticC
     String clientId = getClientId(entityService);
 
     // initialize MixPanel instance and message builder
-    mixpanel = new MixpanelAPI("https://track.datahubproject.io/mp/track", "https://track.datahubproject.io/mp/engage");
+    mixpanel =
+        new MixpanelAPI(
+            "https://track.datahubproject.io/mp/track",
+            "https://track.datahubproject.io/mp/engage");
     mixpanelBuilder = new MessageBuilder(MIXPANEL_TOKEN);
 
     // set user-level properties
@@ -72,24 +79,48 @@ public void dailyReport() {
     DateTime lastWeek = endDate.minusWeeks(1);
     DateTime lastMonth = endDate.minusMonths(1);
 
-    DateRange dayRange = new DateRange(String.valueOf(yesterday.getMillis()), String.valueOf(endDate.getMillis()));
-    DateRange weekRange = new DateRange(String.valueOf(lastWeek.getMillis()), String.valueOf(endDate.getMillis()));
-    DateRange monthRange = new DateRange(String.valueOf(lastMonth.getMillis()), String.valueOf(endDate.getMillis()));
+    DateRange dayRange =
+        new DateRange(String.valueOf(yesterday.getMillis()), String.valueOf(endDate.getMillis()));
+    DateRange weekRange =
+        new DateRange(String.valueOf(lastWeek.getMillis()), String.valueOf(endDate.getMillis()));
+    DateRange monthRange =
+        new DateRange(String.valueOf(lastMonth.getMillis()), String.valueOf(endDate.getMillis()));
 
     int dailyActiveUsers =
-        analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(dayRange),
-            ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId"));
+        analyticsService.getHighlights(
+            analyticsService.getUsageIndexName(),
+            Optional.of(dayRange),
+            ImmutableMap.of(),
+            ImmutableMap.of(),
+            Optional.of("browserId"));
     int weeklyActiveUsers =
-        analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(weekRange),
-            ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId"));
+        analyticsService.getHighlights(
+            analyticsService.getUsageIndexName(),
+            Optional.of(weekRange),
+            ImmutableMap.of(),
+            ImmutableMap.of(),
+            Optional.of("browserId"));
     int monthlyActiveUsers =
-        analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(monthRange),
-            ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId"));
+        analyticsService.getHighlights(
+            analyticsService.getUsageIndexName(),
+            Optional.of(monthRange),
+            ImmutableMap.of(),
+            ImmutableMap.of(),
+            Optional.of("browserId"));
 
     // floor to nearest power of 10
-    dailyActiveUsers = dailyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(dailyActiveUsers) / Math.log(2)));
-    weeklyActiveUsers = weeklyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(weeklyActiveUsers) / Math.log(2)));
-    monthlyActiveUsers = monthlyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(monthlyActiveUsers) / Math.log(2)));
+    dailyActiveUsers =
+        dailyActiveUsers <= 0
+            ? 0
+            : (int) Math.pow(2, (int) (Math.log(dailyActiveUsers) / Math.log(2)));
+    weeklyActiveUsers =
+        weeklyActiveUsers <= 0
+            ? 0
+            : (int) Math.pow(2, (int) (Math.log(weeklyActiveUsers) / Math.log(2)));
+    monthlyActiveUsers =
+        monthlyActiveUsers <= 0
+            ? 0
+            : (int) Math.pow(2, (int) (Math.log(monthlyActiveUsers) / Math.log(2)));
 
     // set user-level properties
     JSONObject report = new JSONObject();
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java
index 8178ce1399aa3..b9330d5827419 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java
@@ -8,7 +8,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class MixpanelApiFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java
index 5385c5e81f804..f64766534469d 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java
@@ -8,10 +8,8 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
-
 public class MixpanelMessageBuilderFactory {
   private static final String MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a";
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java
index 7cdca996a8131..4986e705fd7b4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java
@@ -13,17 +13,20 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.scheduling.annotation.EnableScheduling;
 
-
 @Slf4j
 @Configuration
 @EnableScheduling
 public class ScheduledAnalyticsFactory {
 
-  @Bean
-  @ConditionalOnProperty("telemetry.enabledServer")
-  public DailyReport dailyReport(@Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient,
-      @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention,
-      ConfigurationProvider configurationProvider, EntityService entityService, GitVersion gitVersion) {
-    return new DailyReport(indexConvention, elasticClient, configurationProvider, entityService, gitVersion);
-  }
+  @Bean
+  @ConditionalOnProperty("telemetry.enabledServer")
+  public DailyReport dailyReport(
+      @Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient,
+      @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention,
+      ConfigurationProvider configurationProvider,
+      EntityService entityService,
+      GitVersion gitVersion) {
+    return new DailyReport(
+        indexConvention, elasticClient, configurationProvider, entityService, gitVersion);
+  }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java
index 3bbb542b2cf5a..748acb4a9499e 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java
@@ -1,45 +1,44 @@
 package com.linkedin.gms.factory.telemetry;
 
 import com.linkedin.common.AuditStamp;
-import com.linkedin.telemetry.TelemetryClientId;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.telemetry.TelemetryClientId;
 import java.util.UUID;
-
 import lombok.extern.slf4j.Slf4j;
-
 @Slf4j
 public final class TelemetryUtils {
-    public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId";
-    public static final String CLIENT_ID_ASPECT = "telemetryClientId";
-
-    private static String _clientId;
+  public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId";
+  public static final String CLIENT_ID_ASPECT = "telemetryClientId";
 
+  private static String _clientId;
 
-    public static String getClientId(EntityService entityService) {
-      if (_clientId == null) {
-        createClientIdIfNotPresent(entityService);
-        RecordTemplate clientIdTemplate = entityService.getLatestAspect(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT);
-        // Should always be present here from above, so no need for null check
-        _clientId = ((TelemetryClientId) clientIdTemplate).getClientId();
-      }
-      return _clientId;
+  public static String getClientId(EntityService entityService) {
+    if (_clientId == null) {
+      createClientIdIfNotPresent(entityService);
+      RecordTemplate clientIdTemplate =
+          entityService.getLatestAspect(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT);
+      // Should always be present here from above, so no need for null check
+      _clientId = ((TelemetryClientId) clientIdTemplate).getClientId();
     }
-
-    private static void createClientIdIfNotPresent(EntityService entityService) {
-      String uuid = UUID.randomUUID().toString();
-      TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid);
-      final AuditStamp clientIdStamp = new AuditStamp();
-      clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR));
-      clientIdStamp.setTime(System.currentTimeMillis());
-      entityService.ingestAspectIfNotPresent(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null);
-    }
-    private TelemetryUtils() {
-      throw new UnsupportedOperationException();
-    }
-
+    return _clientId;
+  }
+
+  private static void createClientIdIfNotPresent(EntityService entityService) {
+    String uuid = UUID.randomUUID().toString();
+    TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid);
+    final AuditStamp clientIdStamp = new AuditStamp();
+    clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR));
+    clientIdStamp.setTime(System.currentTimeMillis());
+    entityService.ingestAspectIfNotPresent(
+        UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null);
+  }
+
+  private TelemetryUtils() {
+    throw new UnsupportedOperationException();
+  }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java
index bb166af5501b3..4e858fb5cdefd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.telemetry;
 
 import com.datahub.telemetry.TrackingService;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.secret.SecretService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.version.GitVersion;
 import com.mixpanel.mixpanelapi.MessageBuilder;
 import com.mixpanel.mixpanelapi.MixpanelAPI;
@@ -15,19 +15,21 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class TrackingServiceFactory {
   @Autowired(required = false)
   @Qualifier("mixpanelApi")
   private MixpanelAPI _mixpanelAPI;
+
   @Autowired(required = false)
   @Qualifier("mixpanelMessageBuilder")
   private MessageBuilder _mixpanelMessageBuilder;
+
   @Autowired
   @Qualifier("dataHubSecretService")
   private SecretService _secretService;
+
   @Autowired
   @Qualifier("entityService")
   private EntityService _entityService;
@@ -40,7 +42,11 @@ public class TrackingServiceFactory {
   @ConditionalOnProperty("telemetry.enabledServer")
   @Scope("singleton")
   protected TrackingService getInstance() throws Exception {
-    return new TrackingService(this._mixpanelAPI, this._mixpanelMessageBuilder, this._secretService,
-        this._entityService, this._gitVersion);
+    return new TrackingService(
+        this._mixpanelAPI,
+        this._mixpanelMessageBuilder,
+        this._secretService,
+        this._entityService,
+        this._gitVersion);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java
index 89a7e7dd8d71a..f1b040ed78f86 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java
@@ -1,16 +1,18 @@
 package com.linkedin.gms.factory.timeline;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.entity.client.SystemRestliEntityClient;
 import com.linkedin.metadata.timeline.eventgenerator.AssertionRunEventChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.DataProcessInstanceRunEventChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.DatasetPropertiesChangeEventGenerator;
-import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermInfoChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.DeprecationChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.EditableSchemaMetadataChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.EntityKeyChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.GlobalTagsChangeEventGenerator;
+import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermInfoChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermsChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.InstitutionalMemoryChangeEventGenerator;
 import com.linkedin.metadata.timeline.eventgenerator.OwnershipChangeEventGenerator;
@@ -25,35 +27,38 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @Configuration
 public class EntityChangeEventGeneratorRegistryFactory {
 
-  @Autowired
-  ApplicationContext applicationContext;
+  @Autowired ApplicationContext applicationContext;
 
   @Bean(name = "entityChangeEventGeneratorRegistry")
   @DependsOn({"restliEntityClient", "systemAuthentication"})
   @Singleton
   @Nonnull
-  protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry() {
-    final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class);
+  protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry
+      entityChangeEventGeneratorRegistry() {
+    final SystemRestliEntityClient entityClient =
+        applicationContext.getBean(SystemRestliEntityClient.class);
     final Authentication systemAuthentication = applicationContext.getBean(Authentication.class);
 
-    final com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry registry =
-        new com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry();
+    final com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry
+        registry =
+            new com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry();
     registry.register(SCHEMA_METADATA_ASPECT_NAME, new SchemaMetadataChangeEventGenerator());
-    registry.register(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataChangeEventGenerator());
+    registry.register(
+        EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataChangeEventGenerator());
     registry.register(GLOBAL_TAGS_ASPECT_NAME, new GlobalTagsChangeEventGenerator());
     registry.register(GLOSSARY_TERMS_ASPECT_NAME, new GlossaryTermsChangeEventGenerator());
     registry.register(OWNERSHIP_ASPECT_NAME, new OwnershipChangeEventGenerator());
-    registry.register(INSTITUTIONAL_MEMORY_ASPECT_NAME, new InstitutionalMemoryChangeEventGenerator());
+    registry.register(
+        INSTITUTIONAL_MEMORY_ASPECT_NAME, new InstitutionalMemoryChangeEventGenerator());
     registry.register(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesChangeEventGenerator());
     registry.register(GLOSSARY_TERM_INFO_ASPECT_NAME, new GlossaryTermInfoChangeEventGenerator());
     registry.register(DOMAINS_ASPECT_NAME, new SingleDomainChangeEventGenerator());
     registry.register(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesChangeEventGenerator());
-    registry.register(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, new EditableDatasetPropertiesChangeEventGenerator());
+    registry.register(
+        EDITABLE_DATASET_PROPERTIES_ASPECT_NAME,
+        new EditableDatasetPropertiesChangeEventGenerator());
 
     // Entity Lifecycle Differs
     registry.register(DATASET_KEY_ASPECT_NAME, new EntityKeyChangeEventGenerator<>());
@@ -73,7 +78,8 @@ protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGenerat
     registry.register(ASSERTION_RUN_EVENT_ASPECT_NAME, new AssertionRunEventChangeEventGenerator());
 
     // Data Process Instance differs
-    registry.register(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME,
+    registry.register(
+        DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME,
         new DataProcessInstanceRunEventChangeEventGenerator(entityClient));
 
     // TODO: Add ML models.
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java
index baa22d401387f..bc121da4e43dd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java
@@ -1,19 +1,17 @@
 package com.linkedin.gms.factory.timeline;
 
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.entity.AspectDao;
 import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.timeline.TimelineService;
 import com.linkedin.metadata.timeline.TimelineServiceImpl;
+import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;
 import org.springframework.context.annotation.PropertySource;
 
-import javax.annotation.Nonnull;
-
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class TimelineServiceFactory {
@@ -21,7 +19,8 @@ public class TimelineServiceFactory {
   @Bean(name = "timelineService")
   @DependsOn({"entityAspectDao", "entityService", "entityRegistry"})
   @Nonnull
-  protected TimelineService timelineService(@Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry) {
+  protected TimelineService timelineService(
+      @Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry) {
     return new TimelineServiceImpl(aspectDao, entityRegistry);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java
index e3cc772f21c40..bba82bb5d0569 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java
@@ -2,8 +2,8 @@
 
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
 import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService;
 import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders;
 import javax.annotation.Nonnull;
@@ -14,7 +14,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Import({BaseElasticSearchComponentsFactory.class, EntityRegistryFactory.class})
@@ -30,8 +29,13 @@ public class ElasticSearchTimeseriesAspectServiceFactory {
   @Bean(name = "elasticSearchTimeseriesAspectService")
   @Nonnull
   protected ElasticSearchTimeseriesAspectService getInstance() {
-    return new ElasticSearchTimeseriesAspectService(components.getSearchClient(), components.getIndexConvention(),
-        new TimeseriesAspectIndexBuilders(components.getIndexBuilder(), entityRegistry,
-            components.getIndexConvention()), entityRegistry, components.getBulkProcessor(), components.getNumRetries());
+    return new ElasticSearchTimeseriesAspectService(
+        components.getSearchClient(),
+        components.getIndexConvention(),
+        new TimeseriesAspectIndexBuilders(
+            components.getIndexBuilder(), entityRegistry, components.getIndexConvention()),
+        entityRegistry,
+        components.getBulkProcessor(),
+        components.getNumRetries());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java
index 76090770ace11..7d4afa661aba0 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;
 
-
 @Configuration
 @Import({ElasticSearchTimeseriesAspectServiceFactory.class})
 public class TimeseriesAspectServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java
index d2bd89de8767a..03e066a912e44 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java
@@ -2,12 +2,14 @@
 
 import com.datahub.authentication.Authentication;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.restli.DefaultRestliClientFactory;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
 import com.linkedin.r2.transport.http.client.HttpClientFactory;
 import com.linkedin.restli.client.Client;
 import com.linkedin.usage.UsageClient;
+import java.util.HashMap;
+import java.util.Map;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
@@ -15,10 +17,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
-import java.util.HashMap;
-import java.util.Map;
-
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class UsageClientFactory {
@@ -49,13 +47,19 @@ public class UsageClientFactory {
   private ConfigurationProvider configurationProvider;
 
   @Bean("usageClient")
-  public UsageClient getUsageClient(@Qualifier("systemAuthentication") final Authentication systemAuthentication) {
+  public UsageClient getUsageClient(
+      @Qualifier("systemAuthentication") final Authentication systemAuthentication) {
     Map<String, String> params = new HashMap<>();
    params.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, String.valueOf(timeoutMs));
-    Client restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params);
-    return new UsageClient(restClient, new ExponentialBackoff(retryInterval), numRetries, systemAuthentication,
-        configurationProvider.getCache().getClient().getUsageClient());
+    Client restClient =
+        DefaultRestliClientFactory.getRestLiClient(
+            gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params);
+    return new UsageClient(
+        restClient,
+        new ExponentialBackoff(retryInterval),
+        numRetries,
+        systemAuthentication,
+        configurationProvider.getCache().getClient().getUsageClient());
   }
 }
-
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java
index 811ea84bc7240..2d1b79fdace48 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java
@@ -7,10 +7,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;
 
-
-/**
- * Responsible for coordinating boot-time logic.
- */
+/** Responsible for coordinating boot-time logic. */
 @Slf4j
 @Component
 public class BootstrapManager {
@@ -30,22 +27,39 @@ public void start() {
     for (int i = 0; i < stepsToExecute.size(); i++) {
       final BootstrapStep step = stepsToExecute.get(i);
       if (step.getExecutionMode() == BootstrapStep.ExecutionMode.BLOCKING) {
-        log.info("Executing bootstrap step {}/{} with name {}...", i + 1, stepsToExecute.size(), step.name());
+        log.info(
+            "Executing bootstrap step {}/{} with name {}...",
+            i + 1,
+            stepsToExecute.size(),
+            step.name());
         try {
           step.execute();
         } catch (Exception e) {
-          log.error(String.format("Caught exception while executing bootstrap step %s. Exiting...", step.name()), e);
+          log.error(
+              String.format(
+                  "Caught exception while executing bootstrap step %s. Exiting...", step.name()),
+              e);
           System.exit(1);
         }
       } else { // Async
-        log.info("Starting asynchronous bootstrap step {}/{} with name {}...", i + 1, stepsToExecute.size(), step.name());
-        CompletableFuture.runAsync(() -> {
-          try {
-            step.execute();
-          } catch (Exception e) {
-            log.error(String.format("Caught exception while executing bootstrap step %s. Continuing...", step.name()), e);
-          }
-        }, _asyncExecutor);
+        log.info(
+            "Starting asynchronous bootstrap step {}/{} with name {}...",
+            i + 1,
+            stepsToExecute.size(),
+            step.name());
+        CompletableFuture.runAsync(
+            () -> {
+              try {
+                step.execute();
+              } catch (Exception e) {
+                log.error(
+                    String.format(
+                        "Caught exception while executing bootstrap step %s. Continuing...",
+                        step.name()),
+                    e);
+              }
+            },
+            _asyncExecutor);
       }
     }
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java
index 876a0871fa4cb..dc82fc4907edc 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java
@@ -10,29 +10,19 @@
 import com.linkedin.metadata.utils.GenericRecordUtils;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.upgrade.DataHubUpgradeResult;
-
-import javax.annotation.Nonnull;
 import java.net.URISyntaxException;
+import javax.annotation.Nonnull;
 
-
-/**
- * A single step in the Bootstrap process.
- */
+/** A single step in the Bootstrap process. */
 public interface BootstrapStep {
 
-  /**
-   * A human-readable name for the boot step.
-   */
+  /** A human-readable name for the boot step. */
   String name();
 
-  /**
-   * Execute a boot-time step, or throw an exception on failure.
-   */
+  /** Execute a boot-time step, or throw an exception on failure. */
   void execute() throws Exception;
 
-  /**
-   * Return the execution mode of this step
-   */
+  /** Return the execution mode of this step */
   @Nonnull
   default ExecutionMode getExecutionMode() {
     return ExecutionMode.BLOCKING;
@@ -46,16 +36,17 @@ enum ExecutionMode {
   }
 
   static Urn getUpgradeUrn(String upgradeId) {
-    return EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(upgradeId),
-        Constants.DATA_HUB_UPGRADE_ENTITY_NAME);
+    return EntityKeyUtils.convertEntityKeyToUrn(
+        new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME);
   }
 
   static void setUpgradeResult(Urn urn, EntityService entityService) throws URISyntaxException {
-    final AuditStamp auditStamp = new AuditStamp()
+    final AuditStamp auditStamp =
+        new AuditStamp()
             .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
             .setTime(System.currentTimeMillis());
-    final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult()
-        .setTimestampMs(System.currentTimeMillis());
+    final DataHubUpgradeResult upgradeResult =
+        new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis());
 
     final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal();
     upgradeProposal.setEntityUrn(urn);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java
index 032b934a7ba87..801a902b7f835 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java
@@ -16,24 +16,27 @@
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
 import org.springframework.context.event.ContextRefreshedEvent;
 import org.springframework.context.event.EventListener;
 import org.springframework.stereotype.Component;
 import org.springframework.web.context.WebApplicationContext;
-import org.springframework.context.annotation.Configuration;
-
-
-/**
- * Responsible for coordinating starting steps that happen before the application starts up.
- */ +/** Responsible for coordinating starting steps that happen before the application starts up. */ @Configuration @Slf4j @Component public class OnBootApplicationListener { - private static final Set<Integer> ACCEPTED_HTTP_CODES = Set.of(HttpStatus.SC_OK, HttpStatus.SC_MOVED_PERMANENTLY, - HttpStatus.SC_MOVED_TEMPORARILY, HttpStatus.SC_FORBIDDEN, HttpStatus.SC_UNAUTHORIZED); + private static final Set<Integer> ACCEPTED_HTTP_CODES = + Set.of( + HttpStatus.SC_OK, + HttpStatus.SC_MOVED_PERMANENTLY, + HttpStatus.SC_MOVED_TEMPORARILY, + HttpStatus.SC_FORBIDDEN, + HttpStatus.SC_UNAUTHORIZED); - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final CloseableHttpClient httpClient = HttpClients.createDefault(); @@ -52,8 +55,10 @@ public class OnBootApplicationListener { @EventListener(ContextRefreshedEvent.class) public void onApplicationEvent(@Nonnull ContextRefreshedEvent event) { - log.warn("OnBootApplicationListener context refreshed! {} event: {}", - ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId()), event); + log.warn( + "OnBootApplicationListener context refreshed! {} event: {}", + ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId()), + event); String schemaRegistryType = provider.getKafka().getSchemaRegistry().getType(); if (ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId())) { if (InternalSchemaRegistryFactory.TYPE.equals(schemaRegistryType)) { @@ -66,29 +71,31 @@ public void onApplicationEvent(@Nonnull ContextRefreshedEvent event) { public Runnable isSchemaRegistryAPIServletReady() { return () -> { - final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); - int timeouts = _servletsWaitTimeout; - boolean openAPIServeletReady = false; - while (!openAPIServeletReady && timeouts > 0) { - try { - log.info("Sleeping for 1 second"); - Thread.sleep(1000); - StatusLine statusLine = httpClient.execute(request).getStatusLine(); - if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { - log.info("Connected! Authentication not tested."); - openAPIServeletReady = true; - } - } catch (IOException | InterruptedException e) { - log.info("Failed to connect to open servlet: {}", e.getMessage()); + final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); + int timeouts = _servletsWaitTimeout; + boolean openAPIServeletReady = false; + while (!openAPIServeletReady && timeouts > 0) { + try { + log.info("Sleeping for 1 second"); + Thread.sleep(1000); + StatusLine statusLine = httpClient.execute(request).getStatusLine(); + if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { + log.info("Connected! 
Authentication not tested."); + openAPIServeletReady = true; } - timeouts--; + } catch (IOException | InterruptedException e) { + log.info("Failed to connect to open servlet: {}", e.getMessage()); } - if (!openAPIServeletReady) { - log.error("Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", timeouts); - System.exit(1); - } else { + timeouts--; + } + if (!openAPIServeletReady) { + log.error( + "Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", + timeouts); + System.exit(1); + } else { _bootstrapManager.start(); - } + } }; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java index dbbcf3a139bf1..9ccb2c3f650bd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java @@ -17,7 +17,6 @@ import java.util.Collections; import lombok.extern.slf4j.Slf4j; - @Slf4j public abstract class UpgradeStep implements BootstrapStep { @@ -30,8 +29,9 @@ public UpgradeStep(EntityService entityService, String version, String upgradeId this._entityService = entityService; this._version = version; this._upgradeId = upgradeId; - this._upgradeUrn = EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(upgradeId), - Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + this._upgradeUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } @Override @@ -47,7 +47,8 @@ public void execute() throws Exception { upgrade(); ingestUpgradeResultAspect(); } catch (Exception e) { - String errorMessage = String.format("Error when running %s for version %s", _upgradeId, _version); + String errorMessage = + String.format("Error when running %s for version %s", _upgradeId, _version); cleanUpgradeAfterError(e, errorMessage); throw new RuntimeException(errorMessage, e); } @@ -62,18 +63,29 @@ public String name() { private boolean hasUpgradeRan() { try { - EntityResponse response = _entityService.getEntityV2(Constants.DATA_HUB_UPGRADE_ENTITY_NAME, _upgradeUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); - - if (response != null && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { - DataMap dataMap = response.getAspects().get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME).getValue().data(); + EntityResponse response = + _entityService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + _upgradeUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); + + if (response != null + && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { + DataMap dataMap = + response + .getAspects() + .get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) + .getValue() + .data(); DataHubUpgradeRequest request = new DataHubUpgradeRequest(dataMap); if (request.hasVersion() && request.getVersion().equals(_version)) { return true; } } } catch (Exception e) { - log.error("Error when checking to see if datahubUpgrade entity exists. Commencing with upgrade...", e); + log.error( + "Error when checking to see if datahubUpgrade entity exists. 
Commencing with upgrade...", + e); return false; } return false; @@ -81,7 +93,9 @@ private boolean hasUpgradeRan() { private void ingestUpgradeRequestAspect() throws URISyntaxException { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final DataHubUpgradeRequest upgradeRequest = new DataHubUpgradeRequest().setTimestampMs(System.currentTimeMillis()).setVersion(_version); @@ -97,8 +111,11 @@ private void ingestUpgradeRequestAspect() throws URISyntaxException { private void ingestUpgradeResultAspect() throws URISyntaxException { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(_upgradeUrn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java index 4aed7791470da..8b0c72c4c91d5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.boot.dependencies; -/** - * Empty interface for passing named bean references to bootstrap steps - */ +/** Empty interface for passing named bean references to bootstrap steps */ public interface BootstrapDependency { /** * Execute any dependent methods, avoids increasing module dependencies + * * @return true if the dependency has successfully executed its expected methods, false otherwise */ boolean waitForBootstrap(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index c4e6c941303c8..70fa91ae61861 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -31,7 +31,6 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.transformer.SearchDocumentTransformer; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; @@ -44,10 +43,13 @@ import org.springframework.context.annotation.Scope; import org.springframework.core.io.Resource; - @Configuration -@Import({EntityServiceFactory.class, EntityRegistryFactory.class, EntitySearchServiceFactory.class, - SearchDocumentTransformerFactory.class}) +@Import({ + EntityServiceFactory.class, + EntityRegistryFactory.class, + EntitySearchServiceFactory.class, + SearchDocumentTransformerFactory.class +}) public class 
BootstrapManagerFactory { @Autowired @@ -82,8 +84,7 @@ public class BootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -103,9 +104,15 @@ public class BootstrapManagerFactory { protected BootstrapManager createInstance() { final IngestRootUserStep ingestRootUserStep = new IngestRootUserStep(_entityService); final IngestPoliciesStep ingestPoliciesStep = - new IngestPoliciesStep(_entityRegistry, _entityService, _entitySearchService, _searchDocumentTransformer, _policiesResource); + new IngestPoliciesStep( + _entityRegistry, + _entityService, + _entitySearchService, + _searchDocumentTransformer, + _policiesResource); final IngestRolesStep ingestRolesStep = new IngestRolesStep(_entityService, _entityRegistry); - final IngestDataPlatformsStep ingestDataPlatformsStep = new IngestDataPlatformsStep(_entityService); + final IngestDataPlatformsStep ingestDataPlatformsStep = + new IngestDataPlatformsStep(_entityService); final IngestDataPlatformInstancesStep ingestDataPlatformInstancesStep = new IngestDataPlatformInstancesStep(_entityService, _migrationsDao); final RestoreGlossaryIndices restoreGlossaryIndicesStep = @@ -114,28 +121,34 @@ protected BootstrapManager createInstance() { new IndexDataPlatformsStep(_entityService, _entitySearchService, _entityRegistry); final RestoreDbtSiblingsIndices restoreDbtSiblingsIndices = new RestoreDbtSiblingsIndices(_entityService, _entityRegistry); - final RemoveClientIdAspectStep removeClientIdAspectStep = new RemoveClientIdAspectStep(_entityService); - final RestoreColumnLineageIndices restoreColumnLineageIndices = new RestoreColumnLineageIndices(_entityService, _entityRegistry); - final IngestDefaultGlobalSettingsStep ingestSettingsStep = new IngestDefaultGlobalSettingsStep(_entityService); - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); - final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); - - final List<BootstrapStep> finalSteps = new ArrayList<>(ImmutableList.of( - waitForSystemUpdateStep, - ingestRootUserStep, - ingestPoliciesStep, - ingestRolesStep, - ingestDataPlatformsStep, - ingestDataPlatformInstancesStep, - _ingestRetentionPoliciesStep, - ingestOwnershipTypesStep, - ingestSettingsStep, - restoreGlossaryIndicesStep, - removeClientIdAspectStep, - restoreDbtSiblingsIndices, - indexDataPlatformsStep, - restoreColumnLineageIndices)); + final RemoveClientIdAspectStep removeClientIdAspectStep = + new RemoveClientIdAspectStep(_entityService); + final RestoreColumnLineageIndices restoreColumnLineageIndices = + new RestoreColumnLineageIndices(_entityService, _entityRegistry); + final IngestDefaultGlobalSettingsStep ingestSettingsStep = + new IngestDefaultGlobalSettingsStep(_entityService); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); + final IngestOwnershipTypesStep ingestOwnershipTypesStep = + new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); + + final List<BootstrapStep> finalSteps = + new ArrayList<>( + ImmutableList.of( + waitForSystemUpdateStep, + 
ingestRootUserStep, + ingestPoliciesStep, + ingestRolesStep, + ingestDataPlatformsStep, + ingestDataPlatformInstancesStep, + _ingestRetentionPoliciesStep, + ingestOwnershipTypesStep, + ingestSettingsStep, + restoreGlossaryIndicesStep, + removeClientIdAspectStep, + restoreDbtSiblingsIndices, + indexDataPlatformsStep, + restoreColumnLineageIndices)); if (_upgradeDefaultBrowsePathsEnabled) { finalSteps.add(new UpgradeDefaultBrowsePathsStep(_entityService)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java index e038cb230c458..2436938c6c026 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java @@ -1,10 +1,10 @@ package com.linkedin.metadata.boot.factories; import com.linkedin.gms.factory.entity.RetentionServiceFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.boot.steps.IngestRetentionPoliciesStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @Import({RetentionServiceFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -42,6 +41,11 @@ public class IngestRetentionPoliciesStepFactory { @Scope("singleton") @Nonnull protected IngestRetentionPoliciesStep createInstance() { - return new IngestRetentionPoliciesStep(_retentionService, _entityService, _enableRetention, _applyOnBootstrap, _pluginRegistryPath); + return new IngestRetentionPoliciesStep( + _retentionService, + _entityService, + _enableRetention, + _applyOnBootstrap, + _pluginRegistryPath); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java index 11d12072e12b7..263cc335a8a40 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java @@ -7,11 +7,9 @@ import com.linkedin.metadata.version.GitVersion; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.Topics; - import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; - import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.consumer.Consumer; @@ -27,7 +25,8 @@ import org.springframework.kafka.listener.MessageListenerContainer; import org.springframework.stereotype.Component; -// We don't disable this on GMS since we want GMS to also wait until the system is ready to read in case of +// We don't disable this on GMS since we want GMS to also wait until 
the system is ready to read in +// case of // backwards incompatible query logic dependent on system updates. @Component("dataHubUpgradeKafkaListener") @Slf4j @@ -36,14 +35,17 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap private final KafkaListenerEndpointRegistry registry; - private static final String CONSUMER_GROUP = "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}"; + private static final String CONSUMER_GROUP = + "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}"; private static final String SUFFIX = "temp"; - public static final String TOPIC_NAME = "${DATAHUB_UPGRADE_HISTORY_TOPIC_NAME:" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME + "}"; + public static final String TOPIC_NAME = + "${DATAHUB_UPGRADE_HISTORY_TOPIC_NAME:" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME + "}"; private final DefaultKafkaConsumerFactory<String, GenericRecord> _defaultKafkaConsumerFactory; @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") private String revision; + private final GitVersion _gitVersion; private final ConfigurationProvider _configurationProvider; @@ -53,35 +55,48 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap @Value(TOPIC_NAME) private String topicName; - private final static AtomicBoolean IS_UPDATED = new AtomicBoolean(false); + private static final AtomicBoolean IS_UPDATED = new AtomicBoolean(false); - public DataHubUpgradeKafkaListener(KafkaListenerEndpointRegistry registry, - @Qualifier("duheKafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory, - GitVersion gitVersion, - ConfigurationProvider configurationProvider) { + public DataHubUpgradeKafkaListener( + KafkaListenerEndpointRegistry registry, + @Qualifier("duheKafkaConsumerFactory") + DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory, + GitVersion gitVersion, + ConfigurationProvider configurationProvider) { this.registry = registry; this._defaultKafkaConsumerFactory = defaultKafkaConsumerFactory; this._gitVersion = gitVersion; this._configurationProvider = configurationProvider; } - // Constructs a consumer to read determine final offset to assign, prevents re-reading whole topic to get the latest version + // Constructs a consumer to read determine final offset to assign, prevents re-reading whole topic + // to get the latest version @Override - public void onPartitionsAssigned(Map<TopicPartition, Long> assignments, ConsumerSeekCallback callback) { + public void onPartitionsAssigned( + Map<TopicPartition, Long> assignments, ConsumerSeekCallback callback) { try (Consumer<String, GenericRecord> kafkaConsumer = _defaultKafkaConsumerFactory.createConsumer(consumerGroup, SUFFIX)) { final Map<TopicPartition, Long> offsetMap = kafkaConsumer.endOffsets(assignments.keySet()); assignments.entrySet().stream() .filter(entry -> topicName.equals(entry.getKey().topic())) - .forEach(entry -> { - log.info("Partition: {} Current Offset: {}", entry.getKey(), offsetMap.get(entry.getKey())); - long newOffset = offsetMap.get(entry.getKey()) - 1; - callback.seek(entry.getKey().topic(), entry.getKey().partition(), Math.max(0, newOffset)); - }); + .forEach( + entry -> { + log.info( + "Partition: {} Current Offset: {}", + entry.getKey(), + offsetMap.get(entry.getKey())); + long newOffset = offsetMap.get(entry.getKey()) - 1; + callback.seek( + entry.getKey().topic(), entry.getKey().partition(), Math.max(0, newOffset)); + }); } } - 
@KafkaListener(id = CONSUMER_GROUP, topics = {TOPIC_NAME}, containerFactory = "duheKafkaEventConsumer", concurrency = "1") + @KafkaListener( + id = CONSUMER_GROUP, + topics = {TOPIC_NAME}, + containerFactory = "duheKafkaEventConsumer", + concurrency = "1") public void checkSystemVersion(final ConsumerRecord<String, GenericRecord> consumerRecord) { final GenericRecord record = consumerRecord.value(); final String expectedVersion = String.format("%s-%s", _gitVersion.getVersion(), revision); @@ -96,7 +111,9 @@ public void checkSystemVersion(final ConsumerRecord<String, GenericRecord> consu log.warn("Wait for system update is disabled. Proceeding with startup."); IS_UPDATED.getAndSet(true); } else { - log.warn("System version is not up to date: {}. Waiting for datahub-upgrade to complete...", expectedVersion); + log.warn( + "System version is not up to date: {}. Waiting for datahub-upgrade to complete...", + expectedVersion); } } catch (Exception e) { @@ -113,15 +130,19 @@ public void waitForUpdate() { IS_UPDATED.getAndSet(true); } int maxBackOffs = Integer.parseInt(_configurationProvider.getSystemUpdate().getMaxBackOffs()); - long initialBackOffMs = Long.parseLong(_configurationProvider.getSystemUpdate().getInitialBackOffMs()); - int backOffFactor = Integer.parseInt(_configurationProvider.getSystemUpdate().getBackOffFactor()); + long initialBackOffMs = + Long.parseLong(_configurationProvider.getSystemUpdate().getInitialBackOffMs()); + int backOffFactor = + Integer.parseInt(_configurationProvider.getSystemUpdate().getBackOffFactor()); long backOffMs = initialBackOffMs; for (int i = 0; i < maxBackOffs; i++) { if (IS_UPDATED.get()) { log.debug("Finished waiting for updated indices."); try { - log.info("Containers: {}", registry.getListenerContainers().stream() + log.info( + "Containers: {}", + registry.getListenerContainers().stream() .map(MessageListenerContainer::getListenerId) .collect(Collectors.toList())); registry.getListenerContainer(consumerGroup).stop(); @@ -142,8 +163,9 @@ public void waitForUpdate() { if (!IS_UPDATED.get()) { - throw new IllegalStateException("Indices are not updated after exponential backoff." - + " Please try restarting and consider increasing back off settings."); + throw new IllegalStateException( + "Indices are not updated after exponential backoff." 
+ + " Please try restarting and consider increasing back off settings."); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java index 408b212d52f48..e631f776abd08 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.kafka; +import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; +import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; + import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; @@ -7,61 +10,55 @@ import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroDeserializer; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.util.Map; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; -import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; - -/** - * Used for early bootstrap to avoid contact with not yet existing schema registry - */ +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ @Slf4j public class MockDUHEDeserializer extends KafkaAvroDeserializer { - private String topicName; + private String topicName; - public MockDUHEDeserializer() { - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + public MockDUHEDeserializer() { + this.schemaRegistry = buildMockSchemaRegistryClient(); + } - public MockDUHEDeserializer(SchemaRegistryClient client) { - super(client); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + public MockDUHEDeserializer(SchemaRegistryClient client) { + super(client); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } - public MockDUHEDeserializer(SchemaRegistryClient client, Map<String, ?> props) { - super(client, props); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + public MockDUHEDeserializer(SchemaRegistryClient client, Map<String, ?> props) { + super(client, props); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } - @Override - public void configure(Map<String, ?> configs, boolean isKey) { - super.configure(configs, isKey); - topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); - } + @Override + public void configure(Map<String, ?> configs, boolean isKey) { + super.configure(configs, isKey); + topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); + } - private MockSchemaRegistryClient buildMockSchemaRegistryClient() { - MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); - try { - schemaRegistry.register(topicToSubjectName(topicName), - new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); - return schemaRegistry; - } catch (IOException | RestClientException e) { - throw new RuntimeException(e); - } + private MockSchemaRegistryClient buildMockSchemaRegistryClient() { + MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); + try { + 
schemaRegistry.register( + topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); + return schemaRegistry; + } catch (IOException | RestClientException e) { + throw new RuntimeException(e); } + } - public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { - /** - * Previously used topics can have schema ids > 1 which fully match - * however we are replacing that registry so force schema id to 1 - */ - @Override - public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { - return super.getSchemaById(1); - } + public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { + /** + * Previously used topics can have schema ids > 1 which fully match however we are replacing + * that registry so force schema id to 1 + */ + @Override + public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { + return super.getSchemaById(1); } + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java index 1421f952289b3..36fe514d5536f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java @@ -1,60 +1,57 @@ package com.linkedin.metadata.boot.kafka; +import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; + import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroSerializer; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.util.Map; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; - -/** - * Used for early bootstrap to avoid contact with not yet existing schema registry - */ +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ @Slf4j public class MockDUHESerializer extends KafkaAvroSerializer { - private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; - - private String topicName; - - public MockDUHESerializer() { - this.schemaRegistry = buildMockSchemaRegistryClient(); + private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; + + private String topicName; + + public MockDUHESerializer() { + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + public MockDUHESerializer(SchemaRegistryClient client) { + super(client); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + public MockDUHESerializer(SchemaRegistryClient client, Map<String, ?> props) { + super(client, props); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + @Override + public void configure(Map<String, ?> configs, boolean isKey) { + super.configure(configs, isKey); + topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); + } + + private MockSchemaRegistryClient buildMockSchemaRegistryClient() { + MockSchemaRegistryClient schemaRegistry = new 
MockSchemaRegistryClient(); + try { + schemaRegistry.register( + topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); + return schemaRegistry; + } catch (IOException | RestClientException e) { + throw new RuntimeException(e); } + } - public MockDUHESerializer(SchemaRegistryClient client) { - super(client); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - public MockDUHESerializer(SchemaRegistryClient client, Map<String, ?> props) { - super(client, props); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - @Override - public void configure(Map<String, ?> configs, boolean isKey) { - super.configure(configs, isKey); - topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); - } - - private MockSchemaRegistryClient buildMockSchemaRegistryClient() { - MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient(); - try { - schemaRegistry.register(topicToSubjectName(topicName), - new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); - return schemaRegistry; - } catch (IOException | RestClientException e) { - throw new RuntimeException(e); - } - } - - public static String topicToSubjectName(String topicName) { - return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; - } + public static String topicToSubjectName(String topicName) { + return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index ea9ac57778550..770c0d2840fe8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -21,28 +23,24 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class BackfillBrowsePathsV2Step extends UpgradeStep { - private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); + private static final Set<String> ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); private static final String VERSION = "2"; private static final String UPGRADE_ID = "backfill-default-browse-paths-v2-step"; private static final Integer 
BATCH_SIZE = 5000; @@ -63,14 +61,18 @@ public ExecutionMode getExecutionMode() { @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -78,7 +80,7 @@ public void upgrade() throws Exception { } private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, String scrollId) - throws Exception { + throws Exception { // Condition: has `browsePaths` AND does NOT have `browsePathV2` Criterion missingBrowsePathV2 = new Criterion(); @@ -102,16 +104,9 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S Filter filter = new Filter(); filter.setOr(conjunctiveCriterionArray); - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - "5m", - BATCH_SIZE, - null - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), "*", filter, null, scrollId, "5m", BATCH_SIZE, null); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -121,7 +116,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -136,12 +135,9 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - false - ); + _entityService.ingestProposal(proposal, auditStamp, false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java index b26eb67465c0d..c46cfdd61158d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java @@ -13,7 +13,6 @@ import 
com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.search.EntitySearchService; - import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -25,7 +24,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IndexDataPlatformsStep extends UpgradeStep { private static final String VERSION = "1"; @@ -35,7 +33,9 @@ public class IndexDataPlatformsStep extends UpgradeStep { private final EntitySearchService _entitySearchService; private final EntityRegistry _entityRegistry; - public IndexDataPlatformsStep(EntityService entityService, EntitySearchService entitySearchService, + public IndexDataPlatformsStep( + EntityService entityService, + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entitySearchService = entitySearchService; @@ -44,11 +44,15 @@ public IndexDataPlatformsStep(EntityService entityService, EntitySearchService e @Override public void upgrade() throws Exception { - final AspectSpec dataPlatformSpec = _entityRegistry.getEntitySpec(Constants.DATA_PLATFORM_ENTITY_NAME) - .getAspectSpec(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); + final AspectSpec dataPlatformSpec = + _entityRegistry + .getEntitySpec(Constants.DATA_PLATFORM_ENTITY_NAME) + .getAspectSpec(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); getAndReIndexDataPlatforms(auditStamp, dataPlatformSpec); @@ -61,8 +65,8 @@ public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPlatformInfoAspectSpec) - throws Exception { + private int getAndReIndexDataPlatforms( + AuditStamp auditStamp, AspectSpec dataPlatformInfoAspectSpec) throws Exception { ListUrnsResult listResult = _entityService.listUrns(Constants.DATA_PLATFORM_ENTITY_NAME, 0, BATCH_SIZE); @@ -73,9 +77,10 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla } final Map<Urn, EntityResponse> dataPlatformInfoResponses = - _entityService.getEntitiesV2(Constants.DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), - Collections.singleton(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) - ); + _entityService.getEntitiesV2( + Constants.DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + Collections.singleton(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); // Loop over Data platforms and produce changelog List<Future<?>> futures = new LinkedList<>(); @@ -92,26 +97,32 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - dpUrn, - Constants.DATA_PLATFORM_ENTITY_NAME, - Constants.DATA_PLATFORM_INFO_ASPECT_NAME, - dataPlatformInfoAspectSpec, - null, - dpInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + dpUrn, + Constants.DATA_PLATFORM_ENTITY_NAME, + Constants.DATA_PLATFORM_INFO_ASPECT_NAME, + dataPlatformInfoAspectSpec, + null, + dpInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | 
ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return listResult.getTotal(); } @@ -122,6 +133,7 @@ private DataPlatformInfo mapDpInfo(EntityResponse entityResponse) { return null; } - return new DataPlatformInfo(aspectMap.get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); + return new DataPlatformInfo( + aspectMap.get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index 30608e984a0f2..ae4baee37c822 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; @@ -13,16 +15,12 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.utils.DataPlatformInstanceUtils; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.LinkedList; import java.util.List; import java.util.Optional; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -62,7 +60,9 @@ public void execute() throws Exception { int start = 0; while (start < numEntities) { - log.info("Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", start, + log.info( + "Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", + start, start + BATCH_SIZE); List<UpsertBatchItem> items = new LinkedList<>(); @@ -71,7 +71,8 @@ public void execute() throws Exception { Urn urn = Urn.createFromString(urnStr); Optional<DataPlatformInstance> dataPlatformInstance = getDataPlatformInstance(urn); if (dataPlatformInstance.isPresent()) { - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME) .aspect(dataPlatformInstance.get()) @@ -80,10 +81,14 @@ public void execute() throws Exception { } final AuditStamp aspectAuditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - _entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true); - - log.info("Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + _entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true); + + log.info( + "Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE); start += BATCH_SIZE; } log.info("Finished ingesting DataPlatformInstance for all 
entities"); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java index e4ad215eec864..db8cad65caa8a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -10,6 +12,8 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; @@ -17,16 +21,10 @@ import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestDataPlatformsStep implements BootstrapStep { @@ -44,45 +42,60 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 1. Read from the file into JSON. - final JsonNode dataPlatforms = mapper.readTree(new ClassPathResource("./boot/data_platforms.json").getFile()); + final JsonNode dataPlatforms = + mapper.readTree(new ClassPathResource("./boot/data_platforms.json").getFile()); if (!dataPlatforms.isArray()) { - throw new RuntimeException(String.format("Found malformed data platforms file, expected an Array but found %s", - dataPlatforms.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed data platforms file, expected an Array but found %s", + dataPlatforms.getNodeType())); } // 2. For each JSON object, cast into a DataPlatformSnapshot object. 
- List<UpsertBatchItem> dataPlatformAspects = StreamSupport.stream( - Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED), false) - .map(dataPlatform -> { - final String urnString; - final Urn urn; - try { - urnString = dataPlatform.get("urn").asText(); - urn = Urn.createFromString(urnString); - } catch (URISyntaxException e) { - log.error("Malformed urn: {}", dataPlatform.get("urn").asText()); - throw new RuntimeException("Malformed urn", e); - } - - final DataPlatformInfo info = - RecordUtils.toRecordTemplate(DataPlatformInfo.class, dataPlatform.get("aspect").toString()); - - return UpsertBatchItem.builder() + List<UpsertBatchItem> dataPlatformAspects = + StreamSupport.stream( + Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED), + false) + .map( + dataPlatform -> { + final String urnString; + final Urn urn; + try { + urnString = dataPlatform.get("urn").asText(); + urn = Urn.createFromString(urnString); + } catch (URISyntaxException e) { + log.error("Malformed urn: {}", dataPlatform.get("urn").asText()); + throw new RuntimeException("Malformed urn", e); + } + + final DataPlatformInfo info = + RecordUtils.toRecordTemplate( + DataPlatformInfo.class, dataPlatform.get("aspect").toString()); + + return UpsertBatchItem.builder() .urn(urn) .aspectName(PLATFORM_ASPECT_NAME) .aspect(info) .build(_entityService.getEntityRegistry()); - }).collect(Collectors.toList()); - - _entityService.ingestAspects(AspectsBatchImpl.builder().items(dataPlatformAspects).build(), - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - true, - false); + }) + .collect(Collectors.toList()); + + _entityService.ingestAspects( + AspectsBatchImpl.builder().items(dataPlatformAspects).build(), + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + true, + false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java index 5bc80f46e6478..0b812a6f818f4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -28,14 +30,12 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - /** - * This bootstrap step is responsible for ingesting a default Global Settings object if it does not already exist. + * This bootstrap step is responsible for ingesting a default Global Settings object if it does not + * already exist. * - * If settings already exist, we merge the defaults and the existing settings such that the container will also - * get new settings when they are added. + * <p>If settings already exist, we merge the defaults and the existing settings such that the + * container will also get new settings when they are added. 
*/ @Slf4j public class IngestDefaultGlobalSettingsStep implements BootstrapStep { @@ -49,8 +49,7 @@ public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityServic } public IngestDefaultGlobalSettingsStep( - @Nonnull final EntityService entityService, - @Nonnull final String resourcePath) { + @Nonnull final EntityService entityService, @Nonnull final String resourcePath) { _entityService = Objects.requireNonNull(entityService); _resourcePath = Objects.requireNonNull(resourcePath); } @@ -64,9 +63,13 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); log.info("Ingesting default global settings..."); @@ -76,37 +79,45 @@ public void execute() throws IOException, URISyntaxException { defaultSettingsObj = mapper.readTree(new ClassPathResource(_resourcePath).getFile()); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to parse global settings file. Could not parse valid json at resource path %s", - _resourcePath), - e); + String.format( + "Failed to parse global settings file. Could not parse valid json at resource path %s", + _resourcePath), + e); } if (!defaultSettingsObj.isObject()) { - throw new RuntimeException(String.format("Found malformed global settings info file, expected an Object but found %s", - defaultSettingsObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed global settings info file, expected an Object but found %s", + defaultSettingsObj.getNodeType())); } // 2. Bind the global settings json into a GlobalSettingsInfo aspect. GlobalSettingsInfo defaultSettings; - defaultSettings = RecordUtils.toRecordTemplate(GlobalSettingsInfo.class, defaultSettingsObj.toString()); - ValidationResult result = ValidateDataAgainstSchema.validate( - defaultSettings, - new ValidationOptions( - RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT, - CoercionMode.NORMAL, - UnrecognizedFieldMode.DISALLOW - )); + defaultSettings = + RecordUtils.toRecordTemplate(GlobalSettingsInfo.class, defaultSettingsObj.toString()); + ValidationResult result = + ValidateDataAgainstSchema.validate( + defaultSettings, + new ValidationOptions( + RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT, + CoercionMode.NORMAL, + UnrecognizedFieldMode.DISALLOW)); if (!result.isValid()) { - throw new RuntimeException(String.format( - "Failed to parse global settings file. Provided JSON does not match GlobalSettingsInfo.pdl model. %s", result.getMessages())); + throw new RuntimeException( + String.format( + "Failed to parse global settings file. Provided JSON does not match GlobalSettingsInfo.pdl model. %s", + result.getMessages())); } // 3. Get existing settings or empty settings object final GlobalSettingsInfo existingSettings = getExistingGlobalSettingsOrEmpty(); - // 4. Merge existing settings onto previous settings. Be careful - if we change the settings schema dramatically in future we may need to account for that. 
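[Note, illustration only, not part of the patch: mergeDataMaps itself is outside this hunk. The class javadoc above describes the intended behavior: defaults form the base and existing settings override them, so containers still pick up newly added default sections. A minimal self-contained sketch of that contract, with invented names and values:]

import java.util.HashMap;
import java.util.Map;

public class SettingsMergeSketch {
  // Recursive overlay: copy the defaults, then let every key the existing
  // settings define win, descending into nested maps so default keys added
  // in later releases still appear alongside user-tuned values.
  @SuppressWarnings("unchecked")
  static Map<String, Object> merge(Map<String, Object> defaults, Map<String, Object> existing) {
    Map<String, Object> merged = new HashMap<>(defaults);
    existing.forEach(
        (key, value) -> {
          Object base = merged.get(key);
          if (base instanceof Map && value instanceof Map) {
            merged.put(key, merge((Map<String, Object>) base, (Map<String, Object>) value));
          } else {
            merged.put(key, value); // existing (user) settings win over defaults
          }
        });
    return merged;
  }

  public static void main(String[] args) {
    Map<String, Object> defaults =
        Map.of("views", Map.of("enabled", true), "newSection", Map.of());
    Map<String, Object> existing = Map.of("views", Map.of("enabled", false));
    // Prints {newSection={}, views={enabled=false}} (iteration order may vary).
    System.out.println(merge(defaults, existing));
  }
}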
- final GlobalSettingsInfo newSettings = new GlobalSettingsInfo(mergeDataMaps(defaultSettings.data(), existingSettings.data())); + // 4. Merge existing settings onto previous settings. Be careful - if we change the settings + // schema dramatically in future we may need to account for that. + final GlobalSettingsInfo newSettings = + new GlobalSettingsInfo(mergeDataMaps(defaultSettings.data(), existingSettings.data())); // 5. Ingest into DataHub. final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -118,12 +129,15 @@ public void execute() throws IOException, URISyntaxException { _entityService.ingestProposal( proposal, - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), false); } - private GlobalSettingsInfo getExistingGlobalSettingsOrEmpty() { - RecordTemplate aspect = _entityService.getAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, 0); + private GlobalSettingsInfo getExistingGlobalSettingsOrEmpty() { + RecordTemplate aspect = + _entityService.getAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, 0); return aspect != null ? (GlobalSettingsInfo) aspect : new GlobalSettingsInfo(); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java index 6d64ceea32339..f5a76b5f75778 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -16,22 +18,16 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.ownership.OwnershipTypeInfo; - +import java.util.List; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.Resource; -import java.util.List; - -import static com.linkedin.metadata.Constants.*; - - /** * This bootstrap step is responsible for ingesting default ownership types. - * <p></p> - * If system has never bootstrapped this step will: - * For each ownership type defined in the yaml file, it checks whether the urn exists. - * If not, it ingests the ownership type into DataHub. + * + * <p>If system has never bootstrapped this step will: For each ownership type defined in the yaml + * file, it checks whether the urn exists. If not, it ingests the ownership type into DataHub. 
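+ *
+ * <p>The element shape below is inferred from the parsing code ({@code roleObj.get("urn")} and
+ * {@code roleObj.get("info")}); the urn and the info fields shown are illustrative only:
+ *
+ * <pre>
+ * - urn: "urn:li:ownershipType:__system__technical_owner"
+ *   info:
+ *     name: "Technical Owner"
+ *     description: "Involved in the production and maintenance of the asset"
+ * </pre>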
*/ @Slf4j @RequiredArgsConstructor @@ -54,19 +50,23 @@ public void execute() throws Exception { final JsonNode ownershipTypesObj = JSON_MAPPER.readTree(_ownershipTypesResource.getFile()); if (!ownershipTypesObj.isArray()) { - throw new RuntimeException(String.format("Found malformed ownership file, expected an Array but found %s", - ownershipTypesObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed ownership file, expected an Array but found %s", + ownershipTypesObj.getNodeType())); } final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); log.info("Ingesting {} ownership types", ownershipTypesObj.size()); int numIngested = 0; for (final JsonNode roleObj : ownershipTypesObj) { final Urn urn = Urn.createFromString(roleObj.get("urn").asText()); - final OwnershipTypeInfo info = RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info") - .toString()); + final OwnershipTypeInfo info = + RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info").toString()); log.info(String.format("Ingesting default ownership type with urn %s", urn)); ingestOwnershipType(urn, info, auditStamp); numIngested++; @@ -74,13 +74,15 @@ public void execute() throws Exception { log.info("Ingested {} new ownership types", numIngested); } - private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) { + private void ingestOwnershipType( + final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) { // 3. Write key & aspect MCPs. final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(ownershipTypeUrn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(ownershipTypeUrn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(ownershipTypeUrn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(OWNERSHIP_TYPE_ENTITY_NAME); @@ -96,8 +98,11 @@ private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipType proposal.setAspect(GenericRecordUtils.serializeAspect(info)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()).build(), auditStamp, - false); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()) + .build(), + auditStamp, + false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java index cf29645214466..2aa5fe4f46b65 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import 
com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -25,7 +27,6 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.policy.DataHubPolicyInfo; - import java.io.IOException; import java.net.URISyntaxException; import java.util.Collections; @@ -33,15 +34,10 @@ import java.util.List; import java.util.Map; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.Resource; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestPoliciesStep implements BootstrapStep { @@ -65,9 +61,13 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 0. Execute preflight check to see whether we need to ingest policies log.info("Ingesting default access policies from: {}...", _policiesResource); @@ -77,14 +77,17 @@ public void execute() throws IOException, URISyntaxException { if (!policiesObj.isArray()) { throw new RuntimeException( - String.format("Found malformed policies file, expected an Array but found %s", policiesObj.getNodeType())); + String.format( + "Found malformed policies file, expected an Array but found %s", + policiesObj.getNodeType())); } // 2. For each JSON object, cast into a DataHub Policy Info object. for (final JsonNode policyObj : policiesObj) { final Urn urn = Urn.createFromString(policyObj.get("urn").asText()); - // If the info is not there, it means that the policy was there before, but must now be removed + // If the info is not there, it means that the policy was there before, but must now be + // removed if (!policyObj.has("info")) { _entityService.deleteUrn(urn); continue; @@ -107,7 +110,8 @@ public void execute() throws IOException, URISyntaxException { } } } - // If search index for policies is empty, update the policy index with the ingested policies from previous step. + // If search index for policies is empty, update the policy index with the ingested policies + // from previous step. 
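+    // (Illustrative note: updatePolicyIndex() pages through listUrns with a provisional
+    // total -- initialized to 100 and overwritten by the real total returned with each
+    // page -- so it needs no upfront count query.)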
// Directly update the ES index, does not produce MCLs if (_entitySearchService.docCount(Constants.POLICY_ENTITY_NAME) == 0) { updatePolicyIndex(); @@ -115,31 +119,37 @@ public void execute() throws IOException, URISyntaxException { log.info("Successfully ingested default access policies."); } - /** - * Update policy index and push in the relevant search documents into the search index - */ + /** Update policy index and push in the relevant search documents into the search index */ private void updatePolicyIndex() throws URISyntaxException { log.info("Pushing documents to the policy index"); - AspectSpec policyInfoAspectSpec = _entityRegistry.getEntitySpec(Constants.POLICY_ENTITY_NAME) - .getAspectSpec(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); + AspectSpec policyInfoAspectSpec = + _entityRegistry + .getEntitySpec(Constants.POLICY_ENTITY_NAME) + .getAspectSpec(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); int start = 0; int count = 30; int total = 100; while (start < total) { - ListUrnsResult listUrnsResult = _entityService.listUrns(Constants.POLICY_ENTITY_NAME, start, count); + ListUrnsResult listUrnsResult = + _entityService.listUrns(Constants.POLICY_ENTITY_NAME, start, count); total = listUrnsResult.getTotal(); start = start + count; final Map<Urn, EntityResponse> policyEntities = - _entityService.getEntitiesV2(POLICY_ENTITY_NAME, new HashSet<>(listUrnsResult.getEntities()), + _entityService.getEntitiesV2( + POLICY_ENTITY_NAME, + new HashSet<>(listUrnsResult.getEntities()), Collections.singleton(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME)); - policyEntities.values().forEach(entityResponse -> insertPolicyDocument(entityResponse, policyInfoAspectSpec)); + policyEntities + .values() + .forEach(entityResponse -> insertPolicyDocument(entityResponse, policyInfoAspectSpec)); } log.info("Successfully updated the policy index"); } private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspectSpec) { - EnvelopedAspect aspect = entityResponse.getAspects().get(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); + EnvelopedAspect aspect = + entityResponse.getAspects().get(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); if (aspect == null) { log.info("Missing policy info aspect for urn {}", entityResponse.getUrn()); return; @@ -147,10 +157,15 @@ private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe Optional<String> searchDocument; try { - searchDocument = _searchDocumentTransformer.transformAspect(entityResponse.getUrn(), - new DataHubPolicyInfo(aspect.getValue().data()), aspectSpec, false); + searchDocument = + _searchDocumentTransformer.transformAspect( + entityResponse.getUrn(), + new DataHubPolicyInfo(aspect.getValue().data()), + aspectSpec, + false); } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -164,7 +179,8 @@ private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe return; } - _entitySearchService.upsertDocument(Constants.POLICY_ENTITY_NAME, searchDocument.get(), docId.get()); + _entitySearchService.upsertDocument( + Constants.POLICY_ENTITY_NAME, searchDocument.get(), docId.get()); } private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws URISyntaxException { @@ -172,7 +188,8 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR final MetadataChangeProposal keyAspectProposal = new 
MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(urn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(POLICY_ENTITY_NAME); @@ -186,11 +203,14 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR proposal.setAspect(GenericRecordUtils.serializeAspect(info)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) - .build(), - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - false); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) + .build(), + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); } private boolean hasPolicy(Urn policyUrn) { diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java index 9aed445a967b3..b24acc61ff6c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java @@ -1,12 +1,14 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.boot.BootstrapStep; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.key.DataHubRetentionKey; @@ -22,9 +24,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRetentionPoliciesStep implements BootstrapStep { @@ -36,10 +35,17 @@ public class IngestRetentionPoliciesStep implements BootstrapStep { private final String pluginPath; private static final ObjectMapper YAML_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - YAML_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + YAML_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String UPGRADE_ID = "ingest-retention-policies"; private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); @@ -80,7 +86,8 @@ public void 
execute() throws IOException, URISyntaxException { log.info("Setting {} policies", retentionPolicyMap.size()); boolean hasUpdate = false; for (DataHubRetentionKey key : retentionPolicyMap.keySet()) { - if (_retentionService.setRetention(key.getEntityName(), key.getAspectName(), retentionPolicyMap.get(key))) { + if (_retentionService.setRetention( + key.getEntityName(), key.getAspectName(), retentionPolicyMap.get(key))) { hasUpdate = true; } } @@ -95,7 +102,8 @@ public void execute() throws IOException, URISyntaxException { } // Parse input yaml file or yaml files in the input directory to generate a retention policy map - private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File retentionFileOrDir) throws IOException { + private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File retentionFileOrDir) + throws IOException { // If path does not exist return empty if (!retentionFileOrDir.exists()) { return Collections.emptyMap(); @@ -107,7 +115,9 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret for (File retentionFile : retentionFileOrDir.listFiles()) { if (!retentionFile.isFile()) { - log.info("Element {} in plugin directory {} is not a file. Skipping", retentionFile.getPath(), + log.info( + "Element {} in plugin directory {} is not a file. Skipping", + retentionFile.getPath(), retentionFileOrDir.getPath()); continue; } @@ -116,7 +126,8 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret return result; } // If file, parse the yaml file and return result; - if (!retentionFileOrDir.getPath().endsWith(".yaml") && retentionFileOrDir.getPath().endsWith(".yml")) { + if (!retentionFileOrDir.getPath().endsWith(".yaml") + && retentionFileOrDir.getPath().endsWith(".yml")) { log.info("File {} is not a YAML file. Skipping", retentionFileOrDir.getPath()); return Collections.emptyMap(); } @@ -126,15 +137,16 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret /** * Parse yaml retention config * - * The structure of yaml must be a list of retention policies where each element specifies the entity, aspect - * to apply the policy to and the policy definition. The policy definition is converted into the - * {@link com.linkedin.retention.DataHubRetentionConfig} class. + * <p>The structure of yaml must be a list of retention policies where each element specifies the + * entity, aspect to apply the policy to and the policy definition. The policy definition is + * converted into the {@link com.linkedin.retention.DataHubRetentionConfig} class. 
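+ *
+ * <p>Illustrative shape: the {@code config} key is required by the parser below, while the
+ * selector key names and the version policy shown are assumptions based on the description
+ * above:
+ *
+ * <pre>
+ * - entity: "*"
+ *   aspect: "*"
+ *   config:
+ *     retention:
+ *       version:
+ *         maxVersions: 20
+ * </pre>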
*/ - private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfig(File retentionConfigFile) - throws IOException { + private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfig( + File retentionConfigFile) throws IOException { final JsonNode retentionPolicies = YAML_MAPPER.readTree(retentionConfigFile); if (!retentionPolicies.isArray()) { - throw new IllegalArgumentException("Retention config file must contain an array of retention policies"); + throw new IllegalArgumentException( + "Retention config file must contain an array of retention policies"); } Map<DataHubRetentionKey, DataHubRetentionConfig> retentionPolicyMap = new HashMap<>(); @@ -158,9 +170,11 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfi DataHubRetentionConfig retentionInfo; if (retentionPolicy.has("config")) { retentionInfo = - RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, retentionPolicy.get("config").toString()); + RecordUtils.toRecordTemplate( + DataHubRetentionConfig.class, retentionPolicy.get("config").toString()); } else { - throw new IllegalArgumentException("Each element in the retention config must contain field config"); + throw new IllegalArgumentException( + "Each element in the retention config must contain field config"); } retentionPolicyMap.put(key, retentionInfo); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java index 99be185113968..f3c395abdfc3a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -25,9 +27,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRolesStep implements BootstrapStep { @@ -49,9 +48,13 @@ public ExecutionMode getExecutionMode() { @Override public void execute() throws Exception { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // Sleep to ensure deployment process finishes. 
Thread.sleep(SLEEP_SECONDS * 1000); @@ -64,13 +67,19 @@ public void execute() throws Exception { if (!rolesObj.isArray()) { throw new RuntimeException( - String.format("Found malformed roles file, expected an Array but found %s", rolesObj.getNodeType())); + String.format( + "Found malformed roles file, expected an Array but found %s", + rolesObj.getNodeType())); } final AspectSpec roleInfoAspectSpec = - _entityRegistry.getEntitySpec(DATAHUB_ROLE_ENTITY_NAME).getAspectSpec(DATAHUB_ROLE_INFO_ASPECT_NAME); + _entityRegistry + .getEntitySpec(DATAHUB_ROLE_ENTITY_NAME) + .getAspectSpec(DATAHUB_ROLE_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); for (final JsonNode roleObj : rolesObj) { final Urn urn = Urn.createFromString(roleObj.get("urn").asText()); @@ -81,20 +90,26 @@ public void execute() throws Exception { continue; } - final DataHubRoleInfo info = RecordUtils.toRecordTemplate(DataHubRoleInfo.class, roleObj.get("info").toString()); + final DataHubRoleInfo info = + RecordUtils.toRecordTemplate(DataHubRoleInfo.class, roleObj.get("info").toString()); ingestRole(urn, info, auditStamp, roleInfoAspectSpec); } log.info("Successfully ingested default Roles."); } - private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo, final AuditStamp auditStamp, - final AspectSpec roleInfoAspectSpec) throws URISyntaxException { + private void ingestRole( + final Urn roleUrn, + final DataHubRoleInfo dataHubRoleInfo, + final AuditStamp auditStamp, + final AspectSpec roleInfoAspectSpec) + throws URISyntaxException { // 3. Write key & aspect final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(roleUrn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(roleUrn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(roleUrn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(DATAHUB_ROLE_ENTITY_NAME); @@ -108,12 +123,25 @@ private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo proposal.setAspect(GenericRecordUtils.serializeAspect(dataHubRoleInfo)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityRegistry).build(), - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - false); - - _entityService.alwaysProduceMCLAsync(roleUrn, DATAHUB_ROLE_ENTITY_NAME, DATAHUB_ROLE_INFO_ASPECT_NAME, - roleInfoAspectSpec, null, dataHubRoleInfo, null, null, auditStamp, ChangeType.RESTATE); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) + .build(), + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); + + _entityService.alwaysProduceMCLAsync( + roleUrn, + DATAHUB_ROLE_ENTITY_NAME, + DATAHUB_ROLE_INFO_ASPECT_NAME, + roleInfoAspectSpec, + null, + dataHubRoleInfo, + null, + null, + auditStamp, + ChangeType.RESTATE); } } diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java index febcb9d4ec8a4..9e00b960482c5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -7,25 +10,19 @@ import com.linkedin.common.urn.Urn; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.boot.BootstrapStep; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; - import com.linkedin.metadata.key.CorpUserKey; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.util.Pair; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; - -import com.linkedin.util.Pair; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRootUserStep implements BootstrapStep { @@ -43,16 +40,23 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 1. Read from the file into JSON. - final JsonNode userObj = mapper.readTree(new ClassPathResource("./boot/root_user.json").getFile()); + final JsonNode userObj = + mapper.readTree(new ClassPathResource("./boot/root_user.json").getFile()); if (!userObj.isObject()) { - throw new RuntimeException(String.format("Found malformed root user file, expected an Object but found %s", - userObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed root user file, expected an Object but found %s", + userObj.getNodeType())); } // 2. 
Ingest the user info @@ -66,18 +70,22 @@ public void execute() throws IOException, URISyntaxException { final CorpUserInfo info = RecordUtils.toRecordTemplate(CorpUserInfo.class, userObj.get("info").toString()); - final CorpUserKey key = (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec()); + final CorpUserKey key = + (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec()); final AuditStamp aspectAuditStamp = - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - _entityService.ingestAspects(urn, List.of( - Pair.of(CORP_USER_KEY_ASPECT_NAME, key), - Pair.of(USER_INFO_ASPECT_NAME, info) - ), aspectAuditStamp, null); + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + _entityService.ingestAspects( + urn, + List.of(Pair.of(CORP_USER_KEY_ASPECT_NAME, key), Pair.of(USER_INFO_ASPECT_NAME, info)), + aspectAuditStamp, + null); } private AspectSpec getUserKeyAspectSpec() { final EntitySpec spec = _entityService.getEntityRegistry().getEntitySpec(CORP_USER_ENTITY_NAME); return spec.getKeyAspectSpec(); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java index 34147b166ecd7..3c62f695ddd5f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java @@ -9,7 +9,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements BootstrapStep { @@ -33,7 +32,8 @@ public void execute() throws Exception { return; } // Remove invalid telemetry aspect - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_TELEMETRY_ASPECT_NAME, new HashMap<>(), true); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_TELEMETRY_ASPECT_NAME, new HashMap<>(), true); BootstrapStep.setUpgradeResult(REMOVE_UNKNOWN_ASPECTS_URN, _entityService); } catch (Exception e) { @@ -48,5 +48,4 @@ public void execute() throws Exception { public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java index 1f5f7f26ed89b..333928999f453 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java @@ -13,14 +13,13 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ExtraInfo; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.LinkedList; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class RestoreColumnLineageIndices extends UpgradeStep { @@ -30,7 +29,8 @@ public class RestoreColumnLineageIndices 
extends UpgradeStep { private final EntityRegistry _entityRegistry; - public RestoreColumnLineageIndices(@Nonnull final EntityService entityService, @Nonnull final EntityRegistry entityRegistry) { + public RestoreColumnLineageIndices( + @Nonnull final EntityService entityService, @Nonnull final EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entityRegistry = Objects.requireNonNull(entityRegistry, "entityRegistry must not be null"); } @@ -38,7 +38,9 @@ public RestoreColumnLineageIndices(@Nonnull final EntityService entityService, @ @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final int totalUpstreamLineageCount = getAndRestoreUpstreamLineageIndices(0, auditStamp); int upstreamLineageCount = BATCH_SIZE; @@ -47,17 +49,21 @@ public void upgrade() throws Exception { upstreamLineageCount += BATCH_SIZE; } - final int totalChartInputFieldsCount = getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, 0, auditStamp); + final int totalChartInputFieldsCount = + getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, 0, auditStamp); int chartInputFieldsCount = BATCH_SIZE; while (chartInputFieldsCount < totalChartInputFieldsCount) { - getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, chartInputFieldsCount, auditStamp); + getAndRestoreInputFieldsIndices( + Constants.CHART_ENTITY_NAME, chartInputFieldsCount, auditStamp); chartInputFieldsCount += BATCH_SIZE; } - final int totalDashboardInputFieldsCount = getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, 0, auditStamp); + final int totalDashboardInputFieldsCount = + getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, 0, auditStamp); int dashboardInputFieldsCount = BATCH_SIZE; while (dashboardInputFieldsCount < totalDashboardInputFieldsCount) { - getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, dashboardInputFieldsCount, auditStamp); + getAndRestoreInputFieldsIndices( + Constants.DASHBOARD_ENTITY_NAME, dashboardInputFieldsCount, auditStamp); dashboardInputFieldsCount += BATCH_SIZE; } } @@ -69,23 +75,29 @@ public ExecutionMode getExecutionMode() { } private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp) { - final AspectSpec upstreamLineageAspectSpec = _entityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME) - .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); - - final ListResult<RecordTemplate> latestAspects = _entityService.listLatestAspects( - Constants.DATASET_ENTITY_NAME, - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - start, - BATCH_SIZE); - - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { + final AspectSpec upstreamLineageAspectSpec = + _entityRegistry + .getEntitySpec(Constants.DATASET_ENTITY_NAME) + .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + + final ListResult<RecordTemplate> latestAspects = + _entityService.listLatestAspects( + Constants.DATASET_ENTITY_NAME, + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + start, + BATCH_SIZE); + + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { log.debug("Found 0 upstreamLineage aspects for datasets. 
Skipping migration."); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of upstreamLineages. - log.warn("Failed to match upstreamLineage aspects with corresponding urns. Found mismatched length between aspects ({})" + log.warn( + "Failed to match upstreamLineage aspects with corresponding urns. Found mismatched length between aspects ({})" + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), @@ -104,48 +116,56 @@ private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - urn, - Constants.DATASET_ENTITY_NAME, - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - upstreamLineageAspectSpec, - null, - upstreamLineage, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + urn, + Constants.DATASET_ENTITY_NAME, + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + upstreamLineageAspectSpec, + null, + upstreamLineage, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return latestAspects.getTotalCount(); } - private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditStamp auditStamp) throws Exception { - final AspectSpec inputFieldsAspectSpec = _entityRegistry.getEntitySpec(entityName) - .getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME); + private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditStamp auditStamp) + throws Exception { + final AspectSpec inputFieldsAspectSpec = + _entityRegistry.getEntitySpec(entityName).getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME); - final ListResult<RecordTemplate> latestAspects = _entityService.listLatestAspects( - entityName, - Constants.INPUT_FIELDS_ASPECT_NAME, - start, - BATCH_SIZE); + final ListResult<RecordTemplate> latestAspects = + _entityService.listLatestAspects( + entityName, Constants.INPUT_FIELDS_ASPECT_NAME, start, BATCH_SIZE); - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { log.debug("Found 0 inputFields aspects. Skipping migration."); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of inputFields. - log.warn("Failed to match inputFields aspects with corresponding urns. Found mismatched length between aspects ({})" + log.warn( + "Failed to match inputFields aspects with corresponding urns. 
Found mismatched length between aspects ({})" + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), @@ -164,26 +184,32 @@ private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditS continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - urn, - entityName, - Constants.INPUT_FIELDS_ASPECT_NAME, - inputFieldsAspectSpec, - null, - inputFields, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + urn, + entityName, + Constants.INPUT_FIELDS_ASPECT_NAME, + inputFieldsAspectSpec, + null, + inputFields, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return latestAspects.getTotalCount(); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java index 355936fe1994c..bb7ad80ef73d2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -33,16 +35,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class RestoreDbtSiblingsIndices implements BootstrapStep { private static final String VERSION = "0"; private static final String UPGRADE_ID = "restore-dbt-siblings-indices"; private static final Urn SIBLING_UPGRADE_URN = - EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(UPGRADE_ID), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(UPGRADE_ID), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); private static final Integer BATCH_SIZE = 1000; private static final Integer SLEEP_SECONDS = 120; @@ -65,12 +65,19 @@ public void execute() throws Exception { log.info("Attempting to run RestoreDbtSiblingsIndices upgrade.."); log.info(String.format("Waiting %s seconds..", SLEEP_SECONDS)); - EntityResponse response = _entityService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, SIBLING_UPGRADE_URN, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - ); - if (response != null && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { - DataMap dataMap = response.getAspects().get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME).getValue().data(); + EntityResponse response = + _entityService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + SIBLING_UPGRADE_URN, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); + if (response != null + && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { + DataMap dataMap 
= + response + .getAspects() + .get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) + .getValue() + .data(); DataHubUpgradeRequest request = new DataHubUpgradeRequest(dataMap); if (request.hasVersion() && request.getVersion().equals(VERSION)) { log.info("RestoreDbtSiblingsIndices has run before with this version. Skipping"); @@ -89,11 +96,20 @@ public void execute() throws Exception { log.info("Found {} dataset entities to attempt to bootstrap", rowCount); final AspectSpec datasetAspectSpec = - _entityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME).getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); - final AuditStamp auditStamp = new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - final DataHubUpgradeRequest upgradeRequest = new DataHubUpgradeRequest().setTimestampMs(System.currentTimeMillis()).setVersion(VERSION); - ingestUpgradeAspect(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, upgradeRequest, auditStamp); + _entityRegistry + .getEntitySpec(Constants.DATASET_ENTITY_NAME) + .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + final DataHubUpgradeRequest upgradeRequest = + new DataHubUpgradeRequest() + .setTimestampMs(System.currentTimeMillis()) + .setVersion(VERSION); + ingestUpgradeAspect( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, upgradeRequest, auditStamp); int indexedCount = 0; while (indexedCount < rowCount) { @@ -101,19 +117,23 @@ public void execute() throws Exception { indexedCount += BATCH_SIZE; } - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); ingestUpgradeAspect(Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, upgradeResult, auditStamp); log.info("Successfully restored sibling aspects"); } catch (Exception e) { log.error("Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); _entityService.deleteUrn(SIBLING_UPGRADE_URN); - throw new RuntimeException("Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); + throw new RuntimeException( + "Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); } } - private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp, AspectSpec upstreamAspectSpec) { - ListUrnsResult datasetUrnsResult = _entityService.listUrns(DATASET_ENTITY_NAME, start, BATCH_SIZE); + private void getAndRestoreUpstreamLineageIndices( + int start, AuditStamp auditStamp, AspectSpec upstreamAspectSpec) { + ListUrnsResult datasetUrnsResult = + _entityService.listUrns(DATASET_ENTITY_NAME, start, BATCH_SIZE); List<Urn> datasetUrns = datasetUrnsResult.getEntities(); log.info("Re-indexing upstreamLineage aspect from {} with batch size {}", start, BATCH_SIZE); @@ -121,12 +141,16 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam return; } - final Map<Urn, EntityResponse> upstreamLineageResponse; + final Map<Urn, EntityResponse> upstreamLineageResponse; try { upstreamLineageResponse = - _entityService.getEntitiesV2(DATASET_ENTITY_NAME, new HashSet<>(datasetUrns), Collections.singleton(UPSTREAM_LINEAGE_ASPECT_NAME)); + _entityService.getEntitiesV2( + DATASET_ENTITY_NAME, + new HashSet<>(datasetUrns), + Collections.singleton(UPSTREAM_LINEAGE_ASPECT_NAME)); } catch 
(URISyntaxException e) { - throw new RuntimeException(String.format("Error fetching upstream lineage history: %s", e.toString())); + throw new RuntimeException( + String.format("Error fetching upstream lineage history: %s", e.toString())); } // Loop over datasets and produce changelog @@ -142,26 +166,32 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - datasetUrn, - DATASET_ENTITY_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - upstreamAspectSpec, - null, - upstreamLineage, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + datasetUrn, + DATASET_ENTITY_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + upstreamAspectSpec, + null, + upstreamLineage, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) { @@ -170,10 +200,12 @@ private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) { return null; } - return new UpstreamLineage(aspectMap.get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); + return new UpstreamLineage( + aspectMap.get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); } - private void ingestUpgradeAspect(String aspectName, RecordTemplate aspect, AuditStamp auditStamp) { + private void ingestUpgradeAspect( + String aspectName, RecordTemplate aspect, AuditStamp auditStamp) { final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(SIBLING_UPGRADE_URN); upgradeProposal.setEntityType(Constants.DATA_HUB_UPGRADE_ENTITY_NAME); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java index 4de2bea9a76a9..319bbd084e05c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java @@ -16,7 +16,6 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; - import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -29,7 +28,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class RestoreGlossaryIndices extends UpgradeStep { private static final String VERSION = "1"; @@ -39,7 +37,9 @@ public class RestoreGlossaryIndices extends UpgradeStep { private final EntitySearchService _entitySearchService; private final EntityRegistry _entityRegistry; - public RestoreGlossaryIndices(EntityService entityService, EntitySearchService entitySearchService, + public RestoreGlossaryIndices( + EntityService entityService, + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entitySearchService = entitySearchService; @@ -48,12 +48,18 @@ public 
RestoreGlossaryIndices(EntityService entityService, EntitySearchService e @Override public void upgrade() throws Exception { - final AspectSpec termAspectSpec = _entityRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) - .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); - final AspectSpec nodeAspectSpec = _entityRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) - .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); + final AspectSpec termAspectSpec = + _entityRegistry + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) + .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + final AspectSpec nodeAspectSpec = + _entityRegistry + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) + .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final int totalTermsCount = getAndRestoreTermAspectIndices(0, auditStamp, termAspectSpec); int termsCount = BATCH_SIZE; @@ -76,20 +82,29 @@ public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - private int getAndRestoreTermAspectIndices(int start, AuditStamp auditStamp, AspectSpec termAspectSpec) - throws Exception { + private int getAndRestoreTermAspectIndices( + int start, AuditStamp auditStamp, AspectSpec termAspectSpec) throws Exception { SearchResult termsResult = - _entitySearchService.search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, - null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - List<Urn> termUrns = termsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + _entitySearchService.search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + start, + BATCH_SIZE, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + List<Urn> termUrns = + termsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); if (termUrns.size() == 0) { return 0; } final Map<Urn, EntityResponse> termInfoResponses = - _entityService.getEntitiesV2(Constants.GLOSSARY_TERM_ENTITY_NAME, new HashSet<>(termUrns), - Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) - ); + _entityService.getEntitiesV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(termUrns), + Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)); // Loop over Terms and produce changelog List<Future<?>> futures = new LinkedList<>(); @@ -105,43 +120,59 @@ null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - termUrn, - Constants.GLOSSARY_TERM_ENTITY_NAME, - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - termAspectSpec, - null, - termInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + termUrn, + Constants.GLOSSARY_TERM_ENTITY_NAME, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + termAspectSpec, + null, + termInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + 
f -> {
+              try {
+                f.get();
+              } catch (InterruptedException | ExecutionException e) {
+                throw new RuntimeException(e);
+              }
+            });
 
     return termsResult.getNumEntities();
   }
 
-  private int getAndRestoreNodeAspectIndices(int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception {
-    SearchResult nodesResult = _entitySearchService.search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "",
-        null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
-        .setSkipAggregates(true).setSkipHighlighting(true));
-    List<Urn> nodeUrns = nodesResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList());
+  private int getAndRestoreNodeAspectIndices(
+      int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception {
+    SearchResult nodesResult =
+        _entitySearchService.search(
+            List.of(Constants.GLOSSARY_NODE_ENTITY_NAME),
+            "",
+            null,
+            null,
+            start,
+            BATCH_SIZE,
+            new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true));
+    List<Urn> nodeUrns =
+        nodesResult.getEntities().stream()
+            .map(SearchEntity::getEntity)
+            .collect(Collectors.toList());
     if (nodeUrns.size() == 0) {
       return 0;
     }
 
-    final Map<Urn, EntityResponse> nodeInfoResponses = _entityService.getEntitiesV2(
-        Constants.GLOSSARY_NODE_ENTITY_NAME,
-        new HashSet<>(nodeUrns),
-        Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)
-    );
+    final Map<Urn, EntityResponse> nodeInfoResponses =
+        _entityService.getEntitiesV2(
+            Constants.GLOSSARY_NODE_ENTITY_NAME,
+            new HashSet<>(nodeUrns),
+            Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME));
 
     // Loop over Nodes and produce changelog
     List<Future<?>> futures = new LinkedList<>();
@@ -157,26 +188,32 @@ null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
         continue;
       }
 
-      futures.add(_entityService.alwaysProduceMCLAsync(
-          nodeUrn,
-          Constants.GLOSSARY_NODE_ENTITY_NAME,
-          Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
-          nodeAspectSpec,
-          null,
-          nodeInfo,
-          null,
-          null,
-          auditStamp,
-          ChangeType.RESTATE).getFirst());
+      futures.add(
+          _entityService
+              .alwaysProduceMCLAsync(
+                  nodeUrn,
+                  Constants.GLOSSARY_NODE_ENTITY_NAME,
+                  Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
+                  nodeAspectSpec,
+                  null,
+                  nodeInfo,
+                  null,
+                  null,
+                  auditStamp,
+                  ChangeType.RESTATE)
+              .getFirst());
     }
 
-    futures.stream().filter(Objects::nonNull).forEach(f -> {
-      try {
-        f.get();
-      } catch (InterruptedException | ExecutionException e) {
-        throw new RuntimeException(e);
-      }
-    });
+    futures.stream()
+        .filter(Objects::nonNull)
+        .forEach(
+            f -> {
+              try {
+                f.get();
+              } catch (InterruptedException | ExecutionException e) {
+                throw new RuntimeException(e);
+              }
+            });
 
     return nodesResult.getNumEntities();
   }
 
@@ -187,7 +224,8 @@ private GlossaryTermInfo mapTermInfo(EntityResponse entityResponse) {
       return null;
     }
 
-    return new GlossaryTermInfo(aspectMap.get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data());
+    return new GlossaryTermInfo(
+        aspectMap.get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data());
   }
 
   private GlossaryNodeInfo mapNodeInfo(EntityResponse entityResponse) {
@@ -196,6 +234,7 @@ private GlossaryNodeInfo mapNodeInfo(EntityResponse entityResponse) {
       return null;
     }
 
-    return new GlossaryNodeInfo(aspectMap.get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data());
+    return new GlossaryNodeInfo(
+        aspectMap.get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java
index 7fcafa24d7b45..e2d59b505a568 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.boot.steps;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.BrowsePaths;
@@ -19,9 +21,6 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
  * This is an opt-in optional upgrade step to migrate your browse paths to the new truncated form.
  * It is idempotent, can be retried as many times as necessary.
@@ -29,13 +28,13 @@
 @Slf4j
 public class UpgradeDefaultBrowsePathsStep extends UpgradeStep {
 
-  private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of(
-      Constants.DATASET_ENTITY_NAME,
-      Constants.DASHBOARD_ENTITY_NAME,
-      Constants.CHART_ENTITY_NAME,
-      Constants.DATA_JOB_ENTITY_NAME,
-      Constants.DATA_FLOW_ENTITY_NAME
-  );
+  private static final Set<String> ENTITY_TYPES_TO_MIGRATE =
+      ImmutableSet.of(
+          Constants.DATASET_ENTITY_NAME,
+          Constants.DASHBOARD_ENTITY_NAME,
+          Constants.CHART_ENTITY_NAME,
+          Constants.DATA_JOB_ENTITY_NAME,
+          Constants.DATA_FLOW_ENTITY_NAME);
   private static final String VERSION = "1";
   private static final String UPGRADE_ID = "upgrade-default-browse-paths-step";
   private static final Integer BATCH_SIZE = 5000;
@@ -47,14 +46,18 @@ public UpgradeDefaultBrowsePathsStep(EntityService entityService) {
   @Override
   public void upgrade() throws Exception {
     final AuditStamp auditStamp =
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+        new AuditStamp()
+            .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis());
 
     int total = 0;
     for (String entityType : ENTITY_TYPES_TO_MIGRATE) {
       int migratedCount = 0;
       do {
-        log.info(String.format("Upgrading batch %s-%s out of %s of browse paths for entity type %s",
-            migratedCount, migratedCount + BATCH_SIZE, total, entityType));
+        log.info(
+            String.format(
+                "Upgrading batch %s-%s out of %s of browse paths for entity type %s",
+                migratedCount, migratedCount + BATCH_SIZE, total, entityType));
         total = getAndMigrateBrowsePaths(entityType, migratedCount, auditStamp);
         migratedCount += BATCH_SIZE;
       } while (migratedCount < total);
@@ -71,21 +74,24 @@ public ExecutionMode getExecutionMode() {
 
   private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp auditStamp)
       throws Exception {
-    final ListResult<RecordTemplate> latestAspects = _entityService.listLatestAspects(
-        entityType,
-        Constants.BROWSE_PATHS_ASPECT_NAME,
-        start,
-        BATCH_SIZE);
+    final ListResult<RecordTemplate> latestAspects =
+        _entityService.listLatestAspects(
+            entityType, Constants.BROWSE_PATHS_ASPECT_NAME, start, BATCH_SIZE);
 
-    if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) {
-      log.debug(String.format("Found 0 browse paths for entity with type %s. Skipping migration!", entityType));
+    if (latestAspects.getTotalCount() == 0
+        || latestAspects.getValues() == null
+        || latestAspects.getMetadata() == null) {
+      log.debug(
+          String.format(
+              "Found 0 browse paths for entity with type %s. Skipping migration!", entityType));
       return 0;
     }
 
     if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) {
       // Bad result -- we should log that we cannot migrate this batch of paths.
-      log.warn("Failed to match browse path aspects with corresponding urns. Found mismatched length between aspects ({})"
-          + "and metadata ({}) for metadata {}",
+      log.warn(
+          "Failed to match browse path aspects with corresponding urns. Found mismatched length between aspects ({})"
+              + "and metadata ({}) for metadata {}",
           latestAspects.getValues().size(),
           latestAspects.getMetadata().getExtraInfos().size(),
           latestAspects.getMetadata());
@@ -107,7 +113,8 @@ private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp au
       log.debug(String.format("Inspecting browse path for urn %s, value %s", urn, browsePaths));
 
       if (browsePaths.hasPaths() && browsePaths.getPaths().size() == 1) {
-        String legacyBrowsePath = BrowsePathUtils.getLegacyDefaultBrowsePath(urn, _entityService.getEntityRegistry());
+        String legacyBrowsePath =
+            BrowsePathUtils.getLegacyDefaultBrowsePath(urn, _entityService.getEntityRegistry());
         log.debug(String.format("Legacy browse path for urn %s, value %s", urn, legacyBrowsePath));
         if (legacyBrowsePath.equals(browsePaths.getPaths().get(0))) {
           migrateBrowsePath(urn, auditStamp);
@@ -126,13 +133,9 @@ private void migrateBrowsePath(Urn urn, AuditStamp auditStamp) throws Exception
     proposal.setEntityType(urn.getEntityType());
     proposal.setAspectName(Constants.BROWSE_PATHS_ASPECT_NAME);
     proposal.setChangeType(ChangeType.UPSERT);
-    proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis()));
+    proposal.setSystemMetadata(
+        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis()));
     proposal.setAspect(GenericRecordUtils.serializeAspect(newPaths));
-    _entityService.ingestProposal(
-        proposal,
-        auditStamp,
-        false
-    );
+    _entityService.ingestProposal(proposal, auditStamp, false);
   }
-
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java
index 5cac32cfe1a42..409285fc8f1e9 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java
@@ -19,7 +19,8 @@ public String name() {
   @Override
   public void execute() throws Exception {
     if (!_dataHubUpgradeKafkaListener.waitForBootstrap()) {
-      throw new IllegalStateException("Build indices was unsuccessful, stopping bootstrap process.");
+      throw new IllegalStateException(
+          "Build indices was unsuccessful, stopping bootstrap process.");
    }
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java
index 52fee1342755c..67d0976a1b0a8 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java
+++ 
b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java
@@ -1,5 +1,7 @@
 package com.linkedin.restli.server;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.linkedin.data.codec.AbstractJacksonDataCodec;
 import com.linkedin.metadata.filter.RestliLoggingFilter;
@@ -10,59 +12,62 @@
 import com.linkedin.r2.transport.http.server.RAPServlet;
 import com.linkedin.restli.docgen.DefaultDocumentationRequestHandler;
 import com.linkedin.restli.server.spring.SpringInjectResourceFactory;
+import java.util.concurrent.Executors;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import java.util.concurrent.Executors;
-
-import static com.linkedin.metadata.Constants.*;
-
 
 @Slf4j
 @Configuration
 public class RAPServletFactory {
-    @Value("#{systemEnvironment['RESTLI_SERVLET_THREADS']}")
-    private Integer environmentThreads;
+  @Value("#{systemEnvironment['RESTLI_SERVLET_THREADS']}")
+  private Integer environmentThreads;
+
+  @Value("${" + INGESTION_MAX_SERIALIZED_STRING_LENGTH + ":16000000}")
+  private int maxSerializedStringLength;
 
-    @Value("${" + INGESTION_MAX_SERIALIZED_STRING_LENGTH + ":16000000}")
-    private int maxSerializedStringLength;
+  @Bean(name = "restliSpringInjectResourceFactory")
+  public SpringInjectResourceFactory springInjectResourceFactory() {
+    return new SpringInjectResourceFactory();
+  }
 
-    @Bean(name = "restliSpringInjectResourceFactory")
-    public SpringInjectResourceFactory springInjectResourceFactory() {
-        return new SpringInjectResourceFactory();
-    }
+  @Bean("parseqEngineThreads")
+  public int parseqEngineThreads() {
+    return environmentThreads != null
+        ? environmentThreads
+        : (Runtime.getRuntime().availableProcessors() + 1);
+  }
 
-    @Bean("parseqEngineThreads")
-    public int parseqEngineThreads() {
-        return environmentThreads != null ? environmentThreads : (Runtime.getRuntime().availableProcessors() + 1);
-    }
-    @Bean
-    public RAPServlet rapServlet(
-        @Qualifier("restliSpringInjectResourceFactory") SpringInjectResourceFactory springInjectResourceFactory,
-        @Qualifier("parseqEngineThreads") int threads) {
-        log.info("Starting restli servlet with {} threads.", threads);
-        Engine parseqEngine = new EngineBuilder()
-            .setTaskExecutor(Executors.newFixedThreadPool(threads))
-            .setTimerScheduler(Executors.newSingleThreadScheduledExecutor())
-            .build();
+  @Bean
+  public RAPServlet rapServlet(
+      @Qualifier("restliSpringInjectResourceFactory")
+          SpringInjectResourceFactory springInjectResourceFactory,
+      @Qualifier("parseqEngineThreads") int threads) {
+    log.info("Starting restli servlet with {} threads.", threads);
+    Engine parseqEngine =
+        new EngineBuilder()
+            .setTaskExecutor(Executors.newFixedThreadPool(threads))
+            .setTimerScheduler(Executors.newSingleThreadScheduledExecutor())
+            .build();
 
-        // !!!!!!! IMPORTANT !!!!!!!
-        // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. Without this the limit is
-        // whatever Jackson is defaulting to (5 MB currently).
-        AbstractJacksonDataCodec.JSON_FACTORY.setStreamReadConstraints(StreamReadConstraints.builder()
-            .maxStringLength(maxSerializedStringLength).build());
-        // !!!!!!! IMPORTANT !!!!!!!
+    // !!!!!!! IMPORTANT !!!!!!!
+    // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages.
+    // Without this the limit is
+    // whatever Jackson is defaulting to (5 MB currently).
+    AbstractJacksonDataCodec.JSON_FACTORY.setStreamReadConstraints(
+        StreamReadConstraints.builder().maxStringLength(maxSerializedStringLength).build());
+    // !!!!!!! IMPORTANT !!!!!!!
 
-        RestLiConfig config = new RestLiConfig();
-        config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler());
-        config.setResourcePackageNames("com.linkedin.metadata.resources");
-        config.addFilter(new RestliLoggingFilter());
+    RestLiConfig config = new RestLiConfig();
+    config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler());
+    config.setResourcePackageNames("com.linkedin.metadata.resources");
+    config.addFilter(new RestliLoggingFilter());
 
-        RestLiServer restLiServer = new RestLiServer(config, springInjectResourceFactory, parseqEngine);
-        return new RAPServlet(new FilterChainDispatcher(new DelegatingTransportDispatcher(restLiServer, restLiServer),
-            FilterChains.empty()));
-    }
+    RestLiServer restLiServer = new RestLiServer(config, springInjectResourceFactory, parseqEngine);
+    return new RAPServlet(
+        new FilterChainDispatcher(
+            new DelegatingTransportDispatcher(restLiServer, restLiServer), FilterChains.empty()));
+  }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java
index 723f0333999dd..29211d295a2a1 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java
@@ -1,28 +1,28 @@
 package com.linkedin.restli.server;
 
 import com.linkedin.r2.transport.http.server.RAPServlet;
+import java.io.IOException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 import org.springframework.web.HttpRequestHandler;
 import org.springframework.web.context.support.HttpRequestHandlerServlet;
 
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
 @Component
 public class RestliHandlerServlet extends HttpRequestHandlerServlet implements HttpRequestHandler {
-    @Autowired
-    private RAPServlet _r2Servlet;
+  @Autowired private RAPServlet _r2Servlet;
 
-    @Override
-    public void service(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
-        _r2Servlet.service(req, res);
-    }
+  @Override
+  public void service(HttpServletRequest req, HttpServletResponse res)
+      throws ServletException, IOException {
+    _r2Servlet.service(req, res);
+  }
 
-    @Override
-    public void handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
-        service(request, response);
-    }
+  @Override
+  public void handleRequest(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    service(request, response);
+  }
 }
diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java
index 408c7b67b25f0..19a2a19fcaa4c 100644
--- 
a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.kafka; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -8,25 +11,18 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @SpringBootTest( - properties = { - "spring.kafka.properties.security.protocol=SSL" - }, - classes = { - SimpleKafkaConsumerFactory.class, - ConfigurationProvider.class - }) + properties = {"spring.kafka.properties.security.protocol=SSL"}, + classes = {SimpleKafkaConsumerFactory.class, ConfigurationProvider.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class SimpleKafkaConsumerFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ConcurrentKafkaListenerContainerFactory<?, ?> testFactory; + @Autowired ConcurrentKafkaListenerContainerFactory<?, ?> testFactory; - @Test - void testInitialization() { - assertNotNull(testFactory); - assertEquals(testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), "SSL"); - } + @Test + void testInitialization() { + assertNotNull(testFactory); + assertEquals( + testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), + "SSL"); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java index 017e8f32886af..6cc1d293e24e6 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static com.datahub.util.RecordUtils.*; +import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; + import com.google.common.collect.ImmutableList; import com.hazelcast.config.Config; import com.hazelcast.core.HazelcastInstance; @@ -34,136 +37,163 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.datahub.util.RecordUtils.*; -import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; - - public class CacheTest extends JetTestSupport { - HazelcastCacheManager cacheManager1; - HazelcastCacheManager cacheManager2; - HazelcastInstance instance1; - HazelcastInstance instance2; + HazelcastCacheManager cacheManager1; + HazelcastCacheManager cacheManager2; + HazelcastInstance instance1; + HazelcastInstance instance2; - public CacheTest() { - Config config = new Config(); + public CacheTest() { + Config config = new Config(); - instance1 = createHazelcastInstance(config); - instance2 = createHazelcastInstance(config); + instance1 = createHazelcastInstance(config); + instance2 = createHazelcastInstance(config); - cacheManager1 = new HazelcastCacheManager(instance1); - cacheManager2 = new HazelcastCacheManager(instance2); - } + cacheManager1 = new HazelcastCacheManager(instance1); + 
cacheManager2 = new HazelcastCacheManager(instance2); + } - @Test - public void hazelcastTest() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); - SearchResult searchResult = new SearchResult() + @Test + public void hazelcastTest() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); + SearchResult searchResult = + new SearchResult() .setEntities(new SearchEntityArray(List.of(searchEntity))) .setNumEntities(1) .setFrom(0) .setPageSize(1) .setMetadata(new SearchResultMetadata()); - Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination> - quintet = Quintet.with(List.of(corpuserUrn.toString()), "*", null, null, - new CacheableSearcher.QueryPagination(0, 1)); - - CacheableSearcher<Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> cacheableSearcher1 = - new CacheableSearcher<>(cacheManager1.getCache("test"), 10, - querySize -> searchResult, - querySize -> quintet, null, true); - - CacheableSearcher<Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> cacheableSearcher2 = - new CacheableSearcher<>(cacheManager2.getCache("test"), 10, + Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination> + quintet = + Quintet.with( + List.of(corpuserUrn.toString()), + "*", + null, + null, + new CacheableSearcher.QueryPagination(0, 1)); + + CacheableSearcher< + Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> + cacheableSearcher1 = + new CacheableSearcher<>( + cacheManager1.getCache("test"), + 10, querySize -> searchResult, - querySize -> quintet, null, true); - - // Cache result - SearchResult result = cacheableSearcher1.getSearchResults(0, 1); - Assert.assertNotEquals(result, null); - - Assert.assertEquals(instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); - } - - @Test - public void hazelcastTestScroll() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); - ScrollResult scrollResult = new ScrollResult() + querySize -> quintet, + null, + true); + + CacheableSearcher< + Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> + cacheableSearcher2 = + new CacheableSearcher<>( + cacheManager2.getCache("test"), + 10, + querySize -> searchResult, + querySize -> quintet, + null, + true); + + // Cache result + SearchResult result = cacheableSearcher1.getSearchResults(0, 1); + Assert.assertNotEquals(result, null); + + Assert.assertEquals( + instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); + Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); + Assert.assertEquals( + cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); + } + + @Test + public void hazelcastTestScroll() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); + ScrollResult scrollResult = + new ScrollResult() .setEntities(new SearchEntityArray(List.of(searchEntity))) .setNumEntities(1) .setPageSize(1) .setMetadata(new SearchResultMetadata()); - 
final Criterion filterCriterion = new Criterion() + final Criterion filterCriterion = + new Criterion() .setField("platform") .setCondition(Condition.EQUAL) .setValue("hive") .setValues(new StringArray(ImmutableList.of("hive"))); - final Criterion subtypeCriterion = new Criterion() + final Criterion subtypeCriterion = + new Criterion() .setField("subtypes") .setCondition(Condition.EQUAL) .setValue("") .setValues(new StringArray(ImmutableList.of("view"))); - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(subtypeCriterion))) - )); - - Sextet<List<String>, String, String, String, String, Integer> - sextet = Sextet.with(List.of(corpuserUrn.toString()), "*", toJsonString(filterWithCondition), null, null, 1); - - Cache cache1 = cacheManager1.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); - Cache cache2 = cacheManager2.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); - - // Cache result - String json = toJsonString(scrollResult); - cache1.put(sextet, json); - Assert.assertEquals(instance1.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet), - instance2.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet)); - String cachedResult1 = cache1.get(sextet, String.class); - String cachedResult2 = cache2.get(sextet, String.class); - Assert.assertEquals(cachedResult1, cachedResult2); - Assert.assertEquals(cache1.get(sextet, String.class), json); - Assert.assertEquals(cache2.get(sextet, String.class), json); - } - - @Test - public void testLineageCaching() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - EntityLineageResult lineageResult = new EntityLineageResult(); - LineageRelationshipArray array = new LineageRelationshipArray(); - LineageRelationship lineageRelationship = new LineageRelationship().setEntity(corpuserUrn).setType("type"); - for (int i = 0; i < 10000; i++) { - array.add(lineageRelationship); - } - lineageResult.setRelationships(array).setCount(1).setStart(0).setTotal(1); - CachedEntityLineageResult cachedEntityLineageResult = new CachedEntityLineageResult(lineageResult, - System.currentTimeMillis()); - - Cache cache1 = cacheManager1.getCache("relationshipSearchService"); - Cache cache2 = cacheManager2.getCache("relationshipSearchService"); - - EntityLineageResultCacheKey key = new EntityLineageResultCacheKey(corpuserUrn, LineageDirection.DOWNSTREAM, - 0L, 1L, 1, ChronoUnit.DAYS); - - cache1.put(key, cachedEntityLineageResult); - - Assert.assertEquals(instance1.getMap("relationshipSearchService").get(key), - instance2.getMap("relationshipSearchService").get(key)); - CachedEntityLineageResult cachedResult1 = cache1.get(key, CachedEntityLineageResult.class); - CachedEntityLineageResult cachedResult2 = cache2.get(key, CachedEntityLineageResult.class); - Assert.assertEquals(cachedResult1, cachedResult2); - Assert.assertEquals(cache1.get(key, CachedEntityLineageResult.class), cachedEntityLineageResult); - Assert.assertEquals(cache2.get(key, CachedEntityLineageResult.class).getEntityLineageResult(), lineageResult); + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))), + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(subtypeCriterion))))); + + Sextet<List<String>, String, String, String, String, Integer> 
sextet = + Sextet.with( + List.of(corpuserUrn.toString()), "*", toJsonString(filterWithCondition), null, null, 1); + + Cache cache1 = cacheManager1.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); + Cache cache2 = cacheManager2.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); + + // Cache result + String json = toJsonString(scrollResult); + cache1.put(sextet, json); + Assert.assertEquals( + instance1.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet), + instance2.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet)); + String cachedResult1 = cache1.get(sextet, String.class); + String cachedResult2 = cache2.get(sextet, String.class); + Assert.assertEquals(cachedResult1, cachedResult2); + Assert.assertEquals(cache1.get(sextet, String.class), json); + Assert.assertEquals(cache2.get(sextet, String.class), json); + } + + @Test + public void testLineageCaching() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + EntityLineageResult lineageResult = new EntityLineageResult(); + LineageRelationshipArray array = new LineageRelationshipArray(); + LineageRelationship lineageRelationship = + new LineageRelationship().setEntity(corpuserUrn).setType("type"); + for (int i = 0; i < 10000; i++) { + array.add(lineageRelationship); } + lineageResult.setRelationships(array).setCount(1).setStart(0).setTotal(1); + CachedEntityLineageResult cachedEntityLineageResult = + new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()); + + Cache cache1 = cacheManager1.getCache("relationshipSearchService"); + Cache cache2 = cacheManager2.getCache("relationshipSearchService"); + + EntityLineageResultCacheKey key = + new EntityLineageResultCacheKey( + corpuserUrn, LineageDirection.DOWNSTREAM, 0L, 1L, 1, ChronoUnit.DAYS); + + cache1.put(key, cachedEntityLineageResult); + + Assert.assertEquals( + instance1.getMap("relationshipSearchService").get(key), + instance2.getMap("relationshipSearchService").get(key)); + CachedEntityLineageResult cachedResult1 = cache1.get(key, CachedEntityLineageResult.class); + CachedEntityLineageResult cachedResult2 = cache2.get(key, CachedEntityLineageResult.class); + Assert.assertEquals(cachedResult1, cachedResult2); + Assert.assertEquals( + cache1.get(key, CachedEntityLineageResult.class), cachedEntityLineageResult); + Assert.assertEquals( + cache2.get(key, CachedEntityLineageResult.class).getEntityLineageResult(), lineageResult); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java index 266039afb45d5..f910f7981b138 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import org.opensearch.action.support.WriteRequest; @@ -10,19 +13,15 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @TestPropertySource(locations = 
"classpath:/application.yml") @SpringBootTest(classes = {ElasticSearchBulkProcessorFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchBulkProcessorFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ESBulkProcessor test; + @Autowired ESBulkProcessor test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(WriteRequest.RefreshPolicy.NONE, test.getWriteRequestRefreshPolicy()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(WriteRequest.RefreshPolicy.NONE, test.getWriteRequestRefreshPolicy()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java index 6ef623648640a..a3f3f469ea611 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java @@ -1,33 +1,31 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.util.Map; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @SpringBootTest( - properties = { - "elasticsearch.index.settingsOverrides=", - "elasticsearch.index.entitySettingsOverrides=", - "elasticsearch.index.prefix=test_prefix" - }, - classes = {ElasticSearchIndexBuilderFactory.class}) + properties = { + "elasticsearch.index.settingsOverrides=", + "elasticsearch.index.entitySettingsOverrides=", + "elasticsearch.index.prefix=test_prefix" + }, + classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchIndexBuilderFactoryEmptyTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(Map.of(), test.getIndexSettingOverrides()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), test.getIndexSettingOverrides()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java index 21c3265753ac5..fa4575c1e4142 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java @@ -1,31 +1,36 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.*; + import 
com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import org.springframework.beans.factory.annotation.Autowired; - -import static org.testng.Assert.*; @SpringBootTest( - properties = { - "elasticsearch.index.settingsOverrides={\"my_index\":{\"number_of_shards\":\"10\"}}", - "elasticsearch.index.entitySettingsOverrides={\"my_entity\":{\"number_of_shards\":\"5\"}}", - "elasticsearch.index.prefix=test_prefix" - }, - classes = {ElasticSearchIndexBuilderFactory.class}) + properties = { + "elasticsearch.index.settingsOverrides={\"my_index\":{\"number_of_shards\":\"10\"}}", + "elasticsearch.index.entitySettingsOverrides={\"my_entity\":{\"number_of_shards\":\"5\"}}", + "elasticsearch.index.prefix=test_prefix" + }, + classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) -public class ElasticSearchIndexBuilderFactoryOverridesTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; +public class ElasticSearchIndexBuilderFactoryOverridesTest + extends AbstractTestNGSpringContextTests { + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals("10", test.getIndexSettingOverrides().get("test_prefix_my_index").get("number_of_shards")); - assertEquals("5", test.getIndexSettingOverrides().get("test_prefix_my_entityindex_v2").get("number_of_shards")); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals( + "10", test.getIndexSettingOverrides().get("test_prefix_my_index").get("number_of_shards")); + assertEquals( + "5", + test.getIndexSettingOverrides() + .get("test_prefix_my_entityindex_v2") + .get("number_of_shards")); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java index 4d63d18f370eb..2c309cb44b04e 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java @@ -1,7 +1,11 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; @@ -9,21 +13,15 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.util.Map; - -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertEquals; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {ElasticSearchIndexBuilderFactory.class}) 
@EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchIndexBuilderFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(Map.of(), test.getIndexSettingOverrides()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), test.getIndexSettingOverrides()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java index 6461df2894326..a8e6b50089602 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java @@ -1,7 +1,12 @@ package com.linkedin.gms.factory.secret; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.secret.SecretService; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -10,28 +15,22 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {SecretServiceFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class SecretServiceFactoryTest extends AbstractTestNGSpringContextTests { - @Value("${secretService.encryptionKey}") - private String encryptionKey; + @Value("${secretService.encryptionKey}") + private String encryptionKey; - @Autowired - SecretService test; + @Autowired SecretService test; - @Test - void testInjection() throws IOException { - assertEquals(encryptionKey, "ENCRYPTION_KEY"); - assertNotNull(test); - assertEquals(test.getHashedPassword("".getBytes(StandardCharsets.UTF_8), "password"), - "XohImNooBHFR0OVvjcYpJ3NgPQ1qq73WKhHvch0VQtg="); - } + @Test + void testInjection() throws IOException { + assertEquals(encryptionKey, "ENCRYPTION_KEY"); + assertNotNull(test); + assertEquals( + test.getHashedPassword("".getBytes(StandardCharsets.UTF_8), "password"), + "XohImNooBHFR0OVvjcYpJ3NgPQ1qq73WKhHvch0VQtg="); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java index 49fce75ab7c61..8268eeff48c5e 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import 
com.linkedin.common.BrowsePathEntry; @@ -20,56 +22,57 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class BackfillBrowsePathsV2StepTest { private static final String VERSION = "2"; - private static final String UPGRADE_URN = String.format( - "urn:li:%s:%s", - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - "backfill-default-browse-paths-v2-step"); + private static final String UPGRADE_URN = + String.format( + "urn:li:%s:%s", + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "backfill-default-browse-paths-v2-step"); - private static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; + private static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; - private static final String DATA_JOB_URN = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATA_JOB_URN = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; private static final String DATA_FLOW_URN = "urn:li:dataFlow:(orchestrator,flowId,cluster)"; - private static final String ML_MODEL_URN = "urn:li:mlModel:(urn:li:dataPlatform:sagemaker,trustmodel,PROD)"; - private static final String ML_MODEL_GROUP_URN = "urn:li:mlModelGroup:(urn:li:dataPlatform:sagemaker,a-model-package-group,PROD)"; - private static final String ML_FEATURE_TABLE_URN = "urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,user_features)"; + private static final String ML_MODEL_URN = + "urn:li:mlModel:(urn:li:dataPlatform:sagemaker,trustmodel,PROD)"; + private static final String ML_MODEL_GROUP_URN = + "urn:li:mlModelGroup:(urn:li:dataPlatform:sagemaker,a-model-package-group,PROD)"; + private static final String ML_FEATURE_TABLE_URN = + "urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,user_features)"; private static final String ML_FEATURE_URN = "urn:li:mlFeature:(test,feature_1)"; - private static final List<String> ENTITY_TYPES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); - private static final List<Urn> ENTITY_URNS = ImmutableList.of( - UrnUtils.getUrn(DATASET_URN), - UrnUtils.getUrn(DASHBOARD_URN), - UrnUtils.getUrn(CHART_URN), - UrnUtils.getUrn(DATA_JOB_URN), - UrnUtils.getUrn(DATA_FLOW_URN), - UrnUtils.getUrn(ML_MODEL_URN), - UrnUtils.getUrn(ML_MODEL_GROUP_URN), - UrnUtils.getUrn(ML_FEATURE_TABLE_URN), - UrnUtils.getUrn(ML_FEATURE_URN) - ); - + private static final List<String> ENTITY_TYPES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + 
Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); + private static final List<Urn> ENTITY_URNS = + ImmutableList.of( + UrnUtils.getUrn(DATASET_URN), + UrnUtils.getUrn(DASHBOARD_URN), + UrnUtils.getUrn(CHART_URN), + UrnUtils.getUrn(DATA_JOB_URN), + UrnUtils.getUrn(DATA_FLOW_URN), + UrnUtils.getUrn(ML_MODEL_URN), + UrnUtils.getUrn(ML_MODEL_GROUP_URN), + UrnUtils.getUrn(ML_FEATURE_TABLE_URN), + UrnUtils.getUrn(ML_FEATURE_URN)); @Test public void testExecuteNoExistingBrowsePaths() throws Exception { @@ -77,31 +80,32 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); - - BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = new BackfillBrowsePathsV2Step(mockService, mockSearchService); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); + + BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = + new BackfillBrowsePathsV2Step(mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); - Mockito.verify(mockSearchService, Mockito.times(9)).scrollAcrossEntities( - Mockito.any(), - Mockito.eq("*"), - Mockito.any(Filter.class), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null) - ); - // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of each entity type - Mockito.verify(mockService, Mockito.times(11)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockSearchService, Mockito.times(9)) + .scrollAcrossEntities( + Mockito.any(), + Mockito.eq("*"), + Mockito.any(Filter.class), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq("5m"), + Mockito.eq(5000), + Mockito.eq(null)); + // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of + // each entity type + Mockito.verify(mockService, Mockito.times(11)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test @@ -110,42 +114,51 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - 
Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(response); - - BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = new BackfillBrowsePathsV2Step(mockService, mockSearchService); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(response); + + BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = + new BackfillBrowsePathsV2Step(mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } - private EntityService initMockService() throws URISyntaxException { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new UpgradeDefaultBrowsePathsStepTest.TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when(mockService.buildDefaultBrowsePathV2(Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))).thenReturn( - new BrowsePathsV2().setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); - - Mockito.when(mockService.getEntityV2( - Mockito.any(), - Mockito.eq(ENTITY_URNS.get(i)), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)) - )).thenReturn(null); + Mockito.when( + mockService.buildDefaultBrowsePathV2( + Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))) + .thenReturn( + new BrowsePathsV2() + .setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); + + Mockito.when( + mockService.getEntityV2( + Mockito.any(), + Mockito.eq(ENTITY_URNS.get(i)), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(null); } return mockService; @@ -155,16 +168,21 @@ private SearchService initMockSearchService() { final SearchService mockSearchService = Mockito.mock(SearchService.class); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when(mockSearchService.scrollAcrossEntities( - Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), - Mockito.eq("*"), - Mockito.any(Filter.class), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null) - )).thenReturn(new ScrollResult().setNumEntities(1).setEntities(new SearchEntityArray(new SearchEntity().setEntity(ENTITY_URNS.get(i))))); + Mockito.when( + mockSearchService.scrollAcrossEntities( + Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), + Mockito.eq("*"), + Mockito.any(Filter.class), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq("5m"), + Mockito.eq(5000), + Mockito.eq(null))) + .thenReturn( + new ScrollResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray(new SearchEntity().setEntity(ENTITY_URNS.get(i))))); } return mockSearchService; diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java 
b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java index 0ae8eb2cba808..976698f3032d2 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,31 +21,31 @@ import org.jetbrains.annotations.NotNull; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - /** * Test the behavior of IngestDataPlatformInstancesStep. * - * We expect it to check if any data platform instance aspects already exist in the database and if none are found, - * to go through all the stored entities and ingest a data platform instance aspect for any that are compatible with it. + * <p>We expect it to check if any data platform instance aspects already exist in the database and + * if none are found, to go through all the stored entities and ingest a data platform instance + * aspect for any that are compatible with it. * - * CorpUser is used as an example of an entity that is not compatible with data platform instance and therefore should be ignored. - * Char is used as an example of an entity that should get adorned with a data platform instance. + * <p>CorpUser is used as an example of an entity that is not compatible with data platform instance + * and therefore should be ignored. Char is used as an example of an entity that should get adorned + * with a data platform instance. * - * See {@link DataPlatformInstanceUtils} for the compatibility rules. + * <p>See {@link DataPlatformInstanceUtils} for the compatibility rules. 
*/ public class IngestDataPlatformInstancesStepTest { @Test - public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() throws Exception { + public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() + throws Exception { final EntityService entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class); mockDBWithDataPlatformInstanceAspects(migrationsDao); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(migrationsDao, times(1)).checkIfAspectExists(anyString()); @@ -57,7 +60,8 @@ public void testExecuteCopesWithEmptyDB() throws Exception { mockEmptyDB(migrationsDao); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(migrationsDao, times(1)).checkIfAspectExists(anyString()); @@ -75,9 +79,15 @@ public void testExecuteChecksKeySpecForAllUrns() throws Exception { final int countOfChartEntities = 4; final int totalUrnsInDB = countOfCorpUserEntities + countOfChartEntities; - mockDBWithWorkToDo(entityRegistry, entityService, migrationsDao, countOfCorpUserEntities, countOfChartEntities); + mockDBWithWorkToDo( + entityRegistry, + entityService, + migrationsDao, + countOfCorpUserEntities, + countOfChartEntities); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(entityService, times(totalUrnsInDB)).getKeyAspectSpec(any(Urn.class)); @@ -91,35 +101,55 @@ public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throw final int countOfCorpUserEntities = 5; final int countOfChartEntities = 7; - mockDBWithWorkToDo(entityRegistry, entityService, migrationsDao, countOfCorpUserEntities, countOfChartEntities); + mockDBWithWorkToDo( + entityRegistry, + entityService, + migrationsDao, + countOfCorpUserEntities, + countOfChartEntities); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(entityService, times(1)) .ingestAspects( - argThat(arg -> - arg.getItems().stream() - .allMatch(item -> item.getUrn().getEntityType().equals("chart") - && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance) - ), + argThat( + arg -> + arg.getItems().stream() + .allMatch( + item -> + item.getUrn().getEntityType().equals("chart") + && item.getAspectName() + .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) + && ((UpsertBatchItem) item).getAspect() + instanceof DataPlatformInstance)), any(), anyBoolean(), anyBoolean()); verify(entityService, times(0)) - .ingestAspects(argThat(arg -> - !arg.getItems().stream() - .allMatch(item -> item.getUrn().getEntityType().equals("chart") - && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance) - ), any(), anyBoolean(), 
anyBoolean()); + .ingestAspects( + argThat( + arg -> + !arg.getItems().stream() + .allMatch( + item -> + item.getUrn().getEntityType().equals("chart") + && item.getAspectName() + .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) + && ((UpsertBatchItem) item).getAspect() + instanceof DataPlatformInstance)), + any(), + anyBoolean(), + anyBoolean()); } @NotNull private ConfigEntityRegistry getTestEntityRegistry() { return new ConfigEntityRegistry( - IngestDataPlatformInstancesStepTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); + IngestDataPlatformInstancesStepTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); } private void mockDBWithDataPlatformInstanceAspects(AspectMigrationsDao migrationsDao) { @@ -137,18 +167,36 @@ private void mockDBWithWorkToDo( AspectMigrationsDao migrationsDao, int countOfCorpUserEntities, int countOfChartEntities) { - List<Urn> corpUserUrns = insertMockEntities(countOfCorpUserEntities, "corpuser", "urn:li:corpuser:test%d", entityRegistry, - entityService); - List<Urn> charUrns = insertMockEntities(countOfChartEntities, "chart", "urn:li:chart:(looker,test%d)", entityRegistry, - entityService); - List<String> allUrnsInDB = Stream.concat(corpUserUrns.stream(), charUrns.stream()).map(Urn::toString).collect(Collectors.toList()); + List<Urn> corpUserUrns = + insertMockEntities( + countOfCorpUserEntities, + "corpuser", + "urn:li:corpuser:test%d", + entityRegistry, + entityService); + List<Urn> charUrns = + insertMockEntities( + countOfChartEntities, + "chart", + "urn:li:chart:(looker,test%d)", + entityRegistry, + entityService); + List<String> allUrnsInDB = + Stream.concat(corpUserUrns.stream(), charUrns.stream()) + .map(Urn::toString) + .collect(Collectors.toList()); when(migrationsDao.checkIfAspectExists(DATA_PLATFORM_INSTANCE_ASPECT_NAME)).thenReturn(false); when(migrationsDao.countEntities()).thenReturn((long) allUrnsInDB.size()); when(migrationsDao.listAllUrns(anyInt(), anyInt())).thenReturn(allUrnsInDB); when(entityService.getEntityRegistry()).thenReturn(entityRegistry); } - private List<Urn> insertMockEntities(int count, String entity, String urnTemplate, EntityRegistry entityRegistry, EntityService entityService) { + private List<Urn> insertMockEntities( + int count, + String entity, + String urnTemplate, + EntityRegistry entityRegistry, + EntityService entityService) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entity); AspectSpec keySpec = entitySpec.getKeyAspectSpec(); List<Urn> urns = new ArrayList<>(); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java index 24bdd193a39c8..b28a6e9f5cc5b 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.events.metadata.ChangeType; @@ -12,15 +15,11 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - /** * Test the behavior of 
IngestDefaultGlobalSettingsStep. * - * We expect it to ingest a JSON file, throwing if the JSON file - * is malformed or does not match the PDL model for GlobalSettings.pdl. + * <p>We expect it to ingest a JSON file, throwing if the JSON file is malformed or does not match + * the PDL model for GlobalSettings.pdl. */ public class IngestDefaultGlobalSettingsStepTest { @@ -29,20 +28,21 @@ public void testExecuteValidSettingsNoExistingSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_valid.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_valid.json"); step.execute(); GlobalSettingsInfo expectedResult = new GlobalSettingsInfo(); - expectedResult.setViews(new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:test"))); - - Mockito.verify(entityService, times(1)).ingestProposal( - Mockito.eq(buildUpdateSettingsProposal(expectedResult)), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + expectedResult.setViews( + new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:test"))); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateSettingsProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -50,26 +50,29 @@ public void testExecuteValidSettingsExistingSettings() throws Exception { // Verify that the user provided settings overrides are NOT overwritten. final EntityService entityService = mock(EntityService.class); - final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo() - .setViews(new GlobalViewsSettings() - .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); + final GlobalSettingsInfo existingSettings = + new GlobalSettingsInfo() + .setViews( + new GlobalViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); configureEntityServiceMock(entityService, existingSettings); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_valid.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_valid.json"); step.execute(); // Verify that the merge preserves the user settings. 
GlobalSettingsInfo expectedResult = new GlobalSettingsInfo(); - expectedResult.setViews(new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); - - Mockito.verify(entityService, times(1)).ingestProposal( - Mockito.eq(buildUpdateSettingsProposal(expectedResult)), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + expectedResult.setViews( + new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateSettingsProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -77,9 +80,9 @@ public void testExecuteInvalidJsonSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_invalid_json.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_invalid_json.json"); Assert.assertThrows(RuntimeException.class, step::execute); @@ -92,9 +95,9 @@ public void testExecuteInvalidModelSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_invalid_model.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_invalid_model.json"); Assert.assertThrows(RuntimeException.class, step::execute); @@ -102,15 +105,18 @@ public void testExecuteInvalidModelSettings() throws Exception { verifyNoInteractions(entityService); } - private static void configureEntityServiceMock(final EntityService mockService, final GlobalSettingsInfo settingsInfo) { - Mockito.when(mockService.getAspect( - Mockito.eq(GLOBAL_SETTINGS_URN), - Mockito.eq(GLOBAL_SETTINGS_INFO_ASPECT_NAME), - Mockito.eq(0L) - )).thenReturn(settingsInfo); + private static void configureEntityServiceMock( + final EntityService mockService, final GlobalSettingsInfo settingsInfo) { + Mockito.when( + mockService.getAspect( + Mockito.eq(GLOBAL_SETTINGS_URN), + Mockito.eq(GLOBAL_SETTINGS_INFO_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(settingsInfo); } - private static MetadataChangeProposal buildUpdateSettingsProposal(final GlobalSettingsInfo settings) { + private static MetadataChangeProposal buildUpdateSettingsProposal( + final GlobalSettingsInfo settings) { final MetadataChangeProposal mcp = new MetadataChangeProposal(); mcp.setEntityUrn(GLOBAL_SETTINGS_URN); mcp.setEntityType(GLOBAL_SETTINGS_ENTITY_NAME); @@ -119,4 +125,4 @@ private static MetadataChangeProposal buildUpdateSettingsProposal(final GlobalSe mcp.setAspect(GenericRecordUtils.serializeAspect(settings)); return mcp; } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java index aca5e322567d8..5a9e93f70c952 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java +++ 
b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java @@ -22,25 +22,27 @@ import com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; -import org.mockito.Mockito; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Future; +import javax.annotation.Nonnull; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RestoreColumnLineageIndicesTest { private static final String VERSION_1 = "1"; private static final String VERSION_2 = "2"; private static final String COLUMN_LINEAGE_UPGRADE_URN = - String.format("urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-column-lineage-indices"); - private final Urn datasetUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + String.format( + "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-column-lineage-indices"); + private final Urn datasetUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); private final Urn chartUrn = UrnUtils.getUrn("urn:li:chart:(looker,dashboard_elements.1)"); - private final Urn dashboardUrn = UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.thelook::web_analytics_overview)"); + private final Urn dashboardUrn = + UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.thelook::web_analytics_overview)"); @Test public void testExecuteFirstTime() throws Exception { @@ -54,54 +56,55 @@ public void testExecuteFirstTime() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), 
- Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test @@ -116,54 +119,55 @@ public void testExecuteWithNewVersion() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + 
Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test @@ -178,106 +182,126 @@ public void testDoesNotExecuteWithSameVersion() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + 
Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityService mockService) { - final List<ExtraInfo> extraInfos = ImmutableList.of( - new ExtraInfo() - .setUrn(datasetUrn) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)) - ); + final List<ExtraInfo> extraInfos = + ImmutableList.of( + new ExtraInfo() + .setUrn(datasetUrn) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(1000) - )).thenReturn(new ListResult<>( - ImmutableList.of(new UpstreamLineage()), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), - 1, - false, - 1, - 1, - 1)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new ListResult<>( + ImmutableList.of(new UpstreamLineage()), + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), + 1, + false, + 1, + 1, + 1)); } - private void mockGetInputFields(@Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { - final List<ExtraInfo> extraInfos = ImmutableList.of( - new ExtraInfo() - .setUrn(entityUrn) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)) - ); + private void mockGetInputFields( + @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { + final List<ExtraInfo> extraInfos = + ImmutableList.of( + new ExtraInfo() + .setUrn(entityUrn) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(entityName), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(1000) - )).thenReturn(new ListResult<>( - ImmutableList.of(new InputFields()), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), - 1, - false, - 1, - 1, - 1)); + Mockito.when( + 
mockService.listLatestAspects( + Mockito.eq(entityName), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new ListResult<>( + ImmutableList.of(new InputFields()), + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), + 1, + false, + 1, + 1, + 1)); } private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { @@ -285,28 +309,39 @@ private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { final AspectSpec aspectSpec = Mockito.mock(AspectSpec.class); // Mock for upstreamLineage Mockito.when(mockRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock inputFields for charts Mockito.when(mockRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock inputFields for dashboards - Mockito.when(mockRegistry.getEntitySpec(Constants.DASHBOARD_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.DASHBOARD_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); return aspectSpec; } - private void mockGetUpgradeStep(boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) throws Exception { + private void mockGetUpgradeStep( + boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) + throws Exception { final Urn upgradeEntityUrn = UrnUtils.getUrn(COLUMN_LINEAGE_UPGRADE_URN); - final com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(version); + final com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(version); final Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - final EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(shouldReturnResponse ? response : null); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + final EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(shouldReturnResponse ? 
response : null); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java index 3753904053256..a4f0c5e0aaba0 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java @@ -13,59 +13,107 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.models.EntitySpec; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; -import java.util.List; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.Future; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RestoreGlossaryIndicesTest { private static final String VERSION_1 = "1"; private static final String VERSION_2 = "2"; - private static final String GLOSSARY_UPGRADE_URN = String.format("urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); + private static final String GLOSSARY_UPGRADE_URN = + String.format( + "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); - private void mockGetTermInfo(Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { + private void mockGetTermInfo( + Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) + throws Exception { Map<String, EnvelopedAspect> termInfoAspects = new HashMap<>(); - termInfoAspects.put(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(new GlossaryTermInfo().setName("test").data()))); + termInfoAspects.put( + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(new GlossaryTermInfo().setName("test").data()))); Map<Urn, EntityResponse> termInfoResponses = new HashMap<>(); - termInfoResponses.put(glossaryTermUrn, new EntityResponse().setUrn(glossaryTermUrn).setAspects(new EnvelopedAspectMap(termInfoAspects))); - Mockito.when(mockSearchService.search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, null, 0, 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true))) - .thenReturn(new SearchResult().setNumEntities(1).setEntities(new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(glossaryTermUrn))))); - Mockito.when(mockService.getEntitiesV2( - Constants.GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(Collections.singleton(glossaryTermUrn)), - Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME))) + termInfoResponses.put( + glossaryTermUrn, + new EntityResponse() + .setUrn(glossaryTermUrn) + .setAspects(new EnvelopedAspectMap(termInfoAspects))); + Mockito.when( + 
mockSearchService.search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true))) + .thenReturn( + new SearchResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(glossaryTermUrn))))); + Mockito.when( + mockService.getEntitiesV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(Collections.singleton(glossaryTermUrn)), + Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME))) .thenReturn(termInfoResponses); } - private void mockGetNodeInfo(Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { + private void mockGetNodeInfo( + Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) + throws Exception { Map<String, EnvelopedAspect> nodeInfoAspects = new HashMap<>(); - nodeInfoAspects.put(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(new GlossaryNodeInfo().setName("test").data()))); + nodeInfoAspects.put( + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(new GlossaryNodeInfo().setName("test").data()))); Map<Urn, EntityResponse> nodeInfoResponses = new HashMap<>(); - nodeInfoResponses.put(glossaryNodeUrn, new EntityResponse().setUrn(glossaryNodeUrn).setAspects(new EnvelopedAspectMap(nodeInfoAspects))); - Mockito.when(mockSearchService.search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "", null, null, 0, 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true))) - .thenReturn(new SearchResult().setNumEntities(1).setEntities(new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(glossaryNodeUrn))))); - Mockito.when(mockService.getEntitiesV2( - Constants.GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(Collections.singleton(glossaryNodeUrn)), - Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) - )) + nodeInfoResponses.put( + glossaryNodeUrn, + new EntityResponse() + .setUrn(glossaryNodeUrn) + .setAspects(new EnvelopedAspectMap(nodeInfoAspects))); + Mockito.when( + mockSearchService.search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true))) + .thenReturn( + new SearchResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(glossaryNodeUrn))))); + Mockito.when( + mockService.getEntitiesV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(Collections.singleton(glossaryNodeUrn)), + Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME))) .thenReturn(nodeInfoResponses); } @@ -73,200 +121,257 @@ private AspectSpec mockGlossaryAspectSpecs(EntityRegistry mockRegistry) { EntitySpec entitySpec = Mockito.mock(EntitySpec.class); AspectSpec aspectSpec = Mockito.mock(AspectSpec.class); // Mock for Terms - Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock for Nodes - 
Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)) + .thenReturn(aspectSpec); return aspectSpec; } @Test public void testExecuteFirstTime() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(null); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(null); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService); mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService); AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, 
Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test public void testExecutesWithNewVersion() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_2); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_2); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(response); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + EntityResponse response = + new 
EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(response); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService); mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService); AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test public void testDoesNotRunWhenAlreadyExecuted() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = 
Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(response); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(response); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockSearchService, Mockito.times(0)).search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), - "", null, null, 0, 1000, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - Mockito.verify(mockSearchService, Mockito.times(0)).search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), - "", null, null, 0, 1000, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.any(), - Mockito.eq(null), 
- Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(0)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(0)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockSearchService, Mockito.times(0)) + .search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.verify(mockSearchService, Mockito.times(0)) + .search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java index 5e4ad6e7fe880..17159ba1baf53 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java @@ -37,14 +37,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; - public class UpgradeDefaultBrowsePathsStepTest { private static final String VERSION_1 = "1"; - private static final String UPGRADE_URN = String.format( - "urn:li:%s:%s", - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - "upgrade-default-browse-paths-step"); + private static final String UPGRADE_URN = + String.format( + "urn:li:%s:%s", + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "upgrade-default-browse-paths-step"); @Test public void testExecuteNoExistingBrowsePaths() throws Exception { @@ -54,180 +53,218 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); final List<RecordTemplate> browsePaths1 = Collections.emptyList(); - 
Mockito.when(mockService.listLatestAspects(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
-        Mockito.eq(0),
-        Mockito.eq(5000)
-    )).thenReturn(new ListResult<>(
-        browsePaths1,
-        new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())),
-        0,
-        false,
-        0,
-        0,
-        2));
+    Mockito.when(
+            mockService.listLatestAspects(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+                Mockito.eq(0),
+                Mockito.eq(5000)))
+        .thenReturn(
+            new ListResult<>(
+                browsePaths1,
+                new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())),
+                0,
+                false,
+                0,
+                0,
+                2));
 
     initMockServiceOtherEntities(mockService);
 
-    UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService);
+    UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep =
+        new UpgradeDefaultBrowsePathsStep(mockService);
 
     upgradeDefaultBrowsePathsStep.execute();
 
-    Mockito.verify(mockService, Mockito.times(1)).listLatestAspects(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
-        Mockito.eq(0),
-        Mockito.eq(5000)
-    );
-    // Verify that 4 aspects are ingested, 2 for the upgrade request / result, but none for ingesting
-    Mockito.verify(mockService, Mockito.times(2)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .listLatestAspects(
+            Mockito.eq(Constants.DATASET_ENTITY_NAME),
+            Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+            Mockito.eq(0),
+            Mockito.eq(5000));
+    // Verify that only 2 aspects are ingested, for the upgrade request / result, and none for
+    // the browse paths
+    Mockito.verify(mockService, Mockito.times(2))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false));
   }
 
   @Test
   public void testExecuteFirstTime() throws Exception {
-    Urn testUrn1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset1,PROD)");
-    Urn testUrn2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)");
+    Urn testUrn1 =
+        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset1,PROD)");
+    Urn testUrn2 =
+        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)");
 
     final EntityService mockService = Mockito.mock(EntityService.class);
     final EntityRegistry registry = new TestEntityRegistry();
     Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);
 
-    Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1))).thenReturn(
-        new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
-    Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2))).thenReturn(
-        new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
+    Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1)))
+        .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
+    Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2)))
+        .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
 
     final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN);
-    Mockito.when(mockService.getEntityV2(
-        Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME),
-        Mockito.eq(upgradeEntityUrn),
-        Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))
-    )).thenReturn(null);
-    final List<RecordTemplate> browsePaths1 = ImmutableList.of(
-        new BrowsePaths().setPaths(new StringArray(ImmutableList.of(BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn1, registry)))),
-        new BrowsePaths().setPaths(new StringArray(ImmutableList.of(BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn2, registry))))
-    );
-
-    final List<ExtraInfo> extraInfos1 = ImmutableList.of(
-        new ExtraInfo()
-            .setUrn(testUrn1)
-            .setVersion(0L)
-            .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)),
-        new ExtraInfo()
-            .setUrn(testUrn2)
-            .setVersion(0L)
-            .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))
-    );
-
-    Mockito.when(mockService.listLatestAspects(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
-        Mockito.eq(0),
-        Mockito.eq(5000)
-    )).thenReturn(new ListResult<>(
-        browsePaths1,
-        new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos1)),
-        2,
-        false,
-        2,
-        2,
-        2));
+    Mockito.when(
+            mockService.getEntityV2(
+                Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME),
+                Mockito.eq(upgradeEntityUrn),
+                Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))))
+        .thenReturn(null);
+    final List<RecordTemplate> browsePaths1 =
+        ImmutableList.of(
+            new BrowsePaths()
+                .setPaths(
+                    new StringArray(
+                        ImmutableList.of(
+                            BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn1, registry)))),
+            new BrowsePaths()
+                .setPaths(
+                    new StringArray(
+                        ImmutableList.of(
+                            BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn2, registry)))));
+
+    final List<ExtraInfo> extraInfos1 =
+        ImmutableList.of(
+            new ExtraInfo()
+                .setUrn(testUrn1)
+                .setVersion(0L)
+                .setAudit(
+                    new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)),
+            new ExtraInfo()
+                .setUrn(testUrn2)
+                .setVersion(0L)
+                .setAudit(
+                    new AuditStamp()
+                        .setActor(UrnUtils.getUrn("urn:li:corpuser:test"))
+                        .setTime(0L)));
+
+    Mockito.when(
+            mockService.listLatestAspects(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+                Mockito.eq(0),
+                Mockito.eq(5000)))
+        .thenReturn(
+            new ListResult<>(
+                browsePaths1,
+                new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos1)),
+                2,
+                false,
+                2,
+                2,
+                2));
 
     initMockServiceOtherEntities(mockService);
 
-    UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService);
+    UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep =
+        new UpgradeDefaultBrowsePathsStep(mockService);
 
     upgradeDefaultBrowsePathsStep.execute();
 
-    Mockito.verify(mockService, Mockito.times(1)).listLatestAspects(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
-        Mockito.eq(0),
-        Mockito.eq(5000)
-    );
-    // Verify that 4 aspects are ingested, 2 for the upgrade request / result and 2 for the browse pahts
-    Mockito.verify(mockService, Mockito.times(4)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .listLatestAspects(
+            Mockito.eq(Constants.DATASET_ENTITY_NAME),
+            Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+            Mockito.eq(0),
+            Mockito.eq(5000));
+    // Verify that 4 aspects are ingested, 2 for the upgrade request / result and 2 for the browse
+    // paths
+    Mockito.verify(mockService, Mockito.times(4))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false));
   }
 
   @Test
   public 
void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception { // Test for browse paths that are not ingested - Urn testUrn3 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset3,PROD)"); // Do not migrate - Urn testUrn4 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not migrate + Urn testUrn3 = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset3,PROD)"); // Do not + // migrate + Urn testUrn4 = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not + // migrate final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); - - final List<RecordTemplate> browsePaths2 = ImmutableList.of( - new BrowsePaths().setPaths(new StringArray(ImmutableList.of( - BrowsePathUtils.getDefaultBrowsePath(testUrn3, registry, '.')))), - new BrowsePaths().setPaths(new StringArray(ImmutableList.of( - BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn4, registry), - BrowsePathUtils.getDefaultBrowsePath(testUrn4, registry, '.')))) - ); - - final List<ExtraInfo> extraInfos2 = ImmutableList.of( - new ExtraInfo() - .setUrn(testUrn3) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)), - new ExtraInfo() - .setUrn(testUrn4) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))); - - - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - browsePaths2, - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos2)), - 2, - false, - 2, - 2, - 2)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); + + final List<RecordTemplate> browsePaths2 = + ImmutableList.of( + new BrowsePaths() + .setPaths( + new StringArray( + ImmutableList.of( + BrowsePathUtils.getDefaultBrowsePath(testUrn3, registry, '.')))), + new BrowsePaths() + .setPaths( + new StringArray( + ImmutableList.of( + BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn4, registry), + BrowsePathUtils.getDefaultBrowsePath(testUrn4, registry, '.'))))); + + final List<ExtraInfo> extraInfos2 = + ImmutableList.of( + new ExtraInfo() + .setUrn(testUrn3) + .setVersion(0L) + .setAudit( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)), + new ExtraInfo() + .setUrn(testUrn4) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); + + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + browsePaths2, + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos2)), + 2, + false, 
+ 2, + 2, + 2)); initMockServiceOtherEntities(mockService); - UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService); + UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = + new UpgradeDefaultBrowsePathsStep(mockService); upgradeDefaultBrowsePathsStep.execute(); - Mockito.verify(mockService, Mockito.times(1)).listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - ); + Mockito.verify(mockService, Mockito.times(1)) + .listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000)); // Verify that 2 aspects are ingested, only those for the upgrade step - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test @@ -235,48 +272,55 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { final EntityService mockService = Mockito.mock(EntityService.class); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(response); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(response); UpgradeDefaultBrowsePathsStep step = new UpgradeDefaultBrowsePathsStep(mockService); step.execute(); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } private void initMockServiceOtherEntities(EntityService mockService) { - List<String> skippedEntityTypes = ImmutableList.of( - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME - ); + List<String> skippedEntityTypes = + ImmutableList.of( + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME); for (String entityType : skippedEntityTypes) { - 
Mockito.when(mockService.listLatestAspects( - Mockito.eq(entityType), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - Collections.emptyList(), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())), - 0, - false, - 0, - 0, - 0)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(entityType), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + Collections.emptyList(), + new ListResultMetadata() + .setExtraInfos(new ExtraInfoArray(Collections.emptyList())), + 0, + false, + 0, + 0, + 0)); } } @@ -285,10 +329,10 @@ public static class TestEntityRegistry implements EntityRegistry { private final Map<String, EntitySpec> entityNameToSpec; public TestEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) - .buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); } @Nonnull diff --git a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java index fe0d61986b4a6..9931f044931b6 100644 --- a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java +++ b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java @@ -1,5 +1,8 @@ package io.datahubproject.telemetry; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.gms.factory.telemetry.TelemetryUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.telemetry.TelemetryClientId; @@ -7,10 +10,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.AssertJUnit.assertEquals; - - public class TelemetryUtilsTest { EntityService _entityService; @@ -18,7 +17,8 @@ public class TelemetryUtilsTest { @BeforeMethod public void init() { _entityService = Mockito.mock(EntityService.class); - Mockito.when(_entityService.getLatestAspect(any(), anyString())).thenReturn(new TelemetryClientId().setClientId("1234")); + Mockito.when(_entityService.getLatestAspect(any(), anyString())) + .thenReturn(new TelemetryClientId().setClientId("1234")); } @Test diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 043c142da8323..692208c42f90c 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -1,5 +1,7 @@ package com.datahub.graphql; +import static com.linkedin.metadata.Constants.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -31,9 +33,6 @@ import org.springframework.web.bind.annotation.PostMapping; 
import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RestController public class GraphQLController { @@ -43,20 +42,22 @@ public GraphQLController() { MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "call")); } - @Inject - GraphQLEngine _engine; + @Inject GraphQLEngine _engine; - @Inject - AuthorizerChain _authorizerChain; + @Inject AuthorizerChain _authorizerChain; @PostMapping(value = "/graphql", produces = "application/json;charset=utf-8") CompletableFuture<ResponseEntity<String>> postGraphQL(HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode bodyJson = null; try { bodyJson = mapper.readTree(jsonStr); @@ -81,9 +82,11 @@ CompletableFuture<ResponseEntity<String>> postGraphQL(HttpEntity<String> httpEnt * Extract "variables" map */ JsonNode variablesJson = bodyJson.get("variables"); - final Map<String, Object> variables = (variablesJson != null && !variablesJson.isNull()) - ? new ObjectMapper().convertValue(variablesJson, new TypeReference<Map<String, Object>>() { }) - : Collections.emptyMap(); + final Map<String, Object> variables = + (variablesJson != null && !variablesJson.isNull()) + ? new ObjectMapper() + .convertValue(variablesJson, new TypeReference<Map<String, Object>>() {}) + : Collections.emptyMap(); log.debug(String.format("Executing graphQL query: %s, variables: %s", queryJson, variables)); @@ -91,61 +94,76 @@ CompletableFuture<ResponseEntity<String>> postGraphQL(HttpEntity<String> httpEnt * Init QueryContext */ Authentication authentication = AuthenticationContext.getAuthentication(); - SpringQueryContext context = new SpringQueryContext( - true, - authentication, - _authorizerChain); - - return CompletableFuture.supplyAsync(() -> { - /* - * Execute GraphQL Query - */ - ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context); - - if (executionResult.getErrors().size() != 0) { - // There were GraphQL errors. Report in error logs. 
- log.error(String.format("Errors while executing graphQL query: %s, result: %s, errors: %s", - queryJson, - executionResult.toSpecification(), - executionResult.getErrors())); - } else { - log.debug(String.format("Executed graphQL query: %s, result: %s", - queryJson, - executionResult.toSpecification())); - } - - /* - * Format & Return Response - */ - try { - submitMetrics(executionResult); - // Remove tracing from response to reduce bulk, not used by the frontend - executionResult.getExtensions().remove("tracing"); - String responseBodyStr = new ObjectMapper().writeValueAsString(executionResult.toSpecification()); - return new ResponseEntity<>(responseBodyStr, HttpStatus.OK); - } catch (IllegalArgumentException | JsonProcessingException e) { - log.error(String.format("Failed to convert execution result %s into a JsonNode", executionResult.toSpecification())); - return new ResponseEntity<>(HttpStatus.SERVICE_UNAVAILABLE); - } - }); + SpringQueryContext context = new SpringQueryContext(true, authentication, _authorizerChain); + + return CompletableFuture.supplyAsync( + () -> { + /* + * Execute GraphQL Query + */ + ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context); + + if (executionResult.getErrors().size() != 0) { + // There were GraphQL errors. Report in error logs. + log.error( + String.format( + "Errors while executing graphQL query: %s, result: %s, errors: %s", + queryJson, executionResult.toSpecification(), executionResult.getErrors())); + } else { + log.debug( + String.format( + "Executed graphQL query: %s, result: %s", + queryJson, executionResult.toSpecification())); + } + + /* + * Format & Return Response + */ + try { + submitMetrics(executionResult); + // Remove tracing from response to reduce bulk, not used by the frontend + executionResult.getExtensions().remove("tracing"); + String responseBodyStr = + new ObjectMapper().writeValueAsString(executionResult.toSpecification()); + return new ResponseEntity<>(responseBodyStr, HttpStatus.OK); + } catch (IllegalArgumentException | JsonProcessingException e) { + log.error( + String.format( + "Failed to convert execution result %s into a JsonNode", + executionResult.toSpecification())); + return new ResponseEntity<>(HttpStatus.SERVICE_UNAVAILABLE); + } + }); } @GetMapping("/graphql") - void getGraphQL(HttpServletRequest request, HttpServletResponse response) throws HttpRequestMethodNotSupportedException { + void getGraphQL(HttpServletRequest request, HttpServletResponse response) + throws HttpRequestMethodNotSupportedException { log.info("GET on GraphQL API is not supported"); throw new HttpRequestMethodNotSupportedException("GET"); } private void observeErrors(ExecutionResult executionResult) { - executionResult.getErrors().forEach(graphQLError -> { - if (graphQLError instanceof DataHubGraphQLError) { - DataHubGraphQLError dhGraphQLError = (DataHubGraphQLError) graphQLError; - int errorCode = dhGraphQLError.getErrorCode(); - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "errorCode", Integer.toString(errorCode))).inc(); - } else { - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "errorType", graphQLError.getErrorType().toString())).inc(); - } - }); + executionResult + .getErrors() + .forEach( + graphQLError -> { + if (graphQLError instanceof DataHubGraphQLError) { + DataHubGraphQLError dhGraphQLError = (DataHubGraphQLError) graphQLError; + int errorCode = dhGraphQLError.getErrorCode(); + MetricUtils.get() + .counter( + MetricRegistry.name( + this.getClass(), 
"errorCode", Integer.toString(errorCode))) + .inc(); + } else { + MetricUtils.get() + .counter( + MetricRegistry.name( + this.getClass(), "errorType", graphQLError.getErrorType().toString())) + .inc(); + } + }); if (executionResult.getErrors().size() != 0) { MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "error")).inc(); } @@ -162,14 +180,22 @@ private void submitMetrics(ExecutionResult executionResult) { long totalDuration = TimeUnit.NANOSECONDS.toMillis((long) tracingMap.get("duration")); Map<String, Object> executionData = (Map<String, Object>) tracingMap.get("execution"); // Extract top level resolver, parent is top level query. Assumes single query per call. - List<Map<String, Object>> resolvers = (List<Map<String, Object>>) executionData.get("resolvers"); - Optional<Map<String, Object>> - parentResolver = resolvers.stream().filter(resolver -> resolver.get("parentType").equals("Query")).findFirst(); - String fieldName = parentResolver.isPresent() ? (String) parentResolver.get().get("fieldName") : "UNKNOWN"; - MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), fieldName)).update(totalDuration); + List<Map<String, Object>> resolvers = + (List<Map<String, Object>>) executionData.get("resolvers"); + Optional<Map<String, Object>> parentResolver = + resolvers.stream() + .filter(resolver -> resolver.get("parentType").equals("Query")) + .findFirst(); + String fieldName = + parentResolver.isPresent() ? (String) parentResolver.get().get("fieldName") : "UNKNOWN"; + MetricUtils.get() + .histogram(MetricRegistry.name(this.getClass(), fieldName)) + .update(totalDuration); } } catch (Exception e) { - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "submitMetrics", "exception")).inc(); + MetricUtils.get() + .counter(MetricRegistry.name(this.getClass(), "submitMetrics", "exception")) + .inc(); log.error("Unable to submit metrics for GraphQL call.", e); } } diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java index 6dd71d84d6dc3..35636bf07eb10 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java @@ -1,5 +1,7 @@ package com.datahub.graphql; +import static java.nio.charset.StandardCharsets.*; + import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; @@ -14,9 +16,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static java.nio.charset.StandardCharsets.*; - - @Slf4j @Controller public class GraphiQLController { @@ -37,4 +36,4 @@ public GraphiQLController() { CompletableFuture<String> graphiQL() { return CompletableFuture.supplyAsync(() -> this.graphiqlHtml); } -} \ No newline at end of file +} diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java index a1ddc5a013f7d..379521eda0c1a 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java @@ -4,14 +4,16 @@ import com.datahub.plugins.auth.authorization.Authorizer; import 
com.linkedin.datahub.graphql.QueryContext; - public class SpringQueryContext implements QueryContext { private final boolean isAuthenticated; private final Authentication authentication; private final Authorizer authorizer; - public SpringQueryContext(final boolean isAuthenticated, final Authentication authentication, final Authorizer authorizer) { + public SpringQueryContext( + final boolean isAuthenticated, + final Authentication authentication, + final Authorizer authorizer) { this.isAuthenticated = isAuthenticated; this.authentication = authentication; this.authorizer = authorizer; diff --git a/metadata-service/openapi-analytics-servlet/build.gradle b/metadata-service/openapi-analytics-servlet/build.gradle index 6475d215db5f5..8ecd48a03e09d 100644 --- a/metadata-service/openapi-analytics-servlet/build.gradle +++ b/metadata-service/openapi-analytics-servlet/build.gradle @@ -63,5 +63,3 @@ task openApiGenerate(type: GenerateSwaggerCode) { ] } tasks.getByName("compileJava").dependsOn(openApiGenerate) - -checkstyleMain.exclude '**/generated/**' \ No newline at end of file diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java index 7816e81fe4a6d..4322dc08887a5 100644 --- a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java @@ -7,8 +7,8 @@ @Configuration public class OpenapiAnalyticsConfig { - @Bean - public DatahubUsageEventsApiDelegate datahubUsageEventsApiDelegate() { - return new DatahubUsageEventsImpl(); - } + @Bean + public DatahubUsageEventsApiDelegate datahubUsageEventsApiDelegate() { + return new DatahubUsageEventsImpl(); + } } diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java index 99e47f32555df..0cedfc22ded6b 100644 --- a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java @@ -1,48 +1,50 @@ package io.datahubproject.openapi.delegates; -import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; -import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.ResponseEntity; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.AuthorizerChain; -import org.springframework.beans.factory.annotation.Value; import com.google.common.collect.ImmutableList; -import io.datahubproject.openapi.exception.UnauthorizedException; -import com.datahub.authorization.AuthUtil; import com.linkedin.metadata.authorization.PoliciesConfig; - -import java.util.Optional; +import 
com.linkedin.metadata.search.elasticsearch.ElasticSearchService; +import io.datahubproject.openapi.exception.UnauthorizedException; +import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; import java.util.Objects; +import java.util.Optional; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.ResponseEntity; public class DatahubUsageEventsImpl implements DatahubUsageEventsApiDelegate { - @Autowired - private ElasticSearchService _searchService; - @Autowired - private AuthorizerChain _authorizationChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean _restApiAuthorizationEnabled; + @Autowired private ElasticSearchService _searchService; + @Autowired private AuthorizerChain _authorizationChain; - final public static String DATAHUB_USAGE_INDEX = "datahub_usage_event"; + @Value("${authorization.restApiAuthorization:false}") + private boolean _restApiAuthorizationEnabled; - @Override - public ResponseEntity<String> raw(String body) { - Authentication authentication = AuthenticationContext.getAuthentication(); - checkAnalyticsAuthorized(authentication); - return ResponseEntity.of(_searchService.raw(DATAHUB_USAGE_INDEX, body).map(Objects::toString)); - } + public static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; + + @Override + public ResponseEntity<String> raw(String body) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAnalyticsAuthorized(authentication); + return ResponseEntity.of(_searchService.raw(DATAHUB_USAGE_INDEX, body).map(Objects::toString)); + } - private void checkAnalyticsAuthorized(Authentication authentication) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); + private void checkAnalyticsAuthorized(Authentication authentication) { + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); - if (_restApiAuthorizationEnabled && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); - } + if (_restApiAuthorizationEnabled + && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); } + } } diff --git a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java index 83b1b3f87c724..eebef4c07f7b2 100644 --- a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java +++ b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java @@ -1,5 +1,10 @@ package io.datahubproject.openapi.config; +import static io.datahubproject.openapi.delegates.DatahubUsageEventsImpl.DATAHUB_USAGE_INDEX; +import static 
org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -7,42 +12,36 @@ import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; -import org.opensearch.action.search.SearchResponse; +import java.io.IOException; +import java.util.Optional; import org.mockito.Mockito; +import org.opensearch.action.search.SearchResponse; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Primary; -import java.io.IOException; -import java.util.Optional; - -import static io.datahubproject.openapi.delegates.DatahubUsageEventsImpl.DATAHUB_USAGE_INDEX; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration public class OpenAPIAnalyticsTestConfiguration { - @Bean - @Primary - public ElasticSearchService datahubUsageEventsApiDelegate() throws IOException { - ElasticSearchService elasticSearchService = mock(ElasticSearchService.class); - SearchResponse mockResp = mock(SearchResponse.class); - when(elasticSearchService.raw(eq(DATAHUB_USAGE_INDEX), anyString())) - .thenReturn(Optional.of(mockResp)); - return elasticSearchService; - } - - @Bean - public AuthorizerChain authorizerChain() { - AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - - Authentication authentication = Mockito.mock(Authentication.class); - when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); - AuthenticationContext.setAuthentication(authentication); - - return authorizerChain; - } + @Bean + @Primary + public ElasticSearchService datahubUsageEventsApiDelegate() throws IOException { + ElasticSearchService elasticSearchService = mock(ElasticSearchService.class); + SearchResponse mockResp = mock(SearchResponse.class); + when(elasticSearchService.raw(eq(DATAHUB_USAGE_INDEX), anyString())) + .thenReturn(Optional.of(mockResp)); + return elasticSearchService; + } + + @Bean + public AuthorizerChain authorizerChain() { + AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); + + Authentication authentication = Mockito.mock(Authentication.class); + when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); + when(authorizerChain.authorize(any())) + .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); + AuthenticationContext.setAuthentication(authentication); + + return authorizerChain; + } } diff --git a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java index af2a24391fea8..d445f321132ef 100644 --- a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java +++ b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.delegates; 
+import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import io.datahubproject.openapi.config.OpenAPIAnalyticsTestConfiguration; import io.datahubproject.openapi.config.SpringWebConfig; @@ -14,31 +17,27 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - - @SpringBootTest(classes = {SpringWebConfig.class}) @ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"}) @Import({DatahubUsageEventsImpl.class, OpenAPIAnalyticsTestConfiguration.class}) public class DatahubUsageEventsImplTest extends AbstractTestNGSpringContextTests { - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } - @Autowired - private DatahubUsageEventsApiController analyticsController; + @Autowired private DatahubUsageEventsApiController analyticsController; - @Test - public void initTest() { - assertNotNull(analyticsController); - } + @Test + public void initTest() { + assertNotNull(analyticsController); + } - @Test - public void analyticsControllerTest() { - ResponseEntity<String> resp = analyticsController.raw(""); - assertEquals(resp.getStatusCode(), HttpStatus.OK); - } + @Test + public void analyticsControllerTest() { + ResponseEntity<String> resp = analyticsController.raw(""); + assertEquals(resp.getStatusCode(), HttpStatus.OK); + } } diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle index 7f9c472b91fac..dbec469085b07 100644 --- a/metadata-service/openapi-entity-servlet/build.gradle +++ b/metadata-service/openapi-entity-servlet/build.gradle @@ -77,6 +77,4 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents, 'delegatePattern' : "false" ] } -tasks.getByName("compileJava").dependsOn(openApiGenerate) - -checkstyleMain.exclude '**/generated/**' \ No newline at end of file +tasks.getByName("compileJava").dependsOn(openApiGenerate) \ No newline at end of file diff --git a/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java b/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java index ef36d8aa38785..2cd2935496898 100644 --- a/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java +++ b/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java @@ -1,43 +1,41 @@ package io.datahubproject; import io.swagger.codegen.v3.generators.java.SpringCodegen; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.Map; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CustomSpringCodegen extends SpringCodegen { - public CustomSpringCodegen() { - super(); - } + public CustomSpringCodegen() { + super(); + } - @Override - public String getName() { - return "custom-spring"; - } + @Override + public String getName() { + return "custom-spring"; + } - @Override - public Map<String, Object> 
postProcessOperations(Map<String, Object> objs) { - Map<String, Object> result = super.postProcessOperations(objs); - List<Map<String, String>> imports = (List) objs.get("imports"); + @Override + public Map<String, Object> postProcessOperations(Map<String, Object> objs) { + Map<String, Object> result = super.postProcessOperations(objs); + List<Map<String, String>> imports = (List) objs.get("imports"); - for (Map<String, String> importMap : imports) { - for (String type : importMap.values()) { - if (type.contains("EntityRequest") && !type.contains(".Scroll")) { - additionalProperties.put("requestClass", type); - } - if (type.contains("EntityResponse") && !type.contains(".Scroll")) { - additionalProperties.put("responseClass", type); - } - if (type.contains("EntityResponse") && type.contains(".Scroll")) { - additionalProperties.put("scrollResponseClass", type); - } - } + for (Map<String, String> importMap : imports) { + for (String type : importMap.values()) { + if (type.contains("EntityRequest") && !type.contains(".Scroll")) { + additionalProperties.put("requestClass", type); } - - return result; + if (type.contains("EntityResponse") && !type.contains(".Scroll")) { + additionalProperties.put("responseClass", type); + } + if (type.contains("EntityResponse") && type.contains(".Scroll")) { + additionalProperties.put("scrollResponseClass", type); + } + } } + + return result; + } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java index 207c2284e2673..31cd3e6c69e50 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java @@ -1,9 +1,18 @@ package io.datahubproject.openapi.delegates; -import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.entity.EntityService; +import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.AuthorizerChain; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.authorization.EntitySpec; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; @@ -13,7 +22,6 @@ import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.dto.UrnResponseMap; import io.datahubproject.openapi.entities.EntitiesController; -import com.datahub.authorization.AuthorizerChain; import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.generated.BrowsePathsV2AspectRequestV2; import io.datahubproject.openapi.generated.BrowsePathsV2AspectResponseV2; @@ -43,18 +51,6 @@ import io.datahubproject.openapi.generated.StatusAspectRequestV2; import io.datahubproject.openapi.generated.StatusAspectResponseV2; import 
io.datahubproject.openapi.util.OpenApiEntitiesUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.linkedin.metadata.authorization.PoliciesConfig; -import com.google.common.collect.ImmutableList; -import com.datahub.authorization.AuthUtil; -import org.springframework.http.HttpEntity; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; - -import javax.validation.Valid; -import javax.validation.constraints.Min; import java.net.URISyntaxException; import java.util.List; import java.util.Map; @@ -62,544 +58,678 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; +import javax.validation.Valid; +import javax.validation.constraints.Min; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; public class EntityApiDelegateImpl<I, O, S> { - final private EntityRegistry _entityRegistry; - final private EntityService _entityService; - final private SearchService _searchService; - final private EntitiesController _v1Controller; - final private AuthorizerChain _authorizationChain; - - final private boolean _restApiAuthorizationEnabled; - final private Class<I> _reqClazz; - final private Class<O> _respClazz; - final private Class<S> _scrollRespClazz; - - final private StackWalker walker = StackWalker.getInstance(); - - public EntityApiDelegateImpl(EntityService entityService, SearchService searchService, EntitiesController entitiesController, - boolean restApiAuthorizationEnabled, AuthorizerChain authorizationChain, - Class<I> reqClazz, Class<O> respClazz, Class<S> scrollRespClazz) { - this._entityService = entityService; - this._searchService = searchService; - this._entityRegistry = entityService.getEntityRegistry(); - this._v1Controller = entitiesController; - this._authorizationChain = authorizationChain; - this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; - this._reqClazz = reqClazz; - this._respClazz = respClazz; - this._scrollRespClazz = scrollRespClazz; - } - - public ResponseEntity<O> get(String urn, Boolean systemMetadata, List<String> aspects) { - String[] requestedAspects = Optional.ofNullable(aspects).map(asp -> asp.stream().distinct().toArray(String[]::new)).orElse(null); - ResponseEntity<UrnResponseMap> result = _v1Controller.getEntities(new String[]{urn}, requestedAspects); - return ResponseEntity.of(OpenApiEntitiesUtil.convertEntity(Optional.ofNullable(result) - .map(HttpEntity::getBody).orElse(null), _respClazz, systemMetadata)); - } - - public ResponseEntity<List<O>> create(List<I> body) { - List<UpsertAspectRequest> aspects = body.stream() - .flatMap(b -> OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry).stream()) - .collect(Collectors.toList()); - _v1Controller.postEntities(aspects); - List<O> responses = body.stream() - .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) - .collect(Collectors.toList()); - return ResponseEntity.ok(responses); - } - - public ResponseEntity<Void> delete(String urn) { - _v1Controller.deleteEntities(new String[]{urn}, false); - return new ResponseEntity<>(HttpStatus.OK); - } - - public ResponseEntity<Void> head(String urn) { - try { - Urn entityUrn = Urn.createFromString(urn); - if 
(_entityService.exists(entityUrn)) { - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } else { - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public <A> ResponseEntity<A> getAspect(String urn, Boolean systemMetadata, String aspect, Class<O> entityRespClass, - Class<A> aspectRespClazz) { - String[] requestedAspects = new String[]{aspect}; - ResponseEntity<UrnResponseMap> result = _v1Controller.getEntities(new String[]{urn}, requestedAspects); - return ResponseEntity.of(OpenApiEntitiesUtil.convertAspect(result.getBody(), aspect, entityRespClass, aspectRespClazz, - systemMetadata)); - } - - public <AQ, AR> ResponseEntity<AR> createAspect(String urn, String aspectName, AQ body, Class<AQ> reqClazz, Class<AR> respClazz) { - UpsertAspectRequest aspectUpsert = OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); - _v1Controller.postEntities(Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList())); - AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); - return ResponseEntity.ok(response); - } - - public ResponseEntity<Void> headAspect(String urn, String aspect) { - try { - Urn entityUrn = Urn.createFromString(urn); - if (_entityService.exists(entityUrn, aspect)) { - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } else { - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public ResponseEntity<Void> deleteAspect(String urn, String aspect) { - _entityService.deleteAspect(urn, aspect, Map.of(), false); - _v1Controller.deleteEntities(new String[]{urn}, false); - return new ResponseEntity<>(HttpStatus.OK); - } - - public ResponseEntity<DomainsAspectResponseV2> createDomains(DomainsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DomainsAspectRequestV2.class, DomainsAspectResponseV2.class); - } - - public ResponseEntity<GlobalTagsAspectResponseV2> createGlobalTags(GlobalTagsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, GlobalTagsAspectRequestV2.class, GlobalTagsAspectResponseV2.class); - } - - public ResponseEntity<GlossaryTermsAspectResponseV2> createGlossaryTerms(GlossaryTermsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, GlossaryTermsAspectRequestV2.class, GlossaryTermsAspectResponseV2.class); - } - - public ResponseEntity<OwnershipAspectResponseV2> createOwnership(OwnershipAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, OwnershipAspectRequestV2.class, OwnershipAspectResponseV2.class); - } - - public ResponseEntity<StatusAspectResponseV2> createStatus(StatusAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, 
methodNameToAspectName(methodName), body, StatusAspectRequestV2.class, StatusAspectResponseV2.class); - } - - public ResponseEntity<Void> deleteDomains(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteGlobalTags(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteGlossaryTerms(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteOwnership(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteStatus(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DomainsAspectResponseV2> getDomains(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DomainsAspectResponseV2.class); - } - - public ResponseEntity<GlobalTagsAspectResponseV2> getGlobalTags(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - GlobalTagsAspectResponseV2.class); - } - - public ResponseEntity<GlossaryTermsAspectResponseV2> getGlossaryTerms(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - GlossaryTermsAspectResponseV2.class); - } - - public ResponseEntity<OwnershipAspectResponseV2> getOwnership(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - OwnershipAspectResponseV2.class); - } - - public ResponseEntity<StatusAspectResponseV2> getStatus(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - StatusAspectResponseV2.class); - } - - public ResponseEntity<Void> headDomains(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headGlobalTags(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return 
headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headGlossaryTerms(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headOwnership(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headStatus(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - protected static String methodNameToAspectName(String methodName) { - return toLowerFirst(methodName.replaceFirst("^(get|head|delete|create)", "")); - } - - public ResponseEntity<Void> deleteDeprecation(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteBrowsePathsV2(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DeprecationAspectResponseV2> getDeprecation(String urn, @Valid Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DeprecationAspectResponseV2.class); - } - - public ResponseEntity<Void> headDeprecation(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DeprecationAspectResponseV2> createDeprecation(@Valid DeprecationAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DeprecationAspectRequestV2.class, - DeprecationAspectResponseV2.class); - } - - public ResponseEntity<Void> headBrowsePathsV2(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<BrowsePathsV2AspectResponseV2> getBrowsePathsV2(String urn, @Valid Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - BrowsePathsV2AspectResponseV2.class); - } - - public ResponseEntity<BrowsePathsV2AspectResponseV2> createBrowsePathsV2(@Valid BrowsePathsV2AspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, BrowsePathsV2AspectRequestV2.class, - BrowsePathsV2AspectResponseV2.class); - } - - public ResponseEntity<S> scroll(@Valid Boolean systemMetadata, 
@Valid List<String> aspects, @Min(1) @Valid Integer count, - @Valid String scrollId, @Valid List<String> sort, @Valid SortOrder sortOrder, @Valid String query) { - - Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); - checkScrollAuthorized(authentication, entitySpec); - - // TODO multi-field sort - SortCriterion sortCriterion = new SortCriterion(); - sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); - sortCriterion.setOrder(com.linkedin.metadata.query.filter.SortOrder.valueOf(Optional.ofNullable(sortOrder) - .map(Enum::name).orElse("ASCENDING"))); - - SearchFlags searchFlags = new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true); - - ScrollResult result = _searchService.scrollAcrossEntities( - List.of(entitySpec.getName()), - query, null, sortCriterion, scrollId, null, count, searchFlags); - - String[] urns = result.getEntities().stream() - .map(SearchEntity::getEntity) - .map(Urn::toString) - .toArray(String[]::new); - String[] requestedAspects = Optional.ofNullable(aspects) - .map(asp -> asp.stream().distinct().toArray(String[]::new)) - .orElse(null); - List<O> entities = Optional.ofNullable(_v1Controller.getEntities(urns, requestedAspects).getBody()) - .map(body -> body.getResponses().entrySet()) - .map(entries -> OpenApiEntitiesUtil.convertEntities(entries, _respClazz, systemMetadata)) - .orElse(List.of()); - - return ResponseEntity.of(OpenApiEntitiesUtil.convertToScrollResponse(_scrollRespClazz, result.getScrollId(), entities)); - } - - private void checkScrollAuthorized(Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); - - List<Optional<EntitySpec>> resourceSpecs = List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); - if (_restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); - } - } - - public ResponseEntity<DatasetPropertiesAspectResponseV2> createDatasetProperties(@Valid DatasetPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DatasetPropertiesAspectRequestV2.class, - DatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> createEditableDatasetProperties( - @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, EditableDatasetPropertiesAspectRequestV2.class, - EditableDatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<InstitutionalMemoryAspectResponseV2> createInstitutionalMemory( - @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - 
.map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, InstitutionalMemoryAspectRequestV2.class, - InstitutionalMemoryAspectResponseV2.class); - } - - public ResponseEntity<ChartInfoAspectResponseV2> createChartInfo(@Valid ChartInfoAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, ChartInfoAspectRequestV2.class, - ChartInfoAspectResponseV2.class); - } - - public ResponseEntity<EditableChartPropertiesAspectResponseV2> createEditableChartProperties( - @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, EditableChartPropertiesAspectRequestV2.class, - EditableChartPropertiesAspectResponseV2.class); - } - - public ResponseEntity<DataProductPropertiesAspectResponseV2> createDataProductProperties( - @Valid DataProductPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DataProductPropertiesAspectRequestV2.class, - DataProductPropertiesAspectResponseV2.class); - } - - public ResponseEntity<Void> deleteDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteEditableDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteInstitutionalMemory(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteChartInfo(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DatasetPropertiesAspectResponseV2> getDatasetProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> getEditableDatasetProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - EditableDatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<InstitutionalMemoryAspectResponseV2> getInstitutionalMemory(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - 
return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - InstitutionalMemoryAspectResponseV2.class); - } - - public ResponseEntity<EditableChartPropertiesAspectResponseV2> getEditableChartProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, EditableChartPropertiesAspectResponseV2.class); - } - - public ResponseEntity<ChartInfoAspectResponseV2> getChartInfo(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - ChartInfoAspectResponseV2.class); - } - - public ResponseEntity<DataProductPropertiesAspectResponseV2> getDataProductProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DataProductPropertiesAspectResponseV2.class); - } - - public ResponseEntity<Void> headDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headEditableDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headInstitutionalMemory(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headDataProductProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headEditableChartProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headChartInfo(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteEditableChartProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteDataProductProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } + private final EntityRegistry _entityRegistry; + private final EntityService _entityService; + private final SearchService _searchService; + private final EntitiesController _v1Controller; + private final AuthorizerChain 
_authorizationChain; + + private final boolean _restApiAuthorizationEnabled; + private final Class<I> _reqClazz; + private final Class<O> _respClazz; + private final Class<S> _scrollRespClazz; + + private final StackWalker walker = StackWalker.getInstance(); + + public EntityApiDelegateImpl( + EntityService entityService, + SearchService searchService, + EntitiesController entitiesController, + boolean restApiAuthorizationEnabled, + AuthorizerChain authorizationChain, + Class<I> reqClazz, + Class<O> respClazz, + Class<S> scrollRespClazz) { + this._entityService = entityService; + this._searchService = searchService; + this._entityRegistry = entityService.getEntityRegistry(); + this._v1Controller = entitiesController; + this._authorizationChain = authorizationChain; + this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; + this._reqClazz = reqClazz; + this._respClazz = respClazz; + this._scrollRespClazz = scrollRespClazz; + } + + public ResponseEntity<O> get(String urn, Boolean systemMetadata, List<String> aspects) { + String[] requestedAspects = + Optional.ofNullable(aspects) + .map(asp -> asp.stream().distinct().toArray(String[]::new)) + .orElse(null); + ResponseEntity<UrnResponseMap> result = + _v1Controller.getEntities(new String[] {urn}, requestedAspects); + return ResponseEntity.of( + OpenApiEntitiesUtil.convertEntity( + Optional.ofNullable(result).map(HttpEntity::getBody).orElse(null), + _respClazz, + systemMetadata)); + } + + public ResponseEntity<List<O>> create(List<I> body) { + List<UpsertAspectRequest> aspects = + body.stream() + .flatMap( + b -> + OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry) + .stream()) + .collect(Collectors.toList()); + _v1Controller.postEntities(aspects); + List<O> responses = + body.stream() + .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) + .collect(Collectors.toList()); + return ResponseEntity.ok(responses); + } + + public ResponseEntity<Void> delete(String urn) { + _v1Controller.deleteEntities(new String[] {urn}, false); + return new ResponseEntity<>(HttpStatus.OK); + } + + public ResponseEntity<Void> head(String urn) { + try { + Urn entityUrn = Urn.createFromString(urn); + if (_entityService.exists(entityUrn)) { + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } else { + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + } + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public <A> ResponseEntity<A> getAspect( + String urn, + Boolean systemMetadata, + String aspect, + Class<O> entityRespClass, + Class<A> aspectRespClazz) { + String[] requestedAspects = new String[] {aspect}; + ResponseEntity<UrnResponseMap> result = + _v1Controller.getEntities(new String[] {urn}, requestedAspects); + return ResponseEntity.of( + OpenApiEntitiesUtil.convertAspect( + result.getBody(), aspect, entityRespClass, aspectRespClazz, systemMetadata)); + } + + public <AQ, AR> ResponseEntity<AR> createAspect( + String urn, String aspectName, AQ body, Class<AQ> reqClazz, Class<AR> respClazz) { + UpsertAspectRequest aspectUpsert = + OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); + _v1Controller.postEntities( + Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList())); + AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); + return ResponseEntity.ok(response); + } + + public ResponseEntity<Void> headAspect(String urn, String aspect) { + try { + Urn entityUrn = Urn.createFromString(urn); + if 
(_entityService.exists(entityUrn, aspect)) { + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } else { + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + } + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public ResponseEntity<Void> deleteAspect(String urn, String aspect) { + _entityService.deleteAspect(urn, aspect, Map.of(), false); + _v1Controller.deleteEntities(new String[] {urn}, false); + return new ResponseEntity<>(HttpStatus.OK); + } + + public ResponseEntity<DomainsAspectResponseV2> createDomains( + DomainsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DomainsAspectRequestV2.class, + DomainsAspectResponseV2.class); + } + + public ResponseEntity<GlobalTagsAspectResponseV2> createGlobalTags( + GlobalTagsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + GlobalTagsAspectRequestV2.class, + GlobalTagsAspectResponseV2.class); + } + + public ResponseEntity<GlossaryTermsAspectResponseV2> createGlossaryTerms( + GlossaryTermsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + GlossaryTermsAspectRequestV2.class, + GlossaryTermsAspectResponseV2.class); + } + + public ResponseEntity<OwnershipAspectResponseV2> createOwnership( + OwnershipAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + OwnershipAspectRequestV2.class, + OwnershipAspectResponseV2.class); + } + + public ResponseEntity<StatusAspectResponseV2> createStatus( + StatusAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + StatusAspectRequestV2.class, + StatusAspectResponseV2.class); + } + + public ResponseEntity<Void> deleteDomains(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteGlobalTags(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteGlossaryTerms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteOwnership(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteStatus(String urn) { + String methodName = + walker.walk(frames -> 
frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DomainsAspectResponseV2> getDomains(String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DomainsAspectResponseV2.class); + } + + public ResponseEntity<GlobalTagsAspectResponseV2> getGlobalTags( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + GlobalTagsAspectResponseV2.class); + } + + public ResponseEntity<GlossaryTermsAspectResponseV2> getGlossaryTerms( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + GlossaryTermsAspectResponseV2.class); + } + + public ResponseEntity<OwnershipAspectResponseV2> getOwnership( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + OwnershipAspectResponseV2.class); + } + + public ResponseEntity<StatusAspectResponseV2> getStatus(String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + StatusAspectResponseV2.class); + } + + public ResponseEntity<Void> headDomains(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headGlobalTags(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headGlossaryTerms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headOwnership(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headStatus(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + protected static String methodNameToAspectName(String methodName) { + return toLowerFirst(methodName.replaceFirst("^(get|head|delete|create)", "")); + } + + public ResponseEntity<Void> deleteDeprecation(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + 
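// Each get/head/delete/create override below resolves the aspect it operates on from its own method name via methodNameToAspectName (e.g. "deleteBrowsePathsV2" -> "browsePathsV2"), +  // which is why the generated endpoint bodies are intentionally identical boilerplate. +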
public ResponseEntity<Void> deleteBrowsePathsV2(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DeprecationAspectResponseV2> getDeprecation( + String urn, @Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DeprecationAspectResponseV2.class); + } + + public ResponseEntity<Void> headDeprecation(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DeprecationAspectResponseV2> createDeprecation( + @Valid DeprecationAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DeprecationAspectRequestV2.class, + DeprecationAspectResponseV2.class); + } + + public ResponseEntity<Void> headBrowsePathsV2(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<BrowsePathsV2AspectResponseV2> getBrowsePathsV2( + String urn, @Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + BrowsePathsV2AspectResponseV2.class); + } + + public ResponseEntity<BrowsePathsV2AspectResponseV2> createBrowsePathsV2( + @Valid BrowsePathsV2AspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + BrowsePathsV2AspectRequestV2.class, + BrowsePathsV2AspectResponseV2.class); + } + + public ResponseEntity<S> scroll( + @Valid Boolean systemMetadata, + @Valid List<String> aspects, + @Min(1) @Valid Integer count, + @Valid String scrollId, + @Valid List<String> sort, + @Valid SortOrder sortOrder, + @Valid String query) { + + Authentication authentication = AuthenticationContext.getAuthentication(); + com.linkedin.metadata.models.EntitySpec entitySpec = + OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); + checkScrollAuthorized(authentication, entitySpec); + + // TODO multi-field sort + SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); + sortCriterion.setOrder( + com.linkedin.metadata.query.filter.SortOrder.valueOf( + Optional.ofNullable(sortOrder).map(Enum::name).orElse("ASCENDING"))); + + SearchFlags searchFlags = + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + + ScrollResult result = + _searchService.scrollAcrossEntities( + List.of(entitySpec.getName()), + query, + null, + sortCriterion, + scrollId, + null, + count, + searchFlags); + + String[] urns = + result.getEntities().stream() + .map(SearchEntity::getEntity) + .map(Urn::toString) + 
.toArray(String[]::new); + String[] requestedAspects = + Optional.ofNullable(aspects) + .map(asp -> asp.stream().distinct().toArray(String[]::new)) + .orElse(null); + List<O> entities = + Optional.ofNullable(_v1Controller.getEntities(urns, requestedAspects).getBody()) + .map(body -> body.getResponses().entrySet()) + .map( + entries -> OpenApiEntitiesUtil.convertEntities(entries, _respClazz, systemMetadata)) + .orElse(List.of()); + + return ResponseEntity.of( + OpenApiEntitiesUtil.convertToScrollResponse( + _scrollRespClazz, result.getScrollId(), entities)); + } + + private void checkScrollAuthorized( + Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); + + List<Optional<EntitySpec>> resourceSpecs = + List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); + if (_restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); + } + } + + public ResponseEntity<DatasetPropertiesAspectResponseV2> createDatasetProperties( + @Valid DatasetPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DatasetPropertiesAspectRequestV2.class, + DatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> createEditableDatasetProperties( + @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + EditableDatasetPropertiesAspectRequestV2.class, + EditableDatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity<InstitutionalMemoryAspectResponseV2> createInstitutionalMemory( + @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + InstitutionalMemoryAspectRequestV2.class, + InstitutionalMemoryAspectResponseV2.class); + } + + public ResponseEntity<ChartInfoAspectResponseV2> createChartInfo( + @Valid ChartInfoAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + ChartInfoAspectRequestV2.class, + ChartInfoAspectResponseV2.class); + } + + public ResponseEntity<EditableChartPropertiesAspectResponseV2> createEditableChartProperties( + @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + EditableChartPropertiesAspectRequestV2.class, + EditableChartPropertiesAspectResponseV2.class); + } + + public ResponseEntity<DataProductPropertiesAspectResponseV2> 
createDataProductProperties( + @Valid DataProductPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DataProductPropertiesAspectRequestV2.class, + DataProductPropertiesAspectResponseV2.class); + } + + public ResponseEntity<Void> deleteDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteEditableDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteInstitutionalMemory(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteChartInfo(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DatasetPropertiesAspectResponseV2> getDatasetProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> getEditableDatasetProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + EditableDatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity<InstitutionalMemoryAspectResponseV2> getInstitutionalMemory( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + InstitutionalMemoryAspectResponseV2.class); + } + + public ResponseEntity<EditableChartPropertiesAspectResponseV2> getEditableChartProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + EditableChartPropertiesAspectResponseV2.class); + } + + public ResponseEntity<ChartInfoAspectResponseV2> getChartInfo( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + ChartInfoAspectResponseV2.class); + } + + public ResponseEntity<DataProductPropertiesAspectResponseV2> getDataProductProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> 
frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DataProductPropertiesAspectResponseV2.class); + } + + public ResponseEntity<Void> headDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headEditableDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headInstitutionalMemory(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headDataProductProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headEditableChartProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headChartInfo(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteEditableChartProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteDataProductProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java index 205d401dd956d..317f9311003e5 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.util; +import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; +import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -9,8 +12,6 @@ import io.datahubproject.openapi.generated.EntityResponse; import io.datahubproject.openapi.generated.OneOfGenericAspectValue; import io.datahubproject.openapi.generated.SystemMetadata; -import lombok.extern.slf4j.Slf4j; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; @@ -20,260 +21,338 @@ import java.util.Optional; import java.util.Set; import 
java.util.stream.Collectors; - -import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; -import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class OpenApiEntitiesUtil { - private final static String MODEL_VERSION = "V2"; - private final static String REQUEST_SUFFIX = "Request" + MODEL_VERSION; - private final static String RESPONSE_SUFFIX = "Response" + MODEL_VERSION; - - private final static String ASPECT_REQUEST_SUFFIX = "Aspect" + REQUEST_SUFFIX; - private final static String ASPECT_RESPONSE_SUFFIX = "Aspect" + RESPONSE_SUFFIX; - private final static String ENTITY_REQUEST_SUFFIX = "Entity" + REQUEST_SUFFIX; - private final static String ENTITY_RESPONSE_SUFFIX = "Entity" + RESPONSE_SUFFIX; + private static final String MODEL_VERSION = "V2"; + private static final String REQUEST_SUFFIX = "Request" + MODEL_VERSION; + private static final String RESPONSE_SUFFIX = "Response" + MODEL_VERSION; + + private static final String ASPECT_REQUEST_SUFFIX = "Aspect" + REQUEST_SUFFIX; + private static final String ASPECT_RESPONSE_SUFFIX = "Aspect" + RESPONSE_SUFFIX; + private static final String ENTITY_REQUEST_SUFFIX = "Entity" + REQUEST_SUFFIX; + private static final String ENTITY_RESPONSE_SUFFIX = "Entity" + RESPONSE_SUFFIX; + + private OpenApiEntitiesUtil() {} + + private static final ReflectionCache REFLECT = + ReflectionCache.builder().basePackage("io.datahubproject.openapi.generated").build(); + + public static <T> UpsertAspectRequest convertAspectToUpsert( + String entityUrn, Object aspectRequest, Class<T> aspectRequestClazz) { + try { + UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder(); + builder.entityType(Urn.createFromString(entityUrn).getEntityType()); + builder.entityUrn(entityUrn); + + // i.e. GlobalTagsAspectRequestV2 + if (aspectRequest != null) { + // i.e. GlobalTags + Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); + Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest); + + if (aspect != null) { + builder.aspect((OneOfGenericAspectValue) aspect); + return builder.build(); + } + } - private OpenApiEntitiesUtil() { + return null; + } catch (Exception e) { + log.error("Error reflecting urn: {} aspect: {}", entityUrn, aspectRequestClazz.getName()); + throw new RuntimeException(e); } - - private final static ReflectionCache REFLECT = ReflectionCache.builder() - .basePackage("io.datahubproject.openapi.generated") - .build(); - - - public static <T> UpsertAspectRequest convertAspectToUpsert(String entityUrn, Object aspectRequest, Class<T> aspectRequestClazz) { - try { - UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder(); - builder.entityType(Urn.createFromString(entityUrn).getEntityType()); - builder.entityUrn(entityUrn); - - // i.e. GlobalTagsAspectRequestV2 - if (aspectRequest != null) { - // i.e. GlobalTags - Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); - Object aspect = valueMethod == null ? 
null : valueMethod.invoke(aspectRequest); - - if (aspect != null) { + } + + public static <T> List<UpsertAspectRequest> convertEntityToUpsert( + Object openapiEntity, Class<T> fromClazz, EntityRegistry entityRegistry) { + final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, fromClazz); + + return entitySpec.getAspectSpecs().stream() + .map( + aspectSpec -> { + try { + UpsertAspectRequest.UpsertAspectRequestBuilder builder = + UpsertAspectRequest.builder(); + builder.entityType(entitySpec.getName()); + builder.entityUrn( + (String) REFLECT.lookupMethod(fromClazz, "getUrn").invoke(openapiEntity)); + + String upperAspectName = toUpperFirst(aspectSpec.getName()); + Method aspectMethod = REFLECT.lookupMethod(fromClazz, "get" + upperAspectName); + + // i.e. GlobalTagsAspectRequestV2 + Object aspectRequest = + aspectMethod == null ? null : aspectMethod.invoke(openapiEntity); + if (aspectRequest != null) { + Class<?> aspectRequestClazz = + REFLECT.lookupClass(upperAspectName + ASPECT_REQUEST_SUFFIX); + + // i.e. GlobalTags + Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); + Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest); + + if (aspect != null) { builder.aspect((OneOfGenericAspectValue) aspect); return builder.build(); + } } - } - - return null; - } catch (Exception e) { - log.error("Error reflecting urn: {} aspect: {}", entityUrn, aspectRequestClazz.getName()); - throw new RuntimeException(e); - } - } - public static <T> List<UpsertAspectRequest> convertEntityToUpsert(Object openapiEntity, Class<T> fromClazz, EntityRegistry entityRegistry) { - final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, fromClazz); - - return entitySpec.getAspectSpecs().stream() - .map(aspectSpec -> { - try { - UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder(); - builder.entityType(entitySpec.getName()); - builder.entityUrn((String) REFLECT.lookupMethod(fromClazz, "getUrn").invoke(openapiEntity)); - - String upperAspectName = toUpperFirst(aspectSpec.getName()); - Method aspectMethod = REFLECT.lookupMethod(fromClazz, "get" + upperAspectName); - - // i.e. GlobalTagsAspectRequestV2 - Object aspectRequest = aspectMethod == null ? null : aspectMethod.invoke(openapiEntity); - if (aspectRequest != null) { - Class<?> aspectRequestClazz = REFLECT.lookupClass(upperAspectName + ASPECT_REQUEST_SUFFIX); - - // i.e. GlobalTags - Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); - Object aspect = valueMethod == null ? 
null : valueMethod.invoke(aspectRequest); - - if (aspect != null) { - builder.aspect((OneOfGenericAspectValue) aspect); - return builder.build(); - } - } - - return null; - } catch (Exception e) { - log.error("Error reflecting entity: {} aspect: {}", entitySpec.getName(), aspectSpec.getName()); - throw new RuntimeException(e); - } - }).filter(Objects::nonNull).collect(Collectors.toList()); - } - public static <E, A> Optional<A> convertAspect(UrnResponseMap urnResponseMap, String aspectName, Class<E> entityClazz, - Class<A> aspectClazz, boolean withSystemMetadata) { - return convertEntity(urnResponseMap, entityClazz, withSystemMetadata).map(entity -> { - try { - Method aspectMethod = REFLECT.lookupMethod(entityClazz, "get" + toUpperFirst(aspectName)); + return null; + } catch (Exception e) { + log.error( + "Error reflecting entity: {} aspect: {}", + entitySpec.getName(), + aspectSpec.getName()); + throw new RuntimeException(e); + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } + + public static <E, A> Optional<A> convertAspect( + UrnResponseMap urnResponseMap, + String aspectName, + Class<E> entityClazz, + Class<A> aspectClazz, + boolean withSystemMetadata) { + return convertEntity(urnResponseMap, entityClazz, withSystemMetadata) + .map( + entity -> { + try { + Method aspectMethod = + REFLECT.lookupMethod(entityClazz, "get" + toUpperFirst(aspectName)); return aspectMethod == null ? null : aspectClazz.cast(aspectMethod.invoke(entity)); - } catch (IllegalAccessException | InvocationTargetException e) { + } catch (IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(e); - } - }); - - } - - public static <T> Optional<T> convertEntity(UrnResponseMap urnResponseMap, Class<T> toClazz, boolean withSystemMetadata) { - return Optional.ofNullable(urnResponseMap) - .flatMap(respMap -> respMap.getResponses().entrySet().stream().findFirst()) - .flatMap(entry -> convertEntities(Set.of(entry), toClazz, withSystemMetadata).stream().findFirst()); - } - - public static <T> List<T> convertEntities(Set<Map.Entry<String, EntityResponse>> entityResponseSet, Class<T> toClazz, boolean withSystemMetadata) { - if (entityResponseSet != null) { - return entityResponseSet.stream().map(entry -> { + } + }); + } + + public static <T> Optional<T> convertEntity( + UrnResponseMap urnResponseMap, Class<T> toClazz, boolean withSystemMetadata) { + return Optional.ofNullable(urnResponseMap) + .flatMap(respMap -> respMap.getResponses().entrySet().stream().findFirst()) + .flatMap( + entry -> + convertEntities(Set.of(entry), toClazz, withSystemMetadata).stream().findFirst()); + } + + public static <T> List<T> convertEntities( + Set<Map.Entry<String, EntityResponse>> entityResponseSet, + Class<T> toClazz, + boolean withSystemMetadata) { + if (entityResponseSet != null) { + return entityResponseSet.stream() + .map( + entry -> { try { - // i.e. DataContractEntityResponseV2.Builder - Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(toClazz); - Set<String> builderMethods = Arrays.stream(builderPair.getFirst().getMethods()) - .map(Method::getName).collect(Collectors.toSet()); - - REFLECT.lookupMethod(builderPair, "urn", String.class).invoke(builderPair.getSecond(), entry.getKey()); - - entry.getValue().getAspects().entrySet().forEach(aspectEntry -> { - try { - if (builderMethods.contains(aspectEntry.getKey())) { + // i.e. 
DataContractEntityResponseV2.Builder + Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(toClazz); + Set<String> builderMethods = + Arrays.stream(builderPair.getFirst().getMethods()) + .map(Method::getName) + .collect(Collectors.toSet()); + + REFLECT + .lookupMethod(builderPair, "urn", String.class) + .invoke(builderPair.getSecond(), entry.getKey()); + + entry + .getValue() + .getAspects() + .entrySet() + .forEach( + aspectEntry -> { + try { + if (builderMethods.contains(aspectEntry.getKey())) { String upperFirstAspect = toUpperFirst(aspectEntry.getKey()); Class<?> aspectClazz = REFLECT.lookupClass(upperFirstAspect); - Class<?> aspectRespClazz = REFLECT.lookupClass(upperFirstAspect + ASPECT_RESPONSE_SUFFIX); - Class<?> aspectRespClazzBuilder = REFLECT.lookupClass(String.join("", - upperFirstAspect, ASPECT_RESPONSE_SUFFIX, - "$", upperFirstAspect, ASPECT_RESPONSE_SUFFIX, "Builder")); - Object aspectBuilder = REFLECT.lookupMethod(aspectRespClazz, "builder").invoke(null); - - REFLECT.lookupMethod(aspectRespClazzBuilder, "value", aspectClazz).invoke(aspectBuilder, aspectEntry.getValue().getValue()); + Class<?> aspectRespClazz = + REFLECT.lookupClass(upperFirstAspect + ASPECT_RESPONSE_SUFFIX); + Class<?> aspectRespClazzBuilder = + REFLECT.lookupClass( + String.join( + "", + upperFirstAspect, + ASPECT_RESPONSE_SUFFIX, + "$", + upperFirstAspect, + ASPECT_RESPONSE_SUFFIX, + "Builder")); + Object aspectBuilder = + REFLECT.lookupMethod(aspectRespClazz, "builder").invoke(null); + + REFLECT + .lookupMethod(aspectRespClazzBuilder, "value", aspectClazz) + .invoke(aspectBuilder, aspectEntry.getValue().getValue()); if (withSystemMetadata) { - REFLECT.lookupMethod(aspectRespClazzBuilder, "systemMetadata", SystemMetadata.class) - .invoke(aspectBuilder, aspectEntry.getValue().getSystemMetadata()); + REFLECT + .lookupMethod( + aspectRespClazzBuilder, + "systemMetadata", + SystemMetadata.class) + .invoke( + aspectBuilder, + aspectEntry.getValue().getSystemMetadata()); } - REFLECT.lookupMethod(builderPair, aspectEntry.getKey(), aspectRespClazz).invoke(builderPair.getSecond(), - REFLECT.lookupMethod(aspectRespClazzBuilder, "build").invoke(aspectBuilder)); + REFLECT + .lookupMethod( + builderPair, aspectEntry.getKey(), aspectRespClazz) + .invoke( + builderPair.getSecond(), + REFLECT + .lookupMethod(aspectRespClazzBuilder, "build") + .invoke(aspectBuilder)); + } + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } - }); + }); - return toClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); + return toClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }).collect(Collectors.toList()); - } - return List.of(); + }) + .collect(Collectors.toList()); } - - public static <I, T> T convertToResponseAspect(I source, Class<T> targetClazz) { - if (source != null) { - try { - Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); - Method valueMethod = REFLECT.lookupMethod(sourceClazz, "getValue"); - Object aspect = valueMethod.invoke(source); - - Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz); - REFLECT.lookupMethod(builderPair, "value", valueMethod.getReturnType()).invoke(builderPair.getSecond(), aspect); - - return 
targetClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - return null; + return List.of(); + } + + public static <I, T> T convertToResponseAspect(I source, Class<T> targetClazz) { + if (source != null) { + try { + Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); + Method valueMethod = REFLECT.lookupMethod(sourceClazz, "getValue"); + Object aspect = valueMethod.invoke(source); + + Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz); + REFLECT + .lookupMethod(builderPair, "value", valueMethod.getReturnType()) + .invoke(builderPair.getSecond(), aspect); + + return targetClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - - public static <I, T> T convertToResponse(I source, Class<T> targetClazz, EntityRegistry entityRegistry) { - if (source != null) { - try { - Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); - Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz); - copy(Pair.of(sourceClazz, source), builderPair, "urn"); - - final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, sourceClazz); - entitySpec.getAspectSpecs().stream() - .forEach(aspectSpec -> { - try { - copy(Pair.of(sourceClazz, source), builderPair, aspectSpec.getName()); - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - }); - - return targetClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - return null; + return null; + } + + public static <I, T> T convertToResponse( + I source, Class<T> targetClazz, EntityRegistry entityRegistry) { + if (source != null) { + try { + Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); + Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz); + copy(Pair.of(sourceClazz, source), builderPair, "urn"); + + final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, sourceClazz); + entitySpec.getAspectSpecs().stream() + .forEach( + aspectSpec -> { + try { + copy(Pair.of(sourceClazz, source), builderPair, aspectSpec.getName()); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } + }); + + return targetClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - - public static <T, S> Optional<S> convertToScrollResponse(Class<S> scrollRespClazz, String scrollId, List<T> entityResults) { - if (entityResults != null) { - try { - Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(scrollRespClazz); - REFLECT.lookupMethod(builderPair.getFirst(), "scrollId", String.class).invoke(builderPair.getSecond(), scrollId); - REFLECT.lookupMethod(builderPair.getFirst(), "entities", List.class).invoke(builderPair.getSecond(), entityResults); - - return Optional.of(scrollRespClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()))); - - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - return 
Optional.empty(); + return null; + } + + public static <T, S> Optional<S> convertToScrollResponse( + Class<S> scrollRespClazz, String scrollId, List<T> entityResults) { + if (entityResults != null) { + try { + Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(scrollRespClazz); + REFLECT + .lookupMethod(builderPair.getFirst(), "scrollId", String.class) + .invoke(builderPair.getSecond(), scrollId); + REFLECT + .lookupMethod(builderPair.getFirst(), "entities", List.class) + .invoke(builderPair.getSecond(), entityResults); + + return Optional.of( + scrollRespClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()))); + + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - - - - private static void copy(Pair<Class<?>, Object> sourcePair, Pair<Class<?>, Object> builderPair, String method) - throws InvocationTargetException, IllegalAccessException { - Method sourceMethod = REFLECT.lookupMethod(sourcePair, String.format("get%s", toUpperFirst(method))); - if (sourceMethod != null) { - Class<?> paramClazz = null; - Object param = null; - if (sourceMethod.getReturnType().getSimpleName().contains("Request")) { - Object sourceParam = sourceMethod.invoke(sourcePair.getSecond()); - if (sourceParam != null) { - paramClazz = REFLECT.lookupClass(sourceMethod.getReturnType().getSimpleName().replace("Request", "Response")); - Pair<Class<?>, Object> aspectBuilder = REFLECT.getBuilder(paramClazz); - - for (Method m : sourceMethod.getReturnType().getMethods()) { - if (m.getName().startsWith("get") && !Objects.equals("getClass", m.getName())) { - String getterMethod = m.getName().replaceFirst("^get", ""); - copy(Pair.of(sourceMethod.getReturnType(), sourceMethod.invoke(sourcePair.getSecond())), - aspectBuilder, getterMethod); - } - } - - param = REFLECT.lookupMethod(aspectBuilder, "build").invoke(aspectBuilder.getSecond()); - } - } else { - paramClazz = sourceMethod.getReturnType(); - param = sourceMethod.invoke(sourcePair.getSecond()); + return Optional.empty(); + } + + private static void copy( + Pair<Class<?>, Object> sourcePair, Pair<Class<?>, Object> builderPair, String method) + throws InvocationTargetException, IllegalAccessException { + Method sourceMethod = + REFLECT.lookupMethod(sourcePair, String.format("get%s", toUpperFirst(method))); + if (sourceMethod != null) { + Class<?> paramClazz = null; + Object param = null; + if (sourceMethod.getReturnType().getSimpleName().contains("Request")) { + Object sourceParam = sourceMethod.invoke(sourcePair.getSecond()); + if (sourceParam != null) { + paramClazz = + REFLECT.lookupClass( + sourceMethod.getReturnType().getSimpleName().replace("Request", "Response")); + Pair<Class<?>, Object> aspectBuilder = REFLECT.getBuilder(paramClazz); + + for (Method m : sourceMethod.getReturnType().getMethods()) { + if (m.getName().startsWith("get") && !Objects.equals("getClass", m.getName())) { + String getterMethod = m.getName().replaceFirst("^get", ""); + copy( + Pair.of( + sourceMethod.getReturnType(), sourceMethod.invoke(sourcePair.getSecond())), + aspectBuilder, + getterMethod); } + } - if (param != null) { - Method targetMethod = REFLECT.lookupMethod(builderPair, toLowerFirst(method), paramClazz); - targetMethod.invoke(builderPair.getSecond(), param); - } - } else { - log.info("Class {} doesn't container method {}", sourcePair.getFirst(), - String.format("get%s", toUpperFirst(method))); + param = REFLECT.lookupMethod(aspectBuilder, "build").invoke(aspectBuilder.getSecond()); } + } 
else { +      paramClazz = sourceMethod.getReturnType(); +      param = sourceMethod.invoke(sourcePair.getSecond()); +    } + +    if (param != null) { +      Method targetMethod = REFLECT.lookupMethod(builderPair, toLowerFirst(method), paramClazz); +      targetMethod.invoke(builderPair.getSecond(), param); +    } +  } else { +    log.info( +        "Class {} doesn't contain method {}", +        sourcePair.getFirst(), +        String.format("get%s", toUpperFirst(method))); }   - -    public static <T> EntitySpec requestClassToEntitySpec(EntityRegistry entityRegistry, Class<T> reqClazz) { -        final String entityType = toLowerFirst(reqClazz.getSimpleName().replace(ENTITY_REQUEST_SUFFIX, "")); -        return entityRegistry.getEntitySpec(entityType); -    } - -    public static <T> EntitySpec responseClassToEntitySpec(EntityRegistry entityRegistry, Class<T> respClazz) { -        String entityType = toLowerFirst(respClazz.getSimpleName().replace(ENTITY_RESPONSE_SUFFIX, "")); -        return entityRegistry.getEntitySpec(entityType); -    } +  } + +  public static <T> EntitySpec requestClassToEntitySpec( +      EntityRegistry entityRegistry, Class<T> reqClazz) { +    final String entityType = +        toLowerFirst(reqClazz.getSimpleName().replace(ENTITY_REQUEST_SUFFIX, "")); +    return entityRegistry.getEntitySpec(entityType); +  } + +  public static <T> EntitySpec responseClassToEntitySpec( +      EntityRegistry entityRegistry, Class<T> respClazz) { +    String entityType = toLowerFirst(respClazz.getSimpleName().replace(ENTITY_RESPONSE_SUFFIX, "")); +    return entityRegistry.getEntitySpec(entityType); +  } } diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java index cabaa2cbd75e6..920a13d998985 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java @@ -1,5 +1,11 @@ package io.datahubproject.openapi.config; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -26,6 +32,9 @@ import io.datahubproject.openapi.generated.EntityResponse; import io.datahubproject.openapi.relationships.RelationshipsController; import io.datahubproject.openapi.timeline.TimelineController; +import java.util.Arrays; +import java.util.Map; +import java.util.stream.Collectors; import org.mockito.Mockito; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -33,102 +42,96 @@ import org.springframework.context.annotation.Primary; import org.springframework.http.ResponseEntity; -import java.util.Arrays; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyList; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration public class OpenAPIEntityTestConfiguration { -    @Bean -    public ObjectMapper objectMapper() { -        return new ObjectMapper(new
YAMLFactory()); - } - - @Bean - @Primary - public EntityService entityService(final EntityRegistry mockRegistry) { - EntityService entityService = mock(EntityServiceImpl.class); - when(entityService.getEntityRegistry()).thenReturn(mockRegistry); - return entityService; - } - - @Bean - @Primary - public SearchService searchService() { - SearchService searchService = mock(SearchService.class); - when(searchService.scrollAcrossEntities(anyList(), any(), any(), any(), - any(), any(), anyInt(), any())) - .thenReturn(new ScrollResult().setEntities(new SearchEntityArray())); - - return searchService; - } - - @Bean - public AuthorizerChain authorizerChain() { - AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - - Authentication authentication = Mockito.mock(Authentication.class); - when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); - AuthenticationContext.setAuthentication(authentication); - - return authorizerChain; - } - - @MockBean(name = "elasticSearchSystemMetadataService") - public SystemMetadataService systemMetadataService; - - @MockBean - public TimelineService timelineService; - - @Bean("entityRegistry") - @Primary - public EntityRegistry entityRegistry() throws EntityRegistryException, InterruptedException { - /* - Considered a few different approach to loading a custom model. Chose this method - to as closely match a production configuration rather than direct project to project - dependency. - */ - PluginEntityRegistryLoader custom = new PluginEntityRegistryLoader( - getClass().getResource("/custom-model").getFile()); - - ConfigEntityRegistry standard = new ConfigEntityRegistry( - OpenAPIEntityTestConfiguration.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - MergedEntityRegistry entityRegistry = new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(standard); - custom.withBaseRegistry(entityRegistry).start(true); - - return entityRegistry; - } - - /* Controllers not under this module */ - @Bean - @Primary - public EntitiesController entitiesController() { - EntitiesController entitiesController = mock(EntitiesController.class); - when(entitiesController.getEntities(any(), any())) - .thenAnswer(params -> { - String[] urns = params.getArgument(0); - String[] aspects = params.getArgument(1); - return ResponseEntity.ok(UrnResponseMap.builder() - .responses(Arrays.stream(urns) - .map(urn -> Map.entry(urn, EntityResponse.builder().urn(urn).build())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) - .build()); - }); - - return entitiesController; - } - - @MockBean - public TimelineController timelineController; - - @MockBean - public RelationshipsController relationshipsController; + @Bean + public ObjectMapper objectMapper() { + return new ObjectMapper(new YAMLFactory()); + } + + @Bean + @Primary + public EntityService entityService(final EntityRegistry mockRegistry) { + EntityService entityService = mock(EntityServiceImpl.class); + when(entityService.getEntityRegistry()).thenReturn(mockRegistry); + return entityService; + } + + @Bean + @Primary + public SearchService searchService() { + SearchService searchService = mock(SearchService.class); + when(searchService.scrollAcrossEntities( + anyList(), any(), any(), any(), any(), any(), anyInt(), any())) + .thenReturn(new ScrollResult().setEntities(new SearchEntityArray())); + + return searchService; + } + + @Bean + 
public AuthorizerChain authorizerChain() { +    AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); + +    Authentication authentication = Mockito.mock(Authentication.class); +    when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); +    when(authorizerChain.authorize(any())) +        .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); +    AuthenticationContext.setAuthentication(authentication); + +    return authorizerChain; +  } + +  @MockBean(name = "elasticSearchSystemMetadataService") +  public SystemMetadataService systemMetadataService; + +  @MockBean public TimelineService timelineService; + +  @Bean("entityRegistry") +  @Primary +  public EntityRegistry entityRegistry() throws EntityRegistryException, InterruptedException { +    /* +    Considered a few different approaches to loading a custom model. Chose this method +    to match a production configuration as closely as possible, rather than a direct +    project-to-project dependency. +    */ +    PluginEntityRegistryLoader custom = +        new PluginEntityRegistryLoader(getClass().getResource("/custom-model").getFile()); + +    ConfigEntityRegistry standard = +        new ConfigEntityRegistry( +            OpenAPIEntityTestConfiguration.class +                .getClassLoader() +                .getResourceAsStream("entity-registry.yml")); +    MergedEntityRegistry entityRegistry = +        new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(standard); +    custom.withBaseRegistry(entityRegistry).start(true); + +    return entityRegistry; +  } + +  /* Controllers not under this module */ +  @Bean +  @Primary +  public EntitiesController entitiesController() { +    EntitiesController entitiesController = mock(EntitiesController.class); +    when(entitiesController.getEntities(any(), any())) +        .thenAnswer( +            params -> { +              String[] urns = params.getArgument(0); +              String[] aspects = params.getArgument(1); +              return ResponseEntity.ok( +                  UrnResponseMap.builder() +                      .responses( +                          Arrays.stream(urns) +                              .map(urn -> Map.entry(urn, EntityResponse.builder().urn(urn).build())) +                              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) +                      .build()); +            }); + +    return entitiesController; +  } + +  @MockBean public TimelineController timelineController; + +  @MockBean public RelationshipsController relationshipsController; } diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java index 57803ac904a93..1f8f0a5023513 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.delegates; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import static org.testng.Assert.*; + import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import com.linkedin.metadata.models.registry.EntityRegistry; import io.datahubproject.openapi.config.OpenAPIEntityTestConfiguration; @@ -31,6 +34,7 @@ import io.datahubproject.openapi.generated.TagAssociation; import io.datahubproject.openapi.generated.controller.ChartApiController; import io.datahubproject.openapi.generated.controller.DatasetApiController; +import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import
org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.context.SpringBootTest; @@ -46,208 +50,245 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import java.util.List; - -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import static org.testng.Assert.*; - - @SpringBootTest(classes = {SpringWebConfig.class}) @ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"}) @Import({OpenAPIEntityTestConfiguration.class}) @AutoConfigureMockMvc public class EntityApiDelegateImplTest extends AbstractTestNGSpringContextTests { - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } - - @Autowired - private ChartApiController chartApiController; - @Autowired - private DatasetApiController datasetApiController; - @Autowired - private EntityRegistry entityRegistry; - @Autowired - private MockMvc mockMvc; - - @Test - public void initTest() { - assertNotNull(chartApiController); - assertNotNull(datasetApiController); - - assertTrue(entityRegistry.getEntitySpec("dataset").getAspectSpecMap().containsKey("customDataQualityRules"), - "Failed to load custom model from custom registry"); - } - - @Test - public void chartApiControllerTest() { - final String testUrn = "urn:li:chart:(looker,baz1)"; - - ChartEntityRequestV2 req = ChartEntityRequestV2.builder() - .urn(testUrn) - .build(); - ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0); - assertEquals(resp.getUrn(), testUrn); - - resp = chartApiController.get(testUrn, false, List.of()).getBody(); - assertEquals(resp.getUrn(), testUrn); - - ResponseEntity<Void> deleteResp = chartApiController.delete(testUrn); - assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); - - ResponseEntity<Void> headResp = chartApiController.head(testUrn); - assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); - - ResponseEntity<ScrollChartEntityResponseV2> scrollResp = chartApiController.scroll( - false, List.of(), 10, null, null, null, null); - assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); - assertNotNull(scrollResp.getBody().getEntities()); - } - - @Test - public void datasetApiControllerTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder() - .urn(testUrn) - .build(); - DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0); - assertEquals(resp.getUrn(), testUrn); - - resp = datasetApiController.get(testUrn, false, List.of()).getBody(); - assertEquals(resp.getUrn(), testUrn); - - ResponseEntity<Void> deleteResp = datasetApiController.delete(testUrn); - assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); - - ResponseEntity<Void> headResp = datasetApiController.head(testUrn); - assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); - - ResponseEntity<ScrollDatasetEntityResponseV2> scrollResp = datasetApiController.scroll( - false, List.of(), 10, null, null, null, null); - assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); - assertNotNull(scrollResp.getBody().getEntities()); - } - - @Test - public void browsePathsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - BrowsePathsV2AspectRequestV2 req = 
BrowsePathsV2AspectRequestV2.builder() - .value(BrowsePathsV2.builder().path(List.of(BrowsePathEntry.builder().urn(testUrn) - .id("path").build())).build()).build(); - assertEquals(datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void deprecationTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - DeprecationAspectRequestV2 req = DeprecationAspectRequestV2.builder() - .value(Deprecation.builder().deprecated(true).build()).build(); - assertEquals(datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headDeprecation(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void domainsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - DomainsAspectRequestV2 req = DomainsAspectRequestV2.builder() - .value(Domains.builder().domains(List.of("my_domain")).build()).build(); - assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headDomains(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void ownershipTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - OwnershipAspectRequestV2 req = OwnershipAspectRequestV2.builder() - .value(Ownership.builder().owners(List.of(Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build())).build()).build(); - assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headOwnership(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void statusTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - StatusAspectRequestV2 req = StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build(); - assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headStatus(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void globalTagsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - GlobalTagsAspectRequestV2 req = GlobalTagsAspectRequestV2.builder() - 
.value(GlobalTags.builder().tags(List.of(TagAssociation.builder().tag("tag").build())).build()).build(); - assertEquals(datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headGlobalTags(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void glossaryTermsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - GlossaryTermsAspectRequestV2 req = GlossaryTermsAspectRequestV2.builder() - .value(GlossaryTerms.builder().terms(List.of(GlossaryTermAssociation.builder().urn("term urn").build())).build()).build(); - assertEquals(datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headGlossaryTerms(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - - /** - * The purpose of this test is to ensure no errors when a custom aspect is encountered, - * not that the custom aspect is processed. The missing piece to support custom - * aspects is the openapi generated classes for the custom aspects and related request/responses. - */ - @Test - public void customModelTest() throws Exception { - String expectedUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"; - - //CHECKSTYLE:OFF - String body = "[\n" + - " {\n" + - " \"urn\": \"" + expectedUrn + "\",\n" + - " \"customDataQualityRules\": [\n" + - " {\n" + - " \"field\": \"my_event_data\",\n" + - " \"isFieldLevel\": false,\n" + - " \"type\": \"isNull\",\n" + - " \"checkDefinition\": \"n/a\",\n" + - " \"url\": \"https://github.com/datahub-project/datahub/blob/master/checks/nonNull.sql\"\n" + - " }\n" + - " ]\n" + - " }\n" + - "]"; - //CHECKSTYLE:ON - - mockMvc.perform(MockMvcRequestBuilders - .post("/v2/entity/dataset") - .content(body) - .contentType(MediaType.APPLICATION_JSON) - .accept(MediaType.APPLICATION_JSON)) - .andExpect(status().is2xxSuccessful()) - .andExpect(MockMvcResultMatchers.jsonPath("$.[0].urn").value(expectedUrn)); - } + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } + + @Autowired private ChartApiController chartApiController; + @Autowired private DatasetApiController datasetApiController; + @Autowired private EntityRegistry entityRegistry; + @Autowired private MockMvc mockMvc; + + @Test + public void initTest() { + assertNotNull(chartApiController); + assertNotNull(datasetApiController); + + assertTrue( + entityRegistry + .getEntitySpec("dataset") + .getAspectSpecMap() + .containsKey("customDataQualityRules"), + "Failed to load custom model from custom registry"); + } + + @Test + public void chartApiControllerTest() { + final String testUrn = "urn:li:chart:(looker,baz1)"; + + ChartEntityRequestV2 req = ChartEntityRequestV2.builder().urn(testUrn).build(); + ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0); + assertEquals(resp.getUrn(), testUrn); + + resp = chartApiController.get(testUrn, false, List.of()).getBody(); 
+ assertEquals(resp.getUrn(), testUrn); + + ResponseEntity<Void> deleteResp = chartApiController.delete(testUrn); + assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); + + ResponseEntity<Void> headResp = chartApiController.head(testUrn); + assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); + + ResponseEntity<ScrollChartEntityResponseV2> scrollResp = + chartApiController.scroll(false, List.of(), 10, null, null, null, null); + assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); + assertNotNull(scrollResp.getBody().getEntities()); + } + + @Test + public void datasetApiControllerTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder().urn(testUrn).build(); + DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0); + assertEquals(resp.getUrn(), testUrn); + + resp = datasetApiController.get(testUrn, false, List.of()).getBody(); + assertEquals(resp.getUrn(), testUrn); + + ResponseEntity<Void> deleteResp = datasetApiController.delete(testUrn); + assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); + + ResponseEntity<Void> headResp = datasetApiController.head(testUrn); + assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); + + ResponseEntity<ScrollDatasetEntityResponseV2> scrollResp = + datasetApiController.scroll(false, List.of(), 10, null, null, null, null); + assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); + assertNotNull(scrollResp.getBody().getEntities()); + } + + @Test + public void browsePathsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + BrowsePathsV2AspectRequestV2 req = + BrowsePathsV2AspectRequestV2.builder() + .value( + BrowsePathsV2.builder() + .path(List.of(BrowsePathEntry.builder().urn(testUrn).id("path").build())) + .build()) + .build(); + assertEquals( + datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(), + HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void deprecationTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + DeprecationAspectRequestV2 req = + DeprecationAspectRequestV2.builder() + .value(Deprecation.builder().deprecated(true).build()) + .build(); + assertEquals( + datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headDeprecation(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void domainsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + DomainsAspectRequestV2 req = + DomainsAspectRequestV2.builder() + .value(Domains.builder().domains(List.of("my_domain")).build()) + .build(); + assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + 
datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals(datasetApiController.headDomains(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void ownershipTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + OwnershipAspectRequestV2 req = + OwnershipAspectRequestV2.builder() + .value( + Ownership.builder() + .owners( + List.of( + Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build())) + .build()) + .build(); + assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals(datasetApiController.headOwnership(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void statusTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + StatusAspectRequestV2 req = + StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build(); + assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals(datasetApiController.headStatus(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void globalTagsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + GlobalTagsAspectRequestV2 req = + GlobalTagsAspectRequestV2.builder() + .value( + GlobalTags.builder() + .tags(List.of(TagAssociation.builder().tag("tag").build())) + .build()) + .build(); + assertEquals( + datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headGlobalTags(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void glossaryTermsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + GlossaryTermsAspectRequestV2 req = + GlossaryTermsAspectRequestV2.builder() + .value( + GlossaryTerms.builder() + .terms(List.of(GlossaryTermAssociation.builder().urn("term urn").build())) + .build()) + .build(); + assertEquals( + datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(), + HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headGlossaryTerms(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + /** + * The purpose of this test is to ensure no errors when a custom aspect is encountered, not that + * the custom aspect is processed. The missing piece to support custom aspects is the openapi + * generated classes for the custom aspects and related request/responses. 
+ */ + @Test + public void customModelTest() throws Exception { + String expectedUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"; + + // CHECKSTYLE:OFF + String body = + "[\n" + + " {\n" + + " \"urn\": \"" + + expectedUrn + + "\",\n" + + " \"customDataQualityRules\": [\n" + + " {\n" + + " \"field\": \"my_event_data\",\n" + + " \"isFieldLevel\": false,\n" + + " \"type\": \"isNull\",\n" + + " \"checkDefinition\": \"n/a\",\n" + + " \"url\": \"https://github.com/datahub-project/datahub/blob/master/checks/nonNull.sql\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "]"; + // CHECKSTYLE:ON + + mockMvc + .perform( + MockMvcRequestBuilders.post("/v2/entity/dataset") + .content(body) + .contentType(MediaType.APPLICATION_JSON) + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().is2xxSuccessful()) + .andExpect(MockMvcResultMatchers.jsonPath("$.[0].urn").value(expectedUrn)); + } } diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java index b4e87eedea542..12596d9410874 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java @@ -1,13 +1,17 @@ package io.datahubproject.openapi.util; +import static org.testng.AssertJUnit.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import io.datahubproject.openapi.config.OpenAPIEntityTestConfiguration; import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.generated.ContainerEntityRequestV2; import io.datahubproject.openapi.generated.ContainerKey; import io.datahubproject.openapi.generated.ContainerKeyAspectRequestV2; +import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; @@ -15,41 +19,44 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import java.util.List; - -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({OpenAPIEntityTestConfiguration.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class OpenApiEntitiesUtilTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityRegistry entityRegistry; - - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } - - @Test - public void testInitialization() { - assertNotNull(entityRegistry); - } - - @Test - public void containerConversionTest() { - ContainerEntityRequestV2 test = ContainerEntityRequestV2.builder() - .urn("urn:li:container:123") - .containerKey(ContainerKeyAspectRequestV2.builder().value(ContainerKey.builder().guid("123").build()).build()) - .build(); - List<UpsertAspectRequest> expected = List.of(UpsertAspectRequest.builder() + 
@Autowired private EntityRegistry entityRegistry; + + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } + + @Test + public void testInitialization() { + assertNotNull(entityRegistry); + } + + @Test + public void containerConversionTest() { + ContainerEntityRequestV2 test = + ContainerEntityRequestV2.builder() + .urn("urn:li:container:123") + .containerKey( + ContainerKeyAspectRequestV2.builder() + .value(ContainerKey.builder().guid("123").build()) + .build()) + .build(); + List<UpsertAspectRequest> expected = + List.of( + UpsertAspectRequest.builder() .entityType("container") .entityUrn("urn:li:container:123") .aspect(ContainerKey.builder().guid("123").build()) .build()); - assertEquals(expected, OpenApiEntitiesUtil.convertEntityToUpsert(test, ContainerEntityRequestV2.class, entityRegistry)); - } + assertEquals( + expected, + OpenApiEntitiesUtil.convertEntityToUpsert( + test, ContainerEntityRequestV2.class, entityRegistry)); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java index 47e2cfec3a9c0..cc040d29657b2 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java @@ -6,7 +6,6 @@ import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; - @ControllerAdvice public class GlobalControllerExceptionHandler { @ExceptionHandler(ConversionFailedException.class) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java index e4f49df90c392..ed98cf3ef4ce9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java @@ -5,7 +5,6 @@ import io.swagger.v3.oas.annotations.info.Info; import io.swagger.v3.oas.annotations.servers.Server; import java.util.List; - import org.springdoc.core.GroupedOpenApi; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -18,10 +17,10 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - @EnableWebMvc -@OpenAPIDefinition(info = @Info(title = "DataHub OpenAPI", version = "2.0.0"), - servers = {@Server(url = "/openapi/", description = "Default Server URL")}) +@OpenAPIDefinition( + info = @Info(title = "DataHub OpenAPI", version = "2.0.0"), + servers = {@Server(url = "/openapi/", description = "Default Server URL")}) @Configuration public class SpringWebConfig implements WebMvcConfigurer { @@ -41,20 +40,17 @@ public void addFormatters(FormatterRegistry registry) { @Bean public GroupedOpenApi defaultOpenApiGroup() { return GroupedOpenApi.builder() - .group("default") - .packagesToExclude( - "io.datahubproject.openapi.operations", - "io.datahubproject.openapi.health" - ).build(); + .group("default") + 
.packagesToExclude( + "io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .build(); } @Bean public GroupedOpenApi operationsOpenApiGroup() { return GroupedOpenApi.builder() - .group("operations") - .packagesToScan( - "io.datahubproject.openapi.operations", - "io.datahubproject.openapi.health" - ).build(); + .group("operations") + .packagesToScan("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .build(); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java index e88f499208af8..c092a2423fdf5 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.converter; +import static com.linkedin.metadata.timeline.data.ChangeCategory.*; + import com.linkedin.metadata.timeline.data.ChangeCategory; import java.util.List; import java.util.Optional; @@ -8,28 +10,29 @@ import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.Converter; -import static com.linkedin.metadata.timeline.data.ChangeCategory.*; - - public class StringToChangeCategoryConverter implements Converter<String, ChangeCategory> { @Override public ChangeCategory convert(String source) { try { String upperCase = source.toUpperCase(); - // For compound enums, want to support different cases i.e. technical_schema, technical schema, technical-schema, etc. - Optional<ChangeCategory> compoundCategory = COMPOUND_CATEGORIES.keySet().stream() - .filter(compoundCategoryKey -> matchCompound(compoundCategoryKey, upperCase)) - .map(COMPOUND_CATEGORIES::get) - .findFirst(); + // For compound enums, want to support different cases i.e. technical_schema, technical + // schema, technical-schema, etc. 
+ Optional<ChangeCategory> compoundCategory = + COMPOUND_CATEGORIES.keySet().stream() + .filter(compoundCategoryKey -> matchCompound(compoundCategoryKey, upperCase)) + .map(COMPOUND_CATEGORIES::get) + .findFirst(); return compoundCategory.orElseGet(() -> ChangeCategory.valueOf(upperCase)); } catch (Exception e) { - throw new ConversionFailedException(TypeDescriptor.valueOf(String.class), - TypeDescriptor.valueOf(ChangeCategory.class), source, e); + throw new ConversionFailedException( + TypeDescriptor.valueOf(String.class), + TypeDescriptor.valueOf(ChangeCategory.class), + source, + e); } } private boolean matchCompound(@Nonnull List<String> compoundCategoryKey, @Nonnull String source) { - return compoundCategoryKey.stream() - .allMatch(source::contains); + return compoundCategoryKey.stream().allMatch(source::contains); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java index 0be69e3264957..07a501885f1aa 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java @@ -6,7 +6,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder @JsonInclude(JsonInclude.Include.NON_NULL) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java index 67858581ba97a..d185e01804c24 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java @@ -9,7 +9,6 @@ import lombok.Builder; import lombok.Value; - @JsonInclude(JsonInclude.Include.NON_NULL) @Value @Builder @@ -17,15 +16,21 @@ public class UpsertAspectRequest { @JsonProperty("entityType") - @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry") + @Schema( + required = true, + description = "The name of the entity matching with its definition in the entity registry") String entityType; @JsonProperty("entityUrn") - @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") + @Schema( + description = + "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") String entityUrn; @JsonProperty("entityKeyAspect") - @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null") + @Schema( + description = + "A key aspect referencing the entity to be updated, required if entityUrn is null") OneOfGenericAspectValue entityKeyAspect; @JsonProperty("aspect") @@ -33,7 +38,5 @@ public class UpsertAspectRequest { OneOfGenericAspectValue aspect; @JsonPOJOBuilder(withPrefix = "") - public static class UpsertAspectRequestBuilder { - - } + public static class UpsertAspectRequestBuilder {} } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java index 02be0cc93eb1c..60062823a7d82 100644 --- 
a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java @@ -7,7 +7,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder @JsonInclude(JsonInclude.Include.NON_NULL) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java index 898f768cf999a..6e0fc5deb0b3c 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.entities; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; @@ -52,14 +54,13 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - - @RestController @RequiredArgsConstructor @RequestMapping("/entities/v1") @Slf4j -@Tag(name = "Entities", description = "APIs for ingesting and accessing entities and their constituent aspects") +@Tag( + name = "Entities", + description = "APIs for ingesting and accessing entities and their constituent aspects") public class EntitiesController { private final EntityService _entityService; @@ -76,27 +77,42 @@ public void initBinder(WebDataBinder binder) { @GetMapping(value = "/latest", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<UrnResponseMap> getEntities( - @Parameter(name = "urns", required = true, description = "A list of raw urn strings, only supports a single entity type per request.") - @RequestParam("urns") @Nonnull String[] urns, + @Parameter( + name = "urns", + required = true, + description = + "A list of raw urn strings, only supports a single entity type per request.") + @RequestParam("urns") + @Nonnull + String[] urns, @Parameter(name = "aspectNames", description = "The list of aspect names to retrieve") - @RequestParam(name = "aspectNames", required = false) @Nullable String[] aspectNames) { + @RequestParam(name = "aspectNames", required = false) + @Nullable + String[] aspectNames) { Timer.Context context = MetricUtils.timer("getEntities").time(); final Set<Urn> entityUrns = Arrays.stream(urns) - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access + // Have to decode here because of frontend routing, does No-op for already unencoded + // through direct API access .map(URLDecoder::decode) - .map(UrnUtils::getUrn).collect(Collectors.toSet()); + .map(UrnUtils::getUrn) + .collect(Collectors.toSet()); log.debug("GET ENTITIES {}", entityUrns); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) - ))); - - List<Optional<EntitySpec>> resourceSpecs = entityUrns.stream() - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - 
.collect(Collectors.toList()); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); + + List<Optional<EntitySpec>> resourceSpecs = + entityUrns.stream() + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); } if (entityUrns.size() <= 0) { @@ -104,19 +120,26 @@ public ResponseEntity<UrnResponseMap> getEntities( } // TODO: Only supports one entity type at a time, may cause confusion final String entityName = urnToEntityName(entityUrns.iterator().next()); - final Set<String> projectedAspects = aspectNames == null ? _entityService.getEntityAspectNames(entityName) - : new HashSet<>(Arrays.asList(aspectNames)); + final Set<String> projectedAspects = + aspectNames == null + ? _entityService.getEntityAspectNames(entityName) + : new HashSet<>(Arrays.asList(aspectNames)); Throwable exceptionally = null; try { - return ResponseEntity.ok(UrnResponseMap.builder() - .responses(MappingUtil.mapServiceResponse(_entityService - .getEntitiesV2(entityName, entityUrns, projectedAspects), _objectMapper)) - .build()); + return ResponseEntity.ok( + UrnResponseMap.builder() + .responses( + MappingUtil.mapServiceResponse( + _entityService.getEntitiesV2(entityName, entityUrns, projectedAspects), + _objectMapper)) + .build()); } catch (Exception e) { exceptionally = e; throw new RuntimeException( - String.format("Failed to batch get entities with urns: %s, projectedAspects: %s", entityUrns, - projectedAspects), e); + String.format( + "Failed to batch get entities with urns: %s, projectedAspects: %s", + entityUrns, projectedAspects), + e); } finally { if (exceptionally != null) { MetricUtils.counter(MetricRegistry.name("getEntities", "failed")).inc(); @@ -134,24 +157,34 @@ public ResponseEntity<List<String>> postEntities( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()) - ))); - List<com.linkedin.mxe.MetadataChangeProposal> proposals = aspectRequests.stream() - .map(MappingUtil::mapToProposal) - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .collect(Collectors.toList()); - - if (restApiAuthorizationEnabled && !MappingUtil.authorizeProposals(proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); + List<com.linkedin.mxe.MetadataChangeProposal> proposals = + aspectRequests.stream() + .map(MappingUtil::mapToProposal) + .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .collect(Collectors.toList()); + + if (restApiAuthorizationEnabled + && !MappingUtil.authorizeProposals( + proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { throw new 
UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - List<Pair<String, Boolean>> responses = proposals.stream() - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .collect(Collectors.toList()); + List<Pair<String, Boolean>> responses = + proposals.stream() + .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) + .collect(Collectors.toList()); if (responses.stream().anyMatch(Pair::getSecond)) { return ResponseEntity.status(HttpStatus.CREATED) - .body(responses.stream().filter(Pair::getSecond).map(Pair::getFirst).collect(Collectors.toList())); + .body( + responses.stream() + .filter(Pair::getSecond) + .map(Pair::getFirst) + .collect(Collectors.toList())); } else { return ResponseEntity.ok(Collections.emptyList()); } @@ -159,52 +192,83 @@ public ResponseEntity<List<String>> postEntities( @DeleteMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<List<RollbackRunResultDto>> deleteEntities( - @Parameter(name = "urns", required = true, description = "A list of raw urn strings, only supports a single entity type per request.") - @RequestParam("urns") @Nonnull String[] urns, - @Parameter(name = "soft", description = "Determines whether the delete will be soft or hard, defaults to true for soft delete") - @RequestParam(value = "soft", defaultValue = "true") boolean soft) { + @Parameter( + name = "urns", + required = true, + description = + "A list of raw urn strings, only supports a single entity type per request.") + @RequestParam("urns") + @Nonnull + String[] urns, + @Parameter( + name = "soft", + description = + "Determines whether the delete will be soft or hard, defaults to true for soft delete") + @RequestParam(value = "soft", defaultValue = "true") + boolean soft) { Throwable exceptionally = null; try (Timer.Context context = MetricUtils.timer("deleteEntities").time()) { - Authentication authentication = AuthenticationContext.getAuthentication(); - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType()) - ))); - final Set<Urn> entityUrns = Arrays.stream(urns) - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access - .map(URLDecoder::decode) - .map(UrnUtils::getUrn).collect(Collectors.toSet()); - - List<Optional<EntitySpec>> resourceSpecs = entityUrns.stream() - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { - UnauthorizedException unauthorizedException = new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); - exceptionally = unauthorizedException; - throw unauthorizedException; - } + Authentication authentication = AuthenticationContext.getAuthentication(); + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); + final Set<Urn> entityUrns = + Arrays.stream(urns) + // Have to decode here because of frontend routing, does No-op for already unencoded + // through direct API access + .map(URLDecoder::decode) + 
.map(UrnUtils::getUrn) + .collect(Collectors.toSet()); - if (!soft) { - return ResponseEntity.ok(entityUrns.stream() - .map(_entityService::deleteUrn) - .map(rollbackRunResult -> MappingUtil.mapRollbackRunResult(rollbackRunResult, _objectMapper)) - .collect(Collectors.toList())); - } else { - List<UpsertAspectRequest> deleteRequests = entityUrns.stream() - .map(entityUrn -> MappingUtil.createStatusRemoval(entityUrn, _entityService)) - .collect(Collectors.toList()); - - return ResponseEntity.ok(Collections.singletonList(RollbackRunResultDto.builder() - .rowsRolledBack(deleteRequests.stream() - .map(MappingUtil::mapToProposal) - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .filter(Pair::getSecond) - .map(Pair::getFirst) - .map(urnString -> AspectRowSummary.builder().urn(urnString).build()) - .collect(Collectors.toList())) - .rowsDeletedFromEntityDeletion(deleteRequests.size()) - .build())); + List<Optional<EntitySpec>> resourceSpecs = + entityUrns.stream() + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + UnauthorizedException unauthorizedException = + new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); + exceptionally = unauthorizedException; + throw unauthorizedException; + } + + if (!soft) { + return ResponseEntity.ok( + entityUrns.stream() + .map(_entityService::deleteUrn) + .map( + rollbackRunResult -> + MappingUtil.mapRollbackRunResult(rollbackRunResult, _objectMapper)) + .collect(Collectors.toList())); + } else { + List<UpsertAspectRequest> deleteRequests = + entityUrns.stream() + .map(entityUrn -> MappingUtil.createStatusRemoval(entityUrn, _entityService)) + .collect(Collectors.toList()); + + return ResponseEntity.ok( + Collections.singletonList( + RollbackRunResultDto.builder() + .rowsRolledBack( + deleteRequests.stream() + .map(MappingUtil::mapToProposal) + .map( + proposal -> + MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .map( + proposal -> + MappingUtil.ingestProposal( + proposal, actorUrnStr, _entityService)) + .filter(Pair::getSecond) + .map(Pair::getFirst) + .map(urnString -> AspectRowSummary.builder().urn(urnString).build()) + .collect(Collectors.toList())) + .rowsDeletedFromEntityDeletion(deleteRequests.size()) + .build())); } } catch (Exception e) { exceptionally = e; diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java index c90603bf88c31..79a219f891fc9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java @@ -10,7 +10,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.client.RequestOptions; @@ -25,7 +24,6 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController 
@RequestMapping("/") @Tag(name = "HealthCheck", description = "An API for checking health of GMS and its clients.") @@ -33,26 +31,31 @@ public class HealthCheckController { @Autowired @Qualifier("elasticSearchRestHighLevelClient") private RestHighLevelClient elasticClient; + private final Supplier<ResponseEntity<String>> memoizedSupplier; public HealthCheckController(ConfigurationProvider config) { - this.memoizedSupplier = Suppliers.memoizeWithExpiration( - this::getElasticHealth, config.getHealthCheck().getCacheDurationSeconds(), TimeUnit.SECONDS); + this.memoizedSupplier = + Suppliers.memoizeWithExpiration( + this::getElasticHealth, + config.getHealthCheck().getCacheDurationSeconds(), + TimeUnit.SECONDS); } @GetMapping(path = "/check/ready", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<Boolean> getCombinedHealthCheck(String... checks) { return ResponseEntity.status(getCombinedDebug(checks).getStatusCode()) - .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful()); + .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful()); } /** - * Combined health check endpoint for checking GMS clients. - * For now, just checks the health of the ElasticSearch client - * @return A ResponseEntity with a Map of String (component name) to ResponseEntity (the health check status of - * that component). The status code will be 200 if all components are okay, and 500 if one or more components are not - * healthy. + * Combined health check endpoint for checking GMS clients. For now, just checks the health of the + * ElasticSearch client + * + * @return A ResponseEntity with a Map of String (component name) to ResponseEntity (the health + * check status of that component). The status code will be 200 if all components are okay, + * and 500 if one or more components are not healthy. */ @GetMapping(path = "/debug/ready", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(String... checks) { @@ -60,19 +63,26 @@ public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(Stri healthChecks.put("elasticsearch", this::getElasticDebugWithCache); // Add new components here - List<String> componentsToCheck = checks != null && checks.length > 0 - ? Arrays.asList(checks) - : new ArrayList<>(healthChecks.keySet()); + List<String> componentsToCheck = + checks != null && checks.length > 0 + ? 
Arrays.asList(checks) + : new ArrayList<>(healthChecks.keySet()); Map<String, ResponseEntity<String>> componentHealth = new HashMap<>(); for (String check : componentsToCheck) { - componentHealth.put(check, - healthChecks.getOrDefault(check, - () -> ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE).body("Unrecognized component " + check)) + componentHealth.put( + check, + healthChecks + .getOrDefault( + check, + () -> + ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE) + .body("Unrecognized component " + check)) .get()); } - boolean isHealthy = componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); + boolean isHealthy = + componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); if (isHealthy) { return ResponseEntity.ok(componentHealth); } @@ -82,11 +92,12 @@ public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(Stri @GetMapping(path = "/check/elastic", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<Boolean> getElasticHealthWithCache() { return ResponseEntity.status(getElasticDebugWithCache().getStatusCode()) - .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); + .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); } /** * Checks the memoized cache for the latest elastic health check result + * * @return The ResponseEntity containing the health check result */ @GetMapping(path = "/debug/elastic", produces = MediaType.APPLICATION_JSON_VALUE) @@ -96,13 +107,15 @@ public ResponseEntity<String> getElasticDebugWithCache() { /** * Query ElasticSearch health endpoint + * * @return A response including the result from ElasticSearch */ private ResponseEntity<String> getElasticHealth() { String responseString = null; try { ClusterHealthRequest request = new ClusterHealthRequest(); - ClusterHealthResponse response = elasticClient.cluster().health(request, RequestOptions.DEFAULT); + ClusterHealthResponse response = + elasticClient.cluster().health(request, RequestOptions.DEFAULT); boolean isHealthy = !response.isTimedOut() && response.getStatus() != ClusterHealthStatus.RED; responseString = response.toString(); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java index 2e243f4c8df9e..3fa926924aabe 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java @@ -9,7 +9,6 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @Slf4j @RestController @RequestMapping("/up") diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java index f29461734ebfc..f7c848f91a64c 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java @@ -6,15 +6,15 @@ import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import 
com.datahub.authorization.DisjunctivePrivilegeGroup; -import io.datahubproject.openapi.util.ElasticsearchUtils; import com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import io.datahubproject.openapi.util.ElasticsearchUtils; import io.swagger.v3.oas.annotations.tags.Tag; import java.util.List; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.tasks.GetTaskResponse; import org.json.JSONObject; +import org.opensearch.client.tasks.GetTaskResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -28,11 +28,12 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController @RequestMapping("/operations/elasticSearch") @Slf4j -@Tag(name = "ElasticSearchOperations", description = "An API for managing your elasticsearch instance") +@Tag( + name = "ElasticSearchOperations", + description = "An API for managing your elasticsearch instance") public class OperationsController { private final AuthorizerChain _authorizerChain; @@ -51,26 +52,36 @@ public OperationsController(AuthorizerChain authorizerChain) { public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); } + @GetMapping(path = "/getTaskStatus", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<String> getTaskStatus(String task) { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType()) - ))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { - return ResponseEntity.status(HttpStatus.FORBIDDEN).body( - String.format(actorUrnStr + " is not authorized to get ElasticSearch task status")); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType())))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { + return ResponseEntity.status(HttpStatus.FORBIDDEN) + .body(String.format(actorUrnStr + " is not authorized to get ElasticSearch task status")); } if (!ElasticsearchUtils.isTaskIdValid(task)) { - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body( - String.format("Task ID should be in the form nodeId:taskId e.g. aB1cdEf2GHI-JKLMnoPQr3:123456 (got %s)", task)); + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body( + String.format( + "Task ID should be in the form nodeId:taskId e.g. 
aB1cdEf2GHI-JKLMnoPQr3:123456 (got %s)", + task)); } - String nodeIdToQuery = task.split(":")[0]; + String nodeIdToQuery = task.split(":")[0]; long taskIdToQuery = Long.parseLong(task.split(":")[1]); - java.util.Optional<GetTaskResponse> res = _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); + java.util.Optional<GetTaskResponse> res = + _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); if (res.isEmpty()) { - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); } GetTaskResponse resp = res.get(); JSONObject j = new JSONObject(); @@ -80,4 +91,4 @@ public ResponseEntity<String> getTaskStatus(String task) { j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); return ResponseEntity.ok(j.toString()); } -} \ No newline at end of file +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java index cfb516913eb09..370f2019a42dd 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java @@ -32,12 +32,13 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController @RequiredArgsConstructor @RequestMapping("/platform/entities/v1") @Slf4j -@Tag(name = "Platform Entities", description = "Platform level APIs intended for lower level access to entities") +@Tag( + name = "Platform Entities", + description = "Platform level APIs intended for lower level access to entities") public class PlatformEntitiesController { private final EntityService _entityService; @@ -60,24 +61,33 @@ public ResponseEntity<List<String>> postEntities( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - List<com.linkedin.mxe.MetadataChangeProposal> proposals = metadataChangeProposals.stream() - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .collect(Collectors.toList()); - DisjunctivePrivilegeGroup - orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()) - ))); + List<com.linkedin.mxe.MetadataChangeProposal> proposals = + metadataChangeProposals.stream() + .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .collect(Collectors.toList()); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled && !MappingUtil.authorizeProposals(proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { + if (restApiAuthorizationEnabled + && !MappingUtil.authorizeProposals( + proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - 
List<Pair<String, Boolean>> responses = proposals.stream() - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .collect(Collectors.toList()); + List<Pair<String, Boolean>> responses = + proposals.stream() + .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) + .collect(Collectors.toList()); if (responses.stream().anyMatch(Pair::getSecond)) { return ResponseEntity.status(HttpStatus.CREATED) - .body(responses.stream().filter(Pair::getSecond).map(Pair::getFirst).collect(Collectors.toList())); + .body( + responses.stream() + .filter(Pair::getSecond) + .map(Pair::getFirst) + .collect(Collectors.toList())); } else { return ResponseEntity.ok(Collections.emptyList()); } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java index 4641fed3a8610..4ceed6a11b973 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.relationships; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; @@ -45,9 +47,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - - @RestController @RequiredArgsConstructor @RequestMapping("/relationships/v1") @@ -59,6 +58,7 @@ public enum RelationshipDirection { INCOMING, OUTGOING } + private static final int MAX_DOWNSTREAM_CNT = 200; private final GraphService _graphService; private final AuthorizerChain _authorizerChain; @@ -71,83 +71,127 @@ public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); } - private RelatedEntitiesResult getRelatedEntities(String rawUrn, List<String> relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count) { + private RelatedEntitiesResult getRelatedEntities( + String rawUrn, + List<String> relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count) { start = start == null ? 0 : start; count = count == null ? 
MAX_DOWNSTREAM_CNT : count; com.linkedin.metadata.query.filter.RelationshipDirection restLiDirection; switch (direction) { - case INCOMING: { - restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING; - break; - } - case OUTGOING: { - restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.OUTGOING; - break; - } - default: { - throw new RuntimeException("Unexpected relationship direction " + direction); - } + case INCOMING: + { + restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING; + break; + } + case OUTGOING: + { + restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.OUTGOING; + break; + } + default: + { + throw new RuntimeException("Unexpected relationship direction " + direction); + } } - return _graphService.findRelatedEntities(null, newFilter("urn", rawUrn), null, QueryUtils.EMPTY_FILTER, - relationshipTypes, newRelationshipFilter(QueryUtils.EMPTY_FILTER, restLiDirection), start, count); + return _graphService.findRelatedEntities( + null, + newFilter("urn", rawUrn), + null, + QueryUtils.EMPTY_FILTER, + relationshipTypes, + newRelationshipFilter(QueryUtils.EMPTY_FILTER, restLiDirection), + start, + count); } @GetMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE) - @Operation(responses = { @ApiResponse(responseCode = "0", description = "", - content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class)))}) + @Operation( + responses = { + @ApiResponse( + responseCode = "0", + description = "", + content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class))) + }) public ResponseEntity<RelatedEntitiesResult> getRelationships( - @Parameter(name = "urn", required = true, - description = "The urn for the entity whose relationships are being queried") - @RequestParam("urn") - @Nonnull String urn, - @Parameter(name = "relationshipTypes", required = true, - description = "The list of relationship types to traverse") - @RequestParam(name = "relationshipTypes") - @Nonnull String[] relationshipTypes, - @Parameter(name = "direction", required = true, - description = "The directionality of the relationship") - @RequestParam(name = "direction") - @Nonnull RelationshipsController.RelationshipDirection direction, - @Parameter(name = "start", description = "An offset for the relationships to return from. " - + "Useful for pagination.") - @RequestParam(name = "start", defaultValue = "0") - @Nullable Integer start, - @Parameter(name = "count", description = "A count of relationships that will be returned " - + "starting from the offset. Useful for pagination.") - @RequestParam(name = "count", defaultValue = "200") - @Nullable Integer count) { + @Parameter( + name = "urn", + required = true, + description = "The urn for the entity whose relationships are being queried") + @RequestParam("urn") + @Nonnull + String urn, + @Parameter( + name = "relationshipTypes", + required = true, + description = "The list of relationship types to traverse") + @RequestParam(name = "relationshipTypes") + @Nonnull + String[] relationshipTypes, + @Parameter( + name = "direction", + required = true, + description = "The directionality of the relationship") + @RequestParam(name = "direction") + @Nonnull + RelationshipsController.RelationshipDirection direction, + @Parameter( + name = "start", + description = + "An offset for the relationships to return from. 
" + "Useful for pagination.") + @RequestParam(name = "start", defaultValue = "0") + @Nullable + Integer start, + @Parameter( + name = "count", + description = + "A count of relationships that will be returned " + + "starting from the offset. Useful for pagination.") + @RequestParam(name = "count", defaultValue = "200") + @Nullable + Integer count) { Timer.Context context = MetricUtils.timer("getRelationships").time(); - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access + // Have to decode here because of frontend routing, does No-op for already unencoded through + // direct API access final Urn entityUrn = UrnUtils.getUrn(URLDecoder.decode(urn, Charset.forName("UTF-8"))); log.debug("GET Relationships {}", entityUrn); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) - // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the privileges between these APIs. - ))); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) + // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the + // privileges between these APIs. + ))); List<Optional<EntitySpec>> resourceSpecs = - Collections.singletonList(Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, - orGroup)) { + Collections.singletonList( + Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get relationships."); } Throwable exceptionally = null; try { return ResponseEntity.ok( - getRelatedEntities(entityUrn.toString(), Arrays.asList(relationshipTypes), direction, start, - count)); + getRelatedEntities( + entityUrn.toString(), Arrays.asList(relationshipTypes), direction, start, count)); } catch (Exception e) { exceptionally = e; throw new RuntimeException( - String.format("Failed to batch get relationships with urn: %s, relationshipTypes: %s", urn, - Arrays.toString(relationshipTypes)), e); + String.format( + "Failed to batch get relationships with urn: %s, relationshipTypes: %s", + urn, Arrays.toString(relationshipTypes)), + e); } finally { if (exceptionally != null) { MetricUtils.counter(MetricRegistry.name("getRelationships", "failed")).inc(); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java index fbde9e8072002..a84c50e74baf2 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java @@ -30,11 +30,13 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; - @RestController 
@AllArgsConstructor @RequestMapping("/timeline/v1") -@Tag(name = "Timeline", description = "An API for retrieving historical updates to entities and their related documentation.") +@Tag( + name = "Timeline", + description = + "An API for retrieving historical updates to entities and their related documentation.") public class TimelineController { private final TimelineService _timelineService; @@ -44,7 +46,6 @@ public class TimelineController { private Boolean restApiAuthorizationEnabled; /** - * * @param rawUrn * @param startTime * @param endTime @@ -60,7 +61,8 @@ public ResponseEntity<List<ChangeTransaction>> getTimeline( @RequestParam(defaultValue = "-1") long startTime, @RequestParam(defaultValue = "0") long endTime, @RequestParam(defaultValue = "false") boolean raw, - @RequestParam Set<ChangeCategory> categories) throws URISyntaxException, JsonProcessingException { + @RequestParam Set<ChangeCategory> categories) + throws URISyntaxException, JsonProcessingException { // Make request params when implemented String startVersionStamp = null; String endVersionStamp = null; @@ -68,11 +70,18 @@ public ResponseEntity<List<ChangeTransaction>> getTimeline( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); EntitySpec resourceSpec = new EntitySpec(urn.getEntityType(), rawUrn); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorized(_authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorized( + _authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - return ResponseEntity.ok(_timelineService.getTimeline(urn, categories, startTime, endTime, startVersionStamp, endVersionStamp, raw)); + return ResponseEntity.ok( + _timelineService.getTimeline( + urn, categories, startTime, endTime, startVersionStamp, endVersionStamp, raw)); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java index 9ef14eefc429b..7b13191bc1b38 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java @@ -1,7 +1,8 @@ package io.datahubproject.openapi.util; public class ElasticsearchUtils { - private ElasticsearchUtils() { } + private ElasticsearchUtils() {} + public static boolean isTaskIdValid(String task) { if (task.matches("^[a-zA-Z0-9-_]+:[0-9]+$")) { try { diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index 21dc5a4c8a0d6..0eb3e2d6b8c6e 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ 
b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -1,11 +1,15 @@ package io.datahubproject.openapi.util; +import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; +import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; +import static java.nio.charset.StandardCharsets.UTF_8; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -21,13 +25,13 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.Aspect; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.RollbackRunResult; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.entity.validation.ValidationException; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.GenericAspect; @@ -35,7 +39,15 @@ import com.linkedin.util.Pair; import io.datahubproject.openapi.dto.RollbackRunResultDto; import io.datahubproject.openapi.dto.UpsertAspectRequest; - +import io.datahubproject.openapi.generated.AspectRowSummary; +import io.datahubproject.openapi.generated.AspectType; +import io.datahubproject.openapi.generated.AuditStamp; +import io.datahubproject.openapi.generated.EntityResponse; +import io.datahubproject.openapi.generated.EnvelopedAspect; +import io.datahubproject.openapi.generated.MetadataChangeProposal; +import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue; +import io.datahubproject.openapi.generated.OneOfGenericAspectValue; +import io.datahubproject.openapi.generated.Status; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; @@ -51,16 +63,6 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import io.datahubproject.openapi.generated.AspectRowSummary; -import io.datahubproject.openapi.generated.AspectType; -import io.datahubproject.openapi.generated.AuditStamp; -import io.datahubproject.openapi.generated.EntityResponse; -import io.datahubproject.openapi.generated.EnvelopedAspect; -import io.datahubproject.openapi.generated.MetadataChangeProposal; -import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue; -import io.datahubproject.openapi.generated.OneOfGenericAspectValue; -import io.datahubproject.openapi.generated.Status; import lombok.extern.slf4j.Slf4j; import org.apache.avro.Schema; import org.reflections.Reflections; @@ -72,36 +74,33 @@ import org.springframework.http.MediaType; import org.springframework.web.client.HttpClientErrorException; -import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; -import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; -import static 
java.nio.charset.StandardCharsets.UTF_8; - @Slf4j public class MappingUtil { - private MappingUtil() { - - } + private MappingUtil() {} private static final JsonNodeFactory NODE_FACTORY = JsonNodeFactory.instance; - private static final Map<String, Class<? extends OneOfEnvelopedAspectValue>> ENVELOPED_ASPECT_TYPE_MAP = - new HashMap<>(); + private static final Map<String, Class<? extends OneOfEnvelopedAspectValue>> + ENVELOPED_ASPECT_TYPE_MAP = new HashMap<>(); private static final Map<Class<? extends OneOfGenericAspectValue>, String> ASPECT_NAME_MAP = new HashMap<>(); - private static final Map<String, Class<? extends RecordTemplate>> PEGASUS_TYPE_MAP = new HashMap<>(); + private static final Map<String, Class<? extends RecordTemplate>> PEGASUS_TYPE_MAP = + new HashMap<>(); private static final String DISCRIMINATOR = "__type"; private static final String PEGASUS_PACKAGE = "com.linkedin"; private static final String OPENAPI_PACKAGE = "io.datahubproject.openapi.generated"; - private static final ReflectionCache REFLECT_AVRO = ReflectionCache.builder() - .basePackage("com.linkedin.pegasus2avro").build(); - private static final ReflectionCache REFLECT_OPENAPI = ReflectionCache.builder() - .basePackage(OPENAPI_PACKAGE).build(); + private static final ReflectionCache REFLECT_AVRO = + ReflectionCache.builder().basePackage("com.linkedin.pegasus2avro").build(); + private static final ReflectionCache REFLECT_OPENAPI = + ReflectionCache.builder().basePackage(OPENAPI_PACKAGE).build(); static { // Build a map from __type name to generated class - ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(false); + ClassPathScanningCandidateComponentProvider provider = + new ClassPathScanningCandidateComponentProvider(false); provider.addIncludeFilter(new AssignableTypeFilter(OneOfEnvelopedAspectValue.class)); - Set<BeanDefinition> components = provider.findCandidateComponents("io/datahubproject/openapi/generated"); + Set<BeanDefinition> components = + provider.findCandidateComponents("io/datahubproject/openapi/generated"); components.forEach(MappingUtil::putEnvelopedAspectEntry); provider = new ClassPathScanningCandidateComponentProvider(false); @@ -111,36 +110,43 @@ private MappingUtil() { // Build a map from fully qualified Pegasus generated class name to class new Reflections(PEGASUS_PACKAGE, new SubTypesScanner(false)) - .getSubTypesOf(RecordTemplate.class) - .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass)); + .getSubTypesOf(RecordTemplate.class) + .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass)); } - public static Map<String, EntityResponse> mapServiceResponse(Map<Urn, com.linkedin.entity.EntityResponse> serviceResponse, - ObjectMapper objectMapper) { - return serviceResponse.entrySet() - .stream() - .collect(Collectors.toMap(entry -> entry.getKey().toString(), entry -> mapEntityResponse(entry.getValue(), objectMapper))); + public static Map<String, EntityResponse> mapServiceResponse( + Map<Urn, com.linkedin.entity.EntityResponse> serviceResponse, ObjectMapper objectMapper) { + return serviceResponse.entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), + entry -> mapEntityResponse(entry.getValue(), objectMapper))); } - public static EntityResponse mapEntityResponse(com.linkedin.entity.EntityResponse entityResponse, ObjectMapper objectMapper) { + public static EntityResponse mapEntityResponse( + com.linkedin.entity.EntityResponse entityResponse, ObjectMapper 
objectMapper) { return EntityResponse.builder() - .entityName(entityResponse.getEntityName()) - .urn(entityResponse.getUrn().toString()) - .aspects(entityResponse.getAspects() - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> mapEnvelopedAspect(entry.getValue(), objectMapper)))).build(); + .entityName(entityResponse.getEntityName()) + .urn(entityResponse.getUrn().toString()) + .aspects( + entityResponse.getAspects().entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> mapEnvelopedAspect(entry.getValue(), objectMapper)))) + .build(); } - public static EnvelopedAspect mapEnvelopedAspect(com.linkedin.entity.EnvelopedAspect envelopedAspect, - ObjectMapper objectMapper) { + public static EnvelopedAspect mapEnvelopedAspect( + com.linkedin.entity.EnvelopedAspect envelopedAspect, ObjectMapper objectMapper) { return EnvelopedAspect.builder() - .name(envelopedAspect.getName()) - .timestamp(envelopedAspect.getTimestamp()) - .version(envelopedAspect.getVersion()) - .type(AspectType.fromValue(envelopedAspect.getType().name().toUpperCase(Locale.ROOT))) - .created(objectMapper.convertValue(envelopedAspect.getCreated().data(), AuditStamp.class)) - .value(mapAspectValue(envelopedAspect.getName(), envelopedAspect.getValue(), objectMapper)).build(); + .name(envelopedAspect.getName()) + .timestamp(envelopedAspect.getTimestamp()) + .version(envelopedAspect.getVersion()) + .type(AspectType.fromValue(envelopedAspect.getType().name().toUpperCase(Locale.ROOT))) + .created(objectMapper.convertValue(envelopedAspect.getCreated().data(), AuditStamp.class)) + .value(mapAspectValue(envelopedAspect.getName(), envelopedAspect.getValue(), objectMapper)) + .build(); } private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataMap dataMap) { @@ -148,20 +154,23 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM dataMap.put(DISCRIMINATOR, parentClazz.getSimpleName()); } - Set<Map.Entry<String, DataMap>> requiresDiscriminator = dataMap.entrySet().stream() + Set<Map.Entry<String, DataMap>> requiresDiscriminator = + dataMap.entrySet().stream() .filter(e -> e.getValue() instanceof DataMap) .filter(e -> shouldCollapseClassToDiscriminator(e.getKey())) .map(e -> Map.entry(e.getKey(), (DataMap) e.getValue())) .collect(Collectors.toSet()); // DataMap doesn't support concurrent access - requiresDiscriminator.forEach(e -> { - dataMap.remove(e.getKey()); - dataMap.put(DISCRIMINATOR, e.getKey().substring(e.getKey().lastIndexOf(".") + 1)); - dataMap.putAll(e.getValue()); - }); + requiresDiscriminator.forEach( + e -> { + dataMap.remove(e.getKey()); + dataMap.put(DISCRIMINATOR, e.getKey().substring(e.getKey().lastIndexOf(".") + 1)); + dataMap.putAll(e.getValue()); + }); // Look through all the nested classes for possible discriminator requirements - Set<Pair<List<String>, DataMap>> nestedDataMaps = getDataMapPaths(new LinkedList<>(), dataMap).collect(Collectors.toSet()); + Set<Pair<List<String>, DataMap>> nestedDataMaps = + getDataMapPaths(new LinkedList<>(), dataMap).collect(Collectors.toSet()); // DataMap doesn't support concurrent access for (Pair<List<String>, DataMap> nestedDataMapPath : nestedDataMaps) { List<String> nestedPath = nestedDataMapPath.getFirst(); @@ -178,7 +187,10 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM nextClazz = getMethod != null ? 
getMethod.getReturnType() : null; if (nextClazz != null && "List".equals(nextClazz.getSimpleName())) { - String listElemClassName = getMethod.getGenericReturnType().getTypeName() + String listElemClassName = + getMethod + .getGenericReturnType() + .getTypeName() .replace("java.util.List<", "") .replace(">", ""); try { @@ -192,7 +204,7 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM } if ((nextClazz != parentClazz && shouldCheckTypeMethod(nextClazz)) - || nested.keySet().stream().anyMatch(MappingUtil::shouldCollapseClassToDiscriminator)) { + || nested.keySet().stream().anyMatch(MappingUtil::shouldCollapseClassToDiscriminator)) { insertDiscriminator(nextClazz, nested); } } @@ -201,42 +213,49 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM return dataMap; } - /** * Stream paths to DataMaps + * * @param paths current path * @param data current DataMap or DataList * @return path to all nested DataMaps */ - private static Stream<Pair<List<String>, DataMap>> getDataMapPaths(List<String> paths, Object data) { + private static Stream<Pair<List<String>, DataMap>> getDataMapPaths( + List<String> paths, Object data) { if (data instanceof DataMap) { - return ((DataMap) data).entrySet().stream() + return ((DataMap) data) + .entrySet().stream() .filter(e -> e.getValue() instanceof DataMap || e.getValue() instanceof DataList) - .flatMap(entry -> { - List<String> thisPath = new LinkedList<>(paths); - thisPath.add(entry.getKey()); - if (entry.getValue() instanceof DataMap) { - return Stream.concat( + .flatMap( + entry -> { + List<String> thisPath = new LinkedList<>(paths); + thisPath.add(entry.getKey()); + if (entry.getValue() instanceof DataMap) { + return Stream.concat( Stream.of(Pair.of(thisPath, (DataMap) entry.getValue())), - getDataMapPaths(thisPath, entry.getValue()) - ); - } else { - // DataList - return getDataMapPaths(thisPath, entry.getValue()); - } - }); + getDataMapPaths(thisPath, entry.getValue())); + } else { + // DataList + return getDataMapPaths(thisPath, entry.getValue()); + } + }); } else if (data instanceof DataList) { DataList dataList = (DataList) data; return IntStream.range(0, dataList.size()) - .mapToObj(idx -> Pair.of(idx, dataList.get(idx))) - .filter(idxObject -> idxObject.getValue() instanceof DataMap || idxObject.getValue() instanceof DataList) - .flatMap(idxObject -> { + .mapToObj(idx -> Pair.of(idx, dataList.get(idx))) + .filter( + idxObject -> + idxObject.getValue() instanceof DataMap + || idxObject.getValue() instanceof DataList) + .flatMap( + idxObject -> { Object item = idxObject.getValue(); List<String> thisPath = new LinkedList<>(paths); thisPath.add("[" + idxObject.getKey() + "]"); if (item instanceof DataMap) { - return Stream.concat(Stream.of(Pair.of(thisPath, (DataMap) item)), - getDataMapPaths(thisPath, item)); + return Stream.concat( + Stream.of(Pair.of(thisPath, (DataMap) item)), + getDataMapPaths(thisPath, item)); } else { // DataList return getDataMapPaths(thisPath, item); @@ -246,8 +265,10 @@ private static Stream<Pair<List<String>, DataMap>> getDataMapPaths(List<String> return Stream.empty(); } - public static OneOfEnvelopedAspectValue mapAspectValue(String aspectName, Aspect aspect, ObjectMapper objectMapper) { - Class<? extends OneOfEnvelopedAspectValue> aspectClass = ENVELOPED_ASPECT_TYPE_MAP.get(aspectName); + public static OneOfEnvelopedAspectValue mapAspectValue( + String aspectName, Aspect aspect, ObjectMapper objectMapper) { + Class<? 
extends OneOfEnvelopedAspectValue> aspectClass = + ENVELOPED_ASPECT_TYPE_MAP.get(aspectName); DataMap wrapper = insertDiscriminator(aspectClass, aspect.data()); try { String dataMapAsJson = objectMapper.writeValueAsString(wrapper); @@ -261,7 +282,8 @@ public static OneOfEnvelopedAspectValue mapAspectValue(String aspectName, Aspect private static void putEnvelopedAspectEntry(BeanDefinition beanDefinition) { try { Class<? extends OneOfEnvelopedAspectValue> cls = - (Class<? extends OneOfEnvelopedAspectValue>) Class.forName(beanDefinition.getBeanClassName()); + (Class<? extends OneOfEnvelopedAspectValue>) + Class.forName(beanDefinition.getBeanClassName()); String aspectName = getAspectName(cls); ENVELOPED_ASPECT_TYPE_MAP.put(aspectName, cls); } catch (ClassNotFoundException e) { @@ -273,7 +295,8 @@ private static void putEnvelopedAspectEntry(BeanDefinition beanDefinition) { private static void putGenericAspectEntry(BeanDefinition beanDefinition) { try { Class<? extends OneOfGenericAspectValue> cls = - (Class<? extends OneOfGenericAspectValue>) Class.forName(beanDefinition.getBeanClassName()); + (Class<? extends OneOfGenericAspectValue>) + Class.forName(beanDefinition.getBeanClassName()); String aspectName = getAspectName(cls); ASPECT_NAME_MAP.put(cls, aspectName); } catch (ClassNotFoundException e) { @@ -288,14 +311,17 @@ private static String getAspectName(Class<?> cls) { } private static boolean shouldCheckTypeMethod(@Nullable Class<?> parentClazz) { - return Optional.ofNullable(parentClazz).map(cls -> cls.getName().startsWith(OPENAPI_PACKAGE + ".")).orElse(false); + return Optional.ofNullable(parentClazz) + .map(cls -> cls.getName().startsWith(OPENAPI_PACKAGE + ".")) + .orElse(false); } private static boolean shouldCollapseClassToDiscriminator(String className) { return className.startsWith(PEGASUS_PACKAGE + "."); } - private static Optional<String> shouldDiscriminate(String parentShortClass, String fieldName, ObjectNode node) { + private static Optional<String> shouldDiscriminate( + String parentShortClass, String fieldName, ObjectNode node) { try { if (parentShortClass != null) { Class<?> pegasus2AvroClazz = REFLECT_AVRO.lookupClass(parentShortClass, true); @@ -304,7 +330,8 @@ private static Optional<String> shouldDiscriminate(String parentShortClass, Stri Schema.Field avroField = avroSchema.getField(fieldName); if (avroField.schema().isUnion()) { - Class<?> discriminatedClazz = REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); + Class<?> discriminatedClazz = + REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); return Optional.of(discriminatedClazz.getName().replace(".pegasus2avro", "")); } } @@ -313,7 +340,8 @@ private static Optional<String> shouldDiscriminate(String parentShortClass, Stri Iterator<String> itr = node.fieldNames(); itr.next(); if (!itr.hasNext()) { // only contains discriminator - Class<?> discriminatedClazz = REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); + Class<?> discriminatedClazz = + REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); return Optional.of(discriminatedClazz.getName().replace(".pegasus2avro", "")); } @@ -326,16 +354,22 @@ private static Optional<String> shouldDiscriminate(String parentShortClass, Stri private static void replaceDiscriminator(ObjectNode node) { replaceDiscriminator(null, null, null, node); } - private static void replaceDiscriminator(@Nullable ObjectNode parentNode, @Nullable String parentDiscriminator, - @Nullable String propertyName, @Nonnull ObjectNode node) { + + 
private static void replaceDiscriminator( + @Nullable ObjectNode parentNode, + @Nullable String parentDiscriminator, + @Nullable String propertyName, + @Nonnull ObjectNode node) { final String discriminator; if (node.isObject() && node.has(DISCRIMINATOR)) { - Optional<String> discriminatorClassName = shouldDiscriminate(parentDiscriminator, propertyName, node); + Optional<String> discriminatorClassName = + shouldDiscriminate(parentDiscriminator, propertyName, node); if (parentNode != null && discriminatorClassName.isPresent()) { discriminator = node.remove(DISCRIMINATOR).asText(); parentNode.remove(propertyName); - parentNode.set(propertyName, NODE_FACTORY.objectNode().set(discriminatorClassName.get(), node)); + parentNode.set( + propertyName, NODE_FACTORY.objectNode().set(discriminatorClassName.get(), node)); } else { discriminator = node.remove(DISCRIMINATOR).asText(); } @@ -344,55 +378,75 @@ private static void replaceDiscriminator(@Nullable ObjectNode parentNode, @Nulla } List<Map.Entry<String, JsonNode>> objectChildren = new LinkedList<>(); - node.fields().forEachRemaining(entry -> { - if (entry.getValue().isObject()) { - objectChildren.add(entry); - } else if (entry.getValue().isArray()) { - entry.getValue().forEach(i -> { - if (i.isObject()) { - objectChildren.add(Map.entry(entry.getKey(), i)); - } - }); - } - }); + node.fields() + .forEachRemaining( + entry -> { + if (entry.getValue().isObject()) { + objectChildren.add(entry); + } else if (entry.getValue().isArray()) { + entry + .getValue() + .forEach( + i -> { + if (i.isObject()) { + objectChildren.add(Map.entry(entry.getKey(), i)); + } + }); + } + }); - objectChildren.forEach(entry -> - replaceDiscriminator(node, discriminator, entry.getKey(), (ObjectNode) entry.getValue()) - ); + objectChildren.forEach( + entry -> + replaceDiscriminator( + node, discriminator, entry.getKey(), (ObjectNode) entry.getValue())); } @Nonnull - public static GenericAspect convertGenericAspect(@Nonnull io.datahubproject.openapi.generated.GenericAspect genericAspect, + public static GenericAspect convertGenericAspect( + @Nonnull io.datahubproject.openapi.generated.GenericAspect genericAspect, ObjectMapper objectMapper) { try { ObjectNode jsonTree = (ObjectNode) objectMapper.valueToTree(genericAspect).get("value"); replaceDiscriminator(jsonTree); String pretty = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonTree); - return new GenericAspect().setContentType(genericAspect.getContentType()) + return new GenericAspect() + .setContentType(genericAspect.getContentType()) .setValue(ByteString.copyString(pretty, UTF_8)); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } - public static boolean authorizeProposals(List<com.linkedin.mxe.MetadataChangeProposal> proposals, EntityService entityService, - Authorizer authorizer, String actorUrnStr, DisjunctivePrivilegeGroup orGroup) { - List<Optional<EntitySpec>> resourceSpecs = proposals.stream() - .map(proposal -> { - com.linkedin.metadata.models.EntitySpec entitySpec = entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); - Urn entityUrn = EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); - return Optional.of(new EntitySpec(proposal.getEntityType(), entityUrn.toString())); - }) - .collect(Collectors.toList()); + public static boolean authorizeProposals( + List<com.linkedin.mxe.MetadataChangeProposal> proposals, + EntityService entityService, + Authorizer authorizer, + String actorUrnStr, + DisjunctivePrivilegeGroup orGroup) { 
+ List<Optional<EntitySpec>> resourceSpecs = + proposals.stream() + .map( + proposal -> { + com.linkedin.metadata.models.EntitySpec entitySpec = + entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); + Urn entityUrn = + EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); + return Optional.of( + new EntitySpec(proposal.getEntityType(), entityUrn.toString())); + }) + .collect(Collectors.toList()); return AuthUtil.isAuthorizedForResources(authorizer, actorUrnStr, resourceSpecs, orGroup); } - public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChangeProposal serviceProposal, String actorUrn, + public static Pair<String, Boolean> ingestProposal( + com.linkedin.mxe.MetadataChangeProposal serviceProposal, + String actorUrn, EntityService entityService) { // TODO: Use the actor present in the IC. Timer.Context context = MetricUtils.timer("postEntity").time(); final com.linkedin.common.AuditStamp auditStamp = - new com.linkedin.common.AuditStamp().setTime(System.currentTimeMillis()) + new com.linkedin.common.AuditStamp() + .setTime(System.currentTimeMillis()) .setActor(UrnUtils.getUrn(actorUrn)); final List<com.linkedin.mxe.MetadataChangeProposal> additionalChanges = @@ -401,20 +455,25 @@ public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChan log.info("Proposal: {}", serviceProposal); Throwable exceptionally = null; try { - Stream<com.linkedin.mxe.MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(serviceProposal), + Stream<com.linkedin.mxe.MetadataChangeProposal> proposalStream = + Stream.concat( + Stream.of(serviceProposal), AspectUtils.getAdditionalChanges(serviceProposal, entityService).stream()); - AspectsBatch batch = AspectsBatchImpl.builder().mcps(proposalStream.collect(Collectors.toList()), - entityService.getEntityRegistry()).build(); + AspectsBatch batch = + AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), entityService.getEntityRegistry()) + .build(); - Set<IngestResult> proposalResult = - entityService.ingestProposal(batch, auditStamp, false); + Set<IngestResult> proposalResult = entityService.ingestProposal(batch, auditStamp, false); Urn urn = proposalResult.stream().findFirst().get().getUrn(); - return new Pair<>(urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); + return new Pair<>( + urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); } catch (ValidationException ve) { exceptionally = ve; - throw HttpClientErrorException.create(HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null); + throw HttpClientErrorException.create( + HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null); } catch (Exception e) { exceptionally = e; throw e; @@ -429,18 +488,23 @@ public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChan } public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectRequest) { - MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = MetadataChangeProposal.builder(); - io.datahubproject.openapi.generated.GenericAspect - genericAspect = io.datahubproject.openapi.generated.GenericAspect.builder() - .value(aspectRequest.getAspect()) - .contentType(MediaType.APPLICATION_JSON_VALUE).build(); + MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = + MetadataChangeProposal.builder(); + io.datahubproject.openapi.generated.GenericAspect genericAspect = + 
io.datahubproject.openapi.generated.GenericAspect.builder() + .value(aspectRequest.getAspect()) + .contentType(MediaType.APPLICATION_JSON_VALUE) + .build(); io.datahubproject.openapi.generated.GenericAspect keyAspect = null; if (aspectRequest.getEntityKeyAspect() != null) { - keyAspect = io.datahubproject.openapi.generated.GenericAspect.builder() - .contentType(MediaType.APPLICATION_JSON_VALUE) - .value(aspectRequest.getEntityKeyAspect()).build(); + keyAspect = + io.datahubproject.openapi.generated.GenericAspect.builder() + .contentType(MediaType.APPLICATION_JSON_VALUE) + .value(aspectRequest.getEntityKeyAspect()) + .build(); } - metadataChangeProposal.aspect(genericAspect) + metadataChangeProposal + .aspect(genericAspect) .changeType(io.datahubproject.openapi.generated.ChangeType.UPSERT) .aspectName(ASPECT_NAME_MAP.get(aspectRequest.getAspect().getClass())) .entityKeyAspect(keyAspect) @@ -450,9 +514,10 @@ public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectReq return metadataChangeProposal.build(); } - public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(MetadataChangeProposal metadataChangeProposal, - ObjectMapper objectMapper) { - io.datahubproject.openapi.generated.KafkaAuditHeader auditHeader = metadataChangeProposal.getAuditHeader(); + public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal( + MetadataChangeProposal metadataChangeProposal, ObjectMapper objectMapper) { + io.datahubproject.openapi.generated.KafkaAuditHeader auditHeader = + metadataChangeProposal.getAuditHeader(); com.linkedin.mxe.MetadataChangeProposal serviceProposal = new com.linkedin.mxe.MetadataChangeProposal() @@ -463,7 +528,8 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad } if (metadataChangeProposal.getSystemMetadata() != null) { serviceProposal.setSystemMetadata( - objectMapper.convertValue(metadataChangeProposal.getSystemMetadata(), SystemMetadata.class)); + objectMapper.convertValue( + metadataChangeProposal.getSystemMetadata(), SystemMetadata.class)); } if (metadataChangeProposal.getAspectName() != null) { serviceProposal.setAspectName(metadataChangeProposal.getAspectName()); @@ -471,7 +537,8 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad if (auditHeader != null) { KafkaAuditHeader kafkaAuditHeader = new KafkaAuditHeader(); - kafkaAuditHeader.setAuditVersion(auditHeader.getAuditVersion()) + kafkaAuditHeader + .setAuditVersion(auditHeader.getAuditVersion()) .setTime(auditHeader.getTime()) .setAppName(auditHeader.getAppName()) .setMessageId(new UUID(ByteString.copyString(auditHeader.getMessageId(), UTF_8))) @@ -491,30 +558,40 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad serviceProposal.setAuditHeader(kafkaAuditHeader); } - serviceProposal = metadataChangeProposal.getEntityKeyAspect() != null - ? serviceProposal.setEntityKeyAspect( - MappingUtil.convertGenericAspect(metadataChangeProposal.getEntityKeyAspect(), objectMapper)) - : serviceProposal; - serviceProposal = metadataChangeProposal.getAspect() != null - ? serviceProposal.setAspect( - MappingUtil.convertGenericAspect(metadataChangeProposal.getAspect(), objectMapper)) - : serviceProposal; + serviceProposal = + metadataChangeProposal.getEntityKeyAspect() != null + ? 
serviceProposal.setEntityKeyAspect( + MappingUtil.convertGenericAspect( + metadataChangeProposal.getEntityKeyAspect(), objectMapper)) + : serviceProposal; + serviceProposal = + metadataChangeProposal.getAspect() != null + ? serviceProposal.setAspect( + MappingUtil.convertGenericAspect(metadataChangeProposal.getAspect(), objectMapper)) + : serviceProposal; return serviceProposal; } - public static RollbackRunResultDto mapRollbackRunResult(RollbackRunResult rollbackRunResult, ObjectMapper objectMapper) { - List<AspectRowSummary> aspectRowSummaries = rollbackRunResult.getRowsRolledBack().stream() - .map(aspectRowSummary -> objectMapper.convertValue(aspectRowSummary.data(), AspectRowSummary.class)) - .collect(Collectors.toList()); + public static RollbackRunResultDto mapRollbackRunResult( + RollbackRunResult rollbackRunResult, ObjectMapper objectMapper) { + List<AspectRowSummary> aspectRowSummaries = + rollbackRunResult.getRowsRolledBack().stream() + .map( + aspectRowSummary -> + objectMapper.convertValue(aspectRowSummary.data(), AspectRowSummary.class)) + .collect(Collectors.toList()); return RollbackRunResultDto.builder() .rowsRolledBack(aspectRowSummaries) - .rowsDeletedFromEntityDeletion(rollbackRunResult.getRowsDeletedFromEntityDeletion()).build(); + .rowsDeletedFromEntityDeletion(rollbackRunResult.getRowsDeletedFromEntityDeletion()) + .build(); } public static UpsertAspectRequest createStatusRemoval(Urn urn, EntityService entityService) { - com.linkedin.metadata.models.EntitySpec entitySpec = entityService.getEntityRegistry().getEntitySpec(urn.getEntityType()); + com.linkedin.metadata.models.EntitySpec entitySpec = + entityService.getEntityRegistry().getEntitySpec(urn.getEntityType()); if (entitySpec == null || !entitySpec.getAspectSpecMap().containsKey(STATUS_ASPECT_NAME)) { - throw new IllegalArgumentException("Entity type is not valid for soft deletes: " + urn.getEntityType()); + throw new IllegalArgumentException( + "Entity type is not valid for soft deletes: " + urn.getEntityType()); } return UpsertAspectRequest.builder() .aspect(Status.builder().removed(true).build()) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java index 12f7652aff587..31577429df72d 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java @@ -2,9 +2,6 @@ import com.google.common.reflect.ClassPath; import com.linkedin.util.Pair; -import lombok.Builder; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -16,123 +13,132 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; +import lombok.Builder; +import lombok.extern.slf4j.Slf4j; @Slf4j @Builder public class ReflectionCache { - private static final ConcurrentHashMap<String, Method> METHOD_CACHE = new ConcurrentHashMap<>(); - private static final ConcurrentHashMap<String, Class<?>> CLASS_CACHE = new ConcurrentHashMap<>(); - - private final String basePackage; - private final Set<String> subPackages; - @Builder.Default // appropriate for lombok - private final Function<Class<?>, String> getBuilderName = clazz -> - String.join("", clazz.getSimpleName(), "$", clazz.getSimpleName(), 
"Builder"); - - public static class ReflectionCacheBuilder { - public ReflectionCacheBuilder basePackage(String basePackage) { - return basePackage(basePackage, Set.of()); - } - - public ReflectionCacheBuilder basePackage(String basePackage, Set<String> packageExclusions) { - this.basePackage = basePackage; - return subPackages(findSubPackages(basePackage, Optional.ofNullable(packageExclusions).orElse(Set.of()))); - } - - private ReflectionCacheBuilder subPackages(Set<String> subPackages) { - this.subPackages = subPackages; - return this; - } - - private Set<String> findSubPackages(String packageName, Set<String> exclusions) { - try { - return ClassPath.from(getClass().getClassLoader()) - .getAllClasses() - .stream() - .filter(clazz -> exclusions.stream().noneMatch(excl -> clazz.getPackageName().startsWith(excl)) - && !clazz.getName().contains("$") && clazz.getName().startsWith(packageName)) - .map(ClassPath.ClassInfo::getPackageName) - .collect(Collectors.toSet()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - public Method lookupMethod(Class<?> clazz, String method, Class<?>... parameters) { - if (clazz == null) { - return null; - } else { - return METHOD_CACHE.computeIfAbsent( - String.join("_", clazz.getName(), method), - key -> { - try { - log.debug("Lookup: " + clazz.getName() + " Method: " + method + " Parameters: " + Arrays.toString(parameters)); - return clazz.getDeclaredMethod(method, parameters); - } catch (NoSuchMethodException e) { - return null; - } - } - ); - } - } - - public Class<?> lookupClass(String className, boolean searchSubclass) { - if (!searchSubclass) { - return lookupClass(className); - } else { - List<String> subclasses = new LinkedList<>(); - subclasses.add(basePackage); - if (subPackages != null) { - subclasses.addAll(subPackages); - } - - for (String packageName : subclasses) { - try { - return cachedClassLookup(packageName, className); - } catch (Exception e) { - log.debug("Class not found {}.{} ... 
continuing search", packageName, className); - } - } - } - throw new ClassCastException(String.format("Could not locate %s in package %s", className, basePackage)); + private static final ConcurrentHashMap<String, Method> METHOD_CACHE = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap<String, Class<?>> CLASS_CACHE = new ConcurrentHashMap<>(); + + private final String basePackage; + private final Set<String> subPackages; + @Builder.Default // appropriate for lombok + private final Function<Class<?>, String> getBuilderName = + clazz -> String.join("", clazz.getSimpleName(), "$", clazz.getSimpleName(), "Builder"); + + public static class ReflectionCacheBuilder { + public ReflectionCacheBuilder basePackage(String basePackage) { + return basePackage(basePackage, Set.of()); } - public Class<?> lookupClass(String className) { - return cachedClassLookup(basePackage, className); + public ReflectionCacheBuilder basePackage(String basePackage, Set<String> packageExclusions) { + this.basePackage = basePackage; + return subPackages( + findSubPackages(basePackage, Optional.ofNullable(packageExclusions).orElse(Set.of()))); } - private Class<?> cachedClassLookup(String packageName, String className) { - return CLASS_CACHE.computeIfAbsent( - String.format("%s.%s", packageName, className), - key -> { - try { - log.debug("Lookup: " + key); - return Class.forName(key); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - ); + private ReflectionCacheBuilder subPackages(Set<String> subPackages) { + this.subPackages = subPackages; + return this; } - /** - * Get builder instance and class - */ - public Pair<Class<?>, Object> getBuilder(Class<?> toClazz) throws InvocationTargetException, IllegalAccessException { - Class<?> toClazzBuilder = lookupClass(getBuilderName.apply(toClazz)); - return Pair.of(toClazzBuilder, lookupMethod(toClazz, "builder").invoke(null)); + private Set<String> findSubPackages(String packageName, Set<String> exclusions) { + try { + return ClassPath.from(getClass().getClassLoader()).getAllClasses().stream() + .filter( + clazz -> + exclusions.stream().noneMatch(excl -> clazz.getPackageName().startsWith(excl)) + && !clazz.getName().contains("$") + && clazz.getName().startsWith(packageName)) + .map(ClassPath.ClassInfo::getPackageName) + .collect(Collectors.toSet()); + } catch (IOException e) { + throw new RuntimeException(e); + } } - - public Method lookupMethod(Pair<Class<?>, Object> builderPair, String method, Class<?>... parameters) { - return lookupMethod(builderPair.getFirst(), method, parameters); - } - - public static String toLowerFirst(String s) { - return s.substring(0, 1).toLowerCase() + s.substring(1); + } + + public Method lookupMethod(Class<?> clazz, String method, Class<?>... 
parameters) { + if (clazz == null) { + return null; + } else { + return METHOD_CACHE.computeIfAbsent( + String.join("_", clazz.getName(), method), + key -> { + try { + log.debug( + "Lookup: " + + clazz.getName() + + " Method: " + + method + + " Parameters: " + + Arrays.toString(parameters)); + return clazz.getDeclaredMethod(method, parameters); + } catch (NoSuchMethodException e) { + return null; + } + }); } - - public static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + } + + public Class<?> lookupClass(String className, boolean searchSubclass) { + if (!searchSubclass) { + return lookupClass(className); + } else { + List<String> subclasses = new LinkedList<>(); + subclasses.add(basePackage); + if (subPackages != null) { + subclasses.addAll(subPackages); + } + + for (String packageName : subclasses) { + try { + return cachedClassLookup(packageName, className); + } catch (Exception e) { + log.debug("Class not found {}.{} ... continuing search", packageName, className); + } + } } + throw new ClassCastException( + String.format("Could not locate %s in package %s", className, basePackage)); + } + + public Class<?> lookupClass(String className) { + return cachedClassLookup(basePackage, className); + } + + private Class<?> cachedClassLookup(String packageName, String className) { + return CLASS_CACHE.computeIfAbsent( + String.format("%s.%s", packageName, className), + key -> { + try { + log.debug("Lookup: " + key); + return Class.forName(key); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + }); + } + + /** Get builder instance and class */ + public Pair<Class<?>, Object> getBuilder(Class<?> toClazz) + throws InvocationTargetException, IllegalAccessException { + Class<?> toClazzBuilder = lookupClass(getBuilderName.apply(toClazz)); + return Pair.of(toClazzBuilder, lookupMethod(toClazz, "builder").invoke(null)); + } + + public Method lookupMethod( + Pair<Class<?>, Object> builderPair, String method, Class<?>... 
parameters) { + return lookupMethod(builderPair.getFirst(), method, parameters); + } + + public static String toLowerFirst(String s) { + return s.substring(0, 1).toLowerCase() + s.substring(1); + } + + public static String toUpperFirst(String s) { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } diff --git a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java index 6c2ec108fe493..06640ba13fb8b 100644 --- a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java +++ b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java @@ -1,13 +1,17 @@ package entities; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; -import com.linkedin.metadata.config.PreProcessHooks; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.UpdateAspectResult; import com.linkedin.metadata.event.EventProducer; @@ -32,13 +36,12 @@ import io.datahubproject.openapi.generated.SubTypes; import io.datahubproject.openapi.generated.TagAssociation; import io.datahubproject.openapi.generated.ViewProperties; +import io.ebean.Transaction; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.function.Function; - -import io.ebean.Transaction; import mock.MockEntityRegistry; import mock.MockEntityService; import org.mockito.ArgumentMatchers; @@ -46,15 +49,11 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - - public class EntitiesControllerTest { public static final String S = "somerandomstring"; - public static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; + public static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; public static final String CORPUSER_URN = "urn:li:corpuser:datahub"; public static final String GLOSSARY_TERM_URN = "urn:li:glossaryTerm:SavingAccount"; public static final String DATA_PLATFORM_URN = "urn:li:dataPlatform:platform"; @@ -62,25 +61,38 @@ public class EntitiesControllerTest { @BeforeMethod public void setup() - throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { + throws NoSuchMethodException, + InvocationTargetException, + InstantiationException, + IllegalAccessException { EntityRegistry mockEntityRegistry = new MockEntityRegistry(); AspectDao aspectDao = Mockito.mock(AspectDao.class); - Mockito.when(aspectDao.runInTransactionWithRetry( - ArgumentMatchers.<Function<Transaction, UpdateAspectResult>>any(), any(), anyInt())).thenAnswer(i -> - ((Function<Transaction, UpdateAspectResult>) i.getArgument(0)).apply(Mockito.mock(Transaction.class)) - ); + Mockito.when( + aspectDao.runInTransactionWithRetry( + 
ArgumentMatchers.<Function<Transaction, UpdateAspectResult>>any(), any(), anyInt())) + .thenAnswer( + i -> + ((Function<Transaction, UpdateAspectResult>) i.getArgument(0)) + .apply(Mockito.mock(Transaction.class))); EventProducer mockEntityEventProducer = Mockito.mock(EventProducer.class); UpdateIndicesService mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - MockEntityService mockEntityService = new MockEntityService(aspectDao, mockEntityEventProducer, mockEntityRegistry, - mockUpdateIndicesService, preProcessHooks); + MockEntityService mockEntityService = + new MockEntityService( + aspectDao, + mockEntityEventProducer, + mockEntityRegistry, + mockUpdateIndicesService, + preProcessHooks); AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - _entitiesController = new EntitiesController(mockEntityService, new ObjectMapper(), authorizerChain); + _entitiesController = + new EntitiesController(mockEntityService, new ObjectMapper(), authorizerChain); Authentication authentication = Mockito.mock(Authentication.class); when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); + when(authorizerChain.authorize(any())) + .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); AuthenticationContext.setAuthentication(authentication); } @@ -89,98 +101,130 @@ public void setup() @Test public void testIngestDataset() { List<UpsertAspectRequest> datasetAspects = new ArrayList<>(); - UpsertAspectRequest viewProperties = UpsertAspectRequest.builder() - .aspect(ViewProperties.builder() - .viewLogic(S) - .viewLanguage(S) - .materialized(true).build()) - .entityType(DATASET_ENTITY_NAME) - .entityUrn(DATASET_URN) - .build(); + UpsertAspectRequest viewProperties = + UpsertAspectRequest.builder() + .aspect( + ViewProperties.builder().viewLogic(S).viewLanguage(S).materialized(true).build()) + .entityType(DATASET_ENTITY_NAME) + .entityUrn(DATASET_URN) + .build(); datasetAspects.add(viewProperties); - UpsertAspectRequest subTypes = UpsertAspectRequest.builder() - .aspect(SubTypes.builder() - .typeNames(Collections.singletonList(S)).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest subTypes = + UpsertAspectRequest.builder() + .aspect(SubTypes.builder().typeNames(Collections.singletonList(S)).build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(subTypes); - UpsertAspectRequest datasetProfile = UpsertAspectRequest.builder() - .aspect(DatasetProfile.builder().build().timestampMillis(0L).addFieldProfilesItem( - DatasetFieldProfile.builder() - .fieldPath(S) - .histogram(Histogram.builder() - .boundaries(Collections.singletonList(S)).build()).build() - ) - ) + UpsertAspectRequest datasetProfile = + UpsertAspectRequest.builder() + .aspect( + DatasetProfile.builder() + .build() + .timestampMillis(0L) + .addFieldProfilesItem( + DatasetFieldProfile.builder() + .fieldPath(S) + .histogram( + Histogram.builder() + .boundaries(Collections.singletonList(S)) + .build()) + .build())) .entityType(DATASET_ENTITY_NAME) - 
.entityKeyAspect(DatasetKey.builder() + .entityKeyAspect( + DatasetKey.builder() .name("name") .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) + .origin(FabricType.PROD) + .build()) .build(); datasetAspects.add(datasetProfile); - UpsertAspectRequest schemaMetadata = UpsertAspectRequest.builder() - .aspect(SchemaMetadata.builder() - .schemaName(S) - .dataset(DATASET_URN) - .platform(DATA_PLATFORM_URN) - .hash(S) - .version(0L) - .platformSchema(MySqlDDL.builder().tableSchema(S).build()) - .fields(Collections.singletonList(SchemaField.builder() - .fieldPath(S) - .nativeDataType(S) - .type(SchemaFieldDataType.builder().type(StringType.builder().build()).build()) - .description(S) - .globalTags(GlobalTags.builder() - .tags(Collections.singletonList(TagAssociation.builder() - .tag(TAG_URN).build())).build()) - .glossaryTerms(GlossaryTerms.builder() - .terms(Collections.singletonList(GlossaryTermAssociation.builder() - .urn(GLOSSARY_TERM_URN).build())) - .auditStamp(AuditStamp.builder() - .time(0L) - .actor(CORPUSER_URN).build()).build()).build() - ) - ).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest schemaMetadata = + UpsertAspectRequest.builder() + .aspect( + SchemaMetadata.builder() + .schemaName(S) + .dataset(DATASET_URN) + .platform(DATA_PLATFORM_URN) + .hash(S) + .version(0L) + .platformSchema(MySqlDDL.builder().tableSchema(S).build()) + .fields( + Collections.singletonList( + SchemaField.builder() + .fieldPath(S) + .nativeDataType(S) + .type( + SchemaFieldDataType.builder() + .type(StringType.builder().build()) + .build()) + .description(S) + .globalTags( + GlobalTags.builder() + .tags( + Collections.singletonList( + TagAssociation.builder().tag(TAG_URN).build())) + .build()) + .glossaryTerms( + GlossaryTerms.builder() + .terms( + Collections.singletonList( + GlossaryTermAssociation.builder() + .urn(GLOSSARY_TERM_URN) + .build())) + .auditStamp( + AuditStamp.builder() + .time(0L) + .actor(CORPUSER_URN) + .build()) + .build()) + .build())) + .build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(schemaMetadata); - UpsertAspectRequest glossaryTerms = UpsertAspectRequest.builder() - .aspect(GlossaryTerms.builder() - .terms(Collections.singletonList(GlossaryTermAssociation.builder() - .urn(GLOSSARY_TERM_URN).build())) - .auditStamp(AuditStamp.builder() - .time(0L) - .actor(CORPUSER_URN).build()).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest glossaryTerms = + UpsertAspectRequest.builder() + .aspect( + GlossaryTerms.builder() + .terms( + Collections.singletonList( + GlossaryTermAssociation.builder().urn(GLOSSARY_TERM_URN).build())) + .auditStamp(AuditStamp.builder().time(0L).actor(CORPUSER_URN).build()) + .build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(glossaryTerms); _entitiesController.postEntities(datasetAspects); } -// @Test -// public void testGetDataset() { -// _entitiesController.getEntities(new String[] {DATASET_URN}, -// new String[] { -// 
SCHEMA_METADATA_ASPECT_NAME -// }); -// } + // @Test + // public void testGetDataset() { + // _entitiesController.getEntities(new String[] {DATASET_URN}, + // new String[] { + // SCHEMA_METADATA_ASPECT_NAME + // }); + // } } diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java index 852b6cfcb4b22..91e9e4fd4671e 100644 --- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java +++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java @@ -1,6 +1,7 @@ package mock; -import com.linkedin.metadata.config.PreProcessHooks; +import static entities.EntitiesControllerTest.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -20,6 +21,7 @@ import com.linkedin.entity.AspectType; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.ListResult; @@ -49,22 +51,25 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; -import static entities.EntitiesControllerTest.*; - - public class MockEntityService extends EntityServiceImpl { - public MockEntityService(@Nonnull AspectDao aspectDao, @Nonnull EventProducer producer, @Nonnull EntityRegistry entityRegistry, @Nonnull - UpdateIndicesService updateIndicesService, PreProcessHooks preProcessHooks) { + public MockEntityService( + @Nonnull AspectDao aspectDao, + @Nonnull EventProducer producer, + @Nonnull EntityRegistry entityRegistry, + @Nonnull UpdateIndicesService updateIndicesService, + PreProcessHooks preProcessHooks) { super(aspectDao, producer, entityRegistry, true, updateIndicesService, preProcessHooks); } @Override - public Map<Urn, List<RecordTemplate>> getLatestAspects(@Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) { + public Map<Urn, List<RecordTemplate>> getLatestAspects( + @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) { return null; } @Override - public Map<String, RecordTemplate> getLatestAspectsForUrn(@Nonnull Urn urn, @Nonnull Set<String> aspectNames) { + public Map<String, RecordTemplate> getLatestAspectsForUrn( + @Nonnull Urn urn, @Nonnull Set<String> aspectNames) { return Collections.emptyMap(); } @@ -74,42 +79,58 @@ public RecordTemplate getAspect(@Nonnull Urn urn, @Nonnull String aspectName, lo } @Override - public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects(@Nonnull String entityName, @Nonnull Set<Urn> urns, - @Nonnull Set<String> aspectNames) throws URISyntaxException { + public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects( + @Nonnull String entityName, @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws URISyntaxException { Urn urn = UrnUtils.getUrn(DATASET_URN); Map<Urn, List<EnvelopedAspect>> envelopedAspectMap = new HashMap<>(); List<EnvelopedAspect> aspects = new ArrayList<>(); EnvelopedAspect schemaMetadata = new EnvelopedAspect(); SchemaMetadata pegasusSchemaMetadata = new SchemaMetadata(); - pegasusSchemaMetadata.setDataset(DatasetUrn.createFromUrn(UrnUtils.getUrn(DATASET_URN))) + pegasusSchemaMetadata + .setDataset(DatasetUrn.createFromUrn(UrnUtils.getUrn(DATASET_URN))) .setVersion(0L) - .setCreated(new 
AuditStamp().setActor(UrnUtils.getUrn(CORPUSER_URN)).setTime(System.currentTimeMillis())) + .setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(CORPUSER_URN)) + .setTime(System.currentTimeMillis())) .setHash(S) .setCluster(S) .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema(S))) - .setForeignKeys(new ForeignKeyConstraintArray(Collections.singletonList( - new ForeignKeyConstraint() - .setForeignDataset(urn) - .setName(S) - .setForeignFields(new UrnArray(Collections.singletonList(urn)))))) - .setFields(new SchemaFieldArray(Collections.singletonList( - new SchemaField() - .setDescription(S) - .setFieldPath(S) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags( - new GlobalTags() - .setTags(new TagAssociationArray(Collections.singletonList( - new TagAssociation().setTag(TagUrn.createFromUrn(UrnUtils.getUrn(TAG_URN))) - )))) - .setGlossaryTerms(new GlossaryTerms().setTerms( - new GlossaryTermAssociationArray(Collections.singletonList( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(UrnUtils.getUrn(GLOSSARY_TERM_URN))) - ))) - ) - )) - ); + .setForeignKeys( + new ForeignKeyConstraintArray( + Collections.singletonList( + new ForeignKeyConstraint() + .setForeignDataset(urn) + .setName(S) + .setForeignFields(new UrnArray(Collections.singletonList(urn)))))) + .setFields( + new SchemaFieldArray( + Collections.singletonList( + new SchemaField() + .setDescription(S) + .setFieldPath(S) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + Collections.singletonList( + new TagAssociation() + .setTag( + TagUrn.createFromUrn( + UrnUtils.getUrn(TAG_URN))))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + Collections.singletonList( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + UrnUtils.getUrn(GLOSSARY_TERM_URN)))))))))); schemaMetadata .setType(AspectType.VERSIONED) .setName("schemaMetadata") @@ -120,29 +141,31 @@ public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects(@Nonnull String } @Override - public Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects(@Nonnull Set<VersionedUrn> versionedUrns, - @Nonnull Set<String> aspectNames) throws URISyntaxException { + public Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( + @Nonnull Set<VersionedUrn> versionedUrns, @Nonnull Set<String> aspectNames) + throws URISyntaxException { return null; } @Override - public EnvelopedAspect getLatestEnvelopedAspect(@Nonnull String entityName, @Nonnull Urn urn, - @Nonnull String aspectName) throws Exception { + public EnvelopedAspect getLatestEnvelopedAspect( + @Nonnull String entityName, @Nonnull Urn urn, @Nonnull String aspectName) throws Exception { return null; } @Override - public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) { + public VersionedAspect getVersionedAspect( + @Nonnull Urn urn, @Nonnull String aspectName, long version) { return null; } @Override - public ListResult<RecordTemplate> listLatestAspects(@Nonnull String entityName, @Nonnull String aspectName, int start, - int count) { + public ListResult<RecordTemplate> listLatestAspects( + @Nonnull String entityName, @Nonnull String aspectName, int start, int count) { return null; } -/* @Nonnull + /* @Nonnull @Override protected UpdateAspectResult 
ingestAspectToLocalDB(@Nonnull Urn urn, @Nonnull String aspectName, @Nonnull Function<Optional<RecordTemplate>, RecordTemplate> updateLambda, @Nonnull AuditStamp auditStamp, @@ -161,8 +184,12 @@ protected List<Pair<String, UpdateAspectResult>> ingestAspectsToLocalDB(@Nonnull @Nullable @Override - public RecordTemplate ingestAspectIfNotPresent(@NotNull Urn urn, @NotNull String aspectName, - @NotNull RecordTemplate newValue, @NotNull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata) { + public RecordTemplate ingestAspectIfNotPresent( + @NotNull Urn urn, + @NotNull String aspectName, + @NotNull RecordTemplate newValue, + @NotNull AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata) { return null; } @@ -172,13 +199,11 @@ public ListUrnsResult listUrns(@Nonnull String entityName, int start, int count) } @Override - public void setWritable(boolean canWrite) { - - } + public void setWritable(boolean canWrite) {} @Override - public RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions, - boolean hardDelete) { + public RollbackRunResult rollbackWithConditions( + List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete) { return null; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java index e632aa7eadff0..17163b937f417 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java @@ -4,32 +4,29 @@ import java.util.Optional; import lombok.Data; - /** * POJO for YAML section presents in config.yml at location plugins[].params. * - * These parameters are same for Authenticator and Authorizer plugins. + * <p>These parameters are the same for Authenticator and Authorizer plugins. * - * {@link com.datahub.plugins.auth.provider.AuthPluginConfigProvider} uses this AuthParam to create instance of - * either {@link AuthenticatorPluginConfig} or {@link AuthorizerPluginConfig} + * <p>{@link com.datahub.plugins.auth.provider.AuthPluginConfigProvider} uses this AuthParam to + * create instance of either {@link AuthenticatorPluginConfig} or {@link AuthorizerPluginConfig} */ @Data public class AuthParam { - /** - * Fully-qualified class-name of plugin - */ + /** Fully-qualified class-name of plugin */ private String className; /** - * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default value then set - * this property. + * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default + * value then set this property. */ private Optional<String> jarFileName = Optional.empty(); /** - * These configs are specific to plugin. GMS pass this map as is to plugin - * {@link com.datahub.plugins.auth.authentication.Authenticator} or - * {@link com.datahub.plugins.auth.authorization.Authorizer} init method + * These configs are specific to the plugin.
GMS passes this map as is to the plugin {@link + * com.datahub.plugins.auth.authentication.Authenticator} or {@link + * com.datahub.plugins.auth.authorization.Authorizer} init method */ private Optional<Map<String, Object>> configs = Optional.empty(); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java index b4546d9f5af16..8bc06c73a9439 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java @@ -9,16 +9,19 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - -/** - * Superclass for {@link AuthenticatorPluginConfig} and {@link AuthorizerPluginConfig} - */ +/** Superclass for {@link AuthenticatorPluginConfig} and {@link AuthorizerPluginConfig} */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = false) public class AuthPluginConfig extends PluginConfig { - public AuthPluginConfig(PluginType type, String name, Boolean enabled, String className, Path pluginHomeDirectory, - Path pluginJarPath, Optional<Map<String, Object>> configs) { + public AuthPluginConfig( + PluginType type, + String name, + Boolean enabled, + String className, + Path pluginHomeDirectory, + Path pluginJarPath, + Optional<Map<String, Object>> configs) { super(type, name, enabled, className, pluginHomeDirectory, pluginJarPath, configs); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java index 276faed56f7ab..b10a178caa9fa 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java @@ -8,16 +8,20 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - /** - * Authenticator plugin configuration provided by user. - * {@link com.datahub.plugins.auth.provider.AuthenticatorPluginConfigProvider} instantiate this class + * Authenticator plugin configuration provided by user.
{@link + * com.datahub.plugins.auth.provider.AuthenticatorPluginConfigProvider} instantiates this class */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = false) public class AuthenticatorPluginConfig extends AuthPluginConfig { - public AuthenticatorPluginConfig(String name, Boolean enabled, String className, Path pluginDirectory, Path pluginJar, + public AuthenticatorPluginConfig( + String name, + Boolean enabled, + String className, + Path pluginDirectory, + Path pluginJar, Optional<Map<String, Object>> configs) { super(PluginType.AUTHENTICATOR, name, enabled, className, pluginDirectory, pluginJar, configs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java index 1a4bd1ea07906..de8c3d7ecaaa4 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java @@ -8,16 +8,20 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - /** - * Authorizer plugin configuration provided by user. - * {@link com.datahub.plugins.auth.provider.AuthorizerPluginConfigProvider} instantiate this class + * Authorizer plugin configuration provided by user. {@link + * com.datahub.plugins.auth.provider.AuthorizerPluginConfigProvider} instantiates this class */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = false) public class AuthorizerPluginConfig extends AuthPluginConfig { - public AuthorizerPluginConfig(String name, Boolean enabled, String className, Path pluginDirectory, Path pluginJar, + public AuthorizerPluginConfig( + String name, + Boolean enabled, + String className, + Path pluginDirectory, + Path pluginJar, Optional<Map<String, Object>> configs) { super(PluginType.AUTHORIZER, name, enabled, className, pluginDirectory, pluginJar, configs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java index b970258aa3ea0..4e62d03620f7f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java @@ -11,9 +11,9 @@ import java.util.stream.Collectors; import java.util.stream.Stream; - /** - * Base class for {@link AuthenticatorPluginConfigProvider} and {@link AuthorizerPluginConfigProvider}. + * Base class for {@link AuthenticatorPluginConfigProvider} and {@link + * AuthorizerPluginConfigProvider}.
*/ public abstract class AuthPluginConfigProvider implements PluginConfigProvider<AuthPluginConfig> { public abstract PluginType getType(); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java index 546cee04d05a0..71563e79ef787 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java @@ -9,10 +9,9 @@ import java.nio.file.Path; import javax.annotation.Nonnull; - /** - * Responsible for creating {@link AuthenticatorPluginConfig} instance. - * This provider is register in {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authenticator + * Responsible for creating {@link AuthenticatorPluginConfig} instance. This provider is registered in + * {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authenticator * configuration */ public class AuthenticatorPluginConfigProvider extends AuthPluginConfigProvider { @@ -24,13 +23,19 @@ public PluginType getType() { @Override public AuthPluginConfig createAuthPluginConfig(@Nonnull PluginConfig pluginConfig) { // Map Yaml section present in config.yml at plugins[].params to AuthParam - AuthParam authParam = (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class); - // Make the pluginJar file path either from name of plugin or explicitly from plugins[].params.jarFileName + AuthParam authParam = + (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class); + // Make the pluginJar file path either from name of plugin or explicitly from + // plugins[].params.jarFileName // This logic is common for authenticator and authorizer plugin and hence define in superclass Path pluginJar = formPluginJar(pluginConfig, authParam); - return new AuthenticatorPluginConfig(pluginConfig.getName(), pluginConfig.getEnabled(), authParam.getClassName(), - pluginConfig.getPluginHomeDirectory(), pluginJar, authParam.getConfigs()); + return new AuthenticatorPluginConfig( + pluginConfig.getName(), + pluginConfig.getEnabled(), + authParam.getClassName(), + pluginConfig.getPluginHomeDirectory(), + pluginJar, + authParam.getConfigs()); } } - diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java index 397dc3fd93b36..7899f55523595 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java @@ -9,11 +9,9 @@ import java.nio.file.Path; import javax.annotation.Nonnull; - /** - * Responsible for creating {@link AuthorizerPluginConfig} instance. - * This provider is register in {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authorizer - * configuration + * Responsible for creating {@link AuthorizerPluginConfig} instance.
This provider is registered in + * {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authorizer configuration */ public class AuthorizerPluginConfigProvider extends AuthPluginConfigProvider { @Override @@ -24,13 +22,20 @@ public PluginType getType() { @Override public AuthPluginConfig createAuthPluginConfig(@Nonnull PluginConfig pluginConfig) { // Map Yaml section present in config.yml at plugins[].params to AuthParam - AuthParam authParam = (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class); + AuthParam authParam = + (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class); - // Make the pluginJar file path either from name of plugin or explicitly from plugins[].params.jarFileName + // Make the pluginJar file path either from name of plugin or explicitly from + // plugins[].params.jarFileName // This logic is common for authenticator and authorizer plugin and hence define in superclass Path pluginJar = formPluginJar(pluginConfig, authParam); - return new AuthorizerPluginConfig(pluginConfig.getName(), pluginConfig.getEnabled(), authParam.getClassName(), - pluginConfig.getPluginHomeDirectory(), pluginJar, authParam.getConfigs()); + return new AuthorizerPluginConfig( + pluginConfig.getName(), + pluginConfig.getEnabled(), + authParam.getClassName(), + pluginConfig.getPluginHomeDirectory(), + pluginJar, + authParam.getConfigs()); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java index c4dc94b7c73d5..ba15fea2ccd50 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java @@ -7,15 +7,10 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; - -/** - * Common validations. - * Used in {@link com.datahub.plugins.configuration.PluginConfig} - */ +/** Common validations. Used in {@link com.datahub.plugins.configuration.PluginConfig} */ public class ConfigValidationUtils { - private ConfigValidationUtils() { - } + private ConfigValidationUtils() {} public static void whiteSpacesValidation(@Nonnull String fieldName, @Nonnull String value) throws IllegalArgumentException { @@ -25,7 +20,8 @@ public static void whiteSpacesValidation(@Nonnull String fieldName, @Nonnull Str } } - public static void mapShouldNotBeEmpty(@Nonnull String fieldName, @Nonnull Map<String, Object> attributeMap) + public static void mapShouldNotBeEmpty( + @Nonnull String fieldName, @Nonnull Map<String, Object> attributeMap) throws IllegalArgumentException { if (attributeMap.isEmpty()) { throw new IllegalArgumentException(String.format("%s should not be empty", fieldName)); @@ -39,15 +35,18 @@ public static void listShouldNotBeEmpty(@Nonnull String fieldName, @Nonnull List } } - public static void listShouldNotHaveDuplicate(@Nonnull String fieldName, @Nonnull List<String> list) { + public static void listShouldNotHaveDuplicate( + @Nonnull String fieldName, @Nonnull List<String> list) { Set<String> set = new HashSet<>(); - list.forEach((input) -> { - if (set.contains(input)) { - throw new IllegalArgumentException( - String.format("Duplicate entry of %s is found in %s.
%s should not contain duplicate", input, fieldName, - fieldName)); - } - set.add(input); - }); + list.forEach( + (input) -> { + if (set.contains(input)) { + throw new IllegalArgumentException( + String.format( + "Duplicate entry of %s is found in %s. %s should not contain duplicate", + input, fieldName, fieldName)); + } + set.add(input); + }); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java index 02b3b4566d705..dfc26041ee627 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java @@ -7,52 +7,43 @@ import lombok.Data; import lombok.NoArgsConstructor; - -/** - * Flat form of plugin configuration configured in config.yaml at plugins[] and plugins[].params - */ +/** Flat form of plugin configuration configured in config.yaml at plugins[] and plugins[].params */ @Data @NoArgsConstructor @AllArgsConstructor public class PluginConfig { - /** - * Type of plugin. Supported types are {@link PluginType} - */ + /** Type of plugin. Supported types are {@link PluginType} */ private PluginType type; - /** - * name of the plugin. It should be unique in plugins[] list - */ + /** name of the plugin. It should be unique in plugins[] list */ private String name; /** - * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take authentication/authorization - * decisions. + * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS to take + * authentication/authorization decisions. */ private Boolean enabled; - /** - * Fully-qualified class-name of plugin - */ + /** Fully-qualified class-name of plugin */ private String className; /** - * It is always set to <plugin-base-directory>/<plugin-name>. - * For example if plugin-name is ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then - * pluginDirectory would be /etc/datahub/plugins/auth/ranger-authorizer + * It is always set to <plugin-base-directory>/<plugin-name>. For example if plugin-name is + * ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then pluginDirectory + * would be /etc/datahub/plugins/auth/ranger-authorizer */ private Path pluginHomeDirectory; /** - * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default value then set - * this property. + * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default + * value then set this property. */ private Path pluginJarPath; /** - * These configs are specific to plugin. GMS pass this map as is to plugin - * {@link com.datahub.plugins.auth.authentication.Authenticator} or - * {@link com.datahub.plugins.auth.authorization.Authorizer} init method + * These configs are specific to the plugin.
GMS passes this map as is to the plugin {@link + * com.datahub.plugins.auth.authentication.Authenticator} or {@link + * com.datahub.plugins.auth.authorization.Authorizer} init method */ private Optional<Map<String, Object>> configs; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java index b1b0844f428b7..b068a009528d3 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java @@ -2,7 +2,6 @@ import java.util.List; - public interface PluginConfigProvider<T extends PluginConfig> { List<T> processConfig(List<com.datahub.plugins.configuration.PluginConfig> pluginConfigConfigs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java index 0a46be21155b6..713f5683a82a1 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java @@ -3,13 +3,12 @@ import java.nio.file.Path; import java.security.ProtectionDomain; - -/** - * Implement this interface to create Java SecurityManager's ProtectionDomain for the plugin. - */ +/** Implement this interface to create Java SecurityManager's ProtectionDomain for the plugin. */ public interface PluginPermissionManager { /** - * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to the plugin code + * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to + * the plugin code + * * @param pluginHome * @return ProtectionDomain */ diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java index ed3bf0a4f4473..7db9b7d40276e 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java @@ -1,17 +1,11 @@ package com.datahub.plugins.common; -/** - * Supported plugin types - */ +/** Supported plugin types */ public enum PluginType { - /** - * PluginType for Authenticator plugin - */ + /** PluginType for Authenticator plugin */ AUTHENTICATOR, - /** - * PluginType for Authorizer plugin - */ + /** PluginType for Authorizer plugin */ AUTHORIZER; @Override diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java index 7ab0032b86497..3eb01659eb99f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java @@ -7,28 +7,22 @@ import java.security.Permissions; import java.util.function.Function; - -/** - * Supported security modes - */ +/** Supported security modes */ public enum SecurityMode { /** * In this mode plugins has limited access. * - * Plugins are allowed to connect on below ports only - * 1) port greater than 1024 - * 2) port 80 - * 3) port 443 - * All other ports connection are disallowed.
+ * <p>Plugins are allowed to connect on below ports only 1) port greater than 1024 2) port 80 3) + * port 443 All other port connections are disallowed. * - * Plugins are allowed to read and write files on PLUGIN_HOME directory only and all other read/write access are - * denied. + * <p>Plugins are allowed to read and write files on PLUGIN_HOME directory only and all other + * read/write access are denied. */ RESTRICTED(SecurityMode::restrictModePermissionSupplier), /** - * Plugins has full access. - * In this mode plugin can read/write to any directory, can connect to any port and can read environment variables. + * Plugins have full access. In this mode plugin can read/write to any directory, can connect to + * any port and can read environment variables. */ LENIENT(SecurityMode::lenientModePermissionSupplier); @@ -43,9 +37,12 @@ private static Permissions restrictModePermissionSupplier(Path sourceCodeDirecto permissions.add(new FilePermission(sourceCodeDirectory.toString() + "/*", "read,write,delete")); permissions.add( - new SocketPermission("*:1024-", "connect,resolve")); // Allow to connect access to all socket above 1024 - permissions.add(new SocketPermission("*:80", "connect,resolve")); // Allow to connect access to HTTP port - permissions.add(new SocketPermission("*:443", "connect,resolve")); // Allow to connect access to HTTPS port + new SocketPermission( + "*:1024-", "connect,resolve")); // Allow to connect access to all socket above 1024 + permissions.add( + new SocketPermission("*:80", "connect,resolve")); // Allow to connect access to HTTP port + permissions.add( + new SocketPermission("*:443", "connect,resolve")); // Allow to connect access to HTTPS port return permissions; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java index c4a79e9434923..309bbfb1b6485 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java @@ -9,16 +9,13 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * - * A mapper to map plugin configuration to java Pojo classes - */ +/** A mapper to map plugin configuration to java Pojo classes */ public class YamlMapper<T> { private final ObjectMapper objectMapper; public YamlMapper() { - this.objectMapper = YAMLMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); + this.objectMapper = + YAMLMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); objectMapper.registerModule(new Jdk8Module()); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java index 6cf1966787875..ff87176ebbd7f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java @@ -10,17 +10,13 @@ import lombok.Builder; import lombok.Getter; - -/** - * {@link Config} is getting loaded from /etc/datahub/plugins/auth/config.yaml - */ +/** {@link Config} is getting loaded from /etc/datahub/plugins/auth/config.yaml */ @Getter @Builder @JsonDeserialize(builder = Config.CustomBuilder.class) public class Config { - public static final String FIELD_PLUGINS = "plugins"; // for validation error messages - @Nonnull - private List<PluginConfig>
plugins; + public static final String FIELD_PLUGINS = "plugins"; // for validation error messages + @Nonnull private List<PluginConfig> plugins; public static CustomBuilder builder() { return new CustomBuilder(); @@ -29,12 +25,14 @@ public static CustomBuilder builder() { @JsonPOJOBuilder(withPrefix = "") public static class CustomBuilder extends ConfigBuilder { public Config build() { - ConfigValidationUtils.listShouldNotBeEmpty(FIELD_PLUGINS, Collections.singletonList(super.plugins)); + ConfigValidationUtils.listShouldNotBeEmpty( + FIELD_PLUGINS, Collections.singletonList(super.plugins)); List<String> list = new ArrayList<>(super.plugins.size()); - super.plugins.forEach((pluginConfig) -> { - list.add(pluginConfig.getName()); - }); + super.plugins.forEach( + (pluginConfig) -> { + list.add(pluginConfig.getName()); + }); ConfigValidationUtils.listShouldNotHaveDuplicate(FIELD_PLUGINS, list); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java index ac2590209f4db..0c371263eea5f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java @@ -7,19 +7,19 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class ConfigProvider { public static final String CONFIG_FILE_NAME = "config.yml"; /** - * Yaml file path of plugin configuration file. Content of this file should match with {@link Config} + * Yaml file path of plugin configuration file. Content of this file should match with {@link + * Config} */ private final Path configFilePath; /** - * Directory where all plugins are mounted in DataHub GMS. - * Default pluginBaseDir is /etc/datahub/plugins/auth. + * Directory where all plugins are mounted in DataHub GMS. Default pluginBaseDir is + * /etc/datahub/plugins/auth. 
*/ private final Path pluginBaseDir; @@ -36,7 +36,8 @@ private void setPluginDir(@Nonnull PluginConfig pluginConfig) { public Optional<Config> load() { // Check config file should exist if (!this.configFilePath.toFile().exists()) { - log.warn("Configuration {} file not found at location {}", CONFIG_FILE_NAME, this.pluginBaseDir); + log.warn( + "Configuration {} file not found at location {}", CONFIG_FILE_NAME, this.pluginBaseDir); return Optional.empty(); } @@ -45,4 +46,4 @@ public Optional<Config> load() { config.getPlugins().forEach(this::setPluginDir); return Optional.of(config); } -} \ No newline at end of file +} diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java index faeeabbf955eb..5280f520109fd 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java @@ -1,7 +1,7 @@ package com.datahub.plugins.configuration; -import com.datahub.plugins.common.PluginType; import com.datahub.plugins.common.ConfigValidationUtils; +import com.datahub.plugins.common.PluginType; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; @@ -13,10 +13,7 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - -/** - * POJO to map YAML section present in config.yml at plugins[] - */ +/** POJO to map YAML section present in config.yml at plugins[] */ @Data @NoArgsConstructor @AllArgsConstructor @@ -24,35 +21,30 @@ @JsonDeserialize(builder = PluginConfig.CustomBuilder.class) @EqualsAndHashCode(onlyExplicitlyIncluded = true) public class PluginConfig { - /** - * name of the plugin. It should be unique in plugins[] list - */ - @EqualsAndHashCode.Include - private String name; // In list of plugin, the name should be unique + /** name of the plugin. It should be unique in plugins[] list */ + @EqualsAndHashCode.Include private String name; // In list of plugin, the name should be unique - /** - * Type of plugin. Supported types are {@link PluginType} - */ + /** Type of plugin. Supported types are {@link PluginType} */ private PluginType type; /** - * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take authentication/authorization - * decisions. + * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS to take + * authentication/authorization decisions. */ private Boolean enabled; /** - * Attributes in params should be as per POJO {@link com.datahub.plugins.auth.configuration.AuthParam} + * Attributes in params should be as per POJO {@link + * com.datahub.plugins.auth.configuration.AuthParam} */ private Map<String, Object> params; /** - * It is always set to <plugin-base-directory>/<plugin-name>. - * For example if plugin-name is ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then - * pluginDirectory would be /etc/datahub/plugins/auth/ranger-authorizer + * It is always set to <plugin-base-directory>/<plugin-name>.
For example if plugin-name is + * ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then pluginDirectory + * would be /etc/datahub/plugins/auth/ranger-authorizer */ - @JsonIgnore - private Path pluginHomeDirectory; + @JsonIgnore private Path pluginHomeDirectory; public static CustomBuilder builder() { return new CustomBuilder(); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java index b0a59a1656c8d..80837b966ba58 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java @@ -11,12 +11,9 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * Create instance of config provider as per type mentioned in {@link Config} - */ +/** Create instance of config provider as per type mentioned in {@link Config} */ public class PluginConfigFactory { - private final static Map<PluginType, PluginConfigProvider> CONFIG_PROVIDER_REGISTRY; + private static final Map<PluginType, PluginConfigProvider> CONFIG_PROVIDER_REGISTRY; static { CONFIG_PROVIDER_REGISTRY = new HashMap<>(2); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java index 92a7cae0647c5..1529df3ede676 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java @@ -30,10 +30,9 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; - /** - * IsolatedClassLoader to load custom implementation of DataHub Plugins. - * Override methods behave as per Java ClassLoader documentation. + * IsolatedClassLoader to load custom implementation of DataHub Plugins. Override methods behave as + * per Java ClassLoader documentation. */ @Slf4j public class IsolatedClassLoader extends ClassLoader { @@ -50,22 +49,30 @@ public class IsolatedClassLoader extends ClassLoader { private final Path _executionDirectory; - public IsolatedClassLoader(@Nonnull PluginPermissionManager pluginPermissionManager, - @Nonnull PluginConfig pluginToLoad, @Nonnull ClassLoader... applicationClassLoaders) { + public IsolatedClassLoader( + @Nonnull PluginPermissionManager pluginPermissionManager, + @Nonnull PluginConfig pluginToLoad, + @Nonnull ClassLoader... applicationClassLoaders) { this._pluginPermissionManager = pluginPermissionManager; this._pluginConfig = pluginToLoad; this._classLoaders.add(this.getClass().getClassLoader()); // then application class-loader this._classLoaders.addAll(Arrays.asList(applicationClassLoaders)); // if any extra class loaders this._executionDirectory = - Paths.get("/tmp", pluginToLoad.getPluginHomeDirectory().toString(), EXECUTION_DIR); // to store .so files i.e. libraries + Paths.get( + "/tmp", + pluginToLoad.getPluginHomeDirectory().toString(), + EXECUTION_DIR); // to store .so files i.e. 
libraries try { this.createJarEntryMap(); } catch (IOException e) { - // This would occur if we don't have permission on directory and chances of this is close to zero, hence catching + // This would occur if we don't have permission on directory and chances of this is close to + // zero, hence catching // this checked exception and throwing runtime exception // to make caller code more readable - String message = String.format("Unable to load jar file %s for plugin %s", pluginToLoad.getPluginJarPath(), - pluginToLoad.getName()); + String message = + String.format( + "Unable to load jar file %s for plugin %s", + pluginToLoad.getPluginJarPath(), pluginToLoad.getName()); throw new RuntimeException(message, e); } } @@ -85,15 +92,18 @@ private void createJarEntryMap() throws IOException { } /** - * Load plugin class from jar given in pluginToLoad parameter and return instance of class which implements Plugin - * interface. - * This method verifies whether loaded plugin is assignable to expectedInstanceOf class + * Load plugin class from jar given in pluginToLoad parameter and return instance of class which + * implements Plugin interface. This method verifies whether loaded plugin is assignable to + * expectedInstanceOf class + * * @param expectedInstanceOf class instance of interface caller is expecting * @return Instance of Plugin - * @throws ClassNotFoundException className parameter available in Plugin configuration is not found + * @throws ClassNotFoundException className parameter available in Plugin configuration is not + * found */ @Nonnull - public Plugin instantiatePlugin(@Nonnull Class<? extends Plugin> expectedInstanceOf) throws ClassNotFoundException { + public Plugin instantiatePlugin(@Nonnull Class<? extends Plugin> expectedInstanceOf) + throws ClassNotFoundException { Class<?> clazz = this.loadClass(this._pluginConfig.getClassName(), true); try { @@ -102,14 +112,17 @@ public Plugin instantiatePlugin(@Nonnull Class<? 
extends Plugin> expectedInstanc // Check loaded plugin has implemented the proper implementation of child interface if (!expectedInstanceOf.isAssignableFrom(clazz)) { throw new InstantiationException( - String.format("In plugin %s, the class %s has not implemented the interface %s", - this._pluginConfig.getName(), plugin.getClass().getCanonicalName(), + String.format( + "In plugin %s, the class %s has not implemented the interface %s", + this._pluginConfig.getName(), + plugin.getClass().getCanonicalName(), expectedInstanceOf.getCanonicalName())); } log.debug("Successfully created instance of plugin {}", this._pluginConfig.getClassName()); return plugin; } catch (InstantiationException | IllegalAccessException e) { - throw new RuntimeException(String.format("Failed to instantiate the plugin %s", this._pluginConfig.getName()), e); + throw new RuntimeException( + String.format("Failed to instantiate the plugin %s", this._pluginConfig.getName()), e); } } @@ -157,7 +170,8 @@ protected Class<?> loadClass(String s, boolean b) throws ClassNotFoundException byte[] classBytes = getClassData(this._classPathVsZipEntry.get(path)); ProtectionDomain protectionDomain = - this._pluginPermissionManager.createProtectionDomain(this._pluginConfig.getPluginHomeDirectory()); + this._pluginPermissionManager.createProtectionDomain( + this._pluginConfig.getPluginHomeDirectory()); return defineClass(s, classBytes, 0, classBytes.length, protectionDomain); } @@ -210,8 +224,11 @@ private Optional<URL> findResourceInPluginJar(String resource) { private Optional<URL> findResourceInPluginHome(String resource) { try { - try (Stream<Path> stream = Files.find(this._pluginConfig.getPluginHomeDirectory(), 1, - ((path, basicFileAttributes) -> path.toFile().getName().equals(resource)))) { + try (Stream<Path> stream = + Files.find( + this._pluginConfig.getPluginHomeDirectory(), + 1, + ((path, basicFileAttributes) -> path.toFile().getName().equals(resource)))) { List<Path> resources = stream.collect(Collectors.toList()); if (resources.size() > 0) { log.debug("Number of resources found {}", resources.size()); @@ -227,9 +244,9 @@ private Optional<URL> findResourceInPluginHome(String resource) { } /** - * Look for resource in below order - * - First search in plugin jar if not found - * - then search in plugin directory if not found then return null + * Look for resource in below order - First search in plugin jar if not found - then search in + * plugin directory if not found then return null + * * @param resource Resource to find * @return URL of the resource */ diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java index a20e9d0760968..f27a2e2551d58 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java @@ -8,15 +8,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j class JarExtractor { - private JarExtractor() { - } + private JarExtractor() {} /** * Write url content to destinationFilePath + * * @param url * @param destinationFilePath * @throws IOException @@ -30,4 +29,4 @@ public static void write(@Nonnull URL url, @Nonnull Path destinationFilePath) th } } } -} \ No newline at end of file +} diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java 
b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java index 0596f8abcea74..7107787fdec3b 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java @@ -11,7 +11,6 @@ import java.security.cert.Certificate; import javax.annotation.Nonnull; - public class PluginPermissionManagerImpl implements PluginPermissionManager { private final SecurityMode _securityMode; @@ -21,7 +20,9 @@ public PluginPermissionManagerImpl(@Nonnull SecurityMode securityMode) { } /** - * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to the plugin code + * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to + * the plugin code + * * @param pluginHome * @return ProtectionDomain */ diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java index 64c53f1cb6db3..ccc95e4941ad0 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java @@ -8,7 +8,6 @@ import java.util.Map; import org.testng.annotations.Test; - @Test public class TestConfig { @Test @@ -16,8 +15,12 @@ public void testConfig() { PluginConfig authorizerConfig = new PluginConfig(); authorizerConfig.setName("apache-ranger-authorizer"); authorizerConfig.setType(PluginType.AUTHORIZER); - authorizerConfig.setParams(Map.of("className", "com.datahub.authorization.ranger.RangerAuthorizer", "configs", - Map.of("username", "foo", "password", "root123"))); + authorizerConfig.setParams( + Map.of( + "className", + "com.datahub.authorization.ranger.RangerAuthorizer", + "configs", + Map.of("username", "foo", "password", "root123"))); PluginConfig authenticatorConfig = new PluginConfig(); authorizerConfig.setName("sample-authenticator"); diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java index e311aae258109..bfb83f0ddfb24 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java @@ -11,7 +11,6 @@ import java.util.function.Consumer; import org.testng.annotations.Test; - @Test public class TestConfigProvider { @Test @@ -26,45 +25,57 @@ public void testConfigurationLoading() throws Exception { List<PluginConfig> authenticators = authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - List<PluginConfig> authorizers = authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); + List<PluginConfig> authorizers = + authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); assert authenticators.size() != 0; assert authorizers.size() != 0; - Consumer<PluginConfig> validateAuthenticationPlugin = (plugin) -> { - assert plugin.getName().equals("apache-ranger-authenticator"); + Consumer<PluginConfig> validateAuthenticationPlugin = + (plugin) -> { + assert plugin.getName().equals("apache-ranger-authenticator"); - assert "com.datahub.ranger.Authenticator".equals(plugin.getClassName()); + assert 
"com.datahub.ranger.Authenticator".equals(plugin.getClassName()); - assert plugin.getEnabled(); + assert plugin.getEnabled(); - String pluginJarPath = - Paths.get(pluginBaseDirectory.toString(), "apache-ranger-authenticator", "apache-ranger-authenticator.jar") - .toAbsolutePath() - .toString(); - assert pluginJarPath.equals(plugin.getPluginJarPath().toString()); + String pluginJarPath = + Paths.get( + pluginBaseDirectory.toString(), + "apache-ranger-authenticator", + "apache-ranger-authenticator.jar") + .toAbsolutePath() + .toString(); + assert pluginJarPath.equals(plugin.getPluginJarPath().toString()); - String pluginDirectory = Paths.get(pluginBaseDirectory.toString(), plugin.getName()).toAbsolutePath().toString(); - assert pluginDirectory.equals(plugin.getPluginHomeDirectory().toString()); - }; + String pluginDirectory = + Paths.get(pluginBaseDirectory.toString(), plugin.getName()) + .toAbsolutePath() + .toString(); + assert pluginDirectory.equals(plugin.getPluginHomeDirectory().toString()); + }; - Consumer<PluginConfig> validateAuthorizationPlugin = (plugin) -> { - assert plugin.getName().equals("apache-ranger-authorizer"); + Consumer<PluginConfig> validateAuthorizationPlugin = + (plugin) -> { + assert plugin.getName().equals("apache-ranger-authorizer"); - assert "com.datahub.ranger.Authorizer".equals(plugin.getClassName()); + assert "com.datahub.ranger.Authorizer".equals(plugin.getClassName()); - assert plugin.getEnabled(); + assert plugin.getEnabled(); - assert Paths.get(pluginBaseDirectory.toString(), "apache-ranger-authorizer", "apache-ranger-authorizer.jar") - .toAbsolutePath() - .toString() - .equals(plugin.getPluginJarPath().toString()); + assert Paths.get( + pluginBaseDirectory.toString(), + "apache-ranger-authorizer", + "apache-ranger-authorizer.jar") + .toAbsolutePath() + .toString() + .equals(plugin.getPluginJarPath().toString()); - assert Paths.get(pluginBaseDirectory.toString(), plugin.getName()) - .toAbsolutePath() - .toString() - .equals(plugin.getPluginHomeDirectory().toString()); - }; + assert Paths.get(pluginBaseDirectory.toString(), plugin.getName()) + .toAbsolutePath() + .toString() + .equals(plugin.getPluginHomeDirectory().toString()); + }; authenticators.forEach(validateAuthenticationPlugin); authorizers.forEach(validateAuthorizationPlugin); diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java index d85bfc0379d17..6596ca0c83f33 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java @@ -6,7 +6,6 @@ import java.util.List; import org.testng.annotations.Test; - @Test public class TestConfigValidationUtils { diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java index 314849e8ebea5..5e447caa292e2 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java @@ -30,35 +30,32 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; - /** - * This test case covers below scenarios - * 1. 
Loading plugin configuration and validating the loaded configuration against the expected configuration. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testConfigurationLoading()} - * test + * This test case covers below scenarios 1. Loading plugin configuration and validating the loaded + * configuration against the expected configuration. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testConfigurationLoading()} test * - * 2. Plugin name should be unique in config.yaml. The plugin framework should raise error if more than one plugin - * has the same name. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testDuplicatePluginName()} - * test + * <p>2. Plugin name should be unique in config.yaml. The plugin framework should raise error if + * more than one plugin has the same name. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testDuplicatePluginName()} test * - * 3. Developer can provide plugin jar file name in config.yaml. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testJarFileName()} test + * <p>3. Developer can provide plugin jar file name in config.yaml. This scenario is covered + * in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testJarFileName()} test * - * 4. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthenticatorPlugin()} covers the valid - * authenticator plugin execution. - * Plugin used in this test-case is metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub - * /plugins/test/TestAuthenticator.java + * <p>4. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthenticatorPlugin()} + * covers the valid authenticator plugin execution. Plugin used in this test-case is + * metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub + * /plugins/test/TestAuthenticator.java * - * 5. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthorizerPlugin()} covers the valid - * authorizer plugin execution - * Plugin used in this test-case is metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub - * /plugins/test/TestAuthorizer.java + * <p>5. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthorizerPlugin()} covers + * the valid authorizer plugin execution Plugin used in this test-case is + * metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub + * /plugins/test/TestAuthorizer.java * - * 6. The plugin framework should raise error if authenticator plugin is configured as authorizer plugin or vice-versa. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testIncorrectImplementation - * ()}. - * The test case tries to load authorizer plugin as authenticator plugin + * <p>6. The plugin framework should raise error if authenticator plugin is configured as authorizer + * plugin or vice-versa. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testIncorrectImplementation ()}. 
The test case + * tries to load authorizer plugin as authenticator plugin */ class TestIsolatedClassLoader { @@ -84,22 +81,34 @@ public void testDuplicatePluginName() { public void testJarFileName() throws Exception { Path configPath = Paths.get("src", "test", "resources", "plugin-jar-from-jarFileName"); - Path authenticatorPluginJarPath = Paths.get(configPath.toAbsolutePath().toString(), "apache-ranger-authenticator", - "apache-ranger-authenticator-v1.0.1.jar"); - Config config = (new ConfigProvider(configPath)).load().orElseThrow(() -> new Exception("Should not be empty")); - List<PluginConfig> pluginConfig = (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHENTICATOR); - pluginConfig.forEach((pluginConfigWithJar) -> { - assert pluginConfigWithJar.getPluginJarPath().equals(authenticatorPluginJarPath); - }); - - Path authorizerPluginJarPath = Paths.get(configPath.toAbsolutePath().toString(), "apache-ranger-authorizer", - "apache-ranger-authorizer-v2.0.1.jar"); + Path authenticatorPluginJarPath = + Paths.get( + configPath.toAbsolutePath().toString(), + "apache-ranger-authenticator", + "apache-ranger-authenticator-v1.0.1.jar"); + Config config = + (new ConfigProvider(configPath)) + .load() + .orElseThrow(() -> new Exception("Should not be empty")); + List<PluginConfig> pluginConfig = + (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHENTICATOR); + pluginConfig.forEach( + (pluginConfigWithJar) -> { + assert pluginConfigWithJar.getPluginJarPath().equals(authenticatorPluginJarPath); + }); + + Path authorizerPluginJarPath = + Paths.get( + configPath.toAbsolutePath().toString(), + "apache-ranger-authorizer", + "apache-ranger-authorizer-v2.0.1.jar"); List<PluginConfig> authorizerPluginConfigs = (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHORIZER); - authorizerPluginConfigs.forEach((pluginConfigWithJar) -> { - assert pluginConfigWithJar.getPluginJarPath().equals(authorizerPluginJarPath); - }); + authorizerPluginConfigs.forEach( + (pluginConfigWithJar) -> { + assert pluginConfigWithJar.getPluginJarPath().equals(authorizerPluginJarPath); + }); } public static Path getSamplePluginDirectory() { @@ -145,14 +154,21 @@ public void testAuthenticatorPlugin() throws ClassNotFoundException, Authenticat // authenticator plugin config instance AuthenticatorPluginConfig authenticatorPluginConfig = getAuthenticatorPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); // initiate and invoke the init and authenticate methods - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); - AuthenticatorContext authenticatorContext = new AuthenticatorContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, authenticatorPluginConfig.getPluginHomeDirectory().toString())); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + AuthenticatorContext authenticatorContext = + new AuthenticatorContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + authenticatorPluginConfig.getPluginHomeDirectory().toString())); AuthenticationRequest 
request = new AuthenticationRequest(ImmutableMap.of("foo", "bar")); - authenticator.init(authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), authenticatorContext); + authenticator.init( + authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), authenticatorContext); Authentication authentication = authenticator.authenticate(request); assert authentication.getActor().getId().equals("fake"); @@ -163,13 +179,20 @@ public void testAuthorizerPlugin() throws ClassNotFoundException, Authentication // authenticator plugin config instance AuthorizerPluginConfig authorizerPluginConfig = getAuthorizerPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authorizerPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authorizerPluginConfig); // initiate and invoke the init and authenticate methods Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); - AuthorizerContext authorizerContext = new AuthorizerContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, authorizerPluginConfig.getPluginHomeDirectory().toString()), null); - AuthorizationRequest authorizationRequest = new AuthorizationRequest("urn:li:user:fake", "test", Optional.empty()); + AuthorizerContext authorizerContext = + new AuthorizerContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + authorizerPluginConfig.getPluginHomeDirectory().toString()), + null); + AuthorizationRequest authorizationRequest = + new AuthorizationRequest("urn:li:user:fake", "test", Optional.empty()); authorizer.init(authorizerPluginConfig.getConfigs().orElse(new HashMap<>()), authorizerContext); assert authorizer.authorize(authorizationRequest).getMessage().equals("fake message"); } @@ -178,13 +201,17 @@ public void testAuthorizerPlugin() throws ClassNotFoundException, Authentication public void testIncorrectImplementation() { AuthorizerPluginConfig authorizerPluginConfig = getAuthorizerPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authorizerPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authorizerPluginConfig); // initiate and invoke the init and authenticate methods try { - // Authorizer configuration is provided, however here we were expecting that plugin should be of type + // Authorizer configuration is provided, however here we were expecting that plugin should be + // of type // Authenticator.class - Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authenticator.class); + Authorizer authorizer = + (Authorizer) isolatedClassLoader.instantiatePlugin(Authenticator.class); assert authorizer != null; } catch (RuntimeException | ClassNotFoundException e) { assert e.getCause() instanceof java.lang.InstantiationException; @@ -197,10 +224,13 @@ public void testLenientMode() throws ClassNotFoundException, AuthenticationExcep AuthenticatorPluginConfig authenticatorPluginConfig = 
getAuthenticatorPluginConfig(); authenticatorPluginConfig.setClassName("com.datahub.plugins.test.TestLenientModeAuthenticator"); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.LENIENT); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.LENIENT); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); // initiate and invoke the init and authenticate methods - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); authenticator.init(authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), null); AuthenticationRequest request = new AuthenticationRequest(ImmutableMap.of("foo", "bar")); assert authenticator.authenticate(request) != null; diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java index 1d182f5fa8ea7..f620a1687064c 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java @@ -10,7 +10,6 @@ import java.util.List; import org.testng.annotations.Test; - public class TestPluginConfigFactory { @Test @@ -26,14 +25,20 @@ public void authConfig() throws Exception { // Load authenticator plugin configuration List<PluginConfig> authenticatorConfigs = authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - authenticatorConfigs.forEach(c -> { - assert c.getClassName().equals("com.datahub.ranger.Authenticator"); // className should match to Authenticator - }); + authenticatorConfigs.forEach( + c -> { + assert c.getClassName() + .equals( + "com.datahub.ranger.Authenticator"); // className should match to Authenticator + }); // Load authorizer plugin configuration - List<PluginConfig> authorizerConfigs = authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); - authorizerConfigs.forEach(c -> { - assert c.getClassName().equals("com.datahub.ranger.Authorizer"); // className should match to Authorizer - }); + List<PluginConfig> authorizerConfigs = + authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); + authorizerConfigs.forEach( + c -> { + assert c.getClassName() + .equals("com.datahub.ranger.Authorizer"); // className should match to Authorizer + }); } } diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java index e6882e7de3120..56e4c150b100c 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java @@ -14,19 +14,23 @@ import java.util.Map; import org.testng.annotations.Test; - @Test public class TestPluginPermissionManager { @Test public void testRestrictedMode() throws MalformedURLException { - PluginPermissionManagerImpl pluginPermissionManager = new 
PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + PluginPermissionManagerImpl pluginPermissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - Path pluginHome = Paths.get("src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); + Path pluginHome = + Paths.get( + "src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); - ProtectionDomain protectionDomain = pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); + ProtectionDomain protectionDomain = + pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); // provided pluginHome and codeSource in protection domain should be equal - assert pluginHome.toUri() + assert pluginHome + .toUri() .toURL() .toExternalForm() .equals(protectionDomain.getCodeSource().getLocation().toExternalForm()); @@ -43,21 +47,27 @@ public void testRestrictedMode() throws MalformedURLException { map.put(pluginHome.toAbsolutePath() + "/*", "read,write,delete"); // Compare actual with expected - permissions.forEach(permission -> { - assert map.keySet().contains(permission.getName()); - assert map.values().contains(permission.getActions()); - }); + permissions.forEach( + permission -> { + assert map.keySet().contains(permission.getName()); + assert map.values().contains(permission.getActions()); + }); } public void testLenientMode() throws MalformedURLException { - PluginPermissionManagerImpl pluginPermissionManager = new PluginPermissionManagerImpl(SecurityMode.LENIENT); + PluginPermissionManagerImpl pluginPermissionManager = + new PluginPermissionManagerImpl(SecurityMode.LENIENT); - Path pluginHome = Paths.get("src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); + Path pluginHome = + Paths.get( + "src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); - ProtectionDomain protectionDomain = pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); + ProtectionDomain protectionDomain = + pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); // provided pluginHome and codeSource in protection domain should be equal - assert pluginHome.toUri() + assert pluginHome + .toUri() .toURL() .toExternalForm() .equals(protectionDomain.getCodeSource().getLocation().toExternalForm()); @@ -68,8 +78,9 @@ public void testLenientMode() throws MalformedURLException { // It should have 1 permission assert permissions.size() == 1; - permissions.forEach(permission -> { - assert permission.getName().equals("<all permissions>"); - }); + permissions.forEach( + permission -> { + assert permission.getName().equals("<all permissions>"); + }); } } diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java index 4fb958de2edd6..e234a150ccd73 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java @@ -25,13 +25,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TestAuthenticator implements Authenticator { private AuthenticatorContext _authenticatorContext; @Override - public void init(@Nonnull Map<String, Object> authenticatorConfig, @Nullable 
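To make the assertions in this test concrete: RESTRICTED mode should produce a ProtectionDomain whose code source is the plugin home and whose permissions are FilePermissions scoped to that directory with read,write,delete actions, while LENIENT mode should carry the single AllPermission, whose name prints as "<all permissions>". The sketch below rebuilds such a domain with the plain java.security API; it illustrates the contract these tests assert and is not the body of PluginPermissionManagerImpl.

  import java.io.FilePermission;
  import java.nio.file.Path;
  import java.security.AllPermission;
  import java.security.CodeSource;
  import java.security.Permissions;
  import java.security.ProtectionDomain;
  import java.security.cert.Certificate;

  // Illustrative stand-in for the permission model asserted above.
  final class PermissionModelSketch {
    static ProtectionDomain protectionDomainFor(Path pluginHome, boolean lenient) throws Exception {
      Permissions permissions = new Permissions();
      if (lenient) {
        // getName() on AllPermission is "<all permissions>", matching the lenient-mode assertion.
        permissions.add(new AllPermission());
      } else {
        // Grant file access only inside the plugin home, as the restricted-mode test expects.
        permissions.add(
            new FilePermission(pluginHome.toAbsolutePath() + "/*", "read,write,delete"));
      }
      // The code source is the plugin home itself, which the tests compare via toExternalForm().
      CodeSource codeSource = new CodeSource(pluginHome.toUri().toURL(), (Certificate[]) null);
      return new ProtectionDomain(codeSource, permissions);
    }
  }
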
AuthenticatorContext context) { + public void init( + @Nonnull Map<String, Object> authenticatorConfig, @Nullable AuthenticatorContext context) { /* * authenticatorConfig contains key, value pairs set in plugins[].params.configs of config.yml */ @@ -48,7 +48,8 @@ public void init(@Nonnull Map<String, Object> authenticatorConfig, @Nullable Aut private void readInputStream() { // Test resource as stream is working - try (InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { + try (InputStream inputStream = + this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { assert inputStream != null; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); assert reader.readLine() != null; @@ -59,9 +60,12 @@ private void readInputStream() { } private void accessFile() { - // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write on plugin directory + // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write + // on plugin directory Path pluginDirectory = - Paths.get((String) this._authenticatorContext.data().get(PluginConstant.PLUGIN_HOME), "tmp_file1.txt"); + Paths.get( + (String) this._authenticatorContext.data().get(PluginConstant.PLUGIN_HOME), + "tmp_file1.txt"); try { try (BufferedWriter writer = new BufferedWriter(new FileWriter(pluginDirectory.toString()))) { @@ -79,7 +83,8 @@ private void accessFile() { public void accessSystemProperty() { try { System.getProperty("user.home"); - throw new RuntimeException("Plugin is able to access system properties"); // we should not reach here + throw new RuntimeException( + "Plugin is able to access system properties"); // we should not reach here } catch (AccessControlException accessControlException) { log.info("Expected: Don't have permission to read system properties"); } diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java index e5f3e223ff505..4dcace841205a 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java @@ -21,7 +21,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TestAuthorizer implements Authorizer { private AuthorizerContext _authorizerContext; @@ -45,9 +44,12 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { URL url = this.getClass().getClassLoader().getResource("foo_bar.json"); assert url != null; - // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write on plugin directory + // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write + // on plugin directory Path pluginDirectory = - Paths.get((String) this._authorizerContext.data().get(PluginConstant.PLUGIN_HOME), "tmp_file1.txt"); + Paths.get( + (String) this._authorizerContext.data().get(PluginConstant.PLUGIN_HOME), + "tmp_file1.txt"); try { try (BufferedWriter writer = new BufferedWriter(new FileWriter(pluginDirectory.toString()))) { @@ -62,7 +64,8 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { } // Test resource as stream is working - try (InputStream inputStream = 
this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { + try (InputStream inputStream = + this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { assert inputStream != null; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); assert reader.readLine() != null; @@ -78,4 +81,3 @@ public AuthorizedActors authorizedActors(String privilege, Optional<EntitySpec> return new AuthorizedActors("ALL", null, null, null, true, true); } } - diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java index 2cc27f11a6254..d143b3803ca34 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java @@ -11,18 +11,17 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class TestLenientModeAuthenticator implements Authenticator { @Override - public void init(@Nonnull Map<String, Object> authenticatorConfig, @Nullable AuthenticatorContext context) { - - } + public void init( + @Nonnull Map<String, Object> authenticatorConfig, @Nullable AuthenticatorContext context) {} @Nullable @Override public Authentication authenticate(@Nonnull AuthenticationRequest authenticationRequest) throws AuthenticationException { - // We should be able to access user directory as we are going to be loaded with Lenient mode IsolatedClassLoader + // We should be able to access user directory as we are going to be loaded with Lenient mode + // IsolatedClassLoader String userHome = System.getProperty("user.home"); assert userHome != null; return new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json index 3e1b975311b11..27581334814ce 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json @@ -4,10 +4,12 @@ "path" : "/analytics", "schema" : "com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse", "doc" : "Rest.li entry point: /analytics\n\ngenerated from: com.linkedin.metadata.resources.analytics.Analytics", + "resourceClass" : "com.linkedin.metadata.resources.analytics.Analytics", "simple" : { "supports" : [ ], "actions" : [ { "name" : "getTimeseriesStats", + "javaMethodName" : "getTimeseriesStats", "parameters" : [ { "name" : "entityName", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json index 3a0df137a0469..917540aca8728 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json @@ -4,6 +4,7 @@ "path" : "/aspects", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: 
com.linkedin.metadata.resources.entity.AspectResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.AspectResource", "collection" : { "identifier" : { "name" : "aspectsId", @@ -12,6 +13,7 @@ "supports" : [ "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.\n TODO: Get rid of this and migrate to getAspect.", "parameters" : [ { "name" : "aspect", @@ -25,6 +27,7 @@ } ], "actions" : [ { "name" : "getCount", + "javaMethodName" : "getCount", "parameters" : [ { "name" : "aspect", "type" : "string" @@ -36,6 +39,7 @@ "returns" : "int" }, { "name" : "getTimeseriesAspectValues", + "javaMethodName" : "getTimeseriesAspectValues", "parameters" : [ { "name" : "urn", "type" : "string" @@ -73,6 +77,7 @@ "returns" : "com.linkedin.aspect.GetTimeseriesAspectValuesResponse" }, { "name" : "ingestProposal", + "javaMethodName" : "ingestProposal", "parameters" : [ { "name" : "proposal", "type" : "com.linkedin.mxe.MetadataChangeProposal" @@ -84,6 +89,7 @@ "returns" : "string" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json index a9de21d08aedc..eac1cc690a60d 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json @@ -4,6 +4,7 @@ "path" : "/entities", "schema" : "com.linkedin.entity.Entity", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityResource", "collection" : { "identifier" : { "name" : "entitiesId", @@ -12,6 +13,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -20,6 +22,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -28,6 +31,7 @@ } ], "actions" : [ { "name" : "applyRetention", + "javaMethodName" : "applyRetention", "parameters" : [ { "name" : "start", "type" : "int", @@ -52,6 +56,7 @@ "returns" : "string" }, { "name" : "autocomplete", + "javaMethodName" : "autocomplete", "parameters" : [ { "name" : "entity", "type" : "string" @@ -73,6 +78,7 @@ "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { "name" : "batchGetTotalEntityCount", + "javaMethodName" : "batchGetTotalEntityCount", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }" @@ -80,6 +86,7 @@ "returns" : "{ \"type\" : \"map\", \"values\" : \"long\" }" }, { "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.entity.Entity\" }" @@ -90,6 +97,7 @@ } ] }, { "name" : "browse", + "javaMethodName" : "browse", "parameters" : [ { "name" : "entity", "type" : "string" @@ -110,6 +118,7 @@ "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { "name" : "delete", + 
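The repeated one-line additions across these restspec and snapshot files all come from regenerating them with a spec generator that records provenance: "resourceClass" names the annotated rest.li resource class, and "javaMethodName" names the Java method backing each REST method or action (which is why the "delete" action here maps to deleteEntity). Below is a hypothetical resource showing that mapping; every name in it is invented for illustration, and a real resource would also extend one of rest.li's resource templates.

  import com.linkedin.restli.server.annotations.Action;
  import com.linkedin.restli.server.annotations.ActionParam;
  import com.linkedin.restli.server.annotations.RestLiCollection;

  // Serialized as: "resourceClass" : "com.example.ExampleResource"
  @RestLiCollection(name = "examples", namespace = "com.example")
  public class ExampleResource {

    // Serialized as: "name" : "getCount", "javaMethodName" : "getCount"
    @Action(name = "getCount")
    public int getCount(@ActionParam("aspect") String aspect) {
      return 0;
    }

    // The action name and the method name may differ, as with "delete" -> deleteEntity above.
    @Action(name = "delete")
    public String deleteEntity(@ActionParam("urn") String urn) {
      return urn;
    }
  }
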
"javaMethodName" : "deleteEntity", "doc" : "Deletes all data related to an individual urn(entity).\nService Returns: - a DeleteEntityResponse object.", "parameters" : [ { "name" : "urn", @@ -119,7 +128,7 @@ "name" : "aspectName", "type" : "string", "optional" : true, - "doc" : "- the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects)." + "doc" : "- the optional aspect name if only want to delete the aspect (applicable only\n for timeseries aspects)." }, { "name" : "startTimeMillis", "type" : "long", @@ -134,6 +143,7 @@ "returns" : "com.linkedin.metadata.run.DeleteEntityResponse" }, { "name" : "deleteAll", + "javaMethodName" : "deleteEntities", "parameters" : [ { "name" : "registryId", "type" : "string", @@ -146,6 +156,7 @@ "returns" : "com.linkedin.metadata.run.RollbackResponse" }, { "name" : "deleteReferences", + "javaMethodName" : "deleteReferencesTo", "parameters" : [ { "name" : "urn", "type" : "string" @@ -157,6 +168,7 @@ "returns" : "com.linkedin.metadata.run.DeleteReferencesResponse" }, { "name" : "exists", + "javaMethodName" : "exists", "parameters" : [ { "name" : "urn", "type" : "string" @@ -164,6 +176,7 @@ "returns" : "boolean" }, { "name" : "filter", + "javaMethodName" : "filter", "parameters" : [ { "name" : "entity", "type" : "string" @@ -184,6 +197,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "getBrowsePaths", + "javaMethodName" : "getBrowsePaths", "parameters" : [ { "name" : "urn", "type" : "com.linkedin.common.Urn" @@ -191,6 +205,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"string\" }" }, { "name" : "getTotalEntityCount", + "javaMethodName" : "getTotalEntityCount", "parameters" : [ { "name" : "entity", "type" : "string" @@ -198,6 +213,7 @@ "returns" : "long" }, { "name" : "ingest", + "javaMethodName" : "ingest", "parameters" : [ { "name" : "entity", "type" : "com.linkedin.entity.Entity" @@ -208,6 +224,7 @@ } ] }, { "name" : "list", + "javaMethodName" : "list", "parameters" : [ { "name" : "entity", "type" : "string" @@ -229,6 +246,7 @@ "returns" : "com.linkedin.metadata.query.ListResult" }, { "name" : "listUrns", + "javaMethodName" : "listUrns", "parameters" : [ { "name" : "entity", "type" : "string" @@ -242,6 +260,7 @@ "returns" : "com.linkedin.metadata.query.ListUrnsResult" }, { "name" : "scrollAcrossEntities", + "javaMethodName" : "scrollAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -274,6 +293,7 @@ "returns" : "com.linkedin.metadata.search.ScrollResult" }, { "name" : "scrollAcrossLineage", + "javaMethodName" : "scrollAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -325,6 +345,7 @@ "returns" : "com.linkedin.metadata.search.LineageScrollResult" }, { "name" : "search", + "javaMethodName" : "search", "parameters" : [ { "name" : "entity", "type" : "string" @@ -360,6 +381,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossEntities", + "javaMethodName" : "searchAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -389,6 +411,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossLineage", + "javaMethodName" : "searchAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -437,6 +460,7 @@ "returns" : "com.linkedin.metadata.search.LineageSearchResult" }, { "name" : "setWritable", + "javaMethodName" : "setWriteable", "parameters" : [ { "name" : 
"value", "type" : "boolean", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json index 0c92a981c7356..33cfba0f27802 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json @@ -4,6 +4,7 @@ "path" : "/entitiesV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityV2Resource", "collection" : { "identifier" : { "name" : "entitiesV2Id", @@ -12,6 +13,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -20,6 +22,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json index 579f1d7c7dddc..f3eb9d38dc6ae 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json @@ -4,6 +4,7 @@ "path" : "/entitiesVersionedV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", "collection" : { "identifier" : { "name" : "entitiesVersionedV2Id", @@ -12,6 +13,7 @@ "supports" : [ "batch_get" ], "methods" : [ { "method" : "batch_get", + "javaMethodName" : "batchGetVersioned", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json index 5eaa34bc7a2e9..7284cd2bac48f 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json @@ -4,6 +4,7 @@ "path" : "/runs", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "resource for showing information and rolling back runs\n\ngenerated from: com.linkedin.metadata.resources.entity.BatchIngestionRunResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.BatchIngestionRunResource", "collection" : { "identifier" : { "name" : "runsId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "describe", + "javaMethodName" : "describe", "parameters" : [ { "name" : "runId", "type" : "string" @@ -33,6 +35,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.AspectRowSummary\" }" }, { "name" : "list", + "javaMethodName" : "list", "doc" : "Retrieves the value for an entity that is made up of latest versions of 
specified aspects.", "parameters" : [ { "name" : "pageOffset", @@ -50,6 +53,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.IngestionRunSummary\" }" }, { "name" : "rollback", + "javaMethodName" : "rollback", "doc" : "Rolls back an ingestion run", "parameters" : [ { "name" : "runId", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json index 68f9fe8ae152e..7056368d82c7d 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json @@ -4,10 +4,12 @@ "path" : "/relationships", "schema" : "com.linkedin.common.EntityRelationships", "doc" : "Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types}\n\ngenerated from: com.linkedin.metadata.resources.lineage.Relationships", + "resourceClass" : "com.linkedin.metadata.resources.lineage.Relationships", "simple" : { "supports" : [ "delete", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "parameters" : [ { "name" : "urn", "type" : "string" @@ -28,6 +30,7 @@ } ] }, { "method" : "delete", + "javaMethodName" : "delete", "parameters" : [ { "name" : "urn", "type" : "string" @@ -35,6 +38,7 @@ } ], "actions" : [ { "name" : "getLineage", + "javaMethodName" : "getLineage", "parameters" : [ { "name" : "urn", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json index 958ec13b37fca..0fb6a18a7974b 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json @@ -4,6 +4,7 @@ "path" : "/operations", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Endpoints for performing maintenance operations\n\ngenerated from: com.linkedin.metadata.resources.operations.OperationsResource", + "resourceClass" : "com.linkedin.metadata.resources.operations.OperationsResource", "collection" : { "identifier" : { "name" : "operationsId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "getEsTaskStatus", + "javaMethodName" : "getTaskStatus", "parameters" : [ { "name" : "nodeId", "type" : "string", @@ -28,9 +30,11 @@ "returns" : "string" }, { "name" : "getIndexSizes", + "javaMethodName" : "getIndexSizes", "returns" : "com.linkedin.timeseries.TimeseriesIndicesSizesResult" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", @@ -55,6 +59,7 @@ "returns" : "string" }, { "name" : "truncateTimeseriesAspect", + "javaMethodName" : "truncateTimeseriesAspect", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json index 3346ddd23e3ba..9fbb3e9b6698e 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json @@ -4,6 +4,7 @@ "path" : "/platform", "schema" : "com.linkedin.entity.Entity", "doc" : "DataHub Platform 
Actions\n\ngenerated from: com.linkedin.metadata.resources.platform.PlatformResource", + "resourceClass" : "com.linkedin.metadata.resources.platform.PlatformResource", "collection" : { "identifier" : { "name" : "platformId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "producePlatformEvent", + "javaMethodName" : "producePlatformEvent", "parameters" : [ { "name" : "name", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json index 2a4cf40b58412..42f0894fbb7a6 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json @@ -7,6 +7,7 @@ "path" : "/usageStats", "schema" : "com.linkedin.usage.UsageAggregation", "doc" : "Rest.li entry point: /usageStats\n\ngenerated from: com.linkedin.metadata.resources.usage.UsageStats", + "resourceClass" : "com.linkedin.metadata.resources.usage.UsageStats", "simple" : { "supports" : [ ], "actions" : [ { @@ -14,12 +15,14 @@ "deprecated" : { } }, "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "buckets", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.usage.UsageAggregation\" }" } ] }, { "name" : "query", + "javaMethodName" : "query", "parameters" : [ { "name" : "resource", "type" : "string" @@ -42,6 +45,7 @@ "returns" : "com.linkedin.usage.UsageQueryResult" }, { "name" : "queryRange", + "javaMethodName" : "queryRange", "parameters" : [ { "name" : "resource", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json index d75ec58546465..c4532cba9e6be 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json @@ -222,10 +222,12 @@ "path" : "/analytics", "schema" : "com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse", "doc" : "Rest.li entry point: /analytics\n\ngenerated from: com.linkedin.metadata.resources.analytics.Analytics", + "resourceClass" : "com.linkedin.metadata.resources.analytics.Analytics", "simple" : { "supports" : [ ], "actions" : [ { "name" : "getTimeseriesStats", + "javaMethodName" : "getTimeseriesStats", "parameters" : [ { "name" : "entityName", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index 0403fa2ceea6f..bca3e7161c8b8 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -3993,6 +3993,7 @@ "path" : "/aspects", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.AspectResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.AspectResource", "collection" : { "identifier" : { "name" : "aspectsId", @@ -4001,6 +4002,7 @@ "supports" : [ "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : 
"Retrieves the value for an entity that is made up of latest versions of specified aspects.\n TODO: Get rid of this and migrate to getAspect.", "parameters" : [ { "name" : "aspect", @@ -4014,6 +4016,7 @@ } ], "actions" : [ { "name" : "getCount", + "javaMethodName" : "getCount", "parameters" : [ { "name" : "aspect", "type" : "string" @@ -4025,6 +4028,7 @@ "returns" : "int" }, { "name" : "getTimeseriesAspectValues", + "javaMethodName" : "getTimeseriesAspectValues", "parameters" : [ { "name" : "urn", "type" : "string" @@ -4062,6 +4066,7 @@ "returns" : "com.linkedin.aspect.GetTimeseriesAspectValuesResponse" }, { "name" : "ingestProposal", + "javaMethodName" : "ingestProposal", "parameters" : [ { "name" : "proposal", "type" : "com.linkedin.mxe.MetadataChangeProposal" @@ -4073,6 +4078,7 @@ "returns" : "string" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index d79a4a1919af9..69184856e4f9e 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -6289,6 +6289,7 @@ "path" : "/entities", "schema" : "com.linkedin.entity.Entity", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityResource", "collection" : { "identifier" : { "name" : "entitiesId", @@ -6297,6 +6298,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -6305,6 +6307,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6313,6 +6316,7 @@ } ], "actions" : [ { "name" : "applyRetention", + "javaMethodName" : "applyRetention", "parameters" : [ { "name" : "start", "type" : "int", @@ -6337,6 +6341,7 @@ "returns" : "string" }, { "name" : "autocomplete", + "javaMethodName" : "autocomplete", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6358,6 +6363,7 @@ "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { "name" : "batchGetTotalEntityCount", + "javaMethodName" : "batchGetTotalEntityCount", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }" @@ -6365,6 +6371,7 @@ "returns" : "{ \"type\" : \"map\", \"values\" : \"long\" }" }, { "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.entity.Entity\" }" @@ -6375,6 +6382,7 @@ } ] }, { "name" : "browse", + "javaMethodName" : "browse", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6395,6 +6403,7 @@ "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { "name" : "delete", + "javaMethodName" : "deleteEntity", "doc" : "Deletes all data related to an individual urn(entity).\nService Returns: - a DeleteEntityResponse object.", "parameters" : [ { "name" : "urn", @@ -6404,7 +6413,7 @@ "name" : "aspectName", 
"type" : "string", "optional" : true, - "doc" : "- the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects)." + "doc" : "- the optional aspect name if only want to delete the aspect (applicable only\n for timeseries aspects)." }, { "name" : "startTimeMillis", "type" : "long", @@ -6419,6 +6428,7 @@ "returns" : "com.linkedin.metadata.run.DeleteEntityResponse" }, { "name" : "deleteAll", + "javaMethodName" : "deleteEntities", "parameters" : [ { "name" : "registryId", "type" : "string", @@ -6431,6 +6441,7 @@ "returns" : "com.linkedin.metadata.run.RollbackResponse" }, { "name" : "deleteReferences", + "javaMethodName" : "deleteReferencesTo", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6442,6 +6453,7 @@ "returns" : "com.linkedin.metadata.run.DeleteReferencesResponse" }, { "name" : "exists", + "javaMethodName" : "exists", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6449,6 +6461,7 @@ "returns" : "boolean" }, { "name" : "filter", + "javaMethodName" : "filter", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6469,6 +6482,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "getBrowsePaths", + "javaMethodName" : "getBrowsePaths", "parameters" : [ { "name" : "urn", "type" : "com.linkedin.common.Urn" @@ -6476,6 +6490,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"string\" }" }, { "name" : "getTotalEntityCount", + "javaMethodName" : "getTotalEntityCount", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6483,6 +6498,7 @@ "returns" : "long" }, { "name" : "ingest", + "javaMethodName" : "ingest", "parameters" : [ { "name" : "entity", "type" : "com.linkedin.entity.Entity" @@ -6493,6 +6509,7 @@ } ] }, { "name" : "list", + "javaMethodName" : "list", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6514,6 +6531,7 @@ "returns" : "com.linkedin.metadata.query.ListResult" }, { "name" : "listUrns", + "javaMethodName" : "listUrns", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6527,6 +6545,7 @@ "returns" : "com.linkedin.metadata.query.ListUrnsResult" }, { "name" : "scrollAcrossEntities", + "javaMethodName" : "scrollAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6559,6 +6578,7 @@ "returns" : "com.linkedin.metadata.search.ScrollResult" }, { "name" : "scrollAcrossLineage", + "javaMethodName" : "scrollAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6610,6 +6630,7 @@ "returns" : "com.linkedin.metadata.search.LineageScrollResult" }, { "name" : "search", + "javaMethodName" : "search", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6645,6 +6666,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossEntities", + "javaMethodName" : "searchAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6674,6 +6696,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossLineage", + "javaMethodName" : "searchAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6722,6 +6745,7 @@ "returns" : "com.linkedin.metadata.search.LineageSearchResult" }, { "name" : "setWritable", + "javaMethodName" : "setWriteable", "parameters" : [ { "name" : "value", "type" : "boolean", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json 
b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json index c7618e5d3c5a1..3eac87e268f5d 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json @@ -162,6 +162,7 @@ "path" : "/entitiesV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityV2Resource", "collection" : { "identifier" : { "name" : "entitiesV2Id", @@ -170,6 +171,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -178,6 +180,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json index 45e542883b723..1733537e68f30 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json @@ -171,6 +171,7 @@ "path" : "/entitiesVersionedV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", "collection" : { "identifier" : { "name" : "entitiesVersionedV2Id", @@ -179,6 +180,7 @@ "supports" : [ "batch_get" ], "methods" : [ { "method" : "batch_get", + "javaMethodName" : "batchGetVersioned", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index b20953749ac35..09c0185f74f3a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -3748,6 +3748,7 @@ "path" : "/runs", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "resource for showing information and rolling back runs\n\ngenerated from: com.linkedin.metadata.resources.entity.BatchIngestionRunResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.BatchIngestionRunResource", "collection" : { "identifier" : { "name" : "runsId", @@ -3756,6 +3757,7 @@ "supports" : [ ], "actions" : [ { "name" : "describe", + "javaMethodName" : "describe", "parameters" : [ { "name" : "runId", "type" : "string" @@ -3777,6 +3779,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.AspectRowSummary\" }" }, { "name" : "list", + "javaMethodName" : "list", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" 
: "pageOffset", @@ -3794,6 +3797,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.IngestionRunSummary\" }" }, { "name" : "rollback", + "javaMethodName" : "rollback", "doc" : "Rolls back an ingestion run", "parameters" : [ { "name" : "runId", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json index 6febf225ad77d..9aa40edd0b118 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json @@ -180,10 +180,12 @@ "path" : "/relationships", "schema" : "com.linkedin.common.EntityRelationships", "doc" : "Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types}\n\ngenerated from: com.linkedin.metadata.resources.lineage.Relationships", + "resourceClass" : "com.linkedin.metadata.resources.lineage.Relationships", "simple" : { "supports" : [ "delete", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "parameters" : [ { "name" : "urn", "type" : "string" @@ -204,6 +206,7 @@ } ] }, { "method" : "delete", + "javaMethodName" : "delete", "parameters" : [ { "name" : "urn", "type" : "string" @@ -211,6 +214,7 @@ } ], "actions" : [ { "name" : "getLineage", + "javaMethodName" : "getLineage", "parameters" : [ { "name" : "urn", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index e29dd6809b968..339ce62de6298 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -3690,6 +3690,7 @@ "path" : "/operations", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Endpoints for performing maintenance operations\n\ngenerated from: com.linkedin.metadata.resources.operations.OperationsResource", + "resourceClass" : "com.linkedin.metadata.resources.operations.OperationsResource", "collection" : { "identifier" : { "name" : "operationsId", @@ -3698,6 +3699,7 @@ "supports" : [ ], "actions" : [ { "name" : "getEsTaskStatus", + "javaMethodName" : "getTaskStatus", "parameters" : [ { "name" : "nodeId", "type" : "string", @@ -3714,9 +3716,11 @@ "returns" : "string" }, { "name" : "getIndexSizes", + "javaMethodName" : "getIndexSizes", "returns" : "com.linkedin.timeseries.TimeseriesIndicesSizesResult" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", @@ -3741,6 +3745,7 @@ "returns" : "string" }, { "name" : "truncateTimeseriesAspect", + "javaMethodName" : "truncateTimeseriesAspect", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index 8391af60f8ece..cb253c458e6c4 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -5542,6 +5542,7 @@ "path" : "/platform", "schema" : 
"com.linkedin.entity.Entity", "doc" : "DataHub Platform Actions\n\ngenerated from: com.linkedin.metadata.resources.platform.PlatformResource", + "resourceClass" : "com.linkedin.metadata.resources.platform.PlatformResource", "collection" : { "identifier" : { "name" : "platformId", @@ -5550,6 +5551,7 @@ "supports" : [ ], "actions" : [ { "name" : "producePlatformEvent", + "javaMethodName" : "producePlatformEvent", "parameters" : [ { "name" : "name", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json index a21b0c1cd30be..e8e68dae4c368 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json @@ -164,6 +164,7 @@ "path" : "/usageStats", "schema" : "com.linkedin.usage.UsageAggregation", "doc" : "Rest.li entry point: /usageStats\n\ngenerated from: com.linkedin.metadata.resources.usage.UsageStats", + "resourceClass" : "com.linkedin.metadata.resources.usage.UsageStats", "simple" : { "supports" : [ ], "actions" : [ { @@ -171,12 +172,14 @@ "deprecated" : { } }, "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "buckets", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.usage.UsageAggregation\" }" } ] }, { "name" : "query", + "javaMethodName" : "query", "parameters" : [ { "name" : "resource", "type" : "string" @@ -199,6 +202,7 @@ "returns" : "com.linkedin.usage.UsageQueryResult" }, { "name" : "queryRange", + "javaMethodName" : "queryRange", "parameters" : [ { "name" : "resource", "type" : "string" diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java b/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java index a61c6e53ab814..eb04382dda45c 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java @@ -8,8 +8,6 @@ import com.linkedin.restli.client.base.BatchGetEntityRequestBuilderBase; import com.linkedin.restli.common.ComplexResourceKey; import com.linkedin.restli.common.EmptyRecord; - -import javax.annotation.Nonnull; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -18,47 +16,52 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public final class BatchGetUtils { - private BatchGetUtils() { - // not called - } - - private static int batchSize = 25; + private BatchGetUtils() { + // not called + } - public static < - U extends Urn, - T extends RecordTemplate, - CRK extends ComplexResourceKey<K, EmptyRecord>, - RB extends BatchGetEntityRequestBuilderBase<CRK, T, RB>, - K extends RecordTemplate> Map<U, T> batchGet( - @Nonnull Set<U> urns, - Function<Void, BatchGetEntityRequestBuilderBase<CRK, T, RB>> requestBuilders, - Function<U, CRK> getKeyFromUrn, - Function<CRK, U> getUrnFromKey, - Client client - ) throws RemoteInvocationException { - AtomicInteger index = new AtomicInteger(0); + private static int batchSize = 25; - final Collection<List<U>> entityUrnBatches = urns.stream() - .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) - .values(); + public static < + U extends Urn, + T extends RecordTemplate, + CRK extends 
ComplexResourceKey<K, EmptyRecord>, + RB extends BatchGetEntityRequestBuilderBase<CRK, T, RB>, + K extends RecordTemplate> + Map<U, T> batchGet( + @Nonnull Set<U> urns, + Function<Void, BatchGetEntityRequestBuilderBase<CRK, T, RB>> requestBuilders, + Function<U, CRK> getKeyFromUrn, + Function<CRK, U> getUrnFromKey, + Client client) + throws RemoteInvocationException { + AtomicInteger index = new AtomicInteger(0); - final Map<U, T> response = new HashMap<>(); + final Collection<List<U>> entityUrnBatches = + urns.stream() + .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) + .values(); - for (List<U> urnsInBatch : entityUrnBatches) { - BatchGetEntityRequest<CRK, T> batchGetRequest = - requestBuilders.apply(null) - .ids(urnsInBatch.stream().map(getKeyFromUrn).collect(Collectors.toSet())) - .build(); - final Map<U, T> batchResponse = client.sendRequest(batchGetRequest).getResponseEntity().getResults() - .entrySet().stream().collect(Collectors.toMap( - entry -> getUrnFromKey.apply(entry.getKey()), - entry -> entry.getValue().getEntity()) - ); - response.putAll(batchResponse); - } + final Map<U, T> response = new HashMap<>(); - return response; + for (List<U> urnsInBatch : entityUrnBatches) { + BatchGetEntityRequest<CRK, T> batchGetRequest = + requestBuilders + .apply(null) + .ids(urnsInBatch.stream().map(getKeyFromUrn).collect(Collectors.toSet())) + .build(); + final Map<U, T> batchResponse = + client.sendRequest(batchGetRequest).getResponseEntity().getResults().entrySet().stream() + .collect( + Collectors.toMap( + entry -> getUrnFromKey.apply(entry.getKey()), + entry -> entry.getValue().getEntity())); + response.putAll(batchResponse); } + + return response; + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java index 1ba0e5c3d555a..4474fd5ce67ec 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java @@ -5,20 +5,17 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; - -import java.util.Objects; -import java.util.Set; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - import com.linkedin.restli.client.AbstractRequestBuilder; import com.linkedin.restli.client.Client; import com.linkedin.restli.client.Request; import com.linkedin.restli.client.Response; +import java.util.Objects; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpHeaders; - @Slf4j public abstract class BaseClient implements AutoCloseable { @@ -26,7 +23,8 @@ public abstract class BaseClient implements AutoCloseable { protected final BackoffPolicy _backoffPolicy; protected final int _retryCount; - protected final static Set<String> NON_RETRYABLE = Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); + protected static final Set<String> NON_RETRYABLE = + Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); protected BaseClient(@Nonnull Client restliClient, BackoffPolicy backoffPolicy, int retryCount) { _client = Objects.requireNonNull(restliClient); @@ -34,16 +32,20 @@ protected BaseClient(@Nonnull Client restliClient, BackoffPolicy backoffPolicy, _retryCount = 
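The grouping idiom in BatchGetUtils above is worth isolating: pair each element of an unordered set with a monotonically increasing index, integer-divide by the batch size, and group on the quotient to get fixed-size batches. A runnable distillation of just that step:

  import java.util.Collection;
  import java.util.List;
  import java.util.Set;
  import java.util.concurrent.atomic.AtomicInteger;
  import java.util.stream.Collectors;
  import java.util.stream.IntStream;

  final class BatchingSketch {
    // Same idiom as BatchGetUtils: index / batchSize is the batch key.
    static <U> Collection<List<U>> partition(Set<U> items, int batchSize) {
      AtomicInteger index = new AtomicInteger(0);
      return items.stream()
          .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize))
          .values();
    }

    public static void main(String[] args) {
      // 60 items with a batch size of 25 yield batches of 25, 25, and 10.
      Set<Integer> items = IntStream.range(0, 60).boxed().collect(Collectors.toSet());
      partition(items, 25).forEach(batch -> System.out.println(batch.size()));
    }
  }
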
retryCount; } - protected <T> Response<T> sendClientRequest(final AbstractRequestBuilder<?, ?, ? extends Request<T>> requestBuilder) throws RemoteInvocationException { + protected <T> Response<T> sendClientRequest( + final AbstractRequestBuilder<?, ?, ? extends Request<T>> requestBuilder) + throws RemoteInvocationException { return sendClientRequest(requestBuilder, null); } /** - * TODO: Remove unused "actor" parameter. Actor is now implied by the systemClientId + systemClientSecret. + * TODO: Remove unused "actor" parameter. Actor is now implied by the systemClientId + + * systemClientSecret. */ protected <T> Response<T> sendClientRequest( final AbstractRequestBuilder<?, ?, ? extends Request<T>> requestBuilder, - @Nullable final Authentication authentication) throws RemoteInvocationException { + @Nullable final Authentication authentication) + throws RemoteInvocationException { if (authentication != null) { requestBuilder.addHeader(HttpHeaders.AUTHORIZATION, authentication.getCredentials()); } @@ -54,10 +56,15 @@ protected <T> Response<T> sendClientRequest( try { return _client.sendRequest(requestBuilder.build()).getResponse(); } catch (Throwable ex) { - MetricUtils.counter(BaseClient.class, "exception" + MetricUtils.DELIMITER + ex.getClass().getName().toLowerCase()).inc(); - - final boolean skipRetry = NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) - || (ex.getCause() != null && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); + MetricUtils.counter( + BaseClient.class, + "exception" + MetricUtils.DELIMITER + ex.getClass().getName().toLowerCase()) + .inc(); + + final boolean skipRetry = + NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) + || (ex.getCause() != null + && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); if (attemptCount == _retryCount || skipRetry) { throw ex; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java index 79d473d1b0090..56565819afc30 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java @@ -9,126 +9,138 @@ import com.github.benmanes.caffeine.cache.stats.CacheStats; import com.linkedin.metadata.config.cache.client.ClientCacheConfig; import com.linkedin.metadata.utils.metrics.MetricUtils; -import lombok.Builder; -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.checkerframework.checker.nullness.qual.Nullable; - import java.util.Map; import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Function; +import lombok.Builder; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.checkerframework.checker.nullness.qual.Nullable; /** * Generic cache with common configuration for limited weight, per item expiry, and batch loading + * * @param <K> key * @param <V> value */ @Slf4j @Builder public class ClientCache<K, V, C extends ClientCacheConfig> { - @NonNull - protected final C config; - @NonNull - protected final LoadingCache<K, V> cache; - @NonNull - private final Function<Iterable<? 
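The retry logic being reflowed in BaseClient above follows one pattern: retry up to a fixed count, but rethrow immediately when the exception, or its cause, appears in a set of non-retryable class names. A self-contained rendering of that control flow, with Callable standing in for the rest.li request types; only the branching mirrors the patch:

  import java.util.Set;
  import java.util.concurrent.Callable;

  final class RetrySketch {
    // Same sentinel entry the patch declares in BaseClient.NON_RETRYABLE.
    private static final Set<String> NON_RETRYABLE =
        Set.of("com.linkedin.data.template.RequiredFieldNotPresentException");

    static <T> T withRetries(Callable<T> call, int retryCount) throws Exception {
      int attemptCount = 0;
      while (true) {
        try {
          return call.call();
        } catch (Exception ex) {
          boolean skipRetry =
              NON_RETRYABLE.contains(ex.getClass().getCanonicalName())
                  || (ex.getCause() != null
                      && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName()));
          if (attemptCount == retryCount || skipRetry) {
            throw ex;
          }
          attemptCount++; // the real client also sleeps per its BackoffPolicy here
        }
      }
    }
  }
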
extends K>, Map<K, V>> loadFunction; - @NonNull - private final Weigher<K, V> weigher; - @NonNull - private final BiFunction<C, K, Integer> ttlSecondsFunction; - - public @Nullable V get(@NonNull K key) { - return cache.get(key); - } + @NonNull protected final C config; + @NonNull protected final LoadingCache<K, V> cache; + @NonNull private final Function<Iterable<? extends K>, Map<K, V>> loadFunction; + @NonNull private final Weigher<K, V> weigher; + @NonNull private final BiFunction<C, K, Integer> ttlSecondsFunction; + + public @Nullable V get(@NonNull K key) { + return cache.get(key); + } + + public @NonNull Map<@NonNull K, @NonNull V> getAll(@NonNull Iterable<? extends @NonNull K> keys) { + return cache.getAll(keys); + } - public @NonNull Map<@NonNull K, @NonNull V> getAll(@NonNull Iterable<? extends @NonNull K> keys) { - return cache.getAll(keys); + public void refresh(@NonNull K key) { + cache.refresh(key); + } + + public static class ClientCacheBuilder<K, V, C extends ClientCacheConfig> { + + private ClientCacheBuilder<K, V, C> cache(LoadingCache<K, V> cache) { + return null; } - public void refresh(@NonNull K key) { - cache.refresh(key); + private ClientCache<K, V, C> build() { + return null; } - public static class ClientCacheBuilder<K, V, C extends ClientCacheConfig> { - - private ClientCacheBuilder<K, V, C> cache(LoadingCache<K, V> cache) { - return null; - } - private ClientCache<K, V, C> build() { - return null; - } - - public ClientCache<K, V, C> build(Class<?> metricClazz) { - // loads data from entity client - CacheLoader<K, V> loader = new CacheLoader<K, V>() { - @Override - public V load(@NonNull K key) { - return loadAll(Set.of(key)).get(key); - } - - @Override - @NonNull - public Map<K, V> loadAll(@NonNull Set<? extends K> keys) { - return loadFunction.apply(keys); - } - }; - - // build cache - Caffeine<K, V> caffeine = Caffeine.newBuilder() - .maximumWeight(config.getMaxBytes()) - // limit total size - .weigher(weigher) - .softValues() - // define per entity/aspect ttls - .expireAfter(new Expiry<K, V>() { - public long expireAfterCreate(@NonNull K key, @NonNull V aspect, long currentTime) { - int ttlSeconds = ttlSecondsFunction.apply(config, key); - if (ttlSeconds < 0) { - ttlSeconds = Integer.MAX_VALUE; - } - return TimeUnit.SECONDS.toNanos(ttlSeconds); - } - public long expireAfterUpdate(@NonNull K key, @NonNull V aspect, - long currentTime, long currentDuration) { - return currentDuration; - } - public long expireAfterRead(@NonNull K key, @NonNull V aspect, - long currentTime, long currentDuration) { - return currentDuration; - } - }); - - if (config.isStatsEnabled()) { - caffeine.recordStats(); + public ClientCache<K, V, C> build(Class<?> metricClazz) { + // loads data from entity client + CacheLoader<K, V> loader = + new CacheLoader<K, V>() { + @Override + public V load(@NonNull K key) { + return loadAll(Set.of(key)).get(key); } - LoadingCache<K, V> cache = caffeine.build(loader); - - if (config.isStatsEnabled()) { - ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); - executor.scheduleAtFixedRate(() -> { - CacheStats cacheStats = cache.stats(); - - MetricUtils.gauge(metricClazz, "hitRate", () -> (Gauge<Double>) cacheStats::hitRate); - MetricUtils.gauge(metricClazz, "loadFailureRate", () -> - (Gauge<Double>) cacheStats::loadFailureRate); - MetricUtils.gauge(metricClazz, "evictionCount", () -> - (Gauge<Long>) cacheStats::evictionCount); - MetricUtils.gauge(metricClazz, "loadFailureCount", () -> - (Gauge<Long>) cacheStats::loadFailureCount); 
- MetricUtils.gauge(metricClazz, "averageLoadPenalty", () -> - (Gauge<Double>) cacheStats::averageLoadPenalty); - MetricUtils.gauge(metricClazz, "evictionWeight", () -> - (Gauge<Long>) cacheStats::evictionWeight); - - log.debug(metricClazz.getSimpleName() + ": " + cacheStats); - }, 0, config.getStatsIntervalSeconds(), TimeUnit.SECONDS); + @Override + @NonNull + public Map<K, V> loadAll(@NonNull Set<? extends K> keys) { + return loadFunction.apply(keys); } - - return new ClientCache<>(config, cache, loadFunction, weigher, ttlSecondsFunction); - } + }; + + // build cache + Caffeine<K, V> caffeine = + Caffeine.newBuilder() + .maximumWeight(config.getMaxBytes()) + // limit total size + .weigher(weigher) + .softValues() + // define per entity/aspect ttls + .expireAfter( + new Expiry<K, V>() { + public long expireAfterCreate( + @NonNull K key, @NonNull V aspect, long currentTime) { + int ttlSeconds = ttlSecondsFunction.apply(config, key); + if (ttlSeconds < 0) { + ttlSeconds = Integer.MAX_VALUE; + } + return TimeUnit.SECONDS.toNanos(ttlSeconds); + } + + public long expireAfterUpdate( + @NonNull K key, @NonNull V aspect, long currentTime, long currentDuration) { + return currentDuration; + } + + public long expireAfterRead( + @NonNull K key, @NonNull V aspect, long currentTime, long currentDuration) { + return currentDuration; + } + }); + + if (config.isStatsEnabled()) { + caffeine.recordStats(); + } + + LoadingCache<K, V> cache = caffeine.build(loader); + + if (config.isStatsEnabled()) { + ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); + executor.scheduleAtFixedRate( + () -> { + CacheStats cacheStats = cache.stats(); + + MetricUtils.gauge(metricClazz, "hitRate", () -> (Gauge<Double>) cacheStats::hitRate); + MetricUtils.gauge( + metricClazz, + "loadFailureRate", + () -> (Gauge<Double>) cacheStats::loadFailureRate); + MetricUtils.gauge( + metricClazz, "evictionCount", () -> (Gauge<Long>) cacheStats::evictionCount); + MetricUtils.gauge( + metricClazz, + "loadFailureCount", + () -> (Gauge<Long>) cacheStats::loadFailureCount); + MetricUtils.gauge( + metricClazz, + "averageLoadPenalty", + () -> (Gauge<Double>) cacheStats::averageLoadPenalty); + MetricUtils.gauge( + metricClazz, "evictionWeight", () -> (Gauge<Long>) cacheStats::evictionWeight); + + log.debug(metricClazz.getSimpleName() + ": " + cacheStats); + }, + 0, + config.getStatsIntervalSeconds(), + TimeUnit.SECONDS); + } + + return new ClientCache<>(config, cache, loadFunction, weigher, ttlSecondsFunction); } + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 84d0ed6b9594d..7bc50a8f3dc7e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -45,7 +45,8 @@ public EntityResponse getV2( @Nonnull String entityName, @Nonnull final Urn urn, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull @Deprecated @@ -57,18 +58,21 @@ public Map<Urn, EntityResponse> batchGetV2( @Nonnull String entityName, @Nonnull final Set<Urn> urns, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws 
RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull Map<Urn, EntityResponse> batchGetVersionedV2( @Nonnull String entityName, @Nonnull final Set<VersionedUrn> versionedUrns, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull @Deprecated - public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) + public Map<Urn, Entity> batchGet( + @Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -81,9 +85,14 @@ public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Au * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nullable String field, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Gets browse snapshot of a given path @@ -94,8 +103,12 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nonnull Authentication authentication) + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -109,8 +122,13 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, - @Nullable Map<String, String> requestFilters, int start, int limit, @Nonnull Authentication authentication) + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map<String, String> requestFilters, + int start, + int limit, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -125,8 +143,14 @@ public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated @@ -134,11 +158,15 @@ public void update(@Nonnull final Entity entity, @Nonnull final Authentication a throws RemoteInvocationException; @Deprecated - public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication 
authentication) throws RemoteInvocationException; + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException; @Deprecated - public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) + public void batchUpdate( + @Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -153,15 +181,20 @@ public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Auth * @throws RemoteInvocationException */ @Nonnull - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull Authentication authentication, + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull Authentication authentication, @Nullable SearchFlags searchFlags) throws RemoteInvocationException; /** * Filters for entities matching to a given query and filters * - * TODO: This no longer has any usages, can we deprecate/remove? + * <p>TODO: This no longer has any usages, can we deprecate/remove? * * @param requestFilters search filters * @param start start offset for search results @@ -170,8 +203,13 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, * @throws RemoteInvocationException */ @Nonnull - public ListResult list(@Nonnull String entity, @Nullable Map<String, String> requestFilters, int start, int count, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public ListResult list( + @Nonnull String entity, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Searches for datasets matching to a given query and filters @@ -186,9 +224,16 @@ public ListResult list(@Nonnull String entity, @Nullable Map<String, String> req * @throws RemoteInvocationException */ @Nonnull - public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - SortCriterion sortCriterion, int start, int count, @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) throws RemoteInvocationException; + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException; /** * Searches for entities matching to a given query and filters across multiple entity types @@ -203,9 +248,15 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nulla * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull Authentication authentication) + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -222,9 
+273,16 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull Authentication authentication, List<String> facets) + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull Authentication authentication, + List<String> facets) throws RemoteInvocationException; /** @@ -240,8 +298,14 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul * @throws RemoteInvocationException */ @Nonnull - ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, @Nullable SearchFlags searchFlags, + ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) throws RemoteInvocationException; @@ -258,43 +322,57 @@ ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull Strin * @param start index to start the search from * @param count the number of search hits to return * @param searchFlags configuration flags for the search request - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** - * Gets a list of documents that match given search request that is related to - * the input entity + * Gets a list of documents that match given search request that is related to the input entity * - * @param sourceUrn Urn of the source entity - * @param direction Direction of the relationship - * @param entities list of entities to search (If empty, searches - * across all entities) - * @param input the search input text - * @param maxHops the max number of hops away to search for. If null, - * searches all hops. 
- * @param filter the request map with fields and values as filters - * to be applied to search hits - * @param sortCriterion {@link SortCriterion} to be applied to search - * results - * @param start index to start the search from - * @param count the number of search hits to return - * @param endTimeMillis end time to filter to + * @param sourceUrn Urn of the source entity + * @param direction Direction of the relationship + * @param entities list of entities to search (If empty, searches across all entities) + * @param input the search input text + * @param maxHops the max number of hops away to search for. If null, searches all hops. + * @param filter the request map with fields and values as filters to be applied to search hits + * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param start index to start the search from + * @param count the number of search hits to return + * @param endTimeMillis end time to filter to * @param startTimeMillis start time to filter from * @param searchFlags configuration flags for the search request - * @return a {@link SearchResult} that contains a list of matched documents and - * related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -309,16 +387,27 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll ID indicating offset * @param keepAlive string representation of time to keep point in time alive, ex: 5m - * @param endTimeMillis end time to filter to + * @param endTimeMillis end time to filter to * @param startTimeMillis start time to filter from * @param count the number of search hits to return - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable final Long startTimeMillis, @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, + LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection 
direction, + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException; @@ -333,28 +422,29 @@ LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull Lineage public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; - public void setWritable(boolean canWrite, @Nonnull Authentication authentication) throws RemoteInvocationException; + public void setWritable(boolean canWrite, @Nonnull Authentication authentication) + throws RemoteInvocationException; @Nonnull - public Map<String, Long> batchGetTotalEntityCount(@Nonnull List<String> entityName, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public Map<String, Long> batchGetTotalEntityCount( + @Nonnull List<String> entityName, @Nonnull Authentication authentication) + throws RemoteInvocationException; - /** - * List all urns existing for a particular Entity type. - */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, - @Nonnull final Authentication authentication) throws RemoteInvocationException; + /** List all urns existing for a particular Entity type. */ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException; - /** - * Hard delete an entity with a particular urn. - */ + /** Hard delete an entity with a particular urn. */ public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; - /** - * Delete all references to an entity with a particular urn. - */ - public void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + /** Delete all references to an entity with a particular urn. */ + public void deleteEntityReferences( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -369,68 +459,96 @@ public void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authen * @throws RemoteInvocationException */ @Nonnull - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Checks whether an entity with a given urn exists * * @param urn the urn of the entity - * @return true if an entity exists, i.e. there are > 0 aspects in the DB for the entity. This means that the entity - * has not been hard-deleted. + * @return true if an entity exists, i.e. there are > 0 aspects in the DB for the entity. This + * means that the entity has not been hard-deleted. 
* @throws RemoteInvocationException */ @Nonnull - public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; + public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) + throws RemoteInvocationException; @Nullable @Deprecated - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException; @Nullable @Deprecated - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException; - default List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nonnull Authentication authentication) + default List<EnvelopedAspect> getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nonnull Authentication authentication) throws RemoteInvocationException { return getTimeseriesAspectValues( - urn, - entity, - aspect, - startTimeMillis, - endTimeMillis, - limit, - filter, - null, - authentication); + urn, entity, aspect, startTimeMillis, endTimeMillis, limit, filter, null, authentication); } - public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull Authentication authentication) + public List<EnvelopedAspect> getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated - default String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + default String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { return ingestProposal(metadataChangeProposal, authentication, false); } - String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException; + String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException; @Deprecated - default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataChangeProposal, + default String 
wrappedIngestProposal( + @Nonnull MetadataChangeProposal metadataChangeProposal, @Nonnull final Authentication authentication) { return wrappedIngestProposal(metadataChangeProposal, authentication, false); } - default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) { + default String wrappedIngestProposal( + @Nonnull MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) { try { return ingestProposal(metadataChangeProposal, authentication, async); } catch (RemoteInvocationException e) { @@ -439,13 +557,18 @@ default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataCha } @Deprecated - default List<String> batchIngestProposals(@Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + default List<String> batchIngestProposals( + @Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { return batchIngestProposals(metadataChangeProposals, authentication, false); } - default List<String> batchIngestProposals(@Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException { + default List<String> batchIngestProposals( + @Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { return metadataChangeProposals.stream() .map(proposal -> wrappedIngestProposal(proposal, authentication, async)) .collect(Collectors.toList()); @@ -453,16 +576,29 @@ default List<String> batchIngestProposals(@Nonnull final Collection<MetadataChan @Nonnull @Deprecated - public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull Authentication authentication) + public <T extends RecordTemplate> Optional<T> getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class<T> aspectClass, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException; - public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, - @Nonnull Authentication authentication) throws Exception; + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull Authentication authentication) + throws Exception; - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) throws Exception; + public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + throws Exception; } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java 
b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java index 8e103cff283ea..453eecab7b446 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java @@ -1,5 +1,7 @@ package com.linkedin.entity.client; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.github.benmanes.caffeine.cache.LoadingCache; import com.github.benmanes.caffeine.cache.Weigher; import com.linkedin.common.client.ClientCache; @@ -9,11 +11,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.util.Pair; -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Map; import java.util.Optional; @@ -22,116 +19,144 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; @Builder public class EntityClientCache { - @NonNull - private EntityClientCacheConfig config; - @NonNull - private final ClientCache<Key, EnvelopedAspect, EntityClientCacheConfig> cache; - @NonNull - private BiFunction<Set<Urn>, Set<String>, Map<Urn, EntityResponse>> loadFunction; - - public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) { - return batchGetV2(Set.of(urn), aspectNames).get(urn); - } - - public Map<Urn, EntityResponse> batchGetV2(@Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { - final Map<Urn, EntityResponse> response; - - if (config.isEnabled()) { - Set<Key> keys = urns.stream() - .flatMap(urn -> aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build())) - .collect(Collectors.toSet()); - Map<Key, EnvelopedAspect> envelopedAspects = cache.getAll(keys); - - Set<EntityResponse> responses = envelopedAspects.entrySet().stream() - .map(entry -> Pair.of(entry.getKey().getUrn(), entry.getValue())) - .collect(Collectors.groupingBy(Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet()))) - .entrySet().stream().map(e -> toEntityResponse(e.getKey(), e.getValue())) - .collect(Collectors.toSet()); - - response = responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity())); - } else { - response = loadFunction.apply(urns, aspectNames); - } - - return response; + @NonNull private EntityClientCacheConfig config; + @NonNull private final ClientCache<Key, EnvelopedAspect, EntityClientCacheConfig> cache; + @NonNull private BiFunction<Set<Urn>, Set<String>, Map<Urn, EntityResponse>> loadFunction; + + public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) { + return batchGetV2(Set.of(urn), aspectNames).get(urn); + } + + public Map<Urn, EntityResponse> batchGetV2( + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + final Map<Urn, EntityResponse> response; + + if (config.isEnabled()) { + Set<Key> keys = + urns.stream() + .flatMap( + urn -> + aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build())) + .collect(Collectors.toSet()); + Map<Key, EnvelopedAspect> envelopedAspects = cache.getAll(keys); + + Set<EntityResponse> responses = + 
envelopedAspects.entrySet().stream() + .map(entry -> Pair.of(entry.getKey().getUrn(), entry.getValue())) + .collect( + Collectors.groupingBy( + Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet()))) + .entrySet() + .stream() + .map(e -> toEntityResponse(e.getKey(), e.getValue())) + .collect(Collectors.toSet()); + + response = + responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity())); + } else { + response = loadFunction.apply(urns, aspectNames); } - private static EntityResponse toEntityResponse(Urn urn, Collection<EnvelopedAspect> envelopedAspects) { - final EntityResponse response = new EntityResponse(); - response.setUrn(urn); - response.setEntityName(urnToEntityName(urn)); - response.setAspects(new EnvelopedAspectMap( - envelopedAspects.stream() - .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)) - )); - return response; + return response; + } + + private static EntityResponse toEntityResponse( + Urn urn, Collection<EnvelopedAspect> envelopedAspects) { + final EntityResponse response = new EntityResponse(); + response.setUrn(urn); + response.setEntityName(urnToEntityName(urn)); + response.setAspects( + new EnvelopedAspectMap( + envelopedAspects.stream() + .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)))); + return response; + } + + public static class EntityClientCacheBuilder { + + private EntityClientCacheBuilder cache(LoadingCache<Key, EnvelopedAspect> cache) { + return this; } - public static class EntityClientCacheBuilder { - - private EntityClientCacheBuilder cache(LoadingCache<Key, EnvelopedAspect> cache) { - return this; - } - - public EntityClientCache build(Class<?> metricClazz) { - // estimate size - Weigher<Key, EnvelopedAspect> weighByEstimatedSize = (key, value) -> - value.getValue().data().toString().getBytes().length; - - // batch loads data from entity client (restli or java) - Function<Iterable<? extends Key>, Map<Key, EnvelopedAspect>> loader = (Iterable<? extends Key> keys) -> { - Map<String, Set<Key>> keysByEntity = StreamSupport.stream(keys.spliterator(), true) - .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())); - - Map<Key, EnvelopedAspect> results = keysByEntity.entrySet().stream() - .flatMap(entry -> { - Set<Urn> urns = entry.getValue().stream() - .map(Key::getUrn) - .collect(Collectors.toSet()); - Set<String> aspects = entry.getValue().stream() - .map(Key::getAspectName) - .collect(Collectors.toSet()); - return loadFunction.apply(urns, aspects).entrySet().stream(); + public EntityClientCache build(Class<?> metricClazz) { + // estimate size + Weigher<Key, EnvelopedAspect> weighByEstimatedSize = + (key, value) -> value.getValue().data().toString().getBytes().length; + + // batch loads data from entity client (restli or java) + Function<Iterable<? extends Key>, Map<Key, EnvelopedAspect>> loader = + (Iterable<? 
extends Key> keys) -> { + Map<String, Set<Key>> keysByEntity = + StreamSupport.stream(keys.spliterator(), true) + .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())); + + Map<Key, EnvelopedAspect> results = + keysByEntity.entrySet().stream() + .flatMap( + entry -> { + Set<Urn> urns = + entry.getValue().stream() + .map(Key::getUrn) + .collect(Collectors.toSet()); + Set<String> aspects = + entry.getValue().stream() + .map(Key::getAspectName) + .collect(Collectors.toSet()); + return loadFunction.apply(urns, aspects).entrySet().stream(); }) - .flatMap(resp -> resp.getValue().getAspects().values().stream() - .map(envAspect -> { - Key key = Key.builder().urn(resp.getKey()).aspectName(envAspect.getName()).build(); - return Map.entry(key, envAspect); - })).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - return results; - }; - - // ideally the cache time comes from caching headers from service, but configuration driven for now - BiFunction<EntityClientCacheConfig, Key, Integer> ttlSeconds = (config, key) -> - Optional.ofNullable(config.getEntityAspectTTLSeconds()).orElse(Map.of()) - .getOrDefault(key.getEntityName(), Map.of()) - .getOrDefault(key.getAspectName(), config.getDefaultTTLSeconds()); - - cache = ClientCache.<Key, EnvelopedAspect, EntityClientCacheConfig>builder() - .weigher(weighByEstimatedSize) - .config(config) - .loadFunction(loader) - .ttlSecondsFunction(ttlSeconds) - .build(metricClazz); - - return new EntityClientCache(config, cache, loadFunction); - } + .flatMap( + resp -> + resp.getValue().getAspects().values().stream() + .map( + envAspect -> { + Key key = + Key.builder() + .urn(resp.getKey()) + .aspectName(envAspect.getName()) + .build(); + return Map.entry(key, envAspect); + })) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return results; + }; + + // ideally the cache time comes from caching headers from service, but configuration driven + // for now + BiFunction<EntityClientCacheConfig, Key, Integer> ttlSeconds = + (config, key) -> + Optional.ofNullable(config.getEntityAspectTTLSeconds()) + .orElse(Map.of()) + .getOrDefault(key.getEntityName(), Map.of()) + .getOrDefault(key.getAspectName(), config.getDefaultTTLSeconds()); + + cache = + ClientCache.<Key, EnvelopedAspect, EntityClientCacheConfig>builder() + .weigher(weighByEstimatedSize) + .config(config) + .loadFunction(loader) + .ttlSecondsFunction(ttlSeconds) + .build(metricClazz); + + return new EntityClientCache(config, cache, loadFunction); } + } - @Data - @Builder - protected static class Key { - private final Urn urn; - private final String aspectName; + @Data + @Builder + protected static class Key { + private final Urn urn; + private final String aspectName; - public String getEntityName() { - return urn.getEntityType(); - } + public String getEntityName() { + return urn.getEntityType(); } + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 2716e27518fcc..c854cb9dd279e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -91,74 +91,95 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; - @Slf4j public class RestliEntityClient extends BaseClient implements EntityClient { - private 
static final EntitiesRequestBuilders ENTITIES_REQUEST_BUILDERS = new EntitiesRequestBuilders(); - private static final EntitiesV2RequestBuilders ENTITIES_V2_REQUEST_BUILDERS = new EntitiesV2RequestBuilders(); + private static final EntitiesRequestBuilders ENTITIES_REQUEST_BUILDERS = + new EntitiesRequestBuilders(); + private static final EntitiesV2RequestBuilders ENTITIES_V2_REQUEST_BUILDERS = + new EntitiesV2RequestBuilders(); private static final EntitiesVersionedV2RequestBuilders ENTITIES_VERSIONED_V2_REQUEST_BUILDERS = new EntitiesVersionedV2RequestBuilders(); - private static final AspectsRequestBuilders ASPECTS_REQUEST_BUILDERS = new AspectsRequestBuilders(); - private static final PlatformRequestBuilders PLATFORM_REQUEST_BUILDERS = new PlatformRequestBuilders(); + private static final AspectsRequestBuilders ASPECTS_REQUEST_BUILDERS = + new AspectsRequestBuilders(); + private static final PlatformRequestBuilders PLATFORM_REQUEST_BUILDERS = + new PlatformRequestBuilders(); private static final RunsRequestBuilders RUNS_REQUEST_BUILDERS = new RunsRequestBuilders(); - public RestliEntityClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount) { + public RestliEntityClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount) { super(restliClient, backoffPolicy, retryCount); } @Nullable - public EntityResponse getV2(@Nonnull String entityName, @Nonnull final Urn urn, - @Nullable final Set<String> aspectNames, @Nonnull final Authentication authentication) + public EntityResponse getV2( + @Nonnull String entityName, + @Nonnull final Urn urn, + @Nullable final Set<String> aspectNames, + @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final EntitiesV2GetRequestBuilder requestBuilder = ENTITIES_V2_REQUEST_BUILDERS.get() - .aspectsParam(aspectNames) - .id(urn.toString()); + final EntitiesV2GetRequestBuilder requestBuilder = + ENTITIES_V2_REQUEST_BUILDERS.get().aspectsParam(aspectNames).id(urn.toString()); return sendClientRequest(requestBuilder, authentication).getEntity(); } @Nonnull public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - return sendClientRequest(ENTITIES_REQUEST_BUILDERS.get().id(urn.toString()), authentication).getEntity(); + return sendClientRequest(ENTITIES_REQUEST_BUILDERS.get().id(urn.toString()), authentication) + .getEntity(); } /** * Legacy! Use {#batchGetV2} instead, as this method leverages Snapshot models, and will not work * for fetching entities + aspects added by Entity Registry configuration. * - * Batch get a set of {@link Entity} objects by urn. + * <p>Batch get a set of {@link Entity} objects by urn. 
* * @param urns the urns of the entities to batch get * @param authentication the authentication to include in the request to the Metadata Service * @throws RemoteInvocationException */ @Nonnull - public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) + public Map<Urn, Entity> batchGet( + @Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) throws RemoteInvocationException { final Integer batchSize = 25; final AtomicInteger index = new AtomicInteger(0); final Collection<List<Urn>> entityUrnBatches = - urns.stream().collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)).values(); + urns.stream() + .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) + .values(); final Map<Urn, Entity> response = new HashMap<>(); for (List<Urn> urnsInBatch : entityUrnBatches) { EntitiesBatchGetRequestBuilder batchGetRequestBuilder = - ENTITIES_REQUEST_BUILDERS.batchGet().ids(urnsInBatch.stream().map(Urn::toString).collect(Collectors.toSet())); - final Map<Urn, Entity> batchResponse = sendClientRequest(batchGetRequestBuilder, authentication).getEntity() - .getResults() - .entrySet() - .stream() - .collect(Collectors.toMap(entry -> { - try { - return Urn.createFromString(entry.getKey()); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create Urn from key string %s", entry.getKey())); - } - }, entry -> entry.getValue().getEntity())); + ENTITIES_REQUEST_BUILDERS + .batchGet() + .ids(urnsInBatch.stream().map(Urn::toString).collect(Collectors.toSet())); + final Map<Urn, Entity> batchResponse = + sendClientRequest(batchGetRequestBuilder, authentication) + .getEntity() + .getResults() + .entrySet() + .stream() + .collect( + Collectors.toMap( + entry -> { + try { + return Urn.createFromString(entry.getKey()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to create Urn from key string %s", entry.getKey())); + } + }, + entry -> entry.getValue().getEntity())); response.putAll(batchResponse); } return response; @@ -174,25 +195,36 @@ public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Au * @throws RemoteInvocationException */ @Nonnull - public Map<Urn, EntityResponse> batchGetV2(@Nonnull String entityName, @Nonnull final Set<Urn> urns, - @Nullable final Set<String> aspectNames, @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { + public Map<Urn, EntityResponse> batchGetV2( + @Nonnull String entityName, + @Nonnull final Set<Urn> urns, + @Nullable final Set<String> aspectNames, + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { - final EntitiesV2BatchGetRequestBuilder requestBuilder = ENTITIES_V2_REQUEST_BUILDERS.batchGet() - .aspectsParam(aspectNames) - .ids(urns.stream().map(Urn::toString).collect(Collectors.toList())); + final EntitiesV2BatchGetRequestBuilder requestBuilder = + ENTITIES_V2_REQUEST_BUILDERS + .batchGet() + .aspectsParam(aspectNames) + .ids(urns.stream().map(Urn::toString).collect(Collectors.toList())); - return sendClientRequest(requestBuilder, authentication).getEntity() + return sendClientRequest(requestBuilder, authentication) + .getEntity() .getResults() .entrySet() .stream() - .collect(Collectors.toMap(entry -> { - try { - return Urn.createFromString(entry.getKey()); - } catch (URISyntaxException e) { - throw new RuntimeException( - String.format("Failed to bind urn string 
with value %s into urn", entry.getKey())); - } - }, entry -> entry.getValue().getEntity())); + .collect( + Collectors.toMap( + entry -> { + try { + return Urn.createFromString(entry.getKey()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to bind urn string with value %s into urn", entry.getKey())); + } + }, + entry -> entry.getValue().getEntity())); } /** @@ -209,21 +241,31 @@ public Map<Urn, EntityResponse> batchGetVersionedV2( @Nonnull String entityName, @Nonnull final Set<VersionedUrn> versionedUrns, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - - final EntitiesVersionedV2BatchGetRequestBuilder requestBuilder = ENTITIES_VERSIONED_V2_REQUEST_BUILDERS.batchGet() - .aspectsParam(aspectNames) - .entityTypeParam(entityName) - .ids(versionedUrns.stream() - .map(versionedUrn -> com.linkedin.common.urn.VersionedUrn.of(versionedUrn.getUrn().toString(), versionedUrn.getVersionStamp())) - .collect(Collectors.toSet())); + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { - return sendClientRequest(requestBuilder, authentication).getEntity() + final EntitiesVersionedV2BatchGetRequestBuilder requestBuilder = + ENTITIES_VERSIONED_V2_REQUEST_BUILDERS + .batchGet() + .aspectsParam(aspectNames) + .entityTypeParam(entityName) + .ids( + versionedUrns.stream() + .map( + versionedUrn -> + com.linkedin.common.urn.VersionedUrn.of( + versionedUrn.getUrn().toString(), versionedUrn.getVersionStamp())) + .collect(Collectors.toSet())); + + return sendClientRequest(requestBuilder, authentication) + .getEntity() .getResults() .entrySet() .stream() - .collect(Collectors.toMap(entry -> - UrnUtils.getUrn(entry.getKey().getUrn()), entry -> entry.getValue().getEntity())); + .collect( + Collectors.toMap( + entry -> UrnUtils.getUrn(entry.getKey().getUrn()), + entry -> entry.getValue().getEntity())); } /** @@ -238,15 +280,22 @@ public Map<Urn, EntityResponse> batchGetVersionedV2( * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nullable String field, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionAutocomplete() - .entityParam(entityType) - .queryParam(query) - .fieldParam(field) - .filterParam(filterOrDefaultEmptyFilter(requestFilters)) - .limitParam(limit); + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + EntitiesDoAutocompleteRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionAutocomplete() + .entityParam(entityType) + .queryParam(query) + .fieldParam(field) + .filterParam(filterOrDefaultEmptyFilter(requestFilters)) + .limitParam(limit); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -260,14 +309,20 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nonnull final Authentication 
authentication) + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionAutocomplete() - .entityParam(entityType) - .queryParam(query) - .filterParam(filterOrDefaultEmptyFilter(requestFilters)) - .limitParam(limit); + EntitiesDoAutocompleteRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionAutocomplete() + .entityParam(entityType) + .queryParam(query) + .filterParam(filterOrDefaultEmptyFilter(requestFilters)) + .limitParam(limit); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -282,14 +337,21 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, - @Nullable Map<String, String> requestFilters, int start, int limit, @Nonnull final Authentication authentication) + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map<String, String> requestFilters, + int start, + int limit, + @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoBrowseRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionBrowse() - .pathParam(path) - .entityParam(entityType) - .startParam(start) - .limitParam(limit); + EntitiesDoBrowseRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionBrowse() + .pathParam(path) + .entityParam(entityType) + .startParam(start) + .limitParam(limit); if (requestFilters != null) { requestBuilder.filterParam(newFilter(requestFilters)); } @@ -308,31 +370,45 @@ public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) { throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoIngestRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity); + EntitiesDoIngestRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity); sendClientRequest(requestBuilder, authentication); } - public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { if (systemMetadata == null) { update(entity, authentication); return; } EntitiesDoIngestRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity).systemMetadataParam(systemMetadata); + ENTITIES_REQUEST_BUILDERS + .actionIngest() + 
.entityParam(entity) + .systemMetadataParam(systemMetadata); sendClientRequest(requestBuilder, authentication); } - public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) + public void batchUpdate( + @Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) throws RemoteInvocationException { EntitiesDoBatchIngestRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionBatchIngest().entitiesParam(new EntityArray(entities)); @@ -353,18 +429,25 @@ public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Auth */ @Nonnull @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull final Authentication authentication, + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull final Authentication authentication, @Nullable SearchFlags searchFlags) throws RemoteInvocationException { - final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionSearch() - .entityParam(entity) - .inputParam(input) - .filterParam(newFilter(requestFilters)) - .startParam(start) - .fulltextParam(searchFlags != null ? searchFlags.isFulltext() : null) - .countParam(count); + final EntitiesDoSearchRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionSearch() + .entityParam(entity) + .inputParam(input) + .filterParam(newFilter(requestFilters)) + .startParam(start) + .fulltextParam(searchFlags != null ? searchFlags.isFulltext() : null) + .countParam(count); if (searchFlags != null) { requestBuilder.searchFlagsParam(searchFlags); } @@ -382,13 +465,20 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, * @throws RemoteInvocationException */ @Nonnull - public ListResult list(@Nonnull String entity, @Nullable Map<String, String> requestFilters, int start, int count, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - final EntitiesDoListRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionList() - .entityParam(entity) - .filterParam(newFilter(requestFilters)) - .startParam(start) - .countParam(count); + public ListResult list( + @Nonnull String entity, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + final EntitiesDoListRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionList() + .entityParam(entity) + .filterParam(newFilter(requestFilters)) + .startParam(start) + .countParam(count); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -406,16 +496,24 @@ public ListResult list(@Nonnull String entity, @Nullable Map<String, String> req */ @Nonnull @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - SortCriterion sortCriterion, int start, int count, @Nonnull final Authentication authentication, + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication, @Nullable SearchFlags searchFlags) throws RemoteInvocationException { - final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionSearch() - .entityParam(entity) - 
.inputParam(input) - .startParam(start) - .countParam(count); + final EntitiesDoSearchRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionSearch() + .entityParam(entity) + .inputParam(input) + .startParam(start) + .countParam(count); if (filter != null) { requestBuilder.filterParam(filter); @@ -434,11 +532,18 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nulla } @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull final Authentication authentication) + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication) throws RemoteInvocationException { - return searchAcrossEntities(entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + return searchAcrossEntities( + entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); } /** @@ -454,13 +559,24 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull final Authentication authentication, @Nullable List<String> facets) + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication, + @Nullable List<String> facets) throws RemoteInvocationException { final EntitiesDoSearchAcrossEntitiesRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionSearchAcrossEntities().inputParam(input).startParam(start).countParam(count); + ENTITIES_REQUEST_BUILDERS + .actionSearchAcrossEntities() + .inputParam(input) + .startParam(start) + .countParam(count); if (entities != null) { requestBuilder.entitiesParam(new StringArray(entities)); @@ -481,9 +597,15 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul @Nonnull @Override - public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, - @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) + public ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull Authentication authentication) throws RemoteInvocationException { final EntitiesDoScrollAcrossEntitiesRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionScrollAcrossEntities().inputParam(input).countParam(count); @@ -509,14 +631,23 @@ public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnul @Nonnull @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull 
List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException { final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionSearchAcrossLineage() + ENTITIES_REQUEST_BUILDERS + .actionSearchAcrossLineage() .urnParam(sourceUrn.toString()) .directionParam(direction.name()) .inputParam(input) @@ -538,15 +669,25 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull @Nonnull @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException { final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionSearchAcrossLineage() + ENTITIES_REQUEST_BUILDERS + .actionSearchAcrossLineage() .urnParam(sourceUrn.toString()) .directionParam(direction.name()) .inputParam(input) @@ -572,16 +713,26 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull return sendClientRequest(requestBuilder, authentication).getEntity(); } - @Override - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable final Long startTimeMillis, @Nullable final Long endTimeMillis, @Nullable final SearchFlags searchFlags, + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable final SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException { final EntitiesDoScrollAcrossLineageRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionScrollAcrossLineage() + ENTITIES_REQUEST_BUILDERS + .actionScrollAcrossLineage() .urnParam(sourceUrn.toString()) 
.directionParam(direction.name()) .inputParam(input) @@ -633,51 +784,66 @@ public void setWritable(boolean canWrite, @Nonnull final Authentication authenti } @Nonnull - public Map<String, Long> batchGetTotalEntityCount(@Nonnull List<String> entityName, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public Map<String, Long> batchGetTotalEntityCount( + @Nonnull List<String> entityName, @Nonnull final Authentication authentication) + throws RemoteInvocationException { EntitiesDoBatchGetTotalEntityCountRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionBatchGetTotalEntityCount().entitiesParam(new StringArray(entityName)); + ENTITIES_REQUEST_BUILDERS + .actionBatchGetTotalEntityCount() + .entitiesParam(new StringArray(entityName)); return sendClientRequest(requestBuilder, authentication).getEntity(); } - /** - * List all urns existing for a particular Entity type. - */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + /** List all urns existing for a particular Entity type. */ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { EntitiesDoListUrnsRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionListUrns().entityParam(entityName).startParam(start).countParam(count); + ENTITIES_REQUEST_BUILDERS + .actionListUrns() + .entityParam(entityName) + .startParam(start) + .countParam(count); return sendClientRequest(requestBuilder, authentication).getEntity(); } - /** - * Hard delete an entity with a particular urn. - */ + /** Hard delete an entity with a particular urn. */ public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoDeleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionDelete().urnParam(urn.toString()); + EntitiesDoDeleteRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionDelete().urnParam(urn.toString()); sendClientRequest(requestBuilder, authentication); } - /** - * Delete all references to a particular entity. - */ + /** Delete all references to a particular entity. 
*/ @Override public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException { - EntitiesDoDeleteReferencesRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionDeleteReferences().urnParam(urn.toString()); + EntitiesDoDeleteReferencesRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionDeleteReferences().urnParam(urn.toString()); sendClientRequest(requestBuilder, authentication); } @Nonnull @Override - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoFilterRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionFilter() - .entityParam(entity) - .filterParam(filter) - .startParam(start) - .countParam(count); + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + EntitiesDoFilterRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionFilter() + .entityParam(entity) + .filterParam(filter) + .startParam(start) + .countParam(count); if (sortCriterion != null) { requestBuilder.sortParam(sortCriterion); } @@ -686,9 +852,10 @@ public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Null @Nonnull @Override - public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoExistsRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionExists() - .urnParam(urn.toString()); + public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + EntitiesDoExistsRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionExists().urnParam(urn.toString()); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -700,8 +867,12 @@ public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentica * @throws RemoteInvocationException on remote request error. */ @Nonnull - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { AspectsGetRequestBuilder requestBuilder = ASPECTS_REQUEST_BUILDERS.get().id(urn).aspectParam(aspect).versionParam(version); @@ -717,8 +888,12 @@ public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @N * @throws RemoteInvocationException on remote request error. 
*/ @Nullable - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { AspectsGetRequestBuilder requestBuilder = ASPECTS_REQUEST_BUILDERS.get().id(urn).aspectParam(aspect).versionParam(version); @@ -747,13 +922,21 @@ public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspe * @throws RemoteInvocationException on remote request error. */ @Nonnull - public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull final Authentication authentication) + public List<EnvelopedAspect> getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull final Authentication authentication) throws RemoteInvocationException { AspectsDoGetTimeseriesAspectValuesRequestBuilder requestBuilder = - ASPECTS_REQUEST_BUILDERS.actionGetTimeseriesAspectValues() + ASPECTS_REQUEST_BUILDERS + .actionGetTimeseriesAspectValues() .urnParam(urn) .entityParam(entity) .aspectParam(aspect); @@ -783,19 +966,29 @@ public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Non /** * Ingest a MetadataChangeProposal event. 
+ * * @return */ @Override - public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, - final boolean async) throws RemoteInvocationException { + public String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { final AspectsDoIngestProposalRequestBuilder requestBuilder = - ASPECTS_REQUEST_BUILDERS.actionIngestProposal().proposalParam(metadataChangeProposal).asyncParam(String.valueOf(async)); + ASPECTS_REQUEST_BUILDERS + .actionIngestProposal() + .proposalParam(metadataChangeProposal) + .asyncParam(String.valueOf(async)); return sendClientRequest(requestBuilder, authentication).getEntity(); } - public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull final Authentication authentication) + public <T extends RecordTemplate> Optional<T> getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class<T> aspectClass, + @Nonnull final Authentication authentication) throws RemoteInvocationException { AspectsGetRequestBuilder requestBuilder = @@ -825,18 +1018,24 @@ public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String @SneakyThrows @Override - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException { + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException { throw new MethodNotSupportedException(); } @Override - public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, @Nonnull final Authentication authentication) + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull final Authentication authentication) throws Exception { final PlatformDoProducePlatformEventRequestBuilder requestBuilder = - PLATFORM_REQUEST_BUILDERS.actionProducePlatformEvent() - .nameParam(name) - .eventParam(event); + PLATFORM_REQUEST_BUILDERS.actionProducePlatformEvent().nameParam(name).eventParam(event); if (key != null) { requestBuilder.keyParam(key); } @@ -846,28 +1045,34 @@ public void producePlatformEvent(@Nonnull String name, @Nullable String key, @No @Override public void rollbackIngestion(@Nonnull String runId, @Nonnull final Authentication authentication) throws Exception { - final RunsDoRollbackRequestBuilder requestBuilder = RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false); + final RunsDoRollbackRequestBuilder requestBuilder = + RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false); sendClientRequest(requestBuilder, authentication); } - // TODO: Refactor QueryUtils inside of metadata-io to extract these methods into a single shared library location. - // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL condition (default). + // TODO: Refactor QueryUtils inside of metadata-io to extract these methods into a single shared + // library location. + // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL + // condition (default). 
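  // A minimal usage sketch for the two statics defined just below — a hedged
  // illustration, assuming java.util.Map and HashMap are in scope at the call
  // site. newFilter collapses a param map into a single ConjunctiveCriterion of
  // EQUAL criteria and silently drops null-valued entries:
  //
  //   Map<String, String> params = new HashMap<>();
  //   params.put("origin", "PROD");
  //   params.put("platform", null);              // dropped by the null check
  //   Filter filter = newFilter(params);         // one AND group: origin EQUAL "PROD"
  //   Criterion extra = newCriterion("origin", "PROD", Condition.EQUAL);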
@Nonnull public static Filter newFilter(@Nullable Map<String, String> params) { if (params == null) { return new Filter().setOr(new ConjunctiveCriterionArray()); } - CriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue(), Condition.EQUAL)) - .collect(Collectors.toCollection(CriterionArray::new)); - return new Filter().setOr( - new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); + CriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue(), Condition.EQUAL)) + .collect(Collectors.toCollection(CriterionArray::new)); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); } @Nonnull - public static Criterion newCriterion(@Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { + public static Criterion newCriterion( + @Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { return new Criterion().setField(field).setValue(value).setCondition(condition); } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java index 94067abd0cf65..babb290655d3d 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java @@ -7,85 +7,95 @@ import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import com.linkedin.r2.RemoteInvocationException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.Map; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -/** - * Adds entity/aspect cache and assumes system authentication - */ +/** Adds entity/aspect cache and assumes system authentication */ public interface SystemEntityClient extends EntityClient { - EntityClientCache getEntityClientCache(); - Authentication getSystemAuthentication(); + EntityClientCache getEntityClientCache(); + + Authentication getSystemAuthentication(); - /** - * Builds the cache - * @param systemAuthentication system authentication - * @param cacheConfig cache configuration - * @return the cache - */ - default EntityClientCache buildEntityClientCache(Class<?> metricClazz, Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { - return EntityClientCache.builder() - .config(cacheConfig) - .loadFunction((Set<Urn> urns, Set<String> aspectNames) -> { - try { - String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); + /** + * Builds the cache + * + * @param systemAuthentication system authentication + * @param cacheConfig cache configuration + * @return the cache + */ + default EntityClientCache buildEntityClientCache( + Class<?> metricClazz, + Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + return EntityClientCache.builder() + .config(cacheConfig) + .loadFunction( + (Set<Urn> urns, Set<String> aspectNames) -> { + try { + String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); - if (urns.stream().anyMatch(urn -> !urn.getEntityType().equals(entityName))) { - throw new 
IllegalArgumentException("Urns must be of the same entity type. RestliEntityClient API limitation."); - } + if (urns.stream().anyMatch(urn -> !urn.getEntityType().equals(entityName))) { + throw new IllegalArgumentException( + "Urns must be of the same entity type. RestliEntityClient API limitation."); + } - return batchGetV2(entityName, urns, aspectNames, systemAuthentication); - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException(e); - } - }).build(metricClazz); - } + return batchGetV2(entityName, urns, aspectNames, systemAuthentication); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .build(metricClazz); + } - /** - * Get an entity by urn with the given aspects - * @param urn the id of the entity - * @param aspectNames aspects of the entity - * @return response object - * @throws RemoteInvocationException - * @throws URISyntaxException - */ - @Nullable - default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set<String> aspectNames) - throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().getV2(urn, aspectNames); - } + /** + * Get an entity by urn with the given aspects + * + * @param urn the id of the entity + * @param aspectNames aspects of the entity + * @return response object + * @throws RemoteInvocationException + * @throws URISyntaxException + */ + @Nullable + default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set<String> aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getEntityClientCache().getV2(urn, aspectNames); + } - /** - * Batch get a set of aspects for a single entity type, multiple ids with the given aspects. - * - * @param urns the urns of the entities to batch get - * @param aspectNames the aspect names to batch get - * @throws RemoteInvocationException - */ - @Nonnull - default Map<Urn, EntityResponse> batchGetV2(@Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) - throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().batchGetV2(urns, aspectNames); - } + /** + * Batch get a set of aspects for a single entity type, multiple ids with the given aspects. 
+ * + * @param urns the urns of the entities to batch get + * @param aspectNames the aspect names to batch get + * @throws RemoteInvocationException + */ + @Nonnull + default Map<Urn, EntityResponse> batchGetV2( + @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getEntityClientCache().batchGetV2(urns, aspectNames); + } - default void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) throws Exception { - producePlatformEvent(name, key, event, getSystemAuthentication()); - } + default void producePlatformEvent( + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) throws Exception { + producePlatformEvent(name, key, event, getSystemAuthentication()); + } - default boolean exists(@Nonnull Urn urn) throws RemoteInvocationException { - return exists(urn, getSystemAuthentication()); - } + default boolean exists(@Nonnull Urn urn) throws RemoteInvocationException { + return exists(urn, getSystemAuthentication()); + } - default String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) throws RemoteInvocationException { - return ingestProposal(metadataChangeProposal, getSystemAuthentication(), async); - } + default String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) + throws RemoteInvocationException { + return ingestProposal(metadataChangeProposal, getSystemAuthentication(), async); + } - default void setWritable(boolean canWrite) throws RemoteInvocationException { - setWritable(canWrite, getSystemAuthentication()); - } + default void setWritable(boolean canWrite) throws RemoteInvocationException { + setWritable(canWrite, getSystemAuthentication()); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java index f3c343534209c..a2f5596af9f4e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java @@ -4,22 +4,24 @@ import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.restli.client.Client; -import lombok.Getter; - import javax.annotation.Nonnull; +import lombok.Getter; -/** - * Restli backed SystemEntityClient - */ +/** Restli backed SystemEntityClient */ @Getter public class SystemRestliEntityClient extends RestliEntityClient implements SystemEntityClient { - private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final EntityClientCache entityClientCache; + private final Authentication systemAuthentication; - public SystemRestliEntityClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { - super(restliClient, backoffPolicy, retryCount); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); - } + public SystemRestliEntityClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount, + 
Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + super(restliClient, backoffPolicy, retryCount); + this.systemAuthentication = systemAuthentication; + this.entityClientCache = + buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java index 850847bfd262a..747e1e0e1a288 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java @@ -2,62 +2,66 @@ import com.datahub.authentication.Authentication; import com.linkedin.common.EntityRelationships; - import com.linkedin.common.WindowDuration; import com.linkedin.common.client.BaseClient; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.restli.client.Client; - import java.net.URISyntaxException; import javax.annotation.Nonnull; - public class UsageClient extends BaseClient { - private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = - new UsageStatsRequestBuilders(); - - private final UsageClientCache usageClientCache; - - public UsageClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, UsageClientCacheConfig cacheConfig) { - super(restliClient, backoffPolicy, retryCount); - this.usageClientCache = UsageClientCache.builder() - .config(cacheConfig) - .loadFunction((String resource, UsageTimeRange range) -> { - try { - return getUsageStats(resource, range, systemAuthentication); - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException(e); - } - }).build(); - } - - /** - * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. - * Using cache and system authentication. - * Validate permissions before use! - */ - @Nonnull - public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - return usageClientCache.getUsageStats(resource, range); - } - - /** - * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. 
- */ - @Nonnull - private UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range, - @Nonnull Authentication authentication) - throws RemoteInvocationException, URISyntaxException { - - final UsageStatsDoQueryRangeRequestBuilder requestBuilder = USAGE_STATS_REQUEST_BUILDERS - .actionQueryRange() - .resourceParam(resource) - .durationParam(WindowDuration.DAY) - .rangeFromEndParam(range); - return sendClientRequest(requestBuilder, authentication).getEntity(); - } + private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = + new UsageStatsRequestBuilders(); + + private final UsageClientCache usageClientCache; + + public UsageClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount, + Authentication systemAuthentication, + UsageClientCacheConfig cacheConfig) { + super(restliClient, backoffPolicy, retryCount); + this.usageClientCache = + UsageClientCache.builder() + .config(cacheConfig) + .loadFunction( + (String resource, UsageTimeRange range) -> { + try { + return getUsageStats(resource, range, systemAuthentication); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .build(); + } + + /** + * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. Using + * cache and system authentication. Validate permissions before use! + */ + @Nonnull + public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + return usageClientCache.getUsageStats(resource, range); + } + + /** Gets a specific version of downstream {@link EntityRelationships} for the given dataset. */ + @Nonnull + private UsageQueryResult getUsageStats( + @Nonnull String resource, + @Nonnull UsageTimeRange range, + @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + + final UsageStatsDoQueryRangeRequestBuilder requestBuilder = + USAGE_STATS_REQUEST_BUILDERS + .actionQueryRange() + .resourceParam(resource) + .durationParam(WindowDuration.DAY) + .rangeFromEndParam(range); + return sendClientRequest(requestBuilder, authentication).getEntity(); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java index 10a1ebb6dcccb..e4c7ed0b674c0 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java @@ -4,70 +4,68 @@ import com.github.benmanes.caffeine.cache.Weigher; import com.linkedin.common.client.ClientCache; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - -import javax.annotation.Nonnull; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; @Builder public class UsageClientCache { - @NonNull - private UsageClientCacheConfig config; - @NonNull - private final ClientCache<Key, UsageQueryResult, UsageClientCacheConfig> cache; - @NonNull - private BiFunction<String, UsageTimeRange, UsageQueryResult> loadFunction; + @NonNull private UsageClientCacheConfig config; + @NonNull private final 
ClientCache<Key, UsageQueryResult, UsageClientCacheConfig> cache; + @NonNull private BiFunction<String, UsageTimeRange, UsageQueryResult> loadFunction; - public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - if (config.isEnabled()) { - return cache.get(Key.builder().resource(resource).range(range).build()); - } else { - return loadFunction.apply(resource, range); - } + public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + if (config.isEnabled()) { + return cache.get(Key.builder().resource(resource).range(range).build()); + } else { + return loadFunction.apply(resource, range); } + } - public static class UsageClientCacheBuilder { + public static class UsageClientCacheBuilder { - private UsageClientCacheBuilder cache(LoadingCache<Key, UsageQueryResult> cache) { - return this; - } + private UsageClientCacheBuilder cache(LoadingCache<Key, UsageQueryResult> cache) { + return this; + } - public UsageClientCache build() { - // estimate size - Weigher<Key, UsageQueryResult> weighByEstimatedSize = (key, value) -> - value.data().toString().getBytes().length; + public UsageClientCache build() { + // estimate size + Weigher<Key, UsageQueryResult> weighByEstimatedSize = + (key, value) -> value.data().toString().getBytes().length; - // batch loads data from usage client - Function<Iterable<? extends Key>, Map<Key, UsageQueryResult>> loader = (Iterable<? extends Key> keys) -> - StreamSupport.stream(keys.spliterator(), true) - .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + // batch loads data from usage client + Function<Iterable<? extends Key>, Map<Key, UsageQueryResult>> loader = + (Iterable<? 
extends Key> keys) -> + StreamSupport.stream(keys.spliterator(), true) + .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - // default ttl only - BiFunction<UsageClientCacheConfig, Key, Integer> ttlSeconds = (config, key) -> config.getDefaultTTLSeconds(); + // default ttl only + BiFunction<UsageClientCacheConfig, Key, Integer> ttlSeconds = + (config, key) -> config.getDefaultTTLSeconds(); - cache = ClientCache.<Key, UsageQueryResult, UsageClientCacheConfig>builder() - .weigher(weighByEstimatedSize) - .config(config) - .loadFunction(loader) - .ttlSecondsFunction(ttlSeconds) - .build(UsageClientCache.class); + cache = + ClientCache.<Key, UsageQueryResult, UsageClientCacheConfig>builder() + .weigher(weighByEstimatedSize) + .config(config) + .loadFunction(loader) + .ttlSecondsFunction(ttlSeconds) + .build(UsageClientCache.class); - return new UsageClientCache(config, cache, loadFunction); - } + return new UsageClientCache(config, cache, loadFunction); } + } - @Data - @Builder - protected static class Key { - private final String resource; - private final UsageTimeRange range; - } + @Data + @Builder + protected static class Key { + private final String resource; + private final UsageTimeRange range; + } } diff --git a/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java b/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java index c4109f1daedb3..1f8342170a2ff 100644 --- a/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java +++ b/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java @@ -1,5 +1,12 @@ package com.linkedin.common.client; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,62 +22,59 @@ import com.linkedin.restli.client.ResponseFuture; import org.testng.annotations.Test; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertThrows; - public class BaseClientTest { - final static Authentication AUTH = new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); - - @Test - public void testZeroRetry() throws RemoteInvocationException { - MetadataChangeProposal mcp = new MetadataChangeProposal(); + static final Authentication AUTH = + new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); - ResponseFuture<String> mockFuture = mock(ResponseFuture.class); - when(mockRestliClient.sendRequest(any(ActionRequest.class))).thenReturn(mockFuture); + @Test + public void testZeroRetry() throws RemoteInvocationException { + MetadataChangeProposal mcp = new MetadataChangeProposal(); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 0); - 
testClient.sendClientRequest(testRequestBuilder, AUTH); - // Expected 1 actual try and 0 retries - verify(mockRestliClient).sendRequest(any(ActionRequest.class)); - } + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); + ResponseFuture<String> mockFuture = mock(ResponseFuture.class); + when(mockRestliClient.sendRequest(any(ActionRequest.class))).thenReturn(mockFuture); - @Test - public void testMultipleRetries() throws RemoteInvocationException { - MetadataChangeProposal mcp = new MetadataChangeProposal(); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); - ResponseFuture<String> mockFuture = mock(ResponseFuture.class); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 0); + testClient.sendClientRequest(testRequestBuilder, AUTH); + // Expected 1 actual try and 0 retries + verify(mockRestliClient).sendRequest(any(ActionRequest.class)); + } - when(mockRestliClient.sendRequest(any(ActionRequest.class))) - .thenThrow(new RuntimeException()) - .thenReturn(mockFuture); + @Test + public void testMultipleRetries() throws RemoteInvocationException { + MetadataChangeProposal mcp = new MetadataChangeProposal(); + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); + ResponseFuture<String> mockFuture = mock(ResponseFuture.class); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); - testClient.sendClientRequest(testRequestBuilder, AUTH); - // Expected 1 actual try and 1 retries - verify(mockRestliClient, times(2)).sendRequest(any(ActionRequest.class)); - } + when(mockRestliClient.sendRequest(any(ActionRequest.class))) + .thenThrow(new RuntimeException()) + .thenReturn(mockFuture); - @Test - public void testNonRetry() { - MetadataChangeProposal mcp = new MetadataChangeProposal(); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); + testClient.sendClientRequest(testRequestBuilder, AUTH); + // Expected 1 actual try and 1 retries + verify(mockRestliClient, times(2)).sendRequest(any(ActionRequest.class)); + } - when(mockRestliClient.sendRequest(any(ActionRequest.class))) - .thenThrow(new RuntimeException(new RequiredFieldNotPresentException("value"))); + @Test + public void testNonRetry() { + MetadataChangeProposal mcp = new MetadataChangeProposal(); + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); - assertThrows(RuntimeException.class, () -> testClient.sendClientRequest(testRequestBuilder, AUTH)); - } + when(mockRestliClient.sendRequest(any(ActionRequest.class))) + .thenThrow(new RuntimeException(new RequiredFieldNotPresentException("value"))); 
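  // Retry semantics these three tests pin down, summarized as a hedged sketch
  // (assuming the RestliEntityClient(client, backoffPolicy, retryCount)
  // constructor used above): retryCount N allows up to N + 1 send attempts.
  //
  //   RestliEntityClient c =
  //       new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 2);
  //   c.sendClientRequest(testRequestBuilder, AUTH); // up to 3 attempts on retriable errors
  //
  // A failure whose cause is RequiredFieldNotPresentException is treated as
  // non-retriable and surfaces as a RuntimeException regardless of retryCount.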
+ RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); + assertThrows( + RuntimeException.class, () -> testClient.sendClientRequest(testRequestBuilder, AUTH)); + } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java index 98ecf6142ef2c..edd8270e87210 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java @@ -20,11 +20,9 @@ public CompletableFuture<Void> onRequest(final FilterRequestContext requestConte return CompletableFuture.completedFuture(null); } - @Override public CompletableFuture<Void> onResponse( - final FilterRequestContext requestContext, - final FilterResponseContext responseContext) { + final FilterRequestContext requestContext, final FilterResponseContext responseContext) { logResponse(requestContext, responseContext); return CompletableFuture.completedFuture(null); } @@ -40,8 +38,7 @@ public CompletableFuture<Void> onError( } private void logResponse( - final FilterRequestContext requestContext, - final FilterResponseContext responseContext) { + final FilterRequestContext requestContext, final FilterResponseContext responseContext) { long startTime = (long) requestContext.getFilterScratchpad().get(START_TIME); long endTime = System.currentTimeMillis(); long duration = endTime - startTime; @@ -54,5 +51,4 @@ private void logResponse( log.info("{} {} - {} - {} - {}ms", httpMethod, uri, method, status.getCode(), duration); } - } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java index 1f19094a74654..b8cbf1ceb6794 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java @@ -22,10 +22,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; - -/** - * Rest.li entry point: /analytics - */ +/** Rest.li entry point: /analytics */ @Slf4j @RestLiSimpleResource(name = "analytics", namespace = "com.linkedin.analytics") public class Analytics extends SimpleResourceTemplate<GetTimeseriesAggregatedStatsResponse> { @@ -35,6 +32,7 @@ public class Analytics extends SimpleResourceTemplate<GetTimeseriesAggregatedSta private static final String PARAM_FILTER = "filter"; private static final String PARAM_METRICS = "metrics"; private static final String PARAM_BUCKETS = "buckets"; + @Inject @Named("timeseriesAspectService") private TimeseriesAspectService _timeseriesAspectService; @@ -47,24 +45,25 @@ public Task<GetTimeseriesAggregatedStatsResponse> getTimeseriesStats( @ActionParam(PARAM_METRICS) @Nonnull AggregationSpec[] aggregationSpecs, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_BUCKETS) @Optional @Nullable GroupingBucket[] groupingBuckets) { - return RestliUtils.toTask(() -> { - log.info("Attempting to query timeseries stats"); - GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); - resp.setEntityName(entityName); - resp.setAspectName(aspectName); - 
resp.setAggregationSpecs(new AggregationSpecArray(Arrays.asList(aggregationSpecs))); - if (filter != null) { - resp.setFilter(filter); - } - if (groupingBuckets != null) { - resp.setGroupingBuckets(new GroupingBucketArray(Arrays.asList(groupingBuckets))); - } + return RestliUtils.toTask( + () -> { + log.info("Attempting to query timeseries stats"); + GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); + resp.setEntityName(entityName); + resp.setAspectName(aspectName); + resp.setAggregationSpecs(new AggregationSpecArray(Arrays.asList(aggregationSpecs))); + if (filter != null) { + resp.setFilter(filter); + } + if (groupingBuckets != null) { + resp.setGroupingBuckets(new GroupingBucketArray(Arrays.asList(groupingBuckets))); + } - GenericTable aggregatedStatsTable = - _timeseriesAspectService.getAggregatedStats(entityName, aspectName, aggregationSpecs, filter, - groupingBuckets); - resp.setTable(aggregatedStatsTable); - return resp; - }); + GenericTable aggregatedStatsTable = + _timeseriesAspectService.getAggregatedStats( + entityName, aspectName, aggregationSpecs, filter, groupingBuckets); + resp.setTable(aggregatedStatsTable); + return resp; + }); } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index af76af90ce77f..f14dc2e8b2918 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.operations.OperationsResource.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -8,10 +13,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.linkedin.aspect.GetTimeseriesAspectValuesResponse; -import com.linkedin.metadata.entity.IngestResult; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.transactions.AspectsBatch; -import com.linkedin.metadata.resources.operations.Utils; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.aspect.EnvelopedAspectArray; @@ -19,9 +20,13 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.IngestResult; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.entity.validation.ValidationException; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.resources.operations.Utils; import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.search.EntitySearchService; import 
com.linkedin.metadata.timeseries.TimeseriesAspectService; @@ -52,15 +57,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.operations.OperationsResource.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "aspects", namespace = "com.linkedin.entity") public class AspectResource extends CollectionResourceTaskTemplate<String, VersionedAspect> { @@ -114,66 +111,101 @@ void setAuthorizer(Authorizer authorizer) { @RestMethod.Get @Nonnull @WithSpan - public Task<AnyRecord> get(@Nonnull String urnStr, @QueryParam("aspect") @Optional @Nullable String aspectName, - @QueryParam("version") @Optional @Nullable Long version) throws URISyntaxException { + public Task<AnyRecord> get( + @Nonnull String urnStr, + @QueryParam("aspect") @Optional @Nullable String aspectName, + @QueryParam("version") @Optional @Nullable Long version) + throws URISyntaxException { log.info("GET ASPECT urn: {} aspect: {} version: {}", urnStr, aspectName, version); final Urn urn = Urn.createFromString(urnStr); - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); - } - final VersionedAspect aspect = _entityService.getVersionedAspect(urn, aspectName, version); - if (aspect == null) { - throw RestliUtil.resourceNotFoundException(String.format("Did not find urn: %s aspect: %s version: %s", urn, aspectName, version)); - } - return new AnyRecord(aspect.data()); - }, MetricRegistry.name(this.getClass(), "get")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); + } + final VersionedAspect aspect = + _entityService.getVersionedAspect(urn, aspectName, version); + if (aspect == null) { + throw RestliUtil.resourceNotFoundException( + String.format( + "Did not find urn: %s aspect: %s version: %s", urn, aspectName, version)); + } + return new AnyRecord(aspect.data()); + }, + MetricRegistry.name(this.getClass(), "get")); } @Action(name = ACTION_GET_TIMESERIES_ASPECT) @Nonnull @WithSpan public Task<GetTimeseriesAspectValuesResponse> getTimeseriesAspectValues( - @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_URN) @Nonnull String urnStr, + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_ASPECT) @Nonnull String aspectName, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long 
startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, @ActionParam(PARAM_LIMIT) @Optional("10000") int limit, - @ActionParam(PARAM_LATEST_VALUE) @Optional("false") boolean latestValue, // This field is deprecated. + @ActionParam(PARAM_LATEST_VALUE) @Optional("false") + boolean latestValue, // This field is deprecated. @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sort) throws URISyntaxException { + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sort) + throws URISyntaxException { log.info( "Get Timeseries Aspect values for aspect {} for entity {} with startTimeMillis {}, endTimeMillis {} and limit {}.", - aspectName, entityName, startTimeMillis, endTimeMillis, limit); + aspectName, + entityName, + startTimeMillis, + endTimeMillis, + limit); final Urn urn = Urn.createFromString(urnStr); - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get timeseries aspect for " + urn); - } - GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); - response.setEntityName(entityName); - response.setAspectName(aspectName); - if (startTimeMillis != null) { - response.setStartTimeMillis(startTimeMillis); - } - if (endTimeMillis != null) { - response.setEndTimeMillis(endTimeMillis); - } - if (latestValue) { - response.setLimit(1); - } else { - response.setLimit(limit); - } - response.setValues(new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues(urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, sort))); - return response; - }, MetricRegistry.name(this.getClass(), "getTimeseriesAspectValues")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, + "User is unauthorized to get timeseries aspect for " + urn); + } + GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); + response.setEntityName(entityName); + response.setAspectName(aspectName); + if (startTimeMillis != null) { + response.setStartTimeMillis(startTimeMillis); + } + if (endTimeMillis != null) { + response.setEndTimeMillis(endTimeMillis); + } + if (latestValue) { + response.setLimit(1); + } else { + response.setLimit(limit); + } + response.setValues( + new EnvelopedAspectArray( + _timeseriesAspectService.getAspectValues( + urn, + entityName, + aspectName, + startTimeMillis, + endTimeMillis, + limit, + filter, + sort))); + return response; + }, + MetricRegistry.name(this.getClass(), "getTimeseriesAspectValues")); } @Action(name = ACTION_INGEST_PROPOSAL) @@ -181,7 +213,8 @@ public Task<GetTimeseriesAspectValuesResponse> getTimeseriesAspectValues( @WithSpan public Task<String> ingestProposal( @ActionParam(PARAM_PROPOSAL) @Nonnull 
MetadataChangeProposal metadataChangeProposal, - @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) throws URISyntaxException { + @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) + throws URISyntaxException { log.info("INGEST PROPOSAL proposal: {}", metadataChangeProposal); final boolean asyncBool; @@ -192,85 +225,111 @@ public Task<String> ingestProposal( } Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = _entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); - Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); + com.linkedin.metadata.models.EntitySpec entitySpec = + _entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); + Urn urn = + EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); } String actorUrnStr = authentication.getActor().toUrnStr(); - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); - return RestliUtil.toTask(() -> { - log.debug("Proposal: {}", metadataChangeProposal); - try { - final AspectsBatch batch; - if (asyncBool) { - // if async we'll expand the getAdditionalChanges later, no need to do this early - batch = AspectsBatchImpl.builder() - .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry()) - .build(); - } else { - Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream()); + return RestliUtil.toTask( + () -> { + log.debug("Proposal: {}", metadataChangeProposal); + try { + final AspectsBatch batch; + if (asyncBool) { + // if async we'll expand the getAdditionalChanges later, no need to do this early + batch = + AspectsBatchImpl.builder() + .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry()) + .build(); + } else { + Stream<MetadataChangeProposal> proposalStream = + Stream.concat( + Stream.of(metadataChangeProposal), + AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService) + .stream()); - batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(); - } + batch = + AspectsBatchImpl.builder() + .mcps( + proposalStream.collect(Collectors.toList()), + _entityService.getEntityRegistry()) + .build(); + } - Set<IngestResult> results = - _entityService.ingestProposal(batch, auditStamp, asyncBool); + Set<IngestResult> results = _entityService.ingestProposal(batch, auditStamp, asyncBool); - IngestResult one = results.stream() - .findFirst() - .get(); + IngestResult 
one = results.stream().findFirst().get(); - // Update runIds, only works for existing documents, so ES document must exist - Urn resultUrn = one.getUrn(); - if (one.isProcessedMCL() || one.isUpdate()) { - tryIndexRunId(resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService); - } - return resultUrn.toString(); - } catch (ValidationException e) { - throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage()); - } - }, MetricRegistry.name(this.getClass(), "ingestProposal")); + // Update runIds, only works for existing documents, so ES document must exist + Urn resultUrn = one.getUrn(); + if (one.isProcessedMCL() || one.isUpdate()) { + tryIndexRunId( + resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService); + } + return resultUrn.toString(); + } catch (ValidationException e) { + throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage()); + } + }, + MetricRegistry.name(this.getClass(), "ingestProposal")); } @Action(name = ACTION_GET_COUNT) @Nonnull @WithSpan - public Task<Integer> getCount(@ActionParam(PARAM_ASPECT) @Nonnull String aspectName, - @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), - (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts."); - } - return _entityService.getCountAspect(aspectName, urnLike); - }, MetricRegistry.name(this.getClass(), "getCount")); + public Task<Integer> getCount( + @ActionParam(PARAM_ASPECT) @Nonnull String aspectName, + @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) { + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts."); + } + return _entityService.getCountAspect(aspectName, urnLike); + }, + MetricRegistry.name(this.getClass(), "getCount")); } @Action(name = ACTION_RESTORE_INDICES) @Nonnull @WithSpan - public Task<String> restoreIndices(@ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, - @ActionParam(PARAM_URN) @Optional @Nullable String urn, - @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, - @ActionParam("start") @Optional @Nullable Integer start, - @ActionParam("batchSize") @Optional @Nullable Integer batchSize - ) { - return RestliUtil.toTask(() -> { - return Utils.restoreIndices(aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); - }, MetricRegistry.name(this.getClass(), "restoreIndices")); + public Task<String> restoreIndices( + @ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, + @ActionParam(PARAM_URN) @Optional @Nullable String urn, + @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, + @ActionParam("start") @Optional @Nullable Integer start, + @ActionParam("batchSize") @Optional @Nullable Integer batchSize) { + return RestliUtil.toTask( + () -> { + return Utils.restoreIndices( + 
aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); + }, + MetricRegistry.name(this.getClass(), "restoreIndices")); } - private static void tryIndexRunId(final Urn urn, final @Nullable SystemMetadata systemMetadata, - final EntitySearchService entitySearchService) { + private static void tryIndexRunId( + final Urn urn, + final @Nullable SystemMetadata systemMetadata, + final EntitySearchService entitySearchService) { if (systemMetadata != null && systemMetadata.hasRunId()) { entitySearchService.appendRunId(urn.getEntityType(), urn, systemMetadata.getRunId()); } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java index 9bab846d1bdcc..294ded8a1e255 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -53,16 +56,11 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * resource for showing information and rolling back runs - */ +/** resource for showing information and rolling back runs */ @Slf4j @RestLiCollection(name = "runs", namespace = "com.linkedin.entity") -public class BatchIngestionRunResource extends CollectionResourceTaskTemplate<String, VersionedAspect> { +public class BatchIngestionRunResource + extends CollectionResourceTaskTemplate<String, VersionedAspect> { private static final Integer DEFAULT_OFFSET = 0; private static final Integer DEFAULT_PAGE_SIZE = 100; @@ -91,173 +89,239 @@ public class BatchIngestionRunResource extends CollectionResourceTaskTemplate<St @Named("authorizerChain") private Authorizer _authorizer; - /** - * Rolls back an ingestion run - */ + /** Rolls back an ingestion run */ @Action(name = "rollback") @Nonnull @WithSpan - public Task<RollbackResponse> rollback(@ActionParam("runId") @Nonnull String runId, + public Task<RollbackResponse> rollback( + @ActionParam("runId") @Nonnull String runId, @ActionParam("dryRun") @Optional Boolean dryRun, @Deprecated @ActionParam("hardDelete") @Optional Boolean hardDelete, - @ActionParam("safe") @Optional Boolean safe) throws Exception { + @ActionParam("safe") @Optional Boolean safe) + throws Exception { log.info("ROLLBACK RUN runId: {} dry run: {}", runId, dryRun); - boolean doHardDelete = safe != null ? !safe : hardDelete != null ? hardDelete : DEFAULT_HARD_DELETE; + boolean doHardDelete = + safe != null ? !safe : hardDelete != null ? 
hardDelete : DEFAULT_HARD_DELETE; if (safe != null && hardDelete != null) { - log.warn("Both Safe & hardDelete flags were defined, honouring safe flag as hardDelete is deprecated"); + log.warn( + "Both Safe & hardDelete flags were defined, honouring safe flag as hardDelete is deprecated"); } try { - return RestliUtil.toTask(() -> { - if (runId.equals(DEFAULT_RUN_ID)) { - throw new IllegalArgumentException(String.format( - "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id", - runId)); - } - if (!dryRun) { - updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS); - } - - RollbackResponse response = new RollbackResponse(); - List<AspectRowSummary> aspectRowsToDelete; - aspectRowsToDelete = _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - Set<String> urns = aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet(); - List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream() - .map(UrnUtils::getUrn) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entities."); - } - log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - if (dryRun) { - - final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream().collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true); - - long entitiesDeleted = keyAspects.size(); - long aspectsReverted = aspectRowsToDelete.size(); - - final long affectedEntities = - aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - // If we are soft deleting, remove key aspects from count of aspects being deleted - if (!doHardDelete) { - aspectsReverted -= keyAspects.size(); - rowSummaries.removeIf(AspectRowSummary::isKeyAspect); - } - // Compute the aspects that exist referencing the key aspects we are deleting - final List<AspectRowSummary> affectedAspectsList = keyAspects.stream() - .map((AspectRowSummary urn) -> _systemMetadataService.findByUrn(urn.getUrn(), false, 0, - ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter(row -> !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName() - .equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final List<UnsafeEntityInfo> unsafeEntityInfos = - affectedAspectsList.stream().map(AspectRowSummary::getUrn).distinct().map(urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - // Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE).collect(Collectors.toList()); - - return 
response.setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - .setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); - } - - RollbackRunResult rollbackRunResult = _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - final List<AspectRowSummary> deletedRows = rollbackRunResult.getRowsRolledBack(); - int rowsDeletedFromEntityDeletion = rollbackRunResult.getRowsDeletedFromEntityDeletion(); - - // since elastic limits how many rows we can access at once, we need to iteratively delete - while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - aspectRowsToDelete = _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - log.info("{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - log.info("deleting..."); - rollbackRunResult = _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); - rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); - } - - // Rollback timeseries aspects - DeleteAspectValuesResult timeseriesRollbackResult = _timeseriesAspectService.rollbackTimeseriesAspects(runId); - rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); - - log.info("finished deleting {} rows", deletedRows.size()); - int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; - - final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream().collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true); - - final long entitiesDeleted = keyAspects.size(); - final long affectedEntities = - deletedRows.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - log.info("computing aspects affected by this rollback..."); - // Compute the aspects that exist referencing the key aspects we are deleting - final List<AspectRowSummary> affectedAspectsList = keyAspects.stream() - .map((AspectRowSummary urn) -> _systemMetadataService.findByUrn(urn.getUrn(), false, 0, - ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter(row -> !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName() - .equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final List<UnsafeEntityInfo> unsafeEntityInfos = - affectedAspectsList.stream().map(AspectRowSummary::getUrn).distinct().map(urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - // Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE).collect(Collectors.toList()); - - log.info("calculation done."); - - updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); - - return response.setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - 
.setEntitiesAffected(affectedEntities)
-          .setEntitiesDeleted(entitiesDeleted)
-          .setUnsafeEntitiesCount(unsafeEntitiesCount)
-          .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos))
-          .setAspectRowSummaries(rowSummaries);
-    }, MetricRegistry.name(this.getClass(), "rollback"));
+      return RestliUtil.toTask(
+          () -> {
+            if (runId.equals(DEFAULT_RUN_ID)) {
+              throw new IllegalArgumentException(
+                  String.format(
+                      "%s is a default run-id provided for non-labeled ingestion runs. You cannot delete using this reserved run-id",
+                      runId));
+            }
+            if (!dryRun) {
+              updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS);
+            }
+
+            RollbackResponse response = new RollbackResponse();
+            List<AspectRowSummary> aspectRowsToDelete;
+            aspectRowsToDelete =
+                _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE);
+            Set<String> urns =
+                aspectRowsToDelete.stream()
+                    .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                    .keySet();
+            List<java.util.Optional<EntitySpec>> resourceSpecs =
+                urns.stream()
+                    .map(UrnUtils::getUrn)
+                    .map(
+                        urn ->
+                            java.util.Optional.of(
+                                new EntitySpec(urn.getEntityType(), urn.toString())))
+                    .collect(Collectors.toList());
+            Authentication auth = AuthenticationContext.getAuthentication();
+            if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+                && !isAuthorized(
+                    auth,
+                    _authorizer,
+                    ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+                    resourceSpecs)) {
+              throw new RestLiServiceException(
+                  HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities.");
+            }
+            log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
+            if (dryRun) {
+
+              final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects =
+                  aspectRowsToDelete.stream()
+                      .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect));
+
+              final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true);
+
+              long entitiesDeleted = keyAspects.size();
+              long aspectsReverted = aspectRowsToDelete.size();
+
+              final long affectedEntities =
+                  aspectRowsToDelete.stream()
+                      .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                      .keySet()
+                      .size();
+
+              final AspectRowSummaryArray rowSummaries =
+                  new AspectRowSummaryArray(
+                      aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())));
+
+              // If we are soft deleting, remove key aspects from count of aspects being deleted
+              if (!doHardDelete) {
+                aspectsReverted -= keyAspects.size();
+                rowSummaries.removeIf(AspectRowSummary::isKeyAspect);
+              }
+              // Compute the aspects that exist referencing the key aspects we are deleting
+              final List<AspectRowSummary> affectedAspectsList =
+                  keyAspects.stream()
+                      .map(
+                          (AspectRowSummary urn) ->
+                              _systemMetadataService.findByUrn(
+                                  urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE))
+                      .flatMap(List::stream)
+                      .filter(
+                          row ->
+                              !row.getRunId().equals(runId)
+                                  && !row.isKeyAspect()
+                                  && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME))
+                      .collect(Collectors.toList());
+
+              long affectedAspects = affectedAspectsList.size();
+              long unsafeEntitiesCount =
+                  affectedAspectsList.stream()
+                      .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                      .keySet()
+                      .size();
+
+              final List<UnsafeEntityInfo> unsafeEntityInfos =
+                  affectedAspectsList.stream()
+                      .map(AspectRowSummary::getUrn)
+                      .distinct()
+                      .map(
+                          urn -> {
+                            UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo();
+                            unsafeEntityInfo.setUrn(urn);
+                            return unsafeEntityInfo;
+                          })
+                      // Return at most 1 million rows
+
.limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + return response + .setAspectsAffected(affectedAspects) + .setAspectsReverted(aspectsReverted) + .setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + } + + RollbackRunResult rollbackRunResult = + _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); + final List<AspectRowSummary> deletedRows = rollbackRunResult.getRowsRolledBack(); + int rowsDeletedFromEntityDeletion = + rollbackRunResult.getRowsDeletedFromEntityDeletion(); + + // since elastic limits how many rows we can access at once, we need to iteratively + // delete + while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + aspectRowsToDelete = + _systemMetadataService.findByRunId( + runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); + log.info( + "{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + log.info("deleting..."); + rollbackRunResult = + _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); + deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); + rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); + } + + // Rollback timeseries aspects + DeleteAspectValuesResult timeseriesRollbackResult = + _timeseriesAspectService.rollbackTimeseriesAspects(runId); + rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); + + log.info("finished deleting {} rows", deletedRows.size()); + int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; + + final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects = + aspectRowsToDelete.stream() + .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); + + final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true); + + final long entitiesDeleted = keyAspects.size(); + final long affectedEntities = + deletedRows.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final AspectRowSummaryArray rowSummaries = + new AspectRowSummaryArray( + aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); + + log.info("computing aspects affected by this rollback..."); + // Compute the aspects that exist referencing the key aspects we are deleting + final List<AspectRowSummary> affectedAspectsList = + keyAspects.stream() + .map( + (AspectRowSummary urn) -> + _systemMetadataService.findByUrn( + urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) + .flatMap(List::stream) + .filter( + row -> + !row.getRunId().equals(runId) + && !row.isKeyAspect() + && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) + .collect(Collectors.toList()); + + long affectedAspects = affectedAspectsList.size(); + long unsafeEntitiesCount = + affectedAspectsList.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final List<UnsafeEntityInfo> unsafeEntityInfos = + affectedAspectsList.stream() + .map(AspectRowSummary::getUrn) + .distinct() + .map( + urn -> { + UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); + unsafeEntityInfo.setUrn(urn); + return unsafeEntityInfo; + }) + // Return at most 1 million rows + .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + log.info("calculation done."); + + 
updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); + + return response + .setAspectsAffected(affectedAspects) + .setAspectsReverted(aspectsReverted) + .setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + }, + MetricRegistry.name(this.getClass(), "rollback")); } catch (Exception e) { updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS); - throw new RuntimeException(String.format("There was an issue rolling back ingestion run with runId %s", runId), e); + throw new RuntimeException( + String.format("There was an issue rolling back ingestion run with runId %s", runId), e); } } @@ -279,9 +343,14 @@ private void sleep(Integer seconds) { private void updateExecutionRequestStatus(String runId, String status) { try { - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect(executionRequestUrn.getEntityType(), executionRequestUrn, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + _entityService.getLatestEnvelopedAspect( + executionRequestUrn.getEntityType(), + executionRequestUrn, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (aspect == null) { log.warn("Aspect for execution request with runId {} not found", runId); } else { @@ -294,65 +363,82 @@ private void updateExecutionRequestStatus(String runId, String status) { proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(proposal, - new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false); + _entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); } } catch (Exception e) { - log.error(String.format("Not able to update execution result aspect with runId %s and new status %s.", runId, status), e); + log.error( + String.format( + "Not able to update execution result aspect with runId %s and new status %s.", + runId, status), + e); } } - /** - * Retrieves the value for an entity that is made up of latest versions of specified aspects. - */ + /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */ @Action(name = "list") @Nonnull @WithSpan - public Task<IngestionRunSummaryArray> list(@ActionParam("pageOffset") @Optional @Nullable Integer pageOffset, + public Task<IngestionRunSummaryArray> list( + @ActionParam("pageOffset") @Optional @Nullable Integer pageOffset, @ActionParam("pageSize") @Optional @Nullable Integer pageSize, @ActionParam("includeSoft") @Optional @Nullable Boolean includeSoft) { log.info("LIST RUNS offset: {} size: {}", pageOffset, pageSize); - return RestliUtil.toTask(() -> { - List<IngestionRunSummary> summaries = - _systemMetadataService.listRuns(pageOffset != null ? pageOffset : DEFAULT_OFFSET, - pageSize != null ? pageSize : DEFAULT_PAGE_SIZE, - includeSoft != null ? 
includeSoft : DEFAULT_INCLUDE_SOFT_DELETED); - - return new IngestionRunSummaryArray(summaries); - }, MetricRegistry.name(this.getClass(), "list")); + return RestliUtil.toTask( + () -> { + List<IngestionRunSummary> summaries = + _systemMetadataService.listRuns( + pageOffset != null ? pageOffset : DEFAULT_OFFSET, + pageSize != null ? pageSize : DEFAULT_PAGE_SIZE, + includeSoft != null ? includeSoft : DEFAULT_INCLUDE_SOFT_DELETED); + + return new IngestionRunSummaryArray(summaries); + }, + MetricRegistry.name(this.getClass(), "list")); } @Action(name = "describe") @Nonnull @WithSpan - public Task<AspectRowSummaryArray> describe(@ActionParam("runId") @Nonnull String runId, - @ActionParam("start") Integer start, @ActionParam("count") Integer count, + public Task<AspectRowSummaryArray> describe( + @ActionParam("runId") @Nonnull String runId, + @ActionParam("start") Integer start, + @ActionParam("count") Integer count, @ActionParam("includeSoft") @Optional @Nullable Boolean includeSoft, @ActionParam("includeAspect") @Optional @Nullable Boolean includeAspect) { log.info("DESCRIBE RUN runId: {}, start: {}, count: {}", runId, start, count); - return RestliUtil.toTask(() -> { - List<AspectRowSummary> summaries = - _systemMetadataService.findByRunId(runId, includeSoft != null && includeSoft, start, count); - - if (includeAspect != null && includeAspect) { - summaries.forEach(summary -> { - Urn urn = UrnUtils.getUrn(summary.getUrn()); - try { - EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect(urn.getEntityType(), urn, summary.getAspectName()); - if (aspect == null) { - log.error("Aspect for summary {} not found", summary); - } else { - summary.setAspect(aspect.getValue()); - } - } catch (Exception e) { - log.error("Error while fetching aspect for summary {}", summary, e); + return RestliUtil.toTask( + () -> { + List<AspectRowSummary> summaries = + _systemMetadataService.findByRunId( + runId, includeSoft != null && includeSoft, start, count); + + if (includeAspect != null && includeAspect) { + summaries.forEach( + summary -> { + Urn urn = UrnUtils.getUrn(summary.getUrn()); + try { + EnvelopedAspect aspect = + _entityService.getLatestEnvelopedAspect( + urn.getEntityType(), urn, summary.getAspectName()); + if (aspect == null) { + log.error("Aspect for summary {} not found", summary); + } else { + summary.setAspect(aspect.getValue()); + } + } catch (Exception e) { + log.error("Error while fetching aspect for summary {}", summary, e); + } + }); } - }); - } - return new AspectRowSummaryArray(summaries); - }, MetricRegistry.name(this.getClass(), "describe")); + return new AspectRowSummaryArray(summaries); + }, + MetricRegistry.name(this.getClass(), "describe")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index 3ee98b3244718..ddf5efa5027ca 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.validation.ValidationUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; 
+import static com.linkedin.metadata.search.utils.SearchUtils.*; +import static com.linkedin.metadata.shared.ValidationUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -80,18 +88,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.validation.ValidationUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.search.utils.SearchUtils.*; -import static com.linkedin.metadata.shared.ValidationUtils.*; -import static com.linkedin.metadata.utils.PegasusUtils.*; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "entities", namespace = "com.linkedin.entity") public class EntityResource extends CollectionResourceTaskTemplate<String, Entity> { @@ -121,6 +118,7 @@ public class EntityResource extends CollectionResourceTaskTemplate<String, Entit private static final String ES_FIELD_TIMESTAMP = "timestampMillis"; private static final Integer ELASTIC_MAX_PAGE_SIZE = 10000; private final Clock _clock = Clock.systemUTC(); + @Inject @Named("entityService") private EntityService _entityService; @@ -161,61 +159,79 @@ public class EntityResource extends CollectionResourceTaskTemplate<String, Entit @Named("authorizerChain") private Authorizer _authorizer; - /** - * Retrieves the value for an entity that is made up of latest versions of specified aspects. - */ + /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */ @RestMethod.Get @Nonnull @WithSpan - public Task<AnyRecord> get(@Nonnull String urnStr, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task<AnyRecord> get( + @Nonnull String urnStr, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.info("GET {}", urnStr); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity " + urn); - } - return RestliUtil.toTask(() -> { - final Set<String> projectedAspects = - aspectNames == null ? 
Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames)); - final Entity entity = _entityService.getEntity(urn, projectedAspects); - if (entity == null) { - throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr)); - } - return new AnyRecord(entity.data()); - }, MetricRegistry.name(this.getClass(), "get")); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); + } + return RestliUtil.toTask( + () -> { + final Set<String> projectedAspects = + aspectNames == null + ? Collections.emptySet() + : new HashSet<>(Arrays.asList(aspectNames)); + final Entity entity = _entityService.getEntity(urn, projectedAspects); + if (entity == null) { + throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr)); + } + return new AnyRecord(entity.data()); + }, + MetricRegistry.name(this.getClass(), "get")); } @RestMethod.BatchGet @Nonnull @WithSpan - public Task<Map<String, AnyRecord>> batchGet(@Nonnull Set<String> urnStrs, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task<Map<String, AnyRecord>> batchGet( + @Nonnull Set<String> urnStrs, + @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.info("BATCH GET {}", urnStrs); final Set<Urn> urns = new HashSet<>(); for (final String urnStr : urnStrs) { urns.add(Urn.createFromString(urnStr)); } - List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream() - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List<java.util.Optional<EntitySpec>> resourceSpecs = + urns.stream() + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entities: " + urnStrs); - } - return RestliUtil.toTask(() -> { - final Set<String> projectedAspects = - aspectNames == null ? Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames)); - return _entityService.getEntities(urns, projectedAspects) - .entrySet() - .stream() - .collect( - Collectors.toMap(entry -> entry.getKey().toString(), entry -> new AnyRecord(entry.getValue().data()))); - }, MetricRegistry.name(this.getClass(), "batchGet")); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities: " + urnStrs); + } + return RestliUtil.toTask( + () -> { + final Set<String> projectedAspects = + aspectNames == null + ? 
Collections.emptySet() + : new HashSet<>(Arrays.asList(aspectNames)); + return _entityService.getEntities(urns, projectedAspects).entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), + entry -> new AnyRecord(entry.getValue().data()))); + }, + MetricRegistry.name(this.getClass(), "batchGet")); } private SystemMetadata populateDefaultFieldsIfEmpty(@Nullable SystemMetadata systemMetadata) { @@ -234,17 +250,21 @@ private SystemMetadata populateDefaultFieldsIfEmpty(@Nullable SystemMetadata sys @Action(name = ACTION_INGEST) @Nonnull @WithSpan - public Task<Void> ingest(@ActionParam(PARAM_ENTITY) @Nonnull Entity entity, + public Task<Void> ingest( + @ActionParam(PARAM_ENTITY) @Nonnull Entity entity, @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata providedSystemMetadata) throws URISyntaxException { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to edit entity " + urn); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entity " + urn); } try { @@ -255,33 +275,43 @@ public Task<Void> ingest(@ActionParam(PARAM_ENTITY) @Nonnull Entity entity, SystemMetadata systemMetadata = populateDefaultFieldsIfEmpty(providedSystemMetadata); - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); // variables referenced in lambdas are required to be final final SystemMetadata finalSystemMetadata = systemMetadata; - return RestliUtil.toTask(() -> { - _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata); - return null; - }, MetricRegistry.name(this.getClass(), "ingest")); + return RestliUtil.toTask( + () -> { + _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata); + return null; + }, + MetricRegistry.name(this.getClass(), "ingest")); } @Action(name = ACTION_BATCH_INGEST) @Nonnull @WithSpan - public Task<Void> batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] entities, - @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata[] systemMetadataList) throws URISyntaxException { + public Task<Void> batchIngest( + @ActionParam(PARAM_ENTITIES) @Nonnull Entity[] entities, + @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata[] systemMetadataList) + throws URISyntaxException { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - List<java.util.Optional<EntitySpec>> resourceSpecs = Arrays.stream(entities) - .map(Entity::getValue) - .map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + 
List<java.util.Optional<EntitySpec>> resourceSpecs = + Arrays.stream(entities) + .map(Entity::getValue) + .map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion) + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to edit entities."); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities."); } for (Entity entity : entities) { @@ -292,7 +322,8 @@ public Task<Void> batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] ent } } - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); if (systemMetadataList == null) { systemMetadataList = new SystemMetadata[entities.length]; @@ -302,113 +333,186 @@ public Task<Void> batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] ent throw RestliUtil.invalidArgumentsException("entities and systemMetadata length must match"); } - final List<SystemMetadata> finalSystemMetadataList = Arrays.stream(systemMetadataList) - .map(systemMetadata -> populateDefaultFieldsIfEmpty(systemMetadata)) - .collect(Collectors.toList()); + final List<SystemMetadata> finalSystemMetadataList = + Arrays.stream(systemMetadataList) + .map(systemMetadata -> populateDefaultFieldsIfEmpty(systemMetadata)) + .collect(Collectors.toList()); - return RestliUtil.toTask(() -> { - _entityService.ingestEntities(Arrays.asList(entities), auditStamp, finalSystemMetadataList); - return null; - }, MetricRegistry.name(this.getClass(), "batchIngest")); + return RestliUtil.toTask( + () -> { + _entityService.ingestEntities( + Arrays.asList(entities), auditStamp, finalSystemMetadataList); + return null; + }, + MetricRegistry.name(this.getClass(), "batchIngest")); } @Action(name = ACTION_SEARCH) @Nonnull @WithSpan - public Task<SearchResult> search(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, - @ActionParam(PARAM_COUNT) int count, @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext, + public Task<SearchResult> search( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_INPUT) @Nonnull String input, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_COUNT) int count, + @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext, @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), 
(EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("GET SEARCH RESULTS for {} with query {}", entityName, input); // TODO - change it to use _searchService once we are confident on it's latency return RestliUtil.toTask( - () -> { - final SearchResult result; - // This API is not used by the frontend for search bars so we default to structured - result = _entitySearchService.search(List.of(entityName), input, filter, sortCriterion, start, count, searchFlags); - return validateSearchResult(result, _entityService); - }, - MetricRegistry.name(this.getClass(), "search")); + () -> { + final SearchResult result; + // This API is not used by the frontend for search bars so we default to structured + result = + _entitySearchService.search( + List.of(entityName), input, filter, sortCriterion, start, count, searchFlags); + return validateSearchResult(result, _entityService); + }, + MetricRegistry.name(this.getClass(), "search")); } @Action(name = ACTION_SEARCH_ACROSS_ENTITIES) @Nonnull @WithSpan - public Task<SearchResult> searchAcrossEntities(@ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, - @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, - @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { + public Task<SearchResult> searchAcrossEntities( + @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, + @ActionParam(PARAM_INPUT) @Nonnull String input, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_COUNT) int count, + @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); log.info("GET SEARCH RESULTS ACROSS ENTITIES for {} with query {}", entityList, input); - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return RestliUtil.toTask(() -> validateSearchResult( - _searchService.searchAcrossEntities(entityList, input, filter, sortCriterion, start, count, finalFlags), - _entityService), "searchAcrossEntities"); + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return RestliUtil.toTask( + () -> + validateSearchResult( + _searchService.searchAcrossEntities( + entityList, input, filter, sortCriterion, start, count, finalFlags), + _entityService), + "searchAcrossEntities"); } @Action(name = ACTION_SCROLL_ACROSS_ENTITIES) @Nonnull @WithSpan - public Task<ScrollResult> scrollAcrossEntities(@ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, - @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_SCROLL_ID) String scrollId, - @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count, + public Task<ScrollResult> scrollAcrossEntities( + @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, + @ActionParam(PARAM_INPUT) @Nonnull String input, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SCROLL_ID) String scrollId, + @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, + @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); - log.info("GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", entityList, input, scrollId); - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return RestliUtil.toTask(() -> validateScrollResult( - _searchService.scrollAcrossEntities(entityList, input, filter, sortCriterion, scrollId, keepAlive, count, finalFlags), - _entityService), "scrollAcrossEntities"); + log.info( + "GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", + entityList, + input, + scrollId); + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return RestliUtil.toTask( + () -> + validateScrollResult( + _searchService.scrollAcrossEntities( + entityList, + input, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + finalFlags), + _entityService), + "scrollAcrossEntities"); } @Action(name = ACTION_SEARCH_ACROSS_LINEAGE) @Nonnull @WithSpan - public Task<LineageSearchResult> searchAcrossLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr, + public Task<LineageSearchResult> searchAcrossLineage( + @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, @ActionParam(PARAM_INPUT) @Optional @Nullable String input, @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, - @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) throws URISyntaxException { + @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) + throws URISyntaxException { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } Urn urn = Urn.createFromString(urnStr); List<String> entityList = entities == null ? 
Collections.emptyList() : Arrays.asList(entities); - log.info("GET SEARCH RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", - urnStr, direction, entityList, input); - return RestliUtil.toTask(() -> validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(urn, LineageDirection.valueOf(direction), entityList, input, maxHops, - filter, sortCriterion, start, count, startTimeMillis, endTimeMillis, searchFlags), _entityService), + log.info( + "GET SEARCH RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", + urnStr, + direction, + entityList, + input); + return RestliUtil.toTask( + () -> + validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + urn, + LineageDirection.valueOf(direction), + entityList, + input, + maxHops, + filter, + sortCriterion, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags), + _entityService), "searchAcrossRelationships"); } @Action(name = ACTION_SCROLL_ACROSS_LINEAGE) @Nonnull @WithSpan - public Task<LineageScrollResult> scrollAcrossLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr, + public Task<LineageScrollResult> scrollAcrossLineage( + @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, @ActionParam(PARAM_INPUT) @Optional @Nullable String input, @@ -420,72 +524,120 @@ public Task<LineageScrollResult> scrollAcrossLineage(@ActionParam(PARAM_URN) @No @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, - @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) throws URISyntaxException { + @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) + throws URISyntaxException { Urn urn = Urn.createFromString(urnStr); List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); - log.info("GET SCROLL RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", - urnStr, direction, entityList, input); - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true); - return RestliUtil.toTask(() -> validateLineageScrollResult( - _lineageSearchService.scrollAcrossLineage(urn, LineageDirection.valueOf(direction), entityList, input, maxHops, - filter, sortCriterion, scrollId, keepAlive, count, startTimeMillis, endTimeMillis, finalFlags), - _entityService), + log.info( + "GET SCROLL RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", + urnStr, + direction, + entityList, + input); + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setSkipCache(true); + return RestliUtil.toTask( + () -> + validateLineageScrollResult( + _lineageSearchService.scrollAcrossLineage( + urn, + LineageDirection.valueOf(direction), + entityList, + input, + maxHops, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + finalFlags), + _entityService), "scrollAcrossLineage"); } @Action(name = ACTION_LIST) @Nonnull @WithSpan - public Task<ListResult> list(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, + public Task<ListResult> list( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("GET LIST RESULTS for {} with filter {}", entityName, filter); - return RestliUtil.toTask(() -> validateListResult( - toListResult(_entitySearchService.filter(entityName, filter, sortCriterion, start, count)), _entityService), + return RestliUtil.toTask( + () -> + validateListResult( + toListResult( + _entitySearchService.filter(entityName, filter, sortCriterion, start, count)), + _entityService), MetricRegistry.name(this.getClass(), "filter")); } @Action(name = ACTION_AUTOCOMPLETE) @Nonnull @WithSpan - public Task<AutoCompleteResult> autocomplete(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_QUERY) @Nonnull String query, @ActionParam(PARAM_FIELD) @Optional @Nullable String field, - @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_LIMIT) int limit) { + public Task<AutoCompleteResult> autocomplete( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_QUERY) @Nonnull String query, + @ActionParam(PARAM_FIELD) @Optional @Nullable String field, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_LIMIT) int limit) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } - return RestliUtil.toTask(() -> _entitySearchService.autoComplete(entityName, query, field, filter, limit), + return RestliUtil.toTask( + () -> _entitySearchService.autoComplete(entityName, query, field, filter, limit), MetricRegistry.name(this.getClass(), "autocomplete")); } 
@Action(name = ACTION_BROWSE) @Nonnull @WithSpan - public Task<BrowseResult> browse(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_PATH) @Nonnull String path, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_START) int start, @ActionParam(PARAM_LIMIT) int limit) { + public Task<BrowseResult> browse( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_PATH) @Nonnull String path, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_LIMIT) int limit) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("GET BROWSE RESULTS for {} at path {}", entityName, path); return RestliUtil.toTask( - () -> validateBrowseResult(_entitySearchService.browse(entityName, path, filter, start, limit), _entityService), + () -> + validateBrowseResult( + _entitySearchService.browse(entityName, path, filter, start, limit), + _entityService), MetricRegistry.name(this.getClass(), "browse")); } @@ -496,13 +648,17 @@ public Task<StringArray> getBrowsePaths( @ActionParam(value = PARAM_URN, typeref = com.linkedin.common.Urn.class) @Nonnull Urn urn) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity: " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity: " + urn); } log.info("GET BROWSE PATHS for {}", urn); - return RestliUtil.toTask(() -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)), + return RestliUtil.toTask( + () -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)), MetricRegistry.name(this.getClass(), "getBrowsePaths")); } @@ -515,12 +671,13 @@ private String stringifyRowCount(int size) { } /* - Used to delete all data related to a filter criteria based on registryId, runId etc. - */ + Used to delete all data related to a filter criteria based on registryId, runId etc. 
+ */ @Action(name = "deleteAll") @Nonnull @WithSpan - public Task<RollbackResponse> deleteEntities(@ActionParam("registryId") @Optional String registryId, + public Task<RollbackResponse> deleteEntities( + @ActionParam("registryId") @Optional String registryId, @ActionParam("dryRun") @Optional Boolean dryRun) { String registryName = null; ComparableVersion registryVersion = new ComparableVersion("0.0.0-dev"); @@ -530,139 +687,188 @@ public Task<RollbackResponse> deleteEntities(@ActionParam("registryId") @Optiona registryName = registryId.split(":")[0]; registryVersion = new ComparableVersion(registryId.split(":")[1]); } catch (Exception e) { - throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, - "Failed to parse registry id: " + registryId, e); + throw new RestLiServiceException( + HttpStatus.S_500_INTERNAL_SERVER_ERROR, + "Failed to parse registry id: " + registryId, + e); } } String finalRegistryName = registryName; ComparableVersion finalRegistryVersion = registryVersion; String finalRegistryName1 = registryName; ComparableVersion finalRegistryVersion1 = registryVersion; - return RestliUtil.toTask(() -> { - RollbackResponse response = new RollbackResponse(); - List<AspectRowSummary> aspectRowsToDelete = - _systemMetadataService.findByRegistry(finalRegistryName, finalRegistryVersion.toString(), false, 0, - ESUtils.MAX_RESULT_SIZE); - log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - response.setAspectsAffected(aspectRowsToDelete.size()); - Set<String> urns = aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet(); - List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream() - .map(UrnUtils::getUrn) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entities."); - } - response.setEntitiesAffected(urns.size()); - response.setEntitiesDeleted(aspectRowsToDelete.stream().filter(AspectRowSummary::isKeyAspect).count()); - response.setAspectRowSummaries( - new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())))); - if ((dryRun == null) || (!dryRun)) { - Map<String, String> conditions = new HashMap(); - conditions.put("registryName", finalRegistryName1); - conditions.put("registryVersion", finalRegistryVersion1.toString()); - _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false); - } - return response; - }, MetricRegistry.name(this.getClass(), "deleteAll")); + return RestliUtil.toTask( + () -> { + RollbackResponse response = new RollbackResponse(); + List<AspectRowSummary> aspectRowsToDelete = + _systemMetadataService.findByRegistry( + finalRegistryName, + finalRegistryVersion.toString(), + false, + 0, + ESUtils.MAX_RESULT_SIZE); + log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + response.setAspectsAffected(aspectRowsToDelete.size()); + Set<String> urns = + aspectRowsToDelete.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet(); + List<java.util.Optional<EntitySpec>> resourceSpecs = + urns.stream() + .map(UrnUtils::getUrn) + .map( + urn -> 
+                          java.util.Optional.of(
+                              new EntitySpec(urn.getEntityType(), urn.toString())))
+                  .collect(Collectors.toList());
+          Authentication auth = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  auth,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+                  resourceSpecs)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities.");
+          }
+          response.setEntitiesAffected(urns.size());
+          response.setEntitiesDeleted(
+              aspectRowsToDelete.stream().filter(AspectRowSummary::isKeyAspect).count());
+          response.setAspectRowSummaries(
+              new AspectRowSummaryArray(
+                  aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))));
+          if ((dryRun == null) || (!dryRun)) {
+            Map<String, String> conditions = new HashMap<>();
+            conditions.put("registryName", finalRegistryName1);
+            conditions.put("registryVersion", finalRegistryVersion1.toString());
+            _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false);
+          }
+          return response;
+        },
+        MetricRegistry.name(this.getClass(), "deleteAll"));
   }
 
   /**
    * Deletes all data related to an individual urn(entity).
+   *
    * @param urnStr - the urn of the entity.
-   * @param aspectName - the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects).
+   * @param aspectName - the optional aspect name if only the aspect should be deleted (applicable
+   *     only for timeseries aspects).
    * @param startTimeMills - the optional start time (applicable only for timeseries aspects).
    * @param endTimeMillis - the optional end time (applicable only for the timeseries aspects).
-   * @return - a DeleteEntityResponse object.
+   * @return - a DeleteEntityResponse object.
    * @throws URISyntaxException
    */
   @Action(name = ACTION_DELETE)
   @Nonnull
   @WithSpan
-  public Task<DeleteEntityResponse> deleteEntity(@ActionParam(PARAM_URN) @Nonnull String urnStr,
-      @ActionParam(PARAM_ASPECT_NAME) @Optional String aspectName,
-      @ActionParam(PARAM_START_TIME_MILLIS) @Optional Long startTimeMills,
-      @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long endTimeMillis) throws URISyntaxException {
+  public Task<DeleteEntityResponse> deleteEntity(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr,
+      @ActionParam(PARAM_ASPECT_NAME) @Optional String aspectName,
+      @ActionParam(PARAM_START_TIME_MILLIS) @Optional Long startTimeMills,
+      @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long endTimeMillis)
+      throws URISyntaxException {
     Urn urn = Urn.createFromString(urnStr);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
-        Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to delete entity: " + urnStr);
-    }
-    return RestliUtil.toTask(() -> {
-      // Find the timeseries aspects to delete. If aspectName is null, delete all.
-    List<String> timeseriesAspectNames =
-        EntitySpecUtils.getEntityTimeseriesAspectNames(_entityService.getEntityRegistry(), urn.getEntityType());
-    if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) {
-      throw new UnsupportedOperationException(
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+            Collections.singletonList(
+                java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + urnStr);
+    }
+    return RestliUtil.toTask(
+        () -> {
+          // Find the timeseries aspects to delete. If aspectName is null, delete all.
+          List<String> timeseriesAspectNames =
+              EntitySpecUtils.getEntityTimeseriesAspectNames(
+                  _entityService.getEntityRegistry(), urn.getEntityType());
+          if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) {
+            throw new UnsupportedOperationException(
                String.format("Not supported for non-timeseries aspect '%s'.", aspectName));
-    }
+          }
-    List<String> timeseriesAspectsToDelete =
+          List<String> timeseriesAspectsToDelete =
             (aspectName == null) ? timeseriesAspectNames : ImmutableList.of(aspectName);
-    DeleteEntityResponse response = new DeleteEntityResponse();
-    if (aspectName == null) {
-      RollbackRunResult result = _entityService.deleteUrn(urn);
-      response.setRows(result.getRowsDeletedFromEntityDeletion());
-    }
-    Long numTimeseriesDocsDeleted =
-        deleteTimeseriesAspects(urn, startTimeMills, endTimeMillis, timeseriesAspectsToDelete);
-    log.info("Total number of timeseries aspect docs deleted: {}", numTimeseriesDocsDeleted);
-
-    response.setUrn(urnStr);
-    response.setTimeseriesRows(numTimeseriesDocsDeleted);
-
-    return response;
-    }, MetricRegistry.name(this.getClass(), "delete"));
+          DeleteEntityResponse response = new DeleteEntityResponse();
+          if (aspectName == null) {
+            RollbackRunResult result = _entityService.deleteUrn(urn);
+            response.setRows(result.getRowsDeletedFromEntityDeletion());
+          }
+          Long numTimeseriesDocsDeleted =
+              deleteTimeseriesAspects(
+                  urn, startTimeMills, endTimeMillis, timeseriesAspectsToDelete);
+          log.info("Total number of timeseries aspect docs deleted: {}", numTimeseriesDocsDeleted);
+
+          response.setUrn(urnStr);
+          response.setTimeseriesRows(numTimeseriesDocsDeleted);
+
+          return response;
+        },
+        MetricRegistry.name(this.getClass(), "delete"));
   }
 
   /**
-   * Deletes the set of timeseries aspect values for the specified aspects that are associated with the given
-   * entity urn between startTimeMillis and endTimeMillis.
+   * Deletes the set of timeseries aspect values for the specified aspects that are associated with
+   * the given entity urn between startTimeMillis and endTimeMillis.
+   *
    * @param urn The entity urn whose timeseries aspect values need to be deleted.
-   * @param startTimeMillis The start time in milliseconds from when the aspect values need to be deleted.
-   *                        If this is null, the deletion starts from the oldest value.
-   * @param endTimeMillis The end time in milliseconds up to when the aspect values need to be deleted.
-   *                      If this is null, the deletion will go till the most recent value.
+   * @param startTimeMillis The start time in milliseconds from when the aspect values need to be
+   *     deleted. If this is null, the deletion starts from the oldest value.
+   * @param endTimeMillis The end time in milliseconds up to when the aspect values need to be
+   *     deleted. If this is null, the deletion will extend to the most recent value.
* @param aspectsToDelete - The list of aspect names whose values need to be deleted. * @return The total number of documents deleted. */ - private Long deleteTimeseriesAspects(@Nonnull Urn urn, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, + private Long deleteTimeseriesAspects( + @Nonnull Urn urn, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, @Nonnull List<String> aspectsToDelete) { long totalNumberOfDocsDeleted = 0; Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entity " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urn); } // Construct the filter. List<Criterion> criteria = new ArrayList<>(); criteria.add(QueryUtils.newCriterion("urn", urn.toString())); if (startTimeMillis != null) { criteria.add( - QueryUtils.newCriterion(ES_FIELD_TIMESTAMP, startTimeMillis.toString(), Condition.GREATER_THAN_OR_EQUAL_TO)); + QueryUtils.newCriterion( + ES_FIELD_TIMESTAMP, startTimeMillis.toString(), Condition.GREATER_THAN_OR_EQUAL_TO)); } if (endTimeMillis != null) { criteria.add( - QueryUtils.newCriterion(ES_FIELD_TIMESTAMP, endTimeMillis.toString(), Condition.LESS_THAN_OR_EQUAL_TO)); + QueryUtils.newCriterion( + ES_FIELD_TIMESTAMP, endTimeMillis.toString(), Condition.LESS_THAN_OR_EQUAL_TO)); } final Filter filter = QueryUtils.getFilterFromCriteria(criteria); // Delete all the timeseries aspects by the filter. final String entityType = urn.getEntityType(); for (final String aspect : aspectsToDelete) { - DeleteAspectValuesResult result = _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter); + DeleteAspectValuesResult result = + _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter); totalNumberOfDocsDeleted += result.getNumDocsDeleted(); - log.debug("Number of timeseries docs deleted for entity:{}, aspect:{}, urn:{}, startTime:{}, endTime:{}={}", - entityType, aspect, urn, startTimeMillis, endTimeMillis, result.getNumDocsDeleted()); + log.debug( + "Number of timeseries docs deleted for entity:{}, aspect:{}, urn:{}, startTime:{}, endTime:{}={}", + entityType, + aspect, + urn, + startTimeMillis, + endTimeMillis, + result.getNumDocsDeleted()); } return totalNumberOfDocsDeleted; } @@ -670,19 +876,24 @@ private Long deleteTimeseriesAspects(@Nonnull Urn urn, @Nullable Long startTimeM @Action(name = "deleteReferences") @Nonnull @WithSpan - public Task<DeleteReferencesResponse> deleteReferencesTo(@ActionParam(PARAM_URN) @Nonnull String urnStr, - @ActionParam("dryRun") @Optional Boolean dry) throws URISyntaxException { + public Task<DeleteReferencesResponse> deleteReferencesTo( + @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam("dryRun") @Optional Boolean dry) + throws URISyntaxException { boolean dryRun = dry != null ? 
dry : false; Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entity " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urnStr); } - return RestliUtil.toTask(() -> _deleteEntityService.deleteReferencesTo(urn, dryRun), + return RestliUtil.toTask( + () -> _deleteEntityService.deleteReferencesTo(urn, dryRun), MetricRegistry.name(this.getClass(), "deleteReferences")); } @@ -692,18 +903,24 @@ public Task<DeleteReferencesResponse> deleteReferencesTo(@ActionParam(PARAM_URN) @Action(name = "setWritable") @Nonnull @WithSpan - public Task<Void> setWriteable(@ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) { + public Task<Void> setWriteable( + @ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to enable and disable write mode."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to enable and disable write mode."); } log.info("setting entity resource to be writable"); - return RestliUtil.toTask(() -> { - _entityService.setWritable(value); - return null; - }); + return RestliUtil.toTask( + () -> { + _entityService.setWritable(value); + return null; + }); } @Action(name = "getTotalEntityCount") @@ -712,9 +929,13 @@ public Task<Void> setWriteable(@ActionParam(PARAM_VALUE) @Optional("true") @Nonn public Task<Long> getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String entityName) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity counts."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); } return RestliUtil.toTask(() -> _entitySearchService.docCount(entityName)); } @@ -722,26 +943,39 @@ public Task<Long> getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String @Action(name = "batchGetTotalEntityCount") @Nonnull @WithSpan - public Task<LongMap> batchGetTotalEntityCount(@ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) { + public Task<LongMap> batchGetTotalEntityCount( + @ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) { Authentication auth 
= AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity counts."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); } - return RestliUtil.toTask(() -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames)))); + return RestliUtil.toTask( + () -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames)))); } @Action(name = ACTION_LIST_URNS) @Nonnull @WithSpan - public Task<ListUrnsResult> listUrns(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) throws URISyntaxException { + public Task<ListUrnsResult> listUrns( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_COUNT) int count) + throws URISyntaxException { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("LIST URNS for {} with start {} and count {}", entityName, start, count); return RestliUtil.toTask(() -> _entityService.listUrns(entityName, start, count), "listUrns"); @@ -750,12 +984,12 @@ public Task<ListUrnsResult> listUrns(@ActionParam(PARAM_ENTITY) @Nonnull String @Action(name = ACTION_APPLY_RETENTION) @Nonnull @WithSpan - public Task<String> applyRetention(@ActionParam(PARAM_START) @Optional @Nullable Integer start, - @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count, - @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion, - @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName, - @ActionParam(PARAM_URN) @Optional @Nullable String urn - ) { + public Task<String> applyRetention( + @ActionParam(PARAM_START) @Optional @Nullable Integer start, + @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count, + @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion, + @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName, + @ActionParam(PARAM_URN) @Optional @Nullable String urn) { Authentication auth = AuthenticationContext.getAuthentication(); EntitySpec resourceSpec = null; if (StringUtils.isNotBlank(urn)) { @@ -763,47 +997,66 @@ public Task<String> applyRetention(@ActionParam(PARAM_START) @Optional @Nullable resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString()); } if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE), resourceSpec)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to 
apply retention."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE), + resourceSpec)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to apply retention."); } - return RestliUtil.toTask(() -> _entityService.batchApplyRetention( - start, count, attemptWithVersion, aspectName, urn), ACTION_APPLY_RETENTION); + return RestliUtil.toTask( + () -> _entityService.batchApplyRetention(start, count, attemptWithVersion, aspectName, urn), + ACTION_APPLY_RETENTION); } @Action(name = ACTION_FILTER) @Nonnull @WithSpan - public Task<SearchResult> filter(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, + public Task<SearchResult> filter( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_FILTER) Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("FILTER RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( - () -> validateSearchResult(_entitySearchService.filter(entityName, filter, sortCriterion, start, count), - _entityService), MetricRegistry.name(this.getClass(), "search")); + () -> + validateSearchResult( + _entitySearchService.filter(entityName, filter, sortCriterion, start, count), + _entityService), + MetricRegistry.name(this.getClass(), "search")); } @Action(name = ACTION_EXISTS) @Nonnull @WithSpan - public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) throws URISyntaxException { + public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) + throws URISyntaxException { Urn urn = UrnUtils.getUrn(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized get entity: " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized get entity: " + urnStr); } log.info("EXISTS for {}", urnStr); - return RestliUtil.toTask(() -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists")); + return RestliUtil.toTask( + () -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java 
b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java index 0c3e93273b863..9653a20bd8785 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java @@ -1,10 +1,16 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -34,16 +40,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "entitiesV2", namespace = "com.linkedin.entity") public class EntityV2Resource extends CollectionResourceTaskTemplate<String, EntityResponse> { @@ -56,68 +53,90 @@ public class EntityV2Resource extends CollectionResourceTaskTemplate<String, Ent @Named("authorizerChain") private Authorizer _authorizer; - /** - * Retrieves the value for an entity that is made up of latest versions of specified aspects. - */ + /** Retrieves the value for an entity that is made up of latest versions of specified aspects. 
*/ @RestMethod.Get @Nonnull @WithSpan - public Task<EntityResponse> get(@Nonnull String urnStr, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task<EntityResponse> get( + @Nonnull String urnStr, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.debug("GET V2 {}", urnStr); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); } - return RestliUtil.toTask(() -> { - final String entityName = urnToEntityName(urn); - final Set<String> projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityName) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntityV2(entityName, urn, projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to get entity with urn: %s, aspects: %s", urn, projectedAspects), e); - } - }, MetricRegistry.name(this.getClass(), "get")); + return RestliUtil.toTask( + () -> { + final String entityName = urnToEntityName(urn); + final Set<String> projectedAspects = + aspectNames == null + ? getAllAspectNames(_entityService, entityName) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntityV2(entityName, urn, projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to get entity with urn: %s, aspects: %s", urn, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "get")); } @RestMethod.BatchGet @Nonnull @WithSpan - public Task<Map<Urn, EntityResponse>> batchGet(@Nonnull Set<String> urnStrs, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task<Map<Urn, EntityResponse>> batchGet( + @Nonnull Set<String> urnStrs, + @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.debug("BATCH GET V2 {}", urnStrs.toString()); final Set<Urn> urns = new HashSet<>(); for (final String urnStr : urnStrs) { urns.add(Urn.createFromString(urnStr)); } Authentication auth = AuthenticationContext.getAuthentication(); - List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream() - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List<java.util.Optional<EntitySpec>> resourceSpecs = + urns.stream() + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entities " + urnStrs); + && !isAuthorized( + auth, + _authorizer, + 
ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + urnStrs); } if (urns.size() <= 0) { return Task.value(Collections.emptyMap()); } final String entityName = urnToEntityName(urns.iterator().next()); - return RestliUtil.toTask(() -> { - final Set<String> projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityName) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntitiesV2(entityName, urns, projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to batch get entities with urns: %s, projectedAspects: %s", urns, projectedAspects), - e); - } - }, MetricRegistry.name(this.getClass(), "batchGet")); + return RestliUtil.toTask( + () -> { + final Set<String> projectedAspects = + aspectNames == null + ? getAllAspectNames(_entityService, entityName) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntitiesV2(entityName, urns, projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch get entities with urns: %s, projectedAspects: %s", + urns, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "batchGet")); } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java index 05b7e6b3ff24b..fc556d15342c2 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java @@ -1,10 +1,15 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -35,19 +40,16 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - /** * Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities */ @Slf4j -@RestLiCollection(name = "entitiesVersionedV2", namespace = "com.linkedin.entity", +@RestLiCollection( + name = "entitiesVersionedV2", + namespace = "com.linkedin.entity", keyTyperefClass = com.linkedin.common.versioned.VersionedUrn.class) -public class EntityVersionedV2Resource extends CollectionResourceTaskTemplate<com.linkedin.common.urn.VersionedUrn, EntityResponse> { 
+public class EntityVersionedV2Resource + extends CollectionResourceTaskTemplate<com.linkedin.common.urn.VersionedUrn, EntityResponse> { @Inject @Named("entityService") @@ -65,36 +67,54 @@ public Task<Map<Urn, EntityResponse>> batchGetVersioned( @QueryParam(PARAM_ENTITY_TYPE) @Nonnull String entityType, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) { Authentication auth = AuthenticationContext.getAuthentication(); - List<java.util.Optional<EntitySpec>> resourceSpecs = versionedUrnStrs.stream() - .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List<java.util.Optional<EntitySpec>> resourceSpecs = + versionedUrnStrs.stream() + .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + versionedUrnStrs); } log.debug("BATCH GET VERSIONED V2 {}", versionedUrnStrs); if (versionedUrnStrs.size() <= 0) { return Task.value(Collections.emptyMap()); } - return RestliUtil.toTask(() -> { - final Set<String> projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityType) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntitiesVersionedV2(versionedUrnStrs.stream() - .map(versionedUrnTyperef -> { - VersionedUrn versionedUrn = new VersionedUrn().setUrn(UrnUtils.getUrn(versionedUrnTyperef.getUrn())); - if (versionedUrnTyperef.getVersionStamp() != null) { - versionedUrn.setVersionStamp(versionedUrnTyperef.getVersionStamp()); - } - return versionedUrn; - }).collect(Collectors.toSet()), projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to batch get versioned entities: %s, projectedAspects: %s", versionedUrnStrs, projectedAspects), - e); - } - }, MetricRegistry.name(this.getClass(), "batchGet")); + return RestliUtil.toTask( + () -> { + final Set<String> projectedAspects = + aspectNames == null + ? 
getAllAspectNames(_entityService, entityType) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntitiesVersionedV2( + versionedUrnStrs.stream() + .map( + versionedUrnTyperef -> { + VersionedUrn versionedUrn = + new VersionedUrn() + .setUrn(UrnUtils.getUrn(versionedUrnTyperef.getUrn())); + if (versionedUrnTyperef.getVersionStamp() != null) { + versionedUrn.setVersionStamp(versionedUrnTyperef.getVersionStamp()); + } + return versionedUrn; + }) + .collect(Collectors.toSet()), + projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch get versioned entities: %s, projectedAspects: %s", + versionedUrnStrs, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "batchGet")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java index 82d29ea00663b..1b22cc135b037 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java @@ -3,14 +3,11 @@ import com.linkedin.metadata.entity.EntityService; import java.util.Set; - public class ResourceUtils { - private ResourceUtils() { - - } + private ResourceUtils() {} - public static Set<String> getAllAspectNames(final EntityService entityService, final String entityName) { + public static Set<String> getAllAspectNames( + final EntityService entityService, final String entityName) { return entityService.getEntityAspectNames(entityName); } - } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java index 4a8e74c89039a..3fdd1d804a83f 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java @@ -1,10 +1,19 @@ package com.linkedin.metadata.resources.lineage; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; @@ -42,19 +51,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static 
com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; - - -/** - * Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types} - */ +/** Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types} */ @Slf4j @RestLiSimpleResource(name = "relationships", namespace = "com.linkedin.lineage") public final class Relationships extends SimpleResourceTemplate<EntityRelationships> { @@ -76,14 +73,25 @@ public Relationships() { super(); } - private RelatedEntitiesResult getRelatedEntities(String rawUrn, List<String> relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count) { + private RelatedEntitiesResult getRelatedEntities( + String rawUrn, + List<String> relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count) { start = start == null ? 0 : start; count = count == null ? MAX_DOWNSTREAM_CNT : count; - return _graphService.findRelatedEntities(null, newFilter("urn", rawUrn), null, QueryUtils.EMPTY_FILTER, - relationshipTypes, newRelationshipFilter(QueryUtils.EMPTY_FILTER, direction), start, count); + return _graphService.findRelatedEntities( + null, + newFilter("urn", rawUrn), + null, + QueryUtils.EMPTY_FILTER, + relationshipTypes, + newRelationshipFilter(QueryUtils.EMPTY_FILTER, direction), + start, + count); } static RelationshipDirection getOppositeDirection(RelationshipDirection direction) { @@ -99,40 +107,55 @@ static RelationshipDirection getOppositeDirection(RelationshipDirection directio @Nonnull @RestMethod.Get @WithSpan - public Task<EntityRelationships> get(@QueryParam("urn") @Nonnull String rawUrn, + public Task<EntityRelationships> get( + @QueryParam("urn") @Nonnull String rawUrn, @QueryParam("types") @Nonnull String[] relationshipTypesParam, - @QueryParam("direction") @Nonnull String rawDirection, @QueryParam("start") @Optional @Nullable Integer start, + @QueryParam("direction") @Nonnull String rawDirection, + @QueryParam("start") @Optional @Nullable Integer start, @QueryParam("count") @Optional @Nullable Integer count) { Urn urn = UrnUtils.getUrn(rawUrn); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity lineage: " + rawUrn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + rawUrn); } RelationshipDirection direction = RelationshipDirection.valueOf(rawDirection); final List<String> 
relationshipTypes = Arrays.asList(relationshipTypesParam);
-    return RestliUtil.toTask(() -> {
-
-      final RelatedEntitiesResult relatedEntitiesResult =
-          getRelatedEntities(rawUrn, relationshipTypes, direction, start, count);
-      final EntityRelationshipArray entityArray =
-          new EntityRelationshipArray(relatedEntitiesResult.getEntities().stream().map(entity -> {
-            try {
-              return new EntityRelationship().setEntity(Urn.createFromString(entity.getUrn()))
-                  .setType(entity.getRelationshipType());
-            } catch (URISyntaxException e) {
-              throw new RuntimeException(
-                  String.format("Failed to convert urnStr %s found in the Graph to an Urn object", entity.getUrn()));
-            }
-          }).collect(Collectors.toList()));
-
-      return new EntityRelationships().setStart(relatedEntitiesResult.getStart())
-          .setCount(relatedEntitiesResult.getCount())
-          .setTotal(relatedEntitiesResult.getTotal())
-          .setRelationships(entityArray);
-    }, MetricRegistry.name(this.getClass(), "getLineage"));
+    return RestliUtil.toTask(
+        () -> {
+          final RelatedEntitiesResult relatedEntitiesResult =
+              getRelatedEntities(rawUrn, relationshipTypes, direction, start, count);
+          final EntityRelationshipArray entityArray =
+              new EntityRelationshipArray(
+                  relatedEntitiesResult.getEntities().stream()
+                      .map(
+                          entity -> {
+                            try {
+                              return new EntityRelationship()
+                                  .setEntity(Urn.createFromString(entity.getUrn()))
+                                  .setType(entity.getRelationshipType());
+                            } catch (URISyntaxException e) {
+                              throw new RuntimeException(
+                                  String.format(
+                                      "Failed to convert urnStr %s found in the Graph to an Urn object",
+                                      entity.getUrn()), e);
+                            }
+                          })
+                      .collect(Collectors.toList()));
+
+          return new EntityRelationships()
+              .setStart(relatedEntitiesResult.getStart())
+              .setCount(relatedEntitiesResult.getCount())
+              .setTotal(relatedEntitiesResult.getTotal())
+              .setRelationships(entityArray);
+        },
+        MetricRegistry.name(this.getClass(), "getLineage"));
   }
 
   @Nonnull
@@ -141,10 +164,14 @@ public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws E
     Urn urn = Urn.createFromString(rawUrn);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
-        Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to delete entity: " + rawUrn);
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+            Collections.singletonList(
+                java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + rawUrn);
     }
     _graphService.removeNode(urn);
     return new UpdateResponse(HttpStatus.S_200_OK);
@@ -153,22 +180,34 @@ public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws E
   @Action(name = ACTION_GET_LINEAGE)
   @Nonnull
   @WithSpan
-  public Task<EntityLineageResult> getLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr,
-      @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_START) @Optional @Nullable Integer start,
+  public Task<EntityLineageResult> getLineage(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr,
+      @ActionParam(PARAM_DIRECTION) String direction,
+      @ActionParam(PARAM_START) @Optional @Nullable Integer start,
       @ActionParam(PARAM_COUNT) @Optional @Nullable
Integer count, - @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops) throws URISyntaxException { + @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops) + throws URISyntaxException { log.info("GET LINEAGE {} {} {} {} {}", urnStr, direction, start, count, maxHops); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity lineage: " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + urnStr); } return RestliUtil.toTask( - () -> _graphService.getLineage(urn, LineageDirection.valueOf(direction), start != null ? start : 0, - count != null ? count : 100, maxHops != null ? maxHops : 1), + () -> + _graphService.getLineage( + urn, + LineageDirection.valueOf(direction), + start != null ? start : 0, + count != null ? count : 100, + maxHops != null ? maxHops : 1), MetricRegistry.name(this.getClass(), "getLineage")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java index 1e6523e774d66..499fc0f5221fe 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.resources.operations; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -35,17 +39,10 @@ import javax.inject.Inject; import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.tasks.GetTaskResponse; import org.json.JSONObject; +import org.opensearch.client.tasks.GetTaskResponse; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Endpoints for performing maintenance operations - */ +/** Endpoints for performing maintenance operations */ @Slf4j @RestLiCollection(name = "operations", namespace = "com.linkedin.operations") public class OperationsResource extends CollectionResourceTaskTemplate<String, VersionedAspect> { @@ -67,9 +64,11 @@ public class OperationsResource extends CollectionResourceTaskTemplate<String, V @Inject @Named("entityService") private EntityService _entityService; + @Inject @Named("timeseriesAspectService") private TimeseriesAspectService _timeseriesAspectService; + @Inject 
@Named("elasticSearchSystemMetadataService") private SystemMetadataService _systemMetadataService; @@ -78,7 +77,7 @@ public class OperationsResource extends CollectionResourceTaskTemplate<String, V @Named("authorizerChain") private Authorizer _authorizer; - public OperationsResource() { } + public OperationsResource() {} @VisibleForTesting OperationsResource(TimeseriesAspectService timeseriesAspectService) { @@ -88,15 +87,18 @@ public OperationsResource() { } @Action(name = ACTION_RESTORE_INDICES) @Nonnull @WithSpan - public Task<String> restoreIndices(@ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, + public Task<String> restoreIndices( + @ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, @ActionParam(PARAM_URN) @Optional @Nullable String urn, @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, @ActionParam("start") @Optional @Nullable Integer start, - @ActionParam("batchSize") @Optional @Nullable Integer batchSize - ) { - return RestliUtil.toTask(() -> { - return Utils.restoreIndices(aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); - }, MetricRegistry.name(this.getClass(), "restoreIndices")); + @ActionParam("batchSize") @Optional @Nullable Integer batchSize) { + return RestliUtil.toTask( + () -> { + return Utils.restoreIndices( + aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); + }, + MetricRegistry.name(this.getClass(), "restoreIndices")); } @VisibleForTesting @@ -117,62 +119,86 @@ static boolean isTaskIdValid(String task) { public Task<String> getTaskStatus( @ActionParam(PARAM_NODE_ID) @Optional String nodeId, @ActionParam(PARAM_TASK_ID) @Optional("0") long taskId, - @ActionParam(PARAM_TASK) @Optional String task - ) { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status"); - } - boolean taskSpecified = task != null; - boolean nodeAndTaskIdSpecified = nodeId != null && taskId > 0; - if (!taskSpecified && !nodeAndTaskIdSpecified) { - throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Please specify either Node ID + task ID OR composite task parameters"); - } + @ActionParam(PARAM_TASK) @Optional String task) { + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE), + List.of(java.util.Optional.empty()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status"); + } + boolean taskSpecified = task != null; + boolean nodeAndTaskIdSpecified = nodeId != null && taskId > 0; + if (!taskSpecified && !nodeAndTaskIdSpecified) { + throw new RestLiServiceException( + HttpStatus.S_400_BAD_REQUEST, + "Please specify either Node ID + task ID OR composite task parameters"); + } - if (taskSpecified && nodeAndTaskIdSpecified && !task.equals(String.format("%s:%d", nodeId, taskId))) { - throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Please specify only one of Node ID + task ID OR 
composite task parameters"); - } + if (taskSpecified + && nodeAndTaskIdSpecified + && !task.equals(String.format("%s:%d", nodeId, taskId))) { + throw new RestLiServiceException( + HttpStatus.S_400_BAD_REQUEST, + "Please specify only one of Node ID + task ID OR composite task parameters"); + } - if (taskSpecified && !isTaskIdValid(task)) { - throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, - String.format("Task should be in the form nodeId:taskId e.g. aB1cdEf2GHIJKLMnoPQr3S:123456 (got %s)", task)); - } + if (taskSpecified && !isTaskIdValid(task)) { + throw new RestLiServiceException( + HttpStatus.S_400_BAD_REQUEST, + String.format( + "Task should be in the form nodeId:taskId e.g. aB1cdEf2GHIJKLMnoPQr3S:123456 (got %s)", + task)); + } - String nodeIdToQuery = nodeAndTaskIdSpecified ? nodeId : task.split(":")[0]; - long taskIdToQuery = nodeAndTaskIdSpecified ? taskId : Long.parseLong(task.split(":")[1]); - java.util.Optional<GetTaskResponse> res = _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); - JSONObject j = new JSONObject(); - if (res.isEmpty()) { - j.put("error", String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); - return j.toString(); - } - GetTaskResponse resp = res.get(); - j.put("completed", resp.isCompleted()); - j.put("taskId", res.get().getTaskInfo().getTaskId()); - j.put("status", res.get().getTaskInfo().getStatus()); - j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); - return j.toString(); - }, MetricRegistry.name(this.getClass(), "getTaskStatus")); + String nodeIdToQuery = nodeAndTaskIdSpecified ? nodeId : task.split(":")[0]; + long taskIdToQuery = nodeAndTaskIdSpecified ? taskId : Long.parseLong(task.split(":")[1]); + java.util.Optional<GetTaskResponse> res = + _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); + JSONObject j = new JSONObject(); + if (res.isEmpty()) { + j.put( + "error", + String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); + return j.toString(); + } + GetTaskResponse resp = res.get(); + j.put("completed", resp.isCompleted()); + j.put("taskId", res.get().getTaskInfo().getTaskId()); + j.put("status", res.get().getTaskInfo().getStatus()); + j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); + return j.toString(); + }, + MetricRegistry.name(this.getClass(), "getTaskStatus")); } @Action(name = ACTION_GET_INDEX_SIZES) @Nonnull @WithSpan public Task<TimeseriesIndicesSizesResult> getIndexSizes() { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes."); - } - TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult(); - result.setIndexSizes(new TimeseriesIndexSizeResultArray(_timeseriesAspectService.getIndexSizes())); - return result; - }, MetricRegistry.name(this.getClass(), "getIndexSizes")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + 
ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE), + List.of(java.util.Optional.empty()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes."); + } + TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult(); + result.setIndexSizes( + new TimeseriesIndexSizeResultArray(_timeseriesAspectService.getIndexSizes())); + return result; + }, + MetricRegistry.name(this.getClass(), "getIndexSizes")); } @VisibleForTesting @@ -184,13 +210,16 @@ String executeTruncateTimeseriesAspect( @Nullable Integer batchSize, @Nullable Long timeoutSeconds, @Nullable Boolean forceDeleteByQuery, - @Nullable Boolean forceReindex - ) { + @Nullable Boolean forceReindex) { Authentication authentication = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index"); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE), + List.of(java.util.Optional.empty()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index"); } if (forceDeleteByQuery != null && forceDeleteByQuery.equals(forceReindex)) { @@ -199,14 +228,20 @@ String executeTruncateTimeseriesAspect( List<Criterion> criteria = new ArrayList<>(); criteria.add( - QueryUtils.newCriterion("timestampMillis", String.valueOf(endTimeMillis), Condition.LESS_THAN_OR_EQUAL_TO)); + QueryUtils.newCriterion( + "timestampMillis", String.valueOf(endTimeMillis), Condition.LESS_THAN_OR_EQUAL_TO)); final Filter filter = QueryUtils.getFilterFromCriteria(criteria); long numToDelete = _timeseriesAspectService.countByFilter(entityType, aspectName, filter); long totalNum = _timeseriesAspectService.countByFilter(entityType, aspectName, new Filter()); - String deleteSummary = String.format("Delete %d out of %d rows (%.2f%%). ", numToDelete, totalNum, ((double) numToDelete) / totalNum * 100); - boolean reindex = !(forceDeleteByQuery != null && forceDeleteByQuery) && ((forceReindex != null && forceReindex) || numToDelete > (totalNum / 2)); + String deleteSummary = + String.format( + "Delete %d out of %d rows (%.2f%%). ", + numToDelete, totalNum, ((double) numToDelete) / totalNum * 100); + boolean reindex = + !(forceDeleteByQuery != null && forceDeleteByQuery) + && ((forceReindex != null && forceReindex) || numToDelete > (totalNum / 2)); if (reindex) { deleteSummary += "Reindexing the aspect without the deleted records. 
"; @@ -232,17 +267,22 @@ String executeTruncateTimeseriesAspect( } if (reindex) { - // need to invert query to retain only the ones that do NOT meet the criterion from the count + // need to invert query to retain only the ones that do NOT meet the criterion from the + // count List<Criterion> reindexCriteria = new ArrayList<>(); reindexCriteria.add( - QueryUtils.newCriterion("timestampMillis", String.valueOf(endTimeMillis), Condition.GREATER_THAN)); + QueryUtils.newCriterion( + "timestampMillis", String.valueOf(endTimeMillis), Condition.GREATER_THAN)); final Filter reindexFilter = QueryUtils.getFilterFromCriteria(reindexCriteria); - String taskId = _timeseriesAspectService.reindexAsync(entityType, aspectName, reindexFilter, options); + String taskId = + _timeseriesAspectService.reindexAsync(entityType, aspectName, reindexFilter, options); log.info("reindex request submitted with ID " + taskId); return taskId; } else { - String taskId = _timeseriesAspectService.deleteAspectValuesAsync(entityType, aspectName, filter, options); + String taskId = + _timeseriesAspectService.deleteAspectValuesAsync( + entityType, aspectName, filter, options); log.info("delete by query request submitted with ID " + taskId); return taskId; } @@ -260,10 +300,18 @@ public Task<String> truncateTimeseriesAspect( @ActionParam(PARAM_BATCH_SIZE) @Optional @Nullable Integer batchSize, @ActionParam(PARAM_TIMEOUT_SECONDS) @Optional @Nullable Long timeoutSeconds, @ActionParam(PARAM_FORCE_DELETE_BY_QUERY) @Optional @Nullable Boolean forceDeleteByQuery, - @ActionParam(PARAM_FORCE_REINDEX) @Optional @Nullable Boolean forceReindex - ) { - return RestliUtil.toTask(() -> - executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, dryRun, batchSize, timeoutSeconds, forceDeleteByQuery, forceReindex), + @ActionParam(PARAM_FORCE_REINDEX) @Optional @Nullable Boolean forceReindex) { + return RestliUtil.toTask( + () -> + executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + dryRun, + batchSize, + timeoutSeconds, + forceDeleteByQuery, + forceReindex), MetricRegistry.name(this.getClass(), "truncateTimeseriesAspect")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java index 12586b66495a9..bf07d0eb9dd5b 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.resources.operations; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; @@ -19,14 +22,11 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - @Slf4j public class Utils { - private Utils() { } + private Utils() {} + public static String restoreIndices( @Nonnull String aspectName, @Nullable String urn, @@ -34,8 +34,7 @@ public static String restoreIndices( @Nullable Integer start, @Nullable Integer batchSize, @Nonnull Authorizer authorizer, - @Nonnull EntityService entityService - ) { + 
@Nonnull EntityService entityService) { Authentication authentication = AuthenticationContext.getAuthentication(); EntitySpec resourceSpec = null; if (StringUtils.isNotBlank(urn)) { @@ -43,16 +42,21 @@ public static String restoreIndices( resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString()); } if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, authorizer, ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE), - resourceSpec)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices."); + && !isAuthorized( + authentication, + authorizer, + ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE), + resourceSpec)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices."); } - RestoreIndicesArgs args = new RestoreIndicesArgs() - .setAspectName(aspectName) - .setUrnLike(urnLike) - .setUrn(urn) - .setStart(start) - .setBatchSize(batchSize); + RestoreIndicesArgs args = + new RestoreIndicesArgs() + .setAspectName(aspectName) + .setUrnLike(urnLike) + .setUrn(urn) + .setStart(start) + .setBatchSize(batchSize); Map<String, Object> result = new HashMap<>(); result.put("args", args); result.put("result", entityService.restoreIndices(args, log::info)); diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java index a8018074497c4..f4bc0dd72e4c6 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java @@ -1,9 +1,12 @@ package com.linkedin.metadata.resources.platform; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.entity.Entity; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -24,13 +27,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * DataHub Platform Actions - */ +/** DataHub Platform Actions */ @Slf4j @RestLiCollection(name = "platform", namespace = "com.linkedin.platform") public class PlatformResource extends CollectionResourceTaskTemplate<String, Entity> { @@ -54,14 +51,19 @@ public Task<Void> producePlatformEvent( @ActionParam("event") @Nonnull PlatformEvent event) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to produce platform events."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE), + 
(EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to produce platform events.");
     }
     log.info(String.format("Emitting platform event. name: %s, key: %s", eventName, key));
-    return RestliUtil.toTask(() -> {
-      _eventProducer.producePlatformEvent(eventName, key, event);
-      return null;
-    });
+    return RestliUtil.toTask(
+        () -> {
+          _eventProducer.producePlatformEvent(eventName, key, event);
+          return null;
+        });
   }
 }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java
index 270c52f380356..af6efb1ad8093 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java
@@ -1,7 +1,7 @@
 package com.linkedin.metadata.resources.restli;
 
 public final class RestliConstants {
-  private RestliConstants() { }
+  private RestliConstants() {}
 
   public static final String FINDER_SEARCH = "search";
   public static final String FINDER_FILTER = "filter";
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java
index 9949556c99b81..278cd48bc455e 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java
@@ -18,7 +18,6 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public class RestliUtils {
 
   private RestliUtils() {
@@ -26,8 +25,9 @@ private RestliUtils() {
   }
 
   /**
-   * Executes the provided supplier and convert the results to a {@link Task}.
-   * Exceptions thrown during the execution will be properly wrapped in {@link RestLiServiceException}.
+   * Executes the provided supplier and converts the results to a {@link Task}. Exceptions thrown
+   * during the execution will be properly wrapped in {@link RestLiServiceException}.
+   *
    * @param supplier The supplier to execute
    * @return A parseq {@link Task}
    */
@@ -38,7 +38,8 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) {
     } catch (Throwable throwable) {
 
       // Convert IllegalArgumentException to BAD REQUEST
-      if (throwable instanceof IllegalArgumentException || throwable.getCause() instanceof IllegalArgumentException) {
+      if (throwable instanceof IllegalArgumentException
+          || throwable.getCause() instanceof IllegalArgumentException) {
         throwable = badRequestException(throwable.getMessage());
       }
 
@@ -51,8 +52,10 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) {
   }
 
   /**
-   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} instead.
-   * A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the optional is emtpy.
+   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional}
+   * instead. A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the
+   * optional is empty.
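To make the `toTask` contract documented here concrete, a minimal usage sketch follows; the action name, parameter, and method body are hypothetical illustrations, not part of this patch:

// Hypothetical Rest.li action built on RestliUtils.toTask: any exception thrown by the
// supplier is wrapped in a RestLiServiceException, and an IllegalArgumentException
// (direct or as the cause) is converted to HTTP 400 via badRequestException(...).
@Action(name = "echoUpperCase") // illustrative name only
public Task<String> echoUpperCase(@ActionParam("input") @Nonnull String input) {
  return RestliUtils.toTask(
      () -> {
        if (input.isEmpty()) {
          throw new IllegalArgumentException("input must not be empty"); // surfaces as 400
        }
        return input.toUpperCase();
      });
}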
+ * * @param supplier The supplier to execute * @return A parseq {@link Task} */ @@ -81,22 +84,36 @@ public static RestLiServiceException invalidArgumentsException(@Nullable String return new RestLiServiceException(HttpStatus.S_412_PRECONDITION_FAILED, message); } - public static boolean isAuthorized(@Nonnull Authentication authentication, @Nonnull Authorizer authorizer, - @Nonnull final List<PoliciesConfig.Privilege> privileges, @Nonnull final List<java.util.Optional<EntitySpec>> resources) { + public static boolean isAuthorized( + @Nonnull Authentication authentication, + @Nonnull Authorizer authorizer, + @Nonnull final List<PoliciesConfig.Privilege> privileges, + @Nonnull final List<java.util.Optional<EntitySpec>> resources) { DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges); - return AuthUtil.isAuthorizedForResources(authorizer, authentication.getActor().toUrnStr(), resources, orGroup); + return AuthUtil.isAuthorizedForResources( + authorizer, authentication.getActor().toUrnStr(), resources, orGroup); } - public static boolean isAuthorized(@Nonnull Authentication authentication, @Nonnull Authorizer authorizer, - @Nonnull final List<PoliciesConfig.Privilege> privileges, @Nullable final EntitySpec resource) { + public static boolean isAuthorized( + @Nonnull Authentication authentication, + @Nonnull Authorizer authorizer, + @Nonnull final List<PoliciesConfig.Privilege> privileges, + @Nullable final EntitySpec resource) { DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges); - return AuthUtil.isAuthorized(authorizer, authentication.getActor().toUrnStr(), java.util.Optional.ofNullable(resource), orGroup); + return AuthUtil.isAuthorized( + authorizer, + authentication.getActor().toUrnStr(), + java.util.Optional.ofNullable(resource), + orGroup); } - private static DisjunctivePrivilegeGroup convertPrivilegeGroup(@Nonnull final List<PoliciesConfig.Privilege> privileges) { + private static DisjunctivePrivilegeGroup convertPrivilegeGroup( + @Nonnull final List<PoliciesConfig.Privilege> privileges) { return new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(privileges.stream() - .map(PoliciesConfig.Privilege::getType) - .collect(Collectors.toList())))); + ImmutableList.of( + new ConjunctivePrivilegeGroup( + privileges.stream() + .map(PoliciesConfig.Privilege::getType) + .collect(Collectors.toList())))); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java index 02d413301f3b4..554b6e909e9e3 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.resources.usage; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import 
com.fasterxml.jackson.databind.JsonNode; @@ -67,22 +70,23 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Rest.li entry point: /usageStats - */ +/** Rest.li entry point: /usageStats */ @Slf4j @Deprecated @RestLiSimpleResource(name = "usageStats", namespace = "com.linkedin.usage") public class UsageStats extends SimpleResourceTemplate<UsageAggregation> { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String ACTION_BATCH_INGEST = "batchIngest"; private static final String PARAM_BUCKETS = "buckets"; @@ -122,18 +126,24 @@ public class UsageStats extends SimpleResourceTemplate<UsageAggregation> { @WithSpan public Task<Void> batchIngest(@ActionParam(PARAM_BUCKETS) @Nonnull UsageAggregation[] buckets) { log.info("Ingesting {} usage stats aggregations", buckets.length); - return RestliUtil.toTask(() -> { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to edit entities."); - } - for (UsageAggregation agg : buckets) { - this.ingest(agg); - } - return null; - }, MetricRegistry.name(this.getClass(), "batchIngest")); + return RestliUtil.toTask( + () -> { + Authentication auth = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities."); + } + for (UsageAggregation agg : buckets) { + this.ingest(agg); + } + return null; + }, + MetricRegistry.name(this.getClass(), "batchIngest")); } private CalendarInterval windowToInterval(@Nonnull WindowDuration duration) { @@ -153,35 +163,50 @@ private CalendarInterval windowToInterval(@Nonnull WindowDuration duration) { } } - private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String resource, - @Nonnull WindowDuration duration) { - // NOTE: We will not populate the per-bucket userCounts and fieldCounts in this implementation because - // (a) it is very expensive to compute the un-explode equivalent queries for timeseries field collections, and - // (b) the equivalent data for the whole query will anyways be populated in the `aggregations` part of the results + private UsageAggregationArray getBuckets( + @Nonnull Filter filter, @Nonnull String resource, @Nonnull WindowDuration duration) { + // NOTE: We will not populate the per-bucket userCounts and fieldCounts in this implementation + // because + // (a) it is very expensive to compute the un-explode 
equivalent queries for timeseries field + // collections, and + // (b) the equivalent data for the whole query will anyways be populated in the `aggregations` + // part of the results // (see getAggregations). - // 1. Construct the aggregation specs for latest value of uniqueUserCount, totalSqlQueries & topSqlQueries. + // 1. Construct the aggregation specs for latest value of uniqueUserCount, totalSqlQueries & + // topSqlQueries. AggregationSpec uniqueUserCountAgg = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("uniqueUserCount"); AggregationSpec totalSqlQueriesAgg = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("totalSqlQueries"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("totalSqlQueries"); AggregationSpec topSqlQueriesAgg = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("topSqlQueries"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("topSqlQueries"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{uniqueUserCountAgg, totalSqlQueriesAgg, topSqlQueriesAgg}; + new AggregationSpec[] {uniqueUserCountAgg, totalSqlQueriesAgg, topSqlQueriesAgg}; // 2. Construct the Grouping buckets with just the ts bucket. GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(windowToInterval(duration))); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{timestampBucket}; + GroupingBucket[] groupingBuckets = new GroupingBucket[] {timestampBucket}; // 3. Query GenericTable result = - _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs, - filter, groupingBuckets); + _timeseriesAspectService.getAggregatedStats( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // 4. Populate buckets from the result. 
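To make the column mapping in the parsing loop below easier to follow, here is an illustrative sketch of one result row as this code consumes it; the column order is inferred from the `row.get(...)` indices in this file, the literal values are invented, and `com.linkedin.data.template.StringArray` plus `java.util.List` are assumed imported:

// Illustrative only: shape of one GenericTable row for the usage-stats query.
// [0] timestampMillis bucket, [1] uniqueUserCount, [2] totalSqlQueries,
// [3] topSqlQueries as a serialized JSON array (or ES_NULL_VALUE when absent).
StringArray exampleRow =
    new StringArray(List.of("1698278400000", "42", "137", "[\"SELECT 1\", \"SELECT 2\"]"));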
UsageAggregationArray buckets = new UsageAggregationArray(); @@ -211,9 +236,11 @@ private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String } if (!row.get(3).equals(ES_NULL_VALUE)) { try { - usageAggregationMetrics.setTopSqlQueries(OBJECT_MAPPER.readValue(row.get(3), StringArray.class)); + usageAggregationMetrics.setTopSqlQueries( + OBJECT_MAPPER.readValue(row.get(3), StringArray.class)); } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to convert topSqlQueries from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert topSqlQueries from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -226,20 +253,31 @@ private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String private List<UserUsageCounts> getUserUsageCounts(Filter filter) { // Sum aggregation on userCounts.count AggregationSpec sumUserCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.count"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.count"); AggregationSpec latestUserEmailAggSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("userCounts.userEmail"); - AggregationSpec[] aggregationSpecs = new AggregationSpec[]{sumUserCountsCountAggSpec, latestUserEmailAggSpec}; + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("userCounts.userEmail"); + AggregationSpec[] aggregationSpecs = + new AggregationSpec[] {sumUserCountsCountAggSpec, latestUserEmailAggSpec}; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend GenericTable result = - _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs, - filter, groupingBuckets); + _timeseriesAspectService.getAggregatedStats( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List<UserUsageCounts> userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -253,7 +291,8 @@ private List<UserUsageCounts> getUserUsageCounts(Filter filter) { try { userUsageCount.setCount(Integer.valueOf(row.get(1))); } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if (!row.get(2).equals(ES_NULL_VALUE)) { @@ -267,18 +306,26 @@ private List<UserUsageCounts> getUserUsageCounts(Filter filter) { private List<FieldUsageCounts> getFieldUsageCounts(Filter filter) { // Sum aggregation on fieldCounts.count AggregationSpec sumFieldCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("fieldCounts.count"); - AggregationSpec[] aggregationSpecs = new AggregationSpec[]{sumFieldCountAggSpec}; + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("fieldCounts.count"); + AggregationSpec[] aggregationSpecs = new AggregationSpec[] 
{sumFieldCountAggSpec}; // String grouping bucket on fieldCounts.fieldName GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("fieldCounts.fieldPath").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("fieldCounts.fieldPath") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend GenericTable result = - _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs, - filter, groupingBuckets); + _timeseriesAspectService.getAggregatedStats( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List<FieldUsageCounts> fieldUsageCounts = new ArrayList<>(); @@ -289,7 +336,8 @@ private List<FieldUsageCounts> getFieldUsageCounts(Filter filter) { try { fieldUsageCount.setCount(Integer.valueOf(row.get(1))); } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert field usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert field usage count from ES to int", e); } } fieldUsageCounts.add(fieldUsageCount); @@ -312,80 +360,100 @@ private UsageQueryResultAggregations getAggregations(Filter filter) { @Action(name = ACTION_QUERY) @Nonnull @WithSpan - public Task<UsageQueryResult> query(@ActionParam(PARAM_RESOURCE) @Nonnull String resource, + public Task<UsageQueryResult> query( + @ActionParam(PARAM_RESOURCE) @Nonnull String resource, @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, - @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional Long startTime, + @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional + Long startTime, @ActionParam(PARAM_END_TIME) @com.linkedin.restli.server.annotations.Optional Long endTime, - @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional Integer maxBuckets) { + @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional + Integer maxBuckets) { log.info("Attempting to query usage stats"); - return RestliUtil.toTask(() -> { - Authentication auth = AuthenticationContext.getAuthentication(); - Urn resourceUrn = UrnUtils.getUrn(resource); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), - new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to query usage."); - } - // 1. Populate the filter. This is common for all queries. 
- Filter filter = new Filter(); - ArrayList<Criterion> criteria = new ArrayList<>(); - Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource); - criteria.add(hasUrnCriterion); - if (startTime != null) { - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTime.toString()); - criteria.add(startTimeCriterion); - } - if (endTime != null) { - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTime.toString()); - criteria.add(endTimeCriterion); - } - - filter.setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); - - // 2. Get buckets. - UsageAggregationArray buckets = getBuckets(filter, resource, duration); - - // 3. Get aggregations. - UsageQueryResultAggregations aggregations = getAggregations(filter); - - // 4. Compute totalSqlQuery count from the buckets itself. - // We want to avoid issuing an additional query with a sum aggregation. - Integer totalQueryCount = null; - for (UsageAggregation bucket : buckets) { - if (bucket.getMetrics().getTotalSqlQueries() != null) { - if (totalQueryCount == null) { - totalQueryCount = 0; + return RestliUtil.toTask( + () -> { + Authentication auth = AuthenticationContext.getAuthentication(); + Urn resourceUrn = UrnUtils.getUrn(resource); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), + new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage."); + } + // 1. Populate the filter. This is common for all queries. + Filter filter = new Filter(); + ArrayList<Criterion> criteria = new ArrayList<>(); + Criterion hasUrnCriterion = + new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource); + criteria.add(hasUrnCriterion); + if (startTime != null) { + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(startTime.toString()); + criteria.add(startTimeCriterion); + } + if (endTime != null) { + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(endTime.toString()); + criteria.add(endTimeCriterion); } - totalQueryCount += bucket.getMetrics().getTotalSqlQueries(); - } - } - if (totalQueryCount != null) { - aggregations.setTotalSqlQueries(totalQueryCount); - } + filter.setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + + // 2. Get buckets. + UsageAggregationArray buckets = getBuckets(filter, resource, duration); + + // 3. Get aggregations. + UsageQueryResultAggregations aggregations = getAggregations(filter); + + // 4. Compute totalSqlQuery count from the buckets itself. + // We want to avoid issuing an additional query with a sum aggregation. + Integer totalQueryCount = null; + for (UsageAggregation bucket : buckets) { + if (bucket.getMetrics().getTotalSqlQueries() != null) { + if (totalQueryCount == null) { + totalQueryCount = 0; + } + totalQueryCount += bucket.getMetrics().getTotalSqlQueries(); + } + } - // 5. Populate and return the result. 
- return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations); - }, MetricRegistry.name(this.getClass(), "query")); + if (totalQueryCount != null) { + aggregations.setTotalSqlQueries(totalQueryCount); + } + + // 5. Populate and return the result. + return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations); + }, + MetricRegistry.name(this.getClass(), "query")); } @Action(name = ACTION_QUERY_RANGE) @Nonnull @WithSpan - public Task<UsageQueryResult> queryRange(@ActionParam(PARAM_RESOURCE) @Nonnull String resource, - @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, @ActionParam(PARAM_RANGE) UsageTimeRange range) { + public Task<UsageQueryResult> queryRange( + @ActionParam(PARAM_RESOURCE) @Nonnull String resource, + @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, + @ActionParam(PARAM_RANGE) UsageTimeRange range) { Authentication auth = AuthenticationContext.getAuthentication(); Urn resourceUrn = UrnUtils.getUrn(resource); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to query usage."); + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage."); } final long now = Instant.now().toEpochMilli(); return this.query(resource, duration, convertRangeToStartTime(range, now), now, null); @@ -418,7 +486,8 @@ private void ingest(@Nonnull UsageAggregation bucket) { datasetUsageStatistics.setUserCounts(datasetUserUsageCountsArray); } if (aggregationMetrics.hasFields()) { - DatasetFieldUsageCountsArray datasetFieldUsageCountsArray = new DatasetFieldUsageCountsArray(); + DatasetFieldUsageCountsArray datasetFieldUsageCountsArray = + new DatasetFieldUsageCountsArray(); for (FieldUsageCounts f : aggregationMetrics.getFields()) { DatasetFieldUsageCounts datasetFieldUsageCounts = new DatasetFieldUsageCounts(); datasetFieldUsageCounts.setFieldPath(f.getFieldName()); @@ -431,17 +500,23 @@ private void ingest(@Nonnull UsageAggregation bucket) { Map<String, JsonNode> documents; try { documents = - TimeseriesAspectTransformer.transform(bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), - null); + TimeseriesAspectTransformer.transform( + bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), null); } catch (JsonProcessingException e) { log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; } // 3. Upsert the exploded documents to timeseries aspect service. 
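For orientation, the three ingest steps above condense to the following sketch; it reuses the identifiers from this method (so it is only a restatement of existing behavior, not new logic introduced by the patch):

// Condensed view of ingest(bucket): build the aspect, explode it into one document
// per timeseries grain, then upsert each exploded document into the timeseries index.
try {
  DatasetUsageStatistics stats =
      new DatasetUsageStatistics().setTimestampMillis(bucket.getBucket());
  Map<String, JsonNode> docs =
      TimeseriesAspectTransformer.transform(
          bucket.getResource(), stats, getUsageStatsAspectSpec(), null);
  docs.forEach(
      (docId, doc) ->
          _timeseriesAspectService.upsertDocument(
              USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, docId, doc));
} catch (JsonProcessingException e) {
  log.error("Failed to generate timeseries document from aspect: {}", e.toString());
}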
- documents.entrySet().forEach(document -> { - _timeseriesAspectService.upsertDocument(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, document.getKey(), - document.getValue()); - }); + documents + .entrySet() + .forEach( + document -> { + _timeseriesAspectService.upsertDocument( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + document.getKey(), + document.getValue()); + }); } @Nonnull diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java index 351a3d8f24e36..d6eeb1a01ac15 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,9 +18,9 @@ import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.UpdateAspectResult; import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; -import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -27,15 +30,10 @@ import com.linkedin.mxe.MetadataChangeProposal; import java.net.URISyntaxException; import java.util.List; - import mock.MockEntityRegistry; import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - public class AspectResourceTest { private AspectResource _aspectResource; private EntityService _entityService; @@ -54,7 +52,9 @@ public void setup() { _entityRegistry = new MockEntityRegistry(); _updateIndicesService = mock(UpdateIndicesService.class); _preProcessHooks = mock(PreProcessHooks.class); - _entityService = new EntityServiceImpl(_aspectDao, _producer, _entityRegistry, false, _updateIndicesService, _preProcessHooks); + _entityService = + new EntityServiceImpl( + _aspectDao, _producer, _entityRegistry, false, _updateIndicesService, _preProcessHooks); _authorizer = mock(Authorizer.class); _aspectResource.setAuthorizer(_authorizer); _aspectResource.setEntityService(_entityService); @@ -82,36 +82,49 @@ public void testAsyncDefaultAspects() throws URISyntaxException { reset(_producer, _aspectDao); - UpsertBatchItem req = UpsertBatchItem.builder() + UpsertBatchItem req = + UpsertBatchItem.builder() .urn(urn) .aspectName(mcp.getAspectName()) .aspect(mcp.getAspect()) .metadataChangeProposal(mcp) .build(_entityRegistry); when(_aspectDao.runInTransactionWithRetry(any(), any(), anyInt())) - .thenReturn(List.of( - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name1")) - .auditStamp(new AuditStamp()) - .request(req).build(), - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name2")) - .auditStamp(new AuditStamp()) - .request(req).build(), - 
UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name3")) - .auditStamp(new AuditStamp()) - .request(req).build(), - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name4")) - .auditStamp(new AuditStamp()) - .request(req).build(), - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name5")) - .auditStamp(new AuditStamp()) - .request(req).build())); + .thenReturn( + List.of( + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name1")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name2")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name3")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name4")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name5")) + .auditStamp(new AuditStamp()) + .request(req) + .build())); _aspectResource.ingestProposal(mcp, "false"); - verify(_producer, times(5)).produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class)); + verify(_producer, times(5)) + .produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class)); verifyNoMoreInteractions(_producer); } } diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java index 470c6e87040ec..bdfe906f42af9 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java @@ -1,18 +1,16 @@ package com.linkedin.metadata.resources.operations; +import static org.testng.AssertJUnit.*; + import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.util.Pair; import java.util.List; import mock.MockTimeseriesAspectService; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class OperationsResourceTest { private static final String TASK_ID = "taskId123"; - @Test public void testDryRun() { TimeseriesAspectService mockTimeseriesAspectService = new MockTimeseriesAspectService(); @@ -20,11 +18,13 @@ public void testDryRun() { String aspectName = "datasetusagestatistics"; long endTimeMillis = 3000; OperationsResource testResource = new OperationsResource(mockTimeseriesAspectService); - String output = testResource.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, null, - null, null, null); + String output = + testResource.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, true, null, null, null, null); assertTrue(output.contains("This was a dry run")); - output = testResource.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, false, null, - null, null, null); + output = + testResource.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, false, null, null, null, null); assertEquals(TASK_ID, output); } @@ -42,59 
+42,113 @@ public void testIsTaskIdValid() { @Test public void testForceFlags() { final String reindexTaskId = "REINDEX_TASK_ID"; - TimeseriesAspectService mockTimeseriesAspectServiceWouldDeleteByQuery = new MockTimeseriesAspectService(); - TimeseriesAspectService mockTimeseriesAspectServiceWouldReindex = new MockTimeseriesAspectService(30, 20, reindexTaskId); + TimeseriesAspectService mockTimeseriesAspectServiceWouldDeleteByQuery = + new MockTimeseriesAspectService(); + TimeseriesAspectService mockTimeseriesAspectServiceWouldReindex = + new MockTimeseriesAspectService(30, 20, reindexTaskId); String entityType = "dataset"; String aspectName = "datasetusagestatistics"; long endTimeMillis = 3000; - OperationsResource testResourceWouldReindex = new OperationsResource(mockTimeseriesAspectServiceWouldReindex); - OperationsResource testResourceWouldDeleteByQuery = new OperationsResource(mockTimeseriesAspectServiceWouldDeleteByQuery); + OperationsResource testResourceWouldReindex = + new OperationsResource(mockTimeseriesAspectServiceWouldReindex); + OperationsResource testResourceWouldDeleteByQuery = + new OperationsResource(mockTimeseriesAspectServiceWouldDeleteByQuery); - String result = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, true, true); + String result = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, true, null, null, true, true); String errorIfFlagsAreIncompatable = "please only set forceReindex OR forceDeleteByQuery flags"; assertEquals(errorIfFlagsAreIncompatable, result); - - result = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, false, false); + result = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, true, null, null, false, false); assertEquals(errorIfFlagsAreIncompatable, result); - - List<Pair<Boolean, Boolean>> - validOptionsNothingForced = List.of(Pair.of(null, null), Pair.of(null, false), Pair.of(false, null)); + List<Pair<Boolean, Boolean>> validOptionsNothingForced = + List.of(Pair.of(null, null), Pair.of(null, false), Pair.of(false, null)); for (Pair<Boolean, Boolean> values : validOptionsNothingForced) { - String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String reindexResult = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); assertNotSame(errorIfFlagsAreIncompatable, reindexResult); assertTrue(reindexResult.contains("Reindexing the aspect without the deleted records")); - String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String deleteResult = + testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); assertNotSame(errorIfFlagsAreIncompatable, deleteResult); assertTrue(deleteResult.contains("Issuing a delete by query request. 
")); } - List<Pair<Boolean, Boolean>> validOptionsForceDeleteByQuery = List.of(Pair.of(true, null), Pair.of(true, false)); + List<Pair<Boolean, Boolean>> validOptionsForceDeleteByQuery = + List.of(Pair.of(true, null), Pair.of(true, false)); for (Pair<Boolean, Boolean> values : validOptionsForceDeleteByQuery) { - String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); - String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String reindexResult = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); + String deleteResult = + testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); for (String res : List.of(reindexResult, deleteResult)) { assertNotSame(errorIfFlagsAreIncompatable, res); assertTrue(res.contains("Issuing a delete by query request. ")); } } - List<Pair<Boolean, Boolean>> validOptionsForceReindex = List.of(Pair.of(null, true), Pair.of(false, true)); + List<Pair<Boolean, Boolean>> validOptionsForceReindex = + List.of(Pair.of(null, true), Pair.of(false, true)); for (Pair<Boolean, Boolean> values : validOptionsForceReindex) { - String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); - String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String reindexResult = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); + String deleteResult = + testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); for (String res : List.of(reindexResult, deleteResult)) { assertNotSame(errorIfFlagsAreIncompatable, res); assertTrue(res.contains("Reindexing the aspect without the deleted records")); } } } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java index 81d2bbd88b3e6..2a12ecf6866bb 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java +++ b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java @@ -16,7 +16,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class MockTimeseriesAspectService implements TimeseriesAspectService { public static final long DEFAULT_COUNT = 30; @@ -32,6 +31,7 @@ public MockTimeseriesAspectService() { this._filteredCount = DEFAULT_FILTERED_COUNT; this._taskId = DEFAULT_TASK_ID; } + public MockTimeseriesAspectService(long count, long filteredCount, String taskId) { this._count = count; this._filteredCount = filteredCount; @@ -39,12 +39,11 @@ public MockTimeseriesAspectService(long count, long filteredCount, String taskId 
} @Override - public void configure() { - - } + public void configure() {} @Override - public long countByFilter(@Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter) { + public long countByFilter( + @Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter) { if (filter != null && !filter.equals(new Filter())) { return _filteredCount; } @@ -53,36 +52,51 @@ public long countByFilter(@Nonnull String entityName, @Nonnull String aspectName @Nonnull @Override - public List<EnvelopedAspect> getAspectValues(@Nonnull Urn urn, @Nonnull String entityName, - @Nonnull String aspectName, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, - @Nullable Integer limit, @Nullable Filter filter, @Nullable SortCriterion sort) { + public List<EnvelopedAspect> getAspectValues( + @Nonnull Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort) { return List.of(); } @Nonnull @Override - public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter, + public GenericTable getAggregatedStats( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull AggregationSpec[] aggregationSpecs, + @Nullable Filter filter, @Nullable GroupingBucket[] groupingBuckets) { return new GenericTable(); } @Nonnull @Override - public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter) { + public DeleteAspectValuesResult deleteAspectValues( + @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) { return new DeleteAspectValuesResult(); } @Nonnull @Override - public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { + public String deleteAspectValuesAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, + @Nonnull BatchWriteOperationsOptions options) { return _taskId; } @Override - public String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { return _taskId; } @@ -94,10 +108,11 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId) } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull String docId, - @Nonnull JsonNode document) { - - } + public void upsertDocument( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull String docId, + @Nonnull JsonNode document) {} @Override public List<TimeseriesIndexSizeResult> getIndexSizes() { diff --git a/metadata-service/schema-registry-api/build.gradle b/metadata-service/schema-registry-api/build.gradle index 290126836eb4a..077d7d4f2d6a4 100644 --- a/metadata-service/schema-registry-api/build.gradle +++ b/metadata-service/schema-registry-api/build.gradle @@ -45,5 +45,3 @@ tasks.register('generateOpenApiPojos', GenerateSwaggerCode) { sourceSets.main.java.srcDirs "${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" -// Disable checkstyle 
for this module. -checkstyleMain.source = "${projectDir}/src/main/java" diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java index dc7fd5e20d9cd..58058dc3332b0 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java @@ -1,24 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Compatibility check response - */ +/** Compatibility check response */ @io.swagger.v3.oas.annotations.media.Schema(description = "Compatibility check response") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class CompatibilityCheckResponse { +public class CompatibilityCheckResponse { @JsonProperty("is_compatible") private Boolean isCompatible = null; @@ -34,11 +32,12 @@ public CompatibilityCheckResponse isCompatible(Boolean isCompatible) { /** * Whether the compared schemas are compatible + * * @return isCompatible - **/ - @io.swagger.v3.oas.annotations.media.Schema(description = "Whether the compared schemas are compatible") - - public Boolean isIsCompatible() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + description = "Whether the compared schemas are compatible") + public Boolean isIsCompatible() { return isCompatible; } @@ -61,11 +60,11 @@ public CompatibilityCheckResponse addMessagesItem(String messagesItem) { /** * Error messages + * * @return messages - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "[]", description = "Error messages") - - public List<String> getMessages() { + public List<String> getMessages() { return messages; } @@ -73,7 +72,6 @@ public void setMessages(List<String> messages) { this.messages = messages; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -83,8 +81,8 @@ public boolean equals(java.lang.Object o) { return false; } CompatibilityCheckResponse compatibilityCheckResponse = (CompatibilityCheckResponse) o; - return Objects.equals(this.isCompatible, compatibilityCheckResponse.isCompatible) && - Objects.equals(this.messages, compatibilityCheckResponse.messages); + return Objects.equals(this.isCompatible, compatibilityCheckResponse.isCompatible) + && Objects.equals(this.messages, compatibilityCheckResponse.messages); } @Override @@ -96,7 +94,7 @@ public 
int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class CompatibilityCheckResponse {\n"); - + sb.append(" isCompatible: ").append(toIndentedString(isCompatible)).append("\n"); sb.append(" messages: ").append(toIndentedString(messages)).append("\n"); sb.append("}"); @@ -104,8 +102,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java index 9e338b232e8da..0a223a88cfd33 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java @@ -1,40 +1,36 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Config - */ +/** Config */ @io.swagger.v3.oas.annotations.media.Schema(description = "Config") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Config { +public class Config { - /** - * Compatibility Level - */ + /** Compatibility Level */ public enum CompatibilityLevelEnum { BACKWARD("BACKWARD"), - + BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - + FORWARD("FORWARD"), - + FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - + FULL("FULL"), - + FULL_TRANSITIVE("FULL_TRANSITIVE"), - + NONE("NONE"); private String value; @@ -59,6 +55,7 @@ public static CompatibilityLevelEnum fromValue(String text) { return null; } } + @JsonProperty("compatibilityLevel") private CompatibilityLevelEnum compatibilityLevel = null; @@ -69,11 +66,13 @@ public Config compatibilityLevel(CompatibilityLevelEnum compatibilityLevel) { /** * Compatibility Level + * * @return compatibilityLevel - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "FULL_TRANSITIVE", description = "Compatibility Level") - - public CompatibilityLevelEnum getCompatibilityLevel() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "FULL_TRANSITIVE", + description = "Compatibility Level") + 
public CompatibilityLevelEnum getCompatibilityLevel() { return compatibilityLevel; } @@ -81,7 +80,6 @@ public void setCompatibilityLevel(CompatibilityLevelEnum compatibilityLevel) { this.compatibilityLevel = compatibilityLevel; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -103,15 +101,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Config {\n"); - + sb.append(" compatibilityLevel: ").append(toIndentedString(compatibilityLevel)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -119,4 +116,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java index 5b586e184c6ce..b179149b33d01 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java @@ -1,40 +1,36 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Config update request - */ +/** Config update request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Config update request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ConfigUpdateRequest { +public class ConfigUpdateRequest { - /** - * Compatibility Level - */ + /** Compatibility Level */ public enum CompatibilityEnum { BACKWARD("BACKWARD"), - + BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - + FORWARD("FORWARD"), - + FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - + FULL("FULL"), - + FULL_TRANSITIVE("FULL_TRANSITIVE"), - + NONE("NONE"); private String value; @@ -59,6 +55,7 @@ public static CompatibilityEnum fromValue(String text) { return null; } } + @JsonProperty("compatibility") private CompatibilityEnum compatibility = null; @@ -69,11 +66,13 @@ public ConfigUpdateRequest compatibility(CompatibilityEnum compatibility) { /** * Compatibility Level + * * @return 
compatibility - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "FULL_TRANSITIVE", description = "Compatibility Level") - - public CompatibilityEnum getCompatibility() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "FULL_TRANSITIVE", + description = "Compatibility Level") + public CompatibilityEnum getCompatibility() { return compatibility; } @@ -81,7 +80,6 @@ public void setCompatibility(CompatibilityEnum compatibility) { this.compatibility = compatibility; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -103,15 +101,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ConfigUpdateRequest {\n"); - + sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -119,4 +116,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java index f462d359bdea6..2f20d77b66137 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Error message - */ +/** Error message */ @io.swagger.v3.oas.annotations.media.Schema(description = "Error message") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ErrorMessage { +public class ErrorMessage { @JsonProperty("error_code") private Integer errorCode = null; @@ -31,11 +28,11 @@ public ErrorMessage errorCode(Integer errorCode) { /** * Error code + * * @return errorCode - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Error code") - - public Integer getErrorCode() { + public Integer getErrorCode() { return errorCode; } @@ -50,11 +47,11 @@ public ErrorMessage message(String message) { /** * Detailed error message + * * @return message - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Detailed 
error message") - - public String getMessage() { + public String getMessage() { return message; } @@ -62,7 +59,6 @@ public void setMessage(String message) { this.message = message; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -72,8 +68,8 @@ public boolean equals(java.lang.Object o) { return false; } ErrorMessage errorMessage = (ErrorMessage) o; - return Objects.equals(this.errorCode, errorMessage.errorCode) && - Objects.equals(this.message, errorMessage.message); + return Objects.equals(this.errorCode, errorMessage.errorCode) + && Objects.equals(this.message, errorMessage.message); } @Override @@ -85,7 +81,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ErrorMessage {\n"); - + sb.append(" errorCode: ").append(toIndentedString(errorCode)).append("\n"); sb.append(" message: ").append(toIndentedString(message)).append("\n"); sb.append("}"); @@ -93,8 +89,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -102,4 +97,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java index 2c6642c97c507..5a418401278d3 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java @@ -1,34 +1,30 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema Registry operating mode - */ +/** Schema Registry operating mode */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema Registry operating mode") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Mode { +public class Mode { - /** - * Schema Registry operating mode - */ + /** Schema Registry operating mode */ public enum ModeEnum { READWRITE("READWRITE"), - + READONLY("READONLY"), - + READONLY_OVERRIDE("READONLY_OVERRIDE"), - + IMPORT("IMPORT"); private String value; @@ -53,6 +49,7 @@ public static ModeEnum 
fromValue(String text) { return null; } } + @JsonProperty("mode") private ModeEnum mode = null; @@ -63,11 +60,13 @@ public Mode mode(ModeEnum mode) { /** * Schema Registry operating mode + * * @return mode - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "READWRITE", description = "Schema Registry operating mode") - - public ModeEnum getMode() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "READWRITE", + description = "Schema Registry operating mode") + public ModeEnum getMode() { return mode; } @@ -75,7 +74,6 @@ public void setMode(ModeEnum mode) { this.mode = mode; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -97,15 +95,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Mode {\n"); - + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java index c2fffea0034f9..2cbbe4d5351d8 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java @@ -1,34 +1,30 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Mode update request - */ +/** Mode update request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Mode update request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ModeUpdateRequest { +public class ModeUpdateRequest { - /** - * Schema Registry operating mode - */ + /** Schema Registry operating mode */ public enum ModeEnum { READWRITE("READWRITE"), - + READONLY("READONLY"), - + READONLY_OVERRIDE("READONLY_OVERRIDE"), - + IMPORT("IMPORT"); private String value; @@ -53,6 +49,7 @@ public static ModeEnum fromValue(String text) { 
return null; } } + @JsonProperty("mode") private ModeEnum mode = null; @@ -63,11 +60,13 @@ public ModeUpdateRequest mode(ModeEnum mode) { /** * Schema Registry operating mode + * * @return mode - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "READWRITE", description = "Schema Registry operating mode") - - public ModeEnum getMode() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "READWRITE", + description = "Schema Registry operating mode") + public ModeEnum getMode() { return mode; } @@ -75,7 +74,6 @@ public void setMode(ModeEnum mode) { this.mode = mode; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -97,15 +95,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ModeUpdateRequest {\n"); - + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java index 4f535f343f433..d7b2b28123b6f 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java @@ -1,25 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.datahubproject.schema_registry.openapi.generated.SchemaReference; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema register request - */ +/** Schema register request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema register request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class RegisterSchemaRequest { +public class RegisterSchemaRequest { @JsonProperty("version") private Integer version = null; @@ -44,11 +41,11 @@ public RegisterSchemaRequest version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Version 
number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -63,11 +60,12 @@ public RegisterSchemaRequest id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -82,11 +80,11 @@ public RegisterSchemaRequest schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -109,11 +107,12 @@ public RegisterSchemaRequest addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") - @Valid - public List<SchemaReference> getReferences() { + @Valid + public List<SchemaReference> getReferences() { return references; } @@ -128,11 +127,11 @@ public RegisterSchemaRequest schema(String schema) { /** * Schema definition string + * * @return schema - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema definition string") - - public String getSchema() { + public String getSchema() { return schema; } @@ -140,7 +139,6 @@ public void setSchema(String schema) { this.schema = schema; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -150,11 +148,11 @@ public boolean equals(java.lang.Object o) { return false; } RegisterSchemaRequest registerSchemaRequest = (RegisterSchemaRequest) o; - return Objects.equals(this.version, registerSchemaRequest.version) && - Objects.equals(this.id, registerSchemaRequest.id) && - Objects.equals(this.schemaType, registerSchemaRequest.schemaType) && - Objects.equals(this.references, registerSchemaRequest.references) && - Objects.equals(this.schema, registerSchemaRequest.schema); + return Objects.equals(this.version, registerSchemaRequest.version) + && Objects.equals(this.id, registerSchemaRequest.id) + && Objects.equals(this.schemaType, registerSchemaRequest.schemaType) + && Objects.equals(this.references, registerSchemaRequest.references) + && Objects.equals(this.schema, registerSchemaRequest.schema); } @Override @@ -166,7 +164,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RegisterSchemaRequest {\n"); - + sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" schemaType: ").append(toIndentedString(schemaType)).append("\n"); @@ -177,8 +175,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -186,4 +183,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java index 7cdcb1093f34b..54e480078233b 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema register response - */ +/** Schema register response */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema register response") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class RegisterSchemaResponse { +public class RegisterSchemaResponse { @JsonProperty("id") private Integer id = null; @@ -28,11 +25,13 @@ public RegisterSchemaResponse id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "100001", description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "100001", + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -40,7 +39,6 @@ public void setId(Integer id) { this.id = id; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -62,15 +60,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RegisterSchemaResponse {\n"); - + sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -78,4 +75,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java index b3ca087bdc5f3..cc4d5e7694976 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java @@ -8,14 +8,14 @@ import javax.validation.Valid; import org.springframework.validation.annotation.Validated; -/** - * Schema - */ +/** Schema */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Schema { +public class Schema { @JsonProperty("subject") private String subject = null; @@ -43,11 +43,11 @@ public Schema subject(String subject) { /** * Name of the subject + * * @return subject - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the subject") - - public String getSubject() { + public String getSubject() { return subject; } @@ -62,11 +62,11 @@ public Schema version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -81,11 +81,13 @@ public Schema id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "100001", description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "100001", + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -100,11 +102,11 @@ public Schema schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "AVRO", description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -127,11 +129,12 @@ public Schema addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") - @Valid - public List<SchemaReference> getReferences() { + @Valid + public List<SchemaReference> getReferences() { return references; } @@ -146,11 +149,13 @@ public Schema schema(String schema) { /** * Schema definition string + * * @return schema - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "{\"schema\": \"{\"type\": \"string\"}\"}", description = "Schema definition string") - - public String getSchema() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "{\"schema\": 
\"{\"type\": \"string\"}\"}", + description = "Schema definition string") + public String getSchema() { return schema; } @@ -158,7 +163,6 @@ public void setSchema(String schema) { this.schema = schema; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -168,12 +172,12 @@ public boolean equals(java.lang.Object o) { return false; } Schema schema = (Schema) o; - return Objects.equals(this.subject, schema.subject) && - Objects.equals(this.version, schema.version) && - Objects.equals(this.id, schema.id) && - Objects.equals(this.schemaType, schema.schemaType) && - Objects.equals(this.references, schema.references) && - Objects.equals(this.schema, schema.schema); + return Objects.equals(this.subject, schema.subject) + && Objects.equals(this.version, schema.version) + && Objects.equals(this.id, schema.id) + && Objects.equals(this.schemaType, schema.schemaType) + && Objects.equals(this.references, schema.references) + && Objects.equals(this.schema, schema.schema); } @Override @@ -185,7 +189,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Schema {\n"); - + sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); @@ -197,8 +201,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -206,4 +209,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java index 96fb685dc1bfc..a2dffa59778ed 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema reference - */ +/** Schema reference */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema reference") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class 
SchemaReference { +public class SchemaReference { @JsonProperty("name") private String name = null; @@ -34,11 +31,13 @@ public SchemaReference name(String name) { /** * Reference name + * * @return name - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "io.confluent.kafka.example.User", description = "Reference name") - - public String getName() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "io.confluent.kafka.example.User", + description = "Reference name") + public String getName() { return name; } @@ -53,11 +52,13 @@ public SchemaReference subject(String subject) { /** * Name of the referenced subject + * * @return subject - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the referenced subject") - - public String getSubject() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "User", + description = "Name of the referenced subject") + public String getSubject() { return subject; } @@ -72,11 +73,13 @@ public SchemaReference version(Integer version) { /** * Version number of the referenced subject + * * @return version - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number of the referenced subject") - - public Integer getVersion() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "1", + description = "Version number of the referenced subject") + public Integer getVersion() { return version; } @@ -84,7 +87,6 @@ public void setVersion(Integer version) { this.version = version; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -94,9 +96,9 @@ public boolean equals(java.lang.Object o) { return false; } SchemaReference schemaReference = (SchemaReference) o; - return Objects.equals(this.name, schemaReference.name) && - Objects.equals(this.subject, schemaReference.subject) && - Objects.equals(this.version, schemaReference.version); + return Objects.equals(this.name, schemaReference.name) + && Objects.equals(this.subject, schemaReference.subject) + && Objects.equals(this.version, schemaReference.version); } @Override @@ -108,7 +110,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaReference {\n"); - + sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); @@ -117,8 +119,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -126,4 +127,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java index d2832462a10c6..909416e6976b6 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java @@ -1,21 +1,18 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * SchemaRegistryServerVersion - */ +/** SchemaRegistryServerVersion */ @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaRegistryServerVersion { +public class SchemaRegistryServerVersion { @JsonProperty("version") private String version = null; @@ -30,11 +27,11 @@ public SchemaRegistryServerVersion version(String version) { /** * Get version + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getVersion() { + public String getVersion() { return version; } @@ -49,11 +46,11 @@ public SchemaRegistryServerVersion commitId(String commitId) { /** * Get commitId + * * @return commitId - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getCommitId() { + public String getCommitId() { return commitId; } @@ -61,7 +58,6 @@ public void setCommitId(String commitId) { this.commitId = commitId; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -71,8 +67,8 @@ public boolean equals(java.lang.Object o) { return false; } SchemaRegistryServerVersion schemaRegistryServerVersion = (SchemaRegistryServerVersion) o; - return Objects.equals(this.version, schemaRegistryServerVersion.version) && - Objects.equals(this.commitId, schemaRegistryServerVersion.commitId); + return Objects.equals(this.version, schemaRegistryServerVersion.version) + && Objects.equals(this.commitId, schemaRegistryServerVersion.commitId); } @Override @@ -84,7 +80,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaRegistryServerVersion {\n"); - + sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" commitId: 
").append(toIndentedString(commitId)).append("\n"); sb.append("}"); @@ -92,8 +88,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -101,4 +96,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java index b2ea78e35ce22..977f5d410d667 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java @@ -1,25 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.datahubproject.schema_registry.openapi.generated.SchemaReference; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema definition - */ +/** Schema definition */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema definition") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaString { +public class SchemaString { @JsonProperty("schemaType") private String schemaType = null; @@ -41,11 +38,11 @@ public SchemaString schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "AVRO", description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -60,11 +57,13 @@ public SchemaString schema(String schema) { /** * Schema string identified by the ID + * * @return schema - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "{\"schema\": \"{\"type\": \"string\"}\"}", description = "Schema string identified by the ID") - - public String getSchema() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "{\"schema\": \"{\"type\": \"string\"}\"}", + description = "Schema string identified by the ID") + public String getSchema() { return schema; } @@ -87,11 +86,12 @@ public SchemaString addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") 
- @Valid - public List<SchemaReference> getReferences() { + @Valid + public List<SchemaReference> getReferences() { return references; } @@ -106,11 +106,11 @@ public SchemaString maxId(Integer maxId) { /** * Maximum ID + * * @return maxId - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Maximum ID") - - public Integer getMaxId() { + public Integer getMaxId() { return maxId; } @@ -118,7 +118,6 @@ public void setMaxId(Integer maxId) { this.maxId = maxId; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -128,10 +127,10 @@ public boolean equals(java.lang.Object o) { return false; } SchemaString schemaString = (SchemaString) o; - return Objects.equals(this.schemaType, schemaString.schemaType) && - Objects.equals(this.schema, schemaString.schema) && - Objects.equals(this.references, schemaString.references) && - Objects.equals(this.maxId, schemaString.maxId); + return Objects.equals(this.schemaType, schemaString.schemaType) + && Objects.equals(this.schema, schemaString.schema) + && Objects.equals(this.references, schemaString.references) + && Objects.equals(this.maxId, schemaString.maxId); } @Override @@ -143,7 +142,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaString {\n"); - + sb.append(" schemaType: ").append(toIndentedString(schemaType)).append("\n"); sb.append(" schema: ").append(toIndentedString(schema)).append("\n"); sb.append(" references: ").append(toIndentedString(references)).append("\n"); @@ -153,8 +152,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -162,4 +160,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java index 2ae476b0c3efc..e215d324f536e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java @@ -1,24 +1,21 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * ServerClusterId - */ +/** ServerClusterId */ @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ServerClusterId { +public class ServerClusterId { @JsonProperty("scope") @Valid @@ -42,11 +39,11 @@ public ServerClusterId putScopeItem(String key, Object scopeItem) { /** * Get scope + * * @return scope - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public Map<String, Object> getScope() { + public Map<String, Object> getScope() { return scope; } @@ -61,11 +58,11 @@ public ServerClusterId id(String id) { /** * Get id + * * @return id - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getId() { + public String getId() { return id; } @@ -73,7 +70,6 @@ public void setId(String id) { this.id = id; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -83,8 +79,8 @@ public boolean equals(java.lang.Object o) { return false; } ServerClusterId serverClusterId = (ServerClusterId) o; - return Objects.equals(this.scope, serverClusterId.scope) && - Objects.equals(this.id, serverClusterId.id); + return Objects.equals(this.scope, serverClusterId.scope) + && Objects.equals(this.id, serverClusterId.id); } @Override @@ -96,7 +92,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ServerClusterId {\n"); - + sb.append(" scope: ").append(toIndentedString(scope)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); @@ -104,8 +100,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). 
+ * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +108,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java index 44379af934d5d..32b8979a0b71a 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Subject version pair - */ +/** Subject version pair */ @io.swagger.v3.oas.annotations.media.Schema(description = "Subject version pair") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SubjectVersion { +public class SubjectVersion { @JsonProperty("subject") private String subject = null; @@ -31,11 +28,11 @@ public SubjectVersion subject(String subject) { /** * Name of the subject + * * @return subject - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the subject") - - public String getSubject() { + public String getSubject() { return subject; } @@ -50,11 +47,11 @@ public SubjectVersion version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -62,7 +59,6 @@ public void setVersion(Integer version) { this.version = version; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -72,8 +68,8 @@ public boolean equals(java.lang.Object o) { return false; } SubjectVersion subjectVersion = (SubjectVersion) o; - return Objects.equals(this.subject, subjectVersion.subject) && - Objects.equals(this.version, subjectVersion.version); + return Objects.equals(this.subject, subjectVersion.subject) + && Objects.equals(this.version, subjectVersion.version); } @Override @@ -85,7 +81,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SubjectVersion {\n"); - + sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: 
").append(toIndentedString(version)).append("\n"); sb.append("}"); @@ -93,8 +89,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -102,4 +97,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java index e30376002ae7b..6049cb96e1e45 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.CompatibilityCheckResponse; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.datahubproject.schema_registry.openapi.generated.RegisterSchemaRequest; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,96 +26,239 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = 
"io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface CompatibilityApi { - Logger log = LoggerFactory.getLogger(CompatibilityApi.class); + Logger log = LoggerFactory.getLogger(CompatibilityApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Test schema compatibility against a particular schema subject-version", description = "Test input schema against a particular version of a subject's schema for compatibility. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", tags={ "Compatibility (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Compatibility check result.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = CompatibilityCheckResponse.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/compatibility/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName(@Parameter(in = ParameterIn.PATH, description = "Subject of the schema version against which compatibility is to be tested", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the subject's schema against which compatibility is to be tested. 
Valid values for versionId are between [1,2^31-1] or the string \"latest\".\"latest\" checks compatibility of the input schema with the last registered schema under the specified subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to return detailed error messages" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "verbose", required = false) Boolean verbose) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", CompatibilityCheckResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); + @Operation( + summary = "Test schema compatibility against a particular schema subject-version", + description = + "Test input schema against a particular version of a subject's schema for compatibility. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", + tags = {"Compatibility (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Compatibility check result.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = CompatibilityCheckResponse.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/compatibility/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName( + @Parameter( + in = ParameterIn.PATH, + description = + "Subject of the schema version against which compatibility is to be tested", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the subject's schema against which compatibility is to be tested. Valid values for versionId are between [1,2^31-1] or the string \"latest\".\"latest\" checks compatibility of the input schema with the last registered schema under the specified subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return detailed error messages", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "verbose", required = false) + Boolean verbose) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", + CompatibilityCheckResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Test schema compatibility against all schemas under a subject", description = "Test input schema against a subject's schemas for compatibility, based on the configured compatibility level of the subject. In other words, it will perform the same compatibility check as register for that subject. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). 
If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", tags={ "Compatibility (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Compatibility check result.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = CompatibilityCheckResponse.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/compatibility/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject(@Parameter(in = ParameterIn.PATH, description = "Subject of the schema version against which compatibility is to be tested", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to return detailed error messages" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "verbose", required = false) Boolean verbose) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", CompatibilityCheckResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); + @Operation( + summary = "Test schema compatibility against all schemas under a subject", + description = + "Test input schema against a subject's schemas for compatibility, based on the configured compatibility level of the subject. In other words, it will perform the same compatibility check as register for that subject. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). 
If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", + tags = {"Compatibility (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Compatibility check result.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = CompatibilityCheckResponse.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/compatibility/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject( + @Parameter( + in = ParameterIn.PATH, + description = + "Subject of the schema version against which compatibility is to be tested", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return detailed error messages", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "verbose", required = false) + Boolean verbose) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", + CompatibilityCheckResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java index c30a01517d7d3..eac2fe8a3a02d 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java +++ 
b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class CompatibilityApiController implements CompatibilityApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public CompatibilityApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public CompatibilityApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java index f041211c6db4d..2e3df2f62fc32 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. 
*/ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.Config; import io.datahubproject.schema_registry.openapi.generated.ConfigUpdateRequest; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,200 +26,462 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ConfigApi { - Logger log = LoggerFactory.getLogger(ConfigApi.class); + Logger log = LoggerFactory.getLogger(ConfigApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete subject compatibility level", description = "Deletes the specified subject-level compatibility level config and reverts to the global default.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. 
Returns old compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<String> deleteSubjectConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"NONE\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Delete subject compatibility level", + description = + "Deletes the specified subject-level compatibility level config and reverts to the global default.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns old compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<String> deleteSubjectConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"NONE\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Delete global compatibility level", description = "Deletes the global compatibility level config and reverts to the default.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old global compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<String> deleteTopLevelConfig() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"NONE\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Delete global compatibility level", + description = "Deletes the global compatibility level config and reverts to the default.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. 
Returns old global compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<String> deleteTopLevelConfig() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"NONE\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get subject compatibility level", description = "Retrieves compatibility level for a subject.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The subject compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Config.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Config> getSubjectLevelConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to return the global compatibility level if subject compatibility level not found" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "defaultToGlobal", required = false) Boolean defaultToGlobal) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Get subject compatibility level", + description = "Retrieves compatibility level for a subject.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The subject compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Config.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Config> getSubjectLevelConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to return the global compatibility level if subject compatibility level not found", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "defaultToGlobal", required = false) + Boolean defaultToGlobal) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get global compatibility level", description = "Retrieves the global compatibility level.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The global compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Config.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Config> getTopLevelConfig() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Get global compatibility level", + description = "Retrieves the global compatibility level.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The global compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Config.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Config> getTopLevelConfig() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update subject compatibility level", description = "Update compatibility level for the specified subject. On success, echoes the original request back to the client.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ConfigUpdateRequest.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Config Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ConfigUpdateRequest body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Update subject compatibility level", + description = + "Update compatibility level for the specified subject. On success, echoes the original request back to the client.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ConfigUpdateRequest.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Config Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ConfigUpdateRequest body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update global compatibility level", description = "Updates the global compatibility level. On success, echoes the original request back to the client.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ConfigUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ConfigUpdateRequest> updateTopLevelConfig(@Parameter(in = ParameterIn.DEFAULT, description = "Config Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ConfigUpdateRequest body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Update global compatibility level", + description = + "Updates the global compatibility level. On success, echoes the original request back to the client.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ConfigUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ConfigUpdateRequest> updateTopLevelConfig( + @Parameter( + in = ParameterIn.DEFAULT, + description = "Config Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ConfigUpdateRequest body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java index cd3dc84fb4588..4fd6963797de2 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ConfigApiController implements ConfigApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ConfigApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public ConfigApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - 
@Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java index 9ab0bc2388f7d..01b90a3c98c2d 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java @@ -1,84 +1,101 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; -import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import com.fasterxml.jackson.databind.ObjectMapper; +import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.validation.annotation.Validated; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ContextsApi { - Logger log = LoggerFactory.getLogger(ContextsApi.class); + Logger log = LoggerFactory.getLogger(ContextsApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default 
Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "List contexts", description = "Retrieves a list of contexts.", tags={ "Contexts (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The contexts.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/contexts", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> listContexts() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \".\", \".\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ContextsApi interface so no example is generated"); + @Operation( + summary = "List contexts", + description = "Retrieves a list of contexts.", + tags = {"Contexts (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The contexts.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/contexts", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> listContexts() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \".\", \".\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ContextsApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java index faead1a2b37b0..8b601e8fb0f2e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ContextsApiController implements ContextsApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ContextsApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public ContextsApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java 
b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java index 1d69c76c86122..711029371d583 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java @@ -1,7 +1,6 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; @@ -27,69 +26,122 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface DefaultApi { - Logger log = LoggerFactory.getLogger(DefaultApi.class); + Logger log = LoggerFactory.getLogger(DefaultApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Schema Registry Root Resource", description = "The Root resource is a no-op.", tags={ }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))) }) - @RequestMapping(value = "/", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<String> get() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); + @Operation( + summary = "Schema Registry Root Resource", + description = "The Root resource is a no-op.", + tags = {}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))) + }) + @RequestMapping( + value = "/", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default 
ResponseEntity<String> get() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"\"", String.class), HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "", description = "", tags={ }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "default response", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Map.class)))) }) - @RequestMapping(value = "/", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<Map<String, String>> post(@Parameter(in = ParameterIn.DEFAULT, description = "", schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody Map<String, String> body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"key\" : \"\"\n}", Map.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); + @Operation( + summary = "", + description = "", + tags = {}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "default response", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Map.class)))) + }) + @RequestMapping( + value = "/", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<Map<String, String>> post( + @Parameter( + in = ParameterIn.DEFAULT, + description = "", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + Map<String, String> body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"key\" : \"\"\n}", Map.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + 
return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java index 53e64d43d9572..90768b88e2f28 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class DefaultApiController implements DefaultApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public DefaultApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public DefaultApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java index f2857069d05c8..7fca1cb53cfba 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. 
*/ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.datahubproject.schema_registry.openapi.generated.Mode; import io.datahubproject.schema_registry.openapi.generated.ModeUpdateRequest; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,173 +26,398 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; - -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ModeApi { - Logger log = LoggerFactory.getLogger(ModeApi.class); + Logger log = LoggerFactory.getLogger(ModeApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete subject mode", description = "Deletes the specified subject-level mode and reverts to the global default.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old mode.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<Mode> deleteSubjectMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Delete subject mode", + description = "Deletes the specified subject-level mode and reverts to the global default.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns old mode.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<Mode> deleteSubjectMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get subject mode", description = "Retrieves the subject mode.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The subject mode.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Mode> getMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to return the global mode if subject mode not found" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "defaultToGlobal", required = false) Boolean defaultToGlobal) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Get subject mode", + description = "Retrieves the subject mode.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The subject mode.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Mode> getMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return the global mode if subject mode not found", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "defaultToGlobal", required = false) + Boolean defaultToGlobal) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get global mode", description = "Retrieves global mode.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The global mode", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store") }) - @RequestMapping(value = "/mode", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Mode> getTopLevelMode() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Get global mode", + description = "Retrieves global mode.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The global mode", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "500", + description = "Error code 50001 -- Error in the backend data store") + }) + @RequestMapping( + 
value = "/mode", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Mode> getTopLevelMode() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update subject mode", description = "Update mode for the specified subject. On success, echoes the original request back to the client.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ModeUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ModeUpdateRequest> updateMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ModeUpdateRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to force update if setting mode to IMPORT and schemas currently exist" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "force", required = false) Boolean force) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Update subject mode", + description = + "Update mode for the specified subject. On success, echoes the original request back to the client.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ModeUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ModeUpdateRequest> updateMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ModeUpdateRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to force update if setting mode to IMPORT and schemas currently exist", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "force", required = false) + Boolean force) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update global mode", description = "Update global mode. On success, echoes the original request back to the client.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ModeUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ModeUpdateRequest> updateTopLevelMode(@Parameter(in = ParameterIn.DEFAULT, description = "Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ModeUpdateRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to force update if setting mode to IMPORT and schemas currently exist" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "force", required = false) Boolean force) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Update global mode", + description = + "Update global mode. On success, echoes the original request back to the client.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ModeUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ModeUpdateRequest> updateTopLevelMode( + @Parameter( + in = ParameterIn.DEFAULT, + description = "Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ModeUpdateRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to force update if setting mode to IMPORT and schemas currently exist", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "force", required = false) + Boolean force) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java index 97ae54ea6c9a2..28ad6fbdfbc12 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ModeApiController implements ModeApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ModeApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + 
@org.springframework.beans.factory.annotation.Autowired + public ModeApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java index cfb0fe183ee88..e01df38fca64a 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java @@ -1,7 +1,6 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; @@ -32,180 +31,526 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface SchemasApi { - Logger log = LoggerFactory.getLogger(SchemasApi.class); + Logger log = LoggerFactory.getLogger(SchemasApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Get schema string by ID", description = "Retrieves the schema string identified by the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = SchemaString.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<SchemaString> getSchema(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Name of the subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Desired output format, dependent on schema type" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "format", required = false) String format, @Parameter(in = ParameterIn.QUERY, description = "Whether to fetch the maximum schema identifier that exists" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "fetchMaxId", required = false, defaultValue="false") Boolean fetchMaxId) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"maxId\" : 1,\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"schemaType\" : \"AVRO\"\n}", SchemaString.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "Get schema string by ID", + description = "Retrieves the schema string identified by the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = SchemaString.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<SchemaString> getSchema( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Name of the subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Desired output format, dependent on schema type", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "format", required = false) + String format, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to fetch the maximum schema identifier that exists", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "fetchMaxId", required = false, defaultValue = "false") + Boolean fetchMaxId) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"maxId\" : 1,\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"schemaType\" : \"AVRO\"\n}", + SchemaString.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema by ID", description = "Retrieves the schema identified by the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Raw schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/schema", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<String> getSchemaOnly(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Name of the subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Desired output format, dependent on schema type" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "format", required = false) String format) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "Get schema by ID", + description = "Retrieves the schema identified by the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Raw schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/schema", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<String> getSchemaOnly( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Name of the subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Desired output format, dependent on schema type", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "format", required = false) + String format) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List supported schema types", description = "Retrieve the schema types supported by this registry.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of supported schema types.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/types", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> getSchemaTypes() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"AVRO\", \"AVRO\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List supported schema types", + description = "Retrieve the schema types supported by this registry.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of supported schema types.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/types", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> getSchemaTypes() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"AVRO\", \"AVRO\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List schemas", description = "Get the schemas matching the specified parameters.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of schemas matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<Schema>> getSchemas(@Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject prefix" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subjectPrefix", required = false) String subjectPrefix, @Parameter(in = ParameterIn.QUERY, description = "Whether to return soft deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "deleted", required = false, defaultValue="false") Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return latest schema versions only for each matching subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "latestOnly", required = false, defaultValue="false") Boolean latestOnly, @Parameter(in = ParameterIn.QUERY, description = "Pagination offset for results" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="0")) @Valid @RequestParam(value = "offset", required = false, defaultValue="0") Integer offset, @Parameter(in = ParameterIn.QUERY, description = "Pagination size for results. Ignored if negative" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="-1")) @Valid @RequestParam(value = "limit", required = false, defaultValue="-1") Integer limit) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}, {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n} ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List schemas", + description = "Get the schemas matching the specified parameters.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of schemas matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + 
@io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<Schema>> getSchemas( + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject prefix", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subjectPrefix", required = false) + String subjectPrefix, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return soft deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "deleted", required = false, defaultValue = "false") + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to return latest schema versions only for each matching subject", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "latestOnly", required = false, defaultValue = "false") + Boolean latestOnly, + @Parameter( + in = ParameterIn.QUERY, + description = "Pagination offset for results", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "0")) + @Valid + @RequestParam(value = "offset", required = false, defaultValue = "0") + Integer offset, + @Parameter( + in = ParameterIn.QUERY, + description = "Pagination size for results. 
Ignored if negative", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "-1")) + @Valid + @RequestParam(value = "limit", required = false, defaultValue = "-1") + Integer limit) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "[ {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}, {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n} ]", + List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subjects associated to schema ID", description = "Retrieves all the subjects associated with a particular schema ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subjects matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/subjects", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> getSubjects(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include subjects where the schema was deleted" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List subjects associated to schema ID", + description = "Retrieves all the subjects associated with a particular schema ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of subjects matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/subjects", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> getSubjects( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include subjects where the schema was deleted", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subject-versions associated to schema ID", description = "Get all the subject-version pairs associated with the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subject versions matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = SubjectVersion.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<SubjectVersion>> getVersions(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include subject versions where the schema was deleted" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ {\n \"subject\" : \"User\",\n \"version\" : 1\n}, {\n \"subject\" : \"User\",\n \"version\" : 1\n} ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List subject-versions associated to schema ID", + description = "Get all the subject-version pairs associated with the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of subject versions matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = SubjectVersion.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<SubjectVersion>> getVersions( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include subject versions where the schema was deleted", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "[ {\n \"subject\" : \"User\",\n \"version\" : 1\n}, {\n \"subject\" : \"User\",\n \"version\" : 1\n} ]", + List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java index 4131a93695f13..6581f5f39a647 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class SchemasApiController implements SchemasApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public SchemasApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - 
this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public SchemasApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java index 8977fc06387c2..544a1aff8008e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java @@ -1,7 +1,6 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; @@ -33,275 +32,847 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface SubjectsApi { - Logger log = LoggerFactory.getLogger(SubjectsApi.class); + Logger log = LoggerFactory.getLogger(SubjectsApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete schema version", description = "Deletes a specific version of the schema registered under this subject. This only deletes the version and the schema ID remains intact making it still possible to decode data using the schema ID. This API is recommended to be used only in development environments or under extreme circumstances where-in, its required to delete a previously registered schema for compatibility purposes or re-register previously registered schema.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns the schema version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. 
Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<Integer> deleteSchemaVersion(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to perform a permanent delete" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "permanent", required = false) Boolean permanent) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("1", Integer.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Delete schema version", + description = + "Deletes a specific version of the schema registered under this subject. This only deletes the version and the schema ID remains intact making it still possible to decode data using the schema ID. This API is recommended to be used only in development environments or under extreme circumstances where-in, its required to delete a previously registered schema for compatibility purposes or re-register previously registered schema.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns the schema version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. 
Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<Integer> deleteSchemaVersion( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to perform a permanent delete", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "permanent", required = false) + Boolean permanent) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("1", Integer.class), HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Delete subject", description = "Deletes the specified subject and its associated compatibility level if registered. It is recommended to use this API only when a topic needs to be recycled or in development environment.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns list of schema versions deleted", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<List<Integer>> deleteSubject(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to perform a permanent delete" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "permanent", required = false) Boolean permanent) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ 1, 1 ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Delete subject", + description = + "Deletes the specified subject and its associated compatibility level if registered. It is recommended to use this API only when a topic needs to be recycled or in development environment.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns list of schema versions deleted", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<List<Integer>> deleteSubject( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to perform a permanent delete", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "permanent", required = false) + Boolean permanent) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ 1, 1 ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List schemas referencing a schema", description = "Retrieves the IDs of schemas that reference the specified schema.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of IDs for schemas that reference the specified schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}/referencedby", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<Integer>> getReferencedBy(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ 100001, 100001 ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "List schemas referencing a schema", + description = "Retrieves the IDs of schemas that reference the specified schema.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of IDs for schemas that reference the specified schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class)))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}/referencedby", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<Integer>> getReferencedBy( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ 100001, 100001 ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema by version", description = "Retrieves a specific version of the schema registered under this subject.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Schema> getSchemaByVersion(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", Schema.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Get schema by version", + description = "Retrieves a specific version of the schema registered under this subject.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Schema> getSchemaByVersion( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include deleted schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", + Schema.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema string by version", description = "Retrieves the schema for the specified version of this subject. Only the unescaped schema string is returned.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. 
Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}/schema", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<String> getSchemaOnly2(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Get schema string by version", + description = + "Retrieves the schema for the specified version of this subject. Only the unescaped schema string is returned.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}/schema", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<String> getSchemaOnly2( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include deleted schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subjects", description = "Retrieves a list of registered subjects matching specified parameters.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subjects matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> list(@Parameter(in = ParameterIn.QUERY, description = "Subject name prefix" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue=":*:")) @Valid @RequestParam(value = "subjectPrefix", required = false, defaultValue=":*:") String subjectPrefix, @Parameter(in = ParameterIn.QUERY, description = "Whether to look up deleted subjects" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return deleted subjects only" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deletedOnly", required = false) Boolean deletedOnly) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "List subjects", + description = "Retrieves a list of registered subjects matching specified parameters.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of subjects matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> list( + @Parameter( + in = ParameterIn.QUERY, + description = "Subject name prefix", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = ":*:")) + @Valid + @RequestParam(value = "subjectPrefix", required = false, defaultValue = ":*:") + String subjectPrefix, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to look up deleted subjects", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return deleted subjects only", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deletedOnly", required = false) + Boolean deletedOnly) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List versions under subject", description = "Retrieves a list of versions registered under the specified subject.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of version numbers matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<Integer>> listVersions(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return deleted schemas only" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deletedOnly", required = false) Boolean deletedOnly) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ 1, 1 ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "List versions under subject", + description = "Retrieves a list of versions registered under the specified subject.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of version numbers matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found. ", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<Integer>> listVersions( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return deleted schemas only", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deletedOnly", required = false) + Boolean deletedOnly) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ 1, 1 ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Lookup schema under subject", description = "Check if a schema has already been registered under the specified subject. If so, this returns the schema string along with its globally unique identifier, its version under this subject and the subject name.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. 
Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<Schema> lookUpSchemaUnderSubject(@Parameter(in = ParameterIn.PATH, description = "Subject under which the schema will be registered", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to lookup the normalized schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "normalize", required = false) Boolean normalize, @Parameter(in = ParameterIn.QUERY, description = "Whether to lookup deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", Schema.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Lookup schema under subject", + description = + "Check if a schema has already been registered under the specified subject. If so, this returns the schema string along with its globally unique identifier, its version under this subject and the subject name.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. 
Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = "Internal Server Error.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<Schema> lookUpSchemaUnderSubject( + @Parameter( + in = ParameterIn.PATH, + description = "Subject under which the schema will be registered", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to lookup the normalized schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "normalize", required = false) + Boolean normalize, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to lookup deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", + Schema.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Register schema under a subject", description = "Register a new schema under the specified subject. If successfully registered, this returns the unique identifier of this schema in the registry. The returned identifier should be used to retrieve this schema from the schemas resource and is different from the schema's version which is associated with the subject. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects. 
A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed.", tags={ "Subjects (v1)"}, hidden = true) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Schema successfully registered.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = RegisterSchemaResponse.class))), - - @ApiResponse(responseCode = "409", description = "Conflict. Incompatible schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.Error code 50002 indicates operation timed out. Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<RegisterSchemaResponse> register(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to register the normalized schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "normalize", required = false) Boolean normalize) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"id\" : 100001\n}", RegisterSchemaResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + 
summary = "Register schema under a subject", + description = + "Register a new schema under the specified subject. If successfully registered, this returns the unique identifier of this schema in the registry. The returned identifier should be used to retrieve this schema from the schemas resource and is different from the schema's version which is associated with the subject. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects. A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed.", + tags = {"Subjects (v1)"}, + hidden = true) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Schema successfully registered.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = RegisterSchemaResponse.class))), + @ApiResponse( + responseCode = "409", + description = "Conflict. Incompatible schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. ", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.Error code 50002 indicates operation timed out. 
Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<RegisterSchemaResponse> register( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to register the normalized schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "normalize", required = false) + Boolean normalize) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"id\" : 100001\n}", RegisterSchemaResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java index f9b634add7b2a..779a56d6de540 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class SubjectsApiController implements SubjectsApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public SubjectsApiController(ObjectMapper objectMapper, HttpServletRequest request) { - 
this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public SubjectsApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java index 268d50aa3a68a..65961426ec364 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java @@ -1,86 +1,82 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; -import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.validation.annotation.Validated; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface V1Api { - Logger log = LoggerFactory.getLogger(V1Api.class); + Logger log = 
LoggerFactory.getLogger(V1Api.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Get the server metadata", description = "", tags={ "Server Metadata (v1)" }, hidden = true) - @ApiResponses(value = { - @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store ") }) - @RequestMapping(value = "/v1/metadata/id", - method = RequestMethod.GET) - default ResponseEntity<Void> getClusterId() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated"); - } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + @Operation( + summary = "Get the server metadata", + description = "", + tags = {"Server Metadata (v1)"}, + hidden = true) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "500", + description = "Error code 50001 -- Error in the backend data store ") + }) + @RequestMapping(value = "/v1/metadata/id", method = RequestMethod.GET) + default ResponseEntity<Void> getClusterId() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get Schema Registry server version", description = "", tags={ "Server Metadata (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store ") }) - @RequestMapping(value = "/v1/metadata/version", - method = RequestMethod.GET) - default ResponseEntity<Void> getSchemaRegistryVersion() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated"); - } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + @Operation( + summary = "Get Schema Registry server version", + description = "", + tags = {"Server Metadata (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "500", + description = "Error code 50001 -- Error in the backend data store ") + }) + @RequestMapping(value = "/v1/metadata/version", method = RequestMethod.GET) + default ResponseEntity<Void> getSchemaRegistryVersion() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java index d65db3be11231..90e56a914652e 100644 --- 
a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class V1ApiController implements V1Api { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public V1ApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public V1ApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java index 0cf57361e58f8..3790bbde8e39f 100644 --- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java +++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java @@ -35,17 +35,23 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - -/** - * DataHub Rest Controller implementation for Confluent's Schema Registry OpenAPI spec. - */ +/** DataHub Rest Controller implementation for Confluent's Schema Registry OpenAPI spec. 
*/ @Slf4j @RestController @RequestMapping("/api") @RequiredArgsConstructor -@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE) +@ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = InternalSchemaRegistryFactory.TYPE) public class SchemaRegistryController - implements CompatibilityApi, ConfigApi, ContextsApi, DefaultApi, ModeApi, SchemasApi, SubjectsApi, V1Api { + implements CompatibilityApi, + ConfigApi, + ContextsApi, + DefaultApi, + ModeApi, + SchemasApi, + SubjectsApi, + V1Api { private final ObjectMapper objectMapper; @@ -82,7 +88,8 @@ public ResponseEntity<Void> getSchemaRegistryVersion() { } @Override - public ResponseEntity<Integer> deleteSchemaVersion(String subject, String version, Boolean permanent) { + public ResponseEntity<Integer> deleteSchemaVersion( + String subject, String version, Boolean permanent) { log.error("[SubjectsApi] deleteSchemaVersion method not implemented"); return SubjectsApi.super.deleteSchemaVersion(subject, version, permanent); } @@ -100,7 +107,8 @@ public ResponseEntity<List<Integer>> getReferencedBy(String subject, String vers } @Override - public ResponseEntity<Schema> getSchemaByVersion(String subject, String version, Boolean deleted) { + public ResponseEntity<Schema> getSchemaByVersion( + String subject, String version, Boolean deleted) { log.error("[SubjectsApi] getSchemaByVersion method not implemented"); return SubjectsApi.super.getSchemaByVersion(subject, version, deleted); } @@ -112,20 +120,22 @@ public ResponseEntity<String> getSchemaOnly2(String subject, String version, Boo } @Override - public ResponseEntity<List<String>> list(String subjectPrefix, Boolean deleted, Boolean deletedOnly) { + public ResponseEntity<List<String>> list( + String subjectPrefix, Boolean deleted, Boolean deletedOnly) { log.error("[SubjectsApi] list method not implemented"); return SubjectsApi.super.list(subjectPrefix, deleted, deletedOnly); } @Override - public ResponseEntity<List<Integer>> listVersions(String subject, Boolean deleted, Boolean deletedOnly) { + public ResponseEntity<List<Integer>> listVersions( + String subject, Boolean deleted, Boolean deletedOnly) { log.error("[SubjectsApi] listVersions method not implemented"); return SubjectsApi.super.listVersions(subject, deleted, deletedOnly); } @Override - public ResponseEntity<Schema> lookUpSchemaUnderSubject(String subject, RegisterSchemaRequest body, Boolean normalize, - Boolean deleted) { + public ResponseEntity<Schema> lookUpSchemaUnderSubject( + String subject, RegisterSchemaRequest body, Boolean normalize, Boolean deleted) { log.error("[SubjectsApi] lookUpSchemaUnderSubject method not implemented"); return SubjectsApi.super.lookUpSchemaUnderSubject(subject, body, normalize, deleted); } @@ -149,26 +159,33 @@ public ResponseEntity<Mode> getTopLevelMode() { } @Override - public ResponseEntity<ModeUpdateRequest> updateMode(String subject, ModeUpdateRequest body, Boolean force) { + public ResponseEntity<ModeUpdateRequest> updateMode( + String subject, ModeUpdateRequest body, Boolean force) { log.error("[ModeApi] updateMode method not implemented"); return ModeApi.super.updateMode(subject, body, force); } @Override - public ResponseEntity<ModeUpdateRequest> updateTopLevelMode(ModeUpdateRequest body, Boolean force) { + public ResponseEntity<ModeUpdateRequest> updateTopLevelMode( + ModeUpdateRequest body, Boolean force) { log.error("[ModeApi] updateTopLevelMode method not implemented"); return ModeApi.super.updateTopLevelMode(body, 
force); } @Override - @Operation(summary = "Schema Registry Root Resource", description = "The Root resource is a no-op, only used to " - + "validate endpoint is ready.", tags = { "Schema Registry Base" }) + @Operation( + summary = "Schema Registry Root Resource", + description = "The Root resource is a no-op, only used to " + "validate endpoint is ready.", + tags = {"Schema Registry Base"}) public ResponseEntity<String> get() { return new ResponseEntity<>(HttpStatus.OK); } @Override - @Operation(summary = "", description = "", tags = { "Schema Registry Base" }) + @Operation( + summary = "", + description = "", + tags = {"Schema Registry Base"}) public ResponseEntity<Map<String, String>> post(Map<String, String> body) { log.error("[DefaultApi] post method not implemented"); return DefaultApi.super.post(body); @@ -205,7 +222,8 @@ public ResponseEntity<Config> getTopLevelConfig() { } @Override - public ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig(String subject, ConfigUpdateRequest body) { + public ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig( + String subject, ConfigUpdateRequest body) { log.error("[ConfigApi] updateSubjectLevelConfig method not implemented"); return ConfigApi.super.updateSubjectLevelConfig(subject, body); } @@ -217,44 +235,55 @@ public ResponseEntity<ConfigUpdateRequest> updateTopLevelConfig(ConfigUpdateRequ } @Override - public ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName(String subject, String version, - RegisterSchemaRequest body, Boolean verbose) { + public ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName( + String subject, String version, RegisterSchemaRequest body, Boolean verbose) { log.error("[CompatibilityApi] testCompatibilityBySubjectName method not implemented"); return CompatibilityApi.super.testCompatibilityBySubjectName(subject, version, body, verbose); } @Override - public ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject(String subject, - RegisterSchemaRequest body, Boolean verbose) { + public ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject( + String subject, RegisterSchemaRequest body, Boolean verbose) { log.error("[CompatibilityApi] testCompatibilityForSubject method not implemented"); return CompatibilityApi.super.testCompatibilityForSubject(subject, body, verbose); } @Override - public ResponseEntity<RegisterSchemaResponse> register(String subject, RegisterSchemaRequest body, - Boolean normalize) { + public ResponseEntity<RegisterSchemaResponse> register( + String subject, RegisterSchemaRequest body, Boolean normalize) { final String topicName = subject.replaceFirst("-value", ""); - return _schemaRegistryService.getSchemaIdForTopic(topicName).map(id -> { - final RegisterSchemaResponse response = new RegisterSchemaResponse(); - return new ResponseEntity<>(response.id(id), HttpStatus.OK); - }).orElseGet(() -> { - log.error("Couldn't find topic with name {}.", topicName); - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - }); - } - - @Override - public ResponseEntity<SchemaString> getSchema(Integer id, String subject, String format, Boolean fetchMaxId) { - return _schemaRegistryService.getSchemaForId(id).map(schema -> { - SchemaString result = new SchemaString(); - result.setMaxId(id); - result.setSchemaType("AVRO"); - result.setSchema(schema.toString()); - return new ResponseEntity<>(result, HttpStatus.OK); - }).orElseGet(() -> { - log.error("Couldn't find topic with id {}.", id); - return new 
ResponseEntity<>(HttpStatus.NOT_FOUND); - }); + return _schemaRegistryService + .getSchemaIdForTopic(topicName) + .map( + id -> { + final RegisterSchemaResponse response = new RegisterSchemaResponse(); + return new ResponseEntity<>(response.id(id), HttpStatus.OK); + }) + .orElseGet( + () -> { + log.error("Couldn't find topic with name {}.", topicName); + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + }); + } + + @Override + public ResponseEntity<SchemaString> getSchema( + Integer id, String subject, String format, Boolean fetchMaxId) { + return _schemaRegistryService + .getSchemaForId(id) + .map( + schema -> { + SchemaString result = new SchemaString(); + result.setMaxId(id); + result.setSchemaType("AVRO"); + result.setSchema(schema.toString()); + return new ResponseEntity<>(result, HttpStatus.OK); + }) + .orElseGet( + () -> { + log.error("Couldn't find topic with id {}.", id); + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + }); } @Override @@ -270,8 +299,8 @@ public ResponseEntity<List<String>> getSchemaTypes() { } @Override - public ResponseEntity<List<Schema>> getSchemas(String subjectPrefix, Boolean deleted, Boolean latestOnly, - Integer offset, Integer limit) { + public ResponseEntity<List<Schema>> getSchemas( + String subjectPrefix, Boolean deleted, Boolean latestOnly, Integer offset, Integer limit) { log.error("[SchemasApi] getSchemas method not implemented"); return SchemasApi.super.getSchemas(subjectPrefix, deleted, latestOnly, offset, limit); } @@ -283,7 +312,8 @@ public ResponseEntity<List<String>> getSubjects(Integer id, String subject, Bool } @Override - public ResponseEntity<List<SubjectVersion>> getVersions(Integer id, String subject, Boolean deleted) { + public ResponseEntity<List<SubjectVersion>> getVersions( + Integer id, String subject, Boolean deleted) { log.error("[SchemasApi] getVersions method not implemented"); return SchemasApi.super.getVersions(id, subject, deleted); } diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java index d217d501630e3..98163a7d91420 100644 --- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java +++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java @@ -13,10 +13,10 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - @EnableWebMvc -@OpenAPIDefinition(info = @Info(title = "DataHub OpenAPI", version = "1.0.0"), - servers = {@Server(url = "/schema-registry/", description = "Schema Registry Server URL")}) +@OpenAPIDefinition( + info = @Info(title = "DataHub OpenAPI", version = "1.0.0"), + servers = {@Server(url = "/schema-registry/", description = "Schema Registry Server URL")}) @Configuration public class SpringWebSchemaRegistryConfig implements WebMvcConfigurer { @@ -27,4 +27,4 @@ public void configureMessageConverters(List<HttpMessageConverter<?>> messageConv messageConverters.add(new FormHttpMessageConverter()); messageConverters.add(new MappingJackson2HttpMessageConverter()); } -} \ No newline at end of file +} diff --git 
a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java index 1d34008ebf1be..4dffe1e633c6c 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java @@ -12,5 +12,4 @@ public class OpenAPISpringTestServer { public static void main(String[] args) { SpringApplication.run(OpenAPISpringTestServer.class, args); } - } diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java index 4e31dea6dee1f..1aa0361117c18 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java @@ -6,9 +6,12 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.web.servlet.DispatcherServlet; - @TestConfiguration -@ComponentScan(basePackages = {"io.datahubproject.openapi.schema.registry", "com.linkedin.metadata.schema.registry"}) +@ComponentScan( + basePackages = { + "io.datahubproject.openapi.schema.registry", + "com.linkedin.metadata.schema.registry" + }) public class OpenAPISpringTestServerConfiguration { @Bean @@ -17,7 +20,8 @@ public DispatcherServlet dispatcherServlet() { } @Bean - public ServletRegistrationBean<DispatcherServlet> servletRegistrationBean(DispatcherServlet dispatcherServlet) { + public ServletRegistrationBean<DispatcherServlet> servletRegistrationBean( + DispatcherServlet dispatcherServlet) { return new ServletRegistrationBean<>(dispatcherServlet, "/"); } } diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java index 4e1bb09ab205b..664766f204e46 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.test; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.ByteString; @@ -45,25 +48,25 @@ import org.testcontainers.utility.DockerImageName; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - @ActiveProfiles("test") @ContextConfiguration @SpringBootTest( webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, - classes = {OpenAPISpringTestServer.class, OpenAPISpringTestServerConfiguration.class, - SchemaRegistryControllerTestConfiguration.class}) + classes = { + OpenAPISpringTestServer.class, + OpenAPISpringTestServerConfiguration.class, + SchemaRegistryControllerTestConfiguration.class + }) @EnableKafka public class 
SchemaRegistryControllerTest extends AbstractTestNGSpringContextTests { private static final String CONFLUENT_PLATFORM_VERSION = "7.2.2"; - static KafkaContainer kafka = new KafkaContainer( - DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION)) - .withReuse(true) - .withStartupAttempts(5) - .withStartupTimeout(Duration.of(30, ChronoUnit.SECONDS)); + static KafkaContainer kafka = + new KafkaContainer( + DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION)) + .withReuse(true) + .withStartupAttempts(5) + .withStartupTimeout(Duration.of(30, ChronoUnit.SECONDS)); @DynamicPropertySource static void kafkaProperties(DynamicPropertyRegistry registry) { @@ -73,8 +76,7 @@ static void kafkaProperties(DynamicPropertyRegistry registry) { registry.add("kafka.schemaRegistry.url", () -> "http://localhost:53222/api/"); } - @Autowired - EventProducer _producer; + @Autowired EventProducer _producer; private final CountDownLatch mcpLatch = new CountDownLatch(1); @@ -89,7 +91,8 @@ static void kafkaProperties(DynamicPropertyRegistry registry) { private final AtomicReference<PlatformEvent> peRef = new AtomicReference<>(); @Test - public void testMCPConsumption() throws IOException, InterruptedException, ExecutionException, TimeoutException { + public void testMCPConsumption() + throws IOException, InterruptedException, ExecutionException, TimeoutException { final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); final DatasetProperties datasetProperties = new DatasetProperties(); datasetProperties.setName("Foo Bar"); @@ -101,7 +104,8 @@ public void testMCPConsumption() throws IOException, InterruptedException, Execu gmce.setAspectName("datasetProperties"); final JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - final byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + final byte[] datasetPropertiesSerialized = + dataTemplateCodec.dataTemplateToBytes(datasetProperties); final GenericAspect genericAspect = new GenericAspect(); genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); genericAspect.setContentType("application/json"); @@ -115,7 +119,8 @@ public void testMCPConsumption() throws IOException, InterruptedException, Execu } @Test - public void testMCLConsumption() throws IOException, InterruptedException, ExecutionException, TimeoutException { + public void testMCLConsumption() + throws IOException, InterruptedException, ExecutionException, TimeoutException { final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); final DatasetProperties datasetProperties = new DatasetProperties(); datasetProperties.setName("Foo Bar"); @@ -130,7 +135,8 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu // Set old aspect final GenericAspect oldAspect = new GenericAspect(); - final byte[] oldDatasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + final byte[] oldDatasetPropertiesSerialized = + dataTemplateCodec.dataTemplateToBytes(datasetProperties); oldAspect.setValue(ByteString.unsafeWrap(oldDatasetPropertiesSerialized)); oldAspect.setContentType("application/json"); metadataChangeLog.setPreviousAspectValue(GenericRecordUtils.serializeAspect(oldAspect)); @@ -139,16 +145,20 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu // Set new aspect final GenericAspect newAspectValue = new GenericAspect(); 
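     // The DatasetProperties instance serialized into the "old" aspect above is mutated next,
     // so the bytes captured below differ and the MetadataChangeLog carries a genuine
     // previous-vs-new aspect pair.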
datasetProperties.setDescription("Updated data"); - final byte[] newDatasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + final byte[] newDatasetPropertiesSerialized = + dataTemplateCodec.dataTemplateToBytes(datasetProperties); newAspectValue.setValue(ByteString.unsafeWrap(newDatasetPropertiesSerialized)); newAspectValue.setContentType("application/json"); metadataChangeLog.setAspect(GenericRecordUtils.serializeAspect(newAspectValue)); metadataChangeLog.setSystemMetadata(SystemMetadataUtils.createDefaultSystemMetadata()); final MockEntitySpec entitySpec = new MockEntitySpec("dataset"); - final AspectSpec aspectSpec = entitySpec.createAspectSpec(datasetProperties, DATASET_PROPERTIES_ASPECT_NAME); + final AspectSpec aspectSpec = + entitySpec.createAspectSpec(datasetProperties, DATASET_PROPERTIES_ASPECT_NAME); - _producer.produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog).get(10, TimeUnit.SECONDS); + _producer + .produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog) + .get(10, TimeUnit.SECONDS); final boolean messageConsumed = mclLatch.await(10, TimeUnit.SECONDS); assertTrue(messageConsumed); assertEquals(mclLatch.getCount(), 0); @@ -156,7 +166,8 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu } @Test - public void testPEConsumption() throws InterruptedException, ExecutionException, TimeoutException { + public void testPEConsumption() + throws InterruptedException, ExecutionException, TimeoutException { final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); final EntityChangeEvent changeEvent = new EntityChangeEvent(); @@ -172,11 +183,11 @@ public void testPEConsumption() throws InterruptedException, ExecutionException, final PlatformEvent platformEvent = new PlatformEvent(); platformEvent.setName(CHANGE_EVENT_PLATFORM_EVENT_NAME); - platformEvent.setHeader( - new PlatformEventHeader().setTimestampMillis(123L)); + platformEvent.setHeader(new PlatformEventHeader().setTimestampMillis(123L)); platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent)); - _producer.producePlatformEvent(CHANGE_EVENT_PLATFORM_EVENT_NAME, "Some key", platformEvent) + _producer + .producePlatformEvent(CHANGE_EVENT_PLATFORM_EVENT_NAME, "Some key", platformEvent) .get(10, TimeUnit.SECONDS); final boolean messageConsumed = peLatch.await(10, TimeUnit.SECONDS); @@ -185,8 +196,11 @@ public void testPEConsumption() throws InterruptedException, ExecutionException, assertEquals(peRef.get(), platformEvent); } - @KafkaListener(id = "test-mcp-consumer", topics = Topics.METADATA_CHANGE_PROPOSAL, - containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"}) + @KafkaListener( + id = "test-mcp-consumer", + topics = Topics.METADATA_CHANGE_PROPOSAL, + containerFactory = "kafkaEventConsumer", + properties = {"auto.offset.reset:earliest"}) public void receiveMCP(ConsumerRecord<String, GenericRecord> consumerRecord) { final GenericRecord value = consumerRecord.value(); @@ -199,8 +213,11 @@ public void receiveMCP(ConsumerRecord<String, GenericRecord> consumerRecord) { } } - @KafkaListener(id = "test-mcl-consumer", topics = Topics.METADATA_CHANGE_LOG_VERSIONED, - containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"}) + @KafkaListener( + id = "test-mcl-consumer", + topics = Topics.METADATA_CHANGE_LOG_VERSIONED, + containerFactory = "kafkaEventConsumer", + properties = {"auto.offset.reset:earliest"}) public void 
receiveMCL(ConsumerRecord<String, GenericRecord> consumerRecord) { final GenericRecord value = consumerRecord.value(); @@ -212,8 +229,11 @@ public void receiveMCL(ConsumerRecord<String, GenericRecord> consumerRecord) { } } - @KafkaListener(id = "test-pe-consumer", topics = Topics.PLATFORM_EVENT, - containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"}) + @KafkaListener( + id = "test-pe-consumer", + topics = Topics.PLATFORM_EVENT, + containerFactory = "kafkaEventConsumer", + properties = {"auto.offset.reset:earliest"}) public void receivePE(ConsumerRecord<String, GenericRecord> consumerRecord) { final GenericRecord value = consumerRecord.value(); @@ -224,4 +244,4 @@ public void receivePE(ConsumerRecord<String, GenericRecord> consumerRecord) { throw new RuntimeException(e); } } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java index e9fb5887e29cc..ff5b951092070 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java @@ -4,10 +4,7 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.test.context.TestPropertySource; - @TestConfiguration @TestPropertySource(value = "classpath:/application.properties") @ComponentScan(basePackages = {"com.linkedin.gms.factory.kafka", "com.linkedin.gms.factory.config"}) -public class SchemaRegistryControllerTestConfiguration { - -} +public class SchemaRegistryControllerTestConfiguration {} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java index b622fc5bb6af2..caebc6a334e72 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java @@ -1,8 +1,7 @@ package com.linkedin.metadata.datahubusage; public class DataHubUsageEventConstants { - private DataHubUsageEventConstants() { - } + private DataHubUsageEventConstants() {} // Common fields public static final String TYPE = "type"; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java index c1018e2031b17..518b5f28a5b99 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java @@ -2,7 +2,6 @@ import lombok.Getter; - @Getter public enum DataHubUsageEventType { PAGE_VIEW_EVENT("PageViewEvent"), diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java index 40a5e3a07ae6d..eab482c7bab27 100644 --- 
a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java @@ -25,57 +25,67 @@ import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTimeUtils; - @Slf4j public class AspectUtils { - private AspectUtils() { - } + private AspectUtils() {} - public static final Set<ChangeType> SUPPORTED_TYPES = Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); + public static final Set<ChangeType> SUPPORTED_TYPES = + Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); public static List<MetadataChangeProposal> getAdditionalChanges( - @Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService, - boolean onPrimaryKeyInsertOnly) { + @Nonnull MetadataChangeProposal metadataChangeProposal, + @Nonnull EntityService entityService, + boolean onPrimaryKeyInsertOnly) { // No additional changes for unsupported operations if (!SUPPORTED_TYPES.contains(metadataChangeProposal.getChangeType())) { return Collections.emptyList(); } - final Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, + final Urn urn = + EntityKeyUtils.getUrnFromProposal( + metadataChangeProposal, entityService.getKeyAspectSpec(metadataChangeProposal.getEntityType())); final Map<String, RecordTemplate> includedAspects; if (metadataChangeProposal.getChangeType() != ChangeType.PATCH) { - RecordTemplate aspectRecord = GenericRecordUtils.deserializeAspect(metadataChangeProposal.getAspect().getValue(), - metadataChangeProposal.getAspect().getContentType(), entityService.getEntityRegistry() - .getEntitySpec(urn.getEntityType()).getAspectSpec(metadataChangeProposal.getAspectName())); + RecordTemplate aspectRecord = + GenericRecordUtils.deserializeAspect( + metadataChangeProposal.getAspect().getValue(), + metadataChangeProposal.getAspect().getContentType(), + entityService + .getEntityRegistry() + .getEntitySpec(urn.getEntityType()) + .getAspectSpec(metadataChangeProposal.getAspectName())); includedAspects = ImmutableMap.of(metadataChangeProposal.getAspectName(), aspectRecord); } else { includedAspects = ImmutableMap.of(); } if (onPrimaryKeyInsertOnly) { - return entityService.generateDefaultAspectsOnFirstWrite(urn, includedAspects) - .getValue() - .stream() - .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + return entityService + .generateDefaultAspectsOnFirstWrite(urn, includedAspects) + .getValue() + .stream() + .map( + entry -> + getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } else { - return entityService.generateDefaultAspectsIfMissing(urn, includedAspects) - .stream() - .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + return entityService.generateDefaultAspectsIfMissing(urn, includedAspects).stream() + .map( + entry -> + getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } } public static List<MetadataChangeProposal> getAdditionalChanges( - @Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService) { + @Nonnull MetadataChangeProposal metadataChangeProposal, + @Nonnull EntityService entityService) { return 
getAdditionalChanges(metadataChangeProposal, entityService, false); } @@ -85,12 +95,10 @@ public static Map<Urn, Aspect> batchGetLatestAspect( Set<Urn> urns, String aspectName, EntityClient entityClient, - Authentication authentication) throws Exception { - final Map<Urn, EntityResponse> gmsResponse = entityClient.batchGetV2( - entity, - urns, - ImmutableSet.of(aspectName), - authentication); + Authentication authentication) + throws Exception { + final Map<Urn, EntityResponse> gmsResponse = + entityClient.batchGetV2(entity, urns, ImmutableSet.of(aspectName), authentication); final Map<Urn, Aspect> finalResult = new HashMap<>(); for (Urn urn : urns) { EntityResponse response = gmsResponse.get(urn); @@ -101,8 +109,8 @@ public static Map<Urn, Aspect> batchGetLatestAspect( return finalResult; } - private static MetadataChangeProposal getProposalFromAspect(String aspectName, RecordTemplate aspect, - MetadataChangeProposal original) { + private static MetadataChangeProposal getProposalFromAspect( + String aspectName, RecordTemplate aspect, MetadataChangeProposal original) { MetadataChangeProposal proposal = new MetadataChangeProposal(); GenericAspect genericAspect = GenericRecordUtils.serializeAspect(aspect); // Set net new fields @@ -110,7 +118,8 @@ private static MetadataChangeProposal getProposalFromAspect(String aspectName, R proposal.setAspectName(aspectName); // Set fields determined from original - // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an UPSERT + // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an + // UPSERT proposal.setChangeType(original.getChangeType()); if (ChangeType.PATCH.equals(proposal.getChangeType())) { proposal.setChangeType(ChangeType.UPSERT); @@ -128,7 +137,7 @@ private static MetadataChangeProposal getProposalFromAspect(String aspectName, R if (original.getAuditHeader() != null) { proposal.setAuditHeader(original.getAuditHeader()); } - + proposal.setEntityType(original.getEntityType()); return proposal; @@ -145,8 +154,11 @@ public static MetadataChangeProposal buildMetadataChangeProposal( return proposal; } - public static MetadataChangeProposal buildMetadataChangeProposal(@Nonnull String entityType, - @Nonnull RecordTemplate keyAspect, @Nonnull String aspectName, @Nonnull RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposal( + @Nonnull String entityType, + @Nonnull RecordTemplate keyAspect, + @Nonnull String aspectName, + @Nonnull RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityType(entityType); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(keyAspect)); @@ -162,4 +174,4 @@ public static AuditStamp getAuditStamp(Urn actor) { auditStamp.setActor(actor); return auditStamp; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java index 40284efe7ac82..3b71c698e0c9f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; @@ -26,7 +28,6 @@ import com.linkedin.metadata.run.RelatedAspectArray; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; - import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; @@ -36,375 +37,465 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; - import lombok.AllArgsConstructor; import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - - @Slf4j @RequiredArgsConstructor public class DeleteEntityService { - private final EntityService _entityService; - private final GraphService _graphService; - - private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; - - /** - * Public endpoint that deletes references to a given urn across DataHub's metadata graph. This is the entrypoint for - * addressing dangling pointers whenever a user deletes some entity. - * - * @param urn The urn for which to delete references in DataHub's metadata graph. - * @param dryRun Specifies if the delete logic should be executed to conclusion or if the caller simply wants a - * preview of the response. - * @return A {@link DeleteReferencesResponse} instance detailing the response of deleting references to the provided - * urn. - */ - public DeleteReferencesResponse deleteReferencesTo(final Urn urn, final boolean dryRun) { - final DeleteReferencesResponse result = new DeleteReferencesResponse(); - RelatedEntitiesResult relatedEntities = - _graphService.findRelatedEntities(null, newFilter("urn", urn.toString()), null, - EMPTY_FILTER, - ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000); - - final List<RelatedAspect> relatedAspects = relatedEntities.getEntities().stream() - .flatMap(relatedEntity -> getRelatedAspectStream(urn, UrnUtils.getUrn(relatedEntity.getUrn()), + private final EntityService _entityService; + private final GraphService _graphService; + + private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; + + /** + * Public endpoint that deletes references to a given urn across DataHub's metadata graph. This is + * the entrypoint for addressing dangling pointers whenever a user deletes some entity. + * + * @param urn The urn for which to delete references in DataHub's metadata graph. + * @param dryRun Specifies if the delete logic should be executed to conclusion or if the caller + * simply wants a preview of the response. + * @return A {@link DeleteReferencesResponse} instance detailing the response of deleting + * references to the provided urn. 
+ */ + public DeleteReferencesResponse deleteReferencesTo(final Urn urn, final boolean dryRun) { + final DeleteReferencesResponse result = new DeleteReferencesResponse(); + RelatedEntitiesResult relatedEntities = + _graphService.findRelatedEntities( + null, + newFilter("urn", urn.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000); + + final List<RelatedAspect> relatedAspects = + relatedEntities.getEntities().stream() + .flatMap( + relatedEntity -> + getRelatedAspectStream( + urn, + UrnUtils.getUrn(relatedEntity.getUrn()), relatedEntity.getRelationshipType())) - .limit(10) - .collect(Collectors.toList()); - - result.setRelatedAspects(new RelatedAspectArray(relatedAspects)); - result.setTotal(relatedEntities.getTotal()); - - if (dryRun) { - return result; - } - - for (int processedEntities = 0; processedEntities < relatedEntities.getTotal(); processedEntities += relatedEntities.getCount()) { - log.info("Processing batch {} of {} aspects", processedEntities, relatedEntities.getTotal()); - relatedEntities.getEntities().forEach(entity -> deleteReference(urn, entity)); - if (processedEntities + relatedEntities.getEntities().size() < relatedEntities.getTotal()) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - relatedEntities = _graphService.findRelatedEntities(null, newFilter("urn", urn.toString()), - null, EMPTY_FILTER, ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000); - } - } - - return result; - } + .limit(10) + .collect(Collectors.toList()); - /** - * Gets a stream of relatedAspects Pojos (high-level, trimmed information) that relate an entity with urn `urn` to - * another entity of urn `relatedUrn` via a concrete relationship type. Used to give users of this API a summary of - * what aspects are related to a given urn and how. - * - * @param urn The identifier of the source entity. - * @param relatedUrn The identifier of the destination entity. - * @param relationshipType The name of the relationship type that links urn to relatedUrn. - * @return A stream of {@link RelatedAspect} instances that have the relationship from urn to relatedUrn. - */ - private Stream<RelatedAspect> getRelatedAspectStream(Urn urn, Urn relatedUrn, String relationshipType) { - return getAspects(urn, relatedUrn, relationshipType).map(enrichedAspect -> { - final RelatedAspect relatedAspect = new RelatedAspect(); - relatedAspect.setEntity(relatedUrn); - relatedAspect.setRelationship(relationshipType); - relatedAspect.setAspect(enrichedAspect.getName()); - return relatedAspect; - }); - } + result.setRelatedAspects(new RelatedAspectArray(relatedAspects)); + result.setTotal(relatedEntities.getTotal()); - /** - * Gets a stream of Enriched Aspect Pojos (Aspect + aspect spec tuple) that relate an entity with urn `urn` to - * another entity of urn `relatedUrn` via a concrete relationship type. - * - * @param urn The identifier of the source entity. - * @param relatedUrn The identifier of the destination entity. - * @param relationshipType The name of the relationship type that links urn to relatedUrn. - * @return A stream of {@link EnrichedAspect} instances that have the relationship from urn to relatedUrn. 
- */ - private Stream<EnrichedAspect> getAspects(Urn urn, Urn relatedUrn, String relationshipType) { - final String relatedEntityName = relatedUrn.getEntityType(); - final EntitySpec relatedEntitySpec = _entityService.getEntityRegistry().getEntitySpec(relatedEntityName); - final Map<String, AspectSpec> aspectSpecs = getAspectSpecsReferringTo(urn.getEntityType(), relationshipType, relatedEntitySpec); - - // If we have an empty map it means that we have a graph edge that points to some aspect spec that we can't find in - // the entity registry. It would be a corrupted edge in the graph index or backwards incompatible change in the - // entity registry (I.e: deleting the aspect from the metadata model without being consistent in the graph index). - if (aspectSpecs.isEmpty()) { - log.error("Unable to find any aspect spec that has a {} relationship to {} entities. This means that the entity " - + "registry does not have relationships that the graph index has stored.", - relationshipType, relatedEntityName); - handleError(new DeleteEntityServiceError("Unable to find aspect spec in entity registry", - DeleteEntityServiceErrorReason.ENTITY_REGISTRY_SPEC_NOT_FOUND, - ImmutableMap.of("relatedEntityName", relatedEntityName, "relationshipType", relationshipType, - "relatedEntitySpec", relatedEntitySpec))); - return Stream.empty(); - } - - final List<EnvelopedAspect> aspectList = getAspectsReferringTo(relatedUrn, aspectSpecs) - .collect(Collectors.toList()); - - // If we have an empty list it means that we have a graph edge that points to some aspect that we can't find in the - // entity service. It would be a corrupted edge in the graph index or corrupted record in the entity DB. - if (aspectList.isEmpty()) { - log.error("Unable to find an aspect instance that relates {} {} via relationship {} in the entity service. " - + "This is potentially a lack of consistency between the graph and entity DBs.", - urn, relatedUrn, relationshipType); - handleError(new DeleteEntityServiceError("Unable to find aspect instance in entity service", - DeleteEntityServiceErrorReason.ENTITY_SERVICE_ASPECT_NOT_FOUND, - ImmutableMap.of("urn", urn, "relatedUrn", relatedUrn, "relationship", relationshipType, - "aspectSpecs", aspectSpecs))); - return Stream.empty(); - } - - return aspectList.stream() - .filter(envelopedAspect -> hasRelationshipInstanceTo(envelopedAspect.getValue(), urn.getEntityType(), - relationshipType, aspectSpecs.get(envelopedAspect.getName()))) - .map(envelopedAspect -> new EnrichedAspect( - envelopedAspect.getName(), - envelopedAspect.getValue(), - aspectSpecs.get(envelopedAspect.getName())) - ); + if (dryRun) { + return result; } - /** - * Utility method to sleep the thread. - * - * @param seconds The number of seconds to sleep. 
- */ - private void sleep(final Integer seconds) { - try { - TimeUnit.SECONDS.sleep(seconds); - } catch (InterruptedException e) { - log.error("Interrupted sleep", e); - } + for (int processedEntities = 0; + processedEntities < relatedEntities.getTotal(); + processedEntities += relatedEntities.getCount()) { + log.info("Processing batch {} of {} aspects", processedEntities, relatedEntities.getTotal()); + relatedEntities.getEntities().forEach(entity -> deleteReference(urn, entity)); + if (processedEntities + relatedEntities.getEntities().size() < relatedEntities.getTotal()) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + relatedEntities = + _graphService.findRelatedEntities( + null, + newFilter("urn", urn.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000); + } } - /** - * Processes an aspect of a given {@link RelatedEntity} instance that references a given {@link Urn}, removes said - * urn from the aspects and submits an MCP with the updated aspects. - * - * @param urn The urn to be found. - * @param relatedEntity The entity to be modified. - */ - private void deleteReference(final Urn urn, final RelatedEntity relatedEntity) { - final Urn relatedUrn = UrnUtils.getUrn(relatedEntity.getUrn()); - final String relationshipType = relatedEntity.getRelationshipType(); - getAspects(urn, relatedUrn, relationshipType) - .forEach(enrichedAspect -> { - final String aspectName = enrichedAspect.getName(); - final Aspect aspect = enrichedAspect.getAspect(); - final AspectSpec aspectSpec = enrichedAspect.getSpec(); - - final AtomicReference<Aspect> updatedAspect; - try { - updatedAspect = new AtomicReference<>(aspect.copy()); - } catch (CloneNotSupportedException e) { - log.error("Failed to clone aspect {}", aspect); - handleError(new DeleteEntityServiceError("Failed to clone aspect", - DeleteEntityServiceErrorReason.CLONE_FAILED, - ImmutableMap.of("aspect", aspect))); - return; - } - - aspectSpec.getRelationshipFieldSpecs().stream() - .filter(relationshipFieldSpec -> relationshipFieldSpec.getRelationshipAnnotation().getName().equals(relationshipType)) - .forEach(relationshipFieldSpec -> { - final PathSpec path = relationshipFieldSpec.getPath(); - updatedAspect.set(DeleteEntityUtils.getAspectWithReferenceRemoved(urn.toString(), - updatedAspect.get(), aspectSpec.getPegasusSchema(), path)); - }); - - // If there has been an update, then we produce an MCE. - if (!aspect.equals(updatedAspect.get())) { - if (updatedAspect.get() == null) { - // Then we should remove the aspect. - deleteAspect(relatedUrn, aspectName, aspect); - } else { - // Then we should update the aspect. - updateAspect(relatedUrn, aspectName, aspect, updatedAspect.get()); - } - } - }); + return result; + } + + /** + * Gets a stream of relatedAspects Pojos (high-level, trimmed information) that relate an entity + * with urn `urn` to another entity of urn `relatedUrn` via a concrete relationship type. Used to + * give users of this API a summary of what aspects are related to a given urn and how. + * + * @param urn The identifier of the source entity. + * @param relatedUrn The identifier of the destination entity. + * @param relationshipType The name of the relationship type that links urn to relatedUrn. + * @return A stream of {@link RelatedAspect} instances that have the relationship from urn to + * relatedUrn. 
+ */ + private Stream<RelatedAspect> getRelatedAspectStream( + Urn urn, Urn relatedUrn, String relationshipType) { + return getAspects(urn, relatedUrn, relationshipType) + .map( + enrichedAspect -> { + final RelatedAspect relatedAspect = new RelatedAspect(); + relatedAspect.setEntity(relatedUrn); + relatedAspect.setRelationship(relationshipType); + relatedAspect.setAspect(enrichedAspect.getName()); + return relatedAspect; + }); + } + + /** + * Gets a stream of Enriched Aspect Pojos (Aspect + aspect spec tuple) that relate an entity with + * urn `urn` to another entity of urn `relatedUrn` via a concrete relationship type. + * + * @param urn The identifier of the source entity. + * @param relatedUrn The identifier of the destination entity. + * @param relationshipType The name of the relationship type that links urn to relatedUrn. + * @return A stream of {@link EnrichedAspect} instances that have the relationship from urn to + * relatedUrn. + */ + private Stream<EnrichedAspect> getAspects(Urn urn, Urn relatedUrn, String relationshipType) { + final String relatedEntityName = relatedUrn.getEntityType(); + final EntitySpec relatedEntitySpec = + _entityService.getEntityRegistry().getEntitySpec(relatedEntityName); + final Map<String, AspectSpec> aspectSpecs = + getAspectSpecsReferringTo(urn.getEntityType(), relationshipType, relatedEntitySpec); + + // If we have an empty map it means that we have a graph edge that points to some aspect spec + // that we can't find in + // the entity registry. It would be a corrupted edge in the graph index or backwards + // incompatible change in the + // entity registry (I.e: deleting the aspect from the metadata model without being consistent in + // the graph index). + if (aspectSpecs.isEmpty()) { + log.error( + "Unable to find any aspect spec that has a {} relationship to {} entities. This means that the entity " + + "registry does not have relationships that the graph index has stored.", + relationshipType, + relatedEntityName); + handleError( + new DeleteEntityServiceError( + "Unable to find aspect spec in entity registry", + DeleteEntityServiceErrorReason.ENTITY_REGISTRY_SPEC_NOT_FOUND, + ImmutableMap.of( + "relatedEntityName", + relatedEntityName, + "relationshipType", + relationshipType, + "relatedEntitySpec", + relatedEntitySpec))); + return Stream.empty(); } - /** - * Delete an existing aspect for an urn. - * - * @param urn the urn of the entity to remove the aspect for - * @param aspectName the aspect to remove - * @param prevAspect the old value for the aspect - */ - private void deleteAspect(Urn urn, String aspectName, RecordTemplate prevAspect) { - final RollbackResult rollbackResult = _entityService.deleteAspect(urn.toString(), aspectName, - new HashMap<>(), true); - if (rollbackResult == null || rollbackResult.getNewValue() != null) { - log.error("Failed to delete aspect with references. Before {}, after: null, please check GMS logs" - + " logs for more information", prevAspect); - handleError(new DeleteEntityServiceError("Failed to ingest new aspect", - DeleteEntityServiceErrorReason.ASPECT_DELETE_FAILED, - ImmutableMap.of("urn", urn, "aspectName", aspectName))); - } + final List<EnvelopedAspect> aspectList = + getAspectsReferringTo(relatedUrn, aspectSpecs).collect(Collectors.toList()); + + // If we have an empty list it means that we have a graph edge that points to some aspect that + // we can't find in the + // entity service. It would be a corrupted edge in the graph index or corrupted record in the + // entity DB. 
+ if (aspectList.isEmpty()) { + log.error( + "Unable to find an aspect instance that relates {} {} via relationship {} in the entity service. " + + "This is potentially a lack of consistency between the graph and entity DBs.", + urn, + relatedUrn, + relationshipType); + handleError( + new DeleteEntityServiceError( + "Unable to find aspect instance in entity service", + DeleteEntityServiceErrorReason.ENTITY_SERVICE_ASPECT_NOT_FOUND, + ImmutableMap.of( + "urn", + urn, + "relatedUrn", + relatedUrn, + "relationship", + relationshipType, + "aspectSpecs", + aspectSpecs))); + return Stream.empty(); } - /** - * Update an aspect for an urn. - * - * @param urn the urn of the entity to remove the aspect for - * @param aspectName the aspect to remove - * @param prevAspect the old value for the aspect - * @param newAspect the new value for the aspect - */ - private void updateAspect(Urn urn, String aspectName, RecordTemplate prevAspect, RecordTemplate newAspect) { - final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(urn); - proposal.setChangeType(ChangeType.UPSERT); - proposal.setEntityType(urn.getEntityType()); - proposal.setAspectName(aspectName); - proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); - - final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final IngestResult ingestProposalResult = _entityService.ingestProposal(proposal, auditStamp, false); - - if (!ingestProposalResult.isSqlCommitted()) { - log.error("Failed to ingest aspect with references removed. Before {}, after: {}, please check MCP processor" - + " logs for more information", prevAspect, newAspect); - handleError(new DeleteEntityServiceError("Failed to ingest new aspect", - DeleteEntityServiceErrorReason.MCP_PROCESSOR_FAILED, - ImmutableMap.of("proposal", proposal))); - } + return aspectList.stream() + .filter( + envelopedAspect -> + hasRelationshipInstanceTo( + envelopedAspect.getValue(), + urn.getEntityType(), + relationshipType, + aspectSpecs.get(envelopedAspect.getName()))) + .map( + envelopedAspect -> + new EnrichedAspect( + envelopedAspect.getName(), + envelopedAspect.getValue(), + aspectSpecs.get(envelopedAspect.getName()))); + } + + /** + * Utility method to sleep the thread. + * + * @param seconds The number of seconds to sleep. + */ + private void sleep(final Integer seconds) { + try { + TimeUnit.SECONDS.sleep(seconds); + } catch (InterruptedException e) { + log.error("Interrupted sleep", e); } - - - /** - * Utility method that attempts to find Aspect information as well as the associated path spec for a given urn that - * has a relationship of type `relationType` to another urn. - * - * @param relatedUrn The urn of the related entity in which we want to find the aspect that has a relationship - * to `urn`. - * @param aspectSpecs The entity spec of the related entity. - * @return A {@link Stream} of {@link EnvelopedAspect} instances that contain relationships between `urn` & `relatedUrn`. - */ - private Stream<EnvelopedAspect> getAspectsReferringTo(final Urn relatedUrn, - final Map<String, AspectSpec> aspectSpecs) { - - // FIXME: Can we not depend on entity service? 
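[A usage sketch for context, not part of the patch: the reformatting here leaves DeleteEntityService's public contract untouched. The names `service` and `urn` below are illustrative, and `getTotal()` is assumed as the getter counterpart of the `setTotal(...)` call visible in `deleteReferencesTo`.]

// Preview the blast radius with dryRun=true before committing to the destructive pass.
DeleteReferencesResponse preview = service.deleteReferencesTo(urn, /* dryRun */ true);
log.info("Deleting {} would affect {} referencing aspects", urn, preview.getTotal());
if (preview.getTotal() > 0) {
  // The destructive pass walks matching entities in pages of 10000, as seen above.
  service.deleteReferencesTo(urn, /* dryRun */ false);
}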
- final EntityResponse entityResponse; - try { - entityResponse = _entityService.getEntityV2(relatedUrn.getEntityType(), relatedUrn, aspectSpecs.keySet()); - } catch (URISyntaxException e) { - log.error("Unable to retrieve entity data for relatedUrn " + relatedUrn, e); - return Stream.empty(); - } - // Find aspect which contains the relationship with the value we are looking for - return entityResponse - .getAspects() - .values() - .stream() - // Get aspects which contain the relationship field specs found above - .filter(Objects::nonNull) - .filter(aspect -> aspectSpecs.containsKey(aspect.getName())); + } + + /** + * Processes an aspect of a given {@link RelatedEntity} instance that references a given {@link + * Urn}, removes said urn from the aspects and submits an MCP with the updated aspects. + * + * @param urn The urn to be found. + * @param relatedEntity The entity to be modified. + */ + private void deleteReference(final Urn urn, final RelatedEntity relatedEntity) { + final Urn relatedUrn = UrnUtils.getUrn(relatedEntity.getUrn()); + final String relationshipType = relatedEntity.getRelationshipType(); + getAspects(urn, relatedUrn, relationshipType) + .forEach( + enrichedAspect -> { + final String aspectName = enrichedAspect.getName(); + final Aspect aspect = enrichedAspect.getAspect(); + final AspectSpec aspectSpec = enrichedAspect.getSpec(); + + final AtomicReference<Aspect> updatedAspect; + try { + updatedAspect = new AtomicReference<>(aspect.copy()); + } catch (CloneNotSupportedException e) { + log.error("Failed to clone aspect {}", aspect); + handleError( + new DeleteEntityServiceError( + "Failed to clone aspect", + DeleteEntityServiceErrorReason.CLONE_FAILED, + ImmutableMap.of("aspect", aspect))); + return; + } + + aspectSpec.getRelationshipFieldSpecs().stream() + .filter( + relationshipFieldSpec -> + relationshipFieldSpec + .getRelationshipAnnotation() + .getName() + .equals(relationshipType)) + .forEach( + relationshipFieldSpec -> { + final PathSpec path = relationshipFieldSpec.getPath(); + updatedAspect.set( + DeleteEntityUtils.getAspectWithReferenceRemoved( + urn.toString(), + updatedAspect.get(), + aspectSpec.getPegasusSchema(), + path)); + }); + + // If there has been an update, then we produce an MCE. + if (!aspect.equals(updatedAspect.get())) { + if (updatedAspect.get() == null) { + // Then we should remove the aspect. + deleteAspect(relatedUrn, aspectName, aspect); + } else { + // Then we should update the aspect. + updateAspect(relatedUrn, aspectName, aspect, updatedAspect.get()); + } + } + }); + } + + /** + * Delete an existing aspect for an urn. + * + * @param urn the urn of the entity to remove the aspect for + * @param aspectName the aspect to remove + * @param prevAspect the old value for the aspect + */ + private void deleteAspect(Urn urn, String aspectName, RecordTemplate prevAspect) { + final RollbackResult rollbackResult = + _entityService.deleteAspect(urn.toString(), aspectName, new HashMap<>(), true); + if (rollbackResult == null || rollbackResult.getNewValue() != null) { + log.error( + "Failed to delete aspect with references. 
Before {}, after: null, please check GMS" + " logs for more information", + prevAspect); + handleError( + new DeleteEntityServiceError( + "Failed to ingest new aspect", + DeleteEntityServiceErrorReason.ASPECT_DELETE_FAILED, + ImmutableMap.of("urn", urn, "aspectName", aspectName))); + } + } + + /** + * Update an aspect for an urn. + * + * @param urn the urn of the entity to remove the aspect for + * @param aspectName the aspect to remove + * @param prevAspect the old value for the aspect + * @param newAspect the new value for the aspect + */ + private void updateAspect( + Urn urn, String aspectName, RecordTemplate prevAspect, RecordTemplate newAspect) { + final MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(urn); + proposal.setChangeType(ChangeType.UPSERT); + proposal.setEntityType(urn.getEntityType()); + proposal.setAspectName(aspectName); + proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); + + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + final IngestResult ingestProposalResult = + _entityService.ingestProposal(proposal, auditStamp, false); + + if (!ingestProposalResult.isSqlCommitted()) { + log.error( + "Failed to ingest aspect with references removed. Before {}, after: {}, please check MCP processor" + + " logs for more information", + prevAspect, + newAspect); + handleError( + new DeleteEntityServiceError( + "Failed to ingest new aspect", + DeleteEntityServiceErrorReason.MCP_PROCESSOR_FAILED, + ImmutableMap.of("proposal", proposal))); + } + } - - /** - * Utility method that determines whether a given aspect has an instance of a relationship of type relationType - * to a given entity type. - * - * @param aspect The aspect in which to search for the relationship. - * @param entityType The name of the entity the method checks against. - * @param relationType The name of the relationship to search for. - * @param aspectSpec The aspect spec in which to search for a concrete relationship with name=relationType - * and that targets the entityType passed by parameter. - * @return {@code True} if the aspect has a relationship with the intended conditions, {@code False} otherwise. - */ - private boolean hasRelationshipInstanceTo(final Aspect aspect, final String entityType, final String relationType, - final AspectSpec aspectSpec) { - - final RecordTemplate recordTemplate = RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), - aspect.data()); - - final Map<RelationshipFieldSpec, List<Object>> extractFields = FieldExtractor.extractFields(recordTemplate, - aspectSpec.getRelationshipFieldSpecs()); - - // Is there is any instance of the relationship specs defined in the aspect's spec extracted from the - // aspect record instance? - return findRelationshipFor(aspectSpec, relationType, entityType) - .map(extractFields::get) - .filter(Objects::nonNull) - .anyMatch(list -> !list.isEmpty()); - } - - /** - * Computes the set of aspect specs of an entity that contain a relationship of a given name to a specific entity type. - * - * @param relatedEntityType The name of the entity. - * @param relationshipType The name of the relationship. - * @param entitySpec The entity spec from which to retrieve the aspect specs, if any. - * @return A filtered dictionary of aspect name to aspect specs containing only aspects that have a relationship of - * name relationshipType to the given relatedEntityType.
- */ - private Map<String, AspectSpec> getAspectSpecsReferringTo(final String relatedEntityType, final String relationshipType, - final EntitySpec entitySpec) { - return entitySpec - .getAspectSpecMap() - .entrySet() - .stream() - .filter(entry -> findRelationshipFor(entry.getValue(), relationshipType, relatedEntityType).findAny().isPresent()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - /** - * Utility method to find the relationship specs within an AspectSpec with name relationshipName and which has - * relatedEntity name as a valid destination type. - * - * @param spec The aspect spec from which to extract relationship field specs. - * @param relationshipType The name of the relationship to find. - * @param entityType The name of the entity type (i.e: dataset, chart, usergroup, etc...) which the relationship - * is valid for. - * @return The list of relationship field specs which match the criteria. - */ - private Stream<RelationshipFieldSpec> findRelationshipFor(final AspectSpec spec, final String relationshipType, - final String entityType) { - return spec.getRelationshipFieldSpecs().stream() - .filter(relationship -> relationship.getRelationshipName().equals(relationshipType) - && relationship.getValidDestinationTypes().contains(entityType)); - } - - /** - * Entrypoint to handle the various errors that may occur during the execution of the delete entity service. - * @param error The error instance that provides context on what issue occured. - */ - private void handleError(final DeleteEntityServiceError error) { - // NO-OP for now. - } - - @AllArgsConstructor - @Data - private static class DeleteEntityServiceError { - String message; - DeleteEntityServiceErrorReason reason; - Map<String, Object> context; - } - - private enum DeleteEntityServiceErrorReason { - ENTITY_SERVICE_ASPECT_NOT_FOUND, - ENTITY_REGISTRY_SPEC_NOT_FOUND, - MCP_PROCESSOR_FAILED, - ASPECT_DELETE_FAILED, - CLONE_FAILED, - } - - @AllArgsConstructor - @Data - private static class EnrichedAspect { - String name; - Aspect aspect; - AspectSpec spec; + } + + /** + * Utility method that attempts to find Aspect information as well as the associated path spec for + * a given urn that has a relationship of type `relationType` to another urn. + * + * @param relatedUrn The urn of the related entity in which we want to find the aspect that has a + * relationship to `urn`. + * @param aspectSpecs The aspect specs, keyed by aspect name, of the related entity. + * @return A {@link Stream} of {@link EnvelopedAspect} instances that contain relationships + * between `urn` & `relatedUrn`. + */ + private Stream<EnvelopedAspect> getAspectsReferringTo( + final Urn relatedUrn, final Map<String, AspectSpec> aspectSpecs) { + + // FIXME: Can we not depend on entity service? + final EntityResponse entityResponse; + try { + entityResponse = + _entityService.getEntityV2(relatedUrn.getEntityType(), relatedUrn, aspectSpecs.keySet()); + } catch (URISyntaxException e) { + log.error("Unable to retrieve entity data for relatedUrn " + relatedUrn, e); + return Stream.empty(); } + // Find aspect which contains the relationship with the value we are looking for + return entityResponse.getAspects().values().stream() + // Get aspects which contain the relationship field specs found above + .filter(Objects::nonNull) + .filter(aspect -> aspectSpecs.containsKey(aspect.getName())); + } + + /** + * Utility method that determines whether a given aspect has an instance of a relationship of type + * relationType to a given entity type.
+ * + * @param aspect The aspect in which to search for the relationship. + * @param entityType The name of the entity the method checks against. + * @param relationType The name of the relationship to search for. + * @param aspectSpec The aspect spec in which to search for a concrete relationship with + * name=relationType and that targets the entityType passed by parameter. + * @return {@code True} if the aspect has a relationship with the intended conditions, {@code + * False} otherwise. + */ + private boolean hasRelationshipInstanceTo( + final Aspect aspect, + final String entityType, + final String relationType, + final AspectSpec aspectSpec) { + + final RecordTemplate recordTemplate = + RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), aspect.data()); + + final Map<RelationshipFieldSpec, List<Object>> extractFields = + FieldExtractor.extractFields(recordTemplate, aspectSpec.getRelationshipFieldSpecs()); + + // Is there any instance of the relationship specs defined in the aspect's spec extracted + // from the + // aspect record instance? + return findRelationshipFor(aspectSpec, relationType, entityType) + .map(extractFields::get) + .filter(Objects::nonNull) + .anyMatch(list -> !list.isEmpty()); + } + + /** + * Computes the set of aspect specs of an entity that contain a relationship of a given name to a + * specific entity type. + * + * @param relatedEntityType The name of the entity. + * @param relationshipType The name of the relationship. + * @param entitySpec The entity spec from which to retrieve the aspect specs, if any. + * @return A filtered dictionary of aspect name to aspect specs containing only aspects that have + * a relationship of name relationshipType to the given relatedEntityType. + */ + private Map<String, AspectSpec> getAspectSpecsReferringTo( + final String relatedEntityType, final String relationshipType, final EntitySpec entitySpec) { + return entitySpec.getAspectSpecMap().entrySet().stream() + .filter( + entry -> + findRelationshipFor(entry.getValue(), relationshipType, relatedEntityType) + .findAny() + .isPresent()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + /** + * Utility method to find the relationship specs within an AspectSpec with name relationshipName + * and which has relatedEntity name as a valid destination type. + * + * @param spec The aspect spec from which to extract relationship field specs. + * @param relationshipType The name of the relationship to find. + * @param entityType The name of the entity type (e.g.: dataset, chart, usergroup, etc...) which + * the relationship is valid for. + * @return The list of relationship field specs which match the criteria. + */ + private Stream<RelationshipFieldSpec> findRelationshipFor( + final AspectSpec spec, final String relationshipType, final String entityType) { + return spec.getRelationshipFieldSpecs().stream() + .filter( + relationship -> + relationship.getRelationshipName().equals(relationshipType) + && relationship.getValidDestinationTypes().contains(entityType)); + } + + /** + * Entrypoint to handle the various errors that may occur during the execution of the delete + * entity service. + * + * @param error The error instance that provides context on what issue occurred. + */ + private void handleError(final DeleteEntityServiceError error) { + // NO-OP for now.
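[A composition sketch, not from the patch: the three private helpers above chain together as a registry query. "dataset" and "DownstreamOf" are illustrative entity and relationship names; within this class the call would look like the following.]

// Which aspects of a dataset can reference another dataset via "DownstreamOf"?
EntitySpec datasetSpec = _entityService.getEntityRegistry().getEntitySpec("dataset");
Map<String, AspectSpec> referringAspects =
    getAspectSpecsReferringTo("dataset", "DownstreamOf", datasetSpec);
boolean canReference =
    referringAspects.values().stream()
        .flatMap(spec -> findRelationshipFor(spec, "DownstreamOf", "dataset"))
        .findAny()
        .isPresent();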
+ } + + @AllArgsConstructor + @Data + private static class DeleteEntityServiceError { + String message; + DeleteEntityServiceErrorReason reason; + Map<String, Object> context; + } + + private enum DeleteEntityServiceErrorReason { + ENTITY_SERVICE_ASPECT_NOT_FOUND, + ENTITY_REGISTRY_SPEC_NOT_FOUND, + MCP_PROCESSOR_FAILED, + ASPECT_DELETE_FAILED, + CLONE_FAILED, + } + + @AllArgsConstructor + @Data + private static class EnrichedAspect { + String name; + Aspect aspect; + AspectSpec spec; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java index 58b5341c4ae0c..0a8b5880e5bce 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java @@ -13,43 +13,45 @@ import java.util.ListIterator; import lombok.extern.slf4j.Slf4j; - /** - * Utility class that encapsulates the logic on how to modify a {@link RecordTemplate} in place to remove a single value - * following a concrete set of rules. + * Utility class that encapsulates the logic on how to modify a {@link RecordTemplate} in place to + * remove a single value following a concrete set of rules. * - * It does this by a recursive tree traversal method, based on an aspect path provided for a given aspect. - * This so that we don't have to scan the entire aspect for the value to be removed and then figure out how to apply - * logical rules based on upstream optionality definitions. + * <p>It does this by a recursive tree traversal method, based on an aspect path provided for a + * given aspect. This is so that we don't have to scan the entire aspect for the value to be removed + * and then figure out how to apply logical rules based on upstream optionality definitions. * - * For more information see {@link #getAspectWithReferenceRemoved(String, RecordTemplate, DataSchema, PathSpec)} + * <p>For more information see {@link #getAspectWithReferenceRemoved(String, RecordTemplate, + * DataSchema, PathSpec)} */ @Slf4j public class DeleteEntityUtils { - private DeleteEntityUtils() { } + private DeleteEntityUtils() {} /** - * Utility method that removes fields from a given aspect based on its aspect spec that follows the following logic: - * - * 1. If field is optional and not part of an array → remove the field. - * 2. If is a field that is part of an array (has an `*` in the path spec) - * → go up to the nearest array and remove the element. - * Extra → If array only has 1 element which is being deleted→ optional rules (if optional set null, otherwise delete) - * 3. If field is non-optional and does not belong to an array delete if and only if aspect becomes empty. + * Utility method that removes fields from a given aspect based on its aspect spec that follows + * the following logic: * - * @param value Value to be removed from Aspect. - * @param aspect Aspect in which the value property exists. - * @param schema {@link DataSchema} of the aspect being processed. - * @param aspectPath Path within the aspect to where the value can be found. + * <p>1. If field is optional and not part of an array → remove the field. 2. If the field + * is part of an array (has an `*` in the path spec) → go up to the nearest array and remove the + * element. Extra → If array only has 1 element which is being deleted → optional rules (if + * optional set null, otherwise delete) 3.
If field is non-optional and does not belong to an + * array delete if and only if aspect becomes empty. * - * @return A deep copy of the aspect. Modified if the value was found and according to the logic specified above. - * Otherwise, a copy of the original aspect is returned. + * @param value Value to be removed from Aspect. + * @param aspect Aspect in which the value property exists. + * @param schema {@link DataSchema} of the aspect being processed. + * @param aspectPath Path within the aspect to where the value can be found. + * @return A deep copy of the aspect. Modified if the value was found and according to the logic + * specified above. Otherwise, a copy of the original aspect is returned. */ - public static Aspect getAspectWithReferenceRemoved(String value, RecordTemplate aspect, DataSchema schema, PathSpec aspectPath) { + public static Aspect getAspectWithReferenceRemoved( + String value, RecordTemplate aspect, DataSchema schema, PathSpec aspectPath) { try { final DataMap copy = aspect.copy().data(); - final DataComplex newValue = removeValueBasedOnPath(value, schema, copy, aspectPath.getPathComponents(), 0); + final DataComplex newValue = + removeValueBasedOnPath(value, schema, copy, aspectPath.getPathComponents(), 0); if (newValue == null) { // If the new value is null, we should remove the aspect. return null; @@ -63,49 +65,54 @@ public static Aspect getAspectWithReferenceRemoved(String value, RecordTemplate /** * This method chooses which sub method to invoke based on the path being iterated on. * - * @param value The value to be removed from the data complex object. Merely propagated down in this method. - * @param schema The schema of the data complex being visited. Merely propagated down in this method. - * @param o The data complex instance with the actual data being visited. - * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value - * to be removed. - * @param index The current index in the list of path components the method is meant to process. - * @return The updated DataComplex object without the value given as input. + * @param value The value to be removed from the data complex object. Merely propagated down in + * this method. + * @param schema The schema of the data complex being visited. Merely propagated down in this + * method. + * @param o The data complex instance with the actual data being visited. + * @param pathComponents The list of sub-strings representing the path from the root of the data + * complex to the value to be removed. + * @param index The current index in the list of path components the method is meant to process. + * @return The updated DataComplex object without the value given as input. 
*/ - private static DataComplex removeValueBasedOnPath(String value, DataSchema schema, DataComplex o, List<String> pathComponents, - int index) { + private static DataComplex removeValueBasedOnPath( + String value, DataSchema schema, DataComplex o, List<String> pathComponents, int index) { final String subPath = pathComponents.get(index); // Processing an array if (subPath.equals("*")) { // Process each entry - return removeValueFromArray(value, (ArrayDataSchema) schema, (DataList) o, pathComponents, index); + return removeValueFromArray( + value, (ArrayDataSchema) schema, (DataList) o, pathComponents, index); } else { // Processing a map - return removeValueFromMap(value, (RecordDataSchema) schema, (DataMap) o, pathComponents, index); + return removeValueFromMap( + value, (RecordDataSchema) schema, (DataMap) o, pathComponents, index); } } /** - * This method is used to visit and remove values from DataMap instances if they are the leaf nodes of the original - * data complex object. + * This method is used to visit and remove values from DataMap instances if they are the leaf + * nodes of the original data complex object. * - * Note that this method has side effects and mutates the provided DataMap instance. + * <p>Note that this method has side effects and mutates the provided DataMap instance. * - * @param value The value to be removed from the data map object. - * @param spec The schema of the data complex being visited. Used to get information of the optionallity of - * the data map being processed. - * @param record The data list instance with the actual data being visited. - * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value - * to be removed. - * @param index The current index in the list of path components the method is meant to process. - * @return The updated DataComplex object without the value given as input. + * @param value The value to be removed from the data map object. + * @param spec The schema of the data complex being visited. Used to get information of the + * optionality of the data map being processed. + * @param record The data map instance with the actual data being visited. + * @param pathComponents The list of sub-strings representing the path from the root of the data + * complex to the value to be removed. + * @param index The current index in the list of path components the method is meant to process. + * @return The updated DataComplex object without the value given as input.
*/ - private static DataComplex removeValueFromMap(String value, RecordDataSchema spec, DataMap record, List<String> pathComponents, - int index) { + private static DataComplex removeValueFromMap( + String value, RecordDataSchema spec, DataMap record, List<String> pathComponents, int index) { // If in the last component of the path spec if (index == pathComponents.size() - 1) { boolean canDelete = spec.getField(pathComponents.get(index)).getOptional(); - boolean valueExistsInRecord = record.getOrDefault(pathComponents.get(index), "").equals(value); + boolean valueExistsInRecord = + record.getOrDefault(pathComponents.get(index), "").equals(value); if (valueExistsInRecord) { if (canDelete) { record.remove(pathComponents.get(index)); @@ -114,17 +121,26 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe return null; } } else { - log.error("[Reference removal logic] Unable to find value {} in data map {} at path {}", value, record, - pathComponents.subList(0, index)); + log.error( + "[Reference removal logic] Unable to find value {} in data map {} at path {}", + value, + record, + pathComponents.subList(0, index)); } } else { // else traverse further down the tree. final String key = pathComponents.get(index); final boolean optionalField = spec.getField(key).getOptional(); - // Check if key exists, this may not exist because you are in wrong branch of the tree (i.e: iterating for an array) + // Check if key exists; it may not exist because you are in the wrong branch of the tree (i.e: + // iterating for an array) if (record.containsKey(key)) { - final DataComplex result = removeValueBasedOnPath(value, spec.getField(key).getType(), (DataComplex) record.get(key), pathComponents, - index + 1); + final DataComplex result = + removeValueBasedOnPath( + value, + spec.getField(key).getType(), + (DataComplex) record.get(key), + pathComponents, + index + 1); if (result == null) { if (optionalField) { @@ -134,7 +150,9 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe } else { // Not optional and not the only field, then this is a bad delete. Need to throw. throw new UnsupportedOperationException( - String.format("Delete failed! Failed to field with name %s from DataMap. The field is required!", key)); + String.format( + "Delete failed! Failed to remove field with name %s from DataMap. The field is required!", + key)); } } else { record.put(key, result); @@ -145,32 +163,40 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe } /** - * This method is used to visit and remove values from DataList instances if they are the leaf nodes of the original - * data complex object. + * This method is used to visit and remove values from DataList instances if they are the leaf + * nodes of the original data complex object. * - * Note that this method has side effects and mutates the provided DataMap instance. + * <p>Note that this method has side effects and mutates the provided DataList instance. * - * @param value The value to be removed from the data list object.
+ * @param record The array schema describing the data list (aspectList) being visited. + * @param pathComponents The list of sub-strings representing the path from the root of the data + * complex to the value to be removed. + * @param index The current index in the list of path components the method is meant to process. + * @return The updated DataComplex object without the value given as input. */ - private static DataComplex removeValueFromArray(String value, ArrayDataSchema record, DataList aspectList, - List<String> pathComponents, int index) { + private static DataComplex removeValueFromArray( + String value, + ArrayDataSchema record, + DataList aspectList, + List<String> pathComponents, + int index) { // If in the last component of the path spec if (index == pathComponents.size() - 1) { final boolean found = aspectList.remove(value); if (!found) { - log.error(String.format("Unable to find value %s in aspect list %s at path %s", value, aspectList, - pathComponents.subList(0, index))); + log.error( + String.format( + "Unable to find value %s in aspect list %s at path %s", + value, aspectList, pathComponents.subList(0, index))); } } else { // else traverse further down the tree. final ListIterator<Object> it = aspectList.listIterator(); while (it.hasNext()) { final Object aspect = it.next(); - final DataComplex result = removeValueBasedOnPath(value, record.getItems(), (DataComplex) aspect, pathComponents, index + 1); + final DataComplex result = + removeValueBasedOnPath( + value, record.getItems(), (DataComplex) aspect, pathComponents, index + 1); if (result == null) { it.remove(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index b7607053df8e3..8654df4435cd6 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -33,11 +33,11 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface EntityService { /** * Just whether the entity/aspect exists + * * @param urn urn for the entity * @param aspectName aspect for the entity * @return exists or not @@ -45,35 +45,37 @@ public interface EntityService { Boolean exists(Urn urn, String aspectName); /** - * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided - * set of aspect names. + * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided set + * of aspect names. * * @param urns set of urns to fetch aspects for * @param aspectNames aspects to fetch for each urn in urns set * @return a map of provided {@link Urn} to a List containing the requested aspects. */ Map<Urn, List<RecordTemplate>> getLatestAspects( - @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames); + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames); - Map<String, RecordTemplate> getLatestAspectsForUrn(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames); + Map<String, RecordTemplate> getLatestAspectsForUrn( + @Nonnull final Urn urn, @Nonnull final Set<String> aspectNames); /** * Retrieves an aspect having a specific {@link Urn}, name, & version. * - * Note that once we drop support for legacy aspect-specific resources, - * we should make this a protected method. Only visible for backwards compatibility.
+ * <p>Note that once we drop support for legacy aspect-specific resources, we should make this a + * protected method. Only visible for backwards compatibility. * * @param urn an urn associated with the requested aspect * @param aspectName name of the aspect requested * @param version specific version of the aspect being requests - * @return the {@link RecordTemplate} representation of the requested aspect object, or null if one cannot be found + * @return the {@link RecordTemplate} representation of the requested aspect object, or null if + * one cannot be found */ - RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version); + RecordTemplate getAspect( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version); /** - * Retrieves the latest aspects for the given urn as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given urn as dynamic aspect objects (Without having to + * define union objects) * * @param entityName name of the entity to fetch * @param urn urn of entity to fetch @@ -83,11 +85,12 @@ Map<Urn, List<RecordTemplate>> getLatestAspects( EntityResponse getEntityV2( @Nonnull final String entityName, @Nonnull final Urn urn, - @Nonnull final Set<String> aspectNames) throws URISyntaxException; + @Nonnull final Set<String> aspectNames) + throws URISyntaxException; /** - * Retrieves the latest aspects for the given set of urns as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given set of urns as dynamic aspect objects (Without + * having to define union objects) * * @param entityName name of the entity to fetch * @param urns set of urns to fetch @@ -97,19 +100,21 @@ EntityResponse getEntityV2( Map<Urn, EntityResponse> getEntitiesV2( @Nonnull final String entityName, @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) throws URISyntaxException; + @Nonnull final Set<String> aspectNames) + throws URISyntaxException; /** - * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects (Without + * having to define union objects) * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link Entity} object */ Map<Urn, EntityResponse> getEntitiesVersionedV2( - @Nonnull final Set<VersionedUrn> versionedUrns, - @Nonnull final Set<String> aspectNames) throws URISyntaxException; + @Nonnull final Set<VersionedUrn> versionedUrns, @Nonnull final Set<String> aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects @@ -121,20 +126,20 @@ Map<Urn, EntityResponse> getEntitiesVersionedV2( */ Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects( // TODO: entityName is unused, can we remove this as a param? 
- @Nonnull String entityName, - @Nonnull Set<Urn> urns, - @Nonnull Set<String> aspectNames) throws URISyntaxException; + @Nonnull String entityName, @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link EnvelopedAspect} object */ Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( - @Nonnull Set<VersionedUrn> versionedUrns, - @Nonnull Set<String> aspectNames) throws URISyntaxException; + @Nonnull Set<VersionedUrn> versionedUrns, @Nonnull Set<String> aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspect for the given urn as a list of enveloped aspects @@ -145,9 +150,8 @@ Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( * @return {@link EnvelopedAspect} object, or null if one cannot be found */ EnvelopedAspect getLatestEnvelopedAspect( - @Nonnull final String entityName, - @Nonnull final Urn urn, - @Nonnull final String aspectName) throws Exception; + @Nonnull final String entityName, @Nonnull final Urn urn, @Nonnull final String aspectName) + throws Exception; @Deprecated VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version); @@ -158,19 +162,27 @@ ListResult<RecordTemplate> listLatestAspects( final int start, final int count); - List<UpdateAspectResult> ingestAspects(@Nonnull final Urn urn, @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest, - @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata); + List<UpdateAspectResult> ingestAspects( + @Nonnull final Urn urn, + @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest, + @Nonnull final AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata); - List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch, @Nonnull final AuditStamp auditStamp, - boolean emitMCL, boolean overwrite); + List<UpdateAspectResult> ingestAspects( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean emitMCL, + boolean overwrite); /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataAuditEvent}. * - * This method runs a read -> write atomically in a single transaction, this is to prevent multiple IDs from being created. + * <p>This method runs a read -> write atomically in a single transaction, this is to prevent + * multiple IDs from being created. * - * Note that in general, this should not be used externally. It is currently serving upgrade scripts and - * is as such public. + * <p>Note that in general, this should not be used externally. It is currently serving upgrade + * scripts and is as such public. 
* * @param urn an urn associated with the new aspect * @param aspectName name of the aspect being inserted @@ -179,17 +191,22 @@ List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch, * @param systemMetadata * @return the {@link RecordTemplate} representation of the written aspect object */ - RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, @Nonnull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata); + RecordTemplate ingestAspectIfNotPresent( + @Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata); // TODO: Why not in RetentionService? - String batchApplyRetention(Integer start, Integer count, Integer attemptWithVersion, String aspectName, - String urn); + String batchApplyRetention( + Integer start, Integer count, Integer attemptWithVersion, String aspectName, String urn); Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLike); // TODO: Extract this to a different service, doesn't need to be here - RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger); + RestoreIndicesResult restoreIndices( + @Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger); ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count); @@ -199,63 +216,76 @@ String batchApplyRetention(Integer start, Integer count, Integer attemptWithVers @Deprecated Map<Urn, Entity> getEntities(@Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames); - Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, AspectSpec aspectSpec, + Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + AspectSpec aspectSpec, @Nonnull final MetadataChangeLog metadataChangeLog); - Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, - @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, - @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, - @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, - @Nonnull final ChangeType changeType); + Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, + @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, + @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, + @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType); RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName); @Deprecated - void ingestEntities(@Nonnull final List<Entity> entities, @Nonnull final AuditStamp auditStamp, + void ingestEntities( + @Nonnull final List<Entity> entities, + @Nonnull final AuditStamp auditStamp, @Nonnull final List<SystemMetadata> systemMetadata); @Deprecated SystemMetadata ingestEntity(Entity entity, AuditStamp auditStamp); @Deprecated - void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp, + void ingestEntity( + @Nonnull Entity entity, + @Nonnull AuditStamp auditStamp, @Nonnull SystemMetadata systemMetadata); void setRetentionService(RetentionService retentionService); AspectSpec 
getKeyAspectSpec(@Nonnull final Urn urn); - Optional<AspectSpec> getAspectSpec(@Nonnull final String entityName, @Nonnull final String aspectName); + Optional<AspectSpec> getAspectSpec( + @Nonnull final String entityName, @Nonnull final String aspectName); String getKeyAspectName(@Nonnull final Urn urn); /** * Generate default aspects if not present in the database. + * * @param urn entity urn * @param includedAspects aspects being written * @return additional aspects to be written */ - List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects); + List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing( + @Nonnull final Urn urn, Map<String, RecordTemplate> includedAspects); /** - * Generate default aspects if the entity key aspect is NOT in the database **AND** - * the key aspect is being written, present in `includedAspects`. - * - * Does not automatically create key aspects. - * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration + * Generate default aspects if the entity key aspect is NOT in the database **AND** the key aspect + * is being written, present in `includedAspects`. * - * This version is more efficient in that it only generates additional writes - * when a new entity is being minted for the first time. The drawback is that it will not automatically - * add key aspects, in case the producer is not bothering to ensure that the entity exists - * before writing non-key aspects. + * <p>Does not automatically create key aspects. * + * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration + * <p>This version is more efficient in that it only generates additional writes when a new + * entity is being minted for the first time. The drawback is that it will not automatically + * add key aspects, in case the producer is not bothering to ensure that the entity exists + * before writing non-key aspects. 
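A hedged sketch of the first variant above (the `entityService` handle, the urn, and the aspect map are assumptions for illustration; `Pair` is the same pair type used by `ingestAspects`):

    // Ask which default aspects (e.g. the entity key aspect) still need to be written
    // alongside the aspects the caller is about to ingest for this urn.
    List<Pair<String, RecordTemplate>> defaults =
        entityService.generateDefaultAspectsIfMissing(
            urn, Map.of("status", new Status().setRemoved(false)));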
* @param urn entity urn * @param includedAspects aspects being written * @return whether key aspect exists in database and the additional aspects to be written */ - Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects); + Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite( + @Nonnull final Urn urn, Map<String, RecordTemplate> includedAspects); AspectSpec getKeyAspectSpec(@Nonnull final String entityName); @@ -263,24 +293,30 @@ Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstW EntityRegistry getEntityRegistry(); - RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map<String, String> conditions, boolean hardDelete); + RollbackResult deleteAspect( + String urn, String aspectName, @Nonnull Map<String, String> conditions, boolean hardDelete); RollbackRunResult deleteUrn(Urn urn); - RollbackRunResult rollbackRun(List<AspectRowSummary> aspectRows, String runId, boolean hardDelete); + RollbackRunResult rollbackRun( + List<AspectRowSummary> aspectRows, String runId, boolean hardDelete); - RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete); + RollbackRunResult rollbackWithConditions( + List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete); - Set<IngestResult> ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async); + Set<IngestResult> ingestProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async); /** * If you have more than 1 proposal use the {AspectsBatch} method + * * @param proposal the metadata proposal to ingest * @param auditStamp audit information * @param async async ingestion or sync ingestion * @return ingestion result */ - IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); + IngestResult ingestProposal( + MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); Boolean exists(Urn urn); @@ -293,15 +329,17 @@ Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstW /** * Builds the default browse path V2 aspects for all entities. * - * This method currently supports datasets, charts, dashboards, and data jobs best. Everything else - * will have a basic "Default" folder added to their browsePathV2. + * <p>This method currently supports datasets, charts, dashboards, and data jobs best. Everything + * else will have a basic "Default" folder added to their browsePathV2. */ @Nonnull - BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException; + BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) + throws URISyntaxException; /** - * Allow internal use of the system entity client. Solves recursive dependencies between the EntityService - * and the SystemJavaEntityClient + * Allow internal use of the system entity client. 
Solves recursive dependencies between the + * EntityService and the SystemJavaEntityClient + * * @param systemEntityClient system entity client */ void setSystemEntityClient(SystemEntityClient systemEntityClient); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java index 27c51e050deff..1ef818559faae 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java @@ -3,7 +3,6 @@ import com.linkedin.common.urn.Urn; import lombok.Value; - @Value public class IngestProposalResult { Urn urn; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java index 5e4ed6259a7f7..3e72a763fb17c 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java @@ -8,11 +8,11 @@ @Builder(toBuilder = true) @Value public class IngestResult { - Urn urn; - AbstractBatchItem request; - boolean publishedMCL; - boolean processedMCL; - boolean publishedMCP; - boolean sqlCommitted; - boolean isUpdate; // update else insert + Urn urn; + AbstractBatchItem request; + boolean publishedMCL; + boolean processedMCL; + boolean publishedMCP; + boolean sqlCommitted; + boolean isUpdate; // update else insert } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java index 21b07e59a2bf0..e6bf82b764484 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java @@ -7,7 +7,8 @@ import lombok.Value; /** - * An immutable value class that holds the result of a list operation and other pagination information. + * An immutable value class that holds the result of a list operation and other pagination + * information. * * @param <T> the result type */ diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java index 1cdd9965c4bfc..51519f48bd975 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java @@ -25,18 +25,16 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.Builder; import lombok.SneakyThrows; import lombok.Value; - /** * Service coupled with an {@link EntityServiceImpl} to handle aspect record retention. * - * TODO: This class is abstract with storage-specific implementations. It'd be nice to pull storage and retention - * concerns apart, let (into {@link AspectDao}) deal with storage, and merge all retention concerns into a single - * class. + * <p>TODO: This class is abstract with storage-specific implementations. It'd be nice to pull + * storage and retention concerns apart, let (into {@link AspectDao}) deal with storage, and merge + * all retention concerns into a single class. 
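A sketch of the default-policy registration this service exposes via `setRetention` below; the `retentionService` handle is an assumption, and the `VersionBasedRetention` record name is inferred from the getters used in `validateRetention` further down:

    // Hypothetical: keep at most 20 versions of every aspect of every entity.
    DataHubRetentionConfig config =
        new DataHubRetentionConfig()
            .setRetention(
                new Retention().setVersion(new VersionBasedRetention().setMaxVersions(20)));
    // Null entity/aspect names register the "*"/"*" default policy.
    retentionService.setRetention(null, null, config);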
*/ public abstract class RetentionService { protected static final String ALL = "*"; @@ -44,8 +42,8 @@ public abstract class RetentionService { protected abstract EntityService getEntityService(); /** - * Fetch retention policies given the entityName and aspectName - * Uses the entity service to fetch the latest retention policies set for the input entity and aspect + * Fetch retention policies given the entityName and aspectName Uses the entity service to fetch + * the latest retention policies set for the input entity and aspect * * @param entityName Name of the entity * @param aspectName Name of the aspect @@ -55,19 +53,24 @@ public Retention getRetention(@Nonnull String entityName, @Nonnull String aspect // Prioritized list of retention keys to fetch List<Urn> retentionUrns = getRetentionKeys(entityName, aspectName); Map<Urn, List<RecordTemplate>> fetchedAspects = - getEntityService().getLatestAspects(new HashSet<>(retentionUrns), ImmutableSet.of(Constants.DATAHUB_RETENTION_ASPECT)); + getEntityService() + .getLatestAspects( + new HashSet<>(retentionUrns), ImmutableSet.of(Constants.DATAHUB_RETENTION_ASPECT)); // Find the first retention info that is set among the prioritized list of retention keys above - Optional<DataHubRetentionConfig> retentionInfo = retentionUrns.stream() - .flatMap(urn -> fetchedAspects.getOrDefault(urn, Collections.emptyList()) - .stream() - .filter(aspect -> aspect instanceof DataHubRetentionConfig)) - .map(retention -> (DataHubRetentionConfig) retention) - .findFirst(); + Optional<DataHubRetentionConfig> retentionInfo = + retentionUrns.stream() + .flatMap( + urn -> + fetchedAspects.getOrDefault(urn, Collections.emptyList()).stream() + .filter(aspect -> aspect instanceof DataHubRetentionConfig)) + .map(retention -> (DataHubRetentionConfig) retention) + .findFirst(); return retentionInfo.map(DataHubRetentionConfig::getRetention).orElse(new Retention()); } // Get list of datahub retention keys that match the input entity name and aspect name - protected static List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull String aspectName) { + protected static List<Urn> getRetentionKeys( + @Nonnull String entityName, @Nonnull String aspectName) { return ImmutableList.of( new DataHubRetentionKey().setEntityName(entityName).setAspectName(aspectName), new DataHubRetentionKey().setEntityName(entityName).setAspectName(ALL), @@ -79,22 +82,26 @@ protected static List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull } /** - * Set retention policy for given entity and aspect. If entity or aspect names are null, the policy is set as default + * Set retention policy for given entity and aspect. If entity or aspect names are null, the + * policy is set as default * - * @param entityName Entity name to apply policy to. If null, set as "*", - * meaning it will be the default for any entities without specified policy - * @param aspectName Aspect name to apply policy to. If null, set as "*", - * meaning it will be the default for any aspects without specified policy + * @param entityName Entity name to apply policy to. If null, set as "*", meaning it will be the + * default for any entities without specified policy + * @param aspectName Aspect name to apply policy to. 
If null, set as "*", meaning it will be the + * default for any aspects without specified policy * @param retentionConfig Retention policy */ @SneakyThrows - public boolean setRetention(@Nullable String entityName, @Nullable String aspectName, - @Nonnull DataHubRetentionConfig retentionConfig) { + public boolean setRetention( + @Nullable String entityName, + @Nullable String aspectName, + @Nonnull DataHubRetentionConfig retentionConfig) { validateRetention(retentionConfig.getRetention()); DataHubRetentionKey retentionKey = new DataHubRetentionKey(); retentionKey.setEntityName(entityName != null ? entityName : ALL); retentionKey.setAspectName(aspectName != null ? aspectName : ALL); - Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); + Urn retentionUrn = + EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); MetadataChangeProposal keyProposal = new MetadataChangeProposal(); GenericAspect keyAspect = GenericRecordUtils.serializeAspect(retentionKey); @@ -110,11 +117,13 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect aspectProposal.setAspectName(Constants.DATAHUB_RETENTION_ASPECT); AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); AspectsBatch batch = buildAspectsBatch(List.of(keyProposal, aspectProposal)); return getEntityService().ingestProposal(batch, auditStamp, false).stream() - .anyMatch(IngestResult::isSqlCommitted); + .anyMatch(IngestResult::isSqlCommitted); } protected abstract AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps); @@ -122,28 +131,31 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect /** * Delete the retention policy set for given entity and aspect. * - * @param entityName Entity name to apply policy to. If null, set as "*", - * meaning it will delete the default policy for any entities without specified policy - * @param aspectName Aspect name to apply policy to. If null, set as "*", - * meaning it will delete the default policy for any aspects without specified policy + * @param entityName Entity name to apply policy to. If null, set as "*", meaning it will delete + * the default policy for any entities without specified policy + * @param aspectName Aspect name to apply policy to. If null, set as "*", meaning it will delete + * the default policy for any aspects without specified policy */ public void deleteRetention(@Nullable String entityName, @Nullable String aspectName) { DataHubRetentionKey retentionKey = new DataHubRetentionKey(); retentionKey.setEntityName(entityName != null ? entityName : ALL); retentionKey.setAspectName(aspectName != null ? 
aspectName : ALL); - Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); + Urn retentionUrn = + EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); getEntityService().deleteUrn(retentionUrn); } private void validateRetention(Retention retention) { if (retention.hasVersion()) { if (retention.getVersion().getMaxVersions() <= 0) { - throw new IllegalArgumentException("Invalid maxVersions: " + retention.getVersion().getMaxVersions()); + throw new IllegalArgumentException( + "Invalid maxVersions: " + retention.getVersion().getMaxVersions()); } } if (retention.hasTime()) { if (retention.getTime().getMaxAgeInSeconds() <= 0) { - throw new IllegalArgumentException("Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds()); + throw new IllegalArgumentException( + "Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds()); } } } @@ -151,33 +163,39 @@ private void validateRetention(Retention retention) { /** * Apply retention policies given the urn and aspect name * - * @param retentionContexts urn, aspect name, and additional context that could be used to apply retention + * @param retentionContexts urn, aspect name, and additional context that could be used to apply + * retention */ public void applyRetentionWithPolicyDefaults(@Nonnull List<RetentionContext> retentionContexts) { - List<RetentionContext> withDefaults = retentionContexts.stream() - .map(context -> { - if (context.getRetentionPolicy().isEmpty()) { - Retention retentionPolicy = getRetention(context.getUrn().getEntityType(), context.getAspectName()); - return context.toBuilder() + List<RetentionContext> withDefaults = + retentionContexts.stream() + .map( + context -> { + if (context.getRetentionPolicy().isEmpty()) { + Retention retentionPolicy = + getRetention(context.getUrn().getEntityType(), context.getAspectName()); + return context.toBuilder() .retentionPolicy(Optional.of(retentionPolicy)) .build(); - } else { - return context; - } - }) - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()) + } else { + return context; + } + }) + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) .collect(Collectors.toList()); applyRetention(withDefaults); } /** - * Apply retention policies given the urn and aspect name and policies. This protected - * method assumes that the policy is provided, however we likely need to fetch these - * from system configuration. + * Apply retention policies given the urn and aspect name and policies. This protected method + * assumes that the policy is provided, however we likely need to fetch these from system + * configuration. * - * Users of this should use {@link #applyRetentionWithPolicyDefaults(List<RetentionContext>)}) + * <p>Users of this should use {@link #applyRetentionWithPolicyDefaults(List<RetentionContext>)}) * * @param retentionContexts Additional context that could be used to apply retention */ @@ -189,23 +207,19 @@ public void applyRetentionWithPolicyDefaults(@Nonnull List<RetentionContext> ret * @param entityName Name of the entity to apply retention to. If null, applies to all entities * @param aspectName Name of the aspect to apply retention to. 
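A sketch of the per-aspect entry point above, assuming a `retentionService` handle and a fabricated urn; `RetentionContext` and its builder are defined at the bottom of this class:

    // Enforce whatever policy applies to one aspect of one entity. Leaving retentionPolicy
    // at its Optional.empty() default makes the service fall back to getRetention().
    // (Urn.createFromString declares URISyntaxException.)
    RetentionService.RetentionContext ctx =
        RetentionService.RetentionContext.builder()
            .urn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hive,db.tbl,PROD)"))
            .aspectName("datasetProperties")
            .build();
    retentionService.applyRetentionWithPolicyDefaults(List.of(ctx));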
If null, applies to all aspects */ - public abstract void batchApplyRetention(@Nullable String entityName, @Nullable String aspectName); + public abstract void batchApplyRetention( + @Nullable String entityName, @Nullable String aspectName); - /** - * Batch apply retention to all records within the start, end count - */ - public abstract BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args); + /** Batch apply retention to all records within the start, end count */ + public abstract BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args); @Value @Builder(toBuilder = true) public static class RetentionContext { - @Nonnull - Urn urn; - @Nonnull - String aspectName; - @Builder.Default - Optional<Retention> retentionPolicy = Optional.empty(); - @Builder.Default - Optional<Long> maxVersion = Optional.empty(); + @Nonnull Urn urn; + @Nonnull String aspectName; + @Builder.Default Optional<Retention> retentionPolicy = Optional.empty(); + @Builder.Default Optional<Long> maxVersion = Optional.empty(); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java index 76a12a67b3aaf..9955a58c65339 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java @@ -1,13 +1,11 @@ package com.linkedin.metadata.entity; import com.linkedin.common.urn.Urn; - import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.mxe.SystemMetadata; import lombok.Value; - @Value public class RollbackResult { public Urn urn; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java index 02776b7de4d09..a8c558df77e57 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java @@ -4,7 +4,6 @@ import java.util.List; import lombok.Value; - @Value public class RollbackRunResult { public List<AspectRowSummary> rowsRolledBack; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java index 06199814d30dd..a10c90bc45320 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java @@ -6,12 +6,10 @@ import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.SystemMetadata; +import java.util.concurrent.Future; import lombok.Builder; import lombok.Value; -import java.util.concurrent.Future; - - @Builder(toBuilder = true) @Value public class UpdateAspectResult { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java index 64511325d96d2..d8fcbe0b7d44d 100644 --- 
a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java @@ -4,51 +4,51 @@ @Data public class RestoreIndicesArgs implements Cloneable { - public int start = 0; - public int batchSize = 10; - public int numThreads = 1; - public long batchDelayMs = 1; - public String aspectName; - public String urn; - public String urnLike; - - @Override - public RestoreIndicesArgs clone() { - try { - RestoreIndicesArgs clone = (RestoreIndicesArgs) super.clone(); - // TODO: copy mutable state here, so the clone can't change the internals of the original - return clone; - } catch (CloneNotSupportedException e) { - throw new AssertionError(); - } + public int start = 0; + public int batchSize = 10; + public int numThreads = 1; + public long batchDelayMs = 1; + public String aspectName; + public String urn; + public String urnLike; + + @Override + public RestoreIndicesArgs clone() { + try { + RestoreIndicesArgs clone = (RestoreIndicesArgs) super.clone(); + // TODO: copy mutable state here, so the clone can't change the internals of the original + return clone; + } catch (CloneNotSupportedException e) { + throw new AssertionError(); } - - public RestoreIndicesArgs setAspectName(String aspectName) { - this.aspectName = aspectName; - return this; - } - - public RestoreIndicesArgs setUrnLike(String urnLike) { - this.urnLike = urnLike; - return this; - } - - public RestoreIndicesArgs setUrn(String urn) { - this.urn = urn; - return this; - } - - public RestoreIndicesArgs setStart(Integer start) { - if (start != null) { - this.start = start; - } - return this; + } + + public RestoreIndicesArgs setAspectName(String aspectName) { + this.aspectName = aspectName; + return this; + } + + public RestoreIndicesArgs setUrnLike(String urnLike) { + this.urnLike = urnLike; + return this; + } + + public RestoreIndicesArgs setUrn(String urn) { + this.urn = urn; + return this; + } + + public RestoreIndicesArgs setStart(Integer start) { + if (start != null) { + this.start = start; } + return this; + } - public RestoreIndicesArgs setBatchSize(Integer batchSize) { - if (batchSize != null) { - this.batchSize = batchSize; - } - return this; + public RestoreIndicesArgs setBatchSize(Integer batchSize) { + if (batchSize != null) { + this.batchSize = batchSize; } + return this; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java index b7917d87f99fc..8479338660db0 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java @@ -4,13 +4,13 @@ @Data public class RestoreIndicesResult { - public int ignored = 0; - public int rowsMigrated = 0; - public long timeSqlQueryMs = 0; - public long timeGetRowMs = 0; - public long timeUrnMs = 0; - public long timeEntityRegistryCheckMs = 0; - public long aspectCheckMs = 0; - public long createRecordMs = 0; - public long sendMessageMs = 0; + public int ignored = 0; + public int rowsMigrated = 0; + public long timeSqlQueryMs = 0; + public long timeGetRowMs = 0; + public long timeUrnMs = 0; + public long timeEntityRegistryCheckMs = 0; + public long aspectCheckMs = 0; + public long 
createRecordMs = 0; + public long sendMessageMs = 0; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java index 0d9126026b9c8..89e337771752f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java @@ -4,9 +4,9 @@ @Data public class BulkApplyRetentionArgs { - public Integer start; - public Integer count; - public Integer attemptWithVersion; - public String aspectName; - public String urn; + public Integer start; + public Integer count; + public Integer attemptWithVersion; + public String aspectName; + public String urn; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java index ef032496c8451..c84c7364534fc 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java @@ -4,13 +4,13 @@ @Data public class BulkApplyRetentionResult { - public long argStart; - public long argCount; - public long argAttemptWithVersion; - public String argUrn; - public String argAspectName; - public long rowsHandled = 0; - public long timeRetentionPolicyMapMs; - public long timeRowMs; - public long timeApplyRetentionMs = 0; + public long argStart; + public long argCount; + public long argAttemptWithVersion; + public String argUrn; + public String argAspectName; + public long rowsHandled = 0; + public long timeRetentionPolicyMapMs; + public long timeRowMs; + public long timeApplyRetentionMs = 0; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java index 03a2b4e2a7f73..155385c62ecef 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.transactions; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.models.AspectSpec; @@ -8,85 +10,85 @@ import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; - import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public abstract class AbstractBatchItem { - // urn an urn associated with the new aspect - public abstract Urn getUrn(); + // urn an urn associated with the new aspect + public abstract Urn getUrn(); - // aspectName name of the aspect being inserted - public abstract String getAspectName(); + // aspectName name of the aspect being inserted + public abstract String getAspectName(); - public abstract SystemMetadata getSystemMetadata(); + public abstract SystemMetadata getSystemMetadata(); - public 
abstract ChangeType getChangeType(); + public abstract ChangeType getChangeType(); - public abstract EntitySpec getEntitySpec(); + public abstract EntitySpec getEntitySpec(); - public abstract AspectSpec getAspectSpec(); + public abstract AspectSpec getAspectSpec(); - public abstract MetadataChangeProposal getMetadataChangeProposal(); + public abstract MetadataChangeProposal getMetadataChangeProposal(); - public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn); + public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn); - @Nonnull - protected static SystemMetadata generateSystemMetadataIfEmpty(@Nullable SystemMetadata systemMetadata) { - if (systemMetadata == null) { - systemMetadata = new SystemMetadata(); - systemMetadata.setRunId(DEFAULT_RUN_ID); - systemMetadata.setLastObserved(System.currentTimeMillis()); - } - return systemMetadata; + @Nonnull + protected static SystemMetadata generateSystemMetadataIfEmpty( + @Nullable SystemMetadata systemMetadata) { + if (systemMetadata == null) { + systemMetadata = new SystemMetadata(); + systemMetadata.setRunId(DEFAULT_RUN_ID); + systemMetadata.setLastObserved(System.currentTimeMillis()); } + return systemMetadata; + } - protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) { - if (!mcp.hasAspectName() || !mcp.hasAspect()) { - throw new UnsupportedOperationException("Aspect and aspect name is required for create and update operations"); - } - - AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName()); + protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) { + if (!mcp.hasAspectName() || !mcp.hasAspect()) { + throw new UnsupportedOperationException( + "Aspect and aspect name is required for create and update operations"); + } - if (aspectSpec == null) { - throw new RuntimeException( - String.format("Unknown aspect %s for entity %s", mcp.getAspectName(), - mcp.getEntityType())); - } + AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName()); - return aspectSpec; + if (aspectSpec == null) { + throw new RuntimeException( + String.format( + "Unknown aspect %s for entity %s", mcp.getAspectName(), mcp.getEntityType())); } - /** - * Validates that a change type is valid for the given aspect - * @param changeType - * @param aspectSpec - * @return - */ - protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { - if (aspectSpec.isTimeseries()) { - // Timeseries aspects only support UPSERT - return ChangeType.UPSERT.equals(changeType); - } else { - if (ChangeType.PATCH.equals(changeType)) { - return supportsPatch(aspectSpec); - } else { - return ChangeType.UPSERT.equals(changeType); - } - } + return aspectSpec; + } + + /** + * Validates that a change type is valid for the given aspect + * + * @param changeType + * @param aspectSpec + * @return + */ + protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { + if (aspectSpec.isTimeseries()) { + // Timeseries aspects only support UPSERT + return ChangeType.UPSERT.equals(changeType); + } else { + if (ChangeType.PATCH.equals(changeType)) { + return supportsPatch(aspectSpec); + } else { + return ChangeType.UPSERT.equals(changeType); + } } - - protected static boolean supportsPatch(AspectSpec aspectSpec) { - // Limit initial support to defined templates - if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) { - // Prevent unexpected behavior for aspects that do not currently have 1st 
class patch support, - // specifically having array based fields that require merging without specifying merge behavior can get into bad states - throw new UnsupportedOperationException("Aspect: " + aspectSpec.getName() + " does not currently support patch " - + "operations."); - } - return true; + } + + protected static boolean supportsPatch(AspectSpec aspectSpec) { + // Limit initial support to defined templates + if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) { + // Prevent unexpected behavior for aspects that do not currently have 1st class patch support, + // specifically having array based fields that require merging without specifying merge + // behavior can get into bad states + throw new UnsupportedOperationException( + "Aspect: " + aspectSpec.getName() + " does not currently support patch " + "operations."); } + return true; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java index 1d3da08130071..4f2cf6073bdac 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java @@ -5,18 +5,22 @@ import java.util.Set; import java.util.stream.Collectors; - public interface AspectsBatch { - List<? extends AbstractBatchItem> getItems(); + List<? extends AbstractBatchItem> getItems(); - default boolean containsDuplicateAspects() { - return getItems().stream().map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode())) - .distinct().count() != getItems().size(); - } + default boolean containsDuplicateAspects() { + return getItems().stream() + .map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode())) + .distinct() + .count() + != getItems().size(); + } - default Map<String, Set<String>> getUrnAspectsMap() { - return getItems().stream() - .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName())) - .collect(Collectors.groupingBy(Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet()))); - } + default Map<String, Set<String>> getUrnAspectsMap() { + return getItems().stream() + .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName())) + .collect( + Collectors.groupingBy( + Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet()))); + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java index 56bddba5dc0fa..d27b0ed303972 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java @@ -1,30 +1,20 @@ package com.linkedin.metadata.graph; import com.linkedin.common.urn.Urn; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Data; - -import java.util.Map; import lombok.EqualsAndHashCode; - @Data @AllArgsConstructor public class Edge { - @EqualsAndHashCode.Include - private Urn source; - @EqualsAndHashCode.Include - private Urn destination; - @EqualsAndHashCode.Include - private String relationshipType; - @EqualsAndHashCode.Exclude - private Long createdOn; - @EqualsAndHashCode.Exclude - private Urn createdActor; - @EqualsAndHashCode.Exclude - private Long updatedOn; - @EqualsAndHashCode.Exclude 
-  private Urn updatedActor;
-  @EqualsAndHashCode.Exclude
-  private Map<String, Object> properties;
+  @EqualsAndHashCode.Include private Urn source;
+  @EqualsAndHashCode.Include private Urn destination;
+  @EqualsAndHashCode.Include private String relationshipType;
+  @EqualsAndHashCode.Exclude private Long createdOn;
+  @EqualsAndHashCode.Exclude private Urn createdActor;
+  @EqualsAndHashCode.Exclude private Long updatedOn;
+  @EqualsAndHashCode.Exclude private Urn updatedActor;
+  @EqualsAndHashCode.Exclude private Map<String, Object> properties;
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java
index d47d1e12cceb0..96a711d3875b3 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java
@@ -6,13 +6,12 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public interface GraphClient {
   public static final Integer DEFAULT_PAGE_SIZE = 100;
 
   /**
-   * Returns a list of related entities for a given entity, set of edge types, and direction relative to the
-   * source node
+   * Returns a list of related entities for a given entity, set of edge types, and direction
+   * relative to the source node
    */
   @Nonnull
   EntityRelationships getRelatedEntities(
@@ -24,10 +23,15 @@ EntityRelationships getRelatedEntities(
       String actor);
 
   /**
-   * Returns lineage relationships for given entity in the DataHub graph.
-   * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity
+   * Returns lineage relationships for given entity in the DataHub graph. Lineage relationship
+   * denotes whether an entity is directly upstream or downstream of another entity
    */
   @Nonnull
-  EntityLineageResult getLineageEntities(String rawUrn, LineageDirection direction, @Nullable Integer start,
-      @Nullable Integer count, int maxHops, String actor);
+  EntityLineageResult getLineageEntities(
+      String rawUrn,
+      LineageDirection direction,
+      @Nullable Integer start,
+      @Nullable Integer count,
+      int maxHops,
+      String actor);
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java
index 3b47f244086a9..cb4eadb8824d5 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java
@@ -5,7 +5,6 @@
 import lombok.AllArgsConstructor;
 import lombok.Data;
 
-
 @Data
 @AllArgsConstructor
 public class GraphFilters {
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java
index 2bbf2af1437d8..2afe907399745 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java
@@ -18,10 +18,11 @@
 @Slf4j
 public class GraphIndexUtils {
 
-  private GraphIndexUtils() { }
+  private GraphIndexUtils() {}
 
   @Nullable
-  private static List<Urn> getActorList(@Nullable final String path, @Nonnull final RecordTemplate aspect) {
+  private static List<Urn> getActorList(
+      @Nullable final String path, @Nonnull final RecordTemplate aspect) {
     if (path == null) 
{ return null; } @@ -31,7 +32,8 @@ private static List<Urn> getActorList(@Nullable final String path, @Nonnull fina } @Nullable - private static List<Long> getTimestampList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List<Long> getTimestampList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -41,7 +43,8 @@ private static List<Long> getTimestampList(@Nullable final String path, @Nonnull } @Nullable - private static List<Map<String, Object>> getPropertiesList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List<Map<String, Object>> getPropertiesList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -50,10 +53,9 @@ private static List<Map<String, Object>> getPropertiesList(@Nullable final Strin return (List<Map<String, Object>>) value; } - - @Nullable - private static boolean isValueListValid(@Nullable final List<?> entryList, final int valueListSize) { + private static boolean isValueListValid( + @Nullable final List<?> entryList, final int valueListSize) { if (entryList == null) { return false; } @@ -64,7 +66,8 @@ private static boolean isValueListValid(@Nullable final List<?> entryList, final } @Nullable - private static Long getTimestamp(@Nullable final List<Long> timestampList, final int index, final int valueListSize) { + private static Long getTimestamp( + @Nullable final List<Long> timestampList, final int index, final int valueListSize) { if (isValueListValid(timestampList, valueListSize)) { return timestampList.get(index); } @@ -72,7 +75,8 @@ private static Long getTimestamp(@Nullable final List<Long> timestampList, final } @Nullable - private static Urn getActor(@Nullable final List<Urn> actorList, final int index, final int valueListSize) { + private static Urn getActor( + @Nullable final List<Urn> actorList, final int index, final int valueListSize) { if (isValueListValid(actorList, valueListSize)) { return actorList.get(index); } @@ -80,7 +84,10 @@ private static Urn getActor(@Nullable final List<Urn> actorList, final int index } @Nullable - private static Map<String, Object> getProperties(@Nullable final List<Map<String, Object>> propertiesList, final int index, final int valueListSize) { + private static Map<String, Object> getProperties( + @Nullable final List<Map<String, Object>> propertiesList, + final int index, + final int valueListSize) { if (isValueListValid(propertiesList, valueListSize)) { return propertiesList.get(index); } @@ -88,8 +95,8 @@ private static Map<String, Object> getProperties(@Nullable final List<Map<String } /** - * Used to create new edges for the graph db, adding all the metadata associated with each edge based on the aspect. - * Returns a list of Edges to be consumed by the graph service. + * Used to create new edges for the graph db, adding all the metadata associated with each edge + * based on the aspect. Returns a list of Edges to be consumed by the graph service. 
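For orientation, a sketch of the `Edge` value object these helpers populate, built directly; the field order follows the `@EqualsAndHashCode`-annotated fields reformatted earlier in this patch, and the urns are fabricated:

    // One DownstreamOf edge, as extractGraphEdges would emit it (inside a method
    // that declares URISyntaxException).
    Edge edge =
        new Edge(
            Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hive,db.a,PROD)"), // source
            Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hive,db.b,PROD)"), // destination
            "DownstreamOf", // relationshipType
            System.currentTimeMillis(), // createdOn
            null, // createdActor
            System.currentTimeMillis(), // updatedOn
            null, // updatedActor
            null); // properties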
*/ @Nonnull public static List<Edge> extractGraphEdges( @@ -97,14 +104,18 @@ public static List<Edge> extractGraphEdges( @Nonnull final RecordTemplate aspect, @Nonnull final Urn urn, @Nonnull final MetadataChangeLog event, - @Nonnull final boolean isNewAspectVersion - ) { + @Nonnull final boolean isNewAspectVersion) { final List<Edge> edgesToAdd = new ArrayList<>(); - final String createdOnPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedOn(); - final String createdActorPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedActor(); - final String updatedOnPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedOn(); - final String updatedActorPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); - final String propertiesPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); + final String createdOnPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedOn(); + final String createdActorPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedActor(); + final String updatedOnPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedOn(); + final String updatedActorPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); + final String propertiesPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); final List<Long> createdOnList = getTimestampList(createdOnPath, aspect); final List<Urn> createdActorList = getActorList(createdActorPath, aspect); @@ -114,27 +125,33 @@ public static List<Edge> extractGraphEdges( int index = 0; for (Object fieldValue : extractedFieldsEntry.getValue()) { - Long createdOn = createdOnList != null - ? getTimestamp(createdOnList, index, extractedFieldsEntry.getValue().size()) - : null; - Urn createdActor = createdActorList != null - ? getActor(createdActorList, index, extractedFieldsEntry.getValue().size()) - : null; - Long updatedOn = updatedOnList != null - ? getTimestamp(updatedOnList, index, extractedFieldsEntry.getValue().size()) - : null; - Urn updatedActor = updatedActorList != null - ? getActor(updatedActorList, index, extractedFieldsEntry.getValue().size()) - : null; - final Map<String, Object> properties = propertiesList != null - ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) - : null; + Long createdOn = + createdOnList != null + ? getTimestamp(createdOnList, index, extractedFieldsEntry.getValue().size()) + : null; + Urn createdActor = + createdActorList != null + ? getActor(createdActorList, index, extractedFieldsEntry.getValue().size()) + : null; + Long updatedOn = + updatedOnList != null + ? getTimestamp(updatedOnList, index, extractedFieldsEntry.getValue().size()) + : null; + Urn updatedActor = + updatedActorList != null + ? getActor(updatedActorList, index, extractedFieldsEntry.getValue().size()) + : null; + final Map<String, Object> properties = + propertiesList != null + ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) + : null; SystemMetadata systemMetadata; if (isNewAspectVersion) { systemMetadata = event.hasSystemMetadata() ? event.getSystemMetadata() : null; } else { - systemMetadata = event.hasPreviousSystemMetadata() ? event.getPreviousSystemMetadata() : null; + systemMetadata = + event.hasPreviousSystemMetadata() ? 
event.getPreviousSystemMetadata() : null; } if ((createdOn == null || createdOn == 0) && systemMetadata != null) { @@ -160,9 +177,7 @@ public static List<Edge> extractGraphEdges( createdActor, updatedOn, updatedActor, - properties - ) - ); + properties)); } catch (URISyntaxException e) { log.error("Invalid destination urn: {}", fieldValue, e); } @@ -183,7 +198,6 @@ public static Edge mergeEdges(@Nonnull final Edge oldEdge, @Nonnull final Edge n null, newEdge.getUpdatedOn(), newEdge.getUpdatedActor(), - newEdge.getProperties() - ); + newEdge.getProperties()); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java index 6f0ac4bc2f904..b3e713a906d01 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java @@ -18,9 +18,7 @@ import org.apache.commons.collections.CollectionUtils; public interface GraphService { - /** - * Return lineage registry to construct graph index - */ + /** Return lineage registry to construct graph index */ LineageRegistry getLineageRegistry(); /** @@ -29,157 +27,207 @@ public interface GraphService { void addEdge(final Edge edge); /** - * Adds or updates an edge to the graph. This creates the source and destination nodes, if they do not exist. + * Adds or updates an edge to the graph. This creates the source and destination nodes, if they do + * not exist. */ void upsertEdge(final Edge edge); /** * Remove an edge from the graph. + * * @param edge the edge to delete */ void removeEdge(final Edge edge); /** - * Find related entities (nodes) connected to a source entity via edges of given relationship types. Related entities - * can be filtered by source and destination type (use `null` for any type), by source and destination entity filter - * and relationship filter. Pagination of the result is controlled via `offset` and `count`. + * Find related entities (nodes) connected to a source entity via edges of given relationship + * types. Related entities can be filtered by source and destination type (use `null` for any + * type), by source and destination entity filter and relationship filter. Pagination of the + * result is controlled via `offset` and `count`. * - * Starting from a node as the source entity, determined by `sourceType` and `sourceEntityFilter`, - * related entities are found along the direction of edges (`RelationshipDirection.OUTGOING`) or in opposite - * direction of edges (`RelationshipDirection.INCOMING`). The destination entities are further filtered by `destinationType` - * and `destinationEntityFilter`, and then returned as related entities. + * <p>Starting from a node as the source entity, determined by `sourceType` and + * `sourceEntityFilter`, related entities are found along the direction of edges + * (`RelationshipDirection.OUTGOING`) or in opposite direction of edges + * (`RelationshipDirection.INCOMING`). The destination entities are further filtered by + * `destinationType` and `destinationEntityFilter`, and then returned as related entities. * - * This does not return duplicate related entities, even if entities are connected to source entities via multiple edges. - * An empty list of relationship types returns an empty result. + * <p>This does not return duplicate related entities, even if entities are connected to source + * entities via multiple edges. 
An empty list of relationship types returns an empty result. * - * In other words, the source and destination entity is not to be understood as the source and destination of the edge, - * but as the source and destination of "finding related entities", where always the destination entities are returned. - * This understanding is important when it comes to `RelationshipDirection.INCOMING`. The origin of the edge becomes - * the destination entity and the source entity is where the edge points to. + * <p>In other words, the source and destination entity is not to be understood as the source and + * destination of the edge, but as the source and destination of "finding related entities", where + * always the destination entities are returned. This understanding is important when it comes to + * `RelationshipDirection.INCOMING`. The origin of the edge becomes the destination entity and the + * source entity is where the edge points to. * - * Example I: - * dataset one --DownstreamOf-> dataset two --DownstreamOf-> dataset three + * <p>Example I: dataset one --DownstreamOf-> dataset two --DownstreamOf-> dataset three * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset two") - * - RelatedEntity("DownstreamOf", "dataset three") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset two") - RelatedEntity("DownstreamOf", "dataset three") * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset one") - * - RelatedEntity("DownstreamOf", "dataset two") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset one") - RelatedEntity("DownstreamOf", "dataset two") * - * Example II: - * dataset one --HasOwner-> user one + * <p>Example II: dataset one --HasOwner-> user one * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - * - RelatedEntity("HasOwner", "user one") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], + * RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - + * RelatedEntity("HasOwner", "user one") * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - * - RelatedEntity("HasOwner", "dataset one") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], + * RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - + * RelatedEntity("HasOwner", "dataset one") * - * Calling this method with {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` + * <p>Calling this method with {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` * is equivalent to the union of `OUTGOING` and `INCOMING` (without duplicates). 
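Example I above, rendered as an actual call against the signature that follows (the `graphService` handle is an assumption; the filter helpers are the same `QueryUtils` calls used by `getLineage` later in this interface):

    // Walk DownstreamOf edges outward from "dataset one".
    RelatedEntitiesResult downstream =
        graphService.findRelatedEntities(
            null, // any source type
            QueryUtils.newFilter("urn", "urn:li:dataset:(urn:li:dataPlatform:hive,one,PROD)"),
            null, // any destination type
            QueryUtils.EMPTY_FILTER,
            List.of("DownstreamOf"),
            QueryUtils.newRelationshipFilter(
                QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING),
            0,
            100);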
* - * Example III: - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.UNDIRECTED), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset one") - * - RelatedEntity("DownstreamOf", "dataset two") - * - RelatedEntity("DownstreamOf", "dataset three") + * <p>Example III: findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.UNDIRECTED), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset one") - RelatedEntity("DownstreamOf", "dataset two") - + * RelatedEntity("DownstreamOf", "dataset three") */ @Nonnull - RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List<String> destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - final int offset, final int count); - + RelatedEntitiesResult findRelatedEntities( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count); /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops) { return getLineage( entityUrn, direction, - new GraphFilters(new ArrayList(getLineageRegistry().getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), + new GraphFilters( + new ArrayList( + getLineageRegistry() + .getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), offset, count, - maxHops - ); + maxHops); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { return getLineage( entityUrn, direction, - new GraphFilters(new 
ArrayList(getLineageRegistry().getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), + new GraphFilters( + new ArrayList( + getLineageRegistry() + .getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), offset, count, maxHops, startTimeMillis, - endTimeMillis - ); + endTimeMillis); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If entityTypes is not empty, - * will only return edges to entities that are within the entity types set. - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If + * entityTypes is not empty, will only return edges to entities that are within the entity types + * set. Abstracts away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops) { return getLineage(entityUrn, direction, graphFilters, offset, count, maxHops, null, null); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If entityTypes is not empty, - * will only return edges to entities that are within the entity types set. - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If + * entityTypes is not empty, will only return edges to entities that are within the entity types + * set. 
Abstracts away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops, @Nullable Long startTimeMillis, + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { if (maxHops > 1) { maxHops = 1; } List<LineageRegistry.EdgeInfo> edgesToFetch = getLineageRegistry().getLineageRelationships(entityUrn.getEntityType(), direction); - Map<Boolean, List<LineageRegistry.EdgeInfo>> edgesByDirection = edgesToFetch.stream() - .collect(Collectors.partitioningBy(edgeInfo -> edgeInfo.getDirection() == RelationshipDirection.OUTGOING)); - EntityLineageResult result = new EntityLineageResult().setStart(offset) - .setCount(count) - .setRelationships(new LineageRelationshipArray()) - .setTotal(0); + Map<Boolean, List<LineageRegistry.EdgeInfo>> edgesByDirection = + edgesToFetch.stream() + .collect( + Collectors.partitioningBy( + edgeInfo -> edgeInfo.getDirection() == RelationshipDirection.OUTGOING)); + EntityLineageResult result = + new EntityLineageResult() + .setStart(offset) + .setCount(count) + .setRelationships(new LineageRelationshipArray()) + .setTotal(0); Set<String> visitedUrns = new HashSet<>(); // Outgoing edges if (!CollectionUtils.isEmpty(edgesByDirection.get(true))) { List<String> relationshipTypes = new ArrayList( - edgesByDirection.get(true).stream().map(LineageRegistry.EdgeInfo::getType).collect(Collectors.toSet())); + edgesByDirection.get(true).stream() + .map(LineageRegistry.EdgeInfo::getType) + .collect(Collectors.toSet())); // Fetch outgoing edges RelatedEntitiesResult outgoingEdges = - findRelatedEntities(null, QueryUtils.newFilter("urn", entityUrn.toString()), graphFilters.getAllowedEntityTypes(), + findRelatedEntities( + null, + QueryUtils.newFilter("urn", entityUrn.toString()), + graphFilters.getAllowedEntityTypes(), QueryUtils.EMPTY_FILTER, - relationshipTypes, QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING), offset, + relationshipTypes, + QueryUtils.newRelationshipFilter( + QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING), + offset, count); // Update offset and count to fetch the correct number of incoming edges below @@ -187,39 +235,59 @@ default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageD count = Math.max(0, count - outgoingEdges.getEntities().size()); result.setTotal(result.getTotal() + outgoingEdges.getTotal()); - outgoingEdges.getEntities().forEach(entity -> { - visitedUrns.add(entity.getUrn()); - try { - result.getRelationships() - .add(new LineageRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType())); - } catch (URISyntaxException ignored) { - } - }); + outgoingEdges + .getEntities() + .forEach( + entity -> { + visitedUrns.add(entity.getUrn()); + try { + result + .getRelationships() + .add( + new LineageRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType())); + } catch (URISyntaxException ignored) { + } + }); } // Incoming edges if 
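The partitioningBy collector above is the crux of the default traversal: it splits the registry's edge metadata into outgoing (true) and incoming (false) buckets in a single pass. A self-contained illustration with stand-in data follows; the local Edge record is illustrative, not DataHub's LineageRegistry.EdgeInfo.

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class PartitionDemo {
  // Stand-in for LineageRegistry.EdgeInfo (illustrative only).
  record Edge(String type, boolean outgoing) {}

  public static void main(String[] args) {
    List<Edge> edges =
        List.of(new Edge("DownstreamOf", true), new Edge("Consumes", false));
    // true -> treated as OUTGOING, false -> INCOMING, exactly as in the code above.
    Map<Boolean, List<Edge>> byDirection =
        edges.stream().collect(Collectors.partitioningBy(Edge::outgoing));
    System.out.println(byDirection.get(true));  // [Edge[type=DownstreamOf, outgoing=true]]
    System.out.println(byDirection.get(false)); // [Edge[type=Consumes, outgoing=false]]
  }
}
```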
(!CollectionUtils.isEmpty(edgesByDirection.get(false))) { List<String> relationshipTypes = - edgesByDirection.get(false).stream().map(LineageRegistry.EdgeInfo::getType).collect(Collectors.toList()); + edgesByDirection.get(false).stream() + .map(LineageRegistry.EdgeInfo::getType) + .collect(Collectors.toList()); RelatedEntitiesResult incomingEdges = - findRelatedEntities(null, QueryUtils.newFilter("urn", entityUrn.toString()), graphFilters.getAllowedEntityTypes(), + findRelatedEntities( + null, + QueryUtils.newFilter("urn", entityUrn.toString()), + graphFilters.getAllowedEntityTypes(), QueryUtils.EMPTY_FILTER, - relationshipTypes, QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING), offset, + relationshipTypes, + QueryUtils.newRelationshipFilter( + QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING), + offset, count); result.setTotal(result.getTotal() + incomingEdges.getTotal()); - incomingEdges.getEntities().forEach(entity -> { - if (visitedUrns.contains(entity.getUrn())) { - return; - } - visitedUrns.add(entity.getUrn()); - try { - result.getRelationships() - .add(new LineageRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType())); - } catch (URISyntaxException ignored) { - } - }); + incomingEdges + .getEntities() + .forEach( + entity -> { + if (visitedUrns.contains(entity.getUrn())) { + return; + } + visitedUrns.add(entity.getUrn()); + try { + result + .getRelationships() + .add( + new LineageRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType())); + } catch (URISyntaxException ignored) { + } + }); } return result; @@ -231,26 +299,26 @@ default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageD void removeNode(@Nonnull final Urn urn); /** - * Removes edges of the given relationship types from the given node after applying the relationship filter. + * Removes edges of the given relationship types from the given node after applying the + * relationship filter. * - * An empty list of relationship types removes nothing from the node. + * <p>An empty list of relationship types removes nothing from the node. * - * Calling this method with a {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` - * is equivalent to the union of `OUTGOING` and `INCOMING` (without duplicates). + * <p>Calling this method with a {@link RelationshipDirection} `UNDIRECTED` in + * `relationshipFilter` is equivalent to the union of `OUTGOING` and `INCOMING` (without + * duplicates). */ - void removeEdgesFromNode(@Nonnull final Urn urn, @Nonnull final List<String> relationshipTypes, + void removeEdgesFromNode( + @Nonnull final Urn urn, + @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter); void configure(); - /** - * Removes all edges and nodes from the graph. - */ + /** Removes all edges and nodes from the graph. 
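The removeEdgesFromNode javadoc above pins down an easy-to-miss contract: an UNDIRECTED relationship filter behaves as the union of both directions. A minimal sketch of that equivalence, assuming a graphService instance and a target urn:

```java
import com.linkedin.metadata.query.filter.RelationshipDirection;
import com.linkedin.metadata.search.utils.QueryUtils;
import java.util.Collections;
import java.util.List;

// Sketch only: per the javadoc, the single UNDIRECTED call removes the same
// edges as the union of the two directed calls (without duplicates).
List<String> types = Collections.singletonList("DownstreamOf");

graphService.removeEdgesFromNode(
    urn, types,
    QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.UNDIRECTED));

// ...is equivalent to:
graphService.removeEdgesFromNode(
    urn, types,
    QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING));
graphService.removeEdgesFromNode(
    urn, types,
    QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING));
```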
*/ void clear(); - /** - * Whether or not this graph service supports multi-hop - */ + /** Whether or not this graph service supports multi-hop */ default boolean supportsMultiHop() { return false; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java index 2975d100933fd..be1b55655f671 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java @@ -6,13 +6,9 @@ @AllArgsConstructor @Data public class RelatedEntity { - /** - * How the entity is related, along which edge. - */ + /** How the entity is related, along which edge. */ String relationshipType; - /** - * Urn associated with the related entity. - */ + /** Urn associated with the related entity. */ String urn; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java index 27cb7fdec22d3..5676dc9ebac54 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java @@ -12,7 +12,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class RecommendationsService { @@ -28,16 +27,20 @@ public RecommendationsService( } private void validateRecommendationSources(final List<RecommendationSource> candidateSources) { - final Map<String, Long> moduleIdCount = candidateSources.stream() - .collect(Collectors.groupingBy(RecommendationSource::getModuleId, Collectors.counting())); - List<String> moduleIdsWithDuplicates = moduleIdCount.entrySet() - .stream() - .filter(entry -> entry.getValue() > 1) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + final Map<String, Long> moduleIdCount = + candidateSources.stream() + .collect( + Collectors.groupingBy(RecommendationSource::getModuleId, Collectors.counting())); + List<String> moduleIdsWithDuplicates = + moduleIdCount.entrySet().stream() + .filter(entry -> entry.getValue() > 1) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); if (!moduleIdsWithDuplicates.isEmpty()) { throw new IllegalArgumentException( - String.format("Found recommendations candidate sources with duplicate module IDs: %s", moduleIdsWithDuplicates.toString())); + String.format( + "Found recommendations candidate sources with duplicate module IDs: %s", + moduleIdsWithDuplicates.toString())); } } @@ -52,16 +55,23 @@ private void validateRecommendationSources(final List<RecommendationSource> cand @Nonnull @WithSpan public List<RecommendationModule> listRecommendations( - @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext, - int limit) { + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext, int limit) { // Get recommendation candidates from sources which are eligible, in parallel - final List<RecommendationModule> candidateModules = ConcurrencyUtils.transformAndCollectAsync(_candidateSources.stream() - .filter(source -> source.isEligible(userUrn, requestContext)) - .collect(Collectors.toList()), source -> source.getRecommendationModule(userUrn, requestContext), (source, exception) -> { - log.error("Error while fetching candidate modules from source 
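The validateRecommendationSources logic above is a compact groupingBy/counting duplicate check. The same pattern in standalone form, with illustrative module ids:

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

static void requireUniqueModuleIds(List<String> moduleIds) {
  // Count occurrences of each module id.
  Map<String, Long> counts =
      moduleIds.stream().collect(Collectors.groupingBy(id -> id, Collectors.counting()));
  // Keep only ids seen more than once.
  List<String> duplicates =
      counts.entrySet().stream()
          .filter(e -> e.getValue() > 1)
          .map(Map.Entry::getKey)
          .collect(Collectors.toList());
  if (!duplicates.isEmpty()) {
    throw new IllegalArgumentException("Duplicate module IDs: " + duplicates);
  }
}

// requireUniqueModuleIds(List.of("top_platforms", "top_tags", "top_tags"))
//   -> IllegalArgumentException: Duplicate module IDs: [top_tags]
```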
{}", source, exception); - return Optional.<RecommendationModule>empty(); - }).stream().filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList()); + final List<RecommendationModule> candidateModules = + ConcurrencyUtils.transformAndCollectAsync( + _candidateSources.stream() + .filter(source -> source.isEligible(userUrn, requestContext)) + .collect(Collectors.toList()), + source -> source.getRecommendationModule(userUrn, requestContext), + (source, exception) -> { + log.error( + "Error while fetching candidate modules from source {}", source, exception); + return Optional.<RecommendationModule>empty(); + }) + .stream() + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); // Rank recommendation modules, which determines their ordering during rendering return _moduleRanker.rank(candidateModules, userUrn, requestContext, limit); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java index 5aa097ccbb497..9392f50b4749e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DomainsCandidateSource extends EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } @@ -53,4 +53,3 @@ protected boolean isValueUrn() { return true; } } - diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java index e1ebc6d5e97be..a19909576d25b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java @@ -27,49 +27,36 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - /** * Base class for search aggregation based candidate source (e.g. 
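ConcurrencyUtils.transformAndCollectAsync is a DataHub helper, so as a rough mental model only, the "transform each eligible source in parallel, log-and-drop failures, keep the successes" shape could be sketched with plain CompletableFuture like this; it is not the helper's actual implementation.

```java
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.stream.Collectors;

static <T, R> List<R> transformAndCollect(List<T> inputs, Function<T, R> fn) {
  // Kick off all transforms before joining any, so they run concurrently.
  List<CompletableFuture<Optional<R>>> futures =
      inputs.stream()
          .map(
              in ->
                  CompletableFuture.supplyAsync(() -> Optional.ofNullable(fn.apply(in)))
                      .exceptionally(ex -> Optional.empty())) // log-and-drop, as above
          .collect(Collectors.toList());
  return futures.stream()
      .map(CompletableFuture::join)
      .filter(Optional::isPresent)
      .map(Optional::get)
      .collect(Collectors.toList());
}
```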
top platform, top tags, top terms) - * Aggregates entities based on field value in the entity search index and gets the value with the most documents + * Aggregates entities based on field value in the entity search index and gets the value with the + * most documents */ @Slf4j @RequiredArgsConstructor public abstract class EntitySearchAggregationSource implements RecommendationSource { private final EntitySearchService _entitySearchService; - /** - * Field to aggregate on - */ + /** Field to aggregate on */ protected abstract String getSearchFieldName(); - /** - * Max number of contents in module - */ + /** Max number of contents in module */ protected abstract int getMaxContent(); - /** - * Whether the aggregate value is an urn - */ + /** Whether the aggregate value is an urn */ protected abstract boolean isValueUrn(); - /** - * Whether the urn candidate is valid - */ + /** Whether the urn candidate is valid */ protected boolean isValidCandidateUrn(Urn urn) { return true; } - /** - * Whether the string candidate is valid - */ + /** Whether the string candidate is valid */ protected boolean isValidCandidateValue(String candidateValue) { return true; } - /** - * Whether the candidate is valid - * Calls different functions if candidate is an Urn - */ + /** Whether the candidate is valid. Calls different functions if the candidate is an Urn. */ protected <T> boolean isValidCandidate(T candidate) { if (candidate instanceof Urn) { return isValidCandidateUrn((Urn) candidate); @@ -79,10 +66,11 @@ protected <T> boolean isValidCandidate(T candidate) { @Override @WithSpan - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nullable RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nullable RecommendationRequestContext requestContext) { Map<String, Long> aggregationResult = - _entitySearchService.aggregateByValue(getEntityNames(), getSearchFieldName(), null, getMaxContent()); + _entitySearchService.aggregateByValue( + getEntityNames(), getSearchFieldName(), null, getMaxContent()); if (aggregationResult.isEmpty()) { return Collections.emptyList(); @@ -96,15 +84,21 @@ public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, } // If the aggregated values are urns, convert key into urns - Map<Urn, Long> urnCounts = aggregationResult.entrySet().stream().map(entry -> { - try { - Urn tagUrn = Urn.createFromString(entry.getKey()); - return Optional.of(Pair.of(tagUrn, entry.getValue())); - } catch (URISyntaxException e) { - log.error("Invalid tag urn {}", entry.getKey(), e); - return Optional.<Pair<Urn, Long>>empty(); - } - }).filter(Optional::isPresent).map(Optional::get).collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + Map<Urn, Long> urnCounts = + aggregationResult.entrySet().stream() + .map( + entry -> { + try { + Urn tagUrn = Urn.createFromString(entry.getKey()); + return Optional.of(Pair.of(tagUrn, entry.getValue())); + } catch (URISyntaxException e) { + log.error("Invalid tag urn {}", entry.getKey(), e); + return Optional.<Pair<Urn, Long>>empty(); + } + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); if (urnCounts.isEmpty()) { return Collections.emptyList(); @@ -128,13 +122,16 @@ private <T> List<Map.Entry<T, Long>> getTopKValues(Map<T, Long> countMap) { for (Map.Entry<T, Long> entry : countMap.entrySet()) { if (queue.size() < getMaxContent() && isValidCandidate(entry.getKey())) { queue.add(entry); - } else 
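The urn-parsing step above is a standard "parse or drop" stream: buckets whose keys are not valid Urns are silently skipped. A standalone sketch using Map.entry in place of the commons-lang3 Pair used above:

```java
import com.linkedin.common.urn.Urn;
import java.net.URISyntaxException;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

// Drop any aggregation bucket whose key is not a valid Urn, keeping the
// (urn, count) pairs that parse cleanly.
static Map<Urn, Long> toUrnCounts(Map<String, Long> aggregationResult) {
  return aggregationResult.entrySet().stream()
      .map(
          entry -> {
            try {
              return Optional.of(
                  Map.entry(Urn.createFromString(entry.getKey()), entry.getValue()));
            } catch (URISyntaxException e) {
              return Optional.<Map.Entry<Urn, Long>>empty(); // invalid urn: skip
            }
          })
      .filter(Optional::isPresent)
      .map(Optional::get)
      .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
```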
if (queue.size() > 0 && queue.peek().getValue() < entry.getValue() && isValidCandidate(entry.getKey())) { + } else if (queue.size() > 0 + && queue.peek().getValue() < entry.getValue() + && isValidCandidate(entry.getKey())) { queue.poll(); queue.add(entry); } } - // Since priority queue polls in reverse order (nature of heaps), need to reverse order before returning + // Since priority queue polls in reverse order (nature of heaps), need to reverse order before + // returning final LinkedList<Map.Entry<T, Long>> topK = new LinkedList<>(); while (!queue.isEmpty()) { topK.addFirst(queue.poll()); @@ -149,15 +146,25 @@ private Map<String, Long> mergeAggregation(Map<String, Long> first, Map<String, private <T> RecommendationContent buildRecommendationContent(T candidate, long count) { // Set filters for platform - SearchParams searchParams = new SearchParams().setQuery("") - .setFilters(new CriterionArray( - ImmutableList.of(new Criterion().setField(getSearchFieldName()).setValue(candidate.toString())))); + SearchParams searchParams = + new SearchParams() + .setQuery("") + .setFilters( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(getSearchFieldName()) + .setValue(candidate.toString())))); ContentParams contentParams = new ContentParams().setCount(count); RecommendationContent content = new RecommendationContent(); if (candidate instanceof Urn) { content.setEntity((Urn) candidate); } - return content.setValue(candidate.toString()) - .setParams(new RecommendationParams().setSearchParams(searchParams).setContentParams(contentParams)); + return content + .setValue(candidate.toString()) + .setParams( + new RecommendationParams() + .setSearchParams(searchParams) + .setContentParams(contentParams)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java index 357a5df2edd44..e133e3dc75ff3 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java @@ -33,7 +33,6 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class RecentlySearchedSource implements RecommendationSource { @@ -60,11 +59,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -72,15 +76,15 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo } @Override - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public 
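getTopKValues above is a classic bounded min-heap top-K selection. A standalone version of the same pattern; the queue's comparator is not shown in the hunk, so the value-ordering comparator here is an assumption:

```java
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;

static <T> List<Map.Entry<T, Long>> topK(Map<T, Long> counts, int k) {
  PriorityQueue<Map.Entry<T, Long>> queue =
      new PriorityQueue<>(k, Map.Entry.comparingByValue()); // min-heap on count (assumed)
  for (Map.Entry<T, Long> entry : counts.entrySet()) {
    if (queue.size() < k) {
      queue.add(entry);
    } else if (!queue.isEmpty() && queue.peek().getValue() < entry.getValue()) {
      queue.poll(); // evict the current minimum
      queue.add(entry);
    }
  }
  // Polling yields ascending counts, so prepend to produce descending order.
  LinkedList<Map.Entry<T, Long>> result = new LinkedList<>();
  while (!queue.isEmpty()) {
    result.addFirst(queue.poll());
  }
  return result;
}
```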
List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlySearched").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) .map(Optional::get) @@ -97,20 +101,26 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { SearchSourceBuilder source = new SearchSourceBuilder(); BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity view events of the user requesting recommendation - query.must(QueryBuilders.termQuery(DataHubUsageEventConstants.ACTOR_URN + ".keyword", userUrn.toString())); - query.must(QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, - DataHubUsageEventType.SEARCH_RESULTS_VIEW_EVENT.getType())); + query.must( + QueryBuilders.termQuery( + DataHubUsageEventConstants.ACTOR_URN + ".keyword", userUrn.toString())); + query.must( + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, + DataHubUsageEventType.SEARCH_RESULTS_VIEW_EVENT.getType())); query.must(QueryBuilders.rangeQuery("total").gt(0)); query.must(QueryBuilders.existsQuery(DataHubUsageEventConstants.QUERY)); source.query(query); // Find the entity with the largest last viewed timestamp String lastSearched = "last_searched"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(DataHubUsageEventConstants.QUERY + ".keyword") - .size(MAX_CONTENT * 2) // Fetch more than max to account for post-filtering - .order(BucketOrder.aggregation(lastSearched, false)) - .subAggregation(AggregationBuilders.max(lastSearched).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(DataHubUsageEventConstants.QUERY + ".keyword") + .size(MAX_CONTENT * 2) // Fetch more than max to account for post-filtering + .order(BucketOrder.aggregation(lastSearched, false)) + .subAggregation( + AggregationBuilders.max(lastSearched).field(DataHubUsageEventConstants.TIMESTAMP)); source.aggregation(aggregation); source.size(0); @@ -127,7 +137,10 @@ private Optional<RecommendationContent> buildContent(@Nonnull String query) { if (isQueryInvalid(query)) { return Optional.empty(); } - return Optional.of(new RecommendationContent().setValue(query) - .setParams(new RecommendationParams().setSearchParams(new SearchParams().setQuery(query)))); + return Optional.of( + new RecommendationContent() + .setValue(query) + .setParams( + new RecommendationParams().setSearchParams(new SearchParams().setQuery(query)))); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java index 7d43e3652b492..788ef728e294f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java +++ 
b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java @@ -11,25 +11,16 @@ import java.util.Optional; import javax.annotation.Nonnull; - -/** - * Base interface for defining a candidate source for recommendation module - */ +/** Base interface for defining a candidate source for recommendation module */ public interface RecommendationSource { - /** - * Returns the title of the module that is sourced (used in rendering) - */ + /** Returns the title of the module that is sourced (used in rendering) */ String getTitle(); - /** - * Returns a unique module id associated with the module - */ + /** Returns a unique module id associated with the module */ String getModuleId(); - /** - * Returns the template type used for rendering recommendations from this module - */ + /** Returns the template type used for rendering recommendations from this module */ RecommendationRenderType getRenderType(); /** @@ -49,7 +40,8 @@ public interface RecommendationSource { * @return list of recommendation candidates */ @WithSpan - List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext); + List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext); /** * Get the full recommendations module itself provided the request context. @@ -59,8 +51,7 @@ public interface RecommendationSource { * @return list of recommendation candidates */ default Optional<RecommendationModule> getRecommendationModule( - @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { if (!isEligible(userUrn, requestContext)) { return Optional.empty(); } @@ -70,9 +61,11 @@ default Optional<RecommendationModule> getRecommendationModule( return Optional.empty(); } - return Optional.of(new RecommendationModule().setTitle(getTitle()) - .setModuleId(getModuleId()) - .setRenderType(getRenderType()) - .setContent(new RecommendationContentArray(recommendations))); + return Optional.of( + new RecommendationModule() + .setTitle(getTitle()) + .setModuleId(getModuleId()) + .setRenderType(getRenderType()) + .setContent(new RecommendationContentArray(recommendations))); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java index 3fd2b599b4d39..1fa47d1a13645 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java @@ -4,7 +4,6 @@ import java.util.Set; import javax.annotation.Nonnull; - public class RecommendationUtils { /** @@ -14,10 +13,11 @@ public class RecommendationUtils { * @param entityTypes the set of valid entity types * @return true if the type of the urn is in the set of valid entity types, false otherwise. 
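Pulling the RecommendationSource interface above together, a minimal source might look like the following. This is entirely hypothetical: the class name, module id, content value, and the render type constant are assumptions, and the default getRecommendationModule shown in this patch handles wrapping the content into a module.

```java
import com.linkedin.common.urn.Urn;
import java.util.List;
import javax.annotation.Nonnull;

// Hypothetical example source; names and the render type constant are assumed.
public class PinnedQueriesSource implements RecommendationSource {
  @Override
  public String getTitle() {
    return "Pinned Searches";
  }

  @Override
  public String getModuleId() {
    return "pinned_searches";
  }

  @Override
  public RecommendationRenderType getRenderType() {
    return RecommendationRenderType.SEARCH_QUERY_LIST; // assumed enum constant
  }

  @Override
  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext ctx) {
    return ctx.getScenario() == ScenarioType.HOME; // same gate as the sources above
  }

  @Override
  public List<RecommendationContent> getRecommendations(
      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext ctx) {
    return List.of(new RecommendationContent().setValue("getting started"));
  }
}
```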
*/ - public static boolean isSupportedEntityType(@Nonnull final Urn urn, @Nonnull final Set<String> entityTypes) { + public static boolean isSupportedEntityType( + @Nonnull final Urn urn, @Nonnull final Set<String> entityTypes) { final String entityType = urn.getEntityType(); return entityTypes.contains(entityType); } - - private RecommendationUtils() { } + + private RecommendationUtils() {} } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java index 9562440889f63..3012e35baa607 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java @@ -14,29 +14,29 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TopPlatformsSource extends EntitySearchAggregationSource { /** - * Set of entities that we want to consider for defining the top platform sources. - * This must match SearchUtils.SEARCHABLE_ENTITY_TYPES + * Set of entities that we want to consider for defining the top platform sources. This must match + * SearchUtils.SEARCHABLE_ENTITY_TYPES */ - private static final List<String> SEARCHABLE_ENTITY_TYPES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_PRIMARY_KEY_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.TAG_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.NOTEBOOK_ENTITY_NAME - ); + private static final List<String> SEARCHABLE_ENTITY_TYPES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_PRIMARY_KEY_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.TAG_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.NOTEBOOK_ENTITY_NAME); + private final EntityService _entityService; private static final String PLATFORM = "platform"; @@ -61,7 +61,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java index 6563ea7dc4f91..317f956e1ca8a 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class 
TopTagsSource extends EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java index e885208a8b6db..6cdb5fdb65911 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TopTermsSource extends EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java index 7eae2e949d028..f09f83fd6ec25 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java @@ -6,7 +6,6 @@ import java.util.List; import javax.annotation.Nonnull; - public interface RecommendationModuleRanker { /** * Rank and return the final list of modules @@ -17,6 +16,9 @@ public interface RecommendationModuleRanker { * @param limit Max number of modules to return * @return ranked list of modules */ - List<RecommendationModule> rank(@Nonnull List<RecommendationModule> candidates, @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext, int limit); + List<RecommendationModule> rank( + @Nonnull List<RecommendationModule> candidates, + @Nonnull Urn userUrn, + @Nonnull RecommendationRequestContext requestContext, + int limit); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java index cefb9aec5ac51..13bc5af91c9e9 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java @@ -7,11 +7,13 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class SimpleRecommendationRanker implements RecommendationModuleRanker { @Override - public List<RecommendationModule> rank(@Nonnull 
List<RecommendationModule> candidates, @Nonnull Urn userUrn, - @Nullable RecommendationRequestContext requestContext, int limit) { + public List<RecommendationModule> rank( + @Nonnull List<RecommendationModule> candidates, + @Nonnull Urn userUrn, + @Nullable RecommendationRequestContext requestContext, + int limit) { return candidates.subList(0, Math.min(candidates.size(), limit)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java index 0a0be60969486..9b5630875cd15 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java @@ -4,7 +4,8 @@ import org.apache.avro.Schema; /** - * Internal Service logic to be used to emulate Confluent's Schema Registry component within DataHub. + * Internal Service logic to be used to emulate Confluent's Schema Registry component within + * DataHub. */ public interface SchemaRegistryService { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java index 8f7403c6aa428..6e6671c08242b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java @@ -18,7 +18,6 @@ import lombok.Getter; import org.apache.avro.Schema; - public class SchemaRegistryServiceImpl implements SchemaRegistryService { @AllArgsConstructor @@ -33,8 +32,7 @@ private enum TopicOrdinal { MAE_TOPIC(MetadataAuditEvent.getClassSchema()), DUHE_TOPIC(DataHubUpgradeHistoryEvent.getClassSchema()); - @Getter - private final Schema schema; + @Getter private final Schema schema; } private final Map<String, Schema> _schemaMap; @@ -44,28 +42,45 @@ private enum TopicOrdinal { public SchemaRegistryServiceImpl(final TopicConvention convention) { this._schemaMap = new HashMap<>(); this._subjectToIdMap = HashBiMap.create(); - this._schemaMap.put(convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.ordinal()); - this._schemaMap.put(convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.ordinal()); - this._schemaMap.put(convention.getMetadataChangeLogTimeseriesTopicName(), + this._schemaMap.put( + convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataChangeLogTimeseriesTopicName(), TopicOrdinal.MCL_TIMESERIES_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeLogTimeseriesTopicName(), + this._subjectToIdMap.put( + convention.getMetadataChangeLogTimeseriesTopicName(), 
TopicOrdinal.MCL_TIMESERIES_TOPIC.ordinal()); - this._schemaMap.put(convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.ordinal()); + this._schemaMap.put( + convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.ordinal()); this._schemaMap.put(convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.ordinal()); - this._schemaMap.put(convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.ordinal()); + this._subjectToIdMap.put( + convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.ordinal()); // Adding legacy topics as they are still produced in the EntityService IngestAspect code path. - this._schemaMap.put(convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.ordinal()); - this._schemaMap.put(convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.ordinal()); - this._schemaMap.put(convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.ordinal()); } @Override diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java index 9eb67ca25dd8b..e7a115d1a0518 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java @@ -4,22 +4,15 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class ResourceReference { - /** - * The urn of an entity - */ + /** The urn of an entity */ Urn urn; - /** - * The type of the SubResource - */ + /** The type of the SubResource */ SubResourceType subResourceType; - /** - * The subresource being targeted - */ + /** The subresource being targeted */ String 
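The constructor above maintains a Guava HashBiMap rather than a plain HashMap because the registry must answer lookups in both directions: id for a subject, and subject for an id. A small sketch of the idiom with illustrative topic names, not the real convention values:

```java
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

// Bidirectional subject <-> id mapping (illustrative values).
BiMap<String, Integer> subjectToId = HashBiMap.create();
subjectToId.put("MetadataChangeProposal_v1", 0);
subjectToId.put("MetadataChangeLog_v1", 1);

int id = subjectToId.get("MetadataChangeLog_v1"); // 1
String subject = subjectToId.inverse().get(0);    // "MetadataChangeProposal_v1"
```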
subResource; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java index 6a23158aa1fd9..042c6d1407a13 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.resource; public enum SubResourceType { - /** - * A field in a dataset - */ + /** A field in a dataset */ DATASET_FIELD } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 9cd865bd888e2..09a63e769f025 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -7,20 +7,16 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; - import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface EntitySearchService { void configure(); - /** - * Clear all data within the service - */ + /** Clear all data within the service */ void clear(); /** @@ -30,7 +26,6 @@ public interface EntitySearchService { */ long docCount(@Nonnull String entityName); - /** * Updates or inserts the given search document. * @@ -58,64 +53,90 @@ public interface EntitySearchService { void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable String runId); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
* - * Safe for non-structured, user input, queries with an attempt to provide some advanced features - * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a> + * <p>Safe for non-structured, user input, queries with an attempt to provide some advanced + * features <a + * href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a> * * @param entityNames names of the entities * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags flags controlling search options - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags); + SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
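A caller-side sketch of the reformatted search overload above; `entitySearchService` and the argument values are illustrative, and null is passed for the parameters the signature marks @Nullable:

```java
import java.util.List;

// Full-text search across two entity types, first 20 hits (sketch only).
SearchResult hits =
    entitySearchService.search(
        List.of("dataset", "dashboard"), // entityNames
        "pet profiles",                  // input
        null,                            // postFilters (@Nullable)
        null,                            // sortCriterion (@Nullable)
        0,                               // from
        20,                              // size
        null);                           // searchFlags (@Nullable)
```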
* - * Safe for non-structured, user input, queries with an attempt to provide some advanced features - * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a> + * <p>Safe for non-structured, user input, queries with an attempt to provide some advanced + * features <a + * href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a> * * @param entityNames names of the entities * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags flags controlling search options * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List<String> facets); + SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets); /** * Gets a list of documents after applying the input filters. * * @param entityName name of the entity - * @param filters the request map with fields and values to be applied as filters to the search query + * @param filters the request map with fields and values to be applied as filters to the search + * query * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return - * @return a {@link SearchResult} that contains a list of filtered documents and related search result metadata + * @return a {@link SearchResult} that contains a list of filtered documents and related search + * result metadata */ @Nonnull - SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, - int from, int size); + SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size); /** * Returns a list of suggestions given type ahead query. * - * <p>The advanced auto complete can take filters and provides suggestions based on filtered context. + * <p>The advanced auto complete can take filters and provides suggestions based on filtered + * context. 
* * @param entityName name of the entity * @param query the type ahead query text @@ -125,21 +146,29 @@ SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, @Nulla * @return A list of suggestions as string */ @Nonnull - AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit); + AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit); /** * Returns number of documents per field value given the field and filters * - * @param entityNames list of name of entities to aggregate across, if empty aggregate over all entities + * @param entityNames list of name of entities to aggregate across, if empty aggregate over all + * entities * @param field the field name for aggregate * @param requestParams filters to apply before aggregating * @param limit the number of aggregations to return * @return */ @Nonnull - Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit); + Map<String, Long> aggregateByValue( + @Nullable List<String> entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit); /** * Gets a list of groups/entities that match given browse request. @@ -152,7 +181,11 @@ Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull * @return a {@link BrowseResult} that contains a list of groups/entities */ @Nonnull - BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter requestParams, int from, + BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter requestParams, + int from, int size); /** @@ -166,7 +199,13 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable * @param count max number of results requested */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count); + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count); /** * Gets a list of paths for a given urn. @@ -179,41 +218,57 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
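aggregateByValue is the primitive that EntitySearchAggregationSource, earlier in this patch, builds on. A sketch of a direct call with illustrative arguments; the "platform" field name matches the constant in TopPlatformsSource above:

```java
import java.util.List;
import java.util.Map;

// Top-10 platforms by document count across two entity indices (sketch only).
Map<String, Long> platformCounts =
    entitySearchService.aggregateByValue(
        List.of("dataset", "dashboard"), // entityNames; empty/null = all entities
        "platform",                      // field to aggregate on
        null,                            // requestParams: no pre-filter
        10);                             // limit
platformCounts.forEach(
    (platform, count) -> System.out.println(platform + ": " + count + " docs"));
```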
* * @param entities name of the entities to scroll across * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @param searchFlags flags controlling search options - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - ScrollResult fullTextScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags); + ScrollResult fullTextScroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. * * @param entities name of the entities to scroll across * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @param searchFlags flags controlling search options - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - ScrollResult structuredScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags); + ScrollResult structuredScroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags); - /** - * Max result size returned by the underlying search backend - */ + /** Max result size returned by the underlying search backend */ int maxResultSize(); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java index 31b94425d6815..842cc51e11777 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java +++ 
b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.ModelUtils; import com.google.common.collect.ImmutableList; import com.linkedin.data.template.RecordTemplate; @@ -22,15 +24,11 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class QueryUtils { public static final Filter EMPTY_FILTER = new Filter().setOr(new ConjunctiveCriterionArray()); - private QueryUtils() { - } + private QueryUtils() {} // Creates new Criterion with field and value, using EQUAL condition. @Nonnull @@ -40,23 +38,31 @@ public static Criterion newCriterion(@Nonnull String field, @Nonnull String valu // Creates new Criterion with field, value and condition. @Nonnull - public static Criterion newCriterion(@Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { - return new Criterion().setField(field).setValue(value).setValues(new StringArray(ImmutableList.of(value))).setCondition(condition); + public static Criterion newCriterion( + @Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { + return new Criterion() + .setField(field) + .setValue(value) + .setValues(new StringArray(ImmutableList.of(value))) + .setCondition(condition); } - // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL condition (default). + // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL + // condition (default). @Nonnull public static Filter newFilter(@Nullable Map<String, String> params) { if (params == null) { return EMPTY_FILTER; } - CriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue())) - .collect(Collectors.toCollection(CriterionArray::new)); - return new Filter().setOr( - new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); + CriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue())) + .collect(Collectors.toCollection(CriterionArray::new)); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); } // Creates new Filter from a single Criterion with EQUAL condition (default). @@ -68,8 +74,12 @@ public static Filter newFilter(@Nonnull String field, @Nonnull String value) { // Create singleton filter with one criterion @Nonnull public static Filter newFilter(@Nonnull Criterion criterion) { - return new Filter().setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(criterion)))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(criterion)))))); } @Nonnull @@ -78,13 +88,18 @@ public static Filter filterOrDefaultEmptyFilter(@Nullable Filter filter) { } /** - * Converts a set of aspect classes to a set of {@link AspectVersion} with the version all set to latest. + * Converts a set of aspect classes to a set of {@link AspectVersion} with the version all set to + * latest. */ @Nonnull - public static Set<AspectVersion> latestAspectVersions(@Nonnull Set<Class<? 
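The map-based newFilter above drops null-valued entries and ANDs the remaining criteria inside a single conjunctive clause. A short usage sketch with illustrative values:

```java
import java.util.HashMap;
import java.util.Map;

// HashMap rather than Map.of, since a null value is part of the demonstration.
Map<String, String> params = new HashMap<>();
params.put("platform", "urn:li:dataPlatform:hive"); // illustrative value
params.put("origin", null);                         // dropped by newFilter
Filter filter = QueryUtils.newFilter(params);
// Resulting shape: or: [ and: [ platform == urn:li:dataPlatform:hive ] ]
```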
extends RecordTemplate>> aspectClasses) { + public static Set<AspectVersion> latestAspectVersions( + @Nonnull Set<Class<? extends RecordTemplate>> aspectClasses) { return aspectClasses.stream() - .map(aspectClass -> new AspectVersion().setAspect(ModelUtils.getAspectName(aspectClass)) - .setVersion(LATEST_VERSION)) + .map( + aspectClass -> + new AspectVersion() + .setAspect(ModelUtils.getAspectName(aspectClass)) + .setVersion(LATEST_VERSION)) .collect(Collectors.toSet()); } @@ -97,7 +112,9 @@ public static Set<AspectVersion> latestAspectVersions(@Nonnull Set<Class<? exten * @return RelationshipFilter */ @Nonnull - public static RelationshipFilter createRelationshipFilter(@Nonnull String field, @Nonnull String value, + public static RelationshipFilter createRelationshipFilter( + @Nonnull String field, + @Nonnull String value, @Nonnull RelationshipDirection relationshipDirection) { return createRelationshipFilter(newFilter(field, value), relationshipDirection); } @@ -110,14 +127,14 @@ public static RelationshipFilter createRelationshipFilter(@Nonnull String field, * @return RelationshipFilter */ @Nonnull - public static RelationshipFilter createRelationshipFilter(@Nonnull Filter filter, - @Nonnull RelationshipDirection relationshipDirection) { + public static RelationshipFilter createRelationshipFilter( + @Nonnull Filter filter, @Nonnull RelationshipDirection relationshipDirection) { return new RelationshipFilter().setOr(filter.getOr()).setDirection(relationshipDirection); } @Nonnull - public static RelationshipFilter newRelationshipFilter(@Nonnull Filter filter, - @Nonnull RelationshipDirection relationshipDirection) { + public static RelationshipFilter newRelationshipFilter( + @Nonnull Filter filter, @Nonnull RelationshipDirection relationshipDirection) { return new RelationshipFilter().setOr(filter.getOr()).setDirection(relationshipDirection); } @@ -152,7 +169,9 @@ public static boolean hasMore(int from, int size, int totalPageCount) { @Nonnull public static Filter getFilterFromCriteria(List<Criterion> criteria) { - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java index 1995e3c1b80a1..a735374b54858 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java @@ -13,7 +13,6 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - public class SecretService { private static final int LOWERCASE_ASCII_START = 97; private static final int LOWERCASE_ASCII_END = 122; @@ -82,7 +81,8 @@ public String decrypt(String encryptedValue) { } public String generateUrlSafeToken(int length) { - return _secureRandom.ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1) + return _secureRandom + .ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1) .mapToObj(i -> String.valueOf((char) i)) .collect(Collectors.joining()); } @@ -98,7 +98,8 @@ public byte[] generateSalt(int length) { return randomBytes; } - public String getHashedPassword(@Nonnull byte[] salt, @Nonnull String password) throws IOException { + public String getHashedPassword(@Nonnull byte[] 
salt, @Nonnull String password) + throws IOException { byte[] saltedPassword = saltPassword(salt, password); byte[] hashedPassword = _messageDigest.digest(saltedPassword); return _encoder.encodeToString(hashedPassword); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java index 7fac2e0124897..ce7473fb29dc4 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -20,15 +22,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - @Slf4j public class BaseService { protected final EntityClient entityClient; protected final Authentication systemAuthentication; - public BaseService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public BaseService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { this.entityClient = Objects.requireNonNull(entityClient); this.systemAuthentication = Objects.requireNonNull(systemAuthentication); } @@ -44,13 +45,13 @@ protected Map<Urn, GlobalTags> getTagsAspects( } try { - Map<Urn, Aspect> aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.GLOBAL_TAGS_ASPECT_NAME, - this.entityClient, - authentication - ); + Map<Urn, Aspect> aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.GLOBAL_TAGS_ASPECT_NAME, + this.entityClient, + authentication); final Map<Urn, GlobalTags> finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -83,13 +84,13 @@ protected Map<Urn, EditableSchemaMetadata> getEditableSchemaMetadataAspects( } try { - Map<Urn, Aspect> aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - this.entityClient, - authentication - ); + Map<Urn, Aspect> aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + this.entityClient, + authentication); final Map<Urn, EditableSchemaMetadata> finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -122,13 +123,13 @@ protected Map<Urn, Ownership> getOwnershipAspects( } try { - Map<Urn, Aspect> aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.OWNERSHIP_ASPECT_NAME, - this.entityClient, - authentication - ); + Map<Urn, Aspect> aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. 
+ entityUrns, + Constants.OWNERSHIP_ASPECT_NAME, + this.entityClient, + authentication); final Map<Urn, Ownership> finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -161,13 +162,13 @@ protected Map<Urn, GlossaryTerms> getGlossaryTermsAspects( } try { - Map<Urn, Aspect> aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - this.entityClient, - authentication - ); + Map<Urn, Aspect> aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + this.entityClient, + authentication); final Map<Urn, GlossaryTerms> finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -200,13 +201,13 @@ protected Map<Urn, Domains> getDomainsAspects( } try { - Map<Urn, Aspect> aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.DOMAINS_ASPECT_NAME, - this.entityClient, - authentication - ); + Map<Urn, Aspect> aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.DOMAINS_ASPECT_NAME, + this.entityClient, + authentication); final Map<Urn, Domains> finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -228,7 +229,9 @@ protected Map<Urn, Domains> getDomainsAspects( } } - protected void ingestChangeProposals(@Nonnull List<MetadataChangeProposal> changes, @Nonnull Authentication authentication) throws Exception { + protected void ingestChangeProposals( + @Nonnull List<MetadataChangeProposal> changes, @Nonnull Authentication authentication) + throws Exception { // TODO: Replace this with a batch ingest proposals endpoint. for (MetadataChangeProposal change : changes) { this.entityClient.ingestProposal(change, authentication); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java index 87b96e4cef498..10016ee89605b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java @@ -22,22 +22,20 @@ import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; import java.util.Objects; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; /** * This class is used to permit easy CRUD operations on a DataProduct * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. - * + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class DataProductService { @@ -52,18 +50,15 @@ public DataProductService(@Nonnull EntityClient entityClient, @Nonnull GraphClie /** * Creates a new Data Product. 
* - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param name optional name of the DataProduct * @param description optional description of the DataProduct - * * @return the urn of the newly created DataProduct */ public Urn createDataProduct( - @Nullable String name, - @Nullable String description, - @Nonnull Authentication authentication) { + @Nullable String name, @Nullable String description, @Nonnull Authentication authentication) { // 1. Generate a unique id for the new DataProduct. final DataProductKey key = new DataProductKey(); @@ -76,10 +71,14 @@ public Urn createDataProduct( // 3. Write the new dataProduct to GMS, return the new URN. try { - final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_PRODUCT_ENTITY_NAME); - return UrnUtils.getUrn(_entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), authentication, - false)); + final Urn entityUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_PRODUCT_ENTITY_NAME); + return UrnUtils.getUrn( + _entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create DataProduct", e); } @@ -88,8 +87,8 @@ public Urn createDataProduct( /** * Updates an existing DataProduct. If a provided field is null, the previous value will be kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param urn the urn of the DataProduct * @param name optional name of the DataProduct @@ -108,7 +107,9 @@ public Urn updateDataProduct( DataProductProperties properties = getDataProductProperties(urn, authentication); if (properties == null) { - throw new IllegalArgumentException(String.format("Failed to update DataProduct. DataProduct with urn %s does not exist.", urn)); + throw new IllegalArgumentException( + String.format( + "Failed to update DataProduct. DataProduct with urn %s does not exist.", urn)); } // 2. Apply changes to existing DataProduct @@ -121,9 +122,12 @@ public Urn updateDataProduct( // 3. Write changes to GMS try { - return UrnUtils.getUrn(_entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - urn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), authentication, - false)); + return UrnUtils.getUrn( + _entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), + authentication, + false)); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", urn), e); } @@ -132,16 +136,23 @@ public Urn updateDataProduct( /** * @param dataProductUrn the urn of the DataProduct * @param authentication the authentication to use - * - * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not exist. 
+ * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not + * exist. */ @Nullable - public DataProductProperties getDataProductProperties(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { + public DataProductProperties getDataProductProperties( + @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - return new DataProductProperties(response.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + return new DataProductProperties( + response + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data()); } // No aspect found return null; @@ -150,41 +161,44 @@ public DataProductProperties getDataProductProperties(@Nonnull final Urn dataPro /** * @param dataProductUrn the urn of the DataProduct * @param authentication the authentication to use - * - * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not exist. + * @return an instance of {@link Domains} for the DataProduct, null if it does not + * exist. */ @Nullable - public Domains getDataProductDomains(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { + public Domains getDataProductDomains( + @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - final EntityResponse response = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), - authentication - ); + final EntityResponse response = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), + authentication); if (response != null && response.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) { - return new Domains(response.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data()); + return new Domains( + response.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); } } /** - * Returns an instance of {@link EntityResponse} for the specified DataProduct urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified DataProduct urn, or null if one + * cannot be found. * * @param dataProductUrn the urn of the DataProduct * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the DataProduct, null if it does not exist. 
*/ @Nullable - public EntityResponse getDataProductEntityResponse(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { + public EntityResponse getDataProductEntityResponse( + @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -192,79 +206,92 @@ public EntityResponse getDataProductEntityResponse(@Nonnull final Urn dataProduc Constants.DATA_PRODUCT_ENTITY_NAME, dataProductUrn, ImmutableSet.of(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - authentication - ); + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); } } - /** - * Sets a given domain on a given Data Product. - */ - public void setDomain(@Nonnull final Urn dataProductUrn, @Nonnull final Urn domainUrn, @Nonnull final Authentication authentication) { + /** Sets a given domain on a given Data Product. */ + public void setDomain( + @Nonnull final Urn dataProductUrn, + @Nonnull final Urn domainUrn, + @Nonnull final Authentication authentication) { try { Domains domains = new Domains(); - EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), - authentication); + EntityResponse entityResponse = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), + authentication); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data(); domains = new Domains(dataMap); } final UrnArray newDomains = new UrnArray(); newDomains.add(domainUrn); domains.setDomains(newDomains); - _entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - dataProductUrn, Constants.DOMAINS_ASPECT_NAME, domains), authentication, false); + _entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + dataProductUrn, Constants.DOMAINS_ASPECT_NAME, domains), + authentication, + false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to set domain for DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to set domain for DataProduct with urn %s", dataProductUrn), e); } } /** * Deletes an existing DataProduct with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the DataProduct does not exist, no exception will be thrown. + * <p>If the DataProduct does not exist, no exception will be thrown. 
* * @param dataProductUrn the urn of the DataProduct * @param authentication the current authentication */ public void deleteDataProduct( - @Nonnull Urn dataProductUrn, - @Nonnull Authentication authentication) { + @Nonnull Urn dataProductUrn, @Nonnull Authentication authentication) { try { _entityClient.deleteEntity( Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"), Objects.requireNonNull(authentication, "authentication must not be null")); // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(dataProductUrn, authentication); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for DataProduct with urn %s", dataProductUrn), e); - } - }); + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(dataProductUrn, authentication); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for DataProduct with urn %s", + dataProductUrn), + e); + } + }); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to delete DataProduct with urn %s", dataProductUrn), e); } } /** * Sets a Data Product for a given list of entities. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * * @param dataProductUrn the urn of the Data Product to set - null if removing Data Product * @param resourceUrns the urns of the entities to add the Data Product to @@ -276,9 +303,11 @@ public void batchSetDataProduct( @Nonnull Authentication authentication, @Nonnull Urn actorUrn) { try { - DataProductProperties dataProductProperties = getDataProductProperties(dataProductUrn, authentication); + DataProductProperties dataProductProperties = + getDataProductProperties(dataProductUrn, authentication); if (dataProductProperties == null) { - throw new RuntimeException("Failed to batch set data product as data product does not exist"); + throw new RuntimeException( + "Failed to batch set data product as data product does not exist"); } DataProductAssociationArray dataProductAssociations = new DataProductAssociationArray(); @@ -286,15 +315,23 @@ public void batchSetDataProduct( dataProductAssociations = dataProductProperties.getAssets(); } - List<Urn> existingResourceUrns = dataProductAssociations.stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList()); - List<Urn> newResourceUrns = resourceUrns.stream().filter(urn -> !existingResourceUrns.contains(urn)).collect(Collectors.toList()); - - // unset existing data product on resources first as we only allow one data product on an entity at a time + List<Urn> existingResourceUrns = + dataProductAssociations.stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList()); + List<Urn> newResourceUrns = + resourceUrns.stream() + .filter(urn -> !existingResourceUrns.contains(urn)) + .collect(Collectors.toList()); + + // unset existing data product on resources first as we only allow one data product on an + // entity at a time for (Urn resourceUrn : resourceUrns) { 
unsetDataProduct(resourceUrn, authentication, actorUrn); } - AuditStamp nowAuditStamp = new AuditStamp().setTime(System.currentTimeMillis()).setActor(actorUrn); + AuditStamp nowAuditStamp = + new AuditStamp().setTime(System.currentTimeMillis()).setActor(actorUrn); for (Urn resourceUrn : newResourceUrns) { DataProductAssociation association = new DataProductAssociation(); association.setDestinationUrn(resourceUrn); @@ -306,53 +343,59 @@ public void batchSetDataProduct( dataProductProperties.setAssets(dataProductAssociations); _entityClient.ingestProposal( AspectUtils.buildMetadataChangeProposal( - dataProductUrn, - Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - dataProductProperties), + dataProductUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties), authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update assets for %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to update assets for %s", dataProductUrn), e); } } /** * Unsets a Data Product for a given entity. Remove this entity from its data product(s). * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * * @param resourceUrn the urn of the entity to remove the Data Product from * @param authentication the current authentication */ public void unsetDataProduct( - @Nonnull Urn resourceUrn, - @Nonnull Authentication authentication, - @Nonnull Urn actorUrn) { + @Nonnull Urn resourceUrn, @Nonnull Authentication authentication, @Nonnull Urn actorUrn) { try { List<String> relationshipTypes = ImmutableList.of("DataProductContains"); - EntityRelationships relationships = _graphClient.getRelatedEntities( - resourceUrn.toString(), - relationshipTypes, - RelationshipDirection.INCOMING, - 0, - 10, // should never be more than 1 as long as we only allow one - actorUrn.toString()); + EntityRelationships relationships = + _graphClient.getRelatedEntities( + resourceUrn.toString(), + relationshipTypes, + RelationshipDirection.INCOMING, + 0, + 10, // should never be more than 1 as long as we only allow one + actorUrn.toString()); if (relationships.hasRelationships() && relationships.getRelationships().size() > 0) { - relationships.getRelationships().forEach(relationship -> { - Urn dataProductUrn = relationship.getEntity(); - removeEntityFromDataProduct(dataProductUrn, resourceUrn, authentication); - }); + relationships + .getRelationships() + .forEach( + relationship -> { + Urn dataProductUrn = relationship.getEntity(); + removeEntityFromDataProduct(dataProductUrn, resourceUrn, authentication); + }); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset data product for %s", resourceUrn), e); + throw new RuntimeException( + String.format("Failed to unset data product for %s", resourceUrn), e); } } - private void removeEntityFromDataProduct(@Nonnull Urn dataProductUrn, @Nonnull Urn resourceUrn, @Nonnull Authentication authentication) { + private void removeEntityFromDataProduct( + @Nonnull Urn dataProductUrn, + @Nonnull Urn resourceUrn, + @Nonnull Authentication authentication) { try { - DataProductProperties dataProductProperties = getDataProductProperties(dataProductUrn, authentication); + DataProductProperties dataProductProperties = + getDataProductProperties(dataProductUrn, 
authentication); if (dataProductProperties == null) { throw new RuntimeException("Failed to unset data product as data product does not exist"); } @@ -373,23 +416,22 @@ private void removeEntityFromDataProduct(@Nonnull Urn dataProductUrn, @Nonnull U dataProductProperties.setAssets(finalAssociations); _entityClient.ingestProposal( AspectUtils.buildMetadataChangeProposal( - dataProductUrn, - Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - dataProductProperties), + dataProductUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties), authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset data product for %s", resourceUrn), e); + throw new RuntimeException( + String.format("Failed to unset data product for %s", resourceUrn), e); } } public boolean verifyEntityExists( - @Nonnull Urn entityUrn, - @Nonnull Authentication authentication) { + @Nonnull Urn entityUrn, @Nonnull Authentication authentication) { try { return _entityClient.exists(entityUrn, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to determine if entity with urn %s exists", entityUrn), e); + throw new RuntimeException( + String.format("Failed to determine if entity with urn %s exists", entityUrn), e); } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java index 782a261675add..c18122eb9bb31 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java @@ -1,10 +1,14 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.domain.Domains; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.mxe.MetadataChangeProposal; @@ -14,19 +18,15 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class DomainService extends BaseService { - public DomainService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public DomainService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -47,14 +47,19 @@ public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List<ResourceReferen * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { + public void batchSetDomain( + @Nonnull Urn domainUrn, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { log.debug("Batch setting Domain to entities. 
domain: {}, resources: {}", resources, domainUrn); try { setDomainForResources(domainUrn, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - domainUrn, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + domainUrn, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -65,7 +70,8 @@ public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List<ResourceReferen * @param domainUrns the urns of the domain to set * @param resources references to the resources to change */ - public void batchAddDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { + public void batchAddDomains( + @Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { batchAddDomains(domainUrns, resources, this.systemAuthentication); } @@ -76,14 +82,20 @@ public void batchAddDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<Resourc * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchAddDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { - log.debug("Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); + public void batchAddDomains( + @Nonnull List<Urn> domainUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { + log.debug( + "Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); try { addDomainsToResources(domainUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Domains %s to resources with urns %s!", - domainUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Domains %s to resources with urns %s!", + domainUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -103,13 +115,16 @@ public void batchUnsetDomain(@Nonnull List<ResourceReference> resources) { * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchUnsetDomain(@Nonnull List<ResourceReference> resources, @Nullable Authentication authentication) { + public void batchUnsetDomain( + @Nonnull List<ResourceReference> resources, @Nullable Authentication authentication) { log.debug("Batch unsetting Domains to entities. 
resources: {}", resources); try { unsetDomainForResources(resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset add Domain for resources with urns %s!", - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to unset add Domain for resources with urns %s!", + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -120,7 +135,8 @@ public void batchUnsetDomain(@Nonnull List<ResourceReference> resources, @Nullab * @param domainUrns the urns of domains to remove * @param resources references to the resources to change */ - public void batchRemoveDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { + public void batchRemoveDomains( + @Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { batchRemoveDomains(domainUrns, resources, this.systemAuthentication); } @@ -131,23 +147,29 @@ public void batchRemoveDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<Reso * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchRemoveDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources, @Nullable Authentication authentication) { - log.debug("Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); + public void batchRemoveDomains( + @Nonnull List<Urn> domainUrns, + @Nonnull List<ResourceReference> resources, + @Nullable Authentication authentication) { + log.debug( + "Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); try { removeDomainsFromResources(domainUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Domains %s to resources with urns %s!", - domainUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), - e); + throw new RuntimeException( + String.format( + "Failed to batch add Domains %s to resources with urns %s!", + domainUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + e); } } private void setDomainForResources( com.linkedin.common.urn.Urn domainUrn, List<ResourceReference> resources, - @Nullable Authentication authentication - ) throws Exception { + @Nullable Authentication authentication) + throws Exception { final List<MetadataChangeProposal> changes = buildSetDomainProposals(domainUrn, resources); ingestChangeProposals(changes, authentication); } @@ -155,40 +177,37 @@ private void setDomainForResources( private void addDomainsToResources( List<com.linkedin.common.urn.Urn> domainUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildAddDomainsProposals(domainUrns, resources, authentication); + @Nonnull Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildAddDomainsProposals(domainUrns, resources, authentication); ingestChangeProposals(changes, authentication); } private void unsetDomainForResources( - List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { + List<ResourceReference> resources, @Nonnull Authentication authentication) throws Exception { final List<MetadataChangeProposal> changes = buildUnsetDomainProposals(resources); 
ingestChangeProposals(changes, authentication); } public void removeDomainsFromResources( - List<Urn> domains, - List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildRemoveDomainsProposals(domains, resources, authentication); + List<Urn> domains, List<ResourceReference> resources, @Nonnull Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildRemoveDomainsProposals(domains, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting @Nonnull List<MetadataChangeProposal> buildSetDomainProposals( - com.linkedin.common.urn.Urn domainUrn, - List<ResourceReference> resources - ) { + com.linkedin.common.urn.Urn domainUrn, List<ResourceReference> resources) { List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { Domains domains = new Domains(); domains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + changes.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return changes; } @@ -198,40 +217,40 @@ List<MetadataChangeProposal> buildSetDomainProposals( List<MetadataChangeProposal> buildAddDomainsProposals( List<com.linkedin.common.urn.Urn> domainUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws URISyntaxException { + @Nonnull Authentication authentication) + throws URISyntaxException { - final Map<Urn, Domains> domainAspects = getDomainsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Domains(), - authentication - ); + final Map<Urn, Domains> domainAspects = + getDomainsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Domains(), + authentication); - final List<MetadataChangeProposal> proposals = new ArrayList<>(); - for (ResourceReference resource : resources) { - Domains domains = domainAspects.get(resource.getUrn()); - if (domains == null) { - continue; - } - if (!domains.hasDomains()) { - domains.setDomains(new UrnArray()); - } - addDomainsIfNotExists(domains, domainUrns); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); - } - return proposals; + final List<MetadataChangeProposal> proposals = new ArrayList<>(); + for (ResourceReference resource : resources) { + Domains domains = domainAspects.get(resource.getUrn()); + if (domains == null) { + continue; + } + if (!domains.hasDomains()) { + domains.setDomains(new UrnArray()); + } + addDomainsIfNotExists(domains, domainUrns); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + } + return proposals; } @VisibleForTesting @Nonnull - List<MetadataChangeProposal> buildUnsetDomainProposals( - List<ResourceReference> resources - ) { + List<MetadataChangeProposal> buildUnsetDomainProposals(List<ResourceReference> resources) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { Domains domains = new Domains(); domains.setDomains(new UrnArray(Collections.emptyList())); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + changes.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, 
domains)); } return changes; } @@ -241,13 +260,12 @@ List<MetadataChangeProposal> buildUnsetDomainProposals( List<MetadataChangeProposal> buildRemoveDomainsProposals( List<Urn> domainUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) { - final Map<Urn, Domains> domainAspects = getDomainsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Domains(), - authentication - ); + @Nonnull Authentication authentication) { + final Map<Urn, Domains> domainAspects = + getDomainsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Domains(), + authentication); final List<MetadataChangeProposal> proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -259,7 +277,8 @@ List<MetadataChangeProposal> buildRemoveDomainsProposals( domains.setDomains(new UrnArray()); } removeDomainsIfExists(domains, domainUrns); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return proposals; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java index 36f2ba85ec98f..902ad07354d5e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java @@ -1,13 +1,17 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlossaryTerms; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; +import com.linkedin.common.GlossaryTerms; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.metadata.resource.SubResourceType; @@ -21,18 +25,14 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class GlossaryTermService extends BaseService { - public GlossaryTermService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public GlossaryTermService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -41,11 +41,9 @@ public GlossaryTermService(@Nonnull EntityClient entityClient, @Nonnull Authenti * * @param glossaryTermUrns the urns of the terms to add * @param resources references to the resources to change - * */ public void batchAddGlossaryTerms( - @Nonnull List<Urn> glossaryTermUrns, - @Nonnull List<ResourceReference> resources) { + @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources) { 
batchAddGlossaryTerms(glossaryTermUrns, resources, this.systemAuthentication); } @@ -55,19 +53,23 @@ public void batchAddGlossaryTerms( * @param glossaryTermUrns the urns of the terms to add * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ public void batchAddGlossaryTerms( @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { - log.debug("Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", resources, glossaryTermUrns); + log.debug( + "Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", + glossaryTermUrns, + resources); try { addGlossaryTermsToResources(glossaryTermUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add GlossaryTerms %s to resources with urns %s!", - glossaryTermUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add GlossaryTerms %s to resources with urns %s!", + glossaryTermUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -77,11 +79,9 @@ public void batchAddGlossaryTerms( * * @param glossaryTermUrns the urns of the terms to remove * @param resources references to the resources to change - * */ public void batchRemoveGlossaryTerms( - @Nonnull List<Urn> glossaryTermUrns, - @Nonnull List<ResourceReference> resources) { + @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources) { batchRemoveGlossaryTerms(glossaryTermUrns, resources, this.systemAuthentication); } @@ -91,59 +91,69 @@ public void batchRemoveGlossaryTerms( * @param glossaryTermUrns the urns of the terms to remove * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ public void batchRemoveGlossaryTerms( @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { - log.debug("Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", resources, glossaryTermUrns); + log.debug( + "Batch removing GlossaryTerms from entities. 
glossaryTerms: {}, resources: {}", + glossaryTermUrns, + resources); try { removeGlossaryTermsFromResources(glossaryTermUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add GlossaryTerms %s to resources with urns %s!", - glossaryTermUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove GlossaryTerms %s from resources with urns %s!", + glossaryTermUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } private void addGlossaryTermsToResources( - List<Urn> glossaryTerms, - List<ResourceReference> resources, - Authentication authentication - ) throws Exception { - List<MetadataChangeProposal> changes = buildAddGlossaryTermsProposals(glossaryTerms, resources, authentication); + List<Urn> glossaryTerms, List<ResourceReference> resources, Authentication authentication) + throws Exception { + List<MetadataChangeProposal> changes = + buildAddGlossaryTermsProposals(glossaryTerms, resources, authentication); ingestChangeProposals(changes, authentication); } private void removeGlossaryTermsFromResources( - List<Urn> glossaryTerms, - List<ResourceReference> resources, - Authentication authentication - ) throws Exception { - List<MetadataChangeProposal> changes = buildRemoveGlossaryTermsProposals(glossaryTerms, resources, authentication); + List<Urn> glossaryTerms, List<ResourceReference> resources, Authentication authentication) + throws Exception { + List<MetadataChangeProposal> changes = + buildRemoveGlossaryTermsProposals(glossaryTerms, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting List<MetadataChangeProposal> buildAddGlossaryTermsProposals( - List<Urn> glossaryTermUrns, - List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { + List<Urn> glossaryTermUrns, List<ResourceReference> resources, Authentication authentication) + throws URISyntaxException { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildAddGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildAddGlossaryTermsToSubResourceProposals(glossaryTermUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildAddGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = 
buildAddGlossaryTermsToSubResourceProposals( + glossaryTermUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -155,20 +165,29 @@ List<MetadataChangeProposal> buildAddGlossaryTermsProposals( List<MetadataChangeProposal> buildRemoveGlossaryTermsProposals( List<Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) { + Authentication authentication) { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildRemoveGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildRemoveGlossaryTermsToSubResourceProposals(glossaryTermUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildRemoveGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + buildRemoveGlossaryTermsToSubResourceProposals( + glossaryTermUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -180,14 +199,14 @@ List<MetadataChangeProposal> buildRemoveGlossaryTermsProposals( List<MetadataChangeProposal> buildAddGlossaryTermsToEntityProposals( List<com.linkedin.common.urn.Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { + Authentication authentication) + throws URISyntaxException { - final Map<Urn, GlossaryTerms> glossaryTermAspects = getGlossaryTermsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlossaryTerms(), - authentication - ); + final Map<Urn, GlossaryTerms> glossaryTermAspects = + getGlossaryTermsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlossaryTerms(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -199,10 +218,15 @@ List<MetadataChangeProposal> buildAddGlossaryTermsToEntityProposals( if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); - glossaryTerms.setAuditStamp(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + glossaryTerms.setAuditStamp( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } addGlossaryTermsIfNotExists(glossaryTerms, glossaryTermUrns); - 
changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms)); } return changes; } @@ -211,31 +235,36 @@ List<MetadataChangeProposal> buildAddGlossaryTermsToEntityProposals( List<MetadataChangeProposal> buildAddGlossaryTermsToSubResourceProposals( final List<Urn> glossaryTermUrns, final List<ResourceReference> resources, - final Authentication authentication - ) throws URISyntaxException { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Authentication authentication) + throws URISyntaxException { + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } addGlossaryTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -245,14 +274,13 @@ List<MetadataChangeProposal> buildAddGlossaryTermsToSubResourceProposals( List<MetadataChangeProposal> buildRemoveGlossaryTermsToEntityProposals( List<Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) { + Authentication authentication) { - final Map<Urn, GlossaryTerms> glossaryTermAspects = getGlossaryTermsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlossaryTerms(), - authentication - ); + final Map<Urn, GlossaryTerms> glossaryTermAspects = + getGlossaryTermsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlossaryTerms(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -262,15 +290,15 @@ List<MetadataChangeProposal> buildRemoveGlossaryTermsToEntityProposals( } if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); - glossaryTerms.setAuditStamp(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + glossaryTerms.setAuditStamp( + new AuditStamp() + .setTime(System.currentTimeMillis()) + 
.setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } removeGlossaryTermsIfExists(glossaryTerms, glossaryTermUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms); changes.add(proposal); } @@ -281,37 +309,42 @@ List<MetadataChangeProposal> buildRemoveGlossaryTermsToEntityProposals( List<MetadataChangeProposal> buildRemoveGlossaryTermsToSubResourceProposals( List<Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) { + Authentication authentication) { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeGlossaryTermsIfExists(editableFieldInfo.getGlossaryTerms(), glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; } - private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> glossaryTermUrns) throws URISyntaxException { + private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> glossaryTermUrns) + throws URISyntaxException { if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); } @@ -320,7 +353,8 @@ private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> List<Urn> glossaryTermsToAdd = new ArrayList<>(); for (Urn glossaryTermUrn : glossaryTermUrns) { - if (glossaryTermAssociationArray.stream().anyMatch(association -> association.getUrn().equals(glossaryTermUrn))) { + if (glossaryTermAssociationArray.stream() + .anyMatch(association -> association.getUrn().equals(glossaryTermUrn))) { continue; } glossaryTermsToAdd.add(glossaryTermUrn); @@ -338,30 +372,30 @@ private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> } } - private static GlossaryTermAssociationArray removeGlossaryTermsIfExists(GlossaryTerms glossaryTerms, List<Urn> glossaryTermUrns) { + private static GlossaryTermAssociationArray removeGlossaryTermsIfExists( + GlossaryTerms glossaryTerms, 
List<Urn> glossaryTermUrns) { if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); } GlossaryTermAssociationArray glossaryTermAssociationArray = glossaryTerms.getTerms(); for (Urn glossaryTermUrn : glossaryTermUrns) { - glossaryTermAssociationArray.removeIf(association -> association.getUrn().equals(glossaryTermUrn)); + glossaryTermAssociationArray.removeIf( + association -> association.getUrn().equals(glossaryTermUrn)); } return glossaryTermAssociationArray; } private static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional<EditableSchemaFieldInfo> fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java index 5649be0c701ca..cd5202ce75b64 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.chart.ChartDataSourceTypeArray; @@ -24,15 +26,12 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.entity.AspectUtils.*; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -42,77 +41,96 @@ public class LineageService { private final EntityClient _entityClient; /** - * Validates that a given list of urns are all datasets and all exist. Throws error if either condition is false for any urn. + * Validates that a given list of urns are all datasets and all exist. Throws error if either + * condition is false for any urn. */ - public void validateDatasetUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDatasetUrns( + @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add lineage edge with non-dataset node when we expect a dataset. 
Upstream urn: %s", urn)); + throw new IllegalArgumentException( + String.format( + "Tried to add lineage edge with non-dataset node when we expect a dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } /** - * Validates that a given list of urns are all either datasets or charts and that they exist. Otherwise, throw an error. + * Validates that a given list of urns are all either datasets or charts and that they exist. + * Otherwise, throw an error. */ - public void validateDashboardUpstreamUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDashboardUpstreamUrns( + @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { - if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) && !urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add an upstream to a dashboard that isn't a chart or dataset. Upstream urn: %s", urn)); + if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) + && !urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Tried to add an upstream to a dashboard that isn't a chart or dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } - /** - * Validates that a given urn exists using the entityService - */ - public void validateUrnExists(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws Exception { + /** Validates that a given urn exists using the entityService */ + public void validateUrnExists( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) throws Exception { if (!_entityClient.exists(urn, authentication)) { throw new IllegalArgumentException(String.format("Error: urn does not exist: %s", urn)); } } /** - * Updates dataset lineage by taking in a list of upstreams to add and to remove and updating the existing - * upstreamLineage aspect. + * Updates dataset lineage by taking in a list of upstreams to add and to remove and updating the + * existing upstreamLineage aspect. */ public void updateDatasetLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDatasetUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDatasetLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDatasetLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update dataset lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update dataset lineage for urn %s", downstreamUrn), e); } } - /** - * Builds an MCP of UpstreamLineage for dataset entities. 
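The validation helpers above all share one shape: check each urn's entity type, then confirm the entity exists through the EntityClient. A condensed sketch of that shape (the helper name is hypothetical; it assumes the _entityClient field of the surrounding class, and the exists call is the one used in this hunk):

    void validateUrnsOfType(
        List<Urn> urns, Set<String> allowedEntityTypes, Authentication authentication)
        throws Exception {
      for (final Urn urn : urns) {
        // Reject edges that point at the wrong kind of entity.
        if (!allowedEntityTypes.contains(urn.getEntityType())) {
          throw new IllegalArgumentException(
              String.format("Unexpected entity type for upstream urn: %s", urn));
        }
        // Reject edges that point at entities that were never ingested.
        if (!_entityClient.exists(urn, authentication)) {
          throw new IllegalArgumentException(String.format("Error: urn does not exist: %s", urn));
        }
      }
    }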
- */ + /** Builds an MCP of UpstreamLineage for dataset entities. */ @Nonnull public MetadataChangeProposal buildDatasetLineageProposal( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DATASET_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATASET_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + authentication); UpstreamLineage upstreamLineage = new UpstreamLineage(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data(); upstreamLineage = new UpstreamLineage(dataMap); } @@ -129,7 +147,6 @@ public MetadataChangeProposal buildDatasetLineageProposal( upstreamsToAdd.add(upstreamUrn); } - for (final Urn upstreamUrn : upstreamsToAdd) { final Upstream newUpstream = new Upstream(); newUpstream.setDataset(DatasetUrn.createFromUrn(upstreamUrn)); @@ -147,52 +164,59 @@ public MetadataChangeProposal buildDatasetLineageProposal( upstreamLineage.setUpstreams(upstreams); return buildMetadataChangeProposal( - downstreamUrn, Constants.UPSTREAM_LINEAGE_ASPECT_NAME, upstreamLineage - ); + downstreamUrn, Constants.UPSTREAM_LINEAGE_ASPECT_NAME, upstreamLineage); } - /** - * Updates Chart lineage by building and ingesting an MCP based on inputs. - */ + /** Updates Chart lineage by building and ingesting an MCP based on inputs. */ public void updateChartLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { // ensure all upstream urns are dataset urns and they exist validateDatasetUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildChartLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildChartLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); } } - /** - * Builds an MCP of ChartInfo for chart entities. - */ + /** Builds an MCP of ChartInfo for chart entities. 
*/ @Nonnull public MetadataChangeProposal buildChartLineageProposal( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.CHART_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME), authentication); - - if (entityResponse == null || !entityResponse.getAspects().containsKey(Constants.CHART_INFO_ASPECT_NAME)) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s as chart info doesn't exist", downstreamUrn)); - } - - DataMap dataMap = entityResponse.getAspects().get(Constants.CHART_INFO_ASPECT_NAME).getValue().data(); + _entityClient.getV2( + Constants.CHART_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME), + authentication); + + if (entityResponse == null + || !entityResponse.getAspects().containsKey(Constants.CHART_INFO_ASPECT_NAME)) { + throw new RuntimeException( + String.format( + "Failed to update chart lineage for urn %s as chart info doesn't exist", + downstreamUrn)); + } + + DataMap dataMap = + entityResponse.getAspects().get(Constants.CHART_INFO_ASPECT_NAME).getValue().data(); ChartInfo chartInfo = new ChartInfo(dataMap); if (!chartInfo.hasInputEdges()) { chartInfo.setInputEdges(new EdgeArray()); @@ -205,10 +229,9 @@ public MetadataChangeProposal buildChartLineageProposal( final EdgeArray inputEdges = chartInfo.getInputEdges(); final List<Urn> upstreamsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamUrnsToAdd) { - if ( - inputEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || inputs.stream().anyMatch(input -> input.equals(upstreamUrn)) - ) { + if (inputEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || inputs.stream().anyMatch(input -> input.equals(upstreamUrn))) { continue; } upstreamsToAdd.add(upstreamUrn); @@ -219,7 +242,7 @@ public MetadataChangeProposal buildChartLineageProposal( } inputEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); - inputs.removeIf(input -> upstreamUrnsToRemove.contains(input.getDatasetUrn())); + inputs.removeIf(input -> upstreamUrnsToRemove.contains(input.getDatasetUrn())); chartInfo.setInputEdges(inputEdges); chartInfo.setInputs(inputs); @@ -227,31 +250,33 @@ public MetadataChangeProposal buildChartLineageProposal( return buildMetadataChangeProposal(downstreamUrn, Constants.CHART_INFO_ASPECT_NAME, chartInfo); } - /** - * Updates Dashboard lineage by building and ingesting an MCP based on inputs. - */ + /** Updates Dashboard lineage by building and ingesting an MCP based on inputs. 
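The chart proposal builder above is a read-modify-write: fetch ChartInfo, skip upstreams already present in either the new inputEdges field or the legacy inputs field, append the rest, then drop anything in the remove list from both fields. The core dedupe step in isolation (an illustrative fragment, variable names as in the hunk):

    // Skip urns already linked through either representation.
    final List<Urn> upstreamsToAdd = new ArrayList<>();
    for (Urn upstreamUrn : upstreamUrnsToAdd) {
      boolean alreadyLinked =
          inputEdges.stream()
                  .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn))
              || inputs.stream().anyMatch(input -> input.equals(upstreamUrn));
      if (!alreadyLinked) {
        upstreamsToAdd.add(upstreamUrn);
      }
    }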
*/ public void updateDashboardLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDashboardUpstreamUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDashboardLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDashboardLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update dashboard lineage for urn %s", downstreamUrn), e); } } /** - * Builds an MCP of DashboardInfo for dashboard entities. DashboardInfo has a list of chart urns and dataset urns pointing upstream. - * We need to filter out the chart urns and dataset urns separately in upstreamUrnsToAdd to add them to the correct fields. + * Builds an MCP of DashboardInfo for dashboard entities. DashboardInfo has a list of chart urns + * and dataset urns pointing upstream. We need to filter out the chart urns and dataset urns + * separately in upstreamUrnsToAdd to add them to the correct fields. */ @Nonnull public MetadataChangeProposal buildDashboardLineageProposal( @@ -259,41 +284,62 @@ public MetadataChangeProposal buildDashboardLineageProposal( @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DASHBOARD_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME), authentication); - - if (entityResponse == null || !entityResponse.getAspects().containsKey(Constants.DASHBOARD_INFO_ASPECT_NAME)) { - throw new RuntimeException(String.format("Failed to update dashboard lineage for urn %s as dashboard info doesn't exist", downstreamUrn)); - } - - DataMap dataMap = entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME).getValue().data(); + _entityClient.getV2( + Constants.DASHBOARD_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME), + authentication); + + if (entityResponse == null + || !entityResponse.getAspects().containsKey(Constants.DASHBOARD_INFO_ASPECT_NAME)) { + throw new RuntimeException( + String.format( + "Failed to update dashboard lineage for urn %s as dashboard info doesn't exist", + downstreamUrn)); + } + + DataMap dataMap = + entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME).getValue().data(); DashboardInfo dashboardInfo = new DashboardInfo(dataMap); // first, deal with chart edges - updateUpstreamCharts(dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamCharts( + dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); //
next, deal with dataset edges - updateUpstreamDatasets(dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDatasets( + dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); - return buildMetadataChangeProposal(downstreamUrn, Constants.DASHBOARD_INFO_ASPECT_NAME, dashboardInfo); + return buildMetadataChangeProposal( + downstreamUrn, Constants.DASHBOARD_INFO_ASPECT_NAME, dashboardInfo); } /** - * Updates the charts and chartEdges fields on the DashboardInfo aspect. First, add any new lineage edges not already represented - * in the existing fields to chartEdges. Then, remove all lineage edges from charts and chartEdges fields that are in upstreamUrnsToRemove. - * Then update the DashboardInfo aspect. + * Updates the charts and chartEdges fields on the DashboardInfo aspect. First, add any new + * lineage edges not already represented in the existing fields to chartEdges. Then, remove all + * lineage edges from charts and chartEdges fields that are in upstreamUrnsToRemove. Then update + * the DashboardInfo aspect. */ - private void updateUpstreamCharts(DashboardInfo dashboardInfo, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dashboardUrn, Urn actor) { + private void updateUpstreamCharts( + DashboardInfo dashboardInfo, + List<Urn> upstreamUrnsToAdd, + List<Urn> upstreamUrnsToRemove, + Urn dashboardUrn, + Urn actor) { initializeChartEdges(dashboardInfo); final List<Urn> upstreamChartUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) + .collect(Collectors.toList()); final ChartUrnArray charts = dashboardInfo.getCharts(); final EdgeArray chartEdges = dashboardInfo.getChartEdges(); - final List<Urn> upstreamsChartsToAdd = getUpstreamChartToAdd(upstreamChartUrnsToAdd, chartEdges, charts); + final List<Urn> upstreamsChartsToAdd = + getUpstreamChartToAdd(upstreamChartUrnsToAdd, chartEdges, charts); for (final Urn upstreamUrn : upstreamsChartsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, chartEdges); @@ -305,7 +351,6 @@ private void updateUpstreamCharts(DashboardInfo dashboardInfo, List<Urn> upstrea dashboardInfo.setCharts(charts); } - private void initializeChartEdges(DashboardInfo dashboardInfo) { if (!dashboardInfo.hasChartEdges()) { dashboardInfo.setChartEdges(new EdgeArray()); @@ -316,15 +361,16 @@ private void initializeChartEdges(DashboardInfo dashboardInfo) { } /** - * Need to filter out any existing upstream chart urns in order to get a list of net new chart urns to add to dashboard lineage + * Need to filter out any existing upstream chart urns in order to get a list of net new chart + * urns to add to dashboard lineage */ - private List<Urn> getUpstreamChartToAdd(List<Urn> upstreamChartUrnsToAdd, List<Edge> chartEdges, ChartUrnArray charts) { + private List<Urn> getUpstreamChartToAdd( + List<Urn> upstreamChartUrnsToAdd, List<Edge> chartEdges, ChartUrnArray charts) { final List<Urn> upstreamsChartsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamChartUrnsToAdd) { - if ( - chartEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || charts.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (chartEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || charts.stream().anyMatch(chart -> chart.equals(upstreamUrn))) 
{ continue; } upstreamsChartsToAdd.add(upstreamUrn); @@ -332,25 +378,35 @@ private List<Urn> getUpstreamChartToAdd(List<Urn> upstreamChartUrnsToAdd, List<E return upstreamsChartsToAdd; } - private void removeChartLineageEdges(List<Edge> chartEdges, ChartUrnArray charts, List<Urn> upstreamUrnsToRemove) { + private void removeChartLineageEdges( + List<Edge> chartEdges, ChartUrnArray charts, List<Urn> upstreamUrnsToRemove) { chartEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); charts.removeIf(upstreamUrnsToRemove::contains); } /** - * Updates the datasets and datasetEdges fields on the DashboardInfo aspect. First, add any new lineage edges not already represented - * in the existing fields to datasetEdges.Then, remove all lineage edges from datasets and datasetEdges fields that are in upstreamUrnsToRemove. - * Then update the DashboardInfo aspect. + * Updates the datasets and datasetEdges fields on the DashboardInfo aspect. First, add any new + * lineage edges not already represented in the existing fields to datasetEdges. Then, remove all + * lineage edges from datasets and datasetEdges fields that are in upstreamUrnsToRemove. Then + * update the DashboardInfo aspect. */ - private void updateUpstreamDatasets(DashboardInfo dashboardInfo, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dashboardUrn, Urn actor) { + private void updateUpstreamDatasets( + DashboardInfo dashboardInfo, + List<Urn> upstreamUrnsToAdd, + List<Urn> upstreamUrnsToRemove, + Urn dashboardUrn, + Urn actor) { initializeDatasetEdges(dashboardInfo); final List<Urn> upstreamDatasetUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) + .collect(Collectors.toList()); final UrnArray datasets = dashboardInfo.getDatasets(); final EdgeArray datasetEdges = dashboardInfo.getDatasetEdges(); - final List<Urn> upstreamDatasetsToAdd = getUpstreamDatasetsToAdd(upstreamDatasetUrnsToAdd, datasetEdges, datasets); + final List<Urn> upstreamDatasetsToAdd = + getUpstreamDatasetsToAdd(upstreamDatasetUrnsToAdd, datasetEdges, datasets); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, datasetEdges); @@ -371,13 +427,13 @@ private void initializeDatasetEdges(DashboardInfo dashboardInfo) { } } - private List<Urn> getUpstreamDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, UrnArray datasets) { + private List<Urn> getUpstreamDatasetsToAdd( + List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, UrnArray datasets) { final List<Urn> upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - datasetEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || datasets.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (datasetEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || datasets.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -385,49 +441,60 @@ private List<Urn> getUpstreamDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd, L return upstreamDatasetsToAdd; } - private void removeDatasetLineageEdges(List<Edge> datasetEdges, UrnArray datasets, List<Urn> upstreamUrnsToRemove) { - datasetEdges.removeIf(inputEdge ->
upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeDatasetLineageEdges( + List<Edge> datasetEdges, UrnArray datasets, List<Urn> upstreamUrnsToRemove) { + datasetEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); datasets.removeIf(upstreamUrnsToRemove::contains); } /** - * Validates that a given list of urns are all either datasets or dataJobs and that they exist. Otherwise, throw an error. + * Validates that a given list of urns are all either datasets or dataJobs and that they exist. + * Otherwise, throw an error. */ - public void validateDataJobUpstreamUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDataJobUpstreamUrns( + @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { - if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) && !urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add an upstream to a dataJob that isn't a datJob or dataset. Upstream urn: %s", urn)); + if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) + && !urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Tried to add an upstream to a dataJob that isn't a dataJob or dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } - /** - * Updates DataJob lineage by building and ingesting an MCP based on inputs. - */ + /** Updates DataJob lineage by building and ingesting an MCP based on inputs. */ public void updateDataJobUpstreamLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDataJobUpstreamUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDataJobUpstreamLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDataJobUpstreamLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update data job lineage for urn %s", downstreamUrn), e); } } /** - * Builds an MCP of DataJobInputOutput for datajob entities. DataJobInputOutput has a list of dataset urns and datajob urns pointing upstream. - * We need to filter out the chart dataset and datajob urns separately in upstreamUrnsToAdd to add them to the correct fields. We deal with downstream - * pointing datasets in outputDatasets separately. + * Builds an MCP of DataJobInputOutput for datajob entities. DataJobInputOutput has a list of + * dataset urns and datajob urns pointing upstream.
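For context: one incoming upstream list feeds two different aspect fields here, so the builder buckets the urns by entity type before touching either field. Roughly (an illustrative fragment using the constants from this file):

    // Datasets land in inputDatasetEdges, data jobs in inputDatajobEdges.
    final List<Urn> upstreamDatasets =
        upstreamUrnsToAdd.stream()
            .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME))
            .collect(Collectors.toList());
    final List<Urn> upstreamDataJobs =
        upstreamUrnsToAdd.stream()
            .filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME))
            .collect(Collectors.toList());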
We need to filter out the dataset and + * datajob urns separately in upstreamUrnsToAdd to add them to the correct fields. We deal with + * downstream pointing datasets in outputDatasets separately. */ @Nonnull public MetadataChangeProposal buildDataJobUpstreamLineageProposal( @@ -435,46 +502,62 @@ public MetadataChangeProposal buildDataJobUpstreamLineageProposal( @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DATA_JOB_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATA_JOB_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), + authentication); DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .getValue() + .data(); dataJobInputOutput = new DataJobInputOutput(dataMap); } // first, deal with dataset edges - updateUpstreamDatasetsForDataJobs(dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDatasetsForDataJobs( + dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); // next, deal with dataJobs edges - updateUpstreamDataJobs(dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDataJobs( + dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); - return buildMetadataChangeProposal(downstreamUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); + return buildMetadataChangeProposal( + downstreamUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); } /** - * Updates the inputDatasets and inputDatasetEdges fields on the DataJobInputOutput aspect. First, add any new lineage - * edges not already represented in the existing fields to inputDatasetEdges. Then, remove all lineage edges from inputDatasets - * and inputDatasetEdges fields that are in upstreamUrnsToRemove. Then update the DataJobInputOutput aspect.
*/ private void updateUpstreamDatasetsForDataJobs( DataJobInputOutput dataJobInputOutput, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dashboardUrn, - Urn actor - ) { + Urn actor) { initializeInputDatasetEdges(dataJobInputOutput); final List<Urn> upstreamDatasetUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) + .collect(Collectors.toList()); final DatasetUrnArray inputDatasets = dataJobInputOutput.getInputDatasets(); final EdgeArray inputDatasetEdges = dataJobInputOutput.getInputDatasetEdges(); - final List<Urn> upstreamDatasetsToAdd = getInputOutputDatasetsToAdd(upstreamDatasetUrnsToAdd, inputDatasetEdges, inputDatasets); + final List<Urn> upstreamDatasetsToAdd = + getInputOutputDatasetsToAdd(upstreamDatasetUrnsToAdd, inputDatasetEdges, inputDatasets); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, inputDatasetEdges); @@ -495,14 +578,15 @@ private void initializeInputDatasetEdges(DataJobInputOutput dataJobInputOutput) } } - // get new dataset edges that we should be adding to inputDatasetEdges and outputDatasetEdges for the DataJobInputOutput aspect - private List<Urn> getInputOutputDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, DatasetUrnArray inputDatasets) { + // get new dataset edges that we should be adding to inputDatasetEdges and outputDatasetEdges for + // the DataJobInputOutput aspect + private List<Urn> getInputOutputDatasetsToAdd( + List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, DatasetUrnArray inputDatasets) { final List<Urn> upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - datasetEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || inputDatasets.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (datasetEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || inputDatasets.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -510,31 +594,36 @@ private List<Urn> getInputOutputDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd return upstreamDatasetsToAdd; } - private void removeDatasetEdges(List<Edge> datasetEdges, DatasetUrnArray datasets, List<Urn> upstreamUrnsToRemove) { - datasetEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeDatasetEdges( + List<Edge> datasetEdges, DatasetUrnArray datasets, List<Urn> upstreamUrnsToRemove) { + datasetEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); datasets.removeIf(upstreamUrnsToRemove::contains); } /** - * Updates the dataJobs and dataJobEdges fields on the DataJobInputOutput aspect. First, add any new lineage edges not already represented - * in the existing fields to dataJobEdges.Then, remove all lineage edges from dataJobs and dataJobEdges fields that are in upstreamUrnsToRemove. - * Then update the DataJobInputOutput aspect. + * Updates the dataJobs and dataJobEdges fields on the DataJobInputOutput aspect. First, add any + * new lineage edges not already represented in the existing fields to dataJobEdges. Then, remove + * all lineage edges from dataJobs and dataJobEdges fields that are in upstreamUrnsToRemove.
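Note the removal pattern used by removeDatasetEdges and its siblings above: every removal is mirrored across the new Edge array and the deprecated plain-urn array so the two representations stay consistent. In isolation (illustrative fragment, variable names as in the hunk):

    // Drop the edge objects whose destination is being unlinked...
    inputDatasetEdges.removeIf(
        inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn()));
    // ...and mirror the removal into the deprecated urn array.
    inputDatasets.removeIf(upstreamUrnsToRemove::contains);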
Then + * update the DataJobInputOutput aspect. */ private void updateUpstreamDataJobs( DataJobInputOutput dataJobInputOutput, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dataJobUrn, - Urn actor - ) { + Urn actor) { initializeInputDatajobEdges(dataJobInputOutput); final List<Urn> upstreamDatajobUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); final DataJobUrnArray dataJobs = dataJobInputOutput.getInputDatajobs(); final EdgeArray dataJobEdges = dataJobInputOutput.getInputDatajobEdges(); - final List<Urn> upstreamDatasetsToAdd = getInputDatajobsToAdd(upstreamDatajobUrnsToAdd, dataJobEdges, dataJobs); + final List<Urn> upstreamDatasetsToAdd = + getInputDatajobsToAdd(upstreamDatajobUrnsToAdd, dataJobEdges, dataJobs); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dataJobUrn, actor, dataJobEdges); @@ -555,13 +644,13 @@ private void initializeInputDatajobEdges(DataJobInputOutput dataJobInputOutput) } } - private List<Urn> getInputDatajobsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List<Edge> dataJobEdges, DataJobUrnArray dataJobs) { + private List<Urn> getInputDatajobsToAdd( + List<Urn> upstreamDatasetUrnsToAdd, List<Edge> dataJobEdges, DataJobUrnArray dataJobs) { final List<Urn> upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - dataJobEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || dataJobs.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (dataJobEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || dataJobs.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -569,30 +658,33 @@ private List<Urn> getInputDatajobsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List return upstreamDatasetsToAdd; } - private void removeInputDatajobEdges(List<Edge> dataJobEdges, DataJobUrnArray dataJobs, List<Urn> upstreamUrnsToRemove) { - dataJobEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeInputDatajobEdges( + List<Edge> dataJobEdges, DataJobUrnArray dataJobs, List<Urn> upstreamUrnsToRemove) { + dataJobEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); dataJobs.removeIf(upstreamUrnsToRemove::contains); } - /** - * Updates DataJob lineage in the downstream direction (outputDatasets and outputDatasetEdges) - */ + /** Updates DataJob lineage in the downstream direction (outputDatasets and outputDatasetEdges) */ public void updateDataJobDownstreamLineage( @Nonnull final Urn dataJobUrn, @Nonnull final List<Urn> downstreamUrnsToAdd, @Nonnull final List<Urn> downstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDatasetUrns(downstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - final MetadataChangeProposal changeProposal = buildDataJobDownstreamLineageProposal( - dataJobUrn, downstreamUrnsToAdd, 
downstreamUrnsToRemove, actor, authentication); + final MetadataChangeProposal changeProposal = + buildDataJobDownstreamLineageProposal( + dataJobUrn, downstreamUrnsToAdd, downstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", dataJobUrn), e); + throw new RuntimeException( + String.format("Failed to update data job lineage for urn %s", dataJobUrn), e); } } @@ -603,8 +695,9 @@ private void initializeOutputDatajobEdges(DataJobInputOutput dataJobInputOutput) } /** - * Builds an MCP of DataJobInputOutput for datajob entities. Specifically this is updating this aspect for lineage in the downstream - * direction. This includes the fields outputDatasets (deprecated) and outputDatasetEdges + * Builds an MCP of DataJobInputOutput for datajob entities. Specifically this is updating this + * aspect for lineage in the downstream direction. This includes the fields outputDatasets + * (deprecated) and outputDatasetEdges */ @Nonnull public MetadataChangeProposal buildDataJobDownstreamLineageProposal( @@ -612,14 +705,24 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal( @Nonnull final List<Urn> downstreamUrnsToAdd, @Nonnull final List<Urn> downstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { final EntityResponse entityResponse = - _entityClient.getV2(Constants.DATA_JOB_ENTITY_NAME, dataJobUrn, ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATA_JOB_ENTITY_NAME, + dataJobUrn, + ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), + authentication); DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .getValue() + .data(); dataJobInputOutput = new DataJobInputOutput(dataMap); } @@ -628,7 +731,8 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal( final DatasetUrnArray outputDatasets = dataJobInputOutput.getOutputDatasets(); final EdgeArray outputDatasetEdges = dataJobInputOutput.getOutputDatasetEdges(); - final List<Urn> downstreamDatasetsToAdd = getInputOutputDatasetsToAdd(downstreamUrnsToAdd, outputDatasetEdges, outputDatasets); + final List<Urn> downstreamDatasetsToAdd = + getInputOutputDatasetsToAdd(downstreamUrnsToAdd, outputDatasetEdges, outputDatasets); for (final Urn downstreamUrn : downstreamDatasetsToAdd) { addNewEdge(downstreamUrn, dataJobUrn, actor, outputDatasetEdges); @@ -639,15 +743,15 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal( dataJobInputOutput.setOutputDatasetEdges(outputDatasetEdges); dataJobInputOutput.setOutputDatasets(outputDatasets); - return buildMetadataChangeProposal(dataJobUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); + return buildMetadataChangeProposal( + dataJobUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); } private
void addNewEdge( @Nonnull final Urn upstreamUrn, @Nonnull final Urn downstreamUrn, @Nonnull final Urn actor, - @Nonnull final EdgeArray edgeArray - ) { + @Nonnull final EdgeArray edgeArray) { final Edge newEdge = new Edge(); newEdge.setDestinationUrn(upstreamUrn); newEdge.setSourceUrn(downstreamUrn); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java index 7385e8aa6acae..e030404cd2607 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java @@ -1,13 +1,17 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; -import com.linkedin.common.Ownership; import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.mxe.MetadataChangeProposal; @@ -15,20 +19,16 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class OwnerService extends BaseService { public static final String SYSTEM_ID = "__system__"; - public OwnerService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public OwnerService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -39,7 +39,10 @@ public OwnerService(@Nonnull EntityClient entityClient, @Nonnull Authentication * @param resources references to the resources to change * @param ownershipType the ownership type to add */ - public void batchAddOwners(@Nonnull List<Urn> ownerUrns, @Nonnull List<ResourceReference> resources, @Nonnull OwnershipType ownershipType) { + public void batchAddOwners( + @Nonnull List<Urn> ownerUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull OwnershipType ownershipType) { batchAddOwners(ownerUrns, resources, ownershipType, this.systemAuthentication); } @@ -60,9 +63,11 @@ public void batchAddOwners( try { addOwnersToResources(ownerUrns, resources, ownershipType, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + ownerUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -73,7 +78,8 @@ public void batchAddOwners( * @param ownerUrns the urns of the owners to remove * @param resources references to the resources to change */ - public void batchRemoveOwners(@Nonnull List<Urn> ownerUrns, @Nonnull List<ResourceReference> 
resources) { + public void batchRemoveOwners( + @Nonnull List<Urn> ownerUrns, @Nonnull List<ResourceReference> resources) { batchRemoveOwners(ownerUrns, resources, this.systemAuthentication); } @@ -92,9 +98,11 @@ public void batchRemoveOwners( try { removeOwnersFromResources(ownerUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove Owners %s from resources with urns %s!", + ownerUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -103,18 +111,18 @@ private void addOwnersToResources( List<com.linkedin.common.urn.Urn> ownerUrns, List<ResourceReference> resources, OwnershipType ownershipType, - Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildAddOwnersProposals(ownerUrns, resources, ownershipType, authentication); + Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildAddOwnersProposals(ownerUrns, resources, ownershipType, authentication); ingestChangeProposals(changes, authentication); } private void removeOwnersFromResources( - List<Urn> owners, - List<ResourceReference> resources, - Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildRemoveOwnersProposals(owners, resources, authentication); + List<Urn> owners, List<ResourceReference> resources, Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildRemoveOwnersProposals(owners, resources, authentication); ingestChangeProposals(changes, authentication); } @@ -123,14 +131,13 @@ List<MetadataChangeProposal> buildAddOwnersProposals( List<com.linkedin.common.urn.Urn> ownerUrns, List<ResourceReference> resources, OwnershipType ownershipType, - Authentication authentication - ) { + Authentication authentication) { - final Map<Urn, Ownership> ownershipAspects = getOwnershipAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Ownership(), - authentication - ); + final Map<Urn, Ownership> ownershipAspects = + getOwnershipAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Ownership(), + authentication); final List<MetadataChangeProposal> proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -142,28 +149,26 @@ List<MetadataChangeProposal> buildAddOwnersProposals( if (!owners.hasOwners()) { owners.setOwners(new OwnerArray()); - owners.setLastModified(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - ); + owners.setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } addOwnersIfNotExists(owners, ownerUrns, ownershipType); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners)); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners)); } return proposals; } @VisibleForTesting List<MetadataChangeProposal> buildRemoveOwnersProposals( - List<Urn> ownerUrns, - List<ResourceReference> resources, - Authentication authentication - ) { - final Map<Urn, Ownership>
ownershipAspects = getOwnershipAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Ownership(), - authentication - ); + List<Urn> ownerUrns, List<ResourceReference> resources, Authentication authentication) { + final Map<Urn, Ownership> ownershipAspects = + getOwnershipAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Ownership(), + authentication); final List<MetadataChangeProposal> proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -175,16 +180,15 @@ List<MetadataChangeProposal> buildRemoveOwnersProposals( owners.setOwners(new OwnerArray()); } removeOwnersIfExists(owners, ownerUrns); - proposals.add(buildMetadataChangeProposal( - resource.getUrn(), - Constants.OWNERSHIP_ASPECT_NAME, owners - )); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners)); } return proposals; } - private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) { + private void addOwnersIfNotExists( + Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) { if (!owners.hasOwners()) { owners.setOwners(new OwnerArray()); } @@ -193,7 +197,8 @@ private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, Ownersh List<Urn> ownersToAdd = new ArrayList<>(); for (Urn ownerUrn : ownerUrns) { - if (ownerAssociationArray.stream().anyMatch(association -> association.getOwner().equals(ownerUrn))) { + if (ownerAssociationArray.stream() + .anyMatch(association -> association.getOwner().equals(ownerUrn))) { continue; } ownersToAdd.add(ownerUrn); @@ -212,6 +217,7 @@ private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, Ownersh ownerAssociationArray.add(newOwner); } } + @VisibleForTesting static Urn mapOwnershipTypeToEntity(String type) { final String typeName = SYSTEM_ID + type.toLowerCase(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java index 821321b634881..f91f9fbfd93f8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java @@ -20,39 +20,41 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a DataHub Ownership Type. - * Currently it supports creating, updating, and removing a Ownership Type. + * This class is used to permit easy CRUD operations on a DataHub Ownership Type. Currently it + * supports creating, updating, and removing an Ownership Type. * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. * - * TODO: Ideally we have some basic caching of the view information inside of this class. + * <p>TODO: Ideally we have some basic caching of the view information inside of this class.
*/ @Slf4j public class OwnershipTypeService extends BaseService { public static final String SYSTEM_ID = "__system__"; - public OwnershipTypeService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public OwnershipTypeService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } /** * Creates a new Ownership Type. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param name optional name of the Ownership Type * @param description optional description of the Ownership Type * @param authentication the current authentication * @param currentTimeMs the current time in millis - * * @return the urn of the newly created Ownership Type */ - public Urn createOwnershipType(String name, @Nullable String description, @Nonnull Authentication authentication, + public Urn createOwnershipType( + String name, + @Nullable String description, + @Nonnull Authentication authentication, long currentTimeMs) { Objects.requireNonNull(name, "name must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); @@ -66,26 +68,33 @@ public Urn createOwnershipType(String name, @Nullable String description, @Nonnu ownershipTypeInfo.setName(name); ownershipTypeInfo.setDescription(description, SetMode.IGNORE_NULL); final AuditStamp auditStamp = - new AuditStamp().setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())).setTime(currentTimeMs); + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); ownershipTypeInfo.setCreated(auditStamp); ownershipTypeInfo.setLastModified(auditStamp); // 3. Write the new Ownership Type to GMS, return the new URN. try { - final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.OWNERSHIP_TYPE_ENTITY_NAME); - return UrnUtils.getUrn(this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(entityUrn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - ownershipTypeInfo), authentication, false)); + final Urn entityUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.OWNERSHIP_TYPE_ENTITY_NAME); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, ownershipTypeInfo), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create Ownership Type", e); } } /** - * Updates an existing Ownership Type. If a provided field is null, the previous value will be kept. + * Updates an existing Ownership Type. If a provided field is null, the previous value will be + * kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. 
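The create path below builds the info aspect, stamps created/lastModified with the acting user, and ingests it as a single proposal. A condensed sketch of the stamping step (illustrative, using the Pegasus fluent setters shown in this diff; the example name and the surrounding description parameter are assumptions):

    final AuditStamp stamp =
        new AuditStamp()
            .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))
            .setTime(System.currentTimeMillis());
    final OwnershipTypeInfo info = new OwnershipTypeInfo();
    info.setName("Data Steward"); // hypothetical example name
    info.setDescription(description, SetMode.IGNORE_NULL); // keep null descriptions unset
    info.setCreated(stamp);
    info.setLastModified(stamp);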
* * @param urn the urn of the Ownership Type * @param name optional name of the Ownership Type @@ -93,8 +102,12 @@ public Urn createOwnershipType(String name, @Nullable String description, @Nonnu * @param authentication the current authentication * @param currentTimeMs the current time in millis */ - public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullable String description, - @Nonnull Authentication authentication, long currentTimeMs) { + public void updateOwnershipType( + @Nonnull Urn urn, + @Nullable String name, + @Nullable String description, + @Nonnull Authentication authentication, + long currentTimeMs) { Objects.requireNonNull(urn, "urn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); @@ -103,7 +116,8 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab if (info == null) { throw new IllegalArgumentException( - String.format("Failed to update Ownership Type. Ownership Type with urn %s does not exist.", urn)); + String.format( + "Failed to update Ownership Type. Ownership Type with urn %s does not exist.", urn)); } // 2. Apply changes to existing Ownership Type @@ -115,12 +129,16 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab } info.setLastModified( - new AuditStamp().setTime(currentTimeMs).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(urn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, info), authentication, + AspectUtils.buildMetadataChangeProposal( + urn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, info), + authentication, false); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", urn), e); @@ -130,15 +148,16 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab /** * Deletes an existing Ownership Type with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the Ownership Type does not exist, no exception will be thrown. + * <p>If the Ownership Type does not exist, no exception will be thrown. 
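The delete path distinguishes soft and hard deletion; judging from `isSystemOwnershipType` and the log line in the implementation, system-provided types appear to be soft deleted via the Status aspect while custom types are hard deleted. A hedged caller sketch:

```java
// Hedged sketch: deleteReferences only matters on the hard-delete branch;
// soft deletion just writes Status.removed = true.
ownershipTypeService.deleteOwnershipType(typeUrn, /* deleteReferences= */ true, authentication);
// Deleting a urn that does not exist is a no-op rather than an error.
```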
* * @param urn the urn of the Ownership Type * @param authentication the current authentication */ - public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Nonnull Authentication authentication) { + public void deleteOwnershipType( + @Nonnull Urn urn, boolean deleteReferences, @Nonnull Authentication authentication) { Objects.requireNonNull(urn, "Ownership TypeUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -146,8 +165,11 @@ public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Non log.info("Soft deleting ownership type: {}", urn); final Status statusAspect = new Status(); statusAspect.setRemoved(true); - this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(urn, Constants.STATUS_ASPECT_NAME, - statusAspect), authentication, false); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.STATUS_ASPECT_NAME, statusAspect), + authentication, + false); } else { this.entityClient.deleteEntity(urn, authentication); if (deleteReferences) { @@ -155,12 +177,14 @@ public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Non } } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete Ownership Type with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to delete Ownership Type with urn %s", urn), e); } } /** * Return whether the provided urn is for a system provided ownership type. + * * @param urn the urn of the Ownership Type * @return true is the ownership type is a system default. */ @@ -169,21 +193,23 @@ private boolean isSystemOwnershipType(Urn urn) { } /** - * Returns an instance of {@link OwnershipTypeInfo} for the specified Ownership Type urn, - * or null if one cannot be found. + * Returns an instance of {@link OwnershipTypeInfo} for the specified Ownership Type urn, or null + * if one cannot be found. * * @param ownershipTypeUrn the urn of the Ownership Type * @param authentication the authentication to use - * - * @return an instance of {@link OwnershipTypeInfo} for the Ownership Type, null if it does not exist. + * @return an instance of {@link OwnershipTypeInfo} for the Ownership Type, null if it does not + * exist. */ @Nullable - public OwnershipTypeInfo getOwnershipTypeInfo(@Nonnull final Urn ownershipTypeUrn, - @Nonnull final Authentication authentication) { + public OwnershipTypeInfo getOwnershipTypeInfo( + @Nonnull final Urn ownershipTypeUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(ownershipTypeUrn, "ownershipTypeUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); - final EntityResponse response = getOwnershipTypeEntityResponse(ownershipTypeUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)) { + final EntityResponse response = + getOwnershipTypeEntityResponse(ownershipTypeUrn, authentication); + if (response != null + && response.getAspects().containsKey(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)) { return new OwnershipTypeInfo( response.getAspects().get(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME).getValue().data()); } @@ -192,24 +218,28 @@ public OwnershipTypeInfo getOwnershipTypeInfo(@Nonnull final Urn ownershipTypeUr } /** - * Returns an instance of {@link EntityResponse} for the specified Ownership Type urn, - * or null if one cannot be found. 
+ * Returns an instance of {@link EntityResponse} for the specified Ownership Type urn, or null if + * one cannot be found. * * @param ownershipTypeUrn the urn of the Ownership Type. * @param authentication the authentication to use - * - * @return an instance of {@link EntityResponse} for the Ownership Type, null if it does not exist. + * @return an instance of {@link EntityResponse} for the Ownership Type, null if it does not + * exist. */ @Nullable - public EntityResponse getOwnershipTypeEntityResponse(@Nonnull final Urn ownershipTypeUrn, - @Nonnull final Authentication authentication) { + public EntityResponse getOwnershipTypeEntityResponse( + @Nonnull final Urn ownershipTypeUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(ownershipTypeUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - return this.entityClient.getV2(Constants.OWNERSHIP_TYPE_ENTITY_NAME, ownershipTypeUrn, - ImmutableSet.of(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME), authentication); + return this.entityClient.getV2( + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + ownershipTypeUrn, + ImmutableSet.of(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME), + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Ownership Type with urn %s", ownershipTypeUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve Ownership Type with urn %s", ownershipTypeUrn), e); } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java index b3765d1d9a4e0..ae289c067a78f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java @@ -27,27 +27,26 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a Query - * Currently it supports creating and removing a Query. - * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + * This class is used to permit easy CRUD operations on a Query Currently it supports creating and + * removing a Query. * + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class QueryService extends BaseService { - public QueryService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public QueryService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } /** * Creates a new Query. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. 
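As a reading aid for the reformatted QueryService, a hedged creation sketch. The full parameter list of `createQuery` is not visible in this hunk, so the `source`, `statement`, and `subjects` arguments below follow the javadoc order and are assumptions:

```java
// Assumed argument order: name, description, source, statement, subjects, auth, time.
Urn queryUrn =
    queryService.createQuery(
        "Daily revenue",
        "Aggregates orders per day",
        QuerySource.MANUAL,                          // assumption: enum constant
        new QueryStatement().setValue("SELECT 1"),   // assumption: fluent Pegasus setter
        List.of(new QuerySubject().setEntity(datasetUrn)),
        authentication,
        System.currentTimeMillis());
```

Note that the implementation ingests two aspects, QueryProperties and then QuerySubjects, and the returned urn comes from the second proposal.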
* * @param name optional name of the Query * @param description optional description of the Query @@ -56,7 +55,6 @@ public QueryService(@Nonnull EntityClient entityClient, @Nonnull Authentication * @param subjects the query subjects * @param authentication the current authentication * @param currentTimeMs the current time in millis - * * @return the urn of the newly created View */ public Urn createQuery( @@ -82,9 +80,10 @@ public Urn createQuery( queryProperties.setStatement(statement); queryProperties.setName(name, SetMode.IGNORE_NULL); queryProperties.setDescription(description, SetMode.IGNORE_NULL); - final AuditStamp auditStamp = new AuditStamp() - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - .setTime(currentTimeMs); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); queryProperties.setCreated(auditStamp); queryProperties.setLastModified(auditStamp); @@ -95,12 +94,17 @@ public Urn createQuery( // 3. Write the new query to GMS, return the new URN. try { final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.QUERY_ENTITY_NAME); - this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.QUERY_PROPERTIES_ASPECT_NAME, queryProperties), authentication, - false); - return UrnUtils.getUrn(this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.QUERY_SUBJECTS_ASPECT_NAME, querySubjects), authentication, - false)); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.QUERY_PROPERTIES_ASPECT_NAME, queryProperties), + authentication, + false); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.QUERY_SUBJECTS_ASPECT_NAME, querySubjects), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create Query", e); } @@ -109,8 +113,8 @@ public Urn createQuery( /** * Updates an existing Query. If a provided field is null, the previous value will be kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param urn the urn of the query * @param name optional name of the Query @@ -135,7 +139,8 @@ public void updateQuery( QueryProperties properties = getQueryProperties(urn, authentication); if (properties == null) { - throw new IllegalArgumentException(String.format("Failed to update Query. Query with urn %s does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update Query. Query with urn %s does not exist.", urn)); } // 2. Apply changes to existing Query @@ -149,17 +154,23 @@ public void updateQuery( properties.setStatement(statement); } - properties.setLastModified(new AuditStamp() - .setTime(currentTimeMs) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + properties.setLastModified( + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. 
Write changes to GMS try { final List<MetadataChangeProposal> aspectsToIngest = new ArrayList<>(); - aspectsToIngest.add(AspectUtils.buildMetadataChangeProposal(urn, Constants.QUERY_PROPERTIES_ASPECT_NAME, properties)); + aspectsToIngest.add( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.QUERY_PROPERTIES_ASPECT_NAME, properties)); if (subjects != null) { - aspectsToIngest.add(AspectUtils.buildMetadataChangeProposal(urn, Constants.QUERY_SUBJECTS_ASPECT_NAME, new QuerySubjects() - .setSubjects(new QuerySubjectArray(subjects)))); + aspectsToIngest.add( + AspectUtils.buildMetadataChangeProposal( + urn, + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new QuerySubjects().setSubjects(new QuerySubjectArray(subjects)))); } this.entityClient.batchIngestProposals(aspectsToIngest, authentication, false); } catch (Exception e) { @@ -170,17 +181,15 @@ public void updateQuery( /** * Deletes an existing Query with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the Query does not exist, no exception will be thrown. + * <p>If the Query does not exist, no exception will be thrown. * * @param queryUrn the urn of the Query * @param authentication the current authentication */ - public void deleteQuery( - @Nonnull Urn queryUrn, - @Nonnull Authentication authentication) { + public void deleteQuery(@Nonnull Urn queryUrn, @Nonnull Authentication authentication) { try { this.entityClient.deleteEntity( Objects.requireNonNull(queryUrn, "queryUrn must not be null"), @@ -191,69 +200,74 @@ public void deleteQuery( } /** - * Returns an instance of {@link QueryProperties} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link QueryProperties} for the specified Query urn, or null if one + * cannot be found. * * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link QueryProperties} for the Query, null if it does not exist. */ @Nullable - public QueryProperties getQueryProperties(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public QueryProperties getQueryProperties( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getQueryEntityResponse(queryUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.QUERY_PROPERTIES_ASPECT_NAME)) { - return new QueryProperties(response.getAspects().get(Constants.QUERY_PROPERTIES_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.QUERY_PROPERTIES_ASPECT_NAME)) { + return new QueryProperties( + response.getAspects().get(Constants.QUERY_PROPERTIES_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link QuerySubjects} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link QuerySubjects} for the specified Query urn, or null if one cannot + * be found. 
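Both query getters return null instead of throwing when an aspect is absent; a minimal caller sketch with illustrative variable names:

```java
// Hedged sketch: a missing QueryProperties aspect means the query does not exist.
QueryProperties properties = queryService.getQueryProperties(queryUrn, authentication);
QuerySubjects subjects = queryService.getQuerySubjects(queryUrn, authentication);
if (properties == null) {
  return; // nothing to render; the entity may have been deleted
}
```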
* * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link QuerySubjects} for the Query, null if it does not exist. */ @Nullable - public QuerySubjects getQuerySubjects(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public QuerySubjects getQuerySubjects( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getQueryEntityResponse(queryUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.QUERY_SUBJECTS_ASPECT_NAME)) { - return new QuerySubjects(response.getAspects().get(Constants.QUERY_SUBJECTS_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.QUERY_SUBJECTS_ASPECT_NAME)) { + return new QuerySubjects( + response.getAspects().get(Constants.QUERY_SUBJECTS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link EntityResponse} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified Query urn, or null if one + * cannot be found. * * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the Query, null if it does not exist. */ @Nullable - public EntityResponse getQueryEntityResponse(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public EntityResponse getQueryEntityResponse( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { return this.entityClient.getV2( Constants.QUERY_ENTITY_NAME, queryUrn, - ImmutableSet.of(Constants.QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME), - authentication - ); + ImmutableSet.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME), + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Query with urn %s", queryUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve Query with urn %s", queryUrn), e); } } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java index 58645166a21ef..08b14fc84d7c8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -15,20 +17,19 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * This class is used to permit easy CRUD operations on both <b>Global</b> and <b>Personal</b> * DataHub settings. * - * Note that no Authorization is performed within the service. 
The expectation - * is that the caller has already verified the permissions of the active Actor. + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class SettingsService extends BaseService { - public SettingsService(@Nonnull final EntityClient entityClient, @Nonnull final Authentication systemAuthentication) { + public SettingsService( + @Nonnull final EntityClient entityClient, + @Nonnull final Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -37,37 +38,38 @@ public SettingsService(@Nonnull final EntityClient entityClient, @Nonnull final * * @param user the urn of the user to fetch settings for * @param authentication the current authentication - * * @return an instance of {@link CorpUserSettings} for the specified user, or null if none exists. */ @Nullable public CorpUserSettings getCorpUserSettings( - @Nonnull final Urn user, - @Nonnull final Authentication authentication) { + @Nonnull final Urn user, @Nonnull final Authentication authentication) { Objects.requireNonNull(user, "user must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - EntityResponse response = this.entityClient.getV2( - CORP_USER_ENTITY_NAME, - user, - ImmutableSet.of(CORP_USER_SETTINGS_ASPECT_NAME), - authentication - ); - if (response != null && response.getAspects().containsKey(Constants.CORP_USER_SETTINGS_ASPECT_NAME)) { - return new CorpUserSettings(response.getAspects().get(Constants.CORP_USER_SETTINGS_ASPECT_NAME).getValue().data()); + EntityResponse response = + this.entityClient.getV2( + CORP_USER_ENTITY_NAME, + user, + ImmutableSet.of(CORP_USER_SETTINGS_ASPECT_NAME), + authentication); + if (response != null + && response.getAspects().containsKey(Constants.CORP_USER_SETTINGS_ASPECT_NAME)) { + return new CorpUserSettings( + response.getAspects().get(Constants.CORP_USER_SETTINGS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Corp User settings for user with urn %s", user), e); + throw new RuntimeException( + String.format("Failed to retrieve Corp User settings for user with urn %s", user), e); } } /** * Updates the settings for a given user. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. 
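Because `updateCorpUserSettings` replaces the whole aspect, callers do a read-modify-write. A hedged sketch; the argument order is taken from the javadoc and the empty-aspect fallback is an assumption:

```java
CorpUserSettings settings = settingsService.getCorpUserSettings(userUrn, authentication);
if (settings == null) {
  settings = new CorpUserSettings(); // assumption: start from an empty aspect
}
// ...mutate settings here...
settingsService.updateCorpUserSettings(userUrn, settings, authentication);
```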
* * @param user the urn of the user * @param authentication the current authentication @@ -80,13 +82,13 @@ public void updateCorpUserSettings( Objects.requireNonNull(newSettings, "newSettings must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - MetadataChangeProposal proposal = AspectUtils.buildMetadataChangeProposal( - user, - CORP_USER_SETTINGS_ASPECT_NAME, - newSettings); + MetadataChangeProposal proposal = + AspectUtils.buildMetadataChangeProposal( + user, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); this.entityClient.ingestProposal(proposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Corp User settings for user with urn %s", user), e); + throw new RuntimeException( + String.format("Failed to update Corp User settings for user with urn %s", user), e); } } @@ -99,17 +101,24 @@ public void updateCorpUserSettings( public GlobalSettingsInfo getGlobalSettings(@Nonnull final Authentication authentication) { Objects.requireNonNull(authentication, "authentication must not be null"); try { - EntityResponse response = this.entityClient.getV2( - GLOBAL_SETTINGS_ENTITY_NAME, - GLOBAL_SETTINGS_URN, - ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME), - authentication - ); - if (response != null && response.getAspects().containsKey(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)) { - return new GlobalSettingsInfo(response.getAspects().get(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + this.entityClient.getV2( + GLOBAL_SETTINGS_ENTITY_NAME, + GLOBAL_SETTINGS_URN, + ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME), + authentication); + if (response != null + && response.getAspects().containsKey(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)) { + return new GlobalSettingsInfo( + response + .getAspects() + .get(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME) + .getValue() + .data()); } // No aspect found - log.warn("Failed to retrieve Global Settings. No settings exist, but they should. Returning null"); + log.warn( + "Failed to retrieve Global Settings. No settings exist, but they should. Returning null"); return null; } catch (Exception e) { throw new RuntimeException("Failed to retrieve Global Settings!", e); @@ -119,27 +128,25 @@ public GlobalSettingsInfo getGlobalSettings(@Nonnull final Authentication authen /** * Updates the Global settings. * - * This performs a read-modify-write of the underlying GlobalSettingsInfo aspect. + * <p>This performs a read-modify-write of the underlying GlobalSettingsInfo aspect. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param newSettings the new value for the global settings. 
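The same read-modify-write shape applies to the singleton global settings; a short sketch using only the signatures visible in this hunk:

```java
// GLOBAL_SETTINGS_URN is a fixed singleton, so callers pass no entity urn here.
GlobalSettingsInfo global = settingsService.getGlobalSettings(authentication);
if (global != null) {
  // ...mutate global here...
  settingsService.updateGlobalSettings(global, authentication);
}
```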
* @param authentication the current authentication */ public void updateGlobalSettings( - @Nonnull final GlobalSettingsInfo newSettings, - @Nonnull final Authentication authentication) { + @Nonnull final GlobalSettingsInfo newSettings, @Nonnull final Authentication authentication) { Objects.requireNonNull(newSettings, "newSettings must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - MetadataChangeProposal proposal = AspectUtils.buildMetadataChangeProposal( - GLOBAL_SETTINGS_URN, - GLOBAL_SETTINGS_INFO_ASPECT_NAME, - newSettings); + MetadataChangeProposal proposal = + AspectUtils.buildMetadataChangeProposal( + GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, newSettings); this.entityClient.ingestProposal(proposal, authentication, false); } catch (Exception e) { throw new RuntimeException("Failed to update Global settings", e); } } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java index 9e12fc80a3cdb..a03c98411cb6f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -7,6 +10,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.metadata.resource.SubResourceType; @@ -20,18 +24,14 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class TagService extends BaseService { - public TagService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public TagService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -45,23 +45,26 @@ public void batchAddTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRefer batchAddTags(tagUrns, resources, this.systemAuthentication); } - /** * Batch adds multiple tags for a set of resources. * * @param tagUrns the urns of the tags to add * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ - public void batchAddTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { + public void batchAddTags( + @Nonnull List<Urn> tagUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { log.debug("Batch adding Tags to entities. 
tags: {}, resources: {}", resources, tagUrns); try { addTagsToResources(tagUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -71,9 +74,9 @@ public void batchAddTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRefer * * @param tagUrns the urns of the tags to remove * @param resources references to the resources to change - * */ - public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources) { + public void batchRemoveTags( + @Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources) { batchRemoveTags(tagUrns, resources, this.systemAuthentication); } @@ -83,16 +86,20 @@ public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRe * @param tagUrns the urns of the tags to remove * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ - public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { + public void batchRemoveTags( + @Nonnull List<Urn> tagUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { log.debug("Batch adding Tags to entities. tags: {}, resources: {}", resources, tagUrns); try { removeTagsFromResources(tagUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -100,39 +107,46 @@ public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRe private void addTagsToResources( List<com.linkedin.common.urn.Urn> tagUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildAddTagsProposals(tagUrns, resources, authentication); + @Nonnull Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildAddTagsProposals(tagUrns, resources, authentication); ingestChangeProposals(changes, authentication); } private void removeTagsFromResources( - List<Urn> tags, - List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildRemoveTagsProposals(tags, resources, authentication); + List<Urn> tags, List<ResourceReference> resources, @Nonnull Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildRemoveTagsProposals(tags, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting List<MetadataChangeProposal> buildAddTagsProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { + List<Urn> tagUrns, 
List<ResourceReference> resources, Authentication authentication) + throws URISyntaxException { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildAddTagsToEntityProposals(tagUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildAddTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildAddTagsToEntityProposals(tagUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + buildAddTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -142,21 +156,27 @@ List<MetadataChangeProposal> buildAddTagsProposals( @VisibleForTesting List<MetadataChangeProposal> buildRemoveTagsProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) { + List<Urn> tagUrns, List<ResourceReference> resources, Authentication authentication) { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildRemoveTagsToEntityProposals(tagUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildRemoveTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildRemoveTagsToEntityProposals(tagUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + buildRemoveTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -166,15 +186,13 @@ 
List<MetadataChangeProposal> buildRemoveTagsProposals( @VisibleForTesting List<MetadataChangeProposal> buildAddTagsToEntityProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { - final Map<Urn, GlobalTags> tagsAspects = getTagsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlobalTags(), - authentication - ); + List<Urn> tagUrns, List<ResourceReference> resources, Authentication authentication) + throws URISyntaxException { + final Map<Urn, GlobalTags> tagsAspects = + getTagsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlobalTags(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -186,11 +204,9 @@ List<MetadataChangeProposal> buildAddTagsToEntityProposals( globalTags.setTags(new TagAssociationArray()); } addTagsIfNotExists(globalTags, tagUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOBAL_TAGS_ASPECT_NAME, - globalTags - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, globalTags); changes.add(proposal); } return changes; @@ -200,32 +216,37 @@ List<MetadataChangeProposal> buildAddTagsToEntityProposals( List<MetadataChangeProposal> buildAddTagsToSubResourceProposals( final List<Urn> tagUrns, final List<ResourceReference> resources, - final Authentication authentication - ) throws URISyntaxException { + final Authentication authentication) + throws URISyntaxException { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. 
} - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -233,15 +254,12 @@ List<MetadataChangeProposal> buildAddTagsToSubResourceProposals( @VisibleForTesting List<MetadataChangeProposal> buildRemoveTagsToEntityProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) { - final Map<Urn, GlobalTags> tagsAspects = getTagsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlobalTags(), - authentication - ); + List<Urn> tagUrns, List<ResourceReference> resources, Authentication authentication) { + final Map<Urn, GlobalTags> tagsAspects = + getTagsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlobalTags(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -253,11 +271,9 @@ List<MetadataChangeProposal> buildRemoveTagsToEntityProposals( globalTags.setTags(new TagAssociationArray()); } removeTagsIfExists(globalTags, tagUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOBAL_TAGS_ASPECT_NAME, - globalTags - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, globalTags); changes.add(proposal); } @@ -268,30 +284,34 @@ List<MetadataChangeProposal> buildRemoveTagsToEntityProposals( List<MetadataChangeProposal> buildRemoveTagsToSubResourceProposals( List<Urn> tagUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + @Nonnull Authentication authentication) { + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. 
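To make the entity-level versus schema-field split in these proposal builders concrete, a hedged usage sketch. The ResourceReference constructor shape is an assumption; `UrnUtils`, `SubResourceType.DATASET_FIELD`, and `batchAddTags` all appear in this file:

```java
List<Urn> tagUrns = List.of(UrnUtils.getUrn("urn:li:tag:pii"));
tagService.batchAddTags(
    tagUrns,
    List.of(
        // Entity-level tag: no sub-resource, routed through GlobalTags.
        new ResourceReference(datasetUrn, null, null), // constructor shape assumed
        // Column-level tag: routed through EditableSchemaMetadata.
        new ResourceReference(datasetUrn, SubResourceType.DATASET_FIELD, "user_email")),
    authentication);
```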
} - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -306,7 +326,8 @@ private void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throws URISy List<Urn> tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -336,18 +357,16 @@ private static TagAssociationArray removeTagsIfExists(GlobalTags tags, List<Urn> } private static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional<EditableSchemaFieldInfo> fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java index 026eb3cd61def..b4a683d2e2c68 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java @@ -21,35 +21,34 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a DataHub View. - * Currently it supports creating, updating, and removing a View. + * This class is used to permit easy CRUD operations on a DataHub View. Currently it supports + * creating, updating, and removing a View. * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. * - * TODO: Ideally we have some basic caching of the view information inside of this class. + * <p>TODO: Ideally we have some basic caching of the view information inside of this class. 
*/ @Slf4j public class ViewService extends BaseService { - public ViewService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public ViewService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } /** * Creates a new DataHub View. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param type the type of the View * @param name the name of the View * @param description the description of the View * @param definition the view definition, a.k.a. the View definition * @param authentication the current authentication - * * @return the urn of the newly created View */ public Urn createView( @@ -74,43 +73,49 @@ public Urn createView( newView.setName(name); newView.setDescription(description, SetMode.IGNORE_NULL); newView.setDefinition(definition); - final AuditStamp auditStamp = new AuditStamp() - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - .setTime(currentTimeMs); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); newView.setCreated(auditStamp); newView.setLastModified(auditStamp); - // 3. Write the new view to GMS, return the new URN. try { - return UrnUtils.getUrn(this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATAHUB_VIEW_ENTITY_NAME), Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, newView), authentication, - false)); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATAHUB_VIEW_ENTITY_NAME), + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + newView), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create View", e); } } /** - * Updates an existing DataHub View with a specific urn. The overwrites only the fields - * which are not null (provided). + * Updates an existing DataHub View with a specific urn. The overwrites only the fields which are + * not null (provided). * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * - * The View with the provided urn must exist, else an {@link IllegalArgumentException} will be + * <p>The View with the provided urn must exist, else an {@link IllegalArgumentException} will be * thrown. * - * This method will perform a read-modify-write. This can cause concurrent writes - * to conflict, and overwrite one another. The expected frequency of writes - * for views is very low, however. TODO: Convert this into a safer patch. + * <p>This method will perform a read-modify-write. This can cause concurrent writes to conflict, + * and overwrite one another. The expected frequency of writes for views is very low, however. + * TODO: Convert this into a safer patch. 
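A hedged sketch of view creation; only the parameter names are visible here, so `DataHubViewType.PERSONAL` and the `definition` value are assumptions:

```java
Urn viewUrn =
    viewService.createView(
        DataHubViewType.PERSONAL,       // assumption: enum constant name
        "My datasets",
        "Everything owned by my team",  // nullable description
        definition,                     // a DataHubViewDefinition built elsewhere
        authentication,
        System.currentTimeMillis());
```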
* * @param viewUrn the urn of the View * @param name the name of the View * @param description the description of the View * @param definition the view definition itself * @param authentication the current authentication - * @param currentTimeMs the current time in milliseconds, used for populating the lastUpdatedAt field. + * @param currentTimeMs the current time in milliseconds, used for populating the lastUpdatedAt + * field. */ public void updateView( @Nonnull Urn viewUrn, @@ -126,7 +131,8 @@ public void updateView( DataHubViewInfo existingInfo = getViewInfo(viewUrn, authentication); if (existingInfo == null) { - throw new IllegalArgumentException(String.format("Failed to update View. View with urn %s does not exist.", viewUrn)); + throw new IllegalArgumentException( + String.format("Failed to update View. View with urn %s does not exist.", viewUrn)); } // 2. Apply changes to existing View @@ -140,15 +146,18 @@ public void updateView( existingInfo.setDefinition(definition); } - existingInfo.setLastModified(new AuditStamp() - .setTime(currentTimeMs) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + existingInfo.setLastModified( + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(viewUrn, Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, existingInfo), - authentication, false); + AspectUtils.buildMetadataChangeProposal( + viewUrn, Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, existingInfo), + authentication, + false); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", viewUrn), e); } @@ -157,17 +166,15 @@ public void updateView( /** * Deletes an existing DataHub View with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the View does not exist, no exception will be thrown. + * <p>If the View does not exist, no exception will be thrown. * * @param viewUrn the urn of the View * @param authentication the current authentication */ - public void deleteView( - @Nonnull Urn viewUrn, - @Nonnull Authentication authentication) { + public void deleteView(@Nonnull Urn viewUrn, @Nonnull Authentication authentication) { try { this.entityClient.deleteEntity( Objects.requireNonNull(viewUrn, "viewUrn must not be null"), @@ -178,37 +185,39 @@ public void deleteView( } /** - * Returns an instance of {@link DataHubViewInfo} for the specified View urn, - * or null if one cannot be found. + * Returns an instance of {@link DataHubViewInfo} for the specified View urn, or null if one + * cannot be found. * * @param viewUrn the urn of the View * @param authentication the authentication to use - * * @return an instance of {@link DataHubViewInfo} for the View, null if it does not exist. 
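As elsewhere in these services, absence is signalled by null on the read path; a minimal caller sketch:

```java
DataHubViewInfo info = viewService.getViewInfo(viewUrn, authentication);
if (info == null) {
  return; // read paths skip; updateView would throw IllegalArgumentException instead
}
```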
*/ @Nullable - public DataHubViewInfo getViewInfo(@Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { + public DataHubViewInfo getViewInfo( + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getViewEntityResponse(viewUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME)) { - return new DataHubViewInfo(response.getAspects().get(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME)) { + return new DataHubViewInfo( + response.getAspects().get(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link EntityResponse} for the specified View urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified View urn, or null if one cannot + * be found. * * @param viewUrn the urn of the View * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the View, null if it does not exist. */ @Nullable - public EntityResponse getViewEntityResponse(@Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { + public EntityResponse getViewEntityResponse( + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -216,8 +225,7 @@ public EntityResponse getViewEntityResponse(@Nonnull final Urn viewUrn, @Nonnull Constants.DATAHUB_VIEW_ENTITY_NAME, viewUrn, ImmutableSet.of(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME), - authentication - ); + authentication); } catch (Exception e) { throw new RuntimeException(String.format("Failed to retrieve View with urn %s", viewUrn), e); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index 928c70a7b3de1..c618db801d9d6 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -11,154 +11,169 @@ import com.linkedin.metadata.search.LineageSearchEntityArray; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; -import lombok.extern.slf4j.Slf4j; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import java.util.Objects; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class ValidationUtils { - - public static SearchResult validateSearchResult(final SearchResult searchResult, - @Nonnull final EntityService entityService) { + public static SearchResult validateSearchResult( + final SearchResult searchResult, @Nonnull final EntityService entityService) { if (searchResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - SearchResult validatedSearchResult = new SearchResult().setFrom(searchResult.getFrom()) - 
.setMetadata(searchResult.getMetadata()) - .setPageSize(searchResult.getPageSize()) - .setNumEntities(searchResult.getNumEntities()); - - SearchEntityArray validatedEntities = searchResult.getEntities() - .stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) - .collect(Collectors.toCollection(SearchEntityArray::new)); + SearchResult validatedSearchResult = + new SearchResult() + .setFrom(searchResult.getFrom()) + .setMetadata(searchResult.getMetadata()) + .setPageSize(searchResult.getPageSize()) + .setNumEntities(searchResult.getNumEntities()); + + SearchEntityArray validatedEntities = + searchResult.getEntities().stream() + .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + .collect(Collectors.toCollection(SearchEntityArray::new)); validatedSearchResult.setEntities(validatedEntities); return validatedSearchResult; } - public static ScrollResult validateScrollResult(final ScrollResult scrollResult, - @Nonnull final EntityService entityService) { + public static ScrollResult validateScrollResult( + final ScrollResult scrollResult, @Nonnull final EntityService entityService) { if (scrollResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - ScrollResult validatedScrollResult = new ScrollResult() - .setMetadata(scrollResult.getMetadata()) - .setPageSize(scrollResult.getPageSize()) - .setNumEntities(scrollResult.getNumEntities()); + ScrollResult validatedScrollResult = + new ScrollResult() + .setMetadata(scrollResult.getMetadata()) + .setPageSize(scrollResult.getPageSize()) + .setNumEntities(scrollResult.getNumEntities()); if (scrollResult.getScrollId() != null) { validatedScrollResult.setScrollId(scrollResult.getScrollId()); } - SearchEntityArray validatedEntities = scrollResult.getEntities() - .stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) - .collect(Collectors.toCollection(SearchEntityArray::new)); + SearchEntityArray validatedEntities = + scrollResult.getEntities().stream() + .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + .collect(Collectors.toCollection(SearchEntityArray::new)); validatedScrollResult.setEntities(validatedEntities); return validatedScrollResult; } - public static BrowseResult validateBrowseResult(final BrowseResult browseResult, - @Nonnull final EntityService entityService) { + public static BrowseResult validateBrowseResult( + final BrowseResult browseResult, @Nonnull final EntityService entityService) { if (browseResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - BrowseResult validatedBrowseResult = new BrowseResult().setGroups(browseResult.getGroups()) - .setMetadata(browseResult.getMetadata()) - .setFrom(browseResult.getFrom()) - .setPageSize(browseResult.getPageSize()) - .setNumGroups(browseResult.getNumGroups()) - .setNumEntities(browseResult.getNumEntities()) - .setNumElements(browseResult.getNumElements()); - - BrowseResultEntityArray validatedEntities = browseResult.getEntities() - .stream() - .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) - .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + BrowseResult validatedBrowseResult = + new BrowseResult() + .setGroups(browseResult.getGroups()) + .setMetadata(browseResult.getMetadata()) + .setFrom(browseResult.getFrom()) + .setPageSize(browseResult.getPageSize()) + .setNumGroups(browseResult.getNumGroups()) + 
.setNumEntities(browseResult.getNumEntities()) + .setNumElements(browseResult.getNumElements()); + + BrowseResultEntityArray validatedEntities = + browseResult.getEntities().stream() + .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) + .collect(Collectors.toCollection(BrowseResultEntityArray::new)); validatedBrowseResult.setEntities(validatedEntities); - return validatedBrowseResult; } - public static ListResult validateListResult(final ListResult listResult, @Nonnull final EntityService entityService) { + public static ListResult validateListResult( + final ListResult listResult, @Nonnull final EntityService entityService) { if (listResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - ListResult validatedListResult = new ListResult().setStart(listResult.getStart()) - .setCount(listResult.getCount()) - .setTotal(listResult.getTotal()); + ListResult validatedListResult = + new ListResult() + .setStart(listResult.getStart()) + .setCount(listResult.getCount()) + .setTotal(listResult.getTotal()); UrnArray validatedEntities = - listResult.getEntities().stream().filter(entityService::exists).collect(Collectors.toCollection(UrnArray::new)); + listResult.getEntities().stream() + .filter(entityService::exists) + .collect(Collectors.toCollection(UrnArray::new)); validatedListResult.setEntities(validatedEntities); return validatedListResult; } - public static LineageSearchResult validateLineageSearchResult(final LineageSearchResult lineageSearchResult, - @Nonnull final EntityService entityService) { + public static LineageSearchResult validateLineageSearchResult( + final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { if (lineageSearchResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); LineageSearchResult validatedLineageSearchResult = - new LineageSearchResult().setMetadata(lineageSearchResult.getMetadata()) + new LineageSearchResult() + .setMetadata(lineageSearchResult.getMetadata()) .setFrom(lineageSearchResult.getFrom()) .setPageSize(lineageSearchResult.getPageSize()) .setNumEntities(lineageSearchResult.getNumEntities()); - LineageSearchEntityArray validatedEntities = lineageSearchResult.getEntities() - .stream() - .filter(entity -> entityService.exists(entity.getEntity())) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + LineageSearchEntityArray validatedEntities = + lineageSearchResult.getEntities().stream() + .filter(entity -> entityService.exists(entity.getEntity())) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); validatedLineageSearchResult.setEntities(validatedEntities); return validatedLineageSearchResult; } - public static EntityLineageResult validateEntityLineageResult(@Nullable final EntityLineageResult entityLineageResult, + public static EntityLineageResult validateEntityLineageResult( + @Nullable final EntityLineageResult entityLineageResult, @Nonnull final EntityService entityService) { if (entityLineageResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - final EntityLineageResult validatedEntityLineageResult = new EntityLineageResult() - .setStart(entityLineageResult.getStart()) - .setCount(entityLineageResult.getCount()) - .setTotal(entityLineageResult.getTotal()); + final EntityLineageResult validatedEntityLineageResult = + new EntityLineageResult() + .setStart(entityLineageResult.getStart()) + 
.setCount(entityLineageResult.getCount()) + .setTotal(entityLineageResult.getTotal()); - final LineageRelationshipArray validatedRelationships = entityLineageResult.getRelationships().stream() - .filter(relationship -> entityService.exists(relationship.getEntity())) - .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) - .collect(Collectors.toCollection(LineageRelationshipArray::new)); + final LineageRelationshipArray validatedRelationships = + entityLineageResult.getRelationships().stream() + .filter(relationship -> entityService.exists(relationship.getEntity())) + .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) + .collect(Collectors.toCollection(LineageRelationshipArray::new)); validatedEntityLineageResult.setFiltered( - (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null ? entityLineageResult.getFiltered() : 0) - + entityLineageResult.getRelationships().size() - validatedRelationships.size()); + (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null + ? entityLineageResult.getFiltered() + : 0) + + entityLineageResult.getRelationships().size() + - validatedRelationships.size()); validatedEntityLineageResult.setRelationships(validatedRelationships); return validatedEntityLineageResult; } - public static LineageScrollResult validateLineageScrollResult(final LineageScrollResult lineageScrollResult, - @Nonnull final EntityService entityService) { + public static LineageScrollResult validateLineageScrollResult( + final LineageScrollResult lineageScrollResult, @Nonnull final EntityService entityService) { if (lineageScrollResult == null) { return null; } @@ -173,15 +188,14 @@ public static LineageScrollResult validateLineageScrollResult(final LineageScrol validatedLineageScrollResult.setScrollId(lineageScrollResult.getScrollId()); } - LineageSearchEntityArray validatedEntities = lineageScrollResult.getEntities() - .stream() - .filter(entity -> entityService.exists(entity.getEntity())) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + LineageSearchEntityArray validatedEntities = + lineageScrollResult.getEntities().stream() + .filter(entity -> entityService.exists(entity.getEntity())) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); validatedLineageScrollResult.setEntities(validatedEntities); return validatedLineageScrollResult; } - private ValidationUtils() { - } + private ValidationUtils() {} } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java index ea59885e8b6d5..f06671ac3c314 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java @@ -10,7 +10,6 @@ import javax.annotation.Nullable; import org.opensearch.client.tasks.GetTaskResponse; - public interface SystemMetadataService { /** * Deletes a specific aspect from the system metadata service. 
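Each validator above follows the same copy-then-filter pattern: rebuild the result container, keep only entities whose urns still exist (and, for lineage, are not soft-deleted), and fold anything dropped into the `filtered` counter. A minimal standalone sketch of that bookkeeping, with illustrative local names that are not part of this patch:

// Sketch of the filtered-count arithmetic in validateEntityLineageResult:
// start from any count the result already carries (0 when unset), then add
// the number of relationships removed by the existence/soft-delete filters.
int previouslyFiltered =
    (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null)
        ? entityLineageResult.getFiltered()
        : 0;
int droppedHere =
    entityLineageResult.getRelationships().size() - validatedRelationships.size();
validatedEntityLineageResult.setFiltered(previouslyFiltered + droppedHere);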
@@ -32,11 +31,14 @@ public interface SystemMetadataService { List<AspectRowSummary> findByUrn(String urn, boolean includeSoftDeleted, int from, int size); - List<AspectRowSummary> findByParams(Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, int size); + List<AspectRowSummary> findByParams( + Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, int size); - List<AspectRowSummary> findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size); + List<AspectRowSummary> findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size); - List<IngestionRunSummary> listRuns(Integer pageOffset, Integer pageSize, boolean includeSoftDeleted); + List<IngestionRunSummary> listRuns( + Integer pageOffset, Integer pageSize, boolean includeSoftDeleted); void configure(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java index ce9134896779a..1f794157b9cb9 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java @@ -3,19 +3,15 @@ import lombok.Builder; import lombok.Getter; - @Builder public class SemanticVersion { - @Getter - private int majorVersion; - @Getter - private int minorVersion; - @Getter - private int patchVersion; - @Getter - private String qualifier; + @Getter private int majorVersion; + @Getter private int minorVersion; + @Getter private int patchVersion; + @Getter private String qualifier; public String toString() { - return String.format(String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier)); + return String.format( + String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java index ab0848c640e2a..949572359d754 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java @@ -8,14 +8,15 @@ import java.util.Set; import javax.annotation.Nonnull; - public interface TimelineService { - List<ChangeTransaction> getTimeline(@Nonnull final Urn urn, + List<ChangeTransaction> getTimeline( + @Nonnull final Urn urn, @Nonnull Set<ChangeCategory> elements, long startMillis, long endMillis, String startVersionStamp, String endVersionStamp, - boolean rawDiffRequested) throws JsonProcessingException; + boolean rawDiffRequested) + throws JsonProcessingException; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java index 72218c37fe5ce..141a963c3e014 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java @@ -5,17 +5,16 @@ import java.util.List; import java.util.Map; - public enum ChangeCategory { - //description, institutionalmemory, properties docs, field level docs/description etc. 
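A side note on the SemanticVersion.toString() reformatted above: the inner String.format already yields the finished version string, so the outer String.format is redundant, and it could even throw if a qualifier ever contained a stray '%'. A single-call equivalent, shown here only as a sketch rather than as part of this change:

// Equivalent single-call form of SemanticVersion.toString(); the outer
// String.format above adds nothing and re-parses the result as a format
// string.
public String toString() {
  return String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier);
}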
+ // description, institutionalmemory, properties docs, field level docs/description etc. DOCUMENTATION, - //(field or top level) add term, remove term, etc. + // (field or top level) add term, remove term, etc. GLOSSARY_TERM, - //add new owner, remove owner, change ownership type etc. + // add new owner, remove owner, change ownership type etc. OWNER, - //new field, remove field, field type change, + // new field, remove field, field type change, TECHNICAL_SCHEMA, - //(field or top level) add tag, remove tag, + // (field or top level) add tag, remove tag, TAG, // Update the domain for an entity DOMAIN, diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java index dcd5f9d7dc2da..372e855841a36 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java @@ -7,52 +7,36 @@ import lombok.Value; import lombok.experimental.NonFinal; - -/** - * An event representing a high-level, semantic change to a DataHub entity. - */ +/** An event representing a high-level, semantic change to a DataHub entity. */ @Value @Builder @NonFinal @AllArgsConstructor public class ChangeEvent { - /** - * The urn of the entity being changed. - */ + /** The urn of the entity being changed. */ String entityUrn; - /** - * The category of the change. - */ + + /** The category of the change. */ ChangeCategory category; - /** - * The operation of the change. - */ + + /** The operation of the change. */ ChangeOperation operation; - /** - * An optional modifier associated with the change. For example, a tag urn. - */ + + /** An optional modifier associated with the change. For example, a tag urn. */ String modifier; - /** - * Parameters that determined by the combination of category + operation. - */ + + /** Parameters that determined by the combination of category + operation. */ Map<String, Object> parameters; - /** - * An audit stamp detailing who made the change and when. - */ + + /** An audit stamp detailing who made the change and when. */ AuditStamp auditStamp; - /** - * Optional: Semantic change version. - * TODO: Determine if this should be inside this structure. - */ + + /** Optional: Semantic change version. TODO: Determine if this should be inside this structure. */ SemanticChangeType semVerChange; + /** - * Optional: A human readable description of this change. - * TODO: Determine if this should be inside this structure. + * Optional: A human readable description of this change. TODO: Determine if this should be inside + * this structure. */ String description; } - - - - - diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java index c9aafa6a0330d..2321165cca529 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java @@ -1,40 +1,22 @@ package com.linkedin.metadata.timeline.data; public enum ChangeOperation { - /** - * Something is added to an entity, e.g. tag, glossary term. - */ + /** Something is added to an entity, e.g. tag, glossary term. */ ADD, - /** - * An entity is modified. e.g. Domain, description is updated. 
- */ + /** An entity is modified. e.g. Domain, description is updated. */ MODIFY, - /** - * Something is removed from an entity. e.g. tag, glossary term. - */ + /** Something is removed from an entity. e.g. tag, glossary term. */ REMOVE, - /** - * Entity is created. - */ + /** Entity is created. */ CREATE, - /** - * Entity is hard-deleted. - */ + /** Entity is hard-deleted. */ HARD_DELETE, - /** - * Entity is soft-deleted. - */ + /** Entity is soft-deleted. */ SOFT_DELETE, - /** - * Entity is reinstated after being soft-deleted. - */ + /** Entity is reinstated after being soft-deleted. */ REINSTATE, - /** - * Run has STARTED - */ + /** Run has STARTED */ STARTED, - /** - * Run is completed - */ + /** Run is completed */ COMPLETED } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java index 5037b8dde9a8b..3e963dba0cdb4 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java @@ -8,7 +8,6 @@ import lombok.Getter; import lombok.Setter; - @Getter @Builder public class ChangeTransaction { @@ -19,10 +18,11 @@ public class ChangeTransaction { String semVer; SemanticChangeType semVerChange; List<ChangeEvent> changeEvents; + @ArraySchema(schema = @Schema(implementation = PatchOperation.class)) JsonPatch rawDiff; - @Setter - String versionStamp; + + @Setter String versionStamp; public void setSemanticVersion(String semanticVersion) { this.semVer = semanticVersion; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java index 6b28664bcb0f6..abbbe1af37546 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java @@ -3,7 +3,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder public class PatchOperation { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java index dfaa74a0656fe..b8ae83e34eacf 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.timeline.data; public enum SemanticChangeType { - NONE, PATCH, MINOR, MAJOR, EXCEPTIONAL + NONE, + PATCH, + MINOR, + MAJOR, + EXCEPTIONAL } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java index 8dc1fdcba0cbf..32af2737756a8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java @@ -1,4 +1,3 @@ package com.linkedin.metadata.timeline.data; -public interface SemanticDifference { -} +public interface SemanticDifference {} diff --git 
a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java index 0908d927cd40b..54480bb700398 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java @@ -14,16 +14,14 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface TimeseriesAspectService { - /** - * Configure the Time-Series aspect service one time at boot-up. - */ + /** Configure the Time-Series aspect service one time at boot-up. */ void configure(); /** * Count the number of entries using a filter + * * @param entityName the name of the entity to count entries for * @param aspectName the name of the timeseries aspect to count for that entity * @param filter the filter to apply to the count @@ -32,27 +30,30 @@ public interface TimeseriesAspectService { public long countByFilter( @Nonnull final String entityName, @Nonnull final String aspectName, - @Nullable final Filter filter - ); + @Nullable final Filter filter); /** - * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional filters, sorted by the timestampMillis - * field descending. + * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional + * filters, sorted by the timestampMillis field descending. * - * This method allows you to optionally filter for events that fall into a particular time window based on the timestampMillis - * field of the aspect, or simply retrieve the latest aspects sorted by time. + * <p>This method allows you to optionally filter for events that fall into a particular time + * window based on the timestampMillis field of the aspect, or simply retrieve the latest aspects + * sorted by time. * - * Note that this does not always indicate the event time, and is often used to reflect the reported - * time of a given event. + * <p>Note that this does not always indicate the event time, and is often used to reflect the + * reported time of a given event. * * @param urn the urn of the entity to retrieve aspects for * @param entityName the name of the entity to retrieve aspects for * @param aspectName the name of the timeseries aspect to retrieve for the entity - * @param startTimeMillis the start of a time window in milliseconds, compared against the standard timestampMillis field - * @param endTimeMillis the end of a time window in milliseconds, compared against the standard timestampMillis field + * @param startTimeMillis the start of a time window in milliseconds, compared against the + * standard timestampMillis field + * @param endTimeMillis the end of a time window in milliseconds, compared against the standard + * timestampMillis field * @param limit the maximum number of results to retrieve * @param filter a set of additional secondary filters to apply when finding the aspects - * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, or empty list if none were found. + * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, + * or empty list if none were found. 
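To ground the contract documented above, a hedged usage sketch of the seven-argument accessor; the service instance, entity name, aspect name, and window bounds below are illustrative assumptions, not values taken from this patch:

// Hypothetical caller: fetch up to 10 "datasetProfile" timeseries aspects
// for one dataset over the last seven days, newest first.
Urn datasetUrn =
    UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
long endMillis = System.currentTimeMillis();
long startMillis = endMillis - TimeUnit.DAYS.toMillis(7);
List<EnvelopedAspect> aspects =
    timeseriesAspectService.getAspectValues(
        datasetUrn,
        "dataset",        // entityName (assumed)
        "datasetProfile", // timeseries aspect name (assumed)
        startMillis,
        endMillis,
        10,               // limit
        null);            // no secondary filter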
*/ @Nonnull default List<EnvelopedAspect> getAspectValues( @@ -63,28 +64,34 @@ default List<EnvelopedAspect> getAspectValues( @Nullable final Long endTimeMillis, @Nullable final Integer limit, @Nullable final Filter filter) { - return getAspectValues(urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, null); + return getAspectValues( + urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, null); } /** - * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional filters, sorted by the timestampMillis - * field descending. + * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional + * filters, sorted by the timestampMillis field descending. * - * This method allows you to optionally filter for events that fall into a particular time window based on the timestampMillis - * field of the aspect, or simply retrieve the latest aspects sorted by time. + * <p>This method allows you to optionally filter for events that fall into a particular time + * window based on the timestampMillis field of the aspect, or simply retrieve the latest aspects + * sorted by time. * - * Note that this does not always indicate the event time, and is often used to reflect the reported - * time of a given event. + * <p>Note that this does not always indicate the event time, and is often used to reflect the + * reported time of a given event. * * @param urn the urn of the entity to retrieve aspects for * @param entityName the name of the entity to retrieve aspects for * @param aspectName the name of the timeseries aspect to retrieve for the entity - * @param startTimeMillis the start of a time window in milliseconds, compared against the standard timestampMillis field - * @param endTimeMillis the end of a time window in milliseconds, compared against the standard timestampMillis field + * @param startTimeMillis the start of a time window in milliseconds, compared against the + * standard timestampMillis field + * @param endTimeMillis the end of a time window in milliseconds, compared against the standard + * timestampMillis field * @param limit the maximum number of results to retrieve * @param filter a set of additional secondary filters to apply when finding the aspects - * @param sort the sort criterion for the result set. If not provided, defaults to sorting by timestampMillis descending. - * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, or empty list if none were found. + * @param sort the sort criterion for the result set. If not provided, defaults to sorting by + * timestampMillis descending. + * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, + * or empty list if none were found. */ @Nonnull List<EnvelopedAspect> getAspectValues( @@ -98,16 +105,19 @@ List<EnvelopedAspect> getAspectValues( @Nullable final SortCriterion sort); /** - * Perform a arbitrary aggregation query over a set of Time-Series aspects. - * This is used to answer arbitrary questions about the Time-Series aspects that we have. + * Perform a arbitrary aggregation query over a set of Time-Series aspects. This is used to answer + * arbitrary questions about the Time-Series aspects that we have. * * @param entityName the name of the entity associated with the Time-Series aspect. * @param aspectName the name of the Time-Series aspect. 
- * @param aggregationSpecs a specification of the types of metric-value aggregations that should be performed - * @param filter an optional filter that should be applied prior to performing the requested aggregations. - * @param groupingBuckets an optional set of buckets to group the aggregations on the timeline -- For example, by a particular date or - * string value. - * @return a "table" representation of the results of performing the aggregation, with a row per group. + * @param aggregationSpecs a specification of the types of metric-value aggregations that should + * be performed + * @param filter an optional filter that should be applied prior to performing the requested + * aggregations. + * @param groupingBuckets an optional set of buckets to group the aggregations on the timeline -- + * For example, by a particular date or string value. + * @return a "table" representation of the results of performing the aggregation, with a row per + * group. */ @Nonnull GenericTable getAggregatedStats( @@ -121,7 +131,7 @@ GenericTable getAggregatedStats( * Generic filter based deletion for Time-Series Aspects. * * @param entityName The name of the entity. - * @param aspectName The name of the aspect. + * @param aspectName The name of the aspect. * @param filter A filter to be used for deletion of the documents on the index. * @return a summary of the aspects which were deleted */ @@ -135,7 +145,7 @@ DeleteAspectValuesResult deleteAspectValues( * Generic filter based deletion for Time-Series Aspects. * * @param entityName The name of the entity. - * @param aspectName The name of the aspect. + * @param aspectName The name of the aspect. * @param filter A filter to be used for deletion of the documents on the index. * @param options Options to control delete parameters * @return The Job ID of the deletion operation @@ -149,18 +159,22 @@ String deleteAspectValuesAsync( /** * Reindex the index represented by entityName and aspect name, applying the filter + * * @param entityName The name of the entity. - * @param aspectName The name of the aspect. + * @param aspectName The name of the aspect. * @param filter A filter to be used when reindexing * @param options Options to control reindex parameters * @return The Job ID of the reindex operation */ - String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options); /** - * Rollback the Time-Series aspects associated with a particular runId. This is invoked as a part of an - * ingestion rollback process. + * Rollback the Time-Series aspects associated with a particular runId. This is invoked as a part + * of an ingestion rollback process. * * @param runId The runId that needs to be rolled back. * @return a summary of the aspects which were deleted @@ -169,14 +183,15 @@ String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Non DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull final String runId); /** - * Upsert a raw timeseries aspect into a timeseries index. Note that this is a bit of a hack, and leaks - * too much implementation detail around Elasticsearch. + * Upsert a raw timeseries aspect into a timeseries index. Note that this is a bit of a hack, and + * leaks too much implementation detail around Elasticsearch. * - * TODO: Make this more general purpose. + * <p>TODO: Make this more general purpose. 
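The rollback hook documented a few lines above is the piece ingestion rollback relies on; a minimal caller sketch, where the runId and the numDocsDeleted accessor on the returned summary are assumptions for illustration:

// Hypothetical rollback of all timeseries documents written by one
// ingestion run; the summary reports how many documents were deleted.
DeleteAspectValuesResult result =
    timeseriesAspectService.rollbackTimeseriesAspects("demo-run-id");
long deleted = result.getNumDocsDeleted(); // accessor name assumed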
* * @param entityName the name of the entity * @param aspectName the name of an aspect - * @param docId the doc id for the elasticsearch document - this serves as the primary key for the document. + * @param docId the doc id for the elasticsearch document - this serves as the primary key for the + * document. * @param document the raw document to insert. */ void upsertDocument( diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java index 4338d883ece1d..4c5595d4d1468 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java @@ -32,8 +32,10 @@ public class DomainServiceTest { private static final Urn TEST_DOMAIN_URN_1 = UrnUtils.getUrn("urn:li:domain:test"); private static final Urn TEST_DOMAIN_URN_2 = UrnUtils.getUrn("urn:li:domain:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); @Test private void testSetDomainExistingDomain() throws Exception { @@ -41,64 +43,66 @@ private void testSetDomainExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); Urn newDomainUrn = UrnUtils.getUrn("urn:li:domain:newDomain"); - List<MetadataChangeProposal> events = service.buildSetDomainProposals(newDomainUrn, ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildSetDomainProposals( + newDomainUrn, + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - 
event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); } @Test private void testSetDomainNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); Urn newDomainUrn = UrnUtils.getUrn("urn:li:domain:newDomain"); - List<MetadataChangeProposal> events = service.buildSetDomainProposals(newDomainUrn, ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildSetDomainProposals( + newDomainUrn, + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); } @Test @@ -107,62 +111,62 @@ private void testUnsetDomainExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildUnsetDomainProposals(ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildUnsetDomainProposals( + ImmutableList.of( + new 
ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } @Test private void testUnsetDomainNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildUnsetDomainProposals(ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildUnsetDomainProposals( + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + 
GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } @Test @@ -171,165 +175,178 @@ private void testAddDomainsExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildAddDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildAddDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains() + .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains() + .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); } @Test private void testAddDomainsNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildAddDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildAddDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_1), + ImmutableList.of( 
+ new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); } @Test private void testRemoveDomainsExistingDomain() throws Exception { Domains existingDomains = new Domains(); - existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2))); + existingDomains.setDomains( + new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); 
MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); } @Test private void testRemoveDomainsNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } - private static EntityClient createMockEntityClient(@Nullable Domains existingDomains) throws Exception { + private static EntityClient createMockEntityClient(@Nullable Domains existingDomains) + throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - 
Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(existingDomains != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingDomains.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingDomains.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + existingDomains != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingDomains.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingDomains.data())))))) + : Collections.emptyMap()); return mockClient; } diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java index 567a457efcf93..2048548f6cede 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java @@ -33,57 +33,64 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class GlossaryTermServiceTest { private static final Urn TEST_GLOSSARY_TERM_URN_1 = UrnUtils.getUrn("urn:li:glossaryTerm:test"); private static final Urn TEST_GLOSSARY_TERM_URN_2 = UrnUtils.getUrn("urn:li:glossaryTerm:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); - + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + @Test private void testAddGlossaryTermToEntityExistingGlossaryTerm() throws Exception { GlossaryTerms existingGlossaryTerms = new GlossaryTerms(); - existingGlossaryTerms.setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)) - ))); + existingGlossaryTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1))))); EntityClient mockClient = 
createMockGlossaryEntityClient(existingGlossaryTerms); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expected); } @@ -91,37 +98,42 @@ private void testAddGlossaryTermToEntityExistingGlossaryTerm() throws Exception private void testAddGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exception { EntityClient mockClient = createMockGlossaryEntityClient(null); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, 
null)), - mockAuthentication()); - - GlossaryTermAssociationArray expectedTermsArray = new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTermAssociationArray expectedTermsArray = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expectedTermsArray); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expectedTermsArray); } @@ -129,50 +141,72 @@ private void testAddGlossaryTermsToEntityNoExistingGlossaryTerm() throws Excepti private void testAddGlossaryTermToSchemaFieldExistingGlossaryTerm() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_1))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, 
"myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test @@ -180,90 +214,114 @@ private void testAddGlossaryTermsToSchemaFieldNoExistingGlossaryTerm() throws Ex EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms(new GlossaryTerms())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - 
Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn))) - ); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test private void testRemoveGlossaryTermToEntityExistingGlossaryTerm() throws Exception { GlossaryTerms existingGlossaryTerms = new GlossaryTerms(); - existingGlossaryTerms.setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)) - ))); + 
existingGlossaryTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))))); EntityClient mockClient = createMockGlossaryEntityClient(existingGlossaryTerms); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); - - List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - GlossaryTerms expected = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))))); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); + + List<MetadataChangeProposal> events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTerms expected = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + RecordTemplate glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1, expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + RecordTemplate glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2, expected); } @@ -271,36 +329,38 @@ private void testRemoveGlossaryTermToEntityExistingGlossaryTerm() throws Excepti private void testRemoveGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exception { EntityClient mockClient = createMockGlossaryEntityClient(null); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new 
ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expected); } @@ -308,51 +368,73 @@ private void testRemoveGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exce private void testRemoveGlossaryTermToSchemaFieldExistingGlossaryTerm() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_2))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, 
"myfield")), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)) - )); + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test @@ -360,78 +442,99 @@ private void testRemoveGlossaryTermsToSchemaFieldNoExistingGlossaryTerm() throws EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms(new GlossaryTerms())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals( - 
ImmutableList.of(TEST_ENTITY_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(TEST_ENTITY_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), Collections.emptyList()); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + Collections.emptyList()); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), Collections.emptyList()); - + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + Collections.emptyList()); } - private static EntityClient createMockGlossaryEntityClient(@Nullable GlossaryTerms existingGlossaryTerms) throws Exception { + private static EntityClient createMockGlossaryEntityClient( + @Nullable GlossaryTerms existingGlossaryTerms) throws Exception { return createMockEntityClient(existingGlossaryTerms, Constants.GLOSSARY_TERMS_ASPECT_NAME); } - private static EntityClient createMockSchemaMetadataEntityClient(@Nullable EditableSchemaMetadata existingMetadata) throws Exception { + private static EntityClient createMockSchemaMetadataEntityClient( + @Nullable EditableSchemaMetadata existingMetadata) throws Exception { return createMockEntityClient(existingMetadata, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( + @Nullable RecordTemplate aspect, String aspectName) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - 
Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(aspectName)), - Mockito.any(Authentication.class))) - .thenReturn(aspect != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(aspectName)), + Mockito.any(Authentication.class))) + .thenReturn( + aspect != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) + : Collections.emptyMap()); return mockClient; } diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java index 9df8b9ecf46e8..5888067dbe268 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -46,8 +48,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class LineageServiceTest { private static AuditStamp _auditStamp; private static EntityClient _mockClient; @@ -57,18 +57,25 @@ public class LineageServiceTest { private static final String SOURCE_FIELD_NAME = "source"; private static final String UI_SOURCE = "UI"; private static final String ACTOR_URN = "urn:li:corpuser:test"; - private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; - private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; - private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; - private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; + private static final String DATASET_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; + private static final String DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; + private static final String DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; + private static final 
String DATASET_URN_4 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; private static final String CHART_URN_1 = "urn:li:chart:(looker,baz1)"; private static final String CHART_URN_2 = "urn:li:chart:(looker,baz2)"; private static final String CHART_URN_3 = "urn:li:chart:(looker,baz3)"; private static final String DASHBOARD_URN_1 = "urn:li:dashboard:(airflow,id1)"; private static final String DASHBOARD_URN_2 = "urn:li:dashboard:(airflow,id2)"; - private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; - private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; - private static final String DATAJOB_URN_3 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test3)"; + private static final String DATAJOB_URN_1 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATAJOB_URN_2 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; + private static final String DATAJOB_URN_3 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test3)"; private Urn actorUrn; private Urn datasetUrn1; private Urn datasetUrn2; @@ -114,41 +121,41 @@ public void testUpdateDatasetLineage() throws Exception { Mockito.when(_mockClient.exists(datasetUrn2, AUTHENTICATION)).thenReturn(true); Mockito.when(_mockClient.exists(datasetUrn3, AUTHENTICATION)).thenReturn(true); - UpstreamLineage upstreamLineage = createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_3, DATASET_URN_4))); + UpstreamLineage upstreamLineage = + createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_3, DATASET_URN_4))); - Mockito.when(_mockClient.getV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(datasetUrn1), - Mockito.eq(ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)), - Mockito.eq(AUTHENTICATION) - )) + Mockito.when( + _mockClient.getV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(datasetUrn1), + Mockito.eq(ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)), + Mockito.eq(AUTHENTICATION))) .thenReturn( new EntityResponse() .setUrn(datasetUrn1) .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(upstreamLineage.data())) - ))) - ); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upstreamLineage.data())))))); final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2); final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn3); - _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); + _lineageService.updateDatasetLineage( + datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); // upstreamLineage without dataset3, keep dataset4, add dataset2 - final UpstreamLineage updatedDataset1UpstreamLineage = createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_4, DATASET_URN_2))); + final UpstreamLineage updatedDataset1UpstreamLineage = + createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_4, DATASET_URN_2))); final MetadataChangeProposal proposal1 = new MetadataChangeProposal(); proposal1.setEntityUrn(UrnUtils.getUrn(DATASET_URN_1)); proposal1.setEntityType(Constants.DATASET_ENTITY_NAME); proposal1.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); 
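    // Descriptive note: proposal1 rebuilds, field by field, the exact
    // MetadataChangeProposal that updateDatasetLineage is expected to emit; the
    // Mockito.verify below then matches it by value (these generated classes compare
    // their underlying DataMaps). A minimal sketch of the same expected-proposal
    // pattern, with hypothetical names, would be:
    //
    //   MetadataChangeProposal expectedMcp = new MetadataChangeProposal();
    //   expectedMcp.setEntityUrn(targetUrn);                       // entity being updated
    //   expectedMcp.setEntityType(Constants.DATASET_ENTITY_NAME);  // its entity type
    //   expectedMcp.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME);
    //   expectedMcp.setAspect(GenericRecordUtils.serializeAspect(expectedAspect));
    //   expectedMcp.setChangeType(ChangeType.UPSERT);
    //   Mockito.verify(mockClient, Mockito.times(1))
    //       .ingestProposal(Mockito.eq(expectedMcp), Mockito.eq(auth), Mockito.eq(false));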
proposal1.setAspect(GenericRecordUtils.serializeAspect(updatedDataset1UpstreamLineage)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.eq(AUTHENTICATION), - Mockito.eq(false) - ); + Mockito.verify(_mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal1), Mockito.eq(AUTHENTICATION), Mockito.eq(false)); } @Test @@ -157,8 +164,11 @@ public void testFailUpdateWithMissingDataset() throws Exception { final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2); final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn3); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDatasetLineage( + datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -167,11 +177,15 @@ public void testFailUpdateDatasetWithInvalidEdge() throws Exception { final List<Urn> upstreamUrnsToAdd = Collections.singletonList(chartUrn1); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDatasetLineage( + datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } - // Adds upstream for chart1 to dataset3 and removes edge to dataset1 while keeping edge to dataset2 + // Adds upstream for chart1 to dataset3 and removes edge to dataset1 while keeping edge to + // dataset2 @Test public void testUpdateChartLineage() throws Exception { Mockito.when(_mockClient.exists(chartUrn1, AUTHENTICATION)).thenReturn(true); @@ -179,30 +193,37 @@ public void testUpdateChartLineage() throws Exception { Mockito.when(_mockClient.exists(datasetUrn2, AUTHENTICATION)).thenReturn(true); Mockito.when(_mockClient.exists(datasetUrn3, AUTHENTICATION)).thenReturn(true); - ChartInfo chartInfo = createChartInfo(chartUrn1, Arrays.asList(datasetUrn1, datasetUrn2), Collections.emptyList()); + ChartInfo chartInfo = + createChartInfo( + chartUrn1, Arrays.asList(datasetUrn1, datasetUrn2), Collections.emptyList()); - Mockito.when(_mockClient.getV2( - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(chartUrn1), - Mockito.eq(ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME)), - Mockito.eq(AUTHENTICATION) - )) + Mockito.when( + _mockClient.getV2( + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(chartUrn1), + Mockito.eq(ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME)), + Mockito.eq(AUTHENTICATION))) .thenReturn( new EntityResponse() .setUrn(chartUrn1) .setEntityName(Constants.CHART_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.CHART_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(chartInfo.data())) - ))) - ); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.CHART_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(chartInfo.data())))))); final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn3); final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn2); - _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); + 
_lineageService.updateChartLineage( + chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); // chartInfo with dataset1 in inputs and dataset3 in inputEdges - ChartInfo updatedChartInfo = createChartInfo(chartUrn1, Collections.singletonList(datasetUrn1), Collections.singletonList(datasetUrn3)); + ChartInfo updatedChartInfo = + createChartInfo( + chartUrn1, + Collections.singletonList(datasetUrn1), + Collections.singletonList(datasetUrn3)); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(chartUrn1); @@ -210,11 +231,8 @@ public void testUpdateChartLineage() throws Exception { proposal.setAspectName(Constants.CHART_INFO_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(updatedChartInfo)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.eq(AUTHENTICATION), - Mockito.eq(false) - ); + Mockito.verify(_mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false)); } @Test @@ -223,8 +241,11 @@ public void testFailUpdateChartWithMissingDataset() throws Exception { final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateChartLineage( + chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -234,8 +255,11 @@ public void testFailUpdateChartWithInvalidEdge() throws Exception { // charts can't have charts upstream of them final List<Urn> upstreamUrnsToAdd = Collections.singletonList(chartUrn2); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateChartLineage( + chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } // Adds upstreams for dashboard to dataset2 and chart2 and removes edge to dataset1 and chart1 @@ -248,42 +272,44 @@ public void testUpdateDashboardLineage() throws Exception { Mockito.when(_mockClient.exists(chartUrn2, AUTHENTICATION)).thenReturn(true); // existing dashboardInfo has upstreams to dataset1, dataset3, chart1, chart3 - DashboardInfo dashboardInfo = createDashboardInfo( - dashboardUrn1, - Arrays.asList(chartUrn1, chartUrn3), - Collections.emptyList(), - Arrays.asList(datasetUrn1, datasetUrn3), - Collections.emptyList() - ); - - Mockito.when(_mockClient.getV2( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(dashboardUrn1), - Mockito.eq(ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME)), - Mockito.eq(AUTHENTICATION) - )) + DashboardInfo dashboardInfo = + createDashboardInfo( + dashboardUrn1, + Arrays.asList(chartUrn1, chartUrn3), + Collections.emptyList(), + Arrays.asList(datasetUrn1, datasetUrn3), + Collections.emptyList()); + + Mockito.when( + _mockClient.getV2( + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(dashboardUrn1), + Mockito.eq(ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME)), + Mockito.eq(AUTHENTICATION))) .thenReturn( new EntityResponse() .setUrn(dashboardUrn1) 
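                // Descriptive note: this stubbed EntityResponse mirrors the envelope an
                // EntityClient.getV2 call returns from the metadata service: the entity
                // urn and entity name, plus an EnvelopedAspectMap keyed by aspect name,
                // where each EnvelopedAspect wraps the aspect's raw DataMap
                // (here obtained via dashboardInfo.data()).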
.setEntityName(Constants.DASHBOARD_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DASHBOARD_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dashboardInfo.data())) - ))) - ); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DASHBOARD_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dashboardInfo.data())))))); final List<Urn> upstreamUrnsToAdd = Arrays.asList(datasetUrn2, chartUrn2); final List<Urn> upstreamUrnsToRemove = Arrays.asList(datasetUrn1, chartUrn1); - _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); - - // dashboardInfo with chartUrn3 in charts, chartUrn2 in chartEdges, datasetUrn3 in datasets, datasetUrn2 in datasetEdges - DashboardInfo updatedDashboardInfo = createDashboardInfo( - dashboardUrn1, - Collections.singletonList(chartUrn3), - Collections.singletonList(chartUrn2), - Arrays.asList(datasetUrn3), - Collections.singletonList(datasetUrn2) - ); + _lineageService.updateDashboardLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); + + // dashboardInfo with chartUrn3 in charts, chartUrn2 in chartEdges, datasetUrn3 in datasets, + // datasetUrn2 in datasetEdges + DashboardInfo updatedDashboardInfo = + createDashboardInfo( + dashboardUrn1, + Collections.singletonList(chartUrn3), + Collections.singletonList(chartUrn2), + Arrays.asList(datasetUrn3), + Collections.singletonList(datasetUrn2)); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(dashboardUrn1); @@ -291,11 +317,8 @@ public void testUpdateDashboardLineage() throws Exception { proposal.setAspectName(Constants.DASHBOARD_INFO_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(updatedDashboardInfo)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.eq(AUTHENTICATION), - Mockito.eq(false) - ); + Mockito.verify(_mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false)); } @Test @@ -304,8 +327,11 @@ public void testFailUpdateDashboardWithMissingDataset() throws Exception { final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDashboardLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -315,11 +341,15 @@ public void testFailUpdateDashboardWithInvalidEdge() throws Exception { // dashboards can't have dashboards upstream of them final List<Urn> upstreamUrnsToAdd = Collections.singletonList(dashboardUrn2); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDashboardLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } - // Adds upstream datajob3, upstream dataset3, downstream dataset4, removes upstream datajob2, upstream dataset1, downstream 
dataset1 + // Adds upstream datajob3, upstream dataset3, downstream dataset4, removes upstream datajob2, + // upstream dataset1, downstream dataset1 // has existing upstream datajob2, upstream dataset1 and dataset2, downstream dataset4 // Should result in upstream datajob3, upstream dataset3 and dataset2, downstream dataset5 @Test @@ -332,66 +362,71 @@ public void testUpdateDataJobLineage() throws Exception { Mockito.when(_mockClient.exists(datasetUrn4, AUTHENTICATION)).thenReturn(true); Mockito.when(_mockClient.exists(datasetUrn1, AUTHENTICATION)).thenReturn(true); - DataJobInputOutput firstDataJobInputOutput = createDataJobInputOutput( - datajobUrn1, - Arrays.asList(datasetUrn1, datasetUrn2), - Collections.emptyList(), - Collections.singletonList(datajobUrn2), - Collections.emptyList(), - Collections.singletonList(datasetUrn1), - Collections.emptyList() - ); - - DataJobInputOutput secondDataJobInputOutput = createDataJobInputOutput( - datajobUrn1, - Arrays.asList(datasetUrn1), - Arrays.asList(datasetUrn3), - Collections.emptyList(), - Arrays.asList(datajobUrn3), - Arrays.asList(datasetUrn1), - Collections.emptyList() - ); - - Mockito.when(_mockClient.getV2( - Mockito.eq(Constants.DATA_JOB_ENTITY_NAME), - Mockito.eq(datajobUrn1), - Mockito.eq(ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)), - Mockito.eq(AUTHENTICATION) - )) + DataJobInputOutput firstDataJobInputOutput = + createDataJobInputOutput( + datajobUrn1, + Arrays.asList(datasetUrn1, datasetUrn2), + Collections.emptyList(), + Collections.singletonList(datajobUrn2), + Collections.emptyList(), + Collections.singletonList(datasetUrn1), + Collections.emptyList()); + + DataJobInputOutput secondDataJobInputOutput = + createDataJobInputOutput( + datajobUrn1, + Arrays.asList(datasetUrn1), + Arrays.asList(datasetUrn3), + Collections.emptyList(), + Arrays.asList(datajobUrn3), + Arrays.asList(datasetUrn1), + Collections.emptyList()); + + Mockito.when( + _mockClient.getV2( + Mockito.eq(Constants.DATA_JOB_ENTITY_NAME), + Mockito.eq(datajobUrn1), + Mockito.eq(ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)), + Mockito.eq(AUTHENTICATION))) .thenReturn( new EntityResponse() .setUrn(datajobUrn1) .setEntityName(Constants.DATA_JOB_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(firstDataJobInputOutput.data())) - ))), + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(firstDataJobInputOutput.data()))))), new EntityResponse() .setUrn(datajobUrn1) .setEntityName(Constants.DATA_JOB_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(secondDataJobInputOutput.data())) - ))) - ); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(secondDataJobInputOutput.data())))))); final List<Urn> upstreamUrnsToAdd = Arrays.asList(datajobUrn3, datasetUrn3); final List<Urn> upstreamUrnsToRemove = Arrays.asList(datajobUrn2, datasetUrn2); - _lineageService.updateDataJobUpstreamLineage(datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); + _lineageService.updateDataJobUpstreamLineage( + datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); final List<Urn> downstreamUrnsToAdd = 
Arrays.asList(datasetUrn4); final List<Urn> downstreamUrnsToRemove = Arrays.asList(datasetUrn1); - _lineageService.updateDataJobDownstreamLineage(datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION); - - DataJobInputOutput updatedDataJobInputOutput = createDataJobInputOutput( - datajobUrn1, - Arrays.asList(datasetUrn1), - Arrays.asList(datasetUrn3), - Collections.emptyList(), - Arrays.asList(datajobUrn3), - Collections.emptyList(), - Collections.singletonList(datasetUrn4) - ); + _lineageService.updateDataJobDownstreamLineage( + datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION); + + DataJobInputOutput updatedDataJobInputOutput = + createDataJobInputOutput( + datajobUrn1, + Arrays.asList(datasetUrn1), + Arrays.asList(datasetUrn3), + Collections.emptyList(), + Arrays.asList(datajobUrn3), + Collections.emptyList(), + Collections.singletonList(datasetUrn4)); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(datajobUrn1); @@ -399,11 +434,8 @@ public void testUpdateDataJobLineage() throws Exception { proposal.setAspectName(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(updatedDataJobInputOutput)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.eq(AUTHENTICATION), - Mockito.eq(false) - ); + Mockito.verify(_mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false)); } @Test @@ -412,8 +444,11 @@ public void testFailUpdateUpstreamDataJobWithMissingUrnToAdd() throws Exception final List<Urn> upstreamUrnsToAdd = Arrays.asList(datajobUrn3); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDataJobUpstreamLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDataJobUpstreamLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -423,8 +458,11 @@ public void testFailUpdateUpstreamDataJobWithInvalidEdge() throws Exception { // dataJobs can't have dashboards upstream of them final List<Urn> upstreamUrnsToAdd = Collections.singletonList(dashboardUrn2); final List<Urn> upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDataJobUpstreamLineage(datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDataJobUpstreamLineage( + datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -433,8 +471,15 @@ public void testFailUpdateDownstreamDataJobWithMissingUrnToAdd() throws Exceptio final List<Urn> downstreamUrnsToAdd = Arrays.asList(datasetUrn1); final List<Urn> downstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDataJobDownstreamLineage(dashboardUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDataJobDownstreamLineage( + dashboardUrn1, + downstreamUrnsToAdd, + downstreamUrnsToRemove, + actorUrn, + AUTHENTICATION)); } @Test @@ -444,8 +489,15 @@ public void 
testFailUpdateDownstreamDataJobWithInvalidEdge() throws Exception { // dataJobs can't have dashboards downstream of them final List<Urn> downstreamUrnsToAdd = Collections.singletonList(dashboardUrn2); final List<Urn> downstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDataJobUpstreamLineage(datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDataJobUpstreamLineage( + datajobUrn1, + downstreamUrnsToAdd, + downstreamUrnsToRemove, + actorUrn, + AUTHENTICATION)); } private UpstreamLineage createUpstreamLineage(List<String> upstreamUrns) throws Exception { @@ -466,7 +518,8 @@ private UpstreamLineage createUpstreamLineage(List<String> upstreamUrns) throws return upstreamLineage; } - private ChartInfo createChartInfo(Urn entityUrn, List<Urn> inputsToAdd, List<Urn> inputEdgesToAdd) throws Exception { + private ChartInfo createChartInfo(Urn entityUrn, List<Urn> inputsToAdd, List<Urn> inputEdgesToAdd) + throws Exception { ChartInfo chartInfo = new ChartInfo(); ChartDataSourceTypeArray inputs = new ChartDataSourceTypeArray(); for (Urn input : inputsToAdd) { @@ -489,8 +542,8 @@ private DashboardInfo createDashboardInfo( List<Urn> chartsToAdd, List<Urn> chartEdgesToAdd, List<Urn> datasetsToAdd, - List<Urn> datasetEdgesToAdd - ) throws Exception { + List<Urn> datasetEdgesToAdd) + throws Exception { final DashboardInfo dashboardInfo = new DashboardInfo(); final ChartUrnArray charts = new ChartUrnArray(); @@ -525,8 +578,8 @@ private DataJobInputOutput createDataJobInputOutput( List<Urn> inputDatajobsToAdd, List<Urn> inputDatajobEdgesToAdd, List<Urn> outputDatasetsToAdd, - List<Urn> outputDatasetEdgesToAdd - ) throws Exception { + List<Urn> outputDatasetEdgesToAdd) + throws Exception { final DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); final DatasetUrnArray inputDatasets = new DatasetUrnArray(); @@ -571,8 +624,7 @@ private DataJobInputOutput createDataJobInputOutput( private void addNewEdge( @Nonnull final Urn upstreamUrn, @Nonnull final Urn downstreamUrn, - @Nonnull final EdgeArray edgeArray - ) { + @Nonnull final EdgeArray edgeArray) { final Edge newEdge = new Edge(); newEdge.setDestinationUrn(upstreamUrn); newEdge.setSourceUrn(downstreamUrn); diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java index c23a151e52734..fde1c32d53a92 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.service.OwnerService.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -29,61 +31,59 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.service.OwnerService.*; - - public class OwnerServiceTest { private static final Urn TEST_OWNER_URN_1 = UrnUtils.getUrn("urn:li:corpuser:test"); private static final Urn TEST_OWNER_URN_2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn 
TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); @Test private void testAddOwnersExistingOwner() throws Exception { Ownership existingOwnership = new Ownership(); - existingOwnership.setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE) - ))); + existingOwnership.setOwners( + new OwnerArray( + ImmutableList.of(new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE)))); EntityClient mockClient = createMockOwnersClient(existingOwnership); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); Urn newOwnerUrn = UrnUtils.getUrn("urn:li:corpuser:newTag"); - List<MetadataChangeProposal> events = service.buildAddOwnersProposals( - ImmutableList.of(newOwnerUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - OwnershipType.NONE, - mockAuthentication()); - - OwnerArray expected = new OwnerArray( - ImmutableList.of( - new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE), - new Owner().setOwner(newOwnerUrn).setType(OwnershipType.NONE) - .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())) - )); + List<MetadataChangeProposal> events = + service.buildAddOwnersProposals( + ImmutableList.of(newOwnerUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + OwnershipType.NONE, + mockAuthentication()); + + OwnerArray expected = + new OwnerArray( + ImmutableList.of( + new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE), + new Owner() + .setOwner(newOwnerUrn) + .setType(OwnershipType.NONE) + .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect1.getOwners(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect2.getOwners(), expected); } @@ -91,81 +91,86 @@ private void testAddOwnersExistingOwner() throws Exception { private void testAddOwnersNoExistingOwners() throws Exception { EntityClient mockClient = createMockOwnersClient(null); - final OwnerService 
service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); Urn newOwnerUrn = UrnUtils.getUrn("urn:li:corpuser:newOwner"); - List<MetadataChangeProposal> events = service.buildAddOwnersProposals( - ImmutableList.of(newOwnerUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - OwnershipType.NONE, - mockAuthentication()); - - OwnerArray expectedOwners = new OwnerArray( - ImmutableList.of(new Owner().setOwner(newOwnerUrn).setType(OwnershipType.NONE) - .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())))); + List<MetadataChangeProposal> events = + service.buildAddOwnersProposals( + ImmutableList.of(newOwnerUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + OwnershipType.NONE, + mockAuthentication()); + + OwnerArray expectedOwners = + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(newOwnerUrn) + .setType(OwnershipType.NONE) + .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect1.getOwners(), expectedOwners); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect2.getOwners(), expectedOwners); } @Test private void testRemoveOwnerExistingOwners() throws Exception { Ownership existingOwnership = new Ownership(); - existingOwnership.setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.TECHNICAL_OWNER), - new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD) - ))); + existingOwnership.setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.TECHNICAL_OWNER), + new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD)))); EntityClient mockClient = createMockOwnersClient(existingOwnership); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); - - List<MetadataChangeProposal> events = service.buildRemoveOwnersProposals( - ImmutableList.of(TEST_OWNER_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - Ownership expected = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new 
Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD)))); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); + + List<MetadataChangeProposal> events = + service.buildRemoveOwnersProposals( + ImmutableList.of(TEST_OWNER_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + Ownership expected = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(TEST_OWNER_URN_2) + .setType(OwnershipType.DATA_STEWARD)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate ownersAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + RecordTemplate ownersAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect1, expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate ownersAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + RecordTemplate ownersAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect2, expected); } @@ -173,68 +178,72 @@ private void testRemoveOwnerExistingOwners() throws Exception { private void testRemoveOwnerNoExistingOwners() throws Exception { EntityClient mockClient = createMockOwnersClient(null); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:corpuser:newOwner"); - List<MetadataChangeProposal> events = service.buildRemoveOwnersProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveOwnersProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); OwnerArray expected = new OwnerArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownersAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + Ownership ownersAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect1.getOwners(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); 
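    // Descriptive note: each proposal carries its aspect as serialized bytes, so the
    // assertions round-trip those bytes through GenericRecordUtils.deserializeAspect
    // before comparing against the expected Ownership. A minimal sketch of that round
    // trip, with hypothetical names:
    //
    //   Ownership roundTripped = GenericRecordUtils.deserializeAspect(
    //       mcp.getAspect().getValue(),        // serialized aspect payload
    //       mcp.getAspect().getContentType(),  // e.g. "application/json"
    //       Ownership.class);                  // target aspect class to rebuild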
Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownersAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + Ownership ownersAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect2.getOwners(), expected); } - private static EntityClient createMockOwnersClient(@Nullable Ownership existingOwnership) throws Exception { + private static EntityClient createMockOwnersClient(@Nullable Ownership existingOwnership) + throws Exception { return createMockEntityClient(existingOwnership, Constants.OWNERSHIP_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( + @Nullable RecordTemplate aspect, String aspectName) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(aspectName)), - Mockito.any(Authentication.class))) - .thenReturn(aspect != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(aspectName)), + Mockito.any(Authentication.class))) + .thenReturn( + aspect != null + ? 
ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) + : Collections.emptyMap()); return mockClient; } @@ -243,4 +252,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, Constants.SYSTEM_ACTOR)); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java index dcb4a745732b2..65ca25fc8524d 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -23,8 +25,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - public class OwnershipTypeServiceTest { private static final Urn TEST_OWNERSHIP_TYPE_URN = UrnUtils.getUrn("urn:li:ownershipType:test"); @@ -34,80 +34,62 @@ public class OwnershipTypeServiceTest { private void testCreateOwnershipTypeSuccess() throws Exception { final EntityClient mockClient = createOwnershipTypeMockEntityClient(); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Case 1: With description - Urn urn = service.createOwnershipType( - "test OwnershipType", - "my description", - mockAuthentication(), - 0L - ); + Urn urn = + service.createOwnershipType( + "test OwnershipType", "my description", mockAuthentication(), 0L); Assert.assertEquals(urn, TEST_OWNERSHIP_TYPE_URN); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 2: Without description - urn = service.createOwnershipType( - "test OwnershipType", - null, - mockAuthentication(), - 0L - ); + urn = service.createOwnershipType("test OwnershipType", null, mockAuthentication(), 0L); Assert.assertEquals(urn, TEST_OWNERSHIP_TYPE_URN); - Mockito.verify(mockClient, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testCreateOwnershipTypeErrorMissingInputs() 
throws Exception { final EntityClient mockClient = createOwnershipTypeMockEntityClient(); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Only case: missing OwnershipType Name Assert.assertThrows( RuntimeException.class, - () -> service.createOwnershipType( - null, - "my description", - mockAuthentication(), - 0L - ) - ); + () -> service.createOwnershipType(null, "my description", mockAuthentication(), 0L)); } @Test private void testCreateOwnershipTypeError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.createOwnershipType( - "new name", - "my description", - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> service.createOwnershipType("new name", "my description", mockAuthentication(), 1L)); } @Test @@ -117,174 +99,134 @@ private void testUpdateOwnershipTypeSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); resetUpdateOwnershipTypeMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - oldName, - oldDescription, - TEST_USER_URN, - 0L, - 0L - ); - - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L); + + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; final String newDescription = "new description"; // Case 1: Update name only - service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - newName, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, newName, oldDescription, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateOwnershipType(TEST_OWNERSHIP_TYPE_URN, newName, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateOwnershipTypeProposal( + TEST_OWNERSHIP_TYPE_URN, newName, oldDescription, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateOwnershipTypeMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - oldName, - oldDescription, - TEST_USER_URN, - 0L, - 0L - ); + mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L); // Case 2: Update description only service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - null, - newDescription, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - 
Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, oldName, newDescription, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + TEST_OWNERSHIP_TYPE_URN, null, newDescription, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateOwnershipTypeProposal( + TEST_OWNERSHIP_TYPE_URN, oldName, newDescription, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateOwnershipTypeMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - oldName, - oldDescription, - TEST_USER_URN, - 0L, - 0L - ); + mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L); // Case 3: Update all fields at once service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - newName, - newDescription, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, newName, newDescription, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + TEST_OWNERSHIP_TYPE_URN, newName, newDescription, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateOwnershipTypeProposal( + TEST_OWNERSHIP_TYPE_URN, newName, newDescription, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testUpdateOwnershipTypeMissingOwnershipType() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - newName, - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateOwnershipType( + TEST_OWNERSHIP_TYPE_URN, newName, null, mockAuthentication(), 1L)); } @Test private void testUpdateOwnershipTypeError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped 
exception - Assert.assertThrows(RuntimeException.class, () -> service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - "new name", - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateOwnershipType( + TEST_OWNERSHIP_TYPE_URN, "new name", null, mockAuthentication(), 1L)); } @Test private void testDeleteOwnershipTypeSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); service.deleteOwnershipType(TEST_OWNERSHIP_TYPE_URN, true, mockAuthentication()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntityReferences( - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntityReferences( + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class)); } @Test private void testDeleteOwnershipTypeError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity( - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.any(Authentication.class) - ); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, + Assert.assertThrows( + RuntimeException.class, () -> service.deleteOwnershipType(TEST_OWNERSHIP_TYPE_URN, false, mockAuthentication())); } @@ -296,20 +238,13 @@ private void testGetOwnershipTypeInfoSuccess() throws Exception { final String description = "description"; resetGetOwnershipTypeInfoMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - name, - description, - TEST_USER_URN, - 0L, - 1L - ); + mockClient, TEST_OWNERSHIP_TYPE_URN, name, description, TEST_USER_URN, 0L, 1L); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); - final OwnershipTypeInfo info = service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()); + final OwnershipTypeInfo info = + service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()); // Assert that the info is correct. 
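    // (The created/lastModified times of 0L and 1L asserted below come from the mock
    // configured via resetGetOwnershipTypeInfoMockEntityClient above.)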
Assert.assertEquals((long) info.getCreated().getTime(), 0L); @@ -323,37 +258,40 @@ private void testGetOwnershipTypeInfoSuccess() throws Exception { private void testGetOwnershipTypeInfoNoOwnershipTypeExists() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); Assert.assertNull(service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication())); - } @Test private void testGetOwnershipTypeInfoError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq( + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, + () -> service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication())); } private static MetadataChangeProposal buildUpdateOwnershipTypeProposal( @@ -380,10 +318,12 @@ private static MetadataChangeProposal buildUpdateOwnershipTypeProposal( private static EntityClient createOwnershipTypeMockEntityClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(TEST_OWNERSHIP_TYPE_URN.toString()); + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(TEST_OWNERSHIP_TYPE_URN.toString()); return mockClient; } @@ -394,34 +334,40 @@ private static void resetUpdateOwnershipTypeMockEntityClient( final String existingDescription, final Urn existingOwner, final long existingCreatedAt, - final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(ownershipTypeUrn.toString()); - - 
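    // resetUpdateOwnershipTypeMockEntityClient: after Mockito.reset, ingestProposal is
    // re-stubbed above and getV2 below returns the "existing" OwnershipTypeInfo, so
    // each update case starts from the same prior state.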
final OwnershipTypeInfo existingInfo = new OwnershipTypeInfo() - .setName(existingName) - .setDescription(existingDescription) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(ownershipTypeUrn), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(ownershipTypeUrn.toString()); + + final OwnershipTypeInfo existingInfo = + new OwnershipTypeInfo() + .setName(existingName) + .setDescription(existingDescription) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(ownershipTypeUrn), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(ownershipTypeUrn) .setEntityName(OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))); } private static void resetGetOwnershipTypeInfoMockEntityClient( @@ -431,29 +377,33 @@ private static void resetGetOwnershipTypeInfoMockEntityClient( final String existingDescription, final Urn existingOwner, final long existingCreatedAt, - final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - final OwnershipTypeInfo existingInfo = new OwnershipTypeInfo() - .setName(existingName) - .setDescription(existingDescription) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(ownershipTypeUrn), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + final OwnershipTypeInfo existingInfo = + new OwnershipTypeInfo() + .setName(existingName) + .setDescription(existingDescription) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(ownershipTypeUrn), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(ownershipTypeUrn) .setEntityName(OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new 
Aspect(existingInfo.data())))))); } private static Authentication mockAuthentication() { @@ -461,5 +411,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId())); return mockAuth; } - -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java index 6ef0065b4d5db..5726dcc6cd17a 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -31,144 +33,138 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - - public class QueryServiceTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @Test private void testCreateQuerySuccess() throws Exception { final EntityClient mockClient = createQueryMockEntityClient(); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Case 1: All fields provided - Urn urn = service.createQuery( - "test query", - "my description", - QuerySource.MANUAL, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 0L - ); + Urn urn = + service.createQuery( + "test query", + "my description", + QuerySource.MANUAL, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_QUERY_URN); // Ingests both aspects - properties and subjects - Mockito.verify(mockClient, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 2: Null fields provided - urn = service.createQuery( - null, - null, - QuerySource.MANUAL, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(), - mockAuthentication(), - 0L - ); + urn = + service.createQuery( + null, + null, + QuerySource.MANUAL, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT 
* FROM Table"), + ImmutableList.of(), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_QUERY_URN); - Mockito.verify(mockClient, Mockito.times(4)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(4)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testCreateQueryErrorMissingInputs() throws Exception { final EntityClient mockClient = createQueryMockEntityClient(); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Case 1: missing Query Source Assert.assertThrows( RuntimeException.class, - () -> service.createQuery( - null, - null, - null, // Cannot be null - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(), - mockAuthentication(), - 0L - ) - ); - + () -> + service.createQuery( + null, + null, + null, // Cannot be null + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(), + mockAuthentication(), + 0L)); // Case 2: missing Query Statement Assert.assertThrows( RuntimeException.class, - () -> service.createQuery( - null, - null, - QuerySource.MANUAL, // Cannot be null - null, - ImmutableList.of(), - mockAuthentication(), - 0L - ) - ); + () -> + service.createQuery( + null, + null, + QuerySource.MANUAL, // Cannot be null + null, + ImmutableList.of(), + mockAuthentication(), + 0L)); // Case 3: missing Query Subjects Assert.assertThrows( RuntimeException.class, - () -> service.createQuery( - null, - null, - QuerySource.MANUAL, // Cannot be null - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - null, - mockAuthentication(), - 0L - ) - ); + () -> + service.createQuery( + null, + null, + QuerySource.MANUAL, // Cannot be null + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + null, + mockAuthentication(), + 0L)); } @Test private void testCreateQueryError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.createQuery( - "test query", - "my description", - QuerySource.MANUAL, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 0L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.createQuery( + "test query", + "my description", + QuerySource.MANUAL, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + 
mockAuthentication(), + 0L)); } @Test private void testUpdateQuerySuccess() throws Exception { final String oldName = "old name"; final String oldDescription = "old description"; - final QueryStatement oldStatement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); + final QueryStatement oldStatement = + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); final EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -181,35 +177,34 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; final String newDescription = "new description"; - final QueryStatement newStatement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); - final List<QuerySubject> newSubjects = ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)); + final QueryStatement newStatement = + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); + final List<QuerySubject> newSubjects = + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)); // Case 1: Update name only - service.updateQuery( - TEST_QUERY_URN, - newName, - null, - null, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, newName, oldDescription, QuerySource.MANUAL, oldStatement, - 0L, 1L))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, newName, null, null, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + newName, + oldDescription, + QuerySource.MANUAL, + oldStatement, + 0L, + 1L))), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetQueryPropertiesClient( mockClient, @@ -220,26 +215,25 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 2: Update description only - service.updateQuery( - TEST_QUERY_URN, - null, - newDescription, - null, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, newDescription, QuerySource.MANUAL, - oldStatement, 0L, 1L))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, null, newDescription, null, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + oldName, + newDescription, + QuerySource.MANUAL, + oldStatement, + 0L, + 1L))), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetQueryPropertiesClient( mockClient, @@ -250,26 +244,25 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 3: Update definition only - service.updateQuery( - TEST_QUERY_URN, - null, - null, - newStatement, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, 
Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, oldDescription, QuerySource.MANUAL, - newStatement, 0L, 1L))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, null, null, newStatement, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + oldName, + oldDescription, + QuerySource.MANUAL, + newStatement, + 0L, + 1L))), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetQueryPropertiesClient( mockClient, @@ -280,27 +273,26 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 4: Update subjects only - service.updateQuery( - TEST_QUERY_URN, - null, - null, - null, - newSubjects, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of( - buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, oldDescription, QuerySource.MANUAL, oldStatement, 0L, 1L), - buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, null, null, null, newSubjects, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + oldName, + oldDescription, + QuerySource.MANUAL, + oldStatement, + 0L, + 1L), + buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 5: Update all fields service.updateQuery( @@ -310,103 +302,106 @@ private void testUpdateQuerySuccess() throws Exception { newStatement, newSubjects, mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of( - buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, newName, newDescription, QuerySource.MANUAL, newStatement, 0L, 1L), - buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects) - )), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + newName, + newDescription, + QuerySource.MANUAL, + newStatement, + 0L, + 1L), + buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testUpdateQueryMissingQuery() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> 
service.updateQuery( - TEST_QUERY_URN, - "new name", - null, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateQuery( + TEST_QUERY_URN, + "new name", + null, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + mockAuthentication(), + 1L)); } @Test private void testUpdateQueryError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateQuery( - TEST_QUERY_URN, - "new name", - null, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateQuery( + TEST_QUERY_URN, + "new name", + null, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + mockAuthentication(), + 1L)); } @Test private void testDeleteQuerySuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); service.deleteQuery(TEST_QUERY_URN, mockAuthentication()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test private void testDeleteQueryError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.deleteQuery(TEST_QUERY_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, () -> service.deleteQuery(TEST_QUERY_URN, mockAuthentication())); } @Test @@ 
-415,7 +410,8 @@ private void testGetQueryPropertiesSuccess() throws Exception { final String name = "name"; final String description = "description"; - final QueryStatement statement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); + final QueryStatement statement = + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); resetQueryPropertiesClient( mockClient, @@ -426,14 +422,12 @@ private void testGetQueryPropertiesSuccess() throws Exception { statement, TEST_USER_URN, 0L, - 1L - ); + 1L); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); - final QueryProperties properties = service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()); + final QueryProperties properties = + service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()); // Assert that the info is correct. Assert.assertEquals((long) properties.getCreated().getTime(), 0L); @@ -449,16 +443,17 @@ private void testGetQueryPropertiesSuccess() throws Exception { private void testGetQueryPropertiesNoQueryExists() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq( + ImmutableSet.of( + QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); Assert.assertNull(service.getQueryProperties(TEST_QUERY_URN, mockAuthentication())); } @@ -467,38 +462,40 @@ private void testGetQueryPropertiesNoQueryExists() throws Exception { private void testGetQueryPropertiesError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq( + ImmutableSet.of( + QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.getQueryProperties(TEST_QUERY_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, + () -> service.getQueryProperties(TEST_QUERY_URN, mockAuthentication())); } @Test private void testGetQuerySubjectsSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final QuerySubjects existingSubjects = new QuerySubjects() - .setSubjects(new 
QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + final QuerySubjects existingSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - resetQuerySubjectsClient( - mockClient, - TEST_QUERY_URN, - existingSubjects - ); + resetQuerySubjectsClient(mockClient, TEST_QUERY_URN, existingSubjects); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); - final QuerySubjects querySubjects = service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication()); + final QuerySubjects querySubjects = + service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication()); Assert.assertEquals(querySubjects, existingSubjects); } @@ -507,16 +504,16 @@ private void testGetQuerySubjectsSuccess() throws Exception { private void testGetQuerySubjectsNoQueryExists() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq( + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); Assert.assertNull(service.getQueryProperties(TEST_QUERY_URN, mockAuthentication())); } @@ -525,23 +522,24 @@ private void testGetQuerySubjectsNoQueryExists() throws Exception { private void testGetQuerySubjectsError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, + () -> service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication())); } private static MetadataChangeProposal buildUpdateQuerySubjectsProposal( - final Urn urn, - final List<QuerySubject> querySubjects) { + final Urn urn, final List<QuerySubject> querySubjects) { QuerySubjects subjects = new QuerySubjects(); subjects.setSubjects(new QuerySubjectArray(querySubjects)); @@ -583,10 +581,12 @@ private static MetadataChangeProposal buildUpdateQueryPropertiesProposal( private static EntityClient createQueryMockEntityClient() throws Exception { 
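    // Stubs ingestProposal(...) to return TEST_QUERY_URN, so the createQuery(...)
    // calls in the tests above resolve to that URN.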
EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(TEST_QUERY_URN.toString()); + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(TEST_QUERY_URN.toString()); return mockClient; } @@ -599,63 +599,75 @@ private static void resetQueryPropertiesClient( final QueryStatement existingStatement, final Urn existingOwner, final long existingCreatedAt, - final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(queryUrn.toString()); - - final QueryProperties existingProperties = new QueryProperties() - .setSource(existingSource) - .setName(existingName) - .setDescription(existingDescription) - .setStatement(existingStatement) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(queryUrn), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(queryUrn.toString()); + + final QueryProperties existingProperties = + new QueryProperties() + .setSource(existingSource) + .setName(existingName) + .setDescription(existingDescription) + .setStatement(existingStatement) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(queryUrn), + Mockito.eq( + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(queryUrn) .setEntityName(QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingProperties.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingProperties.data())))))); } private static void resetQuerySubjectsClient( - final EntityClient mockClient, - final Urn queryUrn, - final QuerySubjects subjects) throws Exception { + final EntityClient mockClient, final Urn queryUrn, final QuerySubjects subjects) + throws Exception { Mockito.reset(mockClient); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(queryUrn.toString()); - - Mockito.when(mockClient.getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(queryUrn), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + 
Mockito.eq(false))) + .thenReturn(queryUrn.toString()); + + Mockito.when( + mockClient.getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(queryUrn), + Mockito.eq( + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(queryUrn) .setEntityName(QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(subjects.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(subjects.data())))))); } private static Authentication mockAuthentication() { @@ -663,4 +675,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId())); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java index 43ebc53385ad4..b034111e7825f 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -25,9 +27,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - - public class SettingsServiceTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -35,26 +34,28 @@ public class SettingsServiceTest { @Test private static void testGetCorpUserSettingsNullSettings() throws Exception { - final SettingsService service = new SettingsService( - getCorpUserSettingsEntityClientMock(null), - Mockito.mock(Authentication.class) - ); - final CorpUserSettings res = service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)); + final SettingsService service = + new SettingsService( + getCorpUserSettingsEntityClientMock(null), Mockito.mock(Authentication.class)); + final CorpUserSettings res = + service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)); Assert.assertNull(res); } @Test private static void testGetCorpUserSettingsValidSettings() throws Exception { - final CorpUserSettings existingSettings = new CorpUserSettings() - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN)) - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - - final SettingsService service = new SettingsService( - getCorpUserSettingsEntityClientMock(existingSettings), - Mockito.mock(Authentication.class) - ); - - final CorpUserSettings res = service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)); + final CorpUserSettings existingSettings = + new CorpUserSettings() + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN)) + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + + final SettingsService service = + new SettingsService( + getCorpUserSettingsEntityClientMock(existingSettings), + Mockito.mock(Authentication.class)); + + final 
CorpUserSettings res = + service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)); Assert.assertEquals(existingSettings, res); } @@ -62,107 +63,94 @@ private static void testGetCorpUserSettingsValidSettings() throws Exception { private static void testGetCorpUserSettingsSettingsException() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.CORP_USER_ENTITY_NAME), - Mockito.eq(TEST_USER_URN), - Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.CORP_USER_ENTITY_NAME), + Mockito.eq(TEST_USER_URN), + Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); - final SettingsService service = new SettingsService( - mockClient, - Mockito.mock(Authentication.class) - ); + final SettingsService service = + new SettingsService(mockClient, Mockito.mock(Authentication.class)); - Assert.assertThrows(RuntimeException.class, () -> service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class))); + Assert.assertThrows( + RuntimeException.class, + () -> service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class))); } @Test private static void testUpdateCorpUserSettingsValidSettings() throws Exception { - final CorpUserSettings newSettings = new CorpUserSettings() - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN)) - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + final CorpUserSettings newSettings = + new CorpUserSettings() + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN)) + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - final MetadataChangeProposal expectedProposal = buildUpdateCorpUserSettingsChangeProposal( - TEST_USER_URN, - newSettings - ); + final MetadataChangeProposal expectedProposal = + buildUpdateCorpUserSettingsChangeProposal(TEST_USER_URN, newSettings); final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - )).thenReturn(TEST_USER_URN.toString()); - - final SettingsService service = new SettingsService( - mockClient, - Mockito.mock(Authentication.class) - ); - - service.updateCorpUserSettings( - TEST_USER_URN, - newSettings, - Mockito.mock(Authentication.class)); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false))) + .thenReturn(TEST_USER_URN.toString()); + + final SettingsService service = + new SettingsService(mockClient, Mockito.mock(Authentication.class)); + + service.updateCorpUserSettings(TEST_USER_URN, newSettings, Mockito.mock(Authentication.class)); Mockito.verify(mockClient, Mockito.times(1)) .ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test private static void testUpdateCorpUserSettingsSettingsException() throws Exception { - final CorpUserSettings newSettings = new CorpUserSettings() - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN)) - .setAppearance(new 
CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + final CorpUserSettings newSettings = + new CorpUserSettings() + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN)) + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - final MetadataChangeProposal expectedProposal = buildUpdateCorpUserSettingsChangeProposal( - TEST_USER_URN, - newSettings - ); + final MetadataChangeProposal expectedProposal = + buildUpdateCorpUserSettingsChangeProposal(TEST_USER_URN, newSettings); final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - )).thenThrow(new RemoteInvocationException()); - - final SettingsService service = new SettingsService( - mockClient, - Mockito.mock(Authentication.class) - ); - - Assert.assertThrows(RuntimeException.class, () -> service.updateCorpUserSettings( - TEST_USER_URN, - newSettings, - Mockito.mock(Authentication.class))); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false))) + .thenThrow(new RemoteInvocationException()); + + final SettingsService service = + new SettingsService(mockClient, Mockito.mock(Authentication.class)); + + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateCorpUserSettings( + TEST_USER_URN, newSettings, Mockito.mock(Authentication.class))); } @Test private static void testGetGlobalSettingsNullSettings() throws Exception { - final SettingsService service = new SettingsService( - getGlobalSettingsEntityClientMock(null), - Mockito.mock(Authentication.class) - ); + final SettingsService service = + new SettingsService( + getGlobalSettingsEntityClientMock(null), Mockito.mock(Authentication.class)); final GlobalSettingsInfo res = service.getGlobalSettings(Mockito.mock(Authentication.class)); Assert.assertNull(res); } @Test private static void testGetGlobalSettingsValidSettings() throws Exception { - final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo() - .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN)); + final GlobalSettingsInfo existingSettings = + new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN)); - final SettingsService service = new SettingsService( - getGlobalSettingsEntityClientMock(existingSettings), - Mockito.mock(Authentication.class) - ); + final SettingsService service = + new SettingsService( + getGlobalSettingsEntityClientMock(existingSettings), + Mockito.mock(Authentication.class)); final GlobalSettingsInfo res = service.getGlobalSettings(Mockito.mock(Authentication.class)); Assert.assertEquals(existingSettings, res); @@ -172,136 +160,131 @@ private static void testGetGlobalSettingsValidSettings() throws Exception { private static void testGetGlobalSettingsSettingsException() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME), - Mockito.eq(GLOBAL_SETTINGS_URN), - Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.getV2( + Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME), + Mockito.eq(GLOBAL_SETTINGS_URN), + Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); - 
final SettingsService service = new SettingsService( - mockClient, - Mockito.mock(Authentication.class) - ); + final SettingsService service = + new SettingsService(mockClient, Mockito.mock(Authentication.class)); - Assert.assertThrows(RuntimeException.class, () -> service.getGlobalSettings(Mockito.mock(Authentication.class))); + Assert.assertThrows( + RuntimeException.class, + () -> service.getGlobalSettings(Mockito.mock(Authentication.class))); } @Test private static void testUpdateGlobalSettingsValidSettings() throws Exception { - final GlobalSettingsInfo newSettings = new GlobalSettingsInfo() - .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN)); + final GlobalSettingsInfo newSettings = + new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN)); - final MetadataChangeProposal expectedProposal = buildUpdateGlobalSettingsChangeProposal(newSettings); + final MetadataChangeProposal expectedProposal = + buildUpdateGlobalSettingsChangeProposal(newSettings); final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - )).thenReturn(GLOBAL_SETTINGS_URN.toString()); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false))) + .thenReturn(GLOBAL_SETTINGS_URN.toString()); - final SettingsService service = new SettingsService( - mockClient, - Mockito.mock(Authentication.class) - ); + final SettingsService service = + new SettingsService(mockClient, Mockito.mock(Authentication.class)); - service.updateGlobalSettings( - newSettings, - Mockito.mock(Authentication.class)); + service.updateGlobalSettings(newSettings, Mockito.mock(Authentication.class)); Mockito.verify(mockClient, Mockito.times(1)) .ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test private static void testUpdateGlobalSettingsSettingsException() throws Exception { - final GlobalSettingsInfo newSettings = new GlobalSettingsInfo() - .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN)); + final GlobalSettingsInfo newSettings = + new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN)); - final MetadataChangeProposal expectedProposal = buildUpdateGlobalSettingsChangeProposal( - newSettings - ); + final MetadataChangeProposal expectedProposal = + buildUpdateGlobalSettingsChangeProposal(newSettings); final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - )).thenThrow(new RemoteInvocationException()); - - final SettingsService service = new SettingsService( - mockClient, - Mockito.mock(Authentication.class) - ); - - Assert.assertThrows(RuntimeException.class, () -> service.updateGlobalSettings( - newSettings, - Mockito.mock(Authentication.class))); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false))) + .thenThrow(new RemoteInvocationException()); + + final SettingsService service = + new SettingsService(mockClient, Mockito.mock(Authentication.class)); + + Assert.assertThrows( + RuntimeException.class, + () -> service.updateGlobalSettings(newSettings, 
Mockito.mock(Authentication.class))); } - private static EntityClient getCorpUserSettingsEntityClientMock(@Nullable final CorpUserSettings settings) - throws Exception { + private static EntityClient getCorpUserSettingsEntityClientMock( + @Nullable final CorpUserSettings settings) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EnvelopedAspectMap aspectMap = settings != null ? new EnvelopedAspectMap(ImmutableMap.of( - Constants.CORP_USER_SETTINGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(settings.data())) - )) : new EnvelopedAspectMap(); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.CORP_USER_ENTITY_NAME), - Mockito.eq(TEST_USER_URN), - Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - new EntityResponse() - .setEntityName(Constants.CORP_USER_ENTITY_NAME) - .setUrn(TEST_USER_URN) - .setAspects(aspectMap) - ); + EnvelopedAspectMap aspectMap = + settings != null + ? new EnvelopedAspectMap( + ImmutableMap.of( + Constants.CORP_USER_SETTINGS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(settings.data())))) + : new EnvelopedAspectMap(); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.CORP_USER_ENTITY_NAME), + Mockito.eq(TEST_USER_URN), + Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(Constants.CORP_USER_ENTITY_NAME) + .setUrn(TEST_USER_URN) + .setAspects(aspectMap)); return mockClient; } - private static EntityClient getGlobalSettingsEntityClientMock(@Nullable final GlobalSettingsInfo settings) - throws Exception { + private static EntityClient getGlobalSettingsEntityClientMock( + @Nullable final GlobalSettingsInfo settings) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EnvelopedAspectMap aspectMap = settings != null ? new EnvelopedAspectMap(ImmutableMap.of( - GLOBAL_SETTINGS_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(settings.data())) - )) : new EnvelopedAspectMap(); - - Mockito.when(mockClient.getV2( - Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME), - Mockito.eq(GLOBAL_SETTINGS_URN), - Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - new EntityResponse() - .setEntityName(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME) - .setUrn(GLOBAL_SETTINGS_URN) - .setAspects(aspectMap) - ); + EnvelopedAspectMap aspectMap = + settings != null + ? 
new EnvelopedAspectMap( + ImmutableMap.of( + GLOBAL_SETTINGS_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(settings.data())))) + : new EnvelopedAspectMap(); + + Mockito.when( + mockClient.getV2( + Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME), + Mockito.eq(GLOBAL_SETTINGS_URN), + Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME) + .setUrn(GLOBAL_SETTINGS_URN) + .setAspects(aspectMap)); return mockClient; } private static MetadataChangeProposal buildUpdateCorpUserSettingsChangeProposal( - final Urn urn, - final CorpUserSettings newSettings) { - final MetadataChangeProposal mcp = new MetadataChangeProposal(); - mcp.setEntityUrn(urn); - mcp.setEntityType(CORP_USER_ENTITY_NAME); - mcp.setAspectName(CORP_USER_SETTINGS_ASPECT_NAME); - mcp.setChangeType(ChangeType.UPSERT); - mcp.setAspect(GenericRecordUtils.serializeAspect(newSettings)); - return mcp; + final Urn urn, final CorpUserSettings newSettings) { + final MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityUrn(urn); + mcp.setEntityType(CORP_USER_ENTITY_NAME); + mcp.setAspectName(CORP_USER_SETTINGS_ASPECT_NAME); + mcp.setChangeType(ChangeType.UPSERT); + mcp.setAspect(GenericRecordUtils.serializeAspect(newSettings)); + return mcp; } private static MetadataChangeProposal buildUpdateGlobalSettingsChangeProposal( diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java index 125265540dc77..e7ed3db82d0f2 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java @@ -33,56 +33,55 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class TagServiceTest { private static final Urn TEST_TAG_URN_1 = UrnUtils.getUrn("urn:li:tag:test"); private static final Urn TEST_TAG_URN_2 = UrnUtils.getUrn("urn:li:tag:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); @Test private void testAddTagToEntityExistingTag() throws Exception { GlobalTags existingGlobalTags = new GlobalTags(); - existingGlobalTags.setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)) - ))); + existingGlobalTags.setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))))); EntityClient mockClient = createMockGlobalTagsClient(existingGlobalTags); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, 
null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - TagAssociationArray expected = new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expected); } @@ -90,37 +89,35 @@ private void testAddTagToEntityExistingTag() throws Exception { private void testAddGlobalTagsToEntityNoExistingTag() throws Exception { EntityClient mockClient = createMockGlobalTagsClient(null); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - TagAssociationArray expectedTermsArray = new TagAssociationArray( - ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + TagAssociationArray expectedTermsArray = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - 
event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expectedTermsArray); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expectedTermsArray); } @@ -128,50 +125,59 @@ private void testAddGlobalTagsToEntityNoExistingTag() throws Exception { private void testAddTagToSchemaFieldExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - TagAssociationArray expected = new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - 
Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test @@ -179,90 +185,95 @@ private void testAddGlobalTagsToSchemaFieldNoExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags(new GlobalTags())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - TagAssociationArray expected = new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))) - ); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - 
Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test private void testRemoveTagToEntityExistingTag() throws Exception { GlobalTags existingGlobalTags = new GlobalTags(); - existingGlobalTags.setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)) - ))); + existingGlobalTags.setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))); EntityClient mockClient = createMockGlobalTagsClient(existingGlobalTags); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_TAG_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_TAG_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); - GlobalTags expected = new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))); + GlobalTags expected = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + RecordTemplate tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), 
GlobalTags.class); Assert.assertEquals(tagsAspect1, expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + RecordTemplate tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2, expected); } @@ -270,36 +281,33 @@ private void testRemoveTagToEntityExistingTag() throws Exception { private void testRemoveGlobalTagsToEntityNoExistingTag() throws Exception { EntityClient mockClient = createMockGlobalTagsClient(null); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); TagAssociationArray expected = new TagAssociationArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expected); } @@ -307,51 +315,58 @@ private void testRemoveGlobalTagsToEntityNoExistingTag() throws Exception { private void testRemoveTagToSchemaFieldExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new 
EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_TAG_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_TAG_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); - TagAssociationArray expected = new TagAssociationArray(ImmutableList.of( - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)) - )); + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test @@ -359,78 +374,90 @@ private void testRemoveGlobalTagsToSchemaFieldNoExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new 
EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags(new GlobalTags())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_ENTITY_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_ENTITY_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), Collections.emptyList()); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + Collections.emptyList()); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), Collections.emptyList()); - + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + Collections.emptyList()); } - private static EntityClient createMockGlobalTagsClient(@Nullable GlobalTags existingGlobalTags) throws Exception { + private static EntityClient createMockGlobalTagsClient(@Nullable GlobalTags existingGlobalTags) + throws Exception { return createMockEntityClient(existingGlobalTags, Constants.GLOBAL_TAGS_ASPECT_NAME); } - private static EntityClient createMockSchemaMetadataEntityClient(@Nullable EditableSchemaMetadata existingMetadata) throws Exception { + private static EntityClient 
createMockSchemaMetadataEntityClient( + @Nullable EditableSchemaMetadata existingMetadata) throws Exception { return createMockEntityClient(existingMetadata, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( + @Nullable RecordTemplate aspect, String aspectName) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(aspectName)), - Mockito.any(Authentication.class))) - .thenReturn(aspect != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(aspectName)), + Mockito.any(Authentication.class))) + .thenReturn( + aspect != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) + : Collections.emptyMap()); return mockClient; } @@ -439,4 +466,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, Constants.SYSTEM_ACTOR)); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java index 5841717e7db93..cd62cf3959103 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -33,9 +35,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - - public class ViewServiceTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -45,150 +44,187 @@ public class ViewServiceTest { private void testCreateViewSuccess() throws Exception { final EntityClient mockClient = createViewMockEntityClient(); - final ViewService service = new 
ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Case 1: With description - Urn urn = service.createView(DataHubViewType.PERSONAL, - "test view", - "my description", - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ); + Urn urn = + service.createView( + DataHubViewType.PERSONAL, + "test view", + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_VIEW_URN); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 2: Without description - urn = service.createView(DataHubViewType.PERSONAL, - "test view", - null, - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ); + urn = + service.createView( + DataHubViewType.PERSONAL, + "test view", + null, + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_VIEW_URN); - Mockito.verify(mockClient, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testCreateViewErrorMissingInputs() throws Exception { final EntityClient mockClient = createViewMockEntityClient(); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Case 1: missing View Type Assert.assertThrows( RuntimeException.class, - () -> service.createView(null, - "test view", - "my description", - new DataHubViewDefinition() - 
.setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ) - ); - + () -> + service.createView( + null, + "test view", + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L)); // Case 2: missing View name Assert.assertThrows( RuntimeException.class, - () -> service.createView(DataHubViewType.PERSONAL, - null, - "my description", - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ) - ); + () -> + service.createView( + DataHubViewType.PERSONAL, + null, + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L)); // Case 3: missing View definition Assert.assertThrows( RuntimeException.class, - () -> service.createView(DataHubViewType.PERSONAL, - "My name", - "my description", - null, - mockAuthentication(), - 0L - ) - ); + () -> + service.createView( + DataHubViewType.PERSONAL, + "My name", + "my description", + null, + mockAuthentication(), + 0L)); } @Test private void testCreateViewError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.createView( - DataHubViewType.PERSONAL, - "new name", - "my description", - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - 
.setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.createView( + DataHubViewType.PERSONAL, + "new name", + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 1L)); } @Test @@ -196,9 +232,10 @@ private void testUpdateViewSuccess() throws Exception { final DataHubViewType type = DataHubViewType.PERSONAL; final String oldName = "old name"; final String oldDescription = "old description"; - final DataHubViewDefinition oldDefinition = new DataHubViewDefinition() - .setEntityTypes(new StringArray()) - .setFilter(new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList()))); + final DataHubViewDefinition oldDefinition = + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList()))); final EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -211,39 +248,39 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; final String newDescription = "new description"; - final DataHubViewDefinition newDefinition = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))); + final DataHubViewDefinition newDefinition = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))); // Case 1: Update name only - service.updateView( - TEST_VIEW_URN, - newName, - null, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, newName, oldDescription, oldDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateView(TEST_VIEW_URN, newName, null, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, newName, oldDescription, oldDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateViewMockEntityClient( mockClient, @@ -254,24 +291,18 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 2: Update description 
only - service.updateView( - TEST_VIEW_URN, - null, - newDescription, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, oldName, newDescription, oldDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateView(TEST_VIEW_URN, null, newDescription, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, oldName, newDescription, oldDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateViewMockEntityClient( mockClient, @@ -282,23 +313,18 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 3: Update definition only - service.updateView(TEST_VIEW_URN, - null, - null, - newDefinition, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, oldName, oldDescription, newDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateView(TEST_VIEW_URN, null, null, newDefinition, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, oldName, oldDescription, newDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateViewMockEntityClient( mockClient, @@ -309,110 +335,88 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 4: Update all fields at once service.updateView( - TEST_VIEW_URN, - newName, - newDescription, - newDefinition, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, newName, newDescription, newDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + TEST_VIEW_URN, newName, newDescription, newDefinition, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, newName, newDescription, newDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testUpdateViewMissingView() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateView( - TEST_VIEW_URN, - newName, - null, - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> service.updateView(TEST_VIEW_URN, newName, null, null, mockAuthentication(), 1L)); 
} @Test private void testUpdateViewError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateView( - TEST_VIEW_URN, - "new name", - null, - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> service.updateView(TEST_VIEW_URN, "new name", null, null, mockAuthentication(), 1L)); } @Test private void testDeleteViewSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); service.deleteView(TEST_VIEW_URN, mockAuthentication()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test private void testDeleteViewError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.deleteView(TEST_VIEW_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, () -> service.deleteView(TEST_VIEW_URN, mockAuthentication())); } @Test @@ -422,31 +426,28 @@ private void testGetViewInfoSuccess() throws Exception { final DataHubViewType type = DataHubViewType.PERSONAL; final String name = "name"; final String description = "description"; - final DataHubViewDefinition definition = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))); + final DataHubViewDefinition definition = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + 
ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))); resetGetViewInfoMockEntityClient( - mockClient, - TEST_VIEW_URN, - type, - name, - description, - definition, - TEST_USER_URN, - 0L, - 1L - ); + mockClient, TEST_VIEW_URN, type, name, description, definition, TEST_USER_URN, 0L, 1L); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); final DataHubViewInfo info = service.getViewInfo(TEST_VIEW_URN, mockAuthentication()); @@ -464,37 +465,36 @@ private void testGetViewInfoSuccess() throws Exception { private void testGetViewInfoNoViewExists() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); Assert.assertNull(service.getViewInfo(TEST_VIEW_URN, mockAuthentication())); - } @Test private void testGetViewInfoError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.getViewInfo(TEST_VIEW_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, () -> service.getViewInfo(TEST_VIEW_URN, mockAuthentication())); } private static MetadataChangeProposal buildUpdateViewProposal( @@ -525,10 +525,12 @@ private static MetadataChangeProposal buildUpdateViewProposal( private static EntityClient createViewMockEntityClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(TEST_VIEW_URN.toString()); + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(TEST_VIEW_URN.toString()); return mockClient; } @@ -541,36 +543,42 @@ private static void resetUpdateViewMockEntityClient( final DataHubViewDefinition existingDefinition, final Urn existingOwner, final long existingCreatedAt, - 
final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(viewUrn.toString()); - - final DataHubViewInfo existingInfo = new DataHubViewInfo() - .setType(existingType) - .setName(existingName) - .setDescription(existingDescription) - .setDefinition(existingDefinition) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(viewUrn), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(viewUrn.toString()); + + final DataHubViewInfo existingInfo = + new DataHubViewInfo() + .setType(existingType) + .setName(existingName) + .setDescription(existingDescription) + .setDefinition(existingDefinition) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(viewUrn), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( - new EntityResponse() - .setUrn(viewUrn) - .setEntityName(DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + new EntityResponse() + .setUrn(viewUrn) + .setEntityName(DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))); } private static void resetGetViewInfoMockEntityClient( @@ -582,31 +590,35 @@ private static void resetGetViewInfoMockEntityClient( final DataHubViewDefinition existingDefinition, final Urn existingOwner, final long existingCreatedAt, - final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - final DataHubViewInfo existingInfo = new DataHubViewInfo() - .setType(existingType) - .setName(existingName) - .setDescription(existingDescription) - .setDefinition(existingDefinition) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(viewUrn), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + final DataHubViewInfo existingInfo = + new DataHubViewInfo() + .setType(existingType) + .setName(existingName) + .setDescription(existingDescription) + .setDefinition(existingDefinition) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(viewUrn), + 
Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(viewUrn) .setEntityName(DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))); } private static Authentication mockAuthentication() { @@ -614,4 +626,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId())); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java index 3ea2b01c3e214..1b3ef20cff00a 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java @@ -12,9 +12,9 @@ import com.linkedin.util.Pair; import java.io.IOException; import java.io.PrintWriter; +import java.time.ZoneId; import java.util.HashMap; import java.util.Map; -import java.time.ZoneId; import javax.servlet.ServletContext; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; @@ -27,30 +27,46 @@ public class Config extends HttpServlet { - Map<String, Object> config = new HashMap<String, Object>() {{ - put("noCode", "true"); - put("retention", "true"); - put("statefulIngestionCapable", true); - put("patchCapable", true); - put("timeZone", ZoneId.systemDefault().toString()); - }}; - ObjectMapper objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - - private Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> getPluginModels(ServletContext servletContext) { - WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext); + Map<String, Object> config = + new HashMap<String, Object>() { + { + put("noCode", "true"); + put("retention", "true"); + put("statefulIngestionCapable", true); + put("patchCapable", true); + put("timeZone", ZoneId.systemDefault().toString()); + } + }; + ObjectMapper objectMapper = + new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + + private Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> getPluginModels( + ServletContext servletContext) { + WebApplicationContext ctx = + WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext); PluginEntityRegistryLoader pluginEntityRegistryLoader = (PluginEntityRegistryLoader) ctx.getBean("pluginEntityRegistry"); - Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> patchRegistries = - pluginEntityRegistryLoader.getPatchRegistries(); - Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> patchDiagnostics = new HashMap<>(); + Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> + patchRegistries = pluginEntityRegistryLoader.getPatchRegistries(); + Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> patchDiagnostics = + new HashMap<>(); patchRegistries.keySet().forEach(name -> patchDiagnostics.put(name, new HashMap<>())); - patchRegistries.entrySet().forEach(entry -> { - entry.getValue() - .entrySet() - 
.forEach(versionLoadEntry -> patchDiagnostics.get(entry.getKey()) - .put(versionLoadEntry.getKey(), versionLoadEntry.getValue().getSecond())); - }); + patchRegistries + .entrySet() + .forEach( + entry -> { + entry + .getValue() + .entrySet() + .forEach( + versionLoadEntry -> + patchDiagnostics + .get(entry.getKey()) + .put( + versionLoadEntry.getKey(), + versionLoadEntry.getValue().getSecond())); + }); return patchDiagnostics; } @@ -74,7 +90,8 @@ private boolean checkImpactAnalysisSupport(WebApplicationContext ctx) { protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { config.put("noCode", "true"); - WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext()); + WebApplicationContext ctx = + WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext()); config.put("supportsImpactAnalysis", checkImpactAnalysisSupport(ctx)); @@ -85,21 +102,30 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IO ConfigurationProvider configProvider = getConfigProvider(ctx); - Map<String, Object> telemetryConfig = new HashMap<String, Object>() {{ - put("enabledCli", configProvider.getTelemetry().enabledCli); - put("enabledIngestion", configProvider.getTelemetry().enabledIngestion); - }}; + Map<String, Object> telemetryConfig = + new HashMap<String, Object>() { + { + put("enabledCli", configProvider.getTelemetry().enabledCli); + put("enabledIngestion", configProvider.getTelemetry().enabledIngestion); + } + }; config.put("telemetry", telemetryConfig); - Map<String, Object> ingestionConfig = new HashMap<String, Object>() {{ - put("enabled", configProvider.getIngestion().enabled); - put("defaultCliVersion", configProvider.getIngestion().defaultCliVersion); - }}; + Map<String, Object> ingestionConfig = + new HashMap<String, Object>() { + { + put("enabled", configProvider.getIngestion().enabled); + put("defaultCliVersion", configProvider.getIngestion().defaultCliVersion); + } + }; config.put("managedIngestion", ingestionConfig); - Map<String, Object> datahubConfig = new HashMap<String, Object>() {{ - put("serverType", configProvider.getDatahub().serverType); - }}; + Map<String, Object> datahubConfig = + new HashMap<String, Object>() { + { + put("serverType", configProvider.getDatahub().serverType); + } + }; config.put("datahub", datahubConfig); resp.setContentType("application/json"); diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java index d788222c5d87b..ebcfaeca7059e 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java @@ -1,13 +1,22 @@ package com.datahub.gms.servlet; -import com.linkedin.metadata.config.search.SearchConfiguration; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER; + import com.datahub.gms.util.CSVWriter; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import 
com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; +import java.io.PrintWriter; +import java.util.Map; +import java.util.Optional; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.search.SearchRequest; import org.opensearch.index.query.BoolQueryBuilder; @@ -22,16 +31,6 @@ import org.springframework.web.context.WebApplicationContext; import org.springframework.web.context.support.WebApplicationContextUtils; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import java.io.PrintWriter; -import java.util.Map; -import java.util.Optional; - -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - @Slf4j public class ConfigSearchExport extends HttpServlet { @@ -49,40 +48,73 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) { CSVWriter writer = CSVWriter.builder().printWriter(pw).build(); - String[] header = {"entity", "query_category", "match_category", "query_type", "field_name", - "field_weight", "search_analyzer", "case_insensitive", "query_boost", "raw"}; + String[] header = { + "entity", + "query_category", + "match_category", + "query_type", + "field_name", + "field_weight", + "search_analyzer", + "case_insensitive", + "query_boost", + "raw" + }; writer.println(header); SEARCHABLE_ENTITY_TYPES.stream() - .map(entityType -> { + .map( + entityType -> { try { - EntitySpec entitySpec = entityRegistry.getEntitySpec(EntityTypeMapper.getName(entityType)); + EntitySpec entitySpec = + entityRegistry.getEntitySpec(EntityTypeMapper.getName(entityType)); return Optional.of(entitySpec); } catch (IllegalArgumentException e) { log.warn("Failed to resolve entity `{}`", entityType.name()); return Optional.<EntitySpec>empty(); } }) - .filter(Optional::isPresent) - .forEach(entitySpecOpt -> { + .filter(Optional::isPresent) + .forEach( + entitySpecOpt -> { EntitySpec entitySpec = entitySpecOpt.get(); - SearchRequest searchRequest = SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, null) - .getSearchRequest("*", null, null, 0, 0, new SearchFlags() - .setFulltext(true).setSkipHighlighting(true).setSkipAggregates(true), null); + SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, null) + .getSearchRequest( + "*", + null, + null, + 0, + 0, + new SearchFlags() + .setFulltext(true) + .setSkipHighlighting(true) + .setSkipAggregates(true), + null); - FunctionScoreQueryBuilder rankingQuery = ((FunctionScoreQueryBuilder) ((BoolQueryBuilder) - searchRequest.source().query()).must().get(0)); + FunctionScoreQueryBuilder rankingQuery = + ((FunctionScoreQueryBuilder) + ((BoolQueryBuilder) searchRequest.source().query()).must().get(0)); BoolQueryBuilder relevancyQuery = (BoolQueryBuilder) rankingQuery.query(); - BoolQueryBuilder simpleQueryString = (BoolQueryBuilder) relevancyQuery.should().get(0); + BoolQueryBuilder simpleQueryString = + (BoolQueryBuilder) relevancyQuery.should().get(0); BoolQueryBuilder exactPrefixMatch = (BoolQueryBuilder) relevancyQuery.should().get(1); for (QueryBuilder simpBuilder : simpleQueryString.should()) { SimpleQueryStringBuilder sqsb = (SimpleQueryStringBuilder) 
simpBuilder; for (Map.Entry<String, Float> fieldWeight : sqsb.fields().entrySet()) { - String[] row = {entitySpec.getName(), "relevancy", "fulltext", sqsb.getClass().getSimpleName(), - fieldWeight.getKey(), - fieldWeight.getValue().toString(), sqsb.analyzer(), "true", String.valueOf(sqsb.boost()), - sqsb.toString().replaceAll("\n", "")}; + String[] row = { + entitySpec.getName(), + "relevancy", + "fulltext", + sqsb.getClass().getSimpleName(), + fieldWeight.getKey(), + fieldWeight.getValue().toString(), + sqsb.analyzer(), + "true", + String.valueOf(sqsb.boost()), + sqsb.toString().replaceAll("\n", "") + }; writer.println(row); } } @@ -90,66 +122,119 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) { for (QueryBuilder builder : exactPrefixMatch.should()) { if (builder instanceof TermQueryBuilder) { TermQueryBuilder tqb = (TermQueryBuilder) builder; - String[] row = {entitySpec.getName(), "relevancy", "exact_match", tqb.getClass().getSimpleName(), - tqb.fieldName(), - String.valueOf(tqb.boost()), KEYWORD_ANALYZER, String.valueOf(tqb.caseInsensitive()), "", - tqb.toString().replaceAll("\n", "")}; + String[] row = { + entitySpec.getName(), + "relevancy", + "exact_match", + tqb.getClass().getSimpleName(), + tqb.fieldName(), + String.valueOf(tqb.boost()), + KEYWORD_ANALYZER, + String.valueOf(tqb.caseInsensitive()), + "", + tqb.toString().replaceAll("\n", "") + }; writer.println(row); } else if (builder instanceof MatchPhrasePrefixQueryBuilder) { MatchPhrasePrefixQueryBuilder mppqb = (MatchPhrasePrefixQueryBuilder) builder; - String[] row = {entitySpec.getName(), "relevancy", "prefix_match", mppqb.getClass().getSimpleName(), - mppqb.fieldName(), - String.valueOf(mppqb.boost()), "", "true", "", mppqb.toString().replaceAll("\n", "")}; + String[] row = { + entitySpec.getName(), + "relevancy", + "prefix_match", + mppqb.getClass().getSimpleName(), + mppqb.fieldName(), + String.valueOf(mppqb.boost()), + "", + "true", + "", + mppqb.toString().replaceAll("\n", "") + }; writer.println(row); } else { - throw new IllegalStateException("Unhandled exact prefix builder: " + builder.getClass().getName()); + throw new IllegalStateException( + "Unhandled exact prefix builder: " + builder.getClass().getName()); } } - for (FunctionScoreQueryBuilder.FilterFunctionBuilder ffb : rankingQuery.filterFunctionBuilders()) { + for (FunctionScoreQueryBuilder.FilterFunctionBuilder ffb : + rankingQuery.filterFunctionBuilders()) { if (ffb.getFilter() instanceof MatchAllQueryBuilder) { MatchAllQueryBuilder filter = (MatchAllQueryBuilder) ffb.getFilter(); if (ffb.getScoreFunction() instanceof WeightBuilder) { WeightBuilder scoreFunction = (WeightBuilder) ffb.getScoreFunction(); - String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(), - scoreFunction.getClass().getSimpleName(), "*", - String.valueOf(scoreFunction.getWeight()), "", "true", String.valueOf(filter.boost()), - String.format("{\"filter\":%s,\"scoreFunction\":%s", filter, - CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")}; + String[] row = { + entitySpec.getName(), + "score", + filter.getClass().getSimpleName(), + scoreFunction.getClass().getSimpleName(), + "*", + String.valueOf(scoreFunction.getWeight()), + "", + "true", + String.valueOf(filter.boost()), + String.format( + "{\"filter\":%s,\"scoreFunction\":%s", + filter, CSVWriter.builderToString(scoreFunction)) + .replaceAll("\n", "") + }; writer.println(row); } else if (ffb.getScoreFunction() instanceof FieldValueFactorFunctionBuilder) { - 
FieldValueFactorFunctionBuilder scoreFunction = (FieldValueFactorFunctionBuilder) ffb.getScoreFunction(); - String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(), - scoreFunction.getClass().getSimpleName(), scoreFunction.fieldName(), - String.valueOf(scoreFunction.factor()), "", "true", String.valueOf(filter.boost()), - String.format("{\"filter\":%s,\"scoreFunction\":%s", filter, CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")}; + FieldValueFactorFunctionBuilder scoreFunction = + (FieldValueFactorFunctionBuilder) ffb.getScoreFunction(); + String[] row = { + entitySpec.getName(), + "score", + filter.getClass().getSimpleName(), + scoreFunction.getClass().getSimpleName(), + scoreFunction.fieldName(), + String.valueOf(scoreFunction.factor()), + "", + "true", + String.valueOf(filter.boost()), + String.format( + "{\"filter\":%s,\"scoreFunction\":%s", + filter, CSVWriter.builderToString(scoreFunction)) + .replaceAll("\n", "") + }; writer.println(row); } else { - throw new IllegalStateException("Unhandled score function: " + ffb.getScoreFunction()); + throw new IllegalStateException( + "Unhandled score function: " + ffb.getScoreFunction()); } } else if (ffb.getFilter() instanceof TermQueryBuilder) { TermQueryBuilder filter = (TermQueryBuilder) ffb.getFilter(); if (ffb.getScoreFunction() instanceof WeightBuilder) { WeightBuilder scoreFunction = (WeightBuilder) ffb.getScoreFunction(); - String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(), - scoreFunction.getClass().getSimpleName(), filter.fieldName() + "=" + filter.value().toString(), - String.valueOf(scoreFunction.getWeight()), KEYWORD_ANALYZER, String.valueOf(filter.caseInsensitive()), - String.valueOf(filter.boost()), String.format("{\"filter\":%s,\"scoreFunction\":%s", filter, - CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")}; + String[] row = { + entitySpec.getName(), + "score", + filter.getClass().getSimpleName(), + scoreFunction.getClass().getSimpleName(), + filter.fieldName() + "=" + filter.value().toString(), + String.valueOf(scoreFunction.getWeight()), + KEYWORD_ANALYZER, + String.valueOf(filter.caseInsensitive()), + String.valueOf(filter.boost()), + String.format( + "{\"filter\":%s,\"scoreFunction\":%s", + filter, CSVWriter.builderToString(scoreFunction)) + .replaceAll("\n", "") + }; writer.println(row); } else { - throw new IllegalStateException("Unhandled score function: " + ffb.getScoreFunction()); + throw new IllegalStateException( + "Unhandled score function: " + ffb.getScoreFunction()); } } else { - throw new IllegalStateException("Unhandled function score filter: " + ffb.getFilter()); + throw new IllegalStateException( + "Unhandled function score filter: " + ffb.getFilter()); } } }); } - @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) { if (!"csv".equals(req.getParameter("format"))) { @@ -157,7 +242,8 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) { return; } - WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext()); + WebApplicationContext ctx = + WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext()); try { resp.setContentType("text/csv"); diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java b/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java index 79d4f7077b797..da5f0b75efdae 100644 --- 
a/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java @@ -1,45 +1,41 @@ package com.datahub.gms.util; - -import lombok.Builder; -import org.opensearch.index.query.functionscore.FieldValueFactorFunctionBuilder; -import org.opensearch.index.query.functionscore.WeightBuilder; - import java.io.PrintWriter; import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.Builder; +import org.opensearch.index.query.functionscore.FieldValueFactorFunctionBuilder; +import org.opensearch.index.query.functionscore.WeightBuilder; @Builder public class CSVWriter { - private PrintWriter printWriter; - - public CSVWriter println(String[] data) { - printWriter.println(convertToCSV(data)); - return this; - } - - private static String convertToCSV(String[] data) { - return Stream.of(data) - .map(CSVWriter::escapeSpecialCharacters) - .collect(Collectors.joining(",")); - } - - private static String escapeSpecialCharacters(String data) { - String escapedData = data.replaceAll("\\R", " "); - if (data.contains(",") || data.contains("\"") || data.contains("'")) { - data = data.replace("\"", "\"\""); - escapedData = "\"" + data + "\""; - } - return escapedData; - } - - public static String builderToString(FieldValueFactorFunctionBuilder in) { - return String.format("{\"field\":\"%s\",\"factor\":%s,\"missing\":%s,\"modifier\":\"%s\"}", - in.fieldName(), in.factor(), in.missing(), in.modifier()); - } - - public static String builderToString(WeightBuilder in) { - return String.format("{\"weight\":%s}", in.getWeight()); + private PrintWriter printWriter; + + public CSVWriter println(String[] data) { + printWriter.println(convertToCSV(data)); + return this; + } + + private static String convertToCSV(String[] data) { + return Stream.of(data).map(CSVWriter::escapeSpecialCharacters).collect(Collectors.joining(",")); + } + + private static String escapeSpecialCharacters(String data) { + String escapedData = data.replaceAll("\\R", " "); + if (data.contains(",") || data.contains("\"") || data.contains("'")) { + data = data.replace("\"", "\"\""); + escapedData = "\"" + data + "\""; } + return escapedData; + } + + public static String builderToString(FieldValueFactorFunctionBuilder in) { + return String.format( + "{\"field\":\"%s\",\"factor\":%s,\"missing\":%s,\"modifier\":\"%s\"}", + in.fieldName(), in.factor(), in.missing(), in.modifier()); + } + + public static String builderToString(WeightBuilder in) { + return String.format("{\"weight\":%s}", in.getWeight()); + } } - diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java index df960808d8a41..8258a7d226ed6 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java @@ -9,10 +9,7 @@ import lombok.Data; import lombok.Getter; - -/** - * This policies config file defines the base set of privileges that DataHub supports. - */ +/** This policies config file defines the base set of privileges that DataHub supports. 
*/ public class PoliciesConfig { public static final String PLATFORM_POLICY_TYPE = "PLATFORM"; @@ -22,547 +19,580 @@ public class PoliciesConfig { // Platform Privileges // - public static final Privilege MANAGE_POLICIES_PRIVILEGE = Privilege.of( - "MANAGE_POLICIES", - "Manage Policies", - "Create and remove access control policies. Be careful - Actors with this privilege are effectively super users."); + public static final Privilege MANAGE_POLICIES_PRIVILEGE = + Privilege.of( + "MANAGE_POLICIES", + "Manage Policies", + "Create and remove access control policies. Be careful - Actors with this privilege are effectively super users."); + + public static final Privilege MANAGE_INGESTION_PRIVILEGE = + Privilege.of( + "MANAGE_INGESTION", + "Manage Metadata Ingestion", + "Create, remove, and update Metadata Ingestion sources."); + + public static final Privilege MANAGE_SECRETS_PRIVILEGE = + Privilege.of( + "MANAGE_SECRETS", "Manage Secrets", "Create & remove Secrets stored inside DataHub."); + + public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = + Privilege.of( + "MANAGE_USERS_AND_GROUPS", + "Manage Users & Groups", + "Create, remove, and update users and groups on DataHub."); + + public static final Privilege VIEW_ANALYTICS_PRIVILEGE = + Privilege.of("VIEW_ANALYTICS", "View Analytics", "View the DataHub analytics dashboard."); + + public static final Privilege GET_ANALYTICS_PRIVILEGE = + Privilege.of( + "GET_ANALYTICS_PRIVILEGE", + "Analytics API access", + "API read access to raw analytics data."); - public static final Privilege MANAGE_INGESTION_PRIVILEGE = Privilege.of( - "MANAGE_INGESTION", - "Manage Metadata Ingestion", - "Create, remove, and update Metadata Ingestion sources."); + public static final Privilege GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE = + Privilege.of( + "GENERATE_PERSONAL_ACCESS_TOKENS", + "Generate Personal Access Tokens", + "Generate personal access tokens for use with DataHub APIs."); - public static final Privilege MANAGE_SECRETS_PRIVILEGE = Privilege.of( - "MANAGE_SECRETS", - "Manage Secrets", - "Create & remove Secrets stored inside DataHub."); + public static final Privilege MANAGE_ACCESS_TOKENS = + Privilege.of( + "MANAGE_ACCESS_TOKENS", + "Manage All Access Tokens", + "Create, list and revoke access tokens on behalf of users in DataHub. 
Be careful - Actors with this " + + "privilege are effectively super users that can impersonate other users."); - public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = Privilege.of( - "MANAGE_USERS_AND_GROUPS", - "Manage Users & Groups", - "Create, remove, and update users and groups on DataHub."); + public static final Privilege MANAGE_DOMAINS_PRIVILEGE = + Privilege.of("MANAGE_DOMAINS", "Manage Domains", "Create and remove Asset Domains."); - public static final Privilege VIEW_ANALYTICS_PRIVILEGE = Privilege.of( - "VIEW_ANALYTICS", - "View Analytics", - "View the DataHub analytics dashboard."); + public static final Privilege MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = + Privilege.of( + "MANAGE_GLOBAL_ANNOUNCEMENTS", + "Manage Home Page Posts", + "Create and delete home page posts"); - public static final Privilege GET_ANALYTICS_PRIVILEGE = Privilege.of( - "GET_ANALYTICS_PRIVILEGE", - "Analytics API access", - "API read access to raw analytics data."); + public static final Privilege MANAGE_TESTS_PRIVILEGE = + Privilege.of("MANAGE_TESTS", "Manage Tests", "Create and remove Asset Tests."); - public static final Privilege GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE = Privilege.of( - "GENERATE_PERSONAL_ACCESS_TOKENS", - "Generate Personal Access Tokens", - "Generate personal access tokens for use with DataHub APIs."); - - public static final Privilege MANAGE_ACCESS_TOKENS = Privilege.of( - "MANAGE_ACCESS_TOKENS", - "Manage All Access Tokens", - "Create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this " - + "privilege are effectively super users that can impersonate other users." - ); - - public static final Privilege MANAGE_DOMAINS_PRIVILEGE = Privilege.of( - "MANAGE_DOMAINS", - "Manage Domains", - "Create and remove Asset Domains."); - - public static final Privilege MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = Privilege.of( - "MANAGE_GLOBAL_ANNOUNCEMENTS", - "Manage Home Page Posts", - "Create and delete home page posts"); - - public static final Privilege MANAGE_TESTS_PRIVILEGE = Privilege.of( - "MANAGE_TESTS", - "Manage Tests", - "Create and remove Asset Tests."); - - public static final Privilege MANAGE_GLOSSARIES_PRIVILEGE = Privilege.of( - "MANAGE_GLOSSARIES", - "Manage Glossaries", - "Create, edit, and remove Glossary Entities"); + public static final Privilege MANAGE_GLOSSARIES_PRIVILEGE = + Privilege.of( + "MANAGE_GLOSSARIES", "Manage Glossaries", "Create, edit, and remove Glossary Entities"); public static final Privilege MANAGE_USER_CREDENTIALS_PRIVILEGE = - Privilege.of("MANAGE_USER_CREDENTIALS", "Manage User Credentials", + Privilege.of( + "MANAGE_USER_CREDENTIALS", + "Manage User Credentials", "Manage credentials for native DataHub users, including inviting new users and resetting passwords"); - public static final Privilege MANAGE_TAGS_PRIVILEGE = Privilege.of( - "MANAGE_TAGS", - "Manage Tags", - "Create and remove Tags."); - - public static final Privilege CREATE_TAGS_PRIVILEGE = Privilege.of( - "CREATE_TAGS", - "Create Tags", - "Create new Tags."); - - public static final Privilege CREATE_DOMAINS_PRIVILEGE = Privilege.of( - "CREATE_DOMAINS", - "Create Domains", - "Create new Domains."); - - public static final Privilege CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = Privilege.of( - "CREATE_GLOBAL_ANNOUNCEMENTS", - "Create Global Announcements", - "Create new Global Announcements."); - - public static final Privilege MANAGE_GLOBAL_VIEWS = Privilege.of( - "MANAGE_GLOBAL_VIEWS", - "Manage Public Views", - "Create, update, and delete any 
Public (shared) Views."); - - public static final Privilege MANAGE_GLOBAL_OWNERSHIP_TYPES = Privilege.of( - "MANAGE_GLOBAL_OWNERSHIP_TYPES", - "Manage Ownership Types", - "Create, update and delete Ownership Types."); - - public static final List<Privilege> PLATFORM_PRIVILEGES = ImmutableList.of( - MANAGE_POLICIES_PRIVILEGE, - MANAGE_USERS_AND_GROUPS_PRIVILEGE, - VIEW_ANALYTICS_PRIVILEGE, - GET_ANALYTICS_PRIVILEGE, - MANAGE_DOMAINS_PRIVILEGE, - MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, - MANAGE_INGESTION_PRIVILEGE, - MANAGE_SECRETS_PRIVILEGE, - GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE, - MANAGE_ACCESS_TOKENS, - MANAGE_TESTS_PRIVILEGE, - MANAGE_GLOSSARIES_PRIVILEGE, - MANAGE_USER_CREDENTIALS_PRIVILEGE, - MANAGE_TAGS_PRIVILEGE, - CREATE_TAGS_PRIVILEGE, - CREATE_DOMAINS_PRIVILEGE, - CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, - MANAGE_GLOBAL_VIEWS, - MANAGE_GLOBAL_OWNERSHIP_TYPES - ); + public static final Privilege MANAGE_TAGS_PRIVILEGE = + Privilege.of("MANAGE_TAGS", "Manage Tags", "Create and remove Tags."); + + public static final Privilege CREATE_TAGS_PRIVILEGE = + Privilege.of("CREATE_TAGS", "Create Tags", "Create new Tags."); + + public static final Privilege CREATE_DOMAINS_PRIVILEGE = + Privilege.of("CREATE_DOMAINS", "Create Domains", "Create new Domains."); + + public static final Privilege CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = + Privilege.of( + "CREATE_GLOBAL_ANNOUNCEMENTS", + "Create Global Announcements", + "Create new Global Announcements."); + + public static final Privilege MANAGE_GLOBAL_VIEWS = + Privilege.of( + "MANAGE_GLOBAL_VIEWS", + "Manage Public Views", + "Create, update, and delete any Public (shared) Views."); + + public static final Privilege MANAGE_GLOBAL_OWNERSHIP_TYPES = + Privilege.of( + "MANAGE_GLOBAL_OWNERSHIP_TYPES", + "Manage Ownership Types", + "Create, update and delete Ownership Types."); + + public static final List<Privilege> PLATFORM_PRIVILEGES = + ImmutableList.of( + MANAGE_POLICIES_PRIVILEGE, + MANAGE_USERS_AND_GROUPS_PRIVILEGE, + VIEW_ANALYTICS_PRIVILEGE, + GET_ANALYTICS_PRIVILEGE, + MANAGE_DOMAINS_PRIVILEGE, + MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, + MANAGE_INGESTION_PRIVILEGE, + MANAGE_SECRETS_PRIVILEGE, + GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE, + MANAGE_ACCESS_TOKENS, + MANAGE_TESTS_PRIVILEGE, + MANAGE_GLOSSARIES_PRIVILEGE, + MANAGE_USER_CREDENTIALS_PRIVILEGE, + MANAGE_TAGS_PRIVILEGE, + CREATE_TAGS_PRIVILEGE, + CREATE_DOMAINS_PRIVILEGE, + CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, + MANAGE_GLOBAL_VIEWS, + MANAGE_GLOBAL_OWNERSHIP_TYPES); // Resource Privileges // - public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = Privilege.of( - "VIEW_ENTITY_PAGE", - "View Entity Page", - "The ability to view the entity page."); - - public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_TAGS", - "Edit Tags", - "The ability to add and remove tags to an asset."); - - public static final Privilege EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_GLOSSARY_TERMS", - "Edit Glossary Terms", - "The ability to add and remove glossary terms to an asset."); - - public static final Privilege EDIT_ENTITY_OWNERS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_OWNERS", - "Edit Owners", - "The ability to add and remove owners of an entity."); - - public static final Privilege EDIT_ENTITY_DOCS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_DOCS", - "Edit Description", - "The ability to edit the description (documentation) of an entity."); - - public static final Privilege EDIT_ENTITY_DOC_LINKS_PRIVILEGE = Privilege.of( - 
"EDIT_ENTITY_DOC_LINKS", - "Edit Links", - "The ability to edit links associated with an entity."); - - public static final Privilege EDIT_ENTITY_STATUS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_STATUS", - "Edit Status", - "The ability to edit the status of an entity (soft deleted or not)."); - - public static final Privilege EDIT_ENTITY_DOMAINS_PRIVILEGE = Privilege.of( - "EDIT_DOMAINS_PRIVILEGE", - "Edit Domain", - "The ability to edit the Domain of an entity."); - - public static final Privilege EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_DATA_PRODUCTS", - "Edit Data Product", - "The ability to edit the Data Product of an entity."); - - public static final Privilege EDIT_ENTITY_DEPRECATION_PRIVILEGE = Privilege.of( - "EDIT_DEPRECATION_PRIVILEGE", - "Edit Deprecation", - "The ability to edit the Deprecation status of an entity."); - - public static final Privilege EDIT_ENTITY_ASSERTIONS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_ASSERTIONS", - "Edit Assertions", - "The ability to add and remove assertions from an entity."); - - public static final Privilege EDIT_ENTITY_OPERATIONS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_OPERATIONS", - "Edit Operations", - "The ability to report or edit operations information about an entity."); - - public static final Privilege EDIT_ENTITY_PRIVILEGE = Privilege.of( - "EDIT_ENTITY", - "Edit Entity", - "The ability to edit any information about an entity. Super user privileges for the entity."); - - public static final Privilege DELETE_ENTITY_PRIVILEGE = Privilege.of( - "DELETE_ENTITY", - "Delete", - "The ability to delete the delete this entity."); - - public static final Privilege EDIT_LINEAGE_PRIVILEGE = Privilege.of( - "EDIT_LINEAGE", - "Edit Lineage", - "The ability to add and remove lineage edges for this entity."); - - public static final Privilege EDIT_ENTITY_EMBED_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_EMBED", - "Edit Embedded Content", - "The ability to edit the embedded content for an entity."); - - public static final List<Privilege> COMMON_ENTITY_PRIVILEGES = ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_TAGS_PRIVILEGE, - EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, - EDIT_ENTITY_STATUS_PRIVILEGE, - EDIT_ENTITY_DOMAINS_PRIVILEGE, - EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE, - EDIT_ENTITY_DEPRECATION_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE, - DELETE_ENTITY_PRIVILEGE - ); + public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = + Privilege.of("VIEW_ENTITY_PAGE", "View Entity Page", "The ability to view the entity page."); + + public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_TAGS", "Edit Tags", "The ability to add and remove tags to an asset."); + + public static final Privilege EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_GLOSSARY_TERMS", + "Edit Glossary Terms", + "The ability to add and remove glossary terms to an asset."); + + public static final Privilege EDIT_ENTITY_OWNERS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_OWNERS", + "Edit Owners", + "The ability to add and remove owners of an entity."); + + public static final Privilege EDIT_ENTITY_DOCS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_DOCS", + "Edit Description", + "The ability to edit the description (documentation) of an entity."); + + public static final Privilege EDIT_ENTITY_DOC_LINKS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_DOC_LINKS", + "Edit Links", + "The ability to edit links associated with 
an entity."); + + public static final Privilege EDIT_ENTITY_STATUS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_STATUS", + "Edit Status", + "The ability to edit the status of an entity (soft deleted or not)."); + + public static final Privilege EDIT_ENTITY_DOMAINS_PRIVILEGE = + Privilege.of( + "EDIT_DOMAINS_PRIVILEGE", "Edit Domain", "The ability to edit the Domain of an entity."); + + public static final Privilege EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_DATA_PRODUCTS", + "Edit Data Product", + "The ability to edit the Data Product of an entity."); + + public static final Privilege EDIT_ENTITY_DEPRECATION_PRIVILEGE = + Privilege.of( + "EDIT_DEPRECATION_PRIVILEGE", + "Edit Deprecation", + "The ability to edit the Deprecation status of an entity."); + + public static final Privilege EDIT_ENTITY_ASSERTIONS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_ASSERTIONS", + "Edit Assertions", + "The ability to add and remove assertions from an entity."); + + public static final Privilege EDIT_ENTITY_OPERATIONS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_OPERATIONS", + "Edit Operations", + "The ability to report or edit operations information about an entity."); + + public static final Privilege EDIT_ENTITY_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY", + "Edit Entity", + "The ability to edit any information about an entity. Super user privileges for the entity."); + + public static final Privilege DELETE_ENTITY_PRIVILEGE = + Privilege.of("DELETE_ENTITY", "Delete", "The ability to delete the delete this entity."); + + public static final Privilege EDIT_LINEAGE_PRIVILEGE = + Privilege.of( + "EDIT_LINEAGE", + "Edit Lineage", + "The ability to add and remove lineage edges for this entity."); + + public static final Privilege EDIT_ENTITY_EMBED_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_EMBED", + "Edit Embedded Content", + "The ability to edit the embedded content for an entity."); + + public static final List<Privilege> COMMON_ENTITY_PRIVILEGES = + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_TAGS_PRIVILEGE, + EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_STATUS_PRIVILEGE, + EDIT_ENTITY_DOMAINS_PRIVILEGE, + EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE, + EDIT_ENTITY_DEPRECATION_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE); // Dataset Privileges - public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = Privilege.of( - "EDIT_DATASET_COL_TAGS", - "Edit Dataset Column Tags", - "The ability to edit the column (field) tags associated with a dataset schema." - ); - - public static final Privilege EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE = Privilege.of( - "EDIT_DATASET_COL_GLOSSARY_TERMS", - "Edit Dataset Column Glossary Terms", - "The ability to edit the column (field) glossary terms associated with a dataset schema." - ); - - public static final Privilege EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE = Privilege.of( - "EDIT_DATASET_COL_DESCRIPTION", - "Edit Dataset Column Descriptions", - "The ability to edit the column (field) descriptions associated with a dataset schema." 
- ); - - public static final Privilege VIEW_DATASET_USAGE_PRIVILEGE = Privilege.of( - "VIEW_DATASET_USAGE", - "View Dataset Usage", - "The ability to access dataset usage information (includes usage statistics and queries)."); - - public static final Privilege VIEW_DATASET_PROFILE_PRIVILEGE = Privilege.of( - "VIEW_DATASET_PROFILE", - "View Dataset Profile", - "The ability to access dataset profile (snapshot statistics)"); - - public static final Privilege EDIT_QUERIES_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_QUERIES", - "Edit Dataset Queries", - "The ability to edit the Queries for a Dataset."); + public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = + Privilege.of( + "EDIT_DATASET_COL_TAGS", + "Edit Dataset Column Tags", + "The ability to edit the column (field) tags associated with a dataset schema."); + + public static final Privilege EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE = + Privilege.of( + "EDIT_DATASET_COL_GLOSSARY_TERMS", + "Edit Dataset Column Glossary Terms", + "The ability to edit the column (field) glossary terms associated with a dataset schema."); + + public static final Privilege EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE = + Privilege.of( + "EDIT_DATASET_COL_DESCRIPTION", + "Edit Dataset Column Descriptions", + "The ability to edit the column (field) descriptions associated with a dataset schema."); + + public static final Privilege VIEW_DATASET_USAGE_PRIVILEGE = + Privilege.of( + "VIEW_DATASET_USAGE", + "View Dataset Usage", + "The ability to access dataset usage information (includes usage statistics and queries)."); + + public static final Privilege VIEW_DATASET_PROFILE_PRIVILEGE = + Privilege.of( + "VIEW_DATASET_PROFILE", + "View Dataset Profile", + "The ability to access dataset profile (snapshot statistics)"); + + public static final Privilege EDIT_QUERIES_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_QUERIES", + "Edit Dataset Queries", + "The ability to edit the Queries for a Dataset."); // Tag Privileges - public static final Privilege EDIT_TAG_COLOR_PRIVILEGE = Privilege.of( - "EDIT_TAG_COLOR", - "Edit Tag Color", - "The ability to change the color of a Tag."); + public static final Privilege EDIT_TAG_COLOR_PRIVILEGE = + Privilege.of("EDIT_TAG_COLOR", "Edit Tag Color", "The ability to change the color of a Tag."); // Group Privileges - public static final Privilege EDIT_GROUP_MEMBERS_PRIVILEGE = Privilege.of( - "EDIT_GROUP_MEMBERS", - "Edit Group Members", - "The ability to add and remove members to a group."); + public static final Privilege EDIT_GROUP_MEMBERS_PRIVILEGE = + Privilege.of( + "EDIT_GROUP_MEMBERS", + "Edit Group Members", + "The ability to add and remove members to a group."); // User Privileges - public static final Privilege EDIT_USER_PROFILE_PRIVILEGE = Privilege.of( - "EDIT_USER_PROFILE", - "Edit User Profile", - "The ability to change the user's profile including display name, bio, title, profile image, etc."); + public static final Privilege EDIT_USER_PROFILE_PRIVILEGE = + Privilege.of( + "EDIT_USER_PROFILE", + "Edit User Profile", + "The ability to change the user's profile including display name, bio, title, profile image, etc."); // User + Group Privileges - public static final Privilege EDIT_CONTACT_INFO_PRIVILEGE = Privilege.of( - "EDIT_CONTACT_INFO", - "Edit Contact Information", - "The ability to change the contact information such as email & chat handles."); + public static final Privilege EDIT_CONTACT_INFO_PRIVILEGE = + Privilege.of( + "EDIT_CONTACT_INFO", + "Edit Contact Information", + "The ability to change the contact 
information such as email & chat handles."); // Glossary Node Privileges - public static final Privilege MANAGE_GLOSSARY_CHILDREN_PRIVILEGE = Privilege.of( - "MANAGE_GLOSSARY_CHILDREN", - "Manage Direct Glossary Children", - "The ability to create and delete the direct children of this entity."); + public static final Privilege MANAGE_GLOSSARY_CHILDREN_PRIVILEGE = + Privilege.of( + "MANAGE_GLOSSARY_CHILDREN", + "Manage Direct Glossary Children", + "The ability to create and delete the direct children of this entity."); // Glossary Node Privileges - public static final Privilege MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE = Privilege.of( - "MANAGE_ALL_GLOSSARY_CHILDREN", - "Manage All Glossary Children", - "The ability to create and delete everything underneath this entity."); - - // REST API Specific Privileges (not adding to lists of privileges above as those affect GraphQL as well) - public static final Privilege GET_TIMELINE_PRIVILEGE = Privilege.of( - "GET_TIMELINE_PRIVILEGE", - "Get Timeline API", - "The ability to use the GET Timeline API."); - - public static final Privilege GET_ENTITY_PRIVILEGE = Privilege.of( - "GET_ENTITY_PRIVILEGE", - "Get Entity + Relationships API", - "The ability to use the GET Entity and Relationships API."); - - public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE = Privilege.of( - "GET_TIMESERIES_ASPECT_PRIVILEGE", - "Get Timeseries Aspect API", - "The ability to use the GET Timeseries Aspect API."); - - public static final Privilege GET_COUNTS_PRIVILEGE = Privilege.of( - "GET_COUNTS_PRIVILEGE", - "Get Aspect/Entity Count APIs", - "The ability to use the GET Aspect/Entity Count APIs."); - - public static final Privilege RESTORE_INDICES_PRIVILEGE = Privilege.of( - "RESTORE_INDICES_PRIVILEGE", - "Restore Indicies API", - "The ability to use the Restore Indices API."); - - public static final Privilege GET_TIMESERIES_INDEX_SIZES_PRIVILEGE = Privilege.of( - "GET_TIMESERIES_INDEX_SIZES_PRIVILEGE", - "Get Timeseries index sizes API", - "The ability to use the get Timeseries indices size API."); - - public static final Privilege TRUNCATE_TIMESERIES_INDEX_PRIVILEGE = Privilege.of( - "TRUNCATE_TIMESERIES_INDEX_PRIVILEGE", - "Truncate timeseries aspect index size API", - "The ability to use the API to truncate a timeseries index."); - - public static final Privilege GET_ES_TASK_STATUS_PRIVILEGE = Privilege.of( - "GET_ES_TASK_STATUS_PRIVILEGE", - "Get ES task status API", - "The ability to use the get task status API for an ElasticSearch task."); - - public static final Privilege SEARCH_PRIVILEGE = Privilege.of( - "SEARCH_PRIVILEGE", - "Search API", - "The ability to access search APIs."); - - public static final Privilege SET_WRITEABLE_PRIVILEGE = Privilege.of( - "SET_WRITEABLE_PRIVILEGE", - "Enable/Disable Writeability API", - "The ability to enable or disable GMS writeability for data migrations."); - - public static final Privilege APPLY_RETENTION_PRIVILEGE = Privilege.of( - "APPLY_RETENTION_PRIVILEGE", - "Apply Retention API", - "The ability to apply retention using the API."); - - public static final Privilege PRODUCE_PLATFORM_EVENT_PRIVILEGE = Privilege.of( - "PRODUCE_PLATFORM_EVENT_PRIVILEGE", - "Produce Platform Event API", - "The ability to produce Platform Events using the API."); - - public static final ResourcePrivileges DATASET_PRIVILEGES = ResourcePrivileges.of( - "dataset", - "Datasets", - "Datasets indexed by DataHub", Stream.of( - COMMON_ENTITY_PRIVILEGES, - ImmutableList.of( - VIEW_DATASET_USAGE_PRIVILEGE, - VIEW_DATASET_PROFILE_PRIVILEGE, - 
EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE,
-              EDIT_DATASET_COL_TAGS_PRIVILEGE,
-              EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE,
-              EDIT_ENTITY_ASSERTIONS_PRIVILEGE,
-              EDIT_LINEAGE_PRIVILEGE,
-              EDIT_ENTITY_EMBED_PRIVILEGE,
-              EDIT_QUERIES_PRIVILEGE))
-          .flatMap(Collection::stream)
-          .collect(Collectors.toList())
-  );
+  public static final Privilege MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_ALL_GLOSSARY_CHILDREN",
+          "Manage All Glossary Children",
+          "The ability to create and delete everything underneath this entity.");
+
+  // REST API Specific Privileges (not adding to lists of privileges above as those affect GraphQL
+  // as well)
+  public static final Privilege GET_TIMELINE_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMELINE_PRIVILEGE", "Get Timeline API", "The ability to use the GET Timeline API.");
+
+  public static final Privilege GET_ENTITY_PRIVILEGE =
+      Privilege.of(
+          "GET_ENTITY_PRIVILEGE",
+          "Get Entity + Relationships API",
+          "The ability to use the GET Entity and Relationships API.");
+
+  public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMESERIES_ASPECT_PRIVILEGE",
+          "Get Timeseries Aspect API",
+          "The ability to use the GET Timeseries Aspect API.");
+
+  public static final Privilege GET_COUNTS_PRIVILEGE =
+      Privilege.of(
+          "GET_COUNTS_PRIVILEGE",
+          "Get Aspect/Entity Count APIs",
+          "The ability to use the GET Aspect/Entity Count APIs.");
+
+  public static final Privilege RESTORE_INDICES_PRIVILEGE =
+      Privilege.of(
+          "RESTORE_INDICES_PRIVILEGE",
+          "Restore Indices API",
+          "The ability to use the Restore Indices API.");
+
+  public static final Privilege GET_TIMESERIES_INDEX_SIZES_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMESERIES_INDEX_SIZES_PRIVILEGE",
+          "Get Timeseries index sizes API",
+          "The ability to use the get Timeseries indices size API.");
+
+  public static final Privilege TRUNCATE_TIMESERIES_INDEX_PRIVILEGE =
+      Privilege.of(
+          "TRUNCATE_TIMESERIES_INDEX_PRIVILEGE",
+          "Truncate timeseries aspect index size API",
+          "The ability to use the API to truncate a timeseries index.");
+
+  public static final Privilege GET_ES_TASK_STATUS_PRIVILEGE =
+      Privilege.of(
+          "GET_ES_TASK_STATUS_PRIVILEGE",
+          "Get ES task status API",
+          "The ability to use the get task status API for an ElasticSearch task.");
+
+  public static final Privilege SEARCH_PRIVILEGE =
+      Privilege.of("SEARCH_PRIVILEGE", "Search API", "The ability to access search APIs.");
+
+  public static final Privilege SET_WRITEABLE_PRIVILEGE =
+      Privilege.of(
+          "SET_WRITEABLE_PRIVILEGE",
+          "Enable/Disable Writeability API",
+          "The ability to enable or disable GMS writeability for data migrations.");
+
+  public static final Privilege APPLY_RETENTION_PRIVILEGE =
+      Privilege.of(
+          "APPLY_RETENTION_PRIVILEGE",
+          "Apply Retention API",
+          "The ability to apply retention using the API.");
+
+  public static final Privilege PRODUCE_PLATFORM_EVENT_PRIVILEGE =
+      Privilege.of(
+          "PRODUCE_PLATFORM_EVENT_PRIVILEGE",
+          "Produce Platform Event API",
+          "The ability to produce Platform Events using the API.");
+
+  public static final ResourcePrivileges DATASET_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dataset",
+          "Datasets",
+          "Datasets indexed by DataHub",
+          Stream.of(
+                  COMMON_ENTITY_PRIVILEGES,
+                  ImmutableList.of(
+                      VIEW_DATASET_USAGE_PRIVILEGE,
+                      VIEW_DATASET_PROFILE_PRIVILEGE,
+                      EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE,
+                      EDIT_DATASET_COL_TAGS_PRIVILEGE,
+                      EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE,
+                      EDIT_ENTITY_ASSERTIONS_PRIVILEGE,
+                      EDIT_LINEAGE_PRIVILEGE,
+                      EDIT_ENTITY_EMBED_PRIVILEGE,
+ 
EDIT_QUERIES_PRIVILEGE)) + .flatMap(Collection::stream) + .collect(Collectors.toList())); // Charts Privileges - public static final ResourcePrivileges CHART_PRIVILEGES = ResourcePrivileges.of( - "chart", - "Charts", - "Charts indexed by DataHub", - Stream.concat( - COMMON_ENTITY_PRIVILEGES.stream(), - ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) - .collect(Collectors.toList()) - ); + public static final ResourcePrivileges CHART_PRIVILEGES = + ResourcePrivileges.of( + "chart", + "Charts", + "Charts indexed by DataHub", + Stream.concat( + COMMON_ENTITY_PRIVILEGES.stream(), + ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) + .collect(Collectors.toList())); // Dashboard Privileges - public static final ResourcePrivileges DASHBOARD_PRIVILEGES = ResourcePrivileges.of( - "dashboard", - "Dashboards", - "Dashboards indexed by DataHub", - Stream.concat( - COMMON_ENTITY_PRIVILEGES.stream(), - ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) - .collect(Collectors.toList()) - ); + public static final ResourcePrivileges DASHBOARD_PRIVILEGES = + ResourcePrivileges.of( + "dashboard", + "Dashboards", + "Dashboards indexed by DataHub", + Stream.concat( + COMMON_ENTITY_PRIVILEGES.stream(), + ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) + .collect(Collectors.toList())); // Data Doc Privileges - public static final ResourcePrivileges NOTEBOOK_PRIVILEGES = ResourcePrivileges.of( - "notebook", - "Notebook", - "Notebook indexed by DataHub", - COMMON_ENTITY_PRIVILEGES - ); + public static final ResourcePrivileges NOTEBOOK_PRIVILEGES = + ResourcePrivileges.of( + "notebook", "Notebook", "Notebook indexed by DataHub", COMMON_ENTITY_PRIVILEGES); // Data Flow Privileges - public static final ResourcePrivileges DATA_FLOW_PRIVILEGES = ResourcePrivileges.of( - "dataFlow", - "Data Pipelines", - "Data Pipelines indexed by DataHub", - COMMON_ENTITY_PRIVILEGES - ); + public static final ResourcePrivileges DATA_FLOW_PRIVILEGES = + ResourcePrivileges.of( + "dataFlow", + "Data Pipelines", + "Data Pipelines indexed by DataHub", + COMMON_ENTITY_PRIVILEGES); // Data Job Privileges - public static final ResourcePrivileges DATA_JOB_PRIVILEGES = ResourcePrivileges.of( - "dataJob", - "Data Tasks", - "Data Tasks indexed by DataHub", - Stream.concat( - COMMON_ENTITY_PRIVILEGES.stream(), - ImmutableList.of(EDIT_LINEAGE_PRIVILEGE).stream()) - .collect(Collectors.toList()) - ); + public static final ResourcePrivileges DATA_JOB_PRIVILEGES = + ResourcePrivileges.of( + "dataJob", + "Data Tasks", + "Data Tasks indexed by DataHub", + Stream.concat( + COMMON_ENTITY_PRIVILEGES.stream(), + ImmutableList.of(EDIT_LINEAGE_PRIVILEGE).stream()) + .collect(Collectors.toList())); // Tag Privileges - public static final ResourcePrivileges TAG_PRIVILEGES = ResourcePrivileges.of( - "tag", - "Tags", - "Tags indexed by DataHub", - ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_TAG_COLOR_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE) - ); + public static final ResourcePrivileges TAG_PRIVILEGES = + ResourcePrivileges.of( + "tag", + "Tags", + "Tags indexed by DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_TAG_COLOR_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE)); // Container Privileges - public static final ResourcePrivileges CONTAINER_PRIVILEGES = 
ResourcePrivileges.of( - "container", - "Containers", - "Containers indexed by DataHub", - COMMON_ENTITY_PRIVILEGES - ); + public static final ResourcePrivileges CONTAINER_PRIVILEGES = + ResourcePrivileges.of( + "container", "Containers", "Containers indexed by DataHub", COMMON_ENTITY_PRIVILEGES); // Domain Privileges - public static final Privilege MANAGE_DATA_PRODUCTS_PRIVILEGE = Privilege.of( - "MANAGE_DATA_PRODUCTS", - "Manage Data Products", - "The ability to create, edit, and delete Data Products within a Domain"); - + public static final Privilege MANAGE_DATA_PRODUCTS_PRIVILEGE = + Privilege.of( + "MANAGE_DATA_PRODUCTS", + "Manage Data Products", + "The ability to create, edit, and delete Data Products within a Domain"); // Domain Privileges - public static final ResourcePrivileges DOMAIN_PRIVILEGES = ResourcePrivileges.of( - "domain", - "Domains", - "Domains created on DataHub", - ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, MANAGE_DATA_PRODUCTS_PRIVILEGE) - ); + public static final ResourcePrivileges DOMAIN_PRIVILEGES = + ResourcePrivileges.of( + "domain", + "Domains", + "Domains created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + MANAGE_DATA_PRODUCTS_PRIVILEGE)); // Data Product Privileges - public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES = ResourcePrivileges.of( - "dataProduct", - "Data Products", - "Data Products created on DataHub", - ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, EDIT_ENTITY_TAGS_PRIVILEGE, - EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, EDIT_ENTITY_DOMAINS_PRIVILEGE) - ); + public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES = + ResourcePrivileges.of( + "dataProduct", + "Data Products", + "Data Products created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + EDIT_ENTITY_TAGS_PRIVILEGE, + EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, + EDIT_ENTITY_DOMAINS_PRIVILEGE)); // Glossary Term Privileges - public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES = ResourcePrivileges.of( - "glossaryTerm", - "Glossary Terms", - "Glossary Terms created on DataHub", - ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, - EDIT_ENTITY_DEPRECATION_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE) - ); + public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES = + ResourcePrivileges.of( + "glossaryTerm", + "Glossary Terms", + "Glossary Terms created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_DEPRECATION_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)); // Glossary Node Privileges - public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES = ResourcePrivileges.of( - "glossaryNode", - "Glossary Term Groups", - "Glossary Term Groups created on DataHub", - ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - 
EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, - EDIT_ENTITY_DEPRECATION_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE, - MANAGE_GLOSSARY_CHILDREN_PRIVILEGE, - MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE) - ); + public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES = + ResourcePrivileges.of( + "glossaryNode", + "Glossary Term Groups", + "Glossary Term Groups created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_DEPRECATION_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + MANAGE_GLOSSARY_CHILDREN_PRIVILEGE, + MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)); // Group Privileges - public static final ResourcePrivileges CORP_GROUP_PRIVILEGES = ResourcePrivileges.of( - "corpGroup", - "Groups", - "Groups on DataHub", - ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - EDIT_GROUP_MEMBERS_PRIVILEGE, - EDIT_CONTACT_INFO_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE) - ); + public static final ResourcePrivileges CORP_GROUP_PRIVILEGES = + ResourcePrivileges.of( + "corpGroup", + "Groups", + "Groups on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_GROUP_MEMBERS_PRIVILEGE, + EDIT_CONTACT_INFO_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)); // User Privileges - public static final ResourcePrivileges CORP_USER_PRIVILEGES = ResourcePrivileges.of( - "corpuser", - "Users", - "Users on DataHub", + public static final ResourcePrivileges CORP_USER_PRIVILEGES = + ResourcePrivileges.of( + "corpuser", + "Users", + "Users on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_CONTACT_INFO_PRIVILEGE, + EDIT_USER_PROFILE_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)); + + public static final List<ResourcePrivileges> ENTITY_RESOURCE_PRIVILEGES = ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_CONTACT_INFO_PRIVILEGE, - EDIT_USER_PROFILE_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE) - ); - - public static final List<ResourcePrivileges> ENTITY_RESOURCE_PRIVILEGES = ImmutableList.of( - DATASET_PRIVILEGES, - DASHBOARD_PRIVILEGES, - CHART_PRIVILEGES, - DATA_FLOW_PRIVILEGES, - DATA_JOB_PRIVILEGES, - TAG_PRIVILEGES, - CONTAINER_PRIVILEGES, - DOMAIN_PRIVILEGES, - GLOSSARY_TERM_PRIVILEGES, - GLOSSARY_NODE_PRIVILEGES, - CORP_GROUP_PRIVILEGES, - CORP_USER_PRIVILEGES, - NOTEBOOK_PRIVILEGES, - DATA_PRODUCT_PRIVILEGES - ); + DATASET_PRIVILEGES, + DASHBOARD_PRIVILEGES, + CHART_PRIVILEGES, + DATA_FLOW_PRIVILEGES, + DATA_JOB_PRIVILEGES, + TAG_PRIVILEGES, + CONTAINER_PRIVILEGES, + DOMAIN_PRIVILEGES, + GLOSSARY_TERM_PRIVILEGES, + GLOSSARY_NODE_PRIVILEGES, + CORP_GROUP_PRIVILEGES, + CORP_USER_PRIVILEGES, + NOTEBOOK_PRIVILEGES, + DATA_PRODUCT_PRIVILEGES); // Merge all entity specific resource privileges to create a superset of all resource privileges - public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES = ResourcePrivileges.of( - "all", - "All Types", - "All Types", - ENTITY_RESOURCE_PRIVILEGES.stream().flatMap(resourcePrivileges -> resourcePrivileges.getPrivileges().stream()).distinct().collect( - Collectors.toList()) - ); + public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES = + ResourcePrivileges.of( + "all", + "All Types", + "All Types", + ENTITY_RESOURCE_PRIVILEGES.stream() + .flatMap(resourcePrivileges -> resourcePrivileges.getPrivileges().stream()) + .distinct() + .collect(Collectors.toList())); public static final List<ResourcePrivileges> RESOURCE_PRIVILEGES = - 
ImmutableList.<ResourcePrivileges>builder().addAll(ENTITY_RESOURCE_PRIVILEGES) + ImmutableList.<ResourcePrivileges>builder() + .addAll(ENTITY_RESOURCE_PRIVILEGES) .add(ALL_RESOURCE_PRIVILEGES) .build(); @@ -593,9 +623,10 @@ static ResourcePrivileges of( String resourceTypeDisplayName, String resourceTypeDescription, List<Privilege> privileges) { - return new ResourcePrivileges(resourceType, resourceTypeDisplayName, resourceTypeDescription, privileges); + return new ResourcePrivileges( + resourceType, resourceTypeDisplayName, resourceTypeDescription, privileges); } } - private PoliciesConfig() { } + private PoliciesConfig() {} } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java b/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java index 2d4e355a93e53..00342ff2afbe2 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java @@ -9,31 +9,29 @@ import com.linkedin.r2.transport.common.bridge.client.TransportClientAdapter; import com.linkedin.r2.transport.http.client.HttpClientFactory; import com.linkedin.restli.client.RestClient; -import org.apache.commons.lang.StringUtils; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLParameters; import java.net.URI; import java.security.InvalidParameterException; import java.security.NoSuchAlgorithmException; import java.util.Collections; import java.util.HashMap; import java.util.Map; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLParameters; +import org.apache.commons.lang.StringUtils; public class DefaultRestliClientFactory { private static final String DEFAULT_REQUEST_TIMEOUT_IN_MS = "10000"; - private DefaultRestliClientFactory() { - } + private DefaultRestliClientFactory() {} @Nonnull - public static RestClient getRestLiD2Client(@Nonnull String restLiClientD2ZkHost, - @Nonnull String restLiClientD2ZkPath) { - final D2Client d2Client = new D2ClientBuilder() + public static RestClient getRestLiD2Client( + @Nonnull String restLiClientD2ZkHost, @Nonnull String restLiClientD2ZkPath) { + final D2Client d2Client = + new D2ClientBuilder() .setZkHosts(restLiClientD2ZkHost) .setBasePath(restLiClientD2ZkPath) .build(); @@ -42,18 +40,27 @@ public static RestClient getRestLiD2Client(@Nonnull String restLiClientD2ZkHost, } @Nonnull - public static RestClient getRestLiClient(@Nonnull String restLiServerHost, int restLiServerPort, boolean useSSL, - @Nullable String sslProtocol) { + public static RestClient getRestLiClient( + @Nonnull String restLiServerHost, + int restLiServerPort, + boolean useSSL, + @Nullable String sslProtocol) { return getRestLiClient(restLiServerHost, restLiServerPort, useSSL, sslProtocol, null); } @Nonnull - public static RestClient getRestLiClient(@Nonnull String restLiServerHost, int restLiServerPort, boolean useSSL, - @Nullable String sslProtocol, @Nullable Map<String, String> params) { + public static RestClient getRestLiClient( + @Nonnull String restLiServerHost, + int restLiServerPort, + boolean useSSL, + @Nullable String sslProtocol, + @Nullable Map<String, String> params) { return getRestLiClient( - URI.create(String.format("%s://%s:%s", useSSL ? 
"https" : "http", restLiServerHost, restLiServerPort)), - sslProtocol, - params); + URI.create( + String.format( + "%s://%s:%s", useSSL ? "https" : "http", restLiServerHost, restLiServerPort)), + sslProtocol, + params); } @Nonnull @@ -62,8 +69,10 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s } @Nonnull - public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String sslProtocol, - @Nullable Map<String, String> inputParams) { + public static RestClient getRestLiClient( + @Nonnull URI gmsUri, + @Nullable String sslProtocol, + @Nullable Map<String, String> inputParams) { if (StringUtils.isBlank(gmsUri.getHost()) || gmsUri.getPort() <= 0) { throw new InvalidParameterException("Invalid restli server host name or port!"); } @@ -82,7 +91,7 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s SSLParameters sslParameters = new SSLParameters(); if (sslProtocol != null) { - sslParameters.setProtocols(new String[]{sslProtocol}); + sslParameters.setProtocols(new String[] {sslProtocol}); } params.put(HttpClientFactory.HTTP_SSL_PARAMS, sslParameters); } @@ -90,7 +99,8 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s return getHttpRestClient(gmsUri, params); } - private static RestClient getHttpRestClient(@Nonnull URI gmsUri, @Nonnull Map<String, Object> params) { + private static RestClient getHttpRestClient( + @Nonnull URI gmsUri, @Nonnull Map<String, Object> params) { Map<String, Object> finalParams = new HashMap<>(); finalParams.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, DEFAULT_REQUEST_TIMEOUT_IN_MS); finalParams.putAll(params); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java index 09220bb481a03..737f79dc1c441 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java @@ -11,7 +11,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class RestliUtil { private RestliUtil() { @@ -19,8 +18,9 @@ private RestliUtil() { } /** - * Executes the provided supplier and convert the results to a {@link Task}. - * Exceptions thrown during the execution will be properly wrapped in {@link RestLiServiceException}. + * Executes the provided supplier and convert the results to a {@link Task}. Exceptions thrown + * during the execution will be properly wrapped in {@link RestLiServiceException}. 
+   *
   * @param supplier The supplier to execute
   * @return A parseq {@link Task}
   */
@@ -31,7 +31,8 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) {
     } catch (Throwable throwable) {
 
       // Convert IllegalArgumentException to BAD REQUEST
-      if (throwable instanceof IllegalArgumentException || throwable.getCause() instanceof IllegalArgumentException) {
+      if (throwable instanceof IllegalArgumentException
+          || throwable.getCause() instanceof IllegalArgumentException) {
         throwable = badRequestException(throwable.getMessage());
       }
 
@@ -47,20 +48,24 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) {
   public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier, String metricName) {
     Timer.Context context = MetricUtils.timer(metricName).time();
     // Stop timer on success and failure
-    return toTask(supplier).transform(orig -> {
-      context.stop();
-      if (orig.isFailed()) {
-        MetricUtils.counter(MetricRegistry.name(metricName, "failed")).inc();
-      } else {
-        MetricUtils.counter(MetricRegistry.name(metricName, "success")).inc();
-      }
-      return orig;
-    });
+    return toTask(supplier)
+        .transform(
+            orig -> {
+              context.stop();
+              if (orig.isFailed()) {
+                MetricUtils.counter(MetricRegistry.name(metricName, "failed")).inc();
+              } else {
+                MetricUtils.counter(MetricRegistry.name(metricName, "success")).inc();
+              }
+              return orig;
+            });
   }
 
   /**
-   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} instead.
-   * A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the optional is emtpy.
+   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional}
+   * instead. A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the
+   * optional is empty.
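+   * <p>Behavior sketch (illustrative): a supplier yielding {@code Optional.of(value)} resolves
+   * the task to {@code value}, while {@code Optional.empty()} fails it with HTTP status 404.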
+ * * @param supplier The supplier to execute * @return A parseq {@link Task} */ diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java index 913def2a040f4..5f3975b066fde 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java @@ -1,21 +1,19 @@ package com.linkedin.metadata.utils; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; -import lombok.extern.slf4j.Slf4j; - import java.time.Clock; - -import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; +import lombok.extern.slf4j.Slf4j; @Slf4j public class AuditStampUtils { - private AuditStampUtils() { - } + private AuditStampUtils() {} - public static AuditStamp createDefaultAuditStamp() { - return new AuditStamp() - .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) - .setTime(Clock.systemUTC().millis()); - } + public static AuditStamp createDefaultAuditStamp() { + return new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(Clock.systemUTC().millis()); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java index 3b2116fa65127..f03d4c76c70d8 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java @@ -8,27 +8,29 @@ import com.linkedin.metadata.query.BrowseResultMetadata; import java.util.stream.Collectors; - public class BrowseUtil { - private BrowseUtil() { - } + private BrowseUtil() {} - public static com.linkedin.metadata.query.BrowseResult convertToLegacyResult(BrowseResult browseResult) { - com.linkedin.metadata.query.BrowseResult legacyResult = new com.linkedin.metadata.query.BrowseResult(); + public static com.linkedin.metadata.query.BrowseResult convertToLegacyResult( + BrowseResult browseResult) { + com.linkedin.metadata.query.BrowseResult legacyResult = + new com.linkedin.metadata.query.BrowseResult(); legacyResult.setFrom(browseResult.getFrom()); legacyResult.setPageSize(browseResult.getPageSize()); legacyResult.setNumEntities(browseResult.getNumEntities()); - legacyResult.setEntities(new BrowseResultEntityArray(browseResult.getEntities() - .stream() - .map(entity -> new BrowseResultEntity(entity.data())) - .collect(Collectors.toList()))); + legacyResult.setEntities( + new BrowseResultEntityArray( + browseResult.getEntities().stream() + .map(entity -> new BrowseResultEntity(entity.data())) + .collect(Collectors.toList()))); BrowseResultMetadata legacyMetadata = new BrowseResultMetadata(); - legacyMetadata.setGroups(new BrowseResultGroupArray(browseResult.getGroups() - .stream() - .map(group -> new BrowseResultGroup(group.data())) - .collect(Collectors.toList()))); + legacyMetadata.setGroups( + new BrowseResultGroupArray( + browseResult.getGroups().stream() + .map(group -> new BrowseResultGroup(group.data())) + .collect(Collectors.toList()))); legacyMetadata.setPath(browseResult.getMetadata().getPath()); legacyMetadata.setTotalNumEntities(browseResult.getMetadata().getTotalNumEntities()); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java index 
551683153aadd..32422b2a2d4a8 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java
@@ -11,62 +11,79 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class ConcurrencyUtils {
-  private ConcurrencyUtils() {
-  }
+  private ConcurrencyUtils() {}
 
-  public static <O, T> List<T> transformAndCollectAsync(List<O> originalList, Function<O, T> transformer) {
+  public static <O, T> List<T> transformAndCollectAsync(
+      List<O> originalList, Function<O, T> transformer) {
     return transformAndCollectAsync(originalList, transformer, Collectors.toList());
   }
 
   /**
-   * Transforms original list into the final list using the function transformer in an asynchronous fashion
-   * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end
+   * Transforms original list into the final list using the function transformer in an asynchronous
+   * fashion i.e. each element transform is run as a separate CompletableFuture and then joined at
+   * the end
    */
-  public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync(Collection<O> originalCollection,
-      Function<O, T> transformer, Collector<T, ?, OUTPUT> collector) {
+  public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync(
+      Collection<O> originalCollection,
+      Function<O, T> transformer,
+      Collector<T, ?, OUTPUT> collector) {
     return originalCollection.stream()
         .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element)))
-        .collect(Collectors.collectingAndThen(Collectors.toList(),
-            completableFutureList -> completableFutureList.stream().map(CompletableFuture::join)))
+        .collect(
+            Collectors.collectingAndThen(
+                Collectors.toList(),
+                completableFutureList ->
+                    completableFutureList.stream().map(CompletableFuture::join)))
         .collect(collector);
   }
 
-
   /**
-   * Transforms original list into the final list using the function transformer in an asynchronous fashion
-   * with exceptions handled by the input exceptionHandler
-   * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end
+   * Transforms original list into the final list using the function transformer in an asynchronous
+   * fashion with exceptions handled by the input exceptionHandler i.e. each element transform is
+   * run as a separate CompletableFuture and then joined at the end
    */
-  public static <O, T> List<T> transformAndCollectAsync(List<O> originalList, Function<O, T> transformer,
+  public static <O, T> List<T> transformAndCollectAsync(
+      List<O> originalList,
+      Function<O, T> transformer,
       BiFunction<O, Throwable, ? extends T> exceptionHandler) {
-    return transformAndCollectAsync(originalList, transformer, exceptionHandler, Collectors.toList());
+    return transformAndCollectAsync(
+        originalList, transformer, exceptionHandler, Collectors.toList());
   }
 
   /**
-   * Transforms original list into the final list using the function transformer in an asynchronous fashion
-   * with exceptions handled by the input exceptionHandler
-   * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end
+   * Transforms original list into the final list using the function transformer in an asynchronous
+   * fashion with exceptions handled by the input exceptionHandler i.e. each element transform is
+   * run as a separate CompletableFuture and then joined at the end
    */
-  public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync(Collection<O> originalCollection,
-      Function<O, T> transformer, BiFunction<O, Throwable, ? extends T> exceptionHandler, Collector<T, ?, OUTPUT> collector) {
+  public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync(
+      Collection<O> originalCollection,
+      Function<O, T> transformer,
+      BiFunction<O, Throwable, ? extends T> exceptionHandler,
+      Collector<T, ?, OUTPUT> collector) {
     return originalCollection.stream()
-        .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element))
-            .exceptionally(e -> exceptionHandler.apply(element, e)))
+        .map(
+            element ->
+                CompletableFuture.supplyAsync(() -> transformer.apply(element))
+                    .exceptionally(e -> exceptionHandler.apply(element, e)))
         .filter(Objects::nonNull)
-        .collect(Collectors.collectingAndThen(Collectors.toList(),
-            completableFutureList -> completableFutureList.stream().map(CompletableFuture::join)))
+        .collect(
+            Collectors.collectingAndThen(
+                Collectors.toList(),
+                completableFutureList ->
+                    completableFutureList.stream().map(CompletableFuture::join)))
         .collect(collector);
   }
 
   /**
-   * Wait for a list of futures to end with a timeout and only return results that were returned before the timeout
-   * expired
+   * Wait for a list of futures to end with a timeout and only return results that were returned
+   * before the timeout expired
    */
-  public static <T> List<T> getAllCompleted(List<CompletableFuture<T>> futuresList, long timeout, TimeUnit unit) {
-    CompletableFuture<Void> allFuturesResult = CompletableFuture.allOf(futuresList.toArray(new CompletableFuture[0]));
+  public static <T> List<T> getAllCompleted(
+      List<CompletableFuture<T>> futuresList, long timeout, TimeUnit unit) {
+    CompletableFuture<Void> allFuturesResult =
+        CompletableFuture.allOf(futuresList.toArray(new CompletableFuture[0]));
     try {
       allFuturesResult.get(timeout, unit);
     } catch (Exception e) {
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java
index 441661497cadc..a40cf4da7abbc 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java
@@ -19,17 +19,16 @@
 import java.util.Optional;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class DataPlatformInstanceUtils {
-  private DataPlatformInstanceUtils() {
-  }
+  private DataPlatformInstanceUtils() {}
 
   private static DataPlatformUrn getPlatformUrn(String name) {
     return new DataPlatformUrn(name.toLowerCase());
   }
 
-  private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyAspect) throws URISyntaxException {
+  private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyAspect)
+      throws URISyntaxException {
     switch (entityType) {
       case "dataset":
         return ((DatasetKey) keyAspect).getPlatform();
@@ -40,7 +39,8 @@ private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyA
       case "dataFlow":
         return getPlatformUrn(((DataFlowKey) keyAspect).getOrchestrator());
       case "dataJob":
-        return getPlatformUrn(DataFlowUrn.createFromUrn(((DataJobKey) keyAspect).getFlow()).getOrchestratorEntity());
+        return getPlatformUrn(
+            DataFlowUrn.createFromUrn(((DataJobKey) keyAspect).getFlow()).getOrchestratorEntity());
       case "dataProcess":
         return
getPlatformUrn(((DataProcessKey) keyAspect).getOrchestrator()); case "mlModel": @@ -52,17 +52,23 @@ private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyA case "mlModelGroup": return ((MLModelGroupKey) keyAspect).getPlatform(); default: - log.debug(String.format("Failed to generate default platform for unknown entity type %s", entityType)); + log.debug( + String.format( + "Failed to generate default platform for unknown entity type %s", entityType)); return null; } } - public static Optional<DataPlatformInstance> buildDataPlatformInstance(String entityType, RecordTemplate keyAspect) { + public static Optional<DataPlatformInstance> buildDataPlatformInstance( + String entityType, RecordTemplate keyAspect) { try { return Optional.ofNullable(getDefaultDataPlatform(entityType, keyAspect)) .map(platform -> new DataPlatformInstance().setPlatform(platform)); } catch (URISyntaxException e) { - log.error("Failed to generate data platform instance for entity {}, keyAspect {}", entityType, keyAspect); + log.error( + "Failed to generate data platform instance for entity {}, keyAspect {}", + entityType, + keyAspect); return Optional.empty(); } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java index 2ad2d5028ba7d..161b0f845f7e2 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java @@ -16,17 +16,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - @Slf4j public class EntityKeyUtils { private static final Logger logger = LoggerFactory.getLogger(EntityKeyUtils.class); - private EntityKeyUtils() { - } + private EntityKeyUtils() {} @Nonnull - public static Urn getUrnFromProposal(MetadataChangeProposal metadataChangeProposal, AspectSpec keyAspectSpec) { + public static Urn getUrnFromProposal( + MetadataChangeProposal metadataChangeProposal, AspectSpec keyAspectSpec) { if (metadataChangeProposal.hasEntityUrn()) { Urn urn = metadataChangeProposal.getEntityUrn(); @@ -39,11 +38,13 @@ public static Urn getUrnFromProposal(MetadataChangeProposal metadataChangePropos return urn; } if (metadataChangeProposal.hasEntityKeyAspect()) { - RecordTemplate keyAspectRecord = GenericRecordUtils.deserializeAspect( + RecordTemplate keyAspectRecord = + GenericRecordUtils.deserializeAspect( metadataChangeProposal.getEntityKeyAspect().getValue(), metadataChangeProposal.getEntityKeyAspect().getContentType(), keyAspectSpec); - return EntityKeyUtils.convertEntityKeyToUrn(keyAspectRecord, metadataChangeProposal.getEntityType()); + return EntityKeyUtils.convertEntityKeyToUrn( + keyAspectRecord, metadataChangeProposal.getEntityType()); } throw new IllegalArgumentException("One of urn and keyAspect must be set"); } @@ -61,39 +62,46 @@ public static Urn getUrnFromLog(MetadataChangeLog metadataChangeLog, AspectSpec return urn; } if (metadataChangeLog.hasEntityKeyAspect()) { - RecordTemplate keyAspectRecord = GenericRecordUtils.deserializeAspect( - metadataChangeLog.getEntityKeyAspect().getValue(), - metadataChangeLog.getEntityKeyAspect().getContentType(), - keyAspectSpec); - return EntityKeyUtils.convertEntityKeyToUrn(keyAspectRecord, metadataChangeLog.getEntityType()); + RecordTemplate keyAspectRecord = + GenericRecordUtils.deserializeAspect( + metadataChangeLog.getEntityKeyAspect().getValue(), + metadataChangeLog.getEntityKeyAspect().getContentType(), + 
keyAspectSpec); + return EntityKeyUtils.convertEntityKeyToUrn( + keyAspectRecord, metadataChangeLog.getEntityType()); } throw new IllegalArgumentException("One of urn and keyAspect must be set"); } /** - * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given - * the urn & the {@link AspectSpec} of the key. + * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given the urn + * & the {@link AspectSpec} of the key. * - * Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the - * number of urn key parts does not match the number of fields in the key schema, an {@link IllegalArgumentException} will be thrown. + * <p>Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the + * number of urn key parts does not match the number of fields in the key schema, an {@link + * IllegalArgumentException} will be thrown. * * @param urn raw entity urn * @param keyAspectSpec key aspect spec - * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of - * the provided key schema in order. - * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema (field number or type mismatch) + * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of the + * provided key schema in order. + * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema + * (field number or type mismatch) */ @Nonnull - public static RecordTemplate convertUrnToEntityKey(@Nonnull final Urn urn, @Nonnull final AspectSpec keyAspectSpec) { + public static RecordTemplate convertUrnToEntityKey( + @Nonnull final Urn urn, @Nonnull final AspectSpec keyAspectSpec) { RecordDataSchema keySchema = keyAspectSpec.getPegasusSchema(); // #1. Ensure we have a class to bind into. - Class<? extends RecordTemplate> clazz = keyAspectSpec.getDataTemplateClass().asSubclass(RecordTemplate.class); + Class<? extends RecordTemplate> clazz = + keyAspectSpec.getDataTemplateClass().asSubclass(RecordTemplate.class); // #2. Bind fields into a DataMap if (urn.getEntityKey().getParts().size() != keySchema.getFields().size()) { throw new IllegalArgumentException( - "Failed to convert urn to entity key: urns parts and key fields do not have same length for " + urn); + "Failed to convert urn to entity key: urns parts and key fields do not have same length for " + + urn); } final DataMap dataMap = new DataMap(); for (int i = 0; i < urn.getEntityKey().getParts().size(); i++) { @@ -107,28 +115,35 @@ public static RecordTemplate convertUrnToEntityKey(@Nonnull final Urn urn, @Nonn try { constructor = clazz.getConstructor(DataMap.class); return constructor.newInstance(dataMap); - } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + } catch (NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new IllegalArgumentException( - String.format("Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", + String.format( + "Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", clazz.getName())); } } /** - * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given - * the urn & the {@link RecordDataSchema} of the key. 
+ * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given the urn + * & the {@link RecordDataSchema} of the key. * - * Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the - * number of urn key parts does not match the number of fields in the key schema, an {@link IllegalArgumentException} will be thrown. + * <p>Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the + * number of urn key parts does not match the number of fields in the key schema, an {@link + * IllegalArgumentException} will be thrown. * * @param urn raw entity urn * @param keySchema schema of the entity key - * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of - * the provided key schema in order. - * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema (field number or type mismatch) + * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of the + * provided key schema in order. + * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema + * (field number or type mismatch) */ @Nonnull - public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn urn, @Nonnull final RecordDataSchema keySchema) { + public static RecordTemplate convertUrnToEntityKeyInternal( + @Nonnull final Urn urn, @Nonnull final RecordDataSchema keySchema) { // #1. Ensure we have a class to bind into. Class<? extends RecordTemplate> clazz; @@ -136,8 +151,10 @@ public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn ur clazz = Class.forName(keySchema.getFullName()).asSubclass(RecordTemplate.class); } catch (ClassNotFoundException e) { throw new IllegalArgumentException( - String.format("Failed to find RecordTemplate class associated with provided RecordDataSchema named %s", - keySchema.getFullName()), e); + String.format( + "Failed to find RecordTemplate class associated with provided RecordDataSchema named %s", + keySchema.getFullName()), + e); } // #2. Bind fields into a DataMap @@ -157,29 +174,37 @@ public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn ur try { constructor = clazz.getConstructor(DataMap.class); return constructor.newInstance(dataMap); - } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + } catch (NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new IllegalArgumentException( - String.format("Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", + String.format( + "Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", clazz.getName())); } } /** - * Implicitly converts an Entity Key {@link RecordTemplate} into the corresponding {@link Urn} string. + * Implicitly converts an Entity Key {@link RecordTemplate} into the corresponding {@link Urn} + * string. * - * Parts of the key record are bound into fields in the urn based on field <b>index</b>. + * <p>Parts of the key record are bound into fields in the urn based on field <b>index</b>. * * @param keyAspect a {@link RecordTemplate} representing the key. * @param entityName name of the entity to use during Urn construction * @return an {@link Urn} created by binding the fields of the key aspect to an Urn. 
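+   * <p>For example (mirroring {@code EntityKeyUtilsTest} later in this change): a key with parts
+   * {@code (part1, urn:li:testEntity2:part2, VALUE_1)} and entity name {@code testEntity1} binds
+   * to {@code urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)}.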
*/ @Nonnull - public static Urn convertEntityKeyToUrn(@Nonnull final RecordTemplate keyAspect, @Nonnull final String entityName) { + public static Urn convertEntityKeyToUrn( + @Nonnull final RecordTemplate keyAspect, @Nonnull final String entityName) { final List<String> urnParts = new ArrayList<>(); for (RecordDataSchema.Field field : keyAspect.schema().getFields()) { Object value = keyAspect.data().get(field.getName()); String valueString = value == null ? "" : value.toString(); - urnParts.add(valueString); // TODO: Determine whether all fields, including urns, should be URL encoded. + urnParts.add( + valueString); // TODO: Determine whether all fields, including urns, should be URL + // encoded. } return Urn.createFromTuple(entityName, urnParts); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java index 3ef415b4d31be..fc28367e6c7ee 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java @@ -1,54 +1,51 @@ package com.linkedin.metadata.utils; +import com.datahub.util.RecordUtils; import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.GenericPayload; import java.nio.charset.StandardCharsets; import javax.annotation.Nonnull; - public class GenericRecordUtils { public static final String JSON = "application/json"; - private GenericRecordUtils() { - } + private GenericRecordUtils() {} - /** - * Deserialize the given value into the aspect based on the input aspectSpec - */ + /** Deserialize the given value into the aspect based on the input aspectSpec */ @Nonnull - public static RecordTemplate deserializeAspect(@Nonnull ByteString aspectValue, @Nonnull String contentType, + public static RecordTemplate deserializeAspect( + @Nonnull ByteString aspectValue, + @Nonnull String contentType, @Nonnull AspectSpec aspectSpec) { return deserializeAspect(aspectValue, contentType, aspectSpec.getDataTemplateClass()); } @Nonnull - public static <T extends RecordTemplate> T deserializeAspect(@Nonnull ByteString aspectValue, - @Nonnull String contentType, @Nonnull Class<T> clazz) { + public static <T extends RecordTemplate> T deserializeAspect( + @Nonnull ByteString aspectValue, @Nonnull String contentType, @Nonnull Class<T> clazz) { if (!contentType.equals(JSON)) { - throw new IllegalArgumentException(String.format("%s content type is not supported", contentType)); + throw new IllegalArgumentException( + String.format("%s content type is not supported", contentType)); } return RecordUtils.toRecordTemplate(clazz, aspectValue.asString(StandardCharsets.UTF_8)); } @Nonnull public static <T extends RecordTemplate> T deserializePayload( - @Nonnull ByteString payloadValue, - @Nonnull String contentType, - @Nonnull Class<T> clazz) { + @Nonnull ByteString payloadValue, @Nonnull String contentType, @Nonnull Class<T> clazz) { if (!contentType.equals(JSON)) { - throw new IllegalArgumentException(String.format("%s content type is not supported", contentType)); + throw new IllegalArgumentException( + String.format("%s content type is not supported", contentType)); } return RecordUtils.toRecordTemplate(clazz, payloadValue.asString(StandardCharsets.UTF_8)); } @Nonnull public static <T extends 
RecordTemplate> T deserializePayload( - @Nonnull ByteString payloadValue, - @Nonnull Class<T> clazz) { + @Nonnull ByteString payloadValue, @Nonnull Class<T> clazz) { return deserializePayload(payloadValue, JSON, clazz); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java index d923005c8c023..d9a4768ada05f 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java @@ -1,36 +1,38 @@ package com.linkedin.metadata.utils; +import javax.annotation.Nonnull; import org.json.JSONException; import org.json.JSONObject; -import javax.annotation.Nonnull; - - public class IngestionUtils { private static final String PIPELINE_NAME = "pipeline_name"; - private IngestionUtils() { - } + private IngestionUtils() {} /** - * Injects a pipeline_name into a recipe if there isn't a pipeline_name already there. - * The pipeline_name will be the urn of the ingestion source. + * Injects a pipeline_name into a recipe if there isn't a pipeline_name already there. The + * pipeline_name will be the urn of the ingestion source. * * @param pipelineName the new pipeline name in the recipe. * @return a modified recipe JSON string */ - public static String injectPipelineName(@Nonnull String originalJson, @Nonnull final String pipelineName) { + public static String injectPipelineName( + @Nonnull String originalJson, @Nonnull final String pipelineName) { try { final JSONObject jsonRecipe = new JSONObject(originalJson); - boolean hasPipelineName = jsonRecipe.has(PIPELINE_NAME) && jsonRecipe.get(PIPELINE_NAME) != null && !jsonRecipe.get(PIPELINE_NAME).equals(""); + boolean hasPipelineName = + jsonRecipe.has(PIPELINE_NAME) + && jsonRecipe.get(PIPELINE_NAME) != null + && !jsonRecipe.get(PIPELINE_NAME).equals(""); if (!hasPipelineName) { jsonRecipe.put(PIPELINE_NAME, pipelineName); return jsonRecipe.toString(); } } catch (JSONException e) { - throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided.", e); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided.", e); } return originalJson; } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java index 9794d101ecda9..cde83c1382283 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java @@ -17,49 +17,66 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * Static utility class providing methods for extracting entity metadata from Pegasus models. - */ +/** Static utility class providing methods for extracting entity metadata from Pegasus models. 
*/ @Slf4j public class PegasusUtils { - private PegasusUtils() { - } + private PegasusUtils() {} public static String getEntityNameFromSchema(final RecordDataSchema entitySnapshotSchema) { - final Object entityAnnotationObj = entitySnapshotSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { - return EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotSchema.getFullName()).getName(); + return EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract entity name from provided schema %s", entitySnapshotSchema.getName())); + log.error( + String.format( + "Failed to extract entity name from provided schema %s", + entitySnapshotSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract entity name from provided schema %s", entitySnapshotSchema.getName())); + String.format( + "Failed to extract entity name from provided schema %s", + entitySnapshotSchema.getName())); } // TODO: Figure out a better iteration strategy. - public static String getAspectNameFromFullyQualifiedName(final String fullyQualifiedRecordTemplateName) { - final RecordTemplate template = RecordUtils.toRecordTemplate(fullyQualifiedRecordTemplateName, new DataMap()); + public static String getAspectNameFromFullyQualifiedName( + final String fullyQualifiedRecordTemplateName) { + final RecordTemplate template = + RecordUtils.toRecordTemplate(fullyQualifiedRecordTemplateName, new DataMap()); final RecordDataSchema aspectSchema = template.schema(); return getAspectNameFromSchema(aspectSchema); } public static String getAspectNameFromSchema(final RecordDataSchema aspectSchema) { - final Object aspectAnnotationObj = aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { - return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()).getName(); + return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + log.error( + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); } - public static <T> Class<? extends T> getDataTemplateClassFromSchema(final NamedDataSchema schema, final Class<T> clazz) { + public static <T> Class<? 
extends T> getDataTemplateClassFromSchema( + final NamedDataSchema schema, final Class<T> clazz) { try { - return Class.forName(schema.getFullName()).asSubclass(clazz); + return Class.forName(schema.getFullName()).asSubclass(clazz); } catch (ClassNotFoundException e) { - log.error("Unable to find class for RecordDataSchema named " + schema.getFullName() + " " + e.getMessage()); - throw new ModelConversionException("Unable to find class for RecordDataSchema named " + schema.getFullName(), e); + log.error( + "Unable to find class for RecordDataSchema named " + + schema.getFullName() + + " " + + e.getMessage()); + throw new ModelConversionException( + "Unable to find class for RecordDataSchema named " + schema.getFullName(), e); } } @@ -67,9 +84,17 @@ public static String urnToEntityName(final Urn urn) { return urn.getEntityType(); } - public static MetadataChangeLog constructMCL(@Nullable MetadataChangeProposal base, String entityName, Urn urn, ChangeType changeType, - String aspectName, AuditStamp auditStamp, RecordTemplate newAspectValue, SystemMetadata newSystemMetadata, - RecordTemplate oldAspectValue, SystemMetadata oldSystemMetadata) { + public static MetadataChangeLog constructMCL( + @Nullable MetadataChangeProposal base, + String entityName, + Urn urn, + ChangeType changeType, + String aspectName, + AuditStamp auditStamp, + RecordTemplate newAspectValue, + SystemMetadata newSystemMetadata, + RecordTemplate oldAspectValue, + SystemMetadata oldSystemMetadata) { final MetadataChangeLog metadataChangeLog; if (base != null) { metadataChangeLog = new MetadataChangeLog(new DataMap(base.data())); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java index 69bd3b461eb12..35e15c1e5b693 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java @@ -8,23 +8,19 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.FilterValue; - +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.net.URISyntaxException; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilders; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - - @Slf4j public class SearchUtil { public static final String AGGREGATION_SEPARATOR_CHAR = "␞"; @@ -33,23 +29,29 @@ public class SearchUtil { private static final String URN_PREFIX = "urn:"; private static final String REMOVED = "removed"; - private SearchUtil() { - } + private SearchUtil() {} /* * @param aggregations the aggregations coming back from elasticsearch combined with the filters from the search request * @param filteredValues the set of values provided by the search request */ - public static List<FilterValue> convertToFilters(Map<String, Long> aggregations, Set<String> filteredValues) { - return aggregations.entrySet().stream().map(entry -> { - return createFilterValue(entry.getKey(), entry.getValue(), filteredValues.contains(entry.getKey())); - }).sorted(Comparator.comparingLong(value -> 
-value.getFacetCount())).collect(Collectors.toList()); + public static List<FilterValue> convertToFilters( + Map<String, Long> aggregations, Set<String> filteredValues) { + return aggregations.entrySet().stream() + .map( + entry -> { + return createFilterValue( + entry.getKey(), entry.getValue(), filteredValues.contains(entry.getKey())); + }) + .sorted(Comparator.comparingLong(value -> -value.getFacetCount())) + .collect(Collectors.toList()); } public static FilterValue createFilterValue(String value, Long facetCount, Boolean isFilteredOn) { // TODO(indy): test this String[] aggregationTokens = value.split(AGGREGATION_SEPARATOR_CHAR); - FilterValue result = new FilterValue().setValue(value).setFacetCount(facetCount).setFiltered(isFilteredOn); + FilterValue result = + new FilterValue().setValue(value).setFacetCount(facetCount).setFiltered(isFilteredOn); String lastValue = aggregationTokens[aggregationTokens.length - 1]; if (lastValue.startsWith(URN_PREFIX)) { try { @@ -61,56 +63,77 @@ public static FilterValue createFilterValue(String value, Long facetCount, Boole return result; } - private static Criterion transformEntityTypeCriterion(Criterion criterion, IndexConvention indexConvention) { - return criterion.setField("_index").setValues( - new StringArray(criterion.getValues().stream().map(value -> String.join("", value.split("_"))) - .map(indexConvention::getEntityIndexName) - .collect(Collectors.toList()))) - .setValue(indexConvention.getEntityIndexName(String.join("", criterion.getValue().split("_")))); + private static Criterion transformEntityTypeCriterion( + Criterion criterion, IndexConvention indexConvention) { + return criterion + .setField("_index") + .setValues( + new StringArray( + criterion.getValues().stream() + .map(value -> String.join("", value.split("_"))) + .map(indexConvention::getEntityIndexName) + .collect(Collectors.toList()))) + .setValue( + indexConvention.getEntityIndexName(String.join("", criterion.getValue().split("_")))); } - private static ConjunctiveCriterion transformConjunctiveCriterion(ConjunctiveCriterion conjunctiveCriterion, - IndexConvention indexConvention) { - return new ConjunctiveCriterion().setAnd( - conjunctiveCriterion.getAnd().stream().map( - criterion -> criterion.getField().equalsIgnoreCase(INDEX_VIRTUAL_FIELD) - ? transformEntityTypeCriterion(criterion, indexConvention) - : criterion) - .collect(Collectors.toCollection(CriterionArray::new))); + private static ConjunctiveCriterion transformConjunctiveCriterion( + ConjunctiveCriterion conjunctiveCriterion, IndexConvention indexConvention) { + return new ConjunctiveCriterion() + .setAnd( + conjunctiveCriterion.getAnd().stream() + .map( + criterion -> + criterion.getField().equalsIgnoreCase(INDEX_VIRTUAL_FIELD) + ? 
transformEntityTypeCriterion(criterion, indexConvention) + : criterion) + .collect(Collectors.toCollection(CriterionArray::new))); } - private static ConjunctiveCriterionArray transformConjunctiveCriterionArray(ConjunctiveCriterionArray criterionArray, - IndexConvention indexConvention) { + private static ConjunctiveCriterionArray transformConjunctiveCriterionArray( + ConjunctiveCriterionArray criterionArray, IndexConvention indexConvention) { return new ConjunctiveCriterionArray( - criterionArray.stream().map( - conjunctiveCriterion -> transformConjunctiveCriterion(conjunctiveCriterion, indexConvention)) + criterionArray.stream() + .map( + conjunctiveCriterion -> + transformConjunctiveCriterion(conjunctiveCriterion, indexConvention)) .collect(Collectors.toList())); } /** - * Allows filtering on entities which are stored as different indices under the hood by transforming the tag - * _entityType to _index and updating the type to the index name. + * Allows filtering on entities which are stored as different indices under the hood by + * transforming the tag _entityType to _index and updating the type to the index name. * - * @param filter The filter to parse and transform if needed + * @param filter The filter to parse and transform if needed * @param indexConvention The index convention used to generate the index name for an entity * @return A filter, with the changes if necessary */ - public static Filter transformFilterForEntities(Filter filter, @Nonnull IndexConvention indexConvention) { + public static Filter transformFilterForEntities( + Filter filter, @Nonnull IndexConvention indexConvention) { if (filter != null && filter.getOr() != null) { - return new Filter().setOr(transformConjunctiveCriterionArray(filter.getOr(), indexConvention)); + return new Filter() + .setOr(transformConjunctiveCriterionArray(filter.getOr(), indexConvention)); } return filter; } /** - * Applies a default filter to remove entities that are soft deleted only if there isn't a filter for the REMOVED field already + * Applies a default filter to remove entities that are soft deleted only if there isn't a filter + * for the REMOVED field already */ - public static BoolQueryBuilder filterSoftDeletedByDefault(@Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { + public static BoolQueryBuilder filterSoftDeletedByDefault( + @Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { boolean removedInOrFilter = false; if (filter != null) { - removedInOrFilter = filter.getOr().stream().anyMatch( - or -> or.getAnd().stream().anyMatch(criterion -> criterion.getField().equals(REMOVED) || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX)) - ); + removedInOrFilter = + filter.getOr().stream() + .anyMatch( + or -> + or.getAnd().stream() + .anyMatch( + criterion -> + criterion.getField().equals(REMOVED) + || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX))); } if (!removedInOrFilter) { filterQuery.mustNot(QueryBuilders.matchQuery(REMOVED, true)); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java index f4be950575624..b0f42231b27f3 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java @@ -7,12 +7,11 @@ @Slf4j public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} - public static 
SystemMetadata createDefaultSystemMetadata() { - return new SystemMetadata() - .setRunId(Constants.DEFAULT_RUN_ID) - .setLastObserved(System.currentTimeMillis()); - } + public static SystemMetadata createDefaultSystemMetadata() { + return new SystemMetadata() + .setRunId(Constants.DEFAULT_RUN_ID) + .setLastObserved(System.currentTimeMillis()); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java index 4179345370007..4a3f78fcef7bd 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java @@ -6,10 +6,7 @@ import java.util.Optional; import javax.annotation.Nonnull; - -/** - * The convention for naming search indices - */ +/** The convention for naming search indices */ public interface IndexConvention { Optional<String> getPrefix(); @@ -36,6 +33,7 @@ public interface IndexConvention { /** * Inverse of getEntityIndexName + * * @param indexName The index name to parse * @return a string, the entity name that that index is for, or empty if one cannot be extracted */ @@ -43,9 +41,10 @@ public interface IndexConvention { /** * Inverse of getEntityIndexName + * * @param timeseriesAspectIndexName The index name to parse - * @return a pair of strings, the entity name and the aspect name that that index is for, - * or empty if one cannot be extracted + * @return a pair of strings, the entity name and the aspect name that that index is for, or empty + * if one cannot be extracted */ Optional<Pair<String, String>> getEntityAndAspectName(String timeseriesAspectIndexName); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java index e607139203b57..764630eb73973 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java @@ -10,7 +10,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.StringUtils; - // Default implementation of search index naming convention public class IndexConventionImpl implements IndexConvention { // Map from Entity name -> Index name @@ -19,17 +18,25 @@ public class IndexConventionImpl implements IndexConvention { private final String _getAllEntityIndicesPattern; private final String _getAllTimeseriesIndicesPattern; - private final static String ENTITY_INDEX_VERSION = "v2"; - private final static String ENTITY_INDEX_SUFFIX = "index"; - private final static String TIMESERIES_INDEX_VERSION = "v1"; - private final static String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; + private static final String ENTITY_INDEX_VERSION = "v2"; + private static final String ENTITY_INDEX_SUFFIX = "index"; + private static final String TIMESERIES_INDEX_VERSION = "v1"; + private static final String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; public IndexConventionImpl(@Nullable String prefix) { _prefix = StringUtils.isEmpty(prefix) ? 
Optional.empty() : Optional.of(prefix); _getAllEntityIndicesPattern = - _prefix.map(p -> p + "_").orElse("") + "*" + ENTITY_INDEX_SUFFIX + "_" + ENTITY_INDEX_VERSION; + _prefix.map(p -> p + "_").orElse("") + + "*" + + ENTITY_INDEX_SUFFIX + + "_" + + ENTITY_INDEX_VERSION; _getAllTimeseriesIndicesPattern = - _prefix.map(p -> p + "_").orElse("") + "*" + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + TIMESERIES_INDEX_VERSION; + _prefix.map(p -> p + "_").orElse("") + + "*" + + TIMESERIES_ENTITY_INDEX_SUFFIX + + "_" + + TIMESERIES_INDEX_VERSION; } private String createIndexName(String baseName) { @@ -85,7 +92,9 @@ public String getEntityIndexName(String entityName) { @Nonnull @Override public String getTimeseriesAspectIndexName(String entityName, String aspectName) { - return this.getIndexName(entityName + "_" + aspectName) + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + return this.getIndexName(entityName + "_" + aspectName) + + TIMESERIES_ENTITY_INDEX_SUFFIX + + "_" + TIMESERIES_INDEX_VERSION; } @@ -108,8 +117,10 @@ public Optional<String> getEntityName(String indexName) { @Override public Optional<Pair<String, String>> getEntityAndAspectName(String timeseriesAspectIndexName) { - Optional<String> entityAndAspect = extractIndexBase(timeseriesAspectIndexName, TIMESERIES_ENTITY_INDEX_SUFFIX + "_" - + TIMESERIES_INDEX_VERSION); + Optional<String> entityAndAspect = + extractIndexBase( + timeseriesAspectIndexName, + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + TIMESERIES_INDEX_VERSION); if (entityAndAspect.isPresent()) { String[] entityAndAspectTokens = entityAndAspect.get().split("_"); if (entityAndAspectTokens.length == 2) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java index 982557f2b5358..885ed74d11471 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.utils.exception; -/** - * An exception to be thrown when certain graph entities are not supported. - */ +/** An exception to be thrown when certain graph entities are not supported. */ public class UnsupportedGraphEntities extends RuntimeException { public UnsupportedGraphEntities(String message) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java index d053272c19e7d..67f0ae4c77eaf 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java @@ -6,16 +6,13 @@ import java.util.ArrayList; import java.util.List; - /** - * A Log Filter that can be configured to omit logs containing a specific message string. - * Configured inside logback.xml. + * A Log Filter that can be configured to omit logs containing a specific message string. Configured + * inside logback.xml. */ public class LogMessageFilter extends AbstractMatcherFilter<ILoggingEvent> { - /** - * A set of messages to exclude. - */ + /** A set of messages to exclude. 
*/ private final List<String> excluded = new ArrayList<>(); @Override @@ -33,4 +30,4 @@ public FilterReply decide(ILoggingEvent event) { public void addExcluded(String message) { this.excluded.add(message); } -} \ No newline at end of file +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java index 9a8848e090fb8..3a47c11f8d748 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java @@ -7,10 +7,8 @@ import com.codahale.metrics.Timer; import com.codahale.metrics.jmx.JmxReporter; - public class MetricUtils { - private MetricUtils() { - } + private MetricUtils() {} public static final String DELIMITER = "_"; @@ -32,7 +30,8 @@ public static Counter counter(Class<?> klass, String metricName) { public static void exceptionCounter(Class<?> klass, String metricName, Throwable t) { String[] splitClassName = t.getClass().getName().split("[.]"); - String snakeCase = splitClassName[splitClassName.length - 1].replaceAll("([A-Z][a-z])", DELIMITER + "$1"); + String snakeCase = + splitClassName[splitClassName.length - 1].replaceAll("([A-Z][a-z])", DELIMITER + "$1"); counter(klass, metricName).inc(); counter(klass, metricName + DELIMITER + snakeCase).inc(); @@ -50,7 +49,8 @@ public static Timer timer(String metricName) { return REGISTRY.timer(MetricRegistry.name(metricName)); } - public static <T extends Gauge<?>> T gauge(Class<?> clazz, String metricName, MetricRegistry.MetricSupplier<T> supplier) { + public static <T extends Gauge<?>> T gauge( + Class<?> clazz, String metricName, MetricRegistry.MetricSupplier<T> supplier) { return REGISTRY.gauge(MetricRegistry.name(clazz, metricName), supplier); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java index e120fdb3b342f..7a6479a313244 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.utils; +import static org.testng.Assert.*; + import com.datahub.test.KeyPartEnum; import com.datahub.test.TestEntityKey; import com.linkedin.common.urn.Urn; @@ -8,11 +10,8 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; -/** - * Tests the capabilities of {@link EntityKeyUtils} - */ +/** Tests the capabilities of {@link EntityKeyUtils} */ public class EntityKeyUtilsTest { @Test @@ -22,36 +21,42 @@ public void testConvertEntityKeyToUrn() throws Exception { key.setKeyPart2(Urn.createFromString("urn:li:testEntity2:part2")); key.setKeyPart3(KeyPartEnum.VALUE_1); - final Urn expectedUrn = Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); + final Urn expectedUrn = + Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); final Urn actualUrn = EntityKeyUtils.convertEntityKeyToUrn(key, "testEntity1"); assertEquals(actualUrn.toString(), expectedUrn.toString()); } @Test public void testConvertEntityKeyToUrnInternal() throws Exception { - final Urn urn = Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); + final Urn urn = + 
Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); final TestEntityKey expectedKey = new TestEntityKey(); expectedKey.setKeyPart1("part1"); expectedKey.setKeyPart2(Urn.createFromString("urn:li:testEntity2:part2")); expectedKey.setKeyPart3(KeyPartEnum.VALUE_1); - final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKeyInternal(urn, expectedKey.schema()); + final RecordTemplate actualKey = + EntityKeyUtils.convertUrnToEntityKeyInternal(urn, expectedKey.schema()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } @Test public void testConvertEntityUrnToKey() throws Exception { - final Urn urn = Urn.createFromString("urn:li:testEntity:(part1,urn:li:testEntity:part2,VALUE_1)"); + final Urn urn = + Urn.createFromString("urn:li:testEntity:(part1,urn:li:testEntity:part2,VALUE_1)"); final TestEntityKey expectedKey = new TestEntityKey(); expectedKey.setKeyPart1("part1"); expectedKey.setKeyPart2(Urn.createFromString("urn:li:testEntity:part2")); expectedKey.setKeyPart3(KeyPartEnum.VALUE_1); - ConfigEntityRegistry entityRegistry = new ConfigEntityRegistry( - TestEntityKey.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry entityRegistry = + new ConfigEntityRegistry( + TestEntityKey.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); final EntitySpec entitySpec = entityRegistry.getEntitySpec(PegasusUtils.urnToEntityName(urn)); - final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); + final RecordTemplate actualKey = + EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java index 8b2078c7b9533..6288ed80e6881 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java @@ -1,29 +1,29 @@ package com.linkedin.metadata.utils; -import org.testng.annotations.Test; - - import static org.testng.Assert.assertEquals; +import org.testng.annotations.Test; + public class IngestionUtilsTest { private final String ingestionSourceUrn = "urn:li:ingestionSource:12345"; @Test public void injectPipelineNameWhenThere() { - String recipe = "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"test\"}"; + String recipe = + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"test\"}"; assertEquals(recipe, IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn)); } @Test public void injectPipelineNameWhenNotThere() { - String recipe = "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}}}"; + String recipe = + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}}}"; recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn); assertEquals( recipe, - "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"urn:li:ingestionSource:12345\"}" - ); + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"urn:li:ingestionSource:12345\"}"); } } diff --git 
a/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java index b60b7fb64f3f9..fd606f57477a0 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.utils; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.search.FilterValue; @@ -9,12 +11,7 @@ import java.util.Set; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - -/** - * Tests the capabilities of {@link EntityKeyUtils} - */ +/** Tests the capabilities of {@link EntityKeyUtils} */ public class SearchUtilTest { @Test @@ -25,21 +22,22 @@ public void testConvertToFilters() throws Exception { Set<String> filteredValues = ImmutableSet.of("urn:li:tag:def"); - List<FilterValue> filters = - SearchUtil.convertToFilters(aggregations, filteredValues); - - assertEquals(filters.get(0), new FilterValue() - .setFiltered(false) - .setValue("urn:li:tag:abc") - .setEntity(Urn.createFromString("urn:li:tag:abc")) - .setFacetCount(3L) - ); - - assertEquals(filters.get(1), new FilterValue() - .setFiltered(true) - .setValue("urn:li:tag:def") - .setEntity(Urn.createFromString("urn:li:tag:def")) - .setFacetCount(0L) - ); + List<FilterValue> filters = SearchUtil.convertToFilters(aggregations, filteredValues); + + assertEquals( + filters.get(0), + new FilterValue() + .setFiltered(false) + .setValue("urn:li:tag:abc") + .setEntity(Urn.createFromString("urn:li:tag:abc")) + .setFacetCount(3L)); + + assertEquals( + filters.get(1), + new FilterValue() + .setFiltered(true) + .setValue("urn:li:tag:def") + .setEntity(Urn.createFromString("urn:li:tag:def")) + .setFacetCount(0L)); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java index 5310871140fc9..f3e52c9989775 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.utils.elasticsearch; +import static org.testng.Assert.*; + import com.linkedin.util.Pair; import java.util.Optional; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class IndexConventionImplTest { @Test @@ -19,7 +18,9 @@ public void testIndexConventionNoPrefix() { assertEquals(indexConventionNoPrefix.getEntityName(expectedIndexName), Optional.of(entityName)); assertEquals(indexConventionNoPrefix.getEntityName("totally not an index"), Optional.empty()); assertEquals(indexConventionNoPrefix.getEntityName("dataset_v2"), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.of("dashboard")); + assertEquals( + indexConventionNoPrefix.getEntityName("dashboardindex_v2_1683649932260"), + Optional.of("dashboard")); } @Test @@ -32,22 +33,32 @@ public void testIndexConventionPrefix() { assertEquals(indexConventionPrefix.getEntityName(expectedIndexName), Optional.of(entityName)); assertEquals(indexConventionPrefix.getEntityName("totally not an index"), Optional.empty()); 
assertEquals(indexConventionPrefix.getEntityName("prefix_dataset_v2"), Optional.empty()); - assertEquals(indexConventionPrefix.getEntityName("prefix_dashboardindex_v2_1683649932260"), Optional.of("dashboard")); - assertEquals(indexConventionPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityName("prefix_dashboardindex_v2_1683649932260"), + Optional.of("dashboard")); + assertEquals( + indexConventionPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.empty()); } + @Test public void testTimeseriesIndexConventionNoPrefix() { IndexConvention indexConventionNoPrefix = new IndexConventionImpl(null); String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "dataset_datasetusagestatisticsaspect_v1"; - assertEquals(indexConventionNoPrefix.getTimeseriesAspectIndexName(entityName, aspectName), expectedIndexName); + assertEquals( + indexConventionNoPrefix.getTimeseriesAspectIndexName(entityName, aspectName), + expectedIndexName); assertEquals(indexConventionNoPrefix.getPrefix(), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName(expectedIndexName), Optional.of( - Pair.of(entityName, aspectName))); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName(expectedIndexName), + Optional.of(Pair.of(entityName, aspectName))); + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); assertEquals(indexConventionNoPrefix.getEntityAndAspectName("dataset_v2"), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName("dashboard_dashboardusagestatisticsaspect_v1"), + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName( + "dashboard_dashboardusagestatisticsaspect_v1"), Optional.of(Pair.of("dashboard", "dashboardusagestatistics"))); } @@ -57,10 +68,17 @@ public void testTimeseriesIndexConventionPrefix() { String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "prefix_dataset_datasetusagestatisticsaspect_v1"; - assertEquals(indexConventionPrefix.getTimeseriesAspectIndexName(entityName, aspectName), expectedIndexName); + assertEquals( + indexConventionPrefix.getTimeseriesAspectIndexName(entityName, aspectName), + expectedIndexName); assertEquals(indexConventionPrefix.getPrefix(), Optional.of("prefix")); - assertEquals(indexConventionPrefix.getEntityAndAspectName(expectedIndexName), Optional.of(Pair.of(entityName, aspectName))); - assertEquals(indexConventionPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); - assertEquals(indexConventionPrefix.getEntityAndAspectName("prefix_datasetusagestatisticsaspect_v1"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityAndAspectName(expectedIndexName), + Optional.of(Pair.of(entityName, aspectName))); + assertEquals( + indexConventionPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityAndAspectName("prefix_datasetusagestatisticsaspect_v1"), + Optional.empty()); } } diff --git a/mock-entity-registry/src/main/java/mock/MockAspectSpec.java b/mock-entity-registry/src/main/java/mock/MockAspectSpec.java index 594bc583eeef0..92321cce3d905 100644 --- a/mock-entity-registry/src/main/java/mock/MockAspectSpec.java +++ 
b/mock-entity-registry/src/main/java/mock/MockAspectSpec.java @@ -12,16 +12,24 @@ import java.util.List; import javax.annotation.Nonnull; - public class MockAspectSpec extends AspectSpec { - public MockAspectSpec(@Nonnull AspectAnnotation aspectAnnotation, + public MockAspectSpec( + @Nonnull AspectAnnotation aspectAnnotation, @Nonnull List<SearchableFieldSpec> searchableFieldSpecs, @Nonnull List<SearchScoreFieldSpec> searchScoreFieldSpecs, @Nonnull List<RelationshipFieldSpec> relationshipFieldSpecs, @Nonnull List<TimeseriesFieldSpec> timeseriesFieldSpecs, - @Nonnull List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs, RecordDataSchema schema, + @Nonnull List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs, + RecordDataSchema schema, Class<RecordTemplate> aspectClass) { - super(aspectAnnotation, searchableFieldSpecs, searchScoreFieldSpecs, relationshipFieldSpecs, timeseriesFieldSpecs, - timeseriesFieldCollectionSpecs, schema, aspectClass); + super( + aspectAnnotation, + searchableFieldSpecs, + searchScoreFieldSpecs, + relationshipFieldSpecs, + timeseriesFieldSpecs, + timeseriesFieldCollectionSpecs, + schema, + aspectClass); } } diff --git a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java index 54dd25613ed4c..a324f9ce0195b 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java +++ b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java @@ -11,7 +11,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class MockEntityRegistry implements EntityRegistry { @Nonnull @Override @@ -48,5 +47,4 @@ public AspectTemplateEngine getAspectTemplateEngine() { public Map<String, AspectSpec> getAspectSpecs() { return new HashMap<>(); } - } diff --git a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java index d740fff29e258..0013d6615a71d 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java +++ b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java @@ -1,5 +1,7 @@ package mock; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePaths; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -27,9 +29,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class MockEntitySpec implements EntitySpec { private String _name; @@ -41,7 +40,8 @@ public MockEntitySpec(String name) { if (DATASET_ENTITY_NAME.equals(name)) { _aspectTypeMap.put(BROWSE_PATHS_ASPECT_NAME, getAspectSpec(BROWSE_PATHS_ASPECT_NAME)); _aspectTypeMap.put(BROWSE_PATHS_V2_ASPECT_NAME, getAspectSpec(BROWSE_PATHS_V2_ASPECT_NAME)); - _aspectTypeMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, getAspectSpec(DATA_PLATFORM_INSTANCE_ASPECT_NAME)); + _aspectTypeMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, getAspectSpec(DATA_PLATFORM_INSTANCE_ASPECT_NAME)); } } @@ -81,16 +81,23 @@ public AspectSpec getKeyAspectSpec() { return null; } - public <T extends RecordTemplate> AspectSpec createAspectSpec(T type, String name) { - return new MockAspectSpec(new AspectAnnotation(name, false, false, null), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), type.schema(), (Class<RecordTemplate>) type.getClass().asSubclass(RecordTemplate.class)); + public <T extends RecordTemplate> AspectSpec 
createAspectSpec(T type, String name) { + return new MockAspectSpec( + new AspectAnnotation(name, false, false, null), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + type.schema(), + (Class<RecordTemplate>) type.getClass().asSubclass(RecordTemplate.class)); } @Override public List<AspectSpec> getAspectSpecs() { - return ASPECT_TYPE_MAP.keySet().stream().map(name -> createAspectSpec(ASPECT_TYPE_MAP.get(name), name)).collect( - Collectors.toList()); + return ASPECT_TYPE_MAP.keySet().stream() + .map(name -> createAspectSpec(ASPECT_TYPE_MAP.get(name), name)) + .collect(Collectors.toList()); } @Override @@ -118,6 +125,7 @@ public Boolean hasAspect(String name) { ASPECT_TYPE_MAP.put(BROWSE_PATHS_V2_ASPECT_NAME, new BrowsePathsV2()); ASPECT_TYPE_MAP.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, new DataPlatformInstance()); } + @Override public AspectSpec getAspectSpec(String name) { return createAspectSpec(ASPECT_TYPE_MAP.get(name), name); diff --git a/test-models/src/main/java/com/datahub/utils/TestUtils.java b/test-models/src/main/java/com/datahub/utils/TestUtils.java index 1aca3a890caa6..6a2d219fa9b4d 100644 --- a/test-models/src/main/java/com/datahub/utils/TestUtils.java +++ b/test-models/src/main/java/com/datahub/utils/TestUtils.java @@ -9,7 +9,6 @@ import javax.annotation.Nonnull; import org.apache.commons.io.IOUtils; - public final class TestUtils { private TestUtils() { // Util class @@ -18,7 +17,8 @@ private TestUtils() { @Nonnull public static String loadJsonFromResource(@Nonnull String resourceName) throws IOException { final String jsonStr = - IOUtils.toString(ClassLoader.getSystemResourceAsStream(resourceName), Charset.defaultCharset()); + IOUtils.toString( + ClassLoader.getSystemResourceAsStream(resourceName), Charset.defaultCharset()); return jsonStr.replaceAll("\\s+", ""); } @@ -45,4 +45,3 @@ public static BarUrn makeBarUrn(int id) { return new BarUrn(id); } } - diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java index 6b1cd545ba00d..c9d308522f6b9 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java @@ -4,11 +4,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class BarUrn extends Urn { public static final String ENTITY_TYPE = "bar"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. private final int _id; public BarUrn(int id) { @@ -22,7 +22,8 @@ public int getBarIdEntity() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. 
return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java index 8970a011eca14..774da2687893b 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class BarUrnCoercer extends BaseUrnCoercer<BarUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new BarUrnCoercer(), BarUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new BarUrnCoercer(), BarUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java index ab0c28f9fbb9b..4fffa8b4f2558 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java @@ -5,10 +5,8 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public abstract class BaseUrnCoercer<T extends Urn> implements DirectCoercer<T> { - public BaseUrnCoercer() { - } + public BaseUrnCoercer() {} public Object coerceInput(T object) throws ClassCastException { return object.toString(); diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java index dddf7721c64a8..81e0adab84472 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java @@ -3,11 +3,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class BazUrn extends Urn { public static final String ENTITY_TYPE = "baz"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. private final int _id; public BazUrn(int id) throws URISyntaxException { @@ -21,7 +21,8 @@ public int getBazIdEntity() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. 
return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java index 87b8929d236db..33ca9d0b060c6 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class BazUrnCoercer extends BaseUrnCoercer<BazUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new BazUrnCoercer(), BazUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new BazUrnCoercer(), BazUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java index a8f2bab3c21dd..1047e39f9905f 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java @@ -3,11 +3,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class FooUrn extends Urn { public static final String ENTITY_TYPE = "foo"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. private final int _id; public FooUrn(int id) throws URISyntaxException { @@ -21,7 +21,8 @@ public int getFooIdEntity() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java index a2d65dc5f8bd3..3e7bd95fdf3bc 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class FooUrnCoercer extends BaseUrnCoercer<FooUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new FooUrnCoercer(), FooUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new FooUrnCoercer(), FooUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java index 6cec6042401a1..bfa22bdeb7f90 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java @@ -4,11 +4,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class PizzaUrn extends Urn { public static final String ENTITY_TYPE = "pizza"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. 
private final int _id; public PizzaUrn(int id) { @@ -22,7 +22,8 @@ public int getPizzaId() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java index 64bfffe03f77d..30af8171e0eef 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class PizzaUrnCoercer extends BaseUrnCoercer<PizzaUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new PizzaUrnCoercer(), PizzaUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new PizzaUrnCoercer(), PizzaUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java index 8467f15f85a49..7af0eb39c70d9 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java @@ -3,7 +3,6 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class SingleAspectEntityUrn extends Urn { private static final String ENTITY_TYPE = "entitySingleAspectEntity"; From 3c0727e9b7195b05e55557bfa0a56f390808eb3a Mon Sep 17 00:00:00 2001 From: Aseem Bansal <asmbansal2@gmail.com> Date: Wed, 6 Dec 2023 15:07:50 +0530 Subject: [PATCH 203/792] feat(ci): split no cypress test suite (#9387) --- .github/workflows/docker-unified.yml | 12 ++++++------ smoke-test/smoke.sh | 8 +++++--- smoke-test/test_e2e.py | 2 ++ smoke-test/tests/privileges/test_privileges.py | 2 ++ smoke-test/tests/timeline/timeline_test.py | 2 ++ 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 8bb82a0a0608c..fef23f9efa85f 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -696,7 +696,12 @@ jobs: strategy: fail-fast: false matrix: - test_strategy: ["no_cypress", "cypress_suite1", "cypress_rest"] + test_strategy: [ + "no_cypress_suite0", + "no_cypress_suite1", + "cypress_suite1", + "cypress_rest" + ] needs: [ setup, @@ -792,11 +797,6 @@ jobs: ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml" run: | ./smoke-test/run-quickstart.sh - - name: sleep 60s - run: | - # we are doing this because gms takes time to get ready - # and we don't have a better readiness check when bootstrap is done - sleep 60s - name: Disk Check run: df -h . 
&& docker images - name: Disable ES Disk Threshold diff --git a/smoke-test/smoke.sh b/smoke-test/smoke.sh index 3236a0e5c3f0c..db0389be1f489 100755 --- a/smoke-test/smoke.sh +++ b/smoke-test/smoke.sh @@ -24,12 +24,14 @@ source venv/bin/activate source ./set-cypress-creds.sh -# no_cypress, cypress_suite1, cypress_rest +# no_cypress_suite0, no_cypress_suite1, cypress_suite1, cypress_rest if [[ -z "${TEST_STRATEGY}" ]]; then pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke.xml else - if [ "$TEST_STRATEGY" == "no_cypress" ]; then - pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_non_cypress.xml -k 'not test_run_cypress' + if [ "$TEST_STRATEGY" == "no_cypress_suite0" ]; then + pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_non_cypress.xml -k 'not test_run_cypress' -m 'not no_cypress_suite1' + elif [ "$TEST_STRATEGY" == "no_cypress_suite1" ]; then + pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_non_cypress.xml -m 'no_cypress_suite1' else pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_cypress_${TEST_STRATEGY}.xml tests/cypress/integration_test.py fi diff --git a/smoke-test/test_e2e.py b/smoke-test/test_e2e.py index 4a0a122b79670..abb4841314c4a 100644 --- a/smoke-test/test_e2e.py +++ b/smoke-test/test_e2e.py @@ -8,6 +8,8 @@ import tenacity from datahub.ingestion.run.pipeline import Pipeline +pytestmark = pytest.mark.no_cypress_suite1 + from tests.utils import ( get_frontend_url, get_gms_url, diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index d0f00734ae9f3..aa54a50b04e7f 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -5,6 +5,8 @@ get_frontend_url, get_admin_credentials,get_sleep_info) from tests.privileges.utils import * +pytestmark = pytest.mark.no_cypress_suite1 + sleep_sec, sleep_times = get_sleep_info() @pytest.fixture(scope="session") diff --git a/smoke-test/tests/timeline/timeline_test.py b/smoke-test/tests/timeline/timeline_test.py index 4705343c1a2ba..c075d981487db 100644 --- a/smoke-test/tests/timeline/timeline_test.py +++ b/smoke-test/tests/timeline/timeline_test.py @@ -1,4 +1,5 @@ import json +import pytest from time import sleep from datahub.cli import timeline_cli @@ -7,6 +8,7 @@ from tests.utils import (get_datahub_graph, ingest_file_via_rest, wait_for_writes_to_sync) +pytestmark = pytest.mark.no_cypress_suite1 def test_all(): platform = "urn:li:dataPlatform:kafka" From a9c5c3903c3af88bd1aaf45b1a131f04d3ef57c1 Mon Sep 17 00:00:00 2001 From: Aseem Bansal <asmbansal2@gmail.com> Date: Wed, 6 Dec 2023 15:58:20 +0530 Subject: [PATCH 204/792] fix(ingest/redshift): too many values unpack (#9394) --- .../src/datahub/ingestion/source/redshift/lineage.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index 05011b2d7a769..abed8505f168b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -381,7 +381,8 @@ def _get_upstream_lineages( qualified_table_name = dataset_urn.DatasetUrn.create_from_string( source.urn ).get_entity_id()[1] - db, schema, table = qualified_table_name.split(".") + # -3 because platform instance is 
optional and that can cause the split to have more than 3 elements + db, schema, table = qualified_table_name.split(".")[-3:] if db == raw_db_name: db = alias_db_name path = f"{db}.{schema}.{table}" From 2eee3332ead690178edf6a5e8f8f551fa0065163 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth <treff7es@gmail.com> Date: Wed, 6 Dec 2023 12:18:24 +0100 Subject: [PATCH 205/792] fix(ingest/redshift): Fix psycopg2 removal from Redshift Source (#9395) --- .../src/datahub/ingestion/source/redshift/config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 51ad8a050adc2..540adbf4bfd15 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -10,7 +10,7 @@ from datahub.configuration.source_common import DatasetLineageProviderConfigBase from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.data_lake_common.path_spec import PathSpec -from datahub.ingestion.source.sql.postgres import BasePostgresConfig +from datahub.ingestion.source.sql.sql_config import BasicSQLAlchemyConfig from datahub.ingestion.source.state.stateful_ingestion_base import ( StatefulLineageConfigMixin, StatefulProfilingConfigMixin, @@ -64,7 +64,7 @@ class RedshiftUsageConfig(BaseUsageConfig, StatefulUsageConfigMixin): class RedshiftConfig( - BasePostgresConfig, + BasicSQLAlchemyConfig, DatasetLineageProviderConfigBase, S3DatasetLineageProviderConfigBase, RedshiftUsageConfig, From 7a2b8bf5f9190441f667a733ce6328c50e62030c Mon Sep 17 00:00:00 2001 From: Jonas <150245047+accso-jo@users.noreply.github.com> Date: Wed, 6 Dec 2023 18:31:48 +0100 Subject: [PATCH 206/792] fix(ui): fixed font src spelling mistake (#9204) --- datahub-web-react/src/App.less | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/App.less b/datahub-web-react/src/App.less index a001aa103b33f..003e86981b2b2 100644 --- a/datahub-web-react/src/App.less +++ b/datahub-web-react/src/App.less @@ -4,5 +4,5 @@ @font-face { font-family: 'Manrope'; font-style: normal; - src: local('Mnarope'), url('./fonts/manrope.woff2') format('woff2'), + src: local('Manrope'), url('./fonts/manrope.woff2') format('woff2'), } From 27f23ecdd5d3635ac32ed51a10a339ee3e4870b3 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Wed, 6 Dec 2023 13:59:23 -0500 Subject: [PATCH 207/792] feat(ingest/unity): GE Profiling (#8951) --- docs/how/updating-datahub.md | 4 + .../sources/databricks/unity-catalog_pre.md | 3 +- .../databricks/unity-catalog_recipe.yml | 52 ++++-- metadata-ingestion/setup.py | 5 +- .../ingestion/source/bigquery_v2/profiler.py | 2 +- .../ingestion/source/ge_data_profiler.py | 24 ++- .../ingestion/source/redshift/profile.py | 3 +- .../source/snowflake/snowflake_profiler.py | 3 +- .../source/sql/sql_generic_profiler.py | 25 +-- .../{profiler.py => analyze_profiler.py} | 6 +- .../datahub/ingestion/source/unity/config.py | 78 ++++++-- .../ingestion/source/unity/ge_profiler.py | 170 ++++++++++++++++++ .../datahub/ingestion/source/unity/report.py | 7 +- .../datahub/ingestion/source/unity/source.py | 49 +++-- .../mysql/mysql_mces_no_db_golden.json | 27 +-- .../mysql/mysql_mces_with_db_golden.json | 73 ++++---- .../mysql_table_row_count_estimate_only.json | 121 ++++--------- .../tests/unit/test_unity_catalog_config.py | 8 +- 
18 files changed, 449 insertions(+), 211 deletions(-) rename metadata-ingestion/src/datahub/ingestion/source/unity/{profiler.py => analyze_profiler.py} (96%) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index df179b0d0d2f7..94ab1b0611c33 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -12,6 +12,10 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. - #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps. +- #8951: A great expectations based profiler has been added for the Unity Catalog source. +To use the old profiler, set `method: analyze` under the `profiling` section in your recipe. +To use the new profiler, set `method: ge`. Profiling is disabled by default, so to enable it, +one of these methods must be specified. ### Potential Downtime diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md index ae2883343d7e8..12540e1977f64 100644 --- a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md +++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md @@ -15,7 +15,8 @@ * [Privileges documentation](https://docs.databricks.com/data-governance/unity-catalog/manage-privileges/privileges.html) + To ingest your workspace's notebooks and respective lineage, your service principal must have `CAN_READ` privileges on the folders containing the notebooks you want to ingest: [guide](https://docs.databricks.com/en/security/auth-authz/access-control/workspace-acl.html#folder-permissions). + To `include_usage_statistics` (enabled by default), your service principal must have `CAN_MANAGE` permissions on any SQL Warehouses you want to ingest: [guide](https://docs.databricks.com/security/auth-authz/access-control/sql-endpoint-acl.html). - + To ingest `profiling` information with `call_analyze` (enabled by default), your service principal must have ownership or `MODIFY` privilege on any tables you want to profile. + + To ingest `profiling` information with `method: ge`, you need `SELECT` privileges on all profiled tables. + + To ingest `profiling` information with `method: analyze` and `call_analyze: true` (enabled by default), your service principal must have ownership or `MODIFY` privilege on any tables you want to profile. * Alternatively, you can run [ANALYZE TABLE](https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-analyze-table.html) yourself on any tables you want to profile, then set `call_analyze` to `false`. You will still need `SELECT` privilege on those tables to fetch the results. 
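  For example, a minimal sketch of that manual flow, using the `databricks-sql-connector` package that this patch pins to `>=2.8.0`. The hostname, warehouse id, token, and table name below are placeholders, not values from this repo:

```python
# Illustrative sketch: compute statistics yourself, then set call_analyze: false.
from databricks import sql  # provided by databricks-sql-connector

with sql.connect(
    server_hostname="my-workspace.cloud.databricks.com",  # placeholder
    http_path="/sql/1.0/warehouses/<warehouse_id>",  # placeholder
    access_token="<token>",  # placeholder
) as connection:
    with connection.cursor() as cursor:
        # Requires ownership or MODIFY privilege on the table.
        cursor.execute(
            "ANALYZE TABLE main.my_schema.my_table "
            "COMPUTE STATISTICS FOR ALL COLUMNS"
        )
        # Reading the computed statistics back only requires SELECT.
        cursor.execute("DESCRIBE TABLE EXTENDED main.my_schema.my_table")
        for row in cursor.fetchall():
            print(row)
```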
- Check the starter recipe below and replace `workspace_url` and `token` with your information from the previous steps. diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml b/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml index 7bc336d5f25fc..931552e7343d0 100644 --- a/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml +++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml @@ -2,24 +2,38 @@ source: type: unity-catalog config: workspace_url: https://my-workspace.cloud.databricks.com - token: "mygenerated_databricks_token" - #metastore_id_pattern: - # deny: - # - 11111-2222-33333-44-555555 - #catalog_pattern: - # allow: - # - my-catalog - #schema_pattern: - # deny: - # - information_schema - #table_pattern: - # allow: - # - test.lineagedemo.dinner - # First you have to create domains on Datahub by following this guide -> https://datahubproject.io/docs/domains/#domains-setup-prerequisites-and-permissions - #domain: - # urn:li:domain:1111-222-333-444-555: - # allow: - # - main.* + token: "<token>" + include_metastore: false + include_ownership: true + profiling: + method: "ge" + enabled: true + warehouse_id: "<warehouse_id>" + profile_table_level_only: false + max_wait_secs: 60 + pattern: + deny: + - ".*\\.unwanted_schema" + +# profiling: +# method: "analyze" +# enabled: true +# warehouse_id: "<warehouse_id>" +# profile_table_level_only: true +# call_analyze: true + +# catalogs: ["my_catalog"] +# schema_pattern: +# deny: +# - information_schema +# table_pattern: +# allow: +# - my_catalog.my_schema.my_table +# First you have to create domains on Datahub by following this guide -> https://datahubproject.io/docs/domains/#domains-setup-prerequisites-and-permissions +# domain: +# urn:li:domain:1111-222-333-444-555: +# allow: +# - main.* stateful_ingestion: enabled: true @@ -27,4 +41,4 @@ source: pipeline_name: acme-corp-unity -# sink configs if needed \ No newline at end of file +# sink configs if needed diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 69cbe8d823450..dac865d2dac37 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -262,7 +262,8 @@ "databricks-sdk>=0.9.0", "pyspark~=3.3.0", "requests", - "databricks-sql-connector", + # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes + "databricks-sql-connector>=2.8.0", } mysql = sql_common | {"pymysql>=1.0.2"} @@ -393,7 +394,7 @@ "powerbi": microsoft_common | {"lark[regex]==1.1.4", "sqlparse"} | sqlglot_lib, "powerbi-report-server": powerbi_report_server, "vertica": sql_common | {"vertica-sqlalchemy-dialect[vertica-python]==0.0.8.1"}, - "unity-catalog": databricks | sqllineage_lib, + "unity-catalog": databricks | sql_common | sqllineage_lib, "fivetran": snowflake_common, } diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py index 8ae17600e0eea..4083eb6db77c1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py @@ -183,7 +183,7 @@ def get_workunits( return yield from self.generate_profile_workunits( profile_requests, - self.config.profiling.max_workers, + max_workers=self.config.profiling.max_workers, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py 
b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py index c334a97680e3e..abb415c90cc8b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py @@ -27,6 +27,7 @@ import sqlalchemy as sa import sqlalchemy.sql.compiler +from great_expectations.core.profiler_types_mapping import ProfilerTypeMapping from great_expectations.core.util import convert_to_json_serializable from great_expectations.data_context import AbstractDataContext, BaseDataContext from great_expectations.data_context.types.base import ( @@ -77,8 +78,26 @@ SNOWFLAKE = "snowflake" BIGQUERY = "bigquery" REDSHIFT = "redshift" +DATABRICKS = "databricks" TRINO = "trino" +# Type names for Databricks, to match Title Case types in sqlalchemy +ProfilerTypeMapping.INT_TYPE_NAMES.append("Integer") +ProfilerTypeMapping.INT_TYPE_NAMES.append("SmallInteger") +ProfilerTypeMapping.INT_TYPE_NAMES.append("BigInteger") +ProfilerTypeMapping.FLOAT_TYPE_NAMES.append("Float") +ProfilerTypeMapping.FLOAT_TYPE_NAMES.append("Numeric") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("String") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("Text") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("Unicode") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("UnicodeText") +ProfilerTypeMapping.BOOLEAN_TYPE_NAMES.append("Boolean") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("Date") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("DateTime") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("Time") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("Interval") +ProfilerTypeMapping.BINARY_TYPE_NAMES.append("LargeBinary") + # The reason for this wacky structure is quite fun. GE basically assumes that # the config structures were generated directly from YML and further assumes that # they can be `deepcopy`'d without issue. 
The SQLAlchemy engine and connection @@ -697,6 +716,9 @@ def generate_dataset_profile( # noqa: C901 (complexity) 1, unique_count / non_null_count ) + if not profile.rowCount: + continue + self._get_dataset_column_sample_values(column_profile, column) if ( @@ -1172,7 +1194,7 @@ def _get_ge_dataset( }, ) - if platform == BIGQUERY: + if platform == BIGQUERY or platform == DATABRICKS: # This is done as GE makes the name as DATASET.TABLE # but we want it to be PROJECT.DATASET.TABLE instead for multi-project setups name_parts = pretty_name.split(".") diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py index 771636e8498a3..6fa3504ced139 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py @@ -59,8 +59,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, - self.config.profiling.max_workers, - db, + max_workers=self.config.profiling.max_workers, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 8e18d85d6f3ca..67953de47e5a3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -62,8 +62,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, - self.config.profiling.max_workers, - database.name, + max_workers=self.config.profiling.max_workers, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index aaeee5717a867..e309ff0d15311 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -69,8 +69,8 @@ def __init__( def generate_profile_workunits( self, requests: List[TableProfilerRequest], + *, max_workers: int, - db_name: Optional[str] = None, platform: Optional[str] = None, profiler_args: Optional[Dict] = None, ) -> Iterable[MetadataWorkUnit]: @@ -98,7 +98,7 @@ def generate_profile_workunits( return # Otherwise, if column level profiling is enabled, use GE profiler. 
- ge_profiler = self.get_profiler_instance(db_name) + ge_profiler = self.get_profiler_instance() for ge_profiler_request, profile in ge_profiler.generate_profiles( ge_profile_requests, max_workers, platform, profiler_args @@ -149,12 +149,18 @@ def get_profile_request( profile_table_level_only = self.config.profiling.profile_table_level_only dataset_name = self.get_dataset_name(table.name, schema_name, db_name) if not self.is_dataset_eligible_for_profiling( - dataset_name, table.last_altered, table.size_in_bytes, table.rows_count + dataset_name, + last_altered=table.last_altered, + size_in_bytes=table.size_in_bytes, + rows_count=table.rows_count, ): # Profile only table level if dataset is filtered from profiling # due to size limits alone if self.is_dataset_eligible_for_profiling( - dataset_name, table.last_altered, 0, 0 + dataset_name, + last_altered=table.last_altered, + size_in_bytes=None, + rows_count=None, ): profile_table_level_only = True else: @@ -199,9 +205,7 @@ def get_inspectors(self) -> Iterable[Inspector]: inspector = inspect(conn) yield inspector - def get_profiler_instance( - self, db_name: Optional[str] = None - ) -> "DatahubGEProfiler": + def get_profiler_instance(self) -> "DatahubGEProfiler": logger.debug(f"Getting profiler instance from {self.platform}") url = self.config.get_sql_alchemy_url() @@ -221,9 +225,10 @@ def get_profiler_instance( def is_dataset_eligible_for_profiling( self, dataset_name: str, - last_altered: Optional[datetime], - size_in_bytes: Optional[int], - rows_count: Optional[int], + *, + last_altered: Optional[datetime] = None, + size_in_bytes: Optional[int] = None, + rows_count: Optional[int] = None, ) -> bool: dataset_urn = make_dataset_urn_with_platform_instance( self.platform, diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/profiler.py b/metadata-ingestion/src/datahub/ingestion/source/unity/analyze_profiler.py similarity index 96% rename from metadata-ingestion/src/datahub/ingestion/source/unity/profiler.py rename to metadata-ingestion/src/datahub/ingestion/source/unity/analyze_profiler.py index 8066932e3afe9..4c8b22f2399b2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/analyze_profiler.py @@ -6,7 +6,7 @@ from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.workunit import MetadataWorkUnit -from datahub.ingestion.source.unity.config import UnityCatalogProfilerConfig +from datahub.ingestion.source.unity.config import UnityCatalogAnalyzeProfilerConfig from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy from datahub.ingestion.source.unity.proxy_types import ( ColumnProfile, @@ -23,8 +23,8 @@ @dataclass -class UnityCatalogProfiler: - config: UnityCatalogProfilerConfig +class UnityCatalogAnalyzeProfiler: + config: UnityCatalogAnalyzeProfilerConfig report: UnityCatalogReport proxy: UnityCatalogApiProxy dataset_urn_builder: Callable[[TableReference], str] diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 4e3deedddbc43..2c567120b4850 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -1,10 +1,12 @@ import logging import os from datetime import datetime, timedelta, timezone -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union +from urllib.parse import 
urlparse import pydantic from pydantic import Field +from typing_extensions import Literal from datahub.configuration.common import AllowDenyPattern, ConfigModel from datahub.configuration.source_common import ( @@ -13,6 +15,9 @@ ) from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.ingestion.source.ge_data_profiler import DATABRICKS +from datahub.ingestion.source.ge_profiling_config import GEProfilingConfig +from datahub.ingestion.source.sql.sql_config import SQLCommonConfig, make_sqlalchemy_uri from datahub.ingestion.source.state.stale_entity_removal_handler import ( StatefulStaleMetadataRemovalConfig, ) @@ -31,24 +36,20 @@ class UnityCatalogProfilerConfig(ConfigModel): - # TODO: Reduce duplicate code with DataLakeProfilerConfig, GEProfilingConfig, SQLAlchemyConfig - enabled: bool = Field( - default=False, description="Whether profiling should be done." - ) - operation_config: OperationConfig = Field( - default_factory=OperationConfig, - description="Experimental feature. To specify operation configs.", + method: str = Field( + description=( + "Profiling method to use." + " Options supported are `ge` and `analyze`." + " `ge` uses Great Expectations and runs SELECT SQL queries on profiled tables." + " `analyze` calls ANALYZE TABLE on profiled tables. Only works for delta tables." + ), ) + # TODO: Support cluster compute as well, for ge profiling warehouse_id: Optional[str] = Field( default=None, description="SQL Warehouse id, for running profiling queries." ) - profile_table_level_only: bool = Field( - default=False, - description="Whether to perform profiling at table-level only or include column-level profiling as well.", - ) - pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), description=( @@ -58,6 +59,24 @@ class UnityCatalogProfilerConfig(ConfigModel): ), ) + +class UnityCatalogAnalyzeProfilerConfig(UnityCatalogProfilerConfig): + method: Literal["analyze"] = "analyze" + + # TODO: Reduce duplicate code with DataLakeProfilerConfig, GEProfilingConfig, SQLAlchemyConfig + enabled: bool = Field( + default=False, description="Whether profiling should be done." + ) + operation_config: OperationConfig = Field( + default_factory=OperationConfig, + description="Experimental feature. 
To specify operation configs.", + ) + + profile_table_level_only: bool = Field( + default=False, + description="Whether to perform profiling at table-level only or include column-level profiling as well.", + ) + call_analyze: bool = Field( default=True, description=( @@ -89,7 +108,17 @@ def include_columns(self): return not self.profile_table_level_only +class UnityCatalogGEProfilerConfig(UnityCatalogProfilerConfig, GEProfilingConfig): + method: Literal["ge"] = "ge" + + max_wait_secs: Optional[int] = Field( + default=None, + description="Maximum time to wait for a table to be profiled.", + ) + + class UnityCatalogSourceConfig( + SQLCommonConfig, StatefulIngestionConfigBase, BaseUsageConfig, DatasetSourceConfigMixin, @@ -217,15 +246,34 @@ class UnityCatalogSourceConfig( description="Generate usage statistics.", ) - profiling: UnityCatalogProfilerConfig = Field( - default=UnityCatalogProfilerConfig(), description="Data profiling configuration" + profiling: Union[UnityCatalogGEProfilerConfig, UnityCatalogAnalyzeProfilerConfig] = Field( # type: ignore + default=UnityCatalogGEProfilerConfig(), + description="Data profiling configuration", + discriminator="method", ) + scheme: str = DATABRICKS + + def get_sql_alchemy_url(self): + return make_sqlalchemy_uri( + scheme=self.scheme, + username="token", + password=self.token, + at=urlparse(self.workspace_url).netloc, + db=None, + uri_opts={ + "http_path": f"/sql/1.0/warehouses/{self.profiling.warehouse_id}" + }, + ) + def is_profiling_enabled(self) -> bool: return self.profiling.enabled and is_profiling_enabled( self.profiling.operation_config ) + def is_ge_profiling(self) -> bool: + return self.profiling.method == "ge" + stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = pydantic.Field( default=None, description="Unity Catalog Stateful Ingestion Config." 
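The `profiling` field above is a pydantic discriminated union: each variant tags itself with a `Literal` value in `method`, and `discriminator="method"` tells pydantic which class to instantiate from the raw config, rather than trying each union member in declaration order. A minimal self-contained sketch of the pattern — class names and fields here are illustrative stand-ins, not the actual DataHub config classes:

from typing import Union

from pydantic import BaseModel, Field
from typing_extensions import Literal


class AnalyzeProfilerConfig(BaseModel):
    method: Literal["analyze"] = "analyze"
    call_analyze: bool = True


class GEProfilerConfig(BaseModel):
    method: Literal["ge"] = "ge"
    max_wait_secs: int = 3600


class SourceConfig(BaseModel):
    # pydantic reads the "method" tag to pick the variant to validate against
    profiling: Union[GEProfilerConfig, AnalyzeProfilerConfig] = Field(
        default=GEProfilerConfig(), discriminator="method"
    )


config = SourceConfig.parse_obj({"profiling": {"method": "analyze"}})
assert isinstance(config.profiling, AnalyzeProfilerConfig)

This is also what makes the `isinstance` dispatch in source.py (further down in this patch) safe: after validation, `config.profiling` is guaranteed to be exactly one of the two variants.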
) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py new file mode 100644 index 0000000000000..e24ca8330777e --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py @@ -0,0 +1,170 @@ +import logging +from concurrent.futures import ThreadPoolExecutor, as_completed +from dataclasses import dataclass, field +from typing import Iterable, List, Optional + +from sqlalchemy import create_engine +from sqlalchemy.engine import Connection + +from datahub.ingestion.api.workunit import MetadataWorkUnit +from datahub.ingestion.source.sql.sql_config import SQLCommonConfig +from datahub.ingestion.source.sql.sql_generic import BaseTable +from datahub.ingestion.source.sql.sql_generic_profiler import ( + GenericProfiler, + TableProfilerRequest, +) +from datahub.ingestion.source.unity.config import UnityCatalogGEProfilerConfig +from datahub.ingestion.source.unity.proxy_types import Table, TableReference +from datahub.ingestion.source.unity.report import UnityCatalogReport + +logger = logging.getLogger(__name__) + + +@dataclass(init=False) +class UnityCatalogSQLGenericTable(BaseTable): + ref: TableReference = field(init=False) + + def __init__(self, table: Table): + self.name = table.name + self.comment = table.comment + self.created = table.created_at + self.last_altered = table.updated_at + self.column_count = len(table.columns) + self.ref = table.ref + self.size_in_bytes = None + self.rows_count = None + self.ddl = None + + +class UnityCatalogGEProfiler(GenericProfiler): + sql_common_config: SQLCommonConfig + profiling_config: UnityCatalogGEProfilerConfig + report: UnityCatalogReport + + def __init__( + self, + sql_common_config: SQLCommonConfig, + profiling_config: UnityCatalogGEProfilerConfig, + report: UnityCatalogReport, + ) -> None: + super().__init__(sql_common_config, report, "databricks") + self.profiling_config = profiling_config + # TODO: Consider passing dataset urn builder directly + # So there is no repeated logic between this class and source.py + + def get_workunits(self, tables: List[Table]) -> Iterable[MetadataWorkUnit]: + # Extra default SQLAlchemy option for better connection pooling and threading. 
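`UnityCatalogSQLGenericTable` above combines `@dataclass(init=False)` with `field(init=False)` so the class keeps dataclass semantics (generated `__repr__`, `fields()`, inheritance from `BaseTable`) while populating itself from an existing object in a hand-written constructor. A rough sketch of the same pattern, with made-up field names and assuming the parent is itself a dataclass:

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class BaseTable:
    name: str = ""
    comment: Optional[str] = None
    size_in_bytes: Optional[int] = None


@dataclass(init=False)
class WrappedTable(BaseTable):
    # declared so the dataclass machinery knows about it, but never part
    # of a generated __init__
    ref: str = field(init=False)

    def __init__(self, raw: dict) -> None:
        # adapt an existing record into the shape a generic consumer expects
        self.name = raw["name"]
        self.comment = raw.get("comment")
        self.size_in_bytes = None
        self.ref = f"{raw['catalog']}.{raw['schema']}.{raw['name']}"


t = WrappedTable({"catalog": "main", "schema": "default", "name": "t1"})
assert t.ref == "main.default.t1"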
+ # https://docs.sqlalchemy.org/en/14/core/pooling.html#sqlalchemy.pool.QueuePool.params.max_overflow + self.config.options.setdefault( + "max_overflow", self.profiling_config.max_workers + ) + + url = self.config.get_sql_alchemy_url() + engine = create_engine(url, **self.config.options) + conn = engine.connect() + + profile_requests = [] + with ThreadPoolExecutor( + max_workers=self.profiling_config.max_workers + ) as executor: + futures = [ + executor.submit( + self.get_unity_profile_request, + UnityCatalogSQLGenericTable(table), + conn, + ) + for table in tables + ] + + try: + for i, completed in enumerate( + as_completed(futures, timeout=self.profiling_config.max_wait_secs) + ): + profile_request = completed.result() + if profile_request is not None: + profile_requests.append(profile_request) + if i > 0 and i % 100 == 0: + logger.info(f"Finished table-level profiling for {i} tables") + except TimeoutError: + logger.warning("Timed out waiting to complete table-level profiling.") + + if len(profile_requests) == 0: + return + + yield from self.generate_profile_workunits( + profile_requests, + max_workers=self.config.profiling.max_workers, + platform=self.platform, + profiler_args=self.get_profile_args(), + ) + + def get_dataset_name(self, table_name: str, schema_name: str, db_name: str) -> str: + # Note: unused... ideally should share logic with TableReference + return f"{db_name}.{schema_name}.{table_name}" + + def get_unity_profile_request( + self, table: UnityCatalogSQLGenericTable, conn: Connection + ) -> Optional[TableProfilerRequest]: + # TODO: Reduce code duplication with get_profile_request + skip_profiling = False + profile_table_level_only = self.profiling_config.profile_table_level_only + + dataset_name = table.ref.qualified_table_name + try: + table.size_in_bytes = _get_dataset_size_in_bytes(table, conn) + except Exception as e: + logger.warning(f"Failed to get table size for {dataset_name}: {e}") + + if table.size_in_bytes is None: + self.report.num_profile_missing_size_in_bytes += 1 + if not self.is_dataset_eligible_for_profiling( + dataset_name, + size_in_bytes=table.size_in_bytes, + last_altered=table.last_altered, + rows_count=0, # Can't get row count ahead of time + ): + # Profile only table level if dataset is filtered from profiling + # due to size limits alone + if self.is_dataset_eligible_for_profiling( + dataset_name, + last_altered=table.last_altered, + size_in_bytes=None, + rows_count=None, + ): + profile_table_level_only = True + else: + skip_profiling = True + + if table.column_count == 0: + skip_profiling = True + + if skip_profiling: + if self.profiling_config.report_dropped_profiles: + self.report.report_dropped(dataset_name) + return None + + self.report.report_entity_profiled(dataset_name) + logger.debug(f"Preparing profiling request for {dataset_name}") + return TableProfilerRequest( + table=table, + pretty_name=dataset_name, + batch_kwargs=dict(schema=table.ref.schema, table=table.name), + profile_table_level_only=profile_table_level_only, + ) + + +def _get_dataset_size_in_bytes( + table: UnityCatalogSQLGenericTable, conn: Connection +) -> Optional[int]: + name = ".".join( + conn.dialect.identifier_preparer.quote(c) + for c in [table.ref.catalog, table.ref.schema, table.ref.table] + ) + row = conn.execute(f"DESCRIBE DETAIL {name}").fetchone() + if row is None: + return None + else: + try: + return int(row._asdict()["sizeInBytes"]) + except Exception: + return None diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py 
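The `as_completed(..., timeout=...)` loop above is the standard way to put an overall deadline on a batch of submitted futures while still consuming results as they finish. A self-contained sketch (`probe` is a made-up stand-in for the per-table request); note that `as_completed` raises `concurrent.futures.TimeoutError`, which only became an alias of the builtin `TimeoutError` in Python 3.11, so catching the futures class is the safer spelling on older interpreters:

import concurrent.futures
from concurrent.futures import ThreadPoolExecutor, as_completed


def probe(i: int) -> int:
    # stand-in for a per-table metadata query
    return i * i


results = []
with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(probe, i) for i in range(100)]
    try:
        for i, completed in enumerate(as_completed(futures, timeout=30)):
            results.append(completed.result())
            if i > 0 and i % 10 == 0:
                print(f"finished {i} probes")
    except concurrent.futures.TimeoutError:
        # overall deadline passed; keep whatever finished in time
        print("timed out waiting for remaining probes")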
b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py index 4153d9dd88eb8..7f19b6e2103ea 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py @@ -2,15 +2,13 @@ from typing import Tuple from datahub.ingestion.api.report import EntityFilterReport -from datahub.ingestion.source.state.stale_entity_removal_handler import ( - StaleEntityRemovalSourceReport, -) +from datahub.ingestion.source.sql.sql_generic_profiler import ProfilingSqlReport from datahub.ingestion.source_report.ingestion_stage import IngestionStageReport from datahub.utilities.lossy_collections import LossyDict, LossyList @dataclass -class UnityCatalogReport(IngestionStageReport, StaleEntityRemovalSourceReport): +class UnityCatalogReport(IngestionStageReport, ProfilingSqlReport): metastores: EntityFilterReport = EntityFilterReport.field(type="metastore") catalogs: EntityFilterReport = EntityFilterReport.field(type="catalog") schemas: EntityFilterReport = EntityFilterReport.field(type="schema") @@ -36,5 +34,6 @@ class UnityCatalogReport(IngestionStageReport, StaleEntityRemovalSourceReport): profile_table_errors: LossyDict[str, LossyList[Tuple[str, str]]] = field( default_factory=LossyDict ) + num_profile_missing_size_in_bytes: int = 0 num_profile_failed_unsupported_column_type: int = 0 num_profile_failed_int_casts: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 44b5bbbcb0ceb..03b4f61a512d0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -2,7 +2,6 @@ import re import time from concurrent.futures import ThreadPoolExecutor -from datetime import timedelta from typing import Dict, Iterable, List, Optional, Set, Union from urllib.parse import urljoin @@ -52,9 +51,14 @@ from datahub.ingestion.source.state.stateful_ingestion_base import ( StatefulIngestionSourceBase, ) -from datahub.ingestion.source.unity.config import UnityCatalogSourceConfig +from datahub.ingestion.source.unity.analyze_profiler import UnityCatalogAnalyzeProfiler +from datahub.ingestion.source.unity.config import ( + UnityCatalogAnalyzeProfilerConfig, + UnityCatalogGEProfilerConfig, + UnityCatalogSourceConfig, +) from datahub.ingestion.source.unity.connection_test import UnityCatalogConnectionTest -from datahub.ingestion.source.unity.profiler import UnityCatalogProfiler +from datahub.ingestion.source.unity.ge_profiler import UnityCatalogGEProfiler from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy from datahub.ingestion.source.unity.proxy_types import ( DATA_TYPE_REGISTRY, @@ -170,6 +174,9 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): self.view_refs: Set[TableReference] = set() self.notebooks: FileBackedDict[Notebook] = FileBackedDict() + # Global map of tables, for profiling + self.tables: FileBackedDict[Table] = FileBackedDict() + @staticmethod def test_connection(config_dict: dict) -> TestConnectionReport: return UnityCatalogConnectionTest(config_dict).get_connection_test() @@ -233,16 +240,24 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: if self.config.is_profiling_enabled(): self.report.report_ingestion_stage_start("Wait on warehouse") assert wait_on_warehouse - timeout = timedelta(seconds=self.config.profiling.max_wait_secs) - wait_on_warehouse.result(timeout) - profiling_extractor 
= UnityCatalogProfiler( - self.config.profiling, - self.report, - self.unity_catalog_api_proxy, - self.gen_dataset_urn, - ) + wait_on_warehouse.result() + self.report.report_ingestion_stage_start("Profiling") - yield from profiling_extractor.get_workunits(self.table_refs) + if isinstance(self.config.profiling, UnityCatalogAnalyzeProfilerConfig): + yield from UnityCatalogAnalyzeProfiler( + self.config.profiling, + self.report, + self.unity_catalog_api_proxy, + self.gen_dataset_urn, + ).get_workunits(self.table_refs) + elif isinstance(self.config.profiling, UnityCatalogGEProfilerConfig): + yield from UnityCatalogGEProfiler( + sql_common_config=self.config, + profiling_config=self.config.profiling, + report=self.report, + ).get_workunits(list(self.tables.values())) + else: + raise ValueError("Unknown profiling config method") def build_service_principal_map(self) -> None: try: @@ -358,6 +373,16 @@ def process_tables(self, schema: Schema) -> Iterable[MetadataWorkUnit]: self.report.tables.dropped(table.id, f"table ({table.table_type})") continue + if ( + self.config.is_profiling_enabled() + and self.config.is_ge_profiling() + and self.config.profiling.pattern.allowed( + table.ref.qualified_table_name + ) + and not table.is_view + ): + self.tables[table.ref.qualified_table_name] = table + if table.is_view: self.view_refs.add(table.ref) else: diff --git a/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json b/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json index 38b03ce238d1c..a86ed53406e40 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json @@ -2254,30 +2254,17 @@ { "fieldPath": "id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "description", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "customer_id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 } ] } @@ -2625,8 +2612,7 @@ { "fieldPath": "col", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 } ] } @@ -2655,8 +2641,7 @@ { "fieldPath": "dummy", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 } ] } @@ -2738,4 +2723,4 @@ "lastRunId": "no-run-id-provided" } } -] \ No newline at end of file +] diff --git a/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json b/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json index 5cfba57247bd3..b5ebca424d9a2 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": 
"mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -80,7 +84,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -95,7 +100,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -110,7 +116,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -230,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -247,7 +255,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,7 +273,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -284,7 +294,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -299,7 +310,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -395,7 +407,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -412,7 +425,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -429,7 +443,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -449,7 +464,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -572,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -593,37 +610,25 @@ { "fieldPath": "id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "description", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "customer_id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 } ] } }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json b/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json index 7597013bd873a..634e04984986d 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { 
"lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -93,7 +98,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -213,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -230,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -250,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -265,7 +274,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -361,7 +371,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -378,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -398,7 +410,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -420,88 +433,44 @@ "fieldPath": "id", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "min": "1", - "max": "5", - "mean": "3.0", - "median": "3", - "stdev": "1.5811388300841898", - "sampleValues": [ - "1", - "2", - "3", - "4", - "5" - ] + "nullCount": 0 }, { "fieldPath": "company", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "sampleValues": [ - "Company A", - "Company B", - "Company C", - "Company D", - "Company E" - ] + "nullCount": 0 }, { "fieldPath": "last_name", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "sampleValues": [ - "Axen", - "Bedecs", - "Donnell", - "Gratacos Solsona", - "Lee" - ] + "nullCount": 0 }, { "fieldPath": "first_name", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "sampleValues": [ - "Anna", - "Antonio", - "Christina", - "Martin", - "Thomas" - ] + "nullCount": 0 }, { "fieldPath": "email_address", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "priority", "uniqueCount": 3, "uniqueProportion": 0.75, - "nullCount": 0, - "min": "3.8", - "max": "4.9", - "mean": "4.175000011920929", - "median": "4.0", - "stdev": "0.49244294899530355", - "sampleValues": [ - "4.0", - "4.9", - "4.0", - "3.8" - ] + "nullCount": 
0 } ] } }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -522,37 +491,25 @@ { "fieldPath": "id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "description", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "customer_id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 } ] } }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/test_unity_catalog_config.py b/metadata-ingestion/tests/unit/test_unity_catalog_config.py index 4be6f60171844..4098ed4074de2 100644 --- a/metadata-ingestion/tests/unit/test_unity_catalog_config.py +++ b/metadata-ingestion/tests/unit/test_unity_catalog_config.py @@ -38,7 +38,11 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", - "profiling": {"enabled": True, "warehouse_id": "my_warehouse_id"}, + "profiling": { + "enabled": True, + "method": "ge", + "warehouse_id": "my_warehouse_id", + }, } ) assert config.profiling.enabled is True @@ -47,7 +51,7 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", - "profiling": {"enabled": False}, + "profiling": {"enabled": False, "method": "ge"}, } ) assert config.profiling.enabled is False From 16fe22aafa13f9cbff33e4016658cf06df5b9adf Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Wed, 6 Dec 2023 15:21:56 -0500 Subject: [PATCH 208/792] feat(ui/last-updated): Calculate last updated time as max(properties time, operation time) (#9242) --- .../types/dataset/mappers/DatasetMapper.java | 11 +- .../src/main/resources/entity.graphql | 9 +- .../dataset/mappers/DatasetMapperTest.java | 26 ++- datahub-web-react/package.json | 1 + datahub-web-react/src/Mocks.tsx | 7 + .../src/app/entity/dataset/DatasetEntity.tsx | 5 +- .../stats/DatasetStatsSummarySubHeader.tsx | 5 +- .../src/app/entity/dataset/shared/utils.ts | 13 ++ .../AutoCompleteTooltipContent.tsx | 5 +- .../src/graphql/fragments.graphql | 4 + datahub-web-react/src/graphql/search.graphql | 12 + .../datahub/ingestion/source/unity/source.py | 38 +--- .../unity/unity_catalog_mces_golden.json | 207 ------------------ .../tests/unit/serde/test_serde.py | 2 +- .../openapi-entity-servlet/build.gradle | 2 +- 15 files changed, 79 insertions(+), 268 deletions(-) create mode 100644 datahub-web-react/src/app/entity/dataset/shared/utils.ts diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 8296bc8244995..7fa1decdf7f55 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -17,6 +17,7 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; 
+import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; @@ -200,10 +201,12 @@ private void mapDatasetProperties( } TimeStamp lastModified = gmsProperties.getLastModified(); if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } + Urn actor = lastModified.getActor(); + properties.setLastModified( + new AuditStamp(lastModified.getTime(), actor == null ? null : actor.toString())); + properties.setLastModifiedActor(actor == null ? null : actor.toString()); + } else { + properties.setLastModified(new AuditStamp(0L, null)); } } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 4f3769d908815..feb344154d11e 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -1789,12 +1789,13 @@ type DatasetProperties { """ Last Modified timestamp millis associated with the Dataset """ - lastModified: Long + lastModified: AuditStamp! """ - Actor associated with the Dataset's lastModified timestamp + Actor associated with the Dataset's lastModified timestamp. + Deprecated - Use lastModified.actor instead. """ - lastModifiedActor: String + lastModifiedActor: String @deprecated } @@ -11234,4 +11235,4 @@ input UpdateOwnershipTypeInput { The description of the Custom Ownership Type """ description: String -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index 1959ae6d43208..b28dd287e3fe4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -2,6 +2,7 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetProperties; import com.linkedin.entity.Aspect; @@ -58,7 +59,8 @@ public void testDatasetPropertiesMapperWithCreatedAndLastModified() { expectedDatasetProperties.setQualifiedName("Test QualifiedName"); expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); - expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setLastModified( + new AuditStamp(20L, TEST_LAST_MODIFIED_ACTOR_URN.toString())); expectedDatasetProperties.setCreated(10L); expected.setProperties(expectedDatasetProperties); @@ -68,7 +70,11 @@ public void testDatasetPropertiesMapperWithCreatedAndLastModified() { actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); Assert.assertEquals( - actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + 
actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); Assert.assertEquals( @@ -102,7 +108,7 @@ public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { expectedDatasetProperties.setName("Test"); expectedDatasetProperties.setLastModifiedActor(null); expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(null); + expectedDatasetProperties.setLastModified(new AuditStamp(0L, null)); expectedDatasetProperties.setCreated(null); expected.setProperties(expectedDatasetProperties); @@ -110,7 +116,11 @@ public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); Assert.assertEquals( - actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); Assert.assertEquals( @@ -152,7 +162,7 @@ public void testDatasetPropertiesMapperWithoutTimestampActors() { expectedDatasetProperties.setName("Test"); expectedDatasetProperties.setLastModifiedActor(null); expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setLastModified(new AuditStamp(20L, null)); expectedDatasetProperties.setCreated(10L); expected.setProperties(expectedDatasetProperties); @@ -160,7 +170,11 @@ public void testDatasetPropertiesMapperWithoutTimestampActors() { Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); Assert.assertEquals( - actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); Assert.assertEquals( diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index c26338ea285fb..b949c9ab9d11f 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -92,6 +92,7 @@ "scripts": { "analyze": "source-map-explorer 'dist/static/js/*.js'", "start": "yarn run generate && BROWSER=none REACT_APP_MOCK=false craco start", + "start:dev": "yarn run generate && DISABLE_ESLINT_PLUGIN=true BROWSER=none REACT_APP_MOCK=false craco start", "start:mock": "yarn run generate && BROWSER=none REACT_APP_MOCK=true craco start", "start:e2e": "REACT_APP_MOCK=cy BROWSER=none PORT=3010 craco start", "ec2-dev": "yarn run generate && CI=true;export CI;BROWSER=none craco start", diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index ada9a06ab5b95..17173fd28e07f 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -437,6 +437,11 @@ export const dataset3 = { }, ], externalUrl: 'https://data.hub', + lastModified: { + __typename: 'AuditStamp', + time: 0, + actor: null, + }, }, parentContainers: { __typename: 
'ParentContainersResult', @@ -702,6 +707,7 @@ export const dataset5 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:5' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; @@ -716,6 +722,7 @@ export const dataset6 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:6' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index 7d40b97a66b3b..f60eb95937452 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -33,6 +33,7 @@ import DataProductSection from '../shared/containers/profile/sidebar/DataProduct import { getDataProduct } from '../shared/utils'; import AccessManagement from '../shared/tabs/Dataset/AccessManagement/AccessManagement'; import { matchedFieldPathsRenderer } from '../../search/matches/matchedFieldPathsRenderer'; +import { getLastUpdatedMs } from './shared/utils'; const SUBTYPES = { VIEW: 'view', @@ -310,9 +311,7 @@ export class DatasetEntity implements Entity<Dataset> { rowCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].rowCount} columnCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].columnCount} sizeInBytes={(data as any).lastProfile?.length && (data as any).lastProfile[0].sizeInBytes} - lastUpdatedMs={ - (data as any).lastOperation?.length && (data as any).lastOperation[0].lastUpdatedTimestamp - } + lastUpdatedMs={getLastUpdatedMs(data.properties, (data as any)?.lastOperation)} health={data.health} degree={(result as any).degree} paths={(result as any).paths} diff --git a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx index 36b7d251950b4..c1e2c1aa298b6 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx @@ -3,6 +3,7 @@ import { DatasetStatsSummary as DatasetStatsSummaryObj } from '../../../../../.. import { useBaseEntity } from '../../../../shared/EntityContext'; import { GetDatasetQuery } from '../../../../../../graphql/dataset.generated'; import { DatasetStatsSummary } from '../../../shared/DatasetStatsSummary'; +import { getLastUpdatedMs } from '../../../shared/utils'; export const DatasetStatsSummarySubHeader = () => { const result = useBaseEntity<GetDatasetQuery>(); @@ -13,15 +14,13 @@ export const DatasetStatsSummarySubHeader = () => { const maybeLastProfile = dataset?.datasetProfiles && dataset.datasetProfiles.length ? dataset.datasetProfiles[0] : undefined; - const maybeLastOperation = dataset?.operations && dataset.operations.length ? 
dataset.operations[0] : undefined; - const rowCount = maybeLastProfile?.rowCount; const columnCount = maybeLastProfile?.columnCount; const sizeInBytes = maybeLastProfile?.sizeInBytes; const totalSqlQueries = dataset?.usageStats?.aggregations?.totalSqlQueries; const queryCountLast30Days = maybeStatsSummary?.queryCountLast30Days; const uniqueUserCountLast30Days = maybeStatsSummary?.uniqueUserCountLast30Days; - const lastUpdatedMs = maybeLastOperation?.lastUpdatedTimestamp; + const lastUpdatedMs = getLastUpdatedMs(dataset?.properties, dataset?.operations); return ( <DatasetStatsSummary diff --git a/datahub-web-react/src/app/entity/dataset/shared/utils.ts b/datahub-web-react/src/app/entity/dataset/shared/utils.ts new file mode 100644 index 0000000000000..fedd54385e7ab --- /dev/null +++ b/datahub-web-react/src/app/entity/dataset/shared/utils.ts @@ -0,0 +1,13 @@ +import { DatasetProperties, Operation } from '../../../../types.generated'; + +export function getLastUpdatedMs( + properties: Pick<DatasetProperties, 'lastModified'> | null | undefined, + operations: Pick<Operation, 'lastUpdatedTimestamp'>[] | null | undefined, +): number | undefined { + return ( + Math.max( + properties?.lastModified?.time || 0, + (operations?.length && operations[0].lastUpdatedTimestamp) || 0, + ) || undefined + ); +} diff --git a/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx b/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx index dfe32c7805a9b..4e40c29722c4d 100644 --- a/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx +++ b/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx @@ -3,6 +3,7 @@ import React from 'react'; import styled from 'styled-components'; import { Dataset, Entity, EntityType } from '../../../types.generated'; import { DatasetStatsSummary } from '../../entity/dataset/shared/DatasetStatsSummary'; +import { getLastUpdatedMs } from '../../entity/dataset/shared/utils'; import { useEntityRegistry } from '../../useEntityRegistry'; import { ArrowWrapper } from './ParentContainers'; @@ -48,9 +49,7 @@ export default function AutoCompleteTooltipContent({ entity }: Props) { rowCount={(entity as any).lastProfile?.length && (entity as any).lastProfile[0].rowCount} columnCount={(entity as any).lastProfile?.length && (entity as any).lastProfile[0].columnCount} sizeInBytes={(entity as any).lastProfile?.length && (entity as any).lastProfile[0].sizeInBytes} - lastUpdatedMs={ - (entity as any).lastOperation?.length && (entity as any).lastOperation[0].lastUpdatedTimestamp - } + lastUpdatedMs={getLastUpdatedMs((entity as any)?.properties, (entity as any)?.lastOperation)} queryCountLast30Days={(entity as Dataset).statsSummary?.queryCountLast30Days} uniqueUserCountLast30Days={(entity as Dataset).statsSummary?.uniqueUserCountLast30Days} mode="tooltip-content" diff --git a/datahub-web-react/src/graphql/fragments.graphql b/datahub-web-react/src/graphql/fragments.graphql index d693779d1169b..b77ef9d1ad29c 100644 --- a/datahub-web-react/src/graphql/fragments.graphql +++ b/datahub-web-react/src/graphql/fragments.graphql @@ -240,6 +240,10 @@ fragment nonRecursiveDatasetFields on Dataset { value } externalUrl + lastModified { + time + actor + } } editableProperties { description diff --git a/datahub-web-react/src/graphql/search.graphql b/datahub-web-react/src/graphql/search.graphql index 6ca2a78f93d25..7034116f76129 100644 --- a/datahub-web-react/src/graphql/search.graphql +++ 
b/datahub-web-react/src/graphql/search.graphql @@ -13,6 +13,10 @@ fragment autoCompleteFields on Entity { properties { name qualifiedName + lastModified { + time + actor + } } parentContainers { ...parentContainersFields @@ -39,6 +43,10 @@ fragment autoCompleteFields on Entity { description qualifiedName externalUrl + lastModified { + time + actor + } } } } @@ -336,6 +344,10 @@ fragment nonSiblingsDatasetSearchFields on Dataset { value } externalUrl + lastModified { + time + actor + } } ownership { ...ownershipFields diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 03b4f61a512d0..d1940c1d57607 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -1,6 +1,5 @@ import logging import re -import time from concurrent.futures import ThreadPoolExecutor from typing import Dict, Iterable, List, Optional, Set, Union from urllib.parse import urljoin @@ -87,8 +86,6 @@ DomainsClass, MySqlDDLClass, NullTypeClass, - OperationClass, - OperationTypeClass, OwnerClass, OwnershipClass, OwnershipTypeClass, @@ -402,7 +399,6 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn sub_type = self._create_table_sub_type_aspect(table) schema_metadata = self._create_schema_metadata_aspect(table) - operation = self._create_table_operation_aspect(table) domain = self._get_domain_aspect(dataset_name=table.ref.qualified_table_name) ownership = self._create_table_ownership_aspect(table) data_platform_instance = self._create_data_platform_instance_aspect() @@ -424,7 +420,6 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn view_props, sub_type, schema_metadata, - operation, domain, ownership, data_platform_instance, @@ -696,10 +691,10 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: int(table.created_at.timestamp() * 1000), make_user_urn(table.created_by) ) last_modified = created - if table.updated_at and table.updated_by is not None: + if table.updated_at: last_modified = TimeStampClass( int(table.updated_at.timestamp() * 1000), - make_user_urn(table.updated_by), + table.updated_by and make_user_urn(table.updated_by), ) return DatasetPropertiesClass( @@ -712,35 +707,6 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: externalUrl=f"{self.external_url_base}/{table.ref.external_path}", ) - def _create_table_operation_aspect(self, table: Table) -> OperationClass: - """Produce an operation aspect for a table. - - If a last updated time is present, we produce an update operation. - Otherwise, we produce a create operation. We do this in addition to - setting the last updated time in the dataset properties aspect, as - the UI is currently missing the ability to display the last updated - from the properties aspect. 
- """ - - reported_time = int(time.time() * 1000) - - operation = OperationClass( - timestampMillis=reported_time, - lastUpdatedTimestamp=int(table.created_at.timestamp() * 1000), - actor=make_user_urn(table.created_by), - operationType=OperationTypeClass.CREATE, - ) - - if table.updated_at and table.updated_by is not None: - operation = OperationClass( - timestampMillis=reported_time, - lastUpdatedTimestamp=int(table.updated_at.timestamp() * 1000), - actor=make_user_urn(table.updated_by), - operationType=OperationTypeClass.UPDATE, - ) - - return operation - def _create_table_ownership_aspect(self, table: Table) -> Optional[OwnershipClass]: owner_urn = self.get_owner_urn(table.owner) if owner_urn is not None: diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 2e92215d70b99..d25c86a3a1f9a 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -524,29 +524,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", @@ -877,29 +854,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", @@ -1230,29 +1184,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", @@ -1719,29 +1650,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", @@ -2072,29 +1980,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", @@ -2425,29 +2310,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", @@ -2914,29 +2776,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", @@ -3267,29 +3106,6 @@ 
"lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", @@ -3620,29 +3436,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", diff --git a/metadata-ingestion/tests/unit/serde/test_serde.py b/metadata-ingestion/tests/unit/serde/test_serde.py index d2d6a0bdda5b9..53ffdf46a6d1e 100644 --- a/metadata-ingestion/tests/unit/serde/test_serde.py +++ b/metadata-ingestion/tests/unit/serde/test_serde.py @@ -100,7 +100,7 @@ def test_serde_to_avro( fo.seek(0) in_records = list(fastavro.reader(fo, return_record_name=True)) in_mces = [ - MetadataChangeEventClass.from_obj(record, tuples=True) + MetadataChangeEventClass.from_obj(record, tuples=True) # type: ignore for record in in_records ] diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle index dbec469085b07..00353392dedef 100644 --- a/metadata-service/openapi-entity-servlet/build.gradle +++ b/metadata-service/openapi-entity-servlet/build.gradle @@ -77,4 +77,4 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents, 'delegatePattern' : "false" ] } -tasks.getByName("compileJava").dependsOn(openApiGenerate) \ No newline at end of file +tasks.getByName("compileJava").dependsOn(openApiGenerate) From dc6f16984673948f45d466db576eb74b2f45e6f8 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 7 Dec 2023 08:59:25 +0900 Subject: [PATCH 209/792] docs: add youtube link to townhall button on docs (#9381) --- .../_components/TownhallButton/index.jsx | 24 +++++++----- .../TownhallButton/townhallbutton.module.scss | 37 +++++++++++++------ 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/docs-website/src/pages/_components/TownhallButton/index.jsx b/docs-website/src/pages/_components/TownhallButton/index.jsx index 11dc2dc5c8476..22643846f2cf2 100644 --- a/docs-website/src/pages/_components/TownhallButton/index.jsx +++ b/docs-website/src/pages/_components/TownhallButton/index.jsx @@ -11,20 +11,26 @@ const TownhallButton = () => { 
const daysUntilLastThursday = lastThursday - currentDay; - let showButton = false; - let currentMonth = ''; + let buttonText = ''; + let buttonLink = ''; + let townhallSeasonClass = ''; if (daysUntilLastThursday > 0 && daysUntilLastThursday <= 14) { - showButton = true; - currentMonth = new Intl.DateTimeFormat('en-US', { month: 'long' }).format(today); + const currentMonth = new Intl.DateTimeFormat('en-US', { month: 'long' }).format(today); + buttonText = `Join ${currentMonth} Townhall! ✨`; + buttonLink = 'http://rsvp.datahubproject.io'; + townhallSeasonClass = 'townhall-season' + } else { + buttonText = 'Watch Our Latest Townhall! 👀'; + buttonLink = 'https://www.youtube.com/playlist?list=PLdCtLs64vZvHTXGqybmOfyxXbGDn2Reb9'; + townhallSeasonClass = 'non-townhall-season' } + return ( - showButton && ( - <Link to="http://rsvp.datahubproject.io" className={clsx('button button--primary button--md', styles.feature)}> - Join {currentMonth} Townhall! ✨ - </Link> - ) + <Link to={buttonLink} className={clsx('button button--primary button--md', styles[townhallSeasonClass])}> + {buttonText} + </Link> ); }; diff --git a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss index 951bc99015302..3d30c65f89539 100644 --- a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss +++ b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss @@ -1,14 +1,29 @@ -.feature { - color: white; - border: 1px solid transparent; - background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); - background-origin: border-box; - opacity: 90%; - - &:hover { - opacity: 100%; - background: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + .townhall-season { + color: white; + border: 1px solid transparent; background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); background-origin: border-box; + opacity: 90%; + + &:hover { + opacity: 100%; + background: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-origin: border-box; + } } -} + + .non-townhall-season { + color: white; + border: 1px solid transparent; + background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); + background-origin: border-box; + opacity: 90%; + + &:hover { + opacity: 100%; + background: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); + background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); + background-origin: border-box; + } + } \ No newline at end of file From 1ce752ed6e825c12ad373cd4063083b95c252a56 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 7 Dec 2023 16:14:09 +0900 Subject: [PATCH 210/792] fix: set new sidebar section (#9393) --- docs-website/sidebars.js | 773 +++++++++++++++------------- docs-website/src/styles/global.scss | 25 +- 2 files changed, 424 insertions(+), 374 deletions(-) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index c70a609a4cc4b..67943ba8d7016 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -8,6 +8,11 @@ module.exports = { // operators overviewSidebar: [ + { + type: "html", + value: "<div>Getting Started</div>", + defaultStyle: true, + }, { label: "What Is DataHub?", type: "category", @@ -31,82 +36,187 @@ module.exports = { }, { type: "category", - label: "Integrations", - link: { type: "doc", id: "metadata-ingestion/README" }, + label: "Features", + link: { + type: 
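The button logic above keys off the last Thursday of the current month (`lastThursday` and `currentDay` are presumably computed in the unshown lines just above this hunk). For reference, a small Python sketch of that calculation — take the month's final day and step back to the nearest Thursday:

import calendar
from datetime import date, timedelta


def last_thursday(today: date) -> date:
    last_day = calendar.monthrange(today.year, today.month)[1]
    end_of_month = date(today.year, today.month, last_day)
    # Monday == 0 ... Thursday == 3; walk back to the closest Thursday
    return end_of_month - timedelta(days=(end_of_month.weekday() - 3) % 7)


assert last_thursday(date(2023, 12, 7)) == date(2023, 12, 28)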
"generated-index", + title: "Feature Guides", + description: "Learn about the features of DataHub.", + }, items: [ - // The purpose of this section is to provide a deeper understanding of how ingestion works. - // Readers should be able to find details for ingesting from all systems, apply transformers, understand sinks, - // and understand key concepts of the Ingestion Framework (Sources, Sinks, Transformers, and Recipes) + "docs/ui-ingestion", + "docs/how/search", + "docs/schema-history", + // "docs/how/ui-tabs-guide", + "docs/domains", + "docs/dataproducts", + "docs/glossary/business-glossary", + "docs/tags", + "docs/ownership/ownership-types", + "docs/authorization/access-policies-guide", + "docs/features/dataset-usage-and-query-history", + "docs/posts", + "docs/sync-status", + "docs/generated/lineage/lineage-feature-guide", + { + type: "doc", + id: "docs/tests/metadata-tests", + className: "saasOnly", + }, + "docs/act-on-metadata/impact-analysis", { - "Quickstart Guides": [ - "metadata-ingestion/cli-ingestion", + label: "Observability", + type: "category", + items: [ { - BigQuery: [ - "docs/quick-ingestion-guides/bigquery/overview", - "docs/quick-ingestion-guides/bigquery/setup", - "docs/quick-ingestion-guides/bigquery/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/freshness-assertions", + className: "saasOnly", }, { - Redshift: [ - "docs/quick-ingestion-guides/redshift/overview", - "docs/quick-ingestion-guides/redshift/setup", - "docs/quick-ingestion-guides/redshift/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/volume-assertions", + className: "saasOnly", }, { - Snowflake: [ - "docs/quick-ingestion-guides/snowflake/overview", - "docs/quick-ingestion-guides/snowflake/setup", - "docs/quick-ingestion-guides/snowflake/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/custom-sql-assertions", + className: "saasOnly", }, { - Tableau: [ - "docs/quick-ingestion-guides/tableau/overview", - "docs/quick-ingestion-guides/tableau/setup", - "docs/quick-ingestion-guides/tableau/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/column-assertions", + className: "saasOnly", }, + ], + }, + { + Guides: ["docs/features/feature-guides/ui-lineage"], + }, + ], + }, + { + label: "Managed DataHub", + type: "category", + collapsed: true, + link: { + type: "doc", + id: "docs/managed-datahub/managed-datahub-overview", + }, + items: [ + "docs/managed-datahub/welcome-acryl", + { + type: "doc", + id: "docs/managed-datahub/saas-slack-setup", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/approval-workflows", + className: "saasOnly", + }, + { + "Metadata Ingestion With Acryl": [ + "docs/managed-datahub/metadata-ingestion-with-acryl/ingestion", + ], + }, + { + "DataHub API": [ { - PowerBI: [ - "docs/quick-ingestion-guides/powerbi/overview", - "docs/quick-ingestion-guides/powerbi/setup", - "docs/quick-ingestion-guides/powerbi/configuration", - ], + type: "doc", + id: "docs/managed-datahub/datahub-api/entity-events-api", + className: "saasOnly", }, { - Looker: [ - "docs/quick-ingestion-guides/looker/overview", - "docs/quick-ingestion-guides/looker/setup", - "docs/quick-ingestion-guides/looker/configuration", + "GraphQL API": [ + "docs/managed-datahub/datahub-api/graphql-api/getting-started", + { + type: "doc", + id: "docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", + className: "saasOnly", + }, ], }, ], }, - "metadata-ingestion/recipe_overview", { - type: "category", - label: 
"Sources", - link: { type: "doc", id: "metadata-ingestion/source_overview" }, - items: [ - // collapse these; add push-based at top + Integrations: [ { type: "doc", - id: "docs/lineage/airflow", - label: "Airflow", + id: "docs/managed-datahub/integrations/aws-privatelink", + className: "saasOnly", }, - //"docker/airflow/local_airflow", - "metadata-integration/java/spark-lineage/README", - "metadata-ingestion/integration_docs/great-expectations", - "metadata-integration/java/datahub-protobuf/README", - //"metadata-ingestion/source-docs-template", { - type: "autogenerated", - dirName: "docs/generated/ingestion/sources", // '.' means the current docs folder + type: "doc", + id: "docs/managed-datahub/integrations/oidc-sso-integration", + className: "saasOnly", + }, + ], + }, + { + "Operator Guide": [ + { + type: "doc", + id: "docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge", + className: "saasOnly", }, ], }, + { + type: "doc", + id: "docs/managed-datahub/chrome-extension", + }, + { + type: "doc", + id: "docs/managed-datahub/subscription-and-notification", + className: "saasOnly", + }, + { + "Managed DataHub Release History": [ + "docs/managed-datahub/release-notes/v_0_2_13", + "docs/managed-datahub/release-notes/v_0_2_12", + "docs/managed-datahub/release-notes/v_0_2_11", + "docs/managed-datahub/release-notes/v_0_2_10", + "docs/managed-datahub/release-notes/v_0_2_9", + "docs/managed-datahub/release-notes/v_0_2_8", + "docs/managed-datahub/release-notes/v_0_2_7", + "docs/managed-datahub/release-notes/v_0_2_6", + "docs/managed-datahub/release-notes/v_0_2_5", + "docs/managed-datahub/release-notes/v_0_2_4", + "docs/managed-datahub/release-notes/v_0_2_3", + "docs/managed-datahub/release-notes/v_0_2_2", + "docs/managed-datahub/release-notes/v_0_2_1", + "docs/managed-datahub/release-notes/v_0_2_0", + "docs/managed-datahub/release-notes/v_0_1_73", + "docs/managed-datahub/release-notes/v_0_1_72", + "docs/managed-datahub/release-notes/v_0_1_70", + "docs/managed-datahub/release-notes/v_0_1_69", + ], + }, + ], + }, + { + type: "html", + value: "<div>Integrations</div>", + defaultStyle: true, + }, + { + type: "category", + link: { + type: "doc", + id: "metadata-ingestion/README", + }, + label: "Overview", + items: [ + { + type: "doc", + label: "Recipe", + id: "metadata-ingestion/recipe_overview", + }, { type: "category", label: "Sinks", @@ -127,30 +237,104 @@ module.exports = { }, items: ["metadata-ingestion/docs/transformer/dataset_transformer"], }, + ], + }, + { + "Quickstart Guides": [ + "metadata-ingestion/cli-ingestion", { - "Advanced Guides": [ - { - "Scheduling Ingestion": [ - "metadata-ingestion/schedule_docs/intro", - "metadata-ingestion/schedule_docs/cron", - "metadata-ingestion/schedule_docs/airflow", - "metadata-ingestion/schedule_docs/kubernetes", - ], - }, - - "docs/platform-instances", - "metadata-ingestion/docs/dev_guides/stateful", - "metadata-ingestion/docs/dev_guides/classification", - "metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source", - "metadata-ingestion/docs/dev_guides/sql_profiles", - "metadata-ingestion/docs/dev_guides/profiling_ingestions", + BigQuery: [ + "docs/quick-ingestion-guides/bigquery/overview", + "docs/quick-ingestion-guides/bigquery/setup", + "docs/quick-ingestion-guides/bigquery/configuration", + ], + }, + { + Redshift: [ + "docs/quick-ingestion-guides/redshift/overview", + 
"docs/quick-ingestion-guides/redshift/setup", + "docs/quick-ingestion-guides/redshift/configuration", + ], + }, + { + Snowflake: [ + "docs/quick-ingestion-guides/snowflake/overview", + "docs/quick-ingestion-guides/snowflake/setup", + "docs/quick-ingestion-guides/snowflake/configuration", + ], + }, + { + Tableau: [ + "docs/quick-ingestion-guides/tableau/overview", + "docs/quick-ingestion-guides/tableau/setup", + "docs/quick-ingestion-guides/tableau/configuration", + ], + }, + { + PowerBI: [ + "docs/quick-ingestion-guides/powerbi/overview", + "docs/quick-ingestion-guides/powerbi/setup", + "docs/quick-ingestion-guides/powerbi/configuration", + ], + }, + { + Looker: [ + "docs/quick-ingestion-guides/looker/overview", + "docs/quick-ingestion-guides/looker/setup", + "docs/quick-ingestion-guides/looker/configuration", ], }, ], }, { type: "category", - label: "Deployment", + label: "Sources", + link: { type: "doc", id: "metadata-ingestion/source_overview" }, + items: [ + // collapse these; add push-based at top + { + type: "doc", + id: "docs/lineage/airflow", + label: "Airflow", + }, + //"docker/airflow/local_airflow", + "metadata-integration/java/spark-lineage/README", + "metadata-ingestion/integration_docs/great-expectations", + "metadata-integration/java/datahub-protobuf/README", + //"metadata-ingestion/source-docs-template", + { + type: "autogenerated", + dirName: "docs/generated/ingestion/sources", // '.' means the current docs folder + }, + ], + }, + { + "Advanced Guides": [ + { + "Scheduling Ingestion": [ + "metadata-ingestion/schedule_docs/intro", + "metadata-ingestion/schedule_docs/cron", + "metadata-ingestion/schedule_docs/airflow", + "metadata-ingestion/schedule_docs/kubernetes", + ], + }, + + "docs/platform-instances", + "metadata-ingestion/docs/dev_guides/stateful", + "metadata-ingestion/docs/dev_guides/classification", + "metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source", + "metadata-ingestion/docs/dev_guides/sql_profiles", + "metadata-ingestion/docs/dev_guides/profiling_ingestions", + ], + }, + { + type: "html", + value: "<div>Deployment</div>", + defaultStyle: true, + }, + { + type: "category", + label: "Deployment Guides", link: { type: "generated-index", title: "Deployment Guides", @@ -158,109 +342,111 @@ module.exports = { "Learn how to deploy DataHub to your environment, set up authentication, manage upgrades, and more.", }, items: [ - // The purpose of this section is to provide the minimum steps required to deploy DataHub to the vendor of your choosing "docs/deploy/aws", "docs/deploy/gcp", "docs/deploy/azure", "docker/README", "docs/deploy/kubernetes", + ], + }, + { + type: "category", + label: "Advanced Guides", + items: [ "docs/deploy/confluent-cloud", "docs/deploy/environment-vars", "docs/how/extract-container-logs", ], }, { - type: "category", - label: "Admin", - items: [ - { - Authentication: [ - "docs/authentication/README", - "docs/authentication/concepts", - "docs/authentication/changing-default-credentials", - "docs/authentication/guides/add-users", - { - "Frontend Authentication": [ - "docs/authentication/guides/jaas", - "docs/authentication/guides/sso/configure-oidc-react", - "docs/authentication/guides/sso/configure-oidc-behind-proxy", - ], - }, - "docs/authentication/introducing-metadata-service-authentication", - "docs/authentication/personal-access-tokens", - ], - }, - { - Authorization: [ - "docs/authorization/README", - "docs/authorization/roles", - "docs/authorization/policies", - "docs/authorization/groups", - ], - }, - { - "Advanced 
Guides": [ - "docs/how/delete-metadata", - "docs/how/configuring-authorization-with-apache-ranger", - "docs/how/backup-datahub", - "docs/how/restore-indices", - "docs/advanced/db-retention", - "docs/advanced/monitoring", - "docs/deploy/telemetry", - "docs/how/kafka-config", - "docs/advanced/no-code-upgrade", - "docs/how/jattach-guide", + type: "html", + value: "<div>Admin</div>", + defaultStyle: true, + }, + { + Authentication: [ + "docs/authentication/README", + "docs/authentication/concepts", + "docs/authentication/changing-default-credentials", + "docs/authentication/guides/add-users", + { + "Frontend Authentication": [ + "docs/authentication/guides/jaas", + "docs/authentication/guides/sso/configure-oidc-react", + "docs/authentication/guides/sso/configure-oidc-behind-proxy", ], }, + "docs/authentication/introducing-metadata-service-authentication", + "docs/authentication/personal-access-tokens", ], }, { - Developers: [ - // The purpose of this section is to provide developers & technical users with - // concrete tutorials for how to work with the DataHub CLI & APIs - { - Architecture: [ - "docs/architecture/architecture", - "docs/components", - "docs/architecture/metadata-ingestion", - "docs/architecture/metadata-serving", - "docs/architecture/docker-containers", - ], - }, + Authorization: [ + "docs/authorization/README", + "docs/authorization/roles", + "docs/authorization/policies", + "docs/authorization/groups", + ], + }, + { + "Advanced Guides": [ + "docs/how/delete-metadata", + "docs/how/configuring-authorization-with-apache-ranger", + "docs/how/backup-datahub", + "docs/how/restore-indices", + "docs/advanced/db-retention", + "docs/advanced/monitoring", + "docs/deploy/telemetry", + "docs/how/kafka-config", + "docs/advanced/no-code-upgrade", + "docs/how/jattach-guide", + ], + }, + { + type: "html", + value: "<div>Developers</div>", + defaultStyle: true, + }, + { + Architecture: [ + "docs/architecture/architecture", + "docs/components", + "docs/architecture/metadata-ingestion", + "docs/architecture/metadata-serving", + "docs/architecture/docker-containers", + ], + }, + { + "Metadata Model": [ + "docs/modeling/metadata-model", + "docs/modeling/extending-the-metadata-model", + "docs/what/mxe", { - "Metadata Model": [ - "docs/modeling/metadata-model", - "docs/modeling/extending-the-metadata-model", - "docs/what/mxe", + Entities: [ { - Entities: [ - { - type: "autogenerated", - dirName: "docs/generated/metamodel/entities", // '.' means the current docs folder - }, - ], + type: "autogenerated", + dirName: "docs/generated/metamodel/entities", // '.' 
means the current docs folder }, ], }, - { - "Developing on DataHub": [ - "docs/developers", - "docs/docker/development", - "metadata-ingestion/developing", - "docs/api/graphql/graphql-endpoint-development", - { - Modules: [ - "datahub-web-react/README", - "datahub-frontend/README", - "datahub-graphql-core/README", - "metadata-service/README", - "metadata-jobs/mae-consumer-job/README", - "metadata-jobs/mce-consumer-job/README", - ], - }, + ], + }, + { + "Developing on DataHub": [ + "docs/developers", + "docs/docker/development", + "metadata-ingestion/developing", + "docs/api/graphql/graphql-endpoint-development", + { + Modules: [ + "datahub-web-react/README", + "datahub-frontend/README", + "datahub-graphql-core/README", + "metadata-service/README", + "metadata-jobs/mae-consumer-job/README", + "metadata-jobs/mce-consumer-job/README", ], }, - "docs/plugins", { Troubleshooting: [ "docs/troubleshooting/quickstart", @@ -268,24 +454,30 @@ module.exports = { "docs/troubleshooting/general", ], }, - { - Advanced: [ - "metadata-ingestion/docs/dev_guides/reporting_telemetry", - "docs/advanced/mcp-mcl", - "docker/datahub-upgrade/README", - "docs/advanced/no-code-modeling", - "datahub-web-react/src/app/analytics/README", - "docs/how/migrating-graph-service-implementation", - "docs/advanced/field-path-spec-v2", - "metadata-ingestion/adding-source", - "docs/how/add-custom-ingestion-source", - "docs/how/add-custom-data-platform", - "docs/advanced/browse-paths-upgrade", - "docs/browseV2/browse-paths-v2", - ], - }, ], }, + { + "Advanced Guides": [ + "metadata-ingestion/docs/dev_guides/reporting_telemetry", + "docs/advanced/mcp-mcl", + "docker/datahub-upgrade/README", + "docs/advanced/no-code-modeling", + "datahub-web-react/src/app/analytics/README", + "docs/how/migrating-graph-service-implementation", + "docs/advanced/field-path-spec-v2", + "metadata-ingestion/adding-source", + "docs/how/add-custom-ingestion-source", + "docs/how/add-custom-data-platform", + "docs/advanced/browse-paths-upgrade", + "docs/browseV2/browse-paths-v2", + "docs/plugins", + ], + }, + { + type: "html", + value: "<div>API & SDKs</div>", + defaultStyle: true, + }, { type: "category", label: "API", @@ -408,6 +600,13 @@ module.exports = { }, ], }, + ], + }, + { + type: "category", + label: "SDK", + link: { type: "doc", id: "docs/api/datahub-apis" }, + items: [ { "Python SDK": [ "metadata-ingestion/as-a-library", @@ -421,237 +620,81 @@ module.exports = { }, ], }, - "metadata-integration/java/as-a-library", - { - "API and SDK Guides": [ - "docs/advanced/patch", - "docs/api/tutorials/datasets", - "docs/api/tutorials/lineage", - "docs/api/tutorials/tags", - "docs/api/tutorials/terms", - "docs/api/tutorials/owners", - "docs/api/tutorials/domains", - "docs/api/tutorials/deprecation", - "docs/api/tutorials/descriptions", - "docs/api/tutorials/custom-properties", - "docs/api/tutorials/ml", - ], - }, - { - type: "category", - label: "DataHub CLI", - link: { type: "doc", id: "docs/cli" }, - items: ["docs/datahub_lite"], - }, { - type: "category", - label: "Datahub Actions", - link: { type: "doc", id: "docs/act-on-metadata" }, - items: [ - "docs/actions/README", - "docs/actions/quickstart", - "docs/actions/concepts", - { - Sources: [ - { - type: "autogenerated", - dirName: "docs/actions/sources", - }, - ], - }, - { - Events: [ - { - type: "autogenerated", - dirName: "docs/actions/events", - }, - ], - }, - { - Actions: [ - { - type: "autogenerated", - dirName: "docs/actions/actions", - }, - ], - }, - { - Guides: [ - { - type: "autogenerated", - 
dirName: "docs/actions/guides", - }, - ], - }, - ], + type: "doc", + label: "Java SDK", + id: "metadata-integration/java/as-a-library", }, ], }, { type: "category", - label: "Features", - link: { - type: "generated-index", - title: "Feature Guides", - description: "Learn about the features of DataHub.", - }, - items: [ - "docs/ui-ingestion", - "docs/how/search", - "docs/schema-history", - // "docs/how/ui-tabs-guide", - "docs/domains", - "docs/dataproducts", - "docs/glossary/business-glossary", - "docs/tags", - "docs/ownership/ownership-types", - "docs/authorization/access-policies-guide", - "docs/features/dataset-usage-and-query-history", - "docs/posts", - "docs/sync-status", - "docs/generated/lineage/lineage-feature-guide", - { - type: "doc", - id: "docs/tests/metadata-tests", - className: "saasOnly", - }, - "docs/act-on-metadata/impact-analysis", - { - label: "Observability", - type: "category", - items: [ - { - type: "doc", - id: "docs/managed-datahub/observe/freshness-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/volume-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/custom-sql-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/column-assertions", - className: "saasOnly", - }, - ], - }, - { - Guides: ["docs/features/feature-guides/ui-lineage"], - }, - ], + label: "DataHub CLI", + link: { type: "doc", id: "docs/cli" }, + items: ["docs/datahub_lite"], }, { - label: "Managed DataHub", type: "category", - collapsed: true, - link: { - type: "doc", - id: "docs/managed-datahub/managed-datahub-overview", - }, + label: "Datahub Actions", + link: { type: "doc", id: "docs/act-on-metadata" }, items: [ - "docs/managed-datahub/welcome-acryl", - { - type: "doc", - id: "docs/managed-datahub/saas-slack-setup", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/approval-workflows", - className: "saasOnly", - }, + "docs/actions/README", + "docs/actions/quickstart", + "docs/actions/concepts", { - "Metadata Ingestion With Acryl": [ - "docs/managed-datahub/metadata-ingestion-with-acryl/ingestion", - ], - }, - { - "DataHub API": [ - { - type: "doc", - id: "docs/managed-datahub/datahub-api/entity-events-api", - className: "saasOnly", - }, + Sources: [ { - "GraphQL API": [ - "docs/managed-datahub/datahub-api/graphql-api/getting-started", - { - type: "doc", - id: "docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", - className: "saasOnly", - }, - ], + type: "autogenerated", + dirName: "docs/actions/sources", }, ], }, { - Integrations: [ + Events: [ { - type: "doc", - id: "docs/managed-datahub/integrations/aws-privatelink", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/integrations/oidc-sso-integration", - className: "saasOnly", + type: "autogenerated", + dirName: "docs/actions/events", }, ], }, { - "Operator Guide": [ - { - type: "doc", - id: "docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws", - className: "saasOnly", - }, + Actions: [ { - type: "doc", - id: "docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge", - className: "saasOnly", + type: "autogenerated", + dirName: "docs/actions/actions", }, ], }, { - type: "doc", - id: "docs/managed-datahub/chrome-extension", - }, - { - type: "doc", - id: "docs/managed-datahub/subscription-and-notification", - className: "saasOnly", - }, - { - "Managed DataHub Release History": [ - 
"docs/managed-datahub/release-notes/v_0_2_13", - "docs/managed-datahub/release-notes/v_0_2_12", - "docs/managed-datahub/release-notes/v_0_2_11", - "docs/managed-datahub/release-notes/v_0_2_10", - "docs/managed-datahub/release-notes/v_0_2_9", - "docs/managed-datahub/release-notes/v_0_2_8", - "docs/managed-datahub/release-notes/v_0_2_7", - "docs/managed-datahub/release-notes/v_0_2_6", - "docs/managed-datahub/release-notes/v_0_2_5", - "docs/managed-datahub/release-notes/v_0_2_4", - "docs/managed-datahub/release-notes/v_0_2_3", - "docs/managed-datahub/release-notes/v_0_2_2", - "docs/managed-datahub/release-notes/v_0_2_1", - "docs/managed-datahub/release-notes/v_0_2_0", - "docs/managed-datahub/release-notes/v_0_1_73", - "docs/managed-datahub/release-notes/v_0_1_72", - "docs/managed-datahub/release-notes/v_0_1_70", - "docs/managed-datahub/release-notes/v_0_1_69", + Guides: [ + { + type: "autogenerated", + dirName: "docs/actions/guides", + }, ], }, ], }, + { + "API & SDK Guides": [ + "docs/advanced/patch", + "docs/api/tutorials/datasets", + "docs/api/tutorials/lineage", + "docs/api/tutorials/tags", + "docs/api/tutorials/terms", + "docs/api/tutorials/owners", + "docs/api/tutorials/domains", + "docs/api/tutorials/deprecation", + "docs/api/tutorials/descriptions", + "docs/api/tutorials/custom-properties", + "docs/api/tutorials/ml", + ], + }, + { + type: "html", + value: "<div>Community</div>", + defaultStyle: true, + }, { label: "Community", type: "category", diff --git a/docs-website/src/styles/global.scss b/docs-website/src/styles/global.scss index 16e3893ed08b7..1682b322d7cd5 100644 --- a/docs-website/src/styles/global.scss +++ b/docs-website/src/styles/global.scss @@ -47,7 +47,7 @@ --ifm-card-border-radius: calc(var(--ifm-global-radius) * 1.5); /* Menu */ - --ifm-menu-link-padding-vertical: 0.6rem; + --ifm-menu-link-padding-vertical: 0.3rem; --ifm-menu-link-padding-horizontal: 1rem; --ifm-menu-link-sublist-icon: url('data:image/svg+xml;utf8,<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M6.47 9.47L8 7.94333L9.53 9.47L10 9L8 7L6 9L6.47 9.47Z" fill="black" fill-opacity="0.5"/></svg>'); --ifm-menu-color-background-hover: var(--ifm-color-primary-opaque); @@ -286,22 +286,29 @@ div[class^="announcementBar"] { } } - .theme-doc-sidebar-item-category-level-1 .menu__link { + .menuHtmlItem_node_modules-\@docusaurus-theme-classic-lib-theme-DocSidebarItem-Html-styles-module { + font-weight: 600; + } + + .menu__link { font-weight: 400; + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 0.2rem) calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 1rem); } - .theme-doc-sidebar-item-category-level-1 .menu__link--active { - font-weight: 600; + .menu__link--active { + font-weight: 400; + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 0.2rem) calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 1rem); } .theme-doc-sidebar-item-category-level-1 > div > a:first-child { - color: var(--ifm-navbar-link-color); - font-weight: 600; - padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) var(--ifm-menu-link-padding-horizontal); + font-weight: 400; + color: var(--ifm-menu-color); + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 0.2rem) calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) 
calc(var(--ifm-menu-link-padding-horizontal) + 1rem); } + .theme-doc-sidebar-item-category-level-1 > div > a.menu__link--active { - color: var(--ifm-navbar-link-color); - font-weight: 600; + color: var(--ifm-menu-color); + font-weight: 400; } } From 1b48877abe2a368659be7005a17529e9b7a3ed9f Mon Sep 17 00:00:00 2001 From: Matthias De Geyter <matthias.degeyter@gmail.com> Date: Thu, 7 Dec 2023 14:54:02 +0100 Subject: [PATCH 211/792] fix(ingest/json-schema): take into account environment (#9385) Co-authored-by: Tamas Nemeth <treff7es@gmail.com> --- .../src/datahub/ingestion/source/schema/json_schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py index 2ac946b23deb0..f6e944f4fc3cb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py @@ -271,6 +271,7 @@ def _load_one_file( platform=self.config.platform, name=dataset_name, platform_instance=self.config.platform_instance, + env=self.config.env, ) yield MetadataChangeProposalWrapper( entityUrn=dataset_urn, aspect=meta From 4c348a8eea53194eb37c7dfb8d10820a83791030 Mon Sep 17 00:00:00 2001 From: haeniya <yanik.haeni@gmail.com> Date: Thu, 7 Dec 2023 15:39:43 +0100 Subject: [PATCH 212/792] feat(datahub-frontend): make Java memory options configurable via ENV variable (#9215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Yanik Häni <Yanik.Haeni1@swisscom.com> --- docker/datahub-frontend/start.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/datahub-frontend/start.sh b/docker/datahub-frontend/start.sh index 9dc1514144bb1..12e6b8915096d 100755 --- a/docker/datahub-frontend/start.sh +++ b/docker/datahub-frontend/start.sh @@ -43,8 +43,7 @@ fi # make sure there is no whitespace at the beginning and the end of # this string -export JAVA_OPTS="-Xms512m \ - -Xmx1024m \ +export JAVA_OPTS="${JAVA_MEMORY_OPTS:-"-Xms512m -Xmx1024m"} \ -Dhttp.port=$SERVER_PORT \ -Dconfig.file=datahub-frontend/conf/application.conf \ -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf \ From a6726c12ddc749345df81d5783927eeb7b043b9c Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Thu, 7 Dec 2023 11:03:37 -0500 Subject: [PATCH 213/792] docs(ingest/sql-queries): Add documentation (#9406) --- .../docs/sources/sql-queries/sql-queries.md | 8 ++++++++ .../sources/sql-queries/sql-queries_recipe.yml | 9 +++++++++ .../datahub/ingestion/source/sql_queries.py | 18 ++++++++++++++++-- 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 metadata-ingestion/docs/sources/sql-queries/sql-queries.md create mode 100644 metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml diff --git a/metadata-ingestion/docs/sources/sql-queries/sql-queries.md b/metadata-ingestion/docs/sources/sql-queries/sql-queries.md new file mode 100644 index 0000000000000..e829b4366bb84 --- /dev/null +++ b/metadata-ingestion/docs/sources/sql-queries/sql-queries.md @@ -0,0 +1,8 @@ +### Example Queries File + +```json +{"query": "SELECT x FROM my_table", "timestamp": 1689232738.051, "user": "user_a", "downstream_tables": [], "upstream_tables": ["my_database.my_schema.my_table"]} +{"query": "INSERT INTO my_table VALUES (1, 'a')", "timestamp": 1689232737.669, "user": "user_b", "downstream_tables": 
["my_database.my_schema.my_table"], "upstream_tables": []} +``` + +Note that this is not a valid standard JSON file, but rather a file containing one JSON object per line. diff --git a/metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml b/metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml new file mode 100644 index 0000000000000..58af21e8a5ba4 --- /dev/null +++ b/metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml @@ -0,0 +1,9 @@ +datahub_api: # Only necessary if using a non-DataHub sink, e.g. the file sink + server: http://localhost:8080 + timeout_sec: 60 +source: + type: sql-queries + config: + platform: "snowflake" + default_db: "SNOWFLAKE" + query_file: "./queries.json" diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py index fcf97e461967c..58e9682df935e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py @@ -88,11 +88,25 @@ def compute_stats(self) -> None: @platform_name("SQL Queries") @config_class(SqlQueriesSourceConfig) -@support_status(SupportStatus.TESTING) +@support_status(SupportStatus.INCUBATING) @capability(SourceCapability.LINEAGE_COARSE, "Parsed from SQL queries") @capability(SourceCapability.LINEAGE_FINE, "Parsed from SQL queries") class SqlQueriesSource(Source): - # TODO: Documentation + """ + This source reads a specifically-formatted JSON file containing SQL queries and parses them to generate lineage. + + This file should contain one JSON object per line, with the following fields: + - query: string - The SQL query to parse. + - timestamp (optional): number - The timestamp of the query, in seconds since the epoch. + - user (optional): string - The user who ran the query. + This user value will be directly converted into a DataHub user urn. + - operation_type (optional): string - Platform-specific operation type, used if the operation type can't be parsed. + - downstream_tables (optional): string[] - Fallback list of tables that the query writes to, + used if the query can't be parsed. + - upstream_tables (optional): string[] - Fallback list of tables the query reads from, + used if the query can't be parsed. 
+ """ + urns: Optional[Set[str]] schema_resolver: SchemaResolver builder: SqlParsingBuilder From 923e76d20b1ecd52e8f813ec645e41e0be692e3d Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Fri, 8 Dec 2023 02:18:35 +0900 Subject: [PATCH 214/792] docs: fix duplicated overview link for api section (#9402) --- docs-website/sidebars.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 67943ba8d7016..5d7c6b06adad4 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -478,10 +478,14 @@ module.exports = { value: "<div>API & SDKs</div>", defaultStyle: true, }, + { + type: "doc", + id: "docs/api/datahub-apis", + label: "Overview", + }, { type: "category", label: "API", - link: { type: "doc", id: "docs/api/datahub-apis" }, items: [ { "GraphQL API": [ @@ -605,7 +609,6 @@ module.exports = { { type: "category", label: "SDK", - link: { type: "doc", id: "docs/api/datahub-apis" }, items: [ { "Python SDK": [ From 3096aa6ffa8148b9fdc4047f5916e75e22a83ee5 Mon Sep 17 00:00:00 2001 From: Olga Dimova <38855943+olgadimova@users.noreply.github.com> Date: Thu, 7 Dec 2023 20:44:24 +0300 Subject: [PATCH 215/792] =?UTF-8?q?feat(glossary):=20add=20toggle=20sideba?= =?UTF-8?q?r=20button=20and=20functionality=20to=20Busine=E2=80=A6=20(#922?= =?UTF-8?q?2)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Chris Collins <chriscollins3456@gmail.com> --- .../entity/shared/GlossaryEntityContext.tsx | 25 ++++++++++++++++--- .../src/app/glossary/BusinessGlossaryPage.tsx | 16 +++++++++++- .../src/app/glossary/GlossaryRoutes.tsx | 11 +++++++- .../src/app/glossary/GlossarySidebar.tsx | 23 +++++++++++++---- .../src/app/glossary/useToggleSidebar.tsx | 17 +++++++++++++ .../src/app/shared/sidebar/components.tsx | 1 + 6 files changed, 83 insertions(+), 10 deletions(-) create mode 100644 datahub-web-react/src/app/glossary/useToggleSidebar.tsx diff --git a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx index f00f16647c94b..79ec142fd801d 100644 --- a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx +++ b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx @@ -10,6 +10,8 @@ export interface GlossaryEntityContextType { // This will happen when you edit a name, move a term/group, create a new term/group, and delete a term/group urnsToUpdate: string[]; setUrnsToUpdate: (updatdUrns: string[]) => void; + isSidebarOpen: boolean; + setIsSidebarOpen: (isOpen: boolean) => void; } export const GlossaryEntityContext = React.createContext<GlossaryEntityContextType>({ @@ -18,10 +20,27 @@ export const GlossaryEntityContext = React.createContext<GlossaryEntityContextTy setEntityData: () => {}, urnsToUpdate: [], setUrnsToUpdate: () => {}, + isSidebarOpen: true, + setIsSidebarOpen: () => {}, }); export const useGlossaryEntityData = () => { - const { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate } = - useContext(GlossaryEntityContext); - return { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate }; + const { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + } = useContext(GlossaryEntityContext); + return { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + 
}; }; diff --git a/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx b/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx index a5262265fd23d..4e424b776a8ce 100644 --- a/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx +++ b/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx @@ -20,6 +20,8 @@ import { import { OnboardingTour } from '../onboarding/OnboardingTour'; import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; import { useUserContext } from '../context/useUserContext'; +import useToggleSidebar from './useToggleSidebar'; +import ToggleSidebarButton from '../search/ToggleSidebarButton'; export const HeaderWrapper = styled(TabToolbar)` padding: 15px 45px 10px 24px; @@ -38,6 +40,12 @@ const MainContentWrapper = styled.div` flex-direction: column; `; +const TitleContainer = styled.div` + display: flex; + align-items: center; + gap: 12px; +`; + export const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; @@ -56,6 +64,7 @@ function BusinessGlossaryPage() { } = useGetRootGlossaryNodesQuery(); const entityRegistry = useEntityRegistry(); const { setEntityData } = useGlossaryEntityData(); + const { isOpen: isSidebarOpen, toggleSidebar } = useToggleSidebar(); useEffect(() => { setEntityData(null); @@ -94,7 +103,12 @@ function BusinessGlossaryPage() { )} <MainContentWrapper data-testid="glossary-entities-list"> <HeaderWrapper> - <Typography.Title level={3}>Business Glossary</Typography.Title> + <TitleContainer> + <ToggleSidebarButton isOpen={isSidebarOpen} onClick={toggleSidebar} /> + <Typography.Title style={{ margin: '0' }} level={3}> + Business Glossary + </Typography.Title> + </TitleContainer> <div> <Button data-testid="add-term-button" diff --git a/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx b/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx index abba77d1a302d..0062cefee067f 100644 --- a/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx +++ b/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx @@ -20,12 +20,21 @@ export default function GlossaryRoutes() { const entityRegistry = useEntityRegistry(); const [entityData, setEntityData] = useState<GenericEntityProperties | null>(null); const [urnsToUpdate, setUrnsToUpdate] = useState<string[]>([]); + const [isSidebarOpen, setIsSidebarOpen] = useState<boolean>(true); const isAtRootGlossary = window.location.pathname === PageRoutes.GLOSSARY; return ( <GlossaryEntityContext.Provider - value={{ isInGlossaryContext: true, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate }} + value={{ + isInGlossaryContext: true, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + }} > {!isAtRootGlossary && <GlossaryEntitiesPath />} <ContentWrapper> diff --git a/datahub-web-react/src/app/glossary/GlossarySidebar.tsx b/datahub-web-react/src/app/glossary/GlossarySidebar.tsx index 4126c8f2bb53f..4fa99da70eaa6 100644 --- a/datahub-web-react/src/app/glossary/GlossarySidebar.tsx +++ b/datahub-web-react/src/app/glossary/GlossarySidebar.tsx @@ -1,14 +1,25 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import GlossarySearch from './GlossarySearch'; import GlossaryBrowser from './GlossaryBrowser/GlossaryBrowser'; import { ProfileSidebarResizer } from '../entity/shared/containers/profile/sidebar/ProfileSidebarResizer'; import { SidebarWrapper } from '../shared/sidebar/components'; +import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; export 
const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; export default function GlossarySidebar() { - const [browserWidth, setBrowserWith] = useState(window.innerWidth * 0.2); + const [browserWidth, setBrowserWidth] = useState(window.innerWidth * 0.2); + const [previousBrowserWidth, setPreviousBrowserWidth] = useState(window.innerWidth * 0.2); + const { isSidebarOpen } = useGlossaryEntityData(); + + useEffect(() => { + if (isSidebarOpen) { + setBrowserWidth(previousBrowserWidth); + } else { + setBrowserWidth(0); + } + }, [isSidebarOpen, previousBrowserWidth]); return ( <> @@ -17,9 +28,11 @@ export default function GlossarySidebar() { <GlossaryBrowser openToEntity /> </SidebarWrapper> <ProfileSidebarResizer - setSidePanelWidth={(width) => - setBrowserWith(Math.min(Math.max(width, MIN_BROWSWER_WIDTH), MAX_BROWSER_WIDTH)) - } + setSidePanelWidth={(width) => { + const newWidth = Math.min(Math.max(width, MIN_BROWSWER_WIDTH), MAX_BROWSER_WIDTH); + setBrowserWidth(newWidth); + setPreviousBrowserWidth(newWidth); + }} initialSize={browserWidth} isSidebarOnLeft /> diff --git a/datahub-web-react/src/app/glossary/useToggleSidebar.tsx b/datahub-web-react/src/app/glossary/useToggleSidebar.tsx new file mode 100644 index 0000000000000..3f2e02385d84e --- /dev/null +++ b/datahub-web-react/src/app/glossary/useToggleSidebar.tsx @@ -0,0 +1,17 @@ +import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; +import useToggle from '../shared/useToggle'; + +const useToggleSidebar = () => { + const { isSidebarOpen, setIsSidebarOpen } = useGlossaryEntityData(); + + const { isOpen, toggle: toggleSidebar } = useToggle({ + initialValue: isSidebarOpen ?? true, + onToggle: (isNowOpen: boolean) => { + setIsSidebarOpen(isNowOpen); + }, + }); + + return { isOpen, toggleSidebar } as const; +}; + +export default useToggleSidebar; diff --git a/datahub-web-react/src/app/shared/sidebar/components.tsx b/datahub-web-react/src/app/shared/sidebar/components.tsx index 5d123d6022790..c5e529bd3a91c 100644 --- a/datahub-web-react/src/app/shared/sidebar/components.tsx +++ b/datahub-web-react/src/app/shared/sidebar/components.tsx @@ -7,6 +7,7 @@ export const SidebarWrapper = styled.div<{ width: number }>` max-height: 100%; width: ${(props) => props.width}px; min-width: ${(props) => props.width}px; + display: ${(props) => (props.width ? 
'block' : 'none')}; `; export function RotatingTriangle({ isOpen, onClick }: { isOpen: boolean; onClick?: () => void }) { From d182667eebd0e3d95057431cf7ce6f013ce713d0 Mon Sep 17 00:00:00 2001 From: John Joyce <john@acryl.io> Date: Thu, 7 Dec 2023 10:13:09 -0800 Subject: [PATCH 216/792] refactor(ui): Refactor entity registry to be inside App Providers (#9399) Merging due to unrelated table failing --- datahub-web-react/src/App.tsx | 59 ++----------------- datahub-web-react/src/app/AppProviders.tsx | 13 ++-- .../src/app/EntityRegistryProvider.tsx | 10 ++++ datahub-web-react/src/app/ProtectedRoutes.tsx | 20 +++---- .../src/app/buildEntityRegistry.ts | 48 +++++++++++++++ .../src/app/useBuildEntityRegistry.tsx | 8 +++ 6 files changed, 87 insertions(+), 71 deletions(-) create mode 100644 datahub-web-react/src/app/EntityRegistryProvider.tsx create mode 100644 datahub-web-react/src/app/buildEntityRegistry.ts create mode 100644 datahub-web-react/src/app/useBuildEntityRegistry.tsx diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 342a89f350429..1d9f5d2b43993 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useMemo, useState } from 'react'; +import React, { useEffect, useState } from 'react'; import Cookies from 'js-cookie'; import { message } from 'antd'; import { BrowserRouter as Router } from 'react-router-dom'; @@ -8,34 +8,11 @@ import { ThemeProvider } from 'styled-components'; import { Helmet, HelmetProvider } from 'react-helmet-async'; import './App.less'; import { Routes } from './app/Routes'; -import EntityRegistry from './app/entity/EntityRegistry'; -import { DashboardEntity } from './app/entity/dashboard/DashboardEntity'; -import { ChartEntity } from './app/entity/chart/ChartEntity'; -import { UserEntity } from './app/entity/user/User'; -import { GroupEntity } from './app/entity/group/Group'; -import { DatasetEntity } from './app/entity/dataset/DatasetEntity'; -import { DataFlowEntity } from './app/entity/dataFlow/DataFlowEntity'; -import { DataJobEntity } from './app/entity/dataJob/DataJobEntity'; -import { TagEntity } from './app/entity/tag/Tag'; -import { EntityRegistryContext } from './entityRegistryContext'; import { Theme } from './conf/theme/types'; import defaultThemeConfig from './conf/theme/theme_light.config.json'; import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; -import { GlossaryTermEntity } from './app/entity/glossaryTerm/GlossaryTermEntity'; -import { MLFeatureEntity } from './app/entity/mlFeature/MLFeatureEntity'; -import { MLPrimaryKeyEntity } from './app/entity/mlPrimaryKey/MLPrimaryKeyEntity'; -import { MLFeatureTableEntity } from './app/entity/mlFeatureTable/MLFeatureTableEntity'; -import { MLModelEntity } from './app/entity/mlModel/MLModelEntity'; -import { MLModelGroupEntity } from './app/entity/mlModelGroup/MLModelGroupEntity'; -import { DomainEntity } from './app/entity/domain/DomainEntity'; -import { ContainerEntity } from './app/entity/container/ContainerEntity'; -import GlossaryNodeEntity from './app/entity/glossaryNode/GlossaryNodeEntity'; -import { DataPlatformEntity } from './app/entity/dataPlatform/DataPlatformEntity'; -import { DataProductEntity } from './app/entity/dataProduct/DataProductEntity'; -import { DataPlatformInstanceEntity } from './app/entity/dataPlatformInstance/DataPlatformInstanceEntity'; -import { RoleEntity } from './app/entity/Access/RoleEntity'; 
import possibleTypesResult from './possibleTypes.generated'; /* @@ -101,32 +78,6 @@ const App: React.VFC = () => { }); }, []); - const entityRegistry = useMemo(() => { - const register = new EntityRegistry(); - register.register(new DatasetEntity()); - register.register(new DashboardEntity()); - register.register(new ChartEntity()); - register.register(new UserEntity()); - register.register(new GroupEntity()); - register.register(new TagEntity()); - register.register(new DataFlowEntity()); - register.register(new DataJobEntity()); - register.register(new GlossaryTermEntity()); - register.register(new MLFeatureEntity()); - register.register(new MLPrimaryKeyEntity()); - register.register(new MLFeatureTableEntity()); - register.register(new MLModelEntity()); - register.register(new MLModelGroupEntity()); - register.register(new DomainEntity()); - register.register(new ContainerEntity()); - register.register(new GlossaryNodeEntity()); - register.register(new RoleEntity()); - register.register(new DataPlatformEntity()); - register.register(new DataProductEntity()); - register.register(new DataPlatformInstanceEntity()); - return register; - }, []); - return ( <HelmetProvider> <ThemeProvider theme={dynamicThemeConfig}> @@ -134,11 +85,9 @@ const App: React.VFC = () => { <Helmet> <title>{dynamicThemeConfig.content.title} - - - - - + + + diff --git a/datahub-web-react/src/app/AppProviders.tsx b/datahub-web-react/src/app/AppProviders.tsx index 1ced44048b502..81a8ddbfc9bac 100644 --- a/datahub-web-react/src/app/AppProviders.tsx +++ b/datahub-web-react/src/app/AppProviders.tsx @@ -4,6 +4,7 @@ import { EducationStepsProvider } from '../providers/EducationStepsProvider'; import UserContextProvider from './context/UserContextProvider'; import QuickFiltersProvider from '../providers/QuickFiltersProvider'; import SearchContextProvider from './search/context/SearchContextProvider'; +import EntityRegistryProvider from './EntityRegistryProvider'; interface Props { children: React.ReactNode; @@ -13,11 +14,13 @@ export default function AppProviders({ children }: Props) { return ( - - - {children} - - + + + + {children} + + + ); diff --git a/datahub-web-react/src/app/EntityRegistryProvider.tsx b/datahub-web-react/src/app/EntityRegistryProvider.tsx new file mode 100644 index 0000000000000..9e283c0d07fc8 --- /dev/null +++ b/datahub-web-react/src/app/EntityRegistryProvider.tsx @@ -0,0 +1,10 @@ +import React from 'react'; +import { EntityRegistryContext } from '../entityRegistryContext'; +import useBuildEntityRegistry from './useBuildEntityRegistry'; + +const EntityRegistryProvider = ({ children }: { children: React.ReactNode }) => { + const entityRegistry = useBuildEntityRegistry(); + return {children}; +}; + +export default EntityRegistryProvider; diff --git a/datahub-web-react/src/app/ProtectedRoutes.tsx b/datahub-web-react/src/app/ProtectedRoutes.tsx index 469e0d6030b35..a3f072e764bea 100644 --- a/datahub-web-react/src/app/ProtectedRoutes.tsx +++ b/datahub-web-react/src/app/ProtectedRoutes.tsx @@ -13,25 +13,23 @@ import EmbedLookup from './embed/lookup'; * Container for all views behind an authentication wall. 
*/ export const ProtectedRoutes = (): JSX.Element => { - const entityRegistry = useEntityRegistry(); - return ( - - - - } /> - } /> - {entityRegistry.getEntities().map((entity) => ( + + + } /> + } /> + {useEntityRegistry() + .getEntities() + .map((entity) => ( } /> ))} - } /> - - + } /> + ); diff --git a/datahub-web-react/src/app/buildEntityRegistry.ts b/datahub-web-react/src/app/buildEntityRegistry.ts new file mode 100644 index 0000000000000..4f74681570802 --- /dev/null +++ b/datahub-web-react/src/app/buildEntityRegistry.ts @@ -0,0 +1,48 @@ +import EntityRegistry from './entity/EntityRegistry'; +import { DashboardEntity } from './entity/dashboard/DashboardEntity'; +import { ChartEntity } from './entity/chart/ChartEntity'; +import { UserEntity } from './entity/user/User'; +import { GroupEntity } from './entity/group/Group'; +import { DatasetEntity } from './entity/dataset/DatasetEntity'; +import { DataFlowEntity } from './entity/dataFlow/DataFlowEntity'; +import { DataJobEntity } from './entity/dataJob/DataJobEntity'; +import { TagEntity } from './entity/tag/Tag'; +import { GlossaryTermEntity } from './entity/glossaryTerm/GlossaryTermEntity'; +import { MLFeatureEntity } from './entity/mlFeature/MLFeatureEntity'; +import { MLPrimaryKeyEntity } from './entity/mlPrimaryKey/MLPrimaryKeyEntity'; +import { MLFeatureTableEntity } from './entity/mlFeatureTable/MLFeatureTableEntity'; +import { MLModelEntity } from './entity/mlModel/MLModelEntity'; +import { MLModelGroupEntity } from './entity/mlModelGroup/MLModelGroupEntity'; +import { DomainEntity } from './entity/domain/DomainEntity'; +import { ContainerEntity } from './entity/container/ContainerEntity'; +import GlossaryNodeEntity from './entity/glossaryNode/GlossaryNodeEntity'; +import { DataPlatformEntity } from './entity/dataPlatform/DataPlatformEntity'; +import { DataProductEntity } from './entity/dataProduct/DataProductEntity'; +import { DataPlatformInstanceEntity } from './entity/dataPlatformInstance/DataPlatformInstanceEntity'; +import { RoleEntity } from './entity/Access/RoleEntity'; + +export default function buildEntityRegistry() { + const registry = new EntityRegistry(); + registry.register(new DatasetEntity()); + registry.register(new DashboardEntity()); + registry.register(new ChartEntity()); + registry.register(new UserEntity()); + registry.register(new GroupEntity()); + registry.register(new TagEntity()); + registry.register(new DataFlowEntity()); + registry.register(new DataJobEntity()); + registry.register(new GlossaryTermEntity()); + registry.register(new MLFeatureEntity()); + registry.register(new MLPrimaryKeyEntity()); + registry.register(new MLFeatureTableEntity()); + registry.register(new MLModelEntity()); + registry.register(new MLModelGroupEntity()); + registry.register(new DomainEntity()); + registry.register(new ContainerEntity()); + registry.register(new GlossaryNodeEntity()); + registry.register(new RoleEntity()); + registry.register(new DataPlatformEntity()); + registry.register(new DataProductEntity()); + registry.register(new DataPlatformInstanceEntity()); + return registry; +} \ No newline at end of file diff --git a/datahub-web-react/src/app/useBuildEntityRegistry.tsx b/datahub-web-react/src/app/useBuildEntityRegistry.tsx new file mode 100644 index 0000000000000..2beb5edae8b02 --- /dev/null +++ b/datahub-web-react/src/app/useBuildEntityRegistry.tsx @@ -0,0 +1,8 @@ +import { useMemo } from 'react'; +import buildEntityRegistry from './buildEntityRegistry'; + +export default function useBuildEntityRegistry() { + 
return useMemo(() => { + return buildEntityRegistry(); + }, []); +} From 81a93dc95151a59ec6b0d8ee8e9eefd3de8b6ca3 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 7 Dec 2023 15:59:10 -0500 Subject: [PATCH 217/792] feat(ui): handle content prop changes in Editor component (#9400) --- datahub-web-react/codegen.yml | 12 +--------- .../components/editor/Editor.tsx | 24 ++++++++++++++++--- 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/datahub-web-react/codegen.yml b/datahub-web-react/codegen.yml index 35728e8aeb7d4..417d6a8f1c2a6 100644 --- a/datahub-web-react/codegen.yml +++ b/datahub-web-react/codegen.yml @@ -1,16 +1,6 @@ overwrite: true schema: - - '../datahub-graphql-core/src/main/resources/app.graphql' - - '../datahub-graphql-core/src/main/resources/entity.graphql' - - '../datahub-graphql-core/src/main/resources/search.graphql' - - '../datahub-graphql-core/src/main/resources/analytics.graphql' - - '../datahub-graphql-core/src/main/resources/recommendation.graphql' - - '../datahub-graphql-core/src/main/resources/auth.graphql' - - '../datahub-graphql-core/src/main/resources/ingestion.graphql' - - '../datahub-graphql-core/src/main/resources/timeline.graphql' - - '../datahub-graphql-core/src/main/resources/tests.graphql' - - '../datahub-graphql-core/src/main/resources/step.graphql' - - '../datahub-graphql-core/src/main/resources/lineage.graphql' + - '../datahub-graphql-core/src/main/resources/*.graphql' config: scalars: Long: number diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx index 038507c620706..bd2e410fb30d9 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx @@ -1,4 +1,4 @@ -import React, { forwardRef, useEffect, useImperativeHandle } from 'react'; +import React, { forwardRef, useEffect, useImperativeHandle, useState } from 'react'; import DOMPurify from 'dompurify'; import { BlockquoteExtension, @@ -79,9 +79,20 @@ export const Editor = forwardRef((props: EditorProps, ref) => { manager.view.focus(); } }); + + // We need to track the modified content that we expect to be in the editor. + // This way, if the content prop changes, we can update the editor content to match + // if needed. However, we don't want to update the editor content on normal typing + // changes because that would cause the cursor to jump around unexpectedly. + const [modifiedContent, setModifiedContent] = useState(content); useEffect(() => { - if (readOnly && content) { + if (readOnly && content !== undefined) { + manager.store.commands.setContent(content); + } else if (!readOnly && content !== undefined && modifiedContent !== content) { + // If we get a content change that doesn't match what we're tracking to be in the editor, + // then we need to update the editor content to match the new props content. 
manager.store.commands.setContent(content); + setModifiedContent(content); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [readOnly, content]); @@ -97,7 +108,14 @@ export const Editor = forwardRef((props: EditorProps, ref) => { - {onChange && } + {onChange && ( + { + setModifiedContent(md); + onChange(md); + }} + /> + )} )} From e3e9904d214c0ae206b6fe9e51cec3703018f226 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 7 Dec 2023 16:01:23 -0500 Subject: [PATCH 218/792] fix(ingest/profiling): Add back db_name to sql_generic_profiler methods (#9407) --- .../src/datahub/ingestion/source/redshift/profile.py | 1 + .../ingestion/source/snowflake/snowflake_profiler.py | 1 + .../datahub/ingestion/source/sql/sql_generic_profiler.py | 7 +++++-- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py index 6fa3504ced139..b05850cef6e94 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py @@ -60,6 +60,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, max_workers=self.config.profiling.max_workers, + db_name=db, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 67953de47e5a3..89857c4564267 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -63,6 +63,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, max_workers=self.config.profiling.max_workers, + db_name=database.name, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index e309ff0d15311..844a458d9f1ab 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -71,6 +71,7 @@ def generate_profile_workunits( requests: List[TableProfilerRequest], *, max_workers: int, + db_name: Optional[str] = None, platform: Optional[str] = None, profiler_args: Optional[Dict] = None, ) -> Iterable[MetadataWorkUnit]: @@ -98,7 +99,7 @@ def generate_profile_workunits( return # Otherwise, if column level profiling is enabled, use GE profiler. 
- ge_profiler = self.get_profiler_instance() + ge_profiler = self.get_profiler_instance(db_name) for ge_profiler_request, profile in ge_profiler.generate_profiles( ge_profile_requests, max_workers, platform, profiler_args @@ -205,7 +206,9 @@ def get_inspectors(self) -> Iterable[Inspector]: inspector = inspect(conn) yield inspector - def get_profiler_instance(self) -> "DatahubGEProfiler": + def get_profiler_instance( + self, db_name: Optional[str] = None + ) -> "DatahubGEProfiler": logger.debug(f"Getting profiler instance from {self.platform}") url = self.config.get_sql_alchemy_url() From 724736939aa33b28c561ec3814c1e1ba3ceffe3b Mon Sep 17 00:00:00 2001 From: Amanda Ng <10681923+ngamanda@users.noreply.github.com> Date: Fri, 8 Dec 2023 05:48:50 +0800 Subject: [PATCH 219/792] feat(observability): add actor urn to GraphQL spans (#9382) Co-authored-by: RyanHolstien --- metadata-service/graphql-servlet-impl/build.gradle | 2 +- .../src/main/java/com/datahub/graphql/GraphQLController.java | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/metadata-service/graphql-servlet-impl/build.gradle b/metadata-service/graphql-servlet-impl/build.gradle index 52fd20ef32389..51f67631159d3 100644 --- a/metadata-service/graphql-servlet-impl/build.gradle +++ b/metadata-service/graphql-servlet-impl/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation externalDependency.charle implementation externalDependency.jetbrains - + implementation externalDependency.opentelemetryApi } configurations.all{ diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 692208c42f90c..0cae64c507ad7 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -15,6 +15,7 @@ import com.linkedin.datahub.graphql.exception.DataHubGraphQLError; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.ExecutionResult; +import io.opentelemetry.api.trace.Span; import java.util.Collections; import java.util.List; import java.util.Map; @@ -95,6 +96,7 @@ CompletableFuture> postGraphQL(HttpEntity httpEnt */ Authentication authentication = AuthenticationContext.getAuthentication(); SpringQueryContext context = new SpringQueryContext(true, authentication, _authorizerChain); + Span.current().setAttribute("actor.urn", context.getActorUrn()); return CompletableFuture.supplyAsync( () -> { From f03c66ca1f1d2ade0bd5d65da9c74d0f66ea1201 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 7 Dec 2023 17:18:16 -0500 Subject: [PATCH 220/792] fix(ingest/lookml): make deploy key optional (#9378) --- .../src/datahub/configuration/git.py | 23 ++- .../ingestion/source/git/git_import.py | 29 +++- .../ingestion/source/looker/lookml_source.py | 164 ++++++++---------- .../tests/integration/git/test_git_clone.py | 10 ++ .../tests/integration/lookml/test_lookml.py | 2 +- 5 files changed, 138 insertions(+), 90 deletions(-) diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index 80eb41c100b10..9ea9007553839 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -1,10 +1,12 @@ import os -from typing import Any, Dict, Optional +import pathlib +from typing import Any, Dict, Optional, Union from 
pydantic import Field, FilePath, SecretStr, validator from datahub.configuration.common import ConfigModel from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.ingestion.source.git.git_import import GitClone _GITHUB_PREFIX = "https://github.com/" _GITLAB_PREFIX = "https://gitlab.com/" @@ -141,3 +143,22 @@ def branch_for_clone(self) -> Optional[str]: if "branch" in self.__fields_set__: return self.branch return None + + def clone( + self, + tmp_path: Union[pathlib.Path, str], + fallback_deploy_key: Optional[SecretStr] = None, + ) -> pathlib.Path: + """Clones the repo into a temporary directory and returns the path to the checkout.""" + + assert self.repo_ssh_locator + + git_clone = GitClone(str(tmp_path)) + + checkout_dir = git_clone.clone( + ssh_key=self.deploy_key or fallback_deploy_key, + repo_url=self.repo_ssh_locator, + branch=self.branch_for_clone, + ) + + return checkout_dir diff --git a/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py b/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py index 55eeb2bc6dcab..2122374c1e404 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py +++ b/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py @@ -6,6 +6,7 @@ from uuid import uuid4 import git +from git.util import remove_password_if_present from pydantic import SecretStr logger = logging.getLogger(__name__) @@ -53,7 +54,10 @@ def clone( " -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" ) logger.debug(f"ssh_command={git_ssh_cmd}") - logger.info(f"⏳ Cloning repo '{repo_url}', this can take some time...") + + logger.info( + f"⏳ Cloning repo '{self.sanitize_repo_url(repo_url)}', this can take some time..." + ) self.last_repo_cloned = git.Repo.clone_from( repo_url, checkout_dir, @@ -69,3 +73,26 @@ def clone( def get_last_repo_cloned(self) -> Optional[git.Repo]: return self.last_repo_cloned + + @staticmethod + def sanitize_repo_url(repo_url: str) -> str: + """Sanitizes the repo URL for logging purposes. + + Args: + repo_url (str): The repository URL. + + Returns: + str: The sanitized repository URL. + + Examples: + >>> GitClone.sanitize_repo_url("https://username:password@github.com/org/repo.git") + 'https://*****:*****@github.com/org/repo.git' + + >>> GitClone.sanitize_repo_url("https://github.com/org/repo.git") + 'https://github.com/org/repo.git' + + >>> GitClone.sanitize_repo_url("git@github.com:org/repo.git") + 'git@github.com:org/repo.git' + """ + + return remove_password_if_present([repo_url])[0] diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 93c405f0a39f2..b76bef49a7e6f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -301,13 +301,13 @@ def check_base_folder_if_not_provided( ) -> Optional[pydantic.DirectoryPath]: if v is None: git_info: Optional[GitInfo] = values.get("git_info") - if git_info and git_info.deploy_key: - # We have git_info populated correctly, base folder is not needed - pass + if git_info: + if not git_info.deploy_key: + logger.warning( + "git_info is provided, but no SSH key is present. If the repo is not public, we'll fail to clone it." + ) else: - raise ValueError( - "base_folder is not provided. 
Neither has a github deploy_key or deploy_key_file been provided" - ) + raise ValueError("Neither base_folder nor git_info has been provided.") return v @@ -1831,14 +1831,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: assert self.source_config.git_info # we don't have a base_folder, so we need to clone the repo and process it locally start_time = datetime.now() - git_clone = GitClone(tmp_dir) - # Github info deploy key is always populated - assert self.source_config.git_info.deploy_key - assert self.source_config.git_info.repo_ssh_locator - checkout_dir = git_clone.clone( - ssh_key=self.source_config.git_info.deploy_key, - repo_url=self.source_config.git_info.repo_ssh_locator, - branch=self.source_config.git_info.branch_for_clone, + checkout_dir = self.source_config.git_info.clone( + tmp_path=tmp_dir, ) self.reporter.git_clone_latency = datetime.now() - start_time self.source_config.base_folder = checkout_dir.resolve() @@ -1853,29 +1847,20 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: for project, p_ref in self.source_config.project_dependencies.items(): # If we were given GitHub info, we need to clone the project. if isinstance(p_ref, GitInfo): - assert p_ref.repo_ssh_locator - - p_cloner = GitClone(f"{tmp_dir}/_included_/{project}") try: - p_checkout_dir = p_cloner.clone( - ssh_key=( - # If a deploy key was provided, use it. Otherwise, fall back - # to the main project deploy key. - p_ref.deploy_key - or ( - self.source_config.git_info.deploy_key - if self.source_config.git_info - else None - ) - ), - repo_url=p_ref.repo_ssh_locator, - branch=p_ref.branch_for_clone, + p_checkout_dir = p_ref.clone( + tmp_path=f"{tmp_dir}/_included_/{project}", + # If a deploy key was provided, use it. Otherwise, fall back + # to the main project deploy key, if present. + fallback_deploy_key=self.source_config.git_info.deploy_key + if self.source_config.git_info + else None, ) p_ref = p_checkout_dir.resolve() except Exception as e: logger.warning( - f"Failed to clone remote project {project}. This can lead to failures in parsing lookml files later on: {e}", + f"Failed to clone project dependency {project}. This can lead to failures in parsing lookml files later on: {e}", ) visited_projects.add(project) continue @@ -1910,68 +1895,73 @@ def _recursively_check_manifests( return manifest = self.get_manifest_if_present(project_path) - if manifest: - # Special case handling if the root project has a name in the manifest file. - if project_name == _BASE_PROJECT_NAME and manifest.project_name: - if ( - self.source_config.project_name is not None - and manifest.project_name != self.source_config.project_name - ): - logger.warning( - f"The project name in the manifest file '{manifest.project_name}'" - f"does not match the configured project name '{self.source_config.project_name}'. " - "This can lead to failures in LookML include resolution and lineage generation." - ) - elif self.source_config.project_name is None: - self.source_config.project_name = manifest.project_name + if not manifest: + return - # Clone the remote project dependencies. - for remote_project in manifest.remote_dependencies: - if remote_project.name in project_visited: - continue + # Special case handling if the root project has a name in the manifest file. 
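
Editorial aside, not part of the patch: the `GitInfo.clone()` helper this commit introduces (used above in `get_workunits_internal` and for project dependencies) can also be exercised directly. A minimal sketch, assuming a public repository (the exact case the new warning covers) and placeholder repo coordinates:

```python
import pathlib
import tempfile

from datahub.configuration.git import GitInfo

# Placeholder coordinates; with no deploy_key configured, clone() forwards
# ssh_key=None to GitClone, which this change now permits for public repos.
git_info = GitInfo.parse_obj({"repo": "org/example-repo", "branch": "main"})

with tempfile.TemporaryDirectory() as tmp_dir:
    checkout_dir: pathlib.Path = git_info.clone(tmp_path=tmp_dir)
    print(f"checked out to {checkout_dir}")
```
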
+ if project_name == _BASE_PROJECT_NAME and manifest.project_name: + if ( + self.source_config.project_name is not None + and manifest.project_name != self.source_config.project_name + ): + logger.warning( + f"The project name in the manifest file '{manifest.project_name}'" + f"does not match the configured project name '{self.source_config.project_name}'. " + "This can lead to failures in LookML include resolution and lineage generation." + ) + elif self.source_config.project_name is None: + self.source_config.project_name = manifest.project_name - p_cloner = GitClone(f"{tmp_dir}/_remote_/{project_name}") - try: - # TODO: For 100% correctness, we should be consulting - # the manifest lock file for the exact ref to use. + # Clone the remote project dependencies. + for remote_project in manifest.remote_dependencies: + if remote_project.name in project_visited: + continue + if remote_project.name in self.base_projects_folder: + # In case a remote_dependency is specified in the project_dependencies config, + # we don't need to clone it again. + continue - p_checkout_dir = p_cloner.clone( - ssh_key=( - self.source_config.git_info.deploy_key - if self.source_config.git_info - else None - ), - repo_url=remote_project.url, - ) + p_cloner = GitClone(f"{tmp_dir}/_remote_/{remote_project.name}") + try: + # TODO: For 100% correctness, we should be consulting + # the manifest lock file for the exact ref to use. + + p_checkout_dir = p_cloner.clone( + ssh_key=( + self.source_config.git_info.deploy_key + if self.source_config.git_info + else None + ), + repo_url=remote_project.url, + ) - self.base_projects_folder[ - remote_project.name - ] = p_checkout_dir.resolve() - repo = p_cloner.get_last_repo_cloned() - assert repo - remote_git_info = GitInfo( - url_template=remote_project.url, - repo="dummy/dummy", # set to dummy values to bypass validation - branch=repo.active_branch.name, - ) - remote_git_info.repo = ( - "" # set to empty because url already contains the full path - ) - self.remote_projects_git_info[remote_project.name] = remote_git_info + self.base_projects_folder[ + remote_project.name + ] = p_checkout_dir.resolve() + repo = p_cloner.get_last_repo_cloned() + assert repo + remote_git_info = GitInfo( + url_template=remote_project.url, + repo="dummy/dummy", # set to dummy values to bypass validation + branch=repo.active_branch.name, + ) + remote_git_info.repo = ( + "" # set to empty because url already contains the full path + ) + self.remote_projects_git_info[remote_project.name] = remote_git_info - except Exception as e: - logger.warning( - f"Failed to clone remote project {project_name}. This can lead to failures in parsing lookml files later on", - e, - ) - project_visited.add(project_name) - else: - self._recursively_check_manifests( - tmp_dir, remote_project.name, project_visited - ) + except Exception as e: + logger.warning( + f"Failed to clone remote project {project_name}. 
This can lead to failures in parsing lookml files later on: {e}", + ) + project_visited.add(project_name) + else: + self._recursively_check_manifests( + tmp_dir, remote_project.name, project_visited + ) - for project in manifest.local_dependencies: - self._recursively_check_manifests(tmp_dir, project, project_visited) + for project in manifest.local_dependencies: + self._recursively_check_manifests(tmp_dir, project, project_visited) def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 assert self.source_config.base_folder diff --git a/metadata-ingestion/tests/integration/git/test_git_clone.py b/metadata-ingestion/tests/integration/git/test_git_clone.py index 3436c692f5d95..2428a6dfb1c9e 100644 --- a/metadata-ingestion/tests/integration/git/test_git_clone.py +++ b/metadata-ingestion/tests/integration/git/test_git_clone.py @@ -1,3 +1,4 @@ +import doctest import os import pytest @@ -81,6 +82,15 @@ def test_github_branch(): assert config.branch_for_clone == "main" +def test_sanitize_repo_url(): + import datahub.ingestion.source.git.git_import + + assert doctest.testmod(datahub.ingestion.source.git.git_import) == ( + 0, + 3, + ) # 0 failures, 3 tests + + def test_git_clone_public(tmp_path): git_clone = GitClone(str(tmp_path)) checkout_dir = git_clone.clone( diff --git a/metadata-ingestion/tests/integration/lookml/test_lookml.py b/metadata-ingestion/tests/integration/lookml/test_lookml.py index a71b597863148..1ed0d05c84263 100644 --- a/metadata-ingestion/tests/integration/lookml/test_lookml.py +++ b/metadata-ingestion/tests/integration/lookml/test_lookml.py @@ -799,7 +799,7 @@ def test_lookml_base_folder(): ) with pytest.raises( - pydantic.ValidationError, match=r"base_folder.+not provided.+deploy_key" + pydantic.ValidationError, match=r"base_folder.+nor.+git_info.+provided" ): LookMLSourceConfig.parse_obj({"api": fake_api}) From 0e40d38f4c24aac34b5a54127077f5021a347b91 Mon Sep 17 00:00:00 2001 From: Teppo Naakka Date: Fri, 8 Dec 2023 14:02:03 +0200 Subject: [PATCH 221/792] fix(ingest/powerbi): fix powerbi chart input handling (#9415) --- .../src/datahub/ingestion/source/powerbi/powerbi.py | 8 ++++++-- .../integration/powerbi/golden_test_container.json | 12 ------------ 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py index dc4394efcf245..4b1d0403ac776 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py @@ -504,7 +504,9 @@ def to_datahub_chart_mcp( logger.info(f"{Constant.CHART_URN}={chart_urn}") - ds_input: List[str] = self.to_urn_set(ds_mcps) + ds_input: List[str] = self.to_urn_set( + [x for x in ds_mcps if x.entityType == Constant.DATASET] + ) def tile_custom_properties(tile: powerbi_data_classes.Tile) -> dict: custom_properties: dict = { @@ -927,7 +929,9 @@ def to_chart_mcps( logger.debug(f"{Constant.CHART_URN}={chart_urn}") - ds_input: List[str] = self.to_urn_set(ds_mcps) + ds_input: List[str] = self.to_urn_set( + [x for x in ds_mcps if x.entityType == Constant.DATASET] + ) # Create chartInfo mcp # Set chartUrl only if tile is created from Report diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_container.json b/metadata-ingestion/tests/integration/powerbi/golden_test_container.json index 91b5499eaadcb..7a9ce135b4e24 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_container.json 
+++ b/metadata-ingestion/tests/integration/powerbi/golden_test_container.json @@ -1400,9 +1400,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -1546,9 +1543,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" }, @@ -2387,9 +2381,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -2514,9 +2505,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, From d52f0305eb86bc2902c59e5bea19aa1d7cf883d1 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 8 Dec 2023 13:13:49 -0500 Subject: [PATCH 222/792] fix(ingest): fix metadata for custom python packages (#9391) --- docs/modeling/extending-the-metadata-model.md | 17 ++++++--- metadata-ingestion/scripts/avro_codegen.py | 29 +++------------ .../scripts/custom_package_codegen.py | 7 ++++ .../src/datahub/_codegen/__init__.py | 0 .../src/datahub/_codegen/aspect.py | 36 +++++++++++++++++++ 5 files changed, 60 insertions(+), 29 deletions(-) create mode 100644 metadata-ingestion/src/datahub/_codegen/__init__.py create mode 100644 metadata-ingestion/src/datahub/_codegen/aspect.py diff --git a/docs/modeling/extending-the-metadata-model.md b/docs/modeling/extending-the-metadata-model.md index ba101be16b98e..293688a8b89e5 100644 --- a/docs/modeling/extending-the-metadata-model.md +++ b/docs/modeling/extending-the-metadata-model.md @@ -256,7 +256,7 @@ to deploy during development. This will allow Datahub to read and write your new import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - + If you're purely using the custom models locally, you can use a local development-mode install of the DataHub CLI. @@ -273,12 +273,21 @@ If you want to use your custom models beyond your local machine without forking This package should be installed alongside the base `acryl-datahub` package, and its metadata models will take precedence over the default ones. ```bash -cd metadata-ingestion -../gradlew customPackageGenerate -Ppackage_name=my-company-datahub-models -Ppackage_version="0.0.1" +$ cd metadata-ingestion +$ ../gradlew customPackageGenerate -Ppackage_name=my-company-datahub-models -Ppackage_version="0.0.1" + +Successfully built my-company-datahub-models-0.0.1.tar.gz and acryl_datahub_cloud-0.0.1-py3-none-any.whl + +Generated package at custom-package/my-company-datahub-models +This package should be installed alongside the main acryl-datahub package. + +Install the custom package locally with `pip install custom-package/my-company-datahub-models` +To enable others to use it, share the file at custom-package/my-company-datahub-models/dist/*.whl and have them install it with `pip install .whl` +Alternatively, publish it to PyPI with `twine upload custom-package/my-company-datahub-models/dist/*` ``` This will generate some Python build artifacts, which you can distribute within your team or publish to PyPI. -The command output will contain additional details and exact CLI commands you can use. +The command output contains additional details and exact CLI commands you can use. 
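
To make the install flow above concrete, a small hedged smoke test one could run after `pip install custom-package/my-company-datahub-models` (the package name is the doc's own example; the aspect class below is just an illustration, not something this change touches):

```python
# The generated setup.py registers the package under the `datahub.custom_packages`
# entry point (see the custom_package_codegen.py hunk below), so the standard
# import path keeps working and resolves to the custom models once installed.
from datahub.metadata.schema_classes import DatasetPropertiesClass

props = DatasetPropertiesClass(description="custom-package smoke test")
assert props.get_aspect_name() == "datasetProperties"
```
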
diff --git a/metadata-ingestion/scripts/avro_codegen.py b/metadata-ingestion/scripts/avro_codegen.py index c6f6bac128b79..bd4988f990534 100644 --- a/metadata-ingestion/scripts/avro_codegen.py +++ b/metadata-ingestion/scripts/avro_codegen.py @@ -252,34 +252,12 @@ def annotate_aspects(aspects: List[dict], schema_class_file: Path) -> None: schema_classes_lines = schema_class_file.read_text().splitlines() line_lookup_table = {line: i for i, line in enumerate(schema_classes_lines)} - # Create the Aspect class. - # We ensure that it cannot be instantiated directly, as - # per https://stackoverflow.com/a/7989101/5004662. + # Import the _Aspect class. schema_classes_lines[ line_lookup_table["__SCHEMAS: Dict[str, RecordSchema] = {}"] ] += """ -class _Aspect(DictWrapper): - ASPECT_NAME: ClassVar[str] = None # type: ignore - ASPECT_TYPE: ClassVar[str] = "default" - ASPECT_INFO: ClassVar[dict] = None # type: ignore - - def __init__(self): - if type(self) is _Aspect: - raise TypeError("_Aspect is an abstract class, and cannot be instantiated directly.") - super().__init__() - - @classmethod - def get_aspect_name(cls) -> str: - return cls.ASPECT_NAME # type: ignore - - @classmethod - def get_aspect_type(cls) -> str: - return cls.ASPECT_TYPE - - @classmethod - def get_aspect_info(cls) -> dict: - return cls.ASPECT_INFO +from datahub._codegen.aspect import _Aspect """ for aspect in aspects: @@ -776,6 +754,7 @@ def generate( import importlib from typing import TYPE_CHECKING +from datahub._codegen.aspect import _Aspect from datahub.utilities.docs_build import IS_SPHINX_BUILD from datahub.utilities._custom_package_loader import get_custom_models_package @@ -785,7 +764,7 @@ def generate( from ._schema_classes import * # Required explicitly because __all__ doesn't include _ prefixed names. - from ._schema_classes import _Aspect, __SCHEMA_TYPES + from ._schema_classes import __SCHEMA_TYPES if IS_SPHINX_BUILD: # Set __module__ to the current module so that Sphinx will document the diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index a5883c9ae9020..8582e165987ec 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -73,6 +73,8 @@ def generate( """ ) + (src_path / "py.typed").write_text("") + (package_path / "setup.py").write_text( f"""{autogen_header} from setuptools import setup @@ -87,6 +89,11 @@ def generate( "avro-gen3=={_avrogen_version}", "acryl-datahub", ], + package_data={{ + "{python_package_name}": ["py.typed"], + "{python_package_name}.models": ["schema.avsc"], + "{python_package_name}.models.schemas": ["*.avsc"], + }}, entry_points={{ "datahub.custom_packages": [ "models={python_package_name}.models.schema_classes", diff --git a/metadata-ingestion/src/datahub/_codegen/__init__.py b/metadata-ingestion/src/datahub/_codegen/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/_codegen/aspect.py b/metadata-ingestion/src/datahub/_codegen/aspect.py new file mode 100644 index 0000000000000..28fa3f1536a86 --- /dev/null +++ b/metadata-ingestion/src/datahub/_codegen/aspect.py @@ -0,0 +1,36 @@ +from typing import ClassVar + +from avrogen.dict_wrapper import DictWrapper + + +class _Aspect(DictWrapper): + """Base class for all aspects types. + + All codegened types inherit from DictWrapper, either directly or indirectly. + Types that are aspects inherit directly from _Aspect. 
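+
+    Concrete aspect classes are emitted by avro_codegen.py, which fills in the
+    ASPECT_NAME, ASPECT_TYPE, and ASPECT_INFO class variables for each aspect
+    (editorial clarification, not part of the original patch).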
+ """ + + ASPECT_NAME: ClassVar[str] = None # type: ignore + ASPECT_TYPE: ClassVar[str] = "default" + ASPECT_INFO: ClassVar[dict] = None # type: ignore + + def __init__(self): + if type(self) is _Aspect: + # Ensure that it cannot be instantiated directly, as + # per https://stackoverflow.com/a/7989101/5004662. + raise TypeError( + "_Aspect is an abstract class, and cannot be instantiated directly." + ) + super().__init__() + + @classmethod + def get_aspect_name(cls) -> str: + return cls.ASPECT_NAME # type: ignore + + @classmethod + def get_aspect_type(cls) -> str: + return cls.ASPECT_TYPE + + @classmethod + def get_aspect_info(cls) -> dict: + return cls.ASPECT_INFO From 08a9b9b6de8abba52a887d6c8b3df19855cde377 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 8 Dec 2023 16:18:34 -0500 Subject: [PATCH 223/792] fix(ingest): bug fixes and docs updates (#9422) --- docs/modeling/extending-the-metadata-model.md | 2 +- metadata-ingestion/build.gradle | 5 ++++- .../scripts/custom_package_codegen.py | 4 +++- .../src/datahub/ingestion/source/dbt/dbt_common.py | 2 -- .../src/datahub/ingestion/source/dbt/dbt_core.py | 13 +++++++++++++ .../src/datahub/ingestion/source/sql/oracle.py | 2 +- ...t_most_config_and_modified_since_admin_only.json | 3 --- node_modules/.yarn-integrity | 12 ------------ yarn.lock | 4 ---- 9 files changed, 22 insertions(+), 25 deletions(-) delete mode 100644 node_modules/.yarn-integrity delete mode 100644 yarn.lock diff --git a/docs/modeling/extending-the-metadata-model.md b/docs/modeling/extending-the-metadata-model.md index 293688a8b89e5..dc4edd3306f95 100644 --- a/docs/modeling/extending-the-metadata-model.md +++ b/docs/modeling/extending-the-metadata-model.md @@ -282,7 +282,7 @@ Generated package at custom-package/my-company-datahub-models This package should be installed alongside the main acryl-datahub package. 
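
One detail worth noting about the `.whl` hint adjusted just below: wheel filenames normalize the distribution name, so the literal f-string this commit generates may not match the built file exactly; the sample output earlier already shows an underscored wheel name (`acryl_datahub_cloud-0.0.1-py3-none-any.whl`). A hedged illustration:

```python
# Mirrors the f-string that custom_package_codegen.py now emits in place of a
# `*` glob (values are the doc's example package name and version).
package_name, package_version = "my-company-datahub-models", "0.0.1"
print(f"dist/{package_name}-{package_version}-py3-none-any.whl")
# -> dist/my-company-datahub-models-0.0.1-py3-none-any.whl
# Per the wheel spec, setuptools escapes runs of non-alphanumeric characters
# in the distribution name to underscores, so the file on disk is typically
# dist/my_company_datahub_models-0.0.1-py3-none-any.whl.
```
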
Install the custom package locally with `pip install custom-package/my-company-datahub-models` -To enable others to use it, share the file at custom-package/my-company-datahub-models/dist/*.whl and have them install it with `pip install .whl` +To enable others to use it, share the file at custom-package/my-company-datahub-models/dist/.whl and have them install it with `pip install .whl` Alternatively, publish it to PyPI with `twine upload custom-package/my-company-datahub-models/dist/*` ``` diff --git a/metadata-ingestion/build.gradle b/metadata-ingestion/build.gradle index 0d8de625ec709..047699f084c61 100644 --- a/metadata-ingestion/build.gradle +++ b/metadata-ingestion/build.gradle @@ -57,7 +57,10 @@ task installPackage(type: Exec, dependsOn: installPackageOnly) { } task codegen(type: Exec, dependsOn: [environmentSetup, installPackage, ':metadata-events:mxe-schemas:build']) { - inputs.files(project.fileTree(dir: "../metadata-events/mxe-schemas/src/", include: "**/*.avsc")) + inputs.files( + project.fileTree(dir: "../metadata-events/mxe-schemas/src/", include: "**/*.avsc"), + project.fileTree(dir: "scripts"), + ) outputs.dir('src/datahub/metadata') commandLine 'bash', '-c', "source ${venv_name}/bin/activate && ./scripts/codegen.sh" } diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 8582e165987ec..3f59fdf2cc548 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -116,7 +116,9 @@ def generate( click.echo() click.echo(f"Install the custom package locally with `pip install {package_path}`") click.echo( - f"To enable others to use it, share the file at {package_path}/dist/*.whl and have them install it with `pip install .whl`" + "To enable others to use it, share the file at " + f"{package_path}/dist/{package_name}-{package_version}-py3-none-any.whl " + "and have them install it with `pip install .whl`" ) click.echo( f"Alternatively, publish it to PyPI with `twine upload {package_path}/dist/*`" diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index 919ba5a4b285a..af28be310587a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -1315,8 +1315,6 @@ def get_schema_metadata( self.config.strip_user_ids_from_email, ) - # TODO if infer_dbt_schemas, load from saved schemas too - canonical_schema: List[SchemaField] = [] for column in node.columns: description = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index a7703b203bcee..ac2b2815f3caa 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -466,6 +466,19 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: catalog_version, ) = self.loadManifestAndCatalog() + # If catalog_version is between 1.7.0 and 1.7.2, report a warning. + if ( + catalog_version + and catalog_version.startswith("1.7.") + and catalog_version < "1.7.3" + ): + self.report.report_warning( + "dbt_catalog_version", + f"Due to a bug in dbt, dbt version {catalog_version} will have incomplete metadata on sources. " + "Please upgrade to dbt version 1.7.3 or later. 
" + "See https://github.com/dbt-labs/dbt-core/issues/9119 for details on the bug.", + ) + additional_custom_props = { "manifest_schema": manifest_schema, "manifest_version": manifest_version, diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py index 7ee54200c6493..122520a730801 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py @@ -157,7 +157,7 @@ def __getattr__(self, item: str) -> Any: @platform_name("Oracle") @config_class(OracleConfig) -@support_status(SupportStatus.CERTIFIED) +@support_status(SupportStatus.INCUBATING) @capability(SourceCapability.DOMAINS, "Enabled by default") class OracleSource(SQLAlchemySource): """ diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json b/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json index b301ca1c1b988..52add6b002197 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json +++ b/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json @@ -865,9 +865,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" }, diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity deleted file mode 100644 index 42a6cb985ab1b..0000000000000 --- a/node_modules/.yarn-integrity +++ /dev/null @@ -1,12 +0,0 @@ -{ - "systemParams": "darwin-arm64-93", - "modulesFolders": [ - "node_modules" - ], - "flags": [], - "linkedModules": [], - "topLevelPatterns": [], - "lockfileEntries": {}, - "files": [], - "artifacts": {} -} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index fb57ccd13afbd..0000000000000 --- a/yarn.lock +++ /dev/null @@ -1,4 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - From 3e79a1325cf8eca29a8bb818a50762366bfd5d22 Mon Sep 17 00:00:00 2001 From: noggi Date: Fri, 8 Dec 2023 13:37:12 -0800 Subject: [PATCH 224/792] Pin alpine base image version to 3.18 (#9421) --- docker/datahub-frontend/Dockerfile | 2 +- docker/datahub-gms/Dockerfile | 2 +- docker/datahub-mae-consumer/Dockerfile | 2 +- docker/datahub-mce-consumer/Dockerfile | 2 +- docker/datahub-upgrade/Dockerfile | 2 +- docker/elasticsearch-setup/Dockerfile | 2 +- docker/mysql-setup/Dockerfile | 2 +- docker/postgres-setup/Dockerfile | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 9c26d73f4f40b..0c4c229af34f0 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -1,7 +1,7 @@ # Defining environment ARG APP_ENV=prod -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Configurable repositories ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index 1e13fa492c7f0..9c79e1da542f0 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -24,7 +24,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 3bacd3b2dc81a..5bfa5f35ace17 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Re-declaring args from above to make them available in this stage (will inherit default values) ARG ALPINE_REPO_URL diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index bb22ab82f4402..cc79a3072c193 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Re-declaring args from above to make them available in this stage (will inherit default values) ARG ALPINE_REPO_URL diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 551d61f41b979..2beb5b54dac38 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Re-declaring args from above to make them available in this stage (will inherit default values) ARG ALPINE_REPO_URL diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index f4dd1cb9b018e..ea64f94f88727 100644 --- a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base ARG ALPINE_REPO_URL diff --git a/docker/mysql-setup/Dockerfile b/docker/mysql-setup/Dockerfile index 8b7ca704c32cd..409f96a325830 100644 --- a/docker/mysql-setup/Dockerfile +++ 
b/docker/mysql-setup/Dockerfile @@ -17,7 +17,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 +FROM alpine:3.18 COPY --from=binary /go/bin/dockerize /usr/local/bin ARG ALPINE_REPO_URL diff --git a/docker/postgres-setup/Dockerfile b/docker/postgres-setup/Dockerfile index e10f70571501e..673ce979477be 100644 --- a/docker/postgres-setup/Dockerfile +++ b/docker/postgres-setup/Dockerfile @@ -17,7 +17,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 +FROM alpine:3.18 COPY --from=binary /go/bin/dockerize /usr/local/bin ARG ALPINE_REPO_URL From 159a013b0515f8a94b88d62e4ad20aad228fac9d Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Fri, 8 Dec 2023 17:52:13 -0500 Subject: [PATCH 225/792] fix(cypress) Fix flakiness of cypress test for glossary navigation (#9410) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- .../shared/EntityDropdown/useDeleteEntity.tsx | 6 ++++ .../src/app/glossary/cacheUtils.ts | 36 +++++++++++++++++++ .../e2e/glossary/glossary_navigation.js | 3 +- 3 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 datahub-web-react/src/app/glossary/cacheUtils.ts diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx index 1e4737135ed74..171a36b1cfbcc 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx @@ -7,6 +7,7 @@ import analytics, { EventType } from '../../../analytics'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getParentNodeToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; import { useHandleDeleteDomain } from './useHandleDeleteDomain'; +import { removeTermFromGlossaryNode } from '../../../glossary/cacheUtils'; /** * Performs the flow for deleting an entity of a given type. @@ -30,6 +31,7 @@ function useDeleteEntity( const maybeDeleteEntity = getDeleteEntityMutation(type)(); const deleteEntity = (maybeDeleteEntity && maybeDeleteEntity[0]) || undefined; + const client = maybeDeleteEntity?.[1].client; function handleDeleteEntity() { deleteEntity?.({ @@ -54,6 +56,10 @@ function useDeleteEntity( handleDeleteDomain(); } + if (client && entityData.type === EntityType.GlossaryTerm && entityData?.parentNodes?.nodes) { + removeTermFromGlossaryNode(client, entityData.parentNodes.nodes[0].urn, urn); + } + setTimeout( () => { setHasBeenDeleted(true); diff --git a/datahub-web-react/src/app/glossary/cacheUtils.ts b/datahub-web-react/src/app/glossary/cacheUtils.ts new file mode 100644 index 0000000000000..f70901bf71f2f --- /dev/null +++ b/datahub-web-react/src/app/glossary/cacheUtils.ts @@ -0,0 +1,36 @@ +import { ApolloClient } from '@apollo/client'; +import { GetGlossaryNodeDocument, GetGlossaryNodeQuery } from '../../graphql/glossaryNode.generated'; + +export function removeTermFromGlossaryNode( + client: ApolloClient, + glossaryNodeUrn: string, + glossaryTermUrn: string, +) { + // Read the data from our cache for this query. + const currData: GetGlossaryNodeQuery | null = client.readQuery({ + query: GetGlossaryNodeDocument, + variables: { urn: glossaryNodeUrn }, + }); + + // Remove the term from the existing children set. 
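+    // NOTE (editorial aside, not part of the original patch): this helper
+    // follows Apollo's read-modify-write cache pattern. readQuery above
+    // returns null on a cache miss, hence the optional chaining here, and
+    // `(total || 1) - 1` treats a missing total as 1 so the cached count
+    // never goes negative after removal.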
+ const newTermChildren = { + relationships: [ + ...(currData?.glossaryNode?.children?.relationships || []).filter( + (relationship) => relationship.entity?.urn !== glossaryTermUrn, + ), + ], + total: (currData?.glossaryNode?.children?.total || 1) - 1, + }; + + // Write our data back to the cache. + client.writeQuery({ + query: GetGlossaryNodeDocument, + variables: { urn: glossaryNodeUrn }, + data: { + glossaryNode: { + ...currData?.glossaryNode, + children: newTermChildren, + }, + }, + }); +} diff --git a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js index c6e9d93f71b8c..7ddf36aa87c2d 100644 --- a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js +++ b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js @@ -1,6 +1,6 @@ const glossaryTerm = "CypressGlosssaryNavigationTerm"; const glossaryTermGroup = "CypressGlosssaryNavigationGroup"; -const glossaryParentGroup = "Cypress"; +const glossaryParentGroup = "CypressNode"; describe("glossary sidebar navigation test", () => { it("create term and term parent group, move and delete term group", () => { @@ -33,6 +33,7 @@ describe("glossary sidebar navigation test", () => { // Move a term group from the root level to be under a parent term group cy.goToGlossaryList(); cy.clickOptionWithText(glossaryTermGroup); + cy.wait(3000) cy.openThreeDotDropdown(); cy.clickOptionWithText("Move"); cy.get('[data-testid="move-glossary-entity-modal"]').contains(glossaryParentGroup).click({force: true}); From cff32e9c742f9bff2db686445e3f9cddaa6caf38 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Sat, 9 Dec 2023 05:37:00 +0530 Subject: [PATCH 226/792] fix(ingest/transformer): correct registration (#9418) --- metadata-ingestion/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index dac865d2dac37..e894cbf043338 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -650,7 +650,7 @@ "simple_add_dataset_properties = datahub.ingestion.transformer.add_dataset_properties:SimpleAddDatasetProperties", "pattern_add_dataset_schema_terms = datahub.ingestion.transformer.add_dataset_schema_terms:PatternAddDatasetSchemaTerms", "pattern_add_dataset_schema_tags = datahub.ingestion.transformer.add_dataset_schema_tags:PatternAddDatasetSchemaTags", - "extract_owners_from_tags = datahub.ingestion.transformer.extract_ownership_from_tags:ExtractOwnersFromTagsTransformer", + "extract_ownership_from_tags = datahub.ingestion.transformer.extract_ownership_from_tags:ExtractOwnersFromTagsTransformer", ], "datahub.ingestion.sink.plugins": [ "file = datahub.ingestion.sink.file:FileSink", From e4d8dcbc02d2dae73b7054813b900af239795485 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Mon, 11 Dec 2023 09:43:23 -0500 Subject: [PATCH 227/792] docs(ingest/sql-queries): Rearrange sections (#9426) --- .../sql-queries/{sql-queries.md => sql-queries_pre.md} | 5 +++-- .../src/datahub/ingestion/source/sql_queries.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) rename metadata-ingestion/docs/sources/sql-queries/{sql-queries.md => sql-queries_pre.md} (67%) diff --git a/metadata-ingestion/docs/sources/sql-queries/sql-queries.md b/metadata-ingestion/docs/sources/sql-queries/sql-queries_pre.md similarity index 67% rename from metadata-ingestion/docs/sources/sql-queries/sql-queries.md rename to metadata-ingestion/docs/sources/sql-queries/sql-queries_pre.md index 
e829b4366bb84..2d915f0bcf84d 100644 --- a/metadata-ingestion/docs/sources/sql-queries/sql-queries.md +++ b/metadata-ingestion/docs/sources/sql-queries/sql-queries_pre.md @@ -1,8 +1,9 @@ -### Example Queries File +#### Example Queries File ```json {"query": "SELECT x FROM my_table", "timestamp": 1689232738.051, "user": "user_a", "downstream_tables": [], "upstream_tables": ["my_database.my_schema.my_table"]} {"query": "INSERT INTO my_table VALUES (1, 'a')", "timestamp": 1689232737.669, "user": "user_b", "downstream_tables": ["my_database.my_schema.my_table"], "upstream_tables": []} ``` -Note that this is not a valid standard JSON file, but rather a file containing one JSON object per line. +Note that this file does not represent a single JSON object, but instead newline-delimited JSON, in which +each line is a separate JSON object. diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py index 58e9682df935e..c3d6657c81fa7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py @@ -93,8 +93,9 @@ def compute_stats(self) -> None: @capability(SourceCapability.LINEAGE_FINE, "Parsed from SQL queries") class SqlQueriesSource(Source): """ - This source reads a specifically-formatted JSON file containing SQL queries and parses them to generate lineage. + This source reads a newline-delimited JSON file containing SQL queries and parses them to generate lineage. + ### Query File Format This file should contain one JSON object per line, with the following fields: - query: string - The SQL query to parse. - timestamp (optional): number - The timestamp of the query, in seconds since the epoch. From 5ac854dcb1f1516a5325ef5bbac466d08c016fcb Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Mon, 11 Dec 2023 22:43:58 +0530 Subject: [PATCH 228/792] fix(ui): Adjusting the view of the Column Stats (#9430) --- .../shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx index 080fba6619977..0cbb79dde49cd 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx @@ -14,6 +14,8 @@ type Props = { const StatSection = styled.div` padding: 20px 20px; overflow: auto; + display: flex; + flex-direction: column; `; const NameText = styled(Typography.Text)` @@ -162,7 +164,12 @@ export default function ColumnStats({ columnStats }: Props) { return ( Column Stats - + ); } From 8a1122049c02c4929d8029c25dac517e5fdafc48 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 11 Dec 2023 14:25:43 -0800 Subject: [PATCH 229/792] feat(patch): support fine grained lineage patches (#9408) Co-authored-by: Harshal Sheth --- .../dataset/UpstreamLineageTemplate.java | 271 ++++++++++++- .../registry/UpstreamLineageTemplateTest.java | 359 ++++++++++++++++++ .../java/com/linkedin/metadata/Constants.java | 5 + .../src/datahub/specific/dataset.py | 107 +++++- .../unit/patch/complex_dataset_patch.json | 45 ++- .../tests/unit/patch/test_patch_builder.py | 16 + .../dataset/UpstreamLineagePatchBuilder.java | 231 ++++++++++- 
.../java/datahub/client/patch/PatchTest.java | 24 +- 8 files changed, 1023 insertions(+), 35 deletions(-) create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java index 35816895669be..81a4065dedb1a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java @@ -1,20 +1,41 @@ package com.linkedin.metadata.models.registry.template.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.Streams; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.dataset.FineGrainedLineageArray; import com.linkedin.dataset.UpstreamArray; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class UpstreamLineageTemplate implements ArrayMergingTemplate { +public class UpstreamLineageTemplate extends CompoundKeyTemplate { + // Fields private static final String UPSTREAMS_FIELD_NAME = "upstreams"; private static final String DATASET_FIELD_NAME = "dataset"; + private static final String FINE_GRAINED_LINEAGES_FIELD_NAME = "fineGrainedLineages"; + private static final String FINE_GRAINED_UPSTREAM_TYPE = "upstreamType"; + private static final String FINE_GRAINED_UPSTREAMS = "upstreams"; + private static final String FINE_GRAINED_DOWNSTREAM_TYPE = "downstreamType"; + private static final String FINE_GRAINED_DOWNSTREAMS = "downstreams"; + private static final String FINE_GRAINED_TRANSFORMATION_OPERATION = "transformOperation"; + private static final String FINE_GRAINED_CONFIDENCE_SCORE = "confidenceScore"; - // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key + // Template support + private static final String NONE_TRANSFORMATION_TYPE = "NONE"; + private static final Float DEFAULT_CONFIDENCE_SCORE = 1.0f; @Override public UpstreamLineage getSubtype(RecordTemplate recordTemplate) throws ClassCastException { @@ -42,14 +63,250 @@ public UpstreamLineage getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap( - baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + JsonNode transformedNode = + arrayFieldToMap( + baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + ((ObjectNode) transformedNode) + .set( + FINE_GRAINED_LINEAGES_FIELD_NAME, + combineAndTransformFineGrainedLineages( + transformedNode.get(FINE_GRAINED_LINEAGES_FIELD_NAME))); + + return transformedNode; } @Nonnull @Override public 
JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray( - patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + JsonNode rebasedNode = + transformedMapToArray( + patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + ((ObjectNode) rebasedNode) + .set( + FINE_GRAINED_LINEAGES_FIELD_NAME, + reconstructFineGrainedLineages(rebasedNode.get(FINE_GRAINED_LINEAGES_FIELD_NAME))); + return rebasedNode; + } + + /** + * Combines fine grained lineage array into a map using upstream and downstream types as keys, + * defaulting when not present. Due to this construction, patches will look like: path: + * /fineGrainedLineages/TRANSFORMATION_OPERATION/(upstreamType || downstreamType)/TYPE/FIELD_URN, + * op: ADD/REMOVE, value: float (confidenceScore) Due to the way FineGrainedLineage was designed + * it doesn't necessarily have a consistent key we can reference, so this specialized method + * mimics the arrayFieldToMap of the super class with the specialization that it does not put the + * full value of the aspect at the end of the key, just the particular array. This prevents + * unintended overwrites through improper MCP construction that is technically allowed by the + * schema when combining under fields that form the natural key. + * + * @param fineGrainedLineages the fine grained lineage array node + * @return the modified {@link JsonNode} with array fields transformed to maps + */ + private JsonNode combineAndTransformFineGrainedLineages(@Nullable JsonNode fineGrainedLineages) { + ObjectNode mapNode = instance.objectNode(); + if (!(fineGrainedLineages instanceof ArrayNode) || fineGrainedLineages.isEmpty()) { + return mapNode; + } + JsonNode lineageCopy = fineGrainedLineages.deepCopy(); + + lineageCopy + .elements() + .forEachRemaining( + node -> { + JsonNode nodeClone = node.deepCopy(); + String transformationOperation = + nodeClone.has(FINE_GRAINED_TRANSFORMATION_OPERATION) + ? nodeClone.get(FINE_GRAINED_TRANSFORMATION_OPERATION).asText() + : NONE_TRANSFORMATION_TYPE; + + if (!mapNode.has(transformationOperation)) { + mapNode.set(transformationOperation, instance.objectNode()); + } + ObjectNode transformationOperationNode = + (ObjectNode) mapNode.get(transformationOperation); + + Float confidenceScore = + nodeClone.has(FINE_GRAINED_CONFIDENCE_SCORE) + ? nodeClone.get(FINE_GRAINED_CONFIDENCE_SCORE).floatValue() + : DEFAULT_CONFIDENCE_SCORE; + + String upstreamType = + nodeClone.has(FINE_GRAINED_UPSTREAM_TYPE) + ? nodeClone.get(FINE_GRAINED_UPSTREAM_TYPE).asText() + : null; + String downstreamType = + nodeClone.has(FINE_GRAINED_DOWNSTREAM_TYPE) + ? nodeClone.get(FINE_GRAINED_DOWNSTREAM_TYPE).asText() + : null; + ArrayNode upstreams = + nodeClone.has(FINE_GRAINED_UPSTREAMS) + ? (ArrayNode) nodeClone.get(FINE_GRAINED_UPSTREAMS) + : null; + ArrayNode downstreams = + nodeClone.has(FINE_GRAINED_DOWNSTREAMS) + ? (ArrayNode) nodeClone.get(FINE_GRAINED_DOWNSTREAMS) + : null; + + // Handle upstreams + if (upstreamType == null) { + // Determine default type + Urn upstreamUrn = + upstreams != null ? 
UrnUtils.getUrn(upstreams.get(0).asText()) : null; + if (upstreamUrn != null + && SCHEMA_FIELD_ENTITY_NAME.equals(upstreamUrn.getEntityType())) { + upstreamType = FINE_GRAINED_LINEAGE_FIELD_SET_TYPE; + } else { + upstreamType = FINE_GRAINED_LINEAGE_DATASET_TYPE; + } + } + if (!transformationOperationNode.has(FINE_GRAINED_UPSTREAM_TYPE)) { + transformationOperationNode.set(FINE_GRAINED_UPSTREAM_TYPE, instance.objectNode()); + } + ObjectNode upstreamTypeNode = + (ObjectNode) transformationOperationNode.get(FINE_GRAINED_UPSTREAM_TYPE); + if (!upstreamTypeNode.has(upstreamType)) { + upstreamTypeNode.set(upstreamType, instance.objectNode()); + } + if (upstreams != null) { + addUrnsToSubType(upstreamTypeNode, upstreams, upstreamType, confidenceScore); + } + + // Handle downstreams + if (downstreamType == null) { + // Determine default type + if (downstreams != null && downstreams.size() > 1) { + downstreamType = FINE_GRAINED_LINEAGE_FIELD_SET_TYPE; + } else { + downstreamType = FINE_GRAINED_LINEAGE_FIELD_TYPE; + } + } + if (!transformationOperationNode.has(FINE_GRAINED_DOWNSTREAM_TYPE)) { + transformationOperationNode.set( + FINE_GRAINED_DOWNSTREAM_TYPE, instance.objectNode()); + } + ObjectNode downstreamTypeNode = + (ObjectNode) transformationOperationNode.get(FINE_GRAINED_DOWNSTREAM_TYPE); + if (!downstreamTypeNode.has(downstreamType)) { + downstreamTypeNode.set(downstreamType, instance.objectNode()); + } + if (downstreams != null) { + addUrnsToSubType(downstreamTypeNode, downstreams, downstreamType, confidenceScore); + } + }); + return mapNode; + } + + private void addUrnsToSubType( + JsonNode superType, ArrayNode urnsList, String subType, Float confidenceScore) { + ObjectNode upstreamSubTypeNode = (ObjectNode) superType.get(subType); + // Will overwrite repeat urns with different confidence scores with the most recently seen + upstreamSubTypeNode.setAll( + Streams.stream(urnsList.elements()) + .map(JsonNode::asText) + .distinct() + .collect(Collectors.toMap(urn -> urn, urn -> instance.numberNode(confidenceScore)))); + } + + /** + * Takes the transformed fine grained lineages map from pre-processing and reconstructs an array + * of FineGrainedLineages Avoids producing side effects by copying nodes, use resulting node and + * not the original + * + * @param transformedFineGrainedLineages the transformed fine grained lineage map + * @return the modified {@link JsonNode} formatted consistent with the original schema + */ + private ArrayNode reconstructFineGrainedLineages(JsonNode transformedFineGrainedLineages) { + if (transformedFineGrainedLineages instanceof ArrayNode) { + // We already have an ArrayNode, no need to transform. This happens during `replace` + // operations + return (ArrayNode) transformedFineGrainedLineages; + } + ObjectNode mapNode = (ObjectNode) transformedFineGrainedLineages; + ArrayNode arrayNode = instance.arrayNode(); + + mapNode + .fieldNames() + .forEachRemaining( + transformationOperation -> { + final ObjectNode transformationOperationNode = + (ObjectNode) mapNode.get(transformationOperation); + final ObjectNode upstreamType = + transformationOperationNode.has(FINE_GRAINED_UPSTREAM_TYPE) + ? (ObjectNode) transformationOperationNode.get(FINE_GRAINED_UPSTREAM_TYPE) + : instance.objectNode(); + final ObjectNode downstreamType = + transformationOperationNode.has(FINE_GRAINED_DOWNSTREAM_TYPE) + ? 
(ObjectNode) transformationOperationNode.get(FINE_GRAINED_DOWNSTREAM_TYPE) + : instance.objectNode(); + + // Handle upstreams + if (!upstreamType.isEmpty()) { + populateTypeNode( + upstreamType, + transformationOperation, + FINE_GRAINED_UPSTREAM_TYPE, + FINE_GRAINED_UPSTREAMS, + FINE_GRAINED_DOWNSTREAM_TYPE, + arrayNode); + } + + // Handle downstreams + if (!downstreamType.isEmpty()) { + populateTypeNode( + downstreamType, + transformationOperation, + FINE_GRAINED_DOWNSTREAM_TYPE, + FINE_GRAINED_DOWNSTREAMS, + FINE_GRAINED_UPSTREAM_TYPE, + arrayNode); + } + }); + + return arrayNode; + } + + private void populateTypeNode( + JsonNode typeNode, + String transformationOperation, + String typeName, + String arrayTypeName, + String defaultTypeName, + ArrayNode arrayNode) { + typeNode + .fieldNames() + .forEachRemaining( + subTypeName -> { + ObjectNode subType = (ObjectNode) typeNode.get(subTypeName); + if (!subType.isEmpty()) { + ObjectNode fineGrainedLineage = instance.objectNode(); + AtomicReference minimumConfidenceScore = new AtomicReference<>(1.0f); + + fineGrainedLineage.put(typeName, subTypeName); + fineGrainedLineage.put( + FINE_GRAINED_TRANSFORMATION_OPERATION, transformationOperation); + // Array to actually be filled out + fineGrainedLineage.set(arrayTypeName, instance.arrayNode()); + // Added to pass model validation, because we have no way of appropriately pairing + // upstreams and downstreams + // within fine grained lineages consistently due to being able to have multiple + // downstream types paired with a single + // transform operation, we just set a default type because it's a required property + fineGrainedLineage.put(defaultTypeName, FINE_GRAINED_LINEAGE_FIELD_SET_TYPE); + subType + .fieldNames() + .forEachRemaining( + subTypeKey -> { + ((ArrayNode) fineGrainedLineage.get(arrayTypeName)).add(subTypeKey); + Float scoreValue = subType.get(subTypeKey).floatValue(); + if (scoreValue <= minimumConfidenceScore.get()) { + minimumConfidenceScore.set(scoreValue); + fineGrainedLineage.set( + FINE_GRAINED_CONFIDENCE_SCORE, + instance.numberNode(minimumConfidenceScore.get())); + } + }); + arrayNode.add(fineGrainedLineage); + } + }); } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java new file mode 100644 index 0000000000000..07982a87be56c --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java @@ -0,0 +1,359 @@ +package com.linkedin.metadata.models.registry; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + +import com.fasterxml.jackson.databind.node.NumericNode; +import com.github.fge.jackson.jsonpointer.JsonPointer; +import com.github.fge.jsonpatch.AddOperation; +import com.github.fge.jsonpatch.JsonPatch; +import com.github.fge.jsonpatch.JsonPatchOperation; +import com.github.fge.jsonpatch.RemoveOperation; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.DataMap; +import com.linkedin.dataset.FineGrainedLineage; +import com.linkedin.dataset.FineGrainedLineageDownstreamType; +import com.linkedin.dataset.FineGrainedLineageUpstreamType; +import com.linkedin.dataset.UpstreamLineage; +import com.linkedin.metadata.models.registry.template.dataset.UpstreamLineageTemplate; +import java.util.ArrayList; +import java.util.List; +import 
org.testng.Assert; +import org.testng.annotations.Test; + +public class UpstreamLineageTemplateTest { + @Test + public void testPatchUpstream() throws Exception { + UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate(); + UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault(); + List patchOperations = new ArrayList<>(); + NumericNode upstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + upstreamConfidenceScore); + patchOperations.add(operation); + JsonPatch jsonPatch = new JsonPatch(patchOperations); + + // Initial population test + UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap = new DataMap(); + dataMap.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage = new FineGrainedLineage(dataMap); + UrnArray urns = new UrnArray(); + Urn urn1 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"); + urns.add(urn1); + fineGrainedLineage.setUpstreams(urns); + fineGrainedLineage.setTransformOperation("CREATE"); + fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + Assert.assertEquals(result.getFineGrainedLineages().get(0), fineGrainedLineage); + + // Test non-overwrite upstreams and correct confidence score + JsonPatchOperation operation2 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + upstreamConfidenceScore); + NumericNode upstreamConfidenceScore2 = instance.numberNode(0.1f); + JsonPatchOperation operation3 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + upstreamConfidenceScore2); + List patchOperations2 = new ArrayList<>(); + patchOperations2.add(operation2); + patchOperations2.add(operation3); + JsonPatch jsonPatch2 = new JsonPatch(patchOperations2); + UpstreamLineage result2 = upstreamLineageTemplate.applyPatch(result, jsonPatch2); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap2 = new DataMap(); + dataMap2.put("confidenceScore", 0.1); + FineGrainedLineage fineGrainedLineage2 = new FineGrainedLineage(dataMap2); + UrnArray urns2 = new UrnArray(); + Urn urn2 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"); + urns2.add(urn1); + urns2.add(urn2); + fineGrainedLineage2.setUpstreams(urns2); + fineGrainedLineage2.setTransformOperation("CREATE"); + fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + Assert.assertEquals(result2.getFineGrainedLineages().get(0), fineGrainedLineage2); + + // Check different upstream types + JsonPatchOperation operation4 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"), + 
upstreamConfidenceScore); + List patchOperations3 = new ArrayList<>(); + patchOperations3.add(operation4); + JsonPatch jsonPatch3 = new JsonPatch(patchOperations3); + UpstreamLineage result3 = upstreamLineageTemplate.applyPatch(result2, jsonPatch3); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap3 = new DataMap(); + dataMap3.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage3 = new FineGrainedLineage(dataMap3); + UrnArray urns3 = new UrnArray(); + Urn urn3 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"); + urns3.add(urn3); + fineGrainedLineage3.setUpstreams(urns3); + fineGrainedLineage3.setTransformOperation("CREATE"); + fineGrainedLineage3.setUpstreamType(FineGrainedLineageUpstreamType.DATASET); + fineGrainedLineage3.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + // Splits into two for different types + Assert.assertEquals(result3.getFineGrainedLineages().get(1), fineGrainedLineage3); + + // Check different transform types + JsonPatchOperation operation5 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"), + upstreamConfidenceScore); + List patchOperations4 = new ArrayList<>(); + patchOperations4.add(operation5); + JsonPatch jsonPatch4 = new JsonPatch(patchOperations4); + UpstreamLineage result4 = upstreamLineageTemplate.applyPatch(result3, jsonPatch4); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap4 = new DataMap(); + dataMap4.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage4 = new FineGrainedLineage(dataMap4); + UrnArray urns4 = new UrnArray(); + Urn urn4 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"); + urns4.add(urn4); + fineGrainedLineage4.setUpstreams(urns4); + fineGrainedLineage4.setTransformOperation("TRANSFORM"); + fineGrainedLineage4.setUpstreamType(FineGrainedLineageUpstreamType.DATASET); + fineGrainedLineage4.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + // New entry in array because of new transformation type + Assert.assertEquals(result4.getFineGrainedLineages().get(2), fineGrainedLineage4); + + // Remove + JsonPatchOperation removeOperation = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)")); + JsonPatchOperation removeOperation2 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)")); + JsonPatchOperation removeOperation3 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)")); + JsonPatchOperation removeOperation4 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)")); + + List removeOperations = new ArrayList<>(); + removeOperations.add(removeOperation); + removeOperations.add(removeOperation2); + removeOperations.add(removeOperation3); + removeOperations.add(removeOperation4); + JsonPatch removePatch = new JsonPatch(removeOperations); + UpstreamLineage finalResult = upstreamLineageTemplate.applyPatch(result4, removePatch); + 
Assert.assertEquals(upstreamLineageTemplate.getDefault(), finalResult); + } + + @Test + public void testPatchDownstream() throws Exception { + UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate(); + UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault(); + List patchOperations = new ArrayList<>(); + NumericNode downstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + downstreamConfidenceScore); + patchOperations.add(operation); + JsonPatch jsonPatch = new JsonPatch(patchOperations); + + // Initial population test + UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap = new DataMap(); + dataMap.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage = new FineGrainedLineage(dataMap); + UrnArray urns = new UrnArray(); + Urn urn1 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"); + urns.add(urn1); + fineGrainedLineage.setDownstreams(urns); + fineGrainedLineage.setTransformOperation("CREATE"); + fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + Assert.assertEquals(result.getFineGrainedLineages().get(0), fineGrainedLineage); + + // Test non-overwrite downstreams and correct confidence score + JsonPatchOperation operation2 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + downstreamConfidenceScore); + NumericNode downstreamConfidenceScore2 = instance.numberNode(0.1f); + JsonPatchOperation operation3 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + downstreamConfidenceScore2); + List patchOperations2 = new ArrayList<>(); + patchOperations2.add(operation2); + patchOperations2.add(operation3); + JsonPatch jsonPatch2 = new JsonPatch(patchOperations2); + UpstreamLineage result2 = upstreamLineageTemplate.applyPatch(result, jsonPatch2); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap2 = new DataMap(); + dataMap2.put("confidenceScore", 0.1); + FineGrainedLineage fineGrainedLineage2 = new FineGrainedLineage(dataMap2); + UrnArray urns2 = new UrnArray(); + Urn urn2 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"); + urns2.add(urn1); + urns2.add(urn2); + fineGrainedLineage2.setDownstreams(urns2); + fineGrainedLineage2.setTransformOperation("CREATE"); + fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + Assert.assertEquals(result2.getFineGrainedLineages().get(0), fineGrainedLineage2); + + // Check different downstream types + JsonPatchOperation operation4 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"), + 
downstreamConfidenceScore); + List patchOperations3 = new ArrayList<>(); + patchOperations3.add(operation4); + JsonPatch jsonPatch3 = new JsonPatch(patchOperations3); + UpstreamLineage result3 = upstreamLineageTemplate.applyPatch(result2, jsonPatch3); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap3 = new DataMap(); + dataMap3.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage3 = new FineGrainedLineage(dataMap3); + UrnArray urns3 = new UrnArray(); + Urn urn3 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"); + urns3.add(urn3); + fineGrainedLineage3.setDownstreams(urns3); + fineGrainedLineage3.setTransformOperation("CREATE"); + fineGrainedLineage3.setDownstreamType(FineGrainedLineageDownstreamType.FIELD); + fineGrainedLineage3.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + // Splits into two for different types + Assert.assertEquals(result3.getFineGrainedLineages().get(1), fineGrainedLineage3); + + // Check different transform types + JsonPatchOperation operation5 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"), + downstreamConfidenceScore); + List patchOperations4 = new ArrayList<>(); + patchOperations4.add(operation5); + JsonPatch jsonPatch4 = new JsonPatch(patchOperations4); + UpstreamLineage result4 = upstreamLineageTemplate.applyPatch(result3, jsonPatch4); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap4 = new DataMap(); + dataMap4.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage4 = new FineGrainedLineage(dataMap4); + UrnArray urns4 = new UrnArray(); + Urn urn4 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"); + urns4.add(urn4); + fineGrainedLineage4.setDownstreams(urns4); + fineGrainedLineage4.setTransformOperation("TRANSFORM"); + fineGrainedLineage4.setDownstreamType(FineGrainedLineageDownstreamType.FIELD); + fineGrainedLineage4.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + // New entry in array because of new transformation type + Assert.assertEquals(result4.getFineGrainedLineages().get(2), fineGrainedLineage4); + + // Remove + JsonPatchOperation removeOperation = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)")); + JsonPatchOperation removeOperation2 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)")); + JsonPatchOperation removeOperation3 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)")); + JsonPatchOperation removeOperation4 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)")); + + List removeOperations = new ArrayList<>(); + removeOperations.add(removeOperation); + removeOperations.add(removeOperation2); + removeOperations.add(removeOperation3); + removeOperations.add(removeOperation4); + JsonPatch removePatch = new JsonPatch(removeOperations); + UpstreamLineage finalResult = upstreamLineageTemplate.applyPatch(result4, 
removePatch); + Assert.assertEquals(upstreamLineageTemplate.getDefault(), finalResult); + } + + @Test + public void testUpAndDown() throws Exception { + UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate(); + UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault(); + List patchOperations = new ArrayList<>(); + NumericNode downstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + downstreamConfidenceScore); + patchOperations.add(operation); + NumericNode upstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation2 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + upstreamConfidenceScore); + patchOperations.add(operation2); + JsonPatch jsonPatch = new JsonPatch(patchOperations); + + // Initial population test + UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap = new DataMap(); + dataMap.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage = new FineGrainedLineage(dataMap); + UrnArray urns = new UrnArray(); + Urn urn1 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"); + urns.add(urn1); + fineGrainedLineage.setTransformOperation("CREATE"); + fineGrainedLineage.setUpstreams(urns); + fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage.setDownstreams(urns); + + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap2 = new DataMap(); + dataMap2.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage2 = new FineGrainedLineage(dataMap2); + fineGrainedLineage2.setTransformOperation("CREATE"); + fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage2.setDownstreams(urns); + + Assert.assertEquals(result.getFineGrainedLineages().get(1), fineGrainedLineage2); + } +} diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index f5a3c9c12ff70..3d9b533dc8f72 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -125,6 +125,11 @@ public class Constants { public static final String VIEW_PROPERTIES_ASPECT_NAME = "viewProperties"; public static final String DATASET_PROFILE_ASPECT_NAME = "datasetProfile"; + // Aspect support + public static final String FINE_GRAINED_LINEAGE_DATASET_TYPE = "DATASET"; + public static final String FINE_GRAINED_LINEAGE_FIELD_SET_TYPE = "FIELD_SET"; + public static final String FINE_GRAINED_LINEAGE_FIELD_TYPE = "FIELD"; + // Chart public static final String CHART_KEY_ASPECT_NAME = "chartKey"; public static final String CHART_INFO_ASPECT_NAME = "chartInfo"; diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index 
fcfe049fb15cf..294a80572669b 100644 --- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -1,4 +1,4 @@ -from typing import Dict, Generic, List, Optional, TypeVar, Union +from typing import Dict, Generic, List, Optional, Tuple, TypeVar, Union from urllib.parse import quote from datahub.emitter.mcp_patch_builder import MetadataPatchProposal @@ -6,6 +6,9 @@ DatasetPropertiesClass as DatasetProperties, EditableDatasetPropertiesClass as EditableDatasetProperties, EditableSchemaMetadataClass as EditableSchemaMetadata, + FineGrainedLineageClass as FineGrainedLineage, + FineGrainedLineageDownstreamTypeClass as FineGrainedLineageDownstreamType, + FineGrainedLineageUpstreamTypeClass as FineGrainedLineageUpstreamType, GlobalTagsClass as GlobalTags, GlossaryTermAssociationClass as Term, GlossaryTermsClass as GlossaryTerms, @@ -144,6 +147,108 @@ def set_upstream_lineages(self, upstreams: List[Upstream]) -> "DatasetPatchBuild ) return self + def add_fine_grained_upstream_lineage( + self, fine_grained_lineage: FineGrainedLineage + ) -> "DatasetPatchBuilder": + ( + transform_op, + upstream_type, + downstream_type, + ) = DatasetPatchBuilder.get_fine_grained_key(fine_grained_lineage) + for upstream_urn in fine_grained_lineage.upstreams or []: + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "add", + path=DatasetPatchBuilder.quote_fine_grained_upstream_path( + transform_op, upstream_type, upstream_urn + ), + value=fine_grained_lineage.confidenceScore, + ) + for downstream_urn in fine_grained_lineage.downstreams or []: + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "add", + path=DatasetPatchBuilder.quote_fine_grained_downstream_path( + transform_op, downstream_type, downstream_urn + ), + value=fine_grained_lineage.confidenceScore, + ) + return self + + @staticmethod + def get_fine_grained_key( + fine_grained_lineage: FineGrainedLineage, + ) -> Tuple[str, str, str]: + transform_op = fine_grained_lineage.transformOperation or "NONE" + upstream_type = ( + fine_grained_lineage.upstreamType + if isinstance(fine_grained_lineage.upstreamType, str) + else FineGrainedLineageUpstreamType.FIELD_SET + ) + downstream_type = ( + fine_grained_lineage.downstreamType + if isinstance(fine_grained_lineage.downstreamType, str) + else FineGrainedLineageDownstreamType.FIELD_SET + ) + return transform_op, upstream_type, downstream_type + + @staticmethod + def quote_fine_grained_downstream_path( + transform_op: str, downstream_type: str, downstream_urn: str + ) -> str: + return ( + f"/fineGrainedLineages/{quote(transform_op, safe='')}/downstreamType/" + f"{quote(downstream_type, safe='')}/{quote(downstream_urn, safe='')}" + ) + + @staticmethod + def quote_fine_grained_upstream_path( + transform_op: str, upstream_type: str, upstream_urn: str + ) -> str: + return ( + f"/fineGrainedLineages/{quote(transform_op, safe='')}/upstreamType/" + f"{quote(upstream_type, safe='')}/{quote(upstream_urn, safe='')}" + ) + + def remove_fine_grained_upstream_lineage( + self, fine_grained_lineage: FineGrainedLineage + ) -> "DatasetPatchBuilder": + ( + transform_op, + upstream_type, + downstream_type, + ) = DatasetPatchBuilder.get_fine_grained_key(fine_grained_lineage) + for upstream_urn in fine_grained_lineage.upstreams or []: + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "remove", + path=DatasetPatchBuilder.quote_fine_grained_upstream_path( + transform_op, upstream_type, upstream_urn + ), + value={}, + ) + for downstream_urn in fine_grained_lineage.downstreams or []: + 
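+ # A remove must hit the exact URL-encoded path the corresponding add produced;
+ # the quote_fine_grained_* helpers apply quote(..., safe='') to every segment,
+ # yielding paths like (illustrative):
+ # /fineGrainedLineages/TRANSFORM/downstreamType/FIELD_SET/urn%3Ali%3AschemaField%3A%28...%29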
self._add_patch( + UpstreamLineage.ASPECT_NAME, + "remove", + path=DatasetPatchBuilder.quote_fine_grained_downstream_path( + transform_op, downstream_type, downstream_urn + ), + value={}, + ) + return self + + def set_fine_grained_upstream_lineages( + self, fine_grained_lineages: List[FineGrainedLineage] + ) -> "DatasetPatchBuilder": + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "add", + path="/fineGrainedLineages", + value=fine_grained_lineages, + ) + return self + def add_tag(self, tag: Tag) -> "DatasetPatchBuilder": self._add_patch( GlobalTags.ASPECT_NAME, "add", path=f"/tags/{tag.tag}", value=tag diff --git a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json index d5dfe125942fb..ed5a7723ac2bf 100644 --- a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json +++ b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json @@ -42,26 +42,31 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)", - "changeType": "PATCH", - "aspectName": "upstreamLineage", - "aspect": { - "json": [ - { - "op": "add", - "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2Cfct_users_created_upstream%2CPROD%29", - "value": { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created_upstream,PROD)", - "type": "TRANSFORMED" - } - } - ] - } + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)", + "changeType": "PATCH", + "aspectName": "upstreamLineage", + "aspect": { + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2Cfct_users_created_upstream%2CPROD%29", + "value": { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created_upstream,PROD)", + "type": "TRANSFORMED" + } + }, + { + "op": "add", + "path": "/fineGrainedLineages/TRANSFORM/upstreamType/DATASET/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2Cfct_users_created_upstream%2CPROD%29", + "value": 1.0 + } + ] + } }, { "entityType": "dataset", diff --git a/metadata-ingestion/tests/unit/patch/test_patch_builder.py b/metadata-ingestion/tests/unit/patch/test_patch_builder.py index 0701b3d696895..f05c4978f8644 100644 --- a/metadata-ingestion/tests/unit/patch/test_patch_builder.py +++ b/metadata-ingestion/tests/unit/patch/test_patch_builder.py @@ -7,6 +7,9 @@ from datahub.ingestion.sink.file import write_metadata_file from datahub.metadata.schema_classes import ( DatasetLineageTypeClass, + FineGrainedLineageClass, + FineGrainedLineageDownstreamTypeClass, + FineGrainedLineageUpstreamTypeClass, GenericAspectClass, MetadataChangeProposalClass, TagAssociationClass, @@ -53,6 +56,19 @@ def test_complex_dataset_patch( type=DatasetLineageTypeClass.TRANSFORMED, ) ) + .add_fine_grained_upstream_lineage( + fine_grained_lineage=FineGrainedLineageClass( + upstreamType=FineGrainedLineageUpstreamTypeClass.DATASET, + upstreams=[ + make_dataset_urn( + platform="hive", name="fct_users_created_upstream", env="PROD" + ) + ], + downstreamType=FineGrainedLineageDownstreamTypeClass.FIELD_SET, + transformOperation="TRANSFORM", + confidenceScore=1.0, + ) + ) ) patcher.for_field("field1").add_tag(TagAssociationClass(tag=make_tag_urn("tag1"))) diff --git 
a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java index 6ded8a25b4e22..9db2ebc522e09 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java @@ -5,10 +5,14 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.common.urn.Urn; import com.linkedin.dataset.DatasetLineageType; +import com.linkedin.dataset.FineGrainedLineageDownstreamType; +import com.linkedin.dataset.FineGrainedLineageUpstreamType; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.ToString; import org.apache.commons.lang3.tuple.ImmutableTriple; @@ -16,7 +20,8 @@ public class UpstreamLineagePatchBuilder extends AbstractMultiFieldPatchBuilder { - private static final String PATH_START = "/upstreams/"; + private static final String UPSTREAMS_PATH_START = "/upstreams/"; + private static final String FINE_GRAINED_PATH_START = "/fineGrainedLineages/"; private static final String DATASET_KEY = "dataset"; private static final String AUDIT_STAMP_KEY = "auditStamp"; private static final String TIME_KEY = "time"; @@ -34,13 +39,233 @@ public UpstreamLineagePatchBuilder addUpstream( .set(AUDIT_STAMP_KEY, auditStamp); pathValues.add( - ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), UPSTREAMS_PATH_START + datasetUrn, value)); return this; } public UpstreamLineagePatchBuilder removeUpstream(@Nonnull DatasetUrn datasetUrn) { pathValues.add( - ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), UPSTREAMS_PATH_START + datasetUrn, null)); + return this; + } + + /** + * Method for adding an upstream FineGrained Dataset + * + * @param datasetUrn dataset to be set as upstream + * @param confidenceScore optional, confidence score for the lineage edge. Defaults to 1.0 for + * full confidence + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @return this builder + */ + public UpstreamLineagePatchBuilder addFineGrainedUpstreamDataset( + @Nonnull DatasetUrn datasetUrn, + @Nullable Float confidenceScore, + @Nonnull String transformationOperation) { + Float finalConfidenceScore = getConfidenceScoreOrDefault(confidenceScore); + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + "DATASET" + + "/" + + datasetUrn, + instance.numberNode(finalConfidenceScore))); + return this; + } + + /** + * Adds a field as a fine grained upstream + * + * @param schemaFieldUrn a schema field to be marked as upstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param confidenceScore optional, confidence score for the lineage edge. 
Defaults to 1.0 for + * full confidence + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the upstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder addFineGrainedUpstreamField( + @Nonnull Urn schemaFieldUrn, + @Nullable Float confidenceScore, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageUpstreamType type) { + Float finalConfidenceScore = getConfidenceScoreOrDefault(confidenceScore); + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageUpstreamType.FIELD_SET.toString(); + } else { + finalType = type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + instance.numberNode(finalConfidenceScore))); + + return this; + } + + /** + * Adds a field as a fine grained downstream + * + * @param schemaFieldUrn a schema field to be marked as downstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param confidenceScore optional, confidence score for the lineage edge. Defaults to 1.0 for + * full confidence + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the downstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder addFineGrainedDownstreamField( + @Nonnull Urn schemaFieldUrn, + @Nullable Float confidenceScore, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageDownstreamType type) { + Float finalConfidenceScore = getConfidenceScoreOrDefault(confidenceScore); + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageDownstreamType.FIELD_SET.toString(); + } else { + finalType = type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "downstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + instance.numberNode(finalConfidenceScore))); + return this; + } + + private Float getConfidenceScoreOrDefault(@Nullable Float confidenceScore) { + float finalConfidenceScore; + if (confidenceScore != null && confidenceScore > 0 && confidenceScore <= 1.0f) { + finalConfidenceScore = confidenceScore; + } else { + finalConfidenceScore = 1.0f; + } + + return finalConfidenceScore; + } + + /** + * Removes a field as a fine grained upstream + * + * @param schemaFieldUrn a schema field to be marked as upstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the upstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder removeFineGrainedUpstreamField( + @Nonnull Urn schemaFieldUrn, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageUpstreamType type) { + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageUpstreamType.FIELD_SET.toString(); + } else { + finalType = 
type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + null)); + + return this; + } + + public UpstreamLineagePatchBuilder removeFineGrainedUpstreamDataset( + @Nonnull DatasetUrn datasetUrn, @Nonnull String transformationOperation) { + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + "DATASET" + + "/" + + datasetUrn, + null)); + return this; + } + + /** + * Adds a field as a fine grained downstream + * + * @param schemaFieldUrn a schema field to be marked as downstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the downstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder removeFineGrainedDownstreamField( + @Nonnull Urn schemaFieldUrn, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageDownstreamType type) { + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageDownstreamType.FIELD_SET.toString(); + } else { + finalType = type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "downstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index 1d387acb0ce12..563742990f546 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -14,6 +14,7 @@ import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dataset.DatasetLineageType; import com.linkedin.metadata.graph.LineageDirection; @@ -49,15 +50,21 @@ public class PatchTest { public void testLocalUpstream() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { + DatasetUrn upstreamUrn = + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"); + Urn schemaFieldUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD), foo)"); MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() .urn( UrnUtils.getUrn( "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addUpstream( - DatasetUrn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), - DatasetLineageType.TRANSFORMED) + .addUpstream(upstreamUrn, DatasetLineageType.TRANSFORMED) + .addFineGrainedUpstreamDataset(upstreamUrn, null, "TRANSFORM") + .addFineGrainedUpstreamField(schemaFieldUrn, null, "TRANSFORM", null) + .addFineGrainedDownstreamField(schemaFieldUrn, null, "TRANSFORM", null) .build(); Future response = 
restEmitter.emit(upstreamPatch); @@ -73,6 +80,12 @@ public void testLocalUpstream() { public void testLocalUpstreamRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { + DatasetUrn upstreamUrn = + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"); + Urn schemaFieldUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD), foo)"); MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() .urn( @@ -81,6 +94,9 @@ public void testLocalUpstreamRemove() { .removeUpstream( DatasetUrn.createFromString( "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .removeFineGrainedUpstreamDataset(upstreamUrn, "TRANSFORM") + .removeFineGrainedUpstreamField(schemaFieldUrn, "TRANSFORM", null) + .removeFineGrainedDownstreamField(schemaFieldUrn, "TRANSFORM", null) .build(); Future response = restEmitter.emit(upstreamPatch); From 79ccbc57d1c3266025c8e52ce18fbfcff550c387 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 11 Dec 2023 14:41:23 -0800 Subject: [PATCH 230/792] fix(CVE-2023-6378): update logback classic (#9438) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index f5e5403e822e7..b16e3ca169c71 100644 --- a/build.gradle +++ b/build.gradle @@ -16,7 +16,7 @@ buildscript { ext.playVersion = '2.8.18' ext.log4jVersion = '2.19.0' ext.slf4jVersion = '1.7.36' - ext.logbackClassic = '1.2.12' + ext.logbackClassic = '1.2.13' ext.hadoop3Version = '3.3.5' ext.kafkaVersion = '2.3.0' ext.hazelcastVersion = '5.3.6' From ee4e8dd74c569d0dfc98e8eb13034c91b0ad61a8 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:03:30 +0530 Subject: [PATCH 231/792] feat: allow the sidebar size to be draggable (#9401) --- .../src/app/search/SearchResults.tsx | 2 +- .../src/app/search/sidebar/BrowseSidebar.tsx | 51 ++++++++++++------- .../src/app/search/sidebar/EntityNode.tsx | 3 +- .../cypress/cypress/e2e/browse/browseV2.js | 10 ++-- 4 files changed, 41 insertions(+), 25 deletions(-) diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index 56e83e4235027..d7ad6d517d8fe 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -197,7 +197,7 @@ export const SearchResults = ({ {showBrowseV2 && ( - + )} diff --git a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx index 822e75b65febc..c16bcdcaf6c72 100644 --- a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx +++ b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useState } from 'react'; import styled from 'styled-components'; import { Typography } from 'antd'; import EntityNode from './EntityNode'; @@ -7,10 +7,16 @@ import SidebarLoadingError from './SidebarLoadingError'; import { SEARCH_RESULTS_BROWSE_SIDEBAR_ID } from '../../onboarding/config/SearchOnboardingConfig'; import useSidebarEntities from './useSidebarEntities'; import { ANTD_GRAY_V2 } from '../../entity/shared/constants'; +import { ProfileSidebarResizer } from '../../entity/shared/containers/profile/sidebar/ProfileSidebarResizer'; -const Sidebar = styled.div<{ visible: boolean; width: number }>` + +export const MAX_BROWSER_WIDTH = 
500; +export const MIN_BROWSWER_WIDTH = 200; + +export const SidebarWrapper = styled.div<{ visible: boolean; width: number }>` height: 100%; width: ${(props) => (props.visible ? `${props.width}px` : '0')}; + min-width: ${(props) => (props.visible ? `${props.width}px` : '0')}; transition: width 250ms ease-in-out; border-right: 1px solid ${(props) => props.theme.styles['border-color-base']}; background-color: ${ANTD_GRAY_V2[1]}; @@ -37,29 +43,38 @@ const SidebarBody = styled.div<{ visible: boolean }>` type Props = { visible: boolean; - width: number; }; -const BrowseSidebar = ({ visible, width }: Props) => { +const BrowseSidebar = ({ visible }: Props) => { const { error, entityAggregations, retry } = useSidebarEntities({ skip: !visible, }); + const [browserWidth, setBrowserWith] = useState(window.innerWidth * 0.2); return ( - - - Navigate - - - {entityAggregations && !entityAggregations.length &&
No results found
} - {entityAggregations?.map((entityAggregation) => ( - - - - ))} - {error && } -
-
+ <> + + + Navigate + + + {entityAggregations && !entityAggregations.length &&
No results found
} + {entityAggregations?.map((entityAggregation) => ( + + + + ))} + {error && } +
+
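+ {/* The resizer below lets the user drag the sidebar width, clamped to [MIN_BROWSWER_WIDTH, MAX_BROWSER_WIDTH] */}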
+ + setBrowserWith(Math.min(Math.max(widthProp, MIN_BROWSWER_WIDTH), MAX_BROWSER_WIDTH)) + } + initialSize={browserWidth} + isSidebarOnLeft + /> + ); }; diff --git a/datahub-web-react/src/app/search/sidebar/EntityNode.tsx b/datahub-web-react/src/app/search/sidebar/EntityNode.tsx index e04e4253dca13..627d19c4fb10c 100644 --- a/datahub-web-react/src/app/search/sidebar/EntityNode.tsx +++ b/datahub-web-react/src/app/search/sidebar/EntityNode.tsx @@ -38,7 +38,8 @@ const EntityNode = () => { onToggle: (isNowOpen: boolean) => trackToggleNodeEvent(isNowOpen, 'entity'), }); - const onClickHeader = () => { + const onClickHeader = (e) => { + e.preventDefault(); if (count) toggle(); }; diff --git a/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js b/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js index a61b9030b13c6..f45edc5fa0481 100644 --- a/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js +++ b/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js @@ -46,31 +46,31 @@ describe("search", () => { cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\d\d\dpx$/); + .should("match", /\d\d\dpx$/); cy.get("[data-testid=browse-v2-toggle").click(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\dpx$/); + .should("match", /\dpx$/); cy.reload(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\dpx$/); + .should("match", /\dpx$/); cy.get("[data-testid=browse-v2-toggle").click(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\d\d\dpx$/); + .should("match", /\d\d\dpx$/); cy.reload(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\d\d\dpx$/); + .should("match", /\d\d\dpx$/); }); it("should take you to the old browse experience when clicking entity type on home page with the browse flag off", () => { From abbc4cdc577647d7b97a03117c4317805a3a8ce3 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Tue, 12 Dec 2023 17:26:29 +0530 Subject: [PATCH 232/792] fix(json-schema): do not send invalid URLs (#9417) --- .../ingestion/source/schema/json_schema.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py index f6e944f4fc3cb..c7e8a15d8dfa4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py @@ -9,6 +9,7 @@ from os.path import basename, dirname from pathlib import Path from typing import Any, Iterable, List, Optional, Union +from urllib.parse import urlparse import jsonref from pydantic import AnyHttpUrl, DirectoryPath, FilePath, validator @@ -53,6 +54,16 @@ logger = logging.getLogger(__name__) +def is_url_valid(url: Optional[str]) -> bool: + if url is None: + return False + try: + result = urlparse(url) + return all([result.scheme, result.netloc]) + except Exception: + return False + + class URIReplacePattern(ConfigModel): match: str = Field( description="Pattern to match on uri-s as part of reference resolution. 
See replace field", @@ -281,12 +292,14 @@ def _load_one_file( entityUrn=dataset_urn, aspect=models.StatusClass(removed=False) ).as_workunit() + external_url = JsonSchemaTranslator._get_id_from_any_schema(schema_dict) + if not is_url_valid(external_url): + external_url = None + yield MetadataChangeProposalWrapper( entityUrn=dataset_urn, aspect=models.DatasetPropertiesClass( - externalUrl=JsonSchemaTranslator._get_id_from_any_schema( - schema_dict - ), + externalUrl=external_url, name=dataset_simple_name, description=JsonSchemaTranslator._get_description_from_any_schema( schema_dict From ffccc6556110ea197402ad1de72117ffd5509a8d Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Tue, 12 Dec 2023 18:31:58 +0100 Subject: [PATCH 233/792] fix(ingest/profiling) Fixing profile eligibility check (#9446) --- .../datahub/ingestion/source/sql/sql_generic_profiler.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index 844a458d9f1ab..a2f91e5fae1a9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -274,16 +274,16 @@ def is_dataset_eligible_for_profiling( return False if self.config.profiling.profile_table_size_limit is not None and ( - size_in_bytes is None - or size_in_bytes / (2**30) + size_in_bytes is not None + and size_in_bytes / (2**30) > self.config.profiling.profile_table_size_limit ): self.report.profiling_skipped_size_limit[schema_name] += 1 return False if self.config.profiling.profile_table_row_limit is not None and ( - rows_count is None - or rows_count > self.config.profiling.profile_table_row_limit + rows_count is not None + and rows_count > self.config.profiling.profile_table_row_limit ): self.report.profiling_skipped_row_limit[schema_name] += 1 return False From 66f90c7ffd483f397c99dbf494280d3cd9ef10dd Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 12 Dec 2023 12:32:59 -0500 Subject: [PATCH 234/792] fix(ingest): avoid git dependency in dbt (#9447) --- metadata-ingestion/src/datahub/configuration/git.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index 9ea9007553839..a5f88744661a4 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -6,7 +6,6 @@ from datahub.configuration.common import ConfigModel from datahub.configuration.validate_field_rename import pydantic_renamed_field -from datahub.ingestion.source.git.git_import import GitClone _GITHUB_PREFIX = "https://github.com/" _GITLAB_PREFIX = "https://gitlab.com/" @@ -151,6 +150,9 @@ def clone( ) -> pathlib.Path: """Clones the repo into a temporary directory and returns the path to the checkout.""" + # We import this here to avoid a hard dependency on gitpython. 
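+ # A module-level import would make gitpython a hard requirement for anything
+ # importing this config class (e.g. dbt recipes that never clone a repo).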
+ from datahub.ingestion.source.git.git_import import GitClone + assert self.repo_ssh_locator git_clone = GitClone(str(tmp_path)) From 02982ed88600f9b11c2387e540299c437ca21ed6 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 12 Dec 2023 12:38:21 -0500 Subject: [PATCH 235/792] feat(ingest): add retries for tableau (#9437) --- .../src/datahub/ingestion/source/tableau.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index da44d09121c6c..f870e99df27c5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -21,7 +21,7 @@ import tableauserverclient as TSC from pydantic import root_validator, validator from pydantic.fields import Field -from requests.adapters import ConnectionError +from requests.adapters import ConnectionError, HTTPAdapter from tableauserverclient import ( PersonalAccessTokenAuth, Server, @@ -29,6 +29,7 @@ TableauAuth, ) from tableauserverclient.server.endpoint.exceptions import NonXMLResponseError +from urllib3 import Retry import datahub.emitter.mce_builder as builder import datahub.utilities.sqlglot_lineage as sqlglot_l @@ -174,6 +175,7 @@ class TableauConnectionConfig(ConfigModel): description="Unique relationship between the Tableau Server and site", ) + max_retries: int = Field(3, description="Number of retries for failed requests.") ssl_verify: Union[bool, str] = Field( default=True, description="Whether to verify SSL certificates. If using self-signed certificates, set to false or provide the path to the .pem certificate bundle.", @@ -224,6 +226,17 @@ def make_tableau_client(self) -> Server: # From https://stackoverflow.com/a/50159273/5004662. server._session.trust_env = False + # Setup request retries. + adapter = HTTPAdapter( + max_retries=Retry( + total=self.max_retries, + backoff_factor=1, + status_forcelist=[429, 500, 502, 503, 504], + ) + ) + server._session.mount("http://", adapter) + server._session.mount("https://", adapter) + server.auth.sign_in(authentication) return server except ServerResponseError as e: From 9899aca4995ec0bd5a7e3ccc6c7e1495b4ee78df Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 12 Dec 2023 12:16:27 -0600 Subject: [PATCH 236/792] docs(updating-datahub): update docs for v0.12.1 (#9441) --- docs/how/updating-datahub.md | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 94ab1b0611c33..36be572f2886e 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -7,15 +7,26 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ### Breaking Changes - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. + +### Potential Downtime + +### Deprecations + +### Other Notable Changes + +## 0.12.1 + +### Breaking Changes + - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. 
It has been deprecated since v0.9.6. - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. - #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps. - #8951: A great expectations based profiler has been added for the Unity Catalog source. -To use the old profiler, set `method: analyze` under the `profiling` section in your recipe. -To use the new profiler, set `method: ge`. Profiling is disabled by default, so to enable it, -one of these methods must be specified. + To use the old profiler, set `method: analyze` under the `profiling` section in your recipe. + To use the new profiler, set `method: ge`. Profiling is disabled by default, so to enable it, + one of these methods must be specified. ### Potential Downtime From eb8cbd8b4150b31429cf09158cb1113f275ac544 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Wed, 13 Dec 2023 12:19:49 +0530 Subject: [PATCH 237/792] feat: Allow specifying Data Product URN via UI (#9386) Co-authored-by: Aseem Bansal --- .../DataHubDataFetcherExceptionHandler.java | 40 +++++++---- .../CreateDataProductResolver.java | 1 + .../src/main/resources/entity.graphql | 4 ++ .../CreateDataProductModal.tsx | 5 +- .../DataProductAdvancedOption.tsx | 68 +++++++++++++++++++ .../DataProductBuilderForm.tsx | 11 ++- .../entity/domain/DataProductsTab/types.ts | 6 ++ .../metadata/service/DataProductService.java | 22 +++++- .../tests/privileges/test_privileges.py | 7 +- 9 files changed, 137 insertions(+), 27 deletions(-) create mode 100644 datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 7c3ea1d581b6e..746ce0cdc10fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -12,6 +12,8 @@ @Slf4j public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { + private static final String DEFAULT_ERROR_MESSAGE = "An unknown error occurred."; + @Override public DataFetcherExceptionHandlerResult onException( DataFetcherExceptionHandlerParameters handlerParameters) { @@ -19,28 +21,40 @@ public DataFetcherExceptionHandlerResult onException( SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); - log.error("Failed to execute DataFetcher", exception); - DataHubGraphQLErrorCode errorCode = DataHubGraphQLErrorCode.SERVER_ERROR; - String message = "An unknown error occurred."; + String message = DEFAULT_ERROR_MESSAGE; - // 
note: make sure to access the true error message via `getCause()` - if (exception.getCause() instanceof IllegalArgumentException) { + IllegalArgumentException illException = + findFirstThrowableCauseOfClass(exception, IllegalArgumentException.class); + if (illException != null) { + log.error("Failed to execute", illException); errorCode = DataHubGraphQLErrorCode.BAD_REQUEST; - message = exception.getCause().getMessage(); + message = illException.getMessage(); } - if (exception instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception).errorCode(); - message = exception.getMessage(); + DataHubGraphQLException graphQLException = + findFirstThrowableCauseOfClass(exception, DataHubGraphQLException.class); + if (graphQLException != null) { + log.error("Failed to execute", graphQLException); + errorCode = graphQLException.errorCode(); + message = graphQLException.getMessage(); } - if (exception.getCause() instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception.getCause()).errorCode(); - message = exception.getCause().getMessage(); + if (illException == null && graphQLException == null) { + log.error("Failed to execute", exception); } - DataHubGraphQLError error = new DataHubGraphQLError(message, path, sourceLocation, errorCode); return DataFetcherExceptionHandlerResult.newResult().error(error).build(); } + + T findFirstThrowableCauseOfClass(Throwable throwable, Class clazz) { + while (throwable != null) { + if (clazz.isInstance(throwable)) { + return (T) throwable; + } else { + throwable = throwable.getCause(); + } + } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index 10c487a839f35..8ac7b2c3ce375 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -47,6 +47,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm try { final Urn dataProductUrn = _dataProductService.createDataProduct( + input.getId(), input.getProperties().getName(), input.getProperties().getDescription(), authentication); diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index feb344154d11e..307c7f7b383e3 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -11055,6 +11055,10 @@ input CreateDataProductInput { The primary key of the Domain """ domainUrn: String! 
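+ # Optional: when id is omitted, the service falls back to a random UUID
+ # (see DataProductService below). Illustrative call with made-up values:
+ #   createDataProduct(input: { domainUrn: "urn:li:domain:marketing",
+ #     id: "customer-360", properties: { name: "Customer 360" } })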
+ """ + An optional id for the new data product + """ + id: String } """ diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx index 2d82521a90df5..0610fbfa7a770 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx @@ -32,6 +32,7 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on variables: { input: { domainUrn: domain.urn, + id: builderState.id, properties: { name: builderState.name, description: builderState.description || undefined, @@ -49,10 +50,10 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on onClose(); } }) - .catch(() => { + .catch(( error ) => { onClose(); message.destroy(); - message.error({ content: 'Failed to create Data Product. An unexpected error occurred' }); + message.error({ content: `Failed to create Data Product: ${error.message}.` }); }); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx new file mode 100644 index 0000000000000..a077a0308af1f --- /dev/null +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx @@ -0,0 +1,68 @@ +import React from "react"; +import { Collapse, Form, Input, Typography } from "antd"; +import styled from "styled-components"; +import { validateCustomUrnId } from '../../../shared/textUtil'; +import { DataProductBuilderFormProps } from "./types"; + + +const FormItem = styled(Form.Item)` + .ant-form-item-label { + padding-bottom: 2px; + } +`; + +const FormItemWithMargin = styled(FormItem)` + margin-bottom: 16px; +`; + +const FormItemNoMargin = styled(FormItem)` + margin-bottom: 0; +`; + +const AdvancedLabel = styled(Typography.Text)` + color: #373d44; +`; + +export function DataProductAdvancedOption({builderState, updateBuilderState }: DataProductBuilderFormProps){ + + function updateDataProductId(id: string) { + updateBuilderState({ + ...builderState, + id, + }); + } + + return ( + + Advanced Options} key="1"> + Data Product Id} + help="By default, a random UUID will be generated to uniquely identify this data product. If + you'd like to provide a custom id instead to more easily keep track of this data product, + you may provide it here. Be careful, you cannot easily change the data product id after + creation." 
+ > + ({ + validator(_, value) { + if (value && validateCustomUrnId(value)) { + return Promise.resolve(); + } + return Promise.reject(new Error('Please enter a valid Data product id')); + }, + }), + ]} + > + updateDataProductId(e.target.value)} + /> + + + + + ) +} \ No newline at end of file diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx index b5a27a6e1b876..98bb09098a36e 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx @@ -3,18 +3,14 @@ import React from 'react'; import styled from 'styled-components'; import { Editor as MarkdownEditor } from '../../shared/tabs/Documentation/components/editor/Editor'; import { ANTD_GRAY } from '../../shared/constants'; -import { DataProductBuilderState } from './types'; +import { DataProductBuilderFormProps } from './types'; +import { DataProductAdvancedOption } from './DataProductAdvancedOption'; const StyledEditor = styled(MarkdownEditor)` border: 1px solid ${ANTD_GRAY[4]}; `; -type Props = { - builderState: DataProductBuilderState; - updateBuilderState: (newState: DataProductBuilderState) => void; -}; - -export default function DataProductBuilderForm({ builderState, updateBuilderState }: Props) { +export default function DataProductBuilderForm({ builderState, updateBuilderState }: DataProductBuilderFormProps) { function updateName(name: string) { updateBuilderState({ ...builderState, @@ -47,6 +43,7 @@ export default function DataProductBuilderForm({ builderState, updateBuilderStat Description}> + ); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts index 1ed3ede39cfbe..fe22e3ed9a2a4 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts @@ -1,4 +1,10 @@ export type DataProductBuilderState = { name: string; + id?: string; description?: string; }; + +export type DataProductBuilderFormProps = { + builderState: DataProductBuilderState; + updateBuilderState: (newState: DataProductBuilderState) => void; +}; \ No newline at end of file diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java index 10016ee89605b..d60427a27a5c5 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,6 +24,7 @@ import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.r2.RemoteInvocationException; import java.util.List; import java.util.Objects; import java.util.UUID; @@ -58,11 +61,26 @@ public DataProductService(@Nonnull EntityClient entityClient, @Nonnull GraphClie * @return the urn of the newly created DataProduct */ public Urn 
createDataProduct( - @Nullable String name, @Nullable String description, @Nonnull Authentication authentication) { + @Nullable String id, + @Nullable String name, + @Nullable String description, + @Nonnull Authentication authentication) { // 1. Generate a unique id for the new DataProduct. final DataProductKey key = new DataProductKey(); - key.setId(UUID.randomUUID().toString()); + if (id != null && !id.isBlank()) { + key.setId(id); + } else { + key.setId(UUID.randomUUID().toString()); + } + try { + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, DATA_PRODUCT_ENTITY_NAME), authentication)) { + throw new IllegalArgumentException("This Data product already exists!"); + } + } catch (RemoteInvocationException e) { + throw new RuntimeException("Unable to check for existence of Data Product!"); + } // 2. Create a new instance of DataProductProperties final DataProductProperties properties = new DataProductProperties(); diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index aa54a50b04e7f..75e2265f1f555 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -63,7 +63,7 @@ def _ensure_cant_perform_action(session, json,assertion_key): action_response.raise_for_status() action_data = action_response.json() - assert action_data["errors"][0]["extensions"]["code"] == 403 + assert action_data["errors"][0]["extensions"]["code"] == 403, action_data["errors"][0] assert action_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" assert action_data["data"][assertion_key] == None @@ -367,8 +367,9 @@ def test_privilege_to_create_and_manage_policies(): # Verify new user can't create a policy create_policy = { - "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n - createPolicy(input: $input) }""", + "query": """mutation createPolicy($input: PolicyUpdateInput!) 
{ + createPolicy(input: $input) + }""", "variables": { "input": { "type": "PLATFORM", From 5af799ee892a0a1f9655ff569c4da63ffa976e52 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Wed, 13 Dec 2023 14:31:24 +0530 Subject: [PATCH 238/792] feat(ownership): add button to copy urn of an Ownership Type (#9452) --- .../entity/ownership/table/ActionsColumn.tsx | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx index 41e07520a0ece..e08853ad150bf 100644 --- a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx +++ b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { Dropdown, MenuProps, Popconfirm, Typography, message, notification } from 'antd'; -import { DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; +import { CopyOutlined, DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; import styled from 'styled-components/macro'; import { OwnershipTypeEntity } from '../../../../types.generated'; import { useDeleteOwnershipTypeMutation } from '../../../../graphql/ownership.generated'; @@ -48,6 +48,10 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe setOwnershipType(ownershipType); }; + const onCopy=() => { + navigator.clipboard.writeText(ownershipType.urn); + } + const [deleteOwnershipTypeMutation] = useDeleteOwnershipTypeMutation(); const onDelete = () => { @@ -106,6 +110,15 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe ), }, + { + key: 'copy', + icon: ( + + + Copy Urn + + ), + }, ]; const onClick: MenuProps['onClick'] = (e) => { @@ -113,6 +126,9 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe if (key === 'edit') { editOnClick(); } + else if( key === 'copy') { + onCopy(); + } }; const menuProps: MenuProps = { From a92230b32162dc26776210a3278eadaafaa6e08e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EA=B0=80=EC=9C=A4?= <60080153+KaYunKIM@users.noreply.github.com> Date: Thu, 14 Dec 2023 02:30:18 +0900 Subject: [PATCH 239/792] docs(ingest/tableau): add token to sink config in sample recipe (#9411) Co-authored-by: KaYunKIM Co-authored-by: Harshal Sheth --- metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml b/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml index ed6567b5889df..a9db27bb52a23 100644 --- a/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml +++ b/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml @@ -18,3 +18,4 @@ sink: type: "datahub-rest" config: server: "http://localhost:8080" + token: token_value # optional From 3cde9549a290d2560d9eebaa4fc5a3521266a841 Mon Sep 17 00:00:00 2001 From: allizex <150264485+allizex@users.noreply.github.com> Date: Wed, 13 Dec 2023 20:26:45 +0100 Subject: [PATCH 240/792] feat(glossary): add ability to clone glossary term(name and documentation) from term profile menu (#9445) Co-authored-by: Olga Dimova <38855943+olgadimova@users.noreply.github.com> --- .../glossaryTerm/GlossaryTermEntity.tsx | 7 +++- .../CreateGlossaryEntityModal.tsx | 34 ++++++++++++++++--- .../shared/EntityDropdown/EntityDropdown.tsx | 22 ++++++++++++ 
.../src/app/entity/shared/types.ts | 1 + 4 files changed, 59 insertions(+), 5 deletions(-) diff --git a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx index 080ee5889aec9..a6f6d9b0e2867 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx @@ -65,7 +65,12 @@ export class GlossaryTermEntity implements Entity { useEntityQuery={useGetGlossaryTermQuery as any} headerActionItems={new Set([EntityActionItem.BATCH_ADD_GLOSSARY_TERM])} headerDropdownItems={ - new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.MOVE, EntityMenuItems.DELETE]) + new Set([ + EntityMenuItems.UPDATE_DEPRECATION, + EntityMenuItems.CLONE, + EntityMenuItems.MOVE, + EntityMenuItems.DELETE, + ]) } isNameEditable hideBrowseBar diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx index 9788d36af2c65..d60e86b0af8ca 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx @@ -1,8 +1,9 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components/macro'; import { EditOutlined } from '@ant-design/icons'; import { message, Button, Input, Modal, Typography, Form, Collapse } from 'antd'; import DOMPurify from 'dompurify'; +import { useHistory } from 'react-router'; import { useCreateGlossaryTermMutation, useCreateGlossaryNodeMutation, @@ -16,6 +17,7 @@ import DescriptionModal from '../components/legacy/DescriptionModal'; import { validateCustomUrnId } from '../../../shared/textUtil'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getGlossaryRootToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; +import { getEntityPath } from '../containers/profile/utils'; const StyledItem = styled(Form.Item)` margin-bottom: 0; @@ -33,6 +35,7 @@ interface Props { entityType: EntityType; onClose: () => void; refetchData?: () => void; + isCloning?: boolean; } function CreateGlossaryEntityModal(props: Props) { @@ -43,15 +46,31 @@ function CreateGlossaryEntityModal(props: Props) { const entityRegistry = useEntityRegistry(); const [stagedId, setStagedId] = useState(undefined); const [stagedName, setStagedName] = useState(''); - const [selectedParentUrn, setSelectedParentUrn] = useState(entityData.urn); + const [selectedParentUrn, setSelectedParentUrn] = useState(props.isCloning ? 
'' : entityData.urn); const [documentation, setDocumentation] = useState(''); const [isDocumentationModalVisible, setIsDocumentationModalVisible] = useState(false); const [createButtonDisabled, setCreateButtonDisabled] = useState(true); const refetch = useRefetch(); + const history = useHistory(); const [createGlossaryTermMutation] = useCreateGlossaryTermMutation(); const [createGlossaryNodeMutation] = useCreateGlossaryNodeMutation(); + useEffect(() => { + if (props.isCloning && entityData.entityData) { + const { properties } = entityData.entityData; + + if (properties?.name) { + setStagedName(properties.name); + form.setFieldValue('name', properties.name); + } + + if (properties?.description) { + setDocumentation(properties.description); + } + } + }, [props.isCloning, entityData.entityData, form]); + function createGlossaryEntity() { const mutation = entityType === EntityType.GlossaryTerm ? createGlossaryTermMutation : createGlossaryNodeMutation; @@ -67,7 +86,7 @@ function CreateGlossaryEntityModal(props: Props) { }, }, }) - .then(() => { + .then((res) => { message.loading({ content: 'Updating...', duration: 2 }); setTimeout(() => { analytics.event({ @@ -82,12 +101,19 @@ function CreateGlossaryEntityModal(props: Props) { refetch(); if (isInGlossaryContext) { // either refresh this current glossary node or the root nodes or root terms - const nodeToUpdate = entityData?.urn || getGlossaryRootToUpdate(entityType); + const nodeToUpdate = selectedParentUrn || getGlossaryRootToUpdate(entityType); updateGlossarySidebar([nodeToUpdate], urnsToUpdate, setUrnsToUpdate); } if (refetchData) { refetchData(); } + if (props.isCloning) { + const redirectUrn = + entityType === EntityType.GlossaryTerm + ? res.data?.createGlossaryTerm + : res.data?.createGlossaryNode; + history.push(getEntityPath(entityType, redirectUrn, entityRegistry, false, false)); + } }, 2000); }) .catch((e) => { diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 5d4f9d9f875cf..8d7f1cca9c1cb 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -9,6 +9,7 @@ import { LinkOutlined, MoreOutlined, PlusOutlined, + CopyOutlined, } from '@ant-design/icons'; import { Redirect } from 'react-router'; import { EntityType } from '../../../../types.generated'; @@ -32,6 +33,7 @@ export enum EntityMenuItems { ADD_TERM_GROUP, DELETE, MOVE, + CLONE, } export const MenuIcon = styled(MoreOutlined)<{ fontSize?: number }>` @@ -107,6 +109,7 @@ function EntityDropdown(props: Props) { const [isCreateTermModalVisible, setIsCreateTermModalVisible] = useState(false); const [isCreateNodeModalVisible, setIsCreateNodeModalVisible] = useState(false); + const [isCloneEntityModalVisible, setIsCloneEntityModalVisible] = useState(false); const [isDeprecationModalVisible, setIsDeprecationModalVisible] = useState(false); const [isMoveModalVisible, setIsMoveModalVisible] = useState(false); @@ -230,6 +233,17 @@ function EntityDropdown(props: Props) { )} + {menuItems.has(EntityMenuItems.CLONE) && ( + setIsCloneEntityModalVisible(true)} + > + +  Clone + + + )} } trigger={['click']} @@ -250,6 +264,14 @@ function EntityDropdown(props: Props) { refetchData={refetchForNodes} /> )} + {isCloneEntityModalVisible && ( + setIsCloneEntityModalVisible(false)} + refetchData={entityType === EntityType.GlossaryTerm ? 
refetchForTerms : refetchForNodes} + isCloning + /> + )} {isDeprecationModalVisible && ( ; properties?: Maybe<{ + name?: Maybe; description?: Maybe; qualifiedName?: Maybe; sourceUrl?: Maybe; From a495d652e0e08885ce35eb3110a27853c2c05071 Mon Sep 17 00:00:00 2001 From: skrydal Date: Wed, 13 Dec 2023 20:34:20 +0100 Subject: [PATCH 241/792] feat(ingestion): Add typeUrn handling to ownership transformers (#9370) --- .../docs/transformer/dataset_transformer.md | 32 +++++++------- .../src/datahub/emitter/mce_builder.py | 31 ++++++------- .../transformer/add_dataset_ownership.py | 34 +++++--------- .../tests/unit/test_pipeline.py | 5 ++- .../tests/unit/test_transform_dataset.py | 44 ++++++++++++++++++- 5 files changed, 86 insertions(+), 60 deletions(-) diff --git a/metadata-ingestion/docs/transformer/dataset_transformer.md b/metadata-ingestion/docs/transformer/dataset_transformer.md index d1a1555a3ca02..1c84a2759d23e 100644 --- a/metadata-ingestion/docs/transformer/dataset_transformer.md +++ b/metadata-ingestion/docs/transformer/dataset_transformer.md @@ -55,12 +55,12 @@ transformers: ``` ## Simple Add Dataset ownership ### Config Details -| Field | Required | Type | Default | Description | -|-----------------------------|----------|--------------|---------------|------------------------------------------------------------------| -| `owner_urns` | ✅ | list[string] | | List of owner urns. | -| `ownership_type` | | string | `DATAOWNER` | ownership type of the owners. | -| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | -| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | +| Field | Required | Type | Default | Description | +|--------------------|----------|--------------|-------------|---------------------------------------------------------------------| +| `owner_urns` | ✅ | list[string] | | List of owner urns. | +| `ownership_type` | | string | "DATAOWNER" | ownership type of the owners (either as enum or ownership type urn) | +| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | +| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | For transformer behaviour on `replace_existing` and `semantics`, please refer section [Relationship Between replace_existing And semantics](#relationship-between-replace_existing-and-semantics). @@ -95,7 +95,7 @@ transformers: - "urn:li:corpuser:username1" - "urn:li:corpuser:username2" - "urn:li:corpGroup:groupname" - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owners, however overwrite the owners available for the dataset on DataHub GMS ```yaml @@ -107,7 +107,7 @@ transformers: - "urn:li:corpuser:username1" - "urn:li:corpuser:username2" - "urn:li:corpGroup:groupname" - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owners, however keep the owners available for the dataset on DataHub GMS ```yaml @@ -124,12 +124,12 @@ transformers: ## Pattern Add Dataset ownership ### Config Details -| Field | Required | Type | Default | Description | -|-----------------------------|--------- |-----------------------|------------------|-----------------------------------------------------------------------------------------| -| `owner_pattern` | ✅ | map[regx, list[urn]] | | entity urn with regular expression and list of owners urn apply to matching entity urn. 
| -| `ownership_type` | | string | `DATAOWNER` | ownership type of the owners. | -| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | -| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | +| Field | Required | Type | Default | Description | +|--------------------|----------|----------------------|-------------|-----------------------------------------------------------------------------------------| +| `owner_pattern` | ✅ | map[regx, list[urn]] | | entity urn with regular expression and list of owners urn apply to matching entity urn. | +| `ownership_type` | | string | "DATAOWNER" | ownership type of the owners (either as enum or ownership type urn) | +| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | +| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | let’s suppose we’d like to append a series of users who we know to own a different dataset from a data source but aren't detected during normal ingestion. To do so, we can use the `pattern_add_dataset_ownership` module that’s included in the ingestion framework. This will match the pattern to `urn` of the dataset and assign the respective owners. @@ -158,7 +158,7 @@ The config, which we’d append to our ingestion recipe YAML, would look like th rules: ".*example1.*": ["urn:li:corpuser:username1"] ".*example2.*": ["urn:li:corpuser:username2"] - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owner, however overwrite the owners available for the dataset on DataHub GMS ```yaml @@ -170,7 +170,7 @@ The config, which we’d append to our ingestion recipe YAML, would look like th rules: ".*example1.*": ["urn:li:corpuser:username1"] ".*example2.*": ["urn:li:corpuser:username2"] - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owner, however keep the owners available for the dataset on DataHub GMS ```yaml diff --git a/metadata-ingestion/src/datahub/emitter/mce_builder.py b/metadata-ingestion/src/datahub/emitter/mce_builder.py index 64c9ec1bb5704..3b2c87ea25a31 100644 --- a/metadata-ingestion/src/datahub/emitter/mce_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mce_builder.py @@ -9,12 +9,13 @@ from typing import ( TYPE_CHECKING, Any, + Iterable, List, Optional, + Tuple, Type, TypeVar, Union, - cast, get_type_hints, ) @@ -342,26 +343,20 @@ def make_ml_model_group_urn(platform: str, group_name: str, env: str) -> str: ) -def is_valid_ownership_type(ownership_type: Optional[str]) -> bool: - return ownership_type is not None and ownership_type in [ - OwnershipTypeClass.TECHNICAL_OWNER, - OwnershipTypeClass.BUSINESS_OWNER, - OwnershipTypeClass.DATA_STEWARD, - OwnershipTypeClass.NONE, - OwnershipTypeClass.DEVELOPER, - OwnershipTypeClass.DATAOWNER, - OwnershipTypeClass.DELEGATE, - OwnershipTypeClass.PRODUCER, - OwnershipTypeClass.CONSUMER, - OwnershipTypeClass.STAKEHOLDER, +def get_class_fields(_class: Type[object]) -> Iterable[str]: + return [ + f + for f in dir(_class) + if not callable(getattr(_class, f)) and not f.startswith("_") ] -def validate_ownership_type(ownership_type: Optional[str]) -> str: - if is_valid_ownership_type(ownership_type): - return cast(str, ownership_type) - else: - raise ValueError(f"Unexpected ownership type: {ownership_type}") +def validate_ownership_type(ownership_type: str) -> Tuple[str, Optional[str]]: 
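+    # Accepts either a built-in OwnershipTypeClass field name (e.g. "DATAOWNER")
+    # or a custom ownership type urn; returns a (type, typeUrn) pair for OwnerClass.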
+ if ownership_type.startswith("urn:li:"): + return OwnershipTypeClass.CUSTOM, ownership_type + if ownership_type in get_class_fields(OwnershipTypeClass): + return ownership_type, None + raise ValueError(f"Unexpected ownership type: {ownership_type}") def make_lineage_mce( diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py index 71cf6cfa7e92b..73cb8e4d6739b 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py @@ -14,11 +14,8 @@ from datahub.ingestion.transformer.dataset_transformer import ( DatasetOwnershipTransformer, ) -from datahub.metadata.schema_classes import ( - OwnerClass, - OwnershipClass, - OwnershipTypeClass, -) +from datahub.metadata._schema_classes import OwnershipTypeClass +from datahub.metadata.schema_classes import OwnerClass, OwnershipClass class AddDatasetOwnershipConfig(TransformerSemanticsConfigModel): @@ -102,7 +99,7 @@ def transform_aspect( class DatasetOwnershipBaseConfig(TransformerSemanticsConfigModel): - ownership_type: Optional[str] = OwnershipTypeClass.DATAOWNER + ownership_type: str = OwnershipTypeClass.DATAOWNER class SimpleDatasetOwnershipConfig(DatasetOwnershipBaseConfig): @@ -114,11 +111,14 @@ class SimpleAddDatasetOwnership(AddDatasetOwnership): """Transformer that adds a specified set of owners to each dataset.""" def __init__(self, config: SimpleDatasetOwnershipConfig, ctx: PipelineContext): - ownership_type = builder.validate_ownership_type(config.ownership_type) + ownership_type, ownership_type_urn = builder.validate_ownership_type( + config.ownership_type + ) owners = [ OwnerClass( owner=owner, type=ownership_type, + typeUrn=ownership_type_urn, ) for owner in config.owner_urns ] @@ -147,29 +147,17 @@ class PatternDatasetOwnershipConfig(DatasetOwnershipBaseConfig): class PatternAddDatasetOwnership(AddDatasetOwnership): """Transformer that adds a specified set of owners to each dataset.""" - def getOwners( - self, - key: str, - owner_pattern: KeyValuePattern, - ownership_type: Optional[str] = None, - ) -> List[OwnerClass]: - owners = [ - OwnerClass( - owner=owner, - type=builder.validate_ownership_type(ownership_type), - ) - for owner in owner_pattern.value(key) - ] - return owners - def __init__(self, config: PatternDatasetOwnershipConfig, ctx: PipelineContext): - ownership_type = builder.validate_ownership_type(config.ownership_type) owner_pattern = config.owner_pattern + ownership_type, ownership_type_urn = builder.validate_ownership_type( + config.ownership_type + ) generic_config = AddDatasetOwnershipConfig( get_owners_to_add=lambda urn: [ OwnerClass( owner=owner, type=ownership_type, + typeUrn=ownership_type_urn, ) for owner in owner_pattern.value(urn) ], diff --git a/metadata-ingestion/tests/unit/test_pipeline.py b/metadata-ingestion/tests/unit/test_pipeline.py index 7ce78f0ab3e13..0f3c984196a78 100644 --- a/metadata-ingestion/tests/unit/test_pipeline.py +++ b/metadata-ingestion/tests/unit/test_pipeline.py @@ -214,7 +214,10 @@ def test_run_including_registered_transformation(self): "transformers": [ { "type": "simple_add_dataset_ownership", - "config": {"owner_urns": ["urn:li:corpuser:foo"]}, + "config": { + "owner_urns": ["urn:li:corpuser:foo"], + "ownership_type": "urn:li:ownershipType:__system__technical_owner", + }, } ], "sink": {"type": "tests.test_helpers.sink_helpers.RecordingSink"}, diff --git 
a/metadata-ingestion/tests/unit/test_transform_dataset.py b/metadata-ingestion/tests/unit/test_transform_dataset.py index bc95451620d22..8014df2f5c519 100644 --- a/metadata-ingestion/tests/unit/test_transform_dataset.py +++ b/metadata-ingestion/tests/unit/test_transform_dataset.py @@ -234,7 +234,7 @@ def test_simple_dataset_ownership_transformation(mock_time): assert last_event.entityUrn == outputs[0].record.proposedSnapshot.urn assert all( [ - owner.type == models.OwnershipTypeClass.DATAOWNER + owner.type == models.OwnershipTypeClass.DATAOWNER and owner.typeUrn is None for owner in last_event.aspect.owners ] ) @@ -247,7 +247,7 @@ def test_simple_dataset_ownership_transformation(mock_time): assert len(second_ownership_aspect.owners) == 3 assert all( [ - owner.type == models.OwnershipTypeClass.DATAOWNER + owner.type == models.OwnershipTypeClass.DATAOWNER and owner.typeUrn is None for owner in second_ownership_aspect.owners ] ) @@ -293,6 +293,44 @@ def test_simple_dataset_ownership_with_type_transformation(mock_time): assert ownership_aspect.owners[0].type == models.OwnershipTypeClass.PRODUCER +def test_simple_dataset_ownership_with_type_urn_transformation(mock_time): + input = make_generic_dataset() + + transformer = SimpleAddDatasetOwnership.create( + { + "owner_urns": [ + builder.make_user_urn("person1"), + ], + "ownership_type": "urn:li:ownershipType:__system__technical_owner", + }, + PipelineContext(run_id="test"), + ) + + output = list( + transformer.transform( + [ + RecordEnvelope(input, metadata={}), + RecordEnvelope(EndOfStream(), metadata={}), + ] + ) + ) + + assert len(output) == 3 + + # original MCE is unchanged + assert input == output[0].record + + ownership_aspect = output[1].record.aspect + + assert isinstance(ownership_aspect, OwnershipClass) + assert len(ownership_aspect.owners) == 1 + assert ownership_aspect.owners[0].type == OwnershipTypeClass.CUSTOM + assert ( + ownership_aspect.owners[0].typeUrn + == "urn:li:ownershipType:__system__technical_owner" + ) + + def _test_extract_tags(in_urn: str, regex_str: str, out_tag: str) -> None: input = make_generic_dataset(entity_urn=in_urn) transformer = ExtractDatasetTags.create( @@ -883,6 +921,7 @@ def test_pattern_dataset_ownership_transformation(mock_time): ".*example2.*": [builder.make_user_urn("person2")], } }, + "ownership_type": "DATAOWNER", }, PipelineContext(run_id="test"), ) @@ -2233,6 +2272,7 @@ def fake_ownership_class(entity_urn: str) -> models.OwnershipClass: "replace_existing": False, "semantics": TransformerSemantics.PATCH, "owner_urns": [owner2], + "ownership_type": "DATAOWNER", }, pipeline_context=pipeline_context, ) From 32d237b56f54c83bd7b8d343b04d36f53ae72d0a Mon Sep 17 00:00:00 2001 From: Arun Vasudevan <12974850+arunvasudevan@users.noreply.github.com> Date: Wed, 13 Dec 2023 16:02:21 -0600 Subject: [PATCH 242/792] fix(ingest): reduce GraphQL Logs to warning for circuit breaker (#9436) --- .../src/datahub/api/circuit_breaker/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py b/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py index 4dcf40454736b..27317826264b8 100644 --- a/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py +++ b/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py @@ -1,3 +1,7 @@ +import logging + +from gql.transport.requests import log as requests_logger + from datahub.api.circuit_breaker.assertion_circuit_breaker import ( AssertionCircuitBreaker, AssertionCircuitBreakerConfig, @@ 
-6,3 +10,5 @@ OperationCircuitBreaker, OperationCircuitBreakerConfig, ) + +requests_logger.setLevel(logging.WARNING) From 288e458739ec15e0d294ed5c0eb54963fee01071 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 14 Dec 2023 06:19:05 +0530 Subject: [PATCH 243/792] refactor(ui): support Apollo caching for settings / Policies (#9442) --- .../app/permissions/policy/ManagePolicies.tsx | 194 ++------------- .../policy/_tests_/policyUtils.test.tsx | 110 +++++++++ .../src/app/permissions/policy/policyUtils.ts | 98 ++++++++ .../src/app/permissions/policy/usePolicy.ts | 227 ++++++++++++++++++ 4 files changed, 460 insertions(+), 169 deletions(-) create mode 100644 datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx create mode 100644 datahub-web-react/src/app/permissions/policy/usePolicy.ts diff --git a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx index 2f0c284fc4e8f..72c22f3bddc2c 100644 --- a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx +++ b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useMemo, useState } from 'react'; -import { Button, Empty, message, Modal, Pagination, Tag } from 'antd'; +import { Button, Empty, message, Pagination, Tag } from 'antd'; import styled from 'styled-components/macro'; import * as QueryString from 'query-string'; import { DeleteOutlined, PlusOutlined } from '@ant-design/icons'; @@ -7,26 +7,15 @@ import { useLocation } from 'react-router'; import PolicyBuilderModal from './PolicyBuilderModal'; import { Policy, - PolicyUpdateInput, PolicyState, - PolicyType, - Maybe, - ResourceFilterInput, - PolicyMatchFilter, - PolicyMatchFilterInput, - PolicyMatchCriterionInput, - EntityType, } from '../../../types.generated'; import { useAppConfig } from '../../useAppConfig'; import PolicyDetailsModal from './PolicyDetailsModal'; import { - useCreatePolicyMutation, - useDeletePolicyMutation, useListPoliciesQuery, - useUpdatePolicyMutation, } from '../../../graphql/policy.generated'; import { Message } from '../../shared/Message'; -import { EMPTY_POLICY } from './policyUtils'; +import { DEFAULT_PAGE_SIZE, EMPTY_POLICY } from './policyUtils'; import TabToolbar from '../../entity/shared/components/styled/TabToolbar'; import { StyledTable } from '../../entity/shared/components/styled/StyledTable'; import AvatarsGroup from '../AvatarsGroup'; @@ -37,6 +26,7 @@ import { scrollToTop } from '../../shared/searchUtils'; import analytics, { EventType } from '../../analytics'; import { POLICIES_CREATE_POLICY_ID, POLICIES_INTRO_ID } from '../../onboarding/config/PoliciesOnboardingConfig'; import { OnboardingTour } from '../../onboarding/OnboardingTour'; +import { usePolicy } from './usePolicy'; const SourceContainer = styled.div` overflow: auto; @@ -84,58 +74,6 @@ const PageContainer = styled.span` overflow: auto; `; -const DEFAULT_PAGE_SIZE = 10; - -type PrivilegeOptionType = { - type?: string; - name?: Maybe; -}; - -const toFilterInput = (filter: PolicyMatchFilter): PolicyMatchFilterInput => { - return { - criteria: filter.criteria?.map((criterion): PolicyMatchCriterionInput => { - return { - field: criterion.field, - values: criterion.values.map((criterionValue) => criterionValue.value), - condition: criterion.condition, - }; - }), - }; -}; - -const toPolicyInput = (policy: Omit): PolicyUpdateInput => { - let policyInput: PolicyUpdateInput = { - 
type: policy.type, - name: policy.name, - state: policy.state, - description: policy.description, - privileges: policy.privileges, - actors: { - users: policy.actors.users, - groups: policy.actors.groups, - allUsers: policy.actors.allUsers, - allGroups: policy.actors.allGroups, - resourceOwners: policy.actors.resourceOwners, - resourceOwnersTypes: policy.actors.resourceOwnersTypes, - }, - }; - if (policy.resources !== null && policy.resources !== undefined) { - let resourceFilter: ResourceFilterInput = { - type: policy.resources.type, - resources: policy.resources.resources, - allResources: policy.resources.allResources, - }; - if (policy.resources.filter) { - resourceFilter = { ...resourceFilter, filter: toFilterInput(policy.resources.filter) }; - } - // Add the resource filters. - policyInput = { - ...policyInput, - resources: resourceFilter, - }; - } - return policyInput; -}; // TODO: Cleanup the styling. export const ManagePolicies = () => { @@ -163,9 +101,7 @@ export const ManagePolicies = () => { const [focusPolicyUrn, setFocusPolicyUrn] = useState(undefined); const [focusPolicy, setFocusPolicy] = useState>(EMPTY_POLICY); - // Construct privileges - const platformPrivileges = policiesConfig?.platformPrivileges || []; - const resourcePrivileges = policiesConfig?.resourcePrivileges || []; + const { loading: policiesLoading, @@ -183,15 +119,6 @@ export const ManagePolicies = () => { fetchPolicy: (query?.length || 0) > 0 ? 'no-cache' : 'cache-first', }); - // Any time a policy is removed, edited, or created, refetch the list. - const [createPolicy, { error: createPolicyError }] = useCreatePolicyMutation(); - - const [updatePolicy, { error: updatePolicyError }] = useUpdatePolicyMutation(); - - const [deletePolicy, { error: deletePolicyError }] = useDeletePolicyMutation(); - - const updateError = createPolicyError || updatePolicyError || deletePolicyError; - const totalPolicies = policiesData?.listPolicies?.total || 0; const policies = useMemo(() => policiesData?.listPolicies?.policies || [], [policiesData]); @@ -212,28 +139,6 @@ export const ManagePolicies = () => { setShowPolicyBuilderModal(false); }; - const getPrivilegeNames = (policy: Omit) => { - let privileges: PrivilegeOptionType[] = []; - if (policy?.type === PolicyType.Platform) { - privileges = platformPrivileges - .filter((platformPrivilege) => policy.privileges.includes(platformPrivilege.type)) - .map((platformPrivilege) => { - return { type: platformPrivilege.type, name: platformPrivilege.displayName }; - }); - } else { - const allResourcePriviliges = resourcePrivileges.find( - (resourcePrivilege) => resourcePrivilege.resourceType === 'all', - ); - privileges = - allResourcePriviliges?.privileges - .filter((resourcePrivilege) => policy.privileges.includes(resourcePrivilege.type)) - .map((b) => { - return { type: b.type, name: b.displayName }; - }) || []; - } - return privileges; - }; - const onViewPolicy = (policy: Policy) => { setShowViewPolicyModal(true); setFocusPolicyUrn(policy?.urn); @@ -247,79 +152,30 @@ export const ManagePolicies = () => { }; const onEditPolicy = (policy: Policy) => { - setShowPolicyBuilderModal(true); - setFocusPolicyUrn(policy?.urn); - setFocusPolicy({ ...policy }); - }; - - // On Delete Policy handler - const onRemovePolicy = (policy: Policy) => { - Modal.confirm({ - title: `Delete ${policy?.name}`, - content: `Are you sure you want to remove policy?`, - onOk() { - deletePolicy({ variables: { urn: policy?.urn as string } }); // There must be a focus policy urn. 
- analytics.event({ - type: EventType.DeleteEntityEvent, - entityUrn: policy?.urn, - entityType: EntityType.DatahubPolicy, - }); - message.success('Successfully removed policy.'); - setTimeout(() => { - policiesRefetch(); - }, 3000); - onCancelViewPolicy(); - }, - onCancel() {}, - okText: 'Yes', - maskClosable: true, - closable: true, - }); + setShowPolicyBuilderModal(true); + setFocusPolicyUrn(policy?.urn); + setFocusPolicy({ ...policy }); }; - // On Activate and deactivate Policy handler - const onToggleActiveDuplicate = (policy: Policy) => { - const newState = policy?.state === PolicyState.Active ? PolicyState.Inactive : PolicyState.Active; - const newPolicy = { - ...policy, - state: newState, - }; - updatePolicy({ - variables: { - urn: policy?.urn as string, // There must be a focus policy urn. - input: toPolicyInput(newPolicy), - }, - }); - message.success(`Successfully ${newState === PolicyState.Active ? 'activated' : 'deactivated'} policy.`); - setTimeout(() => { - policiesRefetch(); - }, 3000); - setShowViewPolicyModal(false); - }; - - // On Add/Update Policy handler - const onSavePolicy = (savePolicy: Omit) => { - if (focusPolicyUrn) { - // If there's an URN associated with the focused policy, then we are editing an existing policy. - updatePolicy({ variables: { urn: focusPolicyUrn, input: toPolicyInput(savePolicy) } }); - analytics.event({ - type: EventType.UpdatePolicyEvent, - policyUrn: focusPolicyUrn, - }); - } else { - // If there's no URN associated with the focused policy, then we are creating. - createPolicy({ variables: { input: toPolicyInput(savePolicy) } }); - analytics.event({ - type: EventType.CreatePolicyEvent, - }); - } - message.success('Successfully saved policy.'); - setTimeout(() => { - policiesRefetch(); - }, 3000); - onClosePolicyBuilder(); - }; + const { + createPolicyError, + updatePolicyError, + deletePolicyError, + onSavePolicy, + onToggleActiveDuplicate, + onRemovePolicy, + getPrivilegeNames + } = usePolicy( + policiesConfig, + focusPolicyUrn, + policiesRefetch, + setShowViewPolicyModal, + onCancelViewPolicy, + onClosePolicyBuilder + ); + const updateError = createPolicyError || updatePolicyError || deletePolicyError; + const tableColumns = [ { title: 'Name', diff --git a/datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx b/datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx new file mode 100644 index 0000000000000..06d2e97255139 --- /dev/null +++ b/datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx @@ -0,0 +1,110 @@ +import { + addOrUpdatePoliciesInList, + updateListPoliciesCache, + removeFromListPoliciesCache, + } from '../policyUtils'; + + // Mock the Apollo Client readQuery and writeQuery methods + const mockReadQuery = jest.fn(); + const mockWriteQuery = jest.fn(); + + jest.mock('@apollo/client', () => ({ + ...jest.requireActual('@apollo/client'), + useApolloClient: () => ({ + readQuery: mockReadQuery, + writeQuery: mockWriteQuery, + }), + })); + + describe('addOrUpdatePoliciesInList', () => { + it('should add a new policy to the list', () => { + const existingPolicies = [{ urn: 'existing-urn' }]; + const newPolicies = { urn: 'new-urn' }; + + const result = addOrUpdatePoliciesInList(existingPolicies, newPolicies); + + expect(result.length).toBe(existingPolicies.length + 1); + expect(result).toContain(newPolicies); + }); + + it('should update an existing policy in the list', () => { + const existingPolicies = [{ urn: 'existing-urn' }]; + const newPolicies = { urn: 'existing-urn', 
updatedField: 'new-value' }; + + const result = addOrUpdatePoliciesInList(existingPolicies, newPolicies); + + expect(result.length).toBe(existingPolicies.length); + expect(result).toContainEqual(newPolicies); + }); + }); + + describe('updateListPoliciesCache', () => { + // Mock client.readQuery response + const mockReadQueryResponse = { + listPolicies: { + start: 0, + count: 1, + total: 1, + policies: [{ urn: 'existing-urn' }], + }, + }; + + beforeEach(() => { + mockReadQuery.mockReturnValueOnce(mockReadQueryResponse); + }); + + it('should update the list policies cache with a new policy', () => { + const mockClient = { + readQuery: mockReadQuery, + writeQuery: mockWriteQuery, + }; + + const policiesToAdd = [{ urn: 'new-urn' }]; + const pageSize = 10; + + updateListPoliciesCache(mockClient, policiesToAdd, pageSize); + + // Ensure writeQuery is called with the expected data + expect(mockWriteQuery).toHaveBeenCalledWith({ + query: expect.any(Object), + variables: { input: { start: 0, count: pageSize, query: undefined } }, + data: expect.any(Object), + }); + }); + }); + + describe('removeFromListPoliciesCache', () => { + // Mock client.readQuery response + const mockReadQueryResponse = { + listPolicies: { + start: 0, + count: 1, + total: 1, + policies: [{ urn: 'existing-urn' }], + }, + }; + + beforeEach(() => { + mockReadQuery.mockReturnValueOnce(mockReadQueryResponse); + }); + + it('should remove a policy from the list policies cache', () => { + const mockClient = { + readQuery: mockReadQuery, + writeQuery: mockWriteQuery, + }; + + const urnToRemove = 'existing-urn'; + const pageSize = 10; + + removeFromListPoliciesCache(mockClient, urnToRemove, pageSize); + + // Ensure writeQuery is called with the expected data + expect(mockWriteQuery).toHaveBeenCalledWith({ + query: expect.any(Object), + variables: { input: { start: 0, count: pageSize } }, + data: expect.any(Object), + }); + }); + }); + \ No newline at end of file diff --git a/datahub-web-react/src/app/permissions/policy/policyUtils.ts b/datahub-web-react/src/app/permissions/policy/policyUtils.ts index 2f178fcdeb5c3..27aa8fcd351e9 100644 --- a/datahub-web-react/src/app/permissions/policy/policyUtils.ts +++ b/datahub-web-react/src/app/permissions/policy/policyUtils.ts @@ -10,6 +10,9 @@ import { ResourceFilter, ResourcePrivileges, } from '../../../types.generated'; +import { ListPoliciesDocument, ListPoliciesQuery } from '../../../graphql/policy.generated'; + +export const DEFAULT_PAGE_SIZE = 10; export const EMPTY_POLICY = { type: PolicyType.Metadata, @@ -126,3 +129,98 @@ export const setFieldValues = ( } return { ...filter, criteria: [...restCriteria, createCriterion(resourceFieldType, fieldValues)] }; }; + +export const addOrUpdatePoliciesInList = (existingPolicies, newPolicies) => { + const policies = [...existingPolicies]; + let didUpdate = false; + const updatedPolicies = policies.map((policy) => { + if (policy.urn === newPolicies.urn) { + didUpdate = true; + return newPolicies; + } + return policy; + }); + return didUpdate ? updatedPolicies : [newPolicies, ...existingPolicies]; +}; + +/** + * Add an entry to the ListPolicies cache. + */ +export const updateListPoliciesCache = (client, policies, pageSize) => { + // Read the data from our cache for this query. + const currData: ListPoliciesQuery | null = client.readQuery({ + query: ListPoliciesDocument, + variables: { + input: { + start: 0, + count: pageSize, + query: undefined, + }, + }, + }); + + // Add our new policy into the existing list. 
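+    // addOrUpdatePoliciesInList replaces a cached entry in place when the urn
+    // already exists and prepends the new policy otherwise; didAddTest below is
+    // how we detect whether count/total must be bumped before writing back.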
+ const existingPolicies = [...(currData?.listPolicies?.policies || [])]; + const newPolicies = addOrUpdatePoliciesInList(existingPolicies, policies); + const didAddTest = newPolicies.length > existingPolicies.length; + + // Write our data back to the cache. + client.writeQuery({ + query: ListPoliciesDocument, + variables: { + input: { + start: 0, + count: pageSize, + query: undefined, + }, + }, + data: { + + listPolicies: { + __typename: 'ListPoliciesResult', + start: 0, + count: didAddTest ? (currData?.listPolicies?.count || 0) + 1 : currData?.listPolicies?.count, + total: didAddTest ? (currData?.listPolicies?.total || 0) + 1 : currData?.listPolicies?.total, + policies: newPolicies, + }, + }, + }); +}; + +/** + * Remove an entry from the ListTests cache. + */ +export const removeFromListPoliciesCache = (client, urn, pageSize) => { + // Read the data from our cache for this query. + const currData: ListPoliciesQuery | null = client.readQuery({ + query: ListPoliciesDocument, + variables: { + input: { + start: 0, + count: pageSize, + }, + }, + }); + + // Remove the policy from the existing tests set. + const newPolicies = [...(currData?.listPolicies?.policies || []).filter((policy) => policy.urn !== urn)]; + + // Write our data back to the cache. + client.writeQuery({ + query: ListPoliciesDocument, + variables: { + input: { + start: 0, + count: pageSize, + }, + }, + data: { + listPolicies: { + start: currData?.listPolicies?.start || 0, + count: (currData?.listPolicies?.count || 1) - 1, + total: (currData?.listPolicies?.total || 1) - 1, + policies: newPolicies, + }, + }, + }); +}; diff --git a/datahub-web-react/src/app/permissions/policy/usePolicy.ts b/datahub-web-react/src/app/permissions/policy/usePolicy.ts new file mode 100644 index 0000000000000..6f359805e42db --- /dev/null +++ b/datahub-web-react/src/app/permissions/policy/usePolicy.ts @@ -0,0 +1,227 @@ +import { Modal, message } from 'antd'; +import { useApolloClient } from '@apollo/client'; +import { + EntityType, + Policy, + PolicyMatchCriterionInput, + PolicyMatchFilter, + PolicyMatchFilterInput, + PolicyState, + PolicyType, + Maybe, + PolicyUpdateInput, + ResourceFilterInput, +} from '../../../types.generated'; +import { useCreatePolicyMutation, useDeletePolicyMutation, useUpdatePolicyMutation } from '../../../graphql/policy.generated'; +import analytics, { EventType } from '../../analytics'; +import { DEFAULT_PAGE_SIZE, removeFromListPoliciesCache, updateListPoliciesCache } from './policyUtils'; + + +type PrivilegeOptionType = { + type?: string; + name?: Maybe; +}; + +export function usePolicy( + policiesConfig, + focusPolicyUrn, + policiesRefetch, + setShowViewPolicyModal, + onCancelViewPolicy, + onClosePolicyBuilder +){ + + const client = useApolloClient(); + + // Construct privileges + const platformPrivileges = policiesConfig?.platformPrivileges || []; + const resourcePrivileges = policiesConfig?.resourcePrivileges || []; + + // Any time a policy is removed, edited, or created, refetch the list. 
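+    // Besides the delayed refetch, the Apollo cache is patched directly via
+    // updateListPoliciesCache / removeFromListPoliciesCache so the policies
+    // table reflects a change before the refetch lands.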
+ const [createPolicy, { error: createPolicyError }] = useCreatePolicyMutation(); + + const [updatePolicy, { error: updatePolicyError }] = useUpdatePolicyMutation(); + + const [deletePolicy, { error: deletePolicyError }] = useDeletePolicyMutation(); + + const toFilterInput = (filter: PolicyMatchFilter): PolicyMatchFilterInput => { + return { + criteria: filter.criteria?.map((criterion): PolicyMatchCriterionInput => { + return { + field: criterion.field, + values: criterion.values.map((criterionValue) => criterionValue.value), + condition: criterion.condition, + }; + }), + }; + }; + + const toPolicyInput = (policy: Omit): PolicyUpdateInput => { + let policyInput: PolicyUpdateInput = { + type: policy.type, + name: policy.name, + state: policy.state, + description: policy.description, + privileges: policy.privileges, + actors: { + users: policy.actors.users, + groups: policy.actors.groups, + allUsers: policy.actors.allUsers, + allGroups: policy.actors.allGroups, + resourceOwners: policy.actors.resourceOwners, + resourceOwnersTypes: policy.actors.resourceOwnersTypes, + }, + }; + if (policy.resources !== null && policy.resources !== undefined) { + let resourceFilter: ResourceFilterInput = { + type: policy.resources.type, + resources: policy.resources.resources, + allResources: policy.resources.allResources, + }; + if (policy.resources.filter) { + resourceFilter = { ...resourceFilter, filter: toFilterInput(policy.resources.filter) }; + } + // Add the resource filters. + policyInput = { + ...policyInput, + resources: resourceFilter, + }; + } + return policyInput; + }; + + const getPrivilegeNames = (policy: Omit) => { + let privileges: PrivilegeOptionType[] = []; + if (policy?.type === PolicyType.Platform) { + privileges = platformPrivileges + .filter((platformPrivilege) => policy.privileges.includes(platformPrivilege.type)) + .map((platformPrivilege) => { + return { type: platformPrivilege.type, name: platformPrivilege.displayName }; + }); + } else { + const allResourcePriviliges = resourcePrivileges.find( + (resourcePrivilege) => resourcePrivilege.resourceType === 'all', + ); + privileges = + allResourcePriviliges?.privileges + .filter((resourcePrivilege) => policy.privileges.includes(resourcePrivilege.type)) + .map((b) => { + return { type: b.type, name: b.displayName }; + }) || []; + } + return privileges; + }; + + // On Delete Policy handler + const onRemovePolicy = (policy: Policy) => { + Modal.confirm({ + title: `Delete ${policy?.name}`, + content: `Are you sure you want to remove policy?`, + onOk() { + deletePolicy({ variables: { urn: policy?.urn as string } }) + .then(()=>{ + // There must be a focus policy urn. + analytics.event({ + type: EventType.DeleteEntityEvent, + entityUrn: policy?.urn, + entityType: EntityType.DatahubPolicy, + }); + message.success('Successfully removed policy.'); + removeFromListPoliciesCache(client,policy?.urn, DEFAULT_PAGE_SIZE); + setTimeout(() => { + policiesRefetch(); + }, 3000); + onCancelViewPolicy(); + }) + }, + onCancel() {}, + okText: 'Yes', + maskClosable: true, + closable: true, + }); + }; + + // On Activate and deactivate Policy handler + const onToggleActiveDuplicate = (policy: Policy) => { + const newState = policy?.state === PolicyState.Active ? PolicyState.Inactive : PolicyState.Active; + const newPolicy = { + ...policy, + state: newState, + }; + updatePolicy({ + variables: { + urn: policy?.urn as string, // There must be a focus policy urn. 
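+                // toPolicyInput rebuilds the GraphQL PolicyUpdateInput from the
+                // UI policy object, keeping only fields the mutation accepts.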
+ input: toPolicyInput(newPolicy), + }, + }).then(()=>{ + const updatePolicies= { + ...newPolicy, + __typename: 'ListPoliciesResult', + } + updateListPoliciesCache(client,updatePolicies,DEFAULT_PAGE_SIZE); + message.success(`Successfully ${newState === PolicyState.Active ? 'activated' : 'deactivated'} policy.`); + setTimeout(() => { + policiesRefetch(); + }, 3000); + }) + + setShowViewPolicyModal(false); + }; + + // On Add/Update Policy handler + const onSavePolicy = (savePolicy: Omit) => { + if (focusPolicyUrn) { + // If there's an URN associated with the focused policy, then we are editing an existing policy. + updatePolicy({ variables: { urn: focusPolicyUrn, input: toPolicyInput(savePolicy) } }) + .then(()=>{ + const newPolicy = { + __typename: 'ListPoliciesResult', + urn: focusPolicyUrn, + ...savePolicy, + }; + analytics.event({ + type: EventType.UpdatePolicyEvent, + policyUrn: focusPolicyUrn, + }); + message.success('Successfully saved policy.'); + updateListPoliciesCache(client,newPolicy,DEFAULT_PAGE_SIZE); + setTimeout(() => { + policiesRefetch(); + }, 1000); + onClosePolicyBuilder(); + }) + } else { + // If there's no URN associated with the focused policy, then we are creating. + createPolicy({ variables: { input: toPolicyInput(savePolicy) } }) + .then((result)=>{ + const newPolicy = { + __typename: 'ListPoliciesResult', + urn: result?.data?.createPolicy, + ...savePolicy, + type: null, + actors: null, + resources: null, + }; + analytics.event({ + type: EventType.CreatePolicyEvent, + }); + message.success('Successfully saved policy.'); + setTimeout(() => { + policiesRefetch(); + }, 1000); + updateListPoliciesCache(client,newPolicy,DEFAULT_PAGE_SIZE); + onClosePolicyBuilder(); + }) + } + }; + + return{ + createPolicyError, + updatePolicyError, + deletePolicyError, + onSavePolicy, + onToggleActiveDuplicate, + onRemovePolicy, + getPrivilegeNames, + } +} \ No newline at end of file From b87f9774ae646180675023196871f5965a5d97c3 Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 14 Dec 2023 06:41:30 +0530 Subject: [PATCH 244/792] =?UTF-8?q?refactor=20|=20PRD-785=20|=20datahub=20?= =?UTF-8?q?oss:=20migrate=20use=20of=20useGetAuthenticatedU=E2=80=A6=20(#9?= =?UTF-8?q?456)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: John Joyce --- datahub-web-react/src/app/AdminConsole.tsx | 8 ++++---- datahub-web-react/src/app/embed/EmbeddedPage.tsx | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/datahub-web-react/src/app/AdminConsole.tsx b/datahub-web-react/src/app/AdminConsole.tsx index 8b14ca35763d1..f6395a3bd3cb8 100644 --- a/datahub-web-react/src/app/AdminConsole.tsx +++ b/datahub-web-react/src/app/AdminConsole.tsx @@ -4,9 +4,9 @@ import { Menu } from 'antd'; import styled from 'styled-components'; import { BankOutlined, BarChartOutlined, MenuOutlined } from '@ant-design/icons'; import Sider from 'antd/lib/layout/Sider'; -import { useGetAuthenticatedUser } from './useGetAuthenticatedUser'; import { useAppConfig } from './useAppConfig'; import { ANTD_GRAY } from './entity/shared/constants'; +import { useUserContext } from './context/useUserContext'; const ToggleContainer = styled.div` background-color: ${ANTD_GRAY[4]}; @@ -32,7 +32,7 @@ const ControlSlideOut = styled(Sider)` * Container for all views behind an authentication wall. 
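  * Menu entries are gated on the viewer's platform privileges.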
*/ export const AdminConsole = (): JSX.Element => { - const me = useGetAuthenticatedUser(); + const me = useUserContext(); const [adminConsoleOpen, setAdminConsoleOpen] = useState(false); const { config } = useAppConfig(); @@ -40,8 +40,8 @@ export const AdminConsole = (): JSX.Element => { const isAnalyticsEnabled = config?.analyticsConfig.enabled; const isPoliciesEnabled = config?.policiesConfig.enabled; - const showAnalytics = (isAnalyticsEnabled && me && me.platformPrivileges.viewAnalytics) || false; - const showPolicyBuilder = (isPoliciesEnabled && me && me.platformPrivileges.managePolicies) || false; + const showAnalytics = (isAnalyticsEnabled && me && me?.platformPrivileges?.viewAnalytics) || false; + const showPolicyBuilder = (isPoliciesEnabled && me && me?.platformPrivileges?.managePolicies) || false; const showAdminConsole = showAnalytics || showPolicyBuilder; const onMenuItemClick = () => { diff --git a/datahub-web-react/src/app/embed/EmbeddedPage.tsx b/datahub-web-react/src/app/embed/EmbeddedPage.tsx index 429f83f34af6e..603a72675c433 100644 --- a/datahub-web-react/src/app/embed/EmbeddedPage.tsx +++ b/datahub-web-react/src/app/embed/EmbeddedPage.tsx @@ -8,9 +8,9 @@ import { VIEW_ENTITY_PAGE } from '../entity/shared/constants'; import { decodeUrn } from '../entity/shared/utils'; import CompactContext from '../shared/CompactContext'; import { useEntityRegistry } from '../useEntityRegistry'; -import { useGetAuthenticatedUserUrn } from '../useGetAuthenticatedUser'; import analytics from '../analytics/analytics'; import { EventType } from '../analytics'; +import { useUserContext } from '../context/useUserContext'; const EmbeddedPageWrapper = styled.div` max-height: 100%; @@ -39,11 +39,11 @@ export default function EmbeddedPage({ entityType }: Props) { }); }, [entityType, urn]); - const authenticatedUserUrn = useGetAuthenticatedUserUrn(); + const { urn : authenticatedUserUrn } = useUserContext(); const { data } = useGetGrantedPrivilegesQuery({ variables: { input: { - actorUrn: authenticatedUserUrn, + actorUrn: authenticatedUserUrn as string, resourceSpec: { resourceType: entityType, resourceUrn: urn }, }, }, From ff0570edacdd967d8fef23ac3333ccc93e50e406 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Wed, 13 Dec 2023 17:12:48 -0800 Subject: [PATCH 245/792] refactor(ui): Minor improvements & refactoring (#9420) --- .../search/EmbeddedListSearchResults.tsx | 6 +- .../src/app/lineage/LineageLoadingSection.tsx | 5 +- datahub-web-react/src/graphql/domain.graphql | 4 +- datahub-web-react/src/graphql/lineage.graphql | 167 ++++++++++++------ datahub-web-react/src/graphql/query.graphql | 10 ++ .../com/linkedin/query/QueryProperties.pdl | 7 +- 6 files changed, 139 insertions(+), 60 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index 1daf2a4c59b70..80fc2aa223fdf 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { Pagination, Typography } from 'antd'; +import { Pagination, Spin, Typography } from 'antd'; import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; @@ -61,7 
+61,7 @@ const LoadingContainer = styled.div` `; const StyledLoading = styled(LoadingOutlined)` - font-size: 36px; + font-size: 32px; color: ${ANTD_GRAY[7]}; padding-bottom: 18px; ]`; @@ -128,7 +128,7 @@ export const EmbeddedListSearchResults = ({ {loading && ( - + } /> )} {!loading && ( diff --git a/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx b/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx index 9d84de0c21172..3b7f0e48ecdf4 100644 --- a/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx +++ b/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx @@ -1,5 +1,6 @@ import * as React from 'react'; import styled from 'styled-components'; +import { Spin } from 'antd'; import { LoadingOutlined } from '@ant-design/icons'; import { ANTD_GRAY } from '../entity/shared/constants'; @@ -13,7 +14,7 @@ const Container = styled.div` `; const StyledLoading = styled(LoadingOutlined)` - font-size: 36px; + font-size: 32px; color: ${ANTD_GRAY[7]}; padding-bottom: 18px; ]`; @@ -21,7 +22,7 @@ const StyledLoading = styled(LoadingOutlined)` export default function LineageLoadingSection() { return ( - + } /> ); } diff --git a/datahub-web-react/src/graphql/domain.graphql b/datahub-web-react/src/graphql/domain.graphql index 951b93fcba9af..170a5b5df476b 100644 --- a/datahub-web-react/src/graphql/domain.graphql +++ b/datahub-web-react/src/graphql/domain.graphql @@ -27,9 +27,7 @@ query getDomain($urn: String!) { } } } - children: relationships(input: { types: ["IsPartOf"], direction: INCOMING, start: 0, count: 0 }) { - total - } + ...domainEntitiesFields } } diff --git a/datahub-web-react/src/graphql/lineage.graphql b/datahub-web-react/src/graphql/lineage.graphql index dc511ca411e8d..4e9b8aacfcfa1 100644 --- a/datahub-web-react/src/graphql/lineage.graphql +++ b/datahub-web-react/src/graphql/lineage.graphql @@ -164,6 +164,9 @@ fragment lineageNodeProperties on EntityWithRelationships { domain { ...entityDomain } + parentContainers { + ...parentContainersFields + } ...entityDataProduct status { removed @@ -188,6 +191,9 @@ fragment lineageNodeProperties on EntityWithRelationships { ownership { ...ownershipFields } + parentContainers { + ...parentContainersFields + } subTypes { typeNames } @@ -361,6 +367,60 @@ fragment partialLineageResults on EntityLineageResult { filtered } +fragment entityLineage on Entity { + urn + type + ...lineageNodeProperties + ...canEditLineageFragment + ... on Dataset { + schemaMetadata(version: 0) @include(if: $showColumns) { + ...schemaMetadataFields + } + siblings { + isPrimary + siblings { + urn + type + ... on Dataset { + exists + } + ...lineageNodeProperties + } + } + } + ... on Chart { + inputFields @include(if: $showColumns) { + ...inputFieldsFields + } + } + ... on EntityWithRelationships { + upstream: lineage( + input: { + direction: UPSTREAM + start: 0 + count: 100 + separateSiblings: $separateSiblings + startTimeMillis: $startTimeMillis + endTimeMillis: $endTimeMillis + } + ) @skip(if: $excludeUpstream) { + ...fullLineageResults + } + downstream: lineage( + input: { + direction: DOWNSTREAM + start: 0 + count: 100 + separateSiblings: $separateSiblings + startTimeMillis: $startTimeMillis + endTimeMillis: $endTimeMillis + } + ) @skip(if: $excludeDownstream) { + ...fullLineageResults + } + } +} + query getEntityLineage( $urn: String! $separateSiblings: Boolean @@ -371,57 +431,21 @@ query getEntityLineage( $excludeDownstream: Boolean = false ) { entity(urn: $urn) { - urn - type - ...lineageNodeProperties - ...canEditLineageFragment - ... 
on Dataset { - schemaMetadata(version: 0) @include(if: $showColumns) { - ...schemaMetadataFields - } - siblings { - isPrimary - siblings { - urn - type - ... on Dataset { - exists - } - ...lineageNodeProperties - } - } - } - ... on Chart { - inputFields @include(if: $showColumns) { - ...inputFieldsFields - } - } - ... on EntityWithRelationships { - upstream: lineage( - input: { - direction: UPSTREAM - start: 0 - count: 100 - separateSiblings: $separateSiblings - startTimeMillis: $startTimeMillis - endTimeMillis: $endTimeMillis - } - ) @skip(if: $excludeUpstream) { - ...fullLineageResults - } - downstream: lineage( - input: { - direction: DOWNSTREAM - start: 0 - count: 100 - separateSiblings: $separateSiblings - startTimeMillis: $startTimeMillis - endTimeMillis: $endTimeMillis - } - ) @skip(if: $excludeDownstream) { - ...fullLineageResults - } - } + ...entityLineage + } +} + +query getBulkEntityLineage( + $urns: [String!]!, + $separateSiblings: Boolean + $showColumns: Boolean! + $startTimeMillis: Long + $endTimeMillis: Long + $excludeUpstream: Boolean = false + $excludeDownstream: Boolean = false +) { + entities(urns: $urns) { + ...entityLineage } } @@ -489,3 +513,44 @@ query getLineageCounts( } } } + +query getSearchAcrossLineageCounts( + $urn: String! + $excludeUpstream: Boolean = false + $excludeDownstream: Boolean = false +) { + upstreams: searchAcrossLineage( + input: { + urn: $urn + query: "*" + start: 0 + count: 10000 + filters: [{ field: "degree", value: "1", values: ["1"] }] + direction: UPSTREAM + } + ) @skip(if: $excludeUpstream) { + start + count + total + facets { + ...facetFields + } + } + downstreams: searchAcrossLineage( + input: { + urn: $urn + query: "*" + start: 0 + count: 10000 + filters: [{ field: "degree", value: "1", values: ["1"] }] + direction: DOWNSTREAM + } + ) @skip(if: $excludeDownstream) { + start + count + total + facets { + ...facetFields + } + } +} \ No newline at end of file diff --git a/datahub-web-react/src/graphql/query.graphql b/datahub-web-react/src/graphql/query.graphql index 84908b24f9ae7..e24c12a4448b1 100644 --- a/datahub-web-react/src/graphql/query.graphql +++ b/datahub-web-react/src/graphql/query.graphql @@ -1,3 +1,13 @@ +query getQuery($urn: String!) { + entity(urn: $urn) { + urn + type + ... on QueryEntity { + ...query + } + } +} + fragment query on QueryEntity { urn properties { diff --git a/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl index 3ba19d348913b..9587775dbed3a 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl @@ -1,6 +1,7 @@ namespace com.linkedin.query import com.linkedin.common.AuditStamp +import com.linkedin.common.Urn /** * Information about a Query against one or more data assets (e.g. Tables or Views). @@ -22,7 +23,11 @@ record QueryProperties { /** * The query was entered manually by a user (via the UI). */ - MANUAL + MANUAL, + /** + * The query was discovered by a crawler. 
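+     * (for example, harvested from warehouse query logs by an ingestion
+     * source, rather than typed in manually through the UI)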
+ */ + SYSTEM } /** From 70e64e80786a2112b3c77d790d9634ee17dd1d34 Mon Sep 17 00:00:00 2001 From: Seokyun Ha Date: Thu, 14 Dec 2023 18:02:37 +0900 Subject: [PATCH 246/792] feat(ingest): add ingest `--no-progress` option (#9300) --- docs/cli.md | 1 + metadata-ingestion/src/datahub/cli/ingest_cli.py | 10 ++++++++++ .../src/datahub/ingestion/run/pipeline.py | 6 +++++- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/docs/cli.md b/docs/cli.md index 8845ed5a6dac7..cb5077db42906 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -98,6 +98,7 @@ Command Options: --preview-workunits The number of workunits to produce for preview --strict-warnings If enabled, ingestion runs with warnings will yield a non-zero error code --test-source-connection When set, ingestion will only test the source connection details from the recipe + --no-progress If enabled, mute intermediate progress ingestion reports ``` #### ingest --dry-run diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index b7827ec9f050b..569a836f3ef5c 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -97,6 +97,13 @@ def ingest() -> None: @click.option( "--no-spinner", type=bool, is_flag=True, default=False, help="Turn off spinner" ) +@click.option( + "--no-progress", + type=bool, + is_flag=True, + default=False, + help="If enabled, mute intermediate progress ingestion reports", +) @telemetry.with_telemetry( capture_kwargs=[ "dry_run", @@ -105,6 +112,7 @@ def ingest() -> None: "test_source_connection", "no_default_report", "no_spinner", + "no_progress", ] ) def run( @@ -117,6 +125,7 @@ def run( report_to: str, no_default_report: bool, no_spinner: bool, + no_progress: bool, ) -> None: """Ingest metadata into DataHub.""" @@ -170,6 +179,7 @@ async def run_ingestion_and_check_upgrade() -> int: preview_workunits, report_to, no_default_report, + no_progress, raw_pipeline_config, ) diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py index f2735c24ca19d..25e17d692109a 100644 --- a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py +++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py @@ -173,6 +173,7 @@ def __init__( preview_workunits: int = 10, report_to: Optional[str] = None, no_default_report: bool = False, + no_progress: bool = False, ): self.config = config self.dry_run = dry_run @@ -180,6 +181,7 @@ def __init__( self.preview_workunits = preview_workunits self.report_to = report_to self.reporters: List[PipelineRunListener] = [] + self.no_progress = no_progress self.num_intermediate_workunits = 0 self.last_time_printed = int(time.time()) self.cli_report = CliReport() @@ -330,6 +332,7 @@ def create( preview_workunits: int = 10, report_to: Optional[str] = "datahub", no_default_report: bool = False, + no_progress: bool = False, raw_config: Optional[dict] = None, ) -> "Pipeline": config = PipelineConfig.from_dict(config_dict, raw_config) @@ -340,6 +343,7 @@ def create( preview_workunits=preview_workunits, report_to=report_to, no_default_report=no_default_report, + no_progress=no_progress, ) def _time_to_print(self) -> bool: @@ -379,7 +383,7 @@ def run(self) -> None: self.preview_workunits if self.preview_mode else None, ): try: - if self._time_to_print(): + if self._time_to_print() and not self.no_progress: self.pretty_print_summary(currently_running=True) except Exception as e: logger.warning(f"Failed to print summary {e}") 
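For illustration, a minimal sketch of driving the new flag programmatically through the Pipeline API changed above (the recipe dict here is hypothetical; any valid source/sink pair behaves the same):

    from datahub.ingestion.run.pipeline import Pipeline

    # Hypothetical recipe: read a local MCE file and write the results back out.
    recipe = {
        "source": {"type": "file", "config": {"path": "./input_mces.json"}},
        "sink": {"type": "file", "config": {"filename": "./output_mces.json"}},
    }

    # no_progress=True is the programmatic counterpart of
    # `datahub ingest -c recipe.yml --no-progress`: Pipeline.run() then skips the
    # intermediate pretty_print_summary() calls guarded by _time_to_print().
    pipeline = Pipeline.create(recipe, no_progress=True)
    pipeline.run()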
From b0de1dc0ce7a2de221a27f12dfecea9924380ab2 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Thu, 14 Dec 2023 18:41:50 +0530 Subject: [PATCH 247/792] fix(powerbi): add access token refresh (#9405) Co-authored-by: elish7lapid Co-authored-by: treff7es --- .../ingestion/source/powerbi/config.py | 1 + .../powerbi/rest_api_wrapper/data_resolver.py | 15 +- .../tests/integration/powerbi/test_powerbi.py | 235 +++++++++++++++--- 3 files changed, 212 insertions(+), 39 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py index f71afac737ca6..70786efff79a4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py @@ -95,6 +95,7 @@ class Constant: TITLE = "title" EMBED_URL = "embedUrl" ACCESS_TOKEN = "access_token" + ACCESS_TOKEN_EXPIRY = "expires_in" IS_READ_ONLY = "isReadOnly" WEB_URL = "webUrl" ODATA_COUNT = "@odata.count" diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py index c6314c212d104..3aeffa60bc28e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py @@ -1,6 +1,7 @@ import logging import math from abc import ABC, abstractmethod +from datetime import datetime, timedelta from time import sleep from typing import Any, Dict, List, Optional @@ -59,6 +60,7 @@ def __init__( tenant_id: str, ): self.__access_token: Optional[str] = None + self.__access_token_expiry_time: Optional[datetime] = None self.__tenant_id = tenant_id # Test connection by generating access token logger.info("Trying to connect to {}".format(self._get_authority_url())) @@ -128,7 +130,7 @@ def get_authorization_header(self): return {Constant.Authorization: self.get_access_token()} def get_access_token(self): - if self.__access_token is not None: + if self.__access_token is not None and not self._is_access_token_expired(): return self.__access_token logger.info("Generating PowerBi access token") @@ -150,11 +152,22 @@ def get_access_token(self): self.__access_token = "Bearer {}".format( auth_response.get(Constant.ACCESS_TOKEN) ) + safety_gap = 300 + self.__access_token_expiry_time = datetime.now() + timedelta( + seconds=( + max(auth_response.get(Constant.ACCESS_TOKEN_EXPIRY, 0) - safety_gap, 0) + ) + ) logger.debug(f"{Constant.PBIAccessToken}={self.__access_token}") return self.__access_token + def _is_access_token_expired(self) -> bool: + if not self.__access_token_expiry_time: + return True + return self.__access_token_expiry_time < datetime.now() + def get_dashboards(self, workspace: Workspace) -> List[Dashboard]: """ Get the list of dashboard from PowerBi for the given workspace identifier diff --git a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py index c9b0ded433749..b2cbccf983eb0 100644 --- a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py +++ b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py @@ -1,8 +1,10 @@ +import datetime import logging import re import sys from typing import Any, Dict, List, cast from unittest import mock +from unittest.mock import MagicMock import pytest from freezegun import freeze_time @@ -31,13 +33,23 @@ def 
enable_logging(): logging.getLogger().setLevel(logging.DEBUG) -def mock_msal_cca(*args, **kwargs): - class MsalClient: - def acquire_token_for_client(self, *args, **kwargs): - return { - "access_token": "dummy", - } +class MsalClient: + call_num = 0 + token: Dict[str, Any] = { + "access_token": "dummy", + } + + @staticmethod + def acquire_token_for_client(*args, **kwargs): + MsalClient.call_num += 1 + return MsalClient.token + + @staticmethod + def reset(): + MsalClient.call_num = 0 + +def mock_msal_cca(*args, **kwargs): return MsalClient() @@ -627,7 +639,13 @@ def default_source_config(): @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration -def test_powerbi_ingest(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_powerbi_ingest( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -658,7 +676,7 @@ def test_powerbi_ingest(mock_msal, pytestconfig, tmp_path, mock_time, requests_m mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_mces.json", + output_path=f"{tmp_path}/powerbi_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -667,8 +685,12 @@ def test_powerbi_ingest(mock_msal, pytestconfig, tmp_path, mock_time, requests_m @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_powerbi_platform_instance_ingest( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -711,8 +733,12 @@ def test_powerbi_platform_instance_ingest( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_powerbi_ingest_urn_lower_case( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -752,8 +778,12 @@ def test_powerbi_ingest_urn_lower_case( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_override_ownership( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -783,7 +813,7 @@ def test_override_ownership( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_mces_disabled_ownership.json", + output_path=f"{tmp_path}/powerbi_mces_disabled_ownership.json", golden_path=f"{test_resources_dir}/{mce_out_file}", ) @@ -792,8 +822,13 @@ def test_override_ownership( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_scan_all_workspaces( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: 
datetime.datetime, + requests_mock: Any, +) -> None: + test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -828,7 +863,7 @@ def test_scan_all_workspaces( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_mces_scan_all_workspaces.json", + output_path=f"{tmp_path}/powerbi_mces_scan_all_workspaces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -836,7 +871,14 @@ def test_scan_all_workspaces( @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration -def test_extract_reports(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_extract_reports( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: + enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -868,7 +910,7 @@ def test_extract_reports(mock_msal, pytestconfig, tmp_path, mock_time, requests_ mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_report_mces.json", + output_path=f"{tmp_path}/powerbi_report_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -876,7 +918,13 @@ def test_extract_reports(mock_msal, pytestconfig, tmp_path, mock_time, requests_ @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration -def test_extract_lineage(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_extract_lineage( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -925,8 +973,12 @@ def test_extract_lineage(mock_msal, pytestconfig, tmp_path, mock_time, requests_ @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_extract_endorsements( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -957,7 +1009,7 @@ def test_extract_endorsements( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_endorsement_mces.json", + output_path=f"{tmp_path}/powerbi_endorsement_mces.json", golden_path=f"{test_resources_dir}/{mce_out_file}", ) @@ -966,8 +1018,12 @@ def test_extract_endorsements( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_admin_access_is_not_allowed( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1024,8 +1080,12 @@ def test_admin_access_is_not_allowed( @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) def test_workspace_container( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: 
Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1062,11 +1122,92 @@ def test_workspace_container( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_container_mces.json", + output_path=f"{tmp_path}/powerbi_container_mces.json", golden_path=f"{test_resources_dir}/{mce_out_file}", ) +@mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) +def test_access_token_expiry_with_long_expiry( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: + enable_logging() + + register_mock_api(request_mock=requests_mock) + + pipeline = Pipeline.create( + { + "run_id": "powerbi-test", + "source": { + "type": "powerbi", + "config": { + **default_source_config(), + }, + }, + "sink": { + "type": "file", + "config": { + "filename": f"{tmp_path}/powerbi_access_token_mces.json", + }, + }, + } + ) + + # for long expiry, the token should only be requested once. + MsalClient.token = { + "access_token": "dummy2", + "expires_in": 3600, + } + + MsalClient.reset() + pipeline.run() + # We expect the token to be requested twice (once for AdminApiResolver and one for RegularApiResolver) + assert MsalClient.call_num == 2 + + +@mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) +def test_access_token_expiry_with_short_expiry( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: + enable_logging() + + register_mock_api(request_mock=requests_mock) + + pipeline = Pipeline.create( + { + "run_id": "powerbi-test", + "source": { + "type": "powerbi", + "config": { + **default_source_config(), + }, + }, + "sink": { + "type": "file", + "config": { + "filename": f"{tmp_path}/powerbi_access_token_mces.json", + }, + }, + } + ) + + # for short expiry, the token should be requested when expires. 
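+    # (with expires_in=0, the safety gap clamps the stored expiry to "now", so _is_access_token_expired() is True on every request and call_num climbs past the usual two acquisitions)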
+ MsalClient.token = { + "access_token": "dummy", + "expires_in": 0, + } + pipeline.run() + assert MsalClient.call_num > 2 + + def dataset_type_mapping_set_to_all_platform(pipeline: Pipeline) -> None: source_config: PowerBiDashboardSourceConfig = cast( PowerBiDashboardSource, pipeline.source @@ -1306,8 +1447,12 @@ def validate_pipeline(pipeline: Pipeline) -> None: @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_reports_with_failed_page_request( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: """ Test that all reports are fetched even if a single page request fails """ @@ -1419,8 +1564,12 @@ def test_reports_with_failed_page_request( @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) def test_independent_datasets_extraction( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1503,14 +1652,20 @@ def test_independent_datasets_extraction( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_independent_mces.json", + output_path=f"{tmp_path}/powerbi_independent_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) -def test_cll_extraction(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_cll_extraction( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1553,7 +1708,7 @@ def test_cll_extraction(mock_msal, pytestconfig, tmp_path, mock_time, requests_m mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_cll_mces.json", + output_path=f"{tmp_path}/powerbi_cll_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -1561,8 +1716,12 @@ def test_cll_extraction(mock_msal, pytestconfig, tmp_path, mock_time, requests_m @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) def test_cll_extraction_flags( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: register_mock_api( request_mock=requests_mock, From 9ecda6485202ce89291bd1485c861cf7be1b8741 Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:07:48 +0530 Subject: [PATCH 248/792] fix(analytics): do not ping the track endpoint before login (#9462) --- datahub-web-react/src/app/analytics/analytics.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/analytics/analytics.ts b/datahub-web-react/src/app/analytics/analytics.ts index a66d76a09cf4d..468164069cfd0 100644 --- a/datahub-web-react/src/app/analytics/analytics.ts +++ b/datahub-web-react/src/app/analytics/analytics.ts @@ -30,16 +30,17 @@ export function getMergedTrackingOptions(options?: any) { export default { page: (data?: PageData, options?: 
any, callback?: (...params: any[]) => any) => { + const actorUrn = Cookies.get(CLIENT_AUTH_COOKIE) || undefined; const modifiedData = { ...data, type: EventType[EventType.PageViewEvent], - actorUrn: Cookies.get(CLIENT_AUTH_COOKIE) || undefined, + actorUrn, timestamp: Date.now(), date: new Date().toString(), userAgent: navigator.userAgent, browserId: getBrowserId(), }; - if (NODE_ENV === 'test') { + if (NODE_ENV === 'test' || !actorUrn) { return null; } const trackingOptions = getMergedTrackingOptions(options); From aac1c55a14fdf65cb51f1fd0f441d93eb7757098 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 14 Dec 2023 21:05:06 +0530 Subject: [PATCH 249/792] feat(ingest/unity): enable hive metastore ingestion (#9416) --- metadata-ingestion/setup.py | 5 +- .../ingestion/source/bigquery_v2/bigquery.py | 4 + .../ingestion/source/source_registry.py | 9 + .../datahub/ingestion/source/unity/config.py | 51 +- .../source/unity/hive_metastore_proxy.py | 242 ++ .../datahub/ingestion/source/unity/proxy.py | 22 + .../ingestion/source/unity/proxy_types.py | 38 +- .../datahub/ingestion/source/unity/report.py | 4 +- .../datahub/ingestion/source/unity/source.py | 64 +- .../unity/test_unity_catalog_ingest.py | 77 +- .../unity/unity_catalog_mces_golden.json | 2509 +++++++++-------- .../tests/unit/test_unity_catalog_config.py | 65 +- 12 files changed, 1958 insertions(+), 1132 deletions(-) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index e894cbf043338..5d15d7167b63e 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -263,7 +263,8 @@ "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes - "databricks-sql-connector>=2.8.0", + # Version 3.0.0 required SQLAlchemy > 2.0.21 + "databricks-sql-connector>=2.8.0,<3.0.0", } mysql = sql_common | {"pymysql>=1.0.2"} @@ -395,6 +396,8 @@ "powerbi-report-server": powerbi_report_server, "vertica": sql_common | {"vertica-sqlalchemy-dialect[vertica-python]==0.0.8.1"}, "unity-catalog": databricks | sql_common | sqllineage_lib, + # databricks is alias for unity-catalog and needs to be kept in sync + "databricks": databricks | sql_common | sqllineage_lib, "fivetran": snowflake_common, } diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 6959a48313010..9813945683289 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -1031,6 +1031,10 @@ def gen_dataset_urn_from_ref(self, ref: BigQueryTableRef) -> str: def gen_schema_fields(self, columns: List[BigqueryColumn]) -> List[SchemaField]: schema_fields: List[SchemaField] = [] + # Below line affects HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR in global scope + # TODO: Refractor this such that + # converter = HiveColumnToAvroConverter(struct_type_separator=" "); + # converter.get_schema_fields_for_hive_column(...) 
HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR = " " _COMPLEX_TYPE = re.compile("^(struct|array)") last_id = -1 diff --git a/metadata-ingestion/src/datahub/ingestion/source/source_registry.py b/metadata-ingestion/src/datahub/ingestion/source/source_registry.py index c3fbab3f9a012..e003c658f45e8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/source_registry.py +++ b/metadata-ingestion/src/datahub/ingestion/source/source_registry.py @@ -14,3 +14,12 @@ "mssql-odbc", "mssql", ) + +# Use databricks as alias for unity-catalog ingestion source. +# As mentioned here - https://docs.databricks.com/en/data-governance/unity-catalog/enable-workspaces.html, +# Databricks is rolling out Unity Catalog gradually across accounts. +# TODO: Rename unity-catalog source to databricks source, once it is rolled out for all accounts +source_registry.register_alias( + "databricks", + "unity-catalog", +) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 2c567120b4850..96971faeea69f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -129,6 +129,14 @@ class UnityCatalogSourceConfig( workspace_url: str = pydantic.Field( description="Databricks workspace url. e.g. https://my-workspace.cloud.databricks.com" ) + warehouse_id: Optional[str] = pydantic.Field( + default=None, + description="SQL Warehouse id, for running queries. If not set, will use the default warehouse.", + ) + include_hive_metastore: bool = pydantic.Field( + default=False, + description="Whether to ingest legacy `hive_metastore` catalog. This requires executing queries on SQL warehouse.", + ) workspace_name: Optional[str] = pydantic.Field( default=None, description="Name of the workspace. Default to deployment name present in workspace_url", @@ -254,16 +262,17 @@ class UnityCatalogSourceConfig( scheme: str = DATABRICKS - def get_sql_alchemy_url(self): + def get_sql_alchemy_url(self, database: Optional[str] = None) -> str: + uri_opts = {"http_path": f"/sql/1.0/warehouses/{self.warehouse_id}"} + if database: + uri_opts["catalog"] = database return make_sqlalchemy_uri( scheme=self.scheme, username="token", password=self.token, at=urlparse(self.workspace_url).netloc, - db=None, - uri_opts={ - "http_path": f"/sql/1.0/warehouses/{self.profiling.warehouse_id}" - }, + db=database, + uri_opts=uri_opts, ) def is_profiling_enabled(self) -> bool: @@ -304,3 +313,35 @@ def include_metastore_warning(cls, v: bool) -> bool: logger.warning(msg) add_global_warning(msg) return v + + @pydantic.root_validator(skip_on_failure=True) + def set_warehouse_id_from_profiling(cls, values: Dict[str, Any]) -> Dict[str, Any]: + profiling: Optional[UnityCatalogProfilerConfig] = values.get("profiling") + if not values.get("warehouse_id") and profiling and profiling.warehouse_id: + values["warehouse_id"] = profiling.warehouse_id + if ( + values.get("warehouse_id") + and profiling + and profiling.warehouse_id + and values["warehouse_id"] != profiling.warehouse_id + ): + raise ValueError( + "When `warehouse_id` is set, it must match the `warehouse_id` in `profiling`." + ) + + if values.get("include_hive_metastore") and not values.get("warehouse_id"): + raise ValueError( + "When `include_hive_metastore` is set, `warehouse_id` must be set." 
+ ) + + if values.get("warehouse_id") and profiling and not profiling.warehouse_id: + profiling.warehouse_id = values["warehouse_id"] + + return values + + @pydantic.validator("schema_pattern", always=True) + def schema_pattern_should__always_deny_information_schema( + cls, v: AllowDenyPattern + ) -> AllowDenyPattern: + v.deny.append(".*\\.information_schema") + return v diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py new file mode 100644 index 0000000000000..99b2ff998662c --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py @@ -0,0 +1,242 @@ +import logging +from datetime import datetime +from functools import lru_cache +from typing import Iterable, List, Optional + +from databricks.sdk.service.catalog import ColumnTypeName, DataSourceFormat +from databricks.sql.types import Row +from sqlalchemy import create_engine, inspect +from sqlalchemy.engine.reflection import Inspector + +from datahub.ingestion.api.closeable import Closeable +from datahub.ingestion.source.unity.proxy_types import ( + Catalog, + Column, + CustomCatalogType, + HiveTableType, + Metastore, + Schema, + Table, +) + +logger = logging.getLogger(__name__) +HIVE_METASTORE = "hive_metastore" + +type_map = { + "boolean": ColumnTypeName.BOOLEAN, + "tinyint": ColumnTypeName.INT, + "smallint": ColumnTypeName.INT, + "int": ColumnTypeName.INT, + "bigint": ColumnTypeName.LONG, + "float": ColumnTypeName.FLOAT, + "double": ColumnTypeName.DOUBLE, + "decimal": ColumnTypeName.DECIMAL, + "string": ColumnTypeName.STRING, + "varchar": ColumnTypeName.STRING, + "timestamp": ColumnTypeName.TIMESTAMP, + "date": ColumnTypeName.DATE, + "binary": ColumnTypeName.BINARY, +} + + +class HiveMetastoreProxy(Closeable): + # TODO: Support for view lineage using SQL parsing + # Why not use hive ingestion source directly here ? + # 1. hive ingestion source assumes 2-level namespace heirarchy and currently + # there is no other intermediate interface except sqlalchemy inspector + # that can be used to fetch hive metadata. + # 2. hive recipe for databricks (databricks+pyhive dialect) does not + # readily support SQL warehouse. Also this dialect is not actively maintained. + """ + Proxy to read metadata from hive_metastore databricks catalog. This is required + as unity catalog apis do not return details about this legacy metastore. + """ + + def __init__(self, sqlalchemy_url: str, options: dict) -> None: + try: + self.inspector = HiveMetastoreProxy.get_inspector(sqlalchemy_url, options) + except Exception: + # This means that there is no `hive_metastore` catalog in databricks workspace + # Not tested but seems like the logical conclusion. 
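+            # Re-raising lets the caller (UnityCatalogSource.init_hive_metastore_proxy) log a warning and record report.hive_metastore_catalog_found = False.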
+ raise + + @staticmethod + def get_inspector(sqlalchemy_url: str, options: dict) -> Inspector: + engine = create_engine(sqlalchemy_url, **options) + return inspect(engine.connect()) + + def hive_metastore_catalog(self, metastore: Optional[Metastore]) -> Catalog: + return Catalog( + id=HIVE_METASTORE, + name=HIVE_METASTORE, + comment=None, + metastore=metastore, + owner=None, + type=CustomCatalogType.HIVE_METASTORE_CATALOG, + ) + + def hive_metastore_schemas(self, catalog: Catalog) -> Iterable[Schema]: + for schema_name in self.inspector.get_schema_names(): + yield Schema( + name=schema_name, + id=f"{catalog.id}.{schema_name}", + catalog=catalog, + comment=None, + owner=None, + ) + + def hive_metastore_tables(self, schema: Schema) -> Iterable[Table]: + views = self.inspector.get_view_names(schema.name) + for table_name in views: + yield self._get_table(schema, table_name, True) + + for table_name in self.inspector.get_table_names(schema.name): + if table_name in views: + continue + yield self._get_table(schema, table_name, False) + + def _get_table(self, schema: Schema, table_name: str, is_view: bool) -> Table: + columns = self._get_columns(schema, table_name) + detailed_info = self._get_table_info(schema, table_name) + + comment = detailed_info.pop("Comment", None) + storage_location = detailed_info.pop("Location", None) + datasource_format = self._get_datasource_format( + detailed_info.pop("Provider", None) + ) + + created_at = self._get_created_at(detailed_info.pop("Created Time", None)) + + return Table( + name=table_name, + id=f"{schema.id}.{table_name}", + table_type=self._get_table_type(detailed_info.pop("Type", None)), + schema=schema, + columns=columns, + storage_location=storage_location, + data_source_format=datasource_format, + view_definition=self._get_view_definition(schema.name, table_name) + if is_view + else None, + properties=detailed_info, + owner=None, + generation=None, + created_at=created_at, + created_by=None, + updated_at=None, + updated_by=None, + table_id=f"{schema.id}.{table_name}", + comment=comment, + ) + + def _get_created_at(self, created_at: Optional[str]) -> Optional[datetime]: + return ( + datetime.strptime(created_at, "%a %b %d %H:%M:%S %Z %Y") + if created_at + else None + ) + + def _get_datasource_format( + self, provider: Optional[str] + ) -> Optional[DataSourceFormat]: + raw_format = provider + if raw_format: + try: + return DataSourceFormat(raw_format.upper()) + except Exception: + logger.debug(f"Unknown datasource format : {raw_format}") + pass + return None + + def _get_view_definition(self, schema_name: str, table_name: str) -> Optional[str]: + try: + rows = self._execute_sql( + f"SHOW CREATE TABLE `{schema_name}`.`{table_name}`" + ) + for row in rows: + return row[0] + except Exception: + logger.debug( + f"Failed to get view definition for {schema_name}.{table_name}" + ) + return None + + def _get_table_type(self, type: Optional[str]) -> HiveTableType: + if type == "EXTERNAL": + return HiveTableType.HIVE_EXTERNAL_TABLE + elif type == "MANAGED": + return HiveTableType.HIVE_MANAGED_TABLE + elif type == "VIEW": + return HiveTableType.HIVE_VIEW + else: + return HiveTableType.UNKNOWN + + def _get_table_info(self, schema: Schema, table_name: str) -> dict: + rows = self._describe_extended(schema.name, table_name) + + index = rows.index(("# Detailed Table Information", "", "")) + rows = rows[index + 1 :] + # Copied from https://github.com/acryldata/PyHive/blob/master/pyhive/sqlalchemy_hive.py#L375 + # Generate properties dictionary. 
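+        # e.g. a row ("Type", "MANAGED", "") becomes properties["Type"] = "MANAGED"; a row whose data_type is None sets active_heading, which then prefixes the keys of the value-only rows beneath it.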
+ properties = {} + active_heading = None + for col_name, data_type, value in rows: + col_name = col_name.rstrip() + if col_name.startswith("# "): + continue + elif col_name == "" and data_type is None: + active_heading = None + continue + elif col_name != "" and data_type is None: + active_heading = col_name + elif col_name != "" and data_type is not None: + properties[col_name] = data_type.strip() + else: + # col_name == "", data_type is not None + prop_name = "{} {}".format(active_heading, data_type.rstrip()) + properties[prop_name] = value.rstrip() + + return properties + + def _get_columns(self, schema: Schema, table_name: str) -> List[Column]: + rows = self._describe_extended(schema.name, table_name) + + columns: List[Column] = [] + for i, row in enumerate(rows): + if i == 0 and row[0].strip() == "col_name": + continue # first row + if row[0].strip() in ( + "", + "# Partition Information", + "# Detailed Table Information", + ): + break + columns.append( + Column( + name=row[0].strip(), + id=f"{schema.id}.{table_name}.{row[0].strip()}", + type_text=row[1].strip(), + type_name=type_map.get(row[1].strip().lower()), + type_scale=None, + type_precision=None, + position=None, + nullable=None, + comment=row[2], + ) + ) + + return columns + + @lru_cache(maxsize=1) + def _describe_extended(self, schema_name: str, table_name: str) -> List[Row]: + """ + Rows are structured as shown in examples here + https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-aux-describe-table.html#examples + """ + return self._execute_sql(f"DESCRIBE EXTENDED `{schema_name}`.`{table_name}`") + + def _execute_sql(self, sql: str) -> List[Row]: + return self.inspector.bind.execute(sql).fetchall() + + def close(self): + self.inspector.bind.close() # type:ignore diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py index 375c76db8e971..13baa8b57a639 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py @@ -26,6 +26,7 @@ from databricks.sdk.service.workspace import ObjectType import datahub +from datahub.ingestion.source.unity.hive_metastore_proxy import HiveMetastoreProxy from datahub.ingestion.source.unity.proxy_profiling import ( UnityCatalogProxyProfilingMixin, ) @@ -33,6 +34,7 @@ ALLOWED_STATEMENT_TYPES, Catalog, Column, + CustomCatalogType, ExternalTableReference, Metastore, Notebook, @@ -87,6 +89,7 @@ def __init__( personal_access_token: str, warehouse_id: Optional[str], report: UnityCatalogReport, + hive_metastore_proxy: Optional[HiveMetastoreProxy] = None, ): self._workspace_client = WorkspaceClient( host=workspace_url, @@ -96,6 +99,7 @@ def __init__( ) self.warehouse_id = warehouse_id or "" self.report = report + self.hive_metastore_proxy = hive_metastore_proxy def check_basic_connectivity(self) -> bool: return bool(self._workspace_client.catalogs.list()) @@ -105,6 +109,9 @@ def assigned_metastore(self) -> Metastore: return self._create_metastore(response) def catalogs(self, metastore: Optional[Metastore]) -> Iterable[Catalog]: + if self.hive_metastore_proxy: + yield self.hive_metastore_proxy.hive_metastore_catalog(metastore) + response = self._workspace_client.catalogs.list() if not response: logger.info("Catalogs not found") @@ -122,6 +129,12 @@ def catalog( return self._create_catalog(metastore, response) def schemas(self, catalog: Catalog) -> Iterable[Schema]: + if ( + self.hive_metastore_proxy + and catalog.type == 
CustomCatalogType.HIVE_METASTORE_CATALOG + ): + yield from self.hive_metastore_proxy.hive_metastore_schemas(catalog) + return response = self._workspace_client.schemas.list(catalog_name=catalog.name) if not response: logger.info(f"Schemas not found for catalog {catalog.id}") @@ -130,6 +143,12 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]: yield self._create_schema(catalog, schema) def tables(self, schema: Schema) -> Iterable[Table]: + if ( + self.hive_metastore_proxy + and schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG + ): + yield from self.hive_metastore_proxy.hive_metastore_tables(schema) + return with patch("databricks.sdk.service.catalog.TableInfo", TableInfoWithGeneration): response = self._workspace_client.tables.list( catalog_name=schema.catalog.name, schema_name=schema.name @@ -244,6 +263,9 @@ def list_lineages_by_column(self, table_name: str, column_name: str) -> dict: ) def table_lineage(self, table: Table, include_entity_lineage: bool) -> None: + if table.schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG: + # Lineage is not available for Hive Metastore Tables. + return None # Lineage endpoint doesn't exists on 2.1 version try: response: dict = self.list_lineages_by_table( diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py index 315c1c0d20186..e5951cb0fa4ff 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py @@ -4,7 +4,8 @@ import logging from dataclasses import dataclass, field from datetime import datetime -from typing import Dict, FrozenSet, List, Optional, Set +from enum import Enum +from typing import Dict, FrozenSet, List, Optional, Set, Union from databricks.sdk.service.catalog import ( CatalogType, @@ -75,6 +76,17 @@ NotebookId = int +class CustomCatalogType(Enum): + HIVE_METASTORE_CATALOG = "HIVE_METASTORE_CATALOG" + + +class HiveTableType(Enum): + HIVE_MANAGED_TABLE = "HIVE_MANAGED_TABLE" + HIVE_EXTERNAL_TABLE = "HIVE_EXTERNAL_TABLE" + HIVE_VIEW = "HIVE_VIEW" + UNKNOWN = "UNKNOWN" + + @dataclass class CommonProperty: id: str @@ -95,7 +107,7 @@ class Metastore(CommonProperty): class Catalog(CommonProperty): metastore: Optional[Metastore] owner: Optional[str] - type: CatalogType + type: Union[CatalogType, CustomCatalogType] @dataclass @@ -107,11 +119,11 @@ class Schema(CommonProperty): @dataclass class Column(CommonProperty): type_text: str - type_name: ColumnTypeName - type_precision: int - type_scale: int - position: int - nullable: bool + type_name: Optional[ColumnTypeName] + type_precision: Optional[int] + type_scale: Optional[int] + position: Optional[int] + nullable: Optional[bool] comment: Optional[str] @@ -212,11 +224,11 @@ class Table(CommonProperty): columns: List[Column] storage_location: Optional[str] data_source_format: Optional[DataSourceFormat] - table_type: TableType + table_type: Union[TableType, HiveTableType] owner: Optional[str] generation: Optional[int] - created_at: datetime - created_by: str + created_at: Optional[datetime] + created_by: Optional[str] updated_at: Optional[datetime] updated_by: Optional[str] table_id: str @@ -231,7 +243,11 @@ class Table(CommonProperty): def __post_init__(self): self.ref = TableReference.create(self) - self.is_view = self.table_type in [TableType.VIEW, TableType.MATERIALIZED_VIEW] + self.is_view = self.table_type in [ + TableType.VIEW, + TableType.MATERIALIZED_VIEW, + 
HiveTableType.HIVE_VIEW, + ] @dataclass diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py index 7f19b6e2103ea..0770d9d27055c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import Tuple +from typing import Optional, Tuple from datahub.ingestion.api.report import EntityFilterReport from datahub.ingestion.source.sql.sql_generic_profiler import ProfilingSqlReport @@ -16,6 +16,8 @@ class UnityCatalogReport(IngestionStageReport, ProfilingSqlReport): table_profiles: EntityFilterReport = EntityFilterReport.field(type="table profile") notebooks: EntityFilterReport = EntityFilterReport.field(type="notebook") + hive_metastore_catalog_found: Optional[bool] = None + num_column_lineage_skipped_column_count: int = 0 num_external_upstreams_lacking_permissions: int = 0 num_external_upstreams_unsupported: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index d1940c1d57607..43c5e24439377 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -58,6 +58,10 @@ ) from datahub.ingestion.source.unity.connection_test import UnityCatalogConnectionTest from datahub.ingestion.source.unity.ge_profiler import UnityCatalogGEProfiler +from datahub.ingestion.source.unity.hive_metastore_proxy import ( + HIVE_METASTORE, + HiveMetastoreProxy, +) from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy from datahub.ingestion.source.unity.proxy_types import ( DATA_TYPE_REGISTRY, @@ -142,12 +146,17 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): self.config = config self.report: UnityCatalogReport = UnityCatalogReport() + + self.init_hive_metastore_proxy() + self.unity_catalog_api_proxy = UnityCatalogApiProxy( config.workspace_url, config.token, - config.profiling.warehouse_id, + config.warehouse_id, report=self.report, + hive_metastore_proxy=self.hive_metastore_proxy, ) + self.external_url_base = urljoin(self.config.workspace_url, "/explore/data") # Determine the platform_instance_name @@ -174,6 +183,23 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): # Global map of tables, for profiling self.tables: FileBackedDict[Table] = FileBackedDict() + def init_hive_metastore_proxy(self): + self.hive_metastore_proxy: Optional[HiveMetastoreProxy] = None + if self.config.include_hive_metastore: + try: + self.hive_metastore_proxy = HiveMetastoreProxy( + self.config.get_sql_alchemy_url(HIVE_METASTORE), self.config.options + ) + self.report.hive_metastore_catalog_found = True + except Exception as e: + logger.debug("Exception", exc_info=True) + self.warn( + logger, + HIVE_METASTORE, + f"Failed to connect to hive_metastore due to {e}", + ) + self.report.hive_metastore_catalog_found = False + @staticmethod def test_connection(config_dict: dict) -> TestConnectionReport: return UnityCatalogConnectionTest(config_dict).get_connection_test() @@ -194,7 +220,7 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: self.report.report_ingestion_stage_start("Ingestion Setup") wait_on_warehouse = None - if 
self.config.is_profiling_enabled(): + if self.config.is_profiling_enabled() or self.config.include_hive_metastore: self.report.report_ingestion_stage_start("Start warehouse") # Can take several minutes, so start now and wait later wait_on_warehouse = self.unity_catalog_api_proxy.start_warehouse() @@ -204,6 +230,9 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: f"SQL warehouse {self.config.profiling.warehouse_id} not found", ) return + else: + # wait until warehouse is started + wait_on_warehouse.result() if self.config.include_ownership: self.report.report_ingestion_stage_start("Ingest service principals") @@ -678,18 +707,25 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: custom_properties["table_type"] = table.table_type.value - custom_properties["created_by"] = table.created_by - custom_properties["created_at"] = str(table.created_at) + if table.created_by: + custom_properties["created_by"] = table.created_by if table.properties: custom_properties.update({k: str(v) for k, v in table.properties.items()}) custom_properties["table_id"] = table.table_id - custom_properties["owner"] = table.owner - custom_properties["updated_by"] = table.updated_by - custom_properties["updated_at"] = str(table.updated_at) - - created = TimeStampClass( - int(table.created_at.timestamp() * 1000), make_user_urn(table.created_by) - ) + if table.owner: + custom_properties["owner"] = table.owner + if table.updated_by: + custom_properties["updated_by"] = table.updated_by + if table.updated_at: + custom_properties["updated_at"] = str(table.updated_at) + + created: Optional[TimeStampClass] = None + if table.created_at: + custom_properties["created_at"] = str(table.created_at) + created = TimeStampClass( + int(table.created_at.timestamp() * 1000), + make_user_urn(table.created_by) if table.created_by else None, + ) last_modified = created if table.updated_at: last_modified = TimeStampClass( @@ -780,3 +816,9 @@ def _create_schema_field(column: Column) -> List[SchemaFieldClass]: description=column.comment, ) ] + + def close(self): + if self.hive_metastore_proxy: + self.hive_metastore_proxy.close() + + super().close() diff --git a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py index c43ba7eee5847..aab7630d57f46 100644 --- a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py +++ b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py @@ -3,6 +3,7 @@ from unittest.mock import patch import databricks +import pytest from databricks.sdk.service.catalog import ( CatalogInfo, GetMetastoreSummaryResponse, @@ -12,12 +13,15 @@ from freezegun import freeze_time from datahub.ingestion.run.pipeline import Pipeline +from datahub.ingestion.source.unity.hive_metastore_proxy import HiveMetastoreProxy from tests.test_helpers import mce_helpers FROZEN_TIME = "2021-12-07 07:00:00" SERVICE_PRINCIPAL_ID_1 = str(uuid.uuid4()) SERVICE_PRINCIPAL_ID_2 = str(uuid.uuid4()) +pytestmark = pytest.mark.integration_batch_1 + def register_mock_api(request_mock): api_vs_response = { @@ -215,6 +219,65 @@ def register_mock_data(workspace_client): ] +def mock_hive_sql(query): + if query == "DESCRIBE EXTENDED `bronze_kambi`.`bet`": + return [ + ("betStatusId", "bigint", None), + ("channelId", "bigint", None), + ( + "combination", + 
"struct>,eventId:bigint,eventName:string,eventStartDate:string,live:boolean,odds:double,outcomeIds:array,outcomeLabel:string,sportId:string,status:string,voidReason:string>>,payout:double,rewardExtraPayout:double,stake:double>", + None, + ), + ("", "", ""), + ("# Detailed Table Information", "", ""), + ("Catalog", "hive_metastore", ""), + ("Database", "bronze_kambi", ""), + ("Table", "bet", ""), + ("Created Time", "Wed Jun 22 05:14:56 UTC 2022", ""), + ("Last Access", "UNKNOWN", ""), + ("Created By", "Spark 3.2.1", ""), + ("Type", "MANAGED", ""), + ("Location", "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", ""), + ("Provider", "delta", ""), + ("Owner", "root", ""), + ("Is_managed_location", "true", ""), + ( + "Table Properties", + "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", + "", + ), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`view1`": + return [ + ("betStatusId", "bigint", None), + ("channelId", "bigint", None), + ( + "combination", + "struct>,eventId:bigint,eventName:string,eventStartDate:string,live:boolean,odds:double,outcomeIds:array,outcomeLabel:string,sportId:string,status:string,voidReason:string>>,payout:double,rewardExtraPayout:double,stake:double>", + None, + ), + ("", "", ""), + ("# Detailed Table Information", "", ""), + ("Catalog", "hive_metastore", ""), + ("Database", "bronze_kambi", ""), + ("Table", "view1", ""), + ("Created Time", "Wed Jun 22 05:14:56 UTC 2022", ""), + ("Last Access", "UNKNOWN", ""), + ("Created By", "Spark 3.2.1", ""), + ("Type", "VIEW", ""), + ("Owner", "root", ""), + ] + elif query == "SHOW CREATE TABLE `bronze_kambi`.`view1`": + return [ + ( + "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", + ) + ] + + return [] + + @freeze_time(FROZEN_TIME) def test_ingestion(pytestconfig, tmp_path, requests_mock): test_resources_dir = pytestconfig.rootpath / "tests/integration/unity" @@ -223,11 +286,21 @@ def test_ingestion(pytestconfig, tmp_path, requests_mock): output_file_name = "unity_catalog_mcps.json" - with patch("databricks.sdk.WorkspaceClient") as WorkspaceClient: + with patch("databricks.sdk.WorkspaceClient") as WorkspaceClient, patch.object( + HiveMetastoreProxy, "get_inspector" + ) as get_inspector, patch.object(HiveMetastoreProxy, "_execute_sql") as execute_sql: workspace_client: mock.MagicMock = mock.MagicMock() WorkspaceClient.return_value = workspace_client register_mock_data(workspace_client) + inspector = mock.MagicMock() + inspector.get_schema_names.return_value = ["bronze_kambi"] + inspector.get_view_names.return_value = ["view1"] + inspector.get_table_names.return_value = ["bet", "view1"] + get_inspector.return_value = inspector + + execute_sql.side_effect = mock_hive_sql + config_dict: dict = { "run_id": "unity-catalog-test", "pipeline_name": "unity-catalog-test-pipeline", @@ -237,6 +310,8 @@ def test_ingestion(pytestconfig, tmp_path, requests_mock): "workspace_url": "https://dummy.cloud.databricks.com", "token": "fake", "include_ownership": True, + "include_hive_metastore": True, + "warehouse_id": "test", }, }, "sink": { diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index d25c86a3a1f9a..98a6615dd2b52 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ 
-114,7 +114,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -123,11 +123,10 @@ "platform": "databricks", "env": "PROD", "metastore": "acryl metastore", - "catalog": "main" + "catalog": "hive_metastore" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", - "name": "main", - "description": "Main catalog (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore", + "name": "hive_metastore" } }, "systemMetadata": { @@ -138,7 +137,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -156,10 +155,18 @@ "entityType": "container", "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "containerProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "main" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", + "name": "main", + "description": "Main catalog (auto-created)" } }, "systemMetadata": { @@ -170,7 +177,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -188,21 +195,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "container", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" } }, "systemMetadata": { @@ -213,12 +211,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -229,7 +227,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -250,32 +248,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "default" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default", - "name": "default", - "description": "Default schema (auto-created)" - } - }, - 
"systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -291,7 +264,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -307,13 +280,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Schema" + "Catalog" ] } }, @@ -325,14 +298,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:abc@acryl.io", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -350,12 +323,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" } }, "systemMetadata": { @@ -366,21 +339,20 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "containerProperties", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } - ] + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "hive_metastore", + "unity_schema": "bronze_kambi" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi", + "name": "bronze_kambi" } }, "systemMetadata": { @@ -390,13 +362,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "status", "aspect": { "json": { - "container": "urn:li:container:5ada0a9773235325e506410c512feabb" + "removed": false } }, "systemMetadata": { @@ -406,40 +378,18 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "browsePathsV2", "aspect": { "json": { - "customProperties": { - 
"storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.default.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "path": [ + { + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + } + ] } }, "systemMetadata": { @@ -449,14 +399,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Schema" ] } }, @@ -467,55 +417,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "container", "aspect": { "json": { - "schemaName": "acryl_metastore.main.default.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } - ] + "container": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" } }, "systemMetadata": { @@ -525,22 +433,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -551,7 +450,23 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -562,12 +477,8 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - }, - { - "id": "urn:li:container:5ada0a9773235325e506410c512feabb", - "urn": "urn:li:container:5ada0a9773235325e506410c512feabb" + "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" } ] } @@ -579,22 +490,33 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "information_schema" + "table_type": "HIVE_VIEW", + "Catalog": "hive_metastore", + "Database": "bronze_kambi", + "Table": "view1", + "Last Access": "UNKNOWN", + "Created By": "Spark 3.2.1", + "Owner": "root", + "table_id": "hive_metastore.bronze_kambi.view1", + "created_at": "2022-06-22 05:14:56" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/view1", + "name": "view1", + "qualifiedName": "hive_metastore.bronze_kambi.view1", + "created": { + "time": 1655874896000 }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/information_schema", - "name": "information_schema", - "description": "Information schema (auto-created)" + "lastModified": { + "time": 1655874896000 + }, + "tags": [] } }, "systemMetadata": { @@ -604,13 +526,15 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "viewProperties", "aspect": { "json": { - "removed": false + "materialized": false, + "viewLogic": "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", + "viewLanguage": "SQL" } }, "systemMetadata": { @@ -621,13 +545,22 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "containerProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" - } + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl 
metastore", + "catalog": "main", + "unity_schema": "default" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default", + "name": "default", + "description": "Default schema (auto-created)" + } }, "systemMetadata": { "lastObserved": 1638860400000, @@ -636,14 +569,14 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Schema" + "View" ] } }, @@ -654,49 +587,8 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:Service Principal 1", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -707,8 +599,12 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" + }, + { + "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", + "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" } ] } @@ -720,74 +616,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProperties", - "aspect": { - "json": { - "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - 
"delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/information_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.information_schema.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "removed": false } }, "systemMetadata": { @@ -798,12 +633,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.main.information_schema.quickstart_table", + "schemaName": "hive_metastore.bronze_kambi.view1", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -822,144 +657,409 @@ }, "fields": [ { - "fieldPath": "columnA", - "nullable": true, + "fieldPath": "betStatusId", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "int", + "nativeDataType": "bigint", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "columnB", - "nullable": true, + "fieldPath": "channelId", + "nullable": false, "type": { "type": { - "com.linkedin.schema.StringType": {} + "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "string", + "nativeDataType": "bigint", "recursive": false, "isPartOfKey": false - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ + }, { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": 
"struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>\"}" + }, { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=long].combinationref", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" }, { - "id": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", - "urn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "quickstart_schema" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].eachway", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].livebetting", + "nullable": true, + "type": { + "type": { + 
"com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].betoffertypeid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].criterionid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].criterionname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventgroupid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": 
"{\"native_data_type\": \"array>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=long].id", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=string].name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventstartdate", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=boolean].live", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=long].outcomeids", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "long" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].outcomelabel", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", 
\"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].sportid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].status", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].voidreason", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].payout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].rewardextrapayout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + } + ] + } + }, + "systemMetadata": { "lastObserved": 1638860400000, "runId": "unity-catalog-test", "lastRunId": "no-run-id-provided" @@ -967,7 +1067,23 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:databricks" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -985,14 +1101,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -1009,13 +1125,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" } }, "systemMetadata": { @@ -1026,37 +1142,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } }, "systemMetadata": { @@ -1067,37 +1158,34 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "storage_location": "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.quickstart_schema.quickstart_table", + "table_type": "HIVE_MANAGED_TABLE", + "Catalog": "hive_metastore", + "Database": "bronze_kambi", + "Table": "bet", + "Last Access": "UNKNOWN", + "Created By": "Spark 3.2.1", + "Owner": "root", + "Is_managed_location": "true", + "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", + "table_id": "hive_metastore.bronze_kambi.bet", + "created_at": "2022-06-22 05:14:56" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", + "name": "bet", + "qualifiedName": "hive_metastore.bronze_kambi.bet", "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" + "time": 1655874896000 }, "lastModified": { - "time": 
1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" + "time": 1655874896000 }, "tags": [] } @@ -1110,7 +1198,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1127,53 +1215,20 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "browsePathsV2", "aspect": { "json": { - "schemaName": "acryl_metastore.main.quickstart_schema.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ + "path": [ { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } ] } @@ -1186,32 +1241,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1222,12 +1252,12 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" }, { - "id": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "urn": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", + "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" } ] } @@ -1239,272 +1269,429 @@ } }, { - "entityType": "container", - "entityUrn": 
"urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "schemaMetadata", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog" + "schemaName": "hive_metastore.bronze_kambi.bet", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog", - "name": "quickstart_catalog", - "description": "" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Catalog" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { 
- "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog", - "unity_schema": "default" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", - "name": "default", - "description": "Default schema (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Schema" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "betStatusId", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "channelId", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=long].combinationref", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": 
"double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].eachway", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].livebetting", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].betoffertypeid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].criterionid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].criterionname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventgroupid", + 
"nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=long].id", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=string].name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventstartdate", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=boolean].live", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=long].outcomeids", + 
"nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "long" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + }, { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].outcomelabel", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].sportid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" }, { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].status", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].voidreason", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].payout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].rewardextrapayout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": 
\"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" } ] } @@ -1517,12 +1704,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" + "container": "urn:li:container:5ada0a9773235325e506410c512feabb" } }, "systemMetadata": { @@ -1533,7 +1720,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1554,9 +1741,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.default.quickstart_table", + "qualifiedName": "main.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -1576,7 +1763,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1594,12 +1781,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.default.quickstart_table", + "schemaName": "acryl_metastore.main.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1652,7 +1839,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -1677,7 +1864,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": 
"browsePathsV2", "aspect": { @@ -1688,12 +1875,12 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" }, { - "id": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "urn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" + "id": "urn:li:container:5ada0a9773235325e506410c512feabb", + "urn": "urn:li:container:5ada0a9773235325e506410c512feabb" } ] } @@ -1706,7 +1893,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -1715,12 +1902,12 @@ "platform": "databricks", "env": "PROD", "metastore": "acryl metastore", - "catalog": "quickstart_catalog", - "unity_schema": "information_schema" + "catalog": "main", + "unity_schema": "quickstart_schema" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/information_schema", - "name": "information_schema", - "description": "Information schema (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -1731,7 +1918,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1747,7 +1934,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -1763,7 +1950,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1781,14 +1968,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:Service Principal 1", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -1806,12 +1993,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } }, "systemMetadata": { @@ -1822,7 +2009,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1833,8 +2020,8 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": 
"urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } ] } @@ -1847,12 +2034,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:29f99476d533719be0cebc374d5265dc" + "container": "urn:li:container:481380c5a355638fc626eca8380cdda9" } }, "systemMetadata": { @@ -1863,7 +2050,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1884,9 +2071,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/information_schema/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.information_schema.quickstart_table", + "qualifiedName": "main.quickstart_schema.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -1906,7 +2093,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1924,12 +2111,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.information_schema.quickstart_table", + "schemaName": "acryl_metastore.main.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1981,8 +2168,136 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": 
"dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + }, + { + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + }, + { + "id": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "urn": "urn:li:container:481380c5a355638fc626eca8380cdda9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "quickstart_catalog" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog", + "name": "quickstart_catalog", + "description": "" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:databricks" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Catalog" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -2006,8 +2321,24 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2016,14 +2347,6 @@ { "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": 
"urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:29f99476d533719be0cebc374d5265dc", - "urn": "urn:li:container:29f99476d533719be0cebc374d5265dc" } ] } @@ -2036,7 +2359,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2046,11 +2369,11 @@ "env": "PROD", "metastore": "acryl metastore", "catalog": "quickstart_catalog", - "unity_schema": "quickstart_schema" + "unity_schema": "default" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -2061,7 +2384,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2077,7 +2400,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -2093,7 +2416,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2111,14 +2434,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -2136,7 +2459,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -2152,7 +2475,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2177,12 +2500,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:47a033e31b92a120f08f297c05d286f1" + "container": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" } }, "systemMetadata": { @@ -2193,7 +2516,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -2214,9 +2537,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", + "qualifiedName": "quickstart_catalog.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -2236,7 +2559,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2254,12 +2577,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table", + "schemaName": "acryl_metastore.quickstart_catalog.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -2312,7 +2635,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -2324,153 +2647,9 @@ } ], "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:47a033e31b92a120f08f297c05d286f1", - "urn": "urn:li:container:47a033e31b92a120f08f297c05d286f1" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - 
"metastore": "acryl metastore", - "catalog": "system" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", - "name": "system", - "description": "System catalog (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Catalog" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:Service Principal 2", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -2480,8 +2659,8 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2490,6 +2669,14 @@ { "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + }, + { + "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + }, + { + "id": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "urn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" } ] } @@ -2502,7 +2689,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2511,12 +2698,12 @@ "platform": "databricks", "env": "PROD", "metastore": "acryl metastore", - "catalog": "system", - "unity_schema": "default" + "catalog": "quickstart_catalog", + "unity_schema": "quickstart_schema" }, - "externalUrl": 
"https://dummy.cloud.databricks.com/explore/data/system/default", - "name": "default", - "description": "Default schema (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -2527,7 +2714,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2543,7 +2730,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -2559,7 +2746,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2577,14 +2764,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:abc@acryl.io", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -2602,12 +2789,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" } }, "systemMetadata": { @@ -2618,7 +2805,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2629,8 +2816,8 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" } ] } @@ -2643,12 +2830,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:b330768923270ff5450695bee1c94247" + "container": "urn:li:container:47a033e31b92a120f08f297c05d286f1" } }, "systemMetadata": { @@ -2659,7 +2846,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -2680,9 +2867,9 @@ "updated_by": "abc@acryl.io", 
"updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", "name": "quickstart_table", - "qualifiedName": "system.default.quickstart_table", + "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -2702,7 +2889,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2720,12 +2907,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.system.default.quickstart_table", + "schemaName": "acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -2778,7 +2965,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -2803,7 +2990,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2814,12 +3001,148 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" }, { - "id": "urn:li:container:b330768923270ff5450695bee1c94247", - "urn": "urn:li:container:b330768923270ff5450695bee1c94247" + "id": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "urn": "urn:li:container:47a033e31b92a120f08f297c05d286f1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "system" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", + "name": "system", + "description": "System catalog (auto-created)" + } + }, + "systemMetadata": { + "lastObserved": 
1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:databricks" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Catalog" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:Service Principal 2", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" } ] } @@ -2832,7 +3155,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2842,11 +3165,11 @@ "env": "PROD", "metastore": "acryl metastore", "catalog": "system", - "unity_schema": "information_schema" + "unity_schema": "default" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/information_schema", - "name": "information_schema", - "description": "Information schema (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -2857,7 +3180,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2873,7 +3196,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": 
"urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -2889,7 +3212,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2907,14 +3230,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:Service Principal 1", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -2932,7 +3255,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -2948,7 +3271,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2973,12 +3296,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59" + "container": "urn:li:container:b330768923270ff5450695bee1c94247" } }, "systemMetadata": { @@ -2989,7 +3312,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -3010,9 +3333,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/information_schema/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "system.information_schema.quickstart_table", + "qualifiedName": "system.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -3032,7 +3355,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -3050,12 +3373,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": 
"acryl_metastore.system.information_schema.quickstart_table", + "schemaName": "acryl_metastore.system.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -3108,7 +3431,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -3133,7 +3456,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -3148,8 +3471,8 @@ "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" }, { - "id": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", - "urn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59" + "id": "urn:li:container:b330768923270ff5450695bee1c94247", + "urn": "urn:li:container:b330768923270ff5450695bee1c94247" } ] } @@ -3506,22 +3829,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", @@ -3556,7 +3863,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3572,7 +3879,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3588,7 +3895,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3604,7 +3911,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3620,7 +3927,7 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { diff --git a/metadata-ingestion/tests/unit/test_unity_catalog_config.py b/metadata-ingestion/tests/unit/test_unity_catalog_config.py index 4098ed4074de2..3c0994cde7889 100644 --- a/metadata-ingestion/tests/unit/test_unity_catalog_config.py +++ b/metadata-ingestion/tests/unit/test_unity_catalog_config.py @@ -67,7 +67,6 @@ def test_profiling_requires_warehouses_id(): @freeze_time(FROZEN_TIME) def test_workspace_url_should_start_with_https(): - with pytest.raises(ValueError, match="Workspace URL must start with http scheme"): UnityCatalogSourceConfig.parse_obj( { @@ -76,3 +75,67 @@ def test_workspace_url_should_start_with_https(): "profiling": {"enabled": True}, } ) + + +def test_global_warehouse_id_is_set_from_profiling(): + config = UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "profiling": { + "method": "ge", + "enabled": True, + "warehouse_id": "my_warehouse_id", + }, + } + ) + assert config.profiling.warehouse_id == "my_warehouse_id" + assert config.warehouse_id == "my_warehouse_id" + + +def test_set_different_warehouse_id_from_profiling(): + with pytest.raises( + ValueError, + match="When `warehouse_id` is set, it must match the `warehouse_id` in `profiling`.", + ): + UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "warehouse_id": "my_global_warehouse_id", + "profiling": { + "method": "ge", + "enabled": True, + "warehouse_id": "my_warehouse_id", + }, + } + ) + + +def test_warehouse_id_must_be_set_if_include_hive_metastore_is_true(): + with pytest.raises( + ValueError, + match="When `include_hive_metastore` is set, `warehouse_id` must be set.", + ): + UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "include_hive_metastore": True, + } + ) + + +def test_set_profiling_warehouse_id_from_global(): + config = UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "warehouse_id": "my_global_warehouse_id", + "profiling": { + "method": "ge", + "enabled": True, + }, + } + ) + assert config.profiling.warehouse_id == "my_global_warehouse_id" From 0d6a5e5df25b58af0a434d5d2f83f6ef463ba99b Mon Sep 17 00:00:00 2001 From: siddiquebagwan-gslab Date: Thu, 14 Dec 2023 21:06:28 +0530 Subject: [PATCH 250/792] feat(ingestion/transformer): create tag if not exist (#9076) --- .../src/datahub/ingestion/graph/client.py | 24 ++++++ .../ingestion/transformer/add_dataset_tags.py | 42 ++++++++++- .../ingestion/transformer/base_transformer.py | 75 +++++++++++++++---- .../tests/unit/test_transform_dataset.py | 32 ++++++-- 4 files changed, 154 insertions(+), 19 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/graph/client.py b/metadata-ingestion/src/datahub/ingestion/graph/client.py index d91165ac9777c..5c24b06dde999 100644 --- a/metadata-ingestion/src/datahub/ingestion/graph/client.py +++ b/metadata-ingestion/src/datahub/ingestion/graph/client.py @@ -787,9 +787,11 @@ def get_aspect_counts(self, aspect: str, urn_like: Optional[str] = None) -> int: def execute_graphql(self, query: str, variables: Optional[Dict] = None) -> Dict: url = 
f"{self.config.server}/api/graphql" + body: Dict = { "query": query, } + if variables: body["variables"] = variables @@ -1065,6 +1067,28 @@ def parse_sql_lineage( default_schema=default_schema, ) + def create_tag(self, tag_name: str) -> str: + graph_query: str = """ + mutation($tag_detail: CreateTagInput!) { + createTag(input: $tag_detail) + } + """ + + variables = { + "tag_detail": { + "name": tag_name, + "id": tag_name, + }, + } + + res = self.execute_graphql( + query=graph_query, + variables=variables, + ) + + # return urn + return res["createTag"] + def close(self) -> None: self._make_schema_resolver.cache_clear() super().close() diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py index 5a276ad899c48..72a8c226e491e 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py @@ -1,14 +1,24 @@ +import logging from typing import Callable, List, Optional, cast +import datahub.emitter.mce_builder as builder from datahub.configuration.common import ( KeyValuePattern, TransformerSemanticsConfigModel, ) from datahub.configuration.import_resolver import pydantic_resolve_key from datahub.emitter.mce_builder import Aspect +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.transformer.dataset_transformer import DatasetTagsTransformer -from datahub.metadata.schema_classes import GlobalTagsClass, TagAssociationClass +from datahub.metadata.schema_classes import ( + GlobalTagsClass, + TagAssociationClass, + TagKeyClass, +) +from datahub.utilities.urns.tag_urn import TagUrn + +logger = logging.getLogger(__name__) class AddDatasetTagsConfig(TransformerSemanticsConfigModel): @@ -22,11 +32,13 @@ class AddDatasetTags(DatasetTagsTransformer): ctx: PipelineContext config: AddDatasetTagsConfig + processed_tags: List[TagAssociationClass] def __init__(self, config: AddDatasetTagsConfig, ctx: PipelineContext): super().__init__() self.ctx = ctx self.config = config + self.processed_tags = [] @classmethod def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetTags": @@ -45,11 +57,38 @@ def transform_aspect( tags_to_add = self.config.get_tags_to_add(entity_urn) if tags_to_add is not None: out_global_tags_aspect.tags.extend(tags_to_add) + self.processed_tags.extend( + tags_to_add + ) # Keep track of tags added so that we can create them in handle_end_of_stream return self.get_result_semantics( self.config, self.ctx.graph, entity_urn, out_global_tags_aspect ) + def handle_end_of_stream(self) -> List[MetadataChangeProposalWrapper]: + + mcps: List[MetadataChangeProposalWrapper] = [] + + logger.debug("Generating tags") + + for tag_association in self.processed_tags: + ids: List[str] = TagUrn.create_from_string( + tag_association.tag + ).get_entity_id() + + assert len(ids) == 1, "Invalid Tag Urn" + + tag_name: str = ids[0] + + mcps.append( + MetadataChangeProposalWrapper( + entityUrn=builder.make_tag_urn(tag=tag_name), + aspect=TagKeyClass(name=tag_name), + ) + ) + + return mcps + class SimpleDatasetTagConfig(TransformerSemanticsConfigModel): tag_urns: List[str] @@ -82,6 +121,7 @@ class PatternAddDatasetTags(AddDatasetTags): """Transformer that adds a specified set of tags to each dataset.""" def __init__(self, config: PatternDatasetTagsConfig, ctx: PipelineContext): + config.tag_pattern.all tag_pattern = 
config.tag_pattern generic_config = AddDatasetTagsConfig( get_tags_to_add=lambda _: [ diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py index e0d6ae720c9a1..8b6f42dcfba4b 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py @@ -17,13 +17,30 @@ log = logging.getLogger(__name__) -class LegacyMCETransformer(Transformer, metaclass=ABCMeta): +def _update_work_unit_id( + envelope: RecordEnvelope, urn: str, aspect_name: str +) -> Dict[Any, Any]: + structured_urn = Urn.create_from_string(urn) + simple_name = "-".join(structured_urn.get_entity_id()) + record_metadata = envelope.metadata.copy() + record_metadata.update({"workunit_id": f"txform-{simple_name}-{aspect_name}"}) + return record_metadata + + +class HandleEndOfStreamTransformer: + def handle_end_of_stream(self) -> List[MetadataChangeProposalWrapper]: + return [] + + +class LegacyMCETransformer( + Transformer, HandleEndOfStreamTransformer, metaclass=ABCMeta +): @abstractmethod def transform_one(self, mce: MetadataChangeEventClass) -> MetadataChangeEventClass: pass -class SingleAspectTransformer(metaclass=ABCMeta): +class SingleAspectTransformer(HandleEndOfStreamTransformer, metaclass=ABCMeta): @abstractmethod def aspect_name(self) -> str: """Implement this method to specify a single aspect that the transformer is interested in subscribing to. No default provided.""" @@ -180,6 +197,32 @@ def _transform_or_record_mcpw( self._record_mcp(envelope.record) return envelope if envelope.record.aspect is not None else None + def _handle_end_of_stream( + self, envelope: RecordEnvelope + ) -> Iterable[RecordEnvelope]: + + if not isinstance(self, SingleAspectTransformer) and not isinstance( + self, LegacyMCETransformer + ): + return + + mcps: List[MetadataChangeProposalWrapper] = self.handle_end_of_stream() + + for mcp in mcps: + if mcp.aspect is None or mcp.entityUrn is None: # to silent the lint error + continue + + record_metadata = _update_work_unit_id( + envelope=envelope, + aspect_name=mcp.aspect.get_aspect_name(), # type: ignore + urn=mcp.entityUrn, + ) + + yield RecordEnvelope( + record=mcp, + metadata=record_metadata, + ) + def transform( self, record_envelopes: Iterable[RecordEnvelope] ) -> Iterable[RecordEnvelope]: @@ -216,17 +259,10 @@ def transform( else None, ) if transformed_aspect: - # for end of stream records, we modify the workunit-id structured_urn = Urn.create_from_string(urn) - simple_name = "-".join(structured_urn.get_entity_id()) - record_metadata = envelope.metadata.copy() - record_metadata.update( - { - "workunit_id": f"txform-{simple_name}-{self.aspect_name()}" - } - ) - yield RecordEnvelope( - record=MetadataChangeProposalWrapper( + + mcp: MetadataChangeProposalWrapper = ( + MetadataChangeProposalWrapper( entityUrn=urn, entityType=structured_urn.get_type(), systemMetadata=last_seen_mcp.systemMetadata @@ -234,8 +270,21 @@ def transform( else last_seen_mce_system_metadata, aspectName=self.aspect_name(), aspect=transformed_aspect, - ), + ) + ) + + record_metadata = _update_work_unit_id( + envelope=envelope, + aspect_name=mcp.aspect.get_aspect_name(), # type: ignore + urn=mcp.entityUrn, + ) + + yield RecordEnvelope( + record=mcp, metadata=record_metadata, ) + self._mark_processed(urn) + yield from self._handle_end_of_stream(envelope=envelope) + yield envelope diff --git 
a/metadata-ingestion/tests/unit/test_transform_dataset.py b/metadata-ingestion/tests/unit/test_transform_dataset.py index 8014df2f5c519..546549dcf37a4 100644 --- a/metadata-ingestion/tests/unit/test_transform_dataset.py +++ b/metadata-ingestion/tests/unit/test_transform_dataset.py @@ -813,13 +813,25 @@ def test_simple_dataset_tags_transformation(mock_time): ] ) ) - assert len(outputs) == 3 + + assert len(outputs) == 5 # Check that tags were added. tags_aspect = outputs[1].record.aspect + assert tags_aspect.tags[0].tag == builder.make_tag_urn("NeedsDocumentation") assert tags_aspect assert len(tags_aspect.tags) == 2 - assert tags_aspect.tags[0].tag == builder.make_tag_urn("NeedsDocumentation") + + # Check new tag entity should be there + assert outputs[2].record.aspectName == "tagKey" + assert outputs[2].record.aspect.name == "NeedsDocumentation" + assert outputs[2].record.entityUrn == builder.make_tag_urn("NeedsDocumentation") + + assert outputs[3].record.aspectName == "tagKey" + assert outputs[3].record.aspect.name == "Legacy" + assert outputs[3].record.entityUrn == builder.make_tag_urn("Legacy") + + assert isinstance(outputs[4].record, EndOfStream) def dummy_tag_resolver_method(dataset_snapshot): @@ -853,7 +865,7 @@ def test_pattern_dataset_tags_transformation(mock_time): ) ) - assert len(outputs) == 3 + assert len(outputs) == 5 tags_aspect = outputs[1].record.aspect assert tags_aspect assert len(tags_aspect.tags) == 2 @@ -1363,7 +1375,7 @@ def test_mcp_add_tags_missing(mock_time): ] input_stream.append(RecordEnvelope(record=EndOfStream(), metadata={})) outputs = list(transformer.transform(input_stream)) - assert len(outputs) == 3 + assert len(outputs) == 5 assert outputs[0].record == dataset_mcp # Check that tags were added, this will be the second result tags_aspect = outputs[1].record.aspect @@ -1395,13 +1407,23 @@ def test_mcp_add_tags_existing(mock_time): ] input_stream.append(RecordEnvelope(record=EndOfStream(), metadata={})) outputs = list(transformer.transform(input_stream)) - assert len(outputs) == 2 + + assert len(outputs) == 4 + # Check that tags were added, this will be the second result tags_aspect = outputs[0].record.aspect assert tags_aspect assert len(tags_aspect.tags) == 3 assert tags_aspect.tags[0].tag == builder.make_tag_urn("Test") assert tags_aspect.tags[1].tag == builder.make_tag_urn("NeedsDocumentation") + assert tags_aspect.tags[2].tag == builder.make_tag_urn("Legacy") + + # Check tag entities got added + assert outputs[1].record.entityType == "tag" + assert outputs[1].record.entityUrn == builder.make_tag_urn("NeedsDocumentation") + assert outputs[2].record.entityType == "tag" + assert outputs[2].record.entityUrn == builder.make_tag_urn("Legacy") + assert isinstance(outputs[-1].record, EndOfStream) From ecef50f8fc75309562cf2729380ed18d5020ae8b Mon Sep 17 00:00:00 2001 From: Shirshanka Das Date: Thu, 14 Dec 2023 08:03:36 -0800 Subject: [PATCH 251/792] =?UTF-8?q?fix(ingest):=20make=20user=5Furn=20and?= =?UTF-8?q?=20group=5Furn=20generation=20consider=20user=20and=E2=80=A6=20?= =?UTF-8?q?(#9026)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Aseem Bansal --- .../src/datahub/emitter/mce_builder.py | 8 +++---- .../tests/unit/test_mce_builder.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/emitter/mce_builder.py b/metadata-ingestion/src/datahub/emitter/mce_builder.py index 3b2c87ea25a31..9da1b0ab56f89 100644 --- 
a/metadata-ingestion/src/datahub/emitter/mce_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mce_builder.py @@ -193,20 +193,20 @@ def assertion_urn_to_key(assertion_urn: str) -> Optional[AssertionKeyClass]: def make_user_urn(username: str) -> str: """ - Makes a user urn if the input is not a user urn already + Makes a user urn if the input is not a user or group urn already """ return ( f"urn:li:corpuser:{username}" - if not username.startswith("urn:li:corpuser:") + if not username.startswith(("urn:li:corpuser:", "urn:li:corpGroup:")) else username ) def make_group_urn(groupname: str) -> str: """ - Makes a group urn if the input is not a group urn already + Makes a group urn if the input is not a user or group urn already """ - if groupname and groupname.startswith("urn:li:corpGroup:"): + if groupname and groupname.startswith(("urn:li:corpGroup:", "urn:li:corpuser:")): return groupname else: return f"urn:li:corpGroup:{groupname}" diff --git a/metadata-ingestion/tests/unit/test_mce_builder.py b/metadata-ingestion/tests/unit/test_mce_builder.py index b9025d76a3a1d..d7c84f7863b40 100644 --- a/metadata-ingestion/tests/unit/test_mce_builder.py +++ b/metadata-ingestion/tests/unit/test_mce_builder.py @@ -33,3 +33,25 @@ def test_create_dataset_urn_with_reserved_chars() -> None: ) == "urn:li:dataset:(urn:li:dataPlatform:platform%29,platform%2Cinstance.table_%28name%29,PROD)" ) + + +def test_make_user_urn() -> None: + assert builder.make_user_urn("someUser") == "urn:li:corpuser:someUser" + assert ( + builder.make_user_urn("urn:li:corpuser:someUser") == "urn:li:corpuser:someUser" + ) + assert ( + builder.make_user_urn("urn:li:corpGroup:someGroup") + == "urn:li:corpGroup:someGroup" + ) + + +def test_make_group_urn() -> None: + assert builder.make_group_urn("someGroup") == "urn:li:corpGroup:someGroup" + assert ( + builder.make_group_urn("urn:li:corpGroup:someGroup") + == "urn:li:corpGroup:someGroup" + ) + assert ( + builder.make_group_urn("urn:li:corpuser:someUser") == "urn:li:corpuser:someUser" + ) From 1741c07d769f56a9cf066172725384b4e8780839 Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:01:51 +0530 Subject: [PATCH 252/792] feat(ingestion): Add test_connection methods for important sources (#9334) --- .../datahub/ingestion/source/dbt/dbt_cloud.py | 89 ++-- .../datahub/ingestion/source/dbt/dbt_core.py | 56 ++- .../src/datahub/ingestion/source/kafka.py | 74 ++- .../ingestion/source/powerbi/powerbi.py | 22 +- .../ingestion/source/sql/sql_common.py | 26 +- .../src/datahub/ingestion/source/tableau.py | 23 +- .../ingestion/source_config/sql/snowflake.py | 2 +- .../tests/integration/dbt/test_dbt.py | 69 ++- .../tests/integration/kafka/test_kafka.py | 85 +++- .../tests/integration/mysql/test_mysql.py | 38 +- .../tests/integration/powerbi/test_powerbi.py | 23 +- .../tableau/test_tableau_ingest.py | 21 +- .../test_helpers/test_connection_helpers.py | 47 ++ .../tests/unit/test_snowflake_source.py | 428 +++++++----------- .../tests/unit/test_sql_common.py | 62 ++- 15 files changed, 684 insertions(+), 381 deletions(-) create mode 100644 metadata-ingestion/tests/test_helpers/test_connection_helpers.py diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py index a9685b2554553..069c1f2781460 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py @@ 
-14,7 +14,12 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import SourceCapability +from datahub.ingestion.api.source import ( + CapabilityReport, + SourceCapability, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.source.dbt.dbt_common import ( DBTColumn, DBTCommonConfig, @@ -177,7 +182,7 @@ class DBTCloudConfig(DBTCommonConfig): @support_status(SupportStatus.INCUBATING) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -class DBTCloudSource(DBTSourceBase): +class DBTCloudSource(DBTSourceBase, TestableSource): """ This source pulls dbt metadata directly from the dbt Cloud APIs. @@ -199,6 +204,57 @@ def create(cls, config_dict, ctx): config = DBTCloudConfig.parse_obj(config_dict) return cls(config, ctx, "dbt") + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source_config = DBTCloudConfig.parse_obj_allow_extras(config_dict) + DBTCloudSource._send_graphql_query( + metadata_endpoint=source_config.metadata_endpoint, + token=source_config.token, + query=_DBT_GRAPHQL_QUERY.format(type="tests", fields="jobId"), + variables={ + "jobId": source_config.job_id, + "runId": source_config.run_id, + }, + ) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + + @staticmethod + def _send_graphql_query( + metadata_endpoint: str, token: str, query: str, variables: Dict + ) -> Dict: + logger.debug(f"Sending GraphQL query to dbt Cloud: {query}") + response = requests.post( + metadata_endpoint, + json={ + "query": query, + "variables": variables, + }, + headers={ + "Authorization": f"Bearer {token}", + "X-dbt-partner-source": "acryldatahub", + }, + ) + + try: + res = response.json() + if "errors" in res: + raise ValueError( + f'Unable to fetch metadata from dbt Cloud: {res["errors"]}' + ) + data = res["data"] + except JSONDecodeError as e: + response.raise_for_status() + raise e + + return data + def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: # TODO: In dbt Cloud, commands are scheduled as part of jobs, where # each job can have multiple runs. 
We currently only fully support @@ -213,6 +269,8 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: for node_type, fields in _DBT_FIELDS_BY_TYPE.items(): logger.info(f"Fetching {node_type} from dbt Cloud") data = self._send_graphql_query( + metadata_endpoint=self.config.metadata_endpoint, + token=self.config.token, query=_DBT_GRAPHQL_QUERY.format(type=node_type, fields=fields), variables={ "jobId": self.config.job_id, @@ -232,33 +290,6 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: return nodes, additional_metadata - def _send_graphql_query(self, query: str, variables: Dict) -> Dict: - logger.debug(f"Sending GraphQL query to dbt Cloud: {query}") - response = requests.post( - self.config.metadata_endpoint, - json={ - "query": query, - "variables": variables, - }, - headers={ - "Authorization": f"Bearer {self.config.token}", - "X-dbt-partner-source": "acryldatahub", - }, - ) - - try: - res = response.json() - if "errors" in res: - raise ValueError( - f'Unable to fetch metadata from dbt Cloud: {res["errors"]}' - ) - data = res["data"] - except JSONDecodeError as e: - response.raise_for_status() - raise e - - return data - def _parse_into_dbt_node(self, node: Dict) -> DBTNode: key = node["uniqueId"] diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index ac2b2815f3caa..563b005d7a88d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -18,7 +18,12 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import SourceCapability +from datahub.ingestion.api.source import ( + CapabilityReport, + SourceCapability, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.source.aws.aws_common import AwsConnectionConfig from datahub.ingestion.source.dbt.dbt_common import ( DBTColumn, @@ -60,11 +65,6 @@ class DBTCoreConfig(DBTCommonConfig): _github_info_deprecated = pydantic_renamed_field("github_info", "git_info") - @property - def s3_client(self): - assert self.aws_connection - return self.aws_connection.get_s3_client() - @validator("aws_connection") def aws_connection_needed_if_s3_uris_present( cls, aws_connection: Optional[AwsConnectionConfig], values: Dict, **kwargs: Any @@ -363,7 +363,7 @@ def load_test_results( @support_status(SupportStatus.CERTIFIED) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -class DBTCoreSource(DBTSourceBase): +class DBTCoreSource(DBTSourceBase, TestableSource): """ The artifacts used by this source are: - [dbt manifest file](https://docs.getdbt.com/reference/artifacts/manifest-json) @@ -387,12 +387,34 @@ def create(cls, config_dict, ctx): config = DBTCoreConfig.parse_obj(config_dict) return cls(config, ctx, "dbt") - def load_file_as_json(self, uri: str) -> Any: + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source_config = DBTCoreConfig.parse_obj_allow_extras(config_dict) + DBTCoreSource.load_file_as_json( + source_config.manifest_path, source_config.aws_connection + ) + DBTCoreSource.load_file_as_json( + source_config.catalog_path, source_config.aws_connection + ) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, 
failure_reason=str(e) + ) + return test_report + + @staticmethod + def load_file_as_json( + uri: str, aws_connection: Optional[AwsConnectionConfig] + ) -> Dict: if re.match("^https?://", uri): return json.loads(requests.get(uri).text) elif re.match("^s3://", uri): u = urlparse(uri) - response = self.config.s3_client.get_object( + assert aws_connection + response = aws_connection.get_s3_client().get_object( Bucket=u.netloc, Key=u.path.lstrip("/") ) return json.loads(response["Body"].read().decode("utf-8")) @@ -410,12 +432,18 @@ def loadManifestAndCatalog( Optional[str], Optional[str], ]: - dbt_manifest_json = self.load_file_as_json(self.config.manifest_path) + dbt_manifest_json = self.load_file_as_json( + self.config.manifest_path, self.config.aws_connection + ) - dbt_catalog_json = self.load_file_as_json(self.config.catalog_path) + dbt_catalog_json = self.load_file_as_json( + self.config.catalog_path, self.config.aws_connection + ) if self.config.sources_path is not None: - dbt_sources_json = self.load_file_as_json(self.config.sources_path) + dbt_sources_json = self.load_file_as_json( + self.config.sources_path, self.config.aws_connection + ) sources_results = dbt_sources_json["results"] else: sources_results = {} @@ -491,7 +519,9 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: # This will populate the test_results field on each test node. all_nodes = load_test_results( self.config, - self.load_file_as_json(self.config.test_results_path), + self.load_file_as_json( + self.config.test_results_path, self.config.aws_connection + ), all_nodes, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/kafka.py b/metadata-ingestion/src/datahub/ingestion/source/kafka.py index 25520e7aa66ff..99ef737206ab0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/kafka.py +++ b/metadata-ingestion/src/datahub/ingestion/source/kafka.py @@ -15,6 +15,7 @@ ConfigResource, TopicMetadata, ) +from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient from datahub.configuration.common import AllowDenyPattern from datahub.configuration.kafka import KafkaConsumerConnectionConfig @@ -40,7 +41,13 @@ support_status, ) from datahub.ingestion.api.registry import import_path -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceCapability +from datahub.ingestion.api.source import ( + CapabilityReport, + MetadataWorkUnitProcessor, + SourceCapability, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import DatasetSubTypes from datahub.ingestion.source.kafka_schema_registry_base import KafkaSchemaRegistryBase @@ -133,6 +140,18 @@ class KafkaSourceConfig( ) +def get_kafka_consumer( + connection: KafkaConsumerConnectionConfig, +) -> confluent_kafka.Consumer: + return confluent_kafka.Consumer( + { + "group.id": "test", + "bootstrap.servers": connection.bootstrap, + **connection.consumer_config, + } + ) + + @dataclass class KafkaSourceReport(StaleEntityRemovalSourceReport): topics_scanned: int = 0 @@ -145,6 +164,45 @@ def report_dropped(self, topic: str) -> None: self.filtered.append(topic) +class KafkaConnectionTest: + def __init__(self, config_dict: dict): + self.config = KafkaSourceConfig.parse_obj_allow_extras(config_dict) + self.report = KafkaSourceReport() + self.consumer: confluent_kafka.Consumer = get_kafka_consumer( + self.config.connection + ) + + def get_connection_test(self) -> TestConnectionReport: + capability_report = { + 
SourceCapability.SCHEMA_METADATA: self.schema_registry_connectivity(), + } + return TestConnectionReport( + basic_connectivity=self.basic_connectivity(), + capability_report={ + k: v for k, v in capability_report.items() if v is not None + }, + ) + + def basic_connectivity(self) -> CapabilityReport: + try: + self.consumer.list_topics(timeout=10) + return CapabilityReport(capable=True) + except Exception as e: + return CapabilityReport(capable=False, failure_reason=str(e)) + + def schema_registry_connectivity(self) -> CapabilityReport: + try: + SchemaRegistryClient( + { + "url": self.config.connection.schema_registry_url, + **self.config.connection.schema_registry_config, + } + ).get_subjects() + return CapabilityReport(capable=True) + except Exception as e: + return CapabilityReport(capable=False, failure_reason=str(e)) + + @platform_name("Kafka") @config_class(KafkaSourceConfig) @support_status(SupportStatus.CERTIFIED) @@ -160,7 +218,7 @@ def report_dropped(self, topic: str) -> None: SourceCapability.SCHEMA_METADATA, "Schemas associated with each topic are extracted from the schema registry. Avro and Protobuf (certified), JSON (incubating). Schema references are supported.", ) -class KafkaSource(StatefulIngestionSourceBase): +class KafkaSource(StatefulIngestionSourceBase, TestableSource): """ This plugin extracts the following: - Topics from the Kafka broker @@ -183,12 +241,8 @@ def create_schema_registry( def __init__(self, config: KafkaSourceConfig, ctx: PipelineContext): super().__init__(config, ctx) self.source_config: KafkaSourceConfig = config - self.consumer: confluent_kafka.Consumer = confluent_kafka.Consumer( - { - "group.id": "test", - "bootstrap.servers": self.source_config.connection.bootstrap, - **self.source_config.connection.consumer_config, - } + self.consumer: confluent_kafka.Consumer = get_kafka_consumer( + self.source_config.connection ) self.init_kafka_admin_client() self.report: KafkaSourceReport = KafkaSourceReport() @@ -226,6 +280,10 @@ def init_kafka_admin_client(self) -> None: f"Failed to create Kafka Admin Client due to error {e}.", ) + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + return KafkaConnectionTest(config_dict).get_connection_test() + @classmethod def create(cls, config_dict: Dict, ctx: PipelineContext) -> "KafkaSource": config: KafkaSourceConfig = KafkaSourceConfig.parse_obj(config_dict) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py index 4b1d0403ac776..cdf7c975c0614 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py @@ -19,7 +19,13 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceReport +from datahub.ingestion.api.source import ( + CapabilityReport, + MetadataWorkUnitProcessor, + SourceReport, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.source_helpers import auto_workunit from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import ( @@ -1147,7 +1153,7 @@ def report_to_datahub_work_units( SourceCapability.LINEAGE_FINE, "Disabled by default, configured using `extract_column_level_lineage`. 
", ) -class PowerBiDashboardSource(StatefulIngestionSourceBase): +class PowerBiDashboardSource(StatefulIngestionSourceBase, TestableSource): """ This plugin extracts the following: - Power BI dashboards, tiles and datasets @@ -1186,6 +1192,18 @@ def __init__(self, config: PowerBiDashboardSourceConfig, ctx: PipelineContext): self, self.source_config, self.ctx ) + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + PowerBiAPI(PowerBiDashboardSourceConfig.parse_obj_allow_extras(config_dict)) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + @classmethod def create(cls, config_dict, ctx): config = PowerBiDashboardSourceConfig.parse_obj(config_dict) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 590bc7f696784..a831dfa50342d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -15,6 +15,7 @@ Tuple, Type, Union, + cast, ) import sqlalchemy.dialects.postgresql.base @@ -35,7 +36,12 @@ from datahub.emitter.sql_parsing_builder import SqlParsingBuilder from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.incremental_lineage_helper import auto_incremental_lineage -from datahub.ingestion.api.source import MetadataWorkUnitProcessor +from datahub.ingestion.api.source import ( + CapabilityReport, + MetadataWorkUnitProcessor, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import ( DatasetContainerSubTypes, @@ -298,7 +304,7 @@ class ProfileMetadata: dataset_name_to_storage_bytes: Dict[str, int] = field(default_factory=dict) -class SQLAlchemySource(StatefulIngestionSourceBase): +class SQLAlchemySource(StatefulIngestionSourceBase, TestableSource): """A Base class for all SQL Sources that use SQLAlchemy to extend""" def __init__(self, config: SQLCommonConfig, ctx: PipelineContext, platform: str): @@ -348,6 +354,22 @@ def __init__(self, config: SQLCommonConfig, ctx: PipelineContext, platform: str) else: self._view_definition_cache = {} + @classmethod + def test_connection(cls, config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source = cast( + SQLAlchemySource, + cls.create(config_dict, PipelineContext(run_id="test_connection")), + ) + list(source.get_inspectors()) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + def warn(self, log: logging.Logger, key: str, reason: str) -> None: self.report.report_warning(key, reason[:100]) log.warning(f"{key} => {reason}") diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index f870e99df27c5..ed5fe543310b8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -58,7 +58,13 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, Source +from datahub.ingestion.api.source import ( + CapabilityReport, + 
MetadataWorkUnitProcessor, + Source, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source import tableau_constant as c from datahub.ingestion.source.common.subtypes import ( @@ -469,7 +475,7 @@ class TableauSourceReport(StaleEntityRemovalSourceReport): SourceCapability.LINEAGE_FINE, "Enabled by default, configure using `extract_column_level_lineage`", ) -class TableauSource(StatefulIngestionSourceBase): +class TableauSource(StatefulIngestionSourceBase, TestableSource): platform = "tableau" def __hash__(self): @@ -509,6 +515,19 @@ def __init__( self._authenticate() + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source_config = TableauConfig.parse_obj_allow_extras(config_dict) + source_config.make_tableau_client() + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + def close(self) -> None: try: if self.server is not None: diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py index ccc4e115729a2..46bd24c7e1f4c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py @@ -143,7 +143,7 @@ def _check_oauth_config(oauth_config: Optional[OAuthConfiguration]) -> None: "'oauth_config' is none but should be set when using OAUTH_AUTHENTICATOR authentication" ) if oauth_config.use_certificate is True: - if oauth_config.provider == OAuthIdentityProvider.OKTA.value: + if oauth_config.provider == OAuthIdentityProvider.OKTA: raise ValueError( "Certificate authentication is not supported for Okta." 
) diff --git a/metadata-ingestion/tests/integration/dbt/test_dbt.py b/metadata-ingestion/tests/integration/dbt/test_dbt.py index 95b5374bbb41d..587831495c1ea 100644 --- a/metadata-ingestion/tests/integration/dbt/test_dbt.py +++ b/metadata-ingestion/tests/integration/dbt/test_dbt.py @@ -10,20 +10,25 @@ from datahub.ingestion.run.pipeline import Pipeline from datahub.ingestion.run.pipeline_config import PipelineConfig, SourceConfig from datahub.ingestion.source.dbt.dbt_common import DBTEntitiesEnabled, EmitDirective -from datahub.ingestion.source.dbt.dbt_core import DBTCoreConfig +from datahub.ingestion.source.dbt.dbt_core import DBTCoreConfig, DBTCoreSource from datahub.ingestion.source.sql.sql_types import ( ATHENA_SQL_TYPES_MAP, TRINO_SQL_TYPES_MAP, resolve_athena_modified_type, resolve_trino_modified_type, ) -from tests.test_helpers import mce_helpers +from tests.test_helpers import mce_helpers, test_connection_helpers FROZEN_TIME = "2022-02-03 07:00:00" GMS_PORT = 8080 GMS_SERVER = f"http://localhost:{GMS_PORT}" +@pytest.fixture(scope="module") +def test_resources_dir(pytestconfig): + return pytestconfig.rootpath / "tests/integration/dbt" + + @dataclass class DbtTestConfig: run_id: str @@ -195,7 +200,14 @@ def set_paths( ) @pytest.mark.integration @freeze_time(FROZEN_TIME) -def test_dbt_ingest(dbt_test_config, pytestconfig, tmp_path, mock_time, requests_mock): +def test_dbt_ingest( + dbt_test_config, + test_resources_dir, + pytestconfig, + tmp_path, + mock_time, + requests_mock, +): config: DbtTestConfig = dbt_test_config test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" @@ -233,11 +245,48 @@ def test_dbt_ingest(dbt_test_config, pytestconfig, tmp_path, mock_time, requests ) +@pytest.mark.parametrize( + "config_dict, is_success", + [ + ( + { + "manifest_path": "dbt_manifest.json", + "catalog_path": "dbt_catalog.json", + "target_platform": "postgres", + }, + True, + ), + ( + { + "manifest_path": "dbt_manifest.json", + "catalog_path": "dbt_catalog-this-file-does-not-exist.json", + "target_platform": "postgres", + }, + False, + ), + ], +) @pytest.mark.integration @freeze_time(FROZEN_TIME) -def test_dbt_tests(pytestconfig, tmp_path, mock_time, **kwargs): - test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" +def test_dbt_test_connection(test_resources_dir, config_dict, is_success): + config_dict["manifest_path"] = str( + (test_resources_dir / config_dict["manifest_path"]).resolve() + ) + config_dict["catalog_path"] = str( + (test_resources_dir / config_dict["catalog_path"]).resolve() + ) + report = test_connection_helpers.run_test_connection(DBTCoreSource, config_dict) + if is_success: + test_connection_helpers.assert_basic_connectivity_success(report) + else: + test_connection_helpers.assert_basic_connectivity_failure( + report, "No such file or directory" + ) + +@pytest.mark.integration +@freeze_time(FROZEN_TIME) +def test_dbt_tests(test_resources_dir, pytestconfig, tmp_path, mock_time, **kwargs): # Run the metadata ingestion pipeline. 
output_file = tmp_path / "dbt_test_events.json" golden_path = test_resources_dir / "dbt_test_events_golden.json" @@ -340,9 +389,9 @@ def test_resolve_athena_modified_type(data_type, expected_data_type): @pytest.mark.integration @freeze_time(FROZEN_TIME) -def test_dbt_tests_only_assertions(pytestconfig, tmp_path, mock_time, **kwargs): - test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" - +def test_dbt_tests_only_assertions( + test_resources_dir, pytestconfig, tmp_path, mock_time, **kwargs +): # Run the metadata ingestion pipeline. output_file = tmp_path / "test_only_assertions.json" @@ -418,10 +467,8 @@ def test_dbt_tests_only_assertions(pytestconfig, tmp_path, mock_time, **kwargs): @pytest.mark.integration @freeze_time(FROZEN_TIME) def test_dbt_only_test_definitions_and_results( - pytestconfig, tmp_path, mock_time, **kwargs + test_resources_dir, pytestconfig, tmp_path, mock_time, **kwargs ): - test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" - # Run the metadata ingestion pipeline. output_file = tmp_path / "test_only_definitions_and_assertions.json" diff --git a/metadata-ingestion/tests/integration/kafka/test_kafka.py b/metadata-ingestion/tests/integration/kafka/test_kafka.py index 63d284801c94c..dfdbea5de5cbf 100644 --- a/metadata-ingestion/tests/integration/kafka/test_kafka.py +++ b/metadata-ingestion/tests/integration/kafka/test_kafka.py @@ -3,18 +3,22 @@ import pytest from freezegun import freeze_time -from tests.test_helpers import mce_helpers +from datahub.ingestion.api.source import SourceCapability +from datahub.ingestion.source.kafka import KafkaSource +from tests.test_helpers import mce_helpers, test_connection_helpers from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import wait_for_port FROZEN_TIME = "2020-04-14 07:00:00" -@freeze_time(FROZEN_TIME) -@pytest.mark.integration -def test_kafka_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time): - test_resources_dir = pytestconfig.rootpath / "tests/integration/kafka" +@pytest.fixture(scope="module") +def test_resources_dir(pytestconfig): + return pytestconfig.rootpath / "tests/integration/kafka" + +@pytest.fixture(scope="module") +def mock_kafka_service(docker_compose_runner, test_resources_dir): with docker_compose_runner( test_resources_dir / "docker-compose.yml", "kafka", cleanup=False ) as docker_services: @@ -31,14 +35,67 @@ def test_kafka_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time): command = f"{test_resources_dir}/send_records.sh {test_resources_dir}" subprocess.run(command, shell=True, check=True) - # Run the metadata ingestion pipeline. - config_file = (test_resources_dir / "kafka_to_file.yml").resolve() - run_datahub_cmd(["ingest", "-c", f"{config_file}"], tmp_path=tmp_path) + yield docker_compose_runner + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_kafka_ingest( + mock_kafka_service, test_resources_dir, pytestconfig, tmp_path, mock_time +): + # Run the metadata ingestion pipeline. + config_file = (test_resources_dir / "kafka_to_file.yml").resolve() + run_datahub_cmd(["ingest", "-c", f"{config_file}"], tmp_path=tmp_path) - # Verify the output. - mce_helpers.check_golden_file( - pytestconfig, - output_path=tmp_path / "kafka_mces.json", - golden_path=test_resources_dir / "kafka_mces_golden.json", - ignore_paths=[], + # Verify the output. 
+ mce_helpers.check_golden_file( + pytestconfig, + output_path=tmp_path / "kafka_mces.json", + golden_path=test_resources_dir / "kafka_mces_golden.json", + ignore_paths=[], + ) + + +@pytest.mark.parametrize( + "config_dict, is_success", + [ + ( + { + "connection": { + "bootstrap": "localhost:29092", + "schema_registry_url": "http://localhost:28081", + }, + }, + True, + ), + ( + { + "connection": { + "bootstrap": "localhost:2909", + "schema_registry_url": "http://localhost:2808", + }, + }, + False, + ), + ], +) +@pytest.mark.integration +@freeze_time(FROZEN_TIME) +def test_kafka_test_connection(mock_kafka_service, config_dict, is_success): + report = test_connection_helpers.run_test_connection(KafkaSource, config_dict) + if is_success: + test_connection_helpers.assert_basic_connectivity_success(report) + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[SourceCapability.SCHEMA_METADATA], + ) + else: + test_connection_helpers.assert_basic_connectivity_failure( + report, "Failed to get metadata" + ) + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + failure_capabilities={ + SourceCapability.SCHEMA_METADATA: "Failed to establish a new connection" + }, ) diff --git a/metadata-ingestion/tests/integration/mysql/test_mysql.py b/metadata-ingestion/tests/integration/mysql/test_mysql.py index 23fd97ff2671e..c19198c7d2bbd 100644 --- a/metadata-ingestion/tests/integration/mysql/test_mysql.py +++ b/metadata-ingestion/tests/integration/mysql/test_mysql.py @@ -3,7 +3,8 @@ import pytest from freezegun import freeze_time -from tests.test_helpers import mce_helpers +from datahub.ingestion.source.sql.mysql import MySQLSource +from tests.test_helpers import mce_helpers, test_connection_helpers from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import wait_for_port @@ -75,3 +76,38 @@ def test_mysql_ingest_no_db( output_path=tmp_path / "mysql_mces.json", golden_path=test_resources_dir / golden_file, ) + + +@pytest.mark.parametrize( + "config_dict, is_success", + [ + ( + { + "host_port": "localhost:53307", + "database": "northwind", + "username": "root", + "password": "example", + }, + True, + ), + ( + { + "host_port": "localhost:5330", + "database": "wrong_db", + "username": "wrong_user", + "password": "wrong_pass", + }, + False, + ), + ], +) +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_mysql_test_connection(mysql_runner, config_dict, is_success): + report = test_connection_helpers.run_test_connection(MySQLSource, config_dict) + if is_success: + test_connection_helpers.assert_basic_connectivity_success(report) + else: + test_connection_helpers.assert_basic_connectivity_failure( + report, "Connection refused" + ) diff --git a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py index b2cbccf983eb0..4e8469f919db9 100644 --- a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py +++ b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py @@ -21,7 +21,7 @@ Report, Workspace, ) -from tests.test_helpers import mce_helpers +from tests.test_helpers import mce_helpers, test_connection_helpers pytestmark = pytest.mark.integration_batch_2 FROZEN_TIME = "2022-02-03 07:00:00" @@ -681,6 +681,27 @@ def test_powerbi_ingest( ) +@freeze_time(FROZEN_TIME) +@mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) +@pytest.mark.integration +def 
test_powerbi_test_connection_success(mock_msal): + report = test_connection_helpers.run_test_connection( + PowerBiDashboardSource, default_source_config() + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_powerbi_test_connection_failure(): + report = test_connection_helpers.run_test_connection( + PowerBiDashboardSource, default_source_config() + ) + test_connection_helpers.assert_basic_connectivity_failure( + report, "Unable to get authority configuration" + ) + + @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration diff --git a/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py b/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py index 0510f4a40f659..90fa71013338d 100644 --- a/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py +++ b/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py @@ -28,7 +28,7 @@ ) from datahub.metadata.schema_classes import MetadataChangeProposalClass, UpstreamClass from datahub.utilities.sqlglot_lineage import SqlParsingResult -from tests.test_helpers import mce_helpers +from tests.test_helpers import mce_helpers, test_connection_helpers from tests.test_helpers.state_helpers import ( get_current_checkpoint_from_pipeline, validate_all_providers_have_committed_successfully, @@ -290,6 +290,25 @@ def test_tableau_ingest(pytestconfig, tmp_path, mock_datahub_graph): ) +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_tableau_test_connection_success(): + with mock.patch("datahub.ingestion.source.tableau.Server"): + report = test_connection_helpers.run_test_connection( + TableauSource, config_source_default + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_tableau_test_connection_failure(): + report = test_connection_helpers.run_test_connection( + TableauSource, config_source_default + ) + test_connection_helpers.assert_basic_connectivity_failure(report, "Unable to login") + + @freeze_time(FROZEN_TIME) @pytest.mark.integration def test_tableau_cll_ingest(pytestconfig, tmp_path, mock_datahub_graph): diff --git a/metadata-ingestion/tests/test_helpers/test_connection_helpers.py b/metadata-ingestion/tests/test_helpers/test_connection_helpers.py new file mode 100644 index 0000000000000..45543033ae010 --- /dev/null +++ b/metadata-ingestion/tests/test_helpers/test_connection_helpers.py @@ -0,0 +1,47 @@ +from typing import Dict, List, Optional, Type, Union + +from datahub.ingestion.api.source import ( + CapabilityReport, + SourceCapability, + TestableSource, + TestConnectionReport, +) + + +def run_test_connection( + source_cls: Type[TestableSource], config_dict: Dict +) -> TestConnectionReport: + return source_cls.test_connection(config_dict) + + +def assert_basic_connectivity_success(report: TestConnectionReport) -> None: + assert report is not None + assert report.basic_connectivity + assert report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason is None + + +def assert_basic_connectivity_failure( + report: TestConnectionReport, expected_reason: str +) -> None: + assert report is not None + assert report.basic_connectivity + assert not report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason + assert expected_reason in report.basic_connectivity.failure_reason + + +def assert_capability_report( + 
capability_report: Optional[Dict[Union[SourceCapability, str], CapabilityReport]], + success_capabilities: List[SourceCapability] = [], + failure_capabilities: Dict[SourceCapability, str] = {}, +) -> None: + assert capability_report + for capability in success_capabilities: + assert capability_report[capability] + assert capability_report[capability].failure_reason is None + for capability, expected_reason in failure_capabilities.items(): + assert not capability_report[capability].capable + failure_reason = capability_report[capability].failure_reason + assert failure_reason + assert expected_reason in failure_reason diff --git a/metadata-ingestion/tests/unit/test_snowflake_source.py b/metadata-ingestion/tests/unit/test_snowflake_source.py index 343f4466fd6fd..536c91ace4f5e 100644 --- a/metadata-ingestion/tests/unit/test_snowflake_source.py +++ b/metadata-ingestion/tests/unit/test_snowflake_source.py @@ -1,3 +1,4 @@ +from typing import Any, Dict from unittest.mock import MagicMock, patch import pytest @@ -24,10 +25,20 @@ SnowflakeObjectAccessEntry, ) from datahub.ingestion.source.snowflake.snowflake_v2 import SnowflakeV2Source +from tests.test_helpers import test_connection_helpers + +default_oauth_dict: Dict[str, Any] = { + "client_id": "client_id", + "client_secret": "secret", + "use_certificate": False, + "provider": "microsoft", + "scopes": ["datahub_role"], + "authority_url": "https://dev-abc.okta.com/oauth2/def/v1/token", +} def test_snowflake_source_throws_error_on_account_id_missing(): - with pytest.raises(ValidationError): + with pytest.raises(ValidationError, match="account_id\n field required"): SnowflakeV2Config.parse_obj( { "username": "user", @@ -37,27 +48,21 @@ def test_snowflake_source_throws_error_on_account_id_missing(): def test_no_client_id_invalid_oauth_config(): - oauth_dict = { - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "client_secret": "6Hb9apkbc6HD7", - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - } - with pytest.raises(ValueError): + oauth_dict = default_oauth_dict.copy() + del oauth_dict["client_id"] + with pytest.raises(ValueError, match="client_id\n field required"): OAuthConfiguration.parse_obj(oauth_dict) def test_snowflake_throws_error_on_client_secret_missing_if_use_certificate_is_false(): - oauth_dict = { - "client_id": "882e9831-7ea51cb2b954", - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": False, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - } + oauth_dict = default_oauth_dict.copy() + del oauth_dict["client_secret"] OAuthConfiguration.parse_obj(oauth_dict) - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="'oauth_config.client_secret' was none but should be set when using use_certificate false for oauth_config", + ): SnowflakeV2Config.parse_obj( { "account_id": "test", @@ -68,16 +73,13 @@ def test_snowflake_throws_error_on_client_secret_missing_if_use_certificate_is_f def test_snowflake_throws_error_on_encoded_oauth_private_key_missing_if_use_certificate_is_true(): - oauth_dict = { - "client_id": "882e9831-7ea51cb2b954", - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": True, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - "encoded_oauth_public_key": "fkdsfhkshfkjsdfiuwrwfkjhsfskfhksjf==", - } + oauth_dict = default_oauth_dict.copy() + oauth_dict["use_certificate"] 
= True OAuthConfiguration.parse_obj(oauth_dict) - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="'base64_encoded_oauth_private_key' was none but should be set when using certificate for oauth_config", + ): SnowflakeV2Config.parse_obj( { "account_id": "test", @@ -88,16 +90,13 @@ def test_snowflake_throws_error_on_encoded_oauth_private_key_missing_if_use_cert def test_snowflake_oauth_okta_does_not_support_certificate(): - oauth_dict = { - "client_id": "882e9831-7ea51cb2b954", - "provider": "okta", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": True, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - "encoded_oauth_public_key": "fkdsfhkshfkjsdfiuwrwfkjhsfskfhksjf==", - } + oauth_dict = default_oauth_dict.copy() + oauth_dict["use_certificate"] = True + oauth_dict["provider"] = "okta" OAuthConfiguration.parse_obj(oauth_dict) - with pytest.raises(ValueError): + with pytest.raises( + ValueError, match="Certificate authentication is not supported for Okta." + ): SnowflakeV2Config.parse_obj( { "account_id": "test", @@ -108,79 +107,52 @@ def test_snowflake_oauth_okta_does_not_support_certificate(): def test_snowflake_oauth_happy_paths(): - okta_dict = { - "client_id": "client_id", - "client_secret": "secret", - "provider": "okta", - "scopes": ["datahub_role"], - "authority_url": "https://dev-abc.okta.com/oauth2/def/v1/token", - } + oauth_dict = default_oauth_dict.copy() + oauth_dict["provider"] = "okta" assert SnowflakeV2Config.parse_obj( { "account_id": "test", "authentication_type": "OAUTH_AUTHENTICATOR", - "oauth_config": okta_dict, + "oauth_config": oauth_dict, } ) - - microsoft_dict = { - "client_id": "client_id", - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": True, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - "encoded_oauth_public_key": "publickey", - "encoded_oauth_private_key": "privatekey", - } + oauth_dict["use_certificate"] = True + oauth_dict["provider"] = "microsoft" + oauth_dict["encoded_oauth_public_key"] = "publickey" + oauth_dict["encoded_oauth_private_key"] = "privatekey" assert SnowflakeV2Config.parse_obj( { "account_id": "test", "authentication_type": "OAUTH_AUTHENTICATOR", - "oauth_config": microsoft_dict, + "oauth_config": oauth_dict, } ) +default_config_dict: Dict[str, Any] = { + "username": "user", + "password": "password", + "account_id": "https://acctname.snowflakecomputing.com", + "warehouse": "COMPUTE_WH", + "role": "sysadmin", +} + + def test_account_id_is_added_when_host_port_is_present(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "host_port": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config_dict = default_config_dict.copy() + del config_dict["account_id"] + config_dict["host_port"] = "acctname" + config = SnowflakeV2Config.parse_obj(config_dict) assert config.account_id == "acctname" def test_account_id_with_snowflake_host_suffix(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "https://acctname.snowflakecomputing.com", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config = SnowflakeV2Config.parse_obj(default_config_dict) assert config.account_id == "acctname" def test_snowflake_uri_default_authentication(): - config = 
SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) - + config = SnowflakeV2Config.parse_obj(default_config_dict) assert config.get_sql_alchemy_url() == ( "snowflake://user:password@acctname" "?application=acryl_datahub" @@ -191,17 +163,10 @@ def test_snowflake_uri_default_authentication(): def test_snowflake_uri_external_browser_authentication(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "authentication_type": "EXTERNAL_BROWSER_AUTHENTICATOR", - } - ) - + config_dict = default_config_dict.copy() + del config_dict["password"] + config_dict["authentication_type"] = "EXTERNAL_BROWSER_AUTHENTICATOR" + config = SnowflakeV2Config.parse_obj(config_dict) assert config.get_sql_alchemy_url() == ( "snowflake://user@acctname" "?application=acryl_datahub" @@ -212,18 +177,12 @@ def test_snowflake_uri_external_browser_authentication(): def test_snowflake_uri_key_pair_authentication(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "authentication_type": "KEY_PAIR_AUTHENTICATOR", - "private_key_path": "/a/random/path", - "private_key_password": "a_random_password", - } - ) + config_dict = default_config_dict.copy() + del config_dict["password"] + config_dict["authentication_type"] = "KEY_PAIR_AUTHENTICATOR" + config_dict["private_key_path"] = "/a/random/path" + config_dict["private_key_password"] = "a_random_password" + config = SnowflakeV2Config.parse_obj(config_dict) assert config.get_sql_alchemy_url() == ( "snowflake://user@acctname" @@ -235,63 +194,35 @@ def test_snowflake_uri_key_pair_authentication(): def test_options_contain_connect_args(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config = SnowflakeV2Config.parse_obj(default_config_dict) connect_args = config.get_options().get("connect_args") assert connect_args is not None def test_snowflake_config_with_view_lineage_no_table_lineage_throws_error(): - with pytest.raises(ValidationError): - SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "include_view_lineage": True, - "include_table_lineage": False, - } - ) + config_dict = default_config_dict.copy() + config_dict["include_view_lineage"] = True + config_dict["include_table_lineage"] = False + with pytest.raises( + ValidationError, + match="include_table_lineage must be True for include_view_lineage to be set", + ): + SnowflakeV2Config.parse_obj(config_dict) def test_snowflake_config_with_column_lineage_no_table_lineage_throws_error(): - with pytest.raises(ValidationError): - SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "include_column_lineage": True, - "include_table_lineage": False, - } - ) + config_dict = default_config_dict.copy() + 
config_dict["include_column_lineage"] = True + config_dict["include_table_lineage"] = False + with pytest.raises( + ValidationError, + match="include_table_lineage must be True for include_column_lineage to be set", + ): + SnowflakeV2Config.parse_obj(config_dict) def test_snowflake_config_with_no_connect_args_returns_base_connect_args(): - config: SnowflakeV2Config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config: SnowflakeV2Config = SnowflakeV2Config.parse_obj(default_config_dict) assert config.get_options()["connect_args"] is not None assert config.get_options()["connect_args"] == { CLIENT_PREFETCH_THREADS: 10, @@ -300,7 +231,10 @@ def test_snowflake_config_with_no_connect_args_returns_base_connect_args(): def test_private_key_set_but_auth_not_changed(): - with pytest.raises(ValidationError): + with pytest.raises( + ValidationError, + match="Either `private_key` and `private_key_path` is set but `authentication_type` is DEFAULT_AUTHENTICATOR. Should be set to 'KEY_PAIR_AUTHENTICATOR' when using key pair authentication", + ): SnowflakeV2Config.parse_obj( { "account_id": "acctname", @@ -310,19 +244,11 @@ def test_private_key_set_but_auth_not_changed(): def test_snowflake_config_with_connect_args_overrides_base_connect_args(): - config: SnowflakeV2Config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "connect_args": { - CLIENT_PREFETCH_THREADS: 5, - }, - } - ) + config_dict = default_config_dict.copy() + config_dict["connect_args"] = { + CLIENT_PREFETCH_THREADS: 5, + } + config: SnowflakeV2Config = SnowflakeV2Config.parse_obj(config_dict) assert config.get_options()["connect_args"] is not None assert config.get_options()["connect_args"][CLIENT_PREFETCH_THREADS] == 5 assert config.get_options()["connect_args"][CLIENT_SESSION_KEEP_ALIVE] is True @@ -331,35 +257,20 @@ def test_snowflake_config_with_connect_args_overrides_base_connect_args(): @patch("snowflake.connector.connect") def test_test_connection_failure(mock_connect): mock_connect.side_effect = Exception("Failed to connect to snowflake") - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert not report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason - assert "Failed to connect to snowflake" in report.basic_connectivity.failure_reason + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_failure( + report, "Failed to connect to snowflake" + ) @patch("snowflake.connector.connect") def test_test_connection_basic_success(mock_connect): - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, 
default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) def setup_mock_connect(mock_connect, query_results=None): @@ -400,31 +311,18 @@ def query_results(query): return [] raise ValueError(f"Unexpected query: {query}") - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } setup_mock_connect(mock_connect, query_results) - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - - assert report.capability_report - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert not report.capability_report[SourceCapability.SCHEMA_METADATA].capable - failure_reason = report.capability_report[ - SourceCapability.SCHEMA_METADATA - ].failure_reason - assert failure_reason - - assert ( - "Current role TEST_ROLE does not have permissions to use warehouse" - in failure_reason + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[SourceCapability.CONTAINERS], + failure_capabilities={ + SourceCapability.SCHEMA_METADATA: "Current role TEST_ROLE does not have permissions to use warehouse" + }, ) @@ -445,25 +343,17 @@ def query_results(query): setup_mock_connect(mock_connect, query_results) - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - assert report.capability_report - - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert not report.capability_report[SourceCapability.SCHEMA_METADATA].capable - assert ( - report.capability_report[SourceCapability.SCHEMA_METADATA].failure_reason - is not None + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[SourceCapability.CONTAINERS], + failure_capabilities={ + SourceCapability.SCHEMA_METADATA: "Either no tables exist or current role does not have permissions to access them" + }, ) @@ -488,24 +378,19 @@ def query_results(query): setup_mock_connect(mock_connect, query_results) - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - assert report.capability_report - - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert report.capability_report[SourceCapability.SCHEMA_METADATA].capable - assert report.capability_report[SourceCapability.DESCRIPTIONS].capable + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, 
default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[ + SourceCapability.CONTAINERS, + SourceCapability.SCHEMA_METADATA, + SourceCapability.DESCRIPTIONS, + ], + ) @patch("snowflake.connector.connect") @@ -538,25 +423,21 @@ def query_results(query): setup_mock_connect(mock_connect, query_results) - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - assert report.capability_report - - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert report.capability_report[SourceCapability.SCHEMA_METADATA].capable - assert report.capability_report[SourceCapability.DATA_PROFILING].capable - assert report.capability_report[SourceCapability.DESCRIPTIONS].capable - assert report.capability_report[SourceCapability.LINEAGE_COARSE].capable + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[ + SourceCapability.CONTAINERS, + SourceCapability.SCHEMA_METADATA, + SourceCapability.DATA_PROFILING, + SourceCapability.DESCRIPTIONS, + SourceCapability.LINEAGE_COARSE, + ], + ) def test_aws_cloud_region_from_snowflake_region_id(): @@ -610,11 +491,10 @@ def test_azure_cloud_region_from_snowflake_region_id(): def test_unknown_cloud_region_from_snowflake_region_id(): - with pytest.raises(Exception) as e: + with pytest.raises(Exception, match="Unknown snowflake region"): SnowflakeV2Source.get_cloud_region_from_snowflake_region_id( "somecloud_someregion" ) - assert "Unknown snowflake region" in str(e) def test_snowflake_object_access_entry_missing_object_id(): diff --git a/metadata-ingestion/tests/unit/test_sql_common.py b/metadata-ingestion/tests/unit/test_sql_common.py index e23d290b611f4..a98bf64171122 100644 --- a/metadata-ingestion/tests/unit/test_sql_common.py +++ b/metadata-ingestion/tests/unit/test_sql_common.py @@ -1,8 +1,7 @@ from typing import Dict -from unittest.mock import Mock +from unittest import mock import pytest -from sqlalchemy.engine.reflection import Inspector from datahub.ingestion.source.sql.sql_common import PipelineContext, SQLAlchemySource from datahub.ingestion.source.sql.sql_config import SQLCommonConfig @@ -13,19 +12,24 @@ class _TestSQLAlchemyConfig(SQLCommonConfig): def get_sql_alchemy_url(self): - pass + return "mysql+pymysql://user:pass@localhost:5330" class _TestSQLAlchemySource(SQLAlchemySource): - pass + @classmethod + def create(cls, config_dict, ctx): + config = _TestSQLAlchemyConfig.parse_obj(config_dict) + return cls(config, ctx, "TEST") + + +def get_test_sql_alchemy_source(): + return _TestSQLAlchemySource.create( + config_dict={}, ctx=PipelineContext(run_id="test_ctx") + ) def test_generate_foreign_key(): - config: SQLCommonConfig = _TestSQLAlchemyConfig() - ctx: PipelineContext = PipelineContext(run_id="test_ctx") - platform: str = "TEST" - inspector: Inspector = Mock() - source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) + source = 
get_test_sql_alchemy_source() fk_dict: Dict[str, str] = { "name": "test_constraint", "referred_table": "test_table", @@ -37,7 +41,7 @@ def test_generate_foreign_key(): dataset_urn="test_urn", schema="test_schema", fk_dict=fk_dict, - inspector=inspector, + inspector=mock.Mock(), ) assert fk_dict.get("name") == foreign_key.name @@ -48,11 +52,7 @@ def test_generate_foreign_key(): def test_use_source_schema_for_foreign_key_if_not_specified(): - config: SQLCommonConfig = _TestSQLAlchemyConfig() - ctx: PipelineContext = PipelineContext(run_id="test_ctx") - platform: str = "TEST" - inspector: Inspector = Mock() - source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) + source = get_test_sql_alchemy_source() fk_dict: Dict[str, str] = { "name": "test_constraint", "referred_table": "test_table", @@ -63,7 +63,7 @@ def test_use_source_schema_for_foreign_key_if_not_specified(): dataset_urn="test_urn", schema="test_schema", fk_dict=fk_dict, - inspector=inspector, + inspector=mock.Mock(), ) assert fk_dict.get("name") == foreign_key.name @@ -105,14 +105,32 @@ def test_get_platform_from_sqlalchemy_uri(uri: str, expected_platform: str) -> N def test_get_db_schema_with_dots_in_view_name(): - config: SQLCommonConfig = _TestSQLAlchemyConfig() - ctx: PipelineContext = PipelineContext(run_id="test_ctx") - platform: str = "TEST" - source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) - + source = get_test_sql_alchemy_source() database, schema = source.get_db_schema( dataset_identifier="database.schema.long.view.name1" ) - assert database == "database" assert schema == "schema" + + +def test_test_connection_success(): + source = get_test_sql_alchemy_source() + with mock.patch( + "datahub.ingestion.source.sql.sql_common.SQLAlchemySource.get_inspectors", + side_effect=lambda: [], + ): + report = source.test_connection({}) + assert report is not None + assert report.basic_connectivity + assert report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason is None + + +def test_test_connection_failure(): + source = get_test_sql_alchemy_source() + report = source.test_connection({}) + assert report is not None + assert report.basic_connectivity + assert not report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason + assert "Connection refused" in report.basic_connectivity.failure_reason From 26114dfeb2d255f1b2a562396908f48c8dd0ad64 Mon Sep 17 00:00:00 2001 From: naoki kuroda <68233204+nnnkkk7@users.noreply.github.com> Date: Fri, 15 Dec 2023 05:42:45 +0900 Subject: [PATCH 253/792] docs: fix sample command for container logs (#9427) --- docs/how/extract-container-logs.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/how/extract-container-logs.md b/docs/how/extract-container-logs.md index 9251d0665c02c..b5fbb4c83cc64 100644 --- a/docs/how/extract-container-logs.md +++ b/docs/how/extract-container-logs.md @@ -86,7 +86,7 @@ Depending on your issue, you may be interested to view both debug and normal inf Since log files are named based on the current date, you'll need to use "ls" to see which files currently exist. To do so, you can use the `kubectl exec` command, using the pod name recorded in step one: ``` -kubectl exec datahub-frontend-1231ead-6767 -n default -- ls -la /tmp/datahub/logs/gms +kubectl exec datahub-gms-c578b47cd-7676 -n default -- ls -la /tmp/datahub/logs/gms total 36388 drwxr-xr-x 2 datahub datahub 4096 Jul 29 07:45 . @@ -131,5 +131,5 @@ Now you should be able to view the logs locally. 
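If you only want to follow new entries rather than copy anything, a quick alternative (reusing the pod name and log path from the example above, and assuming `tail` is present in the container image) is:

```
kubectl exec datahub-gms-c578b47cd-7676 -n default -- tail -f /tmp/datahub/logs/gms/gms.log
```
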
There are a few ways to get files out of the pod and into a local file. You can either use `kubectl cp` or simply `cat` and pipe the file of interest. We'll show an example using the latter approach: ``` -kubectl exec datahub-frontend-1231ead-6767 -n default -- cat /tmp/datahub/logs/gms/gms.log > my-local-gms.log +kubectl exec datahub-gms-c578b47cd-7676 -n default -- cat /tmp/datahub/logs/gms/gms.log > my-local-gms.log ``` \ No newline at end of file From 4354af20126d1befb2c7391c23310a4eca5bb688 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 14 Dec 2023 16:54:40 -0500 Subject: [PATCH 254/792] fix(ingest): bump source configs json schema version (#9424) --- docs-website/genJsonSchema/gen_json_schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs-website/genJsonSchema/gen_json_schema.py b/docs-website/genJsonSchema/gen_json_schema.py index 81c1d5a2c1a30..4af72487644bd 100644 --- a/docs-website/genJsonSchema/gen_json_schema.py +++ b/docs-website/genJsonSchema/gen_json_schema.py @@ -7,7 +7,7 @@ def get_base() -> Any: return { - "$schema": "http://json-schema.org/draft-04/schema#", + "$schema": "https://json-schema.org/draft/2020-12/schema", "id": "https://json.schemastore.org/datahub-ingestion", "title": "Datahub Ingestion", "description": "Root schema of Datahub Ingestion", @@ -116,7 +116,7 @@ def get_base() -> Any: "bootstrap": { "type": "string", "description": "Kafka bootstrap URL.", - "default": "localhost:9092" + "default": "localhost:9092", }, "producer_config": { "type": "object", @@ -125,7 +125,7 @@ def get_base() -> Any: "schema_registry_url": { "type": "string", "description": "URL of schema registry being used.", - "default": "http://localhost:8081" + "default": "http://localhost:8081", }, "schema_registry_config": { "type": "object", From 0ea6145a9d491a1b882ba5a7a4667fb323d31dc4 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Fri, 15 Dec 2023 00:12:45 +0100 Subject: [PATCH 255/792] fix(ingest/profiling): Add option to enable external table profiling (#9463) --- .../datahub/ingestion/source/ge_profiling_config.py | 5 +++++ .../src/datahub/ingestion/source/redshift/profile.py | 9 +++++++++ .../ingestion/source/snowflake/snowflake_profiler.py | 10 ++++++++++ .../ingestion/source/snowflake/snowflake_schema.py | 3 +++ .../ingestion/source/sql/sql_generic_profiler.py | 3 +++ .../tests/integration/snowflake/common.py | 1 + 6 files changed, 31 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py b/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py index 24a3e520d8caf..f340a7b41b7af 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py @@ -167,6 +167,11 @@ class GEProfilingConfig(ConfigModel): "Applicable only if `use_sampling` is set to True.", ) + profile_external_tables: bool = Field( + default=False, + description="Whether to profile external tables. Only Snowflake and Redshift supports this.", + ) + @pydantic.root_validator(pre=True) def deprecate_bigquery_temp_table_schema(cls, values): # TODO: Update docs to remove mention of this field. 
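The hunk above adds the config field and the hunks below wire `profile_external_tables` through the Redshift and Snowflake profilers. As a usage sketch (a minimal recipe with placeholder connection values — `acctname`, `user`, and `password` are hypothetical, not part of this change), the flag would be enabled under `profiling`:

```python
from datahub.ingestion.run.pipeline import Pipeline

# Minimal sketch: opt a Snowflake source into external-table profiling.
# All connection values are hypothetical placeholders.
pipeline = Pipeline.create(
    {
        "run_id": "profile-external-tables-demo",
        "source": {
            "type": "snowflake",
            "config": {
                "account_id": "acctname",
                "username": "user",
                "password": "password",
                "profiling": {
                    "enabled": True,
                    # New option from this change; defaults to False and is
                    # honored only by the Snowflake and Redshift sources.
                    "profile_external_tables": True,
                },
            },
        },
        "sink": {"type": "console"},
    }
)
pipeline.run()
```

Because the field defaults to False, existing recipes keep skipping external tables unless they opt in.
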
diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py index b05850cef6e94..eed82ec4d83e7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py @@ -48,6 +48,15 @@ def get_workunits( if not self.config.schema_pattern.allowed(schema): continue for table in tables[db].get(schema, {}): + if ( + not self.config.profiling.profile_external_tables + and table.type == "EXTERNAL_TABLE" + ): + self.report.profiling_skipped_other[schema] += 1 + logger.info( + f"Skipping profiling of external table {db}.{schema}.{table.name}" + ) + continue # Emit the profile work unit profile_request = self.get_profile_request(table, schema, db) if profile_request is not None: diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 89857c4564267..4bda7da422e9d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -50,6 +50,16 @@ def get_workunits( profile_requests = [] for schema in database.schemas: for table in db_tables[schema.name]: + if ( + not self.config.profiling.profile_external_tables + and table.type == "EXTERNAL TABLE" + ): + logger.info( + f"Skipping profiling of external table {database.name}.{schema.name}.{table.name}" + ) + self.report.profiling_skipped_other[schema.name] += 1 + continue + profile_request = self.get_profile_request( table, schema.name, database.name ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py index e5b214ba35e4b..9526bdec4b05d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py @@ -77,6 +77,7 @@ def get_precise_native_type(self): @dataclass class SnowflakeTable(BaseTable): + type: Optional[str] = None clustering_key: Optional[str] = None pk: Optional[SnowflakePK] = None columns: List[SnowflakeColumn] = field(default_factory=list) @@ -265,6 +266,7 @@ def get_tables_for_database( tables[table["TABLE_SCHEMA"]].append( SnowflakeTable( name=table["TABLE_NAME"], + type=table["TABLE_TYPE"], created=table["CREATED"], last_altered=table["LAST_ALTERED"], size_in_bytes=table["BYTES"], @@ -288,6 +290,7 @@ def get_tables_for_schema( tables.append( SnowflakeTable( name=table["TABLE_NAME"], + type=table["TABLE_TYPE"], created=table["CREATED"], last_altered=table["LAST_ALTERED"], size_in_bytes=table["BYTES"], diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index a2f91e5fae1a9..30fad9ad584c1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -35,6 +35,9 @@ class DetailedProfilerReportMixin: profiling_skipped_row_limit: TopKDict[str, int] = field( default_factory=int_top_k_dict ) + + profiling_skipped_other: TopKDict[str, int] = field(default_factory=int_top_k_dict) + num_tables_not_eligible_profiling: Dict[str, int] = field( default_factory=int_top_k_dict ) diff --git 
a/metadata-ingestion/tests/integration/snowflake/common.py b/metadata-ingestion/tests/integration/snowflake/common.py index b21cea5f0988d..53b87636068bf 100644 --- a/metadata-ingestion/tests/integration/snowflake/common.py +++ b/metadata-ingestion/tests/integration/snowflake/common.py @@ -79,6 +79,7 @@ def default_query_results( # noqa: C901 { "TABLE_SCHEMA": "TEST_SCHEMA", "TABLE_NAME": "TABLE_{}".format(tbl_idx), + "TABLE_TYPE": "BASE TABLE", "CREATED": datetime(2021, 6, 8, 0, 0, 0, 0), "LAST_ALTERED": datetime(2021, 6, 8, 0, 0, 0, 0), "BYTES": 1024, From 6a169357283790e158472957f87f8c6cfbe67136 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 15 Dec 2023 11:23:04 -0600 Subject: [PATCH 256/792] fix(operations): fix get index sizes integer wrap (#9450) --- .../ElasticSearchTimeseriesAspectService.java | 8 +- .../TimeseriesAspectServiceUnitTest.java | 78 +++++++++++++++++++ .../timeseries/TimeseriesIndexSizeResult.pdl | 3 + ...nkedin.operations.operations.snapshot.json | 5 ++ 4 files changed, 90 insertions(+), 4 deletions(-) create mode 100644 metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index eec7680a56ecb..f9ab86d41335d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -206,10 +206,10 @@ public List getIndexSizes() { elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); } - int sizeBytes = - entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); - float sizeMb = (float) sizeBytes / 1000; - elemResult.setSizeMb(sizeMb); + long sizeBytes = + entry.getValue().get("primaries").get("store").get("size_in_bytes").asLong(); + double sizeMb = (double) sizeBytes / 1000000; + elemResult.setSizeInMb(sizeMb); res.add(elemResult); }); return res; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java new file mode 100644 index 0000000000000..a23267dcf6f55 --- /dev/null +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java @@ -0,0 +1,78 @@ +package com.linkedin.metadata.timeseries.search; + +import static org.mockito.Mockito.*; + +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.NumericNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService; +import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.timeseries.TimeseriesIndexSizeResult; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import 
org.apache.commons.io.IOUtils; +import org.apache.http.HttpEntity; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.client.RestHighLevelClient; +import org.testng.Assert; +import org.testng.annotations.Test; + +/** + * Test using mocks instead of integration for testing functionality not dependent on a real server + * response + */ +public class TimeseriesAspectServiceUnitTest { + + private final RestHighLevelClient _searchClient = mock(RestHighLevelClient.class); + private final IndexConvention _indexConvention = mock(IndexConvention.class); + private final TimeseriesAspectIndexBuilders _timeseriesAspectIndexBuilders = + mock(TimeseriesAspectIndexBuilders.class); + private final EntityRegistry _entityRegistry = mock(EntityRegistry.class); + private final ESBulkProcessor _bulkProcessor = mock(ESBulkProcessor.class); + private final RestClient _restClient = mock(RestClient.class); + private final TimeseriesAspectService _timeseriesAspectService = + new ElasticSearchTimeseriesAspectService( + _searchClient, + _indexConvention, + _timeseriesAspectIndexBuilders, + _entityRegistry, + _bulkProcessor, + 0); + + private static final String INDEX_PATTERN = "indexPattern"; + + @Test + public void testGetIndicesIntegerWrap() throws IOException { + when(_indexConvention.getAllTimeseriesAspectIndicesPattern()).thenReturn(INDEX_PATTERN); + when(_searchClient.getLowLevelClient()).thenReturn(_restClient); + ObjectNode jsonNode = JsonNodeFactory.instance.objectNode(); + ObjectNode indicesNode = JsonNodeFactory.instance.objectNode(); + ObjectNode indexNode = JsonNodeFactory.instance.objectNode(); + ObjectNode primariesNode = JsonNodeFactory.instance.objectNode(); + ObjectNode storeNode = JsonNodeFactory.instance.objectNode(); + NumericNode bytesNode = JsonNodeFactory.instance.numberNode(8078398031L); + storeNode.set("size_in_bytes", bytesNode); + primariesNode.set("store", storeNode); + indexNode.set("primaries", primariesNode); + indicesNode.set("someIndexName", indexNode); + jsonNode.set("indices", indicesNode); + + Response response = mock(Response.class); + HttpEntity responseEntity = mock(HttpEntity.class); + when(response.getEntity()).thenReturn(responseEntity); + when(responseEntity.getContent()) + .thenReturn(IOUtils.toInputStream(jsonNode.toString(), StandardCharsets.UTF_8)); + when(_restClient.performRequest(any(Request.class))).thenReturn(response); + + List results = _timeseriesAspectService.getIndexSizes(); + + Assert.assertEquals(results.get(0).getSizeInMb(), 8078.398031); + } +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl b/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl index b888ef7c0716b..35297314187bf 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl @@ -22,5 +22,8 @@ record TimeseriesIndexSizeResult{ /** * Size */ + @deprecated = "use sizeInMb instead" sizeMb: float = 0 + + sizeInMb: double = 0 } diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index 339ce62de6298..eae0eed2dd50b 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ 
b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -3668,6 +3668,11 @@ "name" : "sizeMb", "type" : "float", "doc" : "Size", + "default" : 0.0, + "deprecated" : "use sizeInMb instead" + }, { + "name" : "sizeInMb", + "type" : "double", "default" : 0.0 } ] }, { From 824df5a6a3e9fed2f18f3e454c40b8d822011b5c Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 15 Dec 2023 13:28:33 -0600 Subject: [PATCH 257/792] feat(build): gradle 8, jdk17, neo4j 5 (#9458) --- .github/workflows/airflow-plugin.yml | 5 + .github/workflows/build-and-test.yml | 4 +- .github/workflows/check-datahub-jars.yml | 4 +- .github/workflows/docker-unified.yml | 39 +- .github/workflows/documentation.yml | 4 +- .github/workflows/metadata-ingestion.yml | 5 + .github/workflows/metadata-io.yml | 4 +- .github/workflows/metadata-model.yml | 5 + .github/workflows/publish-datahub-jars.yml | 4 +- .github/workflows/spark-smoke-test.yml | 4 +- build.gradle | 137 +- buildSrc/build.gradle | 13 +- .../pegasus/gradle/PegasusPlugin.java | 2444 +++++++++++++++++ .../gradle/tasks/ChangedFileReportTask.java | 124 + datahub-frontend/build.gradle | 22 +- datahub-frontend/play.gradle | 19 +- datahub-graphql-core/build.gradle | 3 +- datahub-web-react/build.gradle | 10 +- docker/datahub-frontend/Dockerfile | 7 +- docker/datahub-frontend/start.sh | 2 + docker/datahub-gms/Dockerfile | 4 +- docker/datahub-ingestion/build.gradle | 6 +- docker/datahub-mae-consumer/Dockerfile | 4 +- docker/datahub-mce-consumer/Dockerfile | 4 +- docker/datahub-upgrade/Dockerfile | 4 +- docker/kafka-setup/Dockerfile | 2 +- docs-website/build.gradle | 18 +- docs-website/vercel-setup.sh | 2 +- docs/developers.md | 10 +- docs/how/updating-datahub.md | 4 + docs/troubleshooting/build.md | 4 +- entity-registry/build.gradle | 7 +- gradle/wrapper/gradle-wrapper.properties | 2 +- li-utils/build.gradle | 20 +- metadata-auth/auth-api/build.gradle | 9 +- metadata-events/mxe-utils-avro/build.gradle | 5 +- .../java/datahub-client/build.gradle | 16 +- .../datahub-protobuf-example/build.gradle | 4 - .../java/datahub-protobuf/build.gradle | 8 +- .../java/examples/build.gradle | 16 +- .../java/spark-lineage/build.gradle | 68 +- .../java/spark-lineage/scripts/check_jar.sh | 4 +- .../docker/SparkBase.Dockerfile | 2 +- .../python_test_run.sh | 13 +- .../spark-smoke-test/spark-docker.conf | 4 + .../test-spark-lineage/build.gradle | 11 - .../datahub/spark/TestCoalesceJobLineage.java | 5 +- .../datahub/spark/TestSparkJobsLineage.java | 3 + metadata-io/build.gradle | 5 +- .../graph/neo4j/Neo4jGraphService.java | 4 +- metadata-jobs/mae-consumer/build.gradle | 1 + metadata-jobs/mce-consumer/build.gradle | 3 +- metadata-jobs/pe-consumer/build.gradle | 3 +- metadata-models-custom/build.gradle | 2 +- metadata-models-validator/build.gradle | 4 +- metadata-models/build.gradle | 20 +- metadata-service/auth-config/build.gradle | 4 +- metadata-service/auth-filter/build.gradle | 4 +- metadata-service/auth-impl/build.gradle | 4 +- ...formInstanceFieldResolverProviderTest.java | 4 +- .../auth-servlet-impl/build.gradle | 4 +- metadata-service/factories/build.gradle | 4 +- .../graphql-servlet-impl/build.gradle | 4 +- metadata-service/openapi-servlet/build.gradle | 4 +- metadata-service/plugin/build.gradle | 6 +- .../src/test/sample-test-plugins/build.gradle | 4 +- metadata-service/restli-api/build.gradle | 6 +- metadata-service/restli-client/build.gradle | 6 +- .../restli-servlet-impl/build.gradle | 6 +- 
.../schema-registry-api/build.gradle | 7 +- .../schema-registry-servlet/build.gradle | 4 +- metadata-service/services/build.gradle | 6 +- metadata-service/servlet/build.gradle | 4 +- metadata-utils/build.gradle | 4 +- mock-entity-registry/build.gradle | 4 +- smoke-test/build.gradle | 7 +- test-models/build.gradle | 16 +- vercel.json | 2 +- 78 files changed, 3008 insertions(+), 266 deletions(-) create mode 100644 buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java create mode 100644 buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index d0c0f52781b9a..cd1e159b7d53c 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -49,6 +49,11 @@ jobs: extra_pip_extras: plugin-v2 fail-fast: false steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 10c137a206531..dab64cf2dca5e 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -37,11 +37,11 @@ jobs: with: timezoneLinux: ${{ matrix.timezone }} - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 8e507ea40fd96..46d97ffec8861 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -28,11 +28,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index fef23f9efa85f..169a86000adcc 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -79,6 +79,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -135,6 +140,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -191,6 +201,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -247,6 +262,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -303,6 +323,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 
17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -537,6 +562,11 @@ jobs: needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_slim_build] steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -618,6 +648,11 @@ jobs: needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_full_build] steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -720,11 +755,11 @@ jobs: run: df -h . && docker images - name: Check out the repo uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index c94282938120e..29953b8b70d91 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -27,11 +27,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index ec6bd4141cc6f..4e04fef3b3980 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -44,6 +44,11 @@ jobs: - python-version: "3.10" fail-fast: false steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 48f230ce14c8d..2188fcb07c77a 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -29,11 +29,11 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index eb098a327e4cb..d0112f1b14e7a 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -29,6 +29,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index ec7985ef3b3d0..24d1c5436b315 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ 
b/.github/workflows/publish-datahub-jars.yml @@ -49,11 +49,11 @@ jobs: if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 70b66d6452b26..60e183cce5179 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -30,11 +30,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/build.gradle b/build.gradle index b16e3ca169c71..a7a85db0398e2 100644 --- a/build.gradle +++ b/build.gradle @@ -1,17 +1,20 @@ buildscript { + ext.jdkVersion = 17 + ext.javaClassVersion = 11 + ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.46.8' + ext.pegasusVersion = '29.48.4' ext.mavenVersion = '3.6.3' ext.springVersion = '5.3.29' ext.springBootVersion = '2.7.14' ext.openTelemetryVersion = '1.18.0' - ext.neo4jVersion = '4.4.9' - ext.neo4jTestVersion = '4.4.25' - ext.neo4jApocVersion = '4.4.0.20:all' + ext.neo4jVersion = '5.14.0' + ext.neo4jTestVersion = '5.14.0' + ext.neo4jApocVersion = '5.14.0' ext.testContainersVersion = '1.17.4' ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x - ext.jacksonVersion = '2.15.2' + ext.jacksonVersion = '2.15.3' ext.jettyVersion = '9.4.46.v20220331' ext.playVersion = '2.8.18' ext.log4jVersion = '2.19.0' @@ -29,19 +32,19 @@ buildscript { buildscript.repositories.addAll(project.repositories) dependencies { classpath 'com.linkedin.pegasus:gradle-plugins:' + pegasusVersion - classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4' + classpath 'com.github.node-gradle:gradle-node-plugin:7.0.1' classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.2.0' classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0" classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0" classpath "org.gradle.playframework:gradle-playframework:0.14" - classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1" + classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.2" } } plugins { - id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' - id 'com.github.johnrengelman.shadow' version '6.1.0' + id 'com.gorylenko.gradle-git-properties' version '2.4.1' + id 'com.github.johnrengelman.shadow' version '8.1.1' apply false id 'com.palantir.docker' version '0.35.0' apply false id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ @@ -149,19 +152,20 @@ project.ext.externalDependency = [ 'log4jApi': "org.apache.logging.log4j:log4j-api:$log4jVersion", 'log4j12Api': "org.slf4j:log4j-over-slf4j:$slf4jVersion", 'log4j2Api': "org.apache.logging.log4j:log4j-to-slf4j:$log4jVersion", - 'lombok': 'org.projectlombok:lombok:1.18.16', + 'lombok': 'org.projectlombok:lombok:1.18.30', 'mariadbConnector': 'org.mariadb.jdbc:mariadb-java-client:2.6.0', 'mavenArtifact': 
"org.apache.maven:maven-artifact:$mavenVersion", 'mixpanel': 'com.mixpanel:mixpanel-java:1.4.4', - 'mockito': 'org.mockito:mockito-core:3.0.0', - 'mockitoInline': 'org.mockito:mockito-inline:3.0.0', + 'mockito': 'org.mockito:mockito-core:4.11.0', + 'mockitoInline': 'org.mockito:mockito-inline:4.11.0', 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, - 'neo4jApoc': 'org.neo4j.procedure:apoc:' + neo4jApocVersion, + 'neo4jApocCore': 'org.neo4j.procedure:apoc-core:' + neo4jApocVersion, + 'neo4jApocCommon': 'org.neo4j.procedure:apoc-common:' + neo4jApocVersion, 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', @@ -190,8 +194,8 @@ project.ext.externalDependency = [ 'servletApi': 'javax.servlet:javax.servlet-api:3.1.0', 'shiroCore': 'org.apache.shiro:shiro-core:1.11.0', 'snakeYaml': 'org.yaml:snakeyaml:2.0', - 'sparkSql' : 'org.apache.spark:spark-sql_2.11:2.4.8', - 'sparkHive' : 'org.apache.spark:spark-hive_2.11:2.4.8', + 'sparkSql' : 'org.apache.spark:spark-sql_2.12:3.0.3', + 'sparkHive' : 'org.apache.spark:spark-hive_2.12:3.0.3', 'springBeans': "org.springframework:spring-beans:$springVersion", 'springContext': "org.springframework:spring-context:$springVersion", 'springCore': "org.springframework:spring-core:$springVersion", @@ -210,7 +214,6 @@ project.ext.externalDependency = [ 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', - 'testngJava8': 'org.testng:testng:7.5.1', 'testng': 'org.testng:testng:7.8.0', 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, @@ -226,13 +229,69 @@ project.ext.externalDependency = [ 'charle': 'com.charleskorn.kaml:kaml:0.53.0', 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', - 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' + 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0', + 'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2' ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' + + tasks.withType(Test).configureEach { + // https://docs.gradle.org/current/userguide/performance.html + maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 + + if (project.configurations.getByName("testImplementation").getDependencies() + .any{ it.getName().contains("testng") }) { + useTestNG() + } + } + + if (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application') + || project.plugins.hasPlugin('pegasus')) { + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + compileJava { + options.release = javaClassVersion + } + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + 
languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + // not duplicated, need to set this outside and inside afterEvaluate + afterEvaluate { + compileJava { + options.release = javaClassVersion + } + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + } + } } configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) { @@ -264,8 +323,9 @@ subprojects { failOnNoGitDirectory = false } - plugins.withType(JavaPlugin) { + plugins.withType(JavaPlugin).configureEach { dependencies { + implementation externalDependency.annotationApi constraints { implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") implementation('io.netty:netty-all:4.1.100.Final') @@ -276,18 +336,30 @@ subprojects { implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion") } } + spotless { java { googleJavaFormat() target project.fileTree(project.projectDir) { - include '**/*.java' - exclude 'build/**/*.java' - exclude '**/generated/**/*.*' - exclude '**/mainGeneratedDataTemplate/**/*.*' - exclude '**/mainGeneratedRest/**/*.*' + include 'src/**/*.java' + exclude 'src/**/resources/' + exclude 'src/**/generated/' + exclude 'src/**/mainGeneratedDataTemplate/' + exclude 'src/**/mainGeneratedRest/' + exclude 'src/renamed/avro/' + exclude 'src/test/sample-test-plugins/' } } } + + if (project.plugins.hasPlugin('pegasus')) { + dependencies { + dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 + restClientCompile spec.product.pegasus.restliClient + } + } + afterEvaluate { def spotlessJavaTask = tasks.findByName('spotlessJava') def processTask = tasks.findByName('processResources') @@ -305,28 +377,11 @@ subprojects { } } - tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(11) - } - } - tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) - } - // https://docs.gradle.org/current/userguide/performance.html - maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 - - if (project.configurations.getByName("testImplementation").getDependencies() - .any{ it.getName().contains("testng") }) { - useTestNG() - } - } - afterEvaluate { if (project.plugins.hasPlugin('pegasus')) { dependencies { dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 restClientCompile spec.product.pegasus.restliClient } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1f9d30d520171..0c2d91e1f7ac1 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,9 +1,11 @@ -apply plugin: 'java' - buildscript { apply from: '../repositories.gradle' } +plugins { + id 'java' +} + dependencies { /** * Forked version of abandoned repository: https://github.com/fge/json-schema-avro @@ -21,6 +23,9 @@ dependencies { implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' implementation 'commons-io:commons-io:2.11.0' - compileOnly 
'org.projectlombok:lombok:1.18.14' - annotationProcessor 'org.projectlombok:lombok:1.18.14' + compileOnly 'org.projectlombok:lombok:1.18.30' + annotationProcessor 'org.projectlombok:lombok:1.18.30' + + // pegasus dependency, overrides for tasks + implementation 'com.linkedin.pegasus:gradle-plugins:29.48.4' } \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java new file mode 100644 index 0000000000000..2460abcad6f9e --- /dev/null +++ b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java @@ -0,0 +1,2444 @@ +/* + * Copyright (c) 2019 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.linkedin.pegasus.gradle; + +import com.linkedin.pegasus.gradle.PegasusOptions.IdlOptions; +import com.linkedin.pegasus.gradle.internal.CompatibilityLogChecker; +import com.linkedin.pegasus.gradle.tasks.ChangedFileReportTask; +import com.linkedin.pegasus.gradle.tasks.CheckIdlTask; +import com.linkedin.pegasus.gradle.tasks.CheckPegasusSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.CheckRestModelTask; +import com.linkedin.pegasus.gradle.tasks.CheckSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.GenerateAvroSchemaTask; +import com.linkedin.pegasus.gradle.tasks.GenerateDataTemplateTask; +import com.linkedin.pegasus.gradle.tasks.GeneratePegasusSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.GenerateRestClientTask; +import com.linkedin.pegasus.gradle.tasks.GenerateRestModelTask; +import com.linkedin.pegasus.gradle.tasks.PublishRestModelTask; +import com.linkedin.pegasus.gradle.tasks.TranslateSchemasTask; +import com.linkedin.pegasus.gradle.tasks.ValidateExtensionSchemaTask; +import com.linkedin.pegasus.gradle.tasks.ValidateSchemaAnnotationTask; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Method; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.gradle.api.Action; +import org.gradle.api.GradleException; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ConfigurationContainer; +import org.gradle.api.file.FileCollection; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginConvention; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.publish.PublishingExtension; +import 
org.gradle.api.publish.ivy.IvyPublication; +import org.gradle.api.publish.ivy.plugins.IvyPublishPlugin; +import org.gradle.api.tasks.Copy; +import org.gradle.api.tasks.Delete; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.Sync; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.bundling.Jar; +import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.api.tasks.javadoc.Javadoc; +import org.gradle.language.base.plugins.LifecycleBasePlugin; +import org.gradle.language.jvm.tasks.ProcessResources; +import org.gradle.plugins.ide.eclipse.EclipsePlugin; +import org.gradle.plugins.ide.eclipse.model.EclipseModel; +import org.gradle.plugins.ide.idea.IdeaPlugin; +import org.gradle.plugins.ide.idea.model.IdeaModule; +import org.gradle.util.GradleVersion; + + +/** + * Pegasus code generation plugin. + * The supported project layout for this plugin is as follows: + * + *
+ * <pre>
+ *   --- api/
+ *   |   --- build.gradle
+ *   |   --- src/
+ *   |       --- &lt;sourceSet&gt;/
+ *   |       |   --- idl/
+ *   |       |   |   --- &lt;published idl (.restspec.json) files&gt;
+ *   |       |   --- java/
+ *   |       |   |   --- &lt;packageName&gt;/
+ *   |       |   |       --- &lt;common java files&gt;
+ *   |       |   --- pegasus/
+ *   |       |       --- &lt;packageName&gt;/
+ *   |       |           --- &lt;data schema (.pdsc) files&gt;
+ *   |       --- &lt;sourceSet&gt;GeneratedDataTemplate/
+ *   |       |   --- java/
+ *   |       |       --- &lt;packageName&gt;/
+ *   |       |           --- &lt;data template source files generated from data schema (.pdsc) files&gt;
+ *   |       --- &lt;sourceSet&gt;GeneratedAvroSchema/
+ *   |       |   --- avro/
+ *   |       |       --- &lt;packageName&gt;/
+ *   |       |           --- &lt;avsc avro schema files (.avsc) generated from pegasus schema files&gt;
+ *   |       --- &lt;sourceSet&gt;GeneratedRest/
+ *   |           --- java/
+ *   |               --- &lt;packageName&gt;/
+ *   |                   --- &lt;rest client source (.java) files generated from published idl&gt;
+ *   --- impl/
+ *   |   --- build.gradle
+ *   |   --- src/
+ *   |       --- &lt;sourceSet&gt;/
+ *   |       |   --- java/
+ *   |       |       --- &lt;packageName&gt;/
+ *   |       |           --- &lt;resource class source (.java) files&gt;
+ *   |       --- &lt;sourceSet&gt;GeneratedRest/
+ *   |           --- idl/
+ *   |               --- &lt;generated idl (.restspec.json) files&gt;
+ *   --- &lt;other projects&gt;/
+ * </pre>
+ * <ul>
+ *   <li>
+ *     <i>api</i>: contains all the files which are commonly depended on by the server and
+ *     client implementation. The common files include the data schema (.pdsc) files,
+ *     the idl (.restspec.json) files and potentially Java interface files used by both sides.
+ *   </li>
+ *   <li>
+ *     <i>impl</i>: contains the resource classes for the server implementation.
+ *   </li>
+ * </ul>
+ *
+ * <p>Performs the following functions:</p>
+ *
+ * <p><b>Generate data model and data template jars for each source set.</b></p>
+ *
+ * <p><i>Overview:</i></p>
+ *
+ * <p>
+ * In the api project, the plugin generates the data template source (.java) files from the
+ * data schema (.pdsc) files, and furthermore compiles the source files and packages them
+ * to jar files. Details of jar contents will be explained in following paragraphs.
+ * In general, data schema files should exist only in api projects.
+ * </p>
+ *
+ * <p>
+ * Configure the server and client implementation projects to depend on the
+ * api project's dataTemplate configuration to get access to the generated data templates
+ * from within these projects. This allows api classes to be built first so that implementation
+ * projects can consume them. We recommend this structure to avoid circular dependencies
+ * (directly or indirectly) among implementation projects.
+ * </p>
+ *
+ * <p><i>Detail:</i></p>
+ *
+ * <p>
+ * Generates data template source (.java) files from data schema (.pdsc) files,
+ * compiles the data template source (.java) files into class (.class) files,
+ * creates a data model jar file and a data template jar file.
+ * The data model jar file contains the source data schema (.pdsc) files.
+ * The data template jar file contains both the source data schema (.pdsc) files
+ * and the generated data template class (.class) files.
+ * </p>
+ *
+ * <p>
+ * In the data template generation phase, the plugin creates a new target source set
+ * for the generated files. The new target source set's name is the input source set name
+ * suffixed with "GeneratedDataTemplate", e.g. "mainGeneratedDataTemplate".
+ * The plugin invokes PegasusDataTemplateGenerator to generate data template source (.java) files
+ * for all data schema (.pdsc) files present in the input source set's pegasus
+ * directory, e.g. "src/main/pegasus". The generated data template source (.java) files
+ * will be in the new target source set's java source directory, e.g.
+ * "src/mainGeneratedDataTemplate/java". In addition to
+ * the data schema (.pdsc) files in the pegasus directory, the dataModel configuration
+ * specifies the resolver path for the PegasusDataTemplateGenerator. The resolver path
+ * provides the data schemas and previously generated data template classes that
+ * may be referenced by the input source set's data schemas. In most cases, the dataModel
+ * configuration should contain data template jars.
+ * </p>
+ *
+ * <p>
+ * The next phase is the data template compilation phase, in which the plugin compiles the generated
+ * data template source (.java) files into class files. The dataTemplateCompile configuration
+ * specifies the pegasus jars needed to compile these classes. The compileClasspath of the
+ * target source set is a composite of the dataModel configuration which includes the data template
+ * classes that were previously generated and included in the dependent data template jars,
+ * and the dataTemplateCompile configuration.
+ * This configuration should specify a dependency on the Pegasus data jar.
+ * </p>
+ *
+ * <p>
+ * The following phase is creating the data model jar and the data template jar.
+ * This plugin creates the data model jar that includes the contents of the
+ * input source set's pegasus directory, and sets the jar file's classification to
+ * "data-model". Hence, the resulting jar file's name should end with "-data-model.jar".
+ * It adds the data model jar as an artifact to the dataModel configuration.
+ * This jar file should only contain data schema (.pdsc) files.
+ * </p>
+ *
+ * <p>
+ * This plugin also creates the data template jar that includes the contents of the input
+ * source set's pegasus directory and the java class output directory of the
+ * target source set. It sets the jar file's classification to "data-template".
+ * Hence, the resulting jar file's name should end with "-data-template.jar".
+ * It adds the data template jar file as an artifact to the dataTemplate configuration.
+ * This jar file contains both data schema (.pdsc) files and generated data template
+ * class (.class) files.
+ * </p>
+ *
+ * <p>
+ * This plugin will ensure that data template source files are generated before
+ * compiling the input source set and before the idea and eclipse tasks. It
+ * also adds the generated classes to the compileClasspath of the input source set.
+ * </p>
+ *
+ * <p>
+ * The configurations that apply to generating the data model and data template jars
+ * are as follows:
+ * <ul>
+ *   <li>
+ *     The dataTemplateCompile configuration specifies the classpath for compiling
+ *     the generated data template source (.java) files. In most cases,
+ *     it should be the Pegasus data jar.
+ *     (The default compile configuration is not used for compiling data templates because
+ *     it is not desirable to include non data template dependencies in the data template jar.)
+ *     The configuration should not directly include data template jars. Data template jars
+ *     should be included in the dataModel configuration.
+ *   </li>
+ *   <li>
+ *     The dataModel configuration provides the value of the "generator.resolver.path"
+ *     system property that is passed to PegasusDataTemplateGenerator. In most cases,
+ *     this configuration should contain only data template jars. The data template jars
+ *     contain both data schema (.pdsc) files and generated data template (.class) files.
+ *     PegasusDataTemplateGenerator will not generate data template (.java) files for
+ *     classes that can be found in the resolver path. This avoids redundant generation
+ *     of the same classes, and inclusion of these classes in multiple jars.
+ *     The dataModel configuration is also used to publish the data model jar which
+ *     contains only data schema (.pdsc) files.
+ *   </li>
+ *   <li>
+ *     The testDataModel configuration is similar to the dataModel configuration
+ *     except it is used when generating data templates from test source sets.
+ *     It extends from the dataModel configuration. It is also used to publish
+ *     the data model jar from test source sets.
+ *   </li>
+ *   <li>
+ *     The dataTemplate configuration is used to publish the data template
+ *     jar which contains both data schema (.pdsc) files and the data template class
+ *     (.class) files generated from these data schema (.pdsc) files.
+ *   </li>
+ *   <li>
+ *     The testDataTemplate configuration is similar to the dataTemplate configuration
+ *     except it is used when publishing the data template jar files generated from
+ *     test source sets.
+ *   </li>
+ * </ul>
+ * </p>
+ *
+ * <p>Performs the following functions:</p>
+ *
+ * <p><b>Generate avro schema jars for each source set.</b></p>
+ *
+ * <p><i>Overview:</i></p>
+ *
+ * <p>
+ * In the api project, the task 'generateAvroSchema' generates the avro schema (.avsc)
+ * files from pegasus schema (.pdsc) files. In general, data schema files should exist
+ * only in api projects.
+ * </p>
+ *
+ * <p>
+ * Configure the server and client implementation projects to depend on the
+ * api project's avroSchema configuration to get access to the generated avro schemas
+ * from within these projects.
+ * </p>
+ *
+ * <p>
+ * This plugin also creates the avro schema jar that includes the contents of the input
+ * source set's avro directory and the avsc schema files.
+ * The resulting jar file's name should end with "-avro-schema.jar".
+ * </p>
+ *
+ * <p><b>Generate rest model and rest client jars for each source set.</b></p>
+ *
+ * <p><i>Overview:</i></p>
+ *
+ * <p>
+ * In the api project, generates rest client source (.java) files from the idl,
+ * compiles the rest client source (.java) files to rest client class (.class) files
+ * and puts them in jar files. In general, the api project should be the only place that
+ * contains the publishable idl files. If the published idl changes an existing idl
+ * in the api project, the plugin will emit a message indicating this has occurred and
+ * suggest that the entire project be rebuilt if it is desirable for clients of the
+ * idl to pick up the newly published changes.
+ * </p>
+ *
+ * <p>
+ * In the impl project, generates the idl (.restspec.json) files from the input
+ * source set's resource class files, then compares them against the existing idl
+ * files in the api project for compatibility checking. If incompatible changes are
+ * found, the build fails (unless a certain flag is specified, see below). If the
+ * generated idl passes compatibility checks (see compatibility check levels below),
+ * publishes the generated idl (.restspec.json) to the api project.
+ * </p>
+ *
+ * <p><i>Detail:</i></p>
+ *
+ * <p><b>rest client generation phase</b>: in api project</p>
+ *
+ * <p>
+ * In this phase, the rest client source (.java) files are generated from the
+ * api project idl (.restspec.json) files using RestRequestBuilderGenerator.
+ * The generated rest client source files will be in the new target source set's
+ * java source directory, e.g. "src/mainGeneratedRest/java".
+ * </p>
+ *
+ * <p>
+ * RestRequestBuilderGenerator requires access to the data schemas referenced
+ * by the idl. The dataModel configuration specifies the resolver path needed
+ * by RestRequestBuilderGenerator to access the data schemas referenced by
+ * the idl that are not in the source set's pegasus directory.
+ * This plugin automatically includes the data schema (.pdsc) files in the
+ * source set's pegasus directory in the resolver path.
+ * In most cases, the dataModel configuration should contain data template jars.
+ * The data template jars contain both data schema (.pdsc) files and generated
+ * data template class (.class) files. By specifying data template jars instead
+ * of data model jars, redundant generation of data template classes is avoided
+ * as classes that can be found in the resolver path are not generated.
+ * </p>
+ *
+ * <p><b>rest client compilation phase</b>: in api project</p>
+ *
+ * <p>
+ * In this phase, the plugin compiles the generated rest client source (.java)
+ * files into class files. The restClientCompile configuration specifies the
+ * pegasus jars needed to compile these classes. The compile classpath is a
+ * composite of the dataModel configuration which includes the data template
+ * classes that were previously generated and included in the dependent data template
+ * jars, and the restClientCompile configuration.
+ * This configuration should specify a dependency on the Pegasus restli-client jar.
+ * </p>
+ *
+ * <p>
+ * The following stage is creating the rest model jar and the rest client jar.
+ * This plugin creates the rest model jar that includes the
+ * generated idl (.restspec.json) files, and sets the jar file's classification to
+ * "rest-model". Hence, the resulting jar file's name should end with "-rest-model.jar".
+ * It adds the rest model jar as an artifact to the restModel configuration.
+ * This jar file should only contain idl (.restspec.json) files.
+ * </p>
+ *
+ * <p>
+ * This plugin also creates the rest client jar that includes the generated
+ * idl (.restspec.json) files and the java class output directory of the
+ * target source set. It sets the jar file's classification to "rest-client".
+ * Hence, the resulting jar file's name should end with "-rest-client.jar".
+ * It adds the rest client jar file as an artifact to the restClient configuration.
+ * This jar file contains both idl (.restspec.json) files and generated rest client
+ * class (.class) files.
+ * </p>
+ *
+ * <p><b>idl generation phase</b>: in server implementation project</p>
+ *
+ * <p>
+ * Before entering this phase, the plugin will ensure that generating idl will
+ * occur after compiling the input source set. It will also ensure that IDEA
+ * and Eclipse tasks run after rest client source (.java) files are generated.
+ * </p>
+ *
+ * <p>
+ * In this phase, the plugin creates a new target source set for the generated files.
+ * The new target source set's name is the input source set name suffixed with
+ * "GeneratedRest", e.g. "mainGeneratedRest". The plugin invokes
+ * RestLiResourceModelExporter to generate idl (.restspec.json) files for each
+ * IdlItem in the input source set's pegasus IdlOptions. The generated idl files
+ * will be in the target source set's idl directory, e.g. "src/mainGeneratedRest/idl".
+ * For example, the following adds an IdlItem to the source set's pegasus IdlOptions.
+ * This line should appear in the impl project's build.gradle. If no IdlItem is added,
+ * this source set will be excluded from generating idl and checking idl compatibility,
+ * even if there are existing idl files.
+ * <pre>
+ *   pegasus.main.idlOptions.addIdlItem(["com.linkedin.restli.examples.groups.server"])
+ * </pre>
+ * </p>
+ *
+ * <p>
+ * After the idl generation phase, each included idl file is checked for compatibility against
+ * those in the api project. In case the current interface breaks compatibility,
+ * by default the build fails and reports all compatibility errors and warnings. Otherwise,
+ * the build tasks in the api project later will package the resource classes into jar files.
+ * Users can change the compatibility requirement between the current and published idl by
+ * setting the "rest.model.compatibility" project property, i.e.
+ * "gradle -Prest.model.compatibility= ..." The following levels are supported:
+ * <ul>
+ *   <li><b>ignore</b>: idl compatibility check will occur but its result will be ignored.
+ *   The result will be aggregated and printed at the end of the build.</li>
+ *   <li><b>backwards</b>: build fails if there are backwards incompatible changes in idl.
+ *   Build continues if there are only compatible changes.</li>
+ *   <li><b>equivalent (default)</b>: build fails if there are any functional changes (compatible or
+ *   incompatible) in the current idl. Only docs and comments are allowed to be different.</li>
+ * </ul>
+ * The plugin needs to know where the api project is. It searches for the api project in the
+ * following steps. If all searches fail, the build fails.
+ * <ol>
+ *   <li>
+ *     Use the specified project from the impl project build.gradle file. The ext.apiProject
+ *     property explicitly assigns the api project. E.g.
+ *     <pre>
+ *       ext.apiProject = project(':groups:groups-server-api')
+ *     </pre>
+ *     If multiple such statements exist, the last will be used. A wrong project path causes a
+ *     Gradle evaluation error.
+ *   </li>
+ *   <li>
+ *     If no ext.apiProject property is defined, the plugin will try to guess the
+ *     api project name with the following conventions. The search stops at the first successful match.
+ *     <ol>
+ *       <li>
+ *         If the impl project name ends with the following suffixes, substitute the suffix with "-api".
+ *         <ol>
+ *           <li>-impl</li>
+ *           <li>-service</li>
+ *           <li>-server</li>
+ *           <li>-server-impl</li>
+ *         </ol>
+ *         This list can be overridden by inserting the following line into the project build.gradle:
+ *         <pre>
+ *           ext.apiProjectSubstitutionSuffixes = ['-new-suffix-1', '-new-suffix-2']
+ *         </pre>
+ *         Alternatively, this setting could be applied globally to all projects by putting it in
+ *         the subprojects section of the root build.gradle.
+ *       </li>
+ *       <li>
+ *         Append "-api" to the impl project name.
+ *       </li>
+ *     </ol>
+ *   </li>
+ * </ol>
+ * The plugin invokes RestLiResourceModelCompatibilityChecker to check compatibility.
+ * </p>
+ *
+ * <p>
+ * The idl files in the api project are not generated by the plugin, but rather
+ * "published" from the impl project. The publishRestModel task is used to copy the
+ * idl files to the api project. This task is invoked automatically if the idls are
+ * verified to be "safe". "Safe" is determined by the "rest.model.compatibility"
+ * property. Because this task is skipped if the idls are functionally equivalent
+ * (not necessarily identical, e.g. differ in doc fields), if the default "equivalent"
+ * compatibility level is used, no file will be copied. If such automatic publishing
+ * is intended to be skipped, set the "rest.model.skipPublish" property to true.
+ * Note that all the properties are per-project and can be overridden in each project's
+ * build.gradle file.
+ * </p>
+ *
+ * <p>
+ * Please always keep in mind that if idl publishing has happened, a subsequent whole-project
+ * rebuild is necessary to pick up the changes. Otherwise, the Hudson job will fail and
+ * the source code commit will fail.
+ * </p>
+ *
+ * <p>
+ * The configurations that apply to generating the rest model and rest client jars
+ * are as follows:
+ * <ul>
+ *   <li>
+ *     The restClientCompile configuration specifies the classpath for compiling
+ *     the generated rest client source (.java) files. In most cases,
+ *     it should be the Pegasus restli-client jar.
+ *     (The default compile configuration is not used for compiling rest clients because
+ *     it is not desirable to include non rest client dependencies, such as
+ *     the rest server implementation classes, in the data template jar.)
+ *     The configuration should not directly include data template jars. Data template jars
+ *     should be included in the dataModel configuration.
+ *   </li>
+ *   <li>
+ *     The dataModel configuration provides the value of the "generator.resolver.path"
+ *     system property that is passed to RestRequestBuilderGenerator.
+ *     This configuration should contain only data template jars. The data template jars
+ *     contain both data schema (.pdsc) files and generated data template (.class) files.
+ *     The RestRequestBuilderGenerator will only generate rest client classes.
+ *     The dataModel configuration is also included in the compile classpath for the
+ *     generated rest client source files. If the dataModel configuration does not
+ *     include generated data template classes, then the Java compiler may not be able to
+ *     find the data template classes referenced by the generated rest client.
+ *   </li>
+ *   <li>
+ *     The testDataModel configuration is similar to the dataModel configuration
+ *     except it is used when generating rest client source files from
+ *     test source sets.
+ *   </li>
+ *   <li>
+ *     The restModel configuration is used to publish the rest model jar
+ *     which contains generated idl (.restspec.json) files.
+ *   </li>
+ *   <li>
+ *     The testRestModel configuration is similar to the restModel configuration
+ *     except it is used to publish rest model jar files generated from
+ *     test source sets.
+ *   </li>
+ *   <li>
+ *     The restClient configuration is used to publish the rest client jar
+ *     which contains both generated idl (.restspec.json) files and
+ *     the rest client class (.class) files generated from these
+ *     idl (.restspec.json) files.
+ *   </li>
+ *   <li>
+ *     The testRestClient configuration is similar to the restClient configuration
+ *     except it is used to publish rest client jar files generated from
+ *     test source sets.
+ *   </li>
+ * </ul>
+ * </p>
+ *
+ * <p>
+ * This plugin considers test source sets whose names begin with 'test' or 'integTest' to be
+ * test source sets.
+ * </p>
+
+ */ +public class PegasusPlugin implements Plugin +{ + public static boolean debug = false; + + private static final GradleVersion MIN_REQUIRED_VERSION = GradleVersion.version("1.0"); // Next: 5.2.1 + private static final GradleVersion MIN_SUGGESTED_VERSION = GradleVersion.version("5.2.1"); // Next: 5.3 + + // + // Constants for generating sourceSet names and corresponding directory names + // for generated code + // + private static final String DATA_TEMPLATE_GEN_TYPE = "DataTemplate"; + private static final String REST_GEN_TYPE = "Rest"; + private static final String AVRO_SCHEMA_GEN_TYPE = "AvroSchema"; + + public static final String DATA_TEMPLATE_FILE_SUFFIX = ".pdsc"; + public static final String PDL_FILE_SUFFIX = ".pdl"; + // gradle property to opt OUT schema annotation validation, by default this feature is enabled. + private static final String DISABLE_SCHEMA_ANNOTATION_VALIDATION = "schema.annotation.validation.disable"; + // gradle property to opt in for destroying stale files from the build directory, + // by default it is disabled, because it triggers hot-reload (even if it results in a no-op) + private static final String DESTROY_STALE_FILES_ENABLE = "enableDestroyStaleFiles"; + public static final Collection DATA_TEMPLATE_FILE_SUFFIXES = new ArrayList<>(); + + public static final String IDL_FILE_SUFFIX = ".restspec.json"; + public static final String SNAPSHOT_FILE_SUFFIX = ".snapshot.json"; + public static final String SNAPSHOT_COMPAT_REQUIREMENT = "rest.model.compatibility"; + public static final String IDL_COMPAT_REQUIREMENT = "rest.idl.compatibility"; + // Pegasus schema compatibility level configuration, which is used to define the {@link CompatibilityLevel}. + public static final String PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.pegasusSchema.compatibility"; + // Pegasus extension schema compatibility level configuration, which is used to define the {@link CompatibilityLevel} + public static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.extensionSchema.compatibility"; + // CompatibilityOptions Mode configuration, which is used to define the {@link CompatibilityOptions#Mode} in the compatibility checker. + private static final String PEGASUS_COMPATIBILITY_MODE = "pegasusPlugin.pegasusSchemaCompatibilityCheckMode"; + + private static final Pattern TEST_DIR_REGEX = Pattern.compile("^(integ)?[Tt]est"); + private static final String SNAPSHOT_NO_PUBLISH = "rest.model.noPublish"; + private static final String SNAPSHOT_FORCE_PUBLISH = "rest.model.forcePublish"; + private static final String PROCESS_EMPTY_IDL_DIR = "rest.idl.processEmptyIdlDir"; + private static final String IDL_NO_PUBLISH = "rest.idl.noPublish"; + private static final String IDL_FORCE_PUBLISH = "rest.idl.forcePublish"; + private static final String SKIP_IDL_CHECK = "rest.idl.skipCheck"; + // gradle property to skip running GenerateRestModel task. + // Note it affects GenerateRestModel task only, and does not skip tasks depends on GenerateRestModel. 
+ private static final String SKIP_GENERATE_REST_MODEL= "rest.model.skipGenerateRestModel"; + private static final String SUPPRESS_REST_CLIENT_RESTLI_2 = "rest.client.restli2.suppress"; + private static final String SUPPRESS_REST_CLIENT_RESTLI_1 = "rest.client.restli1.suppress"; + + private static final String GENERATOR_CLASSLOADER_NAME = "pegasusGeneratorClassLoader"; + + private static final String CONVERT_TO_PDL_REVERSE = "convertToPdl.reverse"; + private static final String CONVERT_TO_PDL_KEEP_ORIGINAL = "convertToPdl.keepOriginal"; + private static final String CONVERT_TO_PDL_SKIP_VERIFICATION = "convertToPdl.skipVerification"; + private static final String CONVERT_TO_PDL_PRESERVE_SOURCE_CMD = "convertToPdl.preserveSourceCmd"; + + // Below variables are used to collect data across all pegasus projects (sub-projects) and then print information + // to the user at the end after build is finished. + private static StringBuffer _restModelCompatMessage = new StringBuffer(); + private static final Collection _needCheckinFiles = new ArrayList<>(); + private static final Collection _needBuildFolders = new ArrayList<>(); + private static final Collection _possibleMissingFilesInEarlierCommit = new ArrayList<>(); + + private static final String RUN_ONCE = "runOnce"; + private static final Object STATIC_PROJECT_EVALUATED_LOCK = new Object(); + + private static final List UNUSED_CONFIGURATIONS = Arrays.asList( + "dataTemplateGenerator", "restTools", "avroSchemaGenerator"); + // Directory in the dataTemplate jar that holds schemas translated from PDL to PDSC. + private static final String TRANSLATED_SCHEMAS_DIR = "legacyPegasusSchemas"; + // Enable the use of argFiles for the tasks that support them + private static final String ENABLE_ARG_FILE = "pegasusPlugin.enableArgFile"; + // Enable the generation of fluent APIs + private static final String ENABLE_FLUENT_API = "pegasusPlugin.enableFluentApi"; + + // This config impacts GenerateDataTemplateTask and GenerateRestClientTask; + // If not set, by default all paths generated in these two tasks will be lower-case. 
+
+  // Below variables are used to collect data across all pegasus projects (sub-projects) and then print information
+  // to the user at the end after the build is finished.
+  private static StringBuffer _restModelCompatMessage = new StringBuffer();
+  private static final Collection<String> _needCheckinFiles = new ArrayList<>();
+  private static final Collection<String> _needBuildFolders = new ArrayList<>();
+  private static final Collection<String> _possibleMissingFilesInEarlierCommit = new ArrayList<>();
+
+  private static final String RUN_ONCE = "runOnce";
+  private static final Object STATIC_PROJECT_EVALUATED_LOCK = new Object();
+
+  private static final List<String> UNUSED_CONFIGURATIONS = Arrays.asList(
+      "dataTemplateGenerator", "restTools", "avroSchemaGenerator");
+  // Directory in the dataTemplate jar that holds schemas translated from PDL to PDSC.
+  private static final String TRANSLATED_SCHEMAS_DIR = "legacyPegasusSchemas";
+  // Enable the use of argFiles for the tasks that support them
+  private static final String ENABLE_ARG_FILE = "pegasusPlugin.enableArgFile";
+  // Enable the generation of fluent APIs
+  private static final String ENABLE_FLUENT_API = "pegasusPlugin.enableFluentApi";
+
+  // This config impacts GenerateDataTemplateTask and GenerateRestClientTask;
+  // if not set, by default all paths generated in these two tasks will be lower-case.
+  // This default behavior is needed because Linux, MacOS and Windows treat case-sensitive paths differently,
+  // and we want to be consistent, so we choose lower-case as the default case for generated paths.
+  private static final String CODE_GEN_PATH_CASE_SENSITIVE = "pegasusPlugin.generateCaseSensitivePath";
+
+  private static final String PEGASUS_PLUGIN_CONFIGURATION = "pegasusPlugin";
+
+  // Enable the use of the generic pegasus schema compatibility checker
+  private static final String ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK = "pegasusPlugin.enablePegasusSchemaCompatibilityCheck";
+
+  private static final String PEGASUS_SCHEMA_SNAPSHOT = "PegasusSchemaSnapshot";
+
+  private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT = "PegasusExtensionSchemaSnapshot";
+
+  private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR = "pegasusSchemaSnapshot";
+
+  private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR = "pegasusExtensionSchemaSnapshot";
+
+  private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusSchemaSnapshotDir";
+
+  private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusExtensionSchemaSnapshotDir";
+
+  private static final String SRC = "src";
+
+  private static final String SCHEMA_ANNOTATION_HANDLER_CONFIGURATION = "schemaAnnotationHandler";
+
+  private static final String COMPATIBILITY_OPTIONS_MODE_EXTENSION = "EXTENSION";
+
+
+  @SuppressWarnings("unchecked")
+  private Class<? extends Plugin<Project>> _thisPluginType = (Class<? extends Plugin<Project>>)
+      getClass().asSubclass(Plugin.class);
+
+  private Task _generateSourcesJarTask;
+  private Javadoc _generateJavadocTask;
+  private Task _generateJavadocJarTask;
+  private boolean _configureIvyPublications = true;
+
+  public void setPluginType(Class<? extends Plugin<Project>> pluginType)
+  {
+    _thisPluginType = pluginType;
+  }
+
+  public void setSourcesJarTask(Task sourcesJarTask)
+  {
+    _generateSourcesJarTask = sourcesJarTask;
+  }
+
+  public void setJavadocJarTask(Task javadocJarTask)
+  {
+    _generateJavadocJarTask = javadocJarTask;
+  }
+
+  public void setConfigureIvyPublications(boolean configureIvyPublications) {
+    _configureIvyPublications = configureIvyPublications;
+  }
+
+  @Override
+  public void apply(Project project)
+  {
+    checkGradleVersion(project);
+
+    project.getPlugins().apply(JavaPlugin.class);
+
+    // this HashMap will have a PegasusOptions per sourceSet
+    project.getExtensions().getExtraProperties().set("pegasus", new HashMap<>());
+    // this map will extract PegasusOptions.GenerationMode to project property
+    project.getExtensions().getExtraProperties().set("PegasusGenerationMode",
+        Arrays.stream(PegasusOptions.GenerationMode.values())
+            .collect(Collectors.toMap(PegasusOptions.GenerationMode::name, Function.identity())));
+
+    synchronized (STATIC_PROJECT_EVALUATED_LOCK)
+    {
+      // Check if this is the first time the block will run. Pegasus plugin can run multiple times in a build if
+      // multiple sub-projects applied the plugin.
+      if (!project.getRootProject().hasProperty(RUN_ONCE)
+          || !Boolean.parseBoolean(String.valueOf(project.getRootProject().property(RUN_ONCE))))
+      {
+        project.getGradle().projectsEvaluated(gradle ->
+            gradle.getRootProject().subprojects(subproject ->
+                UNUSED_CONFIGURATIONS.forEach(configurationName -> {
+                  Configuration conf = subproject.getConfigurations().findByName(configurationName);
+                  if (conf != null && !conf.getDependencies().isEmpty()) {
+                    subproject.getLogger().warn("*** Project {} declares dependency to unused configuration \"{}\". 
" + + "This configuration is deprecated and you can safely remove the dependency. ***", + subproject.getPath(), configurationName); + } + }) + ) + ); + + // Re-initialize the static variables as they might have stale values from previous run. With Gradle 3.0 and + // gradle daemon enabled, the plugin class might not be loaded for every run. + DATA_TEMPLATE_FILE_SUFFIXES.clear(); + DATA_TEMPLATE_FILE_SUFFIXES.add(DATA_TEMPLATE_FILE_SUFFIX); + DATA_TEMPLATE_FILE_SUFFIXES.add(PDL_FILE_SUFFIX); + + _restModelCompatMessage = new StringBuffer(); + _needCheckinFiles.clear(); + _needBuildFolders.clear(); + _possibleMissingFilesInEarlierCommit.clear(); + + project.getGradle().buildFinished(result -> + { + StringBuilder endOfBuildMessage = new StringBuilder(); + if (_restModelCompatMessage.length() > 0) + { + endOfBuildMessage.append(_restModelCompatMessage); + } + + if (!_needCheckinFiles.isEmpty()) + { + endOfBuildMessage.append(createModifiedFilesMessage(_needCheckinFiles, _needBuildFolders)); + } + + if (!_possibleMissingFilesInEarlierCommit.isEmpty()) + { + endOfBuildMessage.append(createPossibleMissingFilesMessage(_possibleMissingFilesInEarlierCommit)); + } + + if (endOfBuildMessage.length() > 0) + { + result.getGradle().getRootProject().getLogger().quiet(endOfBuildMessage.toString()); + } + }); + + // Set an extra property on the root project to indicate the initialization is complete for the current build. + project.getRootProject().getExtensions().getExtraProperties().set(RUN_ONCE, true); + } + } + + ConfigurationContainer configurations = project.getConfigurations(); + + // configuration for getting the required classes to make pegasus call main methods + configurations.maybeCreate(PEGASUS_PLUGIN_CONFIGURATION); + + // configuration for compiling generated data templates + Configuration dataTemplateCompile = configurations.maybeCreate("dataTemplateCompile"); + dataTemplateCompile.setVisible(false); + + // configuration for running rest client generator + Configuration restClientCompile = configurations.maybeCreate("restClientCompile"); + restClientCompile.setVisible(false); + + // configuration for running data template generator + // DEPRECATED! This configuration is no longer used. Please stop using it. + Configuration dataTemplateGenerator = configurations.maybeCreate("dataTemplateGenerator"); + dataTemplateGenerator.setVisible(false); + + // configuration for running rest client generator + // DEPRECATED! This configuration is no longer used. Please stop using it. + Configuration restTools = configurations.maybeCreate("restTools"); + restTools.setVisible(false); + + // configuration for running Avro schema generator + // DEPRECATED! 
To skip avro schema generation, use PegasusOptions.generationModes
+    Configuration avroSchemaGenerator = configurations.maybeCreate("avroSchemaGenerator");
+    avroSchemaGenerator.setVisible(false);
+
+    // configuration for depending on data schemas and potentially generated data templates
+    // and for publishing jars containing data schemas to the project artifacts for inclusion in the ivy.xml
+    Configuration dataModel = configurations.maybeCreate("dataModel");
+    Configuration testDataModel = configurations.maybeCreate("testDataModel");
+    testDataModel.extendsFrom(dataModel);
+
+    // configuration for depending on data schemas and potentially generated data templates
+    // and for publishing jars containing data schemas to the project artifacts for inclusion in the ivy.xml
+    Configuration avroSchema = configurations.maybeCreate("avroSchema");
+    Configuration testAvroSchema = configurations.maybeCreate("testAvroSchema");
+    testAvroSchema.extendsFrom(avroSchema);
+
+    // configuration for depending on rest idl and potentially generated client builders
+    // and for publishing jars containing rest idl to the project artifacts for inclusion in the ivy.xml
+    Configuration restModel = configurations.maybeCreate("restModel");
+    Configuration testRestModel = configurations.maybeCreate("testRestModel");
+    testRestModel.extendsFrom(restModel);
+
+    // configuration for publishing jars containing data schemas and generated data templates
+    // to the project artifacts for inclusion in the ivy.xml
+    //
+    // published data template jars depend on the configurations used to compile the classes
+    // in the jar; this includes the data models/templates used by the data template generator
+    // and the classes used to compile the generated classes.
+    Configuration dataTemplate = configurations.maybeCreate("dataTemplate");
+    dataTemplate.extendsFrom(dataTemplateCompile, dataModel);
+    Configuration testDataTemplate = configurations.maybeCreate("testDataTemplate");
+    testDataTemplate.extendsFrom(dataTemplate, testDataModel);
+
+    // configuration for processing and validating schema annotations at build time.
+    //
+    // The configuration contains dependencies to schema annotation handlers which process
+    // and validate schema annotations.
+    Configuration schemaAnnotationHandler = configurations.maybeCreate(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION);
+
+    // configuration for publishing jars containing rest idl and generated client builders
+    // to the project artifacts for inclusion in the ivy.xml
+    //
+    // published client builder jars depend on the configurations used to compile the classes
+    // in the jar; this includes the data models/templates (potentially generated by this
+    // project and) used by the data template generator and the classes used to compile
+    // the generated classes.
+    Configuration restClient = configurations.maybeCreate("restClient");
+    restClient.extendsFrom(restClientCompile, dataTemplate);
+    Configuration testRestClient = configurations.maybeCreate("testRestClient");
+    testRestClient.extendsFrom(restClient, testDataTemplate);
+
+    Properties properties = new Properties();
+    InputStream inputStream = getClass().getResourceAsStream("/pegasus-version.properties");
+    if (inputStream != null)
+    {
+      try
+      {
+        properties.load(inputStream);
+      }
+      catch (IOException e)
+      {
+        throw new GradleException("Unable to read pegasus-version.properties file.", e);
+      }
+
+      String version = properties.getProperty("pegasus.version");
+
+      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data:" + version);
+      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data-avro-generator:" + version);
+      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:generator:" + version);
+      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:restli-tools:" + version);
+    }
+    else
+    {
+      project.getLogger().lifecycle("Unable to add pegasus dependencies to {}. Please be sure that "
+          + "'com.linkedin.pegasus:data', 'com.linkedin.pegasus:data-avro-generator', 'com.linkedin.pegasus:generator', 'com.linkedin.pegasus:restli-tools'"
+          + " are available on the configuration pegasusPlugin",
+          project.getPath());
+    }
+    project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "org.slf4j:slf4j-simple:1.7.2");
+    project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, project.files(System.getProperty("java.home") + "/../lib/tools.jar"));
+
+    // this call has to be here because:
+    // 1) artifacts cannot be published once projects have been evaluated, so we need to first
+    // create the tasks and artifact handler, then progressively append sources
+    // 2) in order to append sources progressively, the source and documentation tasks and artifacts must be
+    // configured/created before configuring and creating the code generation tasks.
+
+    configureGeneratedSourcesAndJavadoc(project);
+
+    ChangedFileReportTask changedFileReportTask = project.getTasks()
+        .create("changedFilesReport", ChangedFileReportTask.class);
+
+    project.getTasks().getByName("check").dependsOn(changedFileReportTask);
+
+    SourceSetContainer sourceSets = project.getConvention()
+        .getPlugin(JavaPluginConvention.class).getSourceSets();
+
+    sourceSets.all(sourceSet ->
+    {
+      if (sourceSet.getName().toLowerCase(Locale.US).contains("generated"))
+      {
+        return;
+      }
+
+      checkAvroSchemaExist(project, sourceSet);
+
+      // the idl Generator input options will be inside the PegasusOptions class. Users of the
+      // plugin can set the inputOptions in their build.gradle
+      @SuppressWarnings("unchecked")
+      Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
+          .getExtensions().getExtraProperties().get("pegasus");
+
+      pegasusOptions.put(sourceSet.getName(), new PegasusOptions());
+
+      // rest model generation could fail on incompatibility
+      // if it can fail, fail it early
+      configureRestModelGeneration(project, sourceSet);
+
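+      // Taken together, the configure* calls in this block wire a per-sourceSet pipeline;
+      // for the "main" source set this typically materializes tasks such as
+      // generateDataTemplate, mainDataTemplateJar, generateAvroSchema and mainRestClientJar
+      // (exact names follow Gradle's SourceSet#getTaskName conventions).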
+      // Do compatibility check for schemas under the "pegasus" directory if the configuration property is provided.
+      if (isPropertyTrue(project, ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK))
+      {
+        configurePegasusSchemaSnapshotGeneration(project, sourceSet, false);
+      }
+
+      configurePegasusSchemaSnapshotGeneration(project, sourceSet, true);
+
+      configureConversionUtilities(project, sourceSet);
+
+      GenerateDataTemplateTask generateDataTemplateTask = configureDataTemplateGeneration(project, sourceSet);
+
+      configureAvroSchemaGeneration(project, sourceSet);
+
+      configureRestClientGeneration(project, sourceSet);
+
+      if (!isPropertyTrue(project, DISABLE_SCHEMA_ANNOTATION_VALIDATION))
+      {
+        configureSchemaAnnotationValidation(project, sourceSet, generateDataTemplateTask);
+      }
+
+      Task cleanGeneratedDirTask = project.task(sourceSet.getTaskName("clean", "GeneratedDir"));
+      cleanGeneratedDirTask.doLast(new CacheableAction<>(task ->
+      {
+        deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE);
+        deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE);
+        deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE);
+      }));
+
+      // make "clean" depend on deleting the generated directories
+      project.getTasks().getByName("clean").dependsOn(cleanGeneratedDirTask);
+
+      // Set data schema directories as resource roots
+      configureDataSchemaResourcesRoot(project, sourceSet);
+    });
+
+    project.getExtensions().getExtraProperties().set(GENERATOR_CLASSLOADER_NAME, getClass().getClassLoader());
+  }
+
+  protected void configureSchemaAnnotationValidation(Project project,
+      SourceSet sourceSet,
+      GenerateDataTemplateTask generateDataTemplatesTask)
+  {
+    // Tasks execute in the following order:
+    // generateDataTemplatesTask -> validateSchemaAnnotationTask
+
+    // Create ValidateSchemaAnnotation task
+    ValidateSchemaAnnotationTask validateSchemaAnnotationTask = project.getTasks()
+        .create(sourceSet.getTaskName("validate", "schemaAnnotation"), ValidateSchemaAnnotationTask.class, task ->
+        {
+          task.setInputDir(generateDataTemplatesTask.getInputDir());
+          task.setResolverPath(getDataModelConfig(project, sourceSet)); // same resolver path as generateDataTemplatesTask
+          task.setClassPath(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)
+              .plus(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION))
+              .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)));
+          task.setHandlerJarPath(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION));
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+        }
+        );
+
+    // validateSchemaAnnotationTask depends on generateDataTemplatesTask
+    validateSchemaAnnotationTask.dependsOn(generateDataTemplatesTask);
+
+    // "check" depends on validateSchemaAnnotationTask.
+ project.getTasks().getByName("check").dependsOn(validateSchemaAnnotationTask); + } + + + + @SuppressWarnings("deprecation") + protected void configureGeneratedSourcesAndJavadoc(Project project) + { + _generateJavadocTask = project.getTasks().create("generateJavadoc", Javadoc.class); + + if (_generateSourcesJarTask == null) + { + // + // configuration for publishing jars containing sources for generated classes + // to the project artifacts for including in the ivy.xml + // + ConfigurationContainer configurations = project.getConfigurations(); + Configuration generatedSources = configurations.maybeCreate("generatedSources"); + Configuration testGeneratedSources = configurations.maybeCreate("testGeneratedSources"); + testGeneratedSources.extendsFrom(generatedSources); + + _generateSourcesJarTask = project.getTasks().create("generateSourcesJar", Jar.class, jarTask -> { + jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + jarTask.setDescription("Generates a jar file containing the sources for the generated Java classes."); + // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + jarTask.getArchiveClassifier().set("sources"); + }); + + project.getArtifacts().add("generatedSources", _generateSourcesJarTask); + } + + if (_generateJavadocJarTask == null) + { + // + // configuration for publishing jars containing Javadoc for generated classes + // to the project artifacts for including in the ivy.xml + // + ConfigurationContainer configurations = project.getConfigurations(); + Configuration generatedJavadoc = configurations.maybeCreate("generatedJavadoc"); + Configuration testGeneratedJavadoc = configurations.maybeCreate("testGeneratedJavadoc"); + testGeneratedJavadoc.extendsFrom(generatedJavadoc); + + _generateJavadocJarTask = project.getTasks().create("generateJavadocJar", Jar.class, jarTask -> { + jarTask.dependsOn(_generateJavadocTask); + jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + jarTask.setDescription("Generates a jar file containing the Javadoc for the generated Java classes."); + // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + jarTask.getArchiveClassifier().set("javadoc"); + jarTask.from(_generateJavadocTask.getDestinationDir()); + }); + + project.getArtifacts().add("generatedJavadoc", _generateJavadocJarTask); + } + else + { + // TODO: Tighten the types so that _generateJavadocJarTask must be of type Jar. 
+      ((Jar) _generateJavadocJarTask).from(_generateJavadocTask.getDestinationDir());
+      _generateJavadocJarTask.dependsOn(_generateJavadocTask);
+    }
+  }
+
+  private static void deleteGeneratedDir(Project project, SourceSet sourceSet, String dirType)
+  {
+    String generatedDirPath = getGeneratedDirPath(project, sourceSet, dirType);
+    project.getLogger().info("Delete generated directory {}", generatedDirPath);
+    project.delete(generatedDirPath);
+  }
+
+  private static <E extends Enum<E>> Class<E> getCompatibilityLevelClass(Project project)
+  {
+    ClassLoader generatorClassLoader = (ClassLoader) project.property(GENERATOR_CLASSLOADER_NAME);
+
+    String className = "com.linkedin.restli.tools.idlcheck.CompatibilityLevel";
+    try
+    {
+      @SuppressWarnings("unchecked")
+      Class<E> enumClass = (Class<E>) generatorClassLoader.loadClass(className).asSubclass(Enum.class);
+      return enumClass;
+    }
+    catch (ClassNotFoundException e)
+    {
+      throw new RuntimeException("Could not load class " + className);
+    }
+  }
+
+  private static void addGeneratedDir(Project project, SourceSet sourceSet, Collection<Configuration> configurations)
+  {
+    project.getPlugins().withType(IdeaPlugin.class, ideaPlugin -> {
+      IdeaModule ideaModule = ideaPlugin.getModel().getModule();
+      // stupid if block needed because of stupid assignment required to update source dirs
+      if (isTestSourceSet(sourceSet))
+      {
+        Set<File> sourceDirs = ideaModule.getTestSourceDirs();
+        sourceDirs.addAll(sourceSet.getJava().getSrcDirs());
+        // this is stupid but assignment is required
+        ideaModule.setTestSourceDirs(sourceDirs);
+        if (debug)
+        {
+          System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule testSourceDirs "
+              + ideaModule.getTestSourceDirs());
+        }
+      }
+      else
+      {
+        Set<File> sourceDirs = ideaModule.getSourceDirs();
+        sourceDirs.addAll(sourceSet.getJava().getSrcDirs());
+        // this is stupid but assignment is required
+        ideaModule.setSourceDirs(sourceDirs);
+        if (debug)
+        {
+          System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule sourceDirs "
+              + ideaModule.getSourceDirs());
+        }
+      }
+      Collection<Configuration> compilePlus = ideaModule.getScopes().get("COMPILE").get("plus");
+      compilePlus.addAll(configurations);
+      ideaModule.getScopes().get("COMPILE").put("plus", compilePlus);
+    });
+  }
+
+  private static void checkAvroSchemaExist(Project project, SourceSet sourceSet)
+  {
+    String sourceDir = "src" + File.separatorChar + sourceSet.getName();
+    File avroSourceDir = project.file(sourceDir + File.separatorChar + "avro");
+    if (avroSourceDir.exists())
+    {
+      project.getLogger().lifecycle("{}'s {} has a non-empty avro directory. pegasus plugin does not process the avro directory",
+          project.getName(), sourceDir);
+    }
+  }
+
+  // Compute the name of the source set that will contain a given type of generated code.
+  // e.g. genType may be 'DataTemplate' or 'Rest'
+  private static String getGeneratedSourceSetName(SourceSet sourceSet, String genType)
+  {
+    return sourceSet.getName() + "Generated" + genType;
+  }
+
+  // Compute the directory name that will contain a given type of generated code for an input source set.
+  // e.g. genType may be 'DataTemplate' or 'Rest'
+  public static String getGeneratedDirPath(Project project, SourceSet sourceSet, String genType)
+  {
+    String override = getOverridePath(project, sourceSet, "overrideGeneratedDir");
+    String sourceSetName = getGeneratedSourceSetName(sourceSet, genType);
+    String base = override == null ? "src" : override;
+
+    return base + File.separatorChar + sourceSetName;
+  }
"src" : override; + + return base + File.separatorChar + sourceSetName; + } + + public static String getDataSchemaPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overridePegasusDir"); + if (override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "pegasus"; + } + else + { + return override; + } + } + + private static String getExtensionSchemaPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overrideExtensionSchemaDir"); + if(override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "extensions"; + } + else + { + return override; + } + } + + private static String getSnapshotPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overrideSnapshotDir"); + if (override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "snapshot"; + } + else + { + return override; + } + } + + private static String getIdlPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overrideIdlDir"); + if (override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "idl"; + } + else + { + return override; + } + } + + private static String getPegasusSchemaSnapshotPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE); + if (override == null) + { + return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_SCHEMA_SNAPSHOT_DIR; + } + else + { + return override; + } + } + + private static String getPegasusExtensionSchemaSnapshotPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE); + if (override == null) + { + return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR; + } + else + { + return override; + } + } + + private static String getOverridePath(Project project, SourceSet sourceSet, String overridePropertyName) + { + String sourceSetPropertyName = sourceSet.getName() + '.' + overridePropertyName; + String override = getNonEmptyProperty(project, sourceSetPropertyName); + + if (override == null && sourceSet.getName().equals("main")) + { + override = getNonEmptyProperty(project, overridePropertyName); + } + + return override; + } + + private static boolean isTestSourceSet(SourceSet sourceSet) + { + return TEST_DIR_REGEX.matcher(sourceSet.getName()).find(); + } + + private static Configuration getDataModelConfig(Project project, SourceSet sourceSet) + { + return isTestSourceSet(sourceSet) + ? project.getConfigurations().getByName("testDataModel") + : project.getConfigurations().getByName("dataModel"); + } + + private static boolean isTaskSuccessful(Task task) + { + return task.getState().getExecuted() + // Task is not successful if it is not upto date and is skipped. 
+ && !(task.getState().getSkipped() && !task.getState().getUpToDate()) + && task.getState().getFailure() == null; + } + + private static boolean isResultEquivalent(File compatibilityLogFile) + { + return isResultEquivalent(compatibilityLogFile, false); + } + + private static boolean isResultEquivalent(File compatibilityLogFile, boolean restSpecOnly) + { + CompatibilityLogChecker logChecker = new CompatibilityLogChecker(); + try + { + logChecker.write(Files.readAllBytes(compatibilityLogFile.toPath())); + } + catch (IOException e) + { + throw new GradleException("Error while processing compatibility report: " + e.getMessage()); + } + return logChecker.getRestSpecCompatibility().isEmpty() && + (restSpecOnly || logChecker.getModelCompatibility().isEmpty()); + } + + protected void configureRestModelGeneration(Project project, SourceSet sourceSet) + { + if (sourceSet.getAllSource().isEmpty()) + { + project.getLogger().info("No source files found for sourceSet {}. Skipping idl generation.", sourceSet.getName()); + return; + } + + // afterEvaluate needed so that api project can be overridden via ext.apiProject + project.afterEvaluate(p -> + { + // find api project here instead of in each project's plugin configuration + // this allows api project relation options (ext.api*) to be specified anywhere in the build.gradle file + // alternatively, pass closures to task configuration, and evaluate the closures when task is executed + Project apiProject = getCheckedApiProject(project); + + // make sure the api project is evaluated. Important for configure-on-demand mode. + if (apiProject != null) + { + project.evaluationDependsOn(apiProject.getPath()); + + if (!apiProject.getPlugins().hasPlugin(_thisPluginType)) + { + apiProject = null; + } + } + + if (apiProject == null) + { + return; + } + + Task untypedJarTask = project.getTasks().findByName(sourceSet.getJarTaskName()); + if (!(untypedJarTask instanceof Jar)) + { + return; + } + Jar jarTask = (Jar) untypedJarTask; + + String snapshotCompatPropertyName = findProperty(FileCompatibilityType.SNAPSHOT); + if (project.hasProperty(snapshotCompatPropertyName) && "off".equalsIgnoreCase((String) project.property(snapshotCompatPropertyName))) + { + project.getLogger().lifecycle("Project {} snapshot compatibility level \"OFF\" is deprecated. 
Default to \"IGNORE\".", + project.getPath()); + } + + // generate the rest model + FileCollection restModelCodegenClasspath = project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION) + .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)) + .plus(sourceSet.getRuntimeClasspath()); + String destinationDirPrefix = getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + File.separatorChar; + FileCollection restModelResolverPath = apiProject.files(getDataSchemaPath(project, sourceSet)) + .plus(getDataModelConfig(apiProject, sourceSet)); + Set watchedRestModelInputDirs = buildWatchedRestModelInputDirs(project, sourceSet); + Set restModelInputDirs = difference(sourceSet.getAllSource().getSrcDirs(), + sourceSet.getResources().getSrcDirs()); + + Task generateRestModelTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "restModel"), GenerateRestModelTask.class, task -> + { + task.dependsOn(project.getTasks().getByName(sourceSet.getClassesTaskName())); + task.setCodegenClasspath(restModelCodegenClasspath); + task.setWatchedCodegenClasspath(restModelCodegenClasspath + .filter(file -> !"main".equals(file.getName()) && !"classes".equals(file.getName()))); + task.setInputDirs(restModelInputDirs); + task.setWatchedInputDirs(watchedRestModelInputDirs.isEmpty() + ? restModelInputDirs : watchedRestModelInputDirs); + // we need all the artifacts from runtime for any private implementation classes the server code might need. + task.setSnapshotDestinationDir(project.file(destinationDirPrefix + "snapshot")); + task.setIdlDestinationDir(project.file(destinationDirPrefix + "idl")); + + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + task.setIdlOptions(pegasusOptions.get(sourceSet.getName()).idlOptions); + + task.setResolverPath(restModelResolverPath); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> !isPropertyTrue(project, SKIP_GENERATE_REST_MODEL)); + + task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE))); + }); + + File apiSnapshotDir = apiProject.file(getSnapshotPath(apiProject, sourceSet)); + File apiIdlDir = apiProject.file(getIdlPath(apiProject, sourceSet)); + apiSnapshotDir.mkdirs(); + + if (!isPropertyTrue(project, SKIP_IDL_CHECK)) + { + apiIdlDir.mkdirs(); + } + + CheckRestModelTask checkRestModelTask = project.getTasks() + .create(sourceSet.getTaskName("check", "RestModel"), CheckRestModelTask.class, task -> + { + task.dependsOn(generateRestModelTask); + task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.setPreviousSnapshotDirectory(apiSnapshotDir); + task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.setPreviousIdlDirectory(apiIdlDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setModelCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT)); + task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK)); + + task.doLast(new CacheableAction<>(t -> + { + if (!task.isEquivalent()) + { + _restModelCompatMessage.append(task.getWholeMessage()); + } + })); + }); + + CheckSnapshotTask checkSnapshotTask = project.getTasks() + .create(sourceSet.getTaskName("check", "Snapshot"), CheckSnapshotTask.class, task -> { + task.dependsOn(generateRestModelTask); + 
task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.setPreviousSnapshotDirectory(apiSnapshotDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSnapshotCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT)); + + task.onlyIf(t -> isPropertyTrue(project, SKIP_IDL_CHECK)); + }); + + CheckIdlTask checkIdlTask = project.getTasks() + .create(sourceSet.getTaskName("check", "Idl"), CheckIdlTask.class, task -> + { + task.dependsOn(generateRestModelTask); + task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.setPreviousIdlDirectory(apiIdlDir); + task.setResolverPath(restModelResolverPath); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setIdlCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + + task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK) + && !"OFF".equals(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL))); + }); + + // rest model publishing involves cross-project reference + // configure after all projects have been evaluated + // the file copy can be turned off by "rest.model.noPublish" flag + Task publishRestliSnapshotTask = project.getTasks() + .create(sourceSet.getTaskName("publish", "RestliSnapshot"), PublishRestModelTask.class, task -> + { + task.dependsOn(checkRestModelTask, checkSnapshotTask, checkIdlTask); + task.from(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.into(apiSnapshotDir); + task.setSuffix(SNAPSHOT_FILE_SUFFIX); + + task.onlyIf(t -> + isPropertyTrue(project, SNAPSHOT_FORCE_PUBLISH) || + ( + !isPropertyTrue(project, SNAPSHOT_NO_PUBLISH) && + ( + ( + isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkSnapshotTask) && + checkSnapshotTask.getSummaryTarget().exists() && + !isResultEquivalent(checkSnapshotTask.getSummaryTarget()) + ) || + ( + !isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkRestModelTask) && + checkRestModelTask.getSummaryTarget().exists() && + !isResultEquivalent(checkRestModelTask.getSummaryTarget()) + ) + )) + ); + }); + + Task publishRestliIdlTask = project.getTasks() + .create(sourceSet.getTaskName("publish", "RestliIdl"), PublishRestModelTask.class, task -> { + task.dependsOn(checkRestModelTask, checkIdlTask, checkSnapshotTask); + task.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.into(apiIdlDir); + task.setSuffix(IDL_FILE_SUFFIX); + + task.onlyIf(t -> + isPropertyTrue(project, IDL_FORCE_PUBLISH) || + ( + !isPropertyTrue(project, IDL_NO_PUBLISH) && + ( + ( + isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkSnapshotTask) && + checkSnapshotTask.getSummaryTarget().exists() && + !isResultEquivalent(checkSnapshotTask.getSummaryTarget(), true) + ) || + ( + !isPropertyTrue(project, SKIP_IDL_CHECK) && + ( + (isTaskSuccessful(checkRestModelTask) && + checkRestModelTask.getSummaryTarget().exists() && + !isResultEquivalent(checkRestModelTask.getSummaryTarget(), true)) || + (isTaskSuccessful(checkIdlTask) && + checkIdlTask.getSummaryTarget().exists() && + !isResultEquivalent(checkIdlTask.getSummaryTarget())) + ) + ) + )) + ); + }); + + project.getLogger().info("API project selected for {} is {}", + publishRestliIdlTask.getPath(), apiProject.getPath()); + + 
jarTask.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix));
+      // add generated .restspec.json files as resources to the jar
+      jarTask.dependsOn(publishRestliSnapshotTask, publishRestliIdlTask);
+
+      ChangedFileReportTask changedFileReportTask = (ChangedFileReportTask) project.getTasks()
+          .getByName("changedFilesReport");
+
+      // Use the files from apiDir for generating the changed files report, as we need to notify the user only when
+      // source system files are modified.
+      changedFileReportTask.setIdlFiles(SharedFileUtils.getSuffixedFiles(project, apiIdlDir, IDL_FILE_SUFFIX));
+      changedFileReportTask.setSnapshotFiles(SharedFileUtils.getSuffixedFiles(project, apiSnapshotDir,
+          SNAPSHOT_FILE_SUFFIX));
+      changedFileReportTask.mustRunAfter(publishRestliSnapshotTask, publishRestliIdlTask);
+      changedFileReportTask.doLast(new CacheableAction<>(t ->
+      {
+        if (!changedFileReportTask.getNeedCheckinFiles().isEmpty())
+        {
+          project.getLogger().info("Adding modified files to need checkin list...");
+          _needCheckinFiles.addAll(changedFileReportTask.getNeedCheckinFiles());
+          _needBuildFolders.add(getCheckedApiProject(project).getPath());
+        }
+      }));
+    });
+  }
+
+  protected void configurePegasusSchemaSnapshotGeneration(Project project, SourceSet sourceSet, boolean isExtensionSchema)
+  {
+    File schemaDir = isExtensionSchema ? project.file(getExtensionSchemaPath(project, sourceSet))
+        : project.file(getDataSchemaPath(project, sourceSet));
+
+    if ((isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, PDL_FILE_SUFFIX).isEmpty()) ||
+        (!isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, DATA_TEMPLATE_FILE_SUFFIXES).isEmpty()))
+    {
+      return;
+    }
+
+    Path publishablePegasusSchemaSnapshotDir = project.getBuildDir().toPath().resolve(sourceSet.getName()
+        + (isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT));
+
+    Task generatePegasusSchemaSnapshot = generatePegasusSchemaSnapshot(project, sourceSet,
+        isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT, schemaDir,
+        publishablePegasusSchemaSnapshotDir.toFile(), isExtensionSchema);
+
+    File pegasusSchemaSnapshotDir = project.file(isExtensionSchema ? getPegasusExtensionSchemaSnapshotPath(project, sourceSet)
+        : getPegasusSchemaSnapshotPath(project, sourceSet));
+    pegasusSchemaSnapshotDir.mkdirs();
+
+    Task checkSchemaSnapshot = project.getTasks().create(sourceSet.getTaskName("check",
+        isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT),
+        CheckPegasusSnapshotTask.class, task ->
+        {
+          task.dependsOn(generatePegasusSchemaSnapshot);
+          task.setCurrentSnapshotDirectory(publishablePegasusSchemaSnapshotDir.toFile());
+          task.setPreviousSnapshotDirectory(pegasusSchemaSnapshotDir);
+          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)
+              .plus(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION))
+              .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)));
+          task.setCompatibilityLevel(isExtensionSchema
+              ? PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT)
+              : PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT));
+          task.setCompatibilityMode(isExtensionSchema ? COMPATIBILITY_OPTIONS_MODE_EXTENSION
+              : PropertyUtil.findCompatMode(project, PEGASUS_COMPATIBILITY_MODE));
+          task.setExtensionSchema(isExtensionSchema);
+          task.setHandlerJarPath(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION));
+
+          task.onlyIf(t ->
+          {
+            String pegasusSnapshotCompatPropertyName = isExtensionSchema
+                ? findProperty(FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT)
+                : findProperty(FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT);
+            return !project.hasProperty(pegasusSnapshotCompatPropertyName) ||
+                !"off".equalsIgnoreCase((String) project.property(pegasusSnapshotCompatPropertyName));
+          });
+        });
+
+    Task publishPegasusSchemaSnapshot = publishPegasusSchemaSnapshot(project, sourceSet,
+        isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT, checkSchemaSnapshot,
+        publishablePegasusSchemaSnapshotDir.toFile(), pegasusSchemaSnapshotDir);
+
+    project.getTasks().getByName(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).dependsOn(publishPegasusSchemaSnapshot);
+  }
+
+  @SuppressWarnings("deprecation")
+  protected void configureAvroSchemaGeneration(Project project, SourceSet sourceSet)
+  {
+    File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet));
+    File avroDir = project.file(getGeneratedDirPath(project, sourceSet, AVRO_SCHEMA_GEN_TYPE)
+        + File.separatorChar + "avro");
+
+    // generate avro schema files from data schema
+    Task generateAvroSchemaTask = project.getTasks()
+        .create(sourceSet.getTaskName("generate", "avroSchema"), GenerateAvroSchemaTask.class, task -> {
+          task.setInputDir(dataSchemaDir);
+          task.setDestinationDir(avroDir);
+          task.setResolverPath(getDataModelConfig(project, sourceSet));
+          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+
+          task.onlyIf(t ->
+          {
+            if (task.getInputDir().exists())
+            {
+              @SuppressWarnings("unchecked")
+              Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
+                  .getExtensions().getExtraProperties().get("pegasus");
+
+              if (pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.AVRO))
+              {
+                return true;
+              }
+            }
+
+            return !project.getConfigurations().getByName("avroSchemaGenerator").isEmpty();
+          });
+
+          task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE)));
+        });
+
+    project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(generateAvroSchemaTask);
+
+    // create avro schema jar file
+
+    Task avroSchemaJarTask = project.getTasks().create(sourceSet.getName() + "AvroSchemaJar", Jar.class, task ->
+    {
+      // add path prefix to each file in the data schema directory
+      task.from(avroDir, copySpec ->
+          copySpec.eachFile(fileCopyDetails ->
+              fileCopyDetails.setPath("avro" + File.separatorChar + fileCopyDetails.getPath())));
+
+      // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1
+      // DataHub Note - applied FIXME
+      task.getArchiveAppendix().set(getAppendix(sourceSet, "avro-schema"));
+      task.setDescription("Generate an avro schema jar");
+    });
+
+    if (!isTestSourceSet(sourceSet))
+    {
+      project.getArtifacts().add("avroSchema", avroSchemaJarTask);
+    }
+    else
+    {
+      project.getArtifacts().add("testAvroSchema", avroSchemaJarTask);
+    }
+  }
+
+  protected void configureConversionUtilities(Project project, SourceSet sourceSet)
+  {
+    File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet));
+    boolean reverse = 
isPropertyTrue(project, CONVERT_TO_PDL_REVERSE); + boolean keepOriginal = isPropertyTrue(project, CONVERT_TO_PDL_KEEP_ORIGINAL); + boolean skipVerification = isPropertyTrue(project, CONVERT_TO_PDL_SKIP_VERIFICATION); + String preserveSourceCmd = getNonEmptyProperty(project, CONVERT_TO_PDL_PRESERVE_SOURCE_CMD); + + // Utility task for migrating between PDSC and PDL. + project.getTasks().create(sourceSet.getTaskName("convert", "ToPdl"), TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(dataSchemaDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setPreserveSourceCmd(preserveSourceCmd); + if (reverse) + { + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDSC); + } + else + { + task.setSourceFormat(SchemaFileType.PDSC); + task.setDestinationFormat(SchemaFileType.PDL); + } + task.setKeepOriginal(keepOriginal); + task.setSkipVerification(skipVerification); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> task.getInputDir().exists()); + task.doLast(new CacheableAction<>(t -> + { + project.getLogger().lifecycle("Pegasus schema conversion complete."); + project.getLogger().lifecycle("All pegasus schema files in " + dataSchemaDir + " have been converted"); + project.getLogger().lifecycle("You can use '-PconvertToPdl.reverse=true|false' to change the direction of conversion."); + })); + }); + + // Helper task for reformatting existing PDL schemas by generating them again. + project.getTasks().create(sourceSet.getTaskName("reformat", "Pdl"), TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(dataSchemaDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDL); + task.setKeepOriginal(true); + task.setSkipVerification(true); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> task.getInputDir().exists()); + task.doLast(new CacheableAction<>(t -> project.getLogger().lifecycle("PDL reformat complete."))); + }); + } + + @SuppressWarnings("deprecation") + protected GenerateDataTemplateTask configureDataTemplateGeneration(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + File generatedDataTemplateDir = project.file(getGeneratedDirPath(project, sourceSet, DATA_TEMPLATE_GEN_TYPE) + + File.separatorChar + "java"); + File publishableSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "Schemas"); + File publishableLegacySchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "LegacySchemas"); + File publishableExtensionSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "ExtensionSchemas"); + + // generate data template source files from data schema + GenerateDataTemplateTask generateDataTemplatesTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "dataTemplate"), GenerateDataTemplateTask.class, task -> + { + task.setInputDir(dataSchemaDir); + 
task.setDestinationDir(generatedDataTemplateDir);
+          task.setResolverPath(getDataModelConfig(project, sourceSet));
+          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+          if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE))
+          {
+            task.setGenerateLowercasePath(false);
+          }
+
+          task.onlyIf(t ->
+          {
+            if (task.getInputDir().exists())
+            {
+              @SuppressWarnings("unchecked")
+              Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
+                  .getExtensions().getExtraProperties().get("pegasus");
+
+              return pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.PEGASUS);
+            }
+
+            return false;
+          });
+
+          task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE)));
+        });
+
+    // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar.
+    ((Jar) _generateSourcesJarTask).from(generateDataTemplatesTask.getDestinationDir());
+    _generateSourcesJarTask.dependsOn(generateDataTemplatesTask);
+
+    _generateJavadocTask.source(generateDataTemplatesTask.getDestinationDir());
+    _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath()
+        .plus(project.getConfigurations().getByName("dataTemplateCompile"))
+        .plus(generateDataTemplatesTask.getResolverPath()));
+    _generateJavadocTask.dependsOn(generateDataTemplatesTask);
+
+    // Add extra dependencies for data model compilation
+    project.getDependencies().add("dataTemplateCompile", "com.google.code.findbugs:jsr305:3.0.2");
+
+    // create new source set for generated java source and class files
+    String targetSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE);
+
+    SourceSetContainer sourceSets = project.getConvention()
+        .getPlugin(JavaPluginConvention.class).getSourceSets();
+
+    SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss ->
+    {
+      ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedDataTemplateDir));
+      ss.setCompileClasspath(getDataModelConfig(project, sourceSet)
+          .plus(project.getConfigurations().getByName("dataTemplateCompile")));
+    });
+
+    // idea plugin needs to know about the new generated java source directory and its dependencies
+    addGeneratedDir(project, targetSourceSet, Arrays.asList(
+        getDataModelConfig(project, sourceSet),
+        project.getConfigurations().getByName("dataTemplateCompile")));
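+
+    // For example, for the "main" source set the generated source set is named
+    // "mainGeneratedDataTemplate" and its generated sources land under
+    // src/mainGeneratedDataTemplate/java (unless overrideGeneratedDir is set).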
+    // Set source compatibility to 1.8 as the data-templates now generate code with Java 8 features.
+    JavaCompile compileTask = project.getTasks()
+        .withType(JavaCompile.class).getByName(targetSourceSet.getCompileJavaTaskName());
+    compileTask.doFirst(new CacheableAction<>(task -> {
+      ((JavaCompile) task).setSourceCompatibility("1.8");
+      ((JavaCompile) task).setTargetCompatibility("1.8");
+    }));
+    // make sure that java source files have been generated before compiling them
+    compileTask.dependsOn(generateDataTemplatesTask);
+
+    // Dummy task to maintain backward compatibility
+    // TODO: Delete this task once use cases have had time to reference the new task
+    Task destroyStaleFiles = project.getTasks().create(sourceSet.getName() + "DestroyStaleFiles", Delete.class);
+    destroyStaleFiles.onlyIf(task -> {
+      project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath());
+      return false;
+    });
+
+    // Dummy task to maintain backward compatibility, as this task was replaced by CopySchemas
+    // TODO: Delete this task once use cases have had time to reference the new task
+    Task copyPdscSchemasTask = project.getTasks().create(sourceSet.getName() + "CopyPdscSchemas", Copy.class);
+    copyPdscSchemasTask.dependsOn(destroyStaleFiles);
+    copyPdscSchemasTask.onlyIf(task -> {
+      project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath());
+      return false;
+    });
+
+    // Prepare schema files for publication by syncing schema folders.
+    Task prepareSchemasForPublishTask = project.getTasks()
+        .create(sourceSet.getName() + "CopySchemas", Sync.class, task ->
+        {
+          task.from(dataSchemaDir, syncSpec -> DATA_TEMPLATE_FILE_SUFFIXES.forEach(suffix -> syncSpec.include("**/*" + suffix)));
+          task.into(publishableSchemasBuildDir);
+        });
+    prepareSchemasForPublishTask.dependsOn(copyPdscSchemasTask);
+
+    Collection<Object> dataTemplateJarDepends = new ArrayList<>();
+    dataTemplateJarDepends.add(compileTask);
+    dataTemplateJarDepends.add(prepareSchemasForPublishTask);
+
+    // Convert all PDL files back to PDSC for publication
+    // TODO: Remove this conversion permanently once translated PDSCs are no longer needed.
+    Task prepareLegacySchemasForPublishTask = project.getTasks()
+        .create(sourceSet.getName() + "TranslateSchemas", TranslateSchemasTask.class, task ->
+        {
+          task.setInputDir(dataSchemaDir);
+          task.setDestinationDir(publishableLegacySchemasBuildDir);
+          task.setResolverPath(getDataModelConfig(project, sourceSet));
+          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+          task.setSourceFormat(SchemaFileType.PDL);
+          task.setDestinationFormat(SchemaFileType.PDSC);
+          task.setKeepOriginal(true);
+          task.setSkipVerification(true);
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+        });
+
+    prepareLegacySchemasForPublishTask.dependsOn(destroyStaleFiles);
+    dataTemplateJarDepends.add(prepareLegacySchemasForPublishTask);
+
+    // extension schema directory
+    File extensionSchemaDir = project.file(getExtensionSchemaPath(project, sourceSet));
+
+    if (!SharedFileUtils.getSuffixedFiles(project, extensionSchemaDir, PDL_FILE_SUFFIX).isEmpty())
+    {
+      // Validate extension schemas if extension schemas are provided.
+ ValidateExtensionSchemaTask validateExtensionSchemaTask = project.getTasks() + .create(sourceSet.getTaskName("validate", "ExtensionSchemas"), ValidateExtensionSchemaTask.class, task -> + { + task.setInputDir(extensionSchemaDir); + task.setResolverPath( + getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet)))); + task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + }); + + Task prepareExtensionSchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "CopyExtensionSchemas", Sync.class, task -> + { + task.from(extensionSchemaDir, syncSpec -> syncSpec.include("**/*" + PDL_FILE_SUFFIX)); + task.into(publishableExtensionSchemasBuildDir); + }); + + prepareExtensionSchemasForPublishTask.dependsOn(validateExtensionSchemaTask); + prepareExtensionSchemasForPublishTask.dependsOn(copyPdscSchemasTask); + dataTemplateJarDepends.add(prepareExtensionSchemasForPublishTask); + } + + // include pegasus files in the output of this SourceSet + project.getTasks().withType(ProcessResources.class).getByName(targetSourceSet.getProcessResourcesTaskName(), it -> + { + it.from(prepareSchemasForPublishTask, copy -> copy.into("pegasus")); + // TODO: Remove this permanently once translated PDSCs are no longer needed. + it.from(prepareLegacySchemasForPublishTask, copy -> copy.into(TRANSLATED_SCHEMAS_DIR)); + Sync copyExtensionSchemasTask = project.getTasks().withType(Sync.class).findByName(sourceSet.getName() + "CopyExtensionSchemas"); + if (copyExtensionSchemasTask != null) + { + it.from(copyExtensionSchemasTask, copy -> copy.into("extensions")); + } + }); + + // create data template jar file + Jar dataTemplateJarTask = project.getTasks() + .create(sourceSet.getName() + "DataTemplateJar", Jar.class, task -> + { + task.dependsOn(dataTemplateJarDepends); + task.from(targetSourceSet.getOutput()); + + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "data-template")); + task.setDescription("Generate a data template jar"); + }); + + // add the data model and date template jars to the list of project artifacts. + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("dataTemplate", dataTemplateJarTask); + } + else + { + project.getArtifacts().add("testDataTemplate", dataTemplateJarTask); + } + + // include additional dependencies into the appropriate configuration used to compile the input source set + // must include the generated data template classes and their dependencies the configuration. + // "compile" and "testCompile" configurations have been removed in Gradle 7, + // but to keep the maximum backward compatibility, here we handle Gradle 7 and earlier version differently + // Once MIN_REQUIRED_VERSION reaches 7.0, we can remove the check of isAtLeastGradle7() + String compileConfigName; + if (isAtLeastGradle7()) { + compileConfigName = isTestSourceSet(sourceSet) ? "testImplementation" : project.getConfigurations().findByName("api") != null ? "api" : "implementation"; + } + else + { + compileConfigName = isTestSourceSet(sourceSet) ? 
"testCompile" : "compile"; + } + + Configuration compileConfig = project.getConfigurations().maybeCreate(compileConfigName); + compileConfig.extendsFrom( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("dataTemplateCompile")); + + // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. + // Replace it with getArchiveFile() on Gradle 7, + // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 + // DataHub Note - applied FIXME + project.getDependencies().add(compileConfigName, project.files( + isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); + + if (_configureIvyPublications) { + // The below Action is only applied when the 'ivy-publish' is applied by the consumer. + // If the consumer does not use ivy-publish, this is a noop. + // this Action prepares the project applying the pegasus plugin to publish artifacts using these steps: + // 1. Registers "feature variants" for pegasus-specific artifacts; + // see https://docs.gradle.org/6.1/userguide/feature_variants.html + // 2. Wires legacy configurations like `dataTemplateCompile` to auto-generated feature variant *Api and + // *Implementation configurations for backwards compatibility. + // 3. Configures the Ivy Publication to include auto-generated feature variant *Api and *Implementation + // configurations and their dependencies. + project.getPlugins().withType(IvyPublishPlugin.class, ivyPublish -> { + if (!isAtLeastGradle61()) + { + throw new GradleException("Using the ivy-publish plugin with the pegasus plugin requires Gradle 6.1 or higher " + + "at build time. Please upgrade."); + } + + JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); + // create new capabilities per source set; automatically creates api and implementation configurations + String featureName = mapSourceSetToFeatureName(targetSourceSet); + try + { + /* + reflection is required to preserve compatibility with Gradle 5.2.1 and below + TODO once Gradle 5.3+ is required, remove reflection and replace with: + java.registerFeature(featureName, featureSpec -> { + featureSpec.usingSourceSet(targetSourceSet); + }); + */ + Method registerFeature = JavaPluginExtension.class.getDeclaredMethod("registerFeature", String.class, Action.class); + Action/**/ featureSpecAction = createFeatureVariantFromSourceSet(targetSourceSet); + registerFeature.invoke(java, featureName, featureSpecAction); + } + catch (ReflectiveOperationException e) + { + throw new GradleException("Unable to register new feature variant", e); + } + + // expose transitive dependencies to consumers via variant configurations + Configuration featureConfiguration = project.getConfigurations().getByName(featureName); + Configuration mainGeneratedDataTemplateApi = project.getConfigurations().getByName(targetSourceSet.getApiConfigurationName()); + featureConfiguration.extendsFrom(mainGeneratedDataTemplateApi); + mainGeneratedDataTemplateApi.extendsFrom( + getDataModelConfig(project, targetSourceSet), + project.getConfigurations().getByName("dataTemplateCompile")); + + // Configure the existing IvyPublication + // For backwards-compatibility, make the legacy dataTemplate/testDataTemplate configurations extend + // their replacements, auto-created when we registered the new feature variant + project.afterEvaluate(p -> { + PublishingExtension publishing = p.getExtensions().getByType(PublishingExtension.class); + // When configuring a 
Gradle Publication, use this value to find the name of the publication to configure. Defaults to "ivy". + String publicationName = p.getExtensions().getExtraProperties().getProperties().getOrDefault("PegasusPublicationName", "ivy").toString(); + IvyPublication ivyPublication = publishing.getPublications().withType(IvyPublication.class).getByName(publicationName); + ivyPublication.configurations(configurations -> configurations.create(featureName, legacyConfiguration -> { + legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getApiElementsConfigurationName()).getName()); + legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getRuntimeElementsConfigurationName()).getName()); + })); + }); + }); + } + + if (debug) + { + System.out.println("configureDataTemplateGeneration sourceSet " + sourceSet.getName()); + System.out.println(compileConfigName + ".allDependencies : " + + project.getConfigurations().getByName(compileConfigName).getAllDependencies()); + System.out.println(compileConfigName + ".extendsFrom: " + + project.getConfigurations().getByName(compileConfigName).getExtendsFrom()); + System.out.println(compileConfigName + ".transitive: " + + project.getConfigurations().getByName(compileConfigName).isTransitive()); + } + + project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(dataTemplateJarTask); + return generateDataTemplatesTask; + } + + private String mapSourceSetToFeatureName(SourceSet sourceSet) { + String featureName = ""; + switch (sourceSet.getName()) { + case "mainGeneratedDataTemplate": + featureName = "dataTemplate"; + break; + case "testGeneratedDataTemplate": + featureName = "testDataTemplate"; + break; + case "mainGeneratedRest": + featureName = "restClient"; + break; + case "testGeneratedRest": + featureName = "testRestClient"; + break; + case "mainGeneratedAvroSchema": + featureName = "avroSchema"; + break; + case "testGeneratedAvroSchema": + featureName = "testAvroSchema"; + break; + default: + String msg = String.format("Unable to map %s to an appropriate feature name", sourceSet); + throw new GradleException(msg); + } + return featureName; + } + + // Generate rest client from idl files generated from java source files in the specified source set. + // + // This generates rest client source files from idl file generated from java source files + // in the source set. The generated rest client source files will be in a new source set. + // It also compiles the rest client source files into classes, and creates both the + // rest model and rest client jar files. + // + @SuppressWarnings("deprecation") + protected void configureRestClientGeneration(Project project, SourceSet sourceSet) + { + // idl directory for api project + File idlDir = project.file(getIdlPath(project, sourceSet)); + if (SharedFileUtils.getSuffixedFiles(project, idlDir, IDL_FILE_SUFFIX).isEmpty() && !isPropertyTrue(project, + PROCESS_EMPTY_IDL_DIR)) + { + return; + } + File generatedRestClientDir = project.file(getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + + File.separatorChar + "java"); + + // always include imported data template jars in compileClasspath of rest client + FileCollection dataModelConfig = getDataModelConfig(project, sourceSet); + + // if data templates generated from this source set, add the generated data template jar to compileClasspath + // of rest client. 
+    String dataTemplateSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE);
+
+    Jar dataTemplateJarTask = null;
+
+    SourceSetContainer sourceSets = project.getConvention()
+        .getPlugin(JavaPluginConvention.class).getSourceSets();
+
+    FileCollection dataModels;
+    if (sourceSets.findByName(dataTemplateSourceSetName) != null)
+    {
+      if (debug)
+      {
+        System.out.println("sourceSet " + sourceSet.getName() + " has generated sourceSet " + dataTemplateSourceSetName);
+      }
+      dataTemplateJarTask = (Jar) project.getTasks().getByName(sourceSet.getName() + "DataTemplateJar");
+      // The getArchivePath() API doesn’t carry any task dependency and has been deprecated.
+      // Replace it with getArchiveFile() on Gradle 7,
+      // but keep getArchivePath() to remain backwards-compatible with Gradle versions older than 5.1
+      // DataHub Note - applied FIXME
+      dataModels = dataModelConfig.plus(project.files(
+          isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath()));
+    }
+    else
+    {
+      dataModels = dataModelConfig;
+    }
+
+    // create source set for generated rest model, rest client source and class files.
+    String targetSourceSetName = getGeneratedSourceSetName(sourceSet, REST_GEN_TYPE);
+    SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss ->
+    {
+      ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedRestClientDir));
+      ss.setCompileClasspath(dataModels.plus(project.getConfigurations().getByName("restClientCompile")));
+    });
+
+    project.getPlugins().withType(EclipsePlugin.class, eclipsePlugin -> {
+      EclipseModel eclipseModel = (EclipseModel) project.getExtensions().findByName("eclipse");
+      eclipseModel.getClasspath().getPlusConfigurations()
+          .add(project.getConfigurations().getByName("restClientCompile"));
+    });
+
+    // idea plugin needs to know about new rest client source directory and its dependencies
+    addGeneratedDir(project, targetSourceSet, Arrays.asList(
+        getDataModelConfig(project, sourceSet),
+        project.getConfigurations().getByName("restClientCompile")));
+
+    // generate the rest client source files
+    GenerateRestClientTask generateRestClientTask = project.getTasks()
+        .create(targetSourceSet.getTaskName("generate", "restClient"), GenerateRestClientTask.class, task ->
+        {
+          task.dependsOn(project.getConfigurations().getByName("dataTemplate"));
+          task.setInputDir(idlDir);
+          task.setResolverPath(dataModels.plus(project.getConfigurations().getByName("restClientCompile")));
+          task.setRuntimeClasspath(project.getConfigurations().getByName("dataModel")
+              .plus(project.getConfigurations().getByName("dataTemplate").getArtifacts().getFiles()));
+          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+          task.setDestinationDir(generatedRestClientDir);
+          task.setRestli2FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_2));
+          task.setRestli1FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_1));
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+          if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE))
+          {
+            task.setGenerateLowercasePath(false);
+          }
+          if (isPropertyTrue(project, ENABLE_FLUENT_API))
+          {
+            task.setGenerateFluentApi(true);
+          }
+          task.doFirst(new CacheableAction<>(t -> project.delete(generatedRestClientDir)));
+        });
+
+    if (dataTemplateJarTask != null)
+    {
+      generateRestClientTask.dependsOn(dataTemplateJarTask);
+    }
+
+    // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar.
+ ((Jar) _generateSourcesJarTask).from(generateRestClientTask.getDestinationDir()); + _generateSourcesJarTask.dependsOn(generateRestClientTask); + + _generateJavadocTask.source(generateRestClientTask.getDestinationDir()); + _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() + .plus(project.getConfigurations().getByName("restClientCompile")) + .plus(generateRestClientTask.getResolverPath())); + _generateJavadocTask.dependsOn(generateRestClientTask); + + // make sure rest client source files have been generated before compiling them + JavaCompile compileGeneratedRestClientTask = (JavaCompile) project.getTasks() + .getByName(targetSourceSet.getCompileJavaTaskName()); + compileGeneratedRestClientTask.dependsOn(generateRestClientTask); + compileGeneratedRestClientTask.getOptions().getCompilerArgs().add("-Xlint:-deprecation"); + + // create the rest model jar file + Task restModelJarTask = project.getTasks().create(sourceSet.getName() + "RestModelJar", Jar.class, task -> + { + task.from(idlDir, copySpec -> + { + copySpec.eachFile(fileCopyDetails -> project.getLogger() + .info("Add idl file: {}", fileCopyDetails)); + copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); + }); + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-model")); + task.setDescription("Generate rest model jar"); + }); + + // create the rest client jar file + Task restClientJarTask = project.getTasks() + .create(sourceSet.getName() + "RestClientJar", Jar.class, task -> + { + task.dependsOn(compileGeneratedRestClientTask); + task.from(idlDir, copySpec -> { + copySpec.eachFile(fileCopyDetails -> { + project.getLogger().info("Add interface file: {}", fileCopyDetails); + fileCopyDetails.setPath("idl" + File.separatorChar + fileCopyDetails.getPath()); + }); + copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); + }); + task.from(targetSourceSet.getOutput()); + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-client")); + task.setDescription("Generate rest client jar"); + }); + + // add the rest model jar and the rest client jar to the list of project artifacts. + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("restModel", restModelJarTask); + project.getArtifacts().add("restClient", restClientJarTask); + } + else + { + project.getArtifacts().add("testRestModel", restModelJarTask); + project.getArtifacts().add("testRestClient", restClientJarTask); + } + } + + // Return the appendix for generated jar files. + // The source set name is not included for the main source set. + private static String getAppendix(SourceSet sourceSet, String suffix) + { + return sourceSet.getName().equals("main") ? 
        suffix : sourceSet.getName() + '-' + suffix;
+  }
+
+  private static Project getApiProject(Project project)
+  {
+    if (project.getExtensions().getExtraProperties().has("apiProject"))
+    {
+      return (Project) project.getExtensions().getExtraProperties().get("apiProject");
+    }
+
+    List<String> subsSuffixes;
+    if (project.getExtensions().getExtraProperties().has("apiProjectSubstitutionSuffixes"))
+    {
+      @SuppressWarnings("unchecked")
+      List<String> suffixValue = (List<String>) project.getExtensions()
+          .getExtraProperties().get("apiProjectSubstitutionSuffixes");
+
+      subsSuffixes = suffixValue;
+    }
+    else
+    {
+      subsSuffixes = Arrays.asList("-impl", "-service", "-server", "-server-impl");
+    }
+
+    for (String suffix : subsSuffixes)
+    {
+      if (project.getPath().endsWith(suffix))
+      {
+        String searchPath = project.getPath().substring(0, project.getPath().length() - suffix.length()) + "-api";
+        Project apiProject = project.findProject(searchPath);
+        if (apiProject != null)
+        {
+          return apiProject;
+        }
+      }
+    }
+
+    return project.findProject(project.getPath() + "-api");
+  }
+
+  private static Project getCheckedApiProject(Project project)
+  {
+    Project apiProject = getApiProject(project);
+
+    if (apiProject == project)
+    {
+      throw new GradleException(String.format("The API project of %s must not be itself.", project.getPath()));
+    }
+
+    return apiProject;
+  }
+
+  /**
+   * return the property value if the property exists and is not empty (-Pname=value)
+   * return null if property does not exist or the property is empty (-Pname)
+   *
+   * @param project the project where to look for the property
+   * @param propertyName the name of the property
+   */
+  public static String getNonEmptyProperty(Project project, String propertyName)
+  {
+    if (!project.hasProperty(propertyName))
+    {
+      return null;
+    }
+
+    String propertyValue = project.property(propertyName).toString();
+    if (propertyValue.isEmpty())
+    {
+      return null;
+    }
+
+    return propertyValue;
+  }
+
+  /**
+   * Return true if the given property exists and its value is true
+   *
+   * @param project the project where to look for the property
+   * @param propertyName the name of the property
+   */
+  public static boolean isPropertyTrue(Project project, String propertyName)
+  {
+    return project.hasProperty(propertyName) && Boolean.valueOf(project.property(propertyName).toString());
+  }
+
+  private static String createModifiedFilesMessage(Collection<String> nonEquivExpectedFiles,
+                                                   Collection<String> foldersToBeBuilt)
+  {
+    StringBuilder builder = new StringBuilder();
+    builder.append("\nRemember to check in the changes to the following new or modified files:\n");
+    for (String file : nonEquivExpectedFiles)
+    {
+      builder.append(" ");
+      builder.append(file);
+      builder.append("\n");
+    }
+
+    if (!foldersToBeBuilt.isEmpty())
+    {
+      builder.append("\nThe file modifications include service interface changes; you can build the following projects "
+          + "to re-generate the client APIs accordingly:\n");
+      for (String folder : foldersToBeBuilt)
+      {
+        builder.append(" ");
+        builder.append(folder);
+        builder.append("\n");
+      }
+    }
+
+    return builder.toString();
+  }
+
+  private static String createPossibleMissingFilesMessage(Collection<String> missingFiles)
+  {
+    StringBuilder builder = new StringBuilder();
+    builder.append("If this is the result of an automated build, then you may have forgotten to check in some snapshot or idl files:\n");
+    for (String file : missingFiles)
+    {
+      builder.append(" ");
+      builder.append(file);
+      builder.append("\n");
+    }
+
+    return builder.toString();
+  }
+
+  private static String findProperty(FileCompatibilityType type)
+  {
+    String property;
+    switch (type)
+    {
+      case SNAPSHOT:
+        property = SNAPSHOT_COMPAT_REQUIREMENT;
+        break;
+      case IDL:
+        property = IDL_COMPAT_REQUIREMENT;
+        break;
+      case PEGASUS_SCHEMA_SNAPSHOT:
+        property = PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT;
+        break;
+      case PEGASUS_EXTENSION_SCHEMA_SNAPSHOT:
+        property = PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT;
+        break;
+      default:
+        throw new GradleException("No property defined for compatibility type " + type);
+    }
+    return property;
+  }
+
+  private static Set<File> buildWatchedRestModelInputDirs(Project project, SourceSet sourceSet) {
+    @SuppressWarnings("unchecked")
+    Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
+        .getExtensions().getExtraProperties().get("pegasus");
+
+    File rootPath = new File(project.getProjectDir(),
+        pegasusOptions.get(sourceSet.getName()).restModelOptions.getRestResourcesRootPath());
+
+    IdlOptions idlOptions = pegasusOptions.get(sourceSet.getName()).idlOptions;
+
+    // if idlItems exist, only watch the smaller subset
+    return idlOptions.getIdlItems().stream()
+        .flatMap(idlItem -> Arrays.stream(idlItem.packageNames))
+        .map(packageName -> new File(rootPath, packageName.replace('.', '/')))
+        .collect(Collectors.toCollection(TreeSet::new));
+  }
+
+  private static <T> Set<T> difference(Set<T> left, Set<T> right)
+  {
+    Set<T> result = new HashSet<>(left);
+    result.removeAll(right);
+    return result;
+  }
+
+  /**
+   * Configures the given source set so that its data schema directory (usually 'pegasus') is marked as a resource root.
+   * The purpose of this is to improve the IDE experience. Makes sure to exclude this directory from being packaged in
+   * with the default Jar task.
+   */
+  private static void configureDataSchemaResourcesRoot(Project project, SourceSet sourceSet)
+  {
+    sourceSet.resources(sourceDirectorySet -> {
+      final String dataSchemaPath = getDataSchemaPath(project, sourceSet);
+      final File dataSchemaRoot = project.file(dataSchemaPath);
+      sourceDirectorySet.srcDir(dataSchemaPath);
+      project.getLogger().info("Adding resource root '{}'", dataSchemaPath);
+
+      final String extensionsSchemaPath = getExtensionSchemaPath(project, sourceSet);
+      final File extensionsSchemaRoot = project.file(extensionsSchemaPath);
+      sourceDirectorySet.srcDir(extensionsSchemaPath);
+      project.getLogger().info("Adding resource root '{}'", extensionsSchemaPath);
+
+      // Exclude the data schema and extensions schema directory from being copied into the default Jar task
+      sourceDirectorySet.getFilter().exclude(fileTreeElement -> {
+        final File file = fileTreeElement.getFile();
+        // Traversal starts with the children of a resource root, so checking the direct parent is sufficient
+        final boolean underDataSchemaRoot = dataSchemaRoot.equals(file.getParentFile());
+        final boolean underExtensionsSchemaRoot = extensionsSchemaRoot.equals(file.getParentFile());
+        final boolean exclude = (underDataSchemaRoot || underExtensionsSchemaRoot);
+        if (exclude)
+        {
+          project.getLogger().info("Excluding resource directory '{}'", file);
+        }
+        return exclude;
+      });
+    });
+  }
+
+  private Task generatePegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, File inputDir, File outputDir,
+      boolean isExtensionSchema)
+  {
+    return project.getTasks().create(sourceSet.getTaskName("generate", taskName),
+        GeneratePegasusSnapshotTask.class, task ->
+        {
+          task.setInputDir(inputDir);
+          task.setResolverPath(getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet))));
+          task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+          task.setPegasusSchemaSnapshotDestinationDir(outputDir);
+          task.setExtensionSchema(isExtensionSchema);
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+        });
+  }
+
+  private Task publishPegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, Task checkPegasusSnapshotTask,
+      File inputDir, File outputDir)
+  {
+    return project.getTasks().create(sourceSet.getTaskName("publish", taskName),
+        Sync.class, task ->
+        {
+          task.dependsOn(checkPegasusSnapshotTask);
+          task.from(inputDir);
+          task.into(outputDir);
+          task.onlyIf(t -> !SharedFileUtils.getSuffixedFiles(project, inputDir, PDL_FILE_SUFFIX).isEmpty());
+        });
+  }
+
+  private void checkGradleVersion(Project project)
+  {
+    if (MIN_REQUIRED_VERSION.compareTo(GradleVersion.current()) > 0)
+    {
+      throw new GradleException(String.format("This plugin does not support %s. Please use %s or later.",
+          GradleVersion.current(),
+          MIN_REQUIRED_VERSION));
+    }
+    if (MIN_SUGGESTED_VERSION.compareTo(GradleVersion.current()) > 0)
+    {
+      project.getLogger().warn(String.format("Pegasus supports %s, but it may not be supported in the next major release. Please use %s or later.",
+          GradleVersion.current(),
+          MIN_SUGGESTED_VERSION));
+    }
+  }
+
+  /**
+   * Reflection is necessary to obscure types introduced in Gradle 5.3
+   *
+   * @param sourceSet the target sourceset upon which to create a new feature variant
+   * @return an Action which modifies a org.gradle.api.plugins.FeatureSpec instance
+   */
+  private Action/*<FeatureSpec>*/ createFeatureVariantFromSourceSet(SourceSet sourceSet)
+  {
+    return featureSpec -> {
+      try
+      {
+        Class<?> clazz = Class.forName("org.gradle.api.plugins.FeatureSpec");
+        Method usingSourceSet = clazz.getDeclaredMethod("usingSourceSet", SourceSet.class);
+        usingSourceSet.invoke(featureSpec, sourceSet);
+      }
+      catch (ReflectiveOperationException e)
+      {
+        throw new GradleException("Unable to invoke FeatureSpec#usingSourceSet(SourceSet)", e);
+      }
+    };
+  }
+
+  protected static boolean isAtLeastGradle61()
+  {
+    return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("6.1")) >= 0;
+  }
+
+  public static boolean isAtLeastGradle7() {
+    return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("7.0")) >= 0;
+  }
+}
\ No newline at end of file
diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java
new file mode 100644
index 0000000000000..a2aafaf1be017
--- /dev/null
+++ b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java
@@ -0,0 +1,124 @@
+package com.linkedin.pegasus.gradle.tasks;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.specs.Specs;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.Internal;
+import org.gradle.api.tasks.SkipWhenEmpty;
+import org.gradle.api.tasks.TaskAction;
+import org.gradle.work.FileChange;
+import org.gradle.work.InputChanges;
+
+
+public class ChangedFileReportTask extends DefaultTask
+{
+  private final Collection<String> _needCheckinFiles = new ArrayList<>();
+
+  private FileCollection _idlFiles = getProject().files();
+  private FileCollection _snapshotFiles = getProject().files();
+
+  public ChangedFileReportTask()
+  {
+    // With Gradle 6.0, declaring an incremental task without outputs is not allowed.
+    getOutputs().upToDateWhen(Specs.satisfyNone());
+  }
+
+  // DataHub Note - updated for InputChanges
+  @TaskAction
+  public void checkFilesForChanges(InputChanges inputChanges)
+  {
+    getLogger().lifecycle("Checking idl and snapshot files for changes...");
+    getLogger().info("idlFiles: " + _idlFiles.getAsPath());
+    getLogger().info("snapshotFiles: " + _snapshotFiles.getAsPath());
+
+    Set<String> filesRemoved = new HashSet<>();
+    Set<String> filesAdded = new HashSet<>();
+    Set<String> filesChanged = new HashSet<>();
+
+    if (inputChanges.isIncremental())
+    {
+      Consumer<FileChange> handleChange = change ->
+      {
+        switch (change.getChangeType()) {
+          case ADDED:
+            filesAdded.add(change.getFile().getAbsolutePath());
+            break;
+          case REMOVED:
+            filesRemoved.add(change.getFile().getAbsolutePath());
+            break;
+          case MODIFIED:
+            filesChanged.add(change.getFile().getAbsolutePath());
+            break;
+        }
+      };
+
+      inputChanges.getFileChanges(_idlFiles).forEach(handleChange);
+      inputChanges.getFileChanges(_snapshotFiles).forEach(handleChange);
+
+      if (!filesRemoved.isEmpty())
+      {
+        String files = joinByComma(filesRemoved);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle(
+            "The following files have been removed, be sure to remove them from source control: {}", files);
+      }
+
+      if (!filesAdded.isEmpty())
+      {
+        String files = joinByComma(filesAdded);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle("The following files have been added, be sure to add them to source control: {}", files);
+      }
+
+      if (!filesChanged.isEmpty())
+      {
+        String files = joinByComma(filesChanged);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle(
+            "The following files have been changed, be sure to commit the changes to source control: {}", files);
+      }
+    }
+  }
+
+  private String joinByComma(Set<String> files)
+  {
+    return files.stream().collect(Collectors.joining(", "));
+  }
+
+  @InputFiles
+  @SkipWhenEmpty
+  public FileCollection getSnapshotFiles()
+  {
+    return _snapshotFiles;
+  }
+
+  public void setSnapshotFiles(FileCollection snapshotFiles)
+  {
+    _snapshotFiles = snapshotFiles;
+  }
+
+  @InputFiles
+  @SkipWhenEmpty
+  public FileCollection getIdlFiles()
+  {
+    return _idlFiles;
+  }
+
+  public void setIdlFiles(FileCollection idlFiles)
+  {
+    _idlFiles = idlFiles;
+  }
+
+  @Internal
+  public Collection<String> getNeedCheckinFiles()
+  {
+    return _needCheckinFiles;
+  }
+}
\ No newline at end of file
diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle
index a1b97701dbf88..437c72e6394ea 100644
--- a/datahub-frontend/build.gradle
+++ b/datahub-frontend/build.gradle
@@ -2,6 +2,7 @@ plugins {
   id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
   id 'scala'
   id 'com.palantir.docker'
+  id 'org.gradle.playframework'
 }
 
 apply from: "../gradle/versioning/versioning.gradle"
@@ -20,7 +21,6 @@ model {
 }
 
 task myTar(type: Tar) {
-  extension = "tgz"
   compression = Compression.GZIP
 
   from("${buildDir}/stage")
@@ -119,3 +119,23 @@ task cleanLocalDockerImages {
   }
 }
 dockerClean.finalizedBy(cleanLocalDockerImages)
+
+// gradle 8 fixes
+tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist'
+tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist'
+stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistZip.dependsOn
tasks.getByName('createDatahub-frontendStartScripts') +tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist +tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts +playBinaryDistTar.dependsOn createMainStartScripts +playBinaryDistZip.dependsOn createMainStartScripts +createMainStartScripts.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageMainDist' +createPlayBinaryZipDist.dependsOn 'stageMainDist' diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index dd1ceee411f74..84fb4c02620b8 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -1,4 +1,3 @@ -apply plugin: "org.gradle.playframework" // Change this to listen on a different port project.ext.httpPort = 9001 @@ -101,4 +100,22 @@ play { test { useJUnitPlatform() + + def playJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + //"--add-opens=java.base/java.net=ALL-UNNAMED", + //"--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = playJava17CompatibleJvmArgs } diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index fba0031351b58..6e8cb93966922 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -1,7 +1,8 @@ plugins { + id 'java' id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" } -apply plugin: 'java' + dependencies { implementation project(':metadata-service:restli-client') diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index fd36e5ac4bc2c..72821d8b97dc0 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -1,8 +1,8 @@ plugins { id 'java' + id 'distribution' + id 'com.github.node-gradle.node' } -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' node { @@ -35,7 +35,7 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } @@ -94,7 +94,7 @@ configurations { distZip { dependsOn yarnQuickBuild - baseName 'datahub-web-react' + archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } @@ -112,5 +112,5 @@ jar { into('public') { from zipTree(distZip.outputs.files.first()) } - classifier = 'assets' + archiveClassifier = 'assets' } diff --git 
a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 0c4c229af34f0..17d691177aa34 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -17,7 +17,7 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ ENV LD_LIBRARY_PATH="/lib:/lib64" @@ -25,7 +25,10 @@ ENV LD_LIBRARY_PATH="/lib:/lib64" FROM base as prod-install COPY ./datahub-frontend.zip / -RUN unzip datahub-frontend.zip && rm datahub-frontend.zip +RUN unzip datahub-frontend.zip -d /datahub-frontend \ + && mv /datahub-frontend/main/* /datahub-frontend \ + && rmdir /datahub-frontend/main \ + && rm datahub-frontend.zip COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend diff --git a/docker/datahub-frontend/start.sh b/docker/datahub-frontend/start.sh index 12e6b8915096d..f5de9c87968b0 100755 --- a/docker/datahub-frontend/start.sh +++ b/docker/datahub-frontend/start.sh @@ -49,6 +49,8 @@ export JAVA_OPTS="${JAVA_MEMORY_OPTS:-"-Xms512m -Xmx1024m"} \ -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf \ -Dlogback.configurationFile=datahub-frontend/conf/logback.xml \ -Dlogback.debug=false \ + --add-opens java.base/java.lang=ALL-UNNAMED \ + --add-opens=java.base/java.util=ALL-UNNAMED \ ${PROMETHEUS_AGENT:-} ${OTEL_AGENT:-} \ ${TRUSTSTORE_FILE:-} ${TRUSTSTORE_TYPE:-} ${TRUSTSTORE_PASSWORD:-} \ ${HTTP_PROXY:-} ${HTTPS_PROXY:-} ${NO_PROXY:-} \ diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index 9c79e1da542f0..b26a02c1d3b15 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -40,14 +40,14 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O 
jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 52db594e2ef85..36444210f1938 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -45,9 +45,9 @@ docker { buildArgs(dockerBuildArgs) } -tasks.getByName('docker').dependsOn(['build', - ':docker:datahub-ingestion-base:docker', - ':metadata-ingestion:codegen']) +tasks.getByName('dockerPrepare').dependsOn(['build', + ':docker:datahub-ingestion-base:docker', + ':metadata-ingestion:codegen']) task mkdirBuildDocker { doFirst { diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 5bfa5f35ace17..9b7c6e762462e 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index cc79a3072c193..4da94794e0ead 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY 
--from=binary /go/bin/dockerize /usr/local/bin FROM base as prod-install diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 2beb5b54dac38..00dae87dfc3de 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -38,13 +38,13 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index f6a4b62a79356..53353863b6e5f 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -31,7 +31,7 @@ LABEL name="kafka" version=${KAFKA_VERSION} RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi RUN apk add --no-cache bash coreutils -RUN apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community +RUN apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip RUN mkdir -p /opt \ diff --git a/docs-website/build.gradle b/docs-website/build.gradle index a213ec1ae8194..2644491a2a5f8 100644 --- a/docs-website/build.gradle +++ b/docs-website/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' +plugins { + id 'distribution' + id 'com.github.node-gradle.node' +} node { @@ -12,10 +14,10 @@ node { } // Version of node to use. - version = '16.16.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.1' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). 
 if (project.hasProperty('nodeDistBaseUrl')) {
@@ -31,7 +33,7 @@ node {
   yarnWorkDir = file("${project.projectDir}/.gradle/yarn")
 
   // Set the work directory where node_modules should be located
-  nodeModulesDir = file("${project.projectDir}")
+  nodeProjectDir = file("${project.projectDir}")
 }
 
 /*
@@ -122,7 +124,11 @@ task yarnBuild(type: YarnTask, dependsOn: [yarnLint, yarnGenerate, downloadHisto
   // See https://stackoverflow.com/questions/53230823/fatal-error-ineffective-mark-compacts-near-heap-limit-allocation-failed-java
   // and https://github.com/facebook/docusaurus/issues/8329.
   // TODO: As suggested in https://github.com/facebook/docusaurus/issues/4765, try switching to swc-loader.
-  environment = ['NODE_OPTIONS': '--max-old-space-size=10248']
+  if (project.hasProperty('useSystemNode') && project.getProperty('useSystemNode').toBoolean()) {
+    environment = ['NODE_OPTIONS': '--max-old-space-size=10248']
+  } else {
+    environment = ['NODE_OPTIONS': '--max-old-space-size=10248 --openssl-legacy-provider']
+  }
   args = ['run', 'build']
 }
 
diff --git a/docs-website/vercel-setup.sh b/docs-website/vercel-setup.sh
index db532e167b59f..915635b24ee88 100755
--- a/docs-website/vercel-setup.sh
+++ b/docs-website/vercel-setup.sh
@@ -12,7 +12,7 @@ set -euxo pipefail
 
 yum groupinstall "Development Tools" -y
 yum erase openssl-devel -y
-yum install openssl11 openssl11-devel libffi-devel bzip2-devel wget -y
+yum install openssl11 openssl11-devel libffi-devel bzip2-devel wget nodejs -y
 
 wget https://www.python.org/ftp/python/3.10.11/Python-3.10.11.tgz
 tar -xf Python-3.10.11.tgz
diff --git a/docs/developers.md b/docs/developers.md
index c3c3a59283e66..60d31f5e4523f 100644
--- a/docs/developers.md
+++ b/docs/developers.md
@@ -6,16 +6,12 @@ title: "Local Development"
 
 ## Requirements
 
-- Both [Java 11 JDK](https://openjdk.org/projects/jdk/11/) and [Java 8 JDK](https://openjdk.java.net/projects/jdk8/)
+- [Java 17 JDK](https://openjdk.org/projects/jdk/17/)
 - [Python 3.10](https://www.python.org/downloads/release/python-3100/)
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/)
 - Docker engine with at least 8GB of memory to run tests.
 
-:::caution
-
-Do not try to use a JDK newer than JDK 11. The build process does not currently work with newer JDKs versions.
-
 :::
 
 On macOS, these can be installed using [Homebrew](https://brew.sh/).
@@ -147,11 +143,11 @@ You're probably using a Java version that's too new for gradle. Run the followin
 java --version
 ```
 
-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 17](https://openjdk.org/projects/jdk/17/).
 
 #### Getting `cannot find symbol` error for `javax.annotation.Generated`
 
-Similar to the previous issue, please use Java 1.8 to build the project.
+Similar to the previous issue, please use Java 17 to build the project.
 
 You can install multiple version of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.
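As an aside, instead of juggling `JAVA_HOME` by hand, a build can pin Java 17 via Gradle's toolchain support. The following is a minimal sketch (an illustration, not part of this patch) that assumes the project applies the `java` plugin and runs on Gradle 6.7 or newer:

```groovy
// Minimal sketch, assuming the 'java' plugin is applied (Gradle 6.7+).
// Compilation and tests then use a JDK 17 toolchain even if Gradle itself
// was launched with a different JVM.
java {
    toolchain {
        languageVersion = JavaLanguageVersion.of(17)
    }
}
```

With a toolchain declared, Gradle can locate (or, with a resolver configured, provision) a matching JDK, which sidesteps most `JAVA_HOME` mismatches.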
#### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error
 
diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md
index 36be572f2886e..61ad2d623d72a 100644
--- a/docs/how/updating-datahub.md
+++ b/docs/how/updating-datahub.md
@@ -7,11 +7,15 @@ This file documents any backwards-incompatible changes in DataHub and assists pe
 
 ### Breaking Changes
 
 - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances.
+- Neo4j 5.x may require migration from 4.x
+- Build now requires JDK17 (Runtime Java 11)
 
 ### Potential Downtime
 
 ### Deprecations
 
+- Spark 2.x (including previous JDK8 build requirements)
+
 ### Other Notable Changes
 
 ## 0.12.1
diff --git a/docs/troubleshooting/build.md b/docs/troubleshooting/build.md
index 112bcdc47e956..7b4ae98cdb03b 100644
--- a/docs/troubleshooting/build.md
+++ b/docs/troubleshooting/build.md
@@ -10,11 +10,11 @@ You're probably using a Java version that's too new for gradle. Run the followin
 java --version
 ```
 
-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 17](https://openjdk.org/projects/jdk/17/).
 
 ## Getting `cannot find symbol` error for `javax.annotation.Generated`
 
-Similar to the previous issue, please use Java 1.8 to build the project.
+Similar to the previous issue, please use Java 17 to build the project.
 
 You can install multiple version of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.
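To diagnose such mismatches quickly, a hypothetical guard task along these lines (illustrative only; the task name and message are assumptions, not part of this patch) fails fast when the build runs on the wrong JDK:

```groovy
// Illustrative sketch using Gradle's built-in JavaVersion API; the task
// name 'requireJava17' is an assumption chosen for this example.
tasks.register('requireJava17') {
    doLast {
        def current = JavaVersion.current()
        if (current != JavaVersion.VERSION_17) {
            throw new GradleException("Expected Java 17 but the build is running on Java ${current}")
        }
    }
}
```

`JavaVersion.current()` reports the JVM running Gradle, so a task like this catches a stale `JAVA_HOME` before compilation starts.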
## `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error diff --git a/entity-registry/build.gradle b/entity-registry/build.gradle index 3da0bf5bb4fb8..77cca24c0e723 100644 --- a/entity-registry/build.gradle +++ b/entity-registry/build.gradle @@ -1,10 +1,13 @@ -apply plugin: 'pegasus' -apply plugin: 'java-library' +plugins { + id 'pegasus' + id 'java-library' +} dependencies { implementation spec.product.pegasus.data implementation spec.product.pegasus.generator api project(path: ':metadata-models') + api project(path: ':metadata-models', configuration: "dataTemplate") implementation externalDependency.slf4jApi compileOnly externalDependency.lombok implementation externalDependency.guava diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 4e86b9270786f..bdc9a83b1e652 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/li-utils/build.gradle b/li-utils/build.gradle index 1d5222e39185a..975cd2bccccf3 100644 --- a/li-utils/build.gradle +++ b/li-utils/build.gradle @@ -1,17 +1,9 @@ -apply plugin: 'java-library' -apply plugin: 'pegasus' - -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'java-library' + id 'pegasus' } + dependencies { api spec.product.pegasus.data implementation externalDependency.commonsLang @@ -28,7 +20,7 @@ dependencies { testImplementation externalDependency.commonsIo testImplementation project(':test-models') testImplementation project(path: ':test-models', configuration: 'testDataTemplate') - testImplementation externalDependency.testngJava8 + testImplementation externalDependency.testng } idea { @@ -38,4 +30,4 @@ idea { } // Need to compile backing java parameterDefinitions with the data template. 
-sourceSets.mainGeneratedDataTemplate.java.srcDirs('src/main/javaPegasus/') \ No newline at end of file +sourceSets.mainGeneratedDataTemplate.java.srcDirs('src/main/javaPegasus/') diff --git a/metadata-auth/auth-api/build.gradle b/metadata-auth/auth-api/build.gradle index 7159aa5f15e61..c68c3019bd2b4 100644 --- a/metadata-auth/auth-api/build.gradle +++ b/metadata-auth/auth-api/build.gradle @@ -15,13 +15,12 @@ test { } jar { - archiveName = "$project.name-lib.jar" + archiveClassifier = "lib" } shadowJar { zip64 true - classifier = null - archiveName = "$project.name-${version}.jar" + archiveClassifier = "" exclude "META-INF/*.RSA", "META-INF/*.SF","META-INF/*.DSA" } @@ -39,12 +38,12 @@ dependencies() { } task sourcesJar(type: Jar) { - classifier 'sources' + archiveClassifier = 'sources' from sourceSets.main.allJava } task javadocJar(type: Jar, dependsOn: javadoc) { - classifier 'javadoc' + archiveClassifier = 'javadoc' from javadoc.destinationDir } diff --git a/metadata-events/mxe-utils-avro/build.gradle b/metadata-events/mxe-utils-avro/build.gradle index 3493797ab4f97..98bfb9127b209 100644 --- a/metadata-events/mxe-utils-avro/build.gradle +++ b/metadata-events/mxe-utils-avro/build.gradle @@ -1,8 +1,11 @@ -apply plugin: 'java-library' +plugins { + id 'java-library' +} dependencies { api project(':metadata-events:mxe-avro') api project(':metadata-models') + api project(path: ':metadata-models', configuration: "dataTemplate") api spec.product.pegasus.dataAvro testImplementation externalDependency.testng diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 7ae01faaaabdd..b14953d7ce021 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -14,19 +14,9 @@ import org.apache.tools.ant.filters.ReplaceTokens jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } -} - dependencies { implementation project(':metadata-models') + implementation project(path: ':metadata-models', configuration: "dataTemplate") implementation(externalDependency.kafkaAvroSerializer) { exclude group: "org.apache.avro" } @@ -49,7 +39,7 @@ dependencies { annotationProcessor externalDependency.lombok // VisibleForTesting compileOnly externalDependency.guava - testImplementation externalDependency.testngJava8 + testImplementation externalDependency.testng testImplementation externalDependency.mockito testImplementation externalDependency.mockServer testImplementation externalDependency.mockServerClient @@ -241,4 +231,4 @@ sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/res clean { project.delete("$projectDir/generated") -} +} \ No newline at end of file diff --git a/metadata-integration/java/datahub-protobuf-example/build.gradle b/metadata-integration/java/datahub-protobuf-example/build.gradle index 4e53d8ed763ba..1efb43360457a 100644 --- a/metadata-integration/java/datahub-protobuf-example/build.gradle +++ b/metadata-integration/java/datahub-protobuf-example/build.gradle @@ -64,10 +64,6 @@ protobuf { task publishSchema(dependsOn: build) { description "Publishes protobuf schema in the `main` sourceSet to DataHub" - def javaLauncher = 
javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) - } - fileTree("schema").matching { exclude "protobuf/meta/**" }.each {f -> diff --git a/metadata-integration/java/datahub-protobuf/build.gradle b/metadata-integration/java/datahub-protobuf/build.gradle index bc919119f8fac..2cb36a14cb9c7 100644 --- a/metadata-integration/java/datahub-protobuf/build.gradle +++ b/metadata-integration/java/datahub-protobuf/build.gradle @@ -12,12 +12,6 @@ apply from: '../versioning.gradle' jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation -afterEvaluate { - if (project.plugins.hasPlugin('java')) { - sourceCompatibility = 11 - targetCompatibility = 11 - } -} ext { javaMainClass = "datahub.protobuf.Proto2DataHub" } @@ -211,4 +205,4 @@ nexusStaging { password = System.getenv("NEXUS_PASSWORD") } - +startScripts.dependsOn shadowJar \ No newline at end of file diff --git a/metadata-integration/java/examples/build.gradle b/metadata-integration/java/examples/build.gradle index 581e9f82da0dc..ddf574e8c8905 100644 --- a/metadata-integration/java/examples/build.gradle +++ b/metadata-integration/java/examples/build.gradle @@ -1,16 +1,6 @@ -apply plugin: 'java' -apply plugin: 'jacoco' - - -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'java' + id 'jacoco' } dependencies { diff --git a/metadata-integration/java/spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/build.gradle index 7143ac4833143..c5dd9b5012c29 100644 --- a/metadata-integration/java/spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/build.gradle @@ -11,17 +11,6 @@ apply from: '../versioning.gradle' jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } -} - //to rename artifacts for publish project.archivesBaseName = 'datahub-'+project.name @@ -34,18 +23,19 @@ configurations { dependencies { - //Needed for tie breaking of guava version need for spark and wiremock - provided(externalDependency.hadoopMapreduceClient) { - force = true + constraints { + provided(externalDependency.hadoopMapreduceClient) { + because 'Needed for tie breaking of guava version need for spark and wiremock' + } + provided(externalDependency.hadoopCommon) { + because 'required for org.apache.hadoop.util.StopWatch' + } + provided(externalDependency.commonsIo) { + because 'required for org.apache.commons.io.Charsets that is used internally' + } } - provided(externalDependency.hadoopCommon) { - force = true - } // required for org.apache.hadoop.util.StopWatch - - provided(externalDependency.commonsIo) { - force = true - } // required for org.apache.commons.io.Charsets that is used internally + provided 'org.scala-lang:scala-library:2.12.18' implementation externalDependency.slf4jApi compileOnly externalDependency.lombok @@ -86,7 +76,7 @@ task checkShadowJar(type: Exec) { shadowJar { zip64=true - classifier='' + archiveClassifier = '' mergeServiceFiles() def exclude_modules = project @@ -107,7 +97,7 @@ shadowJar { // 
preventing java multi-release JAR leakage // https://github.com/johnrengelman/shadow/issues/729 - exclude('module-info.class', 'META-INF/versions/**') + exclude('module-info.class', 'META-INF/versions/**', 'LICENSE', 'NOTICE') // prevent jni conflict with spark exclude '**/libzstd-jni.*' @@ -138,6 +128,25 @@ jacocoTestReport { test { forkEvery = 1 useJUnit() + + def sparkJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + "--add-opens=java.base/java.net=ALL-UNNAMED", + "--add-opens=java.base/java.nio=ALL-UNNAMED", + //"--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = sparkJava17CompatibleJvmArgs + finalizedBy jacocoTestReport } @@ -151,12 +160,12 @@ task integrationTest(type: Exec, dependsOn: [shadowJar, ':docker:quickstartSlim' } task sourcesJar(type: Jar) { - classifier 'sources' + archiveClassifier = 'sources' from sourceSets.main.allJava } task javadocJar(type: Jar, dependsOn: javadoc) { - classifier 'javadoc' + archiveClassifier = 'javadoc' from javadoc.destinationDir } @@ -224,3 +233,12 @@ nexusStaging { username = System.getenv("NEXUS_USERNAME") password = System.getenv("NEXUS_PASSWORD") } + +task cleanExtraDirs { + delete "$projectDir/derby.log" + delete "$projectDir/src/test/resources/data/hive" + delete "$projectDir/src/test/resources/data/out.csv" + delete "$projectDir/src/test/resources/data/out_persist.csv" + delete "$projectDir/spark-smoke-test/venv" +} +clean.finalizedBy(cleanExtraDirs) diff --git a/metadata-integration/java/spark-lineage/scripts/check_jar.sh b/metadata-integration/java/spark-lineage/scripts/check_jar.sh index dd9cae68f31cb..275b91304e7ee 100755 --- a/metadata-integration/java/spark-lineage/scripts/check_jar.sh +++ b/metadata-integration/java/spark-lineage/scripts/check_jar.sh @@ -34,7 +34,9 @@ jar -tvf $jarFile |\ grep -v "linux/" |\ grep -v "darwin" |\ grep -v "MetadataChangeProposal.avsc" |\ - grep -v "aix" + grep -v "aix" |\ + grep -v "library.properties" |\ + grep -v "rootdoc.txt" if [ $? 
-ne 0 ]; then echo "✅ No unexpected class paths found in ${jarFile}" diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile b/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile index 119338be6c2a9..21d0701fcfcd6 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile @@ -17,7 +17,7 @@ RUN apt-get update -y && \ apt-get install /tmp/zulu-repo_1.0.0-3_all.deb && \ apt-get update && \ # apt-cache search zulu && \ - apt-get install -y --no-install-recommends zulu11-jre && \ + apt-get install -y --no-install-recommends zulu17-jre && \ apt-get clean && \ curl -sS https://archive.apache.org/dist/spark/spark-${spark_version}/spark-${spark_version}-bin-hadoop${hadoop_version}.tgz -o spark.tgz && \ tar -xf spark.tgz && \ diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh b/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh index 429f692500c80..c06e2faec0bcb 100755 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh @@ -7,25 +7,24 @@ saluation () { echo "--------------------------------------------------------" - echo "Starting execution $1" + echo "Starting execution $1 (properties: $2)" echo "--------------------------------------------------------" } -saluation "HdfsIn2HdfsOut1.py" - +saluation "HdfsIn2HdfsOut1.py" $2 spark-submit --properties-file $2 HdfsIn2HdfsOut1.py -saluation "HdfsIn2HdfsOut2.py" +saluation "HdfsIn2HdfsOut2.py" $2 spark-submit --properties-file $2 HdfsIn2HdfsOut2.py -saluation "HdfsIn2HiveCreateTable.py" +saluation "HdfsIn2HiveCreateTable.py" $2 spark-submit --properties-file $2 HdfsIn2HiveCreateTable.py -saluation "HdfsIn2HiveCreateInsertTable.py" +saluation "HdfsIn2HiveCreateInsertTable.py" $2 spark-submit --properties-file $2 HdfsIn2HiveCreateInsertTable.py -saluation "HiveInHiveOut.py" +saluation "HiveInHiveOut.py" $2 spark-submit --properties-file $2 HiveInHiveOut.py diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf b/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf index 43103c3db65ad..a511d9f114f2b 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf @@ -4,3 +4,7 @@ spark.jars file:///opt/workspace/datahub-spark-lineage*.jar spark.extraListeners datahub.spark.DatahubSparkListener spark.datahub.rest.server http://datahub-gms:8080 + +spark.driver.extraJavaOptions --add-opens java.base/java.lang=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED +spark.executor.extraJavaOptions --add-opens java.base/java.lang=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED + diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle index 12aa1775d6104..6337f8c9beec6 100644 --- 
a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle @@ -17,17 +17,6 @@ repositories { jcenter() } -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } -} - dependencies { implementation 'org.apache.spark:spark-sql_2.11:2.4.8' } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java index 2df468fc03e74..053055716eaa0 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java @@ -37,7 +37,7 @@ public class TestCoalesceJobLineage { private static final String APP_NAME = "sparkCoalesceTestApp"; - private static final String TEST_RELATIVE_PATH = "../"; + private static final String TEST_RELATIVE_PATH = ""; private static final String RESOURCE_DIR = "src/test/resources"; private static final String DATA_DIR = TEST_RELATIVE_PATH + RESOURCE_DIR + "/data"; private static final String WAREHOUSE_LOC = DATA_DIR + "/hive/warehouse/coalesce"; @@ -142,6 +142,9 @@ public void setup() { "spark.datahub.parent.datajob_urn", "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .config( + "javax.jdo.option.ConnectionURL", + "jdbc:derby:;databaseName=build/tmp/metastore_db_coalesce;create=true") .enableHiveSupport() .getOrCreate(); diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index 3a70c10e0c1f9..fa896814d16f6 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -191,6 +191,9 @@ public static void setup() { .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .config( + "javax.jdo.option.ConnectionURL", + "jdbc:derby:;databaseName=build/tmp/metastore_db_spark;create=true") .enableHiveSupport() .getOrCreate(); diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 48f80f06d07c2..568b99acdf894 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -62,7 +62,10 @@ dependencies { testImplementation externalDependency.h2 testImplementation externalDependency.mysqlConnector testImplementation externalDependency.neo4jHarness - testImplementation (externalDependency.neo4jApoc) { + testImplementation (externalDependency.neo4jApocCore) { + exclude group: 'org.yaml', module: 'snakeyaml' + } + testImplementation (externalDependency.neo4jApocCommon) { exclude group: 'org.yaml', module: 'snakeyaml' } testImplementation externalDependency.mockito diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index 217d54c5c0b0f..c8d3147711eba 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -432,8 +432,8 @@ private Pair> generateLineageStatementAndParameters( + "(b)) " + "WHERE a <> b " + " AND ALL(rt IN relationships(path) WHERE " - + " (EXISTS(rt.source) AND rt.source = 'UI') OR " - + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " + + " (rt.source IS NOT NULL AND rt.source = 'UI') OR " + + " (rt.createdOn IS NULL AND rt.updatedOn IS NULL) OR " + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " + " ) " diff --git a/metadata-jobs/mae-consumer/build.gradle b/metadata-jobs/mae-consumer/build.gradle index fcb8b62e4ac9d..2e068d5a3501e 100644 --- a/metadata-jobs/mae-consumer/build.gradle +++ b/metadata-jobs/mae-consumer/build.gradle @@ -60,6 +60,7 @@ task avroSchemaSources(type: Copy) { } compileJava.dependsOn avroSchemaSources +processResources.dependsOn avroSchemaSources clean { project.delete("src/main/resources/avro") diff --git a/metadata-jobs/mce-consumer/build.gradle b/metadata-jobs/mce-consumer/build.gradle index 97eec9fcff051..5fa65c06de714 100644 --- a/metadata-jobs/mce-consumer/build.gradle +++ b/metadata-jobs/mce-consumer/build.gradle @@ -1,8 +1,8 @@ plugins { id 'java' + id 'pegasus' } -apply plugin: 'pegasus' configurations { avro @@ -49,6 +49,7 @@ task avroSchemaSources(type: Copy) { } compileJava.dependsOn avroSchemaSources +processResources.dependsOn avroSchemaSources clean { project.delete("src/main/resources/avro") diff --git a/metadata-jobs/pe-consumer/build.gradle b/metadata-jobs/pe-consumer/build.gradle index 81e8b8c9971f0..2fd19af92971e 100644 --- a/metadata-jobs/pe-consumer/build.gradle +++ b/metadata-jobs/pe-consumer/build.gradle @@ -1,7 +1,7 @@ plugins { id 'java' + id 'pegasus' } -apply plugin: 'pegasus' configurations { avro @@ -37,6 +37,7 @@ task avroSchemaSources(type: Copy) { } compileJava.dependsOn avroSchemaSources +processResources.dependsOn avroSchemaSources clean { project.delete("src/main/resources/avro") diff --git a/metadata-models-custom/build.gradle b/metadata-models-custom/build.gradle index 71d3b0fd1f736..3ac08dca7c0db 100644 --- a/metadata-models-custom/build.gradle +++ b/metadata-models-custom/build.gradle @@ -16,8 +16,8 @@ buildscript { plugins { id 'base' id 'maven-publish' + id 'pegasus' } -apply plugin: 'pegasus' if (project.hasProperty('projVersion')) { project.version = project.projVersion diff --git a/metadata-models-validator/build.gradle b/metadata-models-validator/build.gradle index c8d1d2e6651d6..1dae53e817ae1 100644 --- a/metadata-models-validator/build.gradle +++ b/metadata-models-validator/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(":entity-registry") diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index e90a4042c1921..04c90fa444f0c 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -1,20 +1,12 @@ import io.datahubproject.GenerateJsonSchemaTask -apply plugin: 'java-library' -apply plugin: 'pegasus' -apply plugin: 'org.hidetake.swagger.generator' - -tasks.withType(JavaCompile).configureEach { - javaCompiler = 
javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'pegasus' + id 'java-library' + id 'org.hidetake.swagger.generator' } + dependencies { api spec.product.pegasus.data constraints { @@ -35,7 +27,7 @@ dependencies { swaggerCodegen externalDependency.swaggerCli testImplementation externalDependency.guava - testImplementation externalDependency.testngJava8 + testImplementation externalDependency.testng } sourceSets { diff --git a/metadata-service/auth-config/build.gradle b/metadata-service/auth-config/build.gradle index c7a1128897dd5..8302e3b0c2fe6 100644 --- a/metadata-service/auth-config/build.gradle +++ b/metadata-service/auth-config/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(path: ':metadata-models') diff --git a/metadata-service/auth-filter/build.gradle b/metadata-service/auth-filter/build.gradle index 61e9015adc942..9d763ca11421b 100644 --- a/metadata-service/auth-filter/build.gradle +++ b/metadata-service/auth-filter/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-auth:auth-api') diff --git a/metadata-service/auth-impl/build.gradle b/metadata-service/auth-impl/build.gradle index 60d622dea5447..4f4b0658caf24 100644 --- a/metadata-service/auth-impl/build.gradle +++ b/metadata-service/auth-impl/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} compileJava { diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java index d5d5b0c4e6c71..f03113f3eb9bd 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java @@ -8,7 +8,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; @@ -68,7 +68,7 @@ public void shouldReturnFieldValueWithResourceSpecIfTypeIsDataPlatformInstance() assertEquals( Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); - verifyZeroInteractions(entityClientMock); + verifyNoMoreInteractions(entityClientMock); } @Test diff --git a/metadata-service/auth-servlet-impl/build.gradle b/metadata-service/auth-servlet-impl/build.gradle index 7945b3b4e9a06..b8310bbd4ebc0 100644 --- a/metadata-service/auth-servlet-impl/build.gradle +++ b/metadata-service/auth-servlet-impl/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-auth:auth-api') diff --git a/metadata-service/factories/build.gradle b/metadata-service/factories/build.gradle index 86644e3b034da..145ec7e65188c 100644 --- a/metadata-service/factories/build.gradle +++ 
b/metadata-service/factories/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java-library' +plugins { + id 'java-library' +} dependencies { api project(':metadata-io') diff --git a/metadata-service/graphql-servlet-impl/build.gradle b/metadata-service/graphql-servlet-impl/build.gradle index 51f67631159d3..5767698242118 100644 --- a/metadata-service/graphql-servlet-impl/build.gradle +++ b/metadata-service/graphql-servlet-impl/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':datahub-graphql-core') diff --git a/metadata-service/openapi-servlet/build.gradle b/metadata-service/openapi-servlet/build.gradle index 1909b4862d294..0430d4427528d 100644 --- a/metadata-service/openapi-servlet/build.gradle +++ b/metadata-service/openapi-servlet/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { diff --git a/metadata-service/plugin/build.gradle b/metadata-service/plugin/build.gradle index 00a6384b923a0..3f91b8f6ae6ba 100644 --- a/metadata-service/plugin/build.gradle +++ b/metadata-service/plugin/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { @@ -30,4 +32,4 @@ test { clean { dependsOn ':metadata-service:plugin:src:test:sample-test-plugins:clean' -} +} \ No newline at end of file diff --git a/metadata-service/plugin/src/test/sample-test-plugins/build.gradle b/metadata-service/plugin/src/test/sample-test-plugins/build.gradle index f299a35db0f64..d4b2b4c92ad63 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/build.gradle +++ b/metadata-service/plugin/src/test/sample-test-plugins/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} jar { archiveFileName = "sample-plugins.jar" diff --git a/metadata-service/restli-api/build.gradle b/metadata-service/restli-api/build.gradle index 352738d01f8da..505320e8267ee 100644 --- a/metadata-service/restli-api/build.gradle +++ b/metadata-service/restli-api/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'pegasus' +plugins { + id 'pegasus' +} dependencies { dataModel project(':metadata-models') @@ -17,4 +19,4 @@ dependencies { because("CVE-2023-1428, CVE-2023-32731") } } -} \ No newline at end of file +} diff --git a/metadata-service/restli-client/build.gradle b/metadata-service/restli-client/build.gradle index 7cad1981ad911..86336755dc095 100644 --- a/metadata-service/restli-client/build.gradle +++ b/metadata-service/restli-client/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'pegasus' -apply plugin: 'java-library' +plugins { + id 'pegasus' + id 'java-library' +} dependencies { api project(':metadata-service:restli-api') diff --git a/metadata-service/restli-servlet-impl/build.gradle b/metadata-service/restli-servlet-impl/build.gradle index de6fb6690e693..ec5b645ee233c 100644 --- a/metadata-service/restli-servlet-impl/build.gradle +++ b/metadata-service/restli-servlet-impl/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'java' -apply plugin: 'pegasus' +plugins { + id 'java' + id 'pegasus' +} sourceSets { integTest { diff --git a/metadata-service/schema-registry-api/build.gradle b/metadata-service/schema-registry-api/build.gradle index 077d7d4f2d6a4..c146d5202fef9 100644 --- a/metadata-service/schema-registry-api/build.gradle +++ b/metadata-service/schema-registry-api/build.gradle @@ -1,5 +1,8 @@ -apply plugin: 'java' -apply plugin: 'org.hidetake.swagger.generator' +plugins { + id 'org.hidetake.swagger.generator' + id 'java' +} + dependencies { // Dependencies for open api diff --git 
a/metadata-service/schema-registry-servlet/build.gradle b/metadata-service/schema-registry-servlet/build.gradle index 554ac696c94fd..7bab51d51a86c 100644 --- a/metadata-service/schema-registry-servlet/build.gradle +++ b/metadata-service/schema-registry-servlet/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-service:factories') diff --git a/metadata-service/services/build.gradle b/metadata-service/services/build.gradle index b6af3d330d185..c683b0c75f40a 100644 --- a/metadata-service/services/build.gradle +++ b/metadata-service/services/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'java' -apply plugin: 'org.hidetake.swagger.generator' +plugins { + id 'org.hidetake.swagger.generator' + id 'java' +} configurations { enhance diff --git a/metadata-service/servlet/build.gradle b/metadata-service/servlet/build.gradle index eb2cd9c2d3de7..f961bf6a9de7e 100644 --- a/metadata-service/servlet/build.gradle +++ b/metadata-service/servlet/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-io') diff --git a/metadata-utils/build.gradle b/metadata-utils/build.gradle index 7bc6aa2d43442..3d65675219624 100644 --- a/metadata-utils/build.gradle +++ b/metadata-utils/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java-library' +plugins { + id 'java-library' +} dependencies { api externalDependency.avro diff --git a/mock-entity-registry/build.gradle b/mock-entity-registry/build.gradle index 12d7e58eee0a1..8242d6451dd60 100644 --- a/mock-entity-registry/build.gradle +++ b/mock-entity-registry/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':entity-registry') diff --git a/smoke-test/build.gradle b/smoke-test/build.gradle index ee0ea3c7be384..1614a4b8527dc 100644 --- a/smoke-test/build.gradle +++ b/smoke-test/build.gradle @@ -11,10 +11,10 @@ node { } // Version of node to use. - version = '16.8.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.1' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). 
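  // Context for the version bumps in this block (an assumption about intent,
  // not stated in the patch): Node 17+ ships OpenSSL 3, which drops legacy
  // digests that older build tooling relies on, which is commonly why
  // NODE_OPTIONS '--openssl-legacy-provider' is set on the yarnInstall task
  // below; 'nodeProjectDir' further down is the property name newer versions
  // of the gradle node plugin use in place of 'nodeModulesDir'.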
if (project.hasProperty('nodeDistBaseUrl')) { @@ -30,11 +30,12 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } task yarnInstall(type: YarnTask) { println "Root directory: ${project.rootDir}"; + environment = ['NODE_OPTIONS': '--openssl-legacy-provider'] args = ['install', '--cwd', "${project.rootDir}/smoke-test/tests/cypress"] } \ No newline at end of file diff --git a/test-models/build.gradle b/test-models/build.gradle index c74f7249fa1d9..e8733f0525870 100644 --- a/test-models/build.gradle +++ b/test-models/build.gradle @@ -1,17 +1,9 @@ -apply plugin: 'pegasus' -apply plugin: 'java-library' - -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'pegasus' + id 'java-library' } + dependencies { implementation spec.product.pegasus.data implementation externalDependency.commonsIo diff --git a/vercel.json b/vercel.json index d5515e68b05bd..a1815cab8ae88 100644 --- a/vercel.json +++ b/vercel.json @@ -1,5 +1,5 @@ { - "buildCommand": "./gradlew :docs-website:build", + "buildCommand": "./gradlew -PuseSystemNode=true :docs-website:build", "github": { "silent": true, "autoJobCancelation": true From caef6771b828d8ee94f76801a9121f4e1a2e7561 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 15 Dec 2023 15:07:56 -0500 Subject: [PATCH 258/792] feat(ingest/redshift): drop repeated operations (#9440) --- metadata-ingestion/setup.py | 6 +- .../ingestion/source/redshift/report.py | 3 +- .../ingestion/source/redshift/usage.py | 68 +++++++++++++++++-- .../redshift-usage/test_redshift_usage.py | 54 ++++++++++++++- 4 files changed, 121 insertions(+), 10 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 5d15d7167b63e..1bc1bc5100b08 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -368,7 +368,11 @@ | {"psycopg2-binary", "pymysql>=1.0.2"}, "pulsar": {"requests"}, "redash": {"redash-toolbelt", "sql-metadata"} | sqllineage_lib, - "redshift": sql_common | redshift_common | usage_common | sqlglot_lib, + "redshift": sql_common + | redshift_common + | usage_common + | sqlglot_lib + | {"cachetools"}, "s3": {*s3_base, *data_lake_profiling}, "gcs": {*s3_base, *data_lake_profiling}, "sagemaker": aws_common, diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py index b845580f35939..333c851650fb3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py @@ -29,7 +29,8 @@ class RedshiftReport(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowRep lineage_mem_size: Dict[str, str] = field(default_factory=TopKDict) tables_in_mem_size: Dict[str, str] = field(default_factory=TopKDict) views_in_mem_size: Dict[str, str] = field(default_factory=TopKDict) - num_operational_stats_skipped: int = 0 + num_operational_stats_filtered: int = 0 + num_repeated_operations_dropped: int = 0 num_usage_stat_skipped: int = 0 num_lineage_tables_dropped: int = 0 num_lineage_dropped_query_parser: int = 0 diff --git 
a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py index c789e605b9c29..409027a8805a0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py @@ -4,6 +4,7 @@ from datetime import datetime from typing import Callable, Dict, Iterable, List, Optional, Tuple, Union +import cachetools import pydantic.error_wrappers import redshift_connector from pydantic.fields import Field @@ -251,7 +252,7 @@ def _get_workunits_internal( ) -> Iterable[MetadataWorkUnit]: self.report.num_usage_workunits_emitted = 0 self.report.num_usage_stat_skipped = 0 - self.report.num_operational_stats_skipped = 0 + self.report.num_operational_stats_filtered = 0 if self.config.include_operational_stats: self.report.report_ingestion_stage_start(USAGE_EXTRACTION_OPERATIONAL_STATS) @@ -304,8 +305,13 @@ def _gen_operation_aspect_workunits( ) # Generate operation aspect work units from the access events - yield from self._gen_operation_aspect_workunits_from_access_events( - access_events_iterable, all_tables=all_tables + yield from ( + mcpw.as_workunit() + for mcpw in self._drop_repeated_operations( + self._gen_operation_aspect_workunits_from_access_events( + access_events_iterable, all_tables=all_tables + ) + ) ) def _should_process_event( @@ -366,11 +372,61 @@ def _gen_access_events_from_history_query( yield access_event results = cursor.fetchmany() + def _drop_repeated_operations( + self, events: Iterable[MetadataChangeProposalWrapper] + ) -> Iterable[MetadataChangeProposalWrapper]: + """Drop repeated operations on the same entity. + + ASSUMPTION: Events are ordered by lastUpdatedTimestamp, descending. + + Operations are only dropped if they were within 1 minute of each other, + and have the same operation type, user, and entity. + + This is particularly useful when we see a string of insert operations + that are all really part of the same overall operation. + """ + + OPERATION_CACHE_MAXSIZE = 1000 + DROP_WINDOW_SEC = 10 + + # All timestamps are in milliseconds. + timestamp_low_watermark = 0 + + def timer(): + return -timestamp_low_watermark + + # dict of entity urn -> (last event's actor, operation type) + # TODO: Remove the type ignore and use TTLCache[key_type, value_type] directly once that's supported in Python 3.9. 
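+        # Note on the timer trick above: TTLCache expects a monotonically
+        # increasing clock, but events are processed in *descending*
+        # lastUpdatedTimestamp order, so timestamp_low_watermark only ever
+        # decreases. Negating it yields an increasing clock, and a cached
+        # (actor, operationType) entry expires once the event stream has
+        # moved more than DROP_WINDOW_SEC * 1000 ms past the event that
+        # last touched it.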
+ last_events: Dict[str, Tuple[Optional[str], str]] = cachetools.TTLCache( # type: ignore[assignment] + maxsize=OPERATION_CACHE_MAXSIZE, ttl=DROP_WINDOW_SEC * 1000, timer=timer + ) + + for event in events: + assert isinstance(event.aspect, OperationClass) + + timestamp_low_watermark = min( + timestamp_low_watermark, event.aspect.lastUpdatedTimestamp + ) + + urn = event.entityUrn + assert urn + assert isinstance(event.aspect.operationType, str) + value: Tuple[Optional[str], str] = ( + event.aspect.actor, + event.aspect.operationType, + ) + if urn in last_events and last_events[urn] == value: + self.report.num_repeated_operations_dropped += 1 + continue + + last_events[urn] = value + yield event + def _gen_operation_aspect_workunits_from_access_events( self, events_iterable: Iterable[RedshiftAccessEvent], all_tables: Dict[str, Dict[str, List[Union[RedshiftView, RedshiftTable]]]], - ) -> Iterable[MetadataWorkUnit]: + ) -> Iterable[MetadataChangeProposalWrapper]: self.report.num_operational_stats_workunits_emitted = 0 for event in events_iterable: if not ( @@ -384,7 +440,7 @@ def _gen_operation_aspect_workunits_from_access_events( continue if not self._should_process_event(event, all_tables=all_tables): - self.report.num_operational_stats_skipped += 1 + self.report.num_operational_stats_filtered += 1 continue assert event.operation_type in ["insert", "delete"] @@ -406,7 +462,7 @@ def _gen_operation_aspect_workunits_from_access_events( resource: str = f"{event.database}.{event.schema_}.{event.table}".lower() yield MetadataChangeProposalWrapper( entityUrn=self.dataset_urn_builder(resource), aspect=operation_aspect - ).as_workunit() + ) self.report.num_operational_stats_workunits_emitted += 1 def _aggregate_access_events( diff --git a/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py b/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py index 74eec82b39ba3..a9eebb8d54154 100644 --- a/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py +++ b/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py @@ -2,11 +2,11 @@ import pathlib from pathlib import Path from typing import Dict, List, Union -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch from freezegun import freeze_time -from datahub.emitter.mce_builder import make_dataset_urn +from datahub.emitter.mce_builder import make_dataset_urn, make_user_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.sink.file import write_metadata_file from datahub.ingestion.source.redshift.config import RedshiftConfig @@ -20,6 +20,7 @@ MetadataChangeEvent, MetadataChangeProposal, ) +from datahub.metadata.schema_classes import OperationClass, OperationTypeClass from tests.test_helpers import mce_helpers FROZEN_TIME = "2021-09-15 09:00:00" @@ -243,3 +244,52 @@ def load_access_events(test_resources_dir: pathlib.Path) -> List[Dict]: with access_events_history_file.open() as access_events_json: access_events = json.loads(access_events_json.read()) return access_events + + +def test_duplicate_operations_dropped(): + report = RedshiftReport() + usage_extractor = RedshiftUsageExtractor( + config=MagicMock(), + connection=MagicMock(), + report=report, + dataset_urn_builder=MagicMock(), + redundant_run_skip_handler=None, + ) + + user = make_user_urn("jdoe") + urnA = "urn:li:dataset:(urn:li:dataPlatform:redshift,db.schema.tableA,PROD)" + urnB = "urn:li:dataset:(urn:li:dataPlatform:redshift,db.schema.tableB,PROD)" 
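+
+    # The lastUpdatedTimestamps below are 95s, 94s and 90s: opA2 repeats
+    # opA1's (actor, operationType) on urnA within the 10-second drop
+    # window, so it should be dropped, while opB1 is kept because it
+    # targets a different urn.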
+ + opA1 = MetadataChangeProposalWrapper( + entityUrn=urnA, + aspect=OperationClass( + timestampMillis=100 * 1000, + lastUpdatedTimestamp=95 * 1000, + actor=user, + operationType=OperationTypeClass.INSERT, + ), + ) + opB1 = MetadataChangeProposalWrapper( + entityUrn=urnB, + aspect=OperationClass( + timestampMillis=101 * 1000, + lastUpdatedTimestamp=94 * 1000, + actor=user, + operationType=OperationTypeClass.INSERT, + ), + ) + opA2 = MetadataChangeProposalWrapper( + entityUrn=urnA, + aspect=OperationClass( + timestampMillis=102 * 1000, + lastUpdatedTimestamp=90 * 1000, + actor=user, + operationType=OperationTypeClass.INSERT, + ), + ) + + dedups = list(usage_extractor._drop_repeated_operations([opA1, opB1, opA2])) + assert dedups == [ + opA1, + opB1, + ] From e58e2bf3be6cf43923ff400667406ee6dc95cd3a Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Mon, 18 Dec 2023 11:02:33 +0530 Subject: [PATCH 259/792] feat: Deprecation 'Note' changed to Markdown Renderable (#9396) Setting auto merge after test cases are passed --- .../EntityDropdown/UpdateDeprecationModal.tsx | 14 +++- .../components/styled/DeprecationPill.tsx | 82 +++++++++++++++++-- .../tests/cypress/cypress/support/commands.js | 2 +- 3 files changed, 86 insertions(+), 12 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx index 6ae893e12575f..25527497b33a8 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx @@ -1,7 +1,10 @@ import React from 'react'; -import { Button, DatePicker, Form, Input, message, Modal } from 'antd'; +import { Button, DatePicker, Form, message, Modal } from 'antd'; +import styled from 'styled-components'; import { useBatchUpdateDeprecationMutation } from '../../../../graphql/mutations.generated'; import { handleBatchError } from '../utils'; +import { Editor } from '../tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../constants'; type Props = { urns: string[]; @@ -9,6 +12,10 @@ type Props = { refetch?: () => void; }; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4.5]}; +`; + export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { const [batchUpdateDeprecation] = useBatchUpdateDeprecationMutation(); const [form] = Form.useForm(); @@ -64,10 +71,11 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { } + width='40%' >
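+            {/* The note field below is now the markdown-capable StyledEditor
+                (a styled Editor) rather than the previous plain antd Input. */}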
- - + + diff --git a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx index f60a74247ebcc..9ec2aab193aa0 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useState } from 'react'; import { InfoCircleOutlined } from '@ant-design/icons'; import { Divider, message, Modal, Popover, Tooltip, Typography } from 'antd'; import { blue } from '@ant-design/colors'; @@ -8,6 +8,8 @@ import { Deprecation } from '../../../../../types.generated'; import { getLocaleTimezone } from '../../../../shared/time/timeUtils'; import { ANTD_GRAY } from '../../constants'; import { useBatchUpdateDeprecationMutation } from '../../../../../graphql/mutations.generated'; +import { Editor } from '../../tabs/Documentation/components/editor/Editor'; +import StripMarkdownText, { removeMarkdown } from './StripMarkdownText'; const DeprecatedContainer = styled.div` height: 18px; @@ -38,11 +40,6 @@ const DeprecatedTitle = styled(Typography.Text)` font-weight: bold; `; -const DeprecatedSubTitle = styled(Typography.Text)` - display: block; - margin-bottom: 5px; -`; - const LastEvaluatedAtLabel = styled.div` padding: 0; margin: 0; @@ -70,15 +67,42 @@ const IconGroup = styled.div` } `; +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + height: 100%; + min-height: 22px; + margin-bottom: 14px; +`; +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; type Props = { urn: string; deprecation: Deprecation; refetch?: () => void; showUndeprecate: boolean | null; }; +const ABBREVIATED_LIMIT = 80; export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: Props) => { const [batchUpdateDeprecationMutation] = useBatchUpdateDeprecationMutation(); + const [expanded, setExpanded] = useState(false); + const overLimit = deprecation?.note && removeMarkdown(deprecation?.note).length > 80; /** * Deprecation Decommission Timestamp */ @@ -131,14 +155,56 @@ export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: return ( {deprecation?.note !== '' && Deprecation note} {isDividerNeeded && } - {deprecation?.note !== '' && {deprecation.note}} + + {expanded || !overLimit ? 
( + <> + { + deprecation?.note && deprecation?.note !== '' && + <> + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + } + + ) : ( + <> + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {deprecation.note} + + + )} + {deprecation?.decommissionTime !== null && ( diff --git a/smoke-test/tests/cypress/cypress/support/commands.js b/smoke-test/tests/cypress/cypress/support/commands.js index 5e3664f944edf..ffbd050488181 100644 --- a/smoke-test/tests/cypress/cypress/support/commands.js +++ b/smoke-test/tests/cypress/cypress/support/commands.js @@ -171,7 +171,7 @@ Cypress.Commands.add("deleteFromDropdown", () => { Cypress.Commands.add("addViaFormModal", (text, modelHeader) => { cy.waitTextVisible(modelHeader); - cy.get(".ant-form-item-control-input-content > input[type='text']").first().type(text); + cy.get('.ProseMirror-focused').type(text); cy.get(".ant-modal-footer > button:nth-child(2)").click(); }); From b4fe451d932315546ebd98623f1572a66c41ad43 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Mon, 18 Dec 2023 12:38:30 +0530 Subject: [PATCH 260/792] feat : markdown support for group description (#9455) --- .../group/EditGroupDescriptionModal.tsx | 64 ++++++++ .../src/app/entity/group/GroupInfoSideBar.tsx | 145 ++++++++++++++++-- .../app/identity/group/CreateGroupModal.tsx | 106 +++++++------ .../cypress/e2e/settings/managing_groups.js | 6 +- 4 files changed, 261 insertions(+), 60 deletions(-) create mode 100644 datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx diff --git a/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx new file mode 100644 index 0000000000000..a898a73c254ef --- /dev/null +++ b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx @@ -0,0 +1,64 @@ +import React, { useState } from 'react'; +import { Button, Modal, Form } from 'antd'; +import styled from 'styled-components'; + +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../shared/constants'; + +type Props = { + onClose: () => void; + onSaveAboutMe: () => void; + setStagedDescription: (des: string) => void; + stagedDescription: string | undefined; +}; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4]}; +`; + +export default function EditGroupDescriptionModal({ + onClose, + onSaveAboutMe, + setStagedDescription, + stagedDescription, +}: Props) { + const [form] = Form.useForm(); + const [aboutText,setAboutText] = useState(stagedDescription) + + function updateDescription(description: string) { + setAboutText(aboutText) + setStagedDescription(description); + + } + + const saveDescription = () => { + onSaveAboutMe(); + onClose(); + }; + + return ( + + + + + } + > + + +
+ +
+
+ +
+ ); +} diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx index d9eaed2682ea1..07885a4d0f630 100644 --- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx +++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx @@ -16,14 +16,15 @@ import { EmptyValue, SocialDetails, EditButton, - AboutSection, - AboutSectionText, GroupsSection, + AboutSection, } from '../shared/SidebarStyledComponents'; import GroupMembersSideBarSection from './GroupMembersSideBarSection'; import { useUserContext } from '../../context/useUserContext'; - -const { Paragraph } = Typography; +import StripMarkdownText, { removeMarkdown } from '../shared/components/styled/StripMarkdownText'; +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import EditGroupDescriptionModal from './EditGroupDescriptionModal'; +import { REDESIGN_COLORS } from '../shared/constants'; type SideBarData = { photoUrl: string | undefined; @@ -80,6 +81,61 @@ const GroupTitle = styled(Typography.Title)` } `; +const EditIcon = styled(EditOutlined)` + cursor: pointer; + color: ${REDESIGN_COLORS.BLUE}; +`; +const AddNewDescription = styled(Button)` + display: none; + margin: -4px; + width: 140px; +`; + +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + text-align:left; + font-weight: normal; + font + min-height: 22px; + + &:hover ${AddNewDescription} { + display: block; + } + & ins.diff { + background-color: #b7eb8f99; + text-decoration: none; + &:hover { + background-color: #b7eb8faa; + } + } + & del.diff { + background-color: #ffa39e99; + text-decoration: line-through; + &: hover { + background-color: #ffa39eaa; + } + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; + /** * Responsible for reading & writing users. */ @@ -106,7 +162,17 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { const me = useUserContext(); const canEditGroup = me?.platformPrivileges?.manageIdentities; const [groupTitle, setGroupTitle] = useState(name); + const [expanded, setExpanded] = useState(false); + const [isUpdatingDescription, SetIsUpdatingDescription] = useState(false); + const [stagedDescription, setStagedDescription] = useState(aboutText); + const [updateName] = useUpdateNameMutation(); + const overLimit = removeMarkdown(aboutText || '').length > 80; + const ABBREVIATED_LIMIT = 80; + + useEffect(() => { + setStagedDescription(aboutText); + }, [aboutText]); useEffect(() => { setGroupTitle(groupTitle); @@ -136,12 +202,12 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { }; // About Text save - const onSaveAboutMe = (inputString) => { + const onSaveAboutMe = () => { updateCorpGroupPropertiesMutation({ variables: { urn: urn || '', input: { - description: inputString, + description: stagedDescription, }, }, }) @@ -201,16 +267,65 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { - {TITLES.about} - - - {aboutText || } - - + + {TITLES.about} + + SetIsUpdatingDescription(true)} data-testid="edit-icon" /> + + + + {(aboutText && expanded) || !overLimit ? 
( + <> + {/* Read only viewer for displaying group description */} + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + ) : ( + <> + {/* Display abbreviated description with option to read more */} + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {aboutText} + + + )} + + {/* Modal for updating group description */} + {isUpdatingDescription && ( + { + SetIsUpdatingDescription(false); + setStagedDescription(aboutText); + }} + onSaveAboutMe={onSaveAboutMe} + setStagedDescription={setStagedDescription} + stagedDescription={stagedDescription} + /> + )} diff --git a/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx b/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx index 214cb251767c9..4ba714ca23ae0 100644 --- a/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx +++ b/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx @@ -1,16 +1,23 @@ -import React, { useState } from 'react'; +import React, { useRef, useState } from 'react'; import { message, Button, Input, Modal, Typography, Form, Collapse } from 'antd'; +import styled from 'styled-components'; import { useCreateGroupMutation } from '../../../graphql/group.generated'; import { useEnterKeyListener } from '../../shared/useEnterKeyListener'; import { validateCustomUrnId } from '../../shared/textUtil'; import analytics, { EventType } from '../../analytics'; import { CorpGroup, EntityType } from '../../../types.generated'; +import { Editor as MarkdownEditor } from '../../entity/shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../../entity/shared/constants'; type Props = { onClose: () => void; onCreate: (group: CorpGroup) => void; }; +const StyledEditor = styled(MarkdownEditor)` + border: 1px solid ${ANTD_GRAY[4]}; +`; + export default function CreateGroupModal({ onClose, onCreate }: Props) { const [stagedName, setStagedName] = useState(''); const [stagedDescription, setStagedDescription] = useState(''); @@ -19,45 +26,54 @@ export default function CreateGroupModal({ onClose, onCreate }: Props) { const [createButtonEnabled, setCreateButtonEnabled] = useState(true); const [form] = Form.useForm(); + // Reference to the styled editor for handling focus + const styledEditorRef = useRef(null); + const onCreateGroup = () => { - createGroupMutation({ - variables: { - input: { - id: stagedId, - name: stagedName, - description: stagedDescription, - }, - }, - }) - .then(({ data, errors }) => { - if (!errors) { - analytics.event({ - type: EventType.CreateGroupEvent, - }); - message.success({ - content: `Created group!`, - duration: 3, - }); - // TODO: Get a full corp group back from create endpoint. 
- onCreate({ - urn: data?.createGroup || '', - type: EntityType.CorpGroup, + // Check if the Enter key was pressed inside the styled editor to prevent unintended form submission + const isEditorNewlineKeypress = + document.activeElement !== styledEditorRef.current && + !styledEditorRef.current?.contains(document.activeElement); + if (isEditorNewlineKeypress) { + createGroupMutation({ + variables: { + input: { + id: stagedId, name: stagedName, - info: { - description: stagedDescription, - }, - }); - } - }) - .catch((e) => { - message.destroy(); - message.error({ content: `Failed to create group!: \n ${e.message || ''}`, duration: 3 }); + description: stagedDescription, + }, + }, }) - .finally(() => { - setStagedName(''); - setStagedDescription(''); - }); - onClose(); + .then(({ data, errors }) => { + if (!errors) { + analytics.event({ + type: EventType.CreateGroupEvent, + }); + message.success({ + content: `Created group!`, + duration: 3, + }); + // TODO: Get a full corp group back from create endpoint. + onCreate({ + urn: data?.createGroup || '', + type: EntityType.CorpGroup, + name: stagedName, + info: { + description: stagedDescription, + }, + }); + } + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to create group!: \n ${e.message || ''}`, duration: 3 }); + }) + .finally(() => { + setStagedName(''); + setStagedDescription(''); + }); + onClose(); + } }; // Handle the Enter press @@ -65,8 +81,13 @@ export default function CreateGroupModal({ onClose, onCreate }: Props) { querySelectorToExecuteClick: '#createGroupButton', }); + function updateDescription(description: string) { + setStagedDescription(description); + } + return ( Description
}> An optional description for your new group. - - setStagedDescription(event.target.value)} - /> + + {/* Styled editor for the group description */} +
+ +
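// Note on the onCreateGroup guard above: the create mutation only runs when
// focus is outside styledEditorRef, so pressing Enter inside the markdown
// description editor inserts a newline instead of submitting the form; Enter
// elsewhere still creates the group via the '#createGroupButton' enter-key
// listener.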
diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js index 70219a550cd8b..978a245c3d9e3 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js @@ -72,8 +72,10 @@ describe("create and manage group", () => { cy.focused().clear().type(`Test group EDITED ${test_id}{enter}`); cy.waitTextVisible("Name Updated"); cy.contains(`Test group EDITED ${test_id}`).should("be.visible"); - cy.contains("Test group description").find('[aria-label="edit"]').click(); - cy.focused().type(" EDITED{enter}"); + cy.get('[data-testid="edit-icon"]').click(); + cy.waitTextVisible("Edit Description"); + cy.get("#description").should("be.visible").type(" EDITED"); + cy.get("#updateGroupButton").click(); cy.waitTextVisible("Changes saved."); cy.contains("Test group description EDITED").should("be.visible"); cy.clickOptionWithText("Add Owners"); From 9d386fbd6f9a0436b25daa2b4603d1fa0b8f44ee Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 18 Dec 2023 05:38:16 -0500 Subject: [PATCH 261/792] feat(ingest): enable CLL for dbt by default (#9466) --- .../ingestion/source/dbt/dbt_common.py | 7 +- .../ingestion/source/looker/looker_common.py | 2 +- .../source/looker/looker_lib_wrapper.py | 2 +- .../dbt_enabled_with_schemas_mces_golden.json | 248 ++++++++++++ .../dbt_test_column_meta_mapping_golden.json | 383 ++++++++++++++++++ ...th_complex_owner_patterns_mces_golden.json | 248 ++++++++++++ ...th_data_platform_instance_mces_golden.json | 248 ++++++++++++ ...h_non_incremental_lineage_mces_golden.json | 248 ++++++++++++ ..._target_platform_instance_mces_golden.json | 248 ++++++++++++ 9 files changed, 1630 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index af28be310587a..7bec07b40c4bd 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -300,7 +300,7 @@ class DBTCommonConfig( description="When enabled, schemas will be inferred from the dbt node definition.", ) include_column_lineage: bool = Field( - default=False, + default=True, description="When enabled, column-level lineage will be extracted from the dbt node definition. Requires `infer_dbt_schemas` to be enabled. 
" "If you run into issues where the column name casing does not match up with properly, providing a datahub_api or using the rest sink will improve accuracy.", ) @@ -696,7 +696,10 @@ def get_column_type( @support_status(SupportStatus.CERTIFIED) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -@capability(SourceCapability.LINEAGE_FINE, "Enabled using `include_column_lineage`") +@capability( + SourceCapability.LINEAGE_FINE, + "Enabled by default, configure using `include_column_lineage`", +) class DBTSourceBase(StatefulIngestionSourceBase): def __init__(self, config: DBTCommonConfig, ctx: PipelineContext, platform: str): super().__init__(config, ctx) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py index e440750cba0d0..53533a8d27c9b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py @@ -1015,7 +1015,7 @@ def __init__( self.report = report self.source_config = source_config - @lru_cache() + @lru_cache(maxsize=200) def get_explore(self, model: str, explore: str) -> Optional[LookerExplore]: looker_explore = LookerExplore.from_api( model, diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py index 988caba1c0d74..8959868c27114 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py @@ -114,7 +114,7 @@ def get_available_permissions(self) -> Set[str]: return permissions - @lru_cache(maxsize=2000) + @lru_cache(maxsize=1000) def get_user(self, id_: str, user_fields: str) -> Optional[User]: self.client_stats.user_calls += 1 try: diff --git a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json index e4f01ef7a6c53..4deb725ed2b44 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json @@ -247,6 +247,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + 
], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -428,6 +508,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -650,6 +765,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { 
+ "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -789,6 +1002,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json index 4d5b008b695f9..588470ef41631 100644 --- 
a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json @@ -201,6 +201,98 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer_snapshot,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer_snapshot,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),initial_full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -360,6 +452,52 @@ "dataset": 
"urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),email)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -574,6 +712,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -741,6 +977,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -1011,6 +1282,118 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),active)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),active)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),activebool)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),activebool)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),address_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),address_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),create_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),create_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),first_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),last_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_update)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),last_update)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),store_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),store_id)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json index 0bdd5e3c895c2..926e8b8c8ed84 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json @@ -211,6 +211,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -375,6 +455,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 
0.9 + } ] } }, @@ -597,6 +712,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -736,6 +949,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json index 5ab0b11e37771..3727603266f25 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json @@ -212,6 +212,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -376,6 +456,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -598,6 +713,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),customer_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),rental_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -737,6 +950,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json index 3725e590fee9e..ec879e6af766a 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json @@ -212,6 +212,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + 
"upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -376,6 +456,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -598,6 +713,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -737,6 +950,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json index a47abab6b40f7..e25c5e4faf6af 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json @@ -212,6 +212,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": 
"FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -376,6 +456,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -598,6 +713,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -737,6 +950,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, From 03590a194885b2fbbb5249aef909d761c3ffc12c Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Mon, 18 Dec 2023 19:54:31 +0100 Subject: [PATCH 262/792] fix(ingest/snowflake) - Fixing snowflake url with default region (#9443) --- metadata-ingestion/setup.py | 8 +- .../source/snowflake/snowflake_utils.py | 28 ++++- .../snowflake/snowflake_golden.json | 116 +++++++++--------- .../integration/sql_server/test_sql_server.py | 5 + .../tests/unit/test_snowflake_source.py | 27 ++++ 5 files changed, 120 insertions(+), 64 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 1bc1bc5100b08..cb13a40125c0d 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -354,7 +354,11 @@ "mlflow": {"mlflow-skinny>=2.3.0"}, "mode": {"requests", "tenacity>=8.0.1"} | sqllineage_lib, "mongodb": {"pymongo[srv]>=3.11", "packaging"}, - "mssql": sql_common | {"sqlalchemy-pytds>=0.3", "pyOpenSSL"}, + "mssql": 
sql_common + | { + "sqlalchemy-pytds>=0.3", + "pyOpenSSL", + }, "mssql-odbc": sql_common | {"pyodbc"}, "mysql": mysql, # mariadb should have same dependency as mysql @@ -559,7 +563,7 @@ "kafka-connect", "ldap", "mongodb", - "mssql", + "mssql" if sys.version_info >= (3, 8) else None, "mysql", "mariadb", "redash", diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py index 5a451bf197d34..af8d8824a4b17 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py @@ -9,8 +9,8 @@ from datahub.configuration.pattern_utils import is_schema_allowed from datahub.ingestion.source.snowflake.constants import ( GENERIC_PERMISSION_ERROR_KEY, - SNOWFLAKE_DEFAULT_CLOUD, SNOWFLAKE_REGION_CLOUD_REGION_MAPPING, + SnowflakeCloudProvider, SnowflakeObjectDomain, ) from datahub.ingestion.source.snowflake.snowflake_config import SnowflakeV2Config @@ -72,6 +72,15 @@ def report_error(self, key: str, reason: str) -> None: class SnowflakeCommonMixin: platform = "snowflake" + CLOUD_REGION_IDS_WITHOUT_CLOUD_SUFFIX = [ + "us-west-2", + "us-east-1", + "eu-west-1", + "eu-central-1", + "ap-southeast-1", + "ap-southeast-2", + ] + @staticmethod def create_snowsight_base_url( account_locator: str, @@ -79,12 +88,23 @@ def create_snowsight_base_url( cloud: str, privatelink: bool = False, ) -> Optional[str]: + if cloud: + url_cloud_provider_suffix = f".{cloud}" + + if cloud == SnowflakeCloudProvider.AWS: + # Some AWS regions do not have cloud suffix. See below the list: + # https://docs.snowflake.com/en/user-guide/admin-account-identifier#non-vps-account-locator-formats-by-cloud-platform-and-region + if ( + cloud_region_id + in SnowflakeCommonMixin.CLOUD_REGION_IDS_WITHOUT_CLOUD_SUFFIX + ): + url_cloud_provider_suffix = "" + else: + url_cloud_provider_suffix = f".{cloud}" if privatelink: url = f"https://app.{account_locator}.{cloud_region_id}.privatelink.snowflakecomputing.com/" - elif cloud == SNOWFLAKE_DEFAULT_CLOUD: - url = f"https://app.snowflake.com/{cloud_region_id}/{account_locator}/" else: - url = f"https://app.snowflake.com/{cloud_region_id}.{cloud}/{account_locator}/" + url = f"https://app.snowflake.com/{cloud_region_id}{url_cloud_provider_suffix}/{account_locator}/" return url @staticmethod diff --git a/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json b/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json index c7273fee5a2e5..ece54f00eeaa0 100644 --- a/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json +++ b/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json @@ -11,20 +11,20 @@ "env": "PROD", "database": "test_db" }, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/", "name": "TEST_DB", "description": "Comment for TEST_DB", "created": { - "time": 1623110400000 + "time": 1623103200000 }, "lastModified": { - "time": 1623110400000 + "time": 1623103200000 } } }, "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", + "lastObserved": 1615443388097, + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -144,20 +144,20 @@ "database": "test_db", "schema": "test_schema" }, - "externalUrl": 
"https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/", "name": "TEST_SCHEMA", "description": "comment for TEST_DB.TEST_SCHEMA", "created": { - "time": 1623110400000 + "time": 1623103200000 }, "lastModified": { - "time": 1623110400000 + "time": 1623103200000 } } }, "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", + "lastObserved": 1615443388097, + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -489,22 +489,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_1/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_1/", "name": "TABLE_1", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_1", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -788,22 +788,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_2/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_2/", "name": "TABLE_2", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_2", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1087,22 +1087,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_3/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_3/", "name": "TABLE_3", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_3", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1386,22 +1386,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_4/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_4/", "name": "TABLE_4", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_4", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": 
"snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1685,22 +1685,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_5/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_5/", "name": "TABLE_5", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_5", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1984,22 +1984,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_6/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_6/", "name": "TABLE_6", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_6", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -2283,22 +2283,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_7/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_7/", "name": "TABLE_7", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_7", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -2582,22 +2582,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_8/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_8/", "name": "TABLE_8", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_8", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -2881,22 +2881,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_9/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_9/", "name": "TABLE_9", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_9", "description": 
"Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -3180,22 +3180,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_10/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_10/", "name": "TABLE_10", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_10", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -3470,22 +3470,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_1/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_1/", "name": "VIEW_1", "qualifiedName": "TEST_DB.TEST_SCHEMA.VIEW_1", "description": "Comment for View", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -3805,22 +3805,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_2/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_2/", "name": "VIEW_2", "qualifiedName": "TEST_DB.TEST_SCHEMA.VIEW_2", "description": "Comment for View", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, diff --git a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py index f439a322c2677..5ed672d527264 100644 --- a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py +++ b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py @@ -1,5 +1,6 @@ import os import subprocess +import sys import time import pytest @@ -8,6 +9,10 @@ from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import cleanup_image, wait_for_port +pytestmark = pytest.mark.skipif( + sys.version_info < (3, 8), reason="requires python 3.8 or higher" +) + @pytest.fixture(scope="module") def mssql_runner(docker_compose_runner, pytestconfig): diff --git a/metadata-ingestion/tests/unit/test_snowflake_source.py b/metadata-ingestion/tests/unit/test_snowflake_source.py index 536c91ace4f5e..69a7510692df1 100644 --- 
a/metadata-ingestion/tests/unit/test_snowflake_source.py +++ b/metadata-ingestion/tests/unit/test_snowflake_source.py @@ -24,6 +24,7 @@ from datahub.ingestion.source.snowflake.snowflake_usage_v2 import ( SnowflakeObjectAccessEntry, ) +from datahub.ingestion.source.snowflake.snowflake_utils import SnowflakeCommonMixin from datahub.ingestion.source.snowflake.snowflake_v2 import SnowflakeV2Source from tests.test_helpers import test_connection_helpers @@ -584,3 +585,29 @@ def test_email_filter_query_generation_with_case_insensitive_filter(): filter_query == "AND (rlike(user_name, '.*@example.com','c')) AND NOT (rlike(user_name, '.*@example2.com','c'))" ) + + +def test_create_snowsight_base_url_us_west(): + ( + cloud, + cloud_region_id, + ) = SnowflakeCommonMixin.get_cloud_region_from_snowflake_region_id("aws_us_west_2") + + result = SnowflakeCommonMixin.create_snowsight_base_url( + "account_locator", cloud_region_id, cloud, False + ) + assert result == "https://app.snowflake.com/us-west-2/account_locator/" + + +def test_create_snowsight_base_url_ap_northeast_1(): + ( + cloud, + cloud_region_id, + ) = SnowflakeCommonMixin.get_cloud_region_from_snowflake_region_id( + "aws_ap_northeast_1" + ) + + result = SnowflakeCommonMixin.create_snowsight_base_url( + "account_locator", cloud_region_id, cloud, False + ) + assert result == "https://app.snowflake.com/ap-northeast-1.aws/account_locator/" From 193d1464a628fc800e926f04fcd4bd1d6774d858 Mon Sep 17 00:00:00 2001 From: noggi Date: Mon, 18 Dec 2023 14:06:17 -0800 Subject: [PATCH 263/792] Fix downstream CI issue (#9479) --- docker/datahub-ingestion-base/Dockerfile | 2 +- docker/datahub-ingestion/Dockerfile | 2 +- docker/datahub-ingestion/build.gradle | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index e0f9fdc997071..81fec61ea5073 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -4,7 +4,7 @@ ARG BASE_IMAGE=base # Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
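# Illustration (mirror hostnames are hypothetical, not part of this change): in an
# enterprise environment each repo ARG below can be overridden at build time, e.g.
#   docker build \
#     --build-arg DEBIAN_REPO_URL=https://mirror.example.com/debian \
#     --build-arg PIP_MIRROR_URL=https://mirror.example.com/pypi/simple \
#     -t acryldata/datahub-ingestion-base .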
ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine ARG GITHUB_REPO_URL=https://github.com -ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG DEBIAN_REPO_URL=https://deb.debian.org/debian ARG PIP_MIRROR_URL=null FROM golang:1-alpine3.18 AS dockerize-binary diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 9516c31a19e21..2898a363a0a18 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -3,7 +3,7 @@ ARG APP_ENV=full ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head ARG PIP_MIRROR_URL=null -ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG DEBIAN_REPO_URL=https://deb.debian.org/debian FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 36444210f1938..0b08f189e6b45 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -33,7 +33,7 @@ docker { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')] + def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", ''), BASE_IMAGE: "${docker_registry}/datahub-ingestion-base"] // Add build args if they are defined (needed for some CI or enterprise environments) if (project.hasProperty('pipMirrorUrl')) { From ecda3e618704c5eb335ad1a21c30f0c935581f64 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 18 Dec 2023 18:26:33 -0500 Subject: [PATCH 264/792] feat(ingest): pydantic v2 compatibility (#9434) --- .github/workflows/airflow-plugin.yml | 7 ++-- .../airflow-plugin/tox.ini | 9 +++++ metadata-ingestion/setup.py | 39 ++++++++++++++++--- .../api/entities/datacontract/assertion.py | 4 +- .../datacontract/assertion_operator.py | 16 ++++---- .../datacontract/data_quality_assertion.py | 11 +++--- .../api/entities/datacontract/datacontract.py | 23 +++++------ .../datacontract/freshness_assertion.py | 15 ++++--- .../entities/datacontract/schema_assertion.py | 14 ++++--- .../src/datahub/cli/check_cli.py | 13 ++++++- .../src/datahub/configuration/common.py | 16 +++++++- .../src/datahub/configuration/datetimes.py | 4 +- .../pydantic_migration_helpers.py | 29 ++++++++++++++ .../configuration/time_window_config.py | 16 ++++++-- .../configuration/validate_field_rename.py | 4 +- .../ingestion/glossary/datahub_classifier.py | 11 +++++- .../source/bigquery_v2/bigquery_config.py | 2 +- .../ingestion/source/delta_lake/config.py | 4 +- .../source/snowflake/snowflake_config.py | 2 +- .../ingestion/source_config/sql/snowflake.py | 2 +- .../src/datahub/utilities/urns/urn_iter.py | 2 +- .../integration/snowflake/test_snowflake.py | 16 ++++---- .../unit/{ => config}/test_allow_deny.py | 0 .../unit/{ => config}/test_config_clean.py | 0 .../tests/unit/config/test_config_model.py | 18 +++++++-- .../{ => config}/test_pydantic_validators.py | 13 +++++-- .../{ => config}/test_time_window_config.py | 0 27 files changed, 209 insertions(+), 81 deletions(-) rename metadata-ingestion/tests/unit/{ => config}/test_allow_deny.py (100%) rename metadata-ingestion/tests/unit/{ => config}/test_config_clean.py (100%) rename metadata-ingestion/tests/unit/{ => config}/test_pydantic_validators.py (92%) rename metadata-ingestion/tests/unit/{ => config}/test_time_window_config.py (100%) diff --git 
a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index cd1e159b7d53c..70816e5f093d1 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -32,6 +32,7 @@ jobs: strategy: matrix: include: + # Note: this should be kept in sync with tox.ini. - python-version: "3.8" extra_pip_requirements: "apache-airflow~=2.1.4" extra_pip_extras: plugin-v1 @@ -39,13 +40,13 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.4.0" + extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.6.0" + extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0" + extra_pip_requirements: "apache-airflow>=2.7.0 pydantic==2.4.2" extra_pip_extras: plugin-v2 fail-fast: false steps: diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 1010bd2933e45..27ae2ce65ba65 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -10,6 +10,7 @@ envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py31 use_develop = true extras = dev,integration-tests,plugin-v1 deps = + # This should be kept in sync with the Github Actions matrix. -e ../../metadata-ingestion/ # Airflow version airflow21: apache-airflow~=2.1.0 @@ -20,7 +21,15 @@ deps = # See https://github.com/datahub-project/datahub/pull/9365 airflow24: apache-airflow~=2.4.0,pluggy==1.0.0 airflow26: apache-airflow~=2.6.0 + # Respect the constraints file on pendulum. + # See https://github.com/apache/airflow/issues/36274 + airflow24,airflow26: pendulum>=2.0,<3.0 + # The Airflow 2.7 constraints file points at pydantic v2, so we match that here. + # https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt + # Note that Airflow is actually compatible with both pydantic v1 and v2, and the + # constraints file is overly restrictive. airflow27: apache-airflow~=2.7.0 + airflow27: pydantic==2.4.2 commands = pytest --cov-append {posargs} diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index cb13a40125c0d..13c9d3c99aaca 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -14,9 +14,10 @@ "mypy_extensions>=0.4.3", # Actual dependencies. "typing-inspect", + # pydantic 1.8.2 is incompatible with mypy 0.910. + # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910. # pydantic 1.10.3 is incompatible with typing-extensions 4.1.1 - https://github.com/pydantic/pydantic/issues/4885 - # pydantic 2 makes major, backwards-incompatible changes - https://github.com/pydantic/pydantic/issues/4887 - "pydantic>=1.5.1,!=1.10.3,<2", + "pydantic>=1.10.0,!=1.10.3", "mixpanel>=4.9.0", "sentry-sdk", } @@ -53,6 +54,18 @@ "ruamel.yaml", } +pydantic_no_v2 = { + # pydantic 2 makes major, backwards-incompatible changes - https://github.com/pydantic/pydantic/issues/4887 + # Tags sources that require the pydantic v2 API. + "pydantic<2", +} + +plugin_common = { + # While pydantic v2 support is experimental, require that all plugins + # continue to use v1. This will ensure that no ingestion recipes break. 
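    # Worked example (plugin name picked for illustration): with the extras_require
    # wiring later in this diff, a source such as "mode" resolves to roughly
    #     framework_common | plugin_common | {"requests", "tenacity>=8.0.1"} | sqllineage_lib
    # so the "pydantic<2" pin from pydantic_no_v2 rides along into every plugin
    # that is not explicitly exempted.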
+ *pydantic_no_v2, +} + rest_common = {"requests", "requests_file"} kafka_common = { @@ -118,6 +131,7 @@ "sqlalchemy>=1.4.39, <2", # Required for SQL profiling. "great-expectations>=0.15.12, <=0.15.50", + *pydantic_no_v2, # because of great-expectations # scipy version restricted to reduce backtracking, used by great-expectations, "scipy>=1.7.2", # GE added handling for higher version of jinja2 @@ -229,6 +243,7 @@ iceberg_common = { # Iceberg Python SDK "pyiceberg", + *pydantic_no_v2, # because of pyiceberg "pyarrow>=9.0.0, <13.0.0", } @@ -477,9 +492,6 @@ "flake8-bugbear==23.3.12", "isort>=5.7.0", "mypy==1.0.0", - # pydantic 1.8.2 is incompatible with mypy 0.910. - # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910. - "pydantic>=1.10.0", *test_api_requirements, pytest_dep, "pytest-asyncio>=0.16.0", @@ -740,7 +752,22 @@ extras_require={ "base": list(framework_common), **{ - plugin: list(framework_common | dependencies) + plugin: list( + framework_common + | ( + plugin_common + if plugin + not in { + "airflow", + "datahub-rest", + "datahub-kafka", + "sync-file-emitter", + "sql-parser", + } + else set() + ) + | dependencies + ) for (plugin, dependencies) in plugins.items() }, "all": list( diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py index c45d4ddc92458..89ac528efe81a 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py @@ -1,7 +1,7 @@ from typing import Optional -from datahub.configuration import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel -class BaseAssertion(ConfigModel): +class BaseAssertion(v1_ConfigModel): description: Optional[str] = None diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py index a41b0f7aafd9f..dc0c97d1c74e5 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py @@ -2,7 +2,7 @@ from typing_extensions import Literal, Protocol -from datahub.configuration import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel from datahub.metadata.schema_classes import ( AssertionStdOperatorClass, AssertionStdParameterClass, @@ -58,7 +58,7 @@ def _generate_assertion_std_parameters( ) -class EqualToOperator(ConfigModel): +class EqualToOperator(v1_ConfigModel): type: Literal["equal_to"] value: Union[str, int, float] @@ -71,7 +71,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class BetweenOperator(ConfigModel): +class BetweenOperator(v1_ConfigModel): type: Literal["between"] min: Union[int, float] max: Union[int, float] @@ -87,7 +87,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: ) -class LessThanOperator(ConfigModel): +class LessThanOperator(v1_ConfigModel): type: Literal["less_than"] value: Union[int, float] @@ -100,7 +100,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class GreaterThanOperator(ConfigModel): +class GreaterThanOperator(v1_ConfigModel): type: Literal["greater_than"] value: Union[int, float] @@ -113,7 +113,7 @@ def 
generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class LessThanOrEqualToOperator(ConfigModel): +class LessThanOrEqualToOperator(v1_ConfigModel): type: Literal["less_than_or_equal_to"] value: Union[int, float] @@ -126,7 +126,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class GreaterThanOrEqualToOperator(ConfigModel): +class GreaterThanOrEqualToOperator(v1_ConfigModel): type: Literal["greater_than_or_equal_to"] value: Union[int, float] @@ -139,7 +139,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class NotNullOperator(ConfigModel): +class NotNullOperator(v1_ConfigModel): type: Literal["not_null"] operator: str = AssertionStdOperatorClass.NOT_NULL diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py index 6a3944ba36baf..975aa359bd203 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py @@ -1,12 +1,11 @@ from typing import List, Optional, Union -import pydantic from typing_extensions import Literal import datahub.emitter.mce_builder as builder from datahub.api.entities.datacontract.assertion import BaseAssertion from datahub.api.entities.datacontract.assertion_operator import Operators -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel, v1_Field from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.metadata.schema_classes import ( AssertionInfoClass, @@ -25,7 +24,7 @@ class IdConfigMixin(BaseAssertion): - id_raw: Optional[str] = pydantic.Field( + id_raw: Optional[str] = v1_Field( default=None, alias="id", description="The id of the assertion. 
If not provided, one will be generated using the type.", @@ -38,7 +37,7 @@ def generate_default_id(self) -> str: class CustomSQLAssertion(IdConfigMixin, BaseAssertion): type: Literal["custom_sql"] sql: str - operator: Operators = pydantic.Field(discriminator="type") + operator: Operators = v1_Field(discriminator="type") def generate_default_id(self) -> str: return f"{self.type}-{self.sql}-{self.operator.id()}" @@ -89,11 +88,11 @@ def generate_assertion_info(self, entity_urn: str) -> AssertionInfoClass: ) -class DataQualityAssertion(ConfigModel): +class DataQualityAssertion(v1_ConfigModel): __root__: Union[ CustomSQLAssertion, ColumnUniqueAssertion, - ] = pydantic.Field(discriminator="type") + ] = v1_Field(discriminator="type") @property def id(self) -> str: diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py b/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py index f3c6be55e5fea..e0ef85d5fd66c 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py @@ -1,7 +1,6 @@ import collections from typing import Iterable, List, Optional, Tuple -import pydantic from ruamel.yaml import YAML from typing_extensions import Literal @@ -11,7 +10,11 @@ ) from datahub.api.entities.datacontract.freshness_assertion import FreshnessAssertion from datahub.api.entities.datacontract.schema_assertion import SchemaAssertion -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import ( + v1_ConfigModel, + v1_Field, + v1_validator, +) from datahub.emitter.mce_builder import datahub_guid, make_assertion_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.metadata.schema_classes import ( @@ -26,7 +29,7 @@ from datahub.utilities.urns.urn import guess_entity_type -class DataContract(ConfigModel): +class DataContract(v1_ConfigModel): """A yml representation of a Data Contract. This model is used as a simpler, Python-native representation of a DataHub data contract. @@ -36,29 +39,27 @@ class DataContract(ConfigModel): version: Literal[1] - id: Optional[str] = pydantic.Field( + id: Optional[str] = v1_Field( default=None, alias="urn", description="The data contract urn. 
If not provided, one will be generated.", ) - entity: str = pydantic.Field( + entity: str = v1_Field( description="The entity urn that the Data Contract is associated with" ) # TODO: add support for properties # properties: Optional[Dict[str, str]] = None - schema_field: Optional[SchemaAssertion] = pydantic.Field( - default=None, alias="schema" - ) + schema_field: Optional[SchemaAssertion] = v1_Field(default=None, alias="schema") - freshness: Optional[FreshnessAssertion] = pydantic.Field(default=None) + freshness: Optional[FreshnessAssertion] = v1_Field(default=None) # TODO: Add a validator to ensure that ids are unique - data_quality: Optional[List[DataQualityAssertion]] = pydantic.Field(default=None) + data_quality: Optional[List[DataQualityAssertion]] = v1_Field(default=None) _original_yaml_dict: Optional[dict] = None - @pydantic.validator("data_quality") + @v1_validator("data_quality") # type: ignore def validate_data_quality( cls, data_quality: Optional[List[DataQualityAssertion]] ) -> Optional[List[DataQualityAssertion]]: diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py index 71741d76b22fc..8694276688967 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py @@ -3,11 +3,10 @@ from datetime import timedelta from typing import List, Union -import pydantic from typing_extensions import Literal from datahub.api.entities.datacontract.assertion import BaseAssertion -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel, v1_Field from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.metadata.schema_classes import ( AssertionInfoClass, @@ -25,10 +24,10 @@ class CronFreshnessAssertion(BaseAssertion): type: Literal["cron"] - cron: str = pydantic.Field( + cron: str = v1_Field( description="The cron expression to use. See https://crontab.guru/ for help." ) - timezone: str = pydantic.Field( + timezone: str = v1_Field( "UTC", description="The timezone to use for the cron schedule. 
Defaults to UTC.", ) @@ -58,10 +57,10 @@ def generate_freshness_assertion_schedule(self) -> FreshnessAssertionScheduleCla ) -class FreshnessAssertion(ConfigModel): - __root__: Union[ - CronFreshnessAssertion, FixedIntervalFreshnessAssertion - ] = pydantic.Field(discriminator="type") +class FreshnessAssertion(v1_ConfigModel): + __root__: Union[CronFreshnessAssertion, FixedIntervalFreshnessAssertion] = v1_Field( + discriminator="type" + ) @property def id(self): diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py index b62f94e0592fc..39297d1a98d02 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py @@ -3,11 +3,10 @@ import json from typing import List, Union -import pydantic from typing_extensions import Literal from datahub.api.entities.datacontract.assertion import BaseAssertion -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel, v1_Field from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.extractor.json_schema_util import get_schema_metadata from datahub.metadata.schema_classes import ( @@ -23,7 +22,7 @@ class JsonSchemaContract(BaseAssertion): type: Literal["json-schema"] - json_schema: dict = pydantic.Field(alias="json-schema") + json_schema: dict = v1_Field(alias="json-schema") _schema_metadata: SchemaMetadataClass @@ -37,7 +36,10 @@ def _init_private_attributes(self) -> None: ) -class FieldListSchemaContract(BaseAssertion, arbitrary_types_allowed=True): +class FieldListSchemaContract(BaseAssertion): + class Config: + arbitrary_types_allowed = True + type: Literal["field-list"] fields: List[SchemaFieldClass] @@ -56,8 +58,8 @@ def _init_private_attributes(self) -> None: ) -class SchemaAssertion(ConfigModel): - __root__: Union[JsonSchemaContract, FieldListSchemaContract] = pydantic.Field( +class SchemaAssertion(v1_ConfigModel): + __root__: Union[JsonSchemaContract, FieldListSchemaContract] = v1_Field( discriminator="type" ) diff --git a/metadata-ingestion/src/datahub/cli/check_cli.py b/metadata-ingestion/src/datahub/cli/check_cli.py index f7996900f7a7a..2732a72aea539 100644 --- a/metadata-ingestion/src/datahub/cli/check_cli.py +++ b/metadata-ingestion/src/datahub/cli/check_cli.py @@ -126,10 +126,21 @@ def metadata_diff( default=False, help="Include extra information for each plugin.", ) +@click.option( + "--source", + type=str, + default=None, +) @telemetry.with_telemetry() -def plugins(verbose: bool) -> None: +def plugins(source: Optional[str], verbose: bool) -> None: """List the enabled ingestion plugins.""" + if source: + # Quick helper for one-off checks with full stack traces. 
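        # Example invocation (source name is illustrative):
        #   datahub check plugins --source snowflake
        # A broken plugin then raises the underlying ImportError with a full
        # traceback, instead of only being flagged as disabled in the summary table.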
+ source_registry.get(source) + click.echo(f"Source {source} is enabled.") + return + click.secho("Sources:", bold=True) click.echo(source_registry.summary(verbose=verbose, col_width=25)) click.echo() diff --git a/metadata-ingestion/src/datahub/configuration/common.py b/metadata-ingestion/src/datahub/configuration/common.py index f225856ca43ce..0030332bcfd54 100644 --- a/metadata-ingestion/src/datahub/configuration/common.py +++ b/metadata-ingestion/src/datahub/configuration/common.py @@ -99,8 +99,20 @@ def _schema_extra(schema: Dict[str, Any], model: Type["ConfigModel"]) -> None: @classmethod def parse_obj_allow_extras(cls: Type[_ConfigSelf], obj: Any) -> _ConfigSelf: - with unittest.mock.patch.object(cls.Config, "extra", pydantic.Extra.allow): - return cls.parse_obj(obj) + if PYDANTIC_VERSION_2: + try: + with unittest.mock.patch.dict( + cls.model_config, # type: ignore + {"extra": "allow"}, + clear=False, + ): + cls.model_rebuild(force=True) # type: ignore + return cls.parse_obj(obj) + finally: + cls.model_rebuild(force=True) # type: ignore + else: + with unittest.mock.patch.object(cls.Config, "extra", pydantic.Extra.allow): + return cls.parse_obj(obj) class PermissiveConfigModel(ConfigModel): diff --git a/metadata-ingestion/src/datahub/configuration/datetimes.py b/metadata-ingestion/src/datahub/configuration/datetimes.py index 41af7565593d9..1520462fa9bf8 100644 --- a/metadata-ingestion/src/datahub/configuration/datetimes.py +++ b/metadata-ingestion/src/datahub/configuration/datetimes.py @@ -65,6 +65,8 @@ def parse_absolute_time(input: str) -> datetime: def parse_relative_timespan(input: str) -> timedelta: + raw_input = input + neg = False input = input.strip() @@ -79,7 +81,7 @@ def parse_relative_timespan(input: str) -> timedelta: if neg: delta = -delta - logger.debug(f'Parsed "{input}" as {delta}.') + logger.debug(f'Parsed "{raw_input}" as {delta}.') return delta diff --git a/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py b/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py index f1876b500598b..bd931abe2e84d 100644 --- a/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py +++ b/metadata-ingestion/src/datahub/configuration/pydantic_migration_helpers.py @@ -19,12 +19,41 @@ class PydanticDeprecatedSince20(Warning): # type: ignore if PYDANTIC_VERSION_2: from pydantic import BaseModel as GenericModel + from pydantic.v1 import ( # type: ignore + BaseModel as v1_BaseModel, + Extra as v1_Extra, + Field as v1_Field, + root_validator as v1_root_validator, + validator as v1_validator, + ) else: + from pydantic import ( # type: ignore + BaseModel as v1_BaseModel, + Extra as v1_Extra, + Field as v1_Field, + root_validator as v1_root_validator, + validator as v1_validator, + ) from pydantic.generics import GenericModel # type: ignore +class v1_ConfigModel(v1_BaseModel): + """A simplified variant of our main ConfigModel class. + + This one only uses pydantic v1 features. 
+ """ + + class Config: + extra = v1_Extra.forbid + underscore_attrs_are_private = True + + __all__ = [ "PYDANTIC_VERSION_2", "PydanticDeprecatedSince20", "GenericModel", + "v1_ConfigModel", + "v1_Field", + "v1_root_validator", + "v1_validator", ] diff --git a/metadata-ingestion/src/datahub/configuration/time_window_config.py b/metadata-ingestion/src/datahub/configuration/time_window_config.py index 15de7470e4d82..f20ab85be0585 100644 --- a/metadata-ingestion/src/datahub/configuration/time_window_config.py +++ b/metadata-ingestion/src/datahub/configuration/time_window_config.py @@ -68,6 +68,12 @@ def default_start_time( assert abs(delta) >= get_bucket_duration_delta( values["bucket_duration"] ), "Relative start time should be in terms of configured bucket duration. e.g '-2 days' or '-2 hours'." + + # The end_time's default value is not yet populated, in which case + # we can just manually generate it here. + if "end_time" not in values: + values["end_time"] = datetime.now(tz=timezone.utc) + return get_time_bucket( values["end_time"] + delta, values["bucket_duration"] ) @@ -80,9 +86,13 @@ def default_start_time( @pydantic.validator("start_time", "end_time") def ensure_timestamps_in_utc(cls, v: datetime) -> datetime: - assert ( - v.tzinfo == timezone.utc - ), 'timezone is not UTC; try adding a "Z" to the value e.g. "2021-07-20T00:00:00Z"' + if v.tzinfo is None: + raise ValueError( + "Timestamps must be in UTC. Try adding a 'Z' to the value e.g. '2021-07-20T00:00:00Z'" + ) + + # If the timestamp is timezone-aware but not in UTC, convert it to UTC. + v = v.astimezone(timezone.utc) return v diff --git a/metadata-ingestion/src/datahub/configuration/validate_field_rename.py b/metadata-ingestion/src/datahub/configuration/validate_field_rename.py index bb01f2b787123..de2a16e9bf247 100644 --- a/metadata-ingestion/src/datahub/configuration/validate_field_rename.py +++ b/metadata-ingestion/src/datahub/configuration/validate_field_rename.py @@ -49,4 +49,6 @@ def _validate_field_rename(cls: Type, values: dict) -> dict: # validator with pre=True gets all the values that were passed in. # Given that a renamed field doesn't show up in the fields list, we can't use # the field-level validator, even with a different field name. 
- return pydantic.root_validator(pre=True, allow_reuse=True)(_validate_field_rename) + return pydantic.root_validator(pre=True, skip_on_failure=True, allow_reuse=True)( + _validate_field_rename + ) diff --git a/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py b/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py index 1f2b7f5689ea3..42eb930c80f9d 100644 --- a/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py +++ b/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py @@ -8,6 +8,7 @@ from pydantic.fields import Field from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2 from datahub.ingestion.glossary.classifier import Classifier @@ -50,7 +51,10 @@ class ValuesFactorConfig(ConfigModel): class PredictionFactorsAndWeights(ConfigModel): class Config: - allow_population_by_field_name = True + if PYDANTIC_VERSION_2: + populate_by_name = True + else: + allow_population_by_field_name = True Name: float = Field(alias="name") Description: float = Field(alias="description") @@ -60,7 +64,10 @@ class Config: class InfoTypeConfig(ConfigModel): class Config: - allow_population_by_field_name = True + if PYDANTIC_VERSION_2: + populate_by_name = True + else: + allow_population_by_field_name = True Prediction_Factors_and_Weights: PredictionFactorsAndWeights = Field( description="Factors and their weights to consider when predicting info types", diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index cbe68a454ea43..c13b08a6d9656 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -284,7 +284,7 @@ def validate_bigquery_audit_metadata_datasets( return v - @root_validator(pre=False) + @root_validator(pre=False, skip_on_failure=True) def backward_compatibility_configs_set(cls, values: Dict) -> Dict: project_id = values.get("project_id") project_id_pattern = values.get("project_id_pattern") diff --git a/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py b/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py index f3616ca648a3e..81a54d1327d05 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py @@ -4,6 +4,7 @@ import pydantic from cached_property import cached_property from pydantic import Field +from typing_extensions import Literal from datahub.configuration.common import AllowDenyPattern from datahub.configuration.source_common import ( @@ -46,10 +47,9 @@ class DeltaLakeSourceConfig(PlatformInstanceConfigMixin, EnvConfigMixin): "'/' and URNs will be created using " "relative_path only.", ) - platform: str = Field( + platform: Literal["delta-lake"] = Field( default="delta-lake", description="The platform that this source connects to", - const=True, ) platform_instance: Optional[str] = Field( default=None, diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py index 032bdef178fdf..b896df1fa340e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py +++ 
b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py @@ -176,7 +176,7 @@ def validate_include_column_lineage(cls, v, values): ) return v - @root_validator(pre=False) + @root_validator(pre=False, skip_on_failure=True) def validate_unsupported_configs(cls, values: Dict) -> Dict: value = values.get("include_read_operational_stats") if value is not None and value: diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py index 46bd24c7e1f4c..e9db82ce75cd9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py @@ -107,7 +107,7 @@ def validate_account_id(cls, account_id: str) -> str: return account_id @pydantic.validator("authentication_type", always=True) - def authenticator_type_is_valid(cls, v, values, field): + def authenticator_type_is_valid(cls, v, values): if v not in VALID_AUTH_TYPES.keys(): raise ValueError( f"unsupported authenticator type '{v}' was provided," diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py index 4f228494f416b..3389a6fb05ee8 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py @@ -150,7 +150,7 @@ def modify_urn(urn: str) -> str: if guess_entity_type(urn) == "dataset": return _lowercase_dataset_urn(urn) elif guess_entity_type(urn) == "schemaField": - cur_urn = Urn.create_from_string(urn) + cur_urn = Urn.from_string(urn) cur_urn._entity_ids[0] = _lowercase_dataset_urn(cur_urn._entity_ids[0]) return str(cur_urn) return urn diff --git a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py index 1b58696e4014c..39a62056a7e4a 100644 --- a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py +++ b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py @@ -87,18 +87,18 @@ def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph): confidence_level_threshold=0.58, info_types_config={ "Age": InfoTypeConfig( - Prediction_Factors_and_Weights=PredictionFactorsAndWeights( - Name=0, Values=1, Description=0, Datatype=0 + prediction_factors_and_weights=PredictionFactorsAndWeights( + name=0, values=1, description=0, datatype=0 ) ), "CloudRegion": InfoTypeConfig( - Prediction_Factors_and_Weights=PredictionFactorsAndWeights( - Name=0, - Description=0, - Datatype=0, - Values=1, + prediction_factors_and_weights=PredictionFactorsAndWeights( + name=0, + description=0, + datatype=0, + values=1, ), - Values=ValuesFactorConfig( + values=ValuesFactorConfig( prediction_type="regex", regex=[ r"(af|ap|ca|eu|me|sa|us)-(central|north|(north(?:east|west))|south|south(?:east|west)|east|west)-\d+" diff --git a/metadata-ingestion/tests/unit/test_allow_deny.py b/metadata-ingestion/tests/unit/config/test_allow_deny.py similarity index 100% rename from metadata-ingestion/tests/unit/test_allow_deny.py rename to metadata-ingestion/tests/unit/config/test_allow_deny.py diff --git a/metadata-ingestion/tests/unit/test_config_clean.py b/metadata-ingestion/tests/unit/config/test_config_clean.py similarity index 100% rename from metadata-ingestion/tests/unit/test_config_clean.py rename to metadata-ingestion/tests/unit/config/test_config_clean.py diff --git 
a/metadata-ingestion/tests/unit/config/test_config_model.py b/metadata-ingestion/tests/unit/config/test_config_model.py index ffac5c465f554..f53390a3deb18 100644 --- a/metadata-ingestion/tests/unit/config/test_config_model.py +++ b/metadata-ingestion/tests/unit/config/test_config_model.py @@ -3,8 +3,11 @@ import pydantic import pytest -from datahub.configuration.common import ConfigModel, redact_raw_config -from datahub.ingestion.source.unity.config import UnityCatalogSourceConfig +from datahub.configuration.common import ( + AllowDenyPattern, + ConfigModel, + redact_raw_config, +) def test_extras_not_allowed(): @@ -76,8 +79,15 @@ def test_config_redaction(): def test_shared_defaults(): - c1 = UnityCatalogSourceConfig(token="s", workspace_url="https://workspace_url") - c2 = UnityCatalogSourceConfig(token="s", workspace_url="https://workspace_url") + class SourceConfig(ConfigModel): + token: str + workspace_url: str + catalog_pattern: AllowDenyPattern = pydantic.Field( + default=AllowDenyPattern.allow_all(), + ) + + c1 = SourceConfig(token="s", workspace_url="https://workspace_url") + c2 = SourceConfig(token="s", workspace_url="https://workspace_url") assert c2.catalog_pattern.allow == [".*"] c1.catalog_pattern.allow += ["foo"] diff --git a/metadata-ingestion/tests/unit/test_pydantic_validators.py b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py similarity index 92% rename from metadata-ingestion/tests/unit/test_pydantic_validators.py rename to metadata-ingestion/tests/unit/config/test_pydantic_validators.py index 3e9ec6cbaf357..399245736805c 100644 --- a/metadata-ingestion/tests/unit/test_pydantic_validators.py +++ b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py @@ -7,7 +7,10 @@ from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.configuration.validate_field_rename import pydantic_renamed_field -from datahub.utilities.global_warning_util import get_global_warnings +from datahub.utilities.global_warning_util import ( + clear_global_warnings, + get_global_warnings, +) def test_field_rename(): @@ -76,9 +79,11 @@ class TestModel(ConfigModel): def test_field_deprecated(): + clear_global_warnings() + class TestModel(ConfigModel): - d1: Optional[str] - d2: Optional[str] + d1: Optional[str] = None + d2: Optional[str] = None b: str _validate_deprecated_d1 = pydantic_field_deprecated("d1") @@ -93,3 +98,5 @@ class TestModel(ConfigModel): assert v.d2 == "deprecated" assert any(["d1 is deprecated" in warning for warning in get_global_warnings()]) assert any(["d2 is deprecated" in warning for warning in get_global_warnings()]) + + clear_global_warnings() diff --git a/metadata-ingestion/tests/unit/test_time_window_config.py b/metadata-ingestion/tests/unit/config/test_time_window_config.py similarity index 100% rename from metadata-ingestion/tests/unit/test_time_window_config.py rename to metadata-ingestion/tests/unit/config/test_time_window_config.py From 7b067822bd8602c00fe5a0efdd15a6bb7a33bad6 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Mon, 18 Dec 2023 18:35:02 -0800 Subject: [PATCH 265/792] feat(gms): Add support for platform-based browse (#9376) Co-authored-by: John Joyce --- .../graphql/featureflags/FeatureFlags.java | 1 + .../resolvers/chart/BrowseV2Resolver.java | 20 +++- .../resolvers/config/AppConfigResolver.java | 1 + .../graphql/resolvers/search/SearchUtils.java | 14 +++ .../src/main/resources/app.graphql | 5 + 
 .../src/main/resources/search.graphql         |   9 +-
 .../browse/BrowseV2ResolverTest.java          |   2 +-
 datahub-web-react/src/appConfigContext.tsx    |   1 +
 datahub-web-react/src/graphql/app.graphql     |   1 +
 .../metadata/client/JavaEntityClient.java     |  24 +++
 .../elasticsearch/ElasticSearchService.java   |  12 +++
 .../elasticsearch/query/ESBrowseDAO.java      |  91 +++++++++++++++++++
 .../src/main/resources/application.yml        |   1 +
 .../linkedin/entity/client/EntityClient.java  |  22 +++++
 .../entity/client/RestliEntityClient.java     |  14 +++
 .../metadata/search/EntitySearchService.java  |  19 ++++
 16 files changed, 231 insertions(+), 6 deletions(-)

diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java
index 07bd1fba5d8a8..e74ed09849763 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java
@@ -12,6 +12,7 @@ public class FeatureFlags {
   private boolean readOnlyModeEnabled = false;
   private boolean showSearchFiltersV2 = false;
   private boolean showBrowseV2 = false;
+  private boolean platformBrowseV2 = false;
   private PreProcessHooks preProcessHooks;
   private boolean showAcrylInfo = false;
   private boolean showAccessManagement = false;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java
index 292d6108b7a04..da4a3a76dd7e0 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java
@@ -2,14 +2,16 @@
 import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER;
 import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
 
+import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2;
 import com.linkedin.datahub.graphql.generated.BrowseResultMetadata;
 import com.linkedin.datahub.graphql.generated.BrowseResultsV2;
 import com.linkedin.datahub.graphql.generated.BrowseV2Input;
+import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
 import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
 import com.linkedin.datahub.graphql.resolvers.search.SearchUtils;
@@ -43,8 +45,8 @@ public class BrowseV2Resolver implements DataFetcher<CompletableFuture<BrowseResultsV2>>
   public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environment) {
     final QueryContext context = environment.getContext();
     final BrowseV2Input input = bindArgument(environment.getArgument("input"), BrowseV2Input.class);
-    final String entityName = EntityTypeMapper.getName(input.getType());
+    final List<String> entityNames = getEntityNames(input);
     final int start = input.getStart() != null ? input.getStart() : DEFAULT_START;
     final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
     final String query = input.getQuery() != null ? input.getQuery() : "*";
@@ -70,7 +72,7 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironmen
           BrowseResultV2 browseResults =
               _entityClient.browseV2(
-                  entityName,
+                  entityNames,
                   pathStr,
                   maybeResolvedView != null
                       ? SearchUtils.combineFilters(
@@ -87,6 +89,18 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironmen
         });
   }
 
+  public static List<String> getEntityNames(BrowseV2Input input) {
+    List<EntityType> entityTypes;
+    if (input.getTypes() != null && input.getTypes().size() > 0) {
+      entityTypes = input.getTypes();
+    } else if (input.getType() != null) {
+      entityTypes = ImmutableList.of(input.getType());
+    } else {
+      entityTypes = BROWSE_ENTITY_TYPES;
+    }
+    return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
+  }
+
   private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) {
     BrowseResultsV2 results = new BrowseResultsV2();
     results.setTotal(browseResults.getNumGroups());
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java
index 34f7f133f6fb9..81b52991cde90 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java
@@ -175,6 +175,7 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen
             .setShowAcrylInfo(_featureFlags.isShowAcrylInfo())
             .setShowAccessManagement(_featureFlags.isShowAccessManagement())
             .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled())
+            .setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2())
             .build();
     appConfig.setFeatureFlags(featureFlagsConfig);
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java
index d04cb57e1a860..444ab4bcc3c3c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java
@@ -92,6 +92,20 @@ private SearchUtils() {}
           EntityType.NOTEBOOK,
           EntityType.DATA_PRODUCT);
 
+  /** Entities that are part of browse by default */
+  public static final List<EntityType> BROWSE_ENTITY_TYPES =
+      ImmutableList.of(
+          EntityType.DATASET,
+          EntityType.DASHBOARD,
+          EntityType.CHART,
+          EntityType.CONTAINER,
+          EntityType.MLMODEL,
+          EntityType.MLMODEL_GROUP,
+          EntityType.MLFEATURE_TABLE,
+          EntityType.DATA_FLOW,
+          EntityType.DATA_JOB,
+          EntityType.NOTEBOOK);
+
   /** A prioritized list of source filter types used to generate quick filters */
   public static final List<String> PRIORITIZED_SOURCE_ENTITY_TYPES =
       Stream.of(
diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql
index 075a3b0fac43b..52451e195ee84 100644
--- a/datahub-graphql-core/src/main/resources/app.graphql
+++ b/datahub-graphql-core/src/main/resources/app.graphql
@@ -437,6 +437,11 @@ type FeatureFlagsConfig {
   """
   showBrowseV2: Boolean!
 
+  """
+  Whether browse v2 is platform mode, which means that platforms are displayed instead of entity types at the root.
+  """
+  platformBrowseV2: Boolean!
+
   """
   Whether we should show CTAs in the UI related to moving to Managed DataHub by Acryl.
   """
diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql
index e0cde5a2db9f9..8f2377edb546e 100644
--- a/datahub-graphql-core/src/main/resources/search.graphql
+++ b/datahub-graphql-core/src/main/resources/search.graphql
@@ -1176,9 +1176,14 @@ Input required for browse queries
 """
 input BrowseV2Input {
   """
-  The browse entity type
+  The browse entity type - deprecated use types instead
   """
-  type: EntityType!
+  type: EntityType
+
+  """
+  The browse entity type - deprecated use types instead. If not provided, all types will be used.
+  """
+  types: [EntityType!]
 
   """
   The browse path V2 - a list with each entry being part of the browse path V2
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
index bffc2b31af2b9..433772d7e2cfe 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
@@ -249,7 +249,7 @@ private static EntityClient initMockEntityClient(
     EntityClient client = Mockito.mock(EntityClient.class);
     Mockito.when(
             client.browseV2(
-                Mockito.eq(entityName),
+                Mockito.eq(ImmutableList.of(entityName)),
                 Mockito.eq(path),
                 Mockito.eq(filter),
                 Mockito.eq(query),
diff --git a/datahub-web-react/src/appConfigContext.tsx b/datahub-web-react/src/appConfigContext.tsx
index 4087ad453687c..8c1089b868e5a 100644
--- a/datahub-web-react/src/appConfigContext.tsx
+++ b/datahub-web-react/src/appConfigContext.tsx
@@ -50,6 +50,7 @@ export const DEFAULT_APP_CONFIG = {
         showAcrylInfo: false,
         showAccessManagement: false,
         nestedDomainsEnabled: true,
+        platformBrowseV2: false,
     },
 };
diff --git a/datahub-web-react/src/graphql/app.graphql b/datahub-web-react/src/graphql/app.graphql
index 4e9bbb11d8c5a..fe28340349147 100644
--- a/datahub-web-react/src/graphql/app.graphql
+++ b/datahub-web-react/src/graphql/app.graphql
@@ -65,6 +65,7 @@ query appConfig {
             showAcrylInfo
             showAccessManagement
             nestedDomainsEnabled
+            platformBrowseV2
         }
     }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
index 53b974b560e2a..e7ec4d313b5f5 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
@@ -235,6 +235,30 @@ public BrowseResultV2 browseV2(
     return _entitySearchService.browseV2(entityName, path, filter, input, start, count);
   }
 
+  /**
+   * Gets browse V2 snapshot of a given path
+   *
+   * @param entityNames entities being browsed
+   * @param path path being browsed
+   * @param filter browse filter
+   * @param input search query
+   * @param start start offset of first group
+   * @param count max number of results requested
+   * @throws RemoteInvocationException
+   */
+  @Nonnull
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count,
+      @Nonnull Authentication authentication) {
+    // TODO: cache browseV2 results
+    return _entitySearchService.browseV2(entityNames, path, filter, input, start, count);
+  }
+
   @SneakyThrows
   @Deprecated
   public void update(@Nonnull final Entity entity, @Nonnull final
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
index bffc2b31af2b9..433772d7e2cfe 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
@@ -249,7 +249,7 @@ private static EntityClient initMockEntityClient(
     EntityClient client = Mockito.mock(EntityClient.class);
     Mockito.when(
             client.browseV2(
-                Mockito.eq(entityName),
+                Mockito.eq(ImmutableList.of(entityName)),
                 Mockito.eq(path),
                 Mockito.eq(filter),
                 Mockito.eq(query),
diff --git a/datahub-web-react/src/appConfigContext.tsx b/datahub-web-react/src/appConfigContext.tsx
index 4087ad453687c..8c1089b868e5a 100644
--- a/datahub-web-react/src/appConfigContext.tsx
+++ b/datahub-web-react/src/appConfigContext.tsx
@@ -50,6 +50,7 @@ export const DEFAULT_APP_CONFIG = {
         showAcrylInfo: false,
         showAccessManagement: false,
         nestedDomainsEnabled: true,
+        platformBrowseV2: false,
     },
 };
diff --git a/datahub-web-react/src/graphql/app.graphql b/datahub-web-react/src/graphql/app.graphql
index 4e9bbb11d8c5a..fe28340349147 100644
--- a/datahub-web-react/src/graphql/app.graphql
+++ b/datahub-web-react/src/graphql/app.graphql
@@ -65,6 +65,7 @@ query appConfig {
             showAcrylInfo
             showAccessManagement
             nestedDomainsEnabled
+            platformBrowseV2
         }
     }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
index 53b974b560e2a..e7ec4d313b5f5 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
@@ -235,6 +235,30 @@ public BrowseResultV2 browseV2(
     return _entitySearchService.browseV2(entityName, path, filter, input, start, count);
   }
 
+  /**
+   * Gets browse V2 snapshot of a given path
+   *
+   * @param entityNames entities being browsed
+   * @param path path being browsed
+   * @param filter browse filter
+   * @param input search query
+   * @param start start offset of first group
+   * @param count max number of results requested
+   * @throws RemoteInvocationException
+   */
+  @Nonnull
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count,
+      @Nonnull Authentication authentication) {
+    // TODO: cache browseV2 results
+    return _entitySearchService.browseV2(entityNames, path, filter, input, start, count);
+  }
+
   @SneakyThrows
   @Deprecated
   public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication)
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java
index f40da59a149fa..fd7491fe32ea3 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java
@@ -210,6 +210,18 @@ public BrowseResultV2 browseV2(
     return esBrowseDAO.browseV2(entityName, path, filter, input, start, count);
   }
 
+  @Nonnull
+  @Override
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count) {
+    return esBrowseDAO.browseV2(entityNames, path, filter, input, start, count);
+  }
+
   @Nonnull
   @Override
   public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java
index 5ea60b24a577a..3c71a2dfd9180 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java
@@ -427,6 +427,44 @@ public BrowseResultV2 browseV2(
     }
   }
 
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entities,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count) {
+    try {
+      final SearchResponse groupsResponse;
+
+      try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) {
+        final String finalInput = input.isEmpty() ?
"*" : input; + groupsResponse = + client.search( + constructGroupsSearchRequestBrowseAcrossEntities( + entities, path, filter, finalInput), + RequestOptions.DEFAULT); + } + + final BrowseGroupsResultV2 browseGroupsResult = + extractGroupsResponseV2(groupsResponse, path, start, count); + final int numGroups = browseGroupsResult.getTotalGroups(); + + return new BrowseResultV2() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) + .setGroups(new BrowseResultGroupV2Array(browseGroupsResult.getGroups())) + .setNumGroups(numGroups) + .setFrom(start) + .setPageSize(count); + } catch (Exception e) { + log.error("Browse Across Entities query failed: " + e.getMessage()); + throw new ESQueryException("Browse Across Entities query failed: ", e); + } + } + @Nonnull private SearchRequest constructGroupsSearchRequestV2( @Nonnull String entityName, @@ -448,6 +486,33 @@ private SearchRequest constructGroupsSearchRequestV2( return searchRequest; } + @Nonnull + private SearchRequest constructGroupsSearchRequestBrowseAcrossEntities( + @Nonnull List entities, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { + + List entitySpecs = + entities.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); + + String[] indexArray = + entities.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new); + + final SearchRequest searchRequest = new SearchRequest(indexArray); + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(0); + searchSourceBuilder.query( + buildQueryStringBrowseAcrossEntities( + entitySpecs, + path, + SearchUtil.transformFilterForEntities(filter, indexConvention), + input)); + searchSourceBuilder.aggregation(buildAggregationsV2(path)); + searchRequest.source(searchSourceBuilder); + return searchRequest; + } + /** * Extracts the name of group from path. * @@ -494,6 +559,32 @@ private QueryBuilder buildQueryStringV2( return queryBuilder; } + @Nonnull + private QueryBuilder buildQueryStringBrowseAcrossEntities( + @Nonnull List entitySpecs, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { + final int browseDepthVal = getPathDepthV2(path); + + final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); + + QueryBuilder query = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getQuery(input, false); + queryBuilder.must(query); + + if (!path.isEmpty()) { + queryBuilder.filter(QueryBuilders.matchQuery(BROWSE_PATH_V2, path)); + } + + queryBuilder.filter(QueryBuilders.rangeQuery(BROWSE_PATH_V2_DEPTH).gt(browseDepthVal)); + + queryBuilder.filter(SearchRequestHandler.getFilterQuery(filter)); + + return queryBuilder; + } + @Nonnull private AggregationBuilder buildAggregationsV2(@Nonnull String path) { final String currentLevel = ESUtils.escapeReservedCharacters(path) + "␟.*"; diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index a52b705cb8da6..0ea6b8712953e 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -317,6 +317,7 @@ featureFlags: showAccessManagement: ${SHOW_ACCESS_MANAGEMENT:false} #Whether we should show AccessManagement tab in the datahub UI. 
showSearchFiltersV2: ${SHOW_SEARCH_FILTERS_V2:true} # Enables showing the search filters V2 experience. showBrowseV2: ${SHOW_BROWSE_V2:true} # Enables showing the browse v2 sidebar experience. + platformBrowseV2: ${PLATFORM_BROWSE_V2:false} # Enables the platform browse experience, instead of the entity-oriented browse default. preProcessHooks: uiEnabled: ${PRE_PROCESS_HOOKS_UI_ENABLED:true} # Circumvents Kafka for processing index updates for UI changes sourced from GraphQL to avoid processing delays showAcrylInfo: ${SHOW_ACRYL_INFO:false} # Show different CTAs within DataHub around moving to Managed DataHub. Set to true for the demo site. diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 7bc50a8f3dc7e..598c252b4f766 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -153,6 +153,28 @@ public BrowseResultV2 browseV2( @Nonnull Authentication authentication) throws RemoteInvocationException; + /** + * Gets browse snapshot of a given path + * + * @param entityNames entities being browsed + * @param path path being browsed + * @param filter browse filter + * @param input search query + * @param start start offset of first group + * @param count max number of results requested + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResultV2 browseV2( + @Nonnull List entityNames, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) + throws RemoteInvocationException; + @Deprecated public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index c854cb9dd279e..d68c472ea9170 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -381,6 +381,20 @@ public BrowseResultV2 browseV2( throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } + @Nonnull + @Override + public BrowseResultV2 browseV2( + @Nonnull List entityNames, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); + } + public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException { EntitiesDoIngestRequestBuilder requestBuilder = diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 09a63e769f025..189ae09e1b938 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -207,6 +207,25 @@ public BrowseResultV2 browseV2( 
      int start,
      int count);
 
+  /**
+   * Gets browse snapshot of a given path
+   *
+   * @param entityNames set of entities being browsed
+   * @param path path being browsed
+   * @param filter browse filter
+   * @param input search query
+   * @param start start offset of first group
+   * @param count max number of results requested
+   */
+  @Nonnull
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count);
+
   /**
    * Gets a list of paths for a given urn.
    *

From 1124ccc4ee02e60980af19d525d5203dd6719a1d Mon Sep 17 00:00:00 2001
From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com>
Date: Tue, 19 Dec 2023 17:29:37 +0530
Subject: [PATCH 266/792] fix(ui/users): searching for users on Users page shows incorrect roles (#9474)

---
 datahub-web-react/src/app/identity/user/UserList.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx
index dce3aa2c68a8d..8e2bc21f0693f 100644
--- a/datahub-web-react/src/app/identity/user/UserList.tsx
+++ b/datahub-web-react/src/app/identity/user/UserList.tsx
@@ -77,7 +77,7 @@ export const UserList = () => {
                 query: (query?.length && query) || undefined,
             },
         },
-        fetchPolicy: (query?.length || 0) > 0 ? 'no-cache' : 'cache-first',
+        fetchPolicy: 'no-cache',
     });
 
     const totalUsers = usersData?.listUsers?.total || 0;

From 94a1603676b6a0fb9e2129b416caf39b100f6d0f Mon Sep 17 00:00:00 2001
From: Tamas Nemeth
Date: Tue, 19 Dec 2023 16:30:21 +0100
Subject: [PATCH 267/792] fix(ingest/redshift): Fixing operation query to not return duplicate operations (#9481)

---
 .../ingestion/source/redshift/usage.py | 26 ++++++++++++-------
 1 file changed, 16 insertions(+), 10 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py
index 409027a8805a0..e40406b994c9b 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py
@@ -85,15 +85,18 @@
             sq.endtime AS endtime,
             'insert' AS operation_type
           FROM
-            stl_insert si
+            (select userid, query, sum(rows) as rows, tbl
+            from stl_insert si
+            where si.rows > 0
+            AND si.starttime >= '{start_time}'
+            AND si.starttime < '{end_time}'
+            group by userid, query, tbl
+            ) as si
           JOIN svv_table_info sti ON si.tbl = sti.table_id
           JOIN stl_query sq ON si.query = sq.query
           JOIN svl_user_info sui ON sq.userid = sui.usesysid
           WHERE
-            si.starttime >= '{start_time}'
-            AND si.starttime < '{end_time}'
-            AND si.rows > 0
-            AND sq.aborted = 0)
+            sq.aborted = 0)
         UNION
           (SELECT DISTINCT sd.userid AS userid,
@@ -109,15 +112,18 @@
             sq.endtime AS endtime,
             'delete' AS operation_type
           FROM
-            stl_delete sd
+            (select userid, query, sum(rows) as rows, tbl
+            from stl_delete sd
+            where sd.rows > 0
+            AND sd.starttime >= '{start_time}'
+            AND sd.starttime < '{end_time}'
+            group by userid, query, tbl
+            ) as sd
          JOIN svv_table_info sti ON sd.tbl = sti.table_id
          JOIN stl_query sq ON sd.query = sq.query
          JOIN svl_user_info sui ON sq.userid = sui.usesysid
          WHERE
-           sd.starttime >= '{start_time}'
-           AND sd.starttime < '{end_time}'
-           AND sd.rows > 0
-           AND sq.aborted = 0)
+           sq.aborted = 0)
         ORDER BY endtime DESC
 """.strip()
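The fix pre-aggregates stl_insert/stl_delete by (userid, query, tbl) before joining, because Redshift's system tables can record several rows (for example, one per slice) for a single logical statement, and joining those rows directly yielded duplicate operation events. A small self-contained sketch of the idea on synthetic data (sqlite stands in for Redshift here; the values are made up):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        """
        CREATE TABLE stl_insert (userid INT, query INT, rows INT, tbl INT);
        -- one logical insert for query 100, recorded on two slices:
        INSERT INTO stl_insert VALUES (1, 100, 5, 42), (1, 100, 7, 42);
        """
    )
    # Collapsing the slice-level rows per (userid, query, tbl) first leaves
    # exactly one operation per query, with the true total row count:
    deduped = conn.execute(
        """
        SELECT userid, query, SUM(rows) AS rows, tbl
        FROM stl_insert
        WHERE rows > 0
        GROUP BY userid, query, tbl
        """
    ).fetchall()
    assert deduped == [(1, 100, 12, 42)]  # one operation, 12 rows total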
From 265d6bdb534c17b1b370033b81a5c20c434b49d0 Mon Sep 17 00:00:00 2001
From: purnimagarg1 <139125209+purnimagarg1@users.noreply.github.com>
Date: Tue, 19 Dec 2023 22:41:18 +0530
Subject: [PATCH 268/792] Fade recipe section to transparent on Ingestion Run Details (#9404)

---
 .../ExecutionRequestDetailsModal.tsx | 35 +++++++++++--------
 1 file changed, 20 insertions(+), 15 deletions(-)

diff --git a/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx b/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx
index 96dfc05e39153..0799f8af1173d 100644
--- a/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx
+++ b/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx
@@ -83,11 +83,11 @@ const ShowMoreButton = styled(Button)`
     padding: 0px;
 `;
 
-const LogsContainer = styled.div<LogsContainerProps>`
+const DetailsContainer = styled.div<DetailsContainerProps>`
     margin-bottom: -25px;
     ${(props) =>
-        props.areLogsExpandable &&
-        !props.showExpandedLogs &&
+        props.areDetailsExpandable &&
+        !props.showExpandedDetails &&
         `
         -webkit-mask-image: linear-gradient(to bottom, rgba(0,0,0,1) 50%, rgba(255,0,0,0.5) 60%, rgba(255,0,0,0) 90% );
         mask-image: linear-gradient(to bottom, rgba(0,0,0,1) 50%, rgba(255,0,0,0.5) 60%, rgba(255,0,0,0) 90%);
@@ -102,9 +102,9 @@ const modalBodyStyle = {
     padding: 0,
 };
 
-type LogsContainerProps = {
-    showExpandedLogs: boolean;
-    areLogsExpandable: boolean;
+type DetailsContainerProps = {
+    showExpandedDetails: boolean;
+    areDetailsExpandable: boolean;
 };
 
 type Props = {
@@ -124,7 +124,7 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => {
         downloadFile(output, `exec-${urn}.log`);
     };
 
-    const logs = (showExpandedLogs && output) || output.slice(0, 250);
+    const logs = (showExpandedLogs && output) || output?.split('\n').slice(0, 5).join('\n');
     const result = data?.executionRequest?.result?.status;
 
     useEffect(() => {
@@ -154,10 +154,10 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => {
     } catch (e) {
         recipeYaml = '';
     }
-    const recipe = showExpandedRecipe ? recipeYaml : recipeYaml?.split('\n').slice(0, 1).join('\n');
+    const recipe = showExpandedRecipe ? recipeYaml : recipeYaml?.split('\n').slice(0, 5).join('\n');
 
-    const areLogsExpandable = output.length > 250;
-    const isRecipeExpandable = recipeYaml?.includes('\n');
+    const areLogsExpandable = output?.split(/\r\n|\r|\n/)?.length > 5;
+    const isRecipeExpandable = recipeYaml?.split(/\r\n|\r|\n/)?.length > 5;
 
     return ( { Download - +
{`${logs}${!showExpandedLogs && areLogsExpandable ? '...' : ''}`}
-
+ {areLogsExpandable && ( setShowExpandedLogs(!showExpandedLogs)}> {showExpandedLogs ? 'Hide' : 'Show More'} @@ -216,9 +216,14 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { The recipe used for this ingestion run. - -
{`${recipe}${!showExpandedRecipe && isRecipeExpandable ? '\n...' : ''}`}
-
+ + +
{`${recipe}${!showExpandedRecipe && isRecipeExpandable ? '...' : ''}`}
+
+
{isRecipeExpandable && ( setShowExpandedRecipe((v) => !v)}> {showExpandedRecipe ? 'Hide' : 'Show More'} From 92c9940bbd5fd2109f62b7145cfaf981d40704c3 Mon Sep 17 00:00:00 2001 From: Ellie O'Neil <110510035+eboneil@users.noreply.github.com> Date: Tue, 19 Dec 2023 09:24:03 -0800 Subject: [PATCH 269/792] Allow message_name field for protobuf ingestion (#9480) --- .../java/datahub-protobuf/build.gradle | 9 +++------ .../src/main/java/datahub/protobuf/Proto2DataHub.java | 11 +++++++++++ .../java/datahub/protobuf/ProtobufDatasetTest.java | 6 +++--- .../test/java/datahub/protobuf/ProtobufUtilsTest.java | 4 ++-- .../java/datahub/protobuf/model/ProtobufEnumTest.java | 4 ++-- .../datahub/protobuf/model/ProtobufFieldTest.java | 4 ++-- .../datahub/protobuf/model/ProtobufGraphTest.java | 4 ++-- .../datahub/protobuf/model/ProtobufMessageTest.java | 4 ++-- .../protobuf/model/ProtobufOneOfFieldTest.java | 4 ++-- .../datahub/protobuf/visitors/VisitContextTest.java | 4 ++-- .../protobuf/visitors/dataset/DatasetVisitorTest.java | 4 ++-- .../visitors/dataset/DescriptionVisitorTest.java | 4 ++-- .../protobuf/visitors/dataset/DomainVisitorTest.java | 4 ++-- .../dataset/InstitutionalMemoryVisitorTest.java | 4 ++-- .../dataset/KafkaTopicPropertyVisitorTest.java | 4 ++-- .../visitors/dataset/OwnershipVisitorTest.java | 4 ++-- .../visitors/dataset/PropertyVisitorTest.java | 4 ++-- .../visitors/dataset/TermAssociationVisitorTest.java | 4 ++-- .../field/ProtobufExtensionFieldVisitorTest.java | 4 ++-- .../visitors/field/SchemaFieldVisitorTest.java | 4 ++-- .../datahub/protobuf/visitors/tag/TagVisitorTest.java | 4 ++-- 21 files changed, 53 insertions(+), 45 deletions(-) diff --git a/metadata-integration/java/datahub-protobuf/build.gradle b/metadata-integration/java/datahub-protobuf/build.gradle index 2cb36a14cb9c7..c8082b875d321 100644 --- a/metadata-integration/java/datahub-protobuf/build.gradle +++ b/metadata-integration/java/datahub-protobuf/build.gradle @@ -31,10 +31,10 @@ dependencies { implementation externalDependency.commonsCli implementation externalDependency.httpAsyncClient implementation externalDependency.slf4jApi + implementation externalDependency.jacksonCore compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testImplementation externalDependency.junitJupiterApi - testRuntimeOnly externalDependency.junitJupiterEngine + testImplementation externalDependency.testng } import java.nio.file.Paths @@ -61,10 +61,7 @@ jacocoTestReport { dependsOn test // tests are required to run before generating the report } -test { - useJUnit() - finalizedBy jacocoTestReport -} +test.finalizedBy jacocoTestReport task checkShadowJar(type: Exec) { diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java index dcc95222fabf2..429c6d6bfeba4 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java @@ -67,6 +67,13 @@ public class Proto2DataHub { "[Optional if using --directory] The protobuf source file. 
Typically a .proto file.") .build(); + private static final Option OPTION_MESSAGE_NAME = + Option.builder() + .longOpt("message_name") + .hasArg() + .desc("[Optional] The protobuf message name to read from.") + .build(); + private static final Option OPTION_DIR = Option.builder() .longOpt("directory") @@ -166,6 +173,7 @@ static class AppConfig { private final String dataPlatform; private final String protoc; private final String inputFile; + private final String messageName; private final String inputDir; private final TransportOptions transport; private final String filename; @@ -191,6 +199,7 @@ static class AppConfig { dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT); protoc = cli.getOptionValue(OPTION_DESCRIPTOR); inputFile = cli.getOptionValue(OPTION_FILE, null); + messageName = cli.getOptionValue(OPTION_MESSAGE_NAME, null); transport = TransportOptions.valueOf( cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT)); @@ -250,6 +259,7 @@ public static void main(String[] args) throws Exception { .addOption(OPTION_DATAHUB_TOKEN) .addOption(OPTION_DESCRIPTOR) .addOption(OPTION_FILE) + .addOption(OPTION_MESSAGE_NAME) .addOption(OPTION_DIR) .addOption(OPTION_EXCLUDE_PATTERN) .addOption(OPTION_DATAHUB_USER) @@ -354,6 +364,7 @@ public static void main(String[] args) throws Exception { .setGithubOrganization(config.githubOrg) .setSlackTeamId(config.slackId) .setSubType(config.subType) + .setMessageName(config.messageName) .build(); dataset diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java index e96bb63220b04..62f3b0453be09 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java @@ -1,8 +1,8 @@ package datahub.protobuf; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; import com.linkedin.common.FabricType; import com.linkedin.common.GlobalTags; @@ -34,7 +34,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufDatasetTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java index e2599cb4c3f68..9bf649041e035 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java @@ -2,13 +2,13 @@ import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtoc; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.ExtensionRegistry; import datahub.protobuf.model.ProtobufGraph; import java.io.IOException; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufUtilsTest { diff 
--git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java index fed9f250b359f..ae539a8e8fa4a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java @@ -1,6 +1,6 @@ package datahub.protobuf.model; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.EnumDescriptorProto; @@ -11,7 +11,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufEnumTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java index 6d4dc8bc4d585..9508f4778e5c8 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.model; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Set; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufFieldTest { private static final DescriptorProto EXPECTED_MESSAGE_PROTO = diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java index 488222b87766d..6ca0c5b45cb5e 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java @@ -2,14 +2,14 @@ import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufGraphTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java index 1d6b3907d76d9..1126895aec57a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java @@ -1,6 +1,6 @@ package datahub.protobuf.model; -import static 
org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; @@ -11,7 +11,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufMessageTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java index c8bd8a322aad5..9db06f23a2bdf 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java @@ -1,6 +1,6 @@ package datahub.protobuf.model; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; @@ -12,7 +12,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufOneOfFieldTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java index 2fc5f3834a749..fe27af7461860 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.testng.Assert.assertNotEquals; import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import datahub.protobuf.model.FieldTypeEdge; @@ -13,7 +13,7 @@ import java.util.Set; import java.util.stream.Collectors; import org.jgrapht.GraphPath; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class VisitContextTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java index de9a0f5ec4abe..6e99599c852b4 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.visitors.dataset; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.data.template.RecordTemplate; @@ -14,7 +14,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class DatasetVisitorTest { diff --git 
a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java index 679048fb48a53..42d8f1ad4c83c 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java @@ -1,14 +1,14 @@ package datahub.protobuf.visitors.dataset; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import datahub.protobuf.model.ProtobufGraph; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class DescriptionVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java index c24fc30766f0e..3330c09c49436 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; @@ -10,7 +10,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class DomainVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java index a57916441bfcb..45be30fe96210 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.visitors.dataset; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; @@ -9,7 +9,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class InstitutionalMemoryVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java index 5f8572cf6ddd8..2da53dad2c0be 100644 --- 
a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; @@ -11,7 +11,7 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class KafkaTopicPropertyVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java index 1b0aff28eb517..adc94487dab3c 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; @@ -14,7 +14,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class OwnershipVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java index 13912100f28a5..be65330954051 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java @@ -3,7 +3,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; import static java.util.Map.entry; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; @@ -11,7 +11,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class PropertyVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java index f734c00bb76e0..79e7075c65209 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java +++ 
b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.GlossaryTermUrn; @@ -10,7 +10,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class TermAssociationVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java index eec397011a4ce..ff1aa643ac8df 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.visitors.field; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -23,7 +23,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufExtensionFieldVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java index af31a80d3b53a..59d9e0ca6e518 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.schema.NumberType; import com.linkedin.schema.SchemaField; @@ -15,7 +15,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class SchemaFieldVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java index 258d816d9d1da..ab477e19aabe4 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static 
datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.tag.TagProperties; import datahub.event.MetadataChangeProposalWrapper; @@ -11,7 +11,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class TagVisitorTest { From 8f19138f68ce6376588f4e09617be7e3c325a70f Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 19 Dec 2023 12:00:54 -0600 Subject: [PATCH 270/792] feat(docker-compose): consolidate docker-compose profiles (#9478) --- build.gradle | 1 + .../upgrade/config/NoCodeCleanupConfig.java | 12 + .../upgrade/config/NoCodeUpgradeConfig.java | 12 + .../upgrade/config/RestoreBackupConfig.java | 12 + .../upgrade/config/RestoreIndicesConfig.java | 12 + .../datahub/upgrade/nocode/NoCodeUpgrade.java | 12 +- .../nocodecleanup/NoCodeCleanupUpgrade.java | 12 +- .../upgrade/restorebackup/RestoreBackup.java | 12 +- .../restoreindices/RestoreIndices.java | 9 +- docker/build.gradle | 216 ++++----- docker/profiles/README.md | 104 +++++ docker/profiles/cassandra | 1 + docker/profiles/datahub-actions | 1 + docker/profiles/datahub-frontend | 1 + docker/profiles/datahub-gms | 1 + docker/profiles/datahub-mae-consumer | 1 + docker/profiles/datahub-mce-consumer | 1 + docker/profiles/datahub-upgrade | 1 + docker/profiles/docker-compose.actions.yml | 45 ++ docker/profiles/docker-compose.frontend.yml | 119 +++++ docker/profiles/docker-compose.gms.yml | 429 ++++++++++++++++++ .../profiles/docker-compose.prerequisites.yml | 387 ++++++++++++++++ docker/profiles/docker-compose.yml | 13 + docker/profiles/elasticsearch | 1 + docker/profiles/elasticsearch-setup | 1 + docker/profiles/kafka-broker | 1 + docker/profiles/kafka-setup | 1 + docker/profiles/monitoring | 1 + docker/profiles/mysql | 1 + docker/profiles/mysql-setup | 1 + docker/profiles/neo4j | 1 + docker/profiles/postgres | 1 + docker/profiles/postgres-setup | 1 + 33 files changed, 1288 insertions(+), 136 deletions(-) create mode 100644 docker/profiles/README.md create mode 120000 docker/profiles/cassandra create mode 120000 docker/profiles/datahub-actions create mode 120000 docker/profiles/datahub-frontend create mode 120000 docker/profiles/datahub-gms create mode 120000 docker/profiles/datahub-mae-consumer create mode 120000 docker/profiles/datahub-mce-consumer create mode 120000 docker/profiles/datahub-upgrade create mode 100644 docker/profiles/docker-compose.actions.yml create mode 100644 docker/profiles/docker-compose.frontend.yml create mode 100644 docker/profiles/docker-compose.gms.yml create mode 100644 docker/profiles/docker-compose.prerequisites.yml create mode 100644 docker/profiles/docker-compose.yml create mode 120000 docker/profiles/elasticsearch create mode 120000 docker/profiles/elasticsearch-setup create mode 120000 docker/profiles/kafka-broker create mode 120000 docker/profiles/kafka-setup create mode 120000 docker/profiles/monitoring create mode 120000 docker/profiles/mysql create mode 120000 docker/profiles/mysql-setup create mode 120000 docker/profiles/neo4j create mode 120000 docker/profiles/postgres create mode 120000 docker/profiles/postgres-setup diff --git a/build.gradle b/build.gradle index a7a85db0398e2..bb01a15a7db8d 100644 --- a/build.gradle +++ b/build.gradle @@ -46,6 +46,7 @@ plugins { id 'com.gorylenko.gradle-git-properties' version 
'2.4.1' id 'com.github.johnrengelman.shadow' version '8.1.1' apply false id 'com.palantir.docker' version '0.35.0' apply false + id 'com.avast.gradle.docker-compose' version '0.17.5' id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 24bcec5852b4f..5ba5c8a90fd4a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -7,13 +7,16 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class NoCodeCleanupConfig { @@ -26,6 +29,7 @@ public class NoCodeCleanupConfig { "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -34,4 +38,12 @@ public NoCodeCleanupUpgrade createInstance() { final IndexConvention indexConvention = applicationContext.getBean(IndexConvention.class); return new NoCodeCleanupUpgrade(ebeanServer, graphClient, searchClient, indexConvention); } + + @Bean(name = "noCodeCleanup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeCleanupUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeCleanupUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index 68009d7ed1718..d968e8521867e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -6,12 +6,15 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class NoCodeUpgradeConfig { @@ -19,6 +22,7 @@ public class NoCodeUpgradeConfig { @Bean(name = "noCodeUpgrade") @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) + 
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
   @Nonnull
   public NoCodeUpgrade createInstance() {
     final Database ebeanServer = applicationContext.getBean(Database.class);
@@ -29,4 +33,12 @@ public NoCodeUpgrade createInstance() {
 
     return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient);
   }
+
+  @Bean(name = "noCodeUpgrade")
+  @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra")
+  @Nonnull
+  public NoCodeUpgrade createNotImplInstance() {
+    log.warn("NoCode is not supported for cassandra!");
+    return new NoCodeUpgrade(null, null, null, null);
+  }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java
index 743e4ffe84b0e..116d62878f5c6 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java
@@ -8,12 +8,15 @@
 import com.linkedin.metadata.search.EntitySearchService;
 import io.ebean.Database;
 import javax.annotation.Nonnull;
+import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;
 
+@Slf4j
 @Configuration
 public class RestoreBackupConfig {
   @Autowired ApplicationContext applicationContext;
@@ -27,6 +30,7 @@ public class RestoreBackupConfig {
     "searchService",
     "entityRegistry"
   })
+  @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
   @Nonnull
   public RestoreBackup createInstance() {
     final Database ebeanServer = applicationContext.getBean(Database.class);
@@ -40,4 +44,12 @@ public RestoreBackup createInstance() {
     return new RestoreBackup(
         ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient);
   }
+
+  @Bean(name = "restoreBackup")
+  @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra")
+  @Nonnull
+  public RestoreBackup createNotImplInstance() {
+    log.warn("restoreBackup is not supported for cassandra!");
+    return new RestoreBackup(null, null, null, null, null, null);
+  }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java
index d258c4a4d1a52..9d229f315d709 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java
@@ -7,18 +7,22 @@
 import com.linkedin.metadata.search.EntitySearchService;
 import io.ebean.Database;
 import javax.annotation.Nonnull;
+import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;
 
+@Slf4j
 @Configuration
 public class RestoreIndicesConfig {
@Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -31,4 +35,12 @@ public RestoreIndices createInstance() { return new RestoreIndices( ebeanServer, entityService, entityRegistry, entitySearchService, graphService); } + + @Bean(name = "restoreIndices") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreIndices createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreIndices(null, null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index 6753d309b9f50..674efb2b8ba78 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; public class NoCodeUpgrade implements Upgrade { @@ -26,12 +27,17 @@ public class NoCodeUpgrade implements Upgrade { // Upgrade requires the Database. public NoCodeUpgrade( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); - _cleanupSteps = buildCleanupSteps(); + if (server != null) { + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index 8a267be6ad808..6d3125423b443 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; import org.opensearch.client.RestHighLevelClient; public class NoCodeCleanupUpgrade implements Upgrade { @@ -18,12 +19,17 @@ public class NoCodeCleanupUpgrade implements Upgrade { // Upgrade requires the Database. 
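  // NOTE (editorial comment, not part of the original patch): as in NoCodeUpgrade above, a null
  // Database signals the cassandra configuration; the upgrade then degrades to empty step lists
  // instead of failing at startup.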
public NoCodeCleanupUpgrade( - final Database server, + @Nullable final Database server, final GraphService graphClient, final RestHighLevelClient searchClient, final IndexConvention indexConvention) { - _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); - _cleanupSteps = buildCleanupSteps(); + if (server != null) { + _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index b11abb2d6bc23..4ac295b4fdfb7 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -16,20 +16,26 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public class RestoreBackup implements Upgrade { private final List _steps; public RestoreBackup( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = - buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + if (server != null) { + _steps = + buildSteps( + server, entityService, entityRegistry, entityClient, graphClient, searchClient); + } else { + _steps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 8bb3b0073710a..d38685553dff2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -13,6 +13,7 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; @@ -29,12 +30,16 @@ public class RestoreIndices implements Upgrade { private final List _steps; public RestoreIndices( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, final GraphService graphService) { - _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + if (server != null) { + _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + } else { + _steps = List.of(); + } } @Override diff --git a/docker/build.gradle b/docker/build.gradle index bc79be501b395..190202620c382 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -1,6 +1,9 @@ plugins { id 'java' // required by versioning + id 'docker-compose' } +import com.avast.gradle.dockercompose.tasks.ComposeUp +import com.avast.gradle.dockercompose.tasks.ComposeDownForced apply from: "../gradle/versioning/versioning.gradle" @@ -18,144 +21,107 @@ ext { debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', 
':metadata-jobs:mae-consumer-job'] - debug_compose_args = [ - '-f', 'docker-compose-without-neo4j.yml', - '-f', 'docker-compose-without-neo4j.override.yml', - '-f', 'docker-compose-without-neo4j.m1.yml', // updates to mariadb - '-f', 'docker-compose.dev.yml' - ] + compose_args = ['-f', 'profiles/docker-compose.yml'] debug_reloadable = [ - 'datahub-gms', - 'datahub-frontend-react' + 'datahub-gms-debug', + 'system-update-debug', + 'frontend-debug' ] - // Postgres pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] - pg_compose_args = [ - '-f', 'docker-compose-without-neo4j.yml', - '-f', 'docker-compose-without-neo4j.postgres.override.yml' - ] } -task quickstart(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(quickstart_modules.collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - // environment "ACTIONS_VERSION", 'alpine3.18-slim' - // environment "DATAHUB_ACTIONS_IMAGE", 'nginx' - - // Elastic - // environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch' - // environment "DATAHUB_SEARCH_TAG", '7.10.1' - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] +tasks.register('quickstart') {} +tasks.register('quickstartSlim') {} +tasks.register('quickstartDebug') {} +tasks.register('quickstartPg') {} - commandLine 'bash', '-c', cmd.join(" ") +tasks.withType(ComposeDownForced) { + removeVolumes = true } - -task quickstartSlim(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(([':docker:datahub-ingestion'] + quickstart_modules).collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - environment "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" - environment "ACTIONS_VERSION", "v${version}-slim" - environment "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' - environment "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] - - commandLine 'bash', '-c', cmd.join(" ") +task quickstartNuke { + finalizedBy(tasks.withType(ComposeDownForced)) } -task quickstartNuke(type: Exec, dependsOn: ":metadata-ingestion:install") { - shouldRunAfter(':metadata-ingestion:clean') - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker nuke' - ] - commandLine 'bash', '-c', cmd.join(" ") +dockerCompose { + quickstart { + isRequiredBy(tasks.named('quickstart')) + composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + + environment.put 'DATAHUB_VERSION', "v${version}" + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' 
+ buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + quickstartPg { + isRequiredBy(tasks.named('quickstartPg')) + composeAdditionalArgs = ['--profile', 'quickstart-postgres'] + + environment.put 'DATAHUB_VERSION', "v${version}" + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + quickstartSlim { + isRequiredBy(tasks.named('quickstartSlim')) + composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + + environment.put 'DATAHUB_VERSION', "v${version}" + environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" + environment.put "ACTIONS_VERSION", "v${version}-slim" + environment.put "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' + environment.put "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + quickstartDebug { + isRequiredBy(tasks.named('quickstartDebug')) + composeAdditionalArgs = ['--profile', 'debug'] + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } } - -task quickstartDebug(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(debug_modules.collect { it + ':dockerTagDebug' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - - // Elastic - // environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch' - // environment "DATAHUB_SEARCH_TAG", '7.10.1' - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--version', "debug", - '--dump-logs-on-failure' - ] + debug_compose_args - commandLine 'bash', '-c', cmd.join(" ") +tasks.getByName('quickstartComposeUp').dependsOn( + quickstart_modules.collect { it + ':dockerTag' }) +tasks.getByName('quickstartPgComposeUp').dependsOn( + pg_quickstart_modules.collect { it + ':dockerTag' }) +tasks.getByName('quickstartSlimComposeUp').dependsOn( + ([':docker:datahub-ingestion'] + quickstart_modules) + .collect { it + ':dockerTag' }) +tasks.getByName('quickstartDebugComposeUp').dependsOn( + debug_modules.collect { it + ':dockerTagDebug' } +) +tasks.withType(ComposeUp).configureEach { + shouldRunAfter('quickstartNuke') } + task debugReload(type: Exec) { - def cmd = ['docker compose -p datahub'] + debug_compose_args + ['restart'] + debug_reloadable + def cmd = ['docker compose -p datahub --profile debug'] + compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") } - -task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(pg_quickstart_modules.collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - 
environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - environment "DATAHUB_POSTGRES_VERSION", "15.5" - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] + pg_compose_args - - commandLine 'bash', '-c', cmd.join(" ") -} \ No newline at end of file diff --git a/docker/profiles/README.md b/docker/profiles/README.md new file mode 100644 index 0000000000000..df09f15cd85ce --- /dev/null +++ b/docker/profiles/README.md @@ -0,0 +1,104 @@ +# Docker Compose Profiles + +This directory contains a set of docker compose definitions which are designed to run several configurations +for quickstart use-cases as well as development use-cases. These configurations cover a few of the wide variety of +infrastructure configurations that DataHub can operate on. + +Requirements: +* Use the profiles requires a modern version of docker. +* If using the debug/development profiles, you will need to have built the `debug` docker images locally. See the Development Profiles section for more details. + +```bash +$ cd docker/profiles +$ docker compose --profile up +``` + +Use Control-c (`^c`) to terminate the running system. This will automatically stop all running containers. + +To remove the containers use the following: + +```bash +docker compose --profile rm +``` + +Please refer to docker's documentation for more details. + +The following sections detail a few of the profiles and their intended use-cases. For a complete list of profiles +and their configuration please see the table at the end of each section. + +## Quickstart Profiles + +Quickstart profiles are primarily a way to test drive DataHub features before committing to a production ready deployment. +A couple of these profiles are also used in our continuous integration (CI) tests. + +Note: Quickstart profiles use docker images with the `head` tag. These images up updated when changes are committed +to the DataHub github repository. This can be overridden to use a stable release tag by prefixing the commands with +`DATAHUB_VERSION=v0.12.1` for example. + +### `quickstart` + +This is the default configuration MySQL and OpenSearch for the storage and GMS running with integrated consumers. + +### `quickstart-consumers` + +This configuration is identical to `quickstart` how it runs standalone consumers instead of consumers integrated with the GMS container. + +### `quickstart-postgres` + +Identical to `quickstart` with Postgres instead of MySQL. + +### `quickstart-cassandra` + +Uses Cassandra as the primary data store along with Neo4j as the graph database. + +### `quickstart-storage` + +Just run the `quickstart` data stores without the DataHub components. This mode is useful for debugging when running the frontend and GMS components outside +of docker. 
+
+### Quickstart Profiles Table
+| Profile Name         | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch |
+|----------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------|
+| quickstart           | X     |          |           |       | X        | X   | X       | X            |     |     | X     | X          |
+| quickstart-frontend  | X     |          |           |       | X        |     |         | X            |     |     | X     | X          |
+| quickstart-backend   | X     |          |           |       |          | X   | X       | X            |     |     | X     | X          |
+| quickstart-postgres  |       | X        |           |       | X        | X   | X       | X            |     |     | X     | X          |
+| quickstart-cassandra |       |          | X         | X     | X        | X   | X       | X            |     |     | X     | X          |
+| quickstart-consumers | X     |          |           |       | X        | X   | X       | X            | X   | X   | X     | X          |
+| quickstart-storage   | X     |          |           |       |          |     |         |              |     |     | X     | X          |
+
+## Development Profiles
+
+* Runs `debug` tagged images
+* JVM Debug Mode Enabled
+* Exposes local jars and scripts to the containers
+* Can run non-default one-off configurations (neo4j, cassandra, elasticsearch)
+
+The docker images used are the `debug` images, which are built locally by running the
+following gradle command:
+
+```bash
+./gradlew dockerTagDebug
+```
+
+For a complete list of profiles see the table at the end of this section.
+
+### `quickstart-backend`
+
+Runs everything except for the `frontend` component. Useful for running just a local (non-docker) frontend.
+
+### `quickstart-frontend`
+
+Runs everything except for the GMS. Useful for running just a local (non-docker) GMS instance.
+
+### Development Profiles Table
+| Profile Name        | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch | Elasticsearch |
+|---------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------|---------------|
+| debug               | X     |          |           |       | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-frontend      | X     |          |           |       | X        |     |         | X            |     |     | X     | X          |               |
+| debug-backend       | X     |          |           |       |          | X   | X       | X            |     |     | X     | X          |               |
+| debug-postgres      |       | X        |           |       | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-cassandra     |       |          | X         |       | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-consumers     | X     |          |           |       | X        | X   | X       | X            | X   | X   | X     | X          |               |
+| debug-neo4j         | X     |          |           | X     | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-elasticsearch | X     |          |           |       | X        | X   | X       | X            |     |     | X     |            | X             |
\ No newline at end of file
diff --git a/docker/profiles/cassandra b/docker/profiles/cassandra
new file mode 120000
index 0000000000000..d9af9adbce5ca
--- /dev/null
+++ b/docker/profiles/cassandra
@@ -0,0 +1 @@
+../cassandra
\ No newline at end of file
diff --git a/docker/profiles/datahub-actions b/docker/profiles/datahub-actions
new file mode 120000
index 0000000000000..fea4275be45ff
--- /dev/null
+++ b/docker/profiles/datahub-actions
@@ -0,0 +1 @@
+../datahub-actions/
\ No newline at end of file
diff --git a/docker/profiles/datahub-frontend b/docker/profiles/datahub-frontend
new file mode 120000
index 0000000000000..74a18b81b7e3b
--- /dev/null
+++ b/docker/profiles/datahub-frontend
@@ -0,0 +1 @@
+../datahub-frontend
\ No newline at end of file
diff --git a/docker/profiles/datahub-gms b/docker/profiles/datahub-gms
new file mode 120000
index 0000000000000..de2f067e4c0e0
--- /dev/null
+++ b/docker/profiles/datahub-gms
@@ -0,0 +1 @@
+../datahub-gms
\ No newline at end of file
diff --git a/docker/profiles/datahub-mae-consumer b/docker/profiles/datahub-mae-consumer
new file mode 120000
index 0000000000000..90974047792c5
--- /dev/null
+++ b/docker/profiles/datahub-mae-consumer
@@ -0,0 +1 @@
+../datahub-mae-consumer
\ No newline at end of file
diff --git
a/docker/profiles/datahub-mce-consumer b/docker/profiles/datahub-mce-consumer new file mode 120000 index 0000000000000..288c9d91c28b3 --- /dev/null +++ b/docker/profiles/datahub-mce-consumer @@ -0,0 +1 @@ +../datahub-mce-consumer \ No newline at end of file diff --git a/docker/profiles/datahub-upgrade b/docker/profiles/datahub-upgrade new file mode 120000 index 0000000000000..8ff77fd5562e7 --- /dev/null +++ b/docker/profiles/datahub-upgrade @@ -0,0 +1 @@ +../datahub-upgrade \ No newline at end of file diff --git a/docker/profiles/docker-compose.actions.yml b/docker/profiles/docker-compose.actions.yml new file mode 100644 index 0000000000000..a509a6a67d270 --- /dev/null +++ b/docker/profiles/docker-compose.actions.yml @@ -0,0 +1,45 @@ + +x-datahub-actions-service: &datahub-actions-service + hostname: actions + image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} + env_file: datahub-actions/env/docker.env + environment: + ACTIONS_EXTRA_PACKAGES: ${ACTIONS_EXTRA_PACKAGES:-} + ACTIONS_CONFIG: ${ACTIONS_CONFIG:-} + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + SCHEMA_REGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + +services: + datahub-actions-quickstart: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart + - quickstart-backend + depends_on: + datahub-gms-quickstart: + condition: service_healthy + datahub-actions-quickstart-cassandra: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart-cassandra + depends_on: + datahub-gms-quickstart-cassandra: + condition: service_healthy + datahub-actions-quickstart-postgres: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart-postgres + depends_on: + datahub-gms-quickstart-postgres: + condition: service_healthy + datahub-actions-quickstart-consumers: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart-consumers + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml new file mode 100644 index 0000000000000..2b82829648dac --- /dev/null +++ b/docker/profiles/docker-compose.frontend.yml @@ -0,0 +1,119 @@ + +x-datahub-frontend-service: &datahub-frontend-service + hostname: datahub-frontend-react + image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + ports: + - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 + env_file: datahub-frontend/env/docker.env + environment: &datahub-frontend-service-env + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +x-datahub-frontend-service-dev: &datahub-frontend-service-dev + <<: *datahub-frontend-service + image: linkedin/datahub-frontend-react:debug + ports: + - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 + - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 + environment: + <<: *datahub-frontend-service-env + JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002 + DATAHUB_ANALYTICS_ENABLED: ${DATAHUB_ANALYTICS_ENABLED:-true} + volumes: + - ../../datahub-frontend/build/stage/playBinary:/datahub-frontend + +services: + frontend-quickstart: + <<: *datahub-frontend-service + container_name: frontend + profiles: + - quickstart + - quickstart-frontend + depends_on: + system-update-quickstart: + condition: service_completed_successfully + frontend-quickstart-cassandra: + <<: *datahub-frontend-service + container_name: 
frontend + profiles: + - quickstart-cassandra + depends_on: + system-update-quickstart-cassandra: + condition: service_completed_successfully + frontend-quickstart-postgres: + <<: *datahub-frontend-service + container_name: frontend + profiles: + - quickstart-postgres + depends_on: + system-update-quickstart-postgres: + condition: service_completed_successfully + frontend-quickstart-consumers: + <<: *datahub-frontend-service + container_name: frontend + profiles: + - quickstart-consumers + depends_on: + system-update-quickstart: + condition: service_completed_successfully + frontend-debug: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug + depends_on: + system-update-debug: + condition: service_completed_successfully + frontend-debug-frontend: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-frontend + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + frontend-debug-postgres: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-postgres + depends_on: + system-update-debug-postgres: + condition: service_completed_successfully + frontend-debug-cassandra: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-cassandra + depends_on: + system-update-debug-cassandra: + condition: service_completed_successfully + frontend-debug-consumers: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-consumers + depends_on: + system-update-debug: + condition: service_completed_successfully + frontend-debug-neo4j: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-neo4j + depends_on: + system-update-debug-neo4j: + condition: service_completed_successfully + frontend-debug-elasticsearch: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-elasticsearch + depends_on: + system-update-debug-elasticsearch: + condition: service_completed_successfully \ No newline at end of file diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml new file mode 100644 index 0000000000000..01602c8b906b9 --- /dev/null +++ b/docker/profiles/docker-compose.gms.yml @@ -0,0 +1,429 @@ +################################# +# Common Environment Variables +################################# +x-primary-datastore-mysql-env: &primary-datastore-mysql-env + EBEAN_DATASOURCE_HOST: mysql:3306 + EBEAN_DATASOURCE_URL: 'jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2' + EBEAN_DATASOURCE_DRIVER: com.mysql.jdbc.Driver + +x-primary-datastore-postgres-env: &primary-datastore-postgres-env + EBEAN_DATASOURCE_HOST: postgres:5432 + EBEAN_DATASOURCE_URL: 'jdbc:postgresql://postgres:5432/datahub' + EBEAN_DATASOURCE_DRIVER: org.postgresql.Driver + EBEAN_POSTGRES_USE_AWS_IAM_AUTH: ${EBEAN_POSTGRES_USE_AWS_IAM_AUTH:-false} + +x-primary-datastore-cassandra-env: &primary-datastore-cassandra-env + CASSANDRA_DATASOURCE_USERNAME: cassandra + CASSANDRA_DATASOURCE_PASSWORD: cassandra + CASSANDRA_HOSTS: cassandra + CASSANDRA_PORT: 9042 + CASSANDRA_DATASOURCE_HOST: 'cassandra:9042' + ENTITY_SERVICE_IMPL: cassandra + +x-graph-datastore-neo4j-env: &graph-datastore-neo4j-env + 
GRAPH_SERVICE_IMPL: neo4j + NEO4J_HOST: 'http://neo4j:7474' + NEO4J_URI: 'bolt://neo4j' + NEO4J_USERNAME: neo4j + NEO4J_PASSWORD: datahub +x-graph-datastore-search-env: &graph-datastore-search-env + GRAPH_SERVICE_IMPL: elasticsearch + +x-search-datastore-elasticsearch-env: &search-datastore-env + ELASTICSEARCH_HOST: search + ELASTICSEARCH_PORT: 9200 + ELASTICSEARCH_PROTOCOL: http + ELASTICSEARCH_USE_SSL: ${ELASTICSEARCH_USE_SSL:-false} + +x-kafka-env: &kafka-env + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + # KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + SCHEMA_REGISTRY_TYPE: INTERNAL + KAFKA_SCHEMAREGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + +x-datahub-quickstart-telemetry-env: &datahub-quickstart-telemetry-env + DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-quickstart} + DATAHUB_TELEMETRY_ENABLED: ${DATAHUB_TELEMETRY_ENABLED:-true} + +x-datahub-dev-telemetry-env: &datahub-dev-telemetry-env + DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-dev} + DATAHUB_TELEMETRY_ENABLED: ${DATAHUB_TELEMETRY_ENABLED:-true} + +################################# +# System Update +################################# +x-datahub-system-update-service: &datahub-system-update-service + hostname: datahub-system-update + image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} + command: + - -u + - SystemUpdate + env_file: datahub-upgrade/env/docker.env + environment: &datahub-system-update-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] + SCHEMA_REGISTRY_SYSTEM_UPDATE: ${SCHEMA_REGISTRY_SYSTEM_UPDATE:-true} + SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS: ${SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS:-true} + SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION: ${SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION:-true} + +x-datahub-system-update-service-dev: &datahub-system-update-service-dev + <<: *datahub-system-update-service + image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:debug + ports: + - ${DATAHUB_MAPPED_UPGRADE_DEBUG_PORT:-5003}:5003 + environment: &datahub-system-update-dev-env + <<: [*datahub-dev-telemetry-env, *datahub-system-update-env] + SKIP_ELASTICSEARCH_CHECK: false + REPROCESS_DEFAULT_BROWSE_PATHS_V2: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:-false} + JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5003' + volumes: + - ../../datahub-upgrade/build/libs/:/datahub/datahub-upgrade/bin/ + - ../../metadata-models/src/main/resources/:/datahub/datahub-gms/resources + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +################################# +# GMS +################################# +x-datahub-gms-service: &datahub-gms-service + hostname: datahub-gms + image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + ports: + - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 + env_file: datahub-gms/env/docker.env + environment: &datahub-gms-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] + healthcheck: + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health + start_period: 90s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +x-datahub-gms-service-dev: &datahub-gms-service-dev + <<: *datahub-gms-service + image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:debug + ports: + - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 + - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 + environment: &datahub-gms-dev-env + <<: 
[*datahub-dev-telemetry-env, *datahub-gms-env] + SKIP_ELASTICSEARCH_CHECK: false + METADATA_SERVICE_AUTH_ENABLED: false + JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5001' + BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE: false + SEARCH_SERVICE_ENABLE_CACHE: false + LINEAGE_SEARCH_CACHE_ENABLED: false + SHOW_BROWSE_V2: true + volumes: + - ./datahub-gms/start.sh:/datahub/datahub-gms/scripts/start.sh + - ./datahub-gms/jetty.xml:/datahub/datahub-gms/scripts/jetty.xml + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-gms/scripts/prometheus-config.yaml + - ../../metadata-models/src/main/resources/:/datahub/datahub-gms/resources + - ../../metadata-service/war/build/libs/:/datahub/datahub-gms/bin + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +################################# +# MAE Consumer +################################# +x-datahub-mae-consumer-service: &datahub-mae-consumer-service + hostname: datahub-mae-consumer + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + ports: + - 9091:9091 + env_file: datahub-mae-consumer/env/docker.env + environment: &datahub-mae-consumer-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] + +x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev + <<: *datahub-mae-consumer-service + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:debug + environment: + <<: [*datahub-dev-telemetry-env, *datahub-mae-consumer-env] + volumes: + - ./datahub-mae-consumer/start.sh:/datahub/datahub-mae-consumer/scripts/start.sh + - ../../metadata-models/src/main/resources/:/datahub/datahub-mae-consumer/resources + - ../../metadata-jobs/mae-consumer-job/build/libs/:/datahub/datahub-mae-consumer/bin/ + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml + +################################# +# MCE Consumer +################################# +x-datahub-mce-consumer-service: &datahub-mce-consumer-service + hostname: datahub-mce-consumer + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + ports: + - 9090:9090 + env_file: datahub-mce-consumer/env/docker.env + environment: &datahub-mce-consumer-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] + +x-datahub-mce-consumer-service-dev: &datahub-mce-consumer-service-dev + <<: *datahub-mce-consumer-service + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:debug + environment: + <<: [*datahub-dev-telemetry-env, *datahub-mce-consumer-env] + volumes: + - ./datahub-mce-consumer/start.sh:/datahub/datahub-mce-consumer/scripts/start.sh + - ../../metadata-jobs/mce-consumer-job/build/libs/:/datahub/datahub-mce-consumer/bin + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mce-consumer/scripts/prometheus-config.yaml + +services: + ################################# + # System Update + ################################# + system-update-quickstart: + <<: *datahub-system-update-service + container_name: system-update + profiles: + - quickstart + - quickstart-storage + - quickstart-consumers + - quickstart-frontend + - quickstart-backend + depends_on: + mysql-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + 
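+  # NOTE (editorial comment, not in the original file): each system-update variant below gates
+  # its profile's GMS and frontend services through the `service_completed_successfully`
+  # condition, so schema and index setup finishes before the application containers start.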
system-update-quickstart-cassandra: + <<: *datahub-system-update-service + container_name: system-update + profiles: + - quickstart-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *graph-datastore-neo4j-env, *datahub-system-update-env] + depends_on: + neo4j: + condition: service_healthy + cassandra-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-quickstart-postgres: + <<: *datahub-system-update-service + container_name: system-update + profiles: + - quickstart-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-system-update-env] + depends_on: + postgres-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-debug: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug + - debug-backend + - debug-consumers + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-elasticsearch: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-elasticsearch + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + elasticsearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-postgres: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-system-update-dev-env] + depends_on: + postgres-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-cassandra: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *datahub-system-update-dev-env] + depends_on: + cassandra-setup: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-neo4j: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-neo4j + environment: + <<: [*graph-datastore-neo4j-env, *datahub-system-update-dev-env] + depends_on: + neo4j: + condition: service_healthy + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + ################################# + # GMS + ################################# + datahub-gms-quickstart: + <<: *datahub-gms-service + profiles: + - quickstart + - quickstart-backend + container_name: datahub-gms + depends_on: + system-update-quickstart: + condition: service_completed_successfully + datahub-gms-quickstart-cassandra: + <<: *datahub-gms-service + profiles: + - quickstart-cassandra + container_name: datahub-gms + environment: + <<: [*primary-datastore-cassandra-env, *graph-datastore-neo4j-env, *datahub-gms-env] + depends_on: + system-update-quickstart-cassandra: + condition: service_completed_successfully + 
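+  # NOTE (editorial comment, not in the original file): the quickstart GMS variants all share
+  # `container_name: datahub-gms`, so only one profile's GMS can run at a time; the distinct
+  # service names exist so each profile can carry its own environment overrides and
+  # `depends_on` chain.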
datahub-gms-quickstart-postgres: + <<: *datahub-gms-service + profiles: + - quickstart-postgres + container_name: datahub-gms + environment: + <<: [*primary-datastore-postgres-env, *datahub-gms-env] + depends_on: + system-update-quickstart-postgres: + condition: service_completed_successfully + datahub-gms-quickstart-consumers: + <<: *datahub-gms-service + profiles: + - quickstart-consumers + container_name: datahub-gms + environment: + <<: *datahub-gms-env + MAE_CONSUMER_ENABLED: false + MCE_CONSUMER_ENABLED: false + depends_on: + system-update-quickstart: + condition: service_completed_successfully + datahub-gms-debug: + <<: *datahub-gms-service-dev + profiles: + - debug + - debug-backend + container_name: datahub-gms-dev + depends_on: + system-update-debug: + condition: service_completed_successfully + datahub-gms-debug-postgres: + <<: *datahub-gms-service-dev + profiles: + - debug-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-gms-dev-env] + container_name: datahub-gms-dev + depends_on: + system-update-debug-postgres: + condition: service_completed_successfully + datahub-gms-debug-cassandra: + <<: *datahub-gms-service-dev + profiles: + - debug-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *datahub-gms-dev-env] + container_name: datahub-gms-dev + depends_on: + system-update-debug-cassandra: + condition: service_completed_successfully + datahub-gms-debug-consumers: + <<: *datahub-gms-service-dev + profiles: + - debug-consumers + environment: + <<: *datahub-gms-dev-env + MAE_CONSUMER_ENABLED: false + MCE_CONSUMER_ENABLED: false + container_name: datahub-gms-dev + depends_on: + system-update-debug: + condition: service_completed_successfully + datahub-gms-debug-neo4j: + <<: *datahub-gms-service-dev + profiles: + - debug-neo4j + environment: + <<: [*graph-datastore-neo4j-env, *datahub-gms-dev-env] + container_name: datahub-gms-dev + depends_on: + system-update-debug-neo4j: + condition: service_completed_successfully + datahub-gms-debug-elasticsearch: + <<: *datahub-gms-service-dev + profiles: + - debug-elasticsearch + container_name: datahub-gms-dev + depends_on: + system-update-debug-elasticsearch: + condition: service_completed_successfully + ################################# + # MAE Consumer + ################################# + datahub-mae-consumer-quickstart-consumers: + <<: *datahub-mae-consumer-service + profiles: + - quickstart-consumers + container_name: datahub-mae-consumer + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-mae-consumer-quickstart-consumers-dev: + <<: *datahub-mae-consumer-service-dev + profiles: + - debug-consumers + container_name: datahub-mae-consumer-dev + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy + ################################# + # MCE Consumer + ################################# + datahub-mce-consumer-quickstart-consumers: + <<: *datahub-mce-consumer-service + profiles: + - quickstart-consumers + container_name: datahub-mce-consumer + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-mce-consumer-quickstart-consumers-dev: + <<: *datahub-mce-consumer-service-dev + profiles: + - debug-consumers + container_name: datahub-mce-consumer-dev + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy \ No newline at end of file diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml new file mode 100644 index 0000000000000..d90d4a252f993 --- 
/dev/null +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -0,0 +1,387 @@ +# Common environment +x-search-datastore-search: &search-datastore-environment + ELASTICSEARCH_HOST: search + ELASTICSEARCH_PORT: 9200 + ELASTICSEARCH_PROTOCOL: http + ELASTICSEARCH_USE_SSL: ${ELASTICSEARCH_USE_SSL:-false} + +# Primary Storage Profiles +x-mysql-profiles-quickstart: &mysql-profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-consumers +x-mysql-profiles-dev: &mysql-profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-consumers + - debug-neo4j + - debug-elasticsearch +x-mysql-profiles: &mysql-profiles + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-consumers + - debug + - debug-frontend + - debug-backend + - debug-consumers + - debug-neo4j + - debug-elasticsearch + +x-postgres-profiles-quickstart: &postgres-profiles-quickstart + - quickstart-postgres +x-postgres-profiles-dev: &postgres-profiles-dev + - debug-postgres +x-postgres-profiles: &postgres-profiles + - quickstart-postgres + - debug-postgres + +x-cassandra-profiles: &cassandra-profiles + - quickstart-cassandra + - debug-cassandra + +# Graph Storage Profiles +x-neo4j-profiles: &neo4j-profiles + - quickstart-cassandra + - debug-neo4j + +# Search Storage Profiles +x-elasticsearch-profiles: &elasticsearch-profiles + - debug-elasticsearch + +x-opensearch-profiles-quickstart: &opensearch-profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers +x-opensearch-profiles-dev: &opensearch-profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j +x-opensearch-profiles: &opensearch-profiles + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j + +# Debug vs Quickstart Profiles +x-profiles-quickstart: &profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers +x-profiles-dev: &profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j + - debug-elasticsearch + +services: + mysql: + container_name: mysql + profiles: *mysql-profiles + hostname: mysql + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} + command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=caching_sha2_password + ports: + - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 + env_file: mysql/env/docker.env + restart: on-failure + healthcheck: + test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD + start_period: 10s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - ./mysql/init.sql:/docker-entrypoint-initdb.d/init.sql + - mysqldata:/var/lib/mysql + mysql-setup: &mysql-setup + container_name: mysql-setup + profiles: *mysql-profiles-quickstart + hostname: mysql-setup + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head} + env_file: mysql-setup/env/docker.env + depends_on: + mysql: + condition: service_healthy + labels: + datahub_setup_job: true 
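+  # NOTE (editorial comment, not in the original file): `mysql-setup` defines the YAML anchor
+  # `&mysql-setup`; the dev variant below merges it via `<<: *mysql-setup` and only overrides
+  # the container name, profiles, and image tag. The postgres, kafka, and opensearch setup
+  # jobs in this file follow the same anchor/merge pattern.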
+ mysql-setup-dev: + <<: *mysql-setup + container_name: mysql-setup-dev + profiles: *mysql-profiles-dev + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:debug + postgres: + container_name: postgres + profiles: *postgres-profiles + hostname: postgres + image: postgres:${DATAHUB_POSTGRES_VERSION:-15.5} + env_file: postgres/env/docker.env + ports: + - '5432:5432' + restart: on-failure + healthcheck: + test: [ "CMD-SHELL", "pg_isready" ] + start_period: 20s + interval: 2s + timeout: 10s + retries: 5 + volumes: + - ./postgres/init.sql:/docker-entrypoint-initdb.d/init.sql + - postgresdata:/var/lib/postgresql/data + postgres-setup: &postgres-setup + container_name: postgres-setup + profiles: *postgres-profiles-quickstart + hostname: postgres-setup + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:${DATAHUB_VERSION:-head} + env_file: postgres-setup/env/docker.env + depends_on: + postgres: + condition: service_healthy + labels: + datahub_setup_job: true + postgres-setup-dev: + <<: *postgres-setup + container_name: postgres-setup-dev + profiles: *postgres-profiles-dev + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:debug + cassandra: + container_name: cassandra + profiles: *cassandra-profiles + hostname: cassandra + image: cassandra:4.1 + ports: + - 9042:9042 + healthcheck: + test: cqlsh -u cassandra -p cassandra -e 'describe keyspaces' + interval: 15s + timeout: 10s + retries: 10 + volumes: + - cassandradata:/var/lib/cassandra + cassandra-setup: + container_name: cassandra-setup + profiles: *cassandra-profiles + hostname: cassandra-setup + image: cassandra:4.1 + command: /bin/bash -c "cqlsh cassandra -f /init.cql" + depends_on: + cassandra: + condition: service_healthy + volumes: + - ./cassandra/init.cql:/init.cql + labels: + datahub_setup_job: true + neo4j: + container_name: neo4j + profiles: *neo4j-profiles + hostname: neo4j + image: neo4j:4.4.28-community + ports: + - ${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474}:7474 + - ${DATAHUB_MAPPED_NEO4J_BOLT_PORT:-7687}:7687 + env_file: neo4j/env/docker.env + healthcheck: + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} + start_period: 5s + interval: 1s + retries: 5 + timeout: 5s + volumes: + - neo4jdata:/data + kafka-broker: + container_name: kafka-broker + hostname: kafka-broker + image: confluentinc/cp-kafka:7.4.0 + command: + - /bin/bash + - -c + - | + # Generate KRaft clusterID + file_path="/var/lib/kafka/data/clusterID" + + if [ ! -f "$$file_path" ]; then + /bin/kafka-storage random-uuid > $$file_path + echo "Cluster id has been created..." 
+ # KRaft required step: Format the storage directory with a new cluster ID + kafka-storage format --ignore-formatted -t $$(cat "$$file_path") -c /etc/kafka/kafka.properties + fi + + export CLUSTER_ID=$$(cat "$$file_path") + echo "CLUSTER_ID=$$CLUSTER_ID" + + /etc/confluent/docker/run + ports: + - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 + env_file: kafka-broker/env/docker.env + environment: + KAFKA_NODE_ID: 1 + KAFKA_ADVERTISED_LISTENERS: BROKER://kafka-broker:29092,EXTERNAL://kafka-broker:9092 + KAFKA_LISTENERS: BROKER://kafka-broker:29092,EXTERNAL://kafka-broker:9092,CONTROLLER://kafka-broker:39092 + KAFKA_INTER_BROKER_LISTENER_NAME: BROKER + KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,BROKER:PLAINTEXT,EXTERNAL:PLAINTEXT + KAFKA_PROCESS_ROLES: controller, broker + KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka-broker:39092 + # https://github.com/confluentinc/cp-all-in-one/issues/120 + KAFKA_LOG4J_LOGGERS: 'org.apache.kafka.image.loader.MetadataLoader=WARN' + KAFKA_ZOOKEEPER_CONNECT: null + healthcheck: + test: nc -z kafka-broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} + start_period: 60s + interval: 1s + retries: 5 + timeout: 5s + volumes: + - broker:/var/lib/kafka/data/ + kafka-setup: &kafka-setup + container_name: kafka-setup + profiles: *profiles-quickstart + hostname: kafka-setup + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + env_file: kafka-setup/env/docker.env + environment: &kafka-setup-env + DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-false} + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + USE_CONFLUENT_SCHEMA_REGISTRY: false + depends_on: + kafka-broker: + condition: service_healthy + labels: + datahub_setup_job: true + kafka-setup-dev: + <<: *kafka-setup + container_name: kafka-setup-dev + profiles: *profiles-dev + environment: + <<: *kafka-setup-env + DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-true} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:debug + elasticsearch: + container_name: elasticsearch + profiles: *elasticsearch-profiles + hostname: search + image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} + ports: + - ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200 + env_file: elasticsearch/env/docker.env + environment: + - discovery.type=single-node + - ${XPACK_SECURITY_ENABLED:-xpack.security.enabled=false} + deploy: + resources: + limits: + memory: 1G + healthcheck: + test: curl -sS --fail http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + start_period: 20s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - esdata:/usr/share/elasticsearch/data + elasticsearch-setup-dev: &elasticsearch-setup-dev + container_name: elasticsearch-setup-dev + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + profiles: *elasticsearch-profiles + hostname: elasticsearch-setup + env_file: elasticsearch-setup/env/docker.env + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-false} + depends_on: + elasticsearch: + condition: service_healthy + labels: + datahub_setup_job: true + opensearch: + container_name: opensearch + profiles: *opensearch-profiles + hostname: search + image: ${DATAHUB_SEARCH_IMAGE:-opensearchproject/opensearch}:${DATAHUB_SEARCH_TAG:-2.9.0} + ports: + - ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200 + env_file: elasticsearch/env/docker.env + environment: + - discovery.type=single-node + - 
${XPACK_SECURITY_ENABLED:-plugins.security.disabled=true} + deploy: + resources: + limits: + memory: 1G + healthcheck: + test: curl -sS --fail http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + start_period: 20s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - osdata:/usr/share/elasticsearch/data + opensearch-setup: &opensearch-setup + <<: *elasticsearch-setup-dev + container_name: opensearch-setup + profiles: *opensearch-profiles-quickstart + hostname: opensearch-setup + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} + depends_on: + opensearch: + condition: service_healthy + labels: + datahub_setup_job: true + opensearch-setup-dev: + <<: *opensearch-setup + container_name: opensearch-setup-dev + profiles: *opensearch-profiles-dev + hostname: opensearch-setup-dev + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} + depends_on: + opensearch: + condition: service_healthy + +networks: + default: + name: datahub_network + +volumes: + neo4jdata: + esdata: + osdata: + broker: + mysqldata: + cassandradata: + postgresdata: diff --git a/docker/profiles/docker-compose.yml b/docker/profiles/docker-compose.yml new file mode 100644 index 0000000000000..534ca9702e2d7 --- /dev/null +++ b/docker/profiles/docker-compose.yml @@ -0,0 +1,13 @@ +--- +version: '3.9' +name: datahub + +include: + # Contains storage layers: i.e. mysql, kafka, elasticsearch + - docker-compose.prerequisites.yml + # Actions pod + - docker-compose.actions.yml + # Frontend + - docker-compose.frontend.yml + # Remaining components: i.e. 
gms, system-update, consumers + - docker-compose.gms.yml diff --git a/docker/profiles/elasticsearch b/docker/profiles/elasticsearch new file mode 120000 index 0000000000000..7712783b3e8d6 --- /dev/null +++ b/docker/profiles/elasticsearch @@ -0,0 +1 @@ +../elasticsearch \ No newline at end of file diff --git a/docker/profiles/elasticsearch-setup b/docker/profiles/elasticsearch-setup new file mode 120000 index 0000000000000..670a10e8c3786 --- /dev/null +++ b/docker/profiles/elasticsearch-setup @@ -0,0 +1 @@ +../elasticsearch-setup \ No newline at end of file diff --git a/docker/profiles/kafka-broker b/docker/profiles/kafka-broker new file mode 120000 index 0000000000000..23b248a4e0bbd --- /dev/null +++ b/docker/profiles/kafka-broker @@ -0,0 +1 @@ +../broker \ No newline at end of file diff --git a/docker/profiles/kafka-setup b/docker/profiles/kafka-setup new file mode 120000 index 0000000000000..35b9c167ac26e --- /dev/null +++ b/docker/profiles/kafka-setup @@ -0,0 +1 @@ +../kafka-setup \ No newline at end of file diff --git a/docker/profiles/monitoring b/docker/profiles/monitoring new file mode 120000 index 0000000000000..1371b42ae4593 --- /dev/null +++ b/docker/profiles/monitoring @@ -0,0 +1 @@ +../monitoring \ No newline at end of file diff --git a/docker/profiles/mysql b/docker/profiles/mysql new file mode 120000 index 0000000000000..057b59f760165 --- /dev/null +++ b/docker/profiles/mysql @@ -0,0 +1 @@ +../mysql \ No newline at end of file diff --git a/docker/profiles/mysql-setup b/docker/profiles/mysql-setup new file mode 120000 index 0000000000000..f9199ec3fc58f --- /dev/null +++ b/docker/profiles/mysql-setup @@ -0,0 +1 @@ +../mysql-setup \ No newline at end of file diff --git a/docker/profiles/neo4j b/docker/profiles/neo4j new file mode 120000 index 0000000000000..0d4849d989d43 --- /dev/null +++ b/docker/profiles/neo4j @@ -0,0 +1 @@ +../neo4j \ No newline at end of file diff --git a/docker/profiles/postgres b/docker/profiles/postgres new file mode 120000 index 0000000000000..be56a57bd0ab8 --- /dev/null +++ b/docker/profiles/postgres @@ -0,0 +1 @@ +../postgres \ No newline at end of file diff --git a/docker/profiles/postgres-setup b/docker/profiles/postgres-setup new file mode 120000 index 0000000000000..38f51721feacb --- /dev/null +++ b/docker/profiles/postgres-setup @@ -0,0 +1 @@ +../postgres-setup/ \ No newline at end of file From a29fce9d823dee31480e2efee1dc1bf16fd4c739 Mon Sep 17 00:00:00 2001 From: Nate Bryant Date: Tue, 19 Dec 2023 15:08:55 -0500 Subject: [PATCH 271/792] Adds urnBasedPagination option to datahub-upgrade RestoreIndices (#9232) Co-authored-by: RyanHolstien --- .../restoreindices/RestoreIndices.java | 1 + .../upgrade/restoreindices/SendMAEStep.java | 62 ++++++++++++++++--- docker/datahub-upgrade/README.md | 12 +++- .../metadata/entity/EntityServiceImpl.java | 2 + .../metadata/entity/ebean/EbeanAspectDao.java | 22 ++++++- .../restoreindices/RestoreIndicesArgs.java | 8 +++ .../restoreindices/RestoreIndicesResult.java | 2 + 7 files changed, 96 insertions(+), 13 deletions(-) diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index d38685553dff2..f46bb9b05624d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -24,6 +24,7 @@ public class RestoreIndices implements 
Upgrade { public static final String WRITER_POOL_SIZE = "WRITER_POOL_SIZE"; public static final String URN_ARG_NAME = "urn"; public static final String URN_LIKE_ARG_NAME = "urnLike"; + public static final String URN_BASED_PAGINATION_ARG_NAME = "urnBasedPagination"; public static final String STARTING_OFFSET_ARG_NAME = "startingOffset"; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index ce59cf2edb84e..574b1f08b5f54 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -31,6 +31,7 @@ public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_STARTING_OFFSET = 0; private static final int DEFAULT_THREADS = 1; + private static final boolean DEFAULT_URN_BASED_PAGINATION = false; private final Database _server; private final EntityService _entityService; @@ -89,6 +90,7 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) { result.numThreads = getThreadCount(context.parsedArgs()); result.batchDelayMs = getBatchDelayMs(context.parsedArgs()); result.start = getStartingOffset(context.parsedArgs()); + result.urnBasedPagination = getUrnBasedPagination(context.parsedArgs()); if (containsKey(context.parsedArgs(), RestoreIndices.ASPECT_NAME_ARG_NAME)) { result.aspectName = context.parsedArgs().get(RestoreIndices.ASPECT_NAME_ARG_NAME).get(); } @@ -140,18 +142,49 @@ public Function executable() { List> futures = new ArrayList<>(); startTime = System.currentTimeMillis(); - while (start < rowCount) { - args = args.clone(); - args.start = start; - futures.add(executor.submit(new KafkaJob(context, args))); - start = start + args.batchSize; - } - while (futures.size() > 0) { - List tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult : tmpResults) { - reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + if (args.urnBasedPagination) { + RestoreIndicesResult previousResult = null; + int rowsProcessed = 1; + while (rowsProcessed > 0) { + args = args.clone(); + if (previousResult != null) { + args.lastUrn = previousResult.lastUrn; + args.lastAspect = previousResult.lastAspect; + } + args.start = start; + context + .report() + .addLine( + String.format( + "Getting next batch of urns + aspects, starting with %s - %s", + args.lastUrn, args.lastAspect)); + Future future = executor.submit(new KafkaJob(context, args)); + try { + RestoreIndicesResult result = future.get(); + reportStats(context, finalJobResult, result, rowCount, startTime); + previousResult = result; + rowsProcessed = result.rowsMigrated + result.ignored; + context.report().addLine(String.format("Rows processed this loop %d", rowsProcessed)); + start += args.batchSize; + } catch (InterruptedException | ExecutionException e) { + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + } + } else { + while (start < rowCount) { + args = args.clone(); + args.start = start; + futures.add(executor.submit(new KafkaJob(context, args))); + start = start + args.batchSize; + } + while (futures.size() > 0) { + List tmpResults = iterateFutures(futures); + for (RestoreIndicesResult tmpResult : tmpResults) { + reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + } } } + executor.shutdown(); if (finalJobResult.rowsMigrated != rowCount) { float percentFailed = 
0.0f;
@@ -233,6 +266,15 @@ private int getThreadCount(final Map<String, Optional<String>> parsedArgs) {
     return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME);
   }
 
+  private boolean getUrnBasedPagination(final Map<String, Optional<String>> parsedArgs) {
+    boolean urnBasedPagination = DEFAULT_URN_BASED_PAGINATION;
+    if (containsKey(parsedArgs, RestoreIndices.URN_BASED_PAGINATION_ARG_NAME)) {
+      urnBasedPagination =
+          Boolean.parseBoolean(parsedArgs.get(RestoreIndices.URN_BASED_PAGINATION_ARG_NAME).get());
+    }
+    return urnBasedPagination;
+  }
+
   private int getInt(
       final Map<String, Optional<String>> parsedArgs, int defaultVal, String argKey) {
     int result = defaultVal;
diff --git a/docker/datahub-upgrade/README.md b/docker/datahub-upgrade/README.md
index 0d019971604d6..9c96114cdb2dd 100644
--- a/docker/datahub-upgrade/README.md
+++ b/docker/datahub-upgrade/README.md
@@ -15,8 +15,16 @@ to metadata_aspect_v2 table. Arguments:
 2. **NoCodeDataMigrationCleanup**: Cleanses graph index, search index, and key-value store of legacy DataHub data (metadata_aspect table) once the No Code Data Migration has completed successfully. No arguments.
 
-3. **RestoreIndices**: Restores indices by fetching the latest version of each aspect and producing MAE
-
+3. **RestoreIndices**: Restores indices by fetching the latest version of each aspect and producing MAE. Arguments:
+   - *batchSize* (Optional): The number of rows to migrate at a time. Defaults to 1000.
+   - *batchDelayMs* (Optional): The number of milliseconds of delay between migrated batches. Used for rate limiting. Defaults to 250.
+   - *numThreads* (Optional): The number of threads to use; defaults to 1. Note that this is not used if `urnBasedPagination` is true.
+   - *aspectName* (Optional): The aspect name for producing events.
+   - *urn* (Optional): The urn for producing events.
+   - *urnLike* (Optional): The urn pattern for producing events, using `%` as a wildcard.
+   - *urnBasedPagination* (Optional): Paginate the SQL results using the urn + aspect string instead of `OFFSET`. Defaults to false;
+     enabling it should improve performance for large amounts of data.
+
 4. **RestoreBackup**: Restores the storage stack from a backup of the local database
 
 ## Environment Variables
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
index a333839416556..7bd8e763cdc27 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
@@ -1161,6 +1161,7 @@ public RestoreIndicesResult restoreIndices(
       Urn urn;
       try {
         urn = Urn.createFromString(aspect.getKey().getUrn());
+        result.lastUrn = urn.toString();
       } catch (Exception e) {
         logger.accept(
             String.format(
@@ -1188,6 +1189,7 @@ public RestoreIndicesResult restoreIndices(
       result.timeEntityRegistryCheckMs += System.currentTimeMillis() - startTime;
       startTime = System.currentTimeMillis();
       final String aspectName = aspect.getKey().getAspect();
+      result.lastAspect = aspectName;
 
       // 3.
Verify that the aspect is a valid aspect associated with the entity
       AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName);
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
index b2b47c1d5ba32..26946890daa3b 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
@@ -477,11 +477,31 @@ public PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args) {
     if (args.urnLike != null) {
       exp = exp.like(EbeanAspectV2.URN_COLUMN, args.urnLike);
     }
+
+    int start = args.start;
+    if (args.urnBasedPagination) {
+      start = 0;
+      if (args.lastUrn != null && !args.lastUrn.isEmpty()) {
+        exp = exp.where().ge(EbeanAspectV2.URN_COLUMN, args.lastUrn);
+
+        // To prevent processing the same aspect multiple times in a restore, it compares against
+        // the last aspect if the urn matches the last urn
+        if (args.lastAspect != null && !args.lastAspect.isEmpty()) {
+          exp =
+              exp.where()
+                  .and()
+                  .or()
+                  .ne(EbeanAspectV2.URN_COLUMN, args.lastUrn)
+                  .gt(EbeanAspectV2.ASPECT_COLUMN, args.lastAspect);
+        }
+      }
+    }
+
     return exp.orderBy()
         .asc(EbeanAspectV2.URN_COLUMN)
         .orderBy()
         .asc(EbeanAspectV2.ASPECT_COLUMN)
-        .setFirstRow(args.start)
+        .setFirstRow(start)
         .setMaxRows(args.batchSize)
         .findPagedList();
   }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java
index d8fcbe0b7d44d..e50b44b7f0eca 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java
@@ -11,6 +11,9 @@ public class RestoreIndicesArgs implements Cloneable {
   public String aspectName;
   public String urn;
   public String urnLike;
+  public Boolean urnBasedPagination = false;
+  public String lastUrn = "";
+  public String lastAspect = "";
 
   @Override
   public RestoreIndicesArgs clone() {
@@ -51,4 +54,9 @@ public RestoreIndicesArgs setBatchSize(Integer batchSize) {
     }
     return this;
   }
+
+  public RestoreIndicesArgs setUrnBasedPagination(Boolean urnBasedPagination) {
+    this.urnBasedPagination = urnBasedPagination;
+    return this;
+  }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java
index 8479338660db0..a270cf4548bed 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java
@@ -13,4 +13,6 @@ public class RestoreIndicesResult {
   public long aspectCheckMs = 0;
   public long createRecordMs = 0;
   public long sendMessageMs = 0;
+  public String lastUrn = "";
+  public String lastAspect = "";
 }
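Note on the patch above: `urnBasedPagination` swaps `OFFSET` paging for keyset (seek) pagination, where each batch resumes strictly after the `(urn, aspect)` pair of the last row processed, so the database never re-scans already-consumed rows and concurrent writes cannot shift rows in or out of a batch. A minimal sketch of the same predicate in Python with SQLite; the table layout is illustrative, not DataHub's actual `metadata_aspect_v2` schema:

```python
import sqlite3

# Toy table ordered the same way as EbeanAspectDao: by (urn, aspect).
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE aspects (urn TEXT, aspect TEXT, PRIMARY KEY (urn, aspect))")
conn.executemany(
    "INSERT INTO aspects VALUES (?, ?)",
    [(f"urn:li:dataset:{i}", a) for i in range(10) for a in ("ownership", "status")],
)


def fetch_batch(last_urn: str, last_aspect: str, batch_size: int = 6):
    # Keyset predicate mirroring the ge/ne/gt combination above:
    # only rows strictly after (last_urn, last_aspect) in the sort order.
    return conn.execute(
        """
        SELECT urn, aspect FROM aspects
        WHERE urn >= ? AND (urn != ? OR aspect > ?)
        ORDER BY urn ASC, aspect ASC
        LIMIT ?
        """,
        (last_urn, last_urn, last_aspect, batch_size),
    ).fetchall()


last_urn = last_aspect = ""
while True:
    rows = fetch_batch(last_urn, last_aspect)
    if not rows:
        break
    last_urn, last_aspect = rows[-1]  # resume point, like lastUrn/lastAspect above
```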
From 3777730d782bc1069f7752f74a199aa6447be0d0 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Tue, 19 Dec 2023 15:30:47 -0600
Subject: [PATCH 272/792] fix(quickstart): force strings for mysql version
 (#9485)

---
 docker/quickstart/quickstart_version_mapping.yaml | 8 ++++----
 .../src/datahub/cli/quickstart_versioning.py      | 4 ++--
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/docker/quickstart/quickstart_version_mapping.yaml b/docker/quickstart/quickstart_version_mapping.yaml
index 9948bd55fdc0b..b08cfda175aa9 100644
--- a/docker/quickstart/quickstart_version_mapping.yaml
+++ b/docker/quickstart/quickstart_version_mapping.yaml
@@ -23,7 +23,7 @@ quickstart_version_map:
   default:
     composefile_git_ref: master
     docker_tag: head
-    mysql_tag: 5.7
+    mysql_tag: "5.7"
   # default: # Use this to pin default to a specific version.
   #   composefile_git_ref: fd1bd51541a132017a648f4a2f037eec8f70ba26 # v0.10.0 + quickstart compose file fixes
   #   docker_tag: v0.10.0
@@ -31,19 +31,19 @@ quickstart_version_map:
   head:
     composefile_git_ref: master
     docker_tag: head
-    mysql_tag: 5.7
+    mysql_tag: "5.7"
 
   # v0.13.0 we upgraded MySQL image for EOL
   v0.13.0:
     composefile_git_ref: master
     docker_tag: head
-    mysql_tag: 8.2
+    mysql_tag: "8.2"
 
   # v0.9.6 images contain security vulnerabilities
   v0.9.6:
     composefile_git_ref: v0.9.6.1
     docker_tag: v0.9.6.1
-    mysql_tag: 5.7
+    mysql_tag: "5.7"
 
   # If stable is not defined the latest released version will be used.
   # stable:
diff --git a/metadata-ingestion/src/datahub/cli/quickstart_versioning.py b/metadata-ingestion/src/datahub/cli/quickstart_versioning.py
index be7439f330dfb..1c3ce93c1f788 100644
--- a/metadata-ingestion/src/datahub/cli/quickstart_versioning.py
+++ b/metadata-ingestion/src/datahub/cli/quickstart_versioning.py
@@ -94,7 +94,7 @@ def fetch_quickstart_config(cls) -> "QuickstartVersionMappingConfig":
         try:
             release = cls._fetch_latest_version()
             config.quickstart_version_map["stable"] = QuickstartExecutionPlan(
-                composefile_git_ref=release, docker_tag=release, mysql_tag=release
+                composefile_git_ref=release, docker_tag=release, mysql_tag="5.7"
             )
         except Exception:
             click.echo(
@@ -123,7 +123,7 @@ def get_quickstart_execution_plan(
             QuickstartExecutionPlan(
                 composefile_git_ref=composefile_git_ref,
                 docker_tag=docker_tag,
-                mysql_tag=mysql_tag,
+                mysql_tag=str(mysql_tag),
             ),
         )
         # new CLI version is downloading the composefile corresponding to the requested version
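Why the quotes above matter: a YAML 1.1 loader reads a bare `5.7` as a float, so version tags that look like numbers can silently change type or lose digits before `str(mysql_tag)` ever runs. A quick illustration with PyYAML (the key names here are arbitrary):

```python
import yaml  # PyYAML

parsed = yaml.safe_load('unquoted: 5.70\nquoted: "5.70"')
print(type(parsed["unquoted"]), parsed["unquoted"])  # <class 'float'> 5.7 (trailing zero lost)
print(type(parsed["quoted"]), parsed["quoted"])      # <class 'str'> 5.70
```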
From 76be5173b292b936216aad1409090b70615a78f8 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Tue, 19 Dec 2023 15:52:59 -0600
Subject: [PATCH 273/792] fix(docker): fix frontend dev docker path (#9488)

---
 docker/docker-compose.dev.yml               | 2 +-
 docker/profiles/docker-compose.frontend.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index 774c4e17bee21..a69fb977a3417 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -24,7 +24,7 @@ services:
       - JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002
       - DATAHUB_ANALYTICS_ENABLED=${DATAHUB_ANALYTICS_ENABLED:-true}
     volumes:
-      - ../datahub-frontend/build/stage/playBinary:/datahub-frontend
+      - ../datahub-frontend/build/stage/main:/datahub-frontend
   datahub-gms:
     image: linkedin/datahub-gms:debug
     ports:
diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml
index 2b82829648dac..80cb4e7b4b596 100644
--- a/docker/profiles/docker-compose.frontend.yml
+++ b/docker/profiles/docker-compose.frontend.yml
@@ -21,7 +21,7 @@ x-datahub-frontend-service-dev: &datahub-frontend-service-dev
     JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002
     DATAHUB_ANALYTICS_ENABLED: ${DATAHUB_ANALYTICS_ENABLED:-true}
   volumes:
-    - ../../datahub-frontend/build/stage/playBinary:/datahub-frontend
+    - ../../datahub-frontend/build/stage/main:/datahub-frontend
 
 services:
   frontend-quickstart:

From 16d3df620f07c4d41118be9c8f38dc0cf46df76f Mon Sep 17 00:00:00 2001
From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com>
Date: Wed, 20 Dec 2023 16:32:52 +0530
Subject: [PATCH 274/792] fix(ui): Tab doesn't represent the page you are on
 for non-data asset pages (#9468)

---
 datahub-web-react/src/app/AppProviders.tsx        | 13 ++++----
 .../src/app/entity/group/GroupInfoSideBar.tsx     | 17 +++++++++++
 .../src/app/entity/user/UserInfoSideBar.tsx       | 19 +++++++++++-
 .../src/app/search/SearchablePage.tsx             | 27 +++++++++++++++++
 .../src/app/shared/BrowserTabTitleContext.tsx     | 30 +++++++++++++++++++
 5 files changed, 100 insertions(+), 6 deletions(-)
 create mode 100644 datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx

diff --git a/datahub-web-react/src/app/AppProviders.tsx b/datahub-web-react/src/app/AppProviders.tsx
index 81a8ddbfc9bac..00597e1cf7640 100644
--- a/datahub-web-react/src/app/AppProviders.tsx
+++ b/datahub-web-react/src/app/AppProviders.tsx
@@ -5,6 +5,7 @@ import UserContextProvider from './context/UserContextProvider';
 import QuickFiltersProvider from '../providers/QuickFiltersProvider';
 import SearchContextProvider from './search/context/SearchContextProvider';
 import EntityRegistryProvider from './EntityRegistryProvider';
+import { BrowserTitleProvider } from './shared/BrowserTabTitleContext';
 
 interface Props {
     children: React.ReactNode;
@@ -15,11 +16,13 @@ export default function AppProviders({ children }: Props) {
     return (
         <UserContextProvider>
-            <EntityRegistryProvider>
-                <SearchContextProvider>
-                    <QuickFiltersProvider>{children}</QuickFiltersProvider>
-                </SearchContextProvider>
-            </EntityRegistryProvider>
+            <EntityRegistryProvider>
+                <BrowserTitleProvider>
+                    <SearchContextProvider>
+                        <QuickFiltersProvider>{children}</QuickFiltersProvider>
+                    </SearchContextProvider>
+                </BrowserTitleProvider>
+            </EntityRegistryProvider>
         </UserContextProvider>
     );
 }
diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx
index 07885a4d0f630..044b09dc185e5 100644
--- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx
+++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx
@@ -21,6 +21,7 @@ import {
 } from '../shared/SidebarStyledComponents';
 import GroupMembersSideBarSection from './GroupMembersSideBarSection';
 import { useUserContext } from '../../context/useUserContext';
+import { useBrowserTitle } from '../../shared/BrowserTabTitleContext';
 import StripMarkdownText, { removeMarkdown } from '../shared/components/styled/StripMarkdownText';
 import { Editor } from '../shared/tabs/Documentation/components/editor/Editor';
 import EditGroupDescriptionModal from './EditGroupDescriptionModal';
@@ -157,6 +158,22 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) {
     const { url } = useRouteMatch();
     const history = useHistory();
 
+    const { updateTitle } = useBrowserTitle();
+
+    useEffect(()=>{
+        // You can use the title and updateTitle function here
+        // For example, updating the title when the component mounts
+        if(name){
+            updateTitle(`Group | ${name}`);
+        }
+        // // Don't forget to clean up the title when the component unmounts
+        return () => {
+            if(name){ // added to condition for rerendering issue
+                updateTitle('');
+            }
+        };
+    }, [name, updateTitle]);
+
     /* eslint-disable @typescript-eslint/no-unused-vars */
     const [editGroupModal, showEditGroupModal] = useState(false);
     const me = useUserContext();
diff --git a/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx b/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx
index c01dd3a635924..71bfbfcd49a16 100644
--- a/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx
+++
b/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx @@ -1,5 +1,5 @@ import { Divider, message, Space, Button, Typography, Tag } from 'antd'; -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { EditOutlined, MailOutlined, PhoneOutlined, SlackOutlined } from '@ant-design/icons'; import { useUpdateCorpUserPropertiesMutation } from '../../../graphql/user.generated'; import { EntityRelationship, DataHubRole } from '../../../types.generated'; @@ -21,6 +21,7 @@ import { import EntityGroups from '../shared/EntityGroups'; import { mapRoleIcon } from '../../identity/user/UserUtils'; import { useUserContext } from '../../context/useUserContext'; +import { useBrowserTitle } from '../../shared/BrowserTabTitleContext'; const { Paragraph } = Typography; @@ -61,6 +62,22 @@ export default function UserInfoSideBar({ sideBarData, refetch }: Props) { const me = useUserContext(); const isProfileOwner = me?.user?.urn === urn; + const { updateTitle } = useBrowserTitle(); + + useEffect(()=>{ + // You can use the title and updateTitle function here + // For example, updating the title when the component mounts + if(name){ + updateTitle(`User | ${name}`); + } + // // Don't forget to clean up the title when the component unmounts + return () => { + if(name){ // added to condition for rerendering issue + updateTitle(''); + } + }; + }, [name, updateTitle]); + const getEditModalData = { urn, name, diff --git a/datahub-web-react/src/app/search/SearchablePage.tsx b/datahub-web-react/src/app/search/SearchablePage.tsx index 9d02d85d3634c..53dfc866b9b64 100644 --- a/datahub-web-react/src/app/search/SearchablePage.tsx +++ b/datahub-web-react/src/app/search/SearchablePage.tsx @@ -3,6 +3,7 @@ import { useHistory, useLocation } from 'react-router'; import { debounce } from 'lodash'; import * as QueryString from 'query-string'; import { useTheme } from 'styled-components'; +import { Helmet } from 'react-helmet-async'; import { SearchHeader } from './SearchHeader'; import { useEntityRegistry } from '../useEntityRegistry'; import { EntityType, FacetFilterInput } from '../../types.generated'; @@ -19,6 +20,7 @@ import { useQuickFiltersContext } from '../../providers/QuickFiltersContext'; import { useUserContext } from '../context/useUserContext'; import { useSelectedSortOption } from './context/SearchContext'; import { HALF_SECOND_IN_MS } from '../entity/shared/tabs/Dataset/Queries/utils/constants'; +import { useBrowserTitle } from '../shared/BrowserTabTitleContext'; const styles = { children: { @@ -68,6 +70,28 @@ export const SearchablePage = ({ onSearch, onAutoComplete, children }: Props) => const { user } = userContext; const viewUrn = userContext.localState?.selectedViewUrn; + const { title, updateTitle } = useBrowserTitle(); + + useEffect(() => { + // Update the title only if it's not already set and there is a valid pathname + if (!title && location.pathname) { + const formattedPath = location.pathname + .split('/') + .filter(word => word !== '') + .map(word => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' | '); + + if (formattedPath) { + return updateTitle(formattedPath); + } + } + + // Clean up the title when the component unmounts + return () => { + updateTitle(''); + }; + }, [location.pathname, title, updateTitle]); + useEffect(() => { if (suggestionsData !== undefined) { setNewSuggestionData(suggestionsData); @@ -140,6 +164,9 @@ export const SearchablePage = ({ onSearch, onAutoComplete, children }: Props) => 
                    authenticatedUserPictureLink={user?.editableProperties?.pictureLink}
                    entityRegistry={entityRegistry}
                />
+               <Helmet>
+                   <title>{title}</title>
+               </Helmet>
{children}
            );
diff --git a/datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx b/datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx
new file mode 100644
index 0000000000000..284e2771124c8
--- /dev/null
+++ b/datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx
@@ -0,0 +1,30 @@
+import React, { createContext, ReactNode, useContext } from 'react';
+
+interface BrowserTitleContextProps {
+    title: string;
+    updateTitle: (newTitle: string) => void;
+}
+
+const BrowserTitleContext = createContext<BrowserTitleContextProps | undefined>(undefined);
+
+export const BrowserTitleProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
+    const [title, setTitle] = React.useState('');
+
+    const updateTitle = (newTitle: string) => {
+        setTitle(newTitle);
+    };
+
+    return (
+        <BrowserTitleContext.Provider value={{ title, updateTitle }}>
+            {children}
+        </BrowserTitleContext.Provider>
+    );
+};
+
+export const useBrowserTitle = () => {
+    const context = useContext(BrowserTitleContext);
+    if (!context) {
+        throw new Error('useBrowserTitle must be used within a BrowserTitleProvider');
+    }
+    return context;
+};

From c8e59aabedb9a6f43f4bcfbf20bdffad6abc85d5 Mon Sep 17 00:00:00 2001
From: noggi
Date: Wed, 20 Dec 2023 12:33:23 -0800
Subject: [PATCH 275/792] Do not sync demo in downstream repos (#9493)

---
 .github/workflows/docker-unified.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml
index 169a86000adcc..7cef38b1cd47c 100644
--- a/.github/workflows/docker-unified.yml
+++ b/.github/workflows/docker-unified.yml
@@ -911,13 +911,13 @@ jobs:
         ]
     steps:
       - uses: aws-actions/configure-aws-credentials@v1
-        if: ${{ needs.setup.outputs.publish != 'false' }}
+        if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }}
        with:
          aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }}
          aws-region: us-west-2
      - uses: isbang/sqs-action@v0.2.0
-        if: ${{ needs.setup.outputs.publish != 'false' }}
+        if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }}
        with:
          sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }}
          message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}'

From bf813d1d24107d858260dc2852489e034eb4cf8c Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Wed, 20 Dec 2023 15:49:03 -0500
Subject: [PATCH 276/792] fix(ingest): update ingest_stats event with
 transformer types (#9487)

---
 metadata-ingestion/src/datahub/ingestion/run/pipeline.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py
index 25e17d692109a..d7c70dbea0b14 100644
--- a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py
+++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py
@@ -528,6 +528,9 @@ def log_ingestion_stats(self) -> None:
                 {
                     "source_type": self.config.source.type,
                     "sink_type": self.config.sink.type,
+                    "transformer_types": [
+                        transformer.type for transformer in self.config.transformers or []
+                    ],
                     "records_written": stats.discretize(
                         self.sink.get_report().total_records_written
                     ),

From 50be329492048534cb83c6f81bad87c5c49ee05c Mon Sep 17 00:00:00 2001
From: Sumit Patil
<91715217+sumitappt@users.noreply.github.com> Date: Thu, 21 Dec 2023 13:24:33 +0530 Subject: [PATCH 277/792] feat(ui/glossary): Keep the same tab selected when browsing Glossary (#9469) --- .../shared/EntityDropdown/EntityDropdown.tsx | 1 + .../containers/profile/header/EntityTabs.tsx | 1 + .../entity/shared/containers/profile/utils.ts | 16 ++++++++ .../app/glossary/GlossaryBrowser/NodeItem.tsx | 2 +- .../app/glossary/GlossaryBrowser/TermItem.tsx | 9 ++++- .../e2e/glossary/glossary_navigation.js | 38 +++++++++++++++++++ 6 files changed, 64 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 8d7f1cca9c1cb..664a77a731d34 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -180,6 +180,7 @@ function EntityDropdown(props: Props) { )} {menuItems.has(EntityMenuItems.ADD_TERM) && ( setIsCreateTermModalVisible(true)} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index 58693eca8af0e..25e044259f240 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -39,6 +39,7 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { return ( ( - + ))} )} diff --git a/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx b/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx index 6980c15a1c256..56495b53eded3 100644 --- a/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx +++ b/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx @@ -5,6 +5,7 @@ import { useEntityRegistry } from '../../useEntityRegistry'; import { ANTD_GRAY } from '../../entity/shared/constants'; import { ChildGlossaryTermFragment } from '../../../graphql/glossaryNode.generated'; import { useGlossaryEntityData } from '../../entity/shared/GlossaryEntityContext'; +import { useGlossaryActiveTabPath } from '../../entity/shared/containers/profile/utils'; const TermWrapper = styled.div` font-weight: normal; @@ -47,13 +48,15 @@ interface Props { term: ChildGlossaryTermFragment; isSelecting?: boolean; selectTerm?: (urn: string, displayName: string) => void; + includeActiveTabPath?: boolean; } function TermItem(props: Props) { - const { term, isSelecting, selectTerm } = props; + const { term, isSelecting, selectTerm, includeActiveTabPath } = props; const { entityData } = useGlossaryEntityData(); const entityRegistry = useEntityRegistry(); + const activeTabPath = useGlossaryActiveTabPath(); function handleSelectTerm() { if (selectTerm) { @@ -68,7 +71,9 @@ function TermItem(props: Props) { {!isSelecting && ( {entityRegistry.getDisplayName(term.type, isOnEntityPage ? 
entityData : term)} diff --git a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js index 7ddf36aa87c2d..dd3b0a567c75f 100644 --- a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js +++ b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js @@ -1,4 +1,5 @@ const glossaryTerm = "CypressGlosssaryNavigationTerm"; +const glossarySecondTerm = "CypressGlossarySecondTerm"; const glossaryTermGroup = "CypressGlosssaryNavigationGroup"; const glossaryParentGroup = "CypressNode"; @@ -30,6 +31,39 @@ describe("glossary sidebar navigation test", () => { cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryTermGroup).click().wait(3000); cy.get('*[class^="GlossaryEntitiesList"]').contains(glossaryTerm).should("be.visible"); + // Create another term and move it to the same term group + cy.clickOptionWithText(glossaryTermGroup); + cy.openThreeDotDropdown(); + cy.clickOptionWithTestId("entity-menu-add-term-button"); + + // Wait for the create term modal to be visible + cy.waitTextVisible("Create Glossary Term"); + cy.enterTextInTestId("create-glossary-entity-modal-name", glossarySecondTerm); + cy.clickOptionWithTestId("glossary-entity-modal-create-button"); + + // Wait for the new term to be visible in the sidebar + cy.clickOptionWithText(glossarySecondTerm).wait(3000); + + // Move the term to the created term group + cy.openThreeDotDropdown(); + cy.clickOptionWithTestId("entity-menu-move-button"); + cy.get('[data-testid="move-glossary-entity-modal"]').contains(glossaryTermGroup).click({ force: true }); + cy.get('[data-testid="move-glossary-entity-modal"]').contains(glossaryTermGroup).should("be.visible"); + cy.clickOptionWithTestId("glossary-entity-modal-move-button"); + cy.waitTextVisible("Moved Glossary Term!"); + + // Ensure the new term is under the parent term group in the navigation sidebar + cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryTermGroup).click(); + cy.get('*[class^="GlossaryEntitiesList"]').contains(glossarySecondTerm).should("be.visible"); + + + // Switch between terms and ensure the "Properties" tab is active + cy.clickOptionWithText(glossaryTerm); + cy.get('[data-testid="entity-tab-headers-test-id"]').contains("Properties").click({ force: true }); + cy.get('[data-node-key="Properties"]').contains("Properties").should("have.attr", "aria-selected", "true"); + cy.clickOptionWithText(glossarySecondTerm); + cy.get('[data-node-key="Properties"]').contains("Properties").should("have.attr", "aria-selected", "true"); + // Move a term group from the root level to be under a parent term group cy.goToGlossaryList(); cy.clickOptionWithText(glossaryTermGroup); @@ -52,6 +86,10 @@ describe("glossary sidebar navigation test", () => { cy.clickOptionWithText(glossaryTerm).wait(3000); cy.deleteFromDropdown(); cy.waitTextVisible("Deleted Glossary Term!"); + cy.clickOptionWithText(glossaryTermGroup); + cy.clickOptionWithText(glossarySecondTerm).wait(3000); + cy.deleteFromDropdown(); + cy.waitTextVisible("Deleted Glossary Term!"); cy.clickOptionWithText(glossaryParentGroup); cy.clickOptionWithText(glossaryTermGroup).wait(3000); cy.deleteFromDropdown(); From 80fb145a7b85b323f339d7901658dd9fde5bd4db Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 21 Dec 2023 17:57:41 +0530 Subject: [PATCH 278/792] style(search): Tag overflow add padding (#9497) --- datahub-web-react/src/app/preview/DefaultPreviewCard.tsx 
 | 1 +
 1 file changed, 1 insertion(+)

diff --git a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx
index 36c4c020e7131..a6d8422f827d5 100644
--- a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx
+++ b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx
@@ -114,6 +114,7 @@ const TagContainer = styled.div`
     margin-left: 0px;
     margin-top: 3px;
     flex-wrap: wrap;
+    margin-right: 8px;
 `;
 
 const TagSeparator = styled.div`

From a49a435eef92b20cdc9878c8189b8ca0288e8b7f Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Thu, 21 Dec 2023 19:38:46 +0530
Subject: [PATCH 279/792] feat(analytics): change MAU chart to be until last
 month (#9499)

---
 .../analytics/resolver/GetChartsResolver.java | 3 ++-
 .../datahub/graphql/util/DateUtil.java        | 4 ++++
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java
index 3f635872747a5..6ba3c5090f1c4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java
@@ -91,6 +91,7 @@ private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentica
     final List<AnalyticsChart> charts = new ArrayList<>();
     DateUtil dateUtil = new DateUtil();
     final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek();
+    final DateTime startOfThisMonth = dateUtil.getStartOfThisMonth();
     final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth();
     final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange();
 
@@ -103,7 +104,7 @@ private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentica
     charts.add(
         getActiveUsersTimeSeriesChart(
             startOfNextMonth.minusMonths(12),
-            startOfNextMonth.minusMillis(1),
+            startOfThisMonth.minusMillis(1),
             "Monthly Active Users",
             DateInterval.MONTH));
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java
index 4b837605d4e31..677ad8afbaca3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java
@@ -13,6 +13,10 @@ public DateTime getStartOfNextWeek() {
     return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1));
   }
 
+  public DateTime getStartOfThisMonth() {
+    return setTimeToZero(getNow().withDayOfMonth(1));
+  }
+
   public DateTime getStartOfNextMonth() {
     return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1));
   }
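The MAU fix above is plain date arithmetic: the series still starts twelve months before the start of next month, but now ends one millisecond before the start of the current month, so the in-progress month no longer shows a misleadingly low bar. A rough standard-library Python equivalent of the new window (the Java code above uses Joda-Time-style helpers):

```python
from datetime import date, datetime, timedelta


def mau_window(today: date):
    start_of_this_month = datetime(today.year, today.month, 1)
    # Start of *next* month, handling the December rollover.
    start_of_next_month = datetime(
        today.year + (today.month == 12), today.month % 12 + 1, 1
    )
    start = start_of_next_month.replace(year=start_of_next_month.year - 1)
    end = start_of_this_month - timedelta(milliseconds=1)  # startOfThisMonth.minusMillis(1)
    return start, end


start, end = mau_window(date(2023, 12, 21))
print(start, end)  # 2023-01-01 00:00:00  2023-11-30 23:59:59.999000
```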
From 55cb56821c00ec993ee5a4c560d7b49d8d71258b Mon Sep 17 00:00:00 2001
From: RyanHolstien
Date: Thu, 21 Dec 2023 10:33:25 -0600
Subject: [PATCH 280/792] fix(kafka): fix infinite deserialization logging
 (#9494)

---
 docker/docker-compose-without-neo4j.yml         |  2 ++
 ...docker-compose.consumers-without-neo4j.yml   |  3 ++
 docker/docker-compose.consumers.yml             |  3 ++
 docker/docker-compose.dev.yml                   |  1 +
 docker/docker-compose.yml                       |  2 ++
 .../docker-compose-m1.quickstart.yml            |  1 +
 ...er-compose-without-neo4j-m1.quickstart.yml   |  1 +
 ...ocker-compose-without-neo4j.quickstart.yml   |  1 +
 ...ose.consumers-without-neo4j.quickstart.yml   |  2 ++
 .../docker-compose.consumers.quickstart.yml     |  2 ++
 .../quickstart/docker-compose.quickstart.yml    |  1 +
 .../config/kafka/ConsumerConfiguration.java     |  1 +
 .../src/main/resources/application.yml          |  1 +
 .../kafka/KafkaEventConsumerFactory.java        | 30 ++++++++++++++++---
 14 files changed, 47 insertions(+), 4 deletions(-)

diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml
index 6191994eaa1ea..0d58a1d91b70b 100644
--- a/docker/docker-compose-without-neo4j.yml
+++ b/docker/docker-compose-without-neo4j.yml
@@ -43,6 +43,8 @@ services:
       context: ../
       dockerfile: docker/datahub-gms/Dockerfile
     env_file: datahub-gms/env/docker-without-neo4j.env
+    environment:
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
     healthcheck:
       test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health
       start_period: 90s
diff --git a/docker/docker-compose.consumers-without-neo4j.yml b/docker/docker-compose.consumers-without-neo4j.yml
index 8228951d9385f..f1be585232a1a 100644
--- a/docker/docker-compose.consumers-without-neo4j.yml
+++ b/docker/docker-compose.consumers-without-neo4j.yml
@@ -15,6 +15,8 @@ services:
       context: ../
       dockerfile: docker/datahub-mae-consumer/Dockerfile
     env_file: datahub-mae-consumer/env/docker-without-neo4j.env
+    environment:
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
   datahub-mce-consumer:
     container_name: datahub-mce-consumer
     hostname: datahub-mce-consumer
@@ -28,3 +30,4 @@ services:
     environment:
       - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart}
       - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true}
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
diff --git a/docker/docker-compose.consumers.yml b/docker/docker-compose.consumers.yml
index 2d37094035859..8d331cea2f0b9 100644
--- a/docker/docker-compose.consumers.yml
+++ b/docker/docker-compose.consumers.yml
@@ -15,6 +15,8 @@ services:
       context: ../
       dockerfile: docker/datahub-mae-consumer/Dockerfile
     env_file: datahub-mae-consumer/env/docker.env
+    environment:
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
     depends_on:
       neo4j:
         condition: service_healthy
@@ -36,6 +38,7 @@ services:
       - NEO4J_USERNAME=neo4j
       - NEO4J_PASSWORD=datahub
       - GRAPH_SERVICE_IMPL=neo4j
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
     depends_on:
       neo4j:
         condition: service_healthy
diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index a69fb977a3417..7067b68fba3f9 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -45,6 +45,7 @@ services:
       - SEARCH_SERVICE_ENABLE_CACHE=false
       - LINEAGE_SEARCH_CACHE_ENABLED=false
       - SHOW_BROWSE_V2=true
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
     volumes:
       - ./datahub-gms/start.sh:/datahub/datahub-gms/scripts/start.sh
       - ./datahub-gms/jetty.xml:/datahub/datahub-gms/scripts/jetty.xml
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 95f56fe47e3cc..146055830d04e 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -36,6 +36,8 @@ services:
     container_name: datahub-gms
     hostname: datahub-gms
     image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head}
+    environment:
+      - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true}
     ports:
       - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080
build: diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 7b7ca4052f324..8b87001915283 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index 53dacaf6ef63b..5373e93da6bcb 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 1ca91aa19206d..51a40395e3459 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index d05933df96a43..4ed57dca1f080 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -6,6 +6,7 @@ services: datahub-mae-consumer: container_name: datahub-mae-consumer environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -44,6 +45,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index f0bd3a0f927c8..ba8432d8a89af 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -9,6 +9,7 @@ services: neo4j: condition: service_healthy environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - 
DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -54,6 +55,7 @@ services: - GRAPH_SERVICE_IMPL=neo4j - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index c77b4418b6f36..56071cfe1e9e6 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index b505674f2ed9c..61b9d5c816790 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -6,4 +6,5 @@ public class ConsumerConfiguration { private int maxPartitionFetchBytes; + private boolean stopOnDeserializationError; } diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 0ea6b8712953e..36498f7c45fea 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -236,6 +236,7 @@ kafka: maxRequestSize: ${KAFKA_PRODUCER_MAX_REQUEST_SIZE:5242880} # the max bytes sent by the producer, also see kafka-setup MAX_MESSAGE_BYTES for matching value consumer: maxPartitionFetchBytes: ${KAFKA_CONSUMER_MAX_PARTITION_FETCH_BYTES:5242880} # the max bytes consumed per partition + stopOnDeserializationError: ${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:true} # Stops kafka listener container on deserialization error, allows user to fix problems before moving past problematic offset. 
If false will log and move forward past the offset
   schemaRegistry:
     type: ${SCHEMA_REGISTRY_TYPE:KAFKA} # INTERNAL or KAFKA or AWS_GLUE
     url: ${KAFKA_SCHEMAREGISTRY_URL:http://localhost:8081}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
index 2a6338ac15e93..4c0308546d857 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
@@ -21,6 +21,11 @@
 import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
 import org.springframework.kafka.config.KafkaListenerContainerFactory;
 import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.listener.CommonContainerStoppingErrorHandler;
+import org.springframework.kafka.listener.CommonDelegatingErrorHandler;
+import org.springframework.kafka.listener.DefaultErrorHandler;
+import org.springframework.kafka.support.serializer.DeserializationException;
+import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
 
 @Slf4j
 @Configuration
@@ -66,8 +71,6 @@ private static Map<String, Object> buildCustomizedProperties(
       SchemaRegistryConfig schemaRegistryConfig) {
     KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer();
 
-    // Specify (de)serializers for record keys and for record values.
-    consumerProps.setKeyDeserializer(StringDeserializer.class);
     // Records will be flushed every 10 seconds.
     consumerProps.setEnableAutoCommit(true);
     consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
@@ -81,7 +84,13 @@ private static Map<String, Object> buildCustomizedProperties(
     Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties();
     customizedProperties.put(
-        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer());
+        ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class);
+    customizedProperties.put(
+        ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, StringDeserializer.class);
+    customizedProperties.put(
+        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class);
+    customizedProperties.put(
+        ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, schemaRegistryConfig.getDeserializer());
 
     // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
     schemaRegistryConfig.getProperties().entrySet().stream()
@@ -98,7 +107,8 @@ private static Map<String, Object> buildCustomizedProperties(
   @Bean(name = "kafkaEventConsumer")
   protected KafkaListenerContainerFactory<?> createInstance(
       @Qualifier("kafkaConsumerFactory")
-          DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
+          DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory,
+      @Qualifier("configurationProvider") ConfigurationProvider configurationProvider) {
 
     ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
         new ConcurrentKafkaListenerContainerFactory<>();
@@ -106,6 +116,18 @@ protected KafkaListenerContainerFactory<?> createInstance(
     factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
     factory.setConcurrency(kafkaEventConsumerConcurrency);
 
+    /* Sets up a delegating error handler for Deserialization errors, if disabled will
+    use DefaultErrorHandler (does back-off retry and then logs) rather than stopping the container. Stopping the container
Stopping the container + prevents lost messages until the error can be examined, disabling this will allow progress, but may lose data + */ + if (configurationProvider.getKafka().getConsumer().isStopOnDeserializationError()) { + CommonDelegatingErrorHandler delegatingErrorHandler = + new CommonDelegatingErrorHandler(new DefaultErrorHandler()); + delegatingErrorHandler.addDelegate( + DeserializationException.class, new CommonContainerStoppingErrorHandler()); + factory.setCommonErrorHandler(delegatingErrorHandler); + } + log.info( String.format( "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s", From b80d2f471c559cd31cedb47a79cf07e779b065b9 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 13:35:34 -0500 Subject: [PATCH 281/792] fix(ingest/fivetran): only materialize upstream lineage (#9490) --- .../ingestion/source/fivetran/fivetran.py | 19 +++++++---- .../integration/fivetran/fivetran_golden.json | 32 ------------------- 2 files changed, 12 insertions(+), 39 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py index c0395b4e4e796..12e362fa8a3e3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py +++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py @@ -7,6 +7,7 @@ DataProcessInstance, InstanceRunResult, ) +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SourceCapability, @@ -248,13 +249,17 @@ def _get_connector_workunits( # Map Fivetran's connector entity with Datahub's datajob entity datajob = self._generate_datajob_from_connector(connector) - for mcp in datajob.generate_mcp(materialize_iolets=True): - if mcp.entityType == "dataset" and isinstance(mcp.aspect, StatusClass): - # While we "materialize" the referenced datasets, we don't want them - # to be tracked by stateful ingestion. - yield mcp.as_workunit(is_primary_source=False) - else: - yield mcp.as_workunit() + for mcp in datajob.generate_mcp(materialize_iolets=False): + yield mcp.as_workunit() + + # Materialize the upstream referenced datasets. + # We assume that the downstreams are materialized by other ingestion sources. + for iolet in datajob.inlets: + # We don't want these to be tracked by stateful ingestion. 
From b80d2f471c559cd31cedb47a79cf07e779b065b9 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Thu, 21 Dec 2023 13:35:34 -0500
Subject: [PATCH 281/792] fix(ingest/fivetran): only materialize upstream
 lineage (#9490)

---
 .../ingestion/source/fivetran/fivetran.py     | 19 +++++++----
 .../integration/fivetran/fivetran_golden.json | 32 -------------------
 2 files changed, 12 insertions(+), 39 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py
index c0395b4e4e796..12e362fa8a3e3 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py
@@ -7,6 +7,7 @@
     DataProcessInstance,
     InstanceRunResult,
 )
+from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SourceCapability,
@@ -248,13 +249,17 @@ def _get_connector_workunits(
         # Map Fivetran's connector entity with Datahub's datajob entity
         datajob = self._generate_datajob_from_connector(connector)
-        for mcp in datajob.generate_mcp(materialize_iolets=True):
-            if mcp.entityType == "dataset" and isinstance(mcp.aspect, StatusClass):
-                # While we "materialize" the referenced datasets, we don't want them
-                # to be tracked by stateful ingestion.
-                yield mcp.as_workunit(is_primary_source=False)
-            else:
-                yield mcp.as_workunit()
+        for mcp in datajob.generate_mcp(materialize_iolets=False):
+            yield mcp.as_workunit()
+
+        # Materialize the upstream referenced datasets.
+        # We assume that the downstreams are materialized by other ingestion sources.
+        for iolet in datajob.inlets:
+            # We don't want these to be tracked by stateful ingestion.
+            yield MetadataChangeProposalWrapper(
+                entityUrn=str(iolet),
+                aspect=StatusClass(removed=False),
+            ).as_workunit(is_primary_source=False)
 
         # Map Fivetran's job/sync history entity with Datahub's data process entity
         for job in connector.jobs:
diff --git a/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json b/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json
index a72c960a72296..b8f05fa6e93aa 100644
--- a/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json
+++ b/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json
@@ -178,38 +178,6 @@
     "lastRunId": "no-run-id-provided"
   }
 },
-{
-  "entityType": "dataset",
-  "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)",
-  "changeType": "UPSERT",
-  "aspectName": "status",
-  "aspect": {
-    "json": {
-      "removed": false
-    }
-  },
-  "systemMetadata": {
-    "lastObserved": 1654621200000,
-    "runId": "powerbi-test",
-    "lastRunId": "no-run-id-provided"
-  }
-},
-{
-  "entityType": "dataset",
-  "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)",
-  "changeType": "UPSERT",
-  "aspectName": "status",
-  "aspect": {
-    "json": {
-      "removed": false
-    }
-  },
-  "systemMetadata": {
-    "lastObserved": 1654621200000,
-    "runId": "powerbi-test",
-    "lastRunId": "no-run-id-provided"
-  }
-},
 {
   "entityType": "dataJob",
   "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)",

From a18c72083d763b08282b67146881d4f918b257de Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Thu, 21 Dec 2023 13:50:39 -0500
Subject: [PATCH 282/792] feat(ingest): handle multiline string coercion
 (#9484)

---
 docs-website/download_historical_versions.py  |  4 +-
 docs/developers.md                            |  6 +--
 .../src/datahub/configuration/git.py          | 12 +----
 .../validate_multiline_string.py              | 31 ++++++++++++
 .../ingestion/source/bigquery_v2/lineage.py   |  2 +-
 .../ingestion/source/looker/lookml_source.py  |  7 ++-
 .../source_config/usage/bigquery_usage.py     |  3 ++
 .../src/datahub/utilities/logging_manager.py  |  1 +
 .../unit/config/test_pydantic_validators.py   | 50 +++++++++++++++----
 9 files changed, 86 insertions(+), 30 deletions(-)
 create mode 100644 metadata-ingestion/src/datahub/configuration/validate_multiline_string.py

diff --git a/docs-website/download_historical_versions.py b/docs-website/download_historical_versions.py
index 53ee9cf1e63ef..7493210ffa2a5 100644
--- a/docs-website/download_historical_versions.py
+++ b/docs-website/download_historical_versions.py
@@ -37,9 +37,9 @@ def fetch_urls(
         except Exception as e:
             if attempt < max_retries:
                 print(f"Attempt {attempt + 1}/{max_retries}: {e}")
-                time.sleep(retry_delay)
+                time.sleep(retry_delay * 2**attempt)
             else:
-                print(f"Max retries reached. Unable to fetch data.")
+                print("Max retries reached. Unable to fetch data.")
                 raise
 
diff --git a/docs/developers.md b/docs/developers.md
index 60d31f5e4523f..fe007a56ddc68 100644
--- a/docs/developers.md
+++ b/docs/developers.md
@@ -17,10 +17,8 @@ title: "Local Development"
 
 On macOS, these can be installed using [Homebrew](https://brew.sh/).
```shell -# Install Java 8 and 11 -brew tap homebrew/cask-versions -brew install java11 -brew install --cask zulu8 +# Install Java +brew install openjdk@17 # Install Python brew install python@3.10 # you may need to add this to your PATH diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index a5f88744661a4..3c76c8da0d571 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -1,4 +1,3 @@ -import os import pathlib from typing import Any, Dict, Optional, Union @@ -6,6 +5,7 @@ from datahub.configuration.common import ConfigModel from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.configuration.validate_multiline_string import pydantic_multiline_string _GITHUB_PREFIX = "https://github.com/" _GITLAB_PREFIX = "https://gitlab.com/" @@ -92,15 +92,7 @@ class GitInfo(GitReference): description="The url to call `git clone` on. We infer this for github and gitlab repos, but it is required for other hosts.", ) - @validator("deploy_key_file") - def deploy_key_file_should_be_readable( - cls, v: Optional[FilePath] - ) -> Optional[FilePath]: - if v is not None: - # pydantic does existence checks, we just need to check if we can read it - if not os.access(v, os.R_OK): - raise ValueError(f"Unable to read deploy key file {v}") - return v + _fix_deploy_key_newlines = pydantic_multiline_string("deploy_key") @validator("deploy_key", pre=True, always=True) def deploy_key_filled_from_deploy_key_file( diff --git a/metadata-ingestion/src/datahub/configuration/validate_multiline_string.py b/metadata-ingestion/src/datahub/configuration/validate_multiline_string.py new file mode 100644 index 0000000000000..0baaf4f0264b9 --- /dev/null +++ b/metadata-ingestion/src/datahub/configuration/validate_multiline_string.py @@ -0,0 +1,31 @@ +from typing import Optional, Type, Union + +import pydantic + + +def pydantic_multiline_string(field: str) -> classmethod: + """If the field is present and contains an escaped newline, replace it with a real newline. + + This makes the assumption that the field value is never supposed to have a + r"\n" in it, and instead should only have newline characters. This is generally + a safe assumption for SSH keys and similar. + + The purpose of this helper is to make us more forgiving of small formatting issues + in recipes, without sacrificing correctness across the board. + """ + + def _validate_field( + cls: Type, v: Union[None, str, pydantic.SecretStr] + ) -> Optional[str]: + if v is not None: + if isinstance(v, pydantic.SecretStr): + v = v.get_secret_value() + v = v.replace(r"\n", "\n") + + return v + + # Hack: Pydantic maintains unique list of validators by referring its __name__. + # https://github.com/pydantic/pydantic/blob/v1.10.9/pydantic/main.py#L264 + # This hack ensures that multiple field deprecated do not overwrite each other. 
+ _validate_field.__name__ = f"{_validate_field.__name__}_{field}" + return pydantic.validator(field, pre=True, allow_reuse=True)(_validate_field) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index eddd08c92b808..b44b06feb95af 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -175,7 +175,7 @@ def make_lineage_edges_from_parsing_result( table_name = str( BigQueryTableRef.from_bigquery_table( BigqueryTableIdentifier.from_string_name( - DatasetUrn.create_from_string(table_urn).get_dataset_name() + DatasetUrn.from_string(table_urn).name ) ) ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index b76bef49a7e6f..33079f3fd9ac1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -2060,10 +2060,9 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 ) logger.debug("Failed to process explore", exc_info=e) - processed_view_files = processed_view_map.get(model.connection) - if processed_view_files is None: - processed_view_map[model.connection] = set() - processed_view_files = processed_view_map[model.connection] + processed_view_files = processed_view_map.setdefault( + model.connection, set() + ) project_name = self.get_project_name(model_name) logger.debug(f"Model: {model_name}; Includes: {model.resolved_includes}") diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py b/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py index 5eb9c83236e4f..13abe73cc4e09 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py @@ -11,6 +11,7 @@ from datahub.configuration.common import AllowDenyPattern, ConfigurationError from datahub.configuration.source_common import EnvConfigMixin from datahub.configuration.validate_field_removal import pydantic_removed_field +from datahub.configuration.validate_multiline_string import pydantic_multiline_string from datahub.ingestion.source.usage.usage_common import BaseUsageConfig from datahub.ingestion.source_config.bigquery import BigQueryBaseConfig @@ -44,6 +45,8 @@ class BigQueryCredential(ConfigModel): description="If not set it will be default to https://www.googleapis.com/robot/v1/metadata/x509/client_email", ) + _fix_private_key_newlines = pydantic_multiline_string("private_key") + @pydantic.root_validator(skip_on_failure=True) def validate_config(cls, values: Dict[str, Any]) -> Dict[str, Any]: if values.get("client_x509_cert_url") is None: diff --git a/metadata-ingestion/src/datahub/utilities/logging_manager.py b/metadata-ingestion/src/datahub/utilities/logging_manager.py index a8eacb0a9938d..62aa1ca7ab791 100644 --- a/metadata-ingestion/src/datahub/utilities/logging_manager.py +++ b/metadata-ingestion/src/datahub/utilities/logging_manager.py @@ -199,6 +199,7 @@ def configure_logging(debug: bool, log_file: Optional[str] = None) -> Iterator[N for handler in handlers: root_logger.removeHandler(handler) for lib in DATAHUB_PACKAGES: + lib_logger = logging.getLogger(lib) lib_logger.removeHandler(handler) lib_logger.propagate 
= True
diff --git a/metadata-ingestion/tests/unit/config/test_pydantic_validators.py b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py
index 399245736805c..f687a2776f6e2 100644
--- a/metadata-ingestion/tests/unit/config/test_pydantic_validators.py
+++ b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py
@@ -1,12 +1,14 @@
 from typing import Optional
 
+import pydantic
 import pytest
 from pydantic import ValidationError
 
-from datahub.configuration.common import ConfigModel
+from datahub.configuration.common import ConfigModel, ConfigurationWarning
 from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated
 from datahub.configuration.validate_field_removal import pydantic_removed_field
 from datahub.configuration.validate_field_rename import pydantic_renamed_field
+from datahub.configuration.validate_multiline_string import pydantic_multiline_string
 from datahub.utilities.global_warning_util import (
     clear_global_warnings,
     get_global_warnings,
@@ -22,8 +24,9 @@ class TestModel(ConfigModel):
     v = TestModel.parse_obj({"b": "original"})
     assert v.b == "original"
 
-    v = TestModel.parse_obj({"a": "renamed"})
-    assert v.b == "renamed"
+    with pytest.warns(ConfigurationWarning, match="a is deprecated"):
+        v = TestModel.parse_obj({"a": "renamed"})
+    assert v.b == "renamed"
 
     with pytest.raises(ValidationError):
         TestModel.parse_obj({"a": "foo", "b": "bar"})
@@ -44,9 +47,10 @@ class TestModel(ConfigModel):
     assert v.b == "original"
     assert v.b1 == "original"
 
-    v = TestModel.parse_obj({"a": "renamed", "a1": "renamed"})
-    assert v.b == "renamed"
-    assert v.b1 == "renamed"
+    with pytest.warns(ConfigurationWarning, match=r"a.* is deprecated"):
+        v = TestModel.parse_obj({"a": "renamed", "a1": "renamed"})
+    assert v.b == "renamed"
+    assert v.b1 == "renamed"
 
     with pytest.raises(ValidationError):
         TestModel.parse_obj({"a": "foo", "b": "bar", "b1": "ok"})
@@ -74,8 +78,9 @@ class TestModel(ConfigModel):
     v = TestModel.parse_obj({"b": "original"})
     assert v.b == "original"
 
-    v = TestModel.parse_obj({"b": "original", "r1": "removed", "r2": "removed"})
-    assert v.b == "original"
+    with pytest.warns(ConfigurationWarning, match=r"r\d was removed"):
+        v = TestModel.parse_obj({"b": "original", "r1": "removed", "r2": "removed"})
+    assert v.b == "original"
 
 
 def test_field_deprecated():
@@ -92,7 +97,10 @@ class TestModel(ConfigModel):
     v = TestModel.parse_obj({"b": "original"})
     assert v.b == "original"
 
-    v = TestModel.parse_obj({"b": "original", "d1": "deprecated", "d2": "deprecated"})
+    with pytest.warns(ConfigurationWarning, match=r"d\d.+ deprecated"):
+        v = TestModel.parse_obj(
+            {"b": "original", "d1": "deprecated", "d2": "deprecated"}
+        )
     assert v.b == "original"
     assert v.d1 == "deprecated"
     assert v.d2 == "deprecated"
@@ -100,3 +108,27 @@ class TestModel(ConfigModel):
     assert any(["d2 is deprecated" in warning for warning in get_global_warnings()])
 
     clear_global_warnings()
+
+
+def test_multiline_string_fixer():
+    class TestModel(ConfigModel):
+        s: str
+        m: Optional[pydantic.SecretStr] = None
+
+        _validate_s = pydantic_multiline_string("s")
+        _validate_m = pydantic_multiline_string("m")
+
+    v = TestModel.parse_obj({"s": "foo\nbar"})
+    assert v.s == "foo\nbar"
+
+    v = TestModel.parse_obj({"s": "foo\\nbar"})
+    assert v.s == "foo\nbar"
+
+    v = TestModel.parse_obj({"s": "normal", "m": "foo\\nbar"})
+    assert v.s == "normal"
+    assert v.m
+    assert v.m.get_secret_value() == "foo\nbar"
+
+    v = TestModel.parse_obj({"s": "normal", "m": pydantic.SecretStr("foo\\nbar")})
+    assert v.m
+    assert v.m.get_secret_value() == "foo\nbar"
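The tests above double as usage documentation: the pre-validator rewrites literal `\n` escape sequences into real newlines before pydantic parses the field, which is what makes pasted SSH keys and service-account keys forgiving of shell quoting. A small sketch of wiring it into a config model; the model name is illustrative, though `GitInfo.deploy_key` in the diff above is wired the same way:

```python
import pydantic

from datahub.configuration.common import ConfigModel
from datahub.configuration.validate_multiline_string import pydantic_multiline_string


class DeployKeyConfig(ConfigModel):
    deploy_key: pydantic.SecretStr

    _fix_newlines = pydantic_multiline_string("deploy_key")


# A key pasted through a shell often arrives with escaped newlines...
config = DeployKeyConfig.parse_obj(
    {"deploy_key": "-----BEGIN KEY-----\\nabc\\n-----END KEY-----"}
)

# ...and the pre-validator has already normalized them to real line breaks.
assert "\n" in config.deploy_key.get_secret_value()
```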
v.m.get_secret_value() == "foo\nbar" From cfc641f0d03408b85ae75c2e4830c5f307ce6a68 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Thu, 21 Dec 2023 20:32:51 +0100 Subject: [PATCH 283/792] fix(ingest/databricks): Pinning databricks sdk to not fail on mypy issues (#9500) --- metadata-ingestion/setup.py | 4 +++- .../src/datahub/ingestion/source/aws/aws_common.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 13c9d3c99aaca..0dcac7a7fc1b4 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -274,7 +274,9 @@ databricks = { # 0.1.11 appears to have authentication issues with azure databricks - "databricks-sdk>=0.9.0", + # 0.16.0 added py.typed support which caused mypy to fail. The databricks sdk is pinned until we resolve mypy issues. + # https://github.com/databricks/databricks-sdk-py/pull/483 + "databricks-sdk>=0.9.0,<0.16.0", "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes diff --git a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py index 0fb211a5d7b16..421991a0966c3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py @@ -167,7 +167,7 @@ def get_session(self) -> Session: return session - def get_credentials(self) -> Dict[str, str]: + def get_credentials(self) -> Dict[str, Optional[str]]: credentials = self.get_session().get_credentials() if credentials is not None: return { From ca518d6c78d994d59879b29f5afa8ffd1cff56df Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 20:28:45 -0500 Subject: [PATCH 284/792] feat(ingest): remove librdkafka hacks (#9507) --- docker/datahub-ingestion-base/Dockerfile | 9 ----- .../base-requirements.txt | 2 +- metadata-ingestion/developing.md | 3 +- .../scripts/datahub_preflight.sh | 19 +++------- metadata-ingestion/scripts/install_deps.sh | 5 ++- metadata-ingestion/setup.py | 35 ++++--------------- 6 files changed, 14 insertions(+), 59 deletions(-) diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 81fec61ea5073..558a5afe2c2cf 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -30,9 +30,6 @@ ARG DEBIAN_REPO_URL ARG PIP_MIRROR_URL ARG GITHUB_REPO_URL -ENV LIBRDKAFKA_VERSION=1.6.2 -ENV CONFLUENT_KAFKA_VERSION=1.6.1 - ENV DEBIAN_FRONTEND noninteractive # Optionally set corporate mirror for apk and pip @@ -40,7 +37,6 @@ RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN apt-get update && apt-get install -y -qq \ - make \ python3-ldap \ libldap2-dev \ libsasl2-dev \ @@ -53,11 +49,6 @@ RUN apt-get update && apt-get install -y -qq \ unzip \ ldap-utils \ && python -m pip install --no-cache --upgrade pip wheel setuptools \ - && wget -q ${GITHUB_REPO_URL}/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ - tar -xz -C /root \ - && cd /root/librdkafka-${LIBRDKAFKA_VERSION} \ - && ./configure --prefix /usr && make && make install && cd .. 
&& rm -rf /root/librdkafka-${LIBRDKAFKA_VERSION} \ - && apt-get remove -y make \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* # compiled against newer golang for security fixes diff --git a/docker/datahub-ingestion-base/base-requirements.txt b/docker/datahub-ingestion-base/base-requirements.txt index eb082d50b3020..141382466ab9f 100644 --- a/docker/datahub-ingestion-base/base-requirements.txt +++ b/docker/datahub-ingestion-base/base-requirements.txt @@ -65,7 +65,7 @@ colorlog==4.8.0 comm==0.1.4 confection==0.1.3 ConfigUpdater==3.1.1 -confluent-kafka==1.8.2 +confluent-kafka==2.3.0 connexion==2.14.2 cron-descriptor==1.4.0 croniter==2.0.1 diff --git a/metadata-ingestion/developing.md b/metadata-ingestion/developing.md index d5f834936cdcf..d1eef21974f1d 100644 --- a/metadata-ingestion/developing.md +++ b/metadata-ingestion/developing.md @@ -11,8 +11,7 @@ Also take a look at the guide to [adding a source](./adding-source.md). 1. Python 3.7+ must be installed in your host environment. 2. Java8 (gradle won't work with newer versions) -3. On MacOS: `brew install librdkafka` -4. On Debian/Ubuntu: `sudo apt install librdkafka-dev python3-dev python3-venv` +4. On Debian/Ubuntu: `sudo apt install python3-dev python3-venv` 5. On Fedora (if using LDAP source integration): `sudo yum install openldap-devel` ### Set up your Python environment diff --git a/metadata-ingestion/scripts/datahub_preflight.sh b/metadata-ingestion/scripts/datahub_preflight.sh index e82be9d7b27b7..9676964f4d49d 100755 --- a/metadata-ingestion/scripts/datahub_preflight.sh +++ b/metadata-ingestion/scripts/datahub_preflight.sh @@ -45,8 +45,6 @@ arm64_darwin_preflight() { pip3 install --no-use-pep517 scipy fi - printf "✨ Setting up librdkafka prerequisities\n" - brew_install "librdkafka" "1.9.1" brew_install "openssl@1.1" brew install "postgresql@14" @@ -69,25 +67,16 @@ arm64_darwin_preflight() { export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1 export GRPC_PYTHON_BUILD_SYSTEM_ZLIB - CPPFLAGS="-I$(brew --prefix openssl@1.1)/include -I$(brew --prefix librdkafka)/include" + CPPFLAGS="-I$(brew --prefix openssl@1.1)/include" export CPPFLAGS - LDFLAGS="-L$(brew --prefix openssl@1.1)/lib -L$(brew --prefix librdkafka)/lib" + LDFLAGS="-L$(brew --prefix openssl@1.1)/lib" export LDFLAGS - CPATH="$(brew --prefix librdkafka)/include" - export CPATH - C_INCLUDE_PATH="$(brew --prefix librdkafka)/include" - export C_INCLUDE_PATH - LIBRARY_PATH="$(brew --prefix librdkafka)/lib" - export LIBRARY_PATH cat << EOF export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 export GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1 - export CPPFLAGS="-I$(brew --prefix openssl@1.1)/include -I$(brew --prefix librdkafka)/include" - export LDFLAGS="-L$(brew --prefix openssl@1.1)/lib -L$(brew --prefix librdkafka)/lib -L$(brew --prefix postgresql@14)/lib/postgresql@14" - export CPATH="$(brew --prefix librdkafka)/include" - export C_INCLUDE_PATH="$(brew --prefix librdkafka)/include" - export LIBRARY_PATH="$(brew --prefix librdkafka)/lib" + export CPPFLAGS="-I$(brew --prefix openssl@1.1)/include" + export LDFLAGS="-L$(brew --prefix openssl@1.1)/lib -L$(brew --prefix postgresql@14)/lib/postgresql@14" EOF diff --git a/metadata-ingestion/scripts/install_deps.sh b/metadata-ingestion/scripts/install_deps.sh index 7e6b6956d8bb8..bae0278056ebb 100755 --- a/metadata-ingestion/scripts/install_deps.sh +++ b/metadata-ingestion/scripts/install_deps.sh @@ -2,7 +2,8 @@ set -euxo pipefail if [ "$(uname)" == "Darwin" ]; then - brew install librdkafka + # None + true else sudo_cmd="" if 
command -v sudo; then @@ -11,7 +12,6 @@ else if command -v yum; then $sudo_cmd yum install -y \ - librdkafka-devel \ openldap-devel \ cyrus-sasl-devel \ openldap-clients \ @@ -21,7 +21,6 @@ else libxslt-devel else $sudo_cmd apt-get update && $sudo_cmd apt-get install -y \ - librdkafka-dev \ python3-ldap \ libldap2-dev \ libsasl2-dev \ diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 0dcac7a7fc1b4..c834700388d62 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -69,35 +69,12 @@ rest_common = {"requests", "requests_file"} kafka_common = { - # The confluent_kafka package provides a number of pre-built wheels for - # various platforms and architectures. However, it does not provide wheels - # for arm64 (including M1 Macs) or aarch64 (Docker's linux/arm64). This has - # remained an open issue on the confluent_kafka project for a year: - # - https://github.com/confluentinc/confluent-kafka-python/issues/1182 - # - https://github.com/confluentinc/confluent-kafka-python/pull/1161 - # - # When a wheel is not available, we must build from source instead. - # Building from source requires librdkafka to be installed. - # Most platforms have an easy way to install librdkafka: - # - MacOS: `brew install librdkafka` gives latest, which is 1.9.x or newer. - # - Debian: `apt install librdkafka` gives 1.6.0 (https://packages.debian.org/bullseye/librdkafka-dev). - # - Ubuntu: `apt install librdkafka` gives 1.8.0 (https://launchpad.net/ubuntu/+source/librdkafka). - # - # Moreover, confluent_kafka 1.9.0 introduced a hard compatibility break, and - # requires librdkafka >=1.9.0. As such, installing confluent_kafka 1.9.x on - # most arm64 Linux machines will fail, since it will build from source but then - # fail because librdkafka is too old. Hence, we have added an extra requirement - # that requires confluent_kafka<1.9.0 on non-MacOS arm64/aarch64 machines, which - # should ideally allow the builds to succeed in default conditions. We still - # want to allow confluent_kafka >= 1.9.0 for M1 Macs, which is why we can't - # broadly restrict confluent_kafka to <1.9.0. - # - # Note that this is somewhat of a hack, since we don't actually require the - # older version of confluent_kafka on those machines. Additionally, we will - # need monitor the Debian/Ubuntu PPAs and modify this rule if they start to - # support librdkafka >= 1.9.0. - "confluent_kafka>=1.5.0", - 'confluent_kafka<1.9.0; platform_system != "Darwin" and (platform_machine == "aarch64" or platform_machine == "arm64")', + # Note that confluent_kafka 1.9.0 introduced a hard compatibility break, and + # requires librdkafka >=1.9.0. This is generally not an issue, since they + # now provide prebuilt wheels for most platforms, including M1 Macs and + # Linux aarch64 (e.g. Docker's linux/arm64). Installing confluent_kafka + # from source remains a pain. + "confluent_kafka>=1.9.0", # We currently require both Avro libraries. The codegen uses avro-python3 (above) # schema parsers at runtime for generating and reading JSON into Python objects. 
# At the same time, we use Kafka's AvroSerializer, which internally relies on From be329986ab4b177899d16990fec31597ae765c58 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 20:30:36 -0500 Subject: [PATCH 285/792] feat(ingest): rename custom package path from models to metadata (#9502) --- docs/modeling/extending-the-metadata-model.md | 8 ++++++++ metadata-ingestion/scripts/custom_package_codegen.py | 10 +++++----- metadata-ingestion/src/datahub/telemetry/telemetry.py | 5 +++++ 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/docs/modeling/extending-the-metadata-model.md b/docs/modeling/extending-the-metadata-model.md index dc4edd3306f95..8b308fb65d243 100644 --- a/docs/modeling/extending-the-metadata-model.md +++ b/docs/modeling/extending-the-metadata-model.md @@ -289,6 +289,14 @@ Alternatively, publish it to PyPI with `twine upload custom-package/my-company-d This will generate some Python build artifacts, which you can distribute within your team or publish to PyPI. The command output contains additional details and exact CLI commands you can use. +Once this package is installed, you can use the DataHub CLI as normal, and it will use your custom models. +You'll also be able to import those models, with IDE support, by changing your imports. + +```diff +- from datahub.metadata.schema_classes import DatasetPropertiesClass ++ from my_company_datahub_models.metadata.schema_classes import DatasetPropertiesClass +``` + diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 3f59fdf2cc548..714728087d4b6 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -62,7 +62,7 @@ def generate( entity_registry=entity_registry, pdl_path=pdl_path, schemas_path=schemas_path, - outdir=str(src_path / "models"), + outdir=str(src_path / "metadata"), enable_custom_loader=False, ) @@ -91,13 +91,13 @@ def generate( ], package_data={{ "{python_package_name}": ["py.typed"], - "{python_package_name}.models": ["schema.avsc"], - "{python_package_name}.models.schemas": ["*.avsc"], + "{python_package_name}.metadata": ["schema.avsc"], + "{python_package_name}.metadata.schemas": ["*.avsc"], }}, entry_points={{ "datahub.custom_packages": [ - "models={python_package_name}.models.schema_classes", - "urns={python_package_name}.models._urns.urn_defs", + "models={python_package_name}.metadata.schema_classes", + "urns={python_package_name}.metadata._urns.urn_defs", ], }}, ) diff --git a/metadata-ingestion/src/datahub/telemetry/telemetry.py b/metadata-ingestion/src/datahub/telemetry/telemetry.py index 615be00d5455f..c399f2e1a27e5 100644 --- a/metadata-ingestion/src/datahub/telemetry/telemetry.py +++ b/metadata-ingestion/src/datahub/telemetry/telemetry.py @@ -16,6 +16,7 @@ from datahub.cli.cli_utils import DATAHUB_ROOT_FOLDER, get_boolean_env_variable from datahub.configuration.common import ExceptionWithProps from datahub.ingestion.graph.client import DataHubGraph +from datahub.metadata.schema_classes import _custom_package_path from datahub.utilities.perf_timer import PerfTimer logger = logging.getLogger(__name__) @@ -89,6 +90,10 @@ if any(var in os.environ for var in CI_ENV_VARS): ENV_ENABLED = False +# Also disable if a custom metadata model package is in use. 
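+# (For example, a package generated by the custom_package_codegen script above
+# registers a "datahub.custom_packages" entry point, which is what makes
+# _custom_package_path non-empty here.)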
+if _custom_package_path: + ENV_ENABLED = False + TIMEOUT = int(os.environ.get("DATAHUB_TELEMETRY_TIMEOUT", "10")) MIXPANEL_ENDPOINT = "track.datahubproject.io/mp" MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a" From 4fe1df6892a7e45fe59a26990b441a67dd4faf93 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Fri, 22 Dec 2023 11:57:24 +0530 Subject: [PATCH 286/792] feat(ui): edit link option (#9498) --- .../Documentation/components/LinkList.tsx | 119 ++++++++++++++++-- 1 file changed, 110 insertions(+), 9 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx index bcce994c3f0f8..1b5c3d54009da 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx @@ -1,14 +1,15 @@ -import React from 'react'; +import React, { useState } from 'react'; import { Link } from 'react-router-dom'; import styled from 'styled-components/macro'; -import { message, Button, List, Typography } from 'antd'; -import { LinkOutlined, DeleteOutlined } from '@ant-design/icons'; +import { message, Button, List, Typography, Modal, Form, Input } from 'antd'; +import { LinkOutlined, DeleteOutlined, EditOutlined } from '@ant-design/icons'; import { EntityType, InstitutionalMemoryMetadata } from '../../../../../../types.generated'; -import { useEntityData } from '../../../EntityContext'; +import { useEntityData, useMutationUrn } from '../../../EntityContext'; import { useEntityRegistry } from '../../../../../useEntityRegistry'; import { ANTD_GRAY } from '../../../constants'; import { formatDateString } from '../../../containers/profile/utils'; -import { useRemoveLinkMutation } from '../../../../../../graphql/mutations.generated'; +import { useAddLinkMutation, useRemoveLinkMutation } from '../../../../../../graphql/mutations.generated'; +import analytics, { EntityActionType, EventType } from '../../../../../analytics'; const LinkListItem = styled(List.Item)` border-radius: 5px; @@ -33,10 +34,15 @@ type LinkListProps = { }; export const LinkList = ({ refetch }: LinkListProps) => { - const { urn: entityUrn, entityData } = useEntityData(); + const [editModalVisble, setEditModalVisible] = useState(false); + const [linkDetails, setLinkDetails] = useState(undefined); + const { urn: entityUrn, entityData, entityType } = useEntityData(); const entityRegistry = useEntityRegistry(); const [removeLinkMutation] = useRemoveLinkMutation(); const links = entityData?.institutionalMemory?.elements || []; + const [form] = Form.useForm(); + const [addLinkMutation] = useAddLinkMutation(); + const mutationUrn = useMutationUrn(); const handleDeleteLink = async (metadata: InstitutionalMemoryMetadata) => { try { @@ -53,8 +59,98 @@ export const LinkList = ({ refetch }: LinkListProps) => { refetch?.(); }; + const handleEditLink = (metadata: InstitutionalMemoryMetadata) => { + form.setFieldsValue({ + url: metadata.url, + label: metadata.description, + }); + setLinkDetails(metadata); + setEditModalVisible(true); + }; + + const handleClose = () => { + form.resetFields(); + setEditModalVisible(false); + }; + + const handleEdit = async (formData: any) => { + if (!linkDetails) return; + try { + await removeLinkMutation({ + variables: { input: { linkUrl: linkDetails.url, resourceUrn: linkDetails.associatedUrn || entityUrn } }, + }); + await 
addLinkMutation({ + variables: { input: { linkUrl: formData.url, label: formData.label, resourceUrn: mutationUrn } }, + }); + + message.success({ content: 'Link Updated', duration: 2 }); + + analytics.event({ + type: EventType.EntityActionEvent, + entityType, + entityUrn: mutationUrn, + actionType: EntityActionType.UpdateLinks, + }); + + refetch?.(); + handleClose(); + } catch (e: unknown) { + message.destroy(); + + if (e instanceof Error) { + message.error({ content: `Error updating link: \n ${e.message || ''}`, duration: 2 }); + } + } + }; + return entityData ? ( <> + + Cancel + , + , + ]} + > +
+ + + + + + +
+
{links.length > 0 && ( { renderItem={(link) => ( handleDeleteLink(link)} type="text" shape="circle" danger> - - + <> + + + } > Date: Fri, 22 Dec 2023 02:18:22 -0500 Subject: [PATCH 287/792] feat(ingest): support CLL for redshift materialized views with auto refresh (#9508) --- metadata-ingestion/setup.py | 2 +- .../src/datahub/utilities/sqlglot_lineage.py | 122 ++++++++++++------ ...dshift_materialized_view_auto_refresh.json | 54 ++++++++ .../tests/unit/sql_parsing/test_sql_detach.py | 46 +++++++ .../unit/sql_parsing/test_sqlglot_lineage.py | 72 ++++------- 5 files changed, 207 insertions(+), 89 deletions(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index c834700388d62..4632c20cd3b96 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -98,7 +98,7 @@ sqlglot_lib = { # Using an Acryl fork of sqlglot. # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1 - "acryl-sqlglot==19.0.2.dev10", + "acryl-sqlglot==20.4.1.dev14", } sql_common = ( diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index fc3efef2ba532..f84b3f8b94a2e 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -5,7 +5,7 @@ import logging import pathlib from collections import defaultdict -from typing import Any, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union import pydantic.dataclasses import sqlglot @@ -60,6 +60,8 @@ ), ) ) +# Quick check that the rules were loaded correctly. +assert 0 < len(RULES_BEFORE_TYPE_ANNOTATION) < len(sqlglot.optimizer.optimizer.RULES) class GraphQLSchemaField(TypedDict): @@ -150,12 +152,16 @@ class _TableName(_FrozenModel): def as_sqlglot_table(self) -> sqlglot.exp.Table: return sqlglot.exp.Table( - catalog=self.database, db=self.db_schema, this=self.table + catalog=sqlglot.exp.Identifier(this=self.database) + if self.database + else None, + db=sqlglot.exp.Identifier(this=self.db_schema) if self.db_schema else None, + this=sqlglot.exp.Identifier(this=self.table), ) def qualified( self, - dialect: str, + dialect: sqlglot.Dialect, default_db: Optional[str] = None, default_schema: Optional[str] = None, ) -> "_TableName": @@ -271,7 +277,9 @@ def make_from_error(cls, error: Exception) -> "SqlParsingResult": ) -def _parse_statement(sql: sqlglot.exp.ExpOrStr, dialect: str) -> sqlglot.Expression: +def _parse_statement( + sql: sqlglot.exp.ExpOrStr, dialect: sqlglot.Dialect +) -> sqlglot.Expression: statement: sqlglot.Expression = sqlglot.maybe_parse( sql, dialect=dialect, error_level=sqlglot.ErrorLevel.RAISE ) @@ -279,8 +287,7 @@ def _parse_statement(sql: sqlglot.exp.ExpOrStr, dialect: str) -> sqlglot.Express def _table_level_lineage( - statement: sqlglot.Expression, - dialect: str, + statement: sqlglot.Expression, dialect: sqlglot.Dialect ) -> Tuple[Set[_TableName], Set[_TableName]]: # Generate table-level lineage. 
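    # Rough sketch of the intent (table names here are made up): given
    #   INSERT INTO db.sales SELECT * FROM db.raw_orders
    # we want modified == {db.sales} and sources == {db.raw_orders}.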
modified = { @@ -482,6 +489,26 @@ def close(self) -> None: ] _SupportedColumnLineageTypesTuple = (sqlglot.exp.Subqueryable, sqlglot.exp.DerivedTable) +DIALECTS_WITH_CASE_INSENSITIVE_COLS = { + # Column identifiers are case-insensitive in BigQuery, so we need to + # do a normalization step beforehand to make sure it's resolved correctly. + "bigquery", + # Our snowflake source lowercases column identifiers, so we are forced + # to do fuzzy (case-insensitive) resolution instead of exact resolution. + "snowflake", + # Teradata column names are case-insensitive. + # A name, even when enclosed in double quotation marks, is not case sensitive. For example, CUSTOMER and Customer are the same. + # See more below: + # https://documentation.sas.com/doc/en/pgmsascdc/9.4_3.5/acreldb/n0ejgx4895bofnn14rlguktfx5r3.htm + "teradata", +} +DIALECTS_WITH_DEFAULT_UPPERCASE_COLS = { + # In some dialects, column identifiers are effectively case insensitive + # because they are automatically converted to uppercase. Most other systems + # automatically lowercase unquoted identifiers. + "snowflake", +} + class UnsupportedStatementTypeError(TypeError): pass @@ -495,8 +522,8 @@ class SqlUnderstandingError(Exception): # TODO: Break this up into smaller functions. def _column_level_lineage( # noqa: C901 statement: sqlglot.exp.Expression, - dialect: str, - input_tables: Dict[_TableName, SchemaInfo], + dialect: sqlglot.Dialect, + table_schemas: Dict[_TableName, SchemaInfo], output_table: Optional[_TableName], default_db: Optional[str], default_schema: Optional[str], @@ -515,19 +542,9 @@ def _column_level_lineage( # noqa: C901 column_lineage: List[_ColumnLineageInfo] = [] - use_case_insensitive_cols = dialect in { - # Column identifiers are case-insensitive in BigQuery, so we need to - # do a normalization step beforehand to make sure it's resolved correctly. - "bigquery", - # Our snowflake source lowercases column identifiers, so we are forced - # to do fuzzy (case-insensitive) resolution instead of exact resolution. - "snowflake", - # Teradata column names are case-insensitive. - # A name, even when enclosed in double quotation marks, is not case sensitive. For example, CUSTOMER and Customer are the same. - # See more below: - # https://documentation.sas.com/doc/en/pgmsascdc/9.4_3.5/acreldb/n0ejgx4895bofnn14rlguktfx5r3.htm - "teradata", - } + use_case_insensitive_cols = _is_dialect_instance( + dialect, DIALECTS_WITH_CASE_INSENSITIVE_COLS + ) sqlglot_db_schema = sqlglot.MappingSchema( dialect=dialect, @@ -537,14 +554,16 @@ def _column_level_lineage( # noqa: C901 table_schema_normalized_mapping: Dict[_TableName, Dict[str, str]] = defaultdict( dict ) - for table, table_schema in input_tables.items(): + for table, table_schema in table_schemas.items(): normalized_table_schema: SchemaInfo = {} for col, col_type in table_schema.items(): if use_case_insensitive_cols: col_normalized = ( # This is required to match Sqlglot's behavior. 
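                    # e.g. (illustrative) "OrderId" becomes "ORDERID" under
                    # Snowflake's uppercase default, but "orderid" elsewhere.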
col.upper() - if dialect in {"snowflake"} + if _is_dialect_instance( + dialect, DIALECTS_WITH_DEFAULT_UPPERCASE_COLS + ) else col.lower() ) else: @@ -561,7 +580,7 @@ def _column_level_lineage( # noqa: C901 if use_case_insensitive_cols: def _sqlglot_force_column_normalizer( - node: sqlglot.exp.Expression, dialect: "sqlglot.DialectType" = None + node: sqlglot.exp.Expression, ) -> sqlglot.exp.Expression: if isinstance(node, sqlglot.exp.Column): node.this.set("quoted", False) @@ -572,9 +591,7 @@ def _sqlglot_force_column_normalizer( # "Prior to case normalization sql %s", # statement.sql(pretty=True, dialect=dialect), # ) - statement = statement.transform( - _sqlglot_force_column_normalizer, dialect, copy=False - ) + statement = statement.transform(_sqlglot_force_column_normalizer, copy=False) # logger.debug( # "Sql after casing normalization %s", # statement.sql(pretty=True, dialect=dialect), @@ -595,7 +612,8 @@ def _schema_aware_fuzzy_column_resolve( # Optimize the statement + qualify column references. logger.debug( - "Prior to qualification sql %s", statement.sql(pretty=True, dialect=dialect) + "Prior to column qualification sql %s", + statement.sql(pretty=True, dialect=dialect), ) try: # Second time running qualify, this time with: @@ -678,7 +696,7 @@ def _schema_aware_fuzzy_column_resolve( # Otherwise, we can't process it. continue - if dialect == "bigquery" and output_col.lower() in { + if _is_dialect_instance(dialect, "bigquery") and output_col.lower() in { "_partitiontime", "_partitiondate", }: @@ -923,7 +941,7 @@ def _translate_sqlglot_type( def _translate_internal_column_lineage( table_name_urn_mapping: Dict[_TableName, str], raw_column_lineage: _ColumnLineageInfo, - dialect: str, + dialect: sqlglot.Dialect, ) -> ColumnLineageInfo: downstream_urn = None if raw_column_lineage.downstream.table: @@ -956,18 +974,44 @@ def _translate_internal_column_lineage( ) -def _get_dialect(platform: str) -> str: +def _get_dialect_str(platform: str) -> str: # TODO: convert datahub platform names to sqlglot dialect if platform == "presto-on-hive": return "hive" - if platform == "mssql": + elif platform == "mssql": return "tsql" - if platform == "athena": + elif platform == "athena": return "trino" + elif platform == "mysql": + # In sqlglot v20+, MySQL is now case-sensitive by default, which is the + # default behavior on Linux. However, MySQL's default case sensitivity + # actually depends on the underlying OS. + # For us, it's simpler to just assume that it's case-insensitive, and + # let the fuzzy resolution logic handle it. 
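+        # (Illustrative effect of this setting: an unquoted identifier such as
+        # MyTable is treated as mytable during qualification.)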
+ return "mysql, normalization_strategy = lowercase" else: return platform +def _get_dialect(platform: str) -> sqlglot.Dialect: + return sqlglot.Dialect.get_or_raise(_get_dialect_str(platform)) + + +def _is_dialect_instance( + dialect: sqlglot.Dialect, platforms: Union[str, Iterable[str]] +) -> bool: + if isinstance(platforms, str): + platforms = [platforms] + else: + platforms = list(platforms) + + dialects = [sqlglot.Dialect.get_or_raise(platform) for platform in platforms] + + if any(isinstance(dialect, dialect_class.__class__) for dialect_class in dialects): + return True + return False + + def _sqlglot_lineage_inner( sql: sqlglot.exp.ExpOrStr, schema_resolver: SchemaResolver, @@ -975,7 +1019,7 @@ def _sqlglot_lineage_inner( default_schema: Optional[str] = None, ) -> SqlParsingResult: dialect = _get_dialect(schema_resolver.platform) - if dialect == "snowflake": + if _is_dialect_instance(dialect, "snowflake"): # in snowflake, table identifiers must be uppercased to match sqlglot's behavior. if default_db: default_db = default_db.upper() @@ -1064,7 +1108,7 @@ def _sqlglot_lineage_inner( column_lineage = _column_level_lineage( select_statement, dialect=dialect, - input_tables=table_name_schema_mapping, + table_schemas=table_name_schema_mapping, output_table=downstream_table, default_db=default_db, default_schema=default_schema, @@ -1204,13 +1248,13 @@ def replace_cte_refs(node: sqlglot.exp.Expression) -> sqlglot.exp.Expression: full_new_name, dialect=dialect, into=sqlglot.exp.Table ) - # We expect node.parent to be a Table or Column. - # Either way, it should support catalog/db/name. parent = node.parent - if "catalog" in parent.arg_types: + # We expect node.parent to be a Table or Column, both of which support catalog/db/name. + # However, we check the parent's arg_types to be safe. 
+ if "catalog" in parent.arg_types and table_expr.catalog: parent.set("catalog", table_expr.catalog) - if "db" in parent.arg_types: + if "db" in parent.arg_types and table_expr.db: parent.set("db", table_expr.db) new_node = sqlglot.exp.Identifier(this=table_expr.name) diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json new file mode 100644 index 0000000000000..fce65056a32f7 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json @@ -0,0 +1,54 @@ +{ + "query_type": "CREATE", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:redshift,customer,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:redshift,orders,PROD)" + ], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)" + ], + "column_lineage": [ + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)", + "column": "cust_id", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,customer,PROD)", + "column": "cust_id" + } + ] + }, + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)", + "column": "first_name", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,customer,PROD)", + "column": "first_name" + } + ] + }, + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)", + "column": "total_amount", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,orders,PROD)", + "column": "amount" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py b/metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py new file mode 100644 index 0000000000000..c99b05c35e0f5 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py @@ -0,0 +1,46 @@ +from datahub.utilities.sqlglot_lineage import detach_ctes + + +def test_detach_ctes_simple(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "_my_cte_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table ON table2.id = _my_cte_table.id" + ) + + +def test_detach_ctes_with_alias(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 AS tablealias ON table2.id = tablealias.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "_my_cte_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table AS tablealias ON table2.id = tablealias.id" + ) + + +def test_detach_ctes_with_multipart_replacement(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "my_db.my_schema.my_table"}, + 
) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN my_db.my_schema.my_table ON table2.id = my_db.my_schema.my_table.id" + ) diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index 7f69e358f8f11..eb1ba06669112 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -3,59 +3,11 @@ import pytest from datahub.testing.check_sql_parser_result import assert_sql_result -from datahub.utilities.sqlglot_lineage import ( - _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT, - detach_ctes, -) +from datahub.utilities.sqlglot_lineage import _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT RESOURCE_DIR = pathlib.Path(__file__).parent / "goldens" -def test_detach_ctes_simple(): - original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" - detached_expr = detach_ctes( - original, - platform="snowflake", - cte_mapping={"__cte_0": "_my_cte_table"}, - ) - detached = detached_expr.sql(dialect="snowflake") - - assert ( - detached - == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table ON table2.id = _my_cte_table.id" - ) - - -def test_detach_ctes_with_alias(): - original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 AS tablealias ON table2.id = tablealias.id" - detached_expr = detach_ctes( - original, - platform="snowflake", - cte_mapping={"__cte_0": "_my_cte_table"}, - ) - detached = detached_expr.sql(dialect="snowflake") - - assert ( - detached - == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table AS tablealias ON table2.id = tablealias.id" - ) - - -def test_detach_ctes_with_multipart_replacement(): - original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" - detached_expr = detach_ctes( - original, - platform="snowflake", - cte_mapping={"__cte_0": "my_db.my_schema.my_table"}, - ) - detached = detached_expr.sql(dialect="snowflake") - - assert ( - detached - == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN my_db.my_schema.my_table ON table2.id = my_db.my_schema.my_table.id" - ) - - def test_select_max(): # The COL2 should get normalized to col2. 
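    # (Illustrative sketch, not the test's actual query: on Snowflake, something
    # like SELECT MAX(COL2) AS m FROM t should resolve the upstream column as
    # the lowercase "col2" via the fuzzy resolution described above.)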
assert_sql_result( @@ -1023,3 +975,25 @@ def test_postgres_complex_update(): }, expected_file=RESOURCE_DIR / "test_postgres_complex_update.json", ) + + +def test_redshift_materialized_view_auto_refresh(): + # Example query from the redshift docs: https://docs.aws.amazon.com/prescriptive-guidance/latest/materialized-views-redshift/refreshing-materialized-views.html + assert_sql_result( + """ +CREATE MATERIALIZED VIEW mv_total_orders +AUTO REFRESH YES -- Add this clause to auto refresh the MV +AS + SELECT c.cust_id, + c.first_name, + sum(o.amount) as total_amount + FROM orders o + JOIN customer c + ON c.cust_id = o.customer_id + GROUP BY c.cust_id, + c.first_name; +""", + dialect="redshift", + expected_file=RESOURCE_DIR + / "test_redshift_materialized_view_auto_refresh.json", + ) From db55fadb734546b796352aeb38ec2719ce770cf9 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Fri, 22 Dec 2023 19:48:30 +0530 Subject: [PATCH 288/792] feat(ui): add custom cron option for UI based ingestion (#9510) --- .../source/builder/CreateScheduleStep.tsx | 38 ++++++++++++++----- .../source/builder/SelectTemplateStep.tsx | 4 +- 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx b/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx index 7a14b6a794189..3745ee0f44dc0 100644 --- a/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx +++ b/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx @@ -1,4 +1,4 @@ -import { Button, Form, Switch, Typography } from 'antd'; +import { Button, Checkbox, Form, Input, Switch, Typography } from 'antd'; import React, { useMemo, useState } from 'react'; import { Cron } from 'react-js-cron'; import 'react-js-cron/dist/styles.css'; @@ -31,6 +31,10 @@ const CronText = styled(Typography.Paragraph)` color: ${ANTD_GRAY[7]}; `; +const AdvancedCheckBox = styled(Typography.Text)` + margin-right: 10px; + margin-bottom: 8px; +`; const CronSuccessCheck = styled(CheckCircleOutlined)` color: ${REDESIGN_COLORS.BLUE}; margin-right: 4px; @@ -68,8 +72,8 @@ export const CreateScheduleStep = ({ state, updateState, goTo, prev }: StepProps const { schedule } = state; const interval = schedule?.interval?.replaceAll(', ', ' ') || DAILY_MIDNIGHT_CRON_INTERVAL; const timezone = schedule?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone; - const [scheduleEnabled, setScheduleEnabled] = useState(!!schedule); + const [advancedCronCheck, setAdvancedCronCheck] = useState(false); const [scheduleCronInterval, setScheduleCronInterval] = useState(interval); const [scheduleTimezone, setScheduleTimezone] = useState(timezone); @@ -137,13 +141,29 @@ export const CreateScheduleStep = ({ state, updateState, goTo, prev }: StepProps )}
Schedule}> - +
+ Advanced + setAdvancedCronCheck(event.target.checked)} + /> +
+ {advancedCronCheck ? ( + setScheduleCronInterval(e.target.value)} + /> + ) : ( + + )} {cronAsText.error && <>Invalid cron schedule. Cron must be of UNIX form:} {!cronAsText.text && ( diff --git a/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx b/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx index 8aaa4f3448686..6b771d459c4ef 100644 --- a/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx +++ b/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx @@ -70,7 +70,9 @@ export const SelectTemplateStep = ({ state, updateState, goTo, cancel, ingestion }; const filteredSources = ingestionSources.filter( - (source) => source.displayName.includes(searchFilter) || source.name.includes(searchFilter), + (source) => + source.displayName.toLocaleLowerCase().includes(searchFilter.toLocaleLowerCase()) || + source.name.toLocaleLowerCase().includes(searchFilter.toLocaleLowerCase()), ); return ( From 0d8568e087b5489b49161423ed299dec84e32f1e Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 22 Dec 2023 14:59:14 -0500 Subject: [PATCH 289/792] fix(ingest): update dbt type inference (#9512) --- .../integration/dbt/dbt_enabled_with_schemas_mces_golden.json | 2 +- .../integration/dbt/dbt_test_column_meta_mapping_golden.json | 2 +- .../dbt/dbt_test_with_complex_owner_patterns_mces_golden.json | 2 +- .../dbt/dbt_test_with_data_platform_instance_mces_golden.json | 2 +- .../dbt/dbt_test_with_non_incremental_lineage_mces_golden.json | 2 +- .../dbt/dbt_test_with_target_platform_instance_mces_golden.json | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json index 4deb725ed2b44..fa26a93479a4f 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json @@ -153,7 +153,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json index 588470ef41631..f2208fd98c203 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json @@ -87,7 +87,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json index 926e8b8c8ed84..a27eeb3775960 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json @@ -117,7 +117,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json 
b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json index 3727603266f25..43336ca585bcc 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json @@ -118,7 +118,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json index ec879e6af766a..27ea568d010fa 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json @@ -118,7 +118,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json index e25c5e4faf6af..07296e175d9ec 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json @@ -118,7 +118,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, From ed5bdfc5aec65978145a72d2701941ed21b35554 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 22 Dec 2023 17:12:31 -0500 Subject: [PATCH 290/792] feat(ingest/redshift): merge CLL instead of overwriting (#9513) --- .../ingestion/source/redshift/lineage.py | 74 ++++++++++++------- .../src/datahub/utilities/sqlglot_lineage.py | 5 +- 2 files changed, 49 insertions(+), 30 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index abed8505f168b..8135e1d44c102 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -41,6 +41,7 @@ UpstreamLineageClass, ) from datahub.utilities import memory_footprint +from datahub.utilities.dedup_list import deduplicate_list from datahub.utilities.urns import dataset_urn logger: logging.Logger = logging.getLogger(__name__) @@ -85,6 +86,30 @@ def __post_init__(self): else: self.dataset_lineage_type = DatasetLineageTypeClass.TRANSFORMED + def merge_lineage( + self, + upstreams: Set[LineageDataset], + cll: Optional[List[sqlglot_l.ColumnLineageInfo]], + ) -> None: + self.upstreams = self.upstreams.union(upstreams) + + # Merge CLL using the output column name as the merge key. + self.cll = self.cll or [] + existing_cll: Dict[str, sqlglot_l.ColumnLineageInfo] = { + c.downstream.column: c for c in self.cll + } + for c in cll or []: + if c.downstream.column in existing_cll: + # Merge using upstream + column name as the merge key. 
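+                # Sketch with hypothetical columns: if two statements both feed
+                # downstream column "total_amount", their upstream ColumnRefs are
+                # unioned here rather than the later one overwriting the earlier.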
+ existing_cll[c.downstream.column].upstreams = deduplicate_list( + [*existing_cll[c.downstream.column].upstreams, *c.upstreams] + ) + else: + # New output column, just add it as is. + self.cll.append(c) + + self.cll = self.cll or None + class RedshiftLineageExtractor: def __init__( @@ -161,7 +186,12 @@ def _get_sources_from_query( ) sources.append(source) - return sources, parsed_result.column_lineage + return ( + sources, + parsed_result.column_lineage + if self.config.include_view_column_lineage + else None, + ) def _build_s3_path_from_row(self, filename: str) -> str: path = filename.strip() @@ -208,7 +238,7 @@ def _get_sources( "Only s3 source supported with copy. The source was: {path}." ) self.report.num_lineage_dropped_not_support_copy_path += 1 - return sources, cll + return [], None path = strip_s3_prefix(self._get_s3_path(path)) urn = make_dataset_urn_with_platform_instance( platform=platform.value, @@ -284,7 +314,6 @@ def _populate_lineage_map( ddl=lineage_row.ddl, filename=lineage_row.filename, ) - target.cll = cll target.upstreams.update( self._get_upstream_lineages( @@ -294,13 +323,13 @@ def _populate_lineage_map( raw_db_name=raw_db_name, ) ) + target.cll = cll - # Merging downstreams if dataset already exists and has downstreams + # Merging upstreams if dataset already exists and has upstreams if target.dataset.urn in self._lineage_map: - self._lineage_map[target.dataset.urn].upstreams = self._lineage_map[ - target.dataset.urn - ].upstreams.union(target.upstreams) - + self._lineage_map[target.dataset.urn].merge_lineage( + upstreams=target.upstreams, cll=target.cll + ) else: self._lineage_map[target.dataset.urn] = target @@ -420,7 +449,10 @@ def populate_lineage( ) -> None: populate_calls: List[Tuple[str, LineageCollectorType]] = [] - if self.config.table_lineage_mode == LineageMode.STL_SCAN_BASED: + if self.config.table_lineage_mode in { + LineageMode.STL_SCAN_BASED, + LineageMode.MIXED, + }: # Populate table level lineage by getting upstream tables from stl_scan redshift table query = RedshiftQuery.stl_scan_based_lineage_query( self.config.database, @@ -428,15 +460,10 @@ def populate_lineage( self.end_time, ) populate_calls.append((query, LineageCollectorType.QUERY_SCAN)) - elif self.config.table_lineage_mode == LineageMode.SQL_BASED: - # Populate table level lineage by parsing table creating sqls - query = RedshiftQuery.list_insert_create_queries_sql( - db_name=database, - start_time=self.start_time, - end_time=self.end_time, - ) - populate_calls.append((query, LineageCollectorType.QUERY_SQL_PARSER)) - elif self.config.table_lineage_mode == LineageMode.MIXED: + if self.config.table_lineage_mode in { + LineageMode.SQL_BASED, + LineageMode.MIXED, + }: # Populate table level lineage by parsing table creating sqls query = RedshiftQuery.list_insert_create_queries_sql( db_name=database, @@ -445,15 +472,7 @@ def populate_lineage( ) populate_calls.append((query, LineageCollectorType.QUERY_SQL_PARSER)) - # Populate table level lineage by getting upstream tables from stl_scan redshift table - query = RedshiftQuery.stl_scan_based_lineage_query( - db_name=database, - start_time=self.start_time, - end_time=self.end_time, - ) - populate_calls.append((query, LineageCollectorType.QUERY_SCAN)) - - if self.config.include_views: + if self.config.include_views and self.config.include_view_lineage: # Populate table level lineage for views query = RedshiftQuery.view_lineage_query() populate_calls.append((query, LineageCollectorType.VIEW)) @@ -540,7 +559,6 @@ def get_lineage( dataset_urn: str, 
schema: RedshiftSchema, ) -> Optional[Tuple[UpstreamLineageClass, Dict[str, str]]]: - upstream_lineage: List[UpstreamClass] = [] cll_lineage: List[FineGrainedLineage] = [] diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index f84b3f8b94a2e..b43c8de4c8f3d 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -193,7 +193,7 @@ class _ColumnRef(_FrozenModel): column: str -class ColumnRef(_ParserBaseModel): +class ColumnRef(_FrozenModel): table: Urn column: str @@ -929,6 +929,7 @@ def _translate_sqlglot_type( TypeClass = ArrayTypeClass elif sqlglot_type in { sqlglot.exp.DataType.Type.UNKNOWN, + sqlglot.exp.DataType.Type.NULL, }: return None else: @@ -1090,7 +1091,7 @@ def _sqlglot_lineage_inner( table_schemas_resolved=total_schemas_resolved, ) logger.debug( - f"Resolved {len(table_name_schema_mapping)} of {len(tables)} table schemas" + f"Resolved {total_schemas_resolved} of {total_tables_discovered} table schemas" ) # Simplify the input statement for column-level lineage generation. From 4448cf1f2d777c82d913e5ee0aeabd0e2785fad3 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 26 Dec 2023 16:30:24 +0530 Subject: [PATCH 291/792] fix(ui/ingestion): add debounce on search on ingestion listing page (#9516) --- .../entity/shared/tabs/Dataset/Queries/utils/constants.ts | 1 + datahub-web-react/src/app/ingest/secret/SecretsList.tsx | 8 +++++++- .../src/app/ingest/source/IngestionSourceList.tsx | 8 +++++++- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts index 5176c1207874c..025705abc580e 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts @@ -16,5 +16,6 @@ export const DEFAULT_MAX_RECENT_QUERIES = 9; */ export const MAX_ROWS_BEFORE_DEBOUNCE = 50; export const HALF_SECOND_IN_MS = 500; +export const ONE_SECOND_IN_MS = 1000; export const ADD_UNAUTHORIZED_MESSAGE = 'You are not authorized to add Queries to this entity.'; diff --git a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx index 2728fff0ccba3..1a960997e6bee 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx @@ -1,5 +1,6 @@ import React, { useEffect, useState } from 'react'; import { Button, Empty, message, Modal, Pagination, Typography } from 'antd'; +import { debounce } from 'lodash'; import { DeleteOutlined, PlusOutlined } from '@ant-design/icons'; import * as QueryString from 'query-string'; import { useLocation } from 'react-router'; @@ -18,6 +19,7 @@ import { SearchBar } from '../../search/SearchBar'; import { useEntityRegistry } from '../../useEntityRegistry'; import { scrollToTop } from '../../shared/searchUtils'; import { addSecretToListSecretsCache, removeSecretFromListSecretsCache } from './cacheUtils'; +import { ONE_SECOND_IN_MS } from '../../entity/shared/tabs/Dataset/Queries/utils/constants'; const DeleteButtonContainer = styled.div` display: flex; @@ -84,6 +86,10 @@ export const SecretsList = () => { setPage(newPage); }; + const debouncedSetQuery = debounce((newQuery: 
string | undefined) => { + setQuery(newQuery); + }, ONE_SECOND_IN_MS); + const onSubmit = (state: SecretBuilderState, resetBuilderState: () => void) => { createSecretMutation({ variables: { @@ -199,7 +205,7 @@ export const SecretsList = () => { onSearch={() => null} onQueryChange={(q) => { setPage(1); - setQuery(q); + debouncedSetQuery(q); }} entityRegistry={entityRegistry} hideRecommendations diff --git a/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx b/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx index 6188845694f9e..e6db6bfcc9a61 100644 --- a/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx +++ b/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx @@ -1,5 +1,6 @@ import { PlusOutlined, RedoOutlined } from '@ant-design/icons'; import React, { useCallback, useEffect, useState } from 'react'; +import { debounce } from 'lodash'; import * as QueryString from 'query-string'; import { useLocation } from 'react-router'; import { Button, message, Modal, Pagination, Select } from 'antd'; @@ -30,6 +31,7 @@ import { INGESTION_CREATE_SOURCE_ID, INGESTION_REFRESH_SOURCES_ID, } from '../../onboarding/config/IngestionOnboardingConfig'; +import { ONE_SECOND_IN_MS } from '../../entity/shared/tabs/Dataset/Queries/utils/constants'; const PLACEHOLDER_URN = 'placeholder-urn'; @@ -133,6 +135,10 @@ export const IngestionSourceList = () => { setLastRefresh(new Date().getTime()); }, [refetch]); + const debouncedSetQuery = debounce((newQuery: string | undefined) => { + setQuery(newQuery); + }, ONE_SECOND_IN_MS); + function hasActiveExecution() { return !!filteredSources.find((source) => source.executions?.executionRequests.find((request) => isExecutionRequestActive(request)), @@ -401,7 +407,7 @@ export const IngestionSourceList = () => { onSearch={() => null} onQueryChange={(q) => { setPage(1); - setQuery(q); + debouncedSetQuery(q); }} entityRegistry={entityRegistry} hideRecommendations From d399a530576974da9beb1af24d7ea5f98922b6d3 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Tue, 26 Dec 2023 18:26:40 +0530 Subject: [PATCH 292/792] fix(ui): correct the color of edit links (#9517) --- .../entity/shared/tabs/Documentation/components/LinkList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx index 1b5c3d54009da..9f94a830ac1cf 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx @@ -159,7 +159,7 @@ export const LinkList = ({ refetch }: LinkListProps) => { - + + } + trigger={['click']} + > + + ), }, From b7a0bbcb3d6000d3d9827ab19f13c3118d0bfc19 Mon Sep 17 00:00:00 2001 From: Fernando Marino` Date: Thu, 28 Dec 2023 01:24:25 +0100 Subject: [PATCH 298/792] feat(ingest/openapi): support proxies and alternate auth schemes (#9492) Co-authored-by: Fernando Marino Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/openapi.py | 41 +++++++++++++++---- .../ingestion/source/openapi_parser.py | 26 ++++++++---- 2 files changed, 51 insertions(+), 16 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi.py b/metadata-ingestion/src/datahub/ingestion/source/openapi.py index 3925ba51c16dd..ad62ef7362aeb 100755 --- 
a/metadata-ingestion/src/datahub/ingestion/source/openapi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi.py @@ -52,6 +52,13 @@ class OpenApiConfig(ConfigModel): ignore_endpoints: list = Field(default=[], description="") username: str = Field(default="", description="") password: str = Field(default="", description="") + proxies: Optional[dict] = Field( + default=None, + description="Eg. " + "`{'http': 'http://10.10.1.10:3128', 'https': 'http://10.10.1.10:1080'}`." + "If authentication is required, add it to the proxy url directly e.g. " + "`http://user:pass@10.10.1.10:3128/`.", + ) forced_examples: dict = Field(default={}, description="") token: Optional[str] = Field(default=None, description="") get_token: dict = Field(default={}, description="") @@ -87,9 +94,13 @@ def get_swagger(self) -> Dict: password=self.password, tok_url=url4req, method=self.get_token["request_type"], + proxies=self.proxies, ) sw_dict = get_swag_json( - self.url, token=self.token, swagger_file=self.swagger_file + self.url, + token=self.token, + swagger_file=self.swagger_file, + proxies=self.proxies, ) # load the swagger file else: # using basic auth for accessing endpoints @@ -98,6 +109,7 @@ def get_swagger(self) -> Dict: username=self.username, password=self.password, swagger_file=self.swagger_file, + proxies=self.proxies, ) return sw_dict @@ -258,10 +270,15 @@ def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901 tot_url = clean_url(config.url + self.url_basepath + endpoint_k) if config.token: - response = request_call(tot_url, token=config.token) + response = request_call( + tot_url, token=config.token, proxies=config.proxies + ) else: response = request_call( - tot_url, username=config.username, password=config.password + tot_url, + username=config.username, + password=config.password, + proxies=config.proxies, ) if response.status_code == 200: fields2add, root_dataset_samples[dataset_name] = extract_fields( @@ -281,10 +298,15 @@ def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901 url_guess = try_guessing(endpoint_k, root_dataset_samples) tot_url = clean_url(config.url + self.url_basepath + url_guess) if config.token: - response = request_call(tot_url, token=config.token) + response = request_call( + tot_url, token=config.token, proxies=config.proxies + ) else: response = request_call( - tot_url, username=config.username, password=config.password + tot_url, + username=config.username, + password=config.password, + proxies=config.proxies, ) if response.status_code == 200: fields2add, _ = extract_fields(response, dataset_name) @@ -304,10 +326,15 @@ def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901 ) tot_url = clean_url(config.url + self.url_basepath + composed_url) if config.token: - response = request_call(tot_url, token=config.token) + response = request_call( + tot_url, token=config.token, proxies=config.proxies + ) else: response = request_call( - tot_url, username=config.username, password=config.password + tot_url, + username=config.username, + password=config.password, + proxies=config.proxies, ) if response.status_code == 200: fields2add, _ = extract_fields(response, dataset_name) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py index 1ab40bc8be73d..84bb3ad452611 100755 --- a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py @@ -51,6 +51,7 @@ def 
request_call( token: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, + proxies: Optional[dict] = None, ) -> requests.Response: headers = {"accept": "application/json"} @@ -60,8 +61,8 @@ def request_call( ) elif token is not None: - headers["Authorization"] = f"Bearer {token}" - return requests.get(url, headers=headers) + headers["Authorization"] = f"{token}" + return requests.get(url, proxies=proxies, headers=headers) else: return requests.get(url, headers=headers) @@ -72,12 +73,15 @@ def get_swag_json( username: Optional[str] = None, password: Optional[str] = None, swagger_file: str = "", + proxies: Optional[dict] = None, ) -> Dict: tot_url = url + swagger_file if token is not None: - response = request_call(url=tot_url, token=token) + response = request_call(url=tot_url, token=token, proxies=proxies) else: - response = request_call(url=tot_url, username=username, password=password) + response = request_call( + url=tot_url, username=username, password=password, proxies=proxies + ) if response.status_code != 200: raise Exception(f"Unable to retrieve {tot_url}, error {response.status_code}") @@ -251,7 +255,7 @@ def compose_url_attr(raw_url: str, attr_list: list) -> str: attr_list=["2",]) asd2 == "http://asd.com/2" """ - splitted = re.split(r"\{[^}]+\}", raw_url) + splitted = re.split(r"\{[^}]+}", raw_url) if splitted[-1] == "": # it can happen that the last element is empty splitted = splitted[:-1] composed_url = "" @@ -265,7 +269,7 @@ def compose_url_attr(raw_url: str, attr_list: list) -> str: def maybe_theres_simple_id(url: str) -> str: - dets = re.findall(r"(\{[^}]+\})", url) # searching the fields between parenthesis + dets = re.findall(r"(\{[^}]+})", url) # searching the fields between parenthesis if len(dets) == 0: return url dets_w_id = [det for det in dets if "id" in det] # the fields containing "id" @@ -349,6 +353,7 @@ def get_tok( password: str = "", tok_url: str = "", method: str = "post", + proxies: Optional[dict] = None, ) -> str: """ Trying to post username/password to get auth. 
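The `proxies` parameter threaded through this module is handed directly to `requests`, which selects a proxy by the target URL's scheme. A minimal, self-contained sketch of the call pattern these signatures enable — the proxy addresses are the illustrative values from the `OpenApiConfig.proxies` description above, and the API URL is a hypothetical placeholder, not an endpoint used by this patch:

import requests

# Proxy mapping in the shape documented on OpenApiConfig.proxies.
# Addresses are illustrative placeholders, not real infrastructure;
# proxy credentials, when required, go into the proxy URL itself,
# e.g. "http://user:pass@10.10.1.10:3128/".
proxies = {
    "http": "http://10.10.1.10:3128",
    "https": "http://10.10.1.10:1080",
}

headers = {"accept": "application/json"}
# requests picks the proxy whose key matches the request's scheme and
# tunnels the GET through it; callers only need to pass the dict along.
response = requests.get(
    "https://api.example.com/openapi.json",
    headers=headers,
    proxies=proxies,
    timeout=30,
)
response.raise_for_status()
swagger_spec = response.json()

This is why request_call, get_swag_json, and get_tok each simply grow a pass-through proxies argument rather than any proxy-specific logic of their own.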
@@ -357,12 +362,15 @@ def get_tok( url4req = url + tok_url if method == "post": # this will make a POST call with username and password - data = {"username": username, "password": password} + data = {"username": username, "password": password, "maxDuration": True} # url2post = url + "api/authenticate/" - response = requests.post(url4req, data=data) + response = requests.post(url4req, proxies=proxies, json=data) if response.status_code == 200: cont = json.loads(response.content) - token = cont["tokens"]["access"] + if "token" in cont: # other authentication scheme + token = cont["token"] + else: # works only for bearer authentication scheme + token = f"Bearer {cont['tokens']['access']}" elif method == "get": # this will make a GET call with username and password response = requests.get(url4req) From 754d8814477d050e907aeca6c561d98372b60dc5 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Wed, 27 Dec 2023 19:33:41 -0500 Subject: [PATCH 299/792] build(ingest/feast): upgrade to latest feast version (#9439) --- metadata-ingestion/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 4632c20cd3b96..32d49ffc73fa3 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -316,7 +316,7 @@ # https://github.com/elastic/elasticsearch-py/issues/1639#issuecomment-883587433 "elasticsearch": {"elasticsearch==7.13.4"}, "feast": { - "feast~=0.31.1", + "feast~=0.34.1", "flask-openid>=1.3.0", # typeguard 3.x, released on 2023-03-14, seems to cause issues with Feast. "typeguard<3", From 9f79f44dd69a5a86864ccc31473305bdf1c2f4bb Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 27 Dec 2023 20:05:17 -0500 Subject: [PATCH 300/792] build: enable gradle caching (#9525) --- .github/workflows/airflow-plugin.yml | 1 + .github/workflows/build-and-test.yml | 1 + .github/workflows/check-datahub-jars.yml | 1 + .github/workflows/docker-unified.yml | 27 ++++++++++++--------- .github/workflows/documentation.yml | 1 + .github/workflows/metadata-ingestion.yml | 1 + .github/workflows/metadata-io.yml | 2 ++ .github/workflows/metadata-model.yml | 2 ++ .github/workflows/publish-datahub-jars.yml | 2 ++ .github/workflows/spark-smoke-test.yml | 2 ++ gradle.properties | 2 +- gradle/wrapper/gradle-wrapper.jar | Bin 61624 -> 61608 bytes gradlew | 4 +-- 13 files changed, 32 insertions(+), 14 deletions(-) diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 70816e5f093d1..97a0da8546ed1 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -55,6 +55,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index dab64cf2dca5e..6daf1904ba3ae 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -42,6 +42,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 46d97ffec8861..556cd87f12df0 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -33,6 +33,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: 
actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 7cef38b1cd47c..454e766140245 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -84,6 +84,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -145,6 +146,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -206,6 +208,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -267,6 +270,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -328,6 +332,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -567,6 +572,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -653,6 +659,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -731,12 +738,13 @@ jobs: strategy: fail-fast: false matrix: - test_strategy: [ - "no_cypress_suite0", - "no_cypress_suite1", - "cypress_suite1", - "cypress_rest" - ] + test_strategy: + [ + "no_cypress_suite0", + "no_cypress_suite1", + "cypress_suite1", + "cypress_rest", + ] needs: [ setup, @@ -760,6 +768,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" @@ -904,11 +913,7 @@ jobs: deploy_datahub_head: name: Deploy to Datahub HEAD runs-on: ubuntu-latest - needs: - [ - setup, - smoke_test - ] + needs: [setup, smoke_test] steps: - uses: aws-actions/configure-aws-credentials@v1 if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 29953b8b70d91..e1671cc021919 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -32,6 +32,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 4e04fef3b3980..af73db483f9ae 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -49,6 +49,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 2188fcb07c77a..96229642244b6 100644 --- 
a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -34,9 +34,11 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Gradle build (and test) # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs # running build first without datahub-web-react:yarnBuild and then with it is 100% stable diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index d0112f1b14e7a..265a66aa236ae 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -34,10 +34,12 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Run model generation diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index 24d1c5436b315..0a311be33cd30 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -54,9 +54,11 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: checkout upstream repo run: | git remote add upstream https://github.com/datahub-project/datahub.git diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 60e183cce5179..94692bd3c2336 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -35,9 +35,11 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Remove images diff --git a/gradle.properties b/gradle.properties index 1cd349344b432..f410ff01bf397 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,7 +1,7 @@ org.gradle.daemon=false org.gradle.configureondemand=true org.gradle.parallel=true -org.gradle.caching=false +org.gradle.caching=true # Increase gradle JVM memory to 3GB to allow tests to run locally org.gradle.jvmargs=-Xmx3000m diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index afba109285af78dbd2a1d187e33ac4f87c76e392..ccebba7710deaf9f98673a68957ea02138b60d0a 100644 GIT binary patch delta 11632 zcmZvCWmsHIvn}otoWWsmcPF@o;2JczyX!!3mq9{scP9i11PcU$4GzKGB{&IihkW_o zbKd)$`O#H-t*YJKyL-C(sjk*_`0{mlL^UON1bi?o20=0j9xOh%ei@J~uLo722sQ!? 
[... base85-encoded GIT binary patch data for gradle/wrapper/gradle-wrapper.jar elided ...]
z!%x03B51pQ%QQ=ircVNW`JfytYAKP|fW#@bwxoVXLa!_-W*@PD;tt6=+4OqmCuvxj z@J}V_=GCE=6hG=5wFUR7YnDKtEDMGh!r)nwo?oHZyL_YGF}b+JA{Zx21@=uV`^p!k z2v9g+vJ6`5Lm#f?axjaAw&!p;6I99P zt8*QC9uOI^6>3?@Km~Th8c;#@i&rax{a{d}h(zj5>1LU6r*MYfZOZN)%var+F^a&i zvan4S)KA7`5ZQH7hi-~OFXCz{q|^x|8hg^^Z;XqEBAO_6dzi%^`F+$l+rA!^={v+BEq~W#UV%Lc|K$OKBrMsBeY# z9}0Gr2!hB~dHy+fSY?2qv{fxg8}|wygtEr{N5KaftO@>OQ?*6|K?`e!kd}Mh3esk; zzu@O}O-NgcgAa1p;DexZB79KyhQLcLli`J zTiOtHl?@-nxQz^*%Ldw@r2y$|Lxv71_Mm{u2IXu^LGT6-J_@7)^3N;ZzkuhD8SBo= z<(%{5zxr4D4szNNg|H`r_@I#;Wat{fe@x*)%t9y#%5qT9STM*StX&G|GLiq1AhPJ| zzcLUar;GoQ`*(36?$#j0x#S;$222Sd#xciUSI0Blkb_0}v5( zc7O)Cwr2<_=JA&VdQkthw?CwSp8ot-syZZr*iS&ne&9bO+Fx6eBMRuP_`kNKAe_IF zFGQLK{gE1vs39)aAjD$o9|Aq?-{J8cQ$S~C{GnZsA!WRvxXk~4>OwRBNTlBoK*Pr` zBWEu9Z<(s-uS|G?|6jN0fA%*1-k_JMIV}073+mso2A)trBX&S&vcJ>>gsSTTRd Date: Thu, 28 Dec 2023 04:06:41 -0500 Subject: [PATCH 301/792] chore(build): update base-requirements + add script for regeneration (#9524) --- .../base-requirements.txt | 317 +++++++++--------- .../regenerate-base-requirements.sh | 37 ++ 2 files changed, 195 insertions(+), 159 deletions(-) create mode 100755 docker/datahub-ingestion-base/regenerate-base-requirements.sh diff --git a/docker/datahub-ingestion-base/base-requirements.txt b/docker/datahub-ingestion-base/base-requirements.txt index 141382466ab9f..9092875902794 100644 --- a/docker/datahub-ingestion-base/base-requirements.txt +++ b/docker/datahub-ingestion-base/base-requirements.txt @@ -1,149 +1,147 @@ -# Excluded for slim -# pyspark==3.0.3 -# pydeequ==1.0.1 - +# Generated requirements file. Run ./regenerate-base-requirements.sh to regenerate. acryl-datahub-classify==0.0.8 -acryl-PyHive==0.6.14 -acryl-sqlglot==18.5.2.dev45 +acryl-PyHive==0.6.16 +acryl-sqlglot==20.4.1.dev14 aenum==3.1.15 -aiohttp==3.8.6 +aiohttp==3.9.1 aiosignal==1.3.1 -alembic==1.12.0 +alembic==1.13.1 altair==4.2.0 +annotated-types==0.6.0 anyio==3.7.1 -apache-airflow==2.7.2 -apache-airflow-providers-common-sql==1.7.2 -apache-airflow-providers-ftp==3.5.2 -apache-airflow-providers-http==4.5.2 -apache-airflow-providers-imap==3.3.2 -apache-airflow-providers-sqlite==3.4.3 -apispec==6.3.0 +apache-airflow==2.7.3 +apache-airflow-providers-common-sql==1.9.0 +apache-airflow-providers-ftp==3.7.0 +apache-airflow-providers-http==4.8.0 +apache-airflow-providers-imap==3.5.0 +apache-airflow-providers-sqlite==3.6.0 +apispec==6.3.1 appdirs==1.4.4 appnope==0.1.3 -argcomplete==3.1.2 +argcomplete==3.2.1 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 asgiref==3.7.2 asn1crypto==1.5.1 -asttokens==2.4.0 +asttokens==2.4.1 async-timeout==4.0.3 -asynch==0.2.2 +asynch==0.2.3 attrs==23.1.0 -avro==1.10.2 +avro==1.11.3 avro-gen3==0.7.11 -Babel==2.13.0 -backcall==0.2.0 +Babel==2.14.0 backoff==2.2.1 beautifulsoup4==4.12.2 bleach==6.1.0 -blinker==1.6.3 +blinker==1.7.0 blis==0.7.11 -boto3==1.28.62 -botocore==1.31.62 +boto3==1.34.8 +botocore==1.34.8 bowler==0.9.0 bracex==2.4 cached-property==1.5.2 cachelib==0.9.0 -cachetools==5.3.1 +cachetools==5.3.2 catalogue==2.0.10 -cattrs==23.1.2 -certifi==2023.7.22 +cattrs==23.2.3 +certifi==2023.11.17 cffi==1.16.0 chardet==5.2.0 -charset-normalizer==3.3.0 -ciso8601==2.3.0 +charset-normalizer==3.3.2 +ciso8601==2.3.1 click==8.1.7 click-default-group==1.2.4 click-spinner==0.1.10 clickclick==20.10.2 -clickhouse-cityhash==1.0.2.4 clickhouse-driver==0.2.6 clickhouse-sqlalchemy==0.2.4 -cloudpickle==2.2.1 +cloudpickle==3.0.0 colorama==0.4.6 colorlog==4.8.0 -comm==0.1.4 -confection==0.1.3 -ConfigUpdater==3.1.1 +comm==0.2.0 +confection==0.1.4 +ConfigUpdater==3.2 
confluent-kafka==2.3.0 connexion==2.14.2 cron-descriptor==1.4.0 croniter==2.0.1 -cryptography==41.0.4 +cryptography==41.0.7 cx-Oracle==8.3.0 cymem==2.0.8 -dask==2023.9.3 +dask==2023.12.1 databricks-cli==0.18.0 databricks-dbapi==0.6.0 -databricks-sdk==0.10.0 +databricks-sdk==0.15.0 +databricks-sql-connector==2.9.3 debugpy==1.8.0 decorator==5.1.1 defusedxml==0.7.1 -deltalake==0.11.0 +deltalake==0.14.0 Deprecated==1.2.14 dill==0.3.7 dnspython==2.4.2 -docker==6.1.3 +docker==7.0.0 docutils==0.20.1 ecdsa==0.18.0 elasticsearch==7.13.4 email-validator==1.3.1 entrypoints==0.4 et-xmlfile==1.1.0 -exceptiongroup==1.1.3 -executing==2.0.0 -expandvars==0.11.0 -fastapi==0.103.2 -fastavro==1.8.4 -fastjsonschema==2.18.1 +exceptiongroup==1.2.0 +executing==2.0.1 +expandvars==0.12.0 +fastapi==0.108.0 +fastavro==1.9.2 +fastjsonschema==2.19.0 feast==0.31.1 -filelock==3.12.4 +filelock==3.13.1 fissix==21.11.13 Flask==2.2.5 flatdict==4.0.1 -frozenlist==1.4.0 -fsspec==2023.9.2 +frozenlist==1.4.1 +fsspec==2023.12.2 future==0.18.3 -GeoAlchemy2==0.14.1 -gitdb==4.0.10 -GitPython==3.1.37 -google-api-core==2.12.0 -google-auth==2.23.3 -google-cloud-appengine-logging==1.3.2 +GeoAlchemy2==0.14.3 +gitdb==4.0.11 +GitPython==3.1.40 +google-api-core==2.15.0 +google-auth==2.25.2 +google-cloud-appengine-logging==1.4.0 google-cloud-audit-log==0.2.5 -google-cloud-bigquery==3.12.0 -google-cloud-core==2.3.3 +google-cloud-bigquery==3.14.1 +google-cloud-core==2.4.1 google-cloud-datacatalog-lineage==0.2.2 google-cloud-logging==3.5.0 google-crc32c==1.5.0 google-re2==1.1 -google-resumable-media==2.6.0 -googleapis-common-protos==1.60.0 +google-resumable-media==2.7.0 +googleapis-common-protos==1.62.0 gql==3.4.1 graphql-core==3.2.3 graphviz==0.20.1 great-expectations==0.15.50 -greenlet==3.0.0 -grpc-google-iam-v1==0.12.6 -grpcio==1.59.0 -grpcio-reflection==1.59.0 -grpcio-status==1.59.0 -grpcio-tools==1.59.0 +greenlet==3.0.3 +grpc-google-iam-v1==0.13.0 +grpcio==1.60.0 +grpcio-reflection==1.60.0 +grpcio-status==1.60.0 +grpcio-tools==1.60.0 gssapi==1.8.3 gunicorn==21.2.0 h11==0.14.0 -httpcore==0.18.0 -httptools==0.6.0 -httpx==0.25.0 +hdbcli==2.19.20 +httpcore==1.0.2 +httptools==0.6.1 +httpx==0.26.0 humanfriendly==10.0 -idna==3.4 +idna==3.6 ijson==3.2.3 -importlib-metadata==6.8.0 -importlib-resources==6.1.0 +importlib-metadata==6.11.0 +importlib-resources==6.1.1 inflection==0.5.1 ipaddress==1.0.23 ipykernel==6.17.1 -ipython==8.16.1 +ipython==8.19.0 ipython-genutils==0.2.0 ipywidgets==8.1.1 iso3166==2.1.1 @@ -152,34 +150,34 @@ itsdangerous==2.1.2 jedi==0.19.1 Jinja2==3.1.2 jmespath==1.0.1 -JPype1==1.4.1 +JPype1==1.5.0 jsonlines==4.0.0 jsonpatch==1.33 jsonpointer==2.4 jsonref==1.1.0 -jsonschema==4.19.1 -jsonschema-specifications==2023.7.1 +jsonschema==4.20.0 +jsonschema-specifications==2023.12.1 jupyter-server==1.24.0 jupyter_client==7.4.9 jupyter_core==4.12.0 -jupyterlab-pygments==0.2.2 jupyterlab-widgets==3.0.9 +jupyterlab_pygments==0.3.0 langcodes==3.3.0 lark==1.1.4 -lazy-object-proxy==1.9.0 +lazy-object-proxy==1.10.0 leb128==1.0.5 -limits==3.6.0 +limits==3.7.0 linear-tsv==1.1.0 linkify-it-py==2.0.2 -lkml==1.3.1 +lkml==1.3.3 locket==1.0.0 lockfile==0.12.2 looker-sdk==23.0.0 -lxml==4.9.3 +lxml==4.9.4 lz4==4.3.2 -makefun==1.15.1 -Mako==1.2.4 -Markdown==3.5 +makefun==1.15.2 +Mako==1.3.0 +Markdown==3.5.1 markdown-it-py==3.0.0 MarkupSafe==2.1.3 marshmallow==3.20.1 @@ -190,26 +188,26 @@ mdit-py-plugins==0.4.0 mdurl==0.1.2 mistune==3.0.2 mixpanel==4.10.0 -mlflow-skinny==2.7.1 +mlflow-skinny==2.9.2 mmh3==4.0.1 mmhash3==3.0.1 more-itertools==10.1.0 
moreorless==0.4.0 -moto==4.2.5 +moto==4.2.12 msal==1.22.0 multidict==6.0.4 murmurhash==1.0.10 -mypy==1.6.0 +mypy==1.8.0 mypy-extensions==1.0.0 nbclassic==1.0.0 nbclient==0.6.3 -nbconvert==7.9.2 +nbconvert==7.13.1 nbformat==5.9.1 nest-asyncio==1.5.8 -networkx==3.1 +networkx==3.2.1 notebook==6.5.6 notebook_shim==0.2.3 -numpy==1.26.0 +numpy==1.26.2 oauthlib==3.2.2 okta==1.7.0 openlineage-airflow==1.2.0 @@ -217,110 +215,107 @@ openlineage-integration-common==1.2.0 openlineage-python==1.2.0 openlineage_sql==1.2.0 openpyxl==3.1.2 -opentelemetry-api==1.20.0 -opentelemetry-exporter-otlp==1.20.0 -opentelemetry-exporter-otlp-proto-common==1.20.0 -opentelemetry-exporter-otlp-proto-grpc==1.20.0 -opentelemetry-exporter-otlp-proto-http==1.20.0 -opentelemetry-proto==1.20.0 -opentelemetry-sdk==1.20.0 -opentelemetry-semantic-conventions==0.41b0 +opentelemetry-api==1.22.0 +opentelemetry-exporter-otlp==1.22.0 +opentelemetry-exporter-otlp-proto-common==1.22.0 +opentelemetry-exporter-otlp-proto-grpc==1.22.0 +opentelemetry-exporter-otlp-proto-http==1.22.0 +opentelemetry-proto==1.22.0 +opentelemetry-sdk==1.22.0 +opentelemetry-semantic-conventions==0.43b0 ordered-set==4.1.0 -oscrypto==1.3.0 packaging==23.2 pandas==1.5.3 pandavro==1.5.2 pandocfilters==1.5.0 -parse==1.19.1 +parse==1.20.0 parso==0.8.3 partd==1.4.1 -pathspec==0.11.2 -pathy==0.10.2 +pathspec==0.12.1 +pathy==0.10.3 pendulum==2.1.2 -pexpect==4.8.0 +pexpect==4.9.0 phonenumbers==8.13.0 -pickleshare==0.7.5 platformdirs==3.11.0 pluggy==1.3.0 preshed==3.0.9 prison==0.2.1 -progressbar2==4.2.0 -prometheus-client==0.17.1 -prompt-toolkit==3.0.39 -proto-plus==1.22.3 -protobuf==4.24.4 -psutil==5.9.5 +progressbar2==4.3.2 +prometheus-client==0.19.0 +prompt-toolkit==3.0.43 +proto-plus==1.23.0 +protobuf==4.25.1 +psutil==5.9.7 psycopg2-binary==2.9.9 ptyprocess==0.7.0 pure-eval==0.2.2 pure-sasl==0.6.2 -py-partiql-parser==0.3.7 +py-partiql-parser==0.5.0 pyarrow==11.0.0 -pyasn1==0.5.0 +pyasn1==0.5.1 pyasn1-modules==0.3.0 -pyathena==2.4.1 -pycountry==22.3.5 +pyathena==2.25.2 +pycountry==23.12.11 pycparser==2.21 pycryptodome==3.19.0 -pycryptodomex==3.19.0 pydantic==1.10.13 +pydantic_core==2.14.6 pydash==7.0.6 -pydruid==0.6.5 -Pygments==2.16.1 +pydruid==0.6.6 +Pygments==2.17.2 pyiceberg==0.4.0 -pymongo==4.5.0 +pymongo==4.6.1 PyMySQL==1.1.0 -pyOpenSSL==23.2.0 +pyOpenSSL==23.3.0 pyparsing==3.0.9 pyspnego==0.10.2 python-daemon==3.0.1 python-dateutil==2.8.2 python-dotenv==1.0.0 python-jose==3.3.0 -python-ldap==3.4.3 +python-ldap==3.4.4 python-nvd3==0.15.0 python-slugify==8.0.1 python-stdnum==1.19 -python-tds==1.13.0 +python-tds==1.14.0 python-utils==3.8.1 python3-openid==3.2.0 pytz==2023.3.post1 pytzdata==2020.1 PyYAML==6.0.1 pyzmq==24.0.1 -ratelimiter==1.2.0.post0 redash-toolbelt==0.1.9 -redshift-connector==2.0.914 -referencing==0.30.2 -regex==2023.10.3 +redshift-connector==2.0.918 +referencing==0.32.0 +regex==2023.12.25 requests==2.31.0 requests-file==1.5.1 requests-gssapi==1.2.3 requests-ntlm==1.2.0 requests-toolbelt==0.10.1 -responses==0.23.3 +responses==0.24.1 rfc3339-validator==0.1.4 rfc3986==2.0.0 -rich==13.6.0 -rich-argparse==1.3.0 -rpds-py==0.10.6 +rich==13.7.0 +rich-argparse==1.4.0 +rpds-py==0.15.2 rsa==4.9 ruamel.yaml==0.17.17 ruamel.yaml.clib==0.2.8 -s3transfer==0.7.0 -schwifty==2023.9.0 -scipy==1.11.3 +s3transfer==0.10.0 +schwifty==2023.11.2 +scipy==1.11.4 scramp==1.4.4 Send2Trash==1.8.2 -sentry-sdk==1.32.0 +sentry-sdk==1.39.1 setproctitle==1.3.3 simple-salesforce==1.12.5 six==1.16.0 smart-open==6.4.0 smmap==5.0.1 sniffio==1.3.0 -snowflake-connector-python==3.2.1 
-snowflake-sqlalchemy==1.5.0 +snowflake-connector-python==3.6.0 +snowflake-sqlalchemy==1.5.1 sortedcontainers==2.4.0 soupsieve==2.5 spacy==3.4.3 @@ -328,67 +323,71 @@ spacy-legacy==3.0.12 spacy-loggers==1.0.5 sql-metadata==2.2.2 SQLAlchemy==1.4.44 -sqlalchemy-bigquery==1.8.0 -SQLAlchemy-JSONField==1.0.1.post0 +sqlalchemy-bigquery==1.9.0 +sqlalchemy-hana==1.1.1 +SQLAlchemy-JSONField==1.0.2 sqlalchemy-pytds==0.3.5 sqlalchemy-redshift==0.8.14 SQLAlchemy-Utils==0.41.1 -sqlalchemy2-stubs==0.0.2a35 +sqlalchemy2-stubs==0.0.2a37 sqllineage==1.3.8 sqlparse==0.4.4 srsly==2.4.8 stack-data==0.6.3 -starlette==0.27.0 +starlette==0.32.0.post1 strictyaml==1.7.3 tableauserverclient==0.25 tableschema==1.20.2 tabulate==0.9.0 tabulator==1.53.5 tenacity==8.2.3 -termcolor==2.3.0 -terminado==0.17.1 +teradatasql==20.0.0.2 +teradatasqlalchemy==17.20.0.0 +termcolor==2.4.0 +terminado==0.18.0 text-unidecode==1.3 thinc==8.1.12 -thrift==0.13.0 +thrift==0.16.0 thrift-sasl==0.4.3 tinycss2==1.2.1 toml==0.10.2 tomli==2.0.1 -tomlkit==0.12.1 +tomlkit==0.12.3 toolz==0.12.0 -tornado==6.3.3 +tornado==6.4 tqdm==4.66.1 traitlets==5.2.1.post0 trino==0.327.0 typeguard==2.13.3 typer==0.7.0 -types-PyYAML==6.0.12.12 typing-inspect==0.9.0 -typing_extensions==4.8.0 -tzlocal==5.1 +typing_extensions==4.9.0 +tzlocal==5.2 uc-micro-py==1.0.2 -ujson==5.8.0 +ujson==5.9.0 unicodecsv==0.14.1 -urllib3==1.26.17 -uvicorn==0.23.2 -uvloop==0.17.0 -vertica-python==1.3.5 -vertica-sqlalchemy-dialect==0.0.8 +universal-pathlib==0.1.4 +urllib3==1.26.18 +uvicorn==0.25.0 +uvloop==0.19.0 +vertica-python==1.3.8 +vertica-sqlalchemy-dialect==0.0.8.1 vininfo==1.7.0 volatile==2.1.0 wasabi==0.10.1 -watchfiles==0.20.0 +watchfiles==0.21.0 wcmatch==8.5 -wcwidth==0.2.8 +wcwidth==0.2.12 webencodings==0.5.1 -websocket-client==1.6.4 -websockets==11.0.3 +websocket-client==1.7.0 +websockets==12.0 Werkzeug==2.2.3 widgetsnbextension==4.0.9 -wrapt==1.15.0 -WTForms==3.1.0 +wrapt==1.16.0 +WTForms==3.0.1 xlrd==2.0.1 xmltodict==0.13.0 -yarl==1.9.2 +yarl==1.9.4 zeep==4.2.1 -zstd==1.5.5.1 \ No newline at end of file +zipp==3.17.0 +zstd==1.5.5.1 diff --git a/docker/datahub-ingestion-base/regenerate-base-requirements.sh b/docker/datahub-ingestion-base/regenerate-base-requirements.sh new file mode 100755 index 0000000000000..6fb331afa484a --- /dev/null +++ b/docker/datahub-ingestion-base/regenerate-base-requirements.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# This script is used to regenerate the base-requirements.txt file + +set -euxo pipefail +cd "$( dirname "${BASH_SOURCE[0]}" )" + +SCRIPT_NAME=$(basename "$0") +DATAHUB_DIR=$(pwd)/../.. + +# Create a virtualenv. +VENV_DIR=$(mktemp -d) +python -c "import sys; assert sys.version_info >= (3, 9), 'Python 3.9 or higher is required.'" +python -m venv $VENV_DIR +source $VENV_DIR/bin/activate +pip install --upgrade pip setuptools wheel +echo "Using virtualenv at $VENV_DIR" + +# Install stuff. +pushd $DATAHUB_DIR/metadata-ingestion +pip install -e . +pip install -e '../metadata-ingestion-modules/airflow-plugin/[plugin-v2]' +pip install -e '.[all]' +popd + +# Generate the requirements file. +# Removing Flask deps due as per https://github.com/datahub-project/datahub/pull/6867/files +# Removing py4j and PyJWT due to https://github.com/datahub-project/datahub/pull/6868/files +# Removing pyspark and pydeequ because we don't want them in the slim image, so they can be added separately. +# TODO: It's unclear if these removals are still actually needed. +echo "# Generated requirements file. Run ./$SCRIPT_NAME to regenerate." 
> base-requirements.txt +pip freeze \ + | grep -v -E "^-e" \ + | grep -v "Flask-" \ + | grep -v -E "(py4j|PyJWT)==" \ + | grep -v -E "(pyspark|pydeequ)==" \ + >> base-requirements.txt From 4efa46f8c91dfdedc21b7081143d196c7a0be0da Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 28 Dec 2023 15:05:14 +0530 Subject: [PATCH 302/792] test(cypress/users): add automatic reset password test (#9515) --- .../src/app/identity/user/UserListItem.tsx | 17 ++- .../app/identity/user/ViewResetTokenModal.tsx | 7 +- .../cypress/e2e/mutations/add_users.js | 135 +++++++++++++----- 3 files changed, 114 insertions(+), 45 deletions(-) diff --git a/datahub-web-react/src/app/identity/user/UserListItem.tsx b/datahub-web-react/src/app/identity/user/UserListItem.tsx index 69b8a6c2d1355..8ad3d7d93d657 100644 --- a/datahub-web-react/src/app/identity/user/UserListItem.tsx +++ b/datahub-web-react/src/app/identity/user/UserListItem.tsx @@ -98,8 +98,8 @@ export default function UserListItem({ user, canManageUserCredentials, selectRol
{displayName}
-
- {user.username} +
+ {user.username}
{userStatus && ( @@ -121,8 +121,12 @@ export default function UserListItem({ user, canManageUserCredentials, selectRol trigger={['click']} overlay={ - setIsViewingResetToken(true)}> -   Reset user password + setIsViewingResetToken(true)} + data-testid="reset-menu-item" + > +   Reset user password  Delete @@ -130,7 +134,10 @@ export default function UserListItem({ user, canManageUserCredentials, selectRol } > - + Generate a new reset link! Note, any old links will cease to be active. - + diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js b/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js index e19c6065d4274..ba225ba37884b 100644 --- a/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js +++ b/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js @@ -1,47 +1,104 @@ const tryToSignUp = () => { - let number = Math.floor(Math.random() * 100000); - let name = `Example Name ${number}`; - cy.enterTextInTestId("email", `example${number}@example.com`); - cy.enterTextInTestId("name", name); - cy.enterTextInTestId("password", "Example password"); - cy.enterTextInTestId("confirmPassword", "Example password"); - - cy.mouseover("#title").click(); - cy.waitTextVisible("Other").click(); - - cy.get("[type=submit]").click(); - return name; + let number = Math.floor(Math.random() * 100000); + let name = `Example Name ${number}`; + let email = `example${number}@example.com`; + cy.enterTextInTestId("email", email); + cy.enterTextInTestId("name", name); + cy.enterTextInTestId("password", "Example password"); + cy.enterTextInTestId("confirmPassword", "Example password"); + + cy.mouseover("#title").click(); + cy.waitTextVisible("Other").click(); + + cy.get("[type=submit]").click(); + return { name, email }; }; describe("add_user", () => { - it("go to user link and invite a user", () => { - cy.login(); + let registeredEmail = ""; + it("go to user link and invite a user", () => { + cy.login(); + + cy.visit("/settings/identities/users"); + cy.waitTextVisible("Invite Users"); + cy.clickOptionWithText("Invite Users"); + + cy.waitTextVisible(/signup\?invite_token=\w{32}/) + .then(($elem) => { + const inviteLink = $elem.text(); + cy.log(inviteLink); cy.visit("/settings/identities/users"); - cy.waitTextVisible("Invite Users"); - - cy.clickOptionWithText("Invite Users"); - - cy.waitTextVisible(/signup\?invite_token=\w{32}/).then(($elem) => { - const inviteLink = $elem.text(); - cy.log(inviteLink); - cy.visit("/settings/identities/users"); - cy.logout(); - cy.visit(inviteLink); - let name = tryToSignUp(); - cy.waitTextVisible("Welcome to DataHub"); - cy.hideOnboardingTour(); - cy.waitTextVisible(name); - }).then(() => { - cy.logout(); - cy.visit("/signup?invite_token=bad_token"); - tryToSignUp(); - cy.waitTextVisible("Failed to log in! An unexpected error occurred."); - }); + cy.logout(); + cy.visit(inviteLink); + const { name, email } = tryToSignUp(); + registeredEmail = email; + cy.waitTextVisible("Welcome to DataHub"); + cy.hideOnboardingTour(); + cy.waitTextVisible(name); + }) + .then(() => { + cy.logout(); + cy.visit("/signup?invite_token=bad_token"); + tryToSignUp(); + cy.waitTextVisible("Failed to log in! 
An unexpected error occurred."); + }); + }); + + it("Verify you can’t generate a reset password link for a non-native user", () => { + cy.login(); + cy.visit("/settings/identities/users"); + cy.waitTextVisible("Invite Users"); + cy.get("[data-testid=userItem-non-native]").first().click(); + cy.get('[data-testid="reset-menu-item"]').should( + "have.attr", + "aria-disabled", + "true" + ); + }); + + it("Generate a reset password link for a native user", () => { + cy.login(); + cy.visit("/settings/identities/users"); + cy.waitTextVisible("Invite Users"); + cy.get(`[data-testid="email-native"]`) + .contains(registeredEmail) + .should("exist") + .parents(".ant-list-item") + .find('[data-testid="userItem-native"]') + .should("be.visible") + .click(); + + cy.get("[data-testid=resetButton]").first().click(); + cy.get("[data-testid=refreshButton]").click(); + cy.waitTextVisible("Generated new link to reset credentials"); + + cy.window().then((win) => { + cy.stub(win, "prompt"); }); -}); + cy.get(".ant-typography-copy").should("be.visible").click(); + cy.get(".ant-modal-close").should("be.visible").click(); -// Verify you can’t generate a reset password link for a non-native user (root, for example) -// Generate a reset password link for a native user -// Log out, then verify that using a bad reset token in the URL doesn’t allow you to reset password -// Use the correct reset link to reset native user credentials \ No newline at end of file + cy.waitTextVisible(/reset\?reset_token=\w{32}/) + .then(($elem) => { + const inviteLink = $elem.text(); + cy.logout(); + cy.visit(inviteLink); + cy.enterTextInTestId("email", registeredEmail); + cy.enterTextInTestId("password", "Example Reset Password"); + cy.enterTextInTestId("confirmPassword", "Example Reset Password"); + cy.get("[type=submit]").click(); + cy.waitTextVisible("Welcome back"); + cy.hideOnboardingTour(); + }) + .then(() => { + cy.logout(); + cy.visit("/reset?reset_token=bad_token"); + cy.enterTextInTestId("email", registeredEmail); + cy.enterTextInTestId("password", "Example Reset Password"); + cy.enterTextInTestId("confirmPassword", "Example Reset Password"); + cy.get("[type=submit]").click(); + cy.waitTextVisible("Failed to log in!"); + }); + }); +}); From 3635c1c2213cfb8421d89b7cc106ab236d72c7ec Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Thu, 28 Dec 2023 15:24:26 +0530 Subject: [PATCH 303/792] feat(ingestion/bigquery): Use sqlglot_lineage for usage and add more perf timers (#9247) Co-authored-by: Andrew Sikowitz --- metadata-ingestion/setup.py | 2 - .../ingestion/source/bigquery_v2/bigquery.py | 22 +- .../source/bigquery_v2/bigquery_audit.py | 16 +- .../source/bigquery_v2/bigquery_config.py | 5 + .../source/bigquery_v2/bigquery_report.py | 12 +- .../ingestion/source/bigquery_v2/usage.py | 86 ++--- .../datahub/utilities/bigquery_sql_parser.py | 92 ----- .../src/datahub/utilities/sqlglot_lineage.py | 8 +- .../bigquery/test_bigquery_usage.py | 8 +- .../tests/unit/test_bigquery_sql_lineage.py | 66 +++- .../tests/unit/test_bigquery_sql_parser.py | 327 ------------------ .../tests/unit/test_bigquery_usage.py | 14 +- .../unit/test_bigqueryv2_usage_source.py | 6 +- 13 files changed, 159 insertions(+), 505 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py delete mode 100644 metadata-ingestion/tests/unit/test_bigquery_sql_parser.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 32d49ffc73fa3..8e4791e253c7c 100644 --- 
a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -295,8 +295,6 @@ "bigquery": sql_common | bigquery_common | { - # TODO: I doubt we need all three sql parsing libraries. - *sqllineage_lib, *sqlglot_lib, "sqlalchemy-bigquery>=1.4.1", "google-cloud-datacatalog-lineage==0.2.2", diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 9813945683289..3704eae96aece 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -221,6 +221,7 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): self.bigquery_data_dictionary = BigQuerySchemaApi( self.report.schema_api_perf, self.config.get_bigquery_client() ) + self.sql_parser_schema_resolver = self._init_schema_resolver() redundant_lineage_run_skip_handler: Optional[ RedundantLineageRunSkipHandler @@ -253,6 +254,7 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): self.usage_extractor = BigQueryUsageExtractor( config, self.report, + schema_resolver=self.sql_parser_schema_resolver, dataset_urn_builder=self.gen_dataset_urn_from_ref, redundant_run_skip_handler=redundant_usage_run_skip_handler, ) @@ -283,8 +285,6 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): # Maps view ref -> actual sql self.view_definitions: FileBackedDict[str] = FileBackedDict() - self.sql_parser_schema_resolver = self._init_schema_resolver() - self.add_config_to_report() atexit.register(cleanup, config) @@ -371,7 +371,10 @@ def usage_capability_test( report: BigQueryV2Report, ) -> CapabilityReport: usage_extractor = BigQueryUsageExtractor( - connection_conf, report, lambda ref: "" + connection_conf, + report, + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda ref: "", ) for project_id in project_ids: try: @@ -447,7 +450,9 @@ def _init_schema_resolver(self) -> SchemaResolver: self.config.lineage_parse_view_ddl or self.config.lineage_use_sql_parser ) schema_ingestion_enabled = ( - self.config.include_views and self.config.include_tables + self.config.include_schema_metadata + and self.config.include_tables + and self.config.include_views ) if schema_resolution_required and not schema_ingestion_enabled: @@ -545,10 +550,11 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: if not projects: return - for project_id in projects: - self.report.set_ingestion_stage(project_id.id, METADATA_EXTRACTION) - logger.info(f"Processing project: {project_id.id}") - yield from self._process_project(project_id) + if self.config.include_schema_metadata: + for project_id in projects: + self.report.set_ingestion_stage(project_id.id, METADATA_EXTRACTION) + logger.info(f"Processing project: {project_id.id}") + yield from self._process_project(project_id) if self.config.include_usage_statistics: yield from self.usage_extractor.get_usage_workunits( diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py index 55366d6c57cf8..8cef10ca23448 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py @@ -12,6 +12,7 @@ get_first_missing_key, get_first_missing_key_any, ) +from datahub.utilities.urns.dataset_urn import DatasetUrn AuditLogEntry = Any @@ 
-178,6 +179,17 @@ def from_string_name(cls, ref: str) -> "BigQueryTableRef": raise ValueError(f"invalid BigQuery table reference: {ref}") return cls(BigqueryTableIdentifier(parts[1], parts[3], parts[5])) + @classmethod + def from_urn(cls, urn: str) -> "BigQueryTableRef": + """Raises: ValueError if urn is not a valid BigQuery table URN.""" + dataset_urn = DatasetUrn.create_from_string(urn) + split = dataset_urn.get_dataset_name().rsplit(".", 3) + if len(split) == 3: + project, dataset, table = split + else: + _, project, dataset, table = split + return cls(BigqueryTableIdentifier(project, dataset, table)) + def is_temporary_table(self, prefixes: List[str]) -> bool: for prefix in prefixes: if self.table_identifier.dataset.startswith(prefix): @@ -566,7 +578,7 @@ def from_query_event( query_event: QueryEvent, debug_include_full_payloads: bool = False, ) -> "ReadEvent": - readEvent = ReadEvent( + return ReadEvent( actor_email=query_event.actor_email, timestamp=query_event.timestamp, resource=read_resource, @@ -577,8 +589,6 @@ def from_query_event( from_query=True, ) - return readEvent - @classmethod def from_exported_bigquery_audit_metadata( cls, row: BigQueryAuditMetadata, debug_include_full_payloads: bool = False diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index c13b08a6d9656..58f2a600c2ff7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -94,6 +94,11 @@ class BigQueryV2Config( description="Regex patterns for project_id to filter in ingestion.", ) + include_schema_metadata: bool = Field( + default=True, + description="Whether to ingest the BigQuery schema, i.e. 
projects, schemas, tables, and views.", + ) + usage: BigQueryUsageConfig = Field( default=BigQueryUsageConfig(), description="Usage related configs" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py index 9d92b011ee285..69913b383af87 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py @@ -33,6 +33,13 @@ class BigQueryAuditLogApiPerfReport(Report): list_log_entries: PerfTimer = field(default_factory=PerfTimer) +@dataclass +class BigQueryProcessingPerfReport(Report): + sql_parsing_sec: PerfTimer = field(default_factory=PerfTimer) + store_usage_event_sec: PerfTimer = field(default_factory=PerfTimer) + usage_state_size: Optional[str] = None + + @dataclass class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowReport): num_total_lineage_entries: TopKDict[str, int] = field(default_factory=TopKDict) @@ -120,8 +127,6 @@ class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowR read_reasons_stat: Counter[str] = field(default_factory=collections.Counter) operation_types_stat: Counter[str] = field(default_factory=collections.Counter) - usage_state_size: Optional[str] = None - exclude_empty_projects: Optional[bool] = None schema_api_perf: BigQuerySchemaApiPerfReport = field( @@ -130,6 +135,9 @@ class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowR audit_log_api_perf: BigQueryAuditLogApiPerfReport = field( default_factory=BigQueryAuditLogApiPerfReport ) + processing_perf: BigQueryProcessingPerfReport = field( + default_factory=BigQueryProcessingPerfReport + ) lineage_start_time: Optional[datetime] = None lineage_end_time: Optional[datetime] = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py index 65b559550ffc5..ccc64184f3346 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py @@ -35,7 +35,6 @@ AuditEvent, AuditLogEntry, BigQueryAuditMetadata, - BigqueryTableIdentifier, BigQueryTableRef, QueryEvent, ReadEvent, @@ -60,9 +59,9 @@ USAGE_EXTRACTION_USAGE_AGGREGATION, ) from datahub.metadata.schema_classes import OperationClass, OperationTypeClass -from datahub.utilities.bigquery_sql_parser import BigQuerySQLParser from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict from datahub.utilities.perf_timer import PerfTimer +from datahub.utilities.sqlglot_lineage import SchemaResolver, sqlglot_lineage logger: logging.Logger = logging.getLogger(__name__) @@ -284,7 +283,7 @@ def delete_original_read_events_for_view_query_events(self) -> None: ) def report_disk_usage(self, report: BigQueryV2Report) -> None: - report.usage_state_size = str( + report.processing_perf.usage_state_size = str( { "main": humanfriendly.format_size(os.path.getsize(self.conn.filename)), "queries": humanfriendly.format_size( @@ -310,11 +309,14 @@ def __init__( self, config: BigQueryV2Config, report: BigQueryV2Report, + *, + schema_resolver: SchemaResolver, dataset_urn_builder: Callable[[BigQueryTableRef], str], redundant_run_skip_handler: Optional[RedundantUsageRunSkipHandler] = None, ): self.config: BigQueryV2Config = config self.report: BigQueryV2Report = report + 
self.schema_resolver = schema_resolver self.dataset_urn_builder = dataset_urn_builder # Replace hash of query with uuid if there are hash conflicts self.uuid_to_query: Dict[str, str] = {} @@ -415,10 +417,11 @@ def generate_read_events_from_query( ) -> Iterable[AuditEvent]: try: tables = self.get_tables_from_query( - query_event_on_view.project_id, query_event_on_view.query, + default_project=query_event_on_view.project_id, + default_dataset=query_event_on_view.default_dataset, ) - assert tables is not None and len(tables) != 0 + assert len(tables) != 0 for table in tables: yield AuditEvent.create( ReadEvent.from_query_event(table, query_event_on_view) @@ -462,12 +465,15 @@ def _ingest_events( self.report.num_view_query_events += 1 for new_event in self.generate_read_events_from_query(query_event): - num_generated += self._store_usage_event( - new_event, usage_state, table_refs - ) - num_aggregated += self._store_usage_event( - audit_event, usage_state, table_refs - ) + with self.report.processing_perf.store_usage_event_sec: + num_generated += self._store_usage_event( + new_event, usage_state, table_refs + ) + with self.report.processing_perf.store_usage_event_sec: + num_aggregated += self._store_usage_event( + audit_event, usage_state, table_refs + ) + except Exception as e: logger.warning( f"Unable to store usage event {audit_event}", exc_info=True @@ -905,54 +911,38 @@ def _generate_filter(self, corrected_start_time, corrected_end_time): ) def get_tables_from_query( - self, default_project: str, query: str - ) -> Optional[List[BigQueryTableRef]]: + self, query: str, default_project: str, default_dataset: Optional[str] = None + ) -> List[BigQueryTableRef]: """ This method attempts to parse bigquery objects read in the query """ if not query: - return None + return [] - parsed_tables = set() try: - parser = BigQuerySQLParser( - query, - self.config.sql_parser_use_external_process, - use_raw_names=self.config.lineage_sql_parser_use_raw_names, - ) - tables = parser.get_tables() - except Exception as ex: + with self.report.processing_perf.sql_parsing_sec: + result = sqlglot_lineage( + query, + self.schema_resolver, + default_db=default_project, + default_schema=default_dataset, + ) + except Exception: logger.debug( - f"Sql parsing failed on this query on view: {query}. " - f"Usage won't be added. The error was {ex}." + f"Sql parsing failed on this query on view: {query}. Usage won't be added." 
) - return None + logger.debug(result.debug_info) + return [] - for table in tables: - parts = table.split(".") - if len(parts) == 2: - parsed_tables.add( - BigQueryTableRef( - BigqueryTableIdentifier( - project_id=default_project, dataset=parts[0], table=parts[1] - ) - ).get_sanitized_table_ref() - ) - elif len(parts) == 3: - parsed_tables.add( - BigQueryTableRef( - BigqueryTableIdentifier( - project_id=parts[0], dataset=parts[1], table=parts[2] - ) - ).get_sanitized_table_ref() - ) - else: - logger.debug( - f"Invalid table identifier {table} when parsing query on view {query}" - ) + parsed_table_refs = [] + for urn in result.in_tables: + try: + parsed_table_refs.append(BigQueryTableRef.from_urn(urn)) + except ValueError: + logger.debug(f"Invalid urn {urn} when parsing query on view {query}") self.report.num_view_query_events_failed_table_identification += 1 - return list(parsed_tables) + return parsed_table_refs def _report_error( self, label: str, e: Exception, group: Optional[str] = None diff --git a/metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py b/metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py deleted file mode 100644 index 4ad41f1fe23c9..0000000000000 --- a/metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py +++ /dev/null @@ -1,92 +0,0 @@ -import re -from typing import List - -import sqlparse - -from datahub.utilities.sql_parser import SqlLineageSQLParser, SQLParser - - -class BigQuerySQLParser(SQLParser): - parser: SQLParser - - def __init__( - self, - sql_query: str, - use_external_process: bool = False, - use_raw_names: bool = False, - ) -> None: - super().__init__(sql_query) - - self._parsed_sql_query = self.parse_sql_query(sql_query) - self.parser = SqlLineageSQLParser( - self._parsed_sql_query, use_external_process, use_raw_names - ) - - def parse_sql_query(self, sql_query: str) -> str: - sql_query = BigQuerySQLParser._parse_bigquery_comment_sign(sql_query) - sql_query = BigQuerySQLParser._escape_keyword_from_as_field_name(sql_query) - sql_query = BigQuerySQLParser._escape_cte_name_after_keyword_with(sql_query) - - sql_query = sqlparse.format( - sql_query.strip(), - reindent_aligned=True, - strip_comments=True, - ) - - sql_query = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - sql_query - ) - sql_query = BigQuerySQLParser._escape_object_name_after_keyword_from(sql_query) - sql_query = BigQuerySQLParser._remove_comma_before_from(sql_query) - - return sql_query - - @staticmethod - def _parse_bigquery_comment_sign(sql_query: str) -> str: - return re.sub(r"#(.*)", r"-- \1", sql_query, flags=re.IGNORECASE) - - @staticmethod - def _escape_keyword_from_as_field_name(sql_query: str) -> str: - return re.sub(r"(\w*\.from)", r"`\1`", sql_query, flags=re.IGNORECASE) - - @staticmethod - def _escape_cte_name_after_keyword_with(sql_query: str) -> str: - """ - Escape the first cte name in case it is one of reserved words - """ - return re.sub(r"(with\s)([^`\s()]+)", r"\1`\2`", sql_query, flags=re.IGNORECASE) - - @staticmethod - def _escape_table_or_view_name_at_create_statement(sql_query: str) -> str: - """ - Reason: in case table name contains hyphens which breaks sqllineage later on - """ - return re.sub( - r"(create.*\s)(table\s|view\s)([^`\s()]+)(?=\sas)", - r"\1\2`\3`", - sql_query, - flags=re.IGNORECASE, - ) - - @staticmethod - def _remove_comma_before_from(sql_query: str) -> str: - return re.sub(r",(\s*?)(?=from)", r" ", sql_query, flags=re.IGNORECASE) - - @staticmethod - def 
_escape_object_name_after_keyword_from(sql_query: str) -> str: - """ - Reason: in case table name contains hyphens which breaks sqllineage later on - Note: ignore cases of having keyword FROM as part of datetime function EXTRACT - """ - return re.sub( - r"(? List[str]: - return self.parser.get_tables() - - def get_columns(self) -> List[str]: - return self.parser.get_columns() diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index b43c8de4c8f3d..0f84871d6c96a 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -333,6 +333,9 @@ def _table_level_lineage( return tables, modified +TABLE_CASE_SENSITIVE_PLATFORMS = {"bigquery"} + + class SchemaResolver(Closeable): def __init__( self, @@ -402,7 +405,10 @@ def resolve_table(self, table: _TableName) -> Tuple[str, Optional[SchemaInfo]]: if schema_info: return urn_lower, schema_info - return urn_lower, None + if self.platform in TABLE_CASE_SENSITIVE_PLATFORMS: + return urn, None + else: + return urn_lower, None def _resolve_schema_info(self, urn: str) -> Optional[SchemaInfo]: if urn in self._schema_cache: diff --git a/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py b/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py index bbc3378450bff..9bbe9c45887a8 100644 --- a/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py +++ b/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py @@ -14,6 +14,7 @@ from datahub.ingestion.source.bigquery_v2.bigquery_report import BigQueryV2Report from datahub.ingestion.source.bigquery_v2.usage import BigQueryUsageExtractor from datahub.utilities.perf_timer import PerfTimer +from datahub.utilities.sqlglot_lineage import SchemaResolver from tests.performance.bigquery.bigquery_events import generate_events, ref_from_table from tests.performance.data_generation import ( NormalDistribution, @@ -47,7 +48,10 @@ def run_test(): usage_extractor = BigQueryUsageExtractor( config, report, - lambda ref: make_dataset_urn("bigquery", str(ref.table_identifier)), + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda ref: make_dataset_urn( + "bigquery", str(ref.table_identifier) + ), ) report.set_ingestion_stage("All", "Event Generation") @@ -83,7 +87,7 @@ def run_test(): print( f"Peak Memory Used: {humanfriendly.format_size(peak_memory_usage - pre_mem_usage)}" ) - print(f"Disk Used: {report.usage_state_size}") + print(f"Disk Used: {report.processing_perf.usage_state_size}") print(f"Hash collisions: {report.num_usage_query_hash_collisions}") diff --git a/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py b/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py index f807be747a193..755e9081dda39 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py +++ b/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py @@ -1,4 +1,35 @@ -from datahub.utilities.bigquery_sql_parser import BigQuerySQLParser +from typing import List + +from datahub.ingestion.source.bigquery_v2.bigquery_audit import BigQueryTableRef +from datahub.utilities.sqlglot_lineage import SchemaResolver, sqlglot_lineage + + +class BigQuerySQLParser: + def __init__(self, sql_query: str, schema_resolver: SchemaResolver) -> None: + self.result = sqlglot_lineage(sql_query, schema_resolver) + + def get_tables(self) -> List[str]: + ans = [] + for urn in self.result.in_tables: + table_ref 
= BigQueryTableRef.from_urn(urn) + ans.append(str(table_ref.table_identifier)) + return ans + + def get_columns(self) -> List[str]: + ans = [] + for col_info in self.result.column_lineage or []: + for col_ref in col_info.upstreams: + ans.append(col_ref.column) + return ans + + +def test_bigquery_sql_lineage_basic(): + parser = BigQuerySQLParser( + sql_query="""SELECT * FROM project_1.database_1.view_1""", + schema_resolver=SchemaResolver(platform="bigquery"), + ) + + assert parser.get_tables() == ["project_1.database_1.view_1"] def test_bigquery_sql_lineage_hash_as_comment_sign_is_accepted(): @@ -14,7 +45,8 @@ def test_bigquery_sql_lineage_hash_as_comment_sign_is_accepted(): -- this comment will not break sqllineage either # this comment will not break sqllineage either FROM `project.dataset.src_tbl` - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.dataset.src_tbl"] @@ -39,7 +71,7 @@ def test_bigquery_sql_lineage_camel_case_table(): # this comment will not break sqllineage either FROM `project.dataset.CamelCaseTable` """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.dataset.CamelCaseTable"] @@ -64,7 +96,7 @@ def test_bigquery_sql_lineage_camel_case_dataset(): # this comment will not break sqllineage either FROM `project.DataSet.table` """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.DataSet.table"] @@ -89,7 +121,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset(): # this comment will not break sqllineage either FROM `project.DataSet.CamelTable` """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.DataSet.CamelTable"] @@ -117,7 +149,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset_subquery(): SELECT * FROM `project.DataSet.CamelTable` ) """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.DataSet.CamelTable"] @@ -146,7 +178,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset_joins(): LEFT JOIN `project.DataSet3.CamelTable3` on c.id = b.id """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -179,7 +211,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset_joins_and_subquery(): LEFT JOIN (SELECT * FROM `project.DataSet3.CamelTable3`) c ON c.id = b.id """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -199,7 +231,8 @@ def test_bigquery_sql_lineage_keyword_data_is_accepted(): FROM `project.example_dataset.example_table` ) SELECT * FROM data - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.example_dataset.example_table"] @@ -213,7 +246,8 @@ def test_bigquery_sql_lineage_keyword_admin_is_accepted(): FROM `project.example_dataset.example_table` ) SELECT * FROM admin - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.example_dataset.example_table"] @@ -238,7 +272,8 @@ def test_bigquery_sql_lineage_cte_alias_as_keyword_is_accepted(): ) SELECT * FROM map - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -255,7 +290,8 @@ def test_bigquery_sql_lineage_create_or_replace_view_name_with_hyphens_is_accept 
FROM project.dataset.src_table_a UNION SELECT * FROM `project.dataset.src_table_b` - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -270,7 +306,8 @@ def test_bigquery_sql_lineage_source_table_name_with_hyphens_is_accepted(): CREATE OR REPLACE VIEW `project.dataset.test_view` AS SELECT * FROM test-project.dataset.src_table - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["test-project.dataset.src_table"] @@ -282,7 +319,8 @@ def test_bigquery_sql_lineage_from_as_column_name_is_accepted(): CREATE OR REPLACE VIEW `project.dataset.test_view` AS SELECT x.from AS col FROM project.dataset.src_table AS x - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.dataset.src_table"] diff --git a/metadata-ingestion/tests/unit/test_bigquery_sql_parser.py b/metadata-ingestion/tests/unit/test_bigquery_sql_parser.py deleted file mode 100644 index 2a73bfc5e8b68..0000000000000 --- a/metadata-ingestion/tests/unit/test_bigquery_sql_parser.py +++ /dev/null @@ -1,327 +0,0 @@ -import pytest - -from datahub.utilities.bigquery_sql_parser import BigQuerySQLParser - - -def test_bigquery_sql_parser_comments_are_removed(): - parser = BigQuerySQLParser( - sql_query=""" -/* -HERE IS A STANDARD COMMENT BLOCK -THIS WILL NOT BREAK sqllineage -*/ -CREATE OR REPLACE TABLE `project.dataset.test_view` AS -#This, comment will not break sqllineage -SELECT foo --- this comment will not break sqllineage either -# this comment will not break sqllineage either - FROM `project.dataset.src_table` -""" - ) - - assert ( - parser._parsed_sql_query - == """CREATE OR REPLACE TABLE `project.dataset.test_view` AS SELECT foo - FROM `project.dataset.src_table`""" - ) - - assert parser.get_tables() == ["project.dataset.src_table"] - - -def test_bigquery_sql_parser_formats_input_sql(): - parser = BigQuerySQLParser( - sql_query=""" -CREATE OR REPLACE TABLE `project.dataset.test_view` AS -SELECT foo FROM `project.dataset.src_table_a` AS a -INNER JOIN `project.dataset.src_table_b` AS b ON a.key_field = b.key_field -""" - ) - - assert ( - parser._parsed_sql_query - == """CREATE OR REPLACE TABLE `project.dataset.test_view` AS SELECT foo - FROM `project.dataset.src_table_a` AS a - INNER JOIN `project.dataset.src_table_b` AS b - ON a.key_field = b.key_field""" - ) - - assert parser.get_tables() == [ - "project.dataset.src_table_a", - "project.dataset.src_table_b", - ] - - -def test_remove_comma_before_from(): - assert ( - BigQuerySQLParser._remove_comma_before_from( - """ -select a, b,from `project.dataset.table_name_1` -""" - ) - == """ -select a, b from `project.dataset.table_name_1` -""" - ) - - assert ( - BigQuerySQLParser._remove_comma_before_from( - """ -select a, b from `project.dataset.table_name_1` -""" - ) - == """ -select a, b from `project.dataset.table_name_1` -""" - ) - - assert ( - BigQuerySQLParser._remove_comma_before_from( - """ -select - a, - b, -from `project.dataset.table_name_1` -""" - ) - == """ -select - a, - b from `project.dataset.table_name_1` -""" - ) - - -def test_bigquery_sql_parser_subquery(): - parser = BigQuerySQLParser( - sql_query=""" - create or replace table smoke_test_db.table_from_view_and_table - as (select b.date_utc, v.revenue from smoke_test_db.base_table b, smoke_test_db.view_from_table v - """ - ) - assert parser.get_tables() == [ - "smoke_test_db.base_table", - "smoke_test_db.view_from_table", - ] - - -def 
test_bigquery_sql_parser_comment_sign_switched_correctly(): - sql_query = BigQuerySQLParser._parse_bigquery_comment_sign( - """ -#upper comment -SELECT * FROM hello -# lower comment -""" - ) - - assert ( - sql_query - == """ --- upper comment -SELECT * FROM hello --- lower comment -""" - ) - - -def test_bigquery_sql_parser_keyword_from_is_escaped_if_used_as_fieldname(): - sql_query = BigQuerySQLParser._escape_keyword_from_as_field_name( - """ -SELECT hello.from AS col FROM hello -""" - ) - - assert ( - sql_query - == """ -SELECT `hello.from` AS col FROM hello -""" - ) - - -def test_bigquery_sql_parser_first_cte_name_is_escaped(): - sql_query = BigQuerySQLParser._escape_cte_name_after_keyword_with( - """ -CREATE OR REPLACE VIEW `test_view` AS -WITH cte_1 AS ( - SELECT * FROM foo -), -cte_2 AS ( - SELECT * FROM bar -) -SELECT * FROM cte_1 UNION ALL -SELECT * FROM cte_2 -""" - ) - - assert ( - sql_query - == """ -CREATE OR REPLACE VIEW `test_view` AS -WITH `cte_1` AS ( - SELECT * FROM foo -), -cte_2 AS ( - SELECT * FROM bar -) -SELECT * FROM cte_1 UNION ALL -SELECT * FROM cte_2 -""" - ) - - -def test_bigquery_sql_parser_table_name_is_escaped_at_create_statement(): - sql_query_create = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE TABLE project.dataset.test_table AS -col_1 STRING, -col_2 STRING -""" - ) - - sql_query_create_or_replace = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE OR REPLACE TABLE project.dataset.test_table AS -col_1 STRING, -col_2 STRING -""" - ) - - assert ( - sql_query_create - == """ -CREATE TABLE `project.dataset.test_table` AS -col_1 STRING, -col_2 STRING -""" - ) - assert ( - sql_query_create_or_replace - == """ -CREATE OR REPLACE TABLE `project.dataset.test_table` AS -col_1 STRING, -col_2 STRING -""" - ) - - -def test_bigquery_sql_parser_view_name_is_escaped_at_create_statement(): - sql_query_create = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE VIEW project.dataset.test_view AS -SELECT * FROM project.dataset.src_table -""" - ) - - sql_query_create_or_replace = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE OR REPLACE VIEW project.dataset.test_view AS -SELECT * FROM project.dataset.src_table -""" - ) - - assert ( - sql_query_create - == """ -CREATE VIEW `project.dataset.test_view` AS -SELECT * FROM project.dataset.src_table -""" - ) - assert ( - sql_query_create_or_replace - == """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT * FROM project.dataset.src_table -""" - ) - - -def test_bigquery_sql_parser_object_name_is_escaped_after_keyword_from(): - sql_query = BigQuerySQLParser._escape_object_name_after_keyword_from( - """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT * FROM src-project.dataset.src_table_a UNION ALL -SELECT * FROM project.dataset.src_table_b -""" - ) - - assert ( - sql_query - == """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT * FROM `src-project.dataset.src_table_a` UNION ALL -SELECT * FROM `project.dataset.src_table_b` -""" - ) - - -def test_bigquery_sql_parser_field_name_is_not_escaped_after_keyword_from_in_datetime_functions(): - sql_query = BigQuerySQLParser._escape_object_name_after_keyword_from( - """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT -EXTRACT(MICROSECOND FROM time_field) AS col_1, -EXTRACT(MILLISECOND FROM time_field) AS col_2, -EXTRACT(SECOND FROM time_field) AS col_3, -EXTRACT(MINUTE FROM time_field) AS col_4, -EXTRACT(HOUR FROM 
time_field) AS col_5, -EXTRACT(DAYOFWEEK FROM time_field) AS col_6, -EXTRACT(DAY FROM time_field) AS col_7, -EXTRACT(DAYOFYEAR FROM time_field) AS col_8, -EXTRACT(WEEK FROM time_field) AS col_9, -EXTRACT(WEEK FROM time_field) AS col_10, -EXTRACT(ISOWEEK FROM time_field) AS col_11, -EXTRACT(MONTH FROM time_field) AS col_12, -EXTRACT(QUARTER FROM time_field) AS col_13, -EXTRACT(YEAR FROM time_field) AS col_14, -EXTRACT(ISOYEAR FROM time_field) AS col_15, -EXTRACT(DATE FROM time_field) AS col_16, -EXTRACT(TIME FROM time_field) AS col_17 -FROM src-project.dataset.src_table_a -""" - ) - - assert ( - sql_query - == """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT -EXTRACT(MICROSECOND FROM time_field) AS col_1, -EXTRACT(MILLISECOND FROM time_field) AS col_2, -EXTRACT(SECOND FROM time_field) AS col_3, -EXTRACT(MINUTE FROM time_field) AS col_4, -EXTRACT(HOUR FROM time_field) AS col_5, -EXTRACT(DAYOFWEEK FROM time_field) AS col_6, -EXTRACT(DAY FROM time_field) AS col_7, -EXTRACT(DAYOFYEAR FROM time_field) AS col_8, -EXTRACT(WEEK FROM time_field) AS col_9, -EXTRACT(WEEK FROM time_field) AS col_10, -EXTRACT(ISOWEEK FROM time_field) AS col_11, -EXTRACT(MONTH FROM time_field) AS col_12, -EXTRACT(QUARTER FROM time_field) AS col_13, -EXTRACT(YEAR FROM time_field) AS col_14, -EXTRACT(ISOYEAR FROM time_field) AS col_15, -EXTRACT(DATE FROM time_field) AS col_16, -EXTRACT(TIME FROM time_field) AS col_17 -FROM `src-project.dataset.src_table_a` -""" - ) - - -def test_bigquery_sql_parser_with_semicolon_in_from(): - sql_query = """CREATE VIEW `acryl-staging.smoke_test_db.view_from_table`\nAS select * from smoke_test_db.base_table;""" - - table_list = BigQuerySQLParser(sql_query).get_tables() - table_list.sort() - assert table_list == ["smoke_test_db.base_table"] - - -@pytest.mark.xfail -def test_bigquery_sql_parser_with_parenthesis_in_from(): - sql_query = """ - CREATE VIEW `acryl-staging.smoke_test_db.view_from_table` AS - select * from smoke_test_db.base_table LEFT JOIN UNNEST(my_array) ON day1 = day2; - """ - - table_list = BigQuerySQLParser(sql_query).get_tables() - table_list.sort() - assert table_list == ["smoke_test_db.base_table"] diff --git a/metadata-ingestion/tests/unit/test_bigquery_usage.py b/metadata-ingestion/tests/unit/test_bigquery_usage.py index c0055763bc15b..664d3112810ff 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_usage.py +++ b/metadata-ingestion/tests/unit/test_bigquery_usage.py @@ -35,6 +35,7 @@ TimeWindowSizeClass, ) from datahub.testing.compare_metadata_json import diff_metadata_json +from datahub.utilities.sqlglot_lineage import SchemaResolver from tests.performance.bigquery.bigquery_events import generate_events, ref_from_table from tests.performance.data_generation import generate_data, generate_queries from tests.performance.data_model import Container, FieldAccess, Query, Table, View @@ -202,7 +203,10 @@ def usage_extractor(config: BigQueryV2Config) -> BigQueryUsageExtractor: return BigQueryUsageExtractor( config, report, - lambda ref: make_dataset_urn("bigquery", str(ref.table_identifier)), + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda ref: make_dataset_urn( + "bigquery", str(ref.table_identifier) + ), ) @@ -961,21 +965,21 @@ def test_operational_stats( def test_get_tables_from_query(usage_extractor): assert usage_extractor.get_tables_from_query( - PROJECT_1, "SELECT * FROM project-1.database_1.view_1" + "SELECT * FROM project-1.database_1.view_1", default_project=PROJECT_1 ) == [ 
BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "view_1")) ] assert usage_extractor.get_tables_from_query( - PROJECT_1, "SELECT * FROM database_1.view_1" + "SELECT * FROM database_1.view_1", default_project=PROJECT_1 ) == [ BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "view_1")) ] assert sorted( usage_extractor.get_tables_from_query( - PROJECT_1, "SELECT v.id, v.name, v.total, t.name as name1 FROM database_1.view_1 as v inner join database_1.table_1 as t on v.id=t.id", + default_project=PROJECT_1, ) ) == [ BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "table_1")), @@ -984,8 +988,8 @@ def test_get_tables_from_query(usage_extractor): assert sorted( usage_extractor.get_tables_from_query( - PROJECT_1, "CREATE TABLE database_1.new_table AS SELECT v.id, v.name, v.total, t.name as name1 FROM database_1.view_1 as v inner join database_1.table_1 as t on v.id=t.id", + default_project=PROJECT_1, ) ) == [ BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "table_1")), diff --git a/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py b/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py index 44fd840f28d59..25e849a509293 100644 --- a/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py +++ b/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py @@ -10,6 +10,7 @@ from datahub.ingestion.source.bigquery_v2.bigquery_config import BigQueryV2Config from datahub.ingestion.source.bigquery_v2.bigquery_report import BigQueryV2Report from datahub.ingestion.source.bigquery_v2.usage import BigQueryUsageExtractor +from datahub.utilities.sqlglot_lineage import SchemaResolver FROZEN_TIME = "2021-07-20 00:00:00" @@ -114,7 +115,10 @@ def test_bigqueryv2_filters(): corrected_start_time = config.start_time - config.max_query_duration corrected_end_time = config.end_time + config.max_query_duration filter: str = BigQueryUsageExtractor( - config, BigQueryV2Report(), lambda x: "" + config, + BigQueryV2Report(), + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda x: "", )._generate_filter(corrected_start_time, corrected_end_time) assert filter == expected_filter From 60347d6735ea2136d721bbf6644ae82df6519d9c Mon Sep 17 00:00:00 2001 From: Diego Reiriz Cores Date: Thu, 28 Dec 2023 12:09:10 +0100 Subject: [PATCH 304/792] fix(ingest/mongodb): support disabling schemaSamplingSize (#9295) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/mongodb.py | 8 +++++--- .../tests/integration/mongodb/test_mongodb.py | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py index 2aa8b1d37d477..283ab652f23c6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py @@ -102,7 +102,7 @@ class MongoDBConfig( ) schemaSamplingSize: Optional[PositiveInt] = Field( default=1000, - description="Number of documents to use when inferring schema size. If set to `0`, all documents will be scanned.", + description="Number of documents to use when inferring schema size. 
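
The practical effect of the MongoDB change that follows is that a `schemaSamplingSize` of `null` drops the sampling stage entirely, so schema inference scans every document. A condensed sketch of the resulting pipeline construction; `build_pipeline` is illustrative only (no live MongoDB connection, and the real code may carry additional stages ahead of the sampling one):

```python
from typing import Any, Dict, List, Optional


def build_pipeline(
    sample_size: Optional[int], use_random_sampling: bool
) -> List[Dict[str, Any]]:
    aggregations: List[Dict[str, Any]] = []
    if sample_size:  # None now means "scan every document"
        if use_random_sampling:
            aggregations.append({"$sample": {"size": sample_size}})
        else:
            aggregations.append({"$limit": sample_size})
    return aggregations


assert build_pipeline(1000, True) == [{"$sample": {"size": 1000}}]
assert build_pipeline(1000, False) == [{"$limit": 1000}]
assert build_pipeline(None, True) == []  # schemaSamplingSize: null
```
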
If set to `null`, all documents will be scanned.", ) useRandomSampling: bool = Field( default=True, @@ -225,13 +225,15 @@ def construct_schema_pymongo( ] if use_random_sampling: # get sample documents in collection - aggregations.append({"$sample": {"size": sample_size}}) + if sample_size: + aggregations.append({"$sample": {"size": sample_size}}) documents = collection.aggregate( aggregations, allowDiskUse=True, ) else: - aggregations.append({"$limit": sample_size}) + if sample_size: + aggregations.append({"$limit": sample_size}) documents = collection.aggregate(aggregations, allowDiskUse=True) return construct_schema(list(documents), delimiter) diff --git a/metadata-ingestion/tests/integration/mongodb/test_mongodb.py b/metadata-ingestion/tests/integration/mongodb/test_mongodb.py index 56fb471d4c9f1..0a0ba55ff5b80 100644 --- a/metadata-ingestion/tests/integration/mongodb/test_mongodb.py +++ b/metadata-ingestion/tests/integration/mongodb/test_mongodb.py @@ -26,6 +26,7 @@ def test_mongodb_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time "password": "examplepass", "maxDocumentSize": 25000, "platform_instance": "instance", + "schemaSamplingSize": None, }, }, "sink": { From 2cd38a469d5ac607bd510a0ca045d151b4657afd Mon Sep 17 00:00:00 2001 From: Tony Ouyang Date: Thu, 28 Dec 2023 03:09:30 -0800 Subject: [PATCH 305/792] fix(ingest): Fix mongodb ingestion when platform_instance is missing from recipe (#9486) Co-authored-by: Harshal Sheth --- metadata-ingestion/src/datahub/ingestion/source/mongodb.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py index 283ab652f23c6..577da91ee82da 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py @@ -379,6 +379,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: platform_instance=self.config.platform_instance, ) + # Initialize data_platform_instance with a default value + data_platform_instance = None if self.config.platform_instance: data_platform_instance = DataPlatformInstanceClass( platform=make_data_platform_urn(platform), From e343b69ce4881ceefdf4af0cafea29188092de52 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 28 Dec 2023 16:50:13 +0530 Subject: [PATCH 306/792] fix(ingest/snowflake): explicit set schema if public schema is absent (#9526) --- .../source/snowflake/snowflake_profiler.py | 14 ++++++++++++++ .../ingestion/source/snowflake/snowflake_query.py | 4 ++++ .../source/state/stateful_ingestion_base.py | 2 +- 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 4bda7da422e9d..9a37f779bbcd5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -24,6 +24,8 @@ logger = logging.getLogger(__name__) +PUBLIC_SCHEMA = "PUBLIC" + class SnowflakeProfiler(GenericProfiler, SnowflakeCommonMixin): def __init__( @@ -36,6 +38,7 @@ def __init__( self.config: SnowflakeV2Config = config self.report: SnowflakeV2Report = report self.logger = logger + self.database_default_schema: Dict[str, str] = dict() def get_workunits( self, database: SnowflakeDatabase, db_tables: Dict[str, 
List[SnowflakeTable]] @@ -47,6 +50,10 @@ def get_workunits( "max_overflow", self.config.profiling.max_workers ) + if PUBLIC_SCHEMA not in db_tables: + # If PUBLIC schema is absent, we use any one of schemas as default schema + self.database_default_schema[database.name] = list(db_tables.keys())[0] + profile_requests = [] for schema in database.schemas: for table in db_tables[schema.name]: @@ -136,9 +143,16 @@ def get_profiler_instance( ) def callable_for_db_connection(self, db_name: str) -> Callable: + schema_name = self.database_default_schema.get(db_name) + def get_db_connection(): conn = self.config.get_connection() conn.cursor().execute(SnowflakeQuery.use_database(db_name)) + + # As mentioned here - https://docs.snowflake.com/en/sql-reference/sql/use-database#usage-notes + # no schema is selected if PUBLIC schema is absent. We need to explicitly call `USE SCHEMA ` + if schema_name: + conn.cursor().execute(SnowflakeQuery.use_schema(schema_name)) return conn return get_db_connection diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py index 267f7cf074909..724e4392f1d61 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py @@ -80,6 +80,10 @@ def show_tags() -> str: def use_database(db_name: str) -> str: return f'use database "{db_name}"' + @staticmethod + def use_schema(schema_name: str) -> str: + return f'use schema "{schema_name}"' + @staticmethod def get_databases(db_name: Optional[str]) -> str: db_clause = f'"{db_name}".' if db_name is not None else "" diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py index 8a448f40e95b4..61d39b18f523d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py +++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py @@ -98,7 +98,7 @@ class StatefulIngestionConfigBase(GenericModel, Generic[CustomConfig]): ) -class StatefulLineageConfigMixin: +class StatefulLineageConfigMixin(ConfigModel): enable_stateful_lineage_ingestion: bool = Field( default=True, description="Enable stateful lineage ingestion." From 4de2c24249697fa68831f880fda216ddb46fba3d Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 28 Dec 2023 21:37:57 +0530 Subject: [PATCH 307/792] style(search): Border is too thick for sidebar (#9528) --- .../src/app/search/sidebar/BrowseSidebar.tsx | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx index c16bcdcaf6c72..1731727c14cfc 100644 --- a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx +++ b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx @@ -9,7 +9,6 @@ import useSidebarEntities from './useSidebarEntities'; import { ANTD_GRAY_V2 } from '../../entity/shared/constants'; import { ProfileSidebarResizer } from '../../entity/shared/containers/profile/sidebar/ProfileSidebarResizer'; - export const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; @@ -18,7 +17,6 @@ export const SidebarWrapper = styled.div<{ visible: boolean; width: number }>` width: ${(props) => (props.visible ? 
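
Recapping the Snowflake fix above as a standalone sketch: when `PUBLIC` is absent, an explicit `USE SCHEMA` must follow `USE DATABASE`, otherwise the profiler's session has no schema selected. `FakeConnection` and the database/schema names here are stand-ins used only to show the statement ordering the closure produces:

```python
from typing import Callable, List, Optional


class FakeConnection:
    def __init__(self) -> None:
        self.statements: List[str] = []

    def execute(self, sql: str) -> None:
        self.statements.append(sql)


def make_connection_factory(
    db_name: str, fallback_schema: Optional[str]
) -> Callable[[], FakeConnection]:
    def get_db_connection() -> FakeConnection:
        conn = FakeConnection()
        conn.execute(f'use database "{db_name}"')
        if fallback_schema:  # only needed when PUBLIC is absent
            conn.execute(f'use schema "{fallback_schema}"')
        return conn

    return get_db_connection


conn = make_connection_factory("ANALYTICS", "REPORTING")()
assert conn.statements == ['use database "ANALYTICS"', 'use schema "REPORTING"']
```
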
`${props.width}px` : '0')}; min-width: ${(props) => (props.visible ? `${props.width}px` : '0')}; transition: width 250ms ease-in-out; - border-right: 1px solid ${(props) => props.theme.styles['border-color-base']}; background-color: ${ANTD_GRAY_V2[1]}; background: white; `; @@ -53,7 +51,12 @@ const BrowseSidebar = ({ visible }: Props) => { return ( <> - + Navigate From 5321352852a511bf92685290fc8a4371faaed876 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 29 Dec 2023 12:53:58 +0530 Subject: [PATCH 308/792] style(ui): humanise duration shown on ingestion page (#9530) --- .../executions/IngestionExecutionTable.tsx | 8 +++---- .../src/app/shared/formatDuration.ts | 21 +++++++++++++++++++ 2 files changed, 24 insertions(+), 5 deletions(-) create mode 100644 datahub-web-react/src/app/shared/formatDuration.ts diff --git a/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx b/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx index 8c81cc36ae3f9..a9d9283ef1377 100644 --- a/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx +++ b/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx @@ -4,6 +4,7 @@ import { StyledTable } from '../../../entity/shared/components/styled/StyledTabl import { ExecutionRequest } from '../../../../types.generated'; import { ButtonsColumn, SourceColumn, StatusColumn, TimeColumn } from './IngestionExecutionTableColumns'; import { SUCCESS } from '../utils'; +import { formatDuration } from '../../../shared/formatDuration'; interface Props { executionRequests: ExecutionRequest[]; @@ -34,13 +35,10 @@ export default function IngestionExecutionTable({ render: TimeColumn, }, { - title: 'Duration (s)', + title: 'Duration', dataIndex: 'duration', key: 'duration', - render: (durationMs: number) => { - const seconds = (durationMs && `${durationMs / 1000}s`) || 'None'; - return seconds; - }, + render: (durationMs: number) => formatDuration(durationMs), }, { title: 'Status', diff --git a/datahub-web-react/src/app/shared/formatDuration.ts b/datahub-web-react/src/app/shared/formatDuration.ts new file mode 100644 index 0000000000000..1028b46f70b31 --- /dev/null +++ b/datahub-web-react/src/app/shared/formatDuration.ts @@ -0,0 +1,21 @@ +export const formatDuration = (durationMs: number): string => { + if (!durationMs) return 'None'; + + const seconds = durationMs / 1000; + + if (seconds < 60) { + return `${seconds.toFixed(1)} s`; + } + + const minutes = Math.floor(seconds / 60); + const remainingSeconds = Math.round(seconds % 60); + + if (minutes < 60) { + return `${minutes} min ${remainingSeconds} s`; + } + + const hours = Math.floor(minutes / 60); + const remainingMinutes = Math.round(minutes % 60); + + return `${hours} hr ${remainingMinutes} min`; +}; From 06bd9b988d3006d57350476ccec18b2a5e7aac37 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Fri, 29 Dec 2023 21:34:06 +0530 Subject: [PATCH 309/792] fix(cli): upsert for data product external url (#9534) --- metadata-ingestion/src/datahub/specific/dataproduct.py | 2 +- .../entities/dataproducts/golden_dataproduct_out_upsert.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/specific/dataproduct.py b/metadata-ingestion/src/datahub/specific/dataproduct.py index 301a0ff63f2f0..bb49ac47b3ef8 100644 --- a/metadata-ingestion/src/datahub/specific/dataproduct.py +++ b/metadata-ingestion/src/datahub/specific/dataproduct.py @@ -152,7 
+152,7 @@ def set_external_url(self, external_url: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, "replace", - path="/external_url", + path="/externalUrl", value=external_url, ) return self diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json index ca4aafe848f60..97c2330f58bc7 100644 --- a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json +++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json @@ -5,7 +5,7 @@ "changeType": "PATCH", "aspectName": "dataProductProperties", "aspect": { - "value": "[{\"op\": \"replace\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"replace\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"replace\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"replace\", \"path\": \"/external_url\", \"value\": \"https://github.com/datahub-project/datahub\"}]", + "value": "[{\"op\": \"replace\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"replace\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"replace\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"replace\", \"path\": \"/externalUrl\", \"value\": \"https://github.com/datahub-project/datahub\"}]", "contentType": "application/json-patch+json" } }, From 31f9c796763677a4d452066d9b49b4088e65da19 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 2 Jan 2024 13:22:22 +0530 Subject: [PATCH 310/792] fix posts are failing to be created as Admin user (#9533) --- datahub-web-react/src/app/settings/posts/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/settings/posts/utils.ts b/datahub-web-react/src/app/settings/posts/utils.ts index ce48c7400738c..9958a0e8d9f0e 100644 --- a/datahub-web-react/src/app/settings/posts/utils.ts +++ b/datahub-web-react/src/app/settings/posts/utils.ts @@ -16,7 +16,7 @@ export const addToListPostCache = (client, newPost, pageSize) => { }); // Add our new post into the existing list. 
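
Stepping back to the data product fix (#9534) above: the JSON-patch path must match the aspect's camelCase field name, which is why the golden file changes in lockstep with the builder. A minimal sketch of the corrected patch operation as plain dicts; the URL value is the one from the golden file:

```python
import json

patch_ops = [
    {
        "op": "replace",
        "path": "/externalUrl",  # was "/external_url", which does not match the aspect field
        "value": "https://github.com/datahub-project/datahub",
    }
]
# Serialized with contentType application/json-patch+json, as in the golden file.
print(json.dumps(patch_ops))
```
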
- const newPosts = [newPost, ...(currData?.listPosts?.posts || [])]; + const newPosts = [...(currData?.listPosts?.posts || [])]; // Write our data back to the cache. client.writeQuery({ From 0bb838b904807c8fdc8266b6395023079b4dce4f Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 2 Jan 2024 21:45:55 +0530 Subject: [PATCH 311/792] fix(ui): while creating secrets via UI validate validate characters (#9548) --- datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx index 30f04d61b8fc9..c099d9a580efa 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx @@ -81,7 +81,7 @@ export const SecretBuilderModal = ({ initialState, visible, onSubmit, onCancel } }, { whitespace: false }, { min: 1, max: 50 }, - { pattern: /^[^\s\t${}\\,'"]+$/, message: 'This secret name is not allowed.' }, + { pattern: /^[a-zA-Z_]+[a-zA-Z0-9_]*$/, message: 'Please start the secret name with a letter, followed by letters, digits, or underscores only.' }, ]} hasFeedback > From 6d72640e9149343363885ec275d89fb48d9a9626 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:47:58 +0530 Subject: [PATCH 312/792] feat(ui): add databricks logo (#9473) --- datahub-web-react/src/app/ingest/source/builder/constants.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts index 08538729de40b..bd792d78856d5 100644 --- a/datahub-web-react/src/app/ingest/source/builder/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts @@ -103,6 +103,8 @@ export const CUSTOM = 'custom'; export const CUSTOM_URN = `urn:li:dataPlatform:${CUSTOM}`; export const UNITY_CATALOG = 'unity-catalog'; export const UNITY_CATALOG_URN = `urn:li:dataPlatform:${UNITY_CATALOG}`; +export const DATABRICKS = 'databricks'; +export const DATABRICKS_URN = `urn:li:dataPlatform:${DATABRICKS}`; export const DBT_CLOUD = 'dbt-cloud'; export const DBT_CLOUD_URN = `urn:li:dataPlatform:dbt`; export const VERTICA = 'vertica'; @@ -143,6 +145,7 @@ export const PLATFORM_URN_TO_LOGO = { [TRINO_URN]: trinoLogo, [SUPERSET_URN]: supersetLogo, [UNITY_CATALOG_URN]: databricksLogo, + [DATABRICKS_URN]: databricksLogo, [VERTICA_URN]: verticaLogo, [FIVETRAN_URN]: fivetranLogo, [CSV_URN]: csvLogo, From 29f2142a2c128f7f165f9011eff3bc647ae92185 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:48:43 +0530 Subject: [PATCH 313/792] feat(databricks): add hive metastore analyze profiling (#9511) --- metadata-ingestion/setup.py | 4 +- .../ingestion/source/sql/sql_config.py | 8 +- .../datahub/ingestion/source/unity/config.py | 16 +- .../source/unity/hive_metastore_proxy.py | 109 ++++++++- .../datahub/ingestion/source/unity/proxy.py | 125 +++++++--- .../ingestion/source/unity/proxy_profiling.py | 50 ++-- .../ingestion/source/unity/proxy_types.py | 24 +- .../datahub/ingestion/source/unity/report.py | 6 + .../datahub/ingestion/source/unity/source.py | 27 ++- .../datahub/ingestion/source/unity/usage.py | 5 +- .../unity/test_unity_catalog_ingest.py | 104 +++++++- 
.../unity/unity_catalog_mces_golden.json | 228 ++++++++++++++++-- 12 files changed, 600 insertions(+), 106 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 8e4791e253c7c..10db019b51381 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -251,9 +251,7 @@ databricks = { # 0.1.11 appears to have authentication issues with azure databricks - # 0.16.0 added py.typed support which caused mypy to fail. The databricks sdk is pinned until we resolve mypy issues. - # https://github.com/databricks/databricks-sdk-py/pull/483 - "databricks-sdk>=0.9.0,<0.16.0", + "databricks-sdk>=0.9.0", "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py index 54edab6f3b84b..c0dc70301ba34 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py @@ -112,7 +112,13 @@ def ensure_profiling_pattern_is_passed_to_profiling( cls, values: Dict[str, Any] ) -> Dict[str, Any]: profiling: Optional[GEProfilingConfig] = values.get("profiling") - if profiling is not None and profiling.enabled: + # Note: isinstance() check is required here as unity-catalog source reuses + # SQLCommonConfig with different profiling config than GEProfilingConfig + if ( + profiling is not None + and isinstance(profiling, GEProfilingConfig) + and profiling.enabled + ): profiling._allow_deny_patterns = values["profile_pattern"] return values diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 96971faeea69f..df36153af9d83 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -95,14 +95,6 @@ class UnityCatalogAnalyzeProfilerConfig(UnityCatalogProfilerConfig): description="Number of worker threads to use for profiling. 
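
The `max_workers` knob on the analyze profiler implies a thread-pool fan-out over tables. The scheduling below is an illustrative sketch of that pattern under the "set to 1 to disable" contract, not the source's actual executor wiring:

```python
from concurrent.futures import ThreadPoolExecutor
from typing import Callable, List, TypeVar

T = TypeVar("T")
R = TypeVar("R")


def profile_tables(
    tables: List[T], profile_one: Callable[[T], R], max_workers: int
) -> List[R]:
    if max_workers <= 1:  # max_workers=1 disables threading entirely
        return [profile_one(t) for t in tables]
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        # Executor.map preserves input order in its results.
        return list(pool.map(profile_one, tables))


assert profile_tables([1, 2, 3], lambda t: t * 10, max_workers=1) == [10, 20, 30]
assert profile_tables([1, 2, 3], lambda t: t * 10, max_workers=4) == [10, 20, 30]
```
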
Set to 1 to disable.", ) - @pydantic.root_validator(skip_on_failure=True) - def warehouse_id_required_for_profiling( - cls, values: Dict[str, Any] - ) -> Dict[str, Any]: - if values.get("enabled") and not values.get("warehouse_id"): - raise ValueError("warehouse_id must be set when profiling is enabled.") - return values - @property def include_columns(self): return not self.profile_table_level_only @@ -254,6 +246,7 @@ class UnityCatalogSourceConfig( description="Generate usage statistics.", ) + # TODO: Remove `type:ignore` by refactoring config profiling: Union[UnityCatalogGEProfilerConfig, UnityCatalogAnalyzeProfilerConfig] = Field( # type: ignore default=UnityCatalogGEProfilerConfig(), description="Data profiling configuration", @@ -316,7 +309,9 @@ def include_metastore_warning(cls, v: bool) -> bool: @pydantic.root_validator(skip_on_failure=True) def set_warehouse_id_from_profiling(cls, values: Dict[str, Any]) -> Dict[str, Any]: - profiling: Optional[UnityCatalogProfilerConfig] = values.get("profiling") + profiling: Optional[ + Union[UnityCatalogGEProfilerConfig, UnityCatalogAnalyzeProfilerConfig] + ] = values.get("profiling") if not values.get("warehouse_id") and profiling and profiling.warehouse_id: values["warehouse_id"] = profiling.warehouse_id if ( @@ -337,6 +332,9 @@ def set_warehouse_id_from_profiling(cls, values: Dict[str, Any]) -> Dict[str, An if values.get("warehouse_id") and profiling and not profiling.warehouse_id: profiling.warehouse_id = values["warehouse_id"] + if profiling and profiling.enabled and not profiling.warehouse_id: + raise ValueError("warehouse_id must be set when profiling is enabled.") + return values @pydantic.validator("schema_pattern", always=True) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py index 99b2ff998662c..814d86a2f3234 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py @@ -12,11 +12,14 @@ from datahub.ingestion.source.unity.proxy_types import ( Catalog, Column, + ColumnProfile, CustomCatalogType, HiveTableType, Metastore, Schema, Table, + TableProfile, + TableReference, ) logger = logging.getLogger(__name__) @@ -38,6 +41,18 @@ "binary": ColumnTypeName.BINARY, } +NUM_NULLS = "num_nulls" +DISTINCT_COUNT = "distinct_count" +MIN = "min" +MAX = "max" +AVG_COL_LEN = "avg_col_len" +MAX_COL_LEN = "max_col_len" +VERSION = "version" + +ROWS = "rows" +BYTES = "bytes" +TABLE_STAT_LIST = {ROWS, BYTES} + class HiveMetastoreProxy(Closeable): # TODO: Support for view lineage using SQL parsing @@ -67,7 +82,7 @@ def get_inspector(sqlalchemy_url: str, options: dict) -> Inspector: def hive_metastore_catalog(self, metastore: Optional[Metastore]) -> Catalog: return Catalog( - id=HIVE_METASTORE, + id=f"{metastore.id}.{HIVE_METASTORE}" if metastore else HIVE_METASTORE, name=HIVE_METASTORE, comment=None, metastore=metastore, @@ -95,9 +110,14 @@ def hive_metastore_tables(self, schema: Schema) -> Iterable[Table]: continue yield self._get_table(schema, table_name, False) - def _get_table(self, schema: Schema, table_name: str, is_view: bool) -> Table: + def _get_table( + self, + schema: Schema, + table_name: str, + is_view: bool = False, + ) -> Table: columns = self._get_columns(schema, table_name) - detailed_info = self._get_table_info(schema, table_name) + detailed_info = self._get_table_info(schema.name, table_name) comment = 
detailed_info.pop("Comment", None) storage_location = detailed_info.pop("Location", None) @@ -129,6 +149,74 @@ def _get_table(self, schema: Schema, table_name: str, is_view: bool) -> Table: comment=comment, ) + def get_table_profile( + self, ref: TableReference, include_column_stats: bool = False + ) -> TableProfile: + columns = self._get_columns( + Schema( + id=ref.schema, + name=ref.schema, + # This is okay, as none of this is used in profiling + catalog=self.hive_metastore_catalog(None), + comment=None, + owner=None, + ), + ref.table, + ) + detailed_info = self._get_table_info(ref.schema, ref.table) + + table_stats = ( + self._get_cached_table_statistics(detailed_info["Statistics"]) + if detailed_info.get("Statistics") + else {} + ) + + return TableProfile( + num_rows=int(table_stats[ROWS]) + if table_stats.get(ROWS) is not None + else None, + total_size=int(table_stats[BYTES]) + if table_stats.get(BYTES) is not None + else None, + num_columns=len(columns), + column_profiles=[ + self._get_column_profile(column.name, ref) for column in columns + ] + if include_column_stats + else [], + ) + + def _get_column_profile(self, column: str, ref: TableReference) -> ColumnProfile: + + props = self._column_describe_extended(ref.schema, ref.table, column) + col_stats = {} + for prop in props: + col_stats[prop[0]] = prop[1] + return ColumnProfile( + name=column, + null_count=int(col_stats[NUM_NULLS]) + if col_stats.get(NUM_NULLS) is not None + else None, + distinct_count=int(col_stats[DISTINCT_COUNT]) + if col_stats.get(DISTINCT_COUNT) is not None + else None, + min=col_stats.get(MIN), + max=col_stats.get(MAX), + avg_len=col_stats.get(AVG_COL_LEN), + max_len=col_stats.get(MAX_COL_LEN), + version=col_stats.get(VERSION), + ) + + def _get_cached_table_statistics(self, statistics: str) -> dict: + # statistics is in format "xx bytes" OR "1382 bytes, 2 rows" + table_stats = dict() + for prop in statistics.split(","): + value_key_list = prop.strip().split(" ") # value_key_list -> [value, key] + if len(value_key_list) == 2 and value_key_list[1] in TABLE_STAT_LIST: + table_stats[value_key_list[1]] = value_key_list[0] + + return table_stats + def _get_created_at(self, created_at: Optional[str]) -> Optional[datetime]: return ( datetime.strptime(created_at, "%a %b %d %H:%M:%S %Z %Y") @@ -171,8 +259,8 @@ def _get_table_type(self, type: Optional[str]) -> HiveTableType: else: return HiveTableType.UNKNOWN - def _get_table_info(self, schema: Schema, table_name: str) -> dict: - rows = self._describe_extended(schema.name, table_name) + def _get_table_info(self, schema_name: str, table_name: str) -> dict: + rows = self._describe_extended(schema_name, table_name) index = rows.index(("# Detailed Table Information", "", "")) rows = rows[index + 1 :] @@ -235,6 +323,17 @@ def _describe_extended(self, schema_name: str, table_name: str) -> List[Row]: """ return self._execute_sql(f"DESCRIBE EXTENDED `{schema_name}`.`{table_name}`") + def _column_describe_extended( + self, schema_name: str, table_name: str, column_name: str + ) -> List[Row]: + """ + Rows are structured as shown in examples here + https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-aux-describe-table.html#examples + """ + return self._execute_sql( + f"DESCRIBE EXTENDED `{schema_name}`.`{table_name}` {column_name}" + ) + def _execute_sql(self, sql: str) -> List[Row]: return self.inspector.bind.execute(sql).fetchall() diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py 
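
Before the Unity proxy diff, the statistics parsing just added is compact enough to restate as a runnable snippet. The `Statistics` cell from `DESCRIBE EXTENDED` arrives as e.g. `1382 bytes` or `1382 bytes, 2 rows`, and the values land keyed by `bytes`/`rows`:

```python
from typing import Dict

TABLE_STAT_LIST = {"rows", "bytes"}


def parse_statistics(statistics: str) -> Dict[str, str]:
    table_stats: Dict[str, str] = {}
    for prop in statistics.split(","):
        value_key = prop.strip().split(" ")  # e.g. ["1382", "bytes"]
        if len(value_key) == 2 and value_key[1] in TABLE_STAT_LIST:
            table_stats[value_key[1]] = value_key[0]
    return table_stats


assert parse_statistics("1382 bytes, 2 rows") == {"bytes": "1382", "rows": "2"}
assert parse_statistics("1382 bytes") == {"bytes": "1382"}
```
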
b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py index 13baa8b57a639..b414f3f188c23 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py @@ -4,7 +4,7 @@ import dataclasses import logging from datetime import datetime, timezone -from typing import Any, Dict, Iterable, List, Optional, Union +from typing import Any, Dict, Iterable, List, Optional, Union, cast from unittest.mock import patch from databricks.sdk import WorkspaceClient @@ -49,16 +49,19 @@ logger: logging.Logger = logging.getLogger(__name__) +@dataclasses.dataclass class TableInfoWithGeneration(TableInfo): generation: Optional[int] = None - @classmethod def as_dict(self) -> dict: return {**super().as_dict(), "generation": self.generation} @classmethod def from_dict(cls, d: Dict[str, Any]) -> "TableInfoWithGeneration": - table_info = super().from_dict(d) + table_info: TableInfoWithGeneration = cast( + TableInfoWithGeneration, + super().from_dict(d), + ) table_info.generation = d.get("generation") return table_info @@ -72,7 +75,10 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> "QueryFilterWithStatementTypes": - v = super().from_dict(d) + v: QueryFilterWithStatementTypes = cast( + QueryFilterWithStatementTypes, + super().from_dict(d), + ) v.statement_types = d["statement_types"] return v @@ -104,7 +110,7 @@ def __init__( def check_basic_connectivity(self) -> bool: return bool(self._workspace_client.catalogs.list()) - def assigned_metastore(self) -> Metastore: + def assigned_metastore(self) -> Optional[Metastore]: response = self._workspace_client.metastores.summary() return self._create_metastore(response) @@ -117,7 +123,9 @@ def catalogs(self, metastore: Optional[Metastore]) -> Iterable[Catalog]: logger.info("Catalogs not found") return [] for catalog in response: - yield self._create_catalog(metastore, catalog) + optional_catalog = self._create_catalog(metastore, catalog) + if optional_catalog: + yield optional_catalog def catalog( self, catalog_name: str, metastore: Optional[Metastore] @@ -126,7 +134,11 @@ def catalog( if not response: logger.info(f"Catalog {catalog_name} not found") return None - return self._create_catalog(metastore, response) + optional_catalog = self._create_catalog(metastore, response) + if optional_catalog: + return optional_catalog + + return None def schemas(self, catalog: Catalog) -> Iterable[Schema]: if ( @@ -140,7 +152,9 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]: logger.info(f"Schemas not found for catalog {catalog.id}") return [] for schema in response: - yield self._create_schema(catalog, schema) + optional_schema = self._create_schema(catalog, schema) + if optional_schema: + yield optional_schema def tables(self, schema: Schema) -> Iterable[Table]: if ( @@ -158,28 +172,38 @@ def tables(self, schema: Schema) -> Iterable[Table]: return [] for table in response: try: - yield self._create_table(schema, table) + optional_table = self._create_table( + schema, cast(TableInfoWithGeneration, table) + ) + if optional_table: + yield optional_table except Exception as e: logger.warning(f"Error parsing table: {e}") self.report.report_warning("table-parse", str(e)) def service_principals(self) -> Iterable[ServicePrincipal]: for principal in self._workspace_client.service_principals.list(): - yield self._create_service_principal(principal) + optional_sp = self._create_service_principal(principal) + if optional_sp: + yield optional_sp def 
workspace_notebooks(self) -> Iterable[Notebook]: for obj in self._workspace_client.workspace.list("/", recursive=True): - if obj.object_type == ObjectType.NOTEBOOK: + if obj.object_type == ObjectType.NOTEBOOK and obj.object_id and obj.path: yield Notebook( id=obj.object_id, path=obj.path, language=obj.language, created_at=datetime.fromtimestamp( obj.created_at / 1000, tz=timezone.utc - ), + ) + if obj.created_at + else None, modified_at=datetime.fromtimestamp( obj.modified_at / 1000, tz=timezone.utc - ), + ) + if obj.modified_at + else None, ) def query_history( @@ -204,7 +228,9 @@ def query_history( ) for query_info in self._query_history(filter_by=filter_by): try: - yield self._create_query(query_info) + optional_query = self._create_query(query_info) + if optional_query: + yield optional_query except Exception as e: logger.warning(f"Error parsing query: {e}") self.report.report_warning("query-parse", str(e)) @@ -229,15 +255,16 @@ def _query_history( "max_results": max_results, # Max batch size } - response: dict = self._workspace_client.api_client.do( + response: dict = self._workspace_client.api_client.do( # type: ignore method, path, body={**body, "filter_by": filter_by.as_dict()} ) + # we use default raw=False in above request, therefore will always get dict while True: if "res" not in response or not response["res"]: return for v in response["res"]: yield QueryInfo.from_dict(v) - response = self._workspace_client.api_client.do( + response = self._workspace_client.api_client.do( # type: ignore method, path, body={**body, "page_token": response["next_page_token"]} ) @@ -245,7 +272,7 @@ def list_lineages_by_table( self, table_name: str, include_entity_lineage: bool ) -> dict: """List table lineage by table name.""" - return self._workspace_client.api_client.do( + return self._workspace_client.api_client.do( # type: ignore method="GET", path="/api/2.0/lineage-tracking/table-lineage", body={ @@ -256,7 +283,7 @@ def list_lineages_by_table( def list_lineages_by_column(self, table_name: str, column_name: str) -> dict: """List column lineage by table name and column name.""" - return self._workspace_client.api_client.do( + return self._workspace_client.api_client.do( # type: ignore "GET", "/api/2.0/lineage-tracking/column-lineage", body={"table_name": table_name, "column_name": column_name}, @@ -325,7 +352,9 @@ def _escape_sequence(value: str) -> str: @staticmethod def _create_metastore( obj: Union[GetMetastoreSummaryResponse, MetastoreInfo] - ) -> Metastore: + ) -> Optional[Metastore]: + if not obj.name: + return None return Metastore( name=obj.name, id=UnityCatalogApiProxy._escape_sequence(obj.name), @@ -339,7 +368,10 @@ def _create_metastore( def _create_catalog( self, metastore: Optional[Metastore], obj: CatalogInfo - ) -> Catalog: + ) -> Optional[Catalog]: + if not obj.name: + self.report.num_catalogs_missing_name += 1 + return None catalog_name = self._escape_sequence(obj.name) return Catalog( name=obj.name, @@ -350,7 +382,10 @@ def _create_catalog( type=obj.catalog_type, ) - def _create_schema(self, catalog: Catalog, obj: SchemaInfo) -> Schema: + def _create_schema(self, catalog: Catalog, obj: SchemaInfo) -> Optional[Schema]: + if not obj.name: + self.report.num_schemas_missing_name += 1 + return None return Schema( name=obj.name, id=f"{catalog.id}.{self._escape_sequence(obj.name)}", @@ -359,11 +394,14 @@ def _create_schema(self, catalog: Catalog, obj: SchemaInfo) -> Schema: owner=obj.owner, ) - def _create_column(self, table_id: str, obj: ColumnInfo) -> Column: + def 
_create_column(self, table_id: str, obj: ColumnInfo) -> Optional[Column]:
+        if not obj.name:
+            self.report.num_columns_missing_name += 1
+            return None
         return Column(
             name=obj.name,
             id=f"{table_id}.{self._escape_sequence(obj.name)}",
-            type_text=obj.type_text,
+            type_text=obj.type_text or "",
             type_name=obj.type_name,
             type_scale=obj.type_scale,
             type_precision=obj.type_precision,
@@ -372,7 +410,12 @@ def _create_column(self, table_id: str, obj: ColumnInfo) -> Column:
             comment=obj.comment,
         )

-    def _create_table(self, schema: Schema, obj: TableInfoWithGeneration) -> Table:
+    def _create_table(
+        self, schema: Schema, obj: TableInfoWithGeneration
+    ) -> Optional[Table]:
+        if not obj.name:
+            self.report.num_tables_missing_name += 1
+            return None
         table_id = f"{schema.id}.{self._escape_sequence(obj.name)}"
         return Table(
             name=obj.name,
@@ -381,26 +424,40 @@ def _create_table(self, schema: Schema, obj: TableInfoWithGeneration) -> Table:
             schema=schema,
             storage_location=obj.storage_location,
             data_source_format=obj.data_source_format,
-            columns=[
-                self._create_column(table_id, column) for column in obj.columns or []
-            ],
+            columns=list(self._extract_columns(obj.columns, table_id))
+            if obj.columns
+            else [],
             view_definition=obj.view_definition or None,
             properties=obj.properties or {},
             owner=obj.owner,
             generation=obj.generation,
-            created_at=datetime.fromtimestamp(obj.created_at / 1000, tz=timezone.utc),
+            created_at=datetime.fromtimestamp(obj.created_at / 1000, tz=timezone.utc)
+            if obj.created_at
+            else None,
             created_by=obj.created_by,
             updated_at=datetime.fromtimestamp(obj.updated_at / 1000, tz=timezone.utc)
             if obj.updated_at
             else None,
             updated_by=obj.updated_by,
             table_id=obj.table_id,
             comment=obj.comment,
         )

+    def _extract_columns(
+        self, columns: List[ColumnInfo], table_id: str
+    ) -> Iterable[Column]:
+        for column in columns:
+            optional_column = self._create_column(table_id, column)
+            if optional_column:
+                yield optional_column
+
     def _create_service_principal(
         self, obj: DatabricksServicePrincipal
-    ) -> ServicePrincipal:
+    ) -> Optional[ServicePrincipal]:
+        if not obj.display_name or not obj.application_id:
+            return None
         return ServicePrincipal(
             id=f"{obj.id}.{self._escape_sequence(obj.display_name)}",
             display_name=obj.display_name,
@@ -408,8 +465,14 @@ def _create_service_principal(
             active=obj.active,
         )

-    @staticmethod
-    def _create_query(info: QueryInfo) -> Query:
+    def _create_query(self, info: QueryInfo) -> Optional[Query]:
+        if (
+            not info.query_text
+            or not info.query_start_time_ms
+            or not info.query_end_time_ms
+        ):
+            self.report.num_queries_missing_info += 1
+            return None
         return Query(
             query_id=info.query_id,
             query_text=info.query_text,
diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py
index ab38119d01a9b..5992f103ccac3 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py
@@ -14,6 +14,10 @@
     StatementStatus,
 )

+from datahub.ingestion.source.unity.hive_metastore_proxy import (
+    HIVE_METASTORE,
+    HiveMetastoreProxy,
+)
 from datahub.ingestion.source.unity.proxy_types import (
     ColumnProfile,
     TableProfile,
@@ -30,6 +34,7 @@ class UnityCatalogProxyProfilingMixin:
     _workspace_client: WorkspaceClient
     report: UnityCatalogReport
     warehouse_id: str
+    hive_metastore_proxy: Optional[HiveMetastoreProxy]

     def check_profiling_connectivity(self):
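        # Probes connectivity by fetching the configured SQL warehouse; an
        # invalid or unreachable warehouse_id surfaces here as an SDK error.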
self._workspace_client.warehouses.get(self.warehouse_id) @@ -136,6 +141,8 @@ def _analyze_table( def _check_analyze_table_statement_status( self, execute_response: ExecuteStatementResponse, max_wait_secs: int ) -> bool: + if not execute_response.statement_id or not execute_response.status: + return False statement_id: str = execute_response.statement_id status: StatementStatus = execute_response.status @@ -152,13 +159,15 @@ def _check_analyze_table_statement_status( statement_id ) self._raise_if_error(response, "get-statement") - status = response.status + status = response.status # type: ignore return status.state == StatementState.SUCCEEDED def _get_table_profile( self, ref: TableReference, include_columns: bool ) -> TableProfile: + if self.hive_metastore_proxy and ref.catalog == HIVE_METASTORE: + return self.hive_metastore_proxy.get_table_profile(ref, include_columns) table_info = self._workspace_client.tables.get(ref.qualified_table_name) return self._create_table_profile(table_info, include_columns=include_columns) @@ -166,7 +175,12 @@ def _create_table_profile( self, table_info: TableInfo, include_columns: bool ) -> TableProfile: # Warning: this implementation is brittle -- dependent on properties that can change - columns_names = [column.name for column in table_info.columns] + columns_names = ( + [column.name for column in table_info.columns if column.name] + if table_info.columns + else [] + ) + return TableProfile( num_rows=self._get_int(table_info, "spark.sql.statistics.numRows"), total_size=self._get_int(table_info, "spark.sql.statistics.totalSize"), @@ -182,6 +196,7 @@ def _create_table_profile( def _create_column_profile( self, column: str, table_info: TableInfo ) -> ColumnProfile: + tblproperties = table_info.properties or {} return ColumnProfile( name=column, null_count=self._get_int( @@ -190,25 +205,18 @@ def _create_column_profile( distinct_count=self._get_int( table_info, f"spark.sql.statistics.colStats.{column}.distinctCount" ), - min=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.min" - ), - max=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.max" - ), - avg_len=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.avgLen" - ), - max_len=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.maxLen" - ), - version=table_info.properties.get( + min=tblproperties.get(f"spark.sql.statistics.colStats.{column}.min"), + max=tblproperties.get(f"spark.sql.statistics.colStats.{column}.max"), + avg_len=tblproperties.get(f"spark.sql.statistics.colStats.{column}.avgLen"), + max_len=tblproperties.get(f"spark.sql.statistics.colStats.{column}.maxLen"), + version=tblproperties.get( f"spark.sql.statistics.colStats.{column}.version" ), ) def _get_int(self, table_info: TableInfo, field: str) -> Optional[int]: - value = table_info.properties.get(field) + tblproperties = table_info.properties or {} + value = tblproperties.get(field) if value is not None: try: return int(value) @@ -223,14 +231,18 @@ def _get_int(self, table_info: TableInfo, field: str) -> Optional[int]: def _raise_if_error( response: Union[ExecuteStatementResponse, GetStatementResponse], key: str ) -> None: - if response.status.state in [ + if response.status and response.status.state in [ StatementState.FAILED, StatementState.CANCELED, StatementState.CLOSED, ]: raise DatabricksError( - response.status.error.message, - error_code=response.status.error.error_code.value, + response.status.error.message + if response.status.error and 
response.status.error.message + else "Unknown Error", + error_code=response.status.error.error_code.value + if response.status.error and response.status.error.error_code + else "Unknown Error Code", status=response.status.state.value, context=key, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py index e5951cb0fa4ff..c66189d99f738 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py @@ -96,8 +96,8 @@ class CommonProperty: @dataclass class Metastore(CommonProperty): - global_metastore_id: str # Global across clouds and regions - metastore_id: str + global_metastore_id: Optional[str] # Global across clouds and regions + metastore_id: Optional[str] owner: Optional[str] cloud: Optional[str] region: Optional[str] @@ -107,7 +107,7 @@ class Metastore(CommonProperty): class Catalog(CommonProperty): metastore: Optional[Metastore] owner: Optional[str] - type: Union[CatalogType, CustomCatalogType] + type: Optional[Union[CatalogType, CustomCatalogType]] @dataclass @@ -224,14 +224,14 @@ class Table(CommonProperty): columns: List[Column] storage_location: Optional[str] data_source_format: Optional[DataSourceFormat] - table_type: Union[TableType, HiveTableType] + table_type: Optional[Union[TableType, HiveTableType]] owner: Optional[str] generation: Optional[int] created_at: Optional[datetime] created_by: Optional[str] updated_at: Optional[datetime] updated_by: Optional[str] - table_id: str + table_id: Optional[str] view_definition: Optional[str] properties: Dict[str, str] upstreams: Dict[TableReference, Dict[str, List[str]]] = field(default_factory=dict) @@ -252,16 +252,16 @@ def __post_init__(self): @dataclass class Query: - query_id: str + query_id: Optional[str] query_text: str - statement_type: QueryStatementType + statement_type: Optional[QueryStatementType] start_time: datetime end_time: datetime # User who ran the query - user_id: int + user_id: Optional[int] user_name: Optional[str] # Email or username # User whose credentials were used to run the query - executed_as_user_id: int + executed_as_user_id: Optional[int] executed_as_user_name: Optional[str] @@ -310,9 +310,9 @@ def __bool__(self): class Notebook: id: NotebookId path: str - language: Language - created_at: datetime - modified_at: datetime + language: Optional[Language] + created_at: Optional[datetime] + modified_at: Optional[datetime] upstreams: FrozenSet[TableReference] = field(default_factory=frozenset) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py index 0770d9d27055c..02eedb67f4cc2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py @@ -39,3 +39,9 @@ class UnityCatalogReport(IngestionStageReport, ProfilingSqlReport): num_profile_missing_size_in_bytes: int = 0 num_profile_failed_unsupported_column_type: int = 0 num_profile_failed_int_casts: int = 0 + + num_catalogs_missing_name: int = 0 + num_schemas_missing_name: int = 0 + num_tables_missing_name: int = 0 + num_columns_missing_name: int = 0 + num_queries_missing_info: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 43c5e24439377..1bc47c6307849 100644 --- 
a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -304,22 +304,28 @@ def process_notebooks(self) -> Iterable[MetadataWorkUnit]: yield from self._gen_notebook_workunits(notebook) def _gen_notebook_workunits(self, notebook: Notebook) -> Iterable[MetadataWorkUnit]: + + properties = {"path": notebook.path} + if notebook.language: + properties["language"] = notebook.language.value + mcps = MetadataChangeProposalWrapper.construct_many( entityUrn=self.gen_notebook_urn(notebook), aspects=[ DatasetPropertiesClass( name=notebook.path.rsplit("/", 1)[-1], - customProperties={ - "path": notebook.path, - "language": notebook.language.value, - }, + customProperties=properties, externalUrl=urljoin( self.config.workspace_url, f"#notebook/{notebook.id}" ), - created=TimeStampClass(int(notebook.created_at.timestamp() * 1000)), + created=TimeStampClass(int(notebook.created_at.timestamp() * 1000)) + if notebook.created_at + else None, lastModified=TimeStampClass( int(notebook.modified_at.timestamp() * 1000) - ), + ) + if notebook.modified_at + else None, ), SubTypesClass(typeNames=[DatasetSubTypes.NOTEBOOK]), BrowsePathsClass(paths=notebook.path.split("/")), @@ -352,6 +358,9 @@ def process_metastores(self) -> Iterable[MetadataWorkUnit]: metastore: Optional[Metastore] = None if self.config.include_metastore: metastore = self.unity_catalog_api_proxy.assigned_metastore() + if not metastore: + self.report.report_failure("Metastore", "Not found") + return yield from self.gen_metastore_containers(metastore) yield from self.process_catalogs(metastore) if metastore and self.config.include_metastore: @@ -705,13 +714,15 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: if table.generation is not None: custom_properties["generation"] = str(table.generation) - custom_properties["table_type"] = table.table_type.value + if table.table_type: + custom_properties["table_type"] = table.table_type.value if table.created_by: custom_properties["created_by"] = table.created_by if table.properties: custom_properties.update({k: str(v) for k, v in table.properties.items()}) - custom_properties["table_id"] = table.table_id + if table.table_id: + custom_properties["table_id"] = table.table_id if table.owner: custom_properties["owner"] = table.owner if table.updated_by: diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py b/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py index ab21c1a318659..f07e7a92d8762 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py @@ -117,7 +117,10 @@ def _get_workunits_internal( def _generate_operation_workunit( self, query: Query, table_info: QueryTableInfo ) -> Iterable[MetadataWorkUnit]: - if query.statement_type not in OPERATION_STATEMENT_TYPES: + if ( + not query.statement_type + or query.statement_type not in OPERATION_STATEMENT_TYPES + ): return None # Not sure about behavior when there are multiple target tables. This is a best attempt. 
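
Aside: the integration-test mocks below feed strings like "1024 bytes, 3 rows" through the hive-metastore statistics parsing introduced earlier in this patch. A minimal standalone sketch of that parsing (the helper names and the assertions here are illustrative, not part of the patch):

from typing import Dict, Optional

BYTES = "bytes"
ROWS = "rows"
TABLE_STAT_LIST = {BYTES, ROWS}

def parse_table_statistics(statistics: str) -> Dict[str, str]:
    # Hive reports e.g. "1382 bytes" or "1024 bytes, 3 rows"; keep only known stats.
    table_stats: Dict[str, str] = {}
    for prop in statistics.split(","):
        value_key = prop.strip().split(" ")  # e.g. ["1024", "bytes"]
        if len(value_key) == 2 and value_key[1] in TABLE_STAT_LIST:
            table_stats[value_key[1]] = value_key[0]
    return table_stats

def num_rows(statistics: str) -> Optional[int]:
    stats = parse_table_statistics(statistics)
    return int(stats[ROWS]) if stats.get(ROWS) is not None else None

assert parse_table_statistics("1024 bytes, 3 rows") == {"bytes": "1024", "rows": "3"}
assert num_rows("1382 bytes") is None  # row count is simply absent for some tables

Missing statistics just come back as None, which is why the TableProfile and ColumnProfile fields populated above are all Optional.
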
diff --git a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py index aab7630d57f46..05f1db0b932f8 100644 --- a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py +++ b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py @@ -186,6 +186,8 @@ def register_mock_data(workspace_client): "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", }, "generation": 2, "metastore_id": "2c983545-d403-4f87-9063-5b7e3b6d3736", @@ -200,6 +202,57 @@ def register_mock_data(workspace_client): ) ] + workspace_client.tables.get = lambda *args, **kwargs: databricks.sdk.service.catalog.TableInfo.from_dict( + { + "name": "quickstart_table", + "catalog_name": "quickstart_catalog", + "schema_name": "quickstart_schema", + "table_type": "MANAGED", + "data_source_format": "DELTA", + "columns": [ + { + "name": "columnA", + "type_text": "int", + "type_json": '{"name":"columnA","type":"integer","nullable":true,"metadata":{}}', + "type_name": "INT", + "type_precision": 0, + "type_scale": 0, + "position": 0, + "nullable": True, + }, + { + "name": "columnB", + "type_text": "string", + "type_json": '{"name":"columnB","type":"string","nullable":true,"metadata":{}}', + "type_name": "STRING", + "type_precision": 0, + "type_scale": 0, + "position": 1, + "nullable": True, + }, + ], + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "properties": { + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + }, + "generation": 2, + "metastore_id": "2c983545-d403-4f87-9063-5b7e3b6d3736", + "full_name": "quickstart_catalog.quickstart_schema.quickstart_table", + "data_access_configuration_id": "00000000-0000-0000-0000-000000000000", + "created_at": 1666185698688, + "created_by": "abc@acryl.io", + "updated_at": 1666186049633, + "updated_by": "abc@acryl.io", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + } + ) + workspace_client.service_principals.list.return_value = [ ServicePrincipal.from_dict(d) for d in [ @@ -220,7 +273,50 @@ def register_mock_data(workspace_client): def mock_hive_sql(query): - if query == "DESCRIBE EXTENDED `bronze_kambi`.`bet`": + + if query == "DESCRIBE EXTENDED `bronze_kambi`.`bet` betStatusId": + return [ + ("col_name", "betStatusId"), + ("data_type", "bigint"), + ("comment", None), + ("min", None), + ("max", None), + ("num_nulls", 0), + ("distinct_count", 1), + ("avg_col_len", 8), + ("max_col_len", 8), + ("histogram", None), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`bet` channelId": + return [ + ("col_name", "channelId"), + ("data_type", "bigint"), + ("comment", None), + ("min", None), + ("max", None), + ("num_nulls", 0), + ("distinct_count", 1), + ("avg_col_len", 8), + ("max_col_len", 8), + ("histogram", None), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`bet` combination": + return [ + ("col_name", "combination"), + ( + "data_type", + 
"struct>,eventId:bigint,eventName:string,eventStartDate:string,live:boolean,odds:double,outcomeIds:array,outcomeLabel:string,sportId:string,status:string,voidReason:string>>,payout:double,rewardExtraPayout:double,stake:double>", + ), + ("comment", None), + ("min", None), + ("max", None), + ("num_nulls", None), + ("distinct_count", None), + ("avg_col_len", None), + ("max_col_len", None), + ("histogram", None), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`bet`": return [ ("betStatusId", "bigint", None), ("channelId", "bigint", None), @@ -237,6 +333,7 @@ def mock_hive_sql(query): ("Created Time", "Wed Jun 22 05:14:56 UTC 2022", ""), ("Last Access", "UNKNOWN", ""), ("Created By", "Spark 3.2.1", ""), + ("Statistics", "1024 bytes, 3 rows", ""), ("Type", "MANAGED", ""), ("Location", "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", ""), ("Provider", "delta", ""), @@ -312,6 +409,11 @@ def test_ingestion(pytestconfig, tmp_path, requests_mock): "include_ownership": True, "include_hive_metastore": True, "warehouse_id": "test", + "profiling": { + "enabled": True, + "method": "analyze", + "call_analyze": False, + }, }, }, "sink": { diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 98a6615dd2b52..383f94144ffdc 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -504,7 +504,7 @@ "Last Access": "UNKNOWN", "Created By": "Spark 3.2.1", "Owner": "root", - "table_id": "hive_metastore.bronze_kambi.view1", + "table_id": "acryl_metastore.hive_metastore.bronze_kambi.view1", "created_at": "2022-06-22 05:14:56" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/view1", @@ -638,7 +638,7 @@ "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "hive_metastore.bronze_kambi.view1", + "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.view1", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1172,10 +1172,11 @@ "Table": "bet", "Last Access": "UNKNOWN", "Created By": "Spark 3.2.1", + "Statistics": "1024 bytes, 3 rows", "Owner": "root", "Is_managed_location": "true", "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", - "table_id": "hive_metastore.bronze_kambi.bet", + "table_id": "acryl_metastore.hive_metastore.bronze_kambi.bet", "created_at": "2022-06-22 05:14:56" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", @@ -1275,7 +1276,7 @@ "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "hive_metastore.bronze_kambi.bet", + "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.bet", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1731,15 +1732,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 
13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", "name": "quickstart_table", @@ -2061,15 +2064,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", "name": "quickstart_table", @@ -2527,15 +2532,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", "name": "quickstart_table", @@ -2857,15 +2864,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", "name": "quickstart_table", @@ -3323,15 +3332,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", "name": "quickstart_table", @@ -3653,15 +3664,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": 
"1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/quickstart_schema/quickstart_table", "name": "quickstart_table", @@ -3813,6 +3826,69 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920011, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703581191932, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 3, + "columnCount": 3, + "fieldProfiles": [ + { + "fieldPath": "betStatusId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + }, + { + "fieldPath": "channelId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + } + ], + "sizeInBytes": 1024 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", @@ -3829,6 +3905,30 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580406273, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", @@ -3845,6 +3945,78 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920008, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + 
"systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920011, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920012, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", @@ -3877,6 +4049,30 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920010, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", From 2d302fe754969a4ec64b678d6a4002558eee66b3 Mon Sep 17 00:00:00 2001 From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com> Date: Wed, 3 Jan 2024 13:59:20 +0530 Subject: [PATCH 314/792] fix(cypress): make setting manage policy test not flaky (#9547) --- .../cypress/e2e/settings/manage_policies.js | 247 ++++++++---------- 1 file changed, 104 insertions(+), 143 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js b/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js index 6515d92285e2e..0e69a4e7f287a 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js @@ -4,149 +4,110 @@ const platform_policy_edited = `Platform test policy ${test_id} EDITED`; const metadata_policy_name = `Metadata test policy ${test_id}`; const metadata_policy_edited = `Metadata test policy ${test_id} EDITED`; + + +function searchAndToggleMetadataPolicyStatus(metadataPolicyName, targetStatus) { + cy.get('[data-testid="search-input"]').should('be.visible'); + cy.get('[data-testid="search-input"]').eq(1).type(metadataPolicyName); + cy.contains('tr', metadataPolicyName).as('metadataPolicyRow'); + 
cy.contains(targetStatus).click();
+}
+
+function clickFocusAndType(id, text) {
+  cy.clickOptionWithTestId(id)
+    .focused().clear()
+    .type(text);
+}
+
+function updateAndSave(id, groupName, text) {
+  cy.clickOptionWithTestId(id).type(groupName);
+  cy.get(`[title='${text}']`).click();
+  cy.focused().blur();
+}
+
+function clickOnButton(buttonId) {
+  cy.get(`#${buttonId}`).click();
+}
+
+function createPolicy(description, policyName) {
+  clickFocusAndType("policy-description", description);
+  clickOnButton("nextButton");
+  updateAndSave("privileges", "All", "All Privileges");
+  clickOnButton("nextButton");
+  updateAndSave("users", "All", "All Users");
+  updateAndSave("groups", "All", "All Groups");
+  clickOnButton("saveButton");
+  cy.waitTextVisible("Successfully saved policy.");
+  cy.waitTextVisible(policyName);
+}
+
+function editPolicy(policyName, newPolicyName, description, policyEdited, visibleDescription) {
+  searchAndToggleMetadataPolicyStatus(policyName, 'EDIT');
+  cy.clickOptionWithTestId("policy-name");
+  cy.focused().clear().type(newPolicyName);
+  cy.clickOptionWithTestId("policy-description");
+  cy.focused().clear().type(description);
+  clickOnButton("nextButton");
+  clickOnButton("nextButton");
+  clickOnButton("saveButton");
+  cy.waitTextVisible("Successfully saved policy.");
+  cy.waitTextVisible(policyEdited);
+  cy.waitTextVisible(visibleDescription);
+}
+
+function deletePolicy(policyName, deleteModalTitle, policyEdited) {
+  searchAndToggleMetadataPolicyStatus(policyName, 'DEACTIVATE');
+  cy.waitTextVisible("Successfully deactivated policy.");
+  cy.contains('DEACTIVATE').should('not.exist');
+  cy.contains('ACTIVATE').click();
+  cy.waitTextVisible("Successfully activated policy.");
+  cy.get("[data-icon='delete']").click();
+  cy.waitTextVisible(deleteModalTitle);
+  cy.clickOptionWithText("Yes");
+  cy.waitTextVisible("Successfully removed policy.");
+  cy.ensureTextNotPresent(policyEdited);
+}
+
 describe("create and manage platform and metadata policies", () => {
+  beforeEach(() => {
+    cy.loginWithCredentials();
+    cy.visit("/settings/permissions/policies");
+  });
+
+  it("create platform policy", () => {
+    cy.waitTextVisible("Manage Permissions");
+    cy.clickOptionWithText("Create new policy");
+    clickFocusAndType("policy-name", platform_policy_name);
+    cy.get('[data-testid="policy-type"] [title="Metadata"]').click();
+    cy.clickOptionWithTestId("platform");
+    createPolicy(`Platform policy description ${test_id}`, platform_policy_name);
+  });
+
+  it("edit platform policy", () => {
+    editPolicy(`${platform_policy_name}`, platform_policy_edited,
+      `Platform policy description ${test_id} EDITED`,
+      platform_policy_edited, `Platform policy description ${test_id} EDITED`);
+  });
+
+  it("deactivate and activate platform policy", () => {
+    deletePolicy(`${platform_policy_edited}`, `Delete ${platform_policy_edited}`, `${platform_policy_edited}`);
+  });
+
+  it("create metadata policy", () => {
+    cy.clickOptionWithText("Create new policy");
+    clickFocusAndType("policy-name", metadata_policy_name);
+    cy.get('[data-testid="policy-type"]').should('have.text', 'Metadata');
+    createPolicy(`Metadata policy description ${test_id}`, metadata_policy_name);
+  });
+
+  it("edit metadata policy", () => {
+    editPolicy(`${metadata_policy_name}`, metadata_policy_edited,
+      `Metadata policy description ${test_id} EDITED`,
+      metadata_policy_edited, `Metadata policy description ${test_id} EDITED`);
+  });
+
+  it("deactivate and activate metadata policy", () => {
+    deletePolicy(`${metadata_policy_name}`, `Delete ${metadata_policy_name}`,
`${metadata_policy_edited}`) + }); - it("create platform policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.waitTextVisible("Manage Permissions"); - cy.clickOptionWithText("Create new policy"); - cy.clickOptionWithTestId("policy-name") - .focused() - .type(platform_policy_name); - cy.get('[data-testid="policy-type"] [title="Metadata"]').click(); - cy.clickOptionWithTestId("platform"); - cy.clickOptionWithTestId("policy-description") - .focused() - .type(`Platform policy description ${test_id}`); - cy.get("#nextButton").click(); - cy.get('[data-testid="privileges"]').type("All"); - cy.clickOptionWithText("All Privileges").focused().blur(); - cy.get("#nextButton").click(); - cy.get('[data-testid="users"]').type("All"); - cy.get("[title='All Users']").click(); - cy.focused().blur(); - cy.get('[data-testid="groups"]').type("All"); - cy.get("[title='All Groups']").click(); - cy.focused().blur(); - cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(platform_policy_name); - }); - - it("edit platform policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${platform_policy_name}` ) - .contains('EDIT') - .click(); - cy.clickOptionWithTestId("policy-name"); - cy.focused().clear().type(platform_policy_edited); - cy.clickOptionWithTestId("policy-description"); - cy.focused().clear().type(`Platform policy description ${test_id} EDITED`); - cy.get("#nextButton").click(); - cy.get("#nextButton").click(); - cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(platform_policy_edited); - cy.waitTextVisible(`Platform policy description ${test_id} EDITED`); - }); - - it("deactivate and activate platform policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${platform_policy_edited}` ) - .contains('DEACTIVATE') - .click(); - cy.waitTextVisible("Successfully deactivated policy.") - cy.contains('tr', `${platform_policy_edited}` ) - .contains('INACTIVE') - .should("be.visible"); - cy.contains('tr', `${platform_policy_edited}` ) - .contains('ACTIVATE') - .click(); - cy.waitTextVisible("Successfully activated policy.") - cy.contains('tr', `${platform_policy_edited}` ) - .contains('ACTIVE') - .should("be.visible"); - cy.contains('tr', `${platform_policy_edited}` ) - .find("[data-icon='delete']") - .click(); - cy.waitTextVisible(`Delete ${platform_policy_edited}`); - cy.clickOptionWithText("Yes"); - cy.waitTextVisible("Successfully removed policy."); - cy.ensureTextNotPresent(`${platform_policy_edited}`); - - }); - - it("create metadata policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.clickOptionWithText("Create new policy"); - cy.clickOptionWithTestId("policy-name") - .focused() - .type(metadata_policy_name); - cy.get('[data-testid="policy-type"]').should('have.text', 'Metadata'); - cy.clickOptionWithTestId("policy-description") - .focused() - .type(`Metadata policy description ${test_id}`); - cy.get("#nextButton").click(); - cy.get('[data-testid="privileges"]').type("All"); - cy.clickOptionWithText("All Privileges").focused().blur(); - cy.get("#nextButton").click(); - cy.get('[data-testid="users"]').type("All"); - cy.get("[title='All Users']").click(); - cy.focused().blur(); - cy.get('[data-testid="groups"]').type("All"); - cy.get("[title='All Groups']").click(); - cy.focused().blur(); - 
cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(metadata_policy_name); - }); - - it("edit metadata policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${metadata_policy_name}` ) - .contains('EDIT') - .click(); - cy.clickOptionWithTestId("policy-name") - cy.focused().clear().type(metadata_policy_edited); - cy.clickOptionWithTestId("policy-description"); - cy.focused().clear().type(`Metadata policy description ${test_id} EDITED`); - cy.get("#nextButton").click(); - cy.get("#nextButton").click(); - cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(metadata_policy_edited); - cy.waitTextVisible(`Metadata policy description ${test_id} EDITED`); - }); - - it("deactivate and activate metadata policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('DEACTIVATE') - .click(); - cy.waitTextVisible("Successfully deactivated policy.") - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('INACTIVE') - .should("be.visible"); - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('ACTIVATE') - .click(); - cy.waitTextVisible("Successfully activated policy.") - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('ACTIVE') - .should("be.visible"); - cy.contains('tr', `${metadata_policy_edited}` ) - .find("[data-icon='delete']") - .click(); - cy.waitTextVisible(`Delete ${metadata_policy_edited}`); - cy.clickOptionWithText("Yes"); - cy.waitTextVisible("Successfully removed policy."); - cy.ensureTextNotPresent(`${metadata_policy_edited}`); - }); - }); \ No newline at end of file From c395d86139c773cd374fa6a52587614787580192 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Wed, 3 Jan 2024 14:00:28 +0530 Subject: [PATCH 315/792] fix(ui): search user incorrect role shown (#9532) --- datahub-web-react/src/app/identity/user/SelectRole.tsx | 6 +++++- datahub-web-react/src/app/identity/user/UserList.tsx | 9 ++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/datahub-web-react/src/app/identity/user/SelectRole.tsx b/datahub-web-react/src/app/identity/user/SelectRole.tsx index 011eae0fbd8b3..deaa85f14b088 100644 --- a/datahub-web-react/src/app/identity/user/SelectRole.tsx +++ b/datahub-web-react/src/app/identity/user/SelectRole.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { UserOutlined } from '@ant-design/icons'; import { Select } from 'antd'; import { useApolloClient } from '@apollo/client'; @@ -49,6 +49,10 @@ export default function SelectRole({ user, userRoleUrn, selectRoleOptions, refet const [currentRoleUrn, setCurrentRoleUrn] = useState(defaultRoleUrn); const [isViewingAssignRole, setIsViewingAssignRole] = useState(false); + useEffect(() => { + setCurrentRoleUrn(defaultRoleUrn); + }, [defaultRoleUrn]); + const onSelectRole = (roleUrn: string) => { setCurrentRoleUrn(roleUrn); setIsViewingAssignRole(true); diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index 8e2bc21f0693f..22b44e5f2d625 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -52,6 +52,7 @@ export const UserList = () => { const params = 
QueryString.parse(location.search, { arrayFormat: 'comma' });
     const paramsQuery = (params?.query as string) || undefined;
     const [query, setQuery] = useState(undefined);
+    const [usersList, setUsersList] = useState<Array<any>>([]);
     useEffect(() => setQuery(paramsQuery), [paramsQuery]);

     const [page, setPage] = useState(1);
@@ -81,8 +82,9 @@ export const UserList = () => {
     });

     const totalUsers = usersData?.listUsers?.total || 0;
-    const users = usersData?.listUsers?.users || [];
-
+    useEffect(() => {
+        setUsersList(usersData?.listUsers?.users || []);
+    }, [usersData]);
     const onChangePage = (newPage: number) => {
         scrollToTop();
         setPage(newPage);
@@ -145,6 +147,7 @@ export const UserList = () => {
                     onQueryChange={(q) => {
                         setPage(1);
                         setQuery(q);
+                        setUsersList([]);
                     }}
                     entityRegistry={entityRegistry}
                     hideRecommendations
@@ -155,7 +158,7 @@ export const UserList = () => {
                     locale={{
                         emptyText: <Empty description="No Users!" image={Empty.PRESENTED_IMAGE_SIMPLE} />,
                     }}
-                    dataSource={users}
+                    dataSource={usersList}
                     renderItem={(item: any) => (
                         <UserListItem onDelete={() => handleDelete(item.urn as string)}

From 21075e606707df42f25c4ab2d37ef6b2d97daf0d Mon Sep 17 00:00:00 2001
From: Shirshanka Das
Date: Wed, 3 Jan 2024 00:39:58 -0800
Subject: [PATCH 316/792] fix(ci): make test flexible to allow sha-based cli
 versions (#9551)

---
 smoke-test/tests/read_only/test_services_up.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/smoke-test/tests/read_only/test_services_up.py b/smoke-test/tests/read_only/test_services_up.py
index cbe92625f4689..b1b3b1d6f4bd7 100644
--- a/smoke-test/tests/read_only/test_services_up.py
+++ b/smoke-test/tests/read_only/test_services_up.py
@@ -2,6 +2,7 @@

 import pytest
 import requests
+import re

 from tests.utils import get_gms_url, wait_for_healthcheck_util

@@ -13,6 +14,8 @@ def test_services_up():
     wait_for_healthcheck_util()


+def looks_like_a_short_sha(sha: str) -> bool:
+    return len(sha) == 7 and re.match(r"[0-9a-f]{7}", sha) is not None

 @pytest.mark.read_only
 def test_gms_config_accessible():
@@ -30,4 +33,4 @@ def test_gms_config_accessible():
     default_cli_version: str = gms_config["managedIngestion"]["defaultCliVersion"]
     print(f"Default CLI version: {default_cli_version}")
     assert not default_cli_version.startswith("@")
-    assert "." in default_cli_version
+    assert "." 
in default_cli_version or looks_like_a_short_sha(default_cli_version), "Default CLI version does not look like a version string"

From 2e3141e1db5be0b24c343812a885dc494168a7de Mon Sep 17 00:00:00 2001
From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com>
Date: Wed, 3 Jan 2024 18:59:16 +0530
Subject: [PATCH 317/792] tests(cypress): add navigation in search test (#9545)

---
 .../e2e/search/query_and_filter_search.js     | 156 ++++++++++++------
 1 file changed, 102 insertions(+), 54 deletions(-)

diff --git a/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js b/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js
index 4637310b86496..59105be587803 100644
--- a/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js
+++ b/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js
@@ -1,57 +1,105 @@
+const datasetNames = {
+  dashboardsType: "Baz Dashboard",
+  pipelinesType: "Users",
+  MlmoduleType: "cypress-model",
+  glossaryTermsType: "CypressColumnInfoType",
+  tags: "some-cypress-feature-1",
+  hivePlatform: "cypress_logging_events",
+  airflowPlatform: "User Creations",
+  awsPlatform: "project/root/events/logging_events_bckp",
+  hdfsPlatform: "SampleHdfsDataset"
+};
+
+const searchToExecute = (value) => {
+  cy.get("input[data-testid=search-input]").eq(0).type(`${value}{enter}`);
+  cy.waitTextPresent("Type");
+};
+
+const selectFilteredEntity = (textToClick, entity, url) => {
+  cy.get(`[data-testid=filter-dropdown-${textToClick}]`).click({ force: true });
+  cy.get(`[data-testid="filter-option-${entity}"]`).click({ force: true });
+  cy.get("[data-testid=update-filters]").click({ force: true });
+  cy.url().should("include", `${url}`);
+  cy.get("[data-testid=update-filters]").should("not.be.visible");
+  cy.get('.ant-pagination-next').scrollIntoView().should('be.visible');
+};
+
+const verifyFilteredEntity = (text) => {
+  cy.get('.ant-typography').contains(text).should('be.visible');
+};
+
 describe("auto-complete dropdown, filter plus query search test", () => {
+
+  beforeEach(() => {
+    cy.loginWithCredentials();
+    cy.visit('/');
+  });
+
+  it.skip("Verify the 'filter by type' section + query", () => {
+
+    //Dashboard
+    searchToExecute("*");
+    selectFilteredEntity("Type", "Dashboards", "filter__entityType");
+    cy.clickOptionWithText(datasetNames.dashboardsType);
+    verifyFilteredEntity('Dashboard');
+
+    //ML Models
+    searchToExecute("*");
+    selectFilteredEntity("Type", "ML Models", "filter__entityType");
+    cy.clickOptionWithText(datasetNames.MlmoduleType);
+    verifyFilteredEntity('ML Model');
+
+    //Pipelines
+    searchToExecute("*");
+    selectFilteredEntity("Type", "Pipelines", "filter__entityType");
+    cy.clickOptionWithText(datasetNames.pipelinesType);
+    verifyFilteredEntity('Pipeline');
+
+  });
+
+  it("Verify the 'filter by Glossary term' section + query", () => {
+
+    //Glossary Term
+    searchToExecute("*");
+    selectFilteredEntity("Type", "Glossary Terms", "filter__entityType");
+    cy.clickOptionWithText(datasetNames.glossaryTermsType);
+    verifyFilteredEntity('Glossary Term');
+  });
+
+  it("Verify the 'filter by platform' section + query", () => {
+
+    //Hive
+    searchToExecute("*");
+    selectFilteredEntity("Platform", "Hive", "filter_platform");
+    cy.clickOptionWithText(datasetNames.hivePlatform);
+    verifyFilteredEntity('Hive');
+
+    //AWS S3
+    searchToExecute("*");
+    selectFilteredEntity("Platform", "AWS S3", "filter_platform");
+    cy.clickOptionWithText(datasetNames.awsPlatform);
+    verifyFilteredEntity('AWS S3');
+
+    //HDFS
+
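    // Same flow as the cases above: wildcard search, apply the platform facet, then open the dataset.
+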
searchToExecute("*"); + selectFilteredEntity("Platform", "HDFS", "filter_platform"); + cy.clickOptionWithText(datasetNames.hdfsPlatform); + verifyFilteredEntity('HDFS'); + + //Airflow + searchToExecute("*"); + selectFilteredEntity("Platform", "Airflow", "filter_platform"); + cy.clickOptionWithText(datasetNames.airflowPlatform); + verifyFilteredEntity('Airflow'); + }); - const platformQuerySearch = (query,test_id,active_filter) => { - cy.visit("/"); - cy.get("input[data-testid=search-input]").type(query); - cy.get(`[data-testid="quick-filter-urn:li:dataPlatform:${test_id}"]`).click(); - cy.focused().type("{enter}").wait(3000); - cy.url().should( - "include", - `?filter_platform___false___EQUAL___0=urn%3Ali%3AdataPlatform%3A${test_id}` - ); - cy.get('[data-testid="search-input"]').should("have.value", query); - cy.get(`[data-testid="active-filter-${active_filter}"]`).should("be.visible"); - cy.contains("of 0 results").should("not.exist"); - cy.contains(/of [0-9]+ results/); - } - - const entityQuerySearch = (query,test_id,active_filter) => { - cy.visit("/"); - cy.get("input[data-testid=search-input]").type(query); - cy.get(`[data-testid="quick-filter-${test_id}"]`).click(); - cy.focused().type("{enter}").wait(3000); - cy.url().should( - "include", - `?filter__entityType___false___EQUAL___0=${test_id}` - ); - cy.get('[data-testid="search-input"]').should("have.value", query); - cy.get(`[data-testid="active-filter-${active_filter}"]`).should("be.visible"); - cy.contains("of 0 results").should("not.exist"); - cy.contains(/of [0-9]+ results/); - } - - it("verify the 'filter by' section + query (result in search page with query applied + filter applied)", () => { - // Platform query plus filter test - cy.loginWithCredentials(); - // Airflow - platformQuerySearch ("cypress","airflow","Airflow"); - // BigQuery - platformQuerySearch ("cypress","bigquery","BigQuery"); - // dbt - platformQuerySearch ("cypress","dbt","dbt"); - // Hive - platformQuerySearch ("cypress","hive","Hive"); - - // Entity type query plus filter test - // Datasets - entityQuerySearch ("cypress","DATASET","Datasets"); - // Dashboards - entityQuerySearch ("cypress","DASHBOARD","Dashboards"); - // Pipelines - entityQuerySearch ("cypress","DATA_FLOW","Pipelines"); - // Domains - entityQuerySearch ("Marketing","DOMAIN","Domains"); - // Glossary Terms - entityQuerySearch ("cypress","GLOSSARY_TERM","Glossary Terms"); + it("Verify the 'filter by tag' section + query", () => { + + //CypressFeatureTag + searchToExecute("*"); + selectFilteredEntity("Tag", "CypressFeatureTag", "filter_tags"); + cy.clickOptionWithText(datasetNames.tags); + cy.mouseover('[data-testid="tag-CypressFeatureTag"]'); + verifyFilteredEntity('Feature'); }); -}); \ No newline at end of file +}); From ff78e3c172fee880cdbe1aa3333cf4a73926c910 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 3 Jan 2024 19:47:19 +0530 Subject: [PATCH 318/792] docs(acryl cloud): release notes for 0.2.14.1 (#9554) --- docs-website/sidebars.js | 1 + docs/managed-datahub/release-notes/v_0_2_14.md | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 docs/managed-datahub/release-notes/v_0_2_14.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 5d7c6b06adad4..2b8873c678778 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -177,6 +177,7 @@ module.exports = { }, { "Managed DataHub Release History": [ + "docs/managed-datahub/release-notes/v_0_2_14", "docs/managed-datahub/release-notes/v_0_2_13", 
"docs/managed-datahub/release-notes/v_0_2_12", "docs/managed-datahub/release-notes/v_0_2_11", diff --git a/docs/managed-datahub/release-notes/v_0_2_14.md b/docs/managed-datahub/release-notes/v_0_2_14.md new file mode 100644 index 0000000000000..8ad1f19503e06 --- /dev/null +++ b/docs/managed-datahub/release-notes/v_0_2_14.md @@ -0,0 +1,17 @@ +# v0.2.14.1 +--- + +Release Availability Date +--- +02-Jan-2023 + +Recommended CLI/SDK +--- +- `v0.12.1.3` with release notes at https://github.com/acryldata/datahub/releases/tag/v0.12.1.3 + +If you are using an older CLI/SDK version then please upgrade it. This applies for all CLI/SDK usages, if you are using it through your terminal, github actions, airflow, in python SDK somewhere, Java SKD etc. This is a strong recommendation to upgrade as we keep on pushing fixes in the CLI and it helps us support you better. + +## Release Changelog +--- +- Since `v0.2.13` these changes from OSS DataHub https://github.com/datahub-project/datahub/compare/d9de854d276c118afc55264ecc9e2712b91b4ab2...31f9c796763677a4d452066d9b49b4088e65da19 have been pulled in. + From c3c4bef1ad746a57a1a6cff821a732fe8114f695 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 3 Jan 2024 22:59:39 +0530 Subject: [PATCH 319/792] ci(doc): tweak build rule to avoid docker build for docs (#9555) --- .github/workflows/docker-unified.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 454e766140245..8afce059572c7 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -4,12 +4,14 @@ on: branches: - master paths-ignore: + - "docs-website/**" - "docs/**" - "**.md" pull_request: branches: - "**" paths-ignore: + - "docs-website/**" - "docs/**" - "**.md" release: From c9613043c86e169a888d5ac60f0efdcd1551a2b0 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 14:28:22 -0500 Subject: [PATCH 320/792] fix(ingest): improve kafka-connect test stability (#9519) --- .../tests/integration/kafka/docker-compose.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/tests/integration/kafka/docker-compose.yml b/metadata-ingestion/tests/integration/kafka/docker-compose.yml index 43f30cbe1e665..0a4422e07515c 100644 --- a/metadata-ingestion/tests/integration/kafka/docker-compose.yml +++ b/metadata-ingestion/tests/integration/kafka/docker-compose.yml @@ -1,5 +1,5 @@ --- -version: '3.8' +version: "3.8" services: zookeeper: image: confluentinc/cp-zookeeper:7.2.2 @@ -9,7 +9,8 @@ services: ports: - "52181" volumes: - - test_zkdata:/var/opt/zookeeper + - test_zkdata:/var/lib/zookeeper/data + - test_zklogs:/var/lib/zookeeper/log broker: image: confluentinc/cp-kafka:7.2.2 @@ -34,3 +35,4 @@ services: volumes: test_zkdata: + test_zklogs: From 83b904e379b0e9a13d22659e483c6d3d4c9b29ba Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 14:28:32 -0500 Subject: [PATCH 321/792] fix(ingest/looker): add user stats to report (#9505) --- .../ingestion/source/looker/looker_common.py | 5 +++++ .../ingestion/source/looker/looker_config.py | 5 ----- .../ingestion/source/looker/looker_source.py | 13 +++++-------- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py index 53533a8d27c9b..94a56bb9281cb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py +++ 
b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py @@ -1059,6 +1059,7 @@ class LookerDashboardSourceReport(StaleEntityRemovalSourceReport): dashboards_scanned_for_usage: int = 0 charts_scanned_for_usage: int = 0 charts_with_activity: LossySet[str] = dataclasses_field(default_factory=LossySet) + accessed_dashboards: int = 0 dashboards_with_activity: LossySet[str] = dataclasses_field( default_factory=LossySet ) @@ -1066,6 +1067,10 @@ class LookerDashboardSourceReport(StaleEntityRemovalSourceReport): _looker_explore_registry: Optional[LookerExploreRegistry] = None total_explores: int = 0 explores_scanned: int = 0 + + resolved_user_ids: int = 0 + email_ids_missing: int = 0 # resolved users with missing email addresses + _looker_api: Optional[LookerAPI] = None query_latency: Dict[str, datetime.timedelta] = dataclasses_field( default_factory=dict diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index 514f22b4f2158..52a21e8f12259 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -160,11 +160,6 @@ class LookerDashboardSourceConfig( description="When enabled, extracts ownership from Looker directly. When disabled, ownership is left empty " "for dashboards and charts.", ) - actor: Optional[str] = Field( - None, - description="This config is deprecated in favor of `extract_owners`. Previously, was the actor to use in " - "ownership properties of ingested metadata.", - ) strip_user_ids_from_email: bool = Field( False, description="When enabled, converts Looker user emails of the form name@domain.com to urn:li:corpuser:name " diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 7e8fbfde12042..0cce267bf5579 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -129,9 +129,6 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase): source_config: LookerDashboardSourceConfig reporter: LookerDashboardSourceReport user_registry: LookerUserRegistry - accessed_dashboards: int = 0 - resolved_user_ids: int = 0 - email_ids_missing: int = 0 # resolved users with missing email addresses reachable_look_registry: Set[ str ] # Keep track of look-id which are reachable from Dashboard @@ -866,7 +863,7 @@ def _get_folder_path(self, folder: FolderBase, client: LookerAPI) -> str: def _get_looker_dashboard( self, dashboard: Dashboard, client: LookerAPI ) -> LookerDashboard: - self.accessed_dashboards += 1 + self.reporter.accessed_dashboards += 1 if dashboard.folder is None: logger.debug(f"{dashboard.id} has no folder") dashboard_folder_path = None @@ -928,9 +925,9 @@ def _get_looker_user(self, user_id: Optional[str]) -> Optional[LookerUser]: if user is not None and self.source_config.extract_owners: # Keep track of how many user ids we were able to resolve - self.resolved_user_ids += 1 + self.reporter.resolved_user_ids += 1 if user.email is None: - self.email_ids_missing += 1 + self.reporter.email_ids_missing += 1 return user @@ -1313,8 +1310,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: if ( self.source_config.extract_owners - and self.resolved_user_ids > 0 - and self.email_ids_missing == self.resolved_user_ids + and 
self.reporter.resolved_user_ids > 0
+            and self.reporter.email_ids_missing == self.reporter.resolved_user_ids
         ):
             # Looks like we tried to extract owners and could not find their email addresses. This is likely a permissions issue
             self.reporter.report_warning(

From 186b6f942d3fa7f0ce379add72cbcb57bccd4bb0 Mon Sep 17 00:00:00 2001
From: Shirshanka Das
Date: Wed, 3 Jan 2024 12:21:06 -0800
Subject: [PATCH 322/792] perf(lineage): Rewrite lineage query for Elastic
 graph store (#9552)

---
 .../graph/elastic/ESGraphQueryDAO.java        |  82 ++++---
 .../graph/search/ESGraphQueryDAOTest.java     |  94 ++++++-
 ...1.json => lineage_query_filters_full.json} |  98 ++++----
 ...eage_query_filters_full_empty_filters.json |  60 +++++
 ...e_query_filters_full_multiple_filters.json | 229 ++++++++++++++++++
 .../lineage_query_filters_limited.json        |  32 +++
 6 files changed, 508 insertions(+), 87 deletions(-)
 rename metadata-io/src/test/resources/elasticsearch/sample_filters/{lineage_query_filters_1.json => lineage_query_filters_full.json} (81%)
 create mode 100644 metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json
 create mode 100644 metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json
 create mode 100644 metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json

diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
index 92960bc9222ab..97cb186ce948c 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
@@ -336,17 +336,10 @@ private List<LineageRelationship> getLineageRelationships(
             Collectors.toMap(
                 Function.identity(),
                 entityType -> lineageRegistry.getLineageRelationships(entityType, direction)));
-    BoolQueryBuilder finalQuery = QueryBuilders.boolQuery();
-    // Get all relation types relevant to the set of urns to hop from
-    urnsPerEntityType.forEach(
-        (entityType, urns) ->
-            finalQuery.should(
-                getQueryForLineage(
-                    urns,
-                    edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()),
-                    graphFilters,
-                    startTimeMillis,
-                    endTimeMillis)));
+
+    QueryBuilder finalQuery =
+        getLineageQuery(
+            urnsPerEntityType, edgesPerEntityType, graphFilters, startTimeMillis, endTimeMillis);
     SearchResponse response =
         executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult());
     Set<Urn> entityUrnSet = new HashSet<>(entityUrns);
@@ -361,18 +354,53 @@ private List<LineageRelationship> getLineageRelationships(
         entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths);
   }
 
-  // Get search query for given list of edges and source urns
   @VisibleForTesting
-  public static QueryBuilder getQueryForLineage(
-      @Nonnull List<Urn> urns,
-      @Nonnull List<EdgeInfo> lineageEdges,
+  public static QueryBuilder getLineageQuery(
+      @Nonnull Map<String, List<Urn>> urnsPerEntityType,
+      @Nonnull Map<String, List<EdgeInfo>> edgesPerEntityType,
       @Nonnull GraphFilters graphFilters,
       @Nullable Long startTimeMillis,
       @Nullable Long endTimeMillis) {
-    BoolQueryBuilder query = QueryBuilders.boolQuery();
-    if (lineageEdges.isEmpty()) {
-      return query;
+    BoolQueryBuilder entityTypeQueries = QueryBuilders.boolQuery();
+    // Get all relation types relevant to the set of urns to hop from
+    urnsPerEntityType.forEach(
+        (entityType, urns) -> {
+          if (edgesPerEntityType.containsKey(entityType)
+              && !edgesPerEntityType.get(entityType).isEmpty()) {
+            entityTypeQueries.should(
+                getLineageQueryForEntityType(
+                    urns, edgesPerEntityType.get(entityType), graphFilters));
+          }
+        });
+
+    BoolQueryBuilder finalQuery = QueryBuilders.boolQuery();
+
+    finalQuery.filter(entityTypeQueries);
+    finalQuery.filter(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE));
+    finalQuery.filter(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION));
+
+    /*
+     * Optional - Add edge filtering based on time windows.
+     */
+    if (startTimeMillis != null && endTimeMillis != null) {
+      finalQuery.filter(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis));
+    } else {
+      log.debug(
+          String.format(
+              "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters",
+              startTimeMillis, endTimeMillis));
     }
+
+    return finalQuery;
+  }
+
+  // Get search query for given list of edges and source urns
+  @VisibleForTesting
+  public static QueryBuilder getLineageQueryForEntityType(
+      @Nonnull List<Urn> urns,
+      @Nonnull List<EdgeInfo> lineageEdges,
+      @Nonnull GraphFilters graphFilters) {
+    BoolQueryBuilder query = QueryBuilders.boolQuery();
 
     Map<RelationshipDirection, List<EdgeInfo>> edgesByDirection =
         lineageEdges.stream().collect(Collectors.groupingBy(EdgeInfo::getDirection));
@@ -388,18 +416,6 @@ public static QueryBuilder getQueryForLineage(
       query.should(getIncomingEdgeQuery(urns, incomingEdges, graphFilters));
     }
 
-    /*
-     * Optional - Add edge filtering based on time windows.
-     */
-    if (startTimeMillis != null && endTimeMillis != null) {
-      query.must(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis));
-    } else {
-      log.debug(
-          String.format(
-              "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters",
-              startTimeMillis, endTimeMillis));
-    }
-
     return query;
   }
 
@@ -601,9 +617,6 @@ private static BoolQueryBuilder getOutGoingEdgeQuery(
     BoolQueryBuilder outgoingEdgeQuery = QueryBuilders.boolQuery();
     outgoingEdgeQuery.must(buildUrnFilters(urns, SOURCE));
     outgoingEdgeQuery.must(buildEdgeFilters(outgoingEdges));
-    outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE));
-    outgoingEdgeQuery.must(
-        buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION));
     return outgoingEdgeQuery;
   }
 
@@ -612,9 +625,6 @@ private static BoolQueryBuilder getIncomingEdgeQuery(
     BoolQueryBuilder incomingEdgeQuery = QueryBuilders.boolQuery();
     incomingEdgeQuery.must(buildUrnFilters(urns, DESTINATION));
     incomingEdgeQuery.must(buildEdgeFilters(incomingEdges));
-    incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE));
-    incomingEdgeQuery.must(
-        buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION));
     return incomingEdgeQuery;
   }
 
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java
index 9fc9490bfd7ef..5b7f880e6d83a 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java
@@ -23,16 +23,40 @@ public class ESGraphQueryDAOTest {
 
-  private static final String TEST_QUERY_FILE =
-      "elasticsearch/sample_filters/lineage_query_filters_1.json";
+  private static final String TEST_QUERY_FILE_LIMITED =
+      "elasticsearch/sample_filters/lineage_query_filters_limited.json";
+  private static final String TEST_QUERY_FILE_FULL =
"elasticsearch/sample_filters/lineage_query_filters_full.json"; + private static final String TEST_QUERY_FILE_FULL_EMPTY_FILTERS = + "elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json"; + private static final String TEST_QUERY_FILE_FULL_MULTIPLE_FILTERS = + "elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json"; @Test private static void testGetQueryForLineageFullArguments() throws Exception { - URL url = Resources.getResource(TEST_QUERY_FILE); - String expectedQuery = Resources.toString(url, StandardCharsets.UTF_8); - - List urns = new ArrayList<>(); + URL urlLimited = Resources.getResource(TEST_QUERY_FILE_LIMITED); + String expectedQueryLimited = Resources.toString(urlLimited, StandardCharsets.UTF_8); + URL urlFull = Resources.getResource(TEST_QUERY_FILE_FULL); + String expectedQueryFull = Resources.toString(urlFull, StandardCharsets.UTF_8); + URL urlFullEmptyFilters = Resources.getResource(TEST_QUERY_FILE_FULL_EMPTY_FILTERS); + String expectedQueryFullEmptyFilters = + Resources.toString(urlFullEmptyFilters, StandardCharsets.UTF_8); + URL urlFullMultipleFilters = Resources.getResource(TEST_QUERY_FILE_FULL_MULTIPLE_FILTERS); + String expectedQueryFullMultipleFilters = + Resources.toString(urlFullMultipleFilters, StandardCharsets.UTF_8); + + List urns = List.of(Urn.createFromString("urn:li:dataset:test-urn")); + List urnsMultiple1 = + ImmutableList.of( + UrnUtils.getUrn("urn:li:dataset:test-urn"), + UrnUtils.getUrn("urn:li:dataset:test-urn2"), + UrnUtils.getUrn("urn:li:dataset:test-urn3")); + List urnsMultiple2 = + ImmutableList.of( + UrnUtils.getUrn("urn:li:chart:test-urn"), + UrnUtils.getUrn("urn:li:chart:test-urn2"), + UrnUtils.getUrn("urn:li:chart:test-urn3")); List edgeInfos = new ArrayList<>( ImmutableList.of( @@ -40,14 +64,64 @@ private static void testGetQueryForLineageFullArguments() throws Exception { "DownstreamOf", RelationshipDirection.INCOMING, Constants.DATASET_ENTITY_NAME))); + List edgeInfosMultiple1 = + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, Constants.DATASET_ENTITY_NAME), + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.OUTGOING, Constants.DATASET_ENTITY_NAME)); + List edgeInfosMultiple2 = + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, Constants.DATA_JOB_ENTITY_NAME), + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.OUTGOING, Constants.DATA_JOB_ENTITY_NAME)); + String entityType = "testEntityType"; + Map> urnsPerEntityType = Map.of(entityType, urns); + Map> urnsPerEntityTypeMultiple = + Map.of( + Constants.DATASET_ENTITY_NAME, + urnsMultiple1, + Constants.CHART_ENTITY_NAME, + urnsMultiple2); + Map> edgesPerEntityType = Map.of(entityType, edgeInfos); + Map> edgesPerEntityTypeMultiple = + Map.of( + Constants.DATASET_ENTITY_NAME, edgeInfosMultiple1, + Constants.DATA_JOB_ENTITY_NAME, edgeInfosMultiple2); GraphFilters graphFilters = new GraphFilters(ImmutableList.of(Constants.DATASET_ENTITY_NAME)); + GraphFilters graphFiltersMultiple = + new GraphFilters( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME)); Long startTime = 0L; Long endTime = 1L; - QueryBuilder builder = - ESGraphQueryDAO.getQueryForLineage(urns, edgeInfos, graphFilters, startTime, endTime); - - Assert.assertEquals(builder.toString(), expectedQuery); + QueryBuilder limitedBuilder = + ESGraphQueryDAO.getLineageQueryForEntityType(urns, 
edgeInfos, graphFilters); + + QueryBuilder fullBuilder = + ESGraphQueryDAO.getLineageQuery( + urnsPerEntityType, edgesPerEntityType, graphFilters, startTime, endTime); + + QueryBuilder fullBuilderEmptyFilters = + ESGraphQueryDAO.getLineageQuery( + urnsPerEntityType, edgesPerEntityType, GraphFilters.emptyGraphFilters, null, null); + + QueryBuilder fullBuilderMultipleFilters = + ESGraphQueryDAO.getLineageQuery( + urnsPerEntityTypeMultiple, + edgesPerEntityTypeMultiple, + graphFiltersMultiple, + startTime, + endTime); + + Assert.assertEquals(limitedBuilder.toString(), expectedQueryLimited); + Assert.assertEquals(fullBuilder.toString(), expectedQueryFull); + Assert.assertEquals(fullBuilderEmptyFilters.toString(), expectedQueryFullEmptyFilters); + Assert.assertEquals(fullBuilderMultipleFilters.toString(), expectedQueryFullMultipleFilters); } @Test diff --git a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_1.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full.json similarity index 81% rename from metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_1.json rename to metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full.json index eb84638f0ccd0..0a1cee08414a9 100644 --- a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_1.json +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full.json @@ -1,6 +1,62 @@ { "bool" : { - "must" : [ + "filter" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "destination.urn" : [ + "urn:li:dataset:test-urn" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "terms" : { + "source.entityType" : [ + "dataset" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "destination.entityType" : [ + "dataset" + ], + "boost" : 1.0 + } + }, { "bool" : { "should" : [ @@ -160,46 +216,6 @@ } } ], - "should" : [ - { - "bool" : { - "must" : [ - { - "terms" : { - "destination.urn" : [ ], - "boost" : 1.0 - } - }, - { - "terms" : { - "relationshipType" : [ - "DownstreamOf" - ], - "boost" : 1.0 - } - }, - { - "terms" : { - "source.entityType" : [ - "dataset" - ], - "boost" : 1.0 - } - }, - { - "terms" : { - "destination.entityType" : [ - "dataset" - ], - "boost" : 1.0 - } - } - ], - "adjust_pure_negative" : true, - "boost" : 1.0 - } - } - ], "adjust_pure_negative" : true, "boost" : 1.0 } diff --git a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json new file mode 100644 index 0000000000000..ab2841d6602d8 --- /dev/null +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json @@ -0,0 +1,60 @@ +{ + "bool" : { + "filter" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "destination.urn" : [ + "urn:li:dataset:test-urn" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 
1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "terms" : { + "source.entityType" : [ ], + "boost" : 1.0 + } + }, + { + "terms" : { + "destination.entityType" : [ ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } +} \ No newline at end of file diff --git a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json new file mode 100644 index 0000000000000..39f595e0e8dd2 --- /dev/null +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json @@ -0,0 +1,229 @@ +{ + "bool" : { + "filter" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "source.urn" : [ + "urn:li:dataset:test-urn", + "urn:li:dataset:test-urn2", + "urn:li:dataset:test-urn3" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf", + "Consumes" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "terms" : { + "source.entityType" : [ + "dataset", + "dashboard", + "dataJob" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "destination.entityType" : [ + "dataset", + "dashboard", + "dataJob" + ], + "boost" : 1.0 + } + }, + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "exists" : { + "field" : "createdOn", + "boost" : 1.0 + } + }, + { + "range" : { + "createdOn" : { + "from" : 0, + "to" : 1, + "include_lower" : true, + "include_upper" : true, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "exists" : { + "field" : "updatedOn", + "boost" : 1.0 + } + }, + { + "range" : { + "updatedOn" : { + "from" : 0, + "to" : 1, + "include_lower" : true, + "include_upper" : true, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must_not" : [ + { + "exists" : { + "field" : "createdOn", + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "term" : { + "createdOn" : { + "value" : 0, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "should" : [ + { + "bool" : { + "must_not" : [ + { + "exists" : { + "field" : "updatedOn", + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "term" : { + "updatedOn" : { + "value" : 0, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "term" : { + "properties.source" : { + "value" : "UI", + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } +} \ No newline at end of file diff --git 
a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json new file mode 100644 index 0000000000000..95d468ec3dac8 --- /dev/null +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json @@ -0,0 +1,32 @@ +{ + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "destination.urn" : [ + "urn:li:dataset:test-urn" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } +} \ No newline at end of file From f06b5c782099ace00116fd33dda73af5a48e4184 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 15:30:11 -0500 Subject: [PATCH 323/792] feat(ingest): improve config loading helpers (#9477) --- .../datahub/configuration/config_loader.py | 48 ++++++++------ .../datahub/ingestion/run/pipeline_config.py | 3 +- .../src/datahub/secret/__init__.py | 0 .../datahub/secret/datahub_secret_store.py | 66 +++++++++++++++++++ .../datahub/secret/datahub_secrets_client.py | 45 +++++++++++++ .../src/datahub/secret/secret_common.py | 59 +++++++++++++++++ .../src/datahub/secret/secret_store.py | 43 ++++++++++++ 7 files changed, 244 insertions(+), 20 deletions(-) create mode 100644 metadata-ingestion/src/datahub/secret/__init__.py create mode 100644 metadata-ingestion/src/datahub/secret/datahub_secret_store.py create mode 100644 metadata-ingestion/src/datahub/secret/datahub_secrets_client.py create mode 100644 metadata-ingestion/src/datahub/secret/secret_common.py create mode 100644 metadata-ingestion/src/datahub/secret/secret_store.py diff --git a/metadata-ingestion/src/datahub/configuration/config_loader.py b/metadata-ingestion/src/datahub/configuration/config_loader.py index 2f41af6f7286e..4266bac0c79ab 100644 --- a/metadata-ingestion/src/datahub/configuration/config_loader.py +++ b/metadata-ingestion/src/datahub/configuration/config_loader.py @@ -1,56 +1,59 @@ import io +import os import pathlib import re import sys import tempfile import unittest.mock -from typing import Any, Dict, Set, Union +from typing import Any, Dict, Mapping, Optional, Set, Union from urllib import parse import requests -from expandvars import UnboundVariable, expandvars +from expandvars import UnboundVariable, expand from datahub.configuration.common import ConfigurationError, ConfigurationMechanism from datahub.configuration.json_loader import JsonConfigurationMechanism from datahub.configuration.toml import TomlConfigurationMechanism from datahub.configuration.yaml import YamlConfigurationMechanism +Environ = Mapping[str, str] -def _resolve_element(element: str) -> str: + +def _resolve_element(element: str, environ: Environ) -> str: if re.search(r"(\$\{).+(\})", element): - return expandvars(element, nounset=True) + return expand(element, nounset=True, environ=environ) elif element.startswith("$"): try: - return expandvars(element, nounset=True) + return expand(element, nounset=True, environ=environ) except UnboundVariable: return element else: return element -def _resolve_list(ele_list: list) -> list: +def _resolve_list(ele_list: list, environ: Environ) -> list: new_v: list = [] for ele in ele_list: if isinstance(ele, str): - new_v.append(_resolve_element(ele)) + new_v.append(_resolve_element(ele, environ=environ)) elif isinstance(ele, list): - 
new_v.append(_resolve_list(ele)) + new_v.append(_resolve_list(ele, environ=environ)) elif isinstance(ele, dict): - new_v.append(resolve_env_variables(ele)) + new_v.append(resolve_env_variables(ele, environ=environ)) else: new_v.append(ele) return new_v -def resolve_env_variables(config: dict) -> dict: +def resolve_env_variables(config: dict, environ: Environ) -> dict: new_dict: Dict[Any, Any] = {} for k, v in config.items(): if isinstance(v, dict): - new_dict[k] = resolve_env_variables(v) + new_dict[k] = resolve_env_variables(v, environ=environ) elif isinstance(v, list): - new_dict[k] = _resolve_list(v) + new_dict[k] = _resolve_list(v, environ=environ) elif isinstance(v, str): - new_dict[k] = _resolve_element(v) + new_dict[k] = _resolve_element(v, environ=environ) else: new_dict[k] = v return new_dict @@ -60,13 +63,20 @@ def list_referenced_env_variables(config: dict) -> Set[str]: # This is a bit of a hack, but expandvars does a bunch of escaping # and other logic that we don't want to duplicate here. - with unittest.mock.patch("expandvars.getenv") as mock_getenv: - mock_getenv.return_value = "mocked_value" + vars = set() + + def mock_get_env(key: str, default: Optional[str] = None) -> str: + vars.add(key) + if default is not None: + return default + return "mocked_value" + + mock = unittest.mock.MagicMock() + mock.get.side_effect = mock_get_env - resolve_env_variables(config) + resolve_env_variables(config, environ=mock) - calls = mock_getenv.mock_calls - return set([call[1][0] for call in calls]) + return vars WRITE_TO_FILE_DIRECTIVE_PREFIX = "__DATAHUB_TO_FILE_" @@ -147,7 +157,7 @@ def load_config_file( config = raw_config.copy() if resolve_env_vars: - config = resolve_env_variables(config) + config = resolve_env_variables(config, environ=os.environ) if process_directives: config = _process_directives(config) diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py index f22f94c9e9351..c0f6add6df006 100644 --- a/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py +++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py @@ -1,5 +1,6 @@ import datetime import logging +import os import uuid from typing import Any, Dict, List, Optional @@ -112,7 +113,7 @@ def default_sink_is_datahub_rest(cls, values: Dict[str, Any]) -> Any: } # resolve env variables if present default_sink_config = config_loader.resolve_env_variables( - default_sink_config + default_sink_config, environ=os.environ ) values["sink"] = default_sink_config diff --git a/metadata-ingestion/src/datahub/secret/__init__.py b/metadata-ingestion/src/datahub/secret/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/secret/datahub_secret_store.py b/metadata-ingestion/src/datahub/secret/datahub_secret_store.py new file mode 100644 index 0000000000000..8301ff2d9dc1a --- /dev/null +++ b/metadata-ingestion/src/datahub/secret/datahub_secret_store.py @@ -0,0 +1,66 @@ +import logging +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel, validator + +from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph +from datahub.secret.datahub_secrets_client import DataHubSecretsClient +from datahub.secret.secret_store import SecretStore + +logger = logging.getLogger(__name__) + + +class DataHubSecretStoreConfig(BaseModel): + graph_client: Optional[DataHubGraph] = None + graph_client_config: Optional[DatahubClientConfig] = None + + 
class Config: + arbitrary_types_allowed = True + + @validator("graph_client") + def check_graph_connection(cls, v: DataHubGraph) -> DataHubGraph: + if v is not None: + v.test_connection() + return v + + +# An implementation of SecretStore that fetches secrets from DataHub +class DataHubSecretStore(SecretStore): + # Client for fetching secrets from DataHub GraphQL API + client: DataHubSecretsClient + + def __init__(self, config: DataHubSecretStoreConfig): + # Attempt to establish an outbound connection to DataHub and create a client. + if config.graph_client is not None: + self.client = DataHubSecretsClient(graph=config.graph_client) + elif config.graph_client_config is not None: + graph = DataHubGraph(config.graph_client_config) + self.client = DataHubSecretsClient(graph) + else: + raise Exception( + "Invalid configuration provided: unable to construct DataHub Graph Client." + ) + + def get_secret_values(self, secret_names: List[str]) -> Dict[str, Union[str, None]]: + # Fetch the secret from DataHub, using the credentials provided in the configuration. + # Use the GraphQL API. + try: + return self.client.get_secret_values(secret_names) + except Exception: + # Failed to resolve secrets, return empty. + logger.exception( + f"Caught exception while attempting to fetch secrets from DataHub. Secret names: {secret_names}" + ) + return {} + + def get_secret_value(self, secret_name: str) -> Union[str, None]: + secret_value_dict = self.get_secret_values([secret_name]) + return secret_value_dict.get(secret_name) + + def get_id(self) -> str: + return "datahub" + + @classmethod + def create(cls, config: Any) -> "DataHubSecretStore": + config = DataHubSecretStoreConfig.parse_obj(config) + return cls(config) diff --git a/metadata-ingestion/src/datahub/secret/datahub_secrets_client.py b/metadata-ingestion/src/datahub/secret/datahub_secrets_client.py new file mode 100644 index 0000000000000..c60aeff5db2f3 --- /dev/null +++ b/metadata-ingestion/src/datahub/secret/datahub_secrets_client.py @@ -0,0 +1,45 @@ +from typing import Dict, List, Optional + +from datahub.ingestion.graph.client import DataHubGraph + + +class DataHubSecretsClient: + """Class used to fetch secrets from DataHub.""" + + graph: DataHubGraph + + def __init__(self, graph: DataHubGraph): + self.graph = graph + + def get_secret_values(self, secret_names: List[str]) -> Dict[str, Optional[str]]: + if len(secret_names) == 0: + return {} + + request_json = { + "query": """query getSecretValues($input: GetSecretValuesInput!) {\n + getSecretValues(input: $input) {\n + name\n + value\n + }\n + }""", + "variables": {"input": {"secrets": secret_names}}, + } + # TODO: Use graph.execute_graphql() instead. 
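+        # For now this posts the raw GraphQL payload through the graph client's
+        # underlying requests session; errors surface via raise_for_status() and
+        # the "errors" key checked below.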
+
+        # Fetch secrets using the GraphQL API.
+        response = self.graph._session.post(
+            f"{self.graph.config.server}/api/graphql", json=request_json
+        )
+        response.raise_for_status()
+
+        # Verify response
+        res_data = response.json()
+        if "errors" in res_data:
+            raise Exception("Failed to retrieve secrets from DataHub.")
+
+        # Convert list of name, value secret pairs into a dict and return
+        secret_value_list = res_data["data"]["getSecretValues"]
+        secret_value_dict = dict()
+        for secret_value in secret_value_list:
+            secret_value_dict[secret_value["name"]] = secret_value["value"]
+        return secret_value_dict
diff --git a/metadata-ingestion/src/datahub/secret/secret_common.py b/metadata-ingestion/src/datahub/secret/secret_common.py
new file mode 100644
index 0000000000000..2f7a584d87538
--- /dev/null
+++ b/metadata-ingestion/src/datahub/secret/secret_common.py
@@ -0,0 +1,59 @@
+import json
+import logging
+from typing import List
+
+from datahub.configuration.config_loader import (
+    list_referenced_env_variables,
+    resolve_env_variables,
+)
+from datahub.secret.secret_store import SecretStore
+
+logger = logging.getLogger(__name__)
+
+
+def resolve_secrets(secret_names: List[str], secret_stores: List[SecretStore]) -> dict:
+    # Attempt to resolve each secret by checking each configured secret store.
+    final_secret_values = dict({})
+
+    for secret_store in secret_stores:
+        try:
+            # Retrieve secret values from the store.
+            secret_values_dict = secret_store.get_secret_values(secret_names)
+            # Overlay secret values from each store, if not None.
+            for secret_name, secret_value in secret_values_dict.items():
+                if secret_value is not None:
+                    # HACK: We previously, incorrectly replaced newline characters with
+                    # a r'\n' string. This was a lossy conversion, since we can no longer
+                    # distinguish between a newline character and the literal '\n' in
+                    # the secret value. For now, we assume that all r'\n' strings are
+                    # actually newline characters. This will break if a secret value
+                    # genuinely contains the string r'\n'.
+                    # Once this PR https://github.com/datahub-project/datahub/pull/9484
+                    # has baked for a while, we should be able to remove this hack.
+                    # TODO: This logic should live in the DataHub secret client/store,
+                    # not the general secret resolution logic.
+                    secret_value = secret_value.replace(r"\n", "\n")
+
+                    final_secret_values[secret_name] = secret_value
+        except Exception:
+            logger.exception(
+                f"Failed to fetch secret values from secret store with id {secret_store.get_id()}"
+            )
+    return final_secret_values
+
+
+def resolve_recipe(recipe: str, secret_stores: List[SecretStore]) -> dict:
+    json_recipe_raw = json.loads(recipe)
+
+    # 1. Extract all secrets that need to be resolved.
+    secrets_to_resolve = list_referenced_env_variables(json_recipe_raw)
+
+    # 2. Resolve secret values
+    secret_values_dict = resolve_secrets(list(secrets_to_resolve), secret_stores)
+
+    # 3. Substitute secrets into recipe file
+    json_recipe_resolved = resolve_env_variables(
+        json_recipe_raw, environ=secret_values_dict
+    )
+
+    return json_recipe_resolved
diff --git a/metadata-ingestion/src/datahub/secret/secret_store.py b/metadata-ingestion/src/datahub/secret/secret_store.py
new file mode 100644
index 0000000000000..d6d61d8c3c924
--- /dev/null
+++ b/metadata-ingestion/src/datahub/secret/secret_store.py
@@ -0,0 +1,43 @@
+from abc import abstractmethod
+from typing import Dict, List, Optional
+
+from datahub.configuration.common import ConfigModel
+
+
+class SecretStoreConfig(ConfigModel):
+    type: str
+    config: Dict
+
+
+class SecretStore:
+    """
+    Abstract base class for a Secret Store, or a class that resolves "secret" values by name.
+    """
+
+    @classmethod
+    @abstractmethod
+    def create(cls, configs: dict) -> "SecretStore":
+        pass
+
+    @abstractmethod
+    def get_secret_values(self, secret_names: List[str]) -> Dict[str, Optional[str]]:
+        """
+        Attempt to fetch a group of secrets, returning a dictionary that maps each
+        secret name to its value, or None if it cannot be resolved by the store.
+        """
+
+    def get_secret_value(self, secret_name: str) -> Optional[str]:
+        secret_value_dict = self.get_secret_values([secret_name])
+        return secret_value_dict.get(secret_name)
+
+    @abstractmethod
+    def get_id(self) -> str:
+        """
+        Get a unique name or id associated with the Secret Store.
+        """
+
+    @abstractmethod
+    def close(self) -> None:
+        """
+        Release any resources held by the secret store.
+        """

From 822d0eb014080fef030cdee84731878787c38c61 Mon Sep 17 00:00:00 2001
From: RyanHolstien
Date: Wed, 3 Jan 2024 15:11:07 -0600
Subject: [PATCH 324/792] feat(patch): add dashboardInfo and chartInfo support
 for patch (#9536)

---
 .../registry/SnapshotEntityRegistry.java      |   4 +
 .../template/AspectTemplateEngine.java        |   4 +-
 .../template/chart/ChartInfoTemplate.java     |  82 ++++
 .../dashboard/DashboardInfoTemplate.java      | 105 +++++
 .../datajob/DataJobInputOutputTemplate.java   |   2 -
 .../registry/patch/ChartInfoTemplateTest.java |  41 ++
 .../patch/DashboardInfoTemplateTest.java      |  41 ++
 .../UpstreamLineageTemplateTest.java          |   2 +-
 .../src/datahub/specific/chart.py             | 316 ++++++++++++++
 .../src/datahub/specific/dashboard.py         | 410 ++++++++++++++++++
 .../src/datahub/specific/datajob.py           |  12 +-
 .../src/datahub/specific/dataproduct.py       |  10 +-
 .../src/datahub/specific/dataset.py           |   8 +-
 .../src/datahub/specific/ownership.py         |   2 +-
 .../golden_dataproduct_out_upsert.json        |   2 +-
 .../unit/patch/complex_dataset_patch.json     |   2 +-
 .../tests/unit/patch/test_patch_builder.py    |  47 +-
 .../patch/chart/ChartInfoPatchBuilder.java    |  41 ++
 .../client/patch/common/PatchUtil.java        |  84 ++++
 .../dashboard/DashboardInfoPatchBuilder.java  | 103 +++++
 .../DataJobInputOutputPatchBuilder.java       |  73 +---
 .../java/datahub/client/patch/PatchTest.java  |  89 ++++
 22 files changed, 1385 insertions(+), 477 deletions(-)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java
 rename entity-registry/src/test/java/com/linkedin/metadata/models/registry/{ => patch}/UpstreamLineageTemplateTest.java (99%)
 create mode 100644 metadata-ingestion/src/datahub/specific/chart.py
 create mode 100644 metadata-ingestion/src/datahub/specific/dashboard.py
 create mode 100644 metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java
 create mode 100644 metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java
 create mode 100644 metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java

diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java
index cfc2c0901ce0d..bb0113abc9ed6 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java
@@ -12,9 +12,11 @@ import com.linkedin.metadata.models.EventSpec;
 import com.linkedin.metadata.models.registry.template.AspectTemplateEngine;
 import com.linkedin.metadata.models.registry.template.Template;
+import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate;
 import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate;
 import com.linkedin.metadata.models.registry.template.common.GlossaryTermsTemplate;
 import com.linkedin.metadata.models.registry.template.common.OwnershipTemplate;
+import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate;
 import com.linkedin.metadata.models.registry.template.dataflow.DataFlowInfoTemplate;
 import com.linkedin.metadata.models.registry.template.datajob.DataJobInfoTemplate;
 import com.linkedin.metadata.models.registry.template.datajob.DataJobInputOutputTemplate;
@@ -79,6 +81,8 @@ private AspectTemplateEngine populateTemplateEngine(Map<String, AspectSpec> aspe
     aspectSpecTemplateMap.put(DATA_JOB_INFO_ASPECT_NAME, new DataJobInfoTemplate());
     aspectSpecTemplateMap.put(
         DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate());
+    aspectSpecTemplateMap.put(CHART_INFO_ASPECT_NAME, new ChartInfoTemplate());
+    aspectSpecTemplateMap.put(DASHBOARD_INFO_ASPECT_NAME, new DashboardInfoTemplate());
     aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate());
     return new AspectTemplateEngine(aspectSpecTemplateMap);
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
index 95849a94bae29..029eb688c5291 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
@@ -32,7 +32,9 @@ public class AspectTemplateEngine {
           DATA_FLOW_INFO_ASPECT_NAME,
           DATA_JOB_INFO_ASPECT_NAME,
           DATA_PRODUCT_PROPERTIES_ASPECT_NAME,
-          DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)
+          DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
+          CHART_INFO_ASPECT_NAME,
+          DASHBOARD_INFO_ASPECT_NAME)
       .collect(Collectors.toSet());
 
   private final Map<String, Template<?>> _aspectTemplateMap;
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java
new file mode 100644
index 0000000000000..654f923e7322d
--- /dev/null
+++
b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java
@@ -0,0 +1,82 @@
+package com.linkedin.metadata.models.registry.template.chart;
+
+import static com.linkedin.metadata.Constants.*;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.linkedin.chart.ChartDataSourceTypeArray;
+import com.linkedin.chart.ChartInfo;
+import com.linkedin.common.AuditStamp;
+import com.linkedin.common.ChangeAuditStamps;
+import com.linkedin.common.EdgeArray;
+import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.data.template.RecordTemplate;
+import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate;
+import java.util.Collections;
+import javax.annotation.Nonnull;
+
+public class ChartInfoTemplate implements ArrayMergingTemplate<ChartInfo> {
+
+  private static final String INPUT_EDGES_FIELD_NAME = "inputEdges";
+  private static final String INPUTS_FIELD_NAME = "inputs";
+  private static final String DESTINATION_URN_FIELD_NAME = "destinationUrn";
+
+  @Override
+  public ChartInfo getSubtype(RecordTemplate recordTemplate) throws ClassCastException {
+    if (recordTemplate instanceof ChartInfo) {
+      return (ChartInfo) recordTemplate;
+    }
+    throw new ClassCastException("Unable to cast RecordTemplate to ChartInfo");
+  }
+
+  @Override
+  public Class<ChartInfo> getTemplateType() {
+    return ChartInfo.class;
+  }
+
+  @Nonnull
+  @Override
+  public ChartInfo getDefault() {
+    ChartInfo chartInfo = new ChartInfo();
+    chartInfo.setDescription("");
+    chartInfo.setTitle("");
+    ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps();
+    AuditStamp auditStamp =
+        new AuditStamp()
+            .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis());
+    changeAuditStamps.setCreated(auditStamp).setLastModified(auditStamp);
+    chartInfo.setLastModified(changeAuditStamps);
+    chartInfo.setInputEdges(new EdgeArray());
+
+    // Deprecated fields
+    chartInfo.setInputs(new ChartDataSourceTypeArray());
+
+    return chartInfo;
+  }
+
+  @Nonnull
+  @Override
+  public JsonNode transformFields(JsonNode baseNode) {
+    JsonNode transformedNode =
+        arrayFieldToMap(
+            baseNode,
+            INPUT_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    transformedNode = arrayFieldToMap(transformedNode, INPUTS_FIELD_NAME, Collections.emptyList());
+
+    return transformedNode;
+  }
+
+  @Nonnull
+  @Override
+  public JsonNode rebaseFields(JsonNode patched) {
+    JsonNode rebasedNode =
+        transformedMapToArray(
+            patched, INPUT_EDGES_FIELD_NAME, Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    rebasedNode = transformedMapToArray(rebasedNode, INPUTS_FIELD_NAME, Collections.emptyList());
+
+    return rebasedNode;
+  }
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java
new file mode 100644
index 0000000000000..eae04b5285adf
--- /dev/null
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java
@@ -0,0 +1,105 @@
+package com.linkedin.metadata.models.registry.template.dashboard;
+
+import static com.linkedin.metadata.Constants.*;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.linkedin.common.AuditStamp;
+import com.linkedin.common.ChangeAuditStamps;
+import com.linkedin.common.ChartUrnArray;
+import com.linkedin.common.EdgeArray;
+import com.linkedin.common.UrnArray;
+import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.dashboard.DashboardInfo;
+import com.linkedin.data.template.RecordTemplate;
+import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate;
+import java.util.Collections;
+import javax.annotation.Nonnull;
+
+public class DashboardInfoTemplate implements ArrayMergingTemplate<DashboardInfo> {
+
+  private static final String CHART_EDGES_FIELD_NAME = "chartEdges";
+  private static final String DATASET_EDGES_FIELD_NAME = "datasetEdges";
+  private static final String DATASETS_FIELD_NAME = "datasets";
+  private static final String CHARTS_FIELD_NAME = "charts";
+  private static final String DESTINATION_URN_FIELD_NAME = "destinationUrn";
+
+  @Override
+  public DashboardInfo getSubtype(RecordTemplate recordTemplate) throws ClassCastException {
+    if (recordTemplate instanceof DashboardInfo) {
+      return (DashboardInfo) recordTemplate;
+    }
+    throw new ClassCastException("Unable to cast RecordTemplate to DashboardInfo");
+  }
+
+  @Override
+  public Class<DashboardInfo> getTemplateType() {
+    return DashboardInfo.class;
+  }
+
+  @Nonnull
+  @Override
+  public DashboardInfo getDefault() {
+    DashboardInfo dashboardInfo = new DashboardInfo();
+    dashboardInfo.setTitle("");
+    dashboardInfo.setDescription("");
+    ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps();
+    AuditStamp auditStamp =
+        new AuditStamp()
+            .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis());
+    changeAuditStamps.setCreated(auditStamp).setLastModified(auditStamp);
+    dashboardInfo.setLastModified(changeAuditStamps);
+    dashboardInfo.setChartEdges(new EdgeArray());
+    dashboardInfo.setDatasetEdges(new EdgeArray());
+
+    // Deprecated fields
+    dashboardInfo.setDatasets(new UrnArray());
+    dashboardInfo.setCharts(new ChartUrnArray());
+
+    return dashboardInfo;
+  }
+
+  @Nonnull
+  @Override
+  public JsonNode transformFields(JsonNode baseNode) {
+    JsonNode transformedNode =
+        arrayFieldToMap(
+            baseNode,
+            CHART_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    transformedNode =
+        arrayFieldToMap(
+            transformedNode,
+            DATASET_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    transformedNode =
+        arrayFieldToMap(transformedNode, DATASETS_FIELD_NAME, Collections.emptyList());
+
+    transformedNode = arrayFieldToMap(transformedNode, CHARTS_FIELD_NAME, Collections.emptyList());
+
+    return transformedNode;
+  }
+
+  @Nonnull
+  @Override
+  public JsonNode rebaseFields(JsonNode patched) {
+    JsonNode rebasedNode =
+        transformedMapToArray(
+            patched,
+            DATASET_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    rebasedNode =
+        transformedMapToArray(
+            rebasedNode,
+            CHART_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    rebasedNode = transformedMapToArray(rebasedNode, DATASETS_FIELD_NAME, Collections.emptyList());
+    rebasedNode = transformedMapToArray(rebasedNode, CHARTS_FIELD_NAME, Collections.emptyList());
+
+    return rebasedNode;
+  }
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java
index 889297734e977..6761892b1b31b 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java
@@ -23,8 +23,6 @@ public class DataJobInputOutputTemplate implements ArrayMergingTemplate<DataJobInp

diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java
new file mode 100644
--- /dev/null
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java
@@ -0,0 +1,41 @@
+package com.linkedin.metadata.models.registry.patch;
+
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.github.fge.jackson.jsonpointer.JsonPointer;
+import com.github.fge.jsonpatch.AddOperation;
+import com.github.fge.jsonpatch.JsonPatch;
+import com.github.fge.jsonpatch.JsonPatchOperation;
+import com.linkedin.chart.ChartInfo;
+import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate;
+import java.util.ArrayList;
+import java.util.List;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class ChartInfoTemplateTest {
+
+  @Test
+  public void testChartInfoTemplate() throws Exception {
+    ChartInfoTemplate chartInfoTemplate = new ChartInfoTemplate();
+    ChartInfo chartInfo = chartInfoTemplate.getDefault();
+    List<JsonPatchOperation> patchOperations = new ArrayList<>();
+    ObjectNode edgeNode = instance.objectNode();
+    edgeNode.put(
+        "destinationUrn", "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)");
+    JsonPatchOperation operation =
+        new AddOperation(
+            new JsonPointer(
+                "/inputEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),
+            edgeNode);
+    patchOperations.add(operation);
+    JsonPatch patch = new JsonPatch(patchOperations);
+    ChartInfo result = chartInfoTemplate.applyPatch(chartInfo, patch);
+
+    Assert.assertEquals(
+        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),
+        result.getInputEdges().get(0).getDestinationUrn());
+  }
+}
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java
new file mode 100644
index 0000000000000..962ff1d40d873
--- /dev/null
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java
@@ -0,0 +1,41 @@
+package com.linkedin.metadata.models.registry.patch;
+
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.github.fge.jackson.jsonpointer.JsonPointer;
+import com.github.fge.jsonpatch.AddOperation;
+import com.github.fge.jsonpatch.JsonPatch;
+import com.github.fge.jsonpatch.JsonPatchOperation;
+import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.dashboard.DashboardInfo;
+import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate;
+import java.util.ArrayList;
+import java.util.List;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class DashboardInfoTemplateTest {
+
+  @Test
+  public void testDashboardInfoTemplate() throws Exception {
+    DashboardInfoTemplate dashboardInfoTemplate = new DashboardInfoTemplate();
+    DashboardInfo dashboardInfo = dashboardInfoTemplate.getDefault();
+    List<JsonPatchOperation> patchOperations = new ArrayList<>();
+    ObjectNode edgeNode = instance.objectNode();
+    edgeNode.put(
+        "destinationUrn", "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)");
+    JsonPatchOperation operation =
+        new AddOperation(
+            new JsonPointer(
+                "/datasetEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),
+            edgeNode);
+    patchOperations.add(operation);
+    JsonPatch patch = new JsonPatch(patchOperations);
+    DashboardInfo result = dashboardInfoTemplate.applyPatch(dashboardInfo, patch);
+
+    Assert.assertEquals(
+        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),
+        result.getDatasetEdges().get(0).getDestinationUrn());
+  }
+}
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java
similarity index 99%
rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java
rename to entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java
index 07982a87be56c..8f410ae8da085 100644
--- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java
+++
b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry; +package com.linkedin.metadata.models.registry.patch; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; diff --git a/metadata-ingestion/src/datahub/specific/chart.py b/metadata-ingestion/src/datahub/specific/chart.py new file mode 100644 index 0000000000000..5dc394e8ebe0f --- /dev/null +++ b/metadata-ingestion/src/datahub/specific/chart.py @@ -0,0 +1,316 @@ +import time +from typing import Dict, List, Optional, TypeVar, Union +from urllib.parse import quote + +from datahub.emitter.mcp_patch_builder import MetadataPatchProposal +from datahub.metadata.schema_classes import ( + AuditStampClass, + ChartInfoClass as ChartInfo, + EdgeClass as Edge, + GlobalTagsClass as GlobalTags, + GlossaryTermAssociationClass as Term, + GlossaryTermsClass as GlossaryTerms, + KafkaAuditHeaderClass, + OwnerClass as Owner, + OwnershipTypeClass, + SystemMetadataClass, + TagAssociationClass as Tag, +) +from datahub.specific.custom_properties import CustomPropertiesPatchHelper +from datahub.specific.ownership import OwnershipPatchHelper +from datahub.utilities.urns.tag_urn import TagUrn +from datahub.utilities.urns.urn import Urn + +T = TypeVar("T", bound=MetadataPatchProposal) + + +class ChartPatchBuilder(MetadataPatchProposal): + def __init__( + self, + urn: str, + system_metadata: Optional[SystemMetadataClass] = None, + audit_header: Optional[KafkaAuditHeaderClass] = None, + ) -> None: + """ + Initializes a ChartPatchBuilder instance. + + Args: + urn: The URN of the chart + system_metadata: The system metadata of the chart (optional). + audit_header: The Kafka audit header of the chart (optional). + """ + super().__init__( + urn, "chart", system_metadata=system_metadata, audit_header=audit_header + ) + self.custom_properties_patch_helper = CustomPropertiesPatchHelper( + self, ChartInfo.ASPECT_NAME + ) + self.ownership_patch_helper = OwnershipPatchHelper(self) + + def _mint_auditstamp(self, message: Optional[str] = None) -> AuditStampClass: + """ + Creates an AuditStampClass instance with the current timestamp and other default values. + + Args: + message: The message associated with the audit stamp (optional). + + Returns: + An instance of AuditStampClass. + """ + return AuditStampClass( + time=int(time.time() * 1000.0), + actor="urn:li:corpuser:datahub", + message=message, + ) + + def _ensure_urn_type( + self, entity_type: str, edges: List[Edge], context: str + ) -> None: + """ + Ensures that the destination URNs in the given edges have the specified entity type. + + Args: + entity_type: The entity type to check against. + edges: A list of Edge objects. + context: The context or description of the operation. + + Raises: + ValueError: If any of the destination URNs is not of the specified entity type. + """ + for e in edges: + urn = Urn.create_from_string(e.destinationUrn) + if not urn.get_type() == entity_type: + raise ValueError( + f"{context}: {e.destinationUrn} is not of type {entity_type}" + ) + + def add_owner(self, owner: Owner) -> "ChartPatchBuilder": + """ + Adds an owner to the ChartPatchBuilder. + + Args: + owner: The Owner object to add. + + Returns: + The ChartPatchBuilder instance. 
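+
+        Example (a minimal sketch; the chart and user URNs below are
+        hypothetical)::
+
+            patch = ChartPatchBuilder("urn:li:chart:(looker,my-chart)")
+            patch.add_owner(
+                OwnerClass(
+                    owner="urn:li:corpuser:jdoe",
+                    type=OwnershipTypeClass.TECHNICAL_OWNER,
+                )
+            )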
+ """ + self.ownership_patch_helper.add_owner(owner) + return self + + def remove_owner( + self, owner: str, owner_type: Optional[OwnershipTypeClass] = None + ) -> "ChartPatchBuilder": + """ + Removes an owner from the ChartPatchBuilder. + + Args: + owner: The owner to remove. + owner_type: The ownership type of the owner (optional). + + Returns: + The ChartPatchBuilder instance. + + Notes: + `owner_type` is optional. + """ + self.ownership_patch_helper.remove_owner(owner, owner_type) + return self + + def set_owners(self, owners: List[Owner]) -> "ChartPatchBuilder": + """ + Sets the owners of the ChartPatchBuilder. + + Args: + owners: A list of Owner objects. + + Returns: + The ChartPatchBuilder instance. + """ + self.ownership_patch_helper.set_owners(owners) + return self + + def add_input_edge(self, input: Union[Edge, Urn, str]) -> "ChartPatchBuilder": + """ + Adds an input to the ChartPatchBuilder. + + Args: + input: The input, which can be an Edge object, Urn object, or a string. + + Returns: + The ChartPatchBuilder instance. + + Notes: + If `input` is an Edge object, it is used directly. If `input` is a Urn object or string, + it is converted to an Edge object and added with default audit stamps. + """ + if isinstance(input, Edge): + input_urn: str = input.destinationUrn + input_edge: Edge = input + elif isinstance(input, (Urn, str)): + input_urn = str(input) + + input_edge = Edge( + destinationUrn=input_urn, + created=self._mint_auditstamp(), + lastModified=self._mint_auditstamp(), + ) + + self._ensure_urn_type("dataset", [input_edge], "add_dataset") + self._add_patch( + ChartInfo.ASPECT_NAME, + "add", + path=f"/inputEdges/{quote(input_urn, safe='')}", + value=input_urn, + ) + return self + + def remove_input_edge(self, input: Union[str, Urn]) -> "ChartPatchBuilder": + """ + Removes an input from the ChartPatchBuilder. + + Args: + input: The input to remove, specified as a string or Urn object. + + Returns: + The ChartPatchBuilder instance. + """ + self._add_patch( + ChartInfo.ASPECT_NAME, + "remove", + path=f"/inputEdges/{input}", + value={}, + ) + return self + + def set_input_edges(self, inputs: List[Edge]) -> "ChartPatchBuilder": + """ + Sets the input edges for the ChartPatchBuilder. + + Args: + inputs: A list of Edge objects representing the input edges. + + Returns: + The ChartPatchBuilder instance. + + Notes: + This method replaces all existing inputs with the given inputs. + """ + self._add_patch( + ChartInfo.ASPECT_NAME, + "add", + path="/inputEdges", + value=inputs, + ) + return self + + def add_tag(self, tag: Tag) -> "ChartPatchBuilder": + """ + Adds a tag to the ChartPatchBuilder. + + Args: + tag: The Tag object representing the tag to be added. + + Returns: + The ChartPatchBuilder instance. + """ + self._add_patch( + GlobalTags.ASPECT_NAME, "add", path=f"/tags/{tag.tag}", value=tag + ) + return self + + def remove_tag(self, tag: Union[str, Urn]) -> "ChartPatchBuilder": + """ + Removes a tag from the ChartPatchBuilder. + + Args: + tag: The tag to remove, specified as a string or Urn object. + + Returns: + The ChartPatchBuilder instance. + """ + if isinstance(tag, str) and not tag.startswith("urn:li:tag:"): + tag = TagUrn.create_from_id(tag) + self._add_patch(GlobalTags.ASPECT_NAME, "remove", path=f"/tags/{tag}", value={}) + return self + + def add_term(self, term: Term) -> "ChartPatchBuilder": + """ + Adds a glossary term to the ChartPatchBuilder. + + Args: + term: The Term object representing the glossary term to be added. + + Returns: + The ChartPatchBuilder instance. 
+ """ + self._add_patch( + GlossaryTerms.ASPECT_NAME, "add", path=f"/terms/{term.urn}", value=term + ) + return self + + def remove_term(self, term: Union[str, Urn]) -> "ChartPatchBuilder": + """ + Removes a glossary term from the ChartPatchBuilder. + + Args: + term: The term to remove, specified as a string or Urn object. + + Returns: + The ChartPatchBuilder instance. + """ + if isinstance(term, str) and not term.startswith("urn:li:glossaryTerm:"): + term = "urn:li:glossaryTerm:" + term + self._add_patch( + GlossaryTerms.ASPECT_NAME, "remove", path=f"/terms/{term}", value={} + ) + return self + + def set_custom_properties( + self, custom_properties: Dict[str, str] + ) -> "ChartPatchBuilder": + """ + Sets the custom properties for the ChartPatchBuilder. + + Args: + custom_properties: A dictionary containing the custom properties to be set. + + Returns: + The ChartPatchBuilder instance. + + Notes: + This method replaces all existing custom properties with the given dictionary. + """ + self._add_patch( + ChartInfo.ASPECT_NAME, + "add", + path="/customProperties", + value=custom_properties, + ) + return self + + def add_custom_property(self, key: str, value: str) -> "ChartPatchBuilder": + """ + Adds a custom property to the ChartPatchBuilder. + + Args: + key: The key of the custom property. + value: The value of the custom property. + + Returns: + The ChartPatchBuilder instance. + """ + self.custom_properties_patch_helper.add_property(key, value) + return self + + def remove_custom_property(self, key: str) -> "ChartPatchBuilder": + """ + Removes a custom property from the ChartPatchBuilder. + + Args: + key: The key of the custom property to remove. + + Returns: + The ChartPatchBuilder instance. + """ + self.custom_properties_patch_helper.remove_property(key) + return self diff --git a/metadata-ingestion/src/datahub/specific/dashboard.py b/metadata-ingestion/src/datahub/specific/dashboard.py new file mode 100644 index 0000000000000..855dcc5685cea --- /dev/null +++ b/metadata-ingestion/src/datahub/specific/dashboard.py @@ -0,0 +1,410 @@ +import time +from typing import Dict, List, Optional, TypeVar, Union +from urllib.parse import quote + +from datahub.emitter.mcp_patch_builder import MetadataPatchProposal +from datahub.metadata.schema_classes import ( + AuditStampClass, + DashboardInfoClass as DashboardInfo, + EdgeClass as Edge, + GlobalTagsClass as GlobalTags, + GlossaryTermAssociationClass as Term, + GlossaryTermsClass as GlossaryTerms, + KafkaAuditHeaderClass, + OwnerClass as Owner, + OwnershipTypeClass, + SystemMetadataClass, + TagAssociationClass as Tag, +) +from datahub.specific.custom_properties import CustomPropertiesPatchHelper +from datahub.specific.ownership import OwnershipPatchHelper +from datahub.utilities.urns.tag_urn import TagUrn +from datahub.utilities.urns.urn import Urn + +T = TypeVar("T", bound=MetadataPatchProposal) + + +class DashboardPatchBuilder(MetadataPatchProposal): + def __init__( + self, + urn: str, + system_metadata: Optional[SystemMetadataClass] = None, + audit_header: Optional[KafkaAuditHeaderClass] = None, + ) -> None: + """ + Initializes a DashboardPatchBuilder instance. + + Args: + urn: The URN of the dashboard + system_metadata: The system metadata of the dashboard (optional). + audit_header: The Kafka audit header of the dashboard (optional). 
+ """ + super().__init__( + urn, "dashboard", system_metadata=system_metadata, audit_header=audit_header + ) + self.custom_properties_patch_helper = CustomPropertiesPatchHelper( + self, DashboardInfo.ASPECT_NAME + ) + self.ownership_patch_helper = OwnershipPatchHelper(self) + + def _mint_auditstamp(self, message: Optional[str] = None) -> AuditStampClass: + """ + Creates an AuditStampClass instance with the current timestamp and other default values. + + Args: + message: The message associated with the audit stamp (optional). + + Returns: + An instance of AuditStampClass. + """ + return AuditStampClass( + time=int(time.time() * 1000.0), + actor="urn:li:corpuser:datahub", + message=message, + ) + + def _ensure_urn_type( + self, entity_type: str, edges: List[Edge], context: str + ) -> None: + """ + Ensures that the destination URNs in the given edges have the specified entity type. + + Args: + entity_type: The entity type to check against. + edges: A list of Edge objects. + context: The context or description of the operation. + + Raises: + ValueError: If any of the destination URNs is not of the specified entity type. + """ + for e in edges: + urn = Urn.create_from_string(e.destinationUrn) + if not urn.get_type() == entity_type: + raise ValueError( + f"{context}: {e.destinationUrn} is not of type {entity_type}" + ) + + def add_owner(self, owner: Owner) -> "DashboardPatchBuilder": + """ + Adds an owner to the DashboardPatchBuilder. + + Args: + owner: The Owner object to add. + + Returns: + The DashboardPatchBuilder instance. + """ + self.ownership_patch_helper.add_owner(owner) + return self + + def remove_owner( + self, owner: str, owner_type: Optional[OwnershipTypeClass] = None + ) -> "DashboardPatchBuilder": + """ + Removes an owner from the DashboardPatchBuilder. + + Args: + owner: The owner to remove. + owner_type: The ownership type of the owner (optional). + + Returns: + The DashboardPatchBuilder instance. + + Notes: + `owner_type` is optional. + """ + self.ownership_patch_helper.remove_owner(owner, owner_type) + return self + + def set_owners(self, owners: List[Owner]) -> "DashboardPatchBuilder": + """ + Sets the owners of the DashboardPatchBuilder. + + Args: + owners: A list of Owner objects. + + Returns: + The DashboardPatchBuilder instance. + """ + self.ownership_patch_helper.set_owners(owners) + return self + + def add_dataset_edge( + self, dataset: Union[Edge, Urn, str] + ) -> "DashboardPatchBuilder": + """ + Adds an dataset to the DashboardPatchBuilder. + + Args: + dataset: The dataset, which can be an Edge object, Urn object, or a string. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If the dataset is not a Dataset urn. + + Notes: + If `dataset` is an Edge object, it is used directly. If `dataset` is a Urn object or string, + it is converted to an Edge object and added with default audit stamps. 
+ """ + if isinstance(dataset, Edge): + dataset_urn: str = dataset.destinationUrn + dataset_edge: Edge = dataset + elif isinstance(dataset, (Urn, str)): + dataset_urn = str(dataset) + if not dataset_urn.startswith("urn:li:dataset:"): + raise ValueError(f"Input {dataset} is not a Dataset urn") + + dataset_edge = Edge( + destinationUrn=dataset_urn, + created=self._mint_auditstamp(), + lastModified=self._mint_auditstamp(), + ) + + self._ensure_urn_type("dataset", [dataset_edge], "add_dataset") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path=f"/datasetEdges/{quote(dataset_urn, safe='')}", + value=dataset_edge, + ) + return self + + def remove_dataset_edge(self, dataset: Union[str, Urn]) -> "DashboardPatchBuilder": + """ + Removes a dataset edge from the DashboardPatchBuilder. + + Args: + dataset: The dataset to remove, specified as a string or Urn object. + + Returns: + The DashboardPatchBuilder instance. + """ + self._add_patch( + DashboardInfo.ASPECT_NAME, + "remove", + path=f"/datasetEdges/{dataset}", + value={}, + ) + return self + + def set_dataset_edges(self, datasets: List[Edge]) -> "DashboardPatchBuilder": + """ + Sets the dataset edges for the DashboardPatchBuilder. + + Args: + datasets: A list of Edge objects representing the dataset edges. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If any of the input edges are not of type 'Datset'. + + Notes: + This method replaces all existing datasets with the given inputs. + """ + self._ensure_urn_type("dataset", datasets, "dataset edges") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path="/datasetEdges", + value=datasets, + ) + return self + + def add_chart_edge(self, chart: Union[Edge, Urn, str]) -> "DashboardPatchBuilder": + """ + Adds a chart edge to the DashboardPatchBuilder. + + Args: + chart: The dataset, which can be an Edge object, Urn object, or a string. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If the edge is not a Chart urn. + + Notes: + If `chart` is an Edge object, it is used directly. If `chart` is a Urn object or string, + it is converted to an Edge object and added with default audit stamps. + """ + if isinstance(chart, Edge): + chart_urn: str = chart.destinationUrn + chart_edge: Edge = chart + elif isinstance(chart, (Urn, str)): + chart_urn = str(chart) + if not chart_urn.startswith("urn:li:chart:"): + raise ValueError(f"Input {chart} is not a Chart urn") + + chart_edge = Edge( + destinationUrn=chart_urn, + created=self._mint_auditstamp(), + lastModified=self._mint_auditstamp(), + ) + + self._ensure_urn_type("dataset", [chart_edge], "add_chart_edge") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path=f"/chartEdges/{quote(chart_urn, safe='')}", + value=chart_edge, + ) + return self + + def remove_chart_edge(self, chart: Union[str, Urn]) -> "DashboardPatchBuilder": + """ + Removes an chart edge from the DashboardPatchBuilder. + + Args: + chart: The chart to remove, specified as a string or Urn object. + + Returns: + The DashboardPatchBuilder instance. + """ + self._add_patch( + DashboardInfo.ASPECT_NAME, + "remove", + path=f"/chartEdges/{chart}", + value={}, + ) + return self + + def set_chart_edges(self, charts: List[Edge]) -> "DashboardPatchBuilder": + """ + Sets the chart edges for the DashboardPatchBuilder. + + Args: + charts: A list of Edge objects representing the chart edges. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If any of the edges are not of type 'chart'. 
+
+        Notes:
+            This method replaces all existing charts with the given charts.
+        """
+        self._ensure_urn_type("chart", charts, "set_charts")
+        self._add_patch(
+            DashboardInfo.ASPECT_NAME,
+            "add",
+            path="/chartEdges",
+            value=charts,
+        )
+        return self
+
+    def add_tag(self, tag: Tag) -> "DashboardPatchBuilder":
+        """
+        Adds a tag to the DashboardPatchBuilder.
+
+        Args:
+            tag: The Tag object representing the tag to be added.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+        """
+        self._add_patch(
+            GlobalTags.ASPECT_NAME, "add", path=f"/tags/{tag.tag}", value=tag
+        )
+        return self
+
+    def remove_tag(self, tag: Union[str, Urn]) -> "DashboardPatchBuilder":
+        """
+        Removes a tag from the DashboardPatchBuilder.
+
+        Args:
+            tag: The tag to remove, specified as a string or Urn object.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+        """
+        if isinstance(tag, str) and not tag.startswith("urn:li:tag:"):
+            tag = TagUrn.create_from_id(tag)
+        self._add_patch(GlobalTags.ASPECT_NAME, "remove", path=f"/tags/{tag}", value={})
+        return self
+
+    def add_term(self, term: Term) -> "DashboardPatchBuilder":
+        """
+        Adds a glossary term to the DashboardPatchBuilder.
+
+        Args:
+            term: The Term object representing the glossary term to be added.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+        """
+        self._add_patch(
+            GlossaryTerms.ASPECT_NAME, "add", path=f"/terms/{term.urn}", value=term
+        )
+        return self
+
+    def remove_term(self, term: Union[str, Urn]) -> "DashboardPatchBuilder":
+        """
+        Removes a glossary term from the DashboardPatchBuilder.
+
+        Args:
+            term: The term to remove, specified as a string or Urn object.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+        """
+        if isinstance(term, str) and not term.startswith("urn:li:glossaryTerm:"):
+            term = "urn:li:glossaryTerm:" + term
+        self._add_patch(
+            GlossaryTerms.ASPECT_NAME, "remove", path=f"/terms/{term}", value={}
+        )
+        return self
+
+    def set_custom_properties(
+        self, custom_properties: Dict[str, str]
+    ) -> "DashboardPatchBuilder":
+        """
+        Sets the custom properties for the DashboardPatchBuilder.
+
+        Args:
+            custom_properties: A dictionary containing the custom properties to be set.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+
+        Notes:
+            This method replaces all existing custom properties with the given dictionary.
+        """
+        self._add_patch(
+            DashboardInfo.ASPECT_NAME,
+            "add",
+            path="/customProperties",
+            value=custom_properties,
+        )
+        return self
+
+    def add_custom_property(self, key: str, value: str) -> "DashboardPatchBuilder":
+        """
+        Adds a custom property to the DashboardPatchBuilder.
+
+        Args:
+            key: The key of the custom property.
+            value: The value of the custom property.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+        """
+        self.custom_properties_patch_helper.add_property(key, value)
+        return self
+
+    def remove_custom_property(self, key: str) -> "DashboardPatchBuilder":
+        """
+        Removes a custom property from the DashboardPatchBuilder.
+
+        Args:
+            key: The key of the custom property to remove.
+
+        Returns:
+            The DashboardPatchBuilder instance.
+        """
+        self.custom_properties_patch_helper.remove_property(key)
+        return self
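A similar sketch for the `DashboardPatchBuilder`, here paired with DataHub's existing REST emitter (`DatahubRestEmitter` from `datahub.emitter.rest_emitter`); the GMS address and all URNs below are assumptions for illustration:

```python
# Hypothetical usage sketch for the DashboardPatchBuilder added above.
from datahub.emitter.mce_builder import make_chart_urn, make_dashboard_urn
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.specific.dashboard import DashboardPatchBuilder

patch_builder = (
    DashboardPatchBuilder(
        make_dashboard_urn(platform="looker", name="example_dashboard")
    )
    # String inputs are validated: datasets must be dataset URNs and
    # charts must be chart URNs, otherwise a ValueError is raised.
    .add_dataset_edge("urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)")
    .add_chart_edge(make_chart_urn(platform="looker", name="example_chart"))
    .add_custom_property("team", "growth")
)

# Each proposal carries an application/json-patch+json payload.
emitter = DatahubRestEmitter("http://localhost:8080")  # assumed local GMS endpoint
for mcp in patch_builder.build():
    emitter.emit_mcp(mcp)
```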
+ """ + self.custom_properties_patch_helper.remove_property(key) + return self diff --git a/metadata-ingestion/src/datahub/specific/datajob.py b/metadata-ingestion/src/datahub/specific/datajob.py index 7ebaee6b918c1..0338a1320c15b 100644 --- a/metadata-ingestion/src/datahub/specific/datajob.py +++ b/metadata-ingestion/src/datahub/specific/datajob.py @@ -207,7 +207,7 @@ def set_input_datajobs(self, inputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("dataJob", inputs, "input datajobs") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/inputDatajobEdges", value=inputs, ) @@ -290,7 +290,7 @@ def set_input_datasets(self, inputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("dataset", inputs, "set_input_datasets") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/inputDatasetEdges", value=inputs, ) @@ -375,7 +375,7 @@ def set_output_datasets(self, outputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("dataset", outputs, "set_output_datasets") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/outputDatasetEdges", value=outputs, ) @@ -463,7 +463,7 @@ def set_input_dataset_fields(self, inputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("schemaField", inputs, "set_input_dataset_fields") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/inputDatasetFields", value=inputs, ) @@ -551,7 +551,7 @@ def set_output_dataset_fields(self, outputs: List[Edge]) -> "DataJobPatchBuilder self._ensure_urn_type("schemaField", outputs, "set_output_dataset_fields") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/outputDatasetFields", value=outputs, ) @@ -636,7 +636,7 @@ def set_custom_properties( """ self._add_patch( DataJobInfo.ASPECT_NAME, - "replace", + "add", path="/customProperties", value=custom_properties, ) diff --git a/metadata-ingestion/src/datahub/specific/dataproduct.py b/metadata-ingestion/src/datahub/specific/dataproduct.py index bb49ac47b3ef8..2c174e0c9a6cb 100644 --- a/metadata-ingestion/src/datahub/specific/dataproduct.py +++ b/metadata-ingestion/src/datahub/specific/dataproduct.py @@ -85,7 +85,7 @@ def remove_term(self, term: Union[str, Urn]) -> "DataProductPatchBuilder": def set_name(self, name: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/name", value=name, ) @@ -94,7 +94,7 @@ def set_name(self, name: str) -> "DataProductPatchBuilder": def set_description(self, description: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/description", value=description, ) @@ -105,7 +105,7 @@ def set_custom_properties( ) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/customProperties", value=custom_properties, ) @@ -124,7 +124,7 @@ def set_assets( ) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/assets", value=assets, ) @@ -151,7 +151,7 @@ def remove_asset(self, asset_urn: str) -> "DataProductPatchBuilder": def set_external_url(self, external_url: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/externalUrl", value=external_url, ) diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index 294a80572669b..62ee4fc57b61b 100644 
--- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -143,7 +143,7 @@ def remove_upstream_lineage( def set_upstream_lineages(self, upstreams: List[Upstream]) -> "DatasetPatchBuilder": self._add_patch( - UpstreamLineage.ASPECT_NAME, "replace", path="/upstreams", value=upstreams + UpstreamLineage.ASPECT_NAME, "add", path="/upstreams", value=upstreams ) return self @@ -297,7 +297,7 @@ def set_description( DatasetProperties.ASPECT_NAME if not editable else EditableDatasetProperties.ASPECT_NAME, - "replace", + "add", path="/description", value=description, ) @@ -308,7 +308,7 @@ def set_custom_properties( ) -> "DatasetPatchBuilder": self._add_patch( DatasetProperties.ASPECT_NAME, - "replace", + "add", path="/customProperties", value=custom_properties, ) @@ -326,7 +326,7 @@ def set_display_name(self, display_name: str) -> "DatasetPatchBuilder": if display_name is not None: self._add_patch( DatasetProperties.ASPECT_NAME, - "replace", + "add", path="/name", value=display_name, ) diff --git a/metadata-ingestion/src/datahub/specific/ownership.py b/metadata-ingestion/src/datahub/specific/ownership.py index 334b45a67437f..c2a3874a3a33f 100644 --- a/metadata-ingestion/src/datahub/specific/ownership.py +++ b/metadata-ingestion/src/datahub/specific/ownership.py @@ -43,6 +43,6 @@ def remove_owner( def set_owners(self, owners: List[OwnerClass]) -> "OwnershipPatchHelper": self._parent._add_patch( - OwnershipClass.ASPECT_NAME, "replace", path="/owners", value=owners + OwnershipClass.ASPECT_NAME, "add", path="/owners", value=owners ) return self diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json index 97c2330f58bc7..66bc2ce0c2a0c 100644 --- a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json +++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json @@ -5,7 +5,7 @@ "changeType": "PATCH", "aspectName": "dataProductProperties", "aspect": { - "value": "[{\"op\": \"replace\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"replace\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"replace\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"replace\", \"path\": \"/externalUrl\", \"value\": \"https://github.com/datahub-project/datahub\"}]", + "value": "[{\"op\": \"add\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"add\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": 
\"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"add\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"add\", \"path\": \"/externalUrl\", \"value\": \"https://github.com/datahub-project/datahub\"}]", "contentType": "application/json-patch+json" } }, diff --git a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json index ed5a7723ac2bf..bcc619a09401e 100644 --- a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json +++ b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json @@ -7,7 +7,7 @@ "aspect": { "json": [ { - "op": "replace", + "op": "add", "path": "/description", "value": "test description" }, diff --git a/metadata-ingestion/tests/unit/patch/test_patch_builder.py b/metadata-ingestion/tests/unit/patch/test_patch_builder.py index f05c4978f8644..e68f948be8aa0 100644 --- a/metadata-ingestion/tests/unit/patch/test_patch_builder.py +++ b/metadata-ingestion/tests/unit/patch/test_patch_builder.py @@ -3,7 +3,12 @@ import pytest -from datahub.emitter.mce_builder import make_dataset_urn, make_tag_urn +from datahub.emitter.mce_builder import ( + make_chart_urn, + make_dashboard_urn, + make_dataset_urn, + make_tag_urn, +) from datahub.ingestion.sink.file import write_metadata_file from datahub.metadata.schema_classes import ( DatasetLineageTypeClass, @@ -15,6 +20,8 @@ TagAssociationClass, UpstreamClass, ) +from datahub.specific.chart import ChartPatchBuilder +from datahub.specific.dashboard import DashboardPatchBuilder from datahub.specific.dataset import DatasetPatchBuilder @@ -80,3 +87,41 @@ def test_complex_dataset_patch( pytestconfig.rootpath / "tests/unit/patch/complex_dataset_patch.json" ).read_text() ) + + +def test_basic_chart_patch_builder(): + patcher = ChartPatchBuilder( + make_chart_urn(platform="hive", name="fct_users_created") + ).add_tag(TagAssociationClass(tag=make_tag_urn("test_tag"))) + + assert patcher.build() == [ + MetadataChangeProposalClass( + entityType="chart", + entityUrn="urn:li:chart:(hive,fct_users_created)", + changeType="PATCH", + aspectName="globalTags", + aspect=GenericAspectClass( + value=b'[{"op": "add", "path": "/tags/urn:li:tag:test_tag", "value": {"tag": "urn:li:tag:test_tag"}}]', + contentType="application/json-patch+json", + ), + ), + ] + + +def test_basic_dashboard_patch_builder(): + patcher = DashboardPatchBuilder( + make_dashboard_urn(platform="hive", name="fct_users_created") + ).add_tag(TagAssociationClass(tag=make_tag_urn("test_tag"))) + + assert patcher.build() == [ + MetadataChangeProposalClass( + entityType="dashboard", + entityUrn="urn:li:dashboard:(hive,fct_users_created)", + changeType="PATCH", + aspectName="globalTags", + aspect=GenericAspectClass( + value=b'[{"op": "add", "path": "/tags/urn:li:tag:test_tag", "value": {"tag": "urn:li:tag:test_tag"}}]', + contentType="application/json-patch+json", + ), + ), + ] diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java new file mode 100644 index 0000000000000..0655d2b3eb8eb --- /dev/null +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java @@ -0,0 +1,41 
diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java
new file mode 100644
index 0000000000000..0655d2b3eb8eb
--- /dev/null
+++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java
@@ -0,0 +1,41 @@
+package datahub.client.patch.chart;
+
+import static com.linkedin.metadata.Constants.*;
+import static datahub.client.patch.common.PatchUtil.*;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.linkedin.common.urn.Urn;
+import datahub.client.patch.AbstractMultiFieldPatchBuilder;
+import datahub.client.patch.PatchOperationType;
+import javax.annotation.Nonnull;
+import org.apache.commons.lang3.tuple.ImmutableTriple;
+
+public class ChartInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<ChartInfoPatchBuilder> {
+  private static final String INPUT_EDGES_PATH_START = "/inputEdges/";
+
+  // Simplified with just Urn
+  public ChartInfoPatchBuilder addInputEdge(@Nonnull Urn urn) {
+    ObjectNode value = createEdgeValue(urn);
+
+    pathValues.add(
+        ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_EDGES_PATH_START + urn, value));
+    return this;
+  }
+
+  public ChartInfoPatchBuilder removeInputEdge(@Nonnull Urn urn) {
+    pathValues.add(
+        ImmutableTriple.of(
+            PatchOperationType.REMOVE.getValue(), INPUT_EDGES_PATH_START + urn, null));
+    return this;
+  }
+
+  @Override
+  protected String getAspectName() {
+    return CHART_INFO_ASPECT_NAME;
+  }
+
+  @Override
+  protected String getEntityType() {
+    return CHART_ENTITY_NAME;
+  }
+}
diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java
new file mode 100644
index 0000000000000..69db36c6e038c
--- /dev/null
+++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java
@@ -0,0 +1,84 @@
+package datahub.client.patch.common;
+
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.linkedin.common.Edge;
+import com.linkedin.common.urn.Urn;
+import javax.annotation.Nonnull;
+
+public class PatchUtil {
+  private PatchUtil() {}
+
+  private static final String TIME_KEY = "time";
+  private static final String ACTOR_KEY = "actor";
+  private static final String IMPERSONATOR_KEY = "impersonator";
+  private static final String MESSAGE_KEY = "message";
+  private static final String LAST_MODIFIED_KEY = "lastModified";
+  private static final String CREATED_KEY = "created";
+  private static final String DESTINATION_URN_KEY = "destinationUrn";
+  private static final String SOURCE_URN_KEY = "sourceUrn";
+
+  private static final String PROPERTIES_KEY = "properties";
+
+  public static ObjectNode createEdgeValue(@Nonnull Edge edge) {
+    ObjectNode value = instance.objectNode();
+
+    ObjectNode created = instance.objectNode();
+    if (edge.getCreated() == null) {
+      created.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR);
+    } else {
+      created
+          .put(TIME_KEY, edge.getCreated().getTime())
+          .put(ACTOR_KEY, edge.getCreated().getActor().toString());
+      if (edge.getCreated().getImpersonator() != null) {
+        created.put(IMPERSONATOR_KEY, edge.getCreated().getImpersonator().toString());
+      }
+      if (edge.getCreated().getMessage() != null) {
+        created.put(MESSAGE_KEY, edge.getCreated().getMessage());
+      }
+    }
+    value.set(CREATED_KEY, created);
+
+    ObjectNode lastModified = instance.objectNode();
+    if (edge.getLastModified() == null) {
+      lastModified.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR);
+    } else {
+      lastModified
+          .put(TIME_KEY, edge.getLastModified().getTime())
+          .put(ACTOR_KEY, edge.getLastModified().getActor().toString());
+      if (edge.getLastModified().getImpersonator() != null) {
+        lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString());
+      }
+      if (edge.getLastModified().getMessage() != null) {
+        lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage());
+      }
+    }
+    value.set(LAST_MODIFIED_KEY, lastModified);
+
+    if (edge.getProperties() != null) {
+      ObjectNode propertiesNode = instance.objectNode();
+      edge.getProperties().forEach((k, v) -> propertiesNode.set(k, instance.textNode(v)));
+      value.set(PROPERTIES_KEY, propertiesNode);
+    }
+
+    value.put(DESTINATION_URN_KEY, edge.getDestinationUrn().toString());
+    if (edge.getSourceUrn() != null) {
+      value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString());
+    }
+
+    return value;
+  }
+
+  public static ObjectNode createEdgeValue(@Nonnull Urn urn) {
+    ObjectNode value = instance.objectNode();
+    ObjectNode auditStamp = instance.objectNode();
+    auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR);
+
+    value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp);
+    value.set(CREATED_KEY, auditStamp);
+
+    return value;
+  }
+}
diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java
new file mode 100644
index 0000000000000..cadde582f1c64
--- /dev/null
+++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java
@@ -0,0 +1,103 @@
+package datahub.client.patch.dashboard;
+
+import static com.linkedin.metadata.Constants.*;
+import static datahub.client.patch.common.PatchUtil.*;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.linkedin.common.Edge;
+import com.linkedin.common.urn.ChartUrn;
+import com.linkedin.common.urn.DatasetUrn;
+import com.linkedin.common.urn.Urn;
+import datahub.client.patch.AbstractMultiFieldPatchBuilder;
+import datahub.client.patch.PatchOperationType;
+import javax.annotation.Nonnull;
+import org.apache.commons.lang3.tuple.ImmutableTriple;
+
+public class DashboardInfoPatchBuilder
+    extends AbstractMultiFieldPatchBuilder<DashboardInfoPatchBuilder> {
+  private static final String CHART_EDGES_PATH_START = "/chartEdges/";
+  private static final String DATASET_EDGES_PATH_START = "/datasetEdges/";
+
+  // Simplified with just Urn
+  public DashboardInfoPatchBuilder addChartEdge(@Nonnull ChartUrn urn) {
+    ObjectNode value = createEdgeValue(urn);
+
+    pathValues.add(
+        ImmutableTriple.of(PatchOperationType.ADD.getValue(), CHART_EDGES_PATH_START + urn, value));
+    return this;
+  }
+
+  public DashboardInfoPatchBuilder removeChartEdge(@Nonnull ChartUrn urn) {
+    pathValues.add(
+        ImmutableTriple.of(
+            PatchOperationType.REMOVE.getValue(), CHART_EDGES_PATH_START + urn, null));
+    return this;
+  }
+
+  public DashboardInfoPatchBuilder addDatasetEdge(@Nonnull DatasetUrn urn) {
+    ObjectNode value = createEdgeValue(urn);
+
+    pathValues.add(
+        ImmutableTriple.of(
+            PatchOperationType.ADD.getValue(), DATASET_EDGES_PATH_START + urn, value));
+    return this;
+  }
+
+  public DashboardInfoPatchBuilder removeDatasetEdge(@Nonnull DatasetUrn urn) {
+    pathValues.add(
+        ImmutableTriple.of(
+            PatchOperationType.REMOVE.getValue(), DATASET_EDGES_PATH_START + urn, null));
+    return this;
+  }
+
+  // Full Edge modification
+  public DashboardInfoPatchBuilder addEdge(@Nonnull Edge edge) {
+    ObjectNode value =
createEdgeValue(edge); + String path = getEdgePath(edge); + + pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), path, value)); + return this; + } + + public DashboardInfoPatchBuilder removeEdge(@Nonnull Edge edge) { + String path = getEdgePath(edge); + + pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), path, null)); + return this; + } + + /** + * Determines Edge path based on supplied Urn, if not a valid entity type throws + * IllegalArgumentException + * + * @param edge + * @return + * @throws IllegalArgumentException if destinationUrn is an invalid entity type + */ + private String getEdgePath(@Nonnull Edge edge) { + Urn destinationUrn = edge.getDestinationUrn(); + + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType())) { + return DATASET_EDGES_PATH_START + destinationUrn; + } + + if (CHART_ENTITY_NAME.equals(destinationUrn.getEntityType())) { + return CHART_EDGES_PATH_START + destinationUrn; + } + + // TODO: Output Data Jobs not supported by aspect, add here if this changes + + throw new IllegalArgumentException( + String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); + } + + @Override + protected String getAspectName() { + return DASHBOARD_INFO_ASPECT_NAME; + } + + @Override + protected String getEntityType() { + return DASHBOARD_ENTITY_NAME; + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java index 0fb0454533fc0..fc250daffe916 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java @@ -2,6 +2,7 @@ import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; import static com.linkedin.metadata.Constants.*; +import static datahub.client.patch.common.PatchUtil.*; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -20,21 +21,9 @@ public class DataJobInputOutputPatchBuilder private static final String INPUT_DATA_JOB_EDGES_PATH_START = "/inputDatajobEdges/"; private static final String INPUT_DATASET_EDGES_PATH_START = "/inputDatasetEdges/"; private static final String OUTPUT_DATASET_EDGES_PATH_START = "/outputDatasetEdges/"; - - private static final String DESTINATION_URN_KEY = "destinationUrn"; - private static final String SOURCE_URN_KEY = "sourceUrn"; - private static final String LAST_MODIFIED_KEY = "lastModified"; - private static final String CREATED_KEY = "created"; - private static final String PROPERTIES_KEY = "properties"; - private static final String INPUT_DATASET_FIELDS_PATH_START = "/inputDatasetFields/"; private static final String OUTPUT_DATASET_FIELDS_PATH_START = "/outputDatasetFields/"; - private static final String TIME_KEY = "time"; - private static final String ACTOR_KEY = "actor"; - private static final String IMPERSONATOR_KEY = "impersonator"; - private static final String MESSAGE_KEY = "message"; - // Simplified with just Urn public DataJobInputOutputPatchBuilder addInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { ObjectNode value = createEdgeValue(dataJobUrn); @@ -144,66 +133,6 @@ public DataJobInputOutputPatchBuilder removeEdge( return this; } - private ObjectNode createEdgeValue(@Nonnull Urn urn) { - ObjectNode 
value = instance.objectNode(); - ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - - value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp); - value.set(CREATED_KEY, auditStamp); - - return value; - } - - private ObjectNode createEdgeValue(@Nonnull Edge edge) { - ObjectNode value = instance.objectNode(); - - ObjectNode created = instance.objectNode(); - if (edge.getCreated() == null) { - created.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - } else { - created - .put(TIME_KEY, edge.getCreated().getTime()) - .put(ACTOR_KEY, edge.getCreated().getActor().toString()); - if (edge.getCreated().getImpersonator() != null) { - created.put(IMPERSONATOR_KEY, edge.getCreated().getImpersonator().toString()); - } - if (edge.getCreated().getMessage() != null) { - created.put(MESSAGE_KEY, edge.getCreated().getMessage()); - } - } - value.set(CREATED_KEY, created); - - ObjectNode lastModified = instance.objectNode(); - if (edge.getLastModified() == null) { - lastModified.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - } else { - lastModified - .put(TIME_KEY, edge.getLastModified().getTime()) - .put(ACTOR_KEY, edge.getLastModified().getActor().toString()); - if (edge.getLastModified().getImpersonator() != null) { - lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString()); - } - if (edge.getLastModified().getMessage() != null) { - lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage()); - } - } - value.set(LAST_MODIFIED_KEY, lastModified); - - if (edge.getProperties() != null) { - ObjectNode propertiesNode = instance.objectNode(); - edge.getProperties().forEach((k, v) -> propertiesNode.set(k, instance.textNode(v))); - value.set(PROPERTIES_KEY, propertiesNode); - } - - value.put(DESTINATION_URN_KEY, edge.getDestinationUrn().toString()); - if (edge.getSourceUrn() != null) { - value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString()); - } - - return value; - } - /** * Determines Edge path based on supplied Urn, if not a valid entity type throws * IllegalArgumentException diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index 563742990f546..5bd10245899e4 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -8,6 +8,7 @@ import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.OwnershipType; import com.linkedin.common.TagAssociation; +import com.linkedin.common.urn.ChartUrn; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DataPlatformUrn; @@ -22,7 +23,9 @@ import datahub.client.MetadataWriteResponse; import datahub.client.file.FileEmitter; import datahub.client.file.FileEmitterConfig; +import datahub.client.patch.chart.ChartInfoPatchBuilder; import datahub.client.patch.common.OwnershipPatchBuilder; +import datahub.client.patch.dashboard.DashboardInfoPatchBuilder; import datahub.client.patch.dataflow.DataFlowInfoPatchBuilder; import datahub.client.patch.datajob.DataJobInfoPatchBuilder; import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; @@ -551,4 +554,90 @@ public void testLocalDataJobInputAddEdge() { 
       System.out.println(Arrays.asList(e.getStackTrace()));
     }
   }
+
+  @Test
+  @Ignore
+  public void testLocalChartInfoAdd() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal chartInfoPatch =
+          new ChartInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:chart:(dashboardTool,chartId)"))
+              .addInputEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(chartInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
+
+  @Test
+  @Ignore
+  public void testLocalChartInfoRemove() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal chartInfoPatch =
+          new ChartInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:chart:(dashboardTool,chartId)"))
+              .removeInputEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(chartInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
+
+  @Test
+  @Ignore
+  public void testLocalDashboardInfoAdd() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal dashboardInfoPatch =
+          new DashboardInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:dashboard:(dashboardTool,dashboardId)"))
+              .addDatasetEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .addChartEdge(ChartUrn.createFromString("urn:li:chart:(dashboardTool, chartId)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(dashboardInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
+
+  @Test
+  @Ignore
+  public void testLocalDashboardInfoRemove() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal dashboardInfoPatch =
+          new DashboardInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:dashboard:(dashboardTool,dashboardId)"))
+              .removeDatasetEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .removeChartEdge(ChartUrn.createFromString("urn:li:chart:(dashboardTool, chartId)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(dashboardInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
 }

From 296e41dfed325116c2a5661c32ae27790b28aafd Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Wed, 3 Jan 2024 15:58:50 -0600
Subject: [PATCH 325/792] feat(docker): docker compose profiles updates
 (#9514)

Co-authored-by: Harshal Sheth
---
 docker/build.gradle                           |  7 ++++++-
 docker/profiles/README.md                     |  2 +-
 docker/profiles/docker-compose.actions.yml    |  2 +-
 docker/profiles/docker-compose.frontend.yml   |  4 ++--
docker/profiles/docker-compose.gms.yml | 16 ++++++++-------- .../profiles/docker-compose.prerequisites.yml | 18 +++++++++--------- docs/developers.md | 2 +- docs/how/updating-datahub.md | 3 ++- 8 files changed, 30 insertions(+), 24 deletions(-) diff --git a/docker/build.gradle b/docker/build.gradle index 190202620c382..189c4959e0442 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -31,6 +31,11 @@ ext { pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] } +tasks.register('minDockerCompose2.20', Exec) { + executable 'bash' + args '-c', 'echo -e "$(docker compose version --short)\n2.20"|sort --version-sort --check=quiet --reverse' +} + tasks.register('quickstart') {} tasks.register('quickstartSlim') {} tasks.register('quickstartDebug') {} @@ -118,9 +123,9 @@ tasks.getByName('quickstartDebugComposeUp').dependsOn( ) tasks.withType(ComposeUp).configureEach { shouldRunAfter('quickstartNuke') + dependsOn tasks.named("minDockerCompose2.20") } - task debugReload(type: Exec) { def cmd = ['docker compose -p datahub --profile debug'] + compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") diff --git a/docker/profiles/README.md b/docker/profiles/README.md index df09f15cd85ce..fb3c9e3c84a7a 100644 --- a/docker/profiles/README.md +++ b/docker/profiles/README.md @@ -5,7 +5,7 @@ for quickstart use-cases as well as development use-cases. These configurations infrastructure configurations that DataHub can operate on. Requirements: -* Use the profiles requires a modern version of docker. +* Using profiles requires docker compose >= 2.20. * If using the debug/development profiles, you will need to have built the `debug` docker images locally. See the Development Profiles section for more details. 
```bash diff --git a/docker/profiles/docker-compose.actions.yml b/docker/profiles/docker-compose.actions.yml index a509a6a67d270..676a72bae3201 100644 --- a/docker/profiles/docker-compose.actions.yml +++ b/docker/profiles/docker-compose.actions.yml @@ -1,7 +1,7 @@ x-datahub-actions-service: &datahub-actions-service hostname: actions - image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} + image: ${DATAHUB_ACTIONS_IMAGE:-${DATAHUB_ACTIONS_REPO:-acryldata}/datahub-actions}:${ACTIONS_VERSION:-head} env_file: datahub-actions/env/docker.env environment: ACTIONS_EXTRA_PACKAGES: ${ACTIONS_EXTRA_PACKAGES:-} diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml index 80cb4e7b4b596..6e1bbc0be70f5 100644 --- a/docker/profiles/docker-compose.frontend.yml +++ b/docker/profiles/docker-compose.frontend.yml @@ -1,7 +1,7 @@ x-datahub-frontend-service: &datahub-frontend-service hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 env_file: datahub-frontend/env/docker.env @@ -12,7 +12,7 @@ x-datahub-frontend-service: &datahub-frontend-service x-datahub-frontend-service-dev: &datahub-frontend-service-dev <<: *datahub-frontend-service - image: linkedin/datahub-frontend-react:debug + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:debug ports: - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index 01602c8b906b9..93072a76d4041 100644 --- a/docker/profiles/docker-compose.gms.yml +++ b/docker/profiles/docker-compose.gms.yml @@ -54,7 +54,7 @@ x-datahub-dev-telemetry-env: &datahub-dev-telemetry-env ################################# x-datahub-system-update-service: &datahub-system-update-service hostname: datahub-system-update - image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_UPGRADE_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-upgrade}:${DATAHUB_VERSION:-head} command: - -u - SystemUpdate @@ -67,7 +67,7 @@ x-datahub-system-update-service: &datahub-system-update-service x-datahub-system-update-service-dev: &datahub-system-update-service-dev <<: *datahub-system-update-service - image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:debug + image: ${DATAHUB_UPGRADE_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-upgrade}:debug ports: - ${DATAHUB_MAPPED_UPGRADE_DEBUG_PORT:-5003}:5003 environment: &datahub-system-update-dev-env @@ -85,7 +85,7 @@ x-datahub-system-update-service-dev: &datahub-system-update-service-dev ################################# x-datahub-gms-service: &datahub-gms-service hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 env_file: datahub-gms/env/docker.env @@ -102,7 +102,7 @@ x-datahub-gms-service: &datahub-gms-service x-datahub-gms-service-dev: &datahub-gms-service-dev <<: *datahub-gms-service - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:debug + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:debug ports: - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 - 
${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 @@ -128,7 +128,7 @@ x-datahub-gms-service-dev: &datahub-gms-service-dev ################################# x-datahub-mae-consumer-service: &datahub-mae-consumer-service hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 env_file: datahub-mae-consumer/env/docker.env @@ -137,7 +137,7 @@ x-datahub-mae-consumer-service: &datahub-mae-consumer-service x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev <<: *datahub-mae-consumer-service - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:debug + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:debug environment: <<: [*datahub-dev-telemetry-env, *datahub-mae-consumer-env] volumes: @@ -151,7 +151,7 @@ x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev ################################# x-datahub-mce-consumer-service: &datahub-mce-consumer-service hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 env_file: datahub-mce-consumer/env/docker.env @@ -160,7 +160,7 @@ x-datahub-mce-consumer-service: &datahub-mce-consumer-service x-datahub-mce-consumer-service-dev: &datahub-mce-consumer-service-dev <<: *datahub-mce-consumer-service - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:debug + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:debug environment: <<: [*datahub-dev-telemetry-env, *datahub-mce-consumer-env] volumes: diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml index d90d4a252f993..232239c6c70d0 100644 --- a/docker/profiles/docker-compose.prerequisites.yml +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -128,7 +128,7 @@ services: container_name: mysql-setup profiles: *mysql-profiles-quickstart hostname: mysql-setup - image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mysql-setup}:${DATAHUB_VERSION:-head} env_file: mysql-setup/env/docker.env depends_on: mysql: @@ -139,7 +139,7 @@ services: <<: *mysql-setup container_name: mysql-setup-dev profiles: *mysql-profiles-dev - image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:debug + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mysql-setup}:debug postgres: container_name: postgres profiles: *postgres-profiles @@ -162,7 +162,7 @@ services: container_name: postgres-setup profiles: *postgres-profiles-quickstart hostname: postgres-setup - image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-postgres-setup}:${DATAHUB_VERSION:-head} env_file: postgres-setup/env/docker.env depends_on: postgres: @@ -173,7 +173,7 @@ services: <<: *postgres-setup container_name: postgres-setup-dev profiles: *postgres-profiles-dev - image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:debug + image: 
${DATAHUB_POSTGRES_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-postgres-setup}:debug cassandra: container_name: cassandra profiles: *cassandra-profiles @@ -267,7 +267,7 @@ services: container_name: kafka-setup profiles: *profiles-quickstart hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-kafka-setup}:${DATAHUB_VERSION:-head} env_file: kafka-setup/env/docker.env environment: &kafka-setup-env DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-false} @@ -285,7 +285,7 @@ services: environment: <<: *kafka-setup-env DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-true} - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:debug + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-kafka-setup}:debug elasticsearch: container_name: elasticsearch profiles: *elasticsearch-profiles @@ -311,7 +311,7 @@ services: - esdata:/usr/share/elasticsearch/data elasticsearch-setup-dev: &elasticsearch-setup-dev container_name: elasticsearch-setup-dev - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:debug profiles: *elasticsearch-profiles hostname: elasticsearch-setup env_file: elasticsearch-setup/env/docker.env @@ -351,7 +351,7 @@ services: container_name: opensearch-setup profiles: *opensearch-profiles-quickstart hostname: opensearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} environment: <<: *search-datastore-environment USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} @@ -365,7 +365,7 @@ services: container_name: opensearch-setup-dev profiles: *opensearch-profiles-dev hostname: opensearch-setup-dev - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:debug environment: <<: *search-datastore-environment USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} diff --git a/docs/developers.md b/docs/developers.md index fe007a56ddc68..4e31aceeb4382 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -9,7 +9,7 @@ title: "Local Development" - [Java 17 JDK](https://openjdk.org/projects/jdk/17/) - [Python 3.10](https://www.python.org/downloads/release/python-3100/) - [Docker](https://www.docker.com/) -- [Docker Compose](https://docs.docker.com/compose/) +- [Docker Compose >=2.20](https://docs.docker.com/compose/) - Docker engine with at least 8GB of memory to run tests. ::: diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 61ad2d623d72a..fb082bea7d151 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -8,7 +8,8 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. 
- Neo4j 5.x, may require migration from 4.x -- Build now requires JDK17 (Runtime Java 11) +- Build requires JDK17 (Runtime Java 11) +- Build requires Docker Compose > 2.20 ### Potential Downtime From 424057862790b520e6d6e7d9d0a04f52aa46e500 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 17:16:16 -0500 Subject: [PATCH 326/792] feat(ui): switch to vite and vitest (#9451) --- .github/workflows/metadata-io.yml | 3 - .github/workflows/spark-smoke-test.yml | 3 +- build.gradle | 4 + datahub-frontend/build.gradle | 18 - datahub-frontend/conf/routes | 11 +- datahub-web-react/.env | 4 +- datahub-web-react/.eslintrc.js | 3 +- datahub-web-react/build.gradle | 66 +- datahub-web-react/craco.config.js | 75 - datahub-web-react/datahub-frontend.graphql | 389 - datahub-web-react/{public => }/index.html | 14 +- datahub-web-react/package.json | 59 +- .../public/{ => assets}/favicon.ico | Bin .../public/{ => assets}/logo.png | Bin datahub-web-react/public/manifest.json | 2 +- datahub-web-react/src/App.less | 5 +- datahub-web-react/src/App.test.tsx | 15 +- datahub-web-react/src/App.tsx | 35 +- datahub-web-react/src/Mocks.tsx | 12 + datahub-web-react/src/app/Routes.tsx | 4 +- .../src/app/analytics/analytics.ts | 2 +- .../src/app/domain/DomainIcon.tsx | 2 +- .../src/app/entity/dataJob/tabs/RunsTab.tsx | 2 +- .../entity/dataset/profile/OperationsTab.tsx | 2 +- .../dataset/profile/__tests__/Schema.test.tsx | 70 +- .../__tests__/SchemaDescriptionField.test.tsx | 4 +- .../__tests__/PlatformContent.test.tsx | 6 +- .../embed/UpstreamHealth/FailingEntity.tsx | 2 +- .../embed/UpstreamHealth/UpstreamHealth.tsx | 2 +- .../__tests__/DocumentationTab.test.tsx | 8 +- .../editor/__tests__/Editor.test.tsx | 2 +- .../Entity/__tests__/DataJobFlowTab.test.tsx | 6 +- .../entity/user/__tests__/UserHeader.test.tsx | 11 - .../ingest/source/builder/RecipeBuilder.tsx | 8 +- .../source/builder/RecipeForm/FormField.tsx | 8 +- .../source/builder/RecipeForm/RecipeForm.tsx | 8 +- .../RecipeForm/SecretField/SecretField.tsx | 16 +- .../TestConnection/TestConnectionModal.tsx | 2 +- .../app/ingest/source/builder/YamlEditor.tsx | 3 +- .../lineage/__tests__/LineageEdges.test.tsx | 22 +- .../__tests__/LineageEntityView.test.tsx | 2 +- .../lineage/__tests__/LineageTree.test.tsx | 12 +- .../policy/_tests_/policyUtils.test.tsx | 175 +- .../src/app/preview/DefaultPreviewCard.tsx | 4 +- .../__tests__/Recommendations.test.tsx | 1 + .../src/app/search/ToggleSidebarButton.tsx | 4 +- .../__tests__/FilterRendererRegistry.test.tsx | 6 +- .../src/app/search/filters/utils.tsx | 2 +- .../src/app/search/sidebar/EntityLink.tsx | 2 +- .../app/search/sorting/SearchSortSelect.tsx | 2 +- datahub-web-react/src/conf/Global.ts | 1 - .../src/conf/theme/global-variables.less | 26 +- .../src/graphql-mock/createServer.ts | 12 - datahub-web-react/src/graphql-mock/server.ts | 84 - datahub-web-react/src/index.tsx | 3 +- datahub-web-react/src/react-app-env.d.ts | 1 - datahub-web-react/src/setupProxy.js | 37 - datahub-web-react/src/setupTests.ts | 21 +- .../utils/test-utils/TestPageContainer.tsx | 2 +- datahub-web-react/src/vite-env.d.ts | 2 + datahub-web-react/tsconfig.json | 5 +- datahub-web-react/vite.config.ts | 100 + datahub-web-react/yarn.lock | 8860 +++-------------- smoke-test/tests/cypress/package-lock.json | 2031 ---- .../tests/read_only/test_services_up.py | 2 +- 65 files changed, 1905 insertions(+), 10400 deletions(-) delete mode 100644 datahub-web-react/craco.config.js delete mode 100644 datahub-web-react/datahub-frontend.graphql rename 
datahub-web-react/{public => }/index.html (66%) rename datahub-web-react/public/{ => assets}/favicon.ico (100%) rename datahub-web-react/public/{ => assets}/logo.png (100%) delete mode 100644 datahub-web-react/src/graphql-mock/createServer.ts delete mode 100644 datahub-web-react/src/graphql-mock/server.ts delete mode 100644 datahub-web-react/src/react-app-env.d.ts delete mode 100644 datahub-web-react/src/setupProxy.js create mode 100644 datahub-web-react/src/vite-env.d.ts create mode 100644 datahub-web-react/vite.config.ts delete mode 100644 smoke-test/tests/cypress/package-lock.json diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 96229642244b6..c964352c3e129 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -40,9 +40,6 @@ jobs: python-version: "3.10" cache: "pip" - name: Gradle build (and test) - # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs - # running build first without datahub-web-react:yarnBuild and then with it is 100% stable - # datahub-frontend:unzipAssets depends on datahub-web-react:yarnBuild but gradle does not know about it run: | ./gradlew :metadata-io:test - uses: actions/upload-artifact@v3 diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 94692bd3c2336..bd99905a513d6 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -51,8 +51,7 @@ jobs: -x :datahub-web-react:yarnLint \ -x :datahub-web-react:yarnGenerate \ -x :datahub-web-react:yarnInstall \ - -x :datahub-web-react:yarnQuickBuild \ - -x :datahub-web-react:copyAssets \ + -x :datahub-web-react:yarnBuild \ -x :datahub-web-react:distZip \ -x :datahub-web-react:jar - uses: actions/upload-artifact@v3 diff --git a/build.gradle b/build.gradle index bb01a15a7db8d..4680598165d28 100644 --- a/build.gradle +++ b/build.gradle @@ -325,6 +325,10 @@ subprojects { } plugins.withType(JavaPlugin).configureEach { + if (project.name == 'datahub-web-react') { + return + } + dependencies { implementation externalDependency.annotationApi constraints { diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 437c72e6394ea..1174c5c5cfd5d 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -1,5 +1,4 @@ plugins { - id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" id 'scala' id 'com.palantir.docker' id 'org.gradle.playframework' @@ -39,23 +38,6 @@ artifacts { archives myTar } -graphqlCodegen { - // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = ["$projectDir/conf/datahub-frontend.graphql".toString()] - outputDir = new File("$projectDir/app/graphql") - packageName = "generated" - generateApis = true - modelValidationAnnotation = "" - customTypesMapping = [ - Long: "Long", - ] -} - -tasks.withType(Checkstyle) { - exclude "**/generated/**" -} - - /* PLAY UPGRADE NOTE Generates the distribution jars under the expected names. 
 The playFramework plugin only accepts certain name values
diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes
index 3102c26497fed..6b53a2789e7cc 100644
--- a/datahub-frontend/conf/routes
+++ b/datahub-frontend/conf/routes
@@ -36,11 +36,14 @@ PUT /openapi/*path c
 HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request)
 PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request)
 
-# Map static resources from the /public folder to the /assets URL path
-GET /assets/*file controllers.Assets.at(path="/public", file)
-
 # Analytics route
 POST /track controllers.TrackingController.track(request: Request)
 
-# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle
+# Known React asset routes
+GET /assets/*file controllers.Assets.at(path="/public/assets", file)
+GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file)
+GET /manifest.json controllers.Assets.at(path="/public", file="manifest.json")
+GET /robots.txt controllers.Assets.at(path="/public", file="robots.txt")
+
+# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle's index.html
 GET /*path controllers.Application.index(path)
diff --git a/datahub-web-react/.env b/datahub-web-react/.env
index e5529bbdaa56d..7c02340752104 100644
--- a/datahub-web-react/.env
+++ b/datahub-web-react/.env
@@ -1,5 +1,3 @@
-PUBLIC_URL=/assets
 REACT_APP_THEME_CONFIG=theme_light.config.json
 SKIP_PREFLIGHT_CHECK=true
-BUILD_PATH=build/yarn
-REACT_APP_PROXY_TARGET=http://localhost:9002
\ No newline at end of file
+REACT_APP_PROXY_TARGET=http://localhost:9002
diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js
index 2806942dd1053..e48dfdb23a4e7 100644
--- a/datahub-web-react/.eslintrc.js
+++ b/datahub-web-react/.eslintrc.js
@@ -5,7 +5,7 @@ module.exports = {
         'airbnb-typescript',
         'airbnb/hooks',
         'plugin:@typescript-eslint/recommended',
-        'plugin:jest/recommended',
+        'plugin:vitest/recommended',
         'prettier',
     ],
     plugins: ['@typescript-eslint'],
@@ -46,6 +46,7 @@ module.exports = {
                 argsIgnorePattern: '^_',
             },
         ],
+        'vitest/prefer-to-be': 'off',
     },
     settings: {
         react: {
diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle
index 72821d8b97dc0..c0355b935137a 100644
--- a/datahub-web-react/build.gradle
+++ b/datahub-web-react/build.gradle
@@ -19,7 +19,7 @@ node {
   version = '21.2.0'
 
   // Version of Yarn to use.
-  yarnVersion = '1.22.1'
+  yarnVersion = '1.22.21'
 
   // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror).
   if (project.hasProperty('nodeDistBaseUrl')) {
@@ -44,10 +44,33 @@ node {
  */
 task yarnInstall(type: YarnTask) {
   args = ['install']
+
+  // The node_modules directory can contain built artifacts, so
+  // it's not really safe to cache it.
+ outputs.cacheIf { false } + + inputs.files( + file('yarn.lock'), + file('package.json'), + ) + outputs.dir('node_modules') } task yarnGenerate(type: YarnTask, dependsOn: yarnInstall) { args = ['run', 'generate'] + + outputs.cacheIf { true } + + inputs.files( + yarnInstall.inputs.files, + file('codegen.yml'), + project.fileTree(dir: "../datahub-graphql-core/src/main/resources/", include: "*.graphql"), + project.fileTree(dir: "src", include: "**/*.graphql"), + ) + + outputs.files( + project.fileTree(dir: "src", include: "**/*.generated.ts"), + ) } task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -55,7 +78,8 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { } task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - args = ['run', 'test', '--watchAll', 'false'] + // Explicitly runs in non-watch mode. + args = ['run', 'test', 'run'] } task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -68,13 +92,24 @@ task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { args = ['run', 'lint-fix'] } -task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnTest, yarnLint]) { - args = ['run', 'build'] -} - -task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { +task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] args = ['run', 'build'] + + outputs.cacheIf { true } + inputs.files( + file('index.html'), + project.fileTree(dir: "src"), + project.fileTree(dir: "public"), + + yarnInstall.inputs.files, + yarnGenerate.outputs.files, + + file('.env'), + file('vite.config.ts'), + file('tsconfig.json'), + ) + outputs.dir('dist') } task cleanExtraDirs { @@ -82,9 +117,8 @@ task cleanExtraDirs { delete 'dist' delete 'tmp' delete 'just' - delete 'src/types.generated.ts' delete fileTree('../datahub-frontend/public') - delete fileTree(dir: 'src/graphql', include: '*.generated.ts') + delete fileTree(dir: 'src', include: '*.generated.ts') } clean.finalizedBy(cleanExtraDirs) @@ -93,24 +127,16 @@ configurations { } distZip { - dependsOn yarnQuickBuild + dependsOn yarnBuild archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } -task copyAssets(dependsOn: distZip) { - doLast { - copy { - from zipTree(distZip.outputs.files.first()) - into "../datahub-frontend/public" - } - } -} - jar { - dependsOn distZip, copyAssets + dependsOn distZip into('public') { from zipTree(distZip.outputs.files.first()) } archiveClassifier = 'assets' } +build.dependsOn jar diff --git a/datahub-web-react/craco.config.js b/datahub-web-react/craco.config.js deleted file mode 100644 index 6ede45902128f..0000000000000 --- a/datahub-web-react/craco.config.js +++ /dev/null @@ -1,75 +0,0 @@ -/* eslint-disable @typescript-eslint/no-var-requires */ -require('dotenv').config(); -const { whenProd } = require('@craco/craco'); -const CracoAntDesignPlugin = require('craco-antd'); -const path = require('path'); -const CopyWebpackPlugin = require('copy-webpack-plugin'); - -// eslint-disable-next-line import/no-dynamic-require -const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`); - -function addLessPrefixToKeys(styles) { - const output = {}; - Object.keys(styles).forEach((key) => { - output[`@${key}`] = styles[key]; - }); - return output; -} - -module.exports = { - webpack: { - configure: { - optimization: whenProd(() => ({ - splitChunks: { - cacheGroups: { - 
vendor: { - test: /[\\/]node_modules[\\/]/, - name: 'vendors', - chunks: 'all', - }, - }, - }, - })), - // Webpack 5 no longer automatically pollyfill core Node.js modules - resolve: { fallback: { fs: false } }, - // Ignore Webpack 5's missing source map warnings from node_modules - ignoreWarnings: [{ module: /node_modules/, message: /source-map-loader/ }], - }, - plugins: { - add: [ - // Self host images by copying them to the build directory - new CopyWebpackPlugin({ - patterns: [{ from: 'src/images', to: 'platforms' }], - }), - // Copy monaco-editor files to the build directory - new CopyWebpackPlugin({ - patterns: [ - { from: 'node_modules/monaco-editor/min/vs/', to: 'monaco-editor/vs' }, - { from: 'node_modules/monaco-editor/min-maps/vs/', to: 'monaco-editor/min-maps/vs' }, - ], - }), - ], - }, - }, - plugins: [ - { - plugin: CracoAntDesignPlugin, - options: { - customizeThemeLessPath: path.join(__dirname, 'src/conf/theme/global-variables.less'), - customizeTheme: addLessPrefixToKeys(themeConfig.styles), - }, - }, - ], - jest: { - configure: { - // Use dist files instead of source files - moduleNameMapper: { - '^d3-interpolate-path': `d3-interpolate-path/build/d3-interpolate-path`, - '^d3-(.*)$': `d3-$1/dist/d3-$1`, - '^lib0/((?!dist).*)$': 'lib0/dist/$1.cjs', - '^y-protocols/(.*)$': 'y-protocols/dist/$1.cjs', - '\\.(css|less)$': '/src/__mocks__/styleMock.js', - }, - }, - }, -}; diff --git a/datahub-web-react/datahub-frontend.graphql b/datahub-web-react/datahub-frontend.graphql deleted file mode 100644 index 6df3c387e14fe..0000000000000 --- a/datahub-web-react/datahub-frontend.graphql +++ /dev/null @@ -1,389 +0,0 @@ -scalar Long - -schema { - query: Query - mutation: Mutation -} - -type Query { - dataset(urn: String!): Dataset - user(urn: String!): CorpUser - search(input: SearchInput!): SearchResults - autoComplete(input: AutoCompleteInput!): AutoCompleteResults - browse(input: BrowseInput!): BrowseResults - browsePaths(input: BrowsePathsInput!): [[String!]!] -} - -type Mutation { - logIn(username: String!, password: String!): CorpUser - updateDataset(input: DatasetUpdateInput!): Dataset -} - -input DatasetUpdateInput { - urn: String! - ownership: OwnershipUpdate -} - -input OwnershipUpdate { - owners: [OwnerUpdate!] -} - -input OwnerUpdate { - # The owner URN, eg urn:li:corpuser:1 - owner: String! - - # The owner role type - type: OwnershipType! -} - -enum OwnershipSourceType { - AUDIT - DATABASE - FILE_SYSTEM - ISSUE_TRACKING_SYSTEM - MANUAL - SERVICE - SOURCE_CONTROL - OTHER -} - -type OwnershipSource { - """ - The type of the source - """ - type: OwnershipSourceType! - - """ - A reference URL for the source - """ - url: String -} - -enum OwnershipType { - """ - A person or group that is in charge of developing the code - """ - DEVELOPER - - """ - A person or group that is owning the data - """ - DATAOWNER - - """ - A person or a group that overseas the operation, e.g. a DBA or SRE. - """ - DELEGATE - - """ - A person, group, or service that produces/generates the data - """ - PRODUCER - - """ - A person, group, or service that consumes the data - """ - CONSUMER - - """ - A person or a group that has direct business interest - """ - STAKEHOLDER -} - -type Owner { - """ - Owner object - """ - owner: CorpUser! - - """ - The type of the ownership - """ - type: OwnershipType - - """ - Source information for the ownership - """ - source: OwnershipSource -} - -type Ownership { - owners: [Owner!] - - lastModified: Long! 
-} - -enum FabricType { - """ - Designates development fabrics - """ - DEV - - """ - Designates early-integration (staging) fabrics - """ - EI - - """ - Designates production fabrics - """ - PROD - - """ - Designates corporation fabrics - """ - CORP -} - -enum PlatformNativeType { - """ - Table - """ - TABLE - - """ - View - """ - VIEW - - """ - Directory in file system - """ - DIRECTORY - - """ - Stream - """ - STREAM - - """ - Bucket in key value store - """ - BUCKET -} - -type PropertyTuple { - key: String! - value: String -} - -type SubTypes { - typeNames: [String!] -} - -type Dataset { - urn: String! - - platform: String! - - name: String! - - origin: FabricType! - - description: String - - uri: String - - platformNativeType: PlatformNativeType - - tags: [String!]! - - properties: [PropertyTuple!] - - createdTime: Long! - - modifiedTime: Long! - - ownership: Ownership - - subTypes: SubTypes -} - -type CorpUserInfo { - active: Boolean! - - displayName: String - - email: String! - - title: String - - manager: CorpUser - - departmentId: Long - - departmentName: String - - firstName: String - - lastName: String - - fullName: String - - countryCode: String -} - -type CorpUserEditableInfo { - aboutMe: String - - teams: [String!] - - skills: [String!] - - pictureLink: String -} - -type CorpUser { - urn: String! - - username: String! - - info: CorpUserInfo - - editableInfo: CorpUserEditableInfo -} - -type CorpGroup implements Entity { - """ - The unique user URN - """ - urn: String! - - """ - GMS Entity Type - """ - type: EntityType! - - """ - group name e.g. wherehows-dev, ask_metadata - """ - name: String - - """ - Information of the corp group - """ - info: CorpGroupInfo -} - - -type CorpGroupInfo { - """ - email of this group - """ - email: String! - - """ - owners of this group - """ - admins: [String!]! - - """ - List of ldap urn in this group. - """ - members: [String!]! - - """ - List of groups in this group. - """ - groups: [String!]! -} - -enum EntityType { - DATASET - USER - DATA_FLOW - DATA_JOB - CORP_USER - CORP_GROUP -} - -# Search Input -input SearchInput { - type: EntityType! - query: String! - start: Int - count: Int - filters: [FacetFilterInput!] -} - -input FacetFilterInput { - field: String! # Facet Field Name - value: String! # Facet Value -} - -# Search Output -type SearchResults { - start: Int! - count: Int! - total: Int! - elements: [SearchResult!]! - facets: [FacetMetadata!] -} - -union SearchResult = Dataset | CorpUser - -type FacetMetadata { - field: String! - aggregations: [AggregationMetadata!]! -} - -type AggregationMetadata { - value: String! - count: Long! -} - -# Autocomplete Input -input AutoCompleteInput { - type: EntityType! - query: String! - field: String # Field name - limit: Int - filters: [FacetFilterInput!] -} - -# Autocomplete Output -type AutoCompleteResults { - query: String! - suggestions: [String!]! -} - -# Browse Inputs -input BrowseInput { - type: EntityType! - path: [String!] - start: Int - count: Int - filters: [FacetFilterInput!] -} - -# Browse Output -type BrowseResults { - entities: [BrowseResultEntity!]! - start: Int! - count: Int! - total: Int! - metadata: BrowseResultMetadata! -} - -type BrowseResultEntity { - name: String! - urn: String! -} - -type BrowseResultMetadata { - path: [String!] - groups: [BrowseResultGroup!]! - totalNumEntities: Long! -} - -type BrowseResultGroup { - name: String! - count: Long! -} - -# Browse Paths Input -input BrowsePathsInput { - type: EntityType! - urn: String! 
-} diff --git a/datahub-web-react/public/index.html b/datahub-web-react/index.html similarity index 66% rename from datahub-web-react/public/index.html rename to datahub-web-react/index.html index ead3a0aba82cb..9490881246e12 100644 --- a/datahub-web-react/public/index.html +++ b/datahub-web-react/index.html @@ -2,7 +2,7 @@ - + @@ -10,21 +10,13 @@ manifest.json provides metadata used when your web app is installed on a user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/ --> - - + DataHub
[remainder of patch omitted: the remaining hunks listed in the diffstat above (index.html, manifest.json, src/**, vite.config.ts, yarn.lock, smoke-test) were garbled into unrecoverable binary data]
z-g4l$HpqWop~0Ry?h!QNmIF;7J~?YmHpL0!gE#Isoq7I7{iN}vryn|COM~T6}h* zKE(~E3PA*GQ7xlc-BYXJvNi>ZjTmxLWexF|C5HO|2Wt&W4j)g%yRyFLO{eM)>~q`4 zp54BQKZ)F^4;zY%_|I*U5;=Mg&N3FbnAHA)K8a*)s&6`7%||1SbCZ`R4b1a$$Sg(Y@2_mBX|IWMAE3Ed-rv(r*~Flwr}6$xV4sAUp1 zQ!FHHsZFUpJo36cl`?UGm9n&q>;Wpg2)zbl$5DqC(D_`;7L;mB%bVwf#y-IDb zs~)wwR%NN7aV%0f0PkwzV|7o&$@sMM4$=L?jz9e%uQ36C&yBUYefUXa)my$ZF5jKCbGttSH17XKKq2O@f7Lu->T3h5#NBZS-Mm?N{V*L`*7Vp@57bX0t9(v`ZnoaI3qOh6q+7g*#mHYwi#L>sRcRHA zSvCzrCGlemawY>t`*tCgFlC-SDSsq6w9e@yw0WWiFp0MlrtJqU}9h=LM2D!ghk)KHxAZUU@M&j(wY- zB_lj;)05qJ5^5npqO4o~mBg%bw8DlD5j{Zn6Cz_jIPgo;?nh1un1vpDhw^vi9OvcT z^T$X@NeN!=c{`J@f7Qv;MbA5W&8DZHzwfqV`$M{z@Pe`lUt91U_g(m7=Qh0Ki=RYZ zmcR30K47P`yh3wq3fePZy%yV{_=NLOCmc8(eZoQbdID~hzF|5Ew@W|vl!G3(w?Z$* ze60Cq{Hp4bx8cI^;d=~QwttdZ>ua?On=DT`Z^QI`|LM$?KiR(d>YdY@f9N_~MDUYH zZJRm^Ev24#wWYkzf@}`FToyOtnA*y(x4wUap08*5X7k^vun%AOyeRClI1c#`H7K!cm8@GxNCN%Q6KaTecK{fiNxUU?os# zfT{3H%;}6^wVAI>VjR%W%Q9RjUxKxFALmS!Fic!HCgB3F%_fEC0y#`ytk9Gkhi)B> z^OwAop)9(Z&>G4%u3?EVBVcJ0H}InC@LU8M!w4q8Rm;Ytj^pD`J$U+;_=)68@i!9Q z%6aJ*aKhW>I@1g0TU61Pa_Pk#bZ@co~u7@?}gNgWg2=eFcLH5UU z<=JmIbbp*!4#3|i_R|SwBgTgwb-;A=@lU}0ruOckOpkYcuKn!2(+7WVD{kFU7mmJt zuv%V^`TEta(4O(SYX20mn4gYY!GA9ekzCx%PftMDE9Z z!g?cXGhhg|RnK2ezl`l-%`yC?NYb2?nY;KPA2!azZSnji!0ezHw5SC1n#VZufa8_( zeO~&s_tq!Tt*&sIj0N!+rp~s&KttpcjYT{PD@P24RyGOjt>SGiL@Nl5XwPKrSAt=S zSYJ}Z+vczoN0Uu$yS3uLsPtZGumOp3B^PZg7yUR8f0+R9l)2s0;TR`lq2;N? z)B@F_$wOhoBPVftmje%rwCjb*FmKplAl$Ga#NP`zpqP#Mb^cI8R8r~rM7XcO1E}~A zsw4N}NrC)-nBWe{$(r{cu3aS?gLDJ%c|2D7iH002M$NklnH|v7gF4W@B zh-^OLnzQp?eEo_S0OAXaF@M<=aa$X=$lswV3Y781LDthn6D7e_*IIuH9_#$Y+BQF) z$OjLz^U_Q4F02r3h&G_63Q`P{e68pLlFoZFtuQLE9ViK495ZP2!!Xr|nG-1j1F-8! zV~K->C`J`diBRIUXw7-^sX&E;qSwp)~YOL!%yntlkM519riD&>Z&H7T($6XN%_RXNiqb^*S z-ze`k6_2)P@;669N-x$B!!{-yzt&jSNP~Q{9ukvH@{in}8#Re3D+k>sT&xHeLbU)K zjI9x{P}Xqn?+Umy4iS?sah!JH2>^X!E{m6Rf}z=BOu0&4Wl-4|JaP^WM&1ki;Jx;I zEbg)T$>iwc50bloKCD)Oo%cSB&&k|9ee8X=PuE?3-(Z{_k)EoBT@e*}YPdvf6zh=n zi$Zc}>%|=NvoG2>y%tY48@Pk6rihUqmweF}c+S}j*TU;Q_J{HEzaTtwn)EOO$Bzi9H%k@K-x z=C!%>^^5%bYd$gIQ*PlG0f!+Ra>nME;gen}nfG&!WOWD^PM7gXD)<%Fd+;l)(aD_X z?um$Q@xypw;Y;h_)TLDhAyw)1;>a*R^;Az=*9e2=IUd(+oScNoNx3!TU`-xu!K~q5 zby!>RflJaJYi74GOKmAt#I7)qa>!=CPAV>q8<8#)eC$3J14uvQlsY#D6*cm)RxD(% z?mYhrPqHn4%GhR{#9`9Ak)HniqxDC3{^9wb+wLpCi^k=+XngvEcTHdXo9*p+D{XCH znk^A8T0bm+)`d%g0hflW(I(jkG7_?{uff8${Z}#~&w0ki>AQXk7ZE%+W0iL8W%o^Q z`O$0f_oq&SBpn@|6a%kIF)2A@-v6afG9&+Tz!WuH!@uXBx}`UNJD{1}t5}+xb^$IT zKXuB=KXqMy`Htx~f8u863RcQV1QDwOxwdA&l*d!p+lGarXJiz8tDcS9T6FVQ10ov1PHqO%!Ydk~ z3_=@xE%Jvjn<2d%g$B)5xKy5fJ~(N5gfg+BnvKsLW=eC99jp050xZL^zCX0PbAf;E|~(Xc99h;grI z`B+mfoZ5T^wQLbJh`|yMdyVRIzvR&AHLpJ%Z!lQxB69Vo@11__$FA2C17}-mM!-bc z-omu=&!qQ8GqsE=f!f=W*1qoBJy`Z`+5b&b;4?1VFunTqC$Icd7axuL)}O>hY* zFCuIeuLZHhFPlNA*GpNTWB#&kBum&6QvQkKa@t;o&7NFbtJyh1&T+6({kXT*VCB?@ zrF#*6^b<%y>=|T9DKTccL8F`iYPe?9?QM%7tq%Q4D?$Veo3{HZgIb7g-{3H0K(=P3 zv7*DkSJbE*xd`fo$CAZUVPLxi4P1SPQ5NC^TTST$Moxv+^VW5Rp_)Rr%HNaX5`*Np zmJl?yH!oUuP+x4*TbdDK%)YaLy?ljN2+4Elf?+I&cYq;$-;lks!q)USJYx64uRLlx z7ta*ohZI-+{qsQU|5roa%7uSy{^F~a75B9U#9o@?sen=*L) zTm5I{pRXSmPdGShu(o~XyXCwuI~0%1#YJSryZB!HDPKe$!eeK%oyXq%$-Oa9BHC6X z5FEXq!5T{D&;1nsGHRAk0;E(tEVU#yta$XrVY!pfGoQ9$de!SsS^1|fJ{tG8UcX6i zCzoF{sCc53W$yfiA-%?c6E)}Sx>*gfS@?n{|8Cu1zsWx#Tx&-=hwH(xp zu>hfntzj|})^h5@Y0jUTA(P@E$>^9qp_5AwN2}%_=7}Zeu=QGx4*P;NY&L|AW_eMh z=x`za)W)53Yx9?$MM;y%;`&ADzVjcP!-v)kN=ix7;rr%m3OVR{dZjJfQvRbaINop+8iS5Tzfg5DUUxet9Cpf|K08X^R%KXLlXZ`)NSQBcMRFA{Leh!8f-`v%o~)Y%nL<#fS8@?ZA)R{+L<4eJ#wwS_0*ko)`scK3l5u3#0T|G#P~$~%IhEdMTO7Vxb^z`^+Cr?*X*1&U$bMn z>B{@?Sroplb#ebo z!t%;U@_`rUd_v)O{x~ioxahCauKdLI={GOF0e{yC|Gs6&p(j&%%9JDPRxOEOx|sj& 
z`O8gr4_jmDb#Y_~qtXAl&pKp!`H$ivf@jaK(yqi)4d3xIn{W|vjh9t8sjx>h@bmf) z3@S(GFQze)N$1ov8LaI%DXN!wB{F;cmQ00aAhfoBYTtH8T`$fC*H0jgV5byO_>{Rj zI)_46H$)f4Ycj;^zRIZMD!5h`WBFo_uee#9QPYwGDDlmYmh(pW?1d)B9hX?Nl3#*b zgSG(jQ*v(zPDH=~D*wbv-yF!19i&dCQk#b=Ct*+>*DiF$q<$Wr3@jXCMb!AqhB!7c zXnm_?Bun_vDsf~Dj5ku%ih!Z5QT;hs(x~IJ9e6?Eb4%DCbJ9W65yw1X zI{fGZ@Lln0zn@(^z^WHO$QN;(aeb>o}93LwGXu13j~6tXEv%Y{K~_pSN!N{ zD_=xD|FLb;TVH?U^vJ``!@O(%C_thpln$K}bVL63P#G6za$S~xuTT6Y&f5GNwtPPM z*@sRq`{9#T{;BKpf4zNr=NmU8*O8OsGbi!KA^Tr>ihno`*uo`$7&TD5Q=omJ8bykJ z7{G15S1fD%_69`Iq6AX ztYpaCS}XP;=N{Us0qxp}N=7Y1LIiB_Sr|!s;^p4qA(n>1Y8*w@{2+n&wmlHB8HSA{ z@z9U?(?NX1Z(^pm%gWDzdYtf+-{jR7zxlvG{1L5xw{HnCqgzS|CfCK*appsPaJ%JS zNj81Lrl$|`s`@zQOgYofaYnnY&fh1z&Wi}%4xJa6BcFKSbQt<0@S#TDru+$b3o74+ zsxSNzcpUGGxBWkePpR{H8T@(xpHR>I0eouxft?TQxfgs2-#xfs+_iO=o>rF=pqcTA z>`=<^PG1WmnDU3f`?A&#vr@_jH%`kF4u=XcSq3mpntA?q#3OxIO=Qap;|rgE#Po_EK6T~K`&{wY+oreu-#6$Yl3KV7L*N;KLa`pI4~ZxP@!}|1 zIBV7|b#A~`zU1!^%1R#EXpsiAd{+8*{17f8EB@4V`A4=-zx$@m6YqFHmYo;pW-E;n zNPfMaV)84udf~kw^j-#% z%NrJRX3t4BecZYCNr#qYKzj_cX`)ddr;0%nqk^HwK9&=4bGCvLvd|p2(r7w|f=+zg z>qXlT9*BQ16p35UKYSX04u+< z{KbPrmJ%7Lc z284#;1h`@ht|4fFrA)Mjp@$fT$wL0bKI5y8m|priTtxP5$^YP15cC|)HQ1(nvAV;a&Aw3-l5Y=e(=PVFPNYD zvwNm@|G&2?9{sDel!;Ih5Tb1q;QErA@TI=|1E!AWKydb6A9UP z!SjA2U@b3V_dEZ%mtD-ksGgI84ob;z&{#b!g0yvX2}dnuJ-8$kG(R2#WMCy6K#8Rr zV|nwaHU*_0$3gmnhF|e7^Kew5$)ECy9gFUXs>jY>=BeK3fKJftnhG|fISLPtp%(;) zF{y%!E98kaU;O?;;wlZIiCi7g(TqtOBrOP_~TG6H7(-2qaQM zI)34nov_KF$ty?kg2Pq|h3xs;bHxr93TrH%WQ70n6@$edv7^t)do6@ciGw>P38x&> zH{&hVs&RIB`wwnhfBKPw=9pv^MEqMU$kf1$yUva#34mZY$c`mGP_t9ml=T`JA!A?ra+ok0FDzUdDBZ1Mp+`&7e?Jq zBbm(2ih=Bjp-c9ie@RPD6?7FZX2#HZZf}XW0%f}v{(3=EeRcfnpof=v01h69en za8mDpjx(pPoDx$m!iBywmP1Z`;aS8d7T4eLW(c{J3-Kj^;?Dq#F~+HhJ#;^U(B9^B zpfAchik{sXry&o@hAkRos}%(e-Ubb(IOFNfoXUu>IESrt5Z#I!3-ZR5e1}%!MC=~g zai<-GzhNCXU2*BQmNO|S1Z~C7Q4%dnYhJ}*$Rqia9JNMp;*luTaBwfN4^$>fNn~!L zE!XXu?z!!u>B(Qd+C}87i#AOB#5ylP%thYB$$mqCBIZwka0XgXpja?ht0N%{<65Ig z?I$xhSKR6#rY2fq$BTnP5>qyIU2&V0iS2j6@h`ua!+_~@UP>t&K`OezS-Bha~uhG83qcfj1{D)WtQEbH;4&fu;-D%5zfNB`L<3f~j*nu$+u6YqjGta+W zOraan$E+%ig2qqNC!QfdWN)MjJwG7m#cOd^B1T1CZfW7 z;0tAVr*H8puKdAt^ELNR+irhwddiotb`d!qkJLR1cR0S{uXsmc6Oe1plH0&5bXy+` z@Zy$^WdkU@(>MNOEGlE?5svkZg691|PRx~WqB!qa>%WNXgJ1ET3;Y%Mn+R`t-?(ug zmW;u1r6K;<8|o#lL-4^8;{;T%<{;iiTlzLWdrdql|Ez!ZWb-$;X!i8O8pQ~h{hnGE zkdd`%LaRjSsU(;?xC?DHb^IbD;|eXT#wBR@;lvo%-I{J-!2dP~qscpuX6sK6xCH3r^AL1zpe<_VHe~%S}R`Y4iYcVT!Fhdl@<-9r2#0R=e zt;C-K2_Y?K5rw@0w1-C68Wsl%#}gxwY_88j51WRSJ4fmHcOHr*M{=jwM8{s12lf0FBa~V>xgXxCA6a|55xTg4WhV8;PFtCAAKbc&`ycNwwy{mXw;IM@Er~GIUzIo%+ryM+e^8NQHHaWGEFdaMmPynVyAzPH5DIp2X z=(?@t2xxNVX#(TMY&Hr%$HWaW{92ZOFpGVQ!JNh>q7YP{_Yq6VC%V=Sw!y>$=)uNAEw~cGCmXmK$-u6SPFJ zaTufa7-}|hE4rgqEaVFlr%&nbTlfo+q&X=wckwai&oQ}iFQbW1aJYc*Zy;ix%|SQ= zu&*yoBq%xbhQxV8D2TUV6(qi7YI_@FtjtBPE!hx;MEvCnC0C}SfR2t~#mK^oKTY|s z>1-4KO)PWhhWz7?7W`ThC&sdIp2Vb?A1vj72eg`MD0`kLA(px@0$OHq@$Wz(=zp|u zjspWa)qmTYe=MvU9nfCZ<)YWD2sK{z5xp*M4m1Nce()P?gN4^XV4e+J6;;~uD;LoO zX+8&Dx#&%UJ#t%9fNzCpaeT zZ~o%_xRv`O)03WYNL=i(KmLpZr%(KmpI2`$^_qj_xVB`Q^&yU9^A`if5w9Vl@Rh%? 
zMVc0|QeScsZH zf@FQgCr&l7MeNv9&DIJ-Tsi0krT4)~LmY&Sz0F*Z<`qg4P+D*9E7auCcn=r1@PR4WDH{y}OU-X6;y%usO@pmyP98JEn&nE1!cN z<}$@WKY_Gxl$s^EWF0g%4G0Hka=BV*SuDnsIC+UHC-ekl@lelk za2#SBzd2!PZgc544~9N-LE$)!`mEn99suIddJ~AO!kA;@Wx~O9e@VnqXE7lvbJtQc z$7VTX$pgOvv>P26EBV8LG1Oov9;|_dUoLHix5BArn(55ETa9EM$ zM=r~+R8m7Q?EmXOze5kdI_H_IT|`bf7q_gzN%jl4^@(B@mmU+No(wQ1S}@f``f!25 z{FAwJ&CLAey?ohynTx}SNoe+odHrQO^o;dmPd(7kJseNKqjW$1-n)F)f>!5bLgRS$ z`jLyIqG?&@Uk(f@+_3)#xqP@nILcYCo|_?RW4+mB%>eMQhCiH2HH%=U zmRp|_g21UmVxw;{X~eH!&~q-4aK(uNWke^m@swf>WkxJGOG2q!Rqst9SripZ0361e zC+`_I0;{Nu(PPxA<3?pCph@h_v8*e9^BZ!LMb^hy+Vd+h(tGvFA39+ayxT1-U#&a!T`ICR5*#@7539jiH-dO6%N7iL7=T+cQBn%FOi%*Os zaD*CG9<;WH#)DIUY@d@;tmv>eS;vrl#vKCh+=R3*pK=9k_|pk>;ZgX^CAPWKCKhq} zCbWaa(DVG^U;xlUG!&IHeA$irf(O(1HxNv!B@X+Hqg(8$5AXDG+47a3P97#;vUFA) z{C!{2Tv1fZM!o zSeK!knxK|LAG4lGu_{HNls2peS)}A?JI<(ihH3iBwvt6jEhXXv( zx?Z-4UV+?MNK?HK)z@9)5|}$2U+l!7;@HwhYQm0w(&E>~kav|JZ3>|55c83Wr!XZq zuJWLzlyiRxJH~IT2$S%aDI8=jBGebTW)CjJYA#MhHpZOH+M>PU;6VGtaz8Wkq;L$_ z;ssVl@h`v9C4WoeyYUBx8Q3n^@Mj=8!J#>{F811_M8L5*l?lzN#T%#L7kfb%h9G=N zk3Jh*LV<7)PRbsybTfa!r>E~EpfTflkhC~Y>~NGhVB@ zm6yTc5Wbm*rwWxi3=YgcKIVZdRS7c=|M;~%qRA%?O+hmUqKJId11M>|{^d_HV->6{ z|LpBsrayh_tr4xefBv^0GkyJQk1I|#6F(4`^Y{K`WbP@Gk@T-HgrW_@IXC#A?Y^!saB&CV_H1oXk)Y9 zSN=rPMm>k}gF}3mK;|`82tt}u8j(9)v94t3aU*#TD=Q)4H?cm^%9cjsmtg zhflh-y#i3_|1E!5NrVIIfq&V**=@;xUjyvW#9>Q2MQ-68&_bGsgk5985Y*v|w%Lf3 z_twXWGpcEyqqj&Dq`}G^Qjh%T?&ybC?<7!H$at!`(l31kk{$Ia!`tx^g<#46;H7`GQ z`np#g=a_U~*7EK0RZQyqk7DnC48+GqvI&1UvUxHo?!-gE8rU=u>BIiZ+pHt8O;_JP z-M-16^RROF$J5R~2ud>uN5*qZYS!-m=;$@;-WQQaJqvcW;qR(Lrl-u!LZkFo|Pn@}+wrW_b6{*gCxNFiIx-@tkOXf&7GXE{x+ zlN`{qsxedxBqagx=zlZ*484P#k=<8ovNyVuJaQ>ZNIei~*jERPWbKpqmX$7qcM^Kjk(#wodA>^jJeRE@u&@?3HT={Aa+Sm~)Z8xizaNh_9W zOeYX2En$l{<>Ie_4^B@$&7W3m?i+tfYCT$>kY!9B&ZPJkf#|z(;6K<~pIVSqj|W(b z!|`I{=4Twmn$s66=`o^8EJKuzYApV$vDdo2peVB;36aQ*OUo~g=(7A6?f?yksW>n; z8poncx$G#cn3b2pLlzn`oE>`pjLDXs5rzaXCVxwbYaxl##QD?4acy2I88)7>H?Q~p z^5*G7zqk5DjUx*33y#OX8C= zIlw@EjR{uv5RuE3pSX{pJ+>#F^aMFo^&i_H_uK(^viKD zNy*}QPKsA?s80bXSKN}3KQ185ULjpg#ceN*Za|P33lXQD*j~YOAe4BvGK#8idcw#jpOM`2ph4A-$l|0SUw0Pluew_=i3mgcsFGn2JK_S-$YRJb z2zK0okTFtGt=z5{LLw&4Q7o9i$|-Hr}4;RUTW&0tpc4 z^)p<#Qtm10hq3D~q{p(VHN|)zfP)WbO2+&-XE?`~1Nqjdc#OINH@}Vlviv&%t^izi z{$d6q&7NaXvf;#w&PgTgza zvm_!*B0WB}YS5LrI1u~$zqknxXqu*Hzlaxv^|x>Q-V^jh!wgGDoX3T%!U-apd{xwyu#-!%-pu};!@9u{*+tc9*2kS2fZLQ47R>^60{;CRc zO;k+{`dTS}nY+?fdu;x5Y+HZYlrIit!8yn1IP(6HNKk^BJ+SS8(zdgSJjED&`7ANR z*ttV$DyI$|DR7mU0ve{H#X(1brH9DgA(MG#9UBR=Yh%DwcsY=vbSbQK2~$ShqZKsO z=40sQ*n_w=(l3aN*%zt`o;f#O>=&X*JjgO1MUaI@csZtzrvz~c+B;TXa((#-SDXhsqybxoS6R?_#f5_3^V`E!TGn|K`1Let>i5pML)?Yfg zY(Zw6oPF0-DI6Q(#=k)p2tghmEq)RTu~`p~$e)nZJux3E{|evow*z}*!(LcI_4p(X~`qw8;&v`K}JQXX( z;@F5fZt_ojOJjk45YUtvQ50*{5_7plpXRaFKDg05oYfULi!IV^-Xe`iq3)*@(MDWap6+a}Qf4s0E9U;nWH3D0VAtW3h2y_$M$2_;6L z-Lsfbk|FyV6Q0dRdztenhvGhNYEzRTVMi72*N&)=LB zu)GlC{27-`ygbmnGb>%?AKVz8j2izM$%#M}ix|um*Ge46D8v~nOx0`j)66iipVXZi z8dL5ePXxqRsR@FLQOwjsMikEJ(+y^~VHutc5!D%~9Di zqT!C!>mV|6ZXRYu9zx=f{M-Jg34#@Jde<-9JbmK*cbeb|$8Y|D6Q?hK(NSdR_~^tj zKA4cSGiHSi7=7}GhW6zyUd^8~>FCoY=@rIO9C^ZhblU#_f*-p*^00Z?BYkH7lpP)O zKY9)w-e}cSAvlk0fxpDj7p7q`*P7F}$BL=~blzmB__QlLwVjX0`_hhI1lvb<>KBvP zHqXIIfJw9>!vOe-tQ=Zc9%4_7<7w0^Ly&Ol?TcR1`pHB(1C*E$DGF^JAcoaA^Bc`xD|I~DjZ#+g9;MDj7&I-}WapfGs@>3mkX++z;EDI9| zb*?+~@e||f6JL7T<4ZF3aU^j$@8Jt;8}iIQ#Z@L1gk8m-`Gl;+7qntjT=}6L|5FvU zusp%CYH(J|st9&i#8tax^zba`6Z!&NFJ9l_DA_ZVuP= zn$xuGi%O3jUFq44`Ewqg4&$SvOWXNZ{;Y#uc#H}Htrmd0MrBv|dxJ3y>ukL8A|78E z%U7HA8Zk#Uu8s2L8qqi97HF*MQCw;n4Ttau2LkEB{0+3Ej6}?wDcJ{kE5?w2e8rBK zwEvI2cZ>DETd%s--rd+zTB%fuVoL*fKp-NDhY$@YiAFED078fu5;akyiKnO-0YQue 
z;mRB0A%L6`jEP>E7=sW%2{%Yupot*eJphJsY)cQj-R*XK#(2gv=lAzt@9x_Mh$fwD z{l0U~=NZo!{~l`(105x4L)##oV*Ge0O@LS5`*P^FMiF>cKiRf;)TG^4M^X z&yT&TJF9 zXaCVt{I)nftv6^!$Be8syw>Hv|Bc`L#;^GJhkoAQ=J?0C;O7_o+dp2vO5}6;ri)0dH+%{Rf4tx9VF9I@*qvi+$I(Av)R3ig7bfc<%h!sQv{d! zbeX1NQE|rPE#!m(B1e#yOM^UX#Ct$c7zBQRp12t%1YwMgPcFj6N0T<7qN@5wR6#d; zSW_K`k=C_6)<0=O@DOb3=BiM`_CFeK=4z-k)*;@ak`W`UJ(zUQYAQW(NiHjIb?HM4 zKBN63Q*3YiL4myUifMc%j<5H%9Hi=RVIsJ7kqDku zM(ao1ll>RKEW&W++H=7M9_@2PeUA7)=EF33Z^#X%`lP3ARs+Mom$_(N&4FmIk2N3w zpM$f9Ay6noSmQB>i=25{C+&LaMG zwaIwFk88&y6S(+i5uRz~4(lh(`2oQnc>JaQ*IG6^dMfMrqcHNvoAMj;YTk3W8#C#D z?vugdQO^EvTgsJ-8b6mQwueH&6qiPKH;MT_(L<6l3yPGOkIoAUxe003Aq%I*IGO-v z!!CC6te{{wZ&pL6#>iZtFdu4y`-UpmwKX0FazLqz1!F^(LcA&1hl4&Cdz)g*5&LIv zNNR?)LGz#iZN_ZaI~amouWI{_7uSJhOIe80o+FByKo^p1S%NnY4+@D^4gpZG-j4qi zB|+XFR8B46av5t-a3TYjny^;DCBtVf(Zv(E!D`iU?qzbuerSKRR3(^u1><#?i1C5u zJLM~F6#iw@H5?37Gsi1|_%`3w|3lP@Xihy;D;_W2qctBwc*V?UFY!N`%Mnbj@u_0h z*pkXLT?=D{xuoEb&%Nb3}PKF`yTp)oi_F z%9;Cb$Nt5Ozx&sJ)7#hl{=XW!k9p;vTz=8N_m%pmm;5rr_J;!uSPP?Fvr6*vuYfU1 za#rLA!xV?qWyj32C4w0B_y1!b@3#-{Ppq4O=jCXO)5^yzIDvX@JYNL%`M(JiJ^s^w z-X(JuRKkU38)L_YPY1~cV_erJ#*3m(_~$?Z#T)@sxx5M zn2F*&5Z1{qXMl;K^YBxdR!u$B?h6?TD4cOWtb|Ba)w4w=p~>X{?YFE17*Q&{xxAS- zm-~02(XJMuBNKM-7eN&W1;>?WruX`fe_hK{ux9#wgdw;|MEzd>=DML;gWHGI8a(vq zdSjc^fr%Hq^E%^5CvMyjq-Lbwy5utb;gFiNR zp1U`M>EFJvf|q)jx%ke@*M)Gc=?pooGJOBK!s^apZW3rxYVE3p8$66 zhoBC3&hvXY;27nPi9u;jZRT>BA+2s`SbZibhvD)MA6obOcY&=wfmB1$|BN@bOd*vu z2I)BVZ^3mTJm(8aYRr{vZ-Qaql7kcMx&}Ac`>^uH0oXC#&S{v!H9+i97|r%VXyTqn zqc^NM%MW%Rb`#WbxVy^%L(DKROI_j0P`}!MU7cpGSY%W*-r9fVQ#*Z_TzvGY(HvoW zQy^!Se4>jyeyAS~&+EDMA-}zVD{Ol*EcYK3hAnEQ>-JB)p6_*I)=?q*YJw{ljna{G zFx&g|sHwPO=EMgdFX5Z}q55jCCIF3S#4qa`7sTl$4#u;R(JyMa0$Luj<v z#C_i0<7fJBkESTlHQB^V@Hn#)2c?DeN8A0c5(Go0JYMQ?JqvQymw)0@p6J|Vk&6 zuQ#7RQsl=|yuRaKwc%AY1aPV5XZ^EZ_Vy#baDOU-#7B-#f9-eXoO{-nW!JR5gmcM5O~iB5(KCpNY6I@DN5m%D`Iap#+i|!I)fa; zel+hxLhz-TU^tWGDMl{;6{da$c!sq`-dtMS|JDEcA0BYQPQd+_aq7*T6G?xAC2rxc zT1)*i!|Bm!_ILmH+BB`vrk?KEe6{Hfxba9Ft(mQ_wLQ_llcb*n;Hm*-p8qjTV21g$ zzC909bd|H#7%(SmQRPTkm|z%LXIXDId)cjkuG4}1@ZS^Roc3gPtvldzo8#9S z9HB&7@;v`}Xct>v4xCzDmN2>2_X{F>$CM6&x{P~Yf2}tH89=V}MRFCqs*Tfr=9}YQ zkL;ODF?>T;{HowT1~Ru=_ zUkQkCFNV|i244G0ZmM01hn(%419kO3K=RaswdXPFb^Xt5+|*kWj%(cgkAX-3K;g|j zA(cZ}uIdoi`U9*9l%NlW%j;kGtG6tx6=EM#{fA*lAVlq)6Kg)L)tPfrz2&vOY=o`c zlW^Ir(Po(UswAFs#lHZ?>7sT05GFy&JyMg9<9Q&29&g|Gy`Ovg?|#iUyzwV)e=y?< zKJ|&WfALp-_1jPV$N4|C;lrDknWcKp*beMtAXhG`ZbwP`*KFK0`4j(vFMj){e`$VO z@W=7^HNP+4K+YMd3PN4-{G|UxxaO>y3-t2uKqx-AH_n~@dk)-z(|{I(Zn5xv)qRTwj6UPLUF{pZZrk{tpCgKH^_-*S-+6zQnFUQI)L8Pyaw# zJ&3bk9x}mgy$ITWvbj!)HuLao15x41i?GlCHb4-9>ZZgMXUR%0`_Gc|$cU!o5OFA+kXg*ODpl|a7{RyOV2Mlim8b8?x(GKV!L?+EOL8( z_TRca9uc(O<$8!#E0(r@Q``U!78ReQm!|+CdqiLDDnOT>$6=}FT z%{I-U7m15_JrP#Kj&If}F+n%Rfu5-2sU|4*BnFT^wRec%4ClkpsIe}-2uze3dvTt9 z{14ksRQd}6d42FOuF*7O>?OaoHZ6h={g2P9K1I)XI1`(L%Q=jKz0?f0R!9JfiOomf zM5$SVcr*0<{y~N}HF+~VeI^5NFcYvQFdEDbWj10ixhHHW zd9VMl&V3WZ-G605W`g)!cB(b@8PPgIMttDkU{d1p$eB`ZtsOh~K@ymQ$`mU@X<6bXXQvI^7<|M?@saY$ zdor#G(zO#^4ysY=m*85Tcx%$1up)Cw=c%{GteKW|Vl+|zXHE~Y7~b*adM;?_bAyu)#{97+=Sn2 z1cob1=d>Jb@1K>7W#v&{Agw_aBXhmrtv%)1;bIs~eeUh=`G@9`9d-uYy8pQ`>k((;hR6>UeZ5Wr~kR_qob5P_24pd4yj7s)hgVxQl6G5 zeZBt*59Q%%|E=%r>cA+=eH`Lqn%@5t-H9;H!MgTgq=G(7r_GwCkzqUWF5%5u*WovW zF0Zi<`4sZTwvKTV6n;I-Iy4+&$?~QOa1J+A$DW{uUw-cX0ao$&N3a?tVBZI^eG(fF zWY-trJTM?gK-aYOofw&y{=r&)Jj8zq?*0ReQu@Ezw8Bsfk zGN}P3Z=LCH)9AGU${zi{@@5~1ypK%Fp|bFj-_J{AlA6NNy8Hj$@BaR`-~1~+{r2fU z{_*~BQ#5n^r+&#-y!~sx@h850)ld0i+ZeY>YK1UHN18>v5Dt>qmwx$=c>DSP=HL1D zFa6rDe*2MMqTfgVF~?`W=lkD&&+nK&>09EdE&Ix#4JMcM1^g~`9f6x*&J19&`&=%; 
zl{88O3i<1Ai?;gNKb{sM!>m8?i+=k5q?H*mO(#4oEaM4ioo3ylJe6cr{rl22E<5Ev(>{l{2yc>d9Kd|O9(M15+v~h!n`SbPv^f&%# zN~2WkpZMs%c9c|Zn#*;0tv9Cq-2_%EH3>%0tjd#h3pMWnvIYn?@oME^o4i0i3q#Kr zWM!xP^Y?$w3MU_qoYTYu_J&jGn3c|3)AwCjc7tj);@>GRfV-Oln!2?KBMVa;V8Fh&N)diuZ5YCS7&2xp;{dz(H* zuraUi`1p8eA=~D>?nSJj8ZE^mnuXTelW;4u<>{Xil3@9J8vp9A|4DCO{Zl{Q5AKz! z^Vj|1Z+rXS^*2Sn{*Qf!{=NwM&3tGS^}pjMe$m^{{CPk6?Pve|FaOv->rqDL-}5`a zw2$(d@i1dtUTwE23EEavi3!UTaS zjz8|7p6lUtnR&sg;(P;vK4v21?FW~TMT(@ccr_`Jm|%{?U`R2JgX~qf29;}#c}gXf z)jfdJ`&wY?>*;s_6by*FzhHAfrpDi(z0l>xKm3!>XiG?>+EM3vDTY3-!}8j_jmo3G zn;x$8T#n`t*S|S88eFyGpNgy+@7(h@Cj#$`t^UCOcpB(REq)}0pTAU5$4-UREjiimYp z6Vle&rJ147^Z%TDD?|o7M_u{<=Xrl$k;CutQ_|C&PB?5Xi+CUG+ zJ69iGp?$+0PEHK{LeYlbdtlS(JN6H`litF7dCGB}Z!|~!_@$1K2e#|X=iJ7B11fU= zt}C_}_8~nM+v{MBNbF#9!(Hw2aL~~0tqh!Z&Xk#cJncF-IGXH9PDTa!BD{>VIJhHG z?9m+N7W@9=C)H#%R=&NS@Fw@HUi-fY29xUHSpUb9Gspg%Tke0Jt??z`KCf#vWFQ&jy|tPz)|zDG`FR$G6cgHl)f+QqWxyk74(VcwLSn6f3?x{u&uNI&fxcd=MHGS?;!t9;Zm0A~w~U_mH4aNzWr@p*|*=q#Atu zhdZn(Q{hL)-P_zyCxo@0&t2}yZ$`s5+vl`&T)4vZ3lj1J=@F2b%hR=Ag)>67{Brf^ z*gvR(q2l-Zxi%49(+@_Q>!X`)6YhHlBC#gria1hz&=SHpSHV}`#E(t=Bx#?PKJ^s1 zyi7U1M%M#74=;Ez6V={c(*cYPuObguA8v=hl%4M#?06ES48A)mHcvoLLOpZpr@FjQ zPNIiUg3onw7|YS5!%`7wtmG72VZ+)uTwmqabp-q-W^qyVl22v)NFq<)Z#%sL&AHFE zAp9NQRk>f2p52RvgMQyCj)ii#418}C+nHCHT?w#5y9hoGziq}SAd%FD_+TWcWEV^{ z*=DIeN(WtDs#pB=AEo!*e1{Qpwq3}z8etJLT;OC^WC^=r2ftyyWrjQLqD+sIIrUBd zaP@m}wc_`|a$@Ck7rG6x2D8j&o$#s#5Yz8#eHM z{uxIi=a7DKV_C&dVd{g|>de~AuQG&603)Bqr&D^ITHm8IH$;=1C)j z1bGxt8%NZsw4ADx?}?j9nu?#=F@GAIb*~O@7WVu$p$`xe9%;Q$HnzzEV%^Ps^5b80hXB zgnu`1a=obYuPWF#U{NGXK4gd>4B|1^z({IuAF#kjf!R^7Vx14N3MM&*!y^4?9nBRdw&UW)&q$&)Xh8 zvj$;2VHWz8@Z7#xUXW5_T6IY$G zND)0Tw2}5TlA?2Mq20X0zR?0>b|fWVLZA!+a1P0CpF}>dPIB<#y_ET@k=3iy-;l<8 zHrO{J59|C|4Ll5rj1zSp-^$Lr(H1k$D7_|!3qzA?+HawTAn9ma@%Gef`xVc1y87e& zOm_Rf#-Vd0f_Q7q0}h;s=gLLC#Vf;zlVLNTQ)^(}=%)_B zXr(8Tdn1qvU|I59;oxNu^t=Lk7UO-lXm^vadVTpF`mz1>mu?iFmykSFee~ruZ(&TI zisR`1k)(M^dZ$sP9D6~kx1ArpGZlt?8*Ye|d={eOPkP=_&=7@1$vwwwQ>!Ycrm<-2 z7MSs0$5$z+jEJzOS$!-YiH!>dV=2$5Imrn4_;!l|4HbPXgJdE)bq|6buii&w4%S-j zmku0+p8qt33doj}!0?mlQ>9uNcM>RU_=yxt7gC0_TPBJ{>z~)YWaRH2r!=?0Qz5Td z%aInxPI4L2nHq-EM!68(-{foxV+;KdUVZqiZHs2tU@x`_We#6{e35)<7td3BHb1F( zBGFLs!Zbek*g&X9MQ~tk7NGv2Db)!0yMaf0PN*u2PgC{R-<5jujV;r3U9?{oKw0_~ zPBW4!BXpovMVXgQ586YV_y4p9*v(O|1S@OG^Glv9Fx_0m(X{G2MJI5=Xj0Q&?q0qX zJ#BT$gaUTmiRpIvDMCv?6?N$ay{g|e0Q+<=8cadIAcd^GGkpi|LH8i&`e8k~#Ln~1 z7R6C_X#hE`cKc1N`)2HhZOTL#p5P=5Pmc8Ba=(RpkAM45claBHlf8na$8T0VH@gNcNNPRtO%)kD8$A6lWIyS6x zcNx~3u*g*aN0h*!Gsqb>J`85h99%(9%;ddbABysHKx2*i2;%B zWSSv}NtJ|A6WVEZ%fr98qy=Z4Ybi(EYg(RqK?;Cap@ORbN|S0H`#eRDTcO07l7iA~ zvE+*5`6?5s#U(y(VQLp<1A=0Th1mQQ8EN2GSvYFm@BJJ1xZKx%K z3vzVh6BFRE?tgO7Y(21D?;GLUKV$Ch=~(r>U?r@-jpz98Slk1+|1_R!Tj^Dm+Xz=3 zV57W2&fWGn0jMJ$|D3BEEMr!4#WaQs!W-$Xtx=ixYc5McgUh1+v!3s56tiL5*&Lis zM8V@<{oub6*VJ4DA`RIlB0T$ozV4Z_1c^)JDf|5HTv68%4~F||1jm7`I{DLOoViC5 z>YUtHkdn9!u-aXb##?4^Q*U3+66rv);HAYG{fs<3r!b^AxPApk=A}W9pK%;@t3C=; zagEfNN^rdK;Il-0p)rSgmd@_fhD8yoW~9W-3;{0sLriNZoa)mnGs&0xcd!WS)mDtL z8tUnPZ(PU1oKX~?u=pf0+e4FDh1k6(0<4g~(iSNUy(Wv>5K+c`OL?JYyswa=kB`vh zIJP>X|MXliqIpnn#muzaya$qe_75{}OeK#F!68=%P33R)5s%6IiRX8PUL~-l+z@FKQXFNC}Jn>mrMR_AW;mW_jKU&zCXMvA5ZN_C!1ew`!RT_(|BEw-~O%(rK1$+?SMT zEESfa5f1Rs%me}FAHeM^2TIp`S@7;ws9_6^kX;@>mLHbAksW4(S7dIi_brDjj}xmn zc!TJ;k%jl43$Ki|qPMUkS$8qxyD@amLayIcioA48@{nEE%Frf)J@4E1(><6R&kk8u#JP(EOZ|+31wxdvtyK8N3~S$hFIbhi4SjisBhqPg@n`C8d->0E)+&RHKo{t7n@nkMSBQq0n zkBOEfRUI}>8wqT;Ycd|pi>?`_*}F_MGsb%B@$=odquJ)4*!1+pwvkbziveCucBQZ6 zc449JAlE%Y_v6bbI@0~>M6Op_PkuD7x2O3$$ucGh2~PRV;xL+n$~@0NuOhi%I#ag$ 
zPU@t>XfN*6Y+V4^BX;Qu^CXLsW5+Pt(x1fj(v!%6N*WF}OVUu{3goJ=8rN{J7FNC@ zDk>m*qxcXpf^+RDY@=C(eoYh3mHE*C*&o>V{(CBcp@E!GjA)|{e<;azy)qoNlZ*!q`! z;nas$eWy^1=ZGdTRf~R>QtN48DQQZUd*@24WDt7~8m-wj=*n(zCuz=pVA~PH;jncR zv*SZoiPALv$(Y>st7CAIkhDxm)6!0Mx|j;kNjBdey=t9DzkG(vmU(sfmgW8;5B#cj z$s)1JF4e?+!CK%_JusH!vV~V=K9TUP$LDkA+)-Iwo{H><>imyt)dMsJu7m8fqcAIBfo2OOm>4X?(q*&{)OqbjelgbJ=93B=pJTY93K?%E<5z1~>CazW@x5)h)xB_J zGFCEfIhjfczN|!wCy8s`D-47JcC4Ox+fSvv1uLnP`dI|u zt@!*W3-Ay9O-W58y-UN)wE;?q(H{i-ax`?c|2B zd@OBWpYw0sZgGeh%~46cliqlvLzk;D{On?n$H+uU6g!^wmvBtd2c}I&&OE)TyV22k z?(eYgx0w%S31RC%O+C7bY^Q*0ZJ1cpsI;hFi=YY98Ou?(OH*jN=VlN1hL*9468UW8 z6w{;uk&rgUikt{qFN28jw3^Cp)WP=~!X_=$RDTU=qaO?;TZKTkZBGLsUGEehKW z>qElHbIc+wCiGJ)IKM@E%xHXvVHK2P4bf21qiDqlxl%U5b=3{c=#&go;P z<%2YbM(l{M3E0LK+N87t+2wYSkZ<>8EWJ%3?X?xz`k5n{ui|9ecF5d_w>5#Gz`CSZ zJCaSwKh~?-aGF^Qum>jHr6h_hhZ+Cyt7uU$cYRU~9b;DarW3NM`b6xOqB=pXO?O&2 z`OeOxS8Q<_!6dvt@y+=;$J^FXBJsD6t;gQCq?qdoOsH!_d~CG$VK-GG)72wCXgU>t zmr_Ev%?7pag$BRDRt!8JV}GkBUx+_BjUcPV$WVpikx;Zi$`IrM)+#BjRM8Asf-_;0 ze2-zrt7kZDw4XJJf!^Sq8@;c|+_Wlh{#qGzf8i1z*Rq8^AUh*V}Xq} zHOyD3XTh_lY8%|cqe|35wC=evbnXtP&q)#KxIUc5+PF@)V-=LmzSoU+@)Tl$?sMkS zU6x!MdtLa+Us1#wo!italX+unk^WMdhnZhDIvb)pUMm9_a%`wM1$Ky?R+JqcXI5Pn z*D0lix9uGr))myL%xucBw(#zd%wnrR&uyjt6Q%p8b>5{XT{(yp?KZ zYVdq*V=X~#O_fK&p7}{ z<#zoUjT zvI^Il3Uf9m2JNLk^ka|esM4J2&n1Z11^J<}oTdL{nPy;jpPkbw#1PS0-&Pa1@z-M6 zhJptAZjVp!JbgZ}d(MX{=5onvUtis6FE?WK8dR;L(jpQ4{A=HyWMfarCmW;yJVu(U zaD`XHe<1R3^=<6V(o#tp6HGrD0?fa9UOTPico!3bn#U{&KNTr3xI1p_1dM=PkU4Ya z51)xBV|@sig!F14Q#m>D}6fy?CHBD9Y!xtIM| zL7%*B?ovAfO?He@HBJb3%cC*H5)synF@OgVw?uT3;KGeO5TmAuh!-bKY%Km4c28#f z(;eaAii%oaR%|XCaeyXfLrIE_v$MA8u_B?boR}hnSQ%(0|9A|*fD85E!_e~~ zjHdCQm&fV4RI5XO;Q-bbhskh)HQJ;@_SNr(#9G|lPtuv-Iig9tnLjmsJ`C^^KCc z=FHjlv^#Gnea5tzgfQtzpMEf>erh^U`AAN8rY&B@hJRl6!baS9un&4#2$cBmQ@>bk z$1YF1DCN5*je1gPgb`A)zGpyk|0T*#Wvpgl#bz#8p=vTB<$NKCVy0P`CNu_ezpE}j z*)Uin`Jm^Dd6_F!8F6)-l~oAke2zwWq-nDdtp5plXfvx-Sb3Sgh#g5|kOX7mgvf@z zWl2#ny~ehe$}4+PS_s7|T#6?^t{Aj4}W7#?{w#{Hlukr4>0zvm6F zqH0=GJy+2sO#9sDv1u$gvMIdwfZ&5aNY8n)8VNkpZ|YXYYcbHXz&{uckRGaQHc!$#SXEsi z-7@8T&z7_VzUS;(HwIjI!uSrhRdYL=^p-}J5&KZmsJXv`ia^W2$vGghkj2+0-6~uK zLPP=$Qw2-Bxub-oI0`agsJ_{JsG5(HnFEWb>lh%WW1#y;$H*H|>vPpRN58LmA^NvQ zdbn)KJhvd_UX4}mFv=%9AQx}#m-9~FeSU3X0q^iKB~%_zyv#T5`#sW(UC{y4$6|z6 zP5*Y~%!3m8(;l4p8=_MDnJ5Z}Ter;|xgk;i;RM4}BtFq-ld@4(@UXO3wgUYG0cT3O zP72x3o`tx5&SB;}uSnP)8>+C$!4~(9CdI!*bc_eX2%(bP`Iq6H! z%y4K$DPQEPmuGX;6&*AfuF)BJ35Wnek!Pts$tRz8C46`%G&1T6fw+3;&O%<6#Aad2 z`&JqS_Xt}5hYs9P((iqP?w_z!=_)JKu<3gvPgC?Nj37-7mt*N(oMi%slWT?r6)m}& zsh#zhbrmCZ%}#RtJ2UO5(&Vi&AKdq|;|>3c4Oq{5*`yFL51nfVRn=|MuX5YU9gqeU zv;KR3`2sTSS5OpAV>>Fq-sTDA%kcAic-m8qQt)C)t+~4H_7_cJca^4T!X@N@XjmEb z|9`N+_O|k2+miZQH9Kigs1|k!k3h&*E z0EVQEg~jtLz1)TpZN3BZ(LQTq_&jHj#~lnkR9k+Erif(edCY&a5@z?B-PQ(t}^Wy1sUFL?<$GSdzB@%KpqJh7A>~MsTKM`oIrXsjb-jP zFd`Ue(kRU5H52n#-%;vW+^i|Tk-bHi&G0^Unnk~4R%E&V{pB&XfGn@IokpC8#lb2m zp+|xOe=&yA^FyS;tJI!4+@x6bJeTh=s>kIF?!xNjAw@=a<_~mUA=nD>)B4Mg$QUl} zJs7!q82A5+64UZTOz#>I@vym@DnYib}e0ji^Q3f#kd`Q?Tspku^epW@PQ zFLd-ho=|}s_*9Z$(2WwBa961o6C@Yaez_{?vm+sla}qPe#}`|9@~^ei|ND|9^CT$*=_CsckI4eeabLF*`#WcKm|2 z_=(?G=X^IQ>o11a*GHuWN^2r~!hF%812(d&zAM>UuxoW2GXyR7W(V)A zq{s^R;)N1MHkLoBwJba_#Dz0|++ZIO4N4$SBYtq_XWp`!_CA*Pkn!3kTPdT+e#P-z z>9E~e=$iXyU>UiYiNet17b80aH8no*3IsZhYm#RRh0FuQsfD#_{xy$jEmF7(KU-<0 z3F{YGDe;i({Z3%hzOlL$A>D7i1K|pP8gUMf`!IcD4hVuFvo6SVts_fsX;O@OQSY6n zvSP&kj#t80yho9;m!?@J*u0lqesdU4H@gk!4Xj#;TX`nZ%}F?kovflYF}2dh#y%N? 
zCaOj@L#gmC&cY~;J$!cpr=6cpUpxhJ8)-v^nItB{pgS#(dH@S$oIlyT%ZUohshc0I z&w|88iI>cO@ZEgoIN9BKNDPcHGz&i&j~KIh>i_j|mrSc5GD~d5z450tVlEzYl=ob8 zcaqd0!;E{v+1X`)@AYv4rovCOQsvSYf1$T#iEns?`A)QM!Gkd*w z8g=uSB*g`@Olfz(NA^_v5jA5Lg-1pe9~{a}V81h#@i(YB3$OQui&b%Oe~wa26N%8h zG0zgdNNMdXo!OxeH>11a^Y@yAzR@H4EOD<^u-qj;kb$yaP!Ma8D9ck3sF4lIpuSiO zKl~lS(P<9snXc%vH;oO=Fghcq+E0;%jRbYEA%`7uD+o#TYs&OP(`7Df7n`=U_f^wl zVqH6N{lT}iXI0TIV1hah!X!awN-la3HE2l4VKanYAdA0%e6`XxY}vWY4zT{_i4nbK zZ;9{`o@dKX-7lc(i2rz^LrGDao&^uTN~{h=7^ zYw*Rb8;V=(cC2+z$&UqHTJVDI*RG;S?s@)^JAbR<(4mo;B%yNX?s{f}y*S^_=BxB4 z%FjOCQSx0LPZw)f81eoRo${Zu+Q)B2WVZBTzG-Gt16Ldjb3eK(R5oEQ58siH$jhi+ z4G^G_L8J_-J_E-KgZ`Z)(odkC~6NUK!y z(7MyH3aYT~JH3jLb_!3?C^5z7V|(j4ua8#WTo5F%tpWkzvLPO9HujBfQBq%{uMs>_|TjEd8VHy}IJJpVYm%-N{Gry)} zbscUhrhRflDLW)M4lSgm?sAWi+l^An-Vd_(!Hd2v`%Ce%f=s-t8$^1 zYfPkE!$WX)Z8(Z6__dt`e<6F`ke;F`(W*iVRj#AiU459~Zt}6?v2M>*M$bfm|cl-6{43r{($L9+~+H#|p|`wZ<97 z5rQ3xHrTqAE*E^t}Fkm z<7rUF^;JN*gBt*P^U|@ne$h$XCPw=LoffaUZj;RYNqUTGfuSR}UjZ;g9PZ*SZ2af> zS48$n2^^u{*aal_$lhIf<~{F6UrOGXG^?vPkpgTfhq&8n*L0msKT%_%X{0T4z%BKY zQjWiZa5KgUzgI){>WR8If^K)zHWNGiuVe4f-LlWbA7;<@Hm@}@j>w~;&T)F#)`$p- zL(e|x4)uRraPGNG53vxlFb5&|=NX)R7;Jk6L5FJgO0g&fz8bVUZ!WGT5VCK4s1aR6 z_`9Nw%~=G+3hf?fRHksCW>Es-r`;{RlfmwqTo_1(hc%|ICF1$=8=c?)k^F1FPx_vm zk0Cy45-n}BcgY2nRx_KpGmAiaH*vbl&4@K7)$hXh+KyKc++vPdoDw- zR=9*E98bhz$67vI#pihL`nwrqO0aHzxA_J09(t>#&{l?MZ$Z+PQ3^^MQR1)n^P%EW zui9^Dy-FnAN@}IXjNDn$Gu*+lW2j$aBC`e8_dFNZp5{GnR+E)dlH+q+UX8?2L#fX! z-O1e!u9-&vSy^r`y&apQ!ho|(Apgny?e%U-HoYXc92rzB!8K;Wp>MD zHt~Z&(Cnm_wA@PYj(_%yg}JVvD)c~amEPM^%gOBXjA0c1xnl}iucG0ODA3;Q_x2om z`ERm~-MSz5?ay9~+hb8-zJDX0P8%0?NDK}u8?FydpfD9Xj+)A2LodLF-M6933u&x}$tOH*k}HmA$9GQ=-y5ty8yjkI-LMJ?W1S?w@wg zAX%b*dMkJG9{9hKnfOkxH3w#N63an6{pyNriaS_Jz$^v@)&#Y|lGDCksf&o{m5tJz zE3=0&QyST433#|rZ{(b;wOH1>WNfy(LS}~kQ&Z~wGqbaWd#&F zXs5!?#r?$HsT&e0YSvSP$6QB4JHC&4$T8iXAQWAyLxPawwITYwqkUh^2&a}5K?+@Bt7PIiQA;n`rucq~^WWo3weD}F@v>idAo^)&#d%v62?@JgZ zsJib`5^)5w$iuUQXohAybqi2nO|S&Zf=Lm zeeuDbq^Oq(>s-+w0||>~-EB$A;6Ub7Rj5S)v_*|!AIaZLfJY7#c7eLp!Bi`>vKx&0 zkRWoN)c^`0i8;T^zk;jd!Lbj#dTf}szze%l!N9_D(HHP&;KU@0B_B;!j{?4pCcLJ# zK#FMRY675=H~k&|ckp@bUw1{KptP~y)+2?$+vK-jZBnGawy)Sc#|j+(rsOvMACupz zDHw&7f`h=0_CA)xFb`tLoADBWA|MNnVMfv7Th$wiK)OJ_KhzfMCvm`xb|D z`{Zo2(^}u8Y8AcqN(!H%Pqb|#_J_QiWGnQrS)IE!C&cAtYWF}C_b4oAJ@;MPmo7p> z^v9nhZmss8qg2|l9~AQVxy9TTSScB_NiH`vM|but;t^*{I@J>W`opi0`$)FB8Cv@% zWQO1ipIohlfs!T#Z@-u|5<~i0r#|oM(lQ2l2$oLzx+3NegF;A5;>uXJa>V4$={YOTFEo%vfxzE1HcCoKs zR2H#z8B^7EZysFu-wt0wIs)DK9V-triOl2P5qUtpL=ZCCunsbV^goxRT9w}ugta5B zlnoL!%LFl-Dhr|fR2g5IsmD_#$Z+c8)>=LS)9;10;~m#;oqfE&ygf?7#jINYI4g#W zAE?j9_PpX+$p8G*Y)eZ;-oiPRUar+8DvL#4@ALwM5$J?p@O6#@doGXH3oG&W@N|JF zkFwpvtoTKD;{!glo@ejptL7Wqx4HgQ23pt7j#DfXW#*h7=id;0jyY^E)Lxs%*Zw`- zCVlrrFuJ_IO~Bg_A0c`lGMuf5Ay1RmeR=@>5o$~+5F8>g)H83H5rJfIEg~KHpUFM~ zS2Z z&Ad%6Fr$Ze)2!S>aT-c%R`1~DyfXGzP#37X!d&1uOl1{1A_6eb92|Z97y4ZFIGxji zNnEbbHOdl{Ggj|XfhJtN!-OZa-(l*|y)TaxJ}s@ZVnKcoLzR-eu`dP94|P05lqH(q z(G#j|%Z7ea!BSiiomwrJu*Ui&ehb|mk@z2%moZ&uYG37w{b)rX|FC)bSfWqPzPpOr zf-Vy&x7?saCnhIQ$ZvkU|K0D~@185&7Js{2o^MsD5+~3v$puJjK68pUWYg2`3jXV# zpalL`wu(N@2~S*N%FHK1T^BQGw^{_mp1nL?wqSw^^mno*pG~h@BI8o{(n4o$j9-$g z_g-HZITZ=|C5yjGhrM`mdDPzU#EE74>%iBd0=hst;ZPQlN_FX3&ok1zmo&#Ctw@@o zI zFaQ(K?bDjup(?+QS#i{F8l~W3TDxMzWX z@-)77s%DB-1i#(wo$NPzs;|MenTyV^4$x$Or+b4d@j@FK4}M+bY_P4HiP;@@2J0LdpOxWC$tmNfA$MK&-CsA$9ox$wl(`wcXfH7Su_nM-7j91U0oVD zO`E?s2=7~0CBa-Z=P%VM@cnXv4lY=3f(=DFw?BTq6WoP>G8Fz1MONQ_FJg?*irjyQ z0sBNlYvqczUe-_SNn#s)rCaR?BR@W~{Y9{KBt5e^Ct`$kykY5g{s^~En5)InpgO}R zean;a;n_`SyoD*5f_98X^KOY;E}jEhy_a7A?dW2U~qw zbP{)=-9Gqg)tbSFU|Y($&M@sO(d}j@Dv 
zyK-=(<)fef%*ej~4Khw=HEb`#4O{lxRN`TZFD}(sXQ-UPN{izjTab49jS<@M7Wy>Z#Q}mPP+(8a6ijr-#3MU_WBm^!M(w#mLG#p)c*Yp|#LxD@qvrH&7`_M6MgA?keRVqXmyA^Yz$| z(F37~3rL>!C{7mT0X6dnqcO=J`jA$XYNh7gF)@kgRzCCPg=Ft58>uD3+o%7CKmitC z9B(zt5}3nm&*(EBJn{L)mrJ$(+E{MN+K~CGH8SI9vS<+P!RNoXskvm|#Kcb#CJ}W% z3;*-PPqssbBtpy!(v?1Rs103zQjXsmN6LfXvvWY z5cD{FazJPCzkQm=PhK+aWJg1+UwKuO$2fYg8Q`r?$v><@f`$==NWRD+(_?8I_A_7C zr=VHhjjTFzKew&c*ToInsR0<30bAW^3oVwPjZYP8B>X$I=Gcg8e~MUeIpe?5?qrI0 z;&3t+c~Oq5T#Mqv2q?6|8o-G#`6Q~uhN*-!|{+V!W2Q!KFqRpAE`f56vWC?MVc4KVZPC-cP zmO+5{6G=Vz%_-IG(UDipn56msxq0^b-KLkw75}=J7D;euiO<;C@Gwe92TVQkn(C^x z8pbs^in=;C(X~o1s2al=OMVP7|L2Z=@!K4sxedUfkgfQA6B4!lJ`AOE@!{H$`H@bc z4i85$5e|6~_f2nm%#-4HK5RqC7mIU_zR)@Ds&>2|a5DqOwZUT-Z~v@mBq!~XMVvv# zK-vsGj@(N4fi5_@@Mm&rO~4#A8hT+Vi#29ZA|){ja;hyBK@2u6>gcEkdqatcataU zLg(`Qz5(xLVxYGC_A5hu)`K`r^elgvjp`e~Kt+rs?PyQC~83 z{(j!CzgEgO`^}#%t(+=94+_)$u^N5(6d7NJpObs0;rs+RzA_*Z(5MGaAI<<L<07 z0MoSqQtE5(jRd;b|2YDH$ev`OGd&ol{b$vH$=_YJ`IDS$v_H+i@NV1 zK5hTDcH$i6oUDBCWXOPN;!(ml)}4`cQVk7H!2|M0=zi`ma+(kDkwJohZ9H}zd*g{| zbh9AZ=uxYTOlx)a13W0u73T9-VN_B7ErW(x}CuBECmIyS`nR5P=ECia!7 zGlBhI@kQPcsT$u^^_VWlW)O$)r$*|xsJF4Z7Rql7?`BuN1{S%B!?)^6|H?iT?!arD zMbb(8{GY$|Mt|y;RC#9ONii|`oJwffg}&!n>OA8tLGaT*A4nI@%PcO%eEs8b{RI!GiP$Y!Lp=m$(6VSiw2H6@} z*EvXjt70GTBYIhYA(Qxs*4>ePR&^vDxXt6`5%$zN9GzIvd;D$LaQZfdBmzf zs~X>~r;3J2tu@9N&wArOd}4_&#vsWA%X)h~@(9=RJEMbnVXMEBfth9lt};a zWlGel<+&YvhGx0ZOC}jDuP>k~@9N3kc7EpKfPh2|mD0B#;=DNO|2nX7)Q*nD9nLaL ze0>l|$<`XFvkbZe7g_2$Pq*f`ZNtDc{3m0SN=jNHtXR3l<;Sw!eDcCXpBKjtSBtln zM|L+Oj96V(o^*FOoIE5Ao)gv{6nsV%Gdo^Eubw(5q?G%T*IYO;m*1PWm_*WpisNyJ zSNr}*Q~h3@B^sALNa-lhQUxsVbn(je`hsg3iv@hSXW{nzU&$n1()_-Z{8T};^l&1^ z-d*^pczMHLdvU?_9YzBe?u4!;42JcO3`X0pe9htwK&;-FPeamlJXdSp!FwWpg+2lSql%DVz7i#FF9ZILod#U zW}z7sgToo@tS3?K95DCOD^Zd@KFZd8ob*Oa&w71n!#?%Wn$1zV@9+ZP(jwgH-ivD^ zf7ShQlnB$(kVP|qK!3*??1zfA%|S)!fHUe5x#eUbGe0|SdnUoJ-t&v9HSVJW?%;~q zg{yD< zMH3cW$;iY?lCoV9Qoiraqc$nT-xkReVB)qAC0Bu|LorMVc7C>CaKVN3gr;~qHTO45 zxl9HX5&Foebh5cjA6-mlJnG-1D#3Bq`%@@!+`fRrv=8Wg1a=slOD5i_{cImTpA}dm zsy#YGk*Z2QxoxJ{0}tZ)mNX;M{W#NPMf`$)1V+_zXF&#LH&}5bCil; zO&6WJ(Z86THCki72*pl&=6==4VIOByZV$}~k#$r{v*G1c(DHbR%rwW4=xU zVmR1*`>8<(|LX2%`VFIU7&cmqBk83n17%vYmI|DQx~%+487LI%{go#lUSb-st5ikZ z2pue4B4Yl)!0EZ8Z$0)kU1-9^n6uxtG;fFg3Z#6CKXICA+;usoL2qK-8cYB?XBkLV zrl)(N5c&MtVjGXTm2Da$nT$qv|)kQDoQK&|s%s5iWe%7bt|a`3=mNclG=@e*muGEW_-#wDR$9bddnH#EvQelA2>hR$W6?Rr*rF}V(c5U z<=Y!}zN?tQKs}V+1)C5Fjdc?^7 z@x8R#5(8Kbg>{IkDRy5Y@O{4zcE)oAYVZ@$Z+J|1^7V0IJdZ;~@ZY+C7v zg)z8C66^ z$wsO9YpyL`nS>r>#I}R@PwN5cXHfPyE{5x0Q}`yHNf`eF-U8|w!&wWLvaKQ?hr=Yl zM>qabEwnf2&}qNVIiJ6dbGa{!PQ3&Jflww3pUqt22*iKIH7C>lL(5!{-#zvE*ogMt zQ8&ITC=Fhz2sXpPc{?%b`;bX80&1usenXN=?Jk$rx6;yWs6_=mW`JnU~U@U&_N_jAn zfb}`Z>p-z$i-MaS<2vu}h~SbGC7TK-IOzO7Q`eAm3&F|628M9!(BqHN*Df66LI04= z<;+vDCylKz5QvAMS_qe#w^UDw1qNCN~xk@aJ>Q9+|QW^E0umWXXOhggxY~ccqa~`7c89|DmOgHD0X$L;N8y&OBM(CU@ABAwCxeyacO^UEH(xFyL!CbO)L@Im zo$?AKYG-$9Nn8QzV7A9Mlh)_@K*v!-<~-YN2jKVxM?78o#F3AWY#d!jS4gK>Y0<1( zJ?u~PxqAA#v5N8?wW2-@&9iZ5{^+*mB##)W8JRyn;zd4!#saDt4L>>-6ukJYGi9!& zZB5+CR?i~J`iE_>%)oA^(3dndN4D&ASeL4vV&6Mc(@%CZ{h^LV7nNphWfj~MY51ew z#$#Atmwi|Dft1L7Kz5!hRUoqExT$;0 zM$)Qu7xq!tn6az+NIgnYcY7@K!M*jdkUop+6{KFL+bXyy#jMVPOG$H1g1!^v2__9{ zM=FF<7&(z}y6r!B(XCiZ7gH}G66yNASVN`2zo&sF1M}@#UI1%TkeM~Ks)?(FG6jKS z=R+6BXJKV1uGbji>BH%oMkqK_dE=)mBLWvUUv6}{TDtN;q(}HP@82c<)SD&j&{_m8 zAVtRpRQBM$k*Jo|bR>h)fhRgPMPf~aC84hTsll+tAP&#nQ6wBBXWys1`gNouiON+j zgw=%U`|E#Oj+mpREl!5z(dG*HRTBYvZuj6;*$uy{n?;Va>&lZy0lH%qo4le*9UodH zdXJVzqyK?}Co&QSGymOZ?lOfcnShA|6?#VK{JJd(KJ*CMj$(#>=r)CuwlQ}9(bv`a=jx6e>$D0XwDm*6P(T9a1N 
zkzRn@Pr(@rT*8!rd2&8tpBrH0d;ZS7Tr7}SaKn&kz{fznVQkzAqm_9w4}(Jx9DQZR zlZl^=`To1MmpOn_@2Y6clIc@+LJ|D>@PmXG5#&upLN5v0G;ytySKX{3V^pyI{7}H} zT&Splvk&d#!BGw!vL5Rz1l5JITJcBCi5V9eMV+y%na6YMX)j|usm{NLN*>mKeMMyY z2PnovfPpyITu##784G02@4|pJZel&1Q1>)o$@+l}$a3u|zeg*aLp&xhQ+<~>Ju6M} zZ?kSxd!0Z^oU3l`gMhXK79`XzKEFpy1!V$TE}xmOE;k|sQ;rKL$6NSEEHw(P7Nmn8 z^SOT^7l*;Alh!$Z6s~8P-0++~=kd|~Q;XmG{?}7|+_?c;fm10qCq%RKgeT3%fWql7 z?M$!gY3o^!?>iQz_}lpC=1knd`oBF5U9?9MG*;lE-kKlLfAs{kd-pVT^;L|y_?=MZ zHj*;S5wou`RFCnWuDw>?B|X3)usUnSeY9Gye+SY_Je@Nwjn&5s454+?+G+L@fQDsE zs%7T(@7?sz+7;s|y(s2-h=lMaQ_xZ~$?e~PcrqcXit!nv(otfFLHFEP_?E%NI1z9BS^rdSqy1n3B9f_$N&%yQFB8({Oizo8h_vbZ@mm#FbvBx zN|~si#-?s2rH@u(V&E{Y`e=hwbP_&%s;4~RToW3gF}`(XT->XFXWQ10P7FinETa}Z zzlZ(azcP4P5dD?#;3YU(+2kHv6gVxneX(QcJVHdJoQ_Z^3`hVze+2z0mHbB+` z5A{)B%`x|$H0FJcwlGF=HU+|at#>#5lS`gHvoiNx^zQoV2M%FA=)b;{fAl1*_x4|N z;b3=8wmBDclNtZ2N3*&GJkEc;E9>7(JGfcuF=6xP0#;meIRb{RcW=)Qc%-YNOL@Z?tjMHN|Hpi z_kqkcp%mmM=ewYdiOuCId`S|vxO0NC+xx{BVz$0>gm_8uKHM`FFj zKN|TkC$hKyF;I+BMhAO=5!wDhWw;z=H#-<(CnOjcr(MD>Ds#`Bvwu#G%@t^`JpI(AbZZ;FXLnWK^_Jmj@{U{ez2Sv%OSpivfF zUpyPuB{gbZ&YGNCO7LJI&^XTEL^MXqY1o4DJj;PUR&ThhHGZ;20i7ivFy7NYUr4Lv zgb{$USkA^t@JU{;yMJ6wl~!4s8~GXR{V|>!XSUY(i7r9C#(v(1gw6*!YlMJnYQ_pk zdo)qfEL|fH?cWlYrzs!q(*S&_A06uj9WB!Nuo|3G?n!%pc#fZk*4%#ytB!l)$@3Dy zKtgliVL0dDROE$89YnT1{Xbm$-C5OqFjm652}~YD^+>z^ zwSO=dXjn&6JLis8c%qyC3QK_YumY;e!$dWs^OztaRieD8vk9#`&i?{wgqo>u;V|T2 z=4KWS#v|rg`awm@YbSjvB0iMmbNm^wTzxZ(iM(;Qe-13*)7{Y_6|ag>KdHv>(O7xr zJ{#G3Fp=>}yHxsNFf#dYwL22^KRP4k`$yUY3i25NqSrX}_D{WDIX7J7Y*Roo&a#Jd zunL4f80+GVcwYiL`?SC3clwWRjF?!LXnA4QtibfFS+~VKH!ZgBNxsN$y>T@=1noaz zyRUF&g{-gn^j}X;{Q9^VV|7reY?8z5rUxqmgHhc%|g>Hc~&kcBHq7xR+u5Ns1 z+F3#Oe+QBunu2-s#Qay)i=6*>GX2-!9HfioH>o|V!wBc#)bNWnfV5qRK7}3T`|nd| zav!s`>H`M_b9*4pMV);QblO2fmW!>Ua1P6@WZHiYz4=|$?Py70XGFf$!)CNhqK~{b z>Ta)fyeSWjajwn3f1d^>J~MP6qD6ijOSiY{v{KysJi|c$`3HpatYd}Xrm1YPqmaJ$ zR^i5b?PlKV>A4*5{FFl?30Lk>cj53h|9}5|B6|qCGc%0CR5njBtGI>75MU zll?s0(|bj0+yjur{rtC2&F@@q%NAmu_kWm8CJuO&dHQGe$@Pn7zYHh;vhms52Lz3N z=%VdU*J*oEpJm(U9}ja_=&}!5N3(S|u>qY;@@p-ubGoJ5GyjR$6Ugr`L2Mcy)B3aG zGLrf>w{_=nJ>qiA$hNP)Sk`*pzuL`{UXMw&_vgP}U^jQ4H$OdS)yc-Sof#A^|F5tj zAH4w7ks9^ARHIXV5~yG!fu#u7{NY9drrt*jit86dT;niL0CSv`#5>@0<0%ekUyna~ ztH6Q#xvKA4axIJVpSBkDq)*vb`&M;Bg?cCdfDh+4zr~pMVmT4Mn%vg+%vK*ho^X(& zi}%TYV9lnzEnUwl*1B)T^1U_W-}gUyDap@1)uy^-1v{$esc=raA_hQ{-r zUE!BwI^Kd&!RIeZSXgRBeIP&giH~k_g=E^z3;TXy2oM}0OJ#YBPwC@eQeMYo)PBl69mHTsn_3YENfQw6Il~SoWuR~_`&A*v|+8yEY zcr3^Lt4vgNgS(Y^>_B;mSrfn^SwPT5ZyyexK8UnFcZU_pPx93B1G4=IVC=%0M_&Q1 zXG2uv-C0H0WC|~^>URL2O^Fu^QaBGwv!1LQ=y7#Av?#kY52Akw5l9dPAyHbSBEk=IVyMi@}a@*wgZ}ST(^- zD%djp(5(XV^yfecws z6Tu|V;#~J#ruWzT2OKXintyA$*av|pZvR}hHcD80%ptcfpNXEyN+E0OJ@dO!4r{(V z9W|tmIdyH=;Xzu#V()yUtY4(!kiB{h=!f*kwYCcKa4C zK4+mR!2aU}C|Pi^W@N3GZU-TV10WL4sW{7YGXKdS7PzR0s4F>`f006D(4> zr?PrHBeubet0oFriorsUafcg^XKCl@(>XJV3wej6Wm=cz{I2Ii_CTr1`{S;PzDoDg zH(~k&J#{U0l|LEb{>*S$>LX6N9L$NbR~hU=X_oPw+A_a_fuJbG^4v&OUim%m!xztN#+ zplGHBXFNDwGb=-(KfgZT_es+}i$|6nMxVp)dGxk;{l}GUCG-BaBa(6~k{5WkfAX7O z1qc(1l5XM+@H#Ff;a{FH^?nen_yXzFr+RFU?! 
zp`72X%Q{+f&5(5mr$YJ23 z4>wr&n?D-K5?vC_E4AgAt`Ajr`=hiffafW~Rxkb70C z#wj|e#I4gO9nWP5LQv|}1`qQ4W{3kg6ZnkFYM1&Q*b(t6Iu3~(QE_g7v$mh*tf1AM zKhn$*zn4yhGJa3gV3+BA3cE~aMSmFjc7W9YkAjM>gFkaQAc^8|o(5f~OjIq4aXsYd=A3cctVsXd0vdY~vx3Em_)2+okxMoa%y|kX+ddURk{QOo2{du_m zIjfrJ0cQPGD{*J+{B{-IUZUklS%}GI*D{Fbs~)WF*DUY*yY!Hu_#Lr5UKWU7i(t6k z`K@4q5JAk#YcnPj{wII?H}+V2|9tMyWFDLUL#OQgXbEI&*N511p#0AN=KuU{yqy)i z#oztbd!b-BlpPR#LUGwZ&+Vyg*zxy)W}k1Pe#lWkZ^If3bQi*C;Px0k%S`V89BlB= z&jDMD92Lz8-Mz}|nGe^{%x~JbmB=GS=a)d9|NZxg7E+K19R)ezp3XzYdgr z-p!sl`Om#LUcm7ipf7g%yrWzSj)cKpHI&;PI{xD&4SM=-sUzdw=5AO6_!uYEVm zH@?fBM=R5#aeh&~KMcmH-{-6GuIGE64%7j>?)eGOYR*3abT0@2E#Ljt$M0wU^^f;w z(Vttn!apvaCqP%-NV4shQ^Yg>EAZ+a-Fokxv)y|6ryIW9Opb@?+iL@sJbnu3)-ylk zR!!w*%-M-)gPlXV`~8)j6h5TyzI0*Awf6n}_2nMpqGJD7{?FgWyH5qf_=8ApM#0nU zKDAkp*9r)id)l16YCW&>41Wl@f%l1Az7=erJj(@Z2ZA3)$?J~&S#kNe06Tzp_Br_n z6m}VxE&IHP(Z3j5WKL(ue)Bt6Gx-0)utyB%F$L>#V_xMAigO0RjY^(gtwh^k-5N65 zkGrrr+v*B;JhV1_GPvw;-MI7HvS5EM@a}~*N?@#QT=vTyX#DJPyiHDi3BJme0<}o@hMRJHNA+XNW^l-8(^AMgDA{ z`B1d|4;CTvS1a`8r-eYSEXR+qkExhRG=NB%f0=w1p*f2^JHNSFPh-!W?Pr`eZ*8LF zBzxv3mZKT38+(1>0>fC{JtLfyT(<~l3Ahf9K6e|lgLK~?-C&b}eOTM0cj?Lh^2d%2 z4)6INHNoiF{$TX4v^`)?4gw6r3T{SfpRE(!IxFzzupuRMaBTI>di$2C($oecz0jUj(X{KtW3bQegdz52gCcU;M9t zMUbsJ)c@`a^8 z8&TaQWPAF=`hVshuhE@teA9J+$J>TLVrpq;W_9a6%+@#mI~wc8)H7r& zI~A|DzvcCnPDQ?<@*T`lqb)4`PiE>hhG3G?|24mQ##(?C?|5XW__GI4mjJTUx?5vi zG(A<+`7#^-rCf{W*+$Fs1Oo&^vWC z{F0Cbqvx>3(PzP>{sul6b9NBHo=oOIiURufvRM4?d^66olw>NN0Om^v+uWN^v~T`- zKKaF%%N35MkFQa0rg`v;K9lYuLp(Eo^2A}N_03_kv#1Cf#=5>8H~{=`P}`NPx5A2A zWG5d=6C8ftN~;bYwj?C*+FPfV-**`-t!0QOs(e%3Q%ha8x43umISJh7X13~hagz2N z?BuWd>W3{))&9*N;P?FIMy^cY#LG0VwCwOXHLQ-2+Tf`5>yzK?_M6%Ld9B~)doE<# zemM{PXr&TOt*Mh|PZrw$^2zG!nH4Y7@I?llbLE||rT*9aJAs{5o)P%f@{4-(Svt{s zE-|veKlvNq!v*0?(CxKn>^=V|@#=0FT+AEvxr?_J682xVVh=+5%!el52KaeAUorK4 zey7GFXHNZ`)zAFaDu*clQ9l(0giIV%)S1U5jk`PA=f3t7azvho$kaehMc&-VQL(YphH6d|{1~mG9 zBvTtL0ch6Dy#Y?<(>Jo%%gOtdngU!OlOXsw(nvp3-9AI)EThRB=$w;6S6`dMxmtVY z7vOSO9i!;wy--hM?f7X&4n+La>K}pVkDtESWD%AuQ1(3e$u&=8@z6Dn{pPhN{UOoy z+KXsqX>To0|8T(1$Y#w2(jKt5@>~`u`#df)sIAeTtU2)YXZ_n;CfL9ID3}AC^FHS@ z_3+;Ne$K~pFY%pfKev>e@|Eh^`8z))IKHTbNFV~AO!SuzA-e|-LD@_7aaA5X45lZzrb@ zJ4iZ|O&OL=@>{_DXc~E!@F>-14QlZ3e~FHu#|an?*AqEs14e&xaS)Q4cGpz6eY9m_|K|06CsFT({NkdY`!jF^xR2)f!JYGW_j}`rqMx7hbG|m1%a0EZ zY1J)#`}E9D8@mkv?fHECn=3kjC`oNbPx2PMc|R|)iElCW`qw8)zC-AE!BruT0`L6G z#{5ki0qdxuXQ<~SpYx}gu7x|;|MH8^2ZN&e3;foQ^Djty^kpHo(?M6?ADsHub|YAa zrY;`(-goU#y3PMeTi9)C*xI-HG|sr~p<8tp?!R8?UoVTIZD}Kuj~c~vI_{IdA%h8S z=j6YKinE0qVb8n#nO_F>un7X5%?Wk@D5#xU@|^Q~Fi?$KCNbb7+fKukQ{P$pT_d1t zwfsF=8Q=LEvd0wB^Y@5}5LLp(ixHh!3(!HtzT>N2 z9mrPT8MN;nW0>RM*^G41P=CGd`A0Leu;!ersfT??XRl$g=+7Kzfvitk*UrC(_~{jV zbcg+Oe*OZc1nBv^XJymlmHdFO|C`DE9xPrbzb!UP-orB4TYAoRju+oMzY{qMfL!D6 z$>FVge1EXXCzQ2mlF1L(oVkI%`Fkb>o3anksi7NTpODte5$K&Cv*n%)wMr?$a(QU) ziFMA}Hg-WNQ;O_d9xGmSsL9MI^R43&4Mfi)Xkjh+=650aEQqUW{Ap;rTkG`_KsEHv ze+NQ)2lEc(oG2!;9~PzbUfSpIl*D0pba78}6*JrQ`hiqcY!88MN(b-7H$d_~I53B= zgQwN~(FwfwJO9BQ;Pr}x8Y-Q(6p|5~s%>okZ~io+cojIixumN5Z~i7tdFU4X{*HBG zl<%?Wt}5 zmQ-7g<+5yVdnc=Tzn~*pm;A5rQj6*SK$(DDD$JRc>4mxb=lbzLLzw&dZ*RHO|I8$; z_s*|oufgxho(_cXr9E@vIo|<(=U@G=`Pa+z9TcyhnNR)$`3a=@avc8+G?UL$iq4N! 
zML*izn4@*=PPOyP6k{tK9lyYs4R883Y6bGnAa?V*hwQw)_y*uSz|nHs3$Hv^sM&kn z+pT2&2sHm`;+0JzXDP)nKq%7-t3;eWf@4X#??Pj032E4pnXl@~mIV?xszF>uc=2srLUu z$m~2);A`|xewWivHew8`=fIedO(OfSSud*1RgAGWDjZ6=tUyIVt!SAuW}owBb@Lb< zcxR&TOMA4Je9>6`R#(b4^MnD5&sMbHpLcE#w1RwH!XiVyk001jY_+wEONp7f z^!CkX+rG3C(yOiMd8Ab#a`ox_sYIvqPt&ji%KS>;uX#+%Ud@xWjdVK#c0!ZBF?>dv zWjX=*9t@h<>JAxxmhb$e%C&s)vHd#u@-@%adAxbNc2T#+-|vwsScl%xnr5?w&S!-D z%Z5=o!teKmEyfQ7dbR21vX1%MPqI9EiJF9F&@0^!?fvEFH2XAGD4t!vr6Ys_4g3sG zkb4epSro750KMa8>{r}1GDg|-Ze*Oiv!w~bs%{u-3gZn?P Wh@#)nB!!{?0000Awxkyp()5qe}#gAPW*2Hz(9V%STD?hY@l7g%1J_1 zPm&x%zDQf>C|IhfKrun~0Z`Bp-=N_B%K}-5APW=}YymVBEMyD)-){vl|1T9fu>kh} z@Bf#Pn8R@#3Q8PGL0Uq?3;Ns-@jLav^1EW4o$h=`-pjJ*(7FT#5~d;pV}m#oCDM;j zaf;AO6r_(td4v2)Ixc(UxF%B-YI889E^Iv2uJwp90G1eO*&60Z1C zNLPox7FulH%%0tF&ME`)l{(XQm#1g%T%3ReB>KoKp)N?DPhuASYpyGi^36kfdD>vs z0LSu)%^@`{lFS>G*(om9$UO@lq_A=25l7Z3Afl|y0dj0)e$NbrD*g)$iU>5O!Oh_j zI%RhlqzU3HFOR~ZGZ{EC+tn`t($}?OY+)RPE$LX~Ai-7+Vu|{${g{ zF29Y0DN>Xe*r260H)TLSrHN08yZ#!jpe7o0Wa1gIeLU>hWSY4x6XvYp?C*t#SdCEJ zedGX^@s8vCw?&d8AoaXjVXF+J+npDN7_9k4_T8f+dX3h$sEwTpH5TfslhYY=224!6 zx}xtjPPhCy5H(~cf%F+0RDu2_9b|8G&lU_; zc)2zfq;K!{48|LcT<7iV3hP`m8VjZogmlmTz4`0Pr9L%bx`LZcj|(QScDYLgDAfjq zy-SQBTy1@VTB(IPJ9HuNL$?b~Z5O7&dJ(G8Jj8yrzl`n8(aTzOMf{zV+<{0-h|NS;-XHn~amVdTt5WeKNrF*-B+ zG(Y4E+mmDhg}l!mLufqPn+;e+dBW8?+KKt3m1I>_w{n7FhxP=I2G5R^yu8>@7g8_y zVQj1?Gu^i$Md5jet=Wg?`<<5Fr+kvt^(_CIQPYI~XtRaA%M}D&zD@Ca14`M5z?EuV zTwV){&S6?c{?`Xm)mt#`O&!mtPuzP_G7Ad^i7jaAo4f&ava{}QRN|quW@lIDz2D0Z zTyZfg#=K&Mm@PlQKX^~$h|1)h=mdV@_&29F#lWgPl<~c0Qij-um0hBW9A`ewyxtM0 zVT0EsjGyi0#i@ryx`zsBJqA)pl&Z@SF%p{78e2YrLO7Pnor|xXiwECpo)4R*%))!$ zeqC?{7aVS6%B`&8q(J#99Il>Cm&-8O)s|HL6MMhftIOv_bN>jhJcq(s&{N*2N%b%I zb)hu>#Ix{7{5#KVfcrMt@!+y^d2#re2B9Au$ip_a2nhin|ym@Ei12?FpYJ z-d!iv#yYs!m97}fzpB+sVvh&AR*;Tsx>XW;>>|}6!>H6?_a+nLD^yYT@VBCSaDNoA zk~*gis|RE~h>D85BnZ@zDnQj@XMp9|gjxvdWX^bH$3m2yR%gyby+0+A2vPJ#lTVl5 zj-GZJq^)=eb<(SUo{D{+6uRXj_p{nbnphIJQxex#+YDQ>-8!U(?IIb`(bRBC$D-)< zv4sM9xXxjmm>@hZ&|Xmga*lu75PB%U_YZ0zFN#&93LNmAnQ=I4mh_e!arq5op%mw= z?D!dEA#-{k7sleq!pa|kRCHIEGen`dSSLzvW$g9?jjDHqzRZ0c-lm-fQsGq6YQ}4B zpJNP!_#cr7+MPe?X(ThKBS6MvH8@CbhqgH-zO+ss&|0!3e;fd;B{Fj&XaFGNyc3m) zTQAeE2}J!KoeUf{hQ~X9Pp4;Eax4E)S;*LpOx67&qX||#{{)ZQALq&IS`MZbg|XD6 z?#e1S7dwxoXzaHJ5M+XtC2t$u;qMKag4D`ZZcB_FRoPcwB;IJ@xWfXSkt$BsTy%Wp z(Gd>RGj-73Z8RwK^%TGiu#|})I8;TQr}+i<3@t@d6xXJBlr}SwB6(3sLI^OSa0L*g zf=^g$(oxND2+%Hsn{EB^vt}$W?tdmDL3;xTlE4K+YCe)!#PrtH&H3vJ*OyBSjQ;p5 zCr(kcN(0i@yuZ}8Zj=G-*L?_}!<5%~<)E*1ptRh9qMKzhn#cu;pjAN5iXiBx$jZe$EkLrwmx^d-%HRTYA zT&8cpDl}6~2xAykw-of&4s1C4XArbSzCU7`OmaU)PiwO-QpO8oobr7?3mS=%lS#<9 zDl_u&N;x^vQv&kB|A>s4nSxK4r&!0Bd2_9)1c#maNJUCv72r_i zbeib!KvAk*$Dd{3wkk=tnEte-E<<=M=v(d2aS@GHhp8pu+wZ;Er~cj_XU=my`3yEbj@p!V+b?$6V-lT}^n ze>5IyGvR$#qxUke&oW^S`CLuLLk zH69V9Ux<>}tH;u|u;F4YF!f_z5q7;+u+AtTVEz)VAyJZ)?b)C&eG|Grr~i5O7rUw3 z?W_|C0mhA)64I%z!5aC-7sA720cn$IQc%GUqxG2&3?R^!&yQ8&%&?mTTrPHMXCYXU zK^Ce?FQ==DKkrs(-n2C9F>l;+PY_82baO3n(U+rf|#dRS>uhI7E!T(&VL2E+CdbtfOD`q@#B#RCrfBQ8 ze9B-WHAyD(5yEM^-F66q|lc_yG-LHRyoOGe1@>#y?irXD_%^ATz9(iL8Kel`eLn z+q%zgN{OSh>P5V9Yw5X0aqE|cB-0;vikLM0=Ss!v)l19@p|sGn#d>dIgk&wYB(2)) z(;T|GC?rvJS`HzP(eW;n+jia_%m2u`GL3%R+UFQQ?5kpKy)yki)?^octT0G4Qixg2 zr`tRi-bAeyhTtQ1_@NA+PA-18Rf321QBi~OjfWj3XIh-sY~Oe{$|x(!9td#0=<*ME zfqAumUOmhxbsIO%G|IZs-4lHX*yuJ%BxjyD0KVvO=>L#u@F*1vgTyq+&@`PU>zhPJ zT@J-JcVGF*!0AlUdAj2Azm|Tf16K`Lol2J@o#7>W*rQ zMl;p6iHsXYOQVsF=K~wl@dy_N4?`_6{_Z3qWiMZ~lpVvgYC)E|0G|I8`Z0xyQZp|OC=-8=wGsJMR5Ov#oPJA2b75)@YN zj2=^^`mwQ2OldRIY&4cBmI{j92)*&Pq$wDP-5ED&P|gC$zzTNI50 z4B0NLKo2!%#zJmtHr$e3{f|#_+u(GGWldfX$R|uC%9_#^gPn`C^_$(fyND5`bKCPz 
z+0YX91GV}aQQk1P0~hiWID7B-j*_FxJG0e)|l@mQPFLdCn#4~3R zLwaXlUfCD`v-+=)7D)UhAqgLLYHz~orQ^}r*W-bc`+H%SZSm{y&X3T2+`rDsH4c+e z6B0M;c~@w|uQF7X<7TmG^B3^Ng~kaIprP85Y4^xJKk%G)qTyJro_b>cyph4kf;>@& zCl!BFF~Uw(O-TQF))2o08l*RikF3!Gn*3H@<)!!%dW?n^)B=wp9$3O2-OjYFovps- zCw1AnWQ4-sKb4RJO;a&IAJbI=oRCf^WLCiC11a;*D^3&)lq=JC?&~8X3oXhotWHGp{hg;W^T4-2%`d3aNJPb4N$(q#4MlOUhAqE2Y#NH zHmPH(*a$LS-wP93L#H7a;*d!bVJGOu-Cqj*1 zU>3|T-uUNUkMA~YNr^^rYa+FK6G_LoQL9fz(aQT&y~#nqv&T`fxY~@U$^B4mp&H`M)H)s9)7ZpA&z?HCt2tQZBue_5-+Tu&{&a%ULa-Z6;S})=vt zpzzNPEqsS^0bHzn;g+)cC3@SCH=deQv7x6oj}Y_T^3sZ2a85Z=CXT2e{HE`j+?{{( z!|i`#f7{^wj$5J?aB1b70s1`v*^qGYT}CHm^mCer(M*OUs!E=NjQ|D~-r|wQ5B>OW zpR~0k0w?)Mx~Y3q7*ZuW5>#S(=v*#p!1G!z!md91`edCfp2gb9J03i81YPkHVwE|Y z9Y>q2t3SiD&o$av4g;2Y9I4NkLYO?qIr@f z-LDoVbAeUrekCbjNNN z^bUJ5>VS=xjj&Nw4NS$ZqXpmDUp{Z%p_m5tJ{bz;dGt&%1@l&)f4f6|{eiwNN+vQ9 z1beuOqr_gn)Mzd_59DQ>(k~){DJ0?2`NTRx5yy-~fBGj`iFUxzk zug$}7?Pn?pDeu(ty_wafHM2jts}E1T@OLX!m6|K9@$>222KnF39NR)Q6EWe`!2TDr zSm|ZNm}J>G6qJF=B?&k3m za*fosp1i>qu8!H(3F{7rM=cgfJ}w`$#^2}Ut7z2xvU!@#$}r=FN@8jnul^XH-Flxy zVrufiu9`@K0zps%AE4x?4jcnSA>NI7)H!yKrax_73ENJkx6FPuWUZvxm^u;^09OOD+{yN$v3rVA zC_u}fCZQQ-HNv3Zu6L>E{8-%>dxJuMjCk7xmde9<$*o3OGA1IqfU!H>Q?iQj|G@W? zr26MM7vH{#H8WElgN@8`dQ?ahm2-Q`a2$GnLKK?Wy3vH zM_GsEJt^7Oy{vGA*%Uoi{*?zT!mm$%Pan_fBaCI}i$=AYU$(e+!l%ARwEsQHcF$N8 z28HJPuLu2Qu(v1Y4qq7Iy^D9VD=!EyUB2ip+W2aSVT`>Qd0~H74okI~);Crq@1b7E z)tyhO?qvp5)&N^pQ0)66KL(lvIqBSeE1PCLERh_v1xaL~W`6!WwSJ+xQ(oT@kptSg zpsd|MIKzd5e;TeSB3V;#@X@4eRF(8=tdfpS_W6@3TdM7j(NDNC%fjdS{g*d?5t^S! z7ergwub+Bda%R4Gv64K+ZKo0t2Fh;)&oeC{a{)(k!2;{vs#dI7NlG4BNj=@6w1A%L zl6aI>0nbJXm%(`dK0aF>+>2C>2IP(8H4d_usdzlA5>N>Rcp# z8kCaIEHo7zaq~_y(4ve;t}(CmrbgZupKf(x2>mpPw?yBsSBC!saTx@juomuArT#a2 z7acTRuD|3Ho(L@zQc+DO*mKwQHZH<=_C&)3l&dEE0cMX)G(Q(k(BD_YEFeO@{P`l* zSa>te8D5r6;w>S4oc8x%Yl6FDhtxi1u5SCml5APhgnh~1vaFI_zCCW*Rdn|PdoYfu zU8w3qMruuFMBPh`ux&GjUCW@3ePpL01G=0b$A7ig0Y4N-IFH(!?RRnS%|Es%G<;Q# z(Bwj9ZM7TKwv^)_=9j?H(Ny0<*^7NG0krr6GkOjufUbTNqbkb7<#PCV*5&rw1b6Yr zl6v=!=wCu#TK%DE#r6BAi{*eLT+lG)io>^1qzsxsHOysaA1l??`Si0ka|8&_6dyaV zzOVSrkU)VOYtsC+N8xf;9rIAha={;gzaca|(PIVawB?+96D^MBoI9$XR*k@+?e~Z) zKcc=0TXTNxP+)vDe^NuP@w32Wr9c*1+t|x`8yOKt^qp#~x>G7jb>cdr5EdN;JLd3_ zoW3(gz}&H$Vgx^#TgLPYCaFy{TlaW6@wX>yH%hVX$-`ZB4C z!#pxnf(AnqGwp~FIey(X^s|M&B4J6{t!pgmub0L79ZEeQm+SrvSauUy8O`}ampZ0r z$)OcJJ+HX`VCtE}DExkLzTTdlIxckOeDbsBvGe;cy5VrT%2LOg5D(3RtwEZKBON~M zPy>Js#%9n~O$*=aiZjg6McYVei;6i}0K%E-Z=XdVx3NBAnN)IGFOK=plVRuL?7ca& zZqf!9K?WFeq~-3DmQKg;w4?*)`xVUtCgFuCjWNvYxM>^OxAl@VHQjJTetx7Z|8IEu zi^)@#9DT2o@OdmOJo8V@egj5XmjPw8RyZAyy(CrRELBUEp{C!-`^o;vpM;775}TppYdvr{Wdkk zc_I*mlM(Em9x&e3Y_uR}Ot)k5#}Z;a$$M&J6+u^cP`yS)Sf{P*Dk?o$;5M za4;Vts)*Gk+pDk(H?u#Gk=nYJY`ev=XNPH~K+?T>{|3s5ERVb+vpl}LVCcJIz_|Z` z>d#odM{k6IiRh=L$+WE%BOpKy{5rr*f*K2_>2w*VP$EWIT@h|A=*BKO>eG{|(qgqhppW=e0dySG0&rV6g`l=!V*%f+a>!y7{DIi15KqK$E9C`98Rl&7;NI-b>8Atrl^y~c?GD2v=%66={bqV*~c%uYy;i-=Az zv;T-8{^V$*H$;kJ1w{B6$c66HDZ>BY%SH{mt{4t=T9I=`XlVS8q-R#RpT_j+Iq8!i zK(HHw4jb-PtsTZ2B4>e?GbO(e`oN{&<7&nK=Gy>*ck5N(W#*EPqd%uE1Zb}8@?IUc zSw>2_7r*8~Xwfr$ou(VLmR?0B3+`(QGC42_O(mwxfgsuV{@=)3AWIv&2k9O-y>?@8 zq|4e<1aJ?}z;V^i`?iSIm%;s6buXlE(op7&LC0`VG#jx0;M*Q5AU z95FqbeZkp|B@~lIpCFvS4`}(*&(7WH5s>Sr^3x|x4J|B~;j`L&VU8@Kl3hEx z!YMZiXdkzpTZ{`ti}8)GcOZ4$Z zqRg@Oi#McOeg{hG#r43)s5WtN+9QKaW>Nbr^BB3>F&_Lq)u=bU4f&{djj<;R`wS8# zzKQeL@5;18&AVnCC#!;*h}E&1kokq97~E?XPj*omLbn>&vYM&-`+%yJPzaULdjk1iUqF zJj#NuxBvKVbX*?m%iM}`WO_=K6LDGSbhsX^(fiQFk#oSj*e{I(P8-=}%e z{?wmhG7z3b58mwBS7d{pyOtQf&33cGRet7S`xIo$0Or~26JQcXKhQpm$=B4fElC?X zMz%69uy!vem6p%?@?+rv5rnzn;fkX_#4}5JxjP3Su2wlFhIL}ur0Uh<5u~N}eh$d4 
znJ_`b+>lbh{iYeCtqS%JC6-KTk7mL1By;OAkxe;libQ-7?FqCc|J#e()X9)udqrIu z<{`TR<-8hh%tx6UYM#Q*vF55t2fMQ#nJ*Da9*AXzUIgw9c2b~(Ie7Y z%m70>RC_10twiKN|EY>St(P4QPQRz!;06?$7#P-)u2MkLR&WiuwJ1bD*RPj14-rW` zx=t1CG;t^gu3SemZ=4t=Z-y`W(H17mX1nVXkE!3uEU|^A&c_=rYR$P^gkZ6>X0Hc2 zS4(~M3!?J45!dB0pc#_Dv3in{3nG8aw8C06ds=x!hZo#P+M3U;JWIa`bd_qfVkSU@ zVPUG;L-vg2%g%HNk$s!lG$<-RBn$w<>u`PC^$-M><)ZEf_1omvJ%m`3J;0$XDj{};FBrY( z{^;RQk+VzT;ntf}4jteTWG7jDK0^2-X~_cmqgbTuixpVU-c&NPH>b-b5U7rJq%U!* z=+dyI-t`tc8QP;rlV5_$_@8=-vZBb)w*uj`TQ)|r@vrc#9>1U6Q^5SNU>*fFEb*@FsIjKrM_{DbgZnwO0er#_bJSTv zx(Q7P>$e%o`jKeF;Um%fNiGn;j4Qug=Xd(kzN*V^>xopgB-kKe&^0y29xU=?jwk%5 zuf6?#P}a*dF_gS5=={)))eT!;L9)aN(q1&6d60De+WCWVOTiaXIzcR7t0*$^&1w+0 zw-GtiH7~?Gw69R$KqclK(-vW!xV>&l2gRt-)Z#+M6%=8(1XCDxXI%dU5s$T`Z$CU= zDf#!uIxRiyZn?c;qm*Z6fbgF8iy@dFeVz<$npiZ21D&^AhSS!I6ku6v>$8MACH7_W%`6NftETITkx7e;H|{weYtz#1%e7PJh*XEOeuJz?Om)=g7;O+?VyBq zmFn=Qb129VTySDDMniFc7H;QGjFTIN*Vo2+l8$T6C&B2(C<3P==60M`xm$MqwbQ?M zGO2A)J<~>}6|Jlw%_`0Q*?w%*XzA}&9Feso*gf72HGRAkHe$yt>m}qRS9%P{KX4XF zANead8KY0`_Cm?0)Oa?LSz?6u&>SHfL`$X>8hG(ENwHJNYh)PHjTyz|Y%oy+*2>cj zr;+{o*yi{gru#U>U3Y-~Cx_HnYU1AHZP5m^R@a5#x~=ggz_%=Ktaq@)d^I*b2oS@w zBGv1O$zvFRTf8mZwE&2TqB_vvA5TkD(u;i%P+ zWPD4oE15}zTSCwuL1eUfe2qXb!9?W7$-AYPe|1rrTcSI}HD-qY_5&z*G=>3?;$mA*Az}i_&OAL$@G$H z808bf8`7{}u$fX~*tYt!cQBf_{zg zd&qZ{Pp@FHpvO3V%tht6Wk{IWL&u6SNti|uNKHU4Hn&V|YABzh?yhinU|&JW z3z& zRJiwX9eNnUoYp~>>+gV2zUQ34;Er5v*I61r^t5wDy89+Zs#R{pB6nsGWd ziDNq?hxDW%ufq^(7;?rw`R0T^;~y{VWiRab1`bZnpCF8<* zE}9HEbMlDIDlnhItZux$5v8QbO5!vpho-8ui-%5^RI|tL=&530Fm2NO)7f1+;IL_M zv{Zw+8m~q>Vfm8jej8IOEmF5vPhzdDEb{Zp2UDGXC3Cfbj!o>G6?vlvip~HT*#DOx2!v@n7EKNGv zqRw^$X;di10+@+ew*ymYusGZ-<2$w9`?oBlU!48mG#~e!q=`k_X+rTN*^%_}^wGC* z6mNH&tvgegk)^JEGojV{dQ)AXT?&M2uO}_P_-dUvK5V+Sv7X6x6?4w|*;7)8G_2S- zHtDOsME->=JzVuhb{cd zu}v@S`bLB2v^?MSO=o{kx(C{2mg2T!Y6_M#<*xW##-5#~$~Fz|y9n*dXJ#98FCS`y ziC-&QE<9`E%u%>l2Df~ zbWqaP5Z!+3QO_B~T{aLSc4kKeh0X z85x<&?3pNymJX7n53Mfx_vk1)o^QbXzscYu=8(H!W8=3G16wSQ#F8`zY~f1}N(q1N z>*2o;4+6kAw`K6C$UO2XW@7?1F{SB^!ULA0naM86l35zVR;(T;zRwMsl!ucLqD_w^ z>bjHG{_yUF*(C%jLnJ=&-V<17Bzm$Rl4?hq7K8_I1+}=wbDvBk=8&JY9?)hl++d_6 zz__`jLvBmbn{q}e`P$`}N>A*F8Hjrs zYErikb}GBIV?aZwZr*xA=h@nchKGsjM{}g9A~W*h>kkfi@nAZ*yO!;rs;DZD!JmJ3 zygrU{yT~R!sGeg@XW{SVO2!t)pcKYq&hPoig@d5cKe zlgR?qgzQnQp%ry|0VpEJ4%T#RITu${Cj7lOT>?|x2-R_BJXNNyl98P+vw3`~7nz|a zYAY*gkw!`HsVq(2D}uaA2+At(QUILAkIFo&f3WFN)9ASCCQcuk-=lHs$>`9+*M+yk z)#73yWsOWE_#@elJ7e)V9kFYvjrHQ`@?{mPfSvSBN@s(vf(o&^4c$dYNq1*P12u0# z@;2eblsbLBt9k|K~_(M-^;|o!w=u?ejGl-uKh&FmyYw zr@<17s!7WScwBlLNGl{EZxs{RMw$AB=)ZR1Pd`Lv5_XHbNV>=arY@Ir8sG$-`CsOc zFW(;wdGt~9e}tk02LuoaJ-hMu|NrD+dwWzx36|HFpdCxkp}U$*UA zv|S>OC$PT6QBQ7EN5K70F@jcylLPrm1l!kJE9jf;lR$a-96#%Q*j}zcEM|+3eJRP| zja)V3C2a7!b~(?>1;hj4=&Vr|jmQW&5Nl1g&1%@eGUQAvcMN}bmf9z z0zamI=+Rm4GCc=%mg4Nd2qjd7)NUTgmLF z@4yl31KTM*v=F{{lc*#+DfU^QG7j(hYTfOc>aX%K8-`NHBmx1<7z*zALjZlO##s)s z_Fd^fvWtG;fDwkOh)_%zF@4^2Qy8el?EpOOF4OgogRp}4G12-RWO$5N9A*j;!mH!65tqK|8!sd~P2y^r`E21XhZJodOr3<$pg8oY+R- z*c9O;C{tB+q$z1HL90`Cgsaeg4yc%9tZ0i3fpBt%tFUHWR_8_ini0kY;g0h+aDOFi zJp&3O#B}ToWZbfm;QrHQc9!n%{rgiB@;K4KpS$)G?1dG@V0kb%_-7Pbp3YwhCcu3;dr3Pb0N4lG1MG0ro@ck5FQMKoQK zX*Sw~3ZPTt+#7N17QQNX*sMvMaVC4|`M)5PfySreqjei%R*?Bjh?q&&XFvG<+0H~L zGl8Y>!-{$rWB<{Ozp2YdfR`HtO5Eas<|VPyoL#EDBe@7&ciEk96u#&)Bj@hr&^Ojt zlkwA{GGWp>QrWDExlm;nCvJC6Rr+L&r$+Y+IdlLAUhwPgu;FgVVgkEOnf|h5_faVP z8i?l4?wv^PEpTsWgZyvs7Ea4HG0zijd#m0ukqOVKMGa!@Q@4j#;PK~M9(Q5UQV{<9 z#~jFG5t9V253bzi$yr2F1I#tF0r7n_914ZhY9Hwc0SL5@$b=BChNfRSdhW#>BRhsg z!&6d5xyC5$gF5H0&Uc+YPfq6FdSW33SFue#upuAnvWQhhWChWvO&5k3exfFRrPIze zik<VATffP`u4c2{0F?`Ts>$TFRQCkAcMCC+VDz`0AwGe) 
zO{1X`K$}Br4hYUVoVNQf;RUmRXM8eR#ETj|b}ZZwd0uc3NN_SBMYHhw^z44?tirYD zy8pck0&Vpjt-E72>q2k>GJ{9ciZ$ns{BRy|lYlI`;n8!7;M~xF8H0z9l*-%eE{8ip zyzfmteiqqwxTF@Lufe~E7UWi&UJKED5|(&kHb&23D|-zhzy^VI`WU7Tt3K+~3H)m> z{ceXJyHr@474O{IM_X;rV->Ex!)_}=AvUdYvM3b@1OobF=JMtO5rhYGd40L;!OVgy z7A%wc*@i}=-#&_*-p2Dl?g6e)tm3=VAz62Wt2r#_>Gb=<)HxM> zz=RuT7G2*H;b|-qG?((@D!<%na{%;z)1o*v8Wv)BkO1Ez+;aubL*vlLk4p3NgP8dh zJkd|M6?*hm{O1`El2mRGGeAzgIG%HzGfmeQc*+wy)iokvu;KAj?zj3BMQ{v8q>sIH zP3ljLxC4gJVh&>No;L53JJChzC7A3N$>5VpPKY=ufd0_3T*&N&0DFy$#ov0KUK;Y= zvUz~0zdc_JF;V8#Fn``&o(O?&d~r%Q+*rA%P)6t5m>Hk*`(p`_&d7A_;JFL1h117d z*=fY7HNH}#hMA1jJ+1f**Vm!2&6C9hN@RoPlgL<5*6zwW{^upM)ew+*;l=9D%J-x)hQ?hGDT-Qbes&qDzg`QuR zw&{ay($7xLx()>=FT+*x%_kcPF;7}`e56*&GB+wzONcz=P%x?w#va@Gm2V0$-3tPIWzw-?dNXhelOCU?(lAuuC{K)i} zoN8tp_hgk65)O0G@Hc;RzfT0wR($FtG!g?@-LdjgZ8by;sX{qmtxg>)T0OOIH~Fy= z_QI5Egz&{Z8|m+7x4iyMreok>&md&?f4_WLdAg7ODN+)mOhNI+oDrp0uTN*mT|%z} zP78SXJ_K(F=)_*t0HVsgH0H>SeP$kTpqOfAeQ+KzJ2LV~j9+VO@6AQu^ayx)eN|Ok zf+_WN5*cRmO0QH+LRFKyB1zNm31c!{44LM~H*C>TxO9Q~_>r#~Q>AtfnXYgV@&gi% z^zN~P#)+-nK?93dD(S~moPP}rmeJgipXZ=pWOG41ZSwoaz>GE>6}yke3v(Z-q%VDn zJ+}_{{zWnI&eum)%$y4*69mcFc!fGx0HFIOz*ix< z9*VQ~C~AY)P}R%Jc42l~=#`v_nB9b|>7!y=RAc!rHhoBzp$VU87KOBtjHRiF^(DE9 z{*W1G>jifMn02qA%2HlfRat)87K+R3`pGx-a7FU;YI)OovmVxIbn^3>y?V@DMRm1y@nSw0xzU%rX{rgLw2>aDsxlL-Xw=a<%wIq?&!ZSS15-a zqT}zyg)fD>tKWl@DjFHhIgweFe(~ zm8#KYMxJ+Jx{#CY5}ctnWIdO8aV|lC!K0sOFjE!SGt^;^k-#*Nqw@Fy&vk&LyTsjG^4EgCO3>O;Z~ z_E%zK;zvD(UnyG9aSs;f(4}0{p11b;7R>d$Ioa9;aV3`S8rsMZ%BBjY>MYgtPosf5 zqc1%>ORlWXb(J5Kbq7Ril5+mErwN5NyYii&ZHs_NCpmm=^g+r+WVl-O?_>wq_rcip z%+6*bEXOkL^S60vsd{%TP!$o8&Z@};)a|aOHjiuYrMEdQ;U?8mLF(Qz zhr>90rj2}3m!JbuNd6bOWCtTMn@T&zx%w6U#`|iv^Is`p%`X{g7Mvm^UE&&#x+!l~ zVv@BXx-yibck=SzaVTQO{z(P_(X@+S%c|wfco5P|hg*wOpX8uPKZjs4KzpBeTmkvH zs6&i^=qau#S3zP3?F)O+q#+uYPwOQE7M?Aj@E=^nexzx>5Z+kjh2mxix#;2QepTvA zfFLTO{;W(zCalrd=J(%e{i#fn)jFEeIO`sXhSFY-yON{0elOmcT618;m@ zyA0T#53yGLY-)XcHd|7XIn|?_8v$?H5SE1}9L%};+Wq`ov%sGF)djcsR=Ms@hNHIk zPEklCymvHR%>I{fy|R!>QTi~1Qj5Xo7zY;QyLYD$kBS*3$H((K`mKy1)`bVtTD8eu zWCb2YbGeq*_Xu&l0wntx5;RS~4AhPw2YOiqGn|PY;o0jm-Y}Vu76I zX@BEH$`I95SG_3SLo5!1{(Kz&!nM|B6nG3C{D2Y5Q{D1TQ~191Je#2*>e9Ku4{63+ zin`UTD+d~0mfy(qaB@QU3P+tbz}C){gc<{b&}U777KHxghiVIuRoOg;9(%rvB`aV}DaFwG+6l`$0Oa z1qFoz{oj89V6awepsiZr86XNMs$O-w!wKOb5G(MDo{!tq z6?=$lWuIPY(XW*6u~K5DwQ6mO#CD)+ucq#!2}KuOENQIKq=Ipvugp7Llj`nJ4)zm>5VSsMdL5( z&p|~rS;cHIH<;=ekN@%;dpKP>-f9Y=E|QPZ5Y{wh1zK|V5)U1(QIQG!I& zXzQZs-x!6cW~#dxT9?)Pm65S4AUcbx$WN9jgDipu66O?$UR407MoL;3b{stI)HxSi z>KPq%#%PO{*E~!&NZ!^)r&&gU6cho{}@MR|m6-kpQf?)wn#kauEe7 z@^45kf(3CAyA5z7Uw50z9hY38`_g((m|RjP5V_%`p78YbDL$R#I85jnQ=) zAm$OOZAmZ2vrP_+gKWR~%aoqDILotoN%#rYY>29=5iC z!`|0PZ@6Aej0d0z70yJi86x0;dXQsPC$g$To}nJ~_$Qlp2Dp2u64`c%q;J}Kow7HX zYX<~6Stg_zuZMV%hVasMjMMi_%3`z~Y2ps^-PhDeNPtXZ;TUm~Hh4u&vN_AOg> zS%#vlV;|YGB|9~iqGZkfT+e^-{QmrW?>+aN^E$6{zUTY>ygzchIMWTXjzVbCas@Z7 zBi$p-R6T6U5k3NLOQGt08ohc=G{}jS@bzKeYAxh$OJ9m8!ZK#Sa56RG`7JB~Pop8W z?r7ukSu~`IM_PI~N}!c|0qM>5fVtb*t4Kg-4R{X!k%8Bh|IXq+#3mRJXM`KGf1H`W zDi{(=WW(vHV3b`^^zDXM2hu0~m44IJ+viIwob_I=JX?PK`K6Ii#Rnn+jw1Zf40oj& zPu4Yn`|z&}q_#_T9oieN#SQ)&rIs)P!FQ>e4N%Ydvw9e`i+|?%$YTJn3Ht~PY3g4@ zl1{n{rXlu~^aaupA!bU>`hl7}x>O|hP0q8RX+rihH^_~KXJIsYhxZ~Shkz=k&Q!@I zXI5U)HqWNaV#V#}4raR8vvJ_IQ=F5P5fH+LZl<5m=yqcPjvV@UaI3}g!OsTHrK{w3 zrI)J4{G!K#@ew?+{UF|^qN%m?V8=%*{AzgC^`Jh`HRo*>yE3`toj|sPE*CSAeEbCU zv`EgsW;~!Ga2ZZ*ie!@%R z28sW>*kSCugD+wVk%9d3XV8{q(i&_L7dg)gS@V3Ytr4}Lb;@dhO(7vVRqO%!9$m<5 zcpvY@oB=*@T(B~qG8QOj|FOpJOT6}CnrOJgS%(_9M+5mPDFpX~C?yvHJh|YeXX<^- 
GIT binary patch
literal 60888
[base85-encoded binary payload omitted]

literal 148827
[base85-encoded binary payload omitted]
zN_GnjOx|9DUXqHUDTQ)oo5|MZ7TpwW47Gyalp`Gw0})QYPFtznzVKJ)K`|rssHD$pQcDX( z^`x;wn^cQQnUk#|6${?ys1L$T;<}cj8hlqrkQ(yJuN!-qdv6qalo)cm3<=!fh7N)P z6j)nLn+3*zX9=`wIMo*|rXwDB3=v!ePm9|l=U_pF5-qd0EeCr5Y&xw+iar+F$L|Aw z=0u)_nZaJpFlg;zuA#ci1QRU_AlBm=z62(L*mr>Ijg>%hdI*3){9w@cnnB07p6R8~ zSVoDsN2{B0SuFx@-RAwa62I5!89r;EGM|J-goN-?VUZYL%b7S_?W-5|)+O!$vTdH@ z`oS2nScBEYDiVA78wa^^X~+lv0jw`T8zadF-SA*OJ%49xf0?R#mnaeG$|MS2i~#f3 zOq+|(_Q}6XL-k&Z#ON;3H4N8YK!w#?j*B_|B*+^`vD;B28oPY^g`uG=Hnu-oRE*a|@wG-5He$^Ftv* z2ig`Ny7uQn8mfL9*+@m35spu#xO*Tx7m4$MU&;5x74U6p$b-Kh2}K36vGi9Qk~UiU zOFI_x#5OU6qc_PILDnB5+-*sp)?VJt@_{y(<0Ppq_U-3DP3f+9F&BQdRB8Zv%UMcy|s3}W#QolhhuwGbN72o&>-EB?ggVk zz$JRFX5d`;{kmJ9xwfM;tc~FKv@4O~mil4F)DaC6qRZyZiO!1`sUQ7nE6N7h&guOp z;*@XoB4^G3kuetY5Gv)|9aTOlA)-43`+^ZitU*Y3y}mP1G4_vdbjU|gW9O}(1YZQBKn zYtAoxq3b{+b{n5AYX$L9LH+DW{MqZ_J=kR`p7<;jw`H>L20>^iMv~cJ>Y`hJ4Z@2T zeg7|oG7Ir(CS4u&dq3E*dn8xyL!Vi zsodIE zCv(0);0>JndH?XuHQ;2leM(%yG|2k4AStwbEYqNDJ-#)vWi3~!2XpRQ`@S&(+b-zt zABKx-BiK8`v6%MT2wYFht}#-**TN9DV>*o$Yp;3eCH(ofL9P0r1p!*Y{iV3SR;2`2 zm`9KCyPGcgIsulC|G29=xtv6{nr4*U-@YadpIg{{nqhTN9@BB!$9uXPViS7!T*4WL zPsv2SOp21cGVZfV#;>Rg2Q0y1^fXL9MIyejE64H(;_S^ID7bptHbq%Q@QAwdNw0^6 zsy%c^fI?BEBuS1vYY#Lfh?ZTY-7<(MZFycwo9l`CAPGC(cZJw{WIudEcF;M4{n*xC zKoDMD{k^p%;tb25dDjjO@&EX)WcOJ$k$A*)00>$aoUv1J9xDBTG^H@Gfoj$&O`XTs zBU|{B5h1?Oo#@w(Rr3+x929CB4I)s!zD1RoqV3=n4=tVuwh?(f&b^s*YeLquyD?Tf z`Ff2V0mG7eHHS7GAHKQXt17I$hQ5#b=;iwb``MXMt%tn!I?3xdYaG!W(SrbksXkKj zHgQci@!D4gjz>}XN2i+RI2C)H3H==YVAgvSQfl19J^OhF_j#|Fc)moxldFlAob?~n z&hxDC$JKJPr2zg2GJ6+|lPKEfI7dNd=^&2N_y}LNZ3wBJ0>;;u%IWZZb5?^fO=C8Q zFd=omrfF^`q(Bl!u1f(cOmJ{lP2wt99%QvF-^C$W9V&%(AgHpfYiW`tmcoq7buhB>#-;XtP3S z#fp3-y1&=iTgtSXzewqLC6@EK<#z@w%;KxO*$BZJov58+<4h{Qckqray6%IfMr`)O zCl~K;{K9VQZhjwjWRPjg9&9qQ$a(3Ps5L#NK5RP{Q4DLoI>q4 zXJ3cX+mgVlTnF4Bbk(P3%H~{IKJ*U%tfE;(0?GYay<}o$vTCxAssJg6Jp-;BL0(Z@ zmO5e!16{7(^MlQ+1SwC~+gd>6_pQI+F!E^n*5Z~aWlPSX-C9W0Ea}#z&s63+tx>nD zJGDr&`=&cfV(N^Wu2ZC(;>zw8ns=&EW%*(1PK41F;Pmhi`@SHl*2aYErotLg6Rd>A zA$E%Xu&s-OgXo2FEo|Q1i7Rm4JiKKV=kb4JRZ5~@%a`*>ZEDh%Wt%>&$O9VX@Yu#* z#o?kn_(du&i?ZbD1xBw-y=<*hQ1tJl+)nswB@}VCe#lj_{(cp0$<7@IY8Uq3yxXbl zwnR|uX)5WsbvSi~CWt8I3G0bnRP|kf%*FU7BxciuV&~=0s}}VqxlyEh{M4~X30Hjs zZc4|eBV{1e=Xonb$HqrlVG-cK+|supx3QT}b0MUp(1M_NQFsnhh9&CObJM)nPQ1|% z>2yc&@OWb2Qokh6yS*+#?<+=golLqnLVfc25>{vc_I{9XBgb0r`Bp;=eA@NJXQ;7t zBKH);eLIaojxg#Wx_$|A%iea;b%N}0!f0R?jv+|uz=JOMy;$b|%5nHNMkWdP9ymNo z6g|;e)_WjUk8Fa1Zd-H&S&_C!NC*hF2i&)AD+8A3p+-(sVmmIo_!b^A|Br7`_z1vO=>>bG6RVZYKYE}^`qXJ^|gzd zg9Uf{ITVA}ErE_5xwMta7R zIJ6~-XG<(2_>ygsy1uE3W$baiT?q0wk4c^GA!EA9z<>oDx-7=4i+*le12*u|9gI;N zPGEh}iW;#};n_|kMozOw(y94RXCN1UM1cRVy=z9*SdkL7Sl@SdQ{J6&qsLAVryG#J-3H98^9G38 zfn9aMQF~zYDK&Oz#ynKj^&WH~Msjioa-DeZ@9+b-;c?@=^?(~E?=miM z4=_V>gi{;%k5ggZ1AgyO4KsBDfqtUe6&H6f(%dd5=eiGXt@yX`JsHl7GBqXDL2j-a z)o8Ul--qL~FHdGM4=9XRfWhmK7RlVfR*M6_98H`Y+MH=Kglo!2Puy=DOLyF5`{s~i zXzS&DmkVg}Y-N5e12g9N(ZsA#YDl1p>7l+R99tr3VEn@GukdY8+U;>f58J)ReVezV z){4i!D7IqLoqKeFVg04IGL3lPkl_4b#SDZyff%s;^gJ)b)(SKF1&no=g zp2kdsm-r#NT^d*o}{IQFBgIC!CxtC>y0t5zTm6YYrMYS@ioE*N%%M18$?#H(y00+Sj&nS zr;s3~x&=D`M3w_qZX()zy4+9wv5Br(UM7}H$@Vvf_wk_!9OeRQ*IM_{{oId-(rD0~@j(3iQI{CAUW~_L^9o{J?|7-YS zXy#7+eTmX1->XoK#{jwXZ3OBZTJusQ@x8iV?l!wXFiSKGHS=RlX4neWdZn z&oV#l=6S8qNIkY+I-x|X*oz$dI(;M4_01s=Cs?dxZihVN1_1^?H}$|5wsd5pcr5?d zpnM>uxqaIa9Qt-XEBxj7%_g~jp65g#JvL1o?N_%SnQRAzU4zaOSMGv=htT~G2NBqR zUM&eFf?u-al6R*D zK+i9}P_e=>%2z_sp{1s{40jq(>2X+W%zmAo@`|DR5^jU$NUd?6dMEiQd+p`QTmdAc zT(RupH5>v6dx!nyU!Lmr=QZ6Bx$AX({ufl!wv_4H+5Wz2z_y@A^mSGuv!mzb%yyzv zmo&J9p}7KkKJ9mIIvZ9pBtrx;!jB$)FuXIXiT0tG#>tlc8y)Ygx*qUU9InYVG#Sgc 
zNbWN&(pZ0+LoF@IH*^sdZ2LN~E}UDRWuOhQ+F8=jGg$R>BDFZb*5hEG(#cZtKstvB zV}si_0a{2;it$uy#flz4XE8SzNeu8GB2|KMk~+|(Wyb0j`(M4;?md$&fyv&m7uo4U z;pgapUjYpM!(~6912_h5xHSw}s{c-1ucK=_eC<)0>w~$^r~`n#s#<>l4n7-J-6=PX zRW4sgc4nuV?f?{syx*Z%9irq{&Ypb?yxTtv-=>Y4qD9%ZbQ0(4B)Y|#O%-*>*jMZ6}4E*JaRV9ho6pF-Ksu2>a;54d`S`@EmqDa(e_)yNLdg%g2o& ztb(qj5r@T1GJB?RB>vc#j@`eAh^67L#-#5wP9^A)KW}BJH^OmEVRv2xXQ!}s$I$_= z9U6Uu&2v>Lq|;}GoyJeAtW1{M0{KQma4y`_-r6oIG|tDDL2uLZT1AM&KKA_Jep3e7 z`Wpiqs9p~gpM+5iG1q8u*6rw3nCZQGb1=hmm&|I0lmY6%YJgG!WZ~ zLzh}0!+7k$_U2^v6~~eMimgPRqt0`{aMgW0OC(X;v@5?e)qPeZBx)APG(_Op`# z@$TsvOl6@Zy!VXH7Ymd>yd@O;)Rvc&95iCXse8<^MwpW!_Rm-a7jE>8;$;Pb_s0V% zPiE^RX`S_={tcIv zgW$W!jq)`7CeFxx4N}nXcYEnr>E{aiK2s?5(08Hf zR%bT`o7Y$y{xT3oZJpQmM@92o>y1u;QFNX1OnFU+_~RGQxb-JA<=ciGZ_mQ|zL8a+ zLB<3Zs=pU8wQ!EEUOZ2NNo_x3>kyZf^t8Uk-yc!WgVTGF-ggtC8HQeKuyB8zgFkdAODy&r<^TS1h>eSED@)O9{dPMNak! zc}X4h{QS;Y(4jGo1MK*xx4%^%DcQ>Q>vSmc!v~Tr5@g;RapsD7Q3`>+_5!r_8{$-X zc{D1(drHj>ng3+($B^ojBpbL{8?THEu+E<#AlwmiLzTV zUG)KqW+cA2^?m7jkbUz;8{lsGeBAfR#h{K|>COI1u2kpFo4a2*nP$ZB?-f^0HLRM) zw%7kARv$_W3cBqo_29^ z7ysvwyH_}PRgceqgmL|q&!Y(oA7U!C0gv0Y0dj-gnotGvgTK2kN+l3@(L`!uTRlOD zoV>3Upyn~^%6BCaSbu*HW$dy8k_NkhGOfb^-`INIfS! zy&cqstQ7{zid>{t*ema&_Hm~&fGIAmC+WN$KN^ublX(g~f~$^Bax;P6pqeTOPSeOl z1w`5%d3Xas0=`;xrC^*SYC4}S{acXKNGu4iXND?;H1_@ibOgQOnnR-Dr~_cunh{W_ z=^LiUxo%pX)~(e&daa5x@^rWq-(_yvV*QnPXQx9_URVZZr-th40S>pl&2ll1Iw;U59M-?~Yl9 zgFM*W#rugpEnW#sH&vzm+%qn}`$^?<+ZU>hq?5j~Rtc`64It=b{-NMEm>aDq~|6M_ct3>d%slNMFj_3qahxI2#wR)(PA6 z*vaz)K17`MZAmTWlY%og`e~NB-fIg{SfsXC!*pMynHkq2D{>|9_ylr$0J^E~RPexG zEJ!BG$a#5nQEK&USk!LlYqeTp<1@?=iN23}GzIG8i=nFu9@X>vCfvzf2G9+r5Oam~ z_+vJD0wALdz2u8teUb3k#@QwuVlt`NJb5Ej2%D zx`5jc-Vi)a`Z2{g>I<}}IyUpSM1;C+#b`ltEo9{>2_%2fM z9>l$Sm2$5evHUA>?_56JRi#(fPTT91ZEF43hh0q(qKD~Fy@lskJrDUT$kHm{+?~=? z8CO~cal%BLJ;yuAH_wG%B|KwcoLxyXSbSN>OSOXZ(>np(<80tz|#B=FdX|9+wt$HgpK0|ApoDpV$*fjC~qk|4-N~{H6|L zbL{f^j&A)wVYBPOQaics{c5iobA0Bc+NkV^O7>D~Go13}R*m4p9E@9RqV-msbQOC9 zHpwqnJwv(xcY^QQT(W_ zDGS+;QS70Bwxi*wI3+w>pR(X;7>YePZEk}Y_;G(46JdNnNNXVP!7k$E-^@f7(Kk2- zCCkP$iwjmAllmN2Bi?#lA@q~(`{o`97rKj}rIoM13`@%h)f|F5hA7OT;9`Tf>dQdY zCIkO{hhdpWi_bK|_roruV<1n|ln?5VbF|dux=Bpj=3J{~<0s$th2s(I1oo}-#^)=r zzwcs*z{!9UiK-bB9_fRzbZCG{B@z^V@BYbByZ!Gm&*Q{;8qmzr~9N?cr$j)hWCz}A_Ktu zNTU&v*-u4GK*Y&W_R7$I@n{s4UKk5=dG zA&qTGD2M0rWw7>>-Gt@VVV||poVIY=*Uc!)o*u%!O~BAmka?OqE~3z6FBG60J{LOs zq0&jTI1rC?E~hg~x8UMMkOzsM8pnV*ygNnnd5qmt|hCn65B_Oqc^*D_kI z*XRd_X^pXSf~~MKK&RqHKVGb^8$u}h(i84P^q4@PwG6`U?#IJ`eI=_=)nOU5JujSM z$%6Zl@cUUeqIgTb(c{(hI9Kk!`yyRc8C}XZu7=;U?;xWqGCHzGP1HOt7|Y#KHZyov zJ&C-QLi#$R%&$hZMy#)hfZXl$FCm@B+^W9=cvB&SSu^KnCmRvHft`9zXzJ{Qwle*` z)>i`}sU?i(BU)$h>p95fQym}Lb!98sAQp(4p59c3OHCFZa&j71T!d^x+4!Fqrd!(b zaM^%@aYF9}FtL&e%Ju*+VLIWPTts-pw|m_PFp3zgZ})-5a0fgs@8kaO(Cz(xQ}7x^ zBW(eJgHs{{^aGwua2q$)Y-Bl!*rCAz@?YIZpmhf+AOWFsO9Fna!|e+|VOA$HQd9hH zJCjV~sXGh_W^$HYuP;c<3*Gj%^ukNwo1WvCvLdOg7Tx)?!GPjJhwYRbFw-3~f`un) zh(eWXM?Z(KRwO{E#A?xoIk40lfy)Wm0 z4=+l+R1dE_Ztm0N4_fI7+B60CXz%4ppNyuaEt4Gg*?am8c3a0HR=A=ARKJKO{x4gTLmvKzjQrjYDyU5#5 zlyvVcLFH-=mX-v0u@QfNLw?BXNa!s&Jg}-Wi`}4ZdDpWT7@pI!-}<6T3+(%mzssDs3u#E%ffDZDGq&iJF&O1AbXL7=Da{R^WDjB@S=K9YN-m{TckL_3Df5-92;oWdj8B4hagAP^R zU*(Yx?S8Os(+M?fKRC>XSd0i5oPjc`oOzb>2i`T^CA{L_s_u7tNY=z}q~rcQ{DI;5 zWyh62#{BX~F9uIODx~0z$Gvr==5raYrZX-ze{)QGzO-<<$GsHpX?(ReQ`)}kr?L`m z6SdJBY+$e1KkgAW|B&05pVK_$=H=m)vvP45 zFyaIAD{0oRwrH(C_8XZ)&cOq(C3cXGQ)S2{HC{u5IB8k6r@y~F7k2n^IAZ0Hv9?+K z9RY<~;^YAia3sA_w&&y)iR2*DB~{wIum5;htR4xD z*b@p@35NuY!&fcbq%1ONl^Lw}y{-qWxY(yTN!Aj6%|1uq|1p=$TzW_x9$<9I8FnEV zm_KSzj9u#jfRpV@8GJQZS-!^bFGM^js%`HYSlYIojt2+6S&`r 
z5v$5>ab8f(`C(W`$$kYtNsLa123!^$)f-_So-t)XT=g_>JYR`mBZ;OFkFpTWYCBSd z3gC4ySIm|g`0Iky-jxwZ{Gg8;-*@TEE@xl{{zo9D=7|Mm{tG9N2J#b*Q?*$|~s4x=Cyif~dXYs)k^4pW))^X1*+t;_ZL$CHy z#y`72$#I~0Xn!{ir52qC7xFe(ZQI2hdKxxIQVj+t=kOv|`4AYvS+#af z9nVz<6pMJyFTwM|_@^i@r>>gob^!Bc;=7G+#!O$y509vDpJf}26uO+gtP(bG#_iY* zW&5afzHwapZ@{iIn@dmr!v{#Le-s;YQ4gQvnA*3UNAMg~Q9d4Mo~Vd-r(5oR&L;f4@Pb`T zLTnBmpen~Htd=8DW$4?}fAoVx%`tWV;{M`OqvC0A#UW2{@Z6 zxlp2p98e#Z=r@eRKJM2?HY&jWTex#N*Gy+_e;!VL&dT| zTj#fbjHm_E%0Xgh3&-Ar>YhGs&VAO7xX>DrO9W1zmT;D4U61`uvC4169|I-mkLPit zmn1Lj%!&RmBzxR6yy*C@SIhicfYvH|sG%5?6Qfb)uvyYFMlhOwiK^ZG(YTyYxfNXU zSe$0K{DENZ9r?P$7gSC*--Bc%q*y8qK%CN_Dri)FZN|{5rqchCqcPWm>*Ebm z43(B}W^Qf1u(1Z{VMG8CA<@4d5mg6jTrJOxqkVM0B#4vUfV09=X5|7{_Yq=T4O)fr zns$;SydFQnMVkjM?L?Db-yH90`I_4kPVQpz9&z=N|D+A%5>E5ra?^fy^U(-n;XHL! zSkF-bQ{Q+UH^dFo2Q!zCQxij6glex{v*u$=W$-Y3B;Indfc zviGfJ+y`7H^KJpC>g}tJ|8)Vlq-}kH!aOTv`e4mj`S18<_`_@{*eYE1Di--!%Q}C8 zHlS&oU+hiPU-vKqZW=52LrObzQq&uaok9e{|K06Ft+(?lp+?=ztD;7 zBxDf?2JXT^!rgZ3tV-p^R_GUST&}AHut|1cg{*4S9zv>vcSyfOVB&c$tOu_tVE1LL z`>S7SMzU{4pQl#!9Qj6`Y)cT4hBsM!2(eZ}Kaz{+R6~J4vymsqanzv) zdfmS{lr)Qj&0y4rjRn}j1Bokxa=<_iyNsvG%xt&4@-=Tw#bl;h>v?Ue(@hKZ(DTH> zs^0I?r()Zy_2lfR+zYzICmq|wEM#?(j)@H6-M^3Hde$-uz_)I))Ym+> zoU5#lGrpgygYKwmvd>=_Y%nF@q-nm0=i4cM(z*1LS4NORRCts>x@)Y{_eEp0D5u?r zv)b~f5^m{K1}3@x^3$meZKr!{G2{4^8%V!6XHu>oe^Y{sx&|U+N~)7)W>0&821HF5 zQ;(cYov{AMC)10jDGrlPagU#9HcW#PK9;RehI1m;2D3isQ=jSm&Rx`ejN|(l7Qbf7 z(w97wJ@1H_jH>%0ab+4~RhP-XL(&_8aCor`(x%3;kL#Q~?)eKpmUTkSr;**hikX(> zvY|zSo8o&UTns(X{YUT62eI&-R{pvxOna-Ey;oZVH(Q)jed|TlQ60?NzZq>9=VS$Z z{KhJDUlTMPxESlU$2X3?FAg75VO^{~ratgR6{fyQR`HZ3U#cP#4EFz;hYdun<0&te z!dyHjhY`{lX%bj6vOl$-5!TFc<(Db6cJXLb=*zUH#)2UO-?`SqoY49^F0@>H*7HcK zx;CHB@1w>;1W0C98dsrLz#Ei>qe*;w3lOV95-Nsp$B9e$g4oKiv$f%LDXR5zKZN}s zn$G*34L5B2N)^?jMfqBxR#mOqGt}PQw%DUKwMlHXTWYTwtu6K@_TGDMg4hHJNgmJp z9PbbJzi{2hb$-t4JY{`?C!@Dn+Bd#_5sCWo`?IE*!v6DRNXh9-LP25Cr%HaWg3Cg; zdY%RLST+rgE$eS0Mvm@2#}YNuL&bIGHebC+OPO%nAsw!CrWhCIl=*QPCShOpsv^|` zlT#b1I93WHq}avS67G4Ty@@l6o{K@@WeZQ_?cfDXcV(Ta|73pst)H+9SIMI)jj#S1 zdv|xc%SXX?GaOY<Zkb*SD&Esh33M{uQ5eS+!>+r_D?0#7Y2kJK{l>F;KNTA5 zxt+YwkTauyPW0!fAlUbb!W}Nm)n>D!baU&8L!85j_fQ_T#Rz25M2OK%fe&#GpUFt( zNoO#VRT$Et4=#Gr8U?b5>b5*@x>NE-A=#QsKj0aY32%Q^T|>uUJD*#S$#t=34L3YX zDN5T?Vxew{bspSx1^eVThl&G}<3DVUcTF_g&ZDxWLtOSan^q{&7&LaVIJMoTCNMu^ z^=vk_w(jMZ<#L>Huq`m?tu{2(VEfLYr7F5)fdA6!(k~!|iab}atal^5*Q@qeJ~%y! zV_`Qo;(?|kndWd#yD*=<#XB9-Oyb~?Cc+0N>wI%8$?0!5-)%@J!8hDr3T``7rG~{n z{*l;sNrn+96L045e*IEFz2Uz1w7eMLY&5#II1DgpA z^Zl*{T_K$VsJ+}g&)2?(b{OilF)ji_>m0p4-K&U_1;2`=b^T0kaLxBuMJADBjz+mcCwE=>({aFxI z@aEzgYsUo$s4$~s4shei0MD+RnQCQu1NCvsak2L`j7J@ozaD3z*U3pAJv_V%B`u|S z{GsgF8`)D;0w!JrDt}}Po?xhbB=yOD9qia{a2%F= zhX;0cdX*#!JcJYpiRg<>i5HaG>I2&h=A>{lcB8AucOKa!qJf{jOL1UWx`jFKY%uu% zIM77-uRl^Z=)Y`U#H<;k_Y1yySH-z$@}yJde{GK*li! 
zd>2u2)P=PA0`)gT<|bCPk<%5mus%(CVwQ_Tk^91#j0X`d-*Quorh`L+iG2zdzuzh zme4n>n4C2pCjqgzIy*?`TW;H<{%j5 z%?qblZ|wO=r&)D#;ER_E0KE1qN!9_bx-6;0gofU_F_PRqFYZfyQQJK)cz4bB+KcN~ z1zj75Vi$91-j@;c%ev#+9U_SMm=a-$f`j(7J0UjlcHzKk`LDW%Wvo1@$gb6!YqGyxJZbUvJyz!p z+Xvoq0FBulhqJL;Rj)#P8`*)f ztgp+18UF7Sv%(J;*hwri?7R5XcgtV`7I>O|y;>HwTKXOW0jk;z8;!wdPqTD0TKCRU zp^r_aVS2Bzc(Nx@md6*<`WKy)K&(e*E)nUXat24|*@j(6fjH}<6jHTMEvjFE6hxE) zk&_Kr_*`cz@%s#h+e>4EPdh3$lvHBB$Z82a8{>U9OS6RDet{tXfVWNBp+{5{JUDv~ znCnHDFFdvg*2xnhwO}gImqDM*$OMI~+CnzG7Pz4maLRp;t255}J|^%t-@|7yXg@%Zgwu81z2fWT3o3Dhr`Qx^=1elQpt`|oMAvJyKyY5HAUqG$X#Tj7P z3A^vBuIW*#H8wAkZS)_s=>uOq?v-)NCXO2YYhw&slI#e(GQW0!Auq^p8c+|f7G^t^ zr3VHbTcbwz#u`_$S4_^nk6c~9V<^Rlnd6fBRJ6ygQ6)@&zV0y0oPqC@0;&^}hDr9S zw@e%}89;M@Nh`*2US{uyxgt*qMXwE?eN7qoNsG@7HIMo;2Hlu{=yoeb{;JV|XZDaV zi4_?KM`6EkD21fhh_*a0qo0(qZ{^aGPa@P7^k*g>pSdw1KWj5?ls)(pSkNT>JzySzpwKg!r_9dKkz0)-pL!M90$2 z(#~TGM>*tDzs7FiH>U*J_Hi;d+gCqi0N{#U^w?^3oK&rJ4yS(hQ_(i^)T=mu?c3$6 z2p7-gkV*%FB}Ot1IQ#AOC=fh-Qjbks^Egtziy{U|fp>6&OBZN{qM(faMo-Mcz|FW> zn5@;+@J-!<4y12pDN5#g^T@gl#|tqi8M_g~trBl6(D6Q;Iw>BIJ{DYskUacYCoS^u z(#*fD%H7^Wq;;y+>9m7!(x!@O{FUji61Ru+T7Xx}MCh2FLqXj$d7%Cdx198to9+4Y zjpZ+}oQJe6SFihv1_Ehgq~|W?DEKcmH7BMdtca{&lXkPG<2j#%+N%$Avex+zidmw( zwL65&@Fg7ou^W=G@Hq;;Vtc38=$9_J8rAkRC*e<&sfNe#O|YK#za|a~4a#}NThi$? zLxY)}OFNsf2M9W3l=*I^{y=DXkj`*q*|wKj01@j(pQE*2e6=v|goSF|CW}dk9rg?4 zBJ9Yn4bxlX%@4lhAV-jPNY_i(t+?H8EfF0{HLrLU^fr(BU;~?()|eVjB95N<16%p7 zLCCSn*X4BA$b!{lRt9m%Oy|p5&9hZZ=pWw$nb=;wfY@Y`L)dMEZ=EORjCF|y(lxWh zlkKtFg@wGl%kC?4^8JikY;^Ly&c5_*R=$?9s}}V?xDnW_W?haar+b2j0XGdjMCY&J6I zQZN&3jnO2t1aYbQ?o1csPW;ZGWJ>=J!7B*OG3)4fNG znZ8qcImedg6fp2bbxGzjssr`rkL>J>>c*o4hIT*3iAUd>d>qv!A`2NGP1dqcXe%HO zRAOyNUXHekSRQ2>{+PgDOj}$PGss?SyFoyg+OWP^1W4t(O%QoK3^8#udM)#Bj@qUL zDS)}*1PP6zL1bT}MvGwuv#^stHNBBGCuIk&Wkzs0;a1$G?fPEmDgT$j(3#<#Xa6SM zZ%cVLI^XDj&}YLA{wPs#1A_ZYcdl68bR;o}^8p zd~o8U1Y&-*jxGzFUmps#DT4uy2a04OqFdf4;*V(Bw`!<9$VH|v#JlOAqT_ki%zzlvG#RUolNlf_)JDtA*I75;a_`8VEM4 zo}Kpvat$@_##^rtu$F@pCF@oR>gAn172Q1@&HcKj{AP5%Cl+0gJ>eKoRsH?cBbCFx;It5ZqIM`WmFCZcI4FFNx z*$S__zT59wa&UrJOx44Un*X}7=6r9|pfVk7SG!uYCK%7AkR<%4Z}vPiPO)cR3UR&K ztRqL1>U@%TniI)U=%Ab}ex~y|%Hp@f0nOw_2eJk82__dAGyBz|X>y|xKXdBgrj!aWP-%NC2;==sEj(&ekX$aO_o+~t>e&wjNzX^pR*tv9z#*u2%T)h8fl#1 znEl+IvIlS)aRKsgYztTxwwc9n!o`v51yC8>7ns8rpnOhi#Y4IPuq)?K3Ko+A&WtuKb=_ z_%DY|5OMjBm0^Z$RMY}=38jAq>@?UjpKWTplU1sSS676Mhuana$Tkj>%|~qiyHUp# zAgFf$!#$AqFl7Dl+A4a9+#Yh(e2u_ylYv4rG|JUJNz94=cRi#3o?rfnw7P?TlA0^> zO`93hv}vy2m8uS58Lm_Jh}9@sp2oq7K)`oQj~seAWctL)lP+DZPVCzr`R?hP7ZrJ# z){A05HM0jF$E8a|dIdf*?r}NHW@FZ*cd2IeC9*HJUnHDIUqIhE&7E{N$-~GI*I?9x z9Nu|e*zxvN?pT{Hjtl$?XP%K==TkSQ$(IHS3Cj|avi=(406^PTF2(odx+RUobdNKv z#&uncWmV}|(kyS-@3~giy8*?cqLPS-7fm;|GioL-SmQu}^3%_)du87LWz1Mmvz`NO z_+n-`ic?o@dnqXlH%Z)UBANB|1-$>!^^htD@czzEr2b^+@xn^TpO*SB8p%`oje*Ti z%tfW6oW8QWd41-^hlPe%yNqnMab>(lB_`K-3+=v%m|eGrgneSpUA#fP9<8IC?dv7S z9TVvmFZt*11kQN4B$n(tf0b+*;pMOXw&|EnYqva3?IJ_eb4VRWo$JY>E%x!=tI>58 zPX=em*B`Ezw>ZlDsP*eb7&&>{-2Mf$I`}S~$ov7g#4!BGNZ4C^a}t_V)`M&$=3@4o z*}zg-1E>{}5q!-86#Ii2O(aEbFD2b_8zrn?=*MI!Cf>JAwR`($9p*-PnX4r(qz^R~ z;{s$KSG;Q9D-r%=J~sF4{q%6EyEh3`SNH}IvFrbJ7M*>JQ6O`Zi+WLh<^93#wqH*t z8doVX!|L68;@mX8!62>bTxSY?4l&^pqD$_yTxY4e3v7`f)<%Tl$Fn zAhXWnohe4B%qP|h6)ajQ-h{oc)}h6Se({N|=}j+aV5vH`r{soL8j0qMi99X6AJKxR zlouFoK)@(Sg)g(J`WRFB!gTMN_l*jXZ z-I)x}n^1&H{Q(Zy9NaLua1epNPK@PVrngr#v9n@Z!9kK)wwuklmAIZD|Lu4O|5Ch& zs?*9|V0jize`7J&jTZ@WIT|=}YeQEZC4wM5)#y#Ra-!o@zBh-zhHpn*j2jkUSkn+%hq!Lqzb5XYN5?43N_yGf_AKde?k6afPeD% z8~vnVhrLb3YCoudv1 z2wNOgBKNU&XA$~`PX~X*SmpWJx@cH&PKr0aWuyQG{(BQ~0x^@HlY!?cZhyA#yT*P$ 
zz@)_F|6I3|lz;Zxa$Q?=5tUywEk+aN-?o|X!uiXl%1UcbyHvclnXKw*FduH47A614 z-2B#rGwK$EOpjV}txYmmyE^q);XzksSiAT~f@xl4Lovq&bq2V*jiWNyMF4OO$w_`d z=u?6{Or=&jsMIUf#+GMq8o=oD3MH-{REmJvEoH997~W0*lgkaR=TXB&^vTbO=2$h- zbTo*W2@u39y4I#LJf7&M)qQf?#?bdXz0cto4h@St+4k``rY70vyrD8HsFUGc%4Jj4 zgHz;e(*_fYKTSbyUbQd^;vjV*$h&BKqep`RG2U6<68W~`_|5@)-OVEGciXoS&c^$# zGUAl(&geUM>b6Z^HE;oDznpJEpX{W`@RaFRqv^1EEa<1PdKAlew2kE@DlW7_WjC7a zed}|hFe>&=2^I6QepKtCMLRrLWizqNi*+*Jmg1d+ZH)gPpYD^4=|1bSjm=8{_ zMc;dF!0x!Gwh^K2VDsp?MrG=P-^>HJogsqHi0$|WyUJ=yM5C%F**3^!U0+iBzu-XP zduh>M1m9PJNS{Qi22ef*V;Asy*4V6FMCvt)9DAG9jfl(mXDs&M%mMvYnHjW_6D>(zPH^J(UVHmkA2|9;iV#t@}SII!r`Omy2><%(E>B0O$) z>^)DY$&`n}M;_*IHy4?X>yuHo(;Sm^2#8TWw;9p-mHMRebU0@v* zP;w23bLYb6(Wkw?Z2Ml*<>Gn|L86Ne;L(OTYs}#Cx7q31;|h7gnq3u z0C2^ieWCUacH8irRIFa!18iRDUkin*^&q;CT1*lic%&Zc&=!(T`7DpJLdAK8wbE zw9+lpYru{s2E+o7QA0AnSOQUExO`VKv0OzURThbCIg8;HR@+f>vrfr8s(*k3v94mq zXEYxRjoCx$nL0UF%?1`H&6nVr#si11hAb$r=M zWn|xz=VrS8;?P5Ge=w9zwg_-&+kFbJKmOa zw2Q_FOD;o1-b1P?%`)z!J(DB*<uc94PmQMW&*YQTul>R^tKCz|Raf3yWoTzNAB#&t&pmZ4ZEQ()(^gry zO@;-Ui;aU+)vNxBKrRjPH|c&|$jW5buBLma%WFq)vK*aH%PrGQ=(qPA=pmE;tm1M2 zhN0x893sDbRYTV3Dm>LHS}Brj-`zD=Xhc|nj2;vHZY2L^*!zTuX5l}&_Y6-bt3(O3 z(pL_4k272_U$?!quZx1I<6bMsk zt%B9rP-aWIF;Cln+obZxDxUwIQLAlD2Kj&FW4K>W^E`iC76STOwP3htnipAOjoqSS z$|io78=Uo^yI;O9m(ia(Hc%(iGIN(70moeKFJ}~mA5Y~1RwMuWQN^C&f8zfpM7$tL zYIf5gWn>9Iw}{`Z+Whp9=v(w)4P9uaIHm4J8M!gOyJA9>*`!bQBq41w1#3&}2hX$Z zH$--rP=1}s{jV{g48|1>nI`;J9|$Sv`>rg4yhz zXp}-`SG@Zjk_C}GVqc9Q0||dwcjqC#0pXY$E$6U&aMk}ojr6vW%lRub)Vf~pn{FrActI#bthEJdyl}{$j!47 z;dyQJ0@?hmZ`Mf)yO{0BeA{JNy?dPXq7KgN<@96dRPHdE)HziMlca zq%6w;*=>bPYLl*w=xjzRiP4s>wQI(Z-cH-ETXPlC;7G1b|$*W$jj z^fq9IbDf*&s@7GlKXDqpGX!<3AN;R-K^yg3L)MRS zqWW7>Ql_8c+rHxxF<`+7H`KAcJmzU}fxGRb z)yeK9Xoo|3d|}Tv0o6>3gTa+4s`pAa))@a+k7+t3D8y+~n*8M=y!}iKE(iC)bkiw? 
zdwx#f6{97%|ICa}bz#erp*6-PjuY9lL)R*iHRd2`Oo|Kgeh*VQ>p=gFyo-GUu6zko zM|9kdeVgX{IGu5w<8{e`d8deZ+oODmO2O``U&F7XYcpa}m%{#I<2jcR;gu%P6>OxJ z#cOa~v=2U7|8`4SD5rD1#@wMz?5Pj)I*m4dHYzFo7A`Ht+j!XRakj&W@g~V@(>*<6 zU-d~ki|5t?8~VAZe1W&Ybk^$7nUIpTegmC3?E;(k(Es`?df8{Ky#CsXFH^&jG# zvTb*Nu9Lq&#%V$xG({Fpzx5^pnxY#Yu$m7Yj-4>KI}9Fe-*zz;=e;S%ZO&c(6yuP$ zCioq{a*(C$UzI;Em4kesM;?wTnl|*Vh|pc^GyLp)BAV-3|6+2c#jUe)H!^ppd`gyh zZX+i~_-qxAtufoZA$6$k;Goe>9|B*SJHe`CRJ*+_!LA}NyNh37UeyL9Sv`;$XVsB= zu~i7soO<#FSSpIRU4xv2%lL1p-cg>JK~P)*Z9k>3g)&I)W$A;trVGT%*a6NGtfEIEr7OhA1`%icJ^CowuScOQn4Fic9qR{hr5J$t$Ex^GI_>;?cooE;)MT})XU&BzJbdx8W4#U zEn7u?ssSDa&n2@$P2HJu;THZQ|Mg!I(KKcM5+009S`& zi$MBn`kq%6_m#C?`_noU<*$yw`| ziKAzNv>g<_9W#F$<}SmO7s2lsf_PAI`|jo9*$rRIoI5tN_TyG>vf+cJ+Ns}tDK5wx z!iA{|e{9+sVai52Vh@6{itvkNTucfowE5mgLFM~=6S9xBn7X{TRb_< zJo=7|l{{gPu!%NaUIt!4P{A7}a76Gi>|8R8A#sfBg=P#{eIi9_hAW+fTo+Bz2iiPY z>SMV;BOI|Pg;vEofmuJ{r!Q%pZJtPJR4zmh(KIGK>V13e>ZjGeiyZ(b9HNN_yK zZh=%00Q8A&fMt+t%Ik=b>XB^J1GiC|Q41iGcs3?9r|;{A4T^rNdN{abw8cf3Xvpn% zrk2x#j&DB87c@B~9O3DoJY&1z^n4vppLiPk8Nrb9U4&z~*`7h=i*}TBcnvBn2s=k5 zMUTy(@4-jx69P1}yZ}08bWO||b8?`dzbU0W3udE*XI{=E9@59P)bC`-Z?XH9+~uxg z#?iOY{q7@-pLZb7>-FiWoWQFjbY|`*L*4Y+0O_$a|4PZIxgm*3Kmxbluoc1BL9G!G zMR3w_Gk{27ys2b0-C%=^3|gSkEF8kLCUIa%K_lKE$;S$Z zJmz0HK6BE@nr2)gPs%;)gX#559T$}VaEYAzaC2AhQbwAm8-+7>EY7QgBCcN;DTO?Vzjywq_!7(H`>ClTLeRw7pdmhJ zpG}%<{B0%qQ9$tN@o|rsUJX9usNdFhQ+JIk{NFBH-0Q51y@==72z=Dc_?C8>Ix9H8g-}9({?jG2V_y|D(k?n)-%TNewlw;%OC;%8UWeL7f6A% zB38uyU0L7%s0pMoDIL?9dan+=$(nH%40n-Zen-DFH1SAiT}?|Sz)9#saTy%dR|AT^ zL@OKKe0bVB<@LLh2a)~CqR=e@p)ZltP3lKS0 znD`MsQNGl!R+lp{9l=wjkL*SJCp=)117C$Ztr%ZLa$inK!>=F7pFsLp>7kt7Szw*n zf|kR{qfbJ{9!OMA(xx^Upn%cqbd~dx8x5E!0FN|-BM-i;xgUAlD|jb5DW_A;PB%0@ zBSgugUt>XJ+hVva3nGzG5tHmSb0!?&k!cnr$n}@QUH3l?4Dq@7iyvb^>#UyWC~k2S zLf;j?V`^%fs~NO7B&)z<{L3m5Cn0poA*o#4b58#^1r zr3p(Up;;!W{9|W6DF=|;u!i43ef=g6Y@a}72vljrKLrJ<2L zzcX!FxDvdDK-mp(nKWT(CFMUAW$nMlXRN*tnBn~0_yId+nKjJXUUx1x-y}GQ&|;s# zW_j5(VOP^HKgAY9w(0a$sOrkIV@|vepz-gl?kb|-{5nH5j_k*12dH7*ecmZ;~98>HbqG$@_S zQ-qeI7dG{1F3;_~@-XF_saPNUo(Nj?KmW?}hOh1oq_a+? 
zEef|)Vcqv)IoLXq?7U7yEM5@5YpqH4D=$ZzQ00VXiS1=hgcpvwN;Kd6{h^XxMd`?L zw?bDL!Df+j?^1MGB*JT930lPGWHzSxVpxk5{M;)grblG~Q^FCi6!fx}l}@6ZqllGx zq{QW5FRfZ#bE?FoaKz!?U;?@_WnjO#5ucL(KwEnkw)%Vv>iLf40T}_rL4x}4JiYI zGf8%x0-`@0l#F_dloq$djq>^AnZ-T6?)cby#gg=pg(-O8(F=vo(Jnk}8@E9^OA;qy z@IC=TuV+S<#;*ENbgs18%~LP?$4|Z~UCe0`zr5bHHJB}e%y(xTeSS0{q9ZaD7REGT zseBEJ8m*5tL7UKm#C$gEK1beXjWm&ysS)gy(ORWvZ{zwP=lgIBGz^H1A-Bif-L7s( zXs$1rNdE&rP=l+sDs4t0wBR#S=(!=oFv#{$z zUof=X_{6m+dHS$)^91et-DZaT(Szee>uqL^Jj@r*KF-@{0nia3u+NsxJ4__^w|nxC z#_ZcHm_^leV2SFVXL{lSnRGF)cCkJB-koJu(XQ$2s{Pv)-+m~XOoe|T&gu<9a9GQv zay(kHSu{0Q80e7=boI5u3=f$;)dI!vIJBA)(BLD2ijXm2lWF%-Y{ScZw}_PGaSx0w zmjp4M&BVlGPg{Bqdkz)oSIv!AS%U=fbJ>RP4FVQa55c;>q(_e+sNYGFNYQD~old<2 z6dRk)0_jCqLR-vkhafwHN92&Bw&{b#-A5rWFlt}e1@X5O{jOI>*FsRjYr6Ss=1H*= zmo;GPxxdMXnFXD!MXxVcf6)*+;~4+S`?tSSW!rL7m9X?5Eb?~S;~DgDmXmM#*6ZdA z)9Z~48r(ZC4L;ZHnF}x>CfIFz=BlMY3R(mCp|f@o>5I{)Q96v1&H(Q$k4yV#01%yj zhWrca(=Pv`_b2Q2AeO)i!W3X*hf**v*)uz!$BDF*2z zS9UdJ?t%yJYm|Zt35L-PHPVXV%8&Tp3|qUtN9l^w75~c{G(iQAKzE>-u#Dj+Usl~%TT6(PfiRy z-1yF9ppTwT0x?o)KcQ#xFc?sYOzPoYb(`{w!_sVWIL>^HP0Vuzw+=x0UX}d^M?3mK_zFiW<(YNk) z@hEGbnD^+FUtV9^3+SZEK)*;77_Urc1`vLp+G2ub`vHF)(1a(*-!~nW3Qe@;Lx^bn zt-8+rem%8$4D~AKXKOQRDB77$@v6&cEz!xn9q7JfzF}p-|IR?)xOC|8CqO-Tf{yjO zUWqJ#txv-~ONDBNfn(5tYM=I@xLn5kcVq{`#pcgz8OHdqTJYvaYTCeIU}Y&u&s@_J zg9A~5hECaE>VkZeffMA;Ww7rL4*yqkX9$kUFSRr|sBraYmQeSOc?y;9qYEmB>E1iC zs9ClWe9X3MS=Md|_?XUxMk30#n;h-TLJT5Ya+Z ztQN1}tAB&F&*zoU!Wq9Do z`XW*9wyz_ZlV+?cM#JcQTrtehuKbJOt^H?wqp-^356ZTefR*1}*79C5%Qy*`SA|RB zn=Kd`*0$RM$t7#cp0krdFIjx~saGQD=JYdoH25MeXquYmg@yTSx2z3@D!;^itk3e> ze^Q}Djt19J$nzTqrUGZJgm94Eb?haz6EC7qPk-CZ=(6EL*V1t|8~^zf93U$0Uh7Ef zFj)p$52L4I`f0?S#37r%Lbvq4rhY!7qU!4VyOv7=(Z7yNi6a&I)nfC&07js3L~Nk8 zN=An8wtE@(R=nowABn7wH~e626AmE{rY9R!cFQ0A_B)FI?1~n88Bh*wa0Vvyi@a9p z$h4a`$nK5xY>-&ig$KhLG&Hj_EEcKpB94q;$%-bPBQccE_PQ;Oh3hI&T`yCps5Z78 zAbvL_BM$^ndjZHTPD=x7$44uxC}na-r%66TsWk*Ow9V@F_t^IB&IEYp1+S&Y+5-BIQPk-rvJrCR z+CPdwXU?s`T5q+Pim2biW_&M@1kvZViZW6gjZDp=?!Hv^+xkD9o%d3@t<;4`(togy zMIXNQZH}TR^AR15oA9@Dw$maCqjtO~A*g*|{Iy{vSWWlaSKtImV%zlh0>7&KUR>B9rGxI{ufeLCDb{uMDBezx`VO|2Dc zu)CPlRvJ;%&#eQ_{ZdK)y;H4(T9Vvqy!@BeB2_V=Q|CXx!5z;A5`&ctGE4fz7}r<^ zzLY$MV1Te0;Fm9_1&rh8Bao9u^p!RQR^Zx%@0HMzri4wbVxr<%@j|D!#Tp9~xxLfk zE>H3HOJkcahxLsPqV}Q|mT|h7Q~85$3E2vmk0G4yuS-a{L*_MRZbu(0^CnM{F5Er< z89(i)TbUqxjF_gf(5qFkY|9%9%q^*OvBTs`xEvH6!S1pUpXSEhj|VXK*GFKKGJ>#h z&^D91}x%FnLln(fn*pKF)3VT(-HsA=B-}c_AYeX4D@Dp#T;Jw zH7ai>(a!n$I0qII#g&HtEi)L}_=qS&fB5ue&(Dmk>8Ji}Vyy1J=*cwxrSh^g741Op z7?+TtqPhsafT_=FZd~9yQVR|Z^T`l}D~8S3zTKyCHl+V)bzg6e1DegLnjO2>|DzsJ z6?s>>ohwpKH<0-5KkjRdxDJ|z?e{TcPC=VBA4GO^Ylpp`{pu3tF4?Fg&q&%nh-wrx z1+-3f_FkrQ$;2@>(8JrjSa%S=>{-%TZFOB@e(ScVV7l3$vT3J?3CIBihy&dR8WtGV z`1jNs0^N0TpJc8)ZV-2$>wk$(Y}o95mFYk?aX&*t`{i1&0an6F1}}odLp|f%TQr;# zx$|gD+^HW_M#PS+d@>PRdg1l+p@F7S5KVbGY|Hn%!e6bHWA++>qge4`{mj0>V90j9 zd{zCCdfWcN5&FWfgINfQ+i1Q7;0C0;lF$b9niA#Ur3v;2z27dkJoY1Y!UBpgZzQd@ zD@5yU2C%oNki#C5T(}LL+jR$i*o*LShD!jYlkdmal&vh-^GF+`(`SVw9v6$VGA)Oy z`=WH2Y12E6X8*tyGxRYVZ$1Kwi>t+~coIZKEJKqYGHI&4)NR~}2Y8rt=?fs|`o!qT z8({kHj)~MH&ydTdUsME^3g`t~hD^8Uev1>Qb?!)cEA;4HQxy|tL;)~}8L8eKt7|42 zmPP;zf7{>qC{#a;qr^LHEU2sUa)2#!hj}cQCngo?D?0L(gtiH9;s^oU9+BXQ_c&Ge zq8-{(BvU+8CBOpj6V~k^msVh|Rg@73Stfc@%d}7VWY5sNV01RD;v@H;`yRee=I^F_ zb**qNy;1&`J*2wDO!ALQOnYgZbNDs;$W!-?TvPujCe}TqCDZBCgZ#29Om+Vf`Ic8J z?_)jEbNq)fl2Y{c9j09wos}x1i`VQgM=aN5^y!`<~@op~I@$DQh(7?P2T2&mvIyoQe@U(P5SDCV0=U7$!$!^35_96{6R#y=V< zhR2Y9QDpV%Z0MYX)ju4Tnhs3DPApfP=@`eXN_9^CpBfcZhPLlb)c;24!>!cV*{d#I zj1yq%JdK^6drL(8JW@a27Ic^-@L)2Fu2ot{DnUsA3OdjswxF)+f6BVwrvM*oj4s=h 
zc>jCz_dA}|x1bQo*+$EMK@=hLEi+M<`EG<^0i%qu-GKcs@o4~tw43c-bOCn5_wtkc z#mvA6c)8yf{M0a0Mr*I$-*&jZ(IXv>%^FoX!kh_;o!<740`}5q%!?$|e>@j-; zhPDP{!1d$cM1xY|Nb@oviVCl+Jj@JkpmWEzh%&tUyo`X{XuYO&ewnFmxThW?(o0sg ze)GdYnn}?_I$@H5Qu&6N-=miu9?8zA<7#1XTs>v!!Fy;kDdtLXn43E!Mf{rwUBEP@ z3$uDNkGmt`bZPol5jMN%?QcQs^po}AE^lypZ@TBHT6CJH(Flo!VUH-1J5TjF8QJR} z@y)K?zgrTAK1UiM;;7KYcn`F689EdZMUKa+mQ0Fu&+}NjyncDJ7Plt`H~I_ydP8QT zt2E(m-m)L%x!o6b+9t(EaHjs0Y9yR5KUPfh(%OW`q9~l%MEeuW+f!7a7<^rpaMWP8 zK|8{|QjxocNl;{)FTfU+oz-2w(|t$8^(i+T%%@I$cN0}Dwx{d_a1y^I1)j^u!rfa{ zpcc3w9U~(&^&~o6#?|tfYTA&{^0_|DsVeEX9Ci)qF_<@FuBq;p^435EEWac7Y5j<+ z5NmlZBsVjYa_*ywn`}?5AN*6@`B|83@`&00lE#t3ded!2HQl<@?q!fdz44LQw@EJ~P*M)Xo6QIf~$DFd8$z&IP%;aCY=ePkMmA4brv(bwX zLN8=kM%WZu`+!6gtTyTjq#MZzg{=H$>71d+ zD_QS-1&*j2d;j0UaBOzyTlEls_WPvc9rKZ$kcsX`8(UMREyR#|M8Aa^)=`-DLvq#dtf2SBQ&>Ca*^lZQlfGXtFIu zM?kROl>jGO-lsS&%C~k8OBT|>yQ4=&PU1I7p@;A$<2(SO^|BZ}^KDQr4a@fLBKCvc z%@63MuO)wd;48~3iMA;Az}*Uy*G+We0Ypy-E(8v1?tK)yTnuMqtjc{+&`+VywCpDY z;`<3y%whoFKfD0UGm%B3T8mt z-wl6wb$)=K4uASz#wvcPJn!wgt$lB|cdV0bz{TJMYf9Ta71#>!DO=;vdB506_&LAx zU~K|duo)-@5DG-|C-OcXdp!r2G3Lm+ND`(Egl^*c|$SF z(NMTqltaq8N?!B)u)kB`mGnSZdOca+=ihpdU(kRzk+$^alwWw2^PwanA_Ddb5HaWg z{zdEgiI^olil8`Pv1O7xu}k=rwaUhRu^y)pb6v!=~tW%HD@Z zc=%@$cQi?W_pTLUjWA(!l2`oy7PuQE8oLbS2xxEzAKMOvAKpdaUEhC4HpYfc9&r%! zVqEu&f4+yUA6oS6o~W-`1LBeNpiQ$=J)A`kW`ikNF(wrWTVR1)k;}S%S zPX68qk``^?nqZv`BKCR5BqQVlcpYejcSxS(<*e)$IO?td<7yaKJI{zxbk#}QxOy1v zcHI!7!}|zPzXY+PZy# zObeO?iEDt6&2wCD84E9iSMa}SH;KC{>AZejye;&Wg7yDr0kBj&Hvx#bO3L>h$6fgH zNlM-F1Kxyk&N687S^Xyg&h7t5!{+yb_xWb69)DHN#0R$aBL&Ro_9{0Wnc&xIURER* zD>pK0J;M}CO8)DR32RVG>)p^OP8i^X-oL#x@CDRLW#cxNciz1-nvW?#3s}SGKj>G~ z^HG+41gO-@dwg%E*>l3hL;a&mIK&zxJm;M5BFh*)B+y>Jz@YoHqgUrPzsD&3=F3^| zMgF;DrL#Yzqe9w7>QFlxb zf^8;{I$}uK5L~ej`;6*DRUg=eszAHTDQlutKxdci}aiNW~O!8BTkMB zHJv`PL#p+Dy;%whrAZy5k^2(@Y9ApPIcl}mPuTGaD%&#B?sDhdRsRPB$*9*ww?FEH zNs5M?sBa!uPXYN;*Gt|qK2nC;%hcpUy7xXoyhxXuvw95H!CR2)j2IAOr()Y7Fx6G_ z>Gt|&%XThRvRa0w#w8c)lgbg*OT;RzGg_#;{mHsU%e+1jYET0i8>hIEoaYj8^6B0Y z9Yig-$qhVnpkEAO5E(lV2yaruUzz6&o}ggZ?_~$M=>gA3Jp8uWgQxZAN2a zs~6b5d(hjk3OBdO8?{pR8htFCg2;i{W4dQjqWMFIBn`w1J2Kdp3$Mx^?$i$)N$#G( zHihHL2{4!7!8swo1jieQ5|;3lj5$Nwyd8P55cw+bu-R@Vr|osPAYdAV&p;xd`}%9J z-jT_#)yC1lqm=12Uy>X>hOXDizD~2Ltc#}rP%vQsho?MMa@YfzCqIbMsS2TGhQEIL z*0E6a20b&ner5od;#k&AiGE%unsaeQTee|!Oimi@kSt{Hn6bs>#FmYm+AdVb4BxtD zn9(w7jdo1D>W!Shn~j$u@0IFofKxwu^m;1BdFR93_(Ph*9l*d-)$cyS0t2+9y&jt^#YH$P)kAT-Aq|4Nn;er{25lTtNniTRTAG*gAS zZ)hHQ+k&r8QVO+arUqE1(vu}~Ong&+p_bM6aMdIYJ%pPTUYZd#vbjYqgO7<$PNAWy zdCyIG^1!3QGv|%4SB=oiXlA`ZIgDTQ#oEitcH1|>GR0U0Di@SwZR7ti_114qzi-?> zDJ7wFBM7K8NXKYUN+*p2UccgdZ1R2tBF-ppwPTaWXef5wcAU$Hzwwx)E zp{DL5GK}_j2KD+IP9evb*Ue@al0|E8vHOZ^X^Ow8+9^hVcc)vibJ`^tmlt1vK77*l zROJ>BsbHd}>@Xi5k0QoZ9~c~+2J?tKpRi6hz!Nbg z2>*pisf8~Rjmx-|u)?r?;u_n6Yj?%DWPBY-4%s8#LsK;ZFQVsN8d_6xE(n6zU--_t z=s&E_wRy}@92j-iP<-CCx@xiAC#b{%|V&@ccr*fU%2iDoQc41eNE!?mitUyjHszVwkQ1&s4-yyE3k zXEB?BZ>2~zH^WD{{O4ELDtDvwB5R}1NIql57$I^0GzpOX3y}_M92At0vyiSB8C7zT zt&ekMr&8IXt8tJD6_Yscxv_O1;42xE)85Nnd)6tS5kL=C<1tjZ?DU^W!*)xp$ylyB z(Q73v~A z2NdxrfXl(}!k2Hys@Izf>^_+KVUbUskJ*~+P5<8V!RS)lkG}YDjg_dBL=J`>+ z*j40ivygpNzFHNE5#8P~L+PKbsHY*sAn4tqOqEr_5Z3cg+*_I7INO1%r zuIJgG$?10dO0AyMn!5VgG@Lw06CJ*x_=`tp0dI~bucdzhl4&CT{f2sOfh*;nS;tLK zpkyn8DaEp7zMisva`&gzuBd$|sQi5~&y2Qh8Igz_LN-49I{ejw5l9TC{sRwL;Xj(R z1d=)aJ#;_P4Mi4u9q~SZx9lF?0rec+T zZ)J7Yda+%MXdzC`8zpZT8f2tl?mK%Hpvn^V^1F?d?(+lRxkY9FOu2|n#rI*3PYj(Z zjNLG(aH{F$u{iAsb@yz37;%!OQ0Xo?Qq_H|b68P9B~yGUnTf@X6> ze&mv;b4R5Xoq7eFXCFI3=UsrtJz7F%RE_#89q<*NWvdHnm$aJSv0MuQPC4hgiLi!h z!?mFa5KP~oZ}IOUfRpmh0%^7Kk^$dz1u&4#*E#yEr7C8kjA_8TIl%G-3+aen5{0)% 
zl0wb)LDdz#RjlnK%zmyJ=+n0P`)28XQ3~B1UcWZqP?2svI&)dRhQPI65&jhU9J*{T zoGSG~N1t*rw6o?(u&pV-?riI&)mD>b&jJ+3PQsrEaLT#)FSy@jisxK;)Du*8hwb)z zd&=XgcdYpnQj8IJ?F9@oG|zD)4g8r=CNhP;6TZsB082X$${>zbYCcsw$BK z8|me2aJg+6fA%q6(?neO6tVHW)(+prb+Y(K&D+tzUnM+zAc5I~PsK?A`y)sAa&eF!T0{7ZBB<9G?Iu^4NQwcX{`A$anLIyoDDn2nC&w zBvmMUSLe5tM*_N0!=HmHkFKcyc*3pxx8v4iLKjuqc+0U;5Vp)W*D7BQ@yJhlYGRz( zUk&nbm)jKT0cTh+o48C*T@ z#$?5G8{OZ7Ov8qAy-S3KUjsN@pX=0r10%avoue7G_I+hDnNgd9b@cs?iw7wHQ}Cha zNKvCimK!0wS#~5Gs=2h7F}IhA{g3=FIcbi#D*NZ6FyqIiF#qv9)Z}WybyWATnNR*2 z*8Ib-X$-A|wv(|nj9?QR<87wv&d*GlBeaLXs_?bGTftBk(q`I|Vg{#d_le+%vD+~W z>Pt`a1!PSnD}FIZkN2(=N~HQbo@sS!4APmPN2KFgCw(-iisZZB9In?wcd21Z#NN|G zj!vQ*MC-X>TD>82&K|QTwXah|O0NsiJEhBiX{%>|7bVoQ+M(vm=Niv2FM1|~Gn)Cw zfB|sjn^jX+-JHBPwYRcAX6TY4AC!bUBzeW-dp;egwC1!Gxp@bXvm_QnO9u_5Mt2_g zpLPeLsv9!gW?Ya;VsB^CxVmzk$l|ziex9HjDnd!hw}&)$pyGHv%D57wj8jr=PmKwU z*OeB(hI`E7xX;Gf$0x7Q&taG#{)ovS@VTICz21?;dEYGMUU+idRY54!(i?A;!8>?hx6q;Mwu7CP}{87IgipK zuK%P*r-;$ZJeu7EiMNb z5*MY9DR}cX?Z*fFNawkze>PnVQFY?d;ZS=cHej-iJMF z9m_@K#^()Cl@H~ROJQ=__CH#e z!29&m*50a7f>@#t&ABblKhvmg2==cUZ04=u3>u%W_t)leX-Q_9iXbt|S`fNcn~CI` zgSe+mFz>YL*aCvp#?4n)mgI~&In20WHBN)ne;iX8KS@yi4}d(RpEiDNc{tQ12ndrD zsJK^YaL}?vj_LapRnUK9^j|_eVAmz*la{e>9CYW7N~s~c`Ml?kZi{m|X>e~M!eZ$2 z0u`%EzDWy|3w)g;&hWt*0SfBaT1gbZ`d0-Cys#?=<+RJlf0f4ot$R6++{S^sRd%@D zd*Yr{DNe`TE&HN$$ex`qrn&$>)3!-Fu(&#Y*K=dHr^tFyTbjOxN>*0Q65jZdI2rz5 z9QNd29lxAZB(KkjeK3WH`Z>Da<86gy52>{lMch$2W759YbfvS-xM&i9PA_Oyw=}^S8UgR*g4qs~E zcl>L3)X%kZv86Wym6EdlnY!V-dc>*coy40)BKLpPP7~Ck#NHeyNFxMFamuuA`c7`v z`g-j@-j)H&_n=q5k1yMIhjid0-7ak??10F{`2MTq1ltC_iNTT4YH8cHdLcJ)2hVb9 zlL^mLU&b6x7eD!pgNfcRRr5L$Hps~*jvPnG?hB7SNJ?Q9A#1=6k>|OPn6&J$f}Tfn z{S0)#uyFUdbGR5!f(cGj4d4YwiMz|o+M`c;ot@lErC!Gj|qYRn>RsN3cr_6L0t*WMv* ziH@&T=A%Bp2p~(66{5jXMD^goEdS&&Vjgp@imD=$O-V@X&1-Gu<*ll#1pJkem2|Rj z#+s{nyoobk%Xb{Ajo@1zFS3xvYyJYxtVnJX&-KC2G5BIrT=V{&HN|E#vvI>qCd5uipmILwc-?%Wv<+ z$c7?tH*^A$Ilsd_#xpe+i^b49kr>~;TdHV(v~E(Y-n*lt<|fXhCVIi2ES1(Yd@>b0jkH~zZ~n!9q{34-m*tJ< z^PZ+Y9f&>tW<#?-|NHeUb?TIICI>>O61%#BSsg}m=f_Fq1bU-9d)n?<&FIC$@XIHk zUhk>}QaypK##xSij^?5u+`NDIY7*Hg>d`5wpR|Ir%m061M`-Eg+`o`pqyF63tMZ0U z%~%voSlCq6G+)H1qy~p8a{H)N-1g}Bg_w{0+k_BnH%~ALpTXypfN~2dopE0h6;zNd zes?_ebNL^UzwgX|eqY(8Zr>9<|Hb5+qDmp4CI zh}_0)+GSOw77>Y<1krnP-Q^vODT?gGxwr~*{!Lt^ad1>{ldu`wqhJJdNBU8%XKsqr zx?9T)Nu~ZS#kDJSB_+HHSLu2Bcd&ObBvT9CrI2sOXxX79)15}1plxHa#Ee3K#}PcP zF5V|;B%-#@IV(>8gBvE7Us1&q=j){cjszdW{Qn3K?6^NzAgisaiv)fJz8awI`p~Uu zjHO_n9YpTZ8dY+lS@&h=PMd{V?&0t4pVJ3A0RzXS`s3z3vqoeR)QvdivKe(H%KQ&? 
zt3mp4gV271@_03V?V1n4)yB@zCb0EjoT0i+k5EBwcDlI+!uU;2DESj+W_9mzM7%(l z!5yxd4{6LFJk0jqNK5yxX)TU4F_is@w#XC@dNx4)Ps#;n%jEWOy6rF45q&w?)02r$ zPK2D01z&?}?-=X}BdA8tZg=?*pU#g^sT^nhD=(2fM`&L_yT=>JG|CbD#Qba;mge`3ee2GVgOyqWvD{lxfPb5 zAcNjd)J6<5?%>jhK?E;azVhbH#vr-o4M$ZB^HrpFW)>h``yDc@A%F+rf08G;`VGec zv3T01?V{}$P;uw|(M9_ngzjhD;A<-TZZg+dgeC2Q$$hoZ_8YYrx^r$E5_9T9K0`h` z_2-Hoj7RJ>U-pjtsrI&za`tzSdF^CBp@UOm$gioj^PMAve(9Y+t^cl?)TM`N`ansn z;~JT42snGce!7AZ;{Pe8ocVGeA}4r->Ob`kEL$_tb*w!O%5s@0R>qx{j!s?X>F7+1 z&vZaPKv=rG6>KGvMYNmqJYlAtvQMmBgVHi~qdXmQ&m}?d%>BbLz*?!LFA7u=^;8+U4(P1JtWP%q*_rp_GX# zbXGY3?!7~KFp9<;k4GV}!?Mw6R*I+fVo)EvsjSoq)wvN*Ahd1PS{qN%Z zj(r^zG`gZoM1DW%wl9}5ZM47>6}5c!eMtlh=H8lWr<~7BulCvrdzHB4`6&NIipbVP z*uahvX`~O4&?cin!<5wPAFUu+Y5s#3<#VFutOl1|+5RK$qYpxO0+QG6y%~OR+^2M9 zpb$0O#e&v!veHG)2$$^J2-kb=6D5y;9kXow>Yozwp{%2o2bo# zf)ev-rTo2@xGUgCgLV>XGTsP_O0V}b8p#nW(E%zVk*56e3a+}-$()N)(KBU$( zbiY`=3n6`^CJt@(s=&aQg55^CaBjnPG<#GMK3 zHOq!#HH_RgQW5ge089xg3Fo_KcJ*cpI9=z7 zKfY1M`?%X{(p4%MTGR)=*b~@zcj}r;QOiUocyzHu#d4C_B2j+m6l1TpdRr}}%km$gw}>DK2p zVQHB?M7aIU!_|`Z?7SggLziDdh?a-7oPcO)gfM5B-0QFaFHVU^AJmUCk67_%$%1yx z?SZ8TIVFhqcUX2>uIEQ2d#6G*SxRP9Fcm9MJTe!PI_lc*(>bdH zr#i%p)Gjw@2lNf`Qg2Gun(@rVO%LFb)MY7q2Av^kOWpT_;lT~lJ6NE6?-Nv5Snf|W zi0tntOd41?58_xIJk4ZJd!D~72)%)hTy1C|?dt_72wom)`<)D#(lvrETQ7WvE;px3=a}QWg zv<+XqVj4X~zf$A(ZH_76{m`t!>-Nh{n7mTi!I7EexY-i3lY7nlvr5^-$P~Q!9+SA* zXEy>`>O(JHnI9(Ep|ORdmFk-t>>8G9cwHW#Imc|S5I8fcV+mRu+o1%K{B9IN9=aBw ziPk(W&bCNsR+1fNQh{m4bk;!TQ!2R@zvFz%utl zGpRx}Uw+QNvI-dIx*G3k&6(C}de8#iM>gaj1&|59~{9S=v6-_Au@p>GR=!45_|1ZC`jN9vmeoOS9bJa*Wbn+fTiaM>J-x zvxknTjko%w5&t&&9eR)CUW1~W`o`{0-A|Bi5$zf~IjDEtpZ^~Ew4dhKK1V;hK8hr8 z6hZY<=++H$Ng1JVdqj!i@Ew+yu^j!4)wcF9Q|LpQ963g1hxcE2)yEL7xJrnYn|OBy zyBXz=m1n*xPi-b*QmwpJ0N14yP?kK4?M-}xG6@$3OO&g%RW<}TjfvCDUyrO8&n{e> zye&RnzFNyxpQLAVJng;aeiALt{#b{60$DM{4^#KK!s+X}a5d$`iz&9Vc>+I1uhf8X zPWA#OVJ>`NX6djzL(Ln#>V_t-;raR~4$cV+^xsV1&x#&5S{U%zA=+SaS42boP49=( z)bi|3!><4$Z+`u6lP`-bkldy&IeDI6^ho*C!t2bq3a-`GR3Lj>Lns4efyo}?fjKJ zjk7Wnd(;=9_f9d(cl=MtAmv%uhNP!7TEQES`FHO)J6vEA+f1S+qJnt!J5nxgQR3(+faq`t?)&N+y?ififfG;`s{+J;p}?8Kx} z4^-h_@F$XW1O(6!2Ohyw`I!=W5i^ zBT?56ekl)AtbEwLSbq=mknoWw);uTTz^mVU0ML^LFw8Iv4Z~`vxZ^!oPq_umH`3Z86g-I)_Qp zVIY}?>f}a8%8FSaMJJP(VQQR?MNt>C+~;uxp9dmtH%AurUESe$U8_|(vO;-|j(K@a z%J@H)jOWA5g_~O`3R}p7^mTy9i3_WSI5ib+l3`6j_+-sRVegD8be*3I3=XY!ectr&W zxY|GL?b|8di{n;_aQ>RPMFaO(k+gl7p<`>$TsA!(+)DQKPX|{M{)n=`o_!8~R$hZi z;;&AKH?wCC+A*N$clxrH283(#wINaCh>i)F4MgzT_4uQ5l=R_ZYN_8EZwvOGVnn%F zOU@U4fZhat$ayNK3||<1eTu-{l6w3foKcMayqAKqjQU7YVw)MYeE#7F`Ns#p;B$o7 zkp>pzt&2aPclW-J(2ib?Kkj>6ZXMot3LwJ16rOsrsR};Vlv-kZ+dV_?g0=HZ8b3nS z>W^@EE%XPRqs7wM+NGY^`1wsy+J^h*o?xIKQyw+iiVZ5b$5Kk0>?wWU01v+@v*1ex zJap?YO4&t+6V1`E?+3e`^F2%&Czp{5^nX%+E#Jk9gl96ra)Jw2>x>>ty2hy(!#>%z zS)D%*5&Jpbw0XQBg9YzJ;4hC)13(w7qc=d8JSRLVCXKQs0HTVD^r*i|I+WCfz)!EY z+{M%T!$0xHFb#h9h_oS5jyf*-4SA zw|Ta+OckqQr2OYc35LwQ`R)U#?*^++TKk4WjtAGx3B)V(y$Oiu&r?fkx>$ML$`;GIQ;`5~xpk;JX_&%lfBL=SMV%>Z)v z95*pT5KTq(_9Zq=Fq`_=?`0)3I-x4}QIa>Y&)$BkaC8o!!v^oXSk*sNk{#)3%4|7P zXN%-wSXOV3f<8|jn6on9NRIA zR23t`;V$P%@)NUTFF5etRjh3wB)^bjV0<{vXHgHmtvG?)6%OXgDEv9khumy&Sv&iT z#$pcSvWmTlmDzR2FjEAV-{^mn6OYNWZpz1GiL!t4wH@CWJ@EN2ods9U#d(O3cUTHa zq2wRk#`xDwjh2r-)PKG46UuFBQPJawU$zesi=u0*=OSQ5r#fp4#+Q$5R{B>aI-c9L zBcr&)y8+^T3|!76GHx?qw(F;7S>40)7=!Ky&J{Ht11u^G>B`i$Pgl_bA7&00DA=r! 
z0^2p({*p4trZ4Yxk5;!UkXz6;O7MoxbG4{=FDHa=EBdo9Ld(-XsnBGuA4#};m*YJ> zQUalMKrfk&wT6^XQzFJ?zu3^m3j)G01S@z)8Q$VmKIIKEXB1_zd6U{lMf1dkj&!f~ z6Hk!PQNB%_kv!HnHW45-I>X%OE6M^}IZRZ^8`dtP=4IgaD${5RD?p-V0@dEWmmry( zzC2)VN-|D&**B5BcvOm3c-|HnOz@p z77mQc3Am3;(O7hDz*qp9N)Jc$gmpS-Q1zokSGf5eeA*j~Am?jMz zXQVR~Y?w^}qeD$Khxs1*%5xFui^A`EOcm~=A{q^)98Z*)W9G!me4iMN`7s^yXqj*P z?(B}fes0%4CC4)}e(<>>9DhGEZ`>0B6zk+gI@q#r!b)r1T%pefXTykryq&aL5@Y2U z{&j*vf{M_?arCy(e<8&`7nMf-1*XezogXS*cGy~bwc_uf^TStVt|u~-3#CD3PFT15 zU_7j8<&n;!+}18fKOugdVGio_>uom6UG6`WC3F@GTaN{II%IFKeSvJpK|Yn;b8DZi%7!xfbSPgpato?FS;cD#%Fg_tH5>=MarjXAA7xNA}Sn1 zdTM8(_! zIt_qqZs&aA%N099nPOfp-J~sn$?}6NKYVJMevv(6p3pFM1)Z9x z#T<&c^x5Z<3LF?5XuadiL9)35P^4fC%OZda`owB6GG~TC#DbqR#W&A+nL~>f{Dmnj zO$(eb*i|>yOMkn2Cu&`p&XhSVXOTX~mm%I_m)21bu*V5Kym8C9Y7Y+BI5-X1;ad*a zh(4{Z2(7Q@9mbBlazdwg7uQ1$=g4!01qF;Bd6goPj(A zN6r%2ll*ZM^Uqk4*5mPjU8xu=LmBPXzHsY;3u~_3DlwNxIre=nf@o^LC(^h&e;g3* zAa&<3gO0RNxL!)KynKF1CyeX56TxpfpTQII?&)Qy^r!5NcDS-8AM-hZyCj#^=AR$bTJxIh?tetpbu3Qo)0M0nlGffIx^Z6eA2Wz4S7ttw zG#zQ%n)9$(qyAMsEboBv*J*5Vohnc;Q`p=h?Yw7Bz(Obi#D4o1uCk80l_c`*Two5o zCqy-9T$Sd(feEJM(#c?qy=GtU_>jFZmOOt;y4Hm;iP>Czz4+|rjvU)0J`ZBWYOOAA zY3~2})P4L>44_n4J+qI)(3T-Xa@*#db61#y@{(D7Acy+iKl&+?^ZRSUV(vcBvmL<* z6s38Vy@%VfeKEHpYo;m-o7_?PEi?e_=-Q`u%3&XB72}c zI2qmWtt^ylr|9ZPrUS40`UjivVCPA*4~lwkH{yxNAp{PTkTN0~Azb{1G@q8@Hp#Bx zohv^gVDX%s@I2Uza93UhY}q2D;N4 zvce4PxWAT>c76mY8m9TZVkRrS&Bd(SOtmiMfJLTCx-b4*T|)P~rDQsK&T0jv|4WJb zbQ)e2ifw>6I?t zShRcdoH$Cy&}21QC-CWk-3+>A?K*pD4F&oyUm_Z+eX-~7)>f#k>qD=Yx7^w{ZV3ei z$|x7>w$?OWNrWCl5u5==S3RLq#W)QDth_Pr-! z@BQt+jhh@>hh~9%-oj+-EdXO}Ip_A?7em63mR1~UCrx*cY||) zuP9E6Co|EBHn2Cc&i0)xUcwH?q4Z~6jFwylLm<;w_Lut1aPjR)3A}rz#GR(7*c}H# z2AQE26}-{IlPxG-j-r}kXEfU)>d3)dn9>^knw;qLV)o2K(dlnicba=I4`%R9Me^In z=d!~ZLv*0IAR4dY)BvkL`aC?Z22yu69T{ZW2VuQ(wKCZl)mRu~EEO$YH#6y_b$wbM zFD4w0DioLl9Bvzb*FDrYsS^1)V({!7sWEx86z|aZM875-dqP*3iXa+z_s~-;nTdr- z@Wopv7b?X`LfMS~p?S<*ib1g~VfbMJvM2okw}t>Lc<|-3Mps97&~Mjz8|FL*Pk*8b z>T+La2hH$5Q8gi6F_ zRDV@Zy?(F~={$y9?f3X&AB3bNH&HwT8#~H|&>XL!h*^vDV*o@cEy4NvrVfnAQ|7rs z>{r+9ljKn0m)%92JQpAZ%t?dD2Dw|O02gd#D!s=u?w9|nE|aFB%=#Zg;zVn zf5G3nEOnH8+Zy3upNZe$mg{E@7h;1^Y;nJAU(yfds#|cfFS&XIeLa%GwR0%18v$K1 z=)9)mSDfGOdpoXPZnYNm_fM)o98+aY>;d*p<=ZjT(sWyLpfOXoj9&&71X>FF#pO=JI)IzN-1!tkkRi;mQzpSO7U7dhZA{^tW3*ayf(m~hhJq>X_i#-;M&uq zw{dD$yr}*}QYM!Zh}FEbI)0Eipf;tGk}Jw*$6>6~lvMxLQ)^;Yp9#8=^>_U15D2Z> z^p*b&+|}iA0y{EzwRjIe$y1KhmOu0zida3OsHab zqbwP4ipF3zh>%RWXuB6+K=J5NMy^j8UTH*|w?oXi*34R(pOA4*^L8;&7&VPq)L8GM zO76qV8vDUXN_dG(Pir5pkqTOK;o(XdarUWvAaIN&U{gLZ>uep?G6h7iZXPP8H+MWnIt58vOZvlj&PxgWyg0vdqq<8B+c6m)D(oZxo&FF0s`{WkTc;s2 z{gv5+Nw;UkCxIO%DReL&*JFVgiT+dh1YH4+g6L84O_z&sGY~v`-JA?Ag`|fd^yvU5 zlK%BwVyja2W9oTEay09WeCAihB%iM<9_Map1$li)c z3h?0-X@~`SrtshOAdc=bMg|=gPR)$R!J77u6w#AuOChbXA45^A=xm-zl^mc@=rHFr z*4QAi_CP~>dZc3cjWFf|k?yg#M5iG+bg%lRJ_qHgkt5cfX z1cD>N?2gx;c~fAP%jTWq0~)`i#Mj8yz1WNhH!+VsXr@`s`u~x-;H7@A)J8CS+{%Ri za;5S+<3KC1`;tl(6-zp!6R*R%)m~CFD$?bx_wYd{75%K!hb)k1Bcdv&}IW)sM-I z2H6}m!CmHeuFO3eK*{HmD0iF|$S0LGnI~LlmPMqaGJBq3y&rR=Om0cFk`_2_vX|)F z&+5jnp4F}0=HE#&yH2bPu+iygh@-cg5I#x_XBX}Wt#KdCY7qwB_j=J^9dEzJ{Z~hQ z9Q-U|;rk4Q0`bl{faac4ffg<(RAl$sRg_z(N=k?$e`4~YVh=()aOvQZyoPMB!H@HT z9u4jos-vk(iQKCoj5?@X z{$I7GB)Vh$n`-$X=ULQ1>aMzi8gP?Dz!Fg#;znbP!Y*OLoL9w)S9KBpJ~cyDt*;D} z;mbaf*90#SlJ;u79zC5>S#PgeqRs>$w$|V`?{d!N@Pgl;AS!0Z1o)Bm)v?o1T_wQL z+K0mndSC?ve&xhf~K{Orm(v~qv|6leIJ9pTa&HgL0vlZ-o2OkZa?{xn~2?`1E3n2 z>%!r8&aXV8&X43&6o7=QGYTKb1??RqEg0nMkK(51Xk>uD2#kJHW7@B}1VdrA{bxZX zdVB5y84eFV5)Zd_nrPRi$2uy=vaI`23f?iuT7?o-2vPeqehEh3-Qk9#pWq_t0nSrA`QFV9uB3J5)1!*`Bhkk zr-}a|Vb8&NgOHb)*NDlGu;*aRB+UPWtjtm9I)9ij6P%J!qki 
zCn<27IWoKtYWVF)J`%uUMPQexzqEfa@++`YdN$TL%VRr`h#W}Y<4@%h7eE&Mn#+tu zX?nFhUChmliAuAi1pRYvNk(`x(&A9TdoE8Ft%P`Zk#~Cjk@Ru8Ph-}x2#r)uygt5Z zKRg#gbr+(uVsj9jr>)3t7vGY)jWQ;)WDJsDbwL_Q%u)*7d!uM!OsziKv9v;(Z+Mv@jJS|ULN7YwM#6Ba98WFKb)1w_GUbl?6nvWN%X;o;|kN8_m9Q*gNX^2#JX2K3_SV#*1aTM`h-hNnK zeXsuSV#kt6t6Tz!tpUz~kw?DIoYd6ojW-qr$-34-?)%xAM0V(S=jF1(tBmZO(Yrlp zQ#1(9$VWwTPcLz^p^2WUjWKy*X*5b3U)Yx`|5hJeg>v9YUB>VWQd5-C#is>5v=N19 zPouFXtv}dyS5S{lM+p_Oq#E`krUcqrXm^r(9N#a!)eDkdjwz&L?g+;Ij-F{~-W>-c zvka|xu2U50Rb}fw)%1(ZRkMAy+@BPet?r#BC zsQJyk1@QQMyAm_o0~hQ?dAP-xxb`?HEvMuqcB1!t`wW?{n1gM2YCbk>n08{DpX%Bs zamUPRGF1aFKDK9K?mtVL()|m&yc1CO|Nd9xd9oy5)A%LPbHs4)hZO6t7`-`dMT94+->y_p4@rUe?RSrGI|#| z?9rBLGIDLhsJdhBeq4i0jAp-%S33J)L%PPR<+z46&7y3}UOsDp*!08^6a?`#w<$hm z&L_yM*uof~|8&Jl`(*{vWWAuXA}u9g0nC(0bae(`DYyy)O2%k^T}*(DFhWn=?2b5n zxkEuAJ5-jQYtZpt)-5gl)uY^if6&B_OAx80P{L&N`HnYXW=-~v6Nb=D zS0?+D7`X}B))xZ}!B0F_;Em&U7qjjp;-aF<$y)zE2jDt|AhO^EuZlJ#Mfo%PqLq4p0(n8&(+{u!!KH7^t{PQVW%8lB&MN7=h)ejl?}fZm8sDxSuU$8*It+dj|9U+AlAI49n#i|RffH7kbo-L5od0_9(pY@yyNXp< zL-s$t928Qg(g%x+^sAJYrSs~}9(_+xi_KlbKujdY^T-Btlc|lXZ(~#*=MA>yv2@jc z7$EEzC3@=jAW-rU*`~$lk8@Ip9Gri`+v#1|;T?b9k4P!1633u)jim*#LouGz)h(0k z1Su+Wu|!e4PT}kFL3#1=<6rcx?rgoQL*GEoUOb*LDOljCTM49E%PAts&HBqjO$zoH z5N2H>e((8sQFWN=yK^A{?T_#%_pu!k4Upl=q)a3HmipP-`nh+K0H7ygk0X+ADs|C^O50E>`+K zM1Y`gpu+cL<$_tYTv!?@ItvPXWCHW$>=8T=3X-_jXwr~hVc?b@6jYLl8Yt^B@9HKU zxmMB_AY2}P^BU~-D03%^ZBR01K~>HABN2$x8AyFS{36Tyfmw{FN!#D_A=oyKuT3QO zn)E2@-bQm08yy+>W`AQQNWW|`8!P=3c7EV?^`m~6a+qNNgHkPeeAbdGV%4q=5_O9r zR{A{jxkgtrDu1C6xW)Q*82hP+o-+V}gnHZuLNBSTvfb}~j^9lz!2r8ECDcT^cG!(6 zr#BEX#KRkje-!y5NQ)o(Era6npf%{}<(KbDl|9tSy>@fD$hjey3}UANa2EF8XBF~b z>e7M)0?uOakjB%0E}+DX8MkX@XA^rAdpC+vb>&T`(s+0pX;OaKvw7psKS;ajGb3HJ z;*CwZ);skdTF6-O`lq)kFDmEx{{iGIr_o!eE%EEEwTz}z&_kqS8!0OrD&M+dE}hJG z@{f#JJ^I{d258Nx*(QE^7|8>LPs6@aXdJ>?Krki1uHBAUwE2Es znIAU!jN6zC3tO36l>GJaBM?z4A4xDfhg?CuqZ-W+t~BIJbuK)6b*!I{eFJUgY|$Ze z*urK}mvYycuVfxl?0S1xIuR#NkEWns>(yJ~jp;*VHK${GZO|vk7h(U`1%P8)Iw;m+ zxeB^R@F5SD_|OR`+z!sC5S){o*~k1T#%Zp$dFY5<4Jk!ojA%Po!s;P!beiK7)fFGM zf1n8~4~D z`FL|*Z!`b(%Cscj3k4tT>A%;<{&oF(+!PN_y6s=nI|(s@3{Yl)USDL}=Q@{t0&W{& zdD)7-$Mv)sn%#0%O#vAwTT)10r zjht1#F$HiihFoS3+P@080p9+3I*34Asws^z_E-BSH1#Q5ZbrK0E~ku`FYk`hTDE** z6eX=Tx2a*_72!y+x4EqaF={Bm5JY$ZM+6UKid??4qnHF132(b+vn9CZ=`DK9k0Jga zyYF`1N%>>W3=n&~#Prudn_0eJi^?3N;IJl*?(R*0{>9MiE%ot!+Sf{vKXtH;4Omfl zp`T3!`_G!irbApwGrNxEUJG8;`yX_18`tsH5p zt5k9zmi1BG+sNx@jr$irAc#4M?He1O@IS+)DdGG#`x%|$b=+g4R_4Wr!l*5bCdsj2M3>XWSWp^}+Nzs7}fP`cq9G9$gE8($o_tenYyg!#*DoC_J2 zK5F*DhR^#z%7Qp2o|TFNc-Tu+ZVZ{kI^g#CDI(V-zNcqgF*B_ERI4)^uK=XIL!IxiXfG>Wea4y%lu}4R?y7fY&9bj-GtEk1piirhu)u<=kVT2Y z2Dp*6vK?(W66PitT2?_$zT_~J$)}o$uuQ?%t8u^U$OL;xuf)8_8NA)sFl-NLx_T*d zdJMro6OicS)fS{M3BTEQ{nt8Hbw!ZA)I2uO`kx+hAF5dtC- z0#XtRQqnIvM~8F`327KGnlWHwyUt$k_vd@QKb&*^g7?rdJ%zlHQealdEeT=2Zia%thZN1TO~BD{KGp1;bZ@linIZ}J z+=VR+PYI&d@FWu0^{r2B(_ajWRyjNUi@~Y=)tB@RbQ>8#2%4tjC9B}<2)BvE=%t{zI@Mag9a%B z%6P0t<^u~;87~Ql`Oq^SJ99mtm(g#9sfx9_r{@=zd2D@WO)V-t6Is{Zc5|?6408F+ zB2{xl@-l|(zZpV&Io)Yr%aGBjpu3mw4D(!C?75kDMWr*yZR-j z{DwgstcIg&Iel#PzL2avRjI1nwXi7~h^lr?a4Pfrw?_Bv+L&YAXi&q(@a3#;KKl1D zsg8o*PrN`-z^Lz45sXzDSkNcT?I{z@iB^Te{)^l7K-(FDP!8^2%2MYPfWWGd&zK(K zHN^GV-aQcA6-Lujr~W%a-25Q9wl7#`afdUUba+eLht;TuFe+{`gPpfxYsI*}r+Tr6R;^rzbxAhIe9iF1pjfoi57Hl?L} zaa|H|zLmPg*`;)q#)m9>&z`GDeh1mUy704))Km~NY1@3tcUp?#t-+1%SzOhbaa7)D zgx|7M9^-p~e{W5Ybr*mH0BfW0t6>QGF~;Y}UYSXTdAfByRvC=i7!4*tDHUP9!DXmy z#&JjA-L+%*kXEYt!JwZH)~8sm+B0&qmhY!P-E!HTUIbNw#=R)2g41GckD}5QC|B0% zH&RuW`4dk0N&yqt9F`WwY9P$&GLgD@a#e|{{lZ2cvhj&$i(yqr{N~|3_#)3zj8pv3 
z^T5!YBNOJ57Q2iiM&Cs`(+r_W$m>c;PQ9k#LG|HRodU2OEcM=y*)T)@)n%bGn&6XU)XCMxH z=PO3R)|SAZ%CFn+%(L%`jctiyg?QKgW0=!wfQ&uNHZ<37z zF$@{^HD+EJFz0Qaq~HEsMD5CVmhT{-UzcqtU(l;`xxM6aX3lD~9gc#lB!6u*-!LT0 zYx)Vc=b@KVUG_X)cX;rA2}pNz*FE*T+=f`LH@rJ{i|}sopEwCI7NMaq=NkH<$$5_c z6X-*%s6;Z>ijoNg_^(Rq`c2a_-kI$8kj(F%3Aac<`DQv#4Hw^`@sCxZ->Hy3vj5z` z0Ma>?FszoU_kAZ9Fr!Kxqg=8zL9OgPx?x(Vx2=8S_DAcMlj9 zFI_!=pYTg~An6SoWJEN5|0x^5;fcJYvdckGgyO1!Xpp9^vv19ZCHn`>6~Wi1Hiw3m z6BS*`OsB1R2|})67&%tN#`~64Y;?|1=w`t04Sd@MyDwYNBQaDmT>(y^GdN|7Uwu)T zdxjV#@Thkm)aD4#Q%8HfQRP|BhVsgcb*z-p9!ef8-7G&oHmc6P{+4El%*c`;pGq0l zVGuD`4YyCJZ>ca<^>cMs#kLf5k8SLU-t=CAIF*ML$BVa30Qt$ovuT4Me_)BVXGV$9 zv8;8k5N@@mYu08eLp{cpY~0T?y{h6D%aC*csv6`7u_I+W<`24g468jmedx!+;~Pyp zsPKm-I&$l|UX0+B02Zg)@)`Ec2zu zBW(?pKk%6=H%c}IH@>`&Ht59OHLiz_NGy~Rd@SS(uh4UjYuEJz{{%I`xh_92Wl)k=w5%ozBp^tr3TX{eS-mP-~r~0Va>;k+V zcB7x_4u89**Aa`!v3R=7Nu10|;0WE`1lh@4_gogG+gIwY?-NaOd0lr72KXHefsNpsv&i@SQIj_oTOq765O*YO!ySC(DgvM`$&J+fFP z>)agKrdNPJ~-F4pHRPUzmkA3p?NOz%uzu_D-!n6OMY{ALLw();s z1OG>~F!z6r7D(7YaP2x(#$L|!mfU8T+4Z%{Yw6#};=PU5cwO%dg{N2}j^iK**dF{8 z1K<2WI&TQV1%OwgFfG1XWq|TO*@B!TL*3Qk?WOiy5Z1G3;sy1Y0IYL4YQg&C%Rx9U zsaO6oKWCcrP=8MvnKhKc!s9>UmIW7!mG(7>|6-c~U8L-|$6fdzn_Trl-3~hWTC)>%{pC#m4|PD*C`vaV z;Ujdqnrr1H$&PmdKWu(?(Wft&b#Q>i%}TZWThhmLrK8|-I#HksRtw?^m#0AhC}b$62|K` zo2uC~=*G^Ygk{0!e?40X{otT)XZnqYq&p-xK3tX}=#atGZdUu_CZ7{qJo{RQlkaKH z#2&TIll>BrLThh#2mzP6#a>Tf2gSh4FSHP6sU84&DR5pB@cYqEe6{PyZ<`G=sgaAp z*J81Xgfkv>@Eq9j3VXHW&{zG~2hlPC){B&>6UAA6$t}_BDVwl36kPa&5YdPQxIcp& zMW;Oj8s!le{sL&G)9(sLzMHf@ralt*-Sf=GgOlga_C;I?Y{-Qmuud0aMcbvM_gU-j z*nU|BLpDjIlY**c0c+>$Fm0w`C}ns&hwtJ)c-tlsZ$-b}e~QW)A9(^86Tx?(%&7`| zyOd99xOtl;_8?c_l zn8IZJ4V^>Nu8YA}*9XYD6jYh11o)6U&z5s)eH0DZ?$V^Ws)~m38~zS>Pk>KD{78|_ zG#F|~;b)IwSF8~`p!odknkuxy%6nOdgr@gB(CT_Faja;BT%Uj&=HPoO5JyKX`kX-v z2Q!4nFuOHx{pH_jdVJ^cISWP!{&B0Z1OXd+uhF?cdQ6SsMqI(b39%Vrz~H*ySQ2ac zhDp}(=N{Ixwvnup&e|wZ_S+E{XY~DcSJLzU#vBo zdrWJquw}eM!@T@V0C<7Csf|ZGXm^4XDww4QhgC%3_S)272C7lvw}^w@)7d7TqeN1( zq6xNIcoNU@gogS2XxExG4MeUMuZ4rKxexRTXPkf*X&r?kw8s~LMSZ+FsjH69*cMLp zJky#Y8PwL4g_=iPFD-5YNF-Hd@0_D{x5(Mx^M=Fw9$t!UJ;ovKOIgsAYX)@Hzp6v# z?GN&*Pum?8*4|voS^tX7jC`Q0lPyjZE2RGy9Owb96hkEUSfCx>D_FD`BXc-=#hLFu z_(P~?xFq<0%zgnw;!*6h0u&h=1GA?Hw;Z8F#MAy_Xs%kZYm&R02m38JG%{K8o_#9l zw@WRxju~Yy-K3%t)J!<8WqcaM%Rpvj_X?}d!c=z)t^6bA%|6Z8m-H`jp7-`~@(~;U zU0W;TsiUFYvx2oj#e6y~ifO(Zj@HZq?$aNTyo!ja=1?Ssx`n+zDN|rG^(Av9khzlI ziHz~bvlF+(2DiDT6ULYdNKPb)ZA5ku+nVEVs$uT&|6&EmxBI8>@Yxl3MbhVuvKR5^ zQ6CW#wY_G0n)92FU@w>z2dC}TT=n=oY3O;H$YoV1Z~7TNnJ#sD>N>7iXd2ue;|a%r zga3O70QxF`2*bc$iUq86_Kb{OHzjpl0(XgLLlHsJ{f4vjw6VCG=h)O%Ze}Pm0vwVYr&2b^~%tF{UecGk*%3gj*6w> z#@9RqwG(bpYW~x>u*iSwbLF3Sz>8&p7!tqv^IJ811}*T#q|okQPUvX(D-k9*789y2 z#}J5JeLTax@fAeLF;}$5gU9hjxy^_Bi-lDOv|$mex~?9o`6voQ*Rof{!qDEwWVF*n z0qg&=1$qBr3$87xyL|Tg81G47zo1!={!@7F^MGY~ot2&YwUp;iO>PP`5St>Ige7<6 zIxm^z{W&<2y@iPVm<+H@9@T-)g(_(ivt?N7&jdzc(cuA)GT6g~ZlSVNXEy&~L)EIBOC_TYG{yxuImyPHpEANf zwxcP$rd~ZfR~(CzR}Y$A%jWyWAiYpp2vi|fH?5D~6Z**RJ1CFlN+WY(N1u#Mc3~Yq z9VRock0ap0k6c0}EITEDaHrO#)_9CW=&eHF21@=7>xZ$Qa?;$>V>+#rviTuVQ@+U| zzMttR9FW~RMf@N)pH{N5A@M@LvgG<4lTaW;EJ~ZeL=jgZX9hT31tG|(sPd=x9OHU8Mo)IddcD7jEQ$IP$3NNMF?_&EhrBnm<#R9sxl>z8POfx#rUlQB^s z30?ixIRr0sVIEs(F6w-LYs$W}H{&hMUB!Fe9uk-()YOLM$Hs!QtgLEVbN@PjVm*T2 zpn%Yf%k}pP4Ugp4`E}i(<{ugM_i8am8AEoyidCp%c#C7cbB>%O`%ZBRHG+Mxnn5&w z9hPTE-$o3g4>5Ecxwe3S8$H9X)NTqABzRu)9?m&ZWC$V8JW&zf7dCUBwZiFRsN_53 zR9$7&*+;n^+S4lC?d3Xh<0@=_R}-1|B)ZWP6A8{NHa|UV`W<6aT~n7Lf^ca!rBNyy z7XXQoSOav#4aBnzM}Nao*BPUN6|S8XD`Ju+vs<5RPJ?|ih#+A}AK%EVh0gp0b5*Qh z<#+Y;C+EX@Ry5AQY^K%Y$TOLr_hTuS%snT%yS*?7uP`ed2y*_1!+jS{VhwuWm(~>> 
zFf{yRmvjvlC29>+8dEXxSJ2}rY)PG$r03l-aUVTXaY`BR#lfi#>Dc zxEd$lPMnt~*(sLpSjXwHbSne~^{}ccX7p=RrXF53Dn+wq%(snpC{&KUDUh3vzQ7p! zElwU&ZSNe(KA>jLps8!CR_1cPX3gX+r7Of^>WlJ!EBZn=)9J5S4txSDErZ`NL!8RU zJ7nhpFEwG76o1jjzsAgl{ha=06i>@{@x9X5mC_rDNb@&d&k?$}=uh>ju-C$9afp1} z7-sl$oU4z8)Csr{E2X$i#sgC_%FNSKVc7TVE>gJ6_|7V~(>awMAf~zefYLuV(|tf% zpA;TAN7lmpvZ?c?DO6xRNmuX&|FoVL)*yb(114gDzj{Ob`#!$$j4^!bkKvs7gjN|Dq9MNtUs`hu^zK3a44%>$1S_z2{1vVa7Cep}!Wk6Lefc z2AW430g+hqQ(+^b6#~ay??|9o&*;>`V##bK{s^*I~cAW1IGawL4V zUe|dhRGD^j$j$dQ5<0Lr*W~A+PB}LZ+~Et`%V~m%tFX7H2AQ_^FF#&NC3*1fqyYWu z@`Aqp6~oZ7vAjIO{pgVZcw{FjJIFCE&6>%Td90PXwp8X8yi;fU-0Y7Sjw;kEo_#ZpMV_o^xzC4BPt9c!V} z^w@^JO>riuhPW7&8`bEmKR38`zg5dwlYoO?RoRsQ{;YKm8{c-aO}Y)J!KR?llK9wI zhG8_#XW72)WFEBUqwzU*MTjk5^c=W&^wO=UY1{_ z=))ZFaP+Ch);rZ^tI!4x``q03!cr50i55`>&jmY6y^lUnzx|jvc8HX5l*`Ps4e=^} z@v4OuHFvGSl!968%WAwtJPAMePKBS9yNhAKj|vE*ZBMYe5BhZU(_-tfhI4|IKuy-yM(WMoJah?v3wR5`_-t(=sd8~1KKvr*RBd+Ca!gl4 z%@O=rjOP)E&NB*Hk=oy5)U?z%_~TVD&MLtbSk9>cs^01B7!vRc_us8#>&8Mh6x&rC z@AhY*2}X9?$s4W!IwzoxbhsS_Pk=zp2f#GhSyBBcm*u8f=?S7)BJCFld<9bhFJC_) z?na?=#Aegq5Wn;TiatXkmDlOhx8${ZQ~XrNEuwtWUtvQVEysdG~=LabWnM;Y4)U& z|0<$3juI^m8$4>-fp)R9#Ij zQzG1S{6oG0PyYkA%*ZRW7n729)aAlHwo%EX7lf9Y3Y5ILuQJY?02P11Dc^c}e!svK z737iB!3~+tq`T`H3##Cqdb4|Wh5PDBf`#Qq_Y;eWPCjl9y&0O(aI>cOZ$`H(sU zGh?XDO9jsYoVG!u@Y(HBgbMeSzv>-rTUab)og^i2$(2xneINxlNu(5Q5d1;&6Vf|` zR%D7ebD~d+lFB#I^V;#O*AMP0@dSyN6~=iIk8>JZ31x_2e_~g?eo=teToO+Hs?~n% z>W6pWCs(z{6brh>%{7~q4XTVQ-^{^WOC$jfE3 zUZ`XYeW|Xq!;e@^98(%0M3iTI9BfZlP8{F(U}GlVq4v>fXi8JR=+sCtO>UNZf7c!- zuzfKLIt{2T3>14XxK~?c-YCx3U|t`|NJ(+8)&B>5hW1j2&3!w*o%6fXZ)u;zP?C1a ztp1Tcxpn(|>!Qp4u$mO<7XrmIf7K3UC8FWlpU-V=_ympT)WQ~p-YvlmGx5|xTiPBu z*o%r+f7s10Nuecv4^FIu)QkkUA#)e?d_m(%K-cn)bKLlZ8pLFt=yb&A3LhU(&9C2^ ztM8Z>EUH#IfLm=|Xla`z$?*GpnW;NnPn7TVwTV9i(;~{VFF${oy_zbNY1l(z5%!p~ z2(G;TlE)v!x9qGuxvaPt3J6JM5fKKUy}&z+>tXd6PZ23~gBy00DyNZLuKKfb4FRxH zfWGFDtkw<28V6qDUxa&&N065sFFPPGDRJw0!7q+3VU2tFO%Lhz+qncOPn;qs7`zM8 zoslo7CeNdvrOUR{PTo~c&AE@<$cV_J&s#5cf6E-veeHv?xo$j*{d3dz8#5!o z%>g_jWHrielUlO;PFkS!8=m7NYKOBPRufG@f zD$*Q+fUSOrIm@(Vi*# zAV}SjyR0g7yjXr?)wT5IeCH9R!-|vcX?6qh#3tP^+geC<$asZ+KKbLT7(6jNQ~IeN zXZ1BfXL~ZAtQl%jJd6jW=mguOK695i@B=$X6Ry((pf`c-aP0h%_*9Q**J5!MHR|NCli&%p%Cy%;`Txr2(j z(t3`h&m=JyUrrMw4tK%(N!GqH2S2Tya3hE#DDHd_!s>>QaGEU3btPvD_>l**3%m|A zu~FzrtX&3_Ggs{D%vXD!2`dt2uGEhZj4pO?D*Jol>gI1W@X0^lkSY&;tTx>M@IRG@KP zftYwLXq>@QxSqo@Cp_P9n;W?ussH17x43I3kYOX>G62An3{P z$VBfy*FDIUN5kJJbLfW4D$!ORx*Shz4D|5~pvkDiT$!dG^FMw2ppdN+<~qs~@XJcS z+FJRF2BoERklMA>E=5Z7EJ68DpW~-5yafJV^wPc;fiNx@#k>_(@+nU_v%!MWoIz1!uPhOVqiiu^4p z{P)Ui1^O8w$PwaX=3jNqMfEkVB8NW(*X4iSCTlBkw)%7<@9r7cck}l7hogd=^RM&o z3A)6GuU%eL?>-7L*&L_U0n!EzK+ z82^HrmGCeYPC|WeO;RD`YNJZb zHW0>myTK>61Id%H-0jGcu?n_`Dg<0`?AOi%8nU314;gvwuojcTTx4$*9XmU`?_>;| z$tqJ=sy)e@eeE(vQGJzCDB9;mmj8$O!?0gN!@s}6(!D&MUaT0ldL}>C@OW@1#&Z?P z8O{!LZGVGe;6K7~7_iD#jnK?Y&gq87x5}j*!?jvJWrZs43tR-;ljc*Du__9Z{S(8sM2{9q4}joE)yHY>$}JFTd{# z5;vw*UN#y#%FH;uV-dni=vi3lMex7N|Lj1JII!p(1vurmH3UYg;dm1zV$Z)x0Akxn znP4BT1g@4jNYWesPZ&@#J=?!RGS;BB5A8kImJlJkU0-P3b(p>lceitDc17N!l!30& zHNJC50yzk2ap>QVcNi{Dk{QHOB&4E+e3|eQ?F;k*4h0MmNB}c$%-!ar`Usz=8b5Mgxx0)7us#K^&lo#psFJoY{^;-DL z7--DSc@ff~{w8b%nKHkw)$u<3C7r1vhc-vPn?nRC0{BXBvSy~8<#@eu zok6WmNG!IxPn1Hfe3^4Fw{A0$6%WQih=o_YI`rBJsop!o?t+s`HqjOerN|p$jlmQj zLEjw#)Z7DAQTLtg%#L%vv)c=P-G-t8b!~RoM}Yn9Ge?$?lE_>v%3Q9`X&2L?*AXRY z4h4n>%o}-x>97B@o%W`+k#nQ%Xb{7EUCn#p3|z0@lqY}nmp9hapQZa}xjx_0F)O&b z(%^Mvh^*u9Ik}tCPfljlWQ~{0taq1a-^_j9gb{CZP7Y)NDd(4(_qo_)x4&}+y=ZNW z4VC+oo6hS^@pCjFy9ASR4>nYdjMPnlhHM#nP*~=gjIOMsQt7%U{X1GYK+Km>9?*PZ z2LU9#+7ZcIW*6GcDfdqkX362|BP+bT5MvS)B&bm5h;rWMEJ@f5kQT~*g7AgKMYC&* 
zJHEVW%r6`N_b<2l5d13nHCsqkxs?3KZ6*d=hO0M3{VtSaf*=YV&*ka{;R?-Rdj{KZ;-W676)VYh!+NDUy0%JCgjVyy7%YK=V&+ z+qp@tKy`VqFc_+u0G^>=qFT5q)v|aVy?NOeG+KEMC0T>M$!p*}7i*ctVF@zvU$=#9 zWhmxej1Sh3eTz6!zvk^n zI`q9eHAqA+-ssgYv)x&OtT=BFc$S>qvu1Gok)bZmS#?-{(SN*Wtt03?ceYobHr{77 zF9*GbG5L_j{SLMG7WTjJ8UeK=Hh-imqI;CSQQsQkCHqDM-o3}ocx1e`Z-jjvdvI~8T5483_u?Yt_C zy8JYc`fvp#s~8ckh_>M6HqJ&d=gm^u_m&g~*&8;8CD7MkK0FX*Fn0a9*_P>-4Z1XU zPB`4Qo;`1K?72zliie!_8aBh7I=uQ!TUD>m3kK{)xHhbp#}`_C{jS;s|8S0sXl-k; zmN_ku4=e&PNb4+GM}6OsJ^ERP-Ckihxp_ z!8hKaxses^J_=n%sMB4Y)@gH15;+h{oxi?rTLs4kv2zT=Dx;bI9tZEiO}_p6^myn!g|bws!4!w#&M$@pfp+#jZ= zADK05m-(GmaypwQdA|#tg{-dC*9_?^^11M-%>lCL8N!Y#?kI3{rDrxKE8ykbY{RYf z;@n3^hcpfL&p~1>vrryU!2U`x$@;^7>f$_ue`Gh8qep&>&v}zXH4<#(d&l$lEPLH< zj(8OFuiE*eXMyIY4TXJo$B~lqG28Gah_}f0jf>(6)iA0w`drTNDx#BJ&c?Pa-gRZ9 zxjqjLToz#yXRjQdf^(kI{6d?^0S)+cVT94-wZHCWUn19F$+UG>fS+6nX=Ks+Iu541Dkms%IuC4GGEPzT_)YkqZBH2k}+#X;nBi>r@Se zG8j}%oj3aZ{Es^57`U)wGss}Cc6<;q_o-By@iUXMLluy{zVH<0&Uq!S*4~54U zGt_c=WuR|v=g2Yz<+%R6>$CYE9su`B@`ZxSoxeH{HCDweRGvfjBS;|vX?FpCrH)E^ z>(50Sk*r24aWh}tMZ~Nkx@fR6WF{&qCBO_-yF$+EN6RQw*O{BAV{x?>J; zyJ+nz`GB!7Yc4)jw`nans-lS&8n1!fNTTgiaXk$Q34TWq;1$G3KFIEPC#vft^mGW8 zXIeJmCw{}aqLoHYd&ytSGMDn@r{h{NZW;2JWNZBcL^F`L9 zfBNx3tK~l~x8O%Mdmfk-r>x1U|DXV`NvfdsE)rJx5FILz@{qyS?~F3X_4}Wn(a{}; z-Ryev+Q6KK{zb3Tn$13V@GAxWL+HD!I0rMClxS%eFzSYzk7DR%crejbQS-hoY;Ar= zGufHn+s1IU#}EYE{kSz14jD9w*JF`1^t3uTA^J|SqF#B3`-KZ5Pw6()9)8lg7c`!W z_S5&ZQ&VLX7n@F&xGd*`b4DBEWK9f|-tKIgxySPuVN)1ro zd*gterhVXYCUwF?s8Bie-eW>GwW!&sF<4;rjia`3x!`8qcWHe9DgvZu(_x|I3|kVBN2 z3Whb?pda6_vITd}86AXvY%$;{SL{8fy{L_ajhsFK*5oKRUpzXIEESxCCF&sS%408x z%JYLf?d3TROmqUi_1ix0#&4u`Pi5wp3wqs{%31BP-baLMO%xr5yIzz(X(ON7NPjEz zNpog*LJC$%CVm$2@Y7rGteM@8H*)P8w?V+g%XQm7N@&6}LP&}PRx7CPnBTTXS!i(w z$Go)4?vz8%YLt^-(c6iJ&IL)y1?nE?uFZs}e)UxnJyyS+^6j-D>ue1_|GK%ogrSCE zqjSL&Rhg56t&U~m;MQ*Bs@UbDD;GOHCSHl70Yhl}{3{8WK?H*eS=sRn*+-jvdb+~! zeWFJD(N-+&#U1(Op5_s{q%Rm^O6@n~zAYee8(y>F;5>t3|2>U}xEo0w)H}J4u->DkA$c!y;AJoz znQ=?)!$^0qU zdYz|;u2;m$M~|d)uP}SSllQv8L^d#%fK95ealD6q330oUl&4l`<;j0*6MqGqhMa82 z#D7eU6@DC>HwhrMo(W=1c~sGZ0b}2$DCfb?Ka6F((9as>F>{SBb!;Mvh*I`8CFT!? 
[remaining binary patch data omitted]

diff --git a/datahub-web-react/src/images/redshiftlogo.png b/datahub-web-react/src/images/redshiftlogo.png
index 5d7b37930be997e4ff21126cd511413e038d156e..30f7ed4a88c9a5b7fbe13a1cf75c640b4c057b07 100644
GIT binary patch
literal 13749
[binary patch data omitted]

literal 152085
[binary patch data omitted]
zC#iTnQyfpNXO)Cl6Nj8AL>}mjU~SFo{9>}+ysBoFsu4ml$UWl;(WPk)OAsRW)nGAg z>#7d^vXmml01n%{TK5D_CXd1a!U3qy}FU~5K*%stacUu~- z&U4QqeBkpxTNsl{;C6ELquk5y;$OhPBx$aw7zsKeLwcDo>fCvS={yry4B>CO6&C!- zead6?hQ{!-XU|Rh9m&F9kLuiJCq0ClD{Z>G_Z*l-gEA^Ezg$+qq)P@=&ochqwjMc* z0}LVJ>$#{SJO-ybGn?4e7$J1~bQ$2Whpd>-(9;^8Gk7>B3iGW~T2jGNFCloDQ&3!% z@+!Jc#Fa${bh_jr+~)EHKJdMp!H>2!zuy{T!}1M?#s8z`d5@$(b_C93m*m$bMPkru zQbUoZu=vCLP#t;J^F0p^7AY;=$}3j}vuRP9k=|GE9V~W5@{I=&XI>4V1o;`}9~^`{ zvHYX@r=0nv1R%_wefSrh&f@RZk@t)PR_Y$4YzK5|P4RICDY?3u&7-Q>(Vl_y4LP0f z-pPScp=s^9MKwO7V)NS(KKp4N&+%x2=V7sq<+%bktgh z=hi(e6XNIlgm~Z#0n{z6j|ZrxQQ`9K3S;;x_7fXSYsd_4(-b{c{>r*h#3r((xchP( zchKrJmq+UYlpPD&?`^}ry9Nd4n@hW)zX{pTM;@iag%+|fzjT&8%=}A$GUs-?5sEHl z=6}xNxaB7ZnI_i9{qu;?f&%#=$>ZC80Q{uiggUAeUZ|58?AK`foLG@_ zTlIadd8qZn?@*7tLncxR&Hyn3zMS(Rm(r~mF3p$JBx&xN;XiF)B-Vm9Pab5vx*3-D z5ElursYu`5B4pD)VEb1VK|3hb9_usj`<(|hpfNaNVLs|4c0(bVzCf2>^`yq6R@N`r zhB-MfDp`g$f^KYpeW}LYDZYk2P21ukZtxF_-W=W_-HJBAohFZwJfzt_OPJY%$~9`P z?uc5-L7BDXmT)8CqG4~A3X8eXH)}L?zp+NM?vApx`2_Sko~*24TN+*1{7%~F!EFcC zbSdrWHVQN|jUtDL`5%}o#qxk$SDIAm&oOAnj6qSf#R;ZN*%SUaJkz6-)RjWBcl@eL zQbJ0Hl|}?6q$n&+uCh~lyfqG5nDA1MUkVaLMDQ$J@+|lJcn6)$&qS-XnO>FkBYVR% z9WiU;tMwQxpbfrIp-jW{S1m&NhnPRfD#-~=Rt88eSmcu6tJ6i?2Q+lAnv_x_b@gm# z!#J!TtO#D-x~kO8AGC5B9daYaM7M@Np=tJY=MwS4q|e1JHLu96yAdy<8}G&Y zhI}8d8-XSY^Rs*WM@p=K+?04Jd^-I?(bol?x!ef2PBLz!npa^mnM`bs399Xy6+Ju7chr-4 zYf+)L?|L|gm@fzg1E|3Z}RA+z)Z~b)y_iQy%kTFAWHqR z>}qkD!0Bi5>AP4o|NYrm@Pg>&x1M@40-e6YsHWA9qZ0TS)a>*;x^WLQ-HVb+5=Ty_ zK~AWS7lAj0&hu2hU5WkCQl=dl{PgdSVs{aU(itJI(?UPoX^W-{=v2sMbq!T{9&CpB#A zaDpNi$)Yd{=$ijI(qckKx$0lO=Uy*%MX*AZP1!s`2s@=pB}WVW80Jk?9Jgq;j5AadMhP7rx$QL4QJo8!6PN8`yg^eNaiLr zP-VUl#=VHU0fFGiC}uQ+Q!K~e7pJM62a?(lVff8~JAMlCScBJ8<(9x$GSH-dmQ4a~)=xua%vKbYeEK4EOBKP8q{5{ObH`gniO%6x5)ux-J$S8sBvrQtCPaove|_XO+j-9 zd&>i1SvRdqI;2+mo~zHAgKO+MiDUdU%x^R*nE@n^v~T;;+=CvzVFjIXeKJbZj>Kpb zG+_v&gC3a=D2I&cc)Ql#mw3CFtW3>H z2=HA@+cJWfyYiL!+7scuP`5$k4iq@(4XaVXg+|-sQg9bX%gdnUlcHyhD)8vtRpb)~ zilczAcyryU0F5_H8o%0q`8bc1fi%Dmw}xqctVx!vyL)O#Q~4?=eT1nB<-5rH*f?Z8 zY~m|2N{br&w?$HQwA$fNVYv892c;9U&ypewyiM$L(*dQO<4?)ehpgE3)qIkk-QG;f zCDr}aoTy0QR!CJKUflBlC=2iU`>p!g?{&}j7B0uIW~!6S(yHU}z_ZTU-7x(i!O~j3 z?@p>qel*Pt5Al^BJ{d1c8ZCwW)Vb$8xPq725me>Wcj_Z0xP)DT0++nc@@}o9AWq4e zI+$HRvtLUauyDo_CC=~xq0%P#e+f zFwee17MdKy+sRwMjp3`&F_Ajowp+OusTuK-%QD&n^to#84^f$sUJHhZ#@qih7C~3WYjBKgawpw;`6X^ty!?wD7uL+zH#9MkdiNVZ(-TYddr`)0(Un8zpD^oevpV zb2{Q>N6JG~&0|Yv*X4@XWKz1ksN7FTyC2EtzU?5x(9xU52fzmJ(2&8x`4csNPlzoB zD@ja^k5Xjd^`(uSI+<4YKOe+*My0=pUfiuI5#&R-tudARd47mEu<(Uu&kxM&^_QFZQFYl{doYG5jMoJLWX88hg zHuF<%f1;MMmF8j`*J9jw1v!)7sOg{FTpxmM+V@fJDXn?{YlH~5vnjycT5<6#>2n&j zJuiWKGTp=veLVSQ6c#sxeS+&tR>bYqrcuthomX-zL8AN7L_J? 
z`HT53oYopp=p>H5ekWM_^2!Z0%U6w3-rQvfEQbmPI99aooHQ7Lt_W)`bNh23q{;=H zs5}Rx#2?b5l9QTJ&949yKz>UrXf)KE?_90&pF_Yr&SP)LXx2yLk%5&y#(s|%8fbs- zQXDm2vzaSn-Tjh9AL2pni0!d?=2{v5a*Rz~`MF4vPaT3HVE%80Q+`1b$aRM$gc;pv zL2~d7u-DMvF?Y6s7?%Zp{U>o*M1JwsSiE}MzF2*h8#XA8EI~K+Z%6U&LG!Fgiir-# zaSd`gz#+`|ZHxYlW;@`#vipo@28^x*=z<#Caz<#eF$K#JB31ZQnx&I~42WL*hvo~3iOEcw0O2QMx0@$-c>HtBSkytV zZcvo7&OhK5?>Ys7kGKnI$AbD3ObM@^lD{?CrJX^OiO_7KtJ#3HU zcBQvuGijH_vwdpg73n}rRpa*>CU!2I-^s4E%npOoH@!$)5 z%P+DN5xOW#zK1{?BLZWZ1V(65up__QkCZ`^DlMJbkRpjK?0LL7uEbyN-ZbjRbhcL#+aYBG2{saRvmU2jk){{uKJ5FH9@xoS1!0~m(|ub z#`E!7T9(uwHg@pka)s%6 z+xHj2AeCn%7JFeOo5tkU=RPQW$wckBqre?bO7do7tGVO{_pHL#V{FQW*;AAGR#HXT zLYVfhH$S?r{t||FH8%;0-u&Z$D|E}FfzLOr+<^hp+?BaG-#eQU?6XkDG(*4qdc&XQ zT{5Kf>!m+=TD8Uk%_d`kCwK-4p~>fEH(h3TDu>%pC&14E2u)xe)|nV8e_+P(?!ZQ& z{~edwTtAHA6YFT1Pv4&}PrtrKg1ggzZ^NWRf2z6L2Ktm|H#{&X@6Y{BdJG_t2;^P)-x*>Nn{S!jv4GUXhUoxK6-1tFlg<8cwqcjeQq7a|G_`=H~cZ z-Z|oQsKo6KQPU~iCQFaq=W!+uhUYTlvN(&I^gc)Ppgp7}c35+^vUafXAbjI+YiHi1 zDhLkoNz-8?kgEW~g{nN2U8JiLEbeCh0;ZbA_++qMSnRwCX#MWNiv^&`#;32V(poh! zfsrJ{Zfl>Vj`a*k4YDS_)f*OvhkqFG&(UVx7-WdJF`(2jiHUYA2Z^{0o727Urc<(F z>ST%wq0zb!p{GRR&Y4}41UNbKto%$uFGt@ld-ib4)jAKb zGz3fi*QiM?uCEwXK0H;{NuOI&aj4d0KP0h5|O zpL$0m2IFTIumch+L}tw<;xzcL+k zNuX%|J{4kL*n-U`mcT0ixO!Qe;{4|Wp0RdvX$k^(@)+zY_Mza#Zw{s!f7uTj@9iju zx+Ct>YqY2l+EV*ylrI*I%rnGU!cAIGie?VlAY~Ir45d&*2~iBOMJy>Q(&$+l*;;0gi|mj*;V{y1_Li zrAfl*l$9m+C{N7$+WnN{iHSlE@2{Vt6)blP!2BD3j+ld7b-u@QU9maYI9mo_Ix;n- z$Y>tCpW80h*mjLI(J)a^ggoxdW_Tss_XO6@&n!Xi80rA~bM*IZ(VITRg) zalHFRn;L3A?`lU%`M_;vz{K&ZUImAtyOt|c-pdBqhtIcRV|Ooq5B*L1IsbnbKtjTt z2ZsnEQq!eKC}g{9^f{1=gZ&3nGfFV149}CkMfsi3=!3A*XazPLtp>Jt&%rPkQb4J% zn~+LU;a-NIfx~l-mVxVM@%u7n^1v;*&&KCeb8UOY8BOEY+shly=Jp#BSl&Yi*_ijQ_Ov<9 ziIsy&CJ|ezgRi&EsvX><)(4m$-o1*9QtAgF>tS-FBZ#Gj>c|1IvHufD_%7>)n<4K6g{^<6b_vBI$ zXq7b||A1NO^~BwZ3!btdj;#V76bjz1yc^HJ4tBgZGyJP6=3D_l5i9s@JMN}6%CExI$Ve)YEuI*Sr&FFfIrhAzOG z8gr<>;}bWqt^fY|0OX(k-A`@rdM&?^_uQ^)pK@Zm;VbH=-l?TR_9@AP)pqOD(l@!O zdNxp+8X_)-!@x$%Lm1a!U;1KP`}blJZl~_-pJ}DP<_=*(g7P=oxq1~|8Ld8ri)X|P zh31paJ{9FM7Ea!RW%QLay(+DRn{JsEUG@yBDWZn6Y-AIIs~9msL@dthOu!OkeU{*K8MD0Q%uCe!cqt@n3zo z{?urH9=f@xFB#CG?v;qtZJWz5(z!Ah&BB%X6Q5BPk*F_CRcJ9KzI`@ZRn%#0oGKnM zj*qP$I>`2!>XCKwkS6ntv59A?fB1=4rz~>J!&`yE)`*70jgi>;&n1n_J0=)8`-V+C z736m^TxT$eLC2VPOn5yykJ8hGz7iw*YD51q^*eMFP!TQwySOS&SC8l#`h`ZPA$-Z_ zm;R?)>)XZsZMwFe{r(l-ak2A+BV?5)Q`b~kyRV1!j}0JI1tFE#rt|`1*KdF0z}A~C z__bo639FlI3Kb;&88vkRAjFplYRY4sOQ&OUVst@kkml0YN)#&6vZAHt@{bY3NIK?( zxNjXpkO`Pz^vHC~GFKvFnBuRozliP!fy=Q&-kQ2OpmSAJ{rAPhsoYpQh8s{sl`-#W z*}O`Wg`4_E0{RF~!sWz*QJ0%GRz+u5V<4z24GV5J0%;k>HG_wSPMxMKXZ^PoU?)bX zU1b}cT=L@t-`BpdV(EGWX>1MA|H5y$B9Uhu`9kK;{^Xx*pZZh2B~k<$2oiNiOFsq| zLZYd!?7{B#U&h5LljfupZp(hq=W#wwVczIswb*-E-dTT&TdExUhlIgj^Xw};T)RHkTG$Gbt`Gx+C~M|Q z4{%sDS?dpciDX)4`sf{yp6YA3Jxzk%)RgIGxhYPiC3=6_TjIktHyFn{bdc-{cl4XH8`vs!A?dj zXai?`f(@is2j{=So8Im&)b+Q?gYL?v1T1~d!{AV}?f1plnP(7KXa&Z{C~pdoA>MM9 zomK4;4;&L?0dLVvutVg7h8Un@Qwl782>f3@{Dcni_=PQN&GB=T&)5W@mVz}-Z7j4%(>7)Sg1n!#r|z{R0#I z9qwb*dHF?!n!pSi%_eq&V?emX@TW@+` z`<;J$$3UKKavs0Lu|AEZLf9LG)szR=WzYx zAYR;nSu~;dB_@!$*80g}pL2n4!tmg)xpFiG7)62Mt%BWu$AiUBP56^TmLb!2g^M>) zVpfbxDxAp552P+>6fb#<*BQiSu=1jdrG5^iLj{0X;xL9(M72|DwXk3N3;XFq9xWOPP1SA466b*{q#4(-vO z@K&Z}Zirnj>vshmUS1PWe^bde=X-+GZ)g-QHz{a>Nv^Z6-j zhDw@4mDMkoU?Mu&uHM0 zUJ0>;OEPPOionW8{Br(GPQU9)q7H2rTFq&zX$gP9SWdw`0Zo+mB<-%O+Mj9t95m@_P7{c;~ zJ$c7i(avj}Gy>!zu>E0XPMzspGBZLX5Fm;n}!3=EKTF>YOaC)*_>Mr?-TlKRn965R8 z0%QH{v~lVm@zmHsiu4T~Yj)`ZmITYxnsK!@D8P>TYf@`_CnkB7YpFjk5*eZ6QY1Oh z7X2fKG0NLmKiDg=(o2AK$+X{p^E-#;)CqTAuC4s4F;J=Wsp#8TBiA&{c1vet5~%SLWzg3vllmD?Ckc;LuYMnaj7-&)OW^m(s`! 
zPwj`sa4x2)!yIHWyz?u@@z!k}YZj2e;|oLDq2dhq)bjS4g|MjhrtL)yB8@GW9EvVe?H&YCM}2Od5s28qD%``Amo z@`A7qV~Y`1Y@55YZC74vfpY@|lxe617(o{7uia=&jRPahR04dpkhR@?~D8PgoUuV^Aytm-fB2xz+W%IWD+X(BI?!NL=wVKgSW zpbjQs(q`i66p$O*T)(6YTse023o5KMc^TvEf08Ra3MP2;adlB5xwvX>{#n1zVHe&* z&bC7q{U#7qT9o<;9sM_=?~Z37eFC;7<(p!U)LtfY{=!;@YLi2CZkQM-)>{P z@m*UvBrC2IrwSJROZ{o7_fgTVPci#j@}mz8yt!}Sc4`$kxN8-xQnRp92sHP-t@4Xv z-pZEzcEdqd=aml!|3iPE$?#ht!xS-2ulvX?zlC-E>=O@kpJnG-Ok;Vm&mO=}GZ>$zfb3VlRWnQk) zo%WH?=ig-66YRNCKf5OV-H(BVemq7TC2hg!v01U40~%dCsmCXw%zZ`_Ellge!1|zm zIC5E^c}IOX8>nX>P8=x`zUGjxV#OTIj9dyz zSodVHtHxRvyY7q~GV(jR?(6=8um9dx(zut5W+F-Vib?4izskhx5+jko>w*swuYAX* zneWJF28PgmIKs;-qr;a_8}Kzo2c*{UVe;jP8y{PZ(AKc#L(9}(0RA^lz$Pt|uL>6L zSg!Gom40P1=uj6b!iSdVv90bBfHKKx5)am2_h42%P0Dcn7oFRuuIRoT&S{6LJIX0pupaLvh1$%h0C&P<>1b5G??JzAH~E$ zLSx8*mgtvN1ZI$~$)*IxPIat>8%c|$>sP?I(Z}YMfDY(*xymHesyXq%JH+9BpeBD{ z>u6mGvPZq2@?C%3txW=~U+@K3YhlVHjwI?mh&IVA{p8+XtB%SChJOe3tH`b&2BPjg z#~%sFAFWCzX;^wGyYfwC)|pIBXl-DQuL0d~;oa#cgo~Wy@v84uWn0~)Yu3-wQmf0J zK+$b=Y@20o@XW6)jjOrxh+x_T>H4vP8T$7dXsGUWGsQ9yyD{QpKPN2bAJh!>44Zxq zMA5rjlh%MJ2+_|QS2oHA)wU!Rb#PUdu;pY6#HD!0q;XX5M!Vrha~bcveV`Sy6qWK2 z0wHhwM}I63s}}s7Q+O?flYYe$+wth{IY?^Zp6r@A(7C_@1gV@>VEtvabJ?geL`Uap z0uAT-5y%ssdQsDmfZXIdbNvRhH>t!U^TitOR9KeIh`Ean@s2{0ixw zX+{0~S05UuT+vi05f^ABU|F~tbP;a-odj$Fpff(dZwD0E4(K; zmqv$1qr=VP;uIq;Sq<3lKausu{wZ%QsaZ*TK6GIhw7U-aAi2Qd<*r<_$wXn95$E4IQJStdTo;60UK#0L=rD%c|k?Oh)BCYzUqqSpvM0B!XA{ zas`c+#wIKHGC3!{nx!bYj?s@_$qWs9!O!X94@{*E$;_zi79t*|(i?D%t#FJwobeMy zQ0WT|E=Tn^uvVQ?2n7MbQod{n5tkNza&R)@e06k|8kO6rC}!w}MpB2;f@6#DW>HQ> z5gwwM!cT;Yz3U%{a;^1?qcDSO$p>zB&J26~j?)6VVddB_Pn^Ra>9JwyH0WxZj* zhPH$r zxSH+tPsz%;)FSxJKVrM_O$ZC-lj`ojdbbat*a zXEg2Vrx%biLQvcZ~HnYvm^& zm*d^R2iq8(#7p-6a%c^OHI~ZExWgodj&;o5t z4(ls_g^NK1!62`nC=99lg43c!%i|jqJ*xq~>u0XA%Op&VQ~!*RewGEoFS<3bH96wp z?-yTo!S>bPaaHGn)0!N~e7P!5dM#^XQxc=1RRH?ect6M_Rlyu|Y!xMZVMX9RK(q@V zGMo(^_*r?*qVlY^)@uesNTlV0nNwgU&!_XwoickK;r4H=M)s5BuMD*bHo)D9`@JL{5-03gCDmD3c z``3wMZ zwbBgzBfci-GXdzr$CR~lFkwpIT0f`}t~obbms~|esN);a^|M;*A32jt029EA-+S78 z@wbm9IEAwqm3aSPh%b8f``_^ur;k7Y@$bwRgqCJ(BfDr;iWh~-8*8rslvDT`g(J$m zS!Tq;rIenIxplZn##p|^D8RJU*VH?GyMeK+4K$n*4%X4L=6TXun`UhG^WdK~0gzc6)ExUldj ztZ#GeJTu>f$s@e=&Uj-cG7Xyg{g4{^m9B0QRyN|;nUK-&)2^va(;d3SE&+0aFQ7u{3}k$%YQe?@b1t{%b4 z?gYMzK)1dA#zeRG2>r7b`6_f*f4G%C=YPhm^;@dzN0+%xF!fgg^w&im^|F83yKRA)!J0Aq)KfT9#ZEnL6i zWw(A|EI|22kH{@>)4q>5sLX@^gb!#BOAAb-44ha04_^Y;##( z0G-8v5ziI9fc>A(P^0kkUPe#=j4Wlnf z*xJ!T>CzwH5ia-AJ2f?nTN~s0@o3rc%m*clw7BNj^9Fp@-w|L%;5AcUV&6hB4{4@+*I><%6a!R%0!4&S*S@m%r`q4Qm06s(F^eA*3#S7nZutstX0E zP0)xVS3fXnsWUnfV(2#s<$$_WkB%p1UP|V;Haemft8yn2!W%OgCM?8$a>K4%8N*-3 zG{L!gHN%ygR*(8;I@yLc5fI%%^JzYn~xsWU5rn z)h5=_Ckfbz6Q>Cd&;Llt1N&{oEzm(;8z41c{9fiP_okGOF=O3TK^ipyA{ zc?*`&SJL!K=f_Vks2=#5AOc%obAD(mNrKxxaJB3*#K2rKu!lD}%Y#knGbZtFG)`EN zxnM)cFNQHfkCltLMl4!nBf6#dE4*C_96HMteE`Baz>1stl}Dg~gaNZcRxWQy5;wp| z(`)|dZ@h^7k&FC65cOM*8xUwm_0!Oc8>pEwrI+yJ! zOnqR>iY4(G3mF&yGl4DsU|Jd5*0x!bYHQ6((R1v|E7I0)+VSL&J|vH!&UR?C{A2VN zTp9?jYs|5ur2;e3!|;xgit9g@dd$3If|0Xt*u+yodSWMI*9;_cbc}h&gx6z%DT?&R z#LEt0UGf)peIZGVLm%2+^1YXC-~Pi_*7<|<(O=@D{-K|}?%vdBEuBLTs;;wp7+qr6 z*{W;sl0c(Kfx*hnSNUNFHmn^30rB?t2)0ATs}(pn>PD{eORBJ1Em7zX0Wq1^L<`rS z4h&AwbRjcHG16CACzZfm_kn(a26E_vQn|3y0{!EuCCnt^;$SH0D#WgFhrsSMv7cv#T+kD-8>7Hxke? 
zjB5rD4V^koX?|OqKtoyVsyO!^g0trD3}+ko*51GNx1YZ87Y%ihYq_Fs_Q}0-BMXe9 zW7bf`VOR;WKV=VgxBoIOPNB>XF5xqJt)IBr7j!{4(q|@mV*%{;g}Mo<+tAOluC;zJ zK5Koh6=U4)J_Re!IG@Bf@UZ^aaWj{{CK+v?sO&$M!B5;2Oo-z(^G#TI)u+j4VZCNh z`vFePhxLzQ3cfJaoIZL7q^J9F^3-|TD}Ln4?Hj-EQGVpH)Ipgur)Rn^^&hSYU+dp7 zOfrsB?>qavpoB5dby?67+ErsYT0hoc-TyYUQG>%oY_koX`!UFTDRW^Z?BM*b`f>MO zNI&bk%75&8vyVk^0c1Rgdo%fzY;qyn&98-ClC6=;aa3W*v^YsVT%M)uHD3# zZ!E0rDH!J?+I@;9c?xK5B`u8g;In>y8*gWyM3B{0b{)Q&jv`fwyvN*%0fEGYbjRHb zMb_L}g49}hL|}%&Lsfl+MV2heXUN1!MeCKXu!d){7qB)=IBp|8STjTmn_D*)k4Y8A3g{u&_!k_55&Ej3aZ)f5SZ-6aYgd((!HTaeO+PQtDO=OSnv|5h?;JAMq-nqg#W*HE%UC=r(|7tiL%_ z|CoD6S<0k|owI(gonXl&v+Gar5?*kyYb-B zRebtQpi41ot~tpuxDnPq!o^Z+{gOx}u%cZqc7_-(Y0_XCe9}=I1Jb(5TgCxVyft;^ z$yc$eqjNNQ$D`jD*__CEbw|G{X-pZDH@yPU-cWnQh;U*Afbp@rUi@uWZO?t_<=e0Q zqmOUD|BJUMl`~0s@3AEe1?q9{zbc#cqfE8YTlr+Do7ar=d#ztjY}p)elq-zFdY%51 zZQ(KxG2I<)CXjuf)ZeBU=k_p{satpJmymK)Z1!lxm(MjG_QEc+p5MbR@p{)l=%}>)86mT>x@%b>7sZnC4!8OYg`hsnkjzOu+nE zFFv`w{0FYwu6~SOI7BUZ&NAvoICZNI`(!FL)|9az-56a=@D#j2sma>%~}ArN%{m zRo{no(a#POxsP_dc7GuVr+CyDq831lvZl_pG%$ObD2*u~0-7$ly!74rYaT4wS!x|4 zd0T(y4rU-KV4TZUzthZk>S+#@)_rsRmZ2WWx!k15*=8Peu6d0gXEow5cf4Y2KKCfK zAq$YPjA+WW*8jMtUbubB4_>)F*xIV*!r?iF)2 z#0;7b9)d!?)Q@qVp8_;S+Xr3mBU9yKa&RdJwgN~(O-J5?TFbM2kRtkqx~C zEWvFX`W5a1eEzLNafH{KqgRgo`9GnbkoB z{a%Q-tTwPFc*Z_AdJKL<4%|4vJWvR4ZXbnO$-qji%Mmeh{yhZ(JE~lXK#w0#Qxix> z*NAXk{p6Z;i;=hA(>Hs=qqGm;7yh0|b%&l+U+C#|H=L|bS6#L}=W9-V+TUT7jvqJb zwACi9e^11FURv0^(vs&0>oZGsosUA8f8pp zlNuBmc>}K*x4I->^TIgHj?d8j8PC6D`>W4?+V;`+pWc4!|NO-Ew%@&dd#HYQUE1J2 zm7p@LpBkWk^iBOPfVC^?V(1gGwG4t%+&!Nw9LiG@oihONR1mmb8^{SBj=Jf*I#@QJ zU!r}1YoJHgIEkreAm_lyy7^d&pl+B8YhAVI%QhCyq;txOkcJIy7{`@fP}!`XAi}2p zih)bk5u9|=g-g1UaEV_crv3$|+&lxfwB$jZ zh&OFd9^x+&a&;5D8?_Q$VQHIvLUk1qS|VqmB3W*Zheq**ZEd}Wb zIi-oP=29I5LA=`A6i&2?y9{nHT3pjECxT4m5rTSfrT$6*!J^;w%Z{%uY0}JAq`?`@ zV~$NY=LD8gWNA`m@44U)bGz@J2e%6^n%|lIFz_SpC*5#r`?vq*Q?{EwcJKE3U%q8~ zet?PW2GN=z0HccA2dP5;rHA<#c%WyCsNG#i$j8x!JN7S0BHoomQuKyto0iyc+d$FmgHhDpwAE3 z@%KP%RCssF#Q|yRk)bCvq_G)^CkdCu^{NJh?Bb(TQz^QhHYXa?Pm0m8>KqZq0d? z`GXHWwEfg8-@o1Tp>zC`Z)f4U=DL&HxBuW1xBv8KzG(aYzxCAZi@)kJeV0u_B#olF zSBzBBk~Omb!%ra2vN3QeylXw&OZ@wAvv>P&*HuxNnMsNsj(n*3cb^QZVE@$;9j=~JCr37$i*Y7x;*{X%{5U>3pdGnGd zxiU&yOTNBdEMD;+T)DmSr=PU_*&n(3F?sR0`}PO7pZ_~I*GIQYUq(aDFp=KF`u!x# zzH@2RO${uI>)&O`2BxoPs6U+Uml?1To=_&(vVSZ<;vY5w99~x9YaI>Xl*jays2oO0NI}UyYkR%6B^K?!z zW(+1{^04L_eBmz+T&p0;rHMd-hbL=j_At1KpiwB8a=f@lKbxn|siBY|@$RNF!v zJbYw7_y~Z%5c$+e`Z6&1D|YCHgNX34;F2y4UfWoA1BQO}jL>xa`h6sltot~HNx!%qilbd2+_IraEO9>}^495F2FA~TL? 
zIiZP=@|F)Tm~Jd%{}Tgw3!)-xnno~Y6NKyAiW~C)IQ(XGn`n94O$4}RyCfI`|x4l6>RrfTlUN!^&ceu$DvObs-HlAY@P_NeEfym*Z-w!_4cWHF?mP5 zki7MGZ`*EsM}Nda1JU2rkBX`gJUQ5|p?ldi5=Yk*O&}2>a35Hj-O=;zEIA|U9r@6* zjO@w0nyLIw(Y~0`Qgdmki7P@~PpiJ>XYbfv_Y3u%e)W+@ z`qgsII=E-_u3+7M23q6(cE4*OjEpSs+c@h#BoJ7E%#w>(BkgyeysI5iXkTn6d6i31 znfctpqx`@HL~q$@9xmm|xoA9CbI<)Fe7#y-@dZmR5cJric`VZVUiR|oD;3EZEd-TK zFCt=Ttz{Ek`-VD58jEdA5)L87ddAC*K>fNQq#|5YNJ15h zx|FZpC2TbfWRP^I>T14nBeBbt-H{7|sr@j^VeSdi8cGqpZr{}$J_i9lMcb_t$Cu99SW|w-Ww}>xOC=7!d39Ioqtf|$bKr!Nr~QF_*8#Q z+=p^w6iMq6TZaX|?tgQqevq7cX@&^DZjgF^Whj=Xk)GT4r^zcpDZD@9bSd@ zCYd1hH>>hwBXvf_T{n%zT)fBn|&yXx1w-|)gq&w=HUL9V#w!i^tJ^1jphxsvz3>8|a@x8J?p ze?R@`^U%5|44Vo>Xk8k5&fe&}u`ZmyjaE~IfF7KRcD!m=2>Ec(dH13X0-CmSsAFZ( z|GE_kG2r<}p6ov!=~52S3!hvb(Mvd4xL8+It)&@L=3p8JdZ*ul9K(}C0Ao6}Kxsdq z;*kJMm9x&&bzHf zbUl(wHCK%K*MTiPyfDgCxBT#<&jxrA3=PJHpjLL*m|bO$h%3##FW$>!X@-h(j*b&$ z-86dtRWEA@4)zaV;*Wd&lCLmGU_Q=26)A@`I^oo0UdeA<@1m#y692=B)Xy$oW@lTb@*iLf|=`qkV63<7g%m}DZYr)AQ|g6{LHmW7L8pc$tHqY zH=f4#HQcOOP{f#5zQ!rSo$ow$IA`l8T);%JNyepqrKs*=2CA9j$ko;wG~=cj;JNYP zlD88mujbKgu2mm+WnAjF?d*4Ssh5>R{qLMSslV!Pd|-R_i!a+=QC}Q?^0O~KJJF-` zQty+$=wiKnz<2)fCr%RDC z#cTgYcvY@VC_nx0`iE^Hz$X@NnN@U8y=O&UYo2_$#{MsJ^|@m-Gy(DTzruSj#8B&* zK=YRGc{CLfvE00Wjbd;TDG=2lkZJJqXN{loochzTUwn~XEUtg{$?c*`9{qRb75h0~ zw|(ON?KQ8yV|(jw-nH>f9qLh{m-=IRxG^DYK>DZsp%>Viz^(C0{l_j$5bmeo>UExT zLa>}3Z$Rbiy=chH7A@WYr2!MM6%PfB{;0qN!yRn&mxC9PT+Cb}@ofd0MPZpbB!KBx zY=s#UzX(KzA8l!%X5j`=eqqYC>T-;tOkNv6Y<~n&Hc_@X5!J2pXi^$>2SV=ogk{BW zS5QR2Nk_&N@wU4u4H&qTXY88ukSjSu4}~NjG(@=arMbbkR6L2aekyI#n$yj9wB6mH z8BPPL@y<2c*9nzB+7z=Oh!GAOmjFG=a1J+bDjK3+lg`Ocod5kdzJGiE*ImAS%lAKF zz|C~WKCW5k4J)dhJcwL`$d{w*C$}d){bYTVaZ)cB*Ia+XHLtv%;rSi6 zJh1)V&)>fN;jiAgJ#b$o>-vZOukW z^Y-j7zjXWhzxcTAi@)-+N0{i*!>_0>>OcQwyiJa~Zht^8BsYEFp87WAd$y0&_{opn zx7~ePeeS%j>^bFiHQg)LYv1NM%l?fBno$JqMfT5op}xaY|LB@V<#G@4v$2ZyR9xwB z+P;qH?Co!@6%U{<=d8tU-&=pd*7JX1z0P0rqzmgiqfTy*ukVgxgxh3k#m*5{trJ@4M-w@o$*bgZ_XQ^>PR=}J~eqs`38`{SSj1$%C*BnGD7H$w$ zGjxww>~)#DeJunt1ZTLW)^4Jr3oPBqau{EByBcV}@^rxpPtN35nDgM8hhhLVu_hl3 zYD&0KkVFrCn3-mug0b&x_0KxeC&y&poHO~N^DHB&%e^X_`QvQ968zr!B0FzSs9$OF zvhTfid(k&txt%=q+4*$GS_QwG>zOaOL~pD8^j!~Zx6}*C%^$0`8gIF&#*frnjkkaD ze!sWNCRJ&i-`O|1;Onq{f22{qs$UP<;Nan+O+5IT!tq{k_v6wJCgq#$RCp6qxMPT) zx*C$99sDNS%dS3uyZVV2ZdYH&+liXjTfJypRpX^s7T|N_dhmgVw)eg1biHugQD5V_ zm&VsM`y-jPq<&oLpWSh3Jl9+HPZV|IamK~C_H{ge$-962&h*T9bVQdux`$Zta4-71 zK3r=~<__a|g_;vAWZg>D;=teXHTsO$Vw@QU5BQ4LI>yeUylwMf9_B%T6#s2vL)BY>%IP;|9cr-NU^ui0! zJ$F4+-%Rk(2KJu29@Kdm`;YYQ1yk*lo1*s<*b4U#uc3fOe6WfvfVS+R?V^j%udh4Q zrmMCYXv8&mVUcB_bnc0R0lC=i zD@%ejX>3xsf$3%ouv41?*)jEZcqw!J#&q|mv;6094WY(n56KVJ5jqTA2BG)VcWu4l zS8mbU6Q6!+`-*S9a(m%7T)AEKgwNc!dY(g7XM~)(^n#7I$3NxVf5pbV^>Z@RdC#2> zZui~$(8hbz57u{zJy4%5W8{m!58PjK?0olW^@WcRE~wvI&9_2dP>&dmH9wzu{khf? 
zC;ei<3&+WeIX6c-ea0F-mmR08NALdqySKOe#+~(z5dG0cA0$2bEaLrS3WEW0*Iy< z%9Iy2Yz-IpT)|$RB^-;_BG6K?O_0a$a1>18`1?~;&b^X)FR&b`fApyc#{8fIivaas z^|v4iajtq;kfGj~)JOxM;$JS=N`)jzjj{4qTvBJ0y~4vW;^Bq|F?Ef!a7KXag{j8G zYFDiea=~$^No5qD`YM9i$oNV?CC3Q?nl$t%R5^-cKw5WYvwpdW9-1^karkQLet_wG z^11$|R%uS;yt<>`9;G!tzZ40#R(UQ5D>?&MERwkKE3_&Pv5jX^|_a9 zFL>GI+Y5LxshNx40?88_N)8QdPt|K5+|-O%HtbWgPg?Ae-l&RvH=3ky)mN-Y64h+Vl)nX1RG&{ z;v3~*2%Q8ehS90H<2PpIX=yu3a)O~K`5P2X@e$n7z!ws#F ziS2kiEkJ{VmEUDKALi7LbBQL4);s;sHrKffDyMa<_?3g4(hnf@w~aN}=(56x>b+0C z75A>~m;RTJZ%=*BsqHymUBAlvt1ho^2d-b|y>LMJh3oUL1AKkyy?=0ed*2&RZy$QA zUo@z~ds`>s-~f&DUk8OVg4^ep$7aW2)M2v^KCb<#x9v>#G`X|wLy+g-pH0!0@muwTDGCTwCmToYx4oGSH~9X#3(WAT`r?Z9s9slfjO9?*3n>Z?Cv8Kk<<+gLu*@qSVwx~1twv}FPaoBmu4`v zey02Sr3+Z)N(-#4)+InL-O5|BV(eTUW(=C5A^Hb7&U3s}bN5E`iYwksVPUT8p#IpU zv2OZ%90gPrXe-+v|53e|yt{tF_-8($FYfYU!gp~!>qVE==k>?;WORZr44;b~;0H`^ ze8)Z8``>hT{m|#>`bLHOb-oX^=5ferI`;8f=VAS{H}N`WeSUrZc^)XM&UP`i?=aVK zYoEh@ybXY?TA523D=%YT4cR?^`4sOTzojM55!UDUWZ#Aj7#>^%1wU^X* zT>U%;j9!SlpCd@E9@PkjL612!&gzt3UI6HG{S_Q*!i%o_N*Dcv%z3RMMd4xS?0Vi- zK9eY)lC(~I`h!wwP88^gTx@mI*g_m_7uH5W_<_dCI71nTNA@7gi4}Ggu?!5HmE#owq*l z|FZXPG1_P8Ro~k^@oO&k1?1-Fb0zZ;@}ViPK<;USxH1Bf{=m~as?OU zB9W3P63PXNa)Fd12qhq4B`A@V5JQ4x?8G)Pwz0vE&)CM}@yzsi&OO~dS--WOwfBDC zs;|CkV}q3VUseCT_w%f0t@Z5vf2;cYzOE|$$oCIizUNy$K;-fnpZnb93x30!FTYM- zO!&X5UeG@l|LXNmeh2tBpMRh)82muPpZvQY)9-zG$-nQ#2P=E`zW#4)@8{nSma~5C z`HMrqd761X=Jlut#ZR3}cHIs26PJ0;c>T{ax{zebmz-bY6Y}$HtPL1TDph9n!jH5Q zirqZ_E}sGBo=N}!KmbWZK~x&%=J)d-5p%gl9S)^2E_Bu!-V2q?jyRiJFy6yvF{I0U znP!2Gf+GV$AK7j5j;|^AdBOuO-h{OWs=_iQi5moFM7@G zqmN`ttgdmg<+(ES>gLxPkv{)CXrbH>(^bX&3zwkjXL3Bjbw!rv zIvCL-B`B!<`71RrEdR_F6$M@7HS2O=Qo~BuJois}{RU*9r-rkq;?BG>F=#K!SEHi? zVMwdfl=E=u4Y$-Aki=leCi(Rz5XgEec!|<#*@EJl&)*^94nH~*QC+O4UK9HAtJ&0! zg`C8?X4+;GpC;x%Q@c@7f{6DNc1(_H;=-s%cbo8oIhu|gaZL?(nj}{#@}Jq_fiIfO ztw}+QsUWejJLQU%PkBRCvmOo$;cq4toncg6aS?>2gnkohw{}tUGCBmK!`}nM7Ym0%uTj%5Zzx9Lm1B~x)c-I%c zQNLf}4f>HM`QU$R{nhKW?*RXo)X(vS;>Yua;^%(s6PNe=^hZl^ieFH=b1*_dB^8logoJeW7$G2yO4me6X#}Jh zNJ)i!l5L7+2n>Nk@r>WXzDnMT7Doo(hv+)p(xAh)8L_*YQ&66uSdDa~?=%eVD*? 
zoLHf&>>kr+L3vyrcxT(wE%TClm@?Qmu;`jmSl0}16 zi>|?OZW^K8quR#LjjDUko{T%x*c9WIGnmx9dZeoCeiZO2w+V%0ibBFHKVP_PIQ7f( zuMW@Tr{17cRVz&@@{5QA@&9FBE;n)2tUW?aA2$stAnki4+?hdA>25(fATp(c_QqDe zkMx5f|Grlr01Hi~CI9 zGIh^&S%clE3}MjToS7Uqgg)CLG)c>z#@AfyDt_B6$d#)Ti9emiiWG0W(ll@DgciLz zW=g+$0{|+TnH!Sgi)eOuAsE zQ2-gr5PaWNR0T}Wnwrm99v(f>LM-c!@cfmbRMHch{LJXcFr@0G;NfI(gS)iomdW{V z79KgTx2rGsrI4i!A9=-?_o78`);S#5_>nmhPL0hscJy;Wz@45A(8c#ITRJsKW5WR( z4;2tHk2sbsJ!wiV11+I{Ksz(#BTQb%H>Yg8SA%(orWjv=3t}qFWmUv=e;t!3bQ1*J0@v9*lIP7 zlJp%CQse(dtX-rCp#c^-Sm6ApgL=?b5PNaUBwF0GxxvT97(LDOt5*#6aij_Xjq|)z z9ln@J>cdb>Ti^@3P0?Vq|E~8zG(CMFvZ;=hqucOO*A>oiX>{}ESEp8b-EiK}{!RCi zzPXMUcc|z@DfBs2c)@N$wR?-}W0h8;8h$Rp%qoCj4thsPt}Qz&-p++@`(4bW6xd87 z^NPbW0LRfo?bSeXIlstM1rXF6!gu*>waGv|v+2dd(-s+FPRd{L+w}Rr0 z0QFpa%3pQ9YqtDl2NcNn3=0X_au47)p4kxHIouZ^G7nNO7Xbd8w*i=*g!C7XICL+PW7DV7(?m|g<*OQHP%|zLFGwF!K(%p zU>z;^@)TRoh0aC5gWpv$C8H1o->-vo-%idV?RyVVvXQY@IZ0S>-<~#u>mlFGZcp_Q zPwA^e|6xX=0a4Q|5I2q@#R{BsrALC3a#nyMw8t6!Cu;DrW$xd-nyrkP>8_SFXLFuy z$G^7Q`xV{W#-eM5EkT}A8f{_@;cIG!Iz_l&a))xT)1;`RVWdTma=3kdSo0jC+DR*k z!D-k+bVegXDxVVueJngPNtB_vx7x`~)TU~fh*%)Dp&n1UJps;!bA5~brpnF}Iov4y zZgNiV;p@lFm~ye9hzu6$h(v|v-ak2J5O!(Y$M;(Q=!*Y1kH$rM{%gDhfS;JGz=OKS z*Hu&MeTs8m`L>jf+c8Ja6jgFjl##6JbxtB>fjyxa_e}}W2XD>-6%_NkA~tgZ^0TNr zJ0+)=_uojFNBntlGiT!E2vI@;e4CuVUH+oQv9wXVSVM?j2S}U%MO9Exf}Wr?y{tvh z*#)5Qu$Uf=Ijh@6k9ji;22d>nFq~fsoY-V9QZ;k-qWu6De;M8#O&o}qTQNTnkuKff z8*2Xn&?d#U>*oUAkV=)#75Tcs#O|_72Hyk=RLjhM2eAcVRKwn6ljEdXXZNWOjg1yS zu~sVE>sZCIDrljdQ%><)7tw0v8!@-d89X)Voq!P=vCvKF;s6vCoL+ zm^n!NuS2bXmJ!0ciz<8^ZL@4XfdbqSGv9|MhZU$LqK5RD7cDiv4-`uM5eUv)o8=of z=%+b5W?W2Z4oH#cvKwCC!r%BqY`^ZC+|ti&Cu+t$QMl6?zdtj7dtWN(jiAp?eUSGd zOgT__E`D2Y)w3llL%VxmFQ>bemf?UD{;I3ua7{a)y4Jl-wpoQ`DuH80q9J|TxpH$S zCmw@U)|;^~>rrJk^cPlCZqD&bvUd2go@?*c+*v`6KAwU)?5`iGahZhLbV$qylbcj8 z&kyQ>aNOE{4+ZnYQSJDEW!8UB()i474V&B~g3l<|0^gE$o}bm|y5LSho>^ASzVFEr zDm(&M>KFosgGndD9sjKU(^rn7^iWa1o<^ESQQ01$c^5dRp*f*q-Mjz$72I%fk z%#4_m^OtXc2V*(LfoLpDxEN&9N74&q3)%frO*(x!obQJCyi-bYFC^{tjX~)`r>TQ> zRAkI0L_k-LkRJ3(br%e6_FC*-sG<+aJw;}$(;K77?sh5CV?eq+?ue*D!1r^A7nO$|-f_s=zRvElQ9r|7&*4!nVDDD8LH|=3x$V zMbG>x;j|fznr`N1+BLv#E}Z<7@PAEsaKxbx&NLntPE|m<{>s+oSEX9QfCx#VmVin5 zhx6P)`G29pe_KcrvP1)#)2D#ZZ_y5KrUv2{n{-5Q%{TR8dF`vKwd_EFycKf`@6tQ} zkP=yBf;@K6f1plT?-Cor+U%oQfhTfO0y5St3n)O5tb0H)6&SZuJoI7N8nWf$9Z*Vo zqz^o1OQz7elOdKz6S-ob+_xWlH=7tC^v1iD6wP|2>v-dfR!(i}#dAL-IG|DeLEB|5 zB?bxNT2vyXy52Kaj$a}Al<@aBjTQ@a7O7o0cjvsj3%f4q^V8_=N-j(PXV<~3FNlnbfh znC)RNwF#G0oXkxDK!vVFUSwT@_u_icbaYq7M9o9R?RaC6|DLK(ncB>V{pNnEz_lm~ zYTFCgkMr~=*9~5)8+!CGuStm6k4r`9ffi}`P7kno_mt%U&cV~?`J)d(pYrardL=!- z{!X8KZfLB{i~Ng}C`xK>;0-iL`hA$DoAkK{8X5WW(KkihYniUy6&7Oev7@}Z=%1$5(m9^$`cfG0$(Ne6gsVBw|v<(eG1hj%?WFvk9mv2>Z=l^G=id`ksIIE&9<=8 zbs{4iyLaYbjL8pcIujz1D;Q4(idGtb$!Ewdny(-8*st8&9Y+h6OSuq^{@OGLqj!tb zvH@0x5&QKhUOicYgi`kUot2Qta7!r>9{Z4Q>cv3s^@JiG^O;6UFVF)H2U%qMsCD}Z zUkHP<@_9>){{AB@(&;|GQnl7PPjp82YyAnTr|3k5;0;iTQuO1))g;fK4~#eEPP%!% zP2NkKRwFd{CUsRy{&R-q(?NgSkisU56+u3(mfuYKoxb{Iv+$vqqt@|-)6SasTDbbrn; z=02!I5YT?ytmO1Z;^%B%PAFs~Ss9+vodv#}yG^N_l8pM&8r_3j*e z^1l&9RwXqxSajmruUFs*xBOmj`mH?TeQlo;)+I{C!lC$EZtA5Nb-V-CvPcjChqAB? 
z`%+9)$0q5!hhzjhWGRSf@5Z_?1V%!b;=bK-jERyX+iAn8$Lm0z?i_C&bY(da^PeG) z%E%ehRtQrC0hH1oI)vE?ce~-*oSMNGB}QZW&*U_6ZFr+FU)z2>f8}p^axYYPurS%# z`rF`cr-JypV3UBbBjdtil4w z{M9EiwK0gC`j5f9KCL5h4o>3>xjYLIg`g^iPRue(m{0RH;1&SCd_w%EOc9Kr1CI+Bao9$B zLoGh})_cPIgu$U|s1x9biaC8@jC=;CvWV4+@1NXrbwL_r28=LPHvl{ zmY}L#Q#tI6va#+zfk8cHv^R|pp%i4_DYGF;caBc2EQQ-40ZL&^4qw#+utjDic~5hu z{emm>nKE(#m$ky1f%hT^`tr@1!~x~!w`|jH5$$beAG~H|#56^{CHnZ}e|*S^+C_q9 zJABSQRClS6(nN$X)K>s2+U~E&ywGSMoZ)p_i;`{>8NBkx zW=!8~isU{CC?ehWxS&a~yyUn0ZSfmA*HNM1@|4-W_so|EK#DN!&OC1_-45Y@|AeY^ zyxP}|ebzeFz9B9C^*X$sBh{Gzxq`Fnt3gIcqg-U3nzYwxWB6mUMlzAbM}MsUG#VAO z&6RtgNiRXJ@+X5c@8p7!uWsSdcgD&Ot`%uVuj-zwdKS)?0pL@PyjEr^hf67T%E5Fe z_~w1Sh)#x0yqUUt`U;XapzZ_1G#i)sQw`#!bGZ~Uk8hQ`>g5jcpSF3tF;iqPS%1Wg zB}2HHX~N;8{1eaM3|6ex5sYREoiMr{JK?RIJGeRrCh%o&NY*h=nE)##JR1$8r>C?r z0;8k7UQ7QuY~&Tg`eQ8x518KSM0sr4KjkD#b$sPO2hmVGx_R`pA=4qI31K!Xom{n{f$A{oT8n_dSVJPM14lMn$Zy#AKO1Z@@i|ZBezpR|B z+cL`o-1mI5Nz?~ieBW4k7^+1FKRy&~2oV){zj1zxb+6eGD6p{)Yn}jUS~-iQ^j4yv z+fTjP1{X-fPK73qo!B2;g)d~Y>Xl%Ci=W-aX7ABz*A=g{GbvsjqM0fQ{g>XNxB*su zb0$S1c^Tv3uw-@cqQAV%4WaZA|J!*XK)ze&dn=D*kGCebgLLJbrh`SN7UbdirU6#X z;Ma0P|LUJPS+0vRv=uk2CviB59W-@j>=jwV1ArzxjF=q~@4dRPE#?2*Ip4_H;6-N+ z$0^GN0w^42-QF#GMN$F=D8`~~;{Wr^D6CvNT{IegX`n&7JJ#B)PKwp9DROgn@{PbD zkBeD7&=WSntKb_}6VxvTl8F5K-IjZOVxm_K*D&c)_QtZf1Xpi$qkz_c`s%#asD)z@ z$AyNHZ=uRr^`uVYnP+sOI~t?5G#>&bg)_I)EN- zmtx=&fW0a7waVJ~LcM^%tPbB9Uc%}w-w4}VS#>#WcRVUH#OhBfN_9@>P%Oqf99$Oa z7i@2dxm9BzJ=G`sZUb^kV02{p-nOnCy&=&6tQ(e2U^(Tiiv(!Gi^?NfgHM0G28t&c&M4gE^MBhsMH}i=@f~q~40cOShq}0P z4+c{cbU0-c-n-!;m;4$4G4tvC!yY*gK8&&3&PrXgkH{j|2iN0OA7v-@r)n86&qMY} zXciMSqrT$|QGmm!h28Zkv(0#U9NJcXn85rvJC*9TSChx;oqcpZfkVhb-zlKX+t z!0@LxG=6Xr9MAKU$2Coghy+}P)brqs?4^PUTxIa@KPumoo!r2cL%$=@gYG9Ie!syCQKlwnKU z(;xU-CMSOsPO}Br63K~kVncmR55SmVU$YpN( zQ{NHXI_CzR>n+c2_NWNGu1`ajMzQGoT45*|8D1(dfnrFWiN#_0JSk#(=K$r>y80LO zj@Q&KDN&R5)P5nBipDx11oI`%h!FHadz>>FHN#elTfV&0L0{J%{^_xp-5V2>Zhyyk z_A7$I;>r)e)$w!#e0jx|)OiLkhpC;$wikdn?!9>rIZZu-{C3!RJH%?K@6;ghQx8os z0J#9U+Y^_YkS@v}yHAl4eN_y9m0qQCLeC!;W2>YG%_xqnPgCc=%n~Cd`uF0F(697z z?!t90<5SxhE2o7^MpmC?$%?fAlEto*yDTHJ5rMT!~IXUIHnQ z1p@e^uf)hhl2rIMk4TWXv{b;u5u&~Dh-H&Teub>~*Y~jHcpl-0t*d_wA{^Y2E@}_c z{Z%-YP8ijszKwj$@DDtIhUxa=f4u^TRZEplDR$W?s_e)TadxD3g)dPL^#rea-@WWP z&e;#ZF2Gej|KhOwJla1+^I;@g`3;TtO~nG$(_j8XbYP>DKj~lB#_?qKX5-Wf=koC$ zGA;zoDFQxSSnWgmFPA_zl829CgI~8ALbl%8ObBaI?4>sW|GKd+ zNshIAZAQLZ1mv{-x3_4qc@e&BlvM7mQvUSn1~GfD;bk) zt9C%v_K8?l0lr-y|_c5=S3^_$UTxl_9_dqpU7UxRHe(OOLRy!!~^ z8H8SCa20UR##;c_r)$HYA60wgjJ;2DnwSDy)2kJ3$!yzB{^^qK!g#3Bx@E>xyo+={E#*Pk5yYe=BHZnMyuV5r; zQSNc{61GgH`IPwbYVLs=#oY?*o4saEH{7x0SF1gZT99?JHq#*1> zhYM*;yz_{WQJp&=|5o6ir#z}=3(C{YGDy1w9g=9WY2I^xQw9OY>ASN=83@&zXT)!t zS>GsSj2cT*Ar$FY`=O~yyI&?=GZ}p+jDK_D9k%5W0LJG+ht4A_Q@xsllylTspgEgFuU zav>w>KL&Fz515s$pa->8; z0rQTc^xo$Yt7#L_O#?(z=YI#mI_%N<7A%DY=v&h3eUbfh*%$unuD6cdzI^1#I@%g2 zUw#jn;CP0bH<#!l=>r)uj>s#9C% zyi?5)(~d;v@Ods)SjQRAB|T)uHP3UKTIklgy6$LeloLEIh59(=?hJjX6Y@L|_#XJP zwi2yR8PQO4Z|O*Ax!>g2&Vq!jII}ERyZdcPoMDb93Oyh_4|$m;^jt>Z6!WCrLctzC z$-%D#k3^L#p-x^y)DO#9%6aoY)KN>W$xBoUQ%D+LyfsVQ&XA9szW(7Y%Ft9?Tyyuo z`Dm-6x4j>bdgV5ZUFw1FsMl@;5>EgY-al;Z!p#`=FH8TlixN3Dz<3_Fmf$YfIUPy% z@*e4U8Gd}$=6*3litw{Imr30%P^QKqwTFb(b&n8-%mH}ZCXuzT$uHaw>|2z4d)}{4 zE)Ga~;Y%07u=+ac#Z}Ts2(b!5`>4Q=u&0fNo#3PWQY&7a1xyJ{T>owOy`@O^X`9`2 zxhjCjC5$yvy3g1?_j?Lzb)nAAOJmRan{@#?yg zZS5wVNi;u@{bV9`_;hier%t2H=1L|vHre{%I6O)VtW1Lze)saxkct-nNmx%bJ0CFN zLuwEMviZv~r)IdZ@znXxE~uliU=h&zCvE zk7!e~J(P#qsJ`Vkpv-@t*h3t1KKT`QFngUulN*#)qU$X;G`tT z<@E?|A-nWQK z7ARQb*#jt6B_*&0wtT1McqNYv?dfvG`iH%uJ!}dc3$Tc_$rRZ?^5SEcf0QaBZVr8hKd;bqcsoKn%CU}Va;o_F{OUkgh2EgSt7M%Ws1NL 
z{G{RI`)e6rJ8gaX;EnhKtxMG`rLn22w_itw6h9nf+nxmIa6Jln=pc9K zQKvCZCqd0h&&}Wv^)rWozZkb;i;(#k5Ue+`^7mHMdy%d8kCluh?`n}Y`vaWUrr2`C zcx?`EE0?9R=kr~f2%BZ9R?-d_oyI;B6e(i-BkA4=;x;IsY&djH&5 z*J)dpI&(}&d_Wn*U&xvRbu_E`IKuQ6sJffYo6GEIU>y>2o!0+)22QTzu6X;Gv+{K1 zGI2m%3F&LHZv9uBuz)`3pHKV`EWSPo*MxP$50ct#W=R-EJlFz!akXOY1u@^SJ}W*b zRyh>&f@$L6@B8=;v#!dY%eS%@MEH6*R9yT?WkdE|u%=gl={qc~*L#1C%*5x!optg! zTt0}WLSS~z-Jcz_d05l{wItpq8OiPr(iw`0b1yee@G)q9#W6(LR=J^?+BpeSPm z9|iWHKm8tid)I|f#x}2h-%X-hi$nRJw6z^%TP^tN{_Rmmix2kP0Mz|&v7v**J5CQ0 z>Ba~QonPY82w-Nm2~%m9fl^GNjY!b9frODYA)T*?-aO*!lmX;Y)xEM_gg##9p(nXT zQ<}&u#kXqRHHW1sIU2!aANQ_gtI!rwV3*MkwN(|S1g$3fZYcVdH2=TYmX51EXSZ6i z7Ny1)z`@^c&3oo`&(*Cz*t-{@X>urWCH^&rsKeDSlKl8pllHR1Q?Ny_82C6hGcTL3 z7Wy#SU}ELk7=rnEc<4UnVI#6~RSRpxcb`E|jtf?>Uj|#C>;I7w&nS+!uYH@nx^1m% z+Sto!|3RS7$0H+yjPT%1;z z=Ag%p#3!o4_^gHahWA~4RR}A84e-v~kCC~2$r2!Tf+ldOGgG8|)_Gvb2F7ZUbD^A0 zcKXT6&O69bMwky}FZoBRik(txQZGr&y#7f8Up@@z<*=Rw5*^kO!{< ze3yUCJVBK-#j5zN{AQfYAy9CR(7n6iBP4?;Ns8g1(D%xteb83km~Hy7uVs3c$Qrfy zS$vgDsJscF`*A7{}EnoExA;O}`RH}wUu%?d?k3&TkF-3pJ#W<-d8 z2h-s=Z4rx*BL$`4i=!(nRK-}64t5-6FAT=(!gsE(#7FufFl-S5l!Z+YX+1brAdQw- z5PQM?9v3!lfy4vjvK;8Vx5h`@8~LS@n+4A?K0|$0$tlD= zXQAdO?rmdHk*e45#UmB?rk6J9DU!44J;_pKqXv>2lp4qy7#Jv^kVN%XZ;E<#3f5#^ znK8GqBCx)HJ#!f~LmMZ^NW>@W=S6G|c&!jHem?}#ivO}iEBBSl+)75O{w@}5dgqjL zpLrMJLKViz0X@6tv72;wVyi!G3+~KNZmg@E?fBF8>uoFN0)>}a+==gFs-1JIqcyOX z1`UtdW7v-8uX;soQj9Lkdei4>&qCVi+k1!cHwDiI07aC5)}c3_iXhmzti8P2Tj4)W z+;aSM@>9EoXt{wv52xo=;ubA21~U>TMB3VIGcmHCK25r0(snEZGOUH7RN7V%r|_~z zvBW!ig^YJ9fT)m=m!B5PbLH)&$;=8(U#Ww*vE)lA8a7h5X?S$@dMN${`u5DtO`9zz z(r`mW$O&$r)D@Qw$FQzI1ExT^luH;%I@tLvp9u6&LCN!?(~We5lnEI&6^5>^g!Ccx z^H$)z+mG_AE-xD_+Og2fRiv6o`}&qYVczqJB&>hzAXf1?uqnjaqH1OO^K?~xrPQ_E z_U?|0pHQ0zl8ma*}-REX&B^rnL*l z_Xpqxo+A5YBH-OyLaHw+dX* z@$_29#Lmh49xNZd_zC#zOPcu%$54$3O>u?wOO$bcoLM<#+`92*<8tj*=kx2Qq%ggY z)vLH;Qml7$V*V@Na|9v#+e~;F*PH#cX6{KSZ3uG3%9UXGzPuMxdrF?hA!BoNs=Kth za&dt$U%t3Vy3!}FhvGm%2qg5%Z5buPy#t%^4~~zg_B=UT5Vax1YIjcf^V1Z=tzDIJhNz$48bYSH(ROS_i4pYOifpNqEO8H(*f}U=8+TFj~Wlnhru{>@)u&ow0hk( zHsBxaL?Ffc?74=u7_pka4)0r5|KNk#y>KbH^ov)6-gLophH$wUjYT-_8k%{r3@3J~ z{GkXeClxx@e})mhB!Sy^KoA6S%8(<#XipOw-5xsH3-n*^`$#LaTv?foG&rzZBk=i} zyn&qE-aRVgg8|xV`gp|EU@kv9^mPa7BYs7BmHLIK@1EA5RUxP?gT!Z~In&H{jB5mk z+=?(C$epOOadzIX>fVkljXIB?{K4y%4hICBm-r6M$$;mSwUHLUGxH!4j`P;s;&hZC zc_RN<&4;DcN%m^Vjw7<-$6T!28M4$jsRNx(N7c(NEMNR+y}geBYE&k?Ok#Q0Dm96PqCG#6^w`o@&JO}4hFd9gb%`oR*qS zp9x=r-IcsL+XppDXV_f+gVFI! z1nQsO%p*A$&sDkvvi?k5T$5SH4 z3y!}SxsKM`A*CNogCONzJc8j4y=WqssRQ`4x!t~wwoH|0pq)IWjGbe z4N4i&%W8WsO89Y}a+*NnnlrR8G(#TmFb3~Bg$3SvMx_qeIDG1t!*P7TjvpGf<=_F* z_MFRzNT*Q0B^DX%zv{XX#VG2)lr7fcXUT^m(Zcg1N71QZ_?Omd@CTrT+#%N5kiGcA|jm4YI zJ&j-Us6RZqi4T3?4_+0gmsBP(4E3bu0J`i#fuU!2e zg3s>oa#FsPE*dYa;pK6!C&PrQ`p;&7=+>L@EPblpV?*Q)G$5SzG#|WfVlQ&k#X6A? 
zGyNH8=dMPhOpb6cF(hMuS|XLeh&k5H$LPa>+KT~O?~Lr~$B8Uo?Rxcn7C>t~dVm5c z79gem${ypDe4THHPze*qWJJJE()j@2xufW@cQ+`GX}lV*kd5`@TD|MAPSgsGR^rQ~ z-D%Z)pYXnxJ%GUs$e$mAE|1 zkR*@}!-pkR%&L8BXjfa-`i5%gZA+SfqjZ4XC+?L&>?G&R=ZEvUhYpx!VjJr^YaW}M z?k-GTIDHi?Ul}%N*!S7g`c!=OSyZRgs`@u}v9^}N;QgBw0Xzmp9YzVPb`>7StwjUk z&18d%NaN2<*VKxvK)<@@Qu*#QM@u@&`fwtXBC=W6L)wpp&W`(?(IPLC+rL7Gyg{7) zvX5-8uQs~XN?P9R$k44tPWR z17m~?q;{TK%=}rjWwof7;g1dF3ez-=gsvEiSq*(PyROHJb0lB!0M3__(@&KM3iV|y zh6bGiT-xqg_0LL1XFkcOEy4jcuh zavM@~-AB`JdAsKYXp=V$hlW#918G>g9Ii?*( zwy{R^=G;96%^!FHP_D`WC%ZL@Nkx71Qxx=oh>{Izk0idcdfJk?dXft}e}%Yc zghZ-73~8*JrzjvEM5hzG(LaB7(HHj}gZOY7|4Z15{l=2kJ$M#lr3shH46{WVgI6QR z#P)u~j8tD7Z%$2yoGhRSs3M>{sMevvP!Q3NgYlQW3uX)e54;PU(XiAH71l4;sB zx39-Ugli2U=UHaAwHf0>X_K$`6x1xH$Fuf!C~tMlbfE8w?#Qs~VodV9acW^F;OQNV zZN)WRJyW31syUCGrOnR7&DGAcb7cbR1YnzE_R0iNm0Iae@~77w-}6sm`*Evhc0qPV zYT$7?4(M`icaO@~adKEr(f)4_I1svQ*qX1Rl(VrQ7TyUYZ3Trq>f<%3O+r^y z|Gl5*TQ>qt?k*6ZH&=ox(A=0xKT&*~)RY&*v$xmp&P@;*_y>$~wYt=*4CURYIBX!M zr@8x+4YT<*P^HLxu<0mv6!_7ZYX5QFE92X5#=7#-?`NvEn3b$}A2O%bfOUyl*+TNU z81Kb`hx-fSdSMbQB-6Nn%As|T!l;(UK$Gvx&=F{on_5!wx?o^d0;N#tcpOXj?w1)# z{_!2a8xGrFj3}u}bzSL=9B)Fu$4FU=dSi=Co*P8whahgMQe%b|TB!gIL|=zv4}@C} z(;~Vxt0O*=OtMHv3qFw2(F!AIU=z}GE&ptJ*&Y>B;~?wuTXdZa#JyBTu0iszU5PlG z*W}z+qC5Old$-GA3^y;gQ8H};{QRc9QZe4r0I(@R1t#e{&AcR|&#OF3y-*?zr>}cH z(OD|px@i-ZIHyU+L}hPSW?5CW+n{IoSY&ES}jN&AbxI&CRkMs)GVO zYIL+N1W4(Frb_!B;JWj(ulj>m5SvNj9NcQujISjtw_>CG@Nhmp+E~$dPz!j#4P?mCn&Pm12$+Pa z3a$q(cPZ15Ig9NUJS8>FM(2OlKn>r!sG*|R8LQB$lRlRg{s>;JH3|^!-G|+a^0Lh$?%ymt>Ph+gVH&|O zIRPQ4DYrREiD+m9eQVuu++?yjm8zg`67s7WH#pe?hhuKY(Y{xm$u``aE!&KBB7GRn zzdDV3J9D&uYRF(aoCC)zGn=;wf};0P;1Q_J0{f761YlMXWD}5D?+T$Itt&^0QG+ zL)->+Ci^wZN*i8)vqND(@QOguJTb;7k0O&wK}T1Yb6xJ|$cD$rhj-V)M)hRu8bb?m z0)%?YtwwqFLddO@dV>H)@`eH-PhA%sCJ`Nq&sPdfN|b`MJd!bAo7AHCb}p%g^+=c< z{u7G6OG^Q9K}N>aXGoqRANq#If2Q{Sw^aHQ$XSPJ zpHY5*1duW^L&*a-vs#Xxk%{uZs`zSryW~3SAL|6TRoan~%!*@aoVE;fB5T*!musOV z)cW3vx;5zs2#Z2dN@U{JyNwE2O~%&|U9mVkE8Kdx=n2X@h4!{?<7LdM;AJHA=wEr2c(u^`Sq-`?SteTgZR#lQQnJ7ImdUk%wE;E-)Ai!|O%P&p4_K z5Me&ipb5XqVy@bOn*HnfVq^ix`K0x3D2;bP{Bt*ejS{EK5Bgh`7>lQ%`Al>x8#8N zvxv%%^K!8wf~vqMBW8E$(TCzLLCBHF{CdgxA%s z5M*pLNR~C>8n!uoc7k^$o!XB-`W%*XOX|P>T#4Rx?_In9?c{mcBg+ef&*Dul8|;23 zin)*~xrf2$nd!o?^Yabjz2gNqQJ+WQ81vjA$Z8%7eq%jBN)MUJ7J;3P^bu}tqM(lL zjhSBf3GkT}yV2`|^Ir`}5M{%4x+9Y}1FzO^=p|d;0k>JmWf9RWHa>KxLel6BF>e--?|? 
zqoI6V#;;KS)e$Lw->enm3qTb`C1o&fO5SxlWn+Hh3hAcs)T&3R2rjZIMJqk1@Ji(N z$w#*Kl^3K2qfQUiHrDKDUw^-^Pc6l?b@Cg9VQ{jx76oD=Cyq;@73I$tFL^%~i+2mI zFol7~BN-)Pm>HYWo-m1wL;WmOwVT{8?!6$8Jw=i0lCHt^CoErSton2-foHt#A-7cy zei=3==rD`Ebu-6Kd@In0<2E#`uvGfx==%e5svNK}_QXdWL8bY-SQs`!v1XEbXZu_} zAhV3y9K~Wq7p_r z?&+EY3o6SpGUT@YVENkb$UVZ7M8fhjp1fB#n~@UkTs*wP2C|*L?)c}nwOjGwfe-qW zGM{J-&hQ%mpS{TFj`k^rrxtHDlRaj%$RA~hAM&}N98Qk@@nQ9cLI&noj}%RqE3x_~ zIvE5*Cp14NOFUv@i^8a0RT$a%{Q#LfL|G&RwWY}91M6?F75FfvtuFixJ%stb+`bbRs?~$k~afbub}Lv zi=uUmVPkF&@bQE8_<0nEzdL^I+IuABJ&o3HEo$F~W2j#xeYh%d#W*ed=NroRl>Tc^ z;n7#a1p|2$p$x)$tWH zojeZ@ohym3(%t8?Jqg%mWB!gV*?O8=k_C(O;Tg(GQ*^Z8$@qOcLa8;N_ji`&?P7fz z7DOt!!WXjJmgQlF98+B1sezIf<^U?>eWu?{4p*+oFG)>fQq$ZoJk5}0+$}lLJ0@{o zE$4XNH)jU*wY?eHROw>Uo3Z^N7dk0z@8bU7DBi>Hv_JLnwIZ)A_9xaMe^3Koc3XGx zJcG#0L9%R0X^6TxTaxx-4oKB3(P>5YP0ONgRUvdTTMw&9U4v8ff^8sMC`G^PZpd1Tut;m$%75I(zgk3q#!$)6pfY&PWjZNW7mn|R z5^ts@H4}-X3#%*5Bz_Ow<@>{GRzgD8W%t28(!~$N|FZy+8RHt#Af9al>}`B;UbQcH zGS=<9(gDEb8MwzZyU_C@R#HmIqm{mBwH)a3zDrOBQ4X}sU=9+WzHRTv&tX}>XX0gQ zkFTrpEcQLH^qz~rc)H!S?))$d6X12>67AjD8iTAsuT*4+7Xsx5ihE^Cl3$4Rk` zK&@rAyAy_D-*8_EgH+9hK#79Y$eW+u3w zQy993AZ87^T`0Qkq}d^~2l>Vzkb^2H@mPa2gfa^moKYgiQNzcMG)^K|o3?8ZZm^cy z$)+Ywzw2R==!>8$syb^E7*>e~`PdN9AUCwB6~{>S^@JkhLuSNxXe&BK!o3IL#t6b7SP&=^U&i(;o}9n7Wo-a%iF3ThLb zsV-`s6}u)fXV2M={0eca+dUaHf1B`SQX@V3naO5mOXomA?0I<*TjN?OqfMQ(;b-R= z6W_PE3c%i$biVg`3z2F1KQ@obOOVF;2N$ng_oJ%LhP;8kL+98~_E06< z@t+Ilqen%vbHt)Ff|q>o4_z3~cI{V@sopY6_o(1QXz3F4)7!(A>b{crXNKL9*Yd@g z4n2+V?-bi!0ntE_2G}i8ovwNzYlmkJs>8=g!eUnszXS-ddG%(gSo4Z5HJtZ;nNao) z(B;amG=ETDs^NA9MUti`{1@OUwpm{s?c8rA z;KF?6{B&?-#_h?bMsDy?Th+PYT>E#J74G_ip938@Lut@0-q8Qy=`H-4e8VIFI9dJRdLe(?O4}IAYQ2u|aR)UJ30>UdkJh4xAa!530!FbHhZ;C?xvja$jqQRn0cl zlKLh0?Pjt7X8e2b6*KdumfpSWbp4$1;LJfAcRj|)lw(r=#RS}W+?`?R_TfgRnKB8) zLGC&NGiWa$wbm-gS#V-@U-+-6gg1M1bwLILA7PcqiRVR#F@=6OUf6^NlH3 zAi&dIgIn0D2S!IdCK$fY+9z>@gi-DG32jA36`^V;35n8A2Ug$qA6a+x3}lWsWLwD7 zzGs{LZNu}OH2OEy0uz(`n0bp{BX{|_6P1ZTY{PU-*otJ1%$`SmBj$(~QT}>AkNKh_ z)Fy-vr4RG5EuL$`X$W8MT3^7FdN zG=O*1t+M&~=zj8!$TeN~`_0kLmrjRS-H6F+D%~HUy@dg*K|0R5!d?u-F&(xOW7dB^ zzL4>)`wWpGOnc6t&~;{sWxQVjFc9{}l{|5Ct*=7dgI8HsCu)$1vcz4t7TV@Ji~A#y zkY=5(!rEvyWnq)Vxa`JFn&`H)W<{)o!ZWo1)7rtuV3`6VJy>dZ>kc& z+q1)xxEAE+Y`Vae0Bq_&0KHP1S~umIEMa!Y%`?*)dX9un8e^)E&kd~Z&b8^*zOV13?l6pUaryX#9AZgtj$1>g zcV+3@v)#8REA(Y&4iNQ)p32?L7JJVfuvD4q$>Gh9YC9x>(E2AXywmWyT27sp={38i z!*vP`=T!Dp$NBv2Ql6ZqxBtAG@N*W%5-H5WF?3!i#!n>WD#8B_w;oJ~+)&KW3Sa1p z)o8!HvcA58!qpBzxSj+dzt?t&hI=*ya@!5k(TqRP<>2d;bSm(=2Vix(?3&%Gk`0Hf5&v$&xqz!XWFE-;xn-W#f99%Kx64ZnlAGcvB@0GZ0Qf=YXu3 z!k2xeN_|+y%Z&>8xZNL?%&(>^JxiHof0j@#;@_=JzPD^1And~JOS|q69RnE924>|m zWcu0;na{75a98Z?@=()s`EMdwd3vidlUr^A-ZGH(?>$h4SoH~GlG3Wmf)?5Xh~tMZ zBXt=JdJZMrhq~2o(Ny^3A7lsLf&T^TIW$MUxEJx4c*3s#z1I9+5x>o>g{t#rXh#Ug z0|>gE=K`yyb=-+AckYZsPcLx6NVVl}K=x-v6lf=UD?L>c*UqJyG1_5{X~!U#EWMnk4Oh zd;2olu|S#8RCwBDfS=)$UF2SLEvLz(Mw!T*wlpJbgC*y#F`j9>Xwu^#&P_r77o8Z4 z5$iHms|b7`BiN~>74l&BkfIvKQN@97Y7C9C8Wqdy$cX-uEPWQq{Nlc;N7|rYW#!x< zu#740*isc09~@bAr^~&3F(UV$bF6!T&SHN?n22bblZW2y&sLI>E;rSeS6qj+&)5hB zf^^`|>)OizL77)Fl}A0C$FUeTlD_;EYx(ih4UX>K_l|y})-5B#Sc8ChZL0rp|JE?` zJJ35_KuEw*f+^4*`XdR1Lz(-p?Z{&LshaPGa2K7Km^;Y%y@JFBum+z<%Qte+2k{C3 z>cmzz=k_Rr_X$qLa$3T*Z0xEY+Mwy5Q{i2RSX2$Z_RWJ1`}hxs*DPesWRtC3q$vJ5RBl*kQ)%W1$^^URtR@1bw0>?awpNX0mO4mxVn0 zj%RHXhx7BC(ib01L=2d#TfH=00rTX$8@76JtO@!srK13rBh)^M)P8jtD`Pcb|gcfHc zrIti)FQiuIf9<#-UWdI%{Zu>pRN^T7pYx?V; z0Xr{utr7jG>}%j$?7i5pZGb6n#HI`3$8-eNE)Dzt(hf^3UTrXpCmg@Q?BRMLcL&4c zIGax9;Rd1&Bf~SY0Juyemq8R?wDa8O1%mR+ zkw+kyyM)_j_u|sb-Fu8e(%nI^taJ!A52 zv0QmZ?h7th35BUpiOxy$c(zR5<~)SZ{71knd#U%^0tuYc=6MhsIjhEpwtF^NLu1pD 
z!Jh6kYP#7?=c?7+gf)W5*R}H^FM-+)alFE-mvzAlk&C3Omr=vGyS>%A0%%SO1e>2= ziY*5q#L_|i0WAyYBfZ$$yE(IC(>5sXkDRTI_Vi}%amizWd5T+B*StaQZnrO?8|koBSZe{_~MWhgM--|S%=-%lRfYX?ljCAJ6|tTJ?57i{a{ELk6t-`GmA3A`eN!};K#})VRCNpfN^QC0;SKb1@WYM*~2g?Z+p;Z$C58GK$KZZdfQGNP>yBANP@bfi1DFQ0WQ_Mp8oq;~!<`|{r)v=<0D_6^c?YOpGKo8(; z5Ha6f-Z~vp@nw0ymy+rnoTcJ4+6;Q9KodOkA#abl%GQ~=`53J7WdQJ!*6ne&A2`w}CPkDXLq zD4G_Wxp*H^n?~_|?KF?2R-RJ*A^yTy;;)m$JQ0xzQ!I?3u%C%F7{6$)Th&}|C)B=b z_Ls8@2Vy(wpe#Lpr=c%S3!|G}#X-H_N9+84aMs}39tk9I4`-CMdkx8r8(aSwJkroi zJv)2pA)9jKYYQQhapo`6ZG{mn;%bUr(FSjaK3>WOyH{XyJ-X1JOwa~0uJBH1fZ(rl zDt2l7i0=;}wc(EZNbm5lstw6(g4pK+Mu0k28v&j3*(jm^pWqXejTLcWDO1GuB^SjS1&&Kms0x;P=0U|FvB_YtWAv4ceWT47=LCW7|stRoJ z*dT6?8MClnFcWn;8PiVe`WmKWyv*in8}^=u=S-WQe8S(I95HE>ZH!Z_6uHT z1ks?^^0y|8@@FEiliH@(`7u>VHc3 zE_etE$6lR(my|zgBE{Yy!P^%{b>k*xB{=W9b+j3*pN|cYZ>;>4la%L6+0`HMM4v=ZQ!G5!2>4KVluDzz;wD*3T7$G=cr#bXUcc3b7@LlXd;SV z+us5|H{{xZySA(~0$vuFC69~;n@Q!Ma#PO!MTlJO?UoRIqU0wreP+8=#>UzcBHR%D z#E?9*FQZX8SmIDaSiEm1n4YXMNOGz99Ck|}5)~qwq4mduA{yF9 z?Z`J>ZYoYww3k0(@kzfZItNU;9BWTg#oiYLj}`a5#JZwv;9)avDH z-v{2Edd?0&VZF(RwwSz3=VCTI36jTKqFr0jMC<&w?KgG-`X36h@{FT_OuxrvE-P(u z4|juqc{=3T-}m5R3riUW^#(HcT8{za;C0Mrn!Y^rS#==0xWi z32nt^;ymjLAKYU>g8p{jr%lvC@=HiskD<4H45KermYAM0nvKGq%?*bri6V59y!nrNN)P`|K4Fs z+?|VzaH>oSN%(b{r>!YcA}4bBV1Jy4N!w)a#_+QA9fTD=>D%O zNtE{$n&{dcyf&O~AwE($X=$p#7q4T!Dz9fyQJXNiyZo>0Z_kj&8avppJ!9v=bU;Yh z%PnB3dh+9sh{^hxjMayTC<-ZnYR+_=;S`C^>vWpDa0}h$ifK*v=S@4bV_e|t9n^-> z%n+AX+y`N#h)Lx%)XE{u7}d+l<@*;xuk`9A$$zQLxrNvXe6##p1n#ih(vIp<50Hty!RK=aO*6o`UQSb(MsNh9(93-s}h z{x*lMzg+~#Bl4Qm;-*_v`_Y3ga#g#GcYB4aog6&!(!A*QbJ&`(F)BrBK zS+`IW6fTjDTP1Z%Ua)s=^mk%o3e9WXRbmA{do2+pfdN)VA7N8e1opH&u7{rOif=7I zZ7T}OVvpC#53?lX{&j$}Vl^Mv4g74gKKDjRmcuD(%dFZ}ilh8+`+(0I&?V@r;&a3} zSa>3RKCSY=y5Rj!i8Ny&VTneO0587vX3r1KWJej8es2Ds(#p$tqUZ1{`U&WPGaGWw z0Jna6AHO)cUo0MZu1_@_I(>z<;;v(|BGBHV+%|IPtctpMLm8WVF^G z;N5^yZtK~W0G7pI$fS_H(!bOYDUSm^?x0qKNSo}k*UibRLKTEVg$xR{JLnMkr-YgH z1(9{{iQd_rI9iOY$|lsLu2UEHX^D+CeXRaA+Pzg1>0a~j`W;?t+GQ)V?uw{D2!W(H zpD?D0dxeUjQm-UxH62IXB0?0aIni)sf4rWX%@9&EL3Kdk_fz|UArCX~d+!#Z#z)@c zM)D#8=^!1%g-~csn4jqUh6P~V;vkVnU10`H?f(JC8F|@Z>Oh>Cv0M1|MLGsuy!@mQ zcL=PUq1Y*_9(pN|cQiWV@IZe5@;#2vin-8`;AtM)m^3>5k$1jh@grYVc&Sg4+rxWU z!)4lVwweDjAP;wpL(umxHbm?guoX^4x)0-JnxyuJ?$IXT3dCP6pm39C76=Q(Htd zVfeHh_J?G$L^oO3N|EcemruTT$%U&7GZ1U9r82$RtI7jM`$JFzq^vk!&)cDD^h`#; zQ10n-92PiAe@~p!-V>}o-?%u*Pe}Sd+)rAp(KtX|a3>9?H|`u9{Uz`we!wKR5$u&t zZYY^bw%E`7WVWcPx_@jCney68aE{!VVhi>_mBZZPGJuQ0IVt;pfIsWY@4s1iL;EnQ}M4T`!Z1Q z+oJnx`FH&$4d+b}nd8wjc?Ixo;GLdrRl1Bu`rLFJpz$B~8a7`esSrdcH>az4w<=dWgWBJU{lqZ4SJwn>k;CLXTFl&pM0ahh( zMsa!RNEX#I;~Cd~*@MNKP#fP4hMkm~88VEre>vj4)UiuO_?{+Phyx_&vh%^@4y-u#b_wxMJ?$oyvN-eGi>g4U zUkf(}aV=ht8%X4H(_^pKEEz_XDuTt8U)Z`i(aRRCqwHCTk05bVKF2(@Vx4yhlJxS| zc7QF5_H*RHar{WuRz#<4{P*(Q3ysMuAo_psKQ|7Ft^OUHpV4;=G;#5a_SLMf(KA}h zpn}7;C#F@stKj?QBT_{>LA3gqcj56F{&{`Kcr%8tTn2#4y7>AJe3cm_G;q#z{(Gr4 zGW3!4zQo+~`6!_~jd<*6SL+4=YT(*NOO-$=WGLM zs#ai(b>;m!YU;%>O*GS*3441&!20$kh#Xlm0AhZvXW$bk^+CvJO;Yet1vclwFAVxJ zoyiA&VW&WsWt?~Gz`k=qmmWK_&Ns2~*bqmbrYs24zzVw+y zS+$KkC6HQv;&T{srbr5ux$_z8m*VFX-2wbp7$O)W4JAs5&U*F+A1k5%KW=-9A6~L) z!j+J~qZcftA6ZXP99HYpUqwXDObwt}U3K<$p*!(H^M07?lngSKit@yp`*J1clsQcL zle}Yq&%ff-`qUctx&L$Ab9BaMI5Ekhfg{Q}kFs>HkKlQer-I=B`MLM7-)oij2+c1B z>5)Ud9svoNKYOI;Gd9OAy%x!wzT_V^g6YRqyMv}Nzvblkfd;}En5V5)-LAa*dm=}vV`0~y@45A8T{0WP zY9L7PdC8!^s=S_Vd3St{MJ-&3!YOVwy%4;omjZll@?6)^&f{WM?^NP<#RIP01q~7H zcJqV2Ma%H_a#Jp9dMw|+46rIx4d9?h9b`3#1-%vI=|?97zfaSz6BvIO5tCvazIyjN zQDqB8kxUK_!NgkLEou`oBA&#==wTI?hb0e($na@3T*Me_VmTOes8bRdBsfXp9QO77 zKFF8vRjhpuf_R;ToCdAAs#iYOlRoXhN!y<`%$Ws2a2==URd;iyuY<5?HC4)2^KW1c 
z8oQH=sQYO9QLC^P$Ljkvf_FAt6jdoQ!jwl9>{}D&@O_Pye^-_{Wmrx{vTSnV66Q3r zmqpyLf)6RMQovM$QtWw;8Mni(Q7xga0VC>orjp|b}&we$n5jm9naW`PT+pH+C$-7Pn++6_mqlFpp$$>5uLXCz$r7jaZSAOD^WEt5igF$QqlmH8)mpCl=p;l z02J$E!G=$x{8X%C?)ED&&<*c#qdvRVV=GFCK@0VCIK!%JU(1rZS)6#C_0%@@)c!XI zbz+>|8LI*L-pfRfn&Z8C*IL(+o3+a$yo6TsZj9(%>>*o_^LKxBO>V>A zgD=`7fj~@Ub|U}a_>UfgFJE=vD1_A;=IIzBAC0=Oi*R$(U?MpL_L0HL3_XOyzmq0h#IS>tZNHB;v6-*3A$9*nINWVdg@KWliOIACMn|G14k^4KJNZ1hR%RWZ!e#46J8iguHQ z(nsrN~5U)vv-o^h&h=R3u^%f;{8D6?wgD^ z!*A1B|DZ>9!XO}Nw${L$2Yy1%ehEK&)GmF#q6G!__^tgXq1z>$#Cs@*D%F#3ixw6K zXS9bApL;w4r(C$3jd${u2HYQ2@S-+`d})|m~ByxbH%pW4!e`d&-1d7Hz(sS)@Wy{jMRe_ z8K{3RPzjY(qq=9ZscGrI{IfRmWHLzCTXF(`Dvzcn5G8Z_cyTYk+6~U^-M2DpxYuLd zsZ*6zB;eYD?Q|BtkK5SwZ4!Dg0LauuMY^1K(5<9`XWPEzo+6ek?gKmMnN-Eskds6A(qR~IG!RDN2)+UIbxLyxS_Nt9*zee!8 zCz0&D?&hfD@9oKXG{<0%K@*X{g&xZU5Rb@ligJe$Gski1NlvTFO2}21d`Q=b;6>XEr26yC!#O#0xwc(x*2-~<`Vorf7+$L@5ePwPiLoJQ}X*J34qGTYv3O7FN}Mwb@DO&>EsIBIOp zO>S>g&(cA|dP@dibRmEjB3JCxp)ga(R*$I!qi&7^%&#Ua8#}AaVf*9Z^r_C3T)*ii zb2BD|*jgwEN=xCUBmc>(!Bzl7UnS2ij8#64ION%E%c)eGAV<KH=gOduT{6L3fmDs?q_>~rBRPzsf3p)Vp87*mJ5IlB5R zB>LA}y-k9WZ=zm;k#qAWiRl9${{g>49s+)-JmwI5b93xv?$IJZ_3zs$r2PT_bwXbN z+lH=S0{D{C1>?Dw=q}0rOB;GKi>VE_=?J za>sHG#y+~OIy_LR2ZJhpd5XR+d-^wup@7AFW8;srNif$-{~Lb$1$v;f6}j)34B3Hkmm8*M)oqkjdp|Io z;$sB=@`Mqz>1luKG;ikkAex|-4ZEW>z4 z6{YMxIdnP2TWasUne^(;vl?LDn$KRx(kU)lAm_^gSNRdJ_9x8Y*?SU(~lj<`!9n8qUh2*Y+CSt5)VLL0BGU4UhWH z05Nflv4dQceG|pUN$g&Ze?X7zPQG^5YJRkzy0fj*d%c&h@)=#LL&WP6N;uJQOv#(X zMc>B+o3cujcua;zY20zV4*OLc(J_?MtfecPENx%+SZi$ZIZQ#(IN>mXk@{WS)lB`{#@%1o zYLpOrhy39*C7Rn^3UChJ>)iZv{LLJ+&|LJ_aBpqOl#psUr+cEq0Q#XSqf>7VbznNT z=4DXhMYpZqq}J?}^`B8OUZG&%{K8U+Jo>I!#sjnjT>cYsL}&19`K7R$^fO)DJ6*)q zE}Qqlo1l#{dQD4#@?#s1kp2F!{N27v#Bk94$D}wZ%9h=bH%nIdhz3^u2oqQQ{;fdA zzf;0!d=vO3%{{1>7@e@zt=Il9yUrHbzJ0`?^kJ{&4K%swB#-oOR&m-SIllXOcjSu2 z!o(9a4Q6M*Im~rMzO8)I9JMF+2mQfN<(<9q8{+&|&O*-wKk#t}pw%d9Uo1{Fa7$v1 zSl9||4OVZsk}(tifwo4v%B&(Intc4UN=`>R0lxjzRsUGmyhM-8(7XFmL0oHPM{^HT z(8AN!XNK_pHySL9FQ}Csuw@ZQ9m!n9j4{26Z4k3$?H0+Gwh+7i+1oXaiKoZloMQ)zi@Ag-FlX@BFKQ@S!Bes z4t=nRhY`t~va^{QWyPfEY^}C*Jft-bF%=`(3G^QD-*N_gzEe^&IB>vZwP8DX0)5}6 zGLZ;BqsUh1*&o#fo_&Gp{2UHS!&x{4)4Gh{6aNt@QymUew(DDTQcd#vOT zniHc$vMyMrTe=Wkui0?>r7{Kp`0T34vSMZL1@A+QDf!a*puAA+6N3P5A1Y9nl-EoYTSFa$uPq*KOa_vDm<88SALIoeepA)RJs{Yl7>104atrE3Wx=zRkZJpO}9$k3VR`|+>5U#JB z5=$>$5gT;U0t}kV2Cl50_hmOSSG9Px9T&GXkqgP&w;h#{xnZdJ+%Z)Y8_e;3im;CEN$mq>wQY#p$q5LnVaHzQ)gcP*w>+;$)Lb@8rAG9Bz zYXiVRNA?OfZ8sNnO_xFnK|uq(_O5;hNJ+OT;L@evZ~*5U5Em|SW3FS#3mw2Q6W)faD zYZ6`irBQvR8Mkw`*go^$0!>wIxUQT=CBTsOWfgpmP4`V!p#-8G#m}$#9``=f>rLqG zGd=^dm!^0HqzS`tf*f`T^=K5xIYwb=0Up=j<9Nyxft@+hxP2?QrQ*VOf6nvgfe*A) zzlBqz66q>YM(hf#a3LSO_}6Oa7m8E^ncdyNqqnkD z+m()k0j;)q!M)}Fu#YLfTC9} z8)<49h?kb0fyOteTJJ6Qb9J@CZ7;1s(zI6@ZQWyOcJ2Z?{eB;LJtMlJD1e#nLy6;A zf4P50f0vO2F=;jn%f!CK%kv*jl<2QNj1aSo=MFgbqO~4%U{wLX^E(rckQd2a3!rxk za@2G@T=0EAvD7a%qKnW~byC;NEo?zbw7584g-2c;nl=?~gsY_Lu}RF2Qz7ip^)dMN+RqJqTV6fic?OjGyBFO0 z{0@c|!=}VEt)Y-}^gn{EClh8#URkhIu1VYz>mJk&RDrG&^_I3?~G8`Wd*RwO}92+ZDVga*fI6j*aRs z*71}Ikr4Z45d}GDwu>G3;tO4VxpTJUal0W~jIDA6(BEb+Iq!H$1;{4+k*?+wd6lla zZBDJ0R>}S#!9XccgBxkF8y$^C@nf^ZT94R0>`;rE@Xr`^MejU zzR$U+g-01wqgyZV|OazW?8sl@5wDV8zu%fxZ(B!q_n?i;aV zLh?hwc>#tl4Em!B?TUB9cZoE=(fXXsSICvdGjbtr$MK)qTt}Yv*ThbC=OUmt!$|KPGY1&uw>Vs!>|l$-kTxd_ zyF=Op2$&v?{#^e}mEfPYLL}VIvQdhl7YL#6Q0 zzMOLXw4DixA=THy@vnkAT{^b?h`DOM69om$bXWi6%R4%`A``hg29uINfL}&lNUZ4#LJn>5B4(cmPmlT2DPam7MHA_^GOQiaG{|j zsBi}QJAY&e!cIg8b?`BS=%3!3qD&dF@1ZWWR2ou>-VyY?-4OTOTYE~lx%GCmqyan~ zfGLSsj6KUSk0n>Xb>+mv=*cp<8>lZ$E1cFq*jhJK{9LNhgU%~G52(pcby9v`c8%tJ 
zln%mW;ShwV?_Oq3&q8(0*>P9o_t8I>cij<}i%SIiQ^`$_tqNDS4kNZSIs~`NvDs>_ zLU+-_BiD*%g3VV)$FLV-WpgGB2qQOA}2EU9s5A%ws4m!0#r==ADa-A>mt z1(Q8^?FrwUx-;!pPUNhG*MFWK4;hc*j4d#Cvl$~-CI0ECNe3+xl<=X{=#FIilHbS{ zSkm&~{h04m|m~C=tkAPCzwQoYRou zPnn_S{DoND8Gm@NI`3z4f;bPu<5QIZ5~~m$Vd3?5UXq+S2FBfoGsX*}JMR9`czdF# z-5TzN6_fQKt`?g&ge}p3wwFl0=k$N&>FD_Yb^20rc`Zko{^akCYbliIN9*rDKgz(| z5X3Z;;{cx`GiJN1)Ji;mHo0BP|H@#@iYWXw()z2xXeF7TW#Xw4wF>%5nH8^XyY=FH zkJu{)2vM_%y ztbSa}cAfCZ#0*4D9hSK1HxM})j|%W(XK{)3_$O86SHmLw*v4#PKSIH$Miq~}t#GJ* z(vZ~AnDTH>z{hef!qF2KD3twid&ao`t9KmP*yn#6Xy`NQ?@3ODrxh}bMoS?B-(b&5j?%%eYMqFXx{6soFHx zee=zw-J6FIX(3Vzy!s+5l2)xJd1{?QZTbledyWAPj*lcHJNInlF%2&YXQlaHW0(cz ztDy+p^i??r{}e|{R-XrCP5Ty6%95NC^fO?JGDRLO;4LC>)BI37@Xzrx7RumRV3a9z4}&Uv6bPmw+N2YcrT0X)&OiR; zLaVa6{=Z)!B z`*4#SelsTTNie}6Qe@jpT)W?tFYcUs7k7xYU6Y#*Ld`A$Cmaf2H~U*RKYc``TT;Ii zqukb1v=(OM7fYrl<~P3Sf;s#fK+l4+E{iW8W^G!lGevgG*1?01_IXm$i`1A6@W1 zted@j>{`NfXLx21j82_u%!9xjrX!rEj;^D=%BRj#^2W!{nh5ybb^Z6mU3Cq2)<(ZQ zWq7(a6g%6gA{aHkaE5%tz^cgxUmceACtnL(X9_$U<&xA+VO}$$TaIk_o`W0EGntvc zJv;vQBd^g2>SO`!t7u&xSm0mOj4e#z?v$OjZ`u9$c+b(;Q~8Gy==<^6-U!QU_>O}M@|Gi07L7lO_$yxQJ4Q=3j{ptQu<4_-c z7I{Ay5^nZ~g+e9#+_&G=yGvB`er9glCpER2lY_SgWGt=1^&}OX`SCS$N>X=vbSRM% z2jy5gf?LPFfrjZY+fE_!7uA*O_RLyxmh97VNq-^&!s`mL)9G?NI2lV%zx0h`z4ms0@U1A! z%M3JlEn7XApj@aaQ$ZTWcEA`HGcvPrDgM=pWjl)_w6L(Jd-mrx$5(;dGDdlWmpfNtRR1@k&RWo#8_LR=1e;r1V3FIy}F;=N>(F$@qe?%zvUYw`ADe%I+HZX zA6?5&>;=!{OHygVisHa0+1{peLv}1$XO^xes87MU74|B_{jFOP*1h64|AC#on{(jf z1xwc+=FK$Ee%hD+VRKR?CrkZ#*hJ?j8sGkpnd)6n{H7j-al}&Q@QNRuZ!K$+oUu7n z+j#IJqh{KT`}ieS0Gs=#Qbjah>?!dwM9B>_nUp~9dTZW;K&?%*t(9qW2Y;I6`Qf~F zS$k|20$dGw2Jw8WJ2ukx3gKmTUUN*75ORh|syVk00OQhu%d5qJVRXLVTJ4JOmOPf| zFR=L1_)t4&<1OmrKDHQVIU{u2IxLBOwo$pM?2eidOH(6E`u|t}(GB`ECv8C`1k0DZ z_CPhK*+zJOjsFuoV81QptQC=tjBirO>TMo>)2Z6tV2C1U-V30%RD~T}YDG);EQ)0P z@|JG4vhjnKfsEDleftB-TM@f$?6_BoXvF!F6gKaH%;(_=p|rlPAC{klmge^6Bi-(T zu~|t!_Gh}BU#343-TEf+D&!# z^3uQY;qEzjSw_ApR&Lh5f>R$%`AJ|mfvM6B|R*>3?v{L9~53bissA zk&#thEim`T{X)Wei?`5C3HkV&oNj#r>@CjgE>gWa1rd2$5X06}Gk?PO#=S3l-Tfj+Qf1O#*sb%m{V^AZq6Z0CZypb{#guA&urj$Q0 zR@BoxZnvA=3pwZ^8Rv{y1C2gf62c5bG%dN&uooVdKqZ6EPL5Gj!QMgBLZBHTpNnW< z!1nc7Kg%^#2IP2tKdAm3?{(s`0!TLB@U(7d6N)%U3$_rV&eyT*mp7-Ee(t^LS3~rF zm)G!e@*n%m42m%gOz$Q9$>{{fxa%>xBrE$by@FU>-02}JY-blGb=*z-uBK@^+QvH2 zfces1z4?I`B4h@6R<0|UU`w;Gv!xQ9@yC|N$QaDUjccBQ!n9w7-JbEGWIAF?;~OY% zBC7>q=x;i)yZLl8Lk8Sd>sapwSk!1sL@H%GzZt7El>~ddzpUzm2h*Np(D{^h(uBF)(@G zO$&gZrAu%gCU#4f0FI65QG|CL$knEEdfbi81sk^H;(7`31{`5lwNr4M%r_?gb*kgG z6nqWBoD9WP0f|Z#=E}2nKSaA=ArnY2T(+qi8(-eU130Z=f)w8~+DD-`6TWNmZ3&QE zG$4)*`ARK55|3I{`kOHPg zf?J+x(%d2<6{MinVIj673V$U8v^3A`yYv8@(i{dP#0_>h#&v|C3l3)*GIA{;*5aDP zA$#-}pr(;afT6!rIwl*qx~c8%d9}ATd~SY&n5%wLS0&=Kv|P%+hMc&zJ_xvp(eLFF zlA17Z6wyg`44F`pv`6b-jxPa2Y+|8sEcP1wf*UV@c)Ib77hR%HAh!-p&Lx^PXV(N* zaz$xvY<$KG@84@~ole(r{xkKbWgq#t2m9msFVCON$5R`9>31c zMI#-p$8GK6$yv+_bN|otDFQf@W9oOJP)u-Bo;=s_cFsJ0E5x+XNoHf(ra9Om=n&-} zkIh;WI|lJ=(66`g_#Xl$+J1Ujrd69;jJ?w55k>!I{K%<=@9WN4vw17Nv1<+gMK| zN{tWG%(89o_}!aNBRcNqzx*=&>z#kkgaP{I87VCcD-Ui4T_dlT`hLL$+XL}G5Hg)^ zTyV+R+lBg4+#%!5zkbv9+yCJs?tN9OusmVS`n&q7&&2onoBsd$AH4tIUzp@2RMH@( zoGLl7xqHj~8(ZM}=U*W^T--a>T?aJo4V_Mz0OYLdpeDwy(_UI@c;L-FHuS@*vz9iKc8q`alJ;< zSXWH*W=LGEIVQt>YS`<( z_l;2J2fjk)eMyv!i_?ZYKCHikMl@x10^)K{TEHO}g(N=2zSd>vFP@Qx+`PM%aJG?@{Gr4C=A=V{}($>ZPOnujKiR@R@wFA8vI(f9Xw+vkM_pGi!Ns}EJ=G0soM{* zT)?zl>u-3um^|R^yw+3wSlTjll4{o;4*k$!A20|6Yt{#Z7Ek0*5G+^7f~k#hT0=mr zmQC$qiVv=c4HRsL+NU5#n3p|J1Gf~kwGjUH`8t|h=NT|U4Uzv z_Ab}DwBgON(VT@iL{#;+&$;2(+k4-B>vqeHca!e4<0;R(czeY+UnQI(m$Nz2t0Xl) zxO4Qo9AEq2K33mgzJEXcbXq|V0D!-5a{c@7aL*_M{q&6w#;7|^SvWsBf-g8Qd}IBP 
z!Artle0lz7L>z`4iRg?O;DKg6!kW0{y)I4FP(jwkdOyXE4GHY@XO|jZ}b}o|!WRH9Ux8y7Aj7!cH z+a$@c>Q`_{8}d)69}F_h!@}X{ntE7c4~BJekOte-01)@AtNTs}r1_agHqOkPVNToe zIAyy$qir4DTd;!6E^o)LVB8((MT3%uT`coZ9bPbdk6rHyiv9q*Aw!pP?9?Qf_txLy zl&SIrJCa7>ZaN6uc;Ad2SX3iF`#qXrARdw$seT2w=fGK^Xkm=}d%AX+lwI`$84spP ze^UJg8TE%C3|wjHZ~okecT=6Pd%>4pv3>n_KhA#(W9nCcbrIN$^y(7*p`~yB-Y0HP z(Tg_ze_{Ob8*Z60NAox%w~*@mkvapww+_ze2K}h4hUb6u6|^S^JDh zW_I9#tz-^e;?uc6{IeyZ8mG zCGp15qb{HV@Qqk_=Wddy^nokzz%9KRHbt)M*4TvjTF1VR^yFFx`@T<{o=fQ%x+=it zCA|0KA!q09HtggtPE%>EZwG2PGb^#iV3ThOn|LDVj6FM$i+v<(&%jnpF%PU1I%nh7 z^ay~*4XITHj(D0&EFYF@+?vJo6Qg{cOAGUaEX(jDZ&_T7p@MfleCTRSugx7HcCq){ znyaH^#7pZbg4Dd^bBaaNsKmD9?cAibd4h^Inp<6r0#&#t z1^UhNw*BglU!xyWJ?wuW@IB*&mu`RmM?Z6W(U)JYZ@`~bw@ZY$*TV2)wlDw2E4T0X z;m_C}{p9m4(AF7GS0j3?BdG(+o{6HIdi7V{SP_soS){T58D05V6ko6({D4E{$SPE%!lZ9e&UwL z@6?07u)rPtV2fGHp?}0vV+Ab&z;zgA*9fc=EKBz%sawHNGW%YLlP*M$gNthqbbUIDEG{PFUur)&Mf*72p^SO3=4+n0RP zRonaZhof%z(4E`u`dtaWS<_$ud;4$aulAj-FMVCDziRlH`bEOW ze8xjIekbLD9Q;{c_n0KrCzie^wC~%_?miogHQdvBjNN)u{q9IRPcfhKp!Fp8&aYy) zON{D+l->8b<|Tb>&LeE(W$+4l#C;f$KtVmhtK#`6EH?UR)>5WRZVMD_&lSo zEpd*4Ub4{CW9YYb6Qn%_8Qjh_^>sWMSN&%<`3{t3XyoUBh0xCfyBMokZ;-|meKmAV z(1NNKm(ptyy8)>n?Iv@jS59)%5MKg|!E3aWvQ4F8Os%6e0EFK+W}#xKjfE`@Qr*t6 z)ZbhkH-6V;;qAPA1FZQflP7a=Ag+Vt%aBn2965sT=SYZp1BYVMzNM}@r_jy>t#jya zZJK%a7|l)xsNXrtZ5$X4xh#O+$9bLp7xK){zf`}aeb}q>nm`vcUHzno_@iwf@BH>R z+%!IUz;U-;{r5fk1;KagWt^oSF&Kz3 z>!@qoKjZP+xX`gXWHb-_t>1AaCh2`0xx*XV$+TmIKUO_+k#DIV zKSSos;{iyQy0`Ihb)IV9nxhe|qn{soKd{QP{;mmerYiw?Z1OxHD?LMf)ss6xWMB2_4?eCC0!sgxvyku}ZZ5QXhC86hxYr z05C~k1mqD8m)5q5t394G>+-PwA(gLr~DUuqh z`=3LQvCl%wOCNFA|CV@}`y8T96P_OCL=KOi)Q^jOENsZBK50f$qFJ$aROvU&R_ixH zF@)5#1{me;pr4wFjeOMPcYIJ<>nWJ_0dBVB<$sx#Ok2ZMlWPIAI-51rZOU8+?q^Gw0y2)J5@R~9Q0TiC+=Q%+8%A4! zDQB$*v5gU=W&aD@4Yta}B!Qx>1$de;n*kw1fMvAqZpbP1cP`PGd#N8v;+fO<&b21W znpobgzv@4BT^OfDa#QxdU>52VmoE2cfBOb$-I?j11CF6A0h+r0<@k2$ca35&RqGz% zCa@FSHSf87`-$&;@Aj(ieB3cPA2`$9cip@F@89#T?Pk5SO70oy(!yu4q@CSJ0o2UP znEcrv+`9eg@AJ!e`o$^zZ{%{loXF$!zmv!C@tF_N%cmdq@*<@A^yglvA1b)PcjoL_ z^wfhTw>Mx3LP_iLeXAWaKUsC?Yi`|qgT8DApW?cHpS=al9Gz>NX~iT6Ty!#{Y25!a z(AK_J_*Wn6+-K_=;?oak8RYUB{p_4T^Ktrd2d(rK>7of+>u+67v2!tfSrTK?s1VoS zmlm%_{oGT55q|3DFT5Isg(LT3>Rb5(%ym6~@agM{C|uRgC)eg~Q`*O&r2g*l4oew5 zxo5fO!1P%LU$2G5fsB7)z)fLXx|3z=ZA5^f;J{`vq)l-YJcdcfPC?aRnOvm9BGk>X zT^D5N%{LA~9({0R*MJL*a*P9yXIy7FRvaAO{TS=OOybH{wk3UXj{1`uy4KoSurbV+ zrLSvmh~d3scCc0A5_<9y&5B9C=8P@+YayJ=+N<9-L0gNHSzE{B%-RnSF9iUjx2|6V zvcbm4`SEzZUT>ihx$~~wR*wBYuek`^o>t59=G?YKU{XcU(x4xwTB5bhr1< zddVfdJaMMqyr5!J15Qq=VUwKW@n8P71Mvtru`(cYJpSnn`{9+=&|=guST=)Vx*yky z7R!wHeZ~!iE{PfX1tE7Fm85Le+$NLOeJRJzV~%K%tdG%ZRz+)4{E_>=VtlIB5O_Z( zj@1tYF1>ZmeJw!R*DY>sZ67hNi|X@OBo_XT^WHsc{rmTSdeXcjJWX}DZQ|rS{W7Rd z&LPMHj+u*R2g+C7Zy#pskip4=M!e%<~)V%oGTZ}$Y&oJ%~c9C&77@2zTl%Ve9>e5rH6In8pGW7MI_jfY*J0m~zUGLi7 zrf-^)-zS8Y6qZAjJw3D4l?1owulhwoq^CZA_P5@6@AjKNd%a&Hi~ULC>~qfCUi$S{ zxM#Ew#`KttcJ5be&HWE%=vTx;^a<+)Uv}7wHvZ}F-eOw!Up$P?*|~bQz5E2RA3n6T zO*1lweyxmcymr|Km(aEWI&AD}?hN7anOT7BeUFLOy)0%Jj+4$}eGsvqH_{t^=X;wTyW^`9zNYg;vsO1j3X-&s|Q zbIF&MvLNpn{*ue5J@ZrZ`x+m3=k43~{_Q{6-ti{=#Ffh~)tbDb=X`LFSYGZGuN|@A zOi=OVW17cy-*(&0_ipd|uOxIRTBJ+GxR02zy9r4 z`}bsGIr;jw-|Cl=+0!pxpY0&Vzuy0;|G4wVL!aC}r_Y%?+Iq#6rlv>QcAAsb?s|RC zRsGXbbsW(zgmR)cy=2Z~lGt(o7ZF;64d03df_%oY^>;A^f(!n^skI3rBt#`Un_R(r zFyYtSn#_s*z52Tj?2UyZy<(LS?r0*WcY?-9M7FK3<837kAjgJa9@G+Toc1-Z9P4@{?u98v0c}0y+!_tAoT;IGBj30Q zsOHD7rnh7>XqXKue{=T`cI;jZ`S|Pk9}zSzHfeqe$h^^KB$2P z^muly7^BSnIHvwlBLq1(&*@*hP$*Q)jQjMmE%{{S?P)Gw`S-5co~-YOpo@5TPBo%i z->}rH7JvD-U3u`|2H|B>{`hzL6G*A-L?R8Z?6yfqX~51Hl2mBw?m4~ewt#aLBcP%e 
[git binary patch payload omitted: base85-encoded binary literals, not human-readable; second literal is 154300 bytes]
z@nw587z1kWi^s7`a&r6srTB8-`#agty(i01!Inl_ABxiHsbz9Md)r6+3xy?CiehS= zLc}Ix{3~5(%?H$NFd9^ipBMTSdRYyZB%%$B<4o;mczGg}aWl<12kKj?9)(`Ks0eNM zehUx*7Z{^{eyMkLYO6F+N_}I{a*>7h4;D|p+l}!* zO=s{&_oR)>5MbonJCobrjNwl=#6^{W4bmX3Q-)>=l5Dy-c7X~BZ1)J{n@8SURac79 z+(EpXr|-^WZH^4324CO_jCA|=g6HI61IBfAo6dobE}2C}VaJQF<7?YSFRK`ddYVYEmT;-uOx$$ z#UtX>eEYTHxzAVv(DETL8kJMcTD3N1b0%fe01c;SJXZG~Al0f-BqrC56^z9P0JXme z{k3M0f497)AYupt0tFaw4LA8gLlNa!!b@I2-N{QBEVHOZLhalmP-)cRUVYBVI#%O!n>;^HK8 zfJxm`pyb(g(Z1-(llZL2S9PUj+qZFRPE_O#f5DbD@!nri?E=8hQ)cvqldR;^N}v5q zOTD&y1AViu(QS_FoZ;I`;dD|963gKkC_rIt(*6uWT5QkGiMiNhChS$-tGo+J;D&Km zO0>;!N8p|I0q~UnI0#(NG2Xmc+Gg`b@xt7iz+`%Tw$3P-&-!C}Y!F(L^9K-B6TYXz z2&9Z0G*UQ#@W!7ryUe%fUpy@{nks{T)2klAEGs1|Tw61sM&FFCDY(17ld`EUGZk!p zSEQ!6Cl`b-4al8}`+dSiDqVq`2GaDbsZs%^T;5KM>m#plQn1C!Z53`Rb3W`4%oL0{ z>HuL+ZwdV+^dOg&-UJhVYs5`J|_MsHVR^XEXiUy*!29Da&u&!MWji1HZnVd# zBz`~B_)RRFqH+HG<G*S{MEE z^oONq6C0;J~aDrY_-7Rsgyjx`QF?$YYBro$+kI z$6)FZ+c$S%gG(RekF8ID8G~lVdKa9tC82E-)Qdt1r~y*-hwjW#$Y ziB$Rifx`0-7;t$%R;84vub9G*#XToJ6<~X6$ z+wJE#YBqCGR?YAd!LnvYxVkr8+Nm+sd^-^@me6s($2ex*=&{?aSWp}J^zy?$9TWVY zE6y-m=A3`mk4?2LdPzhXri$%faFfaml#s`duqK9^1CE!c10H=z8xV@lkM zbvQMc2R~~r2W}nH>AqJ`4+m7KL1|1b{EMxt#YP7P{a40SA?w&LHR{eaAkb2$lps*m zby|rMD1n6h5LbzI&PkZBs!GxofLBbthMk{gDcDRVwje&G^fq|mP|=GvMFE(_@4?i& zm-2K8A`!x~tqV3QFM&J!`sbzqeV;RmE}x30n^NB6!P%R(FS0(b)V?39$Hz~%%-CEF zL_Y^(x0GM+i%Uovw?aJT$%qxNTIC+>3Yf|Mo?Jgw71eVUCJsBJ6dfnSbx$c@&?q|d z8(m4>G$Je6b%vq6@4k#%`+NFH`V!~0{nNP<+YX<+XRb2rgJtA_si`=c#Xmv8qamS0 ztYh4i4~(VOh1$gZAPCGZdg*M{$*0AhAn~!!+E1`BT$Ji?R-yfs#O?&Cl9MKLe~_f^ zT$)$oevKvy;P*AMYJPlgr1>@&>m@QIj zV%g~?LgaW@Kg#i4D)2bVz~|F$+iz|L^R4zOwBXM>TG_jhu=@5`#(&!cxJ8eNYioHK zTIUPhnF~3jSLd1^{zK;QnCzR!<1NKYE$v<*ZOk(|Xt#=T`oWmUeTctQHWYtD;9sok zp7yEXO1LkeHLr+G%j>;ZUf)tanCn)a6x>ic{f?_3;}@8QiuR^UH$i1O>vkpCk5f64bY^UH8qozL&|8C zQNoLvWT3%S#nX97&K=~uea`<0t}m>q)!EA;=)#IvLDS-W9gYH9?AxEEO@duSbQl(g z^%GQ6tNfXIDa+bp#EHENu-dlME%V8og4eJ^W3Lpy47s&{Gx8A5TA&S)xi*NS=h}~Z z>p)?`DDNU3L$5w1&HolpsK}dIj~bp9TvBgV-Gz4zPF<_=1od@V4YGQnNt>`fx>Q zA>Qs7osxhe?DQ>rGmGHgl~bQ`#2%Xfv+;8Q>KQ_j=l`@E^HGZYKY{iC)p8^^Kli|} zc#^?ld+B&l{{ER>FRzJ`$Bbc4vK$9Q26I~`-9ZHn27{G&}(@vZTa zI^CxzKCV0bxIeL8Y3s2rKfRe*dJt^u6F24mwo(>fszo0}qE-M4ZWW zd;FQfj@WXgc%UzC!tow1D*5K7Joa%4$?Tc0S5CFJrupo-4!X12x{_cbt zYe=oU1V!u7<^Fn)w(N>X3@54-OYV|dID^(UYwsTvBv7vvrCMMxlLHf#=b8u(^fp`} zi!DI%+Hvsd11jdc>W0b5LY0E=76UQR?%Vcy6Das6AKOufEXU`gWi0vmN0b3+5Dp!5 zUD@_r+e@xlbNd00^+&TeY#F?MD%La+|AsOMZe3O2qMTeyN7%?UJz2$=FUHvYbOt?> zk|>=|IX7mi+&cIOZt;XCi!F+CIXixPs_~rUZcy8(5`YrgD7EaY|2oi>ZFXJ`$h*61)+i>II58FT)b6PsLGyA~T zY%u9L3HJO7k6z0Nd07W`c1vRNm4V=qX;tV08|OquB^v$b(o8!m;B(TztLhVxsUH=?fL_hTzBasp=%-T2Ik4c8O+eNQ2E0Z$V9q=`md#* zq3OlkdcL)kLxp#tkgIVy=+b31j?p(0sK|1x#sa@*7h&!PoOF96jgfES#*9uKPZZ>h8ya{Z{If#orOH#6PL zn0vH^Sn-qjWExw;dn4}uqvmf~to#nK4Vp|XhzaPOkxaX;d*kdm(K z2f>FO1OMQh{X;q>Sg7%KZ@2oxU4CJ8zAVKXy5}*yy{LG0dOyYC=ldf;t>@iPJN5W= z)~7M7ur#R7xop!r?sO7At!6G>DsjD7?zFE`OEBp8Q@Ss3Ja#bTKbdudZY}SG z9H=Y$ktTMpHYkEYkZk9Y?w+fSBSjtrxwzO^)iuknE=o{~^Hj>#4~(fTzJl{8hf8tO z{({Gp&bpe}$(!gv*~mz8n+V6JxCs^y^fm}kWCxVl z9Rqz<{OE_8<6YX;A^;w5tjLXBSQxcDm|hn|)Rrqux7vp3!#<)p;3-S_d&f}57598h z*ECVvPvYI2+#MMfVyjza2veEDUUzi@JTfJ*-rej>o8G&~mGm#Jj#rq>p`5B8I*y9H zMSXR_adKGN@40uLzpME?EVX}JSn?Kn>(E!vv~O&VrBB#DT=fR%K3B>1MEc&yvGnhv zFD$N98#;;d|El@t)B}%$wlu{su{4#Zyd{Ijla5s1R`T!@cL06gez=wb6NqrC{EuQpaYGzm!9uNJiM(>#?C^G_mw})6>^W4Qy}T zR{kADmdbU1eGsVCex@rIiV0TLLPnDW#jXN$BwlZ*O_yY@(A|}gaho^+^!~om%e3uyYz$;19n^i-(+AjE&tTW)zu3s-dZOG zNMER+xbm|Qg|{4XlLiUMr0Mh2jf=avUu_>Xaksog%rU;d{zc;3QQoI<(zUi#Ta_3l z#~_brOc2aA5tXMGt>r8k1n_|ITOWQA+2w=RIeWMEd764Ox|MjFb(hloKKKh#3;t08 
z=BXHkuTystp}kLh&Q1NB7@q~7*kJN#^o1(rAgb7BZ7lq|Cy#n#>BYC>)}OJKoIazr zWbCT4hBcZbm~r?p16$UiO-mbmobg2C>A)tdp@S;|ZoAtUeOg8YIU}_Rued#i{C%LE zMn2?;yNWM2NO|R5xO%9dBk)Z-Y&-CO1MBlM)XhQa)tBi`Zu&5HF3=daKDhTV%xmmj zGg+~P(9U!f5ynZMYH{XKyybIVcUQCrS0@UZe%m?mg>&O24*!b5HOyRw`Vo9(8%yq$ zhs_775S+go(>)lJIhN5WC1-1Y>pT1!CJ0QRh)^A4)EcMSr{r#f8Uq+DaWb~^esS*( z8sDB7SA$=RdEp&@x+<}%IA@{42~olXU5yJ+;wC_42rXq4fj7M#tPW z@~dcI+3(N4Fuy@jg9R5bps{bo2JJ_%2+Sg zG2`eQv*BG?WWQmm#~g92%CqOuf4IC)Q@h>Wo3I(-GQP+9m{p^YeMnBu%A`}_t{j^$ z4iWWF%ZD!JsJNXV5aR=~g(sD1?`vAte^^HdQb&CZD2Fp)yzuTf)_-Lev7);&lhTaGR(Hu2Cub>KA|K!{s4y3gmP~#vjPS;aZeGfv|uTQPK{6kR!PE1HeyZk#m0(y0ojeBJS{FcAW zyp?{y8&{OPu=N=_3_vD75^jUzhD7Ek_vLzXl%CtZT2Ms`q}61 zioz>wBp5h8nSG_``pMq!=#Jlc480})AEmtcE}z{0`4_^7=b{Tw%>Z)?cql>4l44{|)6Z?w~o{JA;Z<`ZZ|{gcHN(xZ**r`n%A z4417p9IOP#D`-e5{Zd=~WW#rFKIfjvOiol1ynrN_AgV{>79BXLKY$_h* zjOVt;c1ZkROuc6~n{WKMZPct$RjU*&MN4h9SFN^YQG08}-g_iQQMF5Lq7<#Is4Yfq zN$gd7#oi<#vJk{3<1 z!a6rz1V()Fm@x_X?XPP`7sibZ;4J11H*vY07 z#NV7;$hx~7S68K5<{SM!AKl7i2b{cpT(Z{>Aq^u|7z^B0GU-CoO74S|5)rvVaxS(`QhWYkb)ugwUq0KET*Ri$p_>E(W zGcz}kdTN>kB#eUccuT0GDI?55qdU7tJx`YXfa1|uf!M?EZ@=_a1e3@mV-aE$6qFx~ zo({#b7~6B85B(4~1-s@hQ=ebE$}|hy_36>B7lAFWgAqN8yOgh1e9Oz_|1s(P+N0oO zv3$w+B0i`YH(vZy^tv@q+b&^w1vB|)X9$E#;o@iOG(M@cfA6CGWOusyMu%b;{d~bH~x=%xe{;EeP!Ib=fdW zD_VZv2VRJ3%+W+*VY9d^EYl)9PrP4rQyw*?I`FbNAeE4$-f(*X4caD4v+A$R=H<9S zEK>hIS@U5ewZe(^CWZulN1($tV{2RhzegQ72nl60m@0ANR?l0m<=P8FgWc~m-NZ+) zLTm355IPNVv%1lpBxe>|x30$jiOM-Df#irP;=FK)v~}rhsI85)oX1tP70YVw#>vbE zN%U-858;a4RKc(dRWGg(IoAL5^*53%FHIMAzAoclWg|CHs_N01u@(I6h3IO&=G}ck zqe`l7`0Qpov!2@BAEp)d+s?abs)7jK<1y~jKBnd>m4Dpq=VY-kl*qq_u(Ghua73Q= z5dXo@Bd&smz&>MOOj$q#;o6Q#)u8` z6ZA1Uj52`(m5a+i$Nu^eS?qaB+uH)hG{O#jZl%>pW;}|Xy0_T-FJ=oY5j+@}<`TmHiw`S^fhVd$k!%Be-~-vyow zjm>*4#ZDfKO(hROBFXGI%ReH7cFrI>PE#wSs>*=IfaH{xE z@e5Ny8nQqN!mz-y9p?bO^GYD*ogKcll_MA?1N{<3@+Rm7t9{W(fVZOJRJ{jJrLR2E z_@ z`fqm9f2_UcG;}uy%=T%-sz>o%2jrmSf^wdZ%@)(i8sg`=?YagB-+8k|@A#ASk1H5* zp@6L7G48e10H0nHf4;utr8m#>I45{9gDLr9)2J=?bIpt35BwMrYYYrs9^l~Fq;Yf% zhBIov4>aXDT5MN@vqZZOHYpX@W5-v|n3;|$dGUm|yqzphO93+c%X?3Y5lVA@f7211 zQtBiDQu;-m2PgojxdhpJ*Ijt`$V2Rnx(Sv+WMBI|U!(P^Our&Tk9h(EUENtVnLm>j zq;j@lM5j0RD$KQ0#<1D2!34RJQ;y=m&nM(5hh;9I4XAD?S^OX4r%9lV5J-Pyy)yda zeW>3z7HTx52>znm0^dfc|5T~Th2dhfsmenq6sEg+nzs?C5x;d|p{xg)9DRN3tZPIL zMSWCwAU8H*TPO(@E9oQYXU_){@3EL`$l=pzk2`2qVs>41^^G4V!m(trn(WWS=VA!3?`Ei_b)TM-%eQy~J5+F z#CFW#f$D%|%Q-aRADlL^TZrGE1r)VxaLj8dbmA|0|4nBaN_a(XCA%BwJu^lNjPZ44 zw-*p#Q+oFwjn3~AE{X;1Wa3>phHstqI8<2Vyt=n3kmcpqUe2>EDrO%baadAx2-$#j z%B*$K(BoP^Q|WN0Bey?@)bo11s}Jq}1KJ-otj^nW;}dWIRPX1XH+Q-ANx5s`)A&l^ zHAEdXpnhnB44{J}0$eq4lDFH}SJxuo=SXlYWYZ>%E^#*p`?%1ZSa{;g(f1o}qqf+Y z{COUs{&6Gf_K%MU@iQhu#C%_q!2P-W(D}c1$(8`bD?vvA5yH*ZWe(nlf zU~9|vh*9~f+2;9_J_t;RToI%cu?d#ofZ8pzVIzDyOoXE8GFy40$Za)i4s{)%SWE=*UfKBx#0+{km`;w6vQgr*;x`s(5o(O6@k zva&S`bQphH12!Q_Qp543b)+!E^Gpb+9c;&4>^0*@6&BlVj(TJqqz}%<-9Fr0`YK?f zpz+3dd$=lx=|5ec3(&eOL9?(g;>$e=YinE(OOz>0H^S%;gzRAnQk-E@zc2l7L%cVs z-lO1D?bH4Uy>Djl<-t($)&b&@sTx9nmQ;?+dqmM)EBKs2m>$InVntA!K0CRPe>90py{sv4i~^4~uU3@c-&E)AHs_RR>E5+v1br zTSnht?;Q87{|a2#F1u9bQ!tfd$oEvKAn_Pk+GsQ6Z?hI3DR8-4ry>5Qp9em}8SdV3 zq#_1W>`qh=rHYL^VwqBRUHUJ+-E%-3lUXG-)d;HS2^FzaU|fDBO5C4+pZ33 zL7Dl!HTeJBc5ZHbGLyFD4Bd=1k<`fgyKqXj7rXDCFHxG>K2W$xpfMqL4zrB4*4;47 zgZRXy0W5ppO5Z11wbp$LJ*G>*{;%0S_V^?>VW%TB6FK!|+>?f&K#%s6EJ(3o`0JW;reC(+$uTyWIu43%h zojo}2MyB_oV12_;;Y{~~h2+yu*3LFE2c!x1H~dXt=j2%&N=FYyhjvr~4q`8O?j39o zP}bit@w#M#tNI|Oh66M+)i;IbJ=@XiYqZ^gOv_%oGhbFy+X0VrJa_WN)YMTeDof6d zK8$mFl*maC(RT8j2#SF&T((mk^KV&tT`UF(=`4pSw)*A;tU8aYi`5eUafTTf+?Hjg z&1Uz(lwPNE(Len!9>0m?l(R4IZ0w@e(pE$q5Z^_zD}8g%h)wF6p7d3y!E+JxT)jb* 
zVCfZ+A9Nf=kEve9Ot;w}Fv0*`#$*+#Y+hHaagIr4)AdQ-aX<0I9TwO|$v^xC=B#7h zRJP_8(+7$d!bT9U3Dc60=uD1l>eI7q-j5O@50e9`MrL;Oq8dj`r{Mo7?e?qUrYEC4 z|0(U)r`QU75ZTlJRob6qcZo>_&vLwU!&85-b!*pltRpVPiOgPmzU~&x-KIYJto;#p z-(}I^)p6#x*2Y%pr;DYqm}m+z>&V1jacjf+#A23F%vy|(*_GC#c5>NfFlNo?`^{Fe zFdC7HC4Qwpy(c=bZN=n++7AC5hkjRT5B7b?!k2j-HP+{Up+bX#sbsVL7nTV{729^4 zRD6l_bBgU1FfOp({)2|m5-PMVh09y&4v`PGmi}K%KbL=<$nP*?a8dBftQf%ya*1nD zDClGdm0p;?l2Km@7t^0QUK)vF!c5cVV`aM@y%_nv^P4TwvUB@#zJAG(JLk!UvW2VQe&|nAa5cs*zM&V9*+lDFs)?YvN!$DGcrclfVBw{hMizhiyi?O5}p!5Lg zUTiN={w$4|*j!7>Rid?eW>P|xHRtr?v6r@bEy{Q#Z#2M>ojQ`K=(u$aHsFHcwE*4B zaExjItE>sV{}pJBR!yJwuDJVX0~-XkhvtQFz^rY}I&{=9T1N3}&ge?fe0sbz;65~0 zK494u!Isj)M1KP~?8$^YUgkFm<8&Hr5qS!lb)T*O57cwGa{EFuFB>p(c=6FeQ*Hv_ zv#xD|;Tx5YaOO>!R%Q`i>UqitRJPPstME1(Tvn7ukIk z$8)u{TKD>iXz}C+yTqS0`8-dKLt=SuNBtN(1aIuhdb1D=E1U3;6EEOQPwrh;c@BKT z#JNTDlqh$_wxFg9qKxWliBG%w7#aJ2^!Bk)paQx+j{vY)fB!b5qdpn@)lZ=x;Vl0C z_1FD^74bAtbO`B0>^{xY7p-CNwr3Kb?!K+}`sJX2->1o6eAEP@WEQ@lA#=kDrRbFT z|K(-<5|je$D&cNeWQpKX&j&f-njJ{cKGHdgE_Xrn}iZMLVQmJQMRzh zmy-wgtv>Ae+_`tldE|1+W_qR3(}OBC?4oH$y}@*o0gHTNmOj(jU@UYIFVciNIf8fJ;oBSCds%}_A zBisNjbsBl+E-@A7Lr-t<{N0N6hhSOB`8Uf_`EYQoV#ndo|KNF9A-DSI=r#_R$am(( zz&p#BX9KrV`#~4=Nr$@r>=|#^OvKxv`~Q^EGY2}h6DRt@_DW^Jl?f*oQ)(LBtyh*f zkC>VBB|g5}nv8JfOka(8=n-;7N26=gN3i(eIhG)NJB7%n0A)a~k>(7UgG#D%2E!D8 zw~&MQ{6T1(+y9~S{L9%9Imb7*!+UZdMk5?el&nxB0X8O#Z22fM4pLP*Px(l=9?pXpO8WW{YTHZ#%Hw$p^q>)_@7SmDjx{pccOaCS2zH~YFYB1VX zHTSlA!qE;loXk5s4h84@A3i@dA0P5^8%6MiivSh$p$=dJ()!axP*XGGQ0Su7XLy+m zX3}kY(=9S9bi+F*U%(soW2#BhOHU8i{ zwp#SjYTZs zbu9;1h+OsNdE%l7)LEf}QaYaFI+$z7)O zBX&SYi1cyV5U~kRqe@wr=B<)-5KwEHWS~pmr~e|YZ(Lk{;V?EOlh%2L1)9mY4AZ?u z^@+q+Ed*1I-hbC=@%3anPK%{X7DwY*itYK5=O5)1ilVB*B<7zvztaLj0m36ylI#TY zXS;+p-mQ>vJZ~rn#BF!gZ3;tI`t8m3vf8G}v>oERTE#>=XEO|Sfk6n&JX|d{H*z0A z5cQZs5e5N-?#?Tbbfa@R`%wl}kXjc2eBAio9Qh8-Y;V-SO5(G0F;L>I(jFNGr;R-= z20%Z(OMaMQ!z`l~4uj60ntYTBsNpI8;OQ;A>M3Ew{L+s3#GlA2Lu2@6;H=|2eh$M|&nbQW`M-o*?UKX7$= zn6Ug)E8`8WHw?uTze<+>iuUs_zjlX(*dz6`n_AvNnXyf{9J*=uZad)H!+mx25*ilo z(o+!YuVXLT5bO(|7eSF4=xfC?+3})qO-T z+*?*#c(neZJF4_q-t!hdS2l#fev~yi>jlfj>T&??7SE?OW*(e2?G!AkhrhkE+Ct^G zJCkoked8~e-n()XELv2&`bP@mt1RJyj4q1iG$7l_$r0pVXHCx`Z4H3+5M?Uv+!=fH zGDJtm#o?jBjj>kmyp@lska9A}xrG;mc2i1J+z_wX&5pZbh3NmU%dRU#g>wuiUdg=c z{D`Zu}Tq)B2ZjHE7vwQq&!Dd2@-jVt~l#RwF z_~FQ;O{c~-A%Tn-phvakyAb}C&R2Q_dj|z$d*403Aq(|ouO?RKL2rTWM>OB=Ut1Ny z*H1)6suTCYK10%Ft{>^&0j4a{lWM86n0P;I6%tU(z{%UpuzotdUEC z{A5@v5G*Gn(7e7tfGx*8fQplOPM$uLw3}O{+nAHZX0EO@!j+PYA9WQ@s!^RQx*I=w zQBE-(Z6|BBCys=_oqNXUsD-Ex6TY{yt5W2Xc3K$vhM`#z-i=hl(Cj6`(2)nEVtR{F1U}*fel56Y@0qQ|zop8reJbfZX@A zzs>*Pi!(5MOExo$@wUmG>EsWu$X)lyNNwhJMgC-r=Ea!Y`}gGEWwM=>{2i5KHlZ^9 zvZFW;_Xe6IzkW5DQtlLJ9FL=KnR@35g%j|W&VN|!v}v>?ye|J72#cmEr~;Ti8tZzd zVEo?j%>eRfM0nTW27y1Q0b#ckvHRAbOD6pKtTnzKbh=-IljGO}T_W%UHISKWA&D|1 z)9nX=*m(}T78rEB4-TBdfIoCzBAan|R4fF4iNtR}@Yf-U1p%Q55S|u~Ehf8yV2}{> zr6m6H)|kk}Ux7i`ssa4PzJ~-p7hn2re-vLl0HL4B#P3i;rA%PsX$2evw~9dII@`iI zfiyk-^nQX;Q22fT6{$?UMrNOz8RzFIY#2fP$im=8I{(G)@@HS6gn+SKt+NpC1or%c zE|SV4(Yt49yO(64KR)c>H@YdUai8B$VZN49^xa4}5Iszmn%tDa9qs-}6ji_+pj5hg zVnKhEy}unlUEy_JQ~W)ftonx*KgrI1Y3tswZ;d^7J;Or&&0G8EQjr^7s!D4 zHy~t0DN$RQ15Nw)-p}Qwj=ai;u;%%h&D%UUw^JRUoM|w*w766^Xv`$)U9m{x{=k)+ zYYpqJ&kPKe(}e7{Kloqt)^pU!K!S=%FwK)>IIXzW0m6fMrV=-ybjsa z2HbbZ9}^D^y(aDFPPw7lHw}T{k7ZR>@3dH5Nw2gat@cgf!7~fTH+Zm5=b9nrZ=?Ez za%ENYOhJaqQ3%R=-ImrwM`oQcCt3bNwOxd5(t`$L{uj?dwnBw&;sLS+>|g`_5#gv~ zJa|IW)8q?~Q%P(6wxiu#KcAbZsO&2Zk;>~c!*%{p!*cYZ{go1+W zhf6QJ-AAL7&yoHEDN^=UlDtJEYihc}bM5|f>;?GJn!u^8vVMwD20|klKq}dH?c&a{ zRbGBkfQ~#@!cJjbgU5lSm=Bm;y8X5DZ(7d7{AsrH9JnGuB1OR?RQm-YL(W2k1dpRA_r^X!z^2>L?zy6k8zi>2gsE 
zjJ#lx-QrV?sLil+S@`I=vY3^0Mch87Wjs%9&LXRN;;rshK$;Qkag|;1%-bOYyWSVh z9Inlip|8G5`Ut1j&eNmkK8K6&TGkSC`CHm;91!&{o}T{g8hn@D4kae@FqnQ^{`iU$ z<1-!?dqchcw^z42x9`^i_zo-n>ICWq_YVif-%d%5oJL48K#lhjqi=+k(Lsu@(>^H} zGnk3G@}yuJn?JpRv7gJfql2IKa{7DbNNZu$~QPYH(_EiyVRP}0=si*abs6xKfhwNTCB79=06XAet8?%RYq_PsaLAMK!R zhI=oJ16}~we6G9~L<)4}ua!q4$QIMUEOVVn9%e{4GIz+i=EyfN`p&Z6NvU`5(~&Jn zpli!|X|hlg2z|F*Q^xeD4y~|eb_HeqsnkAjwb!S~-7M5Mkatc!c-cJAGy>-x3usKS ze`L9Stp8=DarYtHekq$!E^ERALtO)VezQ>N(pdLF9Xo;=b) z=SNX(wG4{7K{&)L2NN*~CwbL`Yek>w%bUgP>;x~m-YpWvkXwoK1!MhQX_hBfj1>Nj z%Ao#KkK~n)Vme{WG*rF)(R!bnN+_38e5np26NpJBAeQP|V72o%2Sy%Lvou2i(Dk0D zi>X!P-$YCL zpmGheeDg$JQNS_dY2NVpdAUBt7pt236`sXV&rS3w*T0ZDy6wO3V?L_K3oj;w%G9KE z+jl;h{2;N(Re8>!c17%rJE^p)A4*RCa14*AthBQ$;%?{`9gd5mcsUdsP-#SAoOjy% zam~YvUjRkK=ghE^l<>h!A}TU8in&PTS%KFEHv~U3ahT1jmo6IdW!SjN!j{?y6xhoY z{GNdPD+t=R6a)0U!JYIPoEf7L^;sMO)o2EFz`FLjMZ0O9&xHL+|LM;yz0lt`xf{8U zvSdB~F?m=i1XrzSp<;SKq&K3=j^1$k{g7$?m9q_!ux+!dGV@@dR{yz}38Gno@%9Vc zeYtY(v$!4nw#JN9S1t%w>?jAp?#U~Dea+cNH<*QOP2RtgnS7b;g(6+W;M!~!Co`;O8~5Osf*Rpj z->k93W@KG_^#IoDxAjHZ$+V03uB~of1tJgT7$ZXIh!D)fr#k--)2OKb~d3LN1v}zSB*9?>rFKSoga4dMn&B zXI=lU{ zJojvEen$Hfb1TlkXo(-OUdmd;rGs5Nvd(gS0?==)8y^Zh|N1BlvXL=RaP?!$t4uK{ z=yrQ`fVDqt1PR^EeKaTe2sM#!Z6eru-7ykv-0p_%dVk!7thKniep-39jNFiP4F+ys z5TO)GgWAJA4)OrX)^k|D$ql-5h*9ZK>m&buK9GMH_wKzP3eKV67M2E zH&hW!%vW}yF3A{~#_Ag=KC41KB%y`T(r|I=Z<6&WO7eeNx)mDbIW0X-)*bCcM;$Pd z7E5652iA^4$uQ3X3CI{))XII4J3Hu&AT%|Lk@eoaI672%W8h2AP}3R`msT_JYBoKqq(Nk>A9IO))U zCdBOpgx)%@T}o1X0oon0?g6KGSRZxqCVC8n@+!DzLP5?vyylfGlHJBf*T2pBfwv3k zbP^(g`Ube!!1y1`akyZ9#ylbuBwOBJ9_ct~lEFcbtZrr>T3dJ?6CcHU1j;e5P$$!3 zsE-wpGIJrL7+|u>$x|q@4QVUEQ4V^G8Sz!esA6tvFb1LRVQ6NYzc&-EC0<47bKdDV zT<8(=nKo@9I8lxzPa31=D+lIH>T0j3cn7^(Aw6=)l%2GrLC-kO(CD2ZxJ)~q-|{7{ zp9_8R>sLoK14d`fw>w}_sgj&8v!aET_Y?rXVuig+jqO>X-xh(;~odn zWNSjG?8!LCgV#otzK>h43$vjgx&^;>pRe|6gg(u+G|S`+KZ{X0&nxaTOlqM6rWJ-C zo=>(ui9a#4^yz6E9!q@)@!oJO=u1r|Uu~v5e4ubPnOvu)7)5V0*gX6VBf>h$v380Q zD`?FNSTM-Y00(zkWksU!!?9$E6|U)U%6c} zUqPK-`YsNPe*^nuTrJ8l>tA_SMLf1S*zXL!v1K2Ug`c`Ha$3rXT)z03z0|(BKKEVn zo@;waoKUuVQ3AJ}W_;R9JJvKoa)MhX$(dLN@cEQhZz>&Sx;ZoDtgavVq|3#^AmVf` z*U&@Xlx)$kqmY+XK=Ld;#G5SV>u%1*5^K)tJA7r?p(oTujea;feNVBrC>`yIoB5#B7g^Wx zZ>s541C`bmQ9}|tsJ;d1J&M(_NRruX-r9*uw$_olNZ+&*1Jy^D-x{vdlx4Ov$&oo~nhqex?0^X4mOFXEn1{+anq$uV}-thGWf=Y_v~8{Nhj zFSCm94`~v+G8sP4>1^jaCqqRxbcWM!?f74Grsl0!tbTaLdPjUZ|9CYqjAoi__~c^V z^X(EXKC}mJFL!u=5YUx6XF(xSZm6q0!Wlr-lYUKBiIU{)Z$h^X0@Iq=#1sX4R|NcD zeQ%R!igjDpTUs*bycSLzMEhA!t~}AhPl?s7{U%N5bPti+a7{5oe8D9By3>#$;INv7 z%$>8`ruEp&E^nD*b;pFJ8yKA3bTT7rL`G{Q88APy(oVSSlLp8@zM~WGZL2qQG45fCZL8l%S(TDHQYdHy z9$9WM_iT%K(R4VMcipy1=gux-LvPMD>pL0nm0k~0L2FT97u4n@hoxZ05m%N_zwwqu z<*_deLB}VI5S6r9iqA7O9Uv^%KJp6C5*N$NDa??yS|ix$nh=aY*COfh`{sT*BYNUQw&}b52I@!SA%_8lz#pSd z8j8$d`d-OT?0z%e&lOP8IUb+9Mwr~qmi^{AUU)z*keQk_ke^Qz0SqO-Ubg=6S8|Kw z%^D^CmkyjE*r|VH6ES*Jzg&uCMaQWyKH@ZH%og(yvt(8%oe)BNMpCw(j38p)zx8XD zwzN61v>uMv}1M(l#!Noq~@?$psO^EV+#D%uE75%<5G7U^mz1LTx_g2~N=dhOy=+G}Rwfd@pl( z!9G1;QCaL8!b4z%QCJ&GJB>NJhy4*TnY5le5_O8Mhs+%luOqWCYkl}f_|Z!;)T>w_ zqx_V+9n%U`rb^cvUk50**`}(;$$R%?z<0G>1RDpCs$1H?iIy;!hlpRb?}yQ#AlJ=j zVClCIyJ3T#-d7b)7&ZyQ+f!!h-y~{(Aex0+BcmQa8(eF>?$k2M`3;ln%0;s}mtM0C zc9{$71%(HSc{?6IH>heD2YiC^m$5NeYqOHQzL_GwZ|nNZ9hkLhEzjL2eK(7PyI9S< zom*+yxje_dsak(hVy5Rz76?Z#B^%DX|6_zuJgqgzKq#D%jEKR5*cIxoFY!N~!Pmaz z3R()b?uCuSDq=ggif2P@T4=x(KW7{rtSdr~_g}(U_A5V$lpm-5Dl={62xLBSSxooI z{5)!z`SVvMz2QUS9U~{Fv+Sq=?Rko(X=5;ilCQU#!IjN=N&SQNQ5=0~L=C`+mGH>A zp!TmT5mZQ^K04RoY2C`a)2sH-kMD>R63t*5YiXB&N66Z(gAI<7{O+Q4a`!RcwxU`Xvj;QN8ubRe23H>q+E`7&G1eW0?^Q;SKT4r*p(Xb*a; 
z9{}!>Ow-t-j>cc~2|3|{sjs$sQEWpTz)i?5{Yk*ux$6YaVS6R55<)1=#dOy&DyGk5 z0I-BuCw>sMe~0~c$dZwjnv!a6CGB5xD^1~ z{`Gb`w_H1z-<<}54@C-Kv^Q$NNG!@ZCk=3Vd!$d=to85qV*?L!1>dcZjAVPvRB)5xxBX>8 z`p!tWv(mM2>OVE|M224c+G+QId}z=hMY1V-&>0#v??juYax{L~*ksk%epH12-F~@I z(~G|!6!IL?Bt%*7U=4Yvt@AmOm)+a^--LRTnb(Mr4uFH@ZjpqTT$%ta%P+~d)rZ?mhK+7~k zF}>o?sxXSFw8fg~dl|*z{0eFn_p(P2DBH(Ki^3vL3a~lDe38Lpk_g#Yb{z>qvYE^N zca7264s{wp=x!9&Jnu!~3N`L9Ik5W-GZ0#XBkR=hxjk#Lm{-BGuPp;W)t{@asqU{5QAW+0f;cT(unk4ZT??7K1)X;^WR91@ z3XzIJ6rTkmp3k zzw1kL|E4IE)qpT%ySJtH@gl?*w=P23xm)_#`TB#du){)<+|JxqYMDj2WZK<~*aF{z z&N%PLMJ`RC$Jnvuh4m%e9Ao|P8I;SprC#JWqNPg-WpyC0bRZJDmsou5q{|w8%0Xlk zHHoc24a0|+1z)=gAe@|xRYgf=hjMSg4yN)oFMWVjgj0M4)rld=KMbDd*xmjM`mInj z2y&jx7?OTJOZi*=~ zM6_d$kC?Q-eu_+l8$Z*&)dIhOSE9w`rWM(uOAxFV0ti~$u=>sBbs~QQ0$BK*e+%pl z963AE?PS5rU0Pu)pIo|BM}17#=!V$?1J&^cwKrXsG39+9Jrl2Ez4qf5_6RvmON2RU zq&jxe;O$qc8mB!{4ck)#4cKn-stW@2cukjo0(x0$J?BNa-PmU;8O^X)=edrDWI|G~O`#|%7S zjE~%yx6Ggo4>gqtDcC7uD({WKBRaJFxZ8rU6v{n_dDTuB61_OdB33h`a`c#iavKsc zx(aQ42>(X~8KAaG@VhDD(Bg!vi;}=;DsPnND+jAGHycn5svnx0i@(naf|jM&)>Ni) zl&9%_dz*A}OmvDwa&aoX@KNC;7KY0qWiuh@Pmm_IkR1xuWiETf3*#5JWzGFeNz1eA ztus~yt7)!j2jbAS@$u><3h(3yCB>=nsRU`wetzxzQwO1)fQH}B&}B)OhFTV>`8*HZ zq~7M6FVZiL&pn%i;yHg-p?_ryu~D7nYJ7CG3jRdLjV1}`NUZ$u*UOMFL&0!Cx;hsf z3#eQK{nQN}^OA2qw8Pq|kUy8J+Mgi^luX7&iZ z4ru1=lUP9`Tgc!WgQ-r-<b?IdDqy3IcBD~jb`F2I#=AenK>tZlI@^*`@Z=iB&bA6CHopVcwsr1#n zu%%D+un;;*UX;Q(=Q(Hqt(d4=)TO{>ky4OSbv}|m1 zqm5JJ>i(AwcOu!^0^EfZn=0M+@2J$NYEww7@_BlVH&X=fBt?mt0GR0T7xQ+=4wj=H zGd2kf4O*4IC~1}iUbSvjafI}qmBw2`LJt;bxfG|2hmJBO1^->gjQ#>xnv&SVQ5OYA z-Nt@KWd84PpEPRkFgvklaf-WK|8n^-(Akrft75`PRv7lg&0^2_ex%eFU*l_)l)y{c zItodOYMq4~<7;D*biq|0#!Yg(O@UW2)B4BX)P&!FT#zg;3Ib5owuy&7s z{z(qDe5MMeeJ6Q~U&sg6{$r?MUo2Ug%!3>B5uGs!j^LQ~rqaF%weauN$i!b4WWHcJ zZm)ILCbo<|=J=7=!Z22Qq-YriAu%S)I(fblx{$8X;fs}y@77p^o$G%M{nzh?gjJ|jl!`g`t$w_k7pfrZ2FoRZtTH=``< z3us+K{J%-uv(C&7bJ<|)7pSL69-W)W49N}cC~NuKgW}%Nlk)6OoX+2~Re5m$vsi7bo4rmX7k_DYr`XhQ+<#`Sfx#lq|Wrq-IY-| zLI2d8c5~AA9y(&~BUo4HhZTlmO;V=C=|n1kJ{eE^wE z=x1fhkKb*QCPbURzwMi=>Tzz7CgFL`ZZgd*dwOM?Dwk)wh4jK4@?upmCv9Ojo)*8) zdq?&aQ8`=MD8Tcuvy@$bi6;;0VXv+UX9$G{i|XIMXy~bAr_(PE8STh>o}T+^)Znyd z^L@8-+D#;#MSM{NRSyr5jKlUTDiOA>cgj6(ePPe{YvF~eQVv`Y@-NzdHwuLK^{=fw zapm6`gb{-rBm#hRhCVw-h4Trc;Rj-8tkAC-z%zYiJ8_Mj4*@t&5)bt&d&euyJSVS5 zMIlvrqp7yu`F(jHsOE{Gu^Srt8)H6%KU+ntL9`Tj6K+0iC%+B4yuno|DB86c;3kf7 zhtih8upl4b+hRM6$na3QCYVk(uDN=A`^lhH%CCVsF|E!QcUV6WJX3_}-}Y0zZB}yX zz{zq`q=fG+{=nZQauX}0q~bqG5Iu|7WE634?qVp`&SBHqe!|z+*K5IcpHa|ZoxHzZ zU>p_xfRI+?_c)QjPuq0XHT~`m?kA{rh>Ab0%R#aE#07|(!P2EadCq3C z{k5`N4K@zBt=p&B>@Y=~yKllN^!?vFZH{^GBi8)ZU|QNP;eAb(Iaqbz)m~iBt2cJ# zENL42U4Q-#Z1H#va7!=*aF_Ox-uXmMa8~%*g7^t6Mn_0)gS0+Z+{>;d$TO8OSluNY zU>y6}Yv`;p7Kk_t0AJCMDBtXmv7N=z;gXRZ&git{b|d_(TEoS&S#8yuR{B@f=1Uz( z3r^KrGdKqL%gIo&`HuJ9i@>E1cSJM$2an&&|M<=l@DZ75>w^l<>ZUCf3Hw9w#A7Cl zfiPE`N9Cbj$TV|hWErLrA&z_!B&I9;BF(NqlREokocia@ELE}5F=Br;XT6NT9IAPc zZudUr2EoMtoupc8vsj@&WLo^s7C;FY`ufU?1KUki|GGy~WXIq)%$sLhVNfP3ztrD(4WR+JB_G$~z9=66Fhh z{>OVZ0%fDBqxRS9fg?BTo!p`&c&$vRMJk72N0v|B$1KUdK6!+r$kXR@1y5~mFvC)5 zyL$es13x*MF@s{D;2_4%VQ1@vWa}HmNW}{F%a&UvrrUcqX*Yr==*+lihC1Z-h)ncz zOG$bf2VbF85it^tbEm_pAzlK(`Q&UTl&5Ec&o-=`6CO|H)}x=y!ME{w&Ewx%IdjqG zgqACkD~!-a3}Wvf*#E4xmJ2evUIe*_C&&`-|E@*=*z+1s^ud$UM{H+g*Sc+AOi<{< zuh3*Z6>3VRZCK)IG#Q#)tA(W8TRr)2t`a;dXzVqFw)ST9aL4*?y|qG9YE_Od+N(@H z44o7kRs$s1lsPwU3K$I76<6MUamvqeeb^N%3*uxNy~t*#Oh5Gb~ZF z_(lYfKMT`kLrY~vp~FC)YS<*N62EGfhonwY?H7}F<3&`*<-9z8wI|;UNM;_Q`zrYF zO9FHt3%6zW7eXd)-dLwiRdNMQQ=IR3)IQZQTck(&e?PVyh>-gz#%Lqyn z`BA#X)|qKmc#^+5QGb02_k|9rZPj8Hd3_Ck3+%Jk%RFpK>IlN!cyL;p|J{ax?~C-6 
zrvy#*yhtajvz%uvDR}KQ(SUgdq8uBAtxC@3a)~-}04cxl)?+L3A{c&&6IZj6ozlwS z7PqW1jhR9kz{Hh%s0jY$4$Pi8$aal)@Ur}sTWVj+xqbJ;XQK8CaQW{c7xOkvfJLcl ziMY>055GO9hEc@-(3Hy-`)V-WBh!!bBc@;RuyB$5&6U|ai_WL9xknXklp1!XDUhgo zWouSbL|-c{7I}TDxn?dpulxT1gFt-0=E%-Wn+6M1PG(P^lZc4aWwn0&uCKq9n82AxmFNhr3Jm4qtByAkD%|`4vTW*FF@IM-M#aXj?#gdK{DnXM2g@gKxEKT$=zCddOj=r!A@@(-I5O+Z z2~vL?H0w7z-D1$W1e?}_l;Sx&@wzTgeINkl?g?Pka(a4% zlQYMfsbN|S>@0aRe-#ZPalt(w#G27H@xP7z>F@k^_+;OGH{E-^zwG*tEw6aX&*Qtr z->N<-`9=fMm$l;Bb)&3_+IcRcX5bPysY;mprzm&{BBmXw(&8bEY zLO)2{&-q=K`Vu0{(^3RJv$A@lJ`fn*Rm^Iml)q6XIbpG;SB9`gvfk!2JbKAv%Y;yIBjPS|pd3*gzr?kmA`iC>&CiyHbBe(JmRD$EH6CohnJI&ICFB`y(Pbj@n663NAVM< z-({Hy_lxS7slZhY?(`@3dyw}9913a?5nRfU^!D%hx8!9AAjab?`E(+bmTJYucoSI%P9T8yAh&1@AN0( z&Er?}xepoc=H>d^b}p}a`!6kjbkXlD{2%;OhZ0a$5`&aW>ct$Hl))hs9IDwSU#oFp zNq;l1p%q_Xm&i@6uuM~rjD$I3(}GV6rES@@V}`YU){n8r zd)}Ze`Pq*YpZw9IdxXXsst;?`QM!E}5zq~w7)MmEy)nf7geAlCYXgTWO<7m3uXhPs8`6nf5>VsYm?e{)>NW<#~8Zw?ie9&pB?rYGjB3M(+>*F9U<^uQp-``$=4__0*% ztgF_Yui~<2=G?}K&79lAdy9t?opfW>`R$}Z%)G|bcS#J2Gigx6 zg%vmCsipc;yo?%H!p8kAM#aq8X|)3JRWR4&wf?CAw3K$#pu ztT~J`zp&zPZI-o_lsM-j4;aEPJN7lJD&|?sw{C@+hxa*$IlW3P4A-dHQ#PF4-^*|M z7{1TrxAcF(pLz5-$gy7q?%lLMek}cYr#@ZZxqjp4c3KN`rN`2tg2)#YrK3_bRuK4j zE(qKxUp7UixQSOiPzKi(&Pt)w$r^0F7EDs_^Hdk{hR-7IrP4F|9@=zIKF@57vJy`3%|n3 zSzLej6%v#eRjRL;sY>~^z(z6!)LLb(3Y+x{?0AZ=G5Tyna*|tAFekqdWgKaICOK}R zOPC264=`WcIHX@IUTjN@?;peiOAm9b7evyCAy{!HCjt^ukO~)gtR0R$;-^H;52tA( z%lZpz{R-L+7dmAAXQWJ$59>C4 z9{GLW1-j}9@=1RiEuj0yt;K5A`5V&xQBlNjL?DuL$&r3=Wvs2M8ijOtE!|(y-nE>L z|El)~F8F8quX^L(+xfF!x@Gz2ulrlewYOfe;+*q~OAQ*Vs2cBs?H+FR0rsA#CJ|u@OzGTcwS#j(7A+l3ZF`zNph0M)MI1Ys0MsB(g*(y z#5D=WB?l7>VZ}~6v*3d`Vo(xIj1Jfdf~$C>MF1%1{M_aP~c>7G7E9$70(neXZ)P20@@kl z7dP$X^~wZkzjorMcMNj*Rt_(a1QWY4X7zM6XTNYU)~H}b5h~7E7lHG~TmVf4L;;e5 zZFxnRi+bkvsTY<_)`|}hb4togm-`o=V-#JWym2Vc#FeGuYr_zS^#Y!}pn#QTVtLV` zW*rh89&`SFK|z4Hogh%zI=nfF&Bpn)GeO5v1Wh_V%)|)SEADati+*g$8$b3xm)r5V zjk8a9wEl@}BHQiqKzv91*~dQ`-zW0gh5u5J;r>^Th5)#&ggEv?GsJ2=Tvew#(BW82 z<$FV0c(gLfuJYTf>J_DZNHwqHnUI#`(1xOOIB?QeQ3ENi0*Rn_3?xI3K2yfv_xL9) zf^|+?qgDm-W<;=`R|SZ?!mF8`&pzQRj`1pif;pToHE_-wY?V});1N-LidG<39vfR3 zgCD-Ar91g|EI;_vA6w3U;L}aCm)C1=y?Xia-~2oH>A1g-%J8`k{2EG&J?UzeoR|;ICHqRb$)tOiy@uQSQtyB zWksLhfgPZt8Rrm}9K1wr-6P7swI}LdVrx$QzNZ{7R~}Ss zI>R>NIv&2&-$F!E%8NkQnU5H9=6+YqUCSlcegK~sfA{jRlOB(svvogN_vUigeUDu3 zbHG8%hxGXnMmT@xtyFUvxqrf+dKBG27m)iX*kJk(Sm?WR`37)LhmF4Js3gzKR;p8+ z`=R`GCjIE-rWml?PjQn%AF%0IoM>EYv-wZE!Am%e3r$*cbAa`81Eti-IXD?|Qmcwmgy>aHT{FOx2nan!Y4JY0z< zz#QiEjls|gO#D|zq7&vpU#V>rt<@G06&OQ@c7^C_zUwXqu%Rl*@7cTF=^6i4{?d6#d`EGn= z7ypMv=!ZVab>EzdHV-eXw=9*Pm9Nf62gusY$H;K6I#BUsQ}-&D&jgzgR6KF>V2tXg zv4xJFOyR>l9RNcTZ~ao*XW}wo<8Qm1p6d*O=-+|AfjnjUB>D;j^t5-fQX&;(!(Lek zoxn*n&5YMUD1_Kll{F8BhplRn?z@&B`?i-Z_doK~ z!1p-+?)!gZ`IoQ#N6YPB{!&QAL=~=#FVe1Ay^-dkousZY@HasfUJYn-L5gWi;3vj7 z#4;Se{6NAw5o}7KUb?p&KnHfN1@vy~%#c##SHz%=n zRZlZUYLPZwHXOj*f&cRM9hbggIqaaL@cEI4C~i*|=0E0sCoX^b$=_!dd{_rl@WXu` zYuTsj+&^n^FR&(PHOxs9J;oJag{9Z86$WbSOT=K_JhNh2iBuj%mNrE7{q8gl zj5zgB_#s`CbYK`i)^mRKqw@Q%$cB+vlM}e=|Oys709_5uOXwGx~;H_7cukY$gqh-G+&v`>YKhx%s_!69cfezRA^wEhqZXs~EI&H(3YaK%sbtfTW& z#2_eEq~{wFT*_}9VuVfl6H@}3K??`P#tiASY$PtH@^Djo=9ux?iw+lJ3a8%0Sw^`e zntF}@kMH=o<+UIF&qVIkc-TqbvK)5^e+Cy-I9d8X$#!pq8aTq|dtM+PAO%%^-#KKK zrF51P6Fhcc<$FB&>53+E6YAGl#jEyu{S+^`fT)j=nJmUBmw0W{68SC5j>J1RB4JjY zDXCQz5%&0cIBW` z>yjotgo=l{l%Ez4?q?fg6E8}|BtMDG{ETzIV-**^?dI2g;ML2^-~3W(QNDo4m5(Z1 zIkB$tU6(fYNGtZ-u@fAg?U!LHU@Ykm77IBXPYvl0F+9T_#9`3SRq{sxeRF=oi6t&; zVg{#k2BY}OPdtaTRBefq4}P`FC|sjoQL<8GxH*dpuy9v|u++`HYyn_D@-us#1-0>G zBG9r3jD#P9YXc=lOKG8XiYK-QLombxE$$z7(uJVtD*|Px+%#3V7`&s?@vPE3%nv(V 
z8>8cqhgjiYOn&J?Om|EIOeIO&6F=vqsq|}B4D&;exNy%d-24u@{`V`G5GZnRTyMxb%mrHq6hDIlpvM zJ3kVOnTlzV{@!TUSeuWTSp8q<;`dr_Y04 z)`+X>z_hCAkwr#x;#?|S(Oe3|Yq|MAZ@V9sBJ(!%Pa zHl5JD)sfDhc*gPE9FL(FMQgN;N<3}R)WAviVIrPOa&I$hS7<_(?w^++fc}-C@W$(DErNFTT!#U;8 z;esatX8xkzGtUY;NvbGL_^6%EE4dgWgJQ78tY?wQJEB%izz`)cmy+?#lQt#_kha>E zjUI>I#W#@L96$-^0HzkF77QRUhvd3Ii7R&aiIDKZ+M=M2cd^Ftmf$|KkP?|l_@I__ z%x?vmS%!;Lvz=z0zjV+w^s@rzR|q!uKk_s!_jGyH+ke?V;S({`%n>fm-1y*2Bk~hP zU+c$)dzD#E$Iw3YLO5IsBmEUi0>+11JqM>6m4?+mn9ff-2OZ^1ekvGT)?=ZhW^#OB za4r4X@j{~5d8QtQJBi|lM|mo}xDu0%_PFHUuzsV&H3`U)-que&lu`ZIl0TXYLMX@R zh0FCh<789}7)9zn&Hby^6svnN^GUyA?aTc`LgmCvj*^Q|?_nWL<>#!OVvMabivXb& zpq2W8m^r^{P}6(@$xnOmBfkk*(L-lTt`UGDtK9m{WC_;RP(!}a(>PfyrD@fgjB90X%8EN9>J6y)&nVp35Z&sd6LUbFhmL`|TrIH`wQ zrtwGw9d=4Hb1MJ1q1HD;;xk(-iSKuxz55>p-*f3(BZ2C*mMqqxnqjk;p4E6Dqq@qK zbV6M!7A%nA_?5HJED&*SfUbs0f%eQF;>xeQ)FdCn9WT+mF!z*S>8Jd3igT4e9G>Gc zk(pDk@>YHpxz0~rm7Y{sC%^kgE*srvDoJj!C7EU*+najHV@~oAFye!Dui*;;X6Xl1 zKF(ES=Pw$WlcVTK!;g7%{G2}=mDnk4> zeXKe8DNWQNN=#?~6nEv1Xi);gFmYD=2=~gl#(kUo62Jo!fweJZ(wLPN0-VKVFvglb z^yzmJu*bnK|Mh)o9`dS1W zXZ8FLico%tj`{IRNUX!f+4azr#EBUM$|1iow^93W=JBOy&3q+@3*_>@FuGS9+@2Pp8bh%mAKIY90Z7t*`hMvT_FcQnli*>`GheY_QA$-^x9((rb zlZ1-Nnu6m)SA7#l?0{-t$(e_S3SI@ulZxUp0vRS{moT%;| z7bxfs{Vk*<>~{Lh=dbtuCqgi3P62gasLG;;EBf6$(pydJ9c{8H{~AYHVg;-G6Ie?} zV41v`;b1HW%C}8Z$xS$3?=( zCWB`#EqFF!?N=0w;TAYnxDhUgg;cz5Mr7AhL3~A+fE1xtEX|E!4i3eZhx)Z=dUDOf zIaQ;V%g$S_ngP39^E=hWt2Ap~_HOQ<^-yO{&<=$F{*37{eU#v|Qa3N1b5(KN@~jucg!r|2&=H1c<0Hsh&*F3lvw#u&#j7%Mt-rB5qd^qh5ILq@zz z0P=T|us4BXy(OHy!eF7n{P;y(^~;1Kzo?6dQDF^(3M2@g{dVkc+C5zTe}zytfzB`H z(q=m{A#d`h$1+a*RROYt0Rh2r?-8X$5`)Vyc~krJCT{1)jyf2`Wr)3%9ZQYG*H)U` zTh=k<4}dd+`2k+(rws<@*$@j{F#82SLyS&FCb2Ye_ANdJ%!y~G)jaZTA6}y93}T9} zRwz1dBDMC7&9$4wxJsf09nMcZ^B~_ATS0c-{OK9G%k>2hdWL+Wwty}{{K&!MQU3^< zC1`I0u_h`Aw~e3sHwe^U#_5o4*A*Ae&Tm@vpGyhFjGn0W+H*bR7ney0E&U?k1w%N=E* ztUb6gR#UUq{U_1^oAYD6AwMKcU-u6~$;{#NPzH-B8~z4jB1X|xK*X^T4Cg%Yz~Ui) z_>C$o(kPH|Br*@9n4(9yxmOM^kcz=R2qzvO$gmoL!ND{Ao*FxoU_-i*tH@o9Gs~izt}ep^_hFFY4g%C zz{;*`nYfm{NEI{U6K+`bxu6NPe$!Zgq6W~kwA;o#KRkl3U33nB9O|DctT0T3CckQJ zPG*|!CD$Uw< z!4I%bX0|N*&HT=!w4#_fjV(%fjSH8x^87h`o%5#`oK?^aQ-o`!Iq}9qJoQVs=aiEy zj1X-c+|a31zW>DqBF~BtXF>??S`=&y{&(o*xexu`3fTR0?QQ;-hvaatnSme5P&0Xe zB48TIFVnC~JsiaPJ{~^xMDeMvssV&3(Ib|>YD25{@&iQ|Ik>C_Xipo47@6NF^8`mt z1XV!dv!{!I=q7f)=fo)jI*8?0VO62BD3O$br;yxcUE~aQk;Nl_3az=gn1_ipX<0Wd zO!AS7es)hb#AEK9j1fO^A|RF43?YXbQO%JzHgD9BZR}-QuDa<{S?+c|{g|_szxwFE zk=|8v5kg%YnYpZ*W$yY|m_feQjl_-m;8{FVJoiMB)4g!76jX8UL-m9CMZ)YumNbpQpi222*F1v;@y=mR#DmOf{gBDtT1Vi>*)4NT(W;sG-52){ z@zsdIBk3SZ}BbOnuA+oV7JLxSYD&Qi*}$pw=zv0)~M@-kkyn%Bu#guIMr3^$rZ zz+6};rDt+!D`>4a&9wmPa5|WGS)OtBcj3np?cIMpz4O*95kX4!&D5FQ3%OmhaK<_Y z8JQfTu_be5i@(6Z8oK@SuM(?Hr4ZQXlOD5JMVD^ov`rJ1Lriihw|Lzv-YevL53 z-=l)%mvR$s+;B98n``xD3Q+r|eU{9xwaz0}5+t*EEg$}ZRQsm$qhnTp9ZaTSpQ*~h zC7lM0DNj3sH^OnHV9w9z5sn?4@=tln?>aYvSJ|0s_%~_Y&*BBHkQ@2Q(JZ|ZRze3q zOXbeyvUda)T95l%_esJ#JL9V_#1ER%r3~USPD?Wvc-5DhqsI!(Ao%bU|?$x-q-arT* zGLaP`_~J_9RbPr#8e2HABoI6oL>6igE814#{akL8uXth7C&pJ$I7#IDm8%+^-}_+K z=hxZif)(KK$tDAM;iK_Qi}o@PdoJ2Im8x{a7wP~gh#}V z8^m>{!uTF#8pex5`mKZVv@JgR%~@FK^IgW;#G#lvn?WF%)JAl$(capmq4Xk_;T(+9 z%nOXsDJ{ZPO$8*sG6`GpwPs6TDNb#K6=!MyPq>AS03n@U84zv;T!9IP$x*4!%6Q^L zlSS1;e)WX$Q+_d9Z1K)KL{39_VA1dBAgk3`7w{Bdsp6#fxI<1_{_!(^3I7S)y(hxm zFR#7zs@5c=IGNWPna|Rh0Y-iJ+~=MoR!7~zTzCSu4V7mFaTuwJt;VPF%6X)p*;B!i1U`e`k!{EWiB{z2cxI02H9kpM45 zD3fgTc$bN2CL?iq6wkyOR8>QkvfJyc5G7bpU>xs4X>)9?nzG`tPk7Q+#Auv|N#qzY zVg1=J-n!iU`5WWi<9x)yM=dY=o?lxYaMT%%RqtAW&FPbTQ)fD8wdiB%IwRBZ01w#} zIUCjupl3@wqbh__8-OKq$R>4!EydBgN7fBY 
zCe;sK=glH&%}e#lFMWk60c)gL09Ih_*Z{>V3*u-`{=TuCN)YCo3B2Rgk)F(6sq895 z!cBrJLft=PEm6te@rmuAu`nk2tv$U~{}>x$tgSckdwBjqZb`WMw*yP3Hq<|{QhVp? z0;(eDSmg(g_QW;kh!3w^xirC!<%Gi?u>8cgzI-|2pkq?d9_O1sf5Y;{J3r5+3~Y1S z9R1Ee;UKb0s%Z5%@eTDN_hk}Q7qYFAM;1ll($bWS2K}Dmgw($C(^Teo`}{jHiy(aO zQNF|11c^8AtUf==1Of+DHHV_>gCQZ&46t3*v1HcPOl$|SY71`|1{4nwbNnhiN*G%o zBY$|<07faHxYTTKwKt*+-|hSd`FvC(~8= z3*v^B=w$pcOKlN?@X*uSQ6GXYU7k>U@U3W!}^zt+9$WKhwteI=~OFb?M zN!8j|#aG1hL5wf<73va@0 zaY{hY$nVy|#^2-*$0Q{A?I*@HnJ)#9!olRsAfMvt61f(>Z>WQHRGj%bG7Gnw;k1wg zzr*R-oaPa#V`nVa#1P@UT9DKqrEikoSOB!{1{Fv=h|{mGkLWR^5nH*-JbENdHOg?2Kh593+Ix2o` z91^n-ubhaR((jCQ6{mn&DNfBIvFub)Y^74o2up=UC#J|x`%k1tCg=V4J7D?2r~Mzxzl2GlH7cO+$def@`@8Bg32@YlvoBbY@(aoEEKqqPGCc=6}n3dLW^6P zD?bVS{5a{zZjDk{aXb3?AVFJYFy#PD?>`DqVKLL*ohOAs%X{roV3K}<(T z5fE2D%7j)pQ8|mJUl@`qfxNMDQyiqI02-N~yGM+F(bZzL!KFmd&LxHEBk{zw5c-IN zA#qqZhF2S{pEp~Zv?jvr#_1{$tjv$~-TfylQBIdfoO0gstAG2o%L^X&ckrgNf6G)F z2x$F`fdJKlrTRq>bVR>tqcsmJNNh3zH{@0S#?2`{5foV5f{yT7xi6$6*U0ZaJ3czq z?4oS-%HO-@uuwz-ulq;AuFBUYvQh#ux_<1-L;`v!Z_*DF%+ybNJnXUSFMdqSPy0N| zD-%83o2%kGx#N|;2}oK&w1t4n>LGiU99c=HjP~e?ScYdv`&6iSsDB0#9CAkEH{}wO z{NbC0inim7aK3?d{6x<>IzL4W0K$1=h_$s9$GIZDxk+9uNiyXy6C=QmeTMY?J(A zhfUuH0@rGYn3f`Rf!>ptV$4^GX7UnDfN&Dz{1QuXu$jQ|Q@9$-JwJ4b6SL#Tyt+YE zQSmhav1tYA=%{$=qShK;%|Br!HZ#8hjHj^3chuK)g%uD)7ZN+YEXouB_9H)4 z*I6*0AC`qhEsMZNnH6I%0y;ktuukzt*Og8T@gQdOq>@Ho5i}S0XsU2A4545n7k;@W zVi}-Rt4qfNub3iDFQf|*KI=+YRFXsje6)V%q)EDSf02Me_C<#DIh+CfEsbX#`|#yo ze#fuj%XEJjUukt87{9i`A77;Ui-!4`d$w7$9Igjq)C!66xvW)~g>K5`5Y)@wun*nW z_^KC5P&V}pYqDz1i#Fw9O!;ya>L2-|fC7pjT6_-HF9z|Ni}~d*FnP+;wMaju0DFK0 z0Njl$k=#2G6;ed0U;qbf`r8xeB^Db(7}H$D(Wa?zv|&gX9RiTcE!Yxou=A6f`Se$7 zYT|G6>YsUtO*Uzfp)N)$NEmBX1b9FFnagMJ*R($KxU-jEdeN^h@4f8p%PZgdZL*m~VUZZywBHB;eHPsdH>L^n2@gUTrIY_P%fCUXVa_n+8|MbF19E=eQ<6lWAjEdO@sQlb!_W zpZMsejJX$#5q{{)-9r}#^XHyZJdGRqm6&#pIX{62QunoV?Sx5+;O?bu@kbA<9r?o* z$()B6GxYN;jp0UB8~KUmXuNjF3!r+eJucK=d8mb(HJ1`ehAXvZhTB8^+(>|)fBJa- z%2nZK;J>N;?#KMl@`#hqr+c4`d=i~%loQOUdG-3mjFJ=_gz5E%cotyIEQ4GEW3IAj z97C9~P2SAA%AYn-M~2!YVX)L$_=M0`>WCCAfT`RuGoD(#U!R5gFE23_m{k)n`YwD9 z1WYU8!8=3yC{nHvxMNBa;l7F3Xryf%MX*L1wPM-g)qyTA-Q9TXX&2zX%DrIu(~tkj^2)dU zo8^<&;Xk@@5vrd`q$M?{F7!$-ZtOXIOv3(AH72cGe)obs;~Q(vqW}F8@eHU;Jfeyr zsg};cT+AW-m{NBG_XKkaF)I(4I94PZ zMDh@65~m|Nyo78Xq_J!q0*NV3mc%XN<-*GB-qAx0b6$xbX>!J~XOUo^j`JV*^yLX> zK5co&C*HXH_MiRQa^dCgfR?C1DhN=`oN2CJaMq23IjXU6A;2}1tBP5@iKsT79~*!V z!Wix)R^BVWJ#d;Ud39qk79Y&lBO7s1ze8Do@&T=xeB`eDj>jIOQgNldZC)pDW`+-( z&x{t?5t95;87z!-msNG0^#HklLIVP;Xw4}s4C!&F6q$gP-0XXKBK$`(qX3js6D0r3u7cu=ONfm`JFkyz1?U2`Q+p-qReFq~e;m z5MiUE2w$+K5uVM+sPN9OXe3C}m0q9(VN(ROC@mJr&v+%NR?>ep{u%3@cmrV#JOI9t z@Z<6uKC%4vg}=W1;YGiVKMCQRnK&q5H$yipJ0={o^EXSx2P;`OdxJ=&5;)?qRON#% z8K&4t6Z7gOs(GV*)x{CZYIwdqDpBjFpBlo4Jt4FukFjP5&NI2qZcG(JD3-d2%=u|z zb&8P=$MY&a&H|MH;o;?$bi9^0<&%*YYG4j9V@eK2&@9fv#^-zr5g_`LpS5opuqChY zj!!)fVVt#WiKhqpiOcM@_r#NHSMl_x0?bfhF;EX2{T5Y3CjWHMaJ)N-vk{J)?=^Uav)*gZGIO z$q_pDpEiCl*73~)3&+5DZ(Q96jOA~HikA>>Hbz^34WDKN>->S2d`cM&*kCE80Oq-5 zu*yUNeGY`p@NZ_~Z-d2z!2bLM49Z1V5n8wyf~9SSr7mu|0Dl^j+&=)s^! zZAX5Y>q6eHqOi{4_~aKK^0Q!ul&p>XwDT*m{FNC@aZip19DU~UBhUOlm+yb_KV1Ic z!@sq>?gOu0uDH=Z5m@TkKMKYu$yj;M*?p1ZwBP=^iAHb8fFTk#fE&zQVy&KdD?48f zIdo=RRMWJ_+B68jP`p;Y2r38DP(L@m;r(yLj-$=AsWEn^6NL_nd&eg~nMYrwMmdzJ zXdF87Q|SD+GUZ1=IQ4mqNU&(vS){_LSb#Lk(ecQ{c9IrnQA8z;9>&l4jli`zD^?ut zF@2*>@`BJ!BdYsCj3rlID^X6Mgw4f#qc5t9`lg;%C5lge#{1mo@?DoOy5}6pUIRhc7twUa)GMGz;Q<4S;{+`DX;(T@NzP|e18(%QGyewB+gnCCNZD{X_IDorb&e}8AarQN+kj~ z-l{E_>`W)O@)HAFiQNbn)mA` zzu&zNS)Tux7wh8_*M4OAqd))MHi&iuviZ1n! 
zb`S13bUs@9h7gCES^gO_=iFF}2Uu#M&dE&%>+e4mF@gExH0)$g55lLLz46wf-cc;`okBE_p~U zkYKg4pQ1!C&}@`gnOA=EFiy}xV7KhVn5&}2@5CcYjY&La(kDN$d;7g0SKfHp@`|_o zBK{Wflgn|3oj5Um^~*Dld&t6%mpt_!FCV!4UCSSTh$Sf6jq+tP@U(zR5!%n>-!Hm~~S6L+kw6%c0Ah z#d(Y&*BHq^XOMKL?6gxz?lLmCtaTn6kqNyWi%Xsz z%Y6?xczOKkPuH8qIVV1LuU_QIy0_zo&t9|qr+2(``Tq0&$#Ueu$L=wVp97KNNN)WI zMS(P@SoT4CnWknCQ^U3Tp7T9P$J`U|*}r+Pm$n7VF2X2#^>+^HH8U2LEB>ZP8Nt9= zSR1P%nBX8cMQ{lLWa6Gk4n>sRU;*XpUWmZ)cmv6ez(%nN-jhkxNkqG7eCeB~@D|4) zC-gmxb>g+4q{`t0wAzN1e3_N3imOh`#Nw1g)Xkr~tsfmFH%IfD02bDWYXN-dd;iUZ zeA9U6C*HKYA20NO>xqAVdH!Qxyc~e@GjW=TC3uFIL)%U&19J4E-HTFWyvp!v)0o~v~NB%W#K zGcWo+AMTq=&h4LnO2?^+OVx;XI)5gS8#9;klN?*Sw82ut?h&suhlf4Es~+9FdgfZP zDq4bR+-_921f3fWJ8fiSd=D7=pOs+tq1fN~g+TnX?}D3J`1}$gPHdz|YGVrVS^r|+N=l;lqOa5TtuF~o; zVCZ$ds2h?D-rS5P<^p?YUwo!@-EW2XG# zz$JxpMm)X>t8ytW52!NOvPv?*jg{6%};}_#pJd<+}plRgSS7?+i ze&qw-h-Z{Jxgsu_*dUq2?F5Q5NV=TfT8O>#D|al{-E#GE;t{7Pc6XN(4}XB(K*)LN zbst^+{;$7C-}CW(_{8{wPk89=LceO?gN{E3|C!Y}%Zo4gd-~hRdoFvc{<-UeSH5St z;j`C?w_A!uwI3CuXWZepZ@TJPW7s&$ay-yHRN-OQx&YO``TV$chk*smx!Te>!$*Ja z17)3ZCBONd8dnkb*#fkg-x%aJ0cOWYS?O0^nO#`0tscI$%{VhrDRmpe`~b5gX`$CR zHGoDURcAQ}5r`6{pV76}g8t}F5O~PM;W#6m9>EylntM9iN~*9V3lEv1rC6~yXH8uj zgXvS<%!!Y~?t9eoQ2dkBxhFk#dDJNv?A1RXk>_g}pS=Df%d6k}@0JgK`kic@2DO}U z*q)zRg7pa^!h8)og2X0w$O?(8?z~A{dTKYqT*f=Me=>9Ctu$MhRN zIf1cussg<=XW`FW7NdIQ0m%tfER&<&*k41VW1KQ4ICGnVgr(mz;EJLZ9Q z+ymF64>@jm_QSqQAEdwXrpuNOT=DMZgZSWl^A~O$^{amXsR6SXHKPnVfWlPo3a|4| zScRy4JO?$76D-y9?tHND2`UbH6^&g1WXCCqf?nE1r+GRo^;@}!g*QyTM-^2u^qVI> zKa;R&kx0XR|Be`kR(vytAF)-p5wK$~Mi%PdFqW4NE|U`LLT^9DvpBcjG)_AF)S2QQU0-?ArORvH_bbbL zFMm_Fu@ja}ebA35#;* zycp8-761RT_vS(0UuSvW*V?RIvKDJIUSzNVFJUuDsaXtDCy<5^5;LtcKp=)RNttPA zGHL#32d2$r(q%d=)1*+s6fjA^lPM(u44A|Yn=!>pYzJc*Z?a@rl4V(&CE1dCeXi@i z&pF@U^S*k21{p&}=Xsv*Ip$>mr{XM_;eU{%t|MdnAqtDeF@AoXWD|Nv14W#o8 zVlr1uKSuD5q`8c5?wc2jevy!^gf+4aDZ1;;vt@1~SKdu4VNKBGWk0*w`E=Dj&*615 z2t9^hL5KOqT+X2CjUR2VdHZ%VKPK!N zbB^7e^nZ5%z8$)Ek_j5%1c2ccP;F7v{r z2*$i8@Y$kEKj5SUDR3>Z;dGewm)Dc=^p70ZMOG3uw=K$G!WD3>M3PWt+JTdpx^MBW zt&Vf>q>nNpmbe=G-WI2P18NkP1h#&X)SP};6~yB~Sm_Oow)_4da1{&KnM{Motg)O6jkPat{~-i&T=c^b&k z002M$Nkl>!9xW6H}gXZk0dOoSMI* zssNXfOO&nrLw^PI<8;K(2mrfe%~ew7s=xa&zVl1LhXpq1D#j0@22nzT7iTrNTNjke!Wr>`!9dp#Oo; zpC032iVW6YdG{~sC#inHU()(ZFZqt`Wnb|1e);##`Ap8w*6*6|@r_^nO4Ho;(7oI5 ze)#R%@9K5Ae&^P=`SrQ(*7tBS`awhgWLa9X_O60+#%$K}uX`ft2LKBQElaPGKQQY| za54@|vDG{ZoF?V7u)6EA{2k7jB=YCydX#=7rnm zUj2fN@AZ8)2Hzb2jrYE8`?}(ceB4uy#BB+vud6Y4ByPz7cvp z%xV62=@a+g|Cj&u_JgngAGbgM#eaGG`WJumcICxq{&B-Yyg$gE-#PhG{d5-}J#Nl>21sN|!M@-r>!uJ54C-6e9znW?YUMVe~Z#L24noU?jQ zMjYt?en5f0wARVP*WI7M-M7_S@aE`C$Mrrl;H9hdqpg?L)ua(Q(l8^mtKv(oto7^Z`<~~p zHYeEMtLs9V2BeiA&DLe1%@x$u>JJEE|8 zpzgA?ak!wHKld3_6{*Qe71;M8gey)fV-8^9En2{x2nHou@$~^9%Oxky@Uynd#0mCN z`FxIuW#+fQD=)gnPZZD8m!A5`;u%l7@iX!xwW;9Kzkc$OPj0{cfj4ab=68Q)d&|wg zzCHTbBWb_uN@Y106{~X@Pnfu9ir)EY#bbW|19cz1x4(qtR6xerrv2d|mcgwcOYxdC z?X`n*$>$ql9O?Avo(F@cc@oK~CD-W5jsS5MsjMs+?m?JI)qC=rKQ~BXlQi#GGk^_N z)3dQHy3*t1hrftR%kKNrZ4+3&gavkk_Y9W5spf?)G;0cnBo93N`_UTbPXd%!)GM!> z;UxgNqmOLA`QF#-;|=+^l0{@{Nsciwy3_}7ZhcNCtp9(}HJ^CwF6fqvy)m^VOT zbAtV^GsoGKb0deIc4zxRw9fV_sBP@~zC#B2oP~^asSQ(%p3H^Ls%dCZ+(=i(s@o3~ zU^JXpL{Jge{40R5%nhwYmAmeFYz8PT!sLGwS6qCJzE|-~{}j@-mp#KXKas@x{Btj~ zhChmz-_uW1y4i%aqy;p1UuWE98kvD2mkU5$C?mYZS@4p9PuiCl9hoL2qP=`n+Uw z*A`Q%6_W>A<+lh+R18D5{12;dj^?MykkmkvmV>{{eqbA(0I-NCU28t^21uN7{9jWU zWoq31$uMrPyY9PF56r8I>Gabxp7!kRJ-7dk>nEyf-szV;^y!o1ut6^!_dfXX?Wgq$ zy+8ftAJ>EabGEP2H{QSc`G02ng6sdpz7HgR76(t3H|mv&`8XU8f9m0lmxs7TPb~Z{ z&7JpsWc#Q-?$8qpYyRi;(5HB!<%DXeet)9OvB*{Tm>T2b<3i&!(kr&f=i2>qfcLp? 
z*89>KNvW{8F?av3gtLImzzWse_k|O2EqW4{dk#NZq8~=5lTZ#Ku$dtnUE%M0&iM%% z0L1emgIDSag8xbIQ+NC;`zr0PzW6%dD?CYjw*L1z3ORDYx!iR7JGM8z?{)fy`0KXY z@42NL_@9 z0sOgm^a5i`@j@}h-DdIu)c-){1JD1>S40<=IVs)nBnpoTfskP*Jm!GN-#$R?3LFmS zn_nJolH5cbLb(xHVa-px&F(M|6|5vbiO4=Q$(MsyRPl!=x{{q3?2c9bE&5mXF;5`Z zKkZq50vR}F1UQV^!%r7PrK+UJwaSMo+v(B-^51h z6EXOo_PcI-i@zcMoA3MA`n@TA8FVz;-4;zABR*e0iO@fKx}F*5gE6YQ&)&&??C*el z#IJ#+Y~u%vli7dbl-=i*8NKjD6cTcl{dom2$2bSwb^gnobfYAD`oV;iqu!`NZAZ$MpomZw}x4;5~lX z`%gUlfd41M%xi@4Quusi{fXLtV*8Z7t_}t-uE#42Jf`^(eQo^_{i5h2`T+qxvGKKb zW_>|Xf7ki!?d)^-JN9SmRS2{`TOWF98vX|NS!cL-ay{UmYuAKFjheYS21{()3$1LYmQ|($j21Dck?bsYGHObo^^#@js9xrM_^5v=7yy*u#O=*1O@f6i6U+rIp{dNR>pJAB~{U%EZ@+&}o= zh+ZUq)|Y(p=3nD{JURYocYq(dc-L)j-QM!SU(=JuuWdJf?DtLx_MNWBR_3~^*({E} z*E`t{MJ_#T;Bi(!dZ74wtlD3D^zVS&eCo$PIvD0yHp?dv)5_KSeD=v>w3H96mEQ^V z7&bl`*fr|2*W8cC|KtyH_60Oc(JQXuQ|J6-BBP5dKIQT*lW{F*6cB*Q@J6xC z>lyXfy-a2)_fAYmDY(Uu*p2a&e7xf#~+zdHml;zd^rDI0je|FKi$UHtU>$Ub%my zgkH8Bib2W<0+0i*aNqw?{mAW|H~C9l=bUw}zWM&b?Zuz-<=cy&@n!yU*F{gg*c?w@ zp6tLA-GK)lzE4jWZ`=O;t-rnTdQ>;v{&xTGZsp0no%nPVgrG9=Fj-FLdaqf5o5f5?6XOTfa3rpir1FQ~4xJeu%h8m7xUF7j}voqTbf%(;h{ucnj4N&>u zOyfS^;o|3sgWVHwS2keowvNjAOCFoL4CR9?CrVNe7?Q}_hs7`UjIef)DS<`+NtyU(A+Kh6dcjraIB}E6&e!^R)!v)*2de(X|M#QTajCvp`+WVD@#pI&yPkjTi~QrlS3LRUp>E@o zb9LbE2R`bblzPwY@6->6yhH1EZudQO_sPe<{yZ?!9bCIPrPe&|^O(jL{><6u^TN!7KemoVv5dA?f7hh3?&_#UxxClX zpFe?fY>dXVNe-})z)}MAC9Fv_KOvFGaH!@q62GlBPbB9l7NW)rkphcoZ6p}U(PH68 zyoGDdlkbvmAe0nG2&~Yy=I_~3t5Jn(lh$ad^r5eL@qJ7$@$t|nA3Wpt7S7Q(RBzC~ z?r*;91ICOFs!M%YRjqKA-TdtwHsVJ4uEND7sruxN%$ymfU{Z@?doE0(8GJMPx8BbS zJ^o4z%HzjD{bRXT>qm3-@tmulw>|TU8#iita{Pzg0shx|%iZtyD-V3|qwn#|%cAom z*W6S6c%lor#d2ewc>KFfCqIA8xcxoPJ=^0{kAK@BRN%3X|4-*NuuLy(KW-=qF?2X)_V;6S+w~hiRs`b}QVxCPSV9hrsoEo&k%k%`Hw)>(GUfuR{ zuQ~Id+~gbSH|u{a=m@e|0xrmve-f~=f?=BUs(t)(fH1`cc=RMm`}~HZbI}qVJAMi- zB7XUgpO|^md;jgwz~4c6=9M@43FTS(X(@isg%@bNK{fIVhEI+^ULD{Y+#l9Ucks)f zJYjtBuJ`EgXWXP$TX>&-0_{`t@x!ge824E%WXk=M$0Po}Pkmc151`$BroeibIlr<^ zvAZnyBl(>(7tKGl)iaA6*q)-l2zuO?XL%J~s_HMxB>kYEqTy3aYwpi4a-~0Unjbt5 zVDvWj7+ss}-7rl0*zprrV}{YY`Pkm|rDBbf2H4`hjg)xNkEWHM;Gxm|CMILYkF`LZ zUrCKghU-<#If)G!@J;A^V^a`~NSZ%sH$E_9jeq8YYs&-*NbOx_skVvuB66w&V(TSj zE`nLHt+l(-KJzNZkT?O*NHdbxb2}J(`scb`-Rm5Tmrmm~jUNbY` z>vhWSyvzwBHuVI#>o>y4Z#>Vw30Z|-MCnIJFh*hbQgM7zd9Pl!gpV*fOh4v&)^^pU z*KVKlw7lZ)4SHhX`#(?D@2XsV$#wd@hD*)!c#jJ~&7@9y>5 zV+TJsW57urHZ3N9Yiz%M2jtk-z|s@Q$;W?0Lhm7>e*E`u!9(7D>t|ODNR4?cHpF<8 zb_3*ae@&IxwGZn$(ABs!Bz>@gw)41c99WCLMUuz+!Q;Q{ik8ZEx`Ju22GL5 zkITkjenk^IzXxL85|dOgJ!UV4%68Q(J3*^xjQfHC9q9=%;P?q71x}B} zaY}k$3n8Vk90Q112&{Y?B8Pm&`m8qqa1>aIZ((pCw33&6$n5iM0L6)z&|~;P1$)Ui z=9*qW$9^My{YfnM1UaKFU?lHbjDy;m?U+=vZ}+Vju7NUvGcPcnLP z(aUo1%XR!#Hm?VE@l!9;2fwAwFWz1DALN(J_>vO8k$<0F`{_RYawPwI$o?MxHh%w4 z;*NWHIhfo03zGD(&(HUI4BQAAZa4Q?&Y1T5Mrgmb{rKZ-FcZS)b>AnA$!+7$(wdFu zj70mEf|D&_2IHQ{zOTN!MvQp-z8RPu2^RXJ2X)zZGI6PG_^88feuQ`VQThpaV#Sy8 z7(+)kE�NPmg7gu@tDbReyTjYNJVD10Y(bVtjH&@Z_)k;p4w?;*NsJZ_x`G`}86) zC4+j>q^EB%g;>VYZ5?|KzY9h<6!r-)YUal8;MgBUXx4K7U9KTfyK z)3Gy|n3ba>HvQrY#}HN!^ZwQA2Iwq#>6c@M{}N#eHV0zY`@h*zh!AavI+i`mFQ@x% zyz*27*3UnV0ci9<33_+`;BQdPqbCd5nI8Y7yXU6pe)l=p3$owXF&gvq`Yo}7HCfM2 zROu(Kz6X-%fV0jG!k#rV0A21}SI;Vj%r$1>xTyk5A5$pNj76FW{i&#o`Lwk1g9Hnn zBsU}59_^d|*DX#27=Xff*Tp9g%+AMr>>rl8=CT_EWYAeXTaEqe`JhycOVga?zU+JA zWTMkB!{=sC{#X;}A1Vlc3#hc^{JLjUEH3v?ISMpZY3g1^zI?)VyuWcs&+zD@0#~fv z4QT**>jq*@^#~s6#tRq5fF|y-jeo7*b>AJ^M>k$ZUe*P`utu6@XBS3vHWLg&a6l!U zW=db>RGrQ;kye^}Nvg40_=1}%=}?^PyGPbfvVmFDvbkU-c@y7vG?N44nBj;&`c>d-OVMG{8uR)xz8<1^PM4H+xh1D{1I=eGSm3 zZyIAw{o0zdf#IK8e5PsO)Sy-O0h(~8H@zmfU$HvKPoH>d0M#(pr)O0Y95*rK~9A|law3GsE-N}X0XNf4n 
z$wKK6)~IPjn-wznWus}lc^nEDIl4It9fr=o{hr(OuhB=6`#9GN&%4O4m`X_WbF*nv z_^iRrHAY{niR9#lnE4LZg^&uviKSgx8@TQ1eeh@%jL71rH|tp0@q{709OqF9t$!i_T1mt-0LwZyL5waQ$PPX1@5i!l*VXIL5qo)<%T4nE%{A9%@1%sc!LG0DB-~YIc zFJF~_f#8Kj4^$ZXtn5Z`C17_iYmdG)-h0F8LQKWEH|;E=UDXU~c8$jC`-cRL1t+=+ zw49U_6|r~!7ulodyL^SO_q>Au>yN^3`p{He_HmI{rn5pltXVh%e+tSWW=*0G9*AlJ zKrPxP@{Q38SPJVfzcIq>YiCiAxrnim3!l(3H}P3Jnw5BX6Z zE{9fFpBpo8a18{o`olV-)^XshSjBIxO2gMknMx(;z`zn7 zrb<@;tIE;rUPwS{B9;Vz_G9Fuv-p8Yp7Q;+eR@BEC5n+ROzM6D%1gGN3bAK&r-m{MNB`CPVM9%c~ z2Ap6m&G%VozwndFN6{zUoAN03EZ|)bi)FaDK_0OaJ2yd#^MMwVZIH}hZIo{r*h6B@ z0@(2vr*GPXrVHRzuFW)|+O+2vfcYy~>quLZvB^3L2oe!)tY+57ei@5zq?Z&B^r3us zv-BRsD99CK*Uh}HD`8C5zw{G?U>5=T#oM#P<(_Ox+HLmyEjBuq2S%$_?wSsNYbKXB zvcgZWJ}fU;*0TUOKLNR7a8BlV{!7xTVBw!wxi8%~6Q+d?-S;Gj*h-*-sKuPjviSa& zvIPdKW4X9uV-!{Z2juL9gN@gBfMFD0>DJP_wwbsw1(Do6e*+LULJJ`A11-HB`h^33 zxyLz1s;*Uk``rg24*ELhhA%AKU?yw?V|D%D#(D7hqlGvimwEo|0bThq=^N8B-%Q`Y+lW?l&rc|SAnSwK*&hXI?M9}FbUypN=hp*qI1*t?nR>4voH`BxpF@S zDg5QY?K+w*v-#UZC?nKa>)Kq*>v{y#0vPbt^ic98EP3<5*{3yN`$Cc!*0Z2qBO*iw zr$sKH8kSAj!ilQJYQCYe;>|Bw`;Es&A5`RE@z>mquGGXSiZSrfrG4~+p`xTpZyvLoLbK@wby) zYd0zg#>Jf+RvEJw)DwQ9M!q4Uze)>dLAR>JM-L>{Uoouv(;?XAG)Ky^)Y_wf(Rl46 zhZY!1i}t4qCwS?89Pp!ude-H#c31AWC$y2vA7&W5cHzzI=rBU}-7xaL0Fo~;GU zWKvuxuxJf$`_jL}rhj;lGJCPpT>M5smU)QnwZY3CCJ?(sr?=UmRakK*d(UtE@$auKpRCk4qPmSC3Z<|$T45|5zx{hhdTU6@ZO=tx4zaf*dpW5?1pY05bFEfZ0y{xpfi{k%QCx z2R$*+sZ>O3fGt`BlcAoo}b@#%v zgNRGMug)<J#NpDVr6GvWg z;3u}AG>wt^@mC_A#&Aa6<7Kp5u#oK;RK}989e%JIml<1+UQYh@XfCK@Uxn8{!n>!p zfb^;y5j!-%PtRJ@OwWq`T7QXgEd6kTVhph44}d-i0>Y9Matf1P!0(yW95Z{yHV$+8 z72fF28B^YVG%m3@2j*To7JiFkR{)LUSUrJsQs5eNPsN>JAlC><`JvTx(EUh|VqkUqL71H|;9BcDL7*UP$Ff%z7I;tx-b z!1bc155yvKa5k5H)fs!Phj!zcXFV5UXHS51UeAk8mj3AaLv!NVXoFu^U6%oJIgw{W z2QK`W7|>ddu9;&W&f3Bon2H(zfCOjB9GJsa^$q=Ua1XBgZb5%i+xy_1_xqq%5}wYr zNG_J?OLWW759nEG;VCTPiAQtxeV171j-MFoQDnzXT)AYl-9p9VUF&yh@)6##T6bM? 
z&T;Vg4>0VFXC}q>JX3qsz3ll^kN?xNLNgdd%>T@RqdZ=kOcPw!n77 z4O=O@b>}-<0POoM<%7y-z}jg$_-smC&V|@!tNg=#V9dud>Wkm0Lh?NMgIGLlv~2ye z&_uO;WDou#uCwhpu!!~dK)RWE##>3WCSPEFP=(9u2Fi`lw3*x2)Sh=Cx$(LQeHhKL z%NKXmKIYj7ff_eRT!|&W%-YM+00^m+iP}aS`hZU##C^K|1IZ^Ikc&_lkQ*-0r{PBz z!F+^2SjYiu9R{^My{}Z|c8&?zS6cYBqb!ne9N-eo8*HgR*^@ncl1I80w)VwyYpx9% z0C8{G$gX#5np@jDW7J#)vPXAd5_KTo@DUdNL~)-%;!GAA(r_Ft)-&Il$dOthJq#EnhXx!S82Gl$f<$Po-uBWoc)u9TDJ;60%qEluExC&+~EfSE7y!T;gA zKLqs|$7L5jZF}lD=SM#WsW%}`ej$XDPb69I^>=`Biog5atcou7odH*Msip)}_U3vSnI$8v#_p#oih-lj{FUDegC8T(O6?13w27Eqb?4SJ^UJ!%DVSM%e0LAJo?Z4 z_y-rn#)Quw;vV-~f+`aCh>oqcOhdoarvKD40g@Yo?R9=F042Ox|JwhCXnk<$%|lcD z4}`4{ok)O}nH^b|1-Oe`-QDU{xKc(TePj7VxKto948*(87GPsJ?boHWx7K)dWr054 z6k{ys$T4zcs1dGpmmHEUOIV+H`2OwAdvEWc$DMfh<1MW0xtp0;@57+Ir!;bV#yg8HX1>;0OuS+ zy5q0d6X_UcV}j#7OU`!=_XqM&*+FRf7MSSotQk0a6&w4KjDShPZsA`Yv8NYpU-=C) zDJ662pZuMv*~YMwhKeQK^-pc`K&B}%pcj58_FNQ)y~w@VBmi;GP2Pia`yJpEVu|R1 z3O{hWVYE ze5_V4hk0wR@DO;@)#<0OMkl}kv1Wr;=D1N7W80dD#Mle~SY{=h~3I z1fByoe{m^ojIJ5OsI}H3s227M2$zfsZo-(oBYX4z4A0!QMEF}3!%VWY-o&sRCvrc7` zKbXPZ>n||)O%6T$NT__L;njXbRWN(Srhkjg%hC^y-Wo6cNo=oIO~-x%-9LfVdL*ND zTO;~KVcwy;^si*KraJP;0n)Pp@MytQGnn2wv{p=w#abyz&n8h=G!FP3XLF()Qx|Je z04dxO%-u6WdcE2^cJ&XzOV057UR$tmNWGpwN)U)Px@G>k!#_i@EA!W60Q{Bc{FMxE(`m79TqDHL>_c z`WSN-MuER*5>T;geq$5helH2uzwCQC&L6x3{!(J4=rDdII#lb)k7HF3xb%%agO=o# z2BkfGPkPhw!=pthSoyKebnWD{f8i;A>O4p;R82Le{}49)EZn8eJ%3;dMVpS-(t#7- z?H=aCjK-PwxBJ}bg-cZWpT4hso&8QF^PI?1d<7?}cynTXels~u_W>j%o7qIyiWb{vQaxOlbOG zmWt{Z3Xm3lbl_m3?`w_D!`%%~M0yt7UUyE{0unv%GXb}4ps_EDSHU6TfIp)2GB}A2 zhJl@L-q!E*9PW`8>sgmElOY?w_>C^qDz1Cpd-tRiw5=aMR+fXYE|%zrtYe3Imu3OhMW`@!xeN zukw|i?}2C$IP(moq^`LBk#WIj|tMUqPdQB+#QypJldBo@v~z zyGP*9J||@O>xcM>A_!CLvld?KH~N}*SS zKub7mb4UestbCKHt1_Y7sSyPRu&tTwlR25aXtfUYy=Q(! zInOZ3*hj|Jhg~}#el*A;jo6!i+*qNqkkPG%vv1LFfH=p!Qn8-)qTC|uhkL{wPxQQ1Ds~Z|3K1L0+gTkL%x~P}Le~kk))Ik_Nb3*(k|TyuQo`M%QPc>{BK;di zWzY3ltRcWO`Kd-$SCt1ZJmGjWiGAE~|%vL)3+ z*;#;D#^A9Hb_I;7?9$8hOo5YN6JZHj($>>fYpk>3Ll4JK^al~+dNJ6`;b?b%r)qg(}0=a3F0 zrW?l2c$s?*+8C4Gv0(f0W)h;JGBay5QDXCg-Dp|#bP~;LByv7SR0oqo}bRDqH6xrXn1|I{aF&Az$y7#vYU2 z9K*W&jxAbl7^hb3v0uWv>XK`=bM(ti0yn3*z}GOpSH(Zn%3oxzNo&hUnq0M55XOUv z+6&(tb?P;lZTj~L(m&9_h=lIRkOo50`$1&H(qAMKUryU{uaM@C%CFQ+A`E}9$|zb_ zUJVGqQr-@t^AXP*NY)RwcSYR%OZi}St{&a6D4CR6 zb`}8*hEYz-pGa~w%pbhaAM&R%iYKomHk~23w2$b!Kk+gBIPU|)ce(|Cm%uNo*Z|-x z1-dW7bN^hwStglmRPO5KN^0u|)C~$I%~C+)8#4_gaE$4vH8VeJstZ1|@d*iZ@wX1l zm9N;@Kk_sXOYgPh1FJE$=ox<;#44a;YIgPxAgWxaVkYiL7oK9^E`Bf**Y@I{juARE zT4Dat;A8K;u^=m~E!T25H<&x-x zWR8IDS@QcV_5iV3Wnr6HYYz)7hxtnjbmFnT3hin356K{}u>gbNtJd)swC(LDuQi)X zJ7t>p94Z}2YzPjP(`;~X#|@_e2yROYw!u*H$(aI!#I^Y%uw(EX!%tjBiqX0RH-GCH z+UU~wqgchEG&jNNh_t~rH(REz?B zHZ}N_+{hu0rsNQxyadIECig`o&TdZYsTc&v?!Re88TuG{Yh}KLpZ?e1!m(zjkO-va zYc9P(FU0pb+ZFn=x{QxKaOZaG-5=OKa?dTs@AGi&N=jc#k24C$Yr(g076l&9rT6T z`fdwX?kAG;!LYe`Od@05SEDZqGY6bXtT2`g_aMy1buEg|-$YkH=UALm9OuL|60r$1 zkrIZ`iXT&vn^bETxQL6;WVZP??Ag9t$#T9d7L*+1X1aAE@5}g<_ zhFV)k0~F*qxw{V|)_6F;%>x5|uN6()CFfth{e>_2*6pRwd-?XPD?iun!wY|I_g8QF z#qFox{A2zb<>M!*j7cR)Hl#A6gAb$VxTU#YMW0tEdVp0fArpGj&A4F>;}yAfoe?I z=jWdUxgXUFFnJrJd9Agqvbknvo8ZUBPT7Tp3%jpbcvnerDkb$N9~$x-ISENNe9#8J z{T9>w$@dcoQI@9s(uO>P2NE=#hGFACx6@0lET;VdPodb_-vIp}54s!6zt95f*&Kt= zOxh`*6q+HS&Q_Aw%@2hfYja(c{ml=C&;k(L!QWaH!%)nzKZ*4W{V4BFOAxHLi=(gH znC=Za+3Sse(PyN9?nOg%qy^Cc4Pz{G*L-*uMyx)O2NUOjkPDlvr{+T4HTo ztJXb}X7`-580L=VU9faJcxd&Wd3hKf}I=|}likE!n_MiXB zzvjQ8-u$PUpLykTw^v^I*SA0a#oxO9!0_zZ`NK=pX(lE zsf8_P&JE}?ISck(v*zxFQ*i;L*`t#DuTU9I^nxJAcS7Si+n4p8bG~zQA*J{yA-08| z042HmUZ!>+D&2bNMQrVFJqP!EVYOstix>qb-+1y}d+f`Y=u21`;uWZSTo&suo^ckt 
zT5yubl}e+T^mt^WoDx0HqPS*yH3c9N$YB}m#vPA;$H+=dqAR5U2fMrukgmc#-Q%CQ z_Ls@7b}|)tNb1OV?$Oistli_OkU?WEyGM=4XR<^du-Tm(v=B53W1J=?D1-aq_{>C7`vE6fF!fRk=jSJ4XaQpsm`aiZ;ebwJS zT=OjFW$cMrQfUEf3#hwH6!;=a9LY_#YahEh8E`#5S8hF@~MotsUJ%XfVJ^n zm%ca1A9q04xumcpXUC7xc*=B7jOxh!`4N~we#kWyKZcQSi0JPE%vsRX#7KPffM@*` z!@BvwHm5m;ToBAJ%MOnxUA_c)%;2}sTieQy9`~gRXnyP+Kl#Jd^{`92bX9N1Y}o36 zf_z@Rb^j~7c=~L6#v5JE6ac*mpY=N+zVr|9?qA}fABCi6Uf0epev!GysoyE$3ubo3 zuXsaEIJr|;o=%cyo}(6x93Jo z(p7p1cb~^fa*FjQA9;BD=zSkaJy0(6&F@}oV@_t?iXUmt+0#!Z1*TU8T%yTmFwwK# zPx`FFMgOyRELl#F%>YaO0O$+?!jcqnqA8C>^rt5D^qAQ*wsDwqUg5PSVN#lV(IB4T z=I?=!#7dW1v8#QSw4#0UhQJ4rJbXAb*UlOu@e;Y#pPDle%$k4O|{h{||9+Yx@2{uun7w4Yn`(4}+`oMt><_DQ|#K+Voq zC?*E^$!vz$|Erh(_uCg<|Haw-0j|%#_C?$G{^`G8LW{rfxCci)vULmkljYur%6Y#} z{n6dIDGXppQW>#C^#giVT6hXec;ZKuY$_xLyeAwcf~MwAD*>9t z&5~tl8u^Jff)i?G&cKqh=kJ_}6*BD924B~&&^30JX5O2mYlBPc6Y1bOCix`$&HW?3GKgcX zcSBEe?9PHjg8)A82)p|Q|EWe|;&B>SOTIN(9(a@g5cjMmKqzk>(a}3Wr{HMm8yRE5 znGs}BGC^{v8y_RSP9)D}U0BQ*tkakE*w>;{ohh3+B-TZaV2~PF3;A(T6V`+!ccGvN+`QJ9#`vU-=&|pVlWH@Bye*)-K=c;^-LM zMUa@eHSk9bJ6i8*pf}Iclm#ny~W`m;N35J0Rml z6azVVs9RV?Uv?@baPoU!xelK04jb_pqEA5pqaVFmGBEAZ+c6?$KKCzgJAU$=Gavup znm3CC5G3w#zqQYigbZSPdINxup8xmrBGi;%9ujZ*@`-=GL$&-( z4=bF+W*t8&_z>!1PMsB@&8PDtyv|B-S!zA7t(C_aql1{k-@Ra-O?R*_(ck+PHgpNUad2C&T{@Tjar;sF*4?H83N91@T2Jzqd6?upe{g>J(HxU zgM32uP4pZj(0!m3F<8kZvWv?SW66$CZz_&>mOUDGHMY$KI=F8rD=~iOHy1e*w}rHR z8kJ92%*HPb&BeB5VS_0*J=UtDW-vT3>+RT|#L8=8DYt4eM~Mb;@M;tO*!qU!FbN<0 zYt-)#xgi-J)^R+UqC?TCUW2mGw0M5PGVVUG;z+S3LLY27l_~K{adbnybW8pnw|q z`AFMxuj1&4KNxkhYaxe&1rK*SmAn7Xws9i{+zV*s z%KglG#`PN0x+XS1h2`YRwghC=Fi!T1wd%l9L`B$SO_ z^N=D5m9jZPl#{*3Ifo=Ggg8bVvfi@ycI>^^vG-m_#yQUZ`FwA;-{0`OyxXS=A>C&dM9!!com)YWb4Y#^e?GPK9kgQA2r?c5S#DA@7a5v4CMQ? zUOc4{+IThXh(H&=y_7Koiq7dP)xUu$3;v+_wdHMjIVm+4Dn}C}M#78?A8iqKgpNtK ze94cyo$GSOopjB8|LX3+gm3$Mi#6qNm9=*t1iAz74U+KRva!KlZ4792AZ>J-(IxmF-S3yEfxXT}cOM=by@1?3FTos2tw)O^M^=0zQ`X z#>$Fi;5FM;JpmQCwv!Q?+MszRqqP~&r*o4X(P;WSmYA!x{ph{uFiZDAR6ud(kuLai z$(Dhp5jm@O?yKf>=Sj&qO5zb6D%3R2&m6{9lsjdFKdSXD@SA*2S;=u=6p}~|a+*(s zZ*1yR1lW`sTk#%j0h&)B30BW3tB}1$r;OsS&gC`v%Tgv9ZV@*|s#ZLgN3O#& zXd4{=URmG~`4pf^js%rxq?zHIy0$x({IBHeoK6;OQdlj}l5U_6va9v6f4`dLrJ&~XrO%m#D zD4Fhf5YNX>Qk2Xu<`ljj@<^%Kjztm$j1F0zje=kl?=B*DV#``e%2$$YueCmX6v2;u zKbT)XO@s2Qud4Ech7~M|9^69_HaRWaxX!*EO0Z)tSqL&Q;f&iqp+K&Wk5-G9+aJ4P z&IIO}7v)`1z&D}AoSnlO*um0=+czz)VB$3TQsnY%E(3Bpe)s;zWLWjm}MW3#`*L;y+v1l|oQxZ9sa%C+)MKntWi#I>0h16TBq&>2g~C zx@Y)F4I%dEecXXEbk)~jq4nLDO`p=ky#cBhR|=A6y2eG^4qX{ud@~}PxBl=OL2iU+ z>Gs3RUP*rbSK@N)c~k%Xt%|olO7%MDsIRu+)k|G&s6@R8n-j9#adN-&&vc!<4V;6a`+$C@;*IDmXKh6$*=+LzrFnetO z@$FZg<4DJ6pKGChDB=r&!V}qgmRU17sZYJETN=eHbd^t)+{fq9Ravku+O*iSsI=l) z|FMg+((0M-Gg|;ZR{FNyhUBy%PgBG7;69~bh6Zd~z^~&J?#YX6*g3q$m|TD?w+tlb zPTSmt|o+-d3oCW({aKRwB390{qIre_`Px+#;<>r@XMuF}n>FaT%_IW!!# zKeThK^hv;J@BwnhIyICmp>HET=C7xHXWG8g`4i0hJl#m zHulrc!ub7OQ^Z>I3Z-KmtZ=_`_8EUrD;48{#i2F|N1ZWRbFh1sZ#t7&rRs&KXKV#5 zs>@`rbcZlF?-h=z7Gzg@;WZA&NHlyuUbrq_`~10xNzGsCnW2YXpyD2ta{AO_qTArd_cCB8>CL9i&ICCoJRZz|%61t%nK+HgwpY~J9N z<4a%zO=ey&K|1UG&;L|H0;CASpl!O+c~(*HXa^=Pf0w~QU-iC)YZ-8p1-|F}W7MTh z6D z6qx@-GE;LKUs_;mZ>gt)V#A0Wb(_6q2a&J&xDkD2MNKAVO_*{|U9kk$@j^P%K~dX23Nkn2z$X*VC%@#Mo_Bh+j|4rbdo7)Z-{+C&OC)ht-N}kRxIG$lWx>_o`wMOAkxD^}wp|Zqty<^bz$3>%X z+fi>mFU4mR-DhAO++lq+&8hUT6~#2vnM3?r0QR_5o%>|3_A#jS)%@*;H0(px`lJ=3 zGNJwQ`&1*yJuhml?1?6sRht%;aC|O$BdS!U` zt=_Rkq1B9e_XUO(8hP&nhy!z~kD!B__gs%V7h9c1n%SF--r*SIORWwhusrGQIOS&v@$(dr)b6qb-I=FMVU|htfSHz+IpBw>lAlV8 z>i$(RZ^EO6@uk1Vu>JX@#-d;7nyUPO(EzsA?v@x1dX|8#F^Wb;U=us_=*w|CZEA0| z?KgSlPhmg)T|A!eWJWUmwOh|bel1Zwxr77w`vCQ+8IuXdb-nhPJ{+v1bPd|=QDS&9 
zRp?(&eqc~AoaG6F9^i34?PN)2{HPpP!$b|Wh5jk+;3c(lI6>8-h4;`};==(ts81zfT|K4uIgMQ9c>R=s!- zPqN=-A42HCP$ zz&Y0VQsU8RnBwRO2cGYi+LGTuS}lq=&Enz`h~Cczsoih9JzWw3+1n?$r5`3XIvq5Q zKSuiBG_d84*GinV*oqZ%5Emv*yo)oFXbflm*0&}&?T|&6B-?dUu4dSKn>c6}q(Wvb z#kRRrwETLAVlv(ZVi-0$&VfH8oW0aScDB{NBWdMbhMt$6?bU@==v$diFu7a6J*O9* zpBub21!}D_LsN~3;8arXdThrsyW+sPp?Sp7Y}ZuuuPY91kKFa%>WY***r@YqZ`hyf zS6)$`Mwmn*g#kyb6ZC3L{gx+PwneR5!;6c&iV6;JT92e4KA79qXjH7iDj#X!o4;bh z8--Nb1ZWlXOT}%VhmB;e!vkps?`|7j$@Gxya<={$4IDGTKqR@>vbOwjol{?*?3r}z z`}U45A&y_Saf{G;huhsN=hgKp=!>zrwv;w76YiT2^fckL%qsM}2YODQx>orFsg7i( zK>QzF0Pq-CeKV16xhdUax_&n-Endl+74erW!tyt(TY%T=3RA#VvsaZ5Y~O}2qAWO<&rgyCq0I1 zlX@BhUjo|$wwnP~b>aFh=?)BjY|0!+ThCcl@-8z8jHAs0DC;hnG>F=m8cWJGM=uEv z*5p#pIh?6B=g>(0yC#y62&0x{#YLz5GGetH`(oXv4*L@z_X4!hPvXisuG~u7@%Fa6 zLi7GgQO>p0XF6g%DVD`@R(FDUo31$rKlwExU&Zt0cMUsYEc8M^$0Ke+o@p;lFz|tK zun6p=uY_%!l`e!QhV3UgMxF5c(rfHI<`hDKvGJqkEo9d}>sjqTale;Zq2dmbY211|hU3uu zvKDVYd_~Ur{A=@OGUvX%&Z-6et}?-W|qLI!kO zz>OD8x<%iuOemvC9^vWILoW!1c6gx55e(byuwYW{mLm>v`ME6YQuJJ1qL14lt6J(u z{;R0+t_%`R*6*Z|>Mb71h5McfEhzn;4v@?hf$&A~;f$}jAhFYjaoPa7;CXGq@Muo+ z3q!N%L*Wj=;N39SWB8+Jw~4nw;H*#0BW6jy)Fl_aFG>CMRsPpq%3iGS0j6b_Z}gnc zVh+^z-Sf(sEph#JbdKo*e*p#;=HQXxzia8mtvefOJCoL}gZE8OpUhi-P#=2p#D{9@ z^Q*0UM?B&yGwWaz&d8r3QYXr<$@M=@UYndBseCofbdx8w1HyRTn#GO2Z2(w9c6uc~ zSbFuz&MO)y!VOc^y8wdT4Z^phCe>_oVQLW@-9K4;^2*(7BpXWCh1{7Vy}`a;UojqY zrw0q_+x&+xvvi@`7P`CjF2rP(4@~7$#F!14-pE{Gxi!F4^6^nOapx}oU}_J}hrS_( zvFke6{bjH_5=|kc%^%`FRsb2-Bcn=|TGn%6V8TB%9yJdl>{0BA8uwogaxo>Qdm%J2 zdv@tMYZIV*D2#HVZmMH`fS@Kq31_=#QL~zLIIzCl^V*l581sHPt)D5>%q3kX=4K*O zK6`k=P4OPH$N4)fYXLcAr$&7kcssc*>f!fV$wIALMKqP*;s$rhNceyH*QXhE)it>T z>QTA;3AXTAopg3uREAYz_On`x3&s4Uk+UM=i-gLN@p8`*hDfQ_cp#NhNKo1RP~I~S zq5hC%=|V+&Z<|xcrbxHsIJUrK`wvv&QesN=)}p3n!+c*W_5< zvWH^yb@;!>A!pi#8WkJFpATmBQ1U4bUmTPQk3Npzi0q9{Nk;=}Z! 
[GIT binary patch payload omitted: base85-encoded binary data, not human-readable]
zTmb@)QUTpjhwPzY>QzlC#DRXx7a!cmhPEf)%fbLsbr&|PrR7Q;U*d?@d(wFEN0d&L z!$QE&a*e>uY;9tV2f1@wR6oPF5Pg)apjciuD}>Z$Sg$;Qz9aQnIR#u>T?3o!mS+0Oz^O+BMcTCbDuA74yS8-5TdzH=+E0sVM2X zLyu7_zGh34D{XjES{tN+M$4o;g+>%l1nJGhi6Ip@=1V1fwoeA%j!3@&3W->+$!a8# zI}QandlpAUOLGz9Jql7++vWvi>uJuL;c?4d^@G?o6-Cq3)(N?T=7*)zLshINL*q`B zw=o#e^`aaNTVKsk8!`^u#hwX!=jEnkud0WppzNP$RrgcoM+kyqWqr(a+reSi{8$kS zt4mJQyT`+?LKI>|7hvlB=FFsKUr1NpQ#O>5f}Iz(bCE}BJa+Nn)0rr_OgznppPu$L zd4+q>pV()&2GdzA2f0U#8?BZlecX+;;OL{#LxW#5)HP%{*WR#qGhU+h|M;RylPP|g zdz-IY6W=#(>7h92K`@P5#UWYrhG>+N1O;_02eG{=uLJ`HadYm1C?0FR zo^9-n$6WDN>kkgEc7{`zLE2WKPGCPXD)v4i*3N^5%#79BV;!NP;N)G9s)UvjZF=F9 z;7AtA{vr@@A+fpz0R2IPn_1x35BE&DN9NK)m*G2(Ya8@zf*=+gsV<9=&YH_%PDo>B zweh3lpJpHPXK0!F1V=jls{$PrJ7uMVElX@@UyP%p%x|04J7v?RJpM!Rp+`!eYB2BL zn+qw`&8-qYLmvde&4jjIZiI$wQc|uoeSF^z533Z(5NAtW^5G4mWTl!58xWH#$0(t2Om?1cW+tO<_WeAX{i@b&@j{{3=u`Ex{5!Tq@PJ@3 z1{=;6ryy?nY()5=HGiQ+Y_kZgeKQO0uuiXkl@@LZWvsWC4o-MZOviG}65R<%c60I>K!@GqwLfd;h{_NLx;#y)9*sIvjDP==3G|3rAG!VII1Wg5NBP zWQ;OKj36%hbiR)v7aD;5yvy_Zl%5~rSJ{d04amquwZ`av97^@W6a$$P^C9eQB$rtI zTTw7j>{n7i{bqwh1}4;^`VueWXCAeRwj@;-3vNqoC{cR>F?E3SwOHYA2Fl`|#yI_<~WFUB*wGIgtWh_Cpqcx9pUEtAT} z{Z74Qj(Yws-e~Le>8YBbcb;nW=D-<_1uVwgqH*?YIrqH%D+%>uIHN12KOQcC+g7~h zg}oI_KV0x4l0*?sjZYXRBCGf$({z{1STa}43CXw5MGwz!=hGzy&`rEXJH9))sc`j- z;Z6Vwl=~%g+5QRoKd9%a{SdIrK<=hCj7yJ%NZf2{ori%JV^pwuI*fUe7@rMw1?LM+ z#-R>evg_zApVG54T>T>`%N9aakEQ0z?A;q6c41v54Z&Yiftc@wX}%WlH$TIrNPoRY z>p-H*cW{U%5_=j2fS(jgi<|4O>Y*Wtv$1R~;@>gGJjytNFBlt;RVebUrTW0AVU1Qs z0d_UWl>~1`^jQpo26##UEh0gvHlC9XV>7&JGE*)#3*!a-1ZdXA$W6RaB6qy>x35C*QhJ~~7g6k_ zDZ%2XC%g=K@Wm(`IklINKm2%V`DkM66P=7KB}%=rG7?Ez5~mw&*o6Ba;IIs%6VEG) zlcP6msQb~`R2eVZKKuMtOq|NqwxR^Px)(vS&g%OTGzritygE`lRuh}8eVPn_=W_^1 z_L~p2=l}!;3zwdgg|X$ukKj#iJzV`-l&o3f7vH~glE6v76CEB7GRb{dh?>KQ5e9!S z!vA&mCRmC}x@j@$zNSf+4;|`$l-%vxGu8WU^$gR+j>4*~(^$>cw%{P*AXGM~M zFX11B$=T1cfL_>vY14a3&4m#I+jK86!^{G;k)*3>ou+8Rdqce) z70jxo8AU*=u%g|sf3PT-IU789nD#Zh2s+oM4Nq;R-y$I*0{9wr4Hv@{MIbi51NZgf zU&Sn>ok<@haK@M!56z~^%_zWg9AseH7d3o>e2YB6>`wl$*jND$)ErHD|L)bf_Mj@1 zc*ba$C-t*&2u#0^^8*eP_6B0prdv04A`2d1Cm(e|MaBJNDMAL83019P z%mhS1KS@+EhDF|R2D8K$Lru0=C~Cg68OPpz))q(y(SZP={(nM;eY1AKBx*TC2fm%l zmr*gRq~Zu-2A)_pjL0<}5Pa^Pid=|l;ThK_$iypuu`=lRDv9@35n1a9vV<@YTqqYR zKH6}y25>HG7*t%!!7!oF2$&EsmcGFLTrTPD#^p`!zy%RJ7SR&9g`tx)ji7fSx7jaEF=0!H%?=ux5B0fn*)p9DBAwjnhVQ{13I?K+fP}cdicH%0^v7 z{ro+R)x=W#&fIE57-}78HH7V$pFW3*n##^|yx)Be$g5Gy^)A;?#Ah8C7w#Cxo&ipY z?hk3AJir_vc zm|r+d2-LuQ^DUf2#j11*2d9nr_-B7Hw4L~j3wS<*iJ> z&Xa8x!s1ZC0*@L}C?9ykHa^lzO5(yC+_gLC70RQ*>d{xanh0Dxe_qjO!`|hCZ}VSt zBZbCwas%|*lc=e{+gBC#M)D4BjQEVf&BepPEHNTPIH}CY3aTb7 zWh28=a(U84rOVO=N~R_MCqHQ_aOMa1wo8*Tx$p=#&P~rQHtXOvV6#w_6xQeY_uL^F zfTq*{a|&T0@UBpCLpzCq-ulD6Tr>1_*zyJ!gq|vD1$VG+jGhO{=uQNXVpT#|5xr7u z7^CPPoYBLX#ls^<`M=YF1~eeApf{!DT5t*IN5WBbGE83N*rF5?UC144&YvdiS4-Pf*asSH;0{CGMHFf^ zS48~JLDFD+C;)FP=7QiQB$s;0eqK46p{3fukbepoweauE@{avcs-72G4G! 
zab~fhX4I&k=~X2!Ka#LQQ~#^wWT9W!T|nAL=i}VCA}cmP4rqG3(-pdJigSET^cHc7 zkC#*B7oYiT34V-PBlSY5ms5xMbDYXt+p#QyA<=dzFKsR8;35!JXpi<_n5CcX1&n+}r!>8F?cEOv!Q5UBF*D2sSBEwyW z@&nH5(-w1Lmc&uf-cu9f6U~Xm1~TDS{@~}o$Oz|lNz*_(ozf=!)_Tsn&(}$$<^uYY z->{A%040ZLv)rz)SPF+`BGxlb8#psA4>oS9?6C` z4M_f%x_7@_m!_r7a}h^0?af~@R6j0Nc4X98plk3;;G3dRy)}fmERCEdyyTo+)7hSX zRQkg|dUW~tdv0hZ^a{t*NB}o|u_|=qx1!Yf%Fe6daX#^-*#RZJ{%Y%gUU|-bpFI3s zyjUk;FqEdsSJuA((48-=*oPfm-_qyy;BNj-`wnn8*)e?e z!GPNl1u0L~^0wV-Fhja&huEQDgK0j4@!Aoojgasxoi}NFH`#1{pw@RN@;0h zzN$K1tj!$Zflw=K<^2d<4mg!wmV7|(*cfIl15;zD;j(BKU- zCD`8^`=3puCB#q;U1Vz0VK>99%sRkQ1kWSaMp-l_A*iTh7o(F9?cDYjKKGZoB!B}B zBZ~Zu>=&=+{;`*y7(1z!MNqB{FHy>|Cj0FmH{>br0P^RKK^cXxn{S z1YqV$)+RmyGOVQbIJLAhM034(DAOTj8mvP&mbB#aIudClV ze>W-d>Jw+HXhy;{4T1EG9S5s{x5bJ-O2?BBu+BgFaY3D)HQ z0HUM4l5Ze+AAb*;?hqbPDTawnh!lfZ8d*VkAfu+R=u8-(r$$y4NV=W0<~>lFZ1~i@ z1Z;bSJ|!u7$<1bf;VFY`g-!%7WBupGZ#S2}UwsJPf2%Ed_;Z4oYLE@vSn!gN^0@Kv z$0k*0=Rh{Z^=%%FVr`_Pkm~4=0**9DE6P?U9F9#D+xP$YFsTy^tQI%~==YwlGv1Ey zTcsft%aMa$)@Yf@9k|=pD<_4zeE;^`JZ|o;*cx90&hL-5SIgLdSBsFur_1&hRBZj}V0GKH5@pWB9IVmY-vol(|n4OKxwqAJ1 zgQBQQ`BBHjL>7+4JgPb@HHo@;?bTRpoD~6+VO2<1nZ&mI*=ei6Unf;naDI+l=3`xE zsqw;Pp!%K^xEiINIQwU4&s#0{R(E{zx*M+GjGFlCx?xqLh`iG6ZndG8sLR)@N%o|b z>0Rn39Bn<7-BsBza^njOpn1#oL<<;fm+>g8f~S7UGq*vmD__C$S}*hg%Z~lN#PX~! z*&ZjHW0RxF-Gv(W51mX#km?vXq+VEfE9)}aI5+RzaNJ<$&;AaJgRbp`+X*vs8H<^Q z^uu46?#gE>y}dX#R|Nk?!ktn7%zA=VHSpHNuepyL!H2-}1V!Y3nMFw%Iz?r$u9|K3eOVCvMwGi)SHkflh@a{UOoSm#} zU;{l!HAk_H!GOhO(f%1B)S<6wJ$z9}otO%pK z$oaA!Bf!S$66?rp%jjKcPMZG?j&HOJdG}05`HNLbFBe!?jNG1fK!MFnbT#w)?oq{G zKnX}^L9DUBAt5{xNJNZf|K0~e);Y`M{vD4);9ID;#@fDARFIB~KSQJ>Ef(o7EVx$! z!sKxT=^Bgf^OMl%Q$(3Gq=1w(;)k}JO20lP!h{OkX#W?bR9hzWc$An(6# k0sQ~3^#5Cw_)$7%?4P|Y^;Y43A8$~8tMR5z4hs9f01P_Hl>h($ literal 154300 zcmc%RS5#9$-#>bgDhN@LA}wG;0V&d30*Zi26O<+$AxMWHorEGH(nORR2vVhkfYJlf z1rk7d@1X|>B?%!pKF|C9&pEf}>RkNR%$_}KU+%r;GvC=UhWgrUEW9i*%|Mi(^XrtfI(En%3zvBH@ zG&FQMv@~@8TH61N#pRm^*uS#Yx>G&A94yt+$a|??Kp&D+kowYr|)~N9cE7;RF;F zN!fP{4mf^>-R)R!`OD{nDqB-=o?(|{n`74 z3ivgl?f%*c8eT|XodlIPB&%apK6fJUXxT@0F4jYNu0{uRpVQMM$R-5KhAL;#F*}nZ z?R?cUA1a%hhnnxc^AqL^NWgh4oWDNAmrZ06vzP*{(n3Mef+m3bgkUa*Z2T))f%Bb~ z{rWbBygFA2YsoJMyQ&a*J>dG4%M~P3|A%yMc8#jIPl>t!iy$R2QnJncoyW5j33kp* zJ@iW8og)z4z+#H_M8Ml$VHJVWSY4;HdTygRH4YlzAgjpb7eH^Ga;vSIeKUx4sy{Wx z4t~Die|Q4~(c1RQvu-W9P_t*Fz2~zQm$<3~HoJNrf&;%v?pP*8_66Ew{32W{I*Scy zBm^XHJc@WLiMnZF(iNYG&x)lrA@|BU<#W581)q|(Cy?J@o!AZ45_x_HH^dsF@jiz@ zSw;83lN~R#2J5ByA6j_s&*Ns=zX6eJGv%sl(1Rfu>edfS`Kwwhh4fV^#o{G7(tkmQ zU#a_v=RcfP6$91+EGoU{Tq`hm*58u6LT9do$UE9KY4VjQaZX-yu-JO6m1Ih=H@ZPc zBAj&_Y@&n$M*bKfkq107%?A4bAAji-Hl39ckbGGKj}(`%58v&|PW6 z$&1aXza~>zEQ^>g8P7PJz!gPJC`$fGr#N~Mn-cmK^U_8)Wx(oW`KkjF`{a0)n!EsC zOP%lYyjLs^*GpY!BJFb@J!&)12-1QPMds$~gnH|*hfBV{8(Ld}&z#R3A0Nx8$$n4L%xz6D9G~Qk15^O6-I~Zxe*_UI-!@2eCAud+W#&i^3+l%N zmbIiq8`BbS%hq%I3-K1(Oi60fs=s17#w8&PP89BoQnnBV zdR6YWty`wt?#oQBIS61-sWUmofO?>Ccc~_3>(Lqq-(2aMd@Knn0lPZl1zTb1)~-j` zq+Mjtej5^hF|`iU;56*<;R%G^ zjqDR_L9W~N)|_ARdcBw`P@Vda40)oQgqT|6;D>Dr)rZn9J;?Iu>cw4Yh`EO1g+#>0ljIy*jDH`kSlkb zM)cCf|M@flyjV-Vtip6nz9xWu<>|RAQoQ4>!QPX^$kN}#Ps3FkLF7{}ANJY~$8Oa= ztIUQi5bMn+gQ=w<;)1jW4VxVK>y2ql8&aFk7RI7I_dR6anrlxYkS6FT4O;#%@6V1U z0EedC$v!BuMtzu;RinIed%w>du8GB2E+aTf8H~{FysF^+gZ$OmU=$12H($0ga2vv9|;3%O1_ySv_DZ|79|AW#9T%3V)!49d3`K3W8{>D5_>9 z3}0C2$2{B?;dlYcQ3c2PebiH$2e}>%hJE*7aPjoKHYIld5w>*rOpHgsLDaGSIHm;6 zxi9;%FIw{?ooBpGAhbf1wH^wg_pkbJ?R5Mu@XW34qXOnQ&gloukJaa68A|r0v^l+R z$-yrj%pW9Y> zgCbq;o*PU6bi8GBo~WG1yIyEv(uWB+-)pyl0T#bc zp?0B8TM0|&m6f8~!Cawex*G1mrt-Z*r^j;GdvvL%xAhPcDB5^rvvDYaW7MF{3q$j4 z#v(vmT#lT{a>3n&U9a6jH?8~l8(nKq*p>sE;&|@2?IrDdj^u;*V8w%Jb9yyuRgmd6 
z0w{V?bn3I7l0=U8DY&{=hV3Gt0XZDkhJ!c9vuQ)lhcLZOxnJ|XbmbVBqWx59bL)n9 zmoi~IYl5_QHKZ0EGX%IXY6ohxiFgbuu3eFMTpd!Ag~=Nm*p5z5QYtp*s$_@z*0uSK&kfTY8u@5{6f4{^ zu@JmpILwYrT39k~{*3=s>oGs^+rzR&2wd3-CfXp?X5xpM?=_ce$jf|Q2s`qDc~TZX zFYNRfw`2kT%oPp1z*(2I1QKX|$^A8z|H=1v7ILtUjUpI=;8G*%cd)6)@|()(EXr7> zXd2?NPQ?z$!mH_e;wu1t*V88xeKi7lzV5l1Zo?-I70!=6-wO|nan|*@Bmd1|_DA~x z@h9x$jUW0Zu>;}(JZ{)xFDF`^SVSSB*$($6ZOXO{3MP_~%kzUc@_5vR^(Ecp_i{E= zw-L;Py+jeC@juQm`b!_FyGt>?)GOm@g;_)FHh&rJ2&rgiCQkfDsO#B1QX*8fFP&H? z(!45Q8-tdb;5*>F*C(ES{;&|gE({rT>S0Lb?k3)IeLzLrNV|sTnVefg2GW@^eXQ8! z&T`Jp>rO>g@2Di((!aIl#HHuxH&F!K)BvP)#kzy0T~*KtnC;~5oZHRE>ngFy3--5P z@*qy9L%#vDXg)WdwXYR;dOQr+NmRoio!51p5hS_+^E%qex^2(FLrJxofMbE1Hs=EB zc>ck;zk?`|a4DpH0(?P5xnv1%oti}slo-~K#E50|Ax}3*2JVfv+fy=17+(08&oRB< zWfif?D07(Or1=1lGJJGH#l{6K&M$XR%C$nap@@gdT6k$4eG5VpjUDoPqt++txF)Sm z$%Ri0o~|1>SbzO`*?rqvMqKqU4VmkXcveGSq_J>gCfB4Wc-|gY9T5 zUX_x%JB9XHnb$_z$Q)g3O(Q5;^lZa&j_D%1oSh{~r^-Dzy^_D05*j;0Tsy0TGuiKZI5Ntniz?9vm29V$X}#WT zoM`)%R3ETQWu+S}esf?cakEXUGm(pl@x#n$ox5IQPjMdG$w@`((d=wJ(}hNT-rQHM z{f$Z5;M1z~V~yX#08}gQV*Z%&!CPc~^-{DGgR8F-rkCYl4V(bD6YEp`VL>fZhk@_D z5AOuzOK;*QlH|gfXf$y@P)qMo(t<2}%8m*xiX^ee`(z)s~yV?8$U9) zp@IwgFCKPS+rY;gQm&28#Rhw9n)IRz`$?{k+0}JL5Cz*ABwo}r$e*bni9K){t=^FQ zZSv@g4*)0QiSG-MeLtK5BD3f}li9m2@hphHfA zfy8Ctk8$$O?;+P~Z4ZvR3Ava@q6zUz`vJThw^aKzJ0YK#UU)O+u?~r5qF97JWzYR= zcr;CCLxp!^uao!S(4e#VKr-gKlHjk*lZ&eZMb9n|w~Ql_bhX9q_|L(Z1Bv^6%6Y_* zur2b@0;@rVT5Ko$hVo{77mmwLUhj_Ew12ZUk_Q#ESa)H=z6@SD!sYG!qkP@c^=x)1LlU z4qr(auMXo?0taXh2aW6U9PiUyz4WMvd8k1tSa4f}3W+%QMp;FPSNGHi_UqhxKKjP< z{osgS>*u*rYkPE3cB;}VmAH;cHp6J3)1K&9aty(i!&`kDDPk7h#l#}AO~`xq zgkoc-C*J$+SYp3{9yQtjLR=Z>%Sz`mX7}(g^4mhJA!z5~cYU3Smku5n!Jgr35nKioOJrf(qU=1SP5M*;S$(1)rjC)Ndo}a1z}D(|QW%k}|nd#$RHiO0HHnJ@N~; z&&2oW{3nON`2`OA#8!13n?a~}ze zb$VS|8G8zQgh>51+~ex08uQ@+S>e^cvV!On%U>2GD=G{IKzZdHKy=Bf02*A)r9DEC z``4_Ar}Y7_3qNfmOWV?ww(Q--$!f8$mk@7vyVGN2rn^U?IEEMv+gOGn?HNED$tzD8 zV27vR-#^xChr^!dT|`So%e;(VOCMJ)1kUiag$pPH>;a>1XC$^#&+o0Eb2B0RCxFEb ze$~wZOlkjgXI=j=xQT*Rc~a?NibXd2OPPJeaxUlh6dOI~;%{P!t1oDV|0*`eM=-$* zForKlA?2Ah%T`y^U@L~UdxIW=bKi3l?y1G=Mn(CZtP6%($p#H8j6;(A<;t(f=gM(4 zC`{zX2Hb<6r*`y40wtx_fpO~SB&avc-DTxiH3jr{OlbjOjXCbq=w8}ahHk~Ll9zLm zTEd=9e5VyL3>A8PZQ6lyg{%Ls*RRZ`+!q{*p_!U}48h=Sy^Cu~(Q9<9)IHJ0)jC*= z+Dy&8#>X$z#lG&8&TC~{EkO%-V;B z{SPvsgv(}rIPue2HApc=vEn+w?cysQTE^SIlpws2y6SG8CB&pV$Mj~ z`n^~xghp{v?8^u8b+J*QE*-IR#+fRnn}!TKMUN@`S^M`tGk#=u%oz2%mCZ-)e|j?T8a=7K8s_S!uaQG%kOB^G5Y%PObP$ zHFCFNa4bBV6Ye~KyoH8Fk%mLrDmt5 za(5B;q4e5-+>LD4{{a_aB zB`0wIy5wa(tJw@>`i~v{S-IY}nxYWurUWn~K(y+K9-5-WN|%@Q2J@u8KAJm-Jgh_% z-t=*_w@44Ar{7S;7^t6T<74aPfQxs>k)usik3ZHBgB@Hyyt*;;6pvFEpBt3DSpgC+ z7umYiy{Cn+nu?QFt6dZzA*or=TjYT6o!h|7LUWj*b zr0k{3HIN{eo9g$cK_%0Mi9J_xdJFO^-m?w-Tny10H-al03iAN1|uDP_^$tK(BI zzsTPctY=n`G`;*6CA3bYO|(uRZS#cDGbSF6F~_yHhjU-1{A0Rj17*+nZ(i0rt66Yr zxOR;TtTYIn^4{5@GyfA8Yw^Z4?_E!DJREa!Xu}V6(0_hLqfA#cfESdmh zlU8syGVrD*$!(Q>8=Oxu{B}c^;5RRLNiO7Cf0heiY(m zJK4fF%TB^~m#^rsKTtpkjhC<*jwgHWsLvXG4!mEX4njq_P45PJ^;8KFU1JLm{JQ71 z33NW2VZ#G>9oN9*P5W16<$XU6)NRR1j z+)AJ-&m(rx))KksuI9k(!4^Z+vm&=kkGVF9mWC4GtYC0RXOO$Wm8UCED(wv=(AOv8 zd8f1=x_?A%sX*-GW;(t-p*_7G9F$BNk~B|c?gY=2k<{d=WpPIzQp$gIkfzX6@~jMRiIRjBPe4W8h(ih zZC1ajj<8wTM#UR2Igz08T%~FNBzMFyq&7kOEBIk*rEu`IYcA3!QRPl9vP=67+@*GN z#i@y&Er%f>n~*PyP+B}3&>2&k@p76a7tY@2bdI^Hk@?*teD6)Q_wl>ub%;bT0O&BY zeWwNQ5jNH}_i`rWO)2C2kFYwH)(p;h#s!bH!D;vUa+#;E8!~_$dKYc)M+7|o>0E8# zRD#5sA{0I~6dm&_@ckf65E+u9D+7gykiq}rJUIR9GsS;6ACI2_mztgLd zC6y-)Q~oF81->il^|he(yBw5oQ}wy`eWzfGEE7IZysCSwTr3(-(dSdSz{Q+!PcAhf z?D>COUR}*>edjD%?CkTMsXu1ADMdS7oF7_F%;%DkP}Qk-xLIPssgq9)`QN6*-d~%# 
ze>+OSHz>Gpg66f1cu)L{qqQnJDS5_DdN_Kpl+ub=AUpE~ZDx6UwUT_-k*U&7*gBHp zn#r!gXGRrbGGOZL>OfpIAtj>E8eyY}y7fL{&9c=@lp}_u+tC$KU%K&gTS@H!e(Ny`btnNxChOMl`^&xlDDi z-hgonP;O#C31Hz0btlk)s@Ok+=Vb7rc=Ima4Y)euqPSz;(A_B2;-k7))uosg1 zSiY9M-SKrXQ-_ix;8jhO>I{ZPn-MlOEV|G>JmzTpC9$O6`PK28W+Gb&Dv8lJz6n>7 z^`LwY&Zmamr+s)$DwQH$CWCo{Y?EE`W?E5FkQ_M>cFpqnB6Z*5rt#ZXjmA7*I*0+v zf7&(b8Rxd%FYxk3!P%$9BCV;tN31SSyegf@2Q#cIObM`@z7a(KayF@_Rb+_9Z@)R) zlw)M7+t0@0v&!(m^FxH_UMSb=m1IJO?;i`KkkyH*RrC{^j6yIuC%CA%U->s|l{%?T zpOBJ*c(Q3yuTk(m1@=Rv>ayFhw@Qf5IB+;C3L4|BKR|mUDxWS43(5W>$Fw$VOvZEn z=<;fP%G~SuZaA1I5h;OZcD@h|h}5#USD*;TEqu2vTW71~8MzXst)g^j`H^hcE~9?{ z=D`BM7OeN_f1iEXccda7s|@}T#eTx8X98^kPH0@HNedUnhr~yYC@C+McROuzheaxs zP22vis!L%c&IXjs6a=)zrUunmwYD(}4p;eGUA`o?oKAlc`3LZKTzv}AW?GXU${8hw zA~x4_)$)oA?hX0=55N!6W_H~OWveivG+J8dA1|taOUhp{WnUmHgx5=d?rCNoPUu%F zdlB9e@$UWC{bhBbf)2=3F~a~r$e(5XweCj0!seqzl1+JQNa1aTzfWG-xGInGeu_E$ zks1wxk_2f8L9%19T6Jjq-mIwc`4MO#+ahzVTwlX=3)=9);-{@VNXmv!I=s_}74~?n zIq4uzjfm$TW>Gssww54z)MEGCZ7+U{B6XLEnzWiGe8L_jQ+ZB_#v0_~qRyDxVG!Mx zaz=AA7gAqf&KSbWGfedP$Mr|rZInBvG_%`OW*$cHQ@bTx`Dyk5*H3JSf}rG`h_%qy z!ko4D1*5$lAWP3pBf7uRq;G>HyZvD_SwfNX&wjxlDqcGKhD?GKd_C{CA1V^Vg0pFc zEu7;|iHXnmQVJ)9EaYcSfqAsWZ>M;(Gyyix%IW?vgUC1OHu53pbm>-$gWQxNHFfeS zqfjK~EoQnw2x_NFmvW+iXS!_AnFNl}6P)2v)?2pdgmH%UvTdII( zQL2QG{jW2X?<|?a2fcpWi@UQ|eE}x5X0My_#)c}7wj(d%Rt^#p+Q?75>OPBT$Z6L! z4q(j95^uHhw@$oB9tnv!`-mA#G?jc;ZC^Ztrh^&CnP<~v{wD5gcLdohH@&=%;1&g~ zFbTrf4I_dHDLNE2nqZR*HSaFTz-Hhf_ajfrtuTx;cxO9F`s3eM0R5j_6q39FjX`QW ztp4Yp246b72zI|;ro|K)uzpLuR#dP&p!a#pUCUONDLN-0TjigUROsYZ@`&cw)pz?B?CQ3Qx+xZMep$t~RSYUtW5Ywn3lfFshN$ zVere`S$>BGYcq3?80Dz-y!@3rBqr=1zuUlcE7(#RE{}Atw`;++r4Ed9N&;0QEAV1{6h4r#&XXj_*3mI%ao9SKKt-tX8G}$bwZ}cl? z^3xzrVHyQ(PwpV2Hm8ycv5h;i<>^v$9FTU3+PYdJC3&lxoe^Dn4}rHqq^mGHv>SGn z5#<*C$$RJNS)?d(sOMWioi#NT+<)S$0g#T_@k8@`R0>58MY|$C&){Bf^=GDN;i~PUj zJ!B})|GYimzEi!kCCIo^TqxUgC6gD_*x@1wu0?JKCWW$db%zGs?>rEm-$eU3pa9b; zpXDBL*cd@xU-;~8_tG-2DL6*I)=X9z2?oG}@CxtjD}ft*C9PAxl~LzWl*^OK_MViD z?zsTSCv=!!{`sQ2NAw?3(EQMoKb(N_E znv7r1+3GF#+wYgh+WaES;!tlB!}^Bd0o}4S{c{U_2E&2NmjbyVgX6!r!cuWorny-v2eeBny8?YBmlyLW0u;uTQA3o|_Tac>zT z(fa+!)!u+#od=8q;$}_P_5xlrX=sy<=21znv-#Z?L1mwY;ymiC#^Y53-WTdFE$+^A!WeQUd;prifWS1qCplCb{yVES3*=gHpU@nlk*^nljf*epfT zDn1o31%_EK_S5uOqJl$C+9Pt!IsC$WdlzcVQgYFYcxapRtA74_DyXXs7!rwkZ*H%x zI9I^iXuZT-W@7nGu}$xRqzuL7Gu`FC7a^OCg{|GV2OcZgq4&aAez9SO(B{O>9 z2Ez!h{q?4xuMOvd`SqC^D?Jv5=*O9W7g&Gd3|MJMN{dAW5c_!R?kTO7?{eJEGjKRo z)y#MO(wh<;K?kssXSY{U{Egr=`I!68@KmTAW zLb#GAsK^5)?83RHzUP8UB30>T&x+q)P&k6sPA2l&=%GQA3h(jH&aeFD>LX-;|N zsY+8_&{xD1*0(8f;N0fp?n9(dbUJtH7}!M`Y$)M|t9kf2iUFWiZvKy~Cx*DjhTJ-) zb!8aqS7F-tjQ>1EJdOJ+s!nDJ@_fPFQ%)SSnXJp|c70i^lk%~nZZxt7>2mJY)$B^J zIS{cwMZFgO=x}_ytXjtp7urePQS9 z)M{oChwS^=gAml0ClL>gt|=kvQi>{Z%#0ZJ9PUnl@`*xK2Rz^{LdxR>=$t`Gf!Bi#7JiA8{<(#WC=pgUyFMu*XUv?4@Y zi4~lsNg4gYU1GV!BqkNWgdxz)eMx2jT_t%HlFhc`w!D*+WIDiva*lb3Ej1MImkgt7 z#iXuv)Uk#)C;fi+5Z|za8!1<5;kE5MXjdZdRy17^VEEOS=*jh>#?R)i#cSIw8Y|j)@)CTW z4FrFl9JS?B(xRN$X|N*LE)lmtRqKq0TFEeMl$TXwCK{E*=GBf|IoTN|IxFeVzns31Vkk%|ra8jQ|Dk)HTXYnEY`$vs z)! 
z@nw587z1kWi^s7`a&r6srTB8-`#agty(i01!Inl_ABxiHsbz9Md)r6+3xy?CiehS= zLc}Ix{3~5(%?H$NFd9^ipBMTSdRYyZB%%$B<4o;mczGg}aWl<12kKj?9)(`Ks0eNM zehUx*7Z{^{eyMkLYO6F+N_}I{a*>7h4;D|p+l}!* zO=s{&_oR)>5MbonJCobrjNwl=#6^{W4bmX3Q-)>=l5Dy-c7X~BZ1)J{n@8SURac79 z+(EpXr|-^WZH^4324CO_jCA|=g6HI61IBfAo6dobE}2C}VaJQF<7?YSFRK`ddYVYEmT;-uOx$$ z#UtX>eEYTHxzAVv(DETL8kJMcTD3N1b0%fe01c;SJXZG~Al0f-BqrC56^z9P0JXme z{k3M0f497)AYupt0tFaw4LA8gLlNa!!b@I2-N{QBEVHOZLhalmP-)cRUVYBVI#%O!n>;^HK8 zfJxm`pyb(g(Z1-(llZL2S9PUj+qZFRPE_O#f5DbD@!nri?E=8hQ)cvqldR;^N}v5q zOTD&y1AViu(QS_FoZ;I`;dD|963gKkC_rIt(*6uWT5QkGiMiNhChS$-tGo+J;D&Km zO0>;!N8p|I0q~UnI0#(NG2Xmc+Gg`b@xt7iz+`%Tw$3P-&-!C}Y!F(L^9K-B6TYXz z2&9Z0G*UQ#@W!7ryUe%fUpy@{nks{T)2klAEGs1|Tw61sM&FFCDY(17ld`EUGZk!p zSEQ!6Cl`b-4al8}`+dSiDqVq`2GaDbsZs%^T;5KM>m#plQn1C!Z53`Rb3W`4%oL0{ z>HuL+ZwdV+^dOg&-UJhVYs5`J|_MsHVR^XEXiUy*!29Da&u&!MWji1HZnVd# zBz`~B_)RRFqH+HG<G*S{MEE z^oONq6C0;J~aDrY_-7Rsgyjx`QF?$YYBro$+kI z$6)FZ+c$S%gG(RekF8ID8G~lVdKa9tC82E-)Qdt1r~y*-hwjW#$Y ziB$Rifx`0-7;t$%R;84vub9G*#XToJ6<~X6$ z+wJE#YBqCGR?YAd!LnvYxVkr8+Nm+sd^-^@me6s($2ex*=&{?aSWp}J^zy?$9TWVY zE6y-m=A3`mk4?2LdPzhXri$%faFfaml#s`duqK9^1CE!c10H=z8xV@lkM zbvQMc2R~~r2W}nH>AqJ`4+m7KL1|1b{EMxt#YP7P{a40SA?w&LHR{eaAkb2$lps*m zby|rMD1n6h5LbzI&PkZBs!GxofLBbthMk{gDcDRVwje&G^fq|mP|=GvMFE(_@4?i& zm-2K8A`!x~tqV3QFM&J!`sbzqeV;RmE}x30n^NB6!P%R(FS0(b)V?39$Hz~%%-CEF zL_Y^(x0GM+i%Uovw?aJT$%qxNTIC+>3Yf|Mo?Jgw71eVUCJsBJ6dfnSbx$c@&?q|d z8(m4>G$Je6b%vq6@4k#%`+NFH`V!~0{nNP<+YX<+XRb2rgJtA_si`=c#Xmv8qamS0 ztYh4i4~(VOh1$gZAPCGZdg*M{$*0AhAn~!!+E1`BT$Ji?R-yfs#O?&Cl9MKLe~_f^ zT$)$oevKvy;P*AMYJPlgr1>@&>m@QIj zV%g~?LgaW@Kg#i4D)2bVz~|F$+iz|L^R4zOwBXM>TG_jhu=@5`#(&!cxJ8eNYioHK zTIUPhnF~3jSLd1^{zK;QnCzR!<1NKYE$v<*ZOk(|Xt#=T`oWmUeTctQHWYtD;9sok zp7yEXO1LkeHLr+G%j>;ZUf)tanCn)a6x>ic{f?_3;}@8QiuR^UH$i1O>vkpCk5f64bY^UH8qozL&|8C zQNoLvWT3%S#nX97&K=~uea`<0t}m>q)!EA;=)#IvLDS-W9gYH9?AxEEO@duSbQl(g z^%GQ6tNfXIDa+bp#EHENu-dlME%V8og4eJ^W3Lpy47s&{Gx8A5TA&S)xi*NS=h}~Z z>p)?`DDNU3L$5w1&HolpsK}dIj~bp9TvBgV-Gz4zPF<_=1od@V4YGQnNt>`fx>Q zA>Qs7osxhe?DQ>rGmGHgl~bQ`#2%Xfv+;8Q>KQ_j=l`@E^HGZYKY{iC)p8^^Kli|} zc#^?ld+B&l{{ER>FRzJ`$Bbc4vK$9Q26I~`-9ZHn27{G&}(@vZTa zI^CxzKCV0bxIeL8Y3s2rKfRe*dJt^u6F24mwo(>fszo0}qE-M4ZWW zd;FQfj@WXgc%UzC!tow1D*5K7Joa%4$?Tc0S5CFJrupo-4!X12x{_cbt zYe=oU1V!u7<^Fn)w(N>X3@54-OYV|dID^(UYwsTvBv7vvrCMMxlLHf#=b8u(^fp`} zi!DI%+Hvsd11jdc>W0b5LY0E=76UQR?%Vcy6Das6AKOufEXU`gWi0vmN0b3+5Dp!5 zUD@_r+e@xlbNd00^+&TeY#F?MD%La+|AsOMZe3O2qMTeyN7%?UJz2$=FUHvYbOt?> zk|>=|IX7mi+&cIOZt;XCi!F+CIXixPs_~rUZcy8(5`YrgD7EaY|2oi>ZFXJ`$h*61)+i>II58FT)b6PsLGyA~T zY%u9L3HJO7k6z0Nd07W`c1vRNm4V=qX;tV08|OquB^v$b(o8!m;B(TztLhVxsUH=?fL_hTzBasp=%-T2Ik4c8O+eNQ2E0Z$V9q=`md#* zq3OlkdcL)kLxp#tkgIVy=+b31j?p(0sK|1x#sa@*7h&!PoOF96jgfES#*9uKPZZ>h8ya{Z{If#orOH#6PL zn0vH^Sn-qjWExw;dn4}uqvmf~to#nK4Vp|XhzaPOkxaX;d*kdm(K z2f>FO1OMQh{X;q>Sg7%KZ@2oxU4CJ8zAVKXy5}*yy{LG0dOyYC=ldf;t>@iPJN5W= z)~7M7ur#R7xop!r?sO7At!6G>DsjD7?zFE`OEBp8Q@Ss3Ja#bTKbdudZY}SG z9H=Y$ktTMpHYkEYkZk9Y?w+fSBSjtrxwzO^)iuknE=o{~^Hj>#4~(fTzJl{8hf8tO z{({Gp&bpe}$(!gv*~mz8n+V6JxCs^y^fm}kWCxVl z9Rqz<{OE_8<6YX;A^;w5tjLXBSQxcDm|hn|)Rrqux7vp3!#<)p;3-S_d&f}57598h z*ECVvPvYI2+#MMfVyjza2veEDUUzi@JTfJ*-rej>o8G&~mGm#Jj#rq>p`5B8I*y9H zMSXR_adKGN@40uLzpME?EVX}JSn?Kn>(E!vv~O&VrBB#DT=fR%K3B>1MEc&yvGnhv zFD$N98#;;d|El@t)B}%$wlu{su{4#Zyd{Ijla5s1R`T!@cL06gez=wb6NqrC{EuQpaYGzm!9uNJiM(>#?C^G_mw})6>^W4Qy}T zR{kADmdbU1eGsVCex@rIiV0TLLPnDW#jXN$BwlZ*O_yY@(A|}gaho^+^!~om%e3uyYz$;19n^i-(+AjE&tTW)zu3s-dZOG zNMER+xbm|Qg|{4XlLiUMr0Mh2jf=avUu_>Xaksog%rU;d{zc;3QQoI<(zUi#Ta_3l z#~_brOc2aA5tXMGt>r8k1n_|ITOWQA+2w=RIeWMEd764Ox|MjFb(hloKKKh#3;t08 
z=BXHkuTystp}kLh&Q1NB7@q~7*kJN#^o1(rAgb7BZ7lq|Cy#n#>BYC>)}OJKoIazr zWbCT4hBcZbm~r?p16$UiO-mbmobg2C>A)tdp@S;|ZoAtUeOg8YIU}_Rued#i{C%LE zMn2?;yNWM2NO|R5xO%9dBk)Z-Y&-CO1MBlM)XhQa)tBi`Zu&5HF3=daKDhTV%xmmj zGg+~P(9U!f5ynZMYH{XKyybIVcUQCrS0@UZe%m?mg>&O24*!b5HOyRw`Vo9(8%yq$ zhs_775S+go(>)lJIhN5WC1-1Y>pT1!CJ0QRh)^A4)EcMSr{r#f8Uq+DaWb~^esS*( z8sDB7SA$=RdEp&@x+<}%IA@{42~olXU5yJ+;wC_42rXq4fj7M#tPW z@~dcI+3(N4Fuy@jg9R5bps{bo2JJ_%2+Sg zG2`eQv*BG?WWQmm#~g92%CqOuf4IC)Q@h>Wo3I(-GQP+9m{p^YeMnBu%A`}_t{j^$ z4iWWF%ZD!JsJNXV5aR=~g(sD1?`vAte^^HdQb&CZD2Fp)yzuTf)_-Lev7);&lhTaGR(Hu2Cub>KA|K!{s4y3gmP~#vjPS;aZeGfv|uTQPK{6kR!PE1HeyZk#m0(y0ojeBJS{FcAW zyp?{y8&{OPu=N=_3_vD75^jUzhD7Ek_vLzXl%CtZT2Ms`q}61 zioz>wBp5h8nSG_``pMq!=#Jlc480})AEmtcE}z{0`4_^7=b{Tw%>Z)?cql>4l44{|)6Z?w~o{JA;Z<`ZZ|{gcHN(xZ**r`n%A z4417p9IOP#D`-e5{Zd=~WW#rFKIfjvOiol1ynrN_AgV{>79BXLKY$_h* zjOVt;c1ZkROuc6~n{WKMZPct$RjU*&MN4h9SFN^YQG08}-g_iQQMF5Lq7<#Is4Yfq zN$gd7#oi<#vJk{3<1 z!a6rz1V()Fm@x_X?XPP`7sibZ;4J11H*vY07 z#NV7;$hx~7S68K5<{SM!AKl7i2b{cpT(Z{>Aq^u|7z^B0GU-CoO74S|5)rvVaxS(`QhWYkb)ugwUq0KET*Ri$p_>E(W zGcz}kdTN>kB#eUccuT0GDI?55qdU7tJx`YXfa1|uf!M?EZ@=_a1e3@mV-aE$6qFx~ zo({#b7~6B85B(4~1-s@hQ=ebE$}|hy_36>B7lAFWgAqN8yOgh1e9Oz_|1s(P+N0oO zv3$w+B0i`YH(vZy^tv@q+b&^w1vB|)X9$E#;o@iOG(M@cfA6CGWOusyMu%b;{d~bH~x=%xe{;EeP!Ib=fdW zD_VZv2VRJ3%+W+*VY9d^EYl)9PrP4rQyw*?I`FbNAeE4$-f(*X4caD4v+A$R=H<9S zEK>hIS@U5ewZe(^CWZulN1($tV{2RhzegQ72nl60m@0ANR?l0m<=P8FgWc~m-NZ+) zLTm355IPNVv%1lpBxe>|x30$jiOM-Df#irP;=FK)v~}rhsI85)oX1tP70YVw#>vbE zN%U-858;a4RKc(dRWGg(IoAL5^*53%FHIMAzAoclWg|CHs_N01u@(I6h3IO&=G}ck zqe`l7`0Qpov!2@BAEp)d+s?abs)7jK<1y~jKBnd>m4Dpq=VY-kl*qq_u(Ghua73Q= z5dXo@Bd&smz&>MOOj$q#;o6Q#)u8` z6ZA1Uj52`(m5a+i$Nu^eS?qaB+uH)hG{O#jZl%>pW;}|Xy0_T-FJ=oY5j+@}<`TmHiw`S^fhVd$k!%Be-~-vyow zjm>*4#ZDfKO(hROBFXGI%ReH7cFrI>PE#wSs>*=IfaH{xE z@e5Ny8nQqN!mz-y9p?bO^GYD*ogKcll_MA?1N{<3@+Rm7t9{W(fVZOJRJ{jJrLR2E z_@ z`fqm9f2_UcG;}uy%=T%-sz>o%2jrmSf^wdZ%@)(i8sg`=?YagB-+8k|@A#ASk1H5* zp@6L7G48e10H0nHf4;utr8m#>I45{9gDLr9)2J=?bIpt35BwMrYYYrs9^l~Fq;Yf% zhBIov4>aXDT5MN@vqZZOHYpX@W5-v|n3;|$dGUm|yqzphO93+c%X?3Y5lVA@f7211 zQtBiDQu;-m2PgojxdhpJ*Ijt`$V2Rnx(Sv+WMBI|U!(P^Our&Tk9h(EUENtVnLm>j zq;j@lM5j0RD$KQ0#<1D2!34RJQ;y=m&nM(5hh;9I4XAD?S^OX4r%9lV5J-Pyy)yda zeW>3z7HTx52>znm0^dfc|5T~Th2dhfsmenq6sEg+nzs?C5x;d|p{xg)9DRN3tZPIL zMSWCwAU8H*TPO(@E9oQYXU_){@3EL`$l=pzk2`2qVs>41^^G4V!m(trn(WWS=VA!3?`Ei_b)TM-%eQy~J5+F z#CFW#f$D%|%Q-aRADlL^TZrGE1r)VxaLj8dbmA|0|4nBaN_a(XCA%BwJu^lNjPZ44 zw-*p#Q+oFwjn3~AE{X;1Wa3>phHstqI8<2Vyt=n3kmcpqUe2>EDrO%baadAx2-$#j z%B*$K(BoP^Q|WN0Bey?@)bo11s}Jq}1KJ-otj^nW;}dWIRPX1XH+Q-ANx5s`)A&l^ zHAEdXpnhnB44{J}0$eq4lDFH}SJxuo=SXlYWYZ>%E^#*p`?%1ZSa{;g(f1o}qqf+Y z{COUs{&6Gf_K%MU@iQhu#C%_q!2P-W(D}c1$(8`bD?vvA5yH*ZWe(nlf zU~9|vh*9~f+2;9_J_t;RToI%cu?d#ofZ8pzVIzDyOoXE8GFy40$Za)i4s{)%SWE=*UfKBx#0+{km`;w6vQgr*;x`s(5o(O6@k zva&S`bQphH12!Q_Qp543b)+!E^Gpb+9c;&4>^0*@6&BlVj(TJqqz}%<-9Fr0`YK?f zpz+3dd$=lx=|5ec3(&eOL9?(g;>$e=YinE(OOz>0H^S%;gzRAnQk-E@zc2l7L%cVs z-lO1D?bH4Uy>Djl<-t($)&b&@sTx9nmQ;?+dqmM)EBKs2m>$InVntA!K0CRPe>90py{sv4i~^4~uU3@c-&E)AHs_RR>E5+v1br zTSnht?;Q87{|a2#F1u9bQ!tfd$oEvKAn_Pk+GsQ6Z?hI3DR8-4ry>5Qp9em}8SdV3 zq#_1W>`qh=rHYL^VwqBRUHUJ+-E%-3lUXG-)d;HS2^FzaU|fDBO5C4+pZ33 zL7Dl!HTeJBc5ZHbGLyFD4Bd=1k<`fgyKqXj7rXDCFHxG>K2W$xpfMqL4zrB4*4;47 zgZRXy0W5ppO5Z11wbp$LJ*G>*{;%0S_V^?>VW%TB6FK!|+>?f&K#%s6EJ(3o`0JW;reC(+$uTyWIu43%h zojo}2MyB_oV12_;;Y{~~h2+yu*3LFE2c!x1H~dXt=j2%&N=FYyhjvr~4q`8O?j39o zP}bit@w#M#tNI|Oh66M+)i;IbJ=@XiYqZ^gOv_%oGhbFy+X0VrJa_WN)YMTeDof6d zK8$mFl*maC(RT8j2#SF&T((mk^KV&tT`UF(=`4pSw)*A;tU8aYi`5eUafTTf+?Hjg z&1Uz(lwPNE(Len!9>0m?l(R4IZ0w@e(pE$q5Z^_zD}8g%h)wF6p7d3y!E+JxT)jb* 
zVCfZ+A9Nf=kEve9Ot;w}Fv0*`#$*+#Y+hHaagIr4)AdQ-aX<0I9TwO|$v^xC=B#7h zRJP_8(+7$d!bT9U3Dc60=uD1l>eI7q-j5O@50e9`MrL;Oq8dj`r{Mo7?e?qUrYEC4 z|0(U)r`QU75ZTlJRob6qcZo>_&vLwU!&85-b!*pltRpVPiOgPmzU~&x-KIYJto;#p z-(}I^)p6#x*2Y%pr;DYqm}m+z>&V1jacjf+#A23F%vy|(*_GC#c5>NfFlNo?`^{Fe zFdC7HC4Qwpy(c=bZN=n++7AC5hkjRT5B7b?!k2j-HP+{Up+bX#sbsVL7nTV{729^4 zRD6l_bBgU1FfOp({)2|m5-PMVh09y&4v`PGmi}K%KbL=<$nP*?a8dBftQf%ya*1nD zDClGdm0p;?l2Km@7t^0QUK)vF!c5cVV`aM@y%_nv^P4TwvUB@#zJAG(JLk!UvW2VQe&|nAa5cs*zM&V9*+lDFs)?YvN!$DGcrclfVBw{hMizhiyi?O5}p!5Lg zUTiN={w$4|*j!7>Rid?eW>P|xHRtr?v6r@bEy{Q#Z#2M>ojQ`K=(u$aHsFHcwE*4B zaExjItE>sV{}pJBR!yJwuDJVX0~-XkhvtQFz^rY}I&{=9T1N3}&ge?fe0sbz;65~0 zK494u!Isj)M1KP~?8$^YUgkFm<8&Hr5qS!lb)T*O57cwGa{EFuFB>p(c=6FeQ*Hv_ zv#xD|;Tx5YaOO>!R%Q`i>UqitRJPPstME1(Tvn7ukIk z$8)u{TKD>iXz}C+yTqS0`8-dKLt=SuNBtN(1aIuhdb1D=E1U3;6EEOQPwrh;c@BKT z#JNTDlqh$_wxFg9qKxWliBG%w7#aJ2^!Bk)paQx+j{vY)fB!b5qdpn@)lZ=x;Vl0C z_1FD^74bAtbO`B0>^{xY7p-CNwr3Kb?!K+}`sJX2->1o6eAEP@WEQ@lA#=kDrRbFT z|K(-<5|je$D&cNeWQpKX&j&f-njJ{cKGHdgE_Xrn}iZMLVQmJQMRzh zmy-wgtv>Ae+_`tldE|1+W_qR3(}OBC?4oH$y}@*o0gHTNmOj(jU@UYIFVciNIf8fJ;oBSCds%}_A zBisNjbsBl+E-@A7Lr-t<{N0N6hhSOB`8Uf_`EYQoV#ndo|KNF9A-DSI=r#_R$am(( zz&p#BX9KrV`#~4=Nr$@r>=|#^OvKxv`~Q^EGY2}h6DRt@_DW^Jl?f*oQ)(LBtyh*f zkC>VBB|g5}nv8JfOka(8=n-;7N26=gN3i(eIhG)NJB7%n0A)a~k>(7UgG#D%2E!D8 zw~&MQ{6T1(+y9~S{L9%9Imb7*!+UZdMk5?el&nxB0X8O#Z22fM4pLP*Px(l=9?pXpO8WW{YTHZ#%Hw$p^q>)_@7SmDjx{pccOaCS2zH~YFYB1VX zHTSlA!qE;loXk5s4h84@A3i@dA0P5^8%6MiivSh$p$=dJ()!axP*XGGQ0Su7XLy+m zX3}kY(=9S9bi+F*U%(soW2#BhOHU8i{ zwp#SjYTZs zbu9;1h+OsNdE%l7)LEf}QaYaFI+$z7)O zBX&SYi1cyV5U~kRqe@wr=B<)-5KwEHWS~pmr~e|YZ(Lk{;V?EOlh%2L1)9mY4AZ?u z^@+q+Ed*1I-hbC=@%3anPK%{X7DwY*itYK5=O5)1ilVB*B<7zvztaLj0m36ylI#TY zXS;+p-mQ>vJZ~rn#BF!gZ3;tI`t8m3vf8G}v>oERTE#>=XEO|Sfk6n&JX|d{H*z0A z5cQZs5e5N-?#?Tbbfa@R`%wl}kXjc2eBAio9Qh8-Y;V-SO5(G0F;L>I(jFNGr;R-= z20%Z(OMaMQ!z`l~4uj60ntYTBsNpI8;OQ;A>M3Ew{L+s3#GlA2Lu2@6;H=|2eh$M|&nbQW`M-o*?UKX7$= zn6Ug)E8`8WHw?uTze<+>iuUs_zjlX(*dz6`n_AvNnXyf{9J*=uZad)H!+mx25*ilo z(o+!YuVXLT5bO(|7eSF4=xfC?+3})qO-T z+*?*#c(neZJF4_q-t!hdS2l#fev~yi>jlfj>T&??7SE?OW*(e2?G!AkhrhkE+Ct^G zJCkoked8~e-n()XELv2&`bP@mt1RJyj4q1iG$7l_$r0pVXHCx`Z4H3+5M?Uv+!=fH zGDJtm#o?jBjj>kmyp@lska9A}xrG;mc2i1J+z_wX&5pZbh3NmU%dRU#g>wuiUdg=c z{D`Zu}Tq)B2ZjHE7vwQq&!Dd2@-jVt~l#RwF z_~FQ;O{c~-A%Tn-phvakyAb}C&R2Q_dj|z$d*403Aq(|ouO?RKL2rTWM>OB=Ut1Ny z*H1)6suTCYK10%Ft{>^&0j4a{lWM86n0P;I6%tU(z{%UpuzotdUEC z{A5@v5G*Gn(7e7tfGx*8fQplOPM$uLw3}O{+nAHZX0EO@!j+PYA9WQ@s!^RQx*I=w zQBE-(Z6|BBCys=_oqNXUsD-Ex6TY{yt5W2Xc3K$vhM`#z-i=hl(Cj6`(2)nEVtR{F1U}*fel56Y@0qQ|zop8reJbfZX@A zzs>*Pi!(5MOExo$@wUmG>EsWu$X)lyNNwhJMgC-r=Ea!Y`}gGEWwM=>{2i5KHlZ^9 zvZFW;_Xe6IzkW5DQtlLJ9FL=KnR@35g%j|W&VN|!v}v>?ye|J72#cmEr~;Ti8tZzd zVEo?j%>eRfM0nTW27y1Q0b#ckvHRAbOD6pKtTnzKbh=-IljGO}T_W%UHISKWA&D|1 z)9nX=*m(}T78rEB4-TBdfIoCzBAan|R4fF4iNtR}@Yf-U1p%Q55S|u~Ehf8yV2}{> zr6m6H)|kk}Ux7i`ssa4PzJ~-p7hn2re-vLl0HL4B#P3i;rA%PsX$2evw~9dII@`iI zfiyk-^nQX;Q22fT6{$?UMrNOz8RzFIY#2fP$im=8I{(G)@@HS6gn+SKt+NpC1or%c zE|SV4(Yt49yO(64KR)c>H@YdUai8B$VZN49^xa4}5Iszmn%tDa9qs-}6ji_+pj5hg zVnKhEy}unlUEy_JQ~W)ftonx*KgrI1Y3tswZ;d^7J;Or&&0G8EQjr^7s!D4 zHy~t0DN$RQ15Nw)-p}Qwj=ai;u;%%h&D%UUw^JRUoM|w*w766^Xv`$)U9m{x{=k)+ zYYpqJ&kPKe(}e7{Kloqt)^pU!K!S=%FwK)>IIXzW0m6fMrV=-ybjsa z2HbbZ9}^D^y(aDFPPw7lHw}T{k7ZR>@3dH5Nw2gat@cgf!7~fTH+Zm5=b9nrZ=?Ez za%ENYOhJaqQ3%R=-ImrwM`oQcCt3bNwOxd5(t`$L{uj?dwnBw&;sLS+>|g`_5#gv~ zJa|IW)8q?~Q%P(6wxiu#KcAbZsO&2Zk;>~c!*%{p!*cYZ{go1+W zhf6QJ-AAL7&yoHEDN^=UlDtJEYihc}bM5|f>;?GJn!u^8vVMwD20|klKq}dH?c&a{ zRbGBkfQ~#@!cJjbgU5lSm=Bm;y8X5DZ(7d7{AsrH9JnGuB1OR?RQm-YL(W2k1dpRA_r^X!z^2>L?zy6k8zi>2gsE 
zjJ#lx-QrV?sLil+S@`I=vY3^0Mch87Wjs%9&LXRN;;rshK$;Qkag|;1%-bOYyWSVh z9Inlip|8G5`Ut1j&eNmkK8K6&TGkSC`CHm;91!&{o}T{g8hn@D4kae@FqnQ^{`iU$ z<1-!?dqchcw^z42x9`^i_zo-n>ICWq_YVif-%d%5oJL48K#lhjqi=+k(Lsu@(>^H} zGnk3G@}yuJn?JpRv7gJfql2IKa{7DbNNZu$~QPYH(_EiyVRP}0=si*abs6xKfhwNTCB79=06XAet8?%RYq_PsaLAMK!R zhI=oJ16}~we6G9~L<)4}ua!q4$QIMUEOVVn9%e{4GIz+i=EyfN`p&Z6NvU`5(~&Jn zpli!|X|hlg2z|F*Q^xeD4y~|eb_HeqsnkAjwb!S~-7M5Mkatc!c-cJAGy>-x3usKS ze`L9Stp8=DarYtHekq$!E^ERALtO)VezQ>N(pdLF9Xo;=b) z=SNX(wG4{7K{&)L2NN*~CwbL`Yek>w%bUgP>;x~m-YpWvkXwoK1!MhQX_hBfj1>Nj z%Ao#KkK~n)Vme{WG*rF)(R!bnN+_38e5np26NpJBAeQP|V72o%2Sy%Lvou2i(Dk0D zi>X!P-$YCL zpmGheeDg$JQNS_dY2NVpdAUBt7pt236`sXV&rS3w*T0ZDy6wO3V?L_K3oj;w%G9KE z+jl;h{2;N(Re8>!c17%rJE^p)A4*RCa14*AthBQ$;%?{`9gd5mcsUdsP-#SAoOjy% zam~YvUjRkK=ghE^l<>h!A}TU8in&PTS%KFEHv~U3ahT1jmo6IdW!SjN!j{?y6xhoY z{GNdPD+t=R6a)0U!JYIPoEf7L^;sMO)o2EFz`FLjMZ0O9&xHL+|LM;yz0lt`xf{8U zvSdB~F?m=i1XrzSp<;SKq&K3=j^1$k{g7$?m9q_!ux+!dGV@@dR{yz}38Gno@%9Vc zeYtY(v$!4nw#JN9S1t%w>?jAp?#U~Dea+cNH<*QOP2RtgnS7b;g(6+W;M!~!Co`;O8~5Osf*Rpj z->k93W@KG_^#IoDxAjHZ$+V03uB~of1tJgT7$ZXIh!D)fr#k--)2OKb~d3LN1v}zSB*9?>rFKSoga4dMn&B zXI=lU{ zJojvEen$Hfb1TlkXo(-OUdmd;rGs5Nvd(gS0?==)8y^Zh|N1BlvXL=RaP?!$t4uK{ z=yrQ`fVDqt1PR^EeKaTe2sM#!Z6eru-7ykv-0p_%dVk!7thKniep-39jNFiP4F+ys z5TO)GgWAJA4)OrX)^k|D$ql-5h*9ZK>m&buK9GMH_wKzP3eKV67M2E zH&hW!%vW}yF3A{~#_Ag=KC41KB%y`T(r|I=Z<6&WO7eeNx)mDbIW0X-)*bCcM;$Pd z7E5652iA^4$uQ3X3CI{))XII4J3Hu&AT%|Lk@eoaI672%W8h2AP}3R`msT_JYBoKqq(Nk>A9IO))U zCdBOpgx)%@T}o1X0oon0?g6KGSRZxqCVC8n@+!DzLP5?vyylfGlHJBf*T2pBfwv3k zbP^(g`Ube!!1y1`akyZ9#ylbuBwOBJ9_ct~lEFcbtZrr>T3dJ?6CcHU1j;e5P$$!3 zsE-wpGIJrL7+|u>$x|q@4QVUEQ4V^G8Sz!esA6tvFb1LRVQ6NYzc&-EC0<47bKdDV zT<8(=nKo@9I8lxzPa31=D+lIH>T0j3cn7^(Aw6=)l%2GrLC-kO(CD2ZxJ)~q-|{7{ zp9_8R>sLoK14d`fw>w}_sgj&8v!aET_Y?rXVuig+jqO>X-xh(;~odn zWNSjG?8!LCgV#otzK>h43$vjgx&^;>pRe|6gg(u+G|S`+KZ{X0&nxaTOlqM6rWJ-C zo=>(ui9a#4^yz6E9!q@)@!oJO=u1r|Uu~v5e4ubPnOvu)7)5V0*gX6VBf>h$v380Q zD`?FNSTM-Y00(zkWksU!!?9$E6|U)U%6c} zUqPK-`YsNPe*^nuTrJ8l>tA_SMLf1S*zXL!v1K2Ug`c`Ha$3rXT)z03z0|(BKKEVn zo@;waoKUuVQ3AJ}W_;R9JJvKoa)MhX$(dLN@cEQhZz>&Sx;ZoDtgavVq|3#^AmVf` z*U&@Xlx)$kqmY+XK=Ld;#G5SV>u%1*5^K)tJA7r?p(oTujea;feNVBrC>`yIoB5#B7g^Wx zZ>s541C`bmQ9}|tsJ;d1J&M(_NRruX-r9*uw$_olNZ+&*1Jy^D-x{vdlx4Ov$&oo~nhqex?0^X4mOFXEn1{+anq$uV}-thGWf=Y_v~8{Nhj zFSCm94`~v+G8sP4>1^jaCqqRxbcWM!?f74Grsl0!tbTaLdPjUZ|9CYqjAoi__~c^V z^X(EXKC}mJFL!u=5YUx6XF(xSZm6q0!Wlr-lYUKBiIU{)Z$h^X0@Iq=#1sX4R|NcD zeQ%R!igjDpTUs*bycSLzMEhA!t~}AhPl?s7{U%N5bPti+a7{5oe8D9By3>#$;INv7 z%$>8`ruEp&E^nD*b;pFJ8yKA3bTT7rL`G{Q88APy(oVSSlLp8@zM~WGZL2qQG45fCZL8l%S(TDHQYdHy z9$9WM_iT%K(R4VMcipy1=gux-LvPMD>pL0nm0k~0L2FT97u4n@hoxZ05m%N_zwwqu z<*_deLB}VI5S6r9iqA7O9Uv^%KJp6C5*N$NDa??yS|ix$nh=aY*COfh`{sT*BYNUQw&}b52I@!SA%_8lz#pSd z8j8$d`d-OT?0z%e&lOP8IUb+9Mwr~qmi^{AUU)z*keQk_ke^Qz0SqO-Ubg=6S8|Kw z%^D^CmkyjE*r|VH6ES*Jzg&uCMaQWyKH@ZH%og(yvt(8%oe)BNMpCw(j38p)zx8XD zwzN61v>uMv}1M(l#!Noq~@?$psO^EV+#D%uE75%<5G7U^mz1LTx_g2~N=dhOy=+G}Rwfd@pl( z!9G1;QCaL8!b4z%QCJ&GJB>NJhy4*TnY5le5_O8Mhs+%luOqWCYkl}f_|Z!;)T>w_ zqx_V+9n%U`rb^cvUk50**`}(;$$R%?z<0G>1RDpCs$1H?iIy;!hlpRb?}yQ#AlJ=j zVClCIyJ3T#-d7b)7&ZyQ+f!!h-y~{(Aex0+BcmQa8(eF>?$k2M`3;ln%0;s}mtM0C zc9{$71%(HSc{?6IH>heD2YiC^m$5NeYqOHQzL_GwZ|nNZ9hkLhEzjL2eK(7PyI9S< zom*+yxje_dsak(hVy5Rz76?Z#B^%DX|6_zuJgqgzKq#D%jEKR5*cIxoFY!N~!Pmaz z3R()b?uCuSDq=ggif2P@T4=x(KW7{rtSdr~_g}(U_A5V$lpm-5Dl={62xLBSSxooI z{5)!z`SVvMz2QUS9U~{Fv+Sq=?Rko(X=5;ilCQU#!IjN=N&SQNQ5=0~L=C`+mGH>A zp!TmT5mZQ^K04RoY2C`a)2sH-kMD>R63t*5YiXB&N66Z(gAI<7{O+Q4a`!RcwxU`Xvj;QN8ubRe23H>q+E`7&G1eW0?^Q;SKT4r*p(Xb*a; 
z9{}!>Ow-t-j>cc~2|3|{sjs$sQEWpTz)i?5{Yk*ux$6YaVS6R55<)1=#dOy&DyGk5 z0I-BuCw>sMe~0~c$dZwjnv!a6CGB5xD^1~ z{`Gb`w_H1z-<<}54@C-Kv^Q$NNG!@ZCk=3Vd!$d=to85qV*?L!1>dcZjAVPvRB)5xxBX>8 z`p!tWv(mM2>OVE|M224c+G+QId}z=hMY1V-&>0#v??juYax{L~*ksk%epH12-F~@I z(~G|!6!IL?Bt%*7U=4Yvt@AmOm)+a^--LRTnb(Mr4uFH@ZjpqTT$%ta%P+~d)rZ?mhK+7~k zF}>o?sxXSFw8fg~dl|*z{0eFn_p(P2DBH(Ki^3vL3a~lDe38Lpk_g#Yb{z>qvYE^N zca7264s{wp=x!9&Jnu!~3N`L9Ik5W-GZ0#XBkR=hxjk#Lm{-BGuPp;W)t{@asqU{5QAW+0f;cT(unk4ZT??7K1)X;^WR91@ z3XzIJ6rTkmp3k zzw1kL|E4IE)qpT%ySJtH@gl?*w=P23xm)_#`TB#du){)<+|JxqYMDj2WZK<~*aF{z z&N%PLMJ`RC$Jnvuh4m%e9Ao|P8I;SprC#JWqNPg-WpyC0bRZJDmsou5q{|w8%0Xlk zHHoc24a0|+1z)=gAe@|xRYgf=hjMSg4yN)oFMWVjgj0M4)rld=KMbDd*xmjM`mInj z2y&jx7?OTJOZi*=~ zM6_d$kC?Q-eu_+l8$Z*&)dIhOSE9w`rWM(uOAxFV0ti~$u=>sBbs~QQ0$BK*e+%pl z963AE?PS5rU0Pu)pIo|BM}17#=!V$?1J&^cwKrXsG39+9Jrl2Ez4qf5_6RvmON2RU zq&jxe;O$qc8mB!{4ck)#4cKn-stW@2cukjo0(x0$J?BNa-PmU;8O^X)=edrDWI|G~O`#|%7S zjE~%yx6Ggo4>gqtDcC7uD({WKBRaJFxZ8rU6v{n_dDTuB61_OdB33h`a`c#iavKsc zx(aQ42>(X~8KAaG@VhDD(Bg!vi;}=;DsPnND+jAGHycn5svnx0i@(naf|jM&)>Ni) zl&9%_dz*A}OmvDwa&aoX@KNC;7KY0qWiuh@Pmm_IkR1xuWiETf3*#5JWzGFeNz1eA ztus~yt7)!j2jbAS@$u><3h(3yCB>=nsRU`wetzxzQwO1)fQH}B&}B)OhFTV>`8*HZ zq~7M6FVZiL&pn%i;yHg-p?_ryu~D7nYJ7CG3jRdLjV1}`NUZ$u*UOMFL&0!Cx;hsf z3#eQK{nQN}^OA2qw8Pq|kUy8J+Mgi^luX7&iZ z4ru1=lUP9`Tgc!WgQ-r-<b?IdDqy3IcBD~jb`F2I#=AenK>tZlI@^*`@Z=iB&bA6CHopVcwsr1#n zu%%D+un;;*UX;Q(=Q(Hqt(d4=)TO{>ky4OSbv}|m1 zqm5JJ>i(AwcOu!^0^EfZn=0M+@2J$NYEww7@_BlVH&X=fBt?mt0GR0T7xQ+=4wj=H zGd2kf4O*4IC~1}iUbSvjafI}qmBw2`LJt;bxfG|2hmJBO1^->gjQ#>xnv&SVQ5OYA z-Nt@KWd84PpEPRkFgvklaf-WK|8n^-(Akrft75`PRv7lg&0^2_ex%eFU*l_)l)y{c zItodOYMq4~<7;D*biq|0#!Yg(O@UW2)B4BX)P&!FT#zg;3Ib5owuy&7s z{z(qDe5MMeeJ6Q~U&sg6{$r?MUo2Ug%!3>B5uGs!j^LQ~rqaF%weauN$i!b4WWHcJ zZm)ILCbo<|=J=7=!Z22Qq-YriAu%S)I(fblx{$8X;fs}y@77p^o$G%M{nzh?gjJ|jl!`g`t$w_k7pfrZ2FoRZtTH=``< z3us+K{J%-uv(C&7bJ<|)7pSL69-W)W49N}cC~NuKgW}%Nlk)6OoX+2~Re5m$vsi7bo4rmX7k_DYr`XhQ+<#`Sfx#lq|Wrq-IY-| zLI2d8c5~AA9y(&~BUo4HhZTlmO;V=C=|n1kJ{eE^wE z=x1fhkKb*QCPbURzwMi=>Tzz7CgFL`ZZgd*dwOM?Dwk)wh4jK4@?upmCv9Ojo)*8) zdq?&aQ8`=MD8Tcuvy@$bi6;;0VXv+UX9$G{i|XIMXy~bAr_(PE8STh>o}T+^)Znyd z^L@8-+D#;#MSM{NRSyr5jKlUTDiOA>cgj6(ePPe{YvF~eQVv`Y@-NzdHwuLK^{=fw zapm6`gb{-rBm#hRhCVw-h4Trc;Rj-8tkAC-z%zYiJ8_Mj4*@t&5)bt&d&euyJSVS5 zMIlvrqp7yu`F(jHsOE{Gu^Srt8)H6%KU+ntL9`Tj6K+0iC%+B4yuno|DB86c;3kf7 zhtih8upl4b+hRM6$na3QCYVk(uDN=A`^lhH%CCVsF|E!QcUV6WJX3_}-}Y0zZB}yX zz{zq`q=fG+{=nZQauX}0q~bqG5Iu|7WE634?qVp`&SBHqe!|z+*K5IcpHa|ZoxHzZ zU>p_xfRI+?_c)QjPuq0XHT~`m?kA{rh>Ab0%R#aE#07|(!P2EadCq3C z{k5`N4K@zBt=p&B>@Y=~yKllN^!?vFZH{^GBi8)ZU|QNP;eAb(Iaqbz)m~iBt2cJ# zENL42U4Q-#Z1H#va7!=*aF_Ox-uXmMa8~%*g7^t6Mn_0)gS0+Z+{>;d$TO8OSluNY zU>y6}Yv`;p7Kk_t0AJCMDBtXmv7N=z;gXRZ&git{b|d_(TEoS&S#8yuR{B@f=1Uz( z3r^KrGdKqL%gIo&`HuJ9i@>E1cSJM$2an&&|M<=l@DZ75>w^l<>ZUCf3Hw9w#A7Cl zfiPE`N9Cbj$TV|hWErLrA&z_!B&I9;BF(NqlREokocia@ELE}5F=Br;XT6NT9IAPc zZudUr2EoMtoupc8vsj@&WLo^s7C;FY`ufU?1KUki|GGy~WXIq)%$sLhVNfP3ztrD(4WR+JB_G$~z9=66Fhh z{>OVZ0%fDBqxRS9fg?BTo!p`&c&$vRMJk72N0v|B$1KUdK6!+r$kXR@1y5~mFvC)5 zyL$es13x*MF@s{D;2_4%VQ1@vWa}HmNW}{F%a&UvrrUcqX*Yr==*+lihC1Z-h)ncz zOG$bf2VbF85it^tbEm_pAzlK(`Q&UTl&5Ec&o-=`6CO|H)}x=y!ME{w&Ewx%IdjqG zgqACkD~!-a3}Wvf*#E4xmJ2evUIe*_C&&`-|E@*=*z+1s^ud$UM{H+g*Sc+AOi<{< zuh3*Z6>3VRZCK)IG#Q#)tA(W8TRr)2t`a;dXzVqFw)ST9aL4*?y|qG9YE_Od+N(@H z44o7kRs$s1lsPwU3K$I76<6MUamvqeeb^N%3*uxNy~t*#Oh5Gb~ZF z_(lYfKMT`kLrY~vp~FC)YS<*N62EGfhonwY?H7}F<3&`*<-9z8wI|;UNM;_Q`zrYF zO9FHt3%6zW7eXd)-dLwiRdNMQQ=IR3)IQZQTck(&e?PVyh>-gz#%Lqyn z`BA#X)|qKmc#^+5QGb02_k|9rZPj8Hd3_Ck3+%Jk%RFpK>IlN!cyL;p|J{ax?~C-6 
zrvy#*yhtajvz%uvDR}KQ(SUgdq8uBAtxC@3a)~-}04cxl)?+L3A{c&&6IZj6ozlwS z7PqW1jhR9kz{Hh%s0jY$4$Pi8$aal)@Ur}sTWVj+xqbJ;XQK8CaQW{c7xOkvfJLcl ziMY>055GO9hEc@-(3Hy-`)V-WBh!!bBc@;RuyB$5&6U|ai_WL9xknXklp1!XDUhgo zWouSbL|-c{7I}TDxn?dpulxT1gFt-0=E%-Wn+6M1PG(P^lZc4aWwn0&uCKq9n82AxmFNhr3Jm4qtByAkD%|`4vTW*FF@IM-M#aXj?#gdK{DnXM2g@gKxEKT$=zCddOj=r!A@@(-I5O+Z z2~vL?H0w7z-D1$W1e?}_l;Sx&@wzTgeINkl?g?Pka(a4% zlQYMfsbN|S>@0aRe-#ZPalt(w#G27H@xP7z>F@k^_+;OGH{E-^zwG*tEw6aX&*Qtr z->N<-`9=fMm$l;Bb)&3_+IcRcX5bPysY;mprzm&{BBmXw(&8bEY zLO)2{&-q=K`Vu0{(^3RJv$A@lJ`fn*Rm^Iml)q6XIbpG;SB9`gvfk!2JbKAv%Y;yIBjPS|pd3*gzr?kmA`iC>&CiyHbBe(JmRD$EH6CohnJI&ICFB`y(Pbj@n663NAVM< z-({Hy_lxS7slZhY?(`@3dyw}9913a?5nRfU^!D%hx8!9AAjab?`E(+bmTJYucoSI%P9T8yAh&1@AN0( z&Er?}xepoc=H>d^b}p}a`!6kjbkXlD{2%;OhZ0a$5`&aW>ct$Hl))hs9IDwSU#oFp zNq;l1p%q_Xm&i@6uuM~rjD$I3(}GV6rES@@V}`YU){n8r zd)}Ze`Pq*YpZw9IdxXXsst;?`QM!E}5zq~w7)MmEy)nf7geAlCYXgTWO<7m3uXhPs8`6nf5>VsYm?e{)>NW<#~8Zw?ie9&pB?rYGjB3M(+>*F9U<^uQp-``$=4__0*% ztgF_Yui~<2=G?}K&79lAdy9t?opfW>`R$}Z%)G|bcS#J2Gigx6 zg%vmCsipc;yo?%H!p8kAM#aq8X|)3JRWR4&wf?CAw3K$#pu ztT~J`zp&zPZI-o_lsM-j4;aEPJN7lJD&|?sw{C@+hxa*$IlW3P4A-dHQ#PF4-^*|M z7{1TrxAcF(pLz5-$gy7q?%lLMek}cYr#@ZZxqjp4c3KN`rN`2tg2)#YrK3_bRuK4j zE(qKxUp7UixQSOiPzKi(&Pt)w$r^0F7EDs_^Hdk{hR-7IrP4F|9@=zIKF@57vJy`3%|n3 zSzLej6%v#eRjRL;sY>~^z(z6!)LLb(3Y+x{?0AZ=G5Tyna*|tAFekqdWgKaICOK}R zOPC264=`WcIHX@IUTjN@?;peiOAm9b7evyCAy{!HCjt^ukO~)gtR0R$;-^H;52tA( z%lZpz{R-L+7dmAAXQWJ$59>C4 z9{GLW1-j}9@=1RiEuj0yt;K5A`5V&xQBlNjL?DuL$&r3=Wvs2M8ijOtE!|(y-nE>L z|El)~F8F8quX^L(+xfF!x@Gz2ulrlewYOfe;+*q~OAQ*Vs2cBs?H+FR0rsA#CJ|u@OzGTcwS#j(7A+l3ZF`zNph0M)MI1Ys0MsB(g*(y z#5D=WB?l7>VZ}~6v*3d`Vo(xIj1Jfdf~$C>MF1%1{M_aP~c>7G7E9$70(neXZ)P20@@kl z7dP$X^~wZkzjorMcMNj*Rt_(a1QWY4X7zM6XTNYU)~H}b5h~7E7lHG~TmVf4L;;e5 zZFxnRi+bkvsTY<_)`|}hb4togm-`o=V-#JWym2Vc#FeGuYr_zS^#Y!}pn#QTVtLV` zW*rh89&`SFK|z4Hogh%zI=nfF&Bpn)GeO5v1Wh_V%)|)SEADati+*g$8$b3xm)r5V zjk8a9wEl@}BHQiqKzv91*~dQ`-zW0gh5u5J;r>^Th5)#&ggEv?GsJ2=Tvew#(BW82 z<$FV0c(gLfuJYTf>J_DZNHwqHnUI#`(1xOOIB?QeQ3ENi0*Rn_3?xI3K2yfv_xL9) zf^|+?qgDm-W<;=`R|SZ?!mF8`&pzQRj`1pif;pToHE_-wY?V});1N-LidG<39vfR3 zgCD-Ar91g|EI;_vA6w3U;L}aCm)C1=y?Xia-~2oH>A1g-%J8`k{2EG&J?UzeoR|;ICHqRb$)tOiy@uQSQtyB zWksLhfgPZt8Rrm}9K1wr-6P7swI}LdVrx$QzNZ{7R~}Ss zI>R>NIv&2&-$F!E%8NkQnU5H9=6+YqUCSlcegK~sfA{jRlOB(svvogN_vUigeUDu3 zbHG8%hxGXnMmT@xtyFUvxqrf+dKBG27m)iX*kJk(Sm?WR`37)LhmF4Js3gzKR;p8+ z`=R`GCjIE-rWml?PjQn%AF%0IoM>EYv-wZE!Am%e3r$*cbAa`81Eti-IXD?|Qmcwmgy>aHT{FOx2nan!Y4JY0z< zz#QiEjls|gO#D|zq7&vpU#V>rt<@G06&OQ@c7^C_zUwXqu%Rl*@7cTF=^6i4{?d6#d`EGn= z7ypMv=!ZVab>EzdHV-eXw=9*Pm9Nf62gusY$H;K6I#BUsQ}-&D&jgzgR6KF>V2tXg zv4xJFOyR>l9RNcTZ~ao*XW}wo<8Qm1p6d*O=-+|AfjnjUB>D;j^t5-fQX&;(!(Lek zoxn*n&5YMUD1_Kll{F8BhplRn?z@&B`?i-Z_doK~ z!1p-+?)!gZ`IoQ#N6YPB{!&QAL=~=#FVe1Ay^-dkousZY@HasfUJYn-L5gWi;3vj7 z#4;Se{6NAw5o}7KUb?p&KnHfN1@vy~%#c##SHz%=n zRZlZUYLPZwHXOj*f&cRM9hbggIqaaL@cEI4C~i*|=0E0sCoX^b$=_!dd{_rl@WXu` zYuTsj+&^n^FR&(PHOxs9J;oJag{9Z86$WbSOT=K_JhNh2iBuj%mNrE7{q8gl zj5zgB_#s`CbYK`i)^mRKqw@Q%$cB+vlM}e=|Oys709_5uOXwGx~;H_7cukY$gqh-G+&v`>YKhx%s_!69cfezRA^wEhqZXs~EI&H(3YaK%sbtfTW& z#2_eEq~{wFT*_}9VuVfl6H@}3K??`P#tiASY$PtH@^Djo=9ux?iw+lJ3a8%0Sw^`e zntF}@kMH=o<+UIF&qVIkc-TqbvK)5^e+Cy-I9d8X$#!pq8aTq|dtM+PAO%%^-#KKK zrF51P6Fhcc<$FB&>53+E6YAGl#jEyu{S+^`fT)j=nJmUBmw0W{68SC5j>J1RB4JjY zDXCQz5%&0cIBW` z>yjotgo=l{l%Ez4?q?fg6E8}|BtMDG{ETzIV-**^?dI2g;ML2^-~3W(QNDo4m5(Z1 zIkB$tU6(fYNGtZ-u@fAg?U!LHU@Ykm77IBXPYvl0F+9T_#9`3SRq{sxeRF=oi6t&; zVg{#k2BY}OPdtaTRBefq4}P`FC|sjoQL<8GxH*dpuy9v|u++`HYyn_D@-us#1-0>G zBG9r3jD#P9YXc=lOKG8XiYK-QLombxE$$z7(uJVtD*|Px+%#3V7`&s?@vPE3%nv(V 
z8>8cqhgjiYOn&J?Om|EIOeIO&6F=vqsq|}B4D&;exNy%d-24u@{`V`G5GZnRTyMxb%mrHq6hDIlpvM zJ3kVOnTlzV{@!TUSeuWTSp8q<;`dr_Y04 z)`+X>z_hCAkwr#x;#?|S(Oe3|Yq|MAZ@V9sBJ(!%Pa zHl5JD)sfDhc*gPE9FL(FMQgN;N<3}R)WAviVIrPOa&I$hS7<_(?w^++fc}-C@W$(DErNFTT!#U;8 z;esatX8xkzGtUY;NvbGL_^6%EE4dgWgJQ78tY?wQJEB%izz`)cmy+?#lQt#_kha>E zjUI>I#W#@L96$-^0HzkF77QRUhvd3Ii7R&aiIDKZ+M=M2cd^Ftmf$|KkP?|l_@I__ z%x?vmS%!;Lvz=z0zjV+w^s@rzR|q!uKk_s!_jGyH+ke?V;S({`%n>fm-1y*2Bk~hP zU+c$)dzD#E$Iw3YLO5IsBmEUi0>+11JqM>6m4?+mn9ff-2OZ^1ekvGT)?=ZhW^#OB za4r4X@j{~5d8QtQJBi|lM|mo}xDu0%_PFHUuzsV&H3`U)-que&lu`ZIl0TXYLMX@R zh0FCh<789}7)9zn&Hby^6svnN^GUyA?aTc`LgmCvj*^Q|?_nWL<>#!OVvMabivXb& zpq2W8m^r^{P}6(@$xnOmBfkk*(L-lTt`UGDtK9m{WC_;RP(!}a(>PfyrD@fgjB90X%8EN9>J6y)&nVp35Z&sd6LUbFhmL`|TrIH`wQ zrtwGw9d=4Hb1MJ1q1HD;;xk(-iSKuxz55>p-*f3(BZ2C*mMqqxnqjk;p4E6Dqq@qK zbV6M!7A%nA_?5HJED&*SfUbs0f%eQF;>xeQ)FdCn9WT+mF!z*S>8Jd3igT4e9G>Gc zk(pDk@>YHpxz0~rm7Y{sC%^kgE*srvDoJj!C7EU*+najHV@~oAFye!Dui*;;X6Xl1 zKF(ES=Pw$WlcVTK!;g7%{G2}=mDnk4> zeXKe8DNWQNN=#?~6nEv1Xi);gFmYD=2=~gl#(kUo62Jo!fweJZ(wLPN0-VKVFvglb z^yzmJu*bnK|Mh)o9`dS1W zXZ8FLico%tj`{IRNUX!f+4azr#EBUM$|1iow^93W=JBOy&3q+@3*_>@FuGS9+@2Pp8bh%mAKIY90Z7t*`hMvT_FcQnli*>`GheY_QA$-^x9((rb zlZ1-Nnu6m)SA7#l?0{-t$(e_S3SI@ulZxUp0vRS{moT%;| z7bxfs{Vk*<>~{Lh=dbtuCqgi3P62gasLG;;EBf6$(pydJ9c{8H{~AYHVg;-G6Ie?} zV41v`;b1HW%C}8Z$xS$3?=( zCWB`#EqFF!?N=0w;TAYnxDhUgg;cz5Mr7AhL3~A+fE1xtEX|E!4i3eZhx)Z=dUDOf zIaQ;V%g$S_ngP39^E=hWt2Ap~_HOQ<^-yO{&<=$F{*37{eU#v|Qa3N1b5(KN@~jucg!r|2&=H1c<0Hsh&*F3lvw#u&#j7%Mt-rB5qd^qh5ILq@zz z0P=T|us4BXy(OHy!eF7n{P;y(^~;1Kzo?6dQDF^(3M2@g{dVkc+C5zTe}zytfzB`H z(q=m{A#d`h$1+a*RROYt0Rh2r?-8X$5`)Vyc~krJCT{1)jyf2`Wr)3%9ZQYG*H)U` zTh=k<4}dd+`2k+(rws<@*$@j{F#82SLyS&FCb2Ye_ANdJ%!y~G)jaZTA6}y93}T9} zRwz1dBDMC7&9$4wxJsf09nMcZ^B~_ATS0c-{OK9G%k>2hdWL+Wwty}{{K&!MQU3^< zC1`I0u_h`Aw~e3sHwe^U#_5o4*A*Ae&Tm@vpGyhFjGn0W+H*bR7ney0E&U?k1w%N=E* ztUb6gR#UUq{U_1^oAYD6AwMKcU-u6~$;{#NPzH-B8~z4jB1X|xK*X^T4Cg%Yz~Ui) z_>C$o(kPH|Br*@9n4(9yxmOM^kcz=R2qzvO$gmoL!ND{Ao*FxoU_-i*tH@o9Gs~izt}ep^_hFFY4g%C zz{;*`nYfm{NEI{U6K+`bxu6NPe$!Zgq6W~kwA;o#KRkl3U33nB9O|DctT0T3CckQJ zPG*|!CD$Uw< z!4I%bX0|N*&HT=!w4#_fjV(%fjSH8x^87h`o%5#`oK?^aQ-o`!Iq}9qJoQVs=aiEy zj1X-c+|a31zW>DqBF~BtXF>??S`=&y{&(o*xexu`3fTR0?QQ;-hvaatnSme5P&0Xe zB48TIFVnC~JsiaPJ{~^xMDeMvssV&3(Ib|>YD25{@&iQ|Ik>C_Xipo47@6NF^8`mt z1XV!dv!{!I=q7f)=fo)jI*8?0VO62BD3O$br;yxcUE~aQk;Nl_3az=gn1_ipX<0Wd zO!AS7es)hb#AEK9j1fO^A|RF43?YXbQO%JzHgD9BZR}-QuDa<{S?+c|{g|_szxwFE zk=|8v5kg%YnYpZ*W$yY|m_feQjl_-m;8{FVJoiMB)4g!76jX8UL-m9CMZ)YumNbpQpi222*F1v;@y=mR#DmOf{gBDtT1Vi>*)4NT(W;sG-52){ z@zsdIBk3SZ}BbOnuA+oV7JLxSYD&Qi*}$pw=zv0)~M@-kkyn%Bu#guIMr3^$rZ zz+6};rDt+!D`>4a&9wmPa5|WGS)OtBcj3np?cIMpz4O*95kX4!&D5FQ3%OmhaK<_Y z8JQfTu_be5i@(6Z8oK@SuM(?Hr4ZQXlOD5JMVD^ov`rJ1Lriihw|Lzv-YevL53 z-=l)%mvR$s+;B98n``xD3Q+r|eU{9xwaz0}5+t*EEg$}ZRQsm$qhnTp9ZaTSpQ*~h zC7lM0DNj3sH^OnHV9w9z5sn?4@=tln?>aYvSJ|0s_%~_Y&*BBHkQ@2Q(JZ|ZRze3q zOXbeyvUda)T95l%_esJ#JL9V_#1ER%r3~USPD?Wvc-5DhqsI!(Ao%bU|?$x-q-arT* zGLaP`_~J_9RbPr#8e2HABoI6oL>6igE814#{akL8uXth7C&pJ$I7#IDm8%+^-}_+K z=hxZif)(KK$tDAM;iK_Qi}o@PdoJ2Im8x{a7wP~gh#}V z8^m>{!uTF#8pex5`mKZVv@JgR%~@FK^IgW;#G#lvn?WF%)JAl$(capmq4Xk_;T(+9 z%nOXsDJ{ZPO$8*sG6`GpwPs6TDNb#K6=!MyPq>AS03n@U84zv;T!9IP$x*4!%6Q^L zlSS1;e)WX$Q+_d9Z1K)KL{39_VA1dBAgk3`7w{Bdsp6#fxI<1_{_!(^3I7S)y(hxm zFR#7zs@5c=IGNWPna|Rh0Y-iJ+~=MoR!7~zTzCSu4V7mFaTuwJt;VPF%6X)p*;B!i1U`e`k!{EWiB{z2cxI02H9kpM45 zD3fgTc$bN2CL?iq6wkyOR8>QkvfJyc5G7bpU>xs4X>)9?nzG`tPk7Q+#Auv|N#qzY zVg1=J-n!iU`5WWi<9x)yM=dY=o?lxYaMT%%RqtAW&FPbTQ)fD8wdiB%IwRBZ01w#} zIUCjupl3@wqbh__8-OKq$R>4!EydBgN7fBY 
zCe;sK=glH&%}e#lFMWk60c)gL09Ih_*Z{>V3*u-`{=TuCN)YCo3B2Rgk)F(6sq895 z!cBrJLft=PEm6te@rmuAu`nk2tv$U~{}>x$tgSckdwBjqZb`WMw*yP3Hq<|{QhVp? z0;(eDSmg(g_QW;kh!3w^xirC!<%Gi?u>8cgzI-|2pkq?d9_O1sf5Y;{J3r5+3~Y1S z9R1Ee;UKb0s%Z5%@eTDN_hk}Q7qYFAM;1ll($bWS2K}Dmgw($C(^Teo`}{jHiy(aO zQNF|11c^8AtUf==1Of+DHHV_>gCQZ&46t3*v1HcPOl$|SY71`|1{4nwbNnhiN*G%o zBY$|<07faHxYTTKwKt*+-|hSd`FvC(~8= z3*v^B=w$pcOKlN?@X*uSQ6GXYU7k>U@U3W!}^zt+9$WKhwteI=~OFb?M zN!8j|#aG1hL5wf<73va@0 zaY{hY$nVy|#^2-*$0Q{A?I*@HnJ)#9!olRsAfMvt61f(>Z>WQHRGj%bG7Gnw;k1wg zzr*R-oaPa#V`nVa#1P@UT9DKqrEikoSOB!{1{Fv=h|{mGkLWR^5nH*-JbENdHOg?2Kh593+Ix2o` z91^n-ubhaR((jCQ6{mn&DNfBIvFub)Y^74o2up=UC#J|x`%k1tCg=V4J7D?2r~Mzxzl2GlH7cO+$def@`@8Bg32@YlvoBbY@(aoEEKqqPGCc=6}n3dLW^6P zD?bVS{5a{zZjDk{aXb3?AVFJYFy#PD?>`DqVKLL*ohOAs%X{roV3K}<(T z5fE2D%7j)pQ8|mJUl@`qfxNMDQyiqI02-N~yGM+F(bZzL!KFmd&LxHEBk{zw5c-IN zA#qqZhF2S{pEp~Zv?jvr#_1{$tjv$~-TfylQBIdfoO0gstAG2o%L^X&ckrgNf6G)F z2x$F`fdJKlrTRq>bVR>tqcsmJNNh3zH{@0S#?2`{5foV5f{yT7xi6$6*U0ZaJ3czq z?4oS-%HO-@uuwz-ulq;AuFBUYvQh#ux_<1-L;`v!Z_*DF%+ybNJnXUSFMdqSPy0N| zD-%83o2%kGx#N|;2}oK&w1t4n>LGiU99c=HjP~e?ScYdv`&6iSsDB0#9CAkEH{}wO z{NbC0inim7aK3?d{6x<>IzL4W0K$1=h_$s9$GIZDxk+9uNiyXy6C=QmeTMY?J(A zhfUuH0@rGYn3f`Rf!>ptV$4^GX7UnDfN&Dz{1QuXu$jQ|Q@9$-JwJ4b6SL#Tyt+YE zQSmhav1tYA=%{$=qShK;%|Br!HZ#8hjHj^3chuK)g%uD)7ZN+YEXouB_9H)4 z*I6*0AC`qhEsMZNnH6I%0y;ktuukzt*Og8T@gQdOq>@Ho5i}S0XsU2A4545n7k;@W zVi}-Rt4qfNub3iDFQf|*KI=+YRFXsje6)V%q)EDSf02Me_C<#DIh+CfEsbX#`|#yo ze#fuj%XEJjUukt87{9i`A77;Ui-!4`d$w7$9Igjq)C!66xvW)~g>K5`5Y)@wun*nW z_^KC5P&V}pYqDz1i#Fw9O!;ya>L2-|fC7pjT6_-HF9z|Ni}~d*FnP+;wMaju0DFK0 z0Njl$k=#2G6;ed0U;qbf`r8xeB^Db(7}H$D(Wa?zv|&gX9RiTcE!Yxou=A6f`Se$7 zYT|G6>YsUtO*Uzfp)N)$NEmBX1b9FFnagMJ*R($KxU-jEdeN^h@4f8p%PZgdZL*m~VUZZywBHB;eHPsdH>L^n2@gUTrIY_P%fCUXVa_n+8|MbF19E=eQ<6lWAjEdO@sQlb!_W zpZMsejJX$#5q{{)-9r}#^XHyZJdGRqm6&#pIX{62QunoV?Sx5+;O?bu@kbA<9r?o* z$()B6GxYN;jp0UB8~KUmXuNjF3!r+eJucK=d8mb(HJ1`ehAXvZhTB8^+(>|)fBJa- z%2nZK;J>N;?#KMl@`#hqr+c4`d=i~%loQOUdG-3mjFJ=_gz5E%cotyIEQ4GEW3IAj z97C9~P2SAA%AYn-M~2!YVX)L$_=M0`>WCCAfT`RuGoD(#U!R5gFE23_m{k)n`YwD9 z1WYU8!8=3yC{nHvxMNBa;l7F3Xryf%MX*L1wPM-g)qyTA-Q9TXX&2zX%DrIu(~tkj^2)dU zo8^<&;Xk@@5vrd`q$M?{F7!$-ZtOXIOv3(AH72cGe)obs;~Q(vqW}F8@eHU;Jfeyr zsg};cT+AW-m{NBG_XKkaF)I(4I94PZ zMDh@65~m|Nyo78Xq_J!q0*NV3mc%XN<-*GB-qAx0b6$xbX>!J~XOUo^j`JV*^yLX> zK5co&C*HXH_MiRQa^dCgfR?C1DhN=`oN2CJaMq23IjXU6A;2}1tBP5@iKsT79~*!V z!Wix)R^BVWJ#d;Ud39qk79Y&lBO7s1ze8Do@&T=xeB`eDj>jIOQgNldZC)pDW`+-( z&x{t?5t95;87z!-msNG0^#HklLIVP;Xw4}s4C!&F6q$gP-0XXKBK$`(qX3js6D0r3u7cu=ONfm`JFkyz1?U2`Q+p-qReFq~e;m z5MiUE2w$+K5uVM+sPN9OXe3C}m0q9(VN(ROC@mJr&v+%NR?>ep{u%3@cmrV#JOI9t z@Z<6uKC%4vg}=W1;YGiVKMCQRnK&q5H$yipJ0={o^EXSx2P;`OdxJ=&5;)?qRON#% z8K&4t6Z7gOs(GV*)x{CZYIwdqDpBjFpBlo4Jt4FukFjP5&NI2qZcG(JD3-d2%=u|z zb&8P=$MY&a&H|MH;o;?$bi9^0<&%*YYG4j9V@eK2&@9fv#^-zr5g_`LpS5opuqChY zj!!)fVVt#WiKhqpiOcM@_r#NHSMl_x0?bfhF;EX2{T5Y3CjWHMaJ)N-vk{J)?=^Uav)*gZGIO z$q_pDpEiCl*73~)3&+5DZ(Q96jOA~HikA>>Hbz^34WDKN>->S2d`cM&*kCE80Oq-5 zu*yUNeGY`p@NZ_~Z-d2z!2bLM49Z1V5n8wyf~9SSr7mu|0Dl^j+&=)s^! zZAX5Y>q6eHqOi{4_~aKK^0Q!ul&p>XwDT*m{FNC@aZip19DU~UBhUOlm+yb_KV1Ic z!@sq>?gOu0uDH=Z5m@TkKMKYu$yj;M*?p1ZwBP=^iAHb8fFTk#fE&zQVy&KdD?48f zIdo=RRMWJ_+B68jP`p;Y2r38DP(L@m;r(yLj-$=AsWEn^6NL_nd&eg~nMYrwMmdzJ zXdF87Q|SD+GUZ1=IQ4mqNU&(vS){_LSb#Lk(ecQ{c9IrnQA8z;9>&l4jli`zD^?ut zF@2*>@`BJ!BdYsCj3rlID^X6Mgw4f#qc5t9`lg;%C5lge#{1mo@?DoOy5}6pUIRhc7twUa)GMGz;Q<4S;{+`DX;(T@NzP|e18(%QGyewB+gnCCNZD{X_IDorb&e}8AarQN+kj~ z-l{E_>`W)O@)HAFiQNbn)mA` zzu&zNS)Tux7wh8_*M4OAqd))MHi&iuviZ1n! 
zb`S13bUs@9h7gCES^gO_=iFF}2Uu#M&dE&%>+e4mF@gExH0)$g55lLLz46wf-cc;`okBE_p~U zkYKg4pQ1!C&}@`gnOA=EFiy}xV7KhVn5&}2@5CcYjY&La(kDN$d;7g0SKfHp@`|_o zBK{Wflgn|3oj5Um^~*Dld&t6%mpt_!FCV!4UCSSTh$Sf6jq+tP@U(zR5!%n>-!Hm~~S6L+kw6%c0Ah z#d(Y&*BHq^XOMKL?6gxz?lLmCtaTn6kqNyWi%Xsz z%Y6?xczOKkPuH8qIVV1LuU_QIy0_zo&t9|qr+2(``Tq0&$#Ueu$L=wVp97KNNN)WI zMS(P@SoT4CnWknCQ^U3Tp7T9P$J`U|*}r+Pm$n7VF2X2#^>+^HH8U2LEB>ZP8Nt9= zSR1P%nBX8cMQ{lLWa6Gk4n>sRU;*XpUWmZ)cmv6ez(%nN-jhkxNkqG7eCeB~@D|4) zC-gmxb>g+4q{`t0wAzN1e3_N3imOh`#Nw1g)Xkr~tsfmFH%IfD02bDWYXN-dd;iUZ zeA9U6C*HKYA20NO>xqAVdH!Qxyc~e@GjW=TC3uFIL)%U&19J4E-HTFWyvp!v)0o~v~NB%W#K zGcWo+AMTq=&h4LnO2?^+OVx;XI)5gS8#9;klN?*Sw82ut?h&suhlf4Es~+9FdgfZP zDq4bR+-_921f3fWJ8fiSd=D7=pOs+tq1fN~g+TnX?}D3J`1}$gPHdz|YGVrVS^r|+N=l;lqOa5TtuF~o; zVCZ$ds2h?D-rS5P<^p?YUwo!@-EW2XG# zz$JxpMm)X>t8ytW52!NOvPv?*jg{6%};}_#pJd<+}plRgSS7?+i ze&qw-h-Z{Jxgsu_*dUq2?F5Q5NV=TfT8O>#D|al{-E#GE;t{7Pc6XN(4}XB(K*)LN zbst^+{;$7C-}CW(_{8{wPk89=LceO?gN{E3|C!Y}%Zo4gd-~hRdoFvc{<-UeSH5St z;j`C?w_A!uwI3CuXWZepZ@TJPW7s&$ay-yHRN-OQx&YO``TV$chk*smx!Te>!$*Ja z17)3ZCBONd8dnkb*#fkg-x%aJ0cOWYS?O0^nO#`0tscI$%{VhrDRmpe`~b5gX`$CR zHGoDURcAQ}5r`6{pV76}g8t}F5O~PM;W#6m9>EylntM9iN~*9V3lEv1rC6~yXH8uj zgXvS<%!!Y~?t9eoQ2dkBxhFk#dDJNv?A1RXk>_g}pS=Df%d6k}@0JgK`kic@2DO}U z*q)zRg7pa^!h8)og2X0w$O?(8?z~A{dTKYqT*f=Me=>9Ctu$MhRN zIf1cussg<=XW`FW7NdIQ0m%tfER&<&*k41VW1KQ4ICGnVgr(mz;EJLZ9Q z+ymF64>@jm_QSqQAEdwXrpuNOT=DMZgZSWl^A~O$^{amXsR6SXHKPnVfWlPo3a|4| zScRy4JO?$76D-y9?tHND2`UbH6^&g1WXCCqf?nE1r+GRo^;@}!g*QyTM-^2u^qVI> zKa;R&kx0XR|Be`kR(vytAF)-p5wK$~Mi%PdFqW4NE|U`LLT^9DvpBcjG)_AF)S2QQU0-?ArORvH_bbbL zFMm_Fu@ja}ebA35#;* zycp8-761RT_vS(0UuSvW*V?RIvKDJIUSzNVFJUuDsaXtDCy<5^5;LtcKp=)RNttPA zGHL#32d2$r(q%d=)1*+s6fjA^lPM(u44A|Yn=!>pYzJc*Z?a@rl4V(&CE1dCeXi@i z&pF@U^S*k21{p&}=Xsv*Ip$>mr{XM_;eU{%t|MdnAqtDeF@AoXWD|Nv14W#o8 zVlr1uKSuD5q`8c5?wc2jevy!^gf+4aDZ1;;vt@1~SKdu4VNKBGWk0*w`E=Dj&*615 z2t9^hL5KOqT+X2CjUR2VdHZ%VKPK!N zbB^7e^nZ5%z8$)Ek_j5%1c2ccP;F7v{r z2*$i8@Y$kEKj5SUDR3>Z;dGewm)Dc=^p70ZMOG3uw=K$G!WD3>M3PWt+JTdpx^MBW zt&Vf>q>nNpmbe=G-WI2P18NkP1h#&X)SP};6~yB~Sm_Oow)_4da1{&KnM{Motg)O6jkPat{~-i&T=c^b&k z002M$Nkl>!9xW6H}gXZk0dOoSMI* zssNXfOO&nrLw^PI<8;K(2mrfe%~ew7s=xa&zVl1LhXpq1D#j0@22nzT7iTrNTNjke!Wr>`!9dp#Oo; zpC032iVW6YdG{~sC#inHU()(ZFZqt`Wnb|1e);##`Ap8w*6*6|@r_^nO4Ho;(7oI5 ze)#R%@9K5Ae&^P=`SrQ(*7tBS`awhgWLa9X_O60+#%$K}uX`ft2LKBQElaPGKQQY| za54@|vDG{ZoF?V7u)6EA{2k7jB=YCydX#=7rnm zUj2fN@AZ8)2Hzb2jrYE8`?}(ceB4uy#BB+vud6Y4ByPz7cvp z%xV62=@a+g|Cj&u_JgngAGbgM#eaGG`WJumcICxq{&B-Yyg$gE-#PhG{d5-}J#Nl>21sN|!M@-r>!uJ54C-6e9znW?YUMVe~Z#L24noU?jQ zMjYt?en5f0wARVP*WI7M-M7_S@aE`C$Mrrl;H9hdqpg?L)ua(Q(l8^mtKv(oto7^Z`<~~p zHYeEMtLs9V2BeiA&DLe1%@x$u>JJEE|8 zpzgA?ak!wHKld3_6{*Qe71;M8gey)fV-8^9En2{x2nHou@$~^9%Oxky@Uynd#0mCN z`FxIuW#+fQD=)gnPZZD8m!A5`;u%l7@iX!xwW;9Kzkc$OPj0{cfj4ab=68Q)d&|wg zzCHTbBWb_uN@Y106{~X@Pnfu9ir)EY#bbW|19cz1x4(qtR6xerrv2d|mcgwcOYxdC z?X`n*$>$ql9O?Avo(F@cc@oK~CD-W5jsS5MsjMs+?m?JI)qC=rKQ~BXlQi#GGk^_N z)3dQHy3*t1hrftR%kKNrZ4+3&gavkk_Y9W5spf?)G;0cnBo93N`_UTbPXd%!)GM!> z;UxgNqmOLA`QF#-;|=+^l0{@{Nsciwy3_}7ZhcNCtp9(}HJ^CwF6fqvy)m^VOT zbAtV^GsoGKb0deIc4zxRw9fV_sBP@~zC#B2oP~^asSQ(%p3H^Ls%dCZ+(=i(s@o3~ zU^JXpL{Jge{40R5%nhwYmAmeFYz8PT!sLGwS6qCJzE|-~{}j@-mp#KXKas@x{Btj~ zhChmz-_uW1y4i%aqy;p1UuWE98kvD2mkU5$C?mYZS@4p9PuiCl9hoL2qP=`n+Uw z*A`Q%6_W>A<+lh+R18D5{12;dj^?MykkmkvmV>{{eqbA(0I-NCU28t^21uN7{9jWU zWoq31$uMrPyY9PF56r8I>Gabxp7!kRJ-7dk>nEyf-szV;^y!o1ut6^!_dfXX?Wgq$ zy+8ftAJ>EabGEP2H{QSc`G02ng6sdpz7HgR76(t3H|mv&`8XU8f9m0lmxs7TPb~Z{ z&7JpsWc#Q-?$8qpYyRi;(5HB!<%DXeet)9OvB*{Tm>T2b<3i&!(kr&f=i2>qfcLp? 
z*89>KNvW{8F?av3gtLImzzWse_k|O2EqW4{dk#NZq8~=5lTZ#Ku$dtnUE%M0&iM%% z0L1emgIDSag8xbIQ+NC;`zr0PzW6%dD?CYjw*L1z3ORDYx!iR7JGM8z?{)fy`0KXY z@42NL_@9 z0sOgm^a5i`@j@}h-DdIu)c-){1JD1>S40<=IVs)nBnpoTfskP*Jm!GN-#$R?3LFmS zn_nJolH5cbLb(xHVa-px&F(M|6|5vbiO4=Q$(MsyRPl!=x{{q3?2c9bE&5mXF;5`Z zKkZq50vR}F1UQV^!%r7PrK+UJwaSMo+v(B-^51h z6EXOo_PcI-i@zcMoA3MA`n@TA8FVz;-4;zABR*e0iO@fKx}F*5gE6YQ&)&&??C*el z#IJ#+Y~u%vli7dbl-=i*8NKjD6cTcl{dom2$2bSwb^gnobfYAD`oV;iqu!`NZAZ$MpomZw}x4;5~lX z`%gUlfd41M%xi@4Quusi{fXLtV*8Z7t_}t-uE#42Jf`^(eQo^_{i5h2`T+qxvGKKb zW_>|Xf7ki!?d)^-JN9SmRS2{`TOWF98vX|NS!cL-ay{UmYuAKFjheYS21{()3$1LYmQ|($j21Dck?bsYGHObo^^#@js9xrM_^5v=7yy*u#O=*1O@f6i6U+rIp{dNR>pJAB~{U%EZ@+&}o= zh+ZUq)|Y(p=3nD{JURYocYq(dc-L)j-QM!SU(=JuuWdJf?DtLx_MNWBR_3~^*({E} z*E`t{MJ_#T;Bi(!dZ74wtlD3D^zVS&eCo$PIvD0yHp?dv)5_KSeD=v>w3H96mEQ^V z7&bl`*fr|2*W8cC|KtyH_60Oc(JQXuQ|J6-BBP5dKIQT*lW{F*6cB*Q@J6xC z>lyXfy-a2)_fAYmDY(Uu*p2a&e7xf#~+zdHml;zd^rDI0je|FKi$UHtU>$Ub%my zgkH8Bib2W<0+0i*aNqw?{mAW|H~C9l=bUw}zWM&b?Zuz-<=cy&@n!yU*F{gg*c?w@ zp6tLA-GK)lzE4jWZ`=O;t-rnTdQ>;v{&xTGZsp0no%nPVgrG9=Fj-FLdaqf5o5f5?6XOTfa3rpir1FQ~4xJeu%h8m7xUF7j}voqTbf%(;h{ucnj4N&>u zOyfS^;o|3sgWVHwS2keowvNjAOCFoL4CR9?CrVNe7?Q}_hs7`UjIef)DS<`+NtyU(A+Kh6dcjraIB}E6&e!^R)!v)*2de(X|M#QTajCvp`+WVD@#pI&yPkjTi~QrlS3LRUp>E@o zb9LbE2R`bblzPwY@6->6yhH1EZudQO_sPe<{yZ?!9bCIPrPe&|^O(jL{><6u^TN!7KemoVv5dA?f7hh3?&_#UxxClX zpFe?fY>dXVNe-})z)}MAC9Fv_KOvFGaH!@q62GlBPbB9l7NW)rkphcoZ6p}U(PH68 zyoGDdlkbvmAe0nG2&~Yy=I_~3t5Jn(lh$ad^r5eL@qJ7$@$t|nA3Wpt7S7Q(RBzC~ z?r*;91ICOFs!M%YRjqKA-TdtwHsVJ4uEND7sruxN%$ymfU{Z@?doE0(8GJMPx8BbS zJ^o4z%HzjD{bRXT>qm3-@tmulw>|TU8#iita{Pzg0shx|%iZtyD-V3|qwn#|%cAom z*W6S6c%lor#d2ewc>KFfCqIA8xcxoPJ=^0{kAK@BRN%3X|4-*NuuLy(KW-=qF?2X)_V;6S+w~hiRs`b}QVxCPSV9hrsoEo&k%k%`Hw)>(GUfuR{ zuQ~Id+~gbSH|u{a=m@e|0xrmve-f~=f?=BUs(t)(fH1`cc=RMm`}~HZbI}qVJAMi- zB7XUgpO|^md;jgwz~4c6=9M@43FTS(X(@isg%@bNK{fIVhEI+^ULD{Y+#l9Ucks)f zJYjtBuJ`EgXWXP$TX>&-0_{`t@x!ge824E%WXk=M$0Po}Pkmc151`$BroeibIlr<^ zvAZnyBl(>(7tKGl)iaA6*q)-l2zuO?XL%J~s_HMxB>kYEqTy3aYwpi4a-~0Unjbt5 zVDvWj7+ss}-7rl0*zprrV}{YY`Pkm|rDBbf2H4`hjg)xNkEWHM;Gxm|CMILYkF`LZ zUrCKghU-<#If)G!@J;A^V^a`~NSZ%sH$E_9jeq8YYs&-*NbOx_skVvuB66w&V(TSj zE`nLHt+l(-KJzNZkT?O*NHdbxb2}J(`scb`-Rm5Tmrmm~jUNbY` z>vhWSyvzwBHuVI#>o>y4Z#>Vw30Z|-MCnIJFh*hbQgM7zd9Pl!gpV*fOh4v&)^^pU z*KVKlw7lZ)4SHhX`#(?D@2XsV$#wd@hD*)!c#jJ~&7@9y>5 zV+TJsW57urHZ3N9Yiz%M2jtk-z|s@Q$;W?0Lhm7>e*E`u!9(7D>t|ODNR4?cHpF<8 zb_3*ae@&IxwGZn$(ABs!Bz>@gw)41c99WCLMUuz+!Q;Q{ik8ZEx`Ju22GL5 zkITkjenk^IzXxL85|dOgJ!UV4%68Q(J3*^xjQfHC9q9=%;P?q71x}B} zaY}k$3n8Vk90Q112&{Y?B8Pm&`m8qqa1>aIZ((pCw33&6$n5iM0L6)z&|~;P1$)Ui z=9*qW$9^My{YfnM1UaKFU?lHbjDy;m?U+=vZ}+Vju7NUvGcPcnLP z(aUo1%XR!#Hm?VE@l!9;2fwAwFWz1DALN(J_>vO8k$<0F`{_RYawPwI$o?MxHh%w4 z;*NWHIhfo03zGD(&(HUI4BQAAZa4Q?&Y1T5Mrgmb{rKZ-FcZS)b>AnA$!+7$(wdFu zj70mEf|D&_2IHQ{zOTN!MvQp-z8RPu2^RXJ2X)zZGI6PG_^88feuQ`VQThpaV#Sy8 z7(+)kE�NPmg7gu@tDbReyTjYNJVD10Y(bVtjH&@Z_)k;p4w?;*NsJZ_x`G`}86) zC4+j>q^EB%g;>VYZ5?|KzY9h<6!r-)YUal8;MgBUXx4K7U9KTfyK z)3Gy|n3ba>HvQrY#}HN!^ZwQA2Iwq#>6c@M{}N#eHV0zY`@h*zh!AavI+i`mFQ@x% zyz*27*3UnV0ci9<33_+`;BQdPqbCd5nI8Y7yXU6pe)l=p3$owXF&gvq`Yo}7HCfM2 zROu(Kz6X-%fV0jG!k#rV0A21}SI;Vj%r$1>xTyk5A5$pNj76FW{i&#o`Lwk1g9Hnn zBsU}59_^d|*DX#27=Xff*Tp9g%+AMr>>rl8=CT_EWYAeXTaEqe`JhycOVga?zU+JA zWTMkB!{=sC{#X;}A1Vlc3#hc^{JLjUEH3v?ISMpZY3g1^zI?)VyuWcs&+zD@0#~fv z4QT**>jq*@^#~s6#tRq5fF|y-jeo7*b>AJ^M>k$ZUe*P`utu6@XBS3vHWLg&a6l!U zW=db>RGrQ;kye^}Nvg40_=1}%=}?^PyGPbfvVmFDvbkU-c@y7vG?N44nBj;&`c>d-OVMG{8uR)xz8<1^PM4H+xh1D{1I=eGSm3 zZyIAw{o0zdf#IK8e5PsO)Sy-O0h(~8H@zmfU$HvKPoH>d0M#(pr)O0Y95*rK~9A|law3GsE-N}X0XNf4n 
[GIT binary patch payload: base85-encoded delta data, not human-readable — omitted]
z(hh>F$KdW4{U5F#$fmoiFc&f>hE1PM5b16G@9-^ujRVMkxSUKW?7YqSsy>J~Qt7ep zs{}WPN59j0?NpSy$lQNyz2mXMT?!jW!ME`8u4yj>MiDA_x1`=kt%u%$6%er)VdRmjfk2f;}jh#tm4w@t)R7n z{^-MDL~P^bc4i{rl%7AM=+bcBg5HbB@s~|m1RZ)^eQ0muuwW_Lh*HbhFXq^p#$M_K zsCit#VMy7<-^d^giGkoM@ID_+PeC^<-o5zrWt&O~XFObNzBfe1m?8(J?RGO6Wq#jU zec8Da zaV_+zo`2Hk$Z}5bLB*_kJj$in6?<0HmtjI877baa>`qB-TE=0ZX)P zpE@bM%t4GAF1OW6A7bap0pG4Q#RN6hX!*=QHm(#B{ywIkZV6z%?=IeSxL0`;__fBd z9@d#_O!@d@C`?hU2FOx$C|+h~w$2mhTBVmdRb0Au&m;wAO=&V5j&4B$wIdZj&#oN^ zb$Cn;TTG^NYizzBWk&Z9ygAzmI2k*1?&x+(TaFWIf|CdsHLWDt@X*T~`v>$&7lE)a z{D-eDpI=0EOZnxZG_hG=hmO0_D8UXyuh8rk{rd17DrMMhr2@$tPx-4}Bi1eqXrV0o ziNdt{z$Ar^`*5x~5NA0K;E)vwiK10b>azxc62Ak!U12+t`;g=W;FfSr6dn*ssOla-+8N>P=G>I>0y*9{YVgN!q1 zJ~@4}bcRM|m%t{kShm;~`G}ka&1X-KMgYF;Wv8evl~biiBZt#fwihNIqU&6 z;QF>{XsYqz9zU1$;zk#Ux$$|a?j3~#A(`;It*qo6dHlax&=duc<+bw&J?v09(al#` zilg4^uXWMC)VJ3%j&*)j7|G#?e?`lOIK5<6n|qGZ{O;3)OZZvz2&~We66)(A7gpw0 zHZjldoDw1S(-w7{`&g5p8Zh<5`Uz8G{D>awOvPHy#q_n=^1l8YdN`|=1UKFMuW9Dk zlBfaHesM{B6nv4EJ^F}5YcErVm+=-MkJsNh_ZorYE#%z%mP9d0p9d}=uC(b*oV!3C zul#tpCbQ!6%&_eJA7S#U)C#*5tSlJ(JR2u&ubD^+x$i-&Zg(K!kBucLSkp7goXcOK z6l3t;hR(OxQ`r#Tp}_$G->J`0)+;V?MsRZUJ$0<;da=2aqn*?E`oKwoATZ$erXQRG z5VYMsU^c(HS`mgya?e&WiUBRY&lxS6rgRG#yOqqr8A3~dG&c(ENMkZ6)Z-}Q!OpIi zEpUG2_SC!#C%VD~qLR(8lPUpL_bXdJYhD5;*9G`|1%-;MSJW*3+yOiLi&$#uiCkQQ z&OfTucS{X@%cfFy4AnnaoO*`V#eJ&PWD5_tVV=KM#+PKHg+)00-s}Ikk`lmRVv=0d z0~+Rf>R3g|Kr1Q&z)q{i4paOwXaelBUj&pAe=0+gF9R%I1x+pJYFfEkWurt zv{+ZL+V0-4@%O&RJV!E8`Fa&_v{E7_0Qd!rOVUo6x5$jGN5T~``^1>8vG4JEXoc)%rRXjImj#?HE zwsUtl>*m-o-kGAwT*X;wQpM? z|Bg_p-&Qcw%!^B%#8l7B-j`#z8 z`KM0NxBAC)-&`HHl)yuqLrC@7bJz9+J5_Dq+Gpr9c`)7Wbxn!`4dbG^et=DD{pAB$ z7U`y#K*_!?2(+Sg00Yr%#idWV0L~cAI9neCA1Pj??JsTq%yYrsb<~N54wi~cWqo4V z*32n*>PUC^VTA#)Lvu3tr0l+n)Viq7(S4-rAKVr|?@dE5e#Pcfd$&9}Awt@tlNii5 zKAZK=C;qbXNp|-p}$#SH_cl@^0wa)&!pfsx=0+6-D{C zF38IS@(P&=fon^>sTZsk%>oiI&YvMJ6K~VZ>NrLPDRMG}R)&Ekn)E>U^SB!%NRGKZ zDE~fYhPd@m0KHU{|s@e%gu*4u%+xk9?VA5!-@fFw|r*zP}A*1`fU1yxf?> zI6d#6m_v>~t;>>Nnk)Vzm#On<5xILE4_RJ{F2rslRkU~UT@RQ!0UfmG>Njmc z!>z{@woTdTHE7P zX+3u*Eo4|!dUI7uz;|hgU!R{YEXp5LE4IDhP5P9kMRbe)TfLa<2$*iK#!e}VAd;0T zo=zU={hLp*EigL#G^Bg6CP^-533DWezi7`f$ zAI&|ew#zBzdMXQgTlp%ULpLUW-<9eA391oQ?kQSJsOXe$g7D9E>GcHTBDeO!GdpIF zDYb*8-2AGC9=-yU6Mn0QZ!D`#fI-=F>6mwZ#t^DE9yHC1APu1CXyf$H)ntEbaL2c# zIm?0Yl<|l+qj}Y2z5s82vfy%`aNYQHr9`NdT5D4R^|ZGW)*+ z!DQka-^0OvqMGc(7>sbjcEiKd8=L2F-pb9K1TmW_hpFb_KL^YaOW%oDq9wl1smY5o zn!Q&LP1TZmUkCzlWdsi9UVo@+KLDTqnO{z3ppHFreGH$0Mt)rH=)5AHzO(s6K=bvF zpa9l{#TS(;B>X+gQn)Yw^ujE_17`GcC3~YC2jB4p)baHD;VcVGuAAR0-S#Yyy2MGp zy%-lJ7i$>CY79U5*bn#7RCcgj9?v4R^lt#wl;`z_G1ZF)PnO}O zdJ>zvl}7x_|Lp}4_Wm#@9qZa_ojxDF_C1d*+Xz7HMUJjJ*s}s3|7OB($Fn$kA4&Qb zgl9~(Z!K`cQuA_!L^AgrAW9(MGk(w(NF~o9xYz#)YXjm=K91|aRo=N7-+A!&9{bB< z7HxL2_+(MZ?br5YQ3QOik7vWCM%IBd{-ymVGhz6FlN{o>0eD^hkyI5su>L<1o-Z4L zh2P;aMqptV-<*Oy4_N%*rOj^|VZ#=>5nKYF;P!`~V-Tcjnzq)x@xf-^qsxC;HOZ02(P@dh-*sfemf{ zn8QykKCghYXcNSG0{CzK({P*6-B~o!sAw5wPxP{tnisI11Hd6VYMwKD*3^?f8qDnz zF=q~&-j-)atW$eWQ`iH732unc*kS#^qm@`1rhS(mdJu z%(XuKPv~>j(+fB__K4;ufU&`awjLY)^n!;~y8TVpjnT*Krn9&Xp7%GPzxl(BPonuB zVR7COJwL+2wi!+Wc^4iBtl}G7ON=aI$83j6m3<4;_v>$_1$aTr3Qdbnw2=cAGk?|B z(2Y^3PWfho?K)#J;bC8C=8UQw|EUqoA&s#Ynd%QJ52gpG&jt+UVI3Pd70fEve_Dug zDa&=&8qabp+FlTP&u`}}{Ud;DvQnN_M~B2S4e)*b3gLAQf^_`qyPi8WOCnAu zMJAZ9e^N}lM|BQo5Yd{9E}(Cuc|$$3Gk2 zcRl%`83Vn1YPqD_!zKmKL_hcgNbw3CekTIDe-0^pLnnWpFFiws>(D!kSKjgL$A6jP zuUd25{Cg5?Wb=5ENuRyz8J7T;_X4Z6g*EBuGm)E;$MC0V1sx{1o~xd&nFt`7=lS>C z8;A~swR!Ge=IL8{Gk)^b&m6G*`#>t;Y`d`uLfBj`sAN}_y208t?H4k}XFb~CBR~*u zaD)h0w%V`2-}u1B*Cf7s(pxvpvd#az2Et7vl*inpAlZxGj`!eBj_oa%*2~ka`BOjn 
z?L&@$qU?^Gf_d8T*eqldx8f4@-6(ihlg2>tT!jQDMo z4)F-Z7HA1xuSt#XZy!GNj*m3}|0xik>;(IRgFXN9f7gxi zS6rxQ48`a~DKuwZBhE@WIhp$NC5QLir*?9QzH>#-q7UPl4ye(5K`C+lXD?!wDFgBh z0_tdw(J`1T_Av6FLWJ#MysP=>8*lO}iUxcz?;-!@4{NGR;6Hgd41W0MVBaj_?Q;HN zGQ>a1O=@xUBDWEZld*#NPY6l#YX9QaNY?{zY5vouBe~+M6*;IfB_dZr)R|I_XE$0~ zZ|RG1f$uolw53L- zWYL!XM#4ET$-*xfb1yI9U;`@}SRUFQ+`3(u4LhB$%AM-R}llg&XVrJm1^D@C~g`_C`GR z6-)g-znA}Dt2GtIl^JD! zT@)(#!+C=_zc%&|*Bej;XR7;okkUZLT0Yd*f1cLz!lpT=)$Z;1_pISW!I=t0;q^ZX zf>K}Za4uh4xh%lqb~TzR`3%L8qw8sICtoO7_X0{i&tffmO@qD9Dty-NpSI7rG-vm) z2Z+SYwKZ74`WVXmiTCyoiu=UGR|AMM+Qx%8qEvYQg*f{3v5OHqGH|7TXJ!9vco%^4 zn{$3(2+eVZ*Vt_;r|-4`t@jUrnT)yqc>X}WeKGO>+xK6JPcNqF zvGqibe}s=gVe+BW>-+wsnEutDb6B*5`YG(iVq6OD<@8h4M$9lT- z`mY{vZPRGB(vO=H{<4@lEn}_Tz6m!?IL{&v+NVABK>HwBXxo&H4}Tlz@M9(WYsO-} z8(muJZtpqQB+ws!@uv6YN8iu2{s~*K^ClA<#}WQSVIQb2=f|HcTKk2hei6WYIi_d4 zmo0+qBBN5(AH>}oo{>bkjz7!hV4r$W&2OKQ3N?kC^VdAx3m*adBP2{=7CG~`PSK{< z|Kzuisks$PpAOCbY}+_m+tkQ(*L2_FYX9{8*C9sNxaa!n{^r*?!1>vN(|#r9#j+*u z=lJd4&e0JqUmS09m8`XzMl`Y?e=1(P0eM0@xBXRzgQQ2@qRE6Bd8X+E)dvTUTL#`e zG;%(C@3k{iVGfS#t*6p`>?M%**}!dQhumrU z@*>~)@I7ZuZ$E@}(BO);z|LJjf@){TMey_6VP0#ns;@uJe+-ypVLo$yQ?kmh|8&?S zzk4lw`eMqTHv}bQ*$4)s`g#!mQUzR{zA)c&JpRTAkG6hazLu$LIBh)wn}FdL^zDxp zu6NVRbHhhJ*6%&d68}BdAuxITSe>PuZ5@AaM0m0&;vC=)T`3@zTfH@$hU}TnbZbQAgXCWMX*|f6!3Pv_C%>hPRi&C5^NWvk-@CdTz=p-+vNI&^EsQWM|rBBOH{ z8LTPS|NcA5#701u{mKj9Rcjg`+%o0cl1LL^&)w`f7zm#JTR|%o^{XT)=>BHid>m_X zZsV@MMOushNo0@c!20n|DE)h^zW(%%ihY;g^!AduuF~(n z+#M$8ftiD?sXjOpgbP8CCpu@8}vk8ReG` zJ=;mwzqhli2|O38#N!+w`HOITWl5Y(e1aAI^rBIJ!0%a`mBBjgNifkx~^>a%?&2Rprv>t5kC;kM43BP}M9xiSIodc9_|3sSY0{Xe$qRKO2 zdgo6}L(ZNzg4_MYlE@xp0#M2I>AY6}|MB|y{!3%g!WXf+SHk}YE9UQ>!|4{{Ql(?m z5C&tMiCQpvh!SzY@&mzI#%R6E0?{d5JDLH3%f3D|!gH6=i-5HQJ^oBgw{vDEaCH(R z`jwjG)wVSlO4mQ(@UM3L*KF^w&p993%1Dp(^V(A9 z@_D;wPfTI}F*`O6p)^Ch83Aj-|Md?rAntG`f3(bIPnFl-CEls0N6orna3t0<&WhAq z%L@DKfxVcEO@46c4IqD67uE{Z0poUMGyW>FEFM2JeSu{IV}DX!kWx(D+BqdSlE95Fs+bx32<3 z3%Msi=f+r_8`$EQ=;P=0V9A=TYBBtiKlzV;au{mq@IQ15#sB12E%`TU-+d`xgCAd( z$!|mp3vAgvhxHe&ds#=zozKTU`Ei{lBcJpu-~Z|qvYzkz%C2jeNO<$WPXlu@74031 eSd~9LviuJhPdY%c1_9py0000l|IO8BOo9q{rAE^0)D|?Eyx1i z5IwXNWf7{!X!e0G^45k*Hfm}J9KdG`1jJ}N1eE_y0bW$V3jqN+9}xi=ct`y2w|u1k za}+TtANl|N{NIVx{4OI12+{~j@-n)+%E}TEDSy~wH`OFO zEQeK`7?XQCS=f4!5(eC#R>xW^Kp8n=Jc%%pq(5L3_{g4eONah@^dUzd4EcWEp1DRNg5?4l$Lm*!sUhe@}U?>&^tT= zL?#Prpo}IhQdCwX`OwG#pHSfJWd`Zqy7+Q-cAyd#u1;j&>;*JLqBJL61)0wXgbd(w zTA=oBVW14h9~@Z9C@~0_ai_omq$ZirNNZ|6Elr>@plRs;w`u>M@3s7FCzj%TGy;kx z=s-}_H?i6BMBzI8(|~~O^8L2A)Sr0o^rGK5y15+o|0(siR!JX>VRQPG*=P$o8z@9U ze4p>R|C$(i1vLww4e7XI-k-b7XWd_ICDPE>Vtz06(j$@1RmI?+Rp9l~w`Bk8bBalW zHsQ4_L7$LwyX$CaOfP2X-YMLi1JWPEiju36*ST|;JIVp(cjoP$`%<8bypi#Dt~Xx} zB3ii2t1iBd502fxk^lw$K^JTZdA6xxyS6u|`svLmGCP%!+M%2bJu2q? 
z+7QpF_iP}0OP$p~N=o#%$612Sr>Av08d6*dcQh3`NbS#E;GdXhEiry@)oCHaRIxt( zl*(P_*b;4(OdofwT~;~iw4SU&zP^_6c=WwkaXfE!DF|U$FOFS`r|B@lN$h>fxpV0B z!mAQvs<1j0exfwfH#<6G71WL2GZre0cI+ipe%WpWyyyo1?P~sG4)av%Y$~AQ>t3CF zhld7h{hEp+^hY~Whnt-5==no$UF}O?3s;6v(UTSWzfSj;r)Tj|`w16uzV04O=GW!b z>&bPfBD2W~s-qVyTte4diX2@$*ue+zk0Bin+sqZ@PNIIQKa)Kdf2LM+q)b?S+!me^ zvgU9tXN^dUwN^6ZuvcIpHc8s3J8KtCM{V|wO1?f9R}1DX(>>8ACc z_|#OZ+G#!v`@7{mp8|*bH~~hJ%_XbKGpZ|N!i5^PP|hH*kQjwqKatj)Rb^%KQakG|;p5FUb)K3B7#dU0EymS@^~WVsMLcFY~dLGS4_4Cd+7aqO=2J zfvq~%KPKnX4r%Rec8$9>^frbe_GFK;(N|HV6~AOd`#@56>ywKYUG{ zJarmfRvEpZp*`l76cXm${!xXRo0Sd?=}28Mi1X;G7-_I?DJ*yq#8k1P-S;*^kn4H9 z`Mdp#+-@u?bEP)V*7*X@k7Z93Zmd|pQL&(cKgGQ}HI;BlkAi*&#PNPC>){uPpW6!y z)qC@T2H8mBOAdzyolx(^&&eL0%9iEV>h9|fRE;-!-40?U#%5qG!LO0UKh2aW-HZ%9 zE#@OQcWdSx9qr9p9wPhc>efBbU6(azLp-mUxUnlXC#yD^)(VxqXk(Zu#`;>Y-{!q5 zFXkyRHrA}Oqa;dmj}_digI-=?!Wj0Jks|gV8jH+mS@0RfHjot(%o;6bzdoByercm2 z7!>S}pYL`4%-3D8)53!$(u`y#opJ-E(WG8v)?I=5O|Ft*`JR?} z3TpHfl*VcxayIjIL>ZyhW*KZI0K|IWDkKv8vT}*4vc8gm{UXcGgYsvJh+t67G26EP zeUG3+n&osix5^E3iE&-bYqQ))(m+&7PovNl4uTR}%kuY(rSNXFQ7?ps^Zu{ND(OW? zy&?gb9X{entCbSLzcMYKHd_as*8ZXuy3vq+3X3FFWkvaA-P6w)29*Xe@DeXy&60e= zyX>b_00olrB`1`Ti5wa`Gf*LFg|)I&=D$X4b6*geCbUN6!T#4Qia$xx&x37)i-Rp^ zVsZZQvD%R1`|MAUxZ#Hv4=1wrvvJe*jWp{MMc=i~$y)rN#u0_t;kgfn`i0L9=B`G* z5@pT1LReG^`*+i?=FI7|N@Nu%w!z6mW1;Uw8%ZL{4*RN5tu6&k)UJ&=4tOHU=0LQM z8=eWe*zdyFQ4Kb*zCn?qtJ2fJVec*6BVXNm@kgeM(JAfxRPViAVP1@ljHL2%wZf%g zP~CSt4_p0VFlxTvN7c|oez$YKb&pPGJ~5emuXm3e#_&pYy(5$g_PNK%F?=M9 zP6I_%wWU*OB&)$b6XR28784ki%mOIBlSL;CN>jJFaTP~9chJ+20 z`f5RaKL1d!b3V^Vzpk8)(W#Q=OcLZGCWwOfu5yc4(!V))nNzC6(}z)P-G^6tO9g*u z5|ANg6GG(GS8r79o%&-V(kFmbN|=tik`I~oSXPa#v+!i8{HKT_GW)Vqg8*J)wvIw8o@5y`%tqIZ#yenzI7e>VAg*4>eT ziE3U#)Pq}mwF>W3SW={ltf(%btcU;Yrq}}3y!X$Mm#f?((YgM}uhgU}4sr-ORWPG( zT`BFu1nPbcXe(%S({q?ApMiM#^Mo7x(7bDL+O5p_bFfse^=1_7`fcu8y+N$ zUsG4QH#t=?%Y6b3(#IOm(G=7!vv8?w+SqPJgdXW~1}5n@nnBlQbMJ5%KHJw_jl z3|6ibni+E)ci0>%IE2_(Zn!9XAEA3`&PGq4C;@;3&$m$=W^@g>yup4n!ylXCq5NyI zUWA{OvG8q7XGun|mIV_o&$pP8L*UCJc^onJ%$f4KLhe&`iA+pJT`{VU>EWkTy(eaA z>a>tAj;32rzb3;^Q^T9rKoK{2p#J!{W6L7mj9s^y=T**N;VF#6VWw`%G%ibF&T-Mu zlHS%?=a{%0iD_${6GuX+a=(B@V=fmI0IdO?IvAAkp)~V*V5cgH3MB1rR1y+uVQf zSpMkHNsvFeN-m&^FN3mGG4E_Dr2Gy#`YN%rRnm4~vtxjqhedI=quv^Qp&dn!BRx*U z_gW$xX+(e!Xw7Gfy4SOWXNQ;fi#M{4PohDK^EBdjzi)b9S+yO&61Ba!j$YHJN8Zd( z7tcYYjdMQ9ba)NnukZKjCnRGuoH_PP0qYqGX{Po6>G2*#gp%*5vQHkzbz9i1X4qfg z@U!n@@^Qe3HNp{T!)8OW>@^Pdt>N9oN}w0hb6+#yndX)QB6-T0BlbHCd92l%gj~)W zRwlBjSIT4e!`7|V07~GB_)d#TA66*%3RTwgOZhG*`C7p>^FG4?Hr?TrlHnY9G&ek< zJ9kS5iPHHpXv{|UQbb0<#wZHtGJ(=XtusP)5$_KS{kUqD&W=-a_0mJ7ts0G|2bp6e zhyOJ!Q8WOD3Tgd#jqBubdPW9-JhRoBlUkL^EiwH3zbS)U_zsT>U_O5z6haez1T$jE zBFYD0%GQ7 z*bSxYs`~slN{F!}BPEe`dCcx^(*n7-9nD(&+w6rJp(EKr;{A@r#o}p`0Crbl*5o2% z!f1ziFHTu(9$Y~xyupKq=}2A9FuOTcw@L$R!K=wVuL8*BPC+mG)8#)dR4Y~5j&O>V5?LnU10$jll&Jf1r%ZL$5CnK?Z$apHI14-j6Q^K6;TbBsXOzodwn ziv(H|XvnVZGL}aIg+M?Vze#|KOmAKD3wwtVMradTC47)Rd-YIW-=4yVa>xxYSG>OF z?hP|I6SeN#`-9;i|3_>_2tuIDEqUIc`G7d+*)(Y)VB z;&*wVTY3jfM$=PKvDP-#SQs0fkGK)1-H=MsNJK3?0--&DB&a+GtW~QbJggbOUvjRU z>y0mZliXT#KG52 zHD`_=2CW*{88lXE_XS|mmuc-VOPz?FwVm`0+|u5aI=xxsul>H{ghBVzgsRBm7LV*e262?U&Ot@(ko`>F8YV%e3@`8lMd<>phAw~ROpTl1xdQNpi-?INOo6J&q_1VtBGQ z{zs6c>eMtM^_W2T*6Mh_Req_zactv!+%^44mp;&caQvsU&SnX~XQ$(t;?_g{i;OXF zb6D^fi>RH-w&$7ikV>yAt7NWIbxb%q6^en`GR!*^AuvrC-b4xU#_qSIx@O zYxiPN|DHt_ha>-HNankw3|&uad1I*qumfI!PusTDt+Q=9BoZ2`>6G(tgJ>(AdUbIZ(8>$ zS8Aa(Q!dkUL-t%Tp1M>Mj;02Em+^mcCqW6 z$G*2L4XLk-sWvuM+v%3s2jYG%(~skrtqt=9XaHqg8I||K&X;nD4%#fuNVH@Xq3V?; z>w;Wf{AtyV)ZGDdLaV3sebCwFU}6Ru=P2%xY2f*Jj1s|)e8Iui`Lq&uFYibmKje@- 
zvC#vr_Gfb8NAhj^`RGqawn~)E9+2&NvQw5;gOS$yN*}*E`?l*?U$t^&0$~HjeP6pU zrU$=oubHWey<|rC_sO9L_Jk83D$W_zjcl{`?6 z=28^60S6Bcka{q7rx!1)X9*a7S&Svq+w1apd2Z+PCUKg-anR*a1p$^4 zyus(Ksf-YZV0b_`y#aLF8~9_-ReA`in${PR)$;GPTgr!-TzN>ZYIuY zr#>yXciI%l8Db=z8Ed<6j_Ros4B!hfaj2>uKp331EL%M`t-LhUleqP%AVYtA0t3bJ zC*Iya8*E+}K)#v3SuVDDW;`3KnnjQzN?;XzN{t=MmTyOaNpg$oejJJsL&F7yn2XX& znp1fd`PLFJa;?%yBb{=12)%{Mc&IjRJXl!PXBZm&wX!Z;2;+0y-0RKZ-llh%H+q*j z@cQcWVLiZZI0XK7VPc@l5~!;SZnD^~cSy58`mkeJAZ_@3_%>_tlFBFrTz&Bv$G!X$ z7I+E8!Z*7b z9gzTa9Q3~bT(BV$8-u!}Q7iKn`nlW0%j>V_-uSTQtFy0}S`C!W*%F7sNPhdlDtxuA z4hjews0U=K<%L?&ztd=483_3?P91XY*2kWUP4p07ezEcI45#!sMQ974cR;t|Eioo0b3t)-a02$;NY2yom~Rj6`V)AAOfLdAwq&H>Cp zW43>E%R$YelvH*b`(CsZ-|R!@Cv2QvkyWmz2BuFpozP|3^SZunhf6qam>m- z-%}1L^(w1e)Z4f7@@?<7g$8viHtq^izF#+}@^PkdqpPHuzTZ_D(`2YDSt^Oe#Qs~p ziv@u9Ac_fcF+{a>_lt#)<|iT>764&(6`XQOl2S~B1vmw|oWHJ|2BPqo{3+wkU^D8; zuQ8NZVgnDlil(IH3>L@E(Agr4MheH@wk>-rk9Th06`@dux zk`_DOV~8B4RnkYeb7&43elZ9$y+7se%6^`4BhfNH#Xw&EJA{7>bJ-I!iyk6-DmEov z&4mY^mxE%DC6)U}8deu*kp5W@I1Xq`ZJk}RZgM04j26^4N;Tx|(uEa{;?r9^*u{^N z1gg(X^UZGg@|EgsXa{9`RX^4 z=prg0ONF*#FuVkv`svibvo}Qus!b;$7S%?5Q=8;kFfG>@UegveX%m* zN+%4#Lr*$1vN)WKJ%S#^VxhMdg*&Q>6+I3*v}N-YUNSp8*i$egej3S!{?WcCtw;b(MmB}yQuxFO1zioyp1;=EQgy-lL-cEslA znM>n4?*}e&W6HDTk-dPv@s9T!!|#L?qwe&PdbA4zzZdrV;4#{y={w_8i+Fi#YPr0`{i7lhvXZb(>!JYuk=L4`6sSCn;@!>1-sd1L$%A} zmv471scW+~Xa9biaEtQZ{gYwa81}=L{s9GyuV0A0V{#;F#(%lhn!ZzM{n_mTzh=&x zmEXX3Df!NXX3U{U>ZqsDq~TTh66h7Jq@U-n_V;*svm$hL+RZ$QcUNf$%WW!(W+gQK z6JHPQeqCRHo3nZ#$jSeB@9Kmo_Yn4z{c_`EBO$KCL588y*3eKy#8J-~TiU#v(Y?Kf ztF5GO?!{P}w}*Ykh_QE6`ACbE0#Z|HFmKD8Q39ariU?^SG>RyYJc3cZz-lNl=zj8JkByT!WC7?Ffv$|4T*Jy^U^z>_cWHOa)! zLQZ{A-RXJZ+z zrL43U0x1Up%ZfJZ#b@U5+J?vTK632zyf$AMd#)Z+M#*NIecNU1Via%Ar-6iPAc$*a zz4$SV5h)1{E5ATndGI^1IA_b3R>CVk;5)REf4TC9>TAyP7?tp*d>MszQ`nxa@^E@U z^i?s`zRt!GgbaEydP&Rx&|R9$Tv5OwX*#kAQ#UgkK&I|tV#lp$8{{;>2p*gn8hgQ@^1-vonXwU zrg~hdYSzNglT&_M4o0EapCMtZPDYy7OFIdW04NToA{DcNE8H$$Bd^8(6qT>n!k+#Z zGMX)yUiTN7sd4Ce!AS!>D+yejmri<8FT0vtd5{H=ST{8oKn!)zis@^7c_YdWhH@0x zg@BgxU~vnJvQE+~@G}x~nev$c`1*~<_+0IAn1^_V>^%hSqpe(* zt}Hks=GF|EhyYV9%F$3SqYfpa{-}4EJKNJgM2r6ptdrlWR?zg`)=jBkO3dL?d8z%F z1IRLVi1Zy%8kQ~u9>1=(AMKDgAO;E^P>cD_#A78>Z3Piw@`##Aqd}o2{^l6wtM46_ zmtL~1-KE@B4#2jhM<o8QN(#Pe+T2yrGPH|QWEvxYJ0DtdWTi0H@g4YzZ& zM1QKLAZr%pYAt?UK*K*iEK!$tVo`U1{Y*(#dH_(#ubxO$>s#eGXPG%d%WZDj-_wIc zsrCV3F_zOszo%#?X$Tl~{N#&o#}jsV6>$%p=Irs#O_~2utj{K77;<^+5$kVh&}-;* zV|z+R*xVyKKdZ#~Ze!^W7eO{HATd@vn*<1_YjdBM497VH0`amV~+CT`EFS$s+-a_qUtcju1||m#=NEpa1r; z^4VEee4MHy0>?eJaVYk6kFn^>ndOA$@(BuWneqEkBG zphwpDiAi}BxFWGS>rYtu+I)qo&IRb+|N3o;7C0XSoAINP7lQHHI(OYqH|2_E{`NBH(oEeWdr0~fA(ZRP*Z*Yw z7%S@lSw(SG2(I|Q38A8e1Nq-A0f6k&vZ12U!gJmb~KA>(3Fz@1<|rqbu}s{5k*0snL_XM!vz+IuTrzL>pN?> zicU#|IZG*Wjaazk{D)G_^NR5vIF8Hxzos_4^?Z72=`zbTv}?!JjaE7Y_=#AWcQbf# zTdV_}%x4y7RLs9>ZvTU={QNEeM|{4?$wY^#(#1p&OH5n=xFAyPw@NyeT@4og*{z{? 
z3gdm#ukq6vFsHT!zw0h)%pd>ox&&+fAkbTmVW>G3i`C809#Ac)ZNsW^<24Rndk)B& zW=OhWA-OefSRKv1mTF~`?sZ3&pH?&}iCrf9S&XY-A*@EqL*CnuHFzC{-IdnyZEz7) z)%bP&j;gUT?K@Ar{sjDKA19)?XCQ94gCIP1Ct#Wr!H>?$=$dWd1PCd@(-hW zeEw7Bs1NPR=p?OSD(qE<1-PK65|5BOc9q%eM%!i6MXA%|miS?Z$AGvHe!Gsf+P6q{ ze%u>rduLM*kLC9OBXOKIkf@EpCEiZH5tp+%?VwmvqNpNtXiULNI;PP5B4RU(B(l?D zbG{gcU~4$#BD*+c4cyj+`}z>Gm+5>3pjSAfEAhU~nF^R0^VwwvVch^Mh9k%l)J!N^9V7!(Id240e?_+qqS4g0DhgjnYb#WP)g=tut@S#veKuC<`?&P+sAgb6bToUDh47PM9 zGKBN*{9lert?Q$>9@t!aZi-ZmW3cikbD8MUZFwq%7Y){k@Y2Xvf@2067jOrN=~rK4 zbX|XHQ$IvcZ2jS9BGO%f%1)F=;ns5PKwOV_&By_I}oMVb}jiEsH`%Qy-0V_#5=fVe=*R5-(d~+xTDGYSy-m z+6&7WG3UHG?RXs<_c&RR_L)Q1#jw<7nr4}Cffj{IFR0!|(jckW$_WGh3~2ms?rtO& zPK9zEeNdeM6AT2|WeAll6|MmL89feoFS@Gnb0s`ZU9kHAUj1K+kcaP}XLix#dSUH% zyl-gPp*}OHR?P%vh~Dh0=yr?1P@HrgQx5KS#ibY% z*MXOOkC5z~H&Y*e%?t#e16cdi<=d5}YV4gO*;V*i@TrsWaVy0NAdbyaPP?7Ht)`8w zF#}TgdB&t@gtw@e@!(3TrRanX2IG>hUP#`B$s^Pish{4=vzAlGnJj=AVf;WE%ujo z>j#ZKh@43D+5L?dtnhkG|1-wWURQ$3=dABdv}>iak-enEb5*WNc%zD~R zzOcd62{~ozQVCxD$OLE(c|5$qf0_*nO;yU)a0%LUW06lOjta}E_^7=XLW@l~0wg39 zJV%*;RI?+dfT~!M{{f~!9p#su#QQGTQ2!k>`PJ|ZY2^h#f%Nq9{COPUcY#4)Da_*D ze=Qf5?q^WWxIPC+X{%c!p~Y}cd`wQ*9p_S8ALM!Yq_1xXVk)GTV$ymC*wLJTkhChk z#aCag-Qu@gZykxBn=6ru!ue_@9=W%P8X2C!?BgJprf3FO0_twIRjI+Zi=|87WdHUG z6W-s){{9WMj?h6#?mW-F&yTI24o2kO@~rK`m*OTb_9!%Sq|DHpJ4VAsi{)^LiiyA< z54k+3K0b^TohtkTXrZSE6CC5F@ceU>!$8hN*?OJBi2SDg2fDUR$U}UWX7+HxyjeK- zmLMPoe}|_(xUhret=7`StI7rYM%RyTct12iV5bX?gP7p{Zm-hguiiPv<+}k!+=cI| zr=$(f-HRJLiRhiUKjd7Bt~9Cz(k`Y@gk6&JsvGAa_mS~{u*QX8bIm$memu0OFST|m zeG#g!64v`+bKOqnw!SKyMfe8;F(FkJhv#SkcP4n_=bbF(R?b@OQ;8tCathUl42%f5 zO4t~Y$1($*2`A_aQEkI_qSiADux~NwKk=S2dWf2B74T;3AYL6z?Fh5Idv20_B z>h9%Ny;}B}t^F_qL%Rwzz$3{UY#hm|TD9(D+Ymf2OC0VOG6Sq!fTsjQ?yD|T~kZA2dWdDKo` zld4PjQ1`c=41bUjpju)#$&k}6?A-qKoDU{E!sL|UR+Jk{q=`qCsP;0=WB#d*HSXkV zK+qtSrJ(0Kq}n`XPp`nB97O^aMW_pHf}+tZcXF<9<*ex~c?sICCZWVN2Vhe_*}ih!SS z^CwP^NwMA~CC?GFfbGQV8Rg>)S-qusbbY+h{!ZB>D9s*9c+*B@Q2?%pog5y{$1p?W z?{~r?-@~!RdFC;y;gJ0Z?G_pBRPW(pF~dAotU%pH|CZ@!-sGjH`=>@j5#x(>LEE?s6<>be-C$y6;SAJuJ zY>D9szAYc>W|f@nJQQ?W^=zh((%tFi_<{lFw=h`pSDq(C@8Zpjp9`hHZpR(mv28bj zA^keb6T;{weg75&;now?CH9Wop=jcC+HZ35>s&EJmdtJ=2uR5_nk(vtm~KSwyhi4^ zYq#4^#J)Mb8nIN~AfTZAR{7(cuY?2L60B^P&?9cAavP?->p*@b#E>nV7pL(p77qrn z=G1Ay8sy$3MBD)%63SoAl9$>C%8&(Pj7pU9vPLjL`o2SH#1NcBpPR07BJdxm2(%Xd zrHjFQRtJ~XIpo11f!CJYJV#5*AC=-UX1M6fYFWyLaoGV4FLdumRelX3V-P^zyk12U zuU7P#uAWaE8p~5Sk~ro$GNT4t<`Z^zM2?el?Nkoq+7ivtBYZ8aN}sR7Xys*3K<~(y zFby!-o>jy*Q6#^S+B0dvFX9MO{$Oa0JE02xqd5@vqD*;hlk_&z;D>W4hs{m4O834 z(OOtnKdE>QmTQG(A&FCKAp))mEaM3d20)Td_$U z?dVTktK>XKKhmDBj!)DM@LpRagU7FWglYdRaUjZ&t%?Es(H(MJ&u?$Q?``t$d~i_j z?9!EUFtO=BHnsFSopaWOY_uoPXsz*lw9$3tlejj!XR>N)qkVJC)l;V&Abbw5@pifb z<)Z4!F_7?R>*4A|T<9j2p6M;=@xHp67}KYB=H3(Dy*z6d+yVr-^;?)RHc0e(3Z^}4 zXD0`yl#jH0sV7Iu2A37r0B*xG(`0wBV)xxYXQLl;e3i+6J5qvu8jtd8nrmxKLod}- zLNxC`M06HFT)BzTWQS>I+a=P3J;C#($pGKzS)ckN?l7x=gK5riYxN0f?m&*J`g0XVVpGW_^T6?*$XDU8Rxcz+{xrtR=063d~062Om2);$F zN(kv#f3o9++~yF9yx?k}*1wrA)t*IPX33nTTE=7th}qCy&Q+0X2Bw#vkVa4D5Yeqy!r{DyZBrcHTEd+U3s#^R^Mt;`(%yLi5uz za-OaNFcu1JTwPI`2gMx^{-+D;&)&KIkb9c}iXZjCH^tScD7h3IP}%?U-QB=tKPg`C z%YV=}0W57SoiVXoXA=zd!Zlgg9~NDOP(c&NgCG<=V!nc9^CKiiB3$x=A(HFNZzgcr4{T*i zBqX~=>V!lMk8Lg}DIVPoNvq`!&5hlYfDD%~mYm1Q8%qHm$c2Wj2lmuzjlT7PCoh%CxQA$uEd&c3GWv zer*m=3ajDSVQ}}6x^TPG@2t2?(o$Rz2zCBvgm-S^bFfT$iH+@Ye}j8py$(()??5*2 zqLxF-B5fF8Sa{Kc(SW}`&N?hrI9vI)Zu{>pk#aBzli^`*@$UY50Yn8rBK;#|oEG5m zRLKF{XgwjT0+TaZ{P+RFY^ji?Hrx=h@S$ohg-~I@%cx&fU%oS}aU#&2Lan4nO|^rm z1U$tLvtq(cD)$1`*Yo(^qcg_1?&weZ8l#rM6XW6xnc2-L)gd-tKmuuzVFiF z)O)A!?*ZTgMB$AZ9c&L-7}{zpJ-51#)o~AN6YcI^F`QbBsco4DMzO7tlzQ}E;%nnJU@DUF!5hXM%L80 
z?CahN&Xs^Wa600pzxlj6#Yd)g=}94DqBIQQAq7w&QTnB`-BPvdL}^#kQT~hWzd<&q zbLK>LpWKMn0wtWZ=-ho({%{20H-O==%Xy-7xd9yT^dx#>g@y16jM@Dfg5g@|nBERB zd234h%?<1EfWO8xnbg0MOp8viXIvW{GcDMK_0~>nCtZQ*UuL+pY`mDt!|EMijVc^g zw0Gnix~-C;0%06mvT%J3HeXI+9=`c?s9XPX!RZ{#UG~;{{hdHWCc>i_n|=mnEF^Rx z&zn}a`pr5Pzxs7O#&f~g#8E(-KGibozK(%p9oL?l;xImJ#JGkgEabWwU5l|Qh?xf|RuaK~&Y#amEMpPE%xNl(y?txt zTD{XzZ5{GX4c`Lsg!Dg8(KhU2bRNuWhk78$`_^1F ziyx*k3NuN%F^41zr>R@4G_%cj_Wixj>tibVnku zMjkR8t^ZVQ;J`8byXGK1cUiR8jyBBDl|4P|pOmmhR`wgOnj}ac&f)j>RpzG)py8f@ zu-T)I#u-#G?!F%jgPmoN;t(xS)OLUm%;AJbch(Hn)!a;qWB9(JBD`CRm_W7a^^=gq!Q(^ZJMn@ z}7%)hZFSRcCy zazCpKI8|zk{2<0br)J|NuNHB*zxV#l7$xgQl;&kTb6m~J%CyyovebE!w0rQrijCze zC0;XDe1{%zp~W0#l2B1$Gl$PQw9-_%qFcH$W7yGH{32ln8&wSx#xHjdnGTz(w8hLV z%-1@9pQ&x0d6;p+5cwqGzk@{Di+Gq|fm*U2?M^<`Gptr0_VS?@>rJoO($K;7;+^xY z%KQ0?sU#Jmtv-gcxl2@|*6lvno4=QYfVpIQ`s5Rx?Ncl6iqP}q98!On?2%aBGpaxY zo_?78TTLe*sRvTI?(Nm);Me3R6G)L!LFYK_<`|&%mqPXgDXfJL-Z z|2|d)k2G@&3&n#p;Za?UI@!1ayg!wZ{^1FEJFw2_c$RX#qZZ6M*OHG}MmAw&sk_<+ zV#}Y4?R!*Rltyr2;@JxkzjZMwEARinBW@UPo(0*Mq^2Z$j|Gz^`-{Vww0B?+YHlMF zULy4MDG%E-P9mpM;yn~Hu%_dp>ag!T^jb93>cuq6Lq@>?oh=AR(g07AKQ3vNe6O}H zf9qOOFH0T>w7vSRbyr;U51jHtlh>U6UPI9rN9jbd8&75JNl(ww9B%^J;#17 z-QC|_uXxauds_=ty~uTJiixJZ!c65LKCayJ2)WlpsFUIFZ)IkT)@DUb?{@V1dWz43 ztkSOWb?w@f-iU)39^+Ouy8O&!FbN@^eq!U{li$IH_pH62)3gE8hQEW=Neru??`axu zOnB}XrT&E8h9bi1oF(@yZK$N{kB9BroNLNo|2Yg;L7B~3Q^rC)}TKlCuI&D%+tEdS*`6hwXm*#v*AZBPcZ#top z3=OYFszS=RgS^=?MIH79=;d=k&wO|~k^isU9PiK_n*}U_0tY=DxDjBGhs#1o_l7qz zlM4k)27NxXb!3hyEW#mhbADb)9I{=C+YC?KlFkniVj zMhcd){0(RxD}L=v6TI3Sq=3Ds2-m5uyYwls>2-`OEA>ab_tq#b3PBfj9Fg<3bl&~1kOg|D>`|X3Zd;Jqcp((_`C5-|G%9R5D-5vY?6KG;{m=f zARz%qkX$nqfUO;j0lYLbgm(5FNJJ3Al(U<@eO@e&(=oLMc0@r?Qc#z#mbD1~U(3ydqyPW_ literal 313530 zcmY&6)7?|l zRBc^ruV;6pvZ53+0s#UT7#OmQw74o57}V=66c`-L>m`7gA^7zI?<}q31_s6`_x1-~ zEbyiL^-FL!RVh)hnkk~6Z~t#4q96hW28>61GJytz8c~uF7g6^DKkl@vtG3Y~;T<|I zA^ZuG8KqB=V_F%3uJF-}+L{=fpksy{H}v)iDON)!M!S?HMfznq?TXe^cM*>~y_z(a^4A#%J>0 z@619!=g|9*;40e<@S)xIB+kbuV0HJ08ZL=vzrr-uN&o`#>Aa7{et6>t>nmYTcKfm-GHy{kMR{ShiR z&Oj{lIp_Gd38|sfBkm?Y%D7UNkN$94?08e|<|=`asU;_)AI0hH2Lid(`;6W8T@xx| zhg8gI%!jPc{r4MV8{;MMq_bIOr+2QaaV9PkO>3(salL?{4|c^%#dU8b(tolqoUjn#yd<%MjmW|NTB{y7{kx(ar}n+P0fm;{QjFbr z|9G0&2fwLJRxbMiskRT_hTQtTw(2-JG`r(h-)}xSy&OLOG4}1&NW|_XcO<|t3VPr- zam|`%>vfMw`lJ6o>C2o^siW3+Cd8S;&UNpzBPJs8L+FY2XxP1V7qFX?P%Omj_eo4M z@1h1J2i<;l>&plB<(4Yd-^HqJHHLI6@h9sBfV+1%c_nICdE3w-Y7*4C__TRC_i9?9 z-()~q_q`G|$y+$lGc!P|em&DIB#s*`ptYApP?F*+%RGEWx{!=?H=2PBo z69;PZA7mM43~PoH&p$>ykt@1=AU?e29s`&(`dW9O#Cvo^J*ogm=BKEKc*hqAT+@R( zOcoX_lYj8!yVhQ8VNWqN0SnPeqZ5?kQ@VqNS53S6sE3@*%yM=CtN6?#%>-AG{36{; zeXO-EJJG0R?#bPcb=#gghg1soFI*tzUH{kIeG4boYx3&(bAU+2UYJXvP$aJ@{aGA?_o&@6ufh!6P#Pq~9w+{!BQBW~66T$&9dq zzKt1#;5jf~n_i9AzMUHBmX3MZtucQsC^7Q|+mnzPh6s{Ssy%K;Wgw{koum1%Ayx{J zy}jvn;eFFeN7(_W2v^aK-+4lYS=kwK$jhMug!nye5-xnR2L zizc|9er)X_CCtriQ;Aq)hS2ih0=4XZ``7Y+T2%HG=zPyCd^p@RFzd(^Y>vm?bK~(V z-(48tDGztRFv3Xk%=FfcW<><%vMEV{0tJRTceL00J})vIwBb+)u#+RL{Gyal!Lu*Q zyDP^TBBG&1b2LASO4!LXbNmb(IGsrkrA)Fa1Fwd(Ogn+2M3`8(X05GJLI)-swU1|J zqd42T)rNz%m+b?l_SXGJWyUp6x0CU|EW1()HP#|bclpCUVK5N4I^cF)ZPR|BOv48Q)EORI z`NN`E&qt@H<4TmX>+WA$x5`S^nV32XCix*94=n0uipJ*3aFf6bR?_(>77j98p1|Mk6gZ?56BaEBA?T{iZuXQRoF8oYUZEF zhAeWC;4p;gp=5!S&?G9@YNKPdm`xEvpL!nF*y(mPZ(LTm)t4j8uE>@WXnJ`$yJC1K ziU_p$nxUVAy{@#eTt>v!soty4Ndi_QGjWDWB7%o~aXia?CV|M-0}0Ix$!Uvb^XC+S zXyF)@bdtcI^4Pz;lj%(Lvdasq7u~aM)K)*vvN?UciVkl*W%e+CT#s1Z+|ilfdpY zu*cU7hLIl+T^|}^8E%Ce!iv_JQ4{C(&E#q{^X6_& z4=0-)nxzi`pOvLozLV{)eya3er%pYe0} zw(?d*gUm1Co=>TVkh=_EF6vr^E8>kf=1y)Ywhua-P-Oes0b=a1KN|PSHCPe5CzQ=L 
zo;u`#VLQ&GpV0pW|SZl%oYkeBJ+Z{g2tAj{jgCIV`pdYJw|C+aM{u^f_{MjJD#*!{kqCSh@2p)K7q{Ga_*+{yQczZiTK>{qnPzfl z|G^OIj%~a-fe%vje#WC)(WY%qY*;2MU9-6-+|sP49H(IdJE%A8AYjez4;lEwYd6o{ zcmdD!{$(y{Lg&-f(X$d9tHJOIHRuM^5q|r4mbgFu3?{5M-w7`~)niX6svKffdGRo! zN#3w5Hmtj1@zw4MId6fUrTM+7Wz$t_4+-S8W(qUC_h~}0!mKrG>lQGXuWou|!Xzclaoc2YA7KXy~-v zv`ezkXx#iEV*d-lv>CRkuhf8BtH$hP*1|NT=9I)nXq=7aYa({_>m=^-)SHG6xN684ig`Ui1kWOB(Q7mHvdw;Qxx zcli$2I`zbsiUpjW(wo$_-vt$Xt7iZV#1w=f>YW3o9NL+NPQu@?Ab}xHRN0@vn**XH zylWGS@c7<)CA1CXO??=z@i8-C+0GABXfGZT%5>ffbFnCViLPIgjdWwJH1TsvzR($H znUPz3h*)~h%Ff!Jq11vGwCib##U78LeM5nNy1o(~yw4kDQ+qWkzWP4C`K_T?oO-|4 zet`p#COp4{F3#Bm5*xg8xKHYy-c{;8LOa#k{xECK2=T6+vVd5c>DUoLczjPHVoO1{N2UM1G98v0Y)@Al-w+aIEK9voT#_=)5Q^^`rHEMhxFVV> z%HZF_h9VMp?I9ArBReZxLpq{|z}Lv~Bm9nwc$5Z}W=2FAU$(+DFKGKxN;nyiJ9=XC ze}&n7e>TnzB9(YCS~~F{#UoYdRwS6CbJ{fkt#2Cy>}btKYEW5=k!;^ZK(+2c*j{?72h_gvGS{~bxS`?Nzi+*C`Qk;aTOje+(Fv7)Im9i?~_-m ztW&1tIy|QNy8!2Vu-9V2S{_cCFN?b$${t-IcfY*{;mtjePbqb*x^3lUM zqGiM!jG>}))qSoV2Pj@|;ZJe?8K#9eK+NkIpww{?>Wn|AV9uXlOFb9NYqV3y_u_mU z)Q*DMZ8h+*O}j#bA6Kz@KH4xenW|QPuVvIeuNE+E;<;Nk#;R)alfnYD^I4qjzEZ$h z4m!oF1w*Y0{-M84`kPb^SK$7GAqcP%wiPD8-jr^@S(;Wl1!Kf8YN8orfK%2V@aPwz zJi*S9t;gUfy8N!fZ3jT_q47u|lk6^u$`KB5Ov0lp__Rm5;QSyK%ul4D-i|s}!i|50 z;NzDS5zS)JMYY3*m}*dZ?v!h44bz}iolZ;N#qY2Oz4<~B{9e<)N}=dnn6)S&RiDkU z3h9C}orem#$;=v`0Ku=fdlh>D5>8(dBSfZ(fhrTFY_lMRS1m?5PR^1*EmdjBnrc^1xkic=wc#x ziwRUx6$OCO+i^Q6jV;K#L}zGw!^Q@Lbkep+AL}Euj^_hi8)f3%TV&^t*pq=oT?LzS zz7p&%F~hJ7Js|~CcJ95WDz(9rbad(qAxCDIy}#c#2;+s*Aosu+QJ2X7gzp@UH}eaIbU;8PL{!|G4?^ z?d%ROyMD-hY{hjC1phQ+hk8@cu2Y#vXE|<0b{;jOsIHXp&JTu?YPqSHGkdrgH{N3a zi_JzpA>mj8&hqlUoDPa{q%#<>$bkaWEIxYZww&i(jrW5P+>uOp1f8MMYZNQ#;{Ltt zEJ6UjJGlML;|I3!*3bYvzFP&c_`R(Uq!AFp9pDCZ*sHbM!e-i>#AgE*c}{&y z_@6CY{GGXabq!t|HR1n&8C#Nf(ywX*2V#}iJ~vx^92e8C=1kzy+(p}CogO+IDVqVr7gj*47lG5g)BU2CGPj| z+6Q7jV89mF4yHhl8yTgl|su~OoM#Mn5HD(T$Mj6>cidB#rw1=Hko*LPUs9nSnAA8usOtvFs^5@UMN-pYev}A;Xco z2)})yMYE4F*AT@O+}s;+9s7}MG6CGdc3twLUck^E2X1NjVz0-j8B@`(Q$Cos5eLVx zwPC?FLtPSFaISs5IW^zQNp9g=HP-5I`~xvRjsB+B8z5i5k&Rl^7bEVfD?87ZzJQK! zMHw?*=o9MhbF*6BkYm^(Ojscs)wHX$XlI&POoE^yDl)9ur*ie!UG@^Jm$~@J1R8;g zCx@}+wo zeB_vkBa{LbJaxB(D>JoAw~umOSwl1l6s>|omn1I1vCzKF;Ks}~`&}67W7NAfb7Dl? 
z9MCR1FQt0vXLT}VMEL!_636N`rYGBBPGOF)-ups>{J+ESod&E)gOvtxTcm``sFTUW z)qyhVemv?b7PZXMRPp-~FA+0*vbmI+>6BZVz4}zO|Fz#X&c8JB)i0o!bI|#j-NtqG z=KZsKEa4o7qb712?5C{x>Rte38x$EN(%w2zhqn+oI1|MWYdt(G*XhWZn;>`I8?^BB zW3rFr@~61fkwl8f$pcVak(IsD=xT(1+8dRbkpiW%N;BP5{V<8am8Xl0Ari{SYY@$W zWSBL~g5OPZ2S)DL!r?8ZbQ+zL!2q$1A`=1&7Q3IC-g`=VO?9jgDND>heH3K$C(&CW zfDFRA_ssnDAkipm$SJWJ>f%i@oHSI2rqRqJ{DdgjkOZHYxx;A!2<72IM#xfbBk@}G zB4>^K_$T~-ML`qMe;#tFUr5Jno$mIX9<4S-7V7{8{ha>yxZabOUN`~>8Szrb8Y)iM zGjtx#*VOc1L|mbmqmZbGslbr_tavr zV@gRG+C$zr#kcuG97nVn1QeDV_BOIpp*%eaU)p=~Yo(J~xKSa!eGeP5Q*GQ{xEbY{i-EdDhk|E;+>P zQ_x5hiI7>vwAG2#l};W(b<+S>DmEPNi3<=2RVN8kvT6^@F5+WNb=&Q|%za39O#I0M z3Fkw2P&NJ*xlP=G&`{Bz94GzVelK)We9-;=`%~%MWI$J##H*zfs}J{M%wE^Nt6_pAg2d@CoIK`9`I_JX)-&}$ariN3%A`v#RwZEdBm#+^e zPv90x(~i6XM8)k$?7Z7LxNTla4H)$P5XraJ9}l>g`j;z&-GO@Y&k#4Szny&yB4@5G3Iem>6S-C3JzXrawh2eD=Tuke>=e70+qkU zQwSrMZ4`uhO5(Xo-2L?X_X>a*IzP^48!>k4IZ$21Z|*vOI3rCX%ccL<&kNr-5n?i+ z;0=zd-X{0Q-29c{k(;_f*`yyMDJA#(hIBh4i$9 zNAX>bp6XGiPnWg7;s)(%)N$VVcFkvaPa>7FQ;QBr^Wvk}af$}9GEG!+m=+y`-%AUq z1>!p5E@c7Hv0Ws6mLZDJBA4Q6C&kT!H5T|~%wPo{Gw9rZhJJ680FtYod3TNNWT<_L z+L4`1YgmbQ@zl=5oS&h;HU)fxUTQJJwyFIb9=Hu)UXWfe0J!#%Cm)lM&VNU9OOl5# z>^^n;uR&kvUgc(hicvZKhFs`LNWb;@>W;13W4N=nJC-Ea15yBH^5y$xB@vOwvVR!W z{iF=y$(0Oy&m?;<&+zBACTi;BkajZE{tua-++u=i<>{j~w0(4Eu{ej|SGX6leYu)) zcD9v>mQ;P?Wc)sm$8i*U(9O-i(_aXq6eizl0wy92e_t|1n$e2)zQANKtmLsbGN$q= z3Y@d5N2tLWNcZh`6G`)18c8!V$tFCQXiO=24>RkTQcGkQw98gpEx2LIhP2hg(+|@t zu`$tI9$(8h{GEk@L2piOr7P9{JImHctnD>_n&s1*5^Qn*M&3d_7qzTEzj5thd#pmx0s3)qXBlfo;;*>%W7!m@(`nJj~1I zD*Kl6sb<}%4*yhhty{GeJIH2r_W)$kbkKcc`(k!xHx446>jtGbJzIgYNCrKqsoHjK zlZ@_WwN66)joDP19H=%&4hv^dh% z>s}R==?<-sIY?RJ3yKyj#E91rrxypx^82=ZJ)pa64ZS!30elw0|39VUEykGHd@pBr zVT(_fEicJurBdYp0ofDk-!JEb0-NVG8EN|sSVVivRbRI-ktX22GbQbtHXxho7Wyd0 z^rU15MR{u-#;E3+`G`6DGF@9x4;|+snSlLZjK+joK865OYD5f+Wj(7Xc&CPD3`yHe zlHc%{V`rh^o)j{wN9Gxee(?PewP4kPvmo@Nc+NW>)Lz2rv5c?=)*<>j^WiWu^?4Gw z&?%mQnC9=;m`EN zs5b*C(i)qQ`sKbqk>b8iC;@#o<)OF}ytB zgyk_5eQBL3={PNjOfm9ict>OwO&8l9M^X-nPdVzN&dxO)2DJ~*WvH-*b|P{5Hw(l8 zY&`~NofS6$MZBV?Pk7`b4Nm@aL-aI)L%qHBfP^-#eD0wX?j2DzYFiAu^SRw*R|@{* z)N;5TM(ak6^^7Ymtqm{1eYX|%iaP0`1>|UE37~&sdt>#A!a?-ln1nHYsYY*;5`&OM zA6|7csVLEupjnJQ0dYJx>5gYC-vDz5turbm4f`Q|UAk;@jcM(1;G>`czxAuNCQ}DO zcVc$AZy+|Dru<)bo2CSzq6*S*%^+?*p}s6XyQdJ&y{1_U?uLB8O^w(VvvN7VEuu(6 zMfy%g5=#0GW^wRvm8)J9n9#$ph>{}%KPMM+>u}RbP)VBxII(A&AG*kMLJ~p&B(lrZyXE7& z5FIssaZAUvZKDBsaR#+#T(S#^Qbe1A#t!VGurOrHqYlG>5i>cE4C77N#0`D6Ec7UqfL<8<(F;E%7ZuiJt0WxhL2LUb+h1l6=eKZ_8_e>g8>Nc z4UziC>1$_Hpon-(3%ES1asQ}c;^Lm)sL65pM#4nw=zxPG?SxyrID(WQ`CPWH8S}eT z!g7TusTePNdD9;kfEvGpcRxwe^~iFv2njPytZGwEKChJ*BYu&5866;$ zL7!7U)laz@rqbTiX&LhoSGXdGh&cL87s-w%nddA||{@@kgp)a2#Y@xCW5WK+=_v4St)*)5!iRX~-oh|l+OnEq%S zO{t0>TH7%NW^o}#ri-P7LQxZN;(M4Fh>y52H8`a+;F@3D^R(1`@6W#DuMzmgvIKU4 zj&koD)|LmIBmadxDUNR(Mv9uha9^BVkAp9fFFriPHfnXBE#}7=&wxid?83DdKG(ni zWl3z4ejxR;OXdJNqo%E8rLye9*40DjxsN>_#uDw0c^54S9T66m(QeRouec(@*Z+L; z?`d4Z;< zsGoekqtk@w@tED(%7<|06@{SU^wF%@<#2J?#nN<<0shQOOUB0tCV-o}W%d7T6O+on zu%}H_K+?=xzylo)_;T>bq9LI;xIiro7yr}vshlW(_RhOWA>r6e1xa_QwLUD3)%}SiMO> zujJvCHdVeU&1zRd3V-YD?++|hPYYi6aXmYM9`%0GH&(hr7{9uv_s(b+9V&Llv_cSt zRiSVx*f>vjrJGACxG!8+nXpEX;-j(=vzB1YgZi`3n`}+dK2{}Wf=l3q){TvP{UDL0%s*(4NecUz zaX5z`oHEf`t0B~%vF$QMA*npCb@Ah7Ax#A%*)$WZ!Rv!+A52*@!ZnBdU~1#Qi}^yj zw#E*Fq=C!`XW@*IEFY2+4g&l%Of7%FR~|M5k35|Vi7~ooI&|^;`uqNeFLwU}PMc;c z|1>J}>H`qvfzZ0w?$PksnpVC+3PjIk8z|Tfcc2&8Sk*YO)eOZ)DnNeRhq=@e5X-<4 zAKxNQfQFnRqpxp7<2*qg7t(G)G|Z~%g*;Jp3F(g@wIhRx;}4}GhK4UOLIjUicw*@W zM{vzT3j38?CiQ|P1t6!e)Gq}WFTyld&1tj79z@voT?*EAU_L~ZnkU&Z<}$5S2`3H% 
z+-S1jvTrX$R>g|GO#=djPj-G@UBRpjLloWnH{F66A{GA6d8g^}WWtc8;~C`C6RYDw zv|TjjtFisBni7NdvZSiM6F4_&q1ieeV?WQor!QH_dqq=9KALH0aY6Nep``Yd2ag#L zh%It3h-3B9^^i5YpvC-mIJ_Xpd&csomuNoI6x-UOm3aath(?1+H`X3Avc zA2s2xFGe%}lTtE4@t;9YZR>IY`>WkohdS2R1{`MUKULW z0gS03ZZ4i%71j0vEnH~TCF#(Cs1xb(E|E)7TKezDKHuNMvKS=}wna|w10$cpzsc

y29$1X)$u88P`FE4ZG*NbNXyhB{R4Ma-deAI|UWrGF0?^!|B^ z%*V(X2B$T}D)k??d27sH{~hFv9A3_xmV|cVA^7{^!OjI>A(<1XwJ@o~SVfEUeYii5 zl}*$b8TQo++vUe~miAge$(a12YK}ri$oNJb9Hud#PM}2M^I<$$M3Q;z1636o>+VQd zClaek(eV+-0-9kmbtY-0z6@$E^G-$)@_G3PcKRV**j^Z1X&HEeaZOCE3g;qcqHuoo z7a1T+y}5rmnym!$oRaG9DDBuT0Jk@IsiyY~f)e7BDtY81i5@4-Yd3`q9>uYS zE7V+ynOVDq+Ry)wBJ(di@pio?Pw4@uuphK4c!ute75LMn85guj+L(HK9%}$oe zfJl9?4-hA+__y91CalGeL1F>8-vq8cBs>DR| z4L_FK_YZwALKNn|XE>4SLrCX6kIbSU;epy~1LE*;^Ao^RL%8Na3@L;OrL>#vM(iWu zCzx9n6g4>)ld7Si&Vg-BM4vtzOkIVq>RALZ-J%oFS+CtCc&M(d3QP^VEO+i>vs-}4w+V-}Ye4~Z& z&)xNVocChZ->WY6DJLvmT-GD#HBZ!V^9uHpDaD51F^s+#bXbQ$L~D`KnHFr3_-%)2 zpckrQF-0!hkC$}Xq#95+Y*v^zM#*fYcMx!YVqBy;hdD5ltp^-9Ul^rBce&70HG~8@ zQ->yHK^n-b)2CP$AXygF&qw1?X^bJe+(fz`3}&y z>JUvk^&+CJ(>_m}@9Sge9_}iB2UM1;hj8%P>8brv)h3R!8xx2gmxX3){RYWz)6Zkz zL3jGwcGSP!bhEH>93*fRFSPbrA5{=NEo+ZoeNb@xTq4HMkjlx6g%!fadbpn;4=HW- zM8TuUDYf8B12=&~ZLJ=-m2MZ8T!fQPogv)V?mrAjG=#yt3{>R#bxg^dY{;^q;*w%k zvA3Gh>|hl=0bX+joT*xsc#N~Aj@ND+%IA1=70q(@iTU52r0LVslEHfX4%V(RhsuQM z+13!aD~jj(XgI2rJ|(lyU`x^r5^IJsga$e$oxC5@!PSOWX+$~RQ9~=Cy&)S&QZ~t2 z@R;4L!v!$)BC`#Nk3CP;l-r9CyON9WV*#?V7AL`;mgR_KMDyJjVPH$b*3d^%v>7u& zAK&(iJ)m;{^$A1A13TweJ{JwAQZ``mF?|Nm)=P;fky~TLdAIv zSe`iYgqWY`-U-JUZnVo1h7O2x%JgH>LVdfVo*2A^23XI6M29oFvz-m@BV(u%=Hdb7#lkT@?1z>=9U z@@FA~jDdJ{l{@UYu&I6hnphW?VF;WE$gHZdief6d@^8C5R5H3t!Xp(0^{CtKnd!z_ z@UDS8lDWqg9K-BFVmqB5{Is&ZIeFLp?iyvKL`IZ1@14!^X^MOAnbhWyfAL}T@V2A9 zAxxsPZj(>&A}z~0O1-^_)SlqL4=zhA5a5WN8(52`O%_lmmTM!S1*IcZFkDTmlhE&4 z(#cY8@Bl+Ot))9z z-lKNVlfM}FY9a~@YDa|$Hol?H;pjC&%+hY>9+!$XHLNnBP}X6V_(+o}vL1V-fn(}& z!vh7J*f54xH5&dWT2ZHHiBPM3HU<~GnJ>P$LupnTbSZHL>m_dd7S__kpLuExsnL5x zo0xYT)SN%_AhtXcXtwa5;%-ikaZR@`gWz z;&TSqV@I2RheCFqZOzv72lt-uBBr6>MNAt{SAh zOUV91CB(H5FdOMbmZ82(qfc+L*)@+TE?17LB@7Df8v zjfvl-WjA}{&6rYqt#Sg|(Yxq9Qh502L?C1j7D9KQRurQPECCVzek5!F@Sf(0O z-zzFKa6TC8?_y;B!k!XuwW;c@xnDN^poTaO_VKQM#4)Lx8FO-H8tY;3j6-U4Rpxy0#m)LfxT`PpKOR_z2|!>j_Y z1(pDEYd-_=&k@4i$`co9CaBr?e-QX%WS@tn_*pjLb z%&7%qImi?G8DxHn2uY=c_|YJ$g6TvSRz9GY5QuYylamQcr6g%f;ie3{(hJ2;{SR{Aj?3N{+1nOgz{*_!btxEACD{2S zXn)wyISmBe-y8}^1XBcesn->z2CInAX2obsrcjl0w18I{3T30^h0PR%g0E_@nCnR- zLx-;?th5vyAfmGQ*tOxd9MC_{V1cx!N5mm~&4=I!zrcxbX_08o)hb~xA0+&0*TNsk5%6#evqyEj66A!yi;2kUie+Vlf^`n)-?%Sn7^2 zj{imA|3l=za-9+;DD<@TN(eMV?2dm@j{w#_?RSd3hb7rAsei=bxk6%{SM~4%gkLqn z@aZGJD5CJ~WY~7vG&WdyQnB|CJ-R7Hlq_eqM*SKmCV5lZHeNhr~$dx`csO4 z{(T{;IVk{q8x8)0i?a9!j-m78FZbIits*txF=)wP&Pleoa3#K2N8yL$isU7#&sRNO zH%L$-a;@l?@DIURtf46IaM5>4b*n>cNTfRypAkZLbR?7_X%nVn3de%(^uO*e)8M!T zFUH(1xurvu!WBChlk*f{)o?>2-J8i=$XX4U7HeD7kvD79-vp+CQ<$a-gr$OIRcu!b zVdO(6!t?#Z-L77{O2BH)-`xYboVo(3oM36VMWao;?~Z zGFtb+T9TloZ=y`t)snbSQaAfX=!qW4<8?Eg2+EU9BnYC9jRzJI zyu2PoKegi`8DmL7ft z{0^!~cQtpIuDO9~Iwk`MB@Syu^Uk?L`7~-4ME2qg86dQO7x_xxa%SznC$&Bh@mW_m z)9hVQ=6E{@_MMP%Bhq(ltAZ#?;VGt=c@$5mN}DlKTqrKfwJA{o>J;_>T?;c<>fVd| z)>mzb!nN1g=ZW`g?U3y}J$OtJ7w1RTA)-76V<|?`X_u)+SVp+*XmCdi?cQ_gXZ7O{ z%bCyem^Y0Mol|vLHyaUj_E5qFqASa!^e)m<1a;AV8br%j#1-jkh#AmR5#WLY zNvuoKmJW8T+GjyLh{pL^ja+f4Ztr*i>aj^1V zzI7r*wPuNmf2;jd8vl;HHYV3^FXUx{wzRH5D_5YofR~@|x^MVi?uUO!^IbsoSpB|q z028>N&znysLx2FomStwUH=VW^uLSflxE0p-!D}0gloPag&r725%%H;V~rHlf#(VL&Ca~0vgG!p(SO_~Xkt?u z;^BYd5s=bcWYoDp$eSuk8Hu4gwlORw#(q6bwC<3tDsvS0kFvaFNB4bhb_NK)dPM)K zyYT?jv;|UV5qQyX7v%$jIrqmIVSl1zR!podE6RpA{waQlf7+U6?auG@gC@2jPlX|v z!i72TCF1T5?$beVd3tf3r%JFMU8B+oADoxYR$LQtp< zSXiNrQ#15PF0KLfQ7I{1L8zm@tia--Y^@DHRdf~Sxky`x8rZX?vym2>z3;urDiaJ$ zIfn9qEj2F>AVZRcpht&WTUrxg@7EsdIFR_1Y|8#);6c3#u8f{&qoI(Tg%dLYVswWP z)h$W{W6gdjYW?$74`IIf01epHxj8rs>cJN@pTX2RMEjvHDdSt=PK^IKsLQI3ufgF_ z0IJ2?J9TEi*ZT_dK7Z%vgh43mF%dXdq#l*$557u#F0UXPG7UIPmz34}KnLi-BOGcze_F 
zvEemCgw8lYbI1QvSxUGd)RX4qE~N82c(9Uoj#3(ooB?CFq>=a6dF3jA3>|~t!H9gI%}LnL#QLmzkXb85Xu`!J z9U)?p79+p-x;)zfAtmFQ>%vUS3B!uQCl&eDv;qzIf0I<86$eM|zbb~lB31{q2@o-|z%)PK3>aK{2d z2aEMY8WNJ^W&1vK06#V>nX+;&MAs?+<;jg;ga&9R5W9NN&RRakc*IZ5TUCRh^PY@B zX{KAI+$10!_smBnk;CP01Bim~pBdwbE}}bF;FPc39#f@!gz9&Egd^&?Q4pTjaOp&D>0IAL~)b1tV#)fymkkYT}s_A0C@ciS4{4VFa7wV*mpce>f}`h)(VQ zUMZM;dt*!J`3W?U$PBs04}RX3>G)O5Z^914+X5IUO|@Atf-gT9v3`}bWS%UjQHc+G zfO<)vtVoWtg~l<8QIwh2`yE3m!H5}tDXotN>v=*^UKrdjsx<1$a?L2slN`l}^^|SN zJ&crL01qlh$#oDd?;ip&%9li4D=+l1j3E z-hIzRClq0Vg{g<4AyG|eGSSc^L~I#Bo>lA%l9a5W?9%)(uBy*?qT;;PhHeOWe~*Pr z$QI(jQ5*PG#4*N!=~j(m$FO>r>Z_$_2LYXJqWMP9lWiAemLE~+3A)dg-hOS{m&Y8( z2yLzfZ?)aAyZ@<@f2L-k%b6#k^{4K88LsXi&J{~=<6+;HG$R|KVlx=~OXqak&-Mt; zIWjGxDKO_+%mQysQT& z^Z?_(Qu!>;9ALz?t3t8Ke-L+_3SJva7a_ye2YeZo_*UsdGJefVFZ+Q+bgK?Cg0V(b z&WCkyLAB32yL3-H!K>!Zw2wW<#BJpr{#OyzW`yj=vkPoQh=Cq&+Q@g>ggBlq`O6K1 zjORKL#+dm_{7HG{V?Vf-o!@DUO0@ECB5rA3%6;rXlbIdqaq%yi%yr|~{Ivb~U{oNW zZ>su_AI=!w){DBJ{#)?v&Uj6e;-^hEKL&?J4e+h0+>h&_{SeG`wm38C4f3UNl7bL- z2aT{9)nBR5A7nnx^vTDd+l_&1DnNwm%PHBnT1qBCCh(BwT{%1VElS5^g3(2)H)@}E z#g5~f8&xg*KdQbmEUK_ud+3HC1_nW57`nT=L%O>|K&88e?v`#81QZY{LApDn6zNg{ z2_*;S8}xn8Ip42+&A#}vAJ)3pz2dnSu0zpyxXY|H2VFpEaxbM_ay@~{tIssn^8+e4 zJRbH5PjVSO8Rx5kGITNV?g@iyckw=%lo`kVKIuQ;>CZEn&H9e~R!D0v5;(aq{K}hM z<1Ogic3tGlw)m0a1W$0?d80no^PIOGcQA7pWWn+P-ZWPdj>?vx<}rWY2oEj=N14!+ zMCj?$1?3ScJnAL9q9xicocP4bHUWOa(^=ww6F8sYmz4jpjrDyb#6FY^)-_)1jU)Q+ z#E<>&T%T9Gd5a6U9UjGtdtDPH)}zZJzwwIaRbD6{>IbrwBNote6iDwAZ6~pB&JSeF zz1-alErIMa*tsVU9vWCE2RLPu?bF4o;OgI*OKdMslI&W9w+^tDwzgs%9z(!|D&bGd z3Whxj#mi~KK&e0#_&O{CB_=I=W0K;$j@~WTz@m_i>Qqx zT4jK0I)6Os`{V>9Xf}m;C%sPYx=2!gDQf%NW8qK@*CiQqf;-C!kbh&l;js0=@Pa-r zU)b|ZiPR|ddmwHSKdT*@4inYSUR>7^Ql?V#7^1HuA{dLGGbAi!w*H!n)?^qql7w8B zs5&Ea1$ao>?mt zH%!LYB(eV0`~I4$9BQc_An{{sdvy`JYbVfG-$BaMwwWjG06U(vxF;0qzSC!CLq^80 zU8q8+Mr*`2jvrbZ64Cf!$T3azh3DKArapi5O>W@Mv}+*a$VU7okZiMXDN-ASP;QXR z9!3>eVHu}4LfW>GL_pY>Sj&3&@%tWA!=;D|E_(ZPIxSTd#n00!^GG)-FfABV@Gv#~ zf9eJ$)z?}Nfv2iyi3;J@c>@M)XNn((9Jvc0J-_E~D*)m03uZFjh~oDt%3|`Bhwjk3 zP*^YaqC~|dTcM?uMmZ$BDPEE(uv3jcF8qe#t^W#t&tn}fHH3B-ce)s_itxrgNEucO zLbhMRvx$6yAAS5I@jYg&MSy~nL5p#F-ICn6V--?E6`j)JK1kXzHKHKMnZhgBZ zYT^y;;YDKGOo1G6X3$6vHGVe{GKoJ5H^a04I2*am~UERK~1UZ zC6DP(=c_UpD7N|$mv-RVaCD(mkHDvYBPwn2e^a`lH`iK!Um?Uc5l;8z_k!V`s2fTF zt9YLqaX7$OmrE#2wS4*SsBx0mv?kbfnaAw|%#Tb}QArKPR6=X9-%gN8Zz=Q{<9zrX zyBx=4g*N2FXa1xHoT3?&Ai{buL`)}OzLg$EQ!`+ohClI4ChzMcrK5+o-f7esb@tjg zMzF0#*Z`w~;2ujvU7~vEb+m!!sIikJKm{3hMTxER++qCo*(ykCO%qzHv#K<{|FQ;aJwYU|TV=Hvdza6s z63PViQ5yZ;7h-x=OGzw>$*KHQA=Dec=W{5ZI6eJEyC!u#eH`O=UP=U2*#wxL0b{l^ zD^pQ7iD%!%>als6)e;b*| zSEW9WOAB9-bBP71err*1AqOX`GsY;k#n<$7fS0N11QSDobW(7%f?B)P|8=q+W^Nwb zM8|(04WI@)+3<(&11JjC$>JO22_<#(H%X!xtte_N*?t-dGUH1dr&stcH?2CI) zS3TVj>E z(Kkk+iHVk5JoE~h>`m_pQVcTBg`(-~|0_Kr3u%RQ57k%@S@{TlB&OshR7`!;y zBmD2S@57l?S^W<|Mal{xqwb5~TlAjC(tp0-VFApo0k4?7%@g;jQpnQ8IF?-*ld_d? z1Y}9cs9Vk8ZJGdqP3L+029|Cz)LKTV0K@C|G!Db8!Gs|nUg?qzaud{u$R^cg1#D?l z_nScrQ?F$?5~{!3>Z0Rr;cpq!km~8@Q6FwTx37372^aR@@Qy2KN-MRx2l}I(q;W>kI{e$gFA{SnlyyymT@k_}Gcn3X^tiO-;7e6df#unQ# zV@mW`^~_{QB|eJ8;FBs)&rt0*uR4?DKDzTq=eR_)>}!mzx{F_mwFYv9JE@)iHzY~- z%TdhsynErzL;gS&h*w;Im`%n7FjGTFL4z^MuwNG;vg6OClU*3n3&utPyjnHt*1%_t zVABa`>H=7oW!+6gzk<}5IJ2W1MSH1rGrvSwV^)+9lkPR-c)Z(fba&H9UoChnt5!av z%gg7HjJ}Fx^@Isu!!dJ+#g>u&LL;0O=Pv(q@|!s2(iJzXiE?;`*Fdz@VJ=XIa=YMv zXFva)$3KYWA4E*w?^=D^lq?6(ZC`b7^1{#~=|>x-CVTuMIX;ggz%|y!qWox_5pj;N zpdD*6v`{seQ96kPV>>;`(^5S1I!GInLv}hGPKuji>2umQjY-dnI+HG;5fdZL-GrjJ zO%0AV(f$gG! 
zm{hWCbk(>~46C?|ni*)&0Y{r@z7$(YwI-k``We)qgAHTZ%y%K0*(pxWQ|pd_0B z>CoOJz!+{3ZcJElf1hcso1+@8vA)oxZzD-2hKYF`Wbc?}TcRmft#l2gQfTlD1(lE% z8?NaQDTsZ<#8A)5-i}~nJN&Nl;>xQ{D~N*X7m+Ppx`L`WzZGhw`CDZ{Y0*{eW<8XN ztItn{I5#yAP>Niw(R-TtBAaj(?Gz2_IJxRNF&3Q_bHQ&a8E_Q8Q5u%!%4-^9)kSut znaB?zaqn<|H}=wHPwQW~|8!jVewg2H;@FGB3xrKR7J~mhh3?sWM1b^O#or&a{~@T8 zi1}#*#nm4jK->1((wc8ufKFVIFV9S^WqX?#)nHn6T(1kMV_lidJIJV}260ASzT;`G zE+uUbrRqhyVq0~Vv{Su^2UCx`O5b;_V)&^j=A8wUljmu@!^|7Cq)=Avw$c$*qd^oq zo!gHV#G2`J<+QU2jkkrueG}(8pji8iuBg?TG$`HND@?~rR#j(u;t!c`c=Y#^ajS;V zy(CjsjmZKk0?pujvecm*0w7W!9pa2pX0mpmaziG875J7ima7fbGirEFgsph-Yw*-$ zEcP~&s*Kv~g%9VDkVmsBuXAbC7Dw#irekil{QIZQphq99(9$9QM6G&}3M7A{$-m+l za;WqEI;_q2_Hn&5d`im@(01A+Y*`uK{|duJqS|Fd#TJ+XamZ-o3w7X$mB$~fAo1pl zM~U^#L&s=m*Gf=FX^AX%G}<1-SvyFK_`?RS_?6+HCa3G6JD-v4`=Smcdbt*Gh~D>7 zMk}lnucL6#<8eoHjk-Y+_MyG!_3!sNwBkAqDNm^f*dm^Z<_IWCaOJC9E*&@R#?|&{ z6ifrZ6yd33p!P7qWo4Z0hk z3k?RAt7M{*CK#7KzT9=A-NHKU0~>HzHOcBzDZp39jBcC~d@^h%Zr^D}S7XKhltxzM{ zSEd|FHN)?eh8~#m7Mi+jXN>PR&I3tn<92s9JQ}!z|0E>KlJBeXIM9Wp7|&zKk6U31 z$A@1BYNT+VvO(jVVIvb0{$}`)#r$U}!K$ECAk$-3dNMu(!pj#>7T#AIg6nR>bQf(r z#JJthS=_Ft3RUb!F$@Q1LYUtbL_F6Nh>s@*yOE}bXO6q;L0BU=vB(xIaCm22B&(F% z*O#g7PMSUzh@Ob#wCUMCQq0i^_aE*GI&o8PNXuh7P&t8>L-741EsmRrOWv~`Ej#o) zyZH?FGyB{A@0R*^vv)FVMC+f${LDlM&k&dZL{>ini5EUNA0COwqnoJUR3#cK4W63} zKsjtLEkiy>7@|Z0UL9(Wq=UyL^e~{hH|#1yovaEriRfjFsPts^d2X*yNJqHPRe=<5 z$+&vkJAccG`oy6vsTGhWRy-XTG{S0ddCV|UGbu+{M@CIyLUHmL%v_91w!pxf5#`rW z2xc@yN%$S>kJ@eTAUODx!#&A?{ov7}z}8dEFV3IG!5|bNiI>aZV>P?}cq4pvI@Q#f>r5>-kc$Q#$ph70pW-R!s(>_5C{5;{v<2tgdaXc2> zWW;OJPiT#tiYWSDpSk(3HXL|qFRac_@-(b9Z13!I7kb)rtoKok_a%ez+fs}NNW}(K zVu?{UESUP;RuUfgxRMn1;fL#HopdXFMRRNMKyMqrU5BY%^Pw?SU<)sMqiD zX3=0A8?3N--w~8JeYiePL&n(AZ*7Az!QT$l=tmk<-?r*rvKrbEoUJx^KzyOYY6-NH zeLv6#4UW28qzj-uYviXYU%k-iP`f4l=*Sr{M^#ZxP1jJGx;%7uU3n>MU;CE*|M^o~aHEKXiYecC?x4 z#UyV#1`#^Ura-rTLN-fh^g&of&fnJpL*IB|(2sx(D@I|uQ42I5vhn6VsA2Y%L6gpC zUVXOaFvdsNW41 zt$;Jq#n;K?zJVMu2eA-iTFttveEjI0I0`2TnO+iF*LCwGc0+gfwLuz)=Rj4!)M)GH z)MQa-2E#aY&0~DC_iVUqXqO&S-iKGq{86I9 z{D3kBCeE1Js>00g08$)0-0SFxXI4(5puu6L@^~Wyrh++g9lYR06r$pcp`BNcwk0cO z?4y}cfl@z1V1@d#2IWE7gv}`sOk(k*_-Ky3X4k?;ESp$6rB_pyu=feMpGS!MiCHh6 z@_OS&yu@$O@BshhQTdb!9C7#B#eDz!OVw3u&xUQY!@Y+7waR6rV32Bo+(?zwcgWR8 z5&?n4%)_+pxbT%>(WXI)Ou|B2nb10-#$31mz}$bkh|lkpRSFPkky}CMh>yF7?Pu-Z z`;LdGcJnNgRjq&7o?f-1&!?QY2?0Jk1`oo4ez?+!6@asvNo=WJ9TGGL+)S_wU`JCy z*Nixen1!yej9Jm1Ni$78+TPpmtX-1cHeXvdPIpKXeJ@#rv)?!4rxCe-ojh5)hUq|4 zoX^z+@BEn;H!a0krikKJs;E&odjA%rp-%}8jU-9THxwtdw)&FplZ(EEQ%UCYX=ANPmP4Ue;cg#04{>=4ax}lT`00|5pJE;{( zbo*Gc?WsD5`7<$RJkKsnmY~3endZ$Zwd`LM%uG0-5oN#e*v;3RG3dqB z8(9!hG-eiW|1B^V6ffy$Q-1R+dw6Hrw(1++hTbVw33Cq>~i|_k(uKnUebKna3}Or_MlaEvJ-rkL2txp!eEtoBSpf+!Pi6 zCw&V0sPF&@yn9GU=&qlE6J=^~BBYsbnYrUSWX#@xbQlP*#j8T(5*S2Vc(Uo#QKI%f z4`E%Ovn0|s6Tio`LC+~YL&Kp?iCt5{%wkuCmW#U)Qej8_FoYaTuJtfPe=_|xJ53oN z071M&(ekyN_8Gv78%6ESt)7#c7h8QnpzHr* zu8VOo=np0j@ujF^&tv-3Hv?_| zC*=9B!kJ6{NqUf0avize$Kq7wBZLoqrzK8Ixxf?S2|`80W`$`u#*>Vq@ooA7aSb;c zm(&)z!K^*_Ef^;X4O^tLR0qp=Q0OpJ)Li{gifSWo7QNR#!?uzM6V*6=m@Z>wX)M_% z<*KZQ78{$G4*Szij|120Q>9`WES^utX}sSp=ggjlO_m^UkOlQvFaO9tDA7y! 
z#6HNRm%X`Vt^z-ol_IS)n0b9giSDq0yXC#6ElI~ATA!9vZxGdFr9WELjztrUZ~LHt z_fUxnDf*`yAZ7KJ5h8EXv)y@6JSkt3?=6ydXhC9;P( zG#xb-`FA|+!U`xbWuX@y%gMUFo)9@TDIHRX{LvyuqyrSWS8_b-&!#GU&$ zfR?0Oj}(x~qlvsrdy=7sM8ip8xe;jv8C0Z}#bpEXvmf##X~%!PjgQDu!$33gWw+&Z zH!~$hI9?&^F7=0A9eFDg;fR*DP1_8H5t#|{G`hn_cs6uK^8COBoi#ul>9==A^z>-V zUN{@z?Q+1vCrH|2dLGk_7#%f-8boqP4yjtTRsBfGylr_D&EEq(ahWDZ+arVTwPMg& zS!8tl`#FbdOkS@BvCU7D3qnLO<9XS|wwi3$g0lGY=ACEld7`#^*}ZnwzPyd#L5dd~ z?WFjPgN(;!_~z(orSqtY;R7DV1A}sknzfrw?>O=jM~JXNKi@^+6HlupR9Zd$nYcf4 zcHo);G|bEM&wSsGGPrsi>)iQY4)pi=fQ!fuhWjL<3E9cX#uG1*7*!l}1ZR4^#2YFA zRbIkh*u2biu{mU1IrDxMlrf(OErI{E!Wo7Us!))hUdV=nS&sX&MyP1HQV= zfyV1Idi`+EHS|&=Z++)J+C}?>OH*z$*l zQda`VmW}>@Uc|aQu%lJ!LAX^h*ZR@vb-q7N1IM)T?r=&RHnl^14cO4LM;7bZwTl6m z!L<}Lv*X9uvnOYR`yQeTThmc|gWppjA5!=nT_47kvl zchyw+_&Zv32vpCU<#u4)wf?) zl#a~*ha@3M@jrvuc#y`fLdu2g)&oW||9DZkIT9_KOfcS`Py5no1Ih9gEUME=>Tn(T zGZ~i=dWF?Zs@QbkT6xV%12n#*64ZEQb@UbxE8V~j^Q$|@@pCpIwGB@N_5nTHm%IZaYBmTjI=u0A@Bc$a!*2E(Tq#^Kq2#z6tcoZcz*givHvgb8g3@HadXa&t$ zkCO#mPuE+L7J}59r=&!c;%p@?90Ebopz=JwIFBoGLQSx*b!ca&TdRY4>ngl@8aYb( z(HGB%?L-<66iyHY))Y~PgWq6^449UG&JJB2=ayDQbWaG(GCW>=U~rqjZWQrou2J_b z#82rZNGh=AJ5x3{FbNNs0DR*~Q0p?7!Hyai5<=t!mSX#)WAl)@wIKB+AK2l4`gmcf z>^lEBpQ>cavY!PAal0q07u9&M!6nX~KO)Kt8nw0=vwutovOGRw#P2{hAPPc6Xe`ln zpI_2@RSs<;XzsMWYWAoF~A35x81QeVtB?^?Lf~lwAE?gsg~r-`xCI z*QqE-wQL~G&Z{?(hppPp5+moD_>Ma~O|(cx%TV#sKmX_rf+_UH{UH zcKw@0(R%*Vi!DIpHvUXPbiB$8rBmGJlU){=I(x0|#GDe|NWz=zG8Ly$>YzoEdA&iq zcM?iQX5`e);|@t@_9GNC(Y4d$E0=j!2qvroiXjE^Y^UD4!$yUam)(ZN%=MFhbFmR~ zr(#U>wp31Sx3}&LgEem_R53nm8xN>5bCIE@a>mfD;b9M-Bz+wzxV8XrH`c{+D-4v^ z2B6R33>l-o$jgbhmx|CjF2F&fk$Bm%>)f*-%d9exhl(dgf!lDaLN8YQ99Q-c6Ph7T z3tAd&QBH5NN@WNkQqxwl75r#e*R6$X)qLBYL?y-R(qylV_aOCFflGW$@+&sZ>BGnW zkKE#aOa!?YVPy}If{`K%rk|%9+K@jJqiuPS4GF}#9(y6;2Zo4UZh{`xz7lnho z3Ya~?q$SxC4&$KF5jNnCcgJNiBz`nr;clvli4_9>ojf`y0fn&<;#=LAn_ELD+#0e* zWcy47!(k*`YZ1)1l%kOm__3&o8QY9Cz7M)q zvdo7&YY4AighjX?n1fiPTJ!-v`)@t8dJ%hQ2h;@ zH<$Iu1j87V6zXeuzBv69b$O}jn2DaZe7$|1|3>bx6!l#2bhG16)0MNW`S&e*Ws)w; z6uD&)|J;5ZcKU04Ncr8RA^kQPh*@n&^JOS{V987WF9S6u_p7!UG%EVn3c%e+7&(Q- z@IZK4WyQXxb92kvwj{4xlkUk6F!vO?lYyaSZ;19O6d0kwiDNHODjn>2>z`Y>!6aOV zN;paXq)~4t=KFkWRfJJ zeDgdnq+kH-sDS@AY78GEuO|V1q>~f9w5ju8&d)Xu&@WOJ(#Mai#E1MU zw}F8|$e#UJ17a`KI%ra`wi1+nU;AR3g7%@s5KGCjsfzDe;VdHW-M_xEQKl_YR=}0qJGTGCjiGi$ixIYCTWZ9A>Xl$wa z{%Nk1H3Jyqb~0NFYML{NgY&6@*oq*IemgIb(&Zm9<@+XczpANSKofC2WF?Wr4PVn< zEBS-i=*$5)jTp=p=merb*N(AF%+eR(;k9uIUr`*o?biXf)rZ-vZ+|!h8moMlYB=Zh zI`F{Ss>@Etv6gKs!|yXDbeqFp>iHNl?XuIqKlkH=npE4+ ziWB7n{+MRxBjsLi3<%)u5Gqq{bf9-JWRqr} zRq7>vt_v`F-clO&;So0%L~a!4Qo^qA+hbb^Da8x}6N9~7Cw zWbSE^do+fN9w|Dyt%#8MCpo#z5nEl5ZWYz`ocZg@R6n-%w0lu z%i>dA&Ln~S5r-=3Dx9$QOlpU&h(yE?%5CKNCFaEjBiD_&1yX`02txI7Fb6J9`I6N}5l z=J*mS4GJAh@QrZ&6T9+0?I2DCcjc0Ad6pP&ZH(CW(Ob9FEu@Z_5Z7X_e?DSM)+#nB zgGE7b89g=NW11K4zywaBl#%!nfe8(iIh^>*q;W{b+m${P%`Nf{$zMDn7HB!+qZ=)Ue#BK(7E86ij$hkB0doSP9$TaGg@vfo5vM@xZPIu<) zqKC2Pl)6$jvK*yZHy9clqxKJSJQ9NK^xe^p9Nxmnc^{^`nLB845YNNL_^(V_?*s#X zkV=(2`}F;F`iU0J+9-TSzC^UM>00{xU96ul;`a%c6B3of)~>>k=9YFp>s73bl`@mv zB2OP4dM4Q|PkX;pGVih{t@8^xa^P2boeC%7zxjjIG$lJhXalZ@ZnMvMLNG}(bkNc6 z=+XQbmw8@LOUexM48u@SjQ)tp559#}kZdiRllX$T<9HkRu}aT!B1(Qe$EBV;Z73vG zMG>#c;SlGeb>bx%LDNa*C0;HpcKMdhqL?Y0WJo7}63ZQUVc!SCd8hSZ2ryY*C!JR$ zE6)=3jWCKgD8B`c>#0vGbk@>$5`)dR3<{3%If^qT&Tg<*rykpxZzf2g3pKABrs>yy zwTqKm`^n|$fw+6z{D)^-0i2;o@Iipi5Z_xR(k*Z5zJ4F1xR2?s2(S%WF9SAo)?N=u ze?**2?O8pP1d$dKcK4Ebwj2JHdn@)l5wZGGu1Bl$wkT5A#Vc^}_93!`eY{$)NB(V9 zIs*Pg`dl8m-a#E8L9G|FI_vjNr`Sf+dI+rEoS zHi)t%Y=+@ydF(ilN}T7ia$9~p}nJQ&KWWDPRM zfGx^Md49rP_buchAO1?FP9`;s)qb7Hsb@883_AJ|%M}CNH%)&RFzm`M;*OJPfybL& 
z{o`OUh$iEG51sSNw0K*M^u5K=EFGNe2lv{>;3voTYE?A|31p6{Y!m^P|8W#9QID3GTc1-GAb)8W(8bpNEx+H;jk|!(B(i ze%w9O4G^Js$O*sh`zvWFR6qp!@Hd$kBREvsPPTy>hu=wdK20`n*MUVF_iYX#WUcu7 zni~Wt*y*{7Ni_#DrTr9hf}%D0(IV`_nk4<#q6YN45QsK`#jvH0j{6!NWI2yzVl^{V zO2KLQXQ18N@;t_A`mCE>o+gp!#O$AZDyVnpVOd@WYOhd5hNOpPa0ocbq^%mxM;yH6RQrS={o7%BFqb71#1VS9f&P_B_S5KNQxI#!ix2og zizy3&{tQr@a@L;yMoD0eK9!vZtaRWvfUe`iwjl2X-3x~+G7@0Xqm{#wN|Zi3pQ$R_ z3+@U!DbwEg1d{$v_ERsN3Ij}1ji1}XOcD$T#@r|uS>kGsdh1r5!uAwOut(&5+FL7I z-6#~cAR(L6enrB`V$UtmhB5cED($7Xydu;Pi?}pxnt~o8Jk#`C@4BUA`Yy}Q6nO)6 z4Us=4bXyXaIUGK>8jfWq}5n4AxbWym>-9pB9uNp8qV7nT3dmC@|!c0x}tfQ zRYHjEAm($n?^8*nQ<`@LdbDW#N%v;#t&qTi$UnH=T=OSN9M4$C=c$WRZn2Oh=w_u2?^k`(YF1J_10z~c~MTU=N-C-V69MF(WxjD z%79w@);Ka-={&AN16}Av0x3Fk$newMIp3rOiF|e`C57VAA24}G3Yscbm>gqFg0U*> zQQjN<-GtZbjEO5WoigR(?L|-+h;WvN%s(Z*WJMpD;8G1w+rZIb!B^!u#V7 zc-0!xO!P|j!n*H~r(6Qxe-6*1&48MjJ{it1#@VI8oMDkEPG;5>JNd(e#p&?|9STl< zeIhnn!={vWT^&D}9iYiG6s*Ff@h*|1zBWye1|6KMGDeq`=^sO&e=6fIfs?Xh-v?^z zb&MQp`&D^)yVIz3qHLLhQ@$(fX~4D6t!<+ICU-qoc$o~{gWgnteJ1~8+coBeSux!Y zdfRP&y;pP5cJV^O^(5@T4=-HvbcKbkA`8>CGMR2|N$U@EW_v!iYtL4Y(b?=o4M+I+ zrK@s?!cUDWWx&f3;tfnRet2rKG!L1P9lZOF)oP@^OKteB9=VN6C?^%;0e!1-5v|gH zeuPl(rMH|iM*IrcKWhwsIFw1m8bb2(WEhdcdD!YAYB)1ZT#hO;-THuBIcjh|8caV_?54a2v`yTL;y5efO)ps4p3@Ret8v(=vPb4%|4*<-mhr$N$Mm4l{4*8x;~7StmJMnKvA@E}0JhM#bU=eM1> zHT&dcd{s?ho>mtcPlDSDcnm!0tbfD=i2JUwii)BT;n(?k6I8FsJ>Xh1Oom0Kr9pMI z2eE6!<{h9`&~WO2Jg?Dv_Y#lXx6o<}0%TtAjH9ckdJyXKbM8J|xjDLD+7dZ847x4p zEV1-k|Ekh`lJsCXu49Otu-)?^4}_T}4$_6LGJ4i;UiId&?}q(@mj~A=vA9nq1uR^6 zcZSoD7qds180a#nN83W86pCO)ohF?8GwBA5pSqmrw*ap$*f`OSvbjtRvX|YP!pFx>kkS0tvnABEE9J}CSm{6U4+&Pt&2R#OqT~!e^h5VI#8Y8JM$dn} z=fQ_;z8a#)H0frBgUUp>c*%2;+%wwIyZabqIRuBB-`k+d>iw~0!m{lG&V;}KPqC}R zJC489s2Sd!ml?-hqUzdq8Ws@N3zd9^^l5(J#Lo&in=AtwtEaNhvQZ`3FMyKiE(Jd; zdw%-PEKAsijT#HFj;6Ssm7qOw6cjcnd>N21)1RcnI`yY7EWb^D5^+9-*nR%;wj&?8 zOY4_2SNG@6-N^0-cc}WQkopMmy#3TM@XvWy`u!3&oa7_TS0sGU1}uF&I10Oocyj;G zbLQK>ArWOi6>-WKPDx#>Pz_RKNz<;y-TKT+j;}qSXo-Vop4I1UYQ)0YD@!^uX^-uT zp#zgtRWhC}0}V6aWa?!DH`J-e;~Nt`U=uIl=AH|)OPRQjM|CVgpRw94WKsW)qxw_g zZ?vaQbwRCKh#|i9jDcsLMMwdgXS>*_*|SMzv#1fBO7?BmmoJA467S#V0E@5G%+%(b zG2k$dacM=x=Xu`BZ#I~IG_iO{Vj3P|)nyF&)sf0&D&ak=tbhpb;;{Sc5P$XoB8HfH z!6=uLSxKOr)x{_8uC?N+#KPcVr~KWp?w?yrCHY9$__`H$zL`_-5Ta(c5V~Zy%2fy3 zJgJqRb|Ws6w(`7QU;eFFkqCR=Um%(C^$y~S5HMbZxstrj!-ATK!h~K&*@Bvt>B*%0 z)-qZik|ZWG=wDLBl*>e?U*lP40;u%;KCwt&%cf*m3k8`K9`5n?9vQ%9jIiEgw-f0P zyXMFr@?zr1G?hHfKnb?37GM1u%QjU`s!)!LkG9gbJ)FbRxACPchN1BlrjyU(NM@Rs z##XOs$QfRX4!U>Z}N&HQuF5DzV=$ihKy@k zi?y4D>JIcbj5TJx>J45KN_I>ytci~q-|O}hc@7Wz!l^~QZW(HtdTaHJYCSmg>eXI< z2|V$^YwF1nh((HE`jDl;=cP{HE-<`b<` ze0&Tq#$-e;{TShw-;Bb-k##4GZ;T!#6|P&(VPlx3J{%>e$By3=r%r3ItzmM9IpDr` zHD(f5HIU^^ocloDqEYk2r97vcl*{v|o=&CS5RlZYlZM+~jjP4zo18SnF<3D73>Eb( zX4%ZSW}1xmtwhA;DO3aB`~^)&{14+hN;`V;ORtjb#tDVpb!vx#?|~MVy%=JrNqt*g{u73xT)aN34svm}8Y|WxI<(Ib@KeO%uw*Fp zAFC92Np=hT43QZN=_w%k_2MDlwz3UzfOr3SU68}wv-5NF!Dh0#aSoAmAE~=r`!_eN zI}f|Y?y+3*X7GFl{o<(r6H4issEoqlv$U5@-q99G=q1A@hpeVX+Z#Bo#xXFcT1ISq zH&2S-#>pJu)r(UPbSk$0!|u)jiMnI%?cbr;azY5m%`x*mGL<6nRtR+)c2wmBln1@f zW7@(uyD*-u;B3zuC;-SrZKV%aUzqRbM$K%8*ng{2lM`uRE6^#XYuQB0@ZePXKmzQ< z$es)C(!w8}g_c8f&sSMKN-?{=&z#v`r5SEO)rHRT>LuM3|-kFWyuCYqH_l~;n`A91D zhz^^;HXl-Aqevo78Z&8XG#Ot!@92K_kQ8~TbSVEjy!j+v2lF4NXMmQR-Zv0 zeex;Lo`_xa3Wt8(=Y6l*G7eiFy!<^vp{YJlNu)~!i4@-Y(|%+!NV$)gFFt`H0rVZ> z0XKfzq5PUj#dsLL5931+b%L#60hU+1tkSP=!gsmNvAZvwW1epLzIZ{Gb)o-vW}=DXv^TTv@{?x2emDWpkEc0*5)u;Tx~ zMHU8|hQeBZa{NNv-K}!o-HY(;X+r$Ou2a6Ig3mUjTo)zw8l~hFluhB( zK`dNpBn1hf?1$eUEN>F8!{^@4owRoNMk| z?ACGsdy+*>$Z{9-gp&x~^-#Q&u7z{w-2Rc4X|Y0-VBLRJ25?MAt$zY&*0aH~Cn%8% 
zWU*zq7T8k#XO#OK`Q(x$v{|*Ul2fT<1u_VflP0d6`1VTQ$zbD&OGnXv*Sv^d#`u%B z`iTxR!dX9)Qi!}tuad_PgRb)<>1l~tNg5Zme5TBkzTA5;UWtom&qsYGEZO~zOc+)E~2rL_BkdjI`L$B(v0w_|Zr#=&qpwnR#%Gt|H)K+wov@c-ppQZ~^ zvo03;M;iYQz!73xNx6m!AZI1|5xeU5mI0?fzngIdWB81>baq_F_#sjxu12MiRMhTX z`e#ekjNi9=0`FVB>3t8xxcm5}bMX4tuykY{h53QSBYDX~e4#A(S&Z1O2TWM2IzcwF zB8ZDgKjCU7M3$cOYb&Lde}RD{u-_|>4Qrx9hTv*P>dpCjICQR7MQI9V%H?b#3AWZT zrJOmBKWFBqn9)H9wVk@&Jt5v$%bU!+nz~jN?x@NisdMOyGOoA0Ib{tpsa3kk$Sm)J+*M zE-d5I;1{tUPD=(>L5Z4- zShxp#2ya4%&T*OMVfPztRFb67zN2$ZNJ05{1IXe=;XVaOD~+sjq1lL@McFIx z;6|W0aKg{xGbDiDuG^CN@i7GU4XDO)OrdEIc}066coa4GMwgnz&HhmBbaGo>?yE7& z(g$T?TYL#%uWj-bj`S&Zig=reXmL$H(KDsocvvVne&n< z2}I63PkrYVfdx1y*F9KuBjkC-Iy{r;!8d+AM38wQ^6KlbqLZ*fK-^QMc=Gp-zJ27W zYDB~f)A2J29M=4XT63zua%5RIWD9`!6DW(oC{CEVMbP)eVZZlsS=%99`SwXDnwmZq z??zc0iA8PKH8c|f_NPPY4)MdI5)2#0VoJ4G*GRq@a2Rd(()vGZ0n%676ly0(ukoLW z16J{0%N;zb0#-KIxo6^;52Z7sRy{_g(sN^?18=Jtqsfxx3l#|7;~i7qHi$mM)?U^u z5Z#C;3UIaf^I5aW8Of)f->(PGZ|8KItXbJ!@O-_RYg!L-f;WfnNf)u+OC!sx$-jCJ z`j!`5dULcmgE!X@KFATAO9X3Z>f}F9Aj(19JO=3TWRku71cFLF5IqK5IIpGJaArywpZIO6=ok zw@2m`TxejPv4{9GjtmYb!Z#L^wVz*FJqH4!1-=^G82g zu%xd&0{ee`id;MET}K+g_4=)}MZsOSnA+zDR50ob3!R;YZxoYU0d6J-jS^qhe5D4S zosK*4gV_Zz-su5{(_oBrkjV=hjEt1!239~`HICh>zoMZT^JJl0QPgKAq8`l!dF3v= z{KOeGur9Xo5`UwZl7ROHw$h9eA#r3tm4~9hTYZd+vJv(^CuxBAD=`+=0{>=u;#qUb zA}?`^{z*K+Qa2{^!BynqETNU8owYdb>GKZW_v9Z|s`=`7bAvBG#6R8r8c2Njsg__7 z#LNSetG%XpdT)iyMQ%uo2kMXg1nUFMG*RF?Wx$g?WUQD({Js<0Ml4Bgs~>a}-@?5R zOWZU^LdY=D8mdYI-?91|DFortKW~xMuo4>N3PW!A#*RM4TDctu+;Wd1vv}mD z6Q2PDl1aFK(SomuA26@oiGM~iE9rCDPlyh)yBlPwy($%P!x+vVTgFFLqVoc+MMi;K zSa>SKUmvUn6p*X!+6Z3W$03=<=EWtSwo8t(;bJcJJKn(o(M0@yANwax3**Hgks1rX zp;b+|VUMRd>nWy(H`Oc+nFh$XfJ!Fb7e_SC`vl0EngjAx?$l%XAnm^6LXr;4dFvl= z{0Wtfb0Mr)4sNyXY{NZeAj9QBHFmn$y%zhm*M4Cqfk$BWzUGx z=D6Hr(YSPW%1#X?NNsP{}iL^DWxu75pfmHHj}&~bQhl7+i&z^vQh5;A@$KX{22 zqI9FLSb{K=r4iSE)>ttGN;|0X1A7sMtNc9`HEpXjKcWk_P0^sHYEda!8>h^?zGTf#ed)Um zp}8jzVe&|1tIE z@lb#7`}i1S&mJPX6iF!AGL)2E){w@Qec$)7hU{d^S}H~MY+(l3_a(dRjD25bn3?DI z+3WNEJs!V*{4*X8bB_DI&vjk*b?#$fo2jTXG-ZmV@#dQ4ASJ0?b-gA`%d}1!D0Vm* zB>pbWAWbq(%o&neI)IST2g&M^=>Ql6yE7OEqP{LDsH@u%^5y^SmkkG{<-%NKrS9lF z4y5mlM6`E{Y<`>)zy{1bFft#EFYg2!4()icHQ*!UVE+b#L($iQp{rlTR7qy+U$?qh z{gz30C%#oG#2+|1bMG!m1m!mhVL`El7s)&%mJ}_;ZO>;F+UmhC?6$~<_xjtBmRp$l zBBU7QEfQg9|I{aeb}H<8rClsKNa z$4JRya~{3=JW7^;@p1eCrNmP=wh6AubNY1ZucZx<*)4F3XO4)IW;)?0{d5n@kcQBQ zGS8IvS2=Vzi3=NMsE0lPMM;1U4DE-j`YoG{7eGuRMIu8GhZ7GIL=`}#5TStuPE2Sc zD^YljLdfv0=L69XVDD;w^S|KuPeRXsOi0{^+*A4yy%`(E9S@~Odv@LYZq@0rj(9Nru_rru_bw-%Yq`b-h!HC_52uMQ{zjJ9 z3_eTUTr2FDF9xe25Ap34!>Ei+md@5LZ;ORz=vu-bE-r)fV+IdS4*^ ztV9U@ku`d-o_o{tibZaS*W;=gQXZaEwr`r0=(UZZm2ce%f@$B8v&M@S26Cp_4-SQg zsnnlK-xV%*4bUh03nhEO!{0zl#0q`=GuwO2>@R*tNyR41k>9;>eSkJw>jU$j$IEfw_3yqBZ85kz zg=S#Q9+ICW&T7Mnxid_V=4j|S65tYqh0`5$g3I|nc(Ufdg$LGxqwm1!_(gw@l>FB^ z(2219$=wUbaN&J__GY}+{C>v&k7!Ir1GrE5*w$voAw^b|cJ9?v5d}9z{!rTY5hcxh z>-MDs8Uk9gmKKBDTlZPc`Ycr_Rh37(&|=UXheo>(8Qe)F_Iz)N0u{u~)qrKS$CH|`0R3AZD~4J%50{3&hP zYrKvmQv2S+k}rh7NHSJhjsvXFUMn&4n@>XS50(RJa&50oTBK^)J7l-T#M2NO;1Lef zfoWkT>}`5Ak@0m{gcAmFcpI)H3KW-bieeiR*n;UK>{{t zP}#5kD+=ZR7YdbV7-|ECzl;;VIi2_@q|)PH7)S0Y;9%Mai7FnBCbPW;X}Z!m>z;h) z)3*pFqKbxOGxFCyT|qX*`sv-x7he+nMxpyIA3~t_I7PSjUP7+Dq~X`PW&oA2dN0-% zBLc&El~r((h&u1Rlp9znyvdnb7)BplcAx%Ix0FPAuJ@+yu{3cWc_pcS4F|MVm-IN| zDfRp+`;5Y^$?oUe1?T7=^)he0RebyDk8+2V<2VgY7oWd6IS&$$QoP>~a;rdpuY&jL zxaUVc=I7-p0F|M2TIzwL)>KY@dK?^H!KbwN3}GUeCKp z*e0R+6R|C_M2jz}rff?zc_nA#f*W0*8-$14vTWLza=jZ#sv{P&cjSTcB-RmZP5m79 ziRSYml2ZwR>~ARi@%CeZb&fVSBNyYakR933G*w_$hG#r4_+Sz*pV_j_E@K(fxH+$} 

$O#6vPqT1dBg;<9|0s1Wu-B0aGpQX_v1+lZy_>Uf4iG4YGe`qts zv%<@$;3VV!v(UmJW-J@e*ZocWqjk5`;g=8(Je8sf*XfTEw%=7z@_!IP+;rh(aJVQR>f}3I5SdJ@nRL?G)(s_?ra88UJMI4tmNH!s~ii$>;Yr6q>Oh*w)}5vv;U!iy!31)Lr`eeT0dAvW7J0ho4H1f8BD zKpD)e^jzFN;AHh8mtO!n^KzG8`ia^i)k=4&qqlZaOY?JHu0wL>_#LG-c|p!v6v;*c zLvpTBx?1nkD+VQxl|AiI-@8{Z>49h@&th8fUaxK8Qe{j0s=Ouqh#lv4BTK6zS^Lho zb>lOKlEFHP>y7@-$LWq*`267=x4Wb7UfSFegS{p+S~Z~zQHy5`dv3{=szv!oh4^sO z-cijYd`3l)ky|lU*&HjfV=)yf0OqA~jTQe9*{e3o)=`&fB<{tVrDL`VRB{!o5N{;z zRF-c~s}eOlBMs>615-iz@;KT7i83)x2eHc{6(rdM1QkY&uX$KGZw8j@K@1oIBm%Z2 zM*CWUJg6;mkRo25y_<`53rY0HzjdEYm1KM5NmFQ?@t@ch0~8(>5J^nFW?BpHCes^g zG>*Mo-c~X|{CQCNxHSO@1H0Qwtx!QumXM)4S2NcBn6SRGBd9LhX>e2M>5}44lXh&f3zHK4ERNZ6+kP0GF`nT^IQQ0G z?R!$)q}WJ!XbVyCj~XocH{`Y)c~J+5|MCLAyv2-jpcKoYo;C0J5Lx~vha|GBg1We= zz4>A&=(FK6LD$mW%-&#F+qtGT&5gMorJCkkY4~fA8gBUq(>ofyX3MLEdarQpOBZ7877{llWJ-fOBPL$uyM4l3FpObZVAme8bb z&|Qok^`O8yORrvPfIfoUD}#L9@OfDWTlvVwuR=R#b~mHvNAVld9Ff8LQUbwkJ%=xE z7CD3r_V={hx%z?)bh|@8qHC$T#2pg7eKbQ1Qtf3qIA=bUr3UpJcvR1}>h;Kl6G=%Hvk9@mIm^7PKnksYwj zc2I?Qb~u;$8@5=*wId67kk%7TULSj68XP+io{dq+czstN@A5Sa+1C@IP!||`}bLDVXsV6R&YG={b5wTjU+HFuwV*N z{S_vP?%MK^M~iM@So0Gi>^JHzJ53g)JQ}rv!-AY@po`q$OtEVUA(+cDi370mA8+v zKbg{eFWV-MNVOSf2YX`h&5~p!h8#p{naXf*%m*O}l&E#t#w*Z-~)KBiwI@U!WV zovf0(=dZ_+q||Xbh3nf!SA#kwvtRAVGhh3~wB+Bqy|RQKDlKVQhxQybb$Y(KIaf2u z%IbAwyhzNp$ay6d)g;}SlNoN>#Kcin-A9xWXNO>?T@TgCbnOdN92b2$By$Uz?9;gf z`QbRJLaICorCM*e2uql2Z;Z4Wz9M|1kqdOUNx77{;_0RvNTlx3JO6y1D};^#?NRX% zfPxQ$8>eH>ytcjA%rGn3Fskuk40}`n%MuC*T;#r&xow9XtRDm{AwMlFgiep&x~1iC z9IOP1Gq1p@bthK(oNXBSJ=@lym{0b7gRZg}+ILn1RwX&E%c1(RPal)MaaA`8;ki>` zYUQIi=8VsBzMgc(=3-z*Hgu3L(&}K#>?tq*l8Xl0jY>KgeOQcUTpqJ!*J_kNpJC*j z6_i09#;iL<5ptDZ9`-N&;)*wGtj1uO^4UF=4E=zFRxkNPnO9!WmUxlN=I~A4a##jy zl$D;4*b^=VrhSSDt{&Hq2E3DIZukS24Y`3Sco+}hq51v01H%AUPZ|ds54wU|IbQy9 zZ_&y7;;Zwi(vE+Ii=_N)&Lm?nbShl3XKMZuJoC4M$9VA1TPXicBK9D5VoA=T>hG>z z`}Jy6J;t^pC)3dEQRIpKt_HNUm!Sq0fnx&Uj7MlY#qtk!1g=G|{Wx3VV6sm9<=NX! z7_Sz_V@>*cec$7{01_Hb!7BqO#jY20VgAPXUvgZCjbXU(VMb5t6kmOr?&XiIkqvQ7 zC}lMSxYz=hU*){QG3taxo3TT{ox%iV=tNCzVbYep*575UX&BStB3Qi?2aj+o@oAS! zT?g4e;a~B2@Tpbnbfq~Yvb0@}V}6NrL^s~7*Qip~72!shrY9_;j3TtR$I#?xq*TGC zrB9zRRgOm+=ybZh?i-I|q)lbS5A4oUh!jA$F5tXcCtz(-$QF5f9h^Q<1y~Z@&+`WqQ~|PI;ftNz51M-gdyh`=*1HlqzLu%# z6`#lLqaW12e9mt&K?lYpI@Yvd3RcWg8b#BB>?D)L#&sHo)`3rQ_Fh$8$`6_|@5n<^ zq>+PR8g|YPJ}EkmXKP$0MP3FdO8Sr>EHSR z*|MS~ae^KzI~giQP}w&@0mZ?w((8-Vk3`=i$@9$di`xj_cf=O7X!sn~Kju`gk{R*C2|Db3C@j2 zz0|FrGiI?y^YVxv4b_sD-yKs;W~(Y`et7KIJ5Rkb?05ig^ZRa>9ST=M>~fRMbCU1# z>SI1m(J#kx?%fJ-ITU;HG+McBJ@ejeGL4zc*_*+u=bn7*U?~|ISo#31ipKi4CEoR+ zu>mrTOHk{wcvIwI?fj(UZmn9`d$I9IAl%4a^u_bD`8-^V-xg3vC?3lH9SBMBD9b%S zdkN;E1^5rQ0MWA6XMV_*_%D9{4bie_U?TS&Y@2(VdOdUYO#W@m3F1Wh>?Q4uu@R-r z@e+K9rAnUgaxcW)KmWaJkbVFpH-R7({ceqm_W?_txKz3oUv6aRk@qzQ^g1tBQ66+| zwyTp6^O$WEaU~o<`YFH1C8W|xiks3YV;1o@ckAVN-N8EGw#(UfhDsQ=|rM*_Oy+xqp)6-J%FBG^wj1s!<53ruZe zEew$Q@2qz?SeSZzc4>ZLuLFEZ?&l!!Z4jmsg2WL^+ zgI_wYWF!%ewlhK!0|l-6J6DEtMFUDBeKV<56vq_FyTkd(cE{^A;jQQhztb_THvnjY zd^~IxbSAogOfJ0jM6=bqw4cRO@~s?K{L9S6_-hfB-&RW7JcHy>5uAegKglgCr81`a zSbaW)t68)|Wp>b z%9gcNIrHuY=kBNSBw>Fk0&MQ?U1~IX_q#K^r181y-(Ga{WBswwt)$$Kt6= zNd!8xU`=E&7A{fad}+&{pf6FBO(h#WDNp|Q9b?<>QK9mmWJstV zmR13pd%^6B?;(jAw{yy;mXu`OPz|1n!Rx4dMLG^#nw+hB%`4tK|2-4iO}P$T0QkS& ztCJ1;U9Jl&a5D;wBLY)B*g!TA?Ro>wss};DjT=8ffIafYsVc~W{M*`B8aeu0=b#O7 zb~s?47*PDp%X`)^`~a|Af@UMOM+lkv!tN_be$1$FDJIF#r6GKvPg$_FbE;6^oGno; z(~+gS<~}JeZIWZu?DHwK2qEt@ana>*U+#q00T0lWlv2bB#ZI94O9(9^tfyZw)r3U~ z3MTb$6tu8g++Q+XF>^OItoUbO>q(uuo!I@PmdM+UF71Vg@n?sybai~CO}%* zoZE#O*Gr`#M}c39)BGNb-xrr_%vyvJ3WlWF@ZFK5N_D3C-!vXp01*1No5`NNVNf)! 
zxzNQ%`n2Bd3C}smAZ~0Ec-IRQJ@0JJdOmHKfcF?{8wD*AGLk-;8zQiFH^D@dZ^})fn`S;_MpH$)mX3GQmy*#KC zhG%2;@!-tqj^75Cu9JRI#Pc zsTj|DohyKqrx^u4-l5+wb9UR}&HNY;%>ixq?$0oq_?Y;STw44w%{`6|PI8l=!kI}y zjy%UU`NZRdN5ZqGC65aFo-N9Vn+ByzA`?;~a7uTLN=7mkf=;xj6vhZaAP=11ePtM1 zID3HDtFs2fH&isq%rbAwN!AY}35+F8`p&Jdz#`7&=Z-q8p$#8mSdoLq_CQ9=EXuno zYQJ^b%?jVpAp-j&fY1#xjG50$G*R{({tv8Q1$n|E9Rp#95^ESP!DS}Vx zrL5K19KvqPMOtqYMNXLaPPiXKRH*A_2pC`Xs7FVEb7QQo* zc2hlV)iwsthY275@jY&+@4SlyDpxRg+nLFlEUuOIL+xk@rM4zbTLH~)cGd4}86<2X z1d+Nbm?*TQKU{JX;8&imek&yOFnoC^5-dK|7H`b)!YU06xLKj3QsQYW9~~uq_?-G zq#)XHvo=G1IE?B*x6OpbcC!cJVG87N+#D(|+mR8PG!8}}^zpObW$vs)GyC~5kwkbT zy(YC~9KGEJfwY|qXpsV`$Oehh$GWKm!g)ptdYGChXtyog&~Zd(ns_a6y)iH&cTOoZ z%-}3x(_(I(;j2OCA~RTq3dOnrPw4KcRzjs+Ue4JVU^ghxu{iGp0s+yh^$OwC`*qJR z<*msNK+6RvITjDWWmaz(u+UdiNk}SOq>W5iC~d+a`WDhCfHRU7bqE??G+tjQ5e*K`z_wY^iMKwznt5G9ELCJge zTzc5BWKXjDEwxt1SAMmAo)_uAvyE2#G8oP++nu!rR{7oT?v3a@HbU7X+oB@jTG~#* z{(OGsu5&=auejgO4mW`TNqC7|0)=o0d7EJ70F(iS25KXDuEzAz+~dfmb(~?M)Pk`n z{ez@x>Zjq|ZSk8)mLK1?#E}MTHw5l<6+VhID+bGLIZ0u^9%`0eW40C8qHct&B0<4V z$W;Y?4px1gA&o|V-#Sx47Mj!J5;2z;XUUi9blQb1OCm_EHUMnE$BzH_UjDJWf`1|w zSXlQXT&sSjwAEY&-RwLbhXw+Oz|%gk`cjfpM1!;gZ2Cp2eAKq5=X&vjj;g{ZDwAs8U7?#fJ*( z!>6=CeTRzq^Xf5sD12BvI!QkHY%S^srD)}+2Ihb1D$9^R8wf_1W@vMx5ip_B*nON# z(4X~Ven>94sCCECtZs3%Wa#V}_&xfYMGvmf-t#GMik}>OGuV=KUm*_QE=TzAjra`@ zs8?F@afi{I7snZ$?}$VoENP{V&arX^4R&oF%XZgvW>#|sU+vmelDLjr^N%TyfsTGD zf*@Fw`_z)f7wD^W{Hi^zS%wQt+d32HPJ-#1wwU&>0aI?l!1Z<3-(H5Dt)}Otij?Y$ zjL?;armDF_xPHHCd^Bz`H#+1TBL$&!+;jg8aV>Dp1-OV$@Zk(n`v3vE1iprwM}cBN zicB8F*YDTA7|ui18!G65f6{OR31RJ}8+Wr3y2U9Hhux2*D1oZNcs-e1T%WO}HjuOq z5B?ThcW&XlR<>Lmz&Q@DQ78}U7MO2rHfJVY!{5JS4|O2N_$}ea{w7WX!HV-cxlZ~w zgO;5%d`}UkG%0JMkE6`0bD~LgCt%_{W_t7v)quVblsVjB1+HA2@ioLa&+6Y#eNDtI zVweO4LqB1`n=x$n;2X6~J_f(%-W^MD)-~)8=0Bd3hTayYXv1|P^_*7cdF}MJC2z%f z_aJ^Yqm%iQw(Tr|y%OZs3_@c=E9|rdv$F#wMe}%oCVdzt8G0h~J1H?n>Vl!j0%W95 zQA8&}N5Cd9J5NsIfB@Y50ZkMkI_%yC!YVHs|Ip z%^SevtZ>kYp{TR=F~M!HXFSIKd{VzinEzcV-5xqqL~xOVvv6SXeQ^#ZPbHy<;tW|h z0^?AZ3NbS7GasC@tEU92daszSqL6|aB2nV8w*r0+(cbNIAEpsyVq!24-3IiBh&pe> zleLPerim;0XOq0!0E%M@hV;Yg)k8d9iRq{kOR{2*>rXtt37^nyh7-62`VXrds&q+5 zBBh!Qz{C~!`2M?K|Ary!B!x}~GPP+%SDi$1jU~_ym3dV{6f!=R-qI2`ueOC}lPzV6 zjpBp4D9?!T;|7>OwD&pTQ~IEw98n4;DFx!RDH_jxJo<-Za{*T3;GQc^bL)j$WTHelMG_Y62s`NxLj<_zm-~U{##($ZY>4td z!?InV#hRh1XTM3Os$Y3&A2xv6T_BzqC|Pe%c>p>=Yc>T@k;u$%dCdJnowWqYx6D^+ zgHE6%qlOta;v^r5XMzfi(@Jl@2g~>hbVK3aplv5Huc1+!|Lkh=4?tyBi~5B1tyD#A zx$9b=Xinzs_#NQ99W%(1e;c}?Y)+xbx9Z(J*ZP{m%lZ9*`%p<3eySwzLC?5 z-l49jW}8J)MzR%m`78XvILk{@3@=yJs0x0j*F#&lE1i4bTtJfdl61g@L6q;zwTwBYpYShiU zy2IePeaB-ZuLaUb$r@f4u&%2|P&Zbm%KKti)R1zP6AkOe))jaoG@i1(>YU*K>K*-Wc_Y$q27-T{Ol~lt+P2qAl@qSWa%=lJu1hJ{x*if+#5Zup|u%yR@e*F{y&bcDj=$Mi_#_1AV}wslG5E> zQqmpL-3(pQogxZS(%s!Df^>Ix4|DGP_hBCAVZL)_e`~MU`)XsD=CEERGCGHur-nND zn9qkwXvC7XrUpJP(F$Fm|22LKkY#<{_{35X*k$Yd#-$VIqr(1T&*(R;tm$TS!Ks+{ z7ShzQOa(YH8^V|1_#H51rD$9=2)a@4&WQnc?)YP~&~X#!0jws0K-L`CJRd&~!QwL; zNP!#__A>JVa+c`~o|mmZPMDkdzmMA+G7+|Ki3#g1cC{_&_1Tt<$$J6na%K*@EW@r7 z>6=pDTb9M5IwU!CG?`^&5~M=5xU)z>mM4*9Bu0w8WirJw4=Fq--ehkqcZ5+2!LCwn z!>zF_{mn}6d|p59d_%>Y|EV4Psd#k%ToC=n1ti8iZ!#Zr^v~FeTF<_+Es@yjUt{3$ zhpnFe?8OT!h+7{pY9dvFQBU-{G_}N`6Vhd9Peh7+ZwL=!b zhKv8em7G@gq0d+)T^*5)of|029D2Rlz`Cq}?U}er4RYc7QT(rI>#~d-+e?Keij@H0 zqR5R9A#LXg{swq_UhcNAqo`aby0q_B0g?ME4+BU;1F0BvX3t|RD#4?!sG>Mk;<#?e z3A^E3A5fsx7LXEw<~k%`I8oFbVWI$NrwEKDDkKznoHvVY?XUX}_D+WZ?(slp^WX%q zQ;K7`*?kQrP}C%C(Nqbkq9^TP_zdr0k3%qpS0vR^YbCO)ili9gO5H&!{ym$AI>{hZ zy-YAtF_ERq!cY-E(yRf*U$XmqPJJ6I7J+5|t^wn4p2_n&b4YZHFaG*gDP~BS$rlkF zCG*)YhhR1%M#AMF$nm@mo!guuEiuo# 
ze93#M$|BJJ0)3b*5`)q%IVIp%AXMPiIqz6EtImw`en$&kj1=IB!7$U<7`g(3jbFT} zDp!dvhuL2VpJCO3&{2dZKvGdnP&Z7={g;@Nj-JO^*0Wmz6QbQF;5P*LZs3~?oumQ= z6c0Z7;Z63MNXbmr8AD0k|L-T9QGkQ@fX#N`H^+?~z6sRnX~KpQdAazMJ6rrHCN0HDi14zBqd6G!XumA8q$UhM7;24^iDOB}d?6Ut!}Jx` z|CsWKy`5p2G4%de9LJ3?X`hA>fvJzB_!CL;XH!Mhdgq`8257Sa<1v{3iJ@LLV@WLNTRCXG4XMGK8%M5T%_th+)f|p zp~$VXWG0rDkI-2nH}v^@1`v|jZMxq#a8VD~x+zyDTuko;&819EtHX_cfU zHb*{-ZF9c}d+U`;^gNa)u%b01!Tmj+M1b)IKBx8?iPm!!c5C+2md;I%g^xXGj1 zh9_}u?a8h%ulK6aoVf>m*KzC+v@+f8b3Xq1Fn5dyPxN%`~2vIDK%JxD89nyY?z03FvGPeVV4xfPX+@WXHZ`VbQs}+Yy$Tl@2jr zkH+~a&#h{oqlM^VPS5Wh3}M&Z0Ph^A{tz_?lpU`Bre*?Z-1r3?gTVTCN+W#IzPb;2 zL%HgFian+0B`pHUcpDb5*ufxh=H3PBGWEl=@!J9ZVk8oMKl9r)8^^9wnk5noFN=SR z2*BQU*=umsm6+yH&@#d<(1sc?*8>c11M-y&vlFz(n%lb%J%y>C#QWifkdp?wL1q#< zh@!R(@vCJY=#1&izswVlpwl5M#1XVfRpq**Fi(Ky;AqfeK1jP*#s5u-rTR9lalp3U zy?j+1f!VGI3Bvk)>GB#`q`YKzyRsTz`~2xItF+dbJjRD;>Y8jcnb&!BmEDlST2wNO zjC#hp?A3JtR5nzxZyV1y>}BJx2X{%`fp%VXy?RqtKBG8_=!C8P{nu2RhQ!|}LS`C9 zu^hjUCBLh0eY>7^Z=eZ%nD>K)Wutw$6C2;#y7@^^kQtdydzMSf3*bd*m;dYaxu&lb zxz0s|S(Z6A{!uuOk5-Ooe8asy|71M5*F>!*3PT4%Z?hlXIwH{QBoOzm_^``j;nm?uq^e`y+MVmRIJM%-G_KHlSl0~8c^m+c9dKgh zFr?u3W(s7f2mE7r7Z|bL9f@EfmTSF67ndcJ918ar96-d%Xn*8-qtI6Sf`xf0>Zz6d zNhYepL^pP3BsLi-1qJ=o2N^Dx6oXz}OCtd6W<@-o8<<+XdC7h5+h(AERJ(Z$3E-T65rtj+un%I=8etp6sKmEpVD?8bE~Tdw zd<*7QTsrJ(ee&G?2Z_9WGIwj6I=DK^2Uqo#>IWOtgDXof)4SI-2m_IP3()=y^E8#5MIq`J*`+eJAL{3{UrA6o53(MNb)J z;x3PWk(@HENa&dRW=~wcNSC-2y;s^bLOx|>4GaY$*Sx!~QIWXT#+C~CH!(z&9&X59oNC}-Awxjcon4>zBozpoM-y^2}Aq4P+V{F|5Pw|(!iGQ)PCeKikDO? z4c+E`I@v_&Q89l3CVFme?i?H)C;nhjgIi6nsUfy)#zjq{-EiY@lf!XY*$mmbvbv2_ z`nbm=Tu1U;35~UjR{^`7-&Wg=hl-qRjTClJKqvSywK{NI^25W%ryEoR#p{hfq2b2; zL?&EF2eU}hl4FrS%7skFoah#`9a%l|#wUPveG|yLkm}U5VLhtZ4`j#=99-w0T^v%{ z(hffMGQ670#!JUy#Q_o3)%@0+ss#6QW84^HEQlL{uvTUzqd$)S46&f-RdRP!rHCi_ z>T|uc^Cp0D2}a)r{^KBhn>8_;)2-cifPV^{K^o>)@iR|otmAd4VxB0PYWf2=*!|yA zbU6=xOj zC&o3bEPfZn$P)-XNuN=8bZS1Sz)f^GQ|BDy;%Ch5Ft|c8QSyK+xi9iEkG$z`ci-ssX)@w3yu2Pc z&&9Af%S{gqj+Y6bPTwY5pGhZe1;O~rgGJwR=ZFyfKD_9U(d+D&P73D&cUB3~$8NVP z@NgBX<$5&&Yvr{=0t9>_24-SYY!7+?5GXC7^2eia&2;i3O?!JtTKK;3+G^H_y8nUV z3)rQXa8*!tF$C+O9h+aL0MG8Z{71gL=Q=6ZO~B%M!x+(4kplG*&w3EqYAiysK68MJ z=##2{gsMMxc}yzmSS4!sDMffd9YbFhKN$jD&8w!-6h}(o?wBXFdkX7rLS_F$Ps}7Y zBLDr{h#s7jxFvqk`&a&RINUb_E_Czd2)T!5JbzIbgshxZ@9w6o{Y?L@JU^!ML4ZyoIjv|h*BzA~fLZb9A~TN8byJRgn~SnyEG zzp$@l^wc^5D@_0tIWfy1Ktvq_)W?ZxpYr9q_*<7NY2t(qwO2}qo-|cV)}z#(5T0I_ zjmK`7`DrCnM+dRvB~ACp5k887NRfUZVwo<5D?c!>nMZ@B#A1Cy=+XpR@9YuWe-AG5 zZSJerwEcz(jC1?IP^GPY+WmuA8wjbXq6sg!SeVlK7YggXl$ohN8rhRJ&!+i*O&`yJ z$Q9W5syp9>((&dR?2?2~GkqvivoaOFV4lF`q4uWG-!zVmGF$;f zB#&{VSNVuPdtfHGJquLilSKMHOGyRZnWv8Q8(Xi#;EjCCq zf;PiYTbn$M{$?+K?b+q$yzRZXcul#h;VyWRmQg+bv{Y5|#31xAV?co2!g&r|<{g26 zd1fjg!ef1&2xt_^rp2<*FyXqANLC7e=tb+52*V!2xbDkWL?ALiO^|_fIYX*JbRymN z7vi}aL>#Sc`s+mMxlgPwrz-Ln7X%4=IWpKm!0&|zm_p$Px!B%o}u z0WhS=!Y~KD%f^mW;k1<1jzoc8(8I`!FlyG<-V22iPVG1EX0=>}<%vJu)N#Ak`Jppv z_x@)06d+i&mBBGU*+tl^rB^sBlFw0CQ|G6A8Ltf=fh(OSIl)UnY`6t|VF=B|swsY}Qc{4WA`g42)dJ|Y5nlD39=ns4v z==Z>sDn)!ygJ74y9oqv{qQ9L#=|u4gb104niItg-)D)&XLTkk!;3v*;-#f4lOEa?J zV|o2i!)W^Sbs);rCt>u?6)us!bc{98%L3cR9f0qmBmMcyg}|QnommEqoa;M4$M#9~ zd3`c)X6P?(6}@7azl_;$C&tPwE|jOCrUy-4=`!XUVM3gT3qt_u&H-SkD6m0f0C*h* zQma~8y-iM!+sfq}=4F0MlRQ$Uh@k*-8I4C~#Oj6-tE`_d(X%*DHkTBTqSrQmV^|~d zV3yR$>+pFK?8IV)7Aw*d$QT`@q&ek0_-C#@(1{Vms)lQmhWe-qXk0Fus=J(;x!bYyefxe*ag?-wMEZiLRqk}RJdnq>0x;=P+lQggEfZ$XqVpYv>E_O}Eli8Kj!<8> zivi|sm-9C4YyanqTk~({EJ``qyhUl@vUlvt`*j9sua;|2qlws?;NcZNRx+Ac!|(>( zY)I>kiQp4)>z|k$q)=88Y9o|?WJeiluj*Ke=d`p*?(G|0*Wi^5&~@&+$N$jWu_HXJ zn-09zUDw1QH3P2sB#0TVTBrpCW-MSNg%@O_>jb{lW&ySaPF=s(&Nou#x84#lG!xY# z~55*_%MX4!Fq& 
z>?jDmu!lMizo&frV!5F##vkSxIIIRM=@(C8?^onH7kb<2)Vaf|5=Ngllis6^so`D3 zr(S=t3nLQ+&|1!a?1lQ&TcEEabo|u{%Jl3$w?xi$u3r@fy|U_l#sc z$6Gs``CLg>E+K$EI+vZPo2+HMP}}mnxQnlt^E1M>svz!dg`}X~9Us>IFJ=SxNp*uh zWFBJfF<5DjYk%l(pt(DKcZTRQnQnlINZLLB3h&N~*^8V;DB*B-!X z5IF;ZbE=*jHwv4f5G9}S9V{~gqQWm$PN{$MSfki{H-Y0}faN6pC`g|P0j6(vM;Jd$ z9L&gG0*|yoksq@1AneQaK4aIwnb5@oJDDz7FKVRYsq+@W{v`jIqA0H_O0+VyD*}eG zpv64QvhkN6cD&!Nz}9#i(@nXLCf#RN(CQbiWuK*`p2}U84`EOYQkI&DRAli{I7M-8~=4=_O%JN7@1)1c9 z=X)E) zOLen%%9;}4JBAm>%%uTaK) z0>|Z}G1rJN0K^tWruo(Pt9~B#x`uxQHia5kt?REf8ORimC3lRaE^8ZuUez#+-+|=w ziP9%im=b=uam$)lM!CRjc2pHB%y+GcrqlIGE|{0WS+dDR=3SwPd8oi96@YK^~~^DkMu-UmOmm zF|FaKwFJ+9)O){?_6=^Q%{^MMTOgr$2%YDdL;bj;>;VN={QXdcW$B-4J$2lTOasg$ z)rXm3a^>omrGv}t)C&9xp8~Mc9>(AWc!z!WTmD1+6Rv7b8Jl>96rRR*nGEqtM&8#* zSvJoc_q|L1!Kb)Dcb~<*eQ>qoI;45Cq&=(nJH8TKlD;irzEsz0HP(Awwo>v1#8!}U zjps;RGi0M}58bnIynGetF45W7i_I#=xeReOk+E88HeMRLBvgw=h5pqtn1Afv>3uow zhkN3Ene)(oAc)I&Y^)1wdr3bnjZnV}Kt+huaoa9d``WYQcAwl|fwKDEzjx zK1=aZXMCYx#3oCAo{KY^VDywjDK!H8$p56*NjmmQH*qxsCKb z$yK`+MBStUyM}22>@S%6G=ABG)%RH+I}lm^mi<}vC0@&8ttwL&2zAb%H}h|rj!_lVu$2-XE_rZ}v?2*e2U;QE3Ed<;xXkhwuq~b&ExI#R(}Q z!Ja?8$527~$w2ZYs?l8X(`2Ohr{_g(00esPwvzxv1i^ec)q{$`vo0xjlwIC$19zRP z67b9O2tLC=yXX~gePPnf!Lo|+4g|Ce{8|Cu_K$Y^+YV2m9uB8o$k6XKg;W|~<>of;Kq#W#2Q|J&^Ku(r7MKgMq zo`T1C3^(;lziJMCDW=AMiJaN!BmMyPLb>GA!5tl~KEoNp`$--_9p2$YBO92~QYgPh zz|3hKkW$u1(P(i!hjK*xFI%l%Vnr!+N={XD;S!W0V+9++EK9e-Fgj=ehSIa#>36R+ zrrZX4xY4NNv6PGBFPI?-UXJ``wQx`{MRP#&QLS?E$r*auA9MGj7*fhK^L^S-viSQ1 zc}fiL`_lOV>yL-z;QmkH1;tjyA-qX-OH%Ol08>l@>|TX#2Hw7y0KWqu&fVi+NS(>& zuk&!l>3g&6V$XUXnjryOO@7#qJ;3kpAHTwKA-)D|DD1P^w=_yj1MDyTUT=nz3UyDi z2JL5Ao_0g7>BYItshs~^ZMD@(Dx##$}02 zH#I+r5v(1fK1QeNbY85n>kVt)-4Apx%ZA9oZj6mbG4osxMl|NRX&DlYNmmK_X@yEF zZR~0f%ca0yE+|8r%C-ug7#IjJbOY~d#WEKj`*rlN&-ZDzf+gSA8vc&2(1lELdtBa- zd$P9FOYrI@!+Jf%jPWzVp;ql`NL2jP=NV+%JInrxVh3JAyUbznFZJV=E7{WlAHdg{ z8+F$pU@unDcVq(oRZc1|{>7VO7RzRwe7?6VKB3Fq3Wq+I|?4-8A(~i#9 z;At7gn@`bhoTJUIqJMSq>$WBzb_Rw*fR{1gkJfw)+0}_TGSG_+Upox?onN*2k;}znK%c+82ittCTTO>&RtXscBlhjxkF&Hbb5N5tnf##yM zKfqQLCX6IWBL(;MGU_e9eftJO&eKgJhe-pSt#5Rxy`LV7T_2iJ9KN@O2wm^FYl)UR{KJT$qOAiY1e#YaR@Lo z*mVg;T3U?YDC5t>Sxg}UsY#hiBv*B4Q zc2dUqnj+c@8XhO|V&<>gir*)dn#dqNekV8@mGqiYdBgJ9qY~ z4?MU6zq5t&TJYT;+y1&tMGFl(eZRpOuT&yTRNExe*o@s|w;O#D8;<*6(c5;VkN z6!cYFEk#W}xTgbY=^cOA8{S2n|CC4}sJGP9>dN{sIFBKBWLwzu*0LUN_&%+3)-5Ik zfLVbl_nbPmdrKKP(PYX-VB2gAeMCCSoH+O(gj#d*p>Z1NrLw`2{Vk=I98te9hw)d4 zd4Cs7hTuHS=AF|}6s{Ji4Y4Hbj-Rg`iG%U*)MvC5$o4L$XbDUpD>1;_%7d7`iB36F=>Qy|Um8 z_utg2(alK)y{mP@Dgm8>-trf4PCdp%VqSqa!R%KBsWQ)w{**xFqK3hmGnt<>cK8nj zcoMJn;97s!GNPOAE(ea|prI+(zN&Y;Md|bTA<_~uAK}qSVmac?+#>C@QKB!_-XOlp zs%z1NDPRt#9a>?HwsD2hnK996Sf%Jz1OzbVNY;LWye-Jim%d!swK&iF$d`E6n} z3Il<;;jBH_lBF_pIAae7F80(VMd)_oTrDC*Kn1_WXez3XWQcY^U2*zEW+0$1_)!0D z+;5G@h4uBRx&ww+K2Mi~0Ehk}+kjm6!=&izf?`A4?FJ_m!9mzIu}87LaMS+3!6kIj zVU#nSK$Z+Z(3mlT>g$#i1opK#6G*03_+jIj9AsFb?c8b2)I`M0W;KYZD;*U|Nd?Fi zI$SyRAhL3z@BccMart{^mOCrTfr9};$FX3~F5NN1Dy;P5o_bYJ5=W4s{&~ViHp4HR zIvKhWo063IR_eVQXje9#!~zfRFZWE%N^oAy77sBFwOVU7{;YsX8vj++O7$zBZAyQC zikTU#5&WQJ_%sdP>|MHV%dB_X*z#L>oSkbq_() zLf`LgbZ{L)nneUIKfkJO;;JeIpBL|gDcQs-tk)rl?(Q*2H1Yi$KZ@vc_fM!y-l+1u zEw)Dw5i`R0@{N0xBNt%SdDQ<{0#}k;mK1H07?Y$&xqAmIB(P8}NQ~yB?)`TH`3;CW z=&!c(`YLuV|Y}@{?l?$VQGrsg*y&LxDfSR3ueO zgn5x5i(!v!B>|J}h4(Ur270M^(~T-yn*6*CM*1Y%GukE>+Lx9Rk<`mGwoVE1N$3}R zN;Cj=x@-|?Z0$#D8laZaef5> zcE$k^;P9d9bFSoqEU(csIKC?%E2$4X|42_?tcc%q`1_`Gk_TvGxPYJM-BD;IF}H6)Fl^@9-=q5~xFTQ`%NH)p2`- zf2d}onAeihqeOCQQrc}vAB(Zohwyx-i$6{+TVnV&uUtu+-AW;*uJ)$#H9& zDRbJHjNyXC#4prwI>nfWkAlBI<^H^u_z(Nd{Oy4035;8!bKQGm5_mt>z45XGelhYr 
z0z)jG`W!bFC^5Mo&BF;K>60gKY{tjK%hyOe^gm_Qyc zb%FyO4xoedbvMaToAuk$EVU64ZHqPL48<=|?J8;*+XFoLEwyaJ_o=9`QGh*Q{nAIZ zdJd89raxB&5VC)q{m_qYFn6gpVCpxC^Gk2R7h^M!Fh;o%Ym{ z$C;Ve336TSi!Z@@>raKxEpV>ipsy~uOo)yitiduZS=zI}n?q-=VuB(ED3{mgjmQ9k zLn1%9|CnY9>`d{FFbVHuGp)3Eg=f-Lv{uRXwg2q3A+u~>5lJBeUc1rr|Rb$oNo_zs3OMwsbwd_h;NN>b&Pwn)Dx3?(+{TT zS@#TSKfMLDk~x?Qs*^9<^sY!y-L$Ny?T%deyIo$QCWJTBl+5B`Re9(fehFrplBlCl zNaQfAI8|}|vm4xBWAojI@OSFW!%BF<#y$7#et}So>Y^82r5lT`wW6EX(?fOTMyn3| zNxKsSy@_b6k0Y+;Pff?`a!V*0kDbAaNk^-!H)lt2LOx$5t27vfQMh78UD>nDVOBg{`|PlWVHVe1s&`<-z!*i^4|QpluIc=KlBeC zia{=o@6{NuOWLiDK2u-kEf_*8cwO?b?$KRdIwW@ zD+mFl0H&r7$ajTjU3+@JCxe%ew5aC6+&`j+t<{ps-e=V(Rvx&X*JtrVUjKu7f2}V! z_V{NWyWt$`x4X%u&_!W7PtOxrUCECcs9Htm`a?icB7Pw2j#w`3w;0{o7s_1dv!oIe zv-3VqSJtO^sNJGonC#tX!zY?W3A(13tw~qAAZiy&iyYS$WIcrMx@nA-&9NYOs$)BT zq)BxgKCQ+Yp_wXLx?s&Uq%ldYY%gcX{*)O2_c#L_Y!VO(Y3$a2kb84 zL^TnQJuc;2Lh#GQl|Z!rwJDTFO*ckaj~AO~-Htkt>UK=X1_v}zzJh00iTZ~*PJAQj z%5eJGu*dsyb5T|87rplpO1Es7OQnHTA{$tk4=BRm0bd9YPNxbnelU3X_YK%xDB>Qul?YAhb2?Clj!{Q zxt}kypZQ_itsW=pZ-UQKS+*{opKbOU3m!*c82R$syRNo%q8eD>?RMDn0Eis~umfJo z+h(n79a)@LnXuPo`nCIp#0ZF8g_9ifE`N=-6ujM~>u{7Gj%UedN&fFlH~gX9C68*7 z^@#gJBw{1JKO8sJ4P3&0kh%)L;?BQ*7pSOq@x(^z#d>z60rN}{V+s!S0;Ki$O5mL~h#ux8`DFf80VMpuq7d~289KF%UAxz^Q z^vfXj`zjqJj>NTQ>)Hr8yeK^rMQyOX+QbOQ9yzfI1df3&KF^(^2Yd~zT&zj3ckU)0 zhJo*_m?GLeZHsYW4PQ_R5n_{*fJ5a|_m`{gRX49G8}{UD!{9vtwGbmOF&pwCR{>a4P_WO9<6mFSAV(?|0$=M9dvlWFu}>Tzz|+R%WL}jry>+yd^bra zz=wJvWo#V$(ynKakkym_-hxwKFIJSw%`amcK>vWJ>Cws8Hav%Hdyg#9TI%xV?0 zRV7DBBrIGcK;lNB&f2>yCG~yAV);%@fC?Ad$L7ji^m95RvDU09oef2M*BzrTQsPaW zO0YvscR0+Xpq+6)093u5Wsto5T5DlqcxIbYvW&w+4$hNn6v;!wdoI|*?-#x7bSS_* z!gkJdV7AoLwn!k^Q zYdZzyM#Q#|lOB!jsKk0?zXo=elg8f%}~vaE=f_Jq{fJ)(zBP ztz0*D+cd00<`h8RIBQ2JFsQ6SXHW>Pk#}Q|=Olzr<^A`<+%^_vx>hW+gvFgF`?^*o z9)hXtlR<~UHpl;=8rmAlM5ZLE zK;b(#(7s602&U)TGdTU$?dz&)AR_N3`;FCb*aN)5TPMX6K_630?MBSuCT6yTg$``| zJle9{k-;%CbtYtv7|N3J+|iAvQN8HR1YXpGHujQ_feo-@r9RwR?-6~hR$b|G&lTd~ zN)hsjFXWPOAdITya$KdEu}p3C3v(QIeGB&l(JAnc!{_+!Tof%_ z2Y1vd`>etAb6AVqe=)n|W&cnAO6;Oe@(LJEZMj`m3u?LPX}|lD2?hj>?z=M~*t$#K z^oY-y`XkT2P~h!;sg9oi@4sJwYM2E9JMVz2D@*<}-CUoM0@&t1bhgVzci^mZwyl_N z!{_@@quHj_$L;7jw1e7qTozmzD>Vm|CEbx|K%N4an49@5C>QL z73x%yee+;LGlh#Px-Wl=izaI2uO^W0ob0yr$C7+=wtbmbbi4c0{bcQ1Q1g`j&nBKp zL?EAOkiG_iB>Lw5nou$53(M!u5k3=woBuR%2h03qZalWnml#1`FJ|xa zA1aXnT@h#jplkA<1)*;*Ug%_QtlyjcWyL=I>1714^RF>lJKwH^zU#7y2DBg}W^RW& zSo{BJS0y?DHlLI0ZvMy}pNj?E;xboop4TFr^Z1FznpH8;@Xh;_n%FT!t~4 zb>mfNUd8q4I09EV=^sba3DEI)(Phx2>hnVG^GBTDhiFh4EYvRQWR`m(otx%3Fr>2c z*vtObR+{S3!C4fpPjvDbsM zU1Z!yU$UVWx_Np@>wXxCTuF<^H>jpy)WRKCIEy-*ald7-<|~hCnw@ExRTq?#^8Eoj z4|n+Cy5Gn5NsR~iUZTBb{D@tWA9;wxF7{M%dRSwy-cha0+`0Scfh45*6%KH7Ji)auE>|F>YRo;Y37qzYakL@|tfi2`Iwa zZ%2vUBo?2VxR8En}E<*7PPvla^P%O^uRdeZp#X*ZlJ0 zc2LAb=2iO=J-fIXlrNhVkvvB>sU4v&gd6_3CJA_0t?I&zhMYYW-yWb_!;^S{p)1yAO zqJNX1{l&|8oF_Yh>mn6|MU+{YijNVruplZ0qlg5b9=aa|jWg%rE_}Bt7-SM{{?X6v zsW3GBfS6=H0+T|{xp^+H&e!YjI$~iSReAkEuRHSQS+J166T7S5&qwTT&BB5yAxOA| z$~M?n?{HL2BvRGym=n@aAl%X6l!Dce9;C=p4PuKJo>hcDh-|-Loz~ihCFoSVL?AOg z;8zY z(lt^gSfG7oIJPZ{!N9|jK=_eSWO#N}wB~`+A1;}^y6i*yPh)6z*jBr^3_%f%JFiNCHURp)C$jvA;2>h5b|1|-jm3*5xo)c)E<}38(@=U0YR+N`;mSwEjcm*EEGfN z&731+LqGxlE{?!imcbSs==f(xt!!2);v@AQ@qk# ziYa$LvhDaotONLT_-VPMLJyk|BKl!Px-vF3ntAlQV1C0JUu4v_zKOtZvS^ACB8zm6 zCBIt>&x}|IkGx;^Y>q!cb4?L60^36I_A?-=Yd1m5~RyJAWApdaPz?{3x;26N8FYjphF> zIgS2L^-DuT6v&1Pi%joeZGi6nE$03eu?$KAXUy;Ci8Jl3!^CJjqF`MlN1`cRu|<5u zlp)@<)*{h075Amdh1q%Ub>@TD)zOX*&B%vrIK!@=U$vax=y5`;wIT2NFF$&(GdM$A zQ!^;~pUJ-2Gk&}7ZvD@P4Br04pt}pcZ@v2>cDFiq9DH5<`29cCx2h)&wE+vVaR^gs z+(+1LA(Yp^5d@$rS5gb2;;oBmrN-9@Cczl`tQuyY5b8n1N}z)iXbRs8mwv 
z8boW*!PZhH$0~RVG05MJH{8HP1CILW7r0L@aqlGci>P{*cfrf3W?AlT+-j%GcYR25b2IKp29ZdDu&IEry!eO(|R|BV7HutLKC<; zgv5Y{FMbzg)m5O+<8x!_Tj*GOk(8?>T{fza62f=@6cyaI3XRTc*YLEi|o7P>NL%$&(j63$AdYH34NCBsg|w7fXIyQG}+ZSR75orJz% zb*Fh!57R}iHvF#sU={BVCj5M|5g@_$%N7kOhI9TVlpSK1@FhL4CC+b!a~>t_5=_NQ zN8(4+!Ur7RFLK8|kE%z-kxhLbsGZ>_;e^BrpwOUFSR!Ry4R6`$sdRW8?Dz@{nj}hY z$=ksq3R3+={ySm;#I9J;Rqv6e(C94Fos3Xyh+{+8j_{@je}C2{X|$qLil;u~|54)q zV~c*riJ2o6l}ak-3eF&ci`Qr&?+!(<-KjoAQ+#NH1k(K~c2U}loMwP9<+tyCh4VYz zN$;CMhGAyvaPa|**a8LsH>DMUg05&)pD_)$^?FtP#{@|sUny_o7MWLkhzn{+d2U+SxLIR%D-gF8YcB=6R z$l5Y>8y#V{OA>gx^BkIKR&W{%LiE8lnaMpX@Xs?jI_LDlz>4ml1Lr>_Cb{M5>N3?4 zBRfAW&`a&6&%1Zf_#sSeBb$zu++;{kKKzN&lX1uIj1VfNgyjnG(^Gl%a=X@ zV$kZxAf2BeIu)~W^ut3HN$U~HgKw2VHY;$e#d&kB==a|MN&xEVF-hJ$lUopok>6z8 zj69cG%f9M$g9BvJK`#=3J;i4Mv6j>((JckH_33=A^B3bR8>F_^9dZzO)=)5V5t1pilOrv6+H0G`tT_79%$!*sYY=^G8l zItwIE*cPKrQ6|aq;5l`zRoPyZ*<{cUUses`A<)}8)LyoCt}37&*G2I}@|UoownwD{ zs{$V8^DKc8FWkbdc)Aypuz;7kB^L&~$}rr{^5n;6&?a5Q3E5-KnlE1z5*(n@|9N#O!4w=H4oSm zbzn5W26s5rk~R}WGn(j?ig)_P@XdDf154<4S!;K-Xu+G1Q9nm91+%!O()7r6dr#s3@A#n?Z-DBKyVE3>Ya_tTNdc zg>gr%QLV;bzR#CODGg!NgYf*s z9+;?3rp^iOpo}1MA!Qn3h7y9_gzUOi0g_B2wf_T46UfDwxU{2W{~VHB=rrG}83&S4 zD%*2jA4M{1ML?bNzN5z`xg3Ipbt9E+RvNK=-=}j1U0=hxRMZL2-x|3>ZA&iD1yJRN zD@*6o5{Uf_Zza&tvr0`YLG<#Becrnc`>KHG19UoRkMpV9-i^k%Cg4AAnHEdr@hVla zXAAaGyJ?cbu8;|-Ozrl6;h3n?Ec)Hh{<==73SE=hqc|tMfXXdD9ZF4sbBAZIDPP$% zA!2^!_27qVZrx|XUxTRW=JV~)>-x6$rkjB16{T)}*>n@a73FKC6!ZcaP9d_#dx1Bu z-_BR+jJo_?>ZZ|K!KQcbGG5Dj1r_Z_a0ydzavb}ZdL2@*e;nNtVXO;KK`uRol?2lBOqb!#HJS_E_!g}#KG=xzN>k%$`FH@4^*DOBm5C0ize^I+LjxGiea!o+b+!evK(82t~In4p2Ik` zg7;xg2pu)jQ?W@)HKe?zGK89}a-5DEaZK~7JMouOj{kPI;KO;A*Y>*y8q0(M z19GUacfbedzh%`YD=I7&zwv-~OwfX-iGyq3Y2$c$AaPz1e)Gpdp447Ou>WmSQ3&8a z3W(nPU{>_!61*%R5(6kn%FKW25Gl*eI4HM#BQBCCoZ#WN=>DwTfPz-vWN=yC(-bl< zxJu2Z!ACvFTk!WhF0@u9BXZ8_gNE>^f|3xJJDR<-){;c9f!v%FmpN#6eA}yEm%sBl z5n=OWbquc`j{us>ZfGp_Go}}5fRT+p|L?&6X!^>qsQ%||x;v!1TR^&`q)P-85Req4 zyO%DJ?nb($OIVO@q`O198}>Zs`}<$d`+YUnp3ltObBAhg>^6Eg(-#a%P4V#TzHtPr z>&O~nsmJBPm}W-3%ceAh(pT9rbY1pY(YlO@3+uF5>0GM=>@@T|p<7#9hMCXsuZdSgZkUpMZRh9sEytXw7qdRtod2*?(wY{`1Ry|x zPYZaB19`Kv_t`r37PGRS5`5++BvZrhp^g`j>3d_{xnzzSG51v-G3wvBNBfr!mm;~( zHH5%Z46rY8WbXgO^V^k69rjn5?ozwvNzEIkr7#aFuI>^5)sCh>z*`DpKa&qA2X` z#)y2Q3l8jLHnbvK8j@PZaZQDFmXg|al4MBEDP#Ha z8{;|GY9#XG*)ys?{_jaWb=8bjms7ZwiKW^`t;Bz?52iX!hmcU1RUCV_KKanI_{$F< z)WFoIk+Up({R z4swia&DSOiKb=7?Mjyq&Be2f^m~EDtBMWk6Z65&O2)NpUTsXXZY|?>d!@Z~SC+JNY zKB%{ob?QCR>lRp2`}IX}xOS#^HO4+NqjP!vb**^QzhK&*f2@*iOi@GD5ohAjX1n1c zzEJQ93^<7mI3ncppMlm&^p1#%_BRJtLM=%?cf&otb4AI7uzD#!tAn*bi2&;w> zz9Ju3s#Td58E~#9yLHG|POSQ=cUS&;i%i{W$zR52M^t+>{O0X-cX0)S0!i)?j}a^GXECi;G|)E~f7Oq0we<`MQOU`^ zTE4OKWo@j}xR5Q)TxHD493#(N*>EVPTzN3xp5zR>`2%FfN~*56T?uyX#Vr5xlDg8E zw^Xv{rH-lrIXG)uQvPK3(8mj?_;F3nbF~-8MHA~@*vTOTq+I6!EYwarj6$Lbk2bq> zG3*mvfcQB5KA;cx>%v#2?67w(mPaMfxvE~Am*P!`-Bu5l+HCF5+)qKil})M6y01UxG<7n>J7C08&&&KTgUcR{mR~ z9hzWoW3W{OlT?fO$mi znG8ZeqSWnVc{liq=@;SD*b)yx(dxq_=Qo%HYD2^N%&5dun&{;(maB3S1ZDg@PGsndf)wQzB&Dq9?S4H-3MnBAGt`mulc;`Y2zLd!!MWiwacuM|q3-JD9r9R=v2axb}a+ zh_-;t_x*3?~}_%MwqetmB-;z#mLc|Zb!!piP0Vh zb=dv726%M<|DM3_p&`KQTiY>2<3xhTS0EV-ngQDPfl6_QQ_vvi{Vu?;B#XGy9?)R2 z!b=(M%*Z7&(j{=&_);=#XjRF!eI)&$(OffSNIm_r0P6rRX{zM`!>So_DpNn|&Iu03 zaJJ#~9R$HkPjT)fCI%#ldqn*uI(9{E1_=jOP&d#QCPLDYbur6|YMLnnI7R>3FLOlu zcJ1*gBHQqxlOrzG`i!DFX&(hO+w1SvZ#U`{r#G8xX;k^lwk9rutq{@j5h?+IY70b*<%T*?8Uz%;HPH2H08VD+4Lfr3-`zhs|6 z&Dy*|i^h{n0?0>h=;5T((^MbK6XR9YhY61cXPGg$;@=~OUoxuMskw#MM=N=qQkI)q zqV)U1fj)&27*D(9SXk8FKPxenT_SelX7CtCI-)@2nuq0uDXM^%YL?kUYpfKPh zIPTbg_7(lxrXg0d4m476(ldBy)r8h;%}y+PqwOWrE=*t`XdWh7H)4A~hR{m{NuZR& 
z-=Wn$wB5Xi?CqErWZm=h9K{-4T$>$eTftP=`6j*tW9TIQ+7RF=Qo;>0pLF0lEEfz67gFF}3SjA)N<#!;R;zq);Fq z{A*y(<#^Pm7?%3`v)@3~wMr_0>$-RcWhl+EsEpf>Kb)_xZX1UbGaURQY&m8AsqUOF zsH6tfzEf8I({XYpOcZ7QEv3H4u;+ZI2KAsbB{tzVc9_)$omg+yu>fB{>S7re9xy@x zIB!91Yb4q0UZq2y!`7?LqSsYn9?iS!WTi()3GYMq@U6z*45&YJ67|-enIY>xX=O-W z_zAtdSuunMKh0^6f5#JsEeXxYMA5mfGrSj|6R}Suc(T^xfH`nXG^HD^QZJbaiKBv> z(Hp?aqc6$}99Td#mgE(%*bHaRPz;wHiE^&2kFq^%R$RQ*y`vo=S<=03f0yozf*L&) zkT08{YN?0NymJ&ofiZsA1mQ=pqn`K2!qaN2;T~FYlaT}?kkW9G*+g;7(+yf>bW@&y zTN0iUs&;BL+rKj%doVI#7csilkW*0|@%enYEi^-q3a7RL>CoW8;e!y!amGNfS?LyG z@It-$c_I4+reEt1tJV2e80e@nPV!aXbGe1cjrQ2SA3J3g*Qo9N*MSAfPII+^WWs+I zc1+T2$kEN>Pf6JOP*@7J?~KTo3=y11o;xAt->$pvKFm1kOr$fW+U|+xAJRoG2(pHi zv=Sh*_mGKRX!0_`cp8zAo}B<(PB(8a@Gt;W-igNs0wYwwx*?5C`)^){wW|9(=XRt{ z+~>RCCn;&td*}zyE4@0eVYbb7fSZsU(<6G1Y7B#1Q(4GFRuL44b@Pi12M9{r55`5C zDOb@jF=5_(Ot0pY=FT6#pZm=GDTks-TOqQv1+hzp+FZ--;!GU@M^!6|5zNj^v1oks85@< zs>ItjhzBSY_I)2^xaF&VY(P)*G^E^D$a31Mlr9P1H5yOEbj7DHsQM?hbuWuk=#Q+% ztfWgZ-zUYE5bpr%z`yXDrhq!v&J`&rhLS(q%&cH9V0n5K0ZcxDpJ^meBJm>34umy1 zRIQMEf*9A#g6KJDk6QzNq?h2c_S;3FEI};&P(sgD_2NFJvo3)%LL0VhJ24;(PbsbT z`jA!L@K#RuQ1??>p@>(vXhW__mX8-AyCHtrCSMpbTKz^)Ph|1IzSSMYYoMxOV!>aq? zLOjPU?HEy7SkKvR9v$NejP2uw#L#EMee<8)`nUQ0$>Q#N^f7h_l;;T-XzBz)@XO(b zgfNozI}nPCuKcr%fRmfm0;&K&jAsL|=>xtT0D%HfVv`N%$+Spdwg<&M4U|g>M0~~? zcuil;p)h^W&J)+h`yzsY1*Tgl?hMac&D={`zUiyFSGU#0LU1Se!D)b!#fG~Rfc3er zNd*#@R$5uzfFmL@QD1RfmO>?eDs@nI+V@7ls5!QJEw${k|CPtT9*E12WAA&qQ_+c~ z-V}Z<&Q+N$Q}?VTsiPu&&P`%SXmJ)6WvrMdDRSW-N$fC!O@hCS+sui4EY-IL(SdAm zh@UG}G6#3p6GpwUT;FXuTfNM2NU5qh@45VB!s z!7lk4k#H%gRt!V?;KkabK)*eJs9Fq47f@U%<(vCpurA_MbJ5c@7TY74Zv3%ve)wy+ zXfB1}*k$g~Ag~O(!tuUfqKQjrNzpUl-bRN5A?8}UrXWEUkjrG8eQ#`EM3w@h@#N^c z;c#e;9|>|oq?PBmmo>YlQYr3 zaurN}f|{4lI!acaRL_7po$GA{YIn^`Bh{Y0wXNWiREO4gg#Rk!hsxa#XdG?FrSn{x zzby^ebrZ3Et?eD!?7hXg{SiXvA**Ra>EZ6rU9wpipDw!dCrxA{4bk>>=1%UHV6}aK zBSs`e@}>;}T9?;s4?*l}(ir_nvi-SSdL8gt>CyySXXT1E5PRj%WM$+V`= zIPLcmh0nCF^HxfpOsxAYS5r}8EUO2CTO-VUf9s_>vK%nc8nG`Rr~myAmCvD^#xzTT z%?TPwGKXCdw|+mc#g3!`v6o00wBHe#Qgj(DRMDqY-+aSNbEH?(Q7UHH$+*p95{W;D zeu`w|efJ&(rjGm$yuqTxCT70TvvEy=)bJIqim5Yl_+UIwejChXk?h+k>$QP5DEpHG zqFS>h#>UW>>Q@b_(}HKC(^TYI^(j47ud|G0O`Sq*!oX`2*%!V?~eJ6;xERi{<@ zBMNJCuNJPOoxx_lnT)}S(-d)c32LHP5oqhV6EC^;V{h5`+$*yt2#Q&S$igqDv2)GxH+ZomD;+`j<6utTXN>$99 zj4oGIH=T!-Y`sq~sx+{7`00bl;3!^X_lJ>0e9AxdyGp}xhBtXJf9mSm%y#{+77CMl zKJlp!ISlMdl(1`jCbJ|XaT$JbmF@^QITW+6Vu7Wu`r18zEv%ri5-AslJgo`$eNwM6 z`z9r#=6w`Ixt?uaR1uOHHEheW>|wc_jd(;*{k>jX$ohg4)|?nJhK8iQp#Cy3RmBv# zb2Yl$ZWnwH3Cg}ttsr| zU_~+eeYq|IW(w#Q2D)~0&CGrmK<09E4mA0AOn&Eq!TD`0>kTCkA%M%2_J3{Q`I(0L zxw*Eg5<%GIJrX!-nAmMdjb$w(fbCNJV|iB`2XPd?JWKbH?nzdHLpE_B86`{(~(uu7VPgJ{{Y}HBUHH=JMJSDBw2ag4-v?aWG z5YKq7StfDoSK*Hk$*7{zkW{C46K!I){=}^uSp^d$Jes~jvwb8jn;3UL14myGO_+n& z^)GLWr>)IiNDYN9ViH6i659uql{?y3inSXf;aVbdb+0`*4&NVMkMVX6X}3(>x7q_B zN8b1za-g=p$<)SW5&_Pw|52-2AB<05XsTfjz97Yz(x#cDnhUVP6cTDo`@ zds|&(w29V|e@DAjX|ZOq-YOa#q%Ueb$r2_}k~U?c(r5jOOJf#h*A$dIT%a?3%{9U6 zpZc!sNBzISc_afoG9CHDfWVu>A>q4RH+0=F7d_ z7TFbAm30oiPrz07%6I_zsTb>Ros@ZMxAt!ER<>LsgL~5N_%jq`fu>7~Z8fH!A@U__ zJXWSl($i*^R@6#gr-ib#``SflmOh&$oJI-=A*(rB+-4UL^}$?Gu=Oa z8vx@!45I@f@r|=CgTL#`kjr~1eYV?p7or!53LDS2`xtKL_3NLw{w~Ri{hgk$h@oWHY zt3mjRQruX#P)yd0z~ZTO);pA|R6ZSnQTU?1#iW3IL@KNP4|hL+8sEnY-@bGpYUG<| z7$NY(j!EnJvJg$vb$Kf-+lAlj{d08#WmLHg8Zn%}Kjcr-t|GqaYr zXrY~zwYqD@_WR`FUF9yF*scfhCGhBm9YT83u(34{gV@AC-@A~1D!B>&N{!B48 zbCq(njnkYju8^+_8Jjr6X>8GC?XwiN>Qwa`#LU_;U0J^-f~{O7U4FZ-bp=Djt6>FAg~rf177+ zLsmOpfBR)MLezwREdLW#`s7cZw&X)2>uY$!)hy`DikbkO#=(S>&)OyoEoV3FJ{16d&& z<+!}$OV9eBu+Nk0*Wc~BXI_qCe{28DY^Bl6#B)eQfms2?!EwWRFI7sjx?VE2f^--N zy!@{*4u0pB%w4;{&=%-f 
zXASWC&Y1$|-YML0_(=C!i8tB7mBNp?#hF{=#$o?3XTKH{bDZcvh9!)Kco@A`mwY|o zSe{ba-S@}6ACo|RoHzjYb7tZ%TRGg|laIyXx$&|!_KGSAoRH~v@n~AgQM4K_h(3p% z)|g(GNAQ8|OCz}J-%rwng49U{1ofnory%a5;g$Sev+1(JA9S8_?v2&oj=aB~&hcN7 zp>_P6xwNt1&2<0Re)gf+4&8t86qc;*7-hEuBggrzJu?PNPlPlIBJY7!z5s50Ac`G= z9bd(JAykFo_h4yLqk-?wx>=X1B`OY&<2z^B4Of4|OEkFizYqB~OK!dv{jwZ?{kWae z9@iJ(EVdO%_}SP2x7dgu#(%R2K9`@teG3^`))!}SF|L{@V47ptXcL}SoReRKwD)IJ z{2*trViGe2={wE$>gqbVe<=cecZiR1;IO%6_@l?E3f^Miz1Hb_iFkt=7Y=*k7CJ%PxXW_!y6bXZL@rm=u9aIa_d5Ulg>+eVyesXd2 zOeyGuryE0w8IF5+3LhX0ICBKPVNnPSa3+t1;KMn69@Ji2A z3Jo5(n19Hez7AzITMjE&v3lt?53P=h>HfBBe7&aOJe^Lrl3$>>4QeL{g1T7?0l=cq6vnY^Yuq`$K_ zGKi*0)9Hh;kO{7)BskE2rt9sqmLOQO)ll5=c)Z1m{l44CD$ zZ{Irf;d!)-k6p-0IM{J;)HX=8M?NKP*_6q`$) zN?4CJ)x7NN(BEQAvaF^os0&WRBfRpHZ5p#8MyhH5XSB+z)n_$WzEMCcpZkL?BA+jP zJfAbCl~Mr_T|O?I*}XB4PN>-E`Bm(U_C+?Tk;t<)rzzd7ql+|Ul<;M#$rct_FHHkOnY-hjn%xz znl$9w>+T`zD3RzKJi5H&wkqryOE~v9L`65TcG9`j+pw1(NneiN7V`0%y?5@&$nd(a z6#3LNCoRH8oI94bMg69__`SMX5*(IOG_;BJ-HqDR3Qey%5g#6pZpBr69HF4|`4?-} zvSS3C(04M%WB%oY@}!R&{mtq7+dCh7IxP)2X|Pc-!fRILz-_fKyBz0GGO0!!BAHJFBKcWZ@nqPD%o#S{X5KLj^DtP9&@dh$_*QhP#1@(Q zUmWaTo|^-^-z)^W`mC~4N+h<0U!+IPx`ASjf^6BvY1%L2B67;-Jv1dJh(MVh>~3`d zz`1~|T}W*Y)~OLQQSqNcYVV<}cf+8abTw+meXKj}0{{|?m7zBeBVY0L*<_Ls9`rAv z1K`5*a0%cm;bc+jVA^DfA-zH(=5IM%mip9yYsSmY`;ML0!fG-CQwh5zT@0ga_;vD9 z+Rm_a5t3PJwGMZEF*^nZEl>VzhhcX}t8ghXt!fpE(xY>z!G^`~RtY4fH-Z*Cm|IOM z*P-v+#CY2V0ThvZc@#buGlBjz`HQpx)-KJv(its`3V96lu~=3IF;j-QcN2S|b+Fm^ zH#nxnrIwkrT#Z+Fki>@LzXZ%bXWjoZRPu%I?L1kmnBE*Zd;J`|GOIPw#1|F4PQit-A=*rTHA2YmM0K7k|L&yNpA#7 zXRwp(vCkYc6jC#DFj5G{=wUX8t$}6CuisPba3aBP zZ@5dMZwN1MgYRTF(5Z*8p{oW;_;h6Z|`l4_*GqKqD z<^IPIz|(P1)B0O2@67#LgR0a0nZ-1p8f;4sy^vIUsViU!&4Aq>>RY@ z-V`C~AUs^ADH_W4I{oaOGHX5PbWcRraNJ4=+pJzIu^4<(4oW{+A0>1lnP?fk(0Wbg z_uzT@cBahJBpz+fOW)Cu^3B+z?60NEmf!=f36aoF=Uan=of2xg6vdQU%MS8FCatZl1fgDhG4fM?W1#j)>Z0)>V4%-80I!|p5+wCN- zm5v4gx#qQ{E+BLau)2WEs0wO`Qpi{Y{#@V?B;`avvO+dpq;6e~NWl(@3WYbWjUyr( zNHe9d54$tabL3nU1^WdD|BCIEA*1YEL2Z>k-rzNDvLd3EnQHGr4I^H)oy2Yog^#x z3Jy5^ql>N4ICvQraVWWrgSy#)jE$<__dl-=o*n!mjov&MQ0Fv*iq~vokte&pHGkrt zBJe6hN-yY>k@qHt>?Y$-a`p?RT(z0j;ehKBoZ$L0{gl*D=CHwBAQSpnmg4@O z1O88qIDWf+TKQ2mTo*1!MqY!0*Xvoh@%J;lgS=DYRaCi!CndFdUVszB>W+3FhQ>qw zyU~WKMyrDQe~FpFWYC^(=A#GV60~f$*IdA}|NJjEi&KA$nb1~yUh zVock8r+OsbU_GCKF9VpH{@4!u6&5i_&je?u`?ebGT)UC(*xX0E*#`|j)G)A$sepD) z4;E-sV|KeErFlHwWrx9RBt$T(331{CzJ^4_D0{}VenlfvCjNcucPS>x0hiYwA>;^|K(h}T}l6) z^tQgkSEMM!!w-n-8I}K_N4oDq=tx%^VXfa_W;jt zTRV;C5uJ$3neF<7OQbWltLL)Rblb(V5_!&26MPmwmc%8-q)}nz3jjHAhoy{{S5v6 z>eq_>ydHmoy`b|*M2k3EDn1l_^+3HzC-+KRp6iF!ZZAQyLKC^EKf@|VRg zh^G~9Q~XF`d2;dMS^=T)_NJO^kTOhl(+m6n|D;FLh>@TSy^Vb1C+;M@c-Pl@au5;T z(nMODNpqh4O_+8JHWK*_$V6F=SxDIbjO@CtJZuu2h1d0pN!5Q;f)P5Yz^>o5UlWDD zW67te)zg+ku53ephAR-4f+h*CSc&D`=8orIJI8?Fi1sSVRpaV9O~f24W*A(3v>at9I@gc|tK=~E~KIVn6?%cbJt4jJYmPWxhKa*H{BQW3A1#2Mon9aYy!+jLz8j%n2@Sr2 zKJ+DbaOG8^u=eHOb^N>jEk28vnn-!d2wQLf)20ysA3JLqaQ8HRZa&!=Du%}7OXR1P zCZw5AZ5BJeInNag2MlohpWhVkE!aQ(#qEhz3|{Pl&ui*_fP6xp^fBOv-7TZQv^5?e z5}lj%sY$nkK=9?8CCf&)Ot=YpD0+?*X5(2`VX47_CKh7i&QOSxAAzR~*W7AHJ_D(Z0lpnZ$8QF$VHZmQb-M=?YOgs`m1mS3%pgjyIFe%A0pulIhi(-VhkB^)3`q|Gb=$vKND2Fqgl_mMh1mq<~F<+Zurim8OH*`x}QKQcnAc{OD00)25%LzItc+2hy*4>buoI zqf3H+V;q#;c!)RAnrJ7GCFf(%t?v=UDq7OF70KCAd5vP&O-^CUOvB-K{ghT~G(7+C z#3#Ek|L&C#UK0LxS1YOOZ-XemLqlYV{qhL}flN`chpDI#-dHDrw+HZ6LsaiE@I6Xq zznJjbF}2&fSN#jG;Kt_F`5B5L~Ym6$QQ!J-_7`}FtfY>Sw@hRtGQchNY*{DIJO>7v)u~`zK5F4 zBVLc!_-0fC-PzYE`@c;!dmR6 zAUU4RqQ3KUtb`v3F~^jf9cH3UOAzm>=YirW zFJR52Vn`OR3WkE9S&?YXqRPuJ7-w-Ddi1r1IWPiU@#d%$f$$SX7sm6_iQX2gW3!4H zay#J$D8o6hDmhVS*(}QT`4i^(JIUSXMuf4YTxMt}{E0*FY2;<)$a~9pDH-BH=(p2u 
zT!VY3U4CAC4h$RECtcDOMRt0*=^;`Y+f!^_()LcQIw!&g8Abk3x7d+oc`;yDe7QROx2siY2C}rpbpc$gsg}EU+T>`Ah7i zm^pEJf&;@9L>c*~^A#28aM$`g@cLa}$DO;wXKw z1Q8wSj4(4pvSL@8tx$H{t0NSMUPd9Su71^CDFOO`G+o){L@`}v*4@OL>)3n_ z-=*%haX^k1Vf&emAS5`5Y!y8-*lOapFZ6i9mkh5z?>K8FU0r>+#W2ulKKv5(UTCPlYOdj%VTnAf;w8 zzV*N=K`V)@q;2KIllaUQq26aZZtEidNu7|CBDu=Df3hl04|7qnpl1g9w1kG>NWLX4 z9XZs660R`^kjD?6qz<0WN|?~+f86B$k$T7bS%B8XAdn}wNn%s!x>MV@evSnO(l?B0 zKHRkq4c*b7L-rYfk}QyR^4CJhm?Jgog4w8V7H)Sg!9yG%Il5vR@7xNiBd5Jbgn*a( z>4&piwIZ;$U!FZQXL$V&;#z+_>P{U7BX~YG?6pcXk4Z!`3lgp}lus%)1MXBEHvs<& z$W!p0R<0dE?uS^x<}lo_;IUyK_jaeJtVWlcbSE*VaRahvXbch}aF%B7b7L9$h3x>N zulcK25IHqs#t_>~awTHAH3^vzHk~JR9m+sFo8+($_HcWpx&_~O;iY*#xWA#oiyG%Z z=@%Ta!7D-&S!ALld@!XRG!-X~E;HCBl_rp0G_g0N=s{BInzo_evw5lKhu6em(5lFV zgU;2-v>6%b)IX4+^J^*yGa~G1A$u(MA}xB!C;5{w2)g2rgvhBeF(!DH0RggA=ge82 z^sHZS6-OO~DfdDP<|a`mLcF4OG}H^(<%Pe|U78K0M58Z{r+rNpGr~t8kc~9Jk!;Bc z@Vhx~t~wclcy$?nI)pqHLCRSG%zl7M?5seW#}~q-UTFg+!T9cGHK#VKl8%EZhm`lm z$#}w6nIyX*P4gfF(`XBtYnZ1Pl=BzLAimL0ipb2D@SxgDpc-aNdCOF$qAXzz#{Pod z+MxDiio>$NsGTVca;T6}|#hM|n4$Y&#$c;r4P^<1Ub{0onT~hHkk5PTB+tKhXNbFqFe+kkv z#U^KdbsW!O_SeKkIT9PA)|U1BTJgk|K?QG8k?h5*h8Ksx(A=n;$T2LjIhW`l&=EcFtu-sdj7pmILvI?z%gM;qNq%snl$~n z3+UJeVrjf0R8)^Ra&j*Rlv_Ps+JqmA0X5L z!W5F@mi>@bfj?yvp{@8mwG+HgIDmpF-m7@DV@)g~d3`Q43yjtP$uF{HvfHz5WMSCy|K1HUF}Ze1%9Gi~W@C6YAr@Xi4G#;U%hGWX6L_AC1IN9>1?-W{RQR3`+Boz69CPc{(k2c zhgPGo4h^t3yozB)q~i;|ceCh#uD zvk!+ehcb1(W|pv`ZU^!igf$3XMmqMOcgY-{pz|3Q7XWtD`Q2z0AgLM_ZtzvdfK5~F zd>B094+a9R8_&&>*LPX(4c^{)=GVH?nFf;EMgXAClBwChQ2KSAF=iqb_n+{)_h*Wx zpVhC4BWpfmZm-0S%ZJ^cZM90$Bm&JZ`#z-;w<%V+qg{TXxkmOe+O*#&(gEle*-Hms z^Q(#pKcC-KR%)O{0mc1U#=`Uc*^3MhBOenoTh0Pg#ugCaVkPr&9^n^0mRWEafr#AA zuBq;i2xWemkP1@eS9pC%YH75$Jx;cKMP%t6Ohi^2gX79|rg^GXENHf7MdjaPdnK#A zg3ie)U!1txB1hg?>wO##qqj*QK&%vde}-wcQ)}u1#qP~h^*qpB|MTU&34&+870xS{ zQT|bLCad_;&OKfxGohFfN#=eN74sD;Jg>g-5|mcWtq~kmSJmk3EPjjSJo@92DA6wn z!y;dtw;)429c3_s`T56W)SKTI+9WfOd0?aZak=gaI31eJfxLvO>}DH3xN^!G);8HC zn0)r_&-{mL&E8PzOsncQy^scxehBn(|1hekXcFG4 zRb1DFF@9{t&+x)1n_`34Cq=Xu@Y@0G?oRV0mq8?}ymL)Q!>;#4Z^?28*iGkM_U@#@ z;h0;y`#Jqzw&Pq7&;SSCJbbqxuH~NY^VrmuYMQ+>+<-#Z(wnfatWJ~4!-qWb_UjeZ^)Xz$*YOqhn<2^-zD>73 z1)U~OD%t_Yq_E=ip^vbMHA|ui6^_c7By3w;xoYyIIe08o088 zUfhjHTgNSNd7=Q!=t^A}&!avkvphksIP_H2pMl_{3ZSTp9FX!D`Eeb_!5zl|oW9ea zbzho}E%?qN%sqh$vMW8FHO2_2HFg&gaPPe*5N!;9RkV_Q_Y;tol#_k-NC%U!k-gIM z#R3kz8!A+98J?2)l^$|(L%M|H19h`E->?vqhTY?;ok#!xY|UQ}F6q_*^zXt~7j?#u z_$qq^*cs$BXx0pJ)iiQdz>oWk58XVKUqL58n9t}UcW5v1;i%%@&;4lbOD#S?fk+Hp zR7G%WPZncv%(q2r&Ag;GP41l9y0u5x7oB}-jzx%}-~0pfXgCAj>v zMIOdow9)~z+ugXq`Off10Jfb!XC4ywOQ2t1ktXa2HAE{hPJ-1c2&eK%O@GMWF` zw_GD|3)*ELp~><_aq#(*hyq)_Nt?bW{yQ3cv0l8z$D*h$b3v}!Ncm9r6XA@ywONq@>P%juyKi_Tt z?IY^d>iRP<8H`(o+w-~CfNZ4dn{g%)f^uzJ!|V=JbHZ)|YPUXm&-2Afz4nI$O5qlq z1$3Mo6|DQGB+fM4f61(Z^3L1h#nAS$ugvf**jLpb@HsQjaoYTzJf-1|E2M421#+0N zw&qch;(6!`sIvE0V}?YW&^DQ#)D+)ox8rNE@G1>~)z6SCEzn@_I6GEP;vDPNyCB0A z*evy5eAZuFRl-AN3u@VQAz?JyV0itykusteQo2KlGv4$)Er}f$`RE znjFHcGaW;{Jy-WR*Mwwfr~CNwL$bsg$-EWEkcQV=BQLwP@aiYg6Yu-{#w}pqzC&y6 zzAX0dzOI)*7t(TSmeVQH_5kj)#xu!TFzYr}i%R!LON;2K_cC|t<%dO)SfGKy{e&ou z*ka6x#deyHms2k6$9CXJSA#6IXY9i^8e{(yJ7|0QGyrruAhMoR4kbTa;^__eE#n9S zkO3uV%cR4fMVr1u4kMuE>oOsZffDR)3GA0_uz{#H{0DyAV@Do5CyzDOV@oihi%<}x zstOGL<@)%^M3!GwQ$SNEF^Gdd(xF+yUy$N}h+wf)*C_w^$ENa2?TF#|=rvSr>ywoA)nbM<%<3>H z4ujJaS`Sx8b%|K)8imo1pF{@S%)n@pPXm&%a}UY>MagidmE&4D)cf~xS^fv-%S&p@(tGtQq=z-necuAh+lU_WG4Bpv`hkIif)f5t~P9IVu6(p@vK~SboBY16haVYf*;t&r$xdRgiZBl zR7Gq$CYQ*cSd_l232&_qC9ZIxC?t>f6PM~alCb0HG^4Yi9vSrvDDwZZvn5eHR&jgn z1mIjTSv^1ML;oL1R~Zmx*R)CLZt3psknZm8PH6;*rMnwxM7kRkmPSGv0TJnL5G0np 
zzkS~C&;Gw>=bSS$SIlDb?+q)e%YU=YmNv~YZy&7&fFC%Q>3ZA{IaYQ$!_FLs+|saD zaMdbx{lCa2dcY$|uIYhCb`~$bRw|j5o2JN>Pw)V`2V+aa6q2b<@BOr5SrcL&1?f{m z8B*3A)CiV*BjD_Emp?Aoc*4^EJK%O{TqHcQEZqC;RiFC)LXJv|VzK_4eJN$+_D63d)JlXlxQ&!t=t`wF;6`!!8&VJ_QbjxW zMH1m}@DwUQ+LjzY`xcGOfM@arYyfSsi0nNpk>`~ z%i8r(iTLj#F>`aSW#vR#U|gv`h-L~@6?tP8ni(3ian^;C*0FvS3Kg?^ z-7qVaO`C=nMIm@qez(r)3!GWAX2PojznEW(q_b+|g@{<&Xu&@VObIOD`4B=z13Lk9 zhd5601(x6J1sK9)22~lG?JK_&x%yAU55kUP2AJQ|o57TZFU?N~p?j%jFwX}@V0tPZ zHU$8ySF^H{kj9J0ABU# zP5B=Rw%pi7RiBJXbi7`1E-T5IkRca1lG5FOzHZ(DayU=i`^?ZSl)HB5^yg#PZiA3E z0FAx74G*WCbFPgQ=VEi<4Ekjc?Az5WbWQEiz6X}J%2w6PMDLI7J#7=@`zhBbeU3~o z`kncdW~q4ngX3+gd&SM^o3j$MA%WJh4uL0u#BKM@Cz10l{;(0u{n#VLes%9^Ib0fM z+?2@8=bLb>W74(q5SOvTFaLfa2`*UI)O876>pmbA?eZZu@m9?-&XG-9tqoERR=>w{ z|HGMcxvPq?5ydjYt9dJJH70Ljj?KM|z0eWPxqDebYwPpo5!qBXa+Oh}4?%|{yG^X@ zC;N-`uM}}z;t#tC&zdfAnlvj8%ie8rs~%ZfFh^6b-nrC2|8|%D+hjGkRk@~fb>~@} z0VR6`)*#%>69-|VQlc04_s~%T_k%x6OVITjzlUl`A=VIymRrawtvBj~N~U&KC^I(8 z<>`z&_Qww7kE^mrqwoLxAXtX6p825|i`~lC*M(A;=wjCbi&cC3H&CyTC|R)Za$q*W zmC!5cE=~Oj@O}=yjUPK zE{1O_0`QFohROITJ{knMHU3P*N&%RL_K3(@Jqay0iIvKJz1NcwutsTIT_4j)LY4ff zWMjAY$G3XtlxiS(D4S4#z$iU%j9#)ZCB&rgje9AoFw;;6vbulSgj4bW!(pntirBmiqE~d*c;(eI)sUQl=y@^O8dxv=X+T&HnHp>^c}aaXO!nUfy`78n zErOmlv(zRY(47aNGvgIwWdB|K@E@ADC&e`OtKSSddDMPss&k;Ri(!_ia3Rfp4~%cLtbLdW@kn}~53e#v}m$6cYR7ZTSH zSzm^0?(rsS-DOHuF(~HIeM78bCp~MxzsH(-??9QEO-aO5YeZvgi(Rt0wnTvi=kk;| zDq~qjI0NIri4gyh8KRMxFxdOHbJHc>EBsMygi;{rJGfJ5i+`P|Prk@w;y z&2z1y7bgcqvq?XIfsx_=7-m3K4R5So*MR7-^k!xv?QvSxx6c%)@*%dn!~Weeq%U9( zYgfPIP=WNHgB-X^bcBg?@XjT45yt_O7{AQ9!}DKx7l*tuQR^t5)=K1uaEp@Xg@PI4 znc`g*?`GGlm+&wae^gj5U=<6_vT!tL zXu!CYk{$3Jxp&zu?VK)*aJHL*bsJOr>2MtMBGvBx}F*y*>6JYlZkM z+K--ryjEr3L5`$N3npqwN~C&KUm4lDMKB>-6ws~yy8%;}T;9+hSw*F33bAsElkcE- zrhW=EVmGRjY6RUUb8=V8WWMR2x^miFcFVfK1!~hCOEdS__|HTXGcm1*HC?E0UD1wd z!jhu7&LZ1QvIop zbKqQNa&X|=7;0IY?y{h}dh^?rB(#T*C`b6gyE{bHhLaEVcm=&?^Rc<-e=zW@xST5+ z!Buia@gn02cyAVsN`%n_ljB%(LAE4D{11| z{gIXA47a$t>aBZ=_ygHx`r4gDi^iYEPP4~~+9%aqp3=p1dE=~a*Iq5qN_Pe!*!m(i z?=x9qr@6sr_)iBOj5f7e55OIUGi>Gs11AZ9X|`}-^}oLz zUoh-z^aIiyJeLRlQ_18!G-P6Y4u`gYg|G*?@Uq!yZDZ zzk44O4G0hKCmrSHMRznMl5E9jEF0)$7m?Gg%`fzIJ4ZHHbkb~{k68}h}_T2j8Mmeu<|2 zp_j}IV>1)|U4dbuxIo71+juv{SoUP3n&+9Ngnq^}`Rqi7crZ_tb_ml1cj^a8%>P0Q zR;@=^Z7nuT`U%$jPv`c{7;^Ow0&^HF+La=3FiV=y4amtT8{GCzvtTm+UfqWv{b_bh zu<=uOt9nqZ^-adxQIjw9l}=2;k+Zov@Cn-)_|akwWwtV05=_H}hKru6rASrz@Glg* zu1H^P1h&b2?=Iouvb0^Qr0?(=jpj_h+qZp5#fLZ2MPQJzhC{IFeJH}@&gQ;%8{VDZ zP$sZmSVmPM{+JnC)Lapc~8VjeJq$9O9+09ZKUE%?hVZdPu zC01zD%WTkS>o0bw2oRuj7fnH=nPsXYiUpj(bgm+oo5ml#^jVLb+yRj3b1g0 zNuuqE;AYwjWloXdMR!h6X+^OHtwq6QLIQQOJFnI-iy;liNIhybUJMAaNA1=VBX!WQ7$3%zgjH>wI_+gB0~+V>xmlJ$wpey z4zne->Y@B%+WxlWIqJ+%JlXav{KE?t*jXSGCp+#0+oP8p9UcHCVP_-I_Ni7Fst3S^ zvJ$&evkQ2uB7c8Sc<1usrF|}%esh2Iea61HnnuIoZ*BJONSE2$%69hXjhR8oF~vfP zU2KzU-f2g#Z7P(nM)I6Rgz8jPJ&kSl#ttn76zJ%J6iTnEqAE?Qrz}KELr{F8+#>(& zV0{ne?4C@&s9ydcwu}|eH=m)Rv(jk<wzyJ(v&dNRVf(;_Jc6bE}pHjvIr z==x^{&NHgECn~G6L}NTjqSl!Dqqcl)^KKio{by7Ll{I!`7K)$5hzbM|u2R;Yd`q;; zBT3dew-X#ExuPfg6`6~6a!22sn&1D!@Kdq&2B}_pHkyESb@nb#qLhr4W}V;uXeQw+ zZ=Jj*%4vI$4}&F9Qi^F%rU3Kg2|>o*h!FE`BD|!ry^3)X&byW>u7}`7TUBCDY{DJ3_Z*m82+Dw>`< z(v}VA_5EF0#xNF8|4BX<1!>?wl-rmLYDe9M^u5Vh5BtKu{a@|G+f`H%Ptsp<81Jzm zS6P5iPcf*l3ZYO;DT{4{Ohq7Up@-|4{5V@zrLdNSAodZ=3juCMaBkQJm;#H9k|}aisPU=1OhABb za8b0%j8o}|IK5Fm;@`!QS?7@=qFW8BYKed)TNuemG9waf!)L*D?bM+>09~PY&^=inIy(%Py`8${wOp z#vEvMiaco7@i9Gw0e|jy_iE?xhjGI*M8(`Uzt7u_4h&n8ItYE@95~u^WGIzDZ!OKK zGQ2M?{|(%_m_}D=J-~p3i@@svm?_{Rgv3Zk-Tq}Xfcx9_t1gVIYKCOk*niN=UFP)e zpeAQWN;4cSAu=~c 
z_3~lFiJ2ze%|0F=9|x{9cD?#H-#fD}L7w1_28uPI%yVVEH{hclp6k*>0zd}`D(iL?o=dzX^&&Ehtor9oqUPE1iXy07%cJxaD0li&tN3+0QxsU-a}JNUvq@`pvPZ$o?GBOOEh_918h?trjA6y= z>Yg6^7q#{sWup$+D8*@5?F6Ah9_iXYoV^Q4?Wy43J}CguQ}b@#{U*0^+Ekyg-W1Ye6!J*or~ zX_n)(dq1%E_*mElLn^O!RdA zu9#O89j}ViVH}Li_yZv~KDutPv!Ko6+>tN5GMccA4&61I**e)h8V}{y(q+Ce9hswQ zy81a?&brxROr99G~9yf5v&fGAkyBV9Bh_kwn3(a4eaCjXQ4~HwtXB`_$|bdL$pFoJttN`b zrL@a9G35c04RVsKC*tpro>3QY0pUzQDr}!NY@`(A&S>O?m2#W|oCiMtD*Ph@7G<^f zY@9<`qk#U~g5d!4(4=X6@9#vUsyj?;aDwWh0XN?fN$oTR^3k=b4e+3JS$6l#$(TdfK(#{wx_nJ9f#*aoWu-$lK$$Yy4zTqGt}A$*}nsG&=+ z36IH|8*B4&jxe}K>_`#EKLazgh%Dp9y`#lvdC&VP^3O$)ve+%U*(d4^NANSQ>D@Hu z??Ds%jCH~v^2-cv3+ft0ISi?+I*U4LB9K^BQlGKouVhG?%-2ISl~fbO<_TG&+c=dQ z(Ct@Wvj-zsVX(P+vWx*uhn|mvA5bC%W^G`88rR@<&&~uR@Vi>(zT2z!m6_UMm!MP6 zFO!Ph?Wv$YQ`b@rteN9?MZ?Or%{jN1RS3cV$;@nTOS38~2^_@>;LJlIe$QOkn@Hbj z6#v4ot^d5Bi(%xztLf5;^2Yb<-R%~)YGBJ!kM{p@iT~IC9y3ts*YFAHOce<&e*7sZ z_5{R>agq&9;McIDU?7bto;+#cBbcg#5StqIyna{1i+g3Nc8Y4=-QN){^^nWA@kR6h z$mD^?2@$5T4v0YK^Xlat7drF1qRqybeTHIda){tbOEKW{2OcY|g+>^YR{ue>EId{p zzC6!zZesH)(qd{Xyy$Q5soK7(C^Nnz0M+R6ElpAVE%ejCdjD35cD^Z%JC&fF{AdBf zmst6c`l%X^9+9M8|Y`n`40#{rR9}&8h>?uH45IvV%~Lx$kSO#y2ZoBHjJ)m$S``tJQSl0oZldBDtVRH8L%^Hn zfR@r@>p|W{_^~r9$R^gUTj{fL^IY*-O~8~?==jvNRUXxw_bOu8mI(CwhyG+Q9(0Jn z5^2&5=y)?!VtibGD6=S+UnI*=@;hwgKLn_GLa;*vBdkayI#OH?3gAkgRzMn8g@lM* z?`~RBc0pj+0SMWZ+V5C10+N@wU7SD^d+%W~))=(gV6s}YPsUvEuR-Bu#+W$yLrqnSu2>ez!qWA(9=);G4)DwB$JZ5vI-^!KaT5#XZdKWRb4Y*2{!oN z{{-UM6ghZrdYw`xaxj))mz}>xkfxyvNrl38fQQ}iNWlyRp^=V6C(rAVJoKNKqgZv|9k`6eKJk2N8Ppacdd*-ylz)QUzBSx(&o=zA_dVKKiU1`-DYK8?7vcF z#g=_HQES=yZUA>M3Sebr_1|8V6#v^m3k$@***7Xyl&JUr?~d5&CBEC~$WA%0qdY;& zf-ee!4IoyI5SE1{Ic{9TKbM9)s@`lpXQs#dWM(Psu(mv`N0!2*(%}GCymh}(zF zU%dKRvfTxxn|WZEiUR|f<_xBWty^?kn7V+o5;rQv@j9jjVqW`7Ay@q{>`4jI{BET5 zh#lgs!d)T~hsR+eHcl*#9(N z8Zs(6Bv3Tj7E5E78YgX1k$*tl{&pjue3v_FoN#LX*{uS*=N=uYpnY5U8k4-%SA0}u z*s?z=Rb{cg#@bC&(eC}Ed)OEOwLTUWfy}y>kH*8XwuTDxScs_K_`yDiz69CprCiWMcP|2=#y0u6m6% zYE7E?vmUt|l|8CBTaIv_-&dhedVf!)UY0)IOP$b0`&7GGk6m@I` zEX7NkSn9vgt|EKbhQvdwCa(VL$ba0MuHkYxlF^1KK!9->x{PF%%cb78k6NMKMZVY& zD>3Nmp077k>GFq#F$&iHFDbBW#Q~<K3a9%frHw*`q}U1&8gY?523}R#=!Yl64O7^hFyysZ%W& zqt%kx*mdAo1mr|f)sPq=cc*w=U#GlfvnD#}2z=#Avx)a5Ie$)|5jDwGSqE$zu!|gE!FB&)rcY;0m!I9@b2{;)-YJR^p2#|c9 zmz2UNLz?ixhjnE@amZyZxkl5xPac29-!~8HU_O{h+2fpdk>>c-bR*$0!@XroVaX;; zG%<-G^$*(r8}wZ3_;oczXUnl2VXk>S&k!9R^i=g>p8DHy@^jGD)7F2v z4}$=0nQuF`o&aUU&kqN~>x!>Q(w=6Se`rDyyWAN=BW_(4inI$HH5Uz{5|OXbx{7N0n)ssK@={y< zrq-3gy{f5rcVvdND4+DaR)$*d&Cn=n%U(2WT6Aq5W#0YX-D+@hW|@*+`G*u5^}t;Y zY5c|SLcU>@WECfi6to(ijJi5@5?FsGed# zzJ(HhuZ43CXEcArO1=H&P2wEHShECs?v}5&%A$TT761u7 zYn;*^_`F90TB;7gK}DS#+DAJ8lME5O6*E+M+`*yg!mY`poAaOt=L5EC z#SO8UG&G5I`P#Zf0mow50BI*(;`Z(6&0$x zZoNmEzxIKGT`9i_cG_g-xY~nserTxxa20xd77BB45mR9AqSA6B5jKScopB0JMnR`` zBVd0>*VSV&Xo1=>Sh=3o)NKyMxf#nHIpPl&Y(JE97LYEm;@!FjURRg~_GrU}*m%U* z#Yx$&&MxXv19|o2c*1GRA-@c7-lKB5z)i2Xy}rlT^K#T== zusX8e)LmAI-9`!OA4&TQZ0zXJiZ5zx; zXz}g8BnQLtDvIhM83=LUZvLc-j6$vU8ZBQj{R=G18;Lb+a`Kt8|9sLQxM$yz+1M!^ zcr2mq-=lKs;h4OHMFm*liVHi@e>ZYy+ws8mJHTx4YuK^U0hy76Mgj2keHOqx0wfgx)N#rE_~x_| z>^O%jw&>BQ#**PAsz{8Ex<)TPmHFzQ)Rnho1ST`^z8m<@XW5UfpE~P@)J4JNj)f3X zE?S5591r6=1j^xC>*#d%pjsma;wmr1$cAN#9yataSN%;?&01~=5*?MDz;?CBbYSLm zT;(RmC4RHti~XkG+VL@aQh&z7*bFxS4QlMomUqAXQKM2F{<6m>((cC^{8-hZr@hoA zyKDz(bR9^7b6Nn!H}ahWg-9czVZ(tktVdYo^$r zJs~|pKmj6|EUpsX`N;PKVtd>gaXVJ@TC`=*`-_p1p+UI7HkOKrWPzb!`TUxDbomP0 z|06hp`7Ag)6#%jor;)4lfJVS36X3nPzAwcfyZO3W(!2rNY^miso%07$k(UIPz2Y$~Jew|gd1sNv5_vof zzNm!V@}xiz!2AiIz6Tjax~ibhoa>oZSRdRorZ_xC1-*oN5hF7e};8OvG{JdnP(v_CbHgA)6|}O}Am&H;}}|cG)ViSmPNIZn<(R?ma4e 
zgkv1rNCoYAK}|JiuaOOKa`wMx_E*wOnB0;CzuHk>G6VfM z_XDW6Z)EwokM+vf%eCM}xLfIjis$!1m+k)`Rv^hKXT%lwEi&c1KLYSl3*xRzlSykH z-m1i{hftIa*KnMx6`$$$Sx}`)+INt_x&sF1-rYVxb|B@Rt%UXb=?3Hkseas|%KZ1R z_2dO=bo<+2agh z-oQ~Uvm~StB4q>B1M&J(ScV>suWiWl0a8t<_<8Un@u-1h4i$;g2B{5}%QMbQg6}P5 z&biGii;XVL(*RjJE(*SdT@;?QgCCWcfZLhn~&$UZi`em```HWuv*O8Ni=Q zsm>Ve)u}^_DRWMZxwYS>V5Oq?VEuHkNHJonv62$tp3$CH5l0D_`R-D_TGXi((?_Te z|Ei|QbnYUkNwX^6A|S5uXWcq*9Cht{0_Q9T6 zV)f9b6Xa~7kTZZoH;FJqB{km&i2>(3UHUVNXsH3d2v(j}+?K!!dD-1OwM>R@Hm1I$ zQa)F%vVC}-z7!(cO{FNC$s~UF2^fEZE!mOe^FQxwT!P3X6Go<7fnTAB70-EPEozo3 z$#`QHfYR$M0sK!@t%6n5S*bI&;6?Z)3PI}nx%3=| z@s~m<(-?1_jQ0|dsCf#4C)#_{^IYGXHOrj+sTud7~F&~`7##PY#ni=KJ&mkPiv z1)A#%{O#$|k+lzS{`C8K z@_Iu5lufM_`zLXH3XBc~3-9wj_oz)G9v_~F?Fx@A>bSD+Pl%u{qT!l}ED*epb@Iz> z{aSP~5|clH(m2yRCyf$BV)Kr$w#6_&_?gNNPNephs7tW?RTXdx{`a)sDQ(oE<5Y(IvBcu1Hs=fAS4 zeVyt^+IPL(vOnS^7Se=LQvoA}{LpsZf3QLY0xML2uS-oKQwJaK+e_%V%}5XjUN%bp z0AC3kiKr!?Vx*4i$DpsEZ&p+ipg$IasgD43?)Tvw$|Lk4IlA7yOKpn@(1JM^nxFeP zD2PnX!?*`D<#M?Bl%<(`$2uQNOTN z3M&kvBgz_55y_!Nr{N(~iPB%aMU(xZg3PSB@Tv`&TaHg|XqBWNjDHq;u3zlOVuq;E zU)J?n=YBX=X2g+Y8aqco{*2YB>?+rdZ(<64L;2jqzODR**!UvLBGUO2Yl$IT#cSWS z(jbZvhhCTJi~PuqylM~L_oCQzv_QdwGx_9=UzeRyjj9&b3@VO;NXmJi^wjCYoRs2! zoMi8$FOZAJO#($%&|9l@(ja`@M|JGl*3G24S=6MawA%?dEw7C|ha!f@2?+yJAQZUz z<9b%9s`ZgEJOlh4j4}?`w0(7sHH%;ZaB=`xrC##5)ie&&2iOikn<@Rr4zya2UPV{<|Ur z%!c|J;{yS3ug0JID5*ZK=#c7}fD>4Ia54p?DNOMYZKnz~3H-%2UN@B=4J13~V2Vh{ z(50z8!7bv5vM`jIamDWd{RGG4b3;GBWyOwo%Z3xA+r6nm^Ea1%!G~XzJLbzXStc{? z_>*)5`W@4tQ)NR2L*8Q$=Op@?q9ba-?Hv_Em_bh6OOHBjg{lS7LK&)h2m`Xj;wplC zbUBhQGD^|M)Q1zf<6D=Cx@fl$!)%0@?L0bs{A>g*CQsAPk%KNeT6KKy4<>zSMe52J zjWEsH(~5p&kk8LoeB51<7dd%LnP|UT5Mq%Qh@@0@UfIzi`2+?aB=ziJ@Pr98%s~~A zHxL$bSv%v2K^6m{D&sc=6m$RZCXf2vZma?pn1BzTKfE^-D}hLvL+2jB^iZkEM<68* zTzfQdJK=LV4Kg2KV1oQ*ANX>$oCx2@Gyf|TC&Yrs^pBI}4S0Xc4%)qn*|qwntDCVy z_>IJ#bP0}Eo5wBC0lQyn&n4lR@gp-2E|r3IS2b*i;E*D+qGDqu86!VueiB(@C2LLQ z%BMLEk?Sme3FkOnteVPMM&Bkh4Jk68xeSG)VZ*?!!h(DpZsBN#9?G+%J8fl7U+Pby z{G>I%mcQ>%v@A-z=;{=0h(t}rAmwx)d_IO9E#SSML!iD+ZAsna6-B44WKY%Ydtl|3>MZOMr!tLvXX~UpSRht^qcR zd~F1h34X`M)9Xu8yC=tLtPZt5wzoj`H8nGFl4Kd6Euz~F$Iqxfe47J}7RG@$UdpC; zHrzXzP3W2gvB!O_e?zGSr4|63AbFHGz~UJ20=|(R2f1Gu$@n|qW!3;__>Txi&Xh`8p63R*}e;3_^>91U3xM?Y(5b$ zFn$F^)8xV}Hf=qYKhG@eeqbHJDw}z#SudM^u&0!3qmh&5q`^JEDLCjz*T3^lMP}dx zzIfG|F0olyz}YbWe$IjmPxAVSCPaHA$-t^8(cK}{XWx&FT?v$t>6Wnw`xt*mr6rzF zYE+L4F`utY)!Pa!8rYw)+u@X zbc%DHA3JVvlbn4btEuoKK_*jl`Yy{2b%TLXyx$6oH2M~hBD}*%L@b5+Jb3o@%^cVq z1(op9X=vvvTqW07hT&1k-e}76=H`F@JQ6SO*m7M)7PB|OiIGs<;YPZc_GX7(gMc1{ zfay54IBqSE4KMo)puP|D@hCAJPeE{#Vpp@lhbQTBeG1So~4L9olR0N~q$1*tj@ zxCn^t24=zO584~BJVO2d=zK^1;f3VlPDV3vac?B5KWddmAaq$sgzqE7^trv&{9bQX zt`5CsUZ#)Zhc{XvT&@wf%@8t6N|fzeCEbS{ZwEZtN(WtdIVsnDb)BE9Z%f0akbK9W zwS?3281wIq?Tj=qS{kg5t?YN=rzO3{UD#8O_yt1;3oyD8q`3_YC|XBs)YQlA8?KrS z69`2!EIG}Hn4}*Y2;Ud_Z{DGP^EJ>j&@C&n(xmHkksF*r$Yo&<-oSfPic;gw;J=(%Hm5I_guSX)d+0yd-WZ%QcHq{t-1gn+$ z%|l>r&5y7%#|JxeTX?V^a6UwdcAFA>w*w6ySue3Jgyh+TRfhvgI!*29x2A+SK z*wi)a;*V^m$>)S(R2|lPLTTNNLPh9FX*e}<-a;?CPi2r8FBGh^5$wXIdV{IUb87Mj zYyD`m&$;Ac@V10A1vtmDTGTA|2S&E$GdA3lGBvx%l$VqR!oLZw45ijYdAIQf+Q<>| zTG%!nPqD@)>XVUSQ+=c(}zcFRbg>$`~_m`HyiYQLSR}NfqIR(07 zl*ZBV`Jpui=(Ogyg?;e39PG2pOA-G&QLyKg5xNyJXW{e8-S1?9`PlMX7z19b4Ay`O zL8%qI%4q^324z$mU$~dku!a|)!z&s2+8T505?v{O&1I;W?K?Nsr+5rQ#{+(tN{op} z1q+eV*UL^n)yzLiejx2GqF8Qiq9>~rgfkivnaO>39i}@=$gelc;cl(_a)mla|Hq*H z;U|sAbt=hvcp>Xbg&&W?#U;+SHRU{^C}gkNLz>Iq1)pY>?!8;-*ZR~olXYwvVjZQh>ItRXE6ISX`4e8=*6exNMkGJ}8 zsV*6QB#LHldyeNF>AZiY|0@mO5qq}XiFAjLl%J*=;mGc;)l!>6#8X}=2bqeH8&<)6 zW(`FFwMrdge_j?@(6OCa^v<&=GNu})OJ_w+Bjqm^$0ts}5D~*~*^lsTRfRlyR*Vqe 
zwKVA4r5$$yQq6Vwx-^Mt(Ku!dWkiq$KMHWrIa4U9U!!S)*>_wDmUaDJ^i)w;f4~c5 z)S3i^PIT8`KfJ9_+GQ`Xrh@nhF=sxH*pf_ST;h_VKrq>XFaSc*1Igv*j>@(*j`dT+U} zH^rMy@3oYczO=ShoR$gDNsG#`qbSa{0B+;69upor#tPAD!D0+rH>-=xN?hN2yc-eA zIbYSZdj<@~l85V^PfUGS(TYCpNqrE)uIyhm9>|ywugNWOMZpU2qHFqnv2Mj;@A0CzI+H$Fy4h!q#vI6iyB&-XtwQe6wK9QpKb5XcienRp!e z>c;cPy=^Br|LQ1jc9o8$My`~wwGLvZYA3{+XhqXwk(F~{N7P2iRKdP)7`W)EyP~|) z72z>t70_TtuTf^+dN%$`nNM0}2-z|aXMkG0G#njo{4|1qVQm-J%L)_lGKVq)kOAE9 z0RM5Ie;a_!mtQUpfXB_-Pe)dqo=?!T`xB$E`Wc}v`MM## z8sen3^b5WH=CsXj=!0-tNR0yfs0?Vwk4-y4s(jwL2lM@Y$tmfND;psn>7?Fh1T@2R zPWO5YQ3W3|75#Ugn=ZjOnG&thW*^w<;Y7B%5mxgI=r|_4if!1%!aoTow~ZVIvM8Aj z$xV=Eq^_cpwERfFV>t0>NqW-KKUw^hHu)`kK2lkcXnc&67$7~cLhOp+P?=?F*yB5t zrEQe%?ra|LT_zMA)f{l?fO$V5 z`VHR6m^C)o^VSJ*zOG^a+U9v3G5kHRB!oenOQf4!*J&zL$1t5XWbC40{o))k*MJyD zVVd)&>5kNnUfM+Ti~aB#KNg%B%414X$Dj?1m+tH!&QPedg8uuFB7ZMW$I6EV+Jft~ z-)>$?aZ^IsWTEQNJDZm|;J6AQW}GPH{afs}wtIXB^PVXpG!Ek93{pnhnHuIr&f*&o z@GWcJ8R+hHb^3=egtGV0WG*ArolWRT>WNy9^nhAl?7ySwlnuN-!GZr?K81v*2t9#5 zJ%T!l%0Ae(F~koc)towKG{y60cI?Wlm*KVNTaNP*o9q46FGCk4;Yh=!9Ee-I(R;Hk z_FIh+Gm%DIoV9uMY?#+rELG=eN2Tr8E^5-PjaYZKL{ukgoy-?Rv-1>DHxug#jT5We zX@6!-Pevy(Gp$H-P~S;IO4t+}+!P?Dg(^Z}iT2VB0w%K2^x!e);tD^uT zi37f%=_Jc0Ps>sk%C|s)%Rr+NfrZ0IKGl*ondYQAiAc7;c6cp6Dd&qlgPyGbiXGn4&;XPf@5q*NV)tIMw2>h;UGj8EAx{o`tA5?e2R3pcS z6Mdo!5F`7ns`=7_u0-%gRe|gyrUFLLXPHCq1XS^KpCU<%53agxlRi*32WqvJzq4(T zU(5wFZ^6se--#Pb+=GAIS=CUGsxnJ{rfd6>h0Wg6NEUuRTC6IT8HW%quK$F?lB(hLDfL>Vc*2jz{Q{VKx2J?5#wegT`Py?z#>4f+CAMB50684rsylSE- z)lP-?@?bjo2ppx_Y|3cT52}yA9(PFV?}seUJYf(peUM_*Z(MS->?tY%6*e~${dicZ zJxs-)@Xkx8|d2ra_dgh*yW|Ac|or@>3l zWS#>>uB;mR=3T%4*2lNC?VTZBjx}}@$qdD|G%6&w6BKv zjJnzpf{eu>eeO9rR;>hSgOkM14w#s;bVb>w28nuaY~<818*J>}b6-sT+7?y~UrER*dk@wrSkJ?6z+zeF=oQXPh%KZ5Stp+=!5e!<9Zn^L>}m4naX zpoj1`)Kh2MsVWOMFsnD(Yf5CcG&WM}Wz;tm*X~1|GSDH0&!L)i#f-4A#IqozNelWf zpcnYuNe1{X(cXf_VNXVP*lZ9LF5o%v(i8>*!GIM{<>27Kole;0HxAGrzRj9HI+{tF z=V3|hZDY_wPWrOfQiO>@EfQ5{NRs(%KgKSME5XK#*I@hp-UiOp=x+x5hxwSC-KBZt z?lnV6_*C5SyCCdGatif(3x2KeIJrhjh!v@^eyoE^n^L1ixvNo~DeX3yR;cMjNlf|c z0Q*g_1$xO4iD{LC9pegWwlqanyLyaj9`?g@SX}5ZZGLiSK_!6}2STuS8oA`>PjS#i z6)c&VgO$b|9r>DDbIobinY+snp-9`whOmS*A?BvMY+2F_F3J19#we z9^Dp{{U`TQo%c4*Mp9?uFGR~D+aRO08jX^q<6H*!>2Hlq`Chl##z>kT=WZhe|3}nU zg;mvc;nLmRjkKiF-6h>6E!`p#n=a`Fr5mKXyBnpEE|Kos>#XlT=XuT@S6s}!=9pu= zHBNhZvk9R+kI!cLPK*-{Ie?U)6NkwX>P3(5I4R(e;J*W*a5g^zkF${!z>WiBxqQ7x zzl+^`L%#P590RJ9Rve(zV>WK|ibRC)Q;H=M2`{3Us5z{aiJv3j7`|?!W;r4&{y=8`ZHgHUxjTXjDk#xe92~MQOct=>mC=^eQpcWt$Hdl3U?~Ok#C%9Y=N%UWVSlQ(nCP zC6semB7*1nN#o+pOm9!=F2`lnLzrkVzV9gTIkl%s0Ngt7;`h6<@YMhYxfhttHdZhl z#SbRI{JE#bi$dR*-|aRu9Ist?{MgEQ?wxencnxHoKYVCAb?Gv>hl&C6pG14UUqY_L zM|cxS0cw0;?i_q+z5`B(y|F#7+aX`ow?Le77jSbOtF(}3;*A2?ZP!UL%x!)+};qWI33o`yPF6(dHy<0srTY&z$_di%XJ38|{a=;Pe z-t~Up9ygKNA*D5u&z38Y8cVu81j;TH-w8m1r@7ibx(&M7c|iBi8jAKNNTX4u-DM zh5HkB^;N5+>Em4O``?FXegO(d4D#BT0Y(vu@{ z@G@qEUz9ZVlVXH#OhuM$5+)?M+uZAZ z?Am)8*|!IKn?213ZndpAK?l~NUSh>BDwu9IuETqD1HT@<_?TyBt3r*+x;-^8~2iTOKh5 zDdd%yvKXXm9O~1@Q?v1aRsX4~&MJo}0jI<`Woy|PikiFPakd6c!2?s$;(bDENyy7AY zYYu6plZ;*`|L;ta{(kaT1G!v&@;Pg6bNZcC7*=^fsA`fh>2ohNqC2_{MXxSWClBLx zxDr+sN%CGb_XI8DqjWp$%o~Mub+y#F%qVu;odx~Tl6u^ihT~QvDF6EUO?Yk+`kzBU z-_Vz4i@fIRSmuFv79BD5L7pI1sa}*n!|e{q+n-8^W3y9vq-Nvk{^^|ae_9K*1_A!= zi}rwAz(DAcdMyS(o`Oso4OiOLg14O)nojiwFmfPfS-pnc05XmCe}WIu(LTW?Fk0Fb z7QcfSPphtwSa(;ksgWtU0B^C?Q}#8Thg(td3mUAiL`9T6%486V(2yoFa{!9jc(5|1 zxd^8>%dU4;yoTslT07&aJosSCOg!G6$f|Eo(94gc>U^Cl|IT8c8SSbx%w03G zmm28GM`iVn@`rmZrKg{eVFzPg_PP)4mL1gcNOO!f@;6;|z&YKGDyrP7IO`%|Q~&)N z4I(Lq4;=8?7K4ao{=WM}6T@&JktVh887&w$w_Fk21BEHqjNQ&DC=T)Blv;VZR4PPJ 
zg0Z*RD@^rKbOqR1x{-2|vP||XI(C5go{OP*us!m9<6egUh!|94VE@gmR`jIMyK@Y;po zGGAoE!-mI1^&XW4)8^SNY6f5Avud*?eNyB~Ww=S}P42@%3(wC_k%ctPq>Zb`BsLKs z$ImGw!SY8^EmXT^!FpCcn~uBLjY%Md)y?Xz=waDE2(NGYMT4|;V^J$-80M>Z3`8OwRvk@tua0aImM@3WI!rY9HS_cxWRfL;Zn!OEmnk~MCSeMIhj$$&)=eW8#f!!BO7zhxA3!(lz@TsfldHP z@DB`^x?uFIF^pb1R_Ig7N}30GOglz`q90kqFkw{Cz{G8jyhbfT;uq7Y$a~QX9l@%q zKWMJI*qw$O5B=$*&yaiF$5AkMqMrb!34_dvNunaeOixqUEKnbpmH>}b^$M2cwJqP@ zV|(e8T#RWLBjl4H9{JaFlEQ-E;?2ql-ap~waXpZNI5`T;AAboix86BbCPewi35}Oy zV+kazT4u=&{V~?m3O^}60Djdx=sL|rd}2CEZ*PjI&VHWd%n^jzUP*qfLG&P0psBpL zl&o&uOO`*bjW2SUzBuV#T)=I|Al3Oi>j>WVSMOJApS9JR?Auad7Za1ukowQX|HME1boc%f;d>}TtN4YaGI;QI!kEHN z^j#^hMNgvvvdp83LbcOXFn-Q`xr4A;N~k!lp{`%BN{*`4=HUyEXT78-f=YwAVh<|X z2VY$D61Y4s9+YgO)6sEbNHv%NP(JH>@0%uJ{ zk2E*5SsGF(Zr)3GVSrNilFiG=qVz7Lq;P<+gu@N)Yd*fbD*3+{pnz|3 zB^pQuhz|gv_@$Q4Bla%Z@Eh*`1a%O0#p-S@3CaVR-F(aB3LC3eIYySVTSP3HA8)^s z_8)o?tRLG`h<=D$xigV1UqOmUd2N%PW&Tq;pIqbk1#sIpRDAdKlP7Y9Fj;DUJcTyV zxT(&V5(y4RUWw#$^_k!^XSw!3qCS-GF2pCY9AiN#)#ZnHP44qAU{=lbgRg_f!=Boo z<#f@mbKaJ*SL}dyH*CE%aS5}5uJfsI@QLDy+Ns?fSOWMZjh2Xt)E zdifZ%r1FDl1cV9+erIP0qYTIrMVG!o)XYWaP-g9WGClboK8jk6j3g}BDB}tEls^E& zpLc*Ay^}r3PaL%yelzC=yORdnJ({vcTV_?FqoAq&3!l=NrKeF~ZT=ne+g*+q8;L|X zHMEie!URe_y~(qOO*BK>)24v<4!e?5sM9bIvIiN&&)vd3(Vv6QjV-0Xby(&vTnEPi z7U(l>GN24Kb2IR@ob$RWxrNyNqpho*+XL*j0q$VAO)7@>hV#7>a~2=zKIW0o@NAmd-RyHn&hhfCr> zX;fZ`k{ILAK7<4`a#{+wJ-S*p#}j6A8;s%_1f5>5*ZX9%){#Y~nLTDTZ%`cF3hn5Y zcWM8J<+Dkd8|kAVqTniD?@lnwd%tX2f|+2D*;x_0{T4$eD(JNq*4I^5iICS(`B~;S z#hI3SiURIBo>*ci;YJ`!5WMDB&c;+%$D1b(Y% zD3%e@1LXxI9kHs*Ueq@864-jg?n;PNgeB7m=Gro5C$oW&)intS??OnkxxLVKfIT4W z5ppZUZ{H%7b7?dTcCx5IyiYAN3Q>$S&5OfNhV55(Vb<}t5gK{@=eWP^d&2NFzU4f< zul^!jN7zYUm)xJkr)F*Lh(vh%m$}ySi_`}R>oqsQf0s!4H>HYxNBI@SIZDN*T433YzAJy%Vnb?NDqIK0gU4_NcEM%lKvW9O?~GCE86L*6 zikP9C9VEyh?g$?=8SLhoN|u;8wR+IQ30Wq514l#Iln>0;jv#XpMOtJP(TpB`BtNWm z`?}y&<@W>0o}A)^Sh7t;pyIFPY7O4NZ2WOI`?r8!Ppqd?_v_a8;tx%{0|0E{p8(d7 z>xj-iVxz~#xbgJkguE#6M_paRv1sSx`S*K#D~fBd{~HWm^~@l7_m3{k<2Ad_;A=Mt zh?yr745if1!vxA|Rxk*(5w!=R7| znT=!p$5-S(57uItwb$9)mof7DgT0<6U447RsD?oRCn`9BeRAY&{80T>={C2hfX};* zSg%Hf%ZM`~pI_ChidVp2P?0VE@RYlV-Mw9UI40gSdSo>WQE)ALk zjeDpT7;03h%CDR?k?nE=#|mF^fC=E71lX1YM`JmCoBS>?#l1>$iGP_{g3#dc1Ee#C zgGB_zQ8z`sM1C5_5~Fc@U%HO|P3jQNenqANd-_F^svzxzTZbEk)`l1R>1;-<&AVS8 zjgcwfVd(6FwJWg*KUGESer$a(YFIom`ieN0K$!1ceq5+4%Y4F~g@nD1*sbi`O1G;N zNBz>*;oNe&;4WIJX!l*uw|H^Ndy^4lPnmviCT6KDz&Z(0eUDUYOl)v&?)4WX+xume zw5JoNZ&M^;u8qfAif;CsT1wi-R55Uq2cy~EREQ$6h-f*~T2g6kcYN;Y7h1oxRL&eyXoE+*iaa`K(OQYcL6Y` z?a<)tpUi9L2|vl-4j>%{Cj_1+nvcc`Y#u_V08A2CRsmjS=04O-P8UmM*CGTi)i=_^ebl$-eObdyPk z{hZ_3k@5nWx~@r5tM;D;6~v(+Gq5La@IiSlOYhqdm(h~eAn877=@hOiqY^cXfISV* z=y$ZPhnMqQ+ZWD4GaBf(u{{70#DbrK2S9!%chF@)H0E&V$MY32C>f>&;$L793Vg8R z5QCNFr?e;u5b_iPp432qjAo*5WeZh3bjPYDFPawCb{S%X* z`gtGhK=sMCD)RK}4Nf$Bo-bGntkFY!?0bJVPo(amQ^Y;+kkbFkFyju?^=ur-t9`$C z2t5g~tC*~kYq$)hJPH0Uq|B-ex&Uf3lyykU@db9Ybf(X zGcCOS^029EyMALSzL4H{dPNI~FKyC+acMsuvW*6U1%Id5lS}C#{tWz_`EDdC&}w)$ zNRcnI9Ys6%q`-s@YW6yLc!X>QXstR+3MtR%wK0HQ#u$W}oiy!wkcH49I~B8mO+(dd zZYxsLa?!pRLuv-pTSWogPFE0eU`;mh`^Hw=9PJ;T#f`m%oCdx}&eAwwWIj7^20t)d zH6=JQ8Tnd2EWgmRjNnU(4O^MV_vaWAbRB%o9rH)&GDC(@1F^A?Z-nYn1c+ZP+N2Td zaNeYlhkM%pyOqLhg2*+JN>7f3mgjYYwpIP*ITR8Ggk8&n8EePI66%2T;xByHuW7Pq#QXSKrZxmsaNj2F8S2mvLT}^FZB7X zd%}y}kt80HwVGx%dH;x8SJ@k$pNkgAskDZe7R7O7*E<&=Ns$SN8~jILGehfW-R(!K`+5;aZH*1K32Gtc7CrWZ=Eff>UfY6=LH&cBAwsv)$=icIOB@di7qg(LksG zsAcsagbRlBAFT?jmHP%e=7>=SU-M8BqAjl;Jk#)|<>888(6xyf%8F?Eg)5TYAFzV$FWs5>s&BQ!vD`OF!Bl*! 
zo{UnCotgPtWFH_mJCUK6OUEwt?d;B>e?WdGAlN(2AOy=4$M#B5s( ze***X&!OyPogRe%k}33tVuNwZD6BTw9iUNtiSP;i#3dc0C$8Jn z`DB~ikS~|V+xif$vJpABAdBh2X_WQO-2lOoP?p<@FaG|^Uw?ybO7wq9WM?_W&wk@_ z3$3Ty{5@y#fg?}l)gSVP4lCeO&@A}o$pUnpbr`ELuJhyA5lE>f0k`-N4CyV26yYpm zZMm0DU$35x?xLHQ592#Qkmzz!IutiQo_59AayS%;R1>QmmwY9yAPJ+-lHGaJ5+J$BoLbWp38!bb50`3z6=bZeX1 zVLXL1J5s|K8Pz3(!?<-SSe7P_Lm6ymQmkv>9w9U6Kyi~C@qMPkypF0uB|mg3_{gHC zCD=Fs2GE@WMUZOWjBz-IVd&x-Wj4T!dm90>S6BK!^y_W_c zz{iHTiAO`iW}v%X;_d3*Hr27YoGvwIiL7V^tfc#jx*yHbZ9zp%LBG9}Pv@`wChDSkyV%*qXMBWIFlwymzs7)U0Dq4mmat&XQ;GOgY(@+J#@^zeE1GQTs{y z_zhLg_iw+gEZ#8350#=a&gPh?Lf)j{{QSrG@3j@aYbrlOt7z%Ick6Jza@NOqOyWpV zpS04huHhJ5Xrp4)SC;&r7Hb5$`eZ2J5zwaA$wg_hfi{WQDx;!ToEcQ^`X4C6xo zQPrlX>g~KqN5f@XIg)yt`h4TCMUZ7Dpq^q zrD-ZQ6+y^0!%bsh#wDs<*<56+v8v73UR7Ou{tbISDRxBoYY{)<1(d-;cZ_$^876t* z+Ywoc==K0dR9Rx8z&{@~Bug`y0wJ!kE#t zr}=OKdZqqgh7Di>Ae=E{0Ef6kswQvwsqT7ip;x6?LH8hg@t@WiY$=T3J;#vPC8u=K zX7Wzm`%x=)HY1rQEV_m9IbJMwkYFB&Cw>woAok z?*LOUQLoh(YEM?1P>a;aSzml~BX71w`#6rI_x@Bz>R0djpe;jN{I+-MWTzs|qg^BM z2C*uJ--nw@v%;#+NK4XpMlEx_5@Mcm5xMVid<-mniFMCqY%fe91Faz&FHd=aW&ryn zRq>Da3yITNs^QVr7-1d3Q+F;UYuV7TAYb&hIplj*BmUO&T{>lN zbTm)7FKroh2lpoi840f^FI1*>D3-;>go-`Dnb$vlReofvQ#EU z#V5*)u}-*#Qa#Ml_SPQ)e@0Rf9khUGdwu!8lh(~PVIfx)zO+S18%vv0`2jsSKAG2o z{APG`GWvna%W4T_jqYDY_+DqrM=eYG?Xw64C^)nRX)A1sLGRugxE`AUhVT4CTmrx= z0Z74te|uf)&rjZ)`VMuT-TTd*-0KuutX<#gOaZ(9aVy4HGkm}`Mp$t=HdC0NyAuJb zeJ?frato)r%})xfu>)B{;c zzc3+BZ9)MUeG=}f6s|NCdB?f}*k-Tdx zbyb!W{g4q^1-8-o_!ptqQdn8~xJ*$)&IP%n9gpcy1WUY?ITnAIWQy&xGQ3A8(Q4l} zS7oY~Xw8Ba6vW%2hDY}AQaz3BB&n4E+2OlJl$pfQWL(e9vPBC0C-0S|RF?QfcpNfJ zAWd_K*y%)U=+qE&R5ZYW`IVI93&uEIsmMr)%g;Rfs?OzFyyhl=xRT>jK0QOfDifxD z`^IwofWIaYQCwo*zWxH;9q;kht;ddJ&&AHgVlkXgM#~KzwX5*$?)RHLD3G=bR*Uhc zz|Ksl!0#(#9YC8ph-x=G>$mVTP?-i_Lp&G+Dgak-N1@8ycq3doBqU|tg@f*f)>EL2 z8j;<}Y>#VFPsdKQE=*;MZ-GjK;H979#Nf8a62hD~D10$QS70A3_DZwU;Zy9C%P$#( zfU(MFAED;Wb|YKD#7EM&idc%Bw_^UyFs{Ju$?JF{hkBnNb1!^+6VI&47@lYc7!O4x z14-3c^)Mz+*ZZ38O8#czePJGBFVP;0#u5=tB1fWdD#tBRb|uMf_P+iDWgID-pMwo- z-slCanuyf>r)Mq~u1EuzB^+qpTV-qOI(@1=L^`|XeR7x8!21D|Y-j#w0vK=!y?na8 zK-qagRIB4^#&GU8QP`u(2p`Y^!6yW#%L4|5P`G}$S`%JaeufU_1WwO)qnDdB{WXTm zP-sy~_z3=qq0cqE9CtAvXt|;{s3@OQWx;4`ZHlrL{}ZmE8n=)AoSy$G;ZzmgT$1ys zz^xj!PhF0Vw_TW(f3aPl(D%-3Sc2v|(a!X(6=5r9cn!1z^)I?nYQ2}~tCPgUCrp}T z#fImjIO+Y&PQuG|U$Jpy(AxFS(yGhdo3L!uX!{c|xY8a`u+&^-u5$3%5o;isUMlIv z>3&D6rTOEUu}359>~p}x^=7z3Ecvz&j@L~LX9F)ixHsjmPKPx=YQc|BGhl95{Bt>d zNtq8?Wo@Jj7h%ajazhxpO@3Z>f|Rc${R6tjfeP}tD?jkw1FXBjkt)zEYY4^}NN?C3 z+t?%fE==0L_@+#x=>>v1=jW)$C$S}&+ZBWG@79hP<$FWl9DWf3lIA&Uk^>QPzRByiGFwk1*zrg?(|^ zij7sqjKT>+G@fR6C`ypcYvS2>IGYP{z;&}rmkdBdN`zCqF4?bjQ`E=-E59cUG;$DkLuuMxC zo-4d^H1{9nVaNx#1E;`sVN(jH%7IhJ9&k&FRID z;%40bmSAoV8wfpt=*%tK3sy#Fu0K>R8M|r-|D0r~}S;C24R~>YGTbh@Z$vbNAW3`kqYH$@Cna{CZxk|@l zWgBH6LUOA9)7=EUYOp@OYv}zRm37H)*H2BPSst3h58o~Z4wRh)P}~n>BRP!`*@ZXH zc25kv%fOhgQ2HRt6Wm@z6gMEXbE8-%=I^1P*rjU|QPqoe`M*+u3U7f3y+Hwx1O`Sk z27o1$M)A*vm9EWsXN{6kcqIRro;~)qYDwAmdIBHIsa4{C!%CEaU1D|VN@6IL`aTMG z<)f6gMgmXlmzT~O%tVSZ=r`-Ej3nW%g36EX4Cj*clAsEpaTk?~7eRSi#$b}M#&h)&#ORE2mjm#KU! z(365`pL2}su^nz!3F+6c_tfMe<3j3@g>4UE-v(jXD!+Es&saL~`>@Ym0iZp34LY_C zJs@Zfd@^>cn(WUk?zwgkxKjXP$e+%kBKbW5Z%ps@YT-4m}6mTDvsVl~! 
za5&36Khww2_{=2mS@=VlI!)iZnAUQaC3N(k(4>@N`zL8UHsO&q{Wz%Yoxe2XGR5G@ z(ZcqA<*FZ13qP;0>}>M5N+EoEe#D+o@aLYAhy7cFkSP~dSMm%!y0!a^kTPWF=QM}$ z4gLKYW*+rq#zLs=r&WGd+RBP_FrpwxSgFc+M~H%kWe`umOl|3a>X<&I2~l8GcJ!-y zUn7g64m~187{&E*LIUW<&`>^vC2}r6`Xu~ScU3Tupz!!~2&-@7!7dkPKm&BR^e*2<`B z0oAhW_hWM@3i&;Mr7%!fGL*av*n-VsF-)%GsG<8kt9ZU z?f%%M_c{^EN1{+UaBzAFp~X4c);y-*oa;W5#|y_Ch~VRv!U;aSislw>rAO0iA|b#R zn`+Jzh0w-D5TTnYYADJ52&Kj>;}v#KrVYR+d;WoUm{w7aM@94?!6^+za3Y&Q5Wl>W zs`tz`Z`oE+gQb>f!6*Ga86|F?i}3UTgEqsa(vUX&kCeuh^pWz|XDmKT%rud8T==RD zCit>Xq9fEfaD86&7eu^pEG_t~xy)zCBmV((dhv$K7Z(@9uR&75FRN0cV_z% z2K)lwtC@3CU>6d$6(4n*rHsB!f?+a^(PMHFqf?7}fdv?D~yGv4C z{kUS@Euk5kOk-V2zf43&dZ!!p2{qKNlMkU{MoOV?aJb*f{o}{Oz0f>`a-#;%%nQ^- zh3(LyA8xImd-i}rBa8>pnpB8s&s+nX(MTM8%XsSo2HhCs1*!AEW!p}B2k!JPdk~Ir zWDIOv1gi#E&yAZjxI7Aswmsn581PA{7ze3kioWT7#fTLiR0G_$7WPW1wzJ3hgjnM4 zs!THPsU_nqF+XnPi?HvRFt%F$Z=e^q>%ZubabV>3c;(v&ZAL+s(wsp zO?8Oo{xe0H%U704Ir1peftwRH9}^MJ?pEP&&hV4_buv0-RP$-sqe-d4Tm8s)%xTQq ze81w}%V~rUb|}OFYR-U?b5KCZu*Utw7fxf_dDh4rJ!= z)i-}CCyCP0_;(>rG8e1Jte7viUe5J>olev`YHExCh2`AD>?t*>DGnQ^rChZ8bG4^* zjH=w%agfYMZ~t^Mp^SbwkwF-NSkFL{7E!cr!`pVZ;A~-ElF?GFTPX~2o|B(hs&*y{ zZfWL0{6DRUdJf;kH3<6tm9KjHlu_+VH~4`c%H(`7Nc`{j;bL^G18HcfD7INvUtZXe z(J%By*|Kub^e3}v#!`_0lx1ZA}FFP@*TE`QEpWH%doaMhNobBSm;rE)m1iq@78Q(K!@iW+k> zt7X138N+}Jm5QQ@u;K@+Dq9=l#F2e$r|~-2T(@KDO=fI9#N3;5`vP^{5rIX~$#x6n zXi?ycT8@~*`f7&oW=of)Ngc37|MuI%DMZQ|SL|^UiGSv@^DUrFO4ZshpL+P4#z>;Z z&+sQd6>a$c9~*3|4qZWyU^NEu+i~E7aDspNDs@--m!t$gCo4$*uHHUv6AO0JCpKB< zJXXz6-!fjs(Y|u$X6Y_LQyX%d!?G=dYE_d};eKiE4Sc$}PYS}3^9v%fB#PFO>uaRL z#~gIukrC(;G|3wfb8F@xHE*`}lF4!ODU&?s#+#x~L=%iVN#4~|{E!vueHl%KfsCdK zJ7JpKRCfAP{!>Miz*f(^_UDe$6Nx(99QrbK%$Ea|c>Z%7UBjCUs?3fPOKd7s&q$q6 zXp>4eVWe9)=YJP4hNe-Qhk`k;nc2c0V@g1`1skUe^hzoNvRFA8z{Xh;@KHs)`2Rls z$N#Jm6tfHDZUAX!4znUpXO|BnRDO|syfHAL`$#}3sXXbRZO3C3VnIs6Fl*jEm_v+OuHv?WbFfz zIOkMFNp7y>8X24fn30D>=&xe1zefh;+J(!A=kw>0kT^|7l+-uvNge8?jmCUp->q0K z+Qt^4Xbo!NnjQCiLgrgdh#G<&W6znMp`o!a%oM|?nmZqsOz|yinMLiZ*v3U25}!}0 z0H+ayj4#x+ z7xjvRq0E3z9|V-KABy;pkR^pb3mF%vADq!nl)Ql=}0#xQtSd9a~V{vZFWj zxtYET8yO_)W|? z`k8}PcwyuS&bkVGXEM~Lxr~?MC3bz+gTidD-t{Y*E4ksm1P8;YB4>p?ey(~-rf0fR zW^2lN5V7<%p%5tM9ipI{^#`ViE#~5MROT?ZF*}Sc@p+~3^$J5%EudwMsqQLs#NS(c zYlw$iX15FEa<<7*%Kuil5By7UaJ4DkOJ+WKR?jMj6Sr*=<;bOku^-EI^7F zQ6TDfWeT}Yll#PAA76FP+UHAcbSZk5Rdxn)dXv%NN>~nTONu<4aN@QMFT4qvXHl2& z{AW^su@70Fp^8;q+x*NGj+h|9ZgO#Bime9)>?R_*VY%= z(scA#$-%4is*Kyp|M_&!qebw@ch4U(oajRtFa>o%&YNy4KvzCB@SqTWUf5&g>cEG( z6DIZJ(F#Z>prmSG5&eoD9PzxiXXn4bt11!HLygcGe)}hXj-I;(? 
zt0lc?yoHLYm^G`Ln|b}(FNQgM3(3VNB|l=`pdaCQ3Y2(Sup|zy3ogS9aBswhWnQ&Q zA7;BsMpMEDTD1Njvc58)39xH>g8`$*=QmhHju!Ai-b{=M*jQ7b+1{hwRi z6Sxj%Q%`FHC9(fAVDz+G0e5-jIqI7Nhf@ywT6b&rZPdMGgxcav27?^eiyR7N>{6t2 z(0eXs1|Nr#f#jjiNrJG!{eGWT|8tQIm*}VP%f?u?pI=m^DrglbC0lfRy(SSI0dwpj zH)~!)MJH9|B7gDuH^$=DIv$MnewSh>Myp%+?~Eq(l0Ei$A^1pM=d`9W{9)$OwgoZ) zQ$y~gM*k!6BG|K6F&!bUVJ;OYy3_Lj#qaOl3d@Tdq~T6aka*O`BK}RFWIeg#Rj<_d z_{>6}tjGNiN)ckWoWC^}POXU2TwmWcVj0IYR-GmH-uw3@2P@!vdDblg817H&LVDl^ z&JQ|Bh&~yHp+c2NL~%429UX8J!$-q?rre}?=c*?&3Iw0&DTnn>aZ07eb2&)Jb1ZZw z7D}@5p8_Dyka0xOhF-=BtW*QoeP_FyLv87x4ad$=3pHO#0;<$u0)L<1bX%!)3_T#5 zag;Y{rvMO$@;^ReCmj}UgEdcP;~zg{Zxb&lTExrm zIqB-$SHs0Rc<~7!u4dpS(CmT4-D#vj#Tq|f_xg^p6QcLbF(g|HrSc*NgrI+qJD8{e$ zP4qFJr-tkO41E)Z0p-_Dyz1>PeY^GikhfR+xlcnFogYtCw}4sb)SMCuJ1{F%w~#K} zvaiBqa|q7#1G3_i=S%vG*ag`&%P3lo1P5IW$W}IFk^4o@1w2r*M?PPf(&gP%d1_X< zdTy!7=}$nGes&vwQC$Q1D9h)z38?Z*dh>EcF@HYJ0F-cj@a;_z5v6%JHiZDHhFd9= zzf?AHNJ--(SsEFT`8|>0`nXRw@4#elQa$>n3sddfb<{eY|6|{OE*x*PtT13Ig!hPd0IB1N2iLhT{CN2CB&z>jxk zg7>_OGiZD_LMT8ItIj&>tiT^t~NN0She7YEPQwzTfbae4p zaXeGa#HVpG7#Wby$aFN&1FsPieG#ODPx;DOXZuUc=3$Ay#9kAo#f2x{SP1|bzCVy@M)Kou>1e}OB*J13RMgnnUK1PA zQ2#V(N@n-yBuo)}K~wLo@iCDw%&8E@1X|r$RVLya1uQJ2M)a}|2A0M}juR$;H1B|; z+>tvBakf!MIr0#Hpzf~ux+CwKyL!>R=xBDS<-D-F1y1B?_BfQ6f>#CWOSDxHDIX%k z7}u2CAjTCr$w^(3b*qGRxu!V8*+(X-%RBof;kGNqN1rZ|XD)^uB>$K<_R9Z+w%Gb> z-c^-@`6WUQyf6t2AgR>2jhBGyK!><_fD2e3EHN&!%BFrWIr#C|3l^Am*P#!Ez>(z9 za%NJ^jHDby^TvuJK0KP>p}Q>T*-6$ampL#B(j#-|0_X!o-_b|7UuG)YMnE8{pt#jm zveN0>Ob#qm-n5)zD~=!bUF;`a*0X>$puhoHE+++5*J~@QWkg2%8a^6&vz7MPH!knSZzt zJ4a={?L_Ow6m+tQBN?CnGv|Cv>MN`+`!55zcJX%Msx*wQ`~oYcT!BJ%irZVYBJ4?B zyn3upnu;Ok`K|ZaHUPC7Qcl`pZKP~zUo;(m0BKtASSoRiDSP#hY>bQrEkvUXjwF(` z%A`mi9|!U*V5*Mug!7y+1rD%fk9~bjZxE^#KX^|jybb+e?Q)9Z{gjq0+B}@yyudM} zt@f*MO>0Rv4%|j_ZVw$t4v+O7!GlB_2+^N8)RS#>rFt&}F{`0vqYk^ISe@T5%l+_l zYFK;+v9TQjaFF?vIfB0|;J6dW(f}v2ax}c{DOvi-UcGQXXzx(Jpri?Us~$?Ka*XGY zrZlMT^}dn8gU<76ZE7<#-td2s3~9^=%+Kqr+wd?uDRAu9W9^~BctRmL)Gf`Y`?Ujr z&H+grmPi%fk6%Fw%tOW~4P@&ml+v0b4Is<+z&FtiF`nbR)%!t~n^GZ?=Y^(GmA|&~ z&+Y=28bV(&EzO5(^1PpTf2NK^RvyQR(QiF$zU8i+Qplx681S*9xo^?^@Nr95^7vjD zZO|U`=QkO;tyvQDudRs>h=3orp|UJ80|*}L?}Nj7wo%_M!ec+$a>^Q_<92YWWCDH* zHa)tdcU&RB1om%z*skl}s8!FKWJ%bCAi2|IqJvI*6_+^gIg{=}eVwkZ0avK^t-XcK zP`PU)_BVHLVrn*&h;Ka!IKFx2%#t0PcHGKXdoPhIwfJZS+c+UqW?5n|LM`{uWC5Or z*~**Zm&}*V@_vI7^sdR_r>C?JkhVEKEkfl; zxUW&kYkBq5I9D2W3Gp_K&JY*Wz}hqDi0u_AAW0DiHB}6IkOj2OcqSMT{xYS#3%a8yU!9lwBJBz8NiLE+UEXzX_}>Ow>`vkCNZ{`aMtiszz@8XZ zgvn%aZpilDM;KY3YTS-o?@A!-a?!38OA$iu>XT_Kb2km|^N4O7J-1I~XIvaQZ48?C z{?TyM8{r~>{JLz7rkvZgwR!yR6tR|PNw@Q;zNkM~T*|z5?SZY-ib|0^^xgZ>j~Bc- zf0z@~&wp9@5Bj{0&sA8Vw9?X)X;RF?gGCR7GZNfM%s$U!9Ru~Z?9O6HIsRX?cY)Mk?Ip?0Ay+=hLd^1yf>{MoFbmP z+e&QS+2R0ycT>^!a`opFO}OuzpZ11F&wWHtbTo&3bzw6L?*@35g9q>2|)ekHVyy zN;XQ#F%&^}k2t5^S<%oEoc#;2*^~b3Ua!5@9f=`Vdh(l@ya5leFol66 z@s1`sNA$Wp@OkA#SXbVi&^O^}c4ntVfx}23*tqOB4(y zb|)j|4C5Se%7nGK_^_>MfT0@Dn`yKYc`Z)`(nySTu2Kf@zT+-vgEf1k=0^zhPR8b+ zn#(7Y`4S<=1szP)a)K%2>$8=qaZy@4?4~36Q6+N8o(aoFij2?aTvh0!YE$bZ<)By~MKK|P23d?4LNL%3$j)s;60=R#g${NLXv5y= z>`p#L%I1$%{|AEHzj9tOeT(^>&FM@K_C`gVuSWX}Cy==R?1>1D6}^jaQ8K3qPHA!w z)#*85rJlj71t6b>A++MrBpX35L8K6yS^at=$+jChc{Vp|Mk8##flP&D1b)U*Jlg)c zQI)do)M!i<;H;x+CBpw>9RF6oSn@oG#pp!guJ=@Yc`PJ^tXS3UqvkM@G!*GutHLKY zx7eWFSI!q6zZnu$IQK0w&<~-DmJ&}}bDgO(l4MqP_JOVNTN=6NQ|~jnMUjgY zLT1vY6bCAV(zUPVgT4}vCe0;O=cYk-xoZsvDxD z(h`RR7=-qlye1%nD)Az{(A+Mx*#1+DCnd5B3M|y8rYEVaYe057A)TSAA zv7n{`(tkkM7|iQLFik0zr9R};&@(|bc!v*!IyllgSS4LW`#9A3$UvfMH}{iFd}lDcdlAHp;FT5u z=YqL>hkvj-v8sec@W*!SX+7RPDcNTP06*AG4A3Ud^;k1Ik3T<;*9uGl%T)#7`t4#n 
zvLOBu|4OxexRCz6F2oSjX80+MaaD%0c}9wIa6wq?xC!18N>wfY_cI@3LXv@we)@OO zLNPL!{dZz!o_qcr)7jsu$P%L_pG0dB#=P4Dy+|Q?epKo=sNlLf?l<>;A&Ju1;zct2 znq*?>2gd9O<1HDoq;zO1f1W~mPq99{v35x`DisZBx(UvM!1rOP_yu`%m1J5Rtqb|J z>KO)f)oaMeMZB>Wl%L&H1vq^irSox~*XyagVDySj7q3lMOj*#@05s!nvQGQv!EGzB zswFGR!BKlZF@>#6eFgQ$es5+*(5odQI@?;#>LTV{8I~8EiF0`F$oaFZ&~SY zdg?-N8tzmo$>tfw(Hp{-eBaTs2e(F{568xQRa_6rjJslH#7Unh$cH$?5r;w9#CPaE z(s~KrZ-MTO`dc;m@{xv9z7qqm)JIT!V}<*|;t22N6(wBW=<#6!+RxMQ;!iyYN8Zqz z%1~RQL+f{PYbi)1BzO22rWuhCrP|Tb+$4>QRji(d5=X5LM(6j$q_iP3#d6;kL%{z5iRSut|sKfzGx`NgmMmkBsMlrxWU)L1ouVT>Y) zf@M?=Krg5{sbP3F5D?>f@Au zl%!N$e1~SQjBoW-6P4!JOfSLSY=0~}56G#57*G0sS}+A?HZA@ao_3BWwQN-C@-qW$ zjT{k#I&-QT_=X_TDRpyz4G-JPwxAA~b)l7|J3F$0V+11lM(=T)C-Bdso7h9@ud55> z7!PR9X1Cqcg>*tq%l~5uya^b2mR}f=abf`GIu4Zp=^i%{Ve%Vjn$5w3Y2gw|jm7lNjPyD+QjjoS-*){a|Q~C*U;Le9=?fp2I z$0x?uf-$fu+ZT+Ps*53OvSv=H@s7jb8(_f|_5zi*oA|TZ`iFS# zCtVj%|Jp_G&h!B0a|NKWh56w3T(BoVVn4|sSo3+?krEsc z;zqeX98YW)*NGMBjHz>LLcq!b>7^hl-c&ks`Xo*Mcc8%2x~*8+8YcoL@SFUlw{-GA zif6`L5@yx)X~cKI9FH0JeEZ(eo%Kmd4((0(B6h@__3HEZZ#TCTw$bF|a7`!#@JRxKMZd>76j=z{Q9!f-e?G}e$uMZ(1 z;mYSK-|lZzL?2>SLLdHSE^e$d1ui34FF}Ms)HLKrnqx4iMXZD~pWial$bH^KIY$;i z>Sz9&k;*nx@TGwi!$f@ez!)_E2#==@R>RHVNs^-4Xff8Z2=mkraugakt^o106BlFv zf}jG)ysz#Gor9KOX&A!3NO#mKymqzKJE-gQ!q=EDj)ZZ+X=l2 z1kdN8Z++C(aN>R0Lp%KW*X0S$jA}oC?S?) zH9(a<7}w=(4mHmD;EcA}7YKZowXCu)PE*;Fc|+|V8u~pq9xD3$Olk8Ulf~>lI4U-f z!_F?bqM67b0))PK+}EI9talc6dq;Wd z&UoJ;;=}7tD#YA+gdI{X25aGAvIDzDRkqk(`nTV50_E>c9)Xr}OXH+OICaOOw5P3} zpVJXKCA)JFh9%+-+wM2Z-P;<79(zh)M2Hm7;9e4MwQ@5Y2i;ufY@IJ~g>Yd<^p|hYL;+%eqVjlCSa`WHy$?Jku zY;RRAhC{O(KYbIY=kO!ocX;Sqp8#I^Hak8e3xWhnoN@K=2!pq9QlkY$sJ`HKAZ8c> zaSx{mEMVQ=cS%0w0>QW(l0FQ2(zqvbCui)Ag=9t*x6rH0H|l@WCa(mOR3yFF7e{^G zb{r0$D06PI^St$-sC*q&mz27sa!Z^QI@H;Z*$oAbLX?={a0FXV2X{HHU zd|B$av2HU=DSImPMEK51iTSu71<_4^xq4Hf_Z~J3LeGXf50d7mi+WJ;QO-u~iU$>7 zO=v+P6>X6^fhfcDb2XrxjO>M60`aTl6C)~IP)#FbOw(tRE+O)^7Z(fTuWe))UuR>p zFurhxaXodu&CsZm2&9QhEw;$Bg20?25~l|0X;C+`*(DY&s5(7|c*p^vdA7grYiuD+&7i` zR42JKy8N0;RsV^hf2-Koa%M9&fM7h=Fjv`^hV|sXW83%vPZEoKW1(&`9`K%J*@|0w zWcIk3(OSUNvzo1_Fym1kAp}B=JG#U6CH^$OPmB`;{R}xT;Vmwcq(w3kay6VpU`bH( zM4=p?F&(C1OJdHbP=z;rX8=y_w;5B528meROnmY&Z-Xz zoAk$6(Lc9JCNxfBG6_LOztQx#w0#K$WjnSu`o9H#ZDZ}gcSdxX6-iX3I&T`el?h~c zq;>F*W5iVd#ggQM{|hz7G1tXTgrXXDs3u_NE?_4TCu0`&=9ydXL%*20I#!k^Q7x$p z$abxmja>jpQWS6Z<5wnCr5+%XI%gLSbbvx(R321ggutnAm9kHMJfHhu?1+evG@fua zaxw!-Qyf%9eD%i)AD>{ zXEnIPTx)HIubu)Z{`r1S_ECa!;(Obi$^6jWN159|eriL~w540g&4lrJyzo-VPYCn{ zzT9|uT=I~T@|No|JSl~mC65iLDV!#9kB4o5fc;%0Jwsn#(T!hxZRkFWnKBZ)0+aqFF8W0Ga&e*YustRN7dGHMe_ z5wH5$4^79*yG!vqQrd^Em7iOaFsCj#ykpK7{Y7g?&gPqAnQ#~dhb`YHq60cD zqAKNebHv@>W>(}8c;7^?sHu}s%5zO`A>4MNPOndtC45bmLH&l2QBiaM;a6Le*U`em zA8Q-;kJSFPe6YI#e`Buby06|XbOG2je~9-aev8VlO;71rXA$fN508W2xMj7)yC9m2@{}HRM+}XzHmMRPNw2* zXbHK*%LYOE8o|(79#b=?dHqN>lk!j)`R zeZ?qn#=a)1IWwdeay8G{1ia0TVqP{Eq|ngBy}?KpNeI>1tmF%Iy}XmnC9W5SJ92RM zYO(vjT*UOB%V0(}F_eK9Qh6<&K!=vk`3i`PeVFwSgmcA9Wj!y_ZzJ}c8FxWiwGo(g zQyA)`OqNAK^0pCK^HYfD286QF_EJ=3s(}V2;VUK|-qKnA=+P$v>ebAF@ib0vrHCD6 zQW;xle1q(Pl_u|H9OXKQ*yok_Y>Xc0)syH?%}K#Y79_AndEeZb)AS~5RvnT zRFW|5G)Vl%l--#w!d1he74{Yq3AOqU{+4$hexgM*_Jbpq+q`N|AHY;96zJE0WV>{v zWgIdy6gFNm=*36_I?6zx8KBQXWaLdA6~A;?Hq;E&ShDjr*N$ya9Z&w03Hv1aGuA3h zqZ`17p%%20gh_VC>dXq&9EV z?V6v{b7mO@nKUEk#_mbxEWnL_{8;EUOt_EPHLtofGQtp!tnBsH}uQ4A9q)RQTLUE!CyG z*+_>2kBW6wWEsD!DYf9K*-N08>~L=VG5`Kho?FhWaBQu(aFBCfp;m>MrU6^f{`A+H zxnsOcCU5kA`O2SI@~3taaDX9vB#)_Qwz;lcz8kI7||+G znsmT(fiORC)wh;;Zrey8z1=T3Ga9?t7x{`1tfH3&dIVi9!%aI5XKvxd-_5q3&~~RS z)*VXpJ|5T>iR&-r!prZ+lVXR-3(D*S@`Yues1QA(v=sZ%vpL3d#pbKSpkK80u0=i8 
zPPJs{X0Q(h6ON&Zm2-4AX3(kxB#v6r8Q2cJD2!xdt(N;cto;wmAuG#>h}4(lPw zsQ;B*4Y@GCXg|R{eq-Jz2>}GjaX%$r2%m%ly9Ds4=1C&)a=2b_uPG9_-|={_1#V96 zjLRmb>%CxC^07}>X7DAv8!5|)a*E9zHg3bWjN-Gpi}S3+0xD-U9`q|WLY1oWWj6=T zkt{6GdLAXjvHCQog#R)IE*Bb4Y_L^X=TI`i?i;XU7}uV&)SIJ_NDq!FOQ~0C;mBW8 zE(n6i{&GPnn+R18$cKjE%UaHZ-R&vMtSEtEv}=_@_%Q%uAnHioUa%T~eJeQWYkUCl zTT}653R09M87@NlPOd0@8vD}x92e@$a4`QudB8=9g_tafoS6UBR8ueD+76;_{Z|Vv zbnmY|>MQyn^kLBN?)+feR|gN24npT+f3#C!&I9_y7*lZBxYX`|fuZQ)4w-Y}u=lq) zSrCP(WB3I0O~%b?jeke@w*>Wi%d>qkar7gGD9u*zBY}@yDJ7c z7R+;8UdIPZFx}RW>SdF!`$35#3ise-F%IuyKEBktalhnHTtO)-j;W$ZUV}j|T6CruDeF zGu=TrnwWv}P81CR3atRzsXa|`WXr85XB?=LzRrE|CD`9_7X=Uh_w0y!VRw1mwwDpftRF9WGnoqepxaA|x=f&k=+v?`QTs<*oUyoGRJYLF(($n2=qE|c|yeq%yY-~k8@CTxBfN1Ui6*m5y z$n`VBs{z)hE3_eJb^)n@+IV3E_c57E3yVM5bK;Px%4#r@?XPNsY@WN@Y{bucHgLg% zM$|QO=N%?u#S!&D{~~@qEza@LItV2B-0y-fA&uP!&pXKTgme|(132*}dSu9S4JJdn zm-0oXtb7ms)MyDE5>N2ah?{C(ZI7EMFNrrXj~UWTCx5uPYLH_#OxeYg5g}`$Z^iNG z=eMYE6~wYFC`8u7t#jG)Laz_cF^Y765UK~7sHKB{tAXW-!0}6rY8`m#CDSGS8E#PG zu2CWBW=Jo*2n-HjM_hW&=8G@y<7|(vYtCj%8SivUCV)xbE*Z99{F* zgJ$f(j19`-=NgUiB3p+MebxsAXT*vB>^mr?!EzqL&fX{0C2$$;ifVi525}=H2AY)`h z3$0Q&JwF3EOq-_DC0sdO`}ieiAu8diB-s>r&-H~dRv^2&LSK3B8^7tgvY?Zys_01$ z(g+Q|^GW2FCb&G zU0auwT&@80shV5evrN_3pmt$nC{?{=vqbtWuDicI2uCP{b2f$6 z1#nc&U>lu0)W$BHHv$8ge|BWU&5h`37}@Qq=YLnH#8{SYo9ySPsXk%es<0bQS(l$E zuk=L!aS|o$nII?s;V_xtzfipnjVYI&=5Etn0yeg3E_F>bxY(Yg3HBs%rSn$f>kBfK zAqZQZQ}ZU~TD;(*E}TR3o(O>P*K8{B8t=;j1jFl(fV#8pgMS9uBq5sIJ$iQMq_kqq_bXtY|qEUq8qF$IK)qUAGa zKEBdG2^h}!21ysM%j}UzjO12>RdcTZhKy+K==AtBUa;k-5ZtG8=dh`3OBCwBRb+Q} zG^?0pW<5}ydNGI;5Du??75b*p)90?w+7QE!=I7)$DQ{SXGhv(ay z`MJ2zpM6k4j|;kX#)c%ztLSJ=m zxZthzUz?zIoPlhe<~e%`TQa!eD|q=K&6+*cjI?o9T9k^jwQ}{EC1z8GL(;tMRu!!Q z_wFydQwT*?ru9v;z6QN8?LUlgJ*0|&+C~n<%R1U2-iyac85DS8^#gwuL0j7YQY`CU z2{wdahz^RQe{^T?Vw@k3;f2NC?_Rs&+zfTWM@G?`1TaFw2G;PT7&n=N0D2Z6!E$)d zdtVQmH2i(L1q-db#=AYne#@|JEeR-r6d`aY9t}tP4_Q5vxL2N|g|h;o8L{w;px;!D z$SDyZbq$syvR)sv;GAJC=b-PAGO!j&sOo0k+#zHV-bj`WmQ=IXvmH`tkb)J&U*z0` zt_~oH1YZKe;AjnD!Y1(9&GMJBTHjVx$sjs?z2IB)vz^7ur1>ekm<3{|3?|jpO^ti+x>-Bou|Wl47748bpL0L~8K@w7`+oPM@t_q}F2PEsj{o!# z{sq&&6mB*KW4~6;S~)bw;tzE2v@b7T{3W%dgf&44HDm?vQM9>mj^h47YpX1!+h?tc_^H< z4jgP{`(awapb&C{`s)Tsb97kZDa&jrWmNiJm1X(W}B7GoTYHKgq%T&Y?o%A{E=u?Huu@sd1g2 zJ9e86#mjoy(msa}RHGqs5fu@4;%H&MN*4`_8<)p-9J}cRG*YKxXU{8U>;8!tahN|? zdn2lo1`hwdrN^KKZtaQEivB2gV#Qw%A7SUP6*(r&zW{wK$d>X{8@fEPAr#6aNsl<$ zQNq4%c=4y|@gFz(OK$6WL*HPGSEZ-dJg!ef@+sRgGgZyQWvFLkq`(U)vI;iV)Ri2C zl}A*4KF%FbdewpO>@6beDE=45Gpe>-bCP<5xJwVbMwUxH@B5DtUN}G3Scpp16QiS? 
zoYda-hxDZM5|2DW^S8Xs*Ky{I#$^OEHFtbh?-2@vHuR}X-cuRiN+v&ha&>Ijo+@K$ ztv&6jQ_^sYN2_TnPCx5HyI7;;Kmt8O8F>kMLloO5O5p1I8CO@GN`@Z&rpUxWKRS7u zn^2bGmWk#3gEW{^ua=Ug?{oPZM8(@$)%6B9ft7CLj>-q-pObi1S|ZbsURSXPe@x+T z*%;e)nhO78C!x7#B3-X84=#RU*EiYjjkI?ij%Xfkvwna2G@f)*c`C{ss@oVRle$Q` z#vz_Gvu3N;wtH?%V^x?NE@+{yQWC>lM5j`~K21F!2DZ zVf)EqzfotUsJBb!ZA1}3HX0|kQx@N6a&irxz5h(nRmvjqWZ2j)` z{VjqaIF?zY+}`mBJzp$7oxXp5h1s5doN$MtPr2&>ZU#PmJ z7OFlkWT;&}z>PW6qh02CN_4v>S!}DR&i&8@lzW$R7cJ7pc;)(fRLhQ59Fly^5AKlG zxeave07jFiI^!X@eF{W3K1*i73sLZO`KpCY3EdPy0f<}-3&0Y*?C2QbABeYNlTcjP z6z#R9q&fH5mlzf%Ji~2I9p1QNqDHW3fw0nHs=`9L8&GBQ22__$xNBYk$t<4$nO^K* z&*R&@+G0-Z{lhzAK+9Q6+*qghH8H7P1%{!%O6@Sr5vD8^3re=H%DmG$+X@bD)rguXrzI|^O;ao+ z`9)xDB&NW?``A&EiVJ`{N^V&i&BMpF`eQF&`E!c07PxW40oQ31b;5VHno>_Jjjz(v zFh!f3h=P;wTF8=TQ>D3J;+#aQTs_iJ#E@|gO8ArD>L1WiPAFFJfIXp zStQ*k(|8g~d&nAY3=rkK@o5@Ish4MF6Qx!P&Zb|jpYi+1cWfTUW*{`wE<*>duWC_H z<2&Awwq-BNPl&+WKg~eA(Mh`d462gPm49CZB>EDm=x{?P#L3FRr{-g->~ehvCFlo) zd@Cs8(yD#nf@}}+@G+;SNn;g{$)t`1MG*QY3{*8X(h`hDj3QI0p}P{zqOx<5NRi|JI?S3OWO08V@5ogqAvUyJR334)RILy}UmxPu+uO1gd zYP8;KgHybGXopfI`P5@M90(nwBiFDyk`XU2`JH3&(`N7|olExIVYH0%5Uchek0!}D zb!A%A!S`whzEy0eeCQ3nT``P^#(G#b*=*3IdpKHll?)?p$|ABh($|0R;6{)9aNMii zBQs$MOJAr;;B{y9xZ})lJhZ*#tk@n~3h&+w^}LRkarcifPmV&1FEQ^_yZ2w%SG}DS zw!wm|-zkuTd*gkYVOFpiO7hJZ^22+d2%2s_0G&ji|RY`PRJqdAAvw3|@l zKCHogb$;!HIk&WKK2CF+vE+~Dk>doV;l+$}-$P3J}lk>3KPPJo_71gVvW>$Au zbYtlzr%$i{FT=Ht`LD-pSYd7D2dWfnGfM_zXQ4y1Urugd?!VOAYx;x}wrBjPFZny^ zYx^7SjHeAsG6e6U)m5mY>r2f{9G!6bM?uDE99#qM&K?J4=Mj?YQ8?w8uEgP(r0xdd zu=HALa_O^#AdJEjI!No&)aN%JC@}NH@ybJ=f6L~m>nD)*v~^4fzaoE>p8VaWrcaHU z*=gzCh8Ud5pW^Yw*9OOd1xaAKb|jeBCroCj1RqM*bmZ2;_ul6q!gjk21)sOp=koJp z6}kD9TIqJ$YYkqxpYN%La5m#gv~HWs?)fde4OtH5BgxV|65;MU=x2Kw=qWgQvhZsy zbuc~71bj;hdQbBzfGj6g?Brk0EEe{;;+4D1JBVN zm8F%!st&=nIAn3!1nn`Xcf}ONE8=80gOs-i=7`RH*gzNedU;k?J#|JtYIwk0KHkpv zPEq%A@Q^UC=hXe_%{|c3fOno}71z@*%^_RQB&tFP?Fd#AUX^pNze7?uOqvAnVxKb3uF`tfJX!U6(VA8U-~Cd@n<0DfFJd*Qs|jJH#O?C$iR3fNjpyzN}1mGMs184g)<_{YuG*!~^QwaF~HjAIh8X@n)@n9%-r z@5V3hm&nAgM6HbX&f+Y;3x&E{giTqz7yK3ZLCq@b8#kR8{~DV~Yc{o*EbBwq`Aves z>0-`tHYiz8nae|4cRDc8HNMOG8HFuqzOQ#h70i-1Q5rFU;`0FIAnG;ZG}8 zu&-ZkkWQCJejP&pM9q+Ec%9sMJwA>#@NZrj4PCh0roD&Bv@d`ADd&a5tAi}o(CIJH z(jPh{Pg5hIp9bKM9nhr?k0If^DIeG(${-Q>jnqB*PMgaH<#)GO)$4NOh?F0jYIBE~ z6QPg|$VSTAVMMUO0?jWLYNIVej~6tt+Kq)ncOW=9O)1>Qx^Et}_?3xQwMBq=XD2%@ zq8-gP>p95txXnq(g?!V5R^w+|84L-I(gur!_%fVcwkANZGMu9nA6jK}Z;bmQz9O?U z`Et6?j-`cqQ6l{htm^2`bz)(XNB8TyS3?Tpp)%NqDxE9Wu%Vb3yF9D=taLGSLq7nI zVVe5hEao+9_fyu&S>Xmd`}#cmk=B@}7P!ed-hI<_ctg;ycc%KP^?`DZ@(~f)dsdi+ zOtw|=5J!AVR-cz70+2UxKSGQ(gvM9Pf?(aAGUG_8NeIMm>Fx`By}6 z%^+Ea$~G}{e~FVcW(abB5wn4zc?nDWD$v4s=QkD)VuD=RufF{*WfRtSb7=LCH$taY z>9g4xRY;5-3c3%WVMng}jc3ftd>S|vffV&&vpem|q@c2WHh3Lh4$$Zs$ML`>i&H2X za4(mTcG>O&du>~lw`~elmj=wKJ%NY#eQ9vAd`OAO=2i@K!5a0}Q@EcB$Y#1keb>t< z8ISl`&bCg*kz<7Ry#q0ujl)&cmBMP*PeV34Pvey8kn+=eicESmyDzs%6(-h_t=h1| zw>sv=uAAVjIlnsAu(zu;Sb4@8eyvx=P6itf{V8XF-QM!y5JO4+de8DrGS+B!$$k|2D;lYTQw`5)jXWgot76VBLJ@%AQ?mBJx@_;v*K5y+oWQi zdLp^oNMI1f8`5DHW$q5_*k;nZ6FwWlKid(pU6${(QKlEqb~(b}iA;1IjGCBXT+8c0 zn{+t_t(4`{7Uc}3-f%)O5i4TnITu_P=ucl_{rI{jy>!`#6%%1UV9g&pSO0hD_oV~r z3y9rHpgDFXYC7@5C&#L~>ds8JQFvi@wA3cZZtdQUdYV!hQ9*?Jtb%{!;t_{CbM$_F z!!37wYa%^Ds6kzvW@gGaaP(Ou6;QW_z`sciszTypUXXZNzk9PC-i2o`T<}pk@mXyK zQB0a0wjg37#%X@V$Zpj{Bd|l~%+Qy|N7f&oZ?@{u-F9qM_yDk{rkza;zXGyJ3qsbYPG$lKCOPqLV+Tj4$H<3I3F) zEpxs@j0#>An6NVo&IxXNn{Ws*ZOPm(k5ZiDRZOc7`!tZ_<@mP_jXjCbThsrbfTJaL z68};0mB+7EO_AS}7h{88uD9B5y>a6iT(~2+Xk%rIO ztwE<5^la>ybD37me8HA(-&oET-M~I@j_wFS?DjHZ3l*5HPtz}LbKt%1S4{VLEti0E ziz0Rg@6GNK{MEcw$)otL-3Ko=p@BwR(|y}}Cb{U@n{!ouGp)&<#S4i0B>cbjj(;>E!3 
zN%p6a!Q5v3KU>2Nga=W}bJ|tbf=FSLdo?8;X$csa?4kO;tSI6=bH8$n(qbFqDy}}f z9bvvOIvW>fu^R~@sXH_we7@a+PSLDIQgV`pI_$?uLAVp;njO|wx3{tm8}qQY{I|^d zw>~7JaozVVCcpY2e{t~fVk5KzPP#BhxZk?;$q@4ze~eU?i98yS8x$4g4_FYjf*xwA za=6?`k?J-~)iRCwKRsP#Sd$GGCI(1Ijgancl$34}hDb}NASoq1V3GqSAR#RhA`MEz z=x$WHOS+_C`v&j(e*3X&KX;yUpZlEWttT$+*Fk<-AN z!n*JUxlP1c@QeZivBQ2a7Cr$!@wz&>xwuS@ z*_9PzS>iiD%h=y5`M&b@p3>RzKDQ2zMjaeO9A^*KC+~$D2d!jQB^tCdmADn;mB&60 z?SG3lkru_)K9I<267#`5u7$XO%0ix#WLp&bkgfTd)rZaJ%mq62g7t>+#AHb`#={(cgwj_*dFaYJNVIHj+6mL5|3}m4{KNZI4W*b61XhX2sPAm z3i&$h)@bMffG&%MS27l2VNrm%74P)A^v{6&e+5Pqj`TxC;K-3sQAJZpOW{IpJEco807HKa)>>Qz_C43mjf;gGFNd+dn?(r;C$XePrA=)D!#5WVlCTmtb+K%eK_6qH8tfTDNig zI#vC4z3KwEmWk`^7K=MqrC?@HPed%Jt^fc%3;`Hm8Ol+8@fCV5qrKwLe<_~(lXv;e z|LY`T$V9P^xBsrQ0ZW$NyuRv;LvE(ix7^MnlgqRlH?V^9rH=)Wr>=WYwC8F;FNod} z)1b4h{q!6x+|J>Ul`BS@bkyjy_=rY;Cr`_ApcBR6*i6BV3U`co2Hv9+3jzPnWv&9;9p4mv{S+3s!y`vMK6ZD@J0c zr#0UBm_bqPz5ADkyUETR1CzMlUyRIn2o^voBXA*rKVDv{%3|X8@SE)GET`{#uQZH;CfL^Nwlx!8*T!0*bB3BZ zjVPvgu=hX95OR@h$c9ZbvcK2U|1RRJoz@k}OzCS@!8c8huciS_GE3qun*C{!g-c1g zAzGRG_Qht%+xB-p5l1R9BFK)i4O!9HLFB}&aSvPh8NDj_(Wd+C8nMyd+sk}%%VJ?` zoFYdRhkaf;k(*OKR%g|oWTm$Y2^Fa?z|&QJbL@XYoj&c-;HIXljmBXe@*>!WKpwEn zf7X`aV_PF3ko@sVsj2Ekk`3!JKC&h+^Gcq^fWsG!Am<`C_B1VOs0a7vXE+3dd~)UY z*Hj(=&0Z;fU@!>i5IA(^=UgSM;>kdYLu4s@9l9!lJh>4vN3oV{?OiX$e;GU!bpcJ8 z5C@CWgR3dX+A&-QRnPXBj#~J2&@xBBB4WL9cj_xa(e4tMTW&#fF^$-Fp-ZB$V+;D@ z`2&*gUc4$x-8Trbk+%0W!VC8>GwuOT$&7aasV_0-Vhwj`!_Op?9OECDGJi?>APluFA}JqvRCsr zaO|5!q9AyH^SL6&Rthp-D09jCmQNPJP7!2HuHlHOYZB-tKI4mgHtWos_W|>0UjLAQ z-t6>yzZLVElT`G!);A2T+X@+`7@YLeMLBn@Kr+e1Y^4|E%8T|Z0+$p*a0=(xmIh_} z%;bq&1)SXltdrWnCud&t&}Y#@?-Q1?2c3eYIx&hcJxocpms(^Do(SJhegnNHFe2Iuiu=HTfjN;c<8D2jh7sklIk^9n~=#$ zF_gbB)ivX8d*lFM2uyd^Ipd0OCHSX*WC&T@i)7g#lI};Iz|3~8Z(r}NSb4xr5Or4$ke%@qi_k5wtl^dN*{mCO1QJvuI9W~=1r@G~SwKIq2e(HtBo8a0O1+^HEjN`#N%ew zacwS;1V{w0TfSDH>uPkAl_WbeD|ze7?vr!i3nG6`Csx8~+fy&*x)S-Ko*~ubuf3&1 z$uvN6^v6c&RDepGI*QH1?EQ()BJ#%~g1v5LB5a*+rdZzA|E5*+jCkE`$_xV5jVLUbseW+%RJewy8l3KPXq#Fv;n!q1;j!we_By%i4wI|<2Ybi%me zsKlySB4CY%oZ76h9QFOYG&EAQwsWf?mX+gtLiLh`vavTBv=O#CzCZVu7}8svfPTH* zBFSfeN1-OdSMFvXh`MU8&K{-3`r7Xf(FSF1J$no1P2_G`i(CE)m`$_D5{fI!7EXVU z9hoy8d*WpB*Ed5f4nte3X3i+Py-^>^<@O}ZWXZt_l+C*5RTBI=_D#V|XvR4akUr%ZdBjLC%5x>5DC~y0R zrrk>Z&)74o(TCXwi+gK&sM}OXip&J>zBO5g`+DnBpYu(4z1H7%jDOGtlkK={D!vuy zU6pj!3oz5ghuEu>orW6|5O(0g zm7q)Lh*~O@4QPW@s8QH{3*BVKsbL@uICT8#6**Iv@|AP}&zF9(=mR_YlkjRq!1w`v zZdTQcN=6hpzl8_>c2;TG&v`QR$afdTp9-P(RNFAiZm(Y)xaa7U93$cWR-sVGx zzs&IK_DUhTxG68XlcXq5nm@3IVNn*niI{F@^R^8T!&LH>ZZUmuEH|xEWJPa-HA~3H zoce_96SwG@o%Icq`QF+$@;=O+L<(G*Tv`^WQD?eaNSh4uxxYaIk1C0h`I%}-AixDp ztX1`pnrV=fNi?I?4U?C(GeKTqS5WK_9s;uf$vsacuS?wiMr%f3q_-Gs^G>yQynL7| zJmx%Cva#Xe4tR)tIk7&3m|s`nr(ZOK8Sjr-NPv7)%~|2*RFyb)RDW1=?sGBHgu1ZI z5b&1%lI-D#H{&70#UzCIg6IYZ@dFY9rh>L2dnY~9&9h`=gH9$MH-#j)p}Qkqp&bu> zQ3Np`-#QN13^+@oV(UUQyQ@6*N}C zye}7CIW@c^O(Q3Pibg-~DqthZmW$gxRGao{bb6aCM%(Vn-Lj%9!}gXMYWH2SU@u;A ziv{i40ixLL4)X$)bk@!;o`|=#sYPL9Ao+qqkgZ^2o;7Kp<1f`eV$>#D+o_%M9-jUmrD&1 zMC#OutFx?c;S!;Poha+h!NSiAjjs{><-K6=E5&iGsa^F*1ox&vD{p0JZ@!fl56_{iiBfz{z+ks0CMtue)CYE}-OQ%* z60CWni2pEIbD6zn^baXG9hAbxhS8vbNXS!7ln2_ABEkX8`r<*<=Ga)c_zbwD6o7v3 z*J+C%xu1R?VOE>3TO)>znc`~3JkLQEgsEUQ*hGnVF?|FOrB2LILl-zAL4tg`g!*uY z*zFHv4^3I|K6xzKYo(xTj^unDsIN-Jm7cdDSF{C|;_xGeV^jFv7(Ze(me@pzfPCY+ zlpXkwit+^xVP%piy>Wag$47F}6^bFuO=oD3+_(meC-QPFzVA<50mPp(+=m^KGH5Ju znJulx8&Y|j^Kj5JX1Pb1NDXA-6r#%eI7N=Lx9m!KnBvxE^j@ikS^oj@EugvJX3pC? 
z^Su9)Mac8?N&KgF-5xH5jYRlDx50FJVyf3Gg2}TvlY!7bv5qjFty}?U+MtdtG_s2^ zBEk<_$fN_3^17_Ixmt{j$M9Hd8dpGF9FBo}@E^AB)Pb_Dr9~+tVfc|Ocm+%XyMnPU zN84P!XLZ_X7rN#bq_Vl>PT3wy!r{TOnbV!558^^k!-geiPll)Uc zcdpKvs*8T?Gn+gu;1*Kn3H#W*z%9#*Hp5r$>E{L1`gvASkXLT>@&!;1Z+o6$Die5;o{z>>a7-PH_lUN&5*)VDDm)Hk;0Bii}riK{PVvmOe(D%K7GPsZQd zNO?EB>_Qf#eY5=0HAo`pc9pM8t>(efO1=!aV17>r7sLMK8LJjlseL3?bb7{I`DJ*F zh#^r#$DzL#^_(9ASDO@7SB~5- zVfEpvXFSTPT@~xf{=2(`RS{&xGKv1%Ps2G+1^F=c7S*&qiA!FRV~Gx=(_1u8t-q4{ zS%@vAvZQ?g1rk9dMhbsa-irX=;R2!<3M?XmGVD7ehuh9NMj*G2xAM;4$n2f3fs{PA z##@+!I|3dpg~YYy_KgzDU+#oz4;?9;Kb)kAPpYP0`ao;};=NXDpNP~Y<09PKf4I+L zrI=gRkhHgQT3$^E;MoL6T?9POFYfZlLUTcKQ zdeAS{p6r2TpUcnRvD;_EtF(-dB}%02L?qE&)+!WK6ot>OPOn&|b#_G`xVxq|?r(XH zcM&|@r7h16m$i93LbE%>?=QA-xT7iUzXI}jvBB%>vj8I%GfSDAn*!HPy-7b?V3nBk zt23;gqP&Or`Ih`A;6-f^0FjCKmrfTG_M|j=9P&9PA{uH1ToFE`u2=bUNl(i3_XDrm z{KaV(vYEYQ$3oEX*}gnGn@Q2rhpw4|;zJ%*>@rFIe#|ZP#Wx8;_rtHWKmoWKX>qUw zWxqU{qZe~+sSMkl(-ZeP`VHt4!|>Y=?iEjVF0bt?0oHMII|fCzw+NSIyB`R28Vk=U zn!Q^SyR0ABmrh9VKW>SJ8gQ!IL1PF1AA$k@u({`cB+`1(L+M4IuS{Sk>K u>3PXpIN_?Z^Z3Tg&+q1JbMwJ89}=yAd+!LcnFdEgeV!?Ul*$w=LjMP>h*x<4 From cf200a32ae8c7a3cc692f1a11c5e09eea6cd4943 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 18 Jan 2024 12:33:19 -0800 Subject: [PATCH 386/792] fix(docs-website): update copy around the video on index (#9661) --- docs-website/src/pages/index.js | 7 ++++--- metadata-ingestion/docs/sources/mssql/mssql_recipe.yml | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs-website/src/pages/index.js b/docs-website/src/pages/index.js index 085680df343a4..07345ac7100fa 100644 --- a/docs-website/src/pages/index.js +++ b/docs-website/src/pages/index.js @@ -66,12 +66,13 @@ function Home() { width: "18rem", }} > - What Is DataHub? + The Origins of DataHub {/*
*/}

- Explore DataHub's journey from start to present,

- and learn how we empower leaders in today's data-driven world.
+ Explore DataHub's journey from search and discovery tool at
+ LinkedIn to the #1 open source metadata platform, through the
+ lens of its founder and some amazing community members.

diff --git a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml
index 93be7a86d72cc..5cfc086756090 100644
--- a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml
+++ b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml
@@ -27,7 +27,7 @@ sink:
 # ------------------------------------------------------------------------
 source:
-  type: mssql
+  type: mssql-odbc
   config:
     # Coordinates
     host_port: localhost:1433

From 6cb3dc839cf2c845a8d2f182c20fa68dcc1a66b6 Mon Sep 17 00:00:00 2001
From: juhyun seo
Date: Fri, 19 Jan 2024 06:45:00 +0900
Subject: [PATCH 387/792] fix(protobuf): fix reserved field error in fields in nested messages (#9318)

---
 .../datahub/protobuf/model/ProtobufField.java | 17 ++--
 .../protobuf/model/ProtobufFieldTest.java     | 32 +++++++
 .../extended_protobuf/messageD.proto          | 89 ++++++++++++++++++
 .../extended_protobuf/messageD.protoc         | Bin 0 -> 8749 bytes
 4 files changed, 132 insertions(+), 6 deletions(-)
 create mode 100644 metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto
 create mode 100644 metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.protoc

diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java
index 5bb41017488f3..c3ede2e62c314 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java
@@ -277,13 +277,18 @@ private FieldDescriptorProto getNestedTypeFields(
       messageType = messageType.getNestedType(value);
     }

-    if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER
-        && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER
-        && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) {
-      return messageType.getField(pathList.get(pathSize - 1));
-    } else {
-      return null;
+    int fieldIndex = pathList.get(pathList.size() - 1);
+    if (isFieldPath(pathList)
+        && pathSize % 2 == 0
+        && fieldIndex < messageType.getFieldList().size()) {
+      return messageType.getField(fieldIndex);
     }
+
+    return null;
+  }
+
+  private boolean isFieldPath(List<Integer> pathList) {
+    return pathList.get(pathList.size() - 2) == DescriptorProto.FIELD_FIELD_NUMBER;
   }

   private boolean isEnumType(List<Integer> pathList) {
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java
index 9508f4778e5c8..40d54a8651012 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java
@@ -323,4 +323,36 @@ public void nestedTypeFieldTest() throws IOException {

     assertEquals("Zip code, alphanumeric", addressField.getDescription());
   }
+
+  @Test
+  public void nestedTypeReservedFieldsTest() throws IOException {
+    ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageD");
+    SchemaMetadata testMetadata = test.getSchemaMetadata();
+
+    SchemaField msg3Field13 =
+        testMetadata.getFields().stream()
+            .filter(
+                v ->
+                    v.getFieldPath()
+                        .equals(
+                            "[version=2.0].[type=extended_protobuf_MyMsg]."
+                                + "[type=extended_protobuf_MyMsg_Msg3].field3.[type=google_protobuf_StringValue].msg3_13"))
+            .findFirst()
+            .orElseThrow();
+
+    assertEquals("test comment 13", msg3Field13.getDescription());
+
+    SchemaField msg3Field14 =
+        testMetadata.getFields().stream()
+            .filter(
+                v ->
+                    v.getFieldPath()
+                        .equals(
+                            "[version=2.0].[type=extended_protobuf_MyMsg]."
+                                + "[type=extended_protobuf_MyMsg_Msg3].field3.[type=google_protobuf_StringValue].msg3_14"))
+            .findFirst()
+            .orElseThrow();
+
+    assertEquals("test comment 14", msg3Field14.getDescription());
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto b/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto
new file mode 100644
index 0000000000000..4aaf80cf788dd
--- /dev/null
+++ b/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto
@@ -0,0 +1,89 @@
+syntax = "proto3";
+package extended_protobuf;
+
+import "google/protobuf/wrappers.proto";
+
+/*
+  MyMsg Message
+ */
+message MyMsg {
+  /*
+    Message 1
+   */
+  message Msg1 {
+    int32 msg1_id = 1;
+  }
+  Msg1 msg1_field = 1;
+
+  /*
+    Message 2
+   */
+  message Msg2 {
+    int32 msg2_id = 1;
+  }
+  Msg2 msg2_field = 2;
+
+  /*
+    Message 3
+   */
+  message Msg3 {
+    // test comment 1
+    google.protobuf.Int64Value msg3_1 = 1;
+    // test comment 2
+    google.protobuf.Int64Value msg3_2 = 2;
+    // test comment 3
+    google.protobuf.Int64Value msg3_3 = 3;
+    // test comment 4
+    google.protobuf.StringValue msg3_4 = 4;
+    // test comment 5
+    reserved 5;
+    // test comment 6
+    reserved 6;
+
+    message Msg4 {
+      // msg4_1 comment
+      google.protobuf.Int32Value msg4_1 = 1;
+      // msg4_2 reserved
+      reserved 2;
+      // msg4_3 comment
+      google.protobuf.Int32Value msg4_3 = 3;
+
+      message Msg5 {
+        // msg5_1 comment
+        google.protobuf.Int32Value msg5_1 = 1;
+        // msg5_2 comment
+        google.protobuf.Int32Value msg5_2 = 2;
+        // msg5_3 comment
+        google.protobuf.Int32Value msg5_3 = 3;
+        // msg5_4 comment
+        google.protobuf.Int32Value msg5_4 = 4;
+        // reserved comment
+        reserved 5;
+        // msg5_6 comment
+        google.protobuf.Int32Value msg5_6 = 6;
+      }
+      // msg5 comment
+      Msg5 msg5 = 4;
+    }
+    // test comment 7
+    Msg4 msg4 = 7;
+    // test comment 8
+    google.protobuf.StringValue msg3_8 = 8;
+    // test comment 9
+    google.protobuf.StringValue msg3_9 = 9;
+    // test comment 10
+    google.protobuf.StringValue msg3_10 = 10;
+    // test comment 11
+    reserved 11;
+    // test comment 12
+    google.protobuf.StringValue msg3_12 = 12;
+    // test comment 13
+    google.protobuf.StringValue msg3_13 = 13;
+    // test comment 14
+    google.protobuf.StringValue msg3_14 = 14;
+    // test comment 15
+    google.protobuf.StringValue msg3_15 = 15;
+  }
+  // field 3
+  Msg3 field3 = 3;
+}
\ No newline at end of file
diff --git a/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.protoc b/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.protoc
new file mode 100644
index 0000000000000000000000000000000000000000..03cb56b35314a849cb594321a119f3e05965ed3c
GIT binary patch
literal 8749

literal 0
HcmV?d00001
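The `ProtobufField` change in the patch above turns on how protobuf `SourceCodeInfo` location paths are laid out: a path addresses an element as alternating (tag, index) pairs, and a comment attached to a `reserved` declaration inside a nested message can end in an index that has no matching entry in the message's field list. Below is a minimal Python sketch of the patched check; the helper names, example paths, and standalone constant are ours for illustration, not DataHub code (`2` is the tag of `DescriptorProto.field` in `descriptor.proto`).

```python
from typing import List, Optional

FIELD_FIELD_NUMBER = 2  # tag of DescriptorProto.field in descriptor.proto


def is_field_path(path: List[int]) -> bool:
    # The second-to-last entry is the tag of the last (tag, index) pair;
    # only a DescriptorProto.field tag means the path addresses a field.
    return len(path) >= 2 and path[-2] == FIELD_FIELD_NUMBER


def nested_field_index(path: List[int], num_fields: int) -> Optional[int]:
    # Mirrors the patched Java logic: a well-formed field path has even
    # length (pairs only), and the trailing index must be in bounds.
    # Reserved ranges/names in nested messages can otherwise yield an
    # index past the real field list.
    index = path[-1]
    if is_field_path(path) and len(path) % 2 == 0 and index < num_fields:
        return index
    return None


# message 0 -> nested type 2 -> field 1 resolves normally...
assert nested_field_index([4, 0, 3, 2, 2, 1], num_fields=13) == 1
# ...while an out-of-bounds index, as produced around `reserved` entries,
# now returns None instead of raising.
assert nested_field_index([4, 0, 3, 2, 2, 20], num_fields=13) is None
```

The `pathSize % 2 == 0` guard in the Java serves the same purpose as the even-length check in this sketch.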
From f2e78db92e050483c851fb9edd4beb251905dfd4 Mon Sep 17 00:00:00 2001
From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com>
Date: Fri, 19 Jan 2024 03:56:33 +0530
Subject: [PATCH 388/792] feat(ingest/databricks): ingest hive metastore by default, more docs (#9601)

Co-authored-by: Harshal Sheth <hsheth2@gmail.com>
---
 docs/how/updating-datahub.md                  |   20 +
 .../sources/databricks/unity-catalog_post.md  |   28 +-
 .../sources/databricks/unity-catalog_pre.md   |    5 +
 .../datahub/ingestion/source/unity/config.py  |   10 +-
 .../unity/unity_catalog_mces_golden.json      | 2506 ++++++++---------
 .../tests/unit/test_unity_catalog_config.py   |    4 +
 6 files changed, 1178 insertions(+), 1395 deletions(-)

diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md
index fb082bea7d151..b671e2fc5d123 100644
--- a/docs/how/updating-datahub.md
+++ b/docs/how/updating-datahub.md
@@ -10,6 +10,26 @@ This file documents any backwards-incompatible changes in DataHub and assists pe
 - Neo4j 5.x, may require migration from 4.x
 - Build requires JDK17 (Runtime Java 11)
 - Build requires Docker Compose > 2.20
+- #9601 - The Unity Catalog (UC) ingestion source config `include_metastore` is now disabled by default. This change will affect the urns of all entities in the workspace.
+  Entity Hierarchy with `include_metastore: true` (Old)
+  ```
+  - UC Metastore
+    - Catalog
+      - Schema
+        - Table
+  ```
+
+  Entity Hierarchy with `include_metastore: false` (New)
+  ```
+  - Catalog
+    - Schema
+      - Table
+  ```
+  We recommend using `platform_instance` for differentiating across metastores.
+
+  If stateful ingestion is enabled, running ingestion with latest cli version will perform all required cleanup. Otherwise, we recommend soft deleting all databricks data via the DataHub CLI:
+  `datahub delete --platform databricks --soft` and then reingesting with latest cli version.
+- #9601 - The Unity Catalog (UC) ingestion source config `include_hive_metastore` is now enabled by default. This requires config `warehouse_id` to be set. You can disable `include_hive_metastore` by setting it to `False` to avoid ingesting legacy hive metastore catalog in Databricks.

 ### Potential Downtime

diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md b/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md
index 472b0a541911a..df244f7d9ae9c 100644
--- a/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md
+++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md
@@ -1,11 +1,33 @@
-#### Troubleshooting

-##### No data lineage captured or missing lineage
+
+### Advanced
+
+#### Multiple Databricks Workspaces
+
+If you have multiple databricks workspaces **that point to the same Unity Catalog metastore**, our suggestion is to use separate recipes for ingesting the workspace-specific Hive Metastore catalog and Unity Catalog metastore's information schema.
+
+To ingest Hive metastore information schema
+- Setup one ingestion recipe per workspace
+- Use platform instance equivalent to workspace name
+- Ingest only hive_metastore catalog in the recipe using config `catalogs: ["hive_metastore"]`
+
+To ingest Unity Catalog information schema
+- Disable hive metastore catalog ingestion in the recipe using config `include_hive_metastore: False`
+- Ideally, just ingest from one workspace
+- To ingest from both workspaces (e.g. if each workspace has different permissions and therefore restricted view of the UC metastore):
+  - Use same platform instance for all workspaces using same UC metastore
+  - Ingest usage from only one workspace (you lose usage from other workspace)
+  - Use filters to only ingest each catalog once, but shouldn’t be necessary
+
+
+### Troubleshooting
+
+#### No data lineage captured or missing lineage

 Check that you meet the [Unity Catalog lineage requirements](https://docs.databricks.com/data-governance/unity-catalog/data-lineage.html#requirements).

 Also check the [Unity Catalog limitations](https://docs.databricks.com/data-governance/unity-catalog/data-lineage.html#limitations) to make sure that lineage would be expected to exist in this case.

-##### Lineage extraction is too slow
+#### Lineage extraction is too slow

 Currently, there is no way to get table or column lineage in bulk from the Databricks Unity Catalog REST api. Table lineage calls require one API call per table, and column lineage calls require one API call per column. If you find metadata extraction taking too long, you can turn off column level lineage extraction via the `include_column_lineage` config flag.
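To make the multi-workspace split described in the docs above concrete, the two recipes might look like the following. This is a sketch using DataHub's Python pipeline API rather than YAML; the workspace URL, token, warehouse id, and sink address are placeholders to adapt per workspace.

```python
from datahub.ingestion.run.pipeline import Pipeline

# Recipe 1: workspace-specific Hive Metastore catalog only.
hive_metastore_recipe = {
    "source": {
        "type": "unity-catalog",
        "config": {
            "workspace_url": "https://workspace-a.cloud.databricks.com",  # placeholder
            "token": "<token>",  # placeholder
            "warehouse_id": "<warehouse-id>",  # needed to query hive_metastore
            "platform_instance": "workspace-a",  # platform instance = workspace name
            "catalogs": ["hive_metastore"],
        },
    },
    "sink": {"type": "datahub-rest", "config": {"server": "http://localhost:8080"}},
}

# Recipe 2: Unity Catalog information schema, ingested from a single workspace.
uc_metastore_recipe = {
    "source": {
        "type": "unity-catalog",
        "config": {
            "workspace_url": "https://workspace-a.cloud.databricks.com",  # placeholder
            "token": "<token>",  # placeholder
            "include_hive_metastore": False,  # UC information schema only
        },
    },
    "sink": {"type": "datahub-rest", "config": {"server": "http://localhost:8080"}},
}

if __name__ == "__main__":
    for recipe in (hive_metastore_recipe, uc_metastore_recipe):
        Pipeline.create(recipe).run()
```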
diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md
index 12540e1977f64..22f3f9cb1d276 100644
--- a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md
+++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md
@@ -13,6 +13,11 @@
     * Ownership of or `SELECT` privilege on any tables and views you want to ingest
       * [Ownership documentation](https://docs.databricks.com/data-governance/unity-catalog/manage-privileges/ownership.html)
       * [Privileges documentation](https://docs.databricks.com/data-governance/unity-catalog/manage-privileges/privileges.html)
+  + To ingest legacy hive_metastore catalog (`include_hive_metastore` - disabled by default), your service principal must have all of the following:
+    * `READ_METADATA` and `USAGE` privilege on `hive_metastore` catalog
+    * `READ_METADATA` and `USAGE` privilege on schemas you want to ingest
+    * `READ_METADATA` and `USAGE` privilege on tables and views you want to ingest
+    * [Hive Metastore Privileges documentation](https://docs.databricks.com/en/sql/language-manual/sql-ref-privileges-hms.html)
   + To ingest your workspace's notebooks and respective lineage, your service principal must have `CAN_READ` privileges on the folders containing the notebooks you want to ingest: [guide](https://docs.databricks.com/en/security/auth-authz/access-control/workspace-acl.html#folder-permissions).
   + To `include_usage_statistics` (enabled by default), your service principal must have `CAN_MANAGE` permissions on any SQL Warehouses you want to ingest: [guide](https://docs.databricks.com/security/auth-authz/access-control/sql-endpoint-acl.html).
   + To ingest `profiling` information with `method: ge`, you need `SELECT` privileges on all profiled tables.
diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py
index df36153af9d83..d933e5a5ff38e 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py
@@ -126,7 +126,7 @@ class UnityCatalogSourceConfig(
         description="SQL Warehouse id, for running queries. If not set, will use the default warehouse.",
     )
     include_hive_metastore: bool = pydantic.Field(
-        default=False,
+        default=True,
         description="Whether to ingest legacy `hive_metastore` catalog. This requires executing queries on SQL warehouse.",
     )
     workspace_name: Optional[str] = pydantic.Field(
@@ -135,12 +135,12 @@
     )

     include_metastore: bool = pydantic.Field(
-        default=True,
+        default=False,
         description=(
             "Whether to ingest the workspace's metastore as a container and include it in all urns."
             " Changing this will affect the urns of all entities in the workspace."
-            " This will be disabled by default in the future,"
-            " so it is recommended to set this to `False` for new ingestions."
+            " This config is deprecated and will be removed in the future,"
+            " so it is recommended to not set this to `True` for new ingestions."
             " If you have an existing unity catalog ingestion, you'll want to avoid duplicates by soft deleting existing data."
             " If stateful ingestion is enabled, running with `include_metastore: false` should be sufficient."
             " Otherwise, we recommend deleting via the cli: `datahub delete --platform databricks` and re-ingesting with `include_metastore: false`."
@@ -299,7 +299,7 @@ def include_metastore_warning(cls, v: bool) -> bool: if v: msg = ( "`include_metastore` is enabled." - " This is not recommended and will be disabled by default in the future, which is a breaking change." + " This is not recommended and this option will be removed in the future, which is a breaking change." " All databricks urns will change if you re-ingest with this disabled." " We recommend soft deleting all databricks data and re-ingesting with `include_metastore` set to `False`." ) diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 383f94144ffdc..649212c1041ed 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,42 +9,10 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore" + "catalog": "hive_metastore" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data", - "name": "acryl metastore" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore", + "name": "hive_metastore" } }, "systemMetadata": { @@ -55,13 +23,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Metastore" + "Catalog" ] } }, @@ -73,37 +41,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [] + "removed": false } }, "systemMetadata": { @@ -114,19 +57,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "entityUrn": 
"urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "hive_metastore" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore", - "name": "hive_metastore" + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -137,12 +73,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [] } }, "systemMetadata": { @@ -153,7 +89,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -161,30 +97,11 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main" + "catalog": "hive_metastore", + "unity_schema": "bronze_kambi" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", - "name": "main", - "description": "Main catalog (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Catalog" - ] + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi", + "name": "bronze_kambi" } }, "systemMetadata": { @@ -195,28 +112,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" + "container": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" } }, "systemMetadata": { @@ -227,28 +128,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -264,7 +144,7 @@ }, { "entityType": "container", - "entityUrn": 
"urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -280,13 +160,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Catalog" + "Schema" ] } }, @@ -298,96 +178,15 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "hive_metastore", - "unity_schema": "bronze_kambi" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi", - "name": "bronze_kambi" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "id": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", + "urn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" } ] } @@ -398,64 +197,14 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Schema" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - } - }, - 
"systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + "container": "urn:li:container:21058fb6993a790a4a43727021e52956" } }, "systemMetadata": { @@ -465,22 +214,21 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "datasetProfile", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - } - ] + "timestampMillis": 1705308660413, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -491,7 +239,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -504,7 +252,7 @@ "Last Access": "UNKNOWN", "Created By": "Spark 3.2.1", "Owner": "root", - "table_id": "acryl_metastore.hive_metastore.bronze_kambi.view1", + "table_id": "hive_metastore.bronze_kambi.view1", "created_at": "2022-06-22 05:14:56" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/view1", @@ -527,50 +275,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", - "changeType": "UPSERT", - "aspectName": "viewProperties", - "aspect": { - "json": { - "materialized": false, - "viewLogic": "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", - "viewLanguage": "SQL" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "default" - }, - "externalUrl": 
"https://dummy.cloud.databricks.com/explore/data/main/default", - "name": "default", - "description": "Default schema (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -588,57 +293,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - }, - { - "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.view1", + "schemaName": "hive_metastore.bronze_kambi.view1", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1044,19 +704,166 @@ "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" }, { - "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "double", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", + "urn": 
"urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" + }, + { + "id": "urn:li:container:21058fb6993a790a4a43727021e52956", + "urn": "urn:li:container:21058fb6993a790a4a43727021e52956" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "main.quickstart_schema.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "lastModified": { + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:c45a3b960d7503abfb5549f583eb0517" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + 
"time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -1067,12 +874,35 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "containerProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "main" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", + "name": "main", + "description": "Main catalog (auto-created)" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false } }, "systemMetadata": { @@ -1083,14 +913,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "removed": false } }, "systemMetadata": { @@ -1101,21 +929,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -1126,12 +945,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + "container": "urn:li:container:21058fb6993a790a4a43727021e52956" } }, "systemMetadata": { @@ -1142,12 +961,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "browsePathsV2", "aspect": { "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "path": [] } }, "systemMetadata": { @@ -1158,37 +977,54 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "schemaMetadata", "aspect": { "json": { - "customProperties": { - "storage_location": "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", - "data_source_format": "DELTA", - "table_type": "HIVE_MANAGED_TABLE", - "Catalog": "hive_metastore", - "Database": "bronze_kambi", - "Table": "bet", 
- "Last Access": "UNKNOWN", - "Created By": "Spark 3.2.1", - "Statistics": "1024 bytes, 3 rows", - "Owner": "root", - "Is_managed_location": "true", - "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", - "table_id": "acryl_metastore.hive_metastore.bronze_kambi.bet", - "created_at": "2022-06-22 05:14:56" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", - "name": "bet", - "qualifiedName": "hive_metastore.bronze_kambi.bet", + "schemaName": "main.quickstart_schema.quickstart_table", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, "created": { - "time": 1655874896000 + "time": 0, + "actor": "urn:li:corpuser:unknown" }, "lastModified": { - "time": 1655874896000 + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "tags": [] + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "columnA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "columnB", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] } }, "systemMetadata": { @@ -1198,14 +1034,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Catalog" ] } }, @@ -1216,21 +1052,14 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "subTypes", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } + "typeNames": [ + "Table" ] } }, @@ -1241,26 +1070,22 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - }, + "owners": [ { - "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -1271,12 +1096,12 @@ }, { "entityType": "dataset", - 
"entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.bet", + "schemaName": "hive_metastore.bronze_kambi.bet", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1705,12 +1530,111 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", + "urn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" + }, + { + "id": "urn:li:container:21058fb6993a790a4a43727021e52956", + "urn": "urn:li:container:21058fb6993a790a4a43727021e52956" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "storage_location": "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", + "data_source_format": "DELTA", + "table_type": "HIVE_MANAGED_TABLE", + "Catalog": "hive_metastore", + "Database": "bronze_kambi", + "Table": "bet", + "Last Access": "UNKNOWN", + "Created By": "Spark 3.2.1", + "Statistics": "1024 bytes, 3 rows", + "Owner": "root", + "Is_managed_location": "true", + "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", + "table_id": "hive_metastore.bronze_kambi.bet", + "created_at": "2022-06-22 05:14:56" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", + "name": "bet", + "qualifiedName": "hive_metastore.bronze_kambi.bet", + "created": { + "time": 1655874896000 + }, + "lastModified": { + "time": 1655874896000 + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" + }, + { + "id": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", + "urn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", "aspect": { "json": { - "container": "urn:li:container:5ada0a9773235325e506410c512feabb" + "timestampMillis": 
1705308660403, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -1721,7 +1645,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1744,9 +1668,9 @@ "updated_at": "2022-10-19 13:27:29.633000+00:00", "created_at": "2022-10-19 13:21:38.688000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "main.default.quickstart_table", + "qualifiedName": "quickstart_catalog.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -1766,14 +1690,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "container", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "container": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc" } }, "systemMetadata": { @@ -1784,53 +1706,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "subTypes", "aspect": { "json": { - "schemaName": "acryl_metastore.main.default.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } + "typeNames": [ + "Table" ] } }, @@ -1841,22 +1723,21 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "containerProperties", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "main", + "unity_schema": "default" + }, + 
"externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -1867,50 +1748,21 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - }, + "owners": [ { - "id": "urn:li:container:5ada0a9773235325e506410c512feabb", - "urn": "urn:li:container:5ada0a9773235325e506410c512feabb" + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "quickstart_schema" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -1921,7 +1773,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1937,7 +1789,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -1952,15 +1804,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "removed": false } }, "systemMetadata": { @@ -1970,38 +1820,55 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "schemaMetadata", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], + "schemaName": "quickstart_catalog.default.quickstart_table", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown" - } - } 
- }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "columnA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "columnB", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] } }, "systemMetadata": { @@ -2012,19 +1879,15 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" } ] } @@ -2037,12 +1900,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "container": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e" } }, "systemMetadata": { @@ -2052,42 +1915,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.quickstart_schema.quickstart_table", - "created": { - "time": 1666185698688, - "actor": 
"urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -2098,7 +1934,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2116,12 +1952,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.main.quickstart_schema.quickstart_table", + "schemaName": "main.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -2174,21 +2010,66 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "main.default.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "lastModified": { + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePathsV2", "aspect": { "json": { - "owners": [ + "path": [ { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" + }, + { + "id": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", + "urn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e" } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + ] } }, "systemMetadata": { @@ -2199,25 +2080,21 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - }, + "owners": [ { - "id": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "urn": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -2228,7 +2105,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2236,7 +2113,6 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore", "catalog": "quickstart_catalog" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog", @@ -2251,13 +2127,22 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" + }, + { + "id": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", + "urn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc" + } + ] } }, "systemMetadata": { @@ -2268,12 +2153,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "status", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "removed": false } }, "systemMetadata": { @@ -2284,14 +2169,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "typeNames": [ - "Catalog" - ] + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2302,14 +2185,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -2327,33 +2210,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": 
"urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - ] + "path": [] } }, "systemMetadata": { @@ -2364,21 +2226,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "container", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog", - "unity_schema": "default" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", - "name": "default", - "description": "Default schema (auto-created)" + "container": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } }, "systemMetadata": { @@ -2389,12 +2242,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "subTypes", "aspect": { "json": { - "removed": false + "typeNames": [ + "Catalog" + ] } }, "systemMetadata": { @@ -2405,12 +2260,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "container", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "container": "urn:li:container:045573d60442121f01b8d66a3eb95622" } }, "systemMetadata": { @@ -2421,14 +2276,21 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "ownership", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -2439,14 +2301,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:abc@acryl.io", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -2464,12 +2326,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "subTypes", "aspect": { "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -2480,21 +2344,12 @@ }, { "entityType": 
"container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - } - ] + "removed": false } }, "systemMetadata": { @@ -2504,13 +2359,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "container": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2520,60 +2375,21 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "containerProperties", "aspect": { "json": { "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.default.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" + "platform": "databricks", + "env": "PROD", + "catalog": "system", + "unity_schema": "quickstart_schema" }, - "tags": [] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Table" - ] + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -2583,53 +2399,16 @@ } }, { - "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "browsePathsV2", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.default.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, + "path": [ { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } ] } @@ -2641,15 +2420,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:Service Principal 2", "type": "DATAOWNER" } ], @@ -2666,25 +2445,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "subTypes", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "urn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" - } + "typeNames": [ + "Catalog" ] } }, @@ -2696,7 +2464,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2704,11 +2472,10 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog", + "catalog": "main", "unity_schema": "quickstart_schema" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", "name": "quickstart_schema", "description": "A new Unity Catalog schema called quickstart_schema" } @@ -2721,7 +2488,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": 
"urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2737,12 +2504,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "status", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "removed": false } }, "systemMetadata": { @@ -2753,14 +2520,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2771,21 +2536,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2796,12 +2552,19 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "containerProperties", "aspect": { "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "system" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", + "name": "system", + "description": "System catalog (auto-created)" } }, "systemMetadata": { @@ -2812,19 +2575,15 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" } ] } @@ -2836,58 +2595,21 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:47a033e31b92a120f08f297c05d286f1" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": 
"UPSERT", - "aspectName": "datasetProperties", + "aspectName": "containerProperties", "aspect": { "json": { "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" + "platform": "databricks", + "env": "PROD", + "catalog": "quickstart_catalog", + "unity_schema": "quickstart_schema" }, - "tags": [] + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -2897,14 +2619,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Schema" ] } }, @@ -2915,55 +2637,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "status", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } - ] + "removed": false } }, "systemMetadata": { @@ -2973,22 +2653,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": 
"container", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2998,26 +2669,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:47a033e31b92a120f08f297c05d286f1", - "urn": "urn:li:container:47a033e31b92a120f08f297c05d286f1" - } - ] + "path": [] } }, "systemMetadata": { @@ -3028,20 +2686,21 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "ownership", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "system" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", - "name": "system", - "description": "System catalog (auto-created)" + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3052,12 +2711,17 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" + } + ] } }, "systemMetadata": { @@ -3068,12 +2732,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "subTypes", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -3084,14 +2750,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "container", "aspect": { "json": { - "typeNames": [ - "Catalog" - ] + "container": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } }, "systemMetadata": { @@ -3102,14 +2766,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": 
"UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:Service Principal 2", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -3127,12 +2791,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "container": "urn:li:container:045573d60442121f01b8d66a3eb95622" } }, "systemMetadata": { @@ -3143,17 +2807,37 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "container", "aspect": { "json": { - "path": [ + "container": "urn:li:container:730e95cd0271453376b3c1d9623838d6" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "owner": "urn:li:corpuser:abc@acryl.io", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3164,21 +2848,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "system", - "unity_schema": "default" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default", - "name": "default", - "description": "Default schema (auto-created)" + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -3189,7 +2866,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3205,7 +2882,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -3221,14 +2898,20 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "containerProperties", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "system", + "unity_schema": "default" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -3239,21 +2922,17 @@ }, { "entityType": "container", - "entityUrn": 
"urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePathsV2", "aspect": { "json": { - "owners": [ + "path": [ { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + ] } }, "systemMetadata": { @@ -3264,12 +2943,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "container": "urn:li:container:730e95cd0271453376b3c1d9623838d6" } }, "systemMetadata": { @@ -3280,21 +2959,21 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, + "owners": [ { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "owner": "urn:li:corpuser:abc@acryl.io", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3305,12 +2984,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:b330768923270ff5450695bee1c94247" + "container": "urn:li:container:934b6043df189ef6dc63ac3519be34ac" } }, "systemMetadata": { @@ -3320,42 +2999,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", - "name": "quickstart_table", - 
"qualifiedName": "system.default.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -3366,7 +3018,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -3384,12 +3036,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.system.default.quickstart_table", + "schemaName": "quickstart_catalog.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -3442,21 +3094,41 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "datasetProperties", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] } }, "systemMetadata": { @@ -3467,23 +3139,19 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "id": 
"urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" - }, - { - "id": "urn:li:container:b330768923270ff5450695bee1c94247", - "urn": "urn:li:container:b330768923270ff5450695bee1c94247" + "id": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", + "urn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac" } ] } @@ -3495,22 +3163,22 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "ownership", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "system", - "unity_schema": "quickstart_schema" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3521,7 +3189,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3537,7 +3205,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -3553,14 +3221,20 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "containerProperties", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "quickstart_catalog", + "unity_schema": "default" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -3571,21 +3245,17 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePathsV2", "aspect": { "json": { - "owners": [ + "path": [ { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" + "id": "urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + ] } }, "systemMetadata": { @@ -3595,13 +3265,36 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": 
"container", + "aspectName": "datasetProfile", "aspect": { "json": { - "container": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "timestampMillis": 1705308660402, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 3, + "columnCount": 3, + "fieldProfiles": [ + { + "fieldPath": "betStatusId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + }, + { + "fieldPath": "channelId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + } + ], + "sizeInBytes": 1024 } }, "systemMetadata": { @@ -3611,22 +3304,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" - } - ] + "removed": false } }, "systemMetadata": { @@ -3637,12 +3321,20 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "datasetProfile", "aspect": { "json": { - "container": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1" + "timestampMillis": 1705308660401, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -3653,7 +3345,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -3684,10 +3376,69 @@ "actor": "urn:li:corpuser:abc@acryl.io" }, "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:a1123d3ed81951784140565f5085b96d" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 
1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3698,14 +3449,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "removed": false } }, "systemMetadata": { @@ -3716,12 +3465,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.system.quickstart_schema.quickstart_table", + "schemaName": "system.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -3774,48 +3523,19 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" - }, - { - "id": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", - "urn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1" + "id": "urn:li:container:a1123d3ed81951784140565f5085b96d", + "urn": "urn:li:container:a1123d3ed81951784140565f5085b96d" } ] } @@ -3828,12 +3548,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProfile", "aspect": { "json": { - "timestampMillis": 1703580920011, + 
"timestampMillis": 1705308660401, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -3852,35 +3572,41 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "datasetProperties", "aspect": { "json": { - "timestampMillis": 1703581191932, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, - "rowCount": 3, - "columnCount": 3, - "fieldProfiles": [ - { - "fieldPath": "betStatusId", - "uniqueCount": 1, - "uniqueProportion": 0.3333333333333333, - "nullCount": 0, - "nullProportion": 0.0 - }, - { - "fieldPath": "channelId", - "uniqueCount": 1, - "uniqueProportion": 0.3333333333333333, - "nullCount": 0, - "nullProportion": 0.0 - } - ], - "sizeInBytes": 1024 + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "system.default.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "lastModified": { + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] } }, "systemMetadata": { @@ -3891,12 +3617,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "container", "aspect": { "json": { - "removed": false + "container": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6" } }, "systemMetadata": { @@ -3907,20 +3633,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "subTypes", "aspect": { "json": { - "timestampMillis": 1703580406273, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 + "typeNames": [ + "Table" + ] } }, "systemMetadata": { @@ -3931,12 +3651,21 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "ownership", "aspect": { "json": { - "removed": false + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3947,20 +3676,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "status", "aspect": { "json": { - "timestampMillis": 1703580920008, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 + "removed": false } }, "systemMetadata": { @@ -3971,60 +3692,54 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "schemaMetadata", "aspect": { "json": { - "timestampMillis": 1703580920011, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "schemaName": "system.default.quickstart_table", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProfile", - "aspect": { - "json": { - "timestampMillis": 1703580920012, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "columnA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "columnB", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] } }, "systemMetadata": { @@ -4035,12 +3750,21 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" + }, + { + "id": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", + "urn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6" + } + ] } }, "systemMetadata": { @@ -4051,12 +3775,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProfile", "aspect": { "json": { - "timestampMillis": 1703580920010, + "timestampMillis": 1705308660402, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -4075,7 +3799,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4091,7 +3815,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4107,12 +3831,20 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "datasetProfile", "aspect": { "json": { - "removed": false + "timestampMillis": 1705308660402, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -4123,7 +3855,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { diff --git a/metadata-ingestion/tests/unit/test_unity_catalog_config.py b/metadata-ingestion/tests/unit/test_unity_catalog_config.py index 3c0994cde7889..6b97d06b7ff93 100644 --- a/metadata-ingestion/tests/unit/test_unity_catalog_config.py +++ b/metadata-ingestion/tests/unit/test_unity_catalog_config.py @@ -15,6 +15,7 @@ def test_within_thirty_days(): "token": "token", "workspace_url": "https://workspace_url", "include_usage_statistics": True, + "include_hive_metastore": False, "start_time": FROZEN_TIME - timedelta(days=30), } ) @@ -38,6 +39,7 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", + 
"include_hive_metastore": False, "profiling": { "enabled": True, "method": "ge", @@ -51,6 +53,7 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", + "include_hive_metastore": False, "profiling": {"enabled": False, "method": "ge"}, } ) @@ -60,6 +63,7 @@ def test_profiling_requires_warehouses_id(): UnityCatalogSourceConfig.parse_obj( { "token": "token", + "include_hive_metastore": False, "workspace_url": "workspace_url", } ) From 2b744fac7f9856dc84806f7716397edb263542a2 Mon Sep 17 00:00:00 2001 From: Yang Jiandan Date: Fri, 19 Jan 2024 11:40:56 +0800 Subject: [PATCH 389/792] fix(docker):The datahub-frontend service failed to start when executing dev.sh #7616 (#7618) Co-authored-by: yangjd33 Co-authored-by: RyanHolstien Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- docker/datahub-frontend/Dockerfile | 9 ++++----- docker/docker-compose.dev.yml | 1 + 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 17d691177aa34..5563fd6350e20 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -15,10 +15,13 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # Upgrade Alpine and base packages # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 +ENV JMX_VERSION=0.18.0 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ - && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ + && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar ENV LD_LIBRARY_PATH="/lib:/lib64" @@ -32,10 +35,6 @@ RUN unzip datahub-frontend.zip -d /datahub-frontend \ COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend -ENV JMX_VERSION=0.18.0 -RUN wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ - && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar - FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134

diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index 7067b68fba3f9..23ac821670e44 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -25,6 +25,7 @@ services:
       - DATAHUB_ANALYTICS_ENABLED=${DATAHUB_ANALYTICS_ENABLED:-true}
     volumes:
       - ../datahub-frontend/build/stage/main:/datahub-frontend
+      - ./monitoring/client-prometheus-config.yaml:/datahub-frontend/client-prometheus-config.yaml
   datahub-gms:
     image: linkedin/datahub-gms:debug
     ports:

From 3682c5f1d03c673f72215bd335b17ecacbc33afb Mon Sep 17 00:00:00 2001
From: Indy Prentice
Date: Thu, 18 Jan 2024 21:41:08 -0600
Subject: [PATCH 390/792] feat(openapi): Implement getIndexSizes function from rest.li in openapi (#8730)

Co-authored-by: Indy Prentice
Co-authored-by: David Leifker
---
 .../elastic/OperationsController.java | 39 +++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java
index f7c848f91a64c..777d65d517b81 100644
--- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java
+++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java
@@ -9,9 +9,12 @@
 import com.google.common.collect.ImmutableList;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.systemmetadata.SystemMetadataService;
+import com.linkedin.metadata.timeseries.TimeseriesAspectService;
+import com.linkedin.timeseries.TimeseriesIndexSizeResult;
 import io.datahubproject.openapi.util.ElasticsearchUtils;
 import io.swagger.v3.oas.annotations.tags.Tag;
 import java.util.List;
+import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.json.JSONObject;
 import org.opensearch.client.tasks.GetTaskResponse;
@@ -44,6 +47,10 @@ public class OperationsController {
   @Qualifier("elasticSearchSystemMetadataService")
   private SystemMetadataService _systemMetadataService;

+  @Autowired
+  @Qualifier("timeseriesAspectService")
+  private TimeseriesAspectService _timeseriesAspectService;
+
   public OperationsController(AuthorizerChain authorizerChain) {
     _authorizerChain = authorizerChain;
   }
@@ -91,4 +98,36 @@ public ResponseEntity<String> getTaskStatus(String task) {
     j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos());
     return ResponseEntity.ok(j.toString());
   }
+
+  @GetMapping(path = "/getIndexSizes", produces = MediaType.APPLICATION_JSON_VALUE)
+  public ResponseEntity<String> getIndexSizes() {
+    Authentication authentication = AuthenticationContext.getAuthentication();
+    String actorUrnStr = authentication.getActor().toUrnStr();
+    DisjunctivePrivilegeGroup orGroup =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(
+                        PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE.getType()))));
+    if (restApiAuthorizationEnabled
+        && !AuthUtil.isAuthorizedForResources(
+            _authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) {
+      return ResponseEntity.status(HttpStatus.FORBIDDEN)
+          .body(String.format(actorUrnStr + " is not authorized to get timeseries index sizes"));
+    }
+    List<TimeseriesIndexSizeResult> indexSizeResults = _timeseriesAspectService.getIndexSizes();
+    JSONObject j = new JSONObject();
+    j.put(
+        
"sizes", + indexSizeResults.stream() + .map( + timeseriesIndexSizeResult -> + new JSONObject() + .put("aspectName", timeseriesIndexSizeResult.getAspectName()) + .put("entityName", timeseriesIndexSizeResult.getEntityName()) + .put("indexName", timeseriesIndexSizeResult.getIndexName()) + .put("sizeMb", timeseriesIndexSizeResult.getSizeInMb())) + .collect(Collectors.toList())); + return ResponseEntity.ok(j.toString()); + } } From f993f50a0493111e4cfe85409098d844779292c5 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 18 Jan 2024 23:12:20 -0500 Subject: [PATCH 391/792] feat(ingest/sql-parsing): Support file backed dict in SqlParsingBuilder for lineage (#9654) --- .../datahub/emitter/sql_parsing_builder.py | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py index 046b615bd4e9f..a8fe4f0df83cf 100644 --- a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py +++ b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py @@ -20,6 +20,7 @@ UpstreamClass, UpstreamLineageClass, ) +from datahub.utilities.file_backed_collections import FileBackedDict from datahub.utilities.sqlglot_lineage import ColumnLineageInfo, SqlParsingResult logger = logging.getLogger(__name__) @@ -80,10 +81,10 @@ class SqlParsingBuilder: generate_operations: bool = True usage_config: Optional[BaseUsageConfig] = None - # TODO: Make inner dict a FileBackedDict and make LineageEdge frozen + # Maps downstream urn -> upstream urn -> LineageEdge # Builds up a single LineageEdge for each upstream -> downstream pair - _lineage_map: Dict[DatasetUrn, Dict[DatasetUrn, LineageEdge]] = field( - default_factory=lambda: defaultdict(dict), init=False + _lineage_map: FileBackedDict[Dict[DatasetUrn, LineageEdge]] = field( + default_factory=FileBackedDict, init=False ) # TODO: Replace with FileBackedDict approach like in BigQuery usage @@ -128,13 +129,14 @@ def process_sql_parsing_result( if self.generate_lineage: for downstream_urn in downstreams_to_ingest: - _merge_lineage_data( + # Set explicitly so that FileBackedDict registers any mutations + self._lineage_map[downstream_urn] = _merge_lineage_data( downstream_urn=downstream_urn, upstream_urns=result.in_tables, column_lineage=result.column_lineage if include_column_lineage else None, - upstream_edges=self._lineage_map[downstream_urn], + upstream_edges=self._lineage_map.get(downstream_urn, {}), query_timestamp=query_timestamp, is_view_ddl=is_view_ddl, user=user, @@ -170,11 +172,12 @@ def add_lineage( user: Optional[UserUrn] = None, ) -> None: """Manually add a single upstream -> downstream lineage edge, e.g. 
if sql parsing fails.""" - _merge_lineage_data( + # Set explicitly so that FileBackedDict registers any mutations + self._lineage_map[downstream_urn] = _merge_lineage_data( downstream_urn=downstream_urn, upstream_urns=upstream_urns, column_lineage=None, - upstream_edges=self._lineage_map[downstream_urn], + upstream_edges=self._lineage_map.get(downstream_urn, {}), query_timestamp=timestamp, is_view_ddl=is_view_ddl, user=user, @@ -225,7 +228,7 @@ def _merge_lineage_data( query_timestamp: Optional[datetime], is_view_ddl: bool, user: Optional[UserUrn], -) -> None: +) -> Dict[str, LineageEdge]: for upstream_urn in upstream_urns: edge = upstream_edges.setdefault( upstream_urn, @@ -255,6 +258,8 @@ def _merge_lineage_data( column_map = upstream_edges[upstream_column_info.table].column_map column_map[cl.downstream.column].add(upstream_column_info.column) + return upstream_edges + def _compute_upstream_fields( result: SqlParsingResult, From 45236a89aa1fcafcc5fef61994c5c4a498ebfe69 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 19 Jan 2024 15:38:50 +0530 Subject: [PATCH 392/792] feat(posts): add edit support for posts (#9666) --- .../src/app/settings/posts/CreatePostForm.tsx | 11 ++- .../app/settings/posts/CreatePostModal.tsx | 90 ++++++++++++++++--- .../src/app/settings/posts/PostItemMenu.tsx | 8 +- .../src/app/settings/posts/PostsList.tsx | 19 +++- .../app/settings/posts/PostsListColumns.tsx | 11 ++- .../src/graphql/mutations.graphql | 4 + 6 files changed, 125 insertions(+), 18 deletions(-) diff --git a/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx b/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx index a8d6cfa64c9c1..ee7f50a058957 100644 --- a/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx +++ b/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { Form, Input, Typography, FormInstance, Radio } from 'antd'; import styled from 'styled-components'; import { @@ -21,11 +21,18 @@ const SubFormItem = styled(Form.Item)` type Props = { setCreateButtonEnabled: (isEnabled: boolean) => void; form: FormInstance; + contentType: PostContentType; }; -export default function CreatePostForm({ setCreateButtonEnabled, form }: Props) { +export default function CreatePostForm({ setCreateButtonEnabled, form, contentType }: Props) { const [postType, setPostType] = useState(PostContentType.Text); + useEffect(() => { + if (contentType) { + setPostType(contentType); + } + }, [contentType]); + return (
void;
     onCreate: (
         contentType: string,
         description: string | undefined,
         link: string | undefined,
         location: string | undefined,
     ) => void;
+    onEdit: () => void;
 };

-export default function CreatePostModal({ onClose, onCreate }: Props) {
+export default function CreatePostModal({ onClose, onCreate, editData, onEdit }: Props) {
     const [createPostMutation] = useCreatePostMutation();
+    const [updatePostMutation] = useUpdatePostMutation();
     const [createButtonEnabled, setCreateButtonEnabled] = useState(false);
     const [form] = Form.useForm();
+
+    useEffect(() => {
+        if (editData) {
+            form.setFieldsValue({
+                description: editData.description,
+                title: editData.title,
+                link: editData.link,
+                location: editData.imageUrl,
+                type: editData.contentType,
+            });
+        }
+    }, [editData, form]);
+
     const onCreatePost = () => {
         const contentTypeValue = form.getFieldValue(TYPE_FIELD_NAME) ?? PostContentType.Text;
         const mediaValue =
@@ -75,33 +92,86 @@ export default function CreatePostModal({ onClose, onCreate }: Props) {
         onClose();
     };

+    const onUpdatePost = () => {
+        const contentTypeValue = form.getFieldValue(TYPE_FIELD_NAME) ?? PostContentType.Text;
+        const mediaValue =
+            form.getFieldValue(TYPE_FIELD_NAME) && form.getFieldValue(LOCATION_FIELD_NAME)
+                ? {
+                      type: MediaType.Image,
+                      location: form.getFieldValue(LOCATION_FIELD_NAME) ?? null,
+                  }
+                : null;
+        updatePostMutation({
+            variables: {
+                input: {
+                    urn: editData?.urn,
+                    postType: PostType.HomePageAnnouncement,
+                    content: {
+                        contentType: contentTypeValue,
+                        title: form.getFieldValue(TITLE_FIELD_NAME),
+                        description: form.getFieldValue(DESCRIPTION_FIELD_NAME) ?? null,
+                        link: form.getFieldValue(LINK_FIELD_NAME) ?? null,
+                        media: mediaValue,
+                    },
+                },
+            },
+        })
+            .then(({ errors }) => {
+                if (!errors) {
+                    message.success({
+                        content: `Updated Post!`,
+                        duration: 3,
+                    });
+                    onEdit();
+                    form.resetFields();
+                }
+            })
+            .catch((e) => {
+                message.destroy();
+                message.error({ content: 'Failed to update Post! An unknown error occurred.', duration: 3 });
+                console.error('Failed to update Post:', e.message);
+            });
+        onClose();
+    };
+
     // Handle the Enter press
     useEnterKeyListener({
         querySelectorToExecuteClick: '#createPostButton',
     });

+    const onCloseModal = () => {
+        form.resetFields();
+        onClose();
+    };
+
+    const titleText = editData ? 
'Edit Post' : 'Create new Post'; + return ( - } > - + ); } diff --git a/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx b/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx index e3fc424a47ef2..3708c04ab1ad3 100644 --- a/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx +++ b/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { DeleteOutlined } from '@ant-design/icons'; +import { DeleteOutlined, EditOutlined } from '@ant-design/icons'; import { Dropdown, Menu, message, Modal } from 'antd'; import { MenuIcon } from '../../entity/shared/EntityDropdown/EntityDropdown'; import { useDeletePostMutation } from '../../../graphql/post.generated'; @@ -8,9 +8,10 @@ type Props = { urn: string; title: string; onDelete?: () => void; + onEdit?: () => void; }; -export default function PostItemMenu({ title, urn, onDelete }: Props) { +export default function PostItemMenu({ title, urn, onDelete, onEdit }: Props) { const [deletePostMutation] = useDeletePostMutation(); const deletePost = () => { @@ -53,6 +54,9 @@ export default function PostItemMenu({ title, urn, onDelete }: Props) {  Delete + +  Edit + } > diff --git a/datahub-web-react/src/app/settings/posts/PostsList.tsx b/datahub-web-react/src/app/settings/posts/PostsList.tsx index 849a3765a94b0..b71f06c83c17f 100644 --- a/datahub-web-react/src/app/settings/posts/PostsList.tsx +++ b/datahub-web-react/src/app/settings/posts/PostsList.tsx @@ -51,6 +51,7 @@ export const PostList = () => { const [page, setPage] = useState(1); const [isCreatingPost, setIsCreatingPost] = useState(false); + const [editData, setEditData] = useState(undefined); const pageSize = DEFAULT_PAGE_SIZE; const start = (page - 1) * pageSize; @@ -82,6 +83,16 @@ export const PostList = () => { }, 2000); }; + const handleEdit = (post: PostEntry) => { + setEditData(post); + setIsCreatingPost(true); + }; + + const handleClose = () => { + setEditData(undefined); + setIsCreatingPost(false); + }; + const allColumns = [ { title: 'Title', @@ -113,7 +124,7 @@ export const PostList = () => { width: '5%', align: 'right' as AlignType, key: 'menu', - render: PostListMenuColumn(handleDelete), + render: PostListMenuColumn(handleDelete, handleEdit), }, ]; @@ -123,6 +134,8 @@ export const PostList = () => { title: post.content.title, description: post.content.description, contentType: post.content.contentType, + link: post.content.link, + imageUrl: post.content.media?.location, }; }); @@ -181,7 +194,9 @@ export const PostList = () => { )} {isCreatingPost && ( setIsCreatingPost(false)} + editData={editData as PostEntry} + onClose={handleClose} + onEdit={() => setTimeout(() => refetch(), 2000)} onCreate={(urn, title, description) => { addToListPostCache( client, diff --git a/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx b/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx index 38f910baf8f41..ee680cbec7396 100644 --- a/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx +++ b/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx @@ -9,15 +9,22 @@ export interface PostEntry { contentType: string; description: Maybe; urn: string; + link: string; + imageUrl: string; } const PostText = styled.div<{ minWidth?: number }>` ${(props) => props.minWidth !== undefined && `min-width: ${props.minWidth}px;`} `; -export function PostListMenuColumn(handleDelete: (urn: string) => void) { +export function PostListMenuColumn(handleDelete: (urn: string) => void, handleEdit: (urn: PostEntry) => void) { 
return (record: PostEntry) => ( - handleDelete(record.urn)} /> + handleDelete(record.urn)} + onEdit={() => handleEdit(record)} + /> ); } diff --git a/datahub-web-react/src/graphql/mutations.graphql b/datahub-web-react/src/graphql/mutations.graphql index 439d20810ef7c..077922cee45fb 100644 --- a/datahub-web-react/src/graphql/mutations.graphql +++ b/datahub-web-react/src/graphql/mutations.graphql @@ -120,6 +120,10 @@ mutation createPost($input: CreatePostInput!) { createPost(input: $input) } +mutation updatePost($input: UpdatePostInput!) { + updatePost(input: $input) +} + mutation updateLineage($input: UpdateLineageInput!) { updateLineage(input: $input) } From 4138b2f72442a72d84d9b12fac04abf8144ba1cf Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Fri, 19 Jan 2024 11:48:12 +0000 Subject: [PATCH 393/792] feat(roles): Add support for roles in groups in GMS (#9659) Co-authored-by: Aseem Bansal --- .../resolvers/group/CreateGroupResolver.java | 3 +- .../com/linkedin/identity/RoleMembership.pdl | 2 +- .../src/main/resources/entity-registry.yml | 1 + .../datahub/authorization/PolicyEngine.java | 75 +++++++++++++++++-- .../authorization/DataHubAuthorizerTest.java | 73 +++++++++++++++--- .../authorization/PolicyEngineTest.java | 12 ++- .../tests/privileges/test_privileges.py | 62 ++++++++++++++- smoke-test/tests/privileges/utils.py | 61 +++++++++++++++ 8 files changed, 266 insertions(+), 23 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index e487ee00608d4..cde7d4958a25b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -43,9 +43,10 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Create the Group key. final CorpGroupKey key = new CorpGroupKey(); final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + final String description = input.getDescription() != null ? input.getDescription() : ""; key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". return _groupService.createNativeGroup( - key, input.getName(), input.getDescription(), authentication); + key, input.getName(), description, authentication); } catch (Exception e) { throw new RuntimeException("Failed to create group", e); } diff --git a/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl b/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl index ec1e472545c1c..2f0ebbb58d05f 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl @@ -3,7 +3,7 @@ namespace com.linkedin.identity import com.linkedin.common.Urn /** - * Carries information about which roles a user is assigned to. + * Carries information about which roles a user or group is assigned to. 
*/ @Aspect = { "name": "roleMembership" diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index f275d41e786c2..9d8c4bfdab0da 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -184,6 +184,7 @@ entities: - ownership - status - origin + - roleMembership - name: domain doc: A data domain within an organization. category: core diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index 123e5f3c55932..f078d2d316cae 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -3,8 +3,10 @@ import static com.linkedin.metadata.Constants.*; import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.Owner; import com.linkedin.common.Ownership; +import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; @@ -12,6 +14,8 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.GroupMembership; +import com.linkedin.identity.NativeGroupMembership; import com.linkedin.identity.RoleMembership; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -26,6 +30,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -393,7 +398,6 @@ private Set resolveRoles( Set roles = new HashSet<>(); final EnvelopedAspectMap aspectMap; - try { Urn actorUrn = Urn.createFromString(actor); final EntityResponse corpUser = @@ -401,7 +405,10 @@ private Set resolveRoles( .batchGetV2( CORP_USER_ENTITY_NAME, Collections.singleton(actorUrn), - Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), + ImmutableSet.of( + ROLE_MEMBERSHIP_ASPECT_NAME, + GROUP_MEMBERSHIP_ASPECT_NAME, + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), _systemAuthentication) .get(actorUrn); if (corpUser == null || !corpUser.hasAspects()) { @@ -414,19 +421,71 @@ private Set resolveRoles( return roles; } - if (!aspectMap.containsKey(ROLE_MEMBERSHIP_ASPECT_NAME)) { - return roles; + if (aspectMap.containsKey(ROLE_MEMBERSHIP_ASPECT_NAME)) { + RoleMembership roleMembership = + new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); + if (roleMembership.hasRoles()) { + roles.addAll(roleMembership.getRoles()); + } } - RoleMembership roleMembership = - new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); - if (roleMembership.hasRoles()) { - roles.addAll(roleMembership.getRoles()); + List groups = new ArrayList<>(); + if (aspectMap.containsKey(GROUP_MEMBERSHIP_ASPECT_NAME)) { + GroupMembership groupMembership = + new GroupMembership(aspectMap.get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + groups.addAll(groupMembership.getGroups()); + } + if (aspectMap.containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { + NativeGroupMembership nativeGroupMembership = + new NativeGroupMembership( + aspectMap.get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + 
groups.addAll(nativeGroupMembership.getNativeGroups()); + } + if (!groups.isEmpty()) { + GroupMembership memberships = new GroupMembership(); + memberships.setGroups(new UrnArray(groups)); + roles.addAll(getRolesFromGroups(memberships)); + } + + if (!roles.isEmpty()) { context.setRoles(roles); } + return roles; } + private Set getRolesFromGroups(final GroupMembership groupMembership) { + + HashSet groups = new HashSet<>(groupMembership.getGroups()); + try { + Map responseMap = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + groups, + ImmutableSet.of(ROLE_MEMBERSHIP_ASPECT_NAME), + _systemAuthentication); + + return responseMap.keySet().stream() + .filter(Objects::nonNull) + .filter(key -> responseMap.get(key) != null) + .filter(key -> responseMap.get(key).hasAspects()) + .map(key -> responseMap.get(key).getAspects()) + .filter(aspectMap -> aspectMap.containsKey(ROLE_MEMBERSHIP_ASPECT_NAME)) + .map( + aspectMap -> + new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data())) + .filter(RoleMembership::hasRoles) + .map(RoleMembership::getRoles) + .flatMap(List::stream) + .collect(Collectors.toSet()); + + } catch (Exception e) { + log.error( + String.format("Failed to fetch %s for urns %s", ROLE_MEMBERSHIP_ASPECT_NAME, groups), e); + return new HashSet<>(); + } + } + private Set resolveGroups( ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.groups != null) { diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index ffee378a363c7..588cdf57269ef 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -28,6 +28,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; @@ -36,6 +37,7 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.GroupMembership; import com.linkedin.identity.RoleMembership; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.ScrollResult; @@ -254,10 +256,14 @@ public void setupTest() throws Exception { when(_entityClient.batchGetV2( any(), eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + eq( + ImmutableSet.of( + ROLE_MEMBERSHIP_ASPECT_NAME, + GROUP_MEMBERSHIP_ASPECT_NAME, + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), any())) .thenReturn( - createUserRoleMembershipBatchResponse( + createRoleMembershipBatchResponse( USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); final Authentication systemAuthentication = @@ -460,6 +466,49 @@ public void testAuthorizationOnDomainWithoutPrivilegeIsDenied() { assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } + @Test + public void testAuthorizationGrantedBasedOnGroupRole() throws Exception { + final EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:custom"); + + final Urn userUrnWithoutPermissions = UrnUtils.getUrn("urn:li:corpuser:userWithoutRole"); + final 
+    final Urn groupWithAdminPermission = UrnUtils.getUrn("urn:li:corpGroup:groupWithRole");
+    final UrnArray groups = new UrnArray(List.of(groupWithAdminPermission));
+    final GroupMembership groupMembership = new GroupMembership();
+    groupMembership.setGroups(groups);
+
+    // User has no role associated but is part of 1 group
+    when(_entityClient.batchGetV2(
+            any(),
+            eq(Collections.singleton(userUrnWithoutPermissions)),
+            eq(
+                ImmutableSet.of(
+                    ROLE_MEMBERSHIP_ASPECT_NAME,
+                    GROUP_MEMBERSHIP_ASPECT_NAME,
+                    NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)),
+            any()))
+        .thenReturn(
+            createEntityBatchResponse(
+                userUrnWithoutPermissions, GROUP_MEMBERSHIP_ASPECT_NAME, groupMembership));
+
+    // Group has a role
+    when(_entityClient.batchGetV2(
+            any(),
+            eq(Collections.singleton(groupWithAdminPermission)),
+            eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)),
+            any()))
+        .thenReturn(
+            createRoleMembershipBatchResponse(
+                groupWithAdminPermission, UrnUtils.getUrn("urn:li:dataHubRole:Admin")));
+
+    // This request should only be valid for actor with the admin role.
+    // Which the urn:li:corpuser:userWithoutRole does not have
+    AuthorizationRequest request =
+        new AuthorizationRequest(
+            userUrnWithoutPermissions.toString(), "EDIT_USER_PROFILE", Optional.of(resourceSpec));
+
+    assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW);
+  }
+
   private DataHubPolicyInfo createDataHubPolicyInfo(
       boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception {
@@ -575,20 +624,24 @@ private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(
     return batchResponse;
   }
 
-  private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(
-      final Urn userUrn, @Nullable final Urn roleUrn) {
-    final Map<Urn, EntityResponse> batchResponse = new HashMap<>();
-    final EntityResponse response = new EntityResponse();
-    EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
+  private Map<Urn, EntityResponse> createRoleMembershipBatchResponse(
+      final Urn actorUrn, @Nullable final Urn roleUrn) {
     final RoleMembership membership = new RoleMembership();
     if (roleUrn != null) {
       membership.setRoles(new UrnArray(roleUrn));
     }
+    return createEntityBatchResponse(actorUrn, ROLE_MEMBERSHIP_ASPECT_NAME, membership);
+  }
+
+  private Map<Urn, EntityResponse> createEntityBatchResponse(
+      final Urn actorUrn, final String aspectName, final RecordTemplate aspect) {
+    final Map<Urn, EntityResponse> batchResponse = new HashMap<>();
+    final EntityResponse response = new EntityResponse();
+    EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
     aspectMap.put(
-        ROLE_MEMBERSHIP_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(membership.data())));
+        aspectName, new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(aspect.data())));
     response.setAspects(aspectMap);
-    batchResponse.put(userUrn, response);
+    batchResponse.put(actorUrn, response);
     return batchResponse;
   }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java
index 08ec91d5287dc..c7f06eeba6e85 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java
@@ -83,7 +83,11 @@ public void setupTest() throws Exception {
     when(_entityClient.batchGetV2(
             eq(CORP_USER_ENTITY_NAME),
             eq(Collections.singleton(authorizedUserUrn)),
-            eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)),
+            eq(
+                ImmutableSet.of(
+                    ROLE_MEMBERSHIP_ASPECT_NAME,
+
GROUP_MEMBERSHIP_ASPECT_NAME, + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), any())) .thenReturn(authorizedEntityResponseMap); @@ -94,7 +98,11 @@ public void setupTest() throws Exception { when(_entityClient.batchGetV2( eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(unauthorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + eq( + ImmutableSet.of( + ROLE_MEMBERSHIP_ASPECT_NAME, + GROUP_MEMBERSHIP_ASPECT_NAME, + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), any())) .thenReturn(unauthorizedEntityResponseMap); diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index 75e2265f1f555..e1cb848cccf8e 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -450,4 +450,64 @@ def test_privilege_to_create_and_manage_policies(): # Ensure that user can't create a policy after privilege is removed by admin - _ensure_cant_perform_action(user_session, create_policy,"createPolicy") \ No newline at end of file + _ensure_cant_perform_action(user_session, create_policy,"createPolicy") + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_privilege_from_group_role_can_create_and_manage_secret(): + + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + user_session = login_as("user", "user") + secret_urn = "urn:li:dataHubSecret:TestSecretName" + + # Verify new user can't create secrets + create_secret = { + "query": """mutation createSecret($input: CreateSecretInput!) {\n + createSecret(input: $input)\n}""", + "variables": { + "input":{ + "name":"TestSecretName", + "value":"Test Secret Value", + "description":"Test Secret Description" + } + }, + } + _ensure_cant_perform_action(user_session, create_secret,"createSecret") + + # Create group and grant it the admin role. + group_urn = create_group(admin_session, "Test Group") + + # Assign admin role to group + assign_role(admin_session,"urn:li:dataHubRole:Admin", [group_urn]) + + # Assign user to group + assign_user_to_group(admin_session, group_urn, ["urn:li:corpuser:user"]) + + # Verify new user with admin group can create and manage secrets + # Create a secret + _ensure_can_create_secret(user_session, create_secret, secret_urn) + + # Remove a secret + remove_secret = { + "query": """mutation deleteSecret($urn: String!) {\n + deleteSecret(urn: $urn)\n}""", + "variables": { + "urn": secret_urn + }, + } + + remove_secret_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_secret) + remove_secret_response.raise_for_status() + secret_data = remove_secret_response.json() + + assert secret_data + assert secret_data["data"] + assert secret_data["data"]["deleteSecret"] + assert secret_data["data"]["deleteSecret"] == secret_urn + + # Delete group which removes the user's admin capabilities + remove_group(admin_session, group_urn) + + # Ensure user can't create secret after policy is removed + _ensure_cant_perform_action(user_session, create_secret,"createSecret") diff --git a/smoke-test/tests/privileges/utils.py b/smoke-test/tests/privileges/utils.py index ea1f565f6f5ac..eeb385a243a90 100644 --- a/smoke-test/tests/privileges/utils.py +++ b/smoke-test/tests/privileges/utils.py @@ -170,6 +170,67 @@ def remove_user(session, urn): response.raise_for_status() return response.json() +def create_group(session, name): + json = { + "query": """mutation createGroup($input: CreateGroupInput!) 
{\n + createGroup(input: $input) + }""", + "variables": {"input": {"name": name}}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["createGroup"] + return res_data["data"]["createGroup"] + +def remove_group(session, urn): + json = { + "query": """mutation removeGroup($urn: String!) {\n + removeGroup(urn: $urn) + }""", + "variables": {"urn": urn}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["removeGroup"] + return res_data["data"]["removeGroup"] + +def assign_user_to_group(session, group_urn, user_urns): + json = { + "query": """mutation addGroupMembers($groupUrn: String!, $userUrns: [String!]!) {\n + addGroupMembers(input: { groupUrn: $groupUrn, userUrns: $userUrns }) + }""", + "variables": {"groupUrn": group_urn, "userUrns": user_urns}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["addGroupMembers"] + return res_data["data"]["addGroupMembers"] + +def assign_role(session, role_urn, actor_urns): + json = { + "query": """mutation batchAssignRole($input: BatchAssignRoleInput!) {\n + batchAssignRole(input: $input) + }""", + "variables": {"input": {"roleUrn": role_urn, "actors": actor_urns}}, + } + + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["batchAssignRole"] + return res_data["data"]["batchAssignRole"] + def create_user_policy(user_urn, privileges, session): policy = { "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n From 0b66e5e452140b158fd350b4b769d4b7792db073 Mon Sep 17 00:00:00 2001 From: Ingthor Birkir Arnason Date: Fri, 19 Jan 2024 15:03:56 +0000 Subject: [PATCH 394/792] fix(frontend): Add fallback for image load error on Avatar (#9501) Co-authored-by: Ingthor Birkir Arnason Co-authored-by: Harshal Sheth --- .../src/app/shared/avatar/CustomAvatar.tsx | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx b/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx index 320b244125315..2ec5a1e77fe22 100644 --- a/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx +++ b/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx @@ -1,6 +1,6 @@ import { Avatar, Tooltip } from 'antd'; import { TooltipPlacement } from 'antd/lib/tooltip'; -import React from 'react'; +import React, { useState } from 'react'; import { Link } from 'react-router-dom'; import styled from 'styled-components'; @@ -50,6 +50,8 @@ export default function CustomAvatar({ isRole = false, hideTooltip = false, }: Props) { + const [imageError, setImageError] = useState(false); + const avatarWithInitial = name ? ( {name.charAt(0).toUpperCase()} @@ -62,8 +64,15 @@ export default function CustomAvatar({ ) : ( avatarWithInitial ); + + const handleImageError = () => { + setImageError(true); + // To prevent fallback error handling from Ant Design + return false; + }; + const avatar = - photoUrl && photoUrl !== '' ? : avatarWithDefault; + photoUrl && photoUrl !== '' && !imageError ? 
: avatarWithDefault; if (!name) { return url ? {avatar} : avatar; } From a60df52cd2d1abd0ab5e9b6d05f6094a7d3e58f0 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 19 Jan 2024 22:06:50 +0530 Subject: [PATCH 395/792] fix(user-removal): resolve user removal list update issue (#9671) --- datahub-web-react/src/app/identity/user/UserList.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index 22b44e5f2d625..178f54325ecde 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -82,7 +82,7 @@ export const UserList = () => { }); const totalUsers = usersData?.listUsers?.total || 0; - useEffect(()=> { + useEffect(() => { setUsersList(usersData?.listUsers?.users || []); }, [usersData]); const onChangePage = (newPage: number) => { @@ -92,6 +92,7 @@ export const UserList = () => { const handleDelete = (urn: string) => { removeUserFromListUsersCache(urn, client, page, pageSize); + usersRefetch(); }; const { From 0c940c7b2b9ba2e4e79ead7973045775882460a3 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Fri, 19 Jan 2024 13:38:48 -0500 Subject: [PATCH 396/792] feat(ui) Add standardized GQL error handling function to FE (#9470) --- datahub-web-react/src/App.tsx | 3 +- .../Dataset/Queries/QueryCardDetailsMenu.tsx | 10 +++-- datahub-web-react/src/app/shared/constants.ts | 8 ++++ .../src/app/shared/handleGraphQLError.ts | 40 +++++++++++++++++++ 4 files changed, 57 insertions(+), 4 deletions(-) create mode 100644 datahub-web-react/src/app/shared/handleGraphQLError.ts diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 895c2a4781e42..79c9ee91ceaa1 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -14,6 +14,7 @@ import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; import possibleTypesResult from './possibleTypes.generated'; +import { ErrorCodes } from './app/shared/constants'; /* Construct Apollo Client @@ -24,7 +25,7 @@ const errorLink = onError((error) => { const { networkError, graphQLErrors } = error; if (networkError) { const serverError = networkError as ServerError; - if (serverError.statusCode === 401) { + if (serverError.statusCode === ErrorCodes.Unauthorized) { isLoggedInVar(false); Cookies.remove(GlobalCfg.CLIENT_AUTH_COOKIE); const currentPath = window.location.pathname + window.location.search; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx index d17f0d12b3cf3..a663dfffaaabf 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx @@ -3,6 +3,7 @@ import styled from 'styled-components'; import { DeleteOutlined, MoreOutlined } from '@ant-design/icons'; import { Dropdown, Menu, message, Modal } from 'antd'; import { useDeleteQueryMutation } from '../../../../../../graphql/query.generated'; +import handleGraphQLError from '../../../../../shared/handleGraphQLError'; const StyledMoreOutlined = styled(MoreOutlined)` font-size: 14px; @@ -28,9 +29,12 @@ export default function QueryCardDetailsMenu({ urn, onDeleted, index }: Props) { 
onDeleted?.(urn); } }) - .catch(() => { - message.destroy(); - message.error({ content: 'Failed to delete Query! An unexpected error occurred' }); + .catch((error) => { + handleGraphQLError({ + error, + defaultMessage: 'Failed to delete Query! An unexpected error occurred', + permissionMessage: 'Unauthorized to delete Query. Please contact your DataHub administrator.', + }); }); }; diff --git a/datahub-web-react/src/app/shared/constants.ts b/datahub-web-react/src/app/shared/constants.ts index dc04372b43607..1cd9077ab8cdf 100644 --- a/datahub-web-react/src/app/shared/constants.ts +++ b/datahub-web-react/src/app/shared/constants.ts @@ -1 +1,9 @@ export const ENTER_KEY_CODE = 13; + +export enum ErrorCodes { + BadRequest = 400, + Unauthorized = 401, + Forbidden = 403, + NotFound = 404, + ServerError = 500, +} diff --git a/datahub-web-react/src/app/shared/handleGraphQLError.ts b/datahub-web-react/src/app/shared/handleGraphQLError.ts new file mode 100644 index 0000000000000..f129fef34c7ca --- /dev/null +++ b/datahub-web-react/src/app/shared/handleGraphQLError.ts @@ -0,0 +1,40 @@ +import { ErrorResponse } from '@apollo/client/link/error'; +import { message } from 'antd'; +import { ErrorCodes } from './constants'; + +interface Props { + error: ErrorResponse; + defaultMessage: string; + permissionMessage?: string; + badRequestMessage?: string; + serverErrorMessage?: string; +} + +export default function handleGraphQLError({ + error, + defaultMessage, + permissionMessage = 'Unauthorized. Please contact your DataHub administrator.', + badRequestMessage, + serverErrorMessage, +}: Props) { + // destroy the default error message from errorLink in App.tsx + message.destroy(); + const { graphQLErrors } = error; + if (graphQLErrors && graphQLErrors.length) { + const { extensions } = graphQLErrors[0]; + const errorCode = extensions && (extensions.code as number); + if (errorCode === ErrorCodes.Forbidden) { + message.error(permissionMessage); + return; + } + if (errorCode === ErrorCodes.BadRequest && badRequestMessage) { + message.error(badRequestMessage); + return; + } + if (errorCode === ErrorCodes.ServerError && serverErrorMessage) { + message.error(serverErrorMessage); + return; + } + } + message.error(defaultMessage); +} From 131c8f878c9f0fe872c9cb4faa4de22c57922c31 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 19 Jan 2024 13:44:48 -0600 Subject: [PATCH 397/792] feat(kafka): add health indicator for kafka (#9662) --- ...docker-compose.consumers-without-neo4j.yml | 2 + docker/docker-compose.consumers.yml | 2 + ...ose.consumers-without-neo4j.quickstart.yml | 2 + .../docker-compose.consumers.quickstart.yml | 2 + metadata-jobs/common/build.gradle | 16 ++++ .../health/kafka/KafkaHealthIndicator.java | 75 +++++++++++++++++++ metadata-jobs/mae-consumer-job/build.gradle | 1 + .../kafka/MaeConsumerApplication.java | 3 +- .../kafka/MaeConsumerApplicationTest.java | 4 + metadata-jobs/mce-consumer-job/build.gradle | 1 + .../kafka/MceConsumerApplication.java | 3 +- .../kafka/MceConsumerApplicationTest.java | 10 ++- .../config/kafka/ConsumerConfiguration.java | 1 + .../src/main/resources/application.yml | 1 + .../boot/OnBootApplicationListener.java | 10 +-- .../kafka/DataHubUpgradeKafkaListener.java | 2 +- settings.gradle | 1 + 17 files changed, 126 insertions(+), 10 deletions(-) create mode 100644 metadata-jobs/common/build.gradle create mode 100644 metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java diff --git 
a/docker/docker-compose.consumers-without-neo4j.yml b/docker/docker-compose.consumers-without-neo4j.yml index f1be585232a1a..792c212e9be9a 100644 --- a/docker/docker-compose.consumers-without-neo4j.yml +++ b/docker/docker-compose.consumers-without-neo4j.yml @@ -17,6 +17,7 @@ services: env_file: datahub-mae-consumer/env/docker-without-neo4j.env environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} datahub-mce-consumer: container_name: datahub-mce-consumer hostname: datahub-mce-consumer @@ -31,3 +32,4 @@ services: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} diff --git a/docker/docker-compose.consumers.yml b/docker/docker-compose.consumers.yml index 8d331cea2f0b9..0a7cbe4dbe3d7 100644 --- a/docker/docker-compose.consumers.yml +++ b/docker/docker-compose.consumers.yml @@ -17,6 +17,7 @@ services: env_file: datahub-mae-consumer/env/docker.env environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} depends_on: neo4j: condition: service_healthy @@ -39,6 +40,7 @@ services: - NEO4J_PASSWORD=datahub - GRAPH_SERVICE_IMPL=neo4j - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} depends_on: neo4j: condition: service_healthy diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index 4ed57dca1f080..c66931cabd7a4 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -7,6 +7,7 @@ services: container_name: datahub-mae-consumer environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -45,6 +46,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index ba8432d8a89af..b8106ef096952 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -10,6 +10,7 @@ services: condition: service_healthy environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - 
DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -55,6 +56,7 @@ services: - GRAPH_SERVICE_IMPL=neo4j - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false diff --git a/metadata-jobs/common/build.gradle b/metadata-jobs/common/build.gradle new file mode 100644 index 0000000000000..bdc3b7a44a98a --- /dev/null +++ b/metadata-jobs/common/build.gradle @@ -0,0 +1,16 @@ +plugins { + id 'java' +} + +dependencies { + implementation(project(':metadata-service:factories')) { + exclude group: 'org.neo4j.test' + } + implementation externalDependency.springActuator + implementation externalDependency.springKafka + implementation externalDependency.slf4jApi + + compileOnly externalDependency.lombok + + annotationProcessor externalDependency.lombok +} \ No newline at end of file diff --git a/metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java b/metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java new file mode 100644 index 0000000000000..305d33d2a09d1 --- /dev/null +++ b/metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java @@ -0,0 +1,75 @@ +package io.datahubproject.metadata.jobs.common.health.kafka; + +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import java.util.Collection; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Value; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.common.TopicPartition; +import org.springframework.boot.actuate.health.AbstractHealthIndicator; +import org.springframework.boot.actuate.health.Health; +import org.springframework.boot.actuate.health.Status; +import org.springframework.kafka.config.KafkaListenerEndpointRegistry; +import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; +import org.springframework.kafka.listener.MessageListenerContainer; +import org.springframework.stereotype.Component; + +@Component +@Slf4j +public class KafkaHealthIndicator extends AbstractHealthIndicator { + + private final KafkaListenerEndpointRegistry listenerRegistry; + private final ConfigurationProvider configurationProvider; + + public KafkaHealthIndicator( + KafkaListenerEndpointRegistry listenerRegistry, ConfigurationProvider configurationProvider) { + this.listenerRegistry = listenerRegistry; + this.configurationProvider = configurationProvider; + } + + @Override + protected void doHealthCheck(Health.Builder builder) throws Exception { + Status kafkaStatus = Status.UP; + boolean isContainerDown = + listenerRegistry.getAllListenerContainers().stream() + .filter( + container -> + !DataHubUpgradeKafkaListener.CONSUMER_GROUP.equals(container.getGroupId())) + .anyMatch(container -> !container.isRunning()); + Map details = + listenerRegistry.getAllListenerContainers().stream() + .collect( + Collectors.toMap( + MessageListenerContainer::getListenerId, this::buildConsumerDetails)); + if (isContainerDown && configurationProvider.getKafka().getConsumer().isHealthCheckEnabled()) { + kafkaStatus = Status.DOWN; + 
} + builder.status(kafkaStatus).withDetails(details).build(); + } + + private ConsumerDetails buildConsumerDetails(MessageListenerContainer container) { + Collection partitionDetails = container.getAssignedPartitions(); + int concurrency = 1; + if (container + instanceof ConcurrentMessageListenerContainer concurrentMessageListenerContainer) { + concurrency = concurrentMessageListenerContainer.getConcurrency(); + } + return new ConsumerDetails( + partitionDetails, + container.getListenerId(), + container.getGroupId(), + concurrency, + container.isRunning()); + } + + @Value + private static class ConsumerDetails { + Collection partitionDetails; + String listenerId; + String groupId; + int concurrency; + boolean isRunning; + } +} diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index a8920d50b068e..f3d1ca9885044 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -15,6 +15,7 @@ dependencies { implementation project(':metadata-jobs:mae-consumer') // TODO: Extract PE consumer into separate pod. implementation project(':metadata-jobs:pe-consumer') + implementation project(':metadata-jobs:common') implementation(externalDependency.springBootStarterWeb) { exclude module: "spring-boot-starter-tomcat" diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index eef513f8b91e0..e695788e09726 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -22,7 +22,8 @@ "com.linkedin.metadata.dao.producer", "com.linkedin.gms.factory.config", "com.linkedin.gms.factory.entity.update.indices", - "com.linkedin.gms.factory.timeline.eventgenerator" + "com.linkedin.gms.factory.timeline.eventgenerator", + "io.datahubproject.metadata.jobs.common.health.kafka" }, excludeFilters = { @ComponentScan.Filter( diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java index 69288cec8220a..22fbe7fc6b6ca 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java @@ -3,6 +3,7 @@ import static org.testng.AssertJUnit.*; import com.linkedin.metadata.entity.EntityService; +import io.datahubproject.metadata.jobs.common.health.kafka.KafkaHealthIndicator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; @@ -16,8 +17,11 @@ public class MaeConsumerApplicationTest extends AbstractTestNGSpringContextTests @Autowired private EntityService _mockEntityService; + @Autowired private KafkaHealthIndicator kafkaHealthIndicator; + @Test public void testMaeConsumerAutoWiring() { assertNotNull(_mockEntityService); + assertNotNull(kafkaHealthIndicator); } } diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index 2f60d1ae985fb..3370838974bf7 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ 
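The KafkaHealthIndicator above is long mostly because of its per-container detail reporting. A trimmed, self-contained sketch of its core decision, using only public Spring APIs (class and field names here are illustrative): any stopped listener container flips the aggregate health to DOWN, but only when the new health-check flag is enabled.

import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;

// Illustrative sketch: Spring Boot folds any HealthIndicator bean into
// /actuator/health, which is what k8s and docker-compose probes poll.
class ListenerLivenessSketch implements HealthIndicator {
  private final KafkaListenerEndpointRegistry registry;
  private final boolean healthCheckEnabled; // KAFKA_CONSUMER_HEALTH_CHECK_ENABLED

  ListenerLivenessSketch(KafkaListenerEndpointRegistry registry, boolean healthCheckEnabled) {
    this.registry = registry;
    this.healthCheckEnabled = healthCheckEnabled;
  }

  @Override
  public Health health() {
    boolean anyStopped =
        registry.getAllListenerContainers().stream().anyMatch(c -> !c.isRunning());
    // The real indicator also exempts the DataHub upgrade-history listener and
    // attaches per-container details; both are omitted here for brevity.
    return (anyStopped && healthCheckEnabled) ? Health.down().build() : Health.up().build();
  }
}

Gating on the flag matters because stopOnDeserializationError deliberately stops a container when it hits a bad record; with the flag on, the failing health check lets the orchestrator restart the consumer instead of leaving it silently wedged.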
b/metadata-jobs/mce-consumer-job/build.gradle @@ -15,6 +15,7 @@ dependencies { implementation project(':metadata-service:auth-filter') implementation project(':metadata-jobs:mce-consumer') implementation project(':entity-registry') + implementation project(':metadata-jobs:common') implementation(externalDependency.springBootStarterWeb) { exclude module: "spring-boot-starter-tomcat" diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index 05bcd556debe9..181a723e1cd25 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -29,7 +29,8 @@ "com.linkedin.restli.server", "com.linkedin.metadata.restli", "com.linkedin.metadata.kafka", - "com.linkedin.metadata.dao.producer" + "com.linkedin.metadata.dao.producer", + "io.datahubproject.metadata.jobs.common.health.kafka" }, excludeFilters = { @ComponentScan.Filter( diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index 714c7b899ff49..6d19db97fb39f 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -2,10 +2,11 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; -import static org.testng.AssertJUnit.assertTrue; +import static org.testng.AssertJUnit.*; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; +import io.datahubproject.metadata.jobs.common.health.kafka.KafkaHealthIndicator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.web.client.TestRestTemplate; @@ -23,6 +24,8 @@ public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests @Autowired private EntityService _mockEntityService; + @Autowired private KafkaHealthIndicator kafkaHealthIndicator; + @Test public void testRestliServletConfig() { RestoreIndicesResult mockResult = new RestoreIndicesResult(); @@ -34,4 +37,9 @@ public void testRestliServletConfig() { "/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); assertTrue(response.contains(mockResult.toString())); } + + @Test + public void testHealthIndicator() { + assertNotNull(kafkaHealthIndicator); + } } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index 61b9d5c816790..60f3e1b4fef76 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -7,4 +7,5 @@ public class ConsumerConfiguration { private int maxPartitionFetchBytes; private boolean stopOnDeserializationError; + private boolean healthCheckEnabled; } diff --git 
a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 36498f7c45fea..cfc84491ab0ae 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -237,6 +237,7 @@ kafka: consumer: maxPartitionFetchBytes: ${KAFKA_CONSUMER_MAX_PARTITION_FETCH_BYTES:5242880} # the max bytes consumed per partition stopOnDeserializationError: ${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:true} # Stops kafka listener container on deserialization error, allows user to fix problems before moving past problematic offset. If false will log and move forward past the offset + healthCheckEnabled: ${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:true} # Sets the health indicator to down when a message listener container has stopped due to a deserialization failure, will force consumer apps to restart through k8s and docker-compose health mechanisms schemaRegistry: type: ${SCHEMA_REGISTRY_TYPE:KAFKA} # INTERNAL or KAFKA or AWS_GLUE url: ${KAFKA_SCHEMAREGISTRY_URL:http://localhost:8081} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java index 801a902b7f835..0750dfca865c2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java @@ -16,14 +16,12 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; /** Responsible for coordinating starting steps that happen before the application starts up. */ -@Configuration @Slf4j @Component public class OnBootApplicationListener { @@ -73,22 +71,22 @@ public Runnable isSchemaRegistryAPIServletReady() { return () -> { final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); int timeouts = _servletsWaitTimeout; - boolean openAPIServeletReady = false; - while (!openAPIServeletReady && timeouts > 0) { + boolean openAPIServletReady = false; + while (!openAPIServletReady && timeouts > 0) { try { log.info("Sleeping for 1 second"); Thread.sleep(1000); StatusLine statusLine = httpClient.execute(request).getStatusLine(); if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { log.info("Connected! 
Authentication not tested."); - openAPIServeletReady = true; + openAPIServletReady = true; } } catch (IOException | InterruptedException e) { log.info("Failed to connect to open servlet: {}", e.getMessage()); } timeouts--; } - if (!openAPIServeletReady) { + if (!openAPIServletReady) { log.error( "Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", timeouts); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java index b2b6fb5e5cb7e..e69ab342740e4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java @@ -36,7 +36,7 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap private final KafkaListenerEndpointRegistry registry; - private static final String CONSUMER_GROUP = + public static final String CONSUMER_GROUP = "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}"; private static final String SUFFIX = "temp"; public static final String TOPIC_NAME = diff --git a/settings.gradle b/settings.gradle index d2844fe00cdbc..4614b6ed4ccaf 100644 --- a/settings.gradle +++ b/settings.gradle @@ -63,3 +63,4 @@ include 'metadata-integration:java:examples' include 'mock-entity-registry' include 'metadata-service:services' include 'metadata-service:configuration' +include ':metadata-jobs:common' From bd9b0c49fbf26a344a0f2f626b8b33a04cefacd5 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 19 Jan 2024 16:32:22 -0600 Subject: [PATCH 398/792] perf(entity-service): batch exists calls (#9663) --- .../assertion/DeleteAssertionResolver.java | 6 +- .../UpdateDeprecationResolver.java | 7 +- .../resolvers/domain/SetDomainResolver.java | 8 +- .../resolvers/domain/UnsetDomainResolver.java | 6 +- .../resolvers/embed/UpdateEmbedResolver.java | 2 +- .../entity/EntityExistsResolver.java | 6 +- .../glossary/AddRelatedTermsResolver.java | 6 +- .../DeleteGlossaryEntityResolver.java | 6 +- .../glossary/RemoveRelatedTermsResolver.java | 4 +- .../lineage/UpdateLineageResolver.java | 10 +- .../BatchUpdateSoftDeletedResolver.java | 4 +- .../resolvers/mutate/DescriptionUtils.java | 52 ++++----- .../resolvers/mutate/MoveDomainResolver.java | 4 +- .../resolvers/mutate/UpdateNameResolver.java | 4 +- .../mutate/UpdateParentNodeResolver.java | 6 +- .../resolvers/mutate/util/DomainUtils.java | 11 +- .../resolvers/mutate/util/LabelUtils.java | 61 ++++++---- .../resolvers/mutate/util/LinkUtils.java | 12 +- .../resolvers/mutate/util/OwnerUtils.java | 23 ++-- .../resolvers/tag/SetTagColorResolver.java | 4 +- .../HyperParameterValueTypeMapper.java | 2 +- .../resolvers/UpdateLineageResolverTest.java | 42 +++---- .../DeleteAssertionResolverTest.java | 59 +++++----- .../BatchUpdateSoftDeletedResolverTest.java | 19 ++- .../BatchUpdateDeprecationResolverTest.java | 19 ++- .../UpdateDeprecationResolverTest.java | 46 ++++---- .../domain/BatchSetDomainResolverTest.java | 56 ++++++--- .../domain/MoveDomainResolverTest.java | 13 ++- .../domain/SetDomainResolverTest.java | 33 ++++-- .../domain/UnsetDomainResolverTest.java | 14 ++- .../embed/UpdateEmbedResolverTest.java | 14 ++- .../entity/EntityExistsResolverTest.java | 3 +- 
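The pattern behind this perf change: resolvers such as UpdateLineageResolver previously called exists(urn, true) once per URN, paying one storage round-trip each, and the patch adds a collection overload that returns the subset of URNs that exist, so the per-URN checks become in-memory contains() lookups. A generic sketch of that shape follows; the BiFunction is a stand-in for the new EntityService overload shown in the diffs below, not its actual interface.

import java.util.Collection;
import java.util.Set;
import java.util.function.BiFunction;

// Illustrative only: one batched existence lookup instead of N single-URN calls.
final class BatchedExistsSketch {
  static <U> void requireAllExist(
      Collection<U> urns, BiFunction<Collection<U>, Boolean, Set<U>> existsBatch) {
    Set<U> existing = existsBatch.apply(urns, true); // single round-trip
    for (U urn : urns) {
      if (!existing.contains(urn)) { // in-memory from here on
        throw new IllegalArgumentException("Entity does not exist: " + urn);
      }
    }
  }
}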
.../glossary/AddRelatedTermsResolverTest.java | 86 ++++++++------ .../DeleteGlossaryEntityResolverTest.java | 7 +- .../RemoveRelatedTermsResolverTest.java | 19 +-- .../glossary/UpdateNameResolverTest.java | 11 +- .../UpdateParentNodeResolverTest.java | 27 +++-- .../load/BatchGetEntitiesResolverTest.java | 7 +- .../mutate/UpdateUserSettingResolverTest.java | 4 +- .../owner/AddOwnersResolverTest.java | 110 +++++++++++------- .../owner/BatchAddOwnersResolverTest.java | 78 ++++++++----- .../owner/BatchRemoveOwnersResolverTest.java | 80 +++++++------ .../resolvers/tag/AddTagsResolverTest.java | 39 ++++--- .../tag/BatchAddTagsResolverTest.java | 48 +++++--- .../tag/BatchRemoveTagsResolverTest.java | 34 ++++-- .../tag/SetTagColorResolverTest.java | 12 +- .../resolvers/term/AddTermsResolverTest.java | 79 +++++++------ .../term/BatchAddTermsResolverTest.java | 96 ++++++++------- .../term/BatchRemoveTermsResolverTest.java | 80 +++++++------ .../metadata/client/JavaEntityClient.java | 2 +- .../metadata/entity/EntityServiceImpl.java | 70 +++++++---- .../linkedin/metadata/entity/EntityUtils.java | 23 ---- .../candidatesource/MostPopularSource.java | 40 +++---- .../candidatesource/RecentlyEditedSource.java | 40 +++---- .../candidatesource/RecentlyViewedSource.java | 40 +++---- .../sibling/SiblingGraphServiceTest.java | 6 +- .../SampleDataFixtureConfiguration.java | 17 ++- .../authentication/group/GroupService.java | 8 +- .../token/StatefulTokenService.java | 2 +- .../user/NativeUserService.java | 4 +- .../datahub/telemetry/TrackingService.java | 2 +- .../DataHubTokenAuthenticatorTest.java | 3 +- .../group/GroupServiceTest.java | 8 +- .../token/StatefulTokenServiceTest.java | 3 +- .../user/NativeUserServiceTest.java | 4 +- .../telemetry/TrackingServiceTest.java | 6 +- .../steps/IngestRetentionPoliciesStep.java | 6 +- .../boot/steps/RemoveClientIdAspectStep.java | 4 +- .../delegates/EntityApiDelegateImpl.java | 2 +- .../src/test/java/mock/MockEntityService.java | 5 +- .../resources/entity/EntityResource.java | 2 +- .../metadata/entity/EntityService.java | 23 +++- .../EntityRecommendationSource.java | 37 ++++++ .../metadata/shared/ValidationUtils.java | 79 +++++++++---- 74 files changed, 1064 insertions(+), 751 deletions(-) create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 89912b2814e40..cbf685e9f45bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -24,10 +24,10 @@ public class DeleteAssertionResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; public DeleteAssertionResolver( - final EntityClient entityClient, final EntityService entityService) { + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @@ -41,7 +41,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) () -> { // 1. check the entity exists. If not, return false. 
- if (!_entityService.exists(assertionUrn)) { + if (!_entityService.exists(assertionUrn, true)) { return true; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 62c88c506ba61..be887d845f385 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -37,7 +37,7 @@ public class UpdateDeprecationResolver implements DataFetcher _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -101,9 +101,10 @@ private boolean isAuthorizedToUpdateDeprecationForEntity( orPrivilegeGroups); } - public static Boolean validateUpdateDeprecationInput(Urn entityUrn, EntityService entityService) { + public static Boolean validateUpdateDeprecationInput( + Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 1c52f707c61a4..4d2e93be42fcb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -28,7 +28,7 @@ public class SetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -74,16 +74,16 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } public static Boolean validateSetDomainInput( - Urn entityUrn, Urn domainUrn, EntityService entityService) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { - if (!entityService.exists(domainUrn)) { + if (!entityService.exists(domainUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); } - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to add Entity %s to Domain %s. 
Entity does not exist.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index b2a82ac7608d8..c415d933e4a3a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -29,7 +29,7 @@ public class UnsetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -71,9 +71,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw }); } - public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format("Failed to add Entity %s to Domain %s. Entity does not exist.", entityUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index e1b264606074c..caaf76b0d1dc8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -82,7 +82,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw */ private static void validateUpdateEmbedInput( @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { - if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { + if (!entityService.exists(UrnUtils.getUrn(input.getUrn()), true)) { throw new IllegalArgumentException( String.format( "Failed to update embed for entity with urn %s. Entity does not exist!", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index d2bd2f3fb8a17..257f0a4efd260 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -12,9 +12,9 @@ /** Resolver responsible for returning whether an entity exists. 
*/ public class EntityExistsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; - public EntityExistsResolver(final EntityService entityService) { + public EntityExistsResolver(final EntityService entityService) { _entityService = entityService; } @@ -32,7 +32,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) return CompletableFuture.supplyAsync( () -> { try { - return _entityService.exists(entityUrn); + return _entityService.exists(entityUrn, true); } catch (Exception e) { throw new RuntimeException( String.format("Failed to check whether entity %s exists", entityUrn.toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 535dbbf70a4cb..31aa8b2ab9ddf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -29,7 +29,7 @@ @RequiredArgsConstructor public class AddRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -91,7 +91,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || !_entityService.exists(urn)) { + || !_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format( "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); @@ -104,7 +104,7 @@ public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); - } else if (!_entityService.exists(termUrn)) { + } else if (!_entityService.exists(termUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", urn, termUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index f623f0e34b366..3dc3e93260665 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -15,10 +15,10 @@ public class DeleteGlossaryEntityResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; public DeleteGlossaryEntityResolver( - final EntityClient entityClient, EntityService entityService) { + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @@ -33,7 +33,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) return CompletableFuture.supplyAsync( () -> { if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { + if (!_entityService.exists(entityUrn, true)) { throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 8c9b792b74e0d..b1dd404e12465 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -27,7 +27,7 @@ @RequiredArgsConstructor public class RemoveRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -46,7 +46,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || !_entityService.exists(urn)) { + || !_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format( "Failed to update %s. 
%s either does not exist or is not a glossaryTerm.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index a0caef20a4755..804bd6ca05431 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -35,7 +35,7 @@ @RequiredArgsConstructor public class UpdateLineageResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final LineageService _lineageService; @Override @@ -60,9 +60,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { + final Set existingDownstreamUrns = _entityService.exists(downstreamUrns, true); + // build MCP for every downstreamUrn for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { + if (!existingDownstreamUrns.contains(downstreamUrn)) { throw new IllegalArgumentException( String.format( "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); @@ -128,9 +130,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + final Set existingUpstreamUrns = _entityService.exists(upstreamUrns, true); + // build MCP for upstreamUrn if necessary for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { + if (!existingUpstreamUrns.contains(upstreamUrn)) { throw new IllegalArgumentException( String.format( "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 5a25e6d83e648..aa7c1b152790f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -20,7 +20,7 @@ @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -65,7 +65,7 @@ private void validateInputUrn(String urnStr, QueryContext context) { throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - if (!_entityService.exists(urn)) { + if (!_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format("Failed to soft delete entity with urn %s. 
Entity does not exist.", urn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index d0796389d2280..ab151d6244f48 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -44,7 +44,7 @@ public static void updateFieldDescription( Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService) { + EntityService entityService) { EditableSchemaMetadata editableSchemaMetadata = (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( @@ -66,7 +66,7 @@ public static void updateFieldDescription( } public static void updateContainerDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = (EditableContainerProperties) EntityUtils.getAspectFromEntity( @@ -84,7 +84,7 @@ public static void updateContainerDescription( } public static void updateDomainDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( @@ -107,7 +107,7 @@ public static void updateDomainDescription( } public static void updateTagDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( @@ -123,7 +123,7 @@ public static void updateTagDescription( } public static void updateCorpGroupDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( @@ -143,7 +143,7 @@ public static void updateCorpGroupDescription( } public static void updateGlossaryTermDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( @@ -168,7 +168,7 @@ public static void updateGlossaryTermDescription( } public static void updateGlossaryNodeDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( @@ -189,7 +189,7 @@ public static void updateGlossaryNodeDescription( } public static void updateNotebookDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity( @@ -212,8 +212,8 @@ public static Boolean validateFieldDescriptionInput( Urn resourceUrn, String subResource, 
SubResourceType subResourceType, - EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + EntityService<?> entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -223,8 +223,8 @@ public static Boolean validateFieldDescriptionInput( return true; } - public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateDomainInput(Urn resourceUrn, EntityService<?> entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -232,8 +232,8 @@ public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityS return true; } - public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateContainerInput(Urn resourceUrn, EntityService<?> entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -241,24 +241,24 @@ public static Boolean validateContainerInput(Urn resourceUrn, EntityService enti return true; } - public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateLabelInput(Urn resourceUrn, EntityService<?> entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { - if (!entityService.exists(corpUserUrn)) { + public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService<?> entityService) { + if (!entityService.exists(corpUserUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); } return true; } - public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { - if (!entityService.exists(notebookUrn)) { + public static Boolean validateNotebookInput(Urn notebookUrn, EntityService<?> entityService) { + if (!entityService.exists(notebookUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s.
%s does not exist.", notebookUrn, notebookUrn)); } @@ -335,7 +335,7 @@ public static boolean isAuthorizedToUpdateDescription( } public static void updateMlModelDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( @@ -355,7 +355,7 @@ public static void updateMlModelDescription( } public static void updateMlModelGroupDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( @@ -375,7 +375,7 @@ public static void updateMlModelGroupDescription( } public static void updateMlFeatureDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( @@ -395,7 +395,7 @@ public static void updateMlFeatureDescription( } public static void updateMlFeatureTableDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( @@ -415,7 +415,7 @@ public static void updateMlFeatureTableDescription( } public static void updateMlPrimaryKeyDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( @@ -435,7 +435,7 @@ public static void updateMlPrimaryKeyDescription( } public static void updateDataProductDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e4c5c132be4f7..dab8cfffd54e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -27,7 +27,7 @@ @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -67,7 +67,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { throw new IllegalArgumentException("Parent entity is not a domain."); } - if (!_entityService.exists(newParentDomainUrn)) { + if (!_entityService.exists(newParentDomainUrn, true)) { throw new 
IllegalArgumentException("Parent entity does not exist."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index dd44c2718b3a4..8e4a96637e04d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -35,7 +35,7 @@ @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -47,7 +47,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { - if (!_entityService.exists(targetUrn)) { + if (!_entityService.exists(targetUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 848118e6cc0f6..2fcec54978b4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -26,7 +26,7 @@ @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -37,7 +37,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); - if (!_entityService.exists(targetUrn)) { + if (!_entityService.exists(targetUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", targetUrn, targetUrn)); } @@ -45,7 +45,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) + if (!_entityService.exists(parentNodeUrn, true) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { throw new IllegalArgumentException( String.format( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index fb88d6c29f662..5dbd282580c87 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -77,7 +77,7 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List resources, Urn actor, - EntityService entityService) + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -87,7 +87,10 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + @Nullable Urn domainUrn, + ResourceRefInput resource, + Urn actor, + EntityService entityService) { Domains domains = (Domains) EntityUtils.getAspectFromEntity( @@ -104,8 +107,8 @@ private static MetadataChangeProposal buildSetDomainProposal( UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } - public static void validateDomain(Urn domainUrn, EntityService entityService) { - if (!entityService.exists(domainUrn)) { + public static void validateDomain(Urn domainUrn, EntityService entityService) { + if (!entityService.exists(domainUrn, true)) { throw new IllegalArgumentException( String.format("Failed to validate Domain with urn %s. 
Urn does not exist.", domainUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index 8765b91f65d9d..09323fdfc8377 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -42,7 +42,11 @@ public class LabelUtils { private LabelUtils() {} public static void removeTermFromResource( - Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) { + Urn labelUrn, + Urn resourceUrn, + String subResource, + Urn actor, + EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) @@ -80,7 +84,7 @@ public static void removeTermFromResource( } public static void removeTagsFromResources( - List tags, List resources, Urn actor, EntityService entityService) + List tags, List resources, Urn actor, EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -90,7 +94,10 @@ public static void removeTagsFromResources( } public static void addTagsToResources( - List tagUrns, List resources, Urn actor, EntityService entityService) + List tagUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -100,7 +107,10 @@ public static void addTagsToResources( } public static void removeTermsFromResources( - List termUrns, List resources, Urn actor, EntityService entityService) + List termUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -110,7 +120,10 @@ public static void removeTermsFromResources( } public static void addTermsToResources( - List termUrns, List resources, Urn actor, EntityService entityService) + List termUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -124,7 +137,7 @@ public static void addTermsToResource( Urn resourceUrn, String subResource, Urn actor, - EntityService entityService) + EntityService entityService) throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = @@ -248,7 +261,7 @@ public static void validateResourceAndLabel( String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, + EntityService entityService, Boolean isRemoving) { for (Urn urn : labelUrns) { validateResourceAndLabel( @@ -263,14 +276,14 @@ public static void validateResourceAndLabel( } public static void validateLabel( - Urn labelUrn, String labelEntityType, EntityService entityService) { + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { throw new IllegalArgumentException( String.format( "Failed to validate label with urn %s. 
Urn type does not match entity type %s..", labelUrn, labelEntityType)); } - if (!entityService.exists(labelUrn)) { + if (!entityService.exists(labelUrn, true)) { throw new IllegalArgumentException( String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); } @@ -281,8 +294,8 @@ public static void validateResource( Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + EntityService<?> entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); @@ -310,7 +323,7 @@ public static void validateResourceAndLabel( String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, + EntityService<?> entityService, Boolean isRemoving) { if (!isRemoving) { validateLabel(labelUrn, labelEntityType, entityService); @@ -319,7 +332,7 @@ } private static MetadataChangeProposal buildAddTagsProposal( - List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity @@ -331,7 +344,7 @@ } private static MetadataChangeProposal buildRemoveTagsProposal( - List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity @@ -343,7 +356,7 @@ } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( - List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) { com.linkedin.common.GlobalTags tags = (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity( @@ -361,7 +374,7 @@ } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( - List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( @@ -383,7 +396,7 @@ } private static MetadataChangeProposal buildAddTagsToEntityProposal( - List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { com.linkedin.common.GlobalTags tags = (com.linkedin.common.GlobalTags) @@ -402,7 +415,7 @@ } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( - List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> tagUrns, ResourceRefInput resource,
Urn actor, EntityService<?> entityService) throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) @@ -455,7 +468,7 @@ private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) } private static MetadataChangeProposal buildAddTermsProposal( - List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity @@ -467,7 +480,7 @@ } private static MetadataChangeProposal buildRemoveTermsProposal( - List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity @@ -479,7 +492,7 @@ } private static MetadataChangeProposal buildAddTermsToEntityProposal( - List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) @@ -500,7 +513,7 @@ } private static MetadataChangeProposal buildAddTermsToSubResourceProposal( - List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) @@ -526,7 +539,7 @@ } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( - List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( @@ -542,7 +555,7 @@ } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( - List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index b93c72edbcfc5..d82b8c17ff1b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -28,7 +28,11 @@ public class LinkUtils { private LinkUtils() {} public static void addLink( - String
linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) { + String linkUrl, + String linkLabel, + Urn resourceUrn, + Urn actor, + EntityService<?> entityService) { InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( @@ -46,7 +50,7 @@ } public static void removeLink( - String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { + String linkUrl, Urn resourceUrn, Urn actor, EntityService<?> entityService) { InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( @@ -109,7 +113,7 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( - String linkUrl, Urn resourceUrn, EntityService entityService) { + String linkUrl, Urn resourceUrn, EntityService<?> entityService) { try { new Url(linkUrl); @@ -120,7 +124,7 @@ resourceUrn)); } - if (!entityService.exists(resourceUrn)) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change institutional memory for resource %s. Resource does not exist.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 55d408d3f7aab..1dd9da97cb2f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -202,16 +202,16 @@ public static void validateAuthorizedToUpdateOwners( } public static void validateAddOwnerInput( - List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { + List<OwnerInput> owners, Urn resourceUrn, EntityService<?> entityService) { for (OwnerInput owner : owners) { validateAddOwnerInput(owner, resourceUrn, entityService); } } public static void validateAddOwnerInput( - OwnerInput owner, Urn resourceUrn, EntityService entityService) { + OwnerInput owner, Urn resourceUrn, EntityService<?> entityService) { - if (!entityService.exists(resourceUrn)) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); @@ -220,7 +220,7 @@ public static void validateAddOwnerInput( validateOwner(owner, entityService); } - public static void validateOwner(OwnerInput owner, EntityService entityService) { + public static void validateOwner(OwnerInput owner, EntityService<?> entityService) { OwnerEntityType ownerEntityType = owner.getOwnerEntityType(); Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn()); @@ -241,7 +241,7 @@ public static void validateOwner(OwnerInput owner, EntityService entityService) ownerUrn)); } - if (!entityService.exists(ownerUrn)) { + if (!entityService.exists(ownerUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource(s).
Owner with urn %s does not exist.", @@ -249,7 +249,7 @@ public static void validateOwner(OwnerInput owner, EntityService entityService) } if (owner.getOwnershipTypeUrn() != null - && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { + && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()), true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource(s). Custom Ownership type with " @@ -264,8 +264,8 @@ public static void validateOwner(OwnerInput owner, EntityService entityService) } } - public static void validateRemoveInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static void validateRemoveInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); @@ -276,17 +276,18 @@ public static void addCreatorAsOwner( QueryContext context, String urn, OwnerEntityType ownerEntityType, - EntityService entityService) { + EntityService entityService) { try { Urn actorUrn = CorpuserUrn.createFromString(context.getActorUrn()); OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + if (!entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())), true)) { log.warn("Technical owner does not exist, defaulting to None ownership."); ownershipType = OwnershipType.NONE; } String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name()); - if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) { + if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn), true)) { throw new RuntimeException( String.format("Unknown ownership type urn %s", ownershipTypeUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java index 7b9290b4532b5..e548c6f3eae07 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java @@ -33,7 +33,7 @@ public class SetTagColorResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -53,7 +53,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } // If tag does not exist, then throw exception. - if (!_entityService.exists(tagUrn)) { + if (!_entityService.exists(tagUrn, true)) { throw new IllegalArgumentException( String.format("Failed to set Tag %s color. 
Tag does not exist.", tagUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java index f60f34dd7a085..81849df320e57 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java @@ -33,7 +33,7 @@ public HyperParameterValueType apply( } else if (input.isDouble()) { result = new FloatBox(input.getDouble()); } else if (input.isFloat()) { - result = new FloatBox(new Double(input.getFloat())); + result = new FloatBox(Double.valueOf(input.getFloat())); } else { throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java index 0d87ce4b2e2ad..3f228efafac42 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java @@ -2,10 +2,11 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.LineageEdge; @@ -16,8 +17,10 @@ import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletionException; import org.joda.time.DateTimeUtils; import org.mockito.Mockito; @@ -64,10 +67,8 @@ public void testUpdateDatasetLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertTrue(resolver.get(_mockEnv).get()); } @@ -79,8 +80,7 @@ public void testFailUpdateWithMissingDownstream() throws Exception { mockInputAndContext(edgesToAdd, new ArrayList<>()); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(false); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(false); + 
Mockito.when(_mockService.exists(any(Collection.class), eq(true))).thenAnswer(args -> Set.of()); assertThrows(CompletionException.class, () -> resolver.get(_mockEnv).join()); } @@ -93,9 +93,8 @@ public void testUpdateChartLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertTrue(resolver.get(_mockEnv).get()); } @@ -112,10 +111,8 @@ public void testUpdateDashboardLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DASHBOARD_URN))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertTrue(resolver.get(_mockEnv).get()); } @@ -133,11 +130,8 @@ public void testUpdateDataJobLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertTrue(resolver.get(_mockEnv).get()); } @@ -153,15 +147,13 @@ public void testFailUpdateLineageNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); - Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(_mockEnv.getContext()).thenReturn(mockContext); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join()); } @@ -169,7 +161,7 @@ public void testFailUpdateLineageNoPermissions() throws Exception { private 
void mockInputAndContext(List edgesToAdd, List edgesToRemove) { QueryContext mockContext = getMockAllowContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); - Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(_mockEnv.getContext()).thenReturn(mockContext); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java index 019d254ffdaac..f09ead41e5c46 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -31,7 +32,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) @@ -49,24 +51,23 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); Mockito.verify(mockClient, Mockito.times(1)) .deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class)); Mockito.verify(mockService, Mockito.times(1)) .getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); + .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -74,7 +75,8 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) @@ -85,24 +87,23 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { // Execute resolver QueryContext 
mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); Mockito.verify(mockClient, Mockito.times(1)) .deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class)); Mockito.verify(mockService, Mockito.times(1)) .getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); + .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -111,32 +112,32 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(false); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); Mockito.verify(mockClient, Mockito.times(0)) .deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), - Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), + eq(Constants.ASSERTION_ENTITY_NAME), + eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), + eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), Mockito.any(Authentication.class)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); + .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -144,7 +145,8 @@ public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) @@ -161,7 +163,7 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -178,14 +180,15 @@ public void testGetEntityClientException() throws Exception { .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index 56b01be29e163..f83adf33d521a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -47,8 +48,10 @@ public void testGetSuccessNoExistingStatus() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); @@ -94,8 +97,10 @@ public void testGetSuccessExistingStatus() throws Exception { Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); @@ -138,8 +143,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index be7f200a6b9d7..f25d5a4cbbf04 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -48,8 +49,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); @@ -109,8 +112,10 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.eq(0L))) .thenReturn(originalDeprecation); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); @@ -163,8 +168,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index e4be330f5ba2a..f4b45b3dc8f29 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -45,9 +46,9 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { Mockito.when( mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), Mockito.any(Authentication.class))) .thenReturn( ImmutableMap.of( @@ -58,7 +59,8 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -66,7 +68,7 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); @@ -81,10 +83,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal(eq(proposal), Mockito.any(Authentication.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -101,9 +103,9 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.when( mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), Mockito.any(Authentication.class))) .thenReturn( ImmutableMap.of( @@ -119,7 +121,8 @@ public void testGetSuccessExistingDeprecation() throws Exception { .setValue(new Aspect(originalDeprecation.data()))))))); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateDeprecationResolver 
         resolver = new UpdateDeprecationResolver(mockClient, mockService);
@@ -127,7 +130,7 @@ public void testGetSuccessExistingDeprecation() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     resolver.get(mockEnv).get();
@@ -142,10 +145,10 @@ public void testGetSuccessExistingDeprecation() throws Exception {
             UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation);

     Mockito.verify(mockClient, Mockito.times(1))
-        .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));
+        .ingestProposal(eq(proposal), Mockito.any(Authentication.class), eq(false));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -155,9 +158,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception {

     Mockito.when(
             mockClient.batchGetV2(
-                Mockito.eq(Constants.DATASET_ENTITY_NAME),
-                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                eq(Constants.DATASET_ENTITY_NAME),
+                eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
                 Mockito.any(Authentication.class)))
         .thenReturn(
             ImmutableMap.of(
@@ -168,7 +171,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = Mockito.mock(EntityService.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);

     UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService);
@@ -176,7 +180,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
@@ -193,7 +197,7 @@ public void testGetUnauthorized() throws Exception {

     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

@@ -214,7 +218,7 @@ public void testGetEntityClientException() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
index 32f0d30e7751a..81343b75f7d7e 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.google.common.collect.ImmutableList;
@@ -53,11 +54,15 @@ public void testGetSuccessNoExistingDomains() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)))
+        .thenReturn(true);

     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
@@ -88,7 +93,7 @@ public void testGetSuccessNoExistingDomains() throws Exception {
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true));
   }

   @Test
@@ -113,11 +118,15 @@ public void testGetSuccessExistingDomains() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(originalDomain);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)))
+        .thenReturn(true);

     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
@@ -153,7 +162,7 @@ public void testGetSuccessExistingDomains() throws Exception {
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true));
   }

   @Test
@@ -178,11 +187,15 @@ public void testGetSuccessUnsetDomains() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(originalDomain);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)))
+        .thenReturn(true);

     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
@@ -222,8 +235,10 @@ public void testGetFailureDomainDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(false);

     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
@@ -260,9 +275,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);

     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
index a0eff5d0574db..1aa7f5aef467c 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;

@@ -73,7 +74,8 @@ private MetadataChangeProposal setupTests(
   public void testGetSuccess() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);

@@ -92,7 +94,8 @@ public void testGetSuccess() throws Exception {
   public void testGetFailureEntityDoesNotExist() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);

@@ -115,7 +118,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
   public void testGetFailureParentDoesNotExist() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(false);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);

@@ -130,7 +134,8 @@ public void testGetFailureParentDoesNotExist() throws Exception {
   public void testGetFailureParentIsNotDomain() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java
index ad5ad2315ce43..7b8d11802792b 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.datahub.authentication.Authentication;
@@ -58,8 +59,10 @@ public void testGetSuccessNoExistingDomains() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);

     SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService);
@@ -82,10 +85,10 @@ public void testGetSuccessNoExistingDomains() throws Exception {
         .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true));
   }

   @Test
@@ -119,8 +122,10 @@ public void testGetSuccessExistingDomains() throws Exception {
                                 .setValue(new Aspect(originalDomains.data()))))))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);

     SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService);
@@ -143,10 +148,10 @@ public void testGetSuccessExistingDomains() throws Exception {
         .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true));
   }

   @Test
@@ -170,8 +175,10 @@ public void testGetFailureDomainDoesNotExist() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)))
+        .thenReturn(false);

     SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService);
@@ -208,8 +215,10 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);

     SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java
index 7e6e258168898..7ac45fe98b131 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.datahub.authentication.Authentication;
@@ -57,7 +58,8 @@ public void testGetSuccessNoExistingDomains() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService);
@@ -77,7 +79,7 @@ public void testGetSuccessNoExistingDomains() throws Exception {
         .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -111,7 +113,8 @@ public void testGetSuccessExistingDomains() throws Exception {
                                 .setValue(new Aspect(originalDomains.data()))))))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService);
@@ -131,7 +134,7 @@ public void testGetSuccessExistingDomains() throws Exception {
         .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -155,7 +158,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);

     UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java
index 241951319c75e..ed04a14ed7c3a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.datahub.authentication.Authentication;
@@ -47,7 +48,8 @@ public void testGetSuccessNoExistingEmbed() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService);
@@ -68,7 +70,7 @@ public void testGetSuccessNoExistingEmbed() throws Exception {
     ;

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -85,7 +87,8 @@ public void testGetSuccessExistingEmbed() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(originalEmbed);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService);
@@ -105,7 +108,7 @@ public void testGetSuccessExistingEmbed() throws Exception {
     verifySingleIngestProposal(mockService, 1, proposal);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -128,7 +131,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);

     UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java
index fa8b1d6a747ca..c3c9ccea6d270 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java
@@ -3,6 +3,7 @@
 import static org.mockito.Mockito.*;
 import static org.testng.Assert.*;

+import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.entity.EntityService;
 import graphql.schema.DataFetchingEnvironment;
 import org.testng.annotations.BeforeMethod;
@@ -33,7 +34,7 @@ public void testFailsNullEntity() {
   @Test
   public void testPasses() throws Exception {
     when(_dataFetchingEnvironment.getArgument("urn")).thenReturn(ENTITY_URN_STRING);
-    when(_entityService.exists(any())).thenReturn(true);
+    when(_entityService.exists(any(Urn.class), eq(true))).thenReturn(true);

     assertTrue(_resolver.get(_dataFetchingEnvironment).join());
   }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java
index 287d270ab569c..8c5b1d7607027 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.google.common.collect.ImmutableList;
@@ -28,9 +29,9 @@ private EntityService setUpService() {
     EntityService mockService = getMockEntityService();
     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-                Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(null);
     return mockService;
   }
@@ -39,9 +40,12 @@ private EntityService setUpService() {
   public void testGetSuccessIsRelatedNonExistent() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -52,26 +56,29 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception {
             TEST_ENTITY_URN,
             ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
             TermRelationshipType.isA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertTrue(resolver.get(mockEnv).get());
     verifySingleIngestProposal(mockService, 1);
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)));
+        .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true));
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)));
+        .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true));
   }

   @Test
   public void testGetSuccessHasRelatedNonExistent() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -82,24 +89,25 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception {
             TEST_ENTITY_URN,
             ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
             TermRelationshipType.hasA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertTrue(resolver.get(mockEnv).get());
     verifySingleIngestProposal(mockService, 1);
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)));
+        .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true));
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)));
+        .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true));
   }

   @Test
   public void testGetFailAddSelfAsRelatedTerm() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -108,7 +116,7 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception {
     RelatedTermsInput input =
         new RelatedTermsInput(
             TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
@@ -119,7 +127,8 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception {
   public void testGetFailAddNonTermAsRelatedTerm() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -128,7 +137,7 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception {
     RelatedTermsInput input =
         new RelatedTermsInput(
             TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
@@ -139,8 +148,10 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception {
   public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)))
+        .thenReturn(false);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -149,7 +160,7 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception {
     RelatedTermsInput input =
         new RelatedTermsInput(
             TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
@@ -160,8 +171,10 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception {
   public void testGetFailAddToNonExistentUrn() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -170,7 +183,7 @@ public void testGetFailAddToNonExistentUrn() throws Exception {
     RelatedTermsInput input =
         new RelatedTermsInput(
             TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
@@ -181,8 +194,10 @@ public void testGetFailAddToNonExistentUrn() throws Exception {
   public void testGetFailAddToNonTerm() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(DATASET_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(DATASET_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -191,7 +206,7 @@ public void testGetFailAddToNonTerm() throws Exception {
     RelatedTermsInput input =
         new RelatedTermsInput(
             DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
@@ -202,9 +217,12 @@ public void testGetFailAddToNonTerm() throws Exception {
   public void testFailNoPermissions() throws Exception {
     EntityService mockService = setUpService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)))
+        .thenReturn(true);

     AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService);
@@ -215,7 +233,7 @@ public void testFailNoPermissions() throws Exception {
             TEST_ENTITY_URN,
             ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
             TermRelationshipType.isA);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java
index 7229d2acf763d..f4d4c528dc0c6 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
 import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;

@@ -26,7 +27,8 @@ public void testGetSuccess() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_URN)), eq(true)))
+        .thenReturn(true);

     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -50,7 +52,8 @@ public void testGetEntityClientException() throws Exception {
         .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));

     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_URN)), eq(true)))
+        .thenReturn(true);

     DeleteGlossaryEntityResolver resolver =
         new DeleteGlossaryEntityResolver(mockClient, mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java
index 47de668b2c9dc..e46d8b1503d9e 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java
@@ -1,6 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.glossary;

 import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;

@@ -41,7 +42,8 @@ public void testGetSuccessIsA() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(relatedTerms);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService);
@@ -56,7 +58,7 @@ public void testGetSuccessIsA() throws Exception {
     assertTrue(resolver.get(mockEnv).get());
     verifySingleIngestProposal(mockService, 1);
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -73,7 +75,8 @@ public void testGetSuccessHasA() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(relatedTerms);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService);
@@ -88,7 +91,7 @@ public void testGetSuccessHasA() throws Exception {
     assertTrue(resolver.get(mockEnv).get());
     verifySingleIngestProposal(mockService, 1);
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }

   @Test
@@ -101,7 +104,8 @@ public void testFailAspectDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService);
@@ -131,7 +135,8 @@ public void testFailNoPermissions() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(relatedTerms);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);

     RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService);
@@ -146,6 +151,6 @@ public void testFailNoPermissions() throws Exception {
     assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get());
     verifyNoIngestProposal(mockService);
     Mockito.verify(mockService, Mockito.times(0))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java
index 3972715fcefb1..062c1da5e038d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;

@@ -61,7 +62,7 @@ private MetadataChangeProposal setupTests(
   public void testGetSuccess() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true);

     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -76,7 +77,7 @@ public void testGetSuccess() throws Exception {
   public void testGetSuccessForNode() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(NODE_URN)), eq(true))).thenReturn(true);

     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_NODE);
@@ -106,7 +107,8 @@ public void testGetSuccessForNode() throws Exception {
   public void testGetSuccessForDomain() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(DOMAIN_URN)), eq(true)))
+        .thenReturn(true);

     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_DOMAIN);
@@ -148,7 +150,8 @@ public void testGetSuccessForDomain() throws Exception {
   public void testGetFailureEntityDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true)))
+        .thenReturn(false);

     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java
index 74a59b10a40b0..cdab78023b846 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;

@@ -63,8 +64,9 @@ private MetadataChangeProposal setupTests(
   public void testGetSuccess() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)))
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true);
+    Mockito.when(
+            mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true)))
         .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -80,8 +82,9 @@ public void testGetSuccess() throws Exception {
   public void testGetSuccessForNode() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)))
+    Mockito.when(mockService.exists(eq(Urn.createFromString(NODE_URN)), eq(true))).thenReturn(true);
+    Mockito.when(
+            mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true)))
         .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE);
@@ -114,8 +117,10 @@ public void testGetSuccessForNode() throws Exception {
   public void testGetFailureEntityDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false);
-    Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)))
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(
+            mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true)))
         .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -131,8 +136,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
   public void testGetFailureNodeDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)))
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true);
+    Mockito.when(
+            mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true)))
         .thenReturn(false);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -148,8 +154,9 @@ public void testGetFailureNodeDoesNotExist() throws Exception {
   public void testGetFailureParentIsNotNode() throws Exception {
     EntityService mockService = getMockEntityService();
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)))
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true);
+    Mockito.when(
+            mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true)))
         .thenReturn(true);
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java
index 6bd5b4f8c3f38..21d1e0caa1bb2 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java
@@ -13,6 +13,7 @@
 import com.linkedin.metadata.entity.EntityService;
 import graphql.schema.DataFetchingEnvironment;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -79,7 +80,8 @@ public void testReordering() throws Exception {
         CompletableFuture.completedFuture(
             ImmutableList.of(mockResponseEntity2, mockResponseEntity1));
     when(mockDataLoader.loadMany(any())).thenReturn(mockFuture);
-    when(_entityService.exists(any())).thenReturn(true);
+    when(_entityService.exists(any(List.class), eq(true)))
+        .thenAnswer(args -> Set.of(args.getArgument(0)));
     List batchGetResponse = resolver.get(_dataFetchingEnvironment).join();
     assertEquals(batchGetResponse.size(), 2);
     assertEquals(batchGetResponse.get(0), mockResponseEntity1);
@@ -108,7 +110,8 @@ public void testDuplicateUrns() throws Exception {
     CompletableFuture mockFuture =
         CompletableFuture.completedFuture(ImmutableList.of(mockResponseEntity));
     when(mockDataLoader.loadMany(any())).thenReturn(mockFuture);
-    when(_entityService.exists(any())).thenReturn(true);
+    when(_entityService.exists(any(List.class), eq(true)))
+        .thenAnswer(args -> Set.of(args.getArgument(0)));
     List batchGetResponse = resolver.get(_dataFetchingEnvironment).join();
     assertEquals(batchGetResponse.size(), 2);
     assertEquals(batchGetResponse.get(0), mockResponseEntity);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java
index 3fee28bc31725..f8fe38187b30d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java
@@ -2,6 +2,7 @@

 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;

 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -22,7 +23,8 @@ public class UpdateUserSettingResolverTest {
   @Test
   public void testWriteCorpUserSettings() throws Exception {
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_USER_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_USER_URN)), eq(true)))
+        .thenReturn(true);

     UpdateUserSettingResolver resolver = new UpdateUserSettingResolver(mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java
index 5e199f2c6b2c7..4bd16d5311818 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java
@@ -1,6 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.owner;

 import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.google.common.collect.ImmutableList;
@@ -45,16 +47,21 @@ public void testGetSuccessNoExistingOwners() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)))
+        .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);
@@ -84,10 +91,10 @@ public void testGetSuccessNoExistingOwners() throws Exception {
     verifyIngestProposal(mockService, 1);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true));
   }

   @Test
@@ -112,15 +119,19 @@ public void testGetSuccessExistingOwnerNewType() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(oldOwnership);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)))
+        .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);
@@ -147,7 +158,7 @@ public void testGetSuccessExistingOwnerNewType() throws Exception {
     verifyIngestProposal(mockService, 1);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true));
   }

   @Test
@@ -172,15 +183,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(oldOwnership);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(any(Urn.class), eq(true))).thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);
@@ -207,7 +219,7 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception {
     verifyIngestProposal(mockService, 1);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true));
   }

   @Test
@@ -232,24 +244,32 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(oldOwnership);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true)))
+        .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);
@@ -288,13 +308,13 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception {
     verifyIngestProposal(mockService, 1);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true));
   }

   @Test
@@ -308,8 +328,10 @@ public void testGetFailureOwnerDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)))
+        .thenReturn(false);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);
@@ -343,8 +365,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)))
+        .thenReturn(true);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);
@@ -398,7 +422,7 @@ public void testGetEntityClientException() throws Exception {

     Mockito.doThrow(RuntimeException.class)
         .when(mockService)
-        .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean());
+        .ingestProposal(any(AspectsBatchImpl.class), Mockito.anyBoolean());

     AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class));
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java
index 92960f45232b5..cb607adf45c0a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java
@@ -1,6 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.owner;

 import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.google.common.collect.ImmutableList;
@@ -52,18 +53,24 @@ public void testGetSuccessNoExistingOwners() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)))
+        .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);
@@ -99,10 +106,10 @@ public void testGetSuccessNoExistingOwners() throws Exception {
     verifyIngestProposal(mockService, 1);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true));
   }

   @Test
@@ -131,26 +138,34 @@ public void testGetSuccessExistingOwners() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(originalOwnership);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)))
+        .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     Mockito.when(
             mockService.exists(
-                Urn.createFromString(
-                    OwnerUtils.mapOwnershipTypeToEntity(
-                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
-                            .name()))))
+                eq(
+                    Urn.createFromString(
+                        OwnerUtils.mapOwnershipTypeToEntity(
+                            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                                .name()))),
+                eq(true)))
         .thenReturn(true);

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);
@@ -186,10 +201,10 @@ public void testGetSuccessExistingOwners() throws Exception {
     verifyIngestProposal(mockService, 1);

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true));

     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true));
   }

   @Test
@@ -203,8 +218,10 @@ public void testGetFailureOwnerDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(false);

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);
@@ -256,9 +273,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
                 Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(true);

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java
index 10c95c1bac648..84e0f6f282a7b 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java
@@ -1,6 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.owner;

 import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;

 import com.google.common.collect.ImmutableList;
@@ -37,22 +38,26 @@ public void testGetSuccessNoExistingOwners() throws Exception {

     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                eq(Constants.OWNERSHIP_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(null);

     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                eq(Constants.OWNERSHIP_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)))
+        .thenReturn(true);

     BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService);
@@ -66,7 +71,7 @@ public void testGetSuccessNoExistingOwners() throws Exception {
             ImmutableList.of(
                 new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
                 new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertTrue(resolver.get(mockEnv).get());
@@ -88,9 +93,9 @@ public void testGetSuccessExistingOwners() throws Exception {

     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                eq(Constants.OWNERSHIP_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(oldOwners1);

     final Ownership oldOwners2 =
@@ -104,16 +109,20 @@ public void testGetSuccessExistingOwners() throws Exception {

     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                eq(Constants.OWNERSHIP_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(oldOwners2);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)))
+        .thenReturn(true);

     BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService);
@@ -127,7 +136,7 @@ public void testGetSuccessExistingOwners() throws Exception {
             ImmutableList.of(
                 new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
                 new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertTrue(resolver.get(mockEnv).get());
@@ -140,20 +149,23 @@ public void testGetFailureResourceDoesNotExist() throws Exception {

     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                eq(Constants.OWNERSHIP_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(null);

     Mockito.when(
             mockService.getAspect(
-                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-                Mockito.eq(0L)))
+                eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                eq(Constants.OWNERSHIP_ASPECT_NAME),
+                eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)))
+        .thenReturn(true);

     BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService);
@@ -167,7 +179,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
             ImmutableList.of(
                 new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
                 new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
@@ -189,7 +201,7 @@ public void testGetUnauthorized() throws Exception {
             ImmutableList.of(
                 new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
                 new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input);
     QueryContext mockContext =
getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -217,7 +229,7 @@ public void testGetEntityClientException() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 2468cef0e1216..1898753e5ae76 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -42,9 +43,12 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -73,10 +77,10 @@ public void testGetSuccessNoExistingTags() throws Exception { verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -97,9 +101,12 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -128,10 +135,10 @@ public void testGetSuccessExistingTags() throws Exception { verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - 
.exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -145,8 +152,10 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -173,8 +182,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index c174d917748eb..82dd13ee29e8a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -53,11 +54,15 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -92,10 +97,10 @@ public void testGetSuccessNoExistingTags() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), 
eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -123,11 +128,15 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -162,10 +171,10 @@ public void testGetSuccessExistingTags() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -179,8 +188,10 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -216,9 +227,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index ba75b41388587..83de3acfb4c94 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -55,11 +56,15 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -127,11 +132,15 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(oldTags2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -178,9 +187,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index 6ae72fcbb7268..f7929012ccb68 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -47,7 +48,8 @@ public void testGetSuccessExistingProperties() throws Exception { Mockito.eq(0L))) .thenReturn(oldTagProperties); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -69,7 +71,7 @@ public void testGetSuccessExistingProperties() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -86,7 +88,8 @@ public void testGetFailureNoExistingProperties() throws Exception { Mockito.eq(0))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -131,7 +134,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 397bb533ff871..d0697df3f2f6c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -34,14 +35,17 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + 
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -51,19 +55,19 @@ public void testGetSuccessNoExistingTerms() throws Exception { AddTermsInput input = new AddTermsInput( ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)) - .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test @@ -80,14 +84,17 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -97,19 +104,19 @@ public void testGetSuccessExistingTerms() throws Exception { AddTermsInput input = new AddTermsInput( ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)) - .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test @@ -118,13 
+125,15 @@ public void testGetFailureTermDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -133,7 +142,7 @@ public void testGetFailureTermDoesNotExist() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -147,13 +156,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -162,7 +173,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -180,7 +191,7 @@ public void testGetUnauthorized() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -204,7 +215,7 @@ public void testGetEntityClientException() throws Exception { QueryContext mockContext = getMockAllowContext(); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 2c85e870dd6ac..b3700632f56cd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -37,24 +38,26 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -68,17 +71,17 @@ public void testGetSuccessNoExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test @@ -95,24 +98,26 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( 
- Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -126,17 +131,17 @@ public void testGetSuccessExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test @@ -145,13 +150,14 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -163,7 +169,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { new BatchAddTermsInput( ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); 
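
The eq(...) wrappers added throughout these test diffs are not cosmetic: Mockito requires that once any argument of a stubbed or verified call uses an argument matcher, every argument must be a matcher, so stubbing the new includeSoftDeleted boolean as eq(true) forces the previously raw Urn and String arguments to be wrapped as well. A minimal sketch of the rule, using a hypothetical Service interface that mirrors the new two-argument exists() shape rather than the real EntityService:

import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

interface Service {
  // Mirrors the shape of the new exists(Urn, boolean) signature.
  boolean exists(String urn, boolean includeSoftDeleted);
}

class MatcherRuleSketch {
  void stub() {
    Service service = mock(Service.class);

    // Invalid: mixes a raw value with a matcher; Mockito throws
    // InvalidUseOfMatchersException when this stubbing runs.
    // when(service.exists("urn:li:tag:Legacy", eq(true))).thenReturn(true);

    // Valid: every argument is a matcher, hence the eq(...) wrapping
    // applied across these resolver tests.
    when(service.exists(eq("urn:li:tag:Legacy"), eq(true))).thenReturn(true);
  }
}
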
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -176,20 +182,22 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -203,7 +211,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -224,7 +232,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -249,7 +257,7 @@ public void testGetEntityClientException() throws Exception { new BatchAddTermsInput( ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index c2520f4dfb712..a76a813802b94 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; 
+import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -37,22 +38,26 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -65,7 +70,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -88,9 +93,9 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms1); final GlossaryTerms oldTerms2 = @@ -103,16 +108,20 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + 
.thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -125,7 +134,7 @@ public void testGetSuccessExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -138,20 +147,23 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -164,7 +176,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -185,7 +197,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -212,7 +224,7 @@ public void testGetEntityClientException() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java 
b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 34921e4182b10..9b3f42a37b45d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -625,7 +625,7 @@ public SearchResult filter( @Override public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.exists(urn); + return _entityService.exists(urn, true); } @SneakyThrows diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index 2e19916ee3c8f..ed69e919a7b24 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -93,6 +93,7 @@ import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -1782,7 +1783,8 @@ private EntityResponse toEntityResponse( return response; } - private Map> buildEntityToValidAspects(final EntityRegistry entityRegistry) { + private static Map> buildEntityToValidAspects( + final EntityRegistry entityRegistry) { return entityRegistry.getEntitySpecs().values().stream() .collect( Collectors.toMap( @@ -1950,36 +1952,54 @@ public RollbackRunResult deleteUrn(Urn urn) { } /** - * Returns true if the entity exists (has materialized aspects) + * Returns a set of urns of entities that exist (have materialized aspects). * - * @param urn the urn of the entity to check - * @return true if the entity exists, false otherwise + * @param urns the collection of urns of the entities to check + * @param includeSoftDeleted whether to include soft-deleted entities + * @return a set of urns of entities that exist. */ @Override - public Boolean exists(Urn urn) { - final Set aspectsToFetch = getEntityAspectNames(urn); - final List dbKeys = - aspectsToFetch.stream() + public Set exists(@Nonnull final Collection urns, boolean includeSoftDeleted) { + final Set dbKeys = + urns.stream() .map( - aspectName -> - new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList()); + urn -> + new EntityAspectIdentifier( + urn.toString(), + _entityRegistry + .getEntitySpec(urn.getEntityType()) + .getKeyAspectSpec() + .getName(), + ASPECT_LATEST_VERSION)) + .collect(Collectors.toSet()); - Map aspects = _aspectDao.batchGet(new HashSet(dbKeys)); - return aspects.values().stream().anyMatch(aspect -> aspect != null); - } + final Map aspects = _aspectDao.batchGet(dbKeys); + final Set existingUrnStrings = + aspects.values().stream() + .filter(aspect -> aspect != null) + .map(aspect -> aspect.getUrn()) + .collect(Collectors.toSet()); - /** - * Returns true if an entity is soft-deleted. - * - * @param urn the urn to check - * @return true is the entity is soft deleted, false otherwise.
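
Two things change in the exists() rewrite above: the lookup becomes batch-oriented (one batchGet over all key aspects instead of fetching every aspect name of a single urn), and soft-delete handling becomes an explicit choice, with callers such as JavaEntityClient keeping the old semantics by passing includeSoftDeleted = true. When the flag is false, the continuation below filters the surviving urns against their Status aspect. A minimal sketch of that filtering step, where Urn and Status are illustrative stand-ins for the real com.linkedin classes and the map stands in for the getLatestAspects() result:

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

final class SoftDeleteFilterSketch {
  record Urn(String value) {}
  record Status(boolean removed) {}

  // Keep only urns whose Status aspect is absent or not marked removed;
  // an entity that has never been soft-deleted simply has no Status entry.
  static Set<Urn> dropSoftDeleted(Set<Urn> existing, Map<Urn, Status> statuses) {
    return existing.stream()
        .filter(
            urn -> {
              Status status = statuses.get(urn);
              return status == null || !status.removed();
            })
        .collect(Collectors.toSet());
  }
}
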
- */ - @Override - public Boolean isSoftDeleted(@Nonnull final Urn urn) { - Objects.requireNonNull(urn, "urn is required"); - final RecordTemplate statusAspect = getLatestAspect(urn, STATUS_ASPECT_NAME); - return statusAspect != null && ((Status) statusAspect).isRemoved(); + Set existing = + urns.stream() + .filter(urn -> existingUrnStrings.contains(urn.toString())) + .collect(Collectors.toSet()); + + if (includeSoftDeleted) { + return existing; + } else { + // Additionally exclude status.removed == true + Map> statusResult = + getLatestAspects(existing, Set.of(STATUS_ASPECT_NAME)); + return existing.stream() + .filter( + urn -> + // key aspect is always returned, make sure to only consider the status aspect + statusResult.getOrDefault(urn, List.of()).stream() + .filter(aspect -> STATUS_ASPECT_NAME.equals(aspect.schema().getName())) + .noneMatch(aspect -> ((Status) aspect).isRemoved())) + .collect(Collectors.toSet()); + } } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java index 459b2d183d7ac..4d3ac9a550553 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java @@ -6,11 +6,9 @@ import com.datahub.util.RecordUtils; import com.google.common.base.Preconditions; import com.linkedin.common.AuditStamp; -import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.entity.validation.EntityRegistryUrnValidator; import com.linkedin.metadata.entity.validation.RecordTemplateValidator; @@ -157,27 +155,6 @@ public static SystemMetadata parseSystemMetadata(String jsonSystemMetadata) { return RecordUtils.toRecordTemplate(SystemMetadata.class, jsonSystemMetadata); } - /** Check if entity is removed (removed=true in Status aspect) and exists */ - public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) { - try { - - if (!entityService.exists(entityUrn)) { - return false; - } - - EnvelopedAspect statusAspect = - entityService.getLatestEnvelopedAspect(entityUrn.getEntityType(), entityUrn, "status"); - if (statusAspect == null) { - return false; - } - Status status = new Status(statusAspect.getValue().data()); - return status.isRemoved(); - } catch (Exception e) { - log.error("Error while checking if {} is removed", entityUrn, e); - return false; - } - } - public static RecordTemplate buildKeyAspect( @Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { final EntitySpec spec = entityRegistry.getEntitySpec(urnToEntityName(urn)); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java index 35d75de482007..f5c783014caa1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java @@ -4,15 +4,11 @@ import com.datahub.util.exception.ESQueryException; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.metadata.Constants; import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants; import com.linkedin.metadata.datahubusage.DataHubUsageEventType; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.EntityUtils; -import com.linkedin.metadata.recommendation.EntityProfileParams; import com.linkedin.metadata.recommendation.RecommendationContent; -import com.linkedin.metadata.recommendation.RecommendationParams; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; @@ -22,7 +18,6 @@ import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -37,12 +32,13 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; +import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; @Slf4j @RequiredArgsConstructor -public class MostPopularSource implements RecommendationSource { +public class MostPopularSource implements EntityRecommendationSource { /** Entity Types that should be in scope for this type of recommendation. */ private static final Set SUPPORTED_ENTITY_TYPES = ImmutableSet.of( @@ -59,7 +55,7 @@ public class MostPopularSource implements RecommendationSource { private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; - private final EntityService _entityService; + private final EntityService _entityService; private static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; private static final String ENTITY_AGG_NAME = "entity"; @@ -107,10 +103,11 @@ public List getRecommendations( _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets().stream() - .map(bucket -> buildContent(bucket.getKeyAsString())) - .filter(Optional::isPresent) - .map(Optional::get) + List bucketUrns = + parsedTerms.getBuckets().stream() + .map(MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toList()); + return buildContent(bucketUrns, _entityService) .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { @@ -119,6 +116,11 @@ public List getRecommendations( } } + @Override + public Set getSupportedEntityTypes() { + return SUPPORTED_ENTITY_TYPES; + } + private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { // TODO: Proactively filter for entity types in the supported set. 
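
In the recommendation sources, the per-urn buildContent() helper (removed further down) called EntityUtils.checkIfRemoved() once per aggregation bucket, i.e. one aspect lookup per candidate. The refactor moves this into a shared buildContent(urns, entityService) on the new EntityRecommendationSource interface, together with the getSupportedEntityTypes() override, so all three sources (MostPopularSource here, RecentlyEditedSource and RecentlyViewedSource below) share one batch existence check. The interface itself is not part of this hunk, so the following is only a plausible reconstruction assembled from names the diff does use:

import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.recommendation.EntityProfileParams;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationParams;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Hypothetical shape of the shared interface; RecommendationUtils is the
// same-package helper already used by the code being removed.
interface EntityRecommendationSourceSketch {
  Set<String> getSupportedEntityTypes();

  default Stream<RecommendationContent> buildContent(
      List<String> candidateUrns, EntityService entityService) {
    Set<Urn> supported =
        candidateUrns.stream()
            .map(UrnUtils::getUrn)
            .filter(urn -> RecommendationUtils.isSupportedEntityType(urn, getSupportedEntityTypes()))
            .collect(Collectors.toSet());
    // One batch existence check (excluding soft-deleted entities) replaces
    // the previous per-urn EntityUtils.checkIfRemoved() round trips.
    Set<Urn> existing = entityService.exists(supported, false);
    // Stream the original list again so bucket (popularity) order survives.
    return candidateUrns.stream()
        .map(UrnUtils::getUrn)
        .filter(existing::contains)
        .map(
            urn ->
                new RecommendationContent()
                    .setEntity(urn)
                    .setValue(urn.toString())
                    .setParams(
                        new RecommendationParams()
                            .setEntityProfileParams(new EntityProfileParams().setUrn(urn))));
  }
}
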
SearchRequest request = new SearchRequest(); @@ -142,20 +144,4 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)); return request; } - - private Optional buildContent(@Nonnull String entityUrn) { - Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) - || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { - return Optional.empty(); - } - - return Optional.of( - new RecommendationContent() - .setEntity(entity) - .setValue(entityUrn) - .setParams( - new RecommendationParams() - .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); - } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java index 0815ffadd05c1..127b0f5c342c7 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java @@ -4,15 +4,11 @@ import com.datahub.util.exception.ESQueryException; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants; import com.linkedin.metadata.datahubusage.DataHubUsageEventType; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.EntityUtils; -import com.linkedin.metadata.recommendation.EntityProfileParams; import com.linkedin.metadata.recommendation.RecommendationContent; -import com.linkedin.metadata.recommendation.RecommendationParams; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; @@ -22,7 +18,6 @@ import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -38,12 +33,13 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.BucketOrder; +import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; @Slf4j @RequiredArgsConstructor -public class RecentlyEditedSource implements RecommendationSource { +public class RecentlyEditedSource implements EntityRecommendationSource { /** Entity Types that should be in scope for this type of recommendation. 
*/ private static final Set SUPPORTED_ENTITY_TYPES = ImmutableSet.of( @@ -60,7 +56,7 @@ public class RecentlyEditedSource implements RecommendationSource { private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; - private final EntityService _entityService; + private final EntityService _entityService; private static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; private static final String ENTITY_AGG_NAME = "entity"; @@ -108,10 +104,11 @@ public List getRecommendations( _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets().stream() - .map(bucket -> buildContent(bucket.getKeyAsString())) - .filter(Optional::isPresent) - .map(Optional::get) + List bucketUrns = + parsedTerms.getBuckets().stream() + .map(MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toList()); + return buildContent(bucketUrns, _entityService) .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { @@ -120,6 +117,11 @@ public List getRecommendations( } } + @Override + public Set getSupportedEntityTypes() { + return SUPPORTED_ENTITY_TYPES; + } + private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { // TODO: Proactively filter for entity types in the supported set. SearchRequest request = new SearchRequest(); @@ -147,20 +149,4 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)); return request; } - - private Optional buildContent(@Nonnull String entityUrn) { - Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) - || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { - return Optional.empty(); - } - - return Optional.of( - new RecommendationContent() - .setEntity(entity) - .setValue(entityUrn) - .setParams( - new RecommendationParams() - .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); - } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java index 47ffebee2e947..0ab5cf40cf4e5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java @@ -4,15 +4,11 @@ import com.datahub.util.exception.ESQueryException; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants; import com.linkedin.metadata.datahubusage.DataHubUsageEventType; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.EntityUtils; -import com.linkedin.metadata.recommendation.EntityProfileParams; import com.linkedin.metadata.recommendation.RecommendationContent; -import com.linkedin.metadata.recommendation.RecommendationParams; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; @@ -22,7 +18,6 @@ import io.opentelemetry.extension.annotations.WithSpan; import 
java.io.IOException; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -38,12 +33,13 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.BucketOrder; +import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; @Slf4j @RequiredArgsConstructor -public class RecentlyViewedSource implements RecommendationSource { +public class RecentlyViewedSource implements EntityRecommendationSource { /** Entity Types that should be in scope for this type of recommendation. */ private static final Set SUPPORTED_ENTITY_TYPES = ImmutableSet.of( @@ -60,7 +56,7 @@ public class RecentlyViewedSource implements RecommendationSource { private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; - private final EntityService _entityService; + private final EntityService _entityService; private static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; private static final String ENTITY_AGG_NAME = "entity"; @@ -108,10 +104,11 @@ public List getRecommendations( _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets().stream() - .map(bucket -> buildContent(bucket.getKeyAsString())) - .filter(Optional::isPresent) - .map(Optional::get) + List bucketUrns = + parsedTerms.getBuckets().stream() + .map(MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toList()); + return buildContent(bucketUrns, _entityService) .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { @@ -120,6 +117,11 @@ public List getRecommendations( } } + @Override + public Set getSupportedEntityTypes() { + return SUPPORTED_ENTITY_TYPES; + } + private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { // TODO: Proactively filter for entity types in the supported set. 
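        // The request below filters this user's view events in the
        // datahub_usage_event index and aggregates them by entity urn;
        // buildContent(bucketUrns, entityService) then batch-checks the urns
        // for supported types and non-soft-deleted existence before rendering.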
SearchRequest request = new SearchRequest(); @@ -151,20 +153,4 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)); return request; } - - private Optional buildContent(@Nonnull String entityUrn) { - Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) - || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { - return Optional.empty(); - } - - return Optional.of( - new RecommendationContent() - .setEntity(entity) - .setValue(entityUrn) - .setParams( - new RecommendationParams() - .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java index df332cacaa751..a2cb9b7412a8e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java @@ -19,6 +19,7 @@ import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.graph.SiblingGraphService; import java.net.URISyntaxException; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -60,12 +61,13 @@ public class SiblingGraphServiceTest { private GraphService _graphService; private SiblingGraphService _client; - EntityService _mockEntityService; + EntityService _mockEntityService; @BeforeClass public void setup() { _mockEntityService = Mockito.mock(EntityService.class); - when(_mockEntityService.exists(any())).thenReturn(true); + when(_mockEntityService.exists(any(Collection.class), any(Boolean.class))) + .thenAnswer(args -> new HashSet<>(args.getArgument(0))); _graphService = Mockito.mock(GraphService.class); _client = new SiblingGraphService(_mockEntityService, _graphService); } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 14e5259f90097..84433a2b439f4 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -40,6 +40,8 @@ import java.io.IOException; import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; @@ -276,7 +278,20 @@ private EntityClient entityClientHelper( AspectDao mockAspectDao = mock(AspectDao.class); when(mockAspectDao.batchGet(anySet())) - .thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); + .thenAnswer( + args -> { + Set ids = args.getArgument(0); + return ids.stream() + .map( + id -> { + EntityAspect mockEntityAspect = mock(EntityAspect.class); + when(mockEntityAspect.getUrn()).thenReturn(id.getUrn()); + when(mockEntityAspect.getAspect()).thenReturn(id.getAspect()); + when(mockEntityAspect.getVersion()).thenReturn(id.getVersion()); + return Map.entry(id, mockEntityAspect); + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + }); PreProcessHooks preProcessHooks = new 
PreProcessHooks(); preProcessHooks.setUiEnabled(true); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index f33ae5de130da..8ce7675edf580 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -39,12 +39,12 @@ public class GroupService { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; private final GraphClient _graphClient; public GroupService( @Nonnull EntityClient entityClient, - @Nonnull EntityService entityService, + @Nonnull EntityService entityService, @Nonnull GraphClient graphClient) { Objects.requireNonNull(entityClient, "entityClient must not be null!"); Objects.requireNonNull(entityService, "entityService must not be null!"); @@ -57,7 +57,7 @@ public GroupService( public boolean groupExists(@Nonnull Urn groupUrn) { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); - return _entityService.exists(groupUrn); + return _entityService.exists(groupUrn, true); } public Origin getGroupOrigin(@Nonnull final Urn groupUrn) { @@ -73,7 +73,7 @@ public void addUserToNativeGroup( Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Verify the user exists - if (!_entityService.exists(userUrn)) { + if (!_entityService.exists(userUrn, true)) { throw new RuntimeException("Failed to add member to group. User does not exist."); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index c631bede45364..40555107f4c79 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -63,7 +63,7 @@ public StatefulTokenService( public Boolean load(final String key) { final Urn accessUrn = Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); - return !_entityService.exists(accessUrn); + return !_entityService.exists(accessUrn, true); } }); this.salt = salt; diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java index 741d176f98c1b..ff46642827b30 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -30,7 +30,7 @@ public class NativeUserService { private static final long ONE_DAY_MILLIS = TimeUnit.DAYS.toMillis(1); - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; private final SecretService _secretService; private final AuthenticationConfiguration _authConfig; @@ -51,7 +51,7 @@ public void createNativeUser( Objects.requireNonNull(authentication, "authentication must not be null!"); final Urn userUrn = Urn.createFromString(userUrnString); - if (_entityService.exists(userUrn) + if (_entityService.exists(userUrn, true) // Should never fail these due to Controller level check, but just in case 
more usages get // put in || userUrn.toString().equals(SYSTEM_ACTOR) diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java index dc63b5e4a2897..de2c18782d3d8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java @@ -146,7 +146,7 @@ public String getClientId() { Urn clientIdUrn = UrnUtils.getUrn(CLIENT_ID_URN); // Create a new client id if it doesn't exist - if (!_entityService.exists(clientIdUrn)) { + if (!_entityService.exists(clientIdUrn, true)) { return createClientIdIfNotPresent(_entityService); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java index 5bd273f3bacf8..f9a762b2dd02a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java @@ -8,6 +8,7 @@ import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME; import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME; import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertThrows; @@ -151,7 +152,7 @@ public void testAuthenticateSuccess() throws Exception { configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) .thenReturn(keyAspectSpec); - Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.exists(Mockito.any(Urn.class), eq(true))).thenReturn(true); Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java index 6d0678d4f3558..2c1ab6f12efa1 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java @@ -55,7 +55,7 @@ public class GroupServiceTest { private static EntityRelationships _entityRelationships; private EntityClient _entityClient; - private EntityService _entityService; + private EntityService _entityService; private GraphClient _graphClient; private GroupService _groupService; @@ -121,7 +121,7 @@ public void testGroupExistsNullArguments() { @Test public void testGroupExistsPasses() { - when(_entityService.exists(_groupUrn)).thenReturn(true); + when(_entityService.exists(eq(_groupUrn), eq(true))).thenReturn(true); assertTrue(_groupService.groupExists(_groupUrn)); } @@ -147,7 +147,7 @@ public void testAddUserToNativeGroupNullArguments() { @Test public void testAddUserToNativeGroupPasses() throws Exception { - 
when(_entityService.exists(USER_URN)).thenReturn(true); + when(_entityService.exists(eq(USER_URN), eq(true))).thenReturn(true); when(_entityClient.batchGetV2( eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) .thenReturn(_entityResponseMap); @@ -232,7 +232,7 @@ public void testMigrateGroupMembershipToNativeGroupMembershipPasses() throws Exc when(_entityClient.batchGetV2( eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) .thenReturn(_entityResponseMap); - when(_entityService.exists(USER_URN)).thenReturn(true); + when(_entityService.exists(eq(USER_URN), eq(true))).thenReturn(true); _groupService.migrateGroupMembershipToNativeGroupMembership( Urn.createFromString(EXTERNAL_GROUP_URN_STRING), diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java index ed10022632a56..63ac0f048ad0a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java @@ -1,6 +1,7 @@ package com.datahub.authentication.token; import static com.datahub.authentication.token.TokenClaims.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Actor; @@ -180,7 +181,7 @@ public void generateRevokeToken() throws TokenException { Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) .thenReturn(keyAspectSpec); - Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.exists(Mockito.any(Urn.class), eq(true))).thenReturn(true); final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0); Mockito.when(mockService.deleteUrn(Mockito.any(Urn.class))).thenReturn(result); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index 9cb5d5cb697cc..2b584c3461452 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -85,7 +85,7 @@ public void testCreateNativeUserNullArguments() { expectedExceptionsMessageRegExp = "This user already exists! 
Cannot create a new user.") public void testCreateNativeUserUserAlreadyExists() throws Exception { // The user already exists - when(_entityService.exists(any())).thenReturn(true); + when(_entityService.exists(any(Urn.class), eq(true))).thenReturn(true); _nativeUserService.createNativeUser( USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); @@ -109,7 +109,7 @@ public void testCreateNativeUserUserSystemUser() throws Exception { @Test public void testCreateNativeUserPasses() throws Exception { - when(_entityService.exists(any())).thenReturn(false); + when(_entityService.exists(any(), any())).thenReturn(false); when(_secretService.generateSalt(anyInt())).thenReturn(SALT); when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); when(_secretService.getHashedPassword(any(), any())).thenReturn(HASHED_PASSWORD); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java index a0bbe69691db4..8baeb7d3f8443 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java @@ -76,7 +76,7 @@ public void setupTest() { @Test public void testEmitAnalyticsEvent() throws IOException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); - when(_entityService.exists(_clientIdUrn)).thenReturn(true); + when(_entityService.exists(eq(_clientIdUrn), eq(true))).thenReturn(true); when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) .thenReturn(TELEMETRY_CLIENT_ID); when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())) @@ -99,7 +99,7 @@ public void testEmitAnalyticsEvent() throws IOException { @Test public void testGetClientIdAlreadyExists() { - when(_entityService.exists(_clientIdUrn)).thenReturn(true); + when(_entityService.exists(eq(_clientIdUrn), eq(true))).thenReturn(true); when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) .thenReturn(TELEMETRY_CLIENT_ID); @@ -108,7 +108,7 @@ public void testGetClientIdAlreadyExists() { @Test public void testGetClientIdDoesNotExist() { - when(_entityService.exists(_clientIdUrn)).thenReturn(false); + when(_entityService.exists(eq(_clientIdUrn), eq(true))).thenReturn(false); assertNotNull(_trackingService.getClientId()); verify(_entityService, times(1)) diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java index ab86749eb431d..e3a3c452b85f1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java @@ -28,8 +28,8 @@ @RequiredArgsConstructor public class IngestRetentionPoliciesStep implements BootstrapStep { - private final RetentionService _retentionService; - private final EntityService _entityService; + private final RetentionService _retentionService; + private final EntityService _entityService; private final boolean _enableRetention; private final boolean _applyOnBootstrap; private final String pluginPath; @@ -63,7 +63,7 @@ public String name() { @Override public void execute() throws IOException, URISyntaxException { // 0. 
Execute preflight check to see whether we need to ingest policies - if (_entityService.exists(UPGRADE_ID_URN)) { + if (_entityService.exists(UPGRADE_ID_URN, true)) { log.info("Retention was applied. Skipping."); return; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java index 3c62f695ddd5f..7e232f939dc08 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java @@ -13,7 +13,7 @@ @RequiredArgsConstructor public class RemoveClientIdAspectStep implements BootstrapStep { - private final EntityService _entityService; + private final EntityService _entityService; private static final String UPGRADE_ID = "remove-unknown-aspects"; private static final String INVALID_TELEMETRY_ASPECT_NAME = "clientId"; @@ -27,7 +27,7 @@ public String name() { @Override public void execute() throws Exception { try { - if (_entityService.exists(REMOVE_UNKNOWN_ASPECTS_URN)) { + if (_entityService.exists(REMOVE_UNKNOWN_ASPECTS_URN, true)) { log.info("Unknown aspects have been removed. Skipping..."); return; } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java index fc935514f4138..d7c8268903508 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java @@ -136,7 +136,7 @@ public ResponseEntity delete(String urn) { public ResponseEntity head(String urn) { try { Urn entityUrn = Urn.createFromString(urn); - if (_entityService.exists(entityUrn)) { + if (_entityService.exists(entityUrn, true)) { return new ResponseEntity<>(HttpStatus.NO_CONTENT); } else { return new ResponseEntity<>(HttpStatus.NOT_FOUND); diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java index fdf99cdc303c1..b082a735e8084 100644 --- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java +++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java @@ -42,6 +42,7 @@ import com.linkedin.schema.StringType; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -212,7 +213,7 @@ public RollbackRunResult deleteUrn(Urn urn) { } @Override - public Boolean exists(Urn urn) { - return null; + public Set exists(@NotNull Collection urns) { + return Set.of(); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index dfd986c2ebea0..afdaf06802a11 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -1057,6 
+1057,6 @@ public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr)
     }
     log.info("EXISTS for {}", urnStr);
     return RestliUtil.toTask(
-        () -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists"));
+        () -> _entityService.exists(urn, true), MetricRegistry.name(this.getClass(), "exists"));
   }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java
index 89b0e5ba9a558..71573aa2b10e0 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java
@@ -25,6 +25,7 @@
 import com.linkedin.mxe.SystemMetadata;
 import com.linkedin.util.Pair;
 import java.net.URISyntaxException;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -312,9 +313,27 @@ RollbackRunResult rollbackWithConditions(
   IngestResult ingestProposal(
       MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async);

-  Boolean exists(Urn urn);
+  /**
+   * Returns a set of urns of entities that exist (have materialized aspects).
+   *
+   * @param urns the list of urns of the entities to check
+   * @return a set of urns of entities that exist.
+   */
+  Set<Urn> exists(@Nonnull final Collection<Urn> urns, boolean includeSoftDelete);
+
+  /**
+   * Returns a set of urns of entities that exist (have materialized aspects), including soft-deleted entities.
+   *
+   * @param urns the list of urns of the entities to check
+   * @return a set of urns of entities that exist.
+   */
+  default Set<Urn> exists(@Nonnull final Collection<Urn> urns) {
+    return exists(urns, true);
+  }

-  Boolean isSoftDeleted(@Nonnull final Urn urn);
+  default boolean exists(@Nonnull Urn urn, boolean includeSoftDelete) {
+    return exists(List.of(urn), includeSoftDelete).contains(urn);
+  }

   void setWritable(boolean canWrite);
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java
new file mode 100644
index 0000000000000..546c2856c28ac
--- /dev/null
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java
@@ -0,0 +1,37 @@
+package com.linkedin.metadata.recommendation.candidatesource;
+
+import com.linkedin.common.urn.Urn;
+import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.recommendation.EntityProfileParams;
+import com.linkedin.metadata.recommendation.RecommendationContent;
+import com.linkedin.metadata.recommendation.RecommendationParams;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Stream;
+import javax.annotation.Nonnull;
+
+public interface EntityRecommendationSource extends RecommendationSource {
+  Set<String> getSupportedEntityTypes();
+
+  default RecommendationContent buildContent(@Nonnull Urn urn) {
+    return new RecommendationContent()
+        .setEntity(urn)
+        .setValue(urn.toString())
+        .setParams(
+            new RecommendationParams()
+                .setEntityProfileParams(new EntityProfileParams().setUrn(urn)));
+  }
+
+  default Stream<RecommendationContent> buildContent(
+      @Nonnull List<String> entityUrns, EntityService<?> entityService) {
+    List<Urn> entities =
+        entityUrns.stream()
+            .map(UrnUtils::getUrn)
+            .filter(urn -> getSupportedEntityTypes().contains(urn.getEntityType()))
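+            // Keep only urns whose entity type this source can render; the batch
+            // exists(entities, false) call just below then drops missing and
+            // soft-deleted entities in one round trip instead of one per urn.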
+ .toList(); + Set existingNonRemoved = entityService.exists(entities, false); + + return entities.stream().filter(existingNonRemoved::contains).map(this::buildContent); + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index c618db801d9d6..71c4d357ad1eb 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -1,20 +1,29 @@ package com.linkedin.metadata.shared; import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.AbstractArrayTemplate; import com.linkedin.metadata.browse.BrowseResult; +import com.linkedin.metadata.browse.BrowseResultEntity; import com.linkedin.metadata.browse.BrowseResultEntityArray; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.EntityLineageResult; +import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.search.LineageScrollResult; +import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchEntityArray; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import java.util.Objects; +import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -23,7 +32,7 @@ public class ValidationUtils { public static SearchResult validateSearchResult( - final SearchResult searchResult, @Nonnull final EntityService entityService) { + final SearchResult searchResult, @Nonnull final EntityService entityService) { if (searchResult == null) { return null; } @@ -37,16 +46,16 @@ public static SearchResult validateSearchResult( .setNumEntities(searchResult.getNumEntities()); SearchEntityArray validatedEntities = - searchResult.getEntities().stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedSearchResult.setEntities(validatedEntities); return validatedSearchResult; } public static ScrollResult validateScrollResult( - final ScrollResult scrollResult, @Nonnull final EntityService entityService) { + final ScrollResult scrollResult, @Nonnull final EntityService entityService) { if (scrollResult == null) { return null; } @@ -62,16 +71,16 @@ public static ScrollResult validateScrollResult( } SearchEntityArray validatedEntities = - scrollResult.getEntities().stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + validatedUrns(scrollResult.getEntities(), SearchEntity::getEntity, entityService, true) .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedScrollResult.setEntities(validatedEntities); return validatedScrollResult; } public static BrowseResult validateBrowseResult( - final BrowseResult browseResult, @Nonnull final 
EntityService entityService) { + final BrowseResult browseResult, @Nonnull final EntityService entityService) { if (browseResult == null) { return null; } @@ -88,16 +97,16 @@ public static BrowseResult validateBrowseResult( .setNumElements(browseResult.getNumElements()); BrowseResultEntityArray validatedEntities = - browseResult.getEntities().stream() - .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) + validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + validatedBrowseResult.setEntities(validatedEntities); return validatedBrowseResult; } public static ListResult validateListResult( - final ListResult listResult, @Nonnull final EntityService entityService) { + final ListResult listResult, @Nonnull final EntityService entityService) { if (listResult == null) { return null; } @@ -110,16 +119,17 @@ public static ListResult validateListResult( .setTotal(listResult.getTotal()); UrnArray validatedEntities = - listResult.getEntities().stream() - .filter(entityService::exists) + validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) .collect(Collectors.toCollection(UrnArray::new)); + validatedListResult.setEntities(validatedEntities); return validatedListResult; } public static LineageSearchResult validateLineageSearchResult( - final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { + final LineageSearchResult lineageSearchResult, + @Nonnull final EntityService entityService) { if (lineageSearchResult == null) { return null; } @@ -133,9 +143,13 @@ public static LineageSearchResult validateLineageSearchResult( .setNumEntities(lineageSearchResult.getNumEntities()); LineageSearchEntityArray validatedEntities = - lineageSearchResult.getEntities().stream() - .filter(entity -> entityService.exists(entity.getEntity())) + validatedUrns( + lineageSearchResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true) .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + validatedLineageSearchResult.setEntities(validatedEntities); return validatedLineageSearchResult; @@ -143,7 +157,7 @@ public static LineageSearchResult validateLineageSearchResult( public static EntityLineageResult validateEntityLineageResult( @Nullable final EntityLineageResult entityLineageResult, - @Nonnull final EntityService entityService) { + @Nonnull final EntityService entityService) { if (entityLineageResult == null) { return null; } @@ -155,10 +169,12 @@ public static EntityLineageResult validateEntityLineageResult( .setCount(entityLineageResult.getCount()) .setTotal(entityLineageResult.getTotal()); - final LineageRelationshipArray validatedRelationships = - entityLineageResult.getRelationships().stream() - .filter(relationship -> entityService.exists(relationship.getEntity())) - .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) + LineageRelationshipArray validatedRelationships = + validatedUrns( + entityLineageResult.getRelationships(), + LineageRelationship::getEntity, + entityService, + false) .collect(Collectors.toCollection(LineageRelationshipArray::new)); validatedEntityLineageResult.setFiltered( @@ -173,7 +189,8 @@ public static EntityLineageResult validateEntityLineageResult( } public static LineageScrollResult validateLineageScrollResult( - final LineageScrollResult lineageScrollResult, @Nonnull final EntityService entityService) { + final LineageScrollResult 
lineageScrollResult, + @Nonnull final EntityService entityService) { if (lineageScrollResult == null) { return null; } @@ -189,13 +206,29 @@ public static LineageScrollResult validateLineageScrollResult( } LineageSearchEntityArray validatedEntities = - lineageScrollResult.getEntities().stream() - .filter(entity -> entityService.exists(entity.getEntity())) + validatedUrns( + lineageScrollResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true) .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + validatedLineageScrollResult.setEntities(validatedEntities); return validatedLineageScrollResult; } + private static Stream validatedUrns( + final AbstractArrayTemplate array, + Function urnFunction, + @Nonnull final EntityService entityService, + boolean includeSoftDeleted) { + + Set existingUrns = + entityService.exists( + array.stream().map(urnFunction).collect(Collectors.toList()), includeSoftDeleted); + return array.stream().filter(item -> existingUrns.contains(urnFunction.apply(item))); + } + private ValidationUtils() {} } From 14dee5723de8aac6ae8e566988f4bbcf3fac98ea Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 19 Jan 2024 16:18:15 -0800 Subject: [PATCH 399/792] feat(ingest/airflow): support airflow 2.8 dataset listeners (#9664) --- .github/workflows/airflow-plugin.yml | 9 +++++--- .../datahub_airflow_plugin/_airflow_shims.py | 3 +++ .../datahub_listener.py | 22 +++++++++++++++++++ .../airflow-plugin/tox.ini | 8 +++++-- 4 files changed, 37 insertions(+), 5 deletions(-) diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 158d3416bc2a9..7ae7b87b0f5ce 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -40,13 +40,16 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0"' + extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0" "Flask-Session<0.6.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0"' + extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0" "Flask-Session<0.6.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0 pydantic==2.4.2" + extra_pip_requirements: 'apache-airflow~=2.7.0 pydantic==2.4.2 "Flask-Session<0.6.0"' + extra_pip_extras: plugin-v2 + - python-version: "3.10" + extra_pip_requirements: 'apache-airflow>=2.8.0 pydantic>=2.4.2 "Flask-Session<0.6.0"' extra_pip_extras: plugin-v2 fail-fast: false steps: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py index d384958cf3ddb..c1e2dd4cc422d 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py @@ -32,6 +32,9 @@ HAS_AIRFLOW_STANDALONE_CMD = AIRFLOW_VERSION >= packaging.version.parse("2.2.0.dev0") HAS_AIRFLOW_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.3.0.dev0") HAS_AIRFLOW_DAG_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.5.0.dev0") +HAS_AIRFLOW_DATASET_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse( + "2.8.0.dev0" +) NEEDS_AIRFLOW_LISTENER_MODULE = AIRFLOW_VERSION < packaging.version.parse( "2.5.0.dev0" ) 
or PLUGGY_VERSION <= packaging.version.parse("1.0.0") diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index debc91700d3db..a7f588a166dde 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -24,6 +24,7 @@ from datahub_airflow_plugin._airflow_shims import ( HAS_AIRFLOW_DAG_LISTENER_API, + HAS_AIRFLOW_DATASET_LISTENER_API, Operator, get_task_inlets, get_task_outlets, @@ -40,6 +41,7 @@ _F = TypeVar("_F", bound=Callable[..., None]) if TYPE_CHECKING: + from airflow.datasets import Dataset from airflow.models import DAG, DagRun, TaskInstance from sqlalchemy.orm import Session @@ -502,3 +504,23 @@ def on_dag_run_running(self, dag_run: "DagRun", msg: str) -> None: self.emitter.flush() # TODO: Add hooks for on_dag_run_success, on_dag_run_failed -> call AirflowGenerator.complete_dataflow + + if HAS_AIRFLOW_DATASET_LISTENER_API: + + @hookimpl + @run_in_thread + def on_dataset_created(self, dataset: "Dataset") -> None: + self._set_log_level() + + logger.debug( + f"DataHub listener got notification about dataset create for {dataset}" + ) + + @hookimpl + @run_in_thread + def on_dataset_changed(self, dataset: "Dataset") -> None: + self._set_log_level() + + logger.debug( + f"DataHub listener got notification about dataset change for {dataset}" + ) diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 27ae2ce65ba65..154ced6b8deb9 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py310-airflow27 +envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py310-airflow27, py310-airflow28 [testenv] use_develop = true @@ -30,10 +30,14 @@ deps = # constraints file is overly restrictive. airflow27: apache-airflow~=2.7.0 airflow27: pydantic==2.4.2 + airflow28: apache-airflow~=2.8.0 + # Apparently Flask-Session 0.6.0 was released by accident. + # See https://github.com/pallets-eco/flask-session/issues/209 + airflow24,airflow26,airflow27,airflow28: Flask-Session<0.6.0 commands = pytest --cov-append {posargs} # For Airflow 2.4+, add the plugin-v2 extra. 
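# As a hedged local-usage example (assumes tox is installed), a single
# combination can be exercised with: tox -e py310-airflow28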
-[testenv:py310-airflow{24,26,27}] +[testenv:py310-airflow{24,26,27,28}] extras = dev,integration-tests,plugin-v2 From 0906ce832d2bdc3f9b63e3f52cff56772fb8ecf9 Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Sat, 20 Jan 2024 00:45:22 +0000 Subject: [PATCH 400/792] fix(docs): Correct Kafka Connect sink documentation (#9672) --- metadata-ingestion/docs/sources/kafka-connect/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/docs/sources/kafka-connect/README.md b/metadata-ingestion/docs/sources/kafka-connect/README.md index e4f64c62914c5..5535f89259082 100644 --- a/metadata-ingestion/docs/sources/kafka-connect/README.md +++ b/metadata-ingestion/docs/sources/kafka-connect/README.md @@ -21,4 +21,4 @@ This ingestion source maps the following Source System Concepts to DataHub Conce Works only for - Source connectors: JDBC, Debezium, Mongo and Generic connectors with user-defined lineage graph -- Sink connectors: BigQuery, Confluent S3, Snowflake +- Sink connectors: BigQuery, Confluent, S3, Snowflake From 9168c4550a1553c1efd205662181df6ae3ca3e86 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Sat, 20 Jan 2024 06:36:36 +0530 Subject: [PATCH 401/792] docs(observability): add databricks as supported source (#9619) --- docs/managed-datahub/observe/column-assertions.md | 1 + docs/managed-datahub/observe/custom-sql-assertions.md | 1 + docs/managed-datahub/observe/freshness-assertions.md | 1 + docs/managed-datahub/observe/volume-assertions.md | 1 + 4 files changed, 4 insertions(+) diff --git a/docs/managed-datahub/observe/column-assertions.md b/docs/managed-datahub/observe/column-assertions.md index 99a764f771676..8ef32e73b4b72 100644 --- a/docs/managed-datahub/observe/column-assertions.md +++ b/docs/managed-datahub/observe/column-assertions.md @@ -35,6 +35,7 @@ Column Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. diff --git a/docs/managed-datahub/observe/custom-sql-assertions.md b/docs/managed-datahub/observe/custom-sql-assertions.md index d4a09b434ca79..11e9aa807b616 100644 --- a/docs/managed-datahub/observe/custom-sql-assertions.md +++ b/docs/managed-datahub/observe/custom-sql-assertions.md @@ -43,6 +43,7 @@ Custom SQL Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md index 82de423f6f2de..416db6a65343e 100644 --- a/docs/managed-datahub/observe/freshness-assertions.md +++ b/docs/managed-datahub/observe/freshness-assertions.md @@ -43,6 +43,7 @@ Freshness Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. diff --git a/docs/managed-datahub/observe/volume-assertions.md b/docs/managed-datahub/observe/volume-assertions.md index 5f5aff33a5ce2..67971d0c20037 100644 --- a/docs/managed-datahub/observe/volume-assertions.md +++ b/docs/managed-datahub/observe/volume-assertions.md @@ -44,6 +44,7 @@ Volume Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. 
Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. From 087d3fdb0dba1fccd802161e44a74a22edc211ac Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 19 Jan 2024 23:27:44 -0600 Subject: [PATCH 402/792] feat(build): conditional ci (#9673) --- .github/actions/ci-optimization/action.yml | 79 ++++++++++++ .github/workflows/build-and-test.yml | 34 +++++- .github/workflows/docker-unified.yml | 132 +++++++++++++++++---- 3 files changed, 218 insertions(+), 27 deletions(-) create mode 100644 .github/actions/ci-optimization/action.yml diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml new file mode 100644 index 0000000000000..404e0bab814e8 --- /dev/null +++ b/.github/actions/ci-optimization/action.yml @@ -0,0 +1,79 @@ +name: 'Identify CI Optimizations' +description: 'Determine if code changes are specific to certain modules.' + +outputs: + frontend-only: + description: "Frontend only change" + value: ${{ steps.filter.outputs.frontend == 'true' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'false' }} + ingestion-only: + description: "Ingestion only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'true' && steps.filter.outputs.backend == 'false' }} + backend-only: + description: "Backend only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'true' }} + backend-change: + description: "Backend code has changed" + value: ${{ steps.filter.outputs.backend == 'true' }} + ingestion-change: + description: "Ingestion code has changed" + value: ${{ steps.filter.outputs.ingestion == 'true' }} + frontend-change: + description: "Frontend code has changed" + value: ${{ steps.filter.outputs.frontend == 'true' }} + docker-change: + description: "Docker code has changed" + value: ${{ steps.filter.outputs.docker == 'true' }} + kafka-setup-change: + description: "Kafka setup docker change" + value: ${{ steps.filter.outputs.kafka-setup == 'true' }} + mysql-setup-change: + description: "Mysql setup docker change" + value: ${{ steps.filter.outputs.mysql-setup == 'true' }} + postgres-setup-change: + description: "Postgres setup docker change" + value: ${{ steps.filter.outputs.postgres-setup == 'true' }} + elasticsearch-setup-change: + description: "Elasticsearch setup docker change" + value: ${{ steps.filter.outputs.elasticsearch-setup == 'true' }} +runs: + using: "composite" + steps: + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: | + frontend: + - "datahub-frontend/**" + - "datahub-web-react/**" + - "smoke-test/tests/cypress/**" + - "docker/datahub-frontend/**" + ingestion: + - "metadata-ingestion-modules/airflow-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + - "smoke-test/**" + - "docker/datahub-ingestion-**" + docker: + - "docker/**" + backend: + - "metadata-models/**" + - "datahub-upgrade/**" + - "entity-registry/**" + - "li-utils/**" + - "metadata-auth/**" + - "metadata-dao-impl/**" + - "metadata-events/**" + - "metadata-io/**" + - "metadata-jobs/**" + - "metadata-service/**" + - "metadata-utils/**" + - "smoke-test/**" + - "docker/**" + kafka-setup: + - "docker/kafka-setup/**" + mysql-setup: + - "docker/mysql-setup/**" + postgres-setup: + - "docker/postgres-setup/**" + elasticsearch-setup: + - 
"docker/elasticsearch-setup/**" \ No newline at end of file diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 64493e99211b4..180e0472a8d99 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -20,6 +20,25 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 + - uses: ./.github/actions/ci-optimization + id: ci-optimize + build: strategy: fail-fast: false @@ -36,11 +55,13 @@ jobs: timezone: "America/New_York" runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - uses: szenius/set-timezone@v1.2 with: timezoneLinux: ${{ matrix.timezone }} - - uses: hsheth2/sane-checkout-action@v1 + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 - name: Set up JDK 17 uses: actions/setup-java@v3 with: @@ -51,12 +72,12 @@ jobs: with: python-version: "3.10" cache: pip - - name: Gradle build (and test) for metadata ingestion - if: ${{ matrix.command == 'except_metadata_ingestion' }} + - name: Gradle build (and test) for NOT metadata ingestion + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} run: | ./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel - name: Gradle build (and test) for frontend - if: ${{ matrix.command == 'frontend' }} + if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }} run: | ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel env: @@ -75,8 +96,11 @@ jobs: quickstart-compose-validation: runs-on: ubuntu-latest + needs: setup + if: ${{ needs.setup.outputs.docker_change == 'true' }} steps: - - uses: actions/checkout@v3 + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index d246bf329bcb0..5e9112726b010 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -51,9 +51,19 @@ jobs: short_sha: ${{ steps.tag.outputs.short_sha }} branch_name: ${{ steps.tag.outputs.branch_name }} repository_name: ${{ steps.tag.outputs.repository_name }} + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change 
== 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + backend_only: ${{ steps.ci-optimize.outputs.backend-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} steps: - - name: Checkout - uses: actions/checkout@v3 + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 - name: Compute Tag id: tag run: | @@ -75,11 +85,14 @@ jobs: run: | echo "Enable publish: ${{ env.ENABLE_PUBLISH }}" echo "publish=${{ env.ENABLE_PUBLISH }}" >> $GITHUB_OUTPUT + - uses: ./.github/actions/ci-optimization + id: ci-optimize gms_build: name: Build and Push DataHub GMS Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -113,6 +126,7 @@ jobs: name: "[Monitoring] Scan GMS images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, gms_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy uses: actions/checkout@v3 @@ -142,6 +156,7 @@ jobs: name: Build and Push DataHub MAE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -171,6 +186,7 @@ jobs: name: "[Monitoring] Scan MAE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mae_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -204,6 +220,7 @@ jobs: name: Build and Push DataHub MCE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -233,6 +250,7 @@ jobs: name: "[Monitoring] Scan MCE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mce_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -266,6 +284,7 @@ jobs: name: Build and Push DataHub Upgrade Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -295,6 +314,7 @@ jobs: name: "[Monitoring] Scan DataHub Upgrade images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_upgrade_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code 
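      # security-events: write is what lets the upload-sarif step below publish
      # Trivy findings to the repository's code-scanning tab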
security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -328,6 +348,7 @@ jobs: name: Build and Push DataHub Frontend Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -359,6 +380,7 @@ jobs: name: "[Monitoring] Scan Frontend images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, frontend_build] + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -392,6 +414,7 @@ jobs: name: Build and Push DataHub Kafka Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.kafka_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -412,6 +435,7 @@ jobs: name: Build and Push DataHub MySQL Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.mysql_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -432,6 +456,7 @@ jobs: name: Build and Push DataHub Elasticsearch Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -454,6 +479,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: setup + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -486,6 +512,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -526,6 +553,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -568,6 +596,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -623,6 +652,7 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion Slim images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy uses: actions/checkout@v3 @@ -655,6 +685,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, 
datahub_ingestion_base_full_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -709,6 +740,7 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_full_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy uses: actions/checkout@v3 @@ -734,22 +766,31 @@ jobs: with: sarif_file: "trivy-results.sarif" + smoke_test_matrix: + runs-on: ubuntu-latest + needs: setup + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - id: set-matrix + run: | + if [ '${{ needs.setup.outputs.frontend_only }}' == 'true' ]; then + echo 'matrix=["cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT + elif [ '${{ needs.setup.outputs.ingestion_only }}' == 'true' ]; then + echo 'matrix=["no_cypress_suite0","no_cypress_suite1"]' >> $GITHUB_OUTPUT + elif [ '${{ needs.setup.outputs.backend_change }}' == 'true' ]; then + echo 'matrix=["no_cypress_suite0","no_cypress_suite1","cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT + else + echo 'matrix=[]' >> $GITHUB_OUTPUT + fi + smoke_test: name: Run Smoke Tests runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - test_strategy: - [ - "no_cypress_suite0", - "no_cypress_suite1", - "cypress_suite1", - "cypress_rest", - ] needs: [ setup, + smoke_test_matrix, gms_build, frontend_build, kafka_setup_build, @@ -760,6 +801,11 @@ jobs: datahub_upgrade_build, datahub_ingestion_slim_build, ] + strategy: + fail-fast: false + matrix: + test_strategy: ${{ fromJson(needs.smoke_test_matrix.outputs.matrix) }} + if: ${{ always() && !failure() && !cancelled() && needs.smoke_test_matrix.outputs.matrix != '[]' }} steps: - name: Disk Check run: df -h . && docker images @@ -788,57 +834,99 @@ jobs: run: df -h . 
&& docker images - name: Download GMS image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.gms_build.result == 'success' }} with: image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Frontend image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.frontend_build.result == 'success' }} with: image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Kafka Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.kafka_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Mysql Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.mysql_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Elastic Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.elasticsearch_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MCE Consumer image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.mce_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MAE Consumer image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.mae_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download upgrade image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.datahub_upgrade_build.result == 'success' }} with: image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download datahub-ingestion-slim image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }} + if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' && needs.datahub_ingestion_slim_build.result == 'success' }} with: image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }} - name: Disk Check run: df -h . 
&& docker images + - name: CI Optimization Head Images + # When publishing all tests/images are built (no optimizations) + if: ${{ needs.setup.outputs.publish != 'true' }} + run: | + if [ '${{ needs.setup.outputs.backend_change }}' == 'false' ]; then + echo 'GMS/Upgrade/MCE/MAE head images' + docker pull '${{ env.DATAHUB_GMS_IMAGE }}:head' + docker pull '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' + docker pull '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' + docker pull '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' + docker tag '${{ env.DATAHUB_GMS_IMAGE }}:head' '${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' '${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.frontend_change }}' == 'false' ]; then + echo 'Frontend head images' + docker pull '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' + docker tag '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' '${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.kafka_setup_change }}' == 'false' ]; then + echo 'kafka-setup head images' + docker pull '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.mysql_setup_change }}' == 'false' ]; then + echo 'mysql-setup head images' + docker pull '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.elasticsearch_setup_change }}' == 'false' ]; then + echo 'elasticsearch-setup head images' + docker pull '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.ingestion_change }}' == 'false' ]; then + echo 'datahub-ingestion head-slim images' + docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' + if [ '${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}' != 'head-slim' ]; then + docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}' + fi + fi - name: run quickstart env: DATAHUB_TELEMETRY_ENABLED: false DATAHUB_VERSION: ${{ needs.setup.outputs.unique_tag }} DATAHUB_ACTIONS_IMAGE: ${{ env.DATAHUB_INGESTION_IMAGE }} - ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag }} + ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }} ACTIONS_EXTRA_PACKAGES: "acryl-datahub-actions[executor]==0.0.13 acryl-datahub-actions==0.0.13 acryl-datahub==0.10.5" ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml" run: | From d78db0abee07e33b44342ce1920889324303b137 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 14:53:50 +0530 Subject: [PATCH 403/792] build(deps-dev): bump vite from 4.5.0 to 4.5.2 in /datahub-web-react (#9676) 
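Aside: the `smoke_test_matrix` job introduced above is the heart of this optimization, and its shell `if/elif` reduces to a small pure function. A minimal Python rendering of the same decision, for readers tracing the conditions (the function and flag names are illustrative, not part of the patch):

```python
from typing import List

def select_smoke_test_matrix(
    frontend_only: bool, ingestion_only: bool, backend_change: bool
) -> List[str]:
    # Mirrors the set-matrix step: frontend-only changes run just the Cypress
    # suites, ingestion-only changes run just the non-Cypress suites, backend
    # changes run everything, and anything else yields an empty matrix.
    if frontend_only:
        return ["cypress_suite1", "cypress_rest"]
    if ingestion_only:
        return ["no_cypress_suite0", "no_cypress_suite1"]
    if backend_change:
        return [
            "no_cypress_suite0",
            "no_cypress_suite1",
            "cypress_suite1",
            "cypress_rest",
        ]
    return []  # '[]' makes the smoke_test job skip via its `if:` guard

assert select_smoke_test_matrix(False, True, False) == [
    "no_cypress_suite0",
    "no_cypress_suite1",
]
```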
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index cf63d5c313bdb..97830cec4e164 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -126,7 +126,7 @@ "less": "^4.2.0", "prettier": "^2.8.8", "source-map-explorer": "^2.5.2", - "vite": "^4.5.0", + "vite": "^4.5.2", "vite-plugin-babel-macros": "^1.0.6", "vite-plugin-static-copy": "^0.17.0", "vite-plugin-svgr": "^4.1.0", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 9ea6c58eadc6b..37801e42b3eab 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -10835,10 +10835,10 @@ vite-plugin-svgr@^4.1.0: "@svgr/core" "^8.1.0" "@svgr/plugin-jsx" "^8.1.0" -"vite@^3.0.0 || ^4.0.0 || ^5.0.0-0", "vite@^3.1.0 || ^4.0.0 || ^5.0.0-0", vite@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.0.tgz#ec406295b4167ac3bc23e26f9c8ff559287cff26" - integrity sha512-ulr8rNLA6rkyFAlVWw2q5YJ91v098AFQ2R0PRFwPzREXOUJQPtFUG0t+/ZikhaOCDqFoDhN6/v8Sq0o4araFAw== +"vite@^3.0.0 || ^4.0.0 || ^5.0.0-0", "vite@^3.1.0 || ^4.0.0 || ^5.0.0-0", vite@^4.5.2: + version "4.5.2" + resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.2.tgz#d6ea8610e099851dad8c7371599969e0f8b97e82" + integrity sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w== dependencies: esbuild "^0.18.10" postcss "^8.4.27" From 20ad83d0b1be4fbb54e10aa183afba64674deecf Mon Sep 17 00:00:00 2001 From: Quentin FLEURENT NAMBOT Date: Sat, 20 Jan 2024 10:24:10 +0100 Subject: [PATCH 404/792] feat(superset): add some custom properties for dashboards (#9670) --- .../src/datahub/ingestion/source/superset.py | 24 ++++++++++++- .../superset/golden_test_ingest.json | 16 +++++++-- .../superset/golden_test_stateful_ingest.json | 9 ++++- .../integration/superset/test_superset.py | 35 +++++++++++++++++-- 4 files changed, 78 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py index 7f607666db313..931069a921058 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/superset.py +++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py @@ -267,13 +267,35 @@ def construct_dashboard_from_api_data(self, dashboard_data): f"urn:li:chart:({self.platform},{value.get('meta', {}).get('chartId', 'unknown')})" ) + # Build properties + custom_properties = { + "Status": str(dashboard_data.get("status")), + "IsPublished": str(dashboard_data.get("published", False)).lower(), + "Owners": ", ".join( + map( + lambda owner: owner.get("username", "unknown"), + dashboard_data.get("owners", []), + ) + ), + "IsCertified": str( + True if dashboard_data.get("certified_by") else False + ).lower(), + } + + if dashboard_data.get("certified_by"): + custom_properties["CertifiedBy"] = dashboard_data.get("certified_by") + custom_properties["CertificationDetails"] = str( + dashboard_data.get("certification_details") + ) + + # Create DashboardInfo object dashboard_info = DashboardInfoClass( description="", title=title, charts=chart_urns, lastModified=last_modified, dashboardUrl=dashboard_url, - customProperties={}, + customProperties=custom_properties, ) dashboard_snapshot.aspects.append(dashboard_info) return 
dashboard_snapshot diff --git a/metadata-ingestion/tests/integration/superset/golden_test_ingest.json b/metadata-ingestion/tests/integration/superset/golden_test_ingest.json index 6a522281f1c9d..74312940f06e7 100644 --- a/metadata-ingestion/tests/integration/superset/golden_test_ingest.json +++ b/metadata-ingestion/tests/integration/superset/golden_test_ingest.json @@ -11,7 +11,14 @@ }, { "com.linkedin.pegasus2avro.dashboard.DashboardInfo": { - "customProperties": {}, + "customProperties": { + "Status": "published", + "IsPublished": "true", + "Owners": "test_username_1, test_username_2", + "IsCertified": "true", + "CertifiedBy": "Certification team", + "CertificationDetails": "Approved" + }, "title": "test_dashboard_title_1", "description": "", "charts": [ @@ -52,7 +59,12 @@ }, { "com.linkedin.pegasus2avro.dashboard.DashboardInfo": { - "customProperties": {}, + "customProperties": { + "Status": "draft", + "IsPublished": "false", + "Owners": "unknown", + "IsCertified": "false" + }, "title": "test_dashboard_title_2", "description": "", "charts": [ diff --git a/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json b/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json index 268fa37396245..cf38341085c1b 100644 --- a/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json +++ b/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json @@ -11,7 +11,14 @@ }, { "com.linkedin.pegasus2avro.dashboard.DashboardInfo": { - "customProperties": {}, + "customProperties": { + "Status": "published", + "IsPublished": "true", + "Owners": "test_username_1, test_username_2", + "IsCertified": "true", + "CertifiedBy": "Certification team", + "CertificationDetails": "Approved" + }, "title": "test_dashboard_title_1", "description": "", "charts": [ diff --git a/metadata-ingestion/tests/integration/superset/test_superset.py b/metadata-ingestion/tests/integration/superset/test_superset.py index bc299e36515e1..b3b5982016146 100644 --- a/metadata-ingestion/tests/integration/superset/test_superset.py +++ b/metadata-ingestion/tests/integration/superset/test_superset.py @@ -41,6 +41,18 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None: "dashboard_title": "test_dashboard_title_1", "url": "/dashboard/test_dashboard_url_1", "position_json": '{"CHART-test-1": {"meta": { "chartId": "10" }}, "CHART-test-2": {"meta": { "chartId": "11" }}}', + "status": "published", + "published": True, + "owners": [ + { + "username": "test_username_1", + }, + { + "username": "test_username_2", + }, + ], + "certified_by": "Certification team", + "certification_details": "Approved", }, { "id": "2", @@ -51,6 +63,15 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None: "dashboard_title": "test_dashboard_title_2", "url": "/dashboard/test_dashboard_url_2", "position_json": '{"CHART-test-3": {"meta": { "chartId": "12" }}, "CHART-test-4": {"meta": { "chartId": "13" }}}', + "status": "draft", + "published": False, + "owners": [ + { + "first_name": "name", + }, + ], + "certified_by": "", + "certification_details": "", }, ], }, @@ -151,7 +172,6 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None: @freeze_time(FROZEN_TIME) @pytest.mark.integration def test_superset_ingest(pytestconfig, tmp_path, mock_time, requests_mock): - test_resources_dir = pytestconfig.rootpath / "tests/integration/superset" register_mock_api(request_mock=requests_mock) @@ -193,7 +213,6 @@ def 
test_superset_ingest(pytestconfig, tmp_path, mock_time, requests_mock): def test_superset_stateful_ingest( pytestconfig, tmp_path, mock_time, requests_mock, mock_datahub_graph ): - test_resources_dir = pytestconfig.rootpath / "tests/integration/superset" register_mock_api(request_mock=requests_mock) @@ -241,6 +260,18 @@ def test_superset_stateful_ingest( "dashboard_title": "test_dashboard_title_1", "url": "/dashboard/test_dashboard_url_1", "position_json": '{"CHART-test-1": {"meta": { "chartId": "10" }}, "CHART-test-2": {"meta": { "chartId": "11" }}}', + "status": "published", + "published": True, + "owners": [ + { + "username": "test_username_1", + }, + { + "username": "test_username_2", + }, + ], + "certified_by": "Certification team", + "certification_details": "Approved", }, ], }, From 61165994587b1bd9f430d693aa9a73948903a68f Mon Sep 17 00:00:00 2001 From: Quentin FLEURENT NAMBOT Date: Sat, 20 Jan 2024 10:24:23 +0100 Subject: [PATCH 405/792] fix(superset): handle comma in dataset table name (#9656) --- .../src/datahub/ingestion/source/superset.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py index 931069a921058..827c630cfa148 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/superset.py +++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py @@ -9,7 +9,7 @@ from pydantic.fields import Field from datahub.configuration import ConfigModel -from datahub.emitter.mce_builder import DEFAULT_ENV +from datahub.emitter.mce_builder import DEFAULT_ENV, make_dataset_urn from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SourceCapability, @@ -223,15 +223,13 @@ def get_datasource_urn_from_id(self, datasource_id): database_name = self.config.database_alias.get(database_name, database_name) if database_id and table_name: - platform = self.get_platform_from_database_id(database_id) - platform_urn = f"urn:li:dataPlatform:{platform}" - dataset_urn = ( - f"urn:li:dataset:(" - f"{platform_urn},{database_name + '.' if database_name else ''}" - f"{schema_name + '.' 
if schema_name else ''}" - f"{table_name},{self.config.env})" + return make_dataset_urn( + platform=self.get_platform_from_database_id(database_id), + name=".".join( + name for name in [database_name, schema_name, table_name] if name + ), + env=self.config.env, ) - return dataset_urn return None def construct_dashboard_from_api_data(self, dashboard_data): From ce65b9cb3fe344da20dfd1b222a6d2a80f2fc2d8 Mon Sep 17 00:00:00 2001 From: Dimitri <36767102+dim-ops@users.noreply.github.com> Date: Sat, 20 Jan 2024 10:25:40 +0100 Subject: [PATCH 406/792] feat(openapi): allow swagger 3.x (#9646) Co-authored-by: Dimitri GRISARD --- .../src/datahub/ingestion/source/openapi_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py index 84bb3ad452611..c1caca18fefe3 100755 --- a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py @@ -111,8 +111,8 @@ def check_sw_version(sw_dict: dict) -> None: version = [int(v) for v in v_split] if version[0] == 3 and version[1] > 0: - raise NotImplementedError( - "This plugin is not compatible with Swagger version >3.0" + logger.warning( + "This plugin has not been fully tested with Swagger version >3.0" ) From 344eeaebc7ddff275cce5c62e2fd31f2bff6b8d6 Mon Sep 17 00:00:00 2001 From: Quentin FLEURENT NAMBOT Date: Sun, 21 Jan 2024 21:38:44 +0100 Subject: [PATCH 407/792] feat(ingest/superset): add domains and platform_instance support (#9647) --- .../src/datahub/ingestion/source/superset.py | 75 ++++++++++++++++--- 1 file changed, 65 insertions(+), 10 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py index 827c630cfa148..18f8e3709a648 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/superset.py +++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py @@ -8,8 +8,18 @@ from pydantic.class_validators import root_validator, validator from pydantic.fields import Field -from datahub.configuration import ConfigModel -from datahub.emitter.mce_builder import DEFAULT_ENV, make_dataset_urn +from datahub.configuration.common import AllowDenyPattern +from datahub.configuration.source_common import ( + EnvConfigMixin, + PlatformInstanceConfigMixin, +) +from datahub.emitter.mce_builder import ( + make_chart_urn, + make_dashboard_urn, + make_dataset_urn, + make_domain_urn, +) +from datahub.emitter.mcp_builder import add_domain_to_entity_wu from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SourceCapability, @@ -49,6 +59,7 @@ DashboardInfoClass, ) from datahub.utilities import config_clean +from datahub.utilities.registries.domain_registry import DomainRegistry logger = logging.getLogger(__name__) @@ -72,7 +83,9 @@ } -class SupersetConfig(StatefulIngestionConfigBase, ConfigModel): +class SupersetConfig( + StatefulIngestionConfigBase, EnvConfigMixin, PlatformInstanceConfigMixin +): # See the Superset /security/login endpoint for details # https://superset.apache.org/docs/rest-api connect_uri: str = Field( @@ -82,6 +95,10 @@ class SupersetConfig(StatefulIngestionConfigBase, ConfigModel): default=None, description="optional URL to use in links (if `connect_uri` is only for ingestion)", ) + domain: Dict[str, AllowDenyPattern] = Field( + default=dict(), + description="regex patterns for 
tables to filter to assign domain_key. ", + ) username: Optional[str] = Field(default=None, description="Superset username.") password: Optional[str] = Field(default=None, description="Superset password.") @@ -92,10 +109,7 @@ class SupersetConfig(StatefulIngestionConfigBase, ConfigModel): provider: str = Field(default="db", description="Superset provider.") options: Dict = Field(default={}, description="") - env: str = Field( - default=DEFAULT_ENV, - description="Environment to use in namespace when constructing URNs", - ) + # TODO: Check and remove this if no longer needed. # Config database_alias is removed from sql sources. database_alias: Dict[str, str] = Field( @@ -188,6 +202,12 @@ def __init__(self, ctx: PipelineContext, config: SupersetConfig): } ) + if self.config.domain: + self.domain_registry = DomainRegistry( + cached_domains=[domain_id for domain_id in self.config.domain], + graph=self.ctx.graph, + ) + # Test the connection test_response = self.session.get(f"{self.config.connect_uri}/api/v1/dashboard/") if test_response.status_code == 200: @@ -233,7 +253,11 @@ def get_datasource_urn_from_id(self, datasource_id): return None def construct_dashboard_from_api_data(self, dashboard_data): - dashboard_urn = f"urn:li:dashboard:({self.platform},{dashboard_data['id']})" + dashboard_urn = make_dashboard_urn( + platform=self.platform, + name=dashboard_data["id"], + platform_instance=self.config.platform_instance, + ) dashboard_snapshot = DashboardSnapshot( urn=dashboard_urn, aspects=[Status(removed=False)], @@ -262,7 +286,11 @@ def construct_dashboard_from_api_data(self, dashboard_data): if not key.startswith("CHART-"): continue chart_urns.append( - f"urn:li:chart:({self.platform},{value.get('meta', {}).get('chartId', 'unknown')})" + make_chart_urn( + platform=self.platform, + name=value.get("meta", {}).get("chartId", "unknown"), + platform_instance=self.config.platform_instance, + ) ) # Build properties @@ -325,9 +353,17 @@ def emit_dashboard_mces(self) -> Iterable[MetadataWorkUnit]: ) mce = MetadataChangeEvent(proposedSnapshot=dashboard_snapshot) yield MetadataWorkUnit(id=dashboard_snapshot.urn, mce=mce) + yield from self._get_domain_wu( + title=dashboard_data.get("dashboard_title", ""), + entity_urn=dashboard_snapshot.urn, + ) def construct_chart_from_chart_data(self, chart_data): - chart_urn = f"urn:li:chart:({self.platform},{chart_data['id']})" + chart_urn = make_chart_urn( + platform=self.platform, + name=chart_data["id"], + platform_instance=self.config.platform_instance, + ) chart_snapshot = ChartSnapshot( urn=chart_urn, aspects=[Status(removed=False)], @@ -424,6 +460,10 @@ def emit_chart_mces(self) -> Iterable[MetadataWorkUnit]: mce = MetadataChangeEvent(proposedSnapshot=chart_snapshot) yield MetadataWorkUnit(id=chart_snapshot.urn, mce=mce) + yield from self._get_domain_wu( + title=chart_data.get("slice_name", ""), + entity_urn=chart_snapshot.urn, + ) def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: yield from self.emit_dashboard_mces() @@ -439,3 +479,18 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: def get_report(self) -> StaleEntityRemovalSourceReport: return self.report + + def _get_domain_wu(self, title: str, entity_urn: str) -> Iterable[MetadataWorkUnit]: + domain_urn = None + for domain, pattern in self.config.domain.items(): + if pattern.allowed(title): + domain_urn = make_domain_urn( + self.domain_registry.get_domain_urn(domain) + ) + break + + if domain_urn: + yield from add_domain_to_entity_wu( + entity_urn=entity_urn, 
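The `_get_domain_wu` helper being added here (and mirrored in the DynamoDB source later in this series) is a first-match lookup over the configured regex patterns. A hedged, self-contained sketch of the matching semantics — `AllowDenyPattern` is the real config class used above, while the domain URNs and patterns are invented:

```python
from typing import Dict, Optional

from datahub.configuration.common import AllowDenyPattern

# Example recipe fragment: domain -> allow/deny regexes over asset titles.
domain_config: Dict[str, AllowDenyPattern] = {
    "urn:li:domain:marketing": AllowDenyPattern(allow=["sales_.*", ".*_campaign"]),
}

def resolve_domain(title: str) -> Optional[str]:
    # The first pattern that allows the title wins, as in _get_domain_wu.
    for domain, pattern in domain_config.items():
        if pattern.allowed(title):
            return domain
    return None

assert resolve_domain("sales_q3_campaign") == "urn:li:domain:marketing"
assert resolve_domain("finance_report") is None
```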
+ domain_urn=domain_urn, + ) From ad65c36ddcb253dd3f8b22dc01465de134b006b1 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Sun, 21 Jan 2024 14:39:31 -0600 Subject: [PATCH 408/792] fix(workflow): workflow tweaks (#9678) --- .github/actions/ci-optimization/action.yml | 2 +- .github/workflows/build-and-test.yml | 1 + .github/workflows/metadata-io.yml | 20 ++++++++++++++++++++ 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml index 404e0bab814e8..f6160fdbcff67 100644 --- a/.github/actions/ci-optimization/action.yml +++ b/.github/actions/ci-optimization/action.yml @@ -48,7 +48,7 @@ runs: - "smoke-test/tests/cypress/**" - "docker/datahub-frontend/**" ingestion: - - "metadata-ingestion-modules/airflow-plugin/**" + - "metadata-ingestion-modules/**" - "metadata-ingestion/**" - "metadata-models/**" - "smoke-test/**" diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 180e0472a8d99..060d345a6b7d9 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -69,6 +69,7 @@ jobs: java-version: 17 - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 + if: ${{ needs.setup.outputs.ingestion_change == 'true' }} with: python-version: "3.10" cache: pip diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index eb5822b5b480d..243bd90cd6003 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -24,9 +24,28 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 + - uses: ./.github/actions/ci-optimization + id: ci-optimize build: runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - uses: actions/checkout@v3 - name: Set up JDK 17 @@ -36,6 +55,7 @@ jobs: java-version: 17 - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 + if: ${{ needs.setup.outputs.ingestion_change == 'true' }} with: python-version: "3.10" cache: "pip" From 77df9ec9262047e0e314c5a7a80f0eca3854ef35 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Mon, 22 Jan 2024 18:07:43 +0530 Subject: [PATCH 409/792] feat(ingest/databricks): view upstream lineage for hive metastore (#9657) --- .../source/unity/hive_metastore_proxy.py | 1 - .../datahub/ingestion/source/unity/source.py | 101 +++++++++++++++++- .../unity/unity_catalog_mces_golden.json | 60 +++++++++++ 3 files changed, 159 insertions(+), 3 deletions(-) diff --git 
a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py index 814d86a2f3234..2a98dda1c79c5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py @@ -55,7 +55,6 @@ class HiveMetastoreProxy(Closeable): - # TODO: Support for view lineage using SQL parsing # Why not use hive ingestion source directly here ? # 1. hive ingestion source assumes 2-level namespace heirarchy and currently # there is no other intermediate interface except sqlalchemy inspector diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 1bc47c6307849..7a47b1181ae36 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -1,7 +1,7 @@ import logging import re from concurrent.futures import ThreadPoolExecutor -from typing import Dict, Iterable, List, Optional, Set, Union +from typing import Dict, Iterable, List, Optional, Set, Tuple, Union from urllib.parse import urljoin from datahub.emitter.mce_builder import ( @@ -24,6 +24,7 @@ add_dataset_to_container, gen_containers, ) +from datahub.emitter.sql_parsing_builder import SqlParsingBuilder from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SupportStatus, @@ -67,6 +68,7 @@ DATA_TYPE_REGISTRY, Catalog, Column, + CustomCatalogType, Metastore, Notebook, NotebookId, @@ -104,6 +106,12 @@ from datahub.utilities.file_backed_collections import FileBackedDict from datahub.utilities.hive_schema_to_avro import get_schema_fields_for_hive_column from datahub.utilities.registries.domain_registry import DomainRegistry +from datahub.utilities.sqlglot_lineage import ( + SchemaResolver, + SqlParsingResult, + sqlglot_lineage, + view_definition_lineage_helper, +) logger: logging.Logger = logging.getLogger(__name__) @@ -137,6 +145,7 @@ class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource): unity_catalog_api_proxy: UnityCatalogApiProxy platform: str = "databricks" platform_instance_name: Optional[str] + sql_parser_schema_resolver: Optional[SchemaResolver] = None def get_report(self) -> UnityCatalogReport: return self.report @@ -179,6 +188,9 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): self.table_refs: Set[TableReference] = set() self.view_refs: Set[TableReference] = set() self.notebooks: FileBackedDict[Notebook] = FileBackedDict() + self.view_definitions: FileBackedDict[ + Tuple[TableReference, str] + ] = FileBackedDict() # Global map of tables, for profiling self.tables: FileBackedDict[Table] = FileBackedDict() @@ -191,6 +203,13 @@ def init_hive_metastore_proxy(self): self.config.get_sql_alchemy_url(HIVE_METASTORE), self.config.options ) self.report.hive_metastore_catalog_found = True + + if self.config.include_table_lineage: + self.sql_parser_schema_resolver = SchemaResolver( + platform=self.platform, + platform_instance=self.config.platform_instance, + env=self.config.env, + ) except Exception as e: logger.debug("Exception", exc_info=True) self.warn( @@ -243,6 +262,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: yield from self.process_metastores() + yield from self.get_view_lineage() + if self.config.include_notebooks: self.report.report_ingestion_stage_start("Notebook 
lineage") for notebook in self.notebooks.values(): @@ -304,7 +325,6 @@ def process_notebooks(self) -> Iterable[MetadataWorkUnit]: yield from self._gen_notebook_workunits(notebook) def _gen_notebook_workunits(self, notebook: Notebook) -> Iterable[MetadataWorkUnit]: - properties = {"path": notebook.path} if notebook.language: properties["language"] = notebook.language.value @@ -449,6 +469,17 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn table.ref, self.notebooks[str(notebook_id)] ) + # Sql parsing is required only for hive metastore view lineage + if ( + self.sql_parser_schema_resolver + and table.schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG + ): + self.sql_parser_schema_resolver.add_schema_metadata( + dataset_urn, schema_metadata + ) + if table.view_definition: + self.view_definitions[dataset_urn] = (table.ref, table.view_definition) + yield from [ mcp.as_workunit() for mcp in MetadataChangeProposalWrapper.construct_many( @@ -828,8 +859,74 @@ def _create_schema_field(column: Column) -> List[SchemaFieldClass]: ) ] + def _run_sql_parser( + self, view_ref: TableReference, query: str, schema_resolver: SchemaResolver + ) -> Optional[SqlParsingResult]: + raw_lineage = sqlglot_lineage( + query, + schema_resolver=schema_resolver, + default_db=view_ref.catalog, + default_schema=view_ref.schema, + ) + view_urn = self.gen_dataset_urn(view_ref) + + if raw_lineage.debug_info.table_error: + logger.debug( + f"Failed to parse lineage for view {view_ref}: " + f"{raw_lineage.debug_info.table_error}" + ) + self.report.num_view_definitions_failed_parsing += 1 + self.report.view_definitions_parsing_failures.append( + f"Table-level sql parsing error for view {view_ref}: {raw_lineage.debug_info.table_error}" + ) + return None + + elif raw_lineage.debug_info.column_error: + self.report.num_view_definitions_failed_column_parsing += 1 + self.report.view_definitions_parsing_failures.append( + f"Column-level sql parsing error for view {view_ref}: {raw_lineage.debug_info.column_error}" + ) + else: + self.report.num_view_definitions_parsed += 1 + return view_definition_lineage_helper(raw_lineage, view_urn) + + def get_view_lineage(self) -> Iterable[MetadataWorkUnit]: + if not ( + self.config.include_hive_metastore + and self.config.include_table_lineage + and self.sql_parser_schema_resolver + ): + return + # This is only used for parsing view lineage. 
Usage, Operations are emitted elsewhere + builder = SqlParsingBuilder( + generate_lineage=True, + generate_usage_statistics=False, + generate_operations=False, + ) + for dataset_name in self.view_definitions.keys(): + view_ref, view_definition = self.view_definitions[dataset_name] + result = self._run_sql_parser( + view_ref, + view_definition, + self.sql_parser_schema_resolver, + ) + if result and result.out_tables: + # This does not yield any workunits but we use + # yield here to execute this method + yield from builder.process_sql_parsing_result( + result=result, + query=view_definition, + is_view_ddl=True, + include_column_lineage=self.config.include_view_column_lineage, + ) + yield from builder.gen_workunits() + def close(self): if self.hive_metastore_proxy: self.hive_metastore_proxy.close() + if self.view_definitions: + self.view_definitions.close() + if self.sql_parser_schema_resolver: + self.sql_parser_schema_resolver.close() super().close() diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 649212c1041ed..7cc0f84ee5177 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -3463,6 +3463,66 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD),betStatusId)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD),betStatusId)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD),channelId)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD),channelId)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD),combination)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD),combination)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", From 1d16e4296497d5e9525cfebaf89344dd18fd247d Mon Sep 17 00:00:00 2001 From: Dimitri <36767102+dim-ops@users.noreply.github.com> Date: Mon, 22 Jan 2024 14:46:57 +0100 Subject: [PATCH 410/792] feat(ingest/dynamodb): add domain arg (#9658) Co-authored-by: 
Dimitri GRISARD --- .../ingestion/source/dynamodb/dynamodb.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py b/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py index d7f3dfb9279fb..972eb60ff5b05 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py @@ -13,8 +13,10 @@ make_data_platform_urn, make_dataplatform_instance_urn, make_dataset_urn_with_platform_instance, + make_domain_urn, ) from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.emitter.mcp_builder import add_domain_to_entity_wu from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SupportStatus, @@ -53,6 +55,7 @@ DataPlatformInstanceClass, DatasetPropertiesClass, ) +from datahub.utilities.registries.domain_registry import DomainRegistry MAX_ITEMS_TO_RETRIEVE = 100 PAGE_SIZE = 100 @@ -68,6 +71,11 @@ class DynamoDBConfig(DatasetSourceConfigMixin, StatefulIngestionConfigBase): aws_access_key_id: str = Field(description="AWS Access Key ID.") aws_secret_access_key: pydantic.SecretStr = Field(description="AWS Secret Key.") + domain: Dict[str, AllowDenyPattern] = Field( + default=dict(), + description="regex patterns for tables to filter to assign domain_key. ", + ) + # This config option allows user to include a list of items from a table when we scan and construct the schema, # the key of this dict is table name and the value is the list of item primary keys in dynamodb format, # if the table use composite key then the value should have partition key and sort key present @@ -155,6 +163,12 @@ def __init__(self, ctx: PipelineContext, config: DynamoDBConfig, platform: str): self.report = DynamoDBSourceReport() self.platform = platform + if self.config.domain: + self.domain_registry = DomainRegistry( + cached_domains=[domain_id for domain_id in self.config.domain], + graph=self.ctx.graph, + ) + @classmethod def create(cls, config_dict: dict, ctx: PipelineContext) -> "DynamoDBSource": config = DynamoDBConfig.parse_obj(config_dict) @@ -234,6 +248,11 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: aspect=dataset_properties, ).as_workunit() + yield from self._get_domain_wu( + dataset_name=table_name, + entity_urn=dataset_urn, + ) + platform_instance_aspect = DataPlatformInstanceClass( platform=make_data_platform_urn(self.platform), instance=make_dataplatform_instance_urn( @@ -480,3 +499,20 @@ def get_field_type( def get_report(self) -> DynamoDBSourceReport: return self.report + + def _get_domain_wu( + self, dataset_name: str, entity_urn: str + ) -> Iterable[MetadataWorkUnit]: + domain_urn = None + for domain, pattern in self.config.domain.items(): + if pattern.allowed(dataset_name): + domain_urn = make_domain_urn( + self.domain_registry.get_domain_urn(domain) + ) + break + + if domain_urn: + yield from add_domain_to_entity_wu( + entity_urn=entity_urn, + domain_urn=domain_urn, + ) From 943bb57cbcf22db12c092a7f9a30c762aa2bf6e5 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Mon, 22 Jan 2024 11:46:04 -0600 Subject: [PATCH 411/792] feat(backend): structured properties and forms (#9626) Co-authored-by: Chris Collins Co-authored-by: RyanHolstien --- .../GenerateJsonSchemaTask.java | 15 +- .../io/datahubproject/OpenApiEntities.java | 30 +- .../app/config/ConfigurationProvider.java | 4 + 
.../app/controllers/Application.java | 7 + .../app/controllers/RedirectController.java | 25 + datahub-frontend/conf/routes | 4 + datahub-frontend/public | 1 + .../resources/public/logos/datahub-logo.png | Bin 0 -> 53563 bytes datahub-graphql-core/build.gradle | 3 + .../linkedin/datahub/graphql/Constants.java | 3 +- .../datahub/graphql/GmsGraphQLEngine.java | 235 +++- .../datahub/graphql/GmsGraphQLEngineArgs.java | 2 + .../datahub/graphql/GmsGraphQLPlugin.java | 4 + .../datahub/graphql/SubTypesResolver.java | 55 + .../graphql/WeaklyTypedAspectsResolver.java | 2 +- .../GetMetadataAnalyticsResolver.java | 2 +- .../analytics/service/AnalyticsService.java | 2 +- .../graphql/featureflags/FeatureFlags.java | 1 + .../resolvers/chart/BrowseV2Resolver.java | 10 +- .../resolvers/config/AppConfigResolver.java | 12 +- .../ListDataProductAssetsResolver.java | 2 +- .../domain/DomainEntitiesResolver.java | 2 +- .../form/BatchAssignFormResolver.java | 52 + .../CreateDynamicFormAssignmentResolver.java | 50 + .../form/IsFormAssignedToMeResolver.java | 80 ++ .../form/SubmitFormPromptResolver.java | 89 ++ .../resolvers/form/VerifyFormResolver.java | 63 + .../glossary/CreateGlossaryNodeResolver.java | 1 - .../resolvers/group/EntityCountsResolver.java | 2 +- .../execution/RollbackIngestionResolver.java | 3 +- .../resolvers/mutate/util/FormUtils.java | 105 ++ .../policy/GetGrantedPrivilegesResolver.java | 2 +- .../ListRecommendationsResolver.java | 2 +- .../AggregateAcrossEntitiesResolver.java | 8 +- .../AutoCompleteForMultipleResolver.java | 8 +- .../search/GetQuickFiltersResolver.java | 2 +- .../search/ScrollAcrossEntitiesResolver.java | 2 +- .../search/ScrollAcrossLineageResolver.java | 2 +- .../search/SearchAcrossLineageResolver.java | 2 +- .../resolvers/search/SearchResolver.java | 2 +- .../graphql/resolvers/search/SearchUtils.java | 2 +- .../resolvers/type/PropertyValueResolver.java | 25 + .../graphql/resolvers/view/ViewUtils.java | 2 +- .../common/mappers/UrnToEntityMapper.java | 6 + .../graphql/types/dataset/DatasetType.java | 2 + .../dataset/mappers/DatasetFilterMapper.java | 24 + .../types/dataset/mappers/DatasetMapper.java | 13 + .../dataset/mappers/SchemaFieldMapper.java | 14 + .../dataset/mappers/SchemaMetadataMapper.java | 7 +- .../types/datatype/DataTypeEntityMapper.java | 51 + .../graphql/types/datatype/DataTypeType.java | 78 ++ .../types/datatype/DataTypeUrnMapper.java | 40 + .../entitytype/EntityTypeEntityMapper.java | 54 + .../entitytype}/EntityTypeMapper.java | 8 +- .../types/entitytype/EntityTypeType.java | 78 ++ .../types/entitytype/EntityTypeUrnMapper.java | 85 ++ .../graphql/types/form/FormMapper.java | 129 ++ .../datahub/graphql/types/form/FormType.java | 76 ++ .../graphql/types/form/FormsMapper.java | 133 ++ .../glossary/mappers/GlossaryNodeMapper.java | 11 +- .../glossary/mappers/GlossaryTermsMapper.java | 11 +- .../graphql/types/mappers/MapperUtils.java | 2 +- .../types/schemafield/SchemaFieldMapper.java | 54 + .../types/schemafield/SchemaFieldType.java | 70 +- .../StructuredPropertiesMapper.java | 80 ++ .../StructuredPropertyMapper.java | 124 ++ .../StructuredPropertyType.java | 79 ++ .../graphql/types/view/DataHubViewMapper.java | 2 +- .../src/main/resources/app.graphql | 10 + .../src/main/resources/entity.graphql | 336 ++++- .../src/main/resources/forms.graphql | 407 ++++++ .../src/main/resources/properties.graphql | 243 ++++ .../src/main/resources/search.graphql | 6 +- .../src/main/resources/tests.graphql | 2 + .../linkedin/datahub/graphql/TestUtils.java | 8 +- 
.../browse/BrowseV2ResolverTest.java | 13 +- .../domain/DomainEntitiesResolverTest.java | 2 +- .../form/IsFormAssignedToMeResolverTest.java | 167 +++ .../form/VerifyFormResolverTest.java | 122 ++ .../RollbackIngestionResolverTest.java | 7 +- .../mutate/MutableTypeBatchResolverTest.java | 5 +- .../AggregateAcrossEntitiesResolverTest.java | 21 +- .../search/GetQuickFiltersResolverTest.java | 2 +- .../SearchAcrossEntitiesResolverTest.java | 2 +- .../upgrade/UpgradeCliApplication.java | 8 +- .../common/steps/GMSDisableWriteModeStep.java | 10 +- .../common/steps/GMSEnableWriteModeStep.java | 10 +- .../config/BackfillBrowsePathsV2Config.java | 2 +- .../upgrade/config/BuildIndicesConfig.java | 10 +- .../upgrade/config/NoCodeUpgradeConfig.java | 9 +- .../config/RemoveUnknownAspectsConfig.java | 2 +- .../upgrade/config/RestoreBackupConfig.java | 9 +- .../upgrade/config/RestoreIndicesConfig.java | 11 +- .../upgrade/impl/DefaultUpgradeContext.java | 46 +- .../upgrade/impl/DefaultUpgradeManager.java | 3 + .../datahub/upgrade/nocode/NoCodeUpgrade.java | 10 +- .../RemoveClientIdAspectStep.java | 2 +- .../RemoveUnknownAspects.java | 4 +- .../upgrade/restorebackup/RestoreBackup.java | 10 +- .../restorebackup/RestoreStorageStep.java | 2 +- .../backupreader/BackupReader.java | 1 + .../restoreindices/RestoreIndices.java | 11 +- .../upgrade/restoreindices/SendMAEStep.java | 26 +- .../system/elasticsearch/BuildIndices.java | 24 +- .../steps/BuildIndicesPreStep.java | 29 +- .../system/elasticsearch/util/IndexUtils.java | 19 + .../entity/steps/BackfillBrowsePathsV2.java | 2 +- .../steps/BackfillBrowsePathsV2Step.java | 4 +- .../src/main/resources/application.properties | 5 + ...pgradeCliApplicationTestConfiguration.java | 2 +- datahub-web-react/build.gradle | 1 - datahub-web-react/index.html | 3 +- .../public/assets/{ => icons}/favicon.ico | Bin datahub-web-react/public/assets/logo.png | Bin 53563 -> 22 bytes .../public/assets/logos/datahub-logo.png | Bin 0 -> 53563 bytes datahub-web-react/public/browserconfig.xml | 9 + datahub-web-react/public/manifest.json | 2 +- docker/build.gradle | 5 + .../datahub-ingestion-base/smoke.Dockerfile | 4 +- docker/datahub-ingestion/Dockerfile | 4 +- docker/datahub-ingestion/Dockerfile-slim-only | 2 +- docker/elasticsearch-setup/Dockerfile | 2 +- docker/profiles/docker-compose.gms.yml | 4 + docs-website/graphql/generateGraphQLSchema.sh | 2 + docs-website/sidebars.js | 16 +- .../TownhallButton/townhallbutton.module.scss | 2 +- .../openapi/openapi-structured-properties.md | 284 +++++ .../metadata/aspect/batch/AspectsBatch.java | 11 +- .../metadata/aspect/batch/MCPBatchItem.java | 2 +- .../metadata/aspect/batch/PatchItem.java | 6 +- .../metadata/aspect/batch/UpsertItem.java | 5 +- .../aspect/patch/GenericJsonPatch.java | 34 + .../aspect}/patch/PatchOperationType.java | 2 +- .../AbstractMultiFieldPatchBuilder.java | 7 +- .../patch/builder}/ChartInfoPatchBuilder.java | 10 +- .../CustomPropertiesPatchBuilder.java | 9 +- .../builder}/DashboardInfoPatchBuilder.java | 12 +- .../builder}/DataFlowInfoPatchBuilder.java | 13 +- .../builder}/DataJobInfoPatchBuilder.java | 13 +- .../DataJobInputOutputPatchBuilder.java | 13 +- .../DatasetPropertiesPatchBuilder.java | 13 +- .../EditableSchemaMetadataPatchBuilder.java | 10 +- .../builder}/GlobalTagsPatchBuilder.java | 9 +- .../builder}/GlossaryTermsPatchBuilder.java | 9 +- .../patch/builder}/OwnershipPatchBuilder.java | 9 +- .../aspect/patch/builder}/PatchUtil.java | 6 +- .../StructuredPropertiesPatchBuilder.java | 110 ++ 
 .../builder}/UpstreamLineagePatchBuilder.java | 11 +-
 .../CustomPropertiesPatchBuilderSupport.java | 4 +-
 .../IntermediatePatchBuilder.java | 4 +-
 .../patch}/template/ArrayMergingTemplate.java | 4 +-
 .../patch}/template/AspectTemplateEngine.java | 19 +-
 .../patch/template/CompoundKeyTemplate.java | 23 +
 .../patch}/template/Template.java | 30 +-
 .../aspect/patch/template/TemplateUtil.java | 97 ++
 .../template/chart/ChartInfoTemplate.java | 6 +-
 .../template/common/GenericPatchTemplate.java | 59 +
 .../template/common/GlobalTagsTemplate.java | 4 +-
 .../common/GlossaryTermsTemplate.java | 8 +-
 .../template/common/OwnershipTemplate.java | 6 +-
 .../common/StructuredPropertiesTemplate.java | 56 +
 .../dashboard/DashboardInfoTemplate.java | 6 +-
 .../dataflow/DataFlowInfoTemplate.java | 4 +-
 .../template/datajob/DataJobInfoTemplate.java | 4 +-
 .../datajob/DataJobInputOutputTemplate.java | 4 +-
 .../DataProductPropertiesTemplate.java | 4 +-
 .../dataset/DatasetPropertiesTemplate.java | 4 +-
 .../EditableSchemaMetadataTemplate.java | 10 +-
 .../dataset/UpstreamLineageTemplate.java | 11 +-
 .../aspect/plugins/PluginFactory.java | 29 +-
 .../metadata/aspect/plugins/PluginSpec.java | 8 +-
 .../aspect/plugins/hooks/MCLSideEffect.java | 11 +-
 .../aspect/plugins/hooks/MCPSideEffect.java | 9 +-
 .../plugins/validation/AspectRetriever.java | 25 +-
 .../PropertyDefinitionValidator.java | 91 ++
 .../StructuredPropertiesValidator.java | 264 ++++
 .../metadata/models/LogicalValueType.java | 10 +
 .../models/StructuredPropertyUtils.java | 45 +
 .../models/registry/ConfigEntityRegistry.java | 2 +-
 .../models/registry/EntityRegistry.java | 7 +-
 .../models/registry/MergedEntityRegistry.java | 2 +-
 .../models/registry/PatchEntityRegistry.java | 2 +-
 .../registry/SnapshotEntityRegistry.java | 31 +-
 .../template/CompoundKeyTemplate.java | 52 -
 .../registry/template/util/TemplateUtil.java | 39 -
 .../template}/ChartInfoTemplateTest.java | 4 +-
 .../template}/DashboardInfoTemplateTest.java | 4 +-
 .../UpstreamLineageTemplateTest.java | 4 +-
 .../metadata/aspect/plugins/PluginsTest.java | 28 +-
 .../plugins/hooks/MCLSideEffectTest.java | 5 +-
 .../plugins/hooks/MCPSideEffectTest.java | 3 +-
 .../validation/ValidatorPluginTest.java | 2 +-
 .../PropertyDefinitionValidatorTest.java | 212 ++++
 .../StructuredPropertiesValidatorTest.java | 246 ++++
 .../models/EntitySpecBuilderTest.java | 2 +-
 .../PluginEntityRegistryLoaderTest.java | 2 +-
 .../ingestion/IngestionSchedulerTest.java | 4 +-
 .../java/com/linkedin/metadata/Constants.java | 23 +
 .../airflow-plugin/scripts/release.sh | 2 +-
 .../bootstrap_data/business_glossary.yml | 2 +
 metadata-ingestion/examples/forms/forms.yaml | 54 +
 .../mce_files/test_structured_properties.json | 218 ++++
 .../examples/structured_properties/README.md | 51 +
 .../structured_properties/click_event.avsc | 14 +
 .../structured_properties/dataset.yaml | 45 +
 .../structured_properties.yaml | 68 +
 metadata-ingestion/scripts/docgen.sh | 2 +-
 metadata-ingestion/scripts/modeldocgen.py | 22 +
 metadata-ingestion/scripts/release.sh | 2 +-
 .../datahub/api/entities/dataset/__init__.py | 0
 .../datahub/api/entities/dataset/dataset.py | 466 +++++++
 .../datahub/api/entities/forms/__init__.py | 0
 .../src/datahub/api/entities/forms/forms.py | 353 ++++++
 .../entities/forms/forms_graphql_constants.py | 27 +
 .../entities/structuredproperties/__init__.py | 0
 .../structuredproperties.py | 185 +++
 .../src/datahub/cli/docker_check.py | 37 +
 .../datahub/cli/specific/dataproduct_cli.py | 3 +-
 .../src/datahub/cli/specific/dataset_cli.py | 67 +
 .../src/datahub/cli/specific/forms_cli.py | 53 +
 .../cli/specific/structuredproperties_cli.py | 62 +
 metadata-ingestion/src/datahub/entrypoints.py | 6 +
 .../source/metadata/business_glossary.py | 2 +
 .../src/datahub/specific/dataset.py | 32 +
 .../datahub/specific/structured_properties.py | 53 +
 .../src/datahub/utilities/urn_encoder.py | 1 +
 .../src/datahub/utilities/urns/_urn_base.py | 40 +
 .../urns/structured_properties_urn.py | 5 +
 .../business-glossary/business_glossary.yml | 2 +
 .../glossary_events_auto_id_golden.json | 93 +-
 .../glossary_events_golden.json | 93 +-
 .../remote/content/business_glossary.yml | 2 +
 .../remote/golden/remote_glossary_golden.json | 93 +-
 .../java/datahub-client/build.gradle | 12 +-
 .../java/datahub-client/scripts/check_jar.sh | 3 +-
 .../java/datahub/client/patch/PatchTest.java | 18 +-
 .../datahub-protobuf/scripts/check_jar.sh | 3 +-
 .../java/examples/build.gradle | 3 -
 .../examples/DataJobLineageAdd.java | 2 +-
 .../examples/DatasetCustomPropertiesAdd.java | 2 +-
 .../DatasetCustomPropertiesAddRemove.java | 2 +-
 .../DatasetCustomPropertiesReplace.java | 2 +-
 .../java/spark-lineage/build.gradle | 5 +
 .../setup_spark_smoke_test.sh | 2 +-
 .../client/EntityClientAspectRetriever.java | 35 +
 .../metadata/client/JavaEntityClient.java | 19 +-
 .../client/SystemJavaEntityClient.java | 14 +-
 .../metadata/entity/EntityServiceImpl.java | 52 +-
 .../linkedin/metadata/entity/EntityUtils.java | 12 +-
 .../cassandra/CassandraRetentionService.java | 16 +-
 .../entity/ebean/EbeanRetentionService.java | 16 +-
 .../entity/ebean/batch/AspectsBatchImpl.java | 31 +-
 .../entity/ebean/batch/MCLBatchItemImpl.java | 23 +-
 .../entity/ebean/batch/MCPPatchBatchItem.java | 11 +-
 .../ebean/batch/MCPUpsertBatchItem.java | 63 +-
 .../entity/validation/ValidationUtils.java | 3 +-
 .../graph/dgraph/DgraphGraphService.java | 19 +
 .../graph/elastic/ESGraphQueryDAO.java | 92 +-
 .../elastic/ElasticSearchGraphService.java | 123 +-
 .../graph/neo4j/Neo4jGraphService.java | 19 +
 .../elasticsearch/ElasticSearchService.java | 9 +
 .../indexbuilder/ESIndexBuilder.java | 46 +-
 .../indexbuilder/EntityIndexBuilders.java | 48 +
 .../indexbuilder/MappingsBuilder.java | 79 ++
 .../indexbuilder/ReindexConfig.java | 33 +
 .../elasticsearch/query/ESSearchDAO.java | 4 +-
 .../request/AggregationQueryBuilder.java | 17 +-
 .../query/request/SearchAfterWrapper.java | 2 +-
 .../query/request/SearchRequestHandler.java | 1 +
 .../metadata/search/features/Features.java | 3 +-
 .../SearchDocumentTransformer.java | 114 +-
 .../metadata/search/utils/ESUtils.java | 102 +-
 .../service/UpdateIndicesService.java | 82 +-
 .../metadata/shared/ElasticSearchIndexed.java | 11 +
 .../ElasticSearchSystemMetadataService.java | 8 +
 .../ElasticSearchTimeseriesAspectService.java | 159 ++-
 .../TimeseriesAspectIndexBuilders.java | 8 +
 .../metadata/AspectIngestionUtils.java | 6 +-
 .../metadata/client/JavaEntityClientTest.java | 10 +-
 .../entity/EbeanEntityServiceTest.java | 20 +-
 .../metadata/entity/EntityServiceTest.java | 228 +++-
 .../metadata/entity/TestEntityRegistry.java | 2 +-
 .../search/SearchGraphServiceTestBase.java | 28 +-
 .../search/fixtures/GoldenTestBase.java | 2 +-
 .../indexbuilder/IndexBuilderTestBase.java | 118 ++
 .../indexbuilder/MappingsBuilderTest.java | 137 +-
 .../request/AggregationQueryBuilderTest.java | 97 +-
 .../request/CustomizedQueryHandlerTest.java | 10 +
 .../query/request/SearchQueryBuilderTest.java | 19 +-
 .../metadata/search/utils/ESUtilsTest.java | 71 ++
 .../test/search/SearchTestUtils.java | 2 +-
 ...rm_assignment_test_definition_complex.json | 145 +++
 ...orm_assignment_test_definition_simple.json | 67 +
 .../forms/form_prompt_test_definition.json | 39 +
 .../kafka/MaeConsumerApplication.java | 4 +-
 .../src/main/resources/application.properties | 2 +-
 ...eConsumerApplicationTestConfiguration.java | 3 -
 .../kafka/MetadataChangeLogProcessor.java | 7 +-
 .../kafka/config/EntityHydratorConfig.java | 12 +-
 .../event/EntityChangeEventGeneratorHook.java | 9 +-
 .../kafka/hook/form/FormAssignmentHook.java | 130 ++
 .../hook/siblings/SiblingAssociationHook.java | 8 +-
 .../kafka/hydrator/EntityHydrator.java | 6 +-
 .../kafka/hook/UpdateIndicesHookTest.java | 7 +-
 .../EntityChangeEventGeneratorHookTest.java | 10 +-
 .../siblings/SiblingAssociationHookTest.java | 6 +-
 .../spring/MCLSpringTestConfiguration.java | 19 +-
 .../kafka/MceConsumerApplication.java | 6 +-
 .../metadata/restli/RestliServletConfig.java | 13 -
 .../kafka/MceConsumerApplicationTest.java | 2 +-
 ...eConsumerApplicationTestConfiguration.java | 21 +-
 metadata-jobs/mce-consumer/build.gradle | 2 +-
 .../kafka/MetadataChangeEventsProcessor.java | 6 +-
 .../MetadataChangeProposalsProcessor.java | 6 +-
 .../datahub/event/PlatformEventProcessor.java | 2 +-
 metadata-models-custom/README.md | 20 +
 .../CustomDataQualityRulesMCLSideEffect.java | 9 +-
 .../CustomDataQualityRulesMCPSideEffect.java | 5 +-
 metadata-models/build.gradle | 3 +-
 .../com/linkedin/common/CustomProperties.pdl | 1 +
 .../common/FieldFormPromptAssociation.pdl | 17 +
 .../com/linkedin/common/FormAssociation.pdl | 21 +
 .../linkedin/common/FormPromptAssociation.pdl | 23 +
 .../common/FormPromptFieldAssociations.pdl | 16 +
 .../common/FormVerificationAssociation.pdl | 17 +
 .../pegasus/com/linkedin/common/Forms.pdl | 66 +
 .../common/GlossaryTermAssociation.pdl | 6 +
 .../com/linkedin/common/PropertyValue.pdl | 13 +
 .../linkedin/datahub/DataHubSearchConfig.pdl | 87 ++
 .../com/linkedin/datatype/DataTypeInfo.pdl | 21 +
 .../com/linkedin/datatype/DataTypeKey.pdl | 11 +
 .../linkedin/entitytype/EntityTypeInfo.pdl | 22 +
 .../com/linkedin/entitytype/EntityTypeKey.pdl | 11 +
 .../linkedin/form/DynamicFormAssignment.pdl | 19 +
 .../com/linkedin/form/FormActorAssignment.pdl | 21 +
 .../pegasus/com/linkedin/form/FormInfo.pdl | 51 +
 .../pegasus/com/linkedin/form/FormPrompt.pdl | 53 +
 .../linkedin/glossary/GlossaryNodeInfo.pdl | 3 +-
 .../linkedin/glossary/GlossaryTermInfo.pdl | 2 +
 .../com/linkedin/metadata/key/FormKey.pdl | 14 +
 .../structured/PrimitivePropertyValue.pdl | 9 +
 .../com/linkedin/structured/PropertyValue.pdl | 10 +
 .../structured/StructuredProperties.pdl | 14 +
 .../StructuredPropertyDefinition.pdl | 74 ++
 .../structured/StructuredPropertyKey.pdl | 11 +
 .../StructuredPropertyValueAssignment.pdl | 29 +
 .../src/main/resources/entity-registry.yml | 55 +-
 .../authentication/group/GroupService.java | 42 +-
 .../token/StatefulTokenService.java | 11 +-
 .../metadata/config/VisualConfiguration.java | 9 +
 .../src/main/resources/application.yml | 20 +-
 .../factory/auth/AuthorizerChainFactory.java | 31 +-
 .../auth/DataHubAuthorizerFactory.java | 22 +-
 .../auth/DataHubTokenServiceFactory.java | 6 +-
 .../gms/factory/auth/GroupServiceFactory.java | 13 +-
 .../auth/InviteTokenServiceFactory.java | 10 +-
 .../auth/NativeUserServiceFactory.java | 15 +-
 .../gms/factory/auth/PostServiceFactory.java | 11 +-
 .../gms/factory/auth/RoleServiceFactory.java | 12 +-
 .../auth/SystemAuthenticationFactory.java | 4 +-
 .../common/SiblingGraphServiceFactory.java | 2 +-
 .../factory/config/ConfigurationProvider.java | 6 +
 .../StructuredPropertiesConfiguration.java | 10 +
 .../DataProductServiceFactory.java | 10 +-
 .../entity/CassandraSessionFactory.java | 4 +-
 .../entity/DeleteEntityServiceFactory.java | 4 +-
 .../factory/entity/EntityServiceFactory.java | 18 +-
 .../entity/JavaEntityClientFactory.java | 100 --
 .../entity/RetentionServiceFactory.java | 12 +-
 .../entity/RollbackServiceFactory.java | 27 +
 .../indices/UpdateIndicesServiceFactory.java | 18 +-
 .../EntityClientConfigFactory.java | 20 +
 .../entityclient/JavaEntityClientFactory.java | 85 ++
 .../RestliEntityClientFactory.java | 60 +-
 .../gms/factory/form/FormServiceFactory.java | 21 +
 .../factory/graphql/GraphQLEngineFactory.java | 30 +-
 .../ingestion/IngestionSchedulerFactory.java | 20 +-
 .../kafka/KafkaEventConsumerFactory.java | 1 -
 .../AwsGlueSchemaRegistryFactory.java | 2 +-
 .../lineage/LineageServiceFactory.java | 11 +-
 .../OwnershipTypeServiceFactory.java | 17 +-
 .../factory/query/QueryServiceFactory.java | 16 +-
 .../MostPopularCandidateSourceFactory.java | 2 +-
 .../RecentlyEditedCandidateSourceFactory.java | 2 +-
 .../RecentlyViewedCandidateSourceFactory.java | 2 +-
 .../TopPlatformsCandidateSourceFactory.java | 2 +-
 .../search/ElasticSearchServiceFactory.java | 13 +
 .../search/LineageSearchServiceFactory.java | 4 +-
 .../search/views/ViewServiceFactory.java | 16 +-
 .../settings/SettingsServiceFactory.java | 17 +-
 .../gms/factory/telemetry/DailyReport.java | 4 +-
 .../telemetry/ScheduledAnalyticsFactory.java | 2 +-
 .../gms/factory/telemetry/TelemetryUtils.java | 4 +-
 .../telemetry/TrackingServiceFactory.java | 2 +-
 ...tyChangeEventGeneratorRegistryFactory.java | 9 +-
 .../linkedin/metadata/boot/BootstrapStep.java | 2 +-
 .../linkedin/metadata/boot/UpgradeStep.java | 5 +-
 .../factories/BootstrapManagerFactory.java | 10 +-
 .../IngestRetentionPoliciesStepFactory.java | 2 +-
 .../boot/steps/BackfillBrowsePathsV2Step.java | 2 +-
 .../boot/steps/IndexDataPlatformsStep.java | 2 +-
 .../IngestDataPlatformInstancesStep.java | 5 +-
 .../boot/steps/IngestDataPlatformsStep.java | 6 +-
 .../boot/steps/IngestDataTypesStep.java | 103 ++
 .../IngestDefaultGlobalSettingsStep.java | 6 +-
 .../boot/steps/IngestEntityTypesStep.java | 88 ++
 .../boot/steps/IngestOwnershipTypesStep.java | 8 +-
 .../boot/steps/IngestPoliciesStep.java | 5 +-
 .../metadata/boot/steps/IngestRolesStep.java | 5 +-
 .../boot/steps/IngestRootUserStep.java | 2 +-
 .../steps/RestoreColumnLineageIndices.java | 4 +-
 .../boot/steps/RestoreDbtSiblingsIndices.java | 3 +-
 .../boot/steps/RestoreGlossaryIndices.java | 2 +-
 .../steps/UpgradeDefaultBrowsePathsStep.java | 2 +-
 ...SearchIndexBuilderFactoryDefaultsTest.java | 27 +
 .../steps/BackfillBrowsePathsV2StepTest.java | 8 +-
 .../IngestDataPlatformInstancesStepTest.java | 12 +-
 .../boot/steps/IngestDataTypesStepTest.java | 81 ++
 .../IngestDefaultGlobalSettingsStepTest.java | 10 +-
 .../boot/steps/IngestEntityTypesStepTest.java | 91 ++
 .../RestoreColumnLineageIndicesTest.java | 13 +-
 .../steps/RestoreGlossaryIndicesTest.java | 10 +-
 .../UpgradeDefaultBrowsePathsStepTest.java | 12 +-
 .../telemetry/TelemetryUtilsTest.java | 2 +-
 .../boot/test_data_types_invalid.json | 9 +
 .../resources/boot/test_data_types_valid.json | 10 +
 .../test/resources/test-entity-registry.yaml | 18 +-
 .../openapi-entity-servlet/build.gradle | 2 +-
 .../delegates/EntityApiDelegateImpl.java | 119 +-
 .../JavaSpring/apiController.mustache | 2 +-
 .../delegates/EntityApiDelegateImplTest.java | 8 +-
 .../GlobalControllerExceptionHandler.java | 15 +-
 .../openapi/config/SpringWebConfig.java | 31 +-
 .../openapi/util/MappingUtil.java | 6 +-
 .../openapi/util/ReflectionCache.java | 27 +-
 .../v2/controller/EntityController.java | 507 ++++++++
 .../v2/controller/RelationshipController.java | 228 ++++
 .../v2/controller/TimeseriesController.java | 115 ++
 .../openapi/v2/models/GenericEntity.java | 57 +
 .../v2/models/GenericRelationship.java | 36 +
 .../v2/models/GenericScrollResult.java | 12 +
 .../v2/models/GenericTimeseriesAspect.java | 18 +
 .../openapi/v2/models/PatchOperation.java | 26 +
 .../openapi/v2/utils/ControllerUtil.java | 67 +
 .../com.linkedin.entity.aspects.snapshot.json | 7 +
 ...com.linkedin.entity.entities.snapshot.json | 7 +
 .../com.linkedin.entity.runs.snapshot.json | 7 +
 ...nkedin.operations.operations.snapshot.json | 7 +
 ...m.linkedin.platform.platform.snapshot.json | 7 +
 .../linkedin/entity/client/EntityClient.java | 30 +-
 .../entity/client/RestliEntityClient.java | 10 +-
 .../entity/client/SystemEntityClient.java | 55 +-
 .../client/SystemRestliEntityClient.java | 2 +-
 .../resources/entity/AspectResource.java | 4 +-
 .../entity/BatchIngestionRunResource.java | 309 +----
 .../resources/entity/AspectResourceTest.java | 2 +-
 .../mock/MockTimeseriesAspectService.java | 15 +
 metadata-service/services/build.gradle | 6 +-
 .../linkedin/metadata/entity/AspectUtils.java | 2 +-
 .../metadata/entity/EntityService.java | 16 +-
 .../linkedin/metadata/graph/GraphService.java | 15 +
 .../metadata/graph/RelatedEntities.java | 31 +
 .../graph/RelatedEntitiesScrollResult.java | 16 +
 .../metadata/search/EntitySearchService.java | 2 +-
 .../metadata/service/FormService.java | 1107 +++++++++++++++++
 .../metadata/service/RollbackService.java | 328 +++++
 .../SearchBasedFormAssignmentManager.java | 94 ++
 .../util/SearchBasedFormAssignmentRunner.java | 45 +
 .../metadata/shared/ValidationUtils.java | 167 +--
 .../timeseries/GenericTimeseriesDocument.java | 26 +
 .../timeseries/TimeseriesAspectService.java | 11 +
 .../timeseries/TimeseriesScrollResult.java | 18 +
 .../gms/servlet/ConfigSearchExport.java | 2 +-
 .../src/main/resources/boot/data_types.json | 42 +
 .../authorization/OwnershipUtils.java | 20 +
 .../metadata/utils/AuditStampUtils.java | 10 +
 .../linkedin/metadata/utils/FormUtils.java | 49 +
 .../metadata/utils/GenericRecordUtils.java | 21 +
 .../metadata/utils/SchemaFieldUtils.java | 22 +
 .../linkedin/metadata/utils/SearchUtil.java | 26 +
 .../main/java/mock/MockEntityRegistry.java | 2 +-
 smoke-test/cypress-dev.sh | 2 +-
 smoke-test/requests_wrapper/__init__.py | 1 +
 .../cypress/cypress/e2e/siblings/siblings.js | 2 +-
 .../tests/structured_properties/__init__.py | 0
 .../structured_properties/click_event.avsc | 14 +
 .../structured_properties/test_dataset.yaml | 19 +
 .../test_structured_properties.py | 577 +++++++++
 .../test_structured_properties.yaml | 33 +
 smoke-test/tests/telemetry/telemetry_test.py | 14 +-
 smoke-test/tests/utilities/__init__.py | 0
 smoke-test/tests/utilities/file_emitter.py | 21 +
 492 files changed, 15378 insertions(+), 1931 deletions(-)
 create mode 100644 datahub-frontend/app/controllers/RedirectController.java
 create mode 120000 datahub-frontend/public
 create mode 100644 datahub-frontend/test/resources/public/logos/datahub-logo.png
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java
 rename datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/{resolvers => types/entitytype}/EntityTypeMapper.java (91%)
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java
 create mode 100644 datahub-graphql-core/src/main/resources/forms.graphql
 create mode 100644 datahub-graphql-core/src/main/resources/properties.graphql
 create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java
 create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java
 create mode 100644 datahub-upgrade/src/main/resources/application.properties
 rename datahub-web-react/public/assets/{ => icons}/favicon.ico (100%)
 mode change 100644 => 120000 datahub-web-react/public/assets/logo.png
 create mode 100644 datahub-web-react/public/assets/logos/datahub-logo.png
 create mode 100644 datahub-web-react/public/browserconfig.xml
 create mode 100644 docs/api/openapi/openapi-structured-properties.md
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client => entity-registry/src/main/java/com/linkedin/metadata/aspect}/patch/PatchOperationType.java (81%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/AbstractMultiFieldPatchBuilder.java (95%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/ChartInfoPatchBuilder.java (75%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/CustomPropertiesPatchBuilder.java (90%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DashboardInfoPatchBuilder.java (86%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DataFlowInfoPatchBuilder.java (92%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DataJobInfoPatchBuilder.java (93%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DataJobInputOutputPatchBuilder.java (93%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DatasetPropertiesPatchBuilder.java (91%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/EditableSchemaMetadataPatchBuilder.java (90%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/GlobalTagsPatchBuilder.java (88%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/GlossaryTermsPatchBuilder.java (89%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/OwnershipPatchBuilder.java (91%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/PatchUtil.java (96%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/UpstreamLineagePatchBuilder.java (96%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/subtypesupport/CustomPropertiesPatchBuilderSupport.java (81%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/subtypesupport/IntermediatePatchBuilder.java (83%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/ArrayMergingTemplate.java (98%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/AspectTemplateEngine.java (71%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/Template.java (69%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/chart/ChartInfoTemplate.java (92%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/common/GlobalTagsTemplate.java (90%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/common/GlossaryTermsTemplate.java (92%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/common/OwnershipTemplate.java (89%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dashboard/DashboardInfoTemplate.java (94%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataflow/DataFlowInfoTemplate.java (89%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/datajob/DataJobInfoTemplate.java (89%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/datajob/DataJobInputOutputTemplate.java (96%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataproduct/DataProductPropertiesTemplate.java (91%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataset/DatasetPropertiesTemplate.java (91%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataset/EditableSchemaMetadataTemplate.java (92%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataset/UpstreamLineageTemplate.java (96%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java
 delete mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
 delete mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java
 rename entity-registry/src/test/java/com/linkedin/metadata/{models/registry/patch => aspect/patch/template}/ChartInfoTemplateTest.java (92%)
 rename entity-registry/src/test/java/com/linkedin/metadata/{models/registry/patch => aspect/patch/template}/DashboardInfoTemplateTest.java (91%)
 rename entity-registry/src/test/java/com/linkedin/metadata/{models/registry/patch => aspect/patch/template}/UpstreamLineageTemplateTest.java (99%)
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java
 create mode 100644 metadata-ingestion/examples/forms/forms.yaml
 create mode 100644 metadata-ingestion/examples/mce_files/test_structured_properties.json
 create mode 100644 metadata-ingestion/examples/structured_properties/README.md
 create mode 100644 metadata-ingestion/examples/structured_properties/click_event.avsc
 create mode 100644 metadata-ingestion/examples/structured_properties/dataset.yaml
 create mode 100644 metadata-ingestion/examples/structured_properties/structured_properties.yaml
 create mode 100644 metadata-ingestion/src/datahub/api/entities/dataset/__init__.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/dataset/dataset.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/forms/__init__.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/forms/forms.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/structuredproperties/__init__.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py
 create mode 100644 metadata-ingestion/src/datahub/cli/specific/dataset_cli.py
 create mode 100644 metadata-ingestion/src/datahub/cli/specific/forms_cli.py
 create mode 100644 metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py
 create mode 100644 metadata-ingestion/src/datahub/specific/structured_properties.py
 create mode 100644 metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py
 create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java
 create mode 100644 metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json
 create mode 100644 metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json
 create mode 100644 metadata-io/src/test/resources/forms/form_prompt_test_definition.json
 create mode 100644 metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java
 delete mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java
 rename metadata-service/factories/src/main/java/com/linkedin/gms/factory/{entity => entityclient}/RestliEntityClientFactory.java (53%)
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java
 create mode 100644 metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java
 create mode 100644 metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java
 create mode 100644 metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java
 create mode 100644 metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json
 create mode 100644 metadata-service/factories/src/test/resources/boot/test_data_types_valid.json
 rename metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/{ => v2}/delegates/EntityApiDelegateImpl.java (86%)
 rename metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/{ => v2}/delegates/EntityApiDelegateImplTest.java (97%)
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java
 create mode 100644 metadata-service/war/src/main/resources/boot/data_types.json
 create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java
 create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java
 create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java
 create mode 100644 smoke-test/tests/structured_properties/__init__.py
 create mode 100644 smoke-test/tests/structured_properties/click_event.avsc
 create mode 100644 smoke-test/tests/structured_properties/test_dataset.yaml
 create mode 100644 smoke-test/tests/structured_properties/test_structured_properties.py
 create mode 100644 smoke-test/tests/structured_properties/test_structured_properties.yaml
 create mode 100644 smoke-test/tests/utilities/__init__.py
 create mode 100644 smoke-test/tests/utilities/file_emitter.py

diff --git a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java
index 25bf239ab835b..1c9dfd4686610 100644
--- a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java
+++ b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java
@@ -183,6 +183,7 @@ private void generateSchema(final File file) {
     final String fileBaseName;
     try {
       final JsonNode schema = JsonLoader.fromFile(file);
+
       final JsonNode result = buildResult(schema.toString());
       String prettySchema = JacksonUtils.prettyPrint(result);
       Path absolutePath = file.getAbsoluteFile().toPath();
@@ -195,11 +196,21 @@ private void generateSchema(final File file) {
     } else {
       fileBaseName = getBaseName(file.getName());
     }
-    Files.write(Paths.get(jsonDirectory + sep + fileBaseName + ".json"),
+
+    final String targetName;
+    if (schema.has("Aspect") && schema.get("Aspect").has("name") &&
+        !schema.get("Aspect").get("name").asText().equalsIgnoreCase(fileBaseName)) {
+      targetName = OpenApiEntities.toUpperFirst(schema.get("Aspect").get("name").asText());
+      prettySchema = prettySchema.replaceAll(fileBaseName, targetName);
+    } else {
+      targetName = fileBaseName;
+    }
+
+    Files.write(Paths.get(jsonDirectory + sep + targetName + ".json"),
         prettySchema.getBytes(StandardCharsets.UTF_8), StandardOpenOption.WRITE,
         StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
     if (schema.has("Aspect")) {
-      aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + getBaseName(file.getName())));
+      aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + targetName));
     }
   } catch (IOException | ProcessingException e) {
     throw new RuntimeException(e);
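[Editor's note: the hunk above only makes sense together with OpenApiEntities.toUpperFirst further down: when a schema's "Aspect" block declares a name that differs from the JSON file's base name by more than letter case, the generated file (and its self-references) are written under the re-cased aspect name. A minimal standalone sketch of that decision, not part of the patch; the file/aspect names are hypothetical and the helper inlines the same re-casing rule for illustration.]

public class AspectTargetNameSketch {
  // Same re-casing rule as OpenApiEntities.toUpperFirst (shown in the next diff).
  static String toUpperFirst(String s) {
    if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) {
      return s.substring(0, 2).toUpperCase() + s.substring(2);
    }
    return s.substring(0, 1).toUpperCase() + s.substring(1);
  }

  public static void main(String[] args) {
    // Hypothetical inputs: a schema file base name and the Aspect name it declares.
    String fileBaseName = "DataQualityRules";
    String aspectName = "customDataQualityRules";

    // Mirrors the patched branch: adopt the re-cased aspect name only when it
    // differs from the file base name by more than letter case.
    String targetName = !aspectName.equalsIgnoreCase(fileBaseName)
        ? toUpperFirst(aspectName)
        : fileBaseName;

    System.out.println(targetName); // prints CustomDataQualityRules
  }
}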
diff --git a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
index 888c4a0e99931..04cbadcdc6b7b 100644
--- a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
+++ b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
@@ -6,6 +6,7 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.metadata.models.registry.config.Entities;
 import com.linkedin.metadata.models.registry.config.Entity;
@@ -58,8 +59,12 @@ public class OpenApiEntities {
           .add("notebookInfo").add("editableNotebookProperties")
           .add("dataProductProperties")
           .add("institutionalMemory")
+          .add("forms").add("formInfo").add("dynamicFormAssignment")
           .build();
 
+  private final static ImmutableSet<String> ENTITY_EXCLUSIONS = ImmutableSet.<String>builder()
+          .add("structuredProperty")
+          .build();
 
   public OpenApiEntities(JsonNodeFactory NODE_FACTORY) {
     this.NODE_FACTORY = NODE_FACTORY;
@@ -117,14 +122,27 @@ public ObjectNode entityExtension(List nodesList, ObjectNode schemas
     return componentsNode;
   }
 
-  private static String toUpperFirst(String s) {
-    return s.substring(0, 1).toUpperCase() + s.substring(1);
+  /**
+   * Convert the pdl model names to desired class names: upper-case the first letter, or the
+   * first two letters when the third character is upper case.
+   * e.g. mlModel -> MLModel
+   *      dataset -> Dataset
+   *      dataProduct -> DataProduct
+   * @param s input string
+   * @return class name
+   */
+  public static String toUpperFirst(String s) {
+    if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) {
+      return s.substring(0, 2).toUpperCase() + s.substring(2);
+    } else {
+      return s.substring(0, 1).toUpperCase() + s.substring(1);
+    }
   }
 
   private Set withEntitySchema(ObjectNode schemasNode, Set definitions) {
     return entityMap.values().stream()
             // Make sure the primary key is defined
             .filter(entity -> definitions.contains(toUpperFirst(entity.getKeyAspect())))
+            .filter(entity -> !ENTITY_EXCLUSIONS.contains(entity.getName()))
             .map(entity -> {
               final String upperName = toUpperFirst(entity.getName());
@@ -547,7 +565,7 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
     ObjectNode getMethod = NODE_FACTORY.objectNode()
             .put("summary", String.format("Get %s for %s.", aspect, entity.getName()))
-            .put("operationId", String.format("get%s", upperFirstAspect, upperFirstEntity));
+            .put("operationId", String.format("get%s", upperFirstAspect));
     getMethod.set("tags", tagsNode);
     ArrayNode singlePathParametersNode = NODE_FACTORY.arrayNode();
     getMethod.set("parameters", singlePathParametersNode);
@@ -575,13 +593,13 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
             .set("application/json", NODE_FACTORY.objectNode())));
     ObjectNode headMethod = NODE_FACTORY.objectNode()
             .put("summary", String.format("%s on %s existence.", aspect, upperFirstEntity))
-            .put("operationId", String.format("head%s", upperFirstAspect, upperFirstEntity))
+            .put("operationId", String.format("head%s", upperFirstAspect))
             .set("responses", headResponses);
     headMethod.set("tags", tagsNode);
 
     ObjectNode deleteMethod = NODE_FACTORY.objectNode()
             .put("summary", String.format("Delete %s on entity %s", aspect, upperFirstEntity))
-            .put("operationId", String.format("delete%s", upperFirstAspect, upperFirstEntity))
+            .put("operationId", String.format("delete%s", upperFirstAspect))
             .set("responses", NODE_FACTORY.objectNode()
                     .set("200", NODE_FACTORY.objectNode()
                             .put("description", String.format("Delete %s on %s entity.", aspect, upperFirstEntity))
@@ -591,7 +609,7 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
     ObjectNode postMethod = NODE_FACTORY.objectNode()
             .put("summary", String.format("Create aspect %s on %s ", aspect, upperFirstEntity))
-            .put("operationId", String.format("create%s", upperFirstAspect, upperFirstEntity));
+            .put("operationId", String.format("create%s", upperFirstAspect));
     postMethod.set("requestBody", NODE_FACTORY.objectNode()
             .put("description", String.format("Create aspect %s on %s entity.", aspect, upperFirstEntity))
             .put("required", true).set("content", NODE_FACTORY.objectNode()
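[Editor's note: a quick check of the re-casing rule above, as a standalone sketch rather than part of the patch. The first two characters are upper-cased only when the third character is already upper case, which keeps acronym-prefixed names like mlModel intact. The harness class is hypothetical; it assumes OpenApiEntities (now exposing toUpperFirst as public) is on the classpath.]

public class ToUpperFirstSketch {
  public static void main(String[] args) {
    // Expected conversions per the javadoc above.
    System.out.println(OpenApiEntities.toUpperFirst("mlModel"));     // MLModel ('M' in 3rd position)
    System.out.println(OpenApiEntities.toUpperFirst("dataset"));     // Dataset
    System.out.println(OpenApiEntities.toUpperFirst("dataProduct")); // DataProduct
  }
}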
diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java
index 3d87267f8ebe3..0f2945d5d2393 100644
--- a/datahub-frontend/app/config/ConfigurationProvider.java
+++ b/datahub-frontend/app/config/ConfigurationProvider.java
@@ -1,5 +1,6 @@
 package config;
 
+import com.linkedin.metadata.config.VisualConfiguration;
 import com.linkedin.metadata.config.cache.CacheConfiguration;
 import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
@@ -22,4 +23,7 @@ public class ConfigurationProvider {
 
   /** Configuration for caching */
   private CacheConfiguration cache;
+
+  /** Configuration for the view layer */
+  private VisualConfiguration visualConfig;
 }
diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java
index 60971bf06e27b..df0cd4f4ff82f 100644
--- a/datahub-frontend/app/controllers/Application.java
+++ b/datahub-frontend/app/controllers/Application.java
@@ -13,6 +13,7 @@
 import com.linkedin.util.Pair;
 import com.typesafe.config.Config;
 import java.io.InputStream;
+import java.net.URI;
 import java.time.Duration;
 import java.util.List;
 import java.util.Map;
@@ -125,6 +126,12 @@ public CompletableFuture proxy(String path, Http.Request request)
       headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST));
     }
 
+    if (!headers.containsKey(Http.HeaderNames.X_FORWARDED_PROTO)) {
+      final String schema =
+          Optional.ofNullable(URI.create(request.uri()).getScheme()).orElse("http");
+      headers.put(Http.HeaderNames.X_FORWARDED_PROTO, List.of(schema));
+    }
+
     return _ws.url(
             String.format(
                 "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri))
diff --git a/datahub-frontend/app/controllers/RedirectController.java b/datahub-frontend/app/controllers/RedirectController.java
new file mode 100644
index 0000000000000..17f86b7fbffae
--- /dev/null
+++ b/datahub-frontend/app/controllers/RedirectController.java
@@ -0,0 +1,25 @@
+package controllers;
+
+import config.ConfigurationProvider;
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import play.mvc.Controller;
+import play.mvc.Http;
+import play.mvc.Result;
+
+@Singleton
+public class RedirectController extends Controller {
+
+  @Inject ConfigurationProvider config;
+
+  public Result favicon(Http.Request request) {
+    if (config.getVisualConfig().getAssets().getFaviconUrl().startsWith("http")) {
+      return permanentRedirect(config.getVisualConfig().getAssets().getFaviconUrl());
+    } else {
+      final String prefix = config.getVisualConfig().getAssets().getFaviconUrl().startsWith("/") ? "/public" : "/public/";
+      return ok(Application.class.getResourceAsStream(
+              prefix + config.getVisualConfig().getAssets().getFaviconUrl()))
+          .as("image/x-icon");
+    }
+  }
+}
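[Editor's note: for reference, a sketch of how the controller above resolves the two configuration shapes. It is not part of the patch; the faviconUrl values are hypothetical, and the prefix logic mirrors the method body.]

public class FaviconResolutionSketch {
  // Mirrors RedirectController.favicon: absolute URLs get a permanent redirect,
  // everything else is served from the classpath under /public.
  static String resolve(String faviconUrl) {
    if (faviconUrl.startsWith("http")) {
      return "301 redirect to " + faviconUrl;
    }
    String prefix = faviconUrl.startsWith("/") ? "/public" : "/public/";
    return "serve classpath resource " + prefix + faviconUrl;
  }

  public static void main(String[] args) {
    System.out.println(resolve("https://example.com/favicon.ico")); // hypothetical absolute URL
    System.out.println(resolve("/assets/icons/favicon.ico"));       // hypothetical bundled asset
  }
}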
"/public" : "/public/"; + return ok(Application.class.getResourceAsStream( + prefix + config.getVisualConfig().getAssets().getFaviconUrl())) + .as("image/x-icon"); + } + } +} diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 6b53a2789e7cc..9eac7aa34c3e3 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,9 +36,13 @@ PUT /openapi/*path c HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request) PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request) + # Analytics route POST /track controllers.TrackingController.track(request: Request) +# Map static resources from the /public folder to the /assets URL path +GET /assets/icons/favicon.ico controllers.RedirectController.favicon(request: Request) + # Known React asset routes GET /assets/*file controllers.Assets.at(path="/public/assets", file) GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file) diff --git a/datahub-frontend/public b/datahub-frontend/public new file mode 120000 index 0000000000000..60c68c7b4b1bc --- /dev/null +++ b/datahub-frontend/public @@ -0,0 +1 @@ +../datahub-web-react/public \ No newline at end of file diff --git a/datahub-frontend/test/resources/public/logos/datahub-logo.png b/datahub-frontend/test/resources/public/logos/datahub-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..5e34e6425d23fa1a19ca3c89dae7acdfb2902e86 GIT binary patch literal 53563 zcmYhiWmJ^g`!Iad9R?vapeRTibc2YX0xCnNA~lqB4oIp8RPLAqb+=cyRA21d%p_f0ro9!C!)B;4I)j zR4xw;-5|&yk?@ab)KPK>g18}#d&;`r>1&fcuIx*xifa^^-@In6rY?9jurE_yg}u(p zE8=Bl>O9ZO`%7dXL#(0BeFPbNl6-aJi>km`bKRu?QtRmEbWYD1-N&~RVvY`tC+#0_ zQxRkD2OfIBYpjf+W6KcI0mX^KQ&NBege{M=VS%zIKTZGCSEa<12O=-1{D?nw3vilk z=LsEhJ3q}Dt!o%sYZ?m7!eK#XMJjWl>XRRN?(!d-rdLQc775`#V%B7v zBpnlTx7z#-B#!P$kwd!L5INKzRJ5}CQev)9!O{EB>lK~4A^$;sjm7V^TTu30!UG=x z*3O}f^!DG5jXp1X1+BsY(NAx(l0XtyArk28lf9RC?)QUTsKttMYN439wXE}wg7#>> ztoK^0XBVNM4FVp2c#m%**;Kp-L!g2vL<-&Hxg~RKswg@1o3a^#>=^^DDE4{!`m0jv zwX<2b9fUoH#9Rn(zsqxsu~Z(CeaR-IT&uM2GOc z^UPuI%Tou}>FfXq%T8dn+9%PuLY`2V{>A}+kbOJb6lY+g$BwOQp!*+eEff?*1s)c- z7;vSDo@c^fb%0FAQISG@N#&Mf0@Tk#c$@m3`LE9AlcgFiLufcpoWmw3MTr)YQ35>E zDZ_S4+4y2r8O6T%G0h~VhC{waOi_=?g|RP@+`{vIG1JhdPHzN9@1eTP@+NI zcUJozvgcp2P@t79!P`P&N(2Ps0fHI}nC_>2`H;OW;z2J|sIzi1y4NvAO;@xfrm4!=Ag<9siNY&WHDnRoD{_-mW zLUfUwx}G9rI%t;zT)VSi+eRuoIaEjxnI=JG3_NlLdnVM`W8cFMGD1ctN<~90q0*Fp zpa+t-R~cKQy87~<0#1bOrZi(XQ8O_C?TY~ISB^_hpaqViYH~|f$m$<^zKTOM#Qcbk z2Ew002)rBtB2KSRXe`;6qs}hM!Lh|dYmp8?jg!1YtfYW3&EafR&B0I?o2H@a#qfa4 zFw6Wr6cuFjS6-VGQc@otcdtniudt!}X~Mu$Ps{Tt_Y}uH7PINT9SH@&bpg*EzJ8}J zy#hB_H&6}iZLyOOU31%Qg|IaLgUtY77cjMNF0pBqF?Y%f$3yO^%nzU-1*{W|nNNj^ z-uG~zUrR>3L|7RnAYk@)NOnHA4@h02mvkhCs!yq^k}DsKsa*`xB3Qr&?qf#|8nYtH zAcPb(kN>S2$+1HG1E`b;Jf=kTY5JF9-um-k7KH5(+lp*sff!`eLkpPYT3>^ejObz; z``qZ}ay@j5KXmI^1*|FhV2;96N*36_u^asv~q8z$db&>GLl>wLG28X^}>Ap%D-vDMpm>#%|C z90E(tY>P2j71W9}JRDlcJ2H;m;t=f{YJk!XhnY98V^Be9k>wdqLw3HqLSStb=d)LN z@Bmu*r#gLwdYKPrW5g@$=$xa^oovrA+sglbF32Oh2xZ?JaGuLjp=2pEs+ciUO+)wh zy18(u;QV*e2F$Ll5@lol=>y=VVuw+^$i;dCRpV%w(P=~MZ>p24ps_3%I0 z8<(;O=Ck_XOs{ofhBSxii_LfOnuwj#@f zIX*<^8)3C?PnRAA6Jp`F3xs z<~OmBT0%nC0S#Yuss!g(_4-K4!#z0^y}2|ER4;}GTu|}8b<~5c_#;l#Z}-m+zT9n& zXI$&bQ@`k6@q(^bSr%P-w4JcE>ZZ&J-65P{qim6J#n1TI_ONwHH-fi65H#b4A>~2Y z@Ote36st=X{9rLdKuWyJI+)W$hSeNvNr%>1*e0`tNC^1YD(#Kq>?mhDTdx6S&3TN# zE=fOVMYVfIk+RYgs8t2jN|q&c6td`Xdw&nF;!xZ>_Tw29-$~`B%|0i9rt2H3$K}ED?SYj_o1$bLXMF+z)DMxqYcEyk?761G7(y0WJQ3bcR zp6yTcLEJ!fvR$!DpcQaRo}g8>-+*!s-z8-aivdBGrI)%ea;u$ftRnn4Ixqzun(@c; 

?*5y(jBpY@{v`pxiG=JUUwS>Js z?$i$u&=&WL{UTKA)RUa{A`ThGfk4kjdF+*~NWtVBlqz)7o>5D?Qd`yFYTcF|X75vcrcy5O zTaXFSCu^8bV8gMY1nI$K%l79VQ-U$u2X+2#JKE5rum`W8L4rpvJ^P z&~DixXN+WlQGYI$sl3>hNH&&9#WAxWb`%Y4z)bgt*Xb_JlX9B{ZreL>yp4e8xg{Of zUR7cJZ`k;ik6hVUw_Wg_RUH#kp;OfDJae`RVn1MuqpZ%nxV4JvY--~@&RzYTp!RA& z^Mn(_I5$6j$1mxm*nUL0Q}yg%#oe~T!|!lI8<~^#sWj+l%Xa3d2er-*YoI)A9s(U6 z-^oiTE`Gs{D7SsDDbIY;{mpEVm8?l6zBI$xpIsL@-yI;yf%h&5UDzewx53j z!s{rcQTT=aoCG(7x}k*Pq#7 z&aCv|VS@zvCQ4R41u{yJ7){2sA5Ui0x^cWLRR*G=r;cN**~YX__3>59ZIl0yL|6Uo zJTu`)%XKogN)Dzvg|Q+>u2O7=?dx4ZFa=lNzN;f|vMclxjq5KiHX-=qgn%#-^Sbg5 zR^hpdmy2;F8ROo2Z{NSAhtU_-%+p|~{qp*N@T4tVqK$VCWdHngPWIOse!=dvKmlv_ zt4q?G=5>;xYgEm{7Jnf;BUkoe79_zOdtHqqaRr0Yk)^NZy`r<3=d9-+o9CY;#<`;6 zc7#+rZtV$y6feXskM;$`!bDo&u+Vkot*|?4ZJbMy)~%fP0grO5oLZutM-6jVBWW3o z`NwP2Gp3%<7g{hvZE1lkgrWRt_Ms?4i>B&q3=dr;-nKR!@d$Ma+#&En8EEh41$GLb-O~> zD)ja`z){@DLk%10U~C@RB2t$+M~8n(cPA-<{G~;o)ZOWynIib(DEPF~8RJe@LTNr# z6c6G*Iz#wev{YJS8|RoCjZvl3E66i_;xDmV=5+EU(nNL5fw&w*(`XaQ@d%ye26)?y zxN*eX1)xmcnYB94%om8E!O zTc6B6RO?0c)71GBc$k)>Pm10b3S&F2Zk^B|Ux&|Z-q^^%+Rk*VVyK^EK#AQ#jV;i- zu%F-kWXZ)5*p{#Ru2k_>UV0ryb#!Z#p5ey!1EPOLFBGf|1a?@w8^t-P}{-kl3 zKc-px<2n!p&O`qLx_3=d@U=dF-2+yVDNzQd5f(epjSG|t$>yJ}(~A%vh$#j@hvBcC z49F{Cfc|)0>WpP#f}A$k3#Er;>0KC#kha|Mp;B*V{Qg(`&3@AE;CiWA4Cr-~4C1SH zea*s-B(J-VD`^C+Jo}#YHEUA>gd)B0o0&msVZXr7U?{WB%~<|BE*>bF!WiAht-RGp z31#!)o2D#j`kAt20AQ)Y^mRQa&moYy$(0*{8}<#h$Yg>{L~y_9L|ENnSu8z|dVcO1 zh{q5lyFyNRB7y@R6fMA>LoSg!36)+Mt7?_~Vaqu*4wc8jSby&n4ky#^2RW&YXyGGCF~Fi2-g=ci08|OYLgo&t4eNy9^3n(N^yUb)5Mf z?@mN(#5c&vMeCw^22SUUx4kiqW=X%7K~T~jwo5<_Z74BtScV5^24^+bHskZWPl_o` z+p3Lr&JZ6Lig)E``Uh-$d@r=J-yk;|c-dvR-JdLtVm zM*+;raHBRQSLEyR`S1TNASK38osEc~U#pIyjBQufDgx19tfSB2H0!j~P|jrT zaP>3DgU;W*n3G=akX5vyTtB&yF%>FO!nKg<8fndpKLhtEu@q&EyCJMD*UyKf?SMhW zOrIQ?gy2nT5#4E`aQ0c0#KVG>ccuKZp4^Kej{N%c zYc_AM7Kr=Kd<^sP0$&<&t<5wcllBPovgsZ6Y!*dA`%JA7{)d0oa_XnJvrCKBf9jKE zoam1BgkS0gbOm#!?qstcu7^UpFT-B-Jw2+jK5dmw+l-NRAeq0Hwz_lV?63km050mn z;CqrkY+TkbP@a3X1E9yC5no#iQp7~-XxcZu3rL!x(K+7FxqADRp^|hCl*$)8d9V@Oo-2i~IOMSvb!B z*DJloS;Cv25?_B+$6+)B0GX`7#h|PA`51&2U18a)^GJm?8aqZ$fu&po;oE3aJHzR= zK9Zjf)Li3PURiYYD%)u`X3>{)$2rWLFFv9!R1;fxV$GZZ#OL8%Q~LZ0-a=dESahXZ z#g5}RFAk%(tWB6d_FZsus)-6THS{co2IyZ+X`PUqyj|EXM z3$v!1PI~5D|563aIX+%dJp%hQr^xFUV|R40b0SQ4JYda=%_#7h)s_F-AN*(^tF}0O zU8?-9216F9i3Tfay5wk-4`k)x7zcCI+g4kUm))jcp0;~3+J6EYAsPCbE&!=NzZOpZ zp&U&bnn?ok;;4d~sXzPT_ro=dU&myz3v<{CkJ1sJs8%s5ZS~SV#a}j2);5nz(*YFv zmDj8OW3eXh1n69a3&7-xXJ4Sp++`i%U_8fo!-6Nxed=dDo^atgzzDXhUAQe%zeTC7 zb^K3FD|FIWt=_n8+9UYd4EuV5rLMfcCzxx4{2ke5v=>=Rk*ejZY(36ntAeCq;a(+( zIt3XA1d2`ui_Y;6Rz6De~_j3nC`>0GOvIw5uj4RdFJk!nG!-q`9m9H(G zC1NK8Lh-fqY5L&h7A7{3Y<-tvB+Ni(+5w4Ts@9toqT?YEq~)X0uzv@uiEEN>`6u}8 z)`fNxtq$BdD&1qAY zj3@ma(|%3;$5CA6p=*hIzLl+NA>T%QgJv#CUXv8M$`4vlXy_?I(#N>>L4*Wdae8YN zSIl!*zK0nOiPqjM{h=I)WcD%`qkd{=&#P{Cq&7wSr4Tm3#0S!|>Wt!*1 zF`rr(FjDFK!*fq&a#?K{EeqD`A;JYyydHZM%=|Z|t`FxnEhX%~s%a|oNFvks$}8in z>?QlFS1x8`5v)1g#yQtDSUmXC5tBEHW5||w%hZAI7U#7AhcpR*Gu}#q_ zq4I5%bkC*_J2UqxII==<>incd?kvo%4gO9rv{9Ll6Cp%{I5oCh=CjqpAd&fP;j>uZ@y%+JX6SlJ$Y^) zi0v0CxiJ;}5HqmDS%&rYYSSH2e1c^~MWXa8GKSG%y&1!sk74lNS#mqYe%(@r}tG=CbMdxc!p{+Msp}xs+g`fq&D2gJL z8NAD(=&;_M2#1BvYA~^#QuC9NGk2y~AoK`q+~tEv*uW=dc!qbtX%l7EvE%cjX`{}7 zGiE69R49#onpU#tyEpet$ZH?m368PlLoP9HnanI9Vz8Y#F2Xs@@}R;3p^kL}2c67Y z0ejIPGgTIwyBPy^ELJ$n;&x5HZfUsh&4;`igX6T7*NI`3+0QoZS8zf($qx0G6TyI%W;h$=O_!580AJe;$)_s!NV6-cWRVK`i zHF+>7-2LMmGF;m*6aD>xT`&9JmKl78T#gL=W@^3=93z7ViuP0 zbkXS_c2e#1^j8@)Jldp&&a8P_&5ArHO*2fkT!x^qp^#%rxa_xp~} z*ImC4!?00ucnyijd}*ZpJ-mIYtNw?!uQe7|Zx}zcJM|(Q9KflbVEfnD@b!o##)jQe zeEEbaANlh;)9T_T5m4bpr~zex8q07GecG6K@>3s)u->46Ir|63cW 
zklw!5yDw}ZedoXp>e4}?p>o@8aWF9PlPcJKIn13jCcOzxf|<83#i71d&g30gptttA z$Z`_|6+Ggsl9H<3cu`Rvo~rT;d@rRJHjDCCE2l1x5?EN%m)7f}nVll`&|CE~2lZh? zZ7H_PeNkCe$fVq``GJ%{oxZ~vsH_fGitSOT1&wpgw#6Yjp=z{tQSmD!5>O1SlbfcTInRvKI~K;~o(<jM;@e#rwb0>E?P5lrQWb3A!8M8C4{;c$r=#Zzbqv;W+i25n z@}tpa!E#SPi~6#4oQJ-}W(3=~T*@E(^=Zx}Sb&>1#g=Y>?_45@Wg?6yS5`U6NKzZq z;{qj5#*nQ*2X=$&vfI~^yH96Y4APEb*Xn~KtRqQ2ppw^g=gHxxYPgeN({Gm5(oLeu=w z+(Grhke(!|4PtP3##^9g1m13w`%L^TZ{G7Gh!k@6guY2O(3~a6<~D9qQ{!LjbMM4+ z&KiI2Shz^cK1b=JZ7%SJ&oZKe7w5)q(+3KETkZiZjdhCgI!k)%VpI5TSDZtU`kjoU z(dUCYUCr;`!OnvA&FZ>oB^d0rkTc;Zq{QfP05i38M(rddoEZc)My)Xt6JDl9bMOX=;)OA&y(QpA4W_e1&cGE{V+xTkc0kf-sZNhRJhrXE*SlJF}L9T7Y9+j zViS0Kj2c$DT%$bI^aqok;&OcbKxX}paJL1xQ=pa(IE)GTZZrAtTQ*-^d@b25u#if; z8BUFVn+%~!Xcptd^0B2eo2IzJ>6v$Y^kDu+Xs6z%N25lZu(Kje!rx?W?D(hotMJ^7 z&F+J#Ue57=;eBaeb{HGpN_s7 z$>XZH-s!wWOKH=Tbz_d`IOg+j_S~{DDySv>@qOpv=Bum8W37#FT)(y!XU6aJ&do%t z&2!+llEW#TZQ6{PoTcHJJ-Kd%eY7uwjm7P9aPcMMSyeT;TcziK&$-76DxX3vQK(NI z05!x+sI?0doCP~CP;8ArNZuqX;J4(HM>s26b1v$fZ{E#h{ZYQ4viYfV_vw;n-d1Z< zLXk+J_O?zr|Nry|UNgI2Cls7aY1a!xa8 zXmHA~_LpqgVquzlwHx_d_-S0#2xz_s6jy=;2kn^JBY}~%fsrwzM0WXO>TQpnwLgt| zPUna>x*Mf$zm?ORG>E_25A18PNymcH79`sNpW)dX|iIn|GKC#N|gs z6&p|v``ou{tMANM>dTvi=%Y-ov2|ByoG6QJE1yD5zJsY+wuhl`Y>MX~v7(Z;+@9Zr ztBcZ%sXR-m$qNR`_sNFRAhh^;zwTq35H1L}GXS051)5qb>Gye5j=V!%f_Fx0BOst{ zj~RAWQ!%DpIyNkuciDA{lqu=K)E%|43wMdhvj=)!)A`dBLRn9a!sJ%8hju7^OeCKV znK3xa^k(2`=MoxKdkVX^n(V>H>DkzMg~@`zep9D2l&SsVcp%&9O}4p23{{PHKdY0s z_PQA5EbDnTVb2T0a`@m}_LYvs;C%5aZ^ZcivCVUadFUc7_OXt+cgYo%3#<-JlR}+0 zxo@pG$J4sFVBdFDfF!oUrY zdsw*IuttLXWk_KjSW|r+FN>W|STQ>zWEg^o=egzF$lTl+7y;FthkLGXCG`%(Nn=d5 z1|C3#NSMUFA$kAV6eoeN9V|I9$na4sQfX|ImDSKU{Y^F|5$esYwZ;wFKo-M#bXU$E z)$ZVgw0YZusRt(J_8RT!kMNdBJrH$GnyJqV4BzJ{jF0#u2M&Ui6cC?=yx)@exdRqz z<&)z(A^r2mXu)i?jd_>34Ya>@6Cd;|9K)Zo3_Lg0Y8;5uz4s8d9RkAQb^P-WcOqCF zek^U7ZRL-rJDC+i%U6GQy%ybAA{Sn0O?L5BqYYbE{B)7_mjfn>ZOcbJu#aTTJ0ZVc z?}~EsYXOW{5dXPT}M7US)DuJI>ycZd!}lEng216vFhrU)Y%uj zrw{noC~>p{nP?XsM}I3jGWv;(X$C(g2Qa@*v1;Jh{3V_95Y)1ISPmo}_tk0Qd2?*J6c=LJ+U_Nu>&JL#X!cm&F)hZfk46B(Q>WT2$@ zbYSmxsNR#{!x!61FdF$@cUxF2CF$-%!6{C_q^yh=WL2g?)o<3fYdX;M42uerAY9M6 z+FYLqtK@ai3tFv6KFqAyUwbi2Kx^kV>yE)mxPwx;@3o!b{PF9m_5r@SJf*HcE? 
zGZb20wB)MuWALp!ReC#gv8Z6#wkHmh8;^Y;O4AzjyQg0jAM-C8Oj%{#T3-yl8FH{) zjA0u3wSQ>DrT%mp3PrZs^p`v_em9x$++sjs3ur{9igUuC5V@4N@U1J{ED-Q;PxI zSMS2|+w&Xr*QN3kE4+k)BPM%c*Y3W$OUB5FeQvj}ml+Ruo%Ry)C~uS}6mA>tk+4@% z8i7!sf3gS`XV`=b_eTgJL5~TR@ zaD+o<3y}ml+g+FHbwznBNnD{&AoQBD<_~u6h05Rn!=8`5EVup3C5~Rv6FbC{fobX& zYU>xVxf%cu3{Q0Amt%bSUKP}H%3YYSRW*IZ-~Sc}JUOy_*Iz48H?fP|xGYeQ zS{tZ#YA)L3pX7}q`)3Bn?98yq|FW&GZ=Mc|H7f~`1+60T0$;v{x{*oH$(!qk{g3#) z-or;SQb41B?Ughkyx!jsWuW93Q>>-)1uQTv_NCVtYS)6FHKj)B5l9ua#67nZ<;wCb zO~Wz6Wn|$GXtWz_*yJB{rNl&aLY&B_us8|nD;j3l8^7Al;RF6i@`i6Q7VP)}{T2H{ zx1FQ%=I70$AL`el_n%cL;4xufzWXq{Pu2-}X*SCf%FFK`6-cCZ5zgUv4GZ0d<%+^` z_3O&=B%z|?usrthdgOvkB#PyH6Ry&GMFeA()TRs3vkD- zerx9BMfu%pI^01!J9DLYz{}DxDs>f341J`{O1GJ#vc|mSaO3#>i&p***dBrX>K|{; zWq9MK-_e=K0YjN%SGN)-X)6yxQh)MPM+v}_{!Hn{D^|?i%=M!pkI=gTk}0|_#=U!E z6&SvLUFcihi|M?1_zajB-NF3|+2_NGNl5k1pVAxf-VR*e6o}pGyRvY>Dr*uM2Tjdi zVf*c=46+;)%$*>4t zaOG>9^nGscob@xjmsU)HW!n$?tq})WC5T<~Ek@sUMSVM(f3jM;hcU#B@5}wb&0;lq zi$y+~h_!O>x#M<_Mh7HayvP6EzrcDOImfFr1jd&u2vKfYS~=XIO3*F*Rk zgw{qoH)8gY`SBo-ddkFkUQFd=p~{KmBQ-$BGD^oe4BIkkZ@FaH2Ml#byD*aKw5IP+ zTCoAHrn9dkqIFkDhf{2r!mG`#z8K1hMS%$4rycX7V~+{8dJ|w$4qTz0WGh~YKOHuo zE}2qPeA9i8G*qmROJ{pw1&Vc?RhdeHuwY=FbmgCyk=8P`8wHw&^n5r87AK+N`k6&G z>ftR<|N5do-@Shv@4}eC_rNP+(O>D;HU&QU_Fw5N@yA@SvbuVmY4T0fQ`z%Z*4U`4 z*=r=N*?^f02>ue^QNx_s{s73XzzL_FvMV?uPpn=x2PxAC)LlBrt#wc`m|pbf+}SMn z&f}?o&a&P;BgQF>hCNT4`eV$n#~ocPC-*4Ec>bT7zB(Z4r+N73qeJNi5x#_=q;v>^ zN-23L2dD^0N$1g^QUa1n2?$bmv~-DdOSg149LE9g;`4j?ll$!M?CjLePMAtB$+N3t zJyv%4!D&`S4W`zCmeqta&h@1X_9y@26Y8T;`;+i66xU0>sdt{tdVae9fsvC-9L%zy zkKjim$>>2(=x876S;n2+@v@sPNmVi=jsI4S7A{g`?~-$nQUPryka43`AzHdPYP`#! zqh0UwnfJDFa=uff+7^kV08BSV< z=v}DVl`GOzbSfEay8^s?t^9L~vRMKTxW^4n^=i2tN30xbjDNmpXjIuR!m8Wp`BUfA zZ3y^!J2{(i=n<#CbQ!caC0+?2=FmF#97&At_J4fPe7c<9h>^SrKc=ss@uyDhRHF0h zWoJo->VKPO=;g-8d#{aYI{EWec%D$(-UpGK3%@2o8y*NZ)3rm4oQN)goYbIYl|hIL zeA}wQ=5rs8=e!)W%Pw3Wh5^~em*3oQMZSsLSC`j{U1|++(8L#5aE|y$6NnSMDMr~) z$;zGKKSDGYMiV&l5WsN5pSrwLsWN|br)8ufm>+zaz=L{%wg zKUzv(OB*Pc|H>o-g2SMc#PVuEy(A|f1im06qjv7OX?ij*;+fnl|I;&pIUL%|VGF18 z`k>MQK-D*3_wkFf4>kjunX+df8~7<<@jwO}SczoM3;4WgU%N%wP@Hg2K|OXU@8I6v zrG^TsV&QzED^Jm=-0_z{y_+vT8aH2L@(W_RBYX~R7+QY79eEEweH5r^A~bkIZU9{I zrk(Ev#pOECy!xKBL8t596>RFUazUU^JljIG~ z#T;BScXeWpE`O8N^b!ZDk{fBi1`wAHDZTX?1r4S%RqCoFQ>rZ_ho~MN($}*AXY*zZ4OeWy(&_PetVdEJYkk-5C=J>e8iMb$QvNmK-4grzB8_*)YcH$iwNxKAp?8_zVwp|NyAazL}9xuGF0Ej5Wd%9X~G_dqr0;E_3|7nPPxQQT>=?JT zkc~a$qCPh5U$&(fuRQkLI80QvkEJ2q0`Wsvb#Yf^<6rY_>be}J8@hr@3|mkC+L-|L zb#7m!R~xn`;1XtByLf?7xv8>m=yuu`veT`lj(@ zI=`*lrTv!Soa4-IruNOSi_Jr#!I?FUALU|*OxHCwX0Yj*UhHunzX#N8hMmvw?3wI7c6DSQ*60i`5VOg$8pu_{!FlKop^F zJC0aKN8$HEnPbxI{s8f2D?zduAGVvN$y-gcC4GTUe*V$yY<<6~LKA8v5mGu8k2j?m z{Z-k#dJp;2od>-b68bf!gnUJXu_O~3vpeHn#-;I`^hTG?<>`h&p6`Q00sRUU)69Pz zAcf9G7DXE}eryEu>~SS86f#qFHqp7CP~0EalTR5^n?VnQ_@4M&xy zpfj|!=|xUyqS*s_f8qwZd_idUn;+W=4I1=5AzLEsPVB0T0K}8~3LAD64=&Pka$6}| zD3mk950cGZP@>^T+oCSgwLtK^ycxgy`1}UAnJBP$Ogz&0M%;8DvXVSsfXWF%`J_#j zLI&jpf$ z$ydvAzd!Ct`R!g@*3f zUJi#;b^x=`6ZGzGLF&l$@?UWkV^Urp+%vWxNgW6|zW*p}(SZoXI&ke1eS3ENv-fb? zn$68O`Ref6-8qt}=n(0adtCHnwwFqf)n~Ux=~Ey{6Hu-*uVA_k;$>e~%9<;Ai6%by zxc{iO2}J6!u53r8zA!Iv<}^x5=W%Y%Q1w(b@=7~LtxV*GdAg2Co;5ZObwT%lbO0@- zLvU4H0Z|O^w+0X-Kn+B2jDTZ@4)FtH^Sg6Ma-;=Gl}hmCv=&}1>Y42F^0<6Y;UV&X zs<6zQ|JU`qvX-gmUUMuhl$RdH;OEQc^Ax4c3|IzqHq#KCQ~K+ zkc@uN6Em!%i2I6IaK=dV#M0pw;g09M$hz1pN_;q{bZ^-7>49<_c>I4TDIn6|JenZ8lKCAi0r? 
z8l)br=+Ew05o4y8UekT^qjupjatDlf_FQCn8=|-x;gl^Q%J&n_kTxpQyH`9i%u(4H3xOw+&nYmEvJFYR7%BqlNfhmK&~l#zvX(tF3eOL9 z+BLG3DGm8)12&=l^2|wM3y~rVcOu|UxrHX_HJ9J){`f*NsjlbIhEw@r zC?2UPq|f%I8}>t+KiUR0pFYSviT=DNKm#=C7C5|jWFD021)N5H3J=dc#giJ55Jf#O zq7Ph;pp{n4vup}1YdW0#6@8E!2vub?ZR`=36L`ihBv zm;VI$WS;WQJBe?yC5v~%b6bQh^h_YPu^;`FPBS%#)R~;%0l;9>!LBHaq-W{X*bLDV zF5y8+I#vP8h;|)`03+AE3(`eWXu90pI&iGQsiW_&rYJl9$^$>1W$8?GsNvTrK_z*MY zrPtG)L`w=$4#OZB&sgf;lEeEeODqG&0?gqp`a+~6iR-U#c2BdS`oISGi;WgokLwzwmgih2z`*j zp44(ZFtzX|F+|Mr=x|;4by@zoK0!jIyjNQ0jeKZRgHe81ikF8OWt+L_an6W<4ylSr zdd_!Q&8eU;b}Mp1$i`jFSvkgd^PGU7w$wJBnk387CZDYrP)+5als zTax#2CR${6nJec#`{XkQ^7);z(zPYu8s`wQn*cIn^e?fy@>4T-G zJOH?iZ*jRs{&m#05`Je)qs`O6*}X7aQkD_K|c;RfzoKRd&> zzH5xL$Ce$=P1Mzs3$;UnNbzUN3JZkTjSA%fo_ zlml;@9mme|h*7#~&Im$CUxeHDs#qnC{sIuNatw6o`;Odi!>6_7)3$2w;3&?$UNDqCLNImsD)IF@wpA_u@)h>g;k%K> za_g<uqlkb9u3|sf5U3OC#SSMh_)-}HKWb|cG&(kH#fRHT_pFc@&%`j^7LJuMwyuAK z!st0)%Jc}dOoQj`_DHwylKr;inYaR5{HXHH*S>Z4_r4{X^3nKjpLj7&e1|qnx&V{e zI`gj@P#i_F__Z&!^$Ri1^X4E2H-j9)DmIx+=Y9V_eD6h!o0V_2p*@x_|I+6DUV-NM zS)s6l9Nz<|L6BE}fH&xcM9Ynn!L24Ue02XfPv>=mr&(hCL{$0lewK&?NlFPHC>EkG zxnO9bb>Q}}K;|ioEl|!G@m#lTRY8dX;?lH9%idiVaWn9Rgt}>@B++JoExDNG4iy&M z(EO_wbpHIxIF|rAZ?i=>K^)6*pB3>*h_?O>@0}lKhj1o???DIdyywQQq7bUwnW1=e z=wG-*@6O9icY7Ft(=ci^1?=>IpRz%I=s=zJfU1Z5f$?@49)UT8>c?Q8i~BkkIS`V$ zix2TcpRVUrt0u(-*`6eZ1?3YrPJRk~kxsVD%YM3Rm`e;!JMW<9aS%a^?3HySzCTp( z`F=?rqDqA=ik*C?;vkL_BC6mP7qnzQYpAIY&gVWbdcA+<7{t+Ql*GTs@ME1bILIKl z`rs9@I9)*HyDH8mSoKu;Xta*5Bkb~#o;rFYbXXQ$!;Ae!+)CL8vpqC0>1z|+-K!Hn zID4hw4Z5k~o0W^!#~Dt#KR!Ht7#$2hnyHw+&yG-7KactmJYOK>eMSr{k6BLgFq8`N4f+8^_<2ar=qGvR(9XMB-Y%4)#X4L~}A24>HjN z08=bj1J_NUuyQPAJJQ}%m8{15^05P(LT{fabDt0JYx!%S4S~(7L_^at4(75SFe7?5LiGI2>@Bn z_`FN8bai;$(Yl}2T>q$%`@zlz8H$o+fCt)`k|1weOxpOqZ7-b_;%@6 zBfe)}QSEUTY_Z7qSNp#7|XH@AQOJ%0Gs=@S7QKBYw9x%YO` zZT7$lEjIY2u4A%y(_|#izRyoJ8)ux?dL%vb4B}WexT_@@r^G=@1afHXqS?vi^Ic3b zxbF~}U6>TvGO}S)PQYWK^wtrRvTIDR8F$mq@VgckEAT{ z7NlMhGh>b6+6#ZIzpsqy{^;-!Gi2wBwVG$cTlvZ0&+6(GQmPMj@)X9iZ{;zE$8|x4 zeJ--vD4nfuE*#1{(Idj46X*A7hPmI?kR1Wn=Pk6bM7wH1-Hc1NMNUcSz#N{xlG{wJ zhpcCJ)&f4wCf7-s=!*QTHtP#;1>Fk+I$Dd%xlU8DIgW!NnG}aH^Q_$MEJ-lSj=A(a%`?s*=@Z5B*SukrZ_4~rXU`zj3epL60Ls&IjXTTG3BBc@g z#%+=SAloUH2+q&WA?X>D8qHo+sqpnmnS(4S<;kU#cc&wrmnNy9r_iEU`gv6mKUrS{ z!X38rI-P2(baTplhyJr8n}5YQik1dooH-v8xCzWIo8Xz>CS0j_D!0gjXkU=j_n9z% zk(ncIF-zXt{0atk|NntGyx(f;=iaelC-*ANPerDdR8Kyg)WsyIuQp0O5e_z|3^sR+ zOU^VL+Bn-0EAYBP{56lCHI~Lo^WxWT*~;`>1jhE^p^%TsFk5Vt2)mlx3u=1<@5kBB zW@tJQxunmv2YKls)Hz(MMJadTlyGQj+MFTu0ors99FGn5D(+B3Zo_Dr`$O@eY4Ud)% zAHhabul}O2@4Q?yrJuwLrN&lCq#1e%A@V3reR6qXwF-b_!8fZu-q$8_1O$~YIK38QC>DNcK!&xx;{dEwDAbHt2ubZVnEq$B3N#YRBF3R5hwB6BO{47$nMwuBQ7l+b( z5XQJ(y^vN7N2Q2COp4-f6S83gCF+XC!_i;}NGgG$uHev)c4?E|$wft#!HIBib3rJ58d=KRB)gnpEW(y%kn3gC z;v^9PQjB|r1{F5)?|H_^_Vuj~)2Mv2emFh$|ns z3}wRk#p93kUc>uOX}9v{=u~zym1oGBt{=7?BS3;iQ76LSMTz$hrU;bFq4Y67k3L@c; z*lY$;As_-NjIkXgSso{Ey^+kREow||bKNKnZyrY;ek^&0cRCe^{N=%BTeN&wV^83@ zv)2D+eRl7BGMspLb8i#NGKi#Qq&KmXA*&Bdap8s>>QFZ@M}cI~4atfAzGL>j7yre2 zRcpn+i@jQvW$|`pOZvfLqG{Uc%4RduRZwlOI(%oQu|I}iB+y0ZA2ZvYZHoZNX#5~6 znOr`k*HfjKOBj(p&)RTg`;@wJ_&b)A(^$Bmq0uW_x2pUvV%f0C@< z1#;Ccp)(#O=*jx+#q&?gel`lPlCb+ZbKBExdmB@RY|`h{mZ#@sMU@b_?O`aBxvOaVL|ey9{MAD*5O7ee=kYR&#u+AX-Bm0c4PV5 zqcZFLxSxo;A+69>9yGTCxBX6ln05CF;dSw@TQlT}dv zkm|Z8Odnd**@_e^rjJmMsip6RVUPcscj+d*NiH4*L6JR^Hba1K$?ZO4Dz3 zPO%m9&r8!bEEhi$>%5z?JhLb9p@EMTkxIJk%yQ}Wbp^y|c$TX$Lh(1pjj7_leSFVafV3or1*^U9=iqdo$d7OC!pFErtV#>;t## zczBI@hSD5U1O1xQf_*RuTu^CCoSD<5N?cH@l>>_rXve8Z8 z+!N^NzRH(VAVjfF#Drr88q$9^Q0sBn5~XBmLI-FqBWmXZtAlziOm{I8NAGeL z=*zYu{qCVV(gIdek+_}=zn-G6SG>6U)SM=H@Z`uI(SSY)VLmXSzr6T+_u`&I4)A*x 
z{*qB!|Ag&CxIGzW{>$q-$6ox5+A-qqyj1+L!SgSVBh(bWd~Y7J6#8QSy2IJ}G=7g1 z<^*)0!e$#djg9+iD`W)r<#OQZX-{4dO^Dek^kcVs;}>vpsAKDZ9k-DzQ4d?<)i~IH zrC{>kfw2S8y;?1>8z^7Q3=r+4mOZ`#h zx`!}=DpARj!S2l=yL@5EHWzuTye=bMdMAR;cIRXzpvRS;BI+5MB}<=$qA|T9Mi4? zcqlzS%A}R>(d(-NbakofYDrL?{YyNMm_)1I{g0-udnhF9s40v#)K`1el zLTo_<_ye&+x;c|3DKY1WhL3&^+Z@W}>cuD1lneV!qMPrF1n$e1IPwB5k^v$ z4ofh6&PpKh2fJ_O?FAAx@3clP0n55Kx0>G7y9_WfOV%DSAE!}PIxthM-FC+i=#h)V zU$Y)>2YUHE-~VZYY<0u@2qcvk-l7@E=ji<_{!6cQy}8D5D0Aim;zT7FN_^zUyb>B&E9=_F%qlZf(&=-B?x~e&%U(?ARFfJBOd#)cVBJc zhK@vYxJtr3%=*zNmgQve@xA)Ox*9C=Z3mo1s=m8k)J+mc zy@wnLwZ*@M1Rl7xhHz||Zc}>vf8wfW9*2@JUxr+12L()r_m(ADz!Kz33a@4DxL3TH^5j;wX9U zHw_IDj(X|hfNqdX*6f(V;hnkCT%BZ7aC08gkGl}u^7-0dCF8xhEln%`Pf4hQuq$t1 zjX#&fSQ{){)eRCvt;pY%D7@GnXTItWdRXK@fGYQ-HT$7mdbMfE=ktyw>&Aj%7RHp| z*rQzTqUAZt9x!DtIMu~Et}WerFGQ+hPpT84=`U|GQBFLM>pz#a@o{x1`t)Isw8q>X zBH<%EajHWd;9+lcb#<~M)cIixhUaL5ALi9vbtf7ki@OJ*2wlU0Q19$43baA3)cjFX z_LvVF#6R<2;91?pO?+r}rs)v-FyltDt}hZ{q_0jHbUWDvD?{ z)($-ioB69`^+AljFjtw%4RIiaZ~ozvob=?=+UB=6zZmkuCxZ^FCs%!EEXP4+6{(lS z8_snszr)=@6SgdSJA*P$xkizz0>(*pNiPW*(LxvK3ccsv*#p{F_`&@9hfHm2n5FKe zL={kiH1-lz7Z(w0J3E%ecc*)Ak`wu+k?~BDSgxdi;NHZ2+oTg0hgOUldL&OuLj9d* zV$`8|$wa3Q-+R8ifU)!`58Oz@A!bvvH zI4;bSBQrd!BzC(+FyEmdtm|YWEe*b`JqX7<9*Es(=uEm833RDD+P73dTE5A0s72}I zi#MukwML+94U#@K@&DZX)&KTpnIM*?FOC2{;XTw=L@6338TPiyW(>qZvrS1ty3^#b zwH41EhUTAt88PIgyn&+={W><)rRD+&W+l+$A(x!3=oWx1DLK?})H!ee6ZB7gY$`umgQW?;@E_FTwIjYZ6 z7wHP_@80@qNx-5T9(r5;G^m_Ra^hbSCt3HD% zG52*l(7Eq7to-Tl5Ow({evn4vj$X)xbh(C8oK6mNoz~r2vUs?>73K7-Q1sNVr@rwn z4Kz>C1bDgQCZd>ma@)NFK;2l1)jLR=F(fo2b(IFJ1AospmC!GMOE9hJYn1+_9t1GLN3dxeN0GAl4P zZ!HX_`?rsRE#GBL2`1$c`gO{)y3sAilCrKlp`K>xQ+}*1!VF2EIS$@_!Jo$7{N__3 z8h?EoSsCVJ-;NaJ@TDxJI(icQn&!=$+|Bgq8h%Ezr26m?Lt4^idG~YaV7Q1P(ag6M zL(j(@GJfV!V2TYfJwH!w>kp7Zr?E_?9y6EGV7g8VhEWCOc(iAv#pepCOP7NjaCxdvkhXqJxOBl0Vp7yJh{` z4DUb2x$U_Y@2kJ9s%z!xGuE+s`><)W2ziYlw(o467HtR{3CMq+yHQvx4-eCi>p+a-5&2)%80V%-h0uQCK zo#DHmeqnhe7&`(Hx{pZ^YkV|WTkWplCs`OSa@rk{t)9KO++T0q{Bf*cVnV%*JLiF% z=##u5a?{*OX#KcJQP57e2xZ}#&k3-w^VY7%UruyNMBhI0E5o3d%p30BqFMjh#v9e@ z#G2n+9ERZJyY?7;lhSS2LT>!o(0yU3T3?zpf{Ae)-W2^Xp?Xh2tNT<3DNl~Ceq z^3kg*?Ar9TvU9mw;aHz~ZQy$4R79l+tt_5!jd%l(1lVT{Skw1nPJ#1iw+=@yF#Wa^ zS5R7B%1rP#%2`}i@xZLciq=vNJOG=A`A$s^2?$dukv%A@FR zNK3$&242%=z8LxG(y4hu;`zP!aoBOi2P)ea^>3*WI3^=b=gIJ8_PTV6l~T$#Wi`-v zxaG?ScgmGgy2sc?%)~4ZF^0jkcqP48DK&K)+l5 zwuDp?)f2R8_E$ni0x-_zCCBY+dS27P6=r&tzp;6nbG*i8EXGeO*?0Msy;&8^ApkUL z3x+=sWL?$%?`m7zCe=m+25NG}2*v%;M(c+rs4=^Zs*^?mKQ@J-yOrAy^T zUM|q1kZSh6^(0_Tm-ohIgSzBuE-QZht*EW`nJ%rASCR{3x5pM9vUym)$<_pC>BjE! 
zqFV)5Wq*vdGGSBajE8~fWOTd;dB{1wCsELXOmpOV^|@F@JjoCK!20quT4|hqq2N(T zKD04bXChwT9xD+1A`Uu=UwyP)dU*rnvI9Y?r&G;}J50kNP0%l$}5bqLcMHU`(6CJP-7k#=qG8ca|tV8}CH?WUb zv~6c5kM2gq%%7|JU;JxbCsbF)6{0Bw?yYW&fgmdVb)g-Muq2B!{*nj6$iAllK#8k~|*YJGjmxN2%aYgxqqGu9EQ zf&ZLL72`X*Si(T_9O5T$jR}dK*q4RCxwZs<6{o{?9y;`n}sGkPuCO?>cnj>_$yWOd;7fQ`+<`VKFmfYeoT!7)1E#FruG>58^Fzuj7h z=^~sd;93w}_h8%&xM}Oe7AV530vR`ZS-W^AFt;}a-tBR6yr#N1N6Qewhlh$k(>Kk> z79VVUUI>9N4jioEi86;!2~P3JyM{%gOTqYqMjE>_R z)2~W6oEXb`Ga^K0rjSbB^bzRT`5oT$dg>DeEG&3>l0>BcZ+43yyQ7ANoQ?Q10-_k%xdnS1- zX50lB!>l!zmcOP66rueFanWLM^c(Hsl!I|f%Ge4`w4TG0PYKF@7~&WWV%a^RC!0Fe zL&x{kCYalzjnO%i;g!J)Xk;(JreM$k&f4cLqG@(zXr*y=#q%&v_z8z%ffutsZO**a zGf-VP=ejOH?^R5{2&_Wg%{*<8%g5;AzhKi)6`ByRlW*;`2`Y}jQ);V>^ZqHOY8jBY zp-)@M995uQ5htbYENU?dF9X_;w@FFY7>W2!#;uT#eaSO!iF{X-@SLT|fWdKVjJUo0DkOysY zgN|4faBR)u91<0`GB+vf)BE0B39LW->z2kr=Kp(}tl=X*;MV%vE50~`H2ydbiJhAf z5-g4M_~wv5_Pg_MRUwteK5y^tlD-FBOQc{)q;AxmPvoL_p?DciIYCIS=3&eI!sWIt z=EDa6pTtf6UBDXJyrt3id~lX37{{?ZEAZbp+r>On>{y$V-j1L29A3C08Bf8n^7Rl5 z(@d}jia;I}2%`CEWbH=#V7JfekPchq{`R2Gay+SX^X!OA3ja1Wn5zKxzqxvgU(23b z9(^0!g*t-z>pI~g^n!0c_-v4zMu9zvR}jHffd=g@Up;cQHj5_OP1U6n)O(&x;7Cue^LlV&mIP70c2E=3dKzP7n57; z=8#I-0tw~8hYOo{`_cQ`8T7--WpPtM*<~hb7#C=G8vT6V^h+>@?Y$#XoPmey@I?1m zUButRI6gL8;%12gdm$KGz%1BN@&SaUyT!CQbu;mP_ zyI?{x%YoULRPiysnc_|5$FJNzW0DPDIhH@fE$1k|FaP5eO90pldh8y#K0LoPVD^ST zj;==gnI$$gp`*EmEcaoG9Ij4K&?L(#kH@6oY{m1?y~nf?BA_HLhjb>>mc{D)3(OsF zu8Wsaa!>Ucnqu72R*ZN6rM)ZSA+0m*X<$r!h+^{p4N7QR0h7|0;Zr~A2ML~(V%`ZX zb!EKgtES3+RrpJxxL2->`3UR3e41^if=rxAoCL3=kJ_MW5denY)W7oZ?Mc* zRXKxW0u(4-wI(}awq*@tNOLe{peky4j$G7wcT?JKtnsZgBaBTSOvnkr^HLIL;301H z&&!?@+6_7Rof|JdKn?H|{k9C+oXG#i-voeYs!RSGC)iw3fFyGaBXXV(O|9I#gq7Y; z6b4{z0g^n5vJnAnh-WjRzUCT7Wm{`s+(sM?Ap*2tpTiWv4HkF=7GS?==_P){YvvaI zRI$n1cP-pa0pJvnKM1q9wBqm|Xypp}Z} z$d3V6XzQJdM=hvqzs^-pIjCs;j@?Bosn_)==H*>sPQ&j_vKO3l!9uyDUI5N%7DhEFX zT{HEErs>u)l;g+vkEA0$y^+LOcyvMqWM$RlCQd%6#!e{2Dy*#{KEA+iW2P{1<3b~k z;)_*cuvCs;=Nx_MY4CYGC5m2pTVjjKmMKt}s0i{0lz7_Xgm%j>)k)k+C)9Mnw@5ot zAtkYkJZ&BGnpzCeswEf2_m02S>2YFWcL;F>LJS|6m-hk~#4`4q-rmR@wr?eFzMZ-8 zSXD$34-^HK4CXvTR;Uh@S|@RSe@zqh>C)4am$8Dy!;Z^kviNGu62#CPc3pKtY7g7wpL{G(KdIj{=2J3f`}MI~Obg(fvcslcP9Fmb-Q2BU47{t2Ki?@5MGEQN6|buC3^ zlCi~+8U}QK4cWIx!x~a`JIcNs+q#d#%4KjLC_@N!l&!*j*2~qUv&W^AeN*#qW|5MZ zkI~2X@6a4Q zt&ExinIAJN$4^xDmK%tvKvkBen4x!<0Xwc@4>whgKQ@nl8+i1iCO~}sOEm3^0>@3n zJq&|an<~^z1`u@mXc?4OCW_mH69Vd&`+p2RGeepbSo*s?X$foeBW@4PqH-zT6!0-)2NJDG;Zuo{*$q z5Pj5JIv(|tq^beh=>EK^g7SaEPew&M1sV{mm-qvq3O49uzPMaJKv!Ka1pTRbm(Qfa zr1+*nFhyypApTB);^s5|WAj9JzwOhP8QYEu%DFQsFZ#vs;j57dLkK@;AZMk>@cHUa z7w5nLoV}ovWY*zqc;{K~JDnyvSk@_VdS&W+kE`FvQ+7V~LoMz3au5w+e~f%X9Vm>q zgCh-UISpihGMYIYWL6PWg)DDtW<)}d-ZYPWxxPNmF*;HX5CIVBfF!R1JtHFTx)-o; zj&EaF15?BDuV?TA+%rG}Y_3RM0)wncR?!##q5mtar=cp-TWZG~)b;tOc6m7DmD z#`Qvc?w-%Ac|~6dLqNSPy!Q{JI)zAE(!4bNVbJET8`9gO+s99o+s5R>%plkOVf{Ox zE&ed#=B|?OQ?3qjfx`Ycfc+jkq9XjG1~rHhpDAWm*(ZQQK#w-O_g^{4@B*)pSY zO9(1BBf0z>3H0mNL#7 zXcpZBxqqMLXZ-(88?pKB8#kl^P%&9jz*&=)GRmoN-EC^@B}xtgvS)EV0lQ{g{U6jj z!(Kh7xEUaM6C!_OC0K5djo9bajsd{z)RS1*RCfL=+bPEanue)|Q77VbR{aTL^F@te--w5OZcA6#~qW3bRm)idXv=kXPkv5$r zr8SS%3u%Uc4KUk7-I9AxQ!dP1CdS?I{aFDrKGSJM_bC66%jLD-*V_Y5{QiI+uC9O3 zN}@mEE7u@t&3FuGBzMBqH)WIO2-nOEK=S}-yS#s`G;5+Q)nw&$ndRRvLGX{ zss9>pXVOEqz^N)C88Rqir~~7j&r`kw=PAwDUmOlofHW@wu<}j*RyNO^OdhbG(Xr2| z>JYF4o99O6j>2gFPtWhpd4{YmgvAtN1$hhFM08cmR+Q_Lx7DXx^eohz>9Kaat#j zqLzJ?Zt`{+$5t`_UBZIY7wZ6u7yW6`-!E}-6&1iqbKJ{(v}MU> znZR&ixZ8ImjYTJJ+<`EFuJRBUf1QZtJWF>NS#4?8ow8bp8K|cX834tJON7g{hnSb$ z^5R5lACD9K6I=diz*A>HYjN6YIAY<_zQVduZZr~q0b}NdpoGC}WDlfG<@tim1Mgah 
z_xeQ~G#?F9xq##_NEEQk{R2pw$MaJU?9pdInQ(fW#1L4xvg(+H+wC=6Ak#sadI|m> z1{3{{GeY>U-JgrwmlzS4a4w}3PcpoDlNAz9^iLW@r2lvoG%pk+e#VGn(VfY zVaNEZA^zil+5KTav*bH%X)U;P>8o2_8ciFFm}sCebIJsUqxn{7S;)YD0K*yjA&*M4 zbu>>DLIwoy#ud^OKm>^4SI^LA8>bb#xDe3W01qO~!z%1hX*5gLzGH9Iyrp8YVv{!7 zJ9qA|FxDpWSbz(n2=oEy&pGhDI{Ib)z(=7F8PE&_!VfIoD?CuPc6;S{dng3_X{c&F JC{{KL{C{3veEk3b literal 0 HcmV?d00001 diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index 3d5a961d6f7c7..f273a4dd0eea5 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -12,6 +12,7 @@ dependencies { implementation project(':metadata-service:services') implementation project(':metadata-io') implementation project(':metadata-utils') + implementation project(':metadata-models') implementation externalDependency.graphqlJava implementation externalDependency.graphqlJavaScalars @@ -40,8 +41,10 @@ graphqlCodegen { "$projectDir/src/main/resources/auth.graphql".toString(), "$projectDir/src/main/resources/timeline.graphql".toString(), "$projectDir/src/main/resources/tests.graphql".toString(), + "$projectDir/src/main/resources/properties.graphql".toString(), "$projectDir/src/main/resources/step.graphql".toString(), "$projectDir/src/main/resources/lineage.graphql".toString(), + "$projectDir/src/main/resources/forms.graphql".toString() ] outputDir = new File("$projectDir/src/mainGeneratedGraphQL/java") packageName = "com.linkedin.datahub.graphql.generated" diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index e45bed33eb023..2bde7cb61047b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -19,9 +19,10 @@ private Constants() {} public static final String TESTS_SCHEMA_FILE = "tests.graphql"; public static final String STEPS_SCHEMA_FILE = "step.graphql"; public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String PROPERTIES_SCHEMA_FILE = "properties.graphql"; + public static final String FORMS_SCHEMA_FILE = "forms.graphql"; public static final String BROWSE_PATH_DELIMITER = "/"; public static final String BROWSE_PATH_V2_DELIMITER = "␟"; public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; - public static final String ENTITY_FILTER_NAME = "_entityType"; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index f61d76e72e8bd..4819510d34018 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -57,6 +57,7 @@ import com.linkedin.datahub.graphql.generated.EntityRelationship; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; import com.linkedin.datahub.graphql.generated.GetRootGlossaryNodesResult; import com.linkedin.datahub.graphql.generated.GetRootGlossaryTermsResult; import com.linkedin.datahub.graphql.generated.GlossaryNode; @@ -91,12 +92,17 @@ import com.linkedin.datahub.graphql.generated.QuerySubject; import com.linkedin.datahub.graphql.generated.QuickFilter; import 
com.linkedin.datahub.graphql.generated.RecommendationContent; +import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SiblingProperties; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyParams; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.TestResult; +import com.linkedin.datahub.graphql.generated.TypeQualifier; import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.resolvers.MeResolver; import com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver; @@ -135,6 +141,11 @@ import com.linkedin.datahub.graphql.resolvers.embed.UpdateEmbedResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityPrivilegesResolver; +import com.linkedin.datahub.graphql.resolvers.form.BatchAssignFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.CreateDynamicFormAssignmentResolver; +import com.linkedin.datahub.graphql.resolvers.form.IsFormAssignedToMeResolver; +import com.linkedin.datahub.graphql.resolvers.form.SubmitFormPromptResolver; +import com.linkedin.datahub.graphql.resolvers.form.VerifyFormResolver; import com.linkedin.datahub.graphql.resolvers.glossary.AddRelatedTermsResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryNodeResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryTermResolver; @@ -254,6 +265,7 @@ import com.linkedin.datahub.graphql.resolvers.type.EntityInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver; +import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver; import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver; @@ -288,7 +300,10 @@ import com.linkedin.datahub.graphql.types.dataset.DatasetType; import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType; import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper; +import com.linkedin.datahub.graphql.types.datatype.DataTypeType; import com.linkedin.datahub.graphql.types.domain.DomainType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeType; +import com.linkedin.datahub.graphql.types.form.FormType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; @@ -303,6 +318,7 @@ import com.linkedin.datahub.graphql.types.role.DataHubRoleType; import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyType; import com.linkedin.datahub.graphql.types.tag.TagType; import 
com.linkedin.datahub.graphql.types.test.TestType; import com.linkedin.datahub.graphql.types.view.DataHubViewType; @@ -323,6 +339,7 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -392,6 +409,7 @@ public class GmsGraphQLEngine { private final LineageService lineageService; private final QueryService queryService; private final DataProductService dataProductService; + private final FormService formService; private final FeatureFlags featureFlags; @@ -439,6 +457,10 @@ public class GmsGraphQLEngine { private final QueryType queryType; private final DataProductType dataProductType; private final OwnershipType ownershipType; + private final StructuredPropertyType structuredPropertyType; + private final DataTypeType dataTypeType; + private final EntityTypeType entityTypeType; + private final FormType formType; /** A list of GraphQL Plugins that extend the core engine */ private final List graphQLPlugins; @@ -494,6 +516,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.lineageService = args.lineageService; this.queryService = args.queryService; this.dataProductService = args.dataProductService; + this.formService = args.formService; this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); @@ -533,11 +556,15 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.testType = new TestType(entityClient); this.dataHubPolicyType = new DataHubPolicyType(entityClient); this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); + this.schemaFieldType = new SchemaFieldType(entityClient, featureFlags); this.dataHubViewType = new DataHubViewType(entityClient); this.queryType = new QueryType(entityClient); this.dataProductType = new DataProductType(entityClient); this.ownershipType = new OwnershipType(entityClient); + this.structuredPropertyType = new StructuredPropertyType(entityClient); + this.dataTypeType = new DataTypeType(entityClient); + this.entityTypeType = new EntityTypeType(entityClient); + this.formType = new FormType(entityClient); // Init Lists this.entityTypes = @@ -573,11 +600,16 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataHubViewType, queryType, dataProductType, - ownershipType); + ownershipType, + structuredPropertyType, + dataTypeType, + entityTypeType, + formType); this.loadableTypes = new ArrayList<>(entityTypes); // Extend loadable types with types from the plugins // This allows us to offer search and browse capabilities out of the box for those types for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + this.entityTypes.addAll(plugin.getEntityTypes()); Collection> pluginLoadableTypes = plugin.getLoadableTypes(); if (pluginLoadableTypes != null) { this.loadableTypes.addAll(pluginLoadableTypes); @@ -654,18 +686,23 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configureRoleResolvers(builder); configureSchemaFieldResolvers(builder); configureEntityPathResolvers(builder); + configureResolvedAuditStampResolvers(builder); configureViewResolvers(builder); configureQueryEntityResolvers(builder); 
configureOwnershipTypeResolver(builder); configurePluginResolvers(builder); + configureStructuredPropertyResolvers(builder); + configureFormResolvers(builder); } private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { builder.type( "Role", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "RoleAssociation", typeWiring -> @@ -703,7 +740,9 @@ public GraphQLEngine.Builder builder() { .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(PROPERTIES_SCHEMA_FILE)) + .addSchema(fileBasedSchema(FORMS_SCHEMA_FILE)); for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { List pluginSchemaFiles = plugin.getSchemaFiles(); @@ -767,6 +806,8 @@ private void configureContainerResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "platform", @@ -841,7 +882,8 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) .dataFetcher( "aggregateAcrossEntities", - new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + new AggregateAcrossEntitiesResolver( + this.entityClient, this.viewService, this.formService)) .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) .dataFetcher( "autoCompleteForMultiple", @@ -928,7 +970,8 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) .dataFetcher( - "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + "browseV2", + new BrowseV2Resolver(this.entityClient, this.viewService, this.formService))); } private DataFetcher getEntitiesResolver() { @@ -1139,7 +1182,14 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { new UpdateOwnershipTypeResolver(this.ownershipTypeService)) .dataFetcher( "deleteOwnershipType", - new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + new DeleteOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher("submitFormPrompt", new SubmitFormPromptResolver(this.formService)) + .dataFetcher("batchAssignForm", new BatchAssignFormResolver(this.formService)) + .dataFetcher( + "createDynamicFormAssignment", + new CreateDynamicFormAssignmentResolver(this.formService)) + .dataFetcher( + "verifyForm", new VerifyFormResolver(this.formService, this.groupService))); } private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { @@ -1342,7 +1392,25 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder typeWiring.dataFetcher( "ownershipType", new EntityTypeResolver( - entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))); + entityTypes, (env) -> ((Owner) 
env.getSource()).getOwnershipType()))) + .type( + "StructuredPropertiesEntry", + typeWiring -> + typeWiring + .dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertiesEntry) env.getSource()) + .getStructuredProperty() + .getUrn())) + .dataFetcher( + "valueEntities", + new BatchGetEntitiesResolver( + entityTypes, + (env) -> + ((StructuredPropertiesEntry) env.getSource()).getValueEntities()))); } /** @@ -1422,6 +1490,14 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { "owner", new OwnerTypeResolver<>( ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "SchemaField", + typeWiring -> + typeWiring.dataFetcher( + "schemaFieldEntity", + new LoadableTypeResolver<>( + schemaFieldType, + (env) -> ((SchemaField) env.getSource()).getSchemaFieldEntity().getUrn()))) .type( "UserUsageCounts", typeWiring -> @@ -1518,6 +1594,8 @@ private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) .dataFetcher("schemaMetadata", new AspectResolver()) .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService))); } @@ -1528,7 +1606,9 @@ private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) typeWiring .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); } private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { @@ -1551,6 +1631,16 @@ private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); } + private void configureResolvedAuditStampResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "ResolvedAuditStamp", + typeWiring -> + typeWiring.dataFetcher( + "actor", + new LoadableTypeResolver<>( + corpUserType, (env) -> ((CorpUser) env.getSource()).getUrn()))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.CorpUser} type. 
@@ -1559,8 +1649,10 @@ private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { builder.type( "CorpUser", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "CorpUserInfo", typeWiring -> @@ -1581,6 +1673,8 @@ private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService))); builder .type( @@ -1623,8 +1717,10 @@ private void configureTagAssociationResolver(final RuntimeWiring.Builder builder builder.type( "Tag", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "TagAssociation", typeWiring -> @@ -1659,6 +1755,8 @@ private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) .dataFetcher( "platform", @@ -1690,6 +1788,8 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1758,6 +1858,42 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { }))); } + private void configureStructuredPropertyResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "StructuredPropertyDefinition", + typeWiring -> + typeWiring + .dataFetcher( + "valueType", + new LoadableTypeResolver<>( + dataTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getValueType() + .getUrn())) + .dataFetcher( + "entityTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getEntityTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + builder.type( + "TypeQualifier", + typeWiring -> + typeWiring.dataFetcher( + "allowedTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((TypeQualifier) env.getSource()) + .getAllowedTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.Chart} type. 
@@ -1769,6 +1905,8 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher( "platform", @@ -1858,6 +1996,7 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) { .type( "HyperParameterValueType", typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver())) .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) .type( "TimeSeriesAspect", @@ -1884,6 +2023,8 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "dataFlow", new LoadableTypeResolver<>( @@ -1947,6 +2088,8 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1979,6 +2122,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( @@ -2064,6 +2209,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -2103,6 +2250,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher( "platform", @@ -2127,6 +2276,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new 
WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "dataPlatformInstance", @@ -2145,6 +2296,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "dataPlatformInstance", @@ -2179,6 +2332,8 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); builder.type( "DomainAssociation", @@ -2193,12 +2348,64 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) { .getUrn()))); } + private void configureFormResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "FormAssociation", + typeWiring -> + typeWiring.dataFetcher( + "form", + new LoadableTypeResolver<>( + formType, + (env) -> + ((com.linkedin.datahub.graphql.generated.FormAssociation) env.getSource()) + .getForm() + .getUrn()))); + builder.type( + "StructuredPropertyParams", + typeWiring -> + typeWiring.dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertyParams) env.getSource()) + .getStructuredProperty() + .getUrn()))); + builder.type( + "FormActorAssignment", + typeWiring -> + typeWiring + .dataFetcher( + "users", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + final FormActorAssignment actors = env.getSource(); + return actors.getUsers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> { + final FormActorAssignment actors = env.getSource(); + return actors.getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher("isAssignedToMe", new IsFormAssignedToMeResolver(groupService))); + } + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { builder.type( "DataProduct", typeWiring -> typeWiring .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 4829194a8ce4d..5b780cc8cb40b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -25,6 +25,7 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.FormService; import 
com.linkedin.metadata.service.LineageService;
 import com.linkedin.metadata.service.OwnershipTypeService;
 import com.linkedin.metadata.service.QueryService;
@@ -73,6 +74,7 @@ public class GmsGraphQLEngineArgs {
   QueryService queryService;
   FeatureFlags featureFlags;
   DataProductService dataProductService;
+  FormService formService;
 
   // any fork specific args should go below this line
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
index 472d9465aeee1..a544bd46527c4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
@@ -1,5 +1,6 @@
 package com.linkedin.datahub.graphql;
 
+import com.linkedin.datahub.graphql.types.EntityType;
 import com.linkedin.datahub.graphql.types.LoadableType;
 import graphql.schema.idl.RuntimeWiring;
 import java.util.Collection;
@@ -34,6 +35,9 @@ public interface GmsGraphQLPlugin {
    */
   Collection<? extends LoadableType<?, ?>> getLoadableTypes();
 
+  /** Return a list of Entity Types that the plugin services */
+  Collection<? extends EntityType<?, ?>> getEntityTypes();
+
   /**
    * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific
    * resolvers.
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java
new file mode 100644
index 0000000000000..ae8ac4330e7fb
--- /dev/null
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java
@@ -0,0 +1,55 @@
+package com.linkedin.datahub.graphql;
+
+import com.linkedin.common.SubTypes;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.datahub.graphql.generated.Entity;
+import com.linkedin.entity.EntityResponse;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.r2.RemoteInvocationException;
+import graphql.schema.DataFetcher;
+import graphql.schema.DataFetchingEnvironment;
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.concurrent.CompletableFuture;
+import javax.annotation.Nullable;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+@AllArgsConstructor
+public class SubTypesResolver implements DataFetcher<CompletableFuture<SubTypes>> {
+
+  EntityClient _entityClient;
+  String _entityType;
+  String _aspectName;
+
+  @Override
+  @Nullable
+  public CompletableFuture<SubTypes> get(DataFetchingEnvironment environment) throws Exception {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final QueryContext context = environment.getContext();
+          SubTypes subType = null;
+          final String urnStr = ((Entity) environment.getSource()).getUrn();
+          try {
+            final Urn urn = Urn.createFromString(urnStr);
+            EntityResponse entityResponse =
+                _entityClient
+                    .batchGetV2(
+                        urn.getEntityType(),
+                        Collections.singleton(urn),
+                        Collections.singleton(_aspectName),
+                        context.getAuthentication())
+                    .get(urn);
+            if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) {
+              subType =
+                  new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data());
+            }
+          } catch (RemoteInvocationException | URISyntaxException e) {
+            throw new RuntimeException(
+                "Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e);
+          }
+          return subType;
+        });
+  }
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index 22ee4d4d4845c..d8665ae784bd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.RawAspect; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.AspectSpec; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index 31a8359f8f0e3..de389a358d936 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -12,8 +12,8 @@ import com.linkedin.datahub.graphql.generated.BarSegment; import com.linkedin.datahub.graphql.generated.MetadataAnalyticsInput; import com.linkedin.datahub.graphql.generated.NamedBar; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Filter; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 03333bda05f61..baea3ea4e6201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.NamedLine; import com.linkedin.datahub.graphql.generated.NumericDataPoint; import com.linkedin.datahub.graphql.generated.Row; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.util.List; import java.util.Map; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index e74ed09849763..667ccd368a729 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -17,4 +17,5 @@ public class FeatureFlags { private boolean showAcrylInfo = false; private boolean showAccessManagement = false; private boolean nestedDomainsEnabled = false; + private boolean schemaFieldEntityFetchEnabled = false; } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index da4a3a76dd7e0..d9ce2472c8634 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -12,13 +12,14 @@ import com.linkedin.datahub.graphql.generated.BrowseResultsV2; import com.linkedin.datahub.graphql.generated.BrowseV2Input; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.search.SearchUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResultV2; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -37,6 +38,7 @@ public class BrowseV2Resolver implements DataFetcher get(DataFetchingEnvironment environmen ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); BrowseResultV2 browseResults = _entityClient.browseV2( @@ -76,8 +78,8 @@ public CompletableFuture get(DataFetchingEnvironment environmen pathStr, maybeResolvedView != null ? 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index 81b52991cde90..f127e6a49abff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -126,9 +126,15 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen appConfig.setAuthConfig(authConfig); final VisualConfig visualConfig = new VisualConfig(); - if (_visualConfiguration != null && _visualConfiguration.getAssets() != null) { - visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); - visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + if (_visualConfiguration != null) { + if (_visualConfiguration.getAssets() != null) { + visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); + visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + } + if (_visualConfiguration.getAppTitle() != null) { + visualConfig.setAppTitle(_visualConfiguration.getAppTitle()); + } + visualConfig.setHideGlossary(_visualConfiguration.isHideGlossary()); } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index a0f1698bf99e8..72912087190c0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -12,9 +12,9 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.dataproduct.DataProductAssociation; import com.linkedin.dataproduct.DataProductProperties; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 8f6d109e71b2c..6229e38954163 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -7,7 +7,7 @@ import com.linkedin.datahub.graphql.generated.Domain; import
com.linkedin.datahub.graphql.generated.DomainEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java new file mode 100644 index 0000000000000..39c9210c289e1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java @@ -0,0 +1,52 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class BatchAssignFormResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + + public BatchAssignFormResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final BatchAssignFormInput input = + bindArgument(environment.getArgument("input"), BatchAssignFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final List<String> entityUrns = input.getEntityUrns(); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () -> { + try { + _formService.batchAssignFormToEntities( + entityUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + formUrn, + authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +}
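The form resolvers added in this patch all share one shape: bind the GraphQL input, run the FormService call on a background thread, and surface any failure as a RuntimeException that carries the offending input. A condensed sketch of that shared pattern, where callService stands in as a hypothetical placeholder for the per-resolver service call:

// Sketch of the shared resolver pattern, not a concrete resolver.
return CompletableFuture.supplyAsync(
    () -> {
      try {
        callService(input, context.getAuthentication()); // hypothetical service call
        return true;
      } catch (Exception e) {
        throw new RuntimeException(
            String.format("Failed to perform update against input %s", input), e);
      }
    });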
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java new file mode 100644 index 0000000000000..5b5f058dbdeac --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java @@ -0,0 +1,50 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class CreateDynamicFormAssignmentResolver + implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + + public CreateDynamicFormAssignmentResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final CreateDynamicFormAssignmentInput input = + bindArgument(environment.getArgument("input"), CreateDynamicFormAssignmentInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final DynamicFormAssignment formAssignment = FormUtils.mapDynamicFormAssignment(input); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () -> { + try { + _formService.createDynamicFormAssignment(formAssignment, formUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java new file mode 100644 index 0000000000000..e7bf87ae7644e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java @@ -0,0 +1,80 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IsFormAssignedToMeResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final GroupService _groupService; + + public IsFormAssignedToMeResolver(@Nonnull final GroupService groupService) { + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final FormActorAssignment parent = environment.getSource(); + + return CompletableFuture.supplyAsync( + () -> { + try { + + // Assign urn and group urns + final Set<String> assignedUserUrns = + parent.getUsers() != null + ? parent.getUsers().stream().map(CorpUser::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Set<String> assignedGroupUrns = + parent.getGroups() != null + ? parent.getGroups().stream().map(CorpGroup::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + + // First check whether user is directly assigned. + if (assignedUserUrns.size() > 0) { + boolean isUserAssigned = assignedUserUrns.contains(userUrn.toString()); + if (isUserAssigned) { + return true; + } + } + + // Next check whether the user is assigned indirectly, by group. + if (assignedGroupUrns.size() > 0) { + final List<Urn> groupUrns = + _groupService.getGroupsForUser(userUrn, context.getAuthentication()); + boolean isUserGroupAssigned = + groupUrns.stream() + .anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString())); + if (isUserGroupAssigned) { + return true; + } + } + } catch (Exception e) { + log.error( + "Failed to determine whether the form is assigned to the currently authenticated user! Returning false.", + e); + } + + // Else the user is not directly assigned. + return false; + }); + } +}
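IsFormAssignedToMeResolver deliberately checks direct assignment before group assignment, so the remote group lookup only happens when the cheap in-memory set lookup misses. The same decision as a single expression (a sketch; names match the resolver above):

// Sketch: a direct user match short-circuits the group-membership lookup.
boolean assigned =
    assignedUserUrns.contains(userUrn.toString())
        || _groupService.getGroupsForUser(userUrn, context.getAuthentication()).stream()
            .anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString()));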
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java new file mode 100644 index 0000000000000..5b40c353b3809 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java @@ -0,0 +1,89 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.metadata.service.FormService; +import com.linkedin.structured.PrimitivePropertyValueArray; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class SubmitFormPromptResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + + public SubmitFormPromptResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final Urn entityUrn = UrnUtils.getUrn(environment.getArgument("urn")); + final SubmitFormPromptInput input = + bindArgument(environment.getArgument("input"), SubmitFormPromptInput.class); + final String promptId = input.getPromptId(); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final String fieldPath = input.getFieldPath(); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (input.getType().equals(FormPromptType.STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitStructuredPropertyPromptResponse( + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + context.getAuthentication()); + } else if (input.getType().equals(FormPromptType.FIELDS_STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + if (fieldPath == null) { + throw new IllegalArgumentException( + "Failed to provide fieldPath for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitFieldStructuredPropertyPromptResponse( + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + fieldPath, + context.getAuthentication()); + } + return false; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +}
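Both prompt types handled by SubmitFormPromptResolver require structured property params, and the field-level variant additionally requires a fieldPath. The validation contract, restated compactly as a sketch:

// Sketch: preconditions enforced by the resolver before calling FormService.
if (input.getStructuredPropertyParams() == null) {
  throw new IllegalArgumentException("structured property params are required");
}
if (FormPromptType.FIELDS_STRUCTURED_PROPERTY.equals(input.getType()) && fieldPath == null) {
  throw new IllegalArgumentException("fieldPath is required for field-level prompts");
}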
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java new file mode 100644 index 0000000000000..54e3562c97add --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java @@ -0,0 +1,63 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class VerifyFormResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + private final GroupService _groupService; + + public VerifyFormResolver( + @Nonnull final FormService formService, @Nonnull final GroupService groupService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final VerifyFormInput input = + bindArgument(environment.getArgument("input"), VerifyFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final Urn entityUrn = UrnUtils.getUrn(input.getEntityUrn()); + final Authentication authentication = context.getAuthentication(); + final Urn actorUrn = UrnUtils.getUrn(authentication.getActor().toUrnStr()); + + return CompletableFuture.supplyAsync( + () -> { + try { + final List<Urn> groupsForUser = + _groupService.getGroupsForUser(actorUrn, authentication); + if (!_formService.isFormAssignedToUser( + formUrn, entityUrn, actorUrn, groupsForUser, authentication)) { + throw new AuthorizationException( + String.format( + "Failed to authorize form on entity as form with urn %s is not assigned to user", + formUrn)); + } + _formService.verifyFormForEntity(formUrn, entityUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index 6a204286ba44e..b52153d70fa7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -37,7 +37,6 @@ public class CreateGlossaryNodeResolver implements DataFetcher<CompletableFuture public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index 93582fb956bd8..1f8c17ee72884 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -6,7 +6,7 @@ import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; import com.linkedin.datahub.graphql.generated.EntityCountResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 0b909dee51374..3e9583824a568 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -44,7 +44,8 @@ public CompletableFuture<Boolean> rollbackIngestion( return CompletableFuture.supplyAsync( () -> { try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); + _entityClient.rollbackIngestion( + runId, context.getAuthorizer(), context.getAuthentication()); return true; } catch (Exception e) { throw new RuntimeException("Failed to rollback ingestion execution", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java new file mode 100644 index 0000000000000..25768da819555 --- /dev/null +++
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java @@ -0,0 +1,105 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.form.FormInfo; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import java.util.Objects; +import javax.annotation.Nonnull; + +public class FormUtils { + + private static final String COMPLETED_FORMS = "completedForms"; + private static final String INCOMPLETE_FORMS = "incompleteForms"; + private static final String VERIFIED_FORMS = "verifiedForms"; + private static final String OWNERS = "owners"; + private static final String COMPLETED_FORMS_COMPLETED_PROMPT_IDS = + "completedFormsCompletedPromptIds"; + private static final String INCOMPLETE_FORMS_COMPLETED_PROMPT_IDS = + "incompleteFormsCompletedPromptIds"; + + private FormUtils() {} + + public static PrimitivePropertyValueArray getStructuredPropertyValuesFromInput( + @Nonnull final SubmitFormPromptInput input) { + final PrimitivePropertyValueArray values = new PrimitivePropertyValueArray(); + + input + .getStructuredPropertyParams() + .getValues() + .forEach( + value -> { + if (value.getStringValue() != null) { + values.add(PrimitivePropertyValue.create(value.getStringValue())); + } else if (value.getNumberValue() != null) { + values.add(PrimitivePropertyValue.create(value.getNumberValue().doubleValue())); + } + }); + + return values; + } + + /** Map a GraphQL CreateDynamicFormAssignmentInput to the GMS DynamicFormAssignment aspect */ + @Nonnull + public static DynamicFormAssignment mapDynamicFormAssignment( + @Nonnull final CreateDynamicFormAssignmentInput input) { + Objects.requireNonNull(input, "input must not be null"); + + final DynamicFormAssignment result = new DynamicFormAssignment(); + final Filter filter = + new Filter() + .setOr(ResolverUtils.buildConjunctiveCriterionArrayWithOr(input.getOrFilters())); + result.setFilter(filter); + return result; + } + + /** + * Creates a Filter where the provided formUrn is either in completedForms or incompleteForms for + * an entity + */ + private static Filter generateCompleteOrIncompleteFilter(@Nonnull final String formUrn) + throws Exception { + final CriterionArray completedFormsAndArray = new CriterionArray(); + final CriterionArray incompleteFormsAndArray = new CriterionArray(); + completedFormsAndArray.add(buildFormCriterion(formUrn, COMPLETED_FORMS)); + incompleteFormsAndArray.add(buildFormCriterion(formUrn, INCOMPLETE_FORMS)); + // need this to be an OR not two ANDs + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(completedFormsAndArray), + new ConjunctiveCriterion().setAnd(incompleteFormsAndArray))); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field) { + 
return buildFormCriterion(formUrn, field, false); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field, final boolean negated) { + return new Criterion() + .setField(field) + .setValue(formUrn) + .setCondition(Condition.EQUAL) + .setNegated(negated); + } + + private static boolean isActorExplicitlyAssigned( + @Nonnull final Urn actorUrn, @Nonnull final FormInfo formInfo) { + return (formInfo.getActors().getUsers() != null + && formInfo.getActors().getUsers().stream().anyMatch(user -> user.equals(actorUrn))) + || (formInfo.getActors().getGroups() != null + && formInfo.getActors().getGroups().stream().anyMatch(group -> group.equals(actorUrn))); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 3328eff2bdf45..7bfd166b18a20 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -9,7 +9,7 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.GetGrantedPrivilegesInput; import com.linkedin.datahub.graphql.generated.Privileges; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index ca1e01b45989d..e65666117b4fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -15,8 +15,8 @@ import com.linkedin.datahub.graphql.generated.RecommendationRenderType; import com.linkedin.datahub.graphql.generated.RecommendationRequestContext; import com.linkedin.datahub.graphql.generated.SearchParams; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.recommendation.EntityRequestContext; import com.linkedin.metadata.recommendation.RecommendationsService; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index 6d23456b76b4f..b54987dc0e9b0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -15,6 +15,7 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; 
import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -36,6 +37,7 @@ public class AggregateAcrossEntitiesResolver private final EntityClient _entityClient; private final ViewService _viewService; + private final FormService _formService; @Override public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environment) { @@ -58,7 +60,7 @@ public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environme context.getAuthentication()) : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); @@ -75,8 +77,8 @@ public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environme sanitizedQuery, maybeResolvedView != null ? SearchUtils.combineFilters( - baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, 0, 0, // 0 entity count because we don't want resolved entities searchFlags, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 6a01fa19867ad..f300331ab4bc8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -10,9 +10,9 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleInput; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleResults; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -66,6 +66,12 @@ public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironmen : null; List<EntityType> types = getEntityTypes(input.getTypes(), maybeResolvedView); + types = + types != null + ?
types.stream() + .filter(AUTO_COMPLETE_ENTITY_TYPES::contains) + .collect(Collectors.toList()) + : null; if (types != null && types.size() > 0) { return AutocompleteUtils.batchGetAutocompleteResults( types.stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index e54955e1857f0..1a380781385c3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -11,9 +11,9 @@ import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.search.AggregationMetadata; import com.linkedin.metadata.search.AggregationMetadataArray; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index 742d1d170de64..658138ae6e3dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -8,9 +8,9 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ScrollAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.ScrollResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnScrollResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index adab62c22bb72..0af0a3827b1bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageInput; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.datahub.graphql.types.mappers.UrnScrollAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 0f5d2d90ba0c2..2dc5032f2a4eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -9,9 +9,9 @@ import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageInput; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 6821423887923..bc177c600beee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -5,9 +5,9 @@ import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index 6746c30a2edbc..8c45df1b30b26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -21,8 +21,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java new file mode 100644 index 0000000000000..cb0d24839056d --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java @@ -0,0 +1,25 @@ +package com.linkedin.datahub.graphql.resolvers.type; + +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import graphql.TypeResolutionEnvironment; +import graphql.schema.GraphQLObjectType; +import graphql.schema.TypeResolver; + +public class PropertyValueResolver implements TypeResolver { + + public static final String STRING_VALUE = "StringValue"; + public static final String NUMBER_VALUE = "NumberValue"; + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringValue) { + return env.getSchema().getObjectType(STRING_VALUE); + } else if (env.getObject() instanceof NumberValue) { + return env.getSchema().getObjectType(NUMBER_VALUE); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index 9da5f915ff31d..3a676f118c1ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.DataHubViewFilterInput; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.CriterionArray; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 4c452af126201..18a082fee95f1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -32,6 +32,7 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -192,6 +193,11 @@ public Entity apply(Urn input) { ((OwnershipTypeEntity) partialEntity).setUrn(input.toString()); ((OwnershipTypeEntity) partialEntity).setType(EntityType.CUSTOM_OWNERSHIP_TYPE); } + if 
(input.getEntityType().equals(STRUCTURED_PROPERTY_ENTITY_NAME)) { + partialEntity = new StructuredPropertyEntity(); + ((StructuredPropertyEntity) partialEntity).setUrn(input.toString()); + ((StructuredPropertyEntity) partialEntity).setType(EntityType.STRUCTURED_PROPERTY); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index badb24810c82b..fd31e1d394a92 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -88,6 +88,8 @@ public class DatasetType DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, ACCESS_DATASET_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME, SUB_TYPES_ASPECT_NAME); private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java new file mode 100644 index 0000000000000..7e5372268170b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java @@ -0,0 +1,24 @@ +package com.linkedin.datahub.graphql.types.dataset.mappers; + +import com.linkedin.datahub.graphql.generated.DatasetFilter; +import com.linkedin.datahub.graphql.generated.DatasetFilterType; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; + +public class DatasetFilterMapper + implements ModelMapper<com.linkedin.dataset.DatasetFilter, DatasetFilter> { + + public static final DatasetFilterMapper INSTANCE = new DatasetFilterMapper(); + + public static DatasetFilter map(@Nonnull final com.linkedin.dataset.DatasetFilter metadata) { + return INSTANCE.apply(metadata); + } + + @Override + public DatasetFilter apply(@Nonnull final com.linkedin.dataset.DatasetFilter input) { + final DatasetFilter result = new DatasetFilter(); + result.setType(DatasetFilterType.valueOf(input.getType().name())); + result.setSql(input.getSql()); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 7fa1decdf7f55..163e8b9288d87 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -38,9 +39,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import
com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,6 +56,7 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -151,6 +155,15 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setStructuredProperties( + StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index edc9baf4ba9c5..e0a74d351125f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.metadata.utils.SchemaFieldUtils; import javax.annotation.Nonnull; public class SchemaFieldMapper { @@ -37,6 +40,7 @@ public SchemaField apply( result.setIsPartOfKey(input.isIsPartOfKey()); result.setIsPartitioningKey(input.isIsPartitioningKey()); result.setJsonProps(input.getJsonProps()); + result.setSchemaFieldEntity(this.createSchemaFieldEntity(input, entityUrn)); return result; } @@ -75,4 +79,14 @@ private SchemaFieldDataType mapSchemaFieldDataType( "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } } + + private SchemaFieldEntity createSchemaFieldEntity( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + SchemaFieldEntity schemaFieldEntity = new SchemaFieldEntity(); + schemaFieldEntity.setUrn( + SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), input.getFieldPath()) + .toString()); + schemaFieldEntity.setType(EntityType.SCHEMA_FIELD); + return schemaFieldEntity; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 31381073a16dd..e550280a6c2db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -18,6 +18,11 @@ public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + return apply(input, entityUrn, aspect.getVersion()); + } + + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nonnull final SchemaMetadata input, final Urn entityUrn, final long version) { final com.linkedin.datahub.graphql.generated.SchemaMetadata result = new com.linkedin.datahub.graphql.generated.SchemaMetadata(); @@ -35,7 +40,7 @@ public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( .map(field -> SchemaFieldMapper.map(field, entityUrn)) .collect(Collectors.toList())); result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); + result.setAspectVersion(version); if (input.hasForeignKeys()) { result.setForeignKeys( input.getForeignKeys().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java new file mode 100644 index 0000000000000..612644ae2dbb2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java @@ -0,0 +1,51 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.DataTypeInfo; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; + +public class DataTypeEntityMapper implements ModelMapper<EntityResponse, DataTypeEntity> { + + public static final DataTypeEntityMapper INSTANCE = new DataTypeEntityMapper(); + + public static DataTypeEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataTypeEntity apply(@Nonnull final EntityResponse entityResponse) { + final DataTypeEntity result = new DataTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_TYPE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<DataTypeEntity> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_TYPE_INFO_ASPECT_NAME, this::mapDataTypeInfo); + + // Set the standard Type ENUM for the data type. + if (result.getInfo() != null) { + result.getInfo().setType(DataTypeUrnMapper.getType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapDataTypeInfo(@Nonnull DataTypeEntity dataType, @Nonnull DataMap dataMap) { + com.linkedin.datatype.DataTypeInfo gmsInfo = new com.linkedin.datatype.DataTypeInfo(dataMap); + DataTypeInfo info = new DataTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + dataType.setInfo(info); + } +}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java new file mode 100644 index 0000000000000..5ea1680546ce6 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class DataTypeType + implements com.linkedin.datahub.graphql.types.EntityType<DataTypeEntity, String> { + + public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATA_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.DATA_TYPE; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<DataTypeEntity> objectClass() { + return DataTypeEntity.class; + } + + @Override + public List<DataFetcherResult<DataTypeEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> dataTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + DATA_TYPE_ENTITY_NAME, + new HashSet<>(dataTypeUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<DataTypeEntity>newResult() + .data(DataTypeEntityMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load data type entities", e); + } + } +}
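batchLoad keeps its results aligned with the requested urns by re-iterating the input list rather than the response map, padding misses with null so the GraphQL data loader can zip results back onto keys. The alignment step in isolation (a sketch; requestedUrns and responseByUrn are hypothetical names):

// Sketch: order-preserving alignment of a batchGetV2-style response.
final List<EntityResponse> ordered = new ArrayList<>();
for (Urn urn : requestedUrns) {
  ordered.add(responseByUrn.getOrDefault(urn, null)); // null marks a missing entity
}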
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java new file mode 100644 index 0000000000000..ec71cd63a70d5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java @@ -0,0 +1,40 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.StdDataType; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataTypeUrnMapper { + + static final Map<StdDataType, String> DATA_TYPE_ENUM_TO_URN = + ImmutableMap.<StdDataType, String>builder() + .put(StdDataType.STRING, "urn:li:dataType:datahub.string") + .put(StdDataType.NUMBER, "urn:li:dataType:datahub.number") + .put(StdDataType.URN, "urn:li:dataType:datahub.urn") + .put(StdDataType.RICH_TEXT, "urn:li:dataType:datahub.rich_text") + .put(StdDataType.DATE, "urn:li:dataType:datahub.date") + .build(); + + private static final Map<String, StdDataType> URN_TO_DATA_TYPE_ENUM = + DATA_TYPE_ENUM_TO_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private DataTypeUrnMapper() {} + + public static StdDataType getType(String dataTypeUrn) { + if (!URN_TO_DATA_TYPE_ENUM.containsKey(dataTypeUrn)) { + return StdDataType.OTHER; + } + return URN_TO_DATA_TYPE_ENUM.get(dataTypeUrn); + } + + @Nonnull + public static String getUrn(StdDataType dataType) { + if (!DATA_TYPE_ENUM_TO_URN.containsKey(dataType)) { + throw new IllegalArgumentException("Unknown data type: " + dataType); + } + return DATA_TYPE_ENUM_TO_URN.get(dataType); + } +}
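The two maps in DataTypeUrnMapper are deliberately asymmetric: urn-to-enum lookups degrade to StdDataType.OTHER for unregistered urns, while enum-to-urn lookups throw, since emitting an unknown urn would write bad metadata. Expected behavior, per the code above:

// Sketch: round-trip behavior of the mapper.
StdDataType a = DataTypeUrnMapper.getType("urn:li:dataType:datahub.string"); // STRING
StdDataType b = DataTypeUrnMapper.getType("urn:li:dataType:custom.unknown"); // OTHER
String urn = DataTypeUrnMapper.getUrn(StdDataType.NUMBER); // "urn:li:dataType:datahub.number"
// DataTypeUrnMapper.getUrn(StdDataType.OTHER) would throw IllegalArgumentException.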
EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ENTITY_TYPE_INFO_ASPECT_NAME, this::mapEntityTypeInfo); + + // Set the standard Type ENUM for the entity type. + if (result.getInfo() != null) { + result + .getInfo() + .setType(EntityTypeUrnMapper.getEntityType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapEntityTypeInfo(@Nonnull EntityTypeEntity entityType, @Nonnull DataMap dataMap) { + com.linkedin.entitytype.EntityTypeInfo gmsInfo = + new com.linkedin.entitytype.EntityTypeInfo(dataMap); + EntityTypeInfo info = new EntityTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + entityType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java similarity index 91% rename from datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java rename to datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java index aba781f9e1dc7..23e793782e8dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.graphql.resolvers; +package com.linkedin.datahub.graphql.types.entitytype; import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.graphql.generated.EntityType; @@ -17,7 +17,6 @@ public class EntityTypeMapper { ImmutableMap.builder() .put(EntityType.DATASET, "dataset") .put(EntityType.ROLE, "role") - .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) .put(EntityType.CORP_USER, "corpuser") .put(EntityType.CORP_GROUP, "corpGroup") .put(EntityType.DATA_PLATFORM, "dataPlatform") @@ -41,6 +40,9 @@ public class EntityTypeMapper { .put(EntityType.TEST, "test") .put(EntityType.DATAHUB_VIEW, Constants.DATAHUB_VIEW_ENTITY_NAME) .put(EntityType.DATA_PRODUCT, Constants.DATA_PRODUCT_ENTITY_NAME) + .put(EntityType.SCHEMA_FIELD, "schemaField") + .put(EntityType.STRUCTURED_PROPERTY, Constants.STRUCTURED_PROPERTY_ENTITY_NAME) + .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) .build(); private static final Map ENTITY_NAME_TO_TYPE = @@ -52,7 +54,7 @@ private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); if (!ENTITY_NAME_TO_TYPE.containsKey(lowercaseName)) { - throw new IllegalArgumentException("Unknown entity name: " + name); + return EntityType.OTHER; } return ENTITY_NAME_TO_TYPE.get(lowercaseName); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java new file mode 100644 index 0000000000000..aa5dfc13ea757 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static 
com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class EntityTypeType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(ENTITY_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.ENTITY_TYPE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return EntityTypeEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List entityTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + ENTITY_TYPE_ENTITY_NAME, + new HashSet<>(entityTypeUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(EntityTypeEntityMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load entity type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java new file mode 100644 index 0000000000000..9e9bf86e5fe7f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java @@ -0,0 +1,85 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +/** + * In this class we statically map "well-supported" entity types into a more usable Enum class + * served by our GraphQL API. + * + *

<p>When we add new entity types / entity urns, we MAY NEED to update this.
+ *
+ * <p>
Note that we currently do not support mapping entities that fall outside of this set. If you + * try to map an entity type without a corresponding enum symbol, the mapping WILL FAIL. + */ +public class EntityTypeUrnMapper { + + static final Map ENTITY_NAME_TO_ENTITY_TYPE_URN = + ImmutableMap.builder() + .put(Constants.DATASET_ENTITY_NAME, "urn:li:entityType:datahub.dataset") + .put(Constants.ROLE_ENTITY_NAME, "urn:li:entityType:datahub.role") + .put(Constants.CORP_USER_ENTITY_NAME, "urn:li:entityType:datahub.corpuser") + .put(Constants.CORP_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.corpGroup") + .put(Constants.DATA_PLATFORM_ENTITY_NAME, "urn:li:entityType:datahub.dataPlatform") + .put(Constants.DASHBOARD_ENTITY_NAME, "urn:li:entityType:datahub.dashboard") + .put(Constants.CHART_ENTITY_NAME, "urn:li:entityType:datahub.chart") + .put(Constants.TAG_ENTITY_NAME, "urn:li:entityType:datahub.tag") + .put(Constants.DATA_FLOW_ENTITY_NAME, "urn:li:entityType:datahub.dataFlow") + .put(Constants.DATA_JOB_ENTITY_NAME, "urn:li:entityType:datahub.dataJob") + .put(Constants.GLOSSARY_TERM_ENTITY_NAME, "urn:li:entityType:datahub.glossaryTerm") + .put(Constants.GLOSSARY_NODE_ENTITY_NAME, "urn:li:entityType:datahub.glossaryNode") + .put(Constants.ML_MODEL_ENTITY_NAME, "urn:li:entityType:datahub.mlModel") + .put(Constants.ML_MODEL_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.mlModelGroup") + .put(Constants.ML_FEATURE_TABLE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeatureTable") + .put(Constants.ML_FEATURE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeature") + .put(Constants.ML_PRIMARY_KEY_ENTITY_NAME, "urn:li:entityType:datahub.mlPrimaryKey") + .put(Constants.CONTAINER_ENTITY_NAME, "urn:li:entityType:datahub.container") + .put(Constants.DOMAIN_ENTITY_NAME, "urn:li:entityType:datahub.domain") + .put(Constants.NOTEBOOK_ENTITY_NAME, "urn:li:entityType:datahub.notebook") + .put( + Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataPlatformInstance") + .put(Constants.TEST_ENTITY_NAME, "urn:li:entityType:datahub.test") + .put(Constants.DATAHUB_VIEW_ENTITY_NAME, "urn:li:entityType:datahub.dataHubView") + .put(Constants.DATA_PRODUCT_ENTITY_NAME, "urn:li:entityType:datahub.dataProduct") + .put(Constants.ASSERTION_ENTITY_NAME, "urn:li:entityType:datahub.assertion") + .put(Constants.SCHEMA_FIELD_ENTITY_NAME, "urn:li:entityType:datahub.schemaField") + .build(); + + private static final Map ENTITY_TYPE_URN_TO_NAME = + ENTITY_NAME_TO_ENTITY_TYPE_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private EntityTypeUrnMapper() {} + + public static String getName(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + return ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + } + + /* + * Takes in a entityTypeUrn and returns a GraphQL EntityType by first mapping + * the urn to the entity name, and then mapping the entity name to EntityType. 
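+   * e.g. "urn:li:entityType:datahub.dataset" resolves to the entity name "dataset", which maps to EntityType.DATASET.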
+ */ + public static EntityType getEntityType(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + final String entityName = ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + return EntityTypeMapper.getType(entityName); + } + + @Nonnull + public static String getEntityTypeUrn(String name) { + if (!ENTITY_NAME_TO_ENTITY_TYPE_URN.containsKey(name)) { + throw new IllegalArgumentException("Unknown entity name: " + name); + } + return ENTITY_NAME_TO_ENTITY_TYPE_URN.get(name); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java new file mode 100644 index 0000000000000..a0ddd4a5883d2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java @@ -0,0 +1,129 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import com.linkedin.datahub.graphql.generated.FormInfo; +import com.linkedin.datahub.graphql.generated.FormPrompt; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.FormType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertyParams; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class FormMapper implements ModelMapper { + + public static final FormMapper INSTANCE = new FormMapper(); + + public static Form map(@Nonnull final EntityResponse form) { + return INSTANCE.apply(form); + } + + public Form apply(@Nonnull final EntityResponse entityResponse) { + Form result = new Form(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityUrn.toString()); + result.setType(EntityType.FORM); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(FORM_INFO_ASPECT_NAME, this::mapFormInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (form, dataMap) -> + form.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + + return mappingHelper.getResult(); + } + + private void mapFormInfo(@Nonnull Form form, @Nonnull DataMap dataMap) { + com.linkedin.form.FormInfo gmsFormInfo = new com.linkedin.form.FormInfo(dataMap); + FormInfo formInfo = new FormInfo(); + formInfo.setName(gmsFormInfo.getName()); + formInfo.setType(FormType.valueOf(gmsFormInfo.getType().toString())); + if 
(gmsFormInfo.hasDescription()) { + formInfo.setDescription(gmsFormInfo.getDescription()); + } + formInfo.setPrompts(this.mapFormPrompts(gmsFormInfo, form.getUrn())); + formInfo.setActors(mapFormActors(gmsFormInfo.getActors())); + form.setInfo(formInfo); + } + + private List mapFormPrompts( + @Nonnull com.linkedin.form.FormInfo gmsFormInfo, @Nonnull String formUrn) { + List formPrompts = new ArrayList<>(); + if (gmsFormInfo.hasPrompts()) { + gmsFormInfo + .getPrompts() + .forEach(FormPrompt -> formPrompts.add(mapFormPrompt(FormPrompt, formUrn))); + } + return formPrompts; + } + + private FormPrompt mapFormPrompt( + @Nonnull com.linkedin.form.FormPrompt gmsFormPrompt, @Nonnull String formUrn) { + final FormPrompt formPrompt = new FormPrompt(); + formPrompt.setId(gmsFormPrompt.getId()); + formPrompt.setTitle(gmsFormPrompt.getTitle()); + formPrompt.setType(FormPromptType.valueOf(gmsFormPrompt.getType().toString())); + formPrompt.setRequired(gmsFormPrompt.isRequired()); + formPrompt.setFormUrn(formUrn); + if (gmsFormPrompt.hasDescription()) { + formPrompt.setDescription(gmsFormPrompt.getDescription()); + } + + if (gmsFormPrompt.hasStructuredPropertyParams()) { + final StructuredPropertyParams params = new StructuredPropertyParams(); + final Urn structuredPropUrn = gmsFormPrompt.getStructuredPropertyParams().getUrn(); + final StructuredPropertyEntity structuredProp = new StructuredPropertyEntity(); + structuredProp.setUrn(structuredPropUrn.toString()); + structuredProp.setType(EntityType.STRUCTURED_PROPERTY); + params.setStructuredProperty(structuredProp); + formPrompt.setStructuredPropertyParams(params); + } + + return formPrompt; + } + + private FormActorAssignment mapFormActors(com.linkedin.form.FormActorAssignment gmsFormActors) { + FormActorAssignment result = new FormActorAssignment(); + result.setOwners(gmsFormActors.isOwners()); + if (gmsFormActors.hasUsers()) { + result.setUsers( + gmsFormActors.getUsers().stream().map(this::mapUser).collect(Collectors.toList())); + } + if (gmsFormActors.hasGroups()) { + result.setGroups( + gmsFormActors.getGroups().stream().map(this::mapGroup).collect(Collectors.toList())); + } + return result; + } + + private CorpUser mapUser(Urn userUrn) { + CorpUser user = new CorpUser(); + user.setUrn(userUrn.toString()); + return user; + } + + private CorpGroup mapGroup(Urn groupUrn) { + CorpGroup group = new CorpGroup(); + group.setUrn(groupUrn.toString()); + return group; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java new file mode 100644 index 0000000000000..8a09cee353cc9 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java @@ -0,0 +1,76 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import 
graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class FormType implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(FORM_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.FORM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Form.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List formUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + FORM_ENTITY_NAME, + new HashSet<>(formUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : formUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult().data(FormMapper.map(gmsResult)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Forms", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java new file mode 100644 index 0000000000000..43665b37b9ee8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java @@ -0,0 +1,133 @@ +package com.linkedin.datahub.graphql.types.form; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.FieldFormPromptAssociationArray; +import com.linkedin.common.FormPromptAssociationArray; +import com.linkedin.common.Forms; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FieldFormPromptAssociation; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptFieldAssociations; +import com.linkedin.datahub.graphql.generated.FormVerificationAssociation; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nonnull; + +public class FormsMapper { + + public static final FormsMapper INSTANCE = new FormsMapper(); + + public static com.linkedin.datahub.graphql.generated.Forms map( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + return INSTANCE.apply(forms, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Forms apply( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + final List incompleteForms = new ArrayList<>(); + forms + .getIncompleteForms() + .forEach( + formAssociation -> + incompleteForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List completeForms = new ArrayList<>(); + forms + 
.getCompletedForms() + .forEach( + formAssociation -> + completeForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List verifications = new ArrayList<>(); + forms + .getVerifications() + .forEach( + verificationAssociation -> + verifications.add(this.mapVerificationAssociation(verificationAssociation))); + + return new com.linkedin.datahub.graphql.generated.Forms( + incompleteForms, completeForms, verifications); + } + + private FormAssociation mapFormAssociation( + @Nonnull final com.linkedin.common.FormAssociation association, + @Nonnull final String entityUrn) { + FormAssociation result = new FormAssociation(); + result.setForm( + Form.builder().setType(EntityType.FORM).setUrn(association.getUrn().toString()).build()); + result.setAssociatedUrn(entityUrn); + result.setCompletedPrompts(this.mapPrompts(association.getCompletedPrompts())); + result.setIncompletePrompts(this.mapPrompts(association.getIncompletePrompts())); + return result; + } + + private FormVerificationAssociation mapVerificationAssociation( + @Nonnull final com.linkedin.common.FormVerificationAssociation verificationAssociation) { + FormVerificationAssociation result = new FormVerificationAssociation(); + result.setForm( + Form.builder() + .setType(EntityType.FORM) + .setUrn(verificationAssociation.getForm().toString()) + .build()); + if (verificationAssociation.hasLastModified()) { + result.setLastModified(createAuditStamp(verificationAssociation.getLastModified())); + } + return result; + } + + private List mapPrompts( + @Nonnull final FormPromptAssociationArray promptAssociations) { + List result = new ArrayList<>(); + promptAssociations.forEach( + promptAssociation -> { + FormPromptAssociation association = new FormPromptAssociation(); + association.setId(promptAssociation.getId()); + association.setLastModified(createAuditStamp(promptAssociation.getLastModified())); + if (promptAssociation.hasFieldAssociations()) { + association.setFieldAssociations( + mapFieldAssociations(promptAssociation.getFieldAssociations())); + } + result.add(association); + }); + return result; + } + + private List mapFieldPrompts( + @Nonnull final FieldFormPromptAssociationArray fieldPromptAssociations) { + List result = new ArrayList<>(); + fieldPromptAssociations.forEach( + fieldFormPromptAssociation -> { + FieldFormPromptAssociation association = new FieldFormPromptAssociation(); + association.setFieldPath(fieldFormPromptAssociation.getFieldPath()); + association.setLastModified( + createAuditStamp(fieldFormPromptAssociation.getLastModified())); + result.add(association); + }); + return result; + } + + private FormPromptFieldAssociations mapFieldAssociations( + com.linkedin.common.FormPromptFieldAssociations associationsObj) { + final FormPromptFieldAssociations fieldAssociations = new FormPromptFieldAssociations(); + if (associationsObj.hasCompletedFieldPrompts()) { + fieldAssociations.setCompletedFieldPrompts( + this.mapFieldPrompts(associationsObj.getCompletedFieldPrompts())); + } + if (associationsObj.hasIncompleteFieldPrompts()) { + fieldAssociations.setIncompleteFieldPrompts( + this.mapFieldPrompts(associationsObj.getIncompleteFieldPrompts())); + } + return fieldAssociations; + } + + private ResolvedAuditStamp createAuditStamp(AuditStamp auditStamp) { + final ResolvedAuditStamp resolvedAuditStamp = new ResolvedAuditStamp(); + final CorpUser emptyCreatedUser = new CorpUser(); + emptyCreatedUser.setUrn(auditStamp.getActor().toString()); + resolvedAuditStamp.setActor(emptyCreatedUser); + 
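+    // Only the actor's urn is populated on the stub CorpUser; GraphQL resolvers can hydrate the remaining fields.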
resolvedAuditStamp.setTime(auditStamp.getTime()); + return resolvedAuditStamp; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 901361eb0b2be..31c8cec8cb5fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -8,6 +8,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryNodeProperties; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -36,7 +37,8 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( GLOSSARY_NODE_INFO_ASPECT_NAME, - (glossaryNode, dataMap) -> glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + (glossaryNode, dataMap) -> + glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap, entityUrn))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, @@ -46,13 +48,18 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { return mappingHelper.getResult(); } - private GlossaryNodeProperties mapGlossaryNodeProperties(@Nonnull DataMap dataMap) { + private GlossaryNodeProperties mapGlossaryNodeProperties( + @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { GlossaryNodeInfo glossaryNodeInfo = new GlossaryNodeInfo(dataMap); GlossaryNodeProperties result = new GlossaryNodeProperties(); result.setDescription(glossaryNodeInfo.getDefinition()); if (glossaryNodeInfo.hasName()) { result.setName(glossaryNodeInfo.getName()); } + if (glossaryNodeInfo.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryNodeInfo.getCustomProperties(), entityUrn)); + } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index 8494eace22244..68475a2599158 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.generated.GlossaryTerms; @@ -46,7 +47,15 @@ private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossa resultGlossaryTerm.setName( GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); result.setTerm(resultGlossaryTerm); - 
result.setAssociatedUrn(entityUrn.toString()); + if (input.hasActor()) { + CorpUser actor = new CorpUser(); + actor.setUrn(input.getActor().toString()); + actor.setType(EntityType.CORP_USER); + result.setActor(actor); + } + if (entityUrn != null) { + result.setAssociatedUrn(entityUrn.toString()); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 7c7dab2e02472..b5733626468d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -9,8 +9,8 @@ import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SearchSuggestion; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; import java.net.URISyntaxException; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java new file mode 100644 index 0000000000000..254a1ed1767f1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java @@ -0,0 +1,54 @@ +package com.linkedin.datahub.graphql.types.schemafield; + +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nonnull; + +public class SchemaFieldMapper implements ModelMapper { + + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + + public static SchemaFieldEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public SchemaFieldEntity apply(@Nonnull final EntityResponse entityResponse) { + Urn entityUrn = entityResponse.getUrn(); + final SchemaFieldEntity result = this.mapSchemaFieldUrn(entityUrn); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((schemaField, dataMap) -> + schemaField.setStructuredProperties( + StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + + return result; + } + + private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { + try { + SchemaFieldEntity result = new SchemaFieldEntity(); + result.setUrn(urn.toString()); + 
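+      // A schemaField urn's entity key is (parent entity urn, fieldPath); both parts are unpacked below.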
result.setType(EntityType.SCHEMA_FIELD); + result.setFieldPath(urn.getEntityKey().get(1)); + Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); + result.setParent(UrnToEntityMapper.map(parentUrn)); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to load schemaField entity", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index b543a40cbac41..9f14bf52733ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -1,22 +1,40 @@ package com.linkedin.datahub.graphql.types.schemafield; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; -import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +@RequiredArgsConstructor public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType { - public SchemaFieldType() {} + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME); + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; @Override public EntityType type() { @@ -40,29 +58,41 @@ public List> batchLoad( urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - return schemaFieldUrns.stream() - .map(this::mapSchemaFieldUrn) + Map entities = new HashMap<>(); + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + entities = + _entityClient.batchGetV2( + SCHEMA_FIELD_ENTITY_NAME, + new HashSet<>(schemaFieldUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + } + + final List gmsResults = new ArrayList<>(); + for (Urn urn : schemaFieldUrns) { + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + gmsResults.add(entities.getOrDefault(urn, null)); + } else { + gmsResults.add( + new EntityResponse() + .setUrn(urn) + .setAspects(new EnvelopedAspectMap()) + .setEntityName(urn.getEntityType())); + } + } + + return gmsResults.stream() .map( - schemaFieldEntity -> - DataFetcherResult.newResult().data(schemaFieldEntity).build()) + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(SchemaFieldMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to load schemaField entity", e); } } - - private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { - try { - SchemaFieldEntity result = new SchemaFieldEntity(); - result.setUrn(urn.toString()); - result.setType(EntityType.SCHEMA_FIELD); - result.setFieldPath(urn.getEntityKey().get(1)); - Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(parentUrn)); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to load schemaField entity", e); - } - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java new file mode 100644 index 0000000000000..ad48067599328 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java @@ -0,0 +1,80 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StructuredPropertiesMapper { + + public static final StructuredPropertiesMapper INSTANCE = new StructuredPropertiesMapper(); + + public static com.linkedin.datahub.graphql.generated.StructuredProperties map( + @Nonnull final StructuredProperties structuredProperties) { + return INSTANCE.apply(structuredProperties); + } + + public com.linkedin.datahub.graphql.generated.StructuredProperties apply( + @Nonnull final StructuredProperties structuredProperties) { + com.linkedin.datahub.graphql.generated.StructuredProperties result = + new com.linkedin.datahub.graphql.generated.StructuredProperties(); + result.setProperties( + structuredProperties.getProperties().stream() + .map(this::mapStructuredProperty) + .collect(Collectors.toList())); + return result; + } + + private StructuredPropertiesEntry mapStructuredProperty( + StructuredPropertyValueAssignment valueAssignment) { + StructuredPropertiesEntry entry = new StructuredPropertiesEntry(); + entry.setStructuredProperty(createStructuredPropertyEntity(valueAssignment)); + final List values = new ArrayList<>(); + final List entities = new ArrayList<>(); + valueAssignment + .getValues() + .forEach( + value -> { + if (value.isString()) { + this.mapStringValue(value.getString(), values, entities); + } else if (value.isDouble()) { + values.add(new NumberValue(value.getDouble())); + } + }); + entry.setValues(values); + entry.setValueEntities(entities); + return entry; + } + + private 
StructuredPropertyEntity createStructuredPropertyEntity( + StructuredPropertyValueAssignment assignment) { + StructuredPropertyEntity entity = new StructuredPropertyEntity(); + entity.setUrn(assignment.getPropertyUrn().toString()); + entity.setType(EntityType.STRUCTURED_PROPERTY); + return entity; + } + + private void mapStringValue( + String stringValue, List values, List entities) { + try { + final Urn urnValue = Urn.createFromString(stringValue); + entities.add(UrnToEntityMapper.map(urnValue)); + } catch (Exception e) { + log.debug("String value is not an urn for this structured property entry"); + } + values.add(new StringValue(stringValue)); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java new file mode 100644 index 0000000000000..259020b83bee1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java @@ -0,0 +1,124 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.datahub.graphql.generated.AllowedValue; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyCardinality; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.TypeQualifier; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.PropertyValueArray; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class StructuredPropertyMapper + implements ModelMapper { + + private static final String ALLOWED_TYPES = "allowedTypes"; + + public static final StructuredPropertyMapper INSTANCE = new StructuredPropertyMapper(); + + public static StructuredPropertyEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public StructuredPropertyEntity apply(@Nonnull final EntityResponse entityResponse) { + final StructuredPropertyEntity result = new StructuredPropertyEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.STRUCTURED_PROPERTY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, (this::mapStructuredPropertyDefinition)); + return mappingHelper.getResult(); + } + + private void mapStructuredPropertyDefinition( + @Nonnull StructuredPropertyEntity extendedProperty, @Nonnull DataMap dataMap) { + com.linkedin.structured.StructuredPropertyDefinition 
gmsDefinition = + new com.linkedin.structured.StructuredPropertyDefinition(dataMap); + StructuredPropertyDefinition definition = new StructuredPropertyDefinition(); + definition.setQualifiedName(gmsDefinition.getQualifiedName()); + definition.setCardinality( + PropertyCardinality.valueOf(gmsDefinition.getCardinality().toString())); + definition.setValueType(createDataTypeEntity(gmsDefinition.getValueType())); + if (gmsDefinition.hasDisplayName()) { + definition.setDisplayName(gmsDefinition.getDisplayName()); + } + if (gmsDefinition.getDescription() != null) { + definition.setDescription(gmsDefinition.getDescription()); + } + if (gmsDefinition.hasAllowedValues()) { + definition.setAllowedValues(mapAllowedValues(gmsDefinition.getAllowedValues())); + } + if (gmsDefinition.hasTypeQualifier()) { + definition.setTypeQualifier(mapTypeQualifier(gmsDefinition.getTypeQualifier())); + } + definition.setEntityTypes( + gmsDefinition.getEntityTypes().stream() + .map(this::createEntityTypeEntity) + .collect(Collectors.toList())); + extendedProperty.setDefinition(definition); + } + + private List mapAllowedValues(@Nonnull PropertyValueArray gmsValues) { + List allowedValues = new ArrayList<>(); + gmsValues.forEach( + value -> { + final AllowedValue allowedValue = new AllowedValue(); + if (value.getValue().isString()) { + allowedValue.setValue(new StringValue(value.getValue().getString())); + } else if (value.getValue().isDouble()) { + allowedValue.setValue(new NumberValue(value.getValue().getDouble())); + } + if (value.getDescription() != null) { + allowedValue.setDescription(value.getDescription()); + } + allowedValues.add(allowedValue); + }); + return allowedValues; + } + + private DataTypeEntity createDataTypeEntity(final Urn dataTypeUrn) { + final DataTypeEntity dataType = new DataTypeEntity(); + dataType.setUrn(dataTypeUrn.toString()); + dataType.setType(EntityType.DATA_TYPE); + return dataType; + } + + private TypeQualifier mapTypeQualifier(final StringArrayMap gmsTypeQualifier) { + final TypeQualifier typeQualifier = new TypeQualifier(); + List allowedTypes = gmsTypeQualifier.get(ALLOWED_TYPES); + if (allowedTypes != null) { + typeQualifier.setAllowedTypes( + allowedTypes.stream().map(this::createEntityTypeEntity).collect(Collectors.toList())); + } + return typeQualifier; + } + + private EntityTypeEntity createEntityTypeEntity(final Urn entityTypeUrn) { + return createEntityTypeEntity(entityTypeUrn.toString()); + } + + private EntityTypeEntity createEntityTypeEntity(final String entityTypeUrnStr) { + final EntityTypeEntity entityType = new EntityTypeEntity(); + entityType.setUrn(entityTypeUrnStr); + entityType.setType(EntityType.ENTITY_TYPE); + return entityType; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java new file mode 100644 index 0000000000000..b028563b5253c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java @@ -0,0 +1,79 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import 
com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class StructuredPropertyType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.STRUCTURED_PROPERTY; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return StructuredPropertyEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List extendedPropertyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + STRUCTURED_PROPERTY_ENTITY_NAME, + new HashSet<>(extendedPropertyUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : extendedPropertyUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(StructuredPropertyMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index 8ea06f46d5133..a4bbd685fd4a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilter; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 52451e195ee84..7964f7e4fab23 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -212,6 +212,16 @@ type VisualConfig { """ faviconUrl: String + """ + Custom app title to show in the browser tab + """ + appTitle: String + + """ + Boolean flag disabling viewing the Business Glossary page for users without the 'Manage Glossaries' privilege + """ + hideGlossary: Boolean + """ Configuration for 
the queries tab """ diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index ebb13d32643ed..2ad4982579380 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -700,6 +700,31 @@ type Mutation { deleteOwnershipType( "Urn of the Custom Ownership Type to remove." urn: String!, deleteReferences: Boolean): Boolean + + """ + Submit a response to a prompt from a form collecting metadata on different entities. + Provide the urn of the entity you're submitting a form response as well as the required input. + """ + submitFormPrompt(urn: String!, input: SubmitFormPromptInput!): Boolean + + """ + Assign a form to different entities. This will be a patch by adding this form to the list + of forms on an entity. + """ + batchAssignForm(input: BatchAssignFormInput!): Boolean + + """ + Creates a filter for a form to apply it to certain entities. Entities that match this filter will have + a given form applied to them. + This feature is ONLY supported in Acryl DataHub. + """ + createDynamicFormAssignment(input: CreateDynamicFormAssignmentInput!): Boolean + + """ + Verifies a form on an entity when all of the required questions on the form are complete and the form + is of type VERIFICATION. + """ + verifyForm(input: VerifyFormInput!): Boolean } """ @@ -910,6 +935,31 @@ enum EntityType { A Role from an organisation """ ROLE + + """" + An structured property on entities + """ + STRUCTURED_PROPERTY + + """" + A form entity on entities + """ + FORM + + """" + A data type registered to DataHub + """ + DATA_TYPE + + """" + A type of entity registered to DataHub + """ + ENTITY_TYPE + + """ + Another entity type - refer to a provided entity type urn. + """ + OTHER } """ @@ -1284,6 +1334,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { """ domain: DomainAssociation + """ + The forms associated with the Dataset + """ + forms: Forms + """ The Roles and the properties to access the dataset """ @@ -1426,6 +1481,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Structured properties about this Dataset + """ + structuredProperties: StructuredProperties } type RoleAssociation { @@ -1529,6 +1589,7 @@ type SiblingProperties { If this entity is the primary sibling among the sibling set """ isPrimary: Boolean + """ The sibling entities """ @@ -1910,6 +1971,12 @@ type GlossaryTerm implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -2047,6 +2114,12 @@ type GlossaryNode implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -2076,6 +2149,11 @@ type GlossaryNodeProperties { Description of the glossary term """ description: String + + """ + Custom properties of the Glossary Node + """ + customProperties: [CustomPropertiesEntry!] } """ @@ -2447,6 +2525,12 @@ type Container implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. 
+ For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -2822,12 +2906,27 @@ type SchemaFieldEntity implements Entity { """ parent: Entity! + """ + Structured properties on this schema field + """ + structuredProperties: StructuredProperties + """ Granular API for querying edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult } +""" +Object containing structured properties for an entity +""" +type StructuredProperties { + """ + Structured properties on this entity + """ + properties: [StructuredPropertiesEntry!] +} + """ Information about an individual field in a Dataset schema """ @@ -2902,6 +3001,11 @@ type SchemaField { For schema fields that have other properties that are not modeled explicitly, represented as a JSON string. """ jsonProps: String + + """ + Schema field entity that exist in the database for this schema field + """ + schemaFieldEntity: SchemaFieldEntity } """ @@ -3444,6 +3548,12 @@ type CorpUser implements Entity { Settings that a user can customize through the datahub ui """ settings: CorpUserSettings + + """ + Experimental API. + For fetching extra aspects that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -3804,6 +3914,12 @@ type CorpGroup implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4005,6 +4121,12 @@ type Tag implements Entity { Deprecated, use properties.description field instead """ description: String @deprecated + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4099,6 +4221,11 @@ type GlossaryTermAssociation { """ term: GlossaryTerm! + """ + The actor who is responsible for the term being added" + """ + actor: CorpUser + """ Reference back to the associated urn for tracking purposes e.g. when sibling nodes are merged together """ @@ -4635,6 +4762,12 @@ type Notebook implements Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4955,6 +5088,12 @@ type Dashboard implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -5265,6 +5404,12 @@ type Chart implements EntityWithRelationships & Entity & BrowsableEntity { Sub Types that this entity implements """ subTypes: SubTypes + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -5622,6 +5767,12 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
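+  # Escape hatch for reading any aspect on this entity as a raw payload until a dedicated field exists.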
} """ @@ -5822,6 +5973,12 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -6558,10 +6715,10 @@ type PartitionSpec { """ The partition identifier """ - partition: String! + partition: String """ - The optional time window partition information + The optional time window partition information - required if type is TIMESTAMP_FIELD. """ timePartition: TimeWindow } @@ -6587,7 +6744,6 @@ type TimeWindow { durationMillis: Long! } - """ An assertion represents a programmatic validation, check, or test performed periodically against another Entity. """ @@ -7048,10 +7204,29 @@ type AssertionStdParameter { The type of an AssertionStdParameter """ enum AssertionStdParameterType { + """ + A string value + """ STRING + + """ + A numeric value + """ NUMBER + + """ + A list of values. When used, the value should be formatted as a serialized JSON array. + """ LIST + + """ + A set of values. When used, the value should be formatted as a serialized JSON array. + """ SET + + """ + A value of unknown type + """ UNKNOWN } @@ -8738,6 +8913,12 @@ type MLModel implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -8849,6 +9030,12 @@ type MLModelGroup implements EntityWithRelationships & Entity & BrowsableEntity Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLModelGroupProperties { @@ -8973,6 +9160,12 @@ type MLFeature implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLHyperParam { @@ -9142,6 +9335,12 @@ type MLPrimaryKey implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLPrimaryKeyProperties { @@ -9269,6 +9468,12 @@ type MLFeatureTable implements EntityWithRelationships & Entity & BrowsableEntit Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLFeatureTableEditableProperties { @@ -9577,6 +9782,22 @@ enum CostType { ORG_COST_TYPE } + +""" +Audit stamp containing a resolved actor +""" +type ResolvedAuditStamp { + """ + When the audited action took place + """ + time: Long! + + """ + Who performed the audited action + """ + actor: CorpUser +} + type SubTypes { """ The sub-types that this entity implements. e.g. Datasets that are views will implement the "view" subtype @@ -9644,6 +9865,12 @@ type Domain implements Entity { Edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Experimental API. 
+  For fetching extra entities that do not have custom UI code yet
+  """
+  aspects(input: AspectParams): [RawAspect!]
 }
 
 """
@@ -10139,6 +10366,12 @@ type DataHubRole implements Entity {
   The description of the Role
   """
   description: String!
+
+  """
+  Experimental API.
+  For fetching extra entities that do not have custom UI code yet
+  """
+  aspects(input: AspectParams): [RawAspect!]
 }
 
 """
@@ -11015,6 +11248,12 @@ type DataProduct implements Entity {
   Tags used for searching Data Product
   """
   tags: GlobalTags
+
+  """
+  Experimental API.
+  For fetching extra entities that do not have custom UI code yet
+  """
+  aspects(input: AspectParams): [RawAspect!]
 }
 
 """
@@ -11270,3 +11509,94 @@ input UpdateOwnershipTypeInput {
   """
   description: String
 }
+
+"""
+Describes a generic filter on a dataset
+"""
+type DatasetFilter {
+  """
+  Type of filter
+  """
+  type: DatasetFilterType!
+
+  """
+  The raw query if using a SQL FilterType
+  """
+  sql: String
+}
+
+"""
+Type of filter
+"""
+enum DatasetFilterType {
+  """
+  Use a SQL string to apply the filter
+  """
+  SQL
+}
+
+
+"""
+Input required to create or update a DatasetFilter
+"""
+input DatasetFilterInput {
+  """
+  Type of filter
+  """
+  type: DatasetFilterType!
+
+  """
+  The raw query if using a SQL FilterType
+  """
+  sql: String
+}
+
+"""
+An entity type registered in DataHub
+"""
+type EntityTypeEntity implements Entity {
+  """
+  A primary key associated with the entity type
+  """
+  urn: String!
+
+  """
+  A standard Entity Type
+  """
+  type: EntityType!
+
+  """
+  Info about this type including its name
+  """
+  info: EntityTypeInfo!
+
+  """
+  Granular API for querying edges extending from this entity
+  """
+  relationships(input: RelationshipsInput!): EntityRelationshipsResult
+}
+
+"""
+Properties about an individual entity type
+"""
+type EntityTypeInfo {
+  """
+  The standard entity type
+  """
+  type: EntityType!
+
+  """
+  The fully qualified name of the entity type. This includes its namespace
+  """
+  qualifiedName: String!
+
+  """
+  The display name of this type
+  """
+  displayName: String
+
+  """
+  The description of this type
+  """
+  description: String
+}
diff --git a/datahub-graphql-core/src/main/resources/forms.graphql b/datahub-graphql-core/src/main/resources/forms.graphql
new file mode 100644
index 0000000000000..0ff55cfa9f173
--- /dev/null
+++ b/datahub-graphql-core/src/main/resources/forms.graphql
@@ -0,0 +1,407 @@
+"""
+Requirements forms that are assigned to an entity.
+"""
+type Forms {
+  """
+  Forms that are still incomplete.
+  """
+  incompleteForms: [FormAssociation!]!
+
+  """
+  Forms that have been completed.
+  """
+  completedForms: [FormAssociation!]!
+
+  """
+  Verifications that have been applied to the entity via completed forms.
+  """
+  verifications: [FormVerificationAssociation!]!
+}
+
+type FormAssociation {
+  """
+  The form related to the associated urn
+  """
+  form: Form!
+
+  """
+  Reference back to the urn with the form on it for tracking purposes e.g. when sibling nodes are merged together
+  """
+  associatedUrn: String!
+
+  """
+  The prompts that still need to be completed for this form
+  """
+  incompletePrompts: [FormPromptAssociation!]
+
+  """
+  The prompts that are already completed for this form
+  """
+  completedPrompts: [FormPromptAssociation!]
+}
+
+"""
+Verification object that has been applied to the entity via a completed form.
+"""
+type FormVerificationAssociation {
+  """
+  The form related to the associated urn
+  """
+  form: Form!
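+
+  # A minimal usage sketch, assuming entities expose a `forms: Forms` field of this
+  # shape elsewhere in this change; the dataset urn is hypothetical:
+  #
+  #   query {
+  #     dataset(urn: "urn:li:dataset:(urn:li:dataPlatform:hive,example,PROD)") {
+  #       forms {
+  #         incompleteForms { form { urn } incompletePrompts { id } }
+  #         completedForms { form { urn } }
+  #         verifications { form { urn } lastModified { time } }
+  #       }
+  #     }
+  #   }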
+
+  """
+  When this verification was applied to this entity
+  """
+  lastModified: ResolvedAuditStamp
+}
+
+"""
+An association between a form prompt and an entity
+"""
+type FormPromptAssociation {
+  """
+  The unique id of the form prompt
+  """
+  id: String!
+
+  """
+  When and by whom this form prompt has last been modified
+  """
+  lastModified: ResolvedAuditStamp!
+
+  """
+  Optional information about the field-level prompt associations.
+  """
+  fieldAssociations: FormPromptFieldAssociations
+}
+
+"""
+Information about the field-level prompt associations.
+"""
+type FormPromptFieldAssociations {
+  """
+  If this form prompt is for fields, this will contain a list of completed associations per field
+  """
+  completedFieldPrompts: [FieldFormPromptAssociation!]
+
+  """
+  If this form prompt is for fields, this will contain a list of incomplete associations per field
+  """
+  incompleteFieldPrompts: [FieldFormPromptAssociation!]
+}
+
+"""
+An association for field-level form prompts
+"""
+type FieldFormPromptAssociation {
+  """
+  The schema field path
+  """
+  fieldPath: String!
+
+  """
+  When and by whom this form field-level prompt has last been modified
+  """
+  lastModified: ResolvedAuditStamp!
+}
+
+"""
+A form that helps with filling out metadata on an entity
+"""
+type Form implements Entity {
+  """
+  A primary key associated with the Form
+  """
+  urn: String!
+
+  """
+  A standard Entity Type
+  """
+  type: EntityType!
+
+  """
+  Information about this form
+  """
+  info: FormInfo!
+
+  """
+  Ownership metadata of the form
+  """
+  ownership: Ownership
+
+  """
+  Granular API for querying edges extending from this entity
+  """
+  relationships(input: RelationshipsInput!): EntityRelationshipsResult
+}
+
+"""
+The type of a form. This is optional on a form entity
+"""
+enum FormType {
+  """
+  This form is used for "verifying" entities as a state for governance and compliance
+  """
+  VERIFICATION
+
+  """
+  This form is used to help with filling out metadata on entities
+  """
+  COMPLETION
+}
+
+"""
+Properties about an individual Form
+"""
+type FormInfo {
+  """
+  The name of this form
+  """
+  name: String!
+
+  """
+  The description of this form
+  """
+  description: String
+
+  """
+  The type of this form
+  """
+  type: FormType!
+
+  """
+  The prompts for this form
+  """
+  prompts: [FormPrompt!]!
+
+  """
+  The actors that are assigned to complete the forms for the associated entities.
+  """
+  actors: FormActorAssignment!
+}
+
+"""
+A prompt shown to the user to collect metadata about an entity
+"""
+type FormPrompt {
+  """
+  The ID of this prompt. This will be globally unique.
+  """
+  id: String!
+
+  """
+  The title of this prompt
+  """
+  title: String!
+
+  """
+  The urn of the parent form that this prompt is part of
+  """
+  formUrn: String!
+
+  """
+  The description of this prompt
+  """
+  description: String
+
+  """
+  The type of this prompt
+  """
+  type: FormPromptType!
+
+  """
+  Whether the prompt is required for the form to be considered completed.
+  """
+  required: Boolean!
+
+  """
+  The params for this prompt if type is STRUCTURED_PROPERTY
+  """
+  structuredPropertyParams: StructuredPropertyParams
+}
+
+"""
+Enum of all form prompt types
+"""
+enum FormPromptType {
+  """
+  A structured property form prompt type.
+  """
+  STRUCTURED_PROPERTY
+  """
+  A schema field-level structured property form prompt type.
+  """
+  FIELDS_STRUCTURED_PROPERTY
+}
+
+"""
+Params for a form prompt of type STRUCTURED_PROPERTY
+"""
+type StructuredPropertyParams {
+  """
+  The structured property required for the prompt on this entity
+  """
+  structuredProperty: StructuredPropertyEntity!
+}
+
+"""
+Input for responding to a singular prompt in a form
+"""
+input SubmitFormPromptInput {
+  """
+  The unique ID of the prompt this input is responding to
+  """
+  promptId: String!
+
+  """
+  The urn of the form that this prompt is a part of
+  """
+  formUrn: String!
+
+  """
+  The type of prompt that this input is responding to
+  """
+  type: FormPromptType!
+
+  """
+  The fieldPath on a schema field that this prompt submission is associated with.
+  This should be provided when the prompt is of type FIELDS_STRUCTURED_PROPERTY
+  """
+  fieldPath: String
+
+  """
+  The structured property required for the prompt on this entity
+  """
+  structuredPropertyParams: StructuredPropertyInputParams
+}
+
+"""
+Input for responding to a singular prompt in a form for a batch of entities
+"""
+input BatchSubmitFormPromptInput {
+  """
+  The urns of the entities this prompt submission is for
+  """
+  assetUrns: [String!]!
+
+  """
+  Input for responding to a specific prompt on a form
+  """
+  input: SubmitFormPromptInput
+}
+
+"""
+Input for collecting structured property values to apply to entities
+"""
+input PropertyValueInput {
+  """
+  The string value for this structured property
+  """
+  stringValue: String
+
+  """
+  The number value for this structured property
+  """
+  numberValue: Float
+}
+
+"""
+Input params for applying a structured property to an entity
+"""
+input StructuredPropertyInputParams {
+  """
+  The urn of the structured property being applied to an entity
+  """
+  structuredPropertyUrn: String!
+
+  """
+  The list of values you want to apply on this structured property to an entity
+  """
+  values: [PropertyValueInput!]!
+}
+
+"""
+Input for batch assigning a form to different entities
+"""
+input BatchAssignFormInput {
+  """
+  The urn of the form being assigned to entities
+  """
+  formUrn: String!
+
+  """
+  The entities that this form is being assigned to
+  """
+  entityUrns: [String!]!
+}
+
+"""
+Input for dynamically assigning a form to entities that match a set of filters
+"""
+input CreateDynamicFormAssignmentInput {
+  """
+  The urn of the form being assigned to entities that match some criteria
+  """
+  formUrn: String!
+
+  """
+  A list of disjunctive criteria for the filter (OR operation to combine filters).
+  Entities that match this filter will have this form applied to them.
+  Currently, we only support a set of fields to filter on and they are:
+  (1) platform (2) subType (3) container (4) _entityType (5) domain
+  """
+  orFilters: [AndFilterInput!]!
+}
+
+type FormActorAssignment {
+  """
+  Whether the form should be completed by owners of the assets which the form is applied to.
+  """
+  owners: Boolean!
+
+  """
+  Urns of the users that the form is assigned to. If null, then no users are specifically targeted.
+  """
+  users: [CorpUser!]
+
+  """
+  Groups that the form is assigned to. If null, then no groups are specifically targeted.
+  """
+  groups: [CorpGroup!]
+
+  """
+  Whether or not the current actor is universally assigned to this form, either by user or by group.
+  Note that this does not take into account entity ownership based assignment.
+  """
+  isAssignedToMe: Boolean!
+}
+
+"""
+Input for verifying forms on entities
+"""
+input VerifyFormInput {
+  """
+  The urn of the form being verified on an entity
+  """
+  formUrn: String!
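+
+  # A minimal usage sketch for SubmitFormPromptInput above, assuming a corresponding
+  # `submitFormPrompt` mutation is wired up elsewhere in this change; all urns and
+  # ids below are hypothetical:
+  #
+  #   mutation {
+  #     submitFormPrompt(
+  #       urn: "urn:li:dataset:(urn:li:dataPlatform:hive,example,PROD)"
+  #       input: {
+  #         promptId: "prompt-1"
+  #         formUrn: "urn:li:form:metadata-initiative"
+  #         type: STRUCTURED_PROPERTY
+  #         structuredPropertyParams: {
+  #           structuredPropertyUrn: "urn:li:structuredProperty:io.example.tier"
+  #           values: [{ stringValue: "gold" }]
+  #         }
+  #       }
+  #     )
+  #   }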
+
+  """
+  The urn of the entity that is having a form verified on it
+  """
+  entityUrn: String!
+}
+
+"""
+Input for verifying a batch of entities for a given form
+"""
+input BatchVerifyFormInput {
+  """
+  The urns of the entities getting verified for this form
+  """
+  assetUrns: [String!]!
+
+  """
+  The urn of the form being verified on the given entities
+  """
+  formUrn: String!
+}
diff --git a/datahub-graphql-core/src/main/resources/properties.graphql b/datahub-graphql-core/src/main/resources/properties.graphql
new file mode 100644
index 0000000000000..2bed0f1155ff1
--- /dev/null
+++ b/datahub-graphql-core/src/main/resources/properties.graphql
@@ -0,0 +1,243 @@
+"""
+A structured property that can be shared between different entities
+"""
+type StructuredPropertyEntity implements Entity {
+  """
+  A primary key associated with the structured property
+  """
+  urn: String!
+
+  """
+  A standard Entity Type
+  """
+  type: EntityType!
+
+  """
+  Definition of this structured property including its name
+  """
+  definition: StructuredPropertyDefinition!
+
+  """
+  Granular API for querying edges extending from this entity
+  """
+  relationships(input: RelationshipsInput!): EntityRelationshipsResult
+}
+
+"""
+Properties about an individual structured property
+"""
+type StructuredPropertyDefinition {
+  """
+  The fully qualified name of the property. This includes its namespace
+  """
+  qualifiedName: String!
+
+  """
+  The display name of this structured property
+  """
+  displayName: String
+
+  """
+  The description of this property
+  """
+  description: String
+
+  """
+  The cardinality of a Structured Property determining whether one or multiple values
+  can be applied to the entity from this property.
+  """
+  cardinality: PropertyCardinality
+
+  """
+  A list of allowed values that the property is allowed to take.
+  """
+  allowedValues: [AllowedValue!]
+
+  """
+  The type of this structured property
+  """
+  valueType: DataTypeEntity!
+
+  """
+  Allows for type specialization of the valueType to be more specific about which
+  entity types are allowed, for example.
+  """
+  typeQualifier: TypeQualifier
+
+  """
+  Entity types that this structured property can be applied to
+  """
+  entityTypes: [EntityTypeEntity!]!
+}
+
+"""
+An entry for an allowed value for a structured property
+"""
+type AllowedValue {
+  """
+  The allowed value
+  """
+  value: PropertyValue!
+
+  """
+  The description of this allowed value
+  """
+  description: String
+}
+
+"""
+The cardinality of a Structured Property determining whether one or multiple values
+can be applied to the entity from this property.
+"""
+enum PropertyCardinality {
+  """
+  Only one value of this property can be applied to an entity
+  """
+  SINGLE
+
+  """
+  Multiple values of this property can be applied to an entity
+  """
+  MULTIPLE
+}
+
+"""
+Allows for type specialization of the valueType to be more specific about which
+entity types are allowed, for example.
+"""
+type TypeQualifier {
+  """
+  The list of allowed entity types
+  """
+  allowedTypes: [EntityTypeEntity!]
+}
+
+"""
+String property value
+"""
+type StringValue {
+  """
+  The value of a string type property
+  """
+  stringValue: String!
+}
+
+"""
+Numeric property value
+"""
+type NumberValue {
+  """
+  The value of a number type property
+  """
+  numberValue: Float!
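+
+  # A minimal read-side sketch: together with the `structuredProperties` field added
+  # to SchemaFieldEntity in entity.graphql, values can be selected through the
+  # PropertyValue union defined below; the urn is hypothetical:
+  #
+  #   query {
+  #     entity(urn: "urn:li:schemaField:(urn:li:dataset:(...),user_id)") {
+  #       ... on SchemaFieldEntity {
+  #         structuredProperties {
+  #           properties {
+  #             structuredProperty { urn }
+  #             values {
+  #               ... on StringValue { stringValue }
+  #               ... on NumberValue { numberValue }
+  #             }
+  #           }
+  #         }
+  #       }
+  #     }
+  #   }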
+}
+
+"""
+The value of a property
+"""
+union PropertyValue = StringValue | NumberValue
+
+"""
+An entry in a structured properties list represented as a tuple
+"""
+type StructuredPropertiesEntry {
+  """
+  The key of the map entry
+  """
+  structuredProperty: StructuredPropertyEntity!
+
+  """
+  The values of the structured property for this entity
+  """
+  values: [PropertyValue]!
+
+  """
+  The optional entities associated with the values if the values are entity urns
+  """
+  valueEntities: [Entity]
+}
+
+"""
+A data type registered in DataHub
+"""
+type DataTypeEntity implements Entity {
+  """
+  A primary key associated with the data type
+  """
+  urn: String!
+
+  """
+  A standard Entity Type
+  """
+  type: EntityType!
+
+  """
+  Info about this type including its name
+  """
+  info: DataTypeInfo!
+
+  """
+  Granular API for querying edges extending from this entity
+  """
+  relationships(input: RelationshipsInput!): EntityRelationshipsResult
+}
+
+"""
+A well-supported, standard DataHub Data Type.
+"""
+enum StdDataType {
+  """
+  String data type
+  """
+  STRING
+
+  """
+  Number data type
+  """
+  NUMBER
+
+  """
+  Urn data type
+  """
+  URN
+
+  """
+  Rich text data type. Right now this is markdown only.
+  """
+  RICH_TEXT
+
+  """
+  Date data type in format YYYY-MM-DD
+  """
+  DATE
+
+  """
+  Any other data type - refer to a provided data type urn.
+  """
+  OTHER
+}
+
+"""
+Properties about an individual data type
+"""
+type DataTypeInfo {
+  """
+  The standard data type
+  """
+  type: StdDataType!
+
+  """
+  The fully qualified name of the type. This includes its namespace
+  """
+  qualifiedName: String!
+
+  """
+  The display name of this type
+  """
+  displayName: String
+
+  """
+  The description of this type
+  """
+  description: String
+}
diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql
index 8f2377edb546e..8896dd02b5ad3 100644
--- a/datahub-graphql-core/src/main/resources/search.graphql
+++ b/datahub-graphql-core/src/main/resources/search.graphql
@@ -1139,7 +1139,7 @@ type QuickFilter {
 }
 
 """
-Freshness stats for a query result. 
+Freshness stats for a query result.
 Captures whether the query was served out of a cache, what the staleness was, etc.
 """
 type FreshnessStats {
@@ -1154,7 +1154,7 @@ type FreshnessStats {
   """
   In case an index was consulted, this reflects the freshness of the index
   """
   systemFreshness: [SystemFreshness]
-  
+
 }
 
 type SystemFreshness {
@@ -1303,4 +1303,4 @@ input SortCriterion {
   The order in which we will be sorting
   """
   sortOrder: SortOrder!
-}
+}
\ No newline at end of file
diff --git a/datahub-graphql-core/src/main/resources/tests.graphql b/datahub-graphql-core/src/main/resources/tests.graphql
index 9dce48ac60d83..579f4919bdc78 100644
--- a/datahub-graphql-core/src/main/resources/tests.graphql
+++ b/datahub-graphql-core/src/main/resources/tests.graphql
@@ -44,6 +44,7 @@ Definition of the test
 type TestDefinition {
   """
   JSON-based def for the test
+  Deprecated! JSON representation is no longer supported.
   """
   json: String
 }
@@ -209,6 +210,7 @@ input UpdateTestInput {
 input TestDefinitionInput {
   """
   The string representation of the Test
+  Deprecated! JSON representation is no longer supported.
""" json: String } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index de507eda8cdef..b75530773c352 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -122,13 +122,7 @@ public static void verifyIngestProposal( int numberOfInvocations, List proposals) { AspectsBatchImpl batch = - AspectsBatchImpl.builder() - .mcps( - proposals, - mock(AuditStamp.class), - mockService.getEntityRegistry(), - mockService.getSystemEntityClient()) - .build(); + AspectsBatchImpl.builder().mcps(proposals, mock(AuditStamp.class), mockService).build(); Mockito.verify(mockService, Mockito.times(numberOfInvocations)) .ingestProposal(Mockito.eq(batch), Mockito.eq(false)); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 433772d7e2cfe..c565e771a0475 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -26,6 +26,7 @@ import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; @@ -44,6 +45,7 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = initMockEntityClient( @@ -70,7 +72,8 @@ public static void testBrowseV2Success() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -87,6 +90,7 @@ public static void testBrowseV2Success() throws Exception { @Test public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); List orFilters = new ArrayList<>(); @@ -123,7 +127,8 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -143,6 +148,7 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { @Test public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); + FormService mockFormService = Mockito.mock(FormService.class); 
ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); EntityClient mockClient = @@ -170,7 +176,8 @@ public static void testBrowseV2SuccessWithView() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, viewService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 9596abf55d04f..c6e6cdc7f018e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java new file mode 100644 index 0000000000000..0fe57d0a28fff --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java @@ -0,0 +1,167 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IsFormAssignedToMeResolverTest { + + private static final Urn TEST_USER_1 = UrnUtils.getUrn("urn:li:corpuser:test-1"); + private static final Urn TEST_USER_2 = UrnUtils.getUrn("urn:li:corpuser:test-2"); + private static final Urn TEST_GROUP_1 = UrnUtils.getUrn("urn:li:corpGroup:test-1"); + private static final Urn TEST_GROUP_2 = UrnUtils.getUrn("urn:li:corpGroup:test-2"); + + @Test + public void testGetSuccessUserMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, Collections.emptyList()); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); + + 
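+    // Assign both a group and the matching user: a direct user match should let the
+    // resolver return true without ever consulting the GroupService.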
FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. + } + + @Test + public void testGetSuccessGroupMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessBothMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); // is matching user + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. 
+ } + + @Test + public void testGetSuccessNoMatchNullAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchEmptyAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + actors.setUsers(Collections.emptyList()); + actors.setGroups(Collections.emptyList()); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchNoAssignmentMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_2.toString()); // Does not match. + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + private GroupService mockGroupService(final Urn userUrn, final List groupUrns) + throws Exception { + GroupService mockService = Mockito.mock(GroupService.class); + Mockito.when( + mockService.getGroupsForUser(Mockito.eq(userUrn), Mockito.any(Authentication.class))) + .thenReturn(groupUrns); + return mockService; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java new file mode 100644 index 0000000000000..192f4ff9aa7c7 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java @@ -0,0 +1,122 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.UrnUtils; 
+import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class VerifyFormResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final VerifyFormInput TEST_INPUT = + new VerifyFormInput(TEST_FORM_URN, TEST_DATASET_URN); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + boolean success = resolver.get(mockEnv).get(); + + assertTrue(success); + + // Validate that we called verify on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity( + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)), + Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN)), + Mockito.any(Authentication.class)); + } + + @Test + public void testGetUnauthorized() throws Exception { + FormService mockFormService = initMockFormService(false, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do not call verify on the service + Mockito.verify(mockFormService, Mockito.times(0)) + .verifyFormForEntity(Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + @Test + public void testThrowErrorOnVerification() throws Exception { + FormService mockFormService = initMockFormService(true, false); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do call verifyFormForEntity but an error is thrown + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity(Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + private FormService initMockFormService( + final boolean isFormAssignedToUser, final boolean shouldVerify) throws Exception { + FormService service = Mockito.mock(FormService.class); + Mockito.when( + 
service.isFormAssignedToUser( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class))) + .thenReturn(isFormAssignedToUser); + + if (shouldVerify) { + Mockito.when( + service.verifyFormForEntity( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class))) + .thenReturn(true); + } else { + Mockito.when( + service.verifyFormForEntity( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class))) + .thenThrow(new RuntimeException()); + } + + return service; + } + + private GroupService initMockGroupService() throws Exception { + GroupService service = Mockito.mock(GroupService.class); + Mockito.when(service.getGroupsForUser(Mockito.any(), Mockito.any(Authentication.class))) + .thenReturn(new ArrayList<>()); + + return service; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index bec141bddf260..6ae2fa7dcbf64 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -46,7 +47,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) - .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); + .rollbackIngestion(Mockito.eq(RUN_ID), any(), any(Authentication.class)); } @Test @@ -58,7 +59,7 @@ public void testRollbackIngestionMethod() throws Exception { resolver.rollbackIngestion(RUN_ID, mockContext).get(); Mockito.verify(mockClient, Mockito.times(1)) - .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); + .rollbackIngestion(Mockito.eq(RUN_ID), any(), any(Authentication.class)); } @Test @@ -66,7 +67,7 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RuntimeException.class) .when(mockClient) - .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class)); + .rollbackIngestion(any(), any(), any(Authentication.class)); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 8fc5ab6ebb828..05387123f9c96 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -22,7 +22,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.Constants; 
import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; @@ -74,7 +73,7 @@ public class MutableTypeBatchResolverTest { @Test public void testGetSuccess() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); BatchMutableType batchMutableType = new DatasetType(mockClient); @@ -167,7 +166,7 @@ public void testGetSuccess() throws Exception { @Test public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); BatchMutableType batchMutableType = new DatasetType(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c7d397c5a4a73..4d56cc3d52af8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -15,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; @@ -27,6 +27,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; @@ -52,6 +53,7 @@ public static void testApplyViewNullBaseFilter() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + FormService mockFormService = Mockito.mock(FormService.class); List facets = ImmutableList.of("platform", "domains"); @@ -71,7 +73,7 @@ public static void testApplyViewNullBaseFilter() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -102,6 +104,7 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); @@ -122,7 +125,7 @@ public static void testApplyViewBaseFilter() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, 
mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -166,6 +169,7 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of("platform"); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); EntityClient mockClient = @@ -184,7 +188,7 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); @@ -217,6 +221,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of(); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); EntityClient mockClient = @@ -235,7 +240,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -267,6 +272,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
+ FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, null); List searchEntityTypes = @@ -290,7 +296,7 @@ public static void testApplyViewViewDoesNotExist() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); @@ -306,6 +312,7 @@ public static void testApplyViewViewDoesNotExist() throws Exception { @Test public static void testErrorFetchingResults() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -322,7 +329,7 @@ public static void testErrorFetchingResults() throws Exception { .thenThrow(new RemoteInvocationException()); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index 29a2b3081aefe..f5accdfb02043 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -9,8 +9,8 @@ import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.AggregationMetadata; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index d0bbfd126b9b9..0b8c1f1aeb83f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -15,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index 909ceeb8f3bab..ff8bd542fbdff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade; +import com.linkedin.gms.factory.auth.AuthorizerChainFactory; +import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -19,7 +21,11 @@ excludeFilters = { @ComponentScan.Filter( type = FilterType.ASSIGNABLE_TYPE, - classes = ScheduledAnalyticsFactory.class) + classes = { + ScheduledAnalyticsFactory.class, + AuthorizerChainFactory.class, + DataHubAuthorizerFactory.class + }) }) public class UpgradeCliApplication { public static void main(String[] args) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index dd6c3fd1e44aa..4be39ac3c4bfc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -4,14 +4,16 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; @Override public String id() { @@ -27,9 +29,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(false); + entityClient.setWritable(false); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode off in GMS", e); context.report().addLine("Failed to turn write mode off in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 8a0d374d6ee3e..09713dc78ee27 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -4,13 +4,15 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import 
java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; @Override public String id() { @@ -26,9 +28,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(true); + entityClient.setWritable(true); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode back on in GMS", e); context.report().addLine("Failed to turn write mode back on in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index abd144bf453ed..406963c58fd71 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -11,7 +11,7 @@ public class BackfillBrowsePathsV2Config { @Bean public BackfillBrowsePathsV2 backfillBrowsePathsV2( - EntityService entityService, SearchService searchService) { + EntityService entityService, SearchService searchService) { return new BackfillBrowsePathsV2(entityService, searchService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index 1e9298bc60612..caa45988733df 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -3,7 +3,9 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; @@ -20,7 +22,9 @@ public BuildIndices buildIndices( final GraphService graphService, final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao, + final EntityRegistry entityRegistry) { return new BuildIndices( systemMetadataService, @@ -28,6 +32,8 @@ public BuildIndices buildIndices( entitySearchService, graphService, baseElasticSearchComponents, - configurationProvider); + configurationProvider, + aspectDao, + entityRegistry); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index d968e8521867e..741aeece1cf62 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java
@@ -1,7 +1,7 @@
 package com.linkedin.datahub.upgrade.config;
 
 import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade;
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import io.ebean.Database;
@@ -21,14 +21,13 @@ public class NoCodeUpgradeConfig {
 
   @Autowired ApplicationContext applicationContext;
 
   @Bean(name = "noCodeUpgrade")
-  @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"})
+  @DependsOn({"ebeanServer", "entityService", "systemEntityClient", "entityRegistry"})
   @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
   @Nonnull
   public NoCodeUpgrade createInstance() {
     final Database ebeanServer = applicationContext.getBean(Database.class);
-    final EntityService entityService = applicationContext.getBean(EntityService.class);
-    final SystemRestliEntityClient entityClient =
-        applicationContext.getBean(SystemRestliEntityClient.class);
+    final EntityService<?> entityService = applicationContext.getBean(EntityService.class);
+    final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class);
     final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class);
 
     return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient);
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java
index 0b46133209382..5bf1241e21305 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java
@@ -8,7 +8,7 @@
 @Configuration
 public class RemoveUnknownAspectsConfig {
   @Bean(name = "removeUnknownAspects")
-  public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) {
+  public RemoveUnknownAspects removeUnknownAspects(EntityService<?> entityService) {
     return new RemoveUnknownAspects(entityService);
   }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java
index 116d62878f5c6..ec6e5a4a8f04d 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java
@@ -1,7 +1,7 @@
 package com.linkedin.datahub.upgrade.config;
 
 import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup;
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
@@ -25,7 +25,7 @@ public class RestoreBackupConfig {
   @DependsOn({
     "ebeanServer",
     "entityService",
-    "systemRestliEntityClient",
+    "systemEntityClient",
     "graphService",
     "searchService",
     "entityRegistry"
@@ -34,9 +34,8 @@ public class RestoreBackupConfig {
   @Nonnull
   public RestoreBackup createInstance() {
     final Database ebeanServer = applicationContext.getBean(Database.class);
-    final EntityService entityService = applicationContext.getBean(EntityService.class);
-    final SystemRestliEntityClient entityClient =
-        applicationContext.getBean(SystemRestliEntityClient.class);
+    final EntityService<?> entityService = applicationContext.getBean(EntityService.class);
+    final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class);
     final GraphService graphClient = applicationContext.getBean(GraphService.class);
     final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class);
     final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class);
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java
index 9d229f315d709..008bdf5cfac38 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java
@@ -3,7 +3,6 @@
 import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.graph.GraphService;
-import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.EntitySearchService;
 import io.ebean.Database;
 import javax.annotation.Nonnull;
@@ -21,19 +20,17 @@ public class RestoreIndicesConfig {
   @Autowired ApplicationContext applicationContext;
 
   @Bean(name = "restoreIndices")
-  @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"})
+  @DependsOn({"ebeanServer", "entityService", "searchService", "graphService"})
   @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
   @Nonnull
   public RestoreIndices createInstance() {
     final Database ebeanServer = applicationContext.getBean(Database.class);
-    final EntityService entityService = applicationContext.getBean(EntityService.class);
+    final EntityService<?> entityService = applicationContext.getBean(EntityService.class);
     final EntitySearchService entitySearchService =
         applicationContext.getBean(EntitySearchService.class);
     final GraphService graphService = applicationContext.getBean(GraphService.class);
-    final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class);
 
-    return new RestoreIndices(
-        ebeanServer, entityService, entityRegistry, entitySearchService, graphService);
+    return new RestoreIndices(ebeanServer, entityService, entitySearchService, graphService);
   }
 
   @Bean(name = "restoreIndices")
@@ -41,6 +38,6 @@ public RestoreIndices createInstance() {
   @Nonnull
   public RestoreIndices createNotImplInstance() {
     log.warn("restoreIndices is not supported for cassandra!");
-    return new RestoreIndices(null, null, null, null, null);
+    return new RestoreIndices(null, null, null, null);
   }
 }
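A note on the recurring signature change in these config classes: the raw `EntityService` parameter becomes the unbounded wildcard `EntityService<?>`. The upgrade beans never touch the type parameter, so they can accept any instantiation of the now-generic service. A minimal self-contained sketch of why the wildcard works at an injection site (the `Repo`/`RepoConsumer` names are illustrative, not from this patch):

    import java.util.List;

    interface Repo<T> {
      List<String> ids(); // a method whose signature does not mention T
    }

    class RepoConsumer {
      private final Repo<?> repo; // accepts a Repo<A>, Repo<B>, ... without binding T

      RepoConsumer(Repo<?> repo) {
        this.repo = repo;
      }

      int count() {
        return repo.ids().size(); // fine: ids() is independent of T
      }
    }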
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java
index 6cc94fbed5bf3..57e16eb72d025 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java
@@ -8,49 +8,33 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import lombok.Getter;
+import lombok.experimental.Accessors;
 
+@Getter
+@Accessors(fluent = true)
 public class DefaultUpgradeContext implements UpgradeContext {
 
-  private final Upgrade _upgrade;
-  private final UpgradeReport _report;
-  private final List<UpgradeStepResult> _previousStepResults;
-  private final List<String> _args;
-  private final Map<String, Optional<String>> _parsedArgs;
+  private final Upgrade upgrade;
+  private final UpgradeReport report;
+  private final List<UpgradeStepResult> previousStepResults;
+  private final List<String> args;
+  private final Map<String, Optional<String>> parsedArgs;
 
   DefaultUpgradeContext(
       Upgrade upgrade,
       UpgradeReport report,
       List<UpgradeStepResult> previousStepResults,
       List<String> args) {
-    _upgrade = upgrade;
-    _report = report;
-    _previousStepResults = previousStepResults;
-    _args = args;
-    _parsedArgs = UpgradeUtils.parseArgs(args);
-  }
-
-  @Override
-  public Upgrade upgrade() {
-    return _upgrade;
+    this.upgrade = upgrade;
+    this.report = report;
+    this.previousStepResults = previousStepResults;
+    this.args = args;
+    this.parsedArgs = UpgradeUtils.parseArgs(args);
   }
 
   @Override
   public List<UpgradeStepResult> stepResults() {
-    return _previousStepResults;
-  }
-
-  @Override
-  public UpgradeReport report() {
-    return _report;
-  }
-
-  @Override
-  public List<String> args() {
-    return _args;
-  }
-
-  @Override
-  public Map<String, Optional<String>> parsedArgs() {
-    return _parsedArgs;
+    return previousStepResults;
   }
 }
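For context on the Lombok change above: `@Getter` combined with `@Accessors(fluent = true)` generates accessors named after the field, with no `get` prefix, which is why the hand-written `upgrade()`, `report()`, `args()`, and `parsedArgs()` methods can be dropped while the `UpgradeContext` interface stays satisfied. A minimal sketch of the mechanism (the `Example` class is illustrative and assumes Lombok on the classpath):

    import lombok.Getter;
    import lombok.experimental.Accessors;

    @Getter
    @Accessors(fluent = true)
    class Example {
      private final String id; // Lombok generates: public String id() { return id; }

      Example(String id) {
        this.id = id;
      }
    }

    // usage: new Example("restoreIndices").id() evaluates to "restoreIndices"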
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java
index 623c8a71e861d..bddf53a274905 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java
@@ -16,7 +16,9 @@
 import java.util.List;
 import java.util.Map;
 import javax.annotation.Nonnull;
+import lombok.extern.slf4j.Slf4j;
 
+@Slf4j
 public class DefaultUpgradeManager implements UpgradeManager {
 
   private final Map<String, Upgrade> _upgrades = new HashMap<>();
@@ -137,6 +139,7 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeStep step) {
           break;
         }
       } catch (Exception e) {
+        log.error("Caught exception during attempt {} of Step with id {}", i, step.id(), e);
         context
             .report()
             .addLine(
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java
index 674efb2b8ba78..1524a015e414e 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java
@@ -6,7 +6,7 @@
 import com.linkedin.datahub.upgrade.UpgradeStep;
 import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep;
 import com.linkedin.datahub.upgrade.common.steps.GMSQualificationStep;
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import io.ebean.Database;
@@ -28,9 +28,9 @@ public class NoCodeUpgrade implements Upgrade {
   // Upgrade requires the Database.
   public NoCodeUpgrade(
       @Nullable final Database server,
-      final EntityService entityService,
+      final EntityService<?> entityService,
       final EntityRegistry entityRegistry,
-      final SystemRestliEntityClient entityClient) {
+      final SystemEntityClient entityClient) {
     if (server != null) {
       _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient);
       _cleanupSteps = buildCleanupSteps();
@@ -61,9 +61,9 @@ private List<UpgradeStep> buildCleanupSteps() {
 
   private List<UpgradeStep> buildUpgradeSteps(
       final Database server,
-      final EntityService entityService,
+      final EntityService<?> entityService,
       final EntityRegistry entityRegistry,
-      final SystemRestliEntityClient entityClient) {
+      final SystemEntityClient entityClient) {
     final List<UpgradeStep> steps = new ArrayList<>();
     steps.add(new RemoveAspectV2TableStep(server));
     steps.add(new GMSQualificationStep(ImmutableMap.of("noCode", "true")));
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java
index 7e55dcddc639f..74d97767d1c39 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java
@@ -17,7 +17,7 @@ public class RemoveClientIdAspectStep implements UpgradeStep {
 
   private static final String INVALID_CLIENT_ID_ASPECT = "clientId";
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
 
   @Override
   public String id() {
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java
index dc95b7605ef88..3ea449051b355 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java
@@ -12,7 +12,7 @@ public class RemoveUnknownAspects implements Upgrade {
 
   private final List<UpgradeStep> _steps;
 
-  public RemoveUnknownAspects(final EntityService entityService) {
+  public RemoveUnknownAspects(final EntityService<?> entityService) {
     _steps = buildSteps(entityService);
   }
 
@@ -26,7 +26,7 @@ public List<UpgradeStep> steps() {
     return _steps;
   }
 
-  private List<UpgradeStep> buildSteps(final EntityService entityService) {
+  private List<UpgradeStep> buildSteps(final EntityService<?> entityService) {
     final List<UpgradeStep> steps = new ArrayList<>();
     steps.add(new RemoveClientIdAspectStep(entityService));
     return steps;
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java
index 4ac295b4fdfb7..bcaeaa34e8936 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java
@@ -8,7 +8,7 @@
 import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep;
 import com.linkedin.datahub.upgrade.common.steps.GMSDisableWriteModeStep;
 import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep;
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
@@ -24,9 +24,9 @@ public class RestoreBackup implements Upgrade {
 
   public RestoreBackup(
       @Nullable final Database server,
-      final EntityService entityService,
+      final EntityService<?> entityService,
       final EntityRegistry entityRegistry,
-      final SystemRestliEntityClient entityClient,
+      final SystemEntityClient entityClient,
       final GraphService graphClient,
       final EntitySearchService searchClient) {
     if (server != null) {
@@ -50,9 +50,9 @@ public List<UpgradeStep> steps() {
 
   private List<UpgradeStep> buildSteps(
       final Database server,
-      final EntityService entityService,
+      final EntityService<?> entityService,
       final EntityRegistry entityRegistry,
-      final SystemRestliEntityClient entityClient,
+      final SystemEntityClient entityClient,
       final GraphService graphClient,
       final EntitySearchService searchClient) {
     final List<UpgradeStep> steps = new ArrayList<>();
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java
index 5c4e8cdc47e34..c756407832a36 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java
@@ -47,7 +47,7 @@ public class RestoreStorageStep implements UpgradeStep {
   private final ExecutorService _gmsThreadPool;
 
   public RestoreStorageStep(
-      final EntityService entityService, final EntityRegistry entityRegistry) {
+      final EntityService<?> entityService, final EntityRegistry entityRegistry) {
     _entityService = entityService;
     _entityRegistry = entityRegistry;
     _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class);
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java
index 212f0da9f592d..c6839c0e63f05 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java
@@ -9,6 +9,7 @@
  * Strings
  */
 public interface BackupReader {
+  String getName();
 
   @Nonnull
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java
index f46bb9b05624d..9bc42e23a9974 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java
@@ -8,7 +8,6 @@
 import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.graph.GraphService;
-import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.EntitySearchService;
 import io.ebean.Database;
 import java.util.ArrayList;
@@ -32,12 +31,11 @@ public class RestoreIndices implements Upgrade {
 
   public RestoreIndices(
       @Nullable final Database server,
-      final EntityService entityService,
-      final EntityRegistry entityRegistry,
+      final EntityService<?> entityService,
       final EntitySearchService entitySearchService,
       final GraphService graphService) {
     if (server != null) {
-      _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService);
+      _steps = buildSteps(server, entityService, entitySearchService, graphService);
     } else {
       _steps = List.of();
     }
@@ -55,14 +53,13 @@ public List<UpgradeStep> steps() {
 
   private List<UpgradeStep> buildSteps(
       final Database server,
-      final EntityService entityService,
-      final EntityRegistry entityRegistry,
+      final EntityService<?> entityService,
       final EntitySearchService entitySearchService,
       final GraphService graphService) {
     final List<UpgradeStep> steps = new ArrayList<>();
     steps.add(new ClearSearchServiceStep(entitySearchService, false));
     steps.add(new ClearGraphServiceStep(graphService, false));
-    steps.add(new SendMAEStep(server, entityService, entityRegistry));
+    steps.add(new SendMAEStep(server, entityService));
     return steps;
   }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
index bedf200a1c055..aca27892d2e3a 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
@@ -10,7 +10,6 @@
 import com.linkedin.metadata.entity.ebean.EbeanAspectV2;
 import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs;
 import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult;
-import com.linkedin.metadata.models.registry.EntityRegistry;
 import io.ebean.Database;
 import io.ebean.ExpressionList;
 import java.util.ArrayList;
@@ -23,7 +22,9 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.function.Function;
+import lombok.extern.slf4j.Slf4j;
 
+@Slf4j
 public class SendMAEStep implements UpgradeStep {
 
   private static final int DEFAULT_BATCH_SIZE = 1000;
@@ -51,10 +52,7 @@ public RestoreIndicesResult call() {
     }
   }
 
-  public SendMAEStep(
-      final Database server,
-      final EntityService entityService,
-      final EntityRegistry entityRegistry) {
+  public SendMAEStep(final Database server, final EntityService<?> entityService) {
     _server = server;
     _entityService = entityService;
   }
@@ -77,7 +75,7 @@ private List<RestoreIndicesResult> iterateFutures(List<Future<RestoreIndicesResult>> futures) {
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java
       List<ElasticSearchIndexed> indexedServices =
           Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService)
@@ -36,7 +40,13 @@ public BuildIndices(
               .map(service -> (ElasticSearchIndexed) service)
               .collect(Collectors.toList());
 
-    _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider);
+    _steps =
+        buildSteps(
+            indexedServices,
+            baseElasticSearchComponents,
+            configurationProvider,
+            aspectDao,
+            entityRegistry);
   }
 
   @Override
@@ -53,13 +63,19 @@ private List<UpgradeStep> buildSteps(
       final List<ElasticSearchIndexed> indexedServices,
       final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents
           baseElasticSearchComponents,
-      final ConfigurationProvider configurationProvider) {
+      final ConfigurationProvider configurationProvider,
+      final AspectDao aspectDao,
+      final EntityRegistry entityRegistry) {
     final List<UpgradeStep> steps = new ArrayList<>();
     // Disable ES write mode/change refresh rate and clone indices
     steps.add(
         new BuildIndicesPreStep(
-            baseElasticSearchComponents, indexedServices, configurationProvider));
+            baseElasticSearchComponents,
+            indexedServices,
+            configurationProvider,
+            aspectDao,
+            entityRegistry));
     // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService
     steps.add(new BuildIndicesStep(indexedServices));
     // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java
index c25888be07f89..894075417a349 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java
@@ -2,6 +2,8 @@
 
 import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING;
 import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs;
+import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME;
+import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME;
 
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.datahub.upgrade.UpgradeContext;
@@ -11,8 +13,12 @@
 import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory;
+import com.linkedin.metadata.entity.AspectDao;
+import com.linkedin.metadata.entity.EntityUtils;
+import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -31,6 +37,8 @@ public class BuildIndicesPreStep implements UpgradeStep {
   private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents _esComponents;
   private final List<ElasticSearchIndexed> _services;
   private final ConfigurationProvider _configurationProvider;
+  private final AspectDao _aspectDao;
+  private final EntityRegistry _entityRegistry;
 
   @Override
   public String id() {
@@ -46,9 +54,28 @@ public int retryCount() {
   public Function<UpgradeContext, UpgradeStepResult> executable() {
     return (context) -> {
       try {
+        List<ReindexConfig> reindexConfigs =
+            _configurationProvider.getStructuredProperties().isSystemUpdateEnabled()
+                ? getAllReindexConfigs(
+                    _services,
+                    _aspectDao
+                        .streamAspects(
+                            STRUCTURED_PROPERTY_ENTITY_NAME,
+                            STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME)
+                        .map(
+                            entityAspect ->
+                                EntityUtils.toAspectRecord(
+                                    STRUCTURED_PROPERTY_ENTITY_NAME,
+                                    STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME,
+                                    entityAspect.getMetadata(),
+                                    _entityRegistry))
+                        .map(recordTemplate -> (StructuredPropertyDefinition) recordTemplate)
+                        .collect(Collectors.toSet()))
+                : getAllReindexConfigs(_services);
+
         // Get indices to update
         List<ReindexConfig> indexConfigs =
-            getAllReindexConfigs(_services).stream()
+            reindexConfigs.stream()
                 .filter(ReindexConfig::requiresReindex)
                 .collect(Collectors.toList());
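The `BuildIndicesPreStep` hunk above picks the candidate set of reindex configs up front (with or without structured-property definitions, depending on a config flag) and only then filters down to the indices that actually require work. A minimal sketch of that select-then-filter shape with stand-in types (`ReindexPlanner`, `Config`, and `plan` are illustrative, not DataHub APIs):

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class ReindexPlanner {
      record Config(String index, boolean requiresReindex) {}

      // Choose the candidate set once, then filter to what needs a reindex.
      static List<Config> plan(boolean extrasEnabled, List<Config> base, List<Config> extras) {
        List<Config> candidates =
            extrasEnabled
                ? Stream.concat(base.stream(), extras.stream()).collect(Collectors.toList())
                : base;
        return candidates.stream().filter(Config::requiresReindex).collect(Collectors.toList());
      }
    }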
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java
index b3de7c503fb3e..52b34200991c3 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java
@@ -2,8 +2,10 @@
 
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 import lombok.extern.slf4j.Slf4j;
@@ -39,6 +41,23 @@ public static List<ReindexConfig> getAllReindexConfigs(
     return reindexConfigs;
   }
 
+  public static List<ReindexConfig> getAllReindexConfigs(
+      List<ElasticSearchIndexed> elasticSearchIndexedList,
+      Collection<StructuredPropertyDefinition> structuredProperties)
+      throws IOException {
+    // Avoid locking & reprocessing
+    List<ReindexConfig> reindexConfigs = new ArrayList<>(_reindexConfigs);
+    if (reindexConfigs.isEmpty()) {
+      for (ElasticSearchIndexed elasticSearchIndexed : elasticSearchIndexedList) {
+        reindexConfigs.addAll(
+            elasticSearchIndexed.buildReindexConfigsWithAllStructProps(structuredProperties));
+      }
+      _reindexConfigs = new ArrayList<>(reindexConfigs);
+    }
+
+    return reindexConfigs;
+  }
+
   public static boolean validateWriteBlock(
       RestHighLevelClient esClient, String indexName, boolean expectedState)
       throws IOException, InterruptedException {
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java
index 03f0b0b7f2ec2..4b9fc5bba0204 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java
@@ -11,7 +11,7 @@ public class BackfillBrowsePathsV2 implements Upgrade {
 
   private final List<UpgradeStep> _steps;
 
-  public BackfillBrowsePathsV2(EntityService entityService, SearchService searchService) {
+  public BackfillBrowsePathsV2(EntityService<?> entityService, SearchService searchService) {
     _steps = ImmutableList.of(new BackfillBrowsePathsV2Step(entityService, searchService));
   }
 
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java
index 610d9069337a5..9a426369cfb02 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java
@@ -54,10 +54,10 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep {
           Constants.ML_FEATURE_ENTITY_NAME);
   private static final Integer BATCH_SIZE = 5000;
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final SearchService _searchService;
 
-  public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) {
+  public BackfillBrowsePathsV2Step(EntityService<?> entityService, SearchService searchService) {
     _searchService = searchService;
     _entityService = entityService;
   }
diff --git a/datahub-upgrade/src/main/resources/application.properties b/datahub-upgrade/src/main/resources/application.properties
new file mode 100644
index 0000000000000..b884c92f74bd4
--- /dev/null
+++ b/datahub-upgrade/src/main/resources/application.properties
@@ -0,0 +1,5 @@
+management.health.elasticsearch.enabled=false
+management.health.neo4j.enabled=false
+ingestion.enabled=false
+spring.main.allow-bean-definition-overriding=true
+entityClient.impl=restli
diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
index 0e7bf5ddd5250..be28b7f739cf5 100644
--- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
+++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
@@ -20,7 +20,7 @@ public class UpgradeCliApplicationTestConfiguration {
 
   @MockBean private Database ebeanServer;
 
-  @MockBean private EntityService _entityService;
+  @MockBean private EntityService<?> _entityService;
 
   @MockBean private SearchService searchService;
 
diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle
index c0355b935137a..05af6871715ce 100644
--- a/datahub-web-react/build.gradle
+++ b/datahub-web-react/build.gradle
@@ -117,7 +117,6 @@ task cleanExtraDirs {
     delete 'dist'
     delete 'tmp'
     delete 'just'
-    delete fileTree('../datahub-frontend/public')
     delete fileTree(dir: 'src', include: '*.generated.ts')
 }
 clean.finalizedBy(cleanExtraDirs)
diff --git a/datahub-web-react/index.html b/datahub-web-react/index.html
index 9490881246e12..bb86e2f350e1a 100644
--- a/datahub-web-react/index.html
+++ b/datahub-web-react/index.html
@@ -2,7 +2,8 @@
-
+
+
diff --git a/datahub-web-react/public/assets/favicon.ico b/datahub-web-react/public/assets/icons/favicon.ico
similarity index 100%
rename from datahub-web-react/public/assets/favicon.ico
rename to datahub-web-react/public/assets/icons/favicon.ico
diff --git a/datahub-web-react/public/assets/logo.png b/datahub-web-react/public/assets/logo.png
deleted file mode 100644
index 5e34e6425d23fa1a19ca3c89dae7acdfb2902e86..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
z_xeQ~G#?F9xq##_NEEQk{R2pw$MaJU?9pdInQ(fW#1L4xvg(+H+wC=6Ak#sadI|m> z1{3{{GeY>U-JgrwmlzS4a4w}3PcpoDlNAz9^iLW@r2lvoG%pk+e#VGn(VfY zVaNEZA^zil+5KTav*bH%X)U;P>8o2_8ciFFm}sCebIJsUqxn{7S;)YD0K*yjA&*M4 zbu>>DLIwoy#ud^OKm>^4SI^LA8>bb#xDe3W01qO~!z%1hX*5gLzGH9Iyrp8YVv{!7 zJ9qA|FxDpWSbz(n2=oEy&pGhDI{Ib)z(=7F8PE&_!VfIoD?CuPc6;S{dng3_X{c&F JC{{KL{C{3veEk3b diff --git a/datahub-web-react/public/assets/logo.png b/datahub-web-react/public/assets/logo.png new file mode 120000 index 0000000000000..c570fd37bed97 --- /dev/null +++ b/datahub-web-react/public/assets/logo.png @@ -0,0 +1 @@ +logos/datahub-logo.png \ No newline at end of file diff --git a/datahub-web-react/public/assets/logos/datahub-logo.png b/datahub-web-react/public/assets/logos/datahub-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..5e34e6425d23fa1a19ca3c89dae7acdfb2902e86 GIT binary patch literal 53563 zcmYhiWmJ^g`!Iad9R?vapeRTibc2YX0xCnNA~lqB4oIp8RPLAqb+=cyRA21d%p_f0ro9!C!)B;4I)j zR4xw;-5|&yk?@ab)KPK>g18}#d&;`r>1&fcuIx*xifa^^-@In6rY?9jurE_yg}u(p zE8=Bl>O9ZO`%7dXL#(0BeFPbNl6-aJi>km`bKRu?QtRmEbWYD1-N&~RVvY`tC+#0_ zQxRkD2OfIBYpjf+W6KcI0mX^KQ&NBege{M=VS%zIKTZGCSEa<12O=-1{D?nw3vilk z=LsEhJ3q}Dt!o%sYZ?m7!eK#XMJjWl>XRRN?(!d-rdLQc775`#V%B7v zBpnlTx7z#-B#!P$kwd!L5INKzRJ5}CQev)9!O{EB>lK~4A^$;sjm7V^TTu30!UG=x z*3O}f^!DG5jXp1X1+BsY(NAx(l0XtyArk28lf9RC?)QUTsKttMYN439wXE}wg7#>> ztoK^0XBVNM4FVp2c#m%**;Kp-L!g2vL<-&Hxg~RKswg@1o3a^#>=^^DDE4{!`m0jv zwX<2b9fUoH#9Rn(zsqxsu~Z(CeaR-IT&uM2GOc z^UPuI%Tou}>FfXq%T8dn+9%PuLY`2V{>A}+kbOJb6lY+g$BwOQp!*+eEff?*1s)c- z7;vSDo@c^fb%0FAQISG@N#&Mf0@Tk#c$@m3`LE9AlcgFiLufcpoWmw3MTr)YQ35>E zDZ_S4+4y2r8O6T%G0h~VhC{waOi_=?g|RP@+`{vIG1JhdPHzN9@1eTP@+NI zcUJozvgcp2P@t79!P`P&N(2Ps0fHI}nC_>2`H;OW;z2J|sIzi1y4NvAO;@xfrm4!=Ag<9siNY&WHDnRoD{_-mW zLUfUwx}G9rI%t;zT)VSi+eRuoIaEjxnI=JG3_NlLdnVM`W8cFMGD1ctN<~90q0*Fp zpa+t-R~cKQy87~<0#1bOrZi(XQ8O_C?TY~ISB^_hpaqViYH~|f$m$<^zKTOM#Qcbk z2Ew002)rBtB2KSRXe`;6qs}hM!Lh|dYmp8?jg!1YtfYW3&EafR&B0I?o2H@a#qfa4 zFw6Wr6cuFjS6-VGQc@otcdtniudt!}X~Mu$Ps{Tt_Y}uH7PINT9SH@&bpg*EzJ8}J zy#hB_H&6}iZLyOOU31%Qg|IaLgUtY77cjMNF0pBqF?Y%f$3yO^%nzU-1*{W|nNNj^ z-uG~zUrR>3L|7RnAYk@)NOnHA4@h02mvkhCs!yq^k}DsKsa*`xB3Qr&?qf#|8nYtH zAcPb(kN>S2$+1HG1E`b;Jf=kTY5JF9-um-k7KH5(+lp*sff!`eLkpPYT3>^ejObz; z``qZ}ay@j5KXmI^1*|FhV2;96N*36_u^asv~q8z$db&>GLl>wLG28X^}>Ap%D-vDMpm>#%|C z90E(tY>P2j71W9}JRDlcJ2H;m;t=f{YJk!XhnY98V^Be9k>wdqLw3HqLSStb=d)LN z@Bmu*r#gLwdYKPrW5g@$=$xa^oovrA+sglbF32Oh2xZ?JaGuLjp=2pEs+ciUO+)wh zy18(u;QV*e2F$Ll5@lol=>y=VVuw+^$i;dCRpV%w(P=~MZ>p24ps_3%I0 z8<(;O=Ck_XOs{ofhBSxii_LfOnuwj#@f zIX*<^8)3C?PnRAA6Jp`F3xs z<~OmBT0%nC0S#Yuss!g(_4-K4!#z0^y}2|ER4;}GTu|}8b<~5c_#;l#Z}-m+zT9n& zXI$&bQ@`k6@q(^bSr%P-w4JcE>ZZ&J-65P{qim6J#n1TI_ONwHH-fi65H#b4A>~2Y z@Ote36st=X{9rLdKuWyJI+)W$hSeNvNr%>1*e0`tNC^1YD(#Kq>?mhDTdx6S&3TN# zE=fOVMYVfIk+RYgs8t2jN|q&c6td`Xdw&nF;!xZ>_Tw29-$~`B%|0i9rt2H3$K}ED?SYj_o1$bLXMF+z)DMxqYcEyk?761G7(y0WJQ3bcR zp6yTcLEJ!fvR$!DpcQaRo}g8>-+*!s-z8-aivdBGrI)%ea;u$ftRnn4Ixqzun(@c; z9wb0Gh<$_Yu6VrTd(c<(c!$O9T;b%zo)XX^+|E#$71AI?DOkI5YlMQ4tyb`L2?#FMhvr_$wDUby7sfc5B zIy`>(QZyCtX`Joa#HA7B3nXc88fBkFHEcZUOIXYLZ+ZRzJCficO#qts7U^Z_avni5 zE)aQ9$&g?Rt&xnacXnHXx_^h6NG>V}32b`(l~p&aJIE>vS~HL$_|9w|!{L#l=WFR*7_VVX;;@rbs`4u8kmh z_g)iz&#I@7pqF%xhHZ6od@PAs+8fcC617~}cSnU#ynDw*b2*ro)|MkI3FD{VteAj9 z?WeIjndfg{JaQ4?D9qZ*2SH|9TB@_4#dq48G$65O?^uYYX>4lBAZfmGb(gW#3}X{c zi22xJ?DUZGiR7XY-)fD;1*OUtbn9{y8sa}y9pm~r2fUUy0Kt~{Q!Q;bfSPBatHG#B4*^O-OjcSn2{VuuP5{Bqpdq|Xh;nt$pA#!n938fvcZc3s z;o2j>L@2p~_`9szYXcH;UzthI&nKivl6tpKg!MJ!#cB%Nw=@X%%{!2`J=tjs-wf-?VXM4oU2il#)OB+ ztajTre)ljKYeB=&;_gcvax^^kfWF~3jZvtGC*G-!Z&Z%Vtae+BcG(D;6-1X&NMGll 
z(nw$hwpUt=7bd_Mkfs+PuK_a@c$%jN5Ai7=g0ihf*Gjd^b=Dj_UjI9ZmLBOIy9y6! zA^+q&)HNA$2i5&FPNqI(f=k}X_d=#U1byu{DV)!D7Of@K=~X`aXIc@7B^15_#}Zm* z5#}OA{fY1Y@T4Ms$k)ir*x3_hT7x?pXaOI-_fu7x66(8_RgS`Nxr_3Adw84U z+-{O$e~%baYGVWHsbi`@5i#me`pf8DcJZJo*Yxj`+K-60PvdLrQymTFMB*}yAjmfE zeybRdL2_S<9dqz{Vj~y!D|Q@rpBX7U73amIpuwspC;fPNtxK1GIRI>V%ZRA+$uth{k^iFFrIoa{$qx9 zqD4EL3;hg~Nt~BX6*3%m4fQznihkN$AksBQXS2O;xhanS7?<+r+YaOJAu4c+`Bb3+ zE9h*NLXg7<9#_!9`d+gYb>*8zh5%iQ94MvwNkHPGF#bdBDKwn#wgrOhBXQOMxE)UX zn)J74FL_e(FF=s*mk-PuJO)|4EgE`sp1NJR$`AisMD{$;UinaO9iVft5u2$3%z{!2a`z(%By+V4Y{Wu#Z^WrAy1fGJWnRnc|i(|mINn`~#X zS!wY`VZ`0$f%X*$+S1!Qao4ZprL223Tl53Fwks{houwDqC|y05i^}$*H~kPZ2-rl()z_ZacG=@3o@C8 z?Px$y_T>S68uCSq+JZVKqW_|XaG5DAP{>^fo}(I%(0`y!13@|kzk-2H{rh)qJO&vEsV<|d+6Za35XOK@=elF7p^Ogv1WnNE&DnE@ zAgJibrkF~D{;ie0r?U6h=;raW<@G@`n()8dAf*2+L+v-@Wbpv{kqIZc4V|ZZmrmLZ znV!@<*S7Cjnt8bwfqPl3{@Ixeep|4V4n<}bv$8c>@S=l<2nxCnmk}~Qc{T~`WHC77 zkFTx_>d&ZozPU0#>1rL}kS{m{ zUkong%PZvqO=x=^`zEbRrNl56butR2(i~L8sQ5MmsG zBV*!tv+G8%j3cFcgxI@xUxHuJF4Dgq?GIFa$hvJiqUO?!QiEe)Iv?ap-*vc+zeWvm zmY}KB30OnNV#so6L@P zu{KYPo@FxJs8;Vh5zW60VZSU((dAjozUetX5nVi!pJ#hBN!RY?Xxf<-_|bTOy!4R^ zIg6Tk9@G84zJ~kh3$1l@2r1g!EtfJ!DI^T=&Feh91V0#lR?c61v%9==-M!rcfVE9t zmTC~oATuiODm47+k@uf{QI1q9JLQ?`!P44!hSQ`bi8$+fY~kMDlg`x1r9~=2hv7+; z>Sx9xuTP3!cBkmk$F<>HDjE)5E_Z7(oMQjF6GQtSX?~PgpX0|Tn#}jz@BVc!DO=Ot zJs0NAMU_=`B-5yR5K{I%=wDnn(0OP6>AN@g-&#Fb>ho@!vX#6+_mR;2jkB)n^ox{Lq!)KQKByiE(K;R z**tMAuW_eO8KQ(POR%_`_6gLKWTmyMopajEj$~P%m)O5A=^VRiZZ?wItLB`Bj{Qw) z+>pyx%yN3Gx2Z2DOO0p5noHX$#!M;UohiJ{N|@> zPK37?zM=B~YnhA4i8UeF61}s>_~_lU2e&DwzmWtHrA7}`WzK)vXj{`(UAD_jIMQ)g z)f-cxit?7_4g3?FP=3BPJH^+5@|=M|HaVucz~$mm6t@7H$!`}&OFOp3|bD`!+mSf+M?xAT0n=i7gw#w)1)y_f-IQWogJU=wOQ ze=(FxriA|)(r;fP22DF%5pXGQ-MFW*s&o?^yOfY%S6~3CM~AkesiE22MLa;BRr8JK&fUkfY37kzC{v<8io3Vn$)|>7 zc++M*)WcYc+~sKgt8&UK56;#dK+28ZEK9{$vP>;uTt32ssalK#j$fqLJNuX(uY_8t zly994qIK&RXT1^HUWMk)KWDd+r+1-5@o-9DKb_wE*vsij8^ov<0a% zT0pP^FTD#vL`v+B+U<;U1w_Ag?R>jazEa9dd*@M>3R^MxrC?zh(u~2vUVrx>C6Y4qFGK80Z+YFD(;W*5MKWu{XmvX=@*1mQi#~)RLZm*-LRjAi zVh*(^mO@5Ki$gjIHADwl;jvZH#PD&4rtd>FVvl5*5RnIStA^qR0jPZuyr^*qW6^bc zLVvD69AV3oPX-C_?gfW`s2e~shCjJi0pm-wh_RRT5vl)ol+2!{Ihe5s9K8YpyJk|D zS=LK8i%>J2>YX;!ZoJsquGK@2J8cHn$F#i~{S3$b)mf&JInPIdB3B$#Zs`>h888M{ zTGI*tbqDPQjebT1 zNslyKF#0;Wg_wFpX=!Xsc;nU1Y^Sm?Ed2-2zse2^l2VBx<(&jy6d3{()M?Re8Ou_R z=11Sq63jEMuL@gHv1DwDT1^ZLMy1R~_O8zEw`(MixkK3m$kWb_Y}@*l{))@Jq5qp) zi3+o>SUtRVCq0DF(-DcQkkV?OJ#tSEH|xybXRx<3CelM}rX9JXhxSe^MkF3S-iYFg zV05w;t~k9;noAej62j0z7%TK@8gb;$ZCS|Bqsr}3>XqvgWJUSx&VREWH8F^k7!pvp zOi1FS3f3OgY?)P&gG7(Ag&Y`a-^m$iSw904$D9`Y924^Ni5fgNsOIV-M5{B2Jxrt~|QS_$oYD zShi8KwDxQ8)3<0w&~v4|e|uaobnDx}ee9dF2IX^$)m)c}V2^&H0c5(v=dI82iS!M} zFOS%CgWA7ErAF3XgW)8gJOXBP*p6v9~+4%!{wB*?-dKb((v3 z0O`+Hs4H@Km`rmHM=<`oo)zE$^M&SZzH9YWC=vHR9D4QR7G@E54@bs15R=s2ZPMqN z6ju3YBZ@wut3^moI7Y7nj_9X1%t*KsBe$ogq!{TT^Mr_831#IN9X_VYLbvo7S6Lj> zYf9QmJ%D8E_#pdGYrnH#PBG0PCKFwiMD)h&CyFKd6RYp_q^zGZ6T0B;()BuNApB)_ zhp+PP=u+~X0wJF2&W4KXsEI1MVZOrXb6%M@juuZ!eh$A^Mg3QHONlcYXYb zh$QYda|s)E6}+Ya}&2BPOQ2r5dy5;7c?1dQ>+^yYHd?<{J*{ z`=l{b1%1ST>bVu%dY@i{9`RcFA1ntLcJ5kvct{89yjTe-50-cILNaOvm$zY z%ZLYK*tWmc&@;oUx{<5Dz3V^{Za+Ov*KqN7-}4LVyN4i#>D1KBta^OAzO5un*(i}0 z@UDxuZ98G*b5!_T-IiH<29}#kmy)^R-KN!gG>9J9epPVA=05&A5$v*B)vj{(l0KpQ~F#d0Hcx{cTvIe*2d za3CG_PBS)D+s*rCkBotz1WD%YYvN^0@P`hP~o%s#RK5=FPIGG`tG zcUJQl`Z-v67fO&gg?WpkXlDgKjd)F(eHpaq`g+PBxZq$!9 zY_qGnW3^33X~Zq_9ypP5ZR+%p6=~$|SgJtA=;^EbpyFWfB%e4>Sx55XgBu5TC>!F} z8GVZT>Ej63Rd+J69`1}@p2--jn3jad@%G?WAzio`*@DgXZF8aHb71P!8srn&?X%n` zdn3$e{Wig(*t{Cft6>(yMLbk3^q)e0JcBXbfSV6k%K61q*Q<>mzMiX|F2iL zJZ9sVkv3mR5q*cvOUb8UK;sinf7XEx(f@V+B(6 
zB*Xj>;pIP1+X8tHO?`lcAH5v127a$W1r&Oc0T?h)h3tM@X?Al%3&7+WLVt*-2b(I-u^y ziqTDHq9!I^J!@p(XgYJ-(t*ecH3VnYS6ndB1%L5%m(|=-mwW+R8Xkj}?TS=9{yLc6 z*v{A!AzT6K_&1Ou#*020xO-SpZ5(i5AP3TtSbB&fQ{}eWe-6=2VuOp3=-D45fAE>iE{v~4Y+s?u=Z$P7&=9dv|19c%C0`w0PGGNXnZf$`j z?pL4+w0pXqpM2-=Ak^j&e_H-_HR`;cs%h-rJ_{)DC1rqqrPYw`r(!)(}3LcIe~iP5;p>WV5YNkpe-{p?WR>v8FFe3D*SLb;y&%R;>PJV9lI%+{90Z zRa{*{|Kic-8C|@z)b@Q>F&EZ*>USpg7&m;Q^KcpjYS)Sn$`9|I*!?I&QhqtMo)ni$ zcMyfmG(=;$8{-=KWM${4c4u&>crI^=ng*n|5JbWnyDY`q#I5jbU#-2T;CuYmjP_`< zYx?QoGP;Fp<0 z$SUxA<5!LpT=1@aEoo7_PN=51P>9w7DqD$9u37X{a*$)7%YUmwRw}2TFKq3Qj*J~=Un-7@1%NmB&x=x=qUB&sk_^1ZtN9r&VopuM0%&UE;$`0=t4m;HD&!`ocFZ*L9S{(5mFClsbhnz*Yp8EX2JOH5&svX`M^an5|_H<`ANKM|Kp{DSFv#;G&Eo9t*U zWpL~Jwj43!u#tS^N!vQcld|Tmfx`{0K+exfXV#-#e^2Ol9+VG(+#fi(!QWTt2A(6jNjm{aNiZOSN z4J$Wp-GJ|J4QKXMnY}Zl(*9ge#`g|nRB08(PKPyC{ zC?}&mPzvF1v_qJ&jYb`Ca)4Iq_C+Q>y+!_@2|G1Rp~% z!47x-s`sal>H?2h#)HObeom6C?^_#ODn$1Y9CC|t5y6a?V;885DMcVc!Teuru zvkk=!k=Vk7W2~$b&CDqqvQ0*Nsarxt*A3eG zV_@ka@nAJX4t+d}0-RAEdgXmt#d|}V^}S)_+>&wiAgDFTwSo2#JWGo-|12(lwDe?b znuewsVvDVmyU~InSqfx~F0(u^;vlkkZL6p?b*aI`*C`a%tIQq^Wbb~6OWfT1@g6|8 z24&XAo|CRRCdJ8k{PXGVf`+xL6p$cg3rC>i|M-A!M|n(P2n4b{ZeNZ)A=z#8eO$=` zUB0~DUf8Zm3FDZSwryua%OZDjwhjP|xm}oYkR@l|SPa-7bEv}H88(ATQ*9NIHLj0B zx7ws_+t@%lU%JD=VcoW2{Cze7oFyN6WpH{wT12z8q;Gwm5?V025=>}+f|D@lTs?C8T; zjjTG=Zfn0q-5_k8tzu(bm_;&BEUYhi)h4XWx>d2q3I|erhnbjmgsWmoHu$co?r2(LmJ_pYbR#4lYtsooi)y-)2hZLe3~8o?`&AjG;*cv z8iF5*Z!22q_LHLqToSs)nV*AEUJp2)_eiOf71$;ZdJU3e*gEa#&RoRA(hM(6G@d`)Y=u01@KG+sl(| z5Cze!(T+yW!EE5>Ars$XSL&B0h|@vP-@O)UzMkmM=KUB)2Bk~ZB!}OKNzP39L6%>< zEMBpTE9W_cQ8s-IrWOm5HpJ}@O55eRcO%>y!MY&`P@_yc1A}RF|8iW=SI-5soX81W zgGf3kn~YWgzQ2!%AvE)PyN23%)I(>{ATXfOYU8JCq-QrJ3&|$CsDvBV)3|~!FE+NT z%iW@ZIuNoa+>W=3V;`et)o!CzaB8N2h!8%!{q^-|sQDM|Pv?d0p_MZ*?x0Bj16Ffr zLGGZ}7m=;SH@Xq6Kn41&7l4OUT)P}R?18sQSRWWNiY8s6Yz}GxkzP`T!JneyS7gz` zwmXOy5WhZO_}^+vx?#?9{=(y@liDvxEtKF1dnRm?)1<6d%^584nxeSK>f}Zk1@vPWCc!FOc5^^=% zFkAQp2~-J8mVHzQ-)$E*;Z7GmF{HWaV-%Fbh^%b2LvMIL0|NNt=qgJ0GE-RIVInv` zj}F0q!_EV*gi~Yx2H+^;hN%kx; zSHR5sR7w}tXnjW6qRR?IH+QjMAGP23{|kgyoZfSC5e4Z1685J=saqDogn-OEBC3SK z70e|Y?sDdX+DN`G3N%y#Qi+J5icBAi8qkq4$J`jt9bzsbcl}Po+b`RSx_{lR(TTbV z3ZftFE%+#6W79FpR#HSulamMJ+;A0d|JzP)KkqUZgiQxAR3!r=h$+4(B#peiyGjDL zaE}6rO9I3-$g|BZA&DSUanK2x>XS3B@lUkq1Vz@Jth@Ep9goRaK|c1%z?lWsbpYl} zV_>KY2Dixc`mFlKomCww=0Sndx@#{)v9wGUN=q+iF)Q&NT_2JfX#i%KhVl$T6`wB_ z`FI-VKO?nJ%I^s?*OyDEl>Xf+nuQ~R8bFPvsW9(1@)q^&h;idS7;!7l6$Xjhy(X1G z$Y?Dw;Q8?diAm~E_93v5RKYr#i2>?V-EPyg330}(gJ(qK?t_F@H$^LI%+`k#3ZH*< zaAqT|x}9+NH-^oWs7M2#Pp~%MVxqhcJa#_*KRggoK4ADN50rb=x$N^rp>M>-oUAG# zXQQ1IGT6MWLWw@$e)36%B2!uy|eg{OM88uV{LN4#2O=odccL-uxw=2D@Ht+@f~p z?);{g*G4F#0@(VzlHq*VyG`%3w_&bT=9$TR`6OnoZ!U0b8a_!?3dSJkKMo>LkIgM5 z{qXBxInOdzFz7m9qYaqlTpwJ~o*DV`iWzzHMa0L`(~Vw-q^^9P&+Gx|uV$l+zjLqE z8;TvYneZqmY(QM9^b$TJRSB}J~w#PuYQ8dz^}nC~bBWv^}VvQ&T@*nTa1x)`8La#oc=|07WP z{Md6jt?ZMsf%I+*T5&P${DIjsvplBf=HD1ej`_x7c0F#;N|K&fxO%xybi~bGBo@m~ zEZbV$ACukCxmWhEPZgkK22iST|1vu$X>Cf`Dl)YxmUb&pT|R};Q5a?>lm zQnOd!ztBx3KDCoqXCh&i8?-g@HL@}UvVs6vU_LZyxfEyf<+jE!#!S0O!nU}27E>=m zYWZ_1q_}o-)`ay79Xs}NaV$bQb%N8h@bs$-6%eujNRcbV-$Y&UWS~1*&B##C?XK!G zpPMvT-fej|QC-U|WL4hQ_(0{BC2Kzu=)&&&Tb2RngFHW59JFltDlnOGn(0=v$yelb z%A2M}rJr3d)kqmmP(CW8YzxxDS_xX6A|5CNHAvLR4jyRD0RBJo(UKSmdkI>xT2X$Xey;~I;;8bu7kO)Ylyt|d?Xrqr zP>v6$UL|scdid#_-EPl)f|K(xWj~#Q0t|*t*K68e@Lykh&DUsb_Yt_sC%|>Duspw>={QNP*^|R#w>no= zwefK3b)~(6LEMR+EdTl1>C}Wm&CDau%G2k2C#HAkU;hFlzp{{{zdc$>cE|7Pr{<>9 zCpbsnzx@St{nyC$_uCrXa|AJBL(h-AmlpUxqh57;@K7R_vhiM$fnF!P(9l(?yY~g> zHPsX9a?>g~>3Tt?6qN#|>e-m^sbqng89yO>u=QzQ+6pSmI?`aZN3T#aiFe2|8=irP-4R&7+?Hx*80 
z(6j2r3^tW`D;P*A!i3t>)94mM!laP9+6Q&lzcrrRIG&4cCh#0j{mpi`*^Twl=h zeNmvoNd5e$JQ+2l{O}BJ7(r)2( z5^mFc)8tB+m}%|DPmXbF8wj1VpC=-X+A`%fKp)nnl25a*iDso390<>^bqA0pHa-!qo)tP4!d!9hoF@rO#wT{$2G5y zl&*mHJ$8sRaw1j&uN&;J`8fhcS0)(|z<}(vC3&lm1F)xD$|URjzxw!s5++G8e#Q1v zX8n+`A|vTk{Z!phd0;0`8fYCpjVR&8-e{Hz?!q{fhe5iLm>R8G){RCK$K)|0mAe5d zp1o!_(*2U*K;i=@CEM9)dZFSnu%eCtcqkFm+k+v!roZgU`*jx|F|k&Hxu@=P87);5 zD9fHwHf2Zl2H2bZ?_$jhtIQY0^@hEgWh!vfC7Z~FQYSToMs>0W4IL5_{9mSJRSSr7 zC4lB>rkudpW?9^u1JCW3?(1#W8cf-jUY8Uw(x}=4l^0BCX}gs?(aO?CEyeWO>j}UO zt*V>4w+fmqUVz(!ssC>8{jAaaqFGRj{-Hbo^lUMN=^N$v*8)Tv=QYt=O^Epn{}^i8 zVK8|R3w6=Ob#ETZiNP)Ys6gZ&>_H2OHdD5c9f%^LDB!WqT}tCq9mI5VHlE|Ku3q>y zSjbqwHE+4Og6aJCIZ<$G#w)?L%54RDIdL+9nHosZS%=0P!fym3Z?nLO#)Q`6}q4V z_yDA!NObtE#~NS(-~P#My>4RNLidoN9PPdJNqOXkaALB;{kWtq5J=-R%NCSJa)l^& zWiR}&a24g;JM*5qGt|cjB$`-8!&~|t*vQNM1ax~85NX@lPIt!VvvG ze-EbwR{bsRq>)=DEB@(vu^?lurfl&4*WZv*jwDjc&Yvc?xzF2y6xolsn6Te^DE>y6 zg_}v&)p)p&7xUzEUYRYU-skwW9c~9S zvt9KA)*qb>Pj*^5m-5oX&cXm3{tSSel?=W1!z&ZgCJbTb{j#eJGJVE?S9Ank&5^4W z)=D@=ns^UgO|&8#yDCZ5DiWtF$vbQT!c%Dje;&2ao(O5(^WW4j%V~IHjnT)r+&lUP z7m?PNv@zHU%%pc?v3RmBA=g(HZx4sc`YIvvg0Od z-TAmqlHXjMmG)W9Cd&y3k#;v@T4$)5gFNg4gl{G>rrVGyfSxNNMnH3w5Qgh*RWUc3N_MS}rUwueTjrRdV9slTcHbPHw|86>PW)#nhkw@-uEh7PU8Xt( zU5QSLUd6;SHP0qa@DVLYe=91*9uSLJd@I`DRv$JG^XJ9~&mQltXUtdb9_R68lYuO} z);Th`G%s*6jN4)N$fAAJL9EF*yB+vtAIjM$%jT9t`sZn|^RfAjuZndqpUMLC1dL#j zKK%IL7URb4pI<=wy2fhwInQ;7^BN|wJaVh-0bIZ3B(soQ_IE)`#XS`U(CIsFQry8J1wRXjQG)_hXm?_?(@`5o_8Y%e zH#twdXj^>gxz5>GyOGbdC2dz0U56wq+wrm*$W$j`@6NL;JjA2`XL6RM;GAmu>c0AB0&etK*;&w%NA~-KAGt$yeVVS?`40^ zz<512VtGOV-HPfHJbG8l863cT%G3CC#fX&_aEol{)mQ7oRHSTA_>^ELxd;vw;~CFz z*(~Nh&vcq6&@4r45XA!GZD=Owve`@Kt~klgw}B$lY#EVr2x5Aq&{S>KOVD8m>c-HbRzes+1!0u`-q2P zBVPi@f?AOp=j;EMY8rwlb>j3d>nPtWOg=w6i?(;d#*}`J%=^Dx0HpqFCgV1nJXFZk z9g_cnhXvTzH_m}URG5&G)A{~@R@u--?&#@#pXtOX<}<{`i3qOGb#wgFkww~+*KE&9 zIz!TVquubVpLr?`xha@ZDZ(Xagxo#3Mk+fe<1X+Pffh1&ti_X%JpspsUXiz zH|7iUuCWY_c&JRpjjqrZE9MmVpV)Bg+zeEEG?tPZoof z8Fcl@m}QKqzPN>%0116tmeM`>_VnD}VuM8|{QTHeZ8l{ zmA~jTr~m13?}?MnFuWpu4WPPiKN>+_SWhkFUUty|;p+9dE3p#T1Q_jq+Y(F$9ywb` z@f>-$w=}(Z1J#Bp)5Z-+?tpFeK$4IhO#E*$zs$`CO5M7~03AcB1KX%^B~3C`+6i~O zb^F(&0+J-)iWS}81r=B^S$dVdE3NPl_7XQ3$-Y_yEMKC=xe#PlHO{Y>WR?OL%I(FN zF7+!{>b?$R<|SK&om&A1Qao7M&rU4H`eRv&!-~bID2e{kR4tRqU8s_v#wFIM0a!Sr z)BF3o*E)%-GnIq-jUSo%$SzPi5mL7{+#$k2EN& zPN($i)Q`A0ZJ#>AEpndm5Y%BnV*!yfL~0?XGdkmEOo3{j5X~+>^Y>f&1-@~-M)yUXO~*4L!l2rERyO55+AUe%4a+B!A* zeM;=R0G#zy#iQs!v9%tD@Ac-{%12=ve#<>$7eeLzHVfS)pJtQT7ZcNDP_}553y5uw z)_`qMa$6a#KtqMC36#|aix_3#*+TFB6K|F`#ivAXm5<0+{){Of`E3SN7XG~=1HO%4 zJy)t*-m}=>hG}g?>z2FT1X#RpIjoTbk}-3)ZGiG{-J(N2&%G9jzc0{siT=peqwit~ zR#pdO`ddWY}iXxQ( zFyaS)A^pi+nzj=2AFlxk%!KOq3Ezs@}5*f}9; z+)7fkm2AFY(qnX8v4JuC$_I07WAao{D)%Ewa$#W8`sD(pzLk=scWAR70u|u?GN*H% zuR6I0R@zHxn3c4W!!5}dSWI*f|L#;ePv2(j~BlGWB5DeqkO~bDK#?vDpBy)Vd!v+@Y!+#u3#;< zPn9x84)ZX>x;SQPg5(2Mv}vLy4j$Id& zM?y(jrRsb-hM~4&RbcrFg>Zo`pm|U$YC&u09VukLdsbc7ZU;BuC<&92qv!cdf_A&C zx`3h-1b*#rW16mqqtOMBsrv|ax@K%uA2$3yuHHJX={EWs|B$1HK}%V16M|9#O4g$|yo%25N z*@=p?Zoh(2EzVBLJD`?5e9l%ey)d3jc@m5f)s26QeUAMu6`0-0(vBx=J5F5c&R<^N zh}S3QNW4rn3#G=MQ+9}R(4|eyjqkhYV1A&dV|4yZ0?LiEDlH>_I**t_`oLG>8ZjrB zhqiX7E#tC}-^&dJYsxhG>b*`!)EIYl+HD3*O{J7~m=?*ip6`<~61~Bllg_qt3)hPY zoXc3-krpvXx7*mHy8TQi6Ad_&UJ#wamMyg*(dV!~6%O?cPMxW>ziDtnJ%e{HWb%Haq>EYw-9bY%jn|`jY~NqVury zGP});q&$S>(KZHK0#GokE&Tsie2ecV0bdkyNr5%+Gi$27c=_FZ_DKLf*{`RJOICT>H5gAOV|L z%2YG1_Pgbdey|VfpX7xB7N}1@wv=>e4R}kLV^`WcE|d*7bJBk*?f>5h9ozWo$|B+L zq|NekjCZ)T&)YoP;bU-EySgy)2XdY%U8_8WbSn#m2gUFNsml`7!j|*s3l+%;GdZl8 
zj5IdcaQB~Y<+2YR7$zovH`fQ;vRx^Q5Z}4Q(XP^q-|o$Q4!QRwpv$pH3m4WDc6IcynW@W9Hfo3qNih?0OP2!Mo@%h0XO!6`%(c zP;z|R&(pQ)1*#$vbNxsiz1Xu^#;&wZ+WgYiY4*mQK>~!73fdU;YVHM(db;&;;&bBL z`w*Wmp0VZhX_R;44=_rIC?to!DE@KlltpFnnXqN*ySz~J7xqvO8sERyFG6{>|4aNUoKoU=l z1oe-GHo9Az%QzCs@SNpHDA$!3Y>nxYLILzLFcuqK$5V-tN&vP#o|4DTZQ6>mTpnT( zdr4oGB=mMfvaXc|3e)fgc=-bZ!s^g7%Ey#YcR) zcCwF|ACXk>!`RB|{IXwC-hMqCZVlTy1GCw}Y^Py1d0mwHJ`>|+TM;m4JC zOHM!Mt<|;i$N$#>s&0q_G*hPIANJ+dv=VYI!8B}u=dN#jS(Xzm64cMqy!1`u)G^Sf z4oF+9nS}5{5K!sKHR83x*FsT_o?6+zpN@_R7iT2An$Uzf9^|U@|iQBxqjj2A1&&tN{>(_NVh4ECNt3h-CTve*SMm#BFSyE#KyRE`A zc6*m&I1G9=gE7t1h=v}DW((|^dA!mVYX9}Q=BmZl5I@v-PwB{}M2OidNI72*69G0p zG&$Gex=qdXvo?-vpkBUG;h!`!Owrs;cU<)ftax$yqbbm5aH`+aLr42c|Nm&0Z=)1g8rhVu_x)wPKLZVqa@2ViopuIr+Egd_gSvqvJshb7)1W6t1 z{ll9pJdJH%UzPCDTqT4}J|ylt*@&+8{LKUW+Bf%nVj}eY){N{#5R#_Ozy)9N)?@aB zio8tqI6QSquPs4W%lI3}dsU+HHaWHbV_aF}46x9}djCrZ``pLf8+*GjNTHSb9(v!0 z6)%`5fP}&l%506*O#YarpPs{TjH`Ww5;g5tK!;+U{r5qE36`l{q``eVd*|%3bSL}R z#Mg!MIWfi$M+&?s0--_I`}TF5gS{x#E+}20vgu_sM_4ya%nH9Gv_hEgjCI$}`+@oD zKSm0w`uIcR{+wF%QnJ$%a8~Dw?={C*1A05H1ZO?}ROFL! z8TPDmZr7&6^-eLR}gv@Zw6Yb^6(;VwI z|2#7Q+a)bo!q=+R7}lh8OS;Eja$5K$wimNs@tl&mBKMD+O$B^$&oLm2Au&Bb}3Kb!Z+>P=Z%+~mxr{~f1v#;xn5`d|7x z#{z@Itde9{xuMJA=tBQB>2!q;h5C_J;nunJ{vh=fZ*EczvIxlJht%8B7V|TzVaxV~ z1_tEDJ7Kfytug;KnuA7H8I6kevvObMh@bKANZWq>Uf^f5glyY59LIq8Bkb`lEwdYa zhOlfQx1mURX&?@1K>_vKCbIf>7#(=4P42IOUVRM!?^-1Ybnti! zn@r#A^?~tkWD7V(@0m~AmcPre1#y|NL z!s`p#Wx%)RJc)^kSVCGlslGzoiSIz+^ zrjTn?^7^GCJ=E%c+o!GDpDWpXEGUOhou_-`P_l!X_ik8jC&o&8~ zKLp=Ml?@P!AT0&S~qL2X|uWxy)M~Y*nHEmm)eWcb7bejg$&#t*2 zgX#R)KfR3QVKyT-vNp)_nzNRIEXYMluTL4MKgD0P{5z#)M+|d(2zvqcV`YHiq~8-` zSqGTpO*`3p1!pQ89K&XJzyM&<>RwjP7)fsH&*w)RqT{43lx4B^ao93jDIMp`zOUs2 z!jJMS(oX}SD%38JH2X@6(&|`{GY0rkpV9S36&qt(c^l2!zJ6Om7U}`M^t3bSPwt@c z&w^mfwoaE}^h=d3UpZ+h$c(tG7A$zj`W-`?A8JY47GYVk@#tD;9i$~ow=m6s_g(eY zU-~*%c{ksyX)yq?7_%qM&F56ufN}c}g)J+vEpd^}P0_TTlX&FDXJG`VrYWht;ReJN zqDBQgG3u+gz1U{jwuON3ZuPIaTGzC&_zPf8nuM%@L>Gl7S$m6_l2&?pI&xOtQET-uU{OON`6sO|@@(@F}2Ms}$;17L<75J{rX#X)?jIpi-65 z+M|3KcL%c`+VC}EzjkZgKN*}kmtE*1TghL6g8lN$JzH?VvW77KZGB5?w=QS(JvN(K zv6tAjJRi_h%PtH(_Lr~f6A69I(#^uGocVgaUgc?t>)0zBW+Z88Wx*$wyVJgaZm!B9 z2HJ_ti~{mo2;mN0;cYZ$V`uBqMLB;Ihkzta>zWn5T_O4vJXP3QAcf4<2tVs+oEk+==wPM0ESY8_+rJM{3umPl*4DlI4*wbnt~ zj2zxTjKfl~ghZZix~(4X@BdvTncDus?AyodZd>Owj=}=0wJu>~^R$S=o-<}govXDF zYgq|FvKC5J79$`$x-wo72qGf#1m&|F{we6d$IH)DN^diVh%&bR=O|y>y%G&OcICf~ zmC3g}-<0GR9%+ei&z;i(nZLIA%kNSzbqC_OuJmUhqk+vST@zy{{8AFf9F{cL%p~S2 zKdL0EDJKSp1#$JOu_r4=q?4Qn92+uDBFe|yETuI*vCoX9ZNKMIP{#?}+Z>*|7$p+= z37L^)5;kcQ%hF(6L*#aouLhZax_5T+B(9okphPTeId$}gC{IssvQ(5%=#7W#wkyfn zprUqB+BGlD6)e7!tHC&qrePh9FNzjiz-iH#`|$@ML6VFe`)u zJUslKZ3YQRxBMbM`cdDaQoGRGm}c*nz{29~IecEyjB!6GxF3VgyIPbcW7D~3NITbr zrF0vEn`h`NGvcP=_!LRO!7Na=jyLRd1 zmnQd^I^jpu2mv-V zaDFPEZL_uq9~@;}8pN2p>oU_vSnRbqSgg^XFFjGF$0c9V_Y=3QLk>a}*zT;oa|^98 z%=o7}LiE`nT8hU=P0y6F5Am%2a}8JeXs`rOEps}>TwA9(ysNKFY_$B5!50VJb*&C? 
zG-O5!88A+klfFPt;QMBzbkiwkTF#%e#Xg38voUNFLMvQvv+B&c8YYM5g9+4hJgu~s z@9-o1`S&;Jx&a&AH%|MHM^zNZ=birTJuphp=dAK{*^Yus~oeupCeLiB#Hut_3WEo4{ z?GY<;T+k|a>sly@{}`~JS=Qt(aTwFKQvUi>ZMHnda-Q2!@+E;xAJE{D6qF5aBen3) z<}?!a6~Q`>n}ockvmqUuPq1HB1*pwI9SuVedKWM-Z4y|k1VSgwZlZfVn3_3afQ!p=jchK zA3XvCn zLP@3Y5Mq;9@#07oS1==ny{gqL{QBGLS?|J@H4^%dOh)9H(GL9KtKOpR5rzOtlUvKU z(fD1dpsi2FP|p`sonyBV-@{nQIL?kgxLn?FovNt}b(k}sk}>00CO!N2rq-SN$)kNM z4p2`J7wB>kyeYKV3Pto+<=S&$i-rZi$J+U{BNC4{%}fff8p|6 zT=-=k7tc6u{Hj!|*_pP`9q^ocZRsF%_s!&+=RdL_cQ@C274SrPgJO|zwj;7w+pmft zR++h63YGj95J4y=Ym%{`7{ibPiN#7S%s6EZ%TrBZ%SHNd9_Si^Y%c$7e4JD*%+7To z_`nho=wsle`TdsblXl~Q_<3|Ak)5-^lpq>-UHX-MO%;eY(IDO4WIh7KsfX|dXf2p0 zzV?HNy-M{nuWYsx!~9cFBb;zr7la{gz~tS=3W2$Y9qNvfXs5MhR6Zw?(Q;M5;-NAQt`@fMyjCfU)@I!m!UiTGZidqGjB%$A8KVxWp1p*j7qiu7KjmY* z=)^7(vxqx~`qsNU3(@-w0I}+-=v)bMpa~(XO@sNSH^c$4slG?zEp>tuu(Jz^5DOh} z4Aho|7Z?ZrVo3DqX8(H5)ciF27>R5|>i@aSN>p13XJUUZYD1Rr zWUM7N-{amAlTVy74qR_V+SOhIH#nEGW+q}v`Udxp?+U5rI!Krfs&NAaWwkTI96Mtg zhEF8)hUU4sG4A2ZeB$@@v%WboSq2Prt!wo-@-LMQIqo@Q67s>V`gQGi>c?Y8%2?(IzrAPLoZ-g zD{`+7=1u2LKLXEP)QvB>$wFf`;SjsRd;keMa4|S8xo0czo(i2CIf`R;l4|Di1q_b9 zIiQFxcfT5-)Yy-%Zf4>JCssUhV^_a9lQRp_o=YX_R@~3Z1-G`F!iIQy!Pa*6;v7aH-DeypNvCCDDO z4$p9*NtZx-TA)YO(AzsmL1;fWL;*1Hx+Vx7*S!eI<7Y3i^g!9-Ac{~)+LNqY1W+n! z)Pi%4;)ZxyESF-;fEH}5jw=vEN0X4)B2eQ)dV9~;_}n-oJu zrV#5uokM>P^oxFaISr3jh}JN%JV{6(#60ViX!Xap+`Cd1>h_9)6%ilF>TFGtOESGUmp-WDv`daNX_s5rGsWBzS9u-kT5*@V~JDw`3}ejwUBc zvVpteGXF>3or7PoQ);lmnM&axL_qBss|(s|IdXBF4Ww* z=}4t#Xv>^^k<`rnC=2la<+N=qT~{<5{dHs|jJGbsjxlKNer!>EJ&tiOC(HdNlRYGV zF6ooybge}|2N=wn)xHb{>PsObHj6c(m{?FekoJ=ptF2^Lj#pY*vLva%qyN&_{C%?U zJk@AG4$i&WmU|IvQDlnA<0Z&LsaLMo1M7uS9P$)34ccdQOSue*Kxs8vwN$I=Xpw>U z3xubUlWzC1GPYJVv3oT)`b@j%)S!bRbxd)kjz)WXH~=6zjy*42OI&Ve&WS7zci!pWlYPX z&&SItXTC5yV+ON^9wazwZabR$4E7<&MXlH5f_UDV5wVv_NLbeCPaM@2aN>`gG9C{! z2p|uBVKy;#=o)MeM9(!`#1LBLh%_!a8m_Ah!9hmdt*CW>L2-QTff~HufW?qv!&5cYx&E9$!4{b1F5Y_4vP2Fw%PfDUJ zIK=AIc7;Uv?MyvRU_q=uyB(rEyatgtwDb{DJ_HhE=?-1+vFG9*X6R(b;T4VVAO+Z> zRrR$dJp-m>#->H+lkGzE2Wng+%KIn1-S;sG@7^$Tp~!8Quy^JDW=2*UaHnvkT-)~# zqQ+8bXv?gsG*`38_XxBz58}Sn^Oq*IDJ_af)n0n9kak8M=A+uxuA(8d>QROjx}W!$ z5uA`7!tQeS9xJl-XSvob*7OExfK<2$(L^I{Hp)p?Q=jHB(pHXRz}Q}Fwa(8(ipqx+Ma%P5SB=5ZbXf-v^Y`> zC-?Kimk3nr&hjIZd9@J)KOfW>p{urlg!(vEsc@23@%Dz+?VqO(}k0 z_m6?d!8sJ-YwJ(Oa1Y3N*&5TPJ#O6}E23W?-@^}H1Cy4~f02-@e0hWWOa;OfSBhNt z8lxl}E7_L!rZx?phY3HsrdSA}bJ!4B$5UraD1msf;4{0zmSyPF?t6b!)AoJW6)x0e zg_?8$FrVFBQt>W1r0Qa2`6}TC=R=0pg|Me2I6#W+qt~R8oL}OIwI7b%#)CHV14@`* zT)aA~2I|6FNtt$U${HX%N53v@Fmw-FhDd>mv=d{=At!Niv{}w!isLb^zpM-;toF#98eC zw)La0U7gfh@bnw!aN$b#ioPTf1I}7pKu#J$KI286Dio5n3aWjC>5b2PjVR_}l|F7z z(~z`(h~}Z$_$>KJeX`zEo9Vm25N*ciQ;@>Ew^_MEKoTpu$6Qv1=TsS$MEN6OY-T{u zGA2Lj<$0;FTH>~~^LPLKD}Ek|hq6lA73sD8tYB#EaCJ~B^ue7Mfmf#|JROX6Vgmp5 z720e!26BZz9#6ec$=5m}EBW9-+g-@cOcVAHLy#=x8;3c{mg(P*%1X*^_BtiDxo5zC zuPR$zWQiZ4eD1G&eGBkzvorrlF2a>BUEkYXk%I?eb!D_!6PGtf=5JcSdnK6ze8$~)Lt2d^b9%nd2gNXU4$vBYioVR411 zF1cw9)taEHE=a{3;{Em)4aV89=+T^0-uL+*YyMs3zum3a*3~}3%UwWWlb?&e2YaPz z+Cd@{g5+2fi7Gs_H?H){Z!au1#B1o%@m+I6K>l!HS?XbUaAj_IoxD4#5$B)}Nj(Un zpDD*7zrm)+(};RwBOxpVc`J#(Kl>mdTmy>BIsPiLl9{RK{ZR3LcPBA_&GN6dD{w&a zL;&NT1hnnK0od^@&T3vxhzm(GXfpBgx@BA`z1gnbrd|OB*K$`HcOscRbPCDk_B(<2 zoask_@V+z!YkIb-hRpA5J6Zw7+YfnMMv&>Htzj@WH6lhM($^a^?hAr0#Xs znY1v2GtUwSB7<9F`4~JlH9vGEPl+i+FMCgT9gyx;E>>EY35?mp1M-%pWoSfoCg-hg z4tOTg+rq6?!Eg#PnLpviE{xzy@8oxzn3{)#EtmCQMA#nFXrA8Oh5V1%fw$cnQ-@lB zG_vsVLC@H0vyIuu=yggyJk^nYFbd-#OFBVF{WZpdD85NSS)VO1y&(+;{wT-_U`Yw! 
zv)#(uuA@vnjzTZp%!|blJte9xea=iU_(oH1A4kobRJ0x2nFsno(n*4odcaLHX1dQ4 z!Jx}_oEssU3qXu{!U%@w%g>&z_?zXL^e2#nrRSRVA^cBYqX|S;Q_<{cM|b1k?RToy zUePT06C9^1fF7?O`+)08Ddeq2A!ceD!#;MjUpe|@(o1@7yRCt;<6d}YY9y^JjJgt_ z^pdq&=VnO|H#a9Qylm9 zM;heo8vo7<>o;%VErmi2$a%64y`_8YF4uuD{B45z)x*tMdpRgdG@`hGHC|z*Dif~u z=GX}y~{_U?eS61?Tnho7n%V9Yv{w8pLG|#5nGJZJt|s^3-5jV3(w^z+6l=sQ#ZFHf~u4 zcL^dPDn$E`ftQkAkaA@{Rw?h-fB=9_E+n9}JH4w?xUn_fdoaPum#?Q|r9KL_H zgagE0T6Q+uz22cJDvKAlE~|$G2gu|`a(*#YAeb`;#Ix)yiI$vWDYPJI_G$VN zSn(*#(SH{nE%!5rC&b&T%Z=V1)^ zY5k(rbvLd-E^9F)#P@f_o%80rHH|@?Y}jmo4&|zbE>M{)IP8*8paC33GgQJclh~p9 zf7iU8Gqm6@ca~kB>oVTj>a;*~rMsP-n!{M3K|VO=8?RDN9UP3ksUma;Nt2(?Red)z zLgy!?ZFka_v80YI){ETotR8$E4EsVj*rzVm;

?*5y(jBpY@{v`pxiG=JUUwS>Js z?$i$u&=&WL{UTKA)RUa{A`ThGfk4kjdF+*~NWtVBlqz)7o>5D?Qd`yFYTcF|X75vcrcy5O zTaXFSCu^8bV8gMY1nI$K%l79VQ-U$u2X+2#JKE5rum`W8L4rpvJ^P z&~DixXN+WlQGYI$sl3>hNH&&9#WAxWb`%Y4z)bgt*Xb_JlX9B{ZreL>yp4e8xg{Of zUR7cJZ`k;ik6hVUw_Wg_RUH#kp;OfDJae`RVn1MuqpZ%nxV4JvY--~@&RzYTp!RA& z^Mn(_I5$6j$1mxm*nUL0Q}yg%#oe~T!|!lI8<~^#sWj+l%Xa3d2er-*YoI)A9s(U6 z-^oiTE`Gs{D7SsDDbIY;{mpEVm8?l6zBI$xpIsL@-yI;yf%h&5UDzewx53j z!s{rcQTT=aoCG(7x}k*Pq#7 z&aCv|VS@zvCQ4R41u{yJ7){2sA5Ui0x^cWLRR*G=r;cN**~YX__3>59ZIl0yL|6Uo zJTu`)%XKogN)Dzvg|Q+>u2O7=?dx4ZFa=lNzN;f|vMclxjq5KiHX-=qgn%#-^Sbg5 zR^hpdmy2;F8ROo2Z{NSAhtU_-%+p|~{qp*N@T4tVqK$VCWdHngPWIOse!=dvKmlv_ zt4q?G=5>;xYgEm{7Jnf;BUkoe79_zOdtHqqaRr0Yk)^NZy`r<3=d9-+o9CY;#<`;6 zc7#+rZtV$y6feXskM;$`!bDo&u+Vkot*|?4ZJbMy)~%fP0grO5oLZutM-6jVBWW3o z`NwP2Gp3%<7g{hvZE1lkgrWRt_Ms?4i>B&q3=dr;-nKR!@d$Ma+#&En8EEh41$GLb-O~> zD)ja`z){@DLk%10U~C@RB2t$+M~8n(cPA-<{G~;o)ZOWynIib(DEPF~8RJe@LTNr# z6c6G*Iz#wev{YJS8|RoCjZvl3E66i_;xDmV=5+EU(nNL5fw&w*(`XaQ@d%ye26)?y zxN*eX1)xmcnYB94%om8E!O zTc6B6RO?0c)71GBc$k)>Pm10b3S&F2Zk^B|Ux&|Z-q^^%+Rk*VVyK^EK#AQ#jV;i- zu%F-kWXZ)5*p{#Ru2k_>UV0ryb#!Z#p5ey!1EPOLFBGf|1a?@w8^t-P}{-kl3 zKc-px<2n!p&O`qLx_3=d@U=dF-2+yVDNzQd5f(epjSG|t$>yJ}(~A%vh$#j@hvBcC z49F{Cfc|)0>WpP#f}A$k3#Er;>0KC#kha|Mp;B*V{Qg(`&3@AE;CiWA4Cr-~4C1SH zea*s-B(J-VD`^C+Jo}#YHEUA>gd)B0o0&msVZXr7U?{WB%~<|BE*>bF!WiAht-RGp z31#!)o2D#j`kAt20AQ)Y^mRQa&moYy$(0*{8}<#h$Yg>{L~y_9L|ENnSu8z|dVcO1 zh{q5lyFyNRB7y@R6fMA>LoSg!36)+Mt7?_~Vaqu*4wc8jSby&n4ky#^2RW&YXyGGCF~Fi2-g=ci08|OYLgo&t4eNy9^3n(N^yUb)5Mf z?@mN(#5c&vMeCw^22SUUx4kiqW=X%7K~T~jwo5<_Z74BtScV5^24^+bHskZWPl_o` z+p3Lr&JZ6Lig)E``Uh-$d@r=J-yk;|c-dvR-JdLtVm zM*+;raHBRQSLEyR`S1TNASK38osEc~U#pIyjBQufDgx19tfSB2H0!j~P|jrT zaP>3DgU;W*n3G=akX5vyTtB&yF%>FO!nKg<8fndpKLhtEu@q&EyCJMD*UyKf?SMhW zOrIQ?gy2nT5#4E`aQ0c0#KVG>ccuKZp4^Kej{N%c zYc_AM7Kr=Kd<^sP0$&<&t<5wcllBPovgsZ6Y!*dA`%JA7{)d0oa_XnJvrCKBf9jKE zoam1BgkS0gbOm#!?qstcu7^UpFT-B-Jw2+jK5dmw+l-NRAeq0Hwz_lV?63km050mn z;CqrkY+TkbP@a3X1E9yC5no#iQp7~-XxcZu3rL!x(K+7FxqADRp^|hCl*$)8d9V@Oo-2i~IOMSvb!B z*DJloS;Cv25?_B+$6+)B0GX`7#h|PA`51&2U18a)^GJm?8aqZ$fu&po;oE3aJHzR= zK9Zjf)Li3PURiYYD%)u`X3>{)$2rWLFFv9!R1;fxV$GZZ#OL8%Q~LZ0-a=dESahXZ z#g5}RFAk%(tWB6d_FZsus)-6THS{co2IyZ+X`PUqyj|EXM z3$v!1PI~5D|563aIX+%dJp%hQr^xFUV|R40b0SQ4JYda=%_#7h)s_F-AN*(^tF}0O zU8?-9216F9i3Tfay5wk-4`k)x7zcCI+g4kUm))jcp0;~3+J6EYAsPCbE&!=NzZOpZ zp&U&bnn?ok;;4d~sXzPT_ro=dU&myz3v<{CkJ1sJs8%s5ZS~SV#a}j2);5nz(*YFv zmDj8OW3eXh1n69a3&7-xXJ4Sp++`i%U_8fo!-6Nxed=dDo^atgzzDXhUAQe%zeTC7 zb^K3FD|FIWt=_n8+9UYd4EuV5rLMfcCzxx4{2ke5v=>=Rk*ejZY(36ntAeCq;a(+( zIt3XA1d2`ui_Y;6Rz6De~_j3nC`>0GOvIw5uj4RdFJk!nG!-q`9m9H(G zC1NK8Lh-fqY5L&h7A7{3Y<-tvB+Ni(+5w4Ts@9toqT?YEq~)X0uzv@uiEEN>`6u}8 z)`fNxtq$BdD&1qAY zj3@ma(|%3;$5CA6p=*hIzLl+NA>T%QgJv#CUXv8M$`4vlXy_?I(#N>>L4*Wdae8YN zSIl!*zK0nOiPqjM{h=I)WcD%`qkd{=&#P{Cq&7wSr4Tm3#0S!|>Wt!*1 zF`rr(FjDFK!*fq&a#?K{EeqD`A;JYyydHZM%=|Z|t`FxnEhX%~s%a|oNFvks$}8in z>?QlFS1x8`5v)1g#yQtDSUmXC5tBEHW5||w%hZAI7U#7AhcpR*Gu}#q_ zq4I5%bkC*_J2UqxII==<>incd?kvo%4gO9rv{9Ll6Cp%{I5oCh=CjqpAd&fP;j>uZ@y%+JX6SlJ$Y^) zi0v0CxiJ;}5HqmDS%&rYYSSH2e1c^~MWXa8GKSG%y&1!sk74lNS#mqYe%(@r}tG=CbMdxc!p{+Msp}xs+g`fq&D2gJL z8NAD(=&;_M2#1BvYA~^#QuC9NGk2y~AoK`q+~tEv*uW=dc!qbtX%l7EvE%cjX`{}7 zGiE69R49#onpU#tyEpet$ZH?m368PlLoP9HnanI9Vz8Y#F2Xs@@}R;3p^kL}2c67Y z0ejIPGgTIwyBPy^ELJ$n;&x5HZfUsh&4;`igX6T7*NI`3+0QoZS8zf($qx0G6TyI%W;h$=O_!580AJe;$)_s!NV6-cWRVK`i zHF+>7-2LMmGF;m*6aD>xT`&9JmKl78T#gL=W@^3=93z7ViuP0 zbkXS_c2e#1^j8@)Jldp&&a8P_&5ArHO*2fkT!x^qp^#%rxa_xp~} z*ImC4!?00ucnyijd}*ZpJ-mIYtNw?!uQe7|Zx}zcJM|(Q9KflbVEfnD@b!o##)jQe zeEEbaANlh;)9T_T5m4bpr~zex8q07GecG6K@>3s)u->46Ir|63cW 
zklw!5yDw}ZedoXp>e4}?p>o@8aWF9PlPcJKIn13jCcOzxf|<83#i71d&g30gptttA z$Z`_|6+Ggsl9H<3cu`Rvo~rT;d@rRJHjDCCE2l1x5?EN%m)7f}nVll`&|CE~2lZh? zZ7H_PeNkCe$fVq``GJ%{oxZ~vsH_fGitSOT1&wpgw#6Yjp=z{tQSmD!5>O1SlbfcTInRvKI~K;~o(<jM;@e#rwb0>E?P5lrQWb3A!8M8C4{;c$r=#Zzbqv;W+i25n z@}tpa!E#SPi~6#4oQJ-}W(3=~T*@E(^=Zx}Sb&>1#g=Y>?_45@Wg?6yS5`U6NKzZq z;{qj5#*nQ*2X=$&vfI~^yH96Y4APEb*Xn~KtRqQ2ppw^g=gHxxYPgeN({Gm5(oLeu=w z+(Grhke(!|4PtP3##^9g1m13w`%L^TZ{G7Gh!k@6guY2O(3~a6<~D9qQ{!LjbMM4+ z&KiI2Shz^cK1b=JZ7%SJ&oZKe7w5)q(+3KETkZiZjdhCgI!k)%VpI5TSDZtU`kjoU z(dUCYUCr;`!OnvA&FZ>oB^d0rkTc;Zq{QfP05i38M(rddoEZc)My)Xt6JDl9bMOX=;)OA&y(QpA4W_e1&cGE{V+xTkc0kf-sZNhRJhrXE*SlJF}L9T7Y9+j zViS0Kj2c$DT%$bI^aqok;&OcbKxX}paJL1xQ=pa(IE)GTZZrAtTQ*-^d@b25u#if; z8BUFVn+%~!Xcptd^0B2eo2IzJ>6v$Y^kDu+Xs6z%N25lZu(Kje!rx?W?D(hotMJ^7 z&F+J#Ue57=;eBaeb{HGpN_s7 z$>XZH-s!wWOKH=Tbz_d`IOg+j_S~{DDySv>@qOpv=Bum8W37#FT)(y!XU6aJ&do%t z&2!+llEW#TZQ6{PoTcHJJ-Kd%eY7uwjm7P9aPcMMSyeT;TcziK&$-76DxX3vQK(NI z05!x+sI?0doCP~CP;8ArNZuqX;J4(HM>s26b1v$fZ{E#h{ZYQ4viYfV_vw;n-d1Z< zLXk+J_O?zr|Nry|UNgI2Cls7aY1a!xa8 zXmHA~_LpqgVquzlwHx_d_-S0#2xz_s6jy=;2kn^JBY}~%fsrwzM0WXO>TQpnwLgt| zPUna>x*Mf$zm?ORG>E_25A18PNymcH79`sNpW)dX|iIn|GKC#N|gs z6&p|v``ou{tMANM>dTvi=%Y-ov2|ByoG6QJE1yD5zJsY+wuhl`Y>MX~v7(Z;+@9Zr ztBcZ%sXR-m$qNR`_sNFRAhh^;zwTq35H1L}GXS051)5qb>Gye5j=V!%f_Fx0BOst{ zj~RAWQ!%DpIyNkuciDA{lqu=K)E%|43wMdhvj=)!)A`dBLRn9a!sJ%8hju7^OeCKV znK3xa^k(2`=MoxKdkVX^n(V>H>DkzMg~@`zep9D2l&SsVcp%&9O}4p23{{PHKdY0s z_PQA5EbDnTVb2T0a`@m}_LYvs;C%5aZ^ZcivCVUadFUc7_OXt+cgYo%3#<-JlR}+0 zxo@pG$J4sFVBdFDfF!oUrY zdsw*IuttLXWk_KjSW|r+FN>W|STQ>zWEg^o=egzF$lTl+7y;FthkLGXCG`%(Nn=d5 z1|C3#NSMUFA$kAV6eoeN9V|I9$na4sQfX|ImDSKU{Y^F|5$esYwZ;wFKo-M#bXU$E z)$ZVgw0YZusRt(J_8RT!kMNdBJrH$GnyJqV4BzJ{jF0#u2M&Ui6cC?=yx)@exdRqz z<&)z(A^r2mXu)i?jd_>34Ya>@6Cd;|9K)Zo3_Lg0Y8;5uz4s8d9RkAQb^P-WcOqCF zek^U7ZRL-rJDC+i%U6GQy%ybAA{Sn0O?L5BqYYbE{B)7_mjfn>ZOcbJu#aTTJ0ZVc z?}~EsYXOW{5dXPT}M7US)DuJI>ycZd!}lEng216vFhrU)Y%uj zrw{noC~>p{nP?XsM}I3jGWv;(X$C(g2Qa@*v1;Jh{3V_95Y)1ISPmo}_tk0Qd2?*J6c=LJ+U_Nu>&JL#X!cm&F)hZfk46B(Q>WT2$@ zbYSmxsNR#{!x!61FdF$@cUxF2CF$-%!6{C_q^yh=WL2g?)o<3fYdX;M42uerAY9M6 z+FYLqtK@ai3tFv6KFqAyUwbi2Kx^kV>yE)mxPwx;@3o!b{PF9m_5r@SJf*HcE? 
literal 0
HcmV?d00001

diff --git a/datahub-web-react/public/browserconfig.xml b/datahub-web-react/public/browserconfig.xml
new file mode 100644
index 0000000000000..0f5fd50ca7ce4
--- /dev/null
+++ b/datahub-web-react/public/browserconfig.xml
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<browserconfig>
+    <msapplication>
+        <tile>
+            <square150x150logo src="/assets/icons/mstile-150x150.png"/>
+            <TileColor>#020d10</TileColor>
+        </tile>
+    </msapplication>
+</browserconfig>
diff --git a/datahub-web-react/public/manifest.json b/datahub-web-react/public/manifest.json
index 35dad30b4bb57..1ff1cb2a1f269 100644
--- a/datahub-web-react/public/manifest.json
+++ b/datahub-web-react/public/manifest.json
@@ -3,7 +3,7 @@
     "name": "DataHub",
     "icons": [
         {
-            "src": "/assets/favicon.ico",
+            "src": "/assets/icons/favicon.ico",
             "sizes": "64x64 32x32 24x24 16x16",
             "type": "image/x-icon"
         }
diff --git a/docker/build.gradle b/docker/build.gradle
index 8b71ff1f6f06b..cc95e12f26f76 100644
--- a/docker/build.gradle
+++ b/docker/build.gradle
@@ -61,6 +61,7 @@ dockerCompose {
     composeAdditionalArgs = ['--profile', 'quickstart-consumers']

     environment.put 'DATAHUB_VERSION', "v${version}"
+    environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally

     useComposeFiles = ['profiles/docker-compose.yml']
     projectName = 'datahub'
@@ -78,6 +79,7 @@ dockerCompose {
     composeAdditionalArgs = ['--profile', 'quickstart-postgres']

     environment.put 'DATAHUB_VERSION', "v${version}"
+    environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally

     useComposeFiles = ['profiles/docker-compose.yml']
     projectName = 'datahub'
@@ -97,6 +99,7 @@ dockerCompose {
     environment.put "ACTIONS_VERSION", "v${version}-slim"
     environment.put "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions'
     environment.put "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml'
+    environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally

     useComposeFiles = ['profiles/docker-compose.yml']
     projectName = 'datahub'
@@ -113,6 +116,8 @@ dockerCompose {
     isRequiredBy(tasks.named('quickstartDebug'))
     composeAdditionalArgs = ['--profile', 'debug']

+    environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally
+
     useComposeFiles = ['profiles/docker-compose.yml']
     projectName = 'datahub'
     projectNamePrefix = ''
diff --git a/docker/datahub-ingestion-base/smoke.Dockerfile b/docker/datahub-ingestion-base/smoke.Dockerfile
index 15dc46ae5b882..5c6738720e05e 100644
--- a/docker/datahub-ingestion-base/smoke.Dockerfile
+++ b/docker/datahub-ingestion-base/smoke.Dockerfile
@@ -15,12 +15,12 @@ RUN apt-get update && apt-get install -y \
     xauth \
     xvfb

-RUN DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-11-jdk
+RUN DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-17-jdk

 COPY . 
/datahub-src ARG RELEASE_VERSION RUN cd /datahub-src/metadata-ingestion && \ - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ cat src/datahub/__init__.py && \ cd ../ && \ ./gradlew :metadata-ingestion:installAll diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 2898a363a0a18..4f0e66251b154 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -13,8 +13,8 @@ COPY ./metadata-ingestion-modules/airflow-plugin /datahub-ingestion/airflow-plug ARG RELEASE_VERSION WORKDIR /datahub-ingestion -RUN sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ +RUN sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ cat src/datahub/__init__.py && \ chown -R datahub /datahub-ingestion diff --git a/docker/datahub-ingestion/Dockerfile-slim-only b/docker/datahub-ingestion/Dockerfile-slim-only index 4112f470c25be..24412958a2d08 100644 --- a/docker/datahub-ingestion/Dockerfile-slim-only +++ b/docker/datahub-ingestion/Dockerfile-slim-only @@ -10,7 +10,7 @@ COPY ./metadata-ingestion /datahub-ingestion ARG RELEASE_VERSION WORKDIR /datahub-ingestion -RUN sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ +RUN sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ cat src/datahub/__init__.py && \ chown -R datahub /datahub-ingestion diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index ea64f94f88727..fdaf9ddbaf813 100644 --- a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -44,9 +44,9 @@ FROM base AS dev-install # See this excellent thread https://github.com/docker/cli/issues/1134 FROM ${APP_ENV}-install AS final + CMD if [ "$ELASTICSEARCH_USE_SSL" == "true" ]; then ELASTICSEARCH_PROTOCOL=https; else ELASTICSEARCH_PROTOCOL=http; fi \ && if [[ -n "$ELASTICSEARCH_USERNAME" ]]; then ELASTICSEARCH_HTTP_HEADERS="Authorization: Basic $(echo -ne "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" | base64)"; else ELASTICSEARCH_HTTP_HEADERS="Accept: */*"; fi \ && if [[ "$SKIP_ELASTICSEARCH_CHECK" != "true" ]]; then \ dockerize -wait $ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT -wait-http-header "${ELASTICSEARCH_HTTP_HEADERS}" -timeout 120s /create-indices.sh; \ else /create-indices.sh; fi - diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index f863dff7a59c5..769bce3105a7f 100644 --- a/docker/profiles/docker-compose.gms.yml +++ b/docker/profiles/docker-compose.gms.yml @@ -64,6 +64,8 @@ x-datahub-system-update-service: &datahub-system-update-service SCHEMA_REGISTRY_SYSTEM_UPDATE: ${SCHEMA_REGISTRY_SYSTEM_UPDATE:-true} SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS: ${SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS:-true} 
SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION: ${SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION:-true}
+  volumes:
+    - ${HOME}/.datahub/plugins:/etc/datahub/plugins

 x-datahub-system-update-service-dev: &datahub-system-update-service-dev
   <<: *datahub-system-update-service
@@ -99,6 +101,8 @@ x-datahub-gms-service: &datahub-gms-service
       timeout: 5s
   volumes:
     - ${HOME}/.datahub/plugins:/etc/datahub/plugins
+  labels:
+    io.datahubproject.datahub.component: "gms"

 x-datahub-gms-service-dev: &datahub-gms-service-dev
   <<: *datahub-gms-service
diff --git a/docs-website/graphql/generateGraphQLSchema.sh b/docs-website/graphql/generateGraphQLSchema.sh
index 4e41c5dfbfacd..c6d7ec528b613 100755
--- a/docs-website/graphql/generateGraphQLSchema.sh
+++ b/docs-website/graphql/generateGraphQLSchema.sh
@@ -16,3 +16,5 @@ cat ../../datahub-graphql-core/src/main/resources/tests.graphql >> combined.grap
 cat ../../datahub-graphql-core/src/main/resources/timeline.graphql >> combined.graphql
 cat ../../datahub-graphql-core/src/main/resources/step.graphql >> combined.graphql
 cat ../../datahub-graphql-core/src/main/resources/lineage.graphql >> combined.graphql
+cat ../../datahub-graphql-core/src/main/resources/properties.graphql >> combined.graphql
+cat ../../datahub-graphql-core/src/main/resources/forms.graphql >> combined.graphql
\ No newline at end of file
diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js
index 2b8873c678778..1e6d8bec01813 100644
--- a/docs-website/sidebars.js
+++ b/docs-website/sidebars.js
@@ -561,9 +561,18 @@ module.exports = {
       ],
     },
     {
-      type: "doc",
-      label: "OpenAPI",
-      id: "docs/api/openapi/openapi-usage-guide",
+      OpenAPI: [
+        {
+          type: "doc",
+          label: "OpenAPI",
+          id: "docs/api/openapi/openapi-usage-guide",
+        },
+        {
+          type: "doc",
+          label: "Structured Properties",
+          id: "docs/api/openapi/openapi-structured-properties",
+        },
+      ],
     },
     "docs/dev-guides/timeline",
     {
@@ -768,6 +777,7 @@ module.exports = {
   //     "docs/how/add-user-data",
   //     "docs/_feature-guide-template"
   //   - "metadata-service/services/README"
+  //   - "metadata-ingestion/examples/structured_properties/README"
   // ],
 ],
};
diff --git a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss
index 3d30c65f89539..862fb04c8370b 100644
--- a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss
+++ b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss
@@ -26,4 +26,4 @@
     background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%);
     background-origin: border-box;
   }
- } 
\ No newline at end of file
+ }
diff --git a/docs/api/openapi/openapi-structured-properties.md b/docs/api/openapi/openapi-structured-properties.md
new file mode 100644
index 0000000000000..521ce8789db0d
--- /dev/null
+++ b/docs/api/openapi/openapi-structured-properties.md
@@ -0,0 +1,284 @@
+# Structured Properties - DataHub OpenAPI v2 Guide
+
+This guide walks through the process of creating and using a Structured Property with the `v2` version
+of the DataHub OpenAPI implementation. Note that this refers to DataHub's OpenAPI version and not the version of OpenAPI itself.
+
+Requirements:
+* curl
+* jq
+
+## Structured Property Definition
+
+Before a structured property can be added to an entity, it must first be defined. Here is an example
+structured property being created against a local quickstart instance.
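+
+All of the endpoints below take urns percent-encoded in the request path. If you'd rather generate the encoded
+form than copy it, `jq` (already a requirement above) can produce it. Here is a small convenience sketch; the
+`encode_urn` helper is hypothetical and not part of DataHub:
+
+```shell
+# Percent-encode an urn for use in an /openapi/v2/entity/... path.
+encode_urn() {
+  jq -rn --arg urn "$1" '$urn|@uri'
+}
+
+encode_urn 'urn:li:structuredProperty:my.test.MyProperty01'
+# urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty01
+encode_urn 'urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)'
+# urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29
+```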
+
+### Create Property Definition
+
+Example Request:
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty01/propertyDefinition' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "qualifiedName": "my.test.MyProperty01",
+  "displayName": "MyProperty01",
+  "valueType": "urn:li:dataType:datahub.string",
+  "allowedValues": [
+    {
+      "value": {"string": "foo"},
+      "description": "test foo value"
+    },
+    {
+      "value": {"string": "bar"},
+      "description": "test bar value"
+    }
+  ],
+  "cardinality": "SINGLE",
+  "entityTypes": [
+    "urn:li:entityType:datahub.dataset"
+  ],
+  "description": "test description"
+}' | jq
+```
+
+### Read Property Definition
+
+Example Request:
+
+```shell
+curl -X 'GET' -v \
+  'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty01/propertyDefinition' \
+  -H 'accept: application/json' | jq
+```
+
+Example Response:
+
+```json
+{
+  "value": {
+    "allowedValues": [
+      {
+        "value": {
+          "string": "foo"
+        },
+        "description": "test foo value"
+      },
+      {
+        "value": {
+          "string": "bar"
+        },
+        "description": "test bar value"
+      }
+    ],
+    "qualifiedName": "my.test.MyProperty01",
+    "displayName": "MyProperty01",
+    "valueType": "urn:li:dataType:datahub.string",
+    "description": "test description",
+    "entityTypes": [
+      "urn:li:entityType:datahub.dataset"
+    ],
+    "cardinality": "SINGLE"
+  }
+}
+```
+
+### Delete Property Definition
+
+⚠ **Not Implemented** ⚠
+
+## Applying Structured Properties
+
+Structured Properties can now be added to entities which have the `structuredProperties` aspect. In the following
+example we'll attach properties to, and remove them from, an example dataset entity with urn `urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)`.
+
+### Set Structured Property Values
+
+This will set/replace all structured properties on the entity. See `PATCH` operations to add/remove a single property.
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "properties": [
+    {
+      "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01",
+      "values": [
+        {"string": "foo"}
+      ]
+    }
+  ]
+}' | jq
+```
+
+### Patch Structured Property Value
+
+For this example, we'll create a second structured property and apply both properties to the same
+dataset used previously. After this, your system should include both `my.test.MyProperty01` and `my.test.MyProperty02`.
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty02/propertyDefinition' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "qualifiedName": "my.test.MyProperty02",
+  "displayName": "MyProperty02",
+  "valueType": "urn:li:dataType:datahub.string",
+  "allowedValues": [
+    {
+      "value": {"string": "foo2"},
+      "description": "test foo2 value"
+    },
+    {
+      "value": {"string": "bar2"},
+      "description": "test bar2 value"
+    }
+  ],
+  "cardinality": "SINGLE",
+  "entityTypes": [
+    "urn:li:entityType:datahub.dataset"
+  ]
+}' | jq
+```
+
+This command will attach one of each of the two properties to our test dataset `urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)`.
+
+This command will attach one of each of the two properties to our test dataset `urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)`.
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "properties": [
+    {
+      "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01",
+      "values": [
+        {"string": "foo"}
+      ]
+    },
+    {
+      "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty02",
+      "values": [
+        {"string": "bar2"}
+      ]
+    }
+  ]
+}' | jq
+```
+
+#### Remove Structured Property Value
+
+The expected state of our test dataset includes 2 structured properties. We'd like to remove the first one and preserve
+the second property.
+
+```shell
+curl -X 'PATCH' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json-patch+json' \
+  -d '{
+        "patch": [
+            {
+                "op": "remove",
+                "path": "/properties/urn:li:structuredProperty:my.test.MyProperty01"
+            }
+        ],
+        "arrayPrimaryKeys": {
+            "properties": [
+                "propertyUrn"
+            ]
+        }
+      }' | jq
+```
+
+The response will show that the expected property has been removed.
+
+```json
+{
+  "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)",
+  "aspects": {
+    "structuredProperties": {
+      "value": {
+        "properties": [
+          {
+            "values": [
+              {
+                "string": "bar2"
+              }
+            ],
+            "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty02"
+          }
+        ]
+      }
+    }
+  }
+}
+```
+
+#### Add Structured Property Value
+
+In this example, we'll add the property back with a different value, preserving the existing property.
+
+```shell
+curl -X 'PATCH' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json-patch+json' \
+  -d '{
+        "patch": [
+            {
+                "op": "add",
+                "path": "/properties/urn:li:structuredProperty:my.test.MyProperty01",
+                "value": {
+                    "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01",
+                    "values": [
+                        {
+                            "string": "bar"
+                        }
+                    ]
+                }
+            }
+        ],
+        "arrayPrimaryKeys": {
+            "properties": [
+                "propertyUrn"
+            ]
+        }
+      }' | jq
+```
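+
+As a hedged convenience (assuming the response shape shown in this guide), you can filter a saved
+response down to just the assigned values with jq; `resp.json` is a hypothetical file name:
+
+```shell
+# Hypothetical: response from the PATCH call saved to resp.json.
+jq '.aspects.structuredProperties.value.properties[] | {propertyUrn, values}' resp.json
+```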
+
+The response shows that the property was re-added with the new value `bar` instead of the previous value `foo`.
+
+```json
+{
+  "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)",
+  "aspects": {
+    "structuredProperties": {
+      "value": {
+        "properties": [
+          {
+            "values": [
+              {
+                "string": "bar2"
+              }
+            ],
+            "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty02"
+          },
+          {
+            "values": [
+              {
+                "string": "bar"
+              }
+            ],
+            "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01"
+          }
+        ]
+      }
+    }
+  }
+}
+```
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java index 83e40b22a5e44..453eddd3ae56c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java @@ -1,7 +1,6 @@ package com.linkedin.metadata.aspect.batch; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; import java.util.HashSet; @@ -33,14 +32,12 @@ default List getMCPItems() { } Pair>, List> toUpsertBatchItems( - Map> latestAspects, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever); + Map> latestAspects, AspectRetriever aspectRetriever); default Stream applyMCPSideEffects( - List items, EntityRegistry entityRegistry, AspectRetriever aspectRetriever) { - return entityRegistry.getAllMCPSideEffects().stream() - .flatMap(mcpSideEffect -> mcpSideEffect.apply(items, entityRegistry, aspectRetriever)); + List items, AspectRetriever aspectRetriever) { + return aspectRetriever.getEntityRegistry().getAllMCPSideEffects().stream() + .flatMap(mcpSideEffect -> mcpSideEffect.apply(items, aspectRetriever)); } default boolean containsDuplicateAspects() { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java index bb5e0ac53934a..dd0d0ec68dac6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.aspect.batch; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.mxe.MetadataChangeProposal; import javax.annotation.Nullable; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java index f790c12ee5335..e9e30f7f2bd96 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java @@ -3,7 +3,6 @@ import com.github.fge.jsonpatch.Patch; import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; /** * A change proposal represented as a patch to an exiting stored object in the primary data store.
@@ -13,14 +12,11 @@ public abstract class PatchItem extends MCPBatchItem { /** * Convert a Patch to an Upsert * - * @param entityRegistry the entity registry * @param recordTemplate the current value record template * @return the upsert */ public abstract UpsertItem applyPatch( - EntityRegistry entityRegistry, - RecordTemplate recordTemplate, - AspectRetriever aspectRetriever); + RecordTemplate recordTemplate, AspectRetriever aspectRetriever); public abstract Patch getPatch(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java index 4e4d2a38799dc..c337e4f848e5c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java @@ -3,7 +3,6 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; -import com.linkedin.metadata.models.registry.EntityRegistry; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -17,8 +16,6 @@ public abstract class UpsertItem extends MCPBatchItem { public abstract SystemAspect toLatestEntityAspect(); public abstract void validatePreCommit( - @Nullable RecordTemplate previous, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) + @Nullable RecordTemplate previous, @Nonnull AspectRetriever aspectRetriever) throws AspectValidationException; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java new file mode 100644 index 0000000000000..c73ccbb2d93e3 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java @@ -0,0 +1,34 @@ +package com.linkedin.metadata.aspect.patch; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonpatch.JsonPatch; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class GenericJsonPatch { + @Nullable private Map> arrayPrimaryKeys; + + @Nonnull private JsonNode patch; + + @Nonnull + public Map> getArrayPrimaryKeys() { + return arrayPrimaryKeys == null ? 
Map.of() : arrayPrimaryKeys; + } + + @JsonIgnore + public JsonPatch getJsonPatch() throws IOException { + return JsonPatch.fromJson(patch); + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/PatchOperationType.java similarity index 81% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/PatchOperationType.java index ac93fd24fee02..6eaa6069267ba 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/PatchOperationType.java @@ -1,4 +1,4 @@ -package datahub.client.patch; +package com.linkedin.metadata.aspect.patch; import lombok.Getter; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/AbstractMultiFieldPatchBuilder.java similarity index 95% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/AbstractMultiFieldPatchBuilder.java index 943aaefec469b..165a4d26c339c 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/AbstractMultiFieldPatchBuilder.java @@ -1,6 +1,6 @@ -package datahub.client.patch; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -13,7 +13,6 @@ import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.tuple.ImmutableTriple; -import org.apache.http.entity.ContentType; public abstract class AbstractMultiFieldPatchBuilder> { @@ -87,7 +86,7 @@ protected GenericAspect buildPatch() { .set(VALUE_KEY, triple.right))); GenericAspect genericAspect = new GenericAspect(); - genericAspect.setContentType(ContentType.APPLICATION_JSON.getMimeType()); + genericAspect.setContentType("application/json"); genericAspect.setValue(ByteString.copyString(patches.toString(), StandardCharsets.UTF_8)); return genericAspect; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/ChartInfoPatchBuilder.java similarity index 75% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/ChartInfoPatchBuilder.java index 0655d2b3eb8eb..09f9dad134a0b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/ChartInfoPatchBuilder.java @@ -1,12 +1,12 @@ -package datahub.client.patch.chart; +package com.linkedin.metadata.aspect.patch.builder; -import static 
com.linkedin.metadata.Constants.*; -import static datahub.client.patch.common.PatchUtil.*; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CHART_INFO_ASPECT_NAME; +import static com.linkedin.metadata.aspect.patch.builder.PatchUtil.createEdgeValue; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.Urn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/CustomPropertiesPatchBuilder.java similarity index 90% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/CustomPropertiesPatchBuilder.java index e621aaf57ff97..e4143851afbe5 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/CustomPropertiesPatchBuilder.java @@ -1,12 +1,11 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.subtypesupport.IntermediatePatchBuilder; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.IntermediatePatchBuilder; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DashboardInfoPatchBuilder.java similarity index 86% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DashboardInfoPatchBuilder.java index cadde582f1c64..9156b304a394e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DashboardInfoPatchBuilder.java @@ -1,15 +1,17 @@ -package datahub.client.patch.dashboard; +package com.linkedin.metadata.aspect.patch.builder; -import static com.linkedin.metadata.Constants.*; -import static datahub.client.patch.common.PatchUtil.*; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static 
com.linkedin.metadata.aspect.patch.builder.PatchUtil.createEdgeValue; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.Edge; import com.linkedin.common.urn.ChartUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataFlowInfoPatchBuilder.java similarity index 92% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataFlowInfoPatchBuilder.java index 9e55ab4fc6db4..6a114d90875fe 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataFlowInfoPatchBuilder.java @@ -1,15 +1,14 @@ -package datahub.client.patch.dataflow; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_INFO_ASPECT_NAME; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.common.CustomPropertiesPatchBuilder; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInfoPatchBuilder.java similarity index 93% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInfoPatchBuilder.java index 581616f54e9b9..99c0ac6c15eb1 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInfoPatchBuilder.java @@ -1,16 +1,15 @@ -package datahub.client.patch.datajob; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static 
com.linkedin.metadata.Constants.DATA_JOB_INFO_ASPECT_NAME; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.DataFlowUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.common.CustomPropertiesPatchBuilder; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInputOutputPatchBuilder.java similarity index 93% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInputOutputPatchBuilder.java index fc250daffe916..8e2168e5b6a33 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInputOutputPatchBuilder.java @@ -1,8 +1,10 @@ -package datahub.client.patch.datajob; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; -import static datahub.client.patch.common.PatchUtil.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME; +import static com.linkedin.metadata.aspect.patch.builder.PatchUtil.createEdgeValue; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -10,9 +12,8 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import com.linkedin.metadata.graph.LineageDirection; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DatasetPropertiesPatchBuilder.java similarity index 91% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DatasetPropertiesPatchBuilder.java index f4329c84f33ff..31e181fc244fb 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DatasetPropertiesPatchBuilder.java @@ -1,13 +1,12 @@ -package datahub.client.patch.dataset; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_PROPERTIES_ASPECT_NAME; import com.fasterxml.jackson.databind.JsonNode; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.common.CustomPropertiesPatchBuilder; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/EditableSchemaMetadataPatchBuilder.java similarity index 90% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/EditableSchemaMetadataPatchBuilder.java index 6478b31d27ef0..5e9e1911925fa 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/EditableSchemaMetadataPatchBuilder.java @@ -1,15 +1,15 @@ -package datahub.client.patch.dataset; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java similarity index 88% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java index 84db0ba307cf2..ff34b187f6151 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java @@ -1,12 +1,11 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TagUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlossaryTermsPatchBuilder.java similarity index 89% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlossaryTermsPatchBuilder.java index 6f31025406b1b..16d9beded3066 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlossaryTermsPatchBuilder.java @@ -1,12 +1,11 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.GlossaryTermUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/OwnershipPatchBuilder.java similarity index 91% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/OwnershipPatchBuilder.java index 20e0c930a8c95..35a647424a88a 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/OwnershipPatchBuilder.java @@ -1,13 +1,12 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; import 
com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/PatchUtil.java similarity index 96% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/PatchUtil.java index 69db36c6e038c..7556a8b1d9418 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/PatchUtil.java @@ -1,7 +1,7 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.UNKNOWN_ACTOR; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.Edge; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java new file mode 100644 index 0000000000000..fab81e0af5bf5 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java @@ -0,0 +1,110 @@ +package com.linkedin.metadata.aspect.patch.builder; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.databind.node.ValueNode; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.apache.commons.lang3.tuple.ImmutableTriple; + +public class StructuredPropertiesPatchBuilder + extends AbstractMultiFieldPatchBuilder { + + private static final String BASE_PATH = "/properties"; + private static final String URN_KEY = "urn"; + private static final String CONTEXT_KEY = "context"; + + /** + * Remove a property from a structured properties aspect. If the property doesn't exist, this is a + * no-op. + * + * @param propertyUrn + * @return + */ + public StructuredPropertiesPatchBuilder removeProperty(Urn propertyUrn) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + "/" + propertyUrn, null)); + return this; + } + + public StructuredPropertiesPatchBuilder setProperty( + @Nonnull Urn propertyUrn, @Nullable List propertyValues) { + propertyValues.stream() + .map( + propertyValue -> + propertyValue instanceof Integer + ? 
this.setProperty(propertyUrn, (Integer) propertyValue) + : this.setProperty(propertyUrn, String.valueOf(propertyValue))) + .collect(Collectors.toList()); + return this; + } + + public StructuredPropertiesPatchBuilder setProperty( + @Nonnull Urn propertyUrn, @Nullable Integer propertyValue) { + ValueNode propertyValueNode = instance.numberNode((Integer) propertyValue); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + "/" + propertyUrn, propertyValueNode)); + return this; + } + + public StructuredPropertiesPatchBuilder setProperty( + @Nonnull Urn propertyUrn, @Nullable String propertyValue) { + ValueNode propertyValueNode = instance.textNode(String.valueOf(propertyValue)); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + "/" + propertyUrn, propertyValueNode)); + return this; + } + + public StructuredPropertiesPatchBuilder addProperty( + @Nonnull Urn propertyUrn, @Nullable Integer propertyValue) { + ValueNode propertyValueNode = instance.numberNode((Integer) propertyValue); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + "/" + propertyUrn + "/" + String.valueOf(propertyValue), + propertyValueNode)); + return this; + } + + public StructuredPropertiesPatchBuilder addProperty( + @Nonnull Urn propertyUrn, @Nullable String propertyValue) { + ValueNode propertyValueNode = instance.textNode(String.valueOf(propertyValue)); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + "/" + propertyUrn + "/" + String.valueOf(propertyValue), + propertyValueNode)); + return this; + } + + @Override + protected String getAspectName() { + return STRUCTURED_PROPERTIES_ASPECT_NAME; + } + + @Override + protected String getEntityType() { + if (this.targetEntityUrn == null) { + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); + } + return this.targetEntityUrn.getEntityType(); + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/UpstreamLineagePatchBuilder.java similarity index 96% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/UpstreamLineagePatchBuilder.java index 9db2ebc522e09..bfb46d8fc5773 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/UpstreamLineagePatchBuilder.java @@ -1,7 +1,9 @@ -package datahub.client.patch.dataset; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static 
com.linkedin.metadata.Constants.UNKNOWN_ACTOR; +import static com.linkedin.metadata.Constants.UPSTREAM_LINEAGE_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; @@ -9,8 +11,7 @@ import com.linkedin.dataset.DatasetLineageType; import com.linkedin.dataset.FineGrainedLineageDownstreamType; import com.linkedin.dataset.FineGrainedLineageUpstreamType; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.ToString; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/CustomPropertiesPatchBuilderSupport.java similarity index 81% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/CustomPropertiesPatchBuilderSupport.java index 9f221bac15be4..5e1cd094b204e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/CustomPropertiesPatchBuilderSupport.java @@ -1,6 +1,6 @@ -package datahub.client.patch.subtypesupport; +package com.linkedin.metadata.aspect.patch.builder.subtypesupport; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.AbstractMultiFieldPatchBuilder; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/IntermediatePatchBuilder.java similarity index 83% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/IntermediatePatchBuilder.java index e3b14c0838ad6..d891a6b9673da 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/IntermediatePatchBuilder.java @@ -1,7 +1,7 @@ -package datahub.client.patch.subtypesupport; +package com.linkedin.metadata.aspect.patch.builder.subtypesupport; import com.fasterxml.jackson.databind.JsonNode; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.AbstractMultiFieldPatchBuilder; import java.util.List; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/ArrayMergingTemplate.java similarity index 98% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java rename to 
entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/ArrayMergingTemplate.java index 9cd8e74d952d6..ff721e97c0e1d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/ArrayMergingTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template; +package com.linkedin.metadata.aspect.patch.template; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/AspectTemplateEngine.java similarity index 71% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/AspectTemplateEngine.java index 029eb688c5291..e9d09085e7eb5 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/AspectTemplateEngine.java @@ -1,6 +1,18 @@ -package com.linkedin.metadata.models.registry.template; +package com.linkedin.metadata.aspect.patch.template; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.CHART_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATASET_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.UPSTREAM_LINEAGE_ASPECT_NAME; import com.fasterxml.jackson.core.JsonProcessingException; import com.github.fge.jsonpatch.JsonPatchException; @@ -34,7 +46,8 @@ public class AspectTemplateEngine { DATA_PRODUCT_PROPERTIES_ASPECT_NAME, DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, CHART_INFO_ASPECT_NAME, - DASHBOARD_INFO_ASPECT_NAME) + DASHBOARD_INFO_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME) .collect(Collectors.toSet()); private final Map> _aspectTemplateMap; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java new file mode 100644 index 0000000000000..78cf14c47a0bf --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java @@ -0,0 +1,23 @@ +package com.linkedin.metadata.aspect.patch.template; + +import static 
com.linkedin.metadata.aspect.patch.template.TemplateUtil.populateTopLevelKeys; + +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonpatch.JsonPatchException; +import com.github.fge.jsonpatch.Patch; +import com.linkedin.data.template.RecordTemplate; + +public abstract class CompoundKeyTemplate + implements ArrayMergingTemplate { + + @Override + public T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) + throws JsonProcessingException, JsonPatchException { + JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch); + JsonNode patched = jsonPatch.apply(transformed); + JsonNode postProcessed = rebaseFields(patched); + return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/Template.java similarity index 69% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/Template.java index 0793cacce780f..bd8cd544fb59b 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/Template.java @@ -1,6 +1,7 @@ -package com.linkedin.metadata.models.registry.template; +package com.linkedin.metadata.aspect.patch.template; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.OBJECT_MAPPER; +import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.populateTopLevelKeys; import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; @@ -19,7 +20,12 @@ public interface Template { * @return specific type for this template * @throws {@link ClassCastException} when recordTemplate is not the correct type for the template */ - T getSubtype(RecordTemplate recordTemplate) throws ClassCastException; + default T getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + if (getTemplateType().isInstance(recordTemplate)) { + return getTemplateType().cast(recordTemplate); + } + throw new ClassCastException("Unable to cast RecordTemplate to " + getTemplateType().getName()); + } /** Get the template clas type */ Class getTemplateType(); @@ -43,10 +49,20 @@ public interface Template { */ default T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) throws JsonProcessingException, JsonPatchException { - JsonNode transformed = preprocessTemplate(recordTemplate); - JsonNode patched = jsonPatch.apply(transformed); - JsonNode postProcessed = rebaseFields(patched); - return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); + + TemplateUtil.validatePatch(jsonPatch); + JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch); + try { + JsonNode patched = jsonPatch.apply(transformed); + JsonNode postProcessed = rebaseFields(patched); + return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); + } catch (JsonPatchException e) { + throw new RuntimeException( + String.format( + "Error performing JSON PATCH on aspect %s. 
Patch: %s Target: %s", + recordTemplate.schema().getName(), jsonPatch, transformed.toString()), + e); + } } /** diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java new file mode 100644 index 0000000000000..d998692f2c388 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java @@ -0,0 +1,97 @@ +package com.linkedin.metadata.aspect.patch.template; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; + +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.fge.jsonpatch.Patch; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.util.Pair; +import java.util.ArrayList; +import java.util.List; + +public class TemplateUtil { + + private TemplateUtil() {} + + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + public static List> getPaths(Patch jsonPatch) { + JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); + List> paths = new ArrayList<>(); + patchNode + .elements() + .forEachRemaining( + node -> + paths.add( + Pair.of( + PatchOperationType.valueOf(node.get("op").asText().toUpperCase()), + node.get("path").asText()))); + return paths; + } + + public static void validatePatch(Patch jsonPatch) { + // ensure supported patch operations + JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); + patchNode + .elements() + .forEachRemaining( + node -> { + try { + PatchOperationType.valueOf(node.get("op").asText().toUpperCase()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Unsupported PATCH operation: `%s` Operation `%s`", + node.get("op").asText(), node), + e); + } + }); + } + + /** + * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent + * paths to be specified + * + * @param transformedNode transformed node to have keys populated + * @return transformed node that has top level keys populated + */ + public static JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) { + JsonNode transformedNodeClone = transformedNode.deepCopy(); + List> paths = getPaths(jsonPatch); + for (Pair operationPath : paths) { + String[] keys = operationPath.getSecond().split("/"); + JsonNode parent = transformedNodeClone; + + // if not remove, skip last key as we only need to populate top level + int endIdx = + PatchOperationType.REMOVE.equals(operationPath.getFirst()) + ? 
keys.length + : keys.length - 1; + + // Skip first as it will always be blank due to path starting with / + for (int i = 1; i < endIdx; i++) { + if (parent.get(keys[i]) == null) { + ((ObjectNode) parent).set(keys[i], instance.objectNode()); + } + parent = parent.get(keys[i]); + } + } + + return transformedNodeClone; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/chart/ChartInfoTemplate.java similarity index 92% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/chart/ChartInfoTemplate.java index 654f923e7322d..aabc5b54cfa5c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/chart/ChartInfoTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template.chart; +package com.linkedin.metadata.aspect.patch.template.chart; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.chart.ChartDataSourceTypeArray; @@ -10,7 +10,7 @@ import com.linkedin.common.EdgeArray; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java new file mode 100644 index 0000000000000..3a3e3c99f25a3 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java @@ -0,0 +1,59 @@ +package com.linkedin.metadata.aspect.patch.template.common; + +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonpatch.JsonPatchException; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.aspect.patch.GenericJsonPatch; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import javax.annotation.Nonnull; +import lombok.Builder; + +@Builder +public class GenericPatchTemplate extends CompoundKeyTemplate { + + @Nonnull private final GenericJsonPatch genericJsonPatch; + @Nonnull private final Class templateType; + @Nonnull private final T templateDefault; + + @Nonnull + @Override + public Class getTemplateType() { + return templateType; + } + + @Nonnull + @Override + public T getDefault() { + return templateDefault; + } + + @Nonnull + @Override + public JsonNode transformFields(final JsonNode baseNode) { + JsonNode transformedNode = baseNode; + for (Map.Entry> composite : + genericJsonPatch.getArrayPrimaryKeys().entrySet()) { + transformedNode = arrayFieldToMap(transformedNode, composite.getKey(), composite.getValue()); + } + return transformedNode; + } + + @Nonnull + @Override + public JsonNode rebaseFields(JsonNode patched) { + JsonNode transformedNode = patched; + for 
(Map.Entry> composite : + genericJsonPatch.getArrayPrimaryKeys().entrySet()) { + transformedNode = + transformedMapToArray(transformedNode, composite.getKey(), composite.getValue()); + } + return transformedNode; + } + + public T applyPatch(RecordTemplate recordTemplate) throws IOException, JsonPatchException { + return super.applyPatch(recordTemplate, genericJsonPatch.getJsonPatch()); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlobalTagsTemplate.java similarity index 90% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlobalTagsTemplate.java index a98e60c739749..dac5e89edc88e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlobalTagsTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.common; +package com.linkedin.metadata.aspect.patch.template.common; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlossaryTermsTemplate.java similarity index 92% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlossaryTermsTemplate.java index 7ce59916f2073..e6dd1fd523006 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlossaryTermsTemplate.java @@ -1,7 +1,7 @@ -package com.linkedin.metadata.models.registry.template.common; +package com.linkedin.metadata.aspect.patch.template.common; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -10,7 +10,7 @@ import com.linkedin.common.GlossaryTerms; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/OwnershipTemplate.java 
similarity index 89% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/OwnershipTemplate.java index b850ae830b98c..0eaed27ec4cb7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/OwnershipTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template.common; +package com.linkedin.metadata.aspect.patch.template.common; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.AuditStamp; @@ -8,7 +8,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; import java.util.Arrays; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java new file mode 100644 index 0000000000000..df3d682632bca --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java @@ -0,0 +1,56 @@ +package com.linkedin.metadata.aspect.patch.template.common; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import java.util.Collections; +import javax.annotation.Nonnull; + +public class StructuredPropertiesTemplate implements ArrayMergingTemplate<StructuredProperties> { + + private static final String PROPERTIES_FIELD_NAME = "properties"; + private static final String URN_FIELD_NAME = "propertyUrn"; + + // private static final String AUDIT_STAMP_FIELD = "auditStamp"; + // private static final String TIME_FIELD = "time"; + // private static final String ACTOR_FIELD = "actor"; + + @Override + public StructuredProperties getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + if (recordTemplate instanceof StructuredProperties) { + return (StructuredProperties) recordTemplate; + } + throw new ClassCastException("Unable to cast RecordTemplate to StructuredProperties"); + } + + @Override + public Class<StructuredProperties> getTemplateType() { + return StructuredProperties.class; + } + + @Nonnull + @Override + public StructuredProperties getDefault() { + StructuredProperties structuredProperties = new StructuredProperties(); + structuredProperties.setProperties(new StructuredPropertyValueAssignmentArray()); + // .setAuditStamp(new + // AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + return structuredProperties; + } + + @Nonnull + @Override + public JsonNode transformFields(JsonNode baseNode) { + return arrayFieldToMap( + baseNode, PROPERTIES_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); + } + + @Nonnull + @Override + public JsonNode rebaseFields(JsonNode patched) { + return
transformedMapToArray( + patched, PROPERTIES_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dashboard/DashboardInfoTemplate.java similarity index 94% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dashboard/DashboardInfoTemplate.java index eae04b5285adf..85ce06b01c1d7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dashboard/DashboardInfoTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template.dashboard; +package com.linkedin.metadata.aspect.patch.template.dashboard; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.AuditStamp; @@ -11,7 +11,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.dashboard.DashboardInfo; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataflow/DataFlowInfoTemplate.java similarity index 89% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataflow/DataFlowInfoTemplate.java index 73e837f368f0b..28ee769521995 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataflow/DataFlowInfoTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.dataflow; +package com.linkedin.metadata.aspect.patch.template.dataflow; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataFlowInfo; -import com.linkedin.metadata.models.registry.template.Template; +import com.linkedin.metadata.aspect.patch.template.Template; import javax.annotation.Nonnull; public class DataFlowInfoTemplate implements Template { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInfoTemplate.java similarity index 89% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInfoTemplate.java index bdb306c2d32e4..7cb986da0cba6 100644 --- 
a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInfoTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.datajob; +package com.linkedin.metadata.aspect.patch.template.datajob; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataJobInfo; -import com.linkedin.metadata.models.registry.template.Template; +import com.linkedin.metadata.aspect.patch.template.Template; import javax.annotation.Nonnull; public class DataJobInfoTemplate implements Template { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInputOutputTemplate.java similarity index 96% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInputOutputTemplate.java index 6761892b1b31b..3d398d97b50c3 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInputOutputTemplate.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry.template.datajob; +package com.linkedin.metadata.aspect.patch.template.datajob; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.DataJobUrnArray; @@ -8,7 +8,7 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.dataset.FineGrainedLineageArray; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataproduct/DataProductPropertiesTemplate.java similarity index 91% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataproduct/DataProductPropertiesTemplate.java index 899c51a7c3d7e..9b117114395b1 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataproduct/DataProductPropertiesTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.dataproduct; +package com.linkedin.metadata.aspect.patch.template.dataproduct; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.dataproduct.DataProductAssociationArray; import com.linkedin.dataproduct.DataProductProperties; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; 
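All of these templates share the ArrayMergingTemplate round-trip that the new StructuredPropertiesTemplate above also relies on. A small sketch of the effect (the property urn and the exact keyed shape are illustrative assumptions, not taken from this diff):

    // Sketch: requires com.fasterxml.jackson.databind.ObjectMapper; readTree may throw.
    StructuredPropertiesTemplate template = new StructuredPropertiesTemplate();
    ObjectMapper mapper = new ObjectMapper();
    // Stored form: "properties" is an array of value assignments.
    JsonNode base = mapper.readTree(
        "{\"properties\":[{\"propertyUrn\":\"urn:li:structuredProperty:retention\","
            + "\"values\":[{\"string\":\"30d\"}]}]}");
    JsonNode keyed = template.transformFields(base);  // array -> object keyed by propertyUrn
    JsonNode restored = template.rebaseFields(keyed); // keyed object -> array again

Keying the array by propertyUrn is what lets a JSON Patch operation add or remove a single property assignment without knowing its index in the array.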
import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/DatasetPropertiesTemplate.java similarity index 91% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/DatasetPropertiesTemplate.java index 991f7f3d4053a..cf76bed2fd3f7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/DatasetPropertiesTemplate.java @@ -1,11 +1,11 @@ -package com.linkedin.metadata.models.registry.template.dataset; +package com.linkedin.metadata.aspect.patch.template.dataset; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/EditableSchemaMetadataTemplate.java similarity index 92% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/EditableSchemaMetadataTemplate.java index 9712a9081d33a..0b3605708e610 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/EditableSchemaMetadataTemplate.java @@ -1,15 +1,15 @@ -package com.linkedin.metadata.models.registry.template.dataset; +package com.linkedin.metadata.aspect.patch.template.dataset; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; -import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate; -import com.linkedin.metadata.models.registry.template.common.GlossaryTermsTemplate; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; +import com.linkedin.metadata.aspect.patch.template.common.GlobalTagsTemplate; +import com.linkedin.metadata.aspect.patch.template.common.GlossaryTermsTemplate; import com.linkedin.schema.EditableSchemaFieldInfoArray; import com.linkedin.schema.EditableSchemaMetadata; import java.util.Collections; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java 
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/UpstreamLineageTemplate.java similarity index 96% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/UpstreamLineageTemplate.java index 81a4065dedb1a..6907181b3f7ff 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/UpstreamLineageTemplate.java @@ -1,7 +1,10 @@ -package com.linkedin.metadata.models.registry.template.dataset; +package com.linkedin.metadata.aspect.patch.template.dataset; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.FINE_GRAINED_LINEAGE_DATASET_TYPE; +import static com.linkedin.metadata.Constants.FINE_GRAINED_LINEAGE_FIELD_SET_TYPE; +import static com.linkedin.metadata.Constants.FINE_GRAINED_LINEAGE_FIELD_TYPE; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -13,7 +16,7 @@ import com.linkedin.dataset.FineGrainedLineageArray; import com.linkedin.dataset.UpstreamArray; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java index dd9bbcda8f4af..aec0a4cfa0706 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java @@ -27,6 +27,13 @@ @Slf4j public class PluginFactory { + private static final String[] VALIDATOR_PACKAGES = { + "com.linkedin.metadata.aspect.plugins.validation", "com.linkedin.metadata.aspect.validation" + }; + private static final String[] HOOK_PACKAGES = { + "com.linkedin.metadata.aspect.plugins.hooks", "com.linkedin.metadata.aspect.hooks" + }; + public static PluginFactory withCustomClasspath( @Nullable PluginConfiguration pluginConfiguration, @Nonnull List classLoaders) { return new PluginFactory(pluginConfiguration, classLoaders); @@ -178,17 +185,14 @@ private List buildAspectPayloadValidators( build( AspectPayloadValidator.class, pluginConfiguration.getAspectPayloadValidators(), - "com.linkedin.metadata.aspect.plugins.validation")); + VALIDATOR_PACKAGES)); } private List buildMutationHooks(@Nullable PluginConfiguration pluginConfiguration) { return pluginConfiguration == null ? List.of() : applyDisable( - build( - MutationHook.class, - pluginConfiguration.getMutationHooks(), - "com.linkedin.metadata.aspect.plugins.hooks")); + build(MutationHook.class, pluginConfiguration.getMutationHooks(), HOOK_PACKAGES)); } private List buildMCLSideEffects( @@ -196,10 +200,7 @@ private List buildMCLSideEffects( return pluginConfiguration == null ? 
List.of() : applyDisable( - build( - MCLSideEffect.class, - pluginConfiguration.getMclSideEffects(), - "com.linkedin.metadata.aspect.plugins.hooks")); + build(MCLSideEffect.class, pluginConfiguration.getMclSideEffects(), HOOK_PACKAGES)); } private List buildMCPSideEffects( @@ -207,10 +208,7 @@ private List buildMCPSideEffects( return pluginConfiguration == null ? List.of() : applyDisable( - build( - MCPSideEffect.class, - pluginConfiguration.getMcpSideEffects(), - "com.linkedin.metadata.aspect.plugins.hooks")); + build(MCPSideEffect.class, pluginConfiguration.getMcpSideEffects(), HOOK_PACKAGES)); } private List build( @@ -226,6 +224,11 @@ private List build( config -> { try { ClassInfo classInfo = classMap.get(config.getClassName()); + if (classInfo == null) { + throw new IllegalStateException( + String.format( + "The following class cannot be loaded: %s", config.getClassName())); + } MethodInfo constructorMethod = classInfo.getConstructorInfo().get(0); return Stream.of( (T) constructorMethod.loadClassAndGetConstructor().newInstance(config)); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java index 03a0473677fb8..d88b05ede8454 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java @@ -38,9 +38,11 @@ && isChangeTypeSupported(changeType) protected boolean isEntityAspectSupported( @Nonnull String entityName, @Nonnull String aspectName) { - return (ENTITY_WILDCARD.equals(entityName) - || getConfig().getSupportedEntityAspectNames().stream() - .anyMatch(supported -> supported.getEntityName().equals(entityName))) + return (getConfig().getSupportedEntityAspectNames().stream() + .anyMatch( + supported -> + ENTITY_WILDCARD.equals(supported.getEntityName()) + || supported.getEntityName().equals(entityName))) && isAspectSupported(aspectName); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java index ef9786f8d711e..a21f3cd2436de 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.aspect.plugins.PluginSpec; import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -23,16 +22,12 @@ public MCLSideEffect(AspectPluginConfig aspectPluginConfig) { * @return additional upserts */ public final Stream apply( - @Nonnull List input, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + @Nonnull List input, @Nonnull AspectRetriever aspectRetriever) { return input.stream() .filter(item -> shouldApply(item.getChangeType(), item.getUrn(), item.getAspectSpec())) - .flatMap(i -> applyMCLSideEffect(i, entityRegistry, aspectRetriever)); + .flatMap(i -> applyMCLSideEffect(i, aspectRetriever)); } protected abstract Stream applyMCLSideEffect( - @Nonnull MCLBatchItem input, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever); + 
@Nonnull MCLBatchItem input, @Nonnull AspectRetriever aspectRetriever); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java index fc1d1587d10fb..80cb405201c87 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.aspect.plugins.PluginSpec; import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -23,14 +22,12 @@ public MCPSideEffect(AspectPluginConfig aspectPluginConfig) { * @return additional upserts */ public final Stream<UpsertItem> apply( - List<UpsertItem> input, - EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + List<UpsertItem> input, @Nonnull AspectRetriever aspectRetriever) { return input.stream() .filter(item -> shouldApply(item.getChangeType(), item.getUrn(), item.getAspectSpec())) - .flatMap(i -> applyMCPSideEffect(i, entityRegistry, aspectRetriever)); + .flatMap(i -> applyMCPSideEffect(i, aspectRetriever)); } protected abstract Stream<UpsertItem> applyMCPSideEffect( - UpsertItem input, EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever); + UpsertItem input, @Nonnull AspectRetriever aspectRetriever); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java index 78aa4689472f5..00a20b3131c2a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java @@ -2,12 +2,35 @@ import com.linkedin.common.urn.Urn; import com.linkedin.entity.Aspect; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; +import java.util.Map; +import java.util.Set; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public interface AspectRetriever { - Aspect getLatestAspectObject(@Nonnull final Urn urn, @Nonnull final String aspectName) + @Nullable + default Aspect getLatestAspectObject(@Nonnull final Urn urn, @Nonnull final String aspectName) + throws RemoteInvocationException, URISyntaxException { + return getLatestAspectObjects(Set.of(urn), Set.of(aspectName)) + .getOrDefault(urn, Map.of()) + .get(aspectName); + } + + /** + * Returns for each URN, the map of aspectName to Aspect + * + * @param urns urns to fetch + * @param aspectNames aspect names + * @return urn to aspect name and values + */ + @Nonnull + Map<Urn, Map<String, Aspect>> getLatestAspectObjects(Set<Urn> urns, Set<String> aspectNames) throws RemoteInvocationException, URISyntaxException; + + @Nonnull + EntityRegistry getEntityRegistry(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java new file mode 100644 index 0000000000000..5a4635da433ae --- /dev/null +++
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java @@ -0,0 +1,91 @@ +package com.linkedin.metadata.aspect.validation; + +import static com.linkedin.structured.PropertyCardinality.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class PropertyDefinitionValidator extends AspectPayloadValidator { + + public PropertyDefinitionValidator(AspectPluginConfig aspectPluginConfig) { + super(aspectPluginConfig); + } + + @Override + protected void validateProposedAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nonnull RecordTemplate aspectPayload, + @Nonnull AspectRetriever aspectRetriever) + throws AspectValidationException { + // No-op + } + + @Override + protected void validatePreCommitAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate previousAspect, + @Nonnull RecordTemplate proposedAspect, + AspectRetriever aspectRetriever) + throws AspectValidationException { + validate(previousAspect, proposedAspect); + } + + public static boolean validate( + @Nullable RecordTemplate previousAspect, @Nonnull RecordTemplate proposedAspect) + throws AspectValidationException { + if (previousAspect != null) { + StructuredPropertyDefinition previousDefinition = + (StructuredPropertyDefinition) previousAspect; + StructuredPropertyDefinition newDefinition = (StructuredPropertyDefinition) proposedAspect; + if (!newDefinition.getValueType().equals(previousDefinition.getValueType())) { + throw new AspectValidationException( + "Value type cannot be changed as this is a backwards incompatible change"); + } + if (newDefinition.getCardinality().equals(SINGLE) + && previousDefinition.getCardinality().equals(MULTIPLE)) { + throw new AspectValidationException( + "Property definition cardinality cannot be changed from MULTI to SINGLE"); + } + if (!newDefinition.getQualifiedName().equals(previousDefinition.getQualifiedName())) { + throw new AspectValidationException( + "Cannot change the fully qualified name of a Structured Property"); + } + // Assure new definition has only added allowed values, not removed them + if (newDefinition.getAllowedValues() != null) { + if (!previousDefinition.hasAllowedValues() + || previousDefinition.getAllowedValues() == null) { + throw new AspectValidationException( + "Cannot restrict values that were previously allowed"); + } + Set newAllowedValues = + newDefinition.getAllowedValues().stream() + .map(PropertyValue::getValue) + .collect(Collectors.toSet()); + for (PropertyValue value : previousDefinition.getAllowedValues()) { + if (!newAllowedValues.contains(value.getValue())) { + throw new AspectValidationException( + "Cannot restrict values that were previously 
allowed"); + } + } + } + } + return true; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java new file mode 100644 index 0000000000000..efd95e0c2e3f1 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java @@ -0,0 +1,264 @@ +package com.linkedin.metadata.aspect.validation; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.entity.Aspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.LogicalValueType; +import com.linkedin.metadata.models.StructuredPropertyUtils; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.PropertyCardinality; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +/** A Validator for StructuredProperties Aspect that is attached to entities like Datasets, etc. 
*/ +@Slf4j +public class StructuredPropertiesValidator extends AspectPayloadValidator { + + private static final Set<LogicalValueType> VALID_VALUE_STORED_AS_STRING = + new HashSet<>( + Arrays.asList( + LogicalValueType.STRING, + LogicalValueType.RICH_TEXT, + LogicalValueType.DATE, + LogicalValueType.URN)); + + public StructuredPropertiesValidator(AspectPluginConfig aspectPluginConfig) { + super(aspectPluginConfig); + } + + public static LogicalValueType getLogicalValueType(Urn valueType) { + String valueTypeId = getValueTypeId(valueType); + if (valueTypeId.equals("string")) { + return LogicalValueType.STRING; + } else if (valueTypeId.equals("date")) { + return LogicalValueType.DATE; + } else if (valueTypeId.equals("number")) { + return LogicalValueType.NUMBER; + } else if (valueTypeId.equals("urn")) { + return LogicalValueType.URN; + } else if (valueTypeId.equals("rich_text")) { + return LogicalValueType.RICH_TEXT; + } + + return LogicalValueType.UNKNOWN; + } + + @Override + protected void validateProposedAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nonnull RecordTemplate aspectPayload, + @Nonnull AspectRetriever aspectRetriever) + throws AspectValidationException { + validate(aspectPayload, aspectRetriever); + } + + public static boolean validate( + @Nonnull RecordTemplate aspectPayload, @Nonnull AspectRetriever aspectRetriever) + throws AspectValidationException { + StructuredProperties structuredProperties = (StructuredProperties) aspectPayload; + log.warn("Validator called with {}", structuredProperties); + Map<Urn, List<StructuredPropertyValueAssignment>> structuredPropertiesMap = + structuredProperties.getProperties().stream() + .collect( + Collectors.groupingBy( + x -> x.getPropertyUrn(), + HashMap::new, + Collectors.toCollection(ArrayList::new))); + for (Map.Entry<Urn, List<StructuredPropertyValueAssignment>> entry : + structuredPropertiesMap.entrySet()) { + // There should only be one entry per structured property + List<StructuredPropertyValueAssignment> values = entry.getValue(); + if (values.size() > 1) { + throw new AspectValidationException( + "Property: " + entry.getKey() + " has multiple entries: " + values); + } + } + + for (StructuredPropertyValueAssignment structuredPropertyValueAssignment : + structuredProperties.getProperties()) { + Urn propertyUrn = structuredPropertyValueAssignment.getPropertyUrn(); + String property = propertyUrn.toString(); + if (!propertyUrn.getEntityType().equals("structuredProperty")) { + throw new IllegalStateException( + "Unexpected entity type. Expected: structuredProperty Found: " + + propertyUrn.getEntityType()); + } + Aspect structuredPropertyDefinitionAspect = null; + try { + structuredPropertyDefinitionAspect = + aspectRetriever.getLatestAspectObject(propertyUrn, "propertyDefinition"); + + if (structuredPropertyDefinitionAspect == null) { + throw new AspectValidationException("Unexpected null value found."); + } + } catch (Exception e) { + log.error("Could not fetch latest aspect. PropertyUrn: {}", propertyUrn, e); + throw new AspectValidationException("Could not fetch latest aspect: " + e.getMessage(), e); + } + + StructuredPropertyDefinition structuredPropertyDefinition = + new StructuredPropertyDefinition(structuredPropertyDefinitionAspect.data()); + log.warn( + "Retrieved property definition for {}.
{}", propertyUrn, structuredPropertyDefinition); + if (structuredPropertyDefinition != null) { + PrimitivePropertyValueArray values = structuredPropertyValueAssignment.getValues(); + // Check cardinality + if (structuredPropertyDefinition.getCardinality() == PropertyCardinality.SINGLE) { + if (values.size() > 1) { + throw new AspectValidationException( + "Property: " + + property + + " has cardinality 1, but multiple values were assigned: " + + values); + } + } + // Check values + for (PrimitivePropertyValue value : values) { + validateType(propertyUrn, structuredPropertyDefinition, value); + validateAllowedValues(propertyUrn, structuredPropertyDefinition, value); + } + } + } + + return true; + } + + private static void validateAllowedValues( + Urn propertyUrn, StructuredPropertyDefinition definition, PrimitivePropertyValue value) + throws AspectValidationException { + if (definition.getAllowedValues() != null) { + Set definedValues = + definition.getAllowedValues().stream() + .map(PropertyValue::getValue) + .collect(Collectors.toSet()); + if (definedValues.stream().noneMatch(definedPrimitive -> definedPrimitive.equals(value))) { + throw new AspectValidationException( + String.format( + "Property: %s, value: %s should be one of %s", propertyUrn, value, definedValues)); + } + } + } + + private static void validateType( + Urn propertyUrn, StructuredPropertyDefinition definition, PrimitivePropertyValue value) + throws AspectValidationException { + Urn valueType = definition.getValueType(); + LogicalValueType typeDefinition = getLogicalValueType(valueType); + + // Primitive Type Validation + if (VALID_VALUE_STORED_AS_STRING.contains(typeDefinition)) { + log.debug( + "Property definition demands a string value. {}, {}", value.isString(), value.isDouble()); + if (value.getString() == null) { + throw new AspectValidationException( + "Property: " + propertyUrn.toString() + ", value: " + value + " should be a string"); + } else if (typeDefinition.equals(LogicalValueType.DATE)) { + if (!StructuredPropertyUtils.isValidDate(value)) { + throw new AspectValidationException( + "Property: " + + propertyUrn.toString() + + ", value: " + + value + + " should be a date with format YYYY-MM-DD"); + } + } else if (typeDefinition.equals(LogicalValueType.URN)) { + StringArrayMap valueTypeQualifier = definition.getTypeQualifier(); + Urn typeValue; + try { + typeValue = Urn.createFromString(value.getString()); + } catch (URISyntaxException e) { + throw new AspectValidationException( + "Property: " + propertyUrn.toString() + ", value: " + value + " should be an urn", e); + } + if (valueTypeQualifier != null) { + if (valueTypeQualifier.containsKey("allowedTypes")) { + // Let's get the allowed types and validate that the value is one of those types + StringArray allowedTypes = valueTypeQualifier.get("allowedTypes"); + boolean matchedAny = false; + for (String type : allowedTypes) { + Urn typeUrn = null; + try { + typeUrn = Urn.createFromString(type); + } catch (URISyntaxException e) { + + // we don't expect to have types that we allowed to be written that aren't + // urns + throw new RuntimeException(e); + } + String allowedEntityName = getValueTypeId(typeUrn); + if (typeValue.getEntityType().equals(allowedEntityName)) { + matchedAny = true; + } + } + if (!matchedAny) { + throw new AspectValidationException( + "Property: " + + propertyUrn.toString() + + ", value: " + + value + + " is not of any supported urn types:" + + allowedTypes); + } + } + } + } + } else if (typeDefinition.equals(LogicalValueType.NUMBER)) { + 
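+ // For example (values illustrative): a NUMBER-typed property accepts {"double": 42.0}
+ // directly and {"string": "42"} via Double.parseDouble below, while
+ // {"string": "forty-two"} throws NumberFormatException and is rejected as an
+ // AspectValidationException.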
log.debug("Property definition demands a numeric value. {}, {}", value.isString(), value); + try { + Double doubleValue = + value.getDouble() != null ? value.getDouble() : Double.parseDouble(value.getString()); + } catch (NumberFormatException | NullPointerException e) { + throw new AspectValidationException( + "Property: " + propertyUrn.toString() + ", value: " + value + " should be a number"); + } + } else { + throw new AspectValidationException( + "Validation support for type " + definition.getValueType() + " is not yet implemented."); + } + } + + private static String getValueTypeId(@Nonnull final Urn valueType) { + String valueTypeId = valueType.getId(); + if (valueTypeId.startsWith("datahub.")) { + valueTypeId = valueTypeId.split("\\.")[1]; + } + return valueTypeId; + } + + @Override + protected void validatePreCommitAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate previousAspect, + @Nonnull RecordTemplate proposedAspect, + AspectRetriever aspectRetriever) + throws AspectValidationException { + // No-op + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java b/entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java new file mode 100644 index 0000000000000..1643ce900f748 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java @@ -0,0 +1,10 @@ +package com.linkedin.metadata.models; + +public enum LogicalValueType { + STRING, + RICH_TEXT, + NUMBER, + DATE, + URN, + UNKNOWN +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java new file mode 100644 index 0000000000000..a8711429421f3 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java @@ -0,0 +1,45 @@ +package com.linkedin.metadata.models; + +import com.linkedin.structured.PrimitivePropertyValue; +import java.sql.Date; +import java.time.format.DateTimeParseException; + +public class StructuredPropertyUtils { + + private StructuredPropertyUtils() {} + + static final Date MIN_DATE = Date.valueOf("1000-01-01"); + static final Date MAX_DATE = Date.valueOf("9999-12-31"); + + /** + * Sanitizes fully qualified name for use in an ElasticSearch field name Replaces . 
and " " + * characters + * + * @param fullyQualifiedName The original fully qualified name of the property + * @return The sanitized version that can be used as a field name + */ + public static String sanitizeStructuredPropertyFQN(String fullyQualifiedName) { + String sanitizedName = fullyQualifiedName.replace('.', '_').replace(' ', '_'); + return sanitizedName; + } + + public static Date toDate(PrimitivePropertyValue value) throws DateTimeParseException { + return Date.valueOf(value.getString()); + } + + public static boolean isValidDate(PrimitivePropertyValue value) { + if (value.getString() == null) { + return false; + } + if (value.getString().length() != 10) { + return false; + } + Date date; + try { + date = toDate(value); + } catch (DateTimeParseException e) { + return false; + } + return date.compareTo(MIN_DATE) >= 0 && date.compareTo(MAX_DATE) <= 0; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index ce8718c536fbe..bd9a6b6c9e589 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.data.schema.DataSchema; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DataSchemaFactory; @@ -18,7 +19,6 @@ import com.linkedin.metadata.models.registry.config.Entities; import com.linkedin.metadata.models.registry.config.Entity; import com.linkedin.metadata.models.registry.config.Event; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.util.Pair; import java.io.FileInputStream; import java.io.FileNotFoundException; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java index fbc3285579cc0..c2aa1fab6c2c0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.models.registry; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.aspect.plugins.hooks.MCLSideEffect; import com.linkedin.metadata.aspect.plugins.hooks.MCPSideEffect; @@ -10,7 +11,6 @@ import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -39,11 +39,10 @@ default String getIdentifier() { EntitySpec getEntitySpec(@Nonnull final String entityName); /** - * Given an event name, returns an instance of {@link DefaultEventSpec}. + * Given an event name, returns an instance of {@link EventSpec}. 
* * @param eventName the name of the event to be retrieved - * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none - * exists. + * @return an {@link EventSpec} corresponding to the entity name provided, null if none exists. */ @Nullable EventSpec getEventSpec(@Nonnull final String eventName); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java index 285b96b93d1d6..650a1cd41066e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java @@ -3,13 +3,13 @@ import com.linkedin.data.schema.compatibility.CompatibilityChecker; import com.linkedin.data.schema.compatibility.CompatibilityOptions; import com.linkedin.data.schema.compatibility.CompatibilityResult; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.ConfigEntitySpec; import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import java.util.ArrayList; import java.util.HashMap; import java.util.List; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java index c605cfa188fc8..35bfe935423f0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.data.schema.DataSchema; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DataSchemaFactory; @@ -17,7 +18,6 @@ import com.linkedin.metadata.models.registry.config.Entities; import com.linkedin.metadata.models.registry.config.Entity; import com.linkedin.metadata.models.registry.config.Event; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.util.Pair; import java.io.FileInputStream; import java.io.FileNotFoundException; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index bb0113abc9ed6..8fefa2fe00ae8 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -5,25 +5,26 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; +import com.linkedin.metadata.aspect.patch.template.Template; +import com.linkedin.metadata.aspect.patch.template.chart.ChartInfoTemplate; +import 
com.linkedin.metadata.aspect.patch.template.common.GlobalTagsTemplate; +import com.linkedin.metadata.aspect.patch.template.common.GlossaryTermsTemplate; +import com.linkedin.metadata.aspect.patch.template.common.OwnershipTemplate; +import com.linkedin.metadata.aspect.patch.template.common.StructuredPropertiesTemplate; +import com.linkedin.metadata.aspect.patch.template.dashboard.DashboardInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.dataflow.DataFlowInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.datajob.DataJobInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.datajob.DataJobInputOutputTemplate; +import com.linkedin.metadata.aspect.patch.template.dataproduct.DataProductPropertiesTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.DatasetPropertiesTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.EditableSchemaMetadataTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.UpstreamLineageTemplate; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; import com.linkedin.metadata.models.EventSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; -import com.linkedin.metadata.models.registry.template.Template; -import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate; -import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate; -import com.linkedin.metadata.models.registry.template.common.GlossaryTermsTemplate; -import com.linkedin.metadata.models.registry.template.common.OwnershipTemplate; -import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate; -import com.linkedin.metadata.models.registry.template.dataflow.DataFlowInfoTemplate; -import com.linkedin.metadata.models.registry.template.datajob.DataJobInfoTemplate; -import com.linkedin.metadata.models.registry.template.datajob.DataJobInputOutputTemplate; -import com.linkedin.metadata.models.registry.template.dataproduct.DataProductPropertiesTemplate; -import com.linkedin.metadata.models.registry.template.dataset.DatasetPropertiesTemplate; -import com.linkedin.metadata.models.registry.template.dataset.EditableSchemaMetadataTemplate; -import com.linkedin.metadata.models.registry.template.dataset.UpstreamLineageTemplate; import com.linkedin.metadata.snapshot.Snapshot; import java.util.ArrayList; import java.util.HashMap; @@ -84,6 +85,8 @@ private AspectTemplateEngine populateTemplateEngine(Map aspe aspectSpecTemplateMap.put(CHART_INFO_ASPECT_NAME, new ChartInfoTemplate()); aspectSpecTemplateMap.put(DASHBOARD_INFO_ASPECT_NAME, new DashboardInfoTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate()); + aspectSpecTemplateMap.put( + STRUCTURED_PROPERTIES_ASPECT_NAME, new StructuredPropertiesTemplate()); return new AspectTemplateEngine(aspectSpecTemplateMap); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java deleted file mode 100644 index 44090b3a6d05b..0000000000000 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.linkedin.metadata.models.registry.template; - -import static 
com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - -import com.datahub.util.RecordUtils; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.github.fge.jsonpatch.JsonPatchException; -import com.github.fge.jsonpatch.Patch; -import com.linkedin.data.template.RecordTemplate; -import java.util.List; - -public abstract class CompoundKeyTemplate - implements ArrayMergingTemplate { - - /** - * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent - * paths to be specified - * - * @param transformedNode transformed node to have keys populated - * @return transformed node that has top level keys populated - */ - public JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) { - JsonNode transformedNodeClone = transformedNode.deepCopy(); - List paths = getPaths(jsonPatch); - for (String path : paths) { - String[] keys = path.split("/"); - // Skip first as it will always be blank due to path starting with /, skip last key as we only - // need to populate top level - JsonNode parent = transformedNodeClone; - for (int i = 1; i < keys.length - 1; i++) { - if (parent.get(keys[i]) == null) { - ((ObjectNode) parent).set(keys[i], instance.objectNode()); - } - parent = parent.get(keys[i]); - } - } - - return transformedNodeClone; - } - - @Override - public T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) - throws JsonProcessingException, JsonPatchException { - JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch); - JsonNode patched = jsonPatch.apply(transformed); - JsonNode postProcessed = rebaseFields(patched); - return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); - } -} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java deleted file mode 100644 index 18d070ec3da45..0000000000000 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.linkedin.metadata.models.registry.template.util; - -import static com.linkedin.metadata.Constants.*; - -import com.fasterxml.jackson.core.StreamReadConstraints; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.fge.jsonpatch.Patch; -import java.util.ArrayList; -import java.util.List; - -public class TemplateUtil { - - private TemplateUtil() {} - - public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - static { - int maxSize = - Integer.parseInt( - System.getenv() - .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER - .getFactory() - .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - - public static List getPaths(Patch jsonPatch) { - JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); - List paths = new ArrayList<>(); - patchNode - .elements() - .forEachRemaining( - node -> { - paths.add(node.get("path").asText()); - }); - return paths; - } -} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java 
b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/ChartInfoTemplateTest.java similarity index 92% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/ChartInfoTemplateTest.java index 108936bde2ed5..b2911100519fc 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/ChartInfoTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry; +package com.linkedin.metadata.aspect.patch.template; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; @@ -9,7 +9,7 @@ import com.github.fge.jsonpatch.JsonPatchOperation; import com.linkedin.chart.ChartInfo; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.chart.ChartInfoTemplate; import java.util.ArrayList; import java.util.List; import org.testng.Assert; diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/DashboardInfoTemplateTest.java similarity index 91% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/DashboardInfoTemplateTest.java index 962ff1d40d873..be15d6976aee6 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/DashboardInfoTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry.patch; +package com.linkedin.metadata.aspect.patch.template; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; @@ -9,7 +9,7 @@ import com.github.fge.jsonpatch.JsonPatchOperation; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dashboard.DashboardInfo; -import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.dashboard.DashboardInfoTemplate; import java.util.ArrayList; import java.util.List; import org.testng.Assert; diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/UpstreamLineageTemplateTest.java similarity index 99% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/UpstreamLineageTemplateTest.java index 8f410ae8da085..7d59664513d57 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/UpstreamLineageTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry.patch; +package com.linkedin.metadata.aspect.patch.template; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; @@ -16,7 +16,7 @@ import com.linkedin.dataset.FineGrainedLineageDownstreamType; import 
com.linkedin.dataset.FineGrainedLineageUpstreamType; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.metadata.models.registry.template.dataset.UpstreamLineageTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.UpstreamLineageTemplate; import java.util.ArrayList; import java.util.List; import org.testng.Assert; diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java index 8c3f71fcc8019..f801ce7bf1ffe 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java @@ -61,17 +61,16 @@ public void testConfigEntityRegistry() throws FileNotFoundException { assertNotNull(eventSpec.getPegasusSchema()); assertEquals( - configEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "*", "status").size(), + configEntityRegistry + .getAspectPayloadValidators(ChangeType.UPSERT, "chart", "status") + .size(), 2); assertEquals( - configEntityRegistry.getAspectPayloadValidators(ChangeType.DELETE, "*", "status").size(), + configEntityRegistry + .getAspectPayloadValidators(ChangeType.DELETE, "chart", "status") + .size(), 0); - assertEquals( - configEntityRegistry.getMCLSideEffects(ChangeType.UPSERT, "chart", "chartInfo").size(), 1); - assertEquals( - configEntityRegistry.getMCLSideEffects(ChangeType.DELETE, "chart", "chartInfo").size(), 0); - assertEquals( configEntityRegistry.getMCPSideEffects(ChangeType.UPSERT, "dataset", "datasetKey").size(), 1); @@ -124,17 +123,16 @@ public void testMergedEntityRegistry() throws EntityRegistryException { assertNotNull(eventSpec.getPegasusSchema()); assertEquals( - mergedEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "*", "status").size(), - 3); + mergedEntityRegistry + .getAspectPayloadValidators(ChangeType.UPSERT, "chart", "status") + .size(), + 2); assertEquals( - mergedEntityRegistry.getAspectPayloadValidators(ChangeType.DELETE, "*", "status").size(), + mergedEntityRegistry + .getAspectPayloadValidators(ChangeType.DELETE, "chart", "status") + .size(), 1); - assertEquals( - mergedEntityRegistry.getMCLSideEffects(ChangeType.UPSERT, "chart", "chartInfo").size(), 2); - assertEquals( - mergedEntityRegistry.getMCLSideEffects(ChangeType.DELETE, "chart", "chartInfo").size(), 1); - assertEquals( mergedEntityRegistry.getMCPSideEffects(ChangeType.UPSERT, "dataset", "datasetKey").size(), 2); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java index ce904142fecfe..8ee5ff4f99820 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java @@ -9,7 +9,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -60,9 +59,7 @@ public TestMCLSideEffect(AspectPluginConfig aspectPluginConfig) { @Override protected Stream applyMCLSideEffect( - @Nonnull MCLBatchItem input, - @Nonnull 
EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + @Nonnull MCLBatchItem input, @Nonnull AspectRetriever aspectRetriever) { return Stream.of(input); } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java index ee8f947e0e994..8522e8facf3e0 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java @@ -9,7 +9,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -60,7 +59,7 @@ public TestMCPSideEffect(AspectPluginConfig aspectPluginConfig) { @Override protected Stream applyMCPSideEffect( - UpsertItem input, EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever) { + UpsertItem input, @Nonnull AspectRetriever aspectRetriever) { return Stream.of(input); } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java index 07c99ee8546be..eb132836be465 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java @@ -33,7 +33,7 @@ public void testCustomValidator() { TestEntityProfile.class.getClassLoader().getResourceAsStream(REGISTRY_FILE)); List validators = - configEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "*", "status"); + configEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "chart", "status"); assertEquals( validators, List.of( diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java new file mode 100644 index 0000000000000..96e9fceb4a05d --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java @@ -0,0 +1,212 @@ +package com.linkedin.metadata.aspect.validators; + +import static org.testng.Assert.*; + +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.validation.PropertyDefinitionValidator; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PropertyCardinality; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.PropertyValueArray; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.net.URISyntaxException; +import org.testng.annotations.Test; + +public class PropertyDefinitionValidatorTest { + @Test + public void testValidatePreCommitNoPrevious() + throws URISyntaxException, AspectValidationException { + StructuredPropertyDefinition newProperty = new StructuredPropertyDefinition(); + newProperty.setEntityTypes( + new 
UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + newProperty.setDisplayName("newProp"); + newProperty.setQualifiedName("prop3"); + newProperty.setCardinality(PropertyCardinality.MULTIPLE); + newProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + assertTrue(PropertyDefinitionValidator.validate(null, newProperty)); + } + + @Test + public void testCanChangeSingleToMultiple() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.SINGLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setCardinality(PropertyCardinality.MULTIPLE); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeMultipleToSingle() + throws URISyntaxException, CloneNotSupportedException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setCardinality(PropertyCardinality.SINGLE); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeValueType() throws URISyntaxException, CloneNotSupportedException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setValueType(Urn.createFromString("urn:li:logicalType:NUMBER")); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCanChangeDisplayName() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + 
Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setDisplayName("newProp"); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeFullyQualifiedName() + throws URISyntaxException, CloneNotSupportedException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setQualifiedName("newProp"); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeRestrictAllowedValues() + throws URISyntaxException, CloneNotSupportedException { + // No constraint -> constraint case + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + PropertyValue allowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(1.0)).setDescription("hello"); + newProperty.setAllowedValues(new PropertyValueArray(allowedValue)); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + + // Remove allowed values from constraint case + PropertyValue oldAllowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(3.0)).setDescription("hello"); + oldProperty.setAllowedValues((new PropertyValueArray(allowedValue, oldAllowedValue))); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCanExpandAllowedValues() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + // Constraint -> no constraint case + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = 
oldProperty.copy(); + PropertyValue allowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(1.0)).setDescription("hello"); + oldProperty.setAllowedValues(new PropertyValueArray(allowedValue)); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + + // Add allowed values to constraint case + PropertyValue newAllowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(3.0)).setDescription("hello"); + newProperty.setAllowedValues((new PropertyValueArray(allowedValue, newAllowedValue))); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCanChangeAllowedValueDescriptions() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + // Constraint -> no constraint case + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + PropertyValue allowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(1.0)).setDescription("hello"); + oldProperty.setAllowedValues(new PropertyValueArray(allowedValue)); + PropertyValue newAllowedValue = + new PropertyValue() + .setValue(PrimitivePropertyValue.create(1.0)) + .setDescription("hello there"); + newProperty.setAllowedValues(new PropertyValueArray(newAllowedValue)); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } +} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java new file mode 100644 index 0000000000000..450b299b48b34 --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java @@ -0,0 +1,246 @@ +package com.linkedin.metadata.aspect.validators; + +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.Aspect; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.PropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nonnull; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class StructuredPropertiesValidatorTest { + + static class 
MockAspectRetriever implements AspectRetriever { + StructuredPropertyDefinition _propertyDefinition; + + MockAspectRetriever(StructuredPropertyDefinition defToReturn) { + this._propertyDefinition = defToReturn; + } + + @Nonnull + @Override + public Map> getLatestAspectObjects( + Set urns, Set aspectNames) + throws RemoteInvocationException, URISyntaxException { + return Map.of( + urns.stream().findFirst().get(), + Map.of(aspectNames.stream().findFirst().get(), new Aspect(_propertyDefinition.data()))); + } + + @Nonnull + @Override + public EntityRegistry getEntityRegistry() { + return null; + } + } + + @Test + public void testValidateAspectNumberUpsert() throws URISyntaxException { + StructuredPropertyDefinition numberPropertyDef = + new StructuredPropertyDefinition() + .setValueType(Urn.createFromString("urn:li:type:datahub.number")) + .setAllowedValues( + new PropertyValueArray( + List.of( + new PropertyValue().setValue(PrimitivePropertyValue.create(30.0)), + new PropertyValue().setValue(PrimitivePropertyValue.create(60.0)), + new PropertyValue().setValue(PrimitivePropertyValue.create(90.0))))); + + try { + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create(30.0))); + StructuredProperties numberPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + boolean isValid = + StructuredPropertiesValidator.validate( + numberPayload, new MockAspectRetriever(numberPropertyDef)); + Assert.assertTrue(isValid); + } catch (AspectValidationException e) { + throw new RuntimeException(e); + } + + try { + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create(0.0))); + StructuredProperties numberPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + StructuredPropertiesValidator.validate( + numberPayload, new MockAspectRetriever(numberPropertyDef)); + Assert.fail("Should have raised exception for disallowed value 0.0"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("{double=0.0} should be one of [{")); + } + + // Assign string value to number property + StructuredPropertyValueAssignment stringAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create("hello"))); + StructuredProperties stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(stringAssignment)); + try { + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(numberPropertyDef)); + Assert.fail("Should have raised exception for mis-matched types"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("should be a number")); + } + } + + @Test + public void testValidateAspectDateUpsert() throws URISyntaxException { + // Assign string value + StructuredPropertyValueAssignment stringAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + 
Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create("hello"))); + StructuredProperties stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(stringAssignment)); + + // Assign invalid date + StructuredPropertyDefinition datePropertyDef = + new StructuredPropertyDefinition() + .setValueType(Urn.createFromString("urn:li:type:datahub.date")); + try { + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(datePropertyDef)); + Assert.fail("Should have raised exception for mis-matched types"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("should be a date with format")); + } + + // Assign valid date + StructuredPropertyValueAssignment dateAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues( + new PrimitivePropertyValueArray(PrimitivePropertyValue.create("2023-10-24"))); + StructuredProperties datePayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(dateAssignment)); + try { + boolean isValid = + StructuredPropertiesValidator.validate( + datePayload, new MockAspectRetriever(datePropertyDef)); + Assert.assertTrue(isValid); + } catch (AspectValidationException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testValidateAspectStringUpsert() throws URISyntaxException { + // Assign string value + StructuredPropertyValueAssignment stringAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create("hello"))); + StructuredProperties stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(stringAssignment)); + + // Assign date + StructuredPropertyValueAssignment dateAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues( + new PrimitivePropertyValueArray(PrimitivePropertyValue.create("2023-10-24"))); + StructuredProperties datePayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(dateAssignment)); + + // Assign number + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create(30.0))); + StructuredProperties numberPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + StructuredPropertyDefinition stringPropertyDef = + new StructuredPropertyDefinition() + .setValueType(Urn.createFromString("urn:li:type:datahub.string")) + .setAllowedValues( + new PropertyValueArray( + List.of( + new PropertyValue().setValue(PrimitivePropertyValue.create("hello")), + new PropertyValue() + .setValue(PrimitivePropertyValue.create("2023-10-24"))))); + + // Valid strings (both the date value and "hello" are valid) + try { + boolean isValid = + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(stringPropertyDef)); + Assert.assertTrue(isValid); + isValid = 
+ StructuredPropertiesValidator.validate( + datePayload, new MockAspectRetriever(stringPropertyDef)); + Assert.assertTrue(isValid); + } catch (AspectValidationException e) { + throw new RuntimeException(e); + } + + // Invalid: assign a number to the string property + try { + StructuredPropertiesValidator.validate( + numberPayload, new MockAspectRetriever(stringPropertyDef)); + Assert.fail("Should have raised exception for mis-matched types"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("should be a string")); + } + + // Invalid allowedValue + try { + assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues( + new PrimitivePropertyValueArray(PrimitivePropertyValue.create("not hello"))); + stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(stringPropertyDef)); + Assert.fail("Should have raised exception for disallowed value `not hello`"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("{string=not hello} should be one of [{")); + } + } +} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index 2cb48c1b20da9..d9cf8fd2603a8 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -198,7 +198,7 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { .getSearchableAnnotation() .getFieldName()); assertEquals( - SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.TEXT, testEntityInfo .getSearchableFieldSpecMap() .get(new PathSpec("customProperties").toString()) diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java index b3eb2af72708c..1a64359008dd8 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java @@ -6,6 +6,7 @@ import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.RecordDataSchema; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DataSchemaFactory; import com.linkedin.metadata.models.DefaultEntitySpec; @@ -17,7 +18,6 @@ import com.linkedin.metadata.models.annotation.EventAnnotation; import com.linkedin.metadata.models.registry.config.EntityRegistryLoadResult; import com.linkedin.metadata.models.registry.config.LoadStatus; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.util.Pair; import java.io.FileNotFoundException; import java.util.ArrayList; diff --git a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java index f9d22b142cbb9..8174afc20765f 100644 
--- a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java +++ b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java @@ -11,11 +11,11 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.ingestion.DataHubIngestionSourceConfig; import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.query.ListResult; import java.util.Collections; @@ -88,7 +88,7 @@ public void setupTest() throws Exception { .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse2.getAspects()).thenReturn(map2); - JavaEntityClient mockClient = Mockito.mock(JavaEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); // Set up mocks for ingestion source batch fetching Mockito.when( diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index 3d9b533dc8f72..39a17612aa4b3 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -13,6 +13,9 @@ public class Constants { public static final String UNKNOWN_ACTOR = "urn:li:corpuser:UNKNOWN"; // Unknown principal. public static final Long ASPECT_LATEST_VERSION = 0L; public static final String UNKNOWN_DATA_PLATFORM = "urn:li:dataPlatform:unknown"; + public static final String ENTITY_TYPE_URN_PREFIX = "urn:li:entityType:"; + public static final String DATA_TYPE_URN_PREFIX = "urn:li:dataType:"; + public static final String STRUCTURED_PROPERTY_MAPPING_FIELD = "structuredProperties"; // !!!!!!! IMPORTANT !!!!!!! // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. 
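The URN prefix constants added in the hunk above line up with the entity-type and data-type URNs used by the structured-property example files later in this patch (e.g. `urn:li:entityType:datahub.dataset`, `urn:li:dataType:datahub.string`). As a minimal, hedged sketch of how such prefixes combine with a dotted qualified name — the `UrnPrefixSketch` class and its helper methods below are hypothetical illustrations for clarity, not part of this change:

```java
public final class UrnPrefixSketch {
  // Duplicated from the Constants.java additions above so this sketch is self-contained.
  private static final String ENTITY_TYPE_URN_PREFIX = "urn:li:entityType:";
  private static final String DATA_TYPE_URN_PREFIX = "urn:li:dataType:";

  // Hypothetical helper: prepend the entity-type prefix to a qualified name.
  static String entityTypeUrn(String qualifiedName) {
    return ENTITY_TYPE_URN_PREFIX + qualifiedName;
  }

  // Hypothetical helper: prepend the data-type prefix to a qualified name.
  static String dataTypeUrn(String qualifiedName) {
    return DATA_TYPE_URN_PREFIX + qualifiedName;
  }

  public static void main(String[] args) {
    // Prints "urn:li:entityType:datahub.dataset" and "urn:li:dataType:datahub.string",
    // matching the URNs used in the structured-property example MCP files in this patch.
    System.out.println(entityTypeUrn("datahub.dataset"));
    System.out.println(dataTypeUrn("datahub.string"));
  }
}
```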
@@ -73,6 +76,10 @@ public class Constants { public static final String QUERY_ENTITY_NAME = "query"; public static final String DATA_PRODUCT_ENTITY_NAME = "dataProduct"; public static final String OWNERSHIP_TYPE_ENTITY_NAME = "ownershipType"; + public static final String STRUCTURED_PROPERTY_ENTITY_NAME = "structuredProperty"; + public static final String DATA_TYPE_ENTITY_NAME = "dataType"; + public static final String ENTITY_TYPE_ENTITY_NAME = "entityType"; + public static final String FORM_ENTITY_NAME = "form"; /** Aspects */ // Common @@ -125,6 +132,8 @@ public class Constants { public static final String VIEW_PROPERTIES_ASPECT_NAME = "viewProperties"; public static final String DATASET_PROFILE_ASPECT_NAME = "datasetProfile"; + public static final String STRUCTURED_PROPERTIES_ASPECT_NAME = "structuredProperties"; + public static final String FORMS_ASPECT_NAME = "forms"; // Aspect support public static final String FINE_GRAINED_LINEAGE_DATASET_TYPE = "DATASET"; public static final String FINE_GRAINED_LINEAGE_FIELD_SET_TYPE = "FIELD_SET"; @@ -306,6 +315,20 @@ public class Constants { public static final String OWNERSHIP_TYPE_KEY_ASPECT_NAME = "ownershipTypeKey"; public static final String OWNERSHIP_TYPE_INFO_ASPECT_NAME = "ownershipTypeInfo"; + // Structured Property + public static final String STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME = "propertyDefinition"; + + // Form + public static final String FORM_INFO_ASPECT_NAME = "formInfo"; + public static final String FORM_KEY_ASPECT_NAME = "formKey"; + public static final String DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME = "dynamicFormAssignment"; + + // Data Type + public static final String DATA_TYPE_INFO_ASPECT_NAME = "dataTypeInfo"; + + // Entity Type + public static final String ENTITY_TYPE_INFO_ASPECT_NAME = "entityTypeInfo"; + // Settings public static final String GLOBAL_SETTINGS_ENTITY_NAME = "globalSettings"; public static final String GLOBAL_SETTINGS_INFO_ASPECT_NAME = "globalSettingsInfo"; diff --git a/metadata-ingestion-modules/airflow-plugin/scripts/release.sh b/metadata-ingestion-modules/airflow-plugin/scripts/release.sh index 87157479f37d6..5667e761ea558 100755 --- a/metadata-ingestion-modules/airflow-plugin/scripts/release.sh +++ b/metadata-ingestion-modules/airflow-plugin/scripts/release.sh @@ -13,7 +13,7 @@ MODULE=datahub_airflow_plugin python -c 'import setuptools; where="./src"; assert setuptools.find_packages(where) == setuptools.find_namespace_packages(where), "you seem to be missing or have extra __init__.py files"' if [[ ${RELEASE_VERSION:-} ]]; then # Replace version with RELEASE_VERSION env variable - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/${MODULE}/__init__.py + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/${MODULE}/__init__.py else vim src/${MODULE}/__init__.py fi diff --git a/metadata-ingestion/examples/bootstrap_data/business_glossary.yml b/metadata-ingestion/examples/bootstrap_data/business_glossary.yml index de6ba8731c878..327246863b0ab 100644 --- a/metadata-ingestion/examples/bootstrap_data/business_glossary.yml +++ b/metadata-ingestion/examples/bootstrap_data/business_glossary.yml @@ -10,6 +10,8 @@ nodes: knowledge_links: - label: Wiki link for classification url: "https://en.wikipedia.org/wiki/Classification" + custom_properties: + is_confidential: true terms: - name: Sensitive description: Sensitive Data diff --git a/metadata-ingestion/examples/forms/forms.yaml b/metadata-ingestion/examples/forms/forms.yaml 
new file mode 100644 index 0000000000000..80bb7cee08ec3 --- /dev/null +++ b/metadata-ingestion/examples/forms/forms.yaml @@ -0,0 +1,54 @@ +- id: 123456 + # urn: "urn:li:form:123456" # optional if id is provided + type: VERIFICATION + name: "Metadata Initiative 2023" + description: "How we want to ensure the most important data assets in our organization have all of the most important and expected pieces of metadata filled out" + prompts: + - id: "123" + title: "Retention Time" + description: "Apply Retention Time structured property to form" + type: STRUCTURED_PROPERTY + structured_property_id: io.acryl.privacy.retentionTime + required: True # optional, will default to True + - id: "92847" + title: "Replication SLA" + description: "Apply Replication SLA structured property to form" + type: STRUCTURED_PROPERTY + structured_property_urn: urn:li:structuredProperty:io.acryl.dataManagement.replicationSLA + required: True + - id: "76543" + title: "Replication SLA" + description: "Apply Replication SLA structured property to form" + type: FIELDS_STRUCTURED_PROPERTY + structured_property_urn: urn:li:structuredProperty:io.acryl.dataManagement.replicationSLA + required: False + entities: # Either pass a list of urns or a group of filters + # urns: + # - urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) + # - urn:li:dataset:(urn:li:dataPlatform:snowflake,user.clicks,PROD) + filters: + types: + - dataset + platforms: + - snowflake + - dbt + domains: + - urn:li:domain:b41fbb69-1549-4f30-a463-d75d1bed31c1 + containers: + - urn:li:container:21d4204e13d5b984c58acad468ecdbdd +- urn: "urn:li:form:917364" + # id: 917364 # optional if urn is provided + type: VERIFICATION + name: "Governance Initiative" + prompts: + - id: "123" + title: "Retention Time" + description: "Apply Retention Time structured property to form" + type: STRUCTURED_PROPERTY + structured_property_id: io.acryl.privacy.retentionTime + required: False + - id: "certifier" + title: "Certifier" + type: STRUCTURED_PROPERTY + structured_property_id: io.acryl.dataManagement.certifier + required: True diff --git a/metadata-ingestion/examples/mce_files/test_structured_properties.json b/metadata-ingestion/examples/mce_files/test_structured_properties.json new file mode 100644 index 0000000000000..7771883152d38 --- /dev/null +++ b/metadata-ingestion/examples/mce_files/test_structured_properties.json @@ -0,0 +1,218 @@ +[ + { + "auditHeader": null, + "entityType": "entityType", + "entityUrn": "urn:li:entityType:datahub.dataset", + "changeType": "UPSERT", + "aspectName": "entityTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.dataset\", \"displayName\": \"Dataset\", \"description\": \"An entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "entityType", + "entityUrn": "urn:li:entityType:datahub.corpuser", + "changeType": "UPSERT", + "aspectName": "entityTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.corpuser\", \"displayName\": \"User\", \"description\": \"An entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "entityType", + "entityUrn": "urn:li:entityType:datahub.corpGroup", + "changeType": "UPSERT", + "aspectName": "entityTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.corpGroup\", \"displayName\": \"Group\", \"description\": \"An entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + 
"auditHeader": null, + "entityType": "dataType", + "entityUrn": "urn:li:dataType:datahub.string", + "changeType": "UPSERT", + "aspectName": "dataTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.string\", \"displayName\": \"String\", \"description\": \"A string type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "dataType", + "entityUrn": "urn:li:dataType:datahub.float", + "changeType": "UPSERT", + "aspectName": "dataTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.float\", \"displayName\": \"Number\", \"description\": \"A number type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "dataType", + "entityUrn": "urn:li:dataType:datahub.urn", + "changeType": "UPSERT", + "aspectName": "dataTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.urn\", \"displayName\": \"Urn\", \"description\": \"A entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property1", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property1\", \"displayName\": \"String Property\", \"valueType\": \"urn:li:dataType:datahub.string\", \"cardinality\": \"SINGLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property2", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property2\", \"displayName\": \"String Property With Allowed Values\", \"valueType\": \"urn:li:dataType:datahub.string\", \"cardinality\": \"MULTIPLE\", \"allowedValues\": [ { \"value\": { \"string\": \"Test 1\" } }, { \"value\": { \"string\": \"Test 2\" } } ], \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property3", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property3\", \"displayName\": \"Numeric Property\", \"valueType\": \"urn:li:dataType:datahub.float\", \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property4", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property4\", \"displayName\": \"Numeric Property with Allowed Values\", \"valueType\": \"urn:li:dataType:datahub.float\", \"cardinality\": \"MULTIPLE\", \"allowedValues\": [ { \"value\": { \"double\": 0.12 } }, { \"value\": { \"double\": 1 } } ], \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": 
"structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property5", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property5\", \"displayName\": \"Urn property no type qualifier\", \"valueType\": \"urn:li:dataType:datahub.urn\", \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property6", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property6\", \"displayName\": \"Urn property with 1 type qualifier (user)\", \"valueType\": \"urn:li:dataType:datahub.urn\", \"typeQualifier\": { \"allowedTypes\": [\"urn:li:entityType:datahub.corpuser\"] }, \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property7", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property7\", \"displayName\": \"Urn property with 2 type qualifier (user)\", \"valueType\": \"urn:li:dataType:datahub.urn\", \"typeQualifier\": { \"allowedTypes\": [\"urn:li:entityType:datahub.corpuser\", \"urn:li:entityType:datahub.corpGroup\"] }, \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "form", + "entityUrn": "urn:li:form:my-test-form-verification-default-3", + "changeType": "UPSERT", + "aspectName": "formInfo", + "aspect": { + "value": "{\"name\": \"My test form\", \"description\": \"My test description\", \"type\": \"VERIFICATION\", \"prompts\": [{\"id\": \"prompt-1\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}, {\"id\": \"prompt-2\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}]}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "form", + "entityUrn": "urn:li:form:my-test-no-verification-3", + "changeType": "UPSERT", + "aspectName": "formInfo", + "aspect": { + "value": "{\"name\": \"My test form without verification\", \"description\": \"My test description\", \"prompts\": [{\"id\": \"prompt-1\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}, {\"id\": \"prompt-2\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}]}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": 
"form", + "entityUrn": "urn:li:form:my-test-no-verification-custom-5", + "changeType": "UPSERT", + "aspectName": "formInfo", + "aspect": { + "value": "{\"name\": \"My test form with custom verification\", \"description\": \"My test description\", \"type\": \"VERIFICATION\", \"verification\": { \"type\": \"urn:li:verificationType:my-test\"}, \"prompts\": [{\"id\": \"prompt-1\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"required\": true, \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" } }, {\"id\": \"prompt-2\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"required\": true, \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}]}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "form", + "entityUrn": "urn:li:form:my-test-no-verification-custom-5", + "changeType": "UPSERT", + "aspectName": "dynamicFormAssignment", + "aspect": { + "value": "{\"filter\": { \"or\": [ { \"and\": [ { \"field\": \"platform\", \"condition\": \"EQUAL\", \"values\": [\"urn:li:dataPlatform:snowflake\"], \"value\": \"\" } ] } ] } }", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)", + "changeType": "UPSERT", + "aspectName": "forms", + "aspect": { + "value": "{ \"incompleteForms\":[\n {\n \"incompletePrompts\":[\n \n ],\n \"urn\":\"urn:li:form:my-test-no-verification-custom-4\",\n \"completedPrompts\":[\n {\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697585983115\n },\n \"id\":\"prompt-2\"\n },\n {\n \"id\":\"prompt-1\",\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697585983252\n }\n }\n ]\n },\n {\n \"incompletePrompts\":[\n \n ],\n \"urn\":\"urn:li:form:my-test-no-verification-custom-5\",\n \"completedPrompts\":[\n {\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697645753521\n },\n \"id\":\"prompt-2\"\n },\n {\n \"id\":\"prompt-1\",\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697645754180\n }\n }\n ]\n }\n ],\n \"completedForms\":[\n \n ]}", + "contentType": "application/json" + }, + "systemMetadata": null + } +] \ No newline at end of file diff --git a/metadata-ingestion/examples/structured_properties/README.md b/metadata-ingestion/examples/structured_properties/README.md new file mode 100644 index 0000000000000..0429310be7424 --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/README.md @@ -0,0 +1,51 @@ +# Extended Properties + +## Expected Capabilities + +### structured_properties command + +```yaml +- id: io.acryl.privacy.retentionTime + # urn: urn:li:structuredProperty:<> + # fullyQualifiedName: io.acryl.privacy.retentionTime + type: STRING + cardinality: MULTIPLE + entityTypes: + - dataset # or urn:li:logicalEntity:metamodel.datahub.dataset + - dataflow + description: "Retention Time is used to figure out how long to retain records in a dataset" + allowedValues: + - value: 30 days + description: 30 days, usually reserved for datasets that are ephemeral and contain pii + - value: 3 months + description: Use this for datasets that drive monthly reporting but contain pii + - value: 2 yrs + description: Use this for 
non-sensitive data that can be retained for longer +- id: io.acryl.dataManagement.replicationSLA + type: NUMBER + description: "SLA for how long data can be delayed before replicating to the destination cluster" + entityTypes: + - dataset +- id: io.acryl.dataManagement.deprecationDate + type: DATE + entityTypes: + - dataset + - dataFlow + - dataJob +``` + +``` +datahub properties create -f structured_properties.yaml +``` + +``` +datahub properties create --name io.acryl.privacy.retentionTime --type STRING --cardinality MULTIPLE --entity_type DATASET --entity_type DATAFLOW +``` + +### dataset command + +``` +datahub dataset create -f dataset.yaml +``` + +See example in `dataproduct`. diff --git a/metadata-ingestion/examples/structured_properties/click_event.avsc b/metadata-ingestion/examples/structured_properties/click_event.avsc new file mode 100644 index 0000000000000..b277674f8b62f --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/click_event.avsc @@ -0,0 +1,14 @@ +{ + "namespace": "org.acryl", + "type": "record", + "name": "ClickEvent", + "fields": [ + { "name": "ip", "type": "string" }, + { "name": "url", "type": "string" }, + { "name": "time", "type": "long" }, + { "name": "referer", "type": ["string", "null"] }, + { "name": "user_agent", "type": ["string", "null"] }, + { "name": "user_id", "type": ["string", "null"] }, + { "name": "session_id", "type": ["string", "null"] } + ] +} diff --git a/metadata-ingestion/examples/structured_properties/dataset.yaml b/metadata-ingestion/examples/structured_properties/dataset.yaml new file mode 100644 index 0000000000000..557bf0167a51b --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/dataset.yaml @@ -0,0 +1,45 @@ +## This file is used to define a dataset and provide metadata for it +- id: user.clicks + platform: hive + # - urn: urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) # use urn instead of id and platform + subtype: Table + schema: + file: examples/structured_properties/click_event.avsc + fields: + - id: ip + - urn: urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD),ip) + structured_properties: # structured properties for schema fields/columns go here + io.acryl.dataManagement.deprecationDate: "2023-01-01" + io.acryl.dataManagement.certifier: urn:li:corpuser:john.doe@example.com + io.acryl.dataManagement.replicationSLA: 90 + structured_properties: # dataset level structured properties go here + io.acryl.privacy.retentionTime: 365 + projectNames: + - Tracking + - DataHub +- id: ClickEvent + platform: events + subtype: Topic + description: | + This is a sample event that is generated when a user clicks on a link. + Do not use this event for any purpose other than testing. 
+ properties: + project_name: Tracking + namespace: org.acryl.tracking + version: 1.0.0 + retention: 30 + structured_properties: + io.acryl.dataManagement.certifier: urn:li:corpuser:john.doe@example.com + schema: + file: examples/structured_properties/click_event.avsc + downstreams: + - urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) +- id: user.clicks + platform: snowflake + schema: + fields: + - id: user_id + structured_properties: + io.acryl.dataManagement.deprecationDate: "2023-01-01" + structured_properties: + io.acryl.dataManagement.replicationSLA: 90 diff --git a/metadata-ingestion/examples/structured_properties/structured_properties.yaml b/metadata-ingestion/examples/structured_properties/structured_properties.yaml new file mode 100644 index 0000000000000..5c7ce47ba3b8a --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/structured_properties.yaml @@ -0,0 +1,68 @@ +- id: io.acryl.privacy.retentionTime + # - urn: urn:li:structuredProperty:io.acryl.privacy.retentionTime # optional if id is provided + qualified_name: io.acryl.privacy.retentionTime # required if urn is provided + type: number + cardinality: MULTIPLE + display_name: Retention Time + entity_types: + - dataset # or urn:li:entityType:datahub.dataset + - dataFlow + description: "Retention Time is used to figure out how long to retain records in a dataset" + allowed_values: + - value: 30 + description: 30 days, usually reserved for datasets that are ephemeral and contain pii + - value: 90 + description: Use this for datasets that drive monthly reporting but contain pii + - value: 365 + description: Use this for non-sensitive data that can be retained for longer +- id: io.acryl.dataManagement.replicationSLA + type: number + display_name: Replication SLA + description: "SLA for how long data can be delayed before replicating to the destination cluster" + entity_types: + - dataset +- id: io.acryl.dataManagement.deprecationDate + type: date + display_name: Deprecation Date + entity_types: + - dataset + - dataFlow + - dataJob +- id: io.acryl.dataManagement.steward + type: urn + type_qualifier: + allowed_types: # only user and group urns are allowed + - corpuser + - corpGroup + display_name: Steward + entity_types: + - dataset + - dataFlow + - dataJob +- id: io.acryl.dataManagement.certifier + type: urn + display_name: Person Certifying the asset + entity_types: + - dataset + - schemaField +- id: io.acryl.dataManagement.team + type: string + display_name: Management team + entity_types: + - dataset +- id: projectNames + type: string + cardinality: MULTIPLE + display_name: Project names + entity_types: + - dataset + allowed_values: + - value: Tracking + description: test value 1 for project + - value: DataHub + description: test value 2 for project +- id: namespace + type: string + display_name: Namespace + entity_types: + - dataset diff --git a/metadata-ingestion/scripts/docgen.sh b/metadata-ingestion/scripts/docgen.sh index affb87f2e70a9..09fa2be912f61 100755 --- a/metadata-ingestion/scripts/docgen.sh +++ b/metadata-ingestion/scripts/docgen.sh @@ -7,4 +7,4 @@ DOCS_OUT_DIR=$DATAHUB_ROOT/docs/generated/ingestion EXTRA_DOCS_DIR=$DATAHUB_ROOT/metadata-ingestion/docs/sources rm -r $DOCS_OUT_DIR || true -python scripts/docgen.py --out-dir ${DOCS_OUT_DIR} --extra-docs ${EXTRA_DOCS_DIR} $@ +SPARK_VERSION=3.3 python scripts/docgen.py --out-dir ${DOCS_OUT_DIR} --extra-docs ${EXTRA_DOCS_DIR} $@ diff --git a/metadata-ingestion/scripts/modeldocgen.py b/metadata-ingestion/scripts/modeldocgen.py index 
81b26145e620c..610c6d3107916 100644 --- a/metadata-ingestion/scripts/modeldocgen.py +++ b/metadata-ingestion/scripts/modeldocgen.py @@ -493,10 +493,32 @@ def strip_types(field_path: str) -> str: ], ) +@dataclass +class EntityAspectName: + entityName: str + aspectName: str + + +@dataclass +class AspectPluginConfig: + className: str + enabled: bool + supportedOperations: List[str] + supportedEntityAspectNames: List[EntityAspectName] + + +@dataclass +class PluginConfiguration: + aspectPayloadValidators: Optional[List[AspectPluginConfig]] = None + mutationHooks: Optional[List[AspectPluginConfig]] = None + mclSideEffects: Optional[List[AspectPluginConfig]] = None + mcpSideEffects: Optional[List[AspectPluginConfig]] = None + class EntityRegistry(ConfigModel): entities: List[EntityDefinition] events: Optional[List[EventDefinition]] + plugins: Optional[PluginConfiguration] = None def load_registry_file(registry_file: str) -> Dict[str, EntityDefinition]: diff --git a/metadata-ingestion/scripts/release.sh b/metadata-ingestion/scripts/release.sh index eacaf1d920a8d..955eb562089f7 100755 --- a/metadata-ingestion/scripts/release.sh +++ b/metadata-ingestion/scripts/release.sh @@ -11,7 +11,7 @@ fi python -c 'import setuptools; where="./src"; assert setuptools.find_packages(where) == setuptools.find_namespace_packages(where), "you seem to be missing or have extra __init__.py files"' if [[ ${RELEASE_VERSION:-} ]]; then # Replace version with RELEASE_VERSION env variable - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py else vim src/datahub/__init__.py fi diff --git a/metadata-ingestion/src/datahub/api/entities/dataset/__init__.py b/metadata-ingestion/src/datahub/api/entities/dataset/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py b/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py new file mode 100644 index 0000000000000..3b4a5fbfbb061 --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py @@ -0,0 +1,466 @@ +import json +import logging +from pathlib import Path +from typing import Dict, Iterable, List, Optional, Tuple, Union + +from pydantic import BaseModel, Field, validator +from ruamel.yaml import YAML + +from datahub.api.entities.structuredproperties.structuredproperties import ( + AllowedTypes, + StructuredProperties, +) +from datahub.configuration.common import ConfigModel +from datahub.emitter.mce_builder import ( + make_data_platform_urn, + make_dataset_urn, + make_schema_field_urn, +) +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.extractor.schema_util import avro_schema_to_mce_fields +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.schema_classes import ( + DatasetPropertiesClass, + MetadataChangeProposalClass, + OtherSchemaClass, + SchemaFieldClass, + SchemaMetadataClass, + StructuredPropertiesClass, + StructuredPropertyValueAssignmentClass, + SubTypesClass, + UpstreamClass, +) +from datahub.specific.dataset import DatasetPatchBuilder +from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.urn import Urn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class SchemaFieldSpecification(BaseModel): + id: Optional[str] + urn: 
Optional[str] + structured_properties: Optional[ + Dict[str, Union[str, float, List[Union[str, float]]]] + ] = None + type: Optional[str] + nativeDataType: Optional[str] = None + jsonPath: Union[None, str] = None + nullable: Optional[bool] = None + description: Union[None, str] = None + label: Optional[str] = None + created: Optional[dict] = None + lastModified: Optional[dict] = None + recursive: Optional[bool] = None + globalTags: Optional[dict] = None + glossaryTerms: Optional[dict] = None + isPartOfKey: Optional[bool] = None + isPartitioningKey: Optional[bool] = None + jsonProps: Optional[dict] = None + + def with_structured_properties( + self, + structured_properties: Optional[Dict[str, List[Union[str, float]]]], + ) -> "SchemaFieldSpecification": + self.structured_properties = ( + {k: v for k, v in structured_properties.items()} + if structured_properties + else None + ) + return self + + @classmethod + def from_schema_field( + cls, schema_field: SchemaFieldClass, parent_urn: str + ) -> "SchemaFieldSpecification": + return SchemaFieldSpecification( + id=Dataset._simplify_field_path(schema_field.fieldPath), + urn=make_schema_field_urn( + parent_urn, Dataset._simplify_field_path(schema_field.fieldPath) + ), + type=str(schema_field.type), + nativeDataType=schema_field.nativeDataType, + nullable=schema_field.nullable, + description=schema_field.description, + label=schema_field.label, + created=schema_field.created.__dict__ if schema_field.created else None, + lastModified=schema_field.lastModified.__dict__ + if schema_field.lastModified + else None, + recursive=schema_field.recursive, + globalTags=schema_field.globalTags.__dict__ + if schema_field.globalTags + else None, + glossaryTerms=schema_field.glossaryTerms.__dict__ + if schema_field.glossaryTerms + else None, + isPartitioningKey=schema_field.isPartitioningKey, + jsonProps=json.loads(schema_field.jsonProps) + if schema_field.jsonProps + else None, + ) + + @validator("urn", pre=True, always=True) + def either_id_or_urn_must_be_filled_out(cls, v, values): + if not v and not values.get("id"): + raise ValueError("Either id or urn must be present") + return v + + +class SchemaSpecification(BaseModel): + file: Optional[str] + fields: Optional[List[SchemaFieldSpecification]] + + @validator("file") + def file_must_be_avsc(cls, v): + if v and not v.endswith(".avsc"): + raise ValueError("file must be a .avsc file") + return v + + +class StructuredPropertyValue(ConfigModel): + value: Union[str, float, List[str], List[float]] + created: Optional[str] + lastModified: Optional[str] + + +class Dataset(BaseModel): + id: Optional[str] + platform: Optional[str] + env: str = "PROD" + urn: Optional[str] + description: Optional[str] + name: Optional[str] + schema_metadata: Optional[SchemaSpecification] = Field(alias="schema") + downstreams: Optional[List[str]] + properties: Optional[Dict[str, str]] + subtype: Optional[str] + subtypes: Optional[List[str]] + structured_properties: Optional[ + Dict[str, Union[str, float, List[Union[str, float]]]] + ] = None + + @property + def platform_urn(self) -> str: + if self.platform: + return make_data_platform_urn(self.platform) + else: + assert self.urn is not None # validator should have filled this in + dataset_urn = DatasetUrn.create_from_string(self.urn) + return str(dataset_urn.get_data_platform_urn()) + + @validator("urn", pre=True, always=True) + def urn_must_be_present(cls, v, values): + if not v: + assert "id" in values, "id must be present if urn is not" + assert "platform" in values, "platform must be 
present if urn is not" + assert "env" in values, "env must be present if urn is not" + return make_dataset_urn(values["platform"], values["id"], values["env"]) + return v + + @validator("name", pre=True, always=True) + def name_filled_with_id_if_not_present(cls, v, values): + if not v: + assert "id" in values, "id must be present if name is not" + return values["id"] + return v + + @validator("platform") + def platform_must_not_be_urn(cls, v): + if v.startswith("urn:li:dataPlatform:"): + return v[len("urn:li:dataPlatform:") :] + return v + + @classmethod + def from_yaml(cls, file: str) -> Iterable["Dataset"]: + with open(file) as fp: + yaml = YAML(typ="rt") # default, if not specified, is 'rt' (round-trip) + datasets: Union[dict, List[dict]] = yaml.load(fp) + if isinstance(datasets, dict): + datasets = [datasets] + for dataset_raw in datasets: + dataset = Dataset.parse_obj(dataset_raw) + yield dataset + + def generate_mcp( + self, + ) -> Iterable[Union[MetadataChangeProposalClass, MetadataChangeProposalWrapper]]: + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, + aspect=DatasetPropertiesClass( + description=self.description, + name=self.name, + customProperties=self.properties, + ), + ) + yield mcp + + if self.schema_metadata: + if self.schema_metadata.file: + with open(self.schema_metadata.file, "r") as schema_fp: + schema_string = schema_fp.read() + schema_metadata = SchemaMetadataClass( + schemaName=self.name or self.id or self.urn or "", + platform=self.platform_urn, + version=0, + hash="", + platformSchema=OtherSchemaClass(rawSchema=schema_string), + fields=avro_schema_to_mce_fields(schema_string), + ) + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, aspect=schema_metadata + ) + yield mcp + + if self.schema_metadata.fields: + for field in self.schema_metadata.fields: + field_urn = field.urn or make_schema_field_urn( + self.urn, field.id # type: ignore[arg-type] + ) + assert field_urn.startswith("urn:li:schemaField:") + if field.structured_properties: + # field_properties_flattened = ( + # Dataset.extract_structured_properties( + # field.structured_properties + # ) + # ) + mcp = MetadataChangeProposalWrapper( + entityUrn=field_urn, + aspect=StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:{prop_key}", + values=prop_value + if isinstance(prop_value, list) + else [prop_value], + ) + for prop_key, prop_value in field.structured_properties.items() + ] + ), + ) + yield mcp + + if self.subtype or self.subtypes: + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, + aspect=SubTypesClass( + typeNames=[ + s + for s in [self.subtype] + (self.subtypes or []) + if s + ] + ), + ) + yield mcp + + if self.structured_properties: + # structured_properties_flattened = ( + # Dataset.extract_structured_properties( + # self.structured_properties + # ) + # ) + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, + aspect=StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:{prop_key}", + values=prop_value + if isinstance(prop_value, list) + else [prop_value], + ) + for prop_key, prop_value in self.structured_properties.items() + ] + ), + ) + yield mcp + + if self.downstreams: + for downstream in self.downstreams: + patch_builder = DatasetPatchBuilder(downstream) + assert ( + self.urn is not None + ) # validator should have filled this in + patch_builder.add_upstream_lineage( + UpstreamClass( + dataset=self.urn, 
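+ # Each downstream urn from the YAML gets this dataset attached as a COPY-type upstream via a patch MCP.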
type="COPY", + ) + ) + for patch_event in patch_builder.build(): + yield patch_event + + logger.info(f"Created dataset {self.urn}") + + @staticmethod + def extract_structured_properties( + structured_properties: Dict[str, Union[str, float, List[str], List[float]]] + ) -> List[Tuple[str, Union[str, float]]]: + structured_properties_flattened: List[Tuple[str, Union[str, float]]] = [] + for key, value in structured_properties.items(): + validated_structured_property = Dataset.validate_structured_property( + key, value + ) + if validated_structured_property: + structured_properties_flattened.append(validated_structured_property) + structured_properties_flattened = sorted( + structured_properties_flattened, key=lambda x: x[0] + ) + return structured_properties_flattened + + @staticmethod + def validate_structured_property( + sp_name: str, sp_value: Union[str, float, List[str], List[float]] + ) -> Union[Tuple[str, Union[str, float]], None]: + """ + Validate based on: + 1. Structured property exists/has been created + 2. Structured property value is of the expected type + """ + urn = Urn.make_structured_property_urn(sp_name) + with get_default_graph() as graph: + if graph.exists(urn): + validated_structured_property = StructuredProperties.from_datahub( + graph, urn + ) + allowed_type = Urn.get_data_type_from_urn( + validated_structured_property.type + ) + try: + if not isinstance(sp_value, list): + return Dataset.validate_type(sp_name, sp_value, allowed_type) + else: + for v in sp_value: + return Dataset.validate_type(sp_name, v, allowed_type) + except ValueError: + logger.warning( + f"Property: {sp_name}, value: {sp_value} should be a {allowed_type}." + ) + else: + logger.error( + f"Property {sp_name} does not exist and therefore will not be added to dataset. Please create property before trying again." 
+ ) + return None + + @staticmethod + def validate_type( + sp_name: str, sp_value: Union[str, float], allowed_type: str + ) -> Tuple[str, Union[str, float]]: + if allowed_type == AllowedTypes.NUMBER.value: + return (sp_name, float(sp_value)) + else: + return (sp_name, sp_value) + + @staticmethod + def _simplify_field_path(field_path: str) -> str: + if field_path.startswith("[version=2.0]"): + # v2 field path + field_components = [] + current_field = "" + for c in field_path: + if c == "[": + if current_field: + field_components.append(current_field) + current_field = "" + omit_next = True + elif c == "]": + omit_next = False + elif c == ".": + pass + elif not omit_next: + current_field += c + if current_field: + field_components.append(current_field) + return ".".join(field_components) + else: + return field_path + + @staticmethod + def _schema_from_schema_metadata( + graph: DataHubGraph, urn: str + ) -> Optional[SchemaSpecification]: + schema_metadata: Optional[SchemaMetadataClass] = graph.get_aspect( + urn, SchemaMetadataClass + ) + + if schema_metadata: + schema_specification = SchemaSpecification( + fields=[ + SchemaFieldSpecification.from_schema_field( + field, urn + ).with_structured_properties( + { + sp.propertyUrn: sp.values + for sp in structured_props.properties + } + if structured_props + else None + ) + for field, structured_props in [ + ( + field, + graph.get_aspect( + make_schema_field_urn(urn, field.fieldPath), + StructuredPropertiesClass, + ) + or graph.get_aspect( + make_schema_field_urn( + urn, Dataset._simplify_field_path(field.fieldPath) + ), + StructuredPropertiesClass, + ), + ) + for field in schema_metadata.fields + ] + ] + ) + return schema_specification + else: + return None + + @classmethod + def from_datahub(cls, graph: DataHubGraph, urn: str) -> "Dataset": + dataset_properties: Optional[DatasetPropertiesClass] = graph.get_aspect( + urn, DatasetPropertiesClass + ) + subtypes: Optional[SubTypesClass] = graph.get_aspect(urn, SubTypesClass) + structured_properties: Optional[StructuredPropertiesClass] = graph.get_aspect( + urn, StructuredPropertiesClass + ) + if structured_properties: + structured_properties_map: Dict[str, List[Union[str, float]]] = {} + for sp in structured_properties.properties: + if sp.propertyUrn in structured_properties_map: + assert isinstance(structured_properties_map[sp.propertyUrn], list) + structured_properties_map[sp.propertyUrn].extend(sp.values) # type: ignore[arg-type,union-attr] + else: + structured_properties_map[sp.propertyUrn] = sp.values + + return Dataset( # type: ignore[call-arg] + urn=urn, + description=dataset_properties.description + if dataset_properties and dataset_properties.description + else None, + name=dataset_properties.name + if dataset_properties and dataset_properties.name + else None, + schema=Dataset._schema_from_schema_metadata(graph, urn), + properties=dataset_properties.customProperties + if dataset_properties + else None, + subtypes=[subtype for subtype in subtypes.typeNames] if subtypes else None, + structured_properties=structured_properties_map + if structured_properties + else None, + ) + + def to_yaml( + self, + file: Path, + ) -> None: + with open(file, "w") as fp: + yaml = YAML(typ="rt") # default, if not specified, is 'rt' (round-trip) + yaml.indent(mapping=2, sequence=4, offset=2) + yaml.default_flow_style = False + yaml.dump(self.dict(exclude_none=True, exclude_unset=True), fp) diff --git a/metadata-ingestion/src/datahub/api/entities/forms/__init__.py 
b/metadata-ingestion/src/datahub/api/entities/forms/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/api/entities/forms/forms.py b/metadata-ingestion/src/datahub/api/entities/forms/forms.py new file mode 100644 index 0000000000000..cc43779bda409 --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/forms/forms.py @@ -0,0 +1,353 @@ +import logging +import uuid +from enum import Enum +from pathlib import Path +from typing import List, Optional, Union + +import yaml +from pydantic import validator +from ruamel.yaml import YAML +from typing_extensions import Literal + +from datahub.api.entities.forms.forms_graphql_constants import ( + CREATE_DYNAMIC_FORM_ASSIGNMENT, + FIELD_FILTER_TEMPLATE, + UPLOAD_ENTITIES_FOR_FORMS, +) +from datahub.configuration.common import ConfigModel +from datahub.emitter.mce_builder import ( + make_data_platform_urn, + make_group_urn, + make_user_urn, +) +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.schema_classes import ( + FormInfoClass, + FormPromptClass, + OwnerClass, + OwnershipClass, + OwnershipTypeClass, + StructuredPropertyParamsClass, +) +from datahub.utilities.urns.urn import Urn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class PromptType(Enum): + STRUCTURED_PROPERTY = "STRUCTURED_PROPERTY" + FIELDS_STRUCTURED_PROPERTY = "FIELDS_STRUCTURED_PROPERTY" + + @classmethod + def has_value(cls, value): + return value in cls._value2member_map_ + + +class Prompt(ConfigModel): + id: Optional[str] + title: str + description: Optional[str] + type: str + structured_property_id: Optional[str] + structured_property_urn: Optional[str] + required: Optional[bool] + + @validator("structured_property_urn", pre=True, always=True) + def structured_property_urn_must_be_present(cls, v, values): + if not v and values.get("structured_property_id"): + return Urn.make_structured_property_urn(values["structured_property_id"]) + return v + + +class FormType(Enum): + VERIFICATION = "VERIFICATION" + DOCUMENTATION = "COMPLETION" + + @classmethod + def has_value(cls, value): + return value in cls._value2member_map_ + + +class Filters(ConfigModel): + types: Optional[List[str]] + platforms: Optional[List[str]] + domains: Optional[List[str]] + containers: Optional[List[str]] + + +class Entities(ConfigModel): + urns: Optional[List[str]] + filters: Optional[Filters] + + +class Forms(ConfigModel): + id: Optional[str] + urn: Optional[str] + name: str + description: Optional[str] + prompts: List[Prompt] = [] + type: Optional[str] + version: Optional[Literal[1]] + entities: Optional[Entities] + owners: Optional[List[str]] # can be user IDs or urns + group_owners: Optional[List[str]] # can be group IDs or urns + + @validator("urn", pre=True, always=True) + def urn_must_be_present(cls, v, values): + if not v: + assert values.get("id") is not None, "Form id must be present if urn is not" + return f"urn:li:form:{values['id']}" + return v + + @staticmethod + def create(file: str) -> None: + emitter: DataHubGraph + + with get_default_graph() as emitter: + with open(file, "r") as fp: + forms: List[dict] = yaml.safe_load(fp) + for form_raw in forms: + form = Forms.parse_obj(form_raw) + + try: + if not FormType.has_value(form.type): + logger.error( + f"Form type {form.type} does not exist. Please try again with a valid type." 
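+ # NOTE: this only logs a message; execution continues and the form is still emitted below.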
+ ) + + mcp = MetadataChangeProposalWrapper( + entityUrn=form.urn, + aspect=FormInfoClass( + name=form.name, + description=form.description, + prompts=form.validate_prompts(emitter), + type=form.type, + ), + ) + emitter.emit_mcp(mcp) + + logger.info(f"Created form {form.urn}") + + if form.owners or form.group_owners: + form.add_owners(emitter) + + if form.entities: + if form.entities.urns: + # Associate specific entities with a form + form.upload_entities_for_form(emitter) + + if form.entities.filters: + # Associate groups of entities with a form based on filters + form.create_form_filters(emitter) + + except Exception as e: + logger.error(e) + return + + def validate_prompts(self, emitter: DataHubGraph) -> List[FormPromptClass]: + prompts = [] + if self.prompts: + for prompt in self.prompts: + if not prompt.id: + prompt.id = str(uuid.uuid4()) + logger.warning( + f"Prompt id not provided. Setting prompt id to {prompt.id}" + ) + if prompt.structured_property_urn: + structured_property_urn = prompt.structured_property_urn + if emitter.exists(structured_property_urn): + prompt.structured_property_urn = structured_property_urn + else: + raise Exception( + f"Structured property {structured_property_urn} does not exist. Unable to create form." + ) + elif ( + prompt.type + in ( + PromptType.STRUCTURED_PROPERTY.value, + PromptType.FIELDS_STRUCTURED_PROPERTY.value, + ) + and not prompt.structured_property_urn + ): + raise Exception( + f"Prompt type is {prompt.type} but no structured properties exist. Unable to create form." + ) + + prompts.append( + FormPromptClass( + id=prompt.id, + title=prompt.title, + description=prompt.description, + type=prompt.type, + structuredPropertyParams=StructuredPropertyParamsClass( + urn=prompt.structured_property_urn + ) + if prompt.structured_property_urn + else None, + required=prompt.required, + ) + ) + else: + logger.warning(f"No prompts exist on form {self.urn}. 
Is that intended?") + + return prompts + + def upload_entities_for_form(self, emitter: DataHubGraph) -> Union[None, Exception]: + if self.entities and self.entities.urns: + formatted_entity_urns = ", ".join( + ['"{}"'.format(value) for value in self.entities.urns] + ) + query = UPLOAD_ENTITIES_FOR_FORMS.format( + form_urn=self.urn, entity_urns=formatted_entity_urns + ) + result = emitter.execute_graphql(query=query) + if not result: + return Exception(f"Could not bulk upload entities for form {self.urn}.") + + return None + + def create_form_filters(self, emitter: DataHubGraph) -> Union[None, Exception]: + filters_raw = [] + # Loop through each entity and assign a filter for it + if self.entities and self.entities.filters: + filters = self.entities.filters + if filters.types: + filters_raw.append( + Forms.format_form_filter("_entityType", filters.types) + ) + if filters.platforms: + urns = [ + make_data_platform_urn(platform) for platform in filters.platforms + ] + filters_raw.append(Forms.format_form_filter("platform", urns)) + if filters.domains: + urns = [] + for domain in filters.domains: + domain_urn = Forms.validate_domain_urn(domain) + if domain_urn: + urns.append(domain_urn) + filters_raw.append(Forms.format_form_filter("domains", urns)) + if filters.containers: + urns = [] + for container in filters.containers: + container_urn = Forms.validate_container_urn(container) + if container_urn: + urns.append(container_urn) + filters_raw.append(Forms.format_form_filter("container", urns)) + + filters_str = ", ".join(item for item in filters_raw) + result = emitter.execute_graphql( + query=CREATE_DYNAMIC_FORM_ASSIGNMENT.format( + form_urn=self.urn, filters=filters_str + ) + ) + if not result: + return Exception( + f"Could not bulk upload urns or filters for form {self.urn}." + ) + + return None + + def add_owners(self, emitter: DataHubGraph) -> Union[None, Exception]: + owner_urns: List[str] = [] + if self.owners: + owner_urns += Forms.format_owners(self.owners) + if self.group_owners: + owner_urns += Forms.format_group_owners(self.group_owners) + + ownership = OwnershipClass( + owners=[ + OwnerClass(owner=urn, type=OwnershipTypeClass.TECHNICAL_OWNER) + for urn in (owner_urns or []) + ], + ) + + try: + mcp = MetadataChangeProposalWrapper(entityUrn=self.urn, aspect=ownership) + emitter.emit_mcp(mcp) + except Exception as e: + logger.error(e) + + return None + + @staticmethod + def format_form_filter(field: str, urns: List[str]) -> str: + formatted_urns = ", ".join(['"{}"'.format(urn) for urn in urns]) + return FIELD_FILTER_TEMPLATE.format(field=field, values=formatted_urns) + + @staticmethod + def validate_domain_urn(domain: str) -> Union[str, None]: + if domain.startswith("urn:li:domain:"): + return domain + + logger.warning(f"{domain} is not an urn. Unable to create domain filter.") + return None + + @staticmethod + def validate_container_urn(container: str) -> Union[str, None]: + if container.startswith("urn:li:container:"): + return container + + logger.warning(f"{container} is not an urn. 
Unable to create container filter.") + return None + + @staticmethod + def from_datahub(graph: DataHubGraph, urn: str) -> "Forms": + form: Optional[FormInfoClass] = graph.get_aspect(urn, FormInfoClass) + assert form is not None + prompts = [] + for prompt_raw in form.prompts: + prompts.append( + Prompt( + id=prompt_raw.id, + title=prompt_raw.title, + description=prompt_raw.description, + type=prompt_raw.type, + structured_property_urn=prompt_raw.structuredPropertyParams.urn + if prompt_raw.structuredPropertyParams + else None, + ) + ) + return Forms( + urn=urn, + name=form.name, + description=form.description, + prompts=prompts, + type=form.type, + ) + + @staticmethod + def format_owners(owners: List[str]) -> List[str]: + formatted_owners: List[str] = [] + + for owner in owners: + if owner.startswith("urn:li:"): + formatted_owners.append(owner) + else: + formatted_owners.append(make_user_urn(owner)) + + return formatted_owners + + @staticmethod + def format_group_owners(owners: List[str]) -> List[str]: + formatted_owners: List[str] = [] + + for owner in owners: + if owner.startswith("urn:li:"): + formatted_owners.append(owner) + else: + formatted_owners.append(make_group_urn(owner)) + + return formatted_owners + + def to_yaml( + self, + file: Path, + ) -> None: + with open(file, "w") as fp: + yaml = YAML(typ="rt") # default, if not specified, is 'rt' (round-trip) + yaml.indent(mapping=2, sequence=4, offset=2) + yaml.default_flow_style = False + yaml.dump(self.dict(), fp) diff --git a/metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py b/metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py new file mode 100644 index 0000000000000..c227d8fc05366 --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py @@ -0,0 +1,27 @@ +UPLOAD_ENTITIES_FOR_FORMS = """ +mutation batchAssignForm {{ + batchAssignForm( + input: {{ + formUrn: "{form_urn}", + entityUrns: [{entity_urns}] + }} + ) +}} +""" + +FIELD_FILTER_TEMPLATE = ( + """{{ field: "{field}", values: [{values}], condition: EQUAL, negated: false }}""" +) + +CREATE_DYNAMIC_FORM_ASSIGNMENT = """ +mutation createDynamicFormAssignment {{ + createDynamicFormAssignment( + input: {{ + formUrn: "{form_urn}" + orFilters: [{{ + and: [{filters}] + }}] + ) +}} +""" diff --git a/metadata-ingestion/src/datahub/api/entities/structuredproperties/__init__.py b/metadata-ingestion/src/datahub/api/entities/structuredproperties/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py b/metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py new file mode 100644 index 0000000000000..af9bf3dccac5c --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py @@ -0,0 +1,185 @@ +import logging +from enum import Enum +from pathlib import Path +from typing import List, Optional + +import yaml +from pydantic import validator +from ruamel.yaml import YAML + +from datahub.configuration.common import ConfigModel +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.schema_classes import ( + PropertyValueClass, + StructuredPropertyDefinitionClass, +) +from datahub.utilities.urns.urn import Urn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class 
AllowedTypes(Enum): + STRING = "string" + RICH_TEXT = "rich_text" + NUMBER = "number" + DATE = "date" + URN = "urn" + + @staticmethod + def check_allowed_type(value: str) -> bool: + return value in [allowed_type.value for allowed_type in AllowedTypes] + + @staticmethod + def values(): + return ", ".join([allowed_type.value for allowed_type in AllowedTypes]) + + +class AllowedValue(ConfigModel): + value: str + description: Optional[str] + + +class TypeQualifierAllowedTypes(ConfigModel): + allowed_types: List[str] + + @validator("allowed_types") + def validate_allowed_types(cls, v): + validated_entity_type_urns = [] + if v: + with get_default_graph() as graph: + for et in v: + validated_urn = Urn.make_entity_type_urn(et) + if graph.exists(validated_urn): + validated_entity_type_urns.append(validated_urn) + else: + logger.warn( + f"Input {et} is not a valid entity type urn. Skipping." + ) + v = validated_entity_type_urns + if not v: + logger.warn("No allowed_types given within type_qualifier.") + return v + + +class StructuredProperties(ConfigModel): + id: Optional[str] + urn: Optional[str] + qualified_name: Optional[str] + type: str + value_entity_types: Optional[List[str]] + description: Optional[str] + display_name: Optional[str] + entity_types: Optional[List[str]] + cardinality: Optional[str] + allowed_values: Optional[List[AllowedValue]] + type_qualifier: Optional[TypeQualifierAllowedTypes] + + @property + def fqn(self) -> str: + assert self.urn is not None + return ( + self.qualified_name + or self.id + or Urn.create_from_string(self.urn).get_entity_id()[0] + ) + + @validator("urn", pre=True, always=True) + def urn_must_be_present(cls, v, values): + if not v: + assert "id" in values, "id must be present if urn is not" + return f"urn:li:structuredProperty:{values['id']}" + return v + + @staticmethod + def create(file: str) -> None: + emitter: DataHubGraph + + with get_default_graph() as emitter: + with open(file, "r") as fp: + structuredproperties: List[dict] = yaml.safe_load(fp) + for structuredproperty_raw in structuredproperties: + structuredproperty = StructuredProperties.parse_obj( + structuredproperty_raw + ) + if not structuredproperty.type.islower(): + structuredproperty.type = structuredproperty.type.lower() + logger.warn( + f"Structured property type should be lowercase. Updated to {structuredproperty.type}" + ) + if not AllowedTypes.check_allowed_type(structuredproperty.type): + raise ValueError( + f"Type {structuredproperty.type} is not allowed. 
Allowed types are {AllowedTypes.values()}" + ) + mcp = MetadataChangeProposalWrapper( + entityUrn=structuredproperty.urn, + aspect=StructuredPropertyDefinitionClass( + qualifiedName=structuredproperty.fqn, + valueType=Urn.make_data_type_urn(structuredproperty.type), + displayName=structuredproperty.display_name, + description=structuredproperty.description, + entityTypes=[ + Urn.make_entity_type_urn(entity_type) + for entity_type in structuredproperty.entity_types or [] + ], + cardinality=structuredproperty.cardinality, + allowedValues=[ + PropertyValueClass( + value=v.value, description=v.description + ) + for v in structuredproperty.allowed_values + ] + if structuredproperty.allowed_values + else None, + typeQualifier={ + "allowedTypes": structuredproperty.type_qualifier.allowed_types + } + if structuredproperty.type_qualifier + else None, + ), + ) + emitter.emit_mcp(mcp) + + logger.info(f"Created structured property {structuredproperty.urn}") + + @classmethod + def from_datahub(cls, graph: DataHubGraph, urn: str) -> "StructuredProperties": + + structured_property: Optional[ + StructuredPropertyDefinitionClass + ] = graph.get_aspect(urn, StructuredPropertyDefinitionClass) + assert structured_property is not None + return StructuredProperties( + urn=urn, + qualified_name=structured_property.qualifiedName, + display_name=structured_property.displayName, + type=structured_property.valueType, + description=structured_property.description, + entity_types=structured_property.entityTypes, + cardinality=structured_property.cardinality, + allowed_values=[ + AllowedValue( + value=av.value, + description=av.description, + ) + for av in structured_property.allowedValues or [] + ] + if structured_property.allowedValues is not None + else None, + type_qualifier={ + "allowed_types": structured_property.typeQualifier.get("allowedTypes") + } + if structured_property.typeQualifier + else None, + ) + + def to_yaml( + self, + file: Path, + ) -> None: + with open(file, "w") as fp: + yaml = YAML(typ="rt") # default, if not specified, is 'rt' (round-trip) + yaml.indent(mapping=2, sequence=4, offset=2) + yaml.default_flow_style = False + yaml.dump(self.dict(), fp) diff --git a/metadata-ingestion/src/datahub/cli/docker_check.py b/metadata-ingestion/src/datahub/cli/docker_check.py index 97b88cbc8b8eb..47b89af6dfd04 100644 --- a/metadata-ingestion/src/datahub/cli/docker_check.py +++ b/metadata-ingestion/src/datahub/cli/docker_check.py @@ -193,6 +193,11 @@ def check_docker_quickstart() -> QuickstartStatus: .labels.get("com.docker.compose.project.config_files") .split(",") ) + + # If using profiles, use the alternative status check + if config_files and "/profiles/" in config_files[0]: + return check_docker_quickstart_profiles(client) + all_containers = set() for config_file in config_files: with open(config_file, "r") as config_file: @@ -234,3 +239,35 @@ def check_docker_quickstart() -> QuickstartStatus: ) return QuickstartStatus(container_statuses) + + +def check_docker_quickstart_profiles(client: docker.DockerClient) -> QuickstartStatus: + container_statuses: List[DockerContainerStatus] = [] + containers = client.containers.list( + all=True, + filters={"label": "io.datahubproject.datahub.component=gms"}, + # We can get race conditions between docker running up / recreating + # containers and our status checks. + ignore_removed=True, + ) + if len(containers) == 0: + return QuickstartStatus([]) + + existing_containers = set() + # Check that the containers are running and healthy. 
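+ # Mapping used below: not running -> DIED; Health == "starting" -> STARTING; any other non-"healthy" Health -> UNHEALTHY.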
+ container: docker.models.containers.Container + for container in containers: + name = container.labels.get("com.docker.compose.service", container.name) + existing_containers.add(name) + status = ContainerStatus.OK + if container.status != "running": + status = ContainerStatus.DIED + elif "Health" in container.attrs["State"]: + if container.attrs["State"]["Health"]["Status"] == "starting": + status = ContainerStatus.STARTING + elif container.attrs["State"]["Health"]["Status"] != "healthy": + status = ContainerStatus.UNHEALTHY + + container_statuses.append(DockerContainerStatus(name, status)) + + return QuickstartStatus(container_statuses) diff --git a/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py b/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py index 5d6c65512354a..a52a9dddff127 100644 --- a/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py +++ b/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py @@ -56,7 +56,6 @@ def _abort_if_non_existent_urn(graph: DataHubGraph, urn: str, operation: str) -> def _print_diff(orig_file, new_file): - with open(orig_file) as fp: orig_lines = fp.readlines() with open(new_file) as fp: @@ -388,7 +387,7 @@ def add_asset(urn: str, asset: str, validate_assets: bool) -> None: graph.emit(mcp) -@dataproduct.command(name="remove_asset", help="Add an asset to a Data Product") +@dataproduct.command(name="remove_asset", help="Remove an asset from a Data Product") @click.option("--urn", required=True, type=str) @click.option("--asset", required=True, type=str) @click.option( diff --git a/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py b/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py new file mode 100644 index 0000000000000..c702d0ec28961 --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py @@ -0,0 +1,67 @@ +import json +import logging +from pathlib import Path + +import click +from click_default_group import DefaultGroup + +from datahub.api.entities.dataset.dataset import Dataset +from datahub.ingestion.graph.client import get_default_graph +from datahub.telemetry import telemetry +from datahub.upgrade import upgrade + +logger = logging.getLogger(__name__) + + +@click.group(cls=DefaultGroup, default="upsert") +def dataset() -> None: + """A group of commands to interact with the Dataset entity in DataHub.""" + pass + + +@dataset.command( + name="upsert", +) +@click.option("-f", "--file", required=True, type=click.Path(exists=True)) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def upsert(file: Path) -> None: + """Upsert attributes to a Dataset in DataHub.""" + + with get_default_graph() as graph: + for dataset in Dataset.from_yaml(str(file)): + try: + for mcp in dataset.generate_mcp(): + graph.emit(mcp) + click.secho(f"Update succeeded for urn {dataset.urn}.", fg="green") + except Exception as e: + click.secho( + f"Update failed for dataset {dataset.urn} 
due to {e}", + fg="red", + ) + + +@dataset.command( + name="get", +) +@click.option("--urn", required=True, type=str) +@click.option("--to-file", required=False, type=str) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def get(urn: str, to_file: str) -> None: + """Get a Dataset from DataHub""" + + if not urn.startswith("urn:li:dataset:"): + urn = f"urn:li:dataset:{urn}" + + with get_default_graph() as graph: + if graph.exists(urn): + dataset: Dataset = Dataset.from_datahub(graph=graph, urn=urn) + click.secho( + f"{json.dumps(dataset.dict(exclude_unset=True, exclude_none=True), indent=2)}" + ) + if to_file: + dataset.to_yaml(Path(to_file)) + click.secho(f"Dataset yaml written to {to_file}", fg="green") + else: + click.secho(f"Dataset {urn} does not exist") diff --git a/metadata-ingestion/src/datahub/cli/specific/forms_cli.py b/metadata-ingestion/src/datahub/cli/specific/forms_cli.py new file mode 100644 index 0000000000000..a494396909b32 --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/specific/forms_cli.py @@ -0,0 +1,53 @@ +import json +import logging +from pathlib import Path + +import click +from click_default_group import DefaultGroup + +from datahub.api.entities.forms.forms import Forms +from datahub.ingestion.graph.client import get_default_graph +from datahub.telemetry import telemetry +from datahub.upgrade import upgrade + +logger = logging.getLogger(__name__) + + +@click.group(cls=DefaultGroup, default="upsert") +def forms() -> None: + """A group of commands to interact with forms in DataHub.""" + pass + + +@forms.command( + name="upsert", +) +@click.option("-f", "--file", required=True, type=click.Path(exists=True)) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def upsert(file: Path) -> None: + """Upsert forms in DataHub.""" + + Forms.create(str(file)) + + +@forms.command( + name="get", +) +@click.option("--urn", required=True, type=str) +@click.option("--to-file", required=False, type=str) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def get(urn: str, to_file: str) -> None: + """Get form from DataHub""" + with get_default_graph() as graph: + if graph.exists(urn): + form: Forms = Forms.from_datahub(graph=graph, urn=urn) + click.secho( + f"{json.dumps(form.dict(exclude_unset=True, exclude_none=True), indent=2)}" + ) + if to_file: + form.to_yaml(Path(to_file)) + click.secho(f"Form yaml written to {to_file}", fg="green") + else: + click.secho(f"Form {urn} does not exist") diff --git a/metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py b/metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py new file mode 100644 index 0000000000000..4162d44b9b0ea --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py @@ -0,0 +1,62 @@ +import json +import logging +from pathlib import Path + +import click +from click_default_group import DefaultGroup + +from datahub.api.entities.structuredproperties.structuredproperties import ( + StructuredProperties, +) +from datahub.ingestion.graph.client import get_default_graph +from datahub.telemetry import telemetry +from datahub.upgrade import upgrade +from datahub.utilities.urns.urn import Urn + +logger = logging.getLogger(__name__) + + +@click.group(cls=DefaultGroup, default="upsert") +def properties() -> None: + """A group of commands to interact with structured properties in DataHub.""" + pass + + +@properties.command( + name="upsert", +) +@click.option("-f", "--file", required=True, type=click.Path(exists=True)) +@upgrade.check_upgrade 
+@telemetry.with_telemetry() +def upsert(file: Path) -> None: + """Upsert structured properties in DataHub.""" + + StructuredProperties.create(str(file)) + + +@properties.command( + name="get", +) +@click.option("--urn", required=True, type=str) +@click.option("--to-file", required=False, type=str) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def get(urn: str, to_file: str) -> None: + """Get structured properties from DataHub""" + urn = Urn.make_structured_property_urn(urn) + + with get_default_graph() as graph: + if graph.exists(urn): + structuredproperties: StructuredProperties = ( + StructuredProperties.from_datahub(graph=graph, urn=urn) + ) + click.secho( + f"{json.dumps(structuredproperties.dict(exclude_unset=True, exclude_none=True), indent=2)}" + ) + if to_file: + structuredproperties.to_yaml(Path(to_file)) + click.secho( + f"Structured property yaml written to {to_file}", fg="green" + ) + else: + click.secho(f"Structured property {urn} does not exist") diff --git a/metadata-ingestion/src/datahub/entrypoints.py b/metadata-ingestion/src/datahub/entrypoints.py index 0cd37cc939854..4989f984badcc 100644 --- a/metadata-ingestion/src/datahub/entrypoints.py +++ b/metadata-ingestion/src/datahub/entrypoints.py @@ -23,7 +23,10 @@ from datahub.cli.put_cli import put from datahub.cli.specific.datacontract_cli import datacontract from datahub.cli.specific.dataproduct_cli import dataproduct +from datahub.cli.specific.dataset_cli import dataset +from datahub.cli.specific.forms_cli import forms from datahub.cli.specific.group_cli import group +from datahub.cli.specific.structuredproperties_cli import properties from datahub.cli.specific.user_cli import user from datahub.cli.state_cli import state from datahub.cli.telemetry import telemetry as telemetry_cli @@ -144,6 +147,9 @@ def init() -> None: datahub.add_command(user) datahub.add_command(group) datahub.add_command(dataproduct) +datahub.add_command(dataset) +datahub.add_command(properties) +datahub.add_command(forms) datahub.add_command(datacontract) try: diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index 6baa70aa581d6..675c87b13313d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -71,6 +71,7 @@ class GlossaryNodeConfig(ConfigModel): terms: Optional[List["GlossaryTermConfig"]] nodes: Optional[List["GlossaryNodeConfig"]] knowledge_links: Optional[List[KnowledgeCard]] + custom_properties: Optional[Dict[str, str]] # Private fields. 
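# _urn is derived by the glossary loader while walking the node tree; it is not supplied in the YAML.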
_urn: str @@ -252,6 +253,7 @@ def get_mces_from_node( definition=glossaryNode.description, parentNode=parentNode, name=glossaryNode.name, + customProperties=glossaryNode.custom_properties, ) node_owners = parentOwners if glossaryNode.owners is not None: diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index 62ee4fc57b61b..d3c3de36198e3 100644 --- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -23,6 +23,7 @@ ) from datahub.specific.custom_properties import CustomPropertiesPatchHelper from datahub.specific.ownership import OwnershipPatchHelper +from datahub.specific.structured_properties import StructuredPropertiesPatchHelper from datahub.utilities.urns.tag_urn import TagUrn from datahub.utilities.urns.urn import Urn @@ -103,6 +104,7 @@ def __init__( self, DatasetProperties.ASPECT_NAME ) self.ownership_patch_helper = OwnershipPatchHelper(self) + self.structured_properties_patch_helper = StructuredPropertiesPatchHelper(self) def add_owner(self, owner: Owner) -> "DatasetPatchBuilder": self.ownership_patch_helper.add_owner(owner) @@ -331,3 +333,33 @@ def set_display_name(self, display_name: str) -> "DatasetPatchBuilder": value=display_name, ) return self + + def set_structured_property( + self, property_name: str, value: Union[str, float, List[Union[str, float]]] + ) -> "DatasetPatchBuilder": + """ + This is a helper method to set a structured property. + @param property_name: the name of the property (either bare or urn form) + @param value: the value of the property (for multi-valued properties, this can be a list) + """ + self.structured_properties_patch_helper.set_property(property_name, value) + return self + + def add_structured_property( + self, property_name: str, value: Union[str, float] + ) -> "DatasetPatchBuilder": + """ + This is a helper method to add a structured property. + @param property_name: the name of the property (either bare or urn form) + @param value: the value of the property (for multi-valued properties, this value will be appended to the list) + """ + self.structured_properties_patch_helper.add_property(property_name, value) + return self + + def remove_structured_property(self, property_name: str) -> "DatasetPatchBuilder": + """ + This is a helper method to remove a structured property. 
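+ Internally this emits a JSON-patch "remove" op against the structuredProperties aspect.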
+ @param property_name: the name of the property (either bare or urn form) + """ + self.structured_properties_patch_helper.remove_property(property_name) + return self diff --git a/metadata-ingestion/src/datahub/specific/structured_properties.py b/metadata-ingestion/src/datahub/specific/structured_properties.py new file mode 100644 index 0000000000000..6b2592bf1cbba --- /dev/null +++ b/metadata-ingestion/src/datahub/specific/structured_properties.py @@ -0,0 +1,53 @@ +from typing import Generic, List, TypeVar, Union + +from datahub.emitter.mcp_patch_builder import MetadataPatchProposal +from datahub.metadata.schema_classes import StructuredPropertyValueAssignmentClass +from datahub.utilities.urns.structured_properties_urn import ( + make_structured_property_urn, +) + +T = TypeVar("T", bound=MetadataPatchProposal) + + +class StructuredPropertiesPatchHelper(Generic[T]): + def __init__( + self, + parent: T, + aspect_name: str = "structuredProperties", + ) -> None: + self.aspect_name = aspect_name + self._parent = parent + self.aspect_field = "properties" + + def parent(self) -> T: + return self._parent + + def set_property( + self, key: str, value: Union[str, float, List[Union[str, float]]] + ) -> "StructuredPropertiesPatchHelper": + self.remove_property(key) + self.add_property(key, value) + return self + + def remove_property(self, key: str) -> "StructuredPropertiesPatchHelper": + self._parent._add_patch( + self.aspect_name, + "remove", + path=f"/{self.aspect_field}/{make_structured_property_urn(key)}", + value={}, + ) + return self + + def add_property( + self, key: str, value: Union[str, float, List[Union[str, float]]] + ) -> "StructuredPropertiesPatchHelper": + self._parent._add_patch( + self.aspect_name, + "add", + path=f"/{self.aspect_field}/{make_structured_property_urn(key)}", + value=StructuredPropertyValueAssignmentClass( + propertyUrn=make_structured_property_urn(key), + values=value if isinstance(value, list) else [value], + ), + ) + return self diff --git a/metadata-ingestion/src/datahub/utilities/urn_encoder.py b/metadata-ingestion/src/datahub/utilities/urn_encoder.py index 093c9ade8c152..b39dd04370682 100644 --- a/metadata-ingestion/src/datahub/utilities/urn_encoder.py +++ b/metadata-ingestion/src/datahub/utilities/urn_encoder.py @@ -3,6 +3,7 @@ # NOTE: Frontend relies on encoding these three characters. Specifically, we decode and encode schema fields for column level lineage. # If this changes, make appropriate changes to datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts +# We also rely on encoding these exact three characters when generating schemaField urns in our graphQL layer. Update SchemaFieldUtils if this changes. 
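+# These characters delimit tuple parts in urns (e.g. "urn:li:dataset:(platform,name,env)"), so they must be encoded when they appear inside a key part.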
RESERVED_CHARS = {",", "(", ")"} RESERVED_CHARS_EXTENDED = RESERVED_CHARS.union({"%"}) diff --git a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py index fbde0d6e6d69a..1b50d4b2fe810 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py +++ b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py @@ -207,6 +207,46 @@ def url_encode(urn: str) -> str: # safe='' encodes '/' as '%2F' return urllib.parse.quote(urn, safe="") + @staticmethod + def make_data_type_urn(type: str) -> str: + if type.startswith("urn:li:dataType:"): + return type + else: + if not type.startswith("datahub."): + # we want all data types to be fully qualified within the datahub namespace + type = f"datahub.{type}" + return f"urn:li:dataType:{type}" + + @staticmethod + def get_data_type_from_urn(urn: str) -> str: + if urn.startswith("urn:li:dataType:"): + # urn is formatted like urn:li:dataType:datahub.{dataType}, so extract dataType by + # parsing by . and getting the last element + return urn.split(".")[-1] + return urn + + @staticmethod + def make_entity_type_urn(entity_type: str) -> str: + if entity_type.startswith("urn:li:entityType:"): + return entity_type + else: + if not entity_type.startswith("datahub."): + # we want all entity types to be fully qualified within the datahub namespace + entity_type = f"datahub.{entity_type}" + return f"urn:li:entityType:{entity_type}" + + @staticmethod + def make_structured_property_urn(structured_property: str) -> str: + if not structured_property.startswith("urn:li:structuredProperty:"): + return f"urn:li:structuredProperty:{structured_property}" + return structured_property + + @staticmethod + def make_form_urn(form: str) -> str: + if not form.startswith("urn:li:form:"): + return f"urn:li:form:{form}" + return form + class _SpecificUrn(Urn): ENTITY_TYPE: str = "" diff --git a/metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py b/metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py new file mode 100644 index 0000000000000..5bd36a0656d99 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py @@ -0,0 +1,5 @@ +from datahub.metadata.urns import StructuredPropertyUrn # noqa: F401 + + +def make_structured_property_urn(structured_property_id: str) -> str: + return str(StructuredPropertyUrn.create_from_string(structured_property_id)) diff --git a/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml b/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml index da238701e718d..c919dde18b187 100644 --- a/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml +++ b/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml @@ -10,6 +10,8 @@ nodes: knowledge_links: - label: Wiki link for classification url: "https://en.wikipedia.org/wiki/Classification" + custom_properties: + is_confidential: true terms: - name: Sensitive description: Sensitive Data diff --git a/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json b/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json index b8cc922f0c1c3..1dce940b44390 100644 --- a/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json +++ b/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json @@ -6,6 +6,9 @@ "aspects": [ { 
"com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": { + "is_confidential": "True" + }, "definition": "A set of terms related to Data Classification", "name": "Classification" } @@ -29,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +58,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -119,7 +125,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +166,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -176,7 +184,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +225,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -226,6 +236,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "All terms related to personal information", "name": "Personal Information" } @@ -249,7 +260,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -294,7 +306,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -377,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -387,6 +402,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Provides basic concepts such as account, account holder, account provider, relationship manager that are commonly used by financial services providers to describe customers and to determine counterparty identities", "name": "Clients And Accounts" } @@ -410,7 +426,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": 
"datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +475,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -496,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -506,6 +525,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Common Business KPIs", "name": "KPIs" } @@ -529,7 +549,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +604,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -597,7 +620,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -612,7 +636,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -627,7 +652,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -642,7 +668,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +684,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -672,7 +700,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -687,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -702,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -717,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": 
"no-run-id-provided" } }, { @@ -732,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -747,7 +780,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -762,7 +796,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json b/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json index e2b525658e36e..af85f6e2a3518 100644 --- a/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json +++ b/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json @@ -6,6 +6,9 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": { + "is_confidential": "True" + }, "definition": "A set of terms related to Data Classification", "name": "Classification" } @@ -29,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +58,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -119,7 +125,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +166,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -176,7 +184,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +225,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -226,6 +236,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "All terms related to personal information", "name": "Personal Information" } @@ -249,7 +260,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -294,7 +306,8 @@ }, "systemMetadata": { "lastObserved": 
1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -377,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -387,6 +402,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Provides basic concepts such as account, account holder, account provider, relationship manager that are commonly used by financial services providers to describe customers and to determine counterparty identities", "name": "Clients And Accounts" } @@ -410,7 +426,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +475,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -496,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -506,6 +525,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Common Business KPIs", "name": "KPIs" } @@ -529,7 +549,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +604,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -597,7 +620,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -612,7 +636,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -627,7 +652,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -642,7 +668,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": 
"no-run-id-provided" } }, { @@ -657,7 +684,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -672,7 +700,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -687,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -702,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -717,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -732,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -747,7 +780,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -762,7 +796,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/remote/content/business_glossary.yml b/metadata-ingestion/tests/integration/remote/content/business_glossary.yml index 59bea251a24e1..e0bee3eb4468f 100644 --- a/metadata-ingestion/tests/integration/remote/content/business_glossary.yml +++ b/metadata-ingestion/tests/integration/remote/content/business_glossary.yml @@ -10,6 +10,8 @@ nodes: knowledge_links: - label: Wiki link for classification url: "https://en.wikipedia.org/wiki/Classification" + custom_properties: + is_confidential: true terms: - name: Sensitive description: Sensitive Data diff --git a/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json b/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json index 1e1932822aee8..a3adcb7639712 100644 --- a/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json +++ b/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json @@ -6,6 +6,9 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": { + "is_confidential": "True" + }, "definition": "A set of terms related to Data Classification", "name": "Classification" } @@ -29,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +58,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - 
"runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -119,7 +125,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +166,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -176,7 +184,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +225,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -226,6 +236,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "All terms related to personal information", "name": "Personal Information" } @@ -249,7 +260,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -294,7 +306,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -377,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -387,6 +402,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Provides basic concepts such as account, account holder, account provider, relationship manager that are commonly used by financial services providers to describe customers and to determine counterparty identities", "name": "Clients And Accounts" } @@ -410,7 +426,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +475,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -496,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -506,6 +525,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Common Business KPIs", "name": "KPIs" } @@ -529,7 +549,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +604,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -597,7 +620,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -612,7 +636,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -627,7 +652,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": 
"remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -642,7 +668,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +684,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -672,7 +700,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -687,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -702,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -717,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -732,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -747,7 +780,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -762,7 +796,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index b14953d7ce021..8e05b7ef8f5d6 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -15,8 +15,7 @@ import org.apache.tools.ant.filters.ReplaceTokens jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation dependencies { - implementation project(':metadata-models') - implementation project(path: ':metadata-models', configuration: "dataTemplate") + implementation project(':entity-registry') implementation(externalDependency.kafkaAvroSerializer) { exclude group: "org.apache.avro" } @@ -81,13 +80,13 @@ shadowJar { // preventing java multi-release JAR leakage // https://github.com/johnrengelman/shadow/issues/729 exclude('module-info.class', 'META-INF/versions/**', - '**/LICENSE', '**/LICENSE.txt', '**/NOTICE', '**/NOTICE.txt') + '**/LICENSE', '**/LICENSE*.txt', '**/NOTICE', '**/NOTICE.txt', 'licenses/**', 'log4j2.xml', 'log4j.xml') mergeServiceFiles() // we relocate namespaces manually, because we want to know exactly which libs we are exposing and why // we can move to automatic relocation using ConfigureShadowRelocation after we get to a good place on these first relocate 'org.springframework', 'datahub.shaded.org.springframework' relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson' - relocate 'org.yaml', 'io.acryl.shaded.org.yaml' // Required for shading snakeyaml + relocate 'org.yaml', 'datahub.shaded.org.yaml' // Required for shading snakeyaml relocate 'net.jcip.annotations', 'datahub.shaded.annotations' relocate 'javassist', 'datahub.shaded.javassist' relocate 'edu.umd.cs.findbugs', 'datahub.shaded.findbugs' @@ -95,6 +94,7 @@ shadowJar { relocate 'antlr', 'datahub.shaded.antlr' relocate 'com.google.common', 'datahub.shaded.com.google.common' relocate 'org.apache.commons', 'datahub.shaded.org.apache.commons' + relocate 
'org.apache.maven', 'datahub.shaded.org.apache.maven' relocate 'org.reflections', 'datahub.shaded.org.reflections' relocate 'st4hidden', 'datahub.shaded.st4hidden' relocate 'org.stringtemplate', 'datahub.shaded.org.stringtemplate' @@ -104,7 +104,6 @@ shadowJar { relocate 'com.github.benmanes.caffeine', 'datahub.shaded.com.github.benmanes.caffeine' relocate 'org.checkerframework', 'datahub.shaded.org.checkerframework' relocate 'com.google.errorprone', 'datahub.shaded.com.google.errorprone' - relocate 'com.sun.jna', 'datahub.shaded.com.sun.jna' // Below jars added for kafka emitter only relocate 'org.apache.avro', 'datahub.shaded.org.apache.avro' relocate 'com.thoughtworks.paranamer', 'datahub.shaded.com.thoughtworks.paranamer' @@ -121,6 +120,9 @@ shadowJar { relocate 'common.message', 'datahub.shaded.common.message' relocate 'org.glassfish', 'datahub.shaded.org.glassfish' relocate 'ch.randelshofer', 'datahub.shaded.ch.randelshofer' + relocate 'io.github.classgraph', 'datahub.shaded.io.github.classgraph' + relocate 'nonapi.io.github.classgraph', 'datahub.shaded.nonapi.io.github.classgraph' + relocate 'com.github.fge', 'datahub.shaded.com.github.fge' finalizedBy checkShadowJar } diff --git a/metadata-integration/java/datahub-client/scripts/check_jar.sh b/metadata-integration/java/datahub-client/scripts/check_jar.sh index 02a1d06b73acf..e2c9ec16d49f8 100755 --- a/metadata-integration/java/datahub-client/scripts/check_jar.sh +++ b/metadata-integration/java/datahub-client/scripts/check_jar.sh @@ -35,7 +35,8 @@ jar -tvf $jarFile |\ grep -v "linux/" |\ grep -v "darwin" |\ grep -v "MetadataChangeProposal.avsc" |\ - grep -v "aix" + grep -v "aix" |\ + grep -v "com/sun/" if [ $? -ne 0 ]; then echo "✅ No unexpected class paths found in ${jarFile}" diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index 5bd10245899e4..1107f552012db 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -18,20 +18,20 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dataset.DatasetLineageType; +import com.linkedin.metadata.aspect.patch.builder.ChartInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DashboardInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DataFlowInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DataJobInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DataJobInputOutputPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.EditableSchemaMetadataPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.OwnershipPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.UpstreamLineagePatchBuilder; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; import datahub.client.file.FileEmitter; import datahub.client.file.FileEmitterConfig; -import datahub.client.patch.chart.ChartInfoPatchBuilder; -import datahub.client.patch.common.OwnershipPatchBuilder; -import datahub.client.patch.dashboard.DashboardInfoPatchBuilder; -import datahub.client.patch.dataflow.DataFlowInfoPatchBuilder; -import 
datahub.client.patch.datajob.DataJobInfoPatchBuilder; -import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; -import datahub.client.patch.dataset.EditableSchemaMetadataPatchBuilder; -import datahub.client.patch.dataset.UpstreamLineagePatchBuilder; import datahub.client.rest.RestEmitter; import datahub.client.rest.RestEmitterConfig; import java.io.IOException; diff --git a/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh b/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh index 930e3ab7be9e1..e3aa181c58801 100755 --- a/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh +++ b/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh @@ -38,7 +38,8 @@ jar -tvf $jarFile |\ grep -v "linux/" |\ grep -v "darwin" |\ grep -v "MetadataChangeProposal.avsc" |\ - grep -v "aix" + grep -v "aix" |\ + grep -v "com/sun/" if [ $? -ne 0 ]; then echo "✅ No unexpected class paths found in ${jarFile}" diff --git a/metadata-integration/java/examples/build.gradle b/metadata-integration/java/examples/build.gradle index ddf574e8c8905..62c80562c7c3b 100644 --- a/metadata-integration/java/examples/build.gradle +++ b/metadata-integration/java/examples/build.gradle @@ -4,7 +4,6 @@ plugins { } dependencies { - implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok @@ -12,8 +11,6 @@ dependencies { implementation externalDependency.typesafeConfig implementation externalDependency.opentracingJdbc - implementation project(path: ':li-utils') - implementation project(path: ':metadata-models') implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') implementation externalDependency.httpAsyncClient diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java index 4cff55afc92de..e84511083b6d9 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java @@ -3,9 +3,9 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DataJobInputOutputPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; -import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java index b30cb5166df70..03f0673cd85a4 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java @@ -1,9 +1,9 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import 
datahub.client.MetadataWriteResponse; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java index 0a89e87060698..eb8f700c4b068 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java @@ -1,9 +1,9 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java index 053c1f068e048..1586d9b069b24 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java @@ -1,9 +1,9 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.HashMap; diff --git a/metadata-integration/java/spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/build.gradle index c5dd9b5012c29..8d6160631bf45 100644 --- a/metadata-integration/java/spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/build.gradle @@ -102,6 +102,7 @@ shadowJar { // prevent jni conflict with spark exclude '**/libzstd-jni.*' exclude '**/com_github_luben_zstd_*' + exclude '**/log4j*.xml' relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson' relocate 'org.slf4j','datahub.shaded.org.slf4j' @@ -113,6 +114,10 @@ shadowJar { relocate 'io.opentracing','datahub.spark2.shaded.io.opentracing' relocate 'io.netty','datahub.spark2.shaded.io.netty' relocate 'ch.randelshofer', 'datahub.shaded.ch.randelshofer' + relocate 'com.sun', 'datahub.shaded.com.sun' + relocate 'avroutil1', 'datahub.shaded.avroutil1' + relocate 'com.github', 'datahub.shaded.com.github' + relocate 'org.apache.maven', 'datahub.shaded.org.apache.maven' finalizedBy checkShadowJar } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh b/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh index 33cac9d562cd8..90a90be768a51 100755 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh @@ 
-30,7 +30,7 @@ echo "--------------------------------------------------------------------" cd "${SMOKE_TEST_ROOT_DIR}"/docker #bring up spark cluster -docker-compose -f spark-docker-compose.yml up -d +docker compose -f spark-docker-compose.yml up -d echo "--------------------------------------------------------------------" echo "Executing spark-submit jobs" diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java b/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java new file mode 100644 index 0000000000000..974406c0be0df --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java @@ -0,0 +1,35 @@ +package com.linkedin.metadata.client; + +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; + +@Builder +public class EntityClientAspectRetriever implements AspectRetriever { + @Getter private final EntityRegistry entityRegistry; + private final SystemEntityClient entityClient; + + @Nullable + @Override + public Aspect getLatestAspectObject(@Nonnull Urn urn, @Nonnull String aspectName) + throws RemoteInvocationException, URISyntaxException { + return entityClient.getLatestAspectObject(urn, aspectName); + } + + @Nonnull + @Override + public Map<Urn, Map<String, Aspect>> getLatestAspectObjects( + Set<Urn> urns, Set<String> aspectNames) throws RemoteInvocationException, URISyntaxException { + return entityClient.getLatestAspects(urns, aspectNames); + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 9b3f42a37b45d..0ebe9ed1d1b66 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -4,6 +4,7 @@ import static com.linkedin.metadata.search.utils.SearchUtils.*; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -18,7 +19,6 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.aspect.EnvelopedAspectArray; @@ -31,7 +31,6 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.query.AutoCompleteResult; @@ -48,6 +47,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import 
com.linkedin.metadata.search.client.CachingEntitySearchService; +import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.shared.ValidationUtils; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -85,15 +85,15 @@ public class JavaEntityClient implements EntityClient { private final Clock _clock = Clock.systemUTC(); - private final EntityService _entityService; + private final EntityService _entityService; private final DeleteEntityService _deleteEntityService; private final EntitySearchService _entitySearchService; private final CachingEntitySearchService _cachingEntitySearchService; private final SearchService _searchService; private final LineageSearchService _lineageSearchService; private final TimeseriesAspectService _timeseriesAspectService; + private final RollbackService rollbackService; private final EventProducer _eventProducer; - private final RestliEntityClient _restliEntityClient; @Nullable public EntityResponse getV2( @@ -713,11 +713,7 @@ public String ingestProposal( Stream.concat(Stream.of(metadataChangeProposal), additionalChanges.stream()); AspectsBatch batch = AspectsBatchImpl.builder() - .mcps( - proposalStream.collect(Collectors.toList()), - auditStamp, - _entityService.getEntityRegistry(), - this) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) .build(); IngestResult one = _entityService.ingestProposal(batch, async).stream().findFirst().get(); @@ -780,9 +776,10 @@ public void producePlatformEvent( } @Override - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + public void rollbackIngestion( + @Nonnull String runId, @Nonnull Authorizer authorizer, @Nonnull Authentication authentication) throws Exception { - _restliEntityClient.rollbackIngestion(runId, authentication); + rollbackService.rollbackIngestion(runId, false, true, authorizer, authentication); } private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java index 31c2846a9c9f3..fa020903c34f0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java @@ -2,18 +2,18 @@ import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClientCache; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.client.CachingEntitySearchService; +import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import javax.annotation.Nonnull; import lombok.Getter; /** Java backed SystemEntityClient */ @@ -24,16 +24,16 @@ public class SystemJavaEntityClient extends JavaEntityClient implements SystemEn 
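// Editor's sketch — illustrative only, not part of this patch. The JavaEntityClient hunks
// above drop the EntityRegistry/RestliEntityClient arguments from
// AspectsBatchImpl.builder().mcps(...), since EntityService itself now acts as the
// AspectRetriever. A minimal caller under that assumption (class and variable names are
// hypothetical) would look roughly like:
import com.linkedin.common.AuditStamp;
import com.linkedin.metadata.aspect.batch.AspectsBatch;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.entity.IngestResult;
import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl;
import com.linkedin.mxe.MetadataChangeProposal;
import java.util.List;

class IngestSketch {
  static IngestResult ingestOne(
      EntityService entityService, MetadataChangeProposal proposal, AuditStamp auditStamp) {
    // The service doubles as the AspectRetriever argument to mcps(...).
    AspectsBatch batch =
        AspectsBatchImpl.builder().mcps(List.of(proposal), auditStamp, entityService).build();
    // Same call shape as ingestProposal(batch, async) in the hunk above (synchronous here).
    return entityService.ingestProposal(batch, false).stream().findFirst().get();
  }
}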
private final Authentication systemAuthentication; public SystemJavaEntityClient( - EntityService entityService, + EntityService entityService, DeleteEntityService deleteEntityService, EntitySearchService entitySearchService, CachingEntitySearchService cachingEntitySearchService, SearchService searchService, LineageSearchService lineageSearchService, TimeseriesAspectService timeseriesAspectService, + RollbackService rollbackService, EventProducer eventProducer, - RestliEntityClient restliEntityClient, - Authentication systemAuthentication, + @Nonnull Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { super( entityService, @@ -43,8 +43,8 @@ public SystemJavaEntityClient( searchService, lineageSearchService, timeseriesAspectService, - eventProducer, - restliEntityClient); + rollbackService, + eventProducer); this.systemAuthentication = systemAuthentication; this.entityClientCache = buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index ed69e919a7b24..b3b11d200ec0d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -12,6 +12,7 @@ import static com.linkedin.metadata.Constants.UI_SOURCE; import static com.linkedin.metadata.search.utils.BrowsePathUtils.buildDataPlatformUrn; import static com.linkedin.metadata.search.utils.BrowsePathUtils.getDefaultBrowsePath; +import static com.linkedin.metadata.utils.GenericRecordUtils.entityResponseToAspectMap; import static com.linkedin.metadata.utils.PegasusUtils.constructMCL; import static com.linkedin.metadata.utils.PegasusUtils.getDataTemplateClassFromSchema; import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; @@ -46,7 +47,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.Aspect; @@ -84,6 +84,7 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.util.Pair; import io.ebean.PagedList; import io.ebean.Transaction; @@ -166,14 +167,12 @@ public class EntityServiceImpl implements EntityService { private final Integer ebeanMaxTransactionRetry; - private SystemEntityClient systemEntityClient; - public EntityServiceImpl( @Nonnull final AspectDao aspectDao, @Nonnull final EventProducer producer, @Nonnull final EntityRegistry entityRegistry, final boolean alwaysEmitChangeLog, - final UpdateIndicesService updateIndicesService, + @Nullable final UpdateIndicesService updateIndicesService, final PreProcessHooks preProcessHooks) { this( aspectDao, @@ -190,9 +189,9 @@ public EntityServiceImpl( @Nonnull final EventProducer producer, @Nonnull final EntityRegistry entityRegistry, final boolean alwaysEmitChangeLog, - final UpdateIndicesService updateIndicesService, + @Nullable final UpdateIndicesService updateIndicesService, final PreProcessHooks preProcessHooks, - final Integer retry) { + @Nullable final Integer retry) { _aspectDao = aspectDao; _producer = producer; @@ -200,21 +199,13 
@@ public EntityServiceImpl( _entityToValidAspects = buildEntityToValidAspects(entityRegistry); _alwaysEmitChangeLog = alwaysEmitChangeLog; _updateIndicesService = updateIndicesService; + if (_updateIndicesService != null) { + _updateIndicesService.initializeAspectRetriever(this); + } _preProcessHooks = preProcessHooks; ebeanMaxTransactionRetry = retry != null ? retry : DEFAULT_MAX_TRANSACTION_RETRY; } - @Override - public void setSystemEntityClient(SystemEntityClient systemEntityClient) { - this.systemEntityClient = systemEntityClient; - this._updateIndicesService.setSystemEntityClient(systemEntityClient); - } - - @Override - public SystemEntityClient getSystemEntityClient() { - return this.systemEntityClient; - } - @Override public RecordTemplate getLatestAspect(@Nonnull Urn urn, @Nonnull String aspectName) { log.debug("Invoked getLatestAspect with urn {}, aspect {}", urn, aspectName); @@ -634,7 +625,7 @@ public List ingestAspects( .aspect(pair.getValue()) .systemMetadata(systemMetadata) .auditStamp(auditStamp) - .build(_entityRegistry, systemEntityClient)) + .build(this)) .collect(Collectors.toList()); return ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); } @@ -693,7 +684,7 @@ private List ingestAspectsToLocalDB( // 1. Convert patches to full upserts // 2. Run any entity/aspect level hooks Pair>, List> updatedItems = - aspectsBatch.toUpsertBatchItems(latestAspects, _entityRegistry, systemEntityClient); + aspectsBatch.toUpsertBatchItems(latestAspects, this); // Fetch additional information if needed final Map> updatedLatestAspects; @@ -725,8 +716,7 @@ private List ingestAspectsToLocalDB( previousAspect == null ? null : previousAspect.getRecordTemplate(_entityRegistry), - _entityRegistry, - systemEntityClient); + this); } catch (AspectValidationException e) { throw new RuntimeException(e); } @@ -934,7 +924,7 @@ public RecordTemplate ingestAspectIfNotPresent( .aspect(newValue) .systemMetadata(systemMetadata) .auditStamp(auditStamp) - .build(_entityRegistry, systemEntityClient)) + .build(this)) .build(); List ingested = ingestAspects(aspectsBatch, true, false); @@ -954,10 +944,7 @@ public RecordTemplate ingestAspectIfNotPresent( public IngestResult ingestProposal( MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { return ingestProposal( - AspectsBatchImpl.builder() - .mcps(List.of(proposal), auditStamp, getEntityRegistry(), systemEntityClient) - .build(), - async) + AspectsBatchImpl.builder().mcps(List.of(proposal), auditStamp, this).build(), async) .stream() .findFirst() .get(); @@ -1545,7 +1532,7 @@ protected Map getSnapshotRecords( @Nonnull protected Map> getLatestAspectUnions( @Nonnull final Set urns, @Nonnull final Set aspectNames) { - return getLatestAspects(urns, aspectNames).entrySet().stream() + return this.getLatestAspects(urns, aspectNames).entrySet().stream() .collect( Collectors.toMap( Map.Entry::getKey, @@ -1694,7 +1681,7 @@ private void ingestSnapshotUnion( .aspect(pair.getValue()) .auditStamp(auditStamp) .systemMetadata(systemMetadata) - .build(_entityRegistry, systemEntityClient)) + .build(this)) .collect(Collectors.toList())) .build(); @@ -1796,6 +1783,7 @@ private static Map> buildEntityToValidAspects( } @Override + @Nonnull public EntityRegistry getEntityRegistry() { return _entityRegistry; } @@ -2487,4 +2475,12 @@ private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspec aspectSpec.getRelationshipFieldSpecs(); return 
relationshipFieldSpecs.stream().anyMatch(RelationshipFieldSpec::isLineageRelationship); } + + @Nonnull + @Override + public Map<Urn, Map<String, Aspect>> getLatestAspectObjects( + Set<Urn> urns, Set<String> aspectNames) throws RemoteInvocationException, URISyntaxException { + String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); + return entityResponseToAspectMap(getEntitiesV2(entityName, urns, aspectNames)); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java index 4d3ac9a550553..f353e5142755d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java @@ -58,17 +58,11 @@ public static AuditStamp getAuditStamp(Urn actor) { public static void ingestChangeProposals( @Nonnull List<MetadataChangeProposal> changes, - @Nonnull EntityService entityService, + @Nonnull EntityService entityService, @Nonnull Urn actor, @Nonnull Boolean async) { entityService.ingestProposal( - AspectsBatchImpl.builder() - .mcps( - changes, - getAuditStamp(actor), - entityService.getEntityRegistry(), - entityService.getSystemEntityClient()) - .build(), + AspectsBatchImpl.builder().mcps(changes, getAuditStamp(actor), entityService).build(), async); } @@ -85,7 +79,7 @@ public static void ingestChangeProposals( public static RecordTemplate getAspectFromEntity( String entityUrn, String aspectName, - EntityService entityService, + EntityService entityService, RecordTemplate defaultValue) { Urn urn = getUrnFromString(entityUrn); if (urn == null) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java index f1b7d761087b4..4d9d2b3c416b7 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java @@ -17,12 +17,12 @@ import com.linkedin.common.urn.Urn; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.batch.AspectsBatch; +import com.linkedin.metadata.aspect.batch.UpsertItem; import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -45,28 +45,22 @@ @Slf4j @RequiredArgsConstructor -public class CassandraRetentionService extends RetentionService { - private final EntityService _entityService; +public class CassandraRetentionService extends RetentionService { + private final EntityService _entityService; private final CqlSession _cqlSession; private final int _batchSize; private final Clock _clock = Clock.systemUTC(); @Override - public EntityService getEntityService() { + public EntityService getEntityService() { return _entityService; } @Override protected AspectsBatch buildAspectsBatch( List<MetadataChangeProposal> mcps, @Nonnull AuditStamp auditStamp) { - return AspectsBatchImpl.builder() - .mcps( - mcps, - auditStamp, - _entityService.getEntityRegistry(), - 
_entityService.getSystemEntityClient()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, auditStamp, _entityService).build(); } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java index d1f54f8a7e6e5..eba550714766b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java @@ -5,10 +5,10 @@ import com.linkedin.common.urn.Urn; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.batch.AspectsBatch; +import com.linkedin.metadata.aspect.batch.UpsertItem; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -40,28 +40,22 @@ @Slf4j @RequiredArgsConstructor -public class EbeanRetentionService extends RetentionService { - private final EntityService _entityService; +public class EbeanRetentionService extends RetentionService { + private final EntityService _entityService; private final Database _server; private final int _batchSize; private final Clock _clock = Clock.systemUTC(); @Override - public EntityService getEntityService() { + public EntityService getEntityService() { return _entityService; } @Override protected AspectsBatch buildAspectsBatch( List mcps, @Nonnull AuditStamp auditStamp) { - return AspectsBatchImpl.builder() - .mcps( - mcps, - auditStamp, - _entityService.getEntityRegistry(), - _entityService.getSystemEntityClient()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, auditStamp, _entityService).build(); } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index 4b75fe73a12e5..80fb4e3e1b940 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -8,7 +8,6 @@ import com.linkedin.metadata.aspect.batch.SystemAspect; import com.linkedin.metadata.aspect.batch.UpsertItem; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; @@ -33,15 +32,12 @@ public class AspectsBatchImpl implements AspectsBatch { * Convert patches to upserts, apply hooks at the aspect and batch level. 
* * @param latestAspects latest version in the database - * @param entityRegistry entity registry * @return The new urn/aspectnames and the uniform upserts, possibly expanded/mutated by the * various hooks */ @Override public Pair>, List> toUpsertBatchItems( - final Map> latestAspects, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever) { + final Map> latestAspects, AspectRetriever aspectRetriever) { LinkedList upsertBatchItems = items.stream() @@ -59,25 +55,27 @@ public Pair>, List> toUpsertBatchItems( // patch to upsert MCPPatchBatchItem patchBatchItem = (MCPPatchBatchItem) item; final RecordTemplate currentValue = - latest != null ? latest.getRecordTemplate(entityRegistry) : null; - upsertItem = - patchBatchItem.applyPatch(entityRegistry, currentValue, aspectRetriever); + latest != null + ? latest.getRecordTemplate(aspectRetriever.getEntityRegistry()) + : null; + upsertItem = patchBatchItem.applyPatch(currentValue, aspectRetriever); } // Apply hooks final SystemMetadata oldSystemMetadata = latest != null ? latest.getSystemMetadata() : null; final RecordTemplate oldAspectValue = - latest != null ? latest.getRecordTemplate(entityRegistry) : null; - upsertItem.applyMutationHooks( - oldAspectValue, oldSystemMetadata, entityRegistry, aspectRetriever); + latest != null + ? latest.getRecordTemplate(aspectRetriever.getEntityRegistry()) + : null; + upsertItem.applyMutationHooks(oldAspectValue, oldSystemMetadata, aspectRetriever); return upsertItem; }) .collect(Collectors.toCollection(LinkedList::new)); LinkedList newItems = - applyMCPSideEffects(upsertBatchItems, entityRegistry, aspectRetriever) + applyMCPSideEffects(upsertBatchItems, aspectRetriever) .collect(Collectors.toCollection(LinkedList::new)); Map> newUrnAspectNames = getNewUrnAspectsMap(getUrnAspectsMap(), newItems); upsertBatchItems.addAll(newItems); @@ -98,20 +96,17 @@ public AspectsBatchImplBuilder one(BatchItem data) { } public AspectsBatchImplBuilder mcps( - List mcps, - AuditStamp auditStamp, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever) { + List mcps, AuditStamp auditStamp, AspectRetriever aspectRetriever) { this.items = mcps.stream() .map( mcp -> { if (mcp.getChangeType().equals(ChangeType.PATCH)) { return MCPPatchBatchItem.MCPPatchBatchItemBuilder.build( - mcp, auditStamp, entityRegistry); + mcp, auditStamp, aspectRetriever.getEntityRegistry()); } else { return MCPUpsertBatchItem.MCPUpsertBatchItemBuilder.build( - mcp, auditStamp, entityRegistry, aspectRetriever); + mcp, auditStamp, aspectRetriever); } }) .collect(Collectors.toList()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java index f61280bac4b22..6563765657d6d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java @@ -40,18 +40,24 @@ public class MCLBatchItemImpl implements MCLBatchItem { public static class MCLBatchItemImplBuilder { + // Ensure use of other builders + private MCLBatchItemImpl build() { + return null; + } + public MCLBatchItemImpl build( - MetadataChangeLog metadataChangeLog, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever) { - return MCLBatchItemImpl.builder() - .metadataChangeLog(metadataChangeLog) - .build(entityRegistry, aspectRetriever); + MetadataChangeLog metadataChangeLog, AspectRetriever aspectRetriever) { 
+ return MCLBatchItemImpl.builder().metadataChangeLog(metadataChangeLog).build(aspectRetriever); } - public MCLBatchItemImpl build(EntityRegistry entityRegistry, AspectRetriever aspectRetriever) { + public MCLBatchItemImpl build(AspectRetriever aspectRetriever) { + EntityRegistry entityRegistry = aspectRetriever.getEntityRegistry(); + log.debug("entity type = {}", this.metadataChangeLog.getEntityType()); - entitySpec(entityRegistry.getEntitySpec(this.metadataChangeLog.getEntityType())); + entitySpec( + aspectRetriever + .getEntityRegistry() + .getEntitySpec(this.metadataChangeLog.getEntityType())); aspectSpec(validateAspect(this.metadataChangeLog, this.entitySpec)); Urn urn = this.metadataChangeLog.getEntityUrn(); @@ -75,7 +81,6 @@ public MCLBatchItemImpl build(EntityRegistry entityRegistry, AspectRetriever asp // validate new ValidationUtils.validateRecordTemplate( this.metadataChangeLog.getChangeType(), - entityRegistry, this.entitySpec, this.aspectSpec, urn, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java index 3adf384f3b0ed..be333af2f7539 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java @@ -16,13 +16,13 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.batch.PatchItem; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.EntityUtils; import com.linkedin.metadata.entity.validation.ValidationUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.SystemMetadataUtils; import com.linkedin.mxe.MetadataChangeProposal; @@ -73,9 +73,7 @@ public ChangeType getChangeType() { } public MCPUpsertBatchItem applyPatch( - EntityRegistry entityRegistry, - RecordTemplate recordTemplate, - AspectRetriever aspectRetriever) { + RecordTemplate recordTemplate, AspectRetriever aspectRetriever) { MCPUpsertBatchItem.MCPUpsertBatchItemBuilder builder = MCPUpsertBatchItem.builder() .urn(getUrn()) @@ -84,7 +82,8 @@ public MCPUpsertBatchItem applyPatch( .auditStamp(auditStamp) .systemMetadata(getSystemMetadata()); - AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + AspectTemplateEngine aspectTemplateEngine = + aspectRetriever.getEntityRegistry().getAspectTemplateEngine(); RecordTemplate currentValue = recordTemplate != null @@ -106,7 +105,7 @@ public MCPUpsertBatchItem applyPatch( throw new RuntimeException(e); } - return builder.build(entityRegistry, aspectRetriever); + return builder.build(aspectRetriever); } public static class MCPPatchBatchItemBuilder { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java index 9d41b141dcd60..89209c44f10c7 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java @@ -4,12 +4,14 @@ import static com.linkedin.metadata.entity.AspectUtils.validateAspect; import com.datahub.util.exception.ModelConversionException; +import com.github.fge.jsonpatch.JsonPatchException; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.batch.SystemAspect; import com.linkedin.metadata.aspect.batch.UpsertItem; +import com.linkedin.metadata.aspect.patch.template.common.GenericPatchTemplate; import com.linkedin.metadata.aspect.plugins.hooks.MutationHook; import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; @@ -19,12 +21,12 @@ import com.linkedin.metadata.entity.validation.ValidationUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.metadata.utils.SystemMetadataUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.io.IOException; import java.sql.Timestamp; import java.util.Objects; import javax.annotation.Nonnull; @@ -39,6 +41,31 @@ @Builder(toBuilder = true) public class MCPUpsertBatchItem extends UpsertItem { + public static MCPUpsertBatchItem fromPatch( + @Nonnull Urn urn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate recordTemplate, + GenericPatchTemplate genericPatchTemplate, + @Nonnull AuditStamp auditStamp, + AspectRetriever aspectRetriever) { + MCPUpsertBatchItem.MCPUpsertBatchItemBuilder builder = + MCPUpsertBatchItem.builder() + .urn(urn) + .auditStamp(auditStamp) + .aspectName(aspectSpec.getName()); + + RecordTemplate currentValue = + recordTemplate != null ? 
recordTemplate : genericPatchTemplate.getDefault(); + + try { + builder.aspect(genericPatchTemplate.applyPatch(currentValue)); + } catch (JsonPatchException | IOException e) { + throw new RuntimeException(e); + } + + return builder.build(aspectRetriever); + } + // urn an urn associated with the new aspect @Nonnull private final Urn urn; @@ -66,12 +93,12 @@ public ChangeType getChangeType() { public void applyMutationHooks( @Nullable RecordTemplate oldAspectValue, @Nullable SystemMetadata oldSystemMetadata, - @Nonnull EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever) { // add audit stamp/system meta if needed for (MutationHook mutationHook : - entityRegistry.getMutationHooks( - getChangeType(), entitySpec.getName(), aspectSpec.getName())) { + aspectRetriever + .getEntityRegistry() + .getMutationHooks(getChangeType(), entitySpec.getName(), aspectSpec.getName())) { mutationHook.applyMutation( getChangeType(), entitySpec, @@ -99,14 +126,14 @@ public SystemAspect toLatestEntityAspect() { @Override public void validatePreCommit( - @Nullable RecordTemplate previous, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) + @Nullable RecordTemplate previous, @Nonnull AspectRetriever aspectRetriever) throws AspectValidationException { for (AspectPayloadValidator validator : - entityRegistry.getAspectPayloadValidators( - getChangeType(), entitySpec.getName(), aspectSpec.getName())) { + aspectRetriever + .getEntityRegistry() + .getAspectPayloadValidators( + getChangeType(), entitySpec.getName(), aspectSpec.getName())) { validator.validatePreCommit( getChangeType(), urn, getAspectSpec(), previous, this.aspect, aspectRetriever); } @@ -125,12 +152,11 @@ public MCPUpsertBatchItemBuilder systemMetadata(SystemMetadata systemMetadata) { } @SneakyThrows - public MCPUpsertBatchItem build( - EntityRegistry entityRegistry, AspectRetriever aspectRetriever) { - EntityUtils.validateUrn(entityRegistry, this.urn); + public MCPUpsertBatchItem build(AspectRetriever aspectRetriever) { + EntityUtils.validateUrn(aspectRetriever.getEntityRegistry(), this.urn); log.debug("entity type = {}", this.urn.getEntityType()); - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + entitySpec(aspectRetriever.getEntityRegistry().getEntitySpec(this.urn.getEntityType())); log.debug("entity spec = {}", this.entitySpec); aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); @@ -138,7 +164,6 @@ public MCPUpsertBatchItem build( ValidationUtils.validateRecordTemplate( ChangeType.UPSERT, - entityRegistry, this.entitySpec, this.aspectSpec, this.urn, @@ -157,17 +182,15 @@ public MCPUpsertBatchItem build( } public static MCPUpsertBatchItem build( - MetadataChangeProposal mcp, - AuditStamp auditStamp, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever) { + MetadataChangeProposal mcp, AuditStamp auditStamp, AspectRetriever aspectRetriever) { if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { throw new IllegalArgumentException( "Invalid MCP, this class only supports change type of UPSERT."); } log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + EntitySpec entitySpec = + aspectRetriever.getEntityRegistry().getEntitySpec(mcp.getEntityType()); AspectSpec aspectSpec = validateAspect(mcp, entitySpec); if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { @@ -191,7 +214,7 @@ public static MCPUpsertBatchItem build( .metadataChangeProposal(mcp) .auditStamp(auditStamp) 
.aspect(convertToRecordTemplate(mcp, aspectSpec)) - .build(entityRegistry, aspectRetriever); + .build(aspectRetriever); } private MCPUpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java index 97f7aa06340d2..947f0116b587c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java @@ -67,12 +67,12 @@ public static AspectSpec validate(EntitySpec entitySpec, String aspectName) { public static void validateRecordTemplate( ChangeType changeType, - EntityRegistry entityRegistry, EntitySpec entitySpec, AspectSpec aspectSpec, Urn urn, @Nullable RecordTemplate aspect, @Nonnull AspectRetriever aspectRetriever) { + EntityRegistry entityRegistry = aspectRetriever.getEntityRegistry(); EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); validator.setCurrentEntitySpec(entitySpec); Consumer resultFunction = @@ -83,6 +83,7 @@ public static void validateRecordTemplate( + "\n Cause: " + validationResult.getMessages()); }; + RecordTemplateValidator.validate( EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java index 0d8b7655fddeb..24e272dee7a25 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java @@ -10,6 +10,7 @@ import com.linkedin.metadata.graph.Edge; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.RelatedEntitiesResult; +import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; import com.linkedin.metadata.graph.RelatedEntity; import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.query.filter.Criterion; @@ -17,6 +18,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; import io.dgraph.DgraphClient; import io.dgraph.DgraphProto.Mutation; import io.dgraph.DgraphProto.NQuad; @@ -779,4 +781,21 @@ public void clear() { // setup urn, type and key relationships getSchema(); } + + @Nonnull + @Override + public RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + throw new IllegalArgumentException("Not implemented"); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index 97cb186ce948c..3051319aa54cf 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -23,6 +23,8 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.utils.ConcurrencyUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -81,7 +83,7 @@ public class ESGraphQueryDAO { @Nonnull public static void addFilterToQueryBuilder( - @Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { + @Nonnull Filter filter, @Nullable String node, BoolQueryBuilder rootQuery) { BoolQueryBuilder orQuery = new BoolQueryBuilder(); for (ConjunctiveCriterion conjunction : filter.getOr()) { final BoolQueryBuilder andQuery = new BoolQueryBuilder(); @@ -93,12 +95,13 @@ public static void addFilterToQueryBuilder( } criterionArray.forEach( criterion -> - andQuery.must( + andQuery.filter( QueryBuilders.termQuery( - node + "." + criterion.getField(), criterion.getValue()))); + (node == null ? "" : node + ".") + criterion.getField(), + criterion.getValue()))); orQuery.should(andQuery); } - rootQuery.must(orQuery); + rootQuery.filter(orQuery); } private SearchResponse executeSearchQuery( @@ -174,9 +177,9 @@ public SearchResponse getSearchResponse( public static BoolQueryBuilder buildQuery( @Nullable final List sourceTypes, - @Nonnull final Filter sourceEntityFilter, + @Nullable final Filter sourceEntityFilter, @Nullable final List destinationTypes, - @Nonnull final Filter destinationEntityFilter, + @Nullable final Filter destinationEntityFilter, @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); @@ -187,17 +190,22 @@ public static BoolQueryBuilder buildQuery( String sourceNode = relationshipDirection == RelationshipDirection.OUTGOING ? SOURCE : DESTINATION; if (sourceTypes != null && sourceTypes.size() > 0) { - finalQuery.must(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes)); + finalQuery.filter(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes)); + } + if (sourceEntityFilter != null) { + addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery); } - addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery); // set destination filter String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? 
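// Editorial aside (standard Elasticsearch behavior, not specific to this patch): the
// must(...) -> filter(...) rewrites in this file keep identical matching semantics but
// run the clauses in filter context, which skips relevance scoring and is cacheable.
// Roughly, in query-DSL terms (the "dataset" value is a hypothetical example):
//   { "bool": { "filter": [ { "terms": { "source.entityType": ["dataset"] } } ] } }
// instead of the previous
//   { "bool": { "must":   [ { "terms": { "source.entityType": ["dataset"] } } ] } }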
DESTINATION : SOURCE; if (destinationTypes != null && destinationTypes.size() > 0) { - finalQuery.must(QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes)); + finalQuery.filter( + QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes)); + } + if (destinationEntityFilter != null) { + addFilterToQueryBuilder(destinationEntityFilter, destinationNode, finalQuery); } - addFilterToQueryBuilder(destinationEntityFilter, destinationNode, finalQuery); // set relationship filter if (relationshipTypes.size() > 0) { @@ -206,8 +214,14 @@ public static BoolQueryBuilder buildQuery( relationshipType -> relationshipQuery.should( QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); - finalQuery.must(relationshipQuery); + finalQuery.filter(relationshipQuery); + } + + // general filter + if (relationshipFilter.getOr() != null) { + addFilterToQueryBuilder(new Filter().setOr(relationshipFilter.getOr()), null, finalQuery); } + return finalQuery; } @@ -659,4 +673,60 @@ public static class LineageResponse { int total; List lineageRelationships; } + + public SearchResponse getSearchResponse( + @Nullable final List sourceTypes, + @Nullable final Filter sourceEntityFilter, + @Nullable final List destinationTypes, + @Nullable final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count) { + + BoolQueryBuilder finalQuery = + buildQuery( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter); + + return executeScrollSearchQuery(finalQuery, sortCriterion, scrollId, count); + } + + private SearchResponse executeScrollSearchQuery( + @Nonnull final QueryBuilder query, + @Nonnull List sortCriterion, + @Nullable String scrollId, + final int count) { + + Object[] sort = null; + if (scrollId != null) { + SearchAfterWrapper searchAfterWrapper = SearchAfterWrapper.fromScrollId(scrollId); + sort = searchAfterWrapper.getSort(); + } + + SearchRequest searchRequest = new SearchRequest(); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + + searchSourceBuilder.size(count); + searchSourceBuilder.query(query); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, List.of(), false); + searchRequest.source(searchSourceBuilder); + ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null); + + searchRequest.indices(indexConvention.getIndexName(INDEX_NAME)); + + try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esQuery").time()) { + MetricUtils.counter(this.getClass(), SEARCH_EXECUTIONS_METRIC).inc(); + return client.search(searchRequest, RequestOptions.DEFAULT); + } catch (Exception e) { + log.error("Search query failed", e); + throw new ESQueryException("Search query failed:", e); + } + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index 6c828c0e7c6ae..67590ffd6e7c1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -11,7 +11,9 @@ import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.graph.LineageRelationshipArray; +import 
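// Editorial sketch: the scrollId consumed by executeScrollSearchQuery(...) above is a
// stateless search_after cursor rather than a server-side scroll context. It wraps the
// sort values of the last hit, mirroring what scrollRelatedEntities(...) does later in
// this patch:
//   Object[] sort = searchHits[searchHits.length - 1].getSortValues();
//   String nextScrollId = new SearchAfterWrapper(sort, null, 0L).toScrollId();
// Passing that id back re-enters the result stream just after the previous page.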
com.linkedin.metadata.graph.RelatedEntities; import com.linkedin.metadata.graph.RelatedEntitiesResult; +import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; import com.linkedin.metadata.graph.RelatedEntity; import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.query.filter.Condition; @@ -22,11 +24,14 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.structured.StructuredPropertyDefinition; import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -35,6 +40,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -47,6 +53,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.action.search.SearchResponse; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchHit; @Slf4j @RequiredArgsConstructor @@ -165,8 +172,6 @@ public RelatedEntitiesResult findRelatedEntities( } final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); - String destinationNode = - relationshipDirection == RelationshipDirection.OUTGOING ? "destination" : "source"; SearchResponse response = _graphReadDAO.getSearchResponse( @@ -185,28 +190,8 @@ public RelatedEntitiesResult findRelatedEntities( int totalCount = (int) response.getHits().getTotalHits().value; final List relationships = - Arrays.stream(response.getHits().getHits()) - .map( - hit -> { - final String urnStr = - ((HashMap) - hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)) - .getOrDefault("urn", null); - final String relationshipType = - (String) hit.getSourceAsMap().get("relationshipType"); - - if (urnStr == null || relationshipType == null) { - log.error( - String.format( - "Found null urn string, relationship type, aspect name or path spec in Elastic index. 
" - + "urnStr: %s, relationshipType: %s", - urnStr, relationshipType)); - return null; - } - - return new RelatedEntity(relationshipType, urnStr); - }) - .filter(Objects::nonNull) + searchHitsToRelatedEntities(response.getHits().getHits(), relationshipDirection).stream() + .map(RelatedEntities::asRelatedEntity) .collect(Collectors.toList()); return new RelatedEntitiesResult(offset, relationships.size(), totalCount, relationships); @@ -328,6 +313,12 @@ public List buildReindexConfigs() throws IOException { Collections.emptyMap())); } + @Override + public List buildReindexConfigsWithAllStructProps( + Collection properties) throws IOException { + return buildReindexConfigs(); + } + @Override public void reindexAll() { configure(); @@ -344,4 +335,88 @@ public void clear() { public boolean supportsMultiHop() { return true; } + + @Nonnull + @Override + public RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List sourceTypes, + @Nullable Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nullable Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + + final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); + + SearchResponse response = + _graphReadDAO.getSearchResponse( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + sortCriterion, + scrollId, + count); + + if (response == null) { + return new RelatedEntitiesScrollResult(0, 0, null, ImmutableList.of()); + } + + int totalCount = (int) response.getHits().getTotalHits().value; + final List relationships = + searchHitsToRelatedEntities(response.getHits().getHits(), relationshipDirection); + + SearchHit[] searchHits = response.getHits().getHits(); + // Only return next scroll ID if there are more results, indicated by full size results + String nextScrollId = null; + if (searchHits.length == count) { + Object[] sort = searchHits[searchHits.length - 1].getSortValues(); + nextScrollId = new SearchAfterWrapper(sort, null, 0L).toScrollId(); + } + + return RelatedEntitiesScrollResult.builder() + .entities(relationships) + .pageSize(relationships.size()) + .numResults(totalCount) + .scrollId(nextScrollId) + .build(); + } + + private static List searchHitsToRelatedEntities( + SearchHit[] searchHits, RelationshipDirection relationshipDirection) { + return Arrays.stream(searchHits) + .map( + hit -> { + final String destinationUrnStr = + ((HashMap) + hit.getSourceAsMap().getOrDefault("destination", EMPTY_HASH)) + .getOrDefault("urn", null); + final String sourceUrnStr = + ((HashMap) + hit.getSourceAsMap().getOrDefault("source", EMPTY_HASH)) + .getOrDefault("urn", null); + final String relationshipType = (String) hit.getSourceAsMap().get("relationshipType"); + + if (destinationUrnStr == null || sourceUrnStr == null || relationshipType == null) { + log.error( + String.format( + "Found null urn string, relationship type, aspect name or path spec in Elastic index. 
" + + "destinationUrnStr: %s, sourceUrnStr: %s, relationshipType: %s", + destinationUrnStr, sourceUrnStr, relationshipType)); + return null; + } + + return new RelatedEntities( + relationshipType, sourceUrnStr, destinationUrnStr, relationshipDirection); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index c8d3147711eba..a1f73a134ec8e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -17,6 +17,7 @@ import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.graph.RelatedEntitiesResult; +import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; import com.linkedin.metadata.graph.RelatedEntity; import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.query.filter.Condition; @@ -25,6 +26,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.util.Pair; import io.opentelemetry.extension.annotations.WithSpan; @@ -882,4 +884,21 @@ private boolean isSourceDestReversed( return null; } } + + @Nonnull + @Override + public RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + throw new IllegalArgumentException("Not implemented"); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index fd7491fe32ea3..7cba2e0ecc8cb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -18,6 +18,9 @@ import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.SearchUtils; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.io.IOException; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; @@ -47,6 +50,12 @@ public List buildReindexConfigs() { return indexBuilders.buildReindexConfigs(); } + @Override + public List buildReindexConfigsWithAllStructProps( + Collection properties) throws IOException { + return indexBuilders.buildReindexConfigsWithAllStructProps(properties); + } + @Override public void reindexAll() { configure(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java index 
388dcea784cbb..cc6a0f3e3d6f9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder.PROPERTIES; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.utils.ESUtils; @@ -22,6 +25,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.TreeMap; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -125,12 +129,20 @@ public ESIndexBuilder( public ReindexConfig buildReindexState( String indexName, Map mappings, Map settings) throws IOException { + return buildReindexState(indexName, mappings, settings, false); + } + + public ReindexConfig buildReindexState( + String indexName, + Map mappings, + Map settings, + boolean copyStructuredPropertyMappings) + throws IOException { ReindexConfig.ReindexConfigBuilder builder = ReindexConfig.builder() .name(indexName) .enableIndexSettingsReindex(enableIndexSettingsReindex) .enableIndexMappingsReindex(enableIndexMappingsReindex) - .targetMappings(mappings) .version(gitVersion.getVersion()); Map baseSettings = new HashMap<>(settings); @@ -148,6 +160,7 @@ public ReindexConfig buildReindexState( // If index doesn't exist, no reindex if (!exists) { + builder.targetMappings(mappings); return builder.build(); } @@ -173,6 +186,35 @@ public ReindexConfig buildReindexState( .getSourceAsMap(); builder.currentMappings(currentMappings); + if (copyStructuredPropertyMappings) { + Map currentStructuredProperties = + (Map) + ((Map) + ((Map) + currentMappings.getOrDefault(PROPERTIES, new TreeMap())) + .getOrDefault(STRUCTURED_PROPERTY_MAPPING_FIELD, new TreeMap())) + .getOrDefault(PROPERTIES, new TreeMap()); + + if (!currentStructuredProperties.isEmpty()) { + HashMap> props = + (HashMap>) + ((Map) mappings.get(PROPERTIES)) + .computeIfAbsent( + STRUCTURED_PROPERTY_MAPPING_FIELD, + (key) -> new HashMap<>(Map.of(PROPERTIES, new HashMap<>()))); + + props.merge( + PROPERTIES, + currentStructuredProperties, + (targetValue, currentValue) -> { + HashMap merged = new HashMap<>(currentValue); + merged.putAll(targetValue); + return merged.isEmpty() ? 
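// Editorial note (hedged): structured-property fields are created on live indices at
// runtime, so they never appear in the spec-derived target mappings. Copying the
// current "structuredProperties" submapping into the target here (target entries win
// on key collisions) keeps the mappings diff from reporting those runtime fields as
// removals. Toy example with a hypothetical property name:
//   current: { retentionTime: {type: double} }   target: { }
//   merged:  { retentionTime: {type: double} }   // runtime field preserved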
null : merged; + }); + } + } + + builder.targetMappings(mappings); return builder.build(); } @@ -251,7 +293,7 @@ public void buildIndex(ReindexConfig indexState) throws IOException { * @throws IOException communication issues with ES */ public void applyMappings(ReindexConfig indexState, boolean suppressError) throws IOException { - if (indexState.isPureMappingsAddition()) { + if (indexState.isPureMappingsAddition() || indexState.isPureStructuredProperty()) { log.info("Updating index {} mappings in place.", indexState.name()); PutMappingRequest request = new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java index 4489c661bb2ed..4322ea90edf1f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java @@ -3,9 +3,12 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.structured.StructuredPropertyDefinition; import java.io.IOException; +import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -41,6 +44,24 @@ public List buildReindexConfigs() { entitySpec -> { try { Map mappings = MappingsBuilder.getMappings(entitySpec); + return indexBuilder.buildReindexState( + indexConvention.getIndexName(entitySpec), mappings, settings, true); + } catch (IOException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList()); + } + + @Override + public List buildReindexConfigsWithAllStructProps( + Collection properties) { + Map settings = settingsBuilder.getSettings(); + return entityRegistry.getEntitySpecs().values().stream() + .map( + entitySpec -> { + try { + Map mappings = MappingsBuilder.getMappings(entitySpec, properties); return indexBuilder.buildReindexState( indexConvention.getIndexName(entitySpec), mappings, settings); } catch (IOException e) { @@ -49,4 +70,31 @@ public List buildReindexConfigs() { }) .collect(Collectors.toList()); } + + /** + * Given a structured property generate all entity index configurations impacted by it, preserving + * existing properties + * + * @param property the new property + * @return index configurations impacted by the new property + */ + public List buildReindexConfigsWithNewStructProp( + StructuredPropertyDefinition property) { + Map settings = settingsBuilder.getSettings(); + return entityRegistry.getEntitySpecs().values().stream() + .map( + entitySpec -> { + try { + Map mappings = + MappingsBuilder.getMappings(entitySpec, List.of(property)); + return indexBuilder.buildReindexState( + indexConvention.getIndexName(entitySpec), mappings, settings, true); + } catch (IOException e) { + throw new RuntimeException(e); + } + }) + .filter(Objects::nonNull) + .filter(ReindexConfig::hasNewStructuredProperty) + .collect(Collectors.toList()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java 
b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index f85a0dcb06a07..79f530f18a345 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -1,13 +1,21 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static com.linkedin.metadata.Constants.ENTITY_TYPE_URN_PREFIX; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_MAPPING_FIELD; +import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN; import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; import com.google.common.collect.ImmutableMap; +import com.linkedin.common.urn.Urn; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.LogicalValueType; import com.linkedin.metadata.models.SearchScoreFieldSpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType; import com.linkedin.metadata.search.utils.ESUtils; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.net.URISyntaxException; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -48,6 +56,53 @@ public static Map getPartialNgramConfigWithOverrides( private MappingsBuilder() {} + /** + * Builds mappings from entity spec and a collection of structured properties for the entity. + * + * @param entitySpec entity's spec + * @param structuredProperties structured properties for the entity + * @return mappings + */ + public static Map getMappings( + @Nonnull final EntitySpec entitySpec, + Collection structuredProperties) { + Map mappings = getMappings(entitySpec); + + String entityName = entitySpec.getEntityAnnotation().getName(); + Map structuredPropertiesForEntity = + getMappingsForStructuredProperty( + structuredProperties.stream() + .filter( + prop -> { + try { + return prop.getEntityTypes() + .contains(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + entityName)); + } catch (URISyntaxException e) { + return false; + } + }) + .collect(Collectors.toSet())); + + if (!structuredPropertiesForEntity.isEmpty()) { + HashMap> props = + (HashMap>) + ((Map) mappings.get(PROPERTIES)) + .computeIfAbsent( + STRUCTURED_PROPERTY_MAPPING_FIELD, + (key) -> new HashMap<>(Map.of(PROPERTIES, new HashMap<>()))); + + props.merge( + PROPERTIES, + structuredPropertiesForEntity, + (oldValue, newValue) -> { + HashMap merged = new HashMap<>(oldValue); + merged.putAll(newValue); + return merged.isEmpty() ? 
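// Editorial summary of getMappingsForStructuredProperty(...) below: each property's
// logical value type selects its Elasticsearch mapping:
//   STRING    -> keyword mappings
//   RICH_TEXT -> partial-text search mappings (FieldType.TEXT_PARTIAL)
//   DATE      -> { "type": "date" }
//   URN       -> urn mappings
//   NUMBER    -> { "type": "double" }
// keyed by sanitizeStructuredPropertyFQN(qualifiedName).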
null : merged; + }); + } + return mappings; + } + public static Map getMappings(@Nonnull final EntitySpec entitySpec) { Map mappings = new HashMap<>(); @@ -89,6 +144,30 @@ private static Map getMappingsForRunId() { return ImmutableMap.builder().put(TYPE, ESUtils.KEYWORD_FIELD_TYPE).build(); } + public static Map getMappingsForStructuredProperty( + Collection properties) { + return properties.stream() + .map( + property -> { + Map mappingForField = new HashMap<>(); + String valueType = property.getValueType().getId(); + if (valueType.equalsIgnoreCase(LogicalValueType.STRING.name())) { + mappingForField = getMappingsForKeyword(); + } else if (valueType.equalsIgnoreCase(LogicalValueType.RICH_TEXT.name())) { + mappingForField = getMappingsForSearchText(FieldType.TEXT_PARTIAL); + } else if (valueType.equalsIgnoreCase(LogicalValueType.DATE.name())) { + mappingForField.put(TYPE, ESUtils.DATE_FIELD_TYPE); + } else if (valueType.equalsIgnoreCase(LogicalValueType.URN.name())) { + mappingForField = getMappingsForUrn(); + } else if (valueType.equalsIgnoreCase(LogicalValueType.NUMBER.name())) { + mappingForField.put(TYPE, ESUtils.DOUBLE_FIELD_TYPE); + } + return Map.entry( + sanitizeStructuredPropertyFQN(property.getQualifiedName()), mappingForField); + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + private static Map getMappingsForField( @Nonnull final SearchableFieldSpec searchableFieldSpec) { FieldType fieldType = searchableFieldSpec.getSearchableAnnotation().getFieldType(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java index e3155c9f943cc..bb6905139f49d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java @@ -11,6 +11,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -65,6 +66,8 @@ public class ReindexConfig { private final boolean requiresApplyMappings; private final boolean isPureMappingsAddition; private final boolean isSettingsReindex; + private final boolean hasNewStructuredProperty; + private final boolean isPureStructuredProperty; public static ReindexConfigBuilder builder() { return new CalculatedBuilder(); @@ -92,6 +95,14 @@ private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { return this; } + private ReindexConfigBuilder hasNewStructuredProperty(boolean ignored) { + return this; + } + + private ReindexConfigBuilder isPureStructuredProperty(boolean ignored) { + return this; + } + // ensure sorted public ReindexConfigBuilder currentMappings(Map currentMappings) { this.currentMappings = sortMap(currentMappings); @@ -141,6 +152,15 @@ public ReindexConfig build() { super.requiresApplyMappings = !mappingsDiff.entriesDiffering().isEmpty() || !mappingsDiff.entriesOnlyOnRight().isEmpty(); + super.isPureStructuredProperty = + mappingsDiff + .entriesDiffering() + .keySet() + .equals(Set.of(STRUCTURED_PROPERTY_MAPPING_FIELD)) + || mappingsDiff + .entriesOnlyOnRight() + .keySet() + .equals(Set.of(STRUCTURED_PROPERTY_MAPPING_FIELD)); super.isPureMappingsAddition = super.requiresApplyMappings && mappingsDiff.entriesDiffering().isEmpty() @@ -157,6 +177,19 @@ public 
ReindexConfig build() { super.name, mappingsDiff.entriesDiffering()); } + super.hasNewStructuredProperty = + (mappingsDiff.entriesDiffering().containsKey(STRUCTURED_PROPERTY_MAPPING_FIELD) + || mappingsDiff + .entriesOnlyOnRight() + .containsKey(STRUCTURED_PROPERTY_MAPPING_FIELD)) + && getOrDefault( + super.currentMappings, + List.of("properties", STRUCTURED_PROPERTY_MAPPING_FIELD, "properties")) + .size() + < getOrDefault( + super.targetMappings, + List.of("properties", STRUCTURED_PROPERTY_MAPPING_FIELD, "properties")) + .size(); /* Consider analysis and settings changes */ super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index b35c0258d09f0..0eb44edfb11de 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query; import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.*; import static com.linkedin.metadata.utils.SearchUtil.*; import com.codahale.metrics.Timer; @@ -303,7 +303,7 @@ public AutoCompleteResult autoComplete( /** * Returns number of documents per field value given the field and filters * - * @param entityName name of the entity, if null, aggregates over all entities + * @param entityNames names of the entities, if null, aggregates over all entities * @param field the field name for aggregate * @param requestParams filters to apply before aggregating * @param limit the number of aggregations to return diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index 522c8e510dcf8..0f22b75b69f10 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.utils.SearchUtil.*; import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.models.StructuredPropertyUtils; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.search.utils.ESUtils; import java.util.ArrayList; @@ -72,8 +74,12 @@ private Set getAllFacetFields(final List annotatio } private boolean isValidAggregate(final String inputFacet) { - Set facets = Set.of(inputFacet.split(AGGREGATION_SEPARATOR_CHAR)); - boolean isValid = !facets.isEmpty() && _allFacetFields.containsAll(facets); + List facets = List.of(inputFacet.split(AGGREGATION_SEPARATOR_CHAR)); + boolean isValid = + !facets.isEmpty() + && ((facets.size() == 1 + && facets.get(0).startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + ".")) + || _allFacetFields.containsAll(facets)); if (!isValid) { log.warn( String.format( @@ -89,6 +95,13 @@ private AggregationBuilder facetToAggregationBuilder(final 
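// Editorial sketch (hypothetical facet value): a requested facet such as
//   "structuredProperties.io.acryl.privacy.retentionTime"
// passes isValidAggregate(...) above without being a registered facet field, and the
// loop below rewrites it to
//   "structuredProperties." + sanitizeStructuredPropertyFQN("io.acryl.privacy.retentionTime")
// so the terms aggregation targets the sanitized Elasticsearch field.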
String inputFacet) { AggregationBuilder lastAggBuilder = null; for (int i = facets.size() - 1; i >= 0; i--) { String facet = facets.get(i); + if (facet.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + ".")) { + String structPropFqn = facet.substring(STRUCTURED_PROPERTY_MAPPING_FIELD.length() + 1); + facet = + STRUCTURED_PROPERTY_MAPPING_FIELD + + "." + + StructuredPropertyUtils.sanitizeStructuredPropertyFQN(structPropFqn); + } AggregationBuilder aggBuilder; if (facet.contains(AGGREGATION_SPECIAL_TYPE_DELIMITER)) { List specialTypeFields = List.of(facet.split(AGGREGATION_SPECIAL_TYPE_DELIMITER)); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java index 1fe4a74968e42..452e50a6e8d62 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java @@ -1,6 +1,6 @@ package com.linkedin.metadata.search.elasticsearch.query.request; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.*; import java.io.IOException; import java.io.Serializable; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 4d51de39c88e3..05fa6f45fcb30 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -93,6 +93,7 @@ public class SearchRequestHandler { private final Set _defaultQueryFieldNames; private final HighlightBuilder _highlights; private final Map _filtersToDisplayName; + private final SearchConfiguration _configs; private final SearchQueryBuilder _searchQueryBuilder; private final AggregationQueryBuilder _aggregationQueryBuilder; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java index 2a9571b18b726..6cadb39d5970d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java @@ -17,7 +17,8 @@ public class Features { public enum Name { SEARCH_BACKEND_SCORE, // Score returned by search backend NUM_ENTITIES_PER_TYPE, // Number of entities per entity type - RANK_WITHIN_TYPE; // Rank within the entity type + RANK_WITHIN_TYPE, + ONLY_MATCH_CUSTOM_PROPERTIES; // Rank within the entity type } public Double getNumericFeature(Name featureName, double defaultValue) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java index bfeb993390571..d52a80d685fd5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.transformer; +import 
static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; @@ -7,17 +10,26 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.entity.Aspect; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.LogicalValueType; import com.linkedin.metadata.models.SearchScoreFieldSpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType; import com.linkedin.metadata.models.extractor.FieldExtractor; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; @@ -41,7 +53,7 @@ public class SearchDocumentTransformer { // Maximum customProperties value length private final int maxValueLength; - private SystemEntityClient entityClient; + private AspectRetriever aspectRetriever; private static final String BROWSE_PATH_V2_DELIMITER = "␟"; @@ -77,7 +89,8 @@ public Optional transformAspect( final Urn urn, final RecordTemplate aspect, final AspectSpec aspectSpec, - final Boolean forDelete) { + final Boolean forDelete) + throws RemoteInvocationException, URISyntaxException { final Map> extractedSearchableFields = FieldExtractor.extractFields(aspect, aspectSpec.getSearchableFieldSpecs(), maxValueLength); final Map> extractedSearchScoreFields = @@ -93,6 +106,12 @@ public Optional transformAspect( extractedSearchScoreFields.forEach( (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); result = Optional.of(searchDocument.toString()); + } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(aspectSpec.getName())) { + final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); + searchDocument.put("urn", urn.toString()); + setStructuredPropertiesSearchValue( + new StructuredProperties(aspect.data()), searchDocument, forDelete); + result = Optional.of(searchDocument.toString()); } return result; @@ -277,4 +296,93 @@ private String getBrowsePathV2Value(@Nonnull final List fieldValues) { } return aggregatedValue; } + + private void setStructuredPropertiesSearchValue( + final StructuredProperties values, final ObjectNode searchDocument, final Boolean forDelete) + throws RemoteInvocationException, URISyntaxException { + Map> propertyMap = + values.getProperties().stream() + .collect( + Collectors.groupingBy( + StructuredPropertyValueAssignment::getPropertyUrn, Collectors.toSet())); + + Map> definitions = + aspectRetriever.getLatestAspectObjects( + propertyMap.keySet(), Set.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME)); + + if (definitions.size() < 
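// Editorial illustration (hypothetical shape, assuming a sanitization that maps "." in
// qualified names to "_"; the exact rule lives in StructuredPropertyUtils): for a
// structuredProperties aspect, transformAspect(...) emits a document roughly like
//   { "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,db.tbl,PROD)",
//     "structuredProperties.io_acryl_retentionTime": [365.0] }
// where NUMBER properties become double nodes and string-like properties text nodes,
// per the switch over LogicalValueType further below.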
propertyMap.size()) { + String message = + String.format( + "Missing property definitions. %s", + propertyMap.keySet().stream() + .filter(k -> !definitions.containsKey(k)) + .collect(Collectors.toSet())); + log.error(message); + } + + propertyMap + .entrySet() + .forEach( + propertyEntry -> { + StructuredPropertyDefinition definition = + new StructuredPropertyDefinition( + definitions + .get(propertyEntry.getKey()) + .get(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .data()); + String fieldName = + String.join( + ".", + List.of( + STRUCTURED_PROPERTY_MAPPING_FIELD, + sanitizeStructuredPropertyFQN(definition.getQualifiedName()))); + + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.nullNode()); + } else { + LogicalValueType logicalValueType = + StructuredPropertiesValidator.getLogicalValueType(definition.getValueType()); + + ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(); + + propertyEntry + .getValue() + .forEach( + property -> + property + .getValues() + .forEach( + propertyValue -> { + final Optional searchValue; + switch (logicalValueType) { + case UNKNOWN: + log.warn( + "Unable to transform UNKNOWN logical value type."); + searchValue = Optional.empty(); + break; + case NUMBER: + Double doubleValue = + propertyValue.getDouble() != null + ? propertyValue.getDouble() + : Double.valueOf(propertyValue.getString()); + searchValue = + Optional.of( + JsonNodeFactory.instance.numberNode(doubleValue)); + break; + default: + searchValue = + propertyValue.getString().isEmpty() + ? Optional.empty() + : Optional.of( + JsonNodeFactory.instance.textNode( + propertyValue.getString())); + break; + } + searchValue.ifPresent(arrayNode::add); + })); + + searchDocument.set(fieldName, arrayNode); + } + }); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 982b5c8d5f367..aa854149de43a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; @@ -8,6 +10,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; +import com.linkedin.metadata.models.StructuredPropertyUtils; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; @@ -97,6 +100,7 @@ public class ESUtils { } }; + // TODO - This has been expanded for has* in another branch public static final Set BOOLEAN_FIELDS = ImmutableSet.of("removed"); /* @@ -203,6 +207,9 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery( public static QueryBuilder getQueryBuilderFromCriterion( @Nonnull final Criterion criterion, boolean isTimeseries) { final String fieldName = toFacetField(criterion.getField()); + if (fieldName.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD)) { + 
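// Editorial note: toFacetField(...) (changed later in this file) sanitizes
// structured-property field names, so the criterion is rewritten in place here to make
// downstream query builders target the real Elasticsearch field. Hypothetical example:
//   in:  "structuredProperties.io.acryl.privacy.retentionTime.keyword"
//   out: "structuredProperties." + sanitized FQN, with the KEYWORD_SUFFIX
//        (presumably ".keyword") stripped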
criterion.setField(fieldName); + } /* * Check the field-name for a "sibling" field, or one which should ALWAYS @@ -260,46 +267,69 @@ public static void buildSortOrder( @Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable SortCriterion sortCriterion, List entitySpecs) { - if (sortCriterion == null) { + buildSortOrder( + searchSourceBuilder, + sortCriterion == null ? List.of() : List.of(sortCriterion), + entitySpecs, + true); + } + + /** + * Allow disabling default sort, used when you know uniqueness is present without urn field. For + * example, edge indices where the unique constraint is determined by multiple fields (src urn, + * dst urn, relation type). + * + * @param enableDefaultSort enable/disable default sorting logic + */ + public static void buildSortOrder( + @Nonnull SearchSourceBuilder searchSourceBuilder, + @Nonnull List sortCriterion, + List entitySpecs, + boolean enableDefaultSort) { + if (sortCriterion.isEmpty() && enableDefaultSort) { searchSourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC)); } else { - Optional fieldTypeForDefault = Optional.empty(); - for (EntitySpec entitySpec : entitySpecs) { - List fieldSpecs = entitySpec.getSearchableFieldSpecs(); - for (SearchableFieldSpec fieldSpec : fieldSpecs) { - SearchableAnnotation annotation = fieldSpec.getSearchableAnnotation(); - if (annotation.getFieldName().equals(sortCriterion.getField()) - || annotation.getFieldNameAliases().contains(sortCriterion.getField())) { - fieldTypeForDefault = Optional.of(fieldSpec.getSearchableAnnotation().getFieldType()); + for (SortCriterion sortCriteria : sortCriterion) { + Optional fieldTypeForDefault = Optional.empty(); + for (EntitySpec entitySpec : entitySpecs) { + List fieldSpecs = entitySpec.getSearchableFieldSpecs(); + for (SearchableFieldSpec fieldSpec : fieldSpecs) { + SearchableAnnotation annotation = fieldSpec.getSearchableAnnotation(); + if (annotation.getFieldName().equals(sortCriteria.getField()) + || annotation.getFieldNameAliases().contains(sortCriteria.getField())) { + fieldTypeForDefault = Optional.of(fieldSpec.getSearchableAnnotation().getFieldType()); + break; + } + } + if (fieldTypeForDefault.isPresent()) { break; } } - if (fieldTypeForDefault.isPresent()) { - break; + if (fieldTypeForDefault.isEmpty()) { + log.warn( + "Sort criterion field " + + sortCriteria.getField() + + " was not found in any entity spec to be searched"); } - } - if (fieldTypeForDefault.isEmpty()) { - log.warn( - "Sort criterion field " - + sortCriterion.getField() - + " was not found in any entity spec to be searched"); - } - final SortOrder esSortOrder = - (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) - ? SortOrder.ASC - : SortOrder.DESC; - FieldSortBuilder sortBuilder = - new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); - if (fieldTypeForDefault.isPresent()) { - String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get()); - if (esFieldtype != null) { - sortBuilder.unmappedType(esFieldtype); + final SortOrder esSortOrder = + (sortCriteria.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + ? 
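// Editorial sketch (assumed helper, illustrative only): the List<SortCriterion>
// overload plus enableDefaultSort=false suits indices whose uniqueness spans several
// fields, e.g. the edge documents named in the javadoc above:
//   ESUtils.buildSortOrder(sourceBuilder,
//       List.of(asc("source.urn"), asc("destination.urn"), asc("relationshipType")),
//       List.of(), false);
// where asc(field) stands for a SortCriterion with ASCENDING order (hypothetical helper).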
SortOrder.ASC + : SortOrder.DESC; + FieldSortBuilder sortBuilder = + new FieldSortBuilder(sortCriteria.getField()).order(esSortOrder); + if (fieldTypeForDefault.isPresent()) { + String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get()); + if (esFieldtype != null) { + sortBuilder.unmappedType(esFieldtype); + } } + searchSourceBuilder.sort(sortBuilder); } - searchSourceBuilder.sort(sortBuilder); } - if (sortCriterion == null - || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { + if (enableDefaultSort + && (sortCriterion.isEmpty() + || sortCriterion.stream() + .noneMatch(c -> c.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)))) { searchSourceBuilder.sort( new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); } @@ -335,7 +365,15 @@ public static String escapeReservedCharacters(@Nonnull String input) { @Nonnull public static String toFacetField(@Nonnull final String filterField) { - return filterField.replace(ESUtils.KEYWORD_SUFFIX, ""); + String fieldName = filterField; + if (fieldName.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + ".")) { + fieldName = + STRUCTURED_PROPERTY_MAPPING_FIELD + + "." + + StructuredPropertyUtils.sanitizeStructuredPropertyFQN( + fieldName.substring(STRUCTURED_PROPERTY_MAPPING_FIELD.length() + 1)); + } + return fieldName.replace(ESUtils.KEYWORD_SUFFIX, ""); } @Nonnull diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index 247d542604da7..1f39a3947c47a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -10,15 +10,16 @@ import com.linkedin.common.InputField; import com.linkedin.common.InputFields; import com.linkedin.common.Status; +import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.dataset.FineGrainedLineage; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.batch.MCLBatchItem; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.ebean.batch.MCLBatchItemImpl; import com.linkedin.metadata.graph.Edge; import com.linkedin.metadata.graph.GraphIndexUtils; @@ -43,6 +44,7 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; +import com.linkedin.structured.StructuredPropertyDefinition; import com.linkedin.util.Pair; import java.io.IOException; import java.io.UnsupportedEncodingException; @@ -70,11 +72,11 @@ public class UpdateIndicesService { private final EntitySearchService _entitySearchService; private final TimeseriesAspectService _timeseriesAspectService; private final SystemMetadataService _systemMetadataService; - private final EntityRegistry _entityRegistry; private final SearchDocumentTransformer _searchDocumentTransformer; private final EntityIndexBuilders _entityIndexBuilders; - private SystemEntityClient systemEntityClient; + private AspectRetriever aspectRetriever; + private EntityRegistry _entityRegistry; @Value("${featureFlags.graphServiceDiffModeEnabled:true}") private boolean 
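// Editorial note (hedged): this class now pulls its EntityRegistry off the injected
// AspectRetriever instead of receiving both separately; see
// initializeAspectRetriever(...) at the end of this file, added to break the circular
// dependency its javadoc describes. Whether the entity service itself is the
// AspectRetriever passed in is an assumption, not stated in this diff.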
_graphDiffMode; @@ -82,6 +84,12 @@ public class UpdateIndicesService { @Value("${featureFlags.searchServiceDiffModeEnabled:true}") private boolean _searchDiffMode; + @Value("${structuredProperties.enabled}") + private boolean _structuredPropertiesHookEnabled; + + @Value("${structuredProperties.writeEnabled}") + private boolean _structuredPropertiesWriteEnabled; + private static final Set UPDATE_CHANGE_TYPES = ImmutableSet.of(ChangeType.UPSERT, ChangeType.RESTATE, ChangeType.PATCH); @@ -100,31 +108,26 @@ public UpdateIndicesService( EntitySearchService entitySearchService, TimeseriesAspectService timeseriesAspectService, SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer, EntityIndexBuilders entityIndexBuilders) { _graphService = graphService; _entitySearchService = entitySearchService; _timeseriesAspectService = timeseriesAspectService; _systemMetadataService = systemMetadataService; - _entityRegistry = entityRegistry; _searchDocumentTransformer = searchDocumentTransformer; _entityIndexBuilders = entityIndexBuilders; } public void handleChangeEvent(@Nonnull final MetadataChangeLog event) { try { - MCLBatchItemImpl batch = - MCLBatchItemImpl.builder().build(event, _entityRegistry, systemEntityClient); + MCLBatchItemImpl batch = MCLBatchItemImpl.builder().build(event, aspectRetriever); Stream sideEffects = _entityRegistry .getMCLSideEffects( event.getChangeType(), event.getEntityType(), event.getAspectName()) .stream() - .flatMap( - mclSideEffect -> - mclSideEffect.apply(List.of(batch), _entityRegistry, systemEntityClient)); + .flatMap(mclSideEffect -> mclSideEffect.apply(List.of(batch), aspectRetriever)); for (MCLBatchItem mclBatchItem : Stream.concat(Stream.of(batch), sideEffects).toList()) { MetadataChangeLog hookEvent = mclBatchItem.getMetadataChangeLog(); @@ -173,11 +176,14 @@ private void handleUpdateChangeEvent(@Nonnull final MCLBatchItem event) throws I updateSystemMetadata(event.getSystemMetadata(), urn, aspectSpec, aspect); } - // Step 1. For all aspects, attempt to update Search + // Step 1. Handle StructuredProperties Index Mapping changes + updateIndexMappings(entitySpec, aspectSpec, aspect, previousAspect); + + // Step 2. For all aspects, attempt to update Search updateSearchService( entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect); - // Step 2. For all aspects, attempt to update Graph + // Step 3. 
For all aspects, attempt to update Graph SystemMetadata systemMetadata = event.getSystemMetadata(); if (_graphDiffMode && !(_graphService instanceof DgraphGraphService) @@ -190,6 +196,46 @@ private void handleUpdateChangeEvent(@Nonnull final MCLBatchItem event) throws I } } + public void updateIndexMappings( + EntitySpec entitySpec, + AspectSpec aspectSpec, + RecordTemplate newValue, + RecordTemplate oldValue) + throws IOException { + if (_structuredPropertiesHookEnabled + && STRUCTURED_PROPERTY_ENTITY_NAME.equals(entitySpec.getName()) + && STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME.equals(aspectSpec.getName())) { + + UrnArray oldEntityTypes = + Optional.ofNullable(oldValue) + .map( + recordTemplate -> + new StructuredPropertyDefinition(recordTemplate.data()).getEntityTypes()) + .orElse(new UrnArray()); + + StructuredPropertyDefinition newDefinition = + new StructuredPropertyDefinition(newValue.data()); + newDefinition.getEntityTypes().removeAll(oldEntityTypes); + + if (newDefinition.getEntityTypes().size() > 0) { + _entityIndexBuilders + .buildReindexConfigsWithNewStructProp(newDefinition) + .forEach( + reindexState -> { + try { + log.info( + "Applying new structured property {} to index {}", + newDefinition, + reindexState.name()); + _entityIndexBuilders.getIndexBuilder().applyMappings(reindexState, false); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + } + } + /** * This very important method processes {@link MetadataChangeLog} deletion events to cleanup the * Metadata Graph when an aspect or entity is removed. @@ -617,13 +663,13 @@ private EntitySpec getEventEntitySpec(@Nonnull final MetadataChangeLog event) { } /** - * Allow internal use of the system entity client. Solves recursive dependencies between the - * UpdateIndicesService and the SystemJavaEntityClient + * Solves recursive dependencies between the UpdateIndicesService and EntityService * - * @param systemEntityClient system entity client + * @param aspectRetriever aspect Retriever */ - public void setSystemEntityClient(SystemEntityClient systemEntityClient) { - this.systemEntityClient = systemEntityClient; - _searchDocumentTransformer.setEntityClient(systemEntityClient); + public void initializeAspectRetriever(AspectRetriever aspectRetriever) { + this.aspectRetriever = aspectRetriever; + this._entityRegistry = aspectRetriever.getEntityRegistry(); + this._searchDocumentTransformer.setAspectRetriever(aspectRetriever); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java index 9aa0cdca99f68..e894558e3d1af 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java @@ -1,7 +1,9 @@ package com.linkedin.metadata.shared; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.structured.StructuredPropertyDefinition; import java.io.IOException; +import java.util.Collection; import java.util.List; public interface ElasticSearchIndexed { @@ -12,6 +14,15 @@ public interface ElasticSearchIndexed { */ List buildReindexConfigs() throws IOException; + /** + * The index configurations for the given service with StructuredProperties applied. 
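+ * <p>Editorial example (hypothetical call site): passing every known definition lets
+ * index diffs account for runtime structured-property fields, e.g.
+ * {@code service.buildReindexConfigsWithAllStructProps(allPropertyDefinitions)}.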
+ * + * @param properties The structured properties to apply to the index mappings + * @return List of reindex configurations + */ + List buildReindexConfigsWithAllStructProps( + Collection properties) throws IOException; + /** * Mirrors the service's functions which are expected to build/reindex as needed based on the * reindex configurations above diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java index 6fbe7cfe882ce..36eab7b69e6a1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java @@ -13,12 +13,14 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.mxe.SystemMetadata; +import com.linkedin.structured.StructuredPropertyDefinition; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -245,6 +247,12 @@ public List buildReindexConfigs() throws IOException { Collections.emptyMap())); } + @Override + public List buildReindexConfigsWithAllStructProps( + Collection properties) throws IOException { + return buildReindexConfigs(); + } + @Override public void reindexAll() { configure(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index f9ab86d41335d..71ffd603c999f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -20,12 +20,15 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; +import com.linkedin.metadata.timeseries.GenericTimeseriesDocument; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.TimeseriesScrollResult; import com.linkedin.metadata.timeseries.elastic.indexbuilder.MappingsBuilder; import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders; import com.linkedin.metadata.timeseries.elastic.query.ESAggregatedStatsDAO; @@ -33,6 +36,7 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.SystemMetadata; +import com.linkedin.structured.StructuredPropertyDefinition; import com.linkedin.timeseries.AggregationSpec; import 
com.linkedin.timeseries.DeleteAspectValuesResult; import com.linkedin.timeseries.GenericTable; @@ -43,9 +47,11 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -86,8 +92,6 @@ public class ElasticSearchTimeseriesAspectService .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } - private static final String TIMESTAMP_FIELD = "timestampMillis"; - private static final String EVENT_FIELD = "event"; private static final Integer DEFAULT_LIMIT = 10000; private final IndexConvention _indexConvention; @@ -118,7 +122,7 @@ public ElasticSearchTimeseriesAspectService( private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { Map docFields = doc.getSourceAsMap(); EnvelopedAspect envelopedAspect = new EnvelopedAspect(); - Object event = docFields.get(EVENT_FIELD); + Object event = docFields.get(MappingsBuilder.EVENT_FIELD); GenericAspect genericAspect; try { genericAspect = @@ -147,6 +151,61 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { return envelopedAspect; } + private static Set commonFields = + Set.of( + MappingsBuilder.URN_FIELD, + MappingsBuilder.RUN_ID_FIELD, + MappingsBuilder.EVENT_GRANULARITY, + MappingsBuilder.IS_EXPLODED_FIELD, + MappingsBuilder.MESSAGE_ID_FIELD, + MappingsBuilder.PARTITION_SPEC_PARTITION, + MappingsBuilder.PARTITION_SPEC, + MappingsBuilder.SYSTEM_METADATA_FIELD, + MappingsBuilder.TIMESTAMP_MILLIS_FIELD, + MappingsBuilder.TIMESTAMP_FIELD, + MappingsBuilder.EVENT_FIELD); + + private static Pair toEnvAspectGenericDocument( + @Nonnull SearchHit doc) { + EnvelopedAspect envelopedAspect = null; + + Map documentFieldMap = doc.getSourceAsMap(); + + GenericTimeseriesDocument.GenericTimeseriesDocumentBuilder builder = + GenericTimeseriesDocument.builder() + .urn((String) documentFieldMap.get(MappingsBuilder.URN_FIELD)) + .timestampMillis((Long) documentFieldMap.get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)) + .timestamp((Long) documentFieldMap.get(MappingsBuilder.TIMESTAMP_FIELD)); + + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.RUN_ID_FIELD)) + .ifPresent(d -> builder.runId((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.EVENT_GRANULARITY)) + .ifPresent(d -> builder.eventGranularity((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.IS_EXPLODED_FIELD)) + .ifPresent(d -> builder.isExploded((Boolean) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.MESSAGE_ID_FIELD)) + .ifPresent(d -> builder.messageId((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.PARTITION_SPEC_PARTITION)) + .ifPresent(d -> builder.partition((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.PARTITION_SPEC)) + .ifPresent(d -> builder.partitionSpec(d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.SYSTEM_METADATA_FIELD)) + .ifPresent(d -> builder.systemMetadata(d)); + + if (documentFieldMap.get(MappingsBuilder.EVENT_FIELD) != null) { + envelopedAspect = parseDocument(doc); + builder.event(documentFieldMap.get(MappingsBuilder.EVENT_FIELD)); + } else { + // If no event, the event is any non-common field + builder.event( + documentFieldMap.entrySet().stream() + .filter(entry -> !commonFields.contains(entry.getKey())) + 
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
+    }
+
+    return Pair.of(envelopedAspect, builder.build());
+  }
+
   @Override
   public void configure() {
     _indexBuilders.reindexAll(); @@ -157,6 +216,12 @@ public List buildReindexConfigs() {
     return _indexBuilders.buildReindexConfigs();
   }
 
+  @Override
+  public List buildReindexConfigsWithAllStructProps(
+      Collection properties) throws IOException {
+    return _indexBuilders.buildReindexConfigsWithAllStructProps(properties);
+  }
+
   public String reindexAsync(
       String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options)
       throws Exception { @@ -256,7 +321,7 @@ public List getAspectValues(
     if (startTimeMillis != null) {
       Criterion startTimeCriterion =
           new Criterion()
-              .setField(TIMESTAMP_FIELD)
+              .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)
               .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
               .setValue(startTimeMillis.toString());
       filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); @@ -264,7 +329,7 @@ public List getAspectValues(
     if (endTimeMillis != null) {
       Criterion endTimeCriterion =
           new Criterion()
-              .setField(TIMESTAMP_FIELD)
+              .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)
               .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
               .setValue(endTimeMillis.toString());
       filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); @@ -421,4 +486,88 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId)
 
     return rollbackResult;
   }
+
+  @Nonnull
+  @Override
+  public TimeseriesScrollResult scrollAspects(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nullable Filter filter,
+      @Nonnull List sortCriterion,
+      @Nullable String scrollId,
+      int count,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis) {
+    final BoolQueryBuilder filterQueryBuilder =
+        QueryBuilders.boolQuery().filter(ESUtils.buildFilterQuery(filter, true));
+
+    if (startTimeMillis != null) {
+      Criterion startTimeCriterion =
+          new Criterion()
+              .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)
+              .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+              .setValue(startTimeMillis.toString());
+      filterQueryBuilder.filter(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true));
+    }
+    if (endTimeMillis != null) {
+      Criterion endTimeCriterion =
+          new Criterion()
+              .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)
+              .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+              .setValue(endTimeMillis.toString());
+      filterQueryBuilder.filter(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true));
+    }
+
+    SearchResponse response =
+        executeScrollSearchQuery(
+            entityName, aspectName, filterQueryBuilder, sortCriterion, scrollId, count);
+    int totalCount = (int) response.getHits().getTotalHits().value;
+
+    List<Pair<EnvelopedAspect, GenericTimeseriesDocument>> resultPairs =
+        Arrays.stream(response.getHits().getHits())
+            .map(ElasticSearchTimeseriesAspectService::toEnvAspectGenericDocument)
+            .toList();
+
+    return TimeseriesScrollResult.builder()
+        .numResults(totalCount)
+        .pageSize(response.getHits().getHits().length)
+        .events(resultPairs.stream().map(Pair::getFirst).collect(Collectors.toList()))
+        .documents(resultPairs.stream().map(Pair::getSecond).collect(Collectors.toList()))
+        .build();
+  }
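  // A usage sketch (editorial, not part of this patch): reading one page of
  // timeseries documents with scrollAspects. The accessors on
  // TimeseriesScrollResult are assumed from the builder fields above
  // (numResults/pageSize/events/documents); how the next scrollId is surfaced
  // is not shown in this hunk, so only the first page is fetched here.
  //
  //   TimeseriesScrollResult page =
  //       timeseriesAspectService.scrollAspects(
  //           "dataset",          // entityName
  //           "datasetProfile",   // aspectName
  //           null,               // no additional filter
  //           List.of(),          // default sort order
  //           null,               // no scrollId => first page
  //           100,                // page size
  //           startMillis,        // optional lower bound on timestampMillis
  //           endMillis);         // optional upper bound on timestampMillis
  //   page.getDocuments().forEach(doc -> process(doc));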
+
+  private SearchResponse executeScrollSearchQuery(
+      @Nonnull final String entityName,
+      @Nonnull final String aspectName,
+      @Nonnull final QueryBuilder query,
+      @Nonnull List sortCriterion,
+      @Nullable String scrollId,
+      final int count) {
+
+    Object[] sort = null;
+    if (scrollId != null) {
+      SearchAfterWrapper searchAfterWrapper = SearchAfterWrapper.fromScrollId(scrollId);
+      sort = searchAfterWrapper.getSort();
+    }
+
+    SearchRequest searchRequest = new SearchRequest();
+
+    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+
+    searchSourceBuilder.size(count);
+    searchSourceBuilder.query(query);
+    ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, List.of(), false);
+    searchRequest.source(searchSourceBuilder);
+    ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null);
+
+    searchRequest.indices(_indexConvention.getTimeseriesAspectIndexName(entityName, aspectName));
+
+    try (Timer.Context ignored =
+        MetricUtils.timer(this.getClass(), "scrollAspects_search").time()) {
+      return _searchClient.search(searchRequest, RequestOptions.DEFAULT);
+    } catch (Exception e) {
+      log.error("Search query failed", e);
+      throw new ESQueryException("Search query failed:", e);
+    }
+  }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java
index 564bcb2a242cb..6437bbc390d82 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java
@@ -7,8 +7,10 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed;
 import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import com.linkedin.util.Pair;
 import java.io.IOException;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
@@ -91,4 +93,10 @@ public List buildReindexConfigs() {
             })
         .collect(Collectors.toList());
   }
+
+  @Override
+  public List buildReindexConfigsWithAllStructProps(
+      Collection properties) throws IOException {
+    return buildReindexConfigs();
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java
index 252ac2d633b98..451b732722498 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java
@@ -41,7 +41,7 @@ public static Map ingestCorpUserKeyAspects(
               .aspect(aspect)
               .auditStamp(AspectGenerationUtils.createAuditStamp())
               .systemMetadata(AspectGenerationUtils.createSystemMetadata())
-              .build(entityService.getEntityRegistry(), entityService.getSystemEntityClient()));
+              .build(entityService));
     }
     entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true);
     return aspects;
@@ -71,7 +71,7 @@ public static Map ingestCorpUserInfoAspects(
               .aspect(aspect)
               .auditStamp(AspectGenerationUtils.createAuditStamp())
               .systemMetadata(AspectGenerationUtils.createSystemMetadata())
-              .build(entityService.getEntityRegistry(), entityService.getSystemEntityClient()));
+              .build(entityService));
     }
     entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true);
     return aspects;
@@ -102,7 +102,7 @@ public static Map ingestChartInfoAspects(
               .aspect(aspect)
               .auditStamp(AspectGenerationUtils.createAuditStamp())
               .systemMetadata(AspectGenerationUtils.createSystemMetadata())
-              
.build(entityService.getEntityRegistry(), entityService.getSystemEntityClient())); + .build(entityService)); } entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); return aspects; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java index fba11f24f4c44..5a4443904e260 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java @@ -6,7 +6,6 @@ import com.codahale.metrics.Counter; import com.linkedin.data.template.RequiredFieldNotPresentException; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.event.EventProducer; @@ -14,6 +13,7 @@ import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.client.CachingEntitySearchService; +import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; import java.util.function.Supplier; @@ -32,8 +32,8 @@ public class JavaEntityClientTest { private LineageSearchService _lineageSearchService; private TimeseriesAspectService _timeseriesAspectService; private EventProducer _eventProducer; - private RestliEntityClient _restliEntityClient; private MockedStatic _metricUtils; + private RollbackService rollbackService; private Counter _counter; @BeforeMethod @@ -45,8 +45,8 @@ public void setupTest() { _searchService = mock(SearchService.class); _lineageSearchService = mock(LineageSearchService.class); _timeseriesAspectService = mock(TimeseriesAspectService.class); + rollbackService = mock(RollbackService.class); _eventProducer = mock(EventProducer.class); - _restliEntityClient = mock(RestliEntityClient.class); _metricUtils = mockStatic(MetricUtils.class); _counter = mock(Counter.class); when(MetricUtils.counter(any(), any())).thenReturn(_counter); @@ -66,8 +66,8 @@ private JavaEntityClient getJavaEntityClient() { _searchService, _lineageSearchService, _timeseriesAspectService, - _eventProducer, - _restliEntityClient); + rollbackService, + _eventProducer); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index 45e992576676d..c45306e5f022b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -124,21 +124,21 @@ public void testIngestListLatestAspects() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) .aspect(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) .aspect(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, 
_entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // List aspects @@ -193,21 +193,21 @@ public void testIngestListUrns() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) .aspect(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) .aspect(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // List aspects urns @@ -451,13 +451,7 @@ public void run() { auditStamp.setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)); auditStamp.setTime(System.currentTimeMillis()); AspectsBatchImpl batch = - AspectsBatchImpl.builder() - .mcps( - mcps, - auditStamp, - entityService.getEntityRegistry(), - entityService.getSystemEntityClient()) - .build(); + AspectsBatchImpl.builder().mcps(mcps, auditStamp, entityService).build(); entityService.ingestProposal(batch, false); } } catch (InterruptedException | URISyntaxException ie) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index ee21b56cea7c0..db749f3575a06 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -12,6 +12,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; +import com.linkedin.common.UrnArray; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.TupleKey; @@ -29,6 +30,7 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.AspectGenerationUtils; @@ -58,6 +60,12 @@ import com.linkedin.retention.DataHubRetentionConfig; import com.linkedin.retention.Retention; import com.linkedin.retention.VersionBasedRetention; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; import com.linkedin.util.Pair; import jakarta.annotation.Nonnull; import java.util.ArrayList; @@ -67,6 +75,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -847,28 +857,28 @@ public void testRollbackAspect() throws 
AssertionError {
                 .aspect(writeAspect1)
                 .systemMetadata(metadata1)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn2)
                 .aspectName(aspectName)
                 .aspect(writeAspect2)
                 .auditStamp(TEST_AUDIT_STAMP)
                 .systemMetadata(metadata1)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn3)
                 .aspectName(aspectName)
                 .aspect(writeAspect3)
                 .auditStamp(TEST_AUDIT_STAMP)
                 .systemMetadata(metadata1)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn1)
                 .aspectName(aspectName)
                 .aspect(writeAspect1Overwrite)
                 .systemMetadata(metadata2)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()));
+                .build(_entityServiceImpl));
     _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true);
 
     // this should no-op since this run has been overwritten @@ -926,21 +936,21 @@ public void testRollbackKey() throws AssertionError {
                 .aspect(writeAspect1)
                 .systemMetadata(metadata1)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn1)
                 .aspectName(keyAspectName)
                 .aspect(writeKey1)
                 .systemMetadata(metadata1)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn1)
                 .aspectName(aspectName)
                 .aspect(writeAspect1Overwrite)
                 .systemMetadata(metadata2)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()));
+                .build(_entityServiceImpl));
     _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true);
 
     // this should no-op since the key should have been written in the first run @@ -1006,35 +1016,35 @@ public void testRollbackUrn() throws AssertionError {
                 .aspect(writeAspect1)
                 .systemMetadata(metadata1)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn1)
                 .aspectName(keyAspectName)
                 .aspect(writeKey1)
                 .auditStamp(TEST_AUDIT_STAMP)
                 .systemMetadata(metadata1)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn2)
                 .aspectName(aspectName)
                 .aspect(writeAspect2)
                 .auditStamp(TEST_AUDIT_STAMP)
                 .systemMetadata(metadata1)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn3)
                 .aspectName(aspectName)
                 .aspect(writeAspect3)
                 .systemMetadata(metadata1)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()),
+                .build(_entityServiceImpl),
             MCPUpsertBatchItem.builder()
                 .urn(entityUrn1)
                 .aspectName(aspectName)
                 .aspect(writeAspect1Overwrite)
                 .systemMetadata(metadata2)
                 .auditStamp(TEST_AUDIT_STAMP)
-                .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()));
+                .build(_entityServiceImpl));
     _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true);
 
     // this should no-op since the key should have been written in the first run @@ -1073,7 
+1083,7 @@ public void testIngestGetLatestAspect() throws AssertionError { .aspect(writeAspect1) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #1 @@ -1104,7 +1114,7 @@ public void testIngestGetLatestAspect() throws AssertionError { .aspect(writeAspect2) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata2) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #2 @@ -1150,7 +1160,7 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { .aspect(writeAspect1) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #1 @@ -1170,7 +1180,7 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { .aspect(writeAspect2) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #2 @@ -1215,7 +1225,7 @@ public void testIngestSameAspect() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #1 @@ -1246,7 +1256,7 @@ public void testIngestSameAspect() throws AssertionError { .aspect(writeAspect2) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #2 @@ -1299,42 +1309,42 @@ public void testRetention() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName) .aspect(writeAspect1a) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName) .aspect(writeAspect1b) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName2) .aspect(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) 
.aspectName(aspectName2) .aspect(writeAspect2a) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName2) .aspect(writeAspect2b) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); @@ -1366,14 +1376,14 @@ public void testRetention() throws AssertionError { .aspect(writeAspect1c) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName2) .aspect(writeAspect2c) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); @@ -1637,6 +1647,172 @@ public void testUIPreProcessedProposal() throws Exception { assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); } + @Test + public void testStructuredPropertyIngestProposal() throws Exception { + String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; + Urn entityUrn = UrnUtils.getUrn(urnStr); + + // Ingest one structured property definition + String definitionAspectName = "propertyDefinition"; + Urn firstPropertyUrn = UrnUtils.getUrn("urn:li:structuredProperty:firstStructuredProperty"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(firstPropertyUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("structuredProperty"); + gmce.setAspectName(definitionAspectName); + StructuredPropertyDefinition structuredPropertyDefinition = + new StructuredPropertyDefinition() + .setQualifiedName("firstStructuredProperty") + .setValueType(Urn.createFromString(DATA_TYPE_URN_PREFIX + "string")) + .setEntityTypes(new UrnArray(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"))); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] definitionSerialized = + dataTemplateCodec.dataTemplateToBytes(structuredPropertyDefinition); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(definitionSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + ArgumentCaptor captor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(firstPropertyUrn), Mockito.any(), captor.capture()); + assertEquals( + _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), + structuredPropertyDefinition); + Urn secondPropertyUrn = UrnUtils.getUrn("urn:li:structuredProperty:secondStructuredProperty"); + assertNull(_entityServiceImpl.getAspect(secondPropertyUrn, definitionAspectName, 0)); + assertEquals( + _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), + structuredPropertyDefinition); 
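    // For orientation (an editorial sketch, not part of the test): serialized via
    // JacksonDataTemplateCodec, the definition ingested above travels in the
    // GenericAspect as JSON shaped roughly like the following, assuming
    // DATA_TYPE_URN_PREFIX and ENTITY_TYPE_URN_PREFIX expand to the usual
    // "urn:li:dataType:datahub." and "urn:li:entityType:datahub." prefixes:
    //
    //   {
    //     "qualifiedName": "firstStructuredProperty",
    //     "valueType": "urn:li:dataType:datahub.string",
    //     "entityTypes": ["urn:li:entityType:datahub.dataset"]
    //   }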
+ Set defs = + _aspectDao + .streamAspects( + STRUCTURED_PROPERTY_ENTITY_NAME, STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .map( + entityAspect -> + EntityUtils.toAspectRecord( + STRUCTURED_PROPERTY_ENTITY_NAME, + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, + entityAspect.getMetadata(), + _testEntityRegistry)) + .map(recordTemplate -> (StructuredPropertyDefinition) recordTemplate) + .collect(Collectors.toSet()); + assertEquals(defs.size(), 1); + assertEquals(defs, Set.of(structuredPropertyDefinition)); + + SystemEntityClient mockSystemEntityClient = Mockito.mock(SystemEntityClient.class); + Mockito.when( + mockSystemEntityClient.getLatestAspectObject(firstPropertyUrn, "propertyDefinition")) + .thenReturn(new com.linkedin.entity.Aspect(structuredPropertyDefinition.data())); + + // Add a value for that property + PrimitivePropertyValueArray propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create("hello")); + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn(firstPropertyUrn) + .setValues(propertyValues); + StructuredProperties structuredProperties = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + MetadataChangeProposal asgnMce = new MetadataChangeProposal(); + asgnMce.setEntityUrn(entityUrn); + asgnMce.setChangeType(ChangeType.UPSERT); + asgnMce.setEntityType("dataset"); + asgnMce.setAspectName("structuredProperties"); + JacksonDataTemplateCodec asgnTemplateCodec = new JacksonDataTemplateCodec(); + byte[] asgnSerialized = asgnTemplateCodec.dataTemplateToBytes(structuredProperties); + GenericAspect asgnGenericAspect = new GenericAspect(); + asgnGenericAspect.setValue(ByteString.unsafeWrap(asgnSerialized)); + asgnGenericAspect.setContentType("application/json"); + asgnMce.setAspect(asgnGenericAspect); + _entityServiceImpl.ingestProposal(asgnMce, TEST_AUDIT_STAMP, false); + assertEquals( + _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), structuredProperties); + + // Ingest second structured property definition + MetadataChangeProposal gmce2 = new MetadataChangeProposal(); + gmce2.setEntityUrn(secondPropertyUrn); + gmce2.setChangeType(ChangeType.UPSERT); + gmce2.setEntityType("structuredProperty"); + gmce2.setAspectName(definitionAspectName); + StructuredPropertyDefinition secondDefinition = + new StructuredPropertyDefinition() + .setQualifiedName("secondStructuredProperty") + .setValueType(Urn.createFromString(DATA_TYPE_URN_PREFIX + "number")) + .setEntityTypes(new UrnArray(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"))); + JacksonDataTemplateCodec secondDataTemplate = new JacksonDataTemplateCodec(); + byte[] secondDefinitionSerialized = secondDataTemplate.dataTemplateToBytes(secondDefinition); + GenericAspect secondGenericAspect = new GenericAspect(); + secondGenericAspect.setValue(ByteString.unsafeWrap(secondDefinitionSerialized)); + secondGenericAspect.setContentType("application/json"); + gmce2.setAspect(secondGenericAspect); + _entityServiceImpl.ingestProposal(gmce2, TEST_AUDIT_STAMP, false); + ArgumentCaptor secondCaptor = + ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(secondPropertyUrn), Mockito.any(), secondCaptor.capture()); + assertEquals( + _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), + structuredPropertyDefinition); + assertEquals( + _entityServiceImpl.getAspect(secondPropertyUrn, 
definitionAspectName, 0), secondDefinition); + defs = + _aspectDao + .streamAspects( + STRUCTURED_PROPERTY_ENTITY_NAME, STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .map( + entityAspect -> + EntityUtils.toAspectRecord( + STRUCTURED_PROPERTY_ENTITY_NAME, + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, + entityAspect.getMetadata(), + _testEntityRegistry)) + .map(recordTemplate -> (StructuredPropertyDefinition) recordTemplate) + .collect(Collectors.toSet()); + assertEquals(defs.size(), 2); + assertEquals(defs, Set.of(secondDefinition, structuredPropertyDefinition)); + + Mockito.when( + mockSystemEntityClient.getLatestAspectObject(secondPropertyUrn, "propertyDefinition")) + .thenReturn(new com.linkedin.entity.Aspect(secondDefinition.data())); + + // Get existing value for first structured property + assertEquals( + _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), structuredProperties); + + // Add a value for second property + propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create(15.0)); + StructuredPropertyValueAssignment secondAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn(secondPropertyUrn) + .setValues(propertyValues); + StructuredProperties secondPropertyArr = + new StructuredProperties() + .setProperties( + new StructuredPropertyValueAssignmentArray(assignment, secondAssignment)); + MetadataChangeProposal asgn2Mce = new MetadataChangeProposal(); + asgn2Mce.setEntityUrn(entityUrn); + asgn2Mce.setChangeType(ChangeType.UPSERT); + asgn2Mce.setEntityType("dataset"); + asgn2Mce.setAspectName("structuredProperties"); + JacksonDataTemplateCodec asgnTemplateCodec2 = new JacksonDataTemplateCodec(); + byte[] asgnSerialized2 = asgnTemplateCodec2.dataTemplateToBytes(secondPropertyArr); + GenericAspect asgnGenericAspect2 = new GenericAspect(); + asgnGenericAspect2.setValue(ByteString.unsafeWrap(asgnSerialized2)); + asgnGenericAspect2.setContentType("application/json"); + asgn2Mce.setAspect(asgnGenericAspect2); + _entityServiceImpl.ingestProposal(asgn2Mce, TEST_AUDIT_STAMP, false); + StructuredProperties expectedProperties = + new StructuredProperties() + .setProperties( + new StructuredPropertyValueAssignmentArray(assignment, secondAssignment)); + assertEquals( + _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), expectedProperties); + } + @Nonnull protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) throws Exception { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java index 680d4079851eb..15852e0cbe35b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.entity; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.metadata.snapshot.Snapshot; import java.util.Collections; import java.util.HashMap; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java 
b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 2f8fba0083aa7..bd500cd469100 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -337,26 +337,26 @@ public void testTimestampLineage() throws Exception { // Without timestamps EntityLineageResult upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); EntityLineageResult downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Timestamp before upstreamResult = getUpstreamLineage(datasetTwoUrn, 0L, initialTime - 10); downstreamResult = getDownstreamLineage(datasetTwoUrn, 0L, initialTime - 10); - Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(0), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), downstreamResult.getTotal()); // Timestamp after upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); - Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(0), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), downstreamResult.getTotal()); // Timestamp included upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Update only one of the downstream edges Long updatedTime = 2000L; @@ -387,20 +387,20 @@ public void testTimestampLineage() throws Exception { // Without timestamps upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Window includes initial time and updated time upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Window includes updated time but not initial time upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); downstreamResult = 
getDownstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(2), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(2), downstreamResult.getTotal()); } /** diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index fba9d5359d29f..d2aef982750bd 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -7,7 +7,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.SearchEntityArray; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java index 2c395875a1d6b..a54e8aa1c9191 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java @@ -1,14 +1,19 @@ package com.linkedin.metadata.search.indexbuilder; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_MAPPING_FIELD; import static org.testng.Assert.*; import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder; +import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.systemmetadata.SystemMetadataMappingsBuilder; import com.linkedin.metadata.version.GitVersion; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -295,4 +300,117 @@ public void testSettingsNoReindex() throws Exception { wipe(); } } + + @Test + public void testCopyStructuredPropertyMappings() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder enabledMappingReindex = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + true, + new ElasticSearchConfiguration(), + gitVersion); + + ReindexConfig reindexConfigNoIndexBefore = + enabledMappingReindex.buildReindexState( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + assertNull(reindexConfigNoIndexBefore.currentMappings()); + assertEquals( + reindexConfigNoIndexBefore.targetMappings(), SystemMetadataMappingsBuilder.getMappings()); + assertFalse(reindexConfigNoIndexBefore.requiresApplyMappings()); + assertFalse(reindexConfigNoIndexBefore.isPureMappingsAddition()); + + // Create index + enabledMappingReindex.buildIndex( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + + // Test build 
reindex config with no structured properties added
+    ReindexConfig reindexConfigNoChange =
+        enabledMappingReindex.buildReindexState(
+            TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of());
+    assertEquals(
+        reindexConfigNoChange.currentMappings(), SystemMetadataMappingsBuilder.getMappings());
+    assertEquals(
+        reindexConfigNoChange.targetMappings(), SystemMetadataMappingsBuilder.getMappings());
+    assertFalse(reindexConfigNoChange.requiresApplyMappings());
+    assertFalse(reindexConfigNoChange.isPureMappingsAddition());
+
+    // Test add new field to the mappings
+    Map targetMappingsNewField =
+        new HashMap<>(SystemMetadataMappingsBuilder.getMappings());
+    ((Map) targetMappingsNewField.get("properties"))
+        .put("myNewField", Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD));
+
+    // Test build reindex config for new fields with no structured properties added
+    ReindexConfig reindexConfigNewField =
+        enabledMappingReindex.buildReindexState(TEST_INDEX_NAME, targetMappingsNewField, Map.of());
+    assertEquals(
+        reindexConfigNewField.currentMappings(), SystemMetadataMappingsBuilder.getMappings());
+    assertEquals(reindexConfigNewField.targetMappings(), targetMappingsNewField);
+    assertTrue(reindexConfigNewField.requiresApplyMappings());
+    assertTrue(reindexConfigNewField.isPureMappingsAddition());
+
+    // Add structured properties to index
+    Map mappingsWithStructuredProperties =
+        new HashMap<>(SystemMetadataMappingsBuilder.getMappings());
+    ((Map) mappingsWithStructuredProperties.get("properties"))
+        .put(
+            STRUCTURED_PROPERTY_MAPPING_FIELD + ".myStringProp",
+            Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD));
+    ((Map) mappingsWithStructuredProperties.get("properties"))
+        .put(
+            STRUCTURED_PROPERTY_MAPPING_FIELD + ".myNumberProp",
+            Map.of(SettingsBuilder.TYPE, ESUtils.DOUBLE_FIELD_TYPE));
+
+    enabledMappingReindex.buildIndex(TEST_INDEX_NAME, mappingsWithStructuredProperties, Map.of());
+
+    // Test build reindex config with structured properties not copied
+    ReindexConfig reindexConfigNoCopy =
+        enabledMappingReindex.buildReindexState(
+            TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of());
+    Map expectedMappingsStructPropsNested =
+        new HashMap<>(SystemMetadataMappingsBuilder.getMappings());
+    ((Map) expectedMappingsStructPropsNested.get("properties"))
+        .put(
+            "structuredProperties",
+            Map.of(
+                "properties",
+                Map.of(
+                    "myNumberProp",
+                    Map.of(SettingsBuilder.TYPE, ESUtils.DOUBLE_FIELD_TYPE),
+                    "myStringProp",
+                    Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD))));
+    assertEquals(reindexConfigNoCopy.currentMappings(), expectedMappingsStructPropsNested);
+    assertEquals(reindexConfigNoCopy.targetMappings(), SystemMetadataMappingsBuilder.getMappings());
+    assertFalse(reindexConfigNoCopy.isPureMappingsAddition());
+
+    // Test build reindex config with structured properties copied
+    ReindexConfig reindexConfigCopy =
+        enabledMappingReindex.buildReindexState(
+            TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of(), true);
+    assertEquals(reindexConfigCopy.currentMappings(), expectedMappingsStructPropsNested);
+    assertEquals(reindexConfigCopy.targetMappings(), expectedMappingsStructPropsNested);
+    assertFalse(reindexConfigCopy.requiresApplyMappings());
+    assertFalse(reindexConfigCopy.isPureMappingsAddition());
+
+    // Test build reindex config with new field added and structured properties copied
+    ReindexConfig reindexConfigCopyAndNewField =
+        enabledMappingReindex.buildReindexState(
+            TEST_INDEX_NAME, 
targetMappingsNewField, Map.of(), true); + assertEquals(reindexConfigCopyAndNewField.currentMappings(), expectedMappingsStructPropsNested); + Map targetMappingsNewFieldAndStructProps = + new HashMap<>(expectedMappingsStructPropsNested); + ((Map) targetMappingsNewFieldAndStructProps.get("properties")) + .put("myNewField", Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD)); + assertEquals( + reindexConfigCopyAndNewField.targetMappings(), targetMappingsNewFieldAndStructProps); + assertTrue(reindexConfigCopyAndNewField.requiresApplyMappings()); + assertTrue(reindexConfigCopyAndNewField.isPureMappingsAddition()); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index 02bd186ccc183..6df31b35fecde 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -1,11 +1,16 @@ package com.linkedin.metadata.search.indexbuilder; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; import com.google.common.collect.ImmutableMap; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import org.testng.annotations.Test; @@ -54,14 +59,6 @@ public void testMappingsBuilder() { Map keyPart3FieldSubfields = (Map) keyPart3Field.get("fields"); assertEquals(keyPart3FieldSubfields.size(), 1); assertTrue(keyPart3FieldSubfields.containsKey("keyword")); - Map customPropertiesField = - (Map) properties.get("customProperties"); - assertEquals(customPropertiesField.get("type"), "keyword"); - assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); - Map customPropertiesFieldSubfields = - (Map) customPropertiesField.get("fields"); - assertEquals(customPropertiesFieldSubfields.size(), 1); - assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT Map nestedArrayStringField = (Map) properties.get("nestedArrayStringField"); @@ -81,6 +78,15 @@ public void testMappingsBuilder() { assertEquals(nestedArrayArrayFieldSubfields.size(), 2); assertTrue(nestedArrayArrayFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayArrayFieldSubfields.containsKey("keyword")); + Map customPropertiesField = + (Map) properties.get("customProperties"); + assertEquals(customPropertiesField.get("type"), "keyword"); + assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); + Map customPropertiesFieldSubfields = + (Map) customPropertiesField.get("fields"); + assertEquals(customPropertiesFieldSubfields.size(), 2); + assertTrue(customPropertiesFieldSubfields.containsKey("delimited")); + assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT with addToFilters Map textField = (Map) properties.get("textFieldOverride"); @@ -153,4 +159,115 @@ public void testMappingsBuilder() { Map doubleField = (Map) properties.get("doubleField"); assertEquals(doubleField.get("type"), "double"); } + + @Test + public void testGetMappingsWithStructuredProperty() throws 
URISyntaxException { + // Baseline comparison: Mappings with no structured props + Map resultWithoutStructuredProps = + MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec()); + + // Test that a structured property that does not apply to the entity does not alter the mappings + StructuredPropertyDefinition structPropNotForThisEntity = + new StructuredPropertyDefinition() + .setQualifiedName("propNotForThis") + .setDisplayName("propNotForThis") + .setEntityTypes(new UrnArray(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"))) + .setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + Map resultWithOnlyUnrelatedStructuredProp = + MappingsBuilder.getMappings( + TestEntitySpecBuilder.getSpec(), List.of(structPropNotForThisEntity)); + assertEquals(resultWithOnlyUnrelatedStructuredProp, resultWithoutStructuredProps); + + // Test that a structured property that does apply to this entity is included in the mappings + String fqnOfRelatedProp = "propForThis"; + StructuredPropertyDefinition structPropForThisEntity = + new StructuredPropertyDefinition() + .setQualifiedName(fqnOfRelatedProp) + .setDisplayName("propForThis") + .setEntityTypes( + new UrnArray( + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"), + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "testEntity"))) + .setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + Map resultWithOnlyRelatedStructuredProp = + MappingsBuilder.getMappings( + TestEntitySpecBuilder.getSpec(), List.of(structPropForThisEntity)); + assertNotEquals(resultWithOnlyRelatedStructuredProp, resultWithoutStructuredProps); + Map fieldsBefore = + (Map) resultWithoutStructuredProps.get("properties"); + Map fieldsAfter = + (Map) resultWithOnlyRelatedStructuredProp.get("properties"); + assertEquals(fieldsAfter.size(), fieldsBefore.size() + 1); + + Map structProps = (Map) fieldsAfter.get("structuredProperties"); + fieldsAfter = (Map) structProps.get("properties"); + + String newField = + fieldsAfter.keySet().stream() + .filter(field -> !fieldsBefore.containsKey(field)) + .findFirst() + .get(); + assertEquals(newField, fqnOfRelatedProp); + assertEquals( + fieldsAfter.get(newField), + Map.of( + "normalizer", + "keyword_normalizer", + "type", + "keyword", + "fields", + Map.of("keyword", Map.of("type", "keyword")))); + + // Test that only structured properties that apply are included + Map resultWithBothStructuredProps = + MappingsBuilder.getMappings( + TestEntitySpecBuilder.getSpec(), + List.of(structPropForThisEntity, structPropNotForThisEntity)); + assertEquals(resultWithBothStructuredProps, resultWithOnlyRelatedStructuredProp); + } + + @Test + public void testGetMappingsForStructuredProperty() throws URISyntaxException { + StructuredPropertyDefinition testStructProp = + new StructuredPropertyDefinition() + .setQualifiedName("testProp") + .setDisplayName("exampleProp") + .setEntityTypes( + new UrnArray( + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"), + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "testEntity"))) + .setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + Map structuredPropertyFieldMappings = + MappingsBuilder.getMappingsForStructuredProperty(List.of(testStructProp)); + assertEquals(structuredPropertyFieldMappings.size(), 1); + String keyInMap = structuredPropertyFieldMappings.keySet().stream().findFirst().get(); + assertEquals(keyInMap, "testProp"); + Object mappings = structuredPropertyFieldMappings.get(keyInMap); + assertEquals( + mappings, + Map.of( + "type", + "keyword", + "normalizer", + 
"keyword_normalizer", + "fields", + Map.of("keyword", Map.of("type", "keyword")))); + + StructuredPropertyDefinition propWithNumericType = + new StructuredPropertyDefinition() + .setQualifiedName("testPropNumber") + .setDisplayName("examplePropNumber") + .setEntityTypes( + new UrnArray( + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"), + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "testEntity"))) + .setValueType(Urn.createFromString("urn:li:logicalType:NUMBER")); + Map structuredPropertyFieldMappingsNumber = + MappingsBuilder.getMappingsForStructuredProperty(List.of(propWithNumericType)); + assertEquals(structuredPropertyFieldMappingsNumber.size(), 1); + keyInMap = structuredPropertyFieldMappingsNumber.keySet().stream().findFirst().get(); + assertEquals("testPropNumber", keyInMap); + mappings = structuredPropertyFieldMappingsNumber.get(keyInMap); + assertEquals(Map.of("type", "double"), mappings); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java index 6269827104faf..9e8855622ced4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java @@ -13,6 +13,7 @@ import java.util.Set; import java.util.stream.Collectors; import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.testng.Assert; import org.testng.annotations.Test; @@ -20,7 +21,6 @@ public class AggregationQueryBuilderTest { @Test public void testGetDefaultAggregationsHasFields() { - SearchableAnnotation annotation = new SearchableAnnotation( "test", @@ -82,7 +82,6 @@ public void testGetDefaultAggregationsFields() { @Test public void testGetSpecificAggregationsHasFields() { - SearchableAnnotation annotation1 = new SearchableAnnotation( "test1", @@ -135,6 +134,100 @@ public void testGetSpecificAggregationsHasFields() { Assert.assertEquals(aggs.size(), 0); } + @Test + public void testAggregateOverStructuredProperty() { + SearchConfiguration config = new SearchConfiguration(); + config.setMaxTermBucketSize(25); + + AggregationQueryBuilder builder = new AggregationQueryBuilder(config, List.of()); + + List aggs = + builder.getAggregations(List.of("structuredProperties.ab.fgh.ten")); + Assert.assertEquals(aggs.size(), 1); + AggregationBuilder aggBuilder = aggs.get(0); + Assert.assertTrue(aggBuilder instanceof TermsAggregationBuilder); + TermsAggregationBuilder agg = (TermsAggregationBuilder) aggBuilder; + // Check that field name is sanitized to correct field name + Assert.assertEquals(agg.field(), "structuredProperties.ab_fgh_ten"); + + // Two structured properties + aggs = + builder.getAggregations( + List.of("structuredProperties.ab.fgh.ten", "structuredProperties.hello")); + Assert.assertEquals(aggs.size(), 2); + Assert.assertEquals( + aggs.stream() + .map(aggr -> ((TermsAggregationBuilder) aggr).field()) + .collect(Collectors.toSet()), + Set.of("structuredProperties.ab_fgh_ten", "structuredProperties.hello")); + } + + @Test + public void testAggregateOverFieldsAndStructProp() { + SearchableAnnotation annotation1 = + new SearchableAnnotation( + "test1", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.empty(), + Optional.of("Has Test"), + 1.0, + 
Optional.of("hasTest1"), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList(), + false); + + SearchableAnnotation annotation2 = + new SearchableAnnotation( + "test2", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.of("Test Filter"), + Optional.empty(), + 1.0, + Optional.empty(), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList(), + false); + + SearchConfiguration config = new SearchConfiguration(); + config.setMaxTermBucketSize(25); + + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2)); + + // Aggregate over fields and structured properties + List aggs = + builder.getAggregations( + ImmutableList.of( + "test1", + "test2", + "hasTest1", + "structuredProperties.ab.fgh.ten", + "structuredProperties.hello")); + Assert.assertEquals(aggs.size(), 5); + Set facets = + aggs.stream() + .map(aggB -> ((TermsAggregationBuilder) aggB).field()) + .collect(Collectors.toSet()); + Assert.assertEquals( + facets, + ImmutableSet.of( + "test1.keyword", + "test2.keyword", + "hasTest1", + "structuredProperties.ab_fgh_ten", + "structuredProperties.hello")); + } + @Test public void testMissingAggregation() { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java index 105ee2652dc30..47d18fe0d299c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.search.query.request; +import static com.linkedin.metadata.Constants.*; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.config.search.CustomConfiguration; @@ -30,6 +32,14 @@ public class CustomizedQueryHandlerTest { static { try { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + TEST_MAPPER + .getFactory() + .setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSize).build()); CustomConfiguration customConfiguration = new CustomConfiguration(); customConfiguration.setEnabled(true); customConfiguration.setFile("search_config_test.yml"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java index 8cb28d3658ee7..38d630bc302f4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java @@ -140,7 +140,8 @@ public void testQueryBuilderFulltext() { "urn.delimited", 7.0f, "textArrayField.delimited", 0.4f, "nestedArrayStringField.delimited", 0.4f, - "wordGramField.delimited", 0.4f)); + "wordGramField.delimited", 0.4f, + "customProperties.delimited", 0.4f)); BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(1); assertTrue(boolPrefixQuery.should().size() > 0); @@ -165,7 +166,7 @@ public void 
testQueryBuilderFulltext() { }) .collect(Collectors.toList()); - assertEquals(prefixFieldWeights.size(), 28); + assertEquals(prefixFieldWeights.size(), 29); List.of( Pair.of("urn", 100.0f), @@ -200,7 +201,7 @@ public void testQueryBuilderStructured() { assertEquals(keywordQuery.queryString(), "testQuery"); assertNull(keywordQuery.analyzer()); Map keywordFields = keywordQuery.fields(); - assertEquals(keywordFields.size(), 21); + assertEquals(keywordFields.size(), 22); assertEquals(keywordFields.get("keyPart1").floatValue(), 10.0f); assertFalse(keywordFields.containsKey("keyPart3")); assertEquals(keywordFields.get("textFieldOverride").floatValue(), 1.0f); @@ -360,7 +361,7 @@ public void testGetStandardFieldsEntitySpec() { public void testGetStandardFields() { Set fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec())); - assertEquals(fieldConfigs.size(), 21); + assertEquals(fieldConfigs.size(), 22); assertEquals( fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( @@ -384,7 +385,8 @@ public void testGetStandardFields() { "wordGramField.wordGrams3", "textFieldOverride.delimited", "urn", - "wordGramField.wordGrams2")); + "wordGramField.wordGrams2", + "customProperties.delimited")); // customProperties.delimited Saas only assertEquals( fieldConfigs.stream() @@ -467,9 +469,9 @@ public void testGetStandardFields() { fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields( ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec)); - // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the + // Same 22 from the original entity + newFieldNotInOriginal + 3 word gram fields from the // textFieldOverride - assertEquals(fieldConfigs.size(), 26); + assertEquals(fieldConfigs.size(), 27); assertEquals( fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( @@ -498,7 +500,8 @@ public void testGetStandardFields() { "fieldDoesntExistInOriginal.delimited", "textFieldOverride.wordGrams2", "textFieldOverride.wordGrams3", - "textFieldOverride.wordGrams4")); + "textFieldOverride.wordGrams4", + "customProperties.delimited")); // Field which only exists in first one: Should be the same assertEquals( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java index 03abd9ffe29d7..980b82194536e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java @@ -252,4 +252,75 @@ public void testGetQueryBuilderFromCriterionFieldToExpand() { + "}"; Assert.assertEquals(result.toString(), expected); } + + @Test + public void testGetQueryBuilderFromStructPropEqualsValue() { + + final Criterion singleValueCriterion = + new Criterion() + .setField("structuredProperties.ab.fgh.ten") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1"))); + + QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + String expected = + "{\n" + + " \"terms\" : {\n" + + " \"structuredProperties.ab_fgh_ten\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"structuredProperties.ab_fgh_ten\"\n" + + " }\n" + + "}"; + Assert.assertEquals(result.toString(), expected); + } + + @Test + public void testGetQueryBuilderFromStructPropExists() { + final Criterion 
singleValueCriterion = + new Criterion().setField("structuredProperties.ab.fgh.ten").setCondition(Condition.EXISTS); + + QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + String expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"structuredProperties.ab_fgh_ten\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"structuredProperties.ab_fgh_ten\"\n" + + " }\n" + + "}"; + Assert.assertEquals(result.toString(), expected); + + // No diff in the timeseries field case for this condition. + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); + + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; + Assert.assertEquals(result.toString(), expected); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java index 58ea020e42565..a22a774065852 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java @@ -10,9 +10,9 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.LineageSearchResult; diff --git a/metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json b/metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json new file mode 100644 index 0000000000000..e68cbbd9aeff0 --- /dev/null +++ b/metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json @@ -0,0 +1,145 @@ +{ + "on": { + "types": ["dataset", "container", "dataJob", "dataFlow", "chart", "dashboard"], + "conditions": { + "or": [ + { + "or": [ + { + "property": "forms.incompleteForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + }, + { + "property": "forms.completedForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + } + ] + }, + { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dataset"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test"] + } + ] + }, + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": 
["urn:li:dataPlatform:snowflake"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test-2"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dashboard"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test-2"] + } + ] + } + ] + } + ] + } + }, + "rules": { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dataset"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test"] + } + ] + }, + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:snowflake"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test-2"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dashboard"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test-2"] + } + ] + } + ] + }, + "actions": { + "passing": [ + { + "type": "ASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ], + "failing": [ + { + "type": "UNASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ] + } +} \ No newline at end of file diff --git a/metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json b/metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json new file mode 100644 index 0000000000000..a09fbc801414c --- /dev/null +++ b/metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json @@ -0,0 +1,67 @@ +{ + "on": { + "types": ["dataset", "container", "dataJob", "dataFlow", "chart", "dashboard"], + "conditions": { + "or": [ + { + "or": [ + { + "property": "forms.incompleteForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + }, + { + "property": "forms.completedForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + } + ] + }, + { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + } + ] + } + ] + } + ] + } + }, + "rules": { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + } + ] + } + ] + }, + "actions": { + "passing": [ + { + "type": "ASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ], + "failing": [ + { + "type": "UNASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ] + } +} \ No newline at end of file diff --git a/metadata-io/src/test/resources/forms/form_prompt_test_definition.json b/metadata-io/src/test/resources/forms/form_prompt_test_definition.json new file mode 100644 index 0000000000000..d797db7e25180 --- /dev/null +++ b/metadata-io/src/test/resources/forms/form_prompt_test_definition.json @@ -0,0 +1,39 @@ +{ + "on": { + "types": ["dataset", "container", "dataJob", "dataFlow", "chart", "dashboard"], + "conditions": { + "or": [ + { + "property": "forms.incompleteForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + }, + { + "property": "forms.completedForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + } + ] + } + }, + "rules": { + 
"and": [ + { + "property": "structuredProperties.urn:li:structuredProperty:test.id", + "operator": "exists" + } + ] + }, + "actions": { + "passing": [], + "failing": [ + { + "type": "SET_FORM_PROMPT_INCOMPLETE", + "params": { + "formUrn": "urn:li:form:test", + "formPromptId": "test-id" + } + } + ] + } +} \ No newline at end of file diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index e695788e09726..ae208c053d69f 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -14,14 +14,14 @@ exclude = {ElasticsearchRestClientAutoConfiguration.class, CassandraAutoConfiguration.class}) @ComponentScan( basePackages = { - // "com.linkedin.gms.factory.config", - // "com.linkedin.gms.factory.common", "com.linkedin.gms.factory.kafka", "com.linkedin.metadata.boot.kafka", "com.linkedin.metadata.kafka", "com.linkedin.metadata.dao.producer", "com.linkedin.gms.factory.config", "com.linkedin.gms.factory.entity.update.indices", + "com.linkedin.gms.factory.entityclient", + "com.linkedin.gms.factory.form", "com.linkedin.gms.factory.timeline.eventgenerator", "io.datahubproject.metadata.jobs.common.health.kafka" }, diff --git a/metadata-jobs/mae-consumer-job/src/main/resources/application.properties b/metadata-jobs/mae-consumer-job/src/main/resources/application.properties index 7df61c93ab66d..f8b979e6fbac0 100644 --- a/metadata-jobs/mae-consumer-job/src/main/resources/application.properties +++ b/metadata-jobs/mae-consumer-job/src/main/resources/application.properties @@ -3,4 +3,4 @@ management.endpoints.web.exposure.include=metrics, health, info spring.mvc.servlet.path=/ management.health.elasticsearch.enabled=false management.health.neo4j.enabled=false -entityClient.preferredImpl=restli +entityClient.impl=restli diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index 7135e4e44d459..b409a41600bd7 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -1,6 +1,5 @@ package com.linkedin.metadata.kafka; -import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.entity.EntityServiceImpl; @@ -22,8 +21,6 @@ public class MaeConsumerApplicationTestConfiguration { @MockBean private EntityServiceImpl _entityServiceImpl; - @MockBean private SystemRestliEntityClient restliEntityClient; - @MockBean private Database ebeanServer; @MockBean private EntityRegistry entityRegistry; diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java index f2eeef6e2c8e6..278c52030b5fc 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ 
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java @@ -9,6 +9,7 @@ import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; +import com.linkedin.metadata.kafka.hook.form.FormAssignmentHook; import com.linkedin.metadata.kafka.hook.ingestion.IngestionSchedulerHook; import com.linkedin.metadata.kafka.hook.siblings.SiblingAssociationHook; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -36,7 +37,8 @@ IngestionSchedulerHook.class, EntityChangeEventGeneratorHook.class, KafkaEventConsumerFactory.class, - SiblingAssociationHook.class + SiblingAssociationHook.class, + FormAssignmentHook.class }) @EnableKafka public class MetadataChangeLogProcessor { @@ -95,6 +97,7 @@ public void consume(final ConsumerRecord consumerRecord) // Here - plug in additional "custom processor hooks" for (MetadataChangeLogHook hook : this.hooks) { if (!hook.isEnabled()) { + log.debug(String.format("Skipping disabled hook %s", hook.getClass())); continue; } try (Timer.Context ignored = @@ -102,7 +105,7 @@ public void consume(final ConsumerRecord consumerRecord) .time()) { hook.invoke(event); } catch (Exception e) { - // Just skip this hook and continue. - Note that this represents "at most once" + // Just skip this hook and continue. - Note that this represents "at most once"// // processing. MetricUtils.counter(this.getClass(), hook.getClass().getSimpleName() + "_failure").inc(); log.error( diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java index 036968f9f6759..d8a959c0be624 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java @@ -1,23 +1,17 @@ package com.linkedin.metadata.kafka.config; import com.google.common.collect.ImmutableSet; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.kafka.hydrator.EntityHydrator; import com.linkedin.metadata.models.registry.EntityRegistry; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; @Configuration -@Import({RestliEntityClientFactory.class}) public class EntityHydratorConfig { - @Autowired - @Qualifier("systemRestliEntityClient") - private SystemRestliEntityClient _entityClient; + @Autowired private SystemEntityClient entityClient; @Autowired private EntityRegistry _entityRegistry; @@ -34,6 +28,6 @@ public class EntityHydratorConfig { @Bean public EntityHydrator getEntityHydrator() { - return new EntityHydrator(_entityRegistry, _entityClient); + return new EntityHydrator(_entityRegistry, entityClient); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java 
index f3b5a09708cee..375d1580dab51 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java @@ -6,8 +6,7 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.metadata.Constants; import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; @@ -43,7 +42,7 @@ */ @Slf4j @Component -@Import({EntityRegistryFactory.class, RestliEntityClientFactory.class}) +@Import({EntityRegistryFactory.class}) public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { /** The list of aspects that are supported for generating semantic change events. */ @@ -78,7 +77,7 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { ImmutableSet.of("CREATE", "UPSERT", "DELETE"); private final EntityChangeEventGeneratorRegistry _entityChangeEventGeneratorRegistry; - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient _entityClient; private final EntityRegistry _entityRegistry; private final Boolean _isEnabled; @@ -86,7 +85,7 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { public EntityChangeEventGeneratorHook( @Nonnull @Qualifier("entityChangeEventGeneratorRegistry") final EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry, - @Nonnull final SystemRestliEntityClient entityClient, + @Nonnull final SystemEntityClient entityClient, @Nonnull final EntityRegistry entityRegistry, @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled) { _entityChangeEventGeneratorRegistry = diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java new file mode 100644 index 0000000000000..91e8e186b07f7 --- /dev/null +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java @@ -0,0 +1,130 @@ +package com.linkedin.metadata.kafka.hook.form; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; +import com.linkedin.gms.factory.form.FormServiceFactory; +import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; +import com.linkedin.metadata.service.FormService; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeLog; +import java.util.Objects; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.inject.Singleton; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Import; +import org.springframework.stereotype.Component; + +/** + * This hook is used for assigning / un-assigning forms for specific entities. + * + *
+ * <p>Specifically, this hook performs the following operations:
+ *
+ * <p>1. When a new dynamic form assignment is created, an automation (metadata test) with the
+ * form urn embedded is automatically generated, which is responsible for assigning the form to
+ * any entities in the target set. It will also attempt to remove the form from any failing
+ * entities.
+ *
+ * <p>2. When a new form is created, or an existing one is updated, an automation (metadata test)
+ * is generated for each prompt in the form, verifying that the entities the form is assigned to
+ * comply with that prompt. When they do NOT, the automation marks the prompt as incomplete.
+ *
+ * <p>3. When a form is hard deleted, any automations used for assigning the form, or validating
+ * prompts, are automatically deleted.
+ *
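+ * <p>For context, a minimal sketch of how the MCL consumer drives hooks like this one, mirroring
+ * the hook loop in MetadataChangeLogProcessor elsewhere in this patch (event construction
+ * elided):
+ *
+ * <pre>{@code
+ * for (MetadataChangeLogHook hook : hooks) {
+ *   if (!hook.isEnabled()) {
+ *     continue; // disabled hooks are skipped entirely
+ *   }
+ *   hook.invoke(event); // FormAssignmentHook reacts only to form-related aspects
+ * }
+ * }</pre>
+ *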
+ * <p>Note that currently Datasets, Dashboards, Charts, Data Jobs, Data Flows, and Containers are
+ * the only asset types supported for this hook.
+ *
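+ * <p>As a hypothetical starting point for the unit-test TODO below (a sketch, not part of this
+ * patch: it assumes a Mockito-mocked FormService, plus the UrnUtils and GenericRecordUtils
+ * helpers used elsewhere in this change):
+ *
+ * <pre>{@code
+ * FormService formService = Mockito.mock(FormService.class);
+ * FormAssignmentHook hook = new FormAssignmentHook(formService, true);
+ * MetadataChangeLog event = new MetadataChangeLog();
+ * event.setEntityType(FORM_ENTITY_NAME);
+ * event.setEntityUrn(UrnUtils.getUrn("urn:li:form:test"));
+ * event.setChangeType(ChangeType.UPSERT);
+ * event.setAspectName(DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME);
+ * event.setAspect(GenericRecordUtils.serializeAspect(new DynamicFormAssignment()));
+ * hook.invoke(event); // should route to handleFormFilterUpdated
+ * Mockito.verify(formService, Mockito.times(1))
+ *     .upsertFormAssignmentRunner(Mockito.any(), Mockito.any());
+ * }</pre>
+ *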
+ * <p>TODO: In the future, let's decide whether we want to support automations that auto-mark form
+ * prompts as "completed" when they do in fact have the correct metadata (without the user needing
+ * to explicitly fill out a form prompt response).
+ *
+ * <p>TODO: Write a unit test for this class.
+ */
+@Slf4j
+@Component
+@Singleton
+@Import({FormServiceFactory.class, SystemAuthenticationFactory.class})
+public class FormAssignmentHook implements MetadataChangeLogHook {
+
+  private static final Set<ChangeType> SUPPORTED_UPDATE_TYPES =
+      ImmutableSet.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.RESTATE);
+
+  private final FormService _formService;
+  private final boolean _isEnabled;
+
+  @Autowired
+  public FormAssignmentHook(
+      @Nonnull final FormService formService,
+      @Nonnull @Value("${forms.hook.enabled:true}") Boolean isEnabled) {
+    _formService = Objects.requireNonNull(formService, "formService is required");
+    _isEnabled = isEnabled;
+  }
+
+  @Override
+  public void init() {}
+
+  @Override
+  public boolean isEnabled() {
+    return _isEnabled;
+  }
+
+  @Override
+  public void invoke(@Nonnull final MetadataChangeLog event) {
+    if (_isEnabled && isEligibleForProcessing(event)) {
+      if (isFormDynamicFilterUpdated(event)) {
+        handleFormFilterUpdated(event);
+      }
+    }
+  }
+
+  /** Handles a form filter update by upserting the targeting automation for it. */
+  private void handleFormFilterUpdated(@Nonnull final MetadataChangeLog event) {
+    // 1. Get the new form assignment
+    DynamicFormAssignment formFilters =
+        GenericRecordUtils.deserializeAspect(
+            event.getAspect().getValue(),
+            event.getAspect().getContentType(),
+            DynamicFormAssignment.class);
+
+    // 2. Register an automation to assign it.
+    _formService.upsertFormAssignmentRunner(event.getEntityUrn(), formFilters);
+  }
+
+  /**
+   * Returns true if the event should be processed, which is only true if the change is on a
+   * form-related aspect.
+   */
+  private boolean isEligibleForProcessing(@Nonnull final MetadataChangeLog event) {
+    return isFormPromptSetUpdated(event)
+        || isFormDynamicFilterUpdated(event)
+        || isFormDeleted(event);
+  }
+
+  /** Returns true if a form is being hard-deleted. */
+  private boolean isFormDeleted(@Nonnull final MetadataChangeLog event) {
+    return FORM_ENTITY_NAME.equals(event.getEntityType())
+        && ChangeType.DELETE.equals(event.getChangeType())
+        && FORM_KEY_ASPECT_NAME.equals(event.getAspectName());
+  }
+
+  /** Returns true if the event represents an update to the prompt set of a form. */
+  private boolean isFormPromptSetUpdated(@Nonnull final MetadataChangeLog event) {
+    return FORM_ENTITY_NAME.equals(event.getEntityType())
+        && SUPPORTED_UPDATE_TYPES.contains(event.getChangeType())
+        && FORM_INFO_ASPECT_NAME.equals(event.getAspectName());
+  }
+
+  /** Returns true if the event represents an update to the dynamic filter for a form.
*/ + private boolean isFormDynamicFilterUpdated(@Nonnull final MetadataChangeLog event) { + return FORM_ENTITY_NAME.equals(event.getEntityType()) + && SUPPORTED_UPDATE_TYPES.contains(event.getChangeType()) + && DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME.equals(event.getAspectName()); + } +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 67198d13772a3..7a1aaa7f6a056 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -14,9 +14,9 @@ import com.linkedin.dataset.UpstreamArray; import com.linkedin.dataset.UpstreamLineage; import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.search.EntitySearchServiceFactory; import com.linkedin.metadata.Constants; @@ -72,14 +72,14 @@ public class SiblingAssociationHook implements MetadataChangeLogHook { public static final String SOURCE_SUBTYPE_V2 = "Source"; private final EntityRegistry _entityRegistry; - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient _entityClient; private final EntitySearchService _searchService; private final boolean _isEnabled; @Autowired public SiblingAssociationHook( @Nonnull final EntityRegistry entityRegistry, - @Nonnull final SystemRestliEntityClient entityClient, + @Nonnull final SystemEntityClient entityClient, @Nonnull final EntitySearchService searchService, @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) { _entityRegistry = entityRegistry; diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java index 7a8fdd11fac43..6ad7cdbcad3e6 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; @@ -24,7 +24,7 @@ public class EntityHydrator { private final EntityRegistry _entityRegistry; - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; private final ChartHydrator _chartHydrator = new ChartHydrator(); private final CorpUserHydrator _corpUserHydrator = new CorpUserHydrator(); private final DashboardHydrator _dashboardHydrator = new DashboardHydrator(); @@ -55,7 +55,7 @@ public Optional getHydratedEntity(String entityTypeName, String urn) 
.collect(Collectors.toSet())) .orElse(Set.of()); entityResponse = - _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); + entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); } catch (RemoteInvocationException | URISyntaxException e) { log.error("Error while calling GMS to hydrate entity for urn {}", urn); return Optional.empty(); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java index a227668e22e9b..89ad6105be9cb 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java @@ -28,6 +28,7 @@ import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import com.linkedin.metadata.client.EntityClientAspectRetriever; import com.linkedin.metadata.config.SystemUpdateConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.graph.Edge; @@ -121,9 +122,10 @@ public void setupTest() { _mockEntitySearchService, _mockTimeseriesAspectService, _mockSystemMetadataService, - ENTITY_REGISTRY, _searchDocumentTransformer, _mockEntityIndexBuilders); + _updateIndicesService.initializeAspectRetriever( + EntityClientAspectRetriever.builder().entityRegistry(ENTITY_REGISTRY).build()); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); } @@ -198,9 +200,10 @@ public void testInputFieldsEdgesAreAdded() throws Exception { _mockEntitySearchService, _mockTimeseriesAspectService, _mockSystemMetadataService, - mockEntityRegistry, _searchDocumentTransformer, _mockEntityIndexBuilders); + _updateIndicesService.initializeAspectRetriever( + EntityClientAspectRetriever.builder().entityRegistry(mockEntityRegistry).build()); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); _updateIndicesHook.invoke(event); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java index 8400e19ce49a3..021186404b2cb 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java @@ -41,7 +41,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.DatasetKey; @@ -93,14 +93,14 @@ public class EntityChangeEventGeneratorHookTest { private static final String TEST_DATA_JOB_URN = "urn:li:dataJob:job"; private Urn actorUrn; - private SystemRestliEntityClient _mockClient; + private SystemEntityClient _mockClient; private EntityService _mockEntityService; private EntityChangeEventGeneratorHook _entityChangeEventHook; @BeforeMethod public void setupTest() 
throws URISyntaxException { actorUrn = Urn.createFromString(TEST_ACTOR_URN); - _mockClient = Mockito.mock(SystemRestliEntityClient.class); + _mockClient = Mockito.mock(SystemEntityClient.class); _mockEntityService = Mockito.mock(EntityService.class); EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = createEntityChangeEventGeneratorRegistry(); @@ -776,12 +776,12 @@ private EntityRegistry createMockEntityRegistry() { } private void verifyProducePlatformEvent( - SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception { + SystemEntityClient mockClient, PlatformEvent platformEvent) throws Exception { verifyProducePlatformEvent(mockClient, platformEvent, true); } private void verifyProducePlatformEvent( - SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) + SystemEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) throws Exception { // Verify event has been emitted. verify(mockClient, Mockito.times(1)) diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index d4c6d122a6689..3823668adeace 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -21,7 +21,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; @@ -41,7 +41,7 @@ public class SiblingAssociationHookTest { private SiblingAssociationHook _siblingAssociationHook; - SystemRestliEntityClient _mockEntityClient; + SystemEntityClient _mockEntityClient; EntitySearchService _mockSearchService; @BeforeMethod @@ -51,7 +51,7 @@ public void setupTest() { SiblingAssociationHookTest.class .getClassLoader() .getResourceAsStream("test-entity-registry-siblings.yml")); - _mockEntityClient = Mockito.mock(SystemRestliEntityClient.class); + _mockEntityClient = Mockito.mock(SystemEntityClient.class); _mockSearchService = Mockito.mock(EntitySearchService.class); _siblingAssociationHook = new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index 44b2ce54e19c8..fc47679bebd39 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -1,8 +1,11 @@ package com.linkedin.metadata.kafka.hook.spring; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Authentication; import com.datahub.metadata.ingestion.IngestionScheduler; -import com.linkedin.entity.client.SystemRestliEntityClient; +import 
com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; @@ -14,7 +17,9 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.apache.avro.generic.GenericRecord; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; @@ -40,12 +45,18 @@ public class MCLSpringTestConfiguration { @MockBean public IngestionScheduler ingestionScheduler; - @MockBean(name = "systemRestliEntityClient") - public SystemRestliEntityClient entityClient; + @Bean + public SystemEntityClient systemEntityClient( + @Qualifier("systemAuthentication") Authentication systemAuthentication) { + SystemEntityClient systemEntityClient = mock(SystemEntityClient.class); + when(systemEntityClient.getSystemAuthentication()).thenReturn(systemAuthentication); + return systemEntityClient; + } @MockBean public ElasticSearchService searchService; - @MockBean public Authentication systemAuthentication; + @MockBean(name = "systemAuthentication") + public Authentication systemAuthentication; @MockBean(name = "dataHubUpgradeKafkaListener") public DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index 181a723e1cd25..1210bf37059b4 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -1,6 +1,5 @@ package com.linkedin.metadata.kafka; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import org.springframework.boot.SpringApplication; @@ -22,6 +21,7 @@ "com.linkedin.gms.factory.config", "com.linkedin.gms.factory.entity", "com.linkedin.gms.factory.entityregistry", + "com.linkedin.gms.factory.entityclient", "com.linkedin.gms.factory.kafka", "com.linkedin.gms.factory.search", "com.linkedin.gms.factory.secret", @@ -30,12 +30,14 @@ "com.linkedin.metadata.restli", "com.linkedin.metadata.kafka", "com.linkedin.metadata.dao.producer", + "com.linkedin.gms.factory.form", + "com.linkedin.metadata.dao.producer", "io.datahubproject.metadata.jobs.common.health.kafka" }, excludeFilters = { @ComponentScan.Filter( type = FilterType.ASSIGNABLE_TYPE, - classes = {ScheduledAnalyticsFactory.class, RestliEntityClientFactory.class}) + classes = {ScheduledAnalyticsFactory.class}) }) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MceConsumerApplication { diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java index 
a4747c72c20fa..b41e6bc75af19 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java @@ -1,12 +1,8 @@ package com.linkedin.metadata.restli; import com.datahub.auth.authentication.filter.AuthenticationFilter; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; -import com.linkedin.parseq.retry.backoff.ExponentialBackoff; -import com.linkedin.restli.client.Client; import com.linkedin.restli.server.RestliHandlerServlet; -import java.net.URI; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.web.servlet.FilterRegistrationBean; @@ -14,7 +10,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -import org.springframework.context.annotation.Primary; @Configuration @Import({SystemAuthenticationFactory.class}) @@ -29,14 +24,6 @@ public class RestliServletConfig { @Value("${entityClient.numRetries:3}") private int numRetries; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = String.format("http://localhost:%s/gms/", configuredPort); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); - } - @Bean("restliServletRegistration") public ServletRegistrationBean restliServletRegistration( RestliHandlerServlet servlet) { diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index 6d19db97fb39f..bce8664689e2c 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -22,7 +22,7 @@ public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests @Autowired private TestRestTemplate restTemplate; - @Autowired private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; @Autowired private KafkaHealthIndicator kafkaHealthIndicator; diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index 1a44265c7a92a..93a6ae8fb4797 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -1,7 +1,10 @@ package com.linkedin.metadata.kafka; -import com.linkedin.entity.client.RestliEntityClient; +import com.datahub.authentication.Authentication; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; +import com.linkedin.gms.factory.config.ConfigurationProvider; import 
com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.SiblingGraphService; @@ -15,6 +18,7 @@ import io.ebean.Database; import java.net.URI; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.boot.test.web.client.TestRestTemplate; @@ -30,14 +34,21 @@ public class MceConsumerApplicationTestConfiguration { @MockBean public KafkaHealthChecker kafkaHealthChecker; - @MockBean public EntityService _entityService; + @MockBean public EntityService _entityService; - @Bean("restliEntityClient") + @Bean @Primary - public RestliEntityClient restliEntityClient() { + public SystemEntityClient systemEntityClient( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Qualifier("systemAuthentication") final Authentication systemAuthentication) { String selfUri = restTemplate.getRootUri(); final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); + return new SystemRestliEntityClient( + restClient, + new ExponentialBackoff(1), + 1, + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); } @MockBean public Database ebeanServer; diff --git a/metadata-jobs/mce-consumer/build.gradle b/metadata-jobs/mce-consumer/build.gradle index 5fa65c06de714..49604924acb68 100644 --- a/metadata-jobs/mce-consumer/build.gradle +++ b/metadata-jobs/mce-consumer/build.gradle @@ -53,4 +53,4 @@ processResources.dependsOn avroSchemaSources clean { project.delete("src/main/resources/avro") -} \ No newline at end of file +} diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java index e22a8ba813704..352fa93f56a04 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java @@ -5,8 +5,8 @@ import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; import com.linkedin.entity.Entity; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; @@ -48,7 +48,7 @@ public class MetadataChangeEventsProcessor { @NonNull private final Authentication systemAuthentication; - private final SystemRestliEntityClient entityClient; + private final SystemEntityClient entityClient; private final Producer kafkaProducer; private final Histogram kafkaLagStats = diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java index 26d5f66f4929a..a4f5a287bc8fd 100644 --- 
a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java @@ -3,8 +3,8 @@ import com.codahale.metrics.Histogram; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; @@ -42,7 +42,7 @@ @RequiredArgsConstructor public class MetadataChangeProposalsProcessor { - private final SystemRestliEntityClient entityClient; + private final SystemEntityClient entityClient; private final Producer kafkaProducer; private final Histogram kafkaLagStats = diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java index b61858aef22cd..955d5c67c09a7 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java @@ -46,7 +46,7 @@ public PlatformEventProcessor() { public void consume(final ConsumerRecord consumerRecord) { try (Timer.Context i = MetricUtils.timer(this.getClass(), "consume").time()) { - log.info("Consuming a Platform Event"); + log.debug("Consuming a Platform Event"); kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); diff --git a/metadata-models-custom/README.md b/metadata-models-custom/README.md index 94399a67806a6..10801c3d8ed23 100644 --- a/metadata-models-custom/README.md +++ b/metadata-models-custom/README.md @@ -396,6 +396,26 @@ public class CustomDataQualityRulesMCLSideEffect extends MCLSideEffect { return timeseriesOptional.stream(); } + + private Optional buildEvent(MetadataChangeLog originMCP) { + if (originMCP.getAspect() != null) { + DataQualityRuleEvent event = new DataQualityRuleEvent(); + if (event.getActor() != null) { + event.setActor(event.getActor()); + } + event.setEventTimestamp(originMCP.getSystemMetadata().getLastObserved()); + event.setTimestampMillis(originMCP.getSystemMetadata().getLastObserved()); + if (originMCP.getPreviousAspectValue() == null) { + event.setEventType("RuleCreated"); + } else { + event.setEventType("RuleUpdated"); + } + event.setAffectedDataset(originMCP.getEntityUrn()); + + return Optional.of(event); + } + return Optional.empty(); + } } ``` diff --git a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java index a8735bae1521a..ba72a97908846 100644 --- a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java +++ b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import 
com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.ebean.batch.MCLBatchItemImpl; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeLog; import com.mycompany.dq.DataQualityRuleEvent; @@ -20,9 +19,7 @@ public CustomDataQualityRulesMCLSideEffect(AspectPluginConfig config) { @Override protected Stream applyMCLSideEffect( - @Nonnull MCLBatchItem input, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + @Nonnull MCLBatchItem input, @Nonnull AspectRetriever aspectRetriever) { // Generate Timeseries event aspect based on non-Timeseries aspect MetadataChangeLog originMCP = input.getMetadataChangeLog(); @@ -42,9 +39,7 @@ protected Stream applyMCLSideEffect( }) .map( eventMCP -> - MCLBatchItemImpl.builder() - .metadataChangeLog(eventMCP) - .build(entityRegistry, aspectRetriever)); + MCLBatchItemImpl.builder().metadataChangeLog(eventMCP).build(aspectRetriever)); return timeseriesOptional.stream(); } diff --git a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java index 2c989725f4f9d..d2041c443503e 100644 --- a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java +++ b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -18,7 +17,7 @@ public CustomDataQualityRulesMCPSideEffect(AspectPluginConfig aspectPluginConfig @Override protected Stream applyMCPSideEffect( - UpsertItem input, EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever) { + UpsertItem input, @Nonnull AspectRetriever aspectRetriever) { // Mirror aspects to another URN in SQL & Search Urn mirror = UrnUtils.getUrn(input.getUrn().toString().replace(",PROD)", ",DEV)")); return Stream.of( @@ -28,6 +27,6 @@ protected Stream applyMCPSideEffect( .aspect(input.getAspect()) .auditStamp(input.getAuditStamp()) .systemMetadata(input.getSystemMetadata()) - .build(entityRegistry, aspectRetriever)); + .build(aspectRetriever)); } } diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index 04c90fa444f0c..86f404adb7fef 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -43,11 +43,10 @@ mainAvroSchemaJar.dependsOn generateAvroSchema pegasus.main.generationModes = [PegasusGenerationMode.PEGASUS, PegasusGenerationMode.AVRO] -tasks.register('generateJsonSchema', GenerateJsonSchemaTask) { +task generateJsonSchema(type: GenerateJsonSchemaTask, dependsOn: 'generateAvroSchema') { it.setInputDirectory("$projectDir/src/mainGeneratedAvroSchema") it.setOutputDirectory("$projectDir/src/generatedJsonSchema") it.setEntityRegistryYaml("${project(':metadata-models').projectDir}/src/main/resources/entity-registry.yml") - dependsOn generateAvroSchema } // https://github.com/int128/gradle-swagger-generator-plugin#task-type-generateswaggercode diff 
--git a/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl index 8390a05846c83..cc70bb5c60fc6 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl @@ -9,6 +9,7 @@ record CustomProperties { */ @Searchable = { "/*": { + "fieldType": "TEXT", "queryByDefault": true } } diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl new file mode 100644 index 0000000000000..d05f2308d82a5 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl @@ -0,0 +1,17 @@ +namespace com.linkedin.common + +/** + * Information about the status of a particular prompt for a specific schema field + * on an entity. + */ +record FieldFormPromptAssociation { + /** + * The field path on a schema field. + */ + fieldPath: string + + /** + * The last time this prompt was touched for the field on the entity (set, unset) + */ + lastModified: AuditStamp +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl new file mode 100644 index 0000000000000..558672478c19b --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.common + +/** + * Properties of an applied form. + */ +record FormAssociation { + /** + * Urn of the applied form + */ + urn: Urn + + /** + * A list of prompts that are not yet complete for this form. + */ + incompletePrompts: array[FormPromptAssociation] = [] + + /** + * A list of prompts that have been completed for this form. + */ + completedPrompts: array[FormPromptAssociation] = [] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl new file mode 100644 index 0000000000000..ee0f1041e23c4 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl @@ -0,0 +1,23 @@ +namespace com.linkedin.common + +/** + * Information about the status of a particular prompt. + * Note that this is where we can add additional information about individual responses: + * actor, timestamp, and the response itself. + */ +record FormPromptAssociation { + /** + * The id for the prompt. This must be GLOBALLY UNIQUE. + */ + id: string + + /** + * The last time this prompt was touched for the entity (set, unset) + */ + lastModified: AuditStamp + + /** + * Optional information about the field-level prompt associations. + */ + fieldAssociations: optional FormPromptFieldAssociations +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl new file mode 100644 index 0000000000000..419aa8aa3921d --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl @@ -0,0 +1,16 @@ +namespace com.linkedin.common + +/** + * Information about the field-level prompt associations on a top-level prompt association. 
+ */ +record FormPromptFieldAssociations { + /** + * A list of field-level prompt associations that are complete for this form. + */ + completedFieldPrompts: optional array[FieldFormPromptAssociation] + + /** + * A list of field-level prompt associations that are not yet complete for this form. + */ + incompleteFieldPrompts: optional array[FieldFormPromptAssociation] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl new file mode 100644 index 0000000000000..066e72f2f2a20 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl @@ -0,0 +1,17 @@ +namespace com.linkedin.common + +/** + * An association between a verification and an entity that has been granted + * via completion of one or more forms of type 'VERIFICATION'. + */ +record FormVerificationAssociation { + /** + * The urn of the form that granted this verification. + */ + form: Urn + + /** + * An audit stamp capturing who applied verification for this form, and when. + */ + lastModified: optional AuditStamp +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl new file mode 100644 index 0000000000000..0a97c7d5099ed --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl @@ -0,0 +1,66 @@ +namespace com.linkedin.common + +/** + * Forms that are assigned to this entity to be filled out + */ +@Aspect = { + "name": "forms" +} +record Forms { + /** + * All incomplete forms assigned to the entity. + */ + @Searchable = { + "/*/urn": { + "fieldType": "URN", + "fieldName": "incompleteForms" + }, + "/*/completedPrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "incompleteFormsCompletedPromptIds", + }, + "/*/incompletePrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "incompleteFormsIncompletePromptIds", + }, + "/*/completedPrompts/*/lastModified/time" : { + "fieldType": "DATETIME", + "fieldName": "incompleteFormsCompletedPromptResponseTimes", + } + } + incompleteForms: array[FormAssociation] + + /** + * All complete forms assigned to the entity. + */ + @Searchable = { + "/*/urn": { + "fieldType": "URN", + "fieldName": "completedForms" + }, + "/*/completedPrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "completedFormsCompletedPromptIds", + }, + "/*/incompletePrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "completedFormsIncompletePromptIds", + }, + "/*/completedPrompts/*/lastModified/time" : { + "fieldType": "DATETIME", + "fieldName": "completedFormsCompletedPromptResponseTimes", + } + } + completedForms: array[FormAssociation] + + /** + * Verifications that have been applied to the entity via completed forms. + */ + @Searchable = { + "/*/form": { + "fieldType": "URN", + "fieldName": "verifiedForms" + } + } + verifications: array[FormVerificationAssociation] = [] +}
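For orientation (editor-added, not part of the patch): with the forms aspect modeled above, a minimal assignment could be built from the generated Pegasus classes roughly as below. The *Array wrapper names, the chainable setters, and the example form urn follow the usual Pegasus code-generation conventions but are assumptions here.

import com.linkedin.common.FormAssociation;
import com.linkedin.common.FormAssociationArray;
import com.linkedin.common.Forms;
import com.linkedin.common.urn.UrnUtils;
import java.util.List;

public class FormsAspectSketch {
  // Builds a "forms" aspect with one not-yet-completed form and nothing verified.
  public static Forms oneIncompleteForm() {
    FormAssociation association =
        new FormAssociation().setUrn(UrnUtils.getUrn("urn:li:form:pii-review")); // example urn
    return new Forms()
        .setIncompleteForms(new FormAssociationArray(List.of(association)))
        .setCompletedForms(new FormAssociationArray());
  }
}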
diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl index 9f0f0ff6f24a2..80dc07981816a 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl @@ -20,8 +20,14 @@ record GlossaryTermAssociation { } urn: GlossaryTermUrn + /** + * The user URN which will be credited for associating this term with the entity + */ + actor: optional Urn + /** * Additional context about the association */ context: optional string + } diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl new file mode 100644 index 0000000000000..c8f1e4d5009dc --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl @@ -0,0 +1,13 @@ +namespace com.linkedin.common + +record PropertyValue { + value: union [ + string, + double + ] + + /** + * Optional description of the property value + */ + description: optional string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl b/metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl new file mode 100644 index 0000000000000..2d09d828d10bd --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl @@ -0,0 +1,87 @@ +namespace com.linkedin.datahub + +/** +* Configuration for how any given field should be indexed and matched in the DataHub search index. +**/ +record DataHubSearchConfig { + + /** + * Name of the field in the search index. Defaults to the field name if not set + **/ + fieldName: optional string + + /** + * Type of the field. Defines how the field is indexed and matched + **/ + fieldType: optional enum SearchFieldType { + KEYWORD, + TEXT, + TEXT_PARTIAL, + BROWSE_PATH, + URN, + URN_PARTIAL, + BOOLEAN, + COUNT, + DATETIME, + OBJECT, + BROWSE_PATH_V2, + WORD_GRAM + } + + /** + * Whether we should match the field for the default search query + **/ + queryByDefault: boolean = false + + /** + * Whether we should use the field for default autocomplete + **/ + enableAutocomplete: boolean = false + + /** + * Whether or not to add the field to filters. + **/ + addToFilters: boolean = false + + /** + * Whether or not to add the "has values" filter. + * TODO: check whether this should be conditional on addToFilters being true + **/ + addHasValuesToFilters: boolean = true + + /** + * Display name of the filter + **/ + filterNameOverride: optional string + + /** + * Display name of the has values filter + **/ + hasValuesFilterNameOverride: optional string + + /** + * Boost multiplier to the match score. Matches on fields with a higher boost score rank higher + **/ + boostScore: double = 1.0 + + /** + * If set, add an index field of the given name that checks whether the field exists + **/ + hasValuesFieldName: optional string + + /** + * If set, add an index field of the given name that checks the number of elements + **/ + numValuesFieldName: optional string + + /** + * (Optional) Weights to apply to score for a given value + **/ + weightsPerFieldValue: optional map[string, double] + + /** + * (Optional) Aliases for this given field that can be used for sorting etc.
+ **/ + fieldNameAliases: optional array[string] + +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl new file mode 100644 index 0000000000000..4e3ea9d01e92d --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.datatype + +@Aspect = { + "name": "dataTypeInfo" +} +record DataTypeInfo { + /** + * The qualified name for the data type. Usually a unique namespace + name, e.g. datahub.string + */ + qualifiedName: string + + /** + * An optional display name for the data type. + */ + displayName: optional string + + /** + * An optional description for the data type. + */ + description: optional string +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl new file mode 100644 index 0000000000000..e0ea2b6974381 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl @@ -0,0 +1,11 @@ +namespace com.linkedin.datatype + +@Aspect = { + "name": "dataTypeKey" +} +record DataTypeKey { + /** + * A unique id for a data type. Usually this will be a unique namespace + data type name. + */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl new file mode 100644 index 0000000000000..3a741a4d8f0b8 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl @@ -0,0 +1,22 @@ +namespace com.linkedin.entitytype + +@Aspect = { + "name": "entityTypeInfo" +} +record EntityTypeInfo { + /** + * The fully qualified name for the entity type, which usually consists of a namespace + * plus an identifier or name, e.g. datahub.dataset + */ + qualifiedName: string + + /** + * The display name for the Entity Type. + */ + displayName: optional string + + /** + * A description for the Entity Type: what is it for? + */ + description: optional string +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl new file mode 100644 index 0000000000000..d857c7ff611e3 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl @@ -0,0 +1,11 @@ +namespace com.linkedin.entitytype + +@Aspect = { + "name": "entityTypeKey" +} +record EntityTypeKey { + /** + * A unique id for an entity type. Usually this will be a unique namespace + entity name. + */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl new file mode 100644 index 0000000000000..93ecf017efb3a --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl @@ -0,0 +1,19 @@ +namespace com.linkedin.form + +import com.linkedin.metadata.query.filter.Filter + +/** + * Information about how a form is assigned to entities dynamically. Provide a filter to + * match a set of entities instead of explicitly applying a form to specific entities. + */ +@Aspect = { + "name": "dynamicFormAssignment" +} +record DynamicFormAssignment { + /** + * The filter applied when assigning this form to entities. 
Entities that match this filter + * will have this form applied to them. Right now this filter only supports filtering by + * platform, entity type, container, and domain through the UI. + */ + filter: Filter +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl new file mode 100644 index 0000000000000..e58eb4c7c56a8 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.form + +import com.linkedin.common.Urn + +record FormActorAssignment { + /** + * Whether the form should be assigned to the owners of assets that it is applied to. + * This is the default. + */ + owners: boolean = true + + /** + * Optional: Specific set of groups that are targeted by this form assignment. + */ + groups: optional array[Urn] + + /** + * Optional: Specific set of users that are targeted by this form assignment. + */ + users: optional array[Urn] +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl new file mode 100644 index 0000000000000..b17bd1537a17c --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl @@ -0,0 +1,51 @@ +namespace com.linkedin.form + +import com.linkedin.common.Urn + +/** + * Information about a form to help with filling out metadata on entities. + */ +@Aspect = { + "name": "formInfo" +} +record FormInfo { + /** + * Display name of the form + */ + @Searchable = { + "fieldType": "TEXT_PARTIAL" + } + name: string + + /** + * Description of the form + */ + description: optional string + + /** + * The type of this form + */ + @Searchable = { + "fieldType": "KEYWORD" + } + type: enum FormType { + /** + * A form simply used for collecting metadata fields for an entity. + */ + COMPLETION + /** + * This form is used for "verifying" that entities comply with a policy via presence of a specific set of metadata fields. + */ + VERIFICATION + } = "COMPLETION" + + /** + * List of prompts to present to the user to encourage filling out metadata + */ + prompts: array[FormPrompt] = [] + + /** + * Who the form is assigned to, e.g. who should see the form when visiting the entity page or governance center + */ + actors: FormActorAssignment = { "owners": true } +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl new file mode 100644 index 0000000000000..73f06552d46ab --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl @@ -0,0 +1,53 @@ +namespace com.linkedin.form + +import com.linkedin.common.Urn + +/** + * A prompt to present to the user to encourage filling out metadata + */ +record FormPrompt { + /** + * The unique id for this prompt. This must be GLOBALLY unique. + */ + id: string + + /** + * The title of this prompt + */ + title: string + + /** + * The description of this prompt + */ + description: optional string + + /** + * The type of prompt + */ + type: enum FormPromptType { + /** + * This prompt is meant to apply a structured property to an entity + */ + STRUCTURED_PROPERTY + /** + * This prompt is meant to apply a structured property to a schema fields entity + */ + FIELDS_STRUCTURED_PROPERTY + } + + /** + * An optional set of information specific to structured properties prompts. 
+ * This should be filled out if the prompt is type STRUCTURED_PROPERTY or FIELDS_STRUCTURED_PROPERTY. + */ + structuredPropertyParams: optional record StructuredPropertyParams { + /** + * The structured property that is required on this entity + */ + urn: Urn + } + + /** + * Whether the prompt is required to be completed, in order for the form to be marked as complete. + */ + required: boolean = true +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl index c3388d4f462d4..b4a6f4b47b221 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl @@ -1,5 +1,6 @@ namespace com.linkedin.glossary +import com.linkedin.common.CustomProperties import com.linkedin.common.GlossaryNodeUrn /** @@ -8,7 +9,7 @@ import com.linkedin.common.GlossaryNodeUrn @Aspect = { "name": "glossaryNodeInfo" } -record GlossaryNodeInfo { +record GlossaryNodeInfo includes CustomProperties { /** * Definition of business node diff --git a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl index e987a71be7131..1de826f1b2aa6 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl @@ -3,6 +3,7 @@ namespace com.linkedin.glossary import com.linkedin.common.Url import com.linkedin.common.GlossaryNodeUrn import com.linkedin.common.CustomProperties +import com.linkedin.schema.PrimitiveValueDataType /** * Properties associated with a GlossaryTerm @@ -76,4 +77,5 @@ record GlossaryTermInfo includes CustomProperties { */ @deprecated rawSchema: optional string + } diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl new file mode 100644 index 0000000000000..124d65d0e7452 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl @@ -0,0 +1,14 @@ +namespace com.linkedin.metadata.key + +/** + * Key for a Form + */ +@Aspect = { + "name": "formKey", +} +record FormKey { + /** + * Unique id for the form. 
+ */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl new file mode 100644 index 0000000000000..93dbb14c7f969 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl @@ -0,0 +1,9 @@ +namespace com.linkedin.structured + +/** +* Represents a stored primitive property value +**/ +typeref PrimitivePropertyValue = union [ + string, + double + ] \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl new file mode 100644 index 0000000000000..012ce5416364f --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl @@ -0,0 +1,10 @@ +namespace com.linkedin.structured + +record PropertyValue { + value: PrimitivePropertyValue + + /** + * Optional description of the property value + */ + description: optional string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl new file mode 100644 index 0000000000000..f79e8fd86e825 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl @@ -0,0 +1,14 @@ +namespace com.linkedin.structured + +/** + * Properties about an entity governed by StructuredPropertyDefinition + */ +@Aspect = { + "name": "structuredProperties" +} +record StructuredProperties { + /** + * The list of structured property value assignments for this entity. + */ + properties: array[StructuredPropertyValueAssignment] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl new file mode 100644 index 0000000000000..1b263b679531a --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl @@ -0,0 +1,74 @@ +namespace com.linkedin.structured + +import com.linkedin.common.Urn +import com.linkedin.datahub.DataHubSearchConfig + +@Aspect = { + "name": "propertyDefinition" +} +record StructuredPropertyDefinition { + /** + * The fully qualified name of the property. e.g. io.acryl.datahub.myProperty + */ + @Searchable = {} + qualifiedName: string + + /** + * The display name of the property. This is the name that will be shown in the UI and can be used to look up the property id. + */ + @Searchable = {} + displayName: optional string + + /** + * The value type of the property. Must be a dataType. + * e.g. To indicate that the property is of type DATE, use urn:li:dataType:datahub.date + */ + valueType: Urn + + /** + * A map that allows for type specialization of the valueType. + * e.g. a valueType of urn:li:dataType:datahub.urn + * can be specialized to be a USER or GROUP URN by adding a typeQualifier like + * { "allowedTypes": ["urn:li:entityType:datahub.corpuser", "urn:li:entityType:datahub.corpGroup"] } + */ + typeQualifier: optional map[string, array[string]] + + /** + * A list of allowed values that the property is allowed to take. + * If this is not specified, then the property can take any value of given type. + */ + allowedValues: optional array[PropertyValue] + + /** + * The cardinality of the property. If not specified, then the property is assumed to be single valued.
+ */ + cardinality: optional enum PropertyCardinality { + SINGLE + MULTIPLE + } = "SINGLE" + + @Relationship = { + "/*": { + "name": "StructuredPropertyOf", + "entityTypes": [ "entityType" ] + } + } + @Searchable = { + "/*": { + "fieldName": "entityTypes" + } + } + entityTypes: array[Urn] + + /** + * The description of the property. This is the description that will be shown in the UI. + */ + description: optional string + + /** + * Search configuration for this property. If not specified, then the property is indexed using the default mapping + * from the logical type. + */ + searchConfiguration: optional DataHubSearchConfig +} + diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl new file mode 100644 index 0000000000000..16fec7b2a5ab6 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl @@ -0,0 +1,11 @@ +namespace com.linkedin.structured + +@Aspect = { + "name": "structuredPropertyKey" +} +record StructuredPropertyKey { + /** + * The id for a structured property. + */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl new file mode 100644 index 0000000000000..d8b8a93a3edb6 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl @@ -0,0 +1,29 @@ +namespace com.linkedin.structured +import com.linkedin.common.Urn +import com.linkedin.common.AuditStamp + +record StructuredPropertyValueAssignment { + + /** + * The property that is being assigned a value. + */ + propertyUrn: Urn + + /** + * The value assigned to the property. + */ + values: array[PrimitivePropertyValue] + + /** + * Audit stamp containing who created this relationship edge and when + */ + created: optional AuditStamp + + /** + * Audit stamp containing who last modified this relationship edge and when + */ + lastModified: optional AuditStamp + +} + +
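A worked illustration (editor-added): a structuredProperties value pairs a propertyUrn with one or more members of the PrimitivePropertyValue union defined above. The sketch assumes the standard generated classes, including the per-member create(...) factories Pegasus emits for union typerefs; the property urn and the 90-day value are made up.

import com.linkedin.common.urn.UrnUtils;
import com.linkedin.structured.PrimitivePropertyValue;
import com.linkedin.structured.PrimitivePropertyValueArray;
import com.linkedin.structured.StructuredProperties;
import com.linkedin.structured.StructuredPropertyValueAssignment;
import com.linkedin.structured.StructuredPropertyValueAssignmentArray;
import java.util.List;

public class StructuredPropertiesSketch {
  // Assigns a single numeric value to a hypothetical "retentionDays" property.
  public static StructuredProperties retentionDays() {
    StructuredPropertyValueAssignment assignment =
        new StructuredPropertyValueAssignment()
            .setPropertyUrn(UrnUtils.getUrn("urn:li:structuredProperty:io.acryl.retentionDays"))
            .setValues(
                new PrimitivePropertyValueArray(List.of(PrimitivePropertyValue.create(90.0))));
    return new StructuredProperties()
        .setProperties(new StructuredPropertyValueAssignmentArray(List.of(assignment)));
  }
}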
diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index 9d8c4bfdab0da..65382c747a16a 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -42,6 +42,8 @@ entities: - dataPlatformInstance - browsePathsV2 - access + - structuredProperties + - forms - name: dataHubPolicy doc: DataHub Policies represent access policies granted to users or groups on metadata operations like edit, view etc. category: internal @@ -67,6 +69,7 @@ entities: - institutionalMemory - dataPlatformInstance - browsePathsV2 + - structuredProperties - subTypes - name: dataFlow category: core @@ -85,6 +88,7 @@ entities: - institutionalMemory - dataPlatformInstance - browsePathsV2 + - structuredProperties - name: dataProcess keyAspect: dataProcessKey aspects: @@ -409,7 +413,8 @@ entities: - name: schemaField category: core keyAspect: schemaFieldKey - aspects: [] + aspects: + - structuredProperties - name: globalSettings doc: Global settings for the platform category: internal @@ -468,5 +473,51 @@ entities: - dataContractProperties - dataContractStatus - status - + - name: entityType + doc: A type of entity in the DataHub Metadata Model. + category: core + keyAspect: entityTypeKey + aspects: + - entityTypeInfo + - institutionalMemory + - status + - name: dataType + doc: A type of data element stored within DataHub. + category: core + keyAspect: dataTypeKey + aspects: + - dataTypeInfo + - institutionalMemory + - status + - name: structuredProperty + doc: Structured Property represents a property meant for extending the core model of a logical entity + category: core + keyAspect: structuredPropertyKey + aspects: + - propertyDefinition + - institutionalMemory + - status + - name: form + category: core + keyAspect: formKey + aspects: + - formInfo + - dynamicFormAssignment + - ownership events: +plugins: + aspectPayloadValidators: - className: 'com.linkedin.metadata.aspect.validation.PropertyDefinitionValidator' + enabled: true + supportedOperations: + - UPSERT + supportedEntityAspectNames: + - entityName: structuredProperty + aspectName: propertyDefinition + - className: 'com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator' + enabled: true + supportedOperations: + - UPSERT + supportedEntityAspectNames: + - entityName: '*' + aspectName: structuredProperties \ No newline at end of file diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index 8ce7675edf580..c4b01fea8c09d 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -28,6 +28,9 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -175,6 +178,17 @@ public void migrateGroupMembershipToNativeGroupMembership( userUrnList.forEach(userUrn -> addUserToNativeGroup(userUrn, groupUrn, authentication)); } + public List<Urn> getGroupsForUser( + @Nonnull final Urn userUrn, @Nonnull final Authentication authentication) throws Exception { + final NativeGroupMembership nativeGroupMembership = + getExistingNativeGroupMembership(userUrn, authentication); + final GroupMembership groupMembership = getExistingGroupMembership(userUrn, authentication); + final List<Urn> allGroups = new ArrayList<>(); + allGroups.addAll(nativeGroupMembership.getNativeGroups()); + allGroups.addAll(groupMembership.getGroups()); + return allGroups; + } +
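// Editor's usage sketch (not part of the patch): getGroupsForUser above unions native
// (DataHub-managed) and ingested (e.g. SSO-sourced) membership into one flat list of
// group urns. A hypothetical caller, using only the signature added in this hunk:
//
//   List<Urn> groups = groupService.getGroupsForUser(userUrn, authentication);
//   boolean isAdmin = groups.contains(UrnUtils.getUrn("urn:li:corpGroup:admins")); // example urn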
NativeGroupMembership getExistingNativeGroupMembership( @Nonnull final Urn userUrn, final Authentication authentication) throws Exception { final EntityResponse entityResponse = _entityClient .batchGetV2( CORP_USER_ENTITY_NAME, Collections.singleton(userUrn), Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication) .get(userUrn); - NativeGroupMembership nativeGroupMembership; + final NativeGroupMembership nativeGroupMembership; if (entityResponse == null || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { // If the user doesn't have the NativeGroupMembership aspect, create one. @@ -204,6 +218,32 @@ NativeGroupMembership getExistingNativeGroupMembership( return nativeGroupMembership; } + GroupMembership getExistingGroupMembership( + @Nonnull final Urn userUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final EntityResponse entityResponse = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), + authentication) + .get(userUrn); + + final GroupMembership groupMembership; + if (entityResponse == null + || !entityResponse.getAspects().containsKey(GROUP_MEMBERSHIP_ASPECT_NAME)) { + // If the user doesn't have the GroupMembership aspect, create one. + groupMembership = new GroupMembership(); + groupMembership.setGroups(new UrnArray()); + } else { + groupMembership = + new GroupMembership( + entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + } + return groupMembership; + } + String createGroupInfo( @Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index 40555107f4c79..e072a59ae77ff 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.key.DataHubAccessTokenKey; import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -41,7 +40,7 @@ @Slf4j public class StatefulTokenService extends StatelessTokenService { - private final EntityService _entityService; + private final EntityService<?> _entityService; private final LoadingCache _revokedTokenCache; private final String salt; @@ -49,7 +48,7 @@ public StatefulTokenService( @Nonnull final String signingKey, @Nonnull final String signingAlgorithm, @Nullable final String iss, - @Nonnull final EntityService entityService, + @Nonnull final EntityService<?> entityService, @Nonnull final String salt) { super(signingKey, signingAlgorithm, iss); this._entityService = entityService; @@ -154,11 +153,7 @@ public String generateAccessToken( _entityService.ingestProposal( AspectsBatchImpl.builder() - .mcps( - proposalStream.collect(Collectors.toList()), - auditStamp, - _entityService.getEntityRegistry(), - _entityService.getSystemEntityClient()) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) .build(), false); diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java index bc749a373c5b0..eb5243c0e5e4a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java @@ -8,9 +8,18 @@ public class VisualConfiguration { /** Asset related configurations */ public AssetsConfiguration assets; + /** Custom app
title to show in the browse tab */ + public String appTitle; + /** Queries tab related configurations */ public QueriesTabConfig queriesTab; + /** + * Boolean flag disabling viewing the Business Glossary page for users without the 'Manage + * Glossaries' privilege + */ + public boolean hideGlossary; + /** Queries tab related configurations */ public EntityProfileConfig entityProfile; diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index cfc84491ab0ae..2b202d513c9bf 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -1,3 +1,6 @@ +# The base URL where DataHub is accessible to users. +baseUrl: ${DATAHUB_BASE_URL:http://localhost:9002} + # App Layer authentication: # Enable if you want all requests to the Metadata Service to be authenticated. Disabled by default. @@ -113,7 +116,9 @@ visualConfig: queriesTabResultSize: ${REACT_APP_QUERIES_TAB_RESULT_SIZE:5} assets: logoUrl: ${REACT_APP_LOGO_URL:/assets/platforms/datahublogo.png} - faviconUrl: ${REACT_APP_FAVICON_URL:/assets/favicon.ico} + faviconUrl: ${REACT_APP_FAVICON_URL:/assets/icons/favicon.ico} + appTitle: ${REACT_APP_TITLE:} + hideGlossary: ${REACT_APP_HIDE_GLOSSARY:false} entityProfile: # we only support default tab for domains right now. In order to implement for other entities, update React code domainDefaultTab: ${DOMAIN_DEFAULT_TAB:} # set to DOCUMENTATION_TAB to show documentation tab first @@ -305,6 +310,11 @@ systemUpdate: backOffFactor: ${BOOTSTRAP_SYSTEM_UPDATE_BACK_OFF_FACTOR:2} # Multiplicative factor for back off, default values will result in waiting 5min 15s waitForSystemUpdate: ${BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE:true} +structuredProperties: + enabled: ${ENABLE_STRUCTURED_PROPERTIES_HOOK:true} # applies structured properties mappings + writeEnabled: ${ENABLE_STRUCTURED_PROPERTIES_WRITE:true} # write structured property values + systemUpdateEnabled: ${ENABLE_STRUCTURED_PROPERTIES_SYSTEM_UPDATE:false} # applies structured property mappings in system update job + healthCheck: cacheDurationSeconds: ${HEALTH_CHECK_CACHE_DURATION_SECONDS:5} @@ -324,6 +334,7 @@ featureFlags: uiEnabled: ${PRE_PROCESS_HOOKS_UI_ENABLED:true} # Circumvents Kafka for processing index updates for UI changes sourced from GraphQL to avoid processing delays showAcrylInfo: ${SHOW_ACRYL_INFO:false} # Show different CTAs within DataHub around moving to Managed DataHub. Set to true for the demo site. nestedDomainsEnabled: ${NESTED_DOMAINS_ENABLED:true} # Enables the nested Domains feature that allows users to have sub-Domains. 
If this is off, Domains appear "flat" again + schemaFieldEntityFetchEnabled: ${SCHEMA_FIELD_ENTITY_FETCH_ENABLED:true} # Enables fetching for schema field entities from the database when we hydrate them on schema fields entityChangeEvents: enabled: ${ENABLE_ENTITY_CHANGE_EVENTS_HOOK:true} @@ -375,5 +386,12 @@ cache: status: 20 corpUserCredentials: 20 corpUserSettings: 20 + structuredProperty: + propertyDefinition: 86400 # 1 day + structuredPropertyKey: 86400 # 1 day springdoc.api-docs.groups.enabled: true + +forms: + hook: + enabled: ${FORMS_HOOK_ENABLED:true} \ No newline at end of file diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java index ec398388ae77b..7b823e552da97 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java @@ -1,6 +1,5 @@ package com.linkedin.gms.factory.auth; -import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.AuthorizerContext; import com.datahub.authorization.DataHubAuthorizer; @@ -18,8 +17,8 @@ import com.datahub.plugins.loader.IsolatedClassLoader; import com.datahub.plugins.loader.PluginPermissionManagerImpl; import com.google.common.collect.ImmutableMap; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import jakarta.annotation.Nonnull; import java.nio.file.Path; @@ -47,39 +46,29 @@ public class AuthorizerChainFactory { @Qualifier("configurationProvider") private ConfigurationProvider configurationProvider; - @Autowired - @Qualifier("dataHubAuthorizer") - private DataHubAuthorizer dataHubAuthorizer; - - @Autowired - @Qualifier("systemAuthentication") - private Authentication systemAuthentication; - - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient entityClient; - @Bean(name = "authorizerChain") @Scope("singleton") @Nonnull - protected AuthorizerChain getInstance() { - final EntitySpecResolver resolver = initResolver(); + protected AuthorizerChain getInstance( + final DataHubAuthorizer dataHubAuthorizer, final SystemEntityClient systemEntityClient) { + final EntitySpecResolver resolver = initResolver(systemEntityClient); // Extract + initialize customer authorizers from application configs. final List<Authorizer> authorizers = new ArrayList<>(initCustomAuthorizers(resolver)); if (configurationProvider.getAuthorization().getDefaultAuthorizer().isEnabled()) { AuthorizerContext ctx = new AuthorizerContext(Collections.emptyMap(), resolver); - this.dataHubAuthorizer.init(Collections.emptyMap(), ctx); + dataHubAuthorizer.init(Collections.emptyMap(), ctx); log.info("Default DataHubAuthorizer is enabled.
Appending it to the authorization chain."); - authorizers.add(this.dataHubAuthorizer); + authorizers.add(dataHubAuthorizer); } return new AuthorizerChain(authorizers, dataHubAuthorizer); } - private EntitySpecResolver initResolver() { - return new DefaultEntitySpecResolver(systemAuthentication, entityClient); + private EntitySpecResolver initResolver(SystemEntityClient systemEntityClient) { + return new DefaultEntitySpecResolver( + systemEntityClient.getSystemAuthentication(), systemEntityClient); } private List initCustomAuthorizers(EntitySpecResolver resolver) { @@ -121,7 +110,7 @@ private void registerAuthorizer( // Get security mode set by user SecurityMode securityMode = SecurityMode.valueOf( - this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); // Create permission manager with security mode PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 3b23243f76742..0935e8ad0e7d4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -1,33 +1,19 @@ package com.linkedin.gms.factory.auth; -import com.datahub.authentication.Authentication; import com.datahub.authorization.DataHubAuthorizer; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) -@Import({RestliEntityClientFactory.class}) public class DataHubAuthorizerFactory { - @Autowired - @Qualifier("systemAuthentication") - private Authentication systemAuthentication; - - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient entityClient; - @Value("${authorization.defaultAuthorizer.cacheRefreshIntervalSecs}") private Integer policyCacheRefreshIntervalSeconds; @@ -40,7 +26,7 @@ public class DataHubAuthorizerFactory { @Bean(name = "dataHubAuthorizer") @Scope("singleton") @Nonnull - protected DataHubAuthorizer getInstance() { + protected DataHubAuthorizer dataHubAuthorizer(final SystemEntityClient systemEntityClient) { final DataHubAuthorizer.AuthorizationMode mode = policiesEnabled @@ -48,8 +34,8 @@ protected DataHubAuthorizer getInstance() { : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; return new DataHubAuthorizer( - systemAuthentication, - entityClient, + systemEntityClient.getSystemAuthentication(), + systemEntityClient, 10, policyCacheRefreshIntervalSeconds, mode, diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java index 83544e4165ae3..beb467d614930 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java @@ -28,16 +28,16 @@ public class DataHubTokenServiceFactory { @Value("${authentication.tokenService.issuer:datahub-metadata-service}") private String issuer; - /** - @Inject - @Named("entityService") - private EntityService _entityService; - */ + /** + @Inject + @Named("entityService") + private EntityService<?> _entityService; + */ @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService<?> _entityService; @Bean(name = "dataHubTokenService") @Scope("singleton") @Nonnull protected StatefulTokenService getInstance() { return new StatefulTokenService( - this.signingKey, this.signingAlgorithm, this.issuer, this._entityService, this.saltingKey); + signingKey, signingAlgorithm, issuer, _entityService, saltingKey); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java index 7c6c4384d7343..47af58a8d8626 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java @@ -1,7 +1,7 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.group.GroupService; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -18,11 +18,7 @@ public class GroupServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; - - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; + private EntityService<?> _entityService; @Autowired @Qualifier("graphClient") @@ -31,7 +27,8 @@ public class GroupServiceFactory { @Bean(name = "groupService") @Scope("singleton") @Nonnull - protected GroupService getInstance() throws Exception { - return new GroupService(this._javaEntityClient, this._entityService, this._graphClient); + protected GroupService getInstance(@Qualifier("entityClient") final EntityClient entityClient) + throws Exception { + return new GroupService(entityClient, _entityService, _graphClient); } }
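Editor's note: the same mechanical refactor repeats across the factories in this patch — a field-injected concrete JavaEntityClient is replaced by the EntityClient interface taken as a qualified bean-method parameter. Schematically (MyService and MyServiceFactory are hypothetical):

import com.linkedin.entity.client.EntityClient;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MyServiceFactory {

  static class MyService {
    MyService(EntityClient entityClient) {}
  }

  @Bean(name = "myService")
  @Nonnull
  protected MyService getInstance(@Qualifier("entityClient") final EntityClient entityClient) {
    // Parameter injection keeps the factory free of @Autowired client fields and lets
    // Spring resolve the bean at call time, mirroring the diffs in this patch.
    return new MyService(entityClient);
  }
}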
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java index c44eada46794d..7a2b14fdb0f28 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java @@ -1,7 +1,7 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.invite.InviteTokenService; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; @@ -15,9 +15,6 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class InviteTokenServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Autowired @Qualifier("dataHubSecretService") @@ -26,7 +23,8 @@ public class InviteTokenServiceFactory { @Bean(name = "inviteTokenService") @Scope("singleton") @Nonnull - protected InviteTokenService getInstance() throws Exception { - return new InviteTokenService(this._javaEntityClient, this._secretService); + protected InviteTokenService getInstance( + @Qualifier("entityClient") final EntityClient entityClient) throws Exception { + return new InviteTokenService(entityClient, _secretService); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index 844f3a094b6b7..0ed8f1a4b7af4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.user.NativeUserService; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -19,11 +19,7 @@ public class NativeUserServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; - - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; + private EntityService<?> _entityService; @Autowired @Qualifier("dataHubSecretService") @@ -34,11 +30,8 @@ public class NativeUserServiceFactory { @Bean(name = "nativeUserService") @Scope("singleton") @Nonnull - protected NativeUserService getInstance() throws Exception { + protected NativeUserService getInstance(final SystemEntityClient entityClient) throws Exception { return new NativeUserService( - _entityService, - _javaEntityClient, - _secretService, - _configurationProvider.getAuthentication()); + _entityService, entityClient, _secretService, _configurationProvider.getAuthentication()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java index a6ae703576a3e..317d8583ef1c3 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.post.PostService; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -14,14 +13,12 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PostServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Bean(name = "postService") @Scope("singleton") @Nonnull - protected PostService getInstance() throws Exception { - return new PostService(this._javaEntityClient); + protected PostService getInstance(@Qualifier("entityClient") final EntityClient entityClient) + throws Exception { + return new PostService(entityClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java index 7696d5201493a..9321e2544a493 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authorization.role.RoleService; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -15,14 +14,11 @@ @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RoleServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; - @Bean(name = "roleService") @Scope("singleton") @Nonnull - protected RoleService getInstance() throws Exception { - return new RoleService(this._javaEntityClient); + protected RoleService getInstance(@Qualifier("entityClient") final EntityClient entityClient) + throws Exception { + return new RoleService(entityClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java index 52d13b05a654d..efe688ceee3ff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java @@ -34,8 +34,8 @@ public class SystemAuthenticationFactory { @Nonnull protected Authentication getInstance() { // TODO: Change to service - final Actor systemActor = new Actor(ActorType.USER, this.systemClientId); + final Actor systemActor = new Actor(ActorType.USER, systemClientId); return new Authentication( - systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret)); + systemActor, String.format("Basic %s:%s", systemClientId, systemSecret)); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java index 5663162186b83..465d28542f371 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java @@ -18,7 +18,7 @@ public class SiblingGraphServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService<?> _entityService; @Autowired @Qualifier("graphService") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java index 5c7c2370ab337..e969793fac1ef 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java @@ -60,9 +60,15 @@ public class ConfigurationProvider { /** System Update configurations */ private SystemUpdateConfiguration systemUpdate; + /** The base URL where DataHub is hosted. */ + private String baseUrl; + /** Configuration for caching */ private CacheConfiguration cache; /** Configuration for the health check server */ private HealthCheckConfiguration healthCheck; + + /** Structured properties related configurations */ + private StructuredPropertiesConfiguration structuredProperties; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java new file mode 100644 index 0000000000000..6d4d4ea30c863 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java @@ -0,0 +1,10 @@ +package com.linkedin.gms.factory.config; + +import lombok.Data; + +@Data +public class StructuredPropertiesConfiguration { + private boolean enabled; + private boolean writeEnabled; + private boolean systemUpdateEnabled; +}
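Editor's note: this POJO lines up one-to-one with the new structuredProperties block in application.yml (enabled, writeEnabled, systemUpdateEnabled). Given the Lombok @Data accessors and the ConfigurationProvider field added above, a consumer could gate writes roughly as follows; the surrounding class is hypothetical.

import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.gms.factory.config.StructuredPropertiesConfiguration;

public class StructuredPropertiesGateSketch {
  // True when structured property value writes should be accepted.
  static boolean writesAllowed(ConfigurationProvider provider) {
    StructuredPropertiesConfiguration config = provider.getStructuredProperties();
    return config.isEnabled() && config.isWriteEnabled();
  }
}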
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java index 739211855cacd..39d42b6fb7568 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java @@ -1,6 +1,6 @@ package com.linkedin.gms.factory.dataproduct; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.service.DataProductService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -15,9 +15,6 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class DataProductServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Autowired @Qualifier("graphClient") @@ -26,7 +23,8 @@ public class DataProductServiceFactory { @Bean(name = "dataProductService") @Scope("singleton") @Nonnull - protected DataProductService getInstance() throws Exception { - return new DataProductService(_javaEntityClient, _graphClient); + protected DataProductService getInstance( + @Qualifier("entityClient") final EntityClient entityClient) throws Exception { + return new DataProductService(entityClient, _graphClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java index 326537ee07cbd..788dc3777e539 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java @@ -9,6 +9,7 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.net.ssl.SSLContext; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -16,6 +17,7 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class CassandraSessionFactory { @@ -50,7 +52,7 @@ protected CqlSession createSession() { try { csb = csb.withSslContext(SSLContext.getDefault()); } catch (Exception e) { - e.printStackTrace(); + log.error("Error creating cassandra ssl session", e); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java index 8644327747281..6bc2d3c7be63f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java @@ -8,7 +8,6 @@ import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.Import; @Configuration @@ -16,14 +15,13 @@ public class DeleteEntityServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService<?> _entityService; @Autowired @Qualifier("graphService") private GraphService _graphService; @Bean(name = "deleteEntityService") - @DependsOn({"entityService"}) @Nonnull protected DeleteEntityService createDeleteEntityService() { return new DeleteEntityService(_entityService, _graphService); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java index 88a3f5749343b..5fd64b02d08a8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java @@ -48,16 +48,14 @@ protected EntityService createInstance( final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker); FeatureFlags featureFlags = configurationProvider.getFeatureFlags(); - EntityService entityService = - new EntityServiceImpl( - aspectDao, - eventProducer, - entityRegistry, - featureFlags.isAlwaysEmitChangeLog(), - updateIndicesService, - featureFlags.getPreProcessHooks(), -
_ebeanMaxTransactionRetry); - return entityService; + return new EntityServiceImpl( + aspectDao, + eventProducer, + entityRegistry, + featureFlags.isAlwaysEmitChangeLog(), + updateIndicesService, + featureFlags.getPreProcessHooks(), + _ebeanMaxTransactionRetry); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java deleted file mode 100644 index c550fc161b606..0000000000000 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java +++ /dev/null @@ -1,100 +0,0 @@ -package com.linkedin.gms.factory.entity; - -import com.datahub.authentication.Authentication; -import com.linkedin.entity.client.RestliEntityClient; -import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; -import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.client.SystemJavaEntityClient; -import com.linkedin.metadata.entity.DeleteEntityService; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; -import com.linkedin.metadata.event.EventProducer; -import com.linkedin.metadata.search.EntitySearchService; -import com.linkedin.metadata.search.LineageSearchService; -import com.linkedin.metadata.search.SearchService; -import com.linkedin.metadata.search.client.CachingEntitySearchService; -import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; - -@Configuration -@ConditionalOnExpression("'${entityClient.preferredImpl:java}'.equals('java')") -@Import({DataHubKafkaProducerFactory.class}) -public class JavaEntityClientFactory { - - @Autowired - @Qualifier("entityService") - private EntityService _entityService; - - @Autowired - @Qualifier("deleteEntityService") - private DeleteEntityService _deleteEntityService; - - @Autowired - @Qualifier("searchService") - private SearchService _searchService; - - @Autowired - @Qualifier("entitySearchService") - private EntitySearchService _entitySearchService; - - @Autowired - @Qualifier("cachingEntitySearchService") - private CachingEntitySearchService _cachingEntitySearchService; - - @Autowired - @Qualifier("timeseriesAspectService") - private TimeseriesAspectService _timeseriesAspectService; - - @Autowired - @Qualifier("relationshipSearchService") - private LineageSearchService _lineageSearchService; - - @Autowired - @Qualifier("kafkaEventProducer") - private EventProducer _eventProducer; - - @Bean("javaEntityClient") - public JavaEntityClient getJavaEntityClient( - @Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { - return new JavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - _timeseriesAspectService, - _eventProducer, - restliEntityClient); - } - - @Bean("systemJavaEntityClient") - public SystemJavaEntityClient systemJavaEntityClient( - @Qualifier("configurationProvider") final 
ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication, - @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) { - SystemJavaEntityClient systemJavaEntityClient = - new SystemJavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - _timeseriesAspectService, - _eventProducer, - restliEntityClient, - systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); - - _entityService.setSystemEntityClient(systemJavaEntityClient); - - return systemJavaEntityClient; - } -} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java index dae5f903d7d80..31ad933b9579d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java @@ -33,9 +33,9 @@ public class RetentionServiceFactory { @DependsOn({"cassandraSession", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") @Nonnull - protected RetentionService createCassandraInstance(CqlSession session) { - RetentionService retentionService = - new CassandraRetentionService(_entityService, session, _batchSize); + protected RetentionService createCassandraInstance(CqlSession session) { + RetentionService retentionService = + new CassandraRetentionService<>(_entityService, session, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } @@ -44,9 +44,9 @@ protected RetentionService createCassandraInstance(CqlSession session) { @DependsOn({"ebeanServer", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected RetentionService createEbeanInstance(Database server) { - RetentionService retentionService = - new EbeanRetentionService(_entityService, server, _batchSize); + protected RetentionService createEbeanInstance(Database server) { + RetentionService retentionService = + new EbeanRetentionService<>(_entityService, server, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java new file mode 100644 index 0000000000000..e1055835616ea --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java @@ -0,0 +1,27 @@ +package com.linkedin.gms.factory.entity; + +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.service.RollbackService; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class RollbackServiceFactory { + + @Value("${authorization.restApiAuthorization:false}") + boolean restApiAuthorizationEnabled; + + 
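+  // Gates authorization of rollback operations issued through the REST API; wired from the
+  // authorization.restApiAuthorization property above and passed into RollbackService below.
+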
+  @Bean
+  @Nonnull
+  protected RollbackService rollbackService(
+      final EntityService<?> entityService,
+      final SystemMetadataService systemMetadataService,
+      final TimeseriesAspectService timeseriesAspectService) {
+    return new RollbackService(
+        entityService, systemMetadataService, timeseriesAspectService, restApiAuthorizationEnabled);
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java
index d8c1422f988c2..34c1887d67c56 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java
@@ -1,7 +1,8 @@
 package com.linkedin.gms.factory.entity.update.indices;
 
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.gms.factory.search.EntityIndexBuildersFactory;
+import com.linkedin.metadata.client.EntityClientAspectRetriever;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.EntitySearchService;
@@ -22,7 +23,7 @@ public class UpdateIndicesServiceFactory {
 
   @Autowired private ApplicationContext context;
 
-  @Value("${entityClient.preferredImpl:java}")
+  @Value("${entityClient.impl:java}")
   private String entityClientImpl;
 
   @Bean
@@ -34,18 +35,27 @@ public UpdateIndicesService updateIndicesService(
       EntityRegistry entityRegistry,
       SearchDocumentTransformer searchDocumentTransformer,
       EntityIndexBuilders entityIndexBuilders) {
+
     UpdateIndicesService updateIndicesService =
         new UpdateIndicesService(
            graphService,
            entitySearchService,
            timeseriesAspectService,
            systemMetadataService,
-           entityRegistry,
            searchDocumentTransformer,
            entityIndexBuilders);
 
     if ("restli".equals(entityClientImpl)) {
+      /*
+       When in restli mode, the EntityService is not available. Wire in an AspectRetriever here instead,
+       based on the entity client
+      */
+      SystemEntityClient systemEntityClient = context.getBean(SystemEntityClient.class);
+      updateIndicesService.initializeAspectRetriever(
+          EntityClientAspectRetriever.builder()
+              .entityRegistry(entityRegistry)
+              .entityClient(systemEntityClient)
+              .build());
-      updateIndicesService.setSystemEntityClient(context.getBean(SystemRestliEntityClient.class));
     }
 
     return updateIndicesService;
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java
new file mode 100644
index 0000000000000..c6fe0d6e95f48
--- /dev/null
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java
@@ -0,0 +1,20 @@
+package com.linkedin.gms.factory.entityclient;
+
+import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+
+@Configuration
+@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
+public class EntityClientConfigFactory {
+
+  @Bean
+  public EntityClientCacheConfig entityClientCacheConfig(
+      @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider) {
+    return configurationProvider.getCache().getClient().getEntityClient();
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java
new file mode 100644
index 0000000000000..530136e32662f
--- /dev/null
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java
@@ -0,0 +1,85 @@
+package com.linkedin.gms.factory.entityclient;
+
+import com.datahub.authentication.Authentication;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
+import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.metadata.client.SystemJavaEntityClient;
+import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
+import com.linkedin.metadata.entity.DeleteEntityService;
+import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.event.EventProducer;
+import com.linkedin.metadata.search.EntitySearchService;
+import com.linkedin.metadata.search.LineageSearchService;
+import com.linkedin.metadata.search.SearchService;
+import com.linkedin.metadata.search.client.CachingEntitySearchService;
+import com.linkedin.metadata.service.RollbackService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import com.linkedin.metadata.timeseries.TimeseriesAspectService;
+import javax.inject.Singleton;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+
+/** The *Java* Entity Client should be preferred if executing within the GMS service.
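+ * With entityClient.impl set to "java" (the default here via matchIfMissing), entity operations
+ * run in-process against EntityService instead of going over the Rest.li transport.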
+ */
+@Configuration
+@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
+@ConditionalOnProperty(name = "entityClient.impl", havingValue = "java", matchIfMissing = true)
+public class JavaEntityClientFactory {
+
+  @Bean("entityClient")
+  @Singleton
+  public EntityClient entityClient(
+      final @Qualifier("entityService") EntityService<?> _entityService,
+      final @Qualifier("deleteEntityService") DeleteEntityService _deleteEntityService,
+      final @Qualifier("searchService") SearchService _searchService,
+      final @Qualifier("entitySearchService") EntitySearchService _entitySearchService,
+      final @Qualifier("cachingEntitySearchService") CachingEntitySearchService
+          _cachingEntitySearchService,
+      final @Qualifier("timeseriesAspectService") TimeseriesAspectService _timeseriesAspectService,
+      final @Qualifier("relationshipSearchService") LineageSearchService _lineageSearchService,
+      final @Qualifier("kafkaEventProducer") EventProducer _eventProducer,
+      final RollbackService rollbackService) {
+    return new JavaEntityClient(
+        _entityService,
+        _deleteEntityService,
+        _entitySearchService,
+        _cachingEntitySearchService,
+        _searchService,
+        _lineageSearchService,
+        _timeseriesAspectService,
+        rollbackService,
+        _eventProducer);
+  }
+
+  @Bean("systemEntityClient")
+  @Singleton
+  public SystemEntityClient systemEntityClient(
+      final @Qualifier("entityService") EntityService<?> _entityService,
+      final @Qualifier("deleteEntityService") DeleteEntityService _deleteEntityService,
+      final @Qualifier("searchService") SearchService _searchService,
+      final @Qualifier("entitySearchService") EntitySearchService _entitySearchService,
+      final @Qualifier("cachingEntitySearchService") CachingEntitySearchService
+          _cachingEntitySearchService,
+      final @Qualifier("timeseriesAspectService") TimeseriesAspectService _timeseriesAspectService,
+      final @Qualifier("relationshipSearchService") LineageSearchService _lineageSearchService,
+      final @Qualifier("kafkaEventProducer") EventProducer _eventProducer,
+      final RollbackService rollbackService,
+      final EntityClientCacheConfig entityClientCacheConfig,
+      @Qualifier("systemAuthentication") final Authentication systemAuthentication) {
+    return new SystemJavaEntityClient(
+        _entityService,
+        _deleteEntityService,
+        _entitySearchService,
+        _cachingEntitySearchService,
+        _searchService,
+        _lineageSearchService,
+        _timeseriesAspectService,
+        rollbackService,
+        _eventProducer,
+        systemAuthentication,
+        entityClientCacheConfig);
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java
similarity index 53%
rename from metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java
rename to metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java
index 1dee8c4aa4d27..88989b1833e78 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java
@@ -1,47 +1,40 @@
-package com.linkedin.gms.factory.entity;
+package com.linkedin.gms.factory.entityclient;
 
 import com.datahub.authentication.Authentication;
+import com.linkedin.entity.client.EntityClient;
 import com.linkedin.entity.client.RestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.entity.client.SystemRestliEntityClient;
-import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
 import com.linkedin.metadata.restli.DefaultRestliClientFactory;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
 import com.linkedin.restli.client.Client;
 import java.net.URI;
+import javax.inject.Singleton;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
+/** The Java Entity Client should be preferred if executing within the GMS service. */
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
+@ConditionalOnProperty(name = "entityClient.impl", havingValue = "restli")
 public class RestliEntityClientFactory {
 
-  @Value("${datahub.gms.host}")
-  private String gmsHost;
-
-  @Value("${datahub.gms.port}")
-  private int gmsPort;
-
-  @Value("${datahub.gms.useSSL}")
-  private boolean gmsUseSSL;
-
-  @Value("${datahub.gms.uri}")
-  private String gmsUri;
-
-  @Value("${datahub.gms.sslContext.protocol}")
-  private String gmsSslProtocol;
-
-  @Value("${entityClient.retryInterval:2}")
-  private int retryInterval;
-
-  @Value("${entityClient.numRetries:3}")
-  private int numRetries;
-
-  @Bean("restliEntityClient")
-  public RestliEntityClient getRestliEntityClient() {
+  @Bean("entityClient")
+  @Singleton
+  public EntityClient entityClient(
+      @Value("${datahub.gms.host}") String gmsHost,
+      @Value("${datahub.gms.port}") int gmsPort,
+      @Value("${datahub.gms.useSSL}") boolean gmsUseSSL,
+      @Value("${datahub.gms.uri}") String gmsUri,
+      @Value("${datahub.gms.sslContext.protocol}") String gmsSslProtocol,
+      @Value("${entityClient.retryInterval:2}") int retryInterval,
+      @Value("${entityClient.numRetries:3}") int numRetries) {
     final Client restClient;
     if (gmsUri != null) {
       restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol);
@@ -52,10 +45,19 @@ public RestliEntityClient getRestliEntityClient() {
     return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries);
   }
 
-  @Bean("systemRestliEntityClient")
-  public SystemRestliEntityClient systemRestliEntityClient(
-      @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider,
+  @Bean("systemEntityClient")
+  @Singleton
+  public SystemEntityClient systemEntityClient(
+      @Value("${datahub.gms.host}") String gmsHost,
+      @Value("${datahub.gms.port}") int gmsPort,
+      @Value("${datahub.gms.useSSL}") boolean gmsUseSSL,
+      @Value("${datahub.gms.uri}") String gmsUri,
+      @Value("${datahub.gms.sslContext.protocol}") String gmsSslProtocol,
+      @Value("${entityClient.retryInterval:2}") int retryInterval,
+      @Value("${entityClient.numRetries:3}") int numRetries,
+      final EntityClientCacheConfig entityClientCacheConfig,
       @Qualifier("systemAuthentication") final Authentication systemAuthentication) {
+
     final Client restClient;
     if (gmsUri != null) {
       restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol);
@@ -68,6 +70,6 @@ public SystemRestliEntityClient systemRestliEntityClient(
         new ExponentialBackoff(retryInterval),
         numRetries,
         systemAuthentication,
-        configurationProvider.getCache().getClient().getEntityClient());
+        entityClientCacheConfig);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java
new file mode 100644
index 0000000000000..73be819028f57
--- /dev/null
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java
@@ -0,0 +1,21 @@
+package com.linkedin.gms.factory.form;
+
+import com.linkedin.entity.client.SystemEntityClient;
+import com.linkedin.metadata.service.FormService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import javax.annotation.Nonnull;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.context.annotation.Scope;
+
+@Configuration
+@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
+public class FormServiceFactory {
+  @Bean(name = "formService")
+  @Scope("singleton")
+  @Nonnull
+  protected FormService getInstance(final SystemEntityClient entityClient) throws Exception {
+    return new FormService(entityClient, entityClient.getSystemAuthentication());
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java
index 723715a13b1c1..60697e57a9afb 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java
@@ -10,17 +10,16 @@
 import com.linkedin.datahub.graphql.GmsGraphQLEngineArgs;
 import com.linkedin.datahub.graphql.GraphQLEngine;
 import com.linkedin.datahub.graphql.analytics.service.AnalyticsService;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.gms.factory.auth.DataHubTokenServiceFactory;
 import com.linkedin.gms.factory.common.GitVersionFactory;
 import com.linkedin.gms.factory.common.IndexConventionFactory;
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
 import com.linkedin.gms.factory.common.SiblingGraphServiceFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.gms.factory.entity.RestliEntityClientFactory;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
 import com.linkedin.gms.factory.recommendation.RecommendationServiceFactory;
-import com.linkedin.metadata.client.JavaEntityClient;
-import com.linkedin.metadata.client.SystemJavaEntityClient;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.graph.GraphClient;
 import com.linkedin.metadata.graph.GraphService;
@@ -29,6 +28,7 @@
 import com.linkedin.metadata.recommendation.RecommendationsService;
 import com.linkedin.metadata.secret.SecretService;
 import com.linkedin.metadata.service.DataProductService;
+import com.linkedin.metadata.service.FormService;
 import com.linkedin.metadata.service.LineageService;
 import com.linkedin.metadata.service.OwnershipTypeService;
 import com.linkedin.metadata.service.QueryService;
@@ -52,7 +52,6 @@
 @Import({
   RestHighLevelClientFactory.class,
   IndexConventionFactory.class,
-  RestliEntityClientFactory.class,
   RecommendationServiceFactory.class,
   EntityRegistryFactory.class,
   DataHubTokenServiceFactory.class,
@@ -68,14 +67,6 @@ public class GraphQLEngineFactory {
   @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN)
   private IndexConvention indexConvention;
 
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _entityClient;
-
-  @Autowired
-  @Qualifier("systemJavaEntityClient")
-  private SystemJavaEntityClient _systemEntityClient;
-
   @Autowired
   @Qualifier("graphClient")
   private GraphClient _graphClient;
@@ -86,7 +77,7 @@ public class GraphQLEngineFactory {
 
   @Autowired
   @Qualifier("entityService")
-  private EntityService _entityService;
+  private EntityService<?> _entityService;
 
   @Autowired
   @Qualifier("graphService")
@@ -172,15 +163,21 @@ public class GraphQLEngineFactory {
   @Qualifier("dataProductService")
   private DataProductService _dataProductService;
 
+  @Autowired
+  @Qualifier("formService")
+  private FormService _formService;
+
   @Value("${platformAnalytics.enabled}") // TODO: Migrate to DATAHUB_ANALYTICS_ENABLED
   private Boolean isAnalyticsEnabled;
 
   @Bean(name = "graphQLEngine")
   @Nonnull
-  protected GraphQLEngine getInstance() {
+  protected GraphQLEngine getInstance(
+      @Qualifier("entityClient") final EntityClient entityClient,
+      @Qualifier("systemEntityClient") final SystemEntityClient systemEntityClient) {
     GmsGraphQLEngineArgs args = new GmsGraphQLEngineArgs();
-    args.setEntityClient(_entityClient);
-    args.setSystemEntityClient(_systemEntityClient);
+    args.setEntityClient(entityClient);
+    args.setSystemEntityClient(systemEntityClient);
     args.setGraphClient(_graphClient);
     args.setUsageClient(_usageClient);
     if (isAnalyticsEnabled) {
@@ -215,6 +212,7 @@ protected GraphQLEngine getInstance() {
     args.setLineageService(_lineageService);
     args.setQueryService(_queryService);
     args.setFeatureFlags(_configProvider.getFeatureFlags());
+    args.setFormService(_formService);
     args.setDataProductService(_dataProductService);
     return new GmsGraphQLEngine(args).builder().build();
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java
index 78b9c5d52efdd..0ba953d66730c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java
@@ -1,11 +1,9 @@
 package com.linkedin.gms.factory.ingestion;
 
-import com.datahub.authentication.Authentication;
 import com.datahub.metadata.ingestion.IngestionScheduler;
-import com.linkedin.entity.client.RestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.gms.factory.auth.SystemAuthenticationFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.gms.factory.entity.RestliEntityClientFactory;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -16,18 +14,10 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-@Import({SystemAuthenticationFactory.class, RestliEntityClientFactory.class})
+@Import({SystemAuthenticationFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
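 // The scheduler obtains its SystemEntityClient through method injection in getInstance() below;
 // the same client also supplies the system Authentication via getSystemAuthentication().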
 public class IngestionSchedulerFactory {
 
-  @Autowired
-  @Qualifier("systemAuthentication")
-  private Authentication _systemAuthentication;
-
-  @Autowired
-  @Qualifier("restliEntityClient")
-  private RestliEntityClient _entityClient;
-
   @Autowired
   @Qualifier("configurationProvider")
   private ConfigurationProvider _configProvider;
@@ -43,10 +33,10 @@ public class IngestionSchedulerFactory {
   @Bean(name = "ingestionScheduler")
   @Scope("singleton")
   @Nonnull
-  protected IngestionScheduler getInstance() {
+  protected IngestionScheduler getInstance(final SystemEntityClient entityClient) {
     return new IngestionScheduler(
-        _systemAuthentication,
-        _entityClient,
+        entityClient.getSystemAuthentication(),
+        entityClient,
         _configProvider.getIngestion(),
        _delayIntervalSeconds,
        _refreshIntervalSeconds);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
index d82a789c9c086..0d00218d1990e 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
@@ -127,7 +127,6 @@ use DefaultErrorHandler (does back-off retry and then logs) rather than stopping
           DeserializationException.class, new CommonContainerStoppingErrorHandler());
       factory.setCommonErrorHandler(delegatingErrorHandler);
     }
-
     log.info(
         String.format(
             "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s",
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
index a88e1d971973b..c06ebae27f3af 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
@@ -35,7 +35,7 @@ public class AwsGlueSchemaRegistryFactory {
 
   @Bean("schemaRegistryConfig")
   @Nonnull
-  protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) {
+  protected SchemaRegistryConfig getInstance(final ConfigurationProvider configurationProvider) {
     Map<String, Object> props = new HashMap<>();
     // FIXME: Properties for this factory should come from ConfigurationProvider object,
     // specifically under the
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
index 1589b33862bfe..d81df694c420d 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
@@ -1,10 +1,9 @@
 package com.linkedin.gms.factory.lineage;
 
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.service.LineageService;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -14,14 +13,12 @@
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class LineageServiceFactory {
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
 
   @Bean(name = "lineageService")
   @Scope("singleton")
   @Nonnull
-  protected LineageService getInstance() throws Exception {
-    return new LineageService(this._javaEntityClient);
+  protected LineageService getInstance(@Qualifier("entityClient") final EntityClient entityClient)
+      throws Exception {
+    return new LineageService(entityClient);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
index ff48a922adf22..5403ca80fa5a8 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
@@ -1,12 +1,9 @@
 package com.linkedin.gms.factory.ownership;
 
-import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.service.OwnershipTypeService;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
@@ -15,18 +12,12 @@
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class OwnershipTypeServiceFactory {
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
-
-  @Autowired
-  @Qualifier("systemAuthentication")
-  private Authentication _authentication;
 
   @Bean(name = "ownerShipTypeService")
   @Scope("singleton")
   @Nonnull
-  protected OwnershipTypeService getInstance() throws Exception {
-    return new OwnershipTypeService(_javaEntityClient, _authentication);
+  protected OwnershipTypeService getInstance(final SystemEntityClient entityClient)
+      throws Exception {
+    return new OwnershipTypeService(entityClient, entityClient.getSystemAuthentication());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java
index cf81cbf70d5eb..64af400708e6c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java
@@ -1,12 +1,9 @@
 package com.linkedin.gms.factory.query;
 
-import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.service.QueryService;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
@@ -15,18 +12,11 @@
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class QueryServiceFactory {
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
-
-  @Autowired
-  @Qualifier("systemAuthentication")
-  private Authentication _authentication;
 
   @Bean(name = "queryService")
   @Scope("singleton")
   @Nonnull
-  protected QueryService getInstance() throws Exception {
-    return new QueryService(_javaEntityClient, _authentication);
+  protected QueryService getInstance(final SystemEntityClient entityClient) throws Exception {
+    return new QueryService(entityClient, entityClient.getSystemAuthentication());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
index f3be4db147399..9b8707b746b29 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
@@ -31,7 +31,7 @@ public class MostPopularCandidateSourceFactory {
 
   @Autowired
   @Qualifier("entityService")
-  private EntityService entityService;
+  private EntityService<?> entityService;
 
   @Bean(name = "mostPopularCandidateSource")
   @Nonnull
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
index ac227faf06c4c..cfdb705dc3f6d 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
@@ -31,7 +31,7 @@ public class RecentlyEditedCandidateSourceFactory {
 
   @Autowired
   @Qualifier("entityService")
-  private EntityService _entityService;
+  private EntityService<?> _entityService;
 
   @Bean(name = "recentlyEditedCandidateSource")
   @Nonnull
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
index 6f17846efc1cd..742ed685fd6e1 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
@@ -31,7 +31,7 @@ public class RecentlyViewedCandidateSourceFactory {
 
   @Autowired
   @Qualifier("entityService")
-  private EntityService entityService;
+  private EntityService<?> entityService;
 
   @Bean(name = "recentlyViewedCandidateSource")
   @Nonnull
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
index ad241e7717545..8b1ef069423ee 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
@@ -18,7 +18,7 @@ public class TopPlatformsCandidateSourceFactory {
 
   @Autowired
   @Qualifier("entityService")
-  private EntityService entityService;
+  private EntityService<?> entityService;
 
   @Autowired
   @Qualifier("entitySearchService")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
index 2b6d495e4fe33..7b5f4e18d4d53 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
@@ -1,5 +1,8 @@
 package com.linkedin.gms.factory.search;
 
+import static com.linkedin.metadata.Constants.*;
+
+import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
@@ -32,6 +35,16 @@ public class ElasticSearchServiceFactory {
 
   private static final ObjectMapper YAML_MAPPER = new YAMLMapper();
 
+  static {
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    YAML_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+  }
+
   @Autowired
   @Qualifier("baseElasticSearchComponents")
   private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components;
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
index 17103240c938b..0d7d2e9c1855f 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
@@ -19,6 +19,8 @@
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class LineageSearchServiceFactory {
 
+  public static final String LINEAGE_SEARCH_SERVICE_CACHE_NAME = "relationshipSearchService";
+
   @Bean(name = "relationshipSearchService")
   @Primary
   @Nonnull
@@ -31,7 +33,7 @@ protected LineageSearchService getInstance(
     return new LineageSearchService(
         searchService,
         graphService,
-        cacheEnabled ? cacheManager.getCache("relationshipSearchService") : null,
+        cacheEnabled ? cacheManager.getCache(LINEAGE_SEARCH_SERVICE_CACHE_NAME) : null,
         cacheEnabled,
         configurationProvider.getCache().getSearch().getLineage());
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java
index 32ad2175c9052..1fddb51065a1d 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java
@@ -1,12 +1,9 @@
 package com.linkedin.gms.factory.search.views;
 
-import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.service.ViewService;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
@@ -15,18 +12,11 @@
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class ViewServiceFactory {
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
-
-  @Autowired
-  @Qualifier("systemAuthentication")
-  private Authentication _authentication;
 
   @Bean(name = "viewService")
   @Scope("singleton")
   @Nonnull
-  protected ViewService getInstance() throws Exception {
-    return new ViewService(_javaEntityClient, _authentication);
+  protected ViewService getInstance(final SystemEntityClient entityClient) throws Exception {
+    return new ViewService(entityClient, entityClient.getSystemAuthentication());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java
index f0d09a815628d..a3f533a22f7ee 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java
@@ -1,12 +1,9 @@
 package com.linkedin.gms.factory.settings;
 
-import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.service.SettingsService;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
@@ -15,18 +12,10 @@
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class SettingsServiceFactory {
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
-
-  @Autowired
-  @Qualifier("systemAuthentication")
-  private Authentication _authentication;
-
   @Bean(name = "settingsService")
   @Scope("singleton")
   @Nonnull
-  protected SettingsService getInstance() throws Exception {
-    return new SettingsService(_javaEntityClient, _authentication);
+  protected SettingsService getInstance(final SystemEntityClient entityClient) throws Exception {
+    return new SettingsService(entityClient, entityClient.getSystemAuthentication());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java
index b735e490f583e..393bbdf155485 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java
@@ -25,7 +25,7 @@ public class DailyReport {
   private final IndexConvention _indexConvention;
   private final RestHighLevelClient _elasticClient;
   private final ConfigurationProvider _configurationProvider;
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final GitVersion _gitVersion;
 
   private static final String MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a";
@@ -36,7 +36,7 @@ public DailyReport(
       IndexConvention indexConvention,
       RestHighLevelClient elasticClient,
       ConfigurationProvider configurationProvider,
-      EntityService entityService,
+      EntityService<?> entityService,
       GitVersion gitVersion) {
     this._indexConvention = indexConvention;
     this._elasticClient = elasticClient;
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java
index 4986e705fd7b4..7d3638d44769b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java
@@ -24,7 +24,7 @@ public DailyReport dailyReport(
       @Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient,
       @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention,
       ConfigurationProvider configurationProvider,
-      EntityService entityService,
+      EntityService<?> entityService,
       GitVersion gitVersion) {
     return new DailyReport(
         indexConvention, elasticClient, configurationProvider, entityService, gitVersion);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java
index 748acb4a9499e..2e8317df6b14b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java
@@ -17,7 +17,7 @@ public final class TelemetryUtils {
 
   private static String _clientId;
 
-  public static String getClientId(EntityService entityService) {
+  public static String getClientId(EntityService<?> entityService) {
     if (_clientId == null) {
       createClientIdIfNotPresent(entityService);
       RecordTemplate clientIdTemplate =
@@ -28,7 +28,7 @@ public static String getClientId(EntityService entityService) {
     return _clientId;
   }
 
-  private static void createClientIdIfNotPresent(EntityService entityService) {
+  private static void createClientIdIfNotPresent(EntityService<?> entityService) {
     String uuid = UUID.randomUUID().toString();
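     // The random UUID becomes the anonymous telemetry client id; it is persisted as a
     // TelemetryClientId aspect so that subsequent getClientId() calls reuse the same value.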
TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid); final AuditStamp clientIdStamp = new AuditStamp(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java index 4e858fb5cdefd..cb0ef29b50a89 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java @@ -32,7 +32,7 @@ public class TrackingServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("gitVersion") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java index 53a98977413e4..50d4125257fb2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java @@ -2,8 +2,7 @@ import static com.linkedin.metadata.Constants.*; -import com.datahub.authentication.Authentication; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.timeline.eventgenerator.AssertionRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DataProcessInstanceRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DatasetPropertiesChangeEventGenerator; @@ -32,12 +31,10 @@ public class EntityChangeEventGeneratorRegistryFactory { @Autowired ApplicationContext applicationContext; @Bean(name = "entityChangeEventGeneratorRegistry") - @DependsOn({"restliEntityClient", "systemAuthentication"}) + @DependsOn({"systemEntityClient"}) @Nonnull protected EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry() { - final SystemRestliEntityClient entityClient = - applicationContext.getBean(SystemRestliEntityClient.class); - final Authentication systemAuthentication = applicationContext.getBean(Authentication.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final EntityChangeEventGeneratorRegistry registry = new EntityChangeEventGeneratorRegistry(); registry.register(SCHEMA_METADATA_ASPECT_NAME, new SchemaMetadataChangeEventGenerator()); registry.register( diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java index dc82fc4907edc..7ff91affdf765 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java @@ -40,7 +40,7 @@ static Urn getUpgradeUrn(String upgradeId) { new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } - static void setUpgradeResult(Urn urn, EntityService entityService) throws URISyntaxException { + static void setUpgradeResult(Urn urn, EntityService 
entityService) throws URISyntaxException { final AuditStamp auditStamp = new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java index ff5d3f215d86b..ed8a53aa594c8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java @@ -7,7 +7,6 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.key.DataHubUpgradeKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -21,12 +20,12 @@ @Slf4j public abstract class UpgradeStep implements BootstrapStep { - protected final EntityService _entityService; + protected final EntityService _entityService; private final String _version; private final String _upgradeId; private final Urn _upgradeUrn; - public UpgradeStep(EntityService entityService, String version, String upgradeId) { + public UpgradeStep(EntityService entityService, String version, String upgradeId) { this._entityService = entityService; this._version = version; this._upgradeId = upgradeId; diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index 70fa91ae61861..b808c3da5d8d0 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -13,7 +13,9 @@ import com.linkedin.metadata.boot.steps.IndexDataPlatformsStep; import com.linkedin.metadata.boot.steps.IngestDataPlatformInstancesStep; import com.linkedin.metadata.boot.steps.IngestDataPlatformsStep; +import com.linkedin.metadata.boot.steps.IngestDataTypesStep; import com.linkedin.metadata.boot.steps.IngestDefaultGlobalSettingsStep; +import com.linkedin.metadata.boot.steps.IngestEntityTypesStep; import com.linkedin.metadata.boot.steps.IngestOwnershipTypesStep; import com.linkedin.metadata.boot.steps.IngestPoliciesStep; import com.linkedin.metadata.boot.steps.IngestRetentionPoliciesStep; @@ -54,7 +56,7 @@ public class BootstrapManagerFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("entityRegistry") @@ -131,6 +133,8 @@ protected BootstrapManager createInstance() { new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); + final IngestDataTypesStep ingestDataTypesStep = new IngestDataTypesStep(_entityService); + final IngestEntityTypesStep ingestEntityTypesStep = new IngestEntityTypesStep(_entityService); final List finalSteps = new ArrayList<>( @@ -148,7 +152,9 @@ protected BootstrapManager createInstance() { removeClientIdAspectStep, restoreDbtSiblingsIndices, indexDataPlatformsStep, - restoreColumnLineageIndices)); + restoreColumnLineageIndices, + ingestDataTypesStep, + 
ingestEntityTypesStep)); if (_upgradeDefaultBrowsePathsEnabled) { finalSteps.add(new UpgradeDefaultBrowsePathsStep(_entityService)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java index 2436938c6c026..f13037c1e21c7 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java @@ -26,7 +26,7 @@ public class IngestRetentionPoliciesStepFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Value("${entityService.retention.enabled}") private Boolean _enableRetention; diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index 770c0d2840fe8..80e139dcd5c65 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -47,7 +47,7 @@ public class BackfillBrowsePathsV2Step extends UpgradeStep { private final SearchService _searchService; - public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { + public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { super(entityService, VERSION, UPGRADE_ID); _searchService = searchService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java index c46cfdd61158d..591082235ff30 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java @@ -34,7 +34,7 @@ public class IndexDataPlatformsStep extends UpgradeStep { private final EntityRegistry _entityRegistry; public IndexDataPlatformsStep( - EntityService entityService, + EntityService entityService, EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index e2f0b70526af5..716ae292338ed 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -28,7 +28,7 @@ public class IngestDataPlatformInstancesStep implements BootstrapStep { private static final int BATCH_SIZE = 1000; - private final EntityService _entityService; + private final EntityService _entityService; private final AspectMigrationsDao _migrationsDao; @Override @@ -81,8 +81,7 @@ public void execute() throws Exception { .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME) 
                   .aspect(dataPlatformInstance.get())
                   .auditStamp(aspectAuditStamp)
-                  .build(
-                      _entityService.getEntityRegistry(), _entityService.getSystemEntityClient()));
+                  .build(_entityService));
       }
     }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java
index 37eac6d5ec470..89ed493e162cc 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java
@@ -31,7 +31,7 @@ public class IngestDataPlatformsStep implements BootstrapStep {
 
   private static final String PLATFORM_ASPECT_NAME = "dataPlatformInfo";
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
 
   @Override
   public String name() {
@@ -91,9 +91,7 @@ public void execute() throws IOException, URISyntaxException {
                         new AuditStamp()
                             .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
                             .setTime(System.currentTimeMillis()))
-                    .build(
-                        _entityService.getEntityRegistry(),
-                        _entityService.getSystemEntityClient());
+                    .build(_entityService);
               } catch (URISyntaxException e) {
                 throw new RuntimeException(e);
               }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java
new file mode 100644
index 0000000000000..6f3a415b521e4
--- /dev/null
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java
@@ -0,0 +1,103 @@
+package com.linkedin.metadata.boot.steps;
+
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.util.RecordUtils;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.linkedin.common.AuditStamp;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.datatype.DataTypeInfo;
+import com.linkedin.events.metadata.ChangeType;
+import com.linkedin.metadata.boot.BootstrapStep;
+import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.models.AspectSpec;
+import com.linkedin.metadata.utils.EntityKeyUtils;
+import com.linkedin.metadata.utils.GenericRecordUtils;
+import com.linkedin.mxe.GenericAspect;
+import com.linkedin.mxe.MetadataChangeProposal;
+import java.util.Objects;
+import javax.annotation.Nonnull;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.core.io.ClassPathResource;
+
+/**
+ * This bootstrap step is responsible for ingesting default data types.
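+ *
+ * <p>Illustrative sketch of the resource this step reads (values here are examples only; the
+ * shipped entries live in ./boot/data_types.json): a JSON array of objects, each carrying a
+ * "urn" string and an "info" payload matching the DataTypeInfo schema, e.g.
+ * [{"urn": "urn:li:dataType:datahub.string",
+ *   "info": {"qualifiedName": "datahub.string", "displayName": "String", "description": "..."}}]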
+ */
+@Slf4j
+public class IngestDataTypesStep implements BootstrapStep {
+
+  private static final String DEFAULT_FILE_PATH = "./boot/data_types.json";
+  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
+  private final EntityService<?> _entityService;
+  private final String _resourcePath;
+
+  public IngestDataTypesStep(@Nonnull final EntityService<?> entityService) {
+    this(entityService, DEFAULT_FILE_PATH);
+  }
+
+  public IngestDataTypesStep(
+      @Nonnull final EntityService<?> entityService, @Nonnull final String filePath) {
+    _entityService = Objects.requireNonNull(entityService, "entityService must not be null");
+    _resourcePath = filePath;
+  }
+
+  @Override
+  public String name() {
+    return "IngestDataTypesStep";
+  }
+
+  @Override
+  public void execute() throws Exception {
+    log.info("Ingesting default data types...");
+
+    // 1. Read from the file into JSON.
+    final JsonNode dataTypesObj =
+        JSON_MAPPER.readTree(new ClassPathResource(_resourcePath).getFile());
+
+    if (!dataTypesObj.isArray()) {
+      throw new RuntimeException(
+          String.format(
+              "Found malformed data types file, expected an Array but found %s",
+              dataTypesObj.getNodeType()));
+    }
+
+    log.info("Ingesting {} data types", dataTypesObj.size());
+    int numIngested = 0;
+    for (final JsonNode dataTypeObj : dataTypesObj) {
+      final Urn urn = Urn.createFromString(dataTypeObj.get("urn").asText());
+      final DataTypeInfo info =
+          RecordUtils.toRecordTemplate(DataTypeInfo.class, dataTypeObj.get("info").toString());
+      log.info(String.format("Ingesting default data type with urn %s", urn));
+      ingestDataType(urn, info);
+      numIngested++;
+    }
+    log.info("Ingested {} new data types", numIngested);
+  }
+
+  private void ingestDataType(final Urn dataTypeUrn, final DataTypeInfo info) throws Exception {
+    // Write key
+    final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal();
+    final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(dataTypeUrn.getEntityType());
+    GenericAspect keyAspect =
+        GenericRecordUtils.serializeAspect(
+            EntityKeyUtils.convertUrnToEntityKey(dataTypeUrn, keyAspectSpec));
+    keyAspectProposal.setAspect(keyAspect);
+    keyAspectProposal.setAspectName(keyAspectSpec.getName());
+    keyAspectProposal.setEntityType(DATA_TYPE_ENTITY_NAME);
+    keyAspectProposal.setChangeType(ChangeType.UPSERT);
+    keyAspectProposal.setEntityUrn(dataTypeUrn);
+
+    final MetadataChangeProposal proposal = new MetadataChangeProposal();
+    proposal.setEntityUrn(dataTypeUrn);
+    proposal.setEntityType(DATA_TYPE_ENTITY_NAME);
+    proposal.setAspectName(DATA_TYPE_INFO_ASPECT_NAME);
+    proposal.setAspect(GenericRecordUtils.serializeAspect(info));
+    proposal.setChangeType(ChangeType.UPSERT);
+
+    _entityService.ingestProposal(
+        proposal,
+        new AuditStamp()
+            .setActor(Urn.createFromString(SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis()),
+        false);
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java
index 194e1ddd73c2c..1420ec116be8f 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java
@@ -41,15 +41,15 @@ public class IngestDefaultGlobalSettingsStep implements BootstrapStep {
 
   private static final String DEFAULT_SETTINGS_RESOURCE_PATH =
"./boot/global_settings.json"; - private final EntityService _entityService; + private final EntityService _entityService; private final String _resourcePath; - public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityService) { + public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityService) { this(entityService, DEFAULT_SETTINGS_RESOURCE_PATH); } public IngestDefaultGlobalSettingsStep( - @Nonnull final EntityService entityService, @Nonnull final String resourcePath) { + @Nonnull final EntityService entityService, @Nonnull final String resourcePath) { _entityService = Objects.requireNonNull(entityService); _resourcePath = Objects.requireNonNull(resourcePath); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java new file mode 100644 index 0000000000000..b2213eda71cae --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java @@ -0,0 +1,88 @@ +package com.linkedin.metadata.boot.steps; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entitytype.EntityTypeInfo; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.GenericAspect; +import com.linkedin.mxe.MetadataChangeProposal; +import java.util.Objects; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +/** This bootstrap step is responsible for ingesting default data types. */ +@Slf4j +public class IngestEntityTypesStep implements BootstrapStep { + + private static final String DATAHUB_NAMESPACE = "datahub"; + private final EntityService _entityService; + + public IngestEntityTypesStep(@Nonnull final EntityService entityService) { + _entityService = Objects.requireNonNull(entityService, "entityService must not be null"); + } + + @Override + public String name() { + return "IngestEntityTypesStep"; + } + + @Override + public void execute() throws Exception { + log.info("Ingesting entity types from base entity registry..."); + + log.info( + "Ingesting {} entity types", _entityService.getEntityRegistry().getEntitySpecs().size()); + int numIngested = 0; + for (final EntitySpec spec : _entityService.getEntityRegistry().getEntitySpecs().values()) { + final Urn entityTypeUrn = + UrnUtils.getUrn( + String.format("urn:li:entityType:%s.%s", DATAHUB_NAMESPACE, spec.getName())); + final EntityTypeInfo info = + new EntityTypeInfo() + .setDisplayName(spec.getName()) // TODO: Support display name in the entity registry. 
+              .setQualifiedName(entityTypeUrn.getId());
+      log.info(String.format("Ingesting entity type with urn %s", entityTypeUrn));
+      ingestEntityType(entityTypeUrn, info);
+      numIngested++;
+    }
+    log.info("Ingested {} new entity types", numIngested);
+  }
+
+  private void ingestEntityType(final Urn entityTypeUrn, final EntityTypeInfo info)
+      throws Exception {
+    // Write key
+    final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal();
+    final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(entityTypeUrn.getEntityType());
+    GenericAspect keyAspect =
+        GenericRecordUtils.serializeAspect(
+            EntityKeyUtils.convertUrnToEntityKey(entityTypeUrn, keyAspectSpec));
+    keyAspectProposal.setAspect(keyAspect);
+    keyAspectProposal.setAspectName(keyAspectSpec.getName());
+    keyAspectProposal.setEntityType(ENTITY_TYPE_ENTITY_NAME);
+    keyAspectProposal.setChangeType(ChangeType.UPSERT);
+    keyAspectProposal.setEntityUrn(entityTypeUrn);
+
+    final MetadataChangeProposal proposal = new MetadataChangeProposal();
+    proposal.setEntityUrn(entityTypeUrn);
+    proposal.setEntityType(ENTITY_TYPE_ENTITY_NAME);
+    proposal.setAspectName(ENTITY_TYPE_INFO_ASPECT_NAME);
+    proposal.setAspect(GenericRecordUtils.serializeAspect(info));
+    proposal.setChangeType(ChangeType.UPSERT);
+
+    _entityService.ingestProposal(
+        proposal,
+        new AuditStamp()
+            .setActor(Urn.createFromString(SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis()),
+        false);
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
index fc1c82fc6d631..02d965b44fc88 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
@@ -34,7 +34,7 @@ public class IngestOwnershipTypesStep implements BootstrapStep {
 
   private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final Resource _ownershipTypesResource;
 
   @Override
@@ -100,11 +100,7 @@ private void ingestOwnershipType(
 
     _entityService.ingestProposal(
         AspectsBatchImpl.builder()
-            .mcps(
-                List.of(keyAspectProposal, proposal),
-                auditStamp,
-                _entityService.getEntityRegistry(),
-                _entityService.getSystemEntityClient())
+            .mcps(List.of(keyAspectProposal, proposal), auditStamp, _entityService)
             .build(),
         false);
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java
index 9b9feb8e14638..f925c96e333fd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java
@@ -46,7 +46,7 @@ public class IngestPoliciesStep implements BootstrapStep {
   private static final String POLICY_INFO_ASPECT_NAME = "dataHubPolicyInfo";
 
   private final EntityRegistry _entityRegistry;
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final EntitySearchService _entitySearchService;
   private final SearchDocumentTransformer _searchDocumentTransformer;
 
@@ -210,8 +210,7 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info)
       throws URISyntaxException {
                 new AuditStamp()
                     .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
                     .setTime(System.currentTimeMillis()),
-                _entityRegistry,
-                _entityService.getSystemEntityClient())
+                _entityService)
             .build(),
         false);
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
index 9ce4d9ce644a8..28b556e78de12 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
@@ -31,7 +31,7 @@
 @RequiredArgsConstructor
 public class IngestRolesStep implements BootstrapStep {
   private static final int SLEEP_SECONDS = 60;
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final EntityRegistry _entityRegistry;
 
   @Override
@@ -130,8 +130,7 @@ private void ingestRole(
                 new AuditStamp()
                     .setActor(Urn.createFromString(SYSTEM_ACTOR))
                     .setTime(System.currentTimeMillis()),
-                _entityRegistry,
-                _entityService.getSystemEntityClient())
+                _entityService)
             .build(),
         false);
 
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java
index 9e00b960482c5..1f8127d8be108 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java
@@ -29,7 +29,7 @@ public class IngestRootUserStep implements BootstrapStep {
 
   private static final String USER_INFO_ASPECT_NAME = "corpUserInfo";
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
 
   @Override
   public String name() {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java
index 919ba93c9213e..2e60df54452cc 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java
@@ -10,7 +10,6 @@
 import com.linkedin.metadata.boot.UpgradeStep;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.entity.ListResult;
-import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.query.ExtraInfo;
@@ -31,8 +30,7 @@ public class RestoreColumnLineageIndices extends UpgradeStep {
   private final EntityRegistry _entityRegistry;
 
   public RestoreColumnLineageIndices(
-      @Nonnull final EntityService entityService,
-      @Nonnull final EntityRegistry entityRegistry) {
+      @Nonnull final EntityService<?> entityService, @Nonnull final EntityRegistry entityRegistry) {
     super(entityService, VERSION, UPGRADE_ID);
     _entityRegistry = Objects.requireNonNull(entityRegistry, "entityRegistry must not be null");
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java
index e2d367a034491..789a4cbd11878 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java
@@ -13,7 +13,6 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.boot.BootstrapStep;
 import com.linkedin.metadata.entity.EntityService;
-import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;
 import com.linkedin.metadata.key.DataHubUpgradeKey;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
@@ -47,7 +46,7 @@ public class RestoreDbtSiblingsIndices implements BootstrapStep {
   private static final Integer BATCH_SIZE = 1000;
   private static final Integer SLEEP_SECONDS = 120;
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final EntityRegistry _entityRegistry;
 
   @Override
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java
index 319bbd084e05c..5c2b2c28e6dcf 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java
@@ -38,7 +38,7 @@ public class RestoreGlossaryIndices extends UpgradeStep {
   private final EntityRegistry _entityRegistry;
 
   public RestoreGlossaryIndices(
-      EntityService entityService,
+      EntityService<?> entityService,
       EntitySearchService entitySearchService,
       EntityRegistry entityRegistry) {
     super(entityService, VERSION, UPGRADE_ID);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java
index e2d59b505a568..3eedbb48aaeca 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java
@@ -39,7 +39,7 @@ public class UpgradeDefaultBrowsePathsStep extends UpgradeStep {
   private static final String UPGRADE_ID = "upgrade-default-browse-paths-step";
   private static final Integer BATCH_SIZE = 5000;
 
-  public UpgradeDefaultBrowsePathsStep(EntityService entityService) {
+  public UpgradeDefaultBrowsePathsStep(EntityService<?> entityService) {
     super(entityService, VERSION, UPGRADE_ID);
   }
 
diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java
new file mode 100644
index 0000000000000..87f1546bd9557
--- /dev/null
+++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java
@@ -0,0 +1,27 @@
+package com.linkedin.gms.factory.search;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
+import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
+import java.util.Map;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
+import org.testng.annotations.Test;
+
+@TestPropertySource(locations = "classpath:/application.yml")
+@SpringBootTest(classes = {ElasticSearchIndexBuilderFactory.class})
+@EnableConfigurationProperties(ConfigurationProvider.class)
+public class ElasticSearchIndexBuilderFactoryDefaultsTest extends AbstractTestNGSpringContextTests {
+  @Autowired ESIndexBuilder test;
+
+  @Test
+  void testInjection() {
+    assertNotNull(test);
+    assertEquals(Map.of(), test.getIndexSettingOverrides());
+  }
+}
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java
index 8268eeff48c5e..0657141562089 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java
@@ -76,7 +76,7 @@ public class BackfillBrowsePathsV2StepTest {
 
   @Test
   public void testExecuteNoExistingBrowsePaths() throws Exception {
-    final EntityService mockService = initMockService();
+    final EntityService<?> mockService = initMockService();
     final SearchService mockSearchService = initMockSearchService();
 
     final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN);
@@ -110,7 +110,7 @@ public void testExecuteNoExistingBrowsePaths() throws Exception {
 
   @Test
   public void testDoesNotRunWhenAlreadyExecuted() throws Exception {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final SearchService mockSearchService = initMockSearchService();
 
     final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN);
@@ -140,8 +140,8 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception {
         Mockito.anyBoolean());
   }
 
-  private EntityService initMockService() throws URISyntaxException {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+  private EntityService<?> initMockService() throws URISyntaxException {
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry registry = new UpgradeDefaultBrowsePathsStepTest.TestEntityRegistry();
     Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);
 
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
index 41672a07a2389..1ac0f2f4f914a 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
@@ -39,7 +39,7 @@ public class IngestDataPlatformInstancesStepTest {
   @Test
   public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists()
       throws Exception {
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class);
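     // (with dataPlatformInstance aspects already present, the step is expected to no-op)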
     mockDBWithDataPlatformInstanceAspects(migrationsDao);
@@ -55,7 +55,7 @@ public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists()
 
   @Test
   public void testExecuteCopesWithEmptyDB() throws Exception {
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class);
 
     mockEmptyDB(migrationsDao);
@@ -73,7 +73,7 @@ public void testExecuteCopesWithEmptyDB() throws Exception {
   @Test
   public void testExecuteChecksKeySpecForAllUrns() throws Exception {
     final EntityRegistry entityRegistry = getTestEntityRegistry();
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class);
     final int countOfCorpUserEntities = 2;
     final int countOfChartEntities = 4;
@@ -96,7 +96,7 @@ public void testExecuteChecksKeySpecForAllUrns() throws Exception {
   @Test
   public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throws Exception {
     final EntityRegistry entityRegistry = getTestEntityRegistry();
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class);
     final int countOfCorpUserEntities = 5;
     final int countOfChartEntities = 7;
@@ -161,7 +161,7 @@ private void mockEmptyDB(AspectMigrationsDao migrationsDao) {
 
   private void mockDBWithWorkToDo(
       EntityRegistry entityRegistry,
-      EntityService entityService,
+      EntityService<?> entityService,
       AspectMigrationsDao migrationsDao,
       int countOfCorpUserEntities,
       int countOfChartEntities) {
@@ -194,7 +194,7 @@ private List<Urn> insertMockEntities(
       String entity,
       String urnTemplate,
       EntityRegistry entityRegistry,
-      EntityService entityService) {
+      EntityService<?> entityService) {
     EntitySpec entitySpec = entityRegistry.getEntitySpec(entity);
     AspectSpec keySpec = entitySpec.getKeyAspectSpec();
     List<Urn> urns = new ArrayList<>();
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java
new file mode 100644
index 0000000000000..2bbd06c8a61a4
--- /dev/null
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java
@@ -0,0 +1,81 @@
+package com.linkedin.metadata.boot.steps;
+
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+
+import com.linkedin.common.AuditStamp;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.datatype.DataTypeInfo;
+import com.linkedin.events.metadata.ChangeType;
+import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
+import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.utils.GenericRecordUtils;
+import com.linkedin.mxe.MetadataChangeProposal;
+import org.jetbrains.annotations.NotNull;
+import org.mockito.Mockito;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class IngestDataTypesStepTest {
+
+  private static final Urn TEST_DATA_TYPE_URN = UrnUtils.getUrn("urn:li:dataType:datahub.test");
+
+  @Test
+  public void testExecuteValidDataTypesNoExistingDataTypes() throws Exception {
+    EntityRegistry testEntityRegistry = getTestEntityRegistry();
+    final EntityService<?> entityService = mock(EntityService.class);
+    when(entityService.getEntityRegistry()).thenReturn(testEntityRegistry);
+    when(entityService.getKeyAspectSpec(anyString()))
+        .thenAnswer(
+            args -> testEntityRegistry.getEntitySpec(args.getArgument(0)).getKeyAspectSpec());
+
+    final IngestDataTypesStep step =
+        new IngestDataTypesStep(entityService, "./boot/test_data_types_valid.json");
+
+    step.execute();
+
+    DataTypeInfo expectedResult = new DataTypeInfo();
+    expectedResult.setDescription("Test Description");
+    expectedResult.setDisplayName("Test Name");
+    expectedResult.setQualifiedName("datahub.test");
+
+    Mockito.verify(entityService, times(1))
+        .ingestProposal(
+            Mockito.eq(buildUpdateDataTypeProposal(expectedResult)),
+            Mockito.any(AuditStamp.class),
+            Mockito.eq(false));
+  }
+
+  @Test
+  public void testExecuteInvalidJson() throws Exception {
+    final EntityService<?> entityService = mock(EntityService.class);
+
+    final IngestDataTypesStep step =
+        new IngestDataTypesStep(entityService, "./boot/test_data_types_invalid.json");
+
+    Assert.assertThrows(RuntimeException.class, step::execute);
+
+    // Verify no interactions
+    verifyNoInteractions(entityService);
+  }
+
+  private static MetadataChangeProposal buildUpdateDataTypeProposal(final DataTypeInfo info) {
+    final MetadataChangeProposal mcp = new MetadataChangeProposal();
+    mcp.setEntityUrn(TEST_DATA_TYPE_URN);
+    mcp.setEntityType(DATA_TYPE_ENTITY_NAME);
+    mcp.setAspectName(DATA_TYPE_INFO_ASPECT_NAME);
+    mcp.setChangeType(ChangeType.UPSERT);
+    mcp.setAspect(GenericRecordUtils.serializeAspect(info));
+    return mcp;
+  }
+
+  @NotNull
+  private ConfigEntityRegistry getTestEntityRegistry() {
+    return new ConfigEntityRegistry(
+        IngestDataPlatformInstancesStepTest.class
+            .getClassLoader()
+            .getResourceAsStream("test-entity-registry.yaml"));
+  }
+}
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java
index b28a6e9f5cc5b..783c82934599c 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java
@@ -25,7 +25,7 @@ public class IngestDefaultGlobalSettingsStepTest {
 
   @Test
   public void testExecuteValidSettingsNoExistingSettings() throws Exception {
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     configureEntityServiceMock(entityService, null);
 
     final IngestDefaultGlobalSettingsStep step =
@@ -49,7 +49,7 @@ public void testExecuteValidSettingsNoExistingSettings() throws Exception {
   public void testExecuteValidSettingsExistingSettings() throws Exception {
     // Verify that the user provided settings overrides are NOT overwritten.
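     // (an existing GlobalSettingsInfo aspect should survive re-running the bootstrap step)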
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     final GlobalSettingsInfo existingSettings =
         new GlobalSettingsInfo()
             .setViews(
@@ -77,7 +77,7 @@ public void testExecuteValidSettingsExistingSettings() throws Exception {
 
   @Test
   public void testExecuteInvalidJsonSettings() throws Exception {
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     configureEntityServiceMock(entityService, null);
 
     final IngestDefaultGlobalSettingsStep step =
@@ -92,7 +92,7 @@ public void testExecuteInvalidJsonSettings() throws Exception {
 
   @Test
   public void testExecuteInvalidModelSettings() throws Exception {
-    final EntityService entityService = mock(EntityService.class);
+    final EntityService<?> entityService = mock(EntityService.class);
     configureEntityServiceMock(entityService, null);
 
     final IngestDefaultGlobalSettingsStep step =
@@ -106,7 +106,7 @@ public void testExecuteInvalidModelSettings() throws Exception {
   }
 
   private static void configureEntityServiceMock(
-      final EntityService mockService, final GlobalSettingsInfo settingsInfo) {
+      final EntityService<?> mockService, final GlobalSettingsInfo settingsInfo) {
     Mockito.when(
             mockService.getAspect(
                 Mockito.eq(GLOBAL_SETTINGS_URN),
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java
new file mode 100644
index 0000000000000..0b87283fbe2f7
--- /dev/null
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java
@@ -0,0 +1,91 @@
+package com.linkedin.metadata.boot.steps;
+
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+
+import com.linkedin.common.AuditStamp;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.entitytype.EntityTypeInfo;
+import com.linkedin.events.metadata.ChangeType;
+import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
+import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.utils.GenericRecordUtils;
+import com.linkedin.mxe.MetadataChangeProposal;
+import org.jetbrains.annotations.NotNull;
+import org.mockito.Mockito;
+import org.testng.annotations.Test;
+
+public class IngestEntityTypesStepTest {
+
+  @Test
+  public void testExecuteTestEntityRegistry() throws Exception {
+    EntityRegistry testEntityRegistry = getTestEntityRegistry();
+    final EntityService<?> entityService = mock(EntityService.class);
+    when(entityService.getEntityRegistry()).thenReturn(testEntityRegistry);
+    when(entityService.getKeyAspectSpec(anyString()))
+        .thenAnswer(
+            args -> testEntityRegistry.getEntitySpec(args.getArgument(0)).getKeyAspectSpec());
+
+    final IngestEntityTypesStep step = new IngestEntityTypesStep(entityService);
+
+    step.execute();
+
+    Urn userUrn =
+        Urn.createFromString(String.format("urn:li:entityType:datahub.%s", CORP_USER_ENTITY_NAME));
+    EntityTypeInfo userInfo = new EntityTypeInfo();
+    userInfo.setDisplayName("corpuser");
+    userInfo.setQualifiedName("datahub.corpuser");
+
+    Urn chartUrn =
+        Urn.createFromString(String.format("urn:li:entityType:datahub.%s", CHART_ENTITY_NAME));
+    EntityTypeInfo chartInfo = new EntityTypeInfo();
+    chartInfo.setDisplayName("chart");
+    chartInfo.setQualifiedName("datahub.chart");
+
+    Urn dataPlatformUrn =
+        Urn.createFromString(
+            String.format("urn:li:entityType:datahub.%s", DATA_PLATFORM_ENTITY_NAME));
+    EntityTypeInfo dataPlatformInfo = new EntityTypeInfo();
+    dataPlatformInfo.setDisplayName("dataPlatform");
+    dataPlatformInfo.setQualifiedName("datahub.dataPlatform");
+
+    // Verify all entities were ingested.
+    Mockito.verify(entityService, times(1))
+        .ingestProposal(
+            Mockito.eq(buildUpdateEntityTypeProposal(userUrn, userInfo)),
+            Mockito.any(AuditStamp.class),
+            Mockito.eq(false));
+
+    Mockito.verify(entityService, times(1))
+        .ingestProposal(
+            Mockito.eq(buildUpdateEntityTypeProposal(chartUrn, chartInfo)),
+            Mockito.any(AuditStamp.class),
+            Mockito.eq(false));
+
+    Mockito.verify(entityService, times(1))
+        .ingestProposal(
+            Mockito.eq(buildUpdateEntityTypeProposal(dataPlatformUrn, dataPlatformInfo)),
+            Mockito.any(AuditStamp.class),
+            Mockito.eq(false));
+  }
+
+  private static MetadataChangeProposal buildUpdateEntityTypeProposal(
+      final Urn entityTypeUrn, final EntityTypeInfo info) {
+    final MetadataChangeProposal mcp = new MetadataChangeProposal();
+    mcp.setEntityUrn(entityTypeUrn);
+    mcp.setEntityType(ENTITY_TYPE_ENTITY_NAME);
+    mcp.setAspectName(ENTITY_TYPE_INFO_ASPECT_NAME);
+    mcp.setChangeType(ChangeType.UPSERT);
+    mcp.setAspect(GenericRecordUtils.serializeAspect(info));
+    return mcp;
+  }
+
+  @NotNull
+  private ConfigEntityRegistry getTestEntityRegistry() {
+    return new ConfigEntityRegistry(
+        IngestDataPlatformInstancesStepTest.class
+            .getClassLoader()
+            .getResourceAsStream("test-entity-registry.yaml"));
+  }
+}
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java
index 3b23368d8e99f..9e647da9ef2e9 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java
@@ -46,7 +46,7 @@ public class RestoreColumnLineageIndicesTest {
 
   @Test
   public void testExecuteFirstTime() throws Exception {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class);
 
     mockGetUpgradeStep(false, VERSION_1, mockService);
@@ -109,7 +109,7 @@ public void testExecuteFirstTime() throws Exception {
 
   @Test
   public void testExecuteWithNewVersion() throws Exception {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class);
 
     mockGetUpgradeStep(true, VERSION_2, mockService);
@@ -172,7 +172,7 @@ public void testExecuteWithNewVersion() throws Exception {
 
   @Test
   public void testDoesNotExecuteWithSameVersion() throws Exception {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class);
 
     mockGetUpgradeStep(true, VERSION_1, mockService);
@@ -233,7 +233,8 @@ public void testDoesNotExecuteWithSameVersion() throws Exception {
         Mockito.eq(ChangeType.RESTATE));
   }
 
-  private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityService mockService) {
+  private void mockGetUpstreamLineage(
datasetUrn, @Nonnull EntityService mockService) { final List extraInfos = ImmutableList.of( new ExtraInfo() @@ -276,7 +277,7 @@ private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityServ } private void mockGetInputFields( - @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { + @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { final List extraInfos = ImmutableList.of( new ExtraInfo() @@ -325,7 +326,7 @@ private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { } private void mockGetUpgradeStep( - boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) + boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) throws Exception { final Urn upgradeEntityUrn = UrnUtils.getUrn(COLUMN_LINEAGE_UPGRADE_URN); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java index a4f0c5e0aaba0..4a4532763f02b 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java @@ -40,7 +40,7 @@ public class RestoreGlossaryIndicesTest { "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); private void mockGetTermInfo( - Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) + Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { Map termInfoAspects = new HashMap<>(); termInfoAspects.put( @@ -79,7 +79,7 @@ private void mockGetTermInfo( } private void mockGetNodeInfo( - Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) + Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { Map nodeInfoAspects = new HashMap<>(); nodeInfoAspects.put( @@ -140,7 +140,7 @@ public void testExecuteFirstTime() throws Exception { Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); @@ -215,7 +215,7 @@ public void testExecutesWithNewVersion() throws Exception { Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); @@ -298,7 +298,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); 
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class);
     final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class);
 
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java
index 17159ba1baf53..024ad7b16a844 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java
@@ -12,6 +12,7 @@
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.metadata.Constants;
+import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.entity.ListResult;
 import com.linkedin.metadata.models.AspectSpec;
@@ -19,7 +20,6 @@
 import com.linkedin.metadata.models.EntitySpecBuilder;
 import com.linkedin.metadata.models.EventSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
-import com.linkedin.metadata.models.registry.template.AspectTemplateEngine;
 import com.linkedin.metadata.query.ExtraInfo;
 import com.linkedin.metadata.query.ExtraInfoArray;
 import com.linkedin.metadata.query.ListResultMetadata;
@@ -48,7 +48,7 @@ public class UpgradeDefaultBrowsePathsStepTest {
 
   @Test
   public void testExecuteNoExistingBrowsePaths() throws Exception {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry registry = new TestEntityRegistry();
     Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);
 
@@ -104,7 +104,7 @@ public void testExecuteFirstTime() throws Exception {
     Urn testUrn2 =
         UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)");
 
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry registry = new TestEntityRegistry();
     Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);
     Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1)))
@@ -193,7 +193,7 @@ public void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception {
             "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not
     // migrate
 
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
     final EntityRegistry registry = new TestEntityRegistry();
     Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);
 
@@ -269,7 +269,7 @@ public void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception {
 
   @Test
   public void testDoesNotRunWhenAlreadyExecuted() throws Exception {
-    final EntityService mockService = Mockito.mock(EntityService.class);
+    final EntityService<?> mockService = Mockito.mock(EntityService.class);
 
     final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN);
     com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest =
@@ -297,7 +297,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception {
         Mockito.anyBoolean());
   }
 
-  private void initMockServiceOtherEntities(EntityService mockService) {
+  private void initMockServiceOtherEntities(EntityService<?> mockService) {
     List<String> skippedEntityTypes =
         ImmutableList.of(
             Constants.DASHBOARD_ENTITY_NAME,
diff --git a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java
index 9931f044931b6..17bf7810f71e4 100644
--- a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java
+++ b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java
@@ -12,7 +12,7 @@
 
 public class TelemetryUtilsTest {
 
-  EntityService _entityService;
+  EntityService<?> _entityService;
 
   @BeforeMethod
   public void init() {
diff --git a/metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json b/metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json
new file mode 100644
index 0000000000000..ed1d8a7b45abe
--- /dev/null
+++ b/metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json
@@ -0,0 +1,9 @@
+[
+  {
+    "urn": "urn:li:dataType:datahub.test",
+    "badField": {
+      "qualifiedName":"datahub.test",
+      "description": "Test Description"
+    }
+  }
+]
\ No newline at end of file
diff --git a/metadata-service/factories/src/test/resources/boot/test_data_types_valid.json b/metadata-service/factories/src/test/resources/boot/test_data_types_valid.json
new file mode 100644
index 0000000000000..3694c92947aa1
--- /dev/null
+++ b/metadata-service/factories/src/test/resources/boot/test_data_types_valid.json
@@ -0,0 +1,10 @@
+[
+  {
+    "urn": "urn:li:dataType:datahub.test",
+    "info": {
+      "qualifiedName":"datahub.test",
+      "displayName": "Test Name",
+      "description": "Test Description"
+    }
+  }
+]
\ No newline at end of file
diff --git a/metadata-service/factories/src/test/resources/test-entity-registry.yaml b/metadata-service/factories/src/test/resources/test-entity-registry.yaml
index fe32b413751e6..400b22446c186 100644
--- a/metadata-service/factories/src/test/resources/test-entity-registry.yaml
+++ b/metadata-service/factories/src/test/resources/test-entity-registry.yaml
@@ -13,4 +13,20 @@ entities:
     category: core
     keyAspect: dataPlatformKey
     aspects:
-      - dataPlatformInfo
\ No newline at end of file
+      - dataPlatformInfo
+  - name: entityType
+    doc: A type of entity in the DataHub Metadata Model.
+    category: core
+    keyAspect: entityTypeKey
+    aspects:
+      - entityTypeInfo
+      - institutionalMemory
+      - status
+  - name: dataType
+    doc: A type of data element stored within DataHub.
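+    # test-only registry entry mirroring entityType above; exercised by IngestDataTypesStepTest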
+    category: core
+    keyAspect: dataTypeKey
+    aspects:
+      - dataTypeInfo
+      - institutionalMemory
+      - status
\ No newline at end of file
diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle
index fb49727fa70d1..016ac6693f55b 100644
--- a/metadata-service/openapi-entity-servlet/build.gradle
+++ b/metadata-service/openapi-entity-servlet/build.gradle
@@ -75,7 +75,7 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents,
           'java11'             : "true",
           'modelPropertyNaming': "original",
           'modelPackage'       : "io.datahubproject.openapi.generated",
-          'apiPackage'         : "io.datahubproject.openapi.generated.controller",
+          'apiPackage'         : "io.datahubproject.openapi.v2.generated.controller",
           'delegatePattern'    : "false"
   ]
 }
diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java
similarity index 86%
rename from metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java
rename to metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java
index d7c8268903508..39a7e4722988e 100644
--- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java
+++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java
@@ -1,4 +1,4 @@
-package io.datahubproject.openapi.delegates;
+package io.datahubproject.openapi.v2.delegates;
 
 import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst;
 
@@ -35,10 +35,16 @@
 import io.datahubproject.openapi.generated.DeprecationAspectResponseV2;
 import io.datahubproject.openapi.generated.DomainsAspectRequestV2;
 import io.datahubproject.openapi.generated.DomainsAspectResponseV2;
+import io.datahubproject.openapi.generated.DynamicFormAssignmentAspectRequestV2;
+import io.datahubproject.openapi.generated.DynamicFormAssignmentAspectResponseV2;
 import io.datahubproject.openapi.generated.EditableChartPropertiesAspectRequestV2;
 import io.datahubproject.openapi.generated.EditableChartPropertiesAspectResponseV2;
 import io.datahubproject.openapi.generated.EditableDatasetPropertiesAspectRequestV2;
 import io.datahubproject.openapi.generated.EditableDatasetPropertiesAspectResponseV2;
+import io.datahubproject.openapi.generated.FormInfoAspectRequestV2;
+import io.datahubproject.openapi.generated.FormInfoAspectResponseV2;
+import io.datahubproject.openapi.generated.FormsAspectRequestV2;
+import io.datahubproject.openapi.generated.FormsAspectResponseV2;
 import io.datahubproject.openapi.generated.GlobalTagsAspectRequestV2;
 import io.datahubproject.openapi.generated.GlobalTagsAspectResponseV2;
 import io.datahubproject.openapi.generated.GlossaryTermsAspectRequestV2;
@@ -66,7 +72,7 @@ public class EntityApiDelegateImpl {
 
   private final EntityRegistry _entityRegistry;
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final SearchService _searchService;
   private final EntitiesController _v1Controller;
   private final AuthorizerChain _authorizationChain;
@@ -79,7 +85,7 @@ public class EntityApiDelegateImpl {
   private final StackWalker walker = StackWalker.getInstance();
 
   public EntityApiDelegateImpl(
-      EntityService entityService,
+      EntityService<?> entityService,
       SearchService searchService,
       EntitiesController entitiesController,
       boolean restApiAuthorizationEnabled,
@@ -732,4 +738,111 @@ public ResponseEntity<Void> deleteDataProductProperties(String urn) {
         walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
     return deleteAspect(urn, methodNameToAspectName(methodName));
   }
+
+  public ResponseEntity<FormsAspectResponseV2> createForms(FormsAspectRequestV2 body, String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return createAspect(
+        urn,
+        methodNameToAspectName(methodName),
+        body,
+        FormsAspectRequestV2.class,
+        FormsAspectResponseV2.class);
+  }
+
+  public ResponseEntity<Void> deleteForms(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<FormsAspectResponseV2> getForms(
+      String urn, @jakarta.validation.Valid Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        FormsAspectResponseV2.class);
+  }
+
+  public ResponseEntity<Void> headForms(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<DynamicFormAssignmentAspectResponseV2> createDynamicFormAssignment(
+      DynamicFormAssignmentAspectRequestV2 body, String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return createAspect(
+        urn,
+        methodNameToAspectName(methodName),
+        body,
+        DynamicFormAssignmentAspectRequestV2.class,
+        DynamicFormAssignmentAspectResponseV2.class);
+  }
+
+  public ResponseEntity<FormInfoAspectResponseV2> createFormInfo(
+      FormInfoAspectRequestV2 body, String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return createAspect(
+        urn,
+        methodNameToAspectName(methodName),
+        body,
+        FormInfoAspectRequestV2.class,
+        FormInfoAspectResponseV2.class);
+  }
+
+  public ResponseEntity<Void> deleteDynamicFormAssignment(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headDynamicFormAssignment(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headFormInfo(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<FormInfoAspectResponseV2> getFormInfo(
+      String urn, @jakarta.validation.Valid Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        FormInfoAspectResponseV2.class);
+  }
+
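+  // Each of these handlers follows one pattern: StackWalker recovers the invoked method's
+  // name, which is mapped to the aspect name and routed through the shared
+  // create/get/head/delete aspect helpers.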
+  public ResponseEntity<DynamicFormAssignmentAspectResponseV2> getDynamicFormAssignment(
+      String urn, @jakarta.validation.Valid Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        DynamicFormAssignmentAspectResponseV2.class);
+  }
+
+  public ResponseEntity<Void> deleteFormInfo(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
 }
diff --git a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache
index 4a29b95eabc5d..7ac087f220561 100644
--- a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache
+++ b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache
@@ -1,6 +1,6 @@
 package {{package}};
 
-import io.datahubproject.openapi.delegates.EntityApiDelegateImpl;
+import io.datahubproject.openapi.v2.delegates.EntityApiDelegateImpl;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.search.SearchService;
 import io.datahubproject.openapi.entities.EntitiesController;
diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java
similarity index 97%
rename from metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
rename to metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java
index 1f8f0a5023513..d4217c9fd1b66 100644
--- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
+++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java
@@ -1,4 +1,4 @@
-package io.datahubproject.openapi.delegates;
+package io.datahubproject.openapi.v2.delegates;
 
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 import static org.testng.Assert.*;
@@ -32,8 +32,8 @@
 import io.datahubproject.openapi.generated.Status;
 import io.datahubproject.openapi.generated.StatusAspectRequestV2;
 import io.datahubproject.openapi.generated.TagAssociation;
-import io.datahubproject.openapi.generated.controller.ChartApiController;
-import io.datahubproject.openapi.generated.controller.DatasetApiController;
+import io.datahubproject.openapi.v2.generated.controller.ChartApiController;
+import io.datahubproject.openapi.v2.generated.controller.DatasetApiController;
 import java.util.List;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
@@ -51,7 +51,7 @@
 import org.testng.annotations.Test;
 
 @SpringBootTest(classes = {SpringWebConfig.class})
-@ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"})
+@ComponentScan(basePackages = {"io.datahubproject.openapi.v2.generated.controller"})
 @Import({OpenAPIEntityTestConfiguration.class})
 @AutoConfigureMockMvc
 public class EntityApiDelegateImplTest extends AbstractTestNGSpringContextTests {
b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java index cc040d29657b2..f4689a9862825 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java @@ -1,14 +1,25 @@ package io.datahubproject.openapi; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.ConversionNotSupportedException; +import org.springframework.core.Ordered; import org.springframework.core.convert.ConversionFailedException; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.servlet.mvc.support.DefaultHandlerExceptionResolver; +@Slf4j @ControllerAdvice -public class GlobalControllerExceptionHandler { - @ExceptionHandler(ConversionFailedException.class) +public class GlobalControllerExceptionHandler extends DefaultHandlerExceptionResolver { + + public GlobalControllerExceptionHandler() { + setOrder(Ordered.HIGHEST_PRECEDENCE); + setWarnLogCategory(getClass().getName()); + } + + @ExceptionHandler({ConversionFailedException.class, ConversionNotSupportedException.class}) public ResponseEntity<String> handleConflict(RuntimeException ex) { return new ResponseEntity<>(ex.getMessage(), HttpStatus.BAD_REQUEST); } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java index a8721b23d1fa2..2336bea565e59 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java @@ -4,7 +4,9 @@ import io.swagger.v3.oas.annotations.OpenAPIDefinition; import io.swagger.v3.oas.annotations.info.Info; import io.swagger.v3.oas.annotations.servers.Server; +import java.util.HashSet; import java.util.List; +import java.util.Set; import org.springdoc.core.models.GroupedOpenApi; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -23,6 +25,20 @@ servers = {@Server(url = "/openapi/", description = "Default Server URL")}) @Configuration public class SpringWebConfig implements WebMvcConfigurer { + private static final Set<String> OPERATIONS_PACKAGES = + Set.of("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health"); + private static final Set<String> V2_PACKAGES = Set.of("io.datahubproject.openapi.v2"); + private static final Set<String> SCHEMA_REGISTRY_PACKAGES = + Set.of("io.datahubproject.openapi.schema.registry"); + + public static final Set<String> NONDEFAULT_OPENAPI_PACKAGES; + + static { + NONDEFAULT_OPENAPI_PACKAGES = new HashSet<>(); + NONDEFAULT_OPENAPI_PACKAGES.addAll(OPERATIONS_PACKAGES); + NONDEFAULT_OPENAPI_PACKAGES.addAll(V2_PACKAGES); + NONDEFAULT_OPENAPI_PACKAGES.addAll(SCHEMA_REGISTRY_PACKAGES); + } @Override public void configureMessageConverters(List<HttpMessageConverter<?>> messageConverters) { @@ -41,16 +57,23 @@ public void addFormatters(FormatterRegistry registry) { public GroupedOpenApi defaultOpenApiGroup() { return GroupedOpenApi.builder() .group("default") - .packagesToExclude( "io.datahubproject.openapi.operations",
"io.datahubproject.openapi.health") + .packagesToExclude(NONDEFAULT_OPENAPI_PACKAGES.toArray(String[]::new)) .build(); } @Bean public GroupedOpenApi operationsOpenApiGroup() { return GroupedOpenApi.builder() - .group("operations") - .packagesToScan("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .group("Operations") + .packagesToScan(OPERATIONS_PACKAGES.toArray(String[]::new)) + .build(); + } + + @Bean + public GroupedOpenApi openApiGroupV3() { + return GroupedOpenApi.builder() + .group("OpenAPI v2") + .packagesToScan(V2_PACKAGES.toArray(String[]::new)) .build(); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index c87820465dc88..a7e88966e4f87 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -465,11 +465,7 @@ public static Pair ingestProposal( AspectsBatch batch = AspectsBatchImpl.builder() - .mcps( - proposalStream.collect(Collectors.toList()), - auditStamp, - entityService.getEntityRegistry(), - entityService.getSystemEntityClient()) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, entityService) .build(); Set proposalResult = entityService.ingestProposal(batch, async); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java index 31577429df72d..6c0474dc6cfb6 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java @@ -134,11 +134,34 @@ public Method lookupMethod( return lookupMethod(builderPair.getFirst(), method, parameters); } + /** + * Convert class name to the pdl model names. Upper case first letter unless the 3rd character is + * upper case. Reverse of {link ReflectionCache.toUpperFirst} i.e. MLModel -> mlModel Dataset -> + * dataset DataProduct -> dataProduct + * + * @param s input string + * @return class name + */ public static String toLowerFirst(String s) { - return s.substring(0, 1).toLowerCase() + s.substring(1); + if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) { + return s.substring(0, 2).toLowerCase() + s.substring(2); + } else { + return s.substring(0, 1).toLowerCase() + s.substring(1); + } } + /** + * Convert the pdl model names to desired class names. Upper case first letter unless the 3rd + * character is upper case. i.e. 
mlModel -> MLModel dataset -> Dataset dataProduct -> DataProduct + * + * @param s input string + * @return class name + */ public static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) { + return s.substring(0, 2).toUpperCase() + s.substring(2); + } else { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java new file mode 100644 index 0000000000000..503330fdc8a2e --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -0,0 +1,507 @@ +package io.datahubproject.openapi.v2.controller; + +import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthorizerChain; +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.ByteString; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.metadata.aspect.batch.UpsertItem; +import com.linkedin.metadata.aspect.patch.GenericJsonPatch; +import com.linkedin.metadata.aspect.patch.template.common.GenericPatchTemplate; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.UpdateAspectResult; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.metadata.utils.SearchUtil; +import com.linkedin.mxe.SystemMetadata; +import com.linkedin.util.Pair; +import io.datahubproject.openapi.v2.models.GenericEntity; +import io.datahubproject.openapi.v2.models.GenericScrollResult; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.lang.reflect.InvocationTargetException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v2/entity") +@Slf4j +public class EntityController { + private static final SearchFlags DEFAULT_SEARCH_FLAGS = + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + @Autowired private EntityRegistry entityRegistry; + @Autowired private SearchService searchService; + @Autowired private EntityService entityService; + @Autowired private AuthorizerChain authorizationChain; + @Autowired private boolean restApiAuthorizationEnabled; + @Autowired private ObjectMapper objectMapper; + + @Tag(name = "Generic Entities", description = "API for interacting with generic entities.") + @GetMapping(value = "/{entityName}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Scroll entities") + public ResponseEntity> getEntities( + @PathVariable("entityName") String entityName, + @RequestParam(value = "aspectNames", defaultValue = "") Set aspectNames, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "query", defaultValue = "*") String query, + @RequestParam(value = "scrollId", required = false) String scrollId, + @RequestParam(value = "sort", required = false, defaultValue = "urn") String sortField, + @RequestParam(value = "sortOrder", required = false, defaultValue = "ASCENDING") + String sortOrder, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata) + throws URISyntaxException { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + // TODO: support additional and multiple sort params + SortCriterion sortCriterion = SearchUtil.sortBy(sortField, SortOrder.valueOf(sortOrder)); + + ScrollResult result = + searchService.scrollAcrossEntities( + List.of(entitySpec.getName()), + query, + null, + sortCriterion, + scrollId, + null, + count, + DEFAULT_SEARCH_FLAGS); + + return ResponseEntity.ok( + GenericScrollResult.builder() + .results(toRecordTemplates(result.getEntities(), aspectNames, withSystemMetadata)) + .scrollId(result.getScrollId()) + .build()); + } + + @Tag(name = "Generic Entities") + @GetMapping(value = "/{entityName}/{entityUrn}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Get an entity") + public ResponseEntity getEntity( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @RequestParam(value = "aspectNames", defaultValue = "") Set aspectNames, + @RequestParam(value = 
"systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata) + throws URISyntaxException { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.of( + toRecordTemplates(List.of(UrnUtils.getUrn(entityUrn)), aspectNames, withSystemMetadata) + .stream() + .findFirst()); + } + + @Tag(name = "Generic Entities") + @RequestMapping( + value = "/{entityName}/{entityUrn}", + method = {RequestMethod.HEAD}) + @Operation(summary = "Entity exists") + public ResponseEntity headEntity( + @PathVariable("entityName") String entityName, @PathVariable("entityUrn") String entityUrn) { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return exists(UrnUtils.getUrn(entityUrn), null) + ? ResponseEntity.noContent().build() + : ResponseEntity.notFound().build(); + } + + @Tag(name = "Generic Aspects", description = "API for generic aspects.") + @GetMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Get an entity's generic aspect.") + public ResponseEntity getAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName) + throws URISyntaxException { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.of( + toRecordTemplates(List.of(UrnUtils.getUrn(entityUrn)), Set.of(aspectName), true).stream() + .findFirst() + .flatMap(e -> e.getAspects().values().stream().findFirst())); + } + + @Tag(name = "Generic Aspects") + @RequestMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + method = {RequestMethod.HEAD}) + @Operation(summary = "Whether an entity aspect exists.") + public ResponseEntity headAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName) { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return exists(UrnUtils.getUrn(entityUrn), aspectName) + ? 
ResponseEntity.noContent().build() + : ResponseEntity.notFound().build(); + } + + @Tag(name = "Generic Entities") + @DeleteMapping(value = "/{entityName}/{entityUrn}") + @Operation(summary = "Delete an entity") + public void deleteEntity( + @PathVariable("entityName") String entityName, @PathVariable("entityUrn") String entityUrn) { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())); + } + + entityService.deleteAspect(entityUrn, entitySpec.getKeyAspectName(), Map.of(), true); + } + + @Tag(name = "Generic Aspects") + @DeleteMapping(value = "/{entityName}/{entityUrn}/{aspectName}") + @Operation(summary = "Delete an entity aspect.") + public void deleteAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName) { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())); + } + + entityService.deleteAspect(entityUrn, aspectName, Map.of(), true); + } + + @Tag(name = "Generic Aspects") + @PostMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Create an entity aspect.") + public ResponseEntity createAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata, + @RequestBody @Nonnull String jsonAspect) + throws URISyntaxException { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + Authentication authentication = AuthenticationContext.getAuthentication(); + + if (restApiAuthorizationEnabled) { + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + } + + AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); + UpsertItem upsert = + toUpsertItem(UrnUtils.getUrn(entityUrn), aspectSpec, jsonAspect, authentication.getActor()); + + List results = + entityService.ingestAspects( + AspectsBatchImpl.builder().items(List.of(upsert)).build(), true, true); + + return ResponseEntity.of( + results.stream() + .findFirst() + .map( + result -> + GenericEntity.builder() + .urn(result.getUrn().toString()) + .build( + objectMapper, + Map.of( + aspectName, + Pair.of( + result.getNewValue(), + withSystemMetadata ? result.getNewSystemMetadata() : null))))); + } + + @Tag(name = "Generic Aspects") + @PatchMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + consumes = "application/json-patch+json", + produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Patch an entity aspect. 
(Experimental)") + public ResponseEntity patchAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata, + @RequestBody @Nonnull GenericJsonPatch patch) + throws URISyntaxException, + NoSuchMethodException, + InvocationTargetException, + InstantiationException, + IllegalAccessException { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + Authentication authentication = AuthenticationContext.getAuthentication(); + + if (restApiAuthorizationEnabled) { + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + } + + RecordTemplate currentValue = + entityService.getAspect(UrnUtils.getUrn(entityUrn), aspectName, 0); + + AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); + GenericPatchTemplate genericPatchTemplate = + GenericPatchTemplate.builder() + .genericJsonPatch(patch) + .templateType(aspectSpec.getDataTemplateClass()) + .templateDefault( + aspectSpec.getDataTemplateClass().getDeclaredConstructor().newInstance()) + .build(); + UpsertItem upsert = + toUpsertItem( + UrnUtils.getUrn(entityUrn), + aspectSpec, + currentValue, + genericPatchTemplate, + authentication.getActor()); + + List results = + entityService.ingestAspects( + AspectsBatchImpl.builder().items(List.of(upsert)).build(), true, true); + + return ResponseEntity.of( + results.stream() + .findFirst() + .map( + result -> + GenericEntity.builder() + .urn(result.getUrn().toString()) + .build( + objectMapper, + Map.of( + aspectName, + Pair.of( + result.getNewValue(), + withSystemMetadata ? result.getNewSystemMetadata() : null))))); + } + + private List toRecordTemplates( + SearchEntityArray searchEntities, Set aspectNames, boolean withSystemMetadata) + throws URISyntaxException { + return toRecordTemplates( + searchEntities.stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + aspectNames, + withSystemMetadata); + } + + private Boolean exists(Urn urn, @Nullable String aspect) { + return aspect == null ? 
entityService.exists(urn, true) : entityService.exists(urn, aspect); + } + + private List toRecordTemplates( + List urns, Set aspectNames, boolean withSystemMetadata) + throws URISyntaxException { + if (urns.isEmpty()) { + return List.of(); + } else { + Set urnsSet = new HashSet<>(urns); + + Map> aspects = + entityService.getLatestEnvelopedAspects( + urnsSet, resolveAspectNames(urnsSet, aspectNames)); + + return urns.stream() + .map( + u -> + GenericEntity.builder() + .urn(u.toString()) + .build( + objectMapper, + toAspectMap(u, aspects.getOrDefault(u, List.of()), withSystemMetadata))) + .collect(Collectors.toList()); + } + } + + private Set resolveAspectNames(Set urns, Set requestedNames) { + if (requestedNames.isEmpty()) { + return urns.stream() + .flatMap(u -> entityRegistry.getEntitySpec(u.getEntityType()).getAspectSpecs().stream()) + .map(AspectSpec::getName) + .collect(Collectors.toSet()); + } else { + // ensure key is always present + return Stream.concat( + requestedNames.stream(), + urns.stream() + .map(u -> entityRegistry.getEntitySpec(u.getEntityType()).getKeyAspectName())) + .collect(Collectors.toSet()); + } + } + + private Map> toAspectMap( + Urn urn, List aspects, boolean withSystemMetadata) { + return aspects.stream() + .map( + a -> + Map.entry( + a.getName(), + Pair.of( + toRecordTemplate(lookupAspectSpec(urn, a.getName()), a), + withSystemMetadata ? a.getSystemMetadata() : null))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private AspectSpec lookupAspectSpec(Urn urn, String aspectName) { + return entityRegistry.getEntitySpec(urn.getEntityType()).getAspectSpec(aspectName); + } + + private RecordTemplate toRecordTemplate(AspectSpec aspectSpec, EnvelopedAspect envelopedAspect) { + return RecordUtils.toRecordTemplate( + aspectSpec.getDataTemplateClass(), envelopedAspect.getValue().data()); + } + + private UpsertItem toUpsertItem( + Urn entityUrn, AspectSpec aspectSpec, String jsonAspect, Actor actor) + throws URISyntaxException { + return MCPUpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .aspect( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(jsonAspect, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(entityService); + } + + private UpsertItem toUpsertItem( + @Nonnull Urn urn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate currentValue, + @Nonnull GenericPatchTemplate genericPatchTemplate, + @Nonnull Actor actor) + throws URISyntaxException { + return MCPUpsertBatchItem.fromPatch( + urn, + aspectSpec, + currentValue, + genericPatchTemplate, + AuditStampUtils.createAuditStamp(actor.toUrnStr()), + entityService); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java new file mode 100644 index 0000000000000..3550a86163f51 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java @@ -0,0 +1,228 @@ +package io.datahubproject.openapi.v2.controller; + +import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthorizerChain; +import 
com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.graph.RelatedEntities; +import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; +import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.metadata.utils.SearchUtil; +import io.datahubproject.openapi.v2.models.GenericRelationship; +import io.datahubproject.openapi.v2.models.GenericScrollResult; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v2/relationship") +@Slf4j +@Tag( + name = "Generic Relationships", + description = "APIs for ingesting and accessing entity relationships.") +public class RelationshipController { + + private static final String[] SORT_FIELDS = {"source.urn", "destination.urn", "relationshipType"}; + private static final String[] SORT_ORDERS = {"ASCENDING", "ASCENDING", "ASCENDING"}; + private static final List EDGE_SORT_CRITERION; + + static { + EDGE_SORT_CRITERION = + IntStream.range(0, SORT_FIELDS.length) + .mapToObj( + idx -> SearchUtil.sortBy(SORT_FIELDS[idx], SortOrder.valueOf(SORT_ORDERS[idx]))) + .collect(Collectors.toList()); + } + + @Autowired private EntityRegistry entityRegistry; + @Autowired private ElasticSearchGraphService graphService; + @Autowired private AuthorizerChain authorizationChain; + + @Autowired private boolean restApiAuthorizationEnabled; + + /** + * Returns relationship edges by type + * + * @param relationshipType the relationship type + * @param count number of results + * @param scrollId scrolling id + * @return list of relation edges + */ + @GetMapping(value = "/{relationshipType}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Scroll relationships of the given type.") + public ResponseEntity> getRelationshipsByType( + @PathVariable("relationshipType") String relationshipType, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "scrollId", required = false) String scrollId) { + + RelatedEntitiesScrollResult result = + graphService.scrollRelatedEntities( + null, + null, + null, + null, + List.of(relationshipType), + new RelationshipFilter().setDirection(RelationshipDirection.UNDIRECTED), + EDGE_SORT_CRITERION, 
+ scrollId, + count, + null, + null); + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + Set entitySpecs = + result.getEntities().stream() + .flatMap( + relatedEntity -> + Stream.of( + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getUrn()).getEntityType()), + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getSourceUrn()).getEntityType()))) + .collect(Collectors.toSet()); + + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpecs, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.ok( + GenericScrollResult.builder() + .results(toGenericRelationships(result.getEntities())) + .scrollId(result.getScrollId()) + .build()); + } + + /** + * Returns edges for a given urn + * + * @param relationshipTypes types of edges + * @param direction direction of the edges + * @param count number of results + * @param scrollId scroll id + * @return urn edges + */ + @GetMapping(value = "/{entityName}/{entityUrn}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Scroll relationships from a given entity.") + public ResponseEntity> getRelationshipsByEntity( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @RequestParam(value = "relationshipType[]", required = false, defaultValue = "*") + String[] relationshipTypes, + @RequestParam(value = "direction", defaultValue = "OUTGOING") String direction, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "scrollId", required = false) String scrollId) { + + final RelatedEntitiesScrollResult result; + + switch (RelationshipDirection.valueOf(direction.toUpperCase())) { + case INCOMING -> result = + graphService.scrollRelatedEntities( + null, + null, + null, + null, + relationshipTypes.length > 0 && !relationshipTypes[0].equals("*") + ? Arrays.stream(relationshipTypes).toList() + : List.of(), + new RelationshipFilter() + .setDirection(RelationshipDirection.UNDIRECTED) + .setOr(QueryUtils.newFilter("destination.urn", entityUrn).getOr()), + EDGE_SORT_CRITERION, + scrollId, + count, + null, + null); + case OUTGOING -> result = + graphService.scrollRelatedEntities( + null, + null, + null, + null, + relationshipTypes.length > 0 && !relationshipTypes[0].equals("*") + ? 
Arrays.stream(relationshipTypes).toList() + : List.of(), + new RelationshipFilter() + .setDirection(RelationshipDirection.UNDIRECTED) + .setOr(QueryUtils.newFilter("source.urn", entityUrn).getOr()), + EDGE_SORT_CRITERION, + scrollId, + count, + null, + null); + default -> throw new IllegalArgumentException("Direction must be INCOMING or OUTGOING"); + } + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + Set entitySpecs = + result.getEntities().stream() + .flatMap( + relatedEntity -> + Stream.of( + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getDestinationUrn()).getEntityType()), + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getSourceUrn()).getEntityType()))) + .collect(Collectors.toSet()); + + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpecs, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.ok( + GenericScrollResult.builder() + .results(toGenericRelationships(result.getEntities())) + .scrollId(result.getScrollId()) + .build()); + } + + private List toGenericRelationships(List relatedEntities) { + return relatedEntities.stream() + .map( + result -> { + Urn source = UrnUtils.getUrn(result.getSourceUrn()); + Urn dest = UrnUtils.getUrn(result.getDestinationUrn()); + return GenericRelationship.builder() + .relationshipType(result.getRelationshipType()) + .source(GenericRelationship.GenericNode.fromUrn(source)) + .destination(GenericRelationship.GenericNode.fromUrn(dest)) + .build(); + }) + .collect(Collectors.toList()); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java new file mode 100644 index 0000000000000..ab12b68339011 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java @@ -0,0 +1,115 @@ +package io.datahubproject.openapi.v2.controller; + +import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthorizerChain; +import com.google.common.collect.ImmutableList; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.timeseries.GenericTimeseriesDocument; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.TimeseriesScrollResult; +import com.linkedin.metadata.utils.SearchUtil; +import io.datahubproject.openapi.v2.models.GenericScrollResult; +import io.datahubproject.openapi.v2.models.GenericTimeseriesAspect; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.net.URISyntaxException; +import java.util.List; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v2/timeseries") +@Slf4j +@Tag( + name = "Generic Timeseries Aspects", + description = "APIs for ingesting and accessing timeseries aspects") +public class TimeseriesController { + + @Autowired private EntityRegistry entityRegistry; + + @Autowired private TimeseriesAspectService timeseriesAspectService; + + @Autowired private AuthorizerChain authorizationChain; + + @Autowired private boolean restApiAuthorizationEnabled; + + @GetMapping(value = "/{entityName}/{aspectName}", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity> getAspects( + @PathVariable("entityName") String entityName, + @PathVariable("aspectName") String aspectName, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "scrollId", required = false) String scrollId, + @RequestParam(value = "startTimeMillis", required = false) Long startTimeMillis, + @RequestParam(value = "endTimeMillis", required = false) Long endTimeMillis, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata) + throws URISyntaxException { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entityRegistry.getEntitySpec(entityName), + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + AspectSpec aspectSpec = entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName); + if (!aspectSpec.isTimeseries()) { + throw new IllegalArgumentException("Only timeseries aspects are supported."); + } + + List sortCriterion = + List.of( + SearchUtil.sortBy("timestampMillis", SortOrder.DESCENDING), + SearchUtil.sortBy("messageId", SortOrder.DESCENDING)); + + TimeseriesScrollResult result = + timeseriesAspectService.scrollAspects( + entityName, + aspectName, + null, + sortCriterion, + scrollId, + count, + startTimeMillis, + endTimeMillis); + + return ResponseEntity.ok( + GenericScrollResult.builder() + .scrollId(result.getScrollId()) + .results(toGenericTimeseriesAspect(result.getDocuments(), withSystemMetadata)) + .build()); + } + + private static List toGenericTimeseriesAspect( + List docs, boolean withSystemMetadata) { + return docs.stream() + .map( + doc -> + GenericTimeseriesAspect.builder() + .urn(doc.getUrn()) + .messageId(doc.getMessageId()) + .timestampMillis(doc.getTimestampMillis()) + .systemMetadata(withSystemMetadata ? 
doc.getSystemMetadata() : null) + .event(doc.getEvent()) + .build()) + .collect(Collectors.toList()); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java new file mode 100644 index 0000000000000..f1e965ca05464 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java @@ -0,0 +1,57 @@ +package io.datahubproject.openapi.v2.models; + +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.mxe.SystemMetadata; +import com.linkedin.util.Pair; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GenericEntity { + private String urn; + private Map aspects; + + public static class GenericEntityBuilder { + + public GenericEntity build( + ObjectMapper objectMapper, Map> aspects) { + Map jsonObjectMap = + aspects.entrySet().stream() + .map( + e -> { + try { + Map valueMap = + Map.of( + "value", + objectMapper.readTree( + RecordUtils.toJsonString(e.getValue().getFirst()) + .getBytes(StandardCharsets.UTF_8))); + + if (e.getValue().getSecond() != null) { + return Map.entry( + e.getKey(), + Map.of( + "systemMetadata", e.getValue().getSecond(), + "value", valueMap.get("value"))); + } else { + return Map.entry(e.getKey(), Map.of("value", valueMap.get("value"))); + } + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return new GenericEntity(urn, jsonObjectMap); + } + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java new file mode 100644 index 0000000000000..a4fb429c1eb18 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java @@ -0,0 +1,36 @@ +package io.datahubproject.openapi.v2.models; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.linkedin.common.urn.Urn; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GenericRelationship { + @Nonnull private String relationshipType; + @Nonnull private GenericNode destination; + @Nonnull private GenericNode source; + @Nullable private NodeProperties properties; + + @Data + @Builder + public static class GenericNode { + @Nonnull private String entityType; + @Nonnull private String urn; + + public static GenericNode fromUrn(@Nonnull Urn urn) { + return GenericNode.builder().entityType(urn.getEntityType()).urn(urn.toString()).build(); + } + } + + @Data + @Builder + public static class NodeProperties { + private List source; + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java 
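The entity, relationship, and timeseries controllers above all page with the same envelope, the GenericScrollResult model defined next: a scrollId plus a list of results. The following client-side sketch shows the resulting pagination loop against /v2/entity/{entityName}; the localhost gateway, the /openapi servlet mount, and the dataset entity name are assumptions, not taken from this patch, and the exhaustion signal (no scrollId on the last page) should be verified against the deployment.

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.net.URI;
    import java.net.URLEncoder;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.charset.StandardCharsets;

    public class ScrollV2EntitiesSketch {
      public static void main(String[] args) throws Exception {
        String base = "http://localhost:8080/openapi/v2/entity/dataset"; // assumed deployment
        ObjectMapper mapper = new ObjectMapper();
        HttpClient http = HttpClient.newHttpClient();

        String scrollId = null;
        do {
          String url =
              base
                  + "?count=10"
                  + (scrollId == null
                      ? ""
                      : "&scrollId=" + URLEncoder.encode(scrollId, StandardCharsets.UTF_8));
          HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
          HttpResponse<String> response = http.send(request, HttpResponse.BodyHandlers.ofString());

          JsonNode page = mapper.readTree(response.body());
          // Each result is a GenericEntity: {"urn": ..., "aspects": {...}}.
          page.path("results").forEach(entity -> System.out.println(entity.path("urn").asText()));

          // Keep scrolling until the server stops returning a scrollId.
          scrollId = page.path("scrollId").isTextual() ? page.path("scrollId").asText() : null;
        } while (scrollId != null && !scrollId.isEmpty());
      }
    }

The same loop works for /v2/relationship and /v2/timeseries, since both wrap their results in the same GenericScrollResult shape.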
b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java new file mode 100644 index 0000000000000..2befc83c00363 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java @@ -0,0 +1,12 @@ +package io.datahubproject.openapi.v2.models; + +import java.util.List; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class GenericScrollResult<T> { + private String scrollId; + private List<T> results; +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java new file mode 100644 index 0000000000000..9d52ed28b2066 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java @@ -0,0 +1,18 @@ +package io.datahubproject.openapi.v2.models; + +import com.fasterxml.jackson.annotation.JsonInclude; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GenericTimeseriesAspect { + private long timestampMillis; + @Nonnull private String urn; + @Nonnull private Object event; + @Nullable private String messageId; + @Nullable private Object systemMetadata; +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java new file mode 100644 index 0000000000000..c5323dfe68369 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java @@ -0,0 +1,26 @@ +package io.datahubproject.openapi.v2.models; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class PatchOperation { + @Nonnull private String op; + @Nonnull private String path; + @Nullable private JsonNode value; + @Nullable private List<String> arrayMapKey; + + public PatchOperationType getOp() { + return PatchOperationType.valueOf(op.toUpperCase()); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java new file mode 100644 index 0000000000000..70d588721d3b3 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java @@ -0,0 +1,67 @@ +package io.datahubproject.openapi.v2.utils; + +import com.datahub.authentication.Actor; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; +import com.linkedin.metadata.models.EntitySpec; +import io.datahubproject.openapi.exception.UnauthorizedException; +import java.util.List; +import java.util.Optional; +import
java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ControllerUtil { + private ControllerUtil() {} + + public static void checkAuthorized( + @Nonnull Authorizer authorizationChain, + @Nonnull Actor actor, + @Nonnull EntitySpec entitySpec, + @Nonnull List privileges) { + checkAuthorized(authorizationChain, actor, entitySpec, null, privileges); + } + + public static void checkAuthorized( + @Nonnull Authorizer authorizationChain, + @Nonnull Actor actor, + @Nonnull Set entitySpecs, + @Nonnull List privileges) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(privileges))); + List> resourceSpecs = + entitySpecs.stream() + .map( + entitySpec -> + Optional.of(new com.datahub.authorization.EntitySpec(entitySpec.getName(), ""))) + .collect(Collectors.toList()); + if (!AuthUtil.isAuthorizedForResources( + authorizationChain, actor.toUrnStr(), resourceSpecs, orGroup)) { + throw new UnauthorizedException(actor.toUrnStr() + " is unauthorized to get entities."); + } + } + + public static void checkAuthorized( + @Nonnull Authorizer authorizationChain, + @Nonnull Actor actor, + @Nonnull EntitySpec entitySpec, + @Nullable String entityUrn, + @Nonnull List privileges) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(privileges))); + + List> resourceSpecs = + List.of( + Optional.of( + new com.datahub.authorization.EntitySpec( + entitySpec.getName(), entityUrn != null ? entityUrn : ""))); + if (!AuthUtil.isAuthorizedForResources( + authorizationChain, actor.toUrnStr(), resourceSpecs, orGroup)) { + throw new UnauthorizedException(actor.toUrnStr() + " is unauthorized to get entities."); + } + } +} diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index bca3e7161c8b8..ee45b8921143a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -319,6 +319,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -993,6 +994,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -2049,6 +2055,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index 69184856e4f9e..505f44c52d583 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -993,6 +994,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : 
"hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -5084,6 +5090,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index 09c0185f74f3a..e8c15d1b4ca04 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -735,6 +736,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -1783,6 +1789,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index eae0eed2dd50b..67f70d40e010c 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -735,6 +736,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -1777,6 +1783,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index cb253c458e6c4..4c8cd1f20d476 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -993,6 +994,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : 
"string", @@ -5078,6 +5084,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 64ae3632c353a..2f470dca01f2a 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -1,6 +1,9 @@ package com.linkedin.entity.client; +import static com.linkedin.metadata.utils.GenericRecordUtils.entityResponseToAspectMap; + import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -11,7 +14,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.aspect.VersionedAspect; -import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.browse.BrowseResultV2; import com.linkedin.metadata.graph.LineageDirection; @@ -40,7 +42,7 @@ import javax.annotation.Nullable; // Consider renaming this to datahub client. -public interface EntityClient extends AspectRetriever { +public interface EntityClient { @Nullable public EntityResponse getV2( @@ -623,14 +625,26 @@ public void producePlatformEvent( @Nonnull Authentication authentication) throws Exception; - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + public void rollbackIngestion( + @Nonnull String runId, @Nonnull Authorizer authorizer, @Nonnull Authentication authentication) throws Exception; - default Aspect getLatestAspectObject(@Nonnull Urn urn, @Nonnull String aspectName) + @Nullable + default Aspect getLatestAspectObject( + @Nonnull Urn urn, @Nonnull String aspectName, @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + return getLatestAspects(Set.of(urn), Set.of(aspectName), authentication) + .getOrDefault(urn, Map.of()) + .get(aspectName); + } + + @Nonnull + default Map> getLatestAspects( + @Nonnull Set urns, + @Nonnull Set aspectNames, + @Nonnull Authentication authentication) throws RemoteInvocationException, URISyntaxException { - return getV2(urn.getEntityType(), urn, Set.of(aspectName), null) - .getAspects() - .get(aspectName) - .getValue(); + String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); + return entityResponseToAspectMap(batchGetV2(entityName, urns, aspectNames, authentication)); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index d68c472ea9170..3108345bd3937 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -1,6 +1,7 @@ package com.linkedin.entity.client; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; 
import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; @@ -539,7 +540,9 @@ public SearchResult search( if (searchFlags != null) { requestBuilder.searchFlagsParam(searchFlags); - requestBuilder.fulltextParam(searchFlags.isFulltext()); + if (searchFlags.isFulltext() != null) { + requestBuilder.fulltextParam(searchFlags.isFulltext()); + } } return sendClientRequest(requestBuilder, authentication).getEntity(); @@ -1057,7 +1060,10 @@ public void producePlatformEvent( } @Override - public void rollbackIngestion(@Nonnull String runId, @Nonnull final Authentication authentication) + public void rollbackIngestion( + @Nonnull String runId, + @Nonnull Authorizer authorizer, + @Nonnull final Authentication authentication) throws Exception { final RunsDoRollbackRequestBuilder requestBuilder = RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false); diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java index dfad20b5f52b2..243e8a40bf4b7 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java @@ -4,24 +4,60 @@ import com.linkedin.common.urn.Urn; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; -import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.ScrollResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; -/** Adds entity/aspect cache and assumes system authentication */ -public interface SystemEntityClient extends EntityClient, AspectRetriever { +/** Adds entity/aspect cache and assumes **system** authentication */ +public interface SystemEntityClient extends EntityClient { EntityClientCache getEntityClientCache(); Authentication getSystemAuthentication(); + /** + * Searches across multiple entity types for entities matching a given query and filter + * + * @param entities entity types to search (if empty, searches all entities) + * @param input search query + * @param filter search filters + * @param scrollId opaque scroll ID indicating offset + * @param keepAlive string representation of time to keep point in time alive, ex: 5m + * @param count max number of search results requested + * @return a page of scroll results + * @throws RemoteInvocationException + */ + @Nonnull + default ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + return scrollAcrossEntities( entities, input, filter, scrollId, keepAlive, count, searchFlags, getSystemAuthentication()); + } + /** * Builds the cache * @@ -101,11 +137,16 @@ default void setWritable(boolean canWrite) throws RemoteInvocationException {
setWritable(canWrite, getSystemAuthentication()); } + @Nullable default Aspect getLatestAspectObject(@Nonnull Urn urn, @Nonnull String aspectName) throws RemoteInvocationException, URISyntaxException { - return getV2(urn.getEntityType(), urn, Set.of(aspectName), getSystemAuthentication()) - .getAspects() - .get(aspectName) - .getValue(); + return getLatestAspectObject(urn, aspectName, getSystemAuthentication()); + } + + @Nonnull + default Map<Urn, Map<String, Aspect>> getLatestAspects( + @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getLatestAspects(urns, aspectNames, getSystemAuthentication()); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java index a2f5596af9f4e..0f179c4da7b74 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java @@ -17,7 +17,7 @@ public SystemRestliEntityClient( @Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, + @Nonnull Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { super(restliClient, backoffPolicy, retryCount); this.systemAuthentication = systemAuthentication; diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index c5b019e85e0c9..ffa3abe6806f9 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -252,14 +252,14 @@ public Task<String> ingestProposal( if (asyncBool) { // if async we'll expand the getAdditionalChanges later, no need to do this early batch = AspectsBatchImpl.builder() - .mcps(List.of(metadataChangeProposal), auditStamp, _entityService.getEntityRegistry(), _entityService.getSystemEntityClient()) + .mcps(List.of(metadataChangeProposal), auditStamp, _entityService) .build(); } else { Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(metadataChangeProposal), AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream()); batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService.getEntityRegistry(), _entityService.getSystemEntityClient()) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) .build(); } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java index 294ded8a1e255..869cfc7afdee8 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java @@ -1,40 +1,25 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; -import static
com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.service.RollbackService.ROLLBACK_FAILED_STATUS; import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.authorization.EntitySpec; +import com.datahub.authentication.AuthenticationException; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.events.metadata.ChangeType; -import com.linkedin.execution.ExecutionRequestResult; -import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.VersionedAspect; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.RollbackRunResult; -import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.run.AspectRowSummary; import com.linkedin.metadata.run.AspectRowSummaryArray; import com.linkedin.metadata.run.IngestionRunSummary; import com.linkedin.metadata.run.IngestionRunSummaryArray; import com.linkedin.metadata.run.RollbackResponse; -import com.linkedin.metadata.run.UnsafeEntityInfo; -import com.linkedin.metadata.run.UnsafeEntityInfoArray; -import com.linkedin.metadata.search.utils.ESUtils; +import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; -import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import com.linkedin.metadata.utils.EntityKeyUtils; -import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.parseq.Task; import com.linkedin.restli.common.HttpStatus; import com.linkedin.restli.server.RestLiServiceException; @@ -43,13 +28,8 @@ import com.linkedin.restli.server.annotations.Optional; import com.linkedin.restli.server.annotations.RestLiCollection; import com.linkedin.restli.server.resources.CollectionResourceTaskTemplate; -import com.linkedin.timeseries.DeleteAspectValuesResult; import io.opentelemetry.extension.annotations.WithSpan; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.inject.Inject; @@ -64,14 +44,8 @@ public class BatchIngestionRunResource private static final Integer DEFAULT_OFFSET = 0; private static final Integer DEFAULT_PAGE_SIZE = 100; - private static final Integer DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE = 1000000; private static final boolean DEFAULT_INCLUDE_SOFT_DELETED = false; private static final boolean DEFAULT_HARD_DELETE = false; - private static final Integer ELASTIC_MAX_PAGE_SIZE = 10000; - private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; - private static final String ROLLING_BACK_STATUS = "ROLLING_BACK"; - private static final String ROLLED_BACK_STATUS = "ROLLED_BACK"; - private static final String ROLLBACK_FAILED_STATUS = "ROLLBACK_FAILED"; @Inject @Named("systemMetadataService") @@ -79,15 +53,15 @@ public class BatchIngestionRunResource @Inject @Named("entityService") - private EntityService _entityService; + private EntityService _entityService; @Inject - 
@Named("timeseriesAspectService") - private TimeseriesAspectService _timeseriesAspectService; + @Named("rollbackService") + private RollbackService rollbackService; - @Inject - @Named("authorizerChain") - private Authorizer _authorizer; + @Inject + @Named("authorizerChain") + private Authorizer _authorizer; /** Rolls back an ingestion run */ @Action(name = "rollback") @@ -111,274 +85,23 @@ public Task rollback( try { return RestliUtil.toTask( () -> { - if (runId.equals(DEFAULT_RUN_ID)) { - throw new IllegalArgumentException( - String.format( - "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id", - runId)); - } - if (!dryRun) { - updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS); - } - - RollbackResponse response = new RollbackResponse(); - List aspectRowsToDelete; - aspectRowsToDelete = - _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - Set urns = - aspectRowsToDelete.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet(); - List> resourceSpecs = - urns.stream() - .map(UrnUtils::getUrn) - .map( - urn -> - java.util.Optional.of( - new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - resourceSpecs)) { - throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities."); - } - log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - if (dryRun) { - - final Map> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream() - .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List keyAspects = aspectsSplitByIsKeyAspects.get(true); - - long entitiesDeleted = keyAspects.size(); - long aspectsReverted = aspectRowsToDelete.size(); - final long affectedEntities = - aspectRowsToDelete.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray( - aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - // If we are soft deleting, remove key aspects from count of aspects being deleted - if (!doHardDelete) { - aspectsReverted -= keyAspects.size(); - rowSummaries.removeIf(AspectRowSummary::isKeyAspect); + Authentication auth = AuthenticationContext.getAuthentication(); + try { + return rollbackService.rollbackIngestion(runId, dryRun, doHardDelete, _authorizer, auth); + } catch (AuthenticationException authException) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, authException.getMessage()); } - // Compute the aspects that exist referencing the key aspects we are deleting - final List affectedAspectsList = - keyAspects.stream() - .map( - (AspectRowSummary urn) -> - _systemMetadataService.findByUrn( - urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter( - row -> - !row.getRunId().equals(runId) - && !row.isKeyAspect() - && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final List 
unsafeEntityInfos = - affectedAspectsList.stream() - .map(AspectRowSummary::getUrn) - .distinct() - .map( - urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - // Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) - .collect(Collectors.toList()); - - return response - .setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - .setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); - } - - RollbackRunResult rollbackRunResult = - _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - final List deletedRows = rollbackRunResult.getRowsRolledBack(); - int rowsDeletedFromEntityDeletion = - rollbackRunResult.getRowsDeletedFromEntityDeletion(); - - // since elastic limits how many rows we can access at once, we need to iteratively - // delete - while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - aspectRowsToDelete = - _systemMetadataService.findByRunId( - runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - log.info( - "{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - log.info("deleting..."); - rollbackRunResult = - _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); - rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); - } - - // Rollback timeseries aspects - DeleteAspectValuesResult timeseriesRollbackResult = - _timeseriesAspectService.rollbackTimeseriesAspects(runId); - rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); - - log.info("finished deleting {} rows", deletedRows.size()); - int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; - - final Map> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream() - .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List keyAspects = aspectsSplitByIsKeyAspects.get(true); - - final long entitiesDeleted = keyAspects.size(); - final long affectedEntities = - deletedRows.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray( - aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - log.info("computing aspects affected by this rollback..."); - // Compute the aspects that exist referencing the key aspects we are deleting - final List affectedAspectsList = - keyAspects.stream() - .map( - (AspectRowSummary urn) -> - _systemMetadataService.findByUrn( - urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter( - row -> - !row.getRunId().equals(runId) - && !row.isKeyAspect() - && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final List unsafeEntityInfos = - affectedAspectsList.stream() - .map(AspectRowSummary::getUrn) - .distinct() - .map( - urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - 
// Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) - .collect(Collectors.toList()); - - log.info("calculation done."); - - updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); - - return response - .setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - .setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); }, MetricRegistry.name(this.getClass(), "rollback")); } catch (Exception e) { - updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS); + rollbackService.updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS); throw new RuntimeException( String.format("There was an issue rolling back ingestion run with runId %s", runId), e); } } - private String stringifyRowCount(int size) { - if (size < ELASTIC_MAX_PAGE_SIZE) { - return String.valueOf(size); - } else { - return "at least " + size; - } - } - - private void sleep(Integer seconds) { - try { - TimeUnit.SECONDS.sleep(seconds); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - private void updateExecutionRequestStatus(String runId, String status) { - try { - final Urn executionRequestUrn = - EntityKeyUtils.convertEntityKeyToUrn( - new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); - EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect( - executionRequestUrn.getEntityType(), - executionRequestUrn, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); - if (aspect == null) { - log.warn("Aspect for execution request with runId {} not found", runId); - } else { - final MetadataChangeProposal proposal = new MetadataChangeProposal(); - ExecutionRequestResult requestResult = new ExecutionRequestResult(aspect.getValue().data()); - requestResult.setStatus(status); - proposal.setEntityUrn(executionRequestUrn); - proposal.setEntityType(Constants.EXECUTION_REQUEST_ENTITY_NAME); - proposal.setAspectName(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); - proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult)); - proposal.setChangeType(ChangeType.UPSERT); - - _entityService.ingestProposal( - proposal, - new AuditStamp() - .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()), - false); - } - } catch (Exception e) { - log.error( - String.format( - "Not able to update execution result aspect with runId %s and new status %s.", - runId, status), - e); - } - } - /** Retrieves the value for an entity that is made up of latest versions of specified aspects. 
*/ @Action(name = "list") @Nonnull diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java index e3534875c6cd2..d6130e05b77bd 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java @@ -87,7 +87,7 @@ public void testAsyncDefaultAspects() throws URISyntaxException { .aspect(mcp.getAspect()) .auditStamp(new AuditStamp()) .metadataChangeProposal(mcp) - .build(_entityRegistry, _entityService.getSystemEntityClient()); + .build(_entityService); when(_aspectDao.runInTransactionWithRetry(any(), any(), anyInt())) .thenReturn( List.of( diff --git a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java index 2a12ecf6866bb..5187cba0b9151 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java +++ b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java @@ -7,6 +7,7 @@ import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.TimeseriesScrollResult; import com.linkedin.timeseries.AggregationSpec; import com.linkedin.timeseries.DeleteAspectValuesResult; import com.linkedin.timeseries.GenericTable; @@ -118,4 +119,18 @@ public void upsertDocument( public List getIndexSizes() { return List.of(); } + + @Nonnull + @Override + public TimeseriesScrollResult scrollAspects( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nullable Filter filter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + return TimeseriesScrollResult.builder().build(); + } } diff --git a/metadata-service/services/build.gradle b/metadata-service/services/build.gradle index c683b0c75f40a..78d651c05e4d9 100644 --- a/metadata-service/services/build.gradle +++ b/metadata-service/services/build.gradle @@ -1,6 +1,6 @@ plugins { id 'org.hidetake.swagger.generator' - id 'java' + id 'java-library' } configurations { @@ -14,7 +14,9 @@ dependencies { implementation project(':metadata-events:mxe-avro') implementation project(':metadata-events:mxe-registration') implementation project(':metadata-events:mxe-utils-avro') - implementation project(':metadata-models') + api project(path: ':metadata-models', configuration: 'dataTemplate') + api project(':metadata-models') + implementation project(':metadata-service:restli-client') implementation project(':metadata-service:configuration') diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java index c4216962c134c..2c1596474fb21 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java @@ -88,7 +88,7 @@ public static List getAdditionalChanges( public static List getAdditionalChanges( @Nonnull 
MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService) { + @Nonnull EntityService entityService) { return getAdditionalChanges(metadataChangeProposal, entityService, false); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index 71573aa2b10e0..94ab69e895920 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -9,11 +9,11 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.VersionedAspect; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.UpsertItem; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.models.AspectSpec; @@ -35,7 +35,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -public interface EntityService { +public interface EntityService extends AspectRetriever { /** * Just whether the entity/aspect exists @@ -287,6 +287,8 @@ Pair>> generateDefaultAspectsOnFirstW Set getEntityAspectNames(final String entityName); + @Override + @Nonnull EntityRegistry getEntityRegistry(); RollbackResult deleteAspect( @@ -349,15 +351,5 @@ default boolean exists(@Nonnull Urn urn, boolean includeSoftDelete) { BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException; - /** - * Allow internal use of the system entity client. 
Solves recursive dependencies between the - * EntityService and the SystemJavaEntityClient - * - * @param systemEntityClient system entity client - */ - void setSystemEntityClient(SystemEntityClient systemEntityClient); - - SystemEntityClient getSystemEntityClient(); - RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java index b3e713a906d01..625353eeb6820 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java @@ -5,6 +5,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.utils.QueryUtils; import java.net.URISyntaxException; import java.util.ArrayList; @@ -322,4 +323,18 @@ void removeEdgesFromNode( default boolean supportsMultiHop() { return false; } + + @Nonnull + RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List<String> sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List<String> destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List<SortCriterion> sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java new file mode 100644 index 0000000000000..0c6f8a0d65d5c --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java @@ -0,0 +1,31 @@ +package com.linkedin.metadata.graph; + +import com.linkedin.metadata.query.filter.RelationshipDirection; +import javax.annotation.Nonnull; +import lombok.Getter; + +/** Preserves directionality as well as the generic `related` urn concept */ +@Getter +public class RelatedEntities extends RelatedEntity { + /** source Urn * */ + @Nonnull String sourceUrn; + + /** Destination Urn associated with the related entity. */ + @Nonnull String destinationUrn; + + public RelatedEntities( + @Nonnull String relationshipType, + @Nonnull String sourceUrn, + @Nonnull String destinationUrn, + @Nonnull RelationshipDirection relationshipDirection) { + super( + relationshipType, + relationshipDirection == RelationshipDirection.OUTGOING ?
destinationUrn : sourceUrn); + this.sourceUrn = sourceUrn; + this.destinationUrn = destinationUrn; + } + + public RelatedEntity asRelatedEntity() { + return new RelatedEntity(relationshipType, urn); + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java new file mode 100644 index 0000000000000..b0b5394ca5808 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java @@ -0,0 +1,16 @@ +package com.linkedin.metadata.graph; + +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +@AllArgsConstructor +@Data +@Builder +public class RelatedEntitiesScrollResult { + int numResults; + int pageSize; + String scrollId; + List<RelatedEntities> entities; +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 189ae09e1b938..2fec88ad221fd 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -161,7 +161,7 @@ AutoCompleteResult autoComplete( * @param field the field name for aggregate * @param requestParams filters to apply before aggregating * @param limit the number of aggregations to return - * @return + * @return a map of the value to the count of documents having the value */ @Nonnull Map<String, Long> aggregateByValue( diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java new file mode 100644 index 0000000000000..59d40b29e7383 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java @@ -0,0 +1,1107 @@ +package com.linkedin.metadata.service; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.entity.AspectUtils.buildMetadataChangeProposal; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.FieldFormPromptAssociation; +import com.linkedin.common.FieldFormPromptAssociationArray; +import com.linkedin.common.FormAssociation; +import com.linkedin.common.FormAssociationArray; +import com.linkedin.common.FormPromptAssociation; +import com.linkedin.common.FormPromptAssociationArray; +import com.linkedin.common.FormPromptFieldAssociations; +import com.linkedin.common.FormVerificationAssociation; +import com.linkedin.common.FormVerificationAssociationArray; +import com.linkedin.common.Forms; +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.form.FormActorAssignment; +import com.linkedin.form.FormInfo; +import
com.linkedin.form.FormPrompt; +import com.linkedin.form.FormType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.authorization.OwnershipUtils; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.service.util.SearchBasedFormAssignmentRunner; +import com.linkedin.metadata.utils.FormUtils; +import com.linkedin.metadata.utils.SchemaFieldUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaMetadata; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +/** + * This class is used to execute CRUD operations around forms and submitting responses to forms and + * their prompts. + * + *
<p>
Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. + */ +@Slf4j +public class FormService extends BaseService { + private static final int BATCH_FORM_ENTITY_COUNT = 500; + + public FormService( + @Nonnull final EntityClient entityClient, + @Nonnull final Authentication systemAuthentication) { + super(entityClient, systemAuthentication); + } + + /** Batch associate a form with a given set of entities by urn. */ + public void batchAssignFormToEntities( + @Nonnull final List<Urn> entityUrns, @Nonnull final Urn formUrn) throws Exception { + batchAssignFormToEntities(entityUrns, formUrn, this.systemAuthentication); + } + + /** Batch associate a form with a given set of entities by urn. */ + public void batchAssignFormToEntities( + @Nonnull final List<Urn> entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + verifyEntityExists(formUrn, authentication); + verifyEntitiesExist(entityUrns, authentication); + final List<MetadataChangeProposal> changes = + buildAssignFormChanges(entityUrns, formUrn, authentication); + ingestChangeProposals(changes, authentication); + } + + /** Batch remove a form from a given set of entities by urn. */ + public void batchUnassignFormForEntities( + @Nonnull final List<Urn> entityUrns, @Nonnull final Urn formUrn) throws Exception { + batchUnassignFormForEntities(entityUrns, formUrn, this.systemAuthentication); + } + + /** Batch remove a form from a given set of entities by urn. */ + public void batchUnassignFormForEntities( + @Nonnull final List<Urn> entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + verifyEntityExists(formUrn, authentication); + verifyEntitiesExist(entityUrns, authentication); + final List<MetadataChangeProposal> changes = + buildUnassignFormChanges(entityUrns, formUrn, authentication); + ingestChangeProposals(changes, authentication); + } + + /** Mark a specific form prompt as incomplete */ + public void batchSetFormPromptIncomplete( + @Nonnull final List<Urn> entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId) + throws Exception { + batchSetFormPromptIncomplete(entityUrns, formUrn, formPromptId, this.systemAuthentication); + } + + /** Mark a specific form prompt as incomplete */ + public void batchSetFormPromptIncomplete( + @Nonnull final List<Urn> entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + verifyEntityExists(formUrn, authentication); + verifyEntitiesExist(entityUrns, authentication); + final FormInfo formInfo = getFormInfo(formUrn, authentication); + final List<MetadataChangeProposal> changes = + buildUnsetFormPromptChanges(entityUrns, formUrn, formPromptId, formInfo, authentication); + ingestChangeProposals(changes, authentication); + } + + /** Create a dynamic form assignment for a particular form. */ + public void createDynamicFormAssignment( + @Nonnull final DynamicFormAssignment dynamicFormAssignment, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (!entityClient.exists(formUrn, authentication)) { + throw new RuntimeException( + String.format("Form %s does not exist. Skipping dynamic form assignment", formUrn)); + } + + try { + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + formUrn, Constants.DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME, dynamicFormAssignment), + authentication, + false); + } catch (Exception e) { + throw new RuntimeException("Failed to create form", e); + } + } + + /** Assigns the form to an entity for completion. */ + public void upsertFormAssignmentRunner( + @Nonnull final Urn formUrn, @Nonnull final DynamicFormAssignment formFilters) { + try { + SearchBasedFormAssignmentRunner.assign( + formFilters, formUrn, BATCH_FORM_ENTITY_COUNT, entityClient, systemAuthentication); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to dynamically assign form with urn: %s", formUrn), e); + } + } + + /** Submit a response for a structured property type prompt. */ + public Boolean batchSubmitStructuredPropertyPromptResponse( + @Nonnull final List<String> entityUrns, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + entityUrns.forEach( + urnStr -> { + Urn urn = UrnUtils.getUrn(urnStr); + try { + submitStructuredPropertyPromptResponse( + urn, structuredPropertyUrn, values, formUrn, formPromptId, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to batch submit structured property prompt", e); + } + }); + + return true; + } + + /** Submit a response for a structured property type prompt. */ + public Boolean submitStructuredPropertyPromptResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + + // First, let's apply the action and add the structured property. + ingestStructuredProperties(entityUrn, structuredPropertyUrn, values, authentication); + + // Then, let's apply the change to the entity's form status. + ingestCompletedFormResponse(entityUrn, formUrn, formPromptId, authentication); + + return true; + } + + /** Submit a response for a field-level structured property type prompt. */ + public Boolean batchSubmitFieldStructuredPropertyPromptResponse( + @Nonnull final List<String> entityUrns, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + entityUrns.forEach( + urnStr -> { + Urn urn = UrnUtils.getUrn(urnStr); + try { + submitFieldStructuredPropertyPromptResponse( + urn, + structuredPropertyUrn, + values, + formUrn, + formPromptId, + fieldPath, + authentication); + } catch (Exception e) { + throw new RuntimeException( + "Failed to batch submit field structured property prompt", e); + } + }); + + return true; + } + + /** Submit a response for a field-level structured property type prompt.
*/ + public Boolean submitFieldStructuredPropertyPromptResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + + // First, let's apply the action and add the structured property. + ingestSchemaFieldStructuredProperties( + entityUrn, structuredPropertyUrn, values, fieldPath, authentication); + + // Then, let's apply the change to the entity's form status. + ingestCompletedFieldFormResponse(entityUrn, formUrn, formPromptId, fieldPath, authentication); + + return true; + } + + private void ingestCompletedFieldFormResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + final Forms forms = getEntityForms(entityUrn, authentication); + final FormAssociation formAssociation = getFormWithUrn(forms, formUrn); + if (formAssociation == null) { + throw new RuntimeException( + String.format("Form %s has not been assigned to entity %s", formUrn, entityUrn)); + } + final FormPromptAssociation formPromptAssociation = + getOrDefaultFormPromptAssociation(formAssociation, formPromptId, authentication); + + // update the prompt association to have this fieldFormPromptAssociation marked as complete + updateFieldPromptToComplete( + formPromptAssociation, fieldPath, UrnUtils.getUrn(authentication.getActor().toUrnStr())); + + // field prompt is complete if all fields in entity's schema metadata are marked complete + if (isFieldPromptComplete(entityUrn, formPromptAssociation, authentication)) { + // if this is complete, the prompt as a whole should be marked as complete + ingestCompletedFormResponse(entityUrn, formUrn, formPromptId, forms, authentication); + } else { + // regardless, ingest forms to save state of this aspect + ingestForms(entityUrn, forms, authentication); + } + } + + private void ingestCompletedFormResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + final Forms forms = getEntityForms(entityUrn, authentication); + ingestCompletedFormResponse(entityUrn, formUrn, formPromptId, forms, authentication); + } + + private void ingestCompletedFormResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Forms forms, + @Nonnull final Authentication authentication) + throws Exception { + // Next, get all the information we need to update the forms for the entity. + final FormInfo formInfo = getFormInfo(formUrn, authentication); + final FormAssociation formAssociation = getFormWithUrn(forms, formUrn); + + if (formAssociation == null) { + throw new RuntimeException( + String.format("Form %s has not been assigned to entity %s", formUrn, entityUrn)); + } + + // First, mark the prompt as completed in forms aspect. + updatePromptToComplete(formAssociation, entityUrn, formUrn, formPromptId, authentication); + + // Then, update the completed forms fields based on which prompts remain incomplete. + updateFormCompletion(forms, formAssociation, formInfo); + + // Finally, ingest the newly updated forms aspect. 
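+    // Note: ingestForms below emits a single MetadataChangeProposal for the Forms
+    // aspect, so the prompt-level and form-level updates made above are persisted
+    // together in one write.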
+ ingestForms(entityUrn, forms, authentication); + } + + private void ingestSchemaFieldStructuredProperties( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + Urn schemaFieldUrn = SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), fieldPath); + ingestStructuredProperties(schemaFieldUrn, structuredPropertyUrn, values, authentication); + } + + private void ingestStructuredProperties( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Authentication authentication) + throws Exception { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME), + authentication); + + StructuredProperties structuredProperties = new StructuredProperties(); + structuredProperties.setProperties(new StructuredPropertyValueAssignmentArray()); + if (response != null && response.getAspects().containsKey(STRUCTURED_PROPERTIES_ASPECT_NAME)) { + structuredProperties = + new StructuredProperties( + response.getAspects().get(STRUCTURED_PROPERTIES_ASPECT_NAME).getValue().data()); + } + + // Since we upsert assignments for this structuredProperty, + // remove anything from this structured property and add to this list + List filteredAssignments = + structuredProperties.getProperties().stream() + .filter(assignment -> !assignment.getPropertyUrn().equals(structuredPropertyUrn)) + .collect(Collectors.toList()); + + StructuredPropertyValueAssignment assignment = new StructuredPropertyValueAssignment(); + assignment.setValues(values); + assignment.setPropertyUrn(structuredPropertyUrn); + assignment.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(System.currentTimeMillis())); + assignment.setLastModified( + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(System.currentTimeMillis())); + filteredAssignments.add(assignment); + + StructuredPropertyValueAssignmentArray assignments = + new StructuredPropertyValueAssignmentArray(filteredAssignments); + structuredProperties.setProperties(assignments); + + final MetadataChangeProposal structuredPropertiesProposal = + AspectUtils.buildMetadataChangeProposal( + entityUrn, STRUCTURED_PROPERTIES_ASPECT_NAME, structuredProperties); + try { + this.entityClient.ingestProposal(structuredPropertiesProposal, authentication, false); + } catch (Exception e) { + throw new RuntimeException("Failed to submit form response", e); + } + } + + private void ingestForms( + @Nonnull final Urn entityUrn, + @Nonnull final Forms forms, + @Nonnull final Authentication authentication) { + try { + ingestChangeProposals( + ImmutableList.of( + AspectUtils.buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, forms)), + authentication); + } catch (Exception e) { + log.warn(String.format("Failed to ingest forms for entity with urn %s", entityUrn), e); + } + } + + private Forms getEntityForms( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) throws Exception { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(FORMS_ASPECT_NAME), + authentication); + if (response != null && response.getAspects().containsKey(FORMS_ASPECT_NAME)) { + return new 
Forms(response.getAspects().get(FORMS_ASPECT_NAME).getValue().data()); + } + // No entity forms found. + throw new RuntimeException( + String.format( + "Entity is missing forms aspect, form is not assigned to entity with urn %s", + entityUrn)); + } + + /** + * Checks schema metadata for an entity and ensures there's a completed field prompt for every + * field. If there is no schema metadata, raise an error. + */ + private boolean isFieldPromptComplete( + @Nonnull final Urn entityUrn, + @Nonnull final FormPromptAssociation formPromptAssociation, + @Nonnull final Authentication authentication) + throws Exception { + final Set<String> completedFieldPaths = + Objects.requireNonNull(formPromptAssociation.getFieldAssociations()) + .getCompletedFieldPrompts() + .stream() + .map(FieldFormPromptAssociation::getFieldPath) + .collect(Collectors.toSet()); + final SchemaMetadata schemaMetadata = getSchemaMetadata(entityUrn, authentication); + final List<String> fieldPaths = + schemaMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .collect(Collectors.toList()); + + return completedFieldPaths.containsAll(fieldPaths); + } + + /** + * Performs the operation of changing the status of a form field prompt from incomplete to + * complete. + */ + private void updateFieldPromptToComplete( + @Nonnull final FormPromptAssociation formPromptAssociation, + @Nonnull final String fieldPath, + @Nonnull final Urn actor) { + final FieldFormPromptAssociation completedFieldPromptAssociation = + new FieldFormPromptAssociation(); + completedFieldPromptAssociation.setFieldPath(fieldPath); + completedFieldPromptAssociation.setLastModified(createAuditStamp(actor)); + + FormPromptFieldAssociations fieldAssociations = + formPromptAssociation.getFieldAssociations() != null + ? formPromptAssociation.getFieldAssociations() + : new FormPromptFieldAssociations(); + + if (fieldAssociations.getCompletedFieldPrompts() == null) { + fieldAssociations.setCompletedFieldPrompts(new FieldFormPromptAssociationArray()); + } + if (fieldAssociations.getIncompleteFieldPrompts() == null) { + fieldAssociations.setIncompleteFieldPrompts(new FieldFormPromptAssociationArray()); + } + + // add this prompt association to list of completed prompts, removing its previous association + // if it was already in there + FieldFormPromptAssociationArray completedFieldPrompts = + new FieldFormPromptAssociationArray( + fieldAssociations.getCompletedFieldPrompts().stream() + .filter(fieldPrompt -> !fieldPrompt.getFieldPath().equals(fieldPath)) + .collect(Collectors.toList())); + completedFieldPrompts.add(completedFieldPromptAssociation); + fieldAssociations.setCompletedFieldPrompts(completedFieldPrompts); + + // remove this prompt association from list of incomplete prompts + FieldFormPromptAssociationArray incompleteFieldPrompts = new FieldFormPromptAssociationArray(); + fieldAssociations + .getIncompleteFieldPrompts() + .forEach( + incompleteFieldPrompt -> { + if (!incompleteFieldPrompt.getFieldPath().equals(fieldPath)) { + incompleteFieldPrompts.add(incompleteFieldPrompt); + } + }); + fieldAssociations.setIncompleteFieldPrompts(incompleteFieldPrompts); + + formPromptAssociation.setFieldAssociations(fieldAssociations); + } + + /** Performs the operation of changing the status of a form prompt from incomplete to complete. */ + private void updatePromptToComplete( + @Nonnull final FormAssociation formAssociation, + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) { + final FormPromptAssociation formPromptAssociation = + getOrDefaultFormPromptAssociation(formAssociation, formPromptId, authentication); + + // add this prompt association to list of completed prompts, removing its previous association + // if it was already in there + FormPromptAssociationArray completedPrompts = + new FormPromptAssociationArray( + formAssociation.getCompletedPrompts().stream() + .filter(prompt -> !prompt.getId().equals(formPromptId)) + .collect(Collectors.toList())); + completedPrompts.add(formPromptAssociation); + formAssociation.setCompletedPrompts(completedPrompts); + + // remove this prompt association from list of incomplete prompts + FormPromptAssociationArray incompletePrompts = new FormPromptAssociationArray(); + formAssociation + .getIncompletePrompts() + .forEach( + incompletePrompt -> { + if (!incompletePrompt.getId().equals(formPromptId)) { + incompletePrompts.add(incompletePrompt); + } + }); + formAssociation.setIncompletePrompts(incompletePrompts); + } + + /** Performs the operation of changing the status of a form prompt from complete to incomplete. */ + private void updatePromptToIncomplete( + @Nonnull final FormAssociation form, + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId) { + // Remove the prompt from completed. + final List<FormPromptAssociation> newCompletedPrompts = + form.getCompletedPrompts().stream() + .filter(prompt -> !prompt.getId().equals(formPromptId)) + .collect(Collectors.toList()); + form.setCompletedPrompts(new FormPromptAssociationArray(newCompletedPrompts)); + + // Add the prompt to incomplete. + if (form.getIncompletePrompts().stream() + .anyMatch(prompt -> prompt.getId().equals(formPromptId))) { + log.warn( + String.format( + "Attempting to unset a prompt that is already incomplete. Skipping... Form: %s, Prompt: %s, Entity: %s", + formUrn, formPromptId, entityUrn)); + return; + } + final List<FormPromptAssociation> newIncompletePrompts = + new ArrayList<>(form.getIncompletePrompts()); + newIncompletePrompts.add( + new FormPromptAssociation().setId(formPromptId).setLastModified(createSystemAuditStamp())); + form.setIncompletePrompts(new FormPromptAssociationArray(newIncompletePrompts)); + } + + private List<MetadataChangeProposal> buildAssignFormChanges( + @Nonnull final List<Urn> entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) { + final List<MetadataChangeProposal> results = new ArrayList<>(); + entityUrns.forEach( + entityUrn -> { + try { + MetadataChangeProposal maybeChange = + buildAssignFormChange(entityUrn, formUrn, authentication); + if (maybeChange != null) { + results.add(maybeChange); + } + } catch (Exception e) { + log.warn( + String.format( + "Failed to retrieve form %s for entity %s.
Skipping form assignment", + formUrn, entityUrn), + e); + } + }); + return results; + } + + @Nullable + private MetadataChangeProposal buildAssignFormChange( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(FORMS_ASPECT_NAME), + authentication); + + Forms formsAspect = new Forms(); + formsAspect.setIncompleteForms(new FormAssociationArray()); + formsAspect.setCompletedForms(new FormAssociationArray()); + if (response != null && response.getAspects().containsKey(FORMS_ASPECT_NAME)) { + formsAspect = new Forms(response.getAspects().get(FORMS_ASPECT_NAME).getValue().data()); + } + + // if this form is already assigned to this entity, leave it and move on + Optional formAssociation = + Stream.concat( + formsAspect.getCompletedForms().stream(), formsAspect.getIncompleteForms().stream()) + .filter(form -> form.getUrn().equals(formUrn)) + .findAny(); + + if (formAssociation.isPresent()) { + return null; + } + + // add this form to the entity's incomplete form associations. + FormAssociationArray incompleteForms = formsAspect.getIncompleteForms(); + FormAssociation newAssociation = new FormAssociation(); + newAssociation.setUrn(formUrn); + + // set all prompts as incomplete when assigning this form + FormInfo formInfo = getFormInfo(formUrn, authentication); + FormPromptAssociationArray formPromptAssociations = new FormPromptAssociationArray(); + formInfo + .getPrompts() + .forEach( + prompt -> { + FormPromptAssociation association = new FormPromptAssociation(); + association.setId(prompt.getId()); + association.setLastModified(createAuditStamp(authentication)); + formPromptAssociations.add(association); + }); + newAssociation.setIncompletePrompts(formPromptAssociations); + newAssociation.setCompletedPrompts(new FormPromptAssociationArray()); + incompleteForms.add(newAssociation); + formsAspect.setIncompleteForms(incompleteForms); + return buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, formsAspect); + } + + private List buildUnassignFormChanges( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) { + final List results = new ArrayList<>(); + entityUrns.forEach( + entityUrn -> { + try { + MetadataChangeProposal maybeChange = + buildUnassignFormChange(entityUrn, formUrn, authentication); + if (maybeChange != null) { + results.add(maybeChange); + } + } catch (Exception e) { + log.warn( + String.format( + "Failed to retrieve form %s for entity %s. 
Skipping form unassignment.", + formUrn, entityUrn), + e); + } + }); + return results; + } + + @Nullable + private MetadataChangeProposal buildUnassignFormChange( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(FORMS_ASPECT_NAME), + authentication); + Forms formsAspect = new Forms(); + formsAspect.setCompletedForms(new FormAssociationArray()); + formsAspect.setIncompleteForms(new FormAssociationArray()); + if (response != null && response.getAspects().containsKey(FORMS_ASPECT_NAME)) { + formsAspect = new Forms(response.getAspects().get(FORMS_ASPECT_NAME).getValue().data()); + } + + List newCompleted = + new ArrayList<>( + new FormAssociationArray( + formsAspect.getCompletedForms().stream() + .filter(form -> !form.getUrn().equals(formUrn)) + .collect(Collectors.toList()))); + List newIncomplete = + new ArrayList<>( + new FormAssociationArray( + formsAspect.getIncompleteForms().stream() + .filter(form -> !form.getUrn().equals(formUrn)) + .collect(Collectors.toList()))); + + if (newCompleted.size() == formsAspect.getCompletedForms().size() + && newIncomplete.size() == formsAspect.getIncompleteForms().size()) { + // No metadata to change. Skip ingestion. + return null; + } + + formsAspect.setCompletedForms(new FormAssociationArray(newCompleted)); + formsAspect.setIncompleteForms(new FormAssociationArray(newIncomplete)); + + return buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, formsAspect); + } + + private List buildUnsetFormPromptChanges( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final FormInfo formDefinition, + @Nonnull final Authentication authentication) { + final List results = new ArrayList<>(); + entityUrns.forEach( + entityUrn -> { + try { + MetadataChangeProposal maybeChange = + buildUnsetFormPromptChange( + entityUrn, formUrn, formPromptId, formDefinition, authentication); + if (maybeChange != null) { + results.add(maybeChange); + } + } catch (Exception e) { + log.warn( + String.format( + "Failed to retrieve form %s for entity %s. Skipping form unassignment.", + formUrn, entityUrn), + e); + } + }); + return results; + } + + @Nullable + private MetadataChangeProposal buildUnsetFormPromptChange( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final FormInfo formDefinition, + @Nonnull final Authentication authentication) + throws Exception { + + // Retrieve entity forms state + final Forms forms = getEntityForms(entityUrn, authentication); + + // First, find the form with the provided urn. + final FormAssociation formAssociation = getFormWithUrn(forms, formUrn); + + if (formAssociation != null) { + // 1. Find and mark the provided form prompt as incomplete. + updatePromptToIncomplete(formAssociation, entityUrn, formUrn, formPromptId); + + // 2. Update the form's completion status given the incomplete prompt. + updateFormCompletion(forms, formAssociation, formDefinition); + + // 3. Update the form status aspect for the entity. + return buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, forms); + } else { + // Form not assigned to the entity! Let's warn and do nothing. + log.warn( + String.format( + "Failed to find form with urn %s associated with entity urn %s while attempting to unset form prompt %s. 
Skipping...", + formUrn, entityUrn, formPromptId)); + } + + return null; + } + + private void updateFormCompletion( + @Nonnull final Forms forms, + @Nonnull final FormAssociation form, + @Nonnull final FormInfo formDefinition) { + + final boolean isFormCompleted = isFormCompleted(form, formDefinition); + + if (isFormCompleted) { + // If the form is complete, we want to add it to completed forms. + + // 1. Remove from incomplete. + forms.setIncompleteForms( + new FormAssociationArray( + forms.getIncompleteForms().stream() + .filter(incompleteForm -> !incompleteForm.getUrn().equals(form.getUrn())) + .collect(Collectors.toList()))); + + // 2. Add to complete (if not already present) + if (forms.getCompletedForms().stream() + .noneMatch(completedForm -> completedForm.getUrn().equals(form.getUrn()))) { + // Not found in completed, let's update it. + List newCompleted = new ArrayList<>(forms.getCompletedForms()); + newCompleted.add(form); + forms.setCompletedForms(new FormAssociationArray(newCompleted)); + } + } else { + // If the form is incomplete, we want to remove it from the completed forms. + // If the form implies verification, we also ensure that the verification status is + // un-applied. + + // 1. Remove from complete. + forms.setCompletedForms( + new FormAssociationArray( + forms.getCompletedForms().stream() + .filter(completedForm -> !completedForm.getUrn().equals(form.getUrn())) + .collect(Collectors.toList()))); + + // 2. Add to incomplete (if not already present) + if (forms.getIncompleteForms().stream() + .noneMatch(incompleteForm -> incompleteForm.getUrn().equals(form.getUrn()))) { + // Not found in incompleted. Let's updated + List newIncomplete = new ArrayList<>(forms.getIncompleteForms()); + newIncomplete.add(form); + forms.setIncompleteForms(new FormAssociationArray(newIncomplete)); + } + + // 3. Remove verification as required. + if (FormType.VERIFICATION.equals(formDefinition.getType())) { + removeFormVerification(form.getUrn(), forms); + } + } + } + + /** + * Returns true if a form is considered completed, false otherwise. This is a function of whether + * all required prompts are marked as completed. + * + *

If none or some required prompts are marked as completed, then the form will be considered + * NOT completed. + * + * @param form the form status, as completed for a specific entity. + * @param formDefinition the form definition, which contains information about which prompts are + * required. + */ + private boolean isFormCompleted( + @Nonnull final FormAssociation form, @Nonnull final FormInfo formDefinition) { + final List requiredPromptsIds = + formDefinition.getPrompts().stream() + .filter(FormPrompt::isRequired) + .map(FormPrompt::getId) + .collect(Collectors.toList()); + + final List completedPromptIds = + form.getCompletedPrompts().stream() + .map(FormPromptAssociation::getId) + .collect(Collectors.toList()); + + // If all required prompts are completed, then the form is completed. + return completedPromptIds.containsAll(requiredPromptsIds); + } + + @Nullable + private FormAssociation getFormWithUrn( + @Nonnull final Forms existingForms, @Nonnull final Urn formUrn) { + // First check in the completed set. + Optional maybeForm = + existingForms.getCompletedForms().stream() + .filter(form -> form.getUrn().equals(formUrn)) + .findFirst(); + if (maybeForm.isPresent()) { + return maybeForm.get(); + } + + // Then check the incomplete set. + maybeForm = + existingForms.getIncompleteForms().stream() + .filter(form -> form.getUrn().equals(formUrn)) + .findFirst(); + if (maybeForm.isPresent()) { + return maybeForm.get(); + } + + // No form found, return null. + return null; + } + + @Nullable + private FormPromptAssociation getFormPromptAssociation( + @Nonnull final FormAssociation formAssociation, @Nonnull final String formPromptId) { + // First check in the completed set. + Optional maybePromptAssociation = + formAssociation.getCompletedPrompts().stream() + .filter(prompt -> prompt.getId().equals(formPromptId)) + .findFirst(); + if (maybePromptAssociation.isPresent()) { + return maybePromptAssociation.get(); + } + + // Then check the incomplete set. + maybePromptAssociation = + formAssociation.getIncompletePrompts().stream() + .filter(prompt -> prompt.getId().equals(formPromptId)) + .findFirst(); + if (maybePromptAssociation.isPresent()) { + return maybePromptAssociation.get(); + } + + // No prompt association found, return null. + return null; + } + + /** + * Gets a form prompt association by the prompt ID. If none exists (could happen as a form was + * changed after assigned or some other reason), then create the association and add it to the + * formAssociation's list of incomplete prompts. + */ + private FormPromptAssociation getOrDefaultFormPromptAssociation( + @Nonnull final FormAssociation formAssociation, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) { + final FormPromptAssociation existingPromptAssociation = + getFormPromptAssociation(formAssociation, formPromptId); + final FormPromptAssociation formPromptAssociation = + existingPromptAssociation != null ? 
existingPromptAssociation : new FormPromptAssociation(); + formPromptAssociation.setId(formPromptId); + formPromptAssociation.setLastModified( + createAuditStamp(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + if (existingPromptAssociation == null) { + FormPromptAssociationArray incompletePrompts = + new FormPromptAssociationArray(formAssociation.getIncompletePrompts()); + incompletePrompts.add(formPromptAssociation); + formAssociation.setIncompletePrompts(incompletePrompts); + } + return formPromptAssociation; + } + + private void removeFormVerification(@Nonnull final Urn formUrn, @Nonnull final Forms forms) { + if (!forms.hasVerifications()) { + // Nothing to do. + return; + } + + // Remove verification of given urn. + final List newVerifications = + forms.getVerifications().stream() + .filter(verification -> !formUrn.equals(verification.getForm())) + .collect(Collectors.toList()); + + // Update verifications for forms aspect. + forms.setVerifications(new FormVerificationAssociationArray(newVerifications)); + } + + /** + * A form is assigned to a user if either the user or a group the user is in is explicitly set on + * the actors field on a form. Otherwise, if the actors field says that owners are assigned, + * ensure this actor, or a group they're in, is an owner of this entity. + */ + public boolean isFormAssignedToUser( + @Nonnull final Urn formUrn, + @Nonnull final Urn entityUrn, + @Nonnull final Urn actorUrn, + @Nonnull final List groupsForUser, + @Nonnull final Authentication authentication) + throws Exception { + final FormInfo formInfo = getFormInfo(formUrn, authentication); + final FormActorAssignment formActorAssignment = formInfo.getActors(); + if (FormUtils.isFormAssignedToUser(formActorAssignment, actorUrn, groupsForUser)) { + return true; + } + + if (formActorAssignment.isOwners()) { + Ownership entityOwnership = getEntityOwnership(entityUrn, authentication); + return OwnershipUtils.isOwnerOfEntity(entityOwnership, actorUrn, groupsForUser); + } + + return false; + } + + /** + * Adds a new form verification association for an entity for this form on their forms aspect. If + * there was an existing verification association for this form, remove and replace it. First, + * ensure this form is of VERIFICATION type and that this form is in completedForms. + */ + public boolean verifyFormForEntity( + @Nonnull final Urn formUrn, + @Nonnull final Urn entityUrn, + @Nonnull final Authentication authentication) + throws Exception { + final FormInfo formInfo = getFormInfo(formUrn, authentication); + if (!formInfo.getType().equals(FormType.VERIFICATION)) { + throw new UnsupportedOperationException( + String.format("Form %s is not of type VERIFICATION. Cannot verify form.", formUrn)); + } + final Forms formsAspect = getEntityForms(entityUrn, authentication); + if (!isFormInCompletedForms(formUrn, formsAspect)) { + throw new RuntimeException( + String.format( + "Form %s is not in the list of completed forms for this entity. 
Skipping verification.", + formUrn)); + } + + // Remove any existing verifications for this form to patch a new one + List formVerifications = + formsAspect.getVerifications().stream() + .filter(verification -> !verification.getForm().equals(formUrn)) + .collect(Collectors.toList()); + FormVerificationAssociation newAssociation = new FormVerificationAssociation(); + newAssociation.setForm(formUrn); + newAssociation.setLastModified(createAuditStamp(authentication)); + formVerifications.add(newAssociation); + + formsAspect.setVerifications(new FormVerificationAssociationArray(formVerifications)); + + ingestForms(entityUrn, formsAspect, authentication); + return true; + } + + private boolean isFormInCompletedForms( + @Nonnull final Urn formUrn, @Nonnull final Forms formsAspect) { + return formsAspect.getCompletedForms().stream() + .anyMatch(completedForm -> completedForm.getUrn().equals(formUrn)); + } + + public FormInfo getFormInfo( + @Nonnull final Urn formUrn, @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { + final EntityResponse formInfoResponse = + entityClient.getV2( + formUrn.getEntityType(), + formUrn, + ImmutableSet.of(FORM_INFO_ASPECT_NAME), + authentication); + if (formInfoResponse != null + && formInfoResponse.getAspects().containsKey(FORM_INFO_ASPECT_NAME)) { + return new FormInfo( + formInfoResponse.getAspects().get(FORM_INFO_ASPECT_NAME).getValue().data()); + } else { + throw new RuntimeException(String.format("Form %s does not exist.", formUrn)); + } + } + + private SchemaMetadata getSchemaMetadata( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(SCHEMA_METADATA_ASPECT_NAME), + authentication); + if (response != null && response.getAspects().containsKey(SCHEMA_METADATA_ASPECT_NAME)) { + return new SchemaMetadata( + response.getAspects().get(SCHEMA_METADATA_ASPECT_NAME).getValue().data()); + } else { + throw new RuntimeException( + String.format("Schema metadata does not exist on entity %s.", entityUrn)); + } + } + + private Ownership getEntityOwnership( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { + final EntityResponse entityResponse = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(OWNERSHIP_ASPECT_NAME), + authentication); + if (entityResponse != null && entityResponse.getAspects().containsKey(OWNERSHIP_ASPECT_NAME)) { + return new Ownership( + entityResponse.getAspects().get(OWNERSHIP_ASPECT_NAME).getValue().data()); + } else { + throw new RuntimeException(String.format("Ownership %s does not exist.", entityUrn)); + } + } + + private void verifyEntitiesExist( + @Nonnull final List entityUrns, @Nonnull final Authentication authentication) { + entityUrns.forEach( + entityUrn -> { + try { + verifyEntityExists(entityUrn, authentication); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Issue verifying whether entity exists when assigning form to it. Entity urn: %s", + entityUrn)); + } + }); + } + + private void verifyEntityExists( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (!entityClient.exists(entityUrn, authentication)) { + throw new RuntimeException( + String.format("Entity %s does not exist. 
Skipping batch form assignment", entityUrn)); + } + } + + private AuditStamp createSystemAuditStamp() { + return createAuditStamp(UrnUtils.getUrn(SYSTEM_ACTOR)); + } + + private AuditStamp createAuditStamp(@Nonnull final Authentication authentication) { + return createAuditStamp(UrnUtils.getUrn(authentication.getActor().toUrnStr())); + } + + private AuditStamp createAuditStamp(@Nonnull final Urn actor) { + return new AuditStamp().setTime(System.currentTimeMillis()).setActor(actor); + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java new file mode 100644 index 0000000000000..22496b6c07806 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java @@ -0,0 +1,328 @@ +package com.linkedin.metadata.service; + +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationException; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.execution.ExecutionRequestResult; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.RollbackRunResult; +import com.linkedin.metadata.key.ExecutionRequestKey; +import com.linkedin.metadata.run.AspectRowSummary; +import com.linkedin.metadata.run.AspectRowSummaryArray; +import com.linkedin.metadata.run.RollbackResponse; +import com.linkedin.metadata.run.UnsafeEntityInfo; +import com.linkedin.metadata.run.UnsafeEntityInfoArray; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.timeseries.DeleteAspectValuesResult; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** Extracts logic historically in the Restli service which acts across multiple services */ +@Slf4j +@AllArgsConstructor +public class RollbackService { + public static final String ROLLING_BACK_STATUS = "ROLLING_BACK"; + public static final String ROLLED_BACK_STATUS = "ROLLED_BACK"; + public static final String ROLLBACK_FAILED_STATUS = "ROLLBACK_FAILED"; + + public static final int MAX_RESULT_SIZE = 10000; + public static final int ELASTIC_MAX_PAGE_SIZE = 10000; + public static final int DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE = 1000000; + public static final int ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; + + private final EntityService entityService; + private final SystemMetadataService systemMetadataService; + private final 
TimeseriesAspectService timeseriesAspectService;
+  private final boolean restApiAuthorizationEnabled;
+
+  public List<AspectRowSummary> rollbackTargetAspects(@Nonnull String runId, boolean hardDelete) {
+    return systemMetadataService.findByRunId(runId, hardDelete, 0, MAX_RESULT_SIZE);
+  }
+
+  public RollbackResponse rollbackIngestion(
+      @Nonnull String runId,
+      boolean dryRun,
+      boolean hardDelete,
+      Authorizer authorizer,
+      @Nonnull Authentication authentication)
+      throws AuthenticationException {
+
+    if (runId.equals(DEFAULT_RUN_ID)) {
+      throw new IllegalArgumentException(
+          String.format(
+              "%s is a default run-id provided for non-labeled ingestion runs. You cannot delete using this reserved run-id",
+              runId));
+    }
+
+    if (!dryRun) {
+      updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS);
+    }
+
+    List<AspectRowSummary> aspectRowsToDelete = rollbackTargetAspects(runId, hardDelete);
+    if (!isAuthorized(authorizer, aspectRowsToDelete, authentication)) {
+      throw new AuthenticationException("User is not authorized to delete entities.");
+    }
+
+    log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
+    if (dryRun) {
+
+      final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects =
+          aspectRowsToDelete.stream()
+              .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect));
+
+      final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true);
+
+      long entitiesDeleted = keyAspects.size();
+      long aspectsReverted = aspectRowsToDelete.size();
+
+      final long affectedEntities =
+          aspectRowsToDelete.stream()
+              .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+              .keySet()
+              .size();
+
+      final AspectRowSummaryArray rowSummaries =
+          new AspectRowSummaryArray(
+              aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())));
+
+      // If we are soft deleting, remove key aspects from count of aspects being deleted
+      if (!hardDelete) {
+        aspectsReverted -= keyAspects.size();
+        rowSummaries.removeIf(AspectRowSummary::isKeyAspect);
+      }
+      // Compute the aspects that exist referencing the key aspects we are deleting
+      final List<AspectRowSummary> affectedAspectsList =
+          keyAspects.stream()
+              .map(
+                  (AspectRowSummary urn) ->
+                      systemMetadataService.findByUrn(urn.getUrn(), false, 0, MAX_RESULT_SIZE))
+              .flatMap(List::stream)
+              .filter(
+                  row ->
+                      !row.getRunId().equals(runId)
+                          && !row.isKeyAspect()
+                          && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME))
+              .toList();
+
+      long unsafeEntitiesCount =
+          affectedAspectsList.stream()
+              .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+              .keySet()
+              .size();
+
+      final List<UnsafeEntityInfo> unsafeEntityInfos =
+          affectedAspectsList.stream()
+              .map(AspectRowSummary::getUrn)
+              .distinct()
+              .map(
+                  urn -> {
+                    UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo();
+                    unsafeEntityInfo.setUrn(urn);
+                    return unsafeEntityInfo;
+                  })
+              // Return at most 1 million rows
+              .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE)
+              .collect(Collectors.toList());
+
+      return new RollbackResponse()
+          .setAspectsReverted(aspectsReverted)
+          .setEntitiesAffected(affectedEntities)
+          .setEntitiesDeleted(entitiesDeleted)
+          .setUnsafeEntitiesCount(unsafeEntitiesCount)
+          .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos))
+          .setAspectRowSummaries(rowSummaries);
+    }
+
+    RollbackRunResult rollbackRunResult =
+        entityService.rollbackRun(aspectRowsToDelete, runId, hardDelete);
+    final List<AspectRowSummary> deletedRows = rollbackRunResult.getRowsRolledBack();
+    int rowsDeletedFromEntityDeletion = rollbackRunResult.getRowsDeletedFromEntityDeletion();
+
+    // since elastic limits how many rows we can access at once, we need to iteratively
+ // delete + while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + aspectRowsToDelete = systemMetadataService.findByRunId(runId, hardDelete, 0, MAX_RESULT_SIZE); + log.info("{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + log.info("deleting..."); + rollbackRunResult = entityService.rollbackRun(aspectRowsToDelete, runId, hardDelete); + deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); + rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); + } + + // Rollback timeseries aspects + DeleteAspectValuesResult timeseriesRollbackResult = + timeseriesAspectService.rollbackTimeseriesAspects(runId); + rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); + + log.info("finished deleting {} rows", deletedRows.size()); + int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; + + final Map> aspectsSplitByIsKeyAspects = + aspectRowsToDelete.stream() + .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); + + final List keyAspects = aspectsSplitByIsKeyAspects.get(true); + + final long entitiesDeleted = keyAspects.size(); + final long affectedEntities = + deletedRows.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final AspectRowSummaryArray rowSummaries = + new AspectRowSummaryArray( + aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); + + log.info("computing aspects affected by this rollback..."); + // Compute the aspects that exist referencing the key aspects we are deleting + final List affectedAspectsList = + keyAspects.stream() + .map( + (AspectRowSummary urn) -> + systemMetadataService.findByUrn(urn.getUrn(), false, 0, MAX_RESULT_SIZE)) + .flatMap(List::stream) + .filter( + row -> + !row.getRunId().equals(runId) + && !row.isKeyAspect() + && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) + .toList(); + + long affectedAspects = affectedAspectsList.size(); + long unsafeEntitiesCount = + affectedAspectsList.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final List unsafeEntityInfos = + affectedAspectsList.stream() + .map(AspectRowSummary::getUrn) + .distinct() + .map( + urn -> { + UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); + unsafeEntityInfo.setUrn(urn); + return unsafeEntityInfo; + }) + // Return at most 1 million rows + .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + log.info("calculation done."); + + updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); + + return new RollbackResponse() + .setAspectsAffected(affectedAspects) + .setAspectsReverted(aspectsReverted) + .setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + } + + public void updateExecutionRequestStatus(@Nonnull String runId, @Nonnull String status) { + try { + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); + EnvelopedAspect aspect = + entityService.getLatestEnvelopedAspect( + executionRequestUrn.getEntityType(), + executionRequestUrn, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + if (aspect == null) { + log.warn("Aspect for execution request with runId {} not found", runId); + } else { + 
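+        // Rewrite the executionRequestResult aspect with the new status and re-ingest it as an
+        // UPSERT proposal authored by the system actor.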
final MetadataChangeProposal proposal = new MetadataChangeProposal(); + ExecutionRequestResult requestResult = new ExecutionRequestResult(aspect.getValue().data()); + requestResult.setStatus(status); + proposal.setEntityUrn(executionRequestUrn); + proposal.setEntityType(Constants.EXECUTION_REQUEST_ENTITY_NAME); + proposal.setAspectName(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult)); + proposal.setChangeType(ChangeType.UPSERT); + + entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); + } + } catch (Exception e) { + log.error( + String.format( + "Not able to update execution result aspect with runId %s and new status %s.", + runId, status), + e); + } + } + + private boolean isAuthorized( + final Authorizer authorizer, + @Nonnull List rowSummaries, + @Nonnull Authentication authentication) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); + + List> resourceSpecs = + rowSummaries.stream() + .map(AspectRowSummary::getUrn) + .map(UrnUtils::getUrn) + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .distinct() + .collect(Collectors.toList()); + + return !restApiAuthorizationEnabled + || AuthUtil.isAuthorizedForResources( + authorizer, authentication.getActor().toUrnStr(), resourceSpecs, orGroup); + } + + private static String stringifyRowCount(int size) { + if (size < ELASTIC_MAX_PAGE_SIZE) { + return String.valueOf(size); + } else { + return "at least " + size; + } + } + + private static void sleep(int seconds) { + try { + TimeUnit.SECONDS.sleep(seconds); + } catch (InterruptedException e) { + log.error("Rollback sleep exception", e); + } + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java new file mode 100644 index 0000000000000..73e3bc130ac9d --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java @@ -0,0 +1,94 @@ +package com.linkedin.metadata.service.util; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.service.FormService; +import com.linkedin.r2.RemoteInvocationException; +import java.util.List; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SearchBasedFormAssignmentManager { + + private static final ImmutableList ENTITY_TYPES = + ImmutableList.of(Constants.DATASET_ENTITY_NAME); + + public static void apply( + DynamicFormAssignment formFilters, + Urn formUrn, + int batchFormEntityCount, + EntityClient entityClient, + Authentication authentication) + throws Exception { + + try { + int totalResults = 0; + int numResults = 0; + String scrollId = null; + FormService formService = new FormService(entityClient, authentication); + + do { + + ScrollResult results = + 
entityClient.scrollAcrossEntities( + ENTITY_TYPES, + "*", + formFilters.getFilter(), + scrollId, + "5m", + batchFormEntityCount, + null, + authentication); + + if (!results.hasEntities() + || results.getNumEntities() == 0 + || results.getEntities().isEmpty()) { + break; + } + + log.info("Search across entities results: {}.", results); + + if (results.hasEntities()) { + final List entityUrns = + results.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + formService.batchAssignFormToEntities(entityUrns, formUrn); + + if (!entityUrns.isEmpty()) { + log.info("Batch assign {} entities to form {}.", entityUrns.size(), formUrn); + } + + numResults = results.getEntities().size(); + totalResults += numResults; + scrollId = results.getScrollId(); + + log.info( + "Starting batch assign forms, count: {} running total: {}, size: {}", + batchFormEntityCount, + totalResults, + results.getEntities().size()); + + } else { + break; + } + } while (scrollId != null); + + log.info("Successfully assigned {} entities to form {}.", totalResults, formUrn); + + } catch (RemoteInvocationException e) { + log.error("Error while assigning form to entities.", e); + throw new RuntimeException(e); + } + } + + private SearchBasedFormAssignmentManager() {} +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java new file mode 100644 index 0000000000000..a20f71f550c65 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java @@ -0,0 +1,45 @@ +package com.linkedin.metadata.service.util; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.DynamicFormAssignment; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SearchBasedFormAssignmentRunner { + + public static void assign( + DynamicFormAssignment formFilters, + Urn formUrn, + int batchFormEntityCount, + EntityClient entityClient, + Authentication authentication) { + Runnable runnable = + new Runnable() { + @Override + public void run() { + try { + SearchBasedFormAssignmentManager.apply( + formFilters, formUrn, batchFormEntityCount, entityClient, authentication); + } catch (Exception e) { + log.error( + "SearchBasedFormAssignmentRunner failed to run. 
" + + "Options: formFilters: {}, " + + "formUrn: {}, " + + "batchFormCount: {}, " + + "entityClient: {}, ", + formFilters, + formUrn, + batchFormEntityCount, + entityClient); + throw new RuntimeException("Form assignment runner error.", e); + } + } + }; + + new Thread(runnable).start(); + } + + private SearchBasedFormAssignmentRunner() {} +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index 71c4d357ad1eb..b6bef33df1d7f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.shared; +import com.codahale.metrics.Timer; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.AbstractArrayTemplate; @@ -19,6 +20,7 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.utils.metrics.MetricUtils; import java.util.Objects; import java.util.Set; import java.util.function.Function; @@ -33,25 +35,27 @@ public class ValidationUtils { public static SearchResult validateSearchResult( final SearchResult searchResult, @Nonnull final EntityService entityService) { - if (searchResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateSearchResult").time()) { + if (searchResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + SearchResult validatedSearchResult = + new SearchResult() + .setFrom(searchResult.getFrom()) + .setMetadata(searchResult.getMetadata()) + .setPageSize(searchResult.getPageSize()) + .setNumEntities(searchResult.getNumEntities()); + + SearchEntityArray validatedEntities = + validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) + .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedSearchResult.setEntities(validatedEntities); + + return validatedSearchResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - SearchResult validatedSearchResult = - new SearchResult() - .setFrom(searchResult.getFrom()) - .setMetadata(searchResult.getMetadata()) - .setPageSize(searchResult.getPageSize()) - .setNumEntities(searchResult.getNumEntities()); - - SearchEntityArray validatedEntities = - validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) - .collect(Collectors.toCollection(SearchEntityArray::new)); - - validatedSearchResult.setEntities(validatedEntities); - - return validatedSearchResult; } public static ScrollResult validateScrollResult( @@ -81,78 +85,85 @@ public static ScrollResult validateScrollResult( public static BrowseResult validateBrowseResult( final BrowseResult browseResult, @Nonnull final EntityService entityService) { - if (browseResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateBrowseResult").time()) { + if (browseResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + BrowseResult validatedBrowseResult = + new BrowseResult() + .setGroups(browseResult.getGroups()) + .setMetadata(browseResult.getMetadata()) + 
.setFrom(browseResult.getFrom()) + .setPageSize(browseResult.getPageSize()) + .setNumGroups(browseResult.getNumGroups()) + .setNumEntities(browseResult.getNumEntities()) + .setNumElements(browseResult.getNumElements()); + + BrowseResultEntityArray validatedEntities = + validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) + .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + validatedBrowseResult.setEntities(validatedEntities); + + return validatedBrowseResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - BrowseResult validatedBrowseResult = - new BrowseResult() - .setGroups(browseResult.getGroups()) - .setMetadata(browseResult.getMetadata()) - .setFrom(browseResult.getFrom()) - .setPageSize(browseResult.getPageSize()) - .setNumGroups(browseResult.getNumGroups()) - .setNumEntities(browseResult.getNumEntities()) - .setNumElements(browseResult.getNumElements()); - - BrowseResultEntityArray validatedEntities = - validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) - .collect(Collectors.toCollection(BrowseResultEntityArray::new)); - - validatedBrowseResult.setEntities(validatedEntities); - - return validatedBrowseResult; } public static ListResult validateListResult( final ListResult listResult, @Nonnull final EntityService entityService) { - if (listResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateListResult").time()) { + if (listResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + ListResult validatedListResult = + new ListResult() + .setStart(listResult.getStart()) + .setCount(listResult.getCount()) + .setTotal(listResult.getTotal()); + + UrnArray validatedEntities = + validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) + .collect(Collectors.toCollection(UrnArray::new)); + validatedListResult.setEntities(validatedEntities); + + return validatedListResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - ListResult validatedListResult = - new ListResult() - .setStart(listResult.getStart()) - .setCount(listResult.getCount()) - .setTotal(listResult.getTotal()); - - UrnArray validatedEntities = - validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) - .collect(Collectors.toCollection(UrnArray::new)); - - validatedListResult.setEntities(validatedEntities); - - return validatedListResult; } public static LineageSearchResult validateLineageSearchResult( final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { - if (lineageSearchResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateLineageResult").time()) { + if (lineageSearchResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + LineageSearchResult validatedLineageSearchResult = + new LineageSearchResult() + .setMetadata(lineageSearchResult.getMetadata()) + .setFrom(lineageSearchResult.getFrom()) + .setPageSize(lineageSearchResult.getPageSize()) + .setNumEntities(lineageSearchResult.getNumEntities()); + + LineageSearchEntityArray validatedEntities = + validatedUrns( + lineageSearchResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + 
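+      // validatedUrns drops hits whose backing entity no longer exists, so stale search
+      // results are filtered out before they reach the client.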
validatedLineageSearchResult.setEntities(validatedEntities); + + log.debug("Returning validated lineage search results"); + return validatedLineageSearchResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - LineageSearchResult validatedLineageSearchResult = - new LineageSearchResult() - .setMetadata(lineageSearchResult.getMetadata()) - .setFrom(lineageSearchResult.getFrom()) - .setPageSize(lineageSearchResult.getPageSize()) - .setNumEntities(lineageSearchResult.getNumEntities()); - - LineageSearchEntityArray validatedEntities = - validatedUrns( - lineageSearchResult.getEntities(), - LineageSearchEntity::getEntity, - entityService, - true) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); - - validatedLineageSearchResult.setEntities(validatedEntities); - - return validatedLineageSearchResult; } public static EntityLineageResult validateEntityLineageResult( diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java new file mode 100644 index 0000000000000..1442f099c4703 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java @@ -0,0 +1,26 @@ +package com.linkedin.metadata.timeseries; + +import com.fasterxml.jackson.annotation.JsonProperty; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class GenericTimeseriesDocument { + @Nonnull private String urn; + private long timestampMillis; + + @JsonProperty("@timestamp") + private long timestamp; + + @Nonnull private Object event; + @Nullable private String messageId; + @Nullable private Object systemMetadata; + @Nullable private String eventGranularity; + private boolean isExploded; + @Nullable private String runId; + @Nullable private String partition; + @Nullable private Object partitionSpec; +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java index 54480bb700398..529e8e00ecf57 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java @@ -201,4 +201,15 @@ void upsertDocument( @Nonnull final JsonNode document); List getIndexSizes(); + + @Nonnull + TimeseriesScrollResult scrollAspects( + @Nonnull final String entityName, + @Nonnull final String aspectName, + @Nullable Filter filter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java new file mode 100644 index 0000000000000..200db2dfde8eb --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java @@ -0,0 +1,18 @@ +package com.linkedin.metadata.timeseries; + +import com.linkedin.metadata.aspect.EnvelopedAspect; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +@AllArgsConstructor 
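+// One page of results from TimeseriesAspectService#scrollAspects: raw index documents plus
+// their deserialized enveloped aspects.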
+@Data
+@Builder
+public class TimeseriesScrollResult {
+  int numResults;
+  int pageSize;
+  String scrollId;
+  List<EnvelopedAspect> events;
+  List<GenericTimeseriesDocument> documents;
+}
diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
index 970235fc88c87..27aa9ee04cc75 100644
--- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
+++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
@@ -4,7 +4,7 @@
 import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER;
 
 import com.datahub.gms.util.CSVWriter;
-import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
+import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.metadata.config.search.SearchConfiguration;
 import com.linkedin.metadata.models.EntitySpec;
diff --git a/metadata-service/war/src/main/resources/boot/data_types.json b/metadata-service/war/src/main/resources/boot/data_types.json
new file mode 100644
index 0000000000000..2d7294e45bd7a
--- /dev/null
+++ b/metadata-service/war/src/main/resources/boot/data_types.json
@@ -0,0 +1,42 @@
+[
+  {
+    "urn": "urn:li:dataType:datahub.string",
+    "info": {
+      "qualifiedName":"datahub.string",
+      "displayName": "String",
+      "description": "A string of characters."
+    }
+  },
+  {
+    "urn": "urn:li:dataType:datahub.number",
+    "info": {
+      "qualifiedName":"datahub.number",
+      "displayName": "Number",
+      "description": "An integer or decimal number."
+    }
+  },
+  {
+    "urn": "urn:li:dataType:datahub.urn",
+    "info": {
+      "qualifiedName":"datahub.urn",
+      "displayName": "Urn",
+      "description": "A unique identifier for a DataHub entity."
+    }
+  },
+  {
+    "urn": "urn:li:dataType:datahub.rich_text",
+    "info": {
+      "qualifiedName":"datahub.rich_text",
+      "displayName": "Rich Text",
+      "description": "An attributed string of characters."
+    }
+  },
+  {
+    "urn": "urn:li:dataType:datahub.date",
+    "info": {
+      "qualifiedName":"datahub.date",
+      "displayName": "Date",
+      "description": "A specific day, without time."
+ } + } +] diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java new file mode 100644 index 0000000000000..140b64780918d --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java @@ -0,0 +1,20 @@ +package com.linkedin.metadata.authorization; + +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import java.util.List; +import javax.annotation.Nonnull; + +public class OwnershipUtils { + + public static boolean isOwnerOfEntity( + @Nonnull final Ownership entityOwnership, + @Nonnull final Urn actorUrn, + @Nonnull final List groupsForUser) { + return entityOwnership.getOwners().stream() + .anyMatch( + owner -> owner.getOwner().equals(actorUrn) || groupsForUser.contains(owner.getOwner())); + } + + private OwnershipUtils() {} +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java index 5f3975b066fde..6ba311cf166d4 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java @@ -3,8 +3,11 @@ import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import java.net.URISyntaxException; import java.time.Clock; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -16,4 +19,11 @@ public static AuditStamp createDefaultAuditStamp() { .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) .setTime(Clock.systemUTC().millis()); } + + public static AuditStamp createAuditStamp(@Nonnull String actorUrn) throws URISyntaxException { + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(actorUrn)); + auditStamp.setTime(Clock.systemUTC().millis()); + return auditStamp; + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java new file mode 100644 index 0000000000000..ebf2587418dae --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java @@ -0,0 +1,49 @@ +package com.linkedin.metadata.utils; + +import com.linkedin.common.urn.Urn; +import com.linkedin.form.FormActorAssignment; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class FormUtils { + + private FormUtils() {} + + public static boolean isFormAssignedToUser( + @Nonnull final FormActorAssignment parent, + @Nonnull final Urn userUrn, + @Nonnull final List groupUrns) { + // Assigned urn and group urns + final Set assignedUserUrns = + parent.getUsers() != null + ? parent.getUsers().stream().map(Urn::toString).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Set assignedGroupUrns = + parent.getGroups() != null + ? parent.getGroups().stream().map(Urn::toString).collect(Collectors.toSet()) + : Collections.emptySet(); + + // First check whether user is directly assigned. + if (assignedUserUrns.size() > 0) { + boolean isUserAssigned = assignedUserUrns.contains(userUrn.toString()); + if (isUserAssigned) { + return true; + } + } + + // Next check whether the user is assigned indirectly, by group. 
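+    // The caller passes in the user's already-resolved group urns, so no group membership
+    // lookups happen inside this check.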
+ if (assignedGroupUrns.size() > 0) { + boolean isUserGroupAssigned = + groupUrns.stream().anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString())); + if (isUserGroupAssigned) { + return true; + } + } + + return false; + } +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java index fc28367e6c7ee..ae061a2d0c090 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java @@ -1,12 +1,17 @@ package com.linkedin.metadata.utils; import com.datahub.util.RecordUtils; +import com.linkedin.common.urn.Urn; import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.GenericPayload; import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; import javax.annotation.Nonnull; public class GenericRecordUtils { @@ -66,4 +71,20 @@ public static GenericPayload serializePayload(@Nonnull RecordTemplate payload) { genericPayload.setContentType(GenericRecordUtils.JSON); return genericPayload; } + + @Nonnull + public static Map> entityResponseToAspectMap( + Map inputMap) { + return inputMap.entrySet().stream() + .map( + entry -> + Map.entry( + entry.getKey(), + entry.getValue().getAspects().entrySet().stream() + .map( + aspectEntry -> + Map.entry(aspectEntry.getKey(), aspectEntry.getValue().getValue())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java new file mode 100644 index 0000000000000..edf959d04a37b --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java @@ -0,0 +1,22 @@ +package com.linkedin.metadata.utils; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.SchemaFieldKey; +import javax.annotation.Nonnull; + +public class SchemaFieldUtils { + + private SchemaFieldUtils() {} + + public static Urn generateSchemaFieldUrn( + @Nonnull final String resourceUrn, @Nonnull final String fieldPath) { + // we rely on schemaField fieldPaths to be encoded since we do that on the ingestion side + final String encodedFieldPath = + fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); + final SchemaFieldKey key = + new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); + return EntityKeyUtils.convertEntityKeyToUrn(key, Constants.SCHEMA_FIELD_ENTITY_NAME); + } +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java index eb58bc509838d..9df708c6e9fdc 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java @@ -7,14 +7,19 @@ import com.linkedin.metadata.query.filter.Criterion; import 
com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.FilterValue; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.net.URISyntaxException; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -142,4 +147,25 @@ public static BoolQueryBuilder filterSoftDeletedByDefault( } return filterQuery; } + + public static SortCriterion sortBy(@Nonnull String field, @Nullable SortOrder direction) { + SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(field); + sortCriterion.setOrder( + com.linkedin.metadata.query.filter.SortOrder.valueOf( + Optional.ofNullable(direction).orElse(SortOrder.ASCENDING).toString())); + return sortCriterion; + } + + public static Filter andFilter(Criterion... criteria) { + Filter filter = new Filter(); + filter.setOr(andCriterion(Arrays.stream(criteria))); + return filter; + } + + public static ConjunctiveCriterionArray andCriterion(Stream criteria) { + return new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(criteria.collect(Collectors.toList())))); + } } diff --git a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java index a324f9ce0195b..dfa8c627e0617 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java +++ b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java @@ -1,10 +1,10 @@ package mock; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/smoke-test/cypress-dev.sh b/smoke-test/cypress-dev.sh index 93f03d36cbd19..b1c6571e1a065 100755 --- a/smoke-test/cypress-dev.sh +++ b/smoke-test/cypress-dev.sh @@ -15,7 +15,7 @@ python -c 'from tests.cypress.integration_test import ingest_data; ingest_data() cd tests/cypress npm install -source ../../set-cypress-creds.sh +source "$DIR/set-cypress-creds.sh" npx cypress open \ --env "ADMIN_DISPLAYNAME=$CYPRESS_ADMIN_DISPLAYNAME,ADMIN_USERNAME=$CYPRESS_ADMIN_USERNAME,ADMIN_PASSWORD=$CYPRESS_ADMIN_PASSWORD" diff --git a/smoke-test/requests_wrapper/__init__.py b/smoke-test/requests_wrapper/__init__.py index d9956e8434a89..c2f4190e6150d 100644 --- a/smoke-test/requests_wrapper/__init__.py +++ b/smoke-test/requests_wrapper/__init__.py @@ -1,3 +1,4 @@ from .utils_requests_wrapper import CustomSession as Session from .utils_requests_wrapper import get, post from .constants import * +from requests import exceptions diff --git a/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js b/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js index 00de08e77a185..f89b70b7a7d23 100644 --- a/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js +++ 
b/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js @@ -80,7 +80,7 @@ describe('siblings', () => { cy.login(); cy.visit('/search?page=1&query=raw_orders'); - cy.contains('Showing 1 - 10 of 14 results'); + cy.contains('Showing 1 - 10 of '); cy.get('.test-search-result').should('have.length', 5); cy.get('.test-search-result-sibling-section').should('have.length', 5); diff --git a/smoke-test/tests/structured_properties/__init__.py b/smoke-test/tests/structured_properties/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/structured_properties/click_event.avsc b/smoke-test/tests/structured_properties/click_event.avsc new file mode 100644 index 0000000000000..d959dcbbdeea1 --- /dev/null +++ b/smoke-test/tests/structured_properties/click_event.avsc @@ -0,0 +1,14 @@ +{ + "namespace": "io.datahubproject", + "type": "record", + "name": "ClickEvent", + "fields": [ + { "name": "ip", "type": "string" }, + { "name": "url", "type": "string" }, + { "name": "time", "type": "long" }, + { "name": "referer", "type": ["string", "null"] }, + { "name": "user_agent", "type": ["string", "null"] }, + { "name": "user_id", "type": ["string", "null"] }, + { "name": "session_id", "type": ["string", "null"] } + ] +} diff --git a/smoke-test/tests/structured_properties/test_dataset.yaml b/smoke-test/tests/structured_properties/test_dataset.yaml new file mode 100644 index 0000000000000..2ac1cca6c6dc2 --- /dev/null +++ b/smoke-test/tests/structured_properties/test_dataset.yaml @@ -0,0 +1,19 @@ +- id: user.clicks + platform: hive + # urn: urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) # use urn instead of id and platform + subtype: table + schema: + file: tests/structured_properties/click_event.avsc + fields: + # - id: ip + - urn: urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD),ip) + structured_properties: + io.acryl.dataManagement.deprecationDate: "2023-01-01" + properties: + retention: 365 + structured_properties: + clusterType: primary + clusterName: gold + projectNames: + - Tracking + - DataHub diff --git a/smoke-test/tests/structured_properties/test_structured_properties.py b/smoke-test/tests/structured_properties/test_structured_properties.py new file mode 100644 index 0000000000000..83994776076b0 --- /dev/null +++ b/smoke-test/tests/structured_properties/test_structured_properties.py @@ -0,0 +1,577 @@ +import logging +import os +from datahub.ingestion.graph.filters import SearchFilterRule +from tests.consistency_utils import wait_for_writes_to_sync +import tempfile +from random import randint +from tests.utilities.file_emitter import FileEmitter +from typing import Iterable, List, Optional, Union + +import pytest +# import tenacity +from datahub.api.entities.dataset.dataset import Dataset +from datahub.api.entities.structuredproperties.structuredproperties import \ + StructuredProperties +from datahub.emitter.mce_builder import make_dataset_urn, make_schema_field_urn +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph +from datahub.metadata.schema_classes import ( + EntityTypeInfoClass, PropertyValueClass, StructuredPropertiesClass, + StructuredPropertyDefinitionClass, StructuredPropertyValueAssignmentClass) +from datahub.specific.dataset import DatasetPatchBuilder +from datahub.utilities.urns.structured_properties_urn import \ + StructuredPropertyUrn +from datahub.utilities.urns.urn import Urn + +from tests.utils import 
(delete_urns, delete_urns_from_file, get_gms_url, + get_sleep_info, ingest_file_via_rest, + wait_for_writes_to_sync) + +logger = logging.getLogger(__name__) + +start_index = randint(10, 10000) +dataset_urns = [ + make_dataset_urn("snowflake", f"table_foo_{i}") + for i in range(start_index, start_index + 10) +] + +schema_field_urns = [ + make_schema_field_urn(dataset_urn, "column_1") + for dataset_urn in dataset_urns +] + +generated_urns = [d for d in dataset_urns] + [f for f in schema_field_urns] + + +default_namespace = "io.acryl.privacy" + +def create_logical_entity( + entity_name: str, +) -> Iterable[MetadataChangeProposalWrapper]: + mcp = MetadataChangeProposalWrapper( + entityUrn="urn:li:entityType:" + entity_name, + aspect=EntityTypeInfoClass( + qualifiedName="io.datahubproject." + entity_name, + displayName=entity_name, + ), + ) + return [mcp] + + +def create_test_data(filename: str): + file_emitter = FileEmitter(filename) + for mcps in create_logical_entity("dataset"): + file_emitter.emit(mcps) + + file_emitter.close() + wait_for_writes_to_sync() + +sleep_sec, sleep_times = get_sleep_info() + + +@pytest.fixture(scope="module", autouse=False) +def graph() -> DataHubGraph: + graph: DataHubGraph = DataHubGraph( + config=DatahubClientConfig(server=get_gms_url()) + ) + return graph + + +@pytest.fixture(scope="module", autouse=False) +def ingest_cleanup_data(request): + new_file, filename = tempfile.mkstemp() + try: + create_test_data(filename) + print("ingesting structured properties test data") + ingest_file_via_rest(filename) + yield + print("removing structured properties test data") + delete_urns_from_file(filename) + delete_urns(generated_urns) + wait_for_writes_to_sync() + finally: + os.remove(filename) + + +@pytest.mark.dependency() +def test_healthchecks(wait_for_healthchecks): + # Call to wait_for_healthchecks fixture will do the actual functionality. 
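+    # Downstream tests depend on this check via
+    # @pytest.mark.dependency(depends=["test_healthchecks"]).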
+ pass + + +def create_property_definition( + property_name: str, + graph: DataHubGraph, + namespace: str = default_namespace, + value_type: str = "string", + cardinality: str = "SINGLE", + allowed_values: Optional[List[PropertyValueClass]] = None, + entity_types: Optional[List[str]] = None, +): + structured_property_definition = StructuredPropertyDefinitionClass( + qualifiedName=f"{namespace}.{property_name}", + valueType=Urn.make_data_type_urn(value_type), + description="The retention policy for the dataset", + entityTypes=[Urn.make_entity_type_urn(e) for e in entity_types] + if entity_types + else [Urn.make_entity_type_urn("dataset")], + cardinality=cardinality, + allowedValues=allowed_values, + ) + + mcp = MetadataChangeProposalWrapper( + entityUrn=f"urn:li:structuredProperty:{namespace}.{property_name}", + aspect=structured_property_definition, + ) + graph.emit(mcp) + wait_for_writes_to_sync() + + +def attach_property_to_entity( + urn: str, + property_name: str, + property_value: Union[str, float, List[str | float]], + graph: DataHubGraph, + namespace: str = default_namespace +): + if isinstance(property_value, list): + property_values: List[Union[str, float]] = property_value + else: + property_values = [property_value] + + mcp = MetadataChangeProposalWrapper( + entityUrn=urn, + aspect=StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:{namespace}.{property_name}", + values=property_values, + ) + ] + ), + ) + graph.emit_mcp(mcp) + wait_for_writes_to_sync() + + +def get_property_from_entity( + urn: str, + property_name: str, + graph: DataHubGraph, +): + structured_properties: Optional[ + StructuredPropertiesClass + ] = graph.get_aspect(urn, StructuredPropertiesClass) + assert structured_properties is not None + for property in structured_properties.properties: + if ( + property.propertyUrn + == f"urn:li:structuredProperty:{property_name}" + ): + return property.values + return None + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_string(ingest_cleanup_data, graph): + property_name = "retentionPolicy" + + create_property_definition(property_name, graph) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.retentionPolicy") + + attach_property_to_entity( + dataset_urns[0], property_name, ["30d"], graph=graph + ) + + try: + attach_property_to_entity( + dataset_urns[0], property_name, 200030, graph=graph + ) + raise AssertionError( + "Should not be able to attach a number to a string property" + ) + except Exception as e: + if not isinstance(e, AssertionError): + pass + else: + raise e + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_double(ingest_cleanup_data, graph): + property_name = "expiryTime" + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + create_property_definition(property_name, graph, value_type="number") + + attach_property_to_entity( + dataset_urns[0], property_name, 2000034, graph=graph + ) + + try: + attach_property_to_entity( + dataset_urns[0], property_name, "30 days", graph=graph + ) + raise AssertionError( + "Should not be able to attach a string to a number property" + ) + except Exception as e: + if not isinstance(e, 
AssertionError): + pass + else: + raise e + + try: + attach_property_to_entity( + dataset_urns[0], property_name, [2000034, 2000035], graph=graph + ) + raise AssertionError( + "Should not be able to attach a list to a number property" + ) + except Exception as e: + if not isinstance(e, AssertionError): + pass + else: + raise e + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_double_multiple(ingest_cleanup_data, graph): + property_name = "versions" + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + + create_property_definition( + property_name, graph, value_type="number", cardinality="MULTIPLE" + ) + + attach_property_to_entity( + dataset_urns[0], property_name, [1.0, 2.0], graph=graph + ) + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_string_allowed_values( + ingest_cleanup_data, graph +): + property_name = "enumProperty" + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + + create_property_definition( + property_name, + graph, + value_type="string", + cardinality="MULTIPLE", + allowed_values=[ + PropertyValueClass(value="foo"), + PropertyValueClass(value="bar"), + ], + ) + + attach_property_to_entity( + dataset_urns[0], property_name, ["foo", "bar"], graph=graph + ) + + try: + attach_property_to_entity( + dataset_urns[0], property_name, ["foo", "baz"], graph=graph + ) + raise AssertionError( + "Should not be able to attach a value not in allowed values" + ) + except Exception as e: + if "value: {string=baz} should be one of [" in str(e): + pass + else: + raise e + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_definition_evolution( + ingest_cleanup_data, graph +): + property_name = "enumProperty1234" + + create_property_definition( + property_name, + graph, + value_type="string", + cardinality="MULTIPLE", + allowed_values=[ + PropertyValueClass(value="foo"), + PropertyValueClass(value="bar"), + ], + ) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + + try: + create_property_definition( + property_name, + graph, + value_type="string", + cardinality="SINGLE", + allowed_values=[ + PropertyValueClass(value="foo"), + PropertyValueClass(value="bar"), + ], + ) + raise AssertionError( + "Should not be able to change cardinality from MULTIPLE to SINGLE" + ) + except Exception as e: + if isinstance(e, AssertionError): + raise e + else: + pass + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_schema_field(ingest_cleanup_data, graph): + property_name = ( + f"deprecationDate{randint(10, 10000)}" + ) + + create_property_definition( + property_name, + graph, + namespace="io.datahubproject.test", + value_type="date", + entity_types=["schemaField"], + ) + generated_urns.append(f"urn:li:structuredProperty:io.datahubproject.test.{property_name}") + + attach_property_to_entity( + schema_field_urns[0], property_name, "2020-10-01", graph=graph, namespace="io.datahubproject.test" + ) + + assert ( + get_property_from_entity( + schema_field_urns[0], f"io.datahubproject.test.{property_name}", 
graph=graph + ) + == ["2020-10-01"] + ) + + try: + attach_property_to_entity( + schema_field_urns[0], property_name, 200030, graph=graph, namespace="io.datahubproject.test" + ) + raise AssertionError( + "Should not be able to attach a number to a DATE property" + ) + except Exception as e: + if not isinstance(e, AssertionError): + pass + else: + raise e + + +def test_dataset_yaml_loader(ingest_cleanup_data, graph): + StructuredProperties.create( + "tests/structured_properties/test_structured_properties.yaml" + ) + + for dataset in Dataset.from_yaml( + "tests/structured_properties/test_dataset.yaml" + ): + for mcp in dataset.generate_mcp(): + graph.emit(mcp) + wait_for_writes_to_sync() + + property_name = "io.acryl.dataManagement.deprecationDate" + assert ( + get_property_from_entity( + make_schema_field_urn( + make_dataset_urn("hive", "user.clicks"), "ip" + ), + property_name, + graph=graph, + ) + == ["2023-01-01"] + ) + + dataset = Dataset.from_datahub( + graph=graph, + urn="urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD)", + ) + field_name = "ip" + matching_fields = [ + f + for f in dataset.schema_metadata.fields + if Dataset._simplify_field_path(f.id) == field_name + ] + assert len(matching_fields) == 1 + assert ( + matching_fields[0].structured_properties[ + Urn.make_structured_property_urn( + "io.acryl.dataManagement.deprecationDate" + ) + ] + == ["2023-01-01"] + ) + + +def test_dataset_structured_property_validation( + ingest_cleanup_data, graph, caplog +): + from datahub.api.entities.dataset.dataset import Dataset + + property_name = "replicationSLA" + property_value = 30 + value_type = "number" + + create_property_definition( + property_name=property_name, graph=graph, value_type=value_type + ) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.replicationSLA") + + attach_property_to_entity( + dataset_urns[0], property_name, [property_value], graph=graph + ) + + assert Dataset.validate_structured_property( + f"{default_namespace}.{property_name}", property_value + ) == ( + f"{default_namespace}.{property_name}", + float(property_value), + ) + + assert ( + Dataset.validate_structured_property("testName", "testValue") is None + ) + + bad_property_value = "2023-09-20" + assert ( + Dataset.validate_structured_property( + property_name, bad_property_value + ) + is None + ) + + +def test_structured_property_search(ingest_cleanup_data, graph: DataHubGraph, caplog): + + def to_es_name(property_name, namespace=default_namespace): + namespace_field = namespace.replace(".", "_") + return f"structuredProperties.{namespace_field}_{property_name}" + + # Attach structured property to entity and to field + field_property_name = f"deprecationDate{randint(10, 10000)}" + + create_property_definition( + namespace="io.datahubproject.test", + property_name=field_property_name, + graph=graph, value_type="date", entity_types=["schemaField"] + ) + generated_urns.append(f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}") + + attach_property_to_entity( + schema_field_urns[0], field_property_name, "2020-10-01", graph=graph, namespace="io.datahubproject.test" + ) + dataset_property_name = "replicationSLA" + property_value = 30 + value_type = "number" + + create_property_definition(property_name=dataset_property_name, graph=graph, value_type=value_type) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{dataset_property_name}") + + attach_property_to_entity(dataset_urns[0], dataset_property_name, [property_value], graph=graph) + + # [] 
= default entities which includes datasets, does not include fields + entity_urns = list(graph.get_urns_by_filter(extraFilters=[ + { + "field": to_es_name(dataset_property_name), + "negated": "false", + "condition": "EXISTS", + } + ])) + assert len(entity_urns) == 1 + assert entity_urns[0] == dataset_urns[0] + + # Search over schema field specifically + field_structured_prop = graph.get_aspect(entity_urn=schema_field_urns[0], aspect_type=StructuredPropertiesClass) + assert field_structured_prop == StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}", + values=["2020-10-01"] + ) + ] + ) + + # Search over entities that do not include the field + field_urns = list(graph.get_urns_by_filter(entity_types=["tag"], + extraFilters=[ + { + "field": to_es_name(field_property_name, + namespace="io.datahubproject.test"), + "negated": "false", + "condition": "EXISTS", + } + ])) + assert len(field_urns) == 0 + + # OR the two properties together to return both results + field_urns = list(graph.get_urns_by_filter(entity_types=["dataset", "tag"], + extraFilters=[ + { + "field": to_es_name(dataset_property_name), + "negated": "false", + "condition": "EXISTS", + } + ])) + assert len(field_urns) == 1 + assert dataset_urns[0] in field_urns + + +def test_dataset_structured_property_patch( + ingest_cleanup_data, graph, caplog +): + property_name = "replicationSLA" + property_value = 30 + value_type = "number" + + create_property_definition( + property_name=property_name, + graph=graph, + value_type=value_type + ) + + dataset_patcher: DatasetPatchBuilder = DatasetPatchBuilder( + urn=dataset_urns[0] + ) + + dataset_patcher.set_structured_property(StructuredPropertyUrn.make_structured_property_urn( + f"{default_namespace}.{property_name}"), property_value) + + for mcp in dataset_patcher.build(): + graph.emit(mcp) + wait_for_writes_to_sync() + + dataset = Dataset.from_datahub(graph=graph, urn=dataset_urns[0]) + assert dataset.structured_properties is not None + assert ( + [int(float(k)) for k in dataset.structured_properties[ + StructuredPropertyUrn.make_structured_property_urn( + f"{default_namespace}.{property_name}" + ) + ]] + == [property_value] + ) diff --git a/smoke-test/tests/structured_properties/test_structured_properties.yaml b/smoke-test/tests/structured_properties/test_structured_properties.yaml new file mode 100644 index 0000000000000..569a3d185165d --- /dev/null +++ b/smoke-test/tests/structured_properties/test_structured_properties.yaml @@ -0,0 +1,33 @@ +- id: clusterType + type: STRING + display_name: Cluster's type + description: "Test Cluster Type Property" + entity_types: + - dataset +- id: clusterName + type: STRING + display_name: Cluster's name + description: "Test Cluster Name Property" + entity_types: + - dataset +- id: projectNames + type: STRING + cardinality: MULTIPLE + display_name: Project Name + entity_types: + - dataset # or urn:li:logicalEntity:metamodel.datahub.dataset + - dataflow + description: "Test property for project name" + allowed_values: + - value: Tracking + description: test value 1 for project + - value: DataHub + description: test value 2 for project +- id: io.acryl.dataManagement.deprecationDate + type: DATE + display_name: Deprecation Date + entity_types: + - dataset + - dataFlow + - dataJob + - schemaField diff --git a/smoke-test/tests/telemetry/telemetry_test.py b/smoke-test/tests/telemetry/telemetry_test.py index 3127061c9f506..b7cd6fa0517df 100644 
--- a/smoke-test/tests/telemetry/telemetry_test.py +++ b/smoke-test/tests/telemetry/telemetry_test.py @@ -3,9 +3,19 @@ from datahub.cli.cli_utils import get_aspects_for_entity -def test_no_clientID(): +def test_no_client_id(): client_id_urn = "urn:li:telemetry:clientId" - aspect = ["telemetryClientId"] + aspect = ["clientId"] # this is checking for the removal of the invalid aspect RemoveClientIdAspectStep.java + + res_data = json.dumps( + get_aspects_for_entity(entity_urn=client_id_urn, aspects=aspect, typed=False) + ) + assert res_data == "{}" + + +def test_no_telemetry_client_id(): + client_id_urn = "urn:li:telemetry:clientId" + aspect = ["telemetryClientId"] # telemetry expected to be disabled for tests res_data = json.dumps( get_aspects_for_entity(entity_urn=client_id_urn, aspects=aspect, typed=False) diff --git a/smoke-test/tests/utilities/__init__.py b/smoke-test/tests/utilities/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/utilities/file_emitter.py b/smoke-test/tests/utilities/file_emitter.py new file mode 100644 index 0000000000000..27a91c360af8a --- /dev/null +++ b/smoke-test/tests/utilities/file_emitter.py @@ -0,0 +1,21 @@ +from datahub.ingestion.sink.file import FileSink, FileSinkConfig +from datahub.ingestion.api.common import PipelineContext, RecordEnvelope +from datahub.ingestion.api.sink import NoopWriteCallback +import time + + +class FileEmitter: + def __init__(self, filename: str, run_id: str = f"test_{int(time.time()*1000.0)}") -> None: + self.sink: FileSink = FileSink( + ctx=PipelineContext(run_id=run_id), + config=FileSinkConfig(filename=filename), + ) + + def emit(self, event): + self.sink.write_record_async( + record_envelope=RecordEnvelope(record=event, metadata={}), + write_callback=NoopWriteCallback(), + ) + + def close(self): + self.sink.close() \ No newline at end of file From b94d463fe0ad8d3ff12a9b1a752c5da8a0d6fac8 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 22 Jan 2024 16:23:56 -0800 Subject: [PATCH 412/792] feat(ingest/lookml): support complex lookml manifests (#9688) --- metadata-ingestion/setup.py | 2 +- .../ingestion/source/looker/lkml_patched.py | 28 +++++ .../ingestion/source/looker/lookml_source.py | 101 ++++++++---------- .../complex-manifest.lkml | 23 ++++ .../tests/integration/lookml/test_lookml.py | 12 +++ 5 files changed, 109 insertions(+), 57 deletions(-) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/looker/lkml_patched.py create mode 100644 metadata-ingestion/tests/integration/lookml/lkml_manifest_samples/complex-manifest.lkml diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 34e8167a997f6..1fb570d76120e 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -149,7 +149,7 @@ # This version of lkml contains a fix for parsing lists in # LookML files with spaces between an item and the following comma. # See https://github.com/joshtemple/lkml/issues/73. - "lkml>=1.3.0b5", + "lkml>=1.3.4", "sql-metadata==2.2.2", *sqllineage_lib, "GitPython>2", diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lkml_patched.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lkml_patched.py new file mode 100644 index 0000000000000..6506682b8ed8d --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lkml_patched.py @@ -0,0 +1,28 @@ +import pathlib +from typing import Union + +import lkml +import lkml.simple +import lkml.tree + +# Patch lkml to support the manifest.lkml files. 
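+# (Illustrative sketch of the effect, inferred from lkml's plural-key
+# convention rather than quoted from its docs: once "constant" is in
+# PLURAL_KEYS, a manifest containing
+#
+#     constant: CONNECTION_NAME { value: "choose-connection" }
+#     constant: other_variable { value: "other-variable" }
+#
+# parses to {"constants": [{"name": "CONNECTION_NAME", ...},
+# {"name": "other_variable", ...}]} rather than a single "constant" key
+# that the last block would overwrite.)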
+# We have to patch both locations because lkml uses an immutable tuple +# instead of a list for this type. +lkml.simple.PLURAL_KEYS = ( + *lkml.simple.PLURAL_KEYS, + "local_dependency", + "remote_dependency", + "constant", + "override_constant", +) +lkml.tree.PLURAL_KEYS = lkml.simple.PLURAL_KEYS + + +def load_lkml(path: Union[str, pathlib.Path]) -> dict: + """Loads a LookML file from disk and returns a dictionary.""" + + # Using this method instead of lkml.load directly ensures + # that our patches to lkml are applied. + + with open(path, "r") as file: + return lkml.load(file) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 33079f3fd9ac1..9317605d5b055 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -49,6 +49,7 @@ from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import DatasetSubTypes from datahub.ingestion.source.git.git_import import GitClone +from datahub.ingestion.source.looker.lkml_patched import load_lkml from datahub.ingestion.source.looker.looker_common import ( CORPUSER_DATAHUB, LookerCommonConfig, @@ -98,13 +99,6 @@ _BASE_PROJECT_NAME = "__BASE" -# Patch lkml to support the local_dependency and remote_dependency keywords. -lkml.simple.PLURAL_KEYS = ( - *lkml.simple.PLURAL_KEYS, - "local_dependency", - "remote_dependency", -) - _EXPLORE_FILE_EXTENSION = ".explore.lkml" _VIEW_FILE_EXTENSION = ".view.lkml" _MODEL_FILE_EXTENSION = ".model.lkml" @@ -384,10 +378,9 @@ def from_looker_dict( ] for included_file in explore_files: try: - with open(included_file, "r") as file: - parsed = lkml.load(file) - included_explores = parsed.get("explores", []) - explores.extend(included_explores) + parsed = load_lkml(included_file) + included_explores = parsed.get("explores", []) + explores.extend(included_explores) except Exception as e: reporter.report_warning( path, f"Failed to load {included_file} due to {e}" @@ -514,24 +507,23 @@ def resolve_includes( f"Will be loading {included_file}, traversed here via {traversal_path}" ) try: - with open(included_file, "r") as file: - parsed = lkml.load(file) - seen_so_far.add(included_file) - if "includes" in parsed: # we have more includes to resolve! - resolved.extend( - LookerModel.resolve_includes( - parsed["includes"], - resolved_project_name, - root_project_name, - base_projects_folder, - included_file, - reporter, - seen_so_far, - traversal_path=traversal_path - + "." - + pathlib.Path(included_file).stem, - ) + parsed = load_lkml(included_file) + seen_so_far.add(included_file) + if "includes" in parsed: # we have more includes to resolve! + resolved.extend( + LookerModel.resolve_includes( + parsed["includes"], + resolved_project_name, + root_project_name, + base_projects_folder, + included_file, + reporter, + seen_so_far, + traversal_path=traversal_path + + "."
+ + pathlib.Path(included_file).stem, ) + ) except Exception as e: reporter.report_warning( path, f"Failed to load {included_file} due to {e}" @@ -648,21 +640,20 @@ def _load_viewfile( self.reporter.report_failure(path, f"failed to load view file: {e}") return None try: - with open(path, "r") as file: - logger.debug(f"Loading viewfile {path}") - parsed = lkml.load(file) - looker_viewfile = LookerViewFile.from_looker_dict( - absolute_file_path=path, - looker_view_file_dict=parsed, - project_name=project_name, - root_project_name=self._root_project_name, - base_projects_folder=self._base_projects_folder, - raw_file_content=raw_file_content, - reporter=reporter, - ) - logger.debug(f"adding viewfile for path {path} to the cache") - self.viewfile_cache[path] = looker_viewfile - return looker_viewfile + logger.debug(f"Loading viewfile {path}") + parsed = load_lkml(path) + looker_viewfile = LookerViewFile.from_looker_dict( + absolute_file_path=path, + looker_view_file_dict=parsed, + project_name=project_name, + root_project_name=self._root_project_name, + base_projects_folder=self._base_projects_folder, + raw_file_content=raw_file_content, + reporter=reporter, + ) + logger.debug(f"adding viewfile for path {path} to the cache") + self.viewfile_cache[path] = looker_viewfile + return looker_viewfile except Exception as e: self.reporter.report_failure(path, f"failed to load view file: {e}") return None @@ -1498,17 +1489,16 @@ def __init__(self, config: LookMLSourceConfig, ctx: PipelineContext): ) def _load_model(self, path: str) -> LookerModel: - with open(path, "r") as file: - logger.debug(f"Loading model from file {path}") - parsed = lkml.load(file) - looker_model = LookerModel.from_looker_dict( - parsed, - _BASE_PROJECT_NAME, - self.source_config.project_name, - self.base_projects_folder, - path, - self.reporter, - ) + logger.debug(f"Loading model from file {path}") + parsed = load_lkml(path) + looker_model = LookerModel.from_looker_dict( + parsed, + _BASE_PROJECT_NAME, + self.source_config.project_name, + self.base_projects_folder, + path, + self.reporter, + ) return looker_model def _platform_names_have_2_parts(self, platform: str) -> bool: @@ -1797,8 +1787,7 @@ def get_project_name(self, model_name: str) -> str: def get_manifest_if_present(self, folder: pathlib.Path) -> Optional[LookerManifest]: manifest_file = folder / "manifest.lkml" if manifest_file.exists(): - with manifest_file.open() as fp: - manifest_dict = lkml.load(fp) + manifest_dict = load_lkml(manifest_file) manifest = LookerManifest( project_name=manifest_dict.get("project_name"), diff --git a/metadata-ingestion/tests/integration/lookml/lkml_manifest_samples/complex-manifest.lkml b/metadata-ingestion/tests/integration/lookml/lkml_manifest_samples/complex-manifest.lkml new file mode 100644 index 0000000000000..3d2006621dd50 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/lkml_manifest_samples/complex-manifest.lkml @@ -0,0 +1,23 @@ +project_name: "complex-manifest-project" + +constant: CONNECTION_NAME { + value: "choose-connection" + export: override_required +} + +constant: other_variable { + value: "other-variable" + export: override_required +} + +local_dependency: { + project: "looker-hub" +} + +remote_dependency: remote-proj-1 { + override_constant: schema_name {value: "mycorp_prod" } + override_constant: choose-connection {value: "snowflake-conn-main"} +} + +remote_dependency: remote-proj-2 { +} diff --git a/metadata-ingestion/tests/integration/lookml/test_lookml.py 
b/metadata-ingestion/tests/integration/lookml/test_lookml.py index 1ed0d05c84263..7d1e8d053a381 100644 --- a/metadata-ingestion/tests/integration/lookml/test_lookml.py +++ b/metadata-ingestion/tests/integration/lookml/test_lookml.py @@ -16,6 +16,7 @@ LookerModel, LookerRefinementResolver, LookMLSourceConfig, + load_lkml, ) from datahub.metadata.schema_classes import ( DatasetSnapshotClass, @@ -852,3 +853,14 @@ def test_same_name_views_different_file_path(pytestconfig, tmp_path, mock_time): output_path=tmp_path / mce_out, golden_path=test_resources_dir / mce_out, ) + + +def test_manifest_parser(pytestconfig: pytest.Config) -> None: + # This mainly tests that we're permissive enough that we don't crash when parsing the manifest file. + # We need the test because we monkeypatch the lkml library. + + test_resources_dir = pytestconfig.rootpath / "tests/integration/lookml" + manifest_file = test_resources_dir / "lkml_manifest_samples/complex-manifest.lkml" + + manifest = load_lkml(manifest_file) + assert manifest From 1d5f0f571fcb1c656c6f2c503beb0e7c026fec64 Mon Sep 17 00:00:00 2001 From: Maggie Hays Date: Mon, 22 Jan 2024 18:49:29 -0600 Subject: [PATCH 413/792] docs: Add Case Studies and DataHub Basics to Resources tab (#9687) --- docs-website/docusaurus.config.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs-website/docusaurus.config.js b/docs-website/docusaurus.config.js index 97ef2f857b45d..22edf749acaed 100644 --- a/docs-website/docusaurus.config.js +++ b/docs-website/docusaurus.config.js @@ -100,6 +100,14 @@ module.exports = { href: "https://www.youtube.com/channel/UC3qFQC5IiwR5fvWEqi_tJ5w", label: "YouTube", }, + { + href: "https://www.youtube.com/playlist?list=PLdCtLs64vZvGCKMQC2dJEZ6cUqWsREbFi", + label: "Case Studies", + }, + { + href: "https://www.youtube.com/playlist?list=PLdCtLs64vZvErAXMiqUYH9e63wyDaMBgg", + label: "DataHub Basics", + }, ], }, { From 720296ea46de85d895603a10710a9d8ab8aac82f Mon Sep 17 00:00:00 2001 From: sleeperdeep <86791232+sleeperdeep@users.noreply.github.com> Date: Tue, 23 Jan 2024 06:25:01 +0200 Subject: [PATCH 414/792] fix(ingestion/mssql): use platform_instance for mssql dataflow/datajob (#9527) Co-authored-by: Harshal Sheth Co-authored-by: sleeperdeep <--global> --- .../ingestion/source/sql/mssql/job_models.py | 26 +- .../ingestion/source/sql/mssql/source.py | 9 +- .../golden_mces_mssql_no_db_to_file.json | 831 +++++++++++++++++- .../golden_mces_mssql_no_db_with_filter.json | 446 +++++++++- .../golden_mces_mssql_to_file.json | 446 +++++++++- ...golden_mces_mssql_with_lower_case_urn.json | 446 +++++++++- 6 files changed, 2111 insertions(+), 93 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/job_models.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/job_models.py index 8aeb5421891aa..8b517747307f8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/job_models.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/job_models.py @@ -16,7 +16,7 @@ class ProcedureDependency: name: str type: str env: str - server: str + server: Optional[str] source: str = "mssql" @@ -34,7 +34,7 @@ def as_property(self) -> Dict[str, str]: @dataclass class MSSQLJob: db: str - platform_instance: str + platform_instance: Optional[str] name: str env: str source: str = "mssql" @@ -42,7 +42,7 @@ class MSSQLJob: @property def formatted_name(self) -> str: - return f"{self.formatted_platform_instance}.{self.name.replace(',', '-')}" + return self.name.replace(",", "-") @property def 
full_type(self) -> str: @@ -52,10 +52,6 @@ def full_type(self) -> str: def orchestrator(self) -> str: return self.source - @property - def formatted_platform_instance(self) -> str: - return self.platform_instance.replace(".", "/") - @property def cluster(self) -> str: return f"{self.env}" @@ -64,7 +60,7 @@ def cluster(self) -> str: @dataclass class MSSQLProceduresContainer: db: str - platform_instance: str + platform_instance: Optional[str] name: str env: str source: str = "mssql" @@ -72,16 +68,12 @@ class MSSQLProceduresContainer: @property def formatted_name(self) -> str: - return f"{self.formatted_platform_instance}.{self.name.replace(',', '-')}" + return self.name.replace(",", "-") @property def orchestrator(self) -> str: return self.source - @property - def formatted_platform_instance(self) -> str: - return self.platform_instance.replace(".", "/") - @property def cluster(self) -> str: return f"{self.env}" @@ -149,7 +141,7 @@ def full_type(self) -> str: @property def full_name(self) -> str: - return f"{self.formatted_name}.{self.formatted_name}" + return self.formatted_name @dataclass @@ -172,6 +164,9 @@ def urn(self) -> str: flow_id=self.entity.flow.formatted_name, job_id=self.entity.formatted_name, cluster=self.entity.flow.cluster, + platform_instance=self.entity.flow.platform_instance + if self.entity.flow.platform_instance + else None, ) def add_property( @@ -228,6 +223,9 @@ def urn(self) -> str: orchestrator=self.entity.orchestrator, flow_id=self.entity.formatted_name, cluster=self.entity.cluster, + platform_instance=self.entity.platform_instance + if self.entity.platform_instance + else None, ) @property diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index 2442df595d967..56706e6f90d38 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -7,7 +7,6 @@ import sqlalchemy.dialects.mssql # This import verifies that the dependencies are available. 
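# (Hedged sketch of the URN change above, not output captured from this patch:
# builder.make_data_flow_urn prefixes the flow id with the platform instance
# when one is set, so a config with a hypothetical platform_instance="my_instance"
# would now yield
#   urn:li:dataFlow:(mssql,my_instance.demodata.Foo.stored_procedures,PROD)
# while the default platform_instance=None yields
#   urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)
# which is why the golden files below drop the hard-coded "localhost." prefix.)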
-import sqlalchemy_pytds # noqa: F401 from pydantic.fields import Field from sqlalchemy import create_engine, inspect from sqlalchemy.engine.base import Connection @@ -132,10 +131,6 @@ def get_sql_alchemy_url( uri = f"{uri}?{urllib.parse.urlencode(self.uri_args)}" return uri - @property - def host(self): - return self.platform_instance or self.host_port.split(":")[0] - @property def db(self): return self.database @@ -369,7 +364,7 @@ def loop_jobs( name=job_name, env=sql_config.env, db=db_name, - platform_instance=sql_config.host, + platform_instance=sql_config.platform_instance, ) data_flow = MSSQLDataFlow(entity=job) yield from self.construct_flow_workunits(data_flow=data_flow) @@ -404,7 +399,7 @@ def loop_stored_procedures( # noqa: C901 name=procedure_flow_name, env=sql_config.env, db=db_name, - platform_instance=sql_config.host, + platform_instance=sql_config.platform_instance, ) data_flow = MSSQLDataFlow(entity=mssql_default_job) with inspector.engine.connect() as conn: diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json index 66ef9b097c973..4c0c1c6512ec7 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json @@ -88,14 +88,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup" + "name": "Weekly Demo Data Backup" } }, "systemMetadata": { @@ -106,24 +106,24 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { "json": { "customProperties": { - "job_id": "3565ea3e-9a3a-4cb0-acd5-213d740479a0", + "job_id": "0565425f-2083-45d3-bb61-76e0ee5e1117", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-11-27 23:08:29.350000", - "date_modified": "2023-11-27 23:08:29.833000", + "date_created": "2024-01-19 11:45:06.667000", + "date_modified": "2024-01-19 11:45:06.840000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", "command": "ALTER DATABASE DemoData SET READ_ONLY" }, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup.localhost.Weekly Demo Data Backup", + "name": "Weekly Demo Data Backup", "type": { "string": "MSSQL_JOB_STEP" } @@ -137,7 +137,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1932,14 +1932,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", 
"changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.demodata.Foo.stored_procedures" + "name": "demodata.Foo.stored_procedures" } }, "systemMetadata": { @@ -1950,7 +1950,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-11-27 23:08:29.077000", - "date_modified": "2023-11-27 23:08:29.077000" + "date_created": "2024-01-19 11:45:06.590000", + "date_modified": "2024-01-19 11:45:06.590000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", @@ -1979,7 +1979,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -4381,7 +4381,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4397,7 +4397,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4413,7 +4413,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4429,7 +4429,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4442,5 +4442,800 @@ "runId": "mssql-test", "lastRunId": "no-run-id-provided" } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f1b4c0e379c4b2e2e09a8ecd6c1b6dec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ 
+ "entityType": "container", + "entityUrn": "urn:li:container:bad84e08ecf49aee863df68243d8b9d0", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:e48d82445eeacfbe13b431f0bb1826ee", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:884bfecd9e414990a494681293413e8e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:142ca5fc51b7f44e5e6a424bf1043590", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:1b9d125d390447de36719bfb8dd1f782", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fcd4c8da3739150766f91e7f6c2a3a30", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:2029cab615b3cd82cb87b153957d2e92", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:556e25ccec98892284f017f870ef7809", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": 
"urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "urn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:a6bea84fba7b05fb5d12630c8e6306ac", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:9f37bb7baa7ded19cc023e9f644a8cf8", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:47217386c89d8b94153f6ee31e7e77ec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5eb0d61efa998d1ccd5cbdc6ce4bb4af", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:2816b2cb7f90d3dce64125ba89fb1fa8", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:20d0f0c94e9796ff44ff32d4d0e19084", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": 
"mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:3600d2ebb33b25dac607624d7eae7575", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:280f2e3aefacc346d0ce1590ec337c7d", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:cba5c3ca7f028fcf749593be369d3c24", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:58c30fa72f213ca7e12fb04f5a7d150f", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:9387ddfeb7b57672cabd761ade89c49c", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:3a5f70e0e34834d4eeeb4d5a5caf03d0", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,NewData.dbo.ProductsNew,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + }, + { + "id": "urn:li:container:3a5f70e0e34834d4eeeb4d5a5caf03d0", + "urn": "urn:li:container:3a5f70e0e34834d4eeeb4d5a5caf03d0" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": 
"urn:li:container:7cc43e5b4e2a7f2f66f1df774d1a0c63", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,NewData.FooNew.ItemsNew,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + }, + { + "id": "urn:li:container:7cc43e5b4e2a7f2f66f1df774d1a0c63", + "urn": "urn:li:container:7cc43e5b4e2a7f2f66f1df774d1a0c63" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,NewData.FooNew.PersonsNew,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + }, + { + "id": "urn:li:container:7cc43e5b4e2a7f2f66f1df774d1a0c63", + "urn": "urn:li:container:7cc43e5b4e2a7f2f66f1df774d1a0c63" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:54727d9fd7deacef27641559125bbc56", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:141b0980dcb08f48544583e47cf48807", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:c6627af82d44de89492e1a9315ae9f4b", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59", + "urn": "urn:li:container:9447d283fb4f95ce7474f1db0179bb59" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json index c1984828750eb..02c357259c3f5 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_with_filter.json @@ 
-88,14 +88,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup" + "name": "Weekly Demo Data Backup" } }, "systemMetadata": { @@ -106,24 +106,24 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "0565425f-2083-45d3-bb61-76e0ee5e1117", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2024-01-19 11:45:06.667000", + "date_modified": "2024-01-19 11:45:06.840000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", "command": "ALTER DATABASE DemoData SET READ_ONLY" }, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup.localhost.Weekly Demo Data Backup", + "name": "Weekly Demo Data Backup", "type": { "string": "MSSQL_JOB_STEP" } @@ -137,7 +137,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1932,14 +1932,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.demodata.Foo.stored_procedures" + "name": "demodata.Foo.stored_procedures" } }, "systemMetadata": { @@ -1950,7 +1950,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2024-01-19 11:45:06.590000", + "date_modified": "2024-01-19 11:45:06.590000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", @@ -1979,7 +1979,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -2324,7 +2324,7 @@ }, { "entityType": 
"dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2340,7 +2340,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2356,7 +2356,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2372,7 +2372,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2385,5 +2385,415 @@ "runId": "mssql-test", "lastRunId": "no-run-id-provided" } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f1b4c0e379c4b2e2e09a8ecd6c1b6dec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:bad84e08ecf49aee863df68243d8b9d0", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:e48d82445eeacfbe13b431f0bb1826ee", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:884bfecd9e414990a494681293413e8e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": 
"urn:li:container:142ca5fc51b7f44e5e6a424bf1043590", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:1b9d125d390447de36719bfb8dd1f782", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fcd4c8da3739150766f91e7f6c2a3a30", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:2029cab615b3cd82cb87b153957d2e92", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:556e25ccec98892284f017f870ef7809", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "urn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + 
"id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:a6bea84fba7b05fb5d12630c8e6306ac", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:9f37bb7baa7ded19cc023e9f644a8cf8", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } } ] \ No newline at end of file diff --git 
a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json index 9ce3664eff6a1..02c357259c3f5 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json @@ -88,14 +88,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup" + "name": "Weekly Demo Data Backup" } }, "systemMetadata": { @@ -106,24 +106,24 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { "json": { "customProperties": { - "job_id": "3b767c17-c921-4331-93d9-eb0e006045a4", + "job_id": "0565425f-2083-45d3-bb61-76e0ee5e1117", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-11-23 11:04:47.927000", - "date_modified": "2023-11-23 11:04:48.090000", + "date_created": "2024-01-19 11:45:06.667000", + "date_modified": "2024-01-19 11:45:06.840000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", "command": "ALTER DATABASE DemoData SET READ_ONLY" }, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup.localhost.Weekly Demo Data Backup", + "name": "Weekly Demo Data Backup", "type": { "string": "MSSQL_JOB_STEP" } @@ -137,7 +137,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1932,14 +1932,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.demodata.Foo.stored_procedures" + "name": "demodata.Foo.stored_procedures" } }, "systemMetadata": { @@ -1950,7 +1950,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-11-23 11:04:47.857000", - "date_modified": "2023-11-23 11:04:47.857000" + "date_created": "2024-01-19 11:45:06.590000", + "date_modified": "2024-01-19 11:45:06.590000" }, "externalUrl": "", "name": 
"demodata.Foo.Proc.With.SpecialChar", @@ -1979,7 +1979,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -2324,7 +2324,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2340,7 +2340,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2356,7 +2356,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2372,7 +2372,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2385,5 +2385,415 @@ "runId": "mssql-test", "lastRunId": "no-run-id-provided" } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f1b4c0e379c4b2e2e09a8ecd6c1b6dec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:bad84e08ecf49aee863df68243d8b9d0", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:e48d82445eeacfbe13b431f0bb1826ee", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": 
"urn:li:container:884bfecd9e414990a494681293413e8e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:142ca5fc51b7f44e5e6a424bf1043590", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:1b9d125d390447de36719bfb8dd1f782", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fcd4c8da3739150766f91e7f6c2a3a30", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:2029cab615b3cd82cb87b153957d2e92", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:556e25ccec98892284f017f870ef7809", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + 
"id": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "urn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:a6bea84fba7b05fb5d12630c8e6306ac", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:9f37bb7baa7ded19cc023e9f644a8cf8", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": 
"urn:li:container:3f157d8292fb473142f19e2250af537f", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json index 037a341b7d66e..ad15c654e44c9 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json @@ -88,14 +88,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup" + "name": "Weekly Demo Data Backup" } }, "systemMetadata": { @@ -106,24 +106,24 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { "json": { "customProperties": { - "job_id": "3b767c17-c921-4331-93d9-eb0e006045a4", + "job_id": "0565425f-2083-45d3-bb61-76e0ee5e1117", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-11-23 11:04:47.927000", - "date_modified": "2023-11-23 11:04:48.090000", + "date_created": "2024-01-19 11:45:06.667000", + "date_modified": "2024-01-19 11:45:06.840000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", "command": "ALTER DATABASE DemoData SET READ_ONLY" }, "externalUrl": "", - "name": "localhost.Weekly Demo Data Backup.localhost.Weekly Demo Data Backup", + "name": "Weekly Demo Data Backup", "type": { "string": "MSSQL_JOB_STEP" } @@ -137,7 +137,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -1932,14 +1932,14 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "dataFlowInfo", "aspect": { "json": { "customProperties": {}, "externalUrl": "", - "name": "localhost.demodata.Foo.stored_procedures" + "name": "demodata.Foo.stored_procedures" } }, "systemMetadata": { @@ -1950,7 +1950,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": 
"urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInfo", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-11-23 11:04:47.857000", - "date_modified": "2023-11-23 11:04:47.857000" + "date_created": "2024-01-19 11:45:06.590000", + "date_modified": "2024-01-19 11:45:06.590000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", @@ -1979,7 +1979,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "dataJobInputOutput", "aspect": { @@ -2324,7 +2324,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2340,7 +2340,7 @@ }, { "entityType": "dataFlow", - "entityUrn": "urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD)", + "entityUrn": "urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2356,7 +2356,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.Weekly Demo Data Backup,PROD),localhost.Weekly Demo Data Backup)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,Weekly Demo Data Backup,PROD),Weekly Demo Data Backup)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2372,7 +2372,7 @@ }, { "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,localhost.demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(mssql,demodata.Foo.stored_procedures,PROD),Proc.With.SpecialChar)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2385,5 +2385,415 @@ "runId": "mssql-test", "lastRunId": "no-run-id-provided" } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f1b4c0e379c4b2e2e09a8ecd6c1b6dec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:bad84e08ecf49aee863df68243d8b9d0", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:e48d82445eeacfbe13b431f0bb1826ee", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:884bfecd9e414990a494681293413e8e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:142ca5fc51b7f44e5e6a424bf1043590", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:1b9d125d390447de36719bfb8dd1f782", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fcd4c8da3739150766f91e7f6c2a3a30", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:2029cab615b3cd82cb87b153957d2e92", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:556e25ccec98892284f017f870ef7809", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": 
"urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec", + "urn": "urn:li:container:d41a036a2e6cfa44b834edf7683199ec" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + }, + { + "id": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671", + "urn": "urn:li:container:6e5c6d608d0a2dcc4eb03591382e5671" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:a6bea84fba7b05fb5d12630c8e6306ac", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:9f37bb7baa7ded19cc023e9f644a8cf8", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:3f157d8292fb473142f19e2250af537f", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5", + "urn": "urn:li:container:b7062d1c0c650d9de0f7a9a5de00b1b5" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mssql-test", + "lastRunId": "no-run-id-provided" + } } ] \ No newline at end of file From fb6eafbaa0c3c27ff859006357ab0cea8558fb08 Mon Sep 17 00:00:00 2001 From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com> Date: Tue, 23 Jan 2024 11:17:44 +0530 Subject: [PATCH 415/792] tests(posts): Verify Edit Post Steps (#9683) --- .../cypress/e2e/settings/homePagePost.js | 116 ++++++++++-------- 1 file changed, 68 insertions(+), 48 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/homePagePost.js b/smoke-test/tests/cypress/cypress/e2e/settings/homePagePost.js index cb67efe00b484..843a15d7430af 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/homePagePost.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/homePagePost.js @@ -1,65 +1,85 @@ -const title = 'Test Link Title' -const url = 'https://www.example.com' -const imagesURL = 'https://www.example.com/images/example-image.jpg' - const homePageRedirection = () => { - cy.visit('/') - cy.waitTextPresent("Welcome back,") -} + cy.visit('/'); + cy.waitTextPresent("Welcome back"); +}; -const addAnnouncement = () => { - cy.get('[id="posts-create-post"]').click({ force: true }); - cy.waitTextPresent('Create new Post') - cy.enterTextInTestId("create-post-title", "Test Announcement Title"); - cy.get('[id="description"]').type("Add Description to post announcement") - cy.get('[data-testid="create-post-button"]').click({ force: true }); - cy.reload() +const addOrEditAnnouncement = (text, title, description, testId) => { + cy.waitTextPresent(text); + cy.get('[data-testid="create-post-title"]').clear().type(title); + cy.get('[id="description"]').clear().type(description); + cy.get(`[data-testid="${testId}-post-button"]`).click({ force: true }); + cy.reload(); homePageRedirection(); - cy.waitTextPresent("Test Announcement Title"); -} +}; -const addLink = (title,url,imagesURL) => { - cy.get('[id="posts-create-post"]').click({ force: true }); - cy.waitTextPresent('Create new Post') - cy.clickOptionWithText('Link'); - cy.enterTextInTestId('create-post-title', title); - cy.enterTextInTestId('create-post-link', url); - cy.enterTextInTestId('create-post-media-location', imagesURL) - cy.get('[data-testid="create-post-button"]').click({ force: true }); - cy.reload() +const addOrEditLink = (text, title, url, imagesURL, testId) => { + cy.waitTextPresent(text); + cy.get('[data-testid="create-post-title"]').clear().type(title); + cy.get('[data-testid="create-post-link"]').clear().type(url); + cy.get('[data-testid="create-post-media-location"]').clear().type(imagesURL); + cy.get(`[data-testid="${testId}-post-button"]`).click({ force: true }); 
+ cy.reload(); homePageRedirection(); +}; + +const clickOnNewPost = () => { + cy.get('[id="posts-create-post"]').click({ force: true }); }; + +const clickOnMoreOption = () => { + cy.get('[aria-label="more"]').first().click(); }; -describe("Create announcement and link posts", () => { +describe("create announcement and link post", () => { beforeEach(() => { cy.loginWithCredentials(); cy.goToHomePagePostSettings(); }); - it("Create and Verify Announcement Post", () => { - addAnnouncement(); - }) + it("create announcement post and verify", () => { + clickOnNewPost(); + addOrEditAnnouncement("Create new Post", "Test Announcement Title", "Add Description to post announcement", "create"); + cy.waitTextPresent("Test Announcement Title"); + }); - it("Delete and Verify Announcement Post", () => { - deleteFromPostDropdown(); - cy.ensureTextNotPresent("Test Announcement Title") - }) + it("edit announced post and verify", () => { + clickOnMoreOption(); + cy.clickOptionWithText("Edit"); + addOrEditAnnouncement("Edit Post", "Test Announcement Title Edited", "Description Edited", "update"); + cy.waitTextPresent("Test Announcement Title Edited"); + }); + + it("delete announced post and verify", () => { + clickOnMoreOption(); + cy.clickOptionWithText("Delete"); + cy.clickOptionWithText("Yes"); + cy.reload(); + homePageRedirection(); + cy.ensureTextNotPresent("Test Announcement Title Edited"); + }); + + it("create link post and verify", () => { + clickOnNewPost(); + cy.waitTextPresent('Create new Post'); + cy.contains('label', 'Link').click(); + addOrEditLink("Create new Post", "Test Link Title", 'https://www.example.com', 'https://www.example.com/images/example-image.jpg', "create"); + cy.waitTextPresent("Test Link Title"); + }); + + it("edit linked post and verify", () => { + clickOnMoreOption(); + cy.clickOptionWithText("Edit"); + addOrEditLink("Edit Post", "Test Link Edited Title", 'https://www.updatedexample.com', 'https://www.updatedexample.com/images/example-image.jpg', "update"); + cy.waitTextPresent("Test Link Edited Title"); + }); - it("Create and Verify Link Post", () => { - addLink(title,url,imagesURL) - }) + it("delete linked post and verify", () => { + clickOnMoreOption(); + cy.clickOptionWithText("Delete"); + cy.clickOptionWithText("Yes"); + cy.reload(); + homePageRedirection(); + cy.ensureTextNotPresent("Test Link Edited Title"); + }); +}); - it("Delete and Verify Link Post", () => { - deleteFromPostDropdown(); - cy.ensureTextNotPresent(title); - }) -}) \ No newline at end of file From f14dc4ded2082859ad2c5a1941981354b7e7d19b Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 22 Jan 2024 23:37:23 -0800 Subject: [PATCH 416/792] feat(ingest): improve sql tests + dbt reporting (#9649) --- .../docs/sources/datahub/datahub_recipe.yml | 8 ++++---- .../src/datahub/cli/ingest_cli.py | 2 +- .../ingestion/source/dbt/dbt_common.py | 15 ++++++++++++-- .../datahub/ingestion/source/dbt/dbt_core.py | 2 +- .../unit/sql_parsing/test_sqlglot_lineage.py | 5 ----- .../unit/sql_parsing/test_sqlglot_utils.py | 20 +++++++++++++++++++ 6 files changed, 39 insertions(+), 13 deletions(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_utils.py diff --git a/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml b/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml index 
cb7fc97a39b9f..632828f42014b 100644 --- a/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml +++ b/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml @@ -1,13 +1,13 @@ pipeline_name: datahub_source_1 datahub_api: - server: "http://localhost:8080" # Migrate data from DataHub instance on localhost:8080 + server: "http://localhost:8080" # Migrate data from DataHub instance on localhost:8080 token: "" source: type: datahub config: include_all_versions: false database_connection: - scheme: "mysql+pymysql" # or "postgresql+psycopg2" for Postgres + scheme: "mysql+pymysql" # or "postgresql+psycopg2" for Postgres host_port: ":" username: "" password: "" @@ -19,12 +19,12 @@ source: enabled: true ignore_old_state: false extractor_config: - set_system_metadata: false # Replicate system metadata + set_system_metadata: false # Replicate system metadata # Here, we write to a DataHub instance # You can also use a different sink, e.g. to write the data to a file instead sink: - type: datahub + type: datahub-rest config: server: "" token: "" diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index 569a836f3ef5c..9c55f52497c0e 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -131,7 +131,7 @@ def run( async def run_pipeline_to_completion(pipeline: Pipeline) -> int: logger.info("Starting metadata ingestion") - with click_spinner.spinner(disable=no_spinner): + with click_spinner.spinner(disable=no_spinner or no_progress): try: pipeline.run() except Exception as e: diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index 75fba6e9d426b..985c9118f3422 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -125,7 +125,9 @@ @dataclass class DBTSourceReport(StaleEntityRemovalSourceReport): - pass + sql_statements_parsed: int = 0 + sql_parser_detach_ctes_failures: int = 0 + sql_parser_skipped_missing_code: int = 0 class EmitDirective(ConfigEnum): @@ -821,6 +823,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: ] test_nodes = [test_node for test_node in nodes if test_node.node_type == "test"] + logger.info(f"Creating dbt metadata for {len(nodes)} nodes") yield from self.create_platform_mces( non_test_nodes, additional_custom_props_filtered, @@ -829,6 +832,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: self.config.platform_instance, ) + logger.info(f"Updating {self.config.target_platform} metadata") yield from self.create_platform_mces( non_test_nodes, additional_custom_props_filtered, @@ -988,15 +992,22 @@ def _infer_schemas_and_update_cll(self, all_nodes_map: Dict[str, DBTNode]) -> No }, ) except Exception as e: + self.report.sql_parser_detach_ctes_failures += 1 + logger.debug( + f"Failed to detach CTEs from compiled code. {node.dbt_name} will not have column lineage." + ) sql_result = SqlParsingResult.make_from_error(e) else: sql_result = sqlglot_lineage( preprocessed_sql, schema_resolver=schema_resolver ) + self.report.sql_statements_parsed += 1 + else: + self.report.sql_parser_skipped_missing_code += 1 # Save the column lineage. if self.config.include_column_lineage and sql_result: - # We only save the debug info here. We're report errors based on it later, after + # We only save the debug info here. 
We'll report errors based on it later, after # applying the configured node filters. node.cll_debug_info = sql_result.debug_info diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index 563b005d7a88d..6fd3c5ba309f9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -65,7 +65,7 @@ class DBTCoreConfig(DBTCommonConfig): _github_info_deprecated = pydantic_renamed_field("github_info", "git_info") - @validator("aws_connection") + @validator("aws_connection", always=True) def aws_connection_needed_if_s3_uris_present( cls, aws_connection: Optional[AwsConnectionConfig], values: Dict, **kwargs: Any ) -> Optional[AwsConnectionConfig]: diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index eb1ba06669112..bbb63ceb1318e 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -3,7 +3,6 @@ import pytest from datahub.testing.check_sql_parser_result import assert_sql_result -from datahub.utilities.sqlglot_lineage import _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT RESOURCE_DIR = pathlib.Path(__file__).parent / "goldens" @@ -802,10 +801,6 @@ def test_snowflake_update_hardcoded(): ) -def test_update_from_select(): - assert _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT == {"returning", "this"} - - def test_snowflake_update_from_table(): # Can create these tables with the following SQL: """ diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_utils.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_utils.py new file mode 100644 index 0000000000000..b01c512c383cb --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_utils.py @@ -0,0 +1,20 @@ +from datahub.utilities.sqlglot_lineage import ( + _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT, + _get_dialect, + _is_dialect_instance, +) + + +def test_update_from_select(): + assert _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT == {"returning", "this"} + + +def test_is_dialect_instance(): + snowflake = _get_dialect("snowflake") + + assert _is_dialect_instance(snowflake, "snowflake") + assert not _is_dialect_instance(snowflake, "bigquery") + + redshift = _get_dialect("redshift") + assert _is_dialect_instance(redshift, ["redshift", "snowflake"]) + assert _is_dialect_instance(redshift, ["postgres", "snowflake"]) From 19b76c3307d2990f9fe5ac011546952260b3afd1 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 22 Jan 2024 23:37:53 -0800 Subject: [PATCH 417/792] feat(ingest/sql-parser): test redshift temp tables (#9653) --- .../test_redshift_temp_table_shortcut.json | 47 +++++++++++++++++++ .../unit/sql_parsing/test_sqlglot_lineage.py | 27 +++++++++++ 2 files changed, 74 insertions(+) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_temp_table_shortcut.json diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_temp_table_shortcut.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_temp_table_shortcut.json new file mode 100644 index 0000000000000..974eddb961d64 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_temp_table_shortcut.json @@ -0,0 +1,47 @@ +{ + "query_type": "CREATE", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.other_schema.table1,PROD)" 
+ ], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.my_schema.#my_custom_name,PROD)" + ], + "column_lineage": [ + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.my_schema.#my_custom_name,PROD)", + "column": "col1", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "native_column_type": "INTEGER" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.other_schema.table1,PROD)", + "column": "col1" + } + ] + }, + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.my_schema.#my_custom_name,PROD)", + "column": "col2", + "column_type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "native_column_type": "INTEGER" + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.other_schema.table1,PROD)", + "column": "col2" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index bbb63ceb1318e..42863ab005f07 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -992,3 +992,30 @@ def test_redshift_materialized_view_auto_refresh(): expected_file=RESOURCE_DIR / "test_redshift_materialized_view_auto_refresh.json", ) + + +def test_redshift_temp_table_shortcut(): + # On redshift, tables starting with # are temporary tables. + assert_sql_result( + """ +CREATE TABLE #my_custom_name +distkey (1) +sortkey (1,2) +AS +WITH cte AS ( +SELECT * +FROM other_schema.table1 +) +SELECT * FROM cte +""", + dialect="redshift", + default_db="my_db", + default_schema="my_schema", + schemas={ + "urn:li:dataset:(urn:li:dataPlatform:redshift,my_db.other_schema.table1,PROD)": { + "col1": "INTEGER", + "col2": "INTEGER", + }, + }, + expected_file=RESOURCE_DIR / "test_redshift_temp_table_shortcut.json", + ) From f0a48b6da21536ea7b3f658fa27f3b0716daf5c0 Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Tue, 23 Jan 2024 14:06:16 +0000 Subject: [PATCH 418/792] feat(policies): Allow policies to be applied to resources based on tags (#9684) --- .../authorization/EntityFieldType.java | 4 +- .../DefaultEntitySpecResolver.java | 4 +- .../TagFieldResolverProvider.java | 65 ++++++++ .../authorization/PolicyEngineTest.java | 71 +++++++- .../TagFieldResolverProviderTest.java | 154 ++++++++++++++++++ 5 files changed, 293 insertions(+), 5 deletions(-) create mode 100644 metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProvider.java create mode 100644 metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProviderTest.java diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java index 6b08cdb00e9ab..928876ce71cd5 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java @@ -28,5 +28,7 @@ public enum EntityFieldType { /** Groups of which the entity (only applies to corpUser) is a member */ GROUP_MEMBERSHIP, /** Data platform instance of resource */ - DATA_PLATFORM_INSTANCE + DATA_PLATFORM_INSTANCE, + /** Tags of the entity */ + 
TAG, } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java index c2d9c42693311..653bbecbfa8ad 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java @@ -8,6 +8,7 @@ import com.datahub.authorization.fieldresolverprovider.EntityUrnFieldResolverProvider; import com.datahub.authorization.fieldresolverprovider.GroupMembershipFieldResolverProvider; import com.datahub.authorization.fieldresolverprovider.OwnerFieldResolverProvider; +import com.datahub.authorization.fieldresolverprovider.TagFieldResolverProvider; import com.google.common.collect.ImmutableList; import com.linkedin.entity.client.EntityClient; import com.linkedin.util.Pair; @@ -26,7 +27,8 @@ public DefaultEntitySpecResolver(Authentication systemAuthentication, EntityClie new DomainFieldResolverProvider(entityClient, systemAuthentication), new OwnerFieldResolverProvider(entityClient, systemAuthentication), new DataPlatformInstanceFieldResolverProvider(entityClient, systemAuthentication), - new GroupMembershipFieldResolverProvider(entityClient, systemAuthentication)); + new GroupMembershipFieldResolverProvider(entityClient, systemAuthentication), + new TagFieldResolverProvider(entityClient, systemAuthentication)); } @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProvider.java new file mode 100644 index 0000000000000..2cfd803249734 --- /dev/null +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProvider.java @@ -0,0 +1,65 @@ +package com.datahub.authorization.fieldresolverprovider; + +import com.datahub.authentication.Authentication; +import com.datahub.authorization.EntityFieldType; +import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; +import com.linkedin.common.GlobalTags; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** Provides field resolver for tags given entitySpec */ +@Slf4j +@RequiredArgsConstructor +public class TagFieldResolverProvider implements EntityFieldResolverProvider { + + private final EntityClient _entityClient; + private final Authentication _systemAuthentication; + + @Override + public List<EntityFieldType> getFieldTypes() { + return Collections.singletonList(EntityFieldType.TAG); + } + + @Override + public FieldResolver getFieldResolver(EntitySpec entitySpec) { + return FieldResolver.getResolverFromFunction(entitySpec, this::getTags); + } + + private FieldResolver.FieldValue getTags(EntitySpec entitySpec) { + Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity()); + EnvelopedAspect globalTagsAspect; + try { + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + 
Collections.singleton(Constants.GLOBAL_TAGS_ASPECT_NAME), + _systemAuthentication); + if (response == null + || !response.getAspects().containsKey(Constants.GLOBAL_TAGS_ASPECT_NAME)) { + return FieldResolver.emptyFieldValue(); + } + globalTagsAspect = response.getAspects().get(Constants.GLOBAL_TAGS_ASPECT_NAME); + } catch (Exception e) { + log.error("Error while retrieving tags aspect for urn {}", entityUrn, e); + return FieldResolver.emptyFieldValue(); + } + GlobalTags globalTags = new GlobalTags(globalTagsAspect.getValue().data()); + return FieldResolver.FieldValue.builder() + .values( + globalTags.getTags().stream() + .map(tag -> tag.getTag().toString()) + .collect(Collectors.toSet())) + .build(); + } +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java index c7f06eeba6e85..8ecfb5a2c27bb 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java @@ -44,6 +44,7 @@ public class PolicyEngineTest { private static final String AUTHORIZED_GROUP = "urn:li:corpGroup:authorizedGroup"; private static final String RESOURCE_URN = "urn:li:dataset:test"; private static final String DOMAIN_URN = "urn:li:domain:domain1"; + private static final String TAG_URN = "urn:li:tag:allowed"; private static final String OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__technical_owner"; private static final String OTHER_OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__data_steward"; @@ -69,7 +70,8 @@ public void setupTest() throws Exception { AUTHORIZED_PRINCIPAL, Collections.emptySet(), Collections.emptySet(), - Collections.singleton(AUTHORIZED_GROUP)); + Collections.singleton(AUTHORIZED_GROUP), + Collections.emptySet()); unauthorizedUserUrn = Urn.createFromString(UNAUTHORIZED_PRINCIPAL); resolvedUnauthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); @@ -595,6 +597,7 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersMatch() throws Except RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. PolicyEngine.PolicyEvaluationResult result1 = @@ -653,6 +656,7 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), + Collections.emptySet(), Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result1 = @@ -712,6 +716,7 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), + Collections.emptySet(), Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result1 = @@ -767,6 +772,7 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersMatch() throws Excep RESOURCE_URN, ImmutableSet.of(AUTHORIZED_GROUP), Collections.emptySet(), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. 
PolicyEngine.PolicyEvaluationResult result1 = @@ -1037,6 +1043,7 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchDomain() throws RESOURCE_URN, Collections.emptySet(), Collections.singleton(DOMAIN_URN), + Collections.emptySet(), Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result = _policyEngine.evaluatePolicy( @@ -1082,6 +1089,7 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatchDomain() thro RESOURCE_URN, Collections.emptySet(), Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet(), Collections.emptySet()); // Domain doesn't match PolicyEngine.PolicyEvaluationResult result = _policyEngine.evaluatePolicy( @@ -1095,6 +1103,52 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatchDomain() thro verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any()); } + @Test + public void testEvaluatePolicyResourceFilterSpecificResourceMatchTag() throws Exception { + final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); + dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); + dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE); + dataHubPolicyInfo.setPrivileges(new StringArray("VIEW_ENTITY_PAGE")); + dataHubPolicyInfo.setDisplayName("Tag-based policy"); + dataHubPolicyInfo.setDescription("Allow viewing entity pages based on tags"); + dataHubPolicyInfo.setEditable(true); + + final DataHubActorFilter actorFilter = new DataHubActorFilter(); + actorFilter.setResourceOwners(true); + actorFilter.setAllUsers(true); + actorFilter.setAllGroups(true); + dataHubPolicyInfo.setActors(actorFilter); + + final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.TAG, + Collections.singletonList(TAG_URN)))); + dataHubPolicyInfo.setResources(resourceFilter); + + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.emptySet(), + Collections.emptySet(), + Collections.singleton(TAG_URN)); + PolicyEngine.PolicyEvaluationResult result = + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "VIEW_ENTITY_PAGE", + Optional.of(resourceSpec)); + assertTrue(result.isGranted()); + + // Verify no network calls + verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any()); + } + @Test public void testGetGrantedPrivileges() throws Exception { // Policy 1, match dataset type and domain @@ -1180,6 +1234,7 @@ public void testGetGrantedPrivileges() throws Exception { RESOURCE_URN, Collections.emptySet(), Collections.singleton(DOMAIN_URN), + Collections.emptySet(), Collections.emptySet()); // Everything matches assertEquals( _policyEngine.getGrantedPrivileges( @@ -1192,6 +1247,7 @@ public void testGetGrantedPrivileges() throws Exception { RESOURCE_URN, Collections.emptySet(), Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet(), Collections.emptySet()); // Domain doesn't match assertEquals( _policyEngine.getGrantedPrivileges( @@ -1204,6 +1260,7 @@ public void testGetGrantedPrivileges() throws Exception { "urn:li:dataset:random", Collections.emptySet(), Collections.singleton(DOMAIN_URN), + Collections.emptySet(), Collections.emptySet()); // Resource doesn't match assertEquals( _policyEngine.getGrantedPrivileges( @@ -1228,6 +1285,7 @@ public void testGetGrantedPrivileges() throws Exception { RESOURCE_URN, 
Collections.singleton(AUTHORIZED_PRINCIPAL),
             Collections.singleton(DOMAIN_URN),
+            Collections.emptySet(),
             Collections.emptySet()); // Is owner
     assertEquals(
         _policyEngine.getGrantedPrivileges(
@@ -1240,6 +1298,7 @@ public void testGetGrantedPrivileges() throws Exception {
             RESOURCE_URN,
             Collections.singleton(AUTHORIZED_PRINCIPAL),
             Collections.singleton(DOMAIN_URN),
+            Collections.emptySet(),
             Collections.emptySet()); // Resource type doesn't match
     assertEquals(
         _policyEngine.getGrantedPrivileges(
@@ -1289,6 +1348,7 @@ public void testGetMatchingActorsResourceMatch() throws Exception {
             RESOURCE_URN,
             ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP),
             Collections.emptySet(),
+            Collections.emptySet(),
             Collections.emptySet());
     PolicyEngine.PolicyActors actors =
         _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec));
@@ -1406,6 +1466,7 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception {
             RESOURCE_URN,
             ImmutableSet.of(),
             Collections.emptySet(),
+            Collections.emptySet(),
             Collections.emptySet());
 
     PolicyEngine.PolicyActors actors =
@@ -1506,6 +1567,7 @@ public static ResolvedEntitySpec buildEntityResolvers(String entityType, String
         entityUrn,
         Collections.emptySet(),
         Collections.emptySet(),
+        Collections.emptySet(),
         Collections.emptySet());
   }
 
@@ -1514,7 +1576,8 @@ public static ResolvedEntitySpec buildEntityResolvers(
       String entityUrn,
       Set<String> owners,
       Set<String> domains,
-      Set<String> groups) {
+      Set<String> groups,
+      Set<String> tags) {
     return new ResolvedEntitySpec(
         new EntitySpec(entityType, entityUrn),
         ImmutableMap.of(
@@ -1527,6 +1590,8 @@ public static ResolvedEntitySpec buildEntityResolvers(
             EntityFieldType.DOMAIN,
             FieldResolver.getResolverFromValues(domains),
             EntityFieldType.GROUP_MEMBERSHIP,
-            FieldResolver.getResolverFromValues(groups)));
+            FieldResolver.getResolverFromValues(groups),
+            EntityFieldType.TAG,
+            FieldResolver.getResolverFromValues(tags)));
   }
 }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProviderTest.java
new file mode 100644
index 0000000000000..de5ef09cd4251
--- /dev/null
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/TagFieldResolverProviderTest.java
@@ -0,0 +1,154 @@
+package com.datahub.authorization.fieldresolverprovider;
+
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+import com.datahub.authentication.Authentication;
+import com.datahub.authorization.EntityFieldType;
+import com.datahub.authorization.EntitySpec;
+import com.linkedin.common.GlobalTags;
+import com.linkedin.common.TagAssociation;
+import com.linkedin.common.TagAssociationArray;
+import com.linkedin.common.urn.TagUrn;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.entity.Aspect;
+import com.linkedin.entity.EntityResponse;
+import com.linkedin.entity.EnvelopedAspect;
+import com.linkedin.entity.EnvelopedAspectMap;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.r2.RemoteInvocationException;
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.Set; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class TagFieldResolverProviderTest { + + private static final String TAG_URN = "urn:li:tag:test"; + private static final String RESOURCE_URN = + "urn:li:dataset:(urn:li:dataPlatform:s3,test-platform-instance.testDataset,PROD)"; + private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); + + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; + + private TagFieldResolverProvider tagFieldResolverProvider; + + @BeforeMethod + public void setup() { + MockitoAnnotations.initMocks(this); + tagFieldResolverProvider = + new TagFieldResolverProvider(entityClientMock, systemAuthenticationMock); + } + + @Test + public void shouldReturnTagType() { + assertEquals(EntityFieldType.TAG, tagFieldResolverProvider.getFieldTypes().get(0)); + } + + @Test + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + throws RemoteInvocationException, URISyntaxException { + when(entityClientMock.getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); + + var result = tagFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); + + assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock)); + } + + @Test + public void shouldReturnEmptyFieldValueWhenResourceHasNoTag() + throws RemoteInvocationException, URISyntaxException { + var entityResponseMock = mock(EntityResponse.class); + when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); + when(entityClientMock.getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); + + var result = tagFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); + + assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock)); + } + + @Test + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { + when(entityClientMock.getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); + + var result = tagFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); + + assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock)); + } + + @Test + public void shouldReturnFieldValueWithTagOfTheResource() + throws RemoteInvocationException, URISyntaxException { + + var tagAssociation = new TagAssociation(); + tagAssociation.setTag(new TagUrn("test")); + var tags = new TagAssociationArray(tagAssociation); + var globalTags = new GlobalTags().setTags(tags); + var entityResponseMock = mock(EntityResponse.class); + var envelopedAspectMap = new 
EnvelopedAspectMap(); + envelopedAspectMap.put( + GLOBAL_TAGS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(globalTags.data()))); + when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); + when(entityClientMock.getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); + + var result = tagFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); + + assertEquals(Set.of(TAG_URN), result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(GLOBAL_TAGS_ASPECT_NAME)), + eq(systemAuthenticationMock)); + } +} From 59674b545715f568820d1ee9fe19dbe6a52f7993 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 23 Jan 2024 20:02:25 +0530 Subject: [PATCH 419/792] feat(role-assignment): add role assignment dropdown in the UI for groups (#9689) --- datahub-web-react/src/app/analytics/event.ts | 8 ++ .../group/AssignRoletoGroupConfirmation.tsx | 67 ++++++++++++ .../src/app/identity/group/GroupList.tsx | 34 +++++- .../src/app/identity/group/GroupListItem.tsx | 18 +++- .../app/identity/group/SelectRoleGroup.tsx | 102 ++++++++++++++++++ .../src/app/identity/group/cacheUtils.ts | 1 + datahub-web-react/src/graphql/group.graphql | 11 ++ .../datahubusage/DataHubUsageEventType.java | 1 + .../cypress/e2e/settings/managing_groups.js | 11 +- 9 files changed, 245 insertions(+), 8 deletions(-) create mode 100644 datahub-web-react/src/app/identity/group/AssignRoletoGroupConfirmation.tsx create mode 100644 datahub-web-react/src/app/identity/group/SelectRoleGroup.tsx diff --git a/datahub-web-react/src/app/analytics/event.ts b/datahub-web-react/src/app/analytics/event.ts index 2734026400933..dd670b35d49e0 100644 --- a/datahub-web-react/src/app/analytics/event.ts +++ b/datahub-web-react/src/app/analytics/event.ts @@ -48,6 +48,7 @@ export enum EventType { CreateResetCredentialsLinkEvent, DeleteEntityEvent, SelectUserRoleEvent, + SelectGroupRoleEvent, BatchSelectUserRoleEvent, CreatePolicyEvent, UpdatePolicyEvent, @@ -412,6 +413,12 @@ export interface SelectUserRoleEvent extends BaseEvent { userUrn: string; } +export interface SelectGroupRoleEvent extends BaseEvent { + type: EventType.SelectGroupRoleEvent; + roleUrn: string; + groupUrn?: string; +} + export interface BatchSelectUserRoleEvent extends BaseEvent { type: EventType.BatchSelectUserRoleEvent; roleUrn: string; @@ -668,6 +675,7 @@ export type Event = | CreateResetCredentialsLinkEvent | DeleteEntityEvent | SelectUserRoleEvent + | SelectGroupRoleEvent | BatchSelectUserRoleEvent | CreatePolicyEvent | UpdatePolicyEvent diff --git a/datahub-web-react/src/app/identity/group/AssignRoletoGroupConfirmation.tsx b/datahub-web-react/src/app/identity/group/AssignRoletoGroupConfirmation.tsx new file mode 100644 index 0000000000000..f08b607222de6 --- /dev/null +++ b/datahub-web-react/src/app/identity/group/AssignRoletoGroupConfirmation.tsx @@ -0,0 +1,67 @@ +import React from 'react'; +import { message, Popconfirm } from 'antd'; +import { useBatchAssignRoleMutation } from '../../../graphql/mutations.generated'; +import { DataHubRole } from '../../../types.generated'; +import analytics, { EventType } from '../../analytics'; + +type Props = { + visible: boolean; + roleToAssign: DataHubRole | undefined; + groupName: string; + groupUrn: string; + onClose: () => void; + onConfirm: () => void; +}; + 
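+// Rough shape of the request this component fires (a sketch; the mutation and
+// input type names are assumed from the generated useBatchAssignRoleMutation hook):
+//   mutation batchAssignRole($input: BatchAssignRoleInput!) {
+//       batchAssignRole(input: $input)
+//   }
+// with input = { roleUrn, actors: [groupUrn] }; leaving roleUrn undefined
+// removes the group's existing role instead of assigning one.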
+export default function AssignRoletoGroupConfirmation({
+    visible,
+    roleToAssign,
+    groupName,
+    groupUrn,
+    onClose,
+    onConfirm,
+}: Props) {
+    const [batchAssignRoleMutation] = useBatchAssignRoleMutation();
+    // eslint-disable-next-line
+    const batchAssignRole = () => {
+        batchAssignRoleMutation({
+            variables: {
+                input: {
+                    roleUrn: roleToAssign?.urn,
+                    actors: [groupUrn],
+                },
+            },
+        })
+            .then(({ errors }) => {
+                if (!errors) {
+                    analytics.event({
+                        type: EventType.SelectGroupRoleEvent,
+                        roleUrn: roleToAssign?.urn || 'undefined',
+                        groupUrn,
+                    });
+                    message.success({
+                        content: roleToAssign
+                            ? `Assigned role ${roleToAssign?.name} to group ${groupName}!`
+                            : `Removed role from group ${groupName}!`,
+                        duration: 2,
+                    });
+                    onConfirm();
+                }
+            })
+            .catch((e) => {
+                message.destroy();
+                message.error({
+                    content: roleToAssign
+                        ? `Failed to assign role ${roleToAssign?.name} to group ${groupName}: \n ${e.message || ''}`
+                        : `Failed to remove role from group ${groupName}: \n ${e.message || ''}`,
+                    duration: 3,
+                });
+            });
+    };
+
+    const assignRoleText = roleToAssign
+        ? `Would you like to assign the role ${roleToAssign?.name} to group ${groupName}?`
+        : `Would you like to remove group ${groupName}'s existing role?`;
+
+    return <Popconfirm title={assignRoleText} visible={visible} onConfirm={batchAssignRole} onCancel={onClose} />;
+}
diff --git a/datahub-web-react/src/app/identity/group/GroupList.tsx b/datahub-web-react/src/app/identity/group/GroupList.tsx
index 788b9eccafc0a..a8ebbedc2ac6d 100644
--- a/datahub-web-react/src/app/identity/group/GroupList.tsx
+++ b/datahub-web-react/src/app/identity/group/GroupList.tsx
@@ -4,7 +4,7 @@ import styled from 'styled-components';
 import { useLocation } from 'react-router';
 import * as QueryString from 'query-string';
 import { UsergroupAddOutlined } from '@ant-design/icons';
-import { CorpGroup } from '../../../types.generated';
+import { CorpGroup, DataHubRole } from '../../../types.generated';
 import { Message } from '../../shared/Message';
 import { useListGroupsQuery } from '../../../graphql/group.generated';
 import GroupListItem from './GroupListItem';
@@ -16,6 +16,7 @@ import { scrollToTop } from '../../shared/searchUtils';
 import { GROUPS_CREATE_GROUP_ID, GROUPS_INTRO_ID } from '../../onboarding/config/GroupsOnboardingConfig';
 import { OnboardingTour } from '../../onboarding/OnboardingTour';
 import { addGroupToListGroupsCache, DEFAULT_GROUP_LIST_PAGE_SIZE, removeGroupFromListGroupsCache } from './cacheUtils';
+import { useListRolesQuery } from '../../../graphql/role.generated';
 
 const GroupContainer = styled.div`
     display: flex;
@@ -53,7 +54,13 @@ export const GroupList = () => {
     const pageSize = DEFAULT_GROUP_LIST_PAGE_SIZE;
     const start = (page - 1) * pageSize;
 
-    const { loading, error, data, refetch, client } = useListGroupsQuery({
+    const {
+        loading,
+        error,
+        data,
+        refetch: groupRefetch,
+        client,
+    } = useListGroupsQuery({
         variables: {
             input: {
                 start,
@@ -76,6 +83,18 @@ export const GroupList = () => {
         removeGroupFromListGroupsCache(urn, client, page, pageSize);
     };
 
+    const { data: rolesData } = useListRolesQuery({
+        fetchPolicy: 'cache-first',
+        variables: {
+            input: {
+                start: 0,
+                count: 10,
+            },
+        },
+    });
+
+    const selectRoleOptions = rolesData?.listRoles?.roles?.map((role) => role as DataHubRole) || [];
+
     return (
         <>
@@ -114,7 +133,12 @@ export const GroupList = () => {
                     }}
                     dataSource={groups}
                     renderItem={(item: any) => (
-                        <GroupListItem onDelete={() => handleDelete(item.urn)} group={item as CorpGroup} />
+                        <GroupListItem
+                            onDelete={() => handleDelete(item.urn)}
+                            group={item as CorpGroup}
+                            selectRoleOptions={selectRoleOptions}
+                            refetch={groupRefetch}
+                        />
                     )}
                 />
@@ -131,9 +155,9 @@ export
const GroupList = () => {
             {isCreatingGroup && (
                 <CreateGroupModal
                     onClose={() => setIsCreatingGroup(false)}
-                    onCreate={(group) => {
+                    onCreate={(group: CorpGroup) => {
                         addGroupToListGroupsCache(group, client);
-                        setTimeout(() => refetch(), 3000);
+                        setTimeout(() => groupRefetch(), 3000);
                     }}
                 />
             )}
diff --git a/datahub-web-react/src/app/identity/group/GroupListItem.tsx b/datahub-web-react/src/app/identity/group/GroupListItem.tsx
index 40c5afbbda5ef..74c0a8afb4d02 100644
--- a/datahub-web-react/src/app/identity/group/GroupListItem.tsx
+++ b/datahub-web-react/src/app/identity/group/GroupListItem.tsx
@@ -3,16 +3,19 @@ import React from 'react';
 import styled from 'styled-components';
 import { List, Tag, Tooltip, Typography } from 'antd';
 import { Link } from 'react-router-dom';
-import { CorpGroup, EntityType, OriginType } from '../../../types.generated';
+import { CorpGroup, DataHubRole, EntityType, OriginType } from '../../../types.generated';
 import CustomAvatar from '../../shared/avatar/CustomAvatar';
 import { useEntityRegistry } from '../../useEntityRegistry';
 import EntityDropdown from '../../entity/shared/EntityDropdown';
 import { EntityMenuItems } from '../../entity/shared/EntityDropdown/EntityDropdown';
 import { getElasticCappedTotalValueText } from '../../entity/shared/constants';
+import SelectRoleGroup from './SelectRoleGroup';
 
 type Props = {
     group: CorpGroup;
     onDelete?: () => void;
+    selectRoleOptions: Array<DataHubRole>;
+    refetch?: () => void;
 };
 
 const GroupItemContainer = styled.div`
@@ -35,11 +38,16 @@ const GroupItemButtonGroup = styled.div`
     align-items: center;
 `;
 
-export default function GroupListItem({ group, onDelete }: Props) {
+export default function GroupListItem({ group, onDelete, selectRoleOptions, refetch }: Props) {
     const entityRegistry = useEntityRegistry();
     const displayName = entityRegistry.getDisplayName(EntityType.CorpGroup, group);
     const isExternalGroup: boolean = group.origin?.type === OriginType.External;
     const externalGroupType: string = group.origin?.externalType || 'outside DataHub';
+    const castedCorpUser = group as any;
+    const groupRelationships = castedCorpUser?.roles?.relationships;
+    const userRole =
+        groupRelationships && groupRelationships.length > 0 && (groupRelationships[0]?.entity as DataHubRole);
+    const groupRoleUrn = userRole && userRole.urn;
 
     return (
@@ -66,6 +74,12 @@ export default function GroupListItem({ group, onDelete }: Props) {
                 )}
+                <SelectRoleGroup
+                    group={group}
+                    groupRoleUrn={groupRoleUrn || ''}
+                    selectRoleOptions={selectRoleOptions}
+                    refetch={refetch}
+                />
diff --git a/datahub-web-react/src/app/identity/group/SelectRoleGroup.tsx b/datahub-web-react/src/app/identity/group/SelectRoleGroup.tsx
new file mode 100644
--- /dev/null
+++ b/datahub-web-react/src/app/identity/group/SelectRoleGroup.tsx
+type Props = {
+    group: CorpGroup;
+    groupRoleUrn: string;
+    selectRoleOptions: Array<DataHubRole>;
+    refetch?: () => void;
+};
+
+const RoleSelect = styled(Select)<{ color?: string }>`
+    min-width: 105px;
+    ${(props) => (props.color ?
` color: ${props.color};` : '')}
+`;
+
+const RoleIcon = styled.span`
+    margin-right: 6px;
+    font-size: 12px;
+`;
+
+export default function SelectRoleGroup({ group, groupRoleUrn, selectRoleOptions, refetch }: Props) {
+    const client = useApolloClient();
+    const rolesMap: Map<string, DataHubRole> = new Map();
+    selectRoleOptions.forEach((role) => {
+        rolesMap.set(role.urn, role);
+    });
+    const allSelectRoleOptions = [{ urn: NO_ROLE_URN, name: NO_ROLE_TEXT }, ...selectRoleOptions];
+    const selectOptions = allSelectRoleOptions.map((role) => {
+        return (
+            <Select.Option key={role.urn} value={role.urn}>
+                <RoleIcon>{mapRoleIcon(role.name)}</RoleIcon>
+                {role.name}
+            </Select.Option>
+        );
+    });
+
+    const defaultRoleUrn = groupRoleUrn || NO_ROLE_URN;
+    const [currentRoleUrn, setCurrentRoleUrn] = useState(defaultRoleUrn);
+    const [isViewingAssignRole, setIsViewingAssignRole] = useState(false);
+
+    useEffect(() => {
+        setCurrentRoleUrn(defaultRoleUrn);
+    }, [defaultRoleUrn]);
+
+    const onSelectRole = (roleUrn: string) => {
+        setCurrentRoleUrn(roleUrn);
+        setIsViewingAssignRole(true);
+    };
+
+    const onCancel = () => {
+        setCurrentRoleUrn(defaultRoleUrn);
+        setIsViewingAssignRole(false);
+    };
+
+    const onConfirm = () => {
+        setIsViewingAssignRole(false);
+        setTimeout(() => {
+            refetch?.();
+            clearRoleListCache(client); // Update roles.
+        }, 3000);
+    };
+
+    // wait for available roles to load
+    if (!selectRoleOptions.length) return null;
+
+    return (
+        <>
+            <RoleSelect
+                placeholder={
+                    <>
+                        {NO_ROLE_TEXT}
+                    </>
+                }
+                value={currentRoleUrn}
+                onChange={(e) => onSelectRole(e as string)}
+                color={currentRoleUrn === NO_ROLE_URN ? ANTD_GRAY[6] : undefined}
+            >
+                {selectOptions}
+            </RoleSelect>
+            <AssignRoletoGroupConfirmation
+                visible={isViewingAssignRole}
+                roleToAssign={rolesMap.get(currentRoleUrn)}
+                groupName={group?.name || ''}
+                groupUrn={group.urn}
+                onClose={onCancel}
+                onConfirm={onConfirm}
+            />
+        </>
+    );
+}
diff --git a/datahub-web-react/src/app/identity/group/cacheUtils.ts b/datahub-web-react/src/app/identity/group/cacheUtils.ts
index d4ecd40a40a97..272b9f841d25c 100644
--- a/datahub-web-react/src/app/identity/group/cacheUtils.ts
+++ b/datahub-web-react/src/app/identity/group/cacheUtils.ts
@@ -44,6 +44,7 @@ const createFullGroup = (baseGroup) => {
             email: null,
         },
         memberCount: null,
+        roles: null,
     };
 };
 
diff --git a/datahub-web-react/src/graphql/group.graphql b/datahub-web-react/src/graphql/group.graphql
index 1007721e51a4e..05df34adbf7e9 100644
--- a/datahub-web-react/src/graphql/group.graphql
+++ b/datahub-web-react/src/graphql/group.graphql
@@ -195,6 +195,17 @@ query listGroups($input: ListGroupsInput!) {
             ) {
                 total
             }
+            roles: relationships(input: { types: ["IsMemberOfRole"], direction: OUTGOING, start: 0 }) {
+                relationships {
+                    entity {
+                        ...
on DataHubRole { + urn + type + name + } + } + } + } } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java index 518b5f28a5b99..ea86d2c0c9842 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java @@ -43,6 +43,7 @@ public enum DataHubUsageEventType { CREATE_RESET_CREDENTIALS_LINK_EVENT("CreateResetCredentialsLinkEvent"), DELETE_ENTITY_EVENT("DeleteEntityEvent"), SELECT_USER_ROLE_EVENT("SelectUserRoleEvent"), + SELECT_GROUP_ROLE_EVENT("SelectGroupRoleEvent"), BATCH_SELECT_USER_ROLE_EVENT("BatchSelectUserRoleEvent"), CREATE_POLICY_EVENT("CreatePolicyEvent"), UPDATE_POLICY_EVENT("UpdatePolicyEvent"), diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js index 978a245c3d9e3..8421bd288edf0 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js @@ -106,10 +106,19 @@ describe("create and manage group", () => { cy.waitTextVisible(username); }); + it("assign role to group ", () => { + cy.loginWithCredentials(); + cy.visit("/settings/identities/groups"); + cy.get(`[href="/group/urn:li:corpGroup:${test_id}"]`).next().click() + cy.get('.ant-select-item-option').contains('Admin').click() + cy.get('button.ant-btn-primary').contains('OK').click(); + cy.get(`[href="/group/urn:li:corpGroup:${test_id}"]`).waitTextVisible('Admin'); + }); + it("remove group", () => { cy.loginWithCredentials(); cy.visit("/settings/identities/groups"); - cy.get(`[href="/group/urn:li:corpGroup:${test_id}"]`).next().click(); + cy.get(`[href="/group/urn:li:corpGroup:${test_id}"]`).openThreeDotDropdown() cy.clickOptionWithText("Delete"); cy.clickOptionWithText("Yes"); cy.waitTextVisible("Deleted Group!"); From 35c4df1e9bb71d348ded50df1edf502bf55099fe Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 23 Jan 2024 08:28:08 -0800 Subject: [PATCH 420/792] feat(ingest/looker): add browse paths for charts (#9639) Co-authored-by: Tamas Nemeth --- metadata-ingestion/src/datahub/emitter/mcp.py | 4 ++-- .../ingestion/source/looker/looker_source.py | 8 +++++++- .../utilities/advanced_thread_executor.py | 16 +++++++++++++++- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/emitter/mcp.py b/metadata-ingestion/src/datahub/emitter/mcp.py index d6aa695665e4e..47717f3c1ed19 100644 --- a/metadata-ingestion/src/datahub/emitter/mcp.py +++ b/metadata-ingestion/src/datahub/emitter/mcp.py @@ -1,6 +1,6 @@ import dataclasses import json -from typing import TYPE_CHECKING, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple, Union from datahub.emitter.aspect import ASPECT_MAP, JSON_CONTENT_TYPE, TIMESERIES_ASPECT_MAP from datahub.emitter.serialization_helper import post_json_transform, pre_json_transform @@ -100,7 +100,7 @@ def __post_init__(self) -> None: @classmethod def construct_many( - cls, entityUrn: str, aspects: List[Optional[_Aspect]] + cls, entityUrn: str, aspects: Sequence[Optional[_Aspect]] ) -> List["MetadataChangeProposalWrapper"]: return [cls(entityUrn=entityUrn, aspect=aspect) for aspect in aspects if aspect] diff --git 
a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 542bf64eb2f49..ab9887c900571 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -608,8 +608,14 @@ def _make_chart_metadata_events( else "" }, ) - chart_snapshot.aspects.append(chart_info) + + if dashboard and dashboard.folder_path is not None: + browse_path = BrowsePathsClass( + paths=[f"/looker/{dashboard.folder_path}/{dashboard.title}"] + ) + chart_snapshot.aspects.append(browse_path) + if dashboard is not None: ownership = self.get_ownership(dashboard) if ownership is not None: diff --git a/metadata-ingestion/src/datahub/utilities/advanced_thread_executor.py b/metadata-ingestion/src/datahub/utilities/advanced_thread_executor.py index 6ee47f028b7eb..2afb6088072fe 100644 --- a/metadata-ingestion/src/datahub/utilities/advanced_thread_executor.py +++ b/metadata-ingestion/src/datahub/utilities/advanced_thread_executor.py @@ -2,6 +2,7 @@ import collections import concurrent.futures +import logging import time from concurrent.futures import Future, ThreadPoolExecutor from threading import BoundedSemaphore @@ -20,6 +21,7 @@ from datahub.ingestion.api.closeable import Closeable +logger = logging.getLogger(__name__) _R = TypeVar("_R") _PARTITION_EXECUTOR_FLUSH_SLEEP_INTERVAL = 0.05 @@ -102,7 +104,19 @@ def _system_done_callback(future: Future) -> None: fn, args, kwargs, user_done_callback = self._pending_by_key[ key ].popleft() - self._submit_nowait(key, fn, args, kwargs, user_done_callback) + + try: + self._submit_nowait(key, fn, args, kwargs, user_done_callback) + except RuntimeError as e: + if self._executor._shutdown: + # If we're in shutdown mode, then we can't submit any more requests. + # That means we'll need to drop requests on the floor, which is to + # be expected in shutdown mode. + # The only reason we'd normally be in shutdown here is during + # Python exit (e.g. KeyboardInterrupt), so this is reasonable. 
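+                    # A minimal standalone sketch of the stdlib behavior being
+                    # handled here (not part of this executor):
+                    #   ex = ThreadPoolExecutor(max_workers=1)
+                    #   ex.shutdown(wait=False)
+                    #   ex.submit(print)  # RuntimeError: cannot schedule new futures after shutdown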
+ logger.debug("Dropping request due to shutdown") + else: + raise e else: # If there are no pending requests for this key, mark the key From 4aa26c2a2f071eaff388ec745ad0ad7d7a4853d9 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Wed, 24 Jan 2024 00:50:23 +0530 Subject: [PATCH 421/792] =?UTF-8?q?fix(ui):=20Fix=20error=20message=20when?= =?UTF-8?q?=20attempting=20to=20delete=20home=20page=20posts=20with?= =?UTF-8?q?=E2=80=A6=20(#9667)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/app/settings/posts/CreatePostModal.tsx | 11 +++++++---- .../src/app/settings/posts/PostItemMenu.tsx | 10 +++++++--- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/datahub-web-react/src/app/settings/posts/CreatePostModal.tsx b/datahub-web-react/src/app/settings/posts/CreatePostModal.tsx index 10c4ee880fe85..2a3e2204f2392 100644 --- a/datahub-web-react/src/app/settings/posts/CreatePostModal.tsx +++ b/datahub-web-react/src/app/settings/posts/CreatePostModal.tsx @@ -13,6 +13,7 @@ import { useEnterKeyListener } from '../../shared/useEnterKeyListener'; import { MediaType, PostContentType, PostType } from '../../../types.generated'; import { useCreatePostMutation, useUpdatePostMutation } from '../../../graphql/mutations.generated'; import { PostEntry } from './PostsListColumns'; +import handleGraphQLError from '../../shared/handleGraphQLError'; type Props = { editData: PostEntry; @@ -84,10 +85,12 @@ export default function CreatePostModal({ onClose, onCreate, editData, onEdit }: form.resetFields(); } }) - .catch((e) => { - message.destroy(); - message.error({ content: 'Failed to create Post! An unknown error occured.', duration: 3 }); - console.error('Failed to create Post:', e.message); + .catch((error) => { + handleGraphQLError({ + error, + defaultMessage: 'Failed to create Post! An unexpected error occurred', + permissionMessage: 'Unauthorized to create Post. Please contact your DataHub administrator.', + }); }); onClose(); }; diff --git a/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx b/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx index 3708c04ab1ad3..10e2996c36f69 100644 --- a/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx +++ b/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx @@ -3,6 +3,7 @@ import { DeleteOutlined, EditOutlined } from '@ant-design/icons'; import { Dropdown, Menu, message, Modal } from 'antd'; import { MenuIcon } from '../../entity/shared/EntityDropdown/EntityDropdown'; import { useDeletePostMutation } from '../../../graphql/post.generated'; +import handleGraphQLError from '../../shared/handleGraphQLError'; type Props = { urn: string; @@ -26,9 +27,12 @@ export default function PostItemMenu({ title, urn, onDelete, onEdit }: Props) { onDelete?.(); } }) - .catch(() => { - message.destroy(); - message.error({ content: `Failed to delete Post!: An unknown error occurred.`, duration: 3 }); + .catch((error) => { + handleGraphQLError({ + error, + defaultMessage: 'Failed to delete Post! An unexpected error occurred', + permissionMessage: 'Unauthorized to delete Post. 
Please contact your DataHub administrator.', + }); }); }; From bf538671a2a49c3276ab5b6d9b04282432889c1a Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Tue, 23 Jan 2024 16:30:02 -0500 Subject: [PATCH 422/792] feat(ui) Support auto-render aspects on remaining entity types (#9696) --- datahub-web-react/src/Mocks.tsx | 16 +++++++++++++++- datahub-web-react/src/graphql/chart.graphql | 3 +++ datahub-web-react/src/graphql/container.graphql | 3 +++ datahub-web-react/src/graphql/dashboard.graphql | 3 +++ datahub-web-react/src/graphql/dataFlow.graphql | 3 +++ datahub-web-react/src/graphql/dataJob.graphql | 3 +++ .../src/graphql/dataProduct.graphql | 3 +++ datahub-web-react/src/graphql/dataset.graphql | 8 +------- datahub-web-react/src/graphql/domain.graphql | 3 +++ datahub-web-react/src/graphql/fragments.graphql | 10 ++++++++++ .../src/graphql/glossaryNode.graphql | 3 +++ .../src/graphql/glossaryTerm.graphql | 3 +++ datahub-web-react/src/graphql/group.graphql | 3 +++ datahub-web-react/src/graphql/mlFeature.graphql | 3 +++ .../src/graphql/mlFeatureTable.graphql | 3 +++ datahub-web-react/src/graphql/mlModel.graphql | 3 +++ .../src/graphql/mlModelGroup.graphql | 3 +++ .../src/graphql/mlPrimaryKey.graphql | 3 +++ datahub-web-react/src/graphql/tag.graphql | 3 +++ datahub-web-react/src/graphql/user.graphql | 3 +++ 20 files changed, 77 insertions(+), 8 deletions(-) diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index b32b296af38c5..03d6f4a624c3d 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -88,6 +88,7 @@ export const user1 = { editableInfo: null, properties: null, editableProperties: null, + autoRenderAspects: [], }; const user2 = { @@ -295,6 +296,7 @@ export const dataset1 = { statsSummary: null, embed: null, browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + autoRenderAspects: [], }; export const dataset2 = { @@ -390,6 +392,7 @@ export const dataset2 = { statsSummary: null, embed: null, browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + autoRenderAspects: [], }; export const dataset3 = { @@ -595,7 +598,7 @@ export const dataset3 = { viewProperties: null, autoRenderAspects: [ { - __typename: 'AutoRenderAspect', + __typename: 'RawAspect', aspectName: 'autoRenderAspect', payload: '{ "values": [{ "autoField1": "autoValue1", "autoField2": "autoValue2" }] }', renderSpec: { @@ -962,6 +965,7 @@ export const container1 = { externalUrl: null, __typename: 'ContainerProperties', }, + autoRenderAspects: [], __typename: 'Container', } as Container; @@ -976,6 +980,7 @@ export const container2 = { externalUrl: null, __typename: 'ContainerProperties', }, + autoRenderAspects: [], __typename: 'Container', } as Container; @@ -1023,6 +1028,7 @@ export const glossaryTerm1 = { }, parentNodes: null, deprecation: null, + autoRenderAspects: [], } as GlossaryTerm; const glossaryTerm2 = { @@ -1095,6 +1101,7 @@ const glossaryTerm2 = { __typename: 'EntityRelationshipsResult', }, parentNodes: null, + autoRenderAspects: [], __typename: 'GlossaryTerm', }; @@ -1161,6 +1168,7 @@ const glossaryTerm3 = { __typename: 'GlossaryRelatedTerms', }, deprecation: null, + autoRenderAspects: [], __typename: 'GlossaryTerm', } as GlossaryTerm; @@ -1257,6 +1265,7 @@ export const sampleTag = { description: 'sample tag description', colorHex: 'sample tag color', }, + autoRenderAspects: [], }; export const dataFlow1 = { @@ -1328,6 +1337,7 @@ export const dataFlow1 = { }, domain: null, deprecation: null, + 
autoRenderAspects: [], } as DataFlow; export const dataJob1 = { @@ -1414,6 +1424,7 @@ export const dataJob1 = { domain: null, status: null, deprecation: null, + autoRenderAspects: [], } as DataJob; export const dataJob2 = { @@ -1483,6 +1494,7 @@ export const dataJob2 = { upstream: null, downstream: null, deprecation: null, + autoRenderAspects: [], } as DataJob; export const dataJob3 = { @@ -1555,6 +1567,7 @@ export const dataJob3 = { downstream: null, status: null, deprecation: null, + autoRenderAspects: [], } as DataJob; export const mlModel = { @@ -1636,6 +1649,7 @@ export const mlModel = { downstream: null, status: null, deprecation: null, + autoRenderAspects: [], } as MlModel; export const dataset1FetchedEntity = { diff --git a/datahub-web-react/src/graphql/chart.graphql b/datahub-web-react/src/graphql/chart.graphql index a4b430720fa3d..da2dae3bd6d86 100644 --- a/datahub-web-react/src/graphql/chart.graphql +++ b/datahub-web-react/src/graphql/chart.graphql @@ -103,6 +103,9 @@ query getChart($urn: String!) { subTypes { typeNames } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/container.graphql b/datahub-web-react/src/graphql/container.graphql index 94d2f53ee30a5..e75b26b45aacd 100644 --- a/datahub-web-react/src/graphql/container.graphql +++ b/datahub-web-react/src/graphql/container.graphql @@ -54,5 +54,8 @@ query getContainer($urn: String!) { status { removed } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/dashboard.graphql b/datahub-web-react/src/graphql/dashboard.graphql index d77f6f5c8107f..2690de0c507de 100644 --- a/datahub-web-react/src/graphql/dashboard.graphql +++ b/datahub-web-react/src/graphql/dashboard.graphql @@ -7,6 +7,9 @@ query getDashboard($urn: String!) { datasets: relationships(input: { types: ["Consumes"], direction: OUTGOING, start: 0, count: 100 }) { ...fullRelationshipResults } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/dataFlow.graphql b/datahub-web-react/src/graphql/dataFlow.graphql index 122b35f7b5704..fccec29e082d6 100644 --- a/datahub-web-react/src/graphql/dataFlow.graphql +++ b/datahub-web-react/src/graphql/dataFlow.graphql @@ -102,6 +102,9 @@ query getDataFlow($urn: String!) { } } } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/dataJob.graphql b/datahub-web-react/src/graphql/dataJob.graphql index a41c242a71b8f..161ea859fc36a 100644 --- a/datahub-web-react/src/graphql/dataJob.graphql +++ b/datahub-web-react/src/graphql/dataJob.graphql @@ -6,6 +6,9 @@ query getDataJob($urn: String!) { start total } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/dataProduct.graphql b/datahub-web-react/src/graphql/dataProduct.graphql index 464ab7cc12164..4c44639e89d7d 100644 --- a/datahub-web-react/src/graphql/dataProduct.graphql +++ b/datahub-web-react/src/graphql/dataProduct.graphql @@ -1,6 +1,9 @@ query getDataProduct($urn: String!) 
{ dataProduct(urn: $urn) { ...dataProductFields + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/dataset.graphql b/datahub-web-react/src/graphql/dataset.graphql index 658ce2b47c567..57c74e0c65d69 100644 --- a/datahub-web-react/src/graphql/dataset.graphql +++ b/datahub-web-react/src/graphql/dataset.graphql @@ -112,13 +112,7 @@ fragment nonSiblingDatasetFields on Dataset { } ...viewProperties autoRenderAspects: aspects(input: { autoRenderOnly: true }) { - aspectName - payload - renderSpec { - displayType - displayName - key - } + ...autoRenderAspectFields } status { removed diff --git a/datahub-web-react/src/graphql/domain.graphql b/datahub-web-react/src/graphql/domain.graphql index 170a5b5df476b..76c59ad0ed2ae 100644 --- a/datahub-web-react/src/graphql/domain.graphql +++ b/datahub-web-react/src/graphql/domain.graphql @@ -27,6 +27,9 @@ query getDomain($urn: String!) { } } } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } ...domainEntitiesFields } } diff --git a/datahub-web-react/src/graphql/fragments.graphql b/datahub-web-react/src/graphql/fragments.graphql index b77ef9d1ad29c..bb06ccb90a46d 100644 --- a/datahub-web-react/src/graphql/fragments.graphql +++ b/datahub-web-react/src/graphql/fragments.graphql @@ -1162,3 +1162,13 @@ fragment entityDisplayNameFields on Entity { instanceId } } + +fragment autoRenderAspectFields on RawAspect { + aspectName + payload + renderSpec { + displayType + displayName + key + } +} diff --git a/datahub-web-react/src/graphql/glossaryNode.graphql b/datahub-web-react/src/graphql/glossaryNode.graphql index 9cb01b98b3efb..4a531eb55248f 100644 --- a/datahub-web-react/src/graphql/glossaryNode.graphql +++ b/datahub-web-react/src/graphql/glossaryNode.graphql @@ -27,6 +27,9 @@ query getGlossaryNode($urn: String!) { canManageEntity canManageChildren } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } children: relationships( input: { types: ["IsPartOf"] diff --git a/datahub-web-react/src/graphql/glossaryTerm.graphql b/datahub-web-react/src/graphql/glossaryTerm.graphql index f2a311f50fe51..4eb0747e0aeba 100644 --- a/datahub-web-react/src/graphql/glossaryTerm.graphql +++ b/datahub-web-react/src/graphql/glossaryTerm.graphql @@ -87,6 +87,9 @@ query getGlossaryTerm($urn: String!, $start: Int, $count: Int) { privileges { canManageEntity } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/group.graphql b/datahub-web-react/src/graphql/group.graphql index 05df34adbf7e9..2b8db6483632d 100644 --- a/datahub-web-react/src/graphql/group.graphql +++ b/datahub-web-react/src/graphql/group.graphql @@ -24,6 +24,9 @@ query getGroup($urn: String!, $membersCount: Int!) { email slack } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } ownership { ...ownershipFields } diff --git a/datahub-web-react/src/graphql/mlFeature.graphql b/datahub-web-react/src/graphql/mlFeature.graphql index f9cd2b66d900e..9fcb871dc4f49 100644 --- a/datahub-web-react/src/graphql/mlFeature.graphql +++ b/datahub-web-react/src/graphql/mlFeature.graphql @@ -4,5 +4,8 @@ query getMLFeature($urn: String!) 
{ featureTables: relationships(input: { types: ["Contains"], direction: INCOMING, start: 0, count: 100 }) { ...fullRelationshipResults } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/mlFeatureTable.graphql b/datahub-web-react/src/graphql/mlFeatureTable.graphql index 3c52dccf7672c..4bf3972e722b6 100644 --- a/datahub-web-react/src/graphql/mlFeatureTable.graphql +++ b/datahub-web-react/src/graphql/mlFeatureTable.graphql @@ -1,5 +1,8 @@ query getMLFeatureTable($urn: String!) { mlFeatureTable(urn: $urn) { ...nonRecursiveMLFeatureTable + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/mlModel.graphql b/datahub-web-react/src/graphql/mlModel.graphql index e5330480039f8..5375485a8a9f6 100644 --- a/datahub-web-react/src/graphql/mlModel.graphql +++ b/datahub-web-react/src/graphql/mlModel.graphql @@ -18,5 +18,8 @@ query getMLModel($urn: String!) { } } } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/mlModelGroup.graphql b/datahub-web-react/src/graphql/mlModelGroup.graphql index 12a1c04586198..57249d543bb86 100644 --- a/datahub-web-react/src/graphql/mlModelGroup.graphql +++ b/datahub-web-react/src/graphql/mlModelGroup.graphql @@ -21,5 +21,8 @@ query getMLModelGroup($urn: String!) { ) { ...fullRelationshipResults } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/mlPrimaryKey.graphql b/datahub-web-react/src/graphql/mlPrimaryKey.graphql index a70550a44a88d..2bfceb37ce16b 100644 --- a/datahub-web-react/src/graphql/mlPrimaryKey.graphql +++ b/datahub-web-react/src/graphql/mlPrimaryKey.graphql @@ -4,5 +4,8 @@ query getMLPrimaryKey($urn: String!) { featureTables: relationships(input: { types: ["KeyedBy"], direction: INCOMING, start: 0, count: 100 }) { ...fullRelationshipResults } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/tag.graphql b/datahub-web-react/src/graphql/tag.graphql index 37aaebc265032..031d923276bfe 100644 --- a/datahub-web-react/src/graphql/tag.graphql +++ b/datahub-web-react/src/graphql/tag.graphql @@ -11,6 +11,9 @@ query getTag($urn: String!) { ownership { ...ownershipFields } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } } } diff --git a/datahub-web-react/src/graphql/user.graphql b/datahub-web-react/src/graphql/user.graphql index 4757b8a7e28dd..48c0d7de8c63c 100644 --- a/datahub-web-react/src/graphql/user.graphql +++ b/datahub-web-react/src/graphql/user.graphql @@ -27,6 +27,9 @@ query getUser($urn: String!, $groupsCount: Int!) 
{ globalTags { ...globalTagsFields } + autoRenderAspects: aspects(input: { autoRenderOnly: true }) { + ...autoRenderAspectFields + } groups: relationships( input: { types: ["IsMemberOfGroup", "IsMemberOfNativeGroup"] From 1b0ed540c13baee1f75b1a2429d1cd4bebc87030 Mon Sep 17 00:00:00 2001 From: Davi Arnaut Date: Tue, 23 Jan 2024 14:49:39 -0800 Subject: [PATCH 423/792] fix(consumer): wait for internal schema registry (#9699) --- docker/datahub-mae-consumer/start.sh | 3 +++ docker/datahub-mce-consumer/start.sh | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docker/datahub-mae-consumer/start.sh b/docker/datahub-mae-consumer/start.sh index 2af7ce6855d1c..f839d3646bdc6 100755 --- a/docker/datahub-mae-consumer/start.sh +++ b/docker/datahub-mae-consumer/start.sh @@ -33,6 +33,9 @@ fi if [[ ${GRAPH_SERVICE_IMPL:-} != elasticsearch ]] && [[ ${SKIP_NEO4J_CHECK:-false} != true ]]; then dockerize_args+=("-wait" "$NEO4J_HOST") fi +if [[ "${KAFKA_SCHEMAREGISTRY_URL:-}" && ${SKIP_SCHEMA_REGISTRY_CHECK:-false} != true ]]; then + dockerize_args+=("-wait" "$KAFKA_SCHEMAREGISTRY_URL") +fi JAVA_TOOL_OPTIONS="${JDK_JAVA_OPTIONS:-}${JAVA_OPTS:+ $JAVA_OPTS}${JMX_OPTS:+ $JMX_OPTS}" if [[ ${ENABLE_OTEL:-false} == true ]]; then diff --git a/docker/datahub-mce-consumer/start.sh b/docker/datahub-mce-consumer/start.sh index ef183d41856aa..a00127a841188 100755 --- a/docker/datahub-mce-consumer/start.sh +++ b/docker/datahub-mce-consumer/start.sh @@ -5,6 +5,11 @@ if [[ $SKIP_KAFKA_CHECK != true ]]; then WAIT_FOR_KAFKA=" -wait tcp://$(echo $KAFKA_BOOTSTRAP_SERVER | sed 's/,/ -wait tcp:\/\//g') " fi +WAIT_FOR_SCHEMA_REGISTRY="" +if [[ "$KAFKA_SCHEMAREGISTRY_URL" && $SKIP_SCHEMA_REGISTRY_CHECK != true ]]; then + WAIT_FOR_SCHEMA_REGISTRY="-wait $KAFKA_SCHEMAREGISTRY_URL" +fi + OTEL_AGENT="" if [[ $ENABLE_OTEL == true ]]; then OTEL_AGENT="-javaagent:opentelemetry-javaagent.jar " @@ -17,5 +22,6 @@ fi exec dockerize \ $WAIT_FOR_KAFKA \ + $WAIT_FOR_SCHEMA_REGISTRY \ -timeout 240s \ - java $JAVA_OPTS $JMX_OPTS $OTEL_AGENT $PROMETHEUS_AGENT -jar /datahub/datahub-mce-consumer/bin/mce-consumer-job.jar \ No newline at end of file + java $JAVA_OPTS $JMX_OPTS $OTEL_AGENT $PROMETHEUS_AGENT -jar /datahub/datahub-mce-consumer/bin/mce-consumer-job.jar From 264278861bf7a23e1938d5e8cf16ec78260e9e0a Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 23 Jan 2024 17:49:23 -0600 Subject: [PATCH 424/792] feat(build): support jdk8 spark lineage (#9697) --- .github/workflows/build-and-test.yml | 4 + build.gradle | 105 +++++++++++++++--- .../metadata/aspect/batch/AspectsBatch.java | 11 +- .../aspect/patch/GenericJsonPatch.java | 3 +- .../aspect/plugins/PluginFactory.java | 11 +- .../plugins/config/PluginConfiguration.java | 9 +- .../plugins/validation/AspectRetriever.java | 6 +- .../models/registry/ConfigEntityRegistry.java | 9 +- .../models/registry/PatchEntityRegistry.java | 6 +- .../config/EntityRegistryLoadResult.java | 9 +- ingestion-scheduler/build.gradle | 5 +- metadata-events/mxe-avro/build.gradle | 8 +- metadata-events/mxe-registration/build.gradle | 5 +- metadata-events/mxe-schemas/build.gradle | 6 +- metadata-events/mxe-utils-avro/build.gradle | 1 + .../java/datahub-client/build.gradle | 45 ++------ metadata-io/build.gradle | 10 +- .../request/AggregationQueryBuilder.java | 17 +-- .../query/request/SearchRequestHandler.java | 18 +-- .../service/UpdateIndicesService.java | 3 +- .../ElasticSearchTimeseriesAspectService.java | 2 +- 
metadata-jobs/common/build.gradle | 2 +- metadata-models/build.gradle | 8 +- .../main/resources/JavaSpring/model.mustache | 6 +- metadata-service/configuration/build.gradle | 7 +- .../restli-servlet-impl/build.gradle | 2 +- .../EntityRecommendationSource.java | 3 +- .../metadata/service/RollbackService.java | 4 +- metadata-utils/build.gradle | 1 + 29 files changed, 205 insertions(+), 121 deletions(-) diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 060d345a6b7d9..6b7f2b5035c25 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -83,6 +83,10 @@ jobs: ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel env: NODE_OPTIONS: "--max-old-space-size=3072" + - name: Gradle compile (jdk8) for legacy Spark + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} + run: | + ./gradlew -PjavaClassVersionDefault=8 :metadata-integration:java:spark-lineage:compileJava - uses: actions/upload-artifact@v3 if: always() with: diff --git a/build.gradle b/build.gradle index 27455f8592e6f..ba61d97f0ed6e 100644 --- a/build.gradle +++ b/build.gradle @@ -1,6 +1,32 @@ buildscript { - ext.jdkVersion = 17 - ext.javaClassVersion = 11 + ext.jdkVersionDefault = 17 + ext.javaClassVersionDefault = 11 + + ext.jdkVersion = { p -> + // If Spring 6 is present, hard dependency on jdk17 + if (p.configurations.any { it.getDependencies().any{ + (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) + || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) + }}) { + return 17 + } else { + // otherwise we can use the preferred default which can be overridden with a property: -PjdkVersionDefault + return p.hasProperty('jdkVersionDefault') ? Integer.valueOf((String) p.getProperty('jdkVersionDefault')) : ext.jdkVersionDefault + } + } + + ext.javaClassVersion = { p -> + // If Spring 6 is present, hard dependency on jdk17 + if (p.configurations.any { it.getDependencies().any{ + (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) + || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) + }}) { + return 17 + } else { + // otherwise we can use the preferred default which can be overridden with a property: -PjavaClassVersionDefault + return p.hasProperty('javaClassVersionDefault') ? 
Integer.valueOf((String) p.getProperty('javaClassVersionDefault')) : ext.javaClassVersionDefault + } + } ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md @@ -217,6 +243,7 @@ project.ext.externalDependency = [ 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', + 'springBootAutoconfigureJdk11': 'org.springframework.boot:spring-boot-autoconfigure:2.7.18', 'testng': 'org.testng:testng:7.8.0', 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, @@ -252,23 +279,27 @@ allprojects { } } - if (project.plugins.hasPlugin('java') + /** + * If making changes to this section also see the sections for pegasus below + * which use project.plugins.hasPlugin('pegasus') + **/ + if (!project.plugins.hasPlugin('pegasus') && (project.plugins.hasPlugin('java') || project.plugins.hasPlugin('java-library') - || project.plugins.hasPlugin('application') - || project.plugins.hasPlugin('pegasus')) { + || project.plugins.hasPlugin('application'))) { java { toolchain { - languageVersion = JavaLanguageVersion.of(jdkVersion) + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) } } compileJava { - options.release = javaClassVersion + options.release = javaClassVersion(project) } + tasks.withType(JavaCompile).configureEach { javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(jdkVersion) + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) } // Puts parameter names into compiled class files, necessary for Spring 6 options.compilerArgs.add("-parameters") @@ -276,24 +307,28 @@ allprojects { tasks.withType(JavaExec).configureEach { javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(jdkVersion) + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) } } + } + + // not duplicated, need to set this outside and inside afterEvaluate + afterEvaluate { + /** + * If making changes to this section also see the sections for pegasus below + * which use project.plugins.hasPlugin('pegasus') + **/ + if (!project.plugins.hasPlugin('pegasus') && (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application'))) { - // not duplicated, need to set this outside and inside afterEvaluate - afterEvaluate { compileJava { - options.release = javaClassVersion - } - tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(jdkVersion) - } + options.release = javaClassVersion(project) } tasks.withType(JavaExec).configureEach { javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(jdkVersion) + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) } } } @@ -368,6 +403,30 @@ subprojects { dataTemplateCompile externalDependency.annotationApi // support > jdk8 restClientCompile spec.product.pegasus.restliClient } + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + // 
Puts parameter names into compiled class files, necessary for Spring 6
+          options.compilerArgs.add("-parameters")
+        }
+
+        tasks.withType(JavaExec).configureEach {
+          javaLauncher = javaToolchains.launcherFor {
+            languageVersion = JavaLanguageVersion.of(jdkVersion(project))
+          }
+        }
       }
 
     afterEvaluate {
@@ -394,6 +453,16 @@ subprojects {
         dataTemplateCompile externalDependency.annotationApi // support > jdk8
         restClientCompile spec.product.pegasus.restliClient
       }
+
+      compileJava {
+        options.release = javaClassVersion(project)
+      }
+
+      tasks.withType(JavaExec).configureEach {
+        javaLauncher = javaToolchains.launcherFor {
+          languageVersion = JavaLanguageVersion.of(jdkVersion(project))
+        }
+      }
     }
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java
index 453eddd3ae56c..806fd47c721ec 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java
@@ -50,22 +50,21 @@ default boolean containsDuplicateAspects() {
 
   default Map<String, Set<String>> getUrnAspectsMap() {
     return getItems().stream()
-        .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName()))
+        .map(aspect -> Pair.of(aspect.getUrn().toString(), aspect.getAspectName()))
         .collect(
             Collectors.groupingBy(
-                Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet())));
+                Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet())));
   }
 
   default Map<String, Set<String>> getNewUrnAspectsMap(
      Map<String, Set<String>> existingMap, List<? extends BatchItem> items) {
    Map<String, Set<String>> newItemsMap =
        items.stream()
-            .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName()))
+            .map(aspect -> Pair.of(aspect.getUrn().toString(), aspect.getAspectName()))
            .collect(
                Collectors.groupingBy(
-                    Map.Entry::getKey,
-                    Collectors.mapping(
-                        Map.Entry::getValue, Collectors.toCollection(HashSet::new))));
+                    Pair::getKey,
+                    Collectors.mapping(Pair::getValue, Collectors.toCollection(HashSet::new))));
 
     return newItemsMap.entrySet().stream()
         .filter(
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java
index c73ccbb2d93e3..484603b9c1f85 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java
@@ -4,6 +4,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.github.fge.jsonpatch.JsonPatch;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import javax.annotation.Nonnull;
@@ -24,7 +25,7 @@ public class GenericJsonPatch {
 
   @Nonnull
   public Map<String, List<String>> getArrayPrimaryKeys() {
-    return arrayPrimaryKeys == null ? Map.of() : arrayPrimaryKeys;
+    return arrayPrimaryKeys == null ?
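+    // (A sketch of the JDK 8 compatibility pattern this patch applies throughout:
+    //  the java.util collection factories are JDK 9+, so
+    //    Map.of()        -> Collections.emptyMap()
+    //    List.of()       -> Collections.emptyList()
+    //    Map.entry(k, v) -> Pair.of(k, v)
+    //  keeps these classes loadable on a Java 8 Spark runtime.)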
Collections.emptyMap() : arrayPrimaryKeys; } @JsonIgnore diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java index aec0a4cfa0706..5f35cb0447e48 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java @@ -12,6 +12,7 @@ import io.github.classgraph.ClassInfo; import io.github.classgraph.MethodInfo; import io.github.classgraph.ScanResult; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -40,7 +41,7 @@ public static PluginFactory withCustomClasspath( } public static PluginFactory withConfig(@Nullable PluginConfiguration pluginConfiguration) { - return PluginFactory.withCustomClasspath(pluginConfiguration, List.of()); + return PluginFactory.withCustomClasspath(pluginConfiguration, Collections.emptyList()); } public static PluginFactory empty() { @@ -180,7 +181,7 @@ public EntityRegistryLoadResult.PluginLoadResult getPluginLoadResult() { private List buildAspectPayloadValidators( @Nullable PluginConfiguration pluginConfiguration) { return pluginConfiguration == null - ? List.of() + ? Collections.emptyList() : applyDisable( build( AspectPayloadValidator.class, @@ -190,7 +191,7 @@ private List buildAspectPayloadValidators( private List buildMutationHooks(@Nullable PluginConfiguration pluginConfiguration) { return pluginConfiguration == null - ? List.of() + ? Collections.emptyList() : applyDisable( build(MutationHook.class, pluginConfiguration.getMutationHooks(), HOOK_PACKAGES)); } @@ -198,7 +199,7 @@ private List buildMutationHooks(@Nullable PluginConfiguration plug private List buildMCLSideEffects( @Nullable PluginConfiguration pluginConfiguration) { return pluginConfiguration == null - ? List.of() + ? Collections.emptyList() : applyDisable( build(MCLSideEffect.class, pluginConfiguration.getMclSideEffects(), HOOK_PACKAGES)); } @@ -206,7 +207,7 @@ private List buildMCLSideEffects( private List buildMCPSideEffects( @Nullable PluginConfiguration pluginConfiguration) { return pluginConfiguration == null - ? List.of() + ? 
Collections.emptyList() : applyDisable( build(MCPSideEffect.class, pluginConfiguration.getMcpSideEffects(), HOOK_PACKAGES)); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/PluginConfiguration.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/PluginConfiguration.java index a4d0678c130f3..a2caab7be5f80 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/PluginConfiguration.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/PluginConfiguration.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.aspect.plugins.config; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -11,10 +12,10 @@ @AllArgsConstructor @NoArgsConstructor public class PluginConfiguration { - private List aspectPayloadValidators = List.of(); - private List mutationHooks = List.of(); - private List mclSideEffects = List.of(); - private List mcpSideEffects = List.of(); + private List aspectPayloadValidators = Collections.emptyList(); + private List mutationHooks = Collections.emptyList(); + private List mclSideEffects = Collections.emptyList(); + private List mcpSideEffects = Collections.emptyList(); public static PluginConfiguration EMPTY = new PluginConfiguration(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java index 00a20b3131c2a..11cd2352025ef 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java @@ -1,10 +1,12 @@ package com.linkedin.metadata.aspect.plugins.validation; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.entity.Aspect; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; +import java.util.Collections; import java.util.Map; import java.util.Set; import javax.annotation.Nonnull; @@ -15,8 +17,8 @@ public interface AspectRetriever { @Nullable default Aspect getLatestAspectObject(@Nonnull final Urn urn, @Nonnull final String aspectName) throws RemoteInvocationException, URISyntaxException { - return getLatestAspectObjects(Set.of(urn), Set.of(aspectName)) - .getOrDefault(urn, Map.of()) + return getLatestAspectObjects(ImmutableSet.of(urn), ImmutableSet.of(aspectName)) + .getOrDefault(urn, Collections.emptyMap()) .get(aspectName); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index bd9a6b6c9e589..41043995a3b77 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -28,11 +28,13 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import lombok.Getter; import 
lombok.extern.slf4j.Slf4j; @@ -67,7 +69,10 @@ public class ConfigEntityRegistry implements EntityRegistry { public ConfigEntityRegistry(Pair configFileClassPathPair) throws IOException { this( DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), - DataSchemaFactory.getClassLoader(configFileClassPathPair.getSecond()).stream().toList(), + DataSchemaFactory.getClassLoader(configFileClassPathPair.getSecond()) + .map(Stream::of) + .orElse(Stream.empty()) + .collect(Collectors.toList()), configFileClassPathPair.getFirst()); } @@ -112,7 +117,7 @@ private static Pair getFileAndClassPath(String entityRegistryRoot) } public ConfigEntityRegistry(InputStream configFileInputStream) { - this(DataSchemaFactory.getInstance(), List.of(), configFileInputStream); + this(DataSchemaFactory.getInstance(), Collections.emptyList(), configFileInputStream); } public ConfigEntityRegistry( diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java index 35bfe935423f0..b82b905c50004 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java @@ -32,6 +32,7 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -93,7 +94,10 @@ public PatchEntityRegistry( throws IOException, EntityRegistryException { this( DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), - DataSchemaFactory.getClassLoader(configFileClassPathPair.getSecond()).stream().toList(), + DataSchemaFactory.getClassLoader(configFileClassPathPair.getSecond()) + .map(Stream::of) + .orElse(Stream.empty()) + .collect(Collectors.toList()), configFileClassPathPair.getFirst(), registryName, registryVersion); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java index 076387909326b..12a29a7e1757a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.models.registry.config; +import java.util.Collections; import java.util.Set; import lombok.Builder; import lombok.Data; @@ -23,9 +24,9 @@ public static class PluginLoadResult { private int mcpSideEffectCount; private int mclSideEffectCount; - @Builder.Default private Set validatorClasses = Set.of(); - @Builder.Default private Set mutationHookClasses = Set.of(); - @Builder.Default private Set mcpSideEffectClasses = Set.of(); - @Builder.Default private Set mclSideEffectClasses = Set.of(); + @Builder.Default private Set validatorClasses = Collections.emptySet(); + @Builder.Default private Set mutationHookClasses = Collections.emptySet(); + @Builder.Default private Set mcpSideEffectClasses = Collections.emptySet(); + @Builder.Default private Set mclSideEffectClasses = Collections.emptySet(); } } diff --git a/ingestion-scheduler/build.gradle b/ingestion-scheduler/build.gradle index dc9887406b8b4..9505ec57aa858 100644 --- a/ingestion-scheduler/build.gradle +++ 
b/ingestion-scheduler/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(path: ':metadata-models') @@ -7,6 +9,7 @@ dependencies { implementation project(':metadata-service:configuration') implementation externalDependency.slf4jApi + implementation externalDependency.springContext compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok diff --git a/metadata-events/mxe-avro/build.gradle b/metadata-events/mxe-avro/build.gradle index 3aebc6bb1004d..58e82aff464d5 100644 --- a/metadata-events/mxe-avro/build.gradle +++ b/metadata-events/mxe-avro/build.gradle @@ -1,10 +1,12 @@ +plugins { + id 'java-library' + id 'io.acryl.gradle.plugin.avro' +} + configurations { avsc } -apply plugin: 'io.acryl.gradle.plugin.avro' -apply plugin: 'java-library' - dependencies { api externalDependency.avro implementation(externalDependency.avroCompiler) { diff --git a/metadata-events/mxe-registration/build.gradle b/metadata-events/mxe-registration/build.gradle index 2842dd935c7ee..d4b4d446996fa 100644 --- a/metadata-events/mxe-registration/build.gradle +++ b/metadata-events/mxe-registration/build.gradle @@ -1,4 +1,7 @@ -apply plugin: 'java' +plugins { + id 'java' + id 'pegasus' +} configurations { avroOriginal diff --git a/metadata-events/mxe-schemas/build.gradle b/metadata-events/mxe-schemas/build.gradle index 8dc8b71bd1cd8..ab0ea8b649e9d 100644 --- a/metadata-events/mxe-schemas/build.gradle +++ b/metadata-events/mxe-schemas/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'java-library' -apply plugin: 'pegasus' +plugins { + id 'java-library' + id 'pegasus' +} dependencies { dataModel project(path: ':li-utils', configuration: 'dataTemplate') diff --git a/metadata-events/mxe-utils-avro/build.gradle b/metadata-events/mxe-utils-avro/build.gradle index 98bfb9127b209..860ced6af2581 100644 --- a/metadata-events/mxe-utils-avro/build.gradle +++ b/metadata-events/mxe-utils-avro/build.gradle @@ -1,5 +1,6 @@ plugins { id 'java-library' + id 'pegasus' } dependencies { diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 8e05b7ef8f5d6..873943fd43781 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -1,13 +1,13 @@ plugins { id("com.palantir.git-version") apply false + id 'java' + id 'com.github.johnrengelman.shadow' + id 'jacoco' + id 'signing' + id 'io.codearte.nexus-staging' + id 'maven-publish' } -apply plugin: 'java' -apply plugin: 'com.github.johnrengelman.shadow' -apply plugin: 'jacoco' -apply plugin: 'signing' -apply plugin: 'io.codearte.nexus-staging' -apply plugin: 'maven-publish' -apply plugin: 'org.hidetake.swagger.generator' + apply from: "../versioning.gradle" import org.apache.tools.ant.filters.ReplaceTokens @@ -28,10 +28,7 @@ dependencies { compileOnly externalDependency.httpAsyncClient implementation externalDependency.jacksonDataBind - implementation externalDependency.javaxValidation runtimeOnly externalDependency.jna - implementation externalDependency.springContext - implementation externalDependency.swaggerAnnotations implementation externalDependency.slf4jApi compileOnly externalDependency.lombok @@ -45,8 +42,6 @@ dependencies { testImplementation externalDependency.testContainers testImplementation externalDependency.httpAsyncClient testRuntimeOnly externalDependency.logbackClassic - - swaggerCodegen externalDependency.swaggerCli } task copyAvroSchemas { @@ -80,7 
+75,7 @@ shadowJar { // preventing java multi-release JAR leakage // https://github.com/johnrengelman/shadow/issues/729 exclude('module-info.class', 'META-INF/versions/**', - '**/LICENSE', '**/LICENSE*.txt', '**/NOTICE', '**/NOTICE.txt', 'licenses/**', 'log4j2.xml', 'log4j.xml') + '**/LICENSE', '**/LICENSE*.txt', '**/NOTICE', '**/NOTICE.txt', 'licenses/**', 'log4j2.*', 'log4j.*') mergeServiceFiles() // we relocate namespaces manually, because we want to know exactly which libs we are exposing and why // we can move to automatic relocation using ConfigureShadowRelocation after we get to a good place on these first @@ -209,28 +204,4 @@ nexusStaging { //required only for projects registered in Sonatype after 2021-02-24 username = System.getenv("NEXUS_USERNAME") password = System.getenv("NEXUS_PASSWORD") -} - -tasks.register('generateOpenApiPojos', GenerateSwaggerCode) { - it.setInputFile(file("${project(':metadata-models').projectDir}/src/generatedJsonSchema/combined/open-api.yaml")) - it.setOutputDir(file("$projectDir/generated")) - it.setLanguage("spring") - it.setComponents(['models']) - it.setTemplateDir(file("$projectDir/src/main/resources/JavaSpring")) - it.setAdditionalProperties(["group-id" : "io.datahubproject", - "dateLibrary" : "java8", - "java8" : "true", - "modelPropertyNaming": "original", - "modelPackage" : "io.datahubproject.openapi.generated"] as Map) - - dependsOn ':metadata-models:generateJsonSchema' -} - -compileJava.dependsOn generateOpenApiPojos -processResources.dependsOn generateOpenApiPojos -sourceSets.main.java.srcDir "${generateOpenApiPojos.outputDir}/src/main/java" -sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" - -clean { - project.delete("$projectDir/generated") } \ No newline at end of file diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 568b99acdf894..f96517d93fca6 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'java-library' -apply plugin: 'org.hidetake.swagger.generator' +plugins { + id 'java-library' + id 'pegasus' +} configurations { enhance @@ -46,8 +48,8 @@ dependencies { implementation externalDependency.ebeanDdl implementation externalDependency.opentelemetryAnnotations implementation externalDependency.resilience4j - api externalDependency.springContext - implementation externalDependency.swaggerAnnotations + // Newer Spring libraries require JDK17 classes, allow for JDK11 + compileOnly externalDependency.springBootAutoconfigureJdk11 implementation(externalDependency.mixpanel) { exclude group: 'org.json', module: 'json' } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index 0f22b75b69f10..bdc0332b040df 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -106,13 +106,16 @@ private AggregationBuilder facetToAggregationBuilder(final String inputFacet) { if (facet.contains(AGGREGATION_SPECIAL_TYPE_DELIMITER)) { List specialTypeFields = List.of(facet.split(AGGREGATION_SPECIAL_TYPE_DELIMITER)); switch (specialTypeFields.get(0)) { - case MISSING_SPECIAL_TYPE -> aggBuilder = - INDEX_VIRTUAL_FIELD.equalsIgnoreCase(specialTypeFields.get(1)) - ? 
AggregationBuilders.missing(inputFacet).field(getAggregationField("_index")) - : AggregationBuilders.missing(inputFacet) - .field(getAggregationField(specialTypeFields.get(1))); - default -> throw new UnsupportedOperationException( - "Unknown special type: " + specialTypeFields.get(0)); + case MISSING_SPECIAL_TYPE: + aggBuilder = + INDEX_VIRTUAL_FIELD.equalsIgnoreCase(specialTypeFields.get(1)) + ? AggregationBuilders.missing(inputFacet).field(getAggregationField("_index")) + : AggregationBuilders.missing(inputFacet) + .field(getAggregationField(specialTypeFields.get(1))); + break; + default: + throw new UnsupportedOperationException( + "Unknown special type: " + specialTypeFields.get(0)); } } else { aggBuilder = diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 05fa6f45fcb30..c5a5ade216bf7 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -648,10 +648,12 @@ public static Map extractAggregationsFromResponse( if (aggregation == null) { return Collections.emptyMap(); } - if (aggregation instanceof ParsedTerms terms) { - return extractTermAggregations(terms, aggregationName.equals("_entityType")); - } else if (aggregation instanceof ParsedMissing missing) { - return Collections.singletonMap(missing.getName(), missing.getDocCount()); + if (aggregation instanceof ParsedTerms) { + return extractTermAggregations( + (ParsedTerms) aggregation, aggregationName.equals("_entityType")); + } else if (aggregation instanceof ParsedMissing) { + return Collections.singletonMap( + aggregation.getName(), ((ParsedMissing) aggregation).getDocCount()); } throw new UnsupportedOperationException( "Unsupported aggregation type: " + aggregation.getClass().getName()); @@ -669,10 +671,10 @@ private static Map recursivelyAddNestedSubAggs(@Nullable Aggregati if (aggs != null) { for (Map.Entry entry : aggs.getAsMap().entrySet()) { - if (entry.getValue() instanceof ParsedTerms terms) { - recurseTermsAgg(terms, aggResult, false); - } else if (entry.getValue() instanceof ParsedMissing missing) { - recurseMissingAgg(missing, aggResult); + if (entry.getValue() instanceof ParsedTerms) { + recurseTermsAgg((ParsedTerms) entry.getValue(), aggResult, false); + } else if (entry.getValue() instanceof ParsedMissing) { + recurseMissingAgg((ParsedMissing) entry.getValue(), aggResult); } else { throw new UnsupportedOperationException( "Unsupported aggregation type: " + entry.getValue().getClass().getName()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index 1f39a3947c47a..ee2d794471f6b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -129,7 +129,8 @@ public void handleChangeEvent(@Nonnull final MetadataChangeLog event) { .stream() .flatMap(mclSideEffect -> mclSideEffect.apply(List.of(batch), aspectRetriever)); - for (MCLBatchItem mclBatchItem : Stream.concat(Stream.of(batch), sideEffects).toList()) { + for (MCLBatchItem mclBatchItem : + Stream.concat(Stream.of(batch), 
sideEffects).collect(Collectors.toList())) { MetadataChangeLog hookEvent = mclBatchItem.getMetadataChangeLog(); if (UPDATE_CHANGE_TYPES.contains(hookEvent.getChangeType())) { handleUpdateChangeEvent(mclBatchItem); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index 71ffd603c999f..a2b36b7d8ddb8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -526,7 +526,7 @@ public TimeseriesScrollResult scrollAspects( List> resultPairs = Arrays.stream(response.getHits().getHits()) .map(ElasticSearchTimeseriesAspectService::toEnvAspectGenericDocument) - .toList(); + .collect(Collectors.toList()); return TimeseriesScrollResult.builder() .numResults(totalCount) diff --git a/metadata-jobs/common/build.gradle b/metadata-jobs/common/build.gradle index bdc3b7a44a98a..b0a3a6827b729 100644 --- a/metadata-jobs/common/build.gradle +++ b/metadata-jobs/common/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'java' + id 'java-library' } dependencies { diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index 86f404adb7fef..179e1eac177ac 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -18,10 +18,14 @@ dependencies { api project(path: ':li-utils', configuration: "dataTemplate") dataModel project(':li-utils') + // Newer Spring libraries require JDK17 classes, allow for JDK11 + compileOnly externalDependency.springBootAutoconfigureJdk11 + compileOnly externalDependency.annotationApi + compileOnly externalDependency.javaxValidation + compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - compileOnly externalDependency.swaggerAnnotations - compileOnly externalDependency.springBootStarterValidation + api externalDependency.swaggerAnnotations compileOnly externalDependency.jacksonCore compileOnly externalDependency.jacksonDataBind diff --git a/metadata-models/src/main/resources/JavaSpring/model.mustache b/metadata-models/src/main/resources/JavaSpring/model.mustache index 72da42612777c..a048f249a6b3d 100644 --- a/metadata-models/src/main/resources/JavaSpring/model.mustache +++ b/metadata-models/src/main/resources/JavaSpring/model.mustache @@ -9,9 +9,9 @@ import java.io.Serializable; {{/serializableModel}} {{#useBeanValidation}} import org.springframework.validation.annotation.Validated; -import jakarta.validation.Valid; +import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; -import jakarta.validation.constraints.*; +import javax.validation.constraints.*; {{/useBeanValidation}} {{#jackson}} {{#withXml}} @@ -20,7 +20,7 @@ import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty; {{/withXml}} {{/jackson}} {{#withXml}} -import jakarta.xml.bind.annotation.*; +import javax.xml.bind.annotation.*; {{/withXml}} {{/x-is-composed-model}} diff --git a/metadata-service/configuration/build.gradle b/metadata-service/configuration/build.gradle index 80cf6541261c2..f912e2ac01f0b 100644 --- a/metadata-service/configuration/build.gradle +++ b/metadata-service/configuration/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'java' + id 'java-library' } apply from: "../../gradle/versioning/versioning.gradle" @@ -7,8 +7,9 @@ dependencies { implementation 
externalDependency.jacksonDataBind implementation externalDependency.slf4jApi - implementation externalDependency.springCore - implementation externalDependency.springBeans + + // Newer Spring libraries require JDK17 classes, allow for JDK11 + compileOnly externalDependency.springBootAutoconfigureJdk11 compileOnly externalDependency.lombok diff --git a/metadata-service/restli-servlet-impl/build.gradle b/metadata-service/restli-servlet-impl/build.gradle index ec5b645ee233c..8d21bdd489505 100644 --- a/metadata-service/restli-servlet-impl/build.gradle +++ b/metadata-service/restli-servlet-impl/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'java' + id 'java-library' id 'pegasus' } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java index 546c2856c28ac..0a29ebfe46415 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java @@ -8,6 +8,7 @@ import com.linkedin.metadata.recommendation.RecommendationParams; import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -29,7 +30,7 @@ default Stream buildContent( entityUrns.stream() .map(UrnUtils::getUrn) .filter(urn -> getSupportedEntityTypes().contains(urn.getEntityType())) - .toList(); + .collect(Collectors.toList()); Set existingNonRemoved = entityService.exists(entities, false); return entities.stream().filter(existingNonRemoved::contains).map(this::buildContent); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java index 22496b6c07806..666fe23a93187 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java @@ -126,7 +126,7 @@ public RollbackResponse rollbackIngestion( !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) - .toList(); + .collect(Collectors.toList()); long unsafeEntitiesCount = affectedAspectsList.stream() @@ -212,7 +212,7 @@ public RollbackResponse rollbackIngestion( !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) - .toList(); + .collect(Collectors.toList()); long affectedAspects = affectedAspectsList.size(); long unsafeEntitiesCount = diff --git a/metadata-utils/build.gradle b/metadata-utils/build.gradle index 3d65675219624..919d93c5f9fe1 100644 --- a/metadata-utils/build.gradle +++ b/metadata-utils/build.gradle @@ -1,5 +1,6 @@ plugins { id 'java-library' + id 'pegasus' } dependencies { From f627fc459dd7181203392036779e12cdd3e3881f Mon Sep 17 00:00:00 2001 From: Davi Arnaut Date: Tue, 23 Jan 2024 22:34:18 -0800 Subject: [PATCH 425/792] feat(ingest): add ODBC library and tools to base image (#9701) --- docker/datahub-ingestion-base/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 558a5afe2c2cf..0bf0d2f88af73 100644 --- 
a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -48,6 +48,8 @@ RUN apt-get update && apt-get install -y -qq \ zip \ unzip \ ldap-utils \ + unixodbc \ + libodbc2 \ && python -m pip install --no-cache --upgrade pip wheel setuptools \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* From c158d2b9ecd42365994a9eaa1b959fdcb2749d26 Mon Sep 17 00:00:00 2001 From: muzzacode <109360468+muzzacode@users.noreply.github.com> Date: Wed, 24 Jan 2024 12:07:16 +0530 Subject: [PATCH 426/792] feat (resolver): Add new endpoint for editing secrets (#9665) --- .../datahub/graphql/GmsGraphQLEngine.java | 3 + .../ingest/secret/CreateSecretResolver.java | 19 ++-- .../ingest/secret/UpdateSecretResolver.java | 82 ++++++++++++++++ .../mapper/DataHubSecretValueMapper.java | 55 +++++++++++ .../src/main/resources/ingestion.graphql | 30 ++++++ .../secret/UpdateSecretResolverTest.java | 98 +++++++++++++++++++ .../src/graphql/ingestion.graphql | 4 + .../managed_ingestion_test.py | 23 ++++- 8 files changed, 304 insertions(+), 10 deletions(-) create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 4819510d34018..4b5bbdb6e15ec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -170,6 +170,7 @@ import com.linkedin.datahub.graphql.resolvers.ingest.secret.DeleteSecretResolver; import com.linkedin.datahub.graphql.resolvers.ingest.secret.GetSecretValuesResolver; import com.linkedin.datahub.graphql.resolvers.ingest.secret.ListSecretsResolver; +import com.linkedin.datahub.graphql.resolvers.ingest.secret.UpdateSecretResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.DeleteIngestionSourceResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver; @@ -1086,6 +1087,8 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "updateSecret", new UpdateSecretResolver(this.entityClient, this.secretService)) .dataFetcher( "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) .dataFetcher( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index 577780e53ce86..f5e7cf4d69ce8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -6,11 +6,11 @@ import 
com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateSecretInput; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; +import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.key.DataHubSecretKey; import com.linkedin.metadata.secret.SecretService; @@ -58,14 +58,15 @@ public CompletableFuture get(final DataFetchingEnvironment environment) } // Create the secret value. - final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated( - new AuditStamp() - .setActor(UrnUtils.getUrn(context.getActorUrn())) - .setTime(System.currentTimeMillis())); + final DataHubSecretValue value = + DataHubSecretValueMapper.map( + null, + input.getName(), + _secretService.encrypt(input.getValue()), + input.getDescription(), + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java new file mode 100644 index 0000000000000..20a685265b545 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java @@ -0,0 +1,82 @@ +package com.linkedin.datahub.graphql.resolvers.ingest.secret; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithUrn; +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateSecretInput; +import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; +import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.secret.SecretService; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.secret.DataHubSecretValue; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. 
+ */ +@Slf4j +@RequiredArgsConstructor +public class UpdateSecretResolver implements DataFetcher> { + private final EntityClient entityClient; + private final SecretService secretService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final UpdateSecretInput input = + bindArgument(environment.getArgument("input"), UpdateSecretInput.class); + final Urn secretUrn = Urn.createFromString(input.getUrn()); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { + + try { + EntityResponse response = + entityClient.getV2( + secretUrn.getEntityType(), + secretUrn, + Set.of(SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); + if (!entityClient.exists(secretUrn, context.getAuthentication()) + || response == null) { + throw new IllegalArgumentException( + String.format("Secret for urn %s doesn't exists!", secretUrn)); + } + + DataHubSecretValue updatedVal = + DataHubSecretValueMapper.map( + response, + input.getName(), + secretService.encrypt(input.getValue()), + input.getDescription(), + null); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + secretUrn, SECRET_VALUE_ASPECT_NAME, updatedVal); + return entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update a secret with urn %s and name %s", + secretUrn, input.getName()), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java new file mode 100644 index 0000000000000..2c5e84dad28c2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql.types.ingest.secret.mapper; + +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; + +import com.linkedin.common.AuditStamp; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.SetMode; +import com.linkedin.entity.EntityResponse; +import com.linkedin.secret.DataHubSecretValue; +import java.util.Objects; +import javax.annotation.Nonnull; + +/** + * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. + * + *

To be replaced by auto-generated mappers implementations + */ +public class DataHubSecretValueMapper { + + public static final DataHubSecretValueMapper INSTANCE = new DataHubSecretValueMapper(); + + public static DataHubSecretValue map( + EntityResponse fromSecret, + @Nonnull final String name, + @Nonnull final String value, + String description, + AuditStamp auditStamp) { + return INSTANCE.apply(fromSecret, name, value, description, auditStamp); + } + + public DataHubSecretValue apply( + EntityResponse existingSecret, + @Nonnull final String name, + @Nonnull final String value, + String description, + AuditStamp auditStamp) { + final DataHubSecretValue result; + if (Objects.nonNull(existingSecret)) { + result = + new DataHubSecretValue( + existingSecret.getAspects().get(SECRET_VALUE_ASPECT_NAME).getValue().data()); + } else { + result = new DataHubSecretValue(); + } + + result.setName(name); + result.setValue(value); + result.setDescription(description, SetMode.IGNORE_NULL); + if (Objects.nonNull(auditStamp)) { + result.setCreated(auditStamp); + } + + return result; + } +} diff --git a/datahub-graphql-core/src/main/resources/ingestion.graphql b/datahub-graphql-core/src/main/resources/ingestion.graphql index 21f9fb2633119..d65343c0a16d2 100644 --- a/datahub-graphql-core/src/main/resources/ingestion.graphql +++ b/datahub-graphql-core/src/main/resources/ingestion.graphql @@ -36,6 +36,11 @@ extend type Mutation { """ createSecret(input: CreateSecretInput!): String + """ + Update a Secret + """ + updateSecret(input: UpdateSecretInput!): String + """ Delete a Secret """ @@ -560,6 +565,31 @@ input CreateSecretInput { description: String } +""" +Input arguments for updating a Secret +""" +input UpdateSecretInput { + """ + The primary key of the Secret to update + """ + urn: String! + + """ + The name of the secret for reference in ingestion recipes + """ + name: String! + + """ + The value of the secret, to be encrypted and stored + """ + value: String! 
+ + """ + An optional description for the secret + """ + description: String +} + """ Input arguments for retrieving the plaintext values of a set of secrets """ diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java new file mode 100644 index 0000000000000..73d228d600266 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java @@ -0,0 +1,98 @@ +package com.linkedin.datahub.graphql.resolvers.ingest.secret; + +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.getMockDenyContext; +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.when; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateSecretInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.secret.SecretService; +import com.linkedin.secret.DataHubSecretValue; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpdateSecretResolverTest { + + private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:secret:secret-id"); + + private static final UpdateSecretInput TEST_INPUT = + new UpdateSecretInput(TEST_URN.toString(), "MY_SECRET", "mysecretvalue", "dummy"); + + private DataFetchingEnvironment mockEnv; + private EntityClient mockClient; + private SecretService mockSecretService; + private UpdateSecretResolver resolver; + + @BeforeMethod + public void before() { + mockClient = Mockito.mock(EntityClient.class); + mockSecretService = Mockito.mock(SecretService.class); + + resolver = new UpdateSecretResolver(mockClient, mockSecretService); + } + + private DataHubSecretValue createSecretAspect() { + DataHubSecretValue secretAspect = new DataHubSecretValue(); + secretAspect.setValue("encryptedvalue.updated"); + secretAspect.setName(TEST_INPUT.getName() + ".updated"); + secretAspect.setDescription(TEST_INPUT.getDescription() + ".updated"); + secretAspect.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + return secretAspect; + } + + @Test + public void testGetSuccess() throws Exception { + // with valid context + QueryContext mockContext = getMockAllowContext(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when(mockClient.exists(any(), any())).thenReturn(true); + 
Mockito.when(mockSecretService.encrypt(any())).thenReturn("encrypted_value"); + final EntityResponse entityResponse = new EntityResponse(); + final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + aspectMap.put( + SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createSecretAspect().data()))); + entityResponse.setAspects(aspectMap); + + when(mockClient.getV2(any(), any(), any(), any())).thenReturn(entityResponse); + + // Invoke the resolver + resolver.get(mockEnv).join(); + Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(any(), any(), anyBoolean()); + } + + @Test( + description = "validate if nothing provided throws Exception", + expectedExceptions = {AuthorizationException.class, CompletionException.class}) + public void testGetUnauthorized() throws Exception { + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).join(); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(), any(Authentication.class), anyBoolean()); + } +} diff --git a/datahub-web-react/src/graphql/ingestion.graphql b/datahub-web-react/src/graphql/ingestion.graphql index 1767fe34bfef0..4d6f090b99356 100644 --- a/datahub-web-react/src/graphql/ingestion.graphql +++ b/datahub-web-react/src/graphql/ingestion.graphql @@ -145,6 +145,10 @@ mutation createSecret($input: CreateSecretInput!) { createSecret(input: $input) } +mutation updateSecret($input: UpdateSecretInput!) { + updateSecret(input: $input) +} + mutation deleteSecret($urn: String!) { deleteSecret(urn: $urn) } diff --git a/smoke-test/tests/managed-ingestion/managed_ingestion_test.py b/smoke-test/tests/managed-ingestion/managed_ingestion_test.py index b5e408731334e..6d95f731f32b1 100644 --- a/smoke-test/tests/managed-ingestion/managed_ingestion_test.py +++ b/smoke-test/tests/managed-ingestion/managed_ingestion_test.py @@ -260,6 +260,27 @@ def test_create_list_get_remove_secret(frontend_session): # Get new count of secrets _ensure_secret_increased(frontend_session, before_count) + # Update existing secret + json_q = { + "query": """mutation updateSecret($input: UpdateSecretInput!) {\n + updateSecret(input: $input) + }""", + "variables": {"input": {"urn": secret_urn, "name": "SMOKE_TEST", "value": "mytestvalue.updated"}}, + } + + response = frontend_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=json_q + ) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["updateSecret"] is not None + assert "errors" not in res_data + + secret_urn = res_data["data"]["updateSecret"] + # Get the secret value back json_q = { "query": """query getSecretValues($input: GetSecretValuesInput!) 
{\n @@ -285,7 +306,7 @@ def test_create_list_get_remove_secret(frontend_session): secret_values = res_data["data"]["getSecretValues"] secret_value = [x for x in secret_values if x["name"] == "SMOKE_TEST"][0] - assert secret_value["value"] == "mytestvalue" + assert secret_value["value"] == "mytestvalue.updated" # Now cleanup and remove the secret json_q = { From c4dec931a370a92e115c5213f013620bfe4f2d58 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 24 Jan 2024 12:14:14 +0530 Subject: [PATCH 427/792] feat(ingest): handling for const in json schema (#9694) Co-authored-by: Harshal Sheth --- .../datahub/ingestion/extractor/json_schema_util.py | 10 ++++++---- .../tests/unit/schema/test_json_schema_util.py | 13 +++++++++++++ 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py b/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py index 360ddf1129154..52d2e4a8f56e3 100644 --- a/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py +++ b/metadata-ingestion/src/datahub/ingestion/extractor/json_schema_util.py @@ -316,10 +316,12 @@ def _get_discriminated_type_from_schema(schema: Dict) -> str: @staticmethod def _get_description_from_any_schema(schema: Dict) -> str: - # we do a redundant `if description in schema` check to guard against the scenario that schema is not a dictionary - description = ( - (schema.get("description") or "") if "description" in schema else "" - ) + description = "" + if "description" in schema: + description = str(schema.get("description")) + elif "const" in schema: + schema_const = schema.get("const") + description = f"Const value: {schema_const}" if JsonSchemaTranslator._INJECT_DEFAULTS_INTO_DESCRIPTION: default = schema.get("default") if default is not None: diff --git a/metadata-ingestion/tests/unit/schema/test_json_schema_util.py b/metadata-ingestion/tests/unit/schema/test_json_schema_util.py index 2635363ed8d2e..5e095fc0df8dc 100644 --- a/metadata-ingestion/tests/unit/schema/test_json_schema_util.py +++ b/metadata-ingestion/tests/unit/schema/test_json_schema_util.py @@ -725,6 +725,19 @@ def test_non_str_enums(): assert fields[0].description == 'One of: "baz", 1, null' +def test_const_description_pulled_correctly(): + schema = { + "$id": "test", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": {"bar": {"type": "string", "const": "not_defined"}}, + } + + fields = list(JsonSchemaTranslator.get_fields_from_schema(schema)) + expected_field_paths: List[str] = ["[version=2.0].[type=object].[type=string].bar"] + assert_field_paths_match(fields, expected_field_paths) + assert fields[0].description == "Const value: not_defined" + + def test_anyof_with_properties(): # We expect the event / timestamp fields to be included in both branches of the anyOf. 
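The `const` handling added in the patch above is easiest to see end to end. The following minimal sketch (not part of the patch series; the `mode`/`append` schema is an illustrative assumption) drives `JsonSchemaTranslator` the same way the new unit test does:

```python
# Illustrative sketch of the behavior added in PATCH 427: a schema property
# that declares a `const` but no explicit description gets a generated
# description of the form "Const value: <const>".
from datahub.ingestion.extractor.json_schema_util import JsonSchemaTranslator

schema = {
    "$id": "test",
    "$schema": "http://json-schema.org/draft-07/schema#",
    "properties": {"mode": {"type": "string", "const": "append"}},
}

fields = list(JsonSchemaTranslator.get_fields_from_schema(schema))
# Mirrors the assertion added in test_json_schema_util.py above.
assert fields[0].description == "Const value: append"
```
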
From 2f36817e95f9853e0a0302888136a150fe8a8889 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 23 Jan 2024 22:58:46 -0800 Subject: [PATCH 428/792] refactor(ingest): simplify adding aspects to MCEs in transformers (#9686) --- .../datahub/api/entities/dataset/dataset.py | 2 +- metadata-ingestion/src/datahub/entrypoints.py | 13 ---- .../ingestion/transformer/base_transformer.py | 62 +++++++++---------- .../transformer/extract_dataset_tags.py | 2 +- 4 files changed, 32 insertions(+), 47 deletions(-) diff --git a/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py b/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py index 3b4a5fbfbb061..a1498a6ca961e 100644 --- a/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py +++ b/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py @@ -147,7 +147,7 @@ def platform_urn(self) -> str: return make_data_platform_urn(self.platform) else: assert self.urn is not None # validator should have filled this in - dataset_urn = DatasetUrn.create_from_string(self.urn) + dataset_urn = DatasetUrn.from_string(self.urn) return str(dataset_urn.get_data_platform_urn()) @validator("urn", pre=True, always=True) diff --git a/metadata-ingestion/src/datahub/entrypoints.py b/metadata-ingestion/src/datahub/entrypoints.py index 4989f984badcc..1bf090a2e514e 100644 --- a/metadata-ingestion/src/datahub/entrypoints.py +++ b/metadata-ingestion/src/datahub/entrypoints.py @@ -62,13 +62,6 @@ default=None, help="Enable debug logging.", ) -@click.option( - "--debug-vars/--no-debug-vars", - type=bool, - is_flag=True, - default=False, - help="Show variable values in stack traces. Implies --debug. While we try to avoid printing sensitive information like passwords, this may still happen.", -) @click.version_option( version=datahub_package.nice_version_name(), prog_name=datahub_package.__package_name__, @@ -76,13 +69,7 @@ def datahub( debug: bool, log_file: Optional[str], - debug_vars: bool, ) -> None: - if debug_vars: - # debug_vars implies debug. This option isn't actually used here, but instead - # read directly from the command line arguments in the main entrypoint. - debug = True - debug = debug or get_boolean_env_variable("DATAHUB_DEBUG", False) # Note that we're purposely leaking the context manager here. diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py index 254b3d084f2be..e8e25a061a665 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py @@ -77,7 +77,7 @@ def __init__(self): mixedin = mixedin or isinstance(self, mixin) if not mixedin: assert ( - "Class does not implement one of required traits {self.allowed_mixins}" + f"Class does not implement one of required traits {self.allowed_mixins}" ) def _should_process( @@ -135,38 +135,37 @@ def _transform_or_record_mce( if mce.proposedSnapshot: self._record_mce(mce) if isinstance(self, SingleAspectTransformer): - aspect_type = ASPECT_MAP.get(self.aspect_name()) - if aspect_type: - # if we find a type corresponding to the aspect name we look for it in the mce - old_aspect = ( - builder.get_aspect_if_available( + aspect_type = ASPECT_MAP[self.aspect_name()] + + # If we find a type corresponding to the aspect name we look for it in the mce + # It's possible that the aspect is supported by the entity but not in the MCE + # snapshot union. 
In those cases, we just want to record the urn as seen. + supports_aspect = builder.can_add_aspect(mce, aspect_type) + if supports_aspect: + old_aspect = builder.get_aspect_if_available( + mce, + aspect_type, + ) + if old_aspect is not None: + # TRICKY: If the aspect is not present in the MCE, it might still show up in a + # subsequent MCP. As such, we _only_ mark the urn as processed if we actually + # find the aspect already in the MCE. + + transformed_aspect = self.transform_aspect( + entity_urn=mce.proposedSnapshot.urn, + aspect_name=self.aspect_name(), + aspect=old_aspect, + ) + + # If transformed_aspect is None, this will remove the aspect. + builder.set_aspect( mce, - aspect_type, + aspect_type=aspect_type, + aspect=transformed_aspect, ) - if builder.can_add_aspect(mce, aspect_type) - else None - ) - if old_aspect: - if isinstance(self, LegacyMCETransformer): - # use the transform_one pathway to transform this MCE - envelope.record = self.transform_one(mce) - else: - transformed_aspect = self.transform_aspect( - entity_urn=mce.proposedSnapshot.urn, - aspect_name=self.aspect_name(), - aspect=old_aspect, - ) - builder.set_aspect( - mce, - aspect_type=aspect_type, - aspect=transformed_aspect, - ) - envelope.record = mce + + envelope.record = mce self._mark_processed(mce.proposedSnapshot.urn) - else: - log.warning( - f"Could not locate a snapshot aspect type for aspect {self.aspect_name()}. This can lead to silent drops of messages in transformers." - ) elif isinstance(self, LegacyMCETransformer): # we pass down the full MCE envelope.record = self.transform_one(mce) @@ -202,7 +201,6 @@ def _transform_or_record_mcpw( def _handle_end_of_stream( self, envelope: RecordEnvelope ) -> Iterable[RecordEnvelope]: - if not isinstance(self, SingleAspectTransformer) and not isinstance( self, LegacyMCETransformer ): @@ -265,7 +263,7 @@ def transform( else None, ) if transformed_aspect: - structured_urn = Urn.create_from_string(urn) + structured_urn = Urn.from_string(urn) mcp: MetadataChangeProposalWrapper = ( MetadataChangeProposalWrapper( diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/extract_dataset_tags.py b/metadata-ingestion/src/datahub/ingestion/transformer/extract_dataset_tags.py index 25b18f0806fd6..4b64d38a9b42f 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/extract_dataset_tags.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/extract_dataset_tags.py @@ -34,7 +34,7 @@ def create(cls, config_dict: dict, ctx: PipelineContext) -> "ExtractDatasetTags" def _get_tags_to_add(self, entity_urn: str) -> List[TagAssociationClass]: if self.config.extract_tags_from == ExtractTagsOption.URN: - urn = DatasetUrn.create_from_string(entity_urn) + urn = DatasetUrn.from_string(entity_urn) match = re.search(self.config.extract_tags_regex, urn.get_dataset_name()) if match: captured_group = match.group(1) From 7ae88d97ab9b35a4356848ec94c4b885f8f5ae80 Mon Sep 17 00:00:00 2001 From: Dimitri <36767102+dim-ops@users.noreply.github.com> Date: Wed, 24 Jan 2024 07:59:28 +0100 Subject: [PATCH 429/792] docs(openapi): improve description fields (#9690) --- .../src/datahub/ingestion/source/openapi.py | 33 ++++++++++++++----- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi.py b/metadata-ingestion/src/datahub/ingestion/source/openapi.py index ad62ef7362aeb..1b3a6dc4bee58 100755 --- a/metadata-ingestion/src/datahub/ingestion/source/openapi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi.py @@ 
-46,12 +46,20 @@ class OpenApiConfig(ConfigModel): - name: str = Field(description="") - url: str = Field(description="") - swagger_file: str = Field(description="") - ignore_endpoints: list = Field(default=[], description="") - username: str = Field(default="", description="") - password: str = Field(default="", description="") + name: str = Field(description="Name of ingestion.") + url: str = Field(description="Endpoint URL. e.g. https://example.com") + swagger_file: str = Field( + description="Route for access to the swagger file. e.g. openapi.json" + ) + ignore_endpoints: list = Field( + default=[], description="List of endpoints to ignore during ingestion." + ) + username: str = Field( + default="", description="Username used for basic HTTP authentication." + ) + password: str = Field( + default="", description="Password used for basic HTTP authentication." + ) proxies: Optional[dict] = Field( default=None, description="Eg. " @@ -59,9 +67,16 @@ class OpenApiConfig(ConfigModel): "If authentication is required, add it to the proxy url directly e.g. " "`http://user:pass@10.10.1.10:3128/`.", ) - forced_examples: dict = Field(default={}, description="") - token: Optional[str] = Field(default=None, description="") - get_token: dict = Field(default={}, description="") + forced_examples: dict = Field( + default={}, + description="If no example is provided for a route, it is possible to create one using forced_example.", + ) + token: Optional[str] = Field( + default=None, description="Token for endpoint authentication." + ) + get_token: dict = Field( + default={}, description="Retrieving a token from the endpoint." + ) def get_swagger(self) -> Dict: if self.get_token or self.token is not None: From c80383dd1a92a6fd5da935f5cd315e854292bbfc Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Wed, 24 Jan 2024 21:45:00 +0000 Subject: [PATCH 430/792] feat(docs): Add documentation on Incident Change Event (#9709) --- .../datahub-api/entity-events-api.md | 36 +++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/docs/managed-datahub/datahub-api/entity-events-api.md b/docs/managed-datahub/datahub-api/entity-events-api.md index 07fa252249452..23499904d5505 100644 --- a/docs/managed-datahub/datahub-api/entity-events-api.md +++ b/docs/managed-datahub/datahub-api/entity-events-api.md @@ -563,7 +563,7 @@ This event is emitted when an Assertion has been run has succeeded on DataHub. "parameters": { "runResult": "SUCCESS", "runId": "123", - "aserteeUrn": "urn:li:dataset:def" + "asserteeUrn": "urn:li:dataset:def" }, "auditStamp": { "actor": "urn:li:corpuser:jdoe", @@ -808,4 +808,36 @@ These are the common parameters for all parameters. "time": 1649953100653 } } -``` \ No newline at end of file +``` + +### Incident Change Event + +This event is emitted when an Incident has been created or it's status changes. + +#### Header + +
+| Category | Operation        | Entity Types |
+| -------- | ---------------- | ------------ |
+| INCIDENT | ACTIVE, RESOLVED | incident     |
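Note that in the sample event below, the `entities` parameter is delivered as a stringified list of URNs rather than as a JSON array. A minimal consumer-side sketch for unpacking it; the helper below is illustrative only and not part of any DataHub client API:

```
# Hypothetical helper: unpack the stringified "entities" parameter of an
# Incident Change Event. The value looks like
# "[urn:li:dataset:abc, urn:li:dataset:abc2]", not a JSON array.
def parse_incident_entities(event: dict) -> list:
    raw = event.get("parameters", {}).get("entities", "[]")
    inner = raw.strip().lstrip("[").rstrip("]")
    return [urn.strip() for urn in inner.split(",") if urn.strip()]


event = {
    "category": "INCIDENT",
    "operation": "ACTIVE",
    "parameters": {"entities": "[urn:li:dataset:abc, urn:li:dataset:abc2]"},
}
assert parse_incident_entities(event) == [
    "urn:li:dataset:abc",
    "urn:li:dataset:abc2",
]
```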
+ +#### Parameters + +| Name | Type | Description | Optional | +|--------------| ------ |---------------------------------------------------| -------- | +| entities | String | The list of entities associated with the incident | False | + +#### Sample Event + +``` +{ + "entityUrn": "urn:li:incident:16ff200a-0ac5-4a7d-bbab-d4bdb4f831f9", + "entityType": "incident", + "category": "INCIDENT", + "operation": "ACTIVE", + "parameters": { + "entities": "[urn:li:dataset:abc, urn:li:dataset:abc2]", + }, + "auditStamp": { + "actor": "urn:li:corpuser:jdoe", + "time": 1649953100653 + } +} +``` From 9b051e38d6bd9f62ea42ecce1cfbfdf686d9b0e9 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 24 Jan 2024 14:29:41 -0800 Subject: [PATCH 431/792] feat(ingest/dbt): support aws config without region (#9650) Co-authored-by: Tamas Nemeth --- .../ingestion/source/aws/aws_common.py | 4 +-- .../datahub/ingestion/source/aws/sagemaker.py | 4 +-- .../datahub/ingestion/source/dbt/dbt_core.py | 2 +- .../tests/unit/test_dbt_source.py | 33 +++++++++++++++---- 4 files changed, 31 insertions(+), 12 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py index 421991a0966c3..95ca10045f1bb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py @@ -34,7 +34,7 @@ class AwsAssumeRoleConfig(PermissiveConfigModel): def assume_role( role: AwsAssumeRoleConfig, - aws_region: str, + aws_region: Optional[str], credentials: Optional[dict] = None, ) -> dict: credentials = credentials or {} @@ -93,7 +93,7 @@ class AwsConnectionConfig(ConfigModel): default=None, description="Named AWS profile to use. Only used if access key / secret are unset. 
If not set the default will be used", ) - aws_region: str = Field(description="AWS region code.") + aws_region: Optional[str] = Field(None, description="AWS region code.") aws_endpoint_url: Optional[str] = Field( default=None, diff --git a/metadata-ingestion/src/datahub/ingestion/source/aws/sagemaker.py b/metadata-ingestion/src/datahub/ingestion/source/aws/sagemaker.py index 6f6e8bbc05661..e335174eeb003 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/aws/sagemaker.py +++ b/metadata-ingestion/src/datahub/ingestion/source/aws/sagemaker.py @@ -82,7 +82,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: env=self.env, report=self.report, job_type_filter=self.source_config.extract_jobs, - aws_region=self.source_config.aws_region, + aws_region=self.sagemaker_client.meta.region_name, ) yield from job_processor.get_workunits() @@ -98,7 +98,7 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: model_image_to_jobs=model_image_to_jobs, model_name_to_jobs=model_name_to_jobs, lineage=lineage, - aws_region=self.source_config.aws_region, + aws_region=self.sagemaker_client.meta.region_name, ) yield from model_processor.get_workunits() diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index 6fd3c5ba309f9..a2f96264b7f64 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -81,7 +81,7 @@ def aws_connection_needed_if_s3_uris_present( if (values.get(f) or "").startswith("s3://") ] - if uri_containing_fields and not aws_connection: + if uri_containing_fields and aws_connection is None: raise ValueError( f"Please provide aws_connection configuration, since s3 uris have been provided in fields {uri_containing_fields}" ) diff --git a/metadata-ingestion/tests/unit/test_dbt_source.py b/metadata-ingestion/tests/unit/test_dbt_source.py index 0fbe9ecbcc43c..737cf6aca33cc 100644 --- a/metadata-ingestion/tests/unit/test_dbt_source.py +++ b/metadata-ingestion/tests/unit/test_dbt_source.py @@ -1,6 +1,7 @@ from typing import Dict, List, Union from unittest import mock +import pytest from pydantic import ValidationError from datahub.emitter import mce_builder @@ -180,14 +181,12 @@ def test_dbt_entity_emission_configuration(): "target_platform": "dummy_platform", "entities_enabled": {"models": "Only", "seeds": "Only"}, } - try: + with pytest.raises( + ValidationError, + match="Cannot have more than 1 type of entity emission set to ONLY", + ): DBTCoreConfig.parse_obj(config_dict) - except ValidationError as ve: - assert len(ve.errors()) == 1 - assert ( - "Cannot have more than 1 type of entity emission set to ONLY" - in ve.errors()[0]["msg"] - ) + # valid config config_dict = { "manifest_path": "dummy_path", @@ -198,6 +197,26 @@ def test_dbt_entity_emission_configuration(): DBTCoreConfig.parse_obj(config_dict) +def test_dbt_s3_config(): + # test missing aws config + config_dict: dict = { + "manifest_path": "s3://dummy_path", + "catalog_path": "s3://dummy_path", + "target_platform": "dummy_platform", + } + with pytest.raises(ValidationError, match="provide aws_connection"): + DBTCoreConfig.parse_obj(config_dict) + + # valid config + config_dict = { + "manifest_path": "s3://dummy_path", + "catalog_path": "s3://dummy_path", + "target_platform": "dummy_platform", + "aws_connection": {}, + } + DBTCoreConfig.parse_obj(config_dict) + + def test_default_convert_column_urns_to_lowercase(): config_dict = { 
"manifest_path": "dummy_path", From d6a30a74a7877bb22f9cb1c00d22afde8b492a66 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 24 Jan 2024 16:30:40 -0600 Subject: [PATCH 432/792] fix(test): improve cypress tests (#9711) --- smoke-test/tests/cypress/cypress/e2e/glossary/glossary.js | 6 +++--- .../cypress/cypress/e2e/mutations/managed_ingestion.js | 2 +- smoke-test/tests/cypress/cypress/support/commands.js | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary.js b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary.js index dbc4e1db72943..b0e24d5346fea 100644 --- a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary.js +++ b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary.js @@ -1,6 +1,6 @@ const urn = "urn:li:dataset:(urn:li:dataPlatform:hive,cypress_logging_events,PROD)"; const datasetName = "cypress_logging_events"; -const glossaryTerm = "CypressGlosssaryTerm"; +const glossaryTerm = "CypressGlossaryTerm"; const glossaryTermGroup = "CypressGlossaryGroup"; describe("glossary", () => { @@ -8,9 +8,9 @@ describe("glossary", () => { cy.loginWithCredentials(); cy.goToGlossaryList(); cy.clickOptionWithText("Add Term"); - cy.addViaModal(glossaryTerm, "Create Glossary Term", glossaryTerm); + cy.addViaModal(glossaryTerm, "Create Glossary Term", glossaryTerm, "glossary-entity-modal-create-button"); cy.clickOptionWithText("Add Term Group"); - cy.addViaModal(glossaryTermGroup, "Create Term Group", glossaryTermGroup); + cy.addViaModal(glossaryTermGroup, "Create Term Group", glossaryTermGroup, "glossary-entity-modal-create-button"); cy.addTermToDataset(urn, datasetName, glossaryTerm); cy.waitTextVisible(glossaryTerm) cy.goToGlossaryList(); diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js b/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js index 05f94c94bfe2a..c355aaabc336a 100644 --- a/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js +++ b/smoke-test/tests/cypress/cypress/e2e/mutations/managed_ingestion.js @@ -26,7 +26,7 @@ describe("run managed ingestion", () => { cy.enterTextInTestId('source-name-input', testName) cy.clickOptionWithText("Advanced") cy.enterTextInTestId('cli-version-input', cli_version) - cy.clickOptionWithText("Save & Run") + cy.clickOptionWithTextToScrollintoView("Save & Run") cy.waitTextVisible(testName) cy.contains(testName).parent().within(() => { diff --git a/smoke-test/tests/cypress/cypress/support/commands.js b/smoke-test/tests/cypress/cypress/support/commands.js index ba5600b79f5f6..f32512aff45fa 100644 --- a/smoke-test/tests/cypress/cypress/support/commands.js +++ b/smoke-test/tests/cypress/cypress/support/commands.js @@ -183,10 +183,10 @@ Cypress.Commands.add("addViaFormModal", (text, modelHeader) => { cy.get(".ant-modal-footer > button:nth-child(2)").click(); }); -Cypress.Commands.add("addViaModal", (text, modelHeader,value) => { +Cypress.Commands.add("addViaModal", (text, modelHeader, value, dataTestId) => { cy.waitTextVisible(modelHeader); cy.get(".ant-input-affix-wrapper > input[type='text']").first().type(text); - cy.get(".ant-modal-footer > button:nth-child(2)").click(); + cy.get('[data-testid="' + dataTestId + '"]').click(); cy.contains(value).should('be.visible'); }); From 9d8e2b9067781f0eabb53362609b3a19e5d5adfb Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 24 Jan 2024 14:38:25 -0800 Subject: [PATCH 433/792] feat(ingest/tableau): map trino_jdbc 
platform type (#9708) --- .../src/datahub/ingestion/source/tableau_common.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py index a2f460feca388..121b2e257a6ba 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py @@ -533,6 +533,9 @@ def get_platform(connection_type: str) -> str: platform = "mssql" elif connection_type in ("athena"): platform = "athena" + elif connection_type.endswith("_jdbc"): + # e.g. convert trino_jdbc -> trino + platform = connection_type[: -len("_jdbc")] else: platform = connection_type return platform From 23277f8dc4cabc5252c8eafed58ed75a3b62e27d Mon Sep 17 00:00:00 2001 From: Davi Arnaut Date: Wed, 24 Jan 2024 17:36:30 -0800 Subject: [PATCH 434/792] fix(oidc settings): effective JWS algorithm setting (#9712) --- datahub-frontend/app/auth/AuthUtils.java | 3 + .../app/auth/sso/oidc/OidcConfigs.java | 4 +- datahub-frontend/play.gradle | 3 + .../test/security/OidcConfigurationTest.java | 24 +++++ .../linkedin/settings/global/OidcSettings.pdl | 9 +- .../auth-servlet-impl/build.gradle | 8 ++ .../authentication/AuthServiceController.java | 6 +- .../AuthServiceControllerTest.java | 96 +++++++++++++++++++ .../AuthServiceTestConfiguration.java | 32 +++++++ 9 files changed, 179 insertions(+), 6 deletions(-) create mode 100644 metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceControllerTest.java create mode 100644 metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 84488a43f253e..51bb784c61b3b 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -76,6 +76,9 @@ public class AuthUtils { public static final String USE_NONCE = "useNonce"; public static final String READ_TIMEOUT = "readTimeout"; public static final String EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "extractJwtAccessTokenClaims"; + // Retained for backwards compatibility + public static final String PREFERRED_JWS_ALGORITHM = "preferredJwsAlgorithm"; + public static final String PREFERRED_JWS_ALGORITHM_2 = "preferredJwsAlgorithm2"; /** * Determines whether the inbound request should be forward to downstream Metadata Service. 
Today, diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index bf3384527af11..5de4eba9cb679 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -226,8 +226,8 @@ public Builder from(final com.typesafe.config.Config configs, final String ssoSe extractJwtAccessTokenClaims = Optional.of(jsonNode.get(EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).asBoolean()); } - if (jsonNode.has(OIDC_PREFERRED_JWS_ALGORITHM)) { - preferredJwsAlgorithm = Optional.of(jsonNode.get(OIDC_PREFERRED_JWS_ALGORITHM).asText()); + if (jsonNode.has(PREFERRED_JWS_ALGORITHM_2)) { + preferredJwsAlgorithm = Optional.of(jsonNode.get(PREFERRED_JWS_ALGORITHM_2).asText()); } else { preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index 1e3a2767852d6..9bd77e5279a91 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -101,6 +101,9 @@ play { test { useJUnitPlatform() + testLogging.showStandardStreams = true + testLogging.exceptionFormat = 'full' + def playJava17CompatibleJvmArgs = [ "--add-opens=java.base/java.lang=ALL-UNNAMED", //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index c1147ae936b3a..8226d4e74cc21 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,6 @@ package security; +import static auth.AuthUtils.*; import static auth.sso.oidc.OidcConfigs.*; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,6 +25,7 @@ import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; +import org.json.JSONObject; public class OidcConfigurationTest { @@ -317,4 +319,26 @@ public void readTimeoutPropagation() { OidcProvider oidcProvider = new OidcProvider(oidcConfigs); assertEquals(10000, ((OidcClient) oidcProvider.client()).getConfiguration().getReadTimeout()); } + + @Test + public void readPreferredJwsAlgorithmPropagationFromConfig() { + final String SSO_SETTINGS_JSON_STR = new JSONObject().put(PREFERRED_JWS_ALGORITHM, "HS256").toString(); + CONFIG.withValue(OIDC_PREFERRED_JWS_ALGORITHM, ConfigValueFactory.fromAnyRef("RS256")); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG, SSO_SETTINGS_JSON_STR); + OidcConfigs oidcConfigs = new OidcConfigs(oidcConfigsBuilder); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + assertEquals("RS256", ((OidcClient) oidcProvider.client()).getConfiguration().getPreferredJwsAlgorithm().toString()); + } + + @Test + public void readPreferredJwsAlgorithmPropagationFromJSON() { + final String SSO_SETTINGS_JSON_STR = new JSONObject().put(PREFERRED_JWS_ALGORITHM, "Unused").put(PREFERRED_JWS_ALGORITHM_2, "HS256").toString(); + CONFIG.withValue(OIDC_PREFERRED_JWS_ALGORITHM, ConfigValueFactory.fromAnyRef("RS256")); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG, SSO_SETTINGS_JSON_STR); + OidcConfigs oidcConfigs = new OidcConfigs(oidcConfigsBuilder); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + assertEquals("HS256", ((OidcClient) 
oidcProvider.client()).getConfiguration().getPreferredJwsAlgorithm().toString()); + } } diff --git a/metadata-models/src/main/pegasus/com/linkedin/settings/global/OidcSettings.pdl b/metadata-models/src/main/pegasus/com/linkedin/settings/global/OidcSettings.pdl index d5b23c28cb227..f925505c8e54f 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/settings/global/OidcSettings.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/settings/global/OidcSettings.pdl @@ -90,7 +90,12 @@ record OidcSettings { extractJwtAccessTokenClaims: optional boolean /** - * ADVANCED. Which jws algorithm to use. + * ADVANCED. Which jws algorithm to use. Unused. */ preferredJwsAlgorithm: optional string -} \ No newline at end of file + + /** + * ADVANCED. Which jws algorithm to use. + */ + preferredJwsAlgorithm2: optional string +} diff --git a/metadata-service/auth-servlet-impl/build.gradle b/metadata-service/auth-servlet-impl/build.gradle index b8310bbd4ebc0..29e452472358b 100644 --- a/metadata-service/auth-servlet-impl/build.gradle +++ b/metadata-service/auth-servlet-impl/build.gradle @@ -18,4 +18,12 @@ dependencies { compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok + + testImplementation externalDependency.testng + testImplementation externalDependency.springBootTest +} + +test { + testLogging.showStandardStreams = true + testLogging.exceptionFormat = 'full' } diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index 430ed2d236219..fc283b7e986bb 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -72,7 +72,9 @@ public class AuthServiceController { private static final String USE_NONCE = "useNonce"; private static final String READ_TIMEOUT = "readTimeout"; private static final String EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "extractJwtAccessTokenClaims"; + // Retained for backwards compatibility private static final String PREFERRED_JWS_ALGORITHM = "preferredJwsAlgorithm"; + private static final String PREFERRED_JWS_ALGORITHM_2 = "preferredJwsAlgorithm2"; @Inject StatelessTokenService _statelessTokenService; @@ -514,8 +516,8 @@ private void buildOidcSettingsResponse(JSONObject json, final OidcSettings oidcS if (oidcSettings.hasExtractJwtAccessTokenClaims()) { json.put(EXTRACT_JWT_ACCESS_TOKEN_CLAIMS, oidcSettings.isExtractJwtAccessTokenClaims()); } - if (oidcSettings.hasPreferredJwsAlgorithm()) { - json.put(PREFERRED_JWS_ALGORITHM, oidcSettings.getPreferredJwsAlgorithm()); + if (oidcSettings.hasPreferredJwsAlgorithm2()) { + json.put(PREFERRED_JWS_ALGORITHM, oidcSettings.getPreferredJwsAlgorithm2()); } } } diff --git a/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceControllerTest.java b/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceControllerTest.java new file mode 100644 index 0000000000000..bb305ae16900c --- /dev/null +++ b/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceControllerTest.java @@ -0,0 +1,96 @@ +package com.datahub.auth.authentication; + +import static com.linkedin.metadata.Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME; +import static 
com.linkedin.metadata.Constants.GLOBAL_SETTINGS_URN; +import static org.mockito.Mockito.when; +import static org.testng.Assert.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.settings.global.GlobalSettingsInfo; +import com.linkedin.settings.global.OidcSettings; +import com.linkedin.settings.global.SsoSettings; +import java.io.IOException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Import; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.springframework.web.servlet.DispatcherServlet; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +@SpringBootTest(classes = {DispatcherServlet.class}) +@ComponentScan(basePackages = {"com.datahub.auth.authentication"}) +@Import({AuthServiceTestConfiguration.class}) +public class AuthServiceControllerTest extends AbstractTestNGSpringContextTests { + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } + + @Autowired private AuthServiceController authServiceController; + @Autowired private EntityService mockEntityService; + + private final String PREFERRED_JWS_ALGORITHM = "preferredJwsAlgorithm"; + + @Test + public void initTest() { + assertNotNull(authServiceController); + assertNotNull(mockEntityService); + } + + @Test + public void oldPreferredJwsAlgorithmIsNotReturned() throws IOException { + OidcSettings mockOidcSettings = + new OidcSettings() + .setEnabled(true) + .setClientId("1") + .setClientSecret("2") + .setDiscoveryUri("http://localhost") + .setPreferredJwsAlgorithm("test"); + SsoSettings mockSsoSettings = + new SsoSettings().setBaseUrl("http://localhost").setOidcSettings(mockOidcSettings); + GlobalSettingsInfo mockGlobalSettingsInfo = new GlobalSettingsInfo().setSso(mockSsoSettings); + + when(mockEntityService.getLatestAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME)) + .thenReturn(mockGlobalSettingsInfo); + + ResponseEntity httpResponse = authServiceController.getSsoSettings(null).join(); + assertEquals(httpResponse.getStatusCode(), HttpStatus.OK); + + JsonNode jsonNode = new ObjectMapper().readTree(httpResponse.getBody()); + assertFalse(jsonNode.has(PREFERRED_JWS_ALGORITHM)); + } + + @Test + public void newPreferredJwsAlgorithmIsReturned() throws IOException { + OidcSettings mockOidcSettings = + new OidcSettings() + .setEnabled(true) + .setClientId("1") + .setClientSecret("2") + .setDiscoveryUri("http://localhost") + .setPreferredJwsAlgorithm("jws1") + .setPreferredJwsAlgorithm2("jws2"); + SsoSettings mockSsoSettings = + new SsoSettings().setBaseUrl("http://localhost").setOidcSettings(mockOidcSettings); + GlobalSettingsInfo mockGlobalSettingsInfo = new GlobalSettingsInfo().setSso(mockSsoSettings); + + when(mockEntityService.getLatestAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME)) + .thenReturn(mockGlobalSettingsInfo); + + ResponseEntity httpResponse = authServiceController.getSsoSettings(null).join(); + 
assertEquals(httpResponse.getStatusCode(), HttpStatus.OK); + + JsonNode jsonNode = new ObjectMapper().readTree(httpResponse.getBody()); + assertTrue(jsonNode.has(PREFERRED_JWS_ALGORITHM)); + assertEquals(jsonNode.get(PREFERRED_JWS_ALGORITHM).asText(), "jws2"); + } +} diff --git a/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java b/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java new file mode 100644 index 0000000000000..428f14e67d137 --- /dev/null +++ b/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java @@ -0,0 +1,32 @@ +package com.datahub.auth.authentication; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.token.StatelessTokenService; +import com.datahub.authentication.user.NativeUserService; +import com.datahub.telemetry.TrackingService; +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.secret.SecretService; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.boot.test.mock.mockito.MockBean; + +@TestConfiguration +public class AuthServiceTestConfiguration { + @MockBean StatelessTokenService _statelessTokenService; + + @MockBean Authentication _systemAuthentication; + + @MockBean(name = "configurationProvider") + ConfigurationProvider _configProvider; + + @MockBean NativeUserService _nativeUserService; + + @MockBean EntityService _entityService; + + @MockBean SecretService _secretService; + + @MockBean InviteTokenService _inviteTokenService; + + @MockBean TrackingService _trackingService; +} From 53c7790f9aa56eeb6695d3fbf602b3b84a7283e4 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 25 Jan 2024 01:36:59 -0800 Subject: [PATCH 435/792] feat(ingest/metabase): Use new sql parser; reduce error reporting levels (#9714) --- .../src/datahub_airflow_plugin/_extractors.py | 4 +- .../src/datahub/ingestion/source/metabase.py | 100 ++++++++---------- .../powerbi/m_query/native_sql_parser.py | 4 +- .../ingestion/source/redshift/lineage.py | 4 +- .../src/datahub/ingestion/source/tableau.py | 11 +- .../src/datahub/utilities/sqlglot_lineage.py | 40 +++---- .../metabase/metabase_mces_golden.json | 32 ++++-- .../integration/metabase/setup/card.json | 2 +- .../integration/metabase/setup/card_1.json | 4 +- .../metabase/setup/dashboard_1.json | 4 +- .../tableau/test_tableau_ingest.py | 10 +- 11 files changed, 108 insertions(+), 107 deletions(-) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_extractors.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_extractors.py index f84b7b56f6119..32bbe88481636 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_extractors.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_extractors.py @@ -199,8 +199,8 @@ def _sql_extractor_extract(self: "SqlExtractor") -> TaskMetadata: platform=platform, platform_instance=None, env=builder.DEFAULT_ENV, - database=default_database, - schema=default_schema, + default_db=default_database, + default_schema=default_schema, ) self.log.debug(f"Got sql lineage {sql_parsing_result}") diff --git a/metadata-ingestion/src/datahub/ingestion/source/metabase.py 
b/metadata-ingestion/src/datahub/ingestion/source/metabase.py index 9f09a4322bb5d..af41a74f311f6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metabase.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metabase.py @@ -1,3 +1,4 @@ +import logging from datetime import datetime, timezone from functools import lru_cache from typing import Dict, Iterable, List, Optional, Tuple, Union @@ -7,7 +8,6 @@ import requests from pydantic import Field, validator from requests.models import HTTPError -from sqllineage.runner import LineageRunner import datahub.emitter.mce_builder as builder from datahub.configuration.source_common import DatasetLineageProviderConfigBase @@ -42,6 +42,9 @@ OwnershipTypeClass, ) from datahub.utilities import config_clean +from datahub.utilities.sqlglot_lineage import create_lineage_sql_parsed_result + +logger = logging.getLogger(__name__) DATASOURCE_URN_RECURSION_LIMIT = 5 @@ -225,7 +228,7 @@ def construct_dashboard_from_api_data( dashboard_response.raise_for_status() dashboard_details = dashboard_response.json() except HTTPError as http_error: - self.report.report_failure( + self.report.report_warning( key=f"metabase-dashboard-{dashboard_id}", reason=f"Unable to retrieve dashboard. " f"Reason: {str(http_error)}", ) @@ -293,7 +296,7 @@ def _get_ownership(self, creator_id: int) -> Optional[OwnershipClass]: ) return None # For cases when the error is not 404 but something else - self.report.report_failure( + self.report.report_warning( key=f"metabase-user-{creator_id}", reason=f"Unable to retrieve User info. " f"Reason: {str(http_error)}", ) @@ -348,7 +351,7 @@ def get_card_details_by_id(self, card_id: Union[int, str]) -> dict: card_response.raise_for_status() return card_response.json() except HTTPError as http_error: - self.report.report_failure( + self.report.report_warning( key=f"metabase-card-{card_id}", reason=f"Unable to retrieve Card info. " f"Reason: {str(http_error)}", ) @@ -357,7 +360,7 @@ def get_card_details_by_id(self, card_id: Union[int, str]) -> dict: def construct_card_from_api_data(self, card_data: dict) -> Optional[ChartSnapshot]: card_id = card_data.get("id") if card_id is None: - self.report.report_failure( + self.report.report_warning( key="metabase-card", reason=f"Unable to get Card id from card data {str(card_data)}", ) @@ -365,7 +368,7 @@ def construct_card_from_api_data(self, card_data: dict) -> Optional[ChartSnapsho card_details = self.get_card_details_by_id(card_id) if not card_details: - self.report.report_failure( + self.report.report_warning( key=f"metabase-card-{card_id}", reason="Unable to construct Card due to empty card details", ) @@ -482,7 +485,7 @@ def get_datasource_urn( self, card_details: dict, recursion_depth: int = 0 ) -> Optional[List]: if recursion_depth > DATASOURCE_URN_RECURSION_LIMIT: - self.report.report_failure( + self.report.report_warning( key=f"metabase-card-{card_details.get('id')}", reason="Unable to retrieve Card info. 
Reason: source table recursion depth exceeded", ) @@ -496,14 +499,13 @@ def get_datasource_urn( platform_instance, ) = self.get_datasource_from_id(datasource_id) if not platform: - self.report.report_failure( + self.report.report_warning( key=f"metabase-datasource-{datasource_id}", reason=f"Unable to detect platform for database id {datasource_id}", ) return None query_type = card_details.get("dataset_query", {}).get("type", {}) - source_tables = set() if query_type == "query": source_table_id = ( @@ -525,57 +527,40 @@ def get_datasource_urn( # the question is built directly from table in DB schema_name, table_name = self.get_source_table_from_id(source_table_id) if table_name: - source_tables.add( - f"{database_name + '.' if database_name else ''}{schema_name + '.' if schema_name else ''}{table_name}" - ) - else: - try: - raw_query = ( - card_details.get("dataset_query", {}) - .get("native", {}) - .get("query", "") - ) - parser = LineageRunner(raw_query) - - for table in parser.source_tables: - sources = str(table).split(".") - - source_db = sources[-3] if len(sources) > 2 else database_name - source_schema, source_table = sources[-2], sources[-1] - if source_schema == "": - source_schema = ( - database_schema - if database_schema is not None - else str(self.config.default_schema) + name_components = [database_name, schema_name, table_name] + return [ + builder.make_dataset_urn_with_platform_instance( + platform=platform, + name=".".join([v for v in name_components if v]), + platform_instance=platform_instance, + env=self.config.env, ) - - source_tables.add( - f"{source_db + '.' if source_db else ''}{source_schema}.{source_table}" - ) - except Exception as e: - self.report.report_failure( - key="metabase-query", - reason=f"Unable to retrieve lineage from query. " - f"Query: {raw_query} " - f"Reason: {str(e)} ", - ) - return None - - if platform == "snowflake": - source_tables = set(i.lower() for i in source_tables) - - # Create dataset URNs - dataset_urn = [ - builder.make_dataset_urn_with_platform_instance( + ] + else: + raw_query = ( + card_details.get("dataset_query", {}).get("native", {}).get("query", "") + ) + result = create_lineage_sql_parsed_result( + query=raw_query, + default_db=database_name, + default_schema=database_schema or self.config.default_schema, platform=platform, - name=name, platform_instance=platform_instance, env=self.config.env, + graph=self.ctx.graph, ) - for name in source_tables - ] + if result.debug_info.table_error: + logger.info( + f"Failed to parse lineage from query {raw_query}: " + f"{result.debug_info.table_error}" + ) + self.report.report_warning( + key="metabase-query", + reason=f"Unable to retrieve lineage from query: {raw_query}", + ) + return result.in_tables - return dataset_urn + return None @lru_cache(maxsize=None) def get_source_table_from_id( @@ -592,10 +577,9 @@ def get_source_table_from_id( return schema, name except HTTPError as http_error: - self.report.report_failure( + self.report.report_warning( key=f"metabase-table-{table_id}", - reason=f"Unable to retrieve source table. " - f"Reason: {str(http_error)}", + reason=f"Unable to retrieve source table. Reason: {str(http_error)}", ) return None, None @@ -641,7 +625,7 @@ def get_datasource_from_id( dataset_response.raise_for_status() dataset_json = dataset_response.json() except HTTPError as http_error: - self.report.report_failure( + self.report.report_warning( key=f"metabase-datasource-{datasource_id}", reason=f"Unable to retrieve Datasource. 
" f"Reason: {str(http_error)}", ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/native_sql_parser.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/native_sql_parser.py index 0afa8e7ff4564..56c9a4abe18ad 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/native_sql_parser.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/native_sql_parser.py @@ -69,8 +69,8 @@ def parse_custom_sql( return sqlglot_l.create_lineage_sql_parsed_result( query=sql_query, - schema=schema, - database=database, + default_schema=schema, + default_db=database, platform=platform, platform_instance=platform_instance, env=env, diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index 8135e1d44c102..3efef58737c6e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -167,8 +167,8 @@ def _get_sources_from_query( query=query, platform=LineageDatasetPlatform.REDSHIFT.value, platform_instance=self.config.platform_instance, - database=db_name, - schema=str(self.config.default_schema), + default_db=db_name, + default_schema=str(self.config.default_schema), graph=self.context.graph, env=self.config.env, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index 46694dfcc47d1..acdece14a6440 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -32,7 +32,6 @@ from urllib3 import Retry import datahub.emitter.mce_builder as builder -import datahub.utilities.sqlglot_lineage as sqlglot_l from datahub.configuration.common import ( AllowDenyPattern, ConfigModel, @@ -144,7 +143,11 @@ ViewPropertiesClass, ) from datahub.utilities import config_clean -from datahub.utilities.sqlglot_lineage import ColumnLineageInfo, SqlParsingResult +from datahub.utilities.sqlglot_lineage import ( + ColumnLineageInfo, + SqlParsingResult, + create_lineage_sql_parsed_result, +) from datahub.utilities.urns.dataset_urn import DatasetUrn logger: logging.Logger = logging.getLogger(__name__) @@ -1617,9 +1620,9 @@ def parse_custom_sql( f"Overridden info upstream_db={upstream_db}, platform_instance={platform_instance}, platform={platform}" ) - return sqlglot_l.create_lineage_sql_parsed_result( + return create_lineage_sql_parsed_result( query=query, - database=upstream_db, + default_db=upstream_db, platform=platform, platform_instance=platform_instance, env=env, diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index 46ca17609f3ea..abe4f82673777 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -1280,35 +1280,35 @@ def replace_cte_refs(node: sqlglot.exp.Expression) -> sqlglot.exp.Expression: def create_lineage_sql_parsed_result( query: str, - database: Optional[str], + default_db: Optional[str], platform: str, platform_instance: Optional[str], env: str, - schema: Optional[str] = None, + default_schema: Optional[str] = None, graph: Optional[DataHubGraph] = None, ) -> SqlParsingResult: - needs_close = False - try: - if graph: - schema_resolver = graph._make_schema_resolver( - platform=platform, - platform_instance=platform_instance, - 
env=env, - ) - else: - needs_close = True - schema_resolver = SchemaResolver( - platform=platform, - platform_instance=platform_instance, - env=env, - graph=None, - ) + if graph: + needs_close = False + schema_resolver = graph._make_schema_resolver( + platform=platform, + platform_instance=platform_instance, + env=env, + ) + else: + needs_close = True + schema_resolver = SchemaResolver( + platform=platform, + platform_instance=platform_instance, + env=env, + graph=None, + ) + try: return sqlglot_lineage( query, schema_resolver=schema_resolver, - default_db=database, - default_schema=schema, + default_db=default_db, + default_schema=default_schema, ) except Exception as e: return SqlParsingResult.make_from_error(e) diff --git a/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json b/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json index 0ba6afbd04fc9..9b143348fdf60 100644 --- a/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json +++ b/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json @@ -25,6 +25,9 @@ }, "chartUrl": "http://localhost:3000/card/1", "inputs": [ + { + "string": "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-data.public.customer,PROD)" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-data.public.payment,PROD)" } @@ -34,7 +37,7 @@ }, { "com.linkedin.pegasus2avro.chart.ChartQuery": { - "rawQuery": "SELECT\\n\\tcustomer.customer_id,\\n\\tfirst_name,\\n\\tlast_name,\\n\\tamount,\\n\\tpayment_date,\\n\\trental_id\\nFROM\\n\\tcustomer\\nINNER JOIN payment \\n ON payment.customer_id = customer.customer_id\\nORDER BY payment_date", + "rawQuery": "SELECT\n\tcustomer.customer_id,\n\tfirst_name,\n\tlast_name,\n\tamount,\n\tpayment_date,\n\trental_id\nFROM\n\tcustomer\nINNER JOIN payment \n ON payment.customer_id = customer.customer_id\nORDER BY payment_date", "type": "SQL" } }, @@ -57,7 +60,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -112,7 +116,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -141,6 +146,9 @@ }, "chartUrl": "http://localhost:3000/card/3", "inputs": [ + { + "string": "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-data.public.customer,PROD)" + }, { "string": "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-data.public.payment,PROD)" } @@ -167,7 +175,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -217,7 +226,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -232,7 +242,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -247,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -262,7 +274,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" } }, { @@ -277,7 +290,8 @@ }, "systemMetadata": { "lastObserved": 1636614000000, - "runId": "metabase-test" + "runId": "metabase-test", + "lastRunId": 
"no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/metabase/setup/card.json b/metadata-ingestion/tests/integration/metabase/setup/card.json index 83bff66e6c9f3..7ded73d02ad7d 100644 --- a/metadata-ingestion/tests/integration/metabase/setup/card.json +++ b/metadata-ingestion/tests/integration/metabase/setup/card.json @@ -172,7 +172,7 @@ "dataset_query": { "type": "native", "native": { - "query": "SELECT\\n\\tcustomer.customer_id,\\n\\tfirst_name,\\n\\tlast_name,\\n\\tamount,\\n\\tpayment_date,\\n\\trental_id\\nFROM\\n\\tcustomer\\nINNER JOIN payment \\n ON payment.customer_id = customer.customer_id\\nORDER BY payment_date", + "query": "SELECT\n\tcustomer.customer_id,\n\tfirst_name,\n\tlast_name,\n\tamount,\n\tpayment_date,\n\trental_id\nFROM\n\tcustomer\nINNER JOIN payment \n ON payment.customer_id = customer.customer_id\nORDER BY payment_date", "template-tags": {} }, "database": 2 diff --git a/metadata-ingestion/tests/integration/metabase/setup/card_1.json b/metadata-ingestion/tests/integration/metabase/setup/card_1.json index 01e35c5b30844..66c46a72997d0 100644 --- a/metadata-ingestion/tests/integration/metabase/setup/card_1.json +++ b/metadata-ingestion/tests/integration/metabase/setup/card_1.json @@ -177,7 +177,7 @@ "dataset_query": { "type": "native", "native": { - "query": "SELECT\\n\\tcustomer.customer_id,\\n\\tfirst_name,\\n\\tlast_name,\\n\\tamount,\\n\\tpayment_date,\\n\\trental_id\\nFROM\\n\\tcustomer\\nINNER JOIN payment \\n ON payment.customer_id = customer.customer_id\\nORDER BY payment_date", + "query": "SELECT\n\tcustomer.customer_id,\n\tfirst_name,\n\tlast_name,\n\tamount,\n\tpayment_date,\n\trental_id\nFROM\n\tcustomer\nINNER JOIN payment \n ON payment.customer_id = customer.customer_id\nORDER BY payment_date", "template-tags": {} }, "database": 2 @@ -198,4 +198,4 @@ "collection": null, "created_at": "2021-12-13T17:46:32.77", "public_uuid": null -} \ No newline at end of file +} diff --git a/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json b/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json index 0b232cd220045..288087a67da6d 100644 --- a/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json +++ b/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json @@ -171,7 +171,7 @@ "dataset_query": { "type": "native", "native": { - "query": "SELECT\\n\\tcustomer.customer_id,\\n\\tfirst_name,\\n\\tlast_name,\\n\\tamount,\\n\\tpayment_date,\\n\\trental_id\\nFROM\\n\\tcustomer\\nINNER JOIN payment \\n ON payment.customer_id = customer.customer_id\\nORDER BY payment_date", + "query": "SELECT\n\tcustomer.customer_id,\n\tfirst_name,\n\tlast_name,\n\tamount,\n\tpayment_date,\n\trental_id\nFROM\n\tcustomer\nINNER JOIN payment \n ON payment.customer_id = customer.customer_id\nORDER BY payment_date", "template-tags": {} }, "database": 2 @@ -330,4 +330,4 @@ "created_at": "2021-12-13T17:46:48.185", "public_uuid": null, "points_of_interest": null -} \ No newline at end of file +} diff --git a/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py b/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py index 90fa71013338d..474228e9c9fc4 100644 --- a/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py +++ b/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py @@ -812,16 +812,16 @@ def test_tableau_unsupported_csql(mock_datahub_graph): database_override_map={"production database": "prod"} ) - with 
mock.patch("datahub.ingestion.source.tableau.sqlglot_l") as sqlglot_lineage: - - sqlglot_lineage.create_lineage_sql_parsed_result.return_value = SqlParsingResult( # type:ignore + with mock.patch( + "datahub.ingestion.source.tableau.create_lineage_sql_parsed_result", + return_value=SqlParsingResult( in_tables=[ "urn:li:dataset:(urn:li:dataPlatform:bigquery,my_bigquery_project.invent_dw.userdetail,PROD)" ], out_tables=[], column_lineage=None, - ) - + ), + ): source = TableauSource(config=config, ctx=context) lineage = source._create_lineage_from_unsupported_csql( From f83a2fab4415bd31f88cae1e05384282ab4d955c Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Thu, 25 Jan 2024 18:48:41 +0530 Subject: [PATCH 436/792] fix(ingestion/bigquery): Table-view-snapshot Lineage Bug fix (#9579) Co-authored-by: Aseem Bansal Co-authored-by: Harshal Sheth --- .../ingestion/source/bigquery_v2/bigquery.py | 133 ++++++++++++++++-- .../source/bigquery_v2/bigquery_config.py | 9 ++ .../source/bigquery_v2/bigquery_report.py | 3 + .../source/bigquery_v2/bigquery_schema.py | 82 ++++++++++- .../ingestion/source/bigquery_v2/lineage.py | 72 ++++++++-- .../ingestion/source/bigquery_v2/queries.py | 56 ++++++++ .../ingestion/source/common/subtypes.py | 1 + .../tests/unit/test_bigquery_source.py | 91 +++++++++++- 8 files changed, 416 insertions(+), 31 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 3704eae96aece..b8bc07b9a3559 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -47,6 +47,7 @@ BigqueryProject, BigQuerySchemaApi, BigqueryTable, + BigqueryTableSnapshot, BigqueryView, ) from datahub.ingestion.source.bigquery_v2.common import ( @@ -234,7 +235,7 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): run_id=self.ctx.run_id, ) - # For database, schema, tables, views, etc + # For database, schema, tables, views, snapshots etc self.lineage_extractor = BigqueryLineageExtractor( config, self.report, @@ -282,8 +283,12 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): # Maps project -> view_ref, so we can find all views in a project self.view_refs_by_project: Dict[str, Set[str]] = defaultdict(set) + # Maps project -> snapshot_ref, so we can find all snapshots in a project + self.snapshot_refs_by_project: Dict[str, Set[str]] = defaultdict(set) # Maps view ref -> actual sql self.view_definitions: FileBackedDict[str] = FileBackedDict() + # Maps snapshot ref -> Snapshot + self.snapshots_by_ref: FileBackedDict[BigqueryTableSnapshot] = FileBackedDict() self.add_config_to_report() atexit.register(cleanup, config) @@ -303,6 +308,10 @@ def connectivity_test(client: bigquery.Client) -> CapabilityReport: else: return CapabilityReport(capable=True) + @property + def store_table_refs(self): + return self.config.include_table_lineage or self.config.include_usage_statistics + @staticmethod def metadata_read_capability_test( project_ids: List[str], config: BigQueryV2Config @@ -453,6 +462,7 @@ def _init_schema_resolver(self) -> SchemaResolver: self.config.include_schema_metadata and self.config.include_tables and self.config.include_views + and self.config.include_table_snapshots ) if schema_resolution_required and not schema_ingestion_enabled: @@ -567,6 +577,8 @@ def get_workunits_internal(self) -> 
Iterable[MetadataWorkUnit]:
             self.sql_parser_schema_resolver,
             self.view_refs_by_project,
             self.view_definitions,
+            self.snapshot_refs_by_project,
+            self.snapshots_by_ref,
             self.table_refs,
         )

@@ -603,6 +615,7 @@ def _process_project(
     ) -> Iterable[MetadataWorkUnit]:
         db_tables: Dict[str, List[BigqueryTable]] = {}
         db_views: Dict[str, List[BigqueryView]] = {}
+        db_snapshots: Dict[str, List[BigqueryTableSnapshot]] = {}

         project_id = bigquery_project.id
         try:
@@ -651,9 +664,9 @@ def _process_project(
                 self.report.report_dropped(f"{bigquery_dataset.name}.*")
                 continue
             try:
-                # db_tables and db_views are populated in the this method
+                # db_tables, db_views, and db_snapshots are populated in this method
                 yield from self._process_schema(
-                    project_id, bigquery_dataset, db_tables, db_views
+                    project_id, bigquery_dataset, db_tables, db_views, db_snapshots
                 )

             except Exception as e:
@@ -684,6 +697,7 @@ def _process_schema(
         bigquery_dataset: BigqueryDataset,
         db_tables: Dict[str, List[BigqueryTable]],
         db_views: Dict[str, List[BigqueryView]],
+        db_snapshots: Dict[str, List[BigqueryTableSnapshot]],
     ) -> Iterable[MetadataWorkUnit]:
         dataset_name = bigquery_dataset.name

@@ -692,7 +706,11 @@ def _process_schema(
         )

         columns = None
-        if self.config.include_tables or self.config.include_views:
+        if (
+            self.config.include_tables
+            or self.config.include_views
+            or self.config.include_table_snapshots
+        ):
             columns = self.bigquery_data_dictionary.get_columns_for_dataset(
                 project_id=project_id,
                 dataset_name=dataset_name,
@@ -713,7 +731,7 @@ def _process_schema(
                 project_id=project_id,
                 dataset_name=dataset_name,
             )
-        elif self.config.include_table_lineage or self.config.include_usage_statistics:
+        elif self.store_table_refs:
             # Need table_refs to calculate lineage and usage
             for table_item in self.bigquery_data_dictionary.list_tables(
                 dataset_name, project_id
@@ -738,7 +756,10 @@ def _process_schema(
         if self.config.include_views:
             db_views[dataset_name] = list(
                 self.bigquery_data_dictionary.get_views_for_dataset(
-                    project_id, dataset_name, self.config.is_profiling_enabled()
+                    project_id,
+                    dataset_name,
+                    self.config.is_profiling_enabled(),
+                    self.report,
                 )
             )

@@ -751,6 +772,25 @@ def _process_schema(
                 dataset_name=dataset_name,
             )

+        if self.config.include_table_snapshots:
+            db_snapshots[dataset_name] = list(
+                self.bigquery_data_dictionary.get_snapshots_for_dataset(
+                    project_id,
+                    dataset_name,
+                    self.config.is_profiling_enabled(),
+                    self.report,
+                )
+            )
+
+            for snapshot in db_snapshots[dataset_name]:
+                snapshot_columns = columns.get(snapshot.name, []) if columns else []
+                yield from self._process_snapshot(
+                    snapshot=snapshot,
+                    columns=snapshot_columns,
+                    project_id=project_id,
+                    dataset_name=dataset_name,
+                )
+
     # This method is used to generate the ignore list for datatypes the profiler doesn't support we have to do it here
     # because the profiler doesn't have access to columns
     def generate_profile_ignore_list(self, columns: List[BigqueryColumn]) -> List[str]:
@@ -778,7 +818,7 @@ def _process_table(
             self.report.report_dropped(table_identifier.raw_table_name())
             return

-        if self.config.include_table_lineage or self.config.include_usage_statistics:
+        if self.store_table_refs:
             self.table_refs.add(
                 str(BigQueryTableRef(table_identifier).get_sanitized_table_ref())
             )
@@ -827,7 +867,7 @@ def _process_view(
             self.report.report_dropped(table_identifier.raw_table_name())
             return

-        if self.config.include_table_lineage or self.config.include_usage_statistics:
+        if self.store_table_refs:
             table_ref = str(
BigQueryTableRef(table_identifier).get_sanitized_table_ref() ) @@ -849,6 +889,48 @@ def _process_view( dataset_name=dataset_name, ) + def _process_snapshot( + self, + snapshot: BigqueryTableSnapshot, + columns: List[BigqueryColumn], + project_id: str, + dataset_name: str, + ) -> Iterable[MetadataWorkUnit]: + table_identifier = BigqueryTableIdentifier( + project_id, dataset_name, snapshot.name + ) + + self.report.snapshots_scanned += 1 + + if not self.config.table_snapshot_pattern.allowed( + table_identifier.raw_table_name() + ): + self.report.report_dropped(table_identifier.raw_table_name()) + return + + snapshot.columns = columns + snapshot.column_count = len(columns) + if not snapshot.column_count: + logger.warning( + f"Snapshot doesn't have any column or unable to get columns for table: {table_identifier}" + ) + + if self.store_table_refs: + table_ref = str( + BigQueryTableRef(table_identifier).get_sanitized_table_ref() + ) + self.table_refs.add(table_ref) + if snapshot.base_table_identifier: + self.snapshot_refs_by_project[project_id].add(table_ref) + self.snapshots_by_ref[table_ref] = snapshot + + yield from self.gen_snapshot_dataset_workunits( + table=snapshot, + columns=columns, + project_id=project_id, + dataset_name=dataset_name, + ) + def gen_table_dataset_workunits( self, table: BigqueryTable, @@ -933,9 +1015,34 @@ def gen_view_dataset_workunits( aspect=view_properties_aspect, ).as_workunit() + def gen_snapshot_dataset_workunits( + self, + table: BigqueryTableSnapshot, + columns: List[BigqueryColumn], + project_id: str, + dataset_name: str, + ) -> Iterable[MetadataWorkUnit]: + custom_properties: Dict[str, str] = {} + if table.ddl: + custom_properties["snapshot_ddl"] = table.ddl + if table.snapshot_time: + custom_properties["snapshot_time"] = str(table.snapshot_time) + if table.size_in_bytes: + custom_properties["size_in_bytes"] = str(table.size_in_bytes) + if table.rows_count: + custom_properties["rows_count"] = str(table.rows_count) + yield from self.gen_dataset_workunits( + table=table, + columns=columns, + project_id=project_id, + dataset_name=dataset_name, + sub_types=[DatasetSubTypes.BIGQUERY_TABLE_SNAPSHOT], + custom_properties=custom_properties, + ) + def gen_dataset_workunits( self, - table: Union[BigqueryTable, BigqueryView], + table: Union[BigqueryTable, BigqueryView, BigqueryTableSnapshot], columns: List[BigqueryColumn], project_id: str, dataset_name: str, @@ -1041,6 +1148,9 @@ def gen_schema_fields(self, columns: List[BigqueryColumn]) -> List[SchemaField]: # TODO: Refractor this such that # converter = HiveColumnToAvroConverter(struct_type_separator=" "); # converter.get_schema_fields_for_hive_column(...) 
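        # A sketch, not part of the patch: it assumes the constructor-style API
        # that the TODO above imagines, which does not exist yet, so the call
        # below is hypothetical:
        #
        #     converter = HiveColumnToAvroConverter(struct_type_separator=" ")
        #     schema_fields = converter.get_schema_fields_for_hive_column(
        #         col.name, col.data_type.lower()
        #     )
        #
        # Until then, the added lines that follow save the class-level separator
        # and restore it after use, so other callers of the shared attribute see
        # the original value.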
+ original_struct_type_separator = ( + HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR + ) HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR = " " _COMPLEX_TYPE = re.compile("^(struct|array)") last_id = -1 @@ -1101,12 +1211,15 @@ def gen_schema_fields(self, columns: List[BigqueryColumn]) -> List[SchemaField]: ) schema_fields.append(field) last_id = col.ordinal_position + HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR = ( + original_struct_type_separator + ) return schema_fields def gen_schema_metadata( self, dataset_urn: str, - table: Union[BigqueryTable, BigqueryView], + table: Union[BigqueryTable, BigqueryView, BigqueryTableSnapshot], columns: List[BigqueryColumn], dataset_name: str, ) -> MetadataWorkUnit: diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index bb14295bc38a8..2f4978d49e687 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -148,6 +148,15 @@ class BigQueryV2Config( " because the project id is represented as the top-level container.", ) + include_table_snapshots: Optional[bool] = Field( + default=True, description="Whether table snapshots should be ingested." + ) + + table_snapshot_pattern: AllowDenyPattern = Field( + default=AllowDenyPattern.allow_all(), + description="Regex patterns for table snapshots to filter in ingestion. Specify regex to match the entire snapshot name in database.schema.snapshot format. e.g. to match all snapshots starting with customer in Customer database and public schema, use the regex 'Customer.public.customer.*'", + ) + debug_include_full_payloads: bool = Field( default=False, description="Include full payload into events. 
It is only for debugging and internal use.", diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py index 69913b383af87..ad7b86219e7c1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py @@ -25,6 +25,7 @@ class BigQuerySchemaApiPerfReport(Report): get_tables_for_dataset: PerfTimer = field(default_factory=PerfTimer) list_tables: PerfTimer = field(default_factory=PerfTimer) get_views_for_dataset: PerfTimer = field(default_factory=PerfTimer) + get_snapshots_for_dataset: PerfTimer = field(default_factory=PerfTimer) @dataclass @@ -119,6 +120,8 @@ class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowR num_usage_query_hash_collisions: int = 0 num_operational_stats_workunits_emitted: int = 0 + snapshots_scanned: int = 0 + num_view_definitions_parsed: int = 0 num_view_definitions_failed_parsing: int = 0 num_view_definitions_failed_column_parsing: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py index 7edc8656360bb..d918782691c77 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py @@ -106,6 +106,14 @@ class BigqueryView(BaseView): materialized: bool = False +@dataclass +class BigqueryTableSnapshot(BaseTable): + # Upstream table identifier + base_table_identifier: Optional[BigqueryTableIdentifier] = None + snapshot_time: Optional[datetime] = None + columns: List[BigqueryColumn] = field(default_factory=list) + + @dataclass class BigqueryDataset: name: str @@ -116,6 +124,7 @@ class BigqueryDataset: comment: Optional[str] = None tables: List[BigqueryTable] = field(default_factory=list) views: List[BigqueryView] = field(default_factory=list) + snapshots: List[BigqueryTableSnapshot] = field(default_factory=list) columns: List[BigqueryColumn] = field(default_factory=list) @@ -289,10 +298,11 @@ def get_views_for_dataset( project_id: str, dataset_name: str, has_data_read: bool, - report: Optional[BigQueryV2Report] = None, + report: BigQueryV2Report, ) -> Iterator[BigqueryView]: with self.report.get_views_for_dataset as current_timer: if has_data_read: + # If profiling is enabled cur = self.get_query_result( BigqueryQuery.views_for_dataset.format( project_id=project_id, dataset_name=dataset_name @@ -315,11 +325,10 @@ def get_views_for_dataset( f"Error while processing view {view_name}", exc_info=True, ) - if report: - report.report_warning( - "metadata-extraction", - f"Failed to get view {view_name}: {e}", - ) + report.report_warning( + "metadata-extraction", + f"Failed to get view {view_name}: {e}", + ) @staticmethod def _make_bigquery_view(view: bigquery.Row) -> BigqueryView: @@ -334,6 +343,8 @@ def _make_bigquery_view(view: bigquery.Row) -> BigqueryView: comment=view.comment, view_definition=view.view_definition, materialized=view.table_type == BigqueryTableType.MATERIALIZED_VIEW, + size_in_bytes=view.get("size_bytes"), + rows_count=view.get("row_count"), ) def get_columns_for_dataset( @@ -429,3 +440,62 @@ def get_columns_for_table( last_seen_table = column.table_name return columns + + def get_snapshots_for_dataset( + self, + project_id: str, + dataset_name: str, + has_data_read: bool, + 
report: BigQueryV2Report, + ) -> Iterator[BigqueryTableSnapshot]: + with self.report.get_snapshots_for_dataset as current_timer: + if has_data_read: + # If profiling is enabled + cur = self.get_query_result( + BigqueryQuery.snapshots_for_dataset.format( + project_id=project_id, dataset_name=dataset_name + ), + ) + else: + cur = self.get_query_result( + BigqueryQuery.snapshots_for_dataset_without_data_read.format( + project_id=project_id, dataset_name=dataset_name + ), + ) + + for table in cur: + try: + with current_timer.pause(): + yield BigQuerySchemaApi._make_bigquery_table_snapshot(table) + except Exception as e: + snapshot_name = f"{project_id}.{dataset_name}.{table.table_name}" + logger.warning( + f"Error while processing snapshot {snapshot_name}", + exc_info=True, + ) + report.report_warning( + "metadata-extraction", + f"Failed to get snapshot {snapshot_name}: {e}", + ) + + @staticmethod + def _make_bigquery_table_snapshot(snapshot: bigquery.Row) -> BigqueryTableSnapshot: + return BigqueryTableSnapshot( + name=snapshot.table_name, + created=snapshot.created, + last_altered=datetime.fromtimestamp( + snapshot.get("last_altered") / 1000, tz=timezone.utc + ) + if snapshot.get("last_altered") is not None + else snapshot.created, + comment=snapshot.comment, + ddl=snapshot.ddl, + snapshot_time=snapshot.snapshot_time, + size_in_bytes=snapshot.get("size_bytes"), + rows_count=snapshot.get("row_count"), + base_table_identifier=BigqueryTableIdentifier( + project_id=snapshot.base_table_catalog, + dataset=snapshot.base_table_schema, + table=snapshot.base_table_name, + ), + ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index b44b06feb95af..7db36867b4e69 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -37,7 +37,10 @@ ) from datahub.ingestion.source.bigquery_v2.bigquery_config import BigQueryV2Config from datahub.ingestion.source.bigquery_v2.bigquery_report import BigQueryV2Report -from datahub.ingestion.source.bigquery_v2.bigquery_schema import BigQuerySchemaApi +from datahub.ingestion.source.bigquery_v2.bigquery_schema import ( + BigQuerySchemaApi, + BigqueryTableSnapshot, +) from datahub.ingestion.source.bigquery_v2.common import BQ_DATETIME_FORMAT from datahub.ingestion.source.bigquery_v2.queries import ( BQ_FILTER_RULE_TEMPLATE_V2_LINEAGE, @@ -198,6 +201,28 @@ def make_lineage_edges_from_parsing_result( return list(table_edges.values()) + +def make_lineage_edge_for_snapshot( + snapshot: BigqueryTableSnapshot, +) -> Optional[LineageEdge]: + if snapshot.base_table_identifier: + base_table_name = str( + BigQueryTableRef.from_bigquery_table(snapshot.base_table_identifier) + ) + return LineageEdge( + table=base_table_name, + column_mapping=frozenset( + LineageEdgeColumnMapping( + out_column=column.field_path, + in_columns=frozenset([column.field_path]), + ) + for column in snapshot.columns + ), + auditStamp=datetime.now(timezone.utc), + type=DatasetLineageTypeClass.TRANSFORMED, + ) + return None + + class BigqueryLineageExtractor: def __init__( self, @@ -256,27 +281,35 @@ def get_lineage_workunits( sql_parser_schema_resolver: SchemaResolver, view_refs_by_project: Dict[str, Set[str]], view_definitions: FileBackedDict[str], + snapshot_refs_by_project: Dict[str, Set[str]], + snapshots_by_ref: FileBackedDict[BigqueryTableSnapshot], table_refs: Set[str], ) -> Iterable[MetadataWorkUnit]: if
not self._should_ingest_lineage(): return - views_skip_audit_log_lineage: Set[str] = set() - if self.config.lineage_parse_view_ddl: - view_lineage: Dict[str, Set[LineageEdge]] = {} - for project in projects: + datasets_skip_audit_log_lineage: Set[str] = set() + dataset_lineage: Dict[str, Set[LineageEdge]] = {} + for project in projects: + self.populate_snapshot_lineage( + dataset_lineage, + snapshot_refs_by_project[project], + snapshots_by_ref, + ) + + if self.config.lineage_parse_view_ddl: self.populate_view_lineage_with_sql_parsing( - view_lineage, + dataset_lineage, view_refs_by_project[project], view_definitions, sql_parser_schema_resolver, project, ) - views_skip_audit_log_lineage.update(view_lineage.keys()) - for lineage_key in view_lineage.keys(): - yield from self.gen_lineage_workunits_for_table( - view_lineage, BigQueryTableRef.from_string_name(lineage_key) - ) + datasets_skip_audit_log_lineage.update(dataset_lineage.keys()) + for lineage_key in dataset_lineage.keys(): + yield from self.gen_lineage_workunits_for_table( + dataset_lineage, BigQueryTableRef.from_string_name(lineage_key) + ) if self.config.use_exported_bigquery_audit_metadata: projects = ["*"] # project_id not used when using exported metadata @@ -286,7 +319,7 @@ def get_lineage_workunits( yield from self.generate_lineage( project, sql_parser_schema_resolver, - views_skip_audit_log_lineage, + datasets_skip_audit_log_lineage, table_refs, ) @@ -300,7 +333,7 @@ def generate_lineage( self, project_id: str, sql_parser_schema_resolver: SchemaResolver, - views_skip_audit_log_lineage: Set[str], + datasets_skip_audit_log_lineage: Set[str], table_refs: Set[str], ) -> Iterable[MetadataWorkUnit]: logger.info(f"Generate lineage for {project_id}") @@ -338,7 +371,7 @@ def generate_lineage( # as they may contain indirectly referenced tables. 
if ( lineage_key not in table_refs - or lineage_key in views_skip_audit_log_lineage + or lineage_key in datasets_skip_audit_log_lineage ): continue @@ -387,6 +420,17 @@ def populate_view_lineage_with_sql_parsing( ) ) + def populate_snapshot_lineage( + self, + snapshot_lineage: Dict[str, Set[LineageEdge]], + snapshot_refs: Set[str], + snapshots_by_ref: FileBackedDict[BigqueryTableSnapshot], + ) -> None: + for snapshot in snapshot_refs: + lineage_edge = make_lineage_edge_for_snapshot(snapshots_by_ref[snapshot]) + if lineage_edge: + snapshot_lineage[snapshot] = {lineage_edge} + def gen_lineage_workunits_for_table( self, lineage: Dict[str, Set[LineageEdge]], table_ref: BigQueryTableRef ) -> Iterable[MetadataWorkUnit]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/queries.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/queries.py index 67fcc33cdf218..86971fce36a53 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/queries.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/queries.py @@ -157,6 +157,62 @@ class BigqueryQuery: table_name ASC """ + snapshots_for_dataset: str = f""" +SELECT + t.table_catalog as table_catalog, + t.table_schema as table_schema, + t.table_name as table_name, + t.table_type as table_type, + t.creation_time as created, + t.is_insertable_into, + t.ddl, + t.snapshot_time_ms as snapshot_time, + t.base_table_catalog, + t.base_table_schema, + t.base_table_name, + ts.last_modified_time as last_altered, + tos.OPTION_VALUE as comment, + ts.row_count, + ts.size_bytes +FROM + `{{project_id}}`.`{{dataset_name}}`.INFORMATION_SCHEMA.TABLES t + join `{{project_id}}`.`{{dataset_name}}`.__TABLES__ as ts on ts.table_id = t.TABLE_NAME + left join `{{project_id}}`.`{{dataset_name}}`.INFORMATION_SCHEMA.TABLE_OPTIONS as tos on t.table_schema = tos.table_schema + and t.TABLE_NAME = tos.TABLE_NAME + and tos.OPTION_NAME = "description" +WHERE + table_type = '{BigqueryTableType.SNAPSHOT}' +order by + table_schema ASC, + table_name ASC +""" + + snapshots_for_dataset_without_data_read: str = f""" +SELECT + t.table_catalog as table_catalog, + t.table_schema as table_schema, + t.table_name as table_name, + t.table_type as table_type, + t.creation_time as created, + t.is_insertable_into, + t.ddl, + t.snapshot_time_ms as snapshot_time, + t.base_table_catalog, + t.base_table_schema, + t.base_table_name, + tos.OPTION_VALUE as comment, +FROM + `{{project_id}}`.`{{dataset_name}}`.INFORMATION_SCHEMA.TABLES t + left join `{{project_id}}`.`{{dataset_name}}`.INFORMATION_SCHEMA.TABLE_OPTIONS as tos on t.table_schema = tos.table_schema + and t.TABLE_NAME = tos.TABLE_NAME + and tos.OPTION_NAME = "description" +WHERE + table_type = '{BigqueryTableType.SNAPSHOT}' +order by + table_schema ASC, + table_name ASC +""" + columns_for_dataset: str = """ select c.table_catalog as table_catalog, diff --git a/metadata-ingestion/src/datahub/ingestion/source/common/subtypes.py b/metadata-ingestion/src/datahub/ingestion/source/common/subtypes.py index 741b4789bef21..3296a8fb29354 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/common/subtypes.py +++ b/metadata-ingestion/src/datahub/ingestion/source/common/subtypes.py @@ -15,6 +15,7 @@ class DatasetSubTypes(str, Enum): SALESFORCE_CUSTOM_OBJECT = "Custom Object" SALESFORCE_STANDARD_OBJECT = "Object" POWERBI_DATASET_TABLE = "PowerBI Dataset Table" + BIGQUERY_TABLE_SNAPSHOT = "Bigquery Table Snapshot" # TODO: Create separate entity... 
NOTEBOOK = "Notebook" diff --git a/metadata-ingestion/tests/unit/test_bigquery_source.py b/metadata-ingestion/tests/unit/test_bigquery_source.py index 3cdb73d77d0a1..42d65fdf02683 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_source.py +++ b/metadata-ingestion/tests/unit/test_bigquery_source.py @@ -11,6 +11,7 @@ from google.cloud.bigquery.table import Row, TableListItem from datahub.configuration.common import AllowDenyPattern +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.source.bigquery_v2.bigquery import BigqueryV2Source from datahub.ingestion.source.bigquery_v2.bigquery_audit import ( @@ -27,6 +28,7 @@ BigqueryDataset, BigqueryProject, BigQuerySchemaApi, + BigqueryTableSnapshot, BigqueryView, ) from datahub.ingestion.source.bigquery_v2.lineage import ( @@ -34,7 +36,10 @@ LineageEdgeColumnMapping, ) from datahub.metadata.com.linkedin.pegasus2avro.dataset import ViewProperties -from datahub.metadata.schema_classes import MetadataChangeProposalClass +from datahub.metadata.schema_classes import ( + DatasetPropertiesClass, + MetadataChangeProposalClass, +) def test_bigquery_uri(): @@ -769,6 +774,7 @@ def test_get_views_for_dataset( project_id="test-project", dataset_name="test-dataset", has_data_read=False, + report=BigQueryV2Report(), ) assert list(views) == [bigquery_view_1, bigquery_view_2] @@ -810,6 +816,89 @@ def test_gen_view_dataset_workunits( ) +@pytest.fixture +def bigquery_snapshot() -> BigqueryTableSnapshot: + now = datetime.now(tz=timezone.utc) + return BigqueryTableSnapshot( + name="table-snapshot", + created=now - timedelta(days=10), + last_altered=now - timedelta(hours=1), + comment="comment1", + ddl="CREATE SNAPSHOT TABLE 1", + size_in_bytes=None, + rows_count=None, + snapshot_time=now - timedelta(days=10), + base_table_identifier=BigqueryTableIdentifier( + project_id="test-project", + dataset="test-dataset", + table="test-table", + ), + ) + + +@patch.object(BigQuerySchemaApi, "get_query_result") +@patch.object(BigQueryV2Config, "get_bigquery_client") +def test_get_snapshots_for_dataset( + get_bq_client_mock: Mock, + query_mock: Mock, + bigquery_snapshot: BigqueryTableSnapshot, +) -> None: + client_mock = MagicMock() + get_bq_client_mock.return_value = client_mock + assert bigquery_snapshot.last_altered + assert bigquery_snapshot.base_table_identifier + row1 = create_row( + dict( + table_name=bigquery_snapshot.name, + created=bigquery_snapshot.created, + last_altered=bigquery_snapshot.last_altered.timestamp() * 1000, + comment=bigquery_snapshot.comment, + ddl=bigquery_snapshot.ddl, + snapshot_time=bigquery_snapshot.snapshot_time, + table_type="SNAPSHOT", + base_table_catalog=bigquery_snapshot.base_table_identifier.project_id, + base_table_schema=bigquery_snapshot.base_table_identifier.dataset, + base_table_name=bigquery_snapshot.base_table_identifier.table, + ) + ) + query_mock.return_value = [row1] + bigquery_data_dictionary = BigQuerySchemaApi( + BigQueryV2Report().schema_api_perf, client_mock + ) + + snapshots = bigquery_data_dictionary.get_snapshots_for_dataset( + project_id="test-project", + dataset_name="test-dataset", + has_data_read=False, + report=BigQueryV2Report(), + ) + assert list(snapshots) == [bigquery_snapshot] + + +@patch.object(BigQueryV2Config, "get_bigquery_client") +def test_gen_snapshot_dataset_workunits(get_bq_client_mock, bigquery_snapshot): + project_id = "test-project" + dataset_name = "test-dataset" + config = BigQueryV2Config.parse_obj( + { + 
"project_id": project_id, + } + ) + source: BigqueryV2Source = BigqueryV2Source( + config=config, ctx=PipelineContext(run_id="test") + ) + + gen = source.gen_snapshot_dataset_workunits( + bigquery_snapshot, [], project_id, dataset_name + ) + mcp = cast(MetadataChangeProposalWrapper, list(gen)[2].metadata) + dataset_properties = cast(DatasetPropertiesClass, mcp.aspect) + assert dataset_properties.customProperties["snapshot_ddl"] == bigquery_snapshot.ddl + assert dataset_properties.customProperties["snapshot_time"] == str( + bigquery_snapshot.snapshot_time + ) + + @pytest.mark.parametrize( "table_name, expected_table_prefix, expected_shard", [ From a78c6899a2cde4277d854b264feef313f929531d Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Thu, 25 Jan 2024 10:12:01 -0500 Subject: [PATCH 437/792] feat(ui) Add structured properties support in the UI (#9695) --- datahub-web-react/src/Mocks.tsx | 5 + .../components/SchemaDescriptionField.tsx | 14 +- .../components/legacy/DescriptionModal.tsx | 2 +- .../shared/components/styled/EntityIcon.tsx | 24 ++ .../containers/profile/EntityProfile.tsx | 29 +-- .../profile/sidebar/EntitySidebar.tsx | 4 +- .../profile/sidebar/ProfileSidebar.tsx | 77 +++++++ .../shared/tabs/Dataset/Schema/SchemaTab.tsx | 10 +- .../tabs/Dataset/Schema/SchemaTable.tsx | 103 ++++++--- .../Schema/components/ChildCountLabel.tsx | 32 +++ .../Schema/components/PropertiesColumn.tsx | 30 +++ .../Schema/components/PropertyTypeLabel.tsx | 39 ++++ .../SchemaFieldDrawer/DrawerHeader.tsx | 106 +++++++++ .../SchemaFieldDrawer/FieldDescription.tsx | 115 ++++++++++ .../SchemaFieldDrawer/FieldHeader.tsx | 60 +++++ .../SchemaFieldDrawer/FieldProperties.tsx | 70 ++++++ .../SchemaFieldDrawer/FieldTags.tsx | 33 +++ .../SchemaFieldDrawer/FieldTerms.tsx | 34 +++ .../SchemaFieldDrawer/FieldUsageStats.tsx | 59 +++++ .../SchemaFieldDrawer/SchemaFieldDrawer.tsx | 83 +++++++ .../SchemaFieldDrawer/components.ts | 12 + .../Schema/utils/useDescriptionRenderer.tsx | 2 +- .../Schema/utils/useTagsAndTermsRenderer.tsx | 38 ++-- .../Schema/utils/useUsageStatsRenderer.tsx | 2 +- .../components/editor/Editor.tsx | 5 +- .../tabs/Properties/CardinalityLabel.tsx | 43 ++++ .../shared/tabs/Properties/NameColumn.tsx | 87 +++++++ .../shared/tabs/Properties/PropertiesTab.tsx | 91 +++++--- .../Properties/StructuredPropertyTooltip.tsx | 31 +++ .../Properties/StructuredPropertyValue.tsx | 69 ++++++ .../shared/tabs/Properties/TabHeader.tsx | 32 +++ .../shared/tabs/Properties/ValuesColumn.tsx | 24 ++ .../tabs/Properties/__tests__/utils.test.ts | 87 +++++++ .../entity/shared/tabs/Properties/types.ts | 25 ++ .../Properties/useStructuredProperties.tsx | 215 ++++++++++++++++++ .../useUpdateExpandedRowsFromFilter.ts | 23 ++ .../entity/shared/tabs/Properties/utils.ts | 68 ++++++ .../src/app/entity/shared/types.ts | 2 + .../src/app/entity/shared/utils.ts | 12 +- .../src/graphql/fragments.graphql | 77 +++++++ .../tests/cypress/cypress/e2e/login/login.js | 4 +- .../e2e/mutations/edit_documentation.js | 9 +- .../cypress/e2e/mutations/mutations.js | 19 +- .../cypress/e2e/schema_blame/schema_blame.js | 2 + .../tests/cypress/cypress/support/commands.js | 4 + 45 files changed, 1772 insertions(+), 140 deletions(-) create mode 100644 datahub-web-react/src/app/entity/shared/components/styled/EntityIcon.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/ProfileSidebar.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/ChildCountLabel.tsx create mode 100644 
datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertiesColumn.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertyTypeLabel.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/DrawerHeader.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldHeader.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTags.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTerms.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/SchemaFieldDrawer.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/components.ts create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/CardinalityLabel.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/NameColumn.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyTooltip.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyValue.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/TabHeader.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/ValuesColumn.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/__tests__/utils.test.ts create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/types.ts create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/useStructuredProperties.tsx create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/useUpdateExpandedRowsFromFilter.ts create mode 100644 datahub-web-react/src/app/entity/shared/tabs/Properties/utils.ts diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index 03d6f4a624c3d..9f339bb7db548 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -297,6 +297,7 @@ export const dataset1 = { embed: null, browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, autoRenderAspects: [], + structuredProperties: null, }; export const dataset2 = { @@ -393,6 +394,7 @@ export const dataset2 = { embed: null, browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, autoRenderAspects: [], + structuredProperties: null, }; export const dataset3 = { @@ -626,6 +628,7 @@ export const dataset3 = { dataProduct: null, lastProfile: null, lastOperation: null, + structuredProperties: null, } as Dataset; export const dataset3WithSchema = { @@ -650,6 +653,7 @@ export const dataset3WithSchema = { globalTags: null, glossaryTerms: null, label: 'hi', + schemaFieldEntity: null, }, { __typename: 'SchemaField', @@ -665,6 +669,7 @@ export const dataset3WithSchema = { globalTags: null, glossaryTerms: null, label: 'hi', + schemaFieldEntity: null, }, ], hash: '', diff --git 
a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx index 1d4f155f797e0..2cd4cbd6dcb6c 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx @@ -86,6 +86,7 @@ type Props = { description: string, ) => Promise, Record> | void>; isEdited?: boolean; + isReadOnly?: boolean; }; const ABBREVIATED_LIMIT = 80; @@ -97,10 +98,11 @@ export default function DescriptionField({ onUpdate, isEdited = false, original, + isReadOnly, }: Props) { const [showAddModal, setShowAddModal] = useState(false); const overLimit = removeMarkdown(description).length > 80; - const isSchemaEditable = React.useContext(SchemaEditableContext); + const isSchemaEditable = React.useContext(SchemaEditableContext) && !isReadOnly; const onCloseModal = () => setShowAddModal(false); const { urn, entityType } = useEntityData(); @@ -140,11 +142,12 @@ export default function DescriptionField({ {expanded || !overLimit ? ( <> {!!description && } - {!!description && ( + {!!description && (EditButton || overLimit) && ( {overLimit && ( { + onClick={(e) => { + e.stopPropagation(); handleExpanded(false); }} > @@ -162,7 +165,8 @@ export default function DescriptionField({ readMore={ <> { + onClick={(e) => { + e.stopPropagation(); handleExpanded(true); }} > @@ -177,7 +181,7 @@ export default function DescriptionField({ )} - {isSchemaEditable && isEdited && (edited)} + {isEdited && (edited)} {showAddModal && (
- + {!isAddDesc && description && original && ( Original:}> diff --git a/datahub-web-react/src/app/entity/shared/components/styled/EntityIcon.tsx b/datahub-web-react/src/app/entity/shared/components/styled/EntityIcon.tsx new file mode 100644 index 0000000000000..bd001b51d53ce --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/components/styled/EntityIcon.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { useEntityRegistry } from '../../../../useEntityRegistry'; +import { PlatformIcon } from '../../../../search/filters/utils'; +import { Entity } from '../../../../../types.generated'; +import { IconStyleType } from '../../../Entity'; +import { ANTD_GRAY } from '../../constants'; + +interface Props { + entity: Entity; + size?: number; +} + +export default function EntityIcon({ entity, size = 14 }: Props) { + const entityRegistry = useEntityRegistry(); + const genericEntityProps = entityRegistry.getGenericEntityProperties(entity.type, entity); + const logoUrl = genericEntityProps?.platform?.properties?.logoUrl; + const icon = logoUrl ? ( + + ) : ( + entityRegistry.getIcon(entity.type, size, IconStyleType.ACCENT, ANTD_GRAY[9]) + ); + + return <>{icon}; +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx index d7b7a4da804ef..a781c732c9de6 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx @@ -30,7 +30,6 @@ import LineageExplorer from '../../../../lineage/LineageExplorer'; import CompactContext from '../../../../shared/CompactContext'; import DynamicTab from '../../tabs/Entity/weaklyTypedAspects/DynamicTab'; import analytics, { EventType } from '../../../../analytics'; -import { ProfileSidebarResizer } from './sidebar/ProfileSidebarResizer'; import { EntityMenuItems } from '../../EntityDropdown/EntityDropdown'; import { useIsSeparateSiblingsMode } from '../../siblingUtils'; import { EntityActionItem } from '../../entity/EntityActions'; @@ -45,6 +44,7 @@ import { } from '../../../../onboarding/config/LineageGraphOnboardingConfig'; import { useAppConfig } from '../../../../useAppConfig'; import { useUpdateDomainEntityDataOnChange } from '../../../../domain/utils'; +import ProfileSidebar from './sidebar/ProfileSidebar'; type Props = { urn: string; @@ -75,8 +75,6 @@ type Props = { isNameEditable?: boolean; }; -const MAX_SIDEBAR_WIDTH = 800; -const MIN_SIDEBAR_WIDTH = 200; const MAX_COMPACT_WIDTH = 490 - 24 * 2; const ContentContainer = styled.div` @@ -85,6 +83,7 @@ const ContentContainer = styled.div` min-height: 100%; flex: 1; min-width: 0; + overflow: hidden; `; const HeaderAndTabs = styled.div` @@ -113,15 +112,6 @@ const HeaderAndTabsFlex = styled.div` -webkit-box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.75); } `; -const Sidebar = styled.div<{ $width: number }>` - max-height: 100%; - overflow: auto; - width: ${(props) => props.$width}px; - min-width: ${(props) => props.$width}px; - padding-left: 20px; - padding-right: 20px; - padding-bottom: 20px; -`; const Header = styled.div` border-bottom: 1px solid ${ANTD_GRAY[4.5]}; @@ -145,7 +135,7 @@ const defaultTabDisplayConfig = { enabled: (_, _1) => true, }; -const defaultSidebarSection = { +export const DEFAULT_SIDEBAR_SECTION = { visible: (_, _1) => true, }; @@ -176,11 +166,10 @@ export const EntityProfile = ({ const sortedTabs = sortEntityProfileTabs(appConfig.config, entityType, tabsWithDefaults); 
const sideBarSectionsWithDefaults = sidebarSections.map((sidebarSection) => ({ ...sidebarSection, - display: { ...defaultSidebarSection, ...sidebarSection.display }, + display: { ...DEFAULT_SIDEBAR_SECTION, ...sidebarSection.display }, })); const [shouldRefetchEmbeddedListSearch, setShouldRefetchEmbeddedListSearch] = useState(false); - const [sidebarWidth, setSidebarWidth] = useState(window.innerWidth * 0.25); const entityStepIds: string[] = getOnboardingStepIdsForEntityType(entityType); const lineageGraphStepIds: string[] = [LINEAGE_GRAPH_INTRO_ID, LINEAGE_GRAPH_TIME_FILTER_ID]; const stepIds = isLineageMode ? lineageGraphStepIds : entityStepIds; @@ -344,15 +333,7 @@ export const EntityProfile = ({ - - setSidebarWidth(Math.min(Math.max(width, MIN_SIDEBAR_WIDTH), MAX_SIDEBAR_WIDTH)) - } - initialSize={sidebarWidth} - /> - - - + )} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx index fbece870706f5..a8d1dceb71ec9 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx @@ -36,14 +36,16 @@ const LastIngestedSection = styled.div` type Props = { sidebarSections: EntitySidebarSection[]; + topSection?: EntitySidebarSection; }; -export const EntitySidebar = ({ sidebarSections }: Props) => { +export const EntitySidebar = ({ sidebarSections, topSection }: Props) => { const { entityData } = useEntityData(); const baseEntity = useBaseEntity(); return ( <> + {topSection && } {entityData?.lastIngested && ( diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/ProfileSidebar.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/ProfileSidebar.tsx new file mode 100644 index 0000000000000..b5e6737c16641 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/ProfileSidebar.tsx @@ -0,0 +1,77 @@ +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { ProfileSidebarResizer } from './ProfileSidebarResizer'; +import { EntitySidebar } from './EntitySidebar'; +import { EntitySidebarSection } from '../../../types'; + +export const MAX_SIDEBAR_WIDTH = 800; +export const MIN_SIDEBAR_WIDTH = 200; + +const Sidebar = styled.div<{ $width: number; backgroundColor?: string }>` + max-height: 100%; + position: relative; + width: ${(props) => props.$width}px; + min-width: ${(props) => props.$width}px; + ${(props) => props.backgroundColor && `background-color: ${props.backgroundColor};`} +`; + +const ScrollWrapper = styled.div` + overflow: auto; + max-height: 100%; + padding: 0 20px 20px 20px; +`; + +const DEFAULT_SIDEBAR_SECTION = { + visible: (_, _1) => true, +}; + +interface Props { + sidebarSections: EntitySidebarSection[]; + backgroundColor?: string; + topSection?: EntitySidebarSection; + alignLeft?: boolean; +} + +export default function ProfileSidebar({ sidebarSections, backgroundColor, topSection, alignLeft }: Props) { + const sideBarSectionsWithDefaults = sidebarSections.map((sidebarSection) => ({ + ...sidebarSection, + display: { ...DEFAULT_SIDEBAR_SECTION, ...sidebarSection.display }, + })); + + const [sidebarWidth, setSidebarWidth] = useState(window.innerWidth * 0.25); + + if (alignLeft) { + return ( + <> + + + + + + + setSidebarWidth(Math.min(Math.max(width, MIN_SIDEBAR_WIDTH), MAX_SIDEBAR_WIDTH)) + } + initialSize={sidebarWidth} + 
isSidebarOnLeft + /> + + ); + } + + return ( + <> + + setSidebarWidth(Math.min(Math.max(width, MIN_SIDEBAR_WIDTH), MAX_SIDEBAR_WIDTH)) + } + initialSize={sidebarWidth} + /> + + + + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx index 75027e17b6d0c..28dc3ba5c6ce5 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTab.tsx @@ -76,6 +76,14 @@ export const SchemaTab = ({ properties }: { properties?: any }) => { [schemaMetadata], ); + const hasProperties = useMemo( + () => + entityWithSchema?.schemaMetadata?.fields.some( + (schemaField) => !!schemaField.schemaFieldEntity?.structuredProperties?.properties?.length, + ), + [entityWithSchema], + ); + const [showKeySchema, setShowKeySchema] = useState(false); const [showSchemaAuditView, setShowSchemaAuditView] = useState(false); @@ -190,13 +198,13 @@ export const SchemaTab = ({ properties }: { properties?: any }) => { diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTable.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTable.tsx index 41b92aea93b5a..bd092e86b3584 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTable.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/SchemaTable.tsx @@ -21,9 +21,10 @@ import { StyledTable } from '../../../components/styled/StyledTable'; import { SchemaRow } from './components/SchemaRow'; import { FkContext } from './utils/selectedFkContext'; import useSchemaBlameRenderer from './utils/useSchemaBlameRenderer'; -import { ANTD_GRAY } from '../../../constants'; -import MenuColumn from './components/MenuColumn'; +import { ANTD_GRAY, ANTD_GRAY_V2 } from '../../../constants'; import translateFieldPath from '../../../../dataset/profile/schema/utils/translateFieldPath'; +import PropertiesColumn from './components/PropertiesColumn'; +import SchemaFieldDrawer from './components/SchemaFieldDrawer/SchemaFieldDrawer'; const TableContainer = styled.div` overflow: inherit; @@ -41,18 +42,36 @@ const TableContainer = styled.div` padding-bottom: 600px; vertical-align: top; } + + &&& .ant-table-cell { + background-color: inherit; + cursor: pointer; + } + + &&& tbody > tr:hover > td { + background-color: ${ANTD_GRAY_V2[2]}; + } + + &&& .expanded-row { + background-color: ${(props) => props.theme.styles['highlight-color']} !important; + + td { + background-color: ${(props) => props.theme.styles['highlight-color']} !important; + } + } `; export type Props = { rows: Array; schemaMetadata: SchemaMetadata | undefined | null; editableSchemaMetadata?: EditableSchemaMetadata | null; - editMode?: boolean; usageStats?: UsageQueryResult | null; schemaFieldBlameList?: Array | null; showSchemaAuditView: boolean; expandedRowsFromFilter?: Set; filterText?: string; + hasProperties?: boolean; + inputFields?: SchemaField[]; }; const EMPTY_SET: Set = new Set(); @@ -63,56 +82,46 @@ export default function SchemaTable({ schemaMetadata, editableSchemaMetadata, usageStats, - editMode = true, schemaFieldBlameList, showSchemaAuditView, expandedRowsFromFilter = EMPTY_SET, filterText = '', + hasProperties, + inputFields, }: Props): JSX.Element { const hasUsageStats = useMemo(() => (usageStats?.aggregations?.fields?.length || 0) > 0, [usageStats]); const [tableHeight, setTableHeight] = useState(0); - const [tagHoveredIndex, 
setTagHoveredIndex] = useState(undefined); - const [selectedFkFieldPath, setSelectedFkFieldPath] = - useState(null); + const [selectedFkFieldPath, setSelectedFkFieldPath] = useState(null); + const [expandedDrawerFieldPath, setExpandedDrawerFieldPath] = useState(null); + + const schemaFields = schemaMetadata ? schemaMetadata.fields : inputFields; const descriptionRender = useDescriptionRenderer(editableSchemaMetadata); const usageStatsRenderer = useUsageStatsRenderer(usageStats); const tagRenderer = useTagsAndTermsRenderer( editableSchemaMetadata, - tagHoveredIndex, - setTagHoveredIndex, { showTags: true, showTerms: false, }, filterText, + false, ); const termRenderer = useTagsAndTermsRenderer( editableSchemaMetadata, - tagHoveredIndex, - setTagHoveredIndex, { showTags: false, showTerms: true, }, filterText, + false, ); const schemaTitleRenderer = useSchemaTitleRenderer(schemaMetadata, setSelectedFkFieldPath, filterText); const schemaBlameRenderer = useSchemaBlameRenderer(schemaFieldBlameList); - const onTagTermCell = (record: SchemaField) => ({ - onMouseEnter: () => { - if (editMode) { - setTagHoveredIndex(record.fieldPath); - } - }, - onMouseLeave: () => { - if (editMode) { - setTagHoveredIndex(undefined); - } - }, - }); - const fieldColumn = { width: '22%', title: 'Field', @@ -139,7 +148,6 @@ export default function SchemaTable({ dataIndex: 'globalTags', key: 'tag', render: tagRenderer, - onCell: onTagTermCell, }; const termColumn = { @@ -148,7 +156,6 @@ export default function SchemaTable({ dataIndex: 'globalTags', key: 'tag', render: termRenderer, - onCell: onTagTermCell, }; const blameColumn = { @@ -184,16 +191,20 @@ export default function SchemaTable({ sorter: (sourceA, sourceB) => getCount(sourceA.fieldPath) - getCount(sourceB.fieldPath), }; - const menuColumn = { - width: '5%', - title: '', + const propertiesColumn = { + width: '13%', + title: 'Properties', dataIndex: '', key: 'menu', - render: (field: SchemaField) => , + render: (field: SchemaField) => , }; let allColumns: ColumnsType = [fieldColumn, descriptionColumn, tagColumn, termColumn]; + if (hasProperties) { + allColumns = [...allColumns, propertiesColumn]; + } + if (hasUsageStats) { allColumns = [...allColumns, usageColumn]; } @@ -202,8 +213,6 @@ export default function SchemaTable({ allColumns = [...allColumns, blameColumn]; } - allColumns = [...allColumns, menuColumn]; - const [expandedRows, setExpandedRows] = useState>(new Set()); useEffect(() => { @@ -224,9 +233,15 @@ export default function SchemaTable({ setTableHeight(dimensions.height - TABLE_HEADER_HEIGHT)}> - record.fieldPath === selectedFkFieldPath?.fieldPath ? 'open-fk-row' : '' - } + rowClassName={(record) => { + if (record.fieldPath === selectedFkFieldPath?.fieldPath) { + return 'open-fk-row'; + } + if (expandedDrawerFieldPath === record.fieldPath) { + return 'expanded-row'; + } + return ''; + }} columns={allColumns} dataSource={rows} rowKey="fieldPath" @@ -250,9 +265,27 @@ export default function SchemaTable({ indentSize: 0, }} pagination={false} + onRow={(record) => ({ + onClick: () => { + setExpandedDrawerFieldPath( + expandedDrawerFieldPath === record.fieldPath ? null : record.fieldPath, + ); + }, + style: { + backgroundColor: expandedDrawerFieldPath === record.fieldPath ? 
`` : 'white', + }, + })} /> + {!!schemaFields && ( + + )} ); } diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/ChildCountLabel.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/ChildCountLabel.tsx new file mode 100644 index 0000000000000..44bd48620649a --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/ChildCountLabel.tsx @@ -0,0 +1,32 @@ +import React from 'react'; +import { Badge } from 'antd'; +import styled from 'styled-components'; + +import { ANTD_GRAY_V2 } from '../../../../constants'; + +type Props = { + count: number; +}; + +const ChildCountBadge = styled(Badge)` + margin-left: 10px; + margin-top: 16px; + margin-bottom: 16px; + &&& .ant-badge-count { + background-color: ${ANTD_GRAY_V2[1]}; + color: ${ANTD_GRAY_V2[8]}; + box-shadow: 0 2px 1px -1px ${ANTD_GRAY_V2[6]}; + border-radius: 4px 4px 4px 4px; + font-size: 12px; + font-weight: 500; + height: 22px; + font-family: 'Manrope'; + } +`; + +export default function ChildCountLabel({ count }: Props) { + const propertyString = count > 1 ? ' properties' : ' property'; + + // eslint-disable-next-line + return ; +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertiesColumn.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertiesColumn.tsx new file mode 100644 index 0000000000000..b74de3e94e554 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertiesColumn.tsx @@ -0,0 +1,30 @@ +import { ControlOutlined } from '@ant-design/icons'; +import React from 'react'; +import styled from 'styled-components'; +import { SchemaField } from '../../../../../../../types.generated'; + +const ColumnWrapper = styled.div` + font-size: 14px; +`; + +const StyledIcon = styled(ControlOutlined)` + margin-right: 4px; +`; + +interface Props { + field: SchemaField; +} + +export default function PropertiesColumn({ field }: Props) { + const { schemaFieldEntity } = field; + const numProperties = schemaFieldEntity?.structuredProperties?.properties?.length; + + if (!schemaFieldEntity || !numProperties) return null; + + return ( + + + {numProperties} {numProperties === 1 ? 
'property' : 'properties'} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertyTypeLabel.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertyTypeLabel.tsx new file mode 100644 index 0000000000000..366fc4762b210 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/PropertyTypeLabel.tsx @@ -0,0 +1,39 @@ +import React from 'react'; +import { Badge } from 'antd'; +import styled from 'styled-components'; +import { capitalizeFirstLetterOnly } from '../../../../../../shared/textUtil'; +import { DataTypeEntity, SchemaFieldDataType } from '../../../../../../../types.generated'; +import { truncate } from '../../../../utils'; +import { ANTD_GRAY, ANTD_GRAY_V2 } from '../../../../constants'; +import { TypeData } from '../../../Properties/types'; + +type Props = { + type: TypeData; + dataType?: DataTypeEntity; +}; + +export const PropertyTypeBadge = styled(Badge)` + margin: 4px 0 4px 8px; + &&& .ant-badge-count { + background-color: ${ANTD_GRAY[1]}; + color: ${ANTD_GRAY_V2[8]}; + border: 1px solid ${ANTD_GRAY_V2[6]}; + font-size: 12px; + font-weight: 500; + height: 22px; + font-family: 'Manrope'; + } +`; + +export default function PropertyTypeLabel({ type, dataType }: Props) { + // if unable to match type to DataHub, display native type info by default + const { nativeDataType } = type; + const nativeFallback = type.type === SchemaFieldDataType.Null; + + const typeText = + dataType?.info.displayName || + dataType?.info.type || + (nativeFallback ? truncate(250, nativeDataType) : type.type); + + return ; +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/DrawerHeader.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/DrawerHeader.tsx new file mode 100644 index 0000000000000..13f8ec869126d --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/DrawerHeader.tsx @@ -0,0 +1,106 @@ +import { CaretLeftOutlined, CaretRightOutlined, CloseOutlined } from '@ant-design/icons'; +import { Button } from 'antd'; +import React, { useEffect } from 'react'; +import styled from 'styled-components'; +import { ANTD_GRAY_V2 } from '../../../../../constants'; +import { SchemaField } from '../../../../../../../../types.generated'; +import { pluralize } from '../../../../../../../shared/textUtil'; + +const HeaderWrapper = styled.div` + border-bottom: 1px solid ${ANTD_GRAY_V2[4]}; + display: flex; + justify-content: space-between; + padding: 8px 16px; +`; + +const StyledButton = styled(Button)` + font-size: 12px; + padding: 0; + height: 26px; + width: 26px; + display: flex; + align-items: center; + justify-content: center; + + svg { + height: 10px; + width: 10px; + } +`; + +const FieldIndexText = styled.span` + font-size: 14px; + color: ${ANTD_GRAY_V2[8]}; + margin: 0 8px; +`; + +const ButtonsWrapper = styled.div` + display: flex; + align-items: center; +`; + +interface Props { + schemaFields?: SchemaField[]; + expandedFieldIndex?: number; + setExpandedDrawerFieldPath: (fieldPath: string | null) => void; +} + +export default function DrawerHeader({ schemaFields = [], expandedFieldIndex = 0, setExpandedDrawerFieldPath }: Props) { + function showNextField() { + if (expandedFieldIndex !== undefined && expandedFieldIndex !== -1) { + if (expandedFieldIndex === schemaFields.length - 1) { + const newField = schemaFields[0]; + 
setExpandedDrawerFieldPath(newField.fieldPath); + } else { + const newField = schemaFields[expandedFieldIndex + 1]; + const { fieldPath } = newField; + setExpandedDrawerFieldPath(fieldPath); + } + } + } + + function showPreviousField() { + if (expandedFieldIndex !== undefined && expandedFieldIndex !== -1) { + if (expandedFieldIndex === 0) { + const newField = schemaFields[schemaFields.length - 1]; + setExpandedDrawerFieldPath(newField.fieldPath); + } else { + const newField = schemaFields[expandedFieldIndex - 1]; + setExpandedDrawerFieldPath(newField.fieldPath); + } + } + } + + function handleArrowKeys(event: KeyboardEvent) { + if (event.code === 'ArrowUp' || event.code === 'ArrowLeft') { + showPreviousField(); + } else if (event.code === 'ArrowDown' || event.code === 'ArrowRight') { + showNextField(); + } + } + + useEffect(() => { + document.addEventListener('keydown', handleArrowKeys); + + return () => document.removeEventListener('keydown', handleArrowKeys); + }); + + return ( + + + + + + + {expandedFieldIndex + 1} of {schemaFields.length} {pluralize(schemaFields.length, 'field')} + + + + + + setExpandedDrawerFieldPath(null)}> + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx new file mode 100644 index 0000000000000..410d2801d51c8 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx @@ -0,0 +1,115 @@ +import { EditOutlined } from '@ant-design/icons'; +import { Button, message } from 'antd'; +import DOMPurify from 'dompurify'; +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { SectionHeader, StyledDivider } from './components'; +import UpdateDescriptionModal from '../../../../../components/legacy/DescriptionModal'; +import { EditableSchemaFieldInfo, SchemaField, SubResourceType } from '../../../../../../../../types.generated'; +import DescriptionSection from '../../../../../containers/profile/sidebar/AboutSection/DescriptionSection'; +import { useEntityData, useMutationUrn, useRefetch } from '../../../../../EntityContext'; +import { useSchemaRefetch } from '../../SchemaContext'; +import { useUpdateDescriptionMutation } from '../../../../../../../../graphql/mutations.generated'; +import analytics, { EntityActionType, EventType } from '../../../../../../../analytics'; +import SchemaEditableContext from '../../../../../../../shared/SchemaEditableContext'; + +const DescriptionWrapper = styled.div` + display: flex; + justify-content: space-between; +`; + +const EditIcon = styled(Button)` + border: none; + box-shadow: none; + height: 20px; + width: 20px; +`; + +interface Props { + expandedField: SchemaField; + editableFieldInfo?: EditableSchemaFieldInfo; +} + +export default function FieldDescription({ expandedField, editableFieldInfo }: Props) { + const isSchemaEditable = React.useContext(SchemaEditableContext); + const urn = useMutationUrn(); + const refetch = useRefetch(); + const schemaRefetch = useSchemaRefetch(); + const [updateDescription] = useUpdateDescriptionMutation(); + const [isModalVisible, setIsModalVisible] = useState(false); + const { entityType } = useEntityData(); + + const sendAnalytics = () => { + analytics.event({ + type: EventType.EntityActionEvent, + actionType: EntityActionType.UpdateSchemaDescription, + entityType, + entityUrn: urn, + }); + }; + + const 
refresh: any = () => { + refetch?.(); + schemaRefetch?.(); + }; + + const onSuccessfulMutation = () => { + refresh(); + sendAnalytics(); + message.destroy(); + message.success({ content: 'Updated!', duration: 2 }); + }; + + const onFailMutation = (e) => { + message.destroy(); + if (e instanceof Error) message.error({ content: `Proposal Failed! \n ${e.message || ''}`, duration: 2 }); + }; + + const generateMutationVariables = (updatedDescription: string) => ({ + variables: { + input: { + description: DOMPurify.sanitize(updatedDescription), + resourceUrn: urn, + subResource: expandedField.fieldPath, + subResourceType: SubResourceType.DatasetField, + }, + }, + }); + + const displayedDescription = editableFieldInfo?.description || expandedField.description; + + return ( + <> + +
+ <SectionHeader>Description</SectionHeader> +
+ {isSchemaEditable && ( + setIsModalVisible(true)} + icon={} + /> + )} + {isModalVisible && ( + setIsModalVisible(false)} + onSubmit={(updatedDescription: string) => { + message.loading({ content: 'Updating...' }); + updateDescription(generateMutationVariables(updatedDescription)) + .then(onSuccessfulMutation) + .catch(onFailMutation); + setIsModalVisible(false); + }} + isAddDesc={!displayedDescription} + /> + )} +
+ + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldHeader.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldHeader.tsx new file mode 100644 index 0000000000000..7b06ff43393ef --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldHeader.tsx @@ -0,0 +1,60 @@ +import { Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import translateFieldPath from '../../../../../../dataset/profile/schema/utils/translateFieldPath'; +import TypeLabel from '../TypeLabel'; +import PrimaryKeyLabel from '../PrimaryKeyLabel'; +import PartitioningKeyLabel from '../PartitioningKeyLabel'; +import NullableLabel from '../NullableLabel'; +import MenuColumn from '../MenuColumn'; +import { ANTD_GRAY_V2 } from '../../../../../constants'; +import { SchemaField } from '../../../../../../../../types.generated'; + +const FieldHeaderWrapper = styled.div` + padding: 16px; + display: flex; + justify-content: space-between; + border-bottom: 1px solid ${ANTD_GRAY_V2[4]}; +`; + +const FieldName = styled(Typography.Text)` + font-size: 16px; + font-family: 'Roboto Mono', monospace; +`; + +const TypesSection = styled.div` + margin-left: -4px; + margin-top: 8px; +`; + +const NameTypesWrapper = styled.div` + overflow: hidden; +`; + +const MenuWrapper = styled.div` + margin-right: 5px; +`; + +interface Props { + expandedField: SchemaField; +} + +export default function FieldHeader({ expandedField }: Props) { + const displayName = translateFieldPath(expandedField.fieldPath || ''); + return ( + + + {displayName} + + + {expandedField.isPartOfKey && } + {expandedField.isPartitioningKey && } + {expandedField.nullable && } + + + + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx new file mode 100644 index 0000000000000..8c88cdce95f06 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx @@ -0,0 +1,70 @@ +import React from 'react'; +import styled from 'styled-components'; +import { SchemaField, StdDataType } from '../../../../../../../../types.generated'; +import { SectionHeader, StyledDivider } from './components'; +import { mapStructuredPropertyValues } from '../../../../Properties/useStructuredProperties'; +import StructuredPropertyValue from '../../../../Properties/StructuredPropertyValue'; + +const PropertyTitle = styled.div` + font-size: 14px; + font-weight: 700; + margin-bottom: 4px; +`; + +const PropertyWrapper = styled.div` + margin-bottom: 12px; +`; + +const PropertiesWrapper = styled.div` + padding-left: 16px; +`; + +const StyledList = styled.ul` + padding-left: 24px; +`; + +interface Props { + expandedField: SchemaField; +} + +export default function FieldProperties({ expandedField }: Props) { + const { schemaFieldEntity } = expandedField; + + if (!schemaFieldEntity?.structuredProperties?.properties?.length) return null; + + return ( + <> + Properties + + {schemaFieldEntity.structuredProperties.properties.map((structuredProp) => { + const isRichText = + structuredProp.structuredProperty.definition.valueType?.info.type === StdDataType.RichText; + const valuesData = mapStructuredPropertyValues(structuredProp); + const hasMultipleValues = 
valuesData.length > 1; + + return ( + + {structuredProp.structuredProperty.definition.displayName} + {hasMultipleValues ? ( + + {valuesData.map((value) => ( +
+ +
))} +
+ ) : ( + <> + {valuesData.map((value) => ( + + ))} + + )} +
+ ); + })} +
    + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTags.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTags.tsx new file mode 100644 index 0000000000000..c071506d3ad79 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTags.tsx @@ -0,0 +1,33 @@ +import React from 'react'; +import { EditableSchemaMetadata, GlobalTags, SchemaField } from '../../../../../../../../types.generated'; +import useTagsAndTermsRenderer from '../../utils/useTagsAndTermsRenderer'; +import { SectionHeader, StyledDivider } from './components'; +import SchemaEditableContext from '../../../../../../../shared/SchemaEditableContext'; + +interface Props { + expandedField: SchemaField; + editableSchemaMetadata?: EditableSchemaMetadata | null; +} + +export default function FieldTags({ expandedField, editableSchemaMetadata }: Props) { + const isSchemaEditable = React.useContext(SchemaEditableContext); + const tagRenderer = useTagsAndTermsRenderer( + editableSchemaMetadata, + { + showTags: true, + showTerms: false, + }, + '', + isSchemaEditable, + ); + + return ( + <> + Tags +
+ {tagRenderer(expandedField.globalTags as GlobalTags, expandedField)} +
    + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTerms.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTerms.tsx new file mode 100644 index 0000000000000..94349836539a6 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldTerms.tsx @@ -0,0 +1,34 @@ +import React from 'react'; +import { EditableSchemaMetadata, GlobalTags, SchemaField } from '../../../../../../../../types.generated'; +import useTagsAndTermsRenderer from '../../utils/useTagsAndTermsRenderer'; +import { SectionHeader, StyledDivider } from './components'; +import SchemaEditableContext from '../../../../../../../shared/SchemaEditableContext'; + +interface Props { + expandedField: SchemaField; + editableSchemaMetadata?: EditableSchemaMetadata | null; +} + +export default function FieldTerms({ expandedField, editableSchemaMetadata }: Props) { + const isSchemaEditable = React.useContext(SchemaEditableContext); + const termRenderer = useTagsAndTermsRenderer( + editableSchemaMetadata, + { + showTags: false, + showTerms: true, + }, + '', + isSchemaEditable, + ); + + return ( + <> + Glossary Terms + {/* pass in globalTags since this is a shared component, tags will not be shown or used */} +
+ {termRenderer(expandedField.globalTags as GlobalTags, expandedField)} +
    + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx new file mode 100644 index 0000000000000..2f7288904b2df --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx @@ -0,0 +1,59 @@ +import React, { useMemo } from 'react'; +import styled from 'styled-components'; +import { GetDatasetQuery } from '../../../../../../../../graphql/dataset.generated'; +import { useBaseEntity } from '../../../../../EntityContext'; +import { ANTD_GRAY_V2 } from '../../../../../constants'; +import { SectionHeader, StyledDivider } from './components'; +import { pathMatchesNewPath } from '../../../../../../dataset/profile/schema/utils/utils'; +import { UsageBar } from '../../utils/useUsageStatsRenderer'; +import { SchemaField } from '../../../../../../../../types.generated'; + +const USAGE_BAR_MAX_WIDTH = 100; + +const UsageBarWrapper = styled.div` + display: flex; + align-items: center; +`; + +const UsageBarBackground = styled.div` + background-color: ${ANTD_GRAY_V2[3]}; + border-radius: 2px; + height: 4px; + width: ${USAGE_BAR_MAX_WIDTH}px; +`; + +const UsageTextWrapper = styled.span` + margin-left: 8px; +`; + +interface Props { + expandedField: SchemaField; +} + +export default function FieldUsageStats({ expandedField }: Props) { + const baseEntity = useBaseEntity(); + const usageStats = baseEntity?.dataset?.usageStats; + const hasUsageStats = useMemo(() => (usageStats?.aggregations?.fields?.length || 0) > 0, [usageStats]); + const maxFieldUsageCount = useMemo( + () => Math.max(...(usageStats?.aggregations?.fields?.map((field) => field?.count || 0) || [])), + [usageStats], + ); + const relevantUsageStats = usageStats?.aggregations?.fields?.find((fieldStats) => + pathMatchesNewPath(fieldStats?.fieldName, expandedField.fieldPath), + ); + + if (!hasUsageStats || !relevantUsageStats) return null; + + return ( + <> + Usage + + + + + {relevantUsageStats.count || 0} queries / month + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/SchemaFieldDrawer.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/SchemaFieldDrawer.tsx new file mode 100644 index 0000000000000..7a5366f04e983 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/SchemaFieldDrawer.tsx @@ -0,0 +1,83 @@ +import { Drawer } from 'antd'; +import React, { useMemo } from 'react'; +import styled from 'styled-components'; +import DrawerHeader from './DrawerHeader'; +import FieldHeader from './FieldHeader'; +import FieldDescription from './FieldDescription'; +import { EditableSchemaMetadata, SchemaField } from '../../../../../../../../types.generated'; +import { pathMatchesNewPath } from '../../../../../../dataset/profile/schema/utils/utils'; +import FieldUsageStats from './FieldUsageStats'; +import FieldTags from './FieldTags'; +import FieldTerms from './FieldTerms'; +import FieldProperties from './FieldProperties'; + +const StyledDrawer = styled(Drawer)` + position: absolute; + + &&& .ant-drawer-body { + padding: 0; + } + + &&& .ant-drawer-content-wrapper { + border-left: 3px solid ${(props) => props.theme.styles['primary-color']}; + } +`; + +const MetadataSections = styled.div` + padding: 16px 24px; +`; + +interface Props { + 
schemaFields: SchemaField[]; + editableSchemaMetadata?: EditableSchemaMetadata | null; + expandedDrawerFieldPath: string | null; + setExpandedDrawerFieldPath: (fieldPath: string | null) => void; +} + +export default function SchemaFieldDrawer({ + schemaFields, + editableSchemaMetadata, + expandedDrawerFieldPath, + setExpandedDrawerFieldPath, +}: Props) { + const expandedFieldIndex = useMemo( + () => schemaFields.findIndex((row) => row.fieldPath === expandedDrawerFieldPath), + [expandedDrawerFieldPath, schemaFields], + ); + const expandedField = + expandedFieldIndex !== undefined && expandedFieldIndex !== -1 ? schemaFields[expandedFieldIndex] : undefined; + const editableFieldInfo = editableSchemaMetadata?.editableSchemaFieldInfo.find((candidateEditableFieldInfo) => + pathMatchesNewPath(candidateEditableFieldInfo.fieldPath, expandedField?.fieldPath), + ); + + return ( + setExpandedDrawerFieldPath(null)} + getContainer={() => document.getElementById('entity-profile-sidebar') as HTMLElement} + contentWrapperStyle={{ width: '100%', boxShadow: 'none' }} + mask={false} + maskClosable={false} + placement="right" + closable={false} + > + + {expandedField && ( + <> + + + + + + + + + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/components.ts b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/components.ts new file mode 100644 index 0000000000000..0348336d649b5 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/components.ts @@ -0,0 +1,12 @@ +import { Divider } from 'antd'; +import styled from 'styled-components'; + +export const SectionHeader = styled.div` + font-size: 16px; + font-weight: 600; + margin-bottom: 8px; +`; + +export const StyledDivider = styled(Divider)` + margin: 12px 0; +`; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx index d80143f4bb82c..5f2b5d23771c0 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx @@ -48,8 +48,8 @@ export default function useDescriptionRenderer(editableSchemaMetadata: EditableS }, }).then(refresh) } + isReadOnly /> ); }; } -// diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useTagsAndTermsRenderer.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useTagsAndTermsRenderer.tsx index a57344e5733b4..207deb31d7ab7 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useTagsAndTermsRenderer.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useTagsAndTermsRenderer.tsx @@ -2,15 +2,14 @@ import React from 'react'; import { EditableSchemaMetadata, EntityType, GlobalTags, SchemaField } from '../../../../../../../types.generated'; import TagTermGroup from '../../../../../../shared/tags/TagTermGroup'; import { pathMatchesNewPath } from '../../../../../dataset/profile/schema/utils/utils'; -import { useMutationUrn, useRefetch } from '../../../../EntityContext'; import { useSchemaRefetch } from '../SchemaContext'; +import { useMutationUrn, useRefetch } from '../../../../EntityContext'; export default function useTagsAndTermsRenderer( editableSchemaMetadata: EditableSchemaMetadata | null | undefined, - 
tagHoveredIndex: string | undefined, - setTagHoveredIndex: (index: string | undefined) => void, options: { showTags: boolean; showTerms: boolean }, filterText: string, + canEdit: boolean, ) { const urn = useMutationUrn(); const refetch = useRefetch(); @@ -27,24 +26,21 @@ export default function useTagsAndTermsRenderer( ); return ( -
    - setTagHoveredIndex(undefined)} - entityUrn={urn} - entityType={EntityType.Dataset} - entitySubresource={record.fieldPath} - highlightText={filterText} - refetch={refresh} - /> -
    + ); }; return tagAndTermRender; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useUsageStatsRenderer.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useUsageStatsRenderer.tsx index 393783c4ca787..e6b58eeb376f9 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useUsageStatsRenderer.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useUsageStatsRenderer.tsx @@ -7,7 +7,7 @@ import { pathMatchesNewPath } from '../../../../../dataset/profile/schema/utils/ const USAGE_BAR_MAX_WIDTH = 50; -const UsageBar = styled.div<{ width: number }>` +export const UsageBar = styled.div<{ width: number }>` width: ${(props) => props.width}px; height: 4px; background-color: ${geekblue[3]}; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx index bd2e410fb30d9..db56c092c8ccd 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx @@ -40,10 +40,11 @@ type EditorProps = { onChange?: (md: string) => void; className?: string; doNotFocus?: boolean; + dataTestId?: string; }; export const Editor = forwardRef((props: EditorProps, ref) => { - const { content, readOnly, onChange, className } = props; + const { content, readOnly, onChange, className, dataTestId } = props; const { manager, state, getContext } = useRemirror({ extensions: () => [ new BlockquoteExtension(), @@ -98,7 +99,7 @@ export const Editor = forwardRef((props: EditorProps, ref) => { }, [readOnly, content]); return ( - + {!readOnly && ( diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/CardinalityLabel.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/CardinalityLabel.tsx new file mode 100644 index 0000000000000..14d3b2166554a --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/CardinalityLabel.tsx @@ -0,0 +1,43 @@ +import { Tooltip } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { PropertyCardinality, StructuredPropertyEntity } from '../../../../../types.generated'; +import { PropertyTypeBadge } from '../Dataset/Schema/components/PropertyTypeLabel'; +import { getStructuredPropertyValue } from '../../utils'; + +const Header = styled.div` + font-size: 10px; +`; + +const List = styled.ul` + padding: 0 24px; + max-height: 500px; + overflow: auto; +`; + +interface Props { + structuredProperty: StructuredPropertyEntity; +} + +export default function CardinalityLabel({ structuredProperty }: Props) { + const labelText = + structuredProperty.definition.cardinality === PropertyCardinality.Single ? 'Single-Select' : 'Multi-Select'; + + return ( + +
+                        <Header>Property Options</Header>
+                        <List>
+                            {structuredProperty.definition.allowedValues?.map((value) => (
+                                <li>{getStructuredPropertyValue(value.value)}</li>
+                            ))}
+                        </List>
    + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/NameColumn.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/NameColumn.tsx new file mode 100644 index 0000000000000..3b718c1ec30ed --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/NameColumn.tsx @@ -0,0 +1,87 @@ +import { Tooltip, Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import Highlight from 'react-highlighter'; +import { PropertyRow } from './types'; +import ChildCountLabel from '../Dataset/Schema/components/ChildCountLabel'; +import PropertyTypeLabel from '../Dataset/Schema/components/PropertyTypeLabel'; +import StructuredPropertyTooltip from './StructuredPropertyTooltip'; +import CardinalityLabel from './CardinalityLabel'; + +const ParentNameText = styled(Typography.Text)` + color: #373d44; + font-size: 16px; + font-family: Manrope; + font-weight: 600; + line-height: 20px; + word-wrap: break-word; + padding-left: 16px; + display: flex; + align-items: center; +`; + +const ChildNameText = styled(Typography.Text)` + align-self: stretch; + color: #373d44; + font-size: 14px; + font-family: Manrope; + font-weight: 500; + line-height: 18px; + word-wrap: break-word; + padding-left: 16px; + display: flex; + align-items: center; +`; + +const NameLabelWrapper = styled.span` + display: inline-flex; + align-items: center; + flex-wrap: wrap; +`; + +interface Props { + propertyRow: PropertyRow; + filterText?: string; +} + +export default function NameColumn({ propertyRow, filterText }: Props) { + const { structuredProperty } = propertyRow; + return ( + <> + {propertyRow.children ? ( + + + {propertyRow.displayName} + + {propertyRow.childrenCount ? : } + + ) : ( + + + ) : ( + '' + ) + } + > + + {propertyRow.displayName} + + + {propertyRow.type ? 
( + + ) : ( + + )} + {structuredProperty?.definition.allowedValues && ( + + )} + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/PropertiesTab.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/PropertiesTab.tsx index 277096e1c09cb..01d1145877e3b 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Properties/PropertiesTab.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/PropertiesTab.tsx @@ -1,52 +1,79 @@ -import React from 'react'; -import { Typography } from 'antd'; import styled from 'styled-components'; - -import { ANTD_GRAY } from '../../constants'; -import { StyledTable } from '../../components/styled/StyledTable'; +import React, { useState } from 'react'; +import ExpandIcon from '../Dataset/Schema/components/ExpandIcon'; +import { StyledTable as Table } from '../../components/styled/StyledTable'; import { useEntityData } from '../../EntityContext'; +import { PropertyRow } from './types'; +import useStructuredProperties from './useStructuredProperties'; +import { getFilteredCustomProperties, mapCustomPropertiesToPropertyRows } from './utils'; +import ValuesColumn from './ValuesColumn'; +import NameColumn from './NameColumn'; +import TabHeader from './TabHeader'; +import useUpdateExpandedRowsFromFilter from './useUpdateExpandedRowsFromFilter'; +import { useEntityRegistry } from '../../../../useEntityRegistry'; -const NameText = styled(Typography.Text)` - font-family: 'Roboto Mono', monospace; - font-weight: 600; - font-size: 12px; - color: ${ANTD_GRAY[9]}; -`; - -const ValueText = styled(Typography.Text)` - font-family: 'Roboto Mono', monospace; - font-weight: 400; - font-size: 12px; - color: ${ANTD_GRAY[8]}; -`; +const StyledTable = styled(Table)` + &&& .ant-table-cell-with-append { + padding: 4px; + } +` as typeof Table; export const PropertiesTab = () => { + const [filterText, setFilterText] = useState(''); const { entityData } = useEntityData(); + const entityRegistry = useEntityRegistry(); const propertyTableColumns = [ { - width: 210, + width: '40%', title: 'Name', - dataIndex: 'key', - sorter: (a, b) => a?.key.localeCompare(b?.key || '') || 0, defaultSortOrder: 'ascend', - render: (name: string) => {name}, + render: (propertyRow: PropertyRow) => , }, { title: 'Value', - dataIndex: 'value', - render: (value: string) => {value}, + render: (propertyRow: PropertyRow) => , }, ]; + const { structuredPropertyRows, expandedRowsFromFilter } = useStructuredProperties(entityRegistry, filterText); + const customProperties = getFilteredCustomProperties(filterText, entityData) || []; + const customPropertyRows = mapCustomPropertiesToPropertyRows(customProperties); + const dataSource: PropertyRow[] = structuredPropertyRows.concat(customPropertyRows); + + const [expandedRows, setExpandedRows] = useState>(new Set()); + + useUpdateExpandedRowsFromFilter({ expandedRowsFromFilter, setExpandedRows }); + return ( - + <> + + { + if (expanded) { + setExpandedRows((previousRows) => new Set(previousRows.add(record.qualifiedName))); + } else { + setExpandedRows((previousRows) => { + previousRows.delete(record.qualifiedName); + return new Set(previousRows); + }); + } + }, + indentSize: 0, + }} + /> + ); }; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyTooltip.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyTooltip.tsx new file mode 100644 index 0000000000000..be0f443ce01b2 --- /dev/null +++ 
b/datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyTooltip.tsx
@@ -0,0 +1,31 @@
+import React from 'react';
+import styled from 'styled-components';
+import { StructuredPropertyEntity } from '../../../../../types.generated';
+
+const ContentWrapper = styled.div`
+    font-size: 12px;
+`;
+
+const Header = styled.div`
+    font-size: 10px;
+`;
+
+const Description = styled.div`
+    padding-left: 16px;
+`;
+
+interface Props {
+    structuredProperty: StructuredPropertyEntity;
+}
+
+export default function StructuredPropertyTooltip({ structuredProperty }: Props) {
+    return (
+        <ContentWrapper>
+            <Header>Structured Property</Header>
+            <div>{structuredProperty.definition.displayName || structuredProperty.definition.qualifiedName}</div>
+            {structuredProperty.definition.description && (
+                <Description>{structuredProperty.definition.description}</Description>
+            )}
+        </ContentWrapper>
    + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyValue.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyValue.tsx new file mode 100644 index 0000000000000..a8b4e6607b25e --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/StructuredPropertyValue.tsx @@ -0,0 +1,69 @@ +import Icon from '@ant-design/icons/lib/components/Icon'; +import React from 'react'; +import Highlight from 'react-highlighter'; +import { Typography } from 'antd'; +import styled from 'styled-components'; +import { ValueColumnData } from './types'; +import { ANTD_GRAY } from '../../constants'; +import { useEntityRegistry } from '../../../../useEntityRegistry'; +import ExternalLink from '../../../../../images/link-out.svg?react'; +import MarkdownViewer, { MarkdownView } from '../../components/legacy/MarkdownViewer'; +import EntityIcon from '../../components/styled/EntityIcon'; + +const ValueText = styled(Typography.Text)` + font-family: 'Manrope'; + font-weight: 400; + font-size: 14px; + color: ${ANTD_GRAY[9]}; + display: block; + + ${MarkdownView} { + font-size: 14px; + } +`; + +const StyledIcon = styled(Icon)` + margin-left: 6px; +`; + +const IconWrapper = styled.span` + margin-right: 4px; +`; + +interface Props { + value: ValueColumnData; + isRichText?: boolean; + filterText?: string; +} + +export default function StructuredPropertyValue({ value, isRichText, filterText }: Props) { + const entityRegistry = useEntityRegistry(); + + return ( + + {value.entity ? ( + <> + + + + {entityRegistry.getDisplayName(value.entity.type, value.entity)} + + + + + ) : ( + <> + {isRichText ? ( + + ) : ( + {value.value?.toString()} + )} + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/TabHeader.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/TabHeader.tsx new file mode 100644 index 0000000000000..9e0b4992d9c78 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/TabHeader.tsx @@ -0,0 +1,32 @@ +import { SearchOutlined } from '@ant-design/icons'; +import { Input } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { ANTD_GRAY } from '../../constants'; + +const StyledInput = styled(Input)` + border-radius: 70px; + max-width: 300px; +`; + +const TableHeader = styled.div` + padding: 8px 16px; + border-bottom: 1px solid ${ANTD_GRAY[4.5]}; +`; + +interface Props { + setFilterText: (text: string) => void; +} + +export default function TabHeader({ setFilterText }: Props) { + return ( + + setFilterText(e.target.value)} + allowClear + prefix={} + /> + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/ValuesColumn.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/ValuesColumn.tsx new file mode 100644 index 0000000000000..b050e06f96de8 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/ValuesColumn.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { PropertyRow } from './types'; +import { StdDataType } from '../../../../../types.generated'; +import StructuredPropertyValue from './StructuredPropertyValue'; + +interface Props { + propertyRow: PropertyRow; + filterText?: string; +} + +export default function ValuesColumn({ propertyRow, filterText }: Props) { + const { values } = propertyRow; + const isRichText = propertyRow.dataType?.info.type === StdDataType.RichText; + + return ( + <> + {values ? 
( + values.map((v) => ) + ) : ( + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/__tests__/utils.test.ts b/datahub-web-react/src/app/entity/shared/tabs/Properties/__tests__/utils.test.ts new file mode 100644 index 0000000000000..512510732d716 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/__tests__/utils.test.ts @@ -0,0 +1,87 @@ +import { getTestEntityRegistry } from '../../../../../../utils/test-utils/TestPageContainer'; +import { PropertyRow } from '../types'; +import { filterStructuredProperties } from '../utils'; + +describe('filterSchemaRows', () => { + const testEntityRegistry = getTestEntityRegistry(); + const rows = [ + { + displayName: 'Has PII', + qualifiedName: 'io.acryl.ads.data_protection.has_pii', + values: [{ value: 'yes', entity: null }], + }, + { + displayName: 'Discovery Date Utc', + qualifiedName: 'io.acryl.ads.change_management.discovery_date_utc', + values: [{ value: '2023-10-31', entity: null }], + }, + { + displayName: 'Link Data Location', + qualifiedName: 'io.acryl.ads.context.data_location', + values: [{ value: 'New York City', entity: null }], + }, + { + displayName: 'Number Prop', + qualifiedName: 'io.acryl.ads.number', + values: [{ value: 100, entity: null }], + }, + ] as PropertyRow[]; + + it('should properly filter structured properties based on field name', () => { + const filterText = 'has pi'; + const { filteredRows, expandedRowsFromFilter } = filterStructuredProperties( + testEntityRegistry, + rows, + filterText, + ); + + expect(filteredRows).toMatchObject([ + { + displayName: 'Has PII', + qualifiedName: 'io.acryl.ads.data_protection.has_pii', + values: [{ value: 'yes', entity: null }], + }, + ]); + expect(expandedRowsFromFilter).toMatchObject( + new Set(['io', 'io.acryl', 'io.acryl.ads', 'io.acryl.ads.data_protection']), + ); + }); + + it('should properly filter structured properties based on field value', () => { + const filterText = 'new york'; + const { filteredRows, expandedRowsFromFilter } = filterStructuredProperties( + testEntityRegistry, + rows, + filterText, + ); + + expect(filteredRows).toMatchObject([ + { + displayName: 'Link Data Location', + qualifiedName: 'io.acryl.ads.context.data_location', + values: [{ value: 'New York City', entity: null }], + }, + ]); + expect(expandedRowsFromFilter).toMatchObject( + new Set(['io', 'io.acryl', 'io.acryl.ads', 'io.acryl.ads.context']), + ); + }); + + it('should properly filter structured properties based on field value even for numbers', () => { + const filterText = '100'; + const { filteredRows, expandedRowsFromFilter } = filterStructuredProperties( + testEntityRegistry, + rows, + filterText, + ); + + expect(filteredRows).toMatchObject([ + { + displayName: 'Number Prop', + qualifiedName: 'io.acryl.ads.number', + values: [{ value: 100, entity: null }], + }, + ]); + expect(expandedRowsFromFilter).toMatchObject(new Set(['io', 'io.acryl', 'io.acryl.ads'])); + }); +}); diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/types.ts b/datahub-web-react/src/app/entity/shared/tabs/Properties/types.ts new file mode 100644 index 0000000000000..b93ba886d5a64 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/types.ts @@ -0,0 +1,25 @@ +import { DataTypeEntity, Entity, StructuredPropertyEntity } from '../../../../../types.generated'; + +export interface ValueColumnData { + value: string | number | null; + entity: Entity | null; +} + +export interface TypeData { + type: string; + nativeDataType: string; +} + 
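+
+// Illustrative example (hypothetical values, mirroring the unit tests elsewhere in this patch): the
+// PropertyRow interface below can represent a nested tree. A structured property
+// 'io.acryl.ads.data_protection.has_pii' might surface as a parent row
+// { displayName: 'io.acryl.ads', qualifiedName: 'io.acryl.ads', isParentRow: true }
+// whose children array holds the leaf
+// { displayName: 'Has PII', qualifiedName: 'io.acryl.ads.data_protection.has_pii', values: [{ value: 'yes', entity: null }] }.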
+
+export interface PropertyRow {
+    displayName: string;
+    qualifiedName: string;
+    values?: ValueColumnData[];
+    children?: PropertyRow[];
+    childrenCount?: number;
+    parent?: PropertyRow;
+    depth?: number;
+    type?: TypeData;
+    dataType?: DataTypeEntity;
+    isParentRow?: boolean;
+    structuredProperty?: StructuredPropertyEntity;
+}
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/useStructuredProperties.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/useStructuredProperties.tsx
new file mode 100644
index 0000000000000..5600d7c3e8498
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/useStructuredProperties.tsx
@@ -0,0 +1,215 @@
+import { PropertyValue, StructuredPropertiesEntry } from '../../../../../types.generated';
+import EntityRegistry from '../../../EntityRegistry';
+import { useEntityData } from '../../EntityContext';
+import { GenericEntityProperties } from '../../types';
+import { getStructuredPropertyValue } from '../../utils';
+import { PropertyRow } from './types';
+import { filterStructuredProperties } from './utils';
+
+const typeNameToType = {
+    StringValue: { type: 'string', nativeDataType: 'text' },
+    NumberValue: { type: 'number', nativeDataType: 'float' },
+};
+
+export function mapStructuredPropertyValues(structuredPropertiesEntry: StructuredPropertiesEntry) {
+    return structuredPropertiesEntry.values
+        .filter((value) => !!value)
+        .map((value) => ({
+            value: getStructuredPropertyValue(value as PropertyValue),
+            entity:
+                structuredPropertiesEntry.valueEntities?.find(
+                    (entity) => entity?.urn === getStructuredPropertyValue(value as PropertyValue),
+                ) || null,
+        }));
+}
+
+// map the properties map into a list of PropertyRow objects to render in a table
+function getStructuredPropertyRows(entityData?: GenericEntityProperties | null) {
+    const structuredPropertyRows: PropertyRow[] = [];
+
+    entityData?.structuredProperties?.properties?.forEach((structuredPropertiesEntry) => {
+        const { displayName, qualifiedName } = structuredPropertiesEntry.structuredProperty.definition;
+        structuredPropertyRows.push({
+            displayName: displayName || qualifiedName,
+            qualifiedName,
+            values: mapStructuredPropertyValues(structuredPropertiesEntry),
+            dataType: structuredPropertiesEntry.structuredProperty.definition.valueType,
+            structuredProperty: structuredPropertiesEntry.structuredProperty,
+            type:
+                structuredPropertiesEntry.values[0] && structuredPropertiesEntry.values[0].__typename
+                    ? {
+                          type: typeNameToType[structuredPropertiesEntry.values[0].__typename].type,
+                          nativeDataType: typeNameToType[structuredPropertiesEntry.values[0].__typename].nativeDataType,
+                      }
+                    : undefined,
+        });
+    });
+
+    return structuredPropertyRows;
+}
+
+export function findAllSubstrings(s: string): Array<string> {
+    const substrings: Array<string> = [];
+
+    for (let i = 0; i < s.length; i++) {
+        if (s[i] === '.') {
+            substrings.push(s.substring(0, i));
+        }
+    }
+    substrings.push(s);
+    return substrings;
+}
+
+export function createParentPropertyRow(displayName: string, qualifiedName: string): PropertyRow {
+    return {
+        displayName,
+        qualifiedName,
+        isParentRow: true,
+    };
+}
+
+export function identifyAndAddParentRows(rows?: Array<PropertyRow>): Array<PropertyRow> {
+    /**
+     * This function takes in an array of PropertyRow objects and determines which rows are parents. These parents need
+     * to be extracted in order to organize the rows into a properly nested structure later on. The final product returned
+     * is a list of parent rows, without values or children assigned.
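+     *
+     * A worked example with hypothetical fqns: given rows 'a.b.c' and 'a.b.d', the prefix counts are
+     * a -> 2, a.b -> 2, a.b.c -> 1. The drop from 2 to 1 marks 'a.b' as a parent row (childrenCount 2),
+     * while 'a' is skipped because the count does not change between 'a' and 'a.b'.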
+     */
+    const qualifiedNames: Array<string> = [];
+
+    // Get list of fqns
+    if (rows) {
+        rows.forEach((row) => {
+            qualifiedNames.push(row.qualifiedName);
+        });
+    }
+
+    const finalParents: PropertyRow[] = [];
+    const finalParentNames = new Set<string>();
+
+    // Loop through list of fqns and find all substrings.
+    // e.g. a.b.c.d becomes a, a.b, a.b.c, a.b.c.d
+    qualifiedNames.forEach((fqn) => {
+        let previousCount: number | null = null;
+        let previousParentName = '';
+
+        const substrings = findAllSubstrings(fqn);
+
+        // Loop through substrings and count how many other fqns have that substring in them. Use this to determine
+        // if a property should be nested. If the count is equal then we should not nest, because there's no split
+        // that would tell us to nest. If the count is not equal, we should nest the child properties.
+        for (let index = 0; index < substrings.length; index++) {
+            const token = substrings[index];
+            const currentCount = qualifiedNames.filter((name) => name.startsWith(token)).length;
+
+            // If we're at the beginning of the path and there is no nesting, break
+            if (index === 0 && currentCount === 1) {
+                break;
+            }
+
+            // Add the previous fqn (previousParentName) as a parent if we have found a viable parent path
+            if (previousCount !== null && previousCount !== currentCount) {
+                if (!finalParentNames.has(previousParentName)) {
+                    const parent: PropertyRow = createParentPropertyRow(previousParentName, previousParentName);
+                    parent.childrenCount = previousCount;
+                    finalParentNames.add(previousParentName);
+                    finalParents.push(parent);
+                }
+            }
+
+            previousCount = currentCount;
+            previousParentName = token;
+        }
+    });
+
+    return finalParents;
+}
+
+export function groupByParentProperty(rows?: Array<PropertyRow>): Array<PropertyRow> {
+    /**
+     * This function takes in an array of PropertyRow objects, representing parent and child properties. Parent properties
+     * will not have values, but child properties will. It organizes the rows into the parent and child structure and
+     * returns a list of PropertyRow objects representing it.
+     */
+    const outputRows: Array<PropertyRow> = [];
+    const outputRowByPath = {};
+
+    if (rows) {
+        // Iterate through all rows
+        for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) {
+            let parentRow: null | PropertyRow = null;
+            const row = { children: undefined, ...rows[rowIndex], depth: 0 };
+
+            // Iterate backwards through the rows seen so far, splitting the current row's path into tokens
+            // e.g. a, b, c for the example a.b.c
+            for (let j = rowIndex - 1; j >= 0; j--) {
+                const rowTokens = row.qualifiedName.split('.');
+                let parentPath: null | string = null;
+                let previousParentPath = rowTokens.slice(0, rowTokens.length - 1).join('.');
+
+                // Iterate through a row's path backwards, and check if the previous row's path has been seen. If it has,
+                // populate parentRow. If not, move on to the next path token.
+                // e.g. for a.b.c.d, first evaluate a.b.c to see if it has been seen. If it hasn't, move to a.b
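+                // A hypothetical walk-through: for 'a.b.c.d' with only 'a.b' already present in
+                // outputRowByPath, the scan tries 'a.b.c' first, misses, falls back to 'a.b',
+                // sets parentPath = 'a.b', and stops searching.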
+                for (
+                    let lastParentTokenIndex = rowTokens.length - 2;
+                    lastParentTokenIndex >= 0;
+                    --lastParentTokenIndex
+                ) {
+                    const lastParentToken: string = rowTokens[lastParentTokenIndex];
+                    if (lastParentToken && Object.keys(outputRowByPath).includes(previousParentPath)) {
+                        parentPath = rowTokens.slice(0, lastParentTokenIndex + 1).join('.');
+                        break;
+                    }
+                    previousParentPath = rowTokens.slice(0, lastParentTokenIndex).join('.');
+                }
+
+                if (parentPath && rows[j].qualifiedName === parentPath) {
+                    parentRow = outputRowByPath[rows[j].qualifiedName];
+                    break;
+                }
+            }
+
+            // If the parent row exists in the output, add the current row as a child. If not, add the current row
+            // to the final output.
+            if (parentRow) {
+                row.depth = (parentRow.depth || 0) + 1;
+                row.parent = parentRow;
+                if (row.isParentRow) {
+                    row.displayName = row.displayName.replace(`${parentRow.displayName}.`, '');
+                }
+                parentRow.children = [...(parentRow.children || []), row];
+            } else {
+                outputRows.push(row);
+            }
+            outputRowByPath[row.qualifiedName] = row;
+        }
+    }
+    return outputRows;
+}
+
+export default function useStructuredProperties(entityRegistry: EntityRegistry, filterText?: string) {
+    const { entityData } = useEntityData();
+
+    let structuredPropertyRowsRaw = getStructuredPropertyRows(entityData);
+    const parentRows = identifyAndAddParentRows(structuredPropertyRowsRaw);
+
+    structuredPropertyRowsRaw = [...structuredPropertyRowsRaw, ...parentRows];
+
+    const { filteredRows, expandedRowsFromFilter } = filterStructuredProperties(
+        entityRegistry,
+        structuredPropertyRowsRaw,
+        filterText,
+    );
+
+    // Sort by fqn before nesting algorithm
+    const copy = [...filteredRows].sort((a, b) => {
+        return a.qualifiedName.localeCompare(b.qualifiedName);
+    });
+
+    // group properties by path
+    const structuredPropertyRows = groupByParentProperty(copy);
+
+    return {
+        structuredPropertyRows,
+        expandedRowsFromFilter: expandedRowsFromFilter as Set<string>,
+    };
+}
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/useUpdateExpandedRowsFromFilter.ts b/datahub-web-react/src/app/entity/shared/tabs/Properties/useUpdateExpandedRowsFromFilter.ts
new file mode 100644
index 0000000000000..0dbe762c537db
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/useUpdateExpandedRowsFromFilter.ts
@@ -0,0 +1,23 @@
+import { useEffect } from 'react';
+import { isEqual } from 'lodash';
+import usePrevious from '../../../../shared/usePrevious';
+
+interface Props {
+    expandedRowsFromFilter: Set<string>;
+    setExpandedRows: React.Dispatch<React.SetStateAction<Set<string>>>;
+}
+
+export default function useUpdateExpandedRowsFromFilter({ expandedRowsFromFilter, setExpandedRows }: Props) {
+    const previousExpandedRowsFromFilter = usePrevious(expandedRowsFromFilter);
+
+    useEffect(() => {
+        if (!isEqual(expandedRowsFromFilter, previousExpandedRowsFromFilter)) {
+            setExpandedRows((previousRows) => {
+                const finalRowsSet = new Set<string>();
+                expandedRowsFromFilter.forEach((row) => finalRowsSet.add(row));
+                previousRows.forEach((row) => finalRowsSet.add(row));
+                return finalRowsSet as Set<string>;
+            });
+        }
+    }, [expandedRowsFromFilter, previousExpandedRowsFromFilter, setExpandedRows]);
+}
diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/utils.ts b/datahub-web-react/src/app/entity/shared/tabs/Properties/utils.ts
new file mode 100644
index 0000000000000..91870e2e37e07
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/utils.ts
@@ -0,0 +1,68 @@
+import { CustomPropertiesEntry } from
'../../../../../types.generated'; +import EntityRegistry from '../../../EntityRegistry'; +import { GenericEntityProperties } from '../../types'; +import { PropertyRow, ValueColumnData } from './types'; + +export function mapCustomPropertiesToPropertyRows(customProperties: CustomPropertiesEntry[]) { + return (customProperties?.map((customProp) => ({ + displayName: customProp.key, + values: [{ value: customProp.value || '' }], + type: { type: 'string', nativeDataType: 'string' }, + })) || []) as PropertyRow[]; +} + +function matchesName(name: string, filterText: string) { + return name.toLocaleLowerCase().includes(filterText.toLocaleLowerCase()); +} + +function matchesAnyFromValues(values: ValueColumnData[], filterText: string, entityRegistry: EntityRegistry) { + return values.some( + (value) => + matchesName(value.value?.toString() || '', filterText) || + matchesName(value.entity ? entityRegistry.getDisplayName(value.entity.type, value.entity) : '', filterText), + ); +} + +export function getFilteredCustomProperties(filterText: string, entityData?: GenericEntityProperties | null) { + return entityData?.customProperties?.filter( + (property) => matchesName(property.key, filterText) || matchesName(property.value || '', filterText), + ); +} + +export function filterStructuredProperties( + entityRegistry: EntityRegistry, + propertyRows: PropertyRow[], + filterText?: string, +) { + if (!propertyRows) return { filteredRows: [], expandedRowsFromFilter: new Set() }; + if (!filterText) return { filteredRows: propertyRows, expandedRowsFromFilter: new Set() }; + const formattedFilterText = filterText.toLocaleLowerCase(); + + const finalQualifiedNames = new Set(); + const expandedRowsFromFilter = new Set(); + + propertyRows.forEach((row) => { + // if we match on the qualified name (maybe from a parent) do not filter out + if (matchesName(row.qualifiedName, formattedFilterText)) { + finalQualifiedNames.add(row.qualifiedName); + } + // if we match specifically on this property (not just its parent), add and expand all parents + if ( + matchesName(row.displayName, formattedFilterText) || + matchesAnyFromValues(row.values || [], formattedFilterText, entityRegistry) + ) { + finalQualifiedNames.add(row.qualifiedName); + + const splitFieldPath = row.qualifiedName.split('.'); + splitFieldPath.reduce((previous, current) => { + finalQualifiedNames.add(previous); + expandedRowsFromFilter.add(previous); + return `${previous}.${current}`; + }); + } + }); + + const filteredRows = propertyRows.filter((row) => finalQualifiedNames.has(row.qualifiedName)); + + return { filteredRows, expandedRowsFromFilter }; +} diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts index d4e3965cd66f5..47cad4a69096d 100644 --- a/datahub-web-react/src/app/entity/shared/types.ts +++ b/datahub-web-react/src/app/entity/shared/types.ts @@ -38,6 +38,7 @@ import { BrowsePathV2, DataJobInputOutput, ParentDomainsResult, + StructuredProperties, } from '../../../types.generated'; import { FetchedEntity } from '../../lineage/types'; @@ -84,6 +85,7 @@ export type GenericEntityProperties = { platform?: Maybe; dataPlatformInstance?: Maybe; customProperties?: Maybe; + structuredProperties?: Maybe; institutionalMemory?: Maybe; schemaMetadata?: Maybe; externalUrl?: Maybe; diff --git a/datahub-web-react/src/app/entity/shared/utils.ts b/datahub-web-react/src/app/entity/shared/utils.ts index a158cc9b7c119..217aaaaf9dde8 100644 --- a/datahub-web-react/src/app/entity/shared/utils.ts +++ 
b/datahub-web-react/src/app/entity/shared/utils.ts @@ -1,6 +1,6 @@ import { Maybe } from 'graphql/jsutils/Maybe'; -import { Entity, EntityType, EntityRelationshipsResult, DataProduct } from '../../../types.generated'; +import { Entity, EntityType, EntityRelationshipsResult, DataProduct, PropertyValue } from '../../../types.generated'; import { capitalizeFirstLetterOnly } from '../../shared/textUtil'; import { GenericEntityProperties } from './types'; @@ -130,3 +130,13 @@ export function getDataProduct(dataProductResult: Maybe { it('logs in', () => { cy.visit('/'); - cy.get('input[data-testid=username]').type(Cypress.env('ADMIN_USERNAME')); - cy.get('input[data-testid=password]').type(Cypress.env('ADMIN_PASSWORD')); + cy.get('input[data-testid=username]').type('datahub'); + cy.get('input[data-testid=password]').type('datahub'); cy.contains('Sign In').click(); cy.contains('Welcome back, ' + Cypress.env('ADMIN_DISPLAYNAME')); }); diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/edit_documentation.js b/smoke-test/tests/cypress/cypress/e2e/mutations/edit_documentation.js index 5f9758a35ca0e..c6d2b205250e0 100644 --- a/smoke-test/tests/cypress/cypress/e2e/mutations/edit_documentation.js +++ b/smoke-test/tests/cypress/cypress/e2e/mutations/edit_documentation.js @@ -78,17 +78,18 @@ describe("edit documentation and link to dataset", () => { cy.visit( "/dataset/urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)/Schema" ); - cy.get("tbody [data-icon='edit']").first().click({ force: true }); + cy.clickOptionWithText("field_foo"); + cy.clickOptionWithTestId("edit-field-description"); cy.waitTextVisible("Update description"); cy.waitTextVisible("Foo field description has changed"); - cy.focused().clear().wait(1000); + cy.getWithTestId("description-editor").clear().wait(1000); cy.focused().type(documentation_edited); cy.clickOptionWithTestId("description-modal-update-button"); cy.waitTextVisible("Updated!"); cy.waitTextVisible(documentation_edited); cy.waitTextVisible("(edited)"); - cy.get("tbody [data-icon='edit']").first().click({ force: true }); - cy.focused().clear().wait(1000); + cy.clickOptionWithTestId("edit-field-description"); + cy.getWithTestId("description-editor").clear().wait(1000); cy.focused().type("Foo field description has changed"); cy.clickOptionWithTestId("description-modal-update-button"); cy.waitTextVisible("Updated!"); diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/mutations.js b/smoke-test/tests/cypress/cypress/e2e/mutations/mutations.js index 1baa33901724f..7f8a4e4f8f335 100644 --- a/smoke-test/tests/cypress/cypress/e2e/mutations/mutations.js +++ b/smoke-test/tests/cypress/cypress/e2e/mutations/mutations.js @@ -77,7 +77,7 @@ describe("mutations", () => { cy.login(); cy.viewport(2000, 800); cy.goToDataset("urn:li:dataset:(urn:li:dataPlatform:hive,cypress_logging_events,PROD)", "cypress_logging_events"); - cy.mouseover('[data-testid="schema-field-event_name-tags"]'); + cy.clickOptionWithText("event_name"); cy.get('[data-testid="schema-field-event_name-tags"]').within(() => cy.contains("Add Tag").click() ); @@ -116,7 +116,8 @@ describe("mutations", () => { // verify dataset shows up in search now cy.contains("of 1 result").click(); cy.contains("cypress_logging_events").click(); - cy.get('[data-testid="tag-CypressTestAddTag2"]').within(() => + cy.clickOptionWithText("event_name"); + cy.get('[data-testid="schema-field-event_name-tags"]').within(() => cy .get("span[aria-label=close]") .trigger("mouseover", { force: true }) @@ -134,10 +135,7 
@@ describe("mutations", () => { // make space for the glossary term column cy.viewport(2000, 800); cy.goToDataset("urn:li:dataset:(urn:li:dataPlatform:hive,cypress_logging_events,PROD)", "cypress_logging_events"); - cy.get('[data-testid="schema-field-event_name-terms"]').trigger( - "mouseover", - { force: true } - ); + cy.clickOptionWithText("event_name"); cy.get('[data-testid="schema-field-event_name-terms"]').within(() => cy.contains("Add Term").click({ force: true }) ); @@ -146,9 +144,12 @@ describe("mutations", () => { cy.contains("CypressTerm"); - cy.get( - 'a[href="/glossaryTerm/urn:li:glossaryTerm:CypressNode.CypressTerm"]' - ).within(() => cy.get("span[aria-label=close]").click({ force: true })); + cy.get('[data-testid="schema-field-event_name-terms"]').within(() => + cy + .get("span[aria-label=close]") + .trigger("mouseover", { force: true }) + .click({ force: true }) + ); cy.contains("Yes").click({ force: true }); cy.contains("CypressTerm").should("not.exist"); diff --git a/smoke-test/tests/cypress/cypress/e2e/schema_blame/schema_blame.js b/smoke-test/tests/cypress/cypress/e2e/schema_blame/schema_blame.js index 6e282b5249636..1ce1fbe900172 100644 --- a/smoke-test/tests/cypress/cypress/e2e/schema_blame/schema_blame.js +++ b/smoke-test/tests/cypress/cypress/e2e/schema_blame/schema_blame.js @@ -14,6 +14,7 @@ describe('schema blame', () => { cy.contains('field_bar').should('not.exist'); cy.contains('Foo field description has changed'); cy.contains('Baz field description'); + cy.clickOptionWithText("field_foo"); cy.get('[data-testid="schema-field-field_foo-tags"]').contains('Legacy'); // Make sure the schema blame is accurate @@ -41,6 +42,7 @@ describe('schema blame', () => { cy.contains('field_baz').should('not.exist'); cy.contains('Foo field description'); cy.contains('Bar field description'); + cy.clickOptionWithText("field_foo"); cy.get('[data-testid="schema-field-field_foo-tags"]').contains('Legacy').should('not.exist'); // Make sure the schema blame is accurate diff --git a/smoke-test/tests/cypress/cypress/support/commands.js b/smoke-test/tests/cypress/cypress/support/commands.js index f32512aff45fa..51b06a24c1921 100644 --- a/smoke-test/tests/cypress/cypress/support/commands.js +++ b/smoke-test/tests/cypress/cypress/support/commands.js @@ -218,6 +218,10 @@ Cypress.Commands.add( 'multiSelect', (within_data_id , text) => { cy.clickOptionWithText(text); }); +Cypress.Commands.add("getWithTestId", (id) => { + return cy.get(selectorWithtestId(id)); +}); + Cypress.Commands.add("enterTextInTestId", (id, text) => { cy.get(selectorWithtestId(id)).type(text); }) From caf6ebe3b7a7ebaafd2b5252763171f4dfbeb754 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Thu, 25 Jan 2024 10:40:22 -0800 Subject: [PATCH 438/792] docs(): Updating docs for assertions to correct databricks assertions support (#9713) Co-authored-by: John Joyce --- .../observe/custom-sql-assertions.md | 2 +- .../observe/freshness-assertions.md | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/docs/managed-datahub/observe/custom-sql-assertions.md b/docs/managed-datahub/observe/custom-sql-assertions.md index 11e9aa807b616..581b542688134 100644 --- a/docs/managed-datahub/observe/custom-sql-assertions.md +++ b/docs/managed-datahub/observe/custom-sql-assertions.md @@ -117,7 +117,7 @@ The **Assertion Description**: This is a human-readable description of the Asser ### Prerequisites 1. 
**Permissions**: To create or delete Custom SQL Assertions for a specific entity on DataHub, you'll need to be granted the
-   `Edit Assertions` and `Edit Monitors` privileges for the entity. This is granted to Entity owners by default.
+   `Edit Assertions`, `Edit Monitors`, **and the additional `Edit SQL Assertion Monitors`** privileges for the entity. This is granted to Entity owners by default.

2. **Data Platform Connection**: In order to create a Custom SQL Assertion, you'll need to have an **Ingestion Source** configured to your Data Platform: Snowflake, BigQuery, or Redshift under the **Integrations** tab.

diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md
index 416db6a65343e..9704f475b1587 100644
--- a/docs/managed-datahub/observe/freshness-assertions.md
+++ b/docs/managed-datahub/observe/freshness-assertions.md
@@ -107,12 +107,14 @@ Change Source types vary by the platform, but generally fall into these categories:

- **Audit Log** (Default): A metadata API or Table that is exposed by the Data Warehouse which captures information about the operations that have been performed to each Table. It is usually efficient to check, but some useful operations are not
-  fully supported across all major Warehouse platforms.
+  fully supported across all major Warehouse platforms. Note that for Databricks, [this option](https://docs.databricks.com/en/delta/history.html)
+  is only available for tables stored in Delta format.

- **Information Schema**: A system Table that is exposed by the Data Warehouse which contains live information about the Databases and Tables stored inside the Data Warehouse. It is usually efficient to check, but lacks detailed information about the _type_
-  of change that was last made to a specific table (e.g. the operation itself - INSERT, UPDATE, DELETE, number of impacted rows, etc).
+  of change that was last made to a specific table (e.g. the operation itself - INSERT, UPDATE, DELETE, number of impacted rows, etc).
+  Note that for Databricks, [this option](https://docs.databricks.com/en/delta/table-details.html) is only available for tables stored in Delta format.
+
- **Last Modified Column**: A Date or Timestamp column that represents the last time that a specific _row_ was touched or updated.
  Adding a Last Modified Column to each warehouse Table is a pattern often used for existing use cases around change management.
  If this change source is used, a query will be issued to the Table to search for rows that have been modified within a specific

@@ -128,8 +130,11 @@ Change Source types vary by the platform, but generally fall into these categories:
  This relies on Operations being reported to DataHub, either via ingestion or via use of the DataHub APIs (see [Report Operation via API](#reporting-operations-via-api)).
  Note if you have not configured an ingestion source through DataHub, then this may be the only option available. By default, any operation type found will be considered a valid change. Use the **Operation Types** dropdown when selecting this option to specify which operation types should be considered valid changes. You may choose from one of DataHub's standard Operation Types, or specify a "Custom" Operation Type by typing in the name of the Operation Type.

-  Using either of the column value approaches (**Last Modified Column** or **High Watermark Column**) to determine whether a Table has changed can be useful because it can be customized to determine whether specific types of important changes have been made to a given Table.
-  Because it does not involve system warehouse tables, it is also easily portable across Data Warehouse and Data Lake providers.
+  - **File Metadata** (Databricks Only): A column that is exposed by Databricks for both Unity Catalog and Hive Metastore based tables
+    which includes information about the last time that a file for the table was changed. Read more about it [here](https://docs.databricks.com/en/ingestion/file-metadata-column.html).
+
+  Using either of the column value approaches (**Last Modified Column** or **High Watermark Column**) to determine whether a Table has changed can be useful because it can be customized to determine whether specific types of changes have been made to a given Table.
+  And because this type of assertion does not involve system warehouse tables, it is easily portable across Data Warehouse and Data Lake providers.

Freshness Assertions also have an off switch: they can be started or stopped at any time with the click of a button.
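For reference, reporting an Operation via the API looks roughly like the sketch below. This is a minimal illustration, not part of this change: the GMS address and dataset URN are placeholders you would substitute, and the fields mirror the standard `Operation` aspect emitted through the Python SDK.

```python
import time

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.schema_classes import OperationClass, OperationTypeClass

# Placeholder GMS endpoint and dataset URN -- substitute your own values.
emitter = DatahubRestEmitter(gms_server="http://localhost:8080")
now_ms = int(time.time() * 1000)

# Report an INSERT operation; the "DataHub Operation" change source treats this
# reported operation as evidence that the table has changed.
emitter.emit(
    MetadataChangeProposalWrapper(
        entityUrn="urn:li:dataset:(urn:li:dataPlatform:snowflake,db.schema.table,PROD)",
        aspect=OperationClass(
            timestampMillis=now_ms,
            lastUpdatedTimestamp=now_ms,
            operationType=OperationTypeClass.INSERT,
        ),
    )
)
```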
- Using either of the column value approaches (**Last Modified Column** or **High Watermark Column**) to determine whether a Table has changed can be useful because it can be customized to determine whether specific types of important changes have been made to a given Table. - Because it does not involve system warehouse tables, it is also easily portable across Data Warehouse and Data Lake providers. + - **File Metadata** (Databricks Only): A column that is exposed by Databricks for both Unity Catalog and Hive Metastore based tables + which includes information about the last time that a file for the table was changed. Read more about it [here](https://docs.databricks.com/en/ingestion/file-metadata-column.html). + + Using either of the column value approaches (**Last Modified Column** or **High Watermark Column**) to determine whether a Table has changed can be useful because it can be customized to determine whether specific types of changes have been made to a given Table. + And because this type of assertion does not involve system warehouse tables, they are easily portable across Data Warehouse and Data Lake providers. Freshness Assertions also have an off switch: they can be started or stopped at any time with the click of button. From d292b35f2340e227a49ad872a156d7b2f15fb9a9 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 25 Jan 2024 12:41:51 -0600 Subject: [PATCH 439/792] test(spark-lineage): minor tweaks (#9717) --- .github/workflows/spark-smoke-test.yml | 18 +++++++++++++++++- docker/build.gradle | 17 ++++++++++++----- .../datahub/spark/TestSparkJobsLineage.java | 9 +++++++-- 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index e463e15243ee3..87fa3c85fc581 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -42,8 +42,12 @@ jobs: cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh + - name: Disk Check + run: df -h . && docker images - name: Remove images run: docker image prune -a -f || true + - name: Disk Check + run: df -h . 
&& docker images - name: Smoke test run: | ./gradlew :metadata-integration:java:spark-lineage:integrationTest \ @@ -54,12 +58,24 @@ jobs: -x :datahub-web-react:yarnBuild \ -x :datahub-web-react:distZip \ -x :datahub-web-react:jar + - name: store logs + if: failure() + run: | + docker ps -a + docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true + docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true + docker logs broker >& broker-${{ matrix.test_strategy }}.log || true + docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true + docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true + docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true - name: Upload logs uses: actions/upload-artifact@v3 if: failure() with: name: docker logs - path: "docker/build/container-logs/*.log" + path: | + "**/build/container-logs/*.log" + "*.log" - uses: actions/upload-artifact@v3 if: always() with: diff --git a/docker/build.gradle b/docker/build.gradle index cc95e12f26f76..b14739104a9f1 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -8,15 +8,17 @@ import com.avast.gradle.dockercompose.tasks.ComposeDownForced apply from: "../gradle/versioning/versioning.gradle" ext { - quickstart_modules = [ + backend_profile_modules = [ ':docker:elasticsearch-setup', ':docker:mysql-setup', ':docker:kafka-setup', ':datahub-upgrade', + ':metadata-service:war', + ] + quickstart_modules = backend_profile_modules + [ ':metadata-jobs:mce-consumer-job', ':metadata-jobs:mae-consumer-job', - ':metadata-service:war', - ':datahub-frontend', + ':datahub-frontend' ] debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', @@ -90,9 +92,14 @@ dockerCompose { removeVolumes = false } + /** + * The smallest disk footprint required for Spark integration tests + * + * No frontend, mae, mce, or other services + */ quickstartSlim { isRequiredBy(tasks.named('quickstartSlim')) - composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + composeAdditionalArgs = ['--profile', 'quickstart-backend'] environment.put 'DATAHUB_VERSION', "v${version}" environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" @@ -132,7 +139,7 @@ tasks.getByName('quickstartComposeUp').dependsOn( tasks.getByName('quickstartPgComposeUp').dependsOn( pg_quickstart_modules.collect { it + ':dockerTag' }) tasks.getByName('quickstartSlimComposeUp').dependsOn( - ([':docker:datahub-ingestion'] + quickstart_modules) + ([':docker:datahub-ingestion'] + backend_profile_modules) .collect { it + ':dockerTag' }) tasks.getByName('quickstartDebugComposeUp').dependsOn( debug_modules.collect { it + ':dockerTagDebug' } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index fa896814d16f6..a4eb035b0abce 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -136,6 +136,7 @@ public static void resetBaseExpectations() { .respond(HttpResponse.response().withStatusCode(200)); } + @BeforeClass public static void init() { mockServer = startClientAndServer(GMS_PORT); resetBaseExpectations(); @@ -219,8 +220,12 @@ private static void clear() { @AfterClass public static void tearDown() throws Exception { - spark.stop(); - mockServer.stop(); + if 
(spark != null) { + spark.stop(); + } + if (mockServer != null) { + mockServer.stop(); + } } private static void check(List expected, List actual) { From acec2a7159bc7fcc7ad37a3709b3c68d5d26536e Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Thu, 25 Jan 2024 13:04:50 -0600 Subject: [PATCH 440/792] feat(search): support filtering on count type searchable fields for equality (#9700) --- .../linkedin/metadata/models/EntitySpec.java | 15 ++ .../metadata/models/EntitySpecBuilder.java | 4 +- .../models/registry/ConfigEntityRegistry.java | 2 +- .../models/registry/MergedEntityRegistry.java | 21 +-- .../models/registry/PatchEntityRegistry.java | 28 ++- .../registry/SnapshotEntityRegistry.java | 2 +- .../models/EntitySpecBuilderTest.java | 16 +- .../elasticsearch/query/ESBrowseDAO.java | 18 +- .../elasticsearch/query/ESSearchDAO.java | 15 +- .../request/AutocompleteRequestHandler.java | 19 +- .../query/request/SearchRequestHandler.java | 23 ++- .../metadata/search/utils/ESUtils.java | 172 +++++++++++++----- .../ElasticSearchTimeseriesAspectService.java | 45 +++-- .../elastic/query/ESAggregatedStatsDAO.java | 4 +- .../search/fixtures/GoldenTestBase.java | 40 +++- .../indexbuilder/MappingsBuilderTest.java | 3 +- .../request/SearchRequestHandlerTest.java | 2 +- .../metadata/search/utils/ESUtilsTest.java | 31 ++-- .../TimeseriesAspectServiceTestBase.java | 59 ++++++ .../test/search/SearchTestUtils.java | 18 ++ .../long_tail/datasetindex_v2.json.gz | Bin 183656 -> 183668 bytes .../com/datahub/test/TestEntityInfo.pdl | 6 + 22 files changed, 430 insertions(+), 113 deletions(-) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java index e4c9dd55a3b4a..fac08c7e20646 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java @@ -3,8 +3,11 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.schema.TyperefDataSchema; import com.linkedin.metadata.models.annotation.EntityAnnotation; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; /** A specification of a DataHub Entity */ @@ -36,6 +39,18 @@ default List getSearchableFieldSpecs() { .collect(Collectors.toList()); } + default Map> getSearchableFieldSpecMap() { + return getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + searchableFieldSpec -> searchableFieldSpec.getSearchableAnnotation().getFieldName(), + searchableFieldSpec -> new HashSet<>(Collections.singleton(searchableFieldSpec)), + (set1, set2) -> { + set1.addAll(set2); + return set1; + })); + } + default List getSearchScoreFieldSpecs() { return getAspectSpecs().stream() .map(AspectSpec::getSearchScoreFieldSpecs) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java index 580134f566871..54f2206798da0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java @@ -248,9 +248,9 @@ public AspectSpec buildAspectSpec( // Extract SearchScore Field Specs final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = new SearchScoreFieldSpecExtractor(); - final DataSchemaRichContextTraverser 
searcScoreFieldSpecTraverser = + final DataSchemaRichContextTraverser searchScoreFieldSpecTraverser = new DataSchemaRichContextTraverser(searchScoreFieldSpecExtractor); - searcScoreFieldSpecTraverser.traverse(processedSearchScoreResult.getResultSchema()); + searchScoreFieldSpecTraverser.traverse(processedSearchScoreResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedRelationshipResult = SchemaAnnotationProcessor.process( diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index 41043995a3b77..9aed29ab8595e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -91,7 +91,7 @@ private static Pair getFileAndClassPath(String entityRegistryRoot) .filter(Files::isRegularFile) .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) .collect(Collectors.toList()); - if (yamlFiles.size() == 0) { + if (yamlFiles.isEmpty()) { throw new EntityRegistryException( String.format( "Did not find an entity registry (entity_registry.yaml/yml) under %s", diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java index 650a1cd41066e..0dcd0420d4df8 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java @@ -58,7 +58,7 @@ private void validateEntitySpec(EntitySpec entitySpec, final ValidationResult va validationResult.setValid(false); validationResult .getValidationFailures() - .add(String.format("Key aspect is missing in entity {}", entitySpec.getName())); + .add(String.format("Key aspect is missing in entity %s", entitySpec.getName())); } } @@ -86,7 +86,7 @@ public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) } // Merge Event Specs - if (patchEntityRegistry.getEventSpecs().size() > 0) { + if (!patchEntityRegistry.getEventSpecs().isEmpty()) { eventNameToSpec.putAll(patchEntityRegistry.getEventSpecs()); } // TODO: Validate that the entity registries don't have conflicts among each other @@ -116,19 +116,18 @@ private void checkMergeable( if (existingEntitySpec != null) { existingEntitySpec .getAspectSpecMap() - .entrySet() .forEach( - aspectSpecEntry -> { - if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) { + (key, value) -> { + if (newEntitySpec.hasAspect(key)) { CompatibilityResult result = CompatibilityChecker.checkCompatibility( - aspectSpecEntry.getValue().getPegasusSchema(), - newEntitySpec.getAspectSpec(aspectSpecEntry.getKey()).getPegasusSchema(), + value.getPegasusSchema(), + newEntitySpec.getAspectSpec(key).getPegasusSchema(), new CompatibilityOptions()); if (result.isError()) { log.error( "{} schema is not compatible with previous schema due to {}", - aspectSpecEntry.getKey(), + key, result.getMessages()); // we want to continue processing all aspects to collect all failures validationResult.setValid(false); @@ -137,11 +136,11 @@ private void checkMergeable( .add( String.format( "%s schema is not compatible with previous schema due to %s", - aspectSpecEntry.getKey(), result.getMessages())); + key, 
result.getMessages())); } else { log.info( "{} schema is compatible with previous schema due to {}", - aspectSpecEntry.getKey(), + key, result.getMessages()); } } @@ -222,7 +221,7 @@ public PluginFactory getPluginFactory() { @Setter @Getter - private class ValidationResult { + private static class ValidationResult { boolean valid = true; List validationFailures = new ArrayList<>(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java index b82b905c50004..b4fc4193e7263 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java @@ -71,19 +71,17 @@ public class PatchEntityRegistry implements EntityRegistry { @Override public String toString() { StringBuilder sb = new StringBuilder("PatchEntityRegistry[" + "identifier=" + identifier + ';'); - entityNameToSpec.entrySet().stream() - .forEach( - entry -> - sb.append("[entityName=") - .append(entry.getKey()) - .append(";aspects=[") - .append( - entry.getValue().getAspectSpecs().stream() - .map(spec -> spec.getName()) - .collect(Collectors.joining(","))) - .append("]]")); - eventNameToSpec.entrySet().stream() - .forEach(entry -> sb.append("[eventName=").append(entry.getKey()).append("]")); + entityNameToSpec.forEach( + (key1, value1) -> + sb.append("[entityName=") + .append(key1) + .append(";aspects=[") + .append( + value1.getAspectSpecs().stream() + .map(AspectSpec::getName) + .collect(Collectors.joining(","))) + .append("]]")); + eventNameToSpec.forEach((key, value) -> sb.append("[eventName=").append(key).append("]")); return sb.toString(); } @@ -119,7 +117,7 @@ private static Pair getFileAndClassPath(String entityRegistryRoot) .filter(Files::isRegularFile) .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) .collect(Collectors.toList()); - if (yamlFiles.size() == 0) { + if (yamlFiles.isEmpty()) { throw new EntityRegistryException( String.format( "Did not find an entity registry (entity-registry.yaml/yml) under %s", @@ -175,7 +173,7 @@ private PatchEntityRegistry( entities = OBJECT_MAPPER.readValue(configFileStream, Entities.class); this.pluginFactory = PluginFactory.withCustomClasspath(entities.getPlugins(), classLoaders); } catch (IOException e) { - e.printStackTrace(); + log.error("Unable to read Patch configuration.", e); throw new IllegalArgumentException( String.format( "Error while reading config file in path %s: %s", configFileStream, e.getMessage())); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index 8fefa2fe00ae8..22aeddb6ac65f 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -120,7 +120,7 @@ public AspectTemplateEngine getAspectTemplateEngine() { } @Override - public EventSpec getEventSpec(final String ignored) { + public EventSpec getEventSpec(@Nonnull final String ignored) { return null; } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index 
d9cf8fd2603a8..8b043569dd16a 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -189,7 +189,7 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { testEntityInfo.getPegasusSchema().getFullName()); // Assert on Searchable Fields - assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11); + assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 12); assertEquals( "customProperties", testEntityInfo @@ -340,6 +340,20 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { .get(new PathSpec("doubleField").toString()) .getSearchableAnnotation() .getFieldType()); + assertEquals( + "removed", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("removed").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.BOOLEAN, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("removed").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Fields assertEquals(4, testEntityInfo.getRelationshipFieldSpecs().size()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index 3c71a2dfd9180..d610ea4b4e028 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -19,6 +19,7 @@ import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; @@ -33,6 +34,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -554,7 +556,8 @@ private QueryBuilder buildQueryStringV2( queryBuilder.filter(QueryBuilders.rangeQuery(BROWSE_PATH_V2_DEPTH).gt(browseDepthVal)); - queryBuilder.filter(SearchRequestHandler.getFilterQuery(filter)); + queryBuilder.filter( + SearchRequestHandler.getFilterQuery(filter, entitySpec.getSearchableFieldSpecMap())); return queryBuilder; } @@ -580,7 +583,18 @@ private QueryBuilder buildQueryStringBrowseAcrossEntities( queryBuilder.filter(QueryBuilders.rangeQuery(BROWSE_PATH_V2_DEPTH).gt(browseDepthVal)); - queryBuilder.filter(SearchRequestHandler.getFilterQuery(filter)); + Map> searchableFields = + entitySpecs.stream() + .flatMap(entitySpec -> entitySpec.getSearchableFieldSpecMap().entrySet().stream()) + .collect( + Collectors.toMap( + Map.Entry::getKey, + Map.Entry::getValue, + (set1, set2) -> { + set1.addAll(set2); + return set1; + })); + queryBuilder.filter(SearchRequestHandler.getFilterQuery(filter, searchableFields)); return queryBuilder; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 0eb44edfb11de..1ec90ed6f61e2 
100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -78,7 +78,8 @@ public long docCount(@Nonnull String entityName) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); CountRequest countRequest = new CountRequest(indexConvention.getIndexName(entitySpec)) - .query(SearchRequestHandler.getFilterQuery(null)); + .query( + SearchRequestHandler.getFilterQuery(null, entitySpec.getSearchableFieldSpecMap())); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) { return client.count(countRequest, RequestOptions.DEFAULT).getCount(); } catch (IOException e) { @@ -315,9 +316,17 @@ public Map aggregateByValue( @Nonnull String field, @Nullable Filter requestParams, int limit) { + List entitySpecs; + if (entityNames == null || entityNames.isEmpty()) { + entitySpecs = new ArrayList<>(entityRegistry.getEntitySpecs().values()); + } else { + entitySpecs = + entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); + } final SearchRequest searchRequest = - SearchRequestHandler.getAggregationRequest( - field, transformFilterForEntities(requestParams, indexConvention), limit); + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getAggregationRequest( + field, transformFilterForEntities(requestParams, indexConvention), limit); if (entityNames == null) { String indexName = indexConvention.getAllEntityIndicesPattern(); searchRequest.indices(indexName); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index cdcdae2f3d311..333d9602734d2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -14,6 +14,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.utils.ESUtils; import java.net.URISyntaxException; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -40,19 +41,33 @@ public class AutocompleteRequestHandler { private final List _defaultAutocompleteFields; + private final Map> searchableFields; private static final Map AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); public AutocompleteRequestHandler(@Nonnull EntitySpec entitySpec) { + List fieldSpecs = entitySpec.getSearchableFieldSpecs(); _defaultAutocompleteFields = Stream.concat( - entitySpec.getSearchableFieldSpecs().stream() + fieldSpecs.stream() .map(SearchableFieldSpec::getSearchableAnnotation) .filter(SearchableAnnotation::isEnableAutocomplete) .map(SearchableAnnotation::getFieldName), Stream.of("urn")) .collect(Collectors.toList()); + searchableFields = + fieldSpecs.stream() + .collect( + Collectors.toMap( + searchableFieldSpec -> + searchableFieldSpec.getSearchableAnnotation().getFieldName(), + searchableFieldSpec -> + new HashSet<>(Collections.singleton(searchableFieldSpec)), + (set1, set2) -> { + set1.addAll(set2); + return set1; + })); } public static AutocompleteRequestHandler getBuilder(@Nonnull EntitySpec entitySpec) { @@ -66,7 +81,7 @@ public SearchRequest 
getSearchRequest( SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(limit); searchSourceBuilder.query(getQuery(input, field)); - searchSourceBuilder.postFilter(ESUtils.buildFilterQuery(filter, false)); + searchSourceBuilder.postFilter(ESUtils.buildFilterQuery(filter, false, searchableFields)); searchSourceBuilder.highlighter(getHighlights(field)); searchRequest.source(searchSourceBuilder); return searchRequest; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index c5a5ade216bf7..e6ee909c80dae 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -97,6 +97,7 @@ public class SearchRequestHandler { private final SearchConfiguration _configs; private final SearchQueryBuilder _searchQueryBuilder; private final AggregationQueryBuilder _aggregationQueryBuilder; + private final Map> searchableFields; private SearchRequestHandler( @Nonnull EntitySpec entitySpec, @@ -121,6 +122,17 @@ private SearchRequestHandler( _searchQueryBuilder = new SearchQueryBuilder(configs, customSearchConfiguration); _aggregationQueryBuilder = new AggregationQueryBuilder(configs, annotations); _configs = configs; + searchableFields = + _entitySpecs.stream() + .flatMap(entitySpec -> entitySpec.getSearchableFieldSpecMap().entrySet().stream()) + .collect( + Collectors.toMap( + Map.Entry::getKey, + Map.Entry::getValue, + (set1, set2) -> { + set1.addAll(set2); + return set1; + })); } public static SearchRequestHandler getBuilder( @@ -169,8 +181,13 @@ private BinaryOperator mapMerger() { }; } - public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { - BoolQueryBuilder filterQuery = ESUtils.buildFilterQuery(filter, false); + public BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { + return getFilterQuery(filter, searchableFields); + } + + public static BoolQueryBuilder getFilterQuery( + @Nullable Filter filter, Map> searchableFields) { + BoolQueryBuilder filterQuery = ESUtils.buildFilterQuery(filter, false, searchableFields); return filterSoftDeletedByDefault(filter, filterQuery); } @@ -354,7 +371,7 @@ public SearchRequest getFilterRequest( * @return {@link SearchRequest} that contains the aggregation query */ @Nonnull - public static SearchRequest getAggregationRequest( + public SearchRequest getAggregationRequest( @Nonnull String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filter); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index aa854149de43a..77a67f100895c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -7,7 +7,6 @@ import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.StructuredPropertyUtils; @@ -18,11 +17,13 @@ 
import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -32,6 +33,7 @@ import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.RangeQueryBuilder; import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.FieldSortBuilder; @@ -76,6 +78,13 @@ public class ESUtils { SearchableAnnotation.FieldType.BROWSE_PATH_V2, SearchableAnnotation.FieldType.URN, SearchableAnnotation.FieldType.URN_PARTIAL); + + public static final Set RANGE_QUERY_CONDITIONS = + Set.of( + Condition.GREATER_THAN, + Condition.GREATER_THAN_OR_EQUAL_TO, + Condition.LESS_THAN, + Condition.LESS_THAN_OR_EQUAL_TO); public static final String ENTITY_NAME_FIELD = "_entityName"; public static final String NAME_SUGGESTION = "nameSuggestion"; @@ -100,9 +109,6 @@ public class ESUtils { } }; - // TODO - This has been expanded for has* in another branch - public static final Set BOOLEAN_FIELDS = ImmutableSet.of("removed"); - /* * Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/regexp-syntax.html for list of reserved * characters in an Elasticsearch regular expression. @@ -123,7 +129,10 @@ private ESUtils() {} * @return built filter query */ @Nonnull - public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean isTimeseries) { + public static BoolQueryBuilder buildFilterQuery( + @Nullable Filter filter, + boolean isTimeseries, + final Map> searchableFields) { BoolQueryBuilder finalQueryBuilder = QueryBuilders.boolQuery(); if (filter == null) { return finalQueryBuilder; @@ -134,7 +143,8 @@ public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean .getOr() .forEach( or -> - finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); + finalQueryBuilder.should( + ESUtils.buildConjunctiveFilterQuery(or, isTimeseries, searchableFields))); } else if (filter.getCriteria() != null) { // Otherwise, build boolean query from the deprecated "criteria" field. log.warn("Received query Filter with a deprecated field 'criteria'. 
Use 'or' instead."); @@ -146,7 +156,8 @@ public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean if (!criterion.getValue().trim().isEmpty() || criterion.hasValues() || criterion.getCondition() == Condition.IS_NULL) { - andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); + andQueryBuilder.must( + getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFields)); } }); finalQueryBuilder.should(andQueryBuilder); @@ -156,7 +167,9 @@ public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean @Nonnull public static BoolQueryBuilder buildConjunctiveFilterQuery( - @Nonnull ConjunctiveCriterion conjunctiveCriterion, boolean isTimeseries) { + @Nonnull ConjunctiveCriterion conjunctiveCriterion, + boolean isTimeseries, + Map> searchableFields) { final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); conjunctiveCriterion .getAnd() @@ -167,9 +180,11 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery( || criterion.hasValues()) { if (!criterion.isNegated()) { // `filter` instead of `must` (enables caching and bypasses scoring) - andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); + andQueryBuilder.filter( + getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFields)); } else { - andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); + andQueryBuilder.mustNot( + getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFields)); } } }); @@ -205,7 +220,9 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery( */ @Nonnull public static QueryBuilder getQueryBuilderFromCriterion( - @Nonnull final Criterion criterion, boolean isTimeseries) { + @Nonnull final Criterion criterion, + boolean isTimeseries, + final Map> searchableFields) { final String fieldName = toFacetField(criterion.getField()); if (fieldName.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD)) { criterion.setField(fieldName); @@ -224,10 +241,10 @@ public static QueryBuilder getQueryBuilderFromCriterion( if (maybeFieldToExpand.isPresent()) { return getQueryBuilderFromCriterionForFieldToExpand( - maybeFieldToExpand.get(), criterion, isTimeseries); + maybeFieldToExpand.get(), criterion, isTimeseries, searchableFields); } - return getQueryBuilderFromCriterionForSingleField(criterion, isTimeseries); + return getQueryBuilderFromCriterionForSingleField(criterion, isTimeseries, searchableFields); } public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType fieldType) { @@ -378,7 +395,7 @@ public static String toFacetField(@Nonnull final String filterField) { @Nonnull public static String toKeywordField( - @Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { + @Nonnull final String filterField, final boolean skipKeywordSuffix) { return skipKeywordSuffix || KEYWORD_FIELDS.contains(filterField) || PATH_HIERARCHY_FIELDS.contains(filterField) @@ -428,7 +445,8 @@ public static void setSearchAfter( private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( @Nonnull final List fields, @Nonnull final Criterion criterion, - final boolean isTimeseries) { + final boolean isTimeseries, + final Map> searchableFields) { final BoolQueryBuilder orQueryBuilder = new BoolQueryBuilder(); for (String field : fields) { Criterion criterionToQuery = new Criterion(); @@ -442,14 +460,17 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( } criterionToQuery.setField(toKeywordField(field, isTimeseries)); orQueryBuilder.should( 
- getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); + getQueryBuilderFromCriterionForSingleField( + criterionToQuery, isTimeseries, searchableFields)); } return orQueryBuilder; } @Nonnull private static QueryBuilder getQueryBuilderFromCriterionForSingleField( - @Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { + @Nonnull Criterion criterion, + boolean isTimeseries, + final Map> searchableFields) { final Condition condition = criterion.getCondition(); final String fieldName = toFacetField(criterion.getField()); @@ -463,24 +484,11 @@ private static QueryBuilder getQueryBuilderFromCriterionForSingleField( .queryName(fieldName); } else if (criterion.hasValues() || criterion.hasValue()) { if (condition == Condition.EQUAL) { - return buildEqualsConditionFromCriterion(fieldName, criterion, isTimeseries); - // TODO: Support multi-match on the following operators (using new 'values' field) - } else if (condition == Condition.GREATER_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()) - .gt(criterion.getValue().trim()) - .queryName(fieldName); - } else if (condition == Condition.GREATER_THAN_OR_EQUAL_TO) { - return QueryBuilders.rangeQuery(criterion.getField()) - .gte(criterion.getValue().trim()) - .queryName(fieldName); - } else if (condition == Condition.LESS_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()) - .lt(criterion.getValue().trim()) - .queryName(fieldName); - } else if (condition == Condition.LESS_THAN_OR_EQUAL_TO) { - return QueryBuilders.rangeQuery(criterion.getField()) - .lte(criterion.getValue().trim()) - .queryName(fieldName); + return buildEqualsConditionFromCriterion( + fieldName, criterion, isTimeseries, searchableFields); + } else if (RANGE_QUERY_CONDITIONS.contains(condition)) { + return buildRangeQueryFromCriterion( + criterion, fieldName, searchableFields, condition, isTimeseries); } else if (condition == Condition.CONTAIN) { return QueryBuilders.wildcardQuery( toKeywordField(criterion.getField(), isTimeseries), @@ -504,13 +512,15 @@ private static QueryBuilder getQueryBuilderFromCriterionForSingleField( private static QueryBuilder buildEqualsConditionFromCriterion( @Nonnull final String fieldName, @Nonnull final Criterion criterion, - final boolean isTimeseries) { + final boolean isTimeseries, + final Map> searchableFields) { /* * If the newer 'values' field of Criterion.pdl is set, then we * handle using the following code to allow multi-match. */ if (!criterion.getValues().isEmpty()) { - return buildEqualsConditionFromCriterionWithValues(fieldName, criterion, isTimeseries); + return buildEqualsConditionFromCriterionWithValues( + fieldName, criterion, isTimeseries, searchableFields); } /* * Otherwise, we are likely using the deprecated 'value' field. @@ -526,21 +536,95 @@ private static QueryBuilder buildEqualsConditionFromCriterion( private static QueryBuilder buildEqualsConditionFromCriterionWithValues( @Nonnull final String fieldName, @Nonnull final Criterion criterion, - final boolean isTimeseries) { - if (BOOLEAN_FIELDS.contains(fieldName) && criterion.getValues().size() == 1) { - // Handle special-cased Boolean fields. - // here we special case boolean fields we recognize the names of and hard-cast - // the first provided value to a boolean to do the comparison. - // Ideally, we should detect the type of the field from the entity-registry in order - // to determine how to cast. 
+ final boolean isTimeseries, + final Map> searchableFields) { + Set fieldTypes = getFieldTypes(searchableFields, fieldName); + if (fieldTypes.size() > 1) { + log.warn( + "Multiple field types for field name {}, determining best fit for set: {}", + fieldName, + fieldTypes); + } + if (fieldTypes.contains(BOOLEAN_FIELD_TYPE) && criterion.getValues().size() == 1) { return QueryBuilders.termQuery(fieldName, Boolean.parseBoolean(criterion.getValues().get(0))) .queryName(fieldName); + } else if (fieldTypes.contains(LONG_FIELD_TYPE) || fieldTypes.contains(DATE_FIELD_TYPE)) { + List longValues = + criterion.getValues().stream().map(Long::parseLong).collect(Collectors.toList()); + return QueryBuilders.termsQuery(fieldName, longValues).queryName(fieldName); + } else if (fieldTypes.contains(DOUBLE_FIELD_TYPE)) { + List doubleValues = + criterion.getValues().stream().map(Double::parseDouble).collect(Collectors.toList()); + return QueryBuilders.termsQuery(fieldName, doubleValues).queryName(fieldName); } return QueryBuilders.termsQuery( toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) .queryName(fieldName); } + private static Set getFieldTypes( + Map> searchableFields, String fieldName) { + Set fieldSpecs = + searchableFields.getOrDefault(fieldName, Collections.emptySet()); + Set fieldTypes = + fieldSpecs.stream() + .map(SearchableFieldSpec::getSearchableAnnotation) + .map(SearchableAnnotation::getFieldType) + .map(ESUtils::getElasticTypeForFieldType) + .collect(Collectors.toSet()); + if (fieldTypes.size() > 1) { + log.warn( + "Multiple field types for field name {}, determining best fit for set: {}", + fieldName, + fieldTypes); + } + return fieldTypes; + } + + private static RangeQueryBuilder buildRangeQueryFromCriterion( + Criterion criterion, + String fieldName, + Map> searchableFields, + Condition condition, + boolean isTimeseries) { + Set fieldTypes = getFieldTypes(searchableFields, fieldName); + + // Determine criterion value, range query only accepts single value so take first value in + // values if multiple + String criterionValueString; + if (!criterion.getValues().isEmpty()) { + criterionValueString = criterion.getValues().get(0).trim(); + } else { + criterionValueString = criterion.getValue().trim(); + } + Object criterionValue; + String documentFieldName; + if (fieldTypes.contains(BOOLEAN_FIELD_TYPE)) { + criterionValue = Boolean.parseBoolean(criterionValueString); + documentFieldName = criterion.getField(); + } else if (fieldTypes.contains(LONG_FIELD_TYPE) || fieldTypes.contains(DATE_FIELD_TYPE)) { + criterionValue = Long.parseLong(criterionValueString); + documentFieldName = criterion.getField(); + } else if (fieldTypes.contains(DOUBLE_FIELD_TYPE)) { + criterionValue = Double.parseDouble(criterionValueString); + documentFieldName = criterion.getField(); + } else { + criterionValue = criterionValueString; + documentFieldName = toKeywordField(criterion.getField(), isTimeseries); + } + + // Set up QueryBuilder based on condition + if (condition == Condition.GREATER_THAN) { + return QueryBuilders.rangeQuery(documentFieldName).gt(criterionValue).queryName(fieldName); + } else if (condition == Condition.GREATER_THAN_OR_EQUAL_TO) { + return QueryBuilders.rangeQuery(documentFieldName).gte(criterionValue).queryName(fieldName); + } else if (condition == Condition.LESS_THAN) { + return QueryBuilders.rangeQuery(documentFieldName).lt(criterionValue).queryName(fieldName); + } else /*if (condition == Condition.LESS_THAN_OR_EQUAL_TO)*/ { + return 
QueryBuilders.rangeQuery(documentFieldName).lte(criterionValue).queryName(fieldName); + } + } + /** * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created * using the deprecated 'value' field of Criterion.pdl model. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index a2b36b7d8ddb8..6cf8e92d61929 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -14,6 +14,7 @@ import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.Criterion; @@ -290,7 +291,12 @@ public long countByFilter( @Nullable final Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = - QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + QueryBuilders.boolQuery() + .must( + ESUtils.buildFilterQuery( + filter, + true, + _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap())); CountRequest countRequest = new CountRequest(); countRequest.query(filterQueryBuilder); countRequest.indices(indexName); @@ -313,8 +319,10 @@ public List getAspectValues( @Nullable final Integer limit, @Nullable final Filter filter, @Nullable final SortCriterion sort) { + Map> searchableFields = + _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap(); final BoolQueryBuilder filterQueryBuilder = - QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true, searchableFields)); filterQueryBuilder.must(QueryBuilders.matchQuery("urn", urn.toString())); // NOTE: We are interested only in the un-exploded rows as only they carry the `event` payload. 
filterQueryBuilder.mustNot(QueryBuilders.termQuery(MappingsBuilder.IS_EXPLODED_FIELD, true)); @@ -324,7 +332,8 @@ public List getAspectValues( .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) .setValue(startTimeMillis.toString()); - filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); + filterQueryBuilder.must( + ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true, searchableFields)); } if (endTimeMillis != null) { Criterion endTimeCriterion = @@ -332,7 +341,8 @@ public List getAspectValues( .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) .setValue(endTimeMillis.toString()); - filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); + filterQueryBuilder.must( + ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true, searchableFields)); } final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQueryBuilder); @@ -400,7 +410,9 @@ public GenericTable getAggregatedStats( public DeleteAspectValuesResult deleteAspectValues( @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); + final BoolQueryBuilder filterQueryBuilder = + ESUtils.buildFilterQuery( + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); final Optional result = _bulkProcessor @@ -426,7 +438,9 @@ public String deleteAspectValuesAsync( @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); + final BoolQueryBuilder filterQueryBuilder = + ESUtils.buildFilterQuery( + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); final int batchSize = options.getBatchSize() > 0 ? 
options.getBatchSize() : DEFAULT_LIMIT; TimeValue timeout = options.getTimeoutSeconds() > 0 @@ -450,7 +464,9 @@ public String reindexAsync( @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); + final BoolQueryBuilder filterQueryBuilder = + ESUtils.buildFilterQuery( + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); try { return this.reindexAsync(indexName, filterQueryBuilder, options); } catch (Exception e) { @@ -498,8 +514,11 @@ public TimeseriesScrollResult scrollAspects( int count, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + + Map> searchableFields = + _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap(); final BoolQueryBuilder filterQueryBuilder = - QueryBuilders.boolQuery().filter(ESUtils.buildFilterQuery(filter, true)); + QueryBuilders.boolQuery().filter(ESUtils.buildFilterQuery(filter, true, searchableFields)); if (startTimeMillis != null) { Criterion startTimeCriterion = @@ -507,7 +526,8 @@ public TimeseriesScrollResult scrollAspects( .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) .setValue(startTimeMillis.toString()); - filterQueryBuilder.filter(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); + filterQueryBuilder.filter( + ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true, searchableFields)); } if (endTimeMillis != null) { Criterion endTimeCriterion = @@ -515,7 +535,8 @@ public TimeseriesScrollResult scrollAspects( .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) .setValue(endTimeMillis.toString()); - filterQueryBuilder.filter(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); + filterQueryBuilder.filter( + ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true, searchableFields)); } SearchResponse response = @@ -537,7 +558,7 @@ public TimeseriesScrollResult scrollAspects( } private SearchResponse executeScrollSearchQuery( - @Nonnull final String entityNname, + @Nonnull final String entityName, @Nonnull final String aspectName, @Nonnull final QueryBuilder query, @Nonnull List sortCriterion, @@ -560,7 +581,7 @@ private SearchResponse executeScrollSearchQuery( searchRequest.source(searchSourceBuilder); ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null); - searchRequest.indices(_indexConvention.getTimeseriesAspectIndexName(entityNname, aspectName)); + searchRequest.indices(_indexConvention.getTimeseriesAspectIndexName(entityName, aspectName)); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "scrollAspects_search").time()) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java index 539e5dfbaa1d0..f8b2cd8552357 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java @@ -377,7 +377,9 @@ public GenericTable getAggregatedStats( @Nullable GroupingBucket[] groupingBuckets) { // Setup the filter query builder using the input filter provided. 
- final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); + final BoolQueryBuilder filterQueryBuilder = + ESUtils.buildFilterQuery( + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); AspectSpec aspectSpec = getTimeseriesAspectSpec(entityName, aspectName); // Build and attach the grouping aggregations diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index d2aef982750bd..4c125065deb4d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -1,18 +1,27 @@ package com.linkedin.metadata.search.fixtures; +import static com.linkedin.metadata.Constants.*; import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities; import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; -import static org.testng.Assert.assertTrue; +import static org.testng.Assert.*; import static org.testng.AssertJUnit.assertNotNull; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -169,6 +178,35 @@ public void testNameMatchCustomerOrders() { assertTrue(firstResultScore > secondResultScore); } + @Test + public void testFilterOnCountField() { + assertNotNull(getSearchService()); + Filter filter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("rowCount") + .setValue("") + .setValues(new StringArray(ImmutableList.of("68")))))))); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), + "*", + SEARCHABLE_LONGTAIL_ENTITIES, + filter, + Collections.singletonList(DATASET_ENTITY_NAME)); + assertFalse(searchResult.getEntities().isEmpty()); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + assertEquals( + firstResultUrn.toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,long_tail_companions.analytics.dogs_in_movies,PROD)"); + } + /* Tests that should pass but do not yet can be added below here, with the following annotation: @Test(enabled = false) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index 6df31b35fecde..8d504c562c99c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -21,7 +21,7 @@ public void testMappingsBuilder() { Map result = MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec()); assertEquals(result.size(), 1); Map properties = (Map) result.get("properties"); - assertEquals(properties.size(), 20); + assertEquals(properties.size(), 21); assertEquals( properties.get("urn"), ImmutableMap.of( @@ -52,6 +52,7 @@ public void testMappingsBuilder() { assertEquals(properties.get("runId"), ImmutableMap.of("type", "keyword")); assertTrue(properties.containsKey("browsePaths")); assertTrue(properties.containsKey("browsePathV2")); + assertTrue(properties.containsKey("removed")); // KEYWORD Map keyPart3Field = (Map) properties.get("keyPart3"); assertEquals(keyPart3Field.get("type"), "keyword"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java index daf2ac58002e0..02c9ea800f0af 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java @@ -614,7 +614,7 @@ public void testBrowsePathQueryFilter() { Filter filter = new Filter(); filter.setOr(conjunctiveCriterionArray); - BoolQueryBuilder test = SearchRequestHandler.getFilterQuery(filter); + BoolQueryBuilder test = SearchRequestHandler.getFilterQuery(filter, new HashMap<>()); assertEquals(test.should().size(), 1); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java index 980b82194536e..838df98fdce9c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java @@ -4,6 +4,7 @@ import com.linkedin.data.template.StringArray; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.Criterion; +import java.util.HashMap; import org.opensearch.index.query.QueryBuilder; import org.testng.Assert; import org.testng.annotations.Test; @@ -21,7 +22,8 @@ public void testGetQueryBuilderFromCriterionEqualsValues() { .setCondition(Condition.EQUAL) .setValues(new StringArray(ImmutableList.of("value1"))); - QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + QueryBuilder result = + ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false, new HashMap<>()); String expected = "{\n" + " \"terms\" : {\n" @@ -40,7 +42,7 @@ public void testGetQueryBuilderFromCriterionEqualsValues() { .setCondition(Condition.EQUAL) .setValues(new StringArray(ImmutableList.of("value1", "value2"))); - result = ESUtils.getQueryBuilderFromCriterion(multiValueCriterion, false); + result = ESUtils.getQueryBuilderFromCriterion(multiValueCriterion, false, new HashMap<>()); expected = "{\n" + " \"terms\" : {\n" @@ -60,7 +62,7 @@ public void testGetQueryBuilderFromCriterionEqualsValues() { .setCondition(Condition.EQUAL) .setValues(new StringArray(ImmutableList.of("value1", "value2"))); - result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true, new HashMap<>()); expected = "{\n" + " \"terms\" : {\n" @@ -80,7 +82,8 @@ public void 
testGetQueryBuilderFromCriterionExists() { final Criterion singleValueCriterion = new Criterion().setField("myTestField").setCondition(Condition.EXISTS); - QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + QueryBuilder result = + ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false, new HashMap<>()); String expected = "{\n" + " \"bool\" : {\n" @@ -103,7 +106,7 @@ public void testGetQueryBuilderFromCriterionExists() { final Criterion timeseriesField = new Criterion().setField("myTestField").setCondition(Condition.EXISTS); - result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true, new HashMap<>()); expected = "{\n" + " \"bool\" : {\n" @@ -128,7 +131,8 @@ public void testGetQueryBuilderFromCriterionIsNull() { final Criterion singleValueCriterion = new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); - QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + QueryBuilder result = + ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false, new HashMap<>()); String expected = "{\n" + " \"bool\" : {\n" @@ -151,7 +155,7 @@ public void testGetQueryBuilderFromCriterionIsNull() { final Criterion timeseriesField = new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); - result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true, new HashMap<>()); expected = "{\n" + " \"bool\" : {\n" @@ -182,7 +186,8 @@ public void testGetQueryBuilderFromCriterionFieldToExpand() { .setValues(new StringArray(ImmutableList.of("value1"))); // Ensure that the query is expanded! - QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + QueryBuilder result = + ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false, new HashMap<>()); String expected = "{\n" + " \"bool\" : {\n" @@ -220,7 +225,7 @@ public void testGetQueryBuilderFromCriterionFieldToExpand() { .setValues(new StringArray(ImmutableList.of("value1", "value2"))); // Ensure that the query is expanded without keyword. 
- result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true, new HashMap<>()); expected = "{\n" + " \"bool\" : {\n" @@ -262,7 +267,8 @@ public void testGetQueryBuilderFromStructPropEqualsValue() { .setCondition(Condition.EQUAL) .setValues(new StringArray(ImmutableList.of("value1"))); - QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + QueryBuilder result = + ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false, new HashMap<>()); String expected = "{\n" + " \"terms\" : {\n" @@ -281,7 +287,8 @@ public void testGetQueryBuilderFromStructPropExists() { final Criterion singleValueCriterion = new Criterion().setField("structuredProperties.ab.fgh.ten").setCondition(Condition.EXISTS); - QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + QueryBuilder result = + ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false, new HashMap<>()); String expected = "{\n" + " \"bool\" : {\n" @@ -304,7 +311,7 @@ public void testGetQueryBuilderFromStructPropExists() { final Criterion timeseriesField = new Criterion().setField("myTestField").setCondition(Condition.EXISTS); - result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true, new HashMap<>()); expected = "{\n" + " \"bool\" : {\n" diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java index 8d7701f6d174f..23ca4a4a4247e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java @@ -485,6 +485,65 @@ public void testGetAggregatedStatsLatestStatForDay1() { _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString()))); } + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) + public void testGetAggregatedStatsLatestStatForDay1WithValues() { + // Filter is only on the urn + Criterion hasUrnCriterion = + new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValues(new StringArray(_startTime.toString())) + .setValue(""); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValues(new StringArray(String.valueOf(_startTime + 23 * TIME_INCREMENT))) + .setValue(""); + + Filter filter = + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + + // Aggregate on latest stat value + AggregationSpec latestStatAggregationSpec = + new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat"); + + // Grouping bucket is only timestamp filed. 
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
+    // Validate column names
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
+    // Validate column types
+    assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long"));
+    // Validate rows
+    assertNotNull(resultTable.getRows());
+    assertEquals(resultTable.getRows().size(), 1);
+    assertEquals(
+        resultTable.getRows(),
+        new StringArrayArray(
+            new StringArray(
+                _startTime.toString(),
+                _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString())));
+  }
+
   @Test(
       groups = {"getAggregatedStats"},
       dependsOnGroups = {"upsert"})
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
index a22a774065852..f3689f9b5d04a 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
@@ -15,6 +15,7 @@
 import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper;
 import com.linkedin.metadata.graph.LineageDirection;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.LineageSearchResult;
 import com.linkedin.metadata.search.LineageSearchService;
 import com.linkedin.metadata.search.ScrollResult;
@@ -70,6 +71,23 @@ public static SearchResult searchAcrossEntities(
         facets);
   }
 
+  public static SearchResult searchAcrossEntities(
+      SearchService searchService,
+      String query,
+      @Nullable List<String> facets,
+      Filter filter,
+      List<String> entityNames) {
+    return searchService.searchAcrossEntities(
+        entityNames,
+        query,
+        filter,
+        null,
+        0,
+        100,
+        new SearchFlags().setFulltext(true).setSkipCache(true),
+        facets);
+  }
+
   public static SearchResult searchAcrossCustomEntities(
       SearchService searchService, String query, List<String> searchableEntities) {
     return searchService.searchAcrossEntities(
diff --git a/metadata-io/src/test/resources/elasticsearch/long_tail/datasetindex_v2.json.gz b/metadata-io/src/test/resources/elasticsearch/long_tail/datasetindex_v2.json.gz
index dd48fe240cdf2f2047a500840288c525bbde3e22..5a412ff4b14e0f0a4e4a3e0e371e83223091369e 100644
GIT binary patch
delta 149868
[149868 bytes of base85-encoded binary delta omitted; not human-readable]
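
Note for reviewers: the new three-argument ESUtils entry point is easiest to see end to end outside the test harness. The sketch below is not part of the patch; the class name ESUtilsSketch and the final println are illustrative only. It mirrors the criterion built in GoldenTestBase.testFilterOnCountField, and deliberately passes an empty searchableFields map so that no field type resolves and the value falls back to the string/keyword handling:

import com.google.common.collect.ImmutableList;
import com.linkedin.data.template.StringArray;
import com.linkedin.metadata.query.filter.Condition;
import com.linkedin.metadata.query.filter.Criterion;
import com.linkedin.metadata.search.utils.ESUtils;
import java.util.HashMap;
import org.opensearch.index.query.QueryBuilder;

public class ESUtilsSketch {
  public static void main(String[] args) {
    // Single-value numeric criterion, as in testFilterOnCountField.
    Criterion rowCountCriterion =
        new Criterion()
            .setField("rowCount")
            .setCondition(Condition.GREATER_THAN)
            .setValue("")
            .setValues(new StringArray(ImmutableList.of("68")));

    // With an empty searchableFields map the resolved field-type set is empty,
    // so buildRangeQueryFromCriterion treats "68" as a string and targets the
    // keyword-suffixed field. Passing the entity's getSearchableFieldSpecMap()
    // instead would resolve rowCount to a long and emit a typed range query,
    // which is what the new golden test asserts.
    QueryBuilder query =
        ESUtils.getQueryBuilderFromCriterion(rowCountCriterion, false, new HashMap<>());
    System.out.println(query);
  }
}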
zUsT&Q)gb}E>D(bWq}RcgPzn6{P*9=af~sW)Bz3~jr!sGeU}_%;UMiACMM|=_SkiaS z)t$f`bo7IVZ1&31m)<=OsAdPP^!6Y2Rtwp(metY0EbvTs9LL&hB-{ZP@zREwe}&$% zz5>}h`vPB<<#IJJ2pbjeI9?^0{uO#6y}JhX@(BUq>F)?X%MgBRP}t4F%c6&f)vf!o zPW`)HN6L^We|NyJtvxCbJCmKhy;qByfP?7aC}9z669;uWQExLmcXW`wV^cWa#Tx$b zoT@MvBQnHJ)+jOzRyG%!-?tr>{du&m@UMd8xyKu}3MFkWxAd~MU>hoqs%ARHOvH8G z>zsgdC-f4p(45o*4N~O5Z?Zl3c>2Q9Ks#l!Z@rL3Br2P(2Xe0@g=0H&Qtgk4F8xVr z(+E3c1j<@rY*Umc_>#4ki7pvI4MZsP#f5?T-=M`JM@0(V1qffcyj*h;H^`C>9q~I_ z@b=Gq^t3XvU}LyFi4D!zL!=tEdvMM~|D*uZ#W}XQy3X}K6zuGku&wkKSLCh17Ep~; zCE6+&h;xc!GL1ID5A{Lg3BQ00PTF$$xfUg3{0?WrTWgCEyo12CUpu?!Y)ti#C|E1dtMJMvY;sf=DO2CrrUok)UN zGDSUPq7v;)S_xswa53WH7(VgBvvZlaccK#8dXANijzLig;m84M@PMap--i&FsJ}J6 z_J*S~%jI|iOuw)& zq^zfeE{hLp2CN}|j}!8haYS)nYMOl9aRzZvXOsJEmRVpQNj%aRqq6;RZl+^rPwHFl z0Tq#4+fjB$ETzt;-KL!JiEQ~()UutnRakZKmz`D>EQ*P$J+l-flk_23q!AeYs1$`r z#yhpAKxHi_aL>!pOrBYQAUZ&q{%#xoF~{&}ra0A>6k6ettTfD?N>cIh7(VId1$k6B zDU&NaL4Io5n~0y9<~zY;I}yJOxT&w-rwR6X$R^l`Q@0c%25<7~YWvp4euyJOJg4Ju zJ8s{1-P0bNtM}{6B6-TS<$k*6xD?cK<~S`oVZAuMJ(s_ zOI#`&?`vIf~B3r7wGAl+h*8VBSKNahTyxl+jr-iKc3*T6we7bpfm%m5KKf6Ou zq?~^^s#DHNg?|QtP25cGk2=bpFS<|CSuAV(Jm92u569 z6vDT@GK!SZy+FxqnS=Hq&OA>cys1(|XMY3iOV_b-H|jAmgVbf-_B&ETkPK-YUNFuE z_FTB{=eq{b3zaN;#VzKg;P-i(^XkuB{ruui)a9e?^M1Xdm|MC!lM?3fd2(;kKz`b) z>CP1|i4fcAa_hQR68T*c)1p)1I^(B$+cnd~h)Y0?QytMxJaT4WW9U4);VtPkb*BtHj)cJ?uI4I? zQ%dC=au=e+Wz}B9pD&zfq<%jj|M+=cgN)SobR#)8E{Y-lDl-wRF*p8*{9wIMR#)yL zZj`)$b2rQrzxID4g1ogpxz}<`g_{HbJhDnobkDzawFX|+eC|Ip$E@TC)^Q)BX0Yq z&Qr5?i`omVUp=0Te-NH`Pv*OQAVf_)6qUyKV4Jx5%JiA~5jNvNb$raUQ}t_!eK6m_ zmeDnSNs=9qPj>)e-|?m6mfR!Z&no~-o^&BZpCqxQB+!P7F?Qrfh}6^D>N0ZIL%+Qfa^sdq;1- z0aZUMvQ9+%i9eO1B*b2V5V=Dq+Guvd(;?&^Qwwi{r77=yc)^5&9z{{ei3}!kj;yUw z-MWYE;WJYVcU=REjRk~!P9AQ~PE`JgYsknWUjj#%!yM@2A{^?8Z2zqTiM^904-$$j zG2*OxkW%Pg?~T70Wy!Y80i&KF8}g;Vs19iTBzL$FmpSMHt=mKCcYR!`4kgLXRcm!O zC#Ao=PBZIn%6or$Y$jY_ySQEsV;*QVjByV?$ za@Gbn)w@3&{}<3P_HBNmBljhYP{AtdnL^mKqZxH)dh-}Q#dR^3^#nSR!KozG`9fBc z$C5f9ulIXOOq)=8PHHWFWc{Tpotz3(lT{o=NtiiyCYuyHl*~g1e;?Q%g!75N#p>^q z=xeS{<;!-!et=Q?A~wxh6tsC0lBrIfJmVVFBw`Y1oh`OGv z4!hm;3B%;o(Dl1TL^sAJVt(OsWLX0U0@9j$)%oep-R(Re zADRf~)hyTGZN$Jn5R1I~pY3&c$q)_eHGtJk;MZhzMknsZg4&q9B?D)Ivz;BSxJot> z+0l9sN`^o8S9kw#S^_I~_is}w@pwK3w!N03Y}8X!&STn*x(40>f$_zoZ(z8{??mLa zsV2g?+QgJ%UtAl(rWJL9OZ@}r@E38kYIMnZ2W1NXIq?Q<;?<^1KOH--=)#Q$BH)?O z$)0!tx&D_=v`DO2D;C<7lXM~;YJJgdF&`Mk0`~N=MlSTx?;78~o5Gp;P45oL#Hhi} zscL>aM@8;rEE}u~NUVN|E`y#lL3@|hq3#ln*{H>cnE8|9fOd33X^OOaNQNFQaCp}Yu znz#w5vkcNA_Y(<8)0Qu9Q6{8Gdo2YIE8PCO+XBsifK7?QAGr;a>4~A5v5s6>bW5Sf z3Ti+#?k5k2>fEOtPzUm?AUZzclgCQ@t)5d6aV3R^nr&<&_B6xi?DIhe#=q+GNORP8 z|KZ~&7yGrMr&$d`>Z8u!&}rMB)Y^3e zieM0Tbr>91gJfNy20VaVBJ<_LHf~O5D-9rH)B@VsO{06&Fg;iC8aJXdUbF*s zYeP!mVxgx!ZWeh|TshH9M+p;kMlW>&|EjMRpGN*Zg9}1O%7Ut02(b&(C0OFpXZb;S z8<*>_O=@V`BRy?i$!#YpF?>~I&z+EnH^GqA#AGG`-3ofZQ3>e0tVrAir7~Rqr7|O+ zRHl~hVMFdtfDIJNM4O8q-3+VXDX?X!di;d#Gw~AeKJqA?z;9P2F9xVf+b@HjIiXTX0awHJwxj+ek-s9QNh2 zb+J2igEan9WUBL^y!mIIK!;U{#yQ`SH(eJ<5$SK*@U|y z7(O9eA6L);`8;Gqvru}F&x6TzU&IAOTH^H@XR#9cS4$?KE}*+y=Ws>bG?oj?E=%FU zEzcNW1>(zwjE4Vy(?>=RKbUR~rS1A4m2gh?Vyk^@+N^_aG@D@~-m5CL-$X5~VwhXI&mN+5W#finYDaxrc?J-p1*L>I|#~!^vqr;*ZeZ zXusn)dH$VhC&l|0tkqS8X4&>_f;;Lg&-MflF9P-z07`^Uz#iD@cdT9uZ-VOPyxrj9L-?qBe4C#On0|BeoYAf z6kimulhhT1F}qiRzduX&@DQW*M8*|#L>Qh}tX<7Y*5v)ph1P|%!`nJH_JSI`S$}1U z$C|`BLT%iU3>|zeVUp)l4-jUZ2*laNb9CCwVO2V#$~HWfdZViKbJC)~?nF%r;T*kl zsrnHg=w04B*;xp*ft+4tUwY~#$mw0(z1<7#PK+zpQ&Ys-hqxrfeeWtGqf$fD3#ik5 zwWfF$3e?HDclsBVYSuyyL%o|K6WB{XgF)3?)NN*mwBK|f&EKpDPz^KR@}1-Gh!uc5 zDE@yQbm~73+6MBVTi(~5nY>k5)&?Id^0EjU$e00c3&A7z9q9L(S*|rurU@+0ET*6! 
zG7ZXE#58u2=_E(796zWfevT&np5@$5Jy~gpv%+`eNv&)nCx&Oo!#Ac$@^#CD zH|eL9>y3taVEP?mJL8g-Ke~v8g7a{+ZAK`-wa_ zalWxdW^TCmVW&+q98zlvfV`5}ROSPbZ2d>}YwlYRVzumK-co=QXDtbxY@s~vVRcs= zV3G1_H|R-}<&85pP#^G{g(15Ky$_pgPYrVDl8?^~zUg&n@QrLtS|sFAAdZ4F${@TW z++=+L8*@ShX;SG5+Lh5(dqqJ>rfy6KPA4S~w+qDZ-~MCx77)YV_ECWtKG9k5PFmQP zrOzWV4YfI41t;<0x1vF z4A6b^FJy_)Q6b@$2>C6fHmlf?r-8egyDd2TokEvAvXmp*Pq6!-u>NZP31*)SQu+cE zH>3PLT_^b?KOnW%3VKt!-wnCEuYo^3>_my=>ps-)S@n10QY5B&#(*r%7}aQ@F%>+a z2DSC1_!MdoKM+1mC|!MdK%8z+v9+-(7nElY63xgv)xe{;xMN#TI+UMKNmAHsJ+ulnd?;)uNirw%z(K?Z4d_7LcUht@w1bm@&%rEGrwHp1oQd@<&{n}T%q3DG>qQ{Q9>)pyTWrE# zXK*p_Q8@j4p(FJ?kpBFkkLR%b)BY-wuYKB%bK)C!X(~rM1+^KHQdAsbfo(4~h8{9< z|E7L_qjUx^W=L1w)}a(vG|VIG!GZTW%cDtU=nPHJ5e>2cv8nnA4~)|BMa5xe$dM{P zVZq70^i!Pp<}wzG8WO4fLjGl}SSD%Ii~a+r`zDaKeE4KPwq-5Rz)~AKkiVSUY4bx# z62fUF-0qrqM4&eL%7e(zVF(d}-@6FhQH_Bh150kw z4!rH&z#UX*WDt5qzYbr4i!#prsWU{kjWUkUECP#XmYLYBI>x&b@tTMB90YOPLs~wc z(*T5?F{)cbFq1&=ms&YwHfVNL;i{2mjCTp=?zj6X06#cAw-Y|vr16a-1?9;IAXr5D ziPy0u>LUh!cY#W2TUaTch9IueyP7uN1Fq(ffLB5-_eo7g6IRP^G~Wxv)ti0hPGY?`)sOq;j`EQ83|Qqm^OKON)a@j(6h3K(G)P7`5#9Y)wvmDQT-yu}7(uSutj2({ z5C}CTR5_9pIMaMO6YJl3^D=*ywtOk_kA=Ymfe^J&W1pvURb|w&Du|;nw$*O*fzAh1 zKu?9ZRuZ>p6N!Hyxpj>I-qdaph7i-)@({yQV~P^lw%#JWW^(tnp1-`#u3 z=WBCfF%$Zdm$6o@) z6?eJQ32Xsw^HS5+m4KM4h27%+|fJZmYB&G*gOl7P~ZrVqc{!z)Tb zKYDOaEy%aGhoPEJdAvVjB$N}tHLFa=Q(-IGr;OE!`zP;eP~;Wq_yW9syx9+eq=4=B zT;QF!rvt@hy_c#V6skS&A1(7WJ(X_0rcm6l;XL6y-y})~eUS#Mi%uf;R z3btTX6@65dp;^g~z9~#`O|Rx>cdH%>gIS&(qUDLrk6*`^;%NepSaLFeTG<`4^`?1!&Tck=|YyUp~Y*m>V+D;H7Pmmr>;|feZ!K|QSZ(!8ZfY7tJ;Z9 z7iE8TlqIFbkiTty;R~5*OdT6Tgt=Kmf#ZacUP0|zMb{p-h`6Wo=U6t zU>SD34On-Ky!rEs7n6pi|EHt{>?`7#p!{$kf1BQHPi?FV0-hz%)ppRbjD=q;uwma~ z%XQ)PrugB7*5MP?i(#o8k{%oQ?q|-I-r1hz!q#GANI#3+uC|yjm&B)#`EMM2Aiw0& zp)QvpBF6fG$rcU;rR+_*#_lkc=tDzWz@Q;p|19)vw5cv*>z^b4@%!o(@HXC1ISOkj z>@u1kq%wWs(@X+Hc~(~b#r!d_@vi(Mm7Y(KyfZr0c)y%#CcAKzN65STGp@G&l4>P$ z33~sLz+q4P#vt`gBlUB25nW`|;)1S#>R6wAr!Wc`D-?lHfejpyGiiin4a!fOEx0H5 zQDO>ADVus9hBAspPSPHG8V+6hT*hL)qW-E9mmom_f+-rvw0aJe4q$oz*nNE6dE7ZY z-3@77o8+A?OVy!iq#`-+n9mjbJeW&m6#Te#rk*|6efvky2*(*NOm3h$VtPU)hDF3E zTYLY3@pMmKF+cEccS~Gr0`nJ7YH_q|!IPrUf)?L9R?e~8%hcb}>5j5SEKhGol+YmtVO7+eJ-N9wWb%lqNm>K!e52gV zTjp20rJfOZTUL7z%zG0_w8O874LjLL`^XCm&I#An;kG z;d$TOPOH4Z%1-OtL#%159KkoxxqWV~;=4eo7-C?op?t`IUC!`s9+ zlfjs!+R5o?r_S60og3?bF55*q_r>K;H$1q=MzaB=WhZ4l0v6ys8Sy&qbLKlkN*=w# zjmX$aS5NI|dZ(z#17E}6maCwfo9I0ao%R|9R(rfH+2ZsyZVTJm#I{FHx6-FSTH23C zt{p(7J_P|;f&R32Qrwbcm*C8`bYHFP2hV|0(8_%I(cK@5yCwNdM~r3nke-0j%j?Ae zN78*(?>OhJeY)+!b;v*WH*hc?!$o$>x|irQ{gbj6)KdYP#I#ofLp3{D@L2UB;wD{K3KQ-EFej$!m~Pd?=G` zx#5P0y*9Pr^6M*X1UPgKohC6;P26+Ac)3_Cm#1p?+73$9)O9mg7^+W5;9qKq4IjBF zwqb|kc&~CV+eS}vz1I&9>PLFoM_Y1^A_wVb%a}iU2fQ zQs*Z!M5JOev)N-7}VOeGNmPv%OWaWa$!tN3VLlV(y>y)#Hb;Yb4{91w5a74Z~Et zI}lMb$*53cU(Gm-hSXegyxRHLaaVEhe!=c}j@j;xbvZF`azO66`|1O@dV5{zZ{$|1 zbB;l~0DF2h7JS_;ovrP+m-Y(wlNuT&OYTTVB}lSYr|4VxcW2&j4z>5kvn6NFF5>fg zIjF8zQu$-Dx7aWD4vv7Y-+^9s`*Q-Ho3kenlghbyV&&=U=hdmbnf2YJi?5R+L|1>q zphiY^>Wblt=u(O!s{p{LMNd?GYG&Z+biKM_w7a?@yxcGjo_#nyy?nHCeRa4*MI-D8 zojc&Ky?;a8FwoO8(5JUQX+lQY+*o*E_uM^1MMDeQDEYSKRf2_eyu14r^X9Gb^IKa- zmu~CW+`mk?gyR9!M4vv+68F?p4_8me6aCF9ix6GiUF)ZmZ(hLPrIMx8qyAxNcUQ0G zHiK*f16@7ct3*BW)Cew8U+Um?PGFHgWbRO9$9OGY#`^{8TDVB+Lu z;GKR%f44R?bnQ&_^p~}dcPs;8#89-*_$40;^gZ9jnFYxCf*I5WU5(D|JMwf2a!Ikh39Q){#8ntLR24C8x>zKM$$;MvLJ zn!40H?A=1H(4J30$C&ceKU8u_fAiYt?(69l8(vO$GwBp=pYog`!`t0MhG##eIHhs4 zU%NK8Hg|CQwsY?z9nlLZ@_cxhlCo)ib$WPvn2;0~N)9|w)5kZbmS`+#xF0{-dHQ+Y z31z!RUW0da3v{;p_Vn@gbZzU<&dK$9Lei+U&^`g+u4Fj?ABhdiN4ijwhRK_O`eQ60)w7A*x4=pgW+&$U=5}f@HWsj9^V!%ICpA%~)VGP&uKdS{Mt;z? 
z8CxM^py#~XGB_lB0WUCByzvK_D_sZGS=QK2`b}8}6|kfIb=%7*U%I$Z47QNLG#9g7 zfpmE;N$#{q;XXf1@5Zi~Kkt5mvZ!ppT4cSb)#$2=&f}udqF^oRVTKpHa@ZL9kv>PD zkN_+Z%1+-|$4)EnF6DR*KHoH&>oc7yhqmu0$rF*VP91N1aqAJUAMfjZg1Zhr2+ka8 zf3ljUJZ#n6R(uA~)V~syI?4Ft;YPWar^DR1buO1Xcdk>kdz&0kS}s!BCMQxjPw}_? zGTP!VCZOM>=^E3X+YnJ|sPxXtOq(^?0x^VrH^C$tG_J|2B?VZkef8pAd z2S*U5aznmB(K0H03Dp>;M^4i!zk6vN8)m z9$a5Sl$>HlwyblI9KG1)S(4aCD1)9sH?hpU&$XA!j9rYpMHj(>8l5wbqX5Uo^y8{Q z#E1F6cWdt6Y5iyG6*ODMH}@6D0}6m0^FZk^)R@@xrsgwxZeVR-YNp#!kgeK?ySVnb z^_;=4e2__Cfnbs77+c);DU|Qq>6GB_M;>0?OpcISRvwr06wm4|tqJ^8E11Q|CU^Ry z3(TmW+1DjLtigc+i5dp)Xfi9j9ZRR7ryUh{mAEr@4E2EC>y)ix z&hg}-lxKqQd&Q^K>)hM(^U33yOw z1j5WSr9kS&))Is(;aQjpS4wWaExuvNE3C2Dr|j?BU9_d=8QyAx1Jw6*PKinLpYL#- zu(uaIeYoS^i$z)DY4(o0?Qnqb=A>inuZ;YT*Ezd5SV%*=l*x(NR;^b_ixl;3pfYe< zxiRl8t(XRuLAoVeeGI}Rb{&yb1>x!k`KY3)suH_Qqi<)#TTD)jR0hYEvaFOJMv6r# zb;AiG7kV^EP|)NbJ}yw3X~DgWG#yMn4+b%J=z?uT0>k~lxWWzRjf|Z6A4i4T zqP+}bNmIIV$_4H|ov#3IxflC+_G^bw_|&NmOYXwczCgv5o3Hv-{Bva&`HhyZ>@Mr; zJ)uHD_qk_fYAY%kj?u##B~rg5oQ||?rd?YqqC%IIs1k&RpU{-WHUcq(Gj zZJpgTp z5I$G;U#)GO-a7&I)-II{U7$j)x?7!M$$&QTWPvdAv)w{n)*eIQfWqD-3W1q3vM1Bm zpd@Q`d0KJbJhH|mGA*`|KteL)blo|EeZ?`S1+T3kEd{(0oVy*r*z8;R;s&L!w|t$q zWVYK+llp4ECDsjD==VIrxaCi*3#NgOZfa=8N1zM0c^UYGUa%OfCPIg+iHs+g4&bqF z)=Jpa+somV-501vL#>eN^Zd0jC~{HBz55921K^a$pv%xpa9 z2uOTZg#d&vc(v!+?>`>F1Eo5@=kIj0=C8HRL?O6cGLinu_FiZ)Jo4hFS#DL1g>kc+ zCk>p4P6Mi$QEktd+Yj}5m6ml!r#V*=wZt|kTS;Pa-0WQLBw3tRTPY7?xvW-OXGVw| z`x`oE#FF$JO9;1<2Ej?N%5O>a1)RjxRy+>G1_6GG+iRbd*D*9|OJt&)PRJ9Z342-m zRTRPW`@H(fA$}z8DbdNhxH}@F;Z{H4Jk5{KR;%=-g0i;(FS+FI)tJiLrLmhIAKL7d zqXv~w8j)BC5jVR?6L|xn#{RqQW=zI6@rV))q7_6H4=>3G>+J|GMV~`f*ZxGR#qNGo zg8)3{XEbV!_Z_R=;%4Wnrg{|eH>7!R@Hf-PqKK)Q=wOZaCn9O{GF&dkkFrF1hIjKd zXio0sDeK$cmxoOzx5ql(540C>%?9!@)6KzQL=K!hU_@8#FVikVn}TO638jG0O5~yf zvvQ4BURyn4eY>wxqDy^;a>qO(z~X+~21wq>ARmN#*tQo@`u@8#(QG(tX{W_q7=<+N zufHS(+<(14P$cR(r}#<4A}4Q>Y6RhZxNWp<7lxlrBOw2?<4S$2`fFfBN#07UKl!am z;q)`+1@GR$Sva0hgO+a@2PbyS zSA)e7s-R<&l|p|r2`LooX;i#!SKKJu=E#kIhHmq?HrRlPb{$>9*ZcCZrIq(%?aqIG zqF2gcO0P9DnAvFOGA6j8H<5R77I0%qU+SMsJsO)MRIL9n8e>o-)A-CoqQw=6S+ME+ z#hEoYMNz$gQ+7y!>$F376-&>_yo0Q2?*A zRFOBvNxu4J$4Tzcgdh2?7TZ_rZ9J;$Z5qBcE;m_}0Vs#Y97T$}%9sf{1$v(jkb7eX zm|$PrZ^#|73?gdL4%Y%_pA)-#BN-20WL32$Zjzum$SylB4Zo;;ix z`0@OfjiUYWK~sLKp^)qu-=Y`U--sZKMNa17sPv<1PDArEAo3TxaE$V2YlFEmrz&PH zo_7?Y=2c=%vBV$uW0vkyK>&1gpEB|_nRqy1$=vb2-)#qrBk8z#es6t+_SJmn86}8+ z{yBR_?cS(#^rhU>--^`!j7Vn5)tV5Ng9;K#EzWfp`@4b^f+MR6e67(eP z_Vg4N7_;Bx;>4O(p$$gk%WULl1$pr| zG=&B7Z9aIxkiP!}F_EM^ha&7W*kP`ybat9BDvKP}!bKwpP^Gs+uY}Oq-u;1>k5WIV zoH!IcGY@-yT(+su4r%wuhTj<@h4%a#8Im#ms+ia2lz91k?V^S$yUMw3jm+JCK{<6L zF>8Zs8Grup2rUMIlpypNhI}KC^T6Tz++s)^YGwFd-GrM1aT#R#H==v35(ZiVJmSTT zMi^}+Yh6|&;4d~o)nA(Hdu`XiY6$o)g)yapB=fBqQiWSqqHGuu_ z{@KgcNVDp%f)Cvl8PZ9loE)j1w8K0TgMeIS6+QhG6WGgwsUNVk1lJF5zsU`8uasd9 zsoix64O6r9$IPM$mBTKQ5T#|3nZ6#-R12FaCM;l=@(FwO+LoR!Of~%Rx&nsC?0W_+ z1NB{7IRyGU4~I}t8M_FaN|ZANJZ4D|f`@5(?!o5uqaOK?19z}djr*TE1o0{UbJM=LL@Q^gC$Fo-A*A|;WE$w_>gdd&X*rrz*{Ymb`N9LL751?t` z6yOc7S~7XqG&aHynRlp#e@EdY45_03i$WH1JQ&LZ3*}W*%wc#`bKC#pCdQg}Y$g0!0igAgL0eN#P%LN2ei{3)dKQWpRrs*e9Rhp7_wMOt!6gFv& zEz}n+pz9y;km+<>>4Rs(19yCf2wV61Ml{V~^5(y*#$gHsL0T2n+5RbJBvDxG+NH1~ zbrn+*AImdY1zmR*bX{aqjHWM2sP=@a=w5c9@2&o=W!Z%k_|N*ygedz03ld=+k9b9p zs|1r-Os%|)O{iPf;X#y@+xx==`9p&m4KuJlcm?CzYdmPEU6qTNyX~W*scq@lMFmip z{jrg?>*{bynDDFt^~aiAQts@{$Y-|uWoa(SG6Y!0lPqw{^cTzlHV6D%g&eRt-;P=J zQT6%QYbws;_!%luY3YT4l*UnrmMp=YZQ4>>@c!23K^*F|$&{oQ_RVsS7uD}`*Am~~ z)rEt87)N7DnfhfiWjVQ4`PcbaPHKr4rcr6m&D;a`Y}|Y;kSY};=RK~;qfe__Ime(hxtPdNx`+9C9-2N{NA;)@%|r}b8sbNp=<7WVIZ z*v1HtChqGr6b8bw7AP#fR633YjTpN$d?^Df=pW(mdoDFr15y(?X5k~}$^1(f?r1ys 
zwOjT!IEL7mKSRKJRg94I;2^DyOM@3>^e;^oGriBM@y- zlsj=_)V3roKtxRE1qv@i-F9rx1@kaO<(wd9vw;a5b5(X8{=CF>OKHTT7MQh30*tN> zXYZGkg%y#XxyQLXjCqe?1XvoI9B_tJ@GJpV7eZWlO}J|T)QGG@vTv05H2(D%9K8_| z;q**KHWSl9#CyfkzlKgMk?dR|jc>DX*JX@CmPn<}0slyC%Mgiq8Z3FjMhQhcrAQ7g z@^}*_(`5WU|C?3OpA~gNaO#bU6mn`6`4~))U$iA)^7Wnbvr@d=S@G%9)%xJH+_j)k zdDHp5h?BhRN2MOAY_ReamxTxX%=|(yqT(K5IQaH&BYgizAIQ(bS8PilceuGU;b>t{ zN}r<(04Od-xzx3DbOqlO-(13%?wb%zgYgWXUBm^ivNi|Sp6cfGF2 z8~<_`5B2|OK;Wy47JgN>CHMxZrfzb^B=4Z)vN;z`L@SYu#!perg4aVjhEqhOvze|m zjnUYIv^A^&Z@V8GVP%5kBSg9dmeQ|hXh`SM2q5;b#qMnnY!C~o=?2s#3-8eim zjcyY-VQL#?Muk@rK%6kMh>Bk@Kb1i`{^Rd348FBaK4Bji>dLtsSw>-`>?SZ*45-X6 zz*2}}4fQ>>(G8?++aH8d(P3`aWq^xSu?Qh)7V#EGsaE|aK(2ao&B-&(GZYr4wQe}A zG(i;VSk0(xlt8nt>Z3rwuP7#%uli*XId;$s+hS6fuD{Dr3qkDEL6Bi+(#dAI?je2w z#voc9(exd|1fN;LJD)NHV6E^A4&RrjL$b|%&FKDh*s`-P_T*A`02#wBn~mJ321V_8Ldk^{>~ej;vh z4{SdOZ;kvNtrmeu;@yQ>aX1^jSb1#z^g6eIQY+YMQb#uAo~ce(+Zp)p{(u(e|6Vtq zV5hWY12d{=Iy|3--|Sz~ii}wOs++x|P#>L$dfR3!DfDNFqm<4KVHT>WKLD*nwY=6h zt$g*ufW}o#YUk>Kv2h42AI$CbN_AO4=)^CnDBpyDLm-F-$^se zJaV8v6=Gbxhz_di>-rxHlQ&MT2e22Tk~j^FM7MjR~~w5q9OrOc%oJ zC2@VH(Sf#Bz(tMVrWcC|r?N)#;DIx_|E0x)0Grs@kbxS_3U|z}UC4==SrRFYWl26=uC=S_cbN@TA+56*6MY)^iFN^j?6-Pi z6<2!ySyC*6<-VOz)D;v9%pn19u)1Ytap*8TcllJ2^y)?8<8ofBYElPyi1tT+@#zKc zCfy}HRmo|f@j|S1md-RWF0h69y58_qnMsVu3z-7HJwyAK-fKd+wTF7|pioRRIGsZ> z&Z|R(9-Hg3Hh-1y1+@$do6meO8V8hB2Z5n-#rSln_C_$*RlCp8rO!eW$@fnR6UtgiqPF-!?LWv7Y*;gn0eH=pjx-%o~XOoBrP=b zMu--2m0!5YCSO@b;!6T?z_=@7d8_DW#8*+}b82P8@s4{D5yL~76hf}zdSF;})1}YB zhOx~Cq8J`BnJ_g-5(K|K_i0Kw$&SpX&p9s)!zsgbH4v{q|%>GA`PY`qS{eUt+2_zFp60e z$)Yl|cpSH;1fykU?khghTzlWq2$!XqyJUj)*94A5ME({>{R|fSzEy&W#uwAzI7da)1r$_PkPa{mo9Dtk4y(AzAxC{Y9RB;2 z%pyoXtDKAFuq%o%JmN*J81LkS|BtP=jB302qJ@LIySuv#}+ac_&2;sr{9LgnW9-}lSA*8P-~HNVW7OwKubpM7Rd%o@qW#wZ=8@iP*{)J_$f z{Ficw9J{-4gfQ5yr#?b8FqvWzy@ZVQfG?lTHc=>=80-6q{<$w+)Cl9!NZ_4IO7dR)# z?{bQ#!bgUtD);cxA%$p$cP$zm+NZmXS*27-5a=J8eOouC$aXst&}?i7r$msZd&ifs zQkzoSy=C9@o*3ij6XaiaDZ+t9e?79Mv;PS3Gf?Tg%+=>T?aY6TVY$YC2T2+|kGeKD zfWK-Y54^xUuZB|1-E}3HaPs^`H7F%zfbVivf6J4JQ$=F&)sSETWs=T*WX^}Dif`Gz z?l_57R&w!Q)bx)$RUZDe>;J1Yo4qBQJey(RRXqCD|5Pw`vN93Ff|8sZwmZI*4kZ^f z%3m3fnNOfrnTqJakKNoKE4e?GaKF(n6)0{7-fz$|LSZ=I^Y|1iU;Y0w*Mz{F`ei$CINZJ~0WKQJ152%rTMF!j=(gLyp(uXEOKRxQlKqevh43;rSAfJ4{gT zZ79^)JSB^FLbLDI(OITLcpAyjfy|EC0_?{;`wQ|6#6AZBoZp;g9UVk0;#4^c4hV4O zdyp@@%R^Ez@=C{-2pxE3EfY!x$JQ@7j2R+o&RA4CHg`)Bk38uRQWMB&US%Yl^#D_( z-OIV-a8|b?!ej~}Oj;nqWQqeKOtQkEb09Qk>+Za!jFqJJTuJg2x<-YgEl?GO#?))V zjN`R#>^buf%RF?Ejt7*=@=-O8Nj8K>vka5WtLx~fW;|bh`V??kd$HtuP)=FrDY6=R zq?7Z0{ah?q9FA>?Ru7XhB$Xh~ySdO8XU1a$vU|c+yKKY-#~u?w&$TiT$zmY z2O$b6{3i+xQPM$1&WmY|8~(+-bL%#|z0PF(cxfsmyPz0Z9G^AJH~D^K>u8*7 ze7mv3x~+qq*WTrx3cBUw*xIL)~*w%eFk4&06k9gOvYL>d1N^dcek&sLjjtNmdm7&AmT`XM^!2r5RN^qNM3Y`NB0+C$gyEf{JrxhL znz{-^H-J^C&{`akR5lROnB7#b%UBFT8UvWABt;mNXvtExnC2MtN|7tXG$Ir#s}wLh zE=3)*hRmC7OZDvLvjy8UuW89OV<9)+5{mKHOs>_ndN&j;tacAUb>kV07uhy+Kup8u z%L+kd9D~@`>?uYyWqiD{>a$hErS*&?K>NGkWD<}V)-%dPgODPPR;(p^no&&^nZ~b4 zqA11oX)&#gZ>{lV5nTBm0r(+x&Wy9!+f^7)<%Y-F1*h~eI1PJsHAsxsp0_DVIA7ZmcBft1_$kSIfQC~6$);bO9m=R-SSNZ)O(UNy@mE_ z5k|6`{rKxywwGy_Yl%T+I?CMKmL!J?0Gl}%+nGqruxQa@>GPlms#ta@VL9rn|7W`= ziuqXI%UZ1vmPJ0#?{5D@oux7OSo;>ub#SIf}2Cy~rbK$%HVy zx1)38-Li4eut#93RN)jAj!3{J4;InL^W6g6K{P3#Kn60q02s=<;w_Yt15u$atB(8qL4UUi&J52 zk*r|+ezFUAA|bfb-rco8{Z0XS+ptgFI<(xXaw+s-(6b72M(s1FVA942T6?<+OH6Ys zI{{-OO!nqib>5@Fi9A;$0aaoZV5=LGKPrWfn#oC4JQz3F8G@W?-l2qT6LE@3i={kD zKSR1v>lWMQLjgdivW8wMdea04Jw;-bv>7ZNT}C8ZBhumH|I*(<0-d$Rm8L|NATa@XP+BR`K)t8l*;+!)!PVVL$U&10EnP?VQB5_Lvv!_y4AzbcPX|!?| zZrrdabaamTEEg+LlFc>A;M#L!itw<|LIrKSijmQ{K3>~P&`tl 
z6`V?s%4Sn6oMh;Kr68PMW>8sfX@(`-|0=6q9(bZfA{uG>-g(7OObQgShR#-sUFjj| z5=q>JF=GUY#4n*P1Lm`zrq|)Xqp=I?40H^}HS2Ly>)2SmlZd*#sd?*H3Yg zpeHRaW0!BxaE#~$-IUrlvyy1s_TTz90W?h^(ez!^-+_gEYMM0rNl}&8HLu{o(0ZpT zi$Y*0PHTWU8j*CO7zmK_3o^zzx89AVH7Tv|I7^463x0^c?$qF^_6|t+T+yM^mdEn; zoU!>h6_w*Po)agJcGsBs3pOC8874kT(HJ~ylJklEDMx+vjw1MeF zowv;SniY6&q3Oo7&MR8N1A?ku09H!`Wnc1;5&4g@LjXLs`0U)aY2A!92K3CpUxqlU z6Eq}KbxVubUK)~q#3gFM!oTStHanPn@`YlIg$ODgP4TS(%9V}a(nx-Czs_A6!@ODA zh(rx1*{G2T5g<#K)BU%Q->xZ5y!~r04QU*@hBpfeI3m-zMG zyRgNSsXzrfB=tL?t$hi61xTe6QWo%CQzi7~`PqftjoV+WzZ}=@+u%6L(T*z`+aw#} zb3A|%sjX<=2ne6jR!aI7FJXeo$>Yzfz?K(@p3|)^P+s!L2;k=Q#3)x3x|9!_*H+8< zr8(9@CrH(lx`b+TRIa%?<>rp!P*F^ZPKMzomq`FM5DwDDY|tm!vVTrvt0md$zl_2x zTcG8m)UtkM&lq~&jr_qR3X|b|@z?Rk)1#U~>rt+5RMyxgx{#Zr0a52`RMloci`+!G zglBhkd){n9ZAA5d%Bg#5axD;n4`ea1%SQI;(MBaQzgnV@3=)CkReXO;BPUuAjs#$P z3;mxb@kQUE2^f^y*$nagUs?YDr^@3@F9p8R;+Zl0eaz1>v5`&?II5eUgTZmfG4mvmunGYcpHCE^9lS|D6*g8Cz{kY7zxdSvV(X6v9~ zhh&In+3!o!G;_fS7TgVrtaravjdaNkrb>|5RVnR=1is(9EzdE>pw-!6{uJRW7Zce~ zjvz}L`1)8LEN?_LR2%J3cFe1&7B^lqjySD%kUnzclwnVuv6AhxZ`4%Ubtk7Av=f^jogWN^xr}2M>i_7rF0*bs z0Do9cVLFl`RPg<;j`FgTXDC6{esr+ijVDo_qS!~|OoxeLXyZ69Xwr9Kl)iVX(CzfC zjy~D13fuXy?rkn4(mz?2IC9O8gU5p9o+Fs2Jj&dn0&i$Xo&=-CWR-kIsYKG9^mr{= zOT!VZXhdhryBRvoUcc8~>lU5|Z!hR?gf>|-!gdeZSbRlEAzNSC98+8< z6)(LIWh!o3lVK1Oi8OiM!+<%Hh=xFEgiYh=@)kGHF^BP^ZR4exIP>(FQ5TkIQL0c# zyXQM}Bu!IG9`CR*O_<$%7olT-BaZ2tjB+yc)5u1@>6}QISQka*<)R&HmBVO*pOEBt4ngY%YOvbHtM`0uNW8quDYj=^gg5Ij zP}Jv}(vk)$#i81Zt7bC#P$Op3peAnq9EnQZU~wn<&~*-F(({4eWUr4n6-OiD(+P+} zq>7BnU|p^rcgqCGC7j{AO3W*lTe`zK0I)+MR7ff#TN2U-nCtFFQa7AJxomFlR2h#C z3UYU`ZOQ;4XUZ?)^PS$>6bbrQR+=KIKa!bY(B_Jhd#A9KM^*)#qP5%*qwaI7@&Wic zxi7no3dkm_MY6u$%bpyp{{54s6TvC@?a{;tX=VF$HojqCuV3HVweb#p6wZ&+f)r2~ zRJGluKdm1UJ>Te-p=K_PUn0{$oXR)8HXivehNKI3&3A5&j=(t4e?-%nZzYxcn=)k+ zHn$~u<4SkiVAS;Zde+ajLH09r^0}fdYbnb(PvT$Pifo5dZ+`Gg!W@T}Tt5N1*U5=Og_`m3~N=!Koa>^YdK2%%UJYcYGt&{|qXL~GzKr!xAJ z;y!8VG>@b`#BN5L;jH;C@Sl)$VQ8AWN78q3mjMeK{sZzPD^gtcL~Nn=Hzn)T%!sDe_KZSorkYg<>*JSbIpjMp{c>}10rQJPL zebWAjzV;(7154(N=$`Rq4>osNl-N!L=^t?)*8X!4($^h5kuo*f=f5X6e~TDjr|cMb z5NS{bYu`t3K7r>51B|t$Uq^pN)uZ{r2p8eaMy-W1#@JFU&LM zn$%BLu@C1y=co&4iKFzByje4TA&^e>!(e`89x?P9*{mQOKD8O^@ zmkR{XSHKZ}x!9>BQ^K_TqnX={tn$oX_^M<`DP&z1BI!D3F2=p1 zwEl_>7*3O2|9T`C`dph@``XC$teB5A{6~-RzXL&&sD+QsN#YUJ4Z?~9`rY8#1>?pl zJaaMupJ#0H8F-c&K)VZ>*&-DWwB#`;Vh=ZGYmQ7FVv-FjuOt&Qm2PvyHJ`y|xi>Hk zaX&9u2knd$eh)@^t2Vy*r8`TlenN$Z@opVaI&MXjj%$;lCM(#z8=CFDffAOvj8)mD zQ4frTFyK&<>@%_K4A$Qw;b1zGE2K8@ zq&)(Wm2rcK3eJriSp7TrD4U_E21@}QhHj}|;?#q~xytB`WW?L$?2+8|WqPLu1N3&j z5-j{rT3{ULZ8(C)<3XL@t6%nR-N7v)Je@PJtiTv!irwp5=)fvAE}ZBF-s*x+QW2}P z%x@3}TnKpvh3qRm(3$}=LSR$mNp=ENi=E5I%n z5=DU#Y;YhGuW(Or$z4eOodQBRV8$Dx=}=}+v~=nJwc6rSMYrlrp8?G788I*hV=;IYmkRVGFVN22}f)IC^Bvtgs?|V8_g8x@CQp`XnhDi&lOdTU?7A zjoQoPVGD&^dWtmswEenB>g@nR%hzC)ft`M6AHx-B&)GsE@1MgBpIPTGlJaUUg_px8 zOVe|_O*N>Igg21uW5~qa#vAV!CbnU@U4W+Z%jJVlaJ7tCA7X>h2%s1{^i%=nKj)X| zfcRPJWfYNQR|6#sx>-9lu%4rxUIp{G9lR z7BD1f<_P-%$HAG3F6L|pK%_i{&3D@Vxlqt+popB2JXRukM*eBI5!LjsL_7^oS zq(42iOUYSOupDr;;|RZNk^~(bTVX`#DB992rTCowufs<`=RvZ)N-%rG@#D)yJ0^2_ z6dOpN^pBW5c0UY+VJG)HW*}`O^E2zY2(C=R`_j%IeQ%?q9K7Z=Lt-t?w2KZ#L*)f4 z)nLey(&b8KQ0fZ2o;gJljOOJP;)xYbJ*&%dhgC|p*Eh2@)YrJz9k`rO>rlT6&fCvj z4LRJ1(y|w++<{Z7vYBR87mvTFLVw;UdQQ8#NO{wv0$`2@@xWV+CJQFBQKxv|sasIM zWt5I9IL$?ZYBSO*>xRHq<_bB!`?teh)HZ^sgt-0UL|N5g+RV_7L0te5`0a-9aoG=$F zEfh9O$;yi$YoK)Gny*BvKF^N|{t&rBdPXh8m0BpSo+-w{jP;IZcFmJ{5>X#g*Sw6t zBnQWl4e&_S!TChd(U7CLDepJbl+r{BY*`7h5!}=1h8N*i(%Awj{!H$tflv( zYQB4uepbznU#L|!(8_*mc7qS$Llm~&V*Hu%gha`8y)Ao??np@!XUwg|pOck;6Xo{g 
z&B)^!lYfV}8?KTfri|lKQp}!GR9=VI`*k}jXYhc`oNXG_>8fQb>VXit4dY#kGP%me zB3XyIVE~u8clVv|Ka9?oIHBprKaMyl;R!5n*;4>d`cB-C^$+_rS2@==#YI>Dl@0dT zX6>pyp#A$&XLrwFW~dU_IK1v_#n8&T?f8ymm%3Y&r1P<4zfHrKf{)yS-Qv^vc0bXh zP=M`sxmlM(8b))IX68nyMiL2$sTp(T0_qJ4u`O2Cmq{I~iZ@uFSVZ~ypYe6oS9q8@%I|wz$VkjgIRgo*#Bj~I z0?NF*As4ddHHuftCMI^a5iqcfV1c-w^dPejbpP_tpZg>}h;j$?<<*+?96oMBs%-r+ zX`j3V{yrC5og^f>gCL!A{Y+Za?au0hk`d$!TR`r@7uU1DEgY8u=KHdw=qa`8`A^KCs9Kzy|E?xt*kuDDs8PDpPu)#H%r5y3OeLenMVLc-q_I z&4!l}C&ojTQu&6jk80+jzhaJU#DAVxV1GS8 zGg?~Hj;g$8_pw>51SD6BZ=zuNW+vV-$P5ir&F*H&E8)!98Gj-Bv6)6loOFdt1WP`< zqpSc+W++De)RpricFvO*6*4U#4wQG2;Dd!Yt3d_&PRnmP&#w2n+53`OF}f`DAqP7i z_@CkeRkxYSWh_2J(cMue|1Rx)P^?>`a}1BQ4ljv$zu=CRvk1EoF294M2tzGb9HDJ&R4clagD(QY_d-Zoif{y0Fv-3*+6Yq9Q6s6Sa%vwK8Ra~V`+ zwK) zF&`9?ad!BiZNRHgEFLc<>2l)S6=w}Dx_H!jc4a2X*z2G;_*jeFc*_-|#x?8$i8h`B zpjnugbtgA3|0+S7$;Y<5i}zfNIhOBg%wWTup{<@vnv;=yev&yD(^YZcPk)YA!wYit zc!y22VNad49d-96hBpYC0H>Tlm4qPP1oav(FHRQvM~zja##!+0{2UFxre!?BP9v$Z zoky{@mrh_sg7=K$-cK2XTxy{6F792;nV{9t zNikaVoLU_jHumGopo`tLp)!pk_|KezrfgkD&3Xx3!A~cnVf4T(cJcG-ChLf#M~}a> zwT?Yd)bG~)3uhc-)nt4XKojG-%-Tjt(yqwY$#3QawZ984spN@A^(U=`Gq){H3^}wr z=z;5-OdY3h&kTboHU31$IE$PfXFUjmMVe1J0PpemB6Zhv>mqA&XwiF5s37Wi5y{#+ zXuX)fWjQ1*gGKpHIof*h_>i>MfOU~Ih=-tUbG!@iVUm%rmtpQIn?q$8mE>KhoTeP_ z;?HDD$Ak$?km;84 z`7G=i!A#YXYVYhq&a;os8F@ax+N5d4mHvVRFTSqhm#1zlIqO>{;bU&4+K1?QLv3W+ zv&ASDe&za*p^HCN);q`9JAeJ?tmum#Ohz9tuhAAd_SWq1o!2Ht_s*aDif>dJbT;QeA5g?JCkk}Gyl&0^9SHxL#*pI*v zF|v51+6jS*V9^M6cDEiLIQ;J)=md#K1)IXH$C+qlbs9<}DQNZdB}ccSf6%RKuBCLW z#X;ASu*uZA=iqcDkZ=jtabT}Kd_kDaee>v@v!XyIho>8cSBFbqijdB%BH)7jttb)? zyX#|rP9EA|3|VJ6o&IgenQ(P=QZ&H0J%S;X_YJ0Izs+eI?wkqCE5wIlU}_;yUhghVD0rgU8M+CdkVSlNvF5sRQHVQEw&}V*O%|DReB=zTg@c&V zj~^V0)SspeB*B0@{`P+8XGL>oOL?C&;d(Z2DW0}f%;x1FZ$WF+;6kN9QN(SY%lmcB z&m87-dX$?59K0%L4R+2MSSMpU?=K$-E(EJ*pMzo35mDiECV)c;tSlnqJj5@*`j{vP7v< z#~tr&xMcXrl0PBgz`NYHk>s8Pa$FwM($8cxAy^XbD$@E!xvS0@XCQnen{Cvhk&T&3 z!vI`v>mPE5iRWLibIc)rl&VUjqktI#@?=*0Xf@_4NtNp@ou=-F9tn{YcLG0`8h^S- zID9tfBE5`D4X- z#>guUXZ))kp&OszP~7A%zQ}s`A#TBQ7`{+9I?)w=v8J#!@4FJcy zhwvDRo^mLeU82;PV0BIs3Uc;Gkeg#U@m;{BU|;i@&`b;)BuZz)wJ1Tup1n?C;qs@Llyc#b&?RY@yN@5#CE$XPGD|h!_b1tXy zz(;mlqm*Z!{B{*t>vZKu((Iuuz*O7hhT#CZ@}qKyD^iZw;+^sN7Y+{ck$fqND|ERh zvWMNunc&;7H@`0D22H$FXV z({6K7zq~J-KOMaoeYH?_fcQr+>&jPXbw*S^X^1rq7`gRbIp?&C# z9AvfJ|LlDt-jpBF33SH@4jA;OiZZc|kC^PpbdK>gOUCpKZ@K){r?|ZXgDP(S6pQv% z;6ZoJP?Nx7A8K=XsX>WfpkwfA?SYl)H=3^!-@txuxKsA6(V=tBOLzj}7@Y}<`}-dL z#L%36As%T{lfvVw-DgxFjW1*P?M+_I+ltR2aCwi?6J?^4?GAX`w0HK3jP}R$9&adzikv z%zUrH=TGmiwR`|*G3O_rMZvwgeL#AkV}3-Ijo0}zS>^>yW)$Ua{-M#;MTbvY97g(q z$ydOec52)WA2GF~9=-Y-ur#K`iQNQ*R{R+`)$+VW@%>nb&EbkOR8M-zZ^I^k*`n&E z9I$h2`S>4eI2HWd8@Rj5uMm0%4{shDM>>7DRp>1Vza*e0zo!VfLPaI|xe^?N$3}|@ zISNBFM>e3u=L3=vR4Whr6n#x+EOWThpvWQ%Bf6>L=2Jq2K!*H-rc7-N!xjFK z+DWG_zEhT&YP@Cua2*&wTEioWYA0KL!25Bf*UgT-c6KL!*j%hGU1GJ#4b447)t?ev zTQsgfGyD3eGBFulYwd5F3)QM#Jr)Z{si>(&7e!~)*BmPGhxYGJ|8%BLDX${6_i%*aR5m|2&)?rrEB7 z>?h&n!}~90;8j@zk#`u|!*OEkyMHGzNjg^LS2OB684JipRPOAjS4PDrApy|T1^ zw%Dq3jJp;-lKIjWP3YV*A-ry!oj>{+Q|wcU>5|KuNj#G4nI!7MheMgW*P2?s)N*gY zTNZZ#p@cP4MRE>{2#DU=MnLqji3uGP_36$tSlW_z{>q+%_393!p!&}SS!Zhrf5K_d zkIJejOF6TFhVOUGUgiYFm1w19@@}ir z2G-|W-x=zc5Ue5U?43wQYq0>T@}E)!$Q{lZo&5rm6rh2XQNf#7v*@m{XWf}2l-Z82|j8x?8^sgIKd{3J@? 
ze*=oF>O7aioudxNK-CEtc`u~zi?FsTgtbM^AgnD4VQmC*HD?)i6mbaW{rsFC*}4xODat)1#S$ewar=)=Oaqi5athZC&1S$*3YWX# zK(O3T{m$X#h177VbUuf)3&Uw9JFpM57{g#`q@P1;WoXH{ctt%( zlLj=J539sS0h;*|V)hf<&BSk%soDK_5JX@K9+KM}e5@()X;;nflQcIx(*XxDXkDT=X3 zJtX0rY>ObHh?)Sm7SV+atA`E4_mLAY6>=zpal<$3Xn-#;3JUS|+Fs8C;X z+hzyK^3D^#PXnfI4UVqaTMpa9Xx4;*so0ljK)(OxMo3)IR}4?hT6;7SI?2ze+Fsi8 z8KhzxIHKIn@c&0qJ#uG)|0$9Y}Y$_8e#3gw3W zcKgx8ud$w9>mcjT>@i^SHHP4;*z4@t<@`j%43BestQ^#A8i@iBQ7?xC=;59!c`U~? zlC!Z9gLBP~ju^FWJ||8D3IG@Nnp<%M>P7fjZn|25$h)!i`td^}R$z0=$QLC!oxC0% zZ7!G0FK6)cveCHyZHGIv;hU?}8 zBlSGesEkvREll4Ic=$~uwRm%-mcKcgf-04gZ z21eSE=QMlu(4~vT+;dHc!)YV`Ql0SC_w%eYHPDwVzEz}s%klk-*G z(z|qr?tq^Z-dFLj-D6IYv4Ag06?Ys}Pcs*#>7)fwhS@cTL z8XZd5`MDgXzcmJuqNmW?ID+3fc&G)!lUGbLIjf~M$V-CNbWer3>&`dZMfm6;ONT{B zbeKD6du!22@V|p-RHv)-18tx_eBRol494~S>ry_SO@3!+s%p5J<-BxwF7*Y{U-X_M zj!TbyA{^zdq5aJZ3|?H39W?76hSs*9w4`g)R%#6idlZ-G?Ky@*wbqyqj*^o#SAhK> zJ$6<+cC5rOVGE^-3n*f~B<4&L{rP3_M#3K*Sw zwy%xm?XEsTz5Zy`#3Su7^Y^5sj!s@a9M?C5%HPJLccF@dnBJ+8Z+a67GM8mz$R`Mc zz;Yue1jORERz`!z))6`}a#mnlfn_sSBk72y=4pOMzu1-WNC**YwHtjWd`lJppX&Hq zxh`I>T5k$#{UPD+CWD#5o+$sZ(wSJ)J7Ovaj-vssbP#mC`6gI6!1e}L@>H}TzkKb= z4I^32ACWWURVpjkl#rYnVB0RfhpXAD9;TW3ob#TuJaNOjkViaH{V8MkoiWfR1eW+6 zaIxP18G|XG42_t#!#2H*H@a{%nm<1SG?hOwMH2tDkDPbCt&}vUHhe?{<~xVS)!OSd z?u$aftr$05Wc0`ioxerA#rM0h6{W35+q=HAT*qKFE9jVN#V^;KN#H8585E#O6v=03 zW_bR~C{jL7o9+V@KVwPk7UY$iHFW7lFI8N%?QfX_4D%NTk$eG)8i>8eos~mnZZ-%s zFaIC7>nLSu@6j+K;sl;LB2L&L;sg|jliL{seWLan((80hkn(5v{Xs`9MB!ooS}nB? z1@LuPqx2V1VV3vBBcYf ztNolL;;s(XH|k#V={ie#K=+w~pe|5FAn2Ab~74}cfe1Fi4yc~aEW$&yT^0-W= zWR102%a?3M?*4}Pj|FaQl9|(dIq%NX2y-u++xn=%T!f1arda9P=ISvblE9f9brsk?)0m4XBE9WQ;Lzw~mg*(j)9=YN>vYb}IIy znlG9C0oc$A28W$4;XV{tHhtDH)fH0g zmZXQ#M>aeEUgRf2Hq4)B=F_uu_n_aCK6AFJ!e?*dj@p4$_;+aKYAIExUP^_Ns}l)n zoN&TYsa;U4Mo%!%@@^-1=XP+zSV5zW|L!Qt%)A$)1tj{m*}UFNo}pVgo1$YG%N*E0 zn*ESGrnAgQ#gS*~QG~jg1`w0h3|ld#y`eeY4=ToEoJGFmU2zZVUx?O%2*28%_Jnhu zzm00W&X-(jf@c!CnpF~e^bsdltk}?(Jqf(@)^i}a>YIcjS`>~mJ ztmh^eo0&P(G>fiZYoQLVOx(vjGuMmqW;Vm8JVGf)?Kq6X9HG0AfYE_0SL$yO=m)tb zpks8ZJwE@{lz?CvcH3^}*F~Y>7NB3Ate!ce%Q71$jm)m$dFuItH15SNB1FC7`6z|O zC%<5@j*Nl4VPQ^4)&Usyu&guKqHSL66OJy0E{FV6{9kQ(;EAOzVfVIp`?C>Um5t)E3|%*O{u2hLh-0Rk@-#IWl(wX<%N&U*MCXW;hsmIX^&lo ze*Q*78W!UYyU+HSmFBb0D=7ynm7UpXPo-xNI=`_6W-ar*wepf|ZgG*dId;;zqb;E= z*|fBwUBsYwgZ$>v*0IIy_*z{Mskjt#eaaK^Gx2SnIxDm%*A%B?ExhBKRlu{<>1dix5mdt|APTa%Hsz@9Q8VEzWA|u(xBu}#oIFU7Ba=nd){gM^0Yp%__H}T3(6bzLabc@G+M3?*vG$DsipJ zfm~&Kkf$989PThjWojR`$r2a{JGEWMV=aRFCMWvn%dbrhaGXbsjb#Ok;Olj!;B}D4&6+4z)sK>jq{F?L4a;;QP;m$6 zfZ9`pzjzFTLQndWrbitRJ^%d7d9}rN3N-_LDwf9>ONd^2{uRoccHxq)K0aAtsjW6U z?>qMkTZ9~7k2#iQSlsC7;(vxuQLj##oZ6@g_gD0ShkdF&y-6du#+Do2BBz{4)sS3@;< zY9OS&w@47)wF_i*>`uZHJ1U@T|;+r-uG4 zGO)LZTZ1d1QP6Gwd4mH~h8E(ZRH`F4wG+``?m?(YnlC56Tx(D<{Yu2LT~Q74&FT(XcqKk!9Vc>Cffq*V#h4(@u>AqyAo;XWMD>=tTIXB3FinEcd)(?+g@Z^07_R zpap`RX7k~L`FI=Y87%bXmie4}qX>O?uWfKQOia9Q%L8iJ|6yQprwBf@*VG`e3BUi^ z7dNz5|8Cc6(PhZ$%N!(6Cu!!>R6e-q$+wey$ZB;Yp9cQjIsAtyy#QUs_FGN?*MURT zY1QA#-V4G@=06g+3|?^JdV9-1e#9a+Q4A#ZyDj>z1--yWWx27>{G0IAc17rcOeY`w zTvY^Hbc8@Jh2O@G1-O0GYYQ>8PRF|Tde!S0ZRIDwJPG?wj03rZ&_we?-EqDJKd?<* zoOwEK<}a<*$(-ieIrvXG*a5 z>c!jq6&!4V8<*o`vz(xvQIy2i*IvDzX-C6V1;kpWFLSa^$E%r{j>lU_7~;WKE)DUw z%3elLw>f7QWo6}AP(Wsel{1Z@nW3zlsNXs|4n!HIf=@ILx6kn+0C zMD`a3S81EqW7)&(UuX#9j&|n?%l-ztIasf{>yR_{Gi7ej*H}&3w`%WX{D!$d?7rMX z59kDP11%nYZ3gyn)jr=i;_IvypKSkL>i?!ytNrw<6qHIymc3xt%17h>!d9QN6cf>W zKh&z9^ZD1G(U*iUzsoF!4Xt=kwKdvFiO?#Fm~nU%UeEbv+F(yWO;mN|#P^O^m~w5; zGj`ZP(C>+-#9NXhjwwCdBNBY|gDV;#=;x|BsQY-1 z@|x(_nKnwf5X`_N_~I((TyAjd!2EXHd+KZ_AwTwjn6_P%b&JxYS{Ks;+2(tJ|ALeY 
zU8h$L*=EgjiQ}Tsw}Lk>B_3pHU-VG_adFDx<^nT9i@tjP$$T5-6&B=4QGIib1+D&C zZFM3Bu1tSc$cfbs^ZGo`4$Wx0KP69my#MfhSZ*~>W^mzwl;YC+cIpdpg^)N|5yoTb zVB5)Ts^k!*Ta!h2HII(pyG0Vqw49GmQ`uuiqmQQr86=F=2hoV~fk=VG#HnAAjgVBI z%o=)29ooFGbk;CVdbQtUZr^M`5p=6awp#d1OiYNv z!Zd_u{2Tta`R~QP@bCA3KmP>_;&3i&k7cv;rs~qm{;>b#Q2d7H0S)qlNf&dSvgAgU zSSBMOcrFhAT`|)Nfz0OwF^)IhHo_Qs6w!<_ZAN^aAqidr#a2!@$j|>61Gr7;UrSHm zHV#d9qb;L7_TaoGKE~4~fXa|ZOyg|5mjRjcQtB>8G2IKllh|=8A+A>cd)OuXs2ZD8 zn}+jNbgjCw?6i)U>P<95w1vl$B%3PJ!m%n((ypOPv|j(5t{O)axzb#y%;R0qD9vuy z&`!}mcF3#s9|ofR!JM--)h(2B?!RuhuAo83rI`??3$bGOv9auX0CcN&aPnr5e(?3a zOOLL~ida3Og4Pp*x4$Mb-kGUAme<1DF#Y*EmpwG(4W+uvDBbOepxbnE%<4KCCV_43 zcugB`$}+O}+H{S1lgcUfl}W9{;Cz zg_CTFuRL~7%^ZswAgJC_mwSq}+~_nu&Pgme4zbd5%${LOHGJm1sdlL|`Ip5s;sJ^C zM04w0!*aIoqF1vb+aHx~Wzc?=$ql_#lUAFvk-e4?PD>eg*(;|Y0sIctcl#7~Iz7^E zK0&Wb@m9YRCuBo=u2Q}G*H0;#8J$-B(EG(FmLCb=&9NSFX>w?1mGizU;@&F5NZ$j8 zqB(!`G1kAkFeG|#TsWm7qDi5jl$iJeSr+qNb)C${a0)v;~c=ESxpwmC^A zw)5tG?ysKus^0fc_dchpyQ|KRefC=W>a~)@9VK}&iYMCZg>#&KZgy;i8)8=&*b{`f zN8Yy)O*R!}!Tm%o4MTd@z3?KW=wigh8*U zEk8bwd<3sgz zLE5>C;TTAiF793+M#>C&_<4Zo8h#D~78RD|diGv4jImJ%DxqT|%6teAGtw^XE1glf zqVY!H55*W|8*yUQs({OnG|7`UNa1)x*~5?fJA+N7ldd>8MhQ#g7tSahQ#o4*FUM$r zmzWnhVP_$lKA0Xws}T~ooa+ZMUb`3VyeP#p8Z*IJq8W~1Za_L}CFMlWz@zSJKk4;z zwp^G8t(BF$5FtoYlehtQVHZeSv{d(uATQvfpYl~MS2B<88LTjN^-U)a@FAH4NuWoC zvnXZ_TZF4YIZZ|LJ~%xqURred84lK%h3!mxtP0@VDR9NZ@|R}*=w-s*&mOnGce>;T zOt6tXY4x|_V&Rwr%}?KwN0@u}vq!e(kq)L!;m7-$a>Lwy^P>ZUhfk}J)@V`*n<6I+ zdpdu1HJM5%fW2(H5Hgs0CjSi2iY6u53tetH#*Yb`Qz8HKnSHtgf6LB}=r?q>=SIvkb>4S&s%NJP@}7JjZVm>^+r3GYGP% z`LS`-2`UUJQbIww#zC)2zp`Hu*GwIp|V%FDtpki(~+24TJb(DC$$TXNgz?}K*l z#m93&QcVSp-vK?K*>RXP02}{ms6tFdRCe*f-5*r^HY40c`CtG!@m`qmFR1|9(D5^ z0vdy7i`0mndBwQ}kHyUB#ZF}MDI1Y)al||RX5M&cQHxg#aRj_~= z=B!bS8m57CkgI_PW8~&TxXGXBkFj_SeMWd!4QXd%g`yAmRD!|7fz~VU4#Fj1;A~u3+)~ydEgc<@#|bWngPA z2cEe1QO6Wt>uOMC-<-|d8_ZN%-Vxxl~o6TD2H4{QF*laWI*X@+OLk2DjC$TPFk1i z`QjKZ%g-c7md|pM;(UBDY7LDyS#UP_iKjd9<>h+cs?x48$)Wyryi$Ridr;d7$f=1O zG-Grpa!?*yh)e!G;?cq3D=_M!`%mv=vd;y95LL)ZPu`RpOEtYn(#0kerP_e{umQkZ&z(G)LR7qc!pw5my z@gdQI`QCxcfm2litPw7)$E4pSBS=;YkD?Q(r%f_nugfr^*9^Ak>cdd=RbRHzE8!y5P%xPT=W26v_tl?!aD;5-GzKUmNF>OV8OxB{?&-X0C_kN zZzf`%nh4a|Q5%|6dROH;p+4X(_v{;KcmG&kdqvG|>I24<+Wlw<*9*m~xYMKyOn4Vr zN0+IdKSx5$8R_fj787rGaUf|B$Hf(QIJwM&?AZ=OX0x9O@e+X?zUP9e3&ODR1M_Q| z&t!MzS!O2(k;ZCN@aD;1bv50sxZO)=K^%yyWPSGaCvvc6;7>x>K{CL}aED}{w_ovy zRQ6fx@xk(2uq8}kQNs^FCOCtdx6+zigD_IUat{nl#Gf$i_gr>Xsqvb9bd0Kfn`M%*!7$``wI0#e{g;3E6+SIcd`NwI%%(Up0Wkg0IATeW>rj|H>$YY(v%wb^u z2_v?t*W22v@{Pe4>01in92y+ZaYpH<(Wt+nY;P5>N|xFVbwiFMqjH=Vsg6x3i28fl z{rzg#Xd+DDL(FYErn(_nTg}Fc4INiGiT-GvSWGt7G5BUEH61`5HQE;D*j;`>We!$~ za4wyR_@>H#Xl%7H6*^-F@p$D@n*OS-eRep$NoC%(!tY5qN*h4gNQAPugtWZvz zV5oLIw2Dn$QyB<;T!3%p>I+b5$(EJaChcm00vov?>B3WtT|_yQeRW<^e&7Wu zqiSy>F;%EF57uKy?oD;X_R;(NLp*)yAp59+x9jl&Lr-w}?#up6%3G8kHA#9qx`A~o zc>&%=S`8SQ5Tvi4sfnM9AH{oojU~QLcN-F%p8wz2?ux4Ul|F2NlDCO+z z=)%tr+dqF>*l6wK>*L~t+yy@qa?In7ttU4&8#(cB=a{4uK89lc>h1W2q`h4rTQ9y5 zv$WS0B_}rE(ZA$ zb&2KO=lfY_z?0hpTSs3U^Jf&Im#53~9$$MmUprrV4xw(RyNA=A6G}E=eC5u;UPdQh zwrFavt0PKQV0$8|;v4T zh`zCdV^DZ*-k$C>78wP-EZrV&kRb;Y**;}ZKvzE)=$ln__wn&`dEYy9!2;EGXXXbS zT?F_&Jx~H*7zLO^X6{r%;+Ah$j#~YgLHz$`#Z|vBNY<>%)>c3BXR_+Bu-Q|!YuV43?*g#(9Dtp ztGf#TmsmE(oG)~6azqzptj(MIGaEE^a)5LCMAaFf)f2|>C#D9b3`gi(mg2xkm_c!E zjNIR-lZTrpJ?Go{;_Bk8iPO!^u?eW$E*tdd;UW6(t;plIo3k5ut5C)qNlL}{wVu)6tQiNxEnF(v2s8}Ap;wT%NbI5Iw7awT`M zn3{sy@8jnX#}9jFd&j4Ts|PopPS^=BYWx-${!{7_si`26kra$E6vR!Pm61O!xD=ht z|31y$GG*xK>gfsJrU%$jqWVoze~atv1X{`d{!N`pst*n-ou#I_MsW@i?sE4)SRwt^ zG=b{GoejyDrz3)mexS=l40A5-n>1bOQOkO)h{(i*6v_WaOAUtOoYK8^ogtCp3HHhE 
zD^Lf`V98yX@F_oP&bjE7DSjZseoja>%6`hYI`7P2NWZ`pT7?W0Ogv4 zhdPDtF4za;W0h#3R)hgw=gT82-(v(TBeaHuCh}?0KD|XEm|`*CPImP7s7T5m?+A;o za=dy>*>)P>rNpcyKTo=7W}0A{#mF~Ck7eG1NiIv^r7!ud?yavN2(78pi4{xMYGXCA z$@bWPJZl#ZK;_~%E;BiNuXy@>1#HEy6L~YSn7Zt<1O2waK6?oV-p5%h? z%eCD2oog}Kq+V=6iC!@)zfrE!@glk^zd@(-lQzBu6-qTZH<@ZZT+Xf1z6sS0$Cgqp z!HV)C$TLxcw?3>ho2#tZwH)wbS`G3#yQhsp^Y`(S}~KtdSh&NA$Zv>xh$ej zHnG_@KHC%5klhzfvPGs1Ldg;vG~Y%98hTP0D~s+y)^01-^vs@Y5jw$=%9a7rqkjh#Lz3&<(cq3*%I0lV@T2k9|5Z5&XCqNSk8U|RS>~PC zca(?C*aNthpbv5iU_NOt&4kr6#n|r#NeHcRx#Hw;jKr^8AtkdjWg$Uqp@as8CR1^Y zy8XrS$12G!#!;U1la^&?Fbqv-)yL0fGofUrWN;Z=undg&0Oc7pJo*R`c1WS?$Uucp zP1OY&L^!JeI^S^^xIn`9#E(#1;oQz3KwqS0oo;ycz^^W>lcqjyG^`CIV@8gG zxzl9G95+t>9Lx=4nfE~6pevNkY_90hqyOTryFWwhv5eoWNjYWSwRJR1o?H8^U%pkw zk4krhsVLPYFk~bO&74ZjsU|8HMcA1&HjGsnN>wa1 z*<}1w-Re95>}Px717z-{l~$)@DI|q6P%sR#s?#dAF-*kIF0Bw;hG0+TTP`e@AR|ld z4RB!C;Q)}qJ`i`tTviUDSImS2JpJ*3Vi~ye0__*Sx_{M^lVTIlw+p7j@RZw;!22cQBNPer%BYyo=j21{r{ zN5MKkVsX{4#`hD)8-rA!l&bITFea8+p7TfLJZT9g+Zhw(ua7+G;BzNd4%0%(7tuw2 z{`x6J#;`IzPQd4{CApKx4e#h{yZ{L}gR;d~t5k}!6QxZ@eyt8(nPr%h_eaiQE*v}c z3vw!QEF;?}5LRFEobtICeNuhP*{q)WdS=`Nc|i8Em(L2OZKzu(Z_osse=P3E}K zW`6LwYDE+2A-{x)Q&R1(-V3Bpai8a!viGcdf0&K{6=$@6>Ek}OJi%I0zOuHE&hE7U z-V=MWk<`AW34?!oFv*}yM)im#oCw4oz9G&wNSkjF{Jj0Z6iMOswiSYSJQ-?oS z=lu7OG}|`tlZ(#P<~Uaedu$v(-oPhP5cAzy*5V1@#4tR8=B1esuJn4QfuWDwD~OSI|Wbal!tnRVPioxV7DJ| z4=HQb9|4&{-UGQ-jVGLUyX^5pT@=a`UXNNHJy!vOko3n1B%Rnb_1uTKk@z|Wk$>~ce z6i|{kmRs)NNa`ya5?S}F@3DMAcnpy&nLX39j5Nls7wM*n`nkfWZ+c~-gzmiZ0@Jg+ zO<1UWH&5^Lk>*>pHFb}$Iz6Ez3{Tir@4eYG25nT*4sw5AaIG@Yl!yKR`@9ap6{gKy zqY&C&G_8`3{Xa2!o(`@FHM3F#HXUzDxt~$H?TUAQ#U+!a2)!-*d`f-KOOPlYjE@v3 zpJJ|eQcP}&l^GjBO(PJ6Bv;9#i@@5o(+a<}NW{bZO5-WBAyh)j4VQC#P~e-N-riST z$Vy_uyvFpv%YYgBTm0Y!C@Z<`DZA~dxi#o*j@7@<=U)`bZ^EdnV|Y2Z{aaS&D_KJ) zMa_?vzk;@q+ncL^)?)BYW{@69R>?0@uB=nA>Sikc>@T_rmb!-nH3y^`3ZQR7V`o5a zY0;BvTqw{&j!3hbY@Q=a_n7ykAoVam3ozaDn%;sbSF54?;3fe;aAIX(CDN2Y7zr|_ z7gAZ8XOv3w%wIkk3cMbwvUnkyMScU@(4-$SGO)vx3t*jB#;GE?K8MGz9SaP7y|^Xv z+1W0b#)(A2z!td85Wm*NqYotWDD#d^MtNT$(+-#!?p}Df%^>x-InMxWrw2S%Izd*NW z`|_ck$;IE^X5~MXcnyliUgH&Ln)XfY?_dUTZP#|XO6DQYqt>+3NWu;QF$N^5vFAjcFtD? zTGY+&sny8@>9nZ^9r=^T`huIAHFN2wGDhfWl;0b5R*r}4Q+@JpzCm~$+UmRQ7}q_L zfiko-P53fuHObn<`Tvlb{m{m)Xhn9P4aWe0}DeoD3yDRvQ1~sINx){;VMh+I8u67^oWy74!V~f3TeQr8yjlX`{O-s{)U2zfPQx@$9 z-Dv}9aorg5B)o;Eo6sY1vBMX*&~HWG@b&DEuRKZd_&LyxTQu$PHHV1#_TdNhgI$=6=kjK20wROw7 zIGbPR{1}s!`W5nM9uzF?6NLR#_WtDr1hG8wzToh$qNzoc~CY_-m!Cwvq|V z7dmky6ZSa8|C7l#DW$h{l2e`KsaUQQ{+&wMlw_Ll1ee9d_=}fZ4n{Ubrh)0$ad&|8 z1}fKgLXt(D2d#?;V1TL7F51IM%@KFB;RJgdPi=Mf4g#Iu6W2gayT1hkWm9$jJ`xYB ztD67(6`BjtEOMU!KX3X4pewzYF^>YyGxneZ*fm3=UKT7$D?zl#c;(4#KP80dnK1m? 
zBnxZz4EP=8xczeu1GmW0C~ofIf8SKl+a0XgFKfk-;OK_2#3T|qW&TU z6QT+H(i;`$Yl@E_9mBySZ!PKk8NACGvS%#wOSzc+UqlFB$~cu)VAbI)ZEK2@st|po zT5mI-IJdTS{#mKMgJ3SQxecl4#}bC@K6e1-rifI-vLXV1Z!(cJ+^|6fJ$7Jai3NO4 zdjoUY>6$i*0*v*lXQ9SWftw{@*NqqIU)%vrN6-jVIY+q+WhX-PjzYpXPX>>}Wj8ME ztE9AIdpxh|n}}hX#ozK;-R+&-J?dN@e4=N9*)_+iTiVze$CNS2UFilEkuOm*(njS* z;;97Y>(xyB+vh;cszDQAIQkt)v$R4VA?P-f7PZ&AOO19Dl!wR9niT{D5Q5?K=|5P| zxkBpr{t!$0@L+DjRE2abl2<@}g;<>YQ@;kqHgw2B^1=7i0P-_yiPiMv9I^l{l_4@U z#>!vZ3w`VhZtu4$P(%OyFo78iS%|ZlRZg_5D_c2XF4wn^B1wN_L(XBSLpiH#ODZpO zgo1ADk!&+{5OARTA+l)bt`j@STx+F0M)TLX7-{xoTuGETPk>O@|Gu@uRA(-Qst-H zmFG^OzZst?ddKyni+RP5c}D%Da`kyZ`i(M8wB#%^oMCExgxCo_A4inddju~ znET>PwZH}%xGm^qj;!A}2NJ-gXzfW9gzGH`hKT}h5=Jkvh}&=nipI7xMMU75ztva_8THrqcz|==4=lw&A({wA71V)t zFc-MvjPOvs-?B)p;L6a&98cfjrMM0JF*F>NYu6nxNmD??k*KnNT4hBS-f+)7m>6*18hdnQ~Z2v!`5x||3XEWUAW z$!~ZP+KxuXJ5%I(OSN(!D(LjMTwwULKIgc2dKmKE+ZIH_n)+zwF*QoGxSvf_w|^3% zjn#+FbG!7#?6~}M`*8- zw9DAH#PD8&>Z#S|aUc0ji14*-7Li(xEXF4tO|~y##fD6$d}cYG&_D%BnD8c)qg*GI zplT?omp3vmTrns6Hb*ML`BtZR7%^1(d9_suCj9V={EckpQw-^!3iDeb-=#<)VdJD) zt5h4%VNvn%3Q8#0OMj00Lh=GK%$hgi#pf{{2*sP)+ZF8dZDU9cO?@ETJM@CbL)36Ya*RV=2b`1|vn}IH#O!L&CkiB*?55eA{-f47PsW;aj!6ow3xKejJl|u0?5V92$;QKXxw88DE;sNad z^$y80_K>_0E}5@fo^WM-qCO6bMNL7=1p<~O#Z)&U+1Lv?mVA{C_}<;AEn#||uFq`H z@uN4FX14wj_%Z_FbzrfoQM*$EX&tSCqqrNoSD`ziy2^PwRF_0G%@NAz)EM{$ut`I&iKix%%B1)BkJ9x<@+&-chN)9^lJ z=Hl;A>y>IvE+bLF1@zuW*j$Oi53V-s{+#q~8T)rH<8ZbGCORdoNyd-;3nsDAf7y^6<_W7CQ*2x5z|>*inu_t2-jpiv_zatf zH>2KoQ(qDij&wt-yvU?6Koa&;#FmxKI~N`(mpc&*M1Q(hq4T}D$77mAAMd_PDK^6 ztC{8iyG4OpX@hS8c=?r?$B+$VpsI~T|LnB=Qld$`{hBe$7qSw5+nxt8R{T)+G3w9` z?pJIw>CePH=W4(3mpn9=*pq8I2My)QtmijPMk0p{3FG;fYiq`KH{_x);y`I~e{&nKKfJJ)sG+%w zZ(CoBp6pt-BfCu5xaaxT?n~-X%Dy3SWM|F!KNH;rVjC$0%l~uEBUS1Bn)68OlaFzn z=8FdM+40=Mhs$t`Ric?IhFiX?8b=k)`jSQ0#$?S(S4E}dR@W{RYgSZ&t7C=(b3ICi zZ)WWjzb5L4-(F(4AdI$NFvIUe!+51sWaMZBk<7_!V^nWqa{FeZ-sA*Gr|#dvvuE8i zVUZzG7v0z=PE`ySaTpA(JS%7o|a-+bl5~mrk<+ zF%{iDtZehBKdj<^u?uhcXm}J*zv7N^1a9^GJ4jICz~}7+Sj^ifFEIJ|fusCKiqN0@ z=X>dnTPh0e-V~u~3s8yi(S#E0fgfC?AWDylr?gq3(@c*)+^|jO)JMJpM$Qo{%V)lR z;h$6v`QK zb0+lR|C!CD)q_rA@&`pdqSZzdx223BkQG@_S>2GWy5_e`o1T=Gn{f-i zTLQs&-%y_j%K~}%Cr;1t{~veVld3!&>dq`3DOx(9d5@-515ZjT^WbYy z4cmp@lo~gs6{V6e=*C6vsv0ltoLnGXjS(pQOQTYP3=4*B1}+3WHv;||M>eCSl3505uEbI3VI3>{cB)qG%`NJ zZICy4lMvEg(3xxkm{nG4m}XQ;n;JEXnv3Rvb z#iG`vUNSm{&>JjSLrZo42>rb6nh>+WwHnv^4at#CuB9-UjCM(alAwLIZn;!ATHaUQ zwA*rB_Hd+`MQ}OXEdOTu!Vy(rcTDV6Yw z?#24>zNIqQ^phE7ahEFJuR3^_-s`*?rgRoyRKnlSZ?SJ1vG35ypOm!Qqz@^HDq9N# zGe@Kdp?pS?vdf>RXDwN(n$fB{=@vEi$2h9B7G$1m^9$z7mz?|YS}8^A64SyTG(*|`a0c$ADPYFK=_V1x^VAcuOz|n0f9eZr96me z>>*z_+U&})4$i*GXcs}HA zMRjg^(fG4=Hu0n$wQ?P`2QfdK{<~>R;_=;d?Hu$OBclEGz)q9{JDk*!%y)lKZUEO3 z*A!}fgo^}OGQ!vOd>Y!n1?RmZx8Fu!@$(b^FYm)6y1>r z%p_x+R?Iv(*Ye#gd`lB3WeBk3oZ7Zr?hp-vbloeZ*bBnnFf_%3^nK2?Bkz&h{akuz zkVoZwvg}9Loc1NRmsbrB$PQ-q6Wv(hU|5dq_}5Gv9Mvnz)&t!e08I3fKLv(|0}vp83q*fL=ztcbHVqL1dGe^`+%t<|(nSt}i0D`O z*@NFlyEP9-CA^}4B^m!%$@Oc{N{a&%2kt9Q1j;vU47S#ft{v^N%a_SgwZTG9mhlDq zniBd^Z>)nu)|6Le6w0wQBvVvMN6aq!jb2(BMMG#Tw-e>i9yy#sGAzP(2&kzXg`5P& zIVOQyvq=C(=*=P9W_1rSGQtgne6pgPf)4GT?%QO~?N4GpcvyS>Jx^HLUiCd`*w-;H z+=qdRYw2Bek%VSEg(!|^1$X?x*Jy9*Vt=gJeetdGo$6&O)FB2MvBJ6;jZ@A_OoH6; z6zw$fzWwwoV-5hb)ZhzW5*)_DF@}$34uQllmb^=2i<=5O-8^CDVOtr<cHlbrcfVoFg?veMTAqc>i0`OD6-Rj5icrm6~nGTRw3Yf8fMIf|01 zqex>a{_l1b6Fh9hxgOvoWJz~OlO|{77H!x8SYtH#G_!Z9cupS_h&I|6{xiFno=oem z1Ro`YlMh?P3HE~aLCNVeM6@P=acj5Oifv?zo*HW+QR zfAVmzw6Q^Pv+UP_xo!?i!xFxV4N?sUj22iOGd3VM=hY_EuzXkP-AdeBEuYR6s6sYW z&zv^d%C=>xE_-fB%kZ`Rs9nIj!NB!d&qnq5SHf;gRLLB-xD3_cDQCVt2-g`vMkdYo&UQ&SBP8^ 
zDQqKA3b_$gE-g4EE|Ji>^jqyZs?Pw9ugnM5a^=9}f98V3_eJTdhZzSy{v#UDQ7W9#0L*u(TX3rHY zew!!)4sj&h!qDj1f#G9Aum}Q*f?A8^l(KrrL;jAcgLkN|ub%}E7c3?kWMtUS*toZz zd1nR7;p`e@WCeIUKg9TRqeU>LE~e}*LBKCfn}x+AybQe(G;)uuc5&pkQEtEb{LXBr zrM_2!KEu6BS4ikT`zN>Fl4|*3)su+c09M)o>aQ9#_y@>AM5x7kY*-dn-;U1JsV_-M56Z>7Hq$7l1yrp%2 zFSGz%kAodOcooaEoXY$>x}ob&GHdQz;`KNKTTBAetT;;rDfD%(y4 z^bHfhUaZsvmKK<)yVq=tkM*ZSvi)!{4iSMWGYx$*_Nfr^C3d{z z_TQaq>UD=43l59LYxOF_lA6XvRm{puSk%_^^Ir$a_=cyq>b1UG@FO;>M$sro&Ge6x zf(n)f;lz=wl?{#7?;yN)xgQ%8byXR3%q3$M98Pg?1_{Kz$Ic94++R9KiT_InLBZ|* zuMPtKD};9c$A}u+_`aW2!+UVKS;uSkEO$@%E?fxY~iuI<%&mIMwF!#nnod(G35RG#>9waX#CM zi2IGOb|TlnyM8q#3JCJ_&UCyJ1mWB*2pJlKaf}=o5&@uHFed=Pyq<4)fg}-Vt-;zGkDP}!l;j=Plq3|W8u17dP$G04jn>2)JEsJ%+ZX{;KT{%#t&z} zP!-{aagCl42lcj>`i15w*Ajj{or{4FtNKBPWO`n0b&r5fF$;C}p3e$z8CR| zU2Je7SJ&zUmA#WCpy+SW1dlgS-(=|}oXY=Kmi|YT3O3wAXH0Uwg=IcA5$&=9*?zlZ zWcTL2oTH6aG5Wj6ltPn={3P)?u#S}BR^?JIq2B_gnR!KjG9_$Z|KiGsC+QqeIetdv zExqPICxNG^xU@=HEbp&i*XifM%w#$*RT0yglVoWP5#L8nWs3-FD2f*)qQDp38$r;^ ztLvc%6AwL>Z4yRCr< z@T_Dc+K{jeq^CFxVl8avA zQVy}yb8U!qf3iHs`B@gRx(tbe6jv|-*3$wL!PmS6Np^2p;xXPZ*QR!yj%MCRDO8%; zNWYO66i2L4Jj=FP8q4^E4TaFAws@u6=ZuIcq!k6~fc@S)_ftSN!*QM&_1yDfU`DUF&f;H^`Cp{-K(%hy!86j? z4!aI`4^a=1;p8CyQG0;PcU<2DDoDaUDneuftS@mhFmT;dpiB_cv=X{Md&~6|1|!K= zlj6;tE|7-jZqS5@`4%UTeZOZtOzoU=DbW1@%L9!^2+WK>zYB={0Mw6U%2J{}6G{gM z==66S-!22fdcGleq=C>$X!5JZpgS5=aVVsS$xc@VW`C`kVPGRz988o+Q&5Y&-EMA3 zU>O-HBq&W#2z0Z+ouynnMdc|O8OH7pUJhQ0!%)P2LF=w*XhIreXbKf%I*1?nSMVfW ze09g$0f1{WlHp~w_~(U70FT@=D#%FBU`yixhds9*EO@$j{`G)bE1pVp#5H*|y}SK4>-Bl_%AQBIY|kwl@Q)zPL%m+19?A?X@L_$ijk|?zgNu z8K10@f)WjMC{YjXE5rdoffrXSFGoB^jQUJ70+Bhi)PXC+SSya>sxpkhcDbgV6|c{S z^ZN9Nqx5jD-0K|Qm!}u(E~6q&aSCZ*r9A*Sa@bgmboKC^?Ckny+uGo`)P(%5=w+(d zgDK(D^KqU>>;=RV!llGe{Q4bf@1c4}W=T(PL=*dU;qbL?*Y|HE1NdALuuGJLf|Hq86k5l?lWJ2) z^84pf&ZXKnMQM&eFk<^Shq{;Fko@cn9zPl7?gBborqUAlA18{eo7Sj}kul38%wEjJ zzI_k@Waea`=&!#GD9FIc;hOZXT8M^cQvW-hzgo> z_r31rj)5_N7ig0)pM{^!WL2R6wFjh_9VtNV1{nb)9z_w6FDQI@r?lWy0$ImMI0r42 zviD=|2~hHx_<1+@`B7TxKfJ#C`9Vqb%jCUQcxCZZ5nHcJYs@>zRQ?qS9;?@bV#c0` zeoFe%KS@uxq$=VfFl&;9>VjTonD$A8y?MMSVaQ74ZjY~N&>{Wr40s{{rQ?+^C&OHbY)$A}+-MV+HR-;x!LMuhuu-i<);hGMq&QYtcyy{)^zQ%<)RS9oUksIyi1m<{T@A zp9`G~^ctaiL%uCXD3J?7ocL`&GFD<8%E8C!%!3%&LGbDpLT|m+!@hlLPGRU=!rN?o|DmHi-qWdfCh>^hBml(2AK~&z(e~EN)q};cn z@cu|MlxF3v(cZ6YUd^+Hy-s%jJz-1xBl8RHIOf=#vfbU-LV35V-x#LxX#ufqs-U0Q zi0Y}7!l8>6o_OW# z#n z^F3q4NgMHvhw8y?#3+JRK-`ve;L4ep5gz4RnCuC%2Nq!+G~A2Y=p*4T*NCLY+ZdMh zGiUf~D#3Mt?2r<=jLAnK{epO2@nHn&An)W+ciQ}EUCur`8Y};|bwDGix z_ctl`MA)7foXzP5ZdL2}5jVe6?SR(}Svb7RE4$1UN>TbNZ}3yY*$;uhK6vuIc!VOD z{Wh{Ul@PpO>sNJ=}A?hbEXMlw-#6E*F`p5a5qGQ>eokosqWob zuQ2|Ap}QHG>D9I}d67&S&M-0j5Lwi7^f2tybRgA7K{48n;&pA~z^&&4)+OksKe`V6 zv$nZ1EXY|?1ItOnq-+CCM^LjYeNRz&XTsoZJ?#%I3i&whC;Mwz%_7>c9x~F8?~HaK zGYRYh586%*xmoHeNI9jwFJ_#73t4U!+t}2Ac?hWzU-=SnEnR34^gWk$N=ns@9U=5d+hS1 zZ^81$%2l6!Yl3QBFz0qZ=i3s+ z&e<8~?jy<32K&C-Inqmx@R@ zIw!^i8}SF)C+3n7_sHi4_t*b`7aWlu$QRt7TLQ0m>kYLUmjeQ!k*hZKwPxY=nHTx4 z(HGiZE$lOdn8qtkN|?kZRx99Pbm3mXN<-~({$x(BR{62--@$rgnwvc zAi5OE+W-81w~ErVsZ2FhyFSeh8*AB`FT3L94}n#$-d0RD_o7tQB*9SwvZ3MmkLwG} zf4!I5;8T_U>Z2o{<`|bw^=G2pvJl}`((Se{8d%J5#`MzG?SgFA64?-V)tso2<6SMc z8mE7#^W-dj(afgtT4&ELI`*K&r%W+VJ48-WLz&)B_~nlQS^R{@_=XL$Lhrcow4+>S zKSKX~UtJ0EHkd?i53St82QQJy(3JzZ9?pkb0}~TTPIiaT>16%tK+slDrxcb#v59 zTt0I(p{(pt>{>%d`i)jqQ{x+i`03zEErB)G5DkVF>{rCU920yt5PEK!Z-+P^jrD#D z#TVf>hZIV`k7+9!aYt$^discEy9f^SB7e`Pe z$3=ajy*Hcr1bBfnaHu6lk!r|nWf-?l^;gLv3;_@QG2$fZ9jnH3hSwFpWW&Q%kVvP29%fZoSE@3?&^&Aav6uh zbVg#s3}*+w|Cog1s(TP{_6@1rUt`ug(G~2a%{e^=YUZp|XX;+mb{DAk*+ptb;{P4^ zi~k?8-YGh>sB6=XE4FRhwkx)ss@T>O+qP}nwrv{~JL$aNJGw{ze;=)rz4zG~bFb@~ 
z>z@0Hw4R}+6cfLKZ8lZ*IMtww_R&CtA60q#sa8CIj!E}9V6O8GDiX0U`b6TvSGBti z;7M*63Qbm z{xI7*Ktq7d3U(BZ-SD!vVGjMe#|4M|=63i&NKoCyIUjhSoqY)BpUscQtY252_$64e z_-?4YdSl=*__j1E_G_2@o+gq4&hz85Xp1N$oHk18i@x^7p?#MJw&3Hdx^Gq?A9(?- zOG}nOg~6@f#Pju*3+^a?27u)ef~XN9=>IJ)0lcQSNm%;^g=%WNuzBR)*nG;h#k%6(nbNtOzaLGSac-u-#O1a`(3bzbKk@M0C8PNg{-9KJb zC9g*p3cfgPOE3BBG&+rnBt5Rl8?^inj$(#}SHRLhfx(~gisDaWi-dPi>A<>aqbw&}=%m|E~E%0KNWfpou zTM{0p3~b9L&O*|*$!Qid>ccWw+v^dwyr58`C$g5`h;3$^yB`^v+v7i>FNil1cNr?W zI>+UwtAZ!GNo_Ouo2GlkTdm`w1PFfa0MdlzeQ3-sLJ6kQ>Mwhg#dJ&x$(YD*^yipW zhVFhs>sSu=hj)Jqyt4`$`0W{H=OTFq#!Rk1lMl}7&`0C&w%7Yd#D5^rJGCatkorQx zYcO&d-g${Lx^W(p)uSWDcd%WBpHzaPJp9yxpzO=X60YS*` zQBLgDy+^B&KpZ^KF$G z*vEq&MqG4m(nRar$b6{B{FPq&XY)`P+QV=bE1+f-xID#RTTG0oS*%UgDRcCYN zOLBWS#ZPyPT&Faue~AQ`rU{`80USq||DS4^WNW`R&WV8$^ zqiRi!v9xyqC#!V?5Z$eJU}Sz}FNO00fhhMXS#=qJVYZh-?&e+YNSCDj1rUWLyK{KT zmFb{tl^{2-@T;_#FGQoR+>|EJ-8HY4h1;I;RTP$8tHz5G&_Lnw59cN>PBT&FGqE4`%M|ApV@Y(+4PYlg{q4sg1&^NA{ki_~?Ff98r^((V{5eG)o)(yQ3i=gU?dW-5wD zN|iuv8s&{A{~7LmvgTD1XptU1pez+1=kiY!n};*T3&z7vK)?P}3@m2~xo26gd)r4m z??Icdru2YiU>y?Qn<$auPW_sdi#Ry;YZk3SSR}UDu{jD;cdiEg4nV`_H+U|Wz01VO zKX?AXPwV~>uBB*eCTrV!3MrRY(msq9P~c)P*V>)d7ch)&IHJOX`_NzZmjpEA_q9(5 zTuV_5LL^hsd!y3&GD}Y8X1-F3d$W#UJ}pQN*=}So-JKe-`KW^3(lm~=Vcf{QFMf{# z;H(I7mnD<*ZesF;0ML6U$S72IGJPmv>!Jw5Yk_?MI~j{fZ35r0G*sYPqJw0qr#Mg< zvb{=78&bDl2(J}J6-BxBe916GBr}&0UTSRbp^REd0h%@aFYV1A=z%10m<2;_YDIFd zy(<0|?rHdEn94R93mMLJNZV|+Q~&&Mlt|#J;5xr}TtUr|E}-=0+-)e0xjV8@AJg8s zdEF1BQQ4_<-85c0eHrQRt99(%GjJe?Fab8c-wu@`S-3Mk6F~2Y5ECqc-!*PO3at;x zP-7|+wYC1yxlhT$(?;F(f_EQ~+9+rcLs;bD%l|+7fCJKB~p8 z?vfVU#%2Rl@%0WnyPIlkute>?_cXKMgBojG@T477AiT2gs+p_f$;@ub%(rg5JmrdLAff^zFEB^>=dtq{q9ATNC#60coG?mD!l{p1jDaN9YUJ zrO%9d)K6H^Tb$Ib$_RPfQ&ps#sRTkk%TPLI3Shj8Kscq@ikW!M4jAV;PCFxg9Z?0( zPoa|kV|K^llBx;x@^XY({&2@(DEjlz81bEmB&xZmN>-q?_@bsB^V4T+I^e91BS2-S z5B03(%ZCJg;6QOZ z%xoF%tmr!0@@#is1iu@Tct?!iA%4%@Uw6#ShuKGNR+dx%g|{_I_hkk!<4xK;VzF zO*c><%2&BRfWu6olSyWEL4B&VS$R#&5%T#xp1B@*2Dqko6)>8zm&SX4c3puic!aD`&}f!= zzuJ<}dL!wA8JR(^8>Y+W)c|)(n1brz4nZ*N?=(TC_*knZoLcQ2?#|Z+Ot_R3k)3;T zYA_v!BA`bB6u)z8+pjHGk5GSO?yh0Xxhv-^Sjdl?T>G}3z^(5IllGhvIrpt$giEs^ zY%q^i@etWVxg`I?!~VGNyBqI%DM`(sx`pLOg!r9VqtsF6E)Vk+E2j`XbJW@!+<~{Q zyiVD+OXhy*$8-DL@LVkmusIm@Kh-x}OgW3D^cW1GtRwkjyyY|bQn~SDuPT4DiGl(f z8(7|2g}j7n>Y5exdBcHX+f90II;I*KW}eb3N*o8oj1nf=uhF6t7RS}iAox-M;!rZe z#BbvIT;z;!0ERjn)TD!f`KNZA?WTGN;x+wIZLOD5O+BCAE@Va&U`R;;)T@UytO-EsFSR7NV>6NNezXsZ z)*i!}-~3kWQ(@z_H__jq`HiwCN9KHnWefiFG%sM6Tl(q{WgZ+Crud2R1D}r+7bcbL zmn_f}O1;_`pOQ;%z)u-nVvMS=_oLy?5evSKN8ldA`02`y9F*b{KpVUk^B0K3y=S&Q z$mo{Ycp+^8si4&EmDujF>f0P&?UAtN_-iU^68<>diz35~f|ZFVFP%Aler+h#qB)|j zJp+OqI)=Cem*YAw#TgrOhx9gle`?@?p(5O04Q5s6hUkPP=aJj;}q$crl zQk-|>l8Wad1R^U>$TM}HWhm=}Nq^XV34pr#Z2LP%>T)>aJqSY*F^3u2!MU&t;6c(>+#8Q4noGmJaL@SHEZ_ zuk4VcFK+|oD4mOm?1jPZbqpGBK=lMSI{52ublzE&xksh250U(yXGzCzIz{9VlM{AYHdw}{=&=}!q06a`@oMOXkr6i>n6 zEud4%h^G%#0hzhZ7~x;>G;Qh-|I_kF_WW4lWO3b9Uv8g4pH5owEI9VC&t;UjvcMlD zZdnw8G)10A+*HX1)L^yR6%xwJJ8+869CithY2tC>-4DHJX26#{_H7fFcdZfer1D! 
zv~gw^ifbmPGCQ{~=<((tyt09;U>J;JSfKK+m7UQjW$X?=xComSRq0<4%V5xp=Jb6EVO^X<#%r5-N1FC7`T>w^E z9HC1tJUn{V8wmmgZx(4UBBs12c6;5G2MhL#&3PPaFV_A(aljf6K9RakDh;Wf4vree zW4x41{OZnJxh8`;Sty`bN>uO?x~+??f=7;L}C6H96x!o%^RQA1E^Q~eUf zhtEd+Xa<(Tn*@eW_f_qvke6;m9U!`*4yY8`5V|<~HSI6g=VOv}q!&5u@Vu;f?aJ1) zb;92C%hITp9Kv!RN(+V0IoVSut4T0tig!O7#EV{$&jdhJUYXI$$F>O(Sfb{Y5#s-O zD9ywS6u*?h*K`mPlLK&nScv2JPEG_r{BvEEIAAW=)#GZiVp?UOmp-PlTxgy;|~+!ZD|sHn2%@NSj>`C|TY(vIO@dbSVgy#8Rf z#Va4UFKEMTY5KzlNXUW_0EwrvE{{_^eQ&z@cpG0Vrao1aC7t^1rNfrQ@ISBeM2wuI z2ea^eh;)PM|-)rzT-kx{9Zlc+IUgqk*Lz-q7zy2M3cUWjRetSJ1x0ExDz;#ZGOtWY@ z?Y#UmukD3^v8wIO?gC#f>xi}>O_X~W3Zp*>)tYN4*Db86kvn9u11N+ANaL#PJF12itF0af*nQFy1m4)X3b{=OwGu{Oz{J9KCMtW2 z&G=b-Jy3k`Ep~e5rVCv4eLt5S|N0p>{qRiOV(4{6+hiC97*j$o=A(Q(sb1XT{{?eE zUxY8?N%=UIpn85CbhWhw`h0y&cKP46)|KGtI^de%32+^dUSkpTxx_nJkMH60R`~cq zX1%=y%5if|X7%5-)}7$tBlqMNpTOyBxf-U|!+WXwbAR;y7HG`=R*4P{)Ag|9)R+hNH1R_vCr$9>baU2H%YraKj&T z;O@$qO<;5l0se(Y^>4YhPpIOiYRQveIHtX^9pgdaynv!=k-_}lXI8WG9d&wtYme4v zwG*NJ`|RvXXo{fl*wcEO$MLC`cf0!JlcN0ORb(J3<5@9$>@97*1NEvWZQR@1pH3r3 z^bU8x!$1nWg32s^g#hqw=lK2#n3~XcX`rHK=-Xwwj668Rz@<&-W5%LQ_-m79wWRUB zL8F(08t{d}uKqG1onl_NXvQm;Ez1?n0}FTio{5wGSfMbf)i@pZgrEL3n0+KVggRhuZsYV6V6NY|P98AF zgozh0ck7Lf*Cl7#FUKHJlY*LEnqi!A^L?2tLE-nVMX&`aZ|#SBV?D6J!O=1|)6@N? zZ);PK!eP7pfm|1_&hh>6)I9VLe&67yE2iIoSSLDcmToVmoQHwBrB^5noSwL%w^|+1 zgKrsBvP194PaY?GXOFfF&|SZf){^_qlyz{u5aQ|xA;wKPFBIoPB1XN~f17k%)Y^p5 zUIai=e{hAny(weevMD;VQBJpsn749tN04?@NDr9DO*~dsNxzOuIwsrl5I7xw7u$60 zsm&_hs9(5jR}`R}5;BMX&y(`9=Is=#=6ds0cLCLj`h(tgFdG2#b~>J^YMhJz`AjbB zucBAfq~kuGsSvW<$a!9^L(@jRIgB+BV5spq$SB`7n$ z;1ir6ME0WhH9^Lmp$|}I{xnFP+bEX5BYK*Sj>U<9&JX}!NWjx}5e64=-1Bb0=?IY8 zfW(uFeLXMC1Dq|s)(eko;4EAg_PtT*5P}ODtl|tO03z8{8ljszHZawPfx%XWvK`ej zQ^e@I1G`ms*@0r7r9JVS)u5Hf0dwaan&HP5f0B$k>c`t@90fC9DVdQLRT6Fc2oJF;yy%C}Ib z#c`jGl7AJkin2c;VV%8!vPY10>*zNRQPu&rgs83Fu50yzv#T4|*A4k((|8zn)x@W* zH2N(0H~tuGtNy)YZS@)=j3*7?`UtBln3y9)AXI{+q)8)9{NHra`Kf0qddw zmonfJ|H*p2H2g%II@_=AtnnJ(>nZxG)ez`?lg&33O0XKruu2pImue7*VyFllT{chS zo0WcqC6&4bw@tUJU$jM$L;vr2#8WD; z4cY>C%gKV=rhp6*fLJ8;%CPUg*n*D%pf$L7q4o~4l44dZ(!@2x4Obbn0wFZ#UTLXM zBl|0V3zFX{!4FZ72^8=^Glzys6-cX-QAsxtYLnI??}cbmo|d8ZVOOG&#f)V(eEJcs z0#r-7sw)|EWUa`2-_lMZcoc@Y2zv}+Hy|A*!coEW2~`o5@$PO)5xE-Z+FHwjbYMdv zfZy2k?f#I~`du)5dpO=>&{Pn*5ym9XLM(^q@AULEf8FgJ=(qBJrdMGq8TbWFJzmxe>H4YNIGw)ENN#p|-0BA>h0@ZI$*7AiUTdoIw!cyn*SqE_qsOb4z z2~;&=T~MqUI#zNx%_fJsdlt^n;kfX!*`pq5(DtH^j@hrYcxFmv zm_Z=)?l|YGdZIr@l6s&679a&IdmQmvpolD>B^=aK%gTjo(WFOzpRc1S@IB%22{^u* z(BIk7L(Pc1K$PXTs|zVN6dB?7YSjC@SbFCOeMVC!s1!1W%-#tWSM|s2V@wTy^%_xHoliV?;p0&!QhaY3Zak=b-oUQ#I$iMy}Z72V4+aJ~Z4g zC$KS$2aN(G3-T7&XO9PX-rdWsHZK3Ifmw#9Qg6D@6anv8 zShMDGTSt?3x0@~ulD%w)?OP9X81;Hwi@o@Hrb0JQOkzBY=&7a!Kvnd6n7GYXd(6n; z;v~G+o7PF4DuUaC-*Oe%0jfg0#s102KcI~3D??gPStV0zc8Uc`w{0R;jLwiUDD5uu zdX7p1gR7`WtM&YyIL(ev{OkwC@B=3k!0%G=sgeds5JVeV0|9IPg4-mmH2;!l)SDFp zF~XyjTV^(*st+-1B85^tJdFG{;Ri&bxpv)D(u~tmw~?3H>Fyma{ji zWcOpFucXG&iA1js6PXZ?*I>z8p&XG?dl6w>$xso*3M zN=@l)RxOj4wYy{@%+DpnM7eGqGM`54%sf~IRfsDQUUB>tcvyYse>nbWi3BqY=bSr~ z9L&q9Ra$qzT7WBpaADL&|EeS9h^eGBqM-2li!5vC{+M$;TA?8<>zI+b6J$=`zl{AgG~N zD~y^!qeWNslM4aLGj8N)9XGw!>TA;lnoMul_}ro)m;g0K|GHvPV+C5FK?9N>QkGAY zmX%MgfrmC}#|5KV3Iv`T#EBtesxqK@!x`LPx}#Nr513S_sAbB}gXrL@%!AkrwJ!Ar zFcf>jRP2n~m&pL`F;$~*Low@UH4^_m{i{uWYlp#Chs~yCNagCc4y;&Di-4M0S&EQ@ z5nR}+`eKt&e!j5HBE__P%+f2V;?%oMvJ6YwfCfz%G_u6HzikTeIjKPn;;XR(?6Ek6k&dBGU+U(?p3mD?JJ)0^35P%12EzR=xh-|F|f=Dp?G(5I<$0-8cB zY9=!EZydcR=1nlKr5v{(ty_4LW+Z8Wk57KtZNr&#J|R2jB8Hm)z=}o8LoAM|RO4Xbaz94iHXF2J_kS z(IU&$qghT}V#+0@NxNRrwbhXV-ay4Pca+mAvRc_KAbS~g4P1kbW48*zK@ 
z{|uh=N^zE_bTGv!-c&W!0=$$HQ~9a(frsds5l5}U=xQk9mcTT!w%FvK(b>b`*`cu! z3VED@fHv|u2?@d0!CckBki!U+^_5NLL}%4kuT&kv<`x$#6XaxlA&=?ICgA&t`%{5% zpY=14IL&&gOf4@Bb1mt(_Pn2V&a^i_O0R5dZ?H9Hl9$B;Nugn50b*6sCd~)+Qlhk& zSbNUAB%|80d9+ummv!c?d8OZKP2Yuv`& z4-AqxvfYl_J3`WgM&5Ud%%%ojM;&6ID-ZqTCzGlY`j`%wd+SB;5@nU@HN;&5uf~H7 zeykTeQzK?sjQDy41ImL;9ruV@cf=gx)WTyb)z-B*Yn04y?ysRF|6E#W8uGZUsLb+^ zo>z~q{C+9bK)gk0#o3=e5+6oRimjnq_k)=r_9Ub=5&brG@S8k&RZkZ;#SepsC1_pr zy%M`P$%@(4K%iY_d#r4TV;7~M-}kl-U<#@tENrGY17cUs1mMPg74A>W=NUWjkaE3; zeHRfe(ku_F#2M2TsFE4eB_TcY>+gB9ApW8S45Ro|< z!qYVQ)|INI2Amm&kq^S*Oex~tC*FW3di9AGkrm+G+(?F^D(9d(1@Sk0#?L#AKe8LH zQu}TIwOIOFH?4PHXoS$eosSp5m8S1^z|U6Er{@|m3f!5&v4fkeYHJTLyl&P;x;J#q z1Hb*niI@soGj`Gow?}n!apcK~V&>B1SZP({Gi34p4dD1k4ZCY<*^6xHJ2M{jSpJ%f z4KyP+b_bAlm@8NP*|CGbwfHE5tUY_i;Yd56nR@5xQ>u3K*`z zhQe5@<0E&EYQM{90j#8rXbb$l`%1mxO1_xX5rW+Um%z1vN_%u{YpDXC%Omw%HKr`j^PMkiV;7gT9k!g5 zbc{zJbe@pnt6L`hMQB&=Ixncv&?yeGIYs{T>~ zbPIkdn^As=mYUbbo5G-}Y9~Nv79?I6=)Xc);*n<9Pnrsed2!tDv zOHfAJDRtQ1euDEh;Th8iGI3i&wW=q*6wAgw+8#&EKvT`LiOy|SnZ_w}ArAZ=+acu1QY@hENKN~4*HO%(V8k=!7L~k{!E!!&LR+^1KwRcP zHk3fUrpDh-`2%#F;x+}a^S>zDKwgv~aabT01>cU~c9Wjuab|nEsczp_xTmhZyLfSD zIC*?EFnG}n9|R|ee|P@^)+u-aQ)=Dre-*QDnCW7Mo?MDxryO-dcR-5wYrBtsO*j3Q z51?oHlf<|>pjS1PY7Wazc|@cJP$bV6n<7zW`*4NE)yM8R^Iy}88;o=cS2osFO+FoG4#)#R0>|EWLZIlWt(12k zyGi#Q3$HT!{!ZG_nYGndkF^FFJ)zPA4F4&Bbwrm5 zP&$Sg5Tp>AfSVu5#)%Cwpy>3hx^Xe~_wKJF~EqZRR+zhb4Us8Fv?Y^I~nI|KZxrDWm_7iv0MmiadV%QIP=6(f{Kj4aNVXB1`^9MYf6msL1)i|4T(itNmXp zQf2G^t0MC`41QFk<^Q81?^)}5Kr`OJ#2>&lraD^bf9DmxKm&_sMS#r4$N)T^yehSsCj89{Y#yk&E1t{6ey zf@rZnzph2NXFfkM<-JG4{3gJUoQ=-d26Bk3WdeueBwl&-F?5Z|Z*Ptg^W`oTRXssp zxstqo%fI(v3~w8kWvY4w^e)}-G}%X)TaTcz#S%^3#vZ>s>|=R4tud!5x&c&#CYiF8 z-^TeL(b{0YkU2D(=nED*OU7=v5eYDRkqOFLbCY#{f1ki3?8FiFWpI$nLMmT}W%Vat zh5EJ+Njen}!vd8`t-8+&M5&|7TLY=PuvNNlN%&oa>;asZv%r*HxzpUTfA_m#NBZlMnbEjY_AW=bQq}L|kP7Yc zpm=iMQ#2FE6hE>^xh`A)<<=q&N%1a>O24?d0AvHR+WA;{;M7z^gf-B3bhf4N!| z&5;%46mNhre1Ii&3b~H+tGC6C9pb6Dn$JPJVKwTEGg|Q>c-o2eHoiIX3hzPDD@GO{Hj7fu(TNsr8vna<$5u7JwNV{;h__P&EYMdg| zk9ZQm;~+8Ux92Qg_xHU((ZX1{87F}d@0SU8(P$AC@b)mPID}9^HK;LE!#ISD576Zu zuQ(thCsKlB2Ww4=yO_}*6+3H?(Q=>{hsTGps1DTNzxvU{7nFSeM?dbW|L8}Q$UtR^ zzU+2;X=9GsggI8Q?DuH6(10qbpe>o)M+a5^Y6Iw& z9NNd~--v@QopL@UMN?)d{&zcFwA9v&U9USWw)_CM|Lp_mk`q)=0B9Y-`ly>rW(E)` z-5J5#OipVmH5#jD(@w3ojnuErjhR)}fNAX4YLwK$^;d!nJf7GA)=ynBQ#kB8#1F-Xo? z%2f4j1WhbPgcP$>0E%$jXRl|Z#z&v3HJx}#@^mSmHBEWQ^wQTyh!a%SRzdhc=ugYF7 z67+9hMZPXguDYV9>{^wZMFmtTc2HJ()4LY5Ds-oo*(U#(1E>=3N;q0L__8p?b)r0U zQ(gU;z;=cYaQ~cWQ!oCiarI@>4h^7-50C5w?;;W79yz2?;L%T!qGup?zG)=)IVY0? 
zQgRTUCxzPUSukPGXs3r8$Cvz;2FzIKWc5?fSxexB2hjA7+S5n}|0vy~gbY`8KMvQo6@JEAZ+RB|z z-s%4+2J~onbKR;g*-^~yV76QGztaJU&a+R)4SDcm49~jfKnc@?>83JP7&}bw!`P$@ zG1Y!pgE${mUyN$5q*uUeuwcy0W2H2hrrC37qmL!-|A2iN7DIf7S**Jk+t+9@E%!EJ zww-cGfb)JQX-#0>?GE=0#Cw@u{q8^lBLB8#*sX(kn)usWUlQkdS@6j1f1!TGt$lf- zX#g_}b6V~QE$V|xxZ6HuLugT8J2!w6C*rLs^8NVI`Vr1`>XwI^XWGhA)3Z^Rurw&S z>-gu;s4BHLzrw)^(m6+1$REL1y>rlN0gz%TLVWVVke=wek{32#jj9ncubn~=GH)Hm zQ4s%)f#2zJep70WZgfZ0<`{(njzIZZ&%+Tl^XWO%n-r15*G(msrI$>tPXq`k12((~SR>Na?02V+#9Mq2kdY4Wm~3`&EDWg2Pt->+S0)AFyiD zdXwR`YlNUW5FG9OV%GKclPPDr`s&8w3U2(=uquPQHvAna zqy^|v-E#3Jx^qVR{+YE}?$W~BnHSx0a&SC^mv;dR%Xr?y;PR^>mT`aY8pc^pt&~S& zUZpNed42J{DI@}^C`xc)re}3e6~HUX=+q9LGcsS6d9S~_UpDXha#yy;a{*%UP<4Z> z6cUnhxLhIrXcILMgh0$dWrk#kehJl1t3SO+dfEmlM(LPcl>80_l+;)I{*s8s zxqU@??EKjZYfCwP+!c#7nI;aW{4QcB_B5YUdsoEqoWm}mw)mA`&O-DX#rmfT^eIE`W*q{dSXL{Ev=3CI|k1VL0JOZD>#Ab8OXW!kRbg?B3g8 zDn&+;6h&?sEo|lAhKXmYkN_lp6yeBrT}md4#$~U2oAdTE1(HN_KOjx zVGJZt3p66_`WvWTMzAT^@4;r4XgVp8)f=_6n$+umxzG+CuM=lQED8_dLA)d5k9X#< zorF+Csf<2nNR5b{5q7zh12!X?UR@NCSMM?gfiCc| ztnx4rtOE28pZa279wYi;+mXoSK7DqjUei_j(2Nu@e@>A|u{pT?PG*2XhfCw*t=y=` zNc-1n^mD3c^=?p;QRK)9w%De>qyJ!sJXwFv~E@F z*gR=nkpF%uDulua&(c58?vr-;jU3ZF8VA`IkP)(aER9LQNuxSM=AL;1R+Qw`ra1~@ z`h}(P6#x_4EPp9P_8;&aQ9_s8XyRC0FdN$P72rj*TvM~tAM%jKX^<`!_iA-Dgr%c> z%C`!;5&WGwy!O=n%(S*WHruayq_gw}yc{eT;@(=7@$F^XH|aBq&V|bNMG3NdM27;X z+#oGAtXxWKxsRFyH_rwpaTnKHBgeXnsxFD&cAUr;WWu^d`8N`wT}N*Ji(v-3-m!W} ztLE)GsTO;}x}>96L@*_WHh|8oewewu8*kbxJqd;UHbC<=k~K|DS#O6W!ES%&6eF)Q z3*_8xhKM#iY1IX@s1%#z`m@w~?wt@oqR<5rJ40q(@0&o#Bw*67 zQZ1|;zT9JZDF&!BE=&`oY_Yq{q$ zJHG$D33nqlw~j>UV0bZ7L}{3a>L6_BDxS25;OX))tgw>w(qOL~F-y4^yX$8_U>P6bU zH9qWb%Pj~dC;)%IeiS9WI^uU?T6p6Cc!{{P+L`%NX$_>903J)3=@v@3@>(#JvT7Q7 zWw|8ZI}Hpyr>GE$`5=JuwBKU*JOLaheS>Q5A^H9jzBN2Fg^=&C6L{Tcu!b<0LP0sAEM`WSBdn0+$Wr zVip)pHNektE%h|(W$}AqzMoyFoevAh%ymg|xQ*bMG{UO-`ttBf9d~K2~P$A@24F`UdS%jE?>=%(| zVAm9*Ns>^&vIc0(R4}f;w|NRjVDSW~uEK9$p+3NewbR@^BJ)$Di++G!8?~~u!~u(t z*;76;E;!W{D5=6b_UevWq)BB}k5EssSDlJ9Z07!f}nUqNuPe%O*>I zqI++o`|9P9GDrEODwostrjG!m|t7l`H^y3rc(zeJ?wSp{hc3r4?s?;`I@la;P^mUmU zfmIhcSAg49g-}#rk0tPzdW(HwKP@a@$Z~L!nQWWYxdmISQlx$Z695i?I<4R|jNtxh zw$g#s+~$kinHo0WRp9=V6wy+XbnjoJOwQDGfJ?g~_nw*~FUoUbsC zwZJqree|=*w|Y@&g;#AcS}cl_kSza(L;5)0byTXmM_YSz20#N{(YKA)$vE7~Gnqhpmx0PfBukCE$lB`)cX1{)Z?xZU2`j zr;pPB{f8(cH2&kdJ7)Bn`iI4UNMR`y;ntof7c%KNV0(U96z3Gv?i?R%Zb9~X<$hXp zM+`c_r5R#bcy}puR8HaTgw%1obC25TVr5R5Q+zGdSiEo8)3i(#i+?6Sw`@kI9MC1!2}~||8=F?`7lp@P~h@UEOv?~&=SkJG$`4m$&r2O8MX85n-hkE zMnHF!jXX*8vi`gv0h@9oG><+R4iHa4B5T|L449QScUPqvfk_j6Kw`B_9mnLSjzNZ# zWG-H#cme!`tR?!P6(Z%dJ~P}=>V(6cRuov3Ez@9BY6el_h6NwWldEX? 
z3Uk%%5U3!U;M5zMnKnfrXBC@z!FQBvP~1rI?1)+xwLMyP$R3qX25dWWx+4O2T<|^t zmTKD)&sdjM5r-h~_ zOy0db?hwern5;P+-{n4MG))j%P1NdI`Ariuxnc|czj-q9|KQ|B9{`o(Z$rV@+@qhq zjJ6(pIn# z1f_mG(&^)UD!Tg7HtB<^)UKnGnR1GDJ{4uJqfrQT3EMdwRGGas{t^ZFO)^0DX# zBpj(pl&8wF?Q$1^+I|?0()cm7Q^{Ug@NFTRksg#>cWrMh<1(GPfl2i1HaUoJH2d{=~XxZ4I*k6V5FU~yK^*%=d z8h<<<j=QSMfj*} zi_0!-pX~M26);|nfkmYRSI~-m-;#f?X<|nAx)|>TP%fX6%pAI35V+JMeHj>Ksu^PM zKC`Tgl6OF<4X+3HI{chKpA3uq^jt)DR1+617QBI_>q}&c>w~&I94NcI!e@8N(DiD1 zoUFl8mXu|QP#&uCjWP1atqE7M zqqN#u44`{Iu}^=0xOCYxG4z@BRrgl`D5yp~9-8q&{l@9{(etG6#omL)JhB~761}%8 z=(rtW!#|*|AJ+)xRuF&Sn~N^IDP&I@O$vOc#}mqdu0+4*o$PI7Bx57v^r!!hL144f zeT%s92(~Iz#VU!QtiGqQ$FwOpxK?JOBb0Au!ZE&%fG50-?n2fY6UQ=QX0ht`Z+H-g zCjbS-({KT2>Z%EIUNb?ox97QgG#_Xb!62(#ChOwCaQWbYy>#it+CpM+bIzMFkKD&# z^hh?!O#EV6+?wjiTM&^p!*-G6$SREFZU~VI!3etsnjfERaoJT-18HWPA4)W?*~b4L zkNZOVCB#Z`lkJUW0Cu^hKAjGOtFOoedjyDBzCpGc}g*pZDYC)^s|E;Kx1td+p}`@7L~0Fw@|$FeH=Hzt`^a$PNH> z?$-O)xf|z+*mC@Zq-hdWclqX%Eq?^b>h+yi7*y+lW2j5$+eup3%x%i@x{!+~W~oD<}5@IZ00(@xh_IaPHY3HuO$;8#G3_uXaBj~(0|YiWbp z7^2&iMqW2VKx0w;dglGt8en+i1!DLo9_kQ@BdLg1HHx?*qt9~!r${KJ4U7-jrMm>S zyYS10YjNG6qi{8Qvmtx6WK*wF|BcC z+M!8~L6b`uRmbv4U_tv844Mt#@A;#>JmLPrFkIGzv&~w{AUxyO8{gx=sbd*He&&RI zDV#rNyBy-|FeJOEb3DrQnb)lYrD_-^#*+D0n|_RC)Dm+HXg(sm4h-CR?gkLn%j?*% zbuUX7;Bh*8Y8nDDrvnJh1`%`gG&%l6xIFgsLqjAct^q@(UYvYGBxIrUec!j=dF#wX zEzY!gJRWd9Jndwhu0GXgX#LjdljJUZ5Jx0@8EK2Z8vFFBy^*{20qIZU?8N79Bb_&@ zDfT@BD)q`GE_g`!odyp+$oZX|2jiwX0xF*qq}hqS3&@cHuOf2UISW)H{mC_2RH3me z$LM<3)9NAL->0g4qJN?F5-4HvOh(AiKiLBeVY*x~S=cq90>qi$KF-1!AXFldZaP>X zBt|XJr7Ui05LAzC&2aZ>k^`(D-Q{Xx+)2{l`K7PWu-O8CBf<0<7=S*ikdZ{`tOdTo z^F|m-04k=r0sKTpBS0tmC*nQ6+Y21kL_atwdJl=1NxEq> z2wnEb3z<}h$t+o+t$OIZ{E#@@mCKvx3T@Nkzyw4p)ZU8@_*n_8^_g99W}LntR+PJdJN? z(?BUrx=b<`grOSx(a>QWRhY1YT1hqnNKU!?`B0|4Vy_=gQ=kF+#zjRw`~m}XH0t-Z z&nD!LGzQye6oC=0>R%Y=Yvst(IA3LQ`HJ&>w)5VHszn3pj_t*tZY(ObA_ubOCyF2K8?@#K>J+QTmo0j&A>s7rH;kO|(Q=Pk$ugGH3JF*rShlSfSDexUx(^HB z;b3g%h~{GsCUa}VjTY`dHA$a>Q2l=6P@(7KKZSnO8MyGV_%&DBG1Xk(S>NVFPs}uH zaA3X3V0Qq$xl`cy;j_T4>ohwe%YE$fuig-U>6W{HGlaspFd%4bPfEEUAy1}j=?-o& zBMhdL_cb&m8UzuT`gc(Ny8AVf(8}5C<2LYO8q-3% z7(@>0ZQg}^8s+SvxL_Ly!))XjBdY!+1dbIKJ#)Ii+EhUCwf(F^#NZdYEv5#@roX71 z7Pqel11|+2x!^66U*DDCI7Rs;_TmIreeL;Vj5=WlD{l?kUO;z?S5u3yY5PET+~a!`D|CYps!$@h93rikI7 zyMP_RAK|}`t>~atz(w213ldIzm^P&R{KP;FCvhIlq~KUyx&6M6%lY(DRluM(V{W9F z1@@Oi#y<$bbj04gKOi9ppHHFYx4!5Y_K-6$uq%_S@?y-c!&4`7A*Gr6)R78)5r6OB zn$b34RW#atGPhhjRcBL~n32);mE?1MGjom5C+9@@%*xam66S}=*JBRt`AGyNI0ZAD zU@EyBA#C$7p?E1rp_ll2Fq)Iu#@K8%mj)6_0Bx@&3hf_;Frb%Lda*Z=@Lmxb_?^@a zw*%(F{O;Ejq$HBA4Ra*j5E8LWwks_XSON+d+$HQTbR~JHGNuIg!UE=#Q8;PVa1CN$ z73_H(fkBTie;D+V|p9IiEo}P?(s7N+H+*emVU`|6>{kp?>!BUh9n~V z=mkOx{~6^@q=cGC!ZZ~y?O?}hKsEhB|5|X4>BmD^-NE-EPeURE_bfTVfI+|xeDY>L-`G~uUueS`-1$p2DZ@hQJ~jJt*(-^D@b%$g z!x^bJ%+Gj}DAgi4%g?b?|o$7x9Lx@w~mWyA&oa87fD0IAK( zWh->eJ*m!?($!rKocb>0&><(h1+JO%&T(${$X)?LS&~l%Rzx&NpI6s<)8`d|jcm;y zcknlx;$@#=Dg)w;p(l9e^vfi@Z^bmjTdT^O^Se0`aBr{9!0=0|oO;l?yc6Rv$U!9G z@&`Nil)ZNyZD*Hpz1vG%^8?Vcws)ENmnc+_&t?DI}QXDV*^?%C65u&*IYiFwU?c2UPWrgNOBP z*I0{9=r-_%GhqcAihDy}T-WDAry3r`aB5v;F{v$OpW<)5@0d2|03~lFF4JK;zeV!e zx$TaM+h1*U8tyqS$&go|Y^)dimr%BkJieDUB%5^_`H(?MNRQ;qgxq|pY>H7grBuI6 z9Y(Jd%rNysx(uIhPHu>2*FLsC*;#r&IX#|ierH4tlz7|{KLSyV7}=Q8=)wS~Vi5-g z9*e8oh&!))XqYr;;I%(=Se{0V1WwWrRXs1mH!k`Jks;7Xlc&QKOXUq>IQ<)9>@&t% zxto_j(ePS@Fe91j5oe>eS|frW1|bMyg!zHYc?gg>4+S#kL3s0ET)CRjF)`9Do;*9O z;_O}8C(4bkv~>`}FsbOfp9C?CDnSu;_-LSw=SMEQp@qj&)AN;tP^)?=ONmj3m^$l9 zY4d91AFz6I=EbWK6QtUP`h1K6T_lS}|#VJoTdZ?O{+=ex9cS3W1)SNiZvFDmu9=XDaTnr`Q>G*9^^&D&#U7)J<=lpJZVjfj`(VD}_LYLDNDB9ohbAn1cT+|2T 
z%xCnfg@c^=qn&>_^9D3;&OCHaIPza-e(6BwvjLgZ4SyHJX~3H_FZ(9V7e7abIt}h~ zZp_jufuwoao4?Y0&C{DSp9zxY(SMGSNj??u&{ zVjjDSQoy2xVBw3iM=Yk8XlodgqQ2E8Lx;G7VH>6RjG@1dAy3H`=fk9q(-3a~#%eV@ z8$&DoWYYfEkl5slhER4^g?3cF0h0k$GIxF&{WP`q-|kIMBlWZwea)y9g6;R)Be26i zhO3beyF+bBm!K6ni6v`A)tr2N_p+ebjmgbhLV&zuE3Jp3CfXuV6Z?B3SEPe2cY<|3 zUEa*kv|3m452!PEW&@7@cIK~A|2p#lAZLCIsa>EZ0sGCF7X>-s^yr&6_Alc-ce^SO|`z2%_7^B_t zdW$nY@5A8{N+Tk=dF_0T>#3*Of^Zvf3w*VY=H?Izpa zP8pr$by6q9$P?bN%^Un!PrU?xI3j&+d))J$D?)B7Y-P?~>J%K8JeeZ8*qV?TXTcSZ zu?5Df`=ezs3po%XNYoB-$C$KJypNm%yvjfC33c|=Fo#7#4$H?6&(Y_gKRYI|l=yWL z-5^Nz%4t--qd_GNWd+`?m=Os_=MJ}x2kVXhLcd>?bxF?~w4*8QmxCO@U><%TI)_p9 zR>-K1zCXp}Ze#bEg02ZC3&GpREGTR$rk{xW`i&c!41v98pXjrmL*h>F+cle%u9|QHaODtbAoSOVlo|81Ipic5R2Pl?^Bo~CC42C zuBx04tI@7rn&%!)n5b|+yhohOA^si*Rl=hMV7T<9zYNC_ zBd`9G6WK$eUU=$EM9W`{%(w<|maBSUBc~jPWz}xVKxw&{_c}^ONEf{Vm~X31SPhsI ziv!PvNx1Aa-{>B%jD-Pa^eH}JO7N~!VUU*zrA2ZTXop*uJMSSksv3!~VK3*mh93{7qh{jJ?al{(maV#w97&;@FDoh4WKPP3PB ztLa`QVAfO`#nw`TdH3ZSj`aaPST*#CoCn_uvul!>Idq)7u&4-h(3Z=|GMW0bCr(cV zjn9|YBF?A%QgIt~l|Fxn|IGH3)YB_w_MRnnZ1cLC!^EU*TE&mo9=7NJrrMGAI6nOv zcnxl4QsyDJgYipRb3K^HpIZD0Ye6E4zd#y(bpD&+X1&^&K4*`lbC3l{RY@Kcyu!+Q z^W}v=zC7k%U*7IL$d_L!nQbG{VoB*n@ewDAZ`?KY zY;N8D!k;#>XAJz|iLt}(L3y%03QRu@#OPDR_^?cc!-W>qWzp)EKvwcBjSkhuR#m_T zfYOu37K#D(RS!=*!^3l+KZ@jm-#88PbM%OvY6Hsnh{FC1&(buOJDp3~>O&%gj0ly>5%}tK5_HXc-|EA> z3U5;M0(-pxE<0{1Gt^=xBKwc$d4lkKpJ58iWGWxl9G3$gmLDQ9E&HMXNV2D~FfBrs zd6ey43qSwQy&qE|5`r=Y*Tfpv&8QDGpWOw_>d#LM)4^{@Xx(C|^M(jd2-Nn5{!w-Z z6!YF(3g$;JF09GlIvm%kr1_nXS)^M@((?zni8gDK0u_V|-3&v)XGSUNz;B1s$4RaX z&AKTJ!+#bCbprHtk}L}5s8qL@AGnyKhvDcA3IoWc3SRmHv?3MI*U^!x1&~+fU0U&w zy|}KW@YieY?YBE)i6SD^80ARNxyzAde|ij1P)Q%<_#T;W*lC^3scYFekxMQyGp%|1 zJict&PJo&{f~^3?O54-@a|(8h#NcUYXVxa7%k-X(DiC(yBpPm)q&r0xO-lLn0x!WC zG;`9)3Vy#{Ac$n!)q_#`LknUkY`8qxqrksAr1hK{#coHz0Oh+;;ww?`G+xjrO`MRJ zZyMIgp)t38__d^0OwS!!%ZiJaxh`(z8yF1eP9Mf-Xl>qtRuDF}P9qJc7323-KJffj z7)aj1B@AXX7#)L3 zXwcE4^3+$Oi_~VA;&s-;}z@qawTPr7jz^Dp~7n-mo2dM%H&Gy z4KU|V>{Xm(U;Uz`I@2F@bwsa^v;DkZcSKSPlf?m7_JO+gA#vjly4kGAuzjrx6BwSvt6W_BCEOxzo6NQ+rCq~GI@-Vz z7kJcCs`?wFx8KX>Ugzpz`*^7BB}YZRf9Qf3Iey##9`u0$D(@DOimKXPLn}6Wb&fhU zGtJ+oCo*X|Fw(Z~;N;jxCAQWvmUwYWo_VY1?)(JiM9F}nxtygn!O!J3uZ&$9Fh&N< zFfc~z_xO+^VODH#y6tSi0(oCKq$7SUMwmhMefI?b)C zs}E}1+%FF{zt06sd0ZaxMbGv;hkE02HE_`IF)`#E!0XLnL3nN#t@DW2T&!vve9MSl z*O{P^^;iVtcv!f9!48>Mros&EKh~ty7nX%CFMABo9xcxZN^;%igRNW^AHb-2njcfe zW6+-c%}JGv5!>2i&wZ3U1%)RsFHT!A*qJRlQDDnB&Gv<5QMEnBC)X0<1v|nrUqWRZ zaQsyDC8&q=w?Lrl0N1#wF~HyRSa;75x3(w>D3jcXVI%;@R*LXcRy?NS+}spj4*3*| z^z%YE3Mq8UoHkmZ^l2k{Qz79!)>v`6eLdj*ErH-M@tve|=v+Y7XrU4oO@IU;`$48M zY&ZhG+EAcl(X>hBh%#o8Uwx4v9c-_)j7n8rKF90%$)#QDaz58JKX3FO<6ozj;!Z3Y zK-K}&@llQq$z3@a*A-dEKC$-`n%>KG$tj)KBYdK!D9U2ziATQ695(bqw!pXdScl zSTUsed7~GWW7{56t|S;v)OPeMq3vrc(d+}pGU6?JopyLz=L`N(#p(RZK|!^o_f1FI zh8D`I)@puDpCXmDbC5GEf1$H#-0?|K)!5mhLJ5YUskL`!#Xi#cNkU^kMZnDrLpV>U7uO8}j*W9`E<=4dD!whgzMC8~5Ku zk8<35S&GjEMT*ae69q5n57V!o8@gXkmNpLQqVu8k%D$0Q78LKo#=Tgnd6a~3?cL`m z&nHLLOq~jx#6(^Wh>BJdYW6iK_uuB$B$5!k=nC89MRjy)QGvGRhbMn|BR;vs(I$lG5G_`gGnruDvli#s_8D?A2H5A640|I2$#G z#u0h1GXAT0CKtPtU}QNEwm3{!3-BqLJR;7-G29;&bW=HwCj_*;zuIP-Oz2zF_%;EW z17r_PrmAJ%OlNXX?2gv_x+G`$R*aa_kn!;a-MOS?188v^cM8aF7LuPMV&y7q_ny`l zr8@#8wyzsQjs(IJ|9q{_Z6x=MVmMkz^`YF_}i$cFr%WYN}Y4}4!vJG`n9naa5Pg~rw7%s3m_;NLNVwvSkQ7I zLIueP>#EmIXr2IwAGb;bL&m$%R93W-aA^-;i6eYj)g}Hes?hDXe3OyBD8+ylRVy9W zqJO9iuu>fs96ydwmb(Zqyu<$0<8+nyop3hmM1F1igGJ?j2%_=Vs&57`CWwN3QCGMo z;QBeazR<}Om|I9f4P_JUXYeb!#YjLbSd+UCtp2_2C?IgHLT|7!kI{ZXHiB(T#BhnV z=?eq)Wx(UJ!%zCzOCFuAab+{gE@%JD+guX((w}>NPe}r$#MwN0%W=1HiabvZ)5O?^ zu$uDHwe)7uWo|Ly)rBPHHwEWkqznY#EYnQvJ 
zGt9Q6T%snKPiuT_DyK!io_qqwPKkLvrv=EFjXKf&_bImSLq|=Og-tO{^dOqT6C9}3 zrC;z?l+lRA12pvcuXI=yNOcPO6KCOgGxfm zpp;-Pz3gE@lxU0VharKmjaHQZI$|!u7upXmgfkeX^^`aoSiWK46C^_IJBXwHAwaXu zpNFCL)#az(gS2o&{W+9SzpgjtZci)9+P&`XaLQNOOz6I0Y8q@*o00q%TIG+?DyjC} zXsBJbR1YTZ%(o4C?no%h!zK|4sk8F+?p6DlQQxt$eqj50MaslrO|F#*T}n^{^_f55 zzc04jn*IPoC(G=Dx2IS!s*Epj&?ZHg&K_Da^E;B`FS_goGxr+vHQFNouc@^El+p^Z z{r@keG!)iXMGq3U9flxwStfgxEHl0?pk8E?cel6eNO`R+sMzsrRQSuM_~IvPt8>|T z?h4j&nOqte04w$^{64jGFnHK7t}+AqcMQT78~ytO zxU@OVMO@wt8k_`QMWv2eFv&TCQ)b^ddfKr+_~C~gpAgJE_8iP)8pFW=(r%I{fZYJo z@KHE-6I*!&dkZlQ)xXH-d<_i9OMqgci; z_6?K2D#fA3ifryb+;tzVLJ>Y2m zHb?AgfvPV9WV|5)j3Tm7prJbx?LIJri0|e0`vxN4P=2g~3{OT1*SJRtjdJ5KO=g=y zY=(OFEz;??%^2P`u7@Sgknl8-G+MhU{+W-Jij{Ts`i6kkV}&80^_Wxk|6Gr$XmdD_ zeCeq+AO+GEy1TWD&lufwS={Ve4X_co2+A#Xxv2Q{HJEtAA>>^f#4`<(cYI%FrTwea z*Y=$kLpQPXf5mJ5Dr07rrmv4@&>box$+$o04Ir)h$ssJ#=!0$Pjq#)Q4>Ej5nT(Yq zr4@c^T=Ac1{!vH&$%EEP=AafO^&H{tkBwmFk4}KfpJfrht|f?r9%E7MYMv?O!ubCN ze+icb^wbq=q%6MVTQQ9*EQy@ID{Zi5NS(@GT749ZU**Hg{Z;&)L+4{hH2Pk-^1$-b zt|uaPk7#{=*50|~yQeYOq&aK!>B{qi;iUm2=FJJ(S_t<~+nWPk0Z2-6B}h%c^B2uW zLOOV8hn`J1A||`6lr2I-HI4B!NBFsi8CTP9GfH{*dQ&WovPdSLZeXnon%*KZiKFv_ z`$>15H<%y*{MGRd0FQp<^M9hd)e3E&aPQ9O8$u}TUP-BpSN{@Y6#GY0Y*}38G22zI zSKbdnNH8IH07AjP=PB~|1Q)>4fp!UiaHR^zs8wrYMBm0&^$eh`yg@*CAzuniiB6&J zXw1;0k*~Pdh{fSAMW5F&*%QPD!>XWUTXy4dU1O>E`riq#3C3RlT$Gz;wmg>i7XX+3 z0s&y%M8Cl=G&opXRsROSpso)H0DlCOK&vtt&wmqOlP!`>zCbggSK{Oj6y48$&0=FZ z0lnl>k5r?Mp-=ThH*t?m=qJ{~cAD>(;L{wAOvrU=A4c={{9UF(AiiFPtX&Y{zEY+V z6{8+V#JNwXG}qrWUEG6&1XmXMZyP3@yLxLbII27;Jx->kFkqw2e!!2plROkmk%ps) zjw`War4fGRrJCfe^Z}l!k!)ozN`&v%OU+AwCh+=n(Oq_ZK6Tt2LENNEgMUntw?w)A zhJuGMN26=W`;CJRQnTmx6p7)VE)a(auzyn0u_ly7W|Qa#scBVG~;{rJ=1~ zB6!l-tqL#I`B?pKJL9gfkQr}2F8-p0CdCojcMb~gj^ry}uGT+5n8pIjzAv%?e?cp# ze4(H%8pk_w_~b3576b~ZQLBH|CMqGLi3N4kU_C({H8WH_%?Zkaup9PUFHlDfh(ZU2 z)J8xdwL(}>NKI2j7-<6icmn-DAvK12P)O|-6jHlTA%=~cE$0wJCvaMo%*-{Q@=I>& zmqjOxB@$k6HWVz_IXtl@=D5?EtN*;e=tDP9ZH#-0CdyDDmgr^5 z=>GIwZZ)T2CUJX*FJLF@;+L&?DW~bAa9vS$dx5%EWA?y#s@t?)OcPDE388cAts7K zkP3yDoCfUT=f8bN)VG6(Q|FDyHeWnT*BGn4b-D#8=-6D6%PI+58ff4WcJm)YzkoyqCK21Vy%Z#sg$MhT!{=VrgM{u0``e4>-jVT}-j>_^r0?zqXmz8wAF zRSJuJh@U_-14`BCW1LzN{Gb7#N$b*NVJnk*w|z00us$UI)Q?G_8dy+#VnQZu{-S`z z)5{8ks~{ivs`I}t>m7whx5FZ{?ZAMfH_r_`GWmWF_hYxqZ@!{zhdRj2HbqT$JH4q=JJ^B|P zmS3=qekZ0Iy?<5s#)qkE{=9G|LqIOvBvR7{kUD2chM zmT*S)yN(8>vkB7)fOY&+m~hJ$((Ah{EW7IV406ni=nRStD)-+R+>Rsvfa07#_PMG| z8cI@Fi2m@RY}7uHa|tW*l&Z5eT(e2-Wz_XShD6LiwN~>Xt59I6&%T*a? 
z)ztr!RyzTu)dWg=85o}kR^QTUK^4BAb&h88ED_ntn19UkHb*_~DAweD&qvQ?4@!O- zr)BvH%lG>$kM7bQHXj+om3W^XhZ(f)1-gL*fN<33chsk6h_=%|F~qR?-Ck z)w=7nSxh5$gpH&AMoMk~^S4~B()*2E7u2ms#3~CuN)6vHPK(ts6}0cvQb-Qhh98Tu zKIJNgWGm~+hGltBrdx5qWNXw}ecdO8b`IoIXDPI9Os)&xAo4F7E#qh8g%?Td$tn7Y zDD5{s+5XH*lKSu*QojY;O#RLJ7fPE6@D~){G{1r3eIRxV_Gv#joCeK;d#A$$?_p<;HWM+WOXH8bxnp{3@g72JjJj;;?t`Q8r5dY1(qg%XriF00hrjoxVD$~ zJfczZ5vhJ{Ojb9}_uKL#YeRqcFHk)5U!ZvWFDU*#`41>&l>8ScF7x>Z6#Id9W9$2v z@Z5tyahcg0C|=-y51PWAzD?oUm9y3eD@c~0BF9u(WJV?McIY(`^W=b-Z&9E8;|X#j z1ja}ZJDuEawb$hOVBz8V{IB-D*HLy_D9o-+tyUF$tdademKmyOlLVIA+mZ6*7s?lL z*T`_>^~Z{lXplTC)ly_NbV0|5qQWD4N%Xi^ntq%Pdgm$sQ?L(EaHO3WMKNEQS27Xv_noKe7^dt2N%y?^ za4|QCj|=~P=Ly>h^v)CX${ocm%70if|H>OHHUqKZtr*C(3C_d_YF2L2bWNVPq}A># z8@7Uq`gBvy9JV6!H2dPlbEQvSb>QR+;^2jHR7#$LvE_+hJAqo|gb1v(nd#nz?9GzO zhk5_?d_3?tQS5LZ;8r+y_r>Q&f$~;Q$F~0kZ`D=uMnp3wmB?mk?)z1%l}w?zM33j{ z2(AMPBH<-2rF50khO3I#7|SE+ltI!yF1Wbowiy1o#95b7Wk@`9K~(q*CW$XbeXqdH zYHfo(<-p@JE#SW^ehZdoaV!h&cq%$nJHk`F6}88!x8f)S+9an=a$>3pp1|HVuSuQ6 zN=30shsc7#7SM+vh*QO!Kki~D^)XxD#feyP9_zA^EK!CK1ce`7&Li_tvm)o(ioNWg z!1y9Cv-0o^{e*bnJV%X^TeZzX$vCNhufKsDqz;f}15;o0eot+mr89LQgE@%220rcc zq_A2h9UVU6=7k9wjMEAg`qvA7jP7z7?rCgVk+qX-3mu|Nb5=3`apxs6A{a!AYNNR<9;71<>-v&>*(JuJLqzyrv zfrKrg9xzxq#b!B%g!_(RZhw!k?$ad^5&NfRMklvt`2!cMsM^JAKap4u$$h88=;f%x zPYo-v*wvG|!C@(zCdV}sv5PC;rVrb=wpOnl+?cL;JZ>k`2gzb;GzKNj%{h z6E0x7^++>m7u0lmID3Budo5f+H{&Usgra8W2kju@=hf6h@TjK0Oxh?0>+YePQ6E~hv|gMLHA3g z@+oiw&tEgrsbqz+|MhrQ#KtL8-9x@=6?{NB0pB`LL7Oa!k3=w8K$}sd$E8HzXeKTT zY5RbE$+@b|_fOc0dIEj25;pB8{MO%SZH3cuCe7f#ZEG{zGK=xO?I1$)kiu;SvV}dw z)N`AV8gOh7%JS4pl!@6po|LR^ll8O%T?&A?lg`@%WUW|pX`FHN9uqx_C=<_0+~_vB zu<>I?u_X5O+a`fRIdgDQ8FlSziJGcNZS;K^bn%Ny9~g6; zm8kqDLnF|-vsV2hF3n7H!Y+Q&%13i~oby4mvpQ0Q+KVU*H2bJMzlDk$HWa!D-=hGX zT+|SFGK57dW=Ooz!iM60*v%x)(8=`6(9kw1thm#BWm(j&I$r?p+~cenGmajr z3(-PrYUc-)>v*V*k|hjSND(??QV|Z3oIGdnkBb&v)&c5ju(D}H({jz$ce7E=;SvQ4 zg+nqpi(cQyXPJac2yYfwObWP2K4AcihYd{*jt0wwmEg!#`oy$K90HN)Dy=geuWmG>vGC$klaYFBLnW@Qi;L+35)56=3d>XD*8$Mat8zpM!`V|2x`vIH2X zlGvKK&s&L#S8Q4+HiMhq&fd! 
ztA_)boDnZXC^fjY>htrIczQjM_vt#Nuync(YY4@^;eKW=AlOFdtnJkruECJn<@>cE zK{k4zK#`yoQaJ9E`8%=(-Y<(C1q&EXemA@h{+@GLie0NlB%;$1L`ir|b}^6p1wy9+ zeZ8Ok)^DgeO^J>CWI9TqAQsZf`GqKbWx(QuU}QRQUUQs;JaLO5n^tfE-b`W*Sj^D< z&_>S~&g}fHkVBsW!GRk{a=eS@pGhBA6_OSRSp_qGjIoUdT*I;w;YIuam4%Ig_IJJR zW)>{mAE{|}o$afZTXqpIi82J`-7CW7NXOW2H}_Bb-HiEN7T)c$*b`BoMxGZ4qXpfm ze?&U(Hc2Lb_)XEmb9t5zQ1LvF5sWscQ|7hyO;RR)eD>}d-HBMf+|56TdXjpH*+R{Q z8uYsu0H*-nvx~W$i{7I`2V;YcNaQRd2*EOLg+EUhH6RDU;5^b5%1fV6G=zp zGI8%SpshPSo4$MfNI+wob5}154|bZ^pLqJp)B2nA330P^=_I5TK!LJ{@51|SP=O+^ zP}C3?#IIZHF=7}XQE_l}`|b|zi56_FJ4k93waZO0PKe^9Ij>j1n`xnnJ&9z^g^u)w zkPS!ZH8G`fxcfuH@;1S%dB@XG0lV1Ni7JKXeLoR5Za`xN?}>n1u$T`Q(tVkF)XsT3 zl#g?5Zl{}J8q5n8VAsm5LK-GZXVSaEOdWzg;d}v@t-^HNUzD_H;gv#7Tf2y{Oz}6O zowpNrkYx*%(SUvxALNwzcYcEJ-jClfV}!&GsiFGxp%3N`mAPk%6iv$#DbZ%Bb_t`2 zVnOeg3l%wm7dg!)e^>wDq$B!40Y9K+_`5MfeCZKFlG%?`psb`)Po|KLv8vdg7C~1v zYDgT*T^+rVr+V)KBCdxrR&1C9>qET+icx71^<|UvoehzxR*MZ~N6VO8Z(&9a|7i94 zI5-O7Iz1x9N5pkvW>WNrk#VhfpD1;8V?n!0-QqtUIU%r@^hr|Tx})lMgtF9Jj9fax z(2_-_bfJP>fbSA#Xt{`zovNN)Du;%>#i;~wA6`AmkOK@bR~2}6i`xYDl;t&cbTkD^ zL-JI@+v=OjM7#_6o2u*MyC-km;6UJxLex0(RFdrQp zH1Nv_^2Z*yqqTcH6$!$~O?TMk)e!%zMt|@+VKQqw04{v&^zw*nbIaLmSBxmug14(B z^?=8e#?-(t?>B6<7)oF3E!L5~(x6wXZ4yBB9=*oi2<7abc) zEU@|{S!%HK4gDSNcYYY}`dy6qh|r=q&-BRF@!rAvW`G25&|jc15EA|a^P(}T3c?q4 z6(?dEkUTJo5ajFVlSf%W&@KNmk=})ivdVVq{goW5jfvgvWV2y(;K!FFNEFqtr+Q z&}VzA>aWEUx8>D#hS^?HxQ!ePHE-3~I1FabReK;WSnhz|*Pb~abl-JHS5Rv>#@Zki zY%8T^bC+duBljn&A!Y3{Q+_%KQn29ES>fg@fGwhHy{6N$`8k>fKk5CH(gZSq{xngY zfG^ilS*T-q6&vs<2;FcPEziOZC3lY$~A5@wVK`Jh_ZZa?Y}>umatjowO}-sc#Q zQ~ig}Hw#syc@KRw_ox(N8-|1A>X1WRkRCZbos9brF z)(O7V`&?VIzE1lA?1Af~4au#%{X}p=R~xuD>)O)Mq<8(uZnHXuo3UZw@8WE7w7x5e z;VniUcIQJ#VQnOR3q7pE#2Cun2>&-3d}w8%cJIha%q9gYWyv7z&wW zLtp}*J=ddMI0-?sSoErZv~^0E zAKC{u;+cVP%LjfO!IsYU;7Dd7xOhU$W)Ko)zF@k-xqhDD{DIM3z*lrM#Djv8NI|F= z@ws4`a6=>E1=t3-P_Rsh)SFB=>t8fX_-7b;2RgDRBFxZjPTFLFUG+%IpB5cjNm0z} zkafbSo+kv++T>?MahAa>y{Wsp(}B8?TnXlpvG;uRt(>|%0} zgV%;Q{+9l1~Am#q9)Pp62N*F+lgqDXpdHxJ511x}J;-bRUR+HA^x-ADo zacZl6P+p*5)D9)96LU9^itnsku$J*QD_;?`w)LIbs6<47p}@e6rV!tsG$YDd5>Q^Z zO{YHNVnZ;do48~pr7~L*FIl5H9?b@iXoOg?_$3dM^}NeQcokhYLDA|0o9zYb zgQr>#^~Gz#W=^M7-4t#D>w#!6Y3!^D7T~)exh{CvNsw8E-oq}mjC;8NmT{ZC_nTik zMMu^eBfN#2-%Eg9ex`vP5|JKhY|C#C@sP)!a{@0k1b$F+cthI8B)815o2_yb%eSw- zce&Q*urDJJptX=!nS@Z zFPc#u2ku@kmS+;Hm_Hk{wWUlzGTFvI&hP?bzqaSLiESUhSKO;M2Sq$G>#Q;3^La13 zaBP0h6ezK*DN~uFG-KYuV6r%C9AfpWht+@;I0smWBQ{~zDw+BXY<{`7-hXN3v>bCc z4}zRxc$|??nXYTZtWQa^pk%3J1T+2p{E5rb7--dJ437kd4$^uAB)j1)XywPvMgb3jq(og@NqnLop4Fo?B6G9=``iCR)H_D^{XNmbNgAs`8yk&n zGjcxN28;xz-wv)zaY~vf-Pk#UB-gV!cw`Z+c>&)yqv-fO@*a~l3&G*L*f78n? zx+h`Xw0~X9h#6z$JexP7**|tA!5XH$alkaWO8^!QYS12e$5oPJ(39^Y!&TnoYewH> z1t{&!?umi1-}f%S?k@AG=U>kDd5rbjJ`B%?w9Fm0dKe)a&B@{qhTW^=6}=yG-6<7b zYOcWwCH^?xw9$xspT3nQd@1wT#iO`Pmc^EKFP1iIJn#Ou;DS7tD`8Jd3mGtfbd7(G zWdUO*w^YT1qS_qAa=@i{e3Y)^_vBgy6C2?zODMkL0u8OGsW2WrGhsjO_0*X4M(>K} zLq0!C{?{RBMxOcBm_qNfs}KQzg>|}`elyol%h6?4L8VKBfuHw6f{+biThFqJ=Rl?X zZDC1Vwf#+@-S8qcnJK&6g2V=ew+e;@1Sqg(P%as1;=W-;X;`=#?esqiPSKT!nf5KD zJe|sNX04N*5w;{%Eel^JQP>dnKFRLT`&y62X53rKP3eWa-^#M3{LvO0imS%;AIwpSl|3Qx3y|K5v%sRaRP)uiE#hrotO>>zgWBs~hei|Eh1O7s z=5;M5(3nXO$mLwg;q_P@p^7QK2vZiAUoOR>fi_8^b`P`1qFU&i5i53q@Q zv*sT*Ys3RFcPfU{AwG#kXt^#4H5V!*^By=(?>iNXtpkD!rl2&``y6F^p#QYbdH{P! 
[binary patch data omitted: base85-encoded GIT binary delta blocks (including "delta 149903"); not human-readable]
zCRdP2N9sAtS}YzHo#uvKZ4}r=l(XdJC#UNXvz+2l`nwg-1?R=P(;hA^dF?e59^ASd zDn{HlPM6VTKb5_Av_f;paB5KJcf-}4n=D>FaR~@ZAR+$W$5{abt#;;1*0*nso#&-> zg$prP)?-B%9w58eq-~cBLiwNjmGvKE=m*65(yJ;c34&SG_oSXwShCWL0CPOPEOJF`Gx%tEfcY|;!Acp&TtExWy zJ>5EUc?h7kBC;NHWw$5FmricMlZ1!!Iuw=JR+hph%YVg^*M%J$UPCmzj1I@yTlMUv z4HCFu!5`Rf@M*Nc3clG@Q~bbp%{)Kq%pg#tJO>n`Bp;b`{{*kJTl5!R7whyoN@v~h zJ6v6(811bs{Y)`AW?avk%p$B8*Yt5b6iq@`cW>(Bil0cUIbtyGME6gy7t6P6q@XUL zH^owDwc=TQ--X1JpJ4)m)Lnq8{y{cuYAM*%(b{G!^v?#C{?uW84P$WD*=;{5>3_kG zfNaMXd`Va7aYOON7a`ZQ2PR=pQJqPSBy=vD=9qa%gX1Nw6;yRIMd$pZf=mejPT*CV zWmG|wbl^_cg5X@Cx3F3)jb_b%^wOjRJa%;}e4YSY5Twe1{ zj9<+K3Z|Yhv=+cY%9A%W66lzOC=pAEBjkRl9Hj)1FGD7-jp;5CZ-PV}(!vK3tY2<5kZF4TFS7i<_vsUY0VKuA6}#iIa8bhx-I1b2R;psHV+*w3fBtJ8Z|T@ zC;FX<^8y!CMdvDE75&o~uj(Pc-ZU98yuJ}^li~5B9IAfyw_Gfi zVU^o=CmzUyt-bl*kBJXcL7!ukqGl9-;N*cdB~h5(fjv}`)h;>u`KnQTdP38o4+GJs;4W(?wPo*_wwXOhZ_tTjfUJuKe>C*#Q*l3Q?AxmQ; z3SiBT3K#K(0C!%rTJIo&7Pz}=kE*7>6IgClr-CuSv z6mHnn-=8Fk+cE(#BB(S8iJBU}ir^Vt`}9G^9vKSHW5q^=oV(^ls?*01-1ZiZ&IZ)B zs`gNk)}7D*q8SfFh^2AXALF)m_RPny0uS~Gm|U#S+BHGv3QM*9o{a%`J5ZQNjO3&-zU#xD)m};oi@*U#2u! z_q4V018}toh~ygb=11xbE|Q1aCbXwin%TJFpcoAZz1eeAw)STJ48#JqCyS*EYA!~} zr2V3bzy;YcO=^CTufyv<_WzsP^+=eA9kUZ%-3Z^0rPV!&5OdC`QuSMe-B~l*0DX@} z!J|I}TgOFf_twC06UXb~zgYe3g{$JL%R#G2+Sc1hhpYJ_Tm#vfu)-ty6^e>x8Vd5N zQVZES`aX>TiqBr%Ms?TWPQIIRqaQV?!%t36d{7;&C+OCo>wz-IMrBuLJoy3zPzXkd z*^?|3L=(sDmfm7B55X4at@hZmReVGU&{&NcZpL%=o65ZOALJOO_nl7QN`l8EwE1Af zMS}OwO}RXj@`a(0r@e%F_SNjPMLF1Q-(yp9yCMj}RA4H2pHax{N>V_q&2Gqbb{tus z3IZbOn63Mf8pY=!05&Z%IFo1;mY?nl1$FyB;plc&CE%{60uk&&ko-yPzrZmyB>+64P(q=s;c0{u{CQa$D&>2D+(L%e<3gIqW~LrR`?nF zX}~k}SFMpIoE62vsLrxq&VuSw_9z&sKwXa%`F*lnF{^dMR)W0bu3phuoBV}JU9x>P z30172dDYABLYxs8Z)%nWb;r!L*e?aRn5{Z<`ly@5Rn#?7)9TcV@!pPn?MD35%?!ut z@B=3{&rlMjvK8#XI~xM?8AAREh-%R@Sd$n z7jZr#X9dLl!+66qm3-Ro{fPM;ghIzbh>@HgWaT@Topjx-AYFipXw8_fd&p(@TWcRXC?u|7=0I&8+_wqvvU? 
zySK6Vrwygumhe1O0#L$9QT(lU{0CMk_Uo%o4;+wm1^dtE*$e(>cTE%oNuc+9BsStTyyu0iqWQo5 zXv_x}SYM-U)%n;1vuZhd?C zh(H`qb{*Zk`@wiC1RLEIi@VA|;Ri-QjEP!3=_)tk@3xYol>mU(01H7Da(1i&44x`< zRN927E%^g6riqLmaWlEyb|R}g_WM$1$o&(LX5$0TE}3qy%44=`Te|BVTc4b}BWhO0 z0G@dH=2MdgAiIE2pS88YvS)b&Gk6L}h89DLn+Pgt=Hg5YjRF}&h{on83R-bcbG_9) z_b{^@O><{U{#j#3J(bMHhJ8L?pWON3a2G1Rp5O;l^DAnAxxFc9*_C3A-hg9$Gq^-d zkQz}Xd%VnfnIDZ@M8;li7l~1quR!40w?Jb*OEjJm%){)U#6qw zSwMSWjpO-lVP4fj?>*Hh;!pgD7e-`m24~YbDTXPR-yQ>hL;@t?i;w*hVY^P}Yi-kH zvhF6r?zV$eZS+1ovby3&h78)j*5Pj*LCch!SoNXs;JJ1w`PJ51L$9LFO@yEwbR4*H z)4yaQAc|YFxblz92PQke@B&9;jmnuhYR1l1{3wyjvsg9YDd%aX8CAMKD)V$cQ}&h+ z%{N%^tf9WtXkb<=d$tr{;^oc>s8>JT2Z%<}R&%?V#XHczXKvJqbLzm*{c{X6-3-i6 zWWfAHJOj*6@c!o~2y@#J5bX4S30v_00-;R){rbhrbXAx()GM$`X%8;)-kNdoPojou z&tqs}QQe;MNZrn|{DX@U6i#d~x-UgYf38BN=`(&73YJ6&P;J-a4@NCh_(UPJN~o^h z>_TM6Pcams=cs~p=F6{`*3_G$`@@1TQ$|xQ8yi-qY|d#08Z-FQEz*Fl>F*ZRtDqW- z<^5RZtsg@zXQGSrS*=e2!0#&2XsSaX2F}}k4h$U>E0-S+J6*reuq&ElT~_lZI8vaZ znSVLs>ypZ1ax(MXN-%%V3 zr?MTlp`~iA|CEVqhrbkr?b8<%sDKV}ayM-b<#1r<#~x_liG3{RyOO7v{TRLShi!>` zJU{E)4QxzFi8gkuiW@OBKeC=Qj1Y-UnWK~R=gl+BnX5cISA>7HLYrIlM4r4}HR!gZ zUK&NT!w@rQE6lQwrWWs|J^4RTT_8M<>ym(%1=S0@cR3V>q=N1aLqP9 zW8mk4?1&vbkfrb;2kW4hltm7a?;|9Xdsq0~y+?is&HNlOi0q}et-l*zDDTs`rYGad zGInVNImFSDSFE{lr+Tq`Kwbm$$QcfB0mSEmLzbtqHNu6(SN6i@g#OYa#=qt$fkwM3 zi5(oeXhN1r6FM#}>3+(_cHW{p-0CfUOd-)2%Jh57xzp8V#jbt$9NQOa3Jr}MZ7W7a zu7OO^RxWlSa^ZLe_ctW!u>W@GXiP8GBLNaAa*XF7|b7eeJ)D< z64co(JMHi02ZuNzVZ!*rvj;$Gx1WZY9a3{ubNgvf&5lA;i{iPc&ze8O3=K_HtmdrA za)f7Yg$#e6fvq-~n&x|?5U3DnqC$#6`b*+JAv}pY?!_|ug)eQ2?bNr1f78h03OxvV z%tfiq&x)h>3f-tSK=T?DvU`Q!+!}yfOPWuQA}#kI#Wbld@Aud~0$)S^Mn`@nCz5)d z={fnB$E&WjBvi5J*UEIPtdiK?%fmc#Ml8+UU1DV1LCxdC&lHTUg}rde#e@AF0S7Al zH@61NgS_pd_j#BWC>QuZH2A6@J;#u>tJa8QdLgy#%Pk)M5fu8(p_`cW8qF}Ad6VTL z5C6l<2kB%YG05YS(cQ-5i`!2Fx;9;6W9v`C_C&;wrVZhLT&y%BbceflwXpC$; zp1b*Oy#(9cce??x-Uy;yNpxh z8lGn%>!&73(;zIW<{e76+n$Yiv-y7VGzwAU&Oeu}tg!kg@j&A!z}?NZHI)XSXj zt{Dm31Go{7{B|5y^sIAnh4+v788jrdpN*e)JJ9}zcy>ijyMzi!0( zr+1Ne_WR8cS`Jgr+FGN;QX5ExExyijl;GI}Xa9FObP&jnt87`p)rb$H@@hZVgc)pY z${C%pWz91~VkPy^a)FGmY2G_AE>cHxaa{4{sH@41N`1!D0V`a)-t^#Lofw(P8=`7l zWUp{~+_tN0aui#EeIqt@9MP{fHY$5W++1l?32c&@`|pGrl|dZb9!;zJU>~5l;GM?B zXe*Fu_{RSl+8Vb2M!c&Wolys5Fmx#YM#Bd`_;AOepqPv4O$3q4Q{Z(Uho#_mv+y#o zVL`!)xYMOQNlY{-xWHw=#VDyk>$BiCBUiir`zkiEY@mRkz5#*Xikqgb=3@1olQGjq z;df5E#`?k}WZh2UFYOq_Z8XTLc1!diR@slk*vQWgLn`ekl~5ryh>23+`%Y_!M$|RJSK|k&(-}sOF&!Pg~Yere;U+&X2{9sp<&I4Rp%T0x9ygN+{Ip8Bs@2+ zV(({vAR2yM_uH66uxn7a6RgD;m~x{98~*0zxT1V0oAo}()HC46t5;% z)SeOJG`DsZP*^xeudwS{)JqsS^x_m4{C0FCPr7bXp6<1$@<0XMg}aKSc@?U-(Bc z#F?V{wqk>!z z`rkG2;Ijl=zF?AyZ#5(~JMT12Ym|tSa!zl0xf_5lDU>wJ4h}3vGBX}P8+G}DDT!!U zUE6j~j3ePh2BL+W$Bg$Rx~8NK7QC%t6(xjo)hB-qbER;kcCpE{>mzchUZSHX3jexz z%sN^o#|%}Imz9dF2&&z&h!;5JVm&AZX7rDva4$Jy`Ln^^D`Gj5EYw2=T@y%T8+f*S zG~lC7?-HR~WTvtA7&hPp2_-TntCy7lSL>FJiwn6-A6iCfy-V0f`8AJ%KQqjdPZjzb zHW6CVh$nG4X4<2qUE1lcFIE=SxuwLphH4YfpKVyWP`3E1wqFMDd4-Xy4um!@8GN^8 zoZ;T8?jscLGc`uPHc3NV&+VJ`XKwwmLhFrc2oy5vMPI*oN)`MFBF+>CMH^RYm_H&< zq5h=RI{Cn3qmAyV=S7ku`-8_fa~wODW_9&6c0svUz(9m7&;R@$!`p%RGrZLNyeguk zz_X!Zu5B;<9&w#!re?!dD^IkzX)Qa^Y#L(_Lqirlz(sd^u$Dd4U(6HC)2d!34}sW^ zrxW*mUAx2Lr7fKVQA}qfpbGL^?oHBbx9^IqM_EtloFf_WyFvnCo1+ zf=pw$_a#-TCf?D*)F7ld*S-DH+qdv-Nv$+3S?y8hhaaTB!Fj8ibyFBgx#iTU$<`#< zZ(Z^^2+yC$O*6*Yb8FvYh@8UGy%VzMWaaFkX>RSgv6c^VGk^85l(+U=Tkzm^7O-x9 z|4n#~S@Fd4cuq(&tk)F7y4mGm4r}*U`%o~)hy3&kfX456&PjhKgXe_abTtjXw+RQ3eUh^k zhkj=F)3=-1VDseX2@PyL|A z+a`A0j#Okm6~OMs<{t0V@`dzW4&!=H=1CEkc--Bb>Vhf59Bt>%DhY5g>8Tk>*)IQS zLnd!7X0nilPLy9?uTe?3uG;)p#9GPAm`SUgb+jBCP)nI#QtD^L?^opQNJ*n3&r$zu 
zduhlbjt-7fgZx5w=fGBI(hd}NYr+G8(hT*=mZQ@t`aCL<=trm#df%Vb&ynItf&}g- zIMgJr52D65-@UUCp1S+0tAWPU82*i#|NeY>n$PDd5}|k?pkr<9nXa2n@EgAX9$?@- z1_t2~If%i&4EMvH;#<_5C792Tfm(!fX)4Z2!%`h!^<%^Y0I+qtr+#9}vsu%nzEsjB zb%`ScG>OuG9L+vOm#T^)bL?_ZP)C1xYXgH98fLOsd2SkP3WrLpYR9<`zaxqXbzLd>tEZ{{bw zl;3goo$qz7dC&1r+kiq!7~?fDiI16A!jS#NQ5=3O8`Rg4-{P2oTMeauVhFWOx_@i` zcK9bPiF;jRdXDGiM$pvBqG#rvBlkX4ZQy>C5nH-Si@MJ`_w+ja^)@|!wx9F&pG)yQ zLVZy9GhYJuT=S=U#Mz!N(<;*+3T=o|YO<t*FjRi^OX~AEUZa@A^r;(`=zoMc)bVP}(~|Q(U4K=iW#36jbYbWP{X$&v(c}JEB($s?PCGja zVv>4y!m}Uzpl_j++!scqUVv2F4THomPv9!*!5S@{SJg4T1mJq$$%y*!GB?Ug-kK|0D{Ka?Y3zXYjr#v3w|fY4vQI?*8?p*vrbyB!Jc4TLLB#POk<$}ZmU>+w$XS@iGS zxOZsx@ii#a7{kX%R~FP;=0VwA#TPHE zMSBuCCwh9unBSmb&pA)h5^pQLB6EWW5v<)3;5Usp4CG=6XHoA9L2@4 z9;Xq#U)&SBe#yS$4AmtTZ(R5l+spmO0Nj-iRV<6?gPTj18fO()v(!#MRZH%Pc>in= zI5R?6fIEVd++_#$2}nQXKq*LQ6+k&pHe)^W6N^TsMgElc&dO+3l#dEc&1!5*S%F1Q zCxute6+xxZf3#_=rK_kI99yUZsmYWlZ3R4+QqTZ@C0U`K@(=b0CY*LSwHVTM+eng^ zOpeP{JAXo`s87Jgb&?^@e=>u|JKYY=i>Xq!m-%l6+$1}2Y%G6ggJ?PPW$$A_!9#!# zLSs_clypjJRU*|?DJzb9CUn}p^*=0c##=EB{|{U57# bdAQgC)UKaZQItwwoWjy zZQHiZiIWK?#>Ac&U!Ldv*1GSy_x|bA-RE@wI6rFDuG+P$o)cNLd*nqA#X@J{*UvuV z$;3VU=ow|0yr<2P5j3*nrY$a0kHqZ4=yPY{;Rv~2@-Wvh=$4YdBa>)wZNukL-#151 zTr|9Katt05ecW9oyw2Z4ptJ|I0n!wjne-w{s$7U4;GbUCJS3n`$!wp%ok0D2?I&M8 zVgoVGe1x5nbO)gie_AS!AWWTmm*wXNoX1Q%c|%F7{bIPC?AF`AP1G&NsCMCaIWoTa z!UUYaTi_PHYxvFQX`q85(g-@5{KGrTfS#VyC9+7Y|i} zk-+OM_6U82nf#Il;s3-*QWzb^$3oHv zSlvD*_i=V^W=yY6pl|=OKph&bD-QE+C%6$t5M#G zr$Q?!AbW~&7W{RO`evw|K$-0oQNcfNdc_~36!2_0G_c?WFHaJ%#}wE9p>G3*N7WL8Fff2#FjMN zIUN(FF30t4*?*HM1hD;b)3&OZw9RIQ-wsp1=W^zIRm3d-y-} zTBV9l4`2OX-pvR>LNU;SH(VoLQGJ6U%_8&>ZfubzK|Vow{AUhAnh>CDXo!C;kX?h? z!_GoM$|_Eul$K-Eyp>xmK+e0%2~oX&_gzG+ihV9TE;9P#dBDqWk}dQTEAVmfbe60( z$*m)&T9hWDbJJl@%7bssqr6OSCSN))7lvDq?+82_cs#1RDhD9mq7N_dI$$N@gpJ32 za8KYo`W!LsCp=D9wV4rOulNzK+xJLmmqu41;uPZF){ti?)H4E^6z$Ltn5F|Z(P#rj zqGsoUx=LUtWaSnwX;TWGf=+&hw+IX&^F8=O{W|hQ_^Mx2+J?fXb!_aYlgh(0)*O%J zcRLpW`2F{apksTTh;=w~GNjt_#7UDp)xY(9KJ6|l;^*Iz*S9uN1W7vqTIo^?U0kyZ z+*emi;yjBR=9`db^szmC^RXeFN!Ir6;fuARFIG;h3QrAzl0y}QBh-D|u;C-PfcejR z?-1R&^N);ldXpt}4QJ9U3iXAhQC5oqRkvxNW7Bn|Btp<4u=D9eMKZe?nB>a%R7+Tx zi!H>Uqw}W#!!0xS^)5O)(-K@++%hXR*|m#RC3^M_Hc+7C zb$=r;Lo>;N!giZf&g(2IZX}ZJGn);Z(6p@z-XV!-(1efZi)oqV{bvk4EWIj&%y3L>WW2UWmbu|OR_TD-( zL!QfSJgRNjA1Jf6$2lZCmfzct?jyA(p+D7wJoh)HLh&6{ua1s9?5g>pH>1au3af%`*c@A^=NQzgQ0OW=irL?&jtwj<8MWg#E}=_mx6BN3Wr94S*+ zyworGMjwjq0jP$%W=i_%zpnvVpD2qeZ!=MRza&30?xW71!v=|)TP$@#c+!#&34Ue{ z=>*#g1E?Z+9#7fBiQj!F+#S{_wEPOa^!Xd$1HNtK=?<4! z)AjekAU_0KvC~^zfHhDdbPo(n#2zvk_t@~z{O&n^x3@we}e8z4%u z-cv^r#YiP;)xvEMlBxzEj&&K;evOQThNVX>%Br@5P)55-T1CPC7)EK;VsvoO5FCpw zFtQoSGTAd`;DI&9tk!x)*ViLknJm5<>IxS}OKv|YSdo-g9(8dugz+?OFc%~GEbMX^ z(^#9Nr)}lWj6|fK#JanPCoLUo7kS>BnuaxNa3IOLwt0_8AEW}rJ5?>k11fWG8(MRQ zqc~_&R<_Euhr*C!yv_?6Ft;rZqd31BlwWY%(78G=q8RyxDLvg|RsO{iz-U67cNb3! zP4WppW5Y=b-G*OL>>+6MD>M0Kjsu0Nwp*if&sR8c`kf9bah`%KZtwSns0MBsUB#e} zb*N6Zk#Ob4>7K@k^0ZfoVgrPyO{#-mAG@6ombqp>|@bx(Ein_j=Y4k3M8 zXvGzC)^xfxZDeW>vmBS!U_5W z;Ja|c^n?_CL=5UDaCRNQ@9KNKIq-_yXKMM%71X!Z#o0Sc)a%7{1j-a={s#gOcE

    fqW4 zcQ}Id19?Xe$M!S$@O{l_-|No&g=6qaFIzkf8~Zn7T+jqLz|+~@nM(+{d-6QL#?IZ> z%hM5V0A(raO3;l^S9)+fa_HvhifRBQnPlbZ@9KxDub-z-H@+67YQzgWJF5_M{1Tp> zmrbazXOImxHdR%$L)sl)m~=PsZyB;)KjC`B%G1l+)g1~Sn{u(L=J3RR}GU4UwkCw=4)Wz2&K$7%T48+sH#od{%zz`R-W9IA%8k(JBXegIa zLdiH&YuFoL!UbDuL><^yTQ|6KOw-*f(9`LEbIXGO1k!h>=k~SpnNVlTzz_Mmm}X>!LC}Lh`sU^C_b*5;I0-!&D?2AguB;=cNNyfzJmK?+!{s#yz{dF2L`ET>Zl)Ae%Ud|8=&UK~M479j z9j-7hJkR6p&0fL$=?uaG5PKk=+4|E*i;t>i9*ZpiDywoi?Ve4RZVwA6VC?+RGt1_0)C)&_Rf)zXt_CwRNbVKn)6;ThYw(`K2GjV zuK#XroxS)w5GEk12-%|gkE)8Mra+3tldvU|P;|DIN2uBGsXCfm-j2O+521VqNHN5crMJC%5_=zh>qy>@IoUJ|qP#!d8 zo%YC+-IioNAfp}N{!6p9`V?a@S}E^aNLS?o&V@WxJ^7z2O>pV zkOz7m&P;7##`4s~YxeRFl`yA)UJ~*2G58PHYepxGge9LBWEIcZo^2LfNA1w!LiXa% zdjm|%^`Gm7X!d6J-#8|H+L8_c$=d^hnS%%7Q? z4MLJcE=<2L>`HEqbP8~5R&pk7ssxl5Pa0T%03D$Pl7 z%a)XvYPI@)g|?LTShW=?;~Npdm6NkF$rr<=UCW&75xg<2=(N%;=pG_H($zUyVp|IM zOFF!pkoN4*v?;1qW2Ty&@@%JnGRE1<<&)Si4;)Vi&$?vPMvO`))?3FFdSW`=DdsCV zRMq5pPg+Xry*aG9GpmMqfV*sQ+vSFg@d(a0?M6i=uJ2r_$zG}Ct4(uZXNgaS_vWxk z7xEmBKez7`_@r$cr97lc`(`JS51wK#woRNB;ZN(>P6^gS=H&yg1MA0Zwu8>!Vi~}> zrAFu8lcCaR_jpZ^ZZ)7w(TwnqN&bF6>9sQW$~;a`p_Y)PjWV!ZyM&e~|3V4_`+6yP8=@=niI!v~ z<0RMkjem++=sAoSh+MN-zJE1C?O&ptk~fgGo*=bWO@T;}FF(N0e`m|LP4I$uQTpy% z!?raDks_+`S)tREFOx14R1yO!A0;JBZ3!2PHBx{Zoc}a3Q1)5X{AbKF?(Ain3M-#{F9TZ{fsr0A>{8g)UA0uwfF|QxwZM z>4vdSRWALdvus$0^~Jm128KAJnLk<*vPwSa8R!~)^>*3)0+yxBia&{)am=D^=fl6M z?xW8kl${wmt!3ORskEfAe@zfu{v$uBl%`e@=WNnmGh3-ASGUpO_*FsUU~t=JX#_P; z;$2dHZb5+#kdw&6!ZXci$Spg-vyeS_w1jq=gxsI#KC;;ak1TRB#r(_+=>yr@0qIVg z{g##g4lgYo%XqlEP!i%kPyZgh`+NI#{Pfh+-NO~U2L|Dvt-lrJLTnj_1E^Ow9`4*n z0DbALILAXR+eb(ptgn$Gcs*l2Hs*N6K%}b;fg5`spf{#FTUt|7Ymr(7u-iKKxQTc6 zl4=i?)(osTRntP)=)1FH8y3_UFZ{OFY_e4mGkI^N(5^kGmDSnV)+=&hS`nhZ!wY+~F3!0hd8NG8U6_$+21b!m6rwI6Bk z?j5=xX7M`f0woFDp|xLPoC;pI;U_4ACjSedv;>&rw|VqFdi{NuGStJ$%u>f4Znu6( z@)Uns)>j(*`LP)0A>>zLz;Q(8UQnKgP>3O<3n8#2P6@l-6eGbZ6skzOa40v#U4Sw* z#rfo~{QX~yuhqJO6T4A<%N^avjOBVH4BRy}V?|ETs|4SGGu%q5TqqO9&Ry~WS zna~NZCSS@brWdCv?8UNDvAJ-?Znxo9DZy3a9!jZ`r)KgZBEm9*ESp!-n~23yowCqs zOq{Y7ZRxf-XVXdHU1NBlNn4)5N3a2^J(|09C~9Ceyv|#-rW%Cjk87GTt=UvPZeN0Q zRX1m_^vtWOzeL3y4YIj-<|s3!nML`I^o>b#wa}#9uKW$HhZo;bdE@NdD+-Xm; zZ1)UXxd8tlbJ?)DF5Qc9ox3wB`O7jf^dV$0e+ z-ph67xJh=EGd7q@bio6BPdGd0ibCUOiCyyT+wadi;BDJ;Z;YtaGbx6kdS4Vf>>ocM&B*p6H?EtoG60M{-zDRR4_U?7& zmA|5#*QtWnsft&-?(zUw{WwwbP$|3oSw$Pg-`@4_jFwRKA~GRjPJ+@CqM5?UN*#h8 zyH5tYWM8IINv%SCm6F{seF-pDc^)i&fedWwBiB}jcpj2G4tP$1lljeq4kvnts-V;Q zHlliyenSpk7X{42^C{_k3?`kgfcHZl_2rqL8=sG`AQwP`OBw;-)ZYorlIb3V$|k{s zR)d#SP9?rkUPd#q7zd9GY_eqcWmr)oY9u_lhsORs7#Q+<^okd_c9{PWGXfojkmn^| z^u!RGDxAot);lT_;cuHrFKlUQX#MK-D}m3`nUkU6;=NTbhu3TFmyeUH)ca-sg=FmS zqfr}|{gw`g-Ss~}-~P{u*Brv_*Nk80I1xSTzo_V*Q%>iexIwQ{c9bAG(a8KiEX%%R zT$vUQ-p461*KZm*Um*5kIP9(s7OjHaB`)aYP=}otp?R3Ex9%baegVGLhTZ3a4kE|( zJ!LyT8_-^vsRWX4GqOTbLHl|n9*>@UMH#UhmV^=Gqu2%kgjoYrWeKm1BU=mU@6#6A z3}yGPTFPEt7p&iXm#}~SfYr6vYH7dCJTjv67Zi!ztf_L;k9smJ9w@=&03Xe8-Q7@c z>~ZDcuF!3nN7x%RHY`E4dIEWRH5QmaqqS22Ni9f!x6!~4v(3V!T~;VOgXS@g-2j1< zk`o2{?2%6c;G=>+W4=`|kDarjzHYo=kfHUuO!><@9cq+MNVVNBg}hZvD1c@Ceo{U< z<_h&EL84ZZjsLiyqlv<)AF*r&Q!%QC>inZTq)sRFBS)H%l*jxbikQTCJ_H;2O4v05 zp2N%BhL(F^(`0zsGz~XDY`DZU-QaToxny&U=`2J67~O~QBMQrVJ|U?#Wt3VQ{_~Y( zlxAd`*D2Fh`WCm692NtzV9Bd>lVE6-Q5jbb8603Y*ktv5H|Y#DKJ->C)m{_q`W`%#ht>X{uMi@x9_@e+)DOPCeeutC=rTfK*p>jbsOIOu1}%wPHUn*a-^d`{A# z{tBYYFMrI-bS~s;-VMfn7pJ^*kMAn>a~BKft6R^A_GM9CrLDXQ9uFsfZi1bwaWkqX zU>5|+Y#85sSZ=`e!s+(wJ*TS?70tTQzCAK~0aaK#4=nKh<4uqWS__T&c)7@t7th3h zHq}(wG51KmCrK)9Zp{jqEOKF|rak|4qRgE&8E4or{;x!- z5m-#+=qRnUBv?LEC)t=n*O_FIeuGrVPV2}?EQO$$`mT+0-+sM|_Z+O)XHK3$OAu)g 
zFPH}Ln_k5!Mpm(it2GPM|4>$sM?fUd%89HRX8PqB6f~2DCwyNjq_%SIEhy(WXuZH& zI`o9)i-1n$*MwunbIcjIi}2gzfS)ac3a}kafr(v`$*d^K$HIZ{*DIdabgIv7FU=O3 zy%oMfgF}DzhSK#~#^Q9oXtSZ0LXBY<&KTDi$j0w>%e3k~D@VgL`(j^-YMGU7 zNnGv775W8d6naBZ_N-v6(5AC(QL06_9dSpk2K8i=qyBS(AH543M25@bW;7!s1lGr; z4$NYr(DYQdDh99dL~j~OxGNSi-NZ!+Wz8{agja5lG4-a3EAvyuX?Ju8i?VB4lpYot z+w*3_SzA#_0UP+z8~lN&%L4K>oAOY?9hvwRNK>Ze)aU_qRW>L^{WTmpw+s4+vY#!s zJ<8Q4^IdG5hOtBa3;GaPachDa0MBUW-pmA;ky$VV&ymPs$c*-x6OHVj=PT~ zrtxP2c5f&57oS>>+t0{fpl$18Rcst>%#v#96i>Cn3utG_X_%w4;z*SPN{qh`|2g47 zD`>(IVY_UIW7?c!jpXrK%1JowTw}m_i73V9ep3(`hy#t;Vf10c;ss|A1oBB`KD%+& zqiBM;RLaW2--E0z{%T!-XYM(FLj?$Z*8->&&N3SB-NBV3Wz$AvC0m4wdLj=HmGq6- zm8;-%Uk{^1f|XP?UpzGeK0P=`>21f0{3Es0F$u1CX>NHUi`CoAH+25YMB?f2L&^8DS zrU#0OJpbU#7LJB^5Mj6$Ma@`uL-KRp{*eXSk#XpO6zq<~pIrXr_2j>v@9&B!h>7|s zdPRub*7&Q7DmGm!d!QdXXm(ha+!LEu0k~G^E2DJ_+gf`(SD7e~1STD0f|y7WUm6Tb zCHT8Z59Fo1;>LhAdlJHYqkYcBE0Ewfmi)#cG^&ugFE2KXcFLh0wA=bvgUB8(h$GNL zkwVBkD@+i-oSh$K5V1Ee5FQq+Q6zPMzX(OtD?RIUoyc2B+0}zZ+Orqj3EbLd7uvH7 ze?u3puO~d&Q<2gfpp4z_9kWO>1xv-$&n}j?3CqCU;Jp|Jr|~npY;`AdxoW|i;N_Q{ z|3`BsWlE4Cc|^h-Cq*ch;oc?G8&XI(ftS?f6zWVKJA9e@CxsS(&JpO+^+2%U8{{S2 zC5XHBhnMsev>v-!^#R)}r~fpDXlD)Q>(vgS2>$(znlJVnP;}GpdKwm4TFP(T|B-z2 z_eoI{ldLscxKL6bM|>oP)nBvOgn4&$n-47e#n@&X9J-N6LRl+t6GxdZ#*`rW!z+XQ zJmx!=P}TaU$g=qmPHfc`9C=8%o-5`B$SbNbXel zAF}LRD9T&i9@c4}E$(<8zON>NH&;Y((H5UNIV=s*%`e9jRh)16DPxQfb6p;NQJWrr zy?}>ceaxMd%A{$^HvhkIn}2jM_DN_Z_D-kHAkLAmfY3a@7x7$g6V6{ZZ14gX!P~zX z3A#-;;zIqQr#sGki%}H z>#Uoqi_6_DQ<_>^6d0HJ7EhNsYG*2STuv`m0UPR82cnIfu^+&21ZET|u4l;Gv$m*a z%IZLfKg21YkA&%nj3n3UW(avzdMmc&$g!=689a9OgBxfBW}DaSc;Por2tBJe5GVb_ zZlWENoxhPW+*BC2J7W5~U)m-ELrz$li@gPb(`Q$#jp$%HpC923r>O(+`>(fTNq zfP&@K2}ZZU>+uutVw%s)vzAaG0+7iqgx8v&R)#6yfv8W zANEY@X}5mux5Jq`ekl`Iyy2MFV8*&m0SMS(R$&9mKf*^28i$Q4(pAEPFVtz*J55JL z=CK6s5VEC7e0Wr*k9foiyvoI`vn{4ngwq_{>d+|&oABo9ynCv3S3Ro;X z@5a%%b8yZ;UK}>slQZ+{VV$tOw7|WZ?mN0Qva4kFtUIcF^$n3mI0iB2B4oY zE39#2Dnr|Y*m`U9yON<->t*4wVQ zR}Ha`T4O%d`}`{{a$f_F+-`Vyjs0#jXGq*WnBB~9>_4(4t2Mp-tMJ>jX~ zdxyMIn!+#Gs{K*wEsmN#f=L2kmu}3)qm-=Qf!|rOB{$f6j2426&oi@R`YnFjy{ZH0 ztefsEQ%2KZubyJ5<;1>mUp-!RNrbA=0kxgf|R93fW z0at&8y5aI{*{AaoqQ?c_;keaN^if&-mg?x{@~X}Gv5ia<^JV;x-Gu~jv}MDY=plRQ zo#Uq8m(ab4dr9Wf)`It6L+26pCjso+|F6wMr7`-lc?qjBu81903%hf;v0Y-OYB9`} zqd4mOy5W_~5(}1siKCj5GnU^qB&KFpG*1`k)HQ;tqQp+N%bEOH^^-cxS5f={Kgnz` zW{3AEF>li09P-K%ij=&trbNv#8n3b09ZQK1ioAp)?~mab%WnA)nBa)hu1s^c%6em9<9EGprJSzZKm@ z&wCMK=9ES3@G2_idDZpnqryrBop2ImH2pz;gwM?tyqoh$8mh>3znSK8sHD^vND`$z z4W1YujVsC&2C@?Z%6+K*VJZ~fGKFF05kI$a7ZfV8T!|(DLPt?V`H`82 z89a=HB6Eko1${)eY>Oh7`JX1cHVk3URW%4^M)#tZaX}8it6B8eGIr!x3fFQp_bg!r zG2L{@U^uyybLjg#E#}O0HSj?BKQ391jgdTkkq&D9yFRts}4LYS@V7l}0|?+1nxccABVm z0EQjL)XX8T=W941^rO4hRYR#3!$6_77&BGJ2~&?Qmb^y(c2r#&2-$*LmmRmDm!Of} z%48OKYP!$i;75{ z0uA+e9U${3)Es?8UR#_p|L(5uf)TDU&q4q}#8;%yrT$A!*!&ouNy)-XT2x|oRg_&6NG834hu;JTcNheAQrK+r{RPysm^4m#m(mrjMGl=9c(5O5RjEISGlX)0K~ zi&@3<)RBijwR>Z=C(wVn(i5@U(PP{6u-nr0X4IG|XBCtd?N0MGPw@9BT;%0NgXW9| zC@AdV%vm8EA6Vdm^B_{^qd!@ji}b!KJYlfm%m!DRB}B*S3ZD&*g^Lzyh&gA_zbz)A zTVZRAO@8Tb^BPQ+PTxej#b#>J;JK)-J?Lo+&sFFm?OtZcV;f7W0;7DCfAKpQIOPxS zU^SgHLh?vhHDfN*xK=Hn2p8qq>~#3QVd&A88U&TB)TANM>e){;_2X0+vBzLrFo%3| z5BT^h>MbD1t(N1jDL!RrmbkN;^>nv;mC@O@Eji`~k}l?xsE`(33XPBgWU)Dn(U@nl zRV!X>xq279Yu}k<4V0tQLtm_}G94Ol9kI$>RrOn^_ozrK*z*Q)#HWZOK1UL8%O2zu z&YEeOQfN9D7PO7WxM=m}f8Af@<|$Dwy7v>Xmy1(1b?6H`;lx)jf=&?PQPVC2b2>q1 z+6Ni~&{s|lQ7rQzAQR3%2hvNYAcDz^m&zbU>GU;C4A3NY zRo$nGJCLr&w>Qr_ompExLtY;*!XCdMc}=$?S@|d=u6}* zs}viU8N?AHwYSAfI~&>jw;nGdVMLl~2w^nPXTF2>HXs_llSk?d^Pvx0zY8~G%xs6H zx^E+DX{$mI2NN(5`4*}VQJBm!1muIfb^pe5VwFsIcM}N 
z9)DHO<~Cuc&w`JE{BHdTE;wx#qA7%>ANgn?nBGjI&Bj=Y{T}^<{?n)KKp(HPk$~nP zxLtKfzL30@v`ygy>-%@u>ih#mgNWOfM}1zbM?l|CV*hr9 zR$Heq@sX|oG>c_5bgW2kERdt^=L!@;sE{^7C;kx39a^XFA@2u0h_A-Wo+Z~+BojKZ zloDBQl-)>s?-ISq3qsMyPGnbV*DE&35hmCvh00o%bpNuJ;o}s|r|72>cW&gKn6vt# zRGBmuiA)hsff0y?-1i0tMN(9C)W460Fx!F=R_c)^Tp8zbrJAmpG0KTax0`tk6|8?tl^IWI`CuWpjee z_*p(MR5Y0vXbYAHwdOwv!qH|aN-35vvPj$jI&>@jqtNj+Jb$_!FEX%*Y~gdYI$&Gl zL}ZH&bNcoZi48yTHp_d5ZpC9A-teCCDE?3WJ)W15-x(WlW7qnClmGO1}mi zEh~g-HYg2TaN3x_l=B1T`oyN38szF`e;UB(O8Vw}>8~QuN_c(MuOBQ|GaMNjOTOB2 z@_ihin$}2OQ8B$2GZEcxik^*CHsUMPz~T ziMSTkfZrDpy~c=r-+}0}W#gm&*aF`#-pC)fId`#>S`10U2?FZ#98FWnW<1>ksV$qu zkJ1T(fq&zCEXWaFVzLa&Hmshj zl^cq} z>v6|>xzu+4_JPA$L+i8%cbx5*Iv>|>`FGYxDJLLGA!T%cdsSbEt@=#@7}9S+d8<;% z*@*h1+0Cf&r+R5**&@nfnAox8hIZ0TnguK{DsVf2J<;)q@f`)7*6hCgp)8iI23M@*^dKV68`@MJ4J0@PEaLBLZ(p8 zoRT8D2G;^Ts~&zS0hDA830Be+T5ySp4B#|PU!$XpFFB<*4NA+_M{Jxb!4e;2md@aU z6CAbRQ5hUKFbL6&)ZG@*8^}tHaE#yeUo0dq9-tBtQZB8qfG$4KE{E;$P^Kq8Dn=&F zO-^mUM>IlWHS(5{vFzV4_%K?pI*8Crw2 znlUlcVhHUFH&#Yp*Px1a)fvya{Z!>bi^jCLnptH9huS7Oxh2&qI+?v#O568C<6_4P z%muean{>3jTJF6f!6ddCC|3XNP|BXUsz&3^%hW!nGvqV?Si(r*QN7_2#k*PL$kspKabv}cqQJ>)G>BBE^PWKKYV|^ z(ml~!rUY-t{HWh$j21ZDgoZ>s5>3PF5yvVkdHk~ zA&Zc$4xNt!VX2_@5gUA=P3jzMc8P%#bh`=aw+B+NeHtO~A$fjJhi%iqqjX|}od?T; zE2hPLQEIn?QUjuluA0s9KRy={xAqhIbxpNrU#L;%i);CxAlZMpZd~HPP!WXf<`|f=gHQVN zks)+Oz#aV2X;{s!r2FrXcc#ceIEsPrx3SqN@|&%FhNm**t7C_5FqMl8=U^Oh?WJX{qyFr@5lrlps;sjY4k);18BDVFTpUasKXkyQu zWDO`i?PG-L1Y=6x;=`abf+zC$XUPuD@-k7^Cy*rQpX`C{IjcGj!}b4QU>+X|i6gql zVFK{%)S^d?Z2 zliqUC5{!nrH3HC>LO`2rrd8v6bb~8y1aj+d729nAcN82^y$J9}Eb;z!k&n0YhJw(e z>kGO}d?;m!*)1YQm>%n&Y5&xoa3nqRUz7xV2NbYfm6|Y4-cE((jmTEP7 zY7K7Rpxv)=8G;Ps4daYi~b<2BcZhw!jrKq%idNNQ1aF3ngZ7n4R7+JU#TpYywlSVSTBPX@bcx?tw0YT|i$ zF0wlz^&DxBW)O`w!LIeN`bUi-;E_i-r}Ql%VBc_WL;Dt_XpJp4AIen2B6H4qe*rBd zH*Sb~YpNUaaTmqf(!Uu71a@HldF^>*;`xSXXG-Xm5wKX>)9R(CZ*VBeUY0atv&Chs zMWC3|3*5|tfP9j0iZry)*JBz`=8sp@B-*Qq_W=v$dD8_M32vb54K9^qOeuhP!egDJ zQ-o7A5OE2i4IV^HJYV_CF!)$DMmg5LlO7~1QPk(F7@P`f9;+ZlR|M^^hzae@Mt$<# z901h^2lg3qr@*)wK8C0FSRw83w`V7vm3<%IxL+eQi)2w;@Gb8CUSGM)yuV%EKOtk8 zY95r~Mlj9>V~nmBR#P#ebG&970sbBXy^n^Cyhtr5x$gIzO@hhTyH;VAe~f-R*x96w zC@WhU`YWuK2q>s3t5!#X7k1NohV2p&`SC{g09>)yQLFN-aU@oeQ^uZQlI(b{8fx$+ zn-n^Z=RLsd!>XM4-LzQttf!)&pYMCL0rLVL8Txly{ef`t{f5GX^ILzkho=fXJCnm= z^D@W6$0^d+M&vhN*ST)VN3gfh$D+M)yBE}*+p10ZMMER;^~@*n>wqM9dY0V9`7e1* z;LMk(VBxMZ|8FMovaa0@GHeF4U34vdoPv8-wUfC1zL=e1mp@3tE>bobE^o>PiT%TG5r7dNfDB8|d(V6)RgZX);9e1To(BDo

    Vfh6}dK$>hChbdY70D3PFh+)!YXjFU8g*f@qHyzxZ9t0!Z8&l@4@FBTx zphH7YnAeoO<99QA0E!JZPoIeMAm(PNpb?>Yn-HZx)u(BXh768`u7V;68meYcl4l{E zxVbNug^5Ak`#O6sxcE5%c<%w9i;F`07uUcidNOz8hbGDSDNv5Us8(YpFv$Xxy+cDG z?C_x|q>w?GP0pgA8#r^ZMWpUnWB*tDvF`;AaD7# z%8@cg2%^(5Qiukg#;;h~YE%MN8b+G+rP0-8nTfl6?h{7h%Ts{f#E$i)UY-^6_uSr> z`=f8s+D>9VBy$07X@u_HS$_+9PDamH*SrgxHo34LMnvebO7h%BvejpMrk;%*C^Vd1 zHz%_WRG>~oj|O^7u|C3|m!K9YM8eEB+V;&BTSW14@puTrM)c#}d53`17W=Kr?`=JD z)3*QG1{0`l>m7Ai={NXOpTsIYh1MM#$xY=}6cZkxN-ZSFM&AdSLK@yd$K$I3z zZ8<=aTHpFvUtCmXjxDZ#(<8BkQT7ePQg%~X;>@Vf@MSzfoDV-IgKu@_hBHq})XR>B!4%GkI4ZtUDEphXczr;})z$0o zB^@N~$-~XfP4f33`KPd_U;aog7x#C~xz3xF*OU9>JvcHJx!Kvcnc5i!1E?Rn@Pn~e z9kcy{E2Zl%*dzDAAC%{Anp-dN11Nf7DTfka+XqtSSag4}())0}Xk@LQkbblWuW{VG z;*+mlk{NdHJfWV+d3K4n=n!h@y=8Kb@m5r?``|uvU$TPDkt1o8%q zLeXxg|b)W zdAB3WXE^izv!98NHIe1Xp#p8;yBy*^N~j6i=$nLT$kAV63}9Xb&awRI_u8%(zfZJB z;AKBtZR%SMD|HBnyN-5-tF{G&c8dO>PHC#)f|>y!z4yF|NlcGUIgbC<=2=R&fH`cK zhzf*bbBbC{;1a#kbFV4R&{jgvsv7w8BSrDoG4f_}Q z$GwBJ%)zT_qKsk3608$07@GDDn)Ja74tamRI2_U7Pt4bBIM4Hdo6&yk!U%%TY?ON} zD|m?EcQ z_bLh#@--dO#dy!QK#z~dG5_<$Bc*M9XB@o0J&G;3mKwyzWM#i;znHmMQkfrCXu7lm z|MHbOs~q9AG!gcd143&q1hKOdosBIK%3(Ut9C`&cf2Ht3Ps@LJJ2xcCX@bJ_0kz;% zEbXuPy0;F_h2OESKbok>=y@qt@$j(**>l=#k;9~C9B zc%Rze2Hny9A?w-))#Wntrt1OPcbi>GhOm{A^7|n2WJa&WyB3k0NAypQ*beR{{AM#o za2dvxW1bT~xQbraY?`N)UQuwY0z+kBRDs`;i^$%VAPz1f6xo7Rel2O%^CWMKmFim0 zL#WK792&9qo*DU4>bB;dm3-tS+`azg$;~gOz!3aBhT0*|FV=2Nv-;6EZ)n7}byc%z zxJ~|1sY~Lq2ILt2I4+9Wwxc`>g@xS~Oe}S{XRzF4vs4I;o3~|d%s)J^&)pZ$WC(ar z;oC(1kx~+5K!v9NW5b_)l&*Doilxf)pTh8g?v<6=Qx2hMNY#o%`D9a1QY9TKVwD0! zDB-=~e-}&V^lT`Oc`Gnpy*J{TdScy!UGJ z-=#U{YV3ztK3Y9ls_)ebDLofJrh>|ScSeaS=tniyEs^Djl1PUiO{l*u>l5ca0O@eRcG7C%tN`?aOEHV@O9fn`pO;T>F z6A{GHguW931M(n$kZ<>?@dO+T5*!-i`DUGo9wk>PRKm-G-@Nn!uy$>MBxB20WpNwI zs<31o3BU2V^c7?ZiaSZWwM{uMdX07M4=^$}lXJ~D_GlAKc>0hp@qY@O(3zo!MkUU$ z6N%`LXK+Gsp_8zo)SD;{Qt_7r4$^cOcWQ~>X zRtkrpPd%FLiu?kBg4IZs3j?Fg{aQd_xa0#G zIg1-_P#o`Zb%IkMv&kGxxzo9ZG-;|Hr-f^eeu+xRNtQ%Y>~8j?*9PF2S`Lv-$2%@Z6jl^j}kUR*@%YBMMs z-}rD!YKdicO0-k&PyPl*oiDB!2#9!~Lr6cv1d-xH+~|5j3;TB!OIk-gaUjY(6fgVF zYDp0!5%=gfK5ZtJRw9>se7oelU4(c`hrxipU^~$cieGCQI6z+=A%~DPM}75-+}9vv z62bum_Xw}o7jgog(9iMxY;T)kKRSj(4^|vfM4iT#)y|LI}C&7vFp9 z9~h;Ca;EQNP4lzupRB(<4;bls0SO$@;}$n6>D&Lcsk{UTMtL)5_VpT5+aPt~Ai;ZU zG3U;vnTXZyOAgmtn!perLE>xq5b+2OEj#@mw%#c`kg#jhP12pDqmIpvt&VNmwrwXB zc5FKx+qP}nw$;JTH~&AgXV0F4syeBvv%21T?|ZFhU0_t*SK@D@$~rr#nm16Isd<;# zohIC*{aV@^cE05!x*<>E?yrqM_$J6|Q)-7p6{K(z2HN*ZiMNwr$qnL&bdwJ5DqpQU z8WFzzYXt1PV$(i27I;FRg_^H_3vwo?%uh#JLvtZN*UZ7SwJMg(m}Y?1cE8#RXzTuW ze`p%rS#ll8rluT+p?0#o!rx>WIS9vQc-C1hi*DWVkj`dfJ8;w2yX^ds1t!qkEQs^l z=HqoT;E_9e4pt1F3mUI#-(NJk3AMD@%4LU}F+ZEb-1r#!6bMDcVJ)Gu=w?&w*L%Ea z0y93R^==gWnC4$UJ!J+_5M1X$G+${rX@{!Y@hu7#NDLN6cRRJ63nxXnqxgR7B7srb(oZm$RIvtXOJTNrzKeGPVnfid3l82$Q5j zVr*&#=Qb!YSLIw1ao;CnS=P1^6tIp-H5^eH7RuOO00XR9e&2KN3LAhBEhZehj|pnq z|HQ$BJQ2M}m(tQYDmz)_+*OXQo5Ek#+RfQ$8Rtd^cXP#wTiSz1=)~k{i zAm@t?0^$Ee&(U@DV_1gMxn8~b8=~%&Ss|}XQ(I??&@m=)ew)0uQUy5}2eZ1~I3WK1 z2BT8}9L$Go3l1#7OecHe#Y%5QzK&Yu-rvW~EionhfR&(W{``2{qubQwX@9-Qd;0e; z+7|e%u-Om@eHY=_5JvF%*}Ad>y?}LWP*CjsSf0k6?o?cLnS(hUAVNr_eYu}W=#;!} zTAybkr!&)3f{uM4@M_3e`6QLA){exRXvALvcf4(n`7n`6oDM}N zP?nfCG!{Ftwo3tBak!7Uu?Zqsw$X#&ez5@~158;6 z8?TEI8f#DIE>fxty;Ou+v&1i5t2Y-7J9C{ML37tUR~lfw&r+UMdZHB1jZO^wiEk(; zVR3|kGJ}D&%9&IwTm4+LlmS}2^P9s1=&9wmt*U8_1{I6C!SVfv1J+MoRy396YO( zFRSpTDECqnMt+PFg=!e#i6hsz9N$@WDhe=55A2i>kBD))5k}%>h<1W=H)qqS1NVgf zQzoIhmUOzdym@l36vv8-=jMbMv-5rT+?t61;v2lFy+6$#Mao?L?SAWRbgHd zv-k|0$z-oFaI(#uJ@Avbe1t2CS?dc~^_;#JO)F~eh4sjDR-0*Ujc@ZChF2Sw;{Np5 znXilW>mzaP>ie@cI|MnHGV85D0a%}<%F5Wtm#lGV(h|s{`As3&iS(hlQ^hhFkvCf$ 
z!4*^w@453u>lSz2EBv^|ltFqiKD>k1b0vvf;4@9NyX zD}|{mGD{cH%%OR~2f9kZp>)A0P9l94QuoFp_VN!z0Q4_BTy*zULNU?+M|27=ohJgU zzj$_=*v(+rb_jjBi43&X3h>&hN72CDRMzI`QbRlV;QijJRq5nxlOx0H>1M}}T9a!1 z=N0tt=qS~ltIMq)3#h%=_`{BBxwV@{n|ii+IHHL*;u)M{z^ePeS!~nF!q!X9jF6#r z=AMX#faWXK@9SI0;R1|nHVCyct<lB7fu`Wig%Xu2D*v=>a1N`F}X%%$_Wp zW%^Uq^-Y3?+9mW;T6en4HSgO;Tw(#5EWJHL@RU7o#}Mp)emtDz8n(?TuUnYb%3Vzb zZe*{EFGKn)<+{8`oUR)k_fW#qIp}Pysy4wAHh*51%LEIos7tVTYb-aP%w(24%8GiRCe3ie^%(9q$TxeP&D&jZr zs<{~vye`}FD$;o?oWs2N3iz(#eY-!C?b6>j!-`b5_BhXNIx?>H(CnKqj)2ZE46x_7 zSMfhRQ_yN#G;l7XjbHCPTvEiNYMZc`Lh2C&nx{k^QNsQ&+hx6S^4dccBsfZNS=0VP+8c0V-9&Ez@mDFUxj5TZa zg?k#Vke0UoQ5wZZu7dA#Y|H$ZunOw(XqZZNYujQl z(`9tjbHALH%2{FZIXZrV6 zbFMYXzh!{g7mBiu<^HT4hughs)>(E3}d_sE@>y4s%Vc^v) zv9*EF$J|29*7?P;eRD0@sz_@7BZ@GElC-Aj4bT%q-)kCF7ZyL zD<;DI@=-oYNlsZ85=ExY#&2K$)O}HN)G;OD@&vXn<3*A*NKj)Kh&hFCus2Nt~ zv=u%68JfATYKwy1tHNL;9*7=cP9vBe)YF9$TY5c>uyg?NkhgOTj>zTLWIOL|4BL%I z4^R|GdjzVIQ?KQ}KVA~iyvJ?^?U{zF?V-uzm4mg990uxU_65`H>oA5Qe^{x;m{{o< z>dsUcx6REWJ#gccV>t9fh7WcX19EEGPt4~IVI;8E=i$fgB~#`NWX6oHeCu}*7k0nm zbnjzYx6fgO3sQrvQ}-7$W17EnN&TwHR7^(Jsu2>*RC36B{v41wiAfGp0`9qh*+CN%HTqCH)`e-p`hysWafVOSGl&x*G3>4 z+Te+>2VW%0J+<*hTDQ>JD^Al-Ie})I=t{Ry&&K$CE3YxVj**aYz{7MK3Lwo4g_Q{* zBatz7eyYx2E8i=tF!h^g7PP?9%%HS^NkFNaH;iZ`2Z0;jH=GhpM6IJvVLAO-mRpxb z&7$e#g(`xhiLgFPn8W||k8^d!!*#p96@7yen1Ru(C8tA{r1Sf$e`!$23`Z(q>I8g* z`LM9_ye^O$<9MVJ>XIrJU#cgVHJpy`c!D zcRoZSp)In6C`2>lDDy(;IjdUC_$;|jywDoqaFa#abyn@H2O_zYM`)+vg7=khvOCaW18H1vK)yl5$L&Z-rgK z2`YzmlaY}opJ8CU`6o?&_%jq%CP!(p;&67dI=2XN@KDkgU~|vAp@>7x79pU z2&>=e4ciYZ04?8z4NCOUO@J(cV=jj6m%lqL{pypSU9g_4H^7PKULvh6fXoYvL)J|z zC&9G@^NkY@`S`2-oWBv z0;WV*)5MV%fpSZaf<_@{QRgQ=ri1%oG8kNX?!M9adOZGcB7Sr-kyYGXu93vG<$yUM z?xBqs`W-ioSTDl*)7SRK3ie!D+jBp{qy~G=9w%@3XXc67E{S3~$M0kFZ3S#Lj=H&& zK;)RE`2L>Y!A$vGw@u=jOg(6FsMO^eNH;}OwB!{W&|h#AnT4U*!b7$>qkZv@%2zQh%!v-z#rbA95Gx+j00i_oN|zm;WF>Bkl(Uoko0~;6mi3CEW31I z!g;Yih+%ER+S|wWoa08vP|=Q~BC%D%m4$nZ5R;0ZUzshGCzm1h^UnQ!@6U6Mob8WV z8o8>kPxV3syV>f9Ah!P2(c-K@O+ax;^$694*Hm_E`X|0KHYlsotIR(%3q|%TRZ7MCt}*-c}D%zmbQB7n7$go%%FxGz$mv+t>P_F!R9Cwf{zm89%X|9q z#Zh#ZknP9QMfw!JPUn`i`L5j*?gtP(_Icy7kndS0#k19DA;k_dT$`=8Qb^Fijo?af z^gbWlkWSD14ap|SH-)&_cY;#pJW|x&SHTSImK-Nw`IZlPl03nmO)`V29c%tJCke8F ztN36e0cGkq>gkkshOkGhfL$1)5$y9AiG<*M!e9hEiWnfhUu_9n~4_%-(8gKz}D z^1~b2GX%Fu2_g>u(il@%TM583!E`q8Mmn5H3<2*KUDls?Uy8DR|7ZK@I)ughX14O# zw`z(3Jl}k3Gncmjot}4^3n_aLI!C{bQmI>Qy`GuW^a8+H)byq{!OrHjMVXN#$X<7a zk{yLAj#m_K&R&m{Z%_*_PcNFCa{-Q-68N^L^Vh8n>ls;4F(^HcBR<)xpe>l4y{U0s*k z;60!CpZ;?KCR7`GKP>P})Zw{#y=Of>;#jUO{nEeg{jaO*ayz_-&-7hD7XH1LG8HV( ztM`1nr@rXTrQg?~$QI@rocGK>0dL_WcNdUmYZj!>34~;IrChXoZmYNdLz&#&*rERK z=ihRUur>6j@BW>>gmYxM!gFQ0W&^!9?VLDM@C>d2V4qPaZkEg2ISWsVrycNnqS_i8 zFdpO&Gw~{?$@T8M$JCqN(55$+c4)j-n~)oy_fDTVhwut_9V}Ow?4Ejfwo7*($P0IG ze|JVF{*ezDy-Qtahdpjd9dxqvpq0xKx<~1ASLgdLpfJsxMR$2Ox4gd{9sqQ0D=5jy zde;~(gZ8&DKa)oEQel%uem6-qT9m(EpjOF-@qa?4Q@wq$Vju{L$u{T?*q>J~7&z#T4rmsi9N^1)=u~Jbby9+e9tjPL{!i5Gf!lcP7$MiQF7jrB(&`OM9st#;4+pM z-|bxyZ4+43$QSk0xO0)7zIJ@1tNUKX+B7qk&T8uusy0%V4)}Pk8Mr~((|;AO=+h(8 z4hbD6U5hScq9r{7a1?VnEi z+SmgKaZyfk#JXY$Q7v|!CGHf~S0OiMyF#d5+x|R1DW%yo%{nzvNH+#W@20+KJv!gC1}M-GrLftq?mmkuP|MbTIDk z5C8Q$iuQdVf=tpu=$lP<&$@u9#7ANU6^|$U{AYf`!3+S_3U^Bo%x&g3JYh+YeKYEf z!_@~}zot~Eg)VKIz*QXt1RJXhwN(pE5Mi$MuT|V-C8&PEP!*yHiYS(i|4q>tX(d0- zt@ni=2rp}G?VuXYHL68H6pp~Vas5C&g3e3jAL-7?G>GB(MoLAeb|yBn^zY3twt>2q zZY5(=o(j-Z&wx&FY_4=`M!Ld6@fPB+Fz(t=d{!PVCjAQe($VWJbO=8?mS0 zheCi97ufsu{g|cX8oe_xt=Dhpp48VSYpX}~BOR#Id`BS^Jg4lhY8U(6o>w!j8u1Ma zB!uM5eT2PMZR$v>oKN&`;ty3)|IK)R;%dA+$XnCLYM+opCwtXRRi1TdmO@(H$ zJssh$#A;sExV4fJ>0$N_RHZ!Gi5HX-$fF*hw@Q%_2q-`HLPK>D(O)DXKz64HHb~sh 
zp1}pj6zb2HFDQ#cDNv80K~f97?xRMzR|?aETMUaEGMZfZ=tej%ozqcWM6V=mMB;lN zzZ<|M-@}R7qW@$4y~S7<+Mh0lGNk0^i>p#d#!8ZwmNEz}girv;dm0_vZ=~gJ;2|04 zXn6@wR7T*65tTIicGg#QwWX^Gyjlh&yi zgZ9~)w=9(-@cQ#_K>zcj)0Cv)3q-P90r?kEhQn*^*VePJ^>HGoqp?a^$7ij^s$kjf z2C*eSN8P`#=x~HZ27T=`ki5~tWbSpjMI}eo3v2Zqxcv*T!k7I&p=$U9D~Ip|!Abv6W`jo-l(Mg?)gk zuC(Sg(nUwVY5J6JSM#{LEi~!a3LQ3ugTd7^pF9q;U|I@l45S-)ZV3 z)?=f0c9+)UxrQ#-Wjn_*zpl8Zj-&zT2Lk)N`7_hz*k*V7S!A>FOk+8e_|&13)CPF% zeeYb#jn*#DR{qSwQm8gBi2svV|eQIR9wE49nS_r?Cr{?8c-&5M1Vgl>gouTa226~*@k z6c))u>Pm<$cpQbP7AmEN-I$Uoc8alCrHjV<%bhQJ)>Z@NWM&{AeA@E)^0# z@tm+-D^M`aPY4^NMW#<84Z34O(3+oUWR{pUC@X^X8gYS?4i19=V>Yh{G}mAsBv)`) zEv72%@is~@XFd5XVXq^mj~kGv7nOSaOA{1z#2cT)xp|V|aKt2N4$6!Y<(pYsX$jaC z*5$`(vg&N#)LGSWTuvQ=YY`iRN;)3~x=JD(od{_qbq1ZJ8d9$7Imob8R<=OO{a=N$ zH-mUF^!IVn*bnc%Xmq`IJ}QBh+LQcf>J%j6a8VgjSNuws;?g5y+yLO7!K)+qp{I~y zYx4Jqk$xfP1l9!CB0)Ler<@T(FfOULRytoq%JztXATNgy0p+^B%X9>-GiiSYT0E@y z_pIfcfP<<#zr(>tLkRfZAC74QNkKdun)zk>H2J8%!Ok^W$zGND?64H2_Kgk}iTkmQ zUo@Zn+dj>ma2T95lOs@iS3NgaBE8h=XXxYOlhp4KOMm0IViK5t+sqQ!>!I?k_gGW+ z3^cKmrUD{pJJ2Q2te1(l{7%$|9P7x)zKPT| zk&)X=?v-RnQ4VpBz+crDS1P~uM{wX*8Qmai_W=Y%OYJye1P#!ls1M&<55kus$wudk zD<6^jyLV)&a2wwemeV-PoPe56?N`_YYDJRDS3rXwu5nFEqu7y^7GKjQsAMwz>OW2D zoRL7IgV_ymE&T)S-l-Q~Nioc8v5|b!W0rwK2@HBRD&+wZ_l%?O1q+_~ zGSKdSrGx@T0Xxvy$CJYTmLRa8Ch+;tkw5iDj)i&C(@h@%UcVNrs9aL{`vNqb*9UEzdF_)?Ik+8O}CGVn&n?sFzjU@91oAH$EDrsc~;-oAg{+h2mQ^b&DA?&@HK! zAyKQDoE_=5oXNjM@?eavQRMu5Qt0hf54{MN52FNFcC(UJ(L1sk{qLzjUbNy!Vc4LM z?gSg#07WaXuhEMkre%Rvuq_-Wr88ox3V>95*4N z>ZoX<$BhdY{kx|hlZ+{r9``MXocS0Wtn45LszsF=!GT*Vf6YYX%uox znYW~;if6i=HaTxO*5q-jj8(ygCF9Aer>`B7u{k5`Q$G9EYolh?s3~z0$lcvHwkrrm z9nVk>xgWx{ykK*G>jjzbP^_jb;yg)@q-;Y)ZW76H7HzL=KK0+<@5bPp7Kdb;u6TTo zZ$r%Lr(}rf#BVuUoW!X^xk^D)#HJ`*BeNb5o9Mc_iS>KrYY83epktwWm6p?$%_#uA zg8hF2FgwdCKi{+mR>`8|IVSdHBh{0bG~9S#s5Et6O9h~#+ww66>nh1+iZ&Zzdb1~G z`0xJ+_xezJ2EHRvg}l>Dy-^1Ln3k`lbxi4Yb0NuXtGYg5z4s#7MY`u!r!HRgdSsK; zj6>qS7Y%9TpJfeR`2tyzU|lx66-aUiWNBz zqAe$nngiEJ(`22wPihYO*&K!yL%{9i2fB*annUnsIp}d2JU%FUep~5Kc1T8j=}g4| zLQZk6Bzj5)2sNTT7LVp5>Old`blOQqR+??1894gYKKZ%%lXKL~dciOLh&54B{ve zx@vdod8&l7Vq}!6R>Yn_ti-_#e5~gcDUWfHoK^SFNW7G&L!Kem;p~qbhzvs|hL=z-x`T{kxe?G9 zb3Ykd_zv&hsHF24qlLZ=!)sY|KjuE%O^Vu;L!zCfc`UAoVdo;F-*h(ir1YvFD6GXh zfuvK&{1gKeZj4W+8GshtxQzGy&sl_1R7=CsVTPoc(zu$W(Gbt0pfK6%{+lUN&lj?JGJbAn$dN1BdweEtyjysTt+zmg~h?)v28Y z*lws0*R{-gp#XEY>A|4Kf|n$muW6Cd3ol8F*#hO)hby2gHdP|>_2Kdm87RxRF)Fp` zDMT`$cX8a(^-e+761Zx8^6#le28N$6StvuaXhRpI9>9{GTQLIlYf07sl6;LKdy;uKP9q*YGK^8Yuvl|NjuGtSi zyk$j247C5dKD|rV-Sp}n-=Y^ zu1ewz6w}h4ok#I!*4uvb3YLVNmp?#-ivJLFhP^~#yFH}$nmBaivXEW!N4vQf&HR~K z6td;ADrtFw=X^#3H#e_!yLLCHeJ}vI>4+3v)-YNJxlyL`Fr!LMBRCcRZ&kkRZ%*1P z)J)oF==)+?2@}}kOSChCDnO-6@JYdh^7*$G=*jx1&SY*DW?|<&}6)lry_$JXo+6Un;>5S zC2Magrh(x!foEz)o4pBqVTO1j6j8R4ji%=hu&mXn8q^$&%*OBQWfERKrCf0_{!PABD=3!UFrhJ*p$j6~D zBN)@ObMHtH8L!+yjar~>W`5Qspq(bbfwxn34{fX1B|e-yke~(nMiCf^`OSjh{TAX{ z;$IQmvF=vF^Sfo{;p3+cUX*DDE?+rJRv5iUj&Ur>Rscw)ycZaSmhH}4Zu82K4kq~F zsW3K*5f@C$kCEWXw7VjC1{}#jjK?2<(JggfepEncTxzD)CX^942x( zsFdnvlYeCLN(qxT6E)SGz-qhu{T)*JW2+f`GJce65ubh1>0=`-hw{*tDK)j7ntYgEFCWGhL)VQ*& zlV|bh^JpwwWz=TwO3t~nswKIDily9VyD-y*ljGnqMaH^XjO(r+hm$=J7ETj}w%+oq zRM=)mt5Epjr<95SucTRUfu5&PHKo8dWcftsGa{DQFxFFR`Bu3nZ+ZW3iTs2K)Z^AZ zMYO$8m;QF73@?4Z8vSz3%H?64t!RR`nIA2Z`xyBDhxzmQFZ1W<{bl~BjsDyG0blAb z??38K@qeg4t-LSwXWsk2sXtMw|4sdsUjBd8-wby3m-=h?Z}s>8nm_ln#;3N4y^dIm)8K%tEH2pA;T!I znLG3;on(w`zp`N26Qwu%CT;36s~WTTSqw=^%){(Un+xO=yn;aTCWS$dMM=3mc@`^D zjC<+X%=Fx{wL=n$^fA@j^El16L+RjuA z3#q`~aGqCIRd^*s{PUB8ggn_}acPI&TZTkuLDBmbjNw&{(j;Xk|Muxq=4$g`edAsj z)^Ng+?oB 
z$&FPOd^!dPUx~$RPhiK922wor%jl1}2=Q$g5Vy)BLV(N@pK~2&4^o8{vG`K8VJ)^@ z$nJ#xbXpzU_3Rr?1iOWTnK|HqQ0a<^ZARP@zNF#Tc4bV<&+C5Zv*SpW@h{=bl;Q%W zNc6ejM0zTb>Jzz8^v(v_P*$#F;|Q)ZL%B0wkk{Zz6+N*c3_1VX?ztnQG?ipWtxq>} z^gCJsr-3YD$*FKTMjVpudgpmg*$_L+nFw2tZ+0gCWX@OnZ;;m<<5vRX0wW5~oqr8s zlI7VMAOl>D?+g#t0_`=poWB6+OmitwMR;syuwCgK53ZkJYuVr>xH7%5+*#951(Qs~ zG9h|doRT7)@PVBDk0awvd$Gx@^CV0+eckq8=U@f>jve&XbM$_+&rznxt`OWLhOSbk zKV!JTtL5oo{u)>mMX~y4MQQ25*%>o|66Qdf4_i&Fgg`Kf8*=`dZ+Is|z}#&m^5g)! z|8)aq8lipt)(w-B#VsM`&z-q4m%!$7t}v87DZr83!UT#3cnF$uq`8gE7f!-@U^UM} zL`|MEk?wroza+N5E_o5Q>3d#Pw+K*qi@OO9$@pJZ?*ic~vIQPXGE<#C+}p@6dZQ?w zB30UOEg&`!5mt}@<5jqCIl5!U>~u)#dy)Ar*>8)YEablFeEOslCyReJVsSN+r1~Lu zJ-9#%(QfxON8WPX`wV$Q4aHidcudqc#!Q9%*$5C@LoA|z9Qh@`nm~2qZ=}2+F79}F z6S9MOu~!l2#JGwW{Lqmy1}P2t+mX1uDYMI<)&Hdn8M{G|?Z0&4HPx3cgc9j3iPw|e zU@NG>ULG;S0-N$4f{+~ek1p)?5(nb#xdzRDLuLB*UiYGW1IrY*_;#5J=OT05H~mMv z7YkR`&tKgl+8|>VPwJ9F#%3EcHI~37=Vuf_<|`4r2|tIp?=Sk{My3FL zic0&bfka3}4=#bqW_mxd?PeBg_EVDh6ylm_0Y_P?5Xqpf#r$3y(rJ$tN`63CEfp}y%;zR0u zDIF^TW5Zrfu}o<(MCSdQQ(VpMN2%&&)M!!C*oW(3*VxLp6lyUUZaa#2-`HD;!*yd` zmjy6Jetf)Jy-ItH$^={8jo$R5m#Z_9LFv#77GGh1T5y`CZ6HI)4|Wvj@E&+ndV3hD zdVc!_xYR!C44JU4Q)uR9m&RW~S?Nt|npZ4Tnw(*o)YYR%yv(JqrDw^&5><+H(2jHV zVEEha+5fD&qd`3lSmNwXqv`KK6BicQ^xupn%rv}3F3zkPBThz-Z*|o`=6ygS1EFC5 zWgZuJy=Bgr4oGV!|1q4B=5(>$l9<336l`pssy8JN5FG&R|pPF61+}zhXxCSew2g3Wmty1+!;y;L8MOD6UIATA{V+4o&xc zl$kokveFm5q7MIITViF;aA5NblUL0d!Ud%^({PCwgbng+Zp(WzhWi2uO9D(`n|sh1-1e&3eBIZ*1`bNeq)zwpeaJX*`2 z3Z5!8ZHNZ#K`G2-pP=5aAfTPen-MAet^o{sJ3cnP{%JdW#!bXEZEPs#UZq8t7Z}sI z16Q3W&9^%_!^Q&LG(%9x9>80=ve#JBgEj?@Sid~QJQ?Av%66`;q-c$zs__9Wqw_Bg(~Um zlXN&!&huy*-06jkO-^WlR({bFcQQe|E5*I^EUC4wLpDJ!r{F2ITdN3%@cOE4p7uSh zM@@S^MH5F(j)iYT9axL7jq-i)GX4Da^oo+8tCYw4SUQ@)qlS9tOORDxwpoYJFc{d$ z#cqk@S{+kCz*!Vn@laajmU$xXC9q-v%|@d?p+I8V6e3FDh)sz65(b(x$!E}9|7Cx`ZWdRYdy6*f$O->0#A4#pn}jGC z%Pmw!BCezq)qLJl+d+Aak@@a+iERA4k}U!o{=c9%f{$9i?x5$e%K6AS5A?B}x50S) z#ArV9v=SP`vT zoqXc3Woav4maC=^xZxvKzW<#r^!fzndQ?|0Bt$O5l6j8^8hxnF<-pj|X44?3%uRQO zvETW5#$682BK?AvE>3ExvJaoeoaW9L6xU5DLyT}pQXR=VL^di^Na7_P6#^^4HqmmM zN2jPdD4|puG!?5>XvCxtEk?*0t$hJrWVoWhz( z-X{6fu#N?+^-v{+Olek3{+NlG1QbK5h(4f?3lE$A;&dqjsYfuiI{jNzw#fh*B;VDb z!cCu}qHpNL2j}7#!UxxkNGA91Lr3ZfO{FVwPa*Z=Fg72Hy`5w%89e;YI3C`j^-8#u zn-;^@=0S^3>r;1Ol1=KFCfUU5x~anRJ)i&L|r zxvQ73Cdhs{9qdD{iDKp#X!b#~^hS;V^BBYVt=`w!n9m_w7)sAZp%H|y8@@pOFcW$@UaY05X2i>S&f9`;3KNm2Z&#aKQkCE^+Cd2gUXExtJ**%W+*eaWCg z)%!_8E%KmoPJ8ZG&X{OY4;quYA*S-yPr#ICViYR=xu@JqBu%P}qUtJDobAT;K3r~7 z2GqIv2mx(C!iqCOK`9dP$!EUv*gFQXSO;|YD2_?JTRbKOzjnhriuJXFtWni0gR$b) zIhMRCy>_vzdv%BOIob*tkI#k)WY5Q-AVnq3dmv~k@733vu#FzFnC*Ki#lTpLDGA$XU)_zy8kwL)p=kxt4670zK>Sh*}to|D2zHdYHi)FueiH)`&Pai6C7m#?V6GS47(zBcm=)O zP0uBSjI}%&)#O+RF7H`ch)dkN;Qyio7vI8(nhm6<0eNeE-xuZUP$_39Gu#$dS#+3Y zs#K&_qxDL6oTW5wgTSZh5$%*zyb4>e2n)MF1NmHZ7x{IzI7KG(LdHW$+5C;gtAI;u zq`z*>4K%`!7v5fdA4+-&7$6yy5?{$X~F z^vYV;QQj39K9!z1Q_+7bE*Y2~GX0w_G>dSrBAW78LCv?NrC06!6~+u{SwY)=q@=&$ zeUUfCbGEujrR`}ey*RCxmMF^r_2A=-Q(es}8T`)h>3i!ES9XYdDr!MvqbU99`_ieRMo)RIGa=_Pv)FQz6`_A7Y z^z`!F#8IN+2$u3AWsHmWb?(0d5t%)dR*+XN&>q1;8>lZHQ23}Ygdd=;4C|?D!@hsx znwSSz?D+GcOxj>xl1IQ9vyXId9wxbv;0So_FhSSC7pFM!0n-{hwQ@^Ja z-qryO;3Wk4jmgI#EQ%&LF*13(y0bfq4%kHO|HO%nWvDztw6A9rg`;XB1`{G0luYKo zvT|Ju^j3;Oq?L3U6gt7frI0&7hw9?{tUzcre|$kiN za;d3JQU~LFAjJ+VRh4(OzZ{X;_dOwZz8P)y*(7AUy_qQdyo)N7g@l{B#jhDRmp#ld z%K=+whaJj-H3S6xDB#XMQ;O~U0cGIO4&tQ6x4BURVBu% zZ>f348OTOy4iqKC9Z|G}o+95Lp9bc)xGA`?6xKB=05?aov1U0VgbNRYjRa#dO=ruJ8`DYO^dJzkWJ|vZ7&~XOk z?6LIKZ%mbgKr)7gF~kK7i<>ecc>Fgn0vLxOXKW^)1+n&_sBVT>)A_`{*7vqa+KpHB 
zxTm2EV9W1~R2;j%B2>mSG500K94wvQ|G=O+4Y0Cw4yNZRJUkJoS#Xlj#)*9!Zql0H*84Ez0rW>EH1sKcsPs*En&$;4Zokfivnf-or}{;p4^N8KS= z49_|Y<>j@BEj+nv1x@6`?P+J>h_$Qz#Wyt*P;WHyZoZ+}Z{Hp{+zWRp4&y;U;AZmw zV@2G-`o)wbC3RK4m@*#?ZQ6g3vc`WQWhLnYRFE&E`~a!(jN|Tz(P{D=fgTdykR!~s zAy*`L*s;Uv{5UJtD!RcT-pAAs_x8wjuizf*caTxu$E@J~nB%Yz&(j*BeRl=8L+fxh zG9}0=J`rdr-c)aCoF$0xaeUU$9-i7BWbg3?J?`)-sZ7t|Yt=zC{dgx86W&ecS{xgX z7f0hTEqMF5ZNfg$_%|A7XtBO4)3~!@aKNkn?0eZTkmcCYNM8ot7gABzmSp?EBA{e=jE3~wVHbd>09{CGh0H{pt58gks@&l?Lt zTD{O`Qr6s3kzxQVK=28SSTng3l^-|yK7c4=@dVY`)t%Ewqz_ImQbaRyTZ#l(4^{gy z(H^SyOSsjNJguZ*9K2L@H&o1!z(aXVB?(VqnyNV%C1CYWnfe;W<*)C+ak0j>|5eEn z6j$8O<`~UWnvS)rxYv@qou&=hE#Y32!kX8@)*wh8UPOgv;8X|u897k;Y z7$kDeIGs4Gs#{w}ul#4s06bn8b$IUgxCZ_Wgp#*Rc5T+n6$)D?lg}J@T`FNH<_QW} zb!#d0OrkL1*g1TVCNu+F*o znJ9XA*5v;`i89pxAj%e92`vTm1Y*+;0rxddH?;^>zOGddnTAp^GHhu1qP%~opi~@d zKOionxZhJZERgL5y&T4H;Fuvc_u-Ya{QDp&^y!dH9d8oQ)c3UtZdArMZ|_VOkT>v3 zDLNgDfPk{RCNmat&y5AFu#5#PB~Dw!36_lv1`lX5*6;h;);T{nqIP%xOD zvL&0#x9!CyPER4N>YudYVU}}Pe}Y;%68Z!}Ak_|XQ=YM&ZNgLC^utjYJA${)T`Tpu z&0;ZBeJ9gZ(-XruOQND@9D4f zER{Enz6#g0B0czVdXYjSvx9;>rqNA;5ey8okEAox-d(ZfospOL$%}PbI{6T05&cEG zO-(E|AIzPKT8c9`V_;Y0r%b7XJ3(^2NuPnnANiSZ`KO+&;NcR6oho5drXf4}#q00m zEf5R44^tiDYR{Un+EX3wuBC#vzSmW+`S*7I5~o?{CeG`g(|9R70;S}SyjJ9k=FAH< zV||jBg?MKd#exa(vk^%PCeZ%q~Sqqe!@Cq2u?T0bUq0rz5w?#NR z6%nCgj_aQ!z0s86?LfEN9i>MXc;2VJ}8@t6sGs;3@*MJ<i88G7q zO1%;lR%5{k-W%ojVcxFeYSGtF zi>=?W_VGRSn6F5hs*^QBk>V{Y1@NUFUP3p2MA$jlK|Ob*I6pLN?Q(-Hs_Xt6PoF_D z4_~B6c5TL$Ya=0Qjs!W!vs;r{xDiOS{cH&nwIQ_kh4A!$)&#i`MDKiNpbf0{T~~zV z`(JH!d~ZpbtJ-=;df(fdxRAa-eNRuux7lpBL0onCqasAvI2x_AqPx0P3w-HAwZa4K z{;Z<|_L1di+=2C^heDQ!$c6#a)5QW3NKp2F@yMQg@;FjfjHz;K@I#&44_rf8euMA^ zSY$F-r}l=*yEkkk^AG3d5{hfG?=`q&J_bXEQ=z8fr{lxtrH=2tF%{@n3i*cT;KbK` z2&K`6*yUl}S)_CGkF)AXz~p*2l<3d;i~kdl^96N_4-;p`+3ic)W#;>EI0!C1BIRsl zbNq#Sbu{@#o;ry{hw;y>NGibYTqeZU(}%6jo6N;}7vv!;eVjCb*oeva3ZH$F4Kt`4 z{eR+duHzEq+4(zzpRw2DLkkki* zO~zLnByaTp4#y*FjZnq~ic?lg_9AnNh|tx(X<=8NUX8I)yS%S<#0uKIH z?@Ki`X=uRz!`fR#)!C+PwzykxcY+6p;E>?%PLSXpg4>I`OK^85xJz(%cMtAvz4E21 zy8iCH$Jht^WUXV4SmT-Vnm6hUZ*I>;tE3@s7p4OGpI7qpUpw$7Kcm`ieik~gQq`DS zxigzFYcQ)9&^q`80zXOIb0=q_lKEo_n88hFYGr3awt;rJADcn&mCy?Lmp-;W*@809oSC1@vpTHgD&is=`rbf zUp2S|dO0xvY-((e@4MprKDtw%e*9KottD9;ne%^+@>BhVX#vl*fDfCt3*FG4}_#Ld90u(^Non+Q@wiu32-$$)^a znIZ1|=+2+iubkH3$;L_{d@Mvn$iQZc*OeKQGy&?5zhJcRH<~NLn9^ zx59tQGIuT%-X3N4ektQU+&8Ed@)9L-xu7<0$uo>y?pr1@@+&oVh>( z)3c{DqEzi@1kJ-|;;7vRzw3cP%V|WUHVsL?uvsl^O~CM&9+ua#ss`roM`@wliiHKY zZUJ^#YF1D=$7_aFIpFP^EJ$qljq&t1-y8Ox+G?0R-rSF@L!qzIhTN&;bzSdVc)}h? 
zr;oT!dw805sZPqB_Hmeq`1FG`O#B8pl62N92kQo`lLkR65z_Mxqvy7(E|ld0#)f2r zQy^~|DYcoutZMMiC_oo<9Q8-MqA+TB+Q%Qe#%MJyUB+qxsp-aO??jKcyDx*=%MT?^ zEcDtu9SJ?j@ri zw{~cki&R^XV5aq%Jv%_31dhLJ$}?yyfniGgsBYr-F#y^7$w{j?6Qj{Z1`N@mQ>Ty$ zH;dAq&nRQGwAcVvd&X=czihxblk;^4Gvp?3C@wyOZx=c4pO5=KY@cFr821faa0*in zcuI~ZP3V$aE_S4A%~9ccAHIUcLB8lQs0>OsxG=n5hA@y{nVDfAw-;anvlD=N;%=z) ze&*cbn$-b2v0TKTtnG>)M?#{$+TDNH``AZnA_%#!P1KKj8^PznJ<`g(6Q8&3BBBE; zDu)t8o~ldgH_E!e;5T~NYW0ICQj+IP%GI-#JQ2OUrDG6msEh)HVHHU>q2C~7f@uSw z1*FsA;TThTjACWBKl)2T43PpdTaDBHzvWjF$S%ZDkof?pII|sZGj>73zo`0}#(0EzpBI9I9tj1T!*r5->}j8@y^_ zUNEcdUC@8^^U|~6SZNi+$fW{QQ$(uD9hXWdy>x?urA>OOwC)@W3#1*Bzk)iq5HUg&mA!g1%Z*$G>sBn)HV$-`^|keQenjC|GT+o+ zV1K;v{dP6ogi)FC#A}ffkmMnCsMF)UAmwxoHUuA16#qO-GP?hp~XX)O>nOrCepC1YJC z9X)v?G7rlOo9e`n3%6oc+qB-gj1~1jqK-D_Tb+OWkjY6xN_%@sgp*c{DE>Mxn^;OM zaZ=dppfnc9$d_Q6xX{QxCOuH!{T$B>AuG6U5Y&H;W-xJZ>HavpE1yNE z%Mc`R@I4e-Isc@JO1ax{PIUUW`noZN<(K&3ij-M5OJa-rXzD2zw!cn3@JGt8>3*U6 zBjs29k@CerQa;usNXm!#BjpQ&r2I>>|48{H`uyv4Z=L)+(7UK*6D01c#wbK(p|;B8 zf4D>(PGz4_JHw&{x>K}%IVn$}O8HGur9}Eci2Em33j_{ERS=iZ2HhB-H6<`ae@NaD zMOXn+Oi?vyv%Ev41`qG7@K%*$tMy#;$*OchxI*bcPQI(gVa`WwZ?L%zpHzR2(YN3^ z#AXR0nQg?L1y)1a>L9Z^N_f?sb6Avf#$E}Z{@<_8sF~LEh@o zu!b909808PfS=@b@;2lgZ@Yi*S(HcDu|PQ`Dp(_#2dvxO@wpQx!Wj=1 z#5p~%<%?lQrOBLnX`d*ljt}#|G!?$Np&t3kDz|%@#15ulJ@}bKm}Gbisq14=Bf$Yz zQ@==MJP)hTnO?Cmp>kaYaSFNB7wjXqQ=9KxONg1b$B#^All*j=^oh< zpCX6~7q=?2O~ZDZcfM1N(gLh0oL=lVvMbfZZQBvtVZc;#6 z+Z)r_Qb%f%u0=aKRuIl%O#<#Oq1dTVQggljEgf0tyEUQ*^&N~iDR||;hXUpU7w$lU zaUd$yBrg_>w=^u6|AXHZ#KOB?8U%{8V=IK$pMB8^dyCM&7W`avkbnxax>}1|Bh+KF zpwT)}9vt~~Z0F7?s_@%a>|n0}*~`k=*9920p90sVDa$gb$b-iy99@S+`hmlm1Ce92 zV2nN7nso9Hm2R0wIASkzS~uM@h>ES_CJtB7YIqnegYv0B=mnH2`Ap;HfGd-R)zPO!n=ZvVCGK_%GF#9!dd6_ z-aIUe(Lb!l95C@u=YVLYp2wRT?R(HNOC=&L!93^6KKnrZT}xE* zty~~f1wdz-JBM0D4ApEogU0ptu;#n?9L;D?zE3~fmF+LQCD*qEtyoCIcFzvc+q&7>#|A$XRrBn}r_(bwQd}58?hVUOg!QQr~ z_wDBu9G^bP8hfT7f_G?5RI*HKntNMPO{J6y7LXJrhI6jHQ*&pPY)2y zE{a;bqF!_I9Q>O7L>w`expy74*B=qAb>SQQs}6go&#AACfAZQ{^&TvH<{&P$P$2;E zbyQx%PXf=0=U7DoMX79i2|_3DmX7Q<=AU%dmr7d|fl+X`6?_!|e3stiOu|lAuIP&; z`e~m}G=VSgl2Fo!N|FmD+|}Y%*tkvIsM$j+MK-Iv+?}6LkIpjnyc+8SyqZ1lE#g%2J}kbDRGbXX9&MBU%*IouK<%A%|EvawscCqQ%dqS z0Q(n98R13?$TS8a5uSe|5eFQp`qo-iUreHU5!}F<3e?Gt|8;HgbY#L-6IKeSztV?zRuIlKj*HO; zIa_Judsn1!#USV)cT-tD{{wZM(d?1)46k{k}>lKp3>y_SjB+XMO}G zY*DN)#NzZQv?rlGN3QBNefqeg2&@MiiriAS6Or)=};86QXw(swFQ{NEnZVm9N%E zvuu}mms-?nk?(@E_Bj3S`=o%z81flPagXBOy#jxvvr?ChwN{ zvx<(IPqSE8qp0s=m>$u?$;LjUW;Acbc-BR!*e716@CZqk+zOA0lXb^}y}9w&R>8gJ z4_jQ4Nu)Nc4m>%Sqx~`+^ZxG`0Gw^>S~q?2wkv8%YYq~XHy(Z|gq-ZP-o!#C&@bQ( z@+Ri>*qlS=uJWohReJ)@1IA|K^l5n8MgQa-=R_Zf6qphi9^bzN9U|54aQ|Dhca!@^v=;`6 z_NIaB8H>`0e?)sSkZ3P*xja2Dy4wrw^*t74`qS>BU%M*P<8KnD^7eg6r<3243Lj-X z{mf*IyS*%B0cCvf?PpjZHtnt4!}hsw;^TABP`&G8x9Qvlw(vSsvBX1czghAF_28#{ z-}rUCS0{>96Fto-$q4P0AkJ2`vS(_<_5GIm1q_;bY?By$+@Fq4`xm328apZng@m~j zo8w-Ick?|?a^~}MK!bqqc=&dViL(vg^LM;S{uX}H z0*fPUKU%R!%wOWs-w3Q;X+o2AvOfNjWL$fDLi}ycJE!Vx=&*#2T}gMrn@W1LK0@Gw zrIbe>n8U3#jGyI3l_Ad#1De2D86S{A>-xHjvJ_c2rqo4j5`S`zeE-079qE$ZNHOq& z*hIEA1=PHJ2+rim^jKa!*UfvZFWu0qxfEy~*Pqv-yo(C_Eikldmx185jeLPG(c!+; zI`ul=^EA&zY^G^6NQE$iA0QrG;uyY$V3)3;iH?gj-D%LOdPC2$xC0bsO~Q{F&5oLIZf~oWo_9DH;TVz72a{3VQogfk zWgYM>U*!Q4u2GRz)XWuD!A}uBTn~^~IMe4oVhi7U7IpZzMX|ZnzqVr4G(00|-iR~= zm4s9hE^ZwDW=ugI`r=#{NN!pO<2a@bh$zc)nTBWl8cgVDWka6HmR8g$Q>fh-Lr`S2 zN8D#+v~XswMt{`&#LGZ6N8zVRy(g$|E!Pt@A@pgk5y$w&WcA5_DkFd_->O1L9WO%( zSDuezFJWT`5iuP0>#BXP=w6aw=6T+OJx)K1fRXcEz9Z^36y`)goemBzQq>R#a8}W~ zcb9UbrrAoPrk*2U7c?$nfK3h7`K6&(6D$w6iO{vCFaMFq)PabKgBfAaIU-WF=R4)h zCxIGtj!})7g$peuEB^PUbP~|t>&bMtY&_YtHW3$nrdFeG^J>>T$xH*LmWyw%n3I_% 
z!~}d5J;2q@KiC8)^P)SkgbWXL0}juLP9!QB8R?LTG+)%Byb6yWqhj1lFbYM0ZqewRZbIV7DAG?f_Am3Xf*hF{6q zBXNiJFCI7X+1T_Bi-pn~{Zt$w7r7Ihq{kmSfS$F)qBLX=zs`{}UPr44bL)Ocj%c!o z<7lzl`z>t1-AX)?nVId9*s zg3fUNNcMps$-dv8s>8U2G|Ps?eyIo!63hbC3FcaUJE1|DpeR8X_GtCCuvr9_$6EcO z&A?yE=g!}M%bp0*QZKqJY19C=gB(?Q`^x3?Y25i& z$aY}}hD}@;LGUMox-YHLF|)6f@DS_C3)9JhKVS4&>k7L*)}yBVpC%=2!Pr{PAQ`h-57ABi=w;|`!673jdkV&3Pv>7?R>3+;IF>Ja?wAz>4d=Fb~RrgH#L|8-!K3d3dY0m@aG}^80Ay zq-U^?+pgjD0B8w-SywXylm9}seb#sXAH0A-A>k7%iB!Xh)cSK9RvDicjPqN62%GU| zm|z2PxnpK%FX;G2<}Qhkj_Akj1cmHQki2-ZpEsJ$7AP~CZuq5gyX5U|B5#KF$Xsg>TP4=7oTU%^SjxpMtc8tp4~-+hl{A6EW}fMKm!}Nu zbTLR!!`s2bEM^4SShcK$!?dTmQ6k$dH8x)AOGSwf(BZsCgD0F1q!Mp4LXV@oRTR3d zwQFR!w=8A&*c_8Pxp!mY0-D$2ny~~B zb!^TSYm})le>WJ7?)3&mk^0NxPCwb(<6M~}kjr2TUagP8XIIojUA9fRYqMv~1rzq; zcWUOKZk7=MRpc=I6Kz>ok7Rc#r)=-w(^uprw8uoR4Te+d6IUhP*4pzBZ20E5eN3!| zKbb7>2CnA4w`{t1}+!u!7?SsGCmp9fkT)CtrFKm2KAVJk}XWfQq& zDGQnRI(sO*%3%>*eLI})?0CBcp1fbqbB*6nEivZ*gt_p<2P(g;wLd)dkH3Y!KAeu) zri%|oh{PeS#^g_P2}}^kYuKKD>5cxH5axx6*jdz&nNzhnFm1tV=%I{tCFgRgry!gC}z zm#6orOTkp7C^TL;7X{yOqR3ASI-^N@fD&HfMhEg3@%#o6DW+ zM>(76R5xNG$#2ywcMs&vNu_{-qp#DgA{Uy6dJKWvyN&B80k7Z>Z;!KgSUE3HGG%12 zC;HC($}N^*zE+dOpCc!{w8AofgmP)6znpl(OPJ3W3iCj8F(XW~r*~n3U7-Hx!hC-? z+hqK$^qTOt(~p7J=;B>>W$9L42bS&36a5UYu~o?Qe4f~5egClTx+)qVnP_MozCD-w zrQvF>W}Sj~#GSPvtt5>gT_yLl3RC>4qIa@kT~eJUQw&w_c13!<`U|S%3;bz-dx}=M z*t$M|d^_)K1>7hv<_frPjGZ&Uo-x=?FK$!E)i?iYh9sWhDH(FQn^`b*u`Zkqsj8`O z-}AmzaX%tJRUV(w!aFw?q|)> z?8J%Rmw5D{}HHOk!tm62i8?hug6#&Ur`o1qAtNYBZhTazRe+nUlFZPOjBCZ&FNL75$=TDmnb~Q?$=PXs$D8YsX1D6fiVT$IMI zFK@Q8NA11T)$q@{9Z~B&l%YA5;!=WD@x6=l3 z+`or~^-vx!@eYQ!&*;0&K%<38WAiaOMR$t{{MhgNiz^Q?xSwYOCYQ%afRJX@H9KI9 z+pJl5S~sh}5;d7%W?kU0G8^j_)Yo=f5xLC}leu8kR$7T^8^FF#&PqyTpc>xglUB$Q z7ZoTheVOfugNmC;4AcGH0v!XE)1a#<-G4_IfdjIw_m zY_I4)P2ul`_{E}aw2<0ChNx~u`wX05!wf9oH=LJwMx~{QsyE!t82&$(*z}1sEHxaN zDh}WG>k4=7=@<5U)1c31Ev`SPr}2cFz)+0_>&nEHj(ZOdAZOb(FXe9{u0l zSI9?uAz2ufqCwE^%Vy(|>>EZEf?U&aQ|2DJdP~+ZT$qj_kK0H$%8|ld!PzAS$HH|F zbal1HXJ*J6=*)BpH>4WxN=2nk2w8Lldzh@{x!-#C{x%;}0|lkPR{1*oDxbObfs995 z>?*tC5Ixrz6GYai9}l75gdo9w8u2r6i(?->q%qq6rm02WOFMV-d1kWn5nuM=uM< zm*Hj^Y6_~Wn$1O(QJt?8+j4F60T^LFEM?VI#|m*Xe+3I0EuutFLB7!Tu2hZ_dd>&C z2=dmVVva-p_^K^@nF*nWBLC^$#dMdVUaR0;(ocQ(E(i2W57GUaqAWIqnh)TsebU^R zrIc^sr!>K+5-7h`temtj#w4W`w+Y#Kz3o&mE*Lyx~^ zd(e<@78X99kq3%qg`>s5mA3fd8&*=$xJR{EA=+$6-MqsG2(`5fI+?z32rmER_ zuB~;-LEjUlKMgee^4b`lQ$j?`?$P4x6xgQgm)G8~vAbyi9-|X~)Wof=~Ct*x3d@^XV>Bk`Cu>2%<(Ml&6h>%Ib>34ETUwb@)twtjS3Bf0x_{QR=jq z5?vK5$}-=K+q|w4kto24fd53?V3Xgd`O)~Uetb&FPi!n!s3kJc&i>hsk-aLZg>m04N$5*F@R##d1V?dDaw>I)xyUl&U0u4D{mMaM4kbZ=nbS3kUk zbJ&G!EmEGrRX6#brujC_Rkka+7VN*mi&oR()ae7Drkl$exoUoqjfB?!d(&+*qSJQ> zEb7~zrrW>xG1C11;Kx&bx^dy3vKWPVp%O;z^ChHze13&uf9wp25uqu{DXa@HmZgQtGhi~e9R_*{yq=vB(-$KJI|qKc zB3g8*D4J?2C%Pk_TR60ZqoU6&VUBAU_MvhrpT<>biit5nyx$r?EA*!Y*TtILH zxDzw&Fa!mZ`rpBuS`KLN)+quSyv>Lp1C8%^ z6#wVqjkX-08;!+KvnqP*c+-GU)dh#I0++Ax3o~LsDzbXr6#+&@WhqX&m`?%dAmjwI z>~|sHh_Xwg^z53)y7wZqQb)RMMGZHJ5?gy1yJA-@dfdZtyDM6wKZTS`M=-3e9bQc9 zA`ogrjjFy0ofAMFlT!UANxaW3Y<}}aBk9u@n$pRvOA;OLTF++MZ#LONo7UithSg+@ zi)jZmD#QOpm{CqMK9m+}QgW~;*W;KKT4&$G%`MkskC{(fpLvo8M7gl#}R7 z!aK|t8QHvEwLv3pk!kNJT;J!yz3%&s9B~5h77H$pdnZOPNf%~VT0VTmmlsCu!Z0)i zsxYfwEM+f|3H}h=Ti>9fa2h9MFC(i;=zotj`CGmmwy#%RKA5BY-G#byGVCGVfOt3BM*hG4!tgZrvQ;Hin+rJeAdurvEv}0y+fG zKz^E2>^a%;VY@&Gnlw{-$_SiV*wAah$vHf<9HK?ohc~Vdeu+*GC&V7Pq)ix0!yOv> zEyntn?4T^B{)&8Ae8K!P{SYFt8vHRmn*#}=cj$a9o=t07%URNVH3&2Rn=;$g{H4q) zQ*!L5`+q6(+zN1C;{0Jm$oOY`SZM0PzcI6>`X9_pLIc9g`TYOJ%xw127et@fqux+u z^>%87AmUT~ z&|~Y08vceZhdK-Em^;+^yI5D-UH1Wa25Am0N$>77yaCr3F&tLwrjD6a?TJ!HiGSghn>H3`M&O5hh=If4Q> 
zeZVG;794dG57L6_l;;+1KBV01Op>u=Zh%4g?0a^PmVtmk2coYDEPSTAhB?%2jD-Gz z=^TE)ky`>@RMZfksIak~w25ie#%Qu~jdb^z3op!MMAG#PU`VcWsJ0O9{t3L*-F~b9 z1>R7>BGK0U@@!F#%|9gpw=f{)jQjpy%-NrcJ~<$(Wv!7P5}Sc}YA>x;YO$hAT>3o5 zmZ}Di+^uX68)gE62O~DE#L#OYKUBlQt~<&ll#gbdnh=*+9&$ zWWr#pEJKj-OlbCl?7oAAhmvM0^F)-MzEC4oi;!ZO>w5;C!pHeJ_lKY}O?J;z=MTDe zvzU_TeGYq}QE4RaiIby3#cNal%<%|ui2psu>--?#M8{kLaY0WYcXXw-^XWk3CMCiF zweyDv_1-+EsEW_;84;KnAj`Vh=%ch%cXt5OM;|Hrbn!`cYnSm-(lf9HP=BkgP*`<@%aXpqMP4 z1HJ=WA)xO*`RKZjAutCS<`^LzWdVxlREFHuA@{|1_(m^{!p@d)YMCtJ?i)&l&9uO+~x~Am+2YU9qU;BW)n3S&ObhdEv70!LF zltS9g=US^^ zLa^avU0trW>`W9nDXOOnUI(uFJz*ZaSyc{01!+&5Sm9TJH`1J?9ImfAI66BsP{eo?ULm z&KU?lYu)|}KNlQx4Z@Ls7<6!(`GcS17eV;>FMS>wzA_~NY&d8@_*wTW2tS|qCJv+T zB-U(eZO+b#+Q-PTiY~Jlb^LNv+qAtdYPe2xl%sw##@J&6{des!B9qSBNa#>KPJAb0 zrvbV~@)2`z`pX4nz$@b~j2+whw-~`SPnVmAQ6^gf{r8SP(sf8yJP>b4U8*0<3W!Q_ z#6bVDMRUjif+(3Jjit!L{OO}Nc@t^!$8TmYzx>1*uTrX zEAEV)LQCN=7rK18ppo7lCXPB6u+AkXjD&VYX8l>IfHU<#A@5vIIqaIe{yfX)P*V;Zx5&H0+Q8m#jMg-2RyF| zHK#r+!z-nHlJzo6NUHWr->N38uyzO|iot%&*{;B6Cl;3C{Fb4tUw=~-1stMTLhEr#Bsm~6zOJ!e; z7$+SnTW)Jor9U7~$n0R<3&2B@dwCVnzVy?@?5@{={TGLR(*47suLY$pAzvMO`Y_=* zqP;@UT=HM8NUtkx>3m-6h| zZAz3Qv$;Yv%Pw0Ts{k)>yy7P=IArxz0ZmqRZe{)a1`0kBv_O7j1<1BR{MuTTM?s(%tzn-8RxARO`Jul1JTwH1N*dJs7!N_zGIgK zC>2O{OVA1rBTw~bm_`Ta^R&8`+ZJs5LHU0bcDwFgiq+?!J~}f$TbRyVq!N3l&`aGQ z1E%zKL-tjuH(lIg1=~sB8z&CxZj9&{2bpc}F{5CtI{)NJw$nDfIA)OH=uDD~g#i=)pSAzw-d_IX-g-c}w`J1j804nJ|10+vUj8Td2Fx2GdVzXx z;U@pN^i)*_%Dpl9{gZnuD+T4=cDw&8_lC;&U%5Aiv;QOaX8I@hHt>Ju-rTF2xio5U zEKNv8E84+6yW#Bj6|ds8Xv`Y#0^G8%(!4Bm<4)tCTTjZgA|1+3j3W~0Rtr%KM~vF<_tLD3mDAQa763qsMCgFa(s1S8^@ zcxhn{}z z_lJ&ISZrHOUZ`q#S_#`thkFl^g^6`^JdsX)2L$`3Y3vO*?1f#^HQYwe`*jI0vcIV* zq1e{jC}dg6w0SM|6F8zrWt|Yvj2GW&KhD1ma@^+0?nZAD_z!ulOJ-QgSn}={`5?h( z2KkQzI21pBZYRd}sXCVB+7Z#8!XZ4va6YiH{3_?VhuX;dxs z5R%+Uv=;jky2mF4`gQ4(ANAIY;TWJZ2k=iQx4%zXLfW&L2K176JEO92Iag{+Nd+7$(7I- zJRUT2>P^F_Xc5XE5Nz+x5)GU8Q^TRfQ_^b4p5luHvfaUonMKNd>bS_E&rC$zhzmm% zfRVAy?L>_B;iP^O(d2q=GC!P$zvgzt``x5{dL-zc1IO8>Fh$+bryx*C1|$VYmg~!eo)S{_}=Aup> zyP^`vC5-BOka0!m@MvE-Z+?5^Ct8RKj0*z_B-hPKZS(QH?W|})5l)%)Ncp+!THR`G zjNMTVFrx-UU^PYE`CIyA4eqOQh#v^916S4bj%SwB$@o-di&b$_6{?-2zg%ZGg+}-! zN4BD%%&6w0Fdipb5PzsM<7~123P*M5&~hV}=x~^`5LjZ45;8?7k;h`DET6&;{9NU- zD4j2L#^%o#I7TJ7vB~UYTP8&6-HZM*XT!K725XT&k9kBCP3TxWsdku0h28|YmAja` zOD>F~8)-94O4q!eK;c;o@er}3WD=G- zOJiu__YVgAJaHq@l3_dd&4LLnz@iTSMi_Dgwb!X~F<;co+3kebqjD`;H2H@EIHR!D zTt2pWJ!}}+!Ei0y>glI=Nl35CWxt_#Y_uGsd{tX^0>omfm zl`ib!&m2sLp_C|3o!jjzI-SO&fh$%GPxeGcI7yhjUb+^3x@OjG0W;b8z{x~x?5mIt zQl1;ZDdkGnCYpyId35$n%u+;O76l%r8m8G#L?!jC!C$%x8}+n)!fn&|q4o__w?Utm zh_xcrK3n}tx8ls>^j`{8Dil<;l|*ZA=Ksb_TKfr`T!n&NOAF^bxBUGIrElG%y!pdkatrR?SIJ?`Y4>!APJ|}8EQQ|C`zZwS(jBMEO3v~rfD=`Ub`7c z?}`e4lvEU6&nNB6Cy!yG%KjPW3El7+7U7&rK|n(nA9az&yzp+cRlpk z(|VvCL7<_t21z{`o$x~3VRzp|FiSc%nSCim0kTI5zc7wjB?Wb(GtndEF+U8e$jk4T zL{rc5t{>$UGz$5?PSQHI}dS+I+sTBS&%R}Y3DRUnF>6aaXmEGUl)K`GeUplqJ3 z<%n#^$8U~8*+~__EnMal7E&3Fr_P6QR)Wm2Wb0>158Eg4$*W}BSA+L_+k6@T>2}P; zz`YQ*$n710L0V7&w%?Usto;`poZ(UTcILB-gX#FYr2))KzVCt=uaV*6eP0uaz7b>W z`&^>n0r*gg!GP^eH=3YT0e6m`%&$~7R9lPKT&C*H)+0@&C3gm0--qdS>SL?d0e--y zgm^G&M^N%rNGgkvCX-O;vS0YK5sqN|n}W9)_=FYvP&t^x#;3Ifc~2=R&+a}dmvJ^! 
[GIT binary patch payload elided: unrecoverable base85-encoded binary data]

diff --git a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl
index 3b8aa4f39f7b7..d1daa7b8d4593 100644
---
a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl +++ b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl @@ -97,4 +97,10 @@ record TestEntityInfo includes CustomProperties { "fieldType": "DOUBLE" } doubleField: optional double + + @Searchable = { + "fieldName": "removed", + "fieldType": "BOOLEAN" + } + removed: optional boolean } From 08bdfbdf93da589053f63f412ced820d6d575fb7 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Thu, 25 Jan 2024 15:25:31 -0800 Subject: [PATCH 441/792] feat(ui): Supporting rendering custom assertion descriptions (#9722) Co-authored-by: John Joyce --- .../types/assertion/AssertionMapper.java | 1 + .../src/main/resources/entity.graphql | 5 +++++ .../tabs/Dataset/Validations/Assertions.tsx | 6 +++++- .../Validations/DatasetAssertionDescription.tsx | 17 ++++++++++------- .../Validations/DatasetAssertionsList.tsx | 14 +++++++------- datahub-web-react/src/graphql/assertion.graphql | 1 + 6 files changed, 29 insertions(+), 15 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index 2536f4d2521ee..43b7b5bb102ad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -66,6 +66,7 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } + assertionInfo.setDescription(gmsAssertionInfo.getDescription()); return assertionInfo; } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 2ad4982579380..3ea1b38d3db0d 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -6803,6 +6803,11 @@ type AssertionInfo { Dataset-specific assertion information """ datasetAssertion: DatasetAssertionInfo + + """ + An optional human-readable description of the assertion + """ + description: String } """ diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/Assertions.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/Assertions.tsx index 68660164ee877..b3086d7867012 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/Assertions.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/Assertions.tsx @@ -35,6 +35,8 @@ const getAssertionsStatusSummary = (assertions: Array) => { /** * Component used for rendering the Validations Tab on the Dataset Page. + * + * TODO: Note that only the legacy DATASET assertions are supported for viewing as of today. */ export const Assertions = () => { const { urn, entityData } = useEntityData(); @@ -47,7 +49,9 @@ export const Assertions = () => { const assertions = (combinedData && combinedData.dataset?.assertions?.assertions?.map((assertion) => assertion as Assertion)) || []; - const filteredAssertions = assertions.filter((assertion) => !removedUrns.includes(assertion.urn)); + const filteredAssertions = assertions.filter( + (assertion) => !removedUrns.includes(assertion.urn) && !!assertion.info?.datasetAssertion, + ); // Pre-sort the list of assertions based on which has been most recently executed. 
assertions.sort(sortAssertions); diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionDescription.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionDescription.tsx index a91d11d1e9887..daebfd5597588 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionDescription.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionDescription.tsx @@ -19,6 +19,7 @@ const ViewLogicButton = styled(Button)` `; type Props = { + description?: string; assertionInfo: DatasetAssertionInfo; }; @@ -319,18 +320,20 @@ const TOOLTIP_MAX_WIDTH = 440; * * For example, Column 'X' values are in [1, 2, 3] */ -export const DatasetAssertionDescription = ({ assertionInfo }: Props) => { +export const DatasetAssertionDescription = ({ description, assertionInfo }: Props) => { const { scope, aggregation, fields, operator, parameters, nativeType, nativeParameters, logic } = assertionInfo; const [isLogicVisible, setIsLogicVisible] = useState(false); /** * Build a description component from a) input (aggregation, inputs) b) the operator text */ - const description = ( + const descriptionFragment = ( <> - - {getAggregationText(scope, aggregation, fields)}{' '} - {getOperatorText(operator, parameters || undefined, nativeType || undefined)} - + {description || ( + + {getAggregationText(scope, aggregation, fields)}{' '} + {getOperatorText(operator, parameters || undefined, nativeType || undefined)} + + )} ); @@ -349,7 +352,7 @@ export const DatasetAssertionDescription = ({ assertionInfo }: Props) => { } > -
            {description}
+            {descriptionFragment}
             {logic && (
                 <ViewLogicButton onClick={() =>
    setIsLogicVisible(true)} type="link"> diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionsList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionsList.tsx index 05fc2d1c496db..3eccfb8931fc0 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionsList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Validations/DatasetAssertionsList.tsx @@ -83,6 +83,7 @@ export const DatasetAssertionsList = ({ assertions, onDelete }: Props) => { type: assertion.info?.type, platform: assertion.platform, datasetAssertionInfo: assertion.info?.datasetAssertion, + description: assertion.info?.description, lastExecTime: assertion.runEvents?.runEvents?.length && assertion.runEvents.runEvents[0].timestampMillis, lastExecResult: assertion.runEvents?.runEvents?.length && @@ -101,6 +102,7 @@ export const DatasetAssertionsList = ({ assertions, onDelete }: Props) => { const resultColor = (record.lastExecResult && getResultColor(record.lastExecResult)) || 'default'; const resultText = (record.lastExecResult && getResultText(record.lastExecResult)) || 'No Evaluations'; const resultIcon = (record.lastExecResult && getResultIcon(record.lastExecResult)) || ; + const { description } = record; return (
    @@ -111,7 +113,10 @@ export const DatasetAssertionsList = ({ assertions, onDelete }: Props) => {
-                    <DatasetAssertionDescription assertionInfo={record.datasetAssertionInfo} />
+                    <DatasetAssertionDescription
+                        description={description}
+                        assertionInfo={record.datasetAssertionInfo}
+                    />
    ); }, @@ -146,12 +151,7 @@ export const DatasetAssertionsList = ({ assertions, onDelete }: Props) => { - - } - trigger={['click']} - > + } trigger={['click']}> diff --git a/datahub-web-react/src/graphql/assertion.graphql b/datahub-web-react/src/graphql/assertion.graphql index d4015fcebdb3e..0b64c4c8d6ddd 100644 --- a/datahub-web-react/src/graphql/assertion.graphql +++ b/datahub-web-react/src/graphql/assertion.graphql @@ -46,6 +46,7 @@ fragment assertionDetails on Assertion { } logic } + description } } From 69ff9c3af3da11deb6f915f11820b2489caac6e0 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Thu, 25 Jan 2024 16:51:40 -0800 Subject: [PATCH 442/792] infra(ui): Add a react context provider allowing sub-components to update theme conf (#9674) Co-authored-by: John Joyce --- datahub-web-react/src/App.tsx | 34 +++++-------------- datahub-web-react/src/CustomThemeProvider.tsx | 32 +++++++++++++++++ datahub-web-react/src/customThemeContext.tsx | 10 ++++++ 3 files changed, 50 insertions(+), 26 deletions(-) create mode 100644 datahub-web-react/src/CustomThemeProvider.tsx create mode 100644 datahub-web-react/src/customThemeContext.tsx diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 79c9ee91ceaa1..e8910e7dc2ea8 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -1,20 +1,19 @@ -import React, { useEffect, useState } from 'react'; +import React from 'react'; import Cookies from 'js-cookie'; import { message } from 'antd'; import { BrowserRouter as Router } from 'react-router-dom'; import { ApolloClient, ApolloProvider, createHttpLink, InMemoryCache, ServerError } from '@apollo/client'; import { onError } from '@apollo/client/link/error'; -import { ThemeProvider } from 'styled-components'; import { Helmet, HelmetProvider } from 'react-helmet-async'; import './App.less'; import { Routes } from './app/Routes'; -import { Theme } from './conf/theme/types'; -import defaultThemeConfig from './conf/theme/theme_light.config.json'; import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; import possibleTypesResult from './possibleTypes.generated'; import { ErrorCodes } from './app/shared/constants'; +import CustomThemeProvider from './CustomThemeProvider'; +import { useCustomTheme } from './customThemeContext'; /* Construct Apollo Client @@ -71,33 +70,16 @@ const client = new ApolloClient({ }); export const InnerApp: React.VFC = () => { - const [dynamicThemeConfig, setDynamicThemeConfig] = useState(defaultThemeConfig); - - useEffect(() => { - if (import.meta.env.DEV) { - import(/* @vite-ignore */ `./conf/theme/${import.meta.env.REACT_APP_THEME_CONFIG}`).then((theme) => { - setDynamicThemeConfig(theme); - }); - } else { - // Send a request to the server to get the theme config. 
- fetch(`/assets/conf/theme/${import.meta.env.REACT_APP_THEME_CONFIG}`) - .then((response) => response.json()) - .then((theme) => { - setDynamicThemeConfig(theme); - }); - } - }, []); - return ( - - {dynamicThemeConfig.content.title} - - + + + {useCustomTheme().theme?.content.title} + - + ); }; diff --git a/datahub-web-react/src/CustomThemeProvider.tsx b/datahub-web-react/src/CustomThemeProvider.tsx new file mode 100644 index 0000000000000..f2e2678a90d8c --- /dev/null +++ b/datahub-web-react/src/CustomThemeProvider.tsx @@ -0,0 +1,32 @@ +import React, { useEffect, useState } from 'react'; +import { ThemeProvider } from 'styled-components'; +import { Theme } from './conf/theme/types'; +import defaultThemeConfig from './conf/theme/theme_light.config.json'; +import { CustomThemeContext } from './customThemeContext'; + +const CustomThemeProvider = ({ children }: { children: React.ReactNode }) => { + const [currentTheme, setTheme] = useState(defaultThemeConfig); + + useEffect(() => { + if (import.meta.env.DEV) { + import(/* @vite-ignore */ `./conf/theme/${import.meta.env.REACT_APP_THEME_CONFIG}`).then((theme) => { + setTheme(theme); + }); + } else { + // Send a request to the server to get the theme config. + fetch(`/assets/conf/theme/${import.meta.env.REACT_APP_THEME_CONFIG}`) + .then((response) => response.json()) + .then((theme) => { + setTheme(theme); + }); + } + }, []); + + return ( + + {children} + + ); +}; + +export default CustomThemeProvider; diff --git a/datahub-web-react/src/customThemeContext.tsx b/datahub-web-react/src/customThemeContext.tsx new file mode 100644 index 0000000000000..0b273d0024885 --- /dev/null +++ b/datahub-web-react/src/customThemeContext.tsx @@ -0,0 +1,10 @@ +import React, { useContext } from 'react'; + +export const CustomThemeContext = React.createContext<{ + theme: any; + updateTheme: (theme: any) => void; +}>({ theme: undefined, updateTheme: (_) => null }); + +export function useCustomTheme() { + return useContext(CustomThemeContext); +} From f7f0b14f376cad8aa3951efd305fcd15a1f01966 Mon Sep 17 00:00:00 2001 From: tom Date: Fri, 26 Jan 2024 02:51:41 +0100 Subject: [PATCH 443/792] fix(ingestion/metabase): Fetch Dashboards through Collections (#9631) Co-authored-by: Harshal Sheth --- metadata-ingestion/developing.md | 2 +- .../docs/sources/metabase/metabase.md | 2 +- .../src/datahub/ingestion/source/metabase.py | 47 +- .../metabase/metabase_mces_golden.json | 61 +- .../metabase/setup/collection_dashboards.json | 1 + .../metabase/setup/collections.json | 1 + .../integration/metabase/setup/dashboard.json | 40 - .../metabase/setup/dashboard_1.json | 1084 ++++++++++++----- .../integration/metabase/test_metabase.py | 8 +- 9 files changed, 901 insertions(+), 345 deletions(-) create mode 100644 metadata-ingestion/tests/integration/metabase/setup/collection_dashboards.json create mode 100644 metadata-ingestion/tests/integration/metabase/setup/collections.json delete mode 100644 metadata-ingestion/tests/integration/metabase/setup/dashboard.json diff --git a/metadata-ingestion/developing.md b/metadata-ingestion/developing.md index d1eef21974f1d..fc3a689124b2c 100644 --- a/metadata-ingestion/developing.md +++ b/metadata-ingestion/developing.md @@ -10,7 +10,7 @@ Also take a look at the guide to [adding a source](./adding-source.md). ### Requirements 1. Python 3.7+ must be installed in your host environment. -2. Java8 (gradle won't work with newer versions) +2. Java 17 (gradle won't work with newer or older versions) 4. 
On Debian/Ubuntu: `sudo apt install python3-dev python3-venv` 5. On Fedora (if using LDAP source integration): `sudo yum install openldap-devel` diff --git a/metadata-ingestion/docs/sources/metabase/metabase.md b/metadata-ingestion/docs/sources/metabase/metabase.md index a76786f7e5853..68422b8decce9 100644 --- a/metadata-ingestion/docs/sources/metabase/metabase.md +++ b/metadata-ingestion/docs/sources/metabase/metabase.md @@ -19,4 +19,4 @@ The key in this map must be string, not integer although Metabase API provides If `database_id_to_instance_map` is not specified, `platform_instance_map` is used for platform instance mapping. If none of the above are specified, platform instance is not used when constructing `urn` when searching for dataset relations. ## Compatibility -Metabase version [v0.41.2](https://www.metabase.com/start/oss/) +Metabase version [v0.48.3](https://www.metabase.com/start/oss/) diff --git a/metadata-ingestion/src/datahub/ingestion/source/metabase.py b/metadata-ingestion/src/datahub/ingestion/source/metabase.py index af41a74f311f6..d22bfb2b8b52f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metabase.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metabase.py @@ -90,10 +90,17 @@ class MetabaseSource(Source): """ This plugin extracts Charts, dashboards, and associated metadata. This plugin is in beta and has only been tested on PostgreSQL and H2 database. - ### Dashboard - [/api/dashboard](https://www.metabase.com/docs/latest/api-documentation.html#dashboard) endpoint is used to - retrieve the following dashboard information. + ### Collection + + [/api/collection](https://www.metabase.com/docs/latest/api/collection) endpoint is used to + retrieve the available collections. + + [/api/collection//items?models=dashboard](https://www.metabase.com/docs/latest/api/collection#get-apicollectioniditems) endpoint is used to retrieve a given collection and list their dashboards. + + ### Dashboard + + [/api/dashboard/](https://www.metabase.com/docs/latest/api/dashboard) endpoint is used to retrieve a given Dashboard and grab its information. 
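Editor's note: the three endpoints above chain together: list the collections, list each collection's dashboards, then fetch each dashboard by id. Below is a minimal TypeScript sketch of that flow, for illustration only; the connector itself does this in Python through an authenticated `requests` session, so the base URL and the omitted auth headers here are assumptions.

// Sketch only: walk collections -> dashboards -> dashcards and collect the
// chart (card) ids, mirroring the endpoint flow described above.
const MB = 'http://localhost:3000'; // assumption: local unauthenticated Metabase

async function getJson(path: string): Promise<any> {
    const res = await fetch(`${MB}${path}`);
    if (!res.ok) throw new Error(`HTTP ${res.status} for ${path}`);
    return res.json();
}

async function collectChartIds(): Promise<number[]> {
    const chartIds: number[] = [];
    for (const collection of await getJson('/api/collection/')) {
        // models=dashboard restricts the listing to dashboards only
        const items = await getJson(`/api/collection/${collection.id}/items?models=dashboard`);
        for (const dashboard of items.data ?? []) {
            const details = await getJson(`/api/dashboard/${dashboard.id}`);
            for (const dashcard of details.dashcards ?? []) {
                chartIds.push(dashcard.card.id); // each dashcard embeds its card
            }
        }
    }
    return chartIds;
}

The fields the connector actually extracts from each dashboard payload are listed next.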
- Title and description - Last edited by @@ -187,19 +194,29 @@ def close(self) -> None: def emit_dashboard_mces(self) -> Iterable[MetadataWorkUnit]: try: - dashboard_response = self.session.get( - f"{self.config.connect_uri}/api/dashboard" + collections_response = self.session.get( + f"{self.config.connect_uri}/api/collection/" ) - dashboard_response.raise_for_status() - dashboards = dashboard_response.json() + collections_response.raise_for_status() + collections = collections_response.json() - for dashboard_info in dashboards: - dashboard_snapshot = self.construct_dashboard_from_api_data( - dashboard_info + for collection in collections: + collection_dashboards_response = self.session.get( + f"{self.config.connect_uri}/api/collection/{collection['id']}/items?models=dashboard" ) - if dashboard_snapshot is not None: - mce = MetadataChangeEvent(proposedSnapshot=dashboard_snapshot) - yield MetadataWorkUnit(id=dashboard_snapshot.urn, mce=mce) + collection_dashboards_response.raise_for_status() + collection_dashboards = collection_dashboards_response.json() + + if not collection_dashboards.get("data"): + continue + + for dashboard_info in collection_dashboards.get("data"): + dashboard_snapshot = self.construct_dashboard_from_api_data( + dashboard_info + ) + if dashboard_snapshot is not None: + mce = MetadataChangeEvent(proposedSnapshot=dashboard_snapshot) + yield MetadataWorkUnit(id=dashboard_snapshot.urn, mce=mce) except HTTPError as http_error: self.report.report_failure( @@ -254,10 +271,10 @@ def construct_dashboard_from_api_data( ) chart_urns = [] - cards_data = dashboard_details.get("ordered_cards", "{}") + cards_data = dashboard_details.get("dashcards", {}) for card_info in cards_data: chart_urn = builder.make_chart_urn( - self.platform, card_info.get("card_id", "") + self.platform, card_info.get("card").get("id", "") ) chart_urns.append(chart_urn) diff --git a/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json b/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json index 9b143348fdf60..10c1c312a4d1c 100644 --- a/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json +++ b/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json @@ -191,20 +191,73 @@ "description": "", "charts": [ "urn:li:chart:(metabase,1)", - "urn:li:chart:(metabase,2)" + "urn:li:chart:(metabase,2)", + "urn:li:chart:(metabase,3)" ], "datasets": [], "lastModified": { "created": { - "time": 1639417721742, + "time": 1705398694904, "actor": "urn:li:corpuser:admin@metabase.com" }, "lastModified": { - "time": 1639417721742, + "time": 1705398694904, "actor": "urn:li:corpuser:admin@metabase.com" } }, - "dashboardUrl": "http://localhost:3000/dashboard/1" + "dashboardUrl": "http://localhost:3000/dashboard/10" + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:admin@metabase.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1636614000000, + "runId": "metabase-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DashboardSnapshot": { + "urn": "urn:li:dashboard:(metabase,1)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dashboard.DashboardInfo": { + "customProperties": {}, + "title": "Dashboard 1", + "description": "", + "charts": [ + "urn:li:chart:(metabase,1)", + "urn:li:chart:(metabase,2)", + 
"urn:li:chart:(metabase,3)" + ], + "datasets": [], + "lastModified": { + "created": { + "time": 1705398694904, + "actor": "urn:li:corpuser:admin@metabase.com" + }, + "lastModified": { + "time": 1705398694904, + "actor": "urn:li:corpuser:admin@metabase.com" + } + }, + "dashboardUrl": "http://localhost:3000/dashboard/10" } }, { diff --git a/metadata-ingestion/tests/integration/metabase/setup/collection_dashboards.json b/metadata-ingestion/tests/integration/metabase/setup/collection_dashboards.json new file mode 100644 index 0000000000000..b602d2dfb7dcd --- /dev/null +++ b/metadata-ingestion/tests/integration/metabase/setup/collection_dashboards.json @@ -0,0 +1 @@ +{"total": 1, "data": [{"description": null, "collection_position": null, "database_id": null, "name": "This is a test", "id": 10, "entity_id": "Q4gEaOmoBkfQX3_gXiH9g", "last-edit-info": {"id": 14, "last_name": "Doe", "first_name": "John", "email": "john.doe@somewhere.com", "timestamp": "2024-01-12T14:55:38.43304Z"}, "model": "dashboard"}], "models": ["dashboard"], "limit": null, "offset": null} diff --git a/metadata-ingestion/tests/integration/metabase/setup/collections.json b/metadata-ingestion/tests/integration/metabase/setup/collections.json new file mode 100644 index 0000000000000..a8a98c4e6d62e --- /dev/null +++ b/metadata-ingestion/tests/integration/metabase/setup/collections.json @@ -0,0 +1 @@ +[{"authority_level": null, "can_write": true, "name": "Our analytics", "effective_ancestors": [], "effective_location": null, "parent_id": null, "id": "root", "is_personal": false}, {"authority_level": null, "description": null, "archived": false, "slug": "john_doe_personal_collection", "can_write": true, "name": "John Doe", "personal_owner_id": 14, "type": null, "id": 150, "entity_id": "kdLA_-CQy4F5lL15k8-TU", "location": "/", "namespace": null, "is_personal": true, "created_at": "2024-01-12T11:51:24.394309Z"}] diff --git a/metadata-ingestion/tests/integration/metabase/setup/dashboard.json b/metadata-ingestion/tests/integration/metabase/setup/dashboard.json deleted file mode 100644 index 095abf1bbdc6d..0000000000000 --- a/metadata-ingestion/tests/integration/metabase/setup/dashboard.json +++ /dev/null @@ -1,40 +0,0 @@ -[{ - "description": null, - "archived": false, - "collection_position": null, - "creator": { - "email": "admin@metabase.com", - "first_name": "FirstName", - "last_login": "2021-12-13T18:51:32.999", - "is_qbnewb": true, - "is_superuser": true, - "id": 1, - "last_name": "LastName", - "date_joined": "2021-12-13T07:34:21.806", - "common_name": "FirstName LastName" - }, - "enable_embedding": false, - "collection_id": null, - "show_in_getting_started": false, - "name": "Dashboard 1", - "caveats": null, - "creator_id": 1, - "updated_at": "2021-12-13T17:48:41.735", - "made_public_by_id": null, - "embedding_params": null, - "cache_ttl": null, - "id": 1, - "position": null, - "last-edit-info": { - "id": 1, - "email": "admin@metabase.com", - "first_name": "FirstName", - "last_name": "LastName", - "timestamp": "2021-12-13T17:48:41.742" - }, - "parameters": [], - "favorite": false, - "created_at": "2021-12-13T17:46:48.185", - "public_uuid": null, - "points_of_interest": null -}] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json b/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json index 288087a67da6d..e968093c43850 100644 --- a/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json +++ 
b/metadata-ingestion/tests/integration/metabase/setup/dashboard_1.json @@ -2,332 +2,854 @@ "description": null, "archived": false, "collection_position": null, - "ordered_cards": [{ - "sizeX": 4, - "series": [], - "collection_authority_level": null, - "card": { - "description": null, - "archived": false, - "collection_position": null, - "table_id": null, - "result_metadata": [{ - "name": "customer_id", - "display_name": "customer_id", - "base_type": "type/Integer", - "effective_type": "type/Integer", - "field_ref": ["field", "customer_id", { - "base-type": "type/Integer" - }], - "semantic_type": null, - "fingerprint": { - "global": { - "distinct-count": 517, - "nil%": 0.0 + "dashcards": [ + { + "size_x": 12, + "dashboard_tab_id": null, + "series": [], + "action_id": null, + "collection_authority_level": null, + "card": { + "description": null, + "archived": false, + "collection_position": null, + "table_id": null, + "result_metadata": [ + { + "display_name": "EVENT_DATE", + "field_ref": [ + "field", + "EVENT_DATE", + { + "base-type": "type/Date" + } + ], + "name": "EVENT_DATE", + "base_type": "type/Date", + "effective_type": "type/Date", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/DateTime": { + "earliest": "2023-12-04T00:00:00Z", + "latest": "2024-01-15T00:00:00Z" + } + } + } }, - "type": { - "type/Number": { - "min": 1.0, - "q1": 127.95550051624855, - "q3": 457.48181481488376, - "max": 599.0, - "sd": 183.35453319901166, - "avg": 293.316 + { + "display_name": "AND_VIEWERS", + "field_ref": [ + "field", + "AND_VIEWERS", + { + "base-type": "type/Number" + } + ], + "name": "AND_VIEWERS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 4720, + "q1": 5083.5, + "q3": 9003, + "max": 10560, + "sd": 2090.2420089751945, + "avg": 6688.214285714285 + } + } } - } - } - }, { - "name": "first_name", - "display_name": "first_name", - "base_type": "type/Text", - "effective_type": "type/Text", - "field_ref": ["field", "first_name", { - "base-type": "type/Text" - }], - "semantic_type": "type/Name", - "fingerprint": { - "global": { - "distinct-count": 509, - "nil%": 0.0 }, - "type": { - "type/Text": { - "percent-json": 0.0, - "percent-url": 0.0, - "percent-email": 0.0, - "percent-state": 0.0035, - "average-length": 5.629 + { + "display_name": "AND_REDACTED", + "field_ref": [ + "field", + "AND_REDACTED", + { + "base-type": "type/Number" + } + ], + "name": "AND_REDACTED", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 948, + "q1": 2019.5, + "q3": 2500.5, + "max": 3180, + "sd": 460.56365857271413, + "avg": 2251.0714285714284 + } + } } - } - } - }, { - "name": "last_name", - "display_name": "last_name", - "base_type": "type/Text", - "effective_type": "type/Text", - "field_ref": ["field", "last_name", { - "base-type": "type/Text" - }], - "semantic_type": "type/Name", - "fingerprint": { - "global": { - "distinct-count": 517, - "nil%": 0.0 }, - "type": { - "type/Text": { - "percent-json": 0.0, - "percent-url": 0.0, - "percent-email": 0.0, - "percent-state": 0.0015, - "average-length": 6.126 + { + "display_name": "AND_REDACTED", + "field_ref": [ + "field", + "AND_REDACTED", + { + "base-type": "type/Number" + } + ], + "name": 
"AND_REDACTED", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 3545, + "q1": 10909, + "q3": 13916, + "max": 18861, + "sd": 3132.780684756446, + "avg": 12122.32142857143 + } + } } - } - } - }, { - "name": "amount", - "display_name": "amount", - "base_type": "type/Decimal", - "effective_type": "type/Decimal", - "field_ref": ["field", "amount", { - "base-type": "type/Decimal" - }], - "semantic_type": null, - "fingerprint": { - "global": { - "distinct-count": 11, - "nil%": 0.0 }, - "type": { - "type/Number": { - "min": 0.99, - "q1": 2.399411317392306, - "q3": 5.52734176879965, - "max": 10.99, - "sd": 2.352151368009511, - "avg": 4.1405 + { + "display_name": "IOS_VIEWERS", + "field_ref": [ + "field", + "IOS_VIEWERS", + { + "base-type": "type/Number" + } + ], + "name": "IOS_VIEWERS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 6477, + "q1": 7481.5, + "q3": 10428.5, + "max": 13182, + "sd": 1948.047456520796, + "avg": 9075.17857142857 + } + } } - } - } - }, { - "name": "payment_date", - "display_name": "payment_date", - "base_type": "type/DateTime", - "effective_type": "type/DateTime", - "field_ref": ["field", "payment_date", { - "base-type": "type/DateTime" - }], - "semantic_type": null, - "fingerprint": { - "global": { - "distinct-count": 1998, - "nil%": 0.0 }, - "type": { - "type/DateTime": { - "earliest": "2007-02-14T21:21:59.996577Z", - "latest": "2007-02-21T19:27:46.996577Z" + { + "display_name": "IOS_REDACTED", + "field_ref": [ + "field", + "IOS_REDACTED", + { + "base-type": "type/Number" + } + ], + "name": "IOS_REDACTED", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 1470, + "q1": 3020, + "q3": 3806, + "max": 4670, + "sd": 665.7415088559197, + "avg": 3415.8571428571427 + } + } } - } - } - }, { - "name": "rental_id", - "display_name": "rental_id", - "base_type": "type/Integer", - "effective_type": "type/Integer", - "field_ref": ["field", "rental_id", { - "base-type": "type/Integer" - }], - "semantic_type": null, - "fingerprint": { - "global": { - "distinct-count": 2000, - "nil%": 0.0 }, - "type": { - "type/Number": { - "min": 1158.0, - "q1": 1731.7967120913397, - "q3": 2871.359273326854, - "max": 4591.0, - "sd": 660.7468728104022, - "avg": 2303.4565 + { + "display_name": "IOS_REDACTED", + "field_ref": [ + "field", + "IOS_REDACTED", + { + "base-type": "type/Number" + } + ], + "name": "IOS_REDACTED", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 4872, + "q1": 15019.5, + "q3": 20457, + "max": 27466, + "sd": 4688.492913816769, + "avg": 17683.89285714286 + } + } + } + }, + { + "display_name": "IOS_REDACTED/IOS_VIEWERS", + "field_ref": [ + "field", + "IOS_REDACTED/IOS_VIEWERS", + { + "base-type": "type/Number" + } + ], + "name": "IOS_REDACTED/IOS_VIEWERS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 0.662587, + "q1": 
1.8403745, + "q3": 2.241517, + "max": 2.576166, + "sd": 0.4488826998266724, + "avg": 1.974007857142857 + } + } + } + }, + { + "display_name": "AND_REDACTED/AND_VIEWERS", + "field_ref": [ + "field", + "AND_REDACTED/AND_VIEWERS", + { + "base-type": "type/Number" + } + ], + "name": "AND_REDACTED/AND_VIEWERS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 0.671656, + "q1": 1.3536655, + "q3": 2.5325145, + "max": 3.097553, + "sd": 0.6816847359625038, + "avg": 1.93937275 + } + } + } + }, + { + "display_name": "IOS_REDACTED/IOS_VIEWERS", + "field_ref": [ + "field", + "IOS_REDACTED/IOS_VIEWERS", + { + "base-type": "type/Number" + } + ], + "name": "IOS_REDACTED/IOS_VIEWERS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 0.199918, + "q1": 0.34496099999999996, + "q3": 0.4352085, + "max": 0.47286, + "sd": 0.06928869477079941, + "avg": 0.3833206785714286 + } + } + } + }, + { + "display_name": "AND_REDACTED/AND_VIEWERS", + "field_ref": [ + "field", + "AND_REDACTED/AND_VIEWERS", + { + "base-type": "type/Number" + } + ], + "name": "AND_REDACTED/AND_VIEWERS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 28, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 0.179613, + "q1": 0.245343, + "q3": 0.475772, + "max": 0.522253, + "sd": 0.11732033433182058, + "avg": 0.3620892142857142 + } + } } } - } - }], - "database_id": 2, - "enable_embedding": false, - "collection_id": null, - "query_type": "native", - "name": "Customer Payment", - "query_average_duration": 820, - "creator_id": 1, - "moderation_reviews": [], - "updated_at": "2021-12-13T17:48:40.478", - "made_public_by_id": null, - "embedding_params": null, - "cache_ttl": null, - "dataset_query": { - "type": "native", - "native": { - "query": "SELECT\n\tcustomer.customer_id,\n\tfirst_name,\n\tlast_name,\n\tamount,\n\tpayment_date,\n\trental_id\nFROM\n\tcustomer\nINNER JOIN payment \n ON payment.customer_id = customer.customer_id\nORDER BY payment_date", - "template-tags": {} + ], + "can_write": true, + "database_id": 3, + "enable_embedding": false, + "collection_id": 112, + "query_type": "native", + "name": "REDACTED iOS vs. Android", + "query_average_duration": 50982, + "creator_id": 42, + "moderation_reviews": [], + "updated_at": "2024-01-16T13:34:29.916717Z", + "made_public_by_id": null, + "embedding_params": null, + "cache_ttl": null, + "dataset_query": { + "type": "native", + "native": { + "query": "-- 1. Table with redacted search users Android\n-- 2. Table with redacted search users iOS \n-- 3. Redacted from Android redacted\n-- 4. redacted from iOS\n-- 5. Compare the numbers iOS vs. Android\n\n\n-- 1. Table with redacted search users Android (to include date, platform, auth_account_id)\n-- 2. Table with redacted search users iOS (to include date, platform, auth_account_id)\n-- 3. Redacted from Android redacted (to include date, platform, count of redacted)\n-- 4. Redacted from iOS redacted (to include date, plaform, count of redacted)\n-- 5. Compare the numbers iOS vs. 
Android\n\nwith AND_viewers as \n(\nselect event_date, platform, auth_account_id \nfrom TEAMS_PRD.REDACTED.MRT_CURR__MPARTICLE_SCREEN_VIEWS\nwhere screen_name='redacted_search'\nand event_date>'2023-12-01'\nand platform='Android'\nand dayofweekiso(event_date) NOT IN (6,7)\ngroup by event_date, platform, auth_account_id\norder by event_date desc\n), \niOS_viewers as \n(\nselect event_date, platform, auth_account_id \nfrom TEAMS_PRD.REDACTED.MRT_CURR__MPARTICLE_SCREEN_VIEWS\nwhere screen_name='redacted_search'\nand event_date>'2023-12-01'\nand platform='iOS'\nand dayofweekiso(event_date) NOT IN (6,7)\ngroup by event_date, platform, auth_account_id\norder by event_date desc\n), \nAND_redacted as\n(\nselect redacted_ts::date as redacted_date, platform, count(distinct at.auth_account_id) as AND_redacted, count(group_redacted_id) as AND_redacted\nfrom TEAMS_PRD.REDACTED.MRT_CURR__REDACTED_CUSTOMER at\njoin AND_viewers av on av.event_date=at.redacted_ts::date and av.auth_account_id=at.auth_account_id\nwhere instrument_type='REDACTED'\ngroup by 1,2\norder by 1 desc\n), \niOS_redacted as\n(\nselect redacted_ts::date as redacted_date, platform, count(distinct it.auth_account_id) as iOS_redacted, count(group_redacted_id) as iOS_redacted\nfrom TEAMS_PRD.REDACTED.MRT_CURR__REDACTED_CUSTOMER it\njoin iOS_viewers iv on iv.event_date=it.redacted_ts::date and iv.auth_account_id=it.auth_account_id\nwhere instrument_type='REDACTED'\ngroup by 1,2\norder by 1 desc\n)\nselect a.event_date, count(distinct a.auth_account_id) as AND_viewers, AND_redacted, AND_redacted, count(distinct i.auth_account_id) as iOS_viewers, iOS_redacted, iOS_redacted, iOS_redacted/iOS_viewers, AND_redacted/AND_viewers, iOS_redacted/iOS_viewers, AND_redacted/AND_viewers\nfrom AND_VIEWERS a\njoin AND_redacted at\non a.event_date=at.redacted_date\njoin ios_viewers i\non a.event_date=i.event_date\njoin ios_redacted it\non i.event_date=it.redacted_date\ngroup by 1, 3, 4, 6, 7\norder by 1 desc\n\n\n", + "template-tags": {} + }, + "database": 3 }, - "database": 2 - }, - "id": 1, - "display": "table", - "visualization_settings": { - "table.pivot_column": "amount", - "table.cell_column": "customer_id" + "id": 1, + "parameter_mappings": [], + "display": "line", + "entity_id": "DhQgvvtTEarZH8yQBlqES", + "collection_preview": true, + "visualization_settings": { + "graph.dimensions": [ + "EVENT_DATE" + ], + "series_settings": { + "IOS_REDACTED/IOS_VIEWERS": { + "axis": "right" + }, + "AND_REDACTED/AND_VIEWERS": { + "axis": "right" + } + }, + "graph.metrics": [ + "IOS_REDACTED/IOS_VIEWERS", + "AND_REDACTED/AND_VIEWERS", + "AND_VIEWERS", + "IOS_VIEWERS" + ] + }, + "metabase_version": "v0.48.3 (80d8323)", + "parameters": [], + "dataset": false, + "created_at": "2024-01-16T09:44:49.407327Z", + "public_uuid": null }, - "created_at": "2021-12-13T17:46:32.77", - "public_uuid": null + "updated_at": "2024-01-16T09:45:45.410379Z", + "col": 0, + "id": 12, + "parameter_mappings": [], + "card_id": 1, + "entity_id": "tA9M9vJlTHG0KxQnvknKW", + "visualization_settings": {}, + "size_y": 6, + "dashboard_id": 1, + "created_at": "2024-01-16T09:45:45.410379Z", + "row": 0 }, - "updated_at": "2021-12-13T17:48:41.68", - "col": 0, - "id": 1, - "parameter_mappings": [], - "card_id": 1, - "visualization_settings": {}, - "dashboard_id": 1, - "created_at": "2021-12-13T17:46:52.278", - "sizeY": 4, - "row": 0 - }, { - "sizeX": 4, - "series": [], - "collection_authority_level": null, - "card": { - "description": null, - "archived": false, - "collection_position": null, - 
"table_id": 21, - "result_metadata": [{ - "semantic_type": "type/Category", - "coercion_strategy": null, - "name": "rating", - "field_ref": ["field", 131, null], - "effective_type": "type/*", - "id": 131, - "display_name": "Rating", - "fingerprint": { - "global": { - "distinct-count": 5, - "nil%": 0.0 + { + "size_x": 12, + "dashboard_tab_id": null, + "series": [], + "action_id": null, + "collection_authority_level": null, + "card": { + "description": null, + "archived": false, + "collection_position": null, + "table_id": null, + "result_metadata": [ + { + "display_name": "CALENDAR_DATE", + "field_ref": [ + "field", + "CALENDAR_DATE", + { + "base-type": "type/Date" + } + ], + "name": "CALENDAR_DATE", + "base_type": "type/Date", + "effective_type": "type/Date", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 30, + "nil%": 0 + }, + "type": { + "type/DateTime": { + "earliest": "2023-12-17T00:00:00Z", + "latest": "2024-01-15T00:00:00Z" + } + } + } + }, + { + "display_name": "REDACTED", + "field_ref": [ + "field", + "REDACTED", + { + "base-type": "type/Number" + } + ], + "name": "REDACTED", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 27, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 682175, + "q1": 738644, + "q3": 805974, + "max": 847312, + "sd": 46783.99996291344, + "avg": 775505.5666666667 + } + } + } }, - "type": { - "type/Text": { - "percent-json": 0.0, - "percent-url": 0.0, - "percent-email": 0.0, - "percent-state": 0.0, - "average-length": 2.926 + { + "display_name": "REDACTEDRS", + "field_ref": [ + "field", + "REDACTEDRS", + { + "base-type": "type/Number" + } + ], + "name": "REDACTEDRS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 27, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 46173, + "q1": 47556.94427191, + "q3": 48890, + "max": 50769, + "sd": 1164.9989906758983, + "avg": 48354.8 + } + } + } + }, + { + "display_name": "REDACTED/REDACTEDRS", + "field_ref": [ + "field", + "REDACTED/REDACTEDRS", + { + "base-type": "type/Number" + } + ], + "name": "REDACTED/REDACTEDRS", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 27, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 14.706168, + "q1": 15.398378, + "q3": 16.920933, + "max": 17.289964, + "sd": 0.8020030995826715, + "avg": 16.033017833333336 + } + } } } + ], + "can_write": true, + "database_id": 3, + "enable_embedding": false, + "collection_id": 112, + "query_type": "native", + "name": "Redacted redacted per redacted user", + "query_average_duration": 20433, + "creator_id": 1, + "moderation_reviews": [], + "updated_at": "2024-01-16T13:34:29.916788Z", + "made_public_by_id": null, + "embedding_params": null, + "cache_ttl": null, + "dataset_query": { + "type": "native", + "native": { + "query": "with dd as (\nselect distinct calendar_date as calendar_date from TEAMS_PRD.DATA_PLATFORM_MART.MRT__CALENDAR_DATES\nwhere calendar_date>'2022-01-01'\n), \nredacted as\n(\nselect dd.calendar_date, count(distinct auth_account_id) as redacted, max(redacted_ts), min(redacted_ts)\nfrom TEAMS_PRD.REDACTED.MRT_CURR__REDACTED_CUSTOMER t\njoin dd on redacted_ts::date BETWEEN dd.calendar_date-29 and dd.calendar_date\nwhere redacted_type='REGULAR'\nand instrument_type = 'REDACTED'\ngroup by dd.calendar_date\norder 
by dd.calendar_date desc\n),\nredacted as\n(\nselect dd.calendar_date, count(group_redacted_id) as redacted, max(redacted_ts), min(redacted_ts)\nfrom TEAMS_PRD.REDACTED.MRT_CURR__REDACTED_CUSTOMER t\njoin dd on redacted_ts::date BETWEEN dd.calendar_date-29 and dd.calendar_date\nwhere redacted_type='REGULAR'\nand instrument_type = 'REDACTED'\ngroup by dd.calendar_date\norder by dd.calendar_date desc\n)\nselect dd.calendar_date, redacted, redacted, redacted/redacted\nfrom dd\njoin redacted t on dd.calendar_date=t.calendar_date\njoin redacted tr on dd.calendar_date=tr.calendar_date\ngroup by dd.calendar_date, redacted, redacted, redacted/redacted\norder by dd.calendar_date desc \nlimit 30", + "template-tags": {} + }, + "database": 3 + }, + "id": 2, + "parameter_mappings": [], + "display": "line", + "entity_id": "b1jUcPcQM0XFMuviv4g3K", + "collection_preview": true, + "visualization_settings": { + "graph.dimensions": [ + "CALENDAR_DATE" + ], + "series_settings": { + "REDACTEDRS": { + "axis": "right" + } + }, + "graph.metrics": [ + "REDACTED/REDACTEDRS", + "REDACTEDRS" + ] }, - "base_type": "type/PostgresEnum" - }, { - "name": "count", - "display_name": "Count", - "base_type": "type/BigInteger", - "effective_type": "type/BigInteger", - "semantic_type": "type/Quantity", - "field_ref": ["aggregation", 0], - "fingerprint": { - "global": { - "distinct-count": 5, - "nil%": 0.0 + "metabase_version": "v0.48.3 (80d8323)", + "parameters": [], + "dataset": false, + "created_at": "2024-01-16T09:50:09.487369Z", + "public_uuid": null + }, + "updated_at": "2024-01-16T09:50:34.394488Z", + "col": 12, + "id": 1, + "parameter_mappings": [], + "card_id": 2, + "entity_id": "lXypX5aa14HjkN_Im82C2", + "visualization_settings": {}, + "size_y": 6, + "dashboard_id": 1, + "created_at": "2024-01-16T09:50:34.394488Z", + "row": 0 + }, + { + "size_x": 12, + "dashboard_tab_id": null, + "series": [], + "action_id": null, + "collection_authority_level": null, + "card": { + "description": null, + "archived": false, + "collection_position": null, + "table_id": null, + "result_metadata": [ + { + "display_name": "EVENT_DATE", + "field_ref": [ + "field", + "EVENT_DATE", + { + "base-type": "type/Date" + } + ], + "name": "EVENT_DATE", + "base_type": "type/Date", + "effective_type": "type/Date", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 11, + "nil%": 0 + }, + "type": { + "type/DateTime": { + "earliest": "2024-01-01T00:00:00Z", + "latest": "2024-01-15T00:00:00Z" + } + } + } + }, + { + "display_name": "KNOCKOUT", + "field_ref": [ + "field", + "KNOCKOUT", + { + "base-type": "type/Number" + } + ], + "name": "KNOCKOUT", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 11, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 175, + "q1": 853.75, + "q3": 1116.75, + "max": 1174, + "sd": 296.0767713709648, + "avg": 916.3636363636364 + } + } + } + }, + { + "display_name": "EXPIRY", + "field_ref": [ + "field", + "EXPIRY", + { + "base-type": "type/Number" + } + ], + "name": "EXPIRY", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 10, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 78, + "q1": 295.5, + "q3": 408.3925271309261, + "max": 431, + "sd": 105.10704500218294, + "avg": 336.90909090909093 + } + } + } }, - "type": { - "type/Number": { - "min": 178.0, - "q1": 190.0, - "q3": 213.25, - "max": 223.0, - "sd": 
17.131841699011815, - "avg": 200.0 + { + "display_name": "PRODUCT", + "field_ref": [ + "field", + "PRODUCT", + { + "base-type": "type/Number" + } + ], + "name": "PRODUCT", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 9, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 57, + "q1": 163.75, + "q3": 233, + "max": 255, + "sd": 59.31119777763877, + "avg": 195.27272727272728 + } + } + } + }, + { + "display_name": "ISSUER", + "field_ref": [ + "field", + "ISSUER", + { + "base-type": "type/Number" + } + ], + "name": "ISSUER", + "base_type": "type/Number", + "effective_type": "type/Number", + "semantic_type": null, + "fingerprint": { + "global": { + "distinct-count": 10, + "nil%": 0 + }, + "type": { + "type/Number": { + "min": 43, + "q1": 214, + "q3": 292.25, + "max": 304, + "sd": 79.35879397910594, + "avg": 245.72727272727272 + } + } } } - } - }], - "database_id": 2, - "enable_embedding": false, - "collection_id": null, - "query_type": "query", - "name": "Films, Count, Grouped by Rating, Filtered by Release Year, Sorted by [Unknown Field] descending", - "query_average_duration": 25, - "creator_id": 1, - "moderation_reviews": [], - "updated_at": "2021-12-13T17:48:39.999", - "made_public_by_id": null, - "embedding_params": null, - "cache_ttl": null, - "dataset_query": { - "query": { - "source-table": 21, - "breakout": [ - ["field", 131, null] - ], - "aggregation": [ - ["count"] - ], - "order-by": [ - ["desc", ["aggregation", 0]] + ], + "can_write": true, + "database_id": 3, + "enable_embedding": false, + "collection_id": 112, + "query_type": "native", + "name": "Filter popularity", + "query_average_duration": 2830, + "creator_id": 1, + "moderation_reviews": [], + "updated_at": "2024-01-16T13:34:30.128815Z", + "made_public_by_id": null, + "embedding_params": null, + "cache_ttl": null, + "dataset_query": { + "type": "native", + "native": { + "query": "with issuer as\n(\n select event_date, count(*) as issuer_clicks, count(distinct auth_account_id) as issuer\n from TEAMS_PRD.REDACTED.MRT_CURR__MPARTICLE_EVENTS\n where event_name='redacted_search_filter_button_tapped' \n and event_attributes:filter_option::varchar='issuer'\n and event_date>'2023-12-31'\n and platform='Android'\n and dayofweekiso(event_date) NOT IN (6,7)\n and event_attributes:redacted_type::varchar='knock_out_product'\n group by 1\n order by 1 desc\n), expiry as\n(\n select event_date, count(*) as expiry_clicks, count(distinct auth_account_id) as expiry\n from TEAMS_PRD.REDACTED.MRT_CURR__MPARTICLE_EVENTS\n where event_name='redacted_search_filter_button_tapped' \n and event_attributes:filter_option::varchar='expiry'\n and event_date>'2023-12-31'\n and platform='Android'\n and dayofweekiso(event_date) NOT IN (6,7)\n and event_attributes:redacted_type::varchar='knock_out_product'\n group by 1\n order by 1 desc\n), product as\n(\n select event_date, count(*) as product_clicks, count(distinct auth_account_id) as product\n from TEAMS_PRD.REDACTED.MRT_CURR__MPARTICLE_EVENTS\n where event_name='redacted_search_filter_button_tapped' \n and event_attributes:filter_option::varchar='product'\n and event_date>'2023-12-31'\n and platform='Android'\n and dayofweekiso(event_date) NOT IN (6,7)\n and event_attributes:redacted_type::varchar='knock_out_product'\n group by 1\n order by 1 desc\n), knockout as \n(\n select event_date, count(*) as knockout_clicks, count(distinct auth_account_id) as knockout\n from 
TEAMS_PRD.SCHEMA.MRT_CURR__MPARTICLE_EVENTS\n where event_name='redacted_search_filter_button_tapped' \n and event_attributes:filter_option::varchar='knockout'\n and event_date>'2023-12-31'\n and platform='Android'\n and dayofweekiso(event_date) NOT IN (6,7)\n and event_attributes:redacted_type::varchar='knock_out_product'\n group by 1\n order by 1 desc\n)\nselect k.event_date, knockout, expiry, product, issuer\nfrom knockout k\njoin expiry e on k.event_date=e.event_date\njoin issuer i on k.event_date=i.event_date\njoin product p on k.event_date=p.event_date\nwhere k.event_date Date: Fri, 26 Jan 2024 20:54:06 +0200 Subject: [PATCH 444/792] fix(ingest/glue): Profiling breaks for non-partitioned tables due to absent `Table.PartitionKeys` (#9591) --- metadata-ingestion/src/datahub/ingestion/source/aws/glue.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/aws/glue.py b/metadata-ingestion/src/datahub/ingestion/source/aws/glue.py index 826c18f69fd01..93601533bf8d6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/aws/glue.py +++ b/metadata-ingestion/src/datahub/ingestion/source/aws/glue.py @@ -833,9 +833,8 @@ def get_profile_if_enabled( **{k: v for k, v in kwargs.items() if v} ) - partition_keys = response["Table"]["PartitionKeys"] - # check if this table is partitioned + partition_keys = response["Table"].get("PartitionKeys") if partition_keys: # ingest data profile with partitions # for cross-account ingestion From 051f570c47386540266e088d396feed70784f9d5 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 26 Jan 2024 14:17:14 -0600 Subject: [PATCH 445/792] fix(search): fix filters for hasX and numValues fields (#9729) --- .../metadata/models/ConfigEntitySpec.java | 12 ++++ .../metadata/models/DefaultEntitySpec.java | 12 ++++ .../linkedin/metadata/models/EntitySpec.java | 45 +++++++++++---- .../elasticsearch/query/ESBrowseDAO.java | 8 +-- .../elasticsearch/query/ESSearchDAO.java | 3 +- .../request/AutocompleteRequestHandler.java | 10 ++-- .../query/request/SearchRequestHandler.java | 13 +++-- .../metadata/search/utils/ESUtils.java | 55 +++++++++---------- .../ElasticSearchTimeseriesAspectService.java | 32 ++++++----- .../elastic/query/ESAggregatedStatsDAO.java | 2 +- .../fixtures/SampleDataFixtureTestBase.java | 54 ++++++++++++++++++ 11 files changed, 175 insertions(+), 71 deletions(-) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java index b235e2adcae11..8bd89071e299d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java @@ -3,10 +3,12 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.schema.TyperefDataSchema; import com.linkedin.metadata.models.annotation.EntityAnnotation; +import com.linkedin.metadata.models.annotation.SearchableAnnotation; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -19,6 +21,7 @@ public class ConfigEntitySpec implements EntitySpec { private final Map _aspectSpecs; private List _searchableFieldSpecs; + private Map> searchableFieldTypeMap; public ConfigEntitySpec( @Nonnull final String entityName, @@ -89,4 +92,13 @@ public List 
getSearchableFieldSpecs() { return _searchableFieldSpecs; } + + @Override + public Map> getSearchableFieldTypes() { + if (searchableFieldTypeMap == null) { + searchableFieldTypeMap = EntitySpec.super.getSearchableFieldTypes(); + } + + return searchableFieldTypeMap; + } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java index 5db8ca264f69d..2546674f9835c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java @@ -3,10 +3,12 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.schema.TyperefDataSchema; import com.linkedin.metadata.models.annotation.EntityAnnotation; +import com.linkedin.metadata.models.annotation.SearchableAnnotation; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -24,6 +26,7 @@ public class DefaultEntitySpec implements EntitySpec { private final TyperefDataSchema _aspectTyperefSchema; private List _searchableFieldSpecs; + private Map> searchableFieldTypeMap; public DefaultEntitySpec( @Nonnull final Collection aspectSpecs, @@ -102,4 +105,13 @@ public List getSearchableFieldSpecs() { return _searchableFieldSpecs; } + + @Override + public Map> getSearchableFieldTypes() { + if (searchableFieldTypeMap == null) { + searchableFieldTypeMap = EntitySpec.super.getSearchableFieldTypes(); + } + + return searchableFieldTypeMap; + } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java index fac08c7e20646..9a75cc1f751d3 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java @@ -3,7 +3,9 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.schema.TyperefDataSchema; import com.linkedin.metadata.models.annotation.EntityAnnotation; +import com.linkedin.metadata.models.annotation.SearchableAnnotation; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -39,16 +41,39 @@ default List getSearchableFieldSpecs() { .collect(Collectors.toList()); } - default Map> getSearchableFieldSpecMap() { - return getSearchableFieldSpecs().stream() - .collect( - Collectors.toMap( - searchableFieldSpec -> searchableFieldSpec.getSearchableAnnotation().getFieldName(), - searchableFieldSpec -> new HashSet<>(Collections.singleton(searchableFieldSpec)), - (set1, set2) -> { - set1.addAll(set2); - return set1; - })); + default Map> getSearchableFieldTypes() { + // Get additional fields and mint SearchableFieldSpecs for them + Map> fieldSpecMap = new HashMap<>(); + for (SearchableFieldSpec fieldSpec : getSearchableFieldSpecs()) { + SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); + if (searchableAnnotation.getNumValuesFieldName().isPresent()) { + String fieldName = searchableAnnotation.getNumValuesFieldName().get(); + Set fieldSet = new HashSet<>(); + fieldSet.add(SearchableAnnotation.FieldType.COUNT); + fieldSpecMap.put(fieldName, fieldSet); + } + if 
(searchableAnnotation.getHasValuesFieldName().isPresent()) { + String fieldName = searchableAnnotation.getHasValuesFieldName().get(); + Set fieldSet = new HashSet<>(); + fieldSet.add(SearchableAnnotation.FieldType.BOOLEAN); + fieldSpecMap.put(fieldName, fieldSet); + } + } + fieldSpecMap.putAll( + getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + searchableFieldSpec -> + searchableFieldSpec.getSearchableAnnotation().getFieldName(), + searchableFieldSpec -> + new HashSet<>( + Collections.singleton( + searchableFieldSpec.getSearchableAnnotation().getFieldType())), + (set1, set2) -> { + set1.addAll(set2); + return set1; + }))); + return fieldSpecMap; } default List getSearchScoreFieldSpecs() { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index d610ea4b4e028..0a9a9fbbad086 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -19,7 +19,7 @@ import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.models.SearchableFieldSpec; +import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; @@ -557,7 +557,7 @@ private QueryBuilder buildQueryStringV2( queryBuilder.filter(QueryBuilders.rangeQuery(BROWSE_PATH_V2_DEPTH).gt(browseDepthVal)); queryBuilder.filter( - SearchRequestHandler.getFilterQuery(filter, entitySpec.getSearchableFieldSpecMap())); + SearchRequestHandler.getFilterQuery(filter, entitySpec.getSearchableFieldTypes())); return queryBuilder; } @@ -583,9 +583,9 @@ private QueryBuilder buildQueryStringBrowseAcrossEntities( queryBuilder.filter(QueryBuilders.rangeQuery(BROWSE_PATH_V2_DEPTH).gt(browseDepthVal)); - Map> searchableFields = + Map> searchableFields = entitySpecs.stream() - .flatMap(entitySpec -> entitySpec.getSearchableFieldSpecMap().entrySet().stream()) + .flatMap(entitySpec -> entitySpec.getSearchableFieldTypes().entrySet().stream()) .collect( Collectors.toMap( Map.Entry::getKey, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 1ec90ed6f61e2..7de2770626ae3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -78,8 +78,7 @@ public long docCount(@Nonnull String entityName) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); CountRequest countRequest = new CountRequest(indexConvention.getIndexName(entitySpec)) - .query( - SearchRequestHandler.getFilterQuery(null, entitySpec.getSearchableFieldSpecMap())); + .query(SearchRequestHandler.getFilterQuery(null, entitySpec.getSearchableFieldTypes())); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) { return client.count(countRequest, RequestOptions.DEFAULT).getCount(); } catch (IOException e) { diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index 333d9602734d2..3835032247874 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -41,7 +41,7 @@ public class AutocompleteRequestHandler { private final List _defaultAutocompleteFields; - private final Map> searchableFields; + private final Map> searchableFieldTypes; private static final Map AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); @@ -56,14 +56,16 @@ public AutocompleteRequestHandler(@Nonnull EntitySpec entitySpec) { .map(SearchableAnnotation::getFieldName), Stream.of("urn")) .collect(Collectors.toList()); - searchableFields = + searchableFieldTypes = fieldSpecs.stream() .collect( Collectors.toMap( searchableFieldSpec -> searchableFieldSpec.getSearchableAnnotation().getFieldName(), searchableFieldSpec -> - new HashSet<>(Collections.singleton(searchableFieldSpec)), + new HashSet<>( + Collections.singleton( + searchableFieldSpec.getSearchableAnnotation().getFieldType())), (set1, set2) -> { set1.addAll(set2); return set1; @@ -81,7 +83,7 @@ public SearchRequest getSearchRequest( SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(limit); searchSourceBuilder.query(getQuery(input, field)); - searchSourceBuilder.postFilter(ESUtils.buildFilterQuery(filter, false, searchableFields)); + searchSourceBuilder.postFilter(ESUtils.buildFilterQuery(filter, false, searchableFieldTypes)); searchSourceBuilder.highlighter(getHighlights(field)); searchRequest.source(searchSourceBuilder); return searchRequest; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index e6ee909c80dae..277e15e1334d5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -97,7 +97,7 @@ public class SearchRequestHandler { private final SearchConfiguration _configs; private final SearchQueryBuilder _searchQueryBuilder; private final AggregationQueryBuilder _aggregationQueryBuilder; - private final Map> searchableFields; + private final Map> searchableFieldTypes; private SearchRequestHandler( @Nonnull EntitySpec entitySpec, @@ -122,9 +122,9 @@ private SearchRequestHandler( _searchQueryBuilder = new SearchQueryBuilder(configs, customSearchConfiguration); _aggregationQueryBuilder = new AggregationQueryBuilder(configs, annotations); _configs = configs; - searchableFields = + searchableFieldTypes = _entitySpecs.stream() - .flatMap(entitySpec -> entitySpec.getSearchableFieldSpecMap().entrySet().stream()) + .flatMap(entitySpec -> entitySpec.getSearchableFieldTypes().entrySet().stream()) .collect( Collectors.toMap( Map.Entry::getKey, @@ -182,12 +182,13 @@ private BinaryOperator mapMerger() { } public BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { - return getFilterQuery(filter, searchableFields); + return getFilterQuery(filter, searchableFieldTypes); } public static 
BoolQueryBuilder getFilterQuery( - @Nullable Filter filter, Map> searchableFields) { - BoolQueryBuilder filterQuery = ESUtils.buildFilterQuery(filter, false, searchableFields); + @Nullable Filter filter, + Map> searchableFieldTypes) { + BoolQueryBuilder filterQuery = ESUtils.buildFilterQuery(filter, false, searchableFieldTypes); return filterSoftDeletedByDefault(filter, filterQuery); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 77a67f100895c..4d74bfb66b8db 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -132,7 +132,7 @@ private ESUtils() {} public static BoolQueryBuilder buildFilterQuery( @Nullable Filter filter, boolean isTimeseries, - final Map> searchableFields) { + final Map> searchableFieldTypes) { BoolQueryBuilder finalQueryBuilder = QueryBuilders.boolQuery(); if (filter == null) { return finalQueryBuilder; @@ -144,7 +144,7 @@ public static BoolQueryBuilder buildFilterQuery( .forEach( or -> finalQueryBuilder.should( - ESUtils.buildConjunctiveFilterQuery(or, isTimeseries, searchableFields))); + ESUtils.buildConjunctiveFilterQuery(or, isTimeseries, searchableFieldTypes))); } else if (filter.getCriteria() != null) { // Otherwise, build boolean query from the deprecated "criteria" field. log.warn("Received query Filter with a deprecated field 'criteria'. Use 'or' instead."); @@ -157,7 +157,7 @@ public static BoolQueryBuilder buildFilterQuery( || criterion.hasValues() || criterion.getCondition() == Condition.IS_NULL) { andQueryBuilder.must( - getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFields)); + getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFieldTypes)); } }); finalQueryBuilder.should(andQueryBuilder); @@ -169,7 +169,7 @@ public static BoolQueryBuilder buildFilterQuery( public static BoolQueryBuilder buildConjunctiveFilterQuery( @Nonnull ConjunctiveCriterion conjunctiveCriterion, boolean isTimeseries, - Map> searchableFields) { + Map> searchableFieldTypes) { final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); conjunctiveCriterion .getAnd() @@ -181,10 +181,10 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery( if (!criterion.isNegated()) { // `filter` instead of `must` (enables caching and bypasses scoring) andQueryBuilder.filter( - getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFields)); + getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFieldTypes)); } else { andQueryBuilder.mustNot( - getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFields)); + getQueryBuilderFromCriterion(criterion, isTimeseries, searchableFieldTypes)); } } }); @@ -222,7 +222,7 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery( public static QueryBuilder getQueryBuilderFromCriterion( @Nonnull final Criterion criterion, boolean isTimeseries, - final Map> searchableFields) { + final Map> searchableFieldTypes) { final String fieldName = toFacetField(criterion.getField()); if (fieldName.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD)) { criterion.setField(fieldName); @@ -241,10 +241,11 @@ public static QueryBuilder getQueryBuilderFromCriterion( if (maybeFieldToExpand.isPresent()) { return getQueryBuilderFromCriterionForFieldToExpand( - maybeFieldToExpand.get(), criterion, isTimeseries, searchableFields); + maybeFieldToExpand.get(), criterion, isTimeseries, 
searchableFieldTypes); } - return getQueryBuilderFromCriterionForSingleField(criterion, isTimeseries, searchableFields); + return getQueryBuilderFromCriterionForSingleField( + criterion, isTimeseries, searchableFieldTypes); } public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType fieldType) { @@ -446,7 +447,7 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( @Nonnull final List fields, @Nonnull final Criterion criterion, final boolean isTimeseries, - final Map> searchableFields) { + final Map> searchableFieldTypes) { final BoolQueryBuilder orQueryBuilder = new BoolQueryBuilder(); for (String field : fields) { Criterion criterionToQuery = new Criterion(); @@ -461,7 +462,7 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( criterionToQuery.setField(toKeywordField(field, isTimeseries)); orQueryBuilder.should( getQueryBuilderFromCriterionForSingleField( - criterionToQuery, isTimeseries, searchableFields)); + criterionToQuery, isTimeseries, searchableFieldTypes)); } return orQueryBuilder; } @@ -470,7 +471,7 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( private static QueryBuilder getQueryBuilderFromCriterionForSingleField( @Nonnull Criterion criterion, boolean isTimeseries, - final Map> searchableFields) { + final Map> searchableFieldTypes) { final Condition condition = criterion.getCondition(); final String fieldName = toFacetField(criterion.getField()); @@ -485,10 +486,10 @@ private static QueryBuilder getQueryBuilderFromCriterionForSingleField( } else if (criterion.hasValues() || criterion.hasValue()) { if (condition == Condition.EQUAL) { return buildEqualsConditionFromCriterion( - fieldName, criterion, isTimeseries, searchableFields); + fieldName, criterion, isTimeseries, searchableFieldTypes); } else if (RANGE_QUERY_CONDITIONS.contains(condition)) { return buildRangeQueryFromCriterion( - criterion, fieldName, searchableFields, condition, isTimeseries); + criterion, fieldName, searchableFieldTypes, condition, isTimeseries); } else if (condition == Condition.CONTAIN) { return QueryBuilders.wildcardQuery( toKeywordField(criterion.getField(), isTimeseries), @@ -513,14 +514,14 @@ private static QueryBuilder buildEqualsConditionFromCriterion( @Nonnull final String fieldName, @Nonnull final Criterion criterion, final boolean isTimeseries, - final Map> searchableFields) { + final Map> searchableFieldTypes) { /* * If the newer 'values' field of Criterion.pdl is set, then we * handle using the following code to allow multi-match. */ if (!criterion.getValues().isEmpty()) { return buildEqualsConditionFromCriterionWithValues( - fieldName, criterion, isTimeseries, searchableFields); + fieldName, criterion, isTimeseries, searchableFieldTypes); } /* * Otherwise, we are likely using the deprecated 'value' field. 
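Editor's note: the map these helpers now receive is built once per entity spec by `getSearchableFieldTypes()`, shown in Java earlier in this patch. The TypeScript below is an illustrative model of that contract, not the shipped code; the enum is trimmed to the types this patch touches.

type FieldType = 'KEYWORD' | 'TEXT' | 'BOOLEAN' | 'COUNT';

interface SearchableAnnotation {
    fieldName: string;
    fieldType: FieldType;
    hasValuesFieldName?: string; // sidecar hasX field, if declared
    numValuesFieldName?: string; // sidecar numX field, if declared
}

// Sidecar hasX/numX fields are minted as BOOLEAN/COUNT entries so filters on
// them resolve to a real type; the declared fields are merged in afterwards
// and, like the Java putAll, win on a name collision.
function getSearchableFieldTypes(annotations: SearchableAnnotation[]): Map<string, Set<FieldType>> {
    const fieldTypes = new Map<string, Set<FieldType>>();
    for (const a of annotations) {
        if (a.numValuesFieldName) fieldTypes.set(a.numValuesFieldName, new Set<FieldType>(['COUNT']));
        if (a.hasValuesFieldName) fieldTypes.set(a.hasValuesFieldName, new Set<FieldType>(['BOOLEAN']));
    }
    const declared = new Map<string, Set<FieldType>>();
    for (const a of annotations) {
        const set = declared.get(a.fieldName) ?? new Set<FieldType>();
        set.add(a.fieldType);
        declared.set(a.fieldName, set);
    }
    declared.forEach((set, name) => fieldTypes.set(name, set));
    return fieldTypes;
}

// Hypothetical input: a keyword "tags" field that declares a hasTags sidecar
// yields tags -> {KEYWORD} and hasTags -> {BOOLEAN}.

Before this change only the declared fields were keyed into the map, so criteria on the hasX/numX sidecar fields missed it entirely, which is the bug this commit fixes.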
@@ -537,8 +538,8 @@ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( @Nonnull final String fieldName, @Nonnull final Criterion criterion, final boolean isTimeseries, - final Map> searchableFields) { - Set fieldTypes = getFieldTypes(searchableFields, fieldName); + final Map> searchableFieldTypes) { + Set fieldTypes = getFieldTypes(searchableFieldTypes, fieldName); if (fieldTypes.size() > 1) { log.warn( "Multiple field types for field name {}, determining best fit for set: {}", @@ -563,31 +564,27 @@ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( } private static Set getFieldTypes( - Map> searchableFields, String fieldName) { - Set fieldSpecs = + Map> searchableFields, String fieldName) { + Set fieldTypes = searchableFields.getOrDefault(fieldName, Collections.emptySet()); - Set fieldTypes = - fieldSpecs.stream() - .map(SearchableFieldSpec::getSearchableAnnotation) - .map(SearchableAnnotation::getFieldType) - .map(ESUtils::getElasticTypeForFieldType) - .collect(Collectors.toSet()); + Set finalFieldTypes = + fieldTypes.stream().map(ESUtils::getElasticTypeForFieldType).collect(Collectors.toSet()); if (fieldTypes.size() > 1) { log.warn( "Multiple field types for field name {}, determining best fit for set: {}", fieldName, fieldTypes); } - return fieldTypes; + return finalFieldTypes; } private static RangeQueryBuilder buildRangeQueryFromCriterion( Criterion criterion, String fieldName, - Map> searchableFields, + Map> searchableFieldTypes, Condition condition, boolean isTimeseries) { - Set fieldTypes = getFieldTypes(searchableFields, fieldName); + Set fieldTypes = getFieldTypes(searchableFieldTypes, fieldName); // Determine criterion value, range query only accepts single value so take first value in // values if multiple diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index 6cf8e92d61929..cb06dc75c70bc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -14,7 +14,7 @@ import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.models.SearchableFieldSpec; +import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.Criterion; @@ -296,7 +296,7 @@ public long countByFilter( ESUtils.buildFilterQuery( filter, true, - _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap())); + _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes())); CountRequest countRequest = new CountRequest(); countRequest.query(filterQueryBuilder); countRequest.indices(indexName); @@ -319,10 +319,11 @@ public List getAspectValues( @Nullable final Integer limit, @Nullable final Filter filter, @Nullable final SortCriterion sort) { - Map> searchableFields = - _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap(); + Map> searchableFieldTypes = + _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes(); final BoolQueryBuilder filterQueryBuilder = - 
QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true, searchableFields)); + QueryBuilders.boolQuery() + .must(ESUtils.buildFilterQuery(filter, true, searchableFieldTypes)); filterQueryBuilder.must(QueryBuilders.matchQuery("urn", urn.toString())); // NOTE: We are interested only in the un-exploded rows as only they carry the `event` payload. filterQueryBuilder.mustNot(QueryBuilders.termQuery(MappingsBuilder.IS_EXPLODED_FIELD, true)); @@ -333,7 +334,7 @@ public List getAspectValues( .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) .setValue(startTimeMillis.toString()); filterQueryBuilder.must( - ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true, searchableFields)); + ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true, searchableFieldTypes)); } if (endTimeMillis != null) { Criterion endTimeCriterion = @@ -342,7 +343,7 @@ public List getAspectValues( .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) .setValue(endTimeMillis.toString()); filterQueryBuilder.must( - ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true, searchableFields)); + ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true, searchableFieldTypes)); } final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQueryBuilder); @@ -412,7 +413,7 @@ public DeleteAspectValuesResult deleteAspectValues( final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery( - filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes()); final Optional result = _bulkProcessor @@ -440,7 +441,7 @@ public String deleteAspectValuesAsync( final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery( - filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes()); final int batchSize = options.getBatchSize() > 0 ? 
options.getBatchSize() : DEFAULT_LIMIT; TimeValue timeout = options.getTimeoutSeconds() > 0 @@ -466,7 +467,7 @@ public String reindexAsync( final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery( - filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes()); try { return this.reindexAsync(indexName, filterQueryBuilder, options); } catch (Exception e) { @@ -515,10 +516,11 @@ public TimeseriesScrollResult scrollAspects( @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - Map> searchableFields = - _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap(); + Map> searchableFieldTypes = + _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes(); final BoolQueryBuilder filterQueryBuilder = - QueryBuilders.boolQuery().filter(ESUtils.buildFilterQuery(filter, true, searchableFields)); + QueryBuilders.boolQuery() + .filter(ESUtils.buildFilterQuery(filter, true, searchableFieldTypes)); if (startTimeMillis != null) { Criterion startTimeCriterion = @@ -527,7 +529,7 @@ public TimeseriesScrollResult scrollAspects( .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) .setValue(startTimeMillis.toString()); filterQueryBuilder.filter( - ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true, searchableFields)); + ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true, searchableFieldTypes)); } if (endTimeMillis != null) { Criterion endTimeCriterion = @@ -536,7 +538,7 @@ public TimeseriesScrollResult scrollAspects( .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) .setValue(endTimeMillis.toString()); filterQueryBuilder.filter( - ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true, searchableFields)); + ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true, searchableFieldTypes)); } SearchResponse response = diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java index f8b2cd8552357..580888e54b700 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java @@ -379,7 +379,7 @@ public GenericTable getAggregatedStats( // Setup the filter query builder using the input filter provided. 
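The same construction recurs across `ElasticSearchTimeseriesAspectService` and, in the next file, `ESAggregatedStatsDAO`: resolve the entity's searchable field types once, AND the caller's filter into a bool query, then add one range criterion per time bound. A hypothetical helper condensing that pattern could look like the sketch below; the `"timestampMillis"` field name and the `ESUtils` import path are assumptions, since neither is spelled out in these hunks.

```java
import com.linkedin.metadata.models.annotation.SearchableAnnotation;
import com.linkedin.metadata.query.filter.Condition;
import com.linkedin.metadata.query.filter.Criterion;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.search.utils.ESUtils; // assumed location of ESUtils
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;

class TimeBoundedFilterSketch {
  // Hypothetical helper, not part of the patch: one range Criterion per time
  // bound, ANDed into the caller's filter query.
  static BoolQueryBuilder buildTimeBoundedFilter(
      @Nullable Filter filter,
      @Nullable Long startTimeMillis,
      @Nullable Long endTimeMillis,
      Map<String, Set<SearchableAnnotation.FieldType>> searchableFieldTypes) {
    BoolQueryBuilder filterQuery =
        QueryBuilders.boolQuery()
            .must(ESUtils.buildFilterQuery(filter, true, searchableFieldTypes));
    if (startTimeMillis != null) {
      filterQuery.must(
          ESUtils.getQueryBuilderFromCriterion(
              new Criterion()
                  .setField("timestampMillis") // assumed field name
                  .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
                  .setValue(startTimeMillis.toString()),
              true,
              searchableFieldTypes));
    }
    if (endTimeMillis != null) {
      filterQuery.must(
          ESUtils.getQueryBuilderFromCriterion(
              new Criterion()
                  .setField("timestampMillis") // assumed field name
                  .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
                  .setValue(endTimeMillis.toString()),
              true,
              searchableFieldTypes));
    }
    return filterQuery;
  }
}
```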
final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery( - filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldSpecMap()); + filter, true, _entityRegistry.getEntitySpec(entityName).getSearchableFieldTypes()); AspectSpec aspectSpec = getTimeseriesAspectSpec(entityName, aspectName); // Build and attach the grouping aggregations diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java index a1af2325ee0ed..4742115b16e1b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java @@ -14,8 +14,10 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.chart.ChartType; import com.linkedin.datahub.graphql.types.container.ContainerType; @@ -45,6 +47,7 @@ import com.linkedin.r2.RemoteInvocationException; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -64,6 +67,7 @@ import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortBuilder; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.AssertJUnit; import org.testng.annotations.Test; public abstract class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests { @@ -1936,6 +1940,56 @@ public void testSortOrdering() { String.format("%s - Expected search results to have at least two results", query)); } + @Test + public void testFilterOnHasValuesField() { + AssertJUnit.assertNotNull(getSearchService()); + Filter filter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasOwners") + .setValue("") + .setValues(new StringArray(ImmutableList.of("true")))))))); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), + "*", + SEARCHABLE_ENTITIES, + filter, + Collections.singletonList(DATASET_ENTITY_NAME)); + assertEquals(searchResult.getEntities().size(), 8); + } + + @Test + public void testFilterOnNumValuesField() { + AssertJUnit.assertNotNull(getSearchService()); + Filter filter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("numInputDatasets") + .setValue("") + .setValues(new StringArray(ImmutableList.of("1")))))))); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), + "*", + SEARCHABLE_ENTITIES, + filter, + Collections.singletonList(DATA_JOB_ENTITY_NAME)); + assertEquals(searchResult.getEntities().size(), 4); + } + private Stream getTokens(AnalyzeRequest request) throws IOException { return getSearchClient() From 388b3ec0ac10f7e3d142c9bcbf9c89be6ea92853 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 26 Jan 2024 14:01:48 -0800 Subject: [PATCH 446/792] 
fix(ingest/airflow): fix plugin support for airflow 2.5.0 (#9719) --- .../src/datahub_airflow_plugin/_datahub_listener_module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py index e16563400e397..0e1ef69ebf18c 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py @@ -29,6 +29,6 @@ def on_task_instance_failed(previous_state, task_instance, session): if hasattr(_listener, "on_dag_run_running"): @hookimpl - def on_dag_run_running(dag_run, session): + def on_dag_run_running(dag_run, msg): assert _listener - _listener.on_dag_run_running(dag_run, session) + _listener.on_dag_run_running(dag_run, msg) From 5adb799f137a00c315144715786179ef4a6b2405 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 26 Jan 2024 14:02:52 -0800 Subject: [PATCH 447/792] fix(cli): fix example data contract yaml + update airflow codecov (#9707) --- .github/workflows/airflow-plugin.yml | 4 +- .../airflow-plugin/build.gradle | 2 +- .../airflow-plugin/tests/conftest.py | 11 +++++ .../pet_of_the_week.dhub.dc.yaml | 42 +++++++++++-------- 4 files changed, 39 insertions(+), 20 deletions(-) diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 7ae7b87b0f5ce..c5c75de4f7aee 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -87,8 +87,8 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} directory: . fail_ci_if_error: false - flags: airflow-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} - name: pytest-airflow + flags: airflow,airflow-${{ matrix.extra_pip_extras }} + name: pytest-airflow-${{ matrix.python-version }}-${{ matrix.extra_pip_requirements }} verbose: true event-file: diff --git a/metadata-ingestion-modules/airflow-plugin/build.gradle b/metadata-ingestion-modules/airflow-plugin/build.gradle index dacf12dc020df..9555f92c8831d 100644 --- a/metadata-ingestion-modules/airflow-plugin/build.gradle +++ b/metadata-ingestion-modules/airflow-plugin/build.gradle @@ -108,7 +108,7 @@ task testQuick(type: Exec, dependsOn: installDevTest) { inputs.files(project.fileTree(dir: "src/", include: "**/*.py")) inputs.files(project.fileTree(dir: "tests/")) commandLine 'bash', '-x', '-c', - "source ${venv_name}/bin/activate && pytest -vv --continue-on-collection-errors --junit-xml=junit.quick.xml" + "source ${venv_name}/bin/activate && pytest --cov-config=setup.cfg --cov-report xml:coverage_quick.xml -vv --continue-on-collection-errors --junit-xml=junit.quick.xml" } diff --git a/metadata-ingestion-modules/airflow-plugin/tests/conftest.py b/metadata-ingestion-modules/airflow-plugin/tests/conftest.py index d2c45e723f1b0..994816ff037c8 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/conftest.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/conftest.py @@ -1,6 +1,17 @@ +import pathlib +import site + + def pytest_addoption(parser): parser.addoption( "--update-golden-files", action="store_true", default=False, ) + + +# See https://coverage.readthedocs.io/en/latest/subprocess.html#configuring-python-for-sub-process-measurement +coverage_startup_code = "import coverage; coverage.process_startup()" +site_packages_dir = pathlib.Path(site.getsitepackages()[0]) 
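The conftest hunk in this commit relies on a standard-library hook that is easy to miss: at interpreter startup, the `site` module executes any line in a site-packages `.pth` file that begins with `import`. That is what lets `coverage.process_startup()` run in every subprocess pytest spawns, per the coverage.py documentation linked in the hunk. A minimal standalone illustration of the same mechanism, with an illustrative file name and payload:

```python
import pathlib
import site

# Any .pth line starting with "import" is executed by the interpreter at
# startup; the conftest change uses this to call coverage.process_startup()
# in every subprocess without per-test wiring.
startup_line = 'import sys; sys.stderr.write("started via .pth hook\n")'
pth_path = pathlib.Path(site.getsitepackages()[0]) / "demo_startup.pth"
pth_path.write_text(startup_line)
# Every new Python process in this environment now runs startup_line first.
```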
+pth_file_path = site_packages_dir / "datahub_coverage_startup.pth" +pth_file_path.write_text(coverage_startup_code) diff --git a/metadata-ingestion/examples/data_contract/pet_of_the_week.dhub.dc.yaml b/metadata-ingestion/examples/data_contract/pet_of_the_week.dhub.dc.yaml index c73904403f678..bd081172b2a27 100644 --- a/metadata-ingestion/examples/data_contract/pet_of_the_week.dhub.dc.yaml +++ b/metadata-ingestion/examples/data_contract/pet_of_the_week.dhub.dc.yaml @@ -1,21 +1,29 @@ -# id: pet_details_dc # Optional: This is the unique identifier for the data contract -display_name: Data Contract for SampleHiveDataset +version: 1 # datahub yaml format version + +# Note: this data contract yaml format is still in development, and will likely +# change in backwards-incompatible ways in the future. + entity: urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD) freshness: - time: 0700 - granularity: DAILY + type: cron + cron: 0 7 * * * # 7am daily + timezone: America/Los_Angeles schema: - properties: - field_foo: - type: string - native_type: VARCHAR(100) - field_bar: - type: boolean - required: - - field_bar + type: json-schema + json-schema: + properties: + field_foo: + type: string + native_type: VARCHAR(100) + field_bar: + type: boolean + required: + - field_bar data_quality: - - type: column_range - config: - column: field_foo - min: 0 - max: 100 + - type: unique + column: field_foo + - type: custom_sql + sql: SELECT COUNT(*) FROM SampleHiveDataset + operator: + type: greater_than + value: 100 From 2bb4b73f98ef46446e8025cd3657289bb24ff0df Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 26 Jan 2024 14:03:16 -0800 Subject: [PATCH 448/792] fix(ingest/metabase): add missing sql parser dep (#9725) --- metadata-ingestion/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 1fb570d76120e..c1a5da5826ead 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -340,7 +340,7 @@ "ldap": {"python-ldap>=2.4"}, "looker": looker_common, "lookml": looker_common, - "metabase": {"requests"} | sqllineage_lib, + "metabase": {"requests"} | sqlglot_lib, "mlflow": {"mlflow-skinny>=2.3.0"}, "mode": {"requests", "tenacity>=8.0.1"} | sqllineage_lib, "mongodb": {"pymongo[srv]>=3.11", "packaging"}, From dc16c73937dcb4a287653090faf3c32807257872 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Mon, 29 Jan 2024 15:26:14 +0530 Subject: [PATCH 449/792] feat(ui): include parent term groups, domains in glossary, domain dropdown (#9715) --- .../profile/AddRelatedTermsModal.tsx | 13 ++++- .../EntityDropdown/NodeParentSelect.tsx | 21 +++++-- .../profile/sidebar/Domain/SetDomainModal.tsx | 13 ++++- .../glossary/GloassarySearchResultItem.tsx | 56 +++++++++++++++++++ .../src/app/glossary/GlossarySearch.tsx | 55 ++++++++---------- datahub-web-react/src/app/glossary/utils.ts | 8 ++- .../src/app/shared/DomainLabel.tsx | 2 +- .../src/app/shared/tags/AddTagsTermsModal.tsx | 13 ++++- 8 files changed, 138 insertions(+), 43 deletions(-) create mode 100644 datahub-web-react/src/app/glossary/GloassarySearchResultItem.tsx diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx index 5b303f75e2985..f97f3c327676b 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx +++ 
b/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx @@ -10,11 +10,19 @@ import { BrowserWrapper } from '../../../shared/tags/AddTagsTermsModal'; import TermLabel from '../../../shared/TermLabel'; import { useEntityRegistry } from '../../../useEntityRegistry'; import { useEntityData, useRefetch } from '../../shared/EntityContext'; +import ParentEntities from '../../../search/filters/ParentEntities'; +import { getParentEntities } from '../../../search/filters/utils'; const StyledSelect = styled(Select)` width: 480px; `; +const SearchResultContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; +`; + interface Props { onClose: () => void; relationshipType: TermRelationshipType; @@ -68,7 +76,10 @@ function AddRelatedTermsModal(props: Props) { return ( - + + + + ); }); diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx index e7f5827e33dcc..7227354a46569 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx @@ -1,5 +1,6 @@ import React from 'react'; import { Select } from 'antd'; +import styled from 'styled-components'; import { EntityType, GlossaryNode, SearchResult } from '../../../../types.generated'; import { useEntityRegistry } from '../../../useEntityRegistry'; import { useEntityData } from '../EntityContext'; @@ -7,6 +8,14 @@ import ClickOutside from '../../../shared/ClickOutside'; import GlossaryBrowser from '../../../glossary/GlossaryBrowser/GlossaryBrowser'; import { BrowserWrapper } from '../../../shared/tags/AddTagsTermsModal'; import useParentSelector from './useParentSelector'; +import ParentEntities from '../../../search/filters/ParentEntities'; +import { getParentGlossary } from '../../../glossary/utils'; + +const SearchResultContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; +`; // filter out entity itself and its children export function filterResultsForMove(entity: GlossaryNode, entityUrn: string) { @@ -46,10 +55,9 @@ function NodeParentSelect(props: Props) { setSelectedParentUrn, }); - let nodeSearchResults: SearchResult[] = []; - if (isMoving) { - nodeSearchResults = searchResults.filter((r) => filterResultsForMove(r.entity as GlossaryNode, entityDataUrn)); - } + const nodeSearchResults: SearchResult[] = searchResults.filter((r) => + filterResultsForMove(r.entity as GlossaryNode, entityDataUrn), + ); const isShowingGlossaryBrowser = !searchQuery && isFocusedOnInput; const shouldHideSelf = isMoving && entityType === EntityType.GlossaryNode; @@ -70,7 +78,10 @@ function NodeParentSelect(props: Props) { > {nodeSearchResults?.map((result) => ( - {entityRegistry.getDisplayName(result.entity.type, result.entity)} + + + {entityRegistry.getDisplayName(result.entity.type, result.entity)} + ))} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx index 9b512d2d679e9..ab63553c6376b 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/Domain/SetDomainModal.tsx @@ -16,6 +16,8 @@ import DomainNavigator from '../../../../../../domain/nestedDomains/domainNaviga import ClickOutside from 
'../../../../../../shared/ClickOutside'; import { ANTD_GRAY } from '../../../../constants'; import { getModalDomContainer } from '../../../../../../../utils/focus'; +import ParentEntities from '../../../../../../search/filters/ParentEntities'; +import { getParentDomains } from '../../../../../../domain/utils'; type Props = { urns: string[]; @@ -44,6 +46,12 @@ const LoadingWrapper = styled.div` } `; +const SearchResultContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; +`; + export const SetDomainModal = ({ urns, onCloseModal, refetch, defaultValue, onOkOverride, titleOverride }: Props) => { const entityRegistry = useEntityRegistry(); const [isFocusedOnInput, setIsFocusedOnInput] = useState(false); @@ -88,7 +96,10 @@ export const SetDomainModal = ({ urns, onCloseModal, refetch, defaultValue, onOk const displayName = entityRegistry.getDisplayName(entity.type, entity); return ( - + + + + ); }; diff --git a/datahub-web-react/src/app/glossary/GloassarySearchResultItem.tsx b/datahub-web-react/src/app/glossary/GloassarySearchResultItem.tsx new file mode 100644 index 0000000000000..03a384aab4bd5 --- /dev/null +++ b/datahub-web-react/src/app/glossary/GloassarySearchResultItem.tsx @@ -0,0 +1,56 @@ +// Create a new component called SearchResultItem.js +import React from 'react'; +import { Link } from 'react-router-dom'; +import Highlight from 'react-highlighter'; +import styled from 'styled-components/macro'; +import { Entity } from '../../types.generated'; +import { IconStyleType } from '../entity/Entity'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import ParentEntities from '../search/filters/ParentEntities'; +import { getParentGlossary } from './utils'; +import EntityRegistry from '../entity/EntityRegistry'; + +type Props = { + entity: Entity; + entityRegistry: EntityRegistry; + query: string; + onResultClick: () => void; +}; + +const SearchResult = styled(Link)` + color: #262626; + display: flex; + align-items: center; + gap: 8px; + height: 100%; + padding: 6px 8px; + width: 100%; + &:hover { + background-color: ${ANTD_GRAY[3]}; + color: #262626; + } +`; + +const IconWrapper = styled.span``; + +const highlightMatchStyle = { + fontWeight: 'bold', + background: 'none', + padding: 0, +}; + +function GlossarySearchResultItem({ entity, entityRegistry, query, onResultClick }: Props) { + return ( + + {entityRegistry.getIcon(entity.type, 12, IconStyleType.TAB_VIEW)} +
    + + + {entityRegistry.getDisplayName(entity.type, entity)} + +
    + </SearchResult>

    + ); +} + +export default GlossarySearchResultItem; diff --git a/datahub-web-react/src/app/glossary/GlossarySearch.tsx b/datahub-web-react/src/app/glossary/GlossarySearch.tsx index 75cd3b10d581e..321c218c38fe3 100644 --- a/datahub-web-react/src/app/glossary/GlossarySearch.tsx +++ b/datahub-web-react/src/app/glossary/GlossarySearch.tsx @@ -1,13 +1,12 @@ import React, { useState } from 'react'; -import { Link } from 'react-router-dom'; import styled from 'styled-components/macro'; import { useGetSearchResultsForMultipleQuery } from '../../graphql/search.generated'; import { EntityType } from '../../types.generated'; -import { IconStyleType } from '../entity/Entity'; import { ANTD_GRAY } from '../entity/shared/constants'; import { SearchBar } from '../search/SearchBar'; import ClickOutside from '../shared/ClickOutside'; import { useEntityRegistry } from '../useEntityRegistry'; +import GloassarySearchResultItem from './GloassarySearchResultItem'; const GlossarySearchWrapper = styled.div` position: relative; @@ -28,20 +27,10 @@ const ResultsWrapper = styled.div` top: 45px; `; -const SearchResult = styled(Link)` - color: #262626; - display: inline-block; - height: 100%; - padding: 6px 8px; - width: 100%; - &:hover { - background-color: ${ANTD_GRAY[3]}; - color: #262626; - } -`; - -const IconWrapper = styled.span` - margin-right: 8px; +const TermNodeName = styled.span` + margin-top: 12px; + color: ${ANTD_GRAY[8]}; + font-weight: bold; `; function GlossarySearch() { @@ -63,6 +52,21 @@ function GlossarySearch() { const searchResults = data?.searchAcrossEntities?.searchResults; + const renderSearchResults = () => ( + + Glossary Terms + {searchResults?.map((result) => ( + setIsSearchBarFocused(false)} + /> + ))} + + ); + return ( setIsSearchBarFocused(false)}> @@ -84,23 +88,8 @@ function GlossarySearch() { entityRegistry={entityRegistry} onFocus={() => setIsSearchBarFocused(true)} /> - {isSearchBarFocused && searchResults && !!searchResults.length && ( - - {searchResults.map((result) => { - return ( - setIsSearchBarFocused(false)} - > - - {entityRegistry.getIcon(result.entity.type, 12, IconStyleType.ACCENT)} - - {entityRegistry.getDisplayName(result.entity.type, result.entity)} - - ); - })} - - )} + {isSearchBarFocused && searchResults && !!searchResults.length && renderSearchResults()} + ); diff --git a/datahub-web-react/src/app/glossary/utils.ts b/datahub-web-react/src/app/glossary/utils.ts index 60f71d7b2f9ef..4cfbb06b8a4f3 100644 --- a/datahub-web-react/src/app/glossary/utils.ts +++ b/datahub-web-react/src/app/glossary/utils.ts @@ -1,4 +1,5 @@ -import { EntityType } from '../../types.generated'; +import { Entity, EntityType } from '../../types.generated'; +import EntityRegistry from '../entity/EntityRegistry'; import { GenericEntityProperties } from '../entity/shared/types'; export const ROOT_NODES = 'rootNodes'; @@ -25,3 +26,8 @@ export function updateGlossarySidebar( ) { setUrnsToUpdate([...urnsToUpdate, ...parentNodesToUpdate]); } + +export function getParentGlossary(node: T, entityRegistry: EntityRegistry) { + const props = entityRegistry.getGenericEntityProperties(EntityType.GlossaryNode, node); + return props?.parentNodes?.nodes ?? 
[]; +} diff --git a/datahub-web-react/src/app/shared/DomainLabel.tsx b/datahub-web-react/src/app/shared/DomainLabel.tsx index 40208026d4369..f71975b23e517 100644 --- a/datahub-web-react/src/app/shared/DomainLabel.tsx +++ b/datahub-web-react/src/app/shared/DomainLabel.tsx @@ -5,7 +5,7 @@ const DomainContainerWrapper = styled.div` display: flex; justify-content: space-between; align-items: center; - padding: 12px; + padding: 2px; `; const DomainContentWrapper = styled.div` diff --git a/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx b/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx index d486ee02dae3e..73e99c319441d 100644 --- a/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx +++ b/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx @@ -20,6 +20,8 @@ import { FORBIDDEN_URN_CHARS_REGEX, handleBatchError } from '../../entity/shared import { TagTermLabel } from './TagTermLabel'; import { ENTER_KEY_CODE } from '../constants'; import { getModalDomContainer } from '../../../utils/focus'; +import ParentEntities from '../../search/filters/ParentEntities'; +import { getParentEntities } from '../../search/filters/utils'; export enum OperationType { ADD, @@ -69,6 +71,12 @@ export const BrowserWrapper = styled.div<{ isHidden: boolean; width?: string; ma `} `; +const SearchResultContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; +`; + const CREATE_TAG_VALUE = '____reserved____.createTagValue'; const isValidTagName = (tagName: string) => { @@ -139,7 +147,10 @@ export default function EditTagTermsModal({ const tagOrTermComponent = ; return ( - {tagOrTermComponent} + + + {tagOrTermComponent} + ); }; From 90c88082b11cdfb6252eaebf11737887a38a0ee3 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Mon, 29 Jan 2024 14:14:34 +0100 Subject: [PATCH 450/792] fix(ingest/s3): Not sorting schema fields to keep original order (#9349) --- .../source/data_lake_common/path_spec.py | 20 +++++++++++++++---- .../src/datahub/ingestion/source/s3/config.py | 5 +++++ .../src/datahub/ingestion/source/s3/source.py | 3 ++- .../ingestion/source/schema_inference/json.py | 2 +- .../unit/data_lake/test_schema_inference.py | 16 +++++---------- 5 files changed, 29 insertions(+), 17 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py b/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py index 05b1b6b7cc040..a4b3779b73803 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py +++ b/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py @@ -63,6 +63,11 @@ class Config: description="Not listing all the files but only taking a handful amount of sample file to infer the schema. File count and file size calculation will be disabled. This can affect performance significantly if enabled", ) + allow_double_stars: bool = Field( + default=False, + description="Allow double stars in the include path. 
This can affect performance significantly if enabled", + ) + def allowed(self, path: str) -> bool: logger.debug(f"Checking file to inclusion: {path}") if not pathlib.PurePath(path).globmatch( @@ -126,11 +131,18 @@ def get_parsable_include(cls, include: str) -> str: def get_named_vars(self, path: str) -> Union[None, parse.Result, parse.Match]: return self.compiled_include.parse(path) - @pydantic.validator("include") - def validate_no_double_stars(cls, v: str) -> str: - if "**" in v: + @pydantic.root_validator() + def validate_no_double_stars(cls, values: Dict) -> Dict: + if "include" not in values: + return values + + if ( + values.get("include") + and "**" in values["include"] + and not values.get("allow_double_stars") + ): raise ValueError("path_spec.include cannot contain '**'") - return v + return values @pydantic.validator("file_types", always=True) def validate_file_types(cls, v: Optional[List[str]]) -> List[str]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/config.py b/metadata-ingestion/src/datahub/ingestion/source/s3/config.py index f752a33b42d9c..55e340e2850d5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/config.py @@ -93,6 +93,11 @@ class DataLakeSourceConfig( "path_spec", "path_specs", lambda path_spec: [path_spec] ) + sort_schema_fields: bool = Field( + default=False, + description="Whether to sort schema fields by fieldPath when inferring schemas.", + ) + def is_profiling_enabled(self) -> bool: return self.profiling.enabled and is_profiling_enabled( self.profiling.operation_config diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py index 94c571eabad11..41fc5782352c9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py @@ -458,7 +458,8 @@ def get_fields(self, table_data: TableData, path_spec: PathSpec) -> List: ) file.close() logger.debug(f"Extracted fields in schema: {fields}") - fields = sorted(fields, key=lambda f: f.fieldPath) + if self.source_config.sort_schema_fields: + fields = sorted(fields, key=lambda f: f.fieldPath) if self.source_config.add_partition_columns_to_schema: self.add_partition_columns_to_schema( diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema_inference/json.py b/metadata-ingestion/src/datahub/ingestion/source/schema_inference/json.py index c53c64be4cba8..251d136fe92ee 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema_inference/json.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema_inference/json.py @@ -48,7 +48,7 @@ def infer_schema(self, file: IO[bytes]) -> List[SchemaField]: schema = construct_schema(datastore, delimiter=".") fields: List[SchemaField] = [] - for schema_field in sorted(schema.values(), key=lambda x: x["delimited_name"]): + for schema_field in schema.values(): mapped_type = _field_type_mapping.get(schema_field["type"], NullTypeClass) native_type = schema_field["type"] diff --git a/metadata-ingestion/tests/unit/data_lake/test_schema_inference.py b/metadata-ingestion/tests/unit/data_lake/test_schema_inference.py index 4a69deb572fbd..de88deec9b9cb 100644 --- a/metadata-ingestion/tests/unit/data_lake/test_schema_inference.py +++ b/metadata-ingestion/tests/unit/data_lake/test_schema_inference.py @@ -18,23 +18,23 @@ from tests.unit.test_schema_util import assert_field_paths_match expected_field_paths = [ - "boolean_field", 
"integer_field", + "boolean_field", "string_field", ] expected_field_paths_avro = [ - "[version=2.0].[type=test].[type=boolean].boolean_field", "[version=2.0].[type=test].[type=int].integer_field", + "[version=2.0].[type=test].[type=boolean].boolean_field", "[version=2.0].[type=test].[type=string].string_field", ] -expected_field_types = [BooleanTypeClass, NumberTypeClass, StringTypeClass] +expected_field_types = [NumberTypeClass, BooleanTypeClass, StringTypeClass] test_table = pd.DataFrame( { - "boolean_field": [True, False, True], "integer_field": [1, 2, 3], + "boolean_field": [True, False, True], "string_field": ["a", "b", "c"], } ) @@ -54,7 +54,6 @@ def test_infer_schema_csv(): file.seek(0) fields = csv_tsv.CsvInferrer(max_rows=100).infer_schema(file) - fields.sort(key=lambda x: x.fieldPath) assert_field_paths_match(fields, expected_field_paths) assert_field_types_match(fields, expected_field_types) @@ -70,7 +69,6 @@ def test_infer_schema_tsv(): file.seek(0) fields = csv_tsv.TsvInferrer(max_rows=100).infer_schema(file) - fields.sort(key=lambda x: x.fieldPath) assert_field_paths_match(fields, expected_field_paths) assert_field_types_match(fields, expected_field_types) @@ -82,7 +80,6 @@ def test_infer_schema_json(): file.seek(0) fields = json.JsonInferrer().infer_schema(file) - fields.sort(key=lambda x: x.fieldPath) assert_field_paths_match(fields, expected_field_paths) assert_field_types_match(fields, expected_field_types) @@ -92,9 +89,7 @@ def test_infer_schema_parquet(): with tempfile.TemporaryFile(mode="w+b") as file: test_table.to_parquet(file) file.seek(0) - fields = parquet.ParquetInferrer().infer_schema(file) - fields.sort(key=lambda x: x.fieldPath) assert_field_paths_match(fields, expected_field_paths) assert_field_types_match(fields, expected_field_types) @@ -108,8 +103,8 @@ def test_infer_schema_avro(): "type": "record", "name": "test", "fields": [ - {"name": "boolean_field", "type": "boolean"}, {"name": "integer_field", "type": "int"}, + {"name": "boolean_field", "type": "boolean"}, {"name": "string_field", "type": "string"}, ], } @@ -124,7 +119,6 @@ def test_infer_schema_avro(): file.seek(0) fields = AvroInferrer().infer_schema(file) - fields.sort(key=lambda x: x.fieldPath) assert_field_paths_match(fields, expected_field_paths_avro) assert_field_types_match(fields, expected_field_types) From 5735eb3a55f49c966d68b4bfca95b1965b34292b Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Mon, 29 Jan 2024 15:12:30 +0100 Subject: [PATCH 451/792] fix(ingest/test): Fixing breaking change in moto 5.0 library (#9736) --- metadata-ingestion/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index c1a5da5826ead..af2b54ba1cefa 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -234,7 +234,8 @@ # ujson 5.2.0 has the JSONDecodeError exception type, which we need for error handling. 
"ujson>=5.2.0", "smart-open[s3]>=5.2.1", - "moto[s3]", + # moto 5.0.0 drops support for Python 3.7 + "moto[s3]<5.0.0", *path_spec_common, } From fdf929b3f4284753fef9ff59b5018134b874c56b Mon Sep 17 00:00:00 2001 From: Shirshanka Das Date: Mon, 29 Jan 2024 06:14:34 -0800 Subject: [PATCH 452/792] build(graphql): simplify gradle graphql codegen task (#9734) --- datahub-graphql-core/build.gradle | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index f273a4dd0eea5..fe70f2622490d 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -31,30 +31,16 @@ dependencies { graphqlCodegen { // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = [ - "$projectDir/src/main/resources/entity.graphql".toString(), - "$projectDir/src/main/resources/app.graphql".toString(), - "$projectDir/src/main/resources/search.graphql".toString(), - "$projectDir/src/main/resources/analytics.graphql".toString(), - "$projectDir/src/main/resources/recommendation.graphql".toString(), - "$projectDir/src/main/resources/ingestion.graphql".toString(), - "$projectDir/src/main/resources/auth.graphql".toString(), - "$projectDir/src/main/resources/timeline.graphql".toString(), - "$projectDir/src/main/resources/tests.graphql".toString(), - "$projectDir/src/main/resources/properties.graphql".toString(), - "$projectDir/src/main/resources/step.graphql".toString(), - "$projectDir/src/main/resources/lineage.graphql".toString(), - "$projectDir/src/main/resources/forms.graphql".toString() - ] - outputDir = new File("$projectDir/src/mainGeneratedGraphQL/java") + graphqlSchemaPaths = fileTree(dir: "${projectDir}/src/main/resources", include: '**/*.graphql').collect { it.absolutePath } + outputDir = new File("${projectDir}/src/mainGeneratedGraphQL/java") packageName = "com.linkedin.datahub.graphql.generated" generateToString = true generateApis = true generateParameterizedFieldsResolvers = false modelValidationAnnotation = "@javax.annotation.Nonnull" customTypesMapping = [ - Long: "Long", - Float: "Float" + Long: "Long", + Float: "Float" ] } From f3cc4e068a51c0124f1b4dc55713ddd5344ebcb8 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Mon, 29 Jan 2024 21:56:16 +0530 Subject: [PATCH 453/792] feat(ui/secret): support to edit secrets (#9737) --- .../app/ingest/secret/SecretBuilderModal.tsx | 73 ++++++++++++++----- .../src/app/ingest/secret/SecretsList.tsx | 69 +++++++++++++++++- .../src/app/ingest/secret/cacheUtils.ts | 45 ++++++++++++ .../src/app/ingest/secret/types.ts | 4 + 4 files changed, 170 insertions(+), 21 deletions(-) diff --git a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx index c099d9a580efa..2d20ac77891ea 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx @@ -1,5 +1,5 @@ import { Button, Form, Input, Modal, Typography } from 'antd'; -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { useEnterKeyListener } from '../../shared/useEnterKeyListener'; import { SecretBuilderState } from './types'; @@ -9,12 +9,14 @@ const VALUE_FIELD_NAME = 'value'; type Props = { initialState?: SecretBuilderState; + editSecret?: SecretBuilderState; visible: boolean; onSubmit?: 
(source: SecretBuilderState, resetState: () => void) => void; + onUpdate?: (source: SecretBuilderState, resetState: () => void) => void; onCancel?: () => void; }; -export const SecretBuilderModal = ({ initialState, visible, onSubmit, onCancel }: Props) => { +export const SecretBuilderModal = ({ initialState, editSecret, visible, onSubmit, onUpdate, onCancel }: Props) => { const [createButtonEnabled, setCreateButtonEnabled] = useState(false); const [form] = Form.useForm(); @@ -23,38 +25,69 @@ export const SecretBuilderModal = ({ initialState, visible, onSubmit, onCancel } querySelectorToExecuteClick: '#createSecretButton', }); + useEffect(() => { + if (editSecret) { + form.setFieldsValue({ + name: editSecret.name, + description: editSecret.description, + value: editSecret.value, + }); + } + }, [editSecret, form]); + function resetValues() { + setCreateButtonEnabled(false); form.resetFields(); } + const onCloseModal = () => { + setCreateButtonEnabled(false); + form.resetFields(); + onCancel?.(); + }; + + const titleText = editSecret ? 'Edit Secret' : 'Create a new Secret'; + return ( Create a new Secret} + title={{titleText}} visible={visible} - onCancel={onCancel} + onCancel={onCloseModal} zIndex={1051} // one higher than other modals - needed for managed ingestion forms footer={ <> - } @@ -81,11 +114,15 @@ export const SecretBuilderModal = ({ initialState, visible, onSubmit, onCancel } }, { whitespace: false }, { min: 1, max: 50 }, - { pattern: /^[a-zA-Z_]+[a-zA-Z0-9_]*$/, message: 'Please start the secret name with a letter, followed by letters, digits, or underscores only.' }, + { + pattern: /^[a-zA-Z_]+[a-zA-Z0-9_]*$/, + message: + 'Please start the secret name with a letter, followed by letters, digits, or underscores only.', + }, ]} hasFeedback > - + Value}> diff --git a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx index 1a960997e6bee..2219b6147d9e0 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx @@ -9,6 +9,7 @@ import { useCreateSecretMutation, useDeleteSecretMutation, useListSecretsQuery, + useUpdateSecretMutation, } from '../../../graphql/ingestion.generated'; import { Message } from '../../shared/Message'; import TabToolbar from '../../entity/shared/components/styled/TabToolbar'; @@ -18,7 +19,11 @@ import { StyledTable } from '../../entity/shared/components/styled/StyledTable'; import { SearchBar } from '../../search/SearchBar'; import { useEntityRegistry } from '../../useEntityRegistry'; import { scrollToTop } from '../../shared/searchUtils'; -import { addSecretToListSecretsCache, removeSecretFromListSecretsCache } from './cacheUtils'; +import { + addSecretToListSecretsCache, + removeSecretFromListSecretsCache, + updateSecretInListSecretsCache, +} from './cacheUtils'; import { ONE_SECOND_IN_MS } from '../../entity/shared/tabs/Dataset/Queries/utils/constants'; const DeleteButtonContainer = styled.div` @@ -48,10 +53,12 @@ export const SecretsList = () => { // Whether or not there is an urn to show in the modal const [isCreatingSecret, setIsCreatingSecret] = useState(false); + const [editSecret, setEditSecret] = useState(undefined); const [deleteSecretMutation] = useDeleteSecretMutation(); const [createSecretMutation] = useCreateSecretMutation(); - const { loading, error, data, client } = useListSecretsQuery({ + const [updateSecretMutation] = useUpdateSecretMutation(); + const { loading, error, data, client, refetch } = 
useListSecretsQuery({ variables: { input: { start, @@ -125,6 +132,47 @@ export const SecretsList = () => { }); }); }; + const onUpdate = (state: SecretBuilderState, resetBuilderState: () => void) => { + updateSecretMutation({ + variables: { + input: { + urn: state.urn as string, + name: state.name as string, + value: state.value as string, + description: state.description as string, + }, + }, + }) + .then(() => { + message.success({ + content: `Successfully updated Secret!`, + duration: 3, + }); + resetBuilderState(); + setIsCreatingSecret(false); + setEditSecret(undefined); + updateSecretInListSecretsCache( + { + urn: state.urn, + name: state.name, + description: state.description, + }, + client, + pageSize, + page, + ); + setTimeout(() => { + refetch(); + }, 2000); + }) + .catch((e) => { + message.destroy(); + message.error({ + content: `Failed to update Secret!: \n ${e.message || ''}`, + duration: 3, + }); + }); + }; const onDeleteSecret = (urn: string) => { Modal.confirm({ @@ -140,6 +188,16 @@ export const SecretsList = () => { }); }; + const onEditSecret = (urnData: any) => { + setIsCreatingSecret(true); + setEditSecret(urnData); + }; + + const onCancel = () => { + setIsCreatingSecret(false); + setEditSecret(undefined); + }; + const tableColumns = [ { title: 'Name', @@ -161,6 +219,9 @@ export const SecretsList = () => { key: 'x', render: (_, record: any) => ( + @@ -234,8 +295,10 @@ export const SecretsList = () => {
    setIsCreatingSecret(false)} + onCancel={onCancel} /> ); diff --git a/datahub-web-react/src/app/ingest/secret/cacheUtils.ts b/datahub-web-react/src/app/ingest/secret/cacheUtils.ts index 72e287f8846ed..b3a3a45f33892 100644 --- a/datahub-web-react/src/app/ingest/secret/cacheUtils.ts +++ b/datahub-web-react/src/app/ingest/secret/cacheUtils.ts @@ -64,6 +64,51 @@ export const addSecretToListSecretsCache = (secret, client, pageSize) => { }); }; +export const updateSecretInListSecretsCache = (updatedSecret, client, pageSize, page) => { + const currData: ListSecretsQuery | null = client.readQuery({ + query: ListSecretsDocument, + variables: { + input: { + start: (page - 1) * pageSize, + count: pageSize, + }, + }, + }); + + const updatedSecretIndex = (currData?.listSecrets?.secrets || []) + .map((secret, index) => { + if (secret.urn === updatedSecret.urn) { + return index; + } + return -1; + }) + .find((index) => index !== -1); + + if (updatedSecretIndex !== undefined) { + const newSecrets = (currData?.listSecrets?.secrets || []).map((secret, index) => { + return index === updatedSecretIndex ? updatedSecret : secret; + }); + + client.writeQuery({ + query: ListSecretsDocument, + variables: { + input: { + start: (page - 1) * pageSize, + count: pageSize, + }, + }, + data: { + listSecrets: { + start: currData?.listSecrets?.start || 0, + count: currData?.listSecrets?.count || 1, + total: currData?.listSecrets?.total || 1, + secrets: newSecrets, + }, + }, + }); + } +}; + export const clearSecretListCache = (client) => { // Remove any caching of 'listSecrets' client.cache.evict({ id: 'ROOT_QUERY', fieldName: 'listSecrets' }); diff --git a/datahub-web-react/src/app/ingest/secret/types.ts b/datahub-web-react/src/app/ingest/secret/types.ts index 23e45cab9b179..e0dbc8d443d9b 100644 --- a/datahub-web-react/src/app/ingest/secret/types.ts +++ b/datahub-web-react/src/app/ingest/secret/types.ts @@ -2,6 +2,10 @@ * The object represents the state of the Ingestion Source Builder form. */ export interface SecretBuilderState { + /** + * The name of the secret. + */ + urn?: string; /** * The name of the secret. 
*/ From 1498c36875450b1a1f44d53e8e8c47c41a91dc69 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 29 Jan 2024 10:50:47 -0800 Subject: [PATCH 454/792] chore(cli): drop support for python 3.7 (#9731) --- .github/workflows/metadata-ingestion.yml | 4 +- docs/cli.md | 2 +- docs/how/updating-datahub.md | 11 +- docs/quickstart.md | 2 +- .../airflow-plugin/setup.py | 14 +- .../airflow-plugin/tests/unit/test_airflow.py | 204 ++-- metadata-ingestion/build.gradle | 2 +- metadata-ingestion/cli-ingestion.md | 21 +- metadata-ingestion/developing.md | 6 +- metadata-ingestion/setup.py | 23 +- metadata-ingestion/src/datahub/__init__.py | 11 +- .../src/datahub/ingestion/api/report.py | 21 +- .../src/datahub/ingestion/source/feast.py | 5 - .../ingestion/source/iceberg/iceberg.py | 5 - .../src/datahub/ingestion/source/mlflow.py | 6 - .../source/schema_inference/object.py | 2 +- .../feast/test_feast_repository.py | 7 - .../tests/integration/iceberg/test_iceberg.py | 9 +- .../integration/mlflow/test_mlflow_source.py | 184 ++-- .../integration/sql_server/test_sql_server.py | 5 - metadata-ingestion/tests/unit/test_iceberg.py | 899 +++++++++--------- .../tests/unit/test_mlflow_source.py | 225 ++--- 22 files changed, 805 insertions(+), 863 deletions(-) diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 1da08b14b8b5b..e7d6b7b97c099 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -31,7 +31,7 @@ jobs: # DATAHUB_LOOKML_GIT_TEST_SSH_KEY: ${{ secrets.DATAHUB_LOOKML_GIT_TEST_SSH_KEY }} strategy: matrix: - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.10"] command: [ "testQuick", @@ -40,7 +40,7 @@ jobs: "testIntegrationBatch2", ] include: - - python-version: "3.7" + - python-version: "3.8" - python-version: "3.10" fail-fast: false steps: diff --git a/docs/cli.md b/docs/cli.md index cb5077db42906..927270b42259d 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -24,7 +24,7 @@ source venv/bin/activate # activate the environment Once inside the virtual environment, install `datahub` using the following commands ```shell -# Requires Python 3.7+ +# Requires Python 3.8+ python3 -m pip install --upgrade pip wheel setuptools python3 -m pip install --upgrade acryl-datahub # validate that the install was successful diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index b671e2fc5d123..6b6903b04f383 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -10,8 +10,10 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Neo4j 5.x, may require migration from 4.x - Build requires JDK17 (Runtime Java 11) - Build requires Docker Compose > 2.20 +- #9731 - The `acryl-datahub` CLI now requires Python 3.8+ - #9601 - The Unity Catalog(UC) ingestion source config `include_metastore` is now disabled by default. This change will affect the urns of all entities in the workspace.
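The docs and CI matrix above, and the packaging metadata below, all move the floor to Python 3.8. The runtime form of that floor is the guard this patch also updates in `metadata-ingestion/build.gradle` (shown later in the diff); as a sketch, its assertion amounts to:

```python
import sys

# Same check the build runs before creating the venv; the upper bound mirrors
# the gradle assertion, which also rejected Python 3.11+ at this point.
assert (3, 11) > sys.version_info >= (3, 8), (
    f"Python version {sys.version_info[:2]} not allowed"
)
```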
    - Entity Hierarchy with `include_metastore: true` (Old) + Entity Hierarchy with `include_metastore: true` (Old) + ``` - UC Metastore - Catalog @@ -19,16 +21,19 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Table ``` - Entity Hierarchy with `include_metastore: false` (New) + Entity Hierarchy with `include_metastore: false` (New) + ``` - Catalog - Schema - Table ``` + We recommend using `platform_instance` for differentiating across metastores. If stateful ingestion is enabled, running ingestion with latest cli version will perform all required cleanup. Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: - `datahub delete --platform databricks --soft` and then reingesting with latest cli version. + `datahub delete --platform databricks --soft` and then reingesting with latest cli version. + - #9601 - The Unity Catalog(UC) ingestion source config `include_hive_metastore` is now enabled by default. This requires config `warehouse_id` to be set. You can disable `include_hive_metastore` by setting it to `False` to avoid ingesting legacy hive metastore catalog in Databricks. ### Potential Downtime diff --git a/docs/quickstart.md b/docs/quickstart.md index 5856ef84c0074..507be6ba05471 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -22,7 +22,7 @@ If you're interested in a managed version, [Acryl Data](https://www.acryldata.io | Linux | [Docker for Linux](https://docs.docker.com/desktop/install/linux-install/) and [Docker Compose](https://docs.docker.com/compose/install/linux/) | - **Launch the Docker engine** from command line or the desktop app. -- Ensure you have **Python 3.7+** installed & configured. (Check using `python3 --version`). +- Ensure you have **Python 3.8+** installed & configured. (Check using `python3 --version`). :::note Docker Resource Allocation diff --git a/metadata-ingestion-modules/airflow-plugin/setup.py b/metadata-ingestion-modules/airflow-plugin/setup.py index 838322f83833b..1a3e844cedc1f 100644 --- a/metadata-ingestion-modules/airflow-plugin/setup.py +++ b/metadata-ingestion-modules/airflow-plugin/setup.py @@ -18,16 +18,10 @@ def get_long_description(): _self_pin = f"=={_version}" if not _version.endswith("dev0") else "" -rest_common = {"requests", "requests_file"} - base_requirements = { - # Compatibility. - "dataclasses>=0.6; python_version < '3.7'", - "mypy_extensions>=0.4.3", + f"acryl-datahub[datahub-rest]{_self_pin}", # Actual dependencies. - "pydantic>=1.5.1", "apache-airflow >= 2.0.2", - *rest_common, } plugins: Dict[str, Set[str]] = { @@ -42,9 +36,8 @@ def get_long_description(): }, "plugin-v1": set(), "plugin-v2": { - # The v2 plugin requires Python 3.8+. f"acryl-datahub[sql-parser]{_self_pin}", - "openlineage-airflow==1.2.0; python_version >= '3.8'", + "openlineage-airflow==1.2.0", }, } @@ -144,7 +137,6 @@ def get_long_description(): "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -161,7 +153,7 @@ def get_long_description(): ], # Package info. 
zip_safe=False, - python_requires=">=3.7", + python_requires=">=3.8", package_data={ "datahub_airflow_plugin": ["py.typed"], }, diff --git a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py index 93b4af0501985..b484713e18faf 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py @@ -1,7 +1,6 @@ import datetime import json import os -import sys from contextlib import contextmanager from typing import Iterator from unittest import mock @@ -318,137 +317,134 @@ def test_lineage_backend(mock_emit, inlets, outlets, capture_executions): # Check that the right things were emitted. assert mock_emitter.emit.call_count == 17 if capture_executions else 9 - # Running further checks based on python version because args only exists in python 3.8+ - if sys.version_info > (3, 8): - assert mock_emitter.method_calls[0].args[0].aspectName == "dataFlowInfo" + # TODO: Replace this with a golden file-based comparison. + assert mock_emitter.method_calls[0].args[0].aspectName == "dataFlowInfo" + assert ( + mock_emitter.method_calls[0].args[0].entityUrn + == "urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod)" + ) + + assert mock_emitter.method_calls[1].args[0].aspectName == "ownership" + assert ( + mock_emitter.method_calls[1].args[0].entityUrn + == "urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod)" + ) + + assert mock_emitter.method_calls[2].args[0].aspectName == "globalTags" + assert ( + mock_emitter.method_calls[2].args[0].entityUrn + == "urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod)" + ) + + assert mock_emitter.method_calls[3].args[0].aspectName == "dataJobInfo" + assert ( + mock_emitter.method_calls[3].args[0].entityUrn + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + ) + + assert mock_emitter.method_calls[4].args[0].aspectName == "dataJobInputOutput" + assert ( + mock_emitter.method_calls[4].args[0].entityUrn + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + ) + assert ( + mock_emitter.method_calls[4].args[0].aspect.inputDatajobs[0] + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task1_upstream)" + ) + assert ( + mock_emitter.method_calls[4].args[0].aspect.inputDatajobs[1] + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)" + ) + assert ( + mock_emitter.method_calls[4].args[0].aspect.inputDatasets[0] + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" + ) + assert ( + mock_emitter.method_calls[4].args[0].aspect.outputDatasets[0] + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableProduced,PROD)" + ) + + assert mock_emitter.method_calls[5].args[0].aspectName == "status" + assert ( + mock_emitter.method_calls[5].args[0].entityUrn + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" + ) + + assert mock_emitter.method_calls[6].args[0].aspectName == "status" + assert ( + mock_emitter.method_calls[6].args[0].entityUrn + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableProduced,PROD)" + ) + + assert mock_emitter.method_calls[7].args[0].aspectName == "ownership" + assert ( + mock_emitter.method_calls[7].args[0].entityUrn + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + ) + + assert 
mock_emitter.method_calls[8].args[0].aspectName == "globalTags" + assert ( + mock_emitter.method_calls[8].args[0].entityUrn + == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + ) + + if capture_executions: assert ( - mock_emitter.method_calls[0].args[0].entityUrn - == "urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod)" + mock_emitter.method_calls[9].args[0].aspectName + == "dataProcessInstanceProperties" ) - - assert mock_emitter.method_calls[1].args[0].aspectName == "ownership" assert ( - mock_emitter.method_calls[1].args[0].entityUrn - == "urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod)" + mock_emitter.method_calls[9].args[0].entityUrn + == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" ) - assert mock_emitter.method_calls[2].args[0].aspectName == "globalTags" assert ( - mock_emitter.method_calls[2].args[0].entityUrn - == "urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod)" + mock_emitter.method_calls[10].args[0].aspectName + == "dataProcessInstanceRelationships" ) - - assert mock_emitter.method_calls[3].args[0].aspectName == "dataJobInfo" assert ( - mock_emitter.method_calls[3].args[0].entityUrn - == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + mock_emitter.method_calls[10].args[0].entityUrn + == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" ) - assert ( - mock_emitter.method_calls[4].args[0].aspectName == "dataJobInputOutput" + mock_emitter.method_calls[11].args[0].aspectName + == "dataProcessInstanceInput" ) assert ( - mock_emitter.method_calls[4].args[0].entityUrn - == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + mock_emitter.method_calls[11].args[0].entityUrn + == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" ) assert ( - mock_emitter.method_calls[4].args[0].aspect.inputDatajobs[0] - == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task1_upstream)" + mock_emitter.method_calls[12].args[0].aspectName + == "dataProcessInstanceOutput" ) assert ( - mock_emitter.method_calls[4].args[0].aspect.inputDatajobs[1] - == "urn:li:dataJob:(urn:li:dataFlow:(airflow,testDag,PROD),testTask)" + mock_emitter.method_calls[12].args[0].entityUrn + == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" ) + assert mock_emitter.method_calls[13].args[0].aspectName == "status" assert ( - mock_emitter.method_calls[4].args[0].aspect.inputDatasets[0] + mock_emitter.method_calls[13].args[0].entityUrn == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" ) + assert mock_emitter.method_calls[14].args[0].aspectName == "status" assert ( - mock_emitter.method_calls[4].args[0].aspect.outputDatasets[0] + mock_emitter.method_calls[14].args[0].entityUrn == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableProduced,PROD)" ) - - assert mock_emitter.method_calls[5].args[0].aspectName == "status" assert ( - mock_emitter.method_calls[5].args[0].entityUrn - == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" + mock_emitter.method_calls[15].args[0].aspectName + == "dataProcessInstanceRunEvent" ) - - assert mock_emitter.method_calls[6].args[0].aspectName == "status" assert ( - mock_emitter.method_calls[6].args[0].entityUrn - == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableProduced,PROD)" + mock_emitter.method_calls[15].args[0].entityUrn + == 
"urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" ) - - assert mock_emitter.method_calls[7].args[0].aspectName == "ownership" assert ( - mock_emitter.method_calls[7].args[0].entityUrn - == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + mock_emitter.method_calls[16].args[0].aspectName + == "dataProcessInstanceRunEvent" ) - - assert mock_emitter.method_calls[8].args[0].aspectName == "globalTags" assert ( - mock_emitter.method_calls[8].args[0].entityUrn - == "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_lineage_is_sent_to_backend,prod),task2)" + mock_emitter.method_calls[16].args[0].entityUrn + == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" ) - - if capture_executions: - assert ( - mock_emitter.method_calls[9].args[0].aspectName - == "dataProcessInstanceProperties" - ) - assert ( - mock_emitter.method_calls[9].args[0].entityUrn - == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" - ) - - assert ( - mock_emitter.method_calls[10].args[0].aspectName - == "dataProcessInstanceRelationships" - ) - assert ( - mock_emitter.method_calls[10].args[0].entityUrn - == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" - ) - assert ( - mock_emitter.method_calls[11].args[0].aspectName - == "dataProcessInstanceInput" - ) - assert ( - mock_emitter.method_calls[11].args[0].entityUrn - == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" - ) - assert ( - mock_emitter.method_calls[12].args[0].aspectName - == "dataProcessInstanceOutput" - ) - assert ( - mock_emitter.method_calls[12].args[0].entityUrn - == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" - ) - assert mock_emitter.method_calls[13].args[0].aspectName == "status" - assert ( - mock_emitter.method_calls[13].args[0].entityUrn - == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableConsumed,PROD)" - ) - assert mock_emitter.method_calls[14].args[0].aspectName == "status" - assert ( - mock_emitter.method_calls[14].args[0].entityUrn - == "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableProduced,PROD)" - ) - assert ( - mock_emitter.method_calls[15].args[0].aspectName - == "dataProcessInstanceRunEvent" - ) - assert ( - mock_emitter.method_calls[15].args[0].entityUrn - == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" - ) - assert ( - mock_emitter.method_calls[16].args[0].aspectName - == "dataProcessInstanceRunEvent" - ) - assert ( - mock_emitter.method_calls[16].args[0].entityUrn - == "urn:li:dataProcessInstance:5e274228107f44cc2dd7c9782168cc29" - ) diff --git a/metadata-ingestion/build.gradle b/metadata-ingestion/build.gradle index b3cc350cc109f..8338124288ec9 100644 --- a/metadata-ingestion/build.gradle +++ b/metadata-ingestion/build.gradle @@ -17,7 +17,7 @@ def get_coverage_arg(test_name) { task checkPythonVersion(type: Exec) { commandLine python_executable, '-c', - 'import sys; assert (3, 11) > sys.version_info >= (3, 7), f"Python version {sys.version_info[:2]} not allowed"' + 'import sys; assert (3, 11) > sys.version_info >= (3, 8), f"Python version {sys.version_info[:2]} not allowed"' } task environmentSetup(type: Exec, dependsOn: checkPythonVersion) { diff --git a/metadata-ingestion/cli-ingestion.md b/metadata-ingestion/cli-ingestion.md index cbdde2cd30167..48cc4ef09db91 100644 --- a/metadata-ingestion/cli-ingestion.md +++ b/metadata-ingestion/cli-ingestion.md @@ -2,26 +2,31 @@ ## Installing the CLI -Make sure you have installed DataHub CLI before following this guide. 
+Make sure you have installed DataHub CLI before following this guide. + ```shell -# Requires Python 3.7+ +# Requires Python 3.8+ python3 -m pip install --upgrade pip wheel setuptools python3 -m pip install --upgrade acryl-datahub # validate that the install was successful datahub version # If you see "command not found", try running this instead: python3 -m datahub version ``` -Check out the [CLI Installation Guide](../docs/cli.md#installation) for more installation options and troubleshooting tips. + +Check out the [CLI Installation Guide](../docs/cli.md#installation) for more installation options and troubleshooting tips. After that, install the required plugin for the ingestion. ```shell pip install 'acryl-datahub[datahub-rest]' # install the required plugin ``` -Check out the [alternative installation options](../docs/cli.md#alternate-installation-options) for more reference. + +Check out the [alternative installation options](../docs/cli.md#alternate-installation-options) for more reference. ## Configuring a Recipe + Create a recipe.yml file that defines the source and sink for metadata, as shown below. + ```yaml # my_reipe.yml source: @@ -29,7 +34,7 @@ source: config: option_1: ... - + sink: type: config: @@ -39,7 +44,8 @@ sink: For more information and examples on configuring recipes, please refer to [Recipes](recipe_overview.md). ## Ingesting Metadata -You can run ingestion using `datahub ingest` like below. + +You can run ingestion using `datahub ingest` like below. ```shell datahub ingest -c @@ -48,6 +54,7 @@ datahub ingest -c ## Reference Please refer the following pages for advanced guids on CLI ingestion. + - [Reference for `datahub ingest` command](../docs/cli.md#ingest) - [UI Ingestion Guide](../docs/ui-ingestion.md) @@ -56,4 +63,4 @@ DataHub server uses a 3 digit versioning scheme, while the CLI uses a 4 digit sc We do this because we do CLI releases at a much higher frequency than server releases, usually every few days vs twice a month. For ingestion sources, any breaking changes will be highlighted in the [release notes](../docs/how/updating-datahub.md). When fields are deprecated or otherwise changed, we will try to maintain backwards compatibility for two server releases, which is about 4-6 weeks. The CLI will also print warnings whenever deprecated options are used. -::: \ No newline at end of file +::: diff --git a/metadata-ingestion/developing.md b/metadata-ingestion/developing.md index fc3a689124b2c..47e325171ddcc 100644 --- a/metadata-ingestion/developing.md +++ b/metadata-ingestion/developing.md @@ -9,10 +9,10 @@ Also take a look at the guide to [adding a source](./adding-source.md). ### Requirements -1. Python 3.7+ must be installed in your host environment. +1. Python 3.8+ must be installed in your host environment. 2. Java 17 (gradle won't work with newer or older versions) -4. On Debian/Ubuntu: `sudo apt install python3-dev python3-venv` -5. On Fedora (if using LDAP source integration): `sudo yum install openldap-devel` +3. On Debian/Ubuntu: `sudo apt install python3-dev python3-venv` +4. 
On Fedora (if using LDAP source integration): `sudo yum install openldap-devel` ### Set up your Python environment diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index af2b54ba1cefa..f8d51997330a9 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -1,4 +1,3 @@ -import sys from typing import Dict, Set import setuptools @@ -11,7 +10,6 @@ base_requirements = { # Typing extension should be >=3.10.0.2 ideally but we can't restrict due to a Airflow 2.1 dependency conflict. "typing_extensions>=3.7.4.3", - "mypy_extensions>=0.4.3", # Actual dependencies. "typing-inspect", # pydantic 1.8.2 is incompatible with mypy 0.910. @@ -48,9 +46,7 @@ "click-spinner", "requests_file", "jsonref", - # jsonschema drops python 3.7 support in v4.18.0 - "jsonschema<=4.17.3; python_version < '3.8'", - "jsonschema; python_version >= '3.8'", + "jsonschema", "ruamel.yaml", } @@ -463,7 +459,7 @@ "black==22.12.0", "coverage>=5.1", "faker>=18.4.0", - "flake8>=3.8.3", # DEPRECATION: Once we drop Python 3.7, we can pin to 6.x. + "flake8>=6.0.0", "flake8-tidy-imports>=4.3.0", "flake8-bugbear==23.3.12", "isort>=5.7.0", @@ -489,9 +485,9 @@ "delta-lake", "druid", "elasticsearch", - "feast" if sys.version_info >= (3, 8) else None, - "iceberg" if sys.version_info >= (3, 8) else None, - "mlflow" if sys.version_info >= (3, 8) else None, + "feast", + "iceberg", + "mlflow", "json-schema", "ldap", "looker", @@ -544,14 +540,14 @@ "clickhouse", "delta-lake", "druid", - "feast" if sys.version_info >= (3, 8) else None, + "feast", "hana", "hive", - "iceberg" if sys.version_info >= (3, 8) else None, + "iceberg", "kafka-connect", "ldap", "mongodb", - "mssql" if sys.version_info >= (3, 8) else None, + "mssql", "mysql", "mariadb", "redash", @@ -699,7 +695,6 @@ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -716,7 +711,7 @@ ], # Package info. zip_safe=False, - python_requires=">=3.7", + python_requires=">=3.8", package_dir={"": "src"}, packages=setuptools.find_namespace_packages(where="./src"), package_data={ diff --git a/metadata-ingestion/src/datahub/__init__.py b/metadata-ingestion/src/datahub/__init__.py index a470de7b500be..b254deb7fa30e 100644 --- a/metadata-ingestion/src/datahub/__init__.py +++ b/metadata-ingestion/src/datahub/__init__.py @@ -16,16 +16,9 @@ def nice_version_name() -> str: return __version__ -if sys.version_info < (3, 7): +if sys.version_info < (3, 8): warnings.warn( - "DataHub requires Python 3.7 or newer. " - "Please upgrade your Python version to continue using DataHub.", - FutureWarning, - stacklevel=2, - ) -elif sys.version_info < (3, 8): - warnings.warn( - "DataHub will require Python 3.8 or newer soon. " + "DataHub requires Python 3.8 or newer. 
" "Please upgrade your Python version to continue using DataHub.", FutureWarning, stacklevel=2, diff --git a/metadata-ingestion/src/datahub/ingestion/api/report.py b/metadata-ingestion/src/datahub/ingestion/api/report.py index fcca767591774..08b20d9e85691 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/report.py +++ b/metadata-ingestion/src/datahub/ingestion/api/report.py @@ -2,11 +2,10 @@ import json import logging import pprint -import sys from dataclasses import dataclass from datetime import datetime, timedelta from enum import Enum -from typing import Any, Dict, Optional +from typing import Any, Optional import humanfriendly import pydantic @@ -19,12 +18,6 @@ logger = logging.getLogger(__name__) LogLevel = Literal["ERROR", "WARNING", "INFO", "DEBUG"] -# The sort_dicts option was added in Python 3.8. -if sys.version_info >= (3, 8): - PPRINT_OPTIONS = {"sort_dicts": False} -else: - PPRINT_OPTIONS: Dict = {} - @runtime_checkable class SupportsAsObj(Protocol): @@ -32,14 +25,6 @@ def as_obj(self) -> dict: ... -def _stacklevel_if_supported(level: int) -> dict: - # The logging module added support for stacklevel in Python 3.8. - if sys.version_info >= (3, 8): - return {"stacklevel": level} - else: - return {} - - @dataclass class Report(SupportsAsObj): @staticmethod @@ -95,7 +80,7 @@ def as_obj(self) -> dict: } def as_string(self) -> str: - return pprint.pformat(self.as_obj(), width=150, **PPRINT_OPTIONS) + return pprint.pformat(self.as_obj(), width=150, sort_dicts=False) def as_json(self) -> str: return json.dumps(self.as_obj()) @@ -118,7 +103,7 @@ def logger_sev(self) -> int: return log_levels[self.severity] def log(self, msg: str) -> None: - logger.log(level=self.logger_sev, msg=msg, **_stacklevel_if_supported(3)) + logger.log(level=self.logger_sev, msg=msg, stacklevel=3) class EntityFilterReport(ReportAttribute): diff --git a/metadata-ingestion/src/datahub/ingestion/source/feast.py b/metadata-ingestion/src/datahub/ingestion/source/feast.py index 8faba7d113372..db0c8e9c39e7b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/feast.py +++ b/metadata-ingestion/src/datahub/ingestion/source/feast.py @@ -1,8 +1,3 @@ -import sys - -if sys.version_info < (3, 8): - raise ImportError("Feast is only supported on Python 3.8+") - from dataclasses import dataclass from typing import Dict, Iterable, List, Optional, Tuple, Union diff --git a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py index cc7f646dcb884..2585260434a38 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py +++ b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py @@ -1,8 +1,3 @@ -import sys - -if sys.version_info < (3, 8): - raise ImportError("Iceberg is only supported on Python 3.8+") - import json import logging import uuid diff --git a/metadata-ingestion/src/datahub/ingestion/source/mlflow.py b/metadata-ingestion/src/datahub/ingestion/source/mlflow.py index 0668defe7b0c6..cef6d2b1bb577 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mlflow.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mlflow.py @@ -1,9 +1,3 @@ -import sys - -if sys.version_info < (3, 8): - raise ImportError("MLflow is only supported on Python 3.8+") - - from dataclasses import dataclass from typing import Any, Callable, Iterable, Optional, TypeVar, Union diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py 
b/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py index b58bdf41ccaa5..5a11d020547e8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema_inference/object.py @@ -1,7 +1,7 @@ from collections import Counter from typing import Any, Counter as CounterType, Dict, Sequence, Tuple, Union -from mypy_extensions import TypedDict +from typing_extensions import TypedDict class BasicSchemaDescription(TypedDict): diff --git a/metadata-ingestion/tests/integration/feast/test_feast_repository.py b/metadata-ingestion/tests/integration/feast/test_feast_repository.py index eab37f67ed155..a6bdce6722289 100644 --- a/metadata-ingestion/tests/integration/feast/test_feast_repository.py +++ b/metadata-ingestion/tests/integration/feast/test_feast_repository.py @@ -1,6 +1,3 @@ -import sys - -import pytest from freezegun import freeze_time from datahub.ingestion.run.pipeline import Pipeline @@ -8,10 +5,6 @@ FROZEN_TIME = "2020-04-14 07:00:00" -pytestmark = pytest.mark.skipif( - sys.version_info < (3, 8), reason="requires python 3.8 or higher" -) - @freeze_time(FROZEN_TIME) def test_feast_repository_ingest(pytestconfig, tmp_path, mock_time): diff --git a/metadata-ingestion/tests/integration/iceberg/test_iceberg.py b/metadata-ingestion/tests/integration/iceberg/test_iceberg.py index 65ede11c3f1c0..a9ab43169405d 100644 --- a/metadata-ingestion/tests/integration/iceberg/test_iceberg.py +++ b/metadata-ingestion/tests/integration/iceberg/test_iceberg.py @@ -1,5 +1,4 @@ import subprocess -import sys from typing import Any, Dict, List from unittest.mock import patch @@ -15,13 +14,7 @@ validate_all_providers_have_committed_successfully, ) -pytestmark = [ - pytest.mark.integration_batch_1, - # Skip tests if not on Python 3.8 or higher. 
- pytest.mark.skipif( - sys.version_info < (3, 8), reason="Requires python 3.8 or higher" - ), -] +pytestmark = pytest.mark.integration_batch_1 FROZEN_TIME = "2020-04-14 07:00:00" GMS_PORT = 8080 GMS_SERVER = f"http://localhost:{GMS_PORT}" diff --git a/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py b/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py index 76af666526555..155199d5a04e9 100644 --- a/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py +++ b/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py @@ -1,104 +1,106 @@ -import sys +from pathlib import Path +from typing import Any, Dict, TypeVar -if sys.version_info >= (3, 8): - from pathlib import Path - from typing import Any, Dict, TypeVar +import pytest +from mlflow import MlflowClient - import pytest - from mlflow import MlflowClient +from datahub.ingestion.run.pipeline import Pipeline +from tests.test_helpers import mce_helpers - from datahub.ingestion.run.pipeline import Pipeline - from tests.test_helpers import mce_helpers +T = TypeVar("T") - T = TypeVar("T") - @pytest.fixture - def tracking_uri(tmp_path: Path) -> str: - return str(tmp_path / "mlruns") +@pytest.fixture +def tracking_uri(tmp_path: Path) -> str: + return str(tmp_path / "mlruns") - @pytest.fixture - def sink_file_path(tmp_path: Path) -> str: - return str(tmp_path / "mlflow_source_mcps.json") - @pytest.fixture - def pipeline_config(tracking_uri: str, sink_file_path: str) -> Dict[str, Any]: - source_type = "mlflow" - return { - "run_id": "mlflow-source-test", - "source": { - "type": source_type, - "config": { - "tracking_uri": tracking_uri, - }, +@pytest.fixture +def sink_file_path(tmp_path: Path) -> str: + return str(tmp_path / "mlflow_source_mcps.json") + + +@pytest.fixture +def pipeline_config(tracking_uri: str, sink_file_path: str) -> Dict[str, Any]: + source_type = "mlflow" + return { + "run_id": "mlflow-source-test", + "source": { + "type": source_type, + "config": { + "tracking_uri": tracking_uri, }, - "sink": { - "type": "file", - "config": { - "filename": sink_file_path, - }, + }, + "sink": { + "type": "file", + "config": { + "filename": sink_file_path, }, - } + }, + } + + +@pytest.fixture +def generate_mlflow_data(tracking_uri: str) -> None: + client = MlflowClient(tracking_uri=tracking_uri) + experiment_name = "test-experiment" + run_name = "test-run" + model_name = "test-model" + test_experiment_id = client.create_experiment(experiment_name) + test_run = client.create_run( + experiment_id=test_experiment_id, + run_name=run_name, + ) + client.log_param( + run_id=test_run.info.run_id, + key="p", + value=1, + ) + client.log_metric( + run_id=test_run.info.run_id, + key="m", + value=0.85, + ) + client.create_registered_model( + name=model_name, + tags=dict( + model_id=1, + model_env="test", + ), + description="This a test registered model", + ) + client.create_model_version( + name=model_name, + source="dummy_dir/dummy_file", + run_id=test_run.info.run_id, + tags=dict(model_version_id=1), + ) + client.transition_model_version_stage( + name=model_name, + version="1", + stage="Archived", + ) - @pytest.fixture - def generate_mlflow_data(tracking_uri: str) -> None: - client = MlflowClient(tracking_uri=tracking_uri) - experiment_name = "test-experiment" - run_name = "test-run" - model_name = "test-model" - test_experiment_id = client.create_experiment(experiment_name) - test_run = client.create_run( - experiment_id=test_experiment_id, - run_name=run_name, - ) - client.log_param( - 
run_id=test_run.info.run_id, - key="p", - value=1, - ) - client.log_metric( - run_id=test_run.info.run_id, - key="m", - value=0.85, - ) - client.create_registered_model( - name=model_name, - tags=dict( - model_id=1, - model_env="test", - ), - description="This a test registered model", - ) - client.create_model_version( - name=model_name, - source="dummy_dir/dummy_file", - run_id=test_run.info.run_id, - tags=dict(model_version_id=1), - ) - client.transition_model_version_stage( - name=model_name, - version="1", - stage="Archived", - ) - def test_ingestion( - pytestconfig, - mock_time, - sink_file_path, - pipeline_config, - generate_mlflow_data, - ): - print(f"MCPs file path: {sink_file_path}") - golden_file_path = ( - pytestconfig.rootpath / "tests/integration/mlflow/mlflow_mcps_golden.json" - ) +def test_ingestion( + pytestconfig, + mock_time, + sink_file_path, + pipeline_config, + generate_mlflow_data, +): + print(f"MCPs file path: {sink_file_path}") + golden_file_path = ( + pytestconfig.rootpath / "tests/integration/mlflow/mlflow_mcps_golden.json" + ) - pipeline = Pipeline.create(pipeline_config) - pipeline.run() - pipeline.pretty_print_summary() - pipeline.raise_from_status() + pipeline = Pipeline.create(pipeline_config) + pipeline.run() + pipeline.pretty_print_summary() + pipeline.raise_from_status() - mce_helpers.check_golden_file( - pytestconfig=pytestconfig, - output_path=sink_file_path, - golden_path=golden_file_path, - ) + mce_helpers.check_golden_file( + pytestconfig=pytestconfig, + output_path=sink_file_path, + golden_path=golden_file_path, + ) diff --git a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py index 5ed672d527264..f439a322c2677 100644 --- a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py +++ b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py @@ -1,6 +1,5 @@ import os import subprocess -import sys import time import pytest @@ -9,10 +8,6 @@ from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import cleanup_image, wait_for_port -pytestmark = pytest.mark.skipif( - sys.version_info < (3, 8), reason="requires python 3.8 or higher" -) - @pytest.fixture(scope="module") def mssql_runner(docker_compose_runner, pytestconfig): diff --git a/metadata-ingestion/tests/unit/test_iceberg.py b/metadata-ingestion/tests/unit/test_iceberg.py index 768d4f958af1f..e2b463004f5a1 100644 --- a/metadata-ingestion/tests/unit/test_iceberg.py +++ b/metadata-ingestion/tests/unit/test_iceberg.py @@ -1,482 +1,477 @@ -import sys import uuid from decimal import Decimal from typing import Any, Optional import pytest from pydantic import ValidationError +from pyiceberg.schema import Schema +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DecimalType, + DoubleType, + FixedType, + FloatType, + IcebergType, + IntegerType, + ListType, + LongType, + MapType, + NestedField, + PrimitiveType, + StringType, + StructType, + TimestampType, + TimestamptzType, + TimeType, + UUIDType, +) + +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.source.iceberg.iceberg import ( + IcebergProfiler, + IcebergSource, + IcebergSourceConfig, +) +from datahub.ingestion.source.iceberg.iceberg_common import IcebergCatalogConfig +from datahub.metadata.com.linkedin.pegasus2avro.schema import ArrayType, SchemaField +from datahub.metadata.schema_classes import ( + ArrayTypeClass, + BooleanTypeClass, + 
BytesTypeClass, + DateTypeClass, + FixedTypeClass, + NumberTypeClass, + RecordTypeClass, + StringTypeClass, + TimeTypeClass, +) -if sys.version_info >= (3, 8): - from pyiceberg.schema import Schema - from pyiceberg.types import ( - BinaryType, - BooleanType, - DateType, - DecimalType, - DoubleType, - FixedType, - FloatType, - IcebergType, - IntegerType, - ListType, - LongType, - MapType, - NestedField, - PrimitiveType, - StringType, - StructType, - TimestampType, - TimestamptzType, - TimeType, - UUIDType, - ) - from datahub.ingestion.api.common import PipelineContext - from datahub.ingestion.source.iceberg.iceberg import ( - IcebergProfiler, - IcebergSource, - IcebergSourceConfig, +def with_iceberg_source() -> IcebergSource: + catalog: IcebergCatalogConfig = IcebergCatalogConfig( + name="test", type="rest", config={} ) - from datahub.ingestion.source.iceberg.iceberg_common import IcebergCatalogConfig - from datahub.metadata.com.linkedin.pegasus2avro.schema import ArrayType, SchemaField - from datahub.metadata.schema_classes import ( - ArrayTypeClass, - BooleanTypeClass, - BytesTypeClass, - DateTypeClass, - FixedTypeClass, - NumberTypeClass, - RecordTypeClass, - StringTypeClass, - TimeTypeClass, + return IcebergSource( + ctx=PipelineContext(run_id="iceberg-source-test"), + config=IcebergSourceConfig(catalog=catalog), ) - pytestmark = pytest.mark.skipif( - sys.version_info < (3, 8), reason="requires python 3.8 or higher" + +def with_iceberg_profiler() -> IcebergProfiler: + iceberg_source_instance = with_iceberg_source() + return IcebergProfiler( + iceberg_source_instance.report, iceberg_source_instance.config.profiling ) - def with_iceberg_source() -> IcebergSource: - catalog: IcebergCatalogConfig = IcebergCatalogConfig( - name="test", type="rest", config={} - ) - return IcebergSource( - ctx=PipelineContext(run_id="iceberg-source-test"), - config=IcebergSourceConfig(catalog=catalog), - ) - def with_iceberg_profiler() -> IcebergProfiler: - iceberg_source_instance = with_iceberg_source() - return IcebergProfiler( - iceberg_source_instance.report, iceberg_source_instance.config.profiling - ) +def assert_field( + schema_field: SchemaField, + expected_description: Optional[str], + expected_nullable: bool, + expected_type: Any, +) -> None: + assert ( + schema_field.description == expected_description + ), f"Field description '{schema_field.description}' is different from expected description '{expected_description}'" + assert ( + schema_field.nullable == expected_nullable + ), f"Field nullable '{schema_field.nullable}' is different from expected nullable '{expected_nullable}'" + assert isinstance( + schema_field.type.type, expected_type + ), f"Field type {schema_field.type.type} is different from expected type {expected_type}" - def assert_field( - schema_field: SchemaField, - expected_description: Optional[str], - expected_nullable: bool, - expected_type: Any, - ) -> None: - assert ( - schema_field.description == expected_description - ), f"Field description '{schema_field.description}' is different from expected description '{expected_description}'" + +def test_config_no_catalog(): + """ + Test when no Iceberg catalog is provided. + """ + with pytest.raises(ValidationError, match="catalog"): + IcebergSourceConfig() # type: ignore + + +def test_config_catalog_not_configured(): + """ + Test when an Iceberg catalog is provided, but not properly configured. 
+ """ + with pytest.raises(ValidationError): + IcebergCatalogConfig() # type: ignore + + with pytest.raises(ValidationError, match="conf"): + IcebergCatalogConfig(type="a type") # type: ignore + + with pytest.raises(ValidationError, match="type"): + IcebergCatalogConfig(conf={}) # type: ignore + + +def test_config_for_tests(): + """ + Test valid iceberg source that will be used in unit tests. + """ + with_iceberg_source() + + +@pytest.mark.parametrize( + "iceberg_type, expected_schema_field_type", + [ + (BinaryType(), BytesTypeClass), + (BooleanType(), BooleanTypeClass), + (DateType(), DateTypeClass), + ( + DecimalType(3, 2), + NumberTypeClass, + ), + (DoubleType(), NumberTypeClass), + (FixedType(4), FixedTypeClass), + (FloatType(), NumberTypeClass), + (IntegerType(), NumberTypeClass), + (LongType(), NumberTypeClass), + (StringType(), StringTypeClass), + ( + TimestampType(), + TimeTypeClass, + ), + ( + TimestamptzType(), + TimeTypeClass, + ), + (TimeType(), TimeTypeClass), + ( + UUIDType(), + StringTypeClass, + ), + ], +) +def test_iceberg_primitive_type_to_schema_field( + iceberg_type: PrimitiveType, expected_schema_field_type: Any +) -> None: + """ + Test converting a primitive typed Iceberg field to a SchemaField + """ + iceberg_source_instance = with_iceberg_source() + for column in [ + NestedField( + 1, "required_field", iceberg_type, True, "required field documentation" + ), + NestedField( + 1, "optional_field", iceberg_type, False, "optional field documentation" + ), + ]: + schema = Schema(column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema(schema) assert ( - schema_field.nullable == expected_nullable - ), f"Field nullable '{schema_field.nullable}' is different from expected nullable '{expected_nullable}'" - assert isinstance( - schema_field.type.type, expected_type - ), f"Field type {schema_field.type.type} is different from expected type {expected_type}" - - def test_config_no_catalog(): - """ - Test when no Iceberg catalog is provided. - """ - with pytest.raises(ValidationError, match="catalog"): - IcebergSourceConfig() # type: ignore - - def test_config_catalog_not_configured(): - """ - Test when an Iceberg catalog is provided, but not properly configured. - """ - with pytest.raises(ValidationError): - IcebergCatalogConfig() # type: ignore - - with pytest.raises(ValidationError, match="conf"): - IcebergCatalogConfig(type="a type") # type: ignore - - with pytest.raises(ValidationError, match="type"): - IcebergCatalogConfig(conf={}) # type: ignore - - def test_config_for_tests(): - """ - Test valid iceberg source that will be used in unit tests. 
- """ - with_iceberg_source() - - @pytest.mark.parametrize( - "iceberg_type, expected_schema_field_type", - [ - (BinaryType(), BytesTypeClass), - (BooleanType(), BooleanTypeClass), - (DateType(), DateTypeClass), - ( - DecimalType(3, 2), - NumberTypeClass, - ), - (DoubleType(), NumberTypeClass), - (FixedType(4), FixedTypeClass), - (FloatType(), NumberTypeClass), - (IntegerType(), NumberTypeClass), - (LongType(), NumberTypeClass), - (StringType(), StringTypeClass), - ( - TimestampType(), - TimeTypeClass, - ), - ( - TimestamptzType(), - TimeTypeClass, - ), - (TimeType(), TimeTypeClass), - ( - UUIDType(), - StringTypeClass, - ), - ], - ) - def test_iceberg_primitive_type_to_schema_field( - iceberg_type: PrimitiveType, expected_schema_field_type: Any - ) -> None: - """ - Test converting a primitive typed Iceberg field to a SchemaField - """ + len(schema_fields) == 1 + ), f"Expected 1 field, but got {len(schema_fields)}" + assert_field( + schema_fields[0], + column.doc, + column.optional, + expected_schema_field_type, + ) + + +@pytest.mark.parametrize( + "iceberg_type, expected_array_nested_type", + [ + (BinaryType(), "bytes"), + (BooleanType(), "boolean"), + (DateType(), "date"), + ( + DecimalType(3, 2), + "decimal", + ), + (DoubleType(), "double"), + (FixedType(4), "fixed"), + (FloatType(), "float"), + (IntegerType(), "int"), + (LongType(), "long"), + (StringType(), "string"), + ( + TimestampType(), + "timestamp-micros", + ), + ( + TimestamptzType(), + "timestamp-micros", + ), + (TimeType(), "time-micros"), + ( + UUIDType(), + "uuid", + ), + ], +) +def test_iceberg_list_to_schema_field( + iceberg_type: PrimitiveType, expected_array_nested_type: Any +) -> None: + """ + Test converting a list typed Iceberg field to an ArrayType SchemaField, including the list nested type. 
+ """ + for list_column in [ + NestedField( + 1, + "listField", + ListType(2, iceberg_type, True), + True, + "required field, required element documentation", + ), + NestedField( + 1, + "listField", + ListType(2, iceberg_type, False), + True, + "required field, optional element documentation", + ), + NestedField( + 1, + "listField", + ListType(2, iceberg_type, True), + False, + "optional field, required element documentation", + ), + NestedField( + 1, + "listField", + ListType(2, iceberg_type, False), + False, + "optional field, optional element documentation", + ), + ]: iceberg_source_instance = with_iceberg_source() - for column in [ - NestedField( - 1, "required_field", iceberg_type, True, "required field documentation" - ), - NestedField( - 1, "optional_field", iceberg_type, False, "optional field documentation" - ), - ]: - schema = Schema(column) - schema_fields = iceberg_source_instance._get_schema_fields_for_schema( - schema - ) - assert ( - len(schema_fields) == 1 - ), f"Expected 1 field, but got {len(schema_fields)}" - assert_field( - schema_fields[0], - column.doc, - column.optional, - expected_schema_field_type, - ) - - @pytest.mark.parametrize( - "iceberg_type, expected_array_nested_type", - [ - (BinaryType(), "bytes"), - (BooleanType(), "boolean"), - (DateType(), "date"), - ( - DecimalType(3, 2), - "decimal", - ), - (DoubleType(), "double"), - (FixedType(4), "fixed"), - (FloatType(), "float"), - (IntegerType(), "int"), - (LongType(), "long"), - (StringType(), "string"), - ( - TimestampType(), - "timestamp-micros", - ), - ( - TimestamptzType(), - "timestamp-micros", - ), - (TimeType(), "time-micros"), - ( - UUIDType(), - "uuid", - ), - ], - ) - def test_iceberg_list_to_schema_field( - iceberg_type: PrimitiveType, expected_array_nested_type: Any - ) -> None: - """ - Test converting a list typed Iceberg field to an ArrayType SchemaField, including the list nested type. 
- """ - for list_column in [ - NestedField( - 1, - "listField", - ListType(2, iceberg_type, True), - True, - "required field, required element documentation", - ), - NestedField( - 1, - "listField", - ListType(2, iceberg_type, False), - True, - "required field, optional element documentation", - ), - NestedField( - 1, - "listField", - ListType(2, iceberg_type, True), - False, - "optional field, required element documentation", - ), - NestedField( - 1, - "listField", - ListType(2, iceberg_type, False), - False, - "optional field, optional element documentation", - ), - ]: - iceberg_source_instance = with_iceberg_source() - schema = Schema(list_column) - schema_fields = iceberg_source_instance._get_schema_fields_for_schema( - schema - ) - assert ( - len(schema_fields) == 1 - ), f"Expected 1 field, but got {len(schema_fields)}" - assert_field( - schema_fields[0], list_column.doc, list_column.optional, ArrayTypeClass - ) - assert isinstance( - schema_fields[0].type.type, ArrayType - ), f"Field type {schema_fields[0].type.type} was expected to be {ArrayType}" - arrayType: ArrayType = schema_fields[0].type.type - assert arrayType.nestedType == [ - expected_array_nested_type - ], f"List Field nested type {arrayType.nestedType} was expected to be {expected_array_nested_type}" - - @pytest.mark.parametrize( - "iceberg_type, expected_map_type", - [ - (BinaryType(), BytesTypeClass), - (BooleanType(), BooleanTypeClass), - (DateType(), DateTypeClass), - ( - DecimalType(3, 2), - NumberTypeClass, - ), - (DoubleType(), NumberTypeClass), - (FixedType(4), FixedTypeClass), - (FloatType(), NumberTypeClass), - (IntegerType(), NumberTypeClass), - (LongType(), NumberTypeClass), - (StringType(), StringTypeClass), - ( - TimestampType(), - TimeTypeClass, - ), - ( - TimestamptzType(), - TimeTypeClass, - ), - (TimeType(), TimeTypeClass), - ( - UUIDType(), - StringTypeClass, - ), - ], - ) - def test_iceberg_map_to_schema_field( - iceberg_type: PrimitiveType, expected_map_type: Any - ) -> None: - """ - Test converting a map typed Iceberg field to a MapType SchemaField, where the key is the same type as the value. - """ - for map_column in [ - NestedField( - 1, - "mapField", - MapType(11, iceberg_type, 12, iceberg_type, True), - True, - "required field, required value documentation", - ), - NestedField( - 1, - "mapField", - MapType(11, iceberg_type, 12, iceberg_type, False), - True, - "required field, optional value documentation", - ), - NestedField( - 1, - "mapField", - MapType(11, iceberg_type, 12, iceberg_type, True), - False, - "optional field, required value documentation", - ), - NestedField( - 1, - "mapField", - MapType(11, iceberg_type, 12, iceberg_type, False), - False, - "optional field, optional value documentation", - ), - ]: - iceberg_source_instance = with_iceberg_source() - schema = Schema(map_column) - schema_fields = iceberg_source_instance._get_schema_fields_for_schema( - schema - ) - # Converting an Iceberg Map type will be done by creating an array of struct(key, value) records. - # The first field will be the array. 
- assert ( - len(schema_fields) == 3 - ), f"Expected 3 fields, but got {len(schema_fields)}" - assert_field( - schema_fields[0], map_column.doc, map_column.optional, ArrayTypeClass - ) - - # The second field will be the key type - assert_field(schema_fields[1], None, False, expected_map_type) - - # The third field will be the value type - assert_field( - schema_fields[2], - None, - not map_column.field_type.value_required, - expected_map_type, - ) - - @pytest.mark.parametrize( - "iceberg_type, expected_schema_field_type", - [ - (BinaryType(), BytesTypeClass), - (BooleanType(), BooleanTypeClass), - (DateType(), DateTypeClass), - ( - DecimalType(3, 2), - NumberTypeClass, - ), - (DoubleType(), NumberTypeClass), - (FixedType(4), FixedTypeClass), - (FloatType(), NumberTypeClass), - (IntegerType(), NumberTypeClass), - (LongType(), NumberTypeClass), - (StringType(), StringTypeClass), - ( - TimestampType(), - TimeTypeClass, - ), - ( - TimestamptzType(), - TimeTypeClass, - ), - (TimeType(), TimeTypeClass), - ( - UUIDType(), - StringTypeClass, - ), - ], - ) - def test_iceberg_struct_to_schema_field( - iceberg_type: PrimitiveType, expected_schema_field_type: Any - ) -> None: - """ - Test converting a struct typed Iceberg field to a RecordType SchemaField. - """ - field1 = NestedField(11, "field1", iceberg_type, True, "field documentation") - struct_column = NestedField( - 1, "structField", StructType(field1), True, "struct documentation" + schema = Schema(list_column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema(schema) + assert ( + len(schema_fields) == 1 + ), f"Expected 1 field, but got {len(schema_fields)}" + assert_field( + schema_fields[0], list_column.doc, list_column.optional, ArrayTypeClass ) + assert isinstance( + schema_fields[0].type.type, ArrayType + ), f"Field type {schema_fields[0].type.type} was expected to be {ArrayType}" + arrayType: ArrayType = schema_fields[0].type.type + assert arrayType.nestedType == [ + expected_array_nested_type + ], f"List Field nested type {arrayType.nestedType} was expected to be {expected_array_nested_type}" + + +@pytest.mark.parametrize( + "iceberg_type, expected_map_type", + [ + (BinaryType(), BytesTypeClass), + (BooleanType(), BooleanTypeClass), + (DateType(), DateTypeClass), + ( + DecimalType(3, 2), + NumberTypeClass, + ), + (DoubleType(), NumberTypeClass), + (FixedType(4), FixedTypeClass), + (FloatType(), NumberTypeClass), + (IntegerType(), NumberTypeClass), + (LongType(), NumberTypeClass), + (StringType(), StringTypeClass), + ( + TimestampType(), + TimeTypeClass, + ), + ( + TimestamptzType(), + TimeTypeClass, + ), + (TimeType(), TimeTypeClass), + ( + UUIDType(), + StringTypeClass, + ), + ], +) +def test_iceberg_map_to_schema_field( + iceberg_type: PrimitiveType, expected_map_type: Any +) -> None: + """ + Test converting a map typed Iceberg field to a MapType SchemaField, where the key is the same type as the value. 
+ """ + for map_column in [ + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, True), + True, + "required field, required value documentation", + ), + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, False), + True, + "required field, optional value documentation", + ), + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, True), + False, + "optional field, required value documentation", + ), + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, False), + False, + "optional field, optional value documentation", + ), + ]: iceberg_source_instance = with_iceberg_source() - schema = Schema(struct_column) + schema = Schema(map_column) schema_fields = iceberg_source_instance._get_schema_fields_for_schema(schema) + # Converting an Iceberg Map type will be done by creating an array of struct(key, value) records. + # The first field will be the array. assert ( - len(schema_fields) == 2 - ), f"Expected 2 fields, but got {len(schema_fields)}" + len(schema_fields) == 3 + ), f"Expected 3 fields, but got {len(schema_fields)}" assert_field( - schema_fields[0], struct_column.doc, struct_column.optional, RecordTypeClass + schema_fields[0], map_column.doc, map_column.optional, ArrayTypeClass ) + + # The second field will be the key type + assert_field(schema_fields[1], None, False, expected_map_type) + + # The third field will be the value type assert_field( - schema_fields[1], field1.doc, field1.optional, expected_schema_field_type + schema_fields[2], + None, + not map_column.field_type.value_required, + expected_map_type, ) - @pytest.mark.parametrize( - "value_type, value, expected_value", - [ - (BinaryType(), bytes([1, 2, 3, 4, 5]), "b'\\x01\\x02\\x03\\x04\\x05'"), - (BooleanType(), True, "True"), - (DateType(), 19543, "2023-07-05"), - (DecimalType(3, 2), Decimal((0, (3, 1, 4), -2)), "3.14"), - (DoubleType(), 3.4, "3.4"), - (FixedType(4), bytes([1, 2, 3, 4]), "b'\\x01\\x02\\x03\\x04'"), - (FloatType(), 3.4, "3.4"), - (IntegerType(), 3, "3"), - (LongType(), 4294967295000, "4294967295000"), - (StringType(), "a string", "a string"), - ( - TimestampType(), - 1688559488157000, - "2023-07-05T12:18:08.157000", - ), - ( - TimestamptzType(), - 1688559488157000, - "2023-07-05T12:18:08.157000+00:00", - ), - (TimeType(), 40400000000, "11:13:20"), - ( - UUIDType(), - uuid.UUID("00010203-0405-0607-0809-0a0b0c0d0e0f"), - "00010203-0405-0607-0809-0a0b0c0d0e0f", - ), - ], + +@pytest.mark.parametrize( + "iceberg_type, expected_schema_field_type", + [ + (BinaryType(), BytesTypeClass), + (BooleanType(), BooleanTypeClass), + (DateType(), DateTypeClass), + ( + DecimalType(3, 2), + NumberTypeClass, + ), + (DoubleType(), NumberTypeClass), + (FixedType(4), FixedTypeClass), + (FloatType(), NumberTypeClass), + (IntegerType(), NumberTypeClass), + (LongType(), NumberTypeClass), + (StringType(), StringTypeClass), + ( + TimestampType(), + TimeTypeClass, + ), + ( + TimestamptzType(), + TimeTypeClass, + ), + (TimeType(), TimeTypeClass), + ( + UUIDType(), + StringTypeClass, + ), + ], +) +def test_iceberg_struct_to_schema_field( + iceberg_type: PrimitiveType, expected_schema_field_type: Any +) -> None: + """ + Test converting a struct typed Iceberg field to a RecordType SchemaField. 
+ """ + field1 = NestedField(11, "field1", iceberg_type, True, "field documentation") + struct_column = NestedField( + 1, "structField", StructType(field1), True, "struct documentation" + ) + iceberg_source_instance = with_iceberg_source() + schema = Schema(struct_column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema(schema) + assert len(schema_fields) == 2, f"Expected 2 fields, but got {len(schema_fields)}" + assert_field( + schema_fields[0], struct_column.doc, struct_column.optional, RecordTypeClass + ) + assert_field( + schema_fields[1], field1.doc, field1.optional, expected_schema_field_type ) - def test_iceberg_profiler_value_render( - value_type: IcebergType, value: Any, expected_value: Optional[str] - ) -> None: - iceberg_profiler_instance = with_iceberg_profiler() - assert ( - iceberg_profiler_instance._render_value("a.dataset", value_type, value) - == expected_value - ) - def test_avro_decimal_bytes_nullable() -> None: - """ - The following test exposes a problem with decimal (bytes) not preserving extra attributes like _nullable. Decimal (fixed) and Boolean for example do. - NOTE: This bug was by-passed by mapping the Decimal type to fixed instead of bytes. - """ - import avro.schema - - decimal_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "bytes", "precision": 3, "scale": 2, "logicalType": "decimal", "native_data_type": "decimal(3, 2)", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" - decimal_avro_schema = avro.schema.parse(decimal_avro_schema_string) - print("\nDecimal (bytes)") - print( - f"Original avro schema string: {decimal_avro_schema_string}" - ) - print( - f"After avro parsing, _nullable attribute is missing: {decimal_avro_schema}" - ) - decimal_fixed_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "fixed", "logicalType": "decimal", "precision": 3, "scale": 2, "native_data_type": "decimal(3, 2)", "_nullable": false, "name": "bogusName", "size": 16}, "name": "required_field", "doc": "required field documentation"}]}""" - decimal_fixed_avro_schema = avro.schema.parse(decimal_fixed_avro_schema_string) - print("\nDecimal (fixed)") - print( - f"Original avro schema string: {decimal_fixed_avro_schema_string}" - ) - print( - f"After avro parsing, _nullable attribute is preserved: {decimal_fixed_avro_schema}" - ) +@pytest.mark.parametrize( + "value_type, value, expected_value", + [ + (BinaryType(), bytes([1, 2, 3, 4, 5]), "b'\\x01\\x02\\x03\\x04\\x05'"), + (BooleanType(), True, "True"), + (DateType(), 19543, "2023-07-05"), + (DecimalType(3, 2), Decimal((0, (3, 1, 4), -2)), "3.14"), + (DoubleType(), 3.4, "3.4"), + (FixedType(4), bytes([1, 2, 3, 4]), "b'\\x01\\x02\\x03\\x04'"), + (FloatType(), 3.4, "3.4"), + (IntegerType(), 3, "3"), + (LongType(), 4294967295000, "4294967295000"), + (StringType(), "a string", "a string"), + ( + TimestampType(), + 1688559488157000, + "2023-07-05T12:18:08.157000", + ), + ( + TimestamptzType(), + 1688559488157000, + "2023-07-05T12:18:08.157000+00:00", + ), + (TimeType(), 40400000000, "11:13:20"), + ( + UUIDType(), + uuid.UUID("00010203-0405-0607-0809-0a0b0c0d0e0f"), + "00010203-0405-0607-0809-0a0b0c0d0e0f", + ), + ], +) +def test_iceberg_profiler_value_render( + value_type: IcebergType, value: Any, expected_value: Optional[str] +) -> None: + iceberg_profiler_instance = with_iceberg_profiler() + assert ( + iceberg_profiler_instance._render_value("a.dataset", value_type, value) + == 
expected_value + ) - boolean_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "boolean", "native_data_type": "boolean", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" - boolean_avro_schema = avro.schema.parse(boolean_avro_schema_string) - print("\nBoolean") - print( - f"Original avro schema string: {boolean_avro_schema_string}" - ) - print( - f"After avro parsing, _nullable attribute is preserved: {boolean_avro_schema}" - ) + +def test_avro_decimal_bytes_nullable() -> None: + """ + The following test exposes a problem with decimal (bytes) not preserving extra attributes like _nullable. Decimal (fixed) and Boolean for example do. + NOTE: This bug was by-passed by mapping the Decimal type to fixed instead of bytes. + """ + import avro.schema + + decimal_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "bytes", "precision": 3, "scale": 2, "logicalType": "decimal", "native_data_type": "decimal(3, 2)", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" + decimal_avro_schema = avro.schema.parse(decimal_avro_schema_string) + print("\nDecimal (bytes)") + print( + f"Original avro schema string: {decimal_avro_schema_string}" + ) + print(f"After avro parsing, _nullable attribute is missing: {decimal_avro_schema}") + + decimal_fixed_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "fixed", "logicalType": "decimal", "precision": 3, "scale": 2, "native_data_type": "decimal(3, 2)", "_nullable": false, "name": "bogusName", "size": 16}, "name": "required_field", "doc": "required field documentation"}]}""" + decimal_fixed_avro_schema = avro.schema.parse(decimal_fixed_avro_schema_string) + print("\nDecimal (fixed)") + print( + f"Original avro schema string: {decimal_fixed_avro_schema_string}" + ) + print( + f"After avro parsing, _nullable attribute is preserved: {decimal_fixed_avro_schema}" + ) + + boolean_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "boolean", "native_data_type": "boolean", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" + boolean_avro_schema = avro.schema.parse(boolean_avro_schema_string) + print("\nBoolean") + print( + f"Original avro schema string: {boolean_avro_schema_string}" + ) + print( + f"After avro parsing, _nullable attribute is preserved: {boolean_avro_schema}" + ) diff --git a/metadata-ingestion/tests/unit/test_mlflow_source.py b/metadata-ingestion/tests/unit/test_mlflow_source.py index 97b5afd3d6a4e..374816055b216 100644 --- a/metadata-ingestion/tests/unit/test_mlflow_source.py +++ b/metadata-ingestion/tests/unit/test_mlflow_source.py @@ -1,133 +1,140 @@ -import sys +import datetime +from pathlib import Path +from typing import Any, TypeVar, Union + +import pytest +from mlflow import MlflowClient +from mlflow.entities.model_registry import RegisteredModel +from mlflow.entities.model_registry.model_version import ModelVersion +from mlflow.store.entities import PagedList + +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.source.mlflow import MLflowConfig, MLflowSource + +T = TypeVar("T") + + +@pytest.fixture +def tracking_uri(tmp_path: Path) -> str: + return str(tmp_path / "mlruns") + + +@pytest.fixture +def source(tracking_uri: str) -> MLflowSource: + return MLflowSource( + ctx=PipelineContext(run_id="mlflow-source-test"), + 
config=MLflowConfig(tracking_uri=tracking_uri), + ) + + +@pytest.fixture +def registered_model(source: MLflowSource) -> RegisteredModel: + model_name = "abc" + return RegisteredModel(name=model_name) + + +@pytest.fixture +def model_version( + source: MLflowSource, + registered_model: RegisteredModel, +) -> ModelVersion: + version = "1" + return ModelVersion( + name=registered_model.name, + version=version, + creation_timestamp=datetime.datetime.now(), + ) + + +def dummy_search_func(page_token: Union[None, str], **kwargs: Any) -> PagedList[T]: + dummy_pages = dict( + page_1=PagedList(items=["a", "b"], token="page_2"), + page_2=PagedList(items=["c", "d"], token="page_3"), + page_3=PagedList(items=["e"], token=None), + ) + if page_token is None: + page_to_return = dummy_pages["page_1"] + else: + page_to_return = dummy_pages[page_token] + if kwargs.get("case", "") == "upper": + page_to_return = PagedList( + items=[e.upper() for e in page_to_return.to_list()], + token=page_to_return.token, + ) + return page_to_return -if sys.version_info >= (3, 8): - import datetime - from pathlib import Path - from typing import Any, TypeVar, Union - import pytest - from mlflow import MlflowClient - from mlflow.entities.model_registry import RegisteredModel - from mlflow.entities.model_registry.model_version import ModelVersion - from mlflow.store.entities import PagedList +def test_stages(source): + mlflow_registered_model_stages = { + "Production", + "Staging", + "Archived", + None, + } + workunits = source._get_tags_workunits() + names = [wu.get_metadata()["metadata"].aspect.name for wu in workunits] - from datahub.ingestion.api.common import PipelineContext - from datahub.ingestion.source.mlflow import MLflowConfig, MLflowSource + assert len(names) == len(mlflow_registered_model_stages) + assert set(names) == { + "mlflow_" + str(stage).lower() for stage in mlflow_registered_model_stages + } - T = TypeVar("T") - @pytest.fixture - def tracking_uri(tmp_path: Path) -> str: - return str(tmp_path / "mlruns") +def test_config_model_name_separator(source, model_version): + name_version_sep = "+" + source.config.model_name_separator = name_version_sep + expected_model_name = ( + f"{model_version.name}{name_version_sep}{model_version.version}" + ) + expected_urn = f"urn:li:mlModel:(urn:li:dataPlatform:mlflow,{expected_model_name},{source.config.env})" - @pytest.fixture - def source(tracking_uri: str) -> MLflowSource: - return MLflowSource( - ctx=PipelineContext(run_id="mlflow-source-test"), - config=MLflowConfig(tracking_uri=tracking_uri), - ) + urn = source._make_ml_model_urn(model_version) - @pytest.fixture - def registered_model(source: MLflowSource) -> RegisteredModel: - model_name = "abc" - return RegisteredModel(name=model_name) - - @pytest.fixture - def model_version( - source: MLflowSource, - registered_model: RegisteredModel, - ) -> ModelVersion: - version = "1" - return ModelVersion( - name=registered_model.name, - version=version, - creation_timestamp=datetime.datetime.now(), - ) + assert urn == expected_urn - def dummy_search_func(page_token: Union[None, str], **kwargs: Any) -> PagedList[T]: - dummy_pages = dict( - page_1=PagedList(items=["a", "b"], token="page_2"), - page_2=PagedList(items=["c", "d"], token="page_3"), - page_3=PagedList(items=["e"], token=None), - ) - if page_token is None: - page_to_return = dummy_pages["page_1"] - else: - page_to_return = dummy_pages[page_token] - if kwargs.get("case", "") == "upper": - page_to_return = PagedList( - items=[e.upper() for e in page_to_return.to_list()], 
- token=page_to_return.token, - ) - return page_to_return - - def test_stages(source): - mlflow_registered_model_stages = { - "Production", - "Staging", - "Archived", - None, - } - workunits = source._get_tags_workunits() - names = [wu.get_metadata()["metadata"].aspect.name for wu in workunits] - - assert len(names) == len(mlflow_registered_model_stages) - assert set(names) == { - "mlflow_" + str(stage).lower() for stage in mlflow_registered_model_stages - } - - def test_config_model_name_separator(source, model_version): - name_version_sep = "+" - source.config.model_name_separator = name_version_sep - expected_model_name = ( - f"{model_version.name}{name_version_sep}{model_version.version}" - ) - expected_urn = f"urn:li:mlModel:(urn:li:dataPlatform:mlflow,{expected_model_name},{source.config.env})" - urn = source._make_ml_model_urn(model_version) +def test_model_without_run(source, registered_model, model_version): + run = source._get_mlflow_run(model_version) + wu = source._get_ml_model_properties_workunit( + registered_model=registered_model, + model_version=model_version, + run=run, + ) + aspect = wu.get_metadata()["metadata"].aspect - assert urn == expected_urn + assert aspect.hyperParams is None + assert aspect.trainingMetrics is None - def test_model_without_run(source, registered_model, model_version): - run = source._get_mlflow_run(model_version) - wu = source._get_ml_model_properties_workunit( - registered_model=registered_model, - model_version=model_version, - run=run, - ) - aspect = wu.get_metadata()["metadata"].aspect - assert aspect.hyperParams is None - assert aspect.trainingMetrics is None +def test_traverse_mlflow_search_func(source): + expected_items = ["a", "b", "c", "d", "e"] - def test_traverse_mlflow_search_func(source): - expected_items = ["a", "b", "c", "d", "e"] + items = list(source._traverse_mlflow_search_func(dummy_search_func)) - items = list(source._traverse_mlflow_search_func(dummy_search_func)) + assert items == expected_items - assert items == expected_items - def test_traverse_mlflow_search_func_with_kwargs(source): - expected_items = ["A", "B", "C", "D", "E"] +def test_traverse_mlflow_search_func_with_kwargs(source): + expected_items = ["A", "B", "C", "D", "E"] + + items = list(source._traverse_mlflow_search_func(dummy_search_func, case="upper")) + + assert items == expected_items - items = list( - source._traverse_mlflow_search_func(dummy_search_func, case="upper") - ) - assert items == expected_items +def test_make_external_link_local(source, model_version): + expected_url = None - def test_make_external_link_local(source, model_version): - expected_url = None + url = source._make_external_url(model_version) - url = source._make_external_url(model_version) + assert url == expected_url - assert url == expected_url - def test_make_external_link_remote(source, model_version): - tracking_uri_remote = "https://dummy-mlflow-tracking-server.org" - source.client = MlflowClient(tracking_uri=tracking_uri_remote) - expected_url = f"{tracking_uri_remote}/#/models/{model_version.name}/versions/{model_version.version}" +def test_make_external_link_remote(source, model_version): + tracking_uri_remote = "https://dummy-mlflow-tracking-server.org" + source.client = MlflowClient(tracking_uri=tracking_uri_remote) + expected_url = f"{tracking_uri_remote}/#/models/{model_version.name}/versions/{model_version.version}" - url = source._make_external_url(model_version) + url = source._make_external_url(model_version) - assert url == expected_url + assert url == expected_url 
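[Editor's note] The Python 3.8 floor adopted in the commit above is what lets the report code pass `sort_dicts` and `stacklevel` directly instead of gating them behind `sys.version_info` checks, as the deleted `PPRINT_OPTIONS` and `_stacklevel_if_supported` shims did. A minimal sketch of the two stdlib features involved; the names below are illustrative and not part of the DataHub codebase:

```python
import logging
import pprint

logger = logging.getLogger(__name__)

report = {"workunits_emitted": 17, "failures": 0}

# pprint gained the sort_dicts option in Python 3.8; passing
# sort_dicts=False preserves the insertion order of the report dict.
print(pprint.pformat(report, width=150, sort_dicts=False))

# logging gained stacklevel in Python 3.8; stacklevel=2 attributes the
# record to this function's caller rather than to the helper itself.
def log_warning(msg: str) -> None:
    logger.log(level=logging.WARNING, msg=msg, stacklevel=2)

log_warning("deprecated option used")
```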
From f378fb6c8066027fae671cb63a4ec3db60dd9744 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Tue, 30 Jan 2024 04:33:17 +0900 Subject: [PATCH 455/792] docs: Add slack survey page (#9590) Co-authored-by: Harshal Sheth --- docs-website/docusaurus.config.js | 2 +- docs-website/src/pages/slack/index.js | 48 +++++++++++++++++++ .../src/pages/slack/slacksurvey.module.scss | 0 3 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 docs-website/src/pages/slack/index.js create mode 100644 docs-website/src/pages/slack/slacksurvey.module.scss diff --git a/docs-website/docusaurus.config.js b/docs-website/docusaurus.config.js index 22edf749acaed..6138f33244d03 100644 --- a/docs-website/docusaurus.config.js +++ b/docs-website/docusaurus.config.js @@ -62,7 +62,7 @@ module.exports = { position: "right", items: [ { - href: "https://slack.datahubproject.io", + to: "/slack", label: "Join Slack", }, { diff --git a/docs-website/src/pages/slack/index.js b/docs-website/src/pages/slack/index.js new file mode 100644 index 0000000000000..c85a1eefe5545 --- /dev/null +++ b/docs-website/src/pages/slack/index.js @@ -0,0 +1,48 @@ +import React, { useEffect } from 'react'; +import Layout from '@theme/Layout'; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; + +function SlackSurvey() { + const { siteConfig = {} } = useDocusaurusContext(); + + useEffect(() => { + const script = document.createElement('script'); + script.src = "//js.hsforms.net/forms/embed/v2.js"; + script.async = true; + script.type = 'text/javascript'; + document.body.appendChild(script); + + script.onload = () => { + if (window.hbspt) { + window.hbspt.forms.create({ + region: "na1", + portalId: "14552909", + formId: "91357965-a8dc-4e20-875e-5f87e6b9defb", + target: '#hubspotForm' // Targeting the div with the specific ID + }); + } + }; + + return () => { + document.body.removeChild(script); + }; + }, []); + + return ( + +
+    <Layout>
+      <div>
+        <h1>Join the DataHub Slack Community!</h1>
+        <p>We will send the link to join our Slack community to your email.</p>
+        <div id="hubspotForm"></div>
+      </div>
+    </Layout>
    + ); +} + +export default SlackSurvey; diff --git a/docs-website/src/pages/slack/slacksurvey.module.scss b/docs-website/src/pages/slack/slacksurvey.module.scss new file mode 100644 index 0000000000000..e69de29bb2d1d From 1d06d38b681be03732111e5d2a6a908dac6a5977 Mon Sep 17 00:00:00 2001 From: Shirshanka Das Date: Mon, 29 Jan 2024 16:03:27 -0800 Subject: [PATCH 456/792] feat(platform): add support for via nodes (#9733) --- .../datahub/graphql/GmsGraphQLEngine.java | 3 +- .../graphql/resolvers/BatchLoadUtils.java | 5 +- .../search/SearchAcrossLineageResolver.java | 52 +- .../resolvers/search/SearchResolver.java | 13 +- .../mappers/GroupingCriterionInputMapper.java | 29 + .../mappers/SearchFlagsInputMapper.java | 13 + .../common/mappers/UrnToEntityMapper.java | 6 + .../UrnSearchAcrossLineageResultsMapper.java | 1 + .../graphql/types/query/QueryType.java | 3 + .../src/main/resources/entity.graphql | 5 + .../src/main/resources/search.graphql | 49 + .../SearchAcrossLineageResolverTest.java | 20 +- .../resolvers/search/SearchResolverTest.java | 61 +- .../ReindexDataJobViaNodesCLLConfig.java | 15 + .../upgrade/config/SystemUpdateConfig.java | 11 +- .../datahub/upgrade/system/SystemUpdate.java | 6 +- .../system/via/ReindexDataJobViaNodesCLL.java | 34 + .../via/ReindexDataJobViaNodesCLLStep.java | 84 ++ .../annotation/RelationshipAnnotation.java | 45 +- .../src/datahub/ingestion/graph/client.py | 6 +- .../metadata/entity/EntityServiceImpl.java | 2 +- .../graph/dgraph/DgraphGraphService.java | 2 +- .../graph/elastic/ESGraphQueryDAO.java | 453 ++++++-- .../graph/elastic/ESGraphWriteDAO.java | 23 + .../elastic/ElasticSearchGraphService.java | 39 +- .../GraphRelationshipMappingsBuilder.java | 17 +- .../graph/neo4j/Neo4jGraphService.java | 3 +- .../metadata/search/LineageSearchService.java | 45 +- .../metadata/search/utils/SearchUtils.java | 24 + .../service/UpdateIndicesService.java | 46 +- .../metadata/graph/GraphServiceTestBase.java | 61 +- .../graph/dgraph/DgraphGraphServiceTest.java | 2 +- .../search/SearchGraphServiceTestBase.java | 21 +- .../fixtures/LineageDataFixtureTestBase.java | 25 +- .../search/utils/SearchUtilsTest.java | 175 ++-- .../linkedin/dataset/FineGrainedLineage.pdl | 7 +- .../pegasus/com/linkedin/dataset/Upstream.pdl | 8 + .../metadata/graph/LineageRelationship.pdl | 9 + .../metadata/query/GroupingCriterion.pdl | 21 + .../linkedin/metadata/query/GroupingSpec.pdl | 15 + .../linkedin/metadata/query/SearchFlags.pdl | 5 + .../metadata/search/LineageSearchEntity.pdl | 5 + .../com/linkedin/query/QueryProperties.pdl | 5 + .../search/GraphQueryConfiguration.java | 4 + .../src/main/resources/application.yml | 3 +- .../linkedin/metadata/boot/BootstrapStep.java | 11 + .../com.linkedin.entity.aspects.snapshot.json | 13 +- ...com.linkedin.entity.entities.snapshot.json | 58 +- .../com.linkedin.entity.runs.snapshot.json | 13 +- ...nkedin.lineage.relationships.snapshot.json | 13 +- ...nkedin.operations.operations.snapshot.json | 13 +- ...m.linkedin.platform.platform.snapshot.json | 13 +- .../linkedin/entity/client/EntityClient.java | 1 + .../com/linkedin/metadata/graph/Edge.java | 27 + .../metadata/graph/GraphIndexUtils.java | 35 +- .../metadata/graph/RelatedEntities.java | 8 +- .../metadata/graph/RelatedEntity.java | 13 + smoke-test/requirements.txt | 3 +- smoke-test/tests/lineage/__init__.py | 0 smoke-test/tests/lineage/test_lineage.py | 991 ++++++++++++++++++ 60 files changed, 2401 insertions(+), 292 deletions(-) create mode 100644 
datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java create mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java create mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java create mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java create mode 100644 metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingCriterion.pdl create mode 100644 metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingSpec.pdl create mode 100644 smoke-test/tests/lineage/__init__.py create mode 100644 smoke-test/tests/lineage/test_lineage.py diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 4b5bbdb6e15ec..41f48e0a7dc3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -878,7 +878,8 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { "scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) .dataFetcher( - "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) + "searchAcrossLineage", + new SearchAcrossLineageResolver(this.entityClient, this.entityRegistry)) .dataFetcher( "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) .dataFetcher( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java index 5ab07701c15a2..3126f25546f65 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java @@ -28,8 +28,9 @@ public static CompletableFuture> batchLoadEntitiesOfSameType( .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = dataLoaderRegistry.getDataLoader(filteredEntity.name()); - List keyList = new ArrayList(); + final DataLoader loader = + dataLoaderRegistry.getDataLoader(filteredEntity.name()); + List keyList = new ArrayList(); for (Entity entity : entities) { keyList.add(filteredEntity.getKeyProvider().apply(entity)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 2dc5032f2a4eb..1a8b7734c093e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -2,7 +2,9 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static com.linkedin.metadata.Constants.QUERY_ENTITY_NAME; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import 
com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -14,31 +16,63 @@ import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.r2.RemoteInvocationException; +import graphql.VisibleForTesting; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nullable; -import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; /** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j -@RequiredArgsConstructor public class SearchAcrossLineageResolver implements DataFetcher> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; + private static final Set TRANSIENT_ENTITIES = ImmutableSet.of(QUERY_ENTITY_NAME); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + + @VisibleForTesting final Set _allEntities; + private final List _allowedEntities; + + public SearchAcrossLineageResolver(EntityClient entityClient, EntityRegistry entityRegistry) { + this._entityClient = entityClient; + this._entityRegistry = entityRegistry; + this._allEntities = + entityRegistry.getEntitySpecs().values().stream() + .map(EntitySpec::getName) + .collect(Collectors.toSet()); + + this._allowedEntities = + this._allEntities.stream() + .filter(e -> !TRANSIENT_ENTITIES.contains(e)) + .collect(Collectors.toList()); + } + + private List getEntityNamesFromInput(List inputTypes) { + if (inputTypes != null && !inputTypes.isEmpty()) { + return inputTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + } else { + return this._allowedEntities; + } + } + @Override public CompletableFuture get(DataFetchingEnvironment environment) throws URISyntaxException { @@ -50,12 +84,7 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); - List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) - ? 
SEARCHABLE_ENTITY_TYPES - : input.getTypes(); - List entityNames = - entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List entityNames = getEntityNamesFromInput(input.getTypes()); // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = @@ -99,8 +128,7 @@ public CompletableFuture get(DataFetchingEnvironment } else { searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); } - - return UrnSearchAcrossLineageResultsMapper.map( + LineageSearchResult salResults = _entityClient.searchAcrossLineage( urn, resolvedDirection, @@ -114,7 +142,9 @@ public CompletableFuture get(DataFetchingEnvironment startTimeMillis, endTimeMillis, searchFlags, - ResolverUtils.getAuthentication(environment))); + getAuthentication(environment)); + + return UrnSearchAcrossLineageResultsMapper.map(salResults); } catch (RemoteInvocationException e) { log.error( "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index bc177c600beee..7428207034f5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; import com.linkedin.datahub.graphql.generated.SearchInput; @@ -10,6 +11,9 @@ import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.GroupingCriterion; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; import com.linkedin.metadata.query.SearchFlags; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -28,7 +32,14 @@ public class SearchResolver implements DataFetcher { + + public static final GroupingCriterionInputMapper INSTANCE = new GroupingCriterionInputMapper(); + + public static com.linkedin.metadata.query.GroupingCriterion map( + @Nonnull final GroupingCriterion groupingCriterion) { + return INSTANCE.apply(groupingCriterion); + } + + @Override + public com.linkedin.metadata.query.GroupingCriterion apply(GroupingCriterion input) { + return new com.linkedin.metadata.query.GroupingCriterion() + .setBaseEntityType( + input.getBaseEntityType() != null + ? 
EntityTypeMapper.getName(input.getBaseEntityType()) + : null, + SetMode.REMOVE_OPTIONAL_IF_NULL) + .setGroupingEntityType(EntityTypeMapper.getName(input.getGroupingEntityType())); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index e2d29d0297449..faede5cf9bb1b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -2,6 +2,9 @@ import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; +import java.util.stream.Collectors; import javax.annotation.Nonnull; /** @@ -42,6 +45,16 @@ public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags if (searchFlags.getGetSuggestions() != null) { result.setGetSuggestions(searchFlags.getGetSuggestions()); } + if (searchFlags.getGroupingSpec() != null + && searchFlags.getGroupingSpec().getGroupingCriteria() != null) { + result.setGroupingSpec( + new GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + searchFlags.getGroupingSpec().getGroupingCriteria().stream() + .map(GroupingCriterionInputMapper::map) + .collect(Collectors.toList())))); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 18a082fee95f1..3ca018ea6f5c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -30,6 +30,7 @@ import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; @@ -198,6 +199,11 @@ public Entity apply(Urn input) { ((StructuredPropertyEntity) partialEntity).setUrn(input.toString()); ((StructuredPropertyEntity) partialEntity).setType(EntityType.STRUCTURED_PROPERTY); } + if (input.getEntityType().equals(QUERY_ENTITY_NAME)) { + partialEntity = new QueryEntity(); + ((QueryEntity) partialEntity).setUrn(input.toString()); + ((QueryEntity) partialEntity).setType(EntityType.QUERY); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index 642fe90cf2aed..970789facf699 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -62,6 +62,7 @@ private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) + .setDegrees(searchEntity.getDegrees().stream().collect(Collectors.toList())) .build(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index 0c1fd33e38110..087c93a97e314 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -21,7 +21,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class QueryType implements com.linkedin.datahub.graphql.types.EntityType { @@ -50,6 +52,7 @@ public List> batchLoad( final List viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + log.debug("Fetching query entities: {}", viewUrns); final Map entities = _entityClient.batchGetV2( QUERY_ENTITY_NAME, diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 3ea1b38d3db0d..0074dc3fcb44c 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -10948,6 +10948,11 @@ enum QuerySource { The query was provided manually, e.g. from the UI. """ MANUAL + + """ + The query was extracted by the system, e.g. from a dashboard. + """ + SYSTEM } """ diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index 8896dd02b5ad3..2b921601058fb 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -143,6 +143,15 @@ input SearchFlags { Whether to request for search suggestions on the _entityName virtualized field """ getSuggestions: Boolean + + """ + Additional grouping specifications to apply to the search results + Grouping specifications will control how search results are grouped together + in the response. This is currently being used to group schema fields (columns) + as datasets, and in the future will be used to group other entities as well. + Note: This is an experimental feature and is subject to change. + """ + groupingSpec: GroupingSpec } """ @@ -278,6 +287,7 @@ input ScrollAcrossEntitiesInput { searchFlags: SearchFlags } + """ Input arguments for a search query over the results of a multi-hop graph query """ @@ -669,6 +679,12 @@ type SearchAcrossLineageResult { Degree of relationship (number of hops to get to entity) """ degree: Int! + + """ + Degrees of relationship (for entities discoverable at multiple degrees) + """ + degrees: [Int!] + } """ @@ -1303,4 +1319,37 @@ input SortCriterion { The order in which we will be sorting """ sortOrder: SortOrder! +} + +""" +A grouping specification for search results. +""" +input GroupingSpec { + + """ + A list of grouping criteria for grouping search results. + There is no implied order in the grouping criteria. 
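+  For example, a criterion may group schemaField results into their parent datasets.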
+ """ + groupingCriteria: [GroupingCriterion!] + +} + +""" +A single grouping criterion for grouping search results +""" +input GroupingCriterion { + + """ + The base entity type that needs to be grouped + e.g. schemaField + Omitting this field will result in all base entities being grouped into the groupingEntityType. + """ + baseEntityType: EntityType + + """ + The type of entity being grouped into + e.g. dataset, domain, etc. + """ + groupingEntityType: EntityType! + } \ No newline at end of file diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index 273f7156c12a8..a50591b7fc399 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -14,6 +14,8 @@ import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.AggregationMetadataArray; import com.linkedin.metadata.search.LineageSearchEntity; @@ -22,6 +24,7 @@ import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import java.io.InputStream; import java.util.Collections; import java.util.List; import org.testng.annotations.BeforeMethod; @@ -43,13 +46,28 @@ public class SearchAcrossLineageResolverTest { private Authentication _authentication; private SearchAcrossLineageResolver _resolver; + private EntityRegistry _entityRegistry; + @BeforeMethod public void setupTest() { _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); _authentication = mock(Authentication.class); - _resolver = new SearchAcrossLineageResolver(_entityClient); + _entityRegistry = mock(EntityRegistry.class); + _resolver = new SearchAcrossLineageResolver(_entityClient, _entityRegistry); + } + + @Test + public void testAllEntitiesInitialization() { + InputStream inputStream = ClassLoader.getSystemResourceAsStream("entity-registry.yml"); + EntityRegistry entityRegistry = new ConfigEntityRegistry(inputStream); + SearchAcrossLineageResolver resolver = + new SearchAcrossLineageResolver(_entityClient, entityRegistry); + assertTrue(resolver._allEntities.contains("dataset")); + assertTrue(resolver._allEntities.contains("dataFlow")); + // Test for case sensitivity + assertFalse(resolver._allEntities.contains("dataflow")); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 24724cb8e23ad..9716799628a45 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,14 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.search; import 
static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.metadata.Constants.*; import com.datahub.authentication.Authentication; +import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.GroupingCriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; @@ -19,6 +22,22 @@ import org.testng.annotations.Test; public class SearchResolverTest { + + private com.linkedin.metadata.query.SearchFlags setConvertSchemaFieldsToDatasets( + com.linkedin.metadata.query.SearchFlags flags, boolean value) { + if (value) { + return flags.setGroupingSpec( + new com.linkedin.metadata.query.GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + new com.linkedin.metadata.query.GroupingCriterion() + .setBaseEntityType(SCHEMA_FIELD_ENTITY_NAME) + .setGroupingEntityType(DATASET_ENTITY_NAME)))); + } else { + return flags.setGroupingSpec(null, SetMode.REMOVE_IF_NULL); + } + } + @Test public void testDefaultSearchFlags() throws Exception { EntityClient mockClient = initMockSearchEntityClient(); @@ -40,12 +59,14 @@ public void testDefaultSearchFlags() throws Exception { null, 0, 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false)); + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false), + true)); } @Test @@ -77,12 +98,14 @@ public void testOverrideSearchFlags() throws Exception { null, 1, 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true)); + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true), + false)); } @Test @@ -107,12 +130,14 @@ public void testNonWildCardSearchFlags() throws Exception { null, 0, 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false)); + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false), + true)); } private EntityClient initMockSearchEntityClient() throws Exception { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java new file mode 100644 index 0000000000000..06311e1853874 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java @@ -0,0 +1,15 @@ +package 
com.linkedin.datahub.upgrade.config; + +import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; +import com.linkedin.metadata.entity.EntityService; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class ReindexDataJobViaNodesCLLConfig { + + @Bean + public ReindexDataJobViaNodesCLL _reindexDataJobViaNodesCLL(EntityService entityService) { + return new ReindexDataJobViaNodesCLL(entityService); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 3b63d81486eb4..177d4b531ba86 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -4,6 +4,7 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; +import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; @@ -34,11 +35,17 @@ public SystemUpdate systemUpdate( @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, final GitVersion gitVersion, @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + final BackfillBrowsePathsV2 backfillBrowsePathsV2, + final ReindexDataJobViaNodesCLL reindexDataJobViaNodesCLL) { String version = String.format("%s-%s", gitVersion.getVersion(), revision); return new SystemUpdate( - buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + buildIndices, + cleanIndices, + kafkaEventProducer, + version, + backfillBrowsePathsV2, + reindexDataJobViaNodesCLL); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index aba751bff8177..ed9c8ddda45c8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -7,6 +7,7 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; +import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import java.util.List; import java.util.stream.Collectors; @@ -24,11 +25,12 @@ public SystemUpdate( final CleanIndices cleanIndicesJob, final KafkaEventProducer kafkaEventProducer, final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + final BackfillBrowsePathsV2 backfillBrowsePathsV2, + final ReindexDataJobViaNodesCLL upgradeViaNodeCll) { _preStartupUpgrades = List.of(buildIndicesJob); _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); + 
_postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2, upgradeViaNodeCll); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java new file mode 100644 index 0000000000000..41179a50c4b54 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java @@ -0,0 +1,34 @@ +package com.linkedin.datahub.upgrade.system.via; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.Upgrade; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.metadata.entity.EntityService; +import java.util.List; +import lombok.extern.slf4j.Slf4j; + +/** + * A job that reindexes all datajob inputoutput aspects as part of the via node upgrade. This is + * required to index column-level lineage correctly using via nodes. + */ +@Slf4j +public class ReindexDataJobViaNodesCLL implements Upgrade { + + private final List _steps; + + public ReindexDataJobViaNodesCLL(EntityService entityService) { + _steps = ImmutableList.of(new ReindexDataJobViaNodesCLLStep(entityService)); + } + + @Override + public String id() { + return this.getClass().getName(); + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java new file mode 100644 index 0000000000000..70afbc3d205b2 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java @@ -0,0 +1,84 @@ +package com.linkedin.datahub.upgrade.system.via; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; +import java.net.URISyntaxException; +import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ReindexDataJobViaNodesCLLStep implements UpgradeStep { + + private static final String UPGRADE_ID = "via-node-cll-reindex-datajob"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + + private static final Integer BATCH_SIZE = 5000; + + private final EntityService _entityService; + + public ReindexDataJobViaNodesCLLStep(EntityService entityService) { + _entityService = entityService; + } + + @Override + public Function executable() { + return (context) -> { + RestoreIndicesArgs args = + new RestoreIndicesArgs() + .setAspectName(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .setUrnLike("urn:li:" + DATA_JOB_ENTITY_NAME + ":%"); + RestoreIndicesResult result = + _entityService.restoreIndices(args, x -> context.report().addLine((String) x)); + context.report().addLine("Rows migrated: " + result.rowsMigrated); + context.report().addLine("Rows ignored: " + result.ignored); + try { + 
BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, _entityService); + context.report().addLine("State updated: " + UPGRADE_ID_URN); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + @Override + public String id() { + return UPGRADE_ID; + } + + /** + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. + */ + @Override + public boolean isOptional() { + return false; + } + + @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT to determine whether to skip. + */ + public boolean skip(UpgradeContext context) { + boolean previouslyRun = _entityService.exists(UPGRADE_ID_URN, true); + boolean envFlagRecommendsSkip = + Boolean.parseBoolean(System.getenv("SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT")); + if (previouslyRun) { + log.info("{} was already run. Skipping.", id()); + } + if (envFlagRecommendsSkip) { + log.info("Environment variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT is set to true. Skipping."); + } + return (previouslyRun || envFlagRecommendsSkip); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java index a22ef56d60006..630e7951c0311 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java @@ -6,10 +6,12 @@ import java.util.Map; import java.util.Optional; import javax.annotation.Nonnull; +import lombok.AllArgsConstructor; import lombok.Value; /** Simple object representation of the @Relationship annotation metadata. 
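+ * The optional via attribute identifies an intermediate entity (such as a query) that a lineage edge passes through.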
*/ @Value +@AllArgsConstructor public class RelationshipAnnotation { public static final String ANNOTATION_NAME = "Relationship"; @@ -23,6 +25,8 @@ public class RelationshipAnnotation { private static final String UPDATED_ACTOR = "updatedActor"; private static final String PROPERTIES = "properties"; + private static final String VIA = "via"; + String name; List validDestinationTypes; boolean isUpstream; @@ -32,6 +36,7 @@ public class RelationshipAnnotation { String updatedOn; String updatedActor; String properties; + String via; @Nonnull public static RelationshipAnnotation fromPegasusAnnotationObject( @@ -78,6 +83,7 @@ public static RelationshipAnnotation fromPegasusAnnotationObject( final Optional updatedActor = AnnotationUtils.getField(map, UPDATED_ACTOR, String.class); final Optional properties = AnnotationUtils.getField(map, PROPERTIES, String.class); + final Optional via = AnnotationUtils.getField(map, VIA, String.class); return new RelationshipAnnotation( name.get(), @@ -88,6 +94,43 @@ public static RelationshipAnnotation fromPegasusAnnotationObject( createdActor.orElse(null), updatedOn.orElse(null), updatedActor.orElse(null), - properties.orElse(null)); + properties.orElse(null), + via.orElse(null)); + } + + /** + * Constructor for backwards compatibility + * + * @param name + * @param entityTypes + * @param isUpstream + * @param isLineage + * @param createdOn + * @param createdActor + * @param updatedOn + * @param updatedActor + * @param properties + */ + public RelationshipAnnotation( + String name, + List validDestinationTypes, + boolean isUpstream, + boolean isLineage, + String createdOn, + String createdActor, + String updatedOn, + String updatedActor, + String properties) { + this( + name, + validDestinationTypes, + isUpstream, + isLineage, + createdOn, + createdActor, + updatedOn, + updatedActor, + properties, + null); } } diff --git a/metadata-ingestion/src/datahub/ingestion/graph/client.py b/metadata-ingestion/src/datahub/ingestion/graph/client.py index 5c24b06dde999..d64f756dddc13 100644 --- a/metadata-ingestion/src/datahub/ingestion/graph/client.py +++ b/metadata-ingestion/src/datahub/ingestion/graph/client.py @@ -83,6 +83,7 @@ class DatahubClientConfig(ConfigModel): class RelatedEntity: urn: str relationship_type: str + via: Optional[str] = None def _graphql_entity_type(entity_type: str) -> str: @@ -833,6 +834,7 @@ def get_related_entities( yield RelatedEntity( urn=related_entity["urn"], relationship_type=related_entity["relationshipType"], + via=related_entity.get("via"), ) done = response.get("count", 0) == 0 or response.get("count", 0) < len( response.get("entities", []) @@ -840,9 +842,9 @@ def get_related_entities( start = start + response.get("count", 0) def exists(self, entity_urn: str) -> bool: - entity_urn_parsed: Urn = Urn.create_from_string(entity_urn) + entity_urn_parsed: Urn = Urn.from_string(entity_urn) try: - key_aspect_class = KEY_ASPECTS.get(entity_urn_parsed.get_type()) + key_aspect_class = KEY_ASPECTS.get(entity_urn_parsed.entity_type) if key_aspect_class: result = self.get_aspect(entity_urn, key_aspect_class) return result is not None diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index b3b11d200ec0d..e6e69c96c1542 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -1948,6 +1948,7 @@ public 
RollbackRunResult deleteUrn(Urn urn) { */ @Override public Set exists(@Nonnull final Collection urns, boolean includeSoftDeleted) { + final Set dbKeys = urns.stream() .map( @@ -1960,7 +1961,6 @@ public Set exists(@Nonnull final Collection urns, boolean includeSoftD .getName(), ASPECT_LATEST_VERSION)) .collect(Collectors.toSet()); - final Map aspects = _aspectDao.batchGet(dbKeys); final Set existingUrnStrings = aspects.values().stream() diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java index 24e272dee7a25..3bcaf6a08f4e5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java @@ -653,7 +653,7 @@ protected static List getRelatedEntitiesFromResponseData( }) // for undirected we get duplicate relationships .distinct() - .map(relationship -> new RelatedEntity(relationship, urn)); + .map(relationship -> new RelatedEntity(relationship, urn, null)); } return Stream.empty(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index 3051319aa54cf..270615aa0e356 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.graph.elastic; import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; +import static com.linkedin.metadata.graph.elastic.GraphRelationshipMappingsBuilder.*; import com.codahale.metrics.Timer; import com.datahub.util.exception.ESQueryException; @@ -11,6 +12,7 @@ import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.IntegerArray; import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.graph.GraphFilters; import com.linkedin.metadata.graph.LineageDirection; @@ -34,7 +36,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.LinkedList; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -182,6 +184,24 @@ public static BoolQueryBuilder buildQuery( @Nullable final Filter destinationEntityFilter, @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + return buildQuery( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + null); + } + + public static BoolQueryBuilder buildQuery( + @Nullable final List sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + @Nullable final String lifecycleOwner) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); @@ -221,6 +241,9 @@ public static BoolQueryBuilder buildQuery( if (relationshipFilter.getOr() != null) { addFilterToQueryBuilder(new Filter().setOr(relationshipFilter.getOr()), null, finalQuery); } + if (lifecycleOwner != null) { + 
finalQuery.filter(QueryBuilders.termQuery(EDGE_FIELD_LIFECYCLE_OWNER, lifecycleOwner)); + } return finalQuery; } @@ -235,14 +258,16 @@ public LineageResponse getLineage( int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - List result = new ArrayList<>(); + Map result = new HashMap<>(); long currentTime = System.currentTimeMillis(); long remainingTime = graphQueryConfiguration.getTimeoutSeconds() * 1000; + boolean exploreMultiplePaths = graphQueryConfiguration.isEnableMultiPathSearch(); long timeoutTime = currentTime + remainingTime; // Do a Level-order BFS Set visitedEntities = ConcurrentHashMap.newKeySet(); visitedEntities.add(entityUrn); + Set viaEntities = ConcurrentHashMap.newKeySet(); Map existingPaths = new HashMap<>(); List currentLevel = ImmutableList.of(entityUrn); @@ -267,12 +292,23 @@ public LineageResponse getLineage( direction, graphFilters, visitedEntities, + viaEntities, i + 1, + maxHops - (i + 1), remainingTime, existingPaths, startTimeMillis, - endTimeMillis); - result.addAll(oneHopRelationships); + endTimeMillis, + exploreMultiplePaths); + for (LineageRelationship oneHopRelnship : oneHopRelationships) { + if (result.containsKey(oneHopRelnship.getEntity())) { + result.put( + oneHopRelnship.getEntity(), + mergeLineageRelationships(result.get(oneHopRelnship.getEntity()), oneHopRelnship)); + } else { + result.put(oneHopRelnship.getEntity(), oneHopRelnship); + } + } currentLevel = oneHopRelationships.stream() .map(LineageRelationship::getEntity) @@ -280,7 +316,8 @@ public LineageResponse getLineage( currentTime = System.currentTimeMillis(); remainingTime = timeoutTime - currentTime; } - LineageResponse response = new LineageResponse(result.size(), result); + List resultList = new ArrayList<>(result.values()); + LineageResponse response = new LineageResponse(resultList.size(), resultList); List subList; if (offset >= response.getTotal()) { @@ -295,6 +332,39 @@ public LineageResponse getLineage( return new LineageResponse(response.getTotal(), subList); } + /** + * Merges two lineage relationship objects. The merged relationship object will have the minimum + * degree of the two relationships, and the union of the paths. In addition, the merged + * relationship object will have the union of the degrees in the new degrees field. 
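+   * For example, merging a degree 1 and a degree 2 relationship to the same entity yields degree 1, degrees {1, 2}, and the concatenation of both path lists.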
+ * + * @param existingRelationship + * @param newRelationship + * @return the merged relationship object + */ + private LineageRelationship mergeLineageRelationships( + final LineageRelationship existingRelationship, final LineageRelationship newRelationship) { + try { + LineageRelationship copyRelationship = existingRelationship.copy(); + copyRelationship.setDegree( + Math.min(existingRelationship.getDegree(), newRelationship.getDegree())); + Set degrees = new HashSet<>(); + if (copyRelationship.hasDegrees()) { + degrees = copyRelationship.getDegrees().stream().collect(Collectors.toSet()); + } + degrees.add(newRelationship.getDegree()); + copyRelationship.setDegrees(new IntegerArray(degrees)); + UrnArrayArray copyPaths = + new UrnArrayArray( + existingRelationship.getPaths().size() + newRelationship.getPaths().size()); + copyPaths.addAll(existingRelationship.getPaths()); + copyPaths.addAll(newRelationship.getPaths()); + copyRelationship.setPaths(copyPaths); + return copyRelationship; + } catch (CloneNotSupportedException e) { + throw new RuntimeException("Failed to clone lineage relationship", e); + } + } + // Get 1-hop lineage relationships asynchronously in batches with timeout @WithSpan public List getLineageRelationshipsInBatches( @@ -302,11 +372,14 @@ public List getLineageRelationshipsInBatches( @Nonnull LineageDirection direction, GraphFilters graphFilters, Set visitedEntities, + Set viaEntities, int numHops, + int remainingHops, long remainingTime, Map existingPaths, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis) { + @Nullable Long endTimeMillis, + boolean exploreMultiplePaths) { List> batches = Lists.partition(entityUrns, graphQueryConfiguration.getBatchSize()); return ConcurrencyUtils.getAllCompleted( batches.stream() @@ -319,10 +392,13 @@ public List getLineageRelationshipsInBatches( direction, graphFilters, visitedEntities, + viaEntities, numHops, + remainingHops, existingPaths, startTimeMillis, - endTimeMillis))) + endTimeMillis, + exploreMultiplePaths))) .collect(Collectors.toList()), remainingTime, TimeUnit.MILLISECONDS) @@ -338,10 +414,13 @@ private List getLineageRelationships( @Nonnull LineageDirection direction, GraphFilters graphFilters, Set visitedEntities, + Set viaEntities, int numHops, + int remainingHops, Map existingPaths, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis) { + @Nullable Long endTimeMillis, + boolean exploreMultiplePaths) { Map> urnsPerEntityType = entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); Map> edgesPerEntityType = @@ -365,7 +444,15 @@ private List getLineageRelationships( entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) .collect(Collectors.toSet()); return extractRelationships( - entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); + entityUrnSet, + response, + validEdges, + visitedEntities, + viaEntities, + numHops, + remainingHops, + existingPaths, + exploreMultiplePaths); } @VisibleForTesting @@ -408,7 +495,6 @@ public static QueryBuilder getLineageQuery( return finalQuery; } - // Get search query for given list of edges and source urns @VisibleForTesting public static QueryBuilder getLineageQueryForEntityType( @Nonnull List urns, @@ -464,27 +550,88 @@ public static void addEdgeToPaths( @Nonnull final Map existingPaths, @Nonnull final Urn parentUrn, @Nonnull final Urn childUrn) { + addEdgeToPaths(existingPaths, parentUrn, null, childUrn); + } + + /** + * Utility method to log paths to the debug log. 
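+   * The entire body is gated on log.isDebugEnabled(), so this is effectively a no-op unless debug logging is on.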
+ * + * @param paths + * @param message + */ + private static void logPaths(UrnArrayArray paths, String message) { + if (log.isDebugEnabled()) { + log.debug("xxxxxxxxxx"); + log.debug(message); + log.debug("---------"); + if (paths != null) { + paths.forEach(path -> log.debug("{}", path)); + } else { + log.debug("EMPTY"); + } + log.debug("xxxxxxxxxx"); + } + } + + private static boolean containsCycle(final UrnArray path) { + Set urnSet = path.stream().collect(Collectors.toUnmodifiableSet()); + // path contains a cycle if any urn is repeated twice + return (path.size() != urnSet.size()); + } + + public static boolean addEdgeToPaths( + @Nonnull final Map existingPaths, + @Nonnull final Urn parentUrn, + final Urn viaUrn, + @Nonnull final Urn childUrn) { + boolean edgeAdded = false; // Collect all full-paths to this child node. This is what will be returned. UrnArrayArray pathsToParent = existingPaths.get(parentUrn); - if (pathsToParent != null && pathsToParent.size() > 0) { + logPaths(pathsToParent, String.format("Paths to Parent: %s, Child: %s", parentUrn, childUrn)); + logPaths(existingPaths.get(childUrn), String.format("Existing Paths to Child: %s", childUrn)); + if (pathsToParent != null && !pathsToParent.isEmpty()) { // If there are existing paths to this parent node, then we attempt // to append the child to each of the existing paths (lengthen it). // We then store this as a separate, unique path associated with the child. - for (final UrnArray pathToParent : pathsToParent) { + for (UrnArray pathToParent : pathsToParent) { + if (containsCycle(pathToParent)) { + log.debug("Skipping extending path {} because it contains a cycle", pathToParent); + continue; + } UrnArray pathToChild = clonePath(pathToParent); + if (viaUrn != null) { + pathToChild.add(viaUrn); + } pathToChild.add(childUrn); // Save these paths to the global structure for easy access on future iterations. existingPaths.putIfAbsent(childUrn, new UrnArrayArray()); - existingPaths.get(childUrn).add(pathToChild); + UrnArrayArray existingPathsToChild = existingPaths.get(childUrn); + boolean dupExists = false; + for (UrnArray existingPathToChild : existingPathsToChild) { + if (existingPathToChild.equals(pathToChild)) { + dupExists = true; + } + } + if (!dupExists) { + existingPathsToChild.add(pathToChild); + edgeAdded = true; + } } } else { // No existing paths to this parent urn. Let's create a new path to the child! UrnArray pathToChild = new UrnArray(); - pathToChild.addAll(ImmutableList.of(parentUrn, childUrn)); + if (viaUrn == null) { + pathToChild.addAll(ImmutableList.of(parentUrn, childUrn)); + } else { + pathToChild.addAll(ImmutableList.of(parentUrn, viaUrn, childUrn)); + } // Save these paths to the global structure for easy access on future iterations. 
existingPaths.putIfAbsent(childUrn, new UrnArrayArray()); existingPaths.get(childUrn).add(pathToChild); + edgeAdded = true; } + logPaths(existingPaths.get(childUrn), String.format("New paths to Child: %s", childUrn)); + return edgeAdded; } // Given set of edges and the search response, extract all valid edges that originate from the @@ -495,101 +642,198 @@ private static List extractRelationships( @Nonnull SearchResponse searchResponse, Set> validEdges, Set visitedEntities, + Set viaEntities, int numHops, - Map existingPaths) { - final List result = new LinkedList<>(); - final SearchHit[] hits = searchResponse.getHits().getHits(); - for (SearchHit hit : hits) { - final Map document = hit.getSourceAsMap(); - final Urn sourceUrn = - UrnUtils.getUrn(((Map) document.get(SOURCE)).get("urn").toString()); - final Urn destinationUrn = - UrnUtils.getUrn(((Map) document.get(DESTINATION)).get("urn").toString()); - final String type = document.get(RELATIONSHIP_TYPE).toString(); - final Number createdOnNumber = (Number) document.getOrDefault(CREATED_ON, null); - final Long createdOn = createdOnNumber != null ? createdOnNumber.longValue() : null; - final Number updatedOnNumber = (Number) document.getOrDefault(UPDATED_ON, null); - final Long updatedOn = updatedOnNumber != null ? updatedOnNumber.longValue() : null; - final String createdActorString = (String) document.getOrDefault(CREATED_ACTOR, null); - final Urn createdActor = - createdActorString == null ? null : UrnUtils.getUrn(createdActorString); - final String updatedActorString = (String) document.getOrDefault(UPDATED_ACTOR, null); - final Urn updatedActor = - updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); - final Map properties; - if (document.containsKey(PROPERTIES) && document.get(PROPERTIES) instanceof Map) { - properties = (Map) document.get(PROPERTIES); - } else { - properties = Collections.emptyMap(); - } - boolean isManual = properties.containsKey(SOURCE) && properties.get(SOURCE).equals("UI"); - - // Potential outgoing edge - if (entityUrns.contains(sourceUrn)) { - // Skip if already visited - // Skip if edge is not a valid outgoing edge - // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(destinationUrn) - && validEdges.contains( - Pair.of( - sourceUrn.getEntityType(), - new EdgeInfo( - type, - RelationshipDirection.OUTGOING, - destinationUrn.getEntityType().toLowerCase())))) { - visitedEntities.add(destinationUrn); - // Append the edge to a set of unique graph paths. 
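+          // addEdgeToPaths returns false when this exact path was already recorded, so duplicate relationships are not re-emitted.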
- addEdgeToPaths(existingPaths, sourceUrn, destinationUrn); - final LineageRelationship relationship = - createLineageRelationship( - type, - destinationUrn, - numHops, - existingPaths.getOrDefault( + int remainingHops, + Map existingPaths, + boolean exploreMultiplePaths) { + try { + Map lineageRelationshipMap = new HashMap<>(); + final SearchHit[] hits = searchResponse.getHits().getHits(); + log.debug("numHits: {}, numHops {}, remainingHops {}", hits.length, numHops, remainingHops); + int index = -1; + for (SearchHit hit : hits) { + index++; + final Map document = hit.getSourceAsMap(); + log.debug("{}: hit: {}", index, document); + final Urn sourceUrn = + UrnUtils.getUrn(((Map) document.get(SOURCE)).get("urn").toString()); + final Urn destinationUrn = + UrnUtils.getUrn( + ((Map) document.get(DESTINATION)).get("urn").toString()); + final String type = document.get(RELATIONSHIP_TYPE).toString(); + if (sourceUrn.equals(destinationUrn)) { + log.debug("Skipping a self-edge of type {} on {}", type, sourceUrn); + continue; + } + final Number createdOnNumber = (Number) document.getOrDefault(CREATED_ON, null); + final Long createdOn = createdOnNumber != null ? createdOnNumber.longValue() : null; + final Number updatedOnNumber = (Number) document.getOrDefault(UPDATED_ON, null); + final Long updatedOn = updatedOnNumber != null ? updatedOnNumber.longValue() : null; + final String createdActorString = (String) document.getOrDefault(CREATED_ACTOR, null); + final Urn createdActor = + createdActorString == null ? null : UrnUtils.getUrn(createdActorString); + final String updatedActorString = (String) document.getOrDefault(UPDATED_ACTOR, null); + final Urn updatedActor = + updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); + final Map properties; + if (document.containsKey(PROPERTIES) && document.get(PROPERTIES) instanceof Map) { + properties = (Map) document.get(PROPERTIES); + } else { + properties = Collections.emptyMap(); + } + boolean isManual = properties.containsKey(SOURCE) && properties.get(SOURCE).equals("UI"); + Urn viaEntity = null; + String viaContent = (String) document.getOrDefault(EDGE_FIELD_VIA, null); + if (viaContent != null) { + try { + viaEntity = Urn.createFromString(viaContent); + } catch (Exception e) { + log.warn( + "Failed to parse urn from via entity {}, will swallow exception and continue...", + viaContent); + } + } + log.debug("{}: viaEntity {}", index, viaEntity); + + // Potential outgoing edge + if (entityUrns.contains(sourceUrn)) { + log.debug("{}: entity urns contains source urn {}", index, sourceUrn); + // Skip if already visited or if we're exploring multiple paths + // Skip if edge is not a valid outgoing edge + if ((exploreMultiplePaths || !visitedEntities.contains(destinationUrn)) + && validEdges.contains( + Pair.of( + sourceUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.OUTGOING, + destinationUrn.getEntityType().toLowerCase())))) { + + if (visitedEntities.contains(destinationUrn)) { + log.debug("Found a second path to the same urn {}", destinationUrn); + } + // Append the edge to a set of unique graph paths. + if (addEdgeToPaths(existingPaths, sourceUrn, viaEntity, destinationUrn)) { + final LineageRelationship relationship = + createLineageRelationship( + type, destinationUrn, - new UrnArrayArray()), // Fetch the paths to the next level entity. 
- createdOn, - createdActor, - updatedOn, - updatedActor, - isManual); - result.add(relationship); + numHops, + existingPaths.getOrDefault(destinationUrn, new UrnArrayArray()), + // Fetch the paths to the next level entity. + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); + log.debug("Adding relationship {} to urn {}", relationship, destinationUrn); + lineageRelationshipMap.put(relationship.getEntity(), relationship); + if ((viaEntity != null) && (!viaEntities.contains(viaEntity))) { + UrnArrayArray viaPaths = getViaPaths(existingPaths, destinationUrn, viaEntity); + LineageRelationship viaRelationship = + createLineageRelationship( + type, + viaEntity, + numHops, + viaPaths, + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); + viaEntities.add(viaEntity); + lineageRelationshipMap.put(viaRelationship.getEntity(), viaRelationship); + log.debug("Adding via entity {} with paths {}", viaEntity, viaPaths); + } + } + visitedEntities.add(destinationUrn); + } } - } - // Potential incoming edge - if (entityUrns.contains(destinationUrn)) { - // Skip if already visited - // Skip if edge is not a valid outgoing edge - // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(sourceUrn) - && validEdges.contains( - Pair.of( - destinationUrn.getEntityType(), - new EdgeInfo( + // Potential incoming edge + if (entityUrns.contains(destinationUrn)) { + // Skip if already visited or if we're exploring multiple paths + // Skip if edge is not a valid outgoing edge + log.debug("entity urns contains destination urn {}", destinationUrn); + if ((exploreMultiplePaths || !visitedEntities.contains(sourceUrn)) + && validEdges.contains( + Pair.of( + destinationUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.INCOMING, + sourceUrn.getEntityType().toLowerCase())))) { + if (visitedEntities.contains(sourceUrn)) { + log.debug("Found a second path to the same urn {}", sourceUrn); + } + visitedEntities.add(sourceUrn); + // Append the edge to a set of unique graph paths. + if (addEdgeToPaths(existingPaths, destinationUrn, viaEntity, sourceUrn)) { + log.debug("Adding incoming edge: {}, {}, {}", destinationUrn, viaEntity, sourceUrn); + final LineageRelationship relationship = + createLineageRelationship( + type, + sourceUrn, + numHops, + existingPaths.getOrDefault(sourceUrn, new UrnArrayArray()), + // Fetch the paths to the next level entity. + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); + log.debug("Adding relationship {} to urn {}", relationship, sourceUrn); + lineageRelationshipMap.put(relationship.getEntity(), relationship); + if ((viaEntity != null) && (!viaEntities.contains(viaEntity))) { + UrnArrayArray viaPaths = getViaPaths(existingPaths, sourceUrn, viaEntity); + viaEntities.add(viaEntity); + LineageRelationship viaRelationship = + createLineageRelationship( type, - RelationshipDirection.INCOMING, - sourceUrn.getEntityType().toLowerCase())))) { - visitedEntities.add(sourceUrn); - // Append the edge to a set of unique graph paths. - addEdgeToPaths(existingPaths, destinationUrn, sourceUrn); - final LineageRelationship relationship = - createLineageRelationship( - type, - sourceUrn, - numHops, - existingPaths.getOrDefault( - sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. 
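// Illustrative aside (not part of this patch): accumulating into a map keyed
// by entity urn, instead of appending to a List as the deleted code did, is
// what guarantees a single LineageRelationship per discovered entity even when
// several hits in the same batch reach it. A minimal sketch of that
// merge-by-key pattern, with plain java.util types standing in for the DataHub
// classes:

import java.util.*;

final class RelationshipDedupSketch {
  record Relationship(String entity, List<List<String>> paths) {}

  // One Relationship per entity; path sets are merged as more edges arrive.
  static Collection<Relationship> dedup(List<Relationship> discovered) {
    Map<String, Relationship> byEntity = new LinkedHashMap<>();
    for (Relationship r : discovered) {
      byEntity.merge(
          r.entity(),
          r,
          (existing, incoming) -> {
            List<List<String>> merged = new ArrayList<>(existing.paths());
            merged.addAll(incoming.paths());
            return new Relationship(existing.entity(), merged);
          });
    }
    return byEntity.values();
  }
}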
- createdOn, - createdActor, - updatedOn, - updatedActor, - isManual); - result.add(relationship); + viaEntity, + numHops, + viaPaths, + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); + lineageRelationshipMap.put(viaRelationship.getEntity(), viaRelationship); + log.debug("Adding via relationship {} to urn {}", viaRelationship, viaEntity); + } + } + } + } + } + List result = new ArrayList<>(lineageRelationshipMap.values()); + log.debug("Number of lineage relationships in list: {}", result.size()); + log.debug("Result: {}", result); + return result; + } catch (Exception e) { + // This exception handler merely exists to log the exception at an appropriate point and + // rethrow + log.error("Caught exception", e); + throw e; + } + } + + private static UrnArrayArray getViaPaths( + Map existingPaths, Urn destinationUrn, Urn viaEntity) { + UrnArrayArray destinationPaths = + existingPaths.getOrDefault(destinationUrn, new UrnArrayArray()); + UrnArrayArray viaPaths = new UrnArrayArray(); + for (UrnArray destPath : destinationPaths) { + UrnArray viaPath = new UrnArray(); + for (Urn urn : destPath) { + viaPath.add(urn); + if (urn.equals(viaEntity)) { + break; } } + viaPaths.add(viaPath); } - return result; + return viaPaths; } private static LineageRelationship createLineageRelationship( @@ -607,6 +851,7 @@ private static LineageRelationship createLineageRelationship( .setType(type) .setEntity(entityUrn) .setDegree(numHops) + .setDegrees(new IntegerArray(ImmutableList.of(numHops))) .setPaths(paths); if (createdOn != null) { relationship.setCreatedOn(createdOn); @@ -658,8 +903,10 @@ private static QueryBuilder buildEntityTypesFilter( } private static QueryBuilder buildUrnFilters(@Nonnull List urns, @Nonnull String prefix) { - return QueryBuilders.termsQuery( - prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); + // dedup urns while preserving order + LinkedHashSet urnSet = new LinkedHashSet<>(); + urns.forEach(urn -> urnSet.add(urn.toString())); + return QueryBuilders.termsQuery(prefix + ".urn", urnSet); } private static QueryBuilder buildEdgeFilters(@Nonnull List edgeInfos) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java index 5d722a034fafc..ddbd00f90ef68 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java @@ -75,4 +75,27 @@ public BulkByScrollResponse deleteByQuery( .deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) .orElse(null); } + + public BulkByScrollResponse deleteByQuery( + @Nullable final String sourceType, + @Nonnull final Filter sourceEntityFilter, + @Nullable final String destinationType, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + String lifecycleOwner) { + BoolQueryBuilder finalQuery = + buildQuery( + sourceType == null ? ImmutableList.of() : ImmutableList.of(sourceType), + sourceEntityFilter, + destinationType == null ? 
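// Illustrative aside (not part of this patch): getViaPaths above credits a via
// node with the prefix of every known path to the destination, cut at the via
// entity. A self-contained sketch of that truncation, with List<String>
// standing in for UrnArray:

import java.util.*;

final class ViaPathSketch {
  static List<List<String>> viaPaths(List<List<String>> destinationPaths, String via) {
    List<List<String>> result = new ArrayList<>();
    for (List<String> path : destinationPaths) {
      List<String> prefix = new ArrayList<>();
      for (String node : path) {
        prefix.add(node);
        if (node.equals(via)) {
          break; // keep everything up to and including the via node
        }
      }
      result.add(prefix);
    }
    return result;
  }

  public static void main(String[] args) {
    // d1 -> query1 -> d2: the via node query1 is credited with [d1, query1].
    System.out.println(viaPaths(List.of(List.of("d1", "query1", "d2")), "query1"));
  }
}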
ImmutableList.of() : ImmutableList.of(destinationType), + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + lifecycleOwner); + + return bulkProcessor + .deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) + .orElse(null); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index 67590ffd6e7c1..90f46190ac18e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.GraphRelationshipMappingsBuilder.*; + import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.annotations.VisibleForTesting; @@ -81,9 +83,9 @@ private String toDocument(@Nonnull final Edge edge) { destinationObject.put("urn", edge.getDestination().toString()); destinationObject.put("entityType", edge.getDestination().getEntityType()); - searchDocument.set("source", sourceObject); - searchDocument.set("destination", destinationObject); - searchDocument.put("relationshipType", edge.getRelationshipType()); + searchDocument.set(EDGE_FIELD_SOURCE, sourceObject); + searchDocument.set(EDGE_FIELD_DESTINATION, destinationObject); + searchDocument.put(EDGE_FIELD_RELNSHIP_TYPE, edge.getRelationshipType()); if (edge.getCreatedOn() != null) { searchDocument.put("createdOn", edge.getCreatedOn()); } @@ -108,8 +110,15 @@ private String toDocument(@Nonnull final Edge edge) { entry.getKey(), entry.getValue())); } } - searchDocument.set("properties", propertiesObject); + searchDocument.set(EDGE_FIELD_PROPERTIES, propertiesObject); + } + if (edge.getLifecycleOwner() != null) { + searchDocument.put(EDGE_FIELD_LIFECYCLE_OWNER, edge.getLifecycleOwner().toString()); + } + if (edge.getVia() != null) { + searchDocument.put(EDGE_FIELD_VIA, edge.getVia().toString()); } + log.debug("Search doc for write {}", searchDocument); return searchDocument.toString(); } @@ -192,8 +201,8 @@ public RelatedEntitiesResult findRelatedEntities( final List relationships = searchHitsToRelatedEntities(response.getHits().getHits(), relationshipDirection).stream() .map(RelatedEntities::asRelatedEntity) + .filter(Objects::nonNull) .collect(Collectors.toList()); - return new RelatedEntitiesResult(offset, relationships.size(), totalCount, relationships); } @@ -277,6 +286,10 @@ public void removeNode(@Nonnull final Urn urn) { _graphWriteDAO.deleteByQuery( null, urnFilter, null, emptyFilter, relationshipTypes, incomingFilter); + // Delete all edges where this entity is a lifecycle owner + _graphWriteDAO.deleteByQuery( + null, emptyFilter, null, emptyFilter, relationshipTypes, incomingFilter, urn.toString()); + return; } @@ -394,15 +407,15 @@ private static List searchHitsToRelatedEntities( return Arrays.stream(searchHits) .map( hit -> { + final Map hitMap = hit.getSourceAsMap(); final String destinationUrnStr = - ((HashMap) - hit.getSourceAsMap().getOrDefault("destination", EMPTY_HASH)) + ((Map) hitMap.getOrDefault(EDGE_FIELD_DESTINATION, EMPTY_HASH)) .getOrDefault("urn", null); final String sourceUrnStr = - ((HashMap) - hit.getSourceAsMap().getOrDefault("source", EMPTY_HASH)) + ((Map) hitMap.getOrDefault(EDGE_FIELD_SOURCE, EMPTY_HASH)) .getOrDefault("urn", 
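// Illustrative aside (not part of this patch): with the new optional fields,
// an edge document written by toDocument above can carry a lifecycleOwner and
// a via urn alongside source, destination and relationshipType. A hedged
// sketch of just the document assembly, using the same Jackson types:

import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

final class EdgeDocumentSketch {
  static String toDocument(
      String sourceUrn,
      String destinationUrn,
      String relationshipType,
      String lifecycleOwner,
      String via) {
    ObjectNode doc = JsonNodeFactory.instance.objectNode();
    doc.putObject("source").put("urn", sourceUrn);
    doc.putObject("destination").put("urn", destinationUrn);
    doc.put("relationshipType", relationshipType);
    // Both fields are optional: lifecycleOwner records which entity owns this
    // edge's lifecycle; via records the query or job the edge passed through.
    if (lifecycleOwner != null) {
      doc.put("lifecycleOwner", lifecycleOwner);
    }
    if (via != null) {
      doc.put("via", via);
    }
    return doc.toString();
  }
}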
null); - final String relationshipType = (String) hit.getSourceAsMap().get("relationshipType"); + final String relationshipType = (String) hitMap.get(EDGE_FIELD_RELNSHIP_TYPE); + String viaEntity = (String) hitMap.get(EDGE_FIELD_VIA); if (destinationUrnStr == null || sourceUrnStr == null || relationshipType == null) { log.error( @@ -414,7 +427,11 @@ private static List searchHitsToRelatedEntities( } return new RelatedEntities( - relationshipType, sourceUrnStr, destinationUrnStr, relationshipDirection); + relationshipType, + sourceUrnStr, + destinationUrnStr, + relationshipDirection, + viaEntity); }) .filter(Objects::nonNull) .collect(Collectors.toList()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java index 21f2bf6c89204..ab4eaa1b99392 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java @@ -7,16 +7,23 @@ @Slf4j public class GraphRelationshipMappingsBuilder { + public static final String EDGE_FIELD_SOURCE = "source"; + public static final String EDGE_FIELD_DESTINATION = "destination"; + public static final String EDGE_FIELD_RELNSHIP_TYPE = "relationshipType"; + public static final String EDGE_FIELD_PROPERTIES = "properties"; + public static final String EDGE_FIELD_VIA = "via"; + public static final String EDGE_FIELD_LIFECYCLE_OWNER = "lifecycleOwner"; private GraphRelationshipMappingsBuilder() {} public static Map getMappings() { Map mappings = new HashMap<>(); - mappings.put("source", getMappingsForEntity()); - mappings.put("destination", getMappingsForEntity()); - mappings.put("relationshipType", getMappingsForKeyword()); - mappings.put("properties", getMappingsForEdgeProperties()); - + mappings.put(EDGE_FIELD_SOURCE, getMappingsForEntity()); + mappings.put(EDGE_FIELD_DESTINATION, getMappingsForEntity()); + mappings.put(EDGE_FIELD_RELNSHIP_TYPE, getMappingsForKeyword()); + mappings.put(EDGE_FIELD_PROPERTIES, getMappingsForEdgeProperties()); + mappings.put(EDGE_FIELD_LIFECYCLE_OWNER, getMappingsForKeyword()); + mappings.put(EDGE_FIELD_VIA, getMappingsForKeyword()); return ImmutableMap.of("properties", mappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index a1f73a134ec8e..11acc138d4dba 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -534,7 +534,8 @@ record -> .get(0) .asNode() .get("urn") - .asString())); // Urn TODO: Validate this works against Neo4j. + .asString(), // Urn TODO: Validate this works against Neo4j. 
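// Illustrative aside (not part of this patch): both new edge fields are
// indexed as plain keywords, so lifecycle-owner deletion and via lookups are
// exact-match terms queries. A sketch of the mapping fragment the builder
// produces, assuming getMappingsForKeyword() yields {"type": "keyword"}; the
// nested source/destination/properties mappings are elided:

import com.google.common.collect.ImmutableMap;
import java.util.HashMap;
import java.util.Map;

final class EdgeMappingSketch {
  static Map<String, Object> keyword() {
    return ImmutableMap.of("type", "keyword");
  }

  static Map<String, Object> edgeMappings() {
    Map<String, Object> fields = new HashMap<>();
    fields.put("relationshipType", keyword());
    fields.put("lifecycleOwner", keyword());
    fields.put("via", keyword());
    return ImmutableMap.of("properties", fields);
  }
}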
+ null)); final int totalCount = runQuery(countStatement).single().get(0).asInt(); return new RelatedEntitiesResult(offset, relatedEntities.size(), totalCount, relatedEntities); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java index f6358e4aeb207..cf9279414a394 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java @@ -19,6 +19,9 @@ import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.query.FreshnessStats; +import com.linkedin.metadata.query.GroupingCriterion; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.Criterion; @@ -55,13 +58,22 @@ @RequiredArgsConstructor @Slf4j public class LineageSearchService { + private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags() .setFulltext(false) .setMaxAggValues(20) .setSkipCache(false) .setSkipAggregates(false) - .setSkipHighlighting(true); + .setSkipHighlighting(true) + .setGroupingSpec( + new GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + new GroupingCriterion() // Convert schema fields to datasets by default to + // maintain backwards compatibility + .setBaseEntityType(SCHEMA_FIELD_ENTITY_NAME) + .setGroupingEntityType(DATASET_ENTITY_NAME)))); private final SearchService _searchService; private final GraphService _graphService; @Nullable private final Cache cache; @@ -206,14 +218,18 @@ public LineageSearchResult searchAcrossLineage( } } - // set schemaField relationship entity to be its reference urn - LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult); - lineageResult.setRelationships(updatedRelationships); + if (SearchUtils.convertSchemaFieldToDataset(searchFlags)) { + // set schemaField relationship entity to be its reference urn + LineageRelationshipArray updatedRelationships = + convertSchemaFieldRelationships(lineageResult); + lineageResult.setRelationships(updatedRelationships); + } // Filter hopped result based on the set of entities to return and inputFilters before sending // to search List lineageRelationships = filterRelationships(lineageResult, new HashSet<>(entities), inputFilters); + log.debug("Lineage relationships found: {}", lineageRelationships); String lineageGraphInfo = String.format( @@ -247,7 +263,9 @@ public LineageSearchResult searchAcrossLineage( lineageRelationships, input, reducedFilters, sortCriterion, from, size, finalFlags); if (!lineageSearchResult.getEntities().isEmpty()) { log.debug( - "Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); + "Lineage entity results number -> {}; first -> {}", + lineageSearchResult.getNumEntities(), + lineageSearchResult.getEntities().get(0).toString()); } numEntities = lineageSearchResult.getNumEntities(); return lineageSearchResult; @@ -470,9 +488,17 @@ private Map generateUrnToRelationshipMap( if (existingRelationship == null) { urnToRelationship.put(relationship.getEntity(), relationship); } else { - UrnArrayArray paths = existingRelationship.getPaths(); - paths.addAll(relationship.getPaths()); - 
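// Illustrative aside (not part of this patch): the default flags above roll
// schemaField results up into their datasets. A sketch of how a caller could
// mirror that default, or opt out with an explicitly empty grouping spec (the
// opt-out shape is an assumption, not an API documented by this change):

import com.linkedin.metadata.query.GroupingCriterion;
import com.linkedin.metadata.query.GroupingCriterionArray;
import com.linkedin.metadata.query.GroupingSpec;
import com.linkedin.metadata.query.SearchFlags;

final class GroupingFlagsSketch {
  // Mirrors the service default: schemaField entities grouped into datasets.
  static SearchFlags grouped() {
    return new SearchFlags()
        .setGroupingSpec(
            new GroupingSpec()
                .setGroupingCriteria(
                    new GroupingCriterionArray(
                        new GroupingCriterion()
                            .setBaseEntityType("schemaField")
                            .setGroupingEntityType("dataset"))));
  }

  // An explicit spec with no criteria leaves schemaField relationships
  // untouched by convertSchemaFieldRelationships.
  static SearchFlags ungrouped() {
    return new SearchFlags()
        .setGroupingSpec(new GroupingSpec().setGroupingCriteria(new GroupingCriterionArray()));
  }
}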
existingRelationship.setPaths(paths); + UrnArrayArray newPaths = + new UrnArrayArray( + existingRelationship.getPaths().size() + relationship.getPaths().size()); + log.debug( + "Found {} paths for {}, will add to existing paths: {}", + relationship.getPaths().size(), + relationship.getEntity(), + existingRelationship.getPaths().size()); + newPaths.addAll(existingRelationship.getPaths()); + newPaths.addAll(relationship.getPaths()); + existingRelationship.setPaths(newPaths); } } return urnToRelationship; @@ -665,6 +691,9 @@ private LineageSearchEntity buildLineageSearchEntity( if (lineageRelationship != null) { entity.setPaths(lineageRelationship.getPaths()); entity.setDegree(lineageRelationship.getDegree()); + if (lineageRelationship.hasDegrees()) { + entity.setDegrees(lineageRelationship.getDegrees()); + } } return entity; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java index b8cf0626b7251..13ccfd7f972af 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.LongMap; @@ -196,6 +198,28 @@ public static SearchFlags applyDefaultSearchFlags( if (!finalSearchFlags.hasSkipCache() || finalSearchFlags.isSkipCache() == null) { finalSearchFlags.setSkipCache(defaultFlags.isSkipCache()); } + if ((!finalSearchFlags.hasGroupingSpec() || finalSearchFlags.getGroupingSpec() == null) + && (defaultFlags.getGroupingSpec() != null)) { + finalSearchFlags.setGroupingSpec(defaultFlags.getGroupingSpec()); + } return finalSearchFlags; } + + /** + * Returns true if the search flags contain a grouping spec that requires conversion of schema + * field entity to dataset entity. + * + * @param searchFlags the search flags + * @return true if the search flags contain a grouping spec that requires conversion of schema + * field entity to dataset entity. 
+ */ + public static boolean convertSchemaFieldToDataset(@Nullable SearchFlags searchFlags) { + return (searchFlags != null) + && (searchFlags.getGroupingSpec() != null) + && (searchFlags.getGroupingSpec().getGroupingCriteria().stream() + .anyMatch( + grouping -> + grouping.getBaseEntityType().equals(SCHEMA_FIELD_ENTITY_NAME) + && grouping.getGroupingEntityType().equals(DATASET_ENTITY_NAME))); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index ee2d794471f6b..ed633b063afb2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -14,7 +14,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.dataset.FineGrainedLineage; +import com.linkedin.dataset.FineGrainedLineageArray; import com.linkedin.dataset.UpstreamLineage; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; @@ -275,21 +277,36 @@ private void handleDeleteChangeEvent(@Nonnull final MCLBatchItem event) { // TODO: remove this method once we implement sourceOverride when creating graph edges private void updateFineGrainedEdgesAndRelationships( - RecordTemplate aspect, + Urn entity, + FineGrainedLineageArray fineGrainedLineageArray, List edgesToAdd, HashMap> urnToRelationshipTypesBeingAdded) { - UpstreamLineage upstreamLineage = new UpstreamLineage(aspect.data()); - if (upstreamLineage.getFineGrainedLineages() != null) { - for (FineGrainedLineage fineGrainedLineage : upstreamLineage.getFineGrainedLineages()) { + if (fineGrainedLineageArray != null) { + for (FineGrainedLineage fineGrainedLineage : fineGrainedLineageArray) { if (!fineGrainedLineage.hasDownstreams() || !fineGrainedLineage.hasUpstreams()) { break; } + // Fine grained lineage array is present either on datajob (datajob input/output) or dataset + // We set the datajob as the viaEntity in scenario 1, and the query (if present) as the + // viaEntity in scenario 2 + Urn viaEntity = + entity.getEntityType().equals("dataJob") ? entity : fineGrainedLineage.getQuery(); // for every downstream, create an edge with each of the upstreams for (Urn downstream : fineGrainedLineage.getDownstreams()) { for (Urn upstream : fineGrainedLineage.getUpstreams()) { // TODO: add edges uniformly across aspects edgesToAdd.add( - new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); + new Edge( + downstream, + upstream, + DOWNSTREAM_OF, + null, + null, + null, + null, + null, + entity, + viaEntity)); Set relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); @@ -357,12 +374,23 @@ private Pair, HashMap>> getEdgesAndRelationshipTypes // inputFields // since @Relationship only links between the parent entity urn and something else. 
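// Illustrative aside (not part of this patch): the via attribution rule in
// updateFineGrainedEdgesAndRelationships above is compact enough to state on
// its own. Fine-grained lineage on a dataJob is attributed to the job itself;
// dataset-level fine-grained lineage is attributed to the detected query, if
// any. A standalone sketch with Strings in place of Urns (hypothetical names):

final class ViaSelectionSketch {
  static String chooseVia(String entityUrn, String entityType, String queryUrn) {
    // Scenario 1: datajob input/output lineage -> the job is the via node.
    if ("dataJob".equals(entityType)) {
      return entityUrn;
    }
    // Scenario 2: dataset upstream lineage -> the detected query, if present.
    return queryUrn; // may be null, in which case the edge carries no via
  }
}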
if (aspectSpec.getName().equals(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { - updateFineGrainedEdgesAndRelationships(aspect, edgesToAdd, urnToRelationshipTypesBeingAdded); - } - if (aspectSpec.getName().equals(Constants.INPUT_FIELDS_ASPECT_NAME)) { + UpstreamLineage upstreamLineage = new UpstreamLineage(aspect.data()); + updateFineGrainedEdgesAndRelationships( + urn, + upstreamLineage.getFineGrainedLineages(), + edgesToAdd, + urnToRelationshipTypesBeingAdded); + } else if (aspectSpec.getName().equals(Constants.INPUT_FIELDS_ASPECT_NAME)) { final InputFields inputFields = new InputFields(aspect.data()); updateInputFieldEdgesAndRelationships( urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); + } else if (aspectSpec.getName().equals(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { + DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(aspect.data()); + updateFineGrainedEdgesAndRelationships( + urn, + dataJobInputOutput.getFineGrainedLineages(), + edgesToAdd, + urnToRelationshipTypesBeingAdded); } Map> extractedFields = @@ -394,7 +422,7 @@ private void updateGraphService( edgeAndRelationTypes.getSecond(); log.debug("Here's the relationship types found {}", urnToRelationshipTypesBeingAdded); - if (urnToRelationshipTypesBeingAdded.size() > 0) { + if (!urnToRelationshipTypesBeingAdded.isEmpty()) { for (Map.Entry> entry : urnToRelationshipTypesBeingAdded.entrySet()) { _graphService.removeEdgesFromNode( entry.getKey(), diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java index 3a51344d5779d..2de61c8ed31bb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java @@ -14,6 +14,9 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.metadata.config.search.GraphQueryConfiguration; +import com.linkedin.metadata.graph.dgraph.DgraphGraphService; +import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; @@ -36,6 +39,8 @@ import java.util.stream.IntStream; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.Assert; import org.testng.annotations.BeforeMethod; @@ -198,6 +203,19 @@ public void testStaticUrns() { @Nonnull protected abstract GraphService getGraphService() throws Exception; + /** + * Graph services that support multi-path search should override this method to provide a + * multi-path search enabled GraphService instance. + * + * @param enableMultiPathSearch + * @return + * @throws Exception + */ + @Nonnull + protected GraphService getGraphService(boolean enableMultiPathSearch) throws Exception { + return getGraphService(); + } + /** * Allows the specific GraphService test implementation to wait for GraphService writes to be * synced / become available to reads. 
@@ -235,7 +253,12 @@ protected GraphService getPopulatedGraphService() throws Exception {
   }
 
   protected GraphService getLineagePopulatedGraphService() throws Exception {
-    GraphService service = getGraphService();
+    return getLineagePopulatedGraphService(
+        GraphQueryConfiguration.testDefaults.isEnableMultiPathSearch());
+  }
+
+  protected GraphService getLineagePopulatedGraphService(boolean multiPathSearch)
+      throws Exception {
+    GraphService service = getGraphService(multiPathSearch);
 
     List<Edge> edges =
         Arrays.asList(
@@ -1821,9 +1844,16 @@ public void run() {
     assertEquals(throwables.size(), 0);
   }
 
-  @Test
-  public void testPopulatedGraphServiceGetLineageMultihop() throws Exception {
-    GraphService service = getLineagePopulatedGraphService();
+  @ParameterizedTest
+  @ValueSource(booleans = {true, false})
+  public void testPopulatedGraphServiceGetLineageMultihop(boolean attemptMultiPathAlgo)
+      throws Exception {
+
+    GraphService service = getLineagePopulatedGraphService(attemptMultiPathAlgo);
+    // Implementations other than Neo4J and DGraph explore more of the graph to discover nodes at
+    // multiple hops
+    boolean expandedGraphAlgoEnabled =
+        (!((service instanceof Neo4jGraphService) || (service instanceof DgraphGraphService)));
 
     EntityLineageResult upstreamLineage =
         service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2);
@@ -1838,16 +1868,23 @@ public void testPopulatedGraphServiceGetLineageMultihop() throws Exception {
     Map<Urn, LineageRelationship> relationships =
         downstreamLineage.getRelationships().stream()
             .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity()));
+    Set<Urn> entities = relationships.keySet().stream().collect(Collectors.toUnmodifiableSet());
+    assertEquals(entities.size(), 5);
     assertTrue(relationships.containsKey(datasetTwoUrn));
-    assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1);
+    assertEquals(relationships.get(datasetTwoUrn).getDegree(), 1);
     assertTrue(relationships.containsKey(datasetThreeUrn));
-    assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2);
+    assertEquals(relationships.get(datasetThreeUrn).getDegree(), 2);
     assertTrue(relationships.containsKey(datasetFourUrn));
-    assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2);
+    assertEquals(relationships.get(datasetFourUrn).getDegree(), 2);
     assertTrue(relationships.containsKey(dataJobOneUrn));
-    assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1);
+    assertEquals(relationships.get(dataJobOneUrn).getDegree(), 1);
+    // dataJobOne is present both at degree 1 and degree 2
+    if (expandedGraphAlgoEnabled && attemptMultiPathAlgo) {
+      assertTrue(relationships.get(dataJobOneUrn).getDegrees().contains(Integer.valueOf(1)));
+      assertTrue(relationships.get(dataJobOneUrn).getDegrees().contains(Integer.valueOf(2)));
+    }
     assertTrue(relationships.containsKey(dataJobTwoUrn));
-    assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1);
+    assertEquals(relationships.get(dataJobTwoUrn).getDegree(), 1);
 
     upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2);
     assertEquals(upstreamLineage.getTotal().intValue(), 3);
@@ -1856,11 +1893,11 @@
     relationships =
         upstreamLineage.getRelationships().stream()
             .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity()));
     assertTrue(relationships.containsKey(datasetOneUrn));
-    assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2);
+    assertEquals(relationships.get(datasetOneUrn).getDegree(), 2);
assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertEquals(relationships.get(datasetTwoUrn).getDegree(), 1); assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + assertEquals(relationships.get(dataJobOneUrn).getDegree(), 1); downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java index 40b8e83b56d03..1ccf018a74c3a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java @@ -820,7 +820,7 @@ public void testGetDestinationUrnsFromResponseData() { } @Override - public void testPopulatedGraphServiceGetLineageMultihop() { + public void testPopulatedGraphServiceGetLineageMultihop(boolean attemptMultiHop) { // TODO: Remove this overridden method once the multihop for dGraph is implemented! } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index bd500cd469100..7f0e4294e0cbf 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -57,12 +57,14 @@ public abstract class SearchGraphServiceTestBase extends GraphServiceTestBase { private final IndexConvention _indexConvention = new IndexConventionImpl(null); private final String _indexName = _indexConvention.getIndexName(INDEX_NAME); private ElasticSearchGraphService _client; + private boolean _enableMultiPathSearch = + GraphQueryConfiguration.testDefaults.isEnableMultiPathSearch(); private static final String TAG_RELATIONSHIP = "SchemaFieldTaggedWith"; @BeforeClass public void setup() { - _client = buildService(); + _client = buildService(_enableMultiPathSearch); _client.configure(); } @@ -73,8 +75,10 @@ public void wipe() throws Exception { } @Nonnull - private ElasticSearchGraphService buildService() { + private ElasticSearchGraphService buildService(boolean enableMultiPathSearch) { LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); + GraphQueryConfiguration configuration = GraphQueryConfiguration.testDefaults; + configuration.setEnableMultiPathSearch(enableMultiPathSearch); ESGraphQueryDAO readDAO = new ESGraphQueryDAO( getSearchClient(), @@ -93,10 +97,21 @@ private ElasticSearchGraphService buildService() { @Override @Nonnull - protected GraphService getGraphService() { + protected GraphService getGraphService(boolean enableMultiPathSearch) { + if (enableMultiPathSearch != _enableMultiPathSearch) { + _enableMultiPathSearch = enableMultiPathSearch; + _client = buildService(enableMultiPathSearch); + _client.configure(); + } return _client; } + @Override + @Nonnull + protected GraphService getGraphService() { + return getGraphService(GraphQueryConfiguration.testDefaults.isEnableMultiPathSearch()); + } + @Override protected void syncAfterWrite() throws Exception { SearchTestUtils.syncAfterWrite(getBulkProcessor()); diff --git 
a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java index 44fe5ea8ac9ae..59942f76744da 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java @@ -10,7 +10,9 @@ import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; +import com.linkedin.util.Pair; import java.net.URISyntaxException; +import java.util.stream.Stream; import javax.annotation.Nonnull; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; @@ -49,16 +51,17 @@ public void testDatasetLineage() throws URISyntaxException { Urn.createFromString( "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); - // 1 hops - LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1); - assertEquals(lineageResult.getEntities().size(), 10); - - // 2 hops - lineageResult = lineage(getLineageService(), testUrn, 2); - assertEquals(lineageResult.getEntities().size(), 5); - - // 3 hops - lineageResult = lineage(getLineageService(), testUrn, 3); - assertEquals(lineageResult.getEntities().size(), 12); + Stream> hopsExpectedResultsStream = + Stream.of( + Pair.of(1, 10), // Hop 1 -> 10 results + Pair.of(2, 5), // Hop 2 -> 5 results + Pair.of(3, 12) // Hop 3 -> 12 results + ); + hopsExpectedResultsStream.forEach( + hopsExpectedResults -> { + LineageSearchResult lineageResult = + lineage(getLineageService(), testUrn, hopsExpectedResults.getFirst()); + assertEquals(lineageResult.getEntities().size(), hopsExpectedResults.getSecond()); + }); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java index 5ea58e3416205..f4e8224254530 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java @@ -1,22 +1,46 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.*; import static org.testng.Assert.assertEquals; +import com.linkedin.data.template.SetMode; +import com.linkedin.metadata.query.GroupingCriterion; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; import com.linkedin.metadata.query.SearchFlags; import java.util.Set; import org.testng.annotations.Test; public class SearchUtilsTest { - @Test - public void testApplyDefaultSearchFlags() { - SearchFlags defaultFlags = + private SearchFlags getDefaultSearchFlags() { + return setConvertSchemaFieldsToDatasets( new SearchFlags() .setFulltext(true) .setSkipCache(true) .setSkipAggregates(true) .setMaxAggValues(1) - .setSkipHighlighting(true); + .setSkipHighlighting(true), + true); + } + + private SearchFlags setConvertSchemaFieldsToDatasets(SearchFlags flags, boolean value) { + if (value) { + return flags.setGroupingSpec( + new GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + new GroupingCriterion() + .setBaseEntityType(SCHEMA_FIELD_ENTITY_NAME) + .setGroupingEntityType(DATASET_ENTITY_NAME)))); + } else { + return 
flags.setGroupingSpec(null, SetMode.REMOVE_IF_NULL); + } + } + + @Test + public void testApplyDefaultSearchFlags() { + SearchFlags defaultFlags = getDefaultSearchFlags(); assertEquals( SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), @@ -33,12 +57,14 @@ public void testApplyDefaultSearchFlags() { .setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(false) - .setSkipCache(false) - .setMaxAggValues(2) - .setSkipHighlighting(false), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected no default values"); assertEquals( @@ -51,12 +77,14 @@ public void testApplyDefaultSearchFlags() { .setSkipHighlighting(false), null, defaultFlags), - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(false) - .setSkipCache(false) - .setMaxAggValues(2) - .setSkipHighlighting(true), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(true), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected skip highlight due to query null query"); for (String query : Set.of("*", "")) { assertEquals( @@ -69,94 +97,105 @@ public void testApplyDefaultSearchFlags() { .setSkipHighlighting(false), query, defaultFlags), - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(false) - .setSkipCache(false) - .setMaxAggValues(2) - .setSkipHighlighting(true), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(true), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), String.format("Expected skip highlight due to query string `%s`", query)); } assertEquals( SearchUtils.applyDefaultSearchFlags( new SearchFlags().setFulltext(false), "not empty", defaultFlags), - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipCache(true) - .setMaxAggValues(1) - .setSkipHighlighting(true), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except fulltext"); assertEquals( SearchUtils.applyDefaultSearchFlags( new SearchFlags().setSkipCache(false), "not empty", defaultFlags), - new SearchFlags() - .setFulltext(true) - .setSkipAggregates(true) - .setSkipCache(false) - .setMaxAggValues(1) - .setSkipHighlighting(true), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(false) + .setMaxAggValues(1) + .setSkipHighlighting(true), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except skipCache"); assertEquals( SearchUtils.applyDefaultSearchFlags( new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags), - new SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipCache(true) - .setMaxAggValues(1) - .setSkipHighlighting(true), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + 
SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except skipAggregates"); assertEquals( SearchUtils.applyDefaultSearchFlags( new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags), - new SearchFlags() - .setFulltext(true) - .setSkipAggregates(true) - .setSkipCache(true) - .setMaxAggValues(2) - .setSkipHighlighting(true), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(2) + .setSkipHighlighting(true), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except maxAggValues"); assertEquals( SearchUtils.applyDefaultSearchFlags( new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags() - .setFulltext(true) - .setSkipAggregates(true) - .setSkipCache(true) - .setMaxAggValues(1) - .setSkipHighlighting(false), + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(false), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected all default values except skipHighlighting"); } @Test public void testImmutableDefaults() throws CloneNotSupportedException { - SearchFlags defaultFlags = - new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipAggregates(true) - .setMaxAggValues(1) - .setSkipHighlighting(true); + SearchFlags defaultFlags = getDefaultSearchFlags(); + SearchFlags copyFlags = defaultFlags.copy(); assertEquals( SearchUtils.applyDefaultSearchFlags( + setConvertSchemaFieldsToDatasets( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), + "not empty", + defaultFlags), + setConvertSchemaFieldsToDatasets( new SearchFlags() .setFulltext(false) - .setSkipCache(false) .setSkipAggregates(false) + .setSkipCache(false) .setMaxAggValues(2) .setSkipHighlighting(false), - "not empty", - defaultFlags), - new SearchFlags() - .setFulltext(false) - .setSkipAggregates(false) - .setSkipCache(false) - .setMaxAggValues(2) - .setSkipHighlighting(false), + SearchUtils.convertSchemaFieldToDataset(defaultFlags)), "Expected no default values"); assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); diff --git a/metadata-models/src/main/pegasus/com/linkedin/dataset/FineGrainedLineage.pdl b/metadata-models/src/main/pegasus/com/linkedin/dataset/FineGrainedLineage.pdl index ce72d7c04a3f6..3aa76cc27250c 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/dataset/FineGrainedLineage.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/dataset/FineGrainedLineage.pdl @@ -42,6 +42,9 @@ record FineGrainedLineage { // Other information e.g. user who created this lineage etc. can added here. - // It may be useful to add a "query" field here, but the semantics are tricky. - // To be considered in a future iteration when required. + /** + * The query that was used to generate this lineage. + * Present only if the lineage was generated from a detected query. 
+ */ + query: optional Urn } \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/dataset/Upstream.pdl b/metadata-models/src/main/pegasus/com/linkedin/dataset/Upstream.pdl index c4a9fa1727162..b4c98e4f34724 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/dataset/Upstream.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/dataset/Upstream.pdl @@ -2,6 +2,8 @@ namespace com.linkedin.dataset import com.linkedin.common.AuditStamp import com.linkedin.common.DatasetUrn +import com.linkedin.common.Urn + /** * Upstream lineage information about a dataset including the source reporting the lineage @@ -33,6 +35,7 @@ record Upstream { "updatedOn": "upstreams/*/auditStamp/time" "updatedActor": "upstreams/*/auditStamp/actor" "properties": "upstreams/*/properties" + "via": "upstreams/*/query" } @Searchable = { "fieldName": "upstreams", @@ -50,4 +53,9 @@ record Upstream { * A generic properties bag that allows us to store specific information on this graph edge. */ properties: optional map[string, string] + + /** + * If the lineage is generated by a query, a reference to the query + */ + query: optional Urn } \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/graph/LineageRelationship.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/graph/LineageRelationship.pdl index ad4bd27b4cdae..c25a1cee7db47 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/metadata/graph/LineageRelationship.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/graph/LineageRelationship.pdl @@ -31,7 +31,9 @@ record LineageRelationship { /** * Degree of relationship (number of hops to get to entity) + * Deprecated by degrees. degree field is populated by min(degrees) for backward compatibility. */ + @deprecated degree: int = 1 /** @@ -58,4 +60,11 @@ record LineageRelationship { * Whether this lineage edge is a manual edge. */ isManual: optional boolean + + /** + * The different depths at which this entity is discovered in the lineage graph. + * Marked as optional to maintain backward compatibility, but is filled out by implementations. + * Replaces the deprecated field "degree". + **/ + degrees: optional array[int] } diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingCriterion.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingCriterion.pdl new file mode 100644 index 0000000000000..da0a1c2fd3514 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingCriterion.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.metadata.query + +/** +* +**/ + +record GroupingCriterion { + + /** + * The type of the entity to be grouped. + * e.g. schemaField + * Omitting this field will result in all base entities being grouped. + */ + baseEntityType: optional string + + /** + * The type of the entity to be grouped into. + * e.g. dataset, domain, etc. + */ + groupingEntityType: string +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingSpec.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingSpec.pdl new file mode 100644 index 0000000000000..c4c8a6c0e6bd9 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/GroupingSpec.pdl @@ -0,0 +1,15 @@ +namespace com.linkedin.metadata.query + +/** + * A set of directives to control how results are grouped. 
+ * The underlying generic groupings are nested to allow for further evolution of the grouping spec. + */ + +record GroupingSpec { + + /** + * A list of generic directives to group results by. + **/ + groupingCriteria: array[GroupingCriterion] = [] + +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl index be1a30c7f082c..67f41ea175b51 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/query/SearchFlags.pdl @@ -33,4 +33,9 @@ record SearchFlags { * Whether to request for search suggestions on the _entityName virtualized field */ getSuggestions:optional boolean = false + + /** + * Instructions for grouping results before returning + */ + groupingSpec: optional GroupingSpec } diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/search/LineageSearchEntity.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/search/LineageSearchEntity.pdl index 2e81a63319ae9..e99115893712d 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/metadata/search/LineageSearchEntity.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/search/LineageSearchEntity.pdl @@ -22,6 +22,11 @@ record LineageSearchEntity includes SearchEntity { /** * Degree of relationship (number of hops to get to entity) */ + @deprecated degree: int = 1 + /** + * The degrees of separation (number of hops) between the source and this entity + */ + degrees: array[int] = [] } \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl index 9587775dbed3a..1f4929b878de6 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl @@ -74,4 +74,9 @@ record QueryProperties { } } lastModified: AuditStamp + + /** + * The urn of the DataPlatform where the Query was executed. + */ + dataPlatform: optional Urn } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java index 6f3e1cb278f5f..4da50f47e2feb 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java @@ -8,6 +8,9 @@ public class GraphQueryConfiguration { private long timeoutSeconds; private int batchSize; private int maxResult; + // When set to true, the graph walk (typically in search-across-lineage or scroll-across-lineage) + // will return all paths between the source and destination nodes within the hops limit. 
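// Illustrative aside (not part of this patch): per the deprecation note on
// LineageRelationship above, consumers should read degrees and treat the
// legacy degree as min(degrees). A sketch of that backward-compatible read
// (hypothetical helper, plain Java):

import java.util.Collections;
import java.util.List;

final class DegreeCompatSketch {
  // degrees lists every hop count at which an entity was discovered, e.g.
  // [1, 2] when multi-path search reaches it both directly and via a longer
  // route; the deprecated degree field is populated with the smallest value.
  static int legacyDegree(List<Integer> degrees) {
    return degrees.isEmpty() ? 1 : Collections.min(degrees);
  }
}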
+ private boolean enableMultiPathSearch; public static GraphQueryConfiguration testDefaults; @@ -16,5 +19,6 @@ public class GraphQueryConfiguration { testDefaults.setBatchSize(1000); testDefaults.setTimeoutSeconds(10); testDefaults.setMaxResult(10000); + testDefaults.setEnableMultiPathSearch(true); } } diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 2b202d513c9bf..a7222f2adc3c6 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -226,6 +226,7 @@ elasticsearch: timeoutSeconds: ${ELASTICSEARCH_SEARCH_GRAPH_TIMEOUT_SECONDS:50} # graph dao timeout seconds batchSize: ${ELASTICSEARCH_SEARCH_GRAPH_BATCH_SIZE:1000} # graph dao batch size maxResult: ${ELASTICSEARCH_SEARCH_GRAPH_MAX_RESULT:10000} # graph dao max result size + enableMultiPathSearch: ${ELASTICSEARCH_SEARCH_GRAPH_MULTI_PATH_SEARCH:true} # TODO: Kafka topic convention kafka: @@ -394,4 +395,4 @@ springdoc.api-docs.groups.enabled: true forms: hook: - enabled: {$FORMS_HOOK_ENABLED:true} \ No newline at end of file + enabled: { $FORMS_HOOK_ENABLED:true } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java index 7ff91affdf765..a79bdacfc55e9 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java @@ -48,6 +48,17 @@ static void setUpgradeResult(Urn urn, EntityService entityService) throws URI final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + // Workaround because entity service does not auto-generate the key aspect for us + final MetadataChangeProposal keyProposal = new MetadataChangeProposal(); + final DataHubUpgradeKey upgradeKey = new DataHubUpgradeKey().setId(urn.getId()); + keyProposal.setEntityUrn(urn); + keyProposal.setEntityType(Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + keyProposal.setAspectName(Constants.DATA_HUB_UPGRADE_KEY_ASPECT_NAME); + keyProposal.setAspect(GenericRecordUtils.serializeAspect(upgradeKey)); + keyProposal.setChangeType(ChangeType.UPSERT); + entityService.ingestProposal(keyProposal, auditStamp, false); + + // Ingest the upgrade result final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(urn); upgradeProposal.setEntityType(Constants.DATA_HUB_UPGRADE_ENTITY_NAME); diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index ee45b8921143a..fe16d24e3475a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -1818,6 +1818,11 @@ "type" : "float", "doc" : "The confidence in this lineage between 0 (low confidence) and 1 (high confidence)", "default" : 1.0 + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "The query that was used to generate this lineage. 
\nPresent only if the lineage was generated from a detected query.", + "optional" : true } ] } }, @@ -1986,7 +1991,8 @@ "name" : "DownstreamOf", "properties" : "upstreams/*/properties", "updatedActor" : "upstreams/*/auditStamp/actor", - "updatedOn" : "upstreams/*/auditStamp/time" + "updatedOn" : "upstreams/*/auditStamp/time", + "via" : "upstreams/*/query" }, "Searchable" : { "fieldName" : "upstreams", @@ -2005,6 +2011,11 @@ }, "doc" : "A generic properties bag that allows us to store specific information on this graph edge.", "optional" : true + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "If the lineage is generated by a query, a reference to the query", + "optional" : true } ] }, { "type" : "record", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index 505f44c52d583..55fed125936eb 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -1873,6 +1873,11 @@ "type" : "float", "doc" : "The confidence in this lineage between 0 (low confidence) and 1 (high confidence)", "default" : 1.0 + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "The query that was used to generate this lineage. \nPresent only if the lineage was generated from a detected query.", + "optional" : true } ] } }, @@ -2258,7 +2263,8 @@ "name" : "DownstreamOf", "properties" : "upstreams/*/properties", "updatedActor" : "upstreams/*/auditStamp/actor", - "updatedOn" : "upstreams/*/auditStamp/time" + "updatedOn" : "upstreams/*/auditStamp/time", + "via" : "upstreams/*/query" }, "Searchable" : { "fieldName" : "upstreams", @@ -2277,6 +2283,11 @@ }, "doc" : "A generic properties bag that allows us to store specific information on this graph edge.", "optional" : true + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "If the lineage is generated by a query, a reference to the query", + "optional" : true } ] }, { "type" : "record", @@ -5653,6 +5664,35 @@ "doc" : "Specific entities to recommend" } ] }, "com.linkedin.metadata.query.FreshnessStats", { + "type" : "record", + "name" : "GroupingCriterion", + "namespace" : "com.linkedin.metadata.query", + "doc" : "\n", + "fields" : [ { + "name" : "baseEntityType", + "type" : "string", + "doc" : "The type of the entity to be grouped.\ne.g. schemaField\nOmitting this field will result in all base entities being grouped.", + "optional" : true + }, { + "name" : "groupingEntityType", + "type" : "string", + "doc" : "The type of the entity to be grouped into.\ne.g. dataset, domain, etc." 
+ } ] + }, { + "type" : "record", + "name" : "GroupingSpec", + "namespace" : "com.linkedin.metadata.query", + "doc" : "A set of directives to control how results are grouped.\nThe underlying generic groupings are nested to allow for further evolution of the grouping spec.", + "fields" : [ { + "name" : "groupingCriteria", + "type" : { + "type" : "array", + "items" : "GroupingCriterion" + }, + "doc" : "A list of generic directives to group results by.\n", + "default" : [ ] + } ] + }, { "type" : "record", "name" : "ListResult", "namespace" : "com.linkedin.metadata.query", @@ -5740,6 +5780,11 @@ "doc" : "Whether to request for search suggestions on the _entityName virtualized field", "default" : false, "optional" : true + }, { + "name" : "groupingSpec", + "type" : "GroupingSpec", + "doc" : "Instructions for grouping results before returning", + "optional" : true } ] }, { "type" : "enum", @@ -6092,7 +6137,16 @@ "name" : "degree", "type" : "int", "doc" : "Degree of relationship (number of hops to get to entity)", - "default" : 1 + "default" : 1, + "deprecated" : true + }, { + "name" : "degrees", + "type" : { + "type" : "array", + "items" : "int" + }, + "doc" : "The degrees of separation (number of hops) between the source and this entity ", + "default" : [ ] } ] } }, diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index e8c15d1b4ca04..f9f1999923ec0 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -1560,6 +1560,11 @@ "type" : "float", "doc" : "The confidence in this lineage between 0 (low confidence) and 1 (high confidence)", "default" : 1.0 + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "The query that was used to generate this lineage. \nPresent only if the lineage was generated from a detected query.", + "optional" : true } ] } }, @@ -1728,7 +1733,8 @@ "name" : "DownstreamOf", "properties" : "upstreams/*/properties", "updatedActor" : "upstreams/*/auditStamp/actor", - "updatedOn" : "upstreams/*/auditStamp/time" + "updatedOn" : "upstreams/*/auditStamp/time", + "via" : "upstreams/*/query" }, "Searchable" : { "fieldName" : "upstreams", @@ -1747,6 +1753,11 @@ }, "doc" : "A generic properties bag that allows us to store specific information on this graph edge.", "optional" : true + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "If the lineage is generated by a query, a reference to the query", + "optional" : true } ] }, { "type" : "record", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json index 9aa40edd0b118..056ca0e4da206 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json @@ -141,8 +141,9 @@ }, { "name" : "degree", "type" : "int", - "doc" : "Degree of relationship (number of hops to get to entity)", - "default" : 1 + "doc" : "Degree of relationship (number of hops to get to entity)\nDeprecated by degrees. 
degree field is populated by min(degrees) for backward compatibility.", + "default" : 1, + "deprecated" : true }, { "name" : "createdOn", "type" : "long", @@ -168,6 +169,14 @@ "type" : "boolean", "doc" : "Whether this lineage edge is a manual edge.", "optional" : true + }, { + "name" : "degrees", + "type" : { + "type" : "array", + "items" : "int" + }, + "doc" : "The different depths at which this entity is discovered in the lineage graph.\nMarked as optional to maintain backward compatibility, but is filled out by implementations. \nReplaces the deprecated field \"degree\".\n", + "optional" : true } ] } }, diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index 67f70d40e010c..88dad7e49152a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -1560,6 +1560,11 @@ "type" : "float", "doc" : "The confidence in this lineage between 0 (low confidence) and 1 (high confidence)", "default" : 1.0 + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "The query that was used to generate this lineage. \nPresent only if the lineage was generated from a detected query.", + "optional" : true } ] } }, @@ -1728,7 +1733,8 @@ "name" : "DownstreamOf", "properties" : "upstreams/*/properties", "updatedActor" : "upstreams/*/auditStamp/actor", - "updatedOn" : "upstreams/*/auditStamp/time" + "updatedOn" : "upstreams/*/auditStamp/time", + "via" : "upstreams/*/query" }, "Searchable" : { "fieldName" : "upstreams", @@ -1747,6 +1753,11 @@ }, "doc" : "A generic properties bag that allows us to store specific information on this graph edge.", "optional" : true + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "If the lineage is generated by a query, a reference to the query", + "optional" : true } ] }, { "type" : "record", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index 4c8cd1f20d476..4d34126cd59fc 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -1873,6 +1873,11 @@ "type" : "float", "doc" : "The confidence in this lineage between 0 (low confidence) and 1 (high confidence)", "default" : 1.0 + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "The query that was used to generate this lineage. 
\nPresent only if the lineage was generated from a detected query.", + "optional" : true } ] } }, @@ -2258,7 +2263,8 @@ "name" : "DownstreamOf", "properties" : "upstreams/*/properties", "updatedActor" : "upstreams/*/auditStamp/actor", - "updatedOn" : "upstreams/*/auditStamp/time" + "updatedOn" : "upstreams/*/auditStamp/time", + "via" : "upstreams/*/query" }, "Searchable" : { "fieldName" : "upstreams", @@ -2277,6 +2283,11 @@ }, "doc" : "A generic properties bag that allows us to store specific information on this graph edge.", "optional" : true + }, { + "name" : "query", + "type" : "com.linkedin.common.Urn", + "doc" : "If the lineage is generated by a query, a reference to the query", + "optional" : true } ] }, { "type" : "record", diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 2f470dca01f2a..b1b24ac97f0b8 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -381,6 +381,7 @@ public LineageSearchResult searchAcrossLineage( * @param endTimeMillis end time to filter to * @param startTimeMillis start time to filter from * @param searchFlags configuration flags for the search request + * @param authentication the authentication context for the request * @return a {@link SearchResult} that contains a list of matched documents and related search * result metadata */ diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java index d27b0ed303972..458b23317c6c8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java @@ -17,4 +17,31 @@ public class Edge { @EqualsAndHashCode.Exclude private Long updatedOn; @EqualsAndHashCode.Exclude private Urn updatedActor; @EqualsAndHashCode.Exclude private Map<String, Object> properties; + // The entity who owns the lifecycle of this edge + @EqualsAndHashCode.Exclude private Urn lifecycleOwner; + // An entity through which the edge between source and destination is created + @EqualsAndHashCode.Include private Urn via; + + // For backwards compatibility + public Edge( + Urn source, + Urn destination, + String relationshipType, + Long createdOn, + Urn createdActor, + Long updatedOn, + Urn updatedActor, + Map<String, Object> properties) { + this( + source, + destination, + relationshipType, + createdOn, + createdActor, + updatedOn, + updatedActor, + properties, + null, + null); + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java index 2afe907399745..8a08835ab6896 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java @@ -53,6 +53,17 @@ private static List<Map<String, Object>> getPropertiesList( return (List<Map<String, Object>>) value; } + @Nullable + private static List<Urn> getViaList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { + if (path == null) { + return null; + } + final PathSpec viaPathSpec = new PathSpec(path.split("/")); + final Object value = RecordUtils.getNullableFieldValue(aspect, viaPathSpec); + return
(List<Urn>) value; + } + @Nullable private static boolean isValueListValid( @Nullable final List<?> entryList, final int valueListSize) { @@ -94,6 +105,15 @@ private static Map<String, Object> getProperties( return null; } + @Nullable + private static Urn getVia( + @Nullable final List<Urn> viaList, final int index, final int valueListSize) { + if (isValueListValid(viaList, valueListSize)) { + return viaList.get(index); + } + return null; + } + /** * Used to create new edges for the graph db, adding all the metadata associated with each edge * based on the aspect. Returns a list of Edges to be consumed by the graph service. */ @@ -116,12 +136,14 @@ public static List<Edge> extractGraphEdges( extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); final String propertiesPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); + final String viaNodePath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getVia(); final List<Long> createdOnList = getTimestampList(createdOnPath, aspect); final List<Urn> createdActorList = getActorList(createdActorPath, aspect); final List<Long> updatedOnList = getTimestampList(updatedOnPath, aspect); final List<Urn> updatedActorList = getActorList(updatedActorPath, aspect); final List<Map<String, Object>> propertiesList = getPropertiesList(propertiesPath, aspect); + final List<Urn> viaList = getViaList(viaNodePath, aspect); int index = 0; for (Object fieldValue : extractedFieldsEntry.getValue()) { @@ -146,6 +168,11 @@ public static List<Edge> extractGraphEdges( ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) : null; + Urn viaNode = + viaNodePath != null + ? getVia(viaList, index, extractedFieldsEntry.getValue().size()) + : null; + SystemMetadata systemMetadata; if (isNewAspectVersion) { systemMetadata = event.hasSystemMetadata() ? event.getSystemMetadata() : null; @@ -177,7 +204,9 @@ public static List<Edge> extractGraphEdges( createdActor, updatedOn, updatedActor, - properties)); + properties, + null, + viaNode)); } catch (URISyntaxException e) { log.error("Invalid destination urn: {}", fieldValue, e); } @@ -198,6 +227,8 @@ public static Edge mergeEdges(@Nonnull final Edge oldEdge, @Nonnull final Edge n null, newEdge.getUpdatedOn(), newEdge.getUpdatedActor(), - newEdge.getProperties()); + newEdge.getProperties(), + oldEdge.getLifecycleOwner(), + oldEdge.getVia()); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java index 0c6f8a0d65d5c..3c54e987fec35 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java @@ -17,15 +17,17 @@ public RelatedEntities( @Nonnull String relationshipType, @Nonnull String sourceUrn, @Nonnull String destinationUrn, - @Nonnull RelationshipDirection relationshipDirection) { + @Nonnull RelationshipDirection relationshipDirection, + String viaEntity) { super( relationshipType, - relationshipDirection == RelationshipDirection.OUTGOING ? destinationUrn : sourceUrn); + relationshipDirection == RelationshipDirection.OUTGOING ?
destinationUrn : sourceUrn, + viaEntity); this.sourceUrn = sourceUrn; this.destinationUrn = destinationUrn; } public RelatedEntity asRelatedEntity() { - return new RelatedEntity(relationshipType, urn); + return new RelatedEntity(relationshipType, urn, via); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java index be1b55655f671..39c455a3fbd74 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java @@ -11,4 +11,17 @@ public class RelatedEntity { /** Urn associated with the related entity. */ String urn; + + /** Urn associated with an entity through which this relationship is established */ + String via; + + /** + * Constructor for backwards compatibility + * + * @param relationshipType + * @param urn + */ + public RelatedEntity(String relationshipType, String urn) { + this(relationshipType, urn, null); + } } diff --git a/smoke-test/requirements.txt b/smoke-test/requirements.txt index 59d5bc2600400..e37de9caddc69 100644 --- a/smoke-test/requirements.txt +++ b/smoke-test/requirements.txt @@ -6,4 +6,5 @@ tenacity slack-sdk==3.18.1 aiohttp joblib -pytest-xdist \ No newline at end of file +pytest-xdist +networkx \ No newline at end of file diff --git a/smoke-test/tests/lineage/__init__.py b/smoke-test/tests/lineage/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/lineage/test_lineage.py b/smoke-test/tests/lineage/test_lineage.py new file mode 100644 index 0000000000000..52d61d666c7d9 --- /dev/null +++ b/smoke-test/tests/lineage/test_lineage.py @@ -0,0 +1,991 @@ +import logging +import time +from enum import Enum +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union + +import datahub.emitter.mce_builder as builder +import networkx as nx +import pytest +from datahub.cli.cli_utils import get_url_and_token +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import ( + DatahubClientConfig, + DataHubGraph, + get_default_graph, +) +from datahub.metadata.schema_classes import ( + AuditStampClass, + ChangeAuditStampsClass, + ChartInfoClass, + DataFlowInfoClass, + DataJobInfoClass, + DataJobInputOutputClass, + DatasetLineageTypeClass, + DatasetPropertiesClass, + EdgeClass, +) +from datahub.metadata.schema_classes import ( + FineGrainedLineageClass as FineGrainedLineage, +) +from datahub.metadata.schema_classes import ( + FineGrainedLineageDownstreamTypeClass as FineGrainedLineageDownstreamType, +) +from datahub.metadata.schema_classes import ( + FineGrainedLineageUpstreamTypeClass as FineGrainedLineageUpstreamType, +) +from datahub.metadata.schema_classes import ( + OtherSchemaClass, + QueryLanguageClass, + QueryPropertiesClass, + QuerySourceClass, + QueryStatementClass, + SchemaFieldClass, + SchemaFieldDataTypeClass, + SchemaMetadataClass, + StringTypeClass, + UpstreamClass, + UpstreamLineageClass, +) +from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.urn import Urn +from pydantic import BaseModel, validator +from tests.utils import ingest_file_via_rest, wait_for_writes_to_sync + +logger = logging.getLogger(__name__) + + +class DeleteAgent: + def delete_entity(self, urn: str) -> None: + pass + + +class DataHubGraphDeleteAgent(DeleteAgent): + def __init__(self, graph: 
DataHubGraph): + self.graph = graph + + def delete_entity(self, urn: str) -> None: + self.graph.delete_entity(urn, hard=True) + + +class DataHubConsoleDeleteAgent(DeleteAgent): + def delete_entity(self, urn: str) -> None: + print(f"Would delete {urn}") + + +class DataHubConsoleEmitter: + def emit_mcp(self, mcp: MetadataChangeProposalWrapper) -> None: + print(mcp) + + +INFINITE_HOPS: int = -1 + + +@pytest.mark.dependency(depends="wait_for_healthchecks") +def ingest_tableau_cll_via_rest(wait_for_healthchecks) -> None: + ingest_file_via_rest( + "tests/lineage/tableau_cll_mcps.json", + ) + yield + + +def search_across_lineage( + graph: DataHubGraph, + main_entity: str, + hops: int = INFINITE_HOPS, + direction: str = "UPSTREAM", + convert_schema_fields_to_datasets: bool = True, +): + def _explain_sal_result(result: dict) -> str: + explain = "" + entities = [ + x["entity"]["urn"] for x in result["searchAcrossLineage"]["searchResults"] + ] + number_of_results = len(entities) + explain += f"Number of results: {number_of_results}\n" + explain += "Entities: " + try: + for e in entities: + explain += f"\t{e.replace('urn:li:','')}\n" + for entity in entities: + paths = [ + x["paths"][0]["path"] + for x in result["searchAcrossLineage"]["searchResults"] + if x["entity"]["urn"] == entity + ] + explain += f"Paths for entity {entity}: " + for path in paths: + explain += ( + "\t" + + " -> ".join( + [ + x["urn"] + .replace("urn:li:schemaField", "field") + .replace("urn:li:dataset", "dataset") + .replace("urn:li:dataPlatform", "platform") + for x in path + ] + ) + + "\n" + ) + except Exception: + # breakpoint() + pass + return explain + + variable: dict[str, Any] = { + "input": ( + { + "urn": main_entity, + "query": "*", + "direction": direction, + "searchFlags": { + "groupingSpec": { + "groupingCriteria": [ + { + "baseEntityType": "SCHEMA_FIELD", + "groupingEntityType": "DATASET", + }, + ] + }, + "skipCache": True, + }, + } + if convert_schema_fields_to_datasets + else { + "urn": main_entity, + "query": "*", + "direction": direction, + "searchFlags": { + "skipCache": True, + }, + } + ) + } + if hops != INFINITE_HOPS: + variable["input"].update( + { + "orFilters": [ + { + "and": [ + { + "field": "degree", + "condition": "EQUAL", + "values": ["{}".format(hops)], + "negated": False, + } + ] + } + ] + } + ) + result = graph.execute_graphql( + """ + query($input: SearchAcrossLineageInput!) 
{ + searchAcrossLineage(input: $input) + { + searchResults { + entity { + urn + } + paths { + path { + urn + } + } + } + } + } + """, + variables=variable, + ) + print(f"Query -> Entity {main_entity} with hops {hops} and direction {direction}") + print(result) + print(_explain_sal_result(result)) + return result + + +class Direction(Enum): + UPSTREAM = "UPSTREAM" + DOWNSTREAM = "DOWNSTREAM" + + def opposite(self): + if self == Direction.UPSTREAM: + return Direction.DOWNSTREAM + else: + return Direction.UPSTREAM + + +class Path(BaseModel): + path: List[str] + + def add_node(self, node: str) -> None: + self.path.append(node) + + def __hash__(self) -> int: + return ".".join(self.path).__hash__() + + +class LineageExpectation(BaseModel): + direction: Direction + main_entity: str + hops: int + impacted_entities: Dict[str, List[Path]] + + +class ImpactQuery(BaseModel): + main_entity: str + hops: int + direction: Direction + upconvert_schema_fields_to_datasets: bool + + def __hash__(self) -> int: + raw_string = ( + f"{self.main_entity}{self.hops}{self.direction}" + + f"{self.upconvert_schema_fields_to_datasets}" + ) + return raw_string.__hash__() + + +class ScenarioExpectation: + """ + This class stores the expectations for the lineage of a scenario. It is used + to store the pre-materialized expectations for all datasets and schema + fields across all hops and directions possible. This makes it easy to check + that the results of a lineage query match the expectations. + """ + + def __init__(self): + self._graph = nx.DiGraph() + + def __simplify(self, urn_or_list: Union[str, List[str]]) -> str: + if isinstance(urn_or_list, list): + return ",".join([self.__simplify(x) for x in urn_or_list]) + else: + return ( + urn_or_list.replace("urn:li:schemaField", "F") + .replace("urn:li:dataset", "D") + .replace("urn:li:dataPlatform", "P") + .replace("urn:li:query", "Q") + ) + + def extend_impacted_entities( + self, + direction: Direction, + parent_entity: str, + child_entity: str, + path_extension: Optional[List[str]] = None, + ) -> None: + via_node = path_extension[0] if path_extension else None + if via_node: + self._graph.add_edge(parent_entity, child_entity, via=via_node) + else: + self._graph.add_edge(parent_entity, child_entity) + + def generate_query_expectation_pairs( + self, max_hops: int + ) -> Iterable[Tuple[ImpactQuery, LineageExpectation]]: + upconvert_options = [ + True + ] # TODO: Add False once search-across-lineage supports returning schema fields + for main_entity in self._graph.nodes(): + for direction in [Direction.UPSTREAM, Direction.DOWNSTREAM]: + for upconvert_schema_fields_to_datasets in upconvert_options: + possible_hops = [h for h in range(1, max_hops)] + [INFINITE_HOPS] + for hops in possible_hops: + query = ImpactQuery( + main_entity=main_entity, + hops=hops, + direction=direction, + upconvert_schema_fields_to_datasets=upconvert_schema_fields_to_datasets, + ) + yield query, self.get_expectation_for_query(query) + + def get_expectation_for_query(self, query: ImpactQuery) -> LineageExpectation: + graph_to_walk = ( + self._graph + if query.direction == Direction.DOWNSTREAM + else self._graph.reverse() + ) + entity_paths = nx.shortest_path(graph_to_walk, source=query.main_entity) + lineage_expectation = LineageExpectation( + direction=query.direction, + main_entity=query.main_entity, + hops=query.hops, + impacted_entities={}, + ) + for entity, paths in entity_paths.items(): + if entity == query.main_entity: + continue + if query.hops != INFINITE_HOPS and len(paths) != ( + query.hops 
+ 1 + ): # +1 because the path includes the main entity + print( + f"Skipping {entity} because it is less than or more than {query.hops} hops away" + ) + continue + path_graph = nx.path_graph(paths) + expanded_path: List[str] = [] + via_entity = None + for ea in path_graph.edges(): + expanded_path.append(ea[0]) + if "via" in graph_to_walk.edges[ea[0], ea[1]]: + via_entity = graph_to_walk.edges[ea[0], ea[1]]["via"] + expanded_path.append(via_entity) + if via_entity and not via_entity.startswith( + "urn:li:query" + ): # Transient nodes like queries are not included as impacted entities + if via_entity not in lineage_expectation.impacted_entities: + lineage_expectation.impacted_entities[via_entity] = [] + via_path = Path(path=[x for x in expanded_path]) + if via_path not in lineage_expectation.impacted_entities[via_entity]: + lineage_expectation.impacted_entities[via_entity].append( + Path(path=[x for x in expanded_path]) + ) + + expanded_path.append(paths[-1]) + if entity not in lineage_expectation.impacted_entities: + lineage_expectation.impacted_entities[entity] = [] + lineage_expectation.impacted_entities[entity].append( + Path(path=expanded_path) + ) + + if query.upconvert_schema_fields_to_datasets: + entries_to_add: Dict[str, List[Path]] = {} + entries_to_remove = [] + for impacted_entity in lineage_expectation.impacted_entities: + if impacted_entity.startswith("urn:li:schemaField"): + impacted_dataset_entity = Urn.create_from_string( + impacted_entity + ).entity_ids[0] + if impacted_dataset_entity in entries_to_add: + entries_to_add[impacted_dataset_entity].extend( + lineage_expectation.impacted_entities[impacted_entity] + ) + else: + entries_to_add[ + impacted_dataset_entity + ] = lineage_expectation.impacted_entities[impacted_entity] + entries_to_remove.append(impacted_entity) + for impacted_entity in entries_to_remove: + del lineage_expectation.impacted_entities[impacted_entity] + lineage_expectation.impacted_entities.update(entries_to_add) + return lineage_expectation + + +class Scenario(BaseModel): + class Config: + arbitrary_types_allowed = True + + class LineageStyle(Enum): + DATASET_QUERY_DATASET = "DATASET_QUERY_DATASET" + DATASET_JOB_DATASET = "DATASET_JOB_DATASET" + + lineage_style: LineageStyle + default_platform: str = "mysql" + default_transformation_platform: str = "airflow" + hop_platform_map: Dict[int, str] = {} + hop_transformation_map: Dict[int, str] = {} + num_hops: int = 1 + default_datasets_at_each_hop: int = 2 + default_dataset_fanin: int = 2 # Number of datasets that feed into a transformation + default_column_fanin: int = 2 # Number of columns that feed into a transformation + default_dataset_fanout: int = ( + 1 # Number of datasets that a transformation feeds into + ) + default_column_fanout: int = 1 # Number of columns that a transformation feeds into + # num_upstream_datasets: int = 2 + # num_downstream_datasets: int = 1 + default_dataset_prefix: str = "librarydb." 
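+ # Note on naming: get_dataset_urn below derives dataset names from these defaults, e.g.
+ # hop 0 / index 1 resolves to "librarydb.layer_0.1" on mysql, i.e. the urn
+ # urn:li:dataset:(urn:li:dataPlatform:mysql,librarydb.layer_0.1,PROD).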
+ hop_dataset_prefix_map: Dict[int, str] = {} + query_id: str = "guid-guid-guid" + query_string: str = "SELECT * FROM foo" + transformation_job: str = "job1" + transformation_flow: str = "flow1" + _generated_urns: Set[str] = set() + expectations: Optional[ScenarioExpectation] = None + + @validator("expectations", pre=True, always=True) + def expectations_validator( + cls, v: Optional[ScenarioExpectation] + ) -> ScenarioExpectation: + if v is None: + return ScenarioExpectation() + else: + return v + + def get_column_name(self, column_index: int) -> str: + return f"column_{column_index}" + + def set_upstream_dataset_prefix(self, dataset): + self.upstream_dataset_prefix = dataset + + def set_downstream_dataset_prefix(self, dataset): + self.downstream_dataset_prefix = dataset + + def set_transformation_query(self, query: str) -> None: + self.transformation_query = query + + def set_transformation_job(self, job: str) -> None: + self.transformation_job = job + + def set_transformation_flow(self, flow: str) -> None: + self.transformation_flow = flow + + def get_transformation_job_urn(self, hop_index: int) -> str: + return builder.make_data_job_urn( + orchestrator=self.default_transformation_platform, + flow_id=f"layer_{hop_index}_{self.transformation_flow}", + job_id=self.transformation_job, + cluster="PROD", + ) + + def get_transformation_query_urn(self, hop_index: int = 0) -> str: + return f"urn:li:query:{self.query_id}_{hop_index}" # TODO - add hop index to query id + + def get_transformation_flow_urn(self, hop_index: int) -> str: + return builder.make_data_flow_urn( + orchestrator=self.default_transformation_platform, + flow_id=f"layer_{hop_index}_{self.transformation_flow}", + cluster="PROD", + ) + + def get_upstream_dataset_urns(self, hop_index: int) -> List[str]: + return [ + self.get_dataset_urn(hop_index=hop_index, index=i) + for i in range(self.default_dataset_fanin) + ] + + def get_dataset_urn(self, hop_index: int, index: int) -> str: + platform = self.hop_platform_map.get(hop_index, self.default_platform) + prefix = self.hop_dataset_prefix_map.get( + index, f"{self.default_dataset_prefix}layer_{hop_index}." 
+ ) + return builder.make_dataset_urn(platform, f"{prefix}{index}") + + def get_column_urn( + self, hop_index: int, dataset_index: int, column_index: int = 0 + ) -> str: + return builder.make_schema_field_urn( + self.get_dataset_urn(hop_index, dataset_index), + self.get_column_name(column_index), + ) + + def get_upstream_column_urn( + self, hop_index: int, dataset_index: int, column_index: int = 0 + ) -> str: + return builder.make_schema_field_urn( + self.get_dataset_urn(hop_index, dataset_index), + self.get_column_name(column_index), + ) + + def get_downstream_column_urn( + self, hop_index: int, dataset_index: int, column_index: int = 0 + ) -> str: + return builder.make_schema_field_urn( + self.get_dataset_urn(hop_index + 1, dataset_index), + self.get_column_name(column_index), + ) + + def get_downstream_dataset_urns(self, hop_index: int) -> List[str]: + return [ + self.get_dataset_urn(hop_index + 1, i) + for i in range(self.default_dataset_fanout) + ] + + def get_lineage_mcps(self) -> Iterable[MetadataChangeProposalWrapper]: + for hop_index in range(0, self.num_hops): + yield from self.get_lineage_mcps_for_hop(hop_index) + + def get_lineage_mcps_for_hop( + self, hop_index: int + ) -> Iterable[MetadataChangeProposalWrapper]: + if self.lineage_style == Scenario.LineageStyle.DATASET_JOB_DATASET: + fine_grained_lineage = FineGrainedLineage( + upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, + upstreams=[ + self.get_upstream_column_urn(hop_index, dataset_index, 0) + for dataset_index in range(self.default_dataset_fanin) + ], + downstreamType=FineGrainedLineageDownstreamType.FIELD, + downstreams=[ + self.get_downstream_column_urn(hop_index, dataset_index, 0) + for dataset_index in range(self.default_dataset_fanout) + ], + ) + datajob_io = DataJobInputOutputClass( + inputDatasets=self.get_upstream_dataset_urns(hop_index), + outputDatasets=self.get_downstream_dataset_urns(hop_index), + inputDatajobs=[], # not supporting job -> job lineage for now + fineGrainedLineages=[fine_grained_lineage], + ) + yield MetadataChangeProposalWrapper( + entityUrn=self.get_transformation_job_urn(hop_index), + aspect=datajob_io, + ) + + # Add field level expectations + for upstream_field_urn in fine_grained_lineage.upstreams or []: + for downstream_field_urn in fine_grained_lineage.downstreams or []: + self.expectations.extend_impacted_entities( + Direction.DOWNSTREAM, + upstream_field_urn, + downstream_field_urn, + path_extension=[ + self.get_transformation_job_urn(hop_index), + downstream_field_urn, + ], + ) + + # Add table level expectations + for upstream_dataset_urn in datajob_io.inputDatasets: + # No path extension, because we don't use via nodes for dataset -> dataset edges + self.expectations.extend_impacted_entities( + Direction.DOWNSTREAM, + upstream_dataset_urn, + self.get_transformation_job_urn(hop_index), + ) + for downstream_dataset_urn in datajob_io.outputDatasets: + self.expectations.extend_impacted_entities( + Direction.DOWNSTREAM, + self.get_transformation_job_urn(hop_index), + downstream_dataset_urn, + ) + + if self.lineage_style == Scenario.LineageStyle.DATASET_QUERY_DATASET: + # we emit upstream lineage from the downstream dataset + for downstream_dataset_index in range(self.default_dataset_fanout): + mcp_entity_urn = self.get_dataset_urn( + hop_index + 1, downstream_dataset_index + ) + fine_grained_lineages = [ + FineGrainedLineage( + upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, + upstreams=[ + self.get_upstream_column_urn( + hop_index, d_i, upstream_col_index + ) + for d_i in 
range(self.default_dataset_fanin) + ], + downstreamType=FineGrainedLineageDownstreamType.FIELD, + downstreams=[ + self.get_downstream_column_urn( + hop_index, + downstream_dataset_index, + downstream_col_index, + ) + for downstream_col_index in range( + self.default_column_fanout + ) + ], + query=self.get_transformation_query_urn(hop_index), + ) + for upstream_col_index in range(self.default_column_fanin) + ] + upstream_lineage = UpstreamLineageClass( + upstreams=[ + UpstreamClass( + dataset=self.get_dataset_urn(hop_index, i), + type=DatasetLineageTypeClass.TRANSFORMED, + query=self.get_transformation_query_urn(hop_index), + ) + for i in range(self.default_dataset_fanin) + ], + fineGrainedLineages=fine_grained_lineages, + ) + for fine_grained_lineage in fine_grained_lineages: + # Add field level expectations + for upstream_field_urn in fine_grained_lineage.upstreams or []: + for downstream_field_urn in ( + fine_grained_lineage.downstreams or [] + ): + self.expectations.extend_impacted_entities( + Direction.DOWNSTREAM, + upstream_field_urn, + downstream_field_urn, + path_extension=[ + self.get_transformation_query_urn(hop_index), + downstream_field_urn, + ], + ) + + # Add table level expectations + for upstream_dataset in upstream_lineage.upstreams: + self.expectations.extend_impacted_entities( + Direction.DOWNSTREAM, + upstream_dataset.dataset, + mcp_entity_urn, + path_extension=[ + self.get_transformation_query_urn(hop_index), + mcp_entity_urn, + ], + ) + + yield MetadataChangeProposalWrapper( + entityUrn=mcp_entity_urn, + aspect=upstream_lineage, + ) + + def get_entity_mcps(self) -> Iterable[MetadataChangeProposalWrapper]: + for hop_index in range( + 0, self.num_hops + 1 + ): # we generate entities with last hop inclusive + for mcp in self.get_entity_mcps_for_hop(hop_index): + assert mcp.entityUrn + self._generated_urns.add(mcp.entityUrn) + yield mcp + + def get_entity_mcps_for_hop( + self, hop_index: int + ) -> Iterable[MetadataChangeProposalWrapper]: + if self.lineage_style == Scenario.LineageStyle.DATASET_JOB_DATASET: + # Construct the DataJobInfo aspect with the job -> flow lineage. + dataflow_urn = self.get_transformation_flow_urn(hop_index) + + dataflow_info = DataFlowInfoClass( + name=self.transformation_flow.title() + " Flow" + ) + + dataflow_info_mcp = MetadataChangeProposalWrapper( + entityUrn=dataflow_urn, + aspect=dataflow_info, + ) + yield dataflow_info_mcp + + datajob_info = DataJobInfoClass( + name=self.transformation_job.title() + " Job", + type="AIRFLOW", + flowUrn=dataflow_urn, + ) + + # Construct a MetadataChangeProposalWrapper object with the DataJobInfo aspect. + # NOTE: This will overwrite all of the existing dataJobInfo aspect information associated with this job. 
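+ # The flowUrn set above is what ties this job to its parent flow in the graph; the
+ # dataset-level edges for the job are emitted separately as DataJobInputOutput in
+ # get_lineage_mcps_for_hop.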
+ datajob_info_mcp = MetadataChangeProposalWrapper( + entityUrn=self.get_transformation_job_urn(hop_index), + aspect=datajob_info, + ) + yield datajob_info_mcp + + if self.lineage_style == Scenario.LineageStyle.DATASET_QUERY_DATASET: + query_urn = self.get_transformation_query_urn(hop_index=hop_index) + + fake_auditstamp = AuditStampClass( + time=int(time.time() * 1000), + actor="urn:li:corpuser:datahub", + ) + + query_properties = QueryPropertiesClass( + statement=QueryStatementClass( + value=self.query_string, + language=QueryLanguageClass.SQL, + ), + source=QuerySourceClass.SYSTEM, + created=fake_auditstamp, + lastModified=fake_auditstamp, + ) + + query_info_mcp = MetadataChangeProposalWrapper( + entityUrn=query_urn, + aspect=query_properties, + ) + yield query_info_mcp + # Generate schema and properties mcps for all datasets + for dataset_index in range(self.default_datasets_at_each_hop): + dataset_urn = DatasetUrn.from_string( + self.get_dataset_urn(hop_index, dataset_index) + ) + yield from MetadataChangeProposalWrapper.construct_many( + entityUrn=str(dataset_urn), + aspects=[ + SchemaMetadataClass( + schemaName=str(dataset_urn), + platform=builder.make_data_platform_urn(dataset_urn.platform), + version=0, + hash="", + platformSchema=OtherSchemaClass(rawSchema=""), + fields=[ + SchemaFieldClass( + fieldPath=self.get_column_name(i), + type=SchemaFieldDataTypeClass(type=StringTypeClass()), + nativeDataType="string", + ) + for i in range(self.default_column_fanin) + ], + ), + DatasetPropertiesClass( + name=dataset_urn.name, + ), + ], + ) + + def cleanup(self, delete_agent: DeleteAgent) -> None: + """Delete all entities created by this scenario.""" + for urn in self._generated_urns: + delete_agent.delete_entity(urn) + + def generate_expectation(self, query: ImpactQuery) -> LineageExpectation: + return self.expectations.generate_query_expectation_pairs(query) + + def test_expectation(self, graph: DataHubGraph) -> bool: + print("Testing expectation...") + try: + for hop_index in range(self.num_hops): + for dataset_urn in self.get_upstream_dataset_urns(hop_index): + assert graph.exists(dataset_urn) is True + for dataset_urn in self.get_downstream_dataset_urns(hop_index): + assert graph.exists(dataset_urn) is True + + if self.lineage_style == Scenario.LineageStyle.DATASET_JOB_DATASET: + assert graph.exists(self.get_transformation_job_urn(hop_index)) is True + assert graph.exists(self.get_transformation_flow_urn(hop_index)) is True + + if self.lineage_style == Scenario.LineageStyle.DATASET_QUERY_DATASET: + assert ( + graph.exists(self.get_transformation_query_urn(hop_index)) is True + ) + + wait_for_writes_to_sync() # Wait for the graph to update + # We would like to check that lineage is correct for all datasets and schema fields for all values of hops and for all directions of lineage exploration + # Since we already have expectations stored for all datasets and schema_fields, we can just check that the results match the expectations + + for ( + query, + expectation, + ) in self.expectations.generate_query_expectation_pairs(self.num_hops): + impacted_entities_expectation = set( + [x for x in expectation.impacted_entities.keys()] + ) + if len(impacted_entities_expectation) == 0: + continue + result = search_across_lineage( + graph, + query.main_entity, + query.hops, + query.direction.value, + query.upconvert_schema_fields_to_datasets, + ) + impacted_entities = set( + [ + x["entity"]["urn"] + for x in result["searchAcrossLineage"]["searchResults"] + ] + ) + try: + assert ( + impacted_entities 
== impacted_entities_expectation + ), f"Expected impacted entities to be {impacted_entities_expectation}, found {impacted_entities}" + except Exception: + # breakpoint() + raise + search_results = result["searchAcrossLineage"]["searchResults"] + for impacted_entity in impacted_entities: + # breakpoint() + impacted_entity_paths: List[Path] = [] + # breakpoint() + entity_paths_response = [ + x["paths"] + for x in search_results + if x["entity"]["urn"] == impacted_entity + ] + for path_response in entity_paths_response: + for p in path_response: + q = p["path"] + impacted_entity_paths.append( + Path(path=[x["urn"] for x in q]) + ) + # if len(impacted_entity_paths) > 1: + # breakpoint() + try: + assert len(impacted_entity_paths) == len( + expectation.impacted_entities[impacted_entity] + ), f"Expected length of impacted entity paths to be {len(expectation.impacted_entities[impacted_entity])}, found {len(impacted_entity_paths)}" + assert set(impacted_entity_paths) == set( + expectation.impacted_entities[impacted_entity] + ), f"Expected impacted entity paths to be {expectation.impacted_entities[impacted_entity]}, found {impacted_entity_paths}" + except Exception: + breakpoint() + raise + # for i in range(len(impacted_entity_paths)): + # assert impacted_entity_paths[i].path == expectation.impacted_entities[impacted_entity][i].path, f"Expected impacted entity paths to be {expectation.impacted_entities[impacted_entity][i].path}, found {impacted_entity_paths[i].path}" + print("Test passed!") + return True + except AssertionError as e: + print("Test failed!") + raise e + return False + + +@pytest.mark.dependency() +def test_healthchecks(wait_for_healthchecks): + # Call to wait_for_healthchecks fixture will do the actual functionality. + pass + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) +# ) +@pytest.mark.parametrize( + "lineage_style", + [ + Scenario.LineageStyle.DATASET_QUERY_DATASET, + Scenario.LineageStyle.DATASET_JOB_DATASET, + ], +) +@pytest.mark.parametrize( + "graph_level", + [ + 1, + 2, + 3, + # TODO - convert this to range of 1 to 10 to make sure we can handle large graphs + ], +) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_lineage_via_node( + lineage_style: Scenario.LineageStyle, graph_level: int +) -> None: + scenario: Scenario = Scenario( + hop_platform_map={0: "mysql", 1: "snowflake"}, + lineage_style=lineage_style, + num_hops=graph_level, + default_dataset_prefix=f"{lineage_style.value}.", + ) + + # Create an emitter to the GMS REST API. + (url, token) = get_url_and_token() + with DataHubGraph( + DatahubClientConfig(server=url, token=token, retry_max_times=0) + ) as graph: + emitter = graph + # emitter = DataHubConsoleEmitter() + + # Emit metadata! 
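+ # Entity MCPs (schemas, dataset properties, and the flow/job or query entities) go
+ # out first; the lineage MCPs that reference those urns follow below.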
+ for mcp in scenario.get_entity_mcps(): + emitter.emit_mcp(mcp) + + for mcps in scenario.get_lineage_mcps(): + emitter.emit_mcp(mcps) + + wait_for_writes_to_sync() + try: + scenario.test_expectation(graph) + finally: + scenario.cleanup(DataHubGraphDeleteAgent(graph)) + + +@pytest.fixture(scope="module") +def chart_urn_fixture(): + return "urn:li:chart:(tableau,2241f3d6-df8d-b515-9c0c-f5e5b347b26e)" + + +@pytest.fixture(scope="module") +def intermediates_fixture(): + return [ + "urn:li:dataset:(urn:li:dataPlatform:tableau,6bd53e72-9fe4-ea86-3d23-14b826c13fa5,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:tableau,1c5653d6-c448-0850-108b-5c78aeaf6b51,PROD)", + ] + + +@pytest.fixture(scope="module") +def destination_urn_fixture(): + return "urn:li:dataset:(urn:li:dataPlatform:external,sales target %28us%29.xlsx.sheet1,PROD)" + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +@pytest.fixture(scope="module", autouse=False) +def ingest_multipath_metadata( + chart_urn_fixture, intermediates_fixture, destination_urn_fixture +): + (url, token) = get_url_and_token() + fake_auditstamp = AuditStampClass( + time=int(time.time() * 1000), + actor="urn:li:corpuser:datahub", + ) + with DataHubGraph( + DatahubClientConfig(server=url, token=token, retry_max_times=0) + ) as graph: + chart_urn = chart_urn_fixture + intermediates = intermediates_fixture + destination_urn = destination_urn_fixture + for mcp in MetadataChangeProposalWrapper.construct_many( + entityUrn=destination_urn, + aspects=[ + DatasetPropertiesClass( + name="sales target (us).xlsx.sheet1", + ), + ], + ): + graph.emit_mcp(mcp) + + for intermediate in intermediates: + for mcp in MetadataChangeProposalWrapper.construct_many( + entityUrn=intermediate, + aspects=[ + DatasetPropertiesClass( + name="intermediate", + ), + UpstreamLineageClass( + upstreams=[ + UpstreamClass( + dataset=destination_urn, + type="TRANSFORMED", + ) + ] + ), + ], + ): + graph.emit_mcp(mcp) + + for mcp in MetadataChangeProposalWrapper.construct_many( + entityUrn=chart_urn, + aspects=[ + ChartInfoClass( + title="chart", + description="chart", + lastModified=ChangeAuditStampsClass(created=fake_auditstamp), + inputEdges=[ + EdgeClass( + destinationUrn=intermediate_entity, + sourceUrn=chart_urn, + ) + for intermediate_entity in intermediates + ], + ) + ], + ): + graph.emit_mcp(mcp) + wait_for_writes_to_sync() + yield + for urn in [chart_urn] + intermediates + [destination_urn]: + graph.delete_entity(urn, hard=True) + wait_for_writes_to_sync() + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_simple_lineage_multiple_paths( + ingest_multipath_metadata, + chart_urn_fixture, + intermediates_fixture, + destination_urn_fixture, +): + chart_urn = chart_urn_fixture + intermediates = intermediates_fixture + destination_urn = destination_urn_fixture + results = search_across_lineage( + get_default_graph(), + chart_urn, + direction="UPSTREAM", + convert_schema_fields_to_datasets=True, + ) + assert destination_urn in [ + x["entity"]["urn"] for x in results["searchAcrossLineage"]["searchResults"] + ] + for search_result in results["searchAcrossLineage"]["searchResults"]: + if search_result["entity"]["urn"] == destination_urn: + assert ( + len(search_result["paths"]) == 2 + ) # 2 paths from the chart to the dataset + for path in search_result["paths"]: + assert len(path["path"]) == 3 + assert path["path"][-1]["urn"] == destination_urn + assert path["path"][0]["urn"] == chart_urn + assert path["path"][1]["urn"] in intermediates From 
d1604b39cc902db5b66bffc8e0e3abdfd74818ed Mon Sep 17 00:00:00 2001 From: Ellie O'Neil <110510035+eboneil@users.noreply.github.com> Date: Mon, 29 Jan 2024 17:17:07 -0800 Subject: [PATCH 457/792] feat(ingestion/tableau) Ingest tags from datasource (#9730) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/tableau.py | 9 +- .../ingestion/source/tableau_common.py | 3 + .../publishedDatasourcesConnection_all.json | 7 +- .../tableau/tableau_cll_mces_golden.json | 3611 ++++++++++++---- ...bleau_extract_all_project_mces_golden.json | 3693 +++++++++++++---- .../tableau/tableau_mces_golden.json | 3608 ++++++++++++---- .../tableau_mces_golden_deleted_stateful.json | 301 +- .../tableau_nested_project_mces_golden.json | 3667 ++++++++++++---- .../tableau_signout_timeout_mces_golden.json | 3608 ++++++++++++---- ...au_with_platform_instance_mces_golden.json | 25 + 10 files changed, 14469 insertions(+), 4063 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index acdece14a6440..d761eb1c15f0d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -1743,8 +1743,15 @@ def emit_datasource( aspects=[self.get_data_platform_instance()], ) - # Browse path + # Tags + if datasource_info: + tags = self.get_tags(datasource_info) + if tags: + dataset_snapshot.aspects.append( + builder.make_global_tag_aspect_with_tag_list(tags) + ) + # Browse path if browse_path and is_embedded_ds and workbook and workbook.get(c.NAME): browse_path = ( f"{browse_path}/{workbook[c.NAME].replace('/', REPLACE_SLASH_CHAR)}" diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py index 121b2e257a6ba..f0f106dd0f20a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py @@ -399,6 +399,9 @@ class MetadataQueryException(Exception): description uri projectName + tags { + name + } } """ diff --git a/metadata-ingestion/tests/integration/tableau/setup/publishedDatasourcesConnection_all.json b/metadata-ingestion/tests/integration/tableau/setup/publishedDatasourcesConnection_all.json index 0c2b27db8af95..93ce61e6d4fa4 100644 --- a/metadata-ingestion/tests/integration/tableau/setup/publishedDatasourcesConnection_all.json +++ b/metadata-ingestion/tests/integration/tableau/setup/publishedDatasourcesConnection_all.json @@ -250,7 +250,12 @@ }, "description": "description for test publish datasource", "uri": "sites/4989/datasources/155429", - "projectName": "default" + "projectName": "default", + "tags": [ + { + "name": "tag on published datasource" + } + ] }, { "__typename": "PublishedDatasource", diff --git a/metadata-ingestion/tests/integration/tableau/tableau_cll_mces_golden.json b/metadata-ingestion/tests/integration/tableau/tableau_cll_mces_golden.json index 4575144c6f844..c8312ab99ad19 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_cll_mces_golden.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_cll_mces_golden.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } 
}, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -143,7 +151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,7 +167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -192,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -207,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -260,7 +275,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -275,7 +291,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -290,7 +307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -307,7 +325,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -331,7 +350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -346,7 +366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -366,7 +387,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -387,7 +409,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -402,7 +425,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -417,7 +441,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -434,7 +459,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +484,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -477,7 +504,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -492,7 +520,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,7 +541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -533,7 +563,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -548,7 +579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -563,7 +595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -580,7 +613,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -604,7 +638,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -619,7 +654,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -639,7 +675,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +697,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -675,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -690,7 +729,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -707,7 +747,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -731,7 +772,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -746,7 +788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -766,7 +809,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", 
+ "lastRunId": "no-run-id-provided" } }, { @@ -786,7 +830,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -850,7 +895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -865,7 +911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1037,7 +1084,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1061,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1123,7 +1172,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1138,7 +1188,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1596,7 +1647,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1620,7 +1672,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1682,7 +1735,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1697,7 +1751,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2207,7 +2262,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2231,7 +2287,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2293,7 +2350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2308,7 +2366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2714,7 +2773,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2738,7 +2798,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2800,7 +2861,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2815,7 +2877,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2857,7 +2920,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2881,7 +2945,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2943,7 +3008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2958,7 +3024,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3168,7 +3235,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3192,7 +3260,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3263,7 +3332,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3278,7 +3348,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3363,7 +3434,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3387,7 +3459,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3452,7 +3525,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3467,7 +3541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3862,7 +3937,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3886,7 +3962,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3951,7 +4028,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3966,7 +4044,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4329,7 +4408,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4353,7 +4433,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4418,7 +4499,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4433,7 +4515,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4828,7 +4911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4852,7 +4936,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4914,7 +4999,8 
@@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4929,7 +5015,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5208,7 +5295,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5232,7 +5320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5297,7 +5386,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5312,7 +5402,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5568,7 +5659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5592,7 +5684,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5654,7 +5747,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5669,7 +5763,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5951,7 +6046,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5975,7 +6071,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6040,7 +6137,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6055,7 +6153,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6389,7 +6488,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6413,7 +6513,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6478,7 +6579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6493,7 +6595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6801,7 +6904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6825,7 +6929,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6887,7 +6992,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -6902,7 +7008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7129,7 +7236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7153,7 +7261,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7218,7 +7327,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7233,7 +7343,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7489,7 +7600,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7513,7 +7625,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7575,7 +7688,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7590,7 +7704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7739,7 +7854,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7763,7 +7879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7828,7 +7945,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7843,7 +7961,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8177,7 +8296,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8201,7 +8321,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8266,7 +8387,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8281,7 +8403,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8511,7 +8634,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8535,7 +8659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8600,7 +8725,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8615,7 +8741,8 @@ }, "systemMetadata": { "lastObserved": 
1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8871,7 +8998,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8895,7 +9023,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8960,7 +9089,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8975,7 +9105,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9257,7 +9388,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9281,7 +9413,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9343,7 +9476,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9358,7 +9492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9695,7 +9830,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9719,7 +9855,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9781,7 +9918,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9796,7 +9934,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9881,7 +10020,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9905,7 +10045,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9925,7 +10066,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9991,7 +10133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10006,7 +10149,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10030,7 +10174,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10091,7 +10236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10106,7 +10252,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -10130,7 +10277,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10189,7 +10337,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10204,7 +10353,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10228,7 +10378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10301,7 +10452,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10316,7 +10468,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10340,7 +10493,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10683,7 +10837,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12473,7 +12628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12490,7 +12646,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12505,7 +12662,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12529,7 +12687,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12545,7 +12704,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", "type": "TRANSFORMED" }, { @@ -12553,7 +12712,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", "type": "TRANSFORMED" } ], @@ -12629,7 +12788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12871,7 +13031,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12888,7 +13049,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12903,7 +13065,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12927,7 +13090,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", 
+ "lastRunId": "no-run-id-provided" } }, { @@ -12959,7 +13123,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13373,7 +13538,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13390,7 +13556,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13405,7 +13572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13429,7 +13597,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13863,7 +14032,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14612,7 +14782,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14629,7 +14800,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14644,7 +14816,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14668,7 +14841,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14813,7 +14987,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21430,7 +21605,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21447,7 +21623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21462,7 +21639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21486,7 +21664,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21674,7 +21853,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25577,7 +25757,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25594,7 +25775,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25609,7 +25791,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25633,7 +25816,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25783,7 +25967,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30948,7 +31133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30965,7 +31151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30980,7 +31167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31004,7 +31192,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31126,7 +31315,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31355,7 +31545,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31372,7 +31563,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31387,7 +31579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31411,7 +31604,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31514,7 +31708,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31528,469 +31723,18 @@ } }, { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/tableau/default" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:jawadqu@gmail.com", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "name": "test publish datasource", - "description": "description for test publish datasource", - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "test", - "platform": "urn:li:dataPlatform:tableau", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "payment_date", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "DATETIME", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:YEAR" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "staff_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": 
"urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_id", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "INTEGER", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "amount", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "REAL", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "Published SQL Query", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "TABLE", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, + "com.linkedin.pegasus2avro.common.GlobalTags": { + "tags": [ { - "fieldPath": "staff_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false + "tag": "urn:li:tag:tag on published datasource" } ] } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Published Data Source" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", - "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "changeType": "UPSERT", - "aspectName": "upstreamLineage", - "aspect": { - "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", - "type": "TRANSFORMED" - } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" - ], - "transformOperation": "GroupField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" - ], - "transformOperation": "BinField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" - ], - "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" - ], - "transformOperation": "SetField", - "confidenceScore": 1.0 - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.DataPlatformInstance": { - "platform": "urn:li:dataPlatform:tableau" - } }, { "com.linkedin.pegasus2avro.common.BrowsePaths": { "paths": [ - "/prod/tableau/Samples" + "/prod/tableau/default" ] } }, @@ -32011,8 +31755,473 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": {}, - "name": "Superstore Datasource", - "description": "Description for Superstore dataset", + "name": "test publish datasource", + "description": "description for test publish datasource", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "test", + "platform": "urn:li:dataPlatform:tableau", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "payment_date", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "DATETIME", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:YEAR" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_first_name", + "nullable": false, + "description": "", + "type": { + 
"type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INTEGER", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "amount", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "REAL", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "Published SQL Query", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": {} + } + }, + "nativeDataType": "TABLE", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Published Data Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + }, + "systemMetadata": { + 
"lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "type": "TRANSFORMED" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" + ], + "transformOperation": "GroupField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" + ], + "transformOperation": "BinField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" + ], + "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" + ], + "transformOperation": "SetField", + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.DataPlatformInstance": { + "platform": "urn:li:dataPlatform:tableau" + } + }, + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/tableau/Samples" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:jawadqu@gmail.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": {}, + "name": "Superstore Datasource", + "description": "Description for Superstore dataset", "tags": [] } }, @@ -32853,7 +33062,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32870,7 +33080,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32885,7 +33096,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32905,7 
+33117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32920,7 +33133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32952,7 +33166,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33086,7 +33301,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33104,7 +33320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33128,7 +33345,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33168,7 +33386,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33314,7 +33533,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33332,7 +33552,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33354,7 +33575,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33406,7 +33628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33492,7 +33715,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33510,7 +33734,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33532,7 +33757,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33695,7 +33921,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33720,7 +33947,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33955,7 +34183,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33980,7 +34209,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34191,7 +34421,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34216,7 +34447,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34379,7 +34611,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34404,7 +34637,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34555,7 +34789,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34580,7 +34815,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34719,7 +34955,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34744,7 +34981,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34835,7 +35073,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34860,7 +35099,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34927,7 +35167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34949,7 +35190,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35016,7 +35258,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35038,7 +35281,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35333,7 +35577,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35355,7 +35600,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36144,7 +36390,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36169,7 +36416,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37052,7 +37300,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37077,7 +37326,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38032,7 +38282,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38057,7 +38308,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38880,7 +39132,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -38905,7 +39158,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39212,7 +39466,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39237,7 +39492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40084,7 +40340,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40109,7 +40366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41100,7 +41358,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41125,7 +41384,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41984,7 +42244,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42009,7 +42270,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42030,7 +42292,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42052,7 +42315,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42073,7 +42337,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42095,7 +42360,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42115,7 +42381,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42137,7 +42404,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42152,7 +42420,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42167,7 +42436,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42182,7 +42452,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42197,7 +42468,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42212,7 +42484,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42227,7 +42500,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42242,7 +42516,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42257,7 +42532,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42272,7 +42548,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42287,7 +42564,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42302,7 +42580,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42317,7 +42596,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42332,7 +42612,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42347,7 +42628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42362,7 +42644,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42377,7 +42660,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42392,7 +42676,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42407,7 +42692,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42422,7 +42708,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42437,7 +42724,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42452,7 +42740,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42467,7 +42756,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42482,7 +42772,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42497,7 +42788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42512,7 +42804,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42527,7 +42820,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42542,7 +42836,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42557,7 +42852,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42572,7 +42868,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42587,7 +42884,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42602,7 +42900,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42617,7 +42916,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42632,7 +42932,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42647,7 +42948,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42662,7 +42964,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42677,7 +42980,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42692,7 +42996,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42707,7 +43012,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42722,7 +43028,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42737,7 +43044,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42752,7 +43060,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42767,7 +43076,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42782,7 +43092,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42797,7 +43108,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42812,7 +43124,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42827,7 +43140,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42842,7 
+43156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42857,7 +43172,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42872,7 +43188,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42887,7 +43204,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42902,7 +43220,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42917,7 +43236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42932,7 +43252,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42947,7 +43268,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42962,7 +43284,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42977,7 +43300,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42992,7 +43316,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43007,7 +43332,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43022,7 +43348,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43037,7 +43364,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43052,7 +43380,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43067,7 +43396,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43082,7 +43412,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43097,7 +43428,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43112,7 +43444,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43127,7 +43460,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43142,7 +43476,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43157,7 +43492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43172,7 +43508,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43187,7 +43524,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43202,7 +43540,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43217,7 +43556,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43232,7 +43572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43247,7 +43588,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43262,7 +43604,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43277,7 +43620,1697 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + "json": { + "name": "tag on published datasource" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:252a054d4dd93cd657735aa46dd71370", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": 
"urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,222d1406-de0e-cd8d-0b94-9b45a0007e59)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,38130558-4194-2e2a-3046-c0d887829cb4)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,692a2da4-2a82-32c1-f713-63b8e4325d86)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f4317efd-c3e6-6ace-8fe6-e71b590bbbcc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": 
"urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8a6a269a-d6de-fae4-5050-513255b40ffc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c57a5574-db47-46df-677f-0b708dab14db)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e604255e-0573-3951-6db7-05bee48116c1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,20fc5eb7-81eb-aa18-8c39-af501c62d085)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b5351c1-535d-4a4a-1339-c51ddd6abf8a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b73b9dd-4ec7-75ca-f2e9-fa1984ca8b72)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": 
"urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,373c6466-bb0c-b319-8752-632456349261)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,53b8dc2f-8ada-51f7-7422-fe82e9b803cc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,58af9ecf-b839-da50-65e1-2e1fa20e3362)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,618b3e76-75c1-cb31-0c61-3f4890b72c31)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,721c3c41-7a2b-16a8-3281-6f948a44be96)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7ef184c1-5a41-5ec8-723e-ae44c20aa335)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + 
"id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7fbc77ba-0ab6-3727-0db3-d8402a804da5)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8385ea9a-0749-754f-7ad9-824433de2120)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b207c2f2-b675-32e3-2663-17bb836a018b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b679da5e-7d03-f01e-b2ea-01fb3c1926dc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c14973c2-e1c3-563a-a9c1-8a408396d22a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e70a540d-55ed-b9cc-5a3c-01ebe81a1274)", + "changeType": "UPSERT", + 
"aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f76d3570-23b8-f74b-d85c-cc5484c2079c)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,130496dc-29ca-8a89-e32b-d73c4d8b65ff)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,8f7dd564-36b6-593f-3c6f-687ad06cd40b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,20e44c22-1ccd-301a-220c-7b6837d09a52)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,39b7a1de-6276-cfc7-9b59-1d22f3bbb06b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": 
"urn:li:dashboard:(tableau,5dcaaf46-e6fb-2548-e763-272a7ab2c9b1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,801c95e3-b07e-7bfe-3789-a561c7beccd3,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4644ccb1-2adc-cf26-c654-04ed1dcc7090,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,618c87db-5959-338b-bcc7-6f5f4cc0b6c6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d00f4ba6-707e-4684-20af-69eb47587cc2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,06c3e060-8133-4b58-9b53-a0fced25e056,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": 
"urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,3ade7817-ae27-259e-8e48-1570e7f932f6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,dfe2c02a-54b7-f7a2-39fc-c651da2f6ad8,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d8d4c0ea-3162-fa11-31e6-26675da44a38,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "urn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": 
"urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4fb670d5-3e19-9656-e684-74aa9729cf18,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "SubProject1" + }, + { + "id": "AbcJoinWorkbook" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity11,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity10,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity7,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.campaignstable,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.task,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_request,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_req_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": 
"Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_cat_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sys_user_group,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.problem,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.incident,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.cmdb_ci,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.payment,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.staff,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/tableau/tableau_extract_all_project_mces_golden.json b/metadata-ingestion/tests/integration/tableau/tableau_extract_all_project_mces_golden.json index 12eb780447c18..52b7de1b70a4d 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_extract_all_project_mces_golden.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_extract_all_project_mces_golden.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -143,7 +151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,7 +167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -192,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -207,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -258,7 +273,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -273,7 +289,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -288,7 +305,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -305,7 +323,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -320,7 +339,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -340,7 +360,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -361,7 +382,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -376,7 +398,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -391,7 +414,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -408,7 +432,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -432,7 +457,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -447,7 +473,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -467,7 +494,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -488,7 +516,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -503,7 +532,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -518,7 +548,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -535,7 +566,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -559,7 +591,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -578,7 +611,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -593,7 +627,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -613,7 +648,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -634,7 +670,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", 
+ "lastRunId": "no-run-id-provided" } }, { @@ -649,7 +686,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -664,7 +702,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -681,7 +720,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -705,7 +745,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -720,7 +761,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -740,7 +782,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -761,7 +804,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -776,7 +820,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -791,7 +836,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -808,7 +854,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,7 +879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -847,7 +895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -867,7 +916,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -888,7 +938,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -903,7 +954,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -918,7 +970,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -935,7 +988,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -959,7 +1013,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -974,7 +1029,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -998,7 +1054,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1018,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + 
"runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1082,7 +1140,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1097,7 +1156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1269,7 +1329,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1293,7 +1354,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1355,7 +1417,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1370,7 +1433,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1828,7 +1892,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1852,7 +1917,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1914,7 +1980,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1929,7 +1996,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2439,7 +2507,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2463,7 +2532,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2525,7 +2595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2540,7 +2611,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2946,7 +3018,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2970,7 +3043,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3032,7 +3106,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3047,7 +3122,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3089,7 +3165,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3113,7 +3190,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3175,7 +3253,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3190,7 +3269,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3400,7 +3480,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3424,7 +3505,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3495,7 +3577,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3510,7 +3593,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3595,7 +3679,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3619,7 +3704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3684,7 +3770,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3699,7 +3786,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4094,7 +4182,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4118,7 +4207,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4183,7 +4273,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4198,7 +4289,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4561,7 +4653,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4585,7 +4678,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4650,7 +4744,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4665,7 +4760,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5060,7 +5156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5084,7 +5181,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5146,7 +5244,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -5161,7 +5260,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5440,7 +5540,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5464,7 +5565,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5529,7 +5631,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5544,7 +5647,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5800,7 +5904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5824,7 +5929,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5886,7 +5992,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5901,7 +6008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6183,7 +6291,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6207,7 +6316,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6272,7 +6382,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6287,7 +6398,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6621,7 +6733,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6645,7 +6758,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6710,7 +6824,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6725,7 +6840,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7033,7 +7149,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7057,7 +7174,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7119,7 +7237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7134,7 +7253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7361,7 +7481,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7385,7 +7506,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7450,7 +7572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7465,7 +7588,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7721,7 +7845,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7745,7 +7870,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7807,7 +7933,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7822,7 +7949,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7971,7 +8099,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7995,7 +8124,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8060,7 +8190,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8075,7 +8206,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8409,7 +8541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8433,7 +8566,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8498,7 +8632,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8513,7 +8648,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8743,7 +8879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8767,7 +8904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8832,7 +8970,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8847,7 +8986,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9103,7 +9243,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9127,7 +9268,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9192,7 +9334,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9207,7 +9350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9489,7 +9633,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9513,7 +9658,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9575,7 +9721,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9590,7 +9737,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9927,7 +10075,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9951,7 +10100,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10013,7 +10163,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10028,7 +10179,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10113,7 +10265,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10137,7 +10290,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10157,7 +10311,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10223,7 +10378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10238,7 +10394,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10262,7 +10419,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10323,7 +10481,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10338,7 +10497,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10362,7 +10522,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10421,7 +10582,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10436,7 +10598,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10460,7 +10623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10533,7 +10697,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10548,7 +10713,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10572,7 +10738,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10915,7 +11082,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12705,7 +12873,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12722,7 +12891,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12737,7 +12907,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12761,7 +12932,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12777,7 +12949,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", "type": "TRANSFORMED" }, { @@ -12785,7 +12957,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", "type": "TRANSFORMED" } ], @@ -12861,7 +13033,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13103,7 +13276,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13120,7 +13294,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13135,7 +13310,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13159,7 +13335,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13191,7 +13368,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13605,7 +13783,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13622,7 +13801,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13637,7 +13817,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13661,7 +13842,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14095,7 +14277,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14844,7 +15027,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14861,7 +15045,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14876,7 +15061,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14900,7 +15086,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -15045,7 +15232,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21662,7 +21850,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21679,7 +21868,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21694,7 +21884,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21718,7 +21909,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21906,7 +22098,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25809,7 +26002,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25826,7 +26020,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25841,7 +26036,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25865,7 +26061,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -26015,7 +26212,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -31180,7 +31378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31197,7 +31396,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31212,7 +31412,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31236,7 +31437,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31358,7 +31560,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31587,7 +31790,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31604,7 +31808,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31619,7 +31824,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31643,7 +31849,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31746,7 +31953,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31760,469 +31968,18 @@ } }, { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/tableau/default" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:jawadqu@gmail.com", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "name": "test publish datasource", - "description": "description for test publish datasource", - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "test", - "platform": "urn:li:dataPlatform:tableau", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "payment_date", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "DATETIME", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:YEAR" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "staff_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - 
"isPartOfKey": false - }, - { - "fieldPath": "customer_id", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "INTEGER", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "amount", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "REAL", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "Published SQL Query", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "TABLE", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, + "com.linkedin.pegasus2avro.common.GlobalTags": { + "tags": [ { - "fieldPath": "staff_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false + "tag": "urn:li:tag:tag on published datasource" } ] } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Published Data Source" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - 
"aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", - "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "changeType": "UPSERT", - "aspectName": "upstreamLineage", - "aspect": { - "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", - "type": "TRANSFORMED" - } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" - ], - "transformOperation": "GroupField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ 
- "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" - ], - "transformOperation": "BinField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" - ], - "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" - ], - "transformOperation": "SetField", - "confidenceScore": 1.0 - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.DataPlatformInstance": { - "platform": "urn:li:dataPlatform:tableau" - } }, { "com.linkedin.pegasus2avro.common.BrowsePaths": { "paths": [ - "/prod/tableau/Samples" + "/prod/tableau/default" ] } }, @@ -32243,8 +32000,473 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": {}, - "name": "Superstore Datasource", - "description": "Description for Superstore dataset", + "name": "test publish datasource", + "description": "description for test publish datasource", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "test", + "platform": "urn:li:dataPlatform:tableau", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "payment_date", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "DATETIME", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:YEAR" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": 
"urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INTEGER", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "amount", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "REAL", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "Published SQL Query", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": {} + } + }, + "nativeDataType": "TABLE", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Published Data Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "type": "TRANSFORMED" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" + ], + "transformOperation": "GroupField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", 
+ "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" + ], + "transformOperation": "BinField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" + ], + "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" + ], + "transformOperation": "SetField", + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.DataPlatformInstance": { + "platform": "urn:li:dataPlatform:tableau" + } + }, + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/tableau/Samples" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:jawadqu@gmail.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": {}, + "name": "Superstore Datasource", + "description": "Description for Superstore dataset", "tags": [] } }, @@ -33085,7 +33307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33102,7 +33325,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33117,7 +33341,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33137,7 +33362,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33152,7 +33378,8 @@ }, "systemMetadata": { "lastObserved": 
1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33184,7 +33411,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33318,7 +33546,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33336,7 +33565,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33360,7 +33590,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33400,7 +33631,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33546,7 +33778,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33564,7 +33797,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33586,7 +33820,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33672,7 +33907,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33690,7 +33926,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33712,7 +33949,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33875,7 +34113,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33900,7 +34139,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34135,7 +34375,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34160,7 +34401,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34371,7 +34613,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34396,7 +34639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34559,7 +34803,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34584,7 +34829,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34735,7 +34981,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -34760,7 +35007,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34899,7 +35147,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34924,7 +35173,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35015,7 +35265,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35040,7 +35291,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35107,7 +35359,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35129,7 +35382,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35196,7 +35450,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35218,7 +35473,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35513,7 +35769,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35535,7 +35792,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36324,7 +36582,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36349,7 +36608,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37232,7 +37492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37257,7 +37518,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38212,7 +38474,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38237,7 +38500,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39060,7 +39324,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39085,7 +39350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39392,7 +39658,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39417,7 +39684,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40264,7 +40532,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40289,7 +40558,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41280,7 +41550,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41305,7 +41576,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42164,7 +42436,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42189,7 +42462,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42210,7 +42484,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42232,7 +42507,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42253,7 +42529,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42275,7 +42552,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42295,7 +42573,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42317,7 +42596,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42332,7 +42612,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42347,7 +42628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42362,7 +42644,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42377,7 +42660,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42392,7 +42676,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42407,7 +42692,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42422,7 +42708,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42437,7 +42724,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42452,7 +42740,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42467,7 +42756,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42482,7 +42772,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42497,7 +42788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42512,7 +42804,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42527,7 +42820,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42542,7 +42836,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42557,7 +42852,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42572,7 +42868,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42587,7 +42884,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42602,7 +42900,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42617,7 +42916,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42632,7 +42932,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42647,7 +42948,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42662,7 +42964,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42677,7 +42980,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42692,7 +42996,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42707,7 +43012,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42722,7 +43028,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42737,7 +43044,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -42752,7 +43060,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42767,7 +43076,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42782,7 +43092,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42797,7 +43108,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42812,7 +43124,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42827,7 +43140,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42842,7 +43156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42857,7 +43172,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42872,7 +43188,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42887,7 +43204,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42902,7 +43220,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42917,7 +43236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42932,7 +43252,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42947,7 +43268,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42962,7 +43284,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42977,7 +43300,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42992,7 +43316,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43007,7 +43332,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43022,7 +43348,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43037,7 +43364,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43052,7 +43380,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43067,7 +43396,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43082,7 +43412,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43097,7 +43428,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43112,7 +43444,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43127,7 +43460,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43142,7 +43476,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43157,7 +43492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43172,7 +43508,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43187,7 +43524,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43202,7 +43540,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43217,7 +43556,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43232,7 +43572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43247,7 +43588,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43262,7 +43604,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43277,7 +43620,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43292,7 +43636,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43307,7 +43652,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43322,7 +43668,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43337,7 +43684,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43352,7 +43700,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43367,7 +43716,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43382,7 +43732,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43397,7 +43748,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43412,7 +43764,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43427,7 +43780,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43442,7 +43796,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43457,7 +43812,1743 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + "json": { + "name": "tag on published datasource" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:252a054d4dd93cd657735aa46dd71370", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:beaddce9d1e89ab503ae6408fb77d4ce", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + 
"entityType": "container", + "entityUrn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:595877512935338b94eac9e06cf20607", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:beaddce9d1e89ab503ae6408fb77d4ce", + "urn": "urn:li:container:beaddce9d1e89ab503ae6408fb77d4ce" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,222d1406-de0e-cd8d-0b94-9b45a0007e59)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,38130558-4194-2e2a-3046-c0d887829cb4)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,692a2da4-2a82-32c1-f713-63b8e4325d86)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": 
"urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f4317efd-c3e6-6ace-8fe6-e71b590bbbcc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8a6a269a-d6de-fae4-5050-513255b40ffc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c57a5574-db47-46df-677f-0b708dab14db)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e604255e-0573-3951-6db7-05bee48116c1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,20fc5eb7-81eb-aa18-8c39-af501c62d085)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b5351c1-535d-4a4a-1339-c51ddd6abf8a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": 
"urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b73b9dd-4ec7-75ca-f2e9-fa1984ca8b72)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,373c6466-bb0c-b319-8752-632456349261)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,53b8dc2f-8ada-51f7-7422-fe82e9b803cc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,58af9ecf-b839-da50-65e1-2e1fa20e3362)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,618b3e76-75c1-cb31-0c61-3f4890b72c31)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,721c3c41-7a2b-16a8-3281-6f948a44be96)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + 
"id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7ef184c1-5a41-5ec8-723e-ae44c20aa335)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7fbc77ba-0ab6-3727-0db3-d8402a804da5)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8385ea9a-0749-754f-7ad9-824433de2120)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b207c2f2-b675-32e3-2663-17bb836a018b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b679da5e-7d03-f01e-b2ea-01fb3c1926dc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c14973c2-e1c3-563a-a9c1-8a408396d22a)", + "changeType": "UPSERT", + 
"aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e70a540d-55ed-b9cc-5a3c-01ebe81a1274)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f76d3570-23b8-f74b-d85c-cc5484c2079c)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,130496dc-29ca-8a89-e32b-d73c4d8b65ff)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,8f7dd564-36b6-593f-3c6f-687ad06cd40b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,20e44c22-1ccd-301a-220c-7b6837d09a52)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": 
"urn:li:dashboard:(tableau,39b7a1de-6276-cfc7-9b59-1d22f3bbb06b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,5dcaaf46-e6fb-2548-e763-272a7ab2c9b1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,801c95e3-b07e-7bfe-3789-a561c7beccd3,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4644ccb1-2adc-cf26-c654-04ed1dcc7090,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,618c87db-5959-338b-bcc7-6f5f4cc0b6c6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d00f4ba6-707e-4684-20af-69eb47587cc2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": 
"urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,06c3e060-8133-4b58-9b53-a0fced25e056,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,3ade7817-ae27-259e-8e48-1570e7f932f6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,dfe2c02a-54b7-f7a2-39fc-c651da2f6ad8,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d8d4c0ea-3162-fa11-31e6-26675da44a38,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "urn": 
"urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4fb670d5-3e19-9656-e684-74aa9729cf18,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "SubProject1" + }, + { + "id": "AbcJoinWorkbook" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity11,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity10,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity7,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.campaignstable,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.task,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_request,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { 
+ "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_req_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_cat_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sys_user_group,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.problem,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.incident,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.cmdb_ci,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.payment,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.staff,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/tableau/tableau_mces_golden.json b/metadata-ingestion/tests/integration/tableau/tableau_mces_golden.json index 77ea484abfa0e..9d0e8a14a1f8c 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_mces_golden.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_mces_golden.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -143,7 +151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,7 +167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -192,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -207,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +237,8 @@ 
}, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -260,7 +275,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -275,7 +291,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -290,7 +307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -307,7 +325,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -331,7 +350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -346,7 +366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -366,7 +387,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -387,7 +409,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -402,7 +425,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -417,7 +441,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -434,7 +459,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +484,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -477,7 +504,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -492,7 +520,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,7 +541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -533,7 +563,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -548,7 +579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -563,7 +595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -580,7 +613,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ 
-604,7 +638,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -619,7 +654,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -639,7 +675,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +697,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -675,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -690,7 +729,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -707,7 +747,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -731,7 +772,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -746,7 +788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -766,7 +809,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -786,7 +830,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -850,7 +895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -865,7 +911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1037,7 +1084,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1061,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1123,7 +1172,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1138,7 +1188,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1596,7 +1647,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1620,7 +1672,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1682,7 +1735,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1697,7 +1751,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -2207,7 +2262,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2231,7 +2287,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2293,7 +2350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2308,7 +2366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2714,7 +2773,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2738,7 +2798,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2800,7 +2861,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2815,7 +2877,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2857,7 +2920,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2881,7 +2945,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2943,7 +3008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2958,7 +3024,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3168,7 +3235,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3192,7 +3260,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3263,7 +3332,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3278,7 +3348,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3363,7 +3434,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3387,7 +3459,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3452,7 +3525,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3467,7 +3541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3862,7 +3937,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3886,7 +3962,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3951,7 +4028,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3966,7 +4044,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4329,7 +4408,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4353,7 +4433,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4418,7 +4499,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4433,7 +4515,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4828,7 +4911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4852,7 +4936,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4914,7 +4999,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4929,7 +5015,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5208,7 +5295,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5232,7 +5320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5297,7 +5386,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5312,7 +5402,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5568,7 +5659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5592,7 +5684,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5654,7 +5747,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5669,7 +5763,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5951,7 +6046,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5975,7 +6071,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6040,7 +6137,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6055,7 +6153,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6389,7 +6488,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6413,7 +6513,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6478,7 +6579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6493,7 +6595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6801,7 +6904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6825,7 +6929,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6887,7 +6992,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6902,7 +7008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7129,7 +7236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7153,7 +7261,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7218,7 +7327,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7233,7 +7343,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7489,7 +7600,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7513,7 +7625,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7575,7 +7688,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7590,7 +7704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7739,7 +7854,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7763,7 +7879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -7828,7 +7945,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7843,7 +7961,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8177,7 +8296,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8201,7 +8321,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8266,7 +8387,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8281,7 +8403,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8511,7 +8634,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8535,7 +8659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8600,7 +8725,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8615,7 +8741,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8871,7 +8998,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8895,7 +9023,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8960,7 +9089,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8975,7 +9105,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9257,7 +9388,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9281,7 +9413,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9343,7 +9476,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9358,7 +9492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9695,7 +9830,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9719,7 +9855,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9781,7 +9918,8 @@ }, "systemMetadata": { "lastObserved": 
1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9796,7 +9934,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9881,7 +10020,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9905,7 +10045,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9925,7 +10066,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9991,7 +10133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10006,7 +10149,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10030,7 +10174,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10091,7 +10236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10106,7 +10252,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10130,7 +10277,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10189,7 +10337,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10204,7 +10353,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10228,7 +10378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10301,7 +10452,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10316,7 +10468,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10340,7 +10493,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10683,7 +10837,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12473,7 +12628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12490,7 +12646,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12505,7 +12662,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -12529,7 +12687,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12545,7 +12704,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", "type": "TRANSFORMED" }, { @@ -12553,7 +12712,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", "type": "TRANSFORMED" } ], @@ -12629,7 +12788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12871,7 +13031,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12888,7 +13049,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12903,7 +13065,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12927,7 +13090,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12959,7 +13123,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13373,7 +13538,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13390,7 +13556,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13405,7 +13572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13429,7 +13597,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13863,7 +14032,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14612,7 +14782,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14629,7 +14800,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14644,7 +14816,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14668,7 +14841,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14813,7 +14987,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21430,7 +21605,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21447,7 +21623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21462,7 +21639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21486,7 +21664,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21674,7 +21853,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25577,7 +25757,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25594,7 +25775,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25609,7 +25791,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25633,7 +25816,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25783,7 +25967,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30948,7 +31133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30965,7 +31151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30980,7 +31167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31004,7 +31192,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31126,7 +31315,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31355,7 +31545,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31372,7 +31563,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31387,7 +31579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31411,7 +31604,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31514,7 +31708,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31528,469 
+31723,18 @@ } }, { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/tableau/default" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:jawadqu@gmail.com", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "name": "test publish datasource", - "description": "description for test publish datasource", - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "test", - "platform": "urn:li:dataPlatform:tableau", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "payment_date", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "DATETIME", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:YEAR" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "staff_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_id", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "INTEGER", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "amount", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "REAL", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "Published SQL Query", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "TABLE", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - 
"recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, + "com.linkedin.pegasus2avro.common.GlobalTags": { + "tags": [ { - "fieldPath": "staff_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false + "tag": "urn:li:tag:tag on published datasource" } ] } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Published Data Source" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", - "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "changeType": "UPSERT", - "aspectName": "upstreamLineage", - "aspect": { - "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", - "type": "TRANSFORMED" - } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" - ], - "transformOperation": "GroupField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" - ], - "transformOperation": "BinField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" - ], - "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" - ], - "transformOperation": "SetField", - "confidenceScore": 1.0 - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "proposedSnapshot": { - 
"com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.DataPlatformInstance": { - "platform": "urn:li:dataPlatform:tableau" - } }, { "com.linkedin.pegasus2avro.common.BrowsePaths": { "paths": [ - "/prod/tableau/Samples" + "/prod/tableau/default" ] } }, @@ -32011,8 +31755,473 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": {}, - "name": "Superstore Datasource", - "description": "Description for Superstore dataset", + "name": "test publish datasource", + "description": "description for test publish datasource", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "test", + "platform": "urn:li:dataPlatform:tableau", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "payment_date", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "DATETIME", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:YEAR" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INTEGER", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "amount", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "REAL", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "Published SQL Query", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": {} + } + }, + "nativeDataType": "TABLE", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": 
"customer_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Published Data Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "type": "TRANSFORMED" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" + ], + "transformOperation": "GroupField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" + ], + "transformOperation": "BinField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" + ], + "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" + ], + "transformOperation": "SetField", + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.DataPlatformInstance": { + "platform": "urn:li:dataPlatform:tableau" + } + }, + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/tableau/Samples" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:jawadqu@gmail.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": {}, + "name": "Superstore Datasource", + "description": "Description for Superstore dataset", "tags": [] } }, @@ -32853,7 +33062,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32870,7 +33080,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32885,7 +33096,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32905,7 +33117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32920,7 +33133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32952,7 +33166,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33086,7 +33301,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33104,7 +33320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33128,7 +33345,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33168,7 +33386,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33314,7 +33533,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33332,7 +33552,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33354,7 +33575,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33440,7 +33662,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33458,7 +33681,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33480,7 +33704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33643,7 +33868,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33668,7 +33894,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33903,7 +34130,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33928,7 +34156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34139,7 +34368,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34164,7 +34394,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34327,7 +34558,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34352,7 +34584,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34503,7 +34736,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34528,7 +34762,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34667,7 +34902,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34692,7 +34928,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34783,7 +35020,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34808,7 +35046,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34875,7 +35114,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34897,7 +35137,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34964,7 +35205,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34986,7 +35228,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -35281,7 +35524,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35303,7 +35547,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36092,7 +36337,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36117,7 +36363,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37000,7 +37247,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37025,7 +37273,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37980,7 +38229,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38005,7 +38255,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38828,7 +39079,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38853,7 +39105,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39160,7 +39413,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39185,7 +39439,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40032,7 +40287,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40057,7 +40313,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41048,7 +41305,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41073,7 +41331,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41932,7 +42191,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41957,7 +42217,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41978,7 +42239,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42000,7 +42262,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42021,7 +42284,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42043,7 +42307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42063,7 +42328,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42085,7 +42351,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42100,7 +42367,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42115,7 +42383,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42130,7 +42399,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42145,7 +42415,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42160,7 +42431,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42175,7 +42447,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42190,7 +42463,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42205,7 +42479,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42220,7 +42495,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42235,7 +42511,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42250,7 +42527,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42265,7 +42543,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42280,7 +42559,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42295,7 +42575,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42310,7 +42591,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42325,7 +42607,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42340,7 +42623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42355,7 +42639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42370,7 +42655,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42385,7 +42671,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42400,7 +42687,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42415,7 +42703,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42430,7 +42719,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42445,7 +42735,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42460,7 +42751,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42475,7 +42767,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42490,7 +42783,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42505,7 +42799,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42520,7 +42815,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42535,7 +42831,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42550,7 +42847,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42565,7 +42863,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42580,7 +42879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42595,7 +42895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42610,7 +42911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42625,7 +42927,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42640,7 +42943,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42655,7 
+42959,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42670,7 +42975,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42685,7 +42991,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42700,7 +43007,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42715,7 +43023,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42730,7 +43039,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42745,7 +43055,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42760,7 +43071,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42775,7 +43087,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42790,7 +43103,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42805,7 +43119,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42820,7 +43135,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42835,7 +43151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42850,7 +43167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42865,7 +43183,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42880,7 +43199,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42895,7 +43215,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42910,7 +43231,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42925,7 +43247,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42940,7 +43263,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42955,7 +43279,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42970,7 +43295,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42985,7 +43311,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43000,7 +43327,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43015,7 +43343,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43030,7 +43359,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43045,7 +43375,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43060,7 +43391,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43075,7 +43407,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43090,7 +43423,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43105,7 +43439,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43120,7 +43455,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43135,7 +43471,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43150,7 +43487,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43165,7 +43503,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43180,7 +43519,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43195,7 +43535,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43210,7 +43551,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43225,7 +43567,1697 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + "json": { + "name": "tag on published datasource" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:252a054d4dd93cd657735aa46dd71370", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,222d1406-de0e-cd8d-0b94-9b45a0007e59)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": 
"urn:li:chart:(tableau,38130558-4194-2e2a-3046-c0d887829cb4)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,692a2da4-2a82-32c1-f713-63b8e4325d86)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f4317efd-c3e6-6ace-8fe6-e71b590bbbcc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8a6a269a-d6de-fae4-5050-513255b40ffc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c57a5574-db47-46df-677f-0b708dab14db)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e604255e-0573-3951-6db7-05bee48116c1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,20fc5eb7-81eb-aa18-8c39-af501c62d085)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b5351c1-535d-4a4a-1339-c51ddd6abf8a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b73b9dd-4ec7-75ca-f2e9-fa1984ca8b72)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,373c6466-bb0c-b319-8752-632456349261)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,53b8dc2f-8ada-51f7-7422-fe82e9b803cc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,58af9ecf-b839-da50-65e1-2e1fa20e3362)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + 
"lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,618b3e76-75c1-cb31-0c61-3f4890b72c31)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,721c3c41-7a2b-16a8-3281-6f948a44be96)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7ef184c1-5a41-5ec8-723e-ae44c20aa335)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7fbc77ba-0ab6-3727-0db3-d8402a804da5)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8385ea9a-0749-754f-7ad9-824433de2120)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b207c2f2-b675-32e3-2663-17bb836a018b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": 
"urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b679da5e-7d03-f01e-b2ea-01fb3c1926dc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c14973c2-e1c3-563a-a9c1-8a408396d22a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e70a540d-55ed-b9cc-5a3c-01ebe81a1274)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f76d3570-23b8-f74b-d85c-cc5484c2079c)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,130496dc-29ca-8a89-e32b-d73c4d8b65ff)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,8f7dd564-36b6-593f-3c6f-687ad06cd40b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + 
{ + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,20e44c22-1ccd-301a-220c-7b6837d09a52)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,39b7a1de-6276-cfc7-9b59-1d22f3bbb06b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,5dcaaf46-e6fb-2548-e763-272a7ab2c9b1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,801c95e3-b07e-7bfe-3789-a561c7beccd3,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4644ccb1-2adc-cf26-c654-04ed1dcc7090,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,618c87db-5959-338b-bcc7-6f5f4cc0b6c6,PROD)", + "changeType": "UPSERT", + "aspectName": 
"browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d00f4ba6-707e-4684-20af-69eb47587cc2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,06c3e060-8133-4b58-9b53-a0fced25e056,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,3ade7817-ae27-259e-8e48-1570e7f932f6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,dfe2c02a-54b7-f7a2-39fc-c651da2f6ad8,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d8d4c0ea-3162-fa11-31e6-26675da44a38,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "urn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4fb670d5-3e19-9656-e684-74aa9729cf18,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "SubProject1" + }, + { + "id": "AbcJoinWorkbook" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity11,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", 
+ "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity10,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity7,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.campaignstable,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": 
"Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.task,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_request,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_req_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_cat_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sys_user_group,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.problem,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.incident,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ 
+ "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.cmdb_ci,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.payment,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.staff,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/tableau/tableau_mces_golden_deleted_stateful.json b/metadata-ingestion/tests/integration/tableau/tableau_mces_golden_deleted_stateful.json index fb9cca0b6a544..b39f7a181f01b 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_mces_golden_deleted_stateful.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_mces_golden_deleted_stateful.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" 
+ "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -143,7 +151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,7 +167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -192,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -207,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -254,7 +269,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -269,7 +285,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -284,7 +301,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -299,7 +317,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -314,7 +333,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -329,7 +349,24 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": true + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -344,7 +381,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -359,7 +397,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -374,7 +413,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -389,7 +429,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -404,7 +445,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -419,7 +461,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + 
"runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -434,7 +477,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -449,7 +493,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,7 +509,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -479,7 +525,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -494,7 +541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -509,7 +557,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -524,7 +573,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -539,7 +589,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -554,7 +605,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -569,7 +621,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -584,7 +637,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -599,7 +653,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -614,7 +669,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -629,7 +685,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -644,7 +701,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -659,7 +717,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -674,7 +733,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -689,7 +749,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,7 +765,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -719,7 +781,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -734,7 +797,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -749,7 +813,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -764,7 +829,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -779,7 +845,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -794,7 +861,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -809,7 +877,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -824,7 +893,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -839,7 +909,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -854,7 +925,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -869,7 +941,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -884,7 +957,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -899,7 +973,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -914,7 +989,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -929,7 +1005,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -944,7 +1021,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -959,7 +1037,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -974,7 +1053,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -989,7 +1069,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1004,7 +1085,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1019,7 +1101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1034,7 +1117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1049,7 +1133,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1064,7 +1149,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1079,7 +1165,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1094,7 +1181,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1109,7 +1197,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1124,7 +1213,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1139,7 +1229,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1154,7 +1245,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1169,7 +1261,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1184,7 +1277,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1199,7 +1293,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1214,7 +1309,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1229,7 +1325,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1244,7 +1341,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1259,7 +1357,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1274,7 +1373,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1289,7 +1389,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1304,7 +1405,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1319,7 +1421,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1334,7 +1437,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1349,7 +1453,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -1364,7 +1469,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1379,7 +1485,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1394,7 +1501,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1409,7 +1517,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1424,7 +1533,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1439,7 +1549,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/tableau/tableau_nested_project_mces_golden.json b/metadata-ingestion/tests/integration/tableau/tableau_nested_project_mces_golden.json index 87cba090cee77..179ea937a5ef9 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_nested_project_mces_golden.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_nested_project_mces_golden.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -143,7 +151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,7 +167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -192,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -207,7 +219,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -258,7 +273,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -273,7 +289,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -288,7 +305,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -305,7 +323,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -320,7 +339,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -340,7 +360,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -361,7 +382,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -376,7 +398,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -391,7 +414,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -408,7 +432,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -432,7 +457,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -447,7 +473,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -467,7 +494,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -488,7 +516,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -503,7 +532,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -518,7 +548,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -535,7 +566,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -559,7 +591,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ 
-578,7 +611,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -593,7 +627,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -613,7 +648,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -634,7 +670,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -649,7 +686,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -664,7 +702,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -681,7 +720,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -705,7 +745,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -720,7 +761,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -740,7 +782,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -761,7 +804,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -776,7 +820,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -791,7 +836,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -808,7 +854,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,7 +879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -847,7 +895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -867,7 +916,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -888,7 +938,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -903,7 +954,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -918,7 +970,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -935,7 +988,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -959,7 +1013,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -974,7 +1029,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -998,7 +1054,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1018,7 +1075,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1082,7 +1140,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1097,7 +1156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1269,7 +1329,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1293,7 +1354,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1355,7 +1417,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1370,7 +1433,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1828,7 +1892,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1852,7 +1917,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1914,7 +1980,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1929,7 +1996,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2439,7 +2507,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2463,7 +2532,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2525,7 +2595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2540,7 +2611,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2946,7 +3018,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2970,7 +3043,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3032,7 +3106,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3047,7 +3122,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3089,7 +3165,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3113,7 +3190,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3175,7 +3253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3190,7 +3269,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3400,7 +3480,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3424,7 +3505,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3495,7 +3577,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3510,7 +3593,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3595,7 +3679,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3619,7 +3704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3684,7 +3770,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3699,7 +3786,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4094,7 +4182,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4118,7 +4207,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4183,7 +4273,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4198,7 +4289,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4561,7 +4653,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4585,7 +4678,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4650,7 +4744,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4665,7 +4760,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5060,7 +5156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5084,7 +5181,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5146,7 +5244,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5161,7 +5260,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5440,7 +5540,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5464,7 +5565,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5529,7 +5631,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5544,7 +5647,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5800,7 +5904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5824,7 +5929,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5886,7 +5992,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5901,7 +6008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6183,7 +6291,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6207,7 +6316,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6272,7 +6382,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6287,7 +6398,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6621,7 +6733,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6645,7 +6758,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6710,7 +6824,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6725,7 +6840,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -7033,7 +7149,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7057,7 +7174,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7119,7 +7237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7134,7 +7253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7361,7 +7481,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7385,7 +7506,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7450,7 +7572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7465,7 +7588,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7721,7 +7845,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7745,7 +7870,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7807,7 +7933,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7822,7 +7949,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7971,7 +8099,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7995,7 +8124,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8060,7 +8190,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8075,7 +8206,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8409,7 +8541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8433,7 +8566,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8498,7 +8632,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8513,7 +8648,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8743,7 +8879,8 @@ }, "systemMetadata": { "lastObserved": 
1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8767,7 +8904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8832,7 +8970,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8847,7 +8986,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9103,7 +9243,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9127,7 +9268,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9192,7 +9334,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9207,7 +9350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9489,7 +9633,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9513,7 +9658,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9575,7 +9721,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9590,7 +9737,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9927,7 +10075,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9951,7 +10100,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10013,7 +10163,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10028,7 +10179,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10113,7 +10265,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10137,7 +10290,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10157,7 +10311,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10223,7 +10378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10238,7 +10394,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -10262,7 +10419,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10323,7 +10481,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10338,7 +10497,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10362,7 +10522,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10421,7 +10582,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10436,7 +10598,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10460,7 +10623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10533,7 +10697,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10548,7 +10713,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10572,7 +10738,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10915,7 +11082,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12705,7 +12873,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12722,7 +12891,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12737,7 +12907,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12761,7 +12932,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12777,7 +12949,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", "type": "TRANSFORMED" }, { @@ -12785,7 +12957,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", "type": "TRANSFORMED" } ], @@ -12861,7 +13033,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13103,7 +13276,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", 
+ "lastRunId": "no-run-id-provided" } }, { @@ -13120,7 +13294,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13135,7 +13310,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13159,7 +13335,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13191,7 +13368,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13605,7 +13783,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13622,7 +13801,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13637,7 +13817,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13661,7 +13842,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14095,7 +14277,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14844,7 +15027,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14861,7 +15045,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14876,7 +15061,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14900,7 +15086,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -15045,7 +15232,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21662,7 +21850,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21679,7 +21868,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21694,7 +21884,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21718,7 +21909,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21906,7 +22098,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25809,7 +26002,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25826,7 +26020,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25841,7 +26036,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25865,7 +26061,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -26015,7 +26212,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31180,7 +31378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31197,7 +31396,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31212,7 +31412,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31236,7 +31437,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31358,7 +31560,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31587,7 +31790,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31604,7 +31808,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31619,7 +31824,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31643,7 +31849,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31746,7 +31953,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31760,469 +31968,18 @@ } }, { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/tableau/default" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:jawadqu@gmail.com", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "name": "test publish datasource", - "description": "description for test publish datasource", - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "test", - "platform": "urn:li:dataPlatform:tableau", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "payment_date", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": 
{} - } - }, - "nativeDataType": "DATETIME", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:YEAR" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "staff_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_id", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "INTEGER", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "amount", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "REAL", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "Published SQL Query", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "TABLE", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, + "com.linkedin.pegasus2avro.common.GlobalTags": { + "tags": [ { - "fieldPath": "staff_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false + "tag": "urn:li:tag:tag on published datasource" } ] } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Published Data Source" - ] - } - }, - "systemMetadata": { - 
"lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", - "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "changeType": "UPSERT", - "aspectName": "upstreamLineage", - "aspect": { - "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", - "type": "TRANSFORMED" - } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" - ], - "transformOperation": "GroupField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" - ], - "transformOperation": "BinField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" - ], - "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" - ], - "transformOperation": "SetField", - "confidenceScore": 1.0 - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.DataPlatformInstance": { - "platform": "urn:li:dataPlatform:tableau" - } }, { "com.linkedin.pegasus2avro.common.BrowsePaths": { "paths": [ - "/prod/tableau/Samples" + "/prod/tableau/default" ] } }, @@ -32243,8 +32000,473 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": {}, - "name": "Superstore Datasource", - "description": "Description for Superstore dataset", + "name": "test publish datasource", + "description": "description for test publish datasource", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "test", + "platform": "urn:li:dataPlatform:tableau", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + 
"com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "payment_date", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "DATETIME", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:YEAR" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INTEGER", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "amount", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "REAL", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "Published SQL Query", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": {} + } + }, + "nativeDataType": "TABLE", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + 
"changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Published Data Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "type": "TRANSFORMED" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" + ], + "transformOperation": "GroupField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" + ], + "transformOperation": "BinField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" + ], + "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" + ], + "transformOperation": "SetField", + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.DataPlatformInstance": { + "platform": "urn:li:dataPlatform:tableau" + } + }, + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/tableau/Samples" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:jawadqu@gmail.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": {}, + "name": "Superstore Datasource", + "description": "Description for Superstore 
dataset", "tags": [] } }, @@ -33085,7 +33307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33102,7 +33325,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33117,7 +33341,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33137,7 +33362,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33152,7 +33378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33184,7 +33411,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33318,7 +33546,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33336,7 +33565,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33360,7 +33590,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33400,7 +33631,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33546,7 +33778,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33564,7 +33797,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33586,7 +33820,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33665,7 +33900,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33683,7 +33919,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33846,7 +34083,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33871,7 +34109,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34106,7 +34345,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34131,7 +34371,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34342,7 +34583,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34367,7 +34609,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34530,7 +34773,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34555,7 +34799,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34706,7 +34951,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34731,7 +34977,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34870,7 +35117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34895,7 +35143,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34986,7 +35235,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35011,7 +35261,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35078,7 +35329,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35100,7 +35352,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35167,7 +35420,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35189,7 +35443,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35484,7 +35739,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35506,7 +35762,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36295,7 +36552,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36320,7 +36578,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37203,7 +37462,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37228,7 +37488,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38183,7 +38444,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38208,7 +38470,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39031,7 +39294,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39056,7 +39320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39363,7 +39628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39388,7 +39654,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40235,7 +40502,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40260,7 +40528,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41251,7 +41520,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41276,7 +41546,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42135,7 +42406,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42160,7 +42432,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42181,7 +42454,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42203,7 +42477,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42224,7 +42499,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42246,7 +42522,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42266,7 +42543,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42288,7 +42566,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42303,7 +42582,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42318,7 +42598,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42333,7 +42614,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42348,7 +42630,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42363,7 
+42646,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42378,7 +42662,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42393,7 +42678,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42408,7 +42694,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42423,7 +42710,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42438,7 +42726,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42453,7 +42742,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42468,7 +42758,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42483,7 +42774,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42498,7 +42790,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42513,7 +42806,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42528,7 +42822,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42543,7 +42838,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42558,7 +42854,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42573,7 +42870,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42588,7 +42886,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42603,7 +42902,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42618,7 +42918,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42633,7 +42934,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42648,7 +42950,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42663,7 +42966,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42678,7 +42982,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42693,7 +42998,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42708,7 +43014,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42723,7 +43030,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42738,7 +43046,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42753,7 +43062,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42768,7 +43078,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42783,7 +43094,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42798,7 +43110,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42813,7 +43126,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42828,7 +43142,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42843,7 +43158,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42858,7 +43174,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42873,7 +43190,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42888,7 +43206,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42903,7 +43222,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42918,7 +43238,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42933,7 +43254,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42948,7 +43270,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42963,7 +43286,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -42978,7 +43302,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42993,7 +43318,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43008,7 +43334,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43023,7 +43350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43038,7 +43366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43053,7 +43382,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43068,7 +43398,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43083,7 +43414,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43098,7 +43430,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43113,7 +43446,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43128,7 +43462,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43143,7 +43478,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43158,7 +43494,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43173,7 +43510,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43188,7 +43526,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43203,7 +43542,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43218,7 +43558,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43233,7 +43574,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43248,7 +43590,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43263,7 +43606,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43278,7 +43622,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43293,7 +43638,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43308,7 +43654,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43323,7 +43670,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43338,7 +43686,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43353,7 +43702,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43368,7 +43718,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43383,7 +43734,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43398,7 +43750,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43413,7 +43766,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43428,7 +43782,1720 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + "json": { + "name": "tag on published datasource" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:252a054d4dd93cd657735aa46dd71370", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:beaddce9d1e89ab503ae6408fb77d4ce", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + 
"systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:595877512935338b94eac9e06cf20607", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:beaddce9d1e89ab503ae6408fb77d4ce", + "urn": "urn:li:container:beaddce9d1e89ab503ae6408fb77d4ce" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,222d1406-de0e-cd8d-0b94-9b45a0007e59)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,38130558-4194-2e2a-3046-c0d887829cb4)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": 
"urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,692a2da4-2a82-32c1-f713-63b8e4325d86)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f4317efd-c3e6-6ace-8fe6-e71b590bbbcc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8a6a269a-d6de-fae4-5050-513255b40ffc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c57a5574-db47-46df-677f-0b708dab14db)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e604255e-0573-3951-6db7-05bee48116c1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,20fc5eb7-81eb-aa18-8c39-af501c62d085)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": 
"urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b5351c1-535d-4a4a-1339-c51ddd6abf8a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b73b9dd-4ec7-75ca-f2e9-fa1984ca8b72)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,373c6466-bb0c-b319-8752-632456349261)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,53b8dc2f-8ada-51f7-7422-fe82e9b803cc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,58af9ecf-b839-da50-65e1-2e1fa20e3362)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,618b3e76-75c1-cb31-0c61-3f4890b72c31)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + 
"id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,721c3c41-7a2b-16a8-3281-6f948a44be96)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7ef184c1-5a41-5ec8-723e-ae44c20aa335)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7fbc77ba-0ab6-3727-0db3-d8402a804da5)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8385ea9a-0749-754f-7ad9-824433de2120)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b207c2f2-b675-32e3-2663-17bb836a018b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b679da5e-7d03-f01e-b2ea-01fb3c1926dc)", + "changeType": "UPSERT", + 
"aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c14973c2-e1c3-563a-a9c1-8a408396d22a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e70a540d-55ed-b9cc-5a3c-01ebe81a1274)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f76d3570-23b8-f74b-d85c-cc5484c2079c)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,130496dc-29ca-8a89-e32b-d73c4d8b65ff)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,8f7dd564-36b6-593f-3c6f-687ad06cd40b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": 
"urn:li:dashboard:(tableau,20e44c22-1ccd-301a-220c-7b6837d09a52)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,39b7a1de-6276-cfc7-9b59-1d22f3bbb06b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,5dcaaf46-e6fb-2548-e763-272a7ab2c9b1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,801c95e3-b07e-7bfe-3789-a561c7beccd3,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4644ccb1-2adc-cf26-c654-04ed1dcc7090,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,618c87db-5959-338b-bcc7-6f5f4cc0b6c6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + 
}, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d00f4ba6-707e-4684-20af-69eb47587cc2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,06c3e060-8133-4b58-9b53-a0fced25e056,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,3ade7817-ae27-259e-8e48-1570e7f932f6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,dfe2c02a-54b7-f7a2-39fc-c651da2f6ad8,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d8d4c0ea-3162-fa11-31e6-26675da44a38,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": 
"urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "urn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4fb670d5-3e19-9656-e684-74aa9729cf18,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity11,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity10,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity7,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email 
Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.campaignstable,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.task,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_request,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_req_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_cat_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sys_user_group,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.problem,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.incident,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.cmdb_ci,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + 
"aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.payment,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.staff,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/tableau/tableau_signout_timeout_mces_golden.json b/metadata-ingestion/tests/integration/tableau/tableau_signout_timeout_mces_golden.json index 77ea484abfa0e..9d0e8a14a1f8c 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_signout_timeout_mces_golden.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_signout_timeout_mces_golden.json @@ -15,7 +15,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30,7 +31,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -45,7 +47,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -62,7 +65,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -77,7 +81,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -96,7 +101,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -126,7 +133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -143,7 +151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -158,7 +167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -177,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -192,7 +203,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -207,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -224,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -239,7 +253,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -260,7 +275,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -275,7 +291,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -290,7 +307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -307,7 +325,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -331,7 +350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -346,7 +366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -366,7 +387,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -387,7 +409,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -402,7 +425,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -417,7 +441,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -434,7 +459,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +484,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -477,7 +504,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -492,7 +520,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,7 +541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -533,7 +563,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -548,7 +579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -563,7 +595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -580,7 +613,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -604,7 +638,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -619,7 +654,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -639,7 +675,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -660,7 +697,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -675,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -690,7 +729,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -707,7 +747,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -731,7 +772,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -746,7 +788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -766,7 +809,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -786,7 +830,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -850,7 +895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -865,7 +911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1037,7 +1084,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1061,7 +1109,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1123,7 +1172,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1138,7 +1188,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1596,7 +1647,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1620,7 +1672,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1682,7 +1735,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1697,7 +1751,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2207,7 +2262,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2231,7 +2287,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2293,7 +2350,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2308,7 +2366,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2714,7 +2773,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2738,7 +2798,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2800,7 +2861,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2815,7 +2877,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2857,7 +2920,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2881,7 +2945,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2943,7 +3008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -2958,7 +3024,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3168,7 +3235,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3192,7 +3260,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3263,7 +3332,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3278,7 +3348,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3363,7 +3434,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3387,7 +3459,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3452,7 +3525,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -3467,7 +3541,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3862,7 +3937,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3886,7 +3962,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3951,7 +4028,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -3966,7 +4044,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4329,7 +4408,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4353,7 +4433,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4418,7 +4499,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4433,7 +4515,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4828,7 +4911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4852,7 +4936,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4914,7 +4999,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -4929,7 +5015,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5208,7 +5295,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5232,7 +5320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5297,7 +5386,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5312,7 +5402,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5568,7 +5659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5592,7 +5684,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5654,7 +5747,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5669,7 +5763,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5951,7 +6046,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -5975,7 +6071,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6040,7 +6137,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6055,7 +6153,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6389,7 +6488,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6413,7 +6513,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6478,7 +6579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6493,7 +6595,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6801,7 +6904,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6825,7 +6929,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6887,7 +6992,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -6902,7 +7008,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7129,7 +7236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7153,7 +7261,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7218,7 +7327,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7233,7 +7343,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7489,7 +7600,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7513,7 +7625,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7575,7 +7688,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7590,7 +7704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7739,7 +7854,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7763,7 +7879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7828,7 +7945,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -7843,7 +7961,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8177,7 +8296,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8201,7 +8321,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8266,7 +8387,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8281,7 +8403,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8511,7 +8634,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8535,7 +8659,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8600,7 +8725,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8615,7 +8741,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8871,7 +8998,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8895,7 +9023,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8960,7 +9089,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -8975,7 +9105,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9257,7 +9388,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9281,7 +9413,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9343,7 +9476,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9358,7 +9492,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9695,7 +9830,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + 
"lastRunId": "no-run-id-provided" } }, { @@ -9719,7 +9855,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9781,7 +9918,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9796,7 +9934,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9881,7 +10020,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9905,7 +10045,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9925,7 +10066,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -9991,7 +10133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10006,7 +10149,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10030,7 +10174,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10091,7 +10236,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10106,7 +10252,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10130,7 +10277,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10189,7 +10337,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10204,7 +10353,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10228,7 +10378,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10301,7 +10452,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10316,7 +10468,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10340,7 +10493,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -10683,7 +10837,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12473,7 +12628,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12490,7 +12646,8 @@ }, 
"systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12505,7 +12662,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12529,7 +12687,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12545,7 +12704,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", "type": "TRANSFORMED" }, { @@ -12553,7 +12712,7 @@ "time": 0, "actor": "urn:li:corpuser:unknown" }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", "type": "TRANSFORMED" } ], @@ -12629,7 +12788,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12871,7 +13031,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12888,7 +13049,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12903,7 +13065,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12927,7 +13090,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -12959,7 +13123,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13373,7 +13538,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13390,7 +13556,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13405,7 +13572,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13429,7 +13597,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -13863,7 +14032,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14612,7 +14782,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14629,7 +14800,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14644,7 +14816,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14668,7 
+14841,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -14813,7 +14987,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21430,7 +21605,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21447,7 +21623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21462,7 +21639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21486,7 +21664,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -21674,7 +21853,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25577,7 +25757,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25594,7 +25775,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25609,7 +25791,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25633,7 +25816,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -25783,7 +25967,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30948,7 +31133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30965,7 +31151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -30980,7 +31167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31004,7 +31192,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31126,7 +31315,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31355,7 +31545,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31372,7 +31563,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31387,7 +31579,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31411,7 +31604,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31514,7 +31708,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31528,469 +31723,18 @@ } }, { - "com.linkedin.pegasus2avro.common.BrowsePaths": { - "paths": [ - "/prod/tableau/default" - ] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:jawadqu@gmail.com", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": {}, - "name": "test publish datasource", - "description": "description for test publish datasource", - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "test", - "platform": "urn:li:dataPlatform:tableau", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "" - } - }, - "fields": [ - { - "fieldPath": "payment_date", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "DATETIME", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:YEAR" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "staff_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_id", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "INTEGER", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "amount", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "REAL", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:SUM" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "Published SQL Query", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": {} - } - }, - "nativeDataType": "TABLE", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:MEASURE" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - 
}, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, - { - "fieldPath": "customer_first_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false - }, + "com.linkedin.pegasus2avro.common.GlobalTags": { + "tags": [ { - "fieldPath": "staff_last_name", - "nullable": false, - "description": "", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "STRING", - "recursive": false, - "globalTags": { - "tags": [ - { - "tag": "urn:li:tag:DIMENSION" - }, - { - "tag": "urn:li:tag:COLUMNFIELD" - }, - { - "tag": "urn:li:tag:COUNT" - } - ] - }, - "isPartOfKey": false + "tag": "urn:li:tag:tag on published datasource" } ] } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Published Data Source" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", - "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "changeType": "UPSERT", - "aspectName": "upstreamLineage", - "aspect": { - "json": { - "upstreams": [ - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", - "type": "TRANSFORMED" - }, - { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", - "type": "TRANSFORMED" - } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" - ], - "transformOperation": "GroupField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" - ], - "transformOperation": "HierarchyField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" - ], - "transformOperation": "BinField", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" - ], - "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" - ], - "downstreamType": "FIELD", - "downstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" - ], - "transformOperation": "SetField", - "confidenceScore": 1.0 - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "tableau-test" - } -}, -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.DataPlatformInstance": { - "platform": "urn:li:dataPlatform:tableau" - } }, { "com.linkedin.pegasus2avro.common.BrowsePaths": { "paths": [ - "/prod/tableau/Samples" + "/prod/tableau/default" ] } }, @@ -32011,8 +31755,473 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": {}, - "name": "Superstore Datasource", - "description": "Description for Superstore dataset", + "name": "test publish datasource", + "description": "description for test publish datasource", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "test", + "platform": "urn:li:dataPlatform:tableau", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "payment_date", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "DATETIME", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:YEAR" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "INTEGER", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "amount", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "REAL", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:SUM" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "Published SQL Query", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.ArrayType": {} + } + }, + "nativeDataType": "TABLE", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:MEASURE" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + 
"com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "customer_first_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "staff_last_name", + "nullable": false, + "description": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:DIMENSION" + }, + { + "tag": "urn:li:tag:COLUMNFIELD" + }, + { + "tag": "urn:li:tag:COUNT" + } + ] + }, + "isPartOfKey": false + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Published Data Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "upstreamLineage", + "aspect": { + "json": { + "upstreams": [ + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "type": "TRANSFORMED" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + "type": "TRANSFORMED" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),City)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Postal Code)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Country/Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Region)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),State)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Location)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)" + ], + "transformOperation": "GroupField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sub-Category)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Manufacturer)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product Name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Product)" + ], + "transformOperation": "HierarchyField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit %28bin%29)" + ], + "transformOperation": "BinField", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Sales)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit Ratio)" + ], + "transformOperation": "CalculatedFieldformula: SUM([Profit])/SUM([Sales])", + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD),Segment)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Segment)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Profit)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD),Top Customers by Profit)" + ], + "transformOperation": "SetField", + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.DataPlatformInstance": { + "platform": "urn:li:dataPlatform:tableau" + } + }, + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/prod/tableau/Samples" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:jawadqu@gmail.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": {}, + "name": "Superstore Datasource", + "description": "Description for Superstore dataset", "tags": [] } }, @@ -32853,7 +33062,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32870,7 +33080,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32885,7 +33096,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32905,7 +33117,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32920,7 +33133,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -32952,7 +33166,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33086,7 +33301,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33104,7 +33320,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33128,7 +33345,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33168,7 +33386,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33314,7 +33533,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33332,7 +33552,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33354,7 +33575,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": 
"tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33440,7 +33662,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33458,7 +33681,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33480,7 +33704,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33643,7 +33868,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33668,7 +33894,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33903,7 +34130,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -33928,7 +34156,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34139,7 +34368,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34164,7 +34394,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34327,7 +34558,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34352,7 +34584,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34503,7 +34736,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34528,7 +34762,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34667,7 +34902,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34692,7 +34928,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34783,7 +35020,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34808,7 +35046,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34875,7 +35114,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34897,7 +35137,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -34964,7 +35205,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -34986,7 +35228,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35281,7 +35524,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -35303,7 +35547,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36092,7 +36337,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -36117,7 +36363,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37000,7 +37247,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37025,7 +37273,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -37980,7 +38229,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38005,7 +38255,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38828,7 +39079,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -38853,7 +39105,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39160,7 +39413,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -39185,7 +39439,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40032,7 +40287,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -40057,7 +40313,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41048,7 +41305,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41073,7 +41331,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41932,7 +42191,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41957,7 +42217,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -41978,7 +42239,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42000,7 +42262,8 @@ }, "systemMetadata": { 
"lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42021,7 +42284,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42043,7 +42307,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42063,7 +42328,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42085,7 +42351,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42100,7 +42367,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42115,7 +42383,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42130,7 +42399,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42145,7 +42415,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42160,7 +42431,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42175,7 +42447,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42190,7 +42463,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42205,7 +42479,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42220,7 +42495,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42235,7 +42511,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42250,7 +42527,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42265,7 +42543,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42280,7 +42559,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42295,7 +42575,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42310,7 +42591,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42325,7 +42607,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": 
"tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42340,7 +42623,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42355,7 +42639,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42370,7 +42655,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42385,7 +42671,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42400,7 +42687,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42415,7 +42703,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42430,7 +42719,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42445,7 +42735,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42460,7 +42751,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42475,7 +42767,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42490,7 +42783,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42505,7 +42799,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42520,7 +42815,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42535,7 +42831,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42550,7 +42847,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42565,7 +42863,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42580,7 +42879,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42595,7 +42895,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42610,7 +42911,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42625,7 +42927,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42640,7 
+42943,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42655,7 +42959,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42670,7 +42975,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42685,7 +42991,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42700,7 +43007,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42715,7 +43023,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42730,7 +43039,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42745,7 +43055,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42760,7 +43071,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42775,7 +43087,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42790,7 +43103,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42805,7 +43119,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42820,7 +43135,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42835,7 +43151,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42850,7 +43167,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42865,7 +43183,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42880,7 +43199,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42895,7 +43215,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42910,7 +43231,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42925,7 +43247,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42940,7 +43263,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - 
"runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42955,7 +43279,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42970,7 +43295,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -42985,7 +43311,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43000,7 +43327,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43015,7 +43343,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43030,7 +43359,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43045,7 +43375,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43060,7 +43391,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43075,7 +43407,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43090,7 +43423,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43105,7 +43439,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43120,7 +43455,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43135,7 +43471,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43150,7 +43487,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43165,7 +43503,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43180,7 +43519,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43195,7 +43535,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43210,7 +43551,8 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } }, { @@ -43225,7 +43567,1697 @@ }, "systemMetadata": { "lastObserved": 1638860400000, - "runId": "tableau-test" + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + 
"json": { + "name": "tag on published datasource" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:252a054d4dd93cd657735aa46dd71370", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,222d1406-de0e-cd8d-0b94-9b45a0007e59)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + 
"lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,38130558-4194-2e2a-3046-c0d887829cb4)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,692a2da4-2a82-32c1-f713-63b8e4325d86)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f4317efd-c3e6-6ace-8fe6-e71b590bbbcc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8a6a269a-d6de-fae4-5050-513255b40ffc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c57a5574-db47-46df-677f-0b708dab14db)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e604255e-0573-3951-6db7-05bee48116c1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": 
"urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,20fc5eb7-81eb-aa18-8c39-af501c62d085)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b5351c1-535d-4a4a-1339-c51ddd6abf8a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,2b73b9dd-4ec7-75ca-f2e9-fa1984ca8b72)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,373c6466-bb0c-b319-8752-632456349261)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,53b8dc2f-8ada-51f7-7422-fe82e9b803cc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,58af9ecf-b839-da50-65e1-2e1fa20e3362)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + 
"id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,618b3e76-75c1-cb31-0c61-3f4890b72c31)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,721c3c41-7a2b-16a8-3281-6f948a44be96)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7ef184c1-5a41-5ec8-723e-ae44c20aa335)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,7fbc77ba-0ab6-3727-0db3-d8402a804da5)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,8385ea9a-0749-754f-7ad9-824433de2120)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b207c2f2-b675-32e3-2663-17bb836a018b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": 
"urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,b679da5e-7d03-f01e-b2ea-01fb3c1926dc)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,c14973c2-e1c3-563a-a9c1-8a408396d22a)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,e70a540d-55ed-b9cc-5a3c-01ebe81a1274)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,f76d3570-23b8-f74b-d85c-cc5484c2079c)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(tableau,130496dc-29ca-8a89-e32b-d73c4d8b65ff)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,8f7dd564-36b6-593f-3c6f-687ad06cd40b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + 
{ + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,20e44c22-1ccd-301a-220c-7b6837d09a52)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,39b7a1de-6276-cfc7-9b59-1d22f3bbb06b)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(tableau,5dcaaf46-e6fb-2548-e763-272a7ab2c9b1)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,801c95e3-b07e-7bfe-3789-a561c7beccd3,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a", + "urn": "urn:li:container:008e111aa1d250dd52e0fd5d4b307b1a" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4644ccb1-2adc-cf26-c654-04ed1dcc7090,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:tableau,618c87db-5959-338b-bcc7-6f5f4cc0b6c6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d00f4ba6-707e-4684-20af-69eb47587cc2,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,06c3e060-8133-4b58-9b53-a0fced25e056,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,3ade7817-ae27-259e-8e48-1570e7f932f6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,dfe2c02a-54b7-f7a2-39fc-c651da2f6ad8,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d", + "urn": "urn:li:container:047691e9c16bec8fb08e1df0f5d71c4d" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,d8d4c0ea-3162-fa11-31e6-26675da44a38,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9", + "urn": 
"urn:li:container:94e6e84b66f9ee8c70c22f06cfbad6a9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,00cce29f-b561-bb41-3557-8e19660bb5dd,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,6cbbeeb2-9f3a-00f6-2342-17139d6e97ae,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873", + "urn": "urn:li:container:d2dcd6bd1bb954d62f1cfc68332ee873" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,22b0b4c3-6b85-713d-a161-5a87fdd78f40,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b", + "urn": "urn:li:container:5ec314b9630974ec084f5dfd3849f87b" + }, + { + "id": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1", + "urn": "urn:li:container:fad3de4b86519c3edeb685215fe0bab1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,4fb670d5-3e19-9656-e684-74aa9729cf18,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:tableau,10c6297d-0dbd-44f1-b1ba-458bea446513,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "SubProject1" + }, + { + "id": "AbcJoinWorkbook" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity6,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity11,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": 
{ + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity10,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.activity7,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:marketo-marketo,marketo.campaignstable,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Email Performance by Campaign" + }, + { + "id": "Marketo" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Dvdrental Workbook" + }, + { + "id": "actor+ (dvdrental)" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.people,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.returns,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:external,sample - superstore%2C %28new%29.xls.orders,PROD)", + 
"changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Samples" + }, + { + "id": "Superstore Datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.task,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_request,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_req_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sc_cat_item,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Requests" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.sys_user_group,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.problem,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Problems" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.incident,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } 
+ }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:webdata-direct:servicenowitsm-servicenowitsm,ven01911.cmdb_ci,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Executive Dashboard" + }, + { + "id": "Incidents" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.payment,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "Customer Payment Query" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,demo_postgres_instance.dvdrental.public.staff,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "default" + }, + { + "id": "test publish datasource" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/tableau/tableau_with_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/tableau/tableau_with_platform_instance_mces_golden.json index f94cafb107317..f3dedb2cc1ce3 100644 --- a/metadata-ingestion/tests/integration/tableau/tableau_with_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/tableau/tableau_with_platform_instance_mces_golden.json @@ -31855,6 +31855,15 @@ "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:tableau,acryl_site1)" } }, + { + "com.linkedin.pegasus2avro.common.GlobalTags": { + "tags": [ + { + "tag": "urn:li:tag:tag on published datasource" + } + ] + } + }, { "com.linkedin.pegasus2avro.common.BrowsePaths": { "paths": [ @@ -43858,6 +43867,22 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:tag on published datasource", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + "json": { + "name": "tag on published datasource" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "tableau-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "container", "entityUrn": "urn:li:container:66fa1e14620418276c85f3b552c7ec65", From 8559ebae200faafd84ccd35e5943590fc0de9b2a Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Tue, 30 Jan 2024 12:49:07 +0530 Subject: [PATCH 458/792] feat(ingest/okta): option to only ingest users from filtered groups (#9738) --- .../src/datahub/ingestion/source/identity/okta.py | 13 +++++++++++++ 1 file 
changed, 13 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py b/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py index 5e8413bbb6f30..c9b0e4d7de467 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py +++ b/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py @@ -140,6 +140,10 @@ class OktaConfig(StatefulIngestionConfigBase, ConfigModel): default=None, description="Okta search expression (not regex) for ingesting groups. Only one of `okta_groups_filter` and `okta_groups_search` can be set. See (https://developer.okta.com/docs/reference/api/groups/#list-groups-with-search) for more info.", ) + skip_users_without_a_group: bool = Field( + default=False, + description="Whether to only ingest users that are members of groups. If this is set to False, all users will be ingested regardless of group membership.", + ) # Configuration for stateful ingestion stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = Field( @@ -387,6 +391,15 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: datahub_corp_user_snapshot.urn ] ) + if ( + self.config.skip_users_without_a_group + and len(datahub_group_membership.groups) == 0 + ): + logger.debug( + f"Filtering {datahub_corp_user_snapshot.urn} due to group filter" + ) + self.report.report_filtered(datahub_corp_user_snapshot.urn) + continue assert datahub_group_membership is not None datahub_corp_user_snapshot.aspects.append(datahub_group_membership) mce = MetadataChangeEvent(proposedSnapshot=datahub_corp_user_snapshot) From aa98f4897a358b6894d22bdfa9f8d4fea4538801 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Tue, 30 Jan 2024 17:54:09 +0530 Subject: [PATCH 459/792] feat(cli): add sibling helper CLI command (#9744) --- .../src/datahub/cli/specific/dataset_cli.py | 40 ++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py b/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py index c702d0ec28961..1c55651f4ff94 100644 --- a/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py +++ b/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py @@ -1,12 +1,15 @@ import json import logging from pathlib import Path +from typing import Set, Tuple import click from click_default_group import DefaultGroup from datahub.api.entities.dataset.dataset import Dataset -from datahub.ingestion.graph.client import get_default_graph +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.com.linkedin.pegasus2avro.common import Siblings from datahub.telemetry import telemetry from datahub.upgrade import upgrade @@ -65,3 +68,38 @@ def get(urn: str, to_file: str) -> None: click.secho(f"Dataset yaml written to {to_file}", fg="green") else: click.secho(f"Dataset {urn} does not exist") + + +@dataset.command() +@click.option("--urn", required=True, type=str, help="URN of primary sibling") +@click.option( + "--sibling-urns", + required=True, + type=str, + help="URN of secondary sibling(s)", + multiple=True, +) +@telemetry.with_telemetry() +def add_sibling(urn: str, sibling_urns: Tuple[str]) -> None: + all_urns = set() + all_urns.add(urn) + for sibling_urn in sibling_urns: + all_urns.add(sibling_urn) + with get_default_graph() as graph: + for _urn in all_urns: + _emit_sibling(graph, urn, _urn, all_urns) + + +def _emit_sibling( + graph: DataHubGraph, primary_urn: str, 
urn: str, all_urns: Set[str] +) -> None: + siblings = [] + for sibling_urn in all_urns: + if sibling_urn != urn: + siblings.append(sibling_urn) + graph.emit( + MetadataChangeProposalWrapper( + entityUrn=urn, + aspect=Siblings(primary=primary_urn == urn, siblings=sorted(siblings)), + ) + ) From 1af9b873266cd0cf25b817688188fcd0ec887e73 Mon Sep 17 00:00:00 2001 From: Davi Arnaut Date: Tue, 30 Jan 2024 04:16:31 -1000 Subject: [PATCH 460/792] chore(gms): allow adjusting the log level for gms debug log file (#9743) --- metadata-service/war/src/main/resources/logback.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-service/war/src/main/resources/logback.xml b/metadata-service/war/src/main/resources/logback.xml index 111ec627a6d43..2594f8c48ab53 100644 --- a/metadata-service/war/src/main/resources/logback.xml +++ b/metadata-service/war/src/main/resources/logback.xml @@ -47,7 +47,7 @@ 1 - DEBUG + ${logging.appender.debug_file.level:-DEBUG} ACCEPT DENY From 69d0ba181223c16ee2800bf093236d1acde5395b Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 30 Jan 2024 08:19:06 -0600 Subject: [PATCH 461/792] build(ingestion-smoke): fix smoke dockerfile (#9724) --- .../datahub-ingestion-base/smoke.Dockerfile | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/docker/datahub-ingestion-base/smoke.Dockerfile b/docker/datahub-ingestion-base/smoke.Dockerfile index 5c6738720e05e..34654faaad729 100644 --- a/docker/datahub-ingestion-base/smoke.Dockerfile +++ b/docker/datahub-ingestion-base/smoke.Dockerfile @@ -1,6 +1,6 @@ FROM acryldata/datahub-ingestion-base as base -RUN apt-get update && apt-get install -y \ +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ sudo \ python3-dev \ libgtk2.0-0 \ @@ -13,14 +13,16 @@ RUN apt-get update && apt-get install -y \ libasound2 \ libxtst6 \ xauth \ - xvfb - -RUN DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-17-jdk + xvfb \ + openjdk-17-jdk && \ + rm -rf /var/lib/apt/lists/* /var/cache/apk/* COPY . 
/datahub-src ARG RELEASE_VERSION -RUN cd /datahub-src/metadata-ingestion && \ - sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ - cat src/datahub/__init__.py && \ - cd ../ && \ - ./gradlew :metadata-ingestion:installAll +RUN cd /datahub-src && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" metadata-ingestion/src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ + cat metadata-ingestion/src/datahub/__init__.py && \ + ./gradlew :metadata-ingestion:codegen && \ + pip install file:metadata-ingestion-modules/airflow-plugin#egg=acryl-datahub-airflow-plugin file:metadata-ingestion#egg=acryl-datahub + From 3f9490d9018d4505eac413f92a9301cfca5c0ae7 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 31 Jan 2024 14:42:40 +0530 Subject: [PATCH 462/792] feat(cli): option to init via username password, add lint for smoke-test (#9675) Co-authored-by: Harshal Sheth --- .github/workflows/docker-unified.yml | 12 + docs/cli.md | 2 +- .../src/datahub/cli/cli_utils.py | 207 ++++++------ .../src/datahub/cli/config_utils.py | 102 ++++++ .../src/datahub/cli/docker_cli.py | 2 +- .../src/datahub/cli/env_utils.py | 11 + .../src/datahub/cli/ingest_cli.py | 19 +- .../src/datahub/cli/lite_cli.py | 2 +- .../src/datahub/emitter/mcp_patch_builder.py | 5 +- metadata-ingestion/src/datahub/entrypoints.py | 36 +- .../integrations/great_expectations/action.py | 2 +- .../src/datahub/specific/chart.py | 2 +- .../src/datahub/specific/dashboard.py | 2 +- .../src/datahub/specific/datajob.py | 2 +- .../src/datahub/specific/dataproduct.py | 1 - .../src/datahub/specific/dataset.py | 2 +- .../src/datahub/telemetry/telemetry.py | 3 +- .../tests/unit/test_cli_utils.py | 21 ++ smoke-test/.gitignore | 1 + smoke-test/build.gradle | 35 +- smoke-test/pyproject.toml | 46 +++ smoke-test/requirements.txt | 10 +- smoke-test/run-quickstart.sh | 4 +- .../timeseries/dataset_profile_gen.py | 8 +- .../tests/assertions/assertions_test.py | 40 ++- smoke-test/tests/browse/browse_test.py | 7 +- .../cli/{datahub-cli.py => datahub_cli.py} | 7 +- smoke-test/tests/cli/datahub_graph_test.py | 11 +- .../cli/delete_cmd/test_timeseries_delete.py | 8 +- .../ingest_cmd/test_timeseries_rollback.py | 2 - .../user_groups_cmd}/__init__.py | 0 .../cli/user_groups_cmd/test_group_cmd.py | 9 +- smoke-test/tests/consistency_utils.py | 2 +- .../tests/containers/containers_test.py | 9 +- smoke-test/tests/cypress/integration_test.py | 24 +- .../tests/dataproduct/test_dataproduct.py | 26 +- smoke-test/tests/delete/delete_test.py | 16 +- .../tests/deprecation/deprecation_test.py | 8 +- smoke-test/tests/domains/domains_test.py | 12 +- smoke-test/tests/lineage/test_lineage.py | 7 +- .../__init__.py | 0 .../managed_ingestion_test.py | 13 +- smoke-test/tests/patch/common_patch_tests.py | 86 ++--- .../tests/patch/test_datajob_patches.py | 23 +- .../tests/patch/test_dataset_patches.py | 42 +-- smoke-test/tests/policies/test_policies.py | 10 +- smoke-test/tests/privileges/__init__.py | 0 .../tests/privileges/test_privileges.py | 275 ++++++++------- smoke-test/tests/privileges/utils.py | 101 +++--- .../tests/read_only/test_services_up.py | 8 +- .../setup/lineage/ingest_data_job_change.py | 35 +- .../lineage/ingest_dataset_join_change.py | 27 +- 
.../lineage/ingest_input_datasets_change.py | 34 +-
 .../setup/lineage/ingest_time_lineage.py | 12 +-
 smoke-test/tests/setup/lineage/utils.py | 60 ++--
 .../test_structured_properties.py | 315 +++++++++---------
 smoke-test/tests/tags_and_terms/__init__.py | 0
 .../data.json | 0
 .../tags_and_terms_test.py | 12 +-
 smoke-test/tests/telemetry/telemetry_test.py | 4 +-
 smoke-test/tests/test_stateful_ingestion.py | 16 +-
 smoke-test/tests/tests/tests_test.py | 12 +-
 smoke-test/tests/timeline/timeline_test.py | 9 +-
 .../tokens/revokable_access_token_test.py | 60 ++--
 smoke-test/tests/utilities/file_emitter.py | 11 +-
 smoke-test/tests/utils.py | 31 +-
 smoke-test/tests/views/views_test.py | 12 +-
 67 files changed, 1161 insertions(+), 772 deletions(-)
 create mode 100644 metadata-ingestion/src/datahub/cli/config_utils.py
 create mode 100644 metadata-ingestion/src/datahub/cli/env_utils.py
 create mode 100644 smoke-test/pyproject.toml
 rename smoke-test/tests/cli/{datahub-cli.py => datahub_cli.py} (93%)
 rename smoke-test/tests/{managed-ingestion => cli/user_groups_cmd}/__init__.py (100%)
 rename smoke-test/tests/{tags-and-terms => managed_ingestion}/__init__.py (100%)
 rename smoke-test/tests/{managed-ingestion => managed_ingestion}/managed_ingestion_test.py (98%)
 create mode 100644 smoke-test/tests/privileges/__init__.py
 create mode 100644 smoke-test/tests/tags_and_terms/__init__.py
 rename smoke-test/tests/{tags-and-terms => tags_and_terms}/data.json (100%)
 rename smoke-test/tests/{tags-and-terms => tags_and_terms}/tags_and_terms_test.py (98%)

diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml
index 5e9112726b010..24490ef83ae5d 100644
--- a/.github/workflows/docker-unified.yml
+++ b/.github/workflows/docker-unified.yml
@@ -64,6 +64,18 @@ jobs:
     steps:
       - name: Check out the repo
         uses: hsheth2/sane-checkout-action@v1
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+          cache: "pip"
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
+      - name: Run lint on smoke test
+        run: |
+          ./gradlew :smoke-test:lint
       - name: Compute Tag
         id: tag
         run: |
diff --git a/docs/cli.md b/docs/cli.md
index 927270b42259d..3f67f1de6204d 100644
--- a/docs/cli.md
+++ b/docs/cli.md
@@ -180,7 +180,7 @@ failure_log:
 ### init
 
 The init command is used to tell `datahub` about where your DataHub instance is located. The CLI will point to localhost DataHub by default.
-Running `datahub init` will allow you to customize the datahub instance you are communicating with.
+Running `datahub init` will allow you to customize the datahub instance you are communicating with. It has an optional `--use-password` flag which initialises the config with a username and password. We expect this to be used mainly by admins, since most organisations use SSO and will not have passwords available.
 
 **_Note_**: Provide your GMS instance's host when the prompt asks you for the DataHub host.
diff --git a/metadata-ingestion/src/datahub/cli/cli_utils.py b/metadata-ingestion/src/datahub/cli/cli_utils.py
index 8ac9a101121be..1bb3b01e078dd 100644
--- a/metadata-ingestion/src/datahub/cli/cli_utils.py
+++ b/metadata-ingestion/src/datahub/cli/cli_utils.py
@@ -9,12 +9,11 @@
 import click
 import requests
-import yaml
 from deprecated import deprecated
-from pydantic import BaseModel, ValidationError
 from requests.models import Response
 from requests.sessions import Session
 
+from datahub.cli import config_utils
 from datahub.emitter.aspect import ASPECT_MAP, TIMESERIES_ASPECT_MAP
 from datahub.emitter.request_helper import make_curl_command
 from datahub.emitter.serialization_helper import post_json_transform
@@ -23,13 +22,6 @@
 
 log = logging.getLogger(__name__)
 
-DEFAULT_GMS_HOST = "http://localhost:8080"
-CONDENSED_DATAHUB_CONFIG_PATH = "~/.datahubenv"
-DATAHUB_CONFIG_PATH = os.path.expanduser(CONDENSED_DATAHUB_CONFIG_PATH)
-
-DATAHUB_ROOT_FOLDER = os.path.expanduser("~/.datahub")
-
-ENV_SKIP_CONFIG = "DATAHUB_SKIP_CONFIG"
 ENV_METADATA_HOST_URL = "DATAHUB_GMS_URL"
 ENV_METADATA_HOST = "DATAHUB_GMS_HOST"
 ENV_METADATA_PORT = "DATAHUB_GMS_PORT"
@@ -45,25 +37,6 @@
 # For the methods that aren't duplicates, that logic should be moved to the client.
 
 
-class GmsConfig(BaseModel):
-    server: str
-    token: Optional[str] = None
-
-
-class DatahubConfig(BaseModel):
-    gms: GmsConfig
-
-
-def get_boolean_env_variable(key: str, default: bool = False) -> bool:
-    value = os.environ.get(key)
-    if value is None:
-        return default
-    elif value.lower() in ("true", "1"):
-        return True
-    else:
-        return False
-
-
 def set_env_variables_override_config(url: str, token: Optional[str]) -> None:
     """Should be used to override the config when using rest emitter"""
     config_override[ENV_METADATA_HOST_URL] = url
@@ -71,78 +44,6 @@ def set_env_variables_override_config(url: str, token: Optional[str]) -> None:
     config_override[ENV_METADATA_TOKEN] = token
 
 
-def persist_datahub_config(config: dict) -> None:
-    with open(DATAHUB_CONFIG_PATH, "w+") as outfile:
-        yaml.dump(config, outfile, default_flow_style=False)
-    return None
-
-
-def write_gms_config(
-    host: str, token: Optional[str], merge_with_previous: bool = True
-) -> None:
-    config = DatahubConfig(gms=GmsConfig(server=host, token=token))
-    if merge_with_previous:
-        try:
-            previous_config = get_client_config(as_dict=True)
-            assert isinstance(previous_config, dict)
-        except Exception as e:
-            # ok to fail on this
-            previous_config = {}
-            log.debug(
-                f"Failed to retrieve config from file {DATAHUB_CONFIG_PATH}: {e}. This isn't fatal."
diff --git a/metadata-ingestion/src/datahub/cli/cli_utils.py b/metadata-ingestion/src/datahub/cli/cli_utils.py index 8ac9a101121be..1bb3b01e078dd 100644 --- a/metadata-ingestion/src/datahub/cli/cli_utils.py +++ b/metadata-ingestion/src/datahub/cli/cli_utils.py @@ -9,12 +9,11 @@ import click import requests -import yaml from deprecated import deprecated -from pydantic import BaseModel, ValidationError from requests.models import Response from requests.sessions import Session +from datahub.cli import config_utils from datahub.emitter.aspect import ASPECT_MAP, TIMESERIES_ASPECT_MAP from datahub.emitter.request_helper import make_curl_command from datahub.emitter.serialization_helper import post_json_transform @@ -23,13 +22,6 @@ log = logging.getLogger(__name__) -DEFAULT_GMS_HOST = "http://localhost:8080" -CONDENSED_DATAHUB_CONFIG_PATH = "~/.datahubenv" -DATAHUB_CONFIG_PATH = os.path.expanduser(CONDENSED_DATAHUB_CONFIG_PATH) - -DATAHUB_ROOT_FOLDER = os.path.expanduser("~/.datahub") - -ENV_SKIP_CONFIG = "DATAHUB_SKIP_CONFIG" ENV_METADATA_HOST_URL = "DATAHUB_GMS_URL" ENV_METADATA_HOST = "DATAHUB_GMS_HOST" ENV_METADATA_PORT = "DATAHUB_GMS_PORT" @@ -45,25 +37,6 @@ # For the methods that aren't duplicates, that logic should be moved to the client. -class GmsConfig(BaseModel): - server: str - token: Optional[str] = None - - -class DatahubConfig(BaseModel): - gms: GmsConfig - - -def get_boolean_env_variable(key: str, default: bool = False) -> bool: - value = os.environ.get(key) - if value is None: - return default - elif value.lower() in ("true", "1"): - return True - else: - return False - - def set_env_variables_override_config(url: str, token: Optional[str]) -> None: """Should be used to override the config when using rest emitter""" config_override[ENV_METADATA_HOST_URL] = url @@ -71,78 +44,6 @@ def set_env_variables_override_config(url: str, token: Optional[str]) -> None: config_override[ENV_METADATA_TOKEN] = token -def persist_datahub_config(config: dict) -> None: - with open(DATAHUB_CONFIG_PATH, "w+") as outfile: - yaml.dump(config, outfile, default_flow_style=False) - return None - - -def write_gms_config( - host: str, token: Optional[str], merge_with_previous: bool = True -) -> None: - config = DatahubConfig(gms=GmsConfig(server=host, token=token)) - if merge_with_previous: - try: - previous_config = get_client_config(as_dict=True) - assert isinstance(previous_config, dict) - except Exception as e: - # ok to fail on this - previous_config = {} - log.debug( - f"Failed to retrieve config from file {DATAHUB_CONFIG_PATH}: {e}. This isn't fatal." 
- ) - config_dict = {**previous_config, **config.dict()} - else: - config_dict = config.dict() - persist_datahub_config(config_dict) - - -def should_skip_config() -> bool: - return get_boolean_env_variable(ENV_SKIP_CONFIG, False) - - -def ensure_datahub_config() -> None: - if not os.path.isfile(DATAHUB_CONFIG_PATH): - click.secho( - f"No {CONDENSED_DATAHUB_CONFIG_PATH} file found, generating one for you...", - bold=True, - ) - write_gms_config(DEFAULT_GMS_HOST, None) - - -def get_client_config(as_dict: bool = False) -> Union[Optional[DatahubConfig], dict]: - with open(DATAHUB_CONFIG_PATH, "r") as stream: - try: - config_json = yaml.safe_load(stream) - if as_dict: - return config_json - try: - datahub_config = DatahubConfig.parse_obj(config_json) - return datahub_config - except ValidationError as e: - click.echo( - f"Received error, please check your {CONDENSED_DATAHUB_CONFIG_PATH}" - ) - click.echo(e, err=True) - sys.exit(1) - except yaml.YAMLError as exc: - click.secho(f"{DATAHUB_CONFIG_PATH} malformed, error: {exc}", bold=True) - return None - - -def get_details_from_config(): - datahub_config = get_client_config(as_dict=False) - assert isinstance(datahub_config, DatahubConfig) - if datahub_config is not None: - gms_config = datahub_config.gms - - gms_host = gms_config.server - gms_token = gms_config.token - return gms_host, gms_token - else: - return None, None - - def get_details_from_env() -> Tuple[Optional[str], Optional[str]]: host = os.environ.get(ENV_METADATA_HOST) port = os.environ.get(ENV_METADATA_PORT) @@ -178,12 +79,12 @@ def get_url_and_token(): if len(config_override.keys()) > 0: gms_host = config_override.get(ENV_METADATA_HOST_URL) gms_token = config_override.get(ENV_METADATA_TOKEN) - elif should_skip_config(): + elif config_utils.should_skip_config(): gms_host = gms_host_env gms_token = gms_token_env else: - ensure_datahub_config() - gms_host_conf, gms_token_conf = get_details_from_config() + config_utils.ensure_datahub_config() + gms_host_conf, gms_token_conf = config_utils.get_details_from_config() gms_host = first_non_null([gms_host_env, gms_host_conf]) gms_token = first_non_null([gms_token_env, gms_token_conf]) return gms_host, gms_token @@ -253,14 +154,18 @@ def parse_run_restli_response(response: requests.Response) -> dict: exit() if not isinstance(response_json, dict): - click.echo(f"Received error, please check your {CONDENSED_DATAHUB_CONFIG_PATH}") + click.echo( + f"Received error, please check your {config_utils.CONDENSED_DATAHUB_CONFIG_PATH}" + ) click.echo() click.echo(response_json) exit() summary = response_json.get("value") if not isinstance(summary, dict): - click.echo(f"Received error, please check your {CONDENSED_DATAHUB_CONFIG_PATH}") + click.echo( + f"Received error, please check your {config_utils.CONDENSED_DATAHUB_CONFIG_PATH}" + ) click.echo() click.echo(response_json) exit() @@ -686,3 +591,95 @@ def command(ctx: click.Context) -> None: ctx.exit(1) return command + + +def get_session_login_as( + username: str, password: str, frontend_url: str +) -> requests.Session: + session = requests.Session() + headers = { + "Content-Type": "application/json", + } + system_auth = get_system_auth() + if system_auth is not None: + session.headers.update({"Authorization": system_auth}) + else: + data = '{"username":"' + username + '", "password":"' + password + '"}' + response = session.post(f"{frontend_url}/logIn", headers=headers, data=data) + response.raise_for_status() + return session + + +def _ensure_valid_gms_url_acryl_cloud(url: str) -> str: + if "acryl.io" 
not in url:
+        return url
+    if url.startswith("http://"):
+        url = url.replace("http://", "https://")
+    if url.endswith("acryl.io"):
+        url = f"{url}/gms"
+    return url
+
+
+def fixup_gms_url(url: str) -> str:
+    if url is None:
+        return ""
+    if url.endswith("/"):
+        url = url.rstrip("/")
+    url = _ensure_valid_gms_url_acryl_cloud(url)
+    return url
+
+
+def guess_frontend_url_from_gms_url(gms_url: str) -> str:
+    gms_url = fixup_gms_url(gms_url)
+    url = gms_url
+    if url.endswith("/gms"):
+        # str.rstrip strips a character set, not a suffix, so slice the "/gms" suffix off instead.
+        url = gms_url[: -len("/gms")]
+    if url.endswith("8080"):
+        url = url[:-4] + "9002"
+    return url
+
+
+def generate_access_token(
+    username: str,
+    password: str,
+    gms_url: str,
+    token_name: Optional[str] = None,
+    validity: str = "ONE_HOUR",
+) -> Tuple[str, str]:
+    frontend_url = guess_frontend_url_from_gms_url(gms_url)
+    session = get_session_login_as(
+        username=username,
+        password=password,
+        frontend_url=frontend_url,
+    )
+    now = datetime.now()
+    timestamp = now.astimezone().isoformat()
+    if token_name is None:
+        token_name = f"cli token {timestamp}"
+    json = {
+        "query": """mutation createAccessToken($input: CreateAccessTokenInput!) {
+            createAccessToken(input: $input) {
+              accessToken
+              metadata {
+                id
+                actorUrn
+                ownerUrn
+                name
+                description
+              }
+            }
+        }""",
+        "variables": {
+            "input": {
+                "type": "PERSONAL",
+                "actorUrn": f"urn:li:corpuser:{username}",
+                "duration": validity,
+                "name": token_name,
+            }
+        },
+    }
+    response = session.post(f"{frontend_url}/api/v2/graphql", json=json)
+    response.raise_for_status()
+    return token_name, response.json().get("data", {}).get("createAccessToken", {}).get(
+        "accessToken", None
+    )
diff --git a/metadata-ingestion/src/datahub/cli/config_utils.py b/metadata-ingestion/src/datahub/cli/config_utils.py
new file mode 100644
index 0000000000000..4b69aec8081ab
--- /dev/null
+++ b/metadata-ingestion/src/datahub/cli/config_utils.py
@@ -0,0 +1,102 @@
+import logging
+import os
+import sys
+from typing import Optional, Union
+
+import click
+import yaml
+from pydantic import BaseModel, ValidationError
+
+from datahub.cli.env_utils import get_boolean_env_variable
+
+__help__ = (
+    "Helper methods for manipulating the config file on the local system."
+)
+log = logging.getLogger(__name__)
+
+DEFAULT_GMS_HOST = "http://localhost:8080"
+CONDENSED_DATAHUB_CONFIG_PATH = "~/.datahubenv"
+DATAHUB_CONFIG_PATH = os.path.expanduser(CONDENSED_DATAHUB_CONFIG_PATH)
+DATAHUB_ROOT_FOLDER = os.path.expanduser("~/.datahub")
+ENV_SKIP_CONFIG = "DATAHUB_SKIP_CONFIG"
+
+
+class GmsConfig(BaseModel):
+    server: str
+    token: Optional[str] = None
+
+
+class DatahubConfig(BaseModel):
+    gms: GmsConfig
+
+
+def persist_datahub_config(config: dict) -> None:
+    with open(DATAHUB_CONFIG_PATH, "w+") as outfile:
+        yaml.dump(config, outfile, default_flow_style=False)
+    return None
+
+
+def write_gms_config(
+    host: str, token: Optional[str], merge_with_previous: bool = True
+) -> None:
+    config = DatahubConfig(gms=GmsConfig(server=host, token=token))
+    if merge_with_previous:
+        try:
+            previous_config = get_client_config(as_dict=True)
+            assert isinstance(previous_config, dict)
+        except Exception as e:
+            # ok to fail on this
+            previous_config = {}
+            log.debug(
+                f"Failed to retrieve config from file {DATAHUB_CONFIG_PATH}: {e}. This isn't fatal."
+ ) + config_dict = {**previous_config, **config.dict()} + else: + config_dict = config.dict() + persist_datahub_config(config_dict) + + +def get_details_from_config(): + datahub_config = get_client_config(as_dict=False) + assert isinstance(datahub_config, DatahubConfig) + if datahub_config is not None: + gms_config = datahub_config.gms + + gms_host = gms_config.server + gms_token = gms_config.token + return gms_host, gms_token + else: + return None, None + + +def should_skip_config() -> bool: + return get_boolean_env_variable(ENV_SKIP_CONFIG, False) + + +def ensure_datahub_config() -> None: + if not os.path.isfile(DATAHUB_CONFIG_PATH): + click.secho( + f"No {CONDENSED_DATAHUB_CONFIG_PATH} file found, generating one for you...", + bold=True, + ) + write_gms_config(DEFAULT_GMS_HOST, None) + + +def get_client_config(as_dict: bool = False) -> Union[Optional[DatahubConfig], dict]: + with open(DATAHUB_CONFIG_PATH, "r") as stream: + try: + config_json = yaml.safe_load(stream) + if as_dict: + return config_json + try: + datahub_config = DatahubConfig.parse_obj(config_json) + return datahub_config + except ValidationError as e: + click.echo( + f"Received error, please check your {CONDENSED_DATAHUB_CONFIG_PATH}" + ) + click.echo(e, err=True) + sys.exit(1) + except yaml.YAMLError as exc: + click.secho(f"{DATAHUB_CONFIG_PATH} malformed, error: {exc}", bold=True) + return None diff --git a/metadata-ingestion/src/datahub/cli/docker_cli.py b/metadata-ingestion/src/datahub/cli/docker_cli.py index 0e0bc37c61573..099f57d975bbb 100644 --- a/metadata-ingestion/src/datahub/cli/docker_cli.py +++ b/metadata-ingestion/src/datahub/cli/docker_cli.py @@ -21,7 +21,7 @@ from expandvars import expandvars from requests_file import FileAdapter -from datahub.cli.cli_utils import DATAHUB_ROOT_FOLDER +from datahub.cli.config_utils import DATAHUB_ROOT_FOLDER from datahub.cli.docker_check import ( DATAHUB_COMPOSE_LEGACY_VOLUME_FILTERS, DATAHUB_COMPOSE_PROJECT_FILTER, diff --git a/metadata-ingestion/src/datahub/cli/env_utils.py b/metadata-ingestion/src/datahub/cli/env_utils.py new file mode 100644 index 0000000000000..8909036eab6cd --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/env_utils.py @@ -0,0 +1,11 @@ +import os + + +def get_boolean_env_variable(key: str, default: bool = False) -> bool: + value = os.environ.get(key) + if value is None: + return default + elif value.lower() in ("true", "1"): + return True + else: + return False diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index 9c55f52497c0e..2e66b18e48145 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -15,12 +15,7 @@ import datahub as datahub_package from datahub.cli import cli_utils -from datahub.cli.cli_utils import ( - CONDENSED_DATAHUB_CONFIG_PATH, - format_aspect_summaries, - get_session_and_host, - post_rollback_endpoint, -) +from datahub.cli.config_utils import CONDENSED_DATAHUB_CONFIG_PATH from datahub.configuration.config_loader import load_config_file from datahub.ingestion.graph.client import get_default_graph from datahub.ingestion.run.connection import ConnectionManager @@ -436,7 +431,7 @@ def mcps(path: str) -> None: def list_runs(page_offset: int, page_size: int, include_soft_deletes: bool) -> None: """List recent ingestion runs to datahub""" - session, gms_host = get_session_and_host() + session, gms_host = cli_utils.get_session_and_host() url = f"{gms_host}/runs?action=list" @@ -485,7 +480,7 @@ def show( 
run_id: str, start: int, count: int, include_soft_deletes: bool, show_aspect: bool ) -> None: """Describe a provided ingestion run to datahub""" - session, gms_host = get_session_and_host() + session, gms_host = cli_utils.get_session_and_host() url = f"{gms_host}/runs?action=describe" @@ -504,7 +499,11 @@ def show( rows = parse_restli_response(response) if not show_aspect: click.echo( - tabulate(format_aspect_summaries(rows), RUN_TABLE_COLUMNS, tablefmt="grid") + tabulate( + cli_utils.format_aspect_summaries(rows), + RUN_TABLE_COLUMNS, + tablefmt="grid", + ) ) else: for row in rows: @@ -546,7 +545,7 @@ def rollback( aspects_affected, unsafe_entity_count, unsafe_entities, - ) = post_rollback_endpoint(payload_obj, "/runs?action=rollback") + ) = cli_utils.post_rollback_endpoint(payload_obj, "/runs?action=rollback") click.echo( "Rolling back deletes the entities created by a run and reverts the updated aspects" diff --git a/metadata-ingestion/src/datahub/cli/lite_cli.py b/metadata-ingestion/src/datahub/cli/lite_cli.py index 8636187a51d09..7e2ad23a7753f 100644 --- a/metadata-ingestion/src/datahub/cli/lite_cli.py +++ b/metadata-ingestion/src/datahub/cli/lite_cli.py @@ -9,7 +9,7 @@ from click.shell_completion import CompletionItem from click_default_group import DefaultGroup -from datahub.cli.cli_utils import ( +from datahub.cli.config_utils import ( DATAHUB_ROOT_FOLDER, DatahubConfig, get_client_config, diff --git a/metadata-ingestion/src/datahub/emitter/mcp_patch_builder.py b/metadata-ingestion/src/datahub/emitter/mcp_patch_builder.py index be68d46472a55..5a9eb074f1b09 100644 --- a/metadata-ingestion/src/datahub/emitter/mcp_patch_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mcp_patch_builder.py @@ -12,6 +12,7 @@ MetadataChangeProposalClass, SystemMetadataClass, ) +from datahub.utilities.urns.urn import guess_entity_type def _recursive_to_obj(obj: Any) -> Any: @@ -47,13 +48,11 @@ class MetadataPatchProposal: def __init__( self, urn: str, - entity_type: str, system_metadata: Optional[SystemMetadataClass] = None, audit_header: Optional[KafkaAuditHeaderClass] = None, ) -> None: self.urn = urn - # TODO: Remove the entity_type parameter, as MCPW can infer it from the URN. 
- self.entity_type = entity_type + self.entity_type = guess_entity_type(urn) self.system_metadata = system_metadata self.audit_header = audit_header self.patches = defaultdict(list) diff --git a/metadata-ingestion/src/datahub/entrypoints.py b/metadata-ingestion/src/datahub/entrypoints.py index 1bf090a2e514e..4f6c596b7bf20 100644 --- a/metadata-ingestion/src/datahub/entrypoints.py +++ b/metadata-ingestion/src/datahub/entrypoints.py @@ -9,9 +9,13 @@ import datahub as datahub_package from datahub.cli.check_cli import check from datahub.cli.cli_utils import ( + fixup_gms_url, + generate_access_token, + make_shim_command, +) +from datahub.cli.config_utils import ( DATAHUB_CONFIG_PATH, get_boolean_env_variable, - make_shim_command, write_gms_config, ) from datahub.cli.delete_cli import delete @@ -99,8 +103,15 @@ def version() -> None: @datahub.command() +@click.option( + "--use-password", + type=bool, + is_flag=True, + default=False, + help="If passed then uses password to initialise token.", +) @telemetry.with_telemetry() -def init() -> None: +def init(use_password: bool = False) -> None: """Configure which datahub instance to connect to""" if os.path.isfile(DATAHUB_CONFIG_PATH): @@ -110,11 +121,22 @@ def init() -> None: host = click.prompt( "Enter your DataHub host", type=str, default="http://localhost:8080" ) - token = click.prompt( - "Enter your DataHub access token (Supports env vars via `{VAR_NAME}` syntax)", - type=str, - default="", - ) + host = fixup_gms_url(host) + if use_password: + username = click.prompt("Enter your DataHub username", type=str) + password = click.prompt( + "Enter your DataHub password", + type=str, + ) + _, token = generate_access_token( + username=username, password=password, gms_url=host + ) + else: + token = click.prompt( + "Enter your DataHub access token (Supports env vars via `{VAR_NAME}` syntax)", + type=str, + default="", + ) write_gms_config(host, token) click.echo(f"Written to {DATAHUB_CONFIG_PATH}") diff --git a/metadata-ingestion/src/datahub/integrations/great_expectations/action.py b/metadata-ingestion/src/datahub/integrations/great_expectations/action.py index 8b393a8f6f1c6..9890fea990c4e 100644 --- a/metadata-ingestion/src/datahub/integrations/great_expectations/action.py +++ b/metadata-ingestion/src/datahub/integrations/great_expectations/action.py @@ -32,7 +32,7 @@ from sqlalchemy.engine.url import make_url import datahub.emitter.mce_builder as builder -from datahub.cli.cli_utils import get_boolean_env_variable +from datahub.cli.env_utils import get_boolean_env_variable from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.emitter.rest_emitter import DatahubRestEmitter from datahub.emitter.serialization_helper import pre_json_transform diff --git a/metadata-ingestion/src/datahub/specific/chart.py b/metadata-ingestion/src/datahub/specific/chart.py index 5dc394e8ebe0f..47ea539491359 100644 --- a/metadata-ingestion/src/datahub/specific/chart.py +++ b/metadata-ingestion/src/datahub/specific/chart.py @@ -40,7 +40,7 @@ def __init__( audit_header: The Kafka audit header of the chart (optional). 
""" super().__init__( - urn, "chart", system_metadata=system_metadata, audit_header=audit_header + urn, system_metadata=system_metadata, audit_header=audit_header ) self.custom_properties_patch_helper = CustomPropertiesPatchHelper( self, ChartInfo.ASPECT_NAME diff --git a/metadata-ingestion/src/datahub/specific/dashboard.py b/metadata-ingestion/src/datahub/specific/dashboard.py index 855dcc5685cea..64f28a57cb180 100644 --- a/metadata-ingestion/src/datahub/specific/dashboard.py +++ b/metadata-ingestion/src/datahub/specific/dashboard.py @@ -40,7 +40,7 @@ def __init__( audit_header: The Kafka audit header of the dashboard (optional). """ super().__init__( - urn, "dashboard", system_metadata=system_metadata, audit_header=audit_header + urn, system_metadata=system_metadata, audit_header=audit_header ) self.custom_properties_patch_helper = CustomPropertiesPatchHelper( self, DashboardInfo.ASPECT_NAME diff --git a/metadata-ingestion/src/datahub/specific/datajob.py b/metadata-ingestion/src/datahub/specific/datajob.py index 0338a1320c15b..174749b3268bf 100644 --- a/metadata-ingestion/src/datahub/specific/datajob.py +++ b/metadata-ingestion/src/datahub/specific/datajob.py @@ -41,7 +41,7 @@ def __init__( audit_header: The Kafka audit header of the data job (optional). """ super().__init__( - urn, "datajob", system_metadata=system_metadata, audit_header=audit_header + urn, system_metadata=system_metadata, audit_header=audit_header ) self.custom_properties_patch_helper = CustomPropertiesPatchHelper( self, DataJobInfo.ASPECT_NAME diff --git a/metadata-ingestion/src/datahub/specific/dataproduct.py b/metadata-ingestion/src/datahub/specific/dataproduct.py index 2c174e0c9a6cb..c698c511fd9b5 100644 --- a/metadata-ingestion/src/datahub/specific/dataproduct.py +++ b/metadata-ingestion/src/datahub/specific/dataproduct.py @@ -30,7 +30,6 @@ def __init__( ) -> None: super().__init__( urn, - "dataProduct", system_metadata=system_metadata, audit_header=audit_header, ) diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index d3c3de36198e3..c59cdb8ddfa38 100644 --- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -98,7 +98,7 @@ def __init__( audit_header: Optional[KafkaAuditHeaderClass] = None, ) -> None: super().__init__( - urn, "dataset", system_metadata=system_metadata, audit_header=audit_header + urn, system_metadata=system_metadata, audit_header=audit_header ) self.custom_properties_patch_helper = CustomPropertiesPatchHelper( self, DatasetProperties.ASPECT_NAME diff --git a/metadata-ingestion/src/datahub/telemetry/telemetry.py b/metadata-ingestion/src/datahub/telemetry/telemetry.py index c399f2e1a27e5..a802125e76b4e 100644 --- a/metadata-ingestion/src/datahub/telemetry/telemetry.py +++ b/metadata-ingestion/src/datahub/telemetry/telemetry.py @@ -13,7 +13,8 @@ from typing_extensions import ParamSpec import datahub as datahub_package -from datahub.cli.cli_utils import DATAHUB_ROOT_FOLDER, get_boolean_env_variable +from datahub.cli.config_utils import DATAHUB_ROOT_FOLDER +from datahub.cli.env_utils import get_boolean_env_variable from datahub.configuration.common import ExceptionWithProps from datahub.ingestion.graph.client import DataHubGraph from datahub.metadata.schema_classes import _custom_package_path diff --git a/metadata-ingestion/tests/unit/test_cli_utils.py b/metadata-ingestion/tests/unit/test_cli_utils.py index cb0b7c734ee0a..bc1826d422e38 100644 --- 
a/metadata-ingestion/tests/unit/test_cli_utils.py +++ b/metadata-ingestion/tests/unit/test_cli_utils.py @@ -59,3 +59,24 @@ def test_correct_url_when_gms_host_port_url_protocol_set(): ) def test_correct_url_when_url_set(): assert cli_utils.get_details_from_env() == ("https://example.com", None) + + +def test_fixup_gms_url(): + assert cli_utils.fixup_gms_url("http://localhost:8080") == "http://localhost:8080" + assert cli_utils.fixup_gms_url("http://localhost:8080/") == "http://localhost:8080" + assert cli_utils.fixup_gms_url("http://abc.acryl.io") == "https://abc.acryl.io/gms" + + +def test_guess_frontend_url_from_gms_url(): + assert ( + cli_utils.guess_frontend_url_from_gms_url("http://localhost:8080") + == "http://localhost:9002" + ) + assert ( + cli_utils.guess_frontend_url_from_gms_url("http://localhost:8080/") + == "http://localhost:9002" + ) + assert ( + cli_utils.guess_frontend_url_from_gms_url("https://abc.acryl.io/gms") + == "https://abc.acryl.io" + ) diff --git a/smoke-test/.gitignore b/smoke-test/.gitignore index 44d3f620a1937..b8af2eef535a0 100644 --- a/smoke-test/.gitignore +++ b/smoke-test/.gitignore @@ -127,6 +127,7 @@ venv.bak/ .mypy_cache/ .dmypy.json dmypy.json +.ruff_cache/ # Pyre type checker .pyre/ diff --git a/smoke-test/build.gradle b/smoke-test/build.gradle index 1614a4b8527dc..a6f3cd793ddd6 100644 --- a/smoke-test/build.gradle +++ b/smoke-test/build.gradle @@ -1,5 +1,10 @@ apply plugin: 'com.github.node-gradle.node' +ext { + python_executable = 'python3' + venv_name = 'venv' +} + node { // If true, it will download node using above parameters. @@ -38,4 +43,32 @@ task yarnInstall(type: YarnTask) { println "Root directory: ${project.rootDir}"; environment = ['NODE_OPTIONS': '--openssl-legacy-provider'] args = ['install', '--cwd', "${project.rootDir}/smoke-test/tests/cypress"] -} \ No newline at end of file +} + +task installDev(type: Exec) { + inputs.file file('pyproject.toml') + inputs.file file('requirements.txt') + outputs.file("${venv_name}/.build_install_dev_sentinel") + commandLine 'bash', '-x', '-c', + "${python_executable} -m venv ${venv_name} && " + + "${venv_name}/bin/pip install --upgrade pip wheel setuptools && " + + "${venv_name}/bin/pip install -r requirements.txt && " + + "touch ${venv_name}/.build_install_dev_sentinel" +} + +task lint(type: Exec, dependsOn: installDev) { + commandLine 'bash', '-c', + "source ${venv_name}/bin/activate && set -x && " + + "black --check --diff tests/ && " + + "isort --check --diff tests/ && " + + "ruff --statistics tests/ && " + + "mypy tests/" +} +task lintFix(type: Exec, dependsOn: installDev) { + commandLine 'bash', '-c', + "source ${venv_name}/bin/activate && set -x && " + + "black tests/ && " + + "isort tests/ && " + + "ruff --fix tests/ && " + + "mypy tests/" +} diff --git a/smoke-test/pyproject.toml b/smoke-test/pyproject.toml new file mode 100644 index 0000000000000..c7745d0e9a364 --- /dev/null +++ b/smoke-test/pyproject.toml @@ -0,0 +1,46 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "smoke-test" +version = "0.0.0" +description = "" +authors = [ + { name="Acryl Data", email="eng@acryl.io" }, +] +requires-python = ">=3.9" + + +[tool.black] +extend-exclude = ''' +# A regex preceded with ^/ will apply only to files and directories +# in the root of the project. +tmp +venv +''' +include = '\.pyi?$' +target-version = ['py310'] + +[tool.isort] +profile = 'black' + +[tool.ruff] +ignore = [ + 'E501', # Ignore line length, since black handles that. 
'D203', # Ignore 1 blank line required before class docstring.
+]
+
+[tool.mypy]
+exclude = "^(venv/|build/|dist/)"
+ignore_missing_imports = true
+namespace_packages = false
+check_untyped_defs = true
+disallow_untyped_decorators = true
+warn_unused_configs = true
+# eventually we'd like to enable these
+disallow_incomplete_defs = false
+disallow_untyped_defs = false
+
+[tool.pyright]
+extraPaths = ['tests']
diff --git a/smoke-test/requirements.txt b/smoke-test/requirements.txt
index e37de9caddc69..c5d43163dff5d 100644
--- a/smoke-test/requirements.txt
+++ b/smoke-test/requirements.txt
@@ -7,4 +7,12 @@ slack-sdk==3.18.1
 aiohttp
 joblib
 pytest-xdist
-networkx
\ No newline at end of file
+networkx
+# libraries for linting below this
+black==23.7.0
+isort==5.12.0
+mypy==1.5.1
+ruff==0.0.287
+# stub versions are copied from metadata-ingestion/setup.py, which should be the source of truth
+types-requests>=2.28.11.6,<=2.31.0.3
+types-PyYAML
diff --git a/smoke-test/run-quickstart.sh b/smoke-test/run-quickstart.sh
index cd747321ad602..05c321566d54a 100755
--- a/smoke-test/run-quickstart.sh
+++ b/smoke-test/run-quickstart.sh
@@ -4,10 +4,8 @@ set -euxo pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 cd "$DIR"
 
-python3 -m venv venv
+../gradlew :smoke-test:installDev
 source venv/bin/activate
-pip install --upgrade pip wheel setuptools
-pip install -r requirements.txt
 
 mkdir -p ~/.datahub/plugins/frontend/auth/
 echo "test_user:test_pass" >> ~/.datahub/plugins/frontend/auth/user.props
diff --git a/smoke-test/tests/aspect_generators/timeseries/dataset_profile_gen.py b/smoke-test/tests/aspect_generators/timeseries/dataset_profile_gen.py
index bc22b74ed185c..f808e7a58a329 100644
--- a/smoke-test/tests/aspect_generators/timeseries/dataset_profile_gen.py
+++ b/smoke-test/tests/aspect_generators/timeseries/dataset_profile_gen.py
@@ -1,8 +1,10 @@
 from typing import Iterable
 
-from datahub.metadata.schema_classes import (DatasetFieldProfileClass,
-                                             DatasetProfileClass,
-                                             TimeWindowSizeClass)
+from datahub.metadata.schema_classes import (
+    DatasetFieldProfileClass,
+    DatasetProfileClass,
+    TimeWindowSizeClass,
+)
 
 from tests.utils import get_timestampmillis_at_start_of_day
 
diff --git a/smoke-test/tests/assertions/assertions_test.py b/smoke-test/tests/assertions/assertions_test.py
index 48f3564e6cd97..78ba68a840f0d 100644
--- a/smoke-test/tests/assertions/assertions_test.py
+++ b/smoke-test/tests/assertions/assertions_test.py
@@ -7,24 +7,30 @@
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.ingestion.api.common import PipelineContext, RecordEnvelope
 from datahub.ingestion.api.sink import NoopWriteCallback
-from datahub.ingestion.sink.file import FileSink, FileSinkConfig
-from datahub.metadata.com.linkedin.pegasus2avro.assertion import \
-    AssertionStdAggregation
-from datahub.metadata.schema_classes import (AssertionInfoClass,
-                                             AssertionResultClass,
-                                             AssertionResultTypeClass,
-                                             AssertionRunEventClass,
-                                             AssertionRunStatusClass,
-                                             AssertionStdOperatorClass,
-                                             AssertionTypeClass,
-                                             DatasetAssertionInfoClass,
-                                             DatasetAssertionScopeClass,
-                                             PartitionSpecClass,
-                                             PartitionTypeClass)
+from datahub.ingestion.sink.file import FileSink
+from datahub.metadata.com.linkedin.pegasus2avro.assertion import AssertionStdAggregation
+from datahub.metadata.schema_classes import (
+    AssertionInfoClass,
+    AssertionResultClass,
+    AssertionResultTypeClass,
+    AssertionRunEventClass,
+    AssertionRunStatusClass,
+    AssertionStdOperatorClass,
+    AssertionTypeClass,
+ 
DatasetAssertionInfoClass, + DatasetAssertionScopeClass, + PartitionSpecClass, + PartitionTypeClass, +) import requests_wrapper as requests -from tests.utils import (delete_urns_from_file, get_gms_url, get_sleep_info, - ingest_file_via_rest, wait_for_healthcheck_util) +from tests.utils import ( + delete_urns_from_file, + get_gms_url, + get_sleep_info, + ingest_file_via_rest, + wait_for_healthcheck_util, +) restli_default_headers = { "X-RestLi-Protocol-Version": "2.0.0", @@ -210,7 +216,7 @@ def create_test_data(test_file): ) fileSink: FileSink = FileSink.create( - FileSinkConfig(filename=test_file), ctx=PipelineContext(run_id="test-file") + {"filename": test_file}, ctx=PipelineContext(run_id="test-file") ) for mcp in [mcp1, mcp2, mcp3, mcp4, mcp5, mcp6, mcp7]: fileSink.write_record_async( diff --git a/smoke-test/tests/browse/browse_test.py b/smoke-test/tests/browse/browse_test.py index 550f0062d5a39..adeb6775a150d 100644 --- a/smoke-test/tests/browse/browse_test.py +++ b/smoke-test/tests/browse/browse_test.py @@ -1,10 +1,6 @@ -import time - import pytest -import requests_wrapper as requests -from tests.utils import (delete_urns_from_file, get_frontend_url, - ingest_file_via_rest) +from tests.utils import delete_urns_from_file, get_frontend_url, ingest_file_via_rest TEST_DATASET_1_URN = "urn:li:dataset:(urn:li:dataPlatform:kafka,test-browse-1,PROD)" TEST_DATASET_2_URN = "urn:li:dataset:(urn:li:dataPlatform:kafka,test-browse-2,PROD)" @@ -29,7 +25,6 @@ def test_healthchecks(wait_for_healthchecks): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_get_browse_paths(frontend_session, ingest_cleanup_data): - # Iterate through each browse path, starting with the root get_browse_paths_query = """query browse($input: BrowseInput!) {\n diff --git a/smoke-test/tests/cli/datahub-cli.py b/smoke-test/tests/cli/datahub_cli.py similarity index 93% rename from smoke-test/tests/cli/datahub-cli.py rename to smoke-test/tests/cli/datahub_cli.py index c3db6028efceb..81ae6a34264ad 100644 --- a/smoke-test/tests/cli/datahub-cli.py +++ b/smoke-test/tests/cli/datahub_cli.py @@ -1,10 +1,7 @@ import json -from time import sleep import pytest -from datahub.cli.cli_utils import (get_aspects_for_entity, guess_entity_type, - post_entity) -from datahub.cli.ingest_cli import get_session_and_host, rollback +from datahub.cli.cli_utils import get_aspects_for_entity, get_session_and_host from tests.utils import ingest_file_via_rest, wait_for_writes_to_sync @@ -115,5 +112,5 @@ def test_rollback_editable(): ) # But first ingestion aspects should not be present assert "browsePaths" not in get_aspects_for_entity( - entity_urn=dataset_urn, typed=False + entity_urn=dataset_urn, aspects=["browsePaths"], typed=False ) diff --git a/smoke-test/tests/cli/datahub_graph_test.py b/smoke-test/tests/cli/datahub_graph_test.py index 17c8924fb0998..1e324477adb6b 100644 --- a/smoke-test/tests/cli/datahub_graph_test.py +++ b/smoke-test/tests/cli/datahub_graph_test.py @@ -1,11 +1,14 @@ import pytest import tenacity from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph -from datahub.metadata.schema_classes import (KafkaSchemaClass, - SchemaMetadataClass) +from datahub.metadata.schema_classes import KafkaSchemaClass, SchemaMetadataClass -from tests.utils import (delete_urns_from_file, get_gms_url, get_sleep_info, - ingest_file_via_rest) +from tests.utils import ( + delete_urns_from_file, + get_gms_url, + get_sleep_info, + ingest_file_via_rest, +) sleep_sec, sleep_times = get_sleep_info() diff --git 
a/smoke-test/tests/cli/delete_cmd/test_timeseries_delete.py b/smoke-test/tests/cli/delete_cmd/test_timeseries_delete.py index 106da7cd8d71e..cfbbacea1ed79 100644 --- a/smoke-test/tests/cli/delete_cmd/test_timeseries_delete.py +++ b/smoke-test/tests/cli/delete_cmd/test_timeseries_delete.py @@ -2,7 +2,6 @@ import logging import sys import tempfile -import time from json import JSONDecodeError from typing import Any, Dict, List, Optional @@ -12,11 +11,8 @@ from datahub.entrypoints import datahub from datahub.metadata.schema_classes import DatasetProfileClass -import requests_wrapper as requests -from tests.aspect_generators.timeseries.dataset_profile_gen import \ - gen_dataset_profiles -from tests.utils import (get_strftime_from_timestamp_millis, - wait_for_writes_to_sync) +from tests.aspect_generators.timeseries.dataset_profile_gen import gen_dataset_profiles +from tests.utils import get_strftime_from_timestamp_millis, wait_for_writes_to_sync logger = logging.getLogger(__name__) diff --git a/smoke-test/tests/cli/ingest_cmd/test_timeseries_rollback.py b/smoke-test/tests/cli/ingest_cmd/test_timeseries_rollback.py index e962b1a5cafd6..aa7c90cc6f988 100644 --- a/smoke-test/tests/cli/ingest_cmd/test_timeseries_rollback.py +++ b/smoke-test/tests/cli/ingest_cmd/test_timeseries_rollback.py @@ -1,5 +1,4 @@ import json -import time from typing import Any, Dict, List, Optional import datahub.emitter.mce_builder as builder @@ -8,7 +7,6 @@ from datahub.entrypoints import datahub from datahub.metadata.schema_classes import DatasetProfileClass -import requests_wrapper as requests from tests.utils import ingest_file_via_rest, wait_for_writes_to_sync runner = CliRunner(mix_stderr=False) diff --git a/smoke-test/tests/managed-ingestion/__init__.py b/smoke-test/tests/cli/user_groups_cmd/__init__.py similarity index 100% rename from smoke-test/tests/managed-ingestion/__init__.py rename to smoke-test/tests/cli/user_groups_cmd/__init__.py diff --git a/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py b/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py index 7b986d3be0444..555687c98ed3e 100644 --- a/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py +++ b/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py @@ -1,16 +1,15 @@ import json import sys import tempfile -import time from typing import Any, Dict, Iterable, List +import pytest import yaml from click.testing import CliRunner, Result from datahub.api.entities.corpgroup.corpgroup import CorpGroup from datahub.entrypoints import datahub from datahub.ingestion.graph.client import DataHubGraph, get_default_graph -import requests_wrapper as requests from tests.utils import wait_for_writes_to_sync runner = CliRunner(mix_stderr=False) @@ -43,7 +42,6 @@ def gen_datahub_groups(num_groups: int) -> Iterable[CorpGroup]: description=f"The Group {i}", picture_link=f"https://images.google.com/group{i}.jpg", slack=f"@group{i}", - admins=["user1"], members=["user2"], ) yield group @@ -65,7 +63,7 @@ def get_group_ownership(user_urn: str) -> List[str]: graph = get_default_graph() entities = graph.get_related_entities( entity_urn=user_urn, - relationship_types="OwnedBy", + relationship_types=["OwnedBy"], direction=DataHubGraph.RelationshipDirection.INCOMING, ) return [entity.urn for entity in entities] @@ -75,12 +73,13 @@ def get_group_membership(user_urn: str) -> List[str]: graph = get_default_graph() entities = graph.get_related_entities( entity_urn=user_urn, - relationship_types="IsMemberOfGroup", + relationship_types=["IsMemberOfGroup"], 
direction=DataHubGraph.RelationshipDirection.OUTGOING, ) return [entity.urn for entity in entities] +@pytest.mark.skip(reason="Functionality and test needs to be validated for correctness") def test_group_upsert(wait_for_healthchecks: Any) -> None: num_groups: int = 10 for i, datahub_group in enumerate(gen_datahub_groups(num_groups)): diff --git a/smoke-test/tests/consistency_utils.py b/smoke-test/tests/consistency_utils.py index 607835bf3649c..1af9399c2dc9a 100644 --- a/smoke-test/tests/consistency_utils.py +++ b/smoke-test/tests/consistency_utils.py @@ -30,7 +30,7 @@ def wait_for_writes_to_sync(max_timeout_in_sec: int = 120) -> None: result = str(completed_process.stdout) lines = result.splitlines() - lag_values = [int(l) for l in lines if l != ""] + lag_values = [int(line) for line in lines if line != ""] maximum_lag = max(lag_values) if maximum_lag == 0: lag_zero = True diff --git a/smoke-test/tests/containers/containers_test.py b/smoke-test/tests/containers/containers_test.py index 227645a87d30a..4997102702e57 100644 --- a/smoke-test/tests/containers/containers_test.py +++ b/smoke-test/tests/containers/containers_test.py @@ -1,7 +1,6 @@ import pytest -from tests.utils import (delete_urns_from_file, get_frontend_url, - ingest_file_via_rest) +from tests.utils import delete_urns_from_file, get_frontend_url, ingest_file_via_rest @pytest.fixture(scope="module", autouse=False) @@ -21,12 +20,10 @@ def test_healthchecks(wait_for_healthchecks): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_get_full_container(frontend_session, ingest_cleanup_data): - container_urn = "urn:li:container:SCHEMA" container_name = "datahub_schema" container_description = "The DataHub schema" editable_container_description = "custom description" - dataset_urn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)" # Get a full container get_container_json = { @@ -129,7 +126,6 @@ def test_get_full_container(frontend_session, ingest_cleanup_data): @pytest.mark.dependency(depends=["test_healthchecks", "test_get_full_container"]) def test_get_parent_container(frontend_session, ingest_cleanup_data): - dataset_urn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)" # Get count of existing secrets @@ -165,7 +161,6 @@ def test_get_parent_container(frontend_session, ingest_cleanup_data): @pytest.mark.dependency(depends=["test_healthchecks", "test_get_full_container"]) def test_update_container(frontend_session, ingest_cleanup_data): - container_urn = "urn:li:container:SCHEMA" new_tag = "urn:li:tag:Test" @@ -227,7 +222,7 @@ def test_update_container(frontend_session, ingest_cleanup_data): "ownerUrn": new_owner, "resourceUrn": container_urn, "ownerEntityType": "CORP_USER", - "ownershipTypeUrn": "urn:li:ownershipType:__system__technical_owner" + "ownershipTypeUrn": "urn:li:ownershipType:__system__technical_owner", } }, } diff --git a/smoke-test/tests/cypress/integration_test.py b/smoke-test/tests/cypress/integration_test.py index 4ad2bc53fa87d..4124ced999446 100644 --- a/smoke-test/tests/cypress/integration_test.py +++ b/smoke-test/tests/cypress/integration_test.py @@ -5,11 +5,17 @@ import pytest -from tests.setup.lineage.ingest_time_lineage import (get_time_lineage_urns, - ingest_time_lineage) -from tests.utils import (create_datahub_step_state_aspects, delete_urns, - delete_urns_from_file, get_admin_username, - ingest_file_via_rest) +from tests.setup.lineage.ingest_time_lineage import ( + get_time_lineage_urns, + ingest_time_lineage, +) +from tests.utils import ( + 
create_datahub_step_state_aspects, + delete_urns, + delete_urns_from_file, + get_admin_username, + ingest_file_via_rest, +) CYPRESS_TEST_DATA_DIR = "tests/cypress" @@ -178,8 +184,10 @@ def test_run_cypress(frontend_session, wait_for_healthchecks): print(f"test strategy is {test_strategy}") test_spec_arg = "" if test_strategy is not None: - specs = _get_spec_map(strategy_spec_map.get(test_strategy)) - test_spec_arg = f" --spec '{specs}' " + specs = strategy_spec_map.get(test_strategy) + assert specs is not None + specs_str = _get_spec_map(specs) + test_spec_arg = f" --spec '{specs_str}' " print("Running Cypress tests with command") command = f"NO_COLOR=1 npx cypress run {record_arg} {test_spec_arg} {tag_arg}" @@ -194,6 +202,8 @@ def test_run_cypress(frontend_session, wait_for_healthchecks): stderr=subprocess.PIPE, cwd=f"{CYPRESS_TEST_DATA_DIR}", ) + assert proc.stdout is not None + assert proc.stderr is not None stdout = proc.stdout.read() stderr = proc.stderr.read() return_code = proc.wait() diff --git a/smoke-test/tests/dataproduct/test_dataproduct.py b/smoke-test/tests/dataproduct/test_dataproduct.py index baef1cb1cb3ba..0d0141e9111c0 100644 --- a/smoke-test/tests/dataproduct/test_dataproduct.py +++ b/smoke-test/tests/dataproduct/test_dataproduct.py @@ -1,8 +1,6 @@ import logging import os -import subprocess import tempfile -import time from random import randint from typing import List @@ -14,17 +12,24 @@ from datahub.ingestion.api.sink import NoopWriteCallback from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph from datahub.ingestion.sink.file import FileSink, FileSinkConfig -from datahub.metadata.schema_classes import (DataProductPropertiesClass, - DatasetPropertiesClass, - DomainPropertiesClass, - DomainsClass) +from datahub.metadata.schema_classes import ( + DataProductPropertiesClass, + DatasetPropertiesClass, + DomainPropertiesClass, + DomainsClass, +) from datahub.utilities.urns.urn import Urn +from tests.utils import ( + delete_urns_from_file, + get_gms_url, + get_sleep_info, + ingest_file_via_rest, + wait_for_writes_to_sync, +) + logger = logging.getLogger(__name__) -import requests_wrapper as requests -from tests.utils import (delete_urns_from_file, get_gms_url, get_sleep_info, - ingest_file_via_rest, wait_for_writes_to_sync) start_index = randint(10, 10000) dataset_urns = [ @@ -82,7 +87,6 @@ def create_test_data(filename: str): @pytest.fixture(scope="module", autouse=False) def ingest_cleanup_data(request): - new_file, filename = tempfile.mkstemp() try: create_test_data(filename) @@ -160,7 +164,6 @@ def validate_relationships( ) @pytest.mark.dependency(depends=["test_healthchecks"]) def test_create_data_product(ingest_cleanup_data): - domain_urn = Urn("domain", [datahub_guid({"name": "Marketing"})]) graph: DataHubGraph = DataHubGraph(config=DatahubClientConfig(server=get_gms_url())) result = graph.execute_graphql( @@ -191,6 +194,7 @@ def test_create_data_product(ingest_cleanup_data): assert result["batchSetDataProduct"] is True data_product_props = graph.get_aspect(data_product_urn, DataProductPropertiesClass) assert data_product_props is not None + assert data_product_props.assets is not None assert data_product_props.description == "Test Description" assert data_product_props.name == "Test Data Product" assert len(data_product_props.assets) == len(dataset_urns) diff --git a/smoke-test/tests/delete/delete_test.py b/smoke-test/tests/delete/delete_test.py index d920faaf3a89a..21833d0bd30a1 100644 --- a/smoke-test/tests/delete/delete_test.py +++ 
b/smoke-test/tests/delete/delete_test.py @@ -1,14 +1,16 @@ import json import os -from time import sleep import pytest -from datahub.cli.cli_utils import get_aspects_for_entity -from datahub.cli.ingest_cli import get_session_and_host - -from tests.utils import (delete_urns_from_file, get_datahub_graph, - ingest_file_via_rest, wait_for_healthcheck_util, - wait_for_writes_to_sync) +from datahub.cli.cli_utils import get_aspects_for_entity, get_session_and_host + +from tests.utils import ( + delete_urns_from_file, + get_datahub_graph, + ingest_file_via_rest, + wait_for_healthcheck_util, + wait_for_writes_to_sync, +) # Disable telemetry os.environ["DATAHUB_TELEMETRY_ENABLED"] = "false" diff --git a/smoke-test/tests/deprecation/deprecation_test.py b/smoke-test/tests/deprecation/deprecation_test.py index a8969804d03d7..ae3890aeda956 100644 --- a/smoke-test/tests/deprecation/deprecation_test.py +++ b/smoke-test/tests/deprecation/deprecation_test.py @@ -1,7 +1,11 @@ import pytest -from tests.utils import (delete_urns_from_file, get_frontend_url, get_root_urn, - ingest_file_via_rest) +from tests.utils import ( + delete_urns_from_file, + get_frontend_url, + get_root_urn, + ingest_file_via_rest, +) @pytest.fixture(scope="module", autouse=True) diff --git a/smoke-test/tests/domains/domains_test.py b/smoke-test/tests/domains/domains_test.py index fa8c918e3cbe1..1d83b032d7a8f 100644 --- a/smoke-test/tests/domains/domains_test.py +++ b/smoke-test/tests/domains/domains_test.py @@ -1,8 +1,13 @@ import pytest import tenacity -from tests.utils import (delete_urns_from_file, get_frontend_url, get_gms_url, - get_sleep_info, ingest_file_via_rest) +from tests.utils import ( + delete_urns_from_file, + get_frontend_url, + get_gms_url, + get_sleep_info, + ingest_file_via_rest, +) sleep_sec, sleep_times = get_sleep_info() @@ -26,7 +31,6 @@ def test_healthchecks(wait_for_healthchecks): stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) ) def _ensure_more_domains(frontend_session, list_domains_json, before_count): - # Get new count of Domains response = frontend_session.post( f"{get_frontend_url()}/api/v2/graphql", json=list_domains_json @@ -47,7 +51,6 @@ def _ensure_more_domains(frontend_session, list_domains_json, before_count): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_create_list_get_domain(frontend_session): - # Setup: Delete the domain (if exists) response = frontend_session.post( f"{get_gms_url()}/entities?action=delete", json={"urn": "urn:li:domain:test id"} @@ -167,7 +170,6 @@ def test_create_list_get_domain(frontend_session): @pytest.mark.dependency(depends=["test_healthchecks", "test_create_list_get_domain"]) def test_set_unset_domain(frontend_session, ingest_cleanup_data): - # Set and Unset a Domain for a dataset. Note that this doesn't test for adding domains to charts, dashboards, charts, & jobs. 
dataset_urn = ( "urn:li:dataset:(urn:li:dataPlatform:kafka,test-tags-terms-sample-kafka,PROD)" diff --git a/smoke-test/tests/lineage/test_lineage.py b/smoke-test/tests/lineage/test_lineage.py index 52d61d666c7d9..9cd98d1245bbb 100644 --- a/smoke-test/tests/lineage/test_lineage.py +++ b/smoke-test/tests/lineage/test_lineage.py @@ -49,6 +49,7 @@ from datahub.utilities.urns.dataset_urn import DatasetUrn from datahub.utilities.urns.urn import Urn from pydantic import BaseModel, validator + from tests.utils import ingest_file_via_rest, wait_for_writes_to_sync logger = logging.getLogger(__name__) @@ -85,7 +86,6 @@ def ingest_tableau_cll_via_rest(wait_for_healthchecks) -> None: ingest_file_via_rest( "tests/lineage/tableau_cll_mcps.json", ) - yield def search_across_lineage( @@ -499,6 +499,7 @@ def get_lineage_mcps(self) -> Iterable[MetadataChangeProposalWrapper]: def get_lineage_mcps_for_hop( self, hop_index: int ) -> Iterable[MetadataChangeProposalWrapper]: + assert self.expectations is not None if self.lineage_style == Scenario.LineageStyle.DATASET_JOB_DATASET: fine_grained_lineage = FineGrainedLineage( upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, @@ -722,11 +723,9 @@ def cleanup(self, delete_agent: DeleteAgent) -> None: for urn in self._generated_urns: delete_agent.delete_entity(urn) - def generate_expectation(self, query: ImpactQuery) -> LineageExpectation: - return self.expectations.generate_query_expectation_pairs(query) - def test_expectation(self, graph: DataHubGraph) -> bool: print("Testing expectation...") + assert self.expectations is not None try: for hop_index in range(self.num_hops): for dataset_urn in self.get_upstream_dataset_urns(hop_index): diff --git a/smoke-test/tests/tags-and-terms/__init__.py b/smoke-test/tests/managed_ingestion/__init__.py similarity index 100% rename from smoke-test/tests/tags-and-terms/__init__.py rename to smoke-test/tests/managed_ingestion/__init__.py diff --git a/smoke-test/tests/managed-ingestion/managed_ingestion_test.py b/smoke-test/tests/managed_ingestion/managed_ingestion_test.py similarity index 98% rename from smoke-test/tests/managed-ingestion/managed_ingestion_test.py rename to smoke-test/tests/managed_ingestion/managed_ingestion_test.py index 6d95f731f32b1..4a4bdca2e4592 100644 --- a/smoke-test/tests/managed-ingestion/managed_ingestion_test.py +++ b/smoke-test/tests/managed_ingestion/managed_ingestion_test.py @@ -3,8 +3,7 @@ import pytest import tenacity -from tests.utils import (get_frontend_url, get_sleep_info, - wait_for_healthcheck_util) +from tests.utils import get_frontend_url, get_sleep_info, wait_for_healthcheck_util sleep_sec, sleep_times = get_sleep_info() @@ -206,7 +205,6 @@ def _ensure_execution_request_present(frontend_session, execution_request_urn): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_create_list_get_remove_secret(frontend_session): - # Get count of existing secrets json_q = { "query": """query listSecrets($input: ListSecretsInput!) {\n @@ -265,7 +263,13 @@ def test_create_list_get_remove_secret(frontend_session): "query": """mutation updateSecret($input: UpdateSecretInput!) 
{\n updateSecret(input: $input) }""", - "variables": {"input": {"urn": secret_urn, "name": "SMOKE_TEST", "value": "mytestvalue.updated"}}, + "variables": { + "input": { + "urn": secret_urn, + "name": "SMOKE_TEST", + "value": "mytestvalue.updated", + } + }, } response = frontend_session.post( @@ -333,7 +337,6 @@ def test_create_list_get_remove_secret(frontend_session): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_create_list_get_remove_ingestion_source(frontend_session): - # Get count of existing ingestion sources res_data = _get_ingestionSources(frontend_session) diff --git a/smoke-test/tests/patch/common_patch_tests.py b/smoke-test/tests/patch/common_patch_tests.py index f1d6abf5da794..9530edb760c13 100644 --- a/smoke-test/tests/patch/common_patch_tests.py +++ b/smoke-test/tests/patch/common_patch_tests.py @@ -2,21 +2,26 @@ import uuid from typing import Dict, Optional, Type -from datahub.emitter.mce_builder import (make_tag_urn, make_term_urn, - make_user_urn) +from datahub.emitter.mce_builder import make_tag_urn, make_term_urn, make_user_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.emitter.mcp_patch_builder import MetadataPatchProposal from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig -from datahub.metadata.schema_classes import (AuditStampClass, GlobalTagsClass, - GlossaryTermAssociationClass, - GlossaryTermsClass, OwnerClass, - OwnershipClass, - OwnershipTypeClass, - TagAssociationClass, _Aspect) +from datahub.metadata.schema_classes import ( + AuditStampClass, + GlobalTagsClass, + GlossaryTermAssociationClass, + GlossaryTermsClass, + OwnerClass, + OwnershipClass, + OwnershipTypeClass, + TagAssociationClass, + _Aspect, +) def helper_test_entity_terms_patch( - test_entity_urn: str, patch_builder_class: Type[MetadataPatchProposal] + test_entity_urn: str, + patch_builder_class: Type[MetadataPatchProposal], ): def get_terms(graph, entity_urn): return graph.get_aspect( @@ -57,9 +62,9 @@ def get_terms(graph, entity_urn): assert terms_read.terms[1].urn == new_term.urn assert terms_read.terms[1].context is None - for patch_mcp in ( - patch_builder_class(test_entity_urn).remove_term(term_urn).build() - ): + patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(patch_builder, "remove_term") + for patch_mcp in patch_builder.remove_term(term_urn).build(): graph.emit_mcp(patch_mcp) pass @@ -69,9 +74,9 @@ def get_terms(graph, entity_urn): def helper_test_dataset_tags_patch( - test_entity_urn: str, patch_builder_class: Type[MetadataPatchProposal] + test_entity_urn: str, + patch_builder_class: Type[MetadataPatchProposal], ): - tag_urn = make_tag_urn(tag=f"testTag-{uuid.uuid4()}") tag_association = TagAssociationClass(tag=tag_urn, context="test") @@ -80,10 +85,11 @@ def helper_test_dataset_tags_patch( with DataHubGraph(DataHubGraphConfig()) as graph: graph.emit_mcp(mcpw) - tags_read: GlobalTagsClass = graph.get_aspect( + tags_read = graph.get_aspect( entity_urn=test_entity_urn, aspect_type=GlobalTagsClass, ) + assert tags_read is not None assert tags_read.tags[0].tag == tag_urn assert tags_read.tags[0].context == "test" @@ -98,14 +104,15 @@ def helper_test_dataset_tags_patch( entity_urn=test_entity_urn, aspect_type=GlobalTagsClass, ) + assert tags_read is not None assert tags_read.tags[0].tag == tag_urn assert tags_read.tags[0].context == "test" assert tags_read.tags[1].tag == new_tag.tag assert tags_read.tags[1].context is None - for patch_mcp in ( - 
patch_builder_class(test_entity_urn).remove_tag(tag_urn).build() - ): + patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(patch_builder, "remove_tag") + for patch_mcp in patch_builder.remove_tag(tag_urn).build(): graph.emit_mcp(patch_mcp) pass @@ -113,12 +120,14 @@ def helper_test_dataset_tags_patch( entity_urn=test_entity_urn, aspect_type=GlobalTagsClass, ) + assert tags_read is not None assert len(tags_read.tags) == 1 assert tags_read.tags[0].tag == new_tag.tag def helper_test_ownership_patch( - test_entity_urn: str, patch_builder_class: Type[MetadataPatchProposal] + test_entity_urn: str, + patch_builder_class: Type[MetadataPatchProposal], ): owner_to_set = OwnerClass( owner=make_user_urn("jdoe"), type=OwnershipTypeClass.DATAOWNER @@ -133,27 +142,26 @@ def helper_test_ownership_patch( ) with DataHubGraph(DataHubGraphConfig()) as graph: graph.emit_mcp(mcpw) - owner: OwnershipClass = graph.get_aspect( - entity_urn=test_entity_urn, aspect_type=OwnershipClass - ) + owner = graph.get_aspect(entity_urn=test_entity_urn, aspect_type=OwnershipClass) + assert owner is not None assert owner.owners[0].owner == make_user_urn("jdoe") - for patch_mcp in ( - patch_builder_class(test_entity_urn).add_owner(owner_to_add).build() - ): + patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(patch_builder, "add_owner") + for patch_mcp in patch_builder.add_owner(owner_to_add).build(): graph.emit_mcp(patch_mcp) owner = graph.get_aspect(entity_urn=test_entity_urn, aspect_type=OwnershipClass) + assert owner is not None assert len(owner.owners) == 2 - for patch_mcp in ( - patch_builder_class(test_entity_urn) - .remove_owner(make_user_urn("gdoe")) - .build() - ): + patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(patch_builder, "remove_owner") + for patch_mcp in patch_builder.remove_owner(make_user_urn("gdoe")).build(): graph.emit_mcp(patch_mcp) owner = graph.get_aspect(entity_urn=test_entity_urn, aspect_type=OwnershipClass) + assert owner is not None assert len(owner.owners) == 1 assert owner.owners[0].owner == make_user_urn("jdoe") @@ -172,6 +180,7 @@ def get_custom_properties( aspect_type=custom_properties_aspect_class, ) assert custom_properties_aspect + assert hasattr(custom_properties_aspect, "customProperties") return custom_properties_aspect.customProperties base_property_map = {"base_property": "base_property_value"} @@ -195,6 +204,7 @@ def get_custom_properties( } entity_patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(entity_patch_builder, "add_custom_property") for k, v in new_properties.items(): entity_patch_builder.add_custom_property(k, v) @@ -212,11 +222,9 @@ def get_custom_properties( assert custom_properties[k] == v # Remove property - for patch_mcp in ( - patch_builder_class(test_entity_urn) - .remove_custom_property("test_property") - .build() - ): + patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(patch_builder, "remove_custom_property") + for patch_mcp in patch_builder.remove_custom_property("test_property").build(): graph.emit_mcp(patch_mcp) custom_properties = get_custom_properties(graph, test_entity_urn) @@ -230,11 +238,9 @@ def get_custom_properties( assert custom_properties[k] == v # Replace custom properties - for patch_mcp in ( - patch_builder_class(test_entity_urn) - .set_custom_properties(new_properties) - .build() - ): + patch_builder = patch_builder_class(test_entity_urn) + assert hasattr(patch_builder, "set_custom_properties") + for patch_mcp in 
patch_builder.set_custom_properties(new_properties).build(): graph.emit_mcp(patch_mcp) custom_properties = get_custom_properties(graph, test_entity_urn) diff --git a/smoke-test/tests/patch/test_datajob_patches.py b/smoke-test/tests/patch/test_datajob_patches.py index 342d5d683228a..ce63d4571d6c8 100644 --- a/smoke-test/tests/patch/test_datajob_patches.py +++ b/smoke-test/tests/patch/test_datajob_patches.py @@ -3,14 +3,19 @@ from datahub.emitter.mce_builder import make_data_job_urn, make_dataset_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig -from datahub.metadata.schema_classes import (DataJobInfoClass, - DataJobInputOutputClass, - EdgeClass) +from datahub.metadata.schema_classes import ( + DataJobInfoClass, + DataJobInputOutputClass, + EdgeClass, +) from datahub.specific.datajob import DataJobPatchBuilder from tests.patch.common_patch_tests import ( - helper_test_custom_properties_patch, helper_test_dataset_tags_patch, - helper_test_entity_terms_patch, helper_test_ownership_patch) + helper_test_custom_properties_patch, + helper_test_dataset_tags_patch, + helper_test_entity_terms_patch, + helper_test_ownership_patch, +) def _make_test_datajob_urn( @@ -76,10 +81,12 @@ def test_datajob_inputoutput_dataset_patch(wait_for_healthchecks): with DataHubGraph(DataHubGraphConfig()) as graph: graph.emit_mcp(mcpw) - inputoutput_lineage_read: DataJobInputOutputClass = graph.get_aspect( + inputoutput_lineage_read = graph.get_aspect( entity_urn=datajob_urn, aspect_type=DataJobInputOutputClass, ) + assert inputoutput_lineage_read is not None + assert inputoutput_lineage_read.inputDatasetEdges is not None assert ( inputoutput_lineage_read.inputDatasetEdges[0].destinationUrn == other_dataset_urn @@ -97,6 +104,8 @@ def test_datajob_inputoutput_dataset_patch(wait_for_healthchecks): entity_urn=datajob_urn, aspect_type=DataJobInputOutputClass, ) + assert inputoutput_lineage_read is not None + assert inputoutput_lineage_read.inputDatasetEdges is not None assert len(inputoutput_lineage_read.inputDatasetEdges) == 2 assert ( inputoutput_lineage_read.inputDatasetEdges[0].destinationUrn @@ -119,6 +128,8 @@ def test_datajob_inputoutput_dataset_patch(wait_for_healthchecks): entity_urn=datajob_urn, aspect_type=DataJobInputOutputClass, ) + assert inputoutput_lineage_read is not None + assert inputoutput_lineage_read.inputDatasetEdges is not None assert len(inputoutput_lineage_read.inputDatasetEdges) == 1 assert ( inputoutput_lineage_read.inputDatasetEdges[0].destinationUrn diff --git a/smoke-test/tests/patch/test_dataset_patches.py b/smoke-test/tests/patch/test_dataset_patches.py index 6704d19760fb9..ec6b4a91fa6be 100644 --- a/smoke-test/tests/patch/test_dataset_patches.py +++ b/smoke-test/tests/patch/test_dataset_patches.py @@ -1,29 +1,27 @@ -import time import uuid from typing import Dict, Optional -from datahub.emitter.mce_builder import (make_dataset_urn, make_tag_urn, - make_term_urn, make_user_urn) +from datahub.emitter.mce_builder import make_dataset_urn, make_tag_urn, make_term_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig -from datahub.metadata.schema_classes import (AuditStampClass, - DatasetLineageTypeClass, - DatasetPropertiesClass, - EditableSchemaFieldInfoClass, - EditableSchemaMetadataClass, - GlobalTagsClass, - GlossaryTermAssociationClass, - GlossaryTermsClass, OwnerClass, - OwnershipClass, - OwnershipTypeClass, - 
TagAssociationClass, - UpstreamClass, - UpstreamLineageClass) +from datahub.metadata.schema_classes import ( + DatasetLineageTypeClass, + DatasetPropertiesClass, + EditableSchemaFieldInfoClass, + EditableSchemaMetadataClass, + GlossaryTermAssociationClass, + TagAssociationClass, + UpstreamClass, + UpstreamLineageClass, +) from datahub.specific.dataset import DatasetPatchBuilder from tests.patch.common_patch_tests import ( - helper_test_custom_properties_patch, helper_test_dataset_tags_patch, - helper_test_entity_terms_patch, helper_test_ownership_patch) + helper_test_custom_properties_patch, + helper_test_dataset_tags_patch, + helper_test_entity_terms_patch, + helper_test_ownership_patch, +) # Common Aspect Patch Tests @@ -135,7 +133,6 @@ def get_field_info( def test_field_terms_patch(wait_for_healthchecks): - dataset_urn = make_dataset_urn( platform="hive", name=f"SampleHiveDataset-{uuid.uuid4()}", env="PROD" ) @@ -174,6 +171,7 @@ def test_field_terms_patch(wait_for_healthchecks): assert field_info assert field_info.description == "This is a test field" + assert field_info.glossaryTerms is not None assert len(field_info.glossaryTerms.terms) == 1 assert field_info.glossaryTerms.terms[0].urn == new_term.urn @@ -191,11 +189,11 @@ def test_field_terms_patch(wait_for_healthchecks): assert field_info assert field_info.description == "This is a test field" + assert field_info.glossaryTerms is not None assert len(field_info.glossaryTerms.terms) == 0 def test_field_tags_patch(wait_for_healthchecks): - dataset_urn = make_dataset_urn( platform="hive", name=f"SampleHiveDataset-{uuid.uuid4()}", env="PROD" ) @@ -235,6 +233,7 @@ def test_field_tags_patch(wait_for_healthchecks): assert field_info assert field_info.description == "This is a test field" + assert field_info.globalTags is not None assert len(field_info.globalTags.tags) == 1 assert field_info.globalTags.tags[0].tag == new_tag.tag @@ -253,6 +252,7 @@ def test_field_tags_patch(wait_for_healthchecks): assert field_info assert field_info.description == "This is a test field" + assert field_info.globalTags is not None assert len(field_info.globalTags.tags) == 1 assert field_info.globalTags.tags[0].tag == new_tag.tag @@ -270,6 +270,7 @@ def test_field_tags_patch(wait_for_healthchecks): assert field_info assert field_info.description == "This is a test field" + assert field_info.globalTags is not None assert len(field_info.globalTags.tags) == 0 @@ -285,7 +286,6 @@ def get_custom_properties( def test_custom_properties_patch(wait_for_healthchecks): - dataset_urn = make_dataset_urn( platform="hive", name=f"SampleHiveDataset-{uuid.uuid4()}", env="PROD" ) diff --git a/smoke-test/tests/policies/test_policies.py b/smoke-test/tests/policies/test_policies.py index 67142181d2b96..186550482190c 100644 --- a/smoke-test/tests/policies/test_policies.py +++ b/smoke-test/tests/policies/test_policies.py @@ -1,8 +1,13 @@ import pytest import tenacity -from tests.utils import (get_frontend_session, get_frontend_url, get_root_urn, - get_sleep_info, wait_for_healthcheck_util) +from tests.utils import ( + get_frontend_session, + get_frontend_url, + get_root_urn, + get_sleep_info, + wait_for_healthcheck_util, +) TEST_POLICY_NAME = "Updated Platform Policy" @@ -90,7 +95,6 @@ def _ensure_policy_present(frontend_session, new_urn): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_frontend_policy_operations(frontend_session): - json = { "query": """mutation createPolicy($input: PolicyUpdateInput!) 
{\n createPolicy(input: $input) }""", diff --git a/smoke-test/tests/privileges/__init__.py b/smoke-test/tests/privileges/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index e1cb848cccf8e..a4c607dac89f2 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -1,14 +1,34 @@ import pytest import tenacity -from tests.utils import (get_frontend_session, wait_for_writes_to_sync, wait_for_healthcheck_util, - get_frontend_url, get_admin_credentials,get_sleep_info) -from tests.privileges.utils import * +from tests.privileges.utils import ( + assign_role, + assign_user_to_group, + create_group, + create_user, + create_user_policy, + remove_group, + remove_policy, + remove_user, + set_base_platform_privileges_policy_status, + set_view_dataset_sensitive_info_policy_status, + set_view_entity_profile_privileges_policy_status, +) +from tests.utils import ( + get_admin_credentials, + get_frontend_session, + get_frontend_url, + get_sleep_info, + login_as, + wait_for_healthcheck_util, + wait_for_writes_to_sync, +) pytestmark = pytest.mark.no_cypress_suite1 sleep_sec, sleep_times = get_sleep_info() + @pytest.fixture(scope="session") def wait_for_healthchecks(): wait_for_healthcheck_util() @@ -37,7 +57,7 @@ def privileges_and_test_user_setup(admin_session): # Sleep for eventual consistency wait_for_writes_to_sync() - # Create a new user + # Create a new user admin_session = create_user(admin_session, "user", "user") yield @@ -57,15 +77,16 @@ def privileges_and_test_user_setup(admin_session): @tenacity.retry( stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) ) -def _ensure_cant_perform_action(session, json,assertion_key): - action_response = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=json) +def _ensure_cant_perform_action(session, json, assertion_key): + action_response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) action_response.raise_for_status() action_data = action_response.json() - assert action_data["errors"][0]["extensions"]["code"] == 403, action_data["errors"][0] + assert action_data["errors"][0]["extensions"]["code"] == 403, action_data["errors"][ + 0 + ] assert action_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" - assert action_data["data"][assertion_key] == None + assert action_data["data"][assertion_key] is None @tenacity.retry( @@ -73,7 +94,8 @@ def _ensure_cant_perform_action(session, json,assertion_key): ) def _ensure_can_create_secret(session, json, urn): create_secret_success = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=json) + f"{get_frontend_url()}/api/v2/graphql", json=json + ) create_secret_success.raise_for_status() secret_data = create_secret_success.json() @@ -81,14 +103,15 @@ def _ensure_can_create_secret(session, json, urn): assert secret_data["data"] assert secret_data["data"]["createSecret"] assert secret_data["data"]["createSecret"] == urn - + @tenacity.retry( stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) ) def _ensure_can_create_ingestion_source(session, json): create_ingestion_success = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=json) + f"{get_frontend_url()}/api/v2/graphql", json=json + ) create_ingestion_success.raise_for_status() ingestion_data = create_ingestion_success.json() @@ -98,14 +121,15 @@ def 
_ensure_can_create_ingestion_source(session, json): assert ingestion_data["data"]["createIngestionSource"] is not None return ingestion_data["data"]["createIngestionSource"] - + @tenacity.retry( stop=tenacity.stop_after_attempt(10), wait=tenacity.wait_fixed(sleep_sec) ) def _ensure_can_create_access_token(session, json): create_access_token_success = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=json) + f"{get_frontend_url()}/api/v2/graphql", json=json + ) create_access_token_success.raise_for_status() ingestion_data = create_access_token_success.json() @@ -128,50 +152,49 @@ def _ensure_can_create_user_policy(session, json): assert res_data["data"] assert res_data["data"]["createPolicy"] is not None - return res_data["data"]["createPolicy"] + return res_data["data"]["createPolicy"] @pytest.mark.dependency(depends=["test_healthchecks"]) def test_privilege_to_create_and_manage_secrets(): - (admin_user, admin_pass) = get_admin_credentials() admin_session = login_as(admin_user, admin_pass) user_session = login_as("user", "user") secret_urn = "urn:li:dataHubSecret:TestSecretName" # Verify new user can't create secrets - create_secret = { + create_secret = { "query": """mutation createSecret($input: CreateSecretInput!) {\n createSecret(input: $input)\n}""", "variables": { - "input":{ - "name":"TestSecretName", - "value":"Test Secret Value", - "description":"Test Secret Description" - } + "input": { + "name": "TestSecretName", + "value": "Test Secret Value", + "description": "Test Secret Description", + } }, } - _ensure_cant_perform_action(user_session, create_secret,"createSecret") - + _ensure_cant_perform_action(user_session, create_secret, "createSecret") # Assign privileges to the new user to manage secrets - policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_SECRETS"], admin_session) + policy_urn = create_user_policy( + "urn:li:corpuser:user", ["MANAGE_SECRETS"], admin_session + ) # Verify new user can create and manage secrets # Create a secret _ensure_can_create_secret(user_session, create_secret, secret_urn) - # Remove a secret - remove_secret = { + remove_secret = { "query": """mutation deleteSecret($urn: String!) {\n deleteSecret(urn: $urn)\n}""", - "variables": { - "urn": secret_urn - }, + "variables": {"urn": secret_urn}, } - remove_secret_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_secret) + remove_secret_response = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=remove_secret + ) remove_secret_response.raise_for_status() secret_data = remove_secret_response.json() @@ -180,28 +203,29 @@ def test_privilege_to_create_and_manage_secrets(): assert secret_data["data"]["deleteSecret"] assert secret_data["data"]["deleteSecret"] == secret_urn - # Remove the policy remove_policy(policy_urn, admin_session) # Ensure user can't create secret after policy is removed - _ensure_cant_perform_action(user_session, create_secret,"createSecret") + _ensure_cant_perform_action(user_session, create_secret, "createSecret") @pytest.mark.dependency(depends=["test_healthchecks"]) def test_privilege_to_create_and_manage_ingestion_source(): - (admin_user, admin_pass) = get_admin_credentials() admin_session = login_as(admin_user, admin_pass) user_session = login_as("user", "user") # Verify new user can't create ingestion source - create_ingestion_source = { + create_ingestion_source = { "query": """mutation createIngestionSource($input: UpdateIngestionSourceInput!) 
{\n createIngestionSource(input: $input)\n}""", - "variables": {"input":{"type":"snowflake","name":"test","config": - {"recipe": - """{\"source\":{\"type\":\"snowflake\",\"config\":{ + "variables": { + "input": { + "type": "snowflake", + "name": "test", + "config": { + "recipe": """{\"source\":{\"type\":\"snowflake\",\"config\":{ \"account_id\":null, \"include_table_lineage\":true, \"include_view_lineage\":true, @@ -209,25 +233,39 @@ def test_privilege_to_create_and_manage_ingestion_source(): \"include_views\":true, \"profiling\":{\"enabled\":true,\"profile_table_level_only\":true}, \"stateful_ingestion\":{\"enabled\":true}}}}""", - "executorId":"default","debugMode":False,"extraArgs":[]}}}, + "executorId": "default", + "debugMode": False, + "extraArgs": [], + }, + } + }, } - _ensure_cant_perform_action(user_session, create_ingestion_source, "createIngestionSource") + _ensure_cant_perform_action( + user_session, create_ingestion_source, "createIngestionSource" + ) + # Assign privileges to the new user to manage ingestion source + policy_urn = create_user_policy( + "urn:li:corpuser:user", ["MANAGE_INGESTION"], admin_session + ) - # Assign privileges to the new user to manage ingestion source - policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_INGESTION"], admin_session) - # Verify new user can create and manage ingestion source(edit, delete) - ingestion_source_urn = _ensure_can_create_ingestion_source(user_session, create_ingestion_source) + ingestion_source_urn = _ensure_can_create_ingestion_source( + user_session, create_ingestion_source + ) # Edit ingestion source - update_ingestion_source = { + update_ingestion_source = { "query": """mutation updateIngestionSource($urn: String!, $input: UpdateIngestionSourceInput!) {\n updateIngestionSource(urn: $urn, input: $input)\n}""", - "variables": {"urn":ingestion_source_urn, - "input":{"type":"snowflake","name":"test updated", - "config":{"recipe":"""{\"source\":{\"type\":\"snowflake\",\"config\":{ + "variables": { + "urn": ingestion_source_urn, + "input": { + "type": "snowflake", + "name": "test updated", + "config": { + "recipe": """{\"source\":{\"type\":\"snowflake\",\"config\":{ \"account_id\":null, \"include_table_lineage\":true, \"include_view_lineage\":true, @@ -235,11 +273,17 @@ def test_privilege_to_create_and_manage_ingestion_source(): \"include_views\":true, \"profiling\":{\"enabled\":true,\"profile_table_level_only\":true}, \"stateful_ingestion\":{\"enabled\":true}}}}""", - "executorId":"default","debugMode":False,"extraArgs":[]}}} + "executorId": "default", + "debugMode": False, + "extraArgs": [], + }, + }, + }, } update_ingestion_success = user_session.post( - f"{get_frontend_url()}/api/v2/graphql", json=update_ingestion_source) + f"{get_frontend_url()}/api/v2/graphql", json=update_ingestion_source + ) update_ingestion_success.raise_for_status() ingestion_data = update_ingestion_success.json() @@ -248,17 +292,16 @@ def test_privilege_to_create_and_manage_ingestion_source(): assert ingestion_data["data"]["updateIngestionSource"] assert ingestion_data["data"]["updateIngestionSource"] == ingestion_source_urn - # Delete ingestion source - remove_ingestion_source = { + remove_ingestion_source = { "query": """mutation deleteIngestionSource($urn: String!) 
    {\n deleteIngestionSource(urn: $urn)\n}""",
-        "variables": {
-            "urn": ingestion_source_urn
-        },
+        "variables": {"urn": ingestion_source_urn},
     }
 
-    remove_ingestion_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_ingestion_source)
+    remove_ingestion_response = user_session.post(
+        f"{get_frontend_url()}/api/v2/graphql", json=remove_ingestion_source
+    )
     remove_ingestion_response.raise_for_status()
     ingestion_data = remove_ingestion_response.json()
@@ -271,75 +314,81 @@ def test_privilege_to_create_and_manage_ingestion_source():
     remove_policy(policy_urn, admin_session)
 
     # Ensure that user can't create ingestion source after policy is removed
-    _ensure_cant_perform_action(user_session, create_ingestion_source, "createIngestionSource")
+    _ensure_cant_perform_action(
+        user_session, create_ingestion_source, "createIngestionSource"
+    )
 
 
+@pytest.mark.skip(reason="Functionality and test needs to be validated for correctness")
 @pytest.mark.dependency(depends=["test_healthchecks"])
 def test_privilege_to_create_and_manage_access_tokens():
-
     (admin_user, admin_pass) = get_admin_credentials()
     admin_session = login_as(admin_user, admin_pass)
     user_session = login_as("user", "user")
-
     # Verify new user can't create access token
-    create_access_token = { 
+    create_access_token = {
         "query": """mutation createAccessToken($input: CreateAccessTokenInput!)
     {\n createAccessToken(input: $input) {\n accessToken\n __typename\n }\n}\n""",
-        "variables": {"input":{"actorUrn":"urn:li:corpuser:user",
-                      "type":"PERSONAL",
-                      "duration":"ONE_MONTH",
-                      "name":"test",
-                      "description":"test"}}
+        "variables": {
+            "input": {
+                "actorUrn": "urn:li:corpuser:user",
+                "type": "PERSONAL",
+                "duration": "ONE_MONTH",
+                "name": "test",
+                "description": "test",
+            }
+        },
     }
 
-    _ensure_cant_perform_action(user_session, create_access_token,"createAccessToken")
-
+    _ensure_cant_perform_action(user_session, create_access_token, "createAccessToken")
 
     # Assign privileges to the new user to create and manage access tokens
-    policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_ACCESS_TOKENS"], admin_session)
-
+    policy_urn = create_user_policy(
+        "urn:li:corpuser:user", ["MANAGE_ACCESS_TOKENS"], admin_session
+    )
 
     # Verify new user can create and manage access token(create, revoke)
    # Create an access token
     _ensure_can_create_access_token(user_session, create_access_token)
-
     # List access tokens first to get token id
-    list_access_tokens = { 
+    list_access_tokens = {
         "query": """query listAccessTokens($input: ListAccessTokenInput!)
{\n listAccessTokens(input: $input) {\n start\n count\n total\n tokens {\n urn\n type\n id\n name\n description\n actorUrn\n ownerUrn\n createdAt\n expiresAt\n __typename\n }\n __typename\n }\n}\n""", "variables": { - "input":{ - "start":0,"count":10,"filters":[{ - "field":"ownerUrn", - "values":["urn:li:corpuser:user"]}]} - } + "input": { + "start": 0, + "count": 10, + "filters": [{"field": "ownerUrn", "values": ["urn:li:corpuser:user"]}], + } + }, } - list_tokens_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=list_access_tokens) + list_tokens_response = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=list_access_tokens + ) list_tokens_response.raise_for_status() list_tokens_data = list_tokens_response.json() assert list_tokens_data assert list_tokens_data["data"] assert list_tokens_data["data"]["listAccessTokens"]["tokens"][0]["id"] is not None - - access_token_id = list_tokens_data["data"]["listAccessTokens"]["tokens"][0]["id"] + access_token_id = list_tokens_data["data"]["listAccessTokens"]["tokens"][0]["id"] # Revoke access token - revoke_access_token = { + revoke_access_token = { "query": "mutation revokeAccessToken($tokenId: String!) {\n revokeAccessToken(tokenId: $tokenId)\n}\n", - "variables": { - "tokenId": access_token_id - }, + "variables": {"tokenId": access_token_id}, } - revoke_token_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=revoke_access_token) + revoke_token_response = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=revoke_access_token + ) revoke_token_response.raise_for_status() revoke_token_data = revoke_token_response.json() @@ -348,22 +397,18 @@ def test_privilege_to_create_and_manage_access_tokens(): assert revoke_token_data["data"]["revokeAccessToken"] assert revoke_token_data["data"]["revokeAccessToken"] is True - # Remove the policy remove_policy(policy_urn, admin_session) - # Ensure that user can't create access token after policy is removed - _ensure_cant_perform_action(user_session, create_access_token,"createAccessToken") + _ensure_cant_perform_action(user_session, create_access_token, "createAccessToken") @pytest.mark.dependency(depends=["test_healthchecks"]) def test_privilege_to_create_and_manage_policies(): - (admin_user, admin_pass) = get_admin_credentials() admin_session = login_as(admin_user, admin_pass) user_session = login_as("user", "user") - # Verify new user can't create a policy create_policy = { @@ -376,7 +421,7 @@ def test_privilege_to_create_and_manage_policies(): "name": "Policy Name", "description": "Policy Description", "state": "ACTIVE", - "resources": {"filter":{"criteria":[]}}, + "resources": {"filter": {"criteria": []}}, "privileges": ["MANAGE_POLICIES"], "actors": { "users": [], @@ -388,19 +433,19 @@ def test_privilege_to_create_and_manage_policies(): }, } - _ensure_cant_perform_action(user_session, create_policy,"createPolicy") - + _ensure_cant_perform_action(user_session, create_policy, "createPolicy") # Assign privileges to the new user to create and manage policies - admin_policy_urn = create_user_policy("urn:li:corpuser:user", ["MANAGE_POLICIES"], admin_session) - + admin_policy_urn = create_user_policy( + "urn:li:corpuser:user", ["MANAGE_POLICIES"], admin_session + ) # Verify new user can create and manage policy(create, edit, delete) # Create a policy user_policy_urn = _ensure_can_create_user_policy(user_session, create_policy) # Edit a policy - edit_policy = { + edit_policy = { "query": """mutation updatePolicy($urn: String!, $input: 
PolicyUpdateInput!) {\n updatePolicy(urn: $urn, input: $input) }""", "variables": { @@ -422,7 +467,9 @@ def test_privilege_to_create_and_manage_policies(): }, }, } - edit_policy_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=edit_policy) + edit_policy_response = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=edit_policy + ) edit_policy_response.raise_for_status() res_data = edit_policy_response.json() @@ -431,12 +478,14 @@ def test_privilege_to_create_and_manage_policies(): assert res_data["data"]["updatePolicy"] == user_policy_urn # Delete a policy - remove_user_policy = { + remove_user_policy = { "query": "mutation deletePolicy($urn: String!) {\n deletePolicy(urn: $urn)\n}\n", - "variables":{"urn":user_policy_urn} + "variables": {"urn": user_policy_urn}, } - remove_policy_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_user_policy) + remove_policy_response = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=remove_user_policy + ) remove_policy_response.raise_for_status() res_data = remove_policy_response.json() @@ -444,18 +493,16 @@ def test_privilege_to_create_and_manage_policies(): assert res_data["data"] assert res_data["data"]["deletePolicy"] == user_policy_urn - # Remove the user privilege by admin remove_policy(admin_policy_urn, admin_session) - # Ensure that user can't create a policy after privilege is removed by admin - _ensure_cant_perform_action(user_session, create_policy,"createPolicy") + _ensure_cant_perform_action(user_session, create_policy, "createPolicy") +@pytest.mark.skip(reason="Functionality and test needs to be validated for correctness") @pytest.mark.dependency(depends=["test_healthchecks"]) def test_privilege_from_group_role_can_create_and_manage_secret(): - (admin_user, admin_pass) = get_admin_credentials() admin_session = login_as(admin_user, admin_pass) user_session = login_as("user", "user") @@ -466,20 +513,20 @@ def test_privilege_from_group_role_can_create_and_manage_secret(): "query": """mutation createSecret($input: CreateSecretInput!) {\n createSecret(input: $input)\n}""", "variables": { - "input":{ - "name":"TestSecretName", - "value":"Test Secret Value", - "description":"Test Secret Description" + "input": { + "name": "TestSecretName", + "value": "Test Secret Value", + "description": "Test Secret Description", } }, } - _ensure_cant_perform_action(user_session, create_secret,"createSecret") + _ensure_cant_perform_action(user_session, create_secret, "createSecret") # Create group and grant it the admin role. group_urn = create_group(admin_session, "Test Group") # Assign admin role to group - assign_role(admin_session,"urn:li:dataHubRole:Admin", [group_urn]) + assign_role(admin_session, "urn:li:dataHubRole:Admin", [group_urn]) # Assign user to group assign_user_to_group(admin_session, group_urn, ["urn:li:corpuser:user"]) @@ -492,12 +539,12 @@ def test_privilege_from_group_role_can_create_and_manage_secret(): remove_secret = { "query": """mutation deleteSecret($urn: String!) 
{\n deleteSecret(urn: $urn)\n}""", - "variables": { - "urn": secret_urn - }, + "variables": {"urn": secret_urn}, } - remove_secret_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_secret) + remove_secret_response = user_session.post( + f"{get_frontend_url()}/api/v2/graphql", json=remove_secret + ) remove_secret_response.raise_for_status() secret_data = remove_secret_response.json() @@ -510,4 +557,4 @@ def test_privilege_from_group_role_can_create_and_manage_secret(): remove_group(admin_session, group_urn) # Ensure user can't create secret after policy is removed - _ensure_cant_perform_action(user_session, create_secret,"createSecret") + _ensure_cant_perform_action(user_session, create_secret, "createSecret") diff --git a/smoke-test/tests/privileges/utils.py b/smoke-test/tests/privileges/utils.py index eeb385a243a90..1e58ec4085b70 100644 --- a/smoke-test/tests/privileges/utils.py +++ b/smoke-test/tests/privileges/utils.py @@ -1,10 +1,9 @@ -import requests_wrapper as requests from tests.consistency_utils import wait_for_writes_to_sync -from tests.utils import (get_frontend_url, wait_for_writes_to_sync, get_admin_credentials) +from tests.utils import get_admin_credentials, get_frontend_url, login_as def set_base_platform_privileges_policy_status(status, session): - base_platform_privileges = { + base_platform_privileges = { "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) {\n updatePolicy(urn: $urn, input: $input) }""", "variables": { @@ -14,18 +13,20 @@ def set_base_platform_privileges_policy_status(status, session): "state": status, "name": "All Users - Base Platform Privileges", "description": "Grants base platform privileges to ALL users of DataHub. Change this policy to alter that behavior.", - "privileges": ["MANAGE_INGESTION", - "MANAGE_SECRETS", - "MANAGE_USERS_AND_GROUPS", - "VIEW_ANALYTICS", - "GENERATE_PERSONAL_ACCESS_TOKENS", - "MANAGE_DOMAINS", - "MANAGE_GLOBAL_ANNOUNCEMENTS", - "MANAGE_TESTS", - "MANAGE_GLOSSARIES", - "MANAGE_TAGS", - "MANAGE_GLOBAL_VIEWS", - "MANAGE_GLOBAL_OWNERSHIP_TYPES"], + "privileges": [ + "MANAGE_INGESTION", + "MANAGE_SECRETS", + "MANAGE_USERS_AND_GROUPS", + "VIEW_ANALYTICS", + "GENERATE_PERSONAL_ACCESS_TOKENS", + "MANAGE_DOMAINS", + "MANAGE_GLOBAL_ANNOUNCEMENTS", + "MANAGE_TESTS", + "MANAGE_GLOSSARIES", + "MANAGE_TAGS", + "MANAGE_GLOBAL_VIEWS", + "MANAGE_GLOBAL_OWNERSHIP_TYPES", + ], "actors": { "users": [], "groups": None, @@ -38,13 +39,15 @@ def set_base_platform_privileges_policy_status(status, session): }, } base_privileges_response = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=base_platform_privileges) + f"{get_frontend_url()}/api/v2/graphql", json=base_platform_privileges + ) base_privileges_response.raise_for_status() base_res_data = base_privileges_response.json() assert base_res_data["data"]["updatePolicy"] == "urn:li:dataHubPolicy:7" + def set_view_dataset_sensitive_info_policy_status(status, session): - dataset_sensitive_information = { + dataset_sensitive_information = { "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) 
{\n updatePolicy(urn: $urn, input: $input) }""", "variables": { @@ -54,7 +57,7 @@ def set_view_dataset_sensitive_info_policy_status(status, session): "state": status, "name": "All Users - View Dataset Sensitive Information", "description": "Grants viewing privileges of usage and profile information of all datasets for all users", - "privileges": ["VIEW_DATASET_USAGE","VIEW_DATASET_PROFILE"], + "privileges": ["VIEW_DATASET_USAGE", "VIEW_DATASET_PROFILE"], "actors": { "users": [], "groups": None, @@ -67,13 +70,18 @@ def set_view_dataset_sensitive_info_policy_status(status, session): }, } sensitive_info_response = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=dataset_sensitive_information) + f"{get_frontend_url()}/api/v2/graphql", json=dataset_sensitive_information + ) sensitive_info_response.raise_for_status() sens_info_data = sensitive_info_response.json() - assert sens_info_data["data"]["updatePolicy"] == "urn:li:dataHubPolicy:view-dataset-sensitive" + assert ( + sens_info_data["data"]["updatePolicy"] + == "urn:li:dataHubPolicy:view-dataset-sensitive" + ) + def set_view_entity_profile_privileges_policy_status(status, session): - view_entity_page = { + view_entity_page = { "query": """mutation updatePolicy($urn: String!, $input: PolicyUpdateInput!) {\n updatePolicy(urn: $urn, input: $input) }""", "variables": { @@ -83,12 +91,14 @@ def set_view_entity_profile_privileges_policy_status(status, session): "state": status, "name": "All Users - View Entity Page", "description": "Grants entity view to all users", - "privileges": ["VIEW_ENTITY_PAGE", - "SEARCH_PRIVILEGE", - "GET_COUNTS_PRIVILEGE", - "GET_TIMESERIES_ASPECT_PRIVILEGE", - "GET_ENTITY_PRIVILEGE", - "GET_TIMELINE_PRIVILEGE"], + "privileges": [ + "VIEW_ENTITY_PAGE", + "SEARCH_PRIVILEGE", + "GET_COUNTS_PRIVILEGE", + "GET_TIMESERIES_ASPECT_PRIVILEGE", + "GET_ENTITY_PRIVILEGE", + "GET_TIMELINE_PRIVILEGE", + ], "actors": { "users": [], "groups": None, @@ -101,10 +111,15 @@ def set_view_entity_profile_privileges_policy_status(status, session): }, } view_entity_response = session.post( - f"{get_frontend_url()}/api/v2/graphql", json=view_entity_page) + f"{get_frontend_url()}/api/v2/graphql", json=view_entity_page + ) view_entity_response.raise_for_status() view_entity_data = view_entity_response.json() - assert view_entity_data["data"]["updatePolicy"] == "urn:li:dataHubPolicy:view-entity-page-all" + assert ( + view_entity_data["data"]["updatePolicy"] + == "urn:li:dataHubPolicy:view-entity-page-all" + ) + def create_user(session, email, password): # Remove user if exists @@ -136,9 +151,7 @@ def create_user(session, email, password): "title": "Data Engineer", "inviteToken": invite_token, } - sign_up_response = session.post( - f"{get_frontend_url()}/signUp", json=sign_up_json - ) + sign_up_response = session.post(f"{get_frontend_url()}/signUp", json=sign_up_json) sign_up_response.raise_for_status() assert sign_up_response assert "error" not in sign_up_response @@ -149,16 +162,6 @@ def create_user(session, email, password): return admin_session -def login_as(username, password): - session = requests.Session() - headers = { - "Content-Type": "application/json", - } - data = '{"username":"' + username + '", "password":"' + password + '"}' - response = session.post(f"{get_frontend_url()}/logIn", headers=headers, data=data) - response.raise_for_status() - return session - def remove_user(session, urn): json = { "query": """mutation removeUser($urn: String!) 
{\n @@ -170,6 +173,7 @@ def remove_user(session, urn): response.raise_for_status() return response.json() + def create_group(session, name): json = { "query": """mutation createGroup($input: CreateGroupInput!) {\n @@ -185,6 +189,7 @@ def create_group(session, name): assert res_data["data"]["createGroup"] return res_data["data"]["createGroup"] + def remove_group(session, urn): json = { "query": """mutation removeGroup($urn: String!) {\n @@ -200,6 +205,7 @@ def remove_group(session, urn): assert res_data["data"]["removeGroup"] return res_data["data"]["removeGroup"] + def assign_user_to_group(session, group_urn, user_urns): json = { "query": """mutation addGroupMembers($groupUrn: String!, $userUrns: [String!]!) {\n @@ -215,6 +221,7 @@ def assign_user_to_group(session, group_urn, user_urns): assert res_data["data"]["addGroupMembers"] return res_data["data"]["addGroupMembers"] + def assign_role(session, role_urn, actor_urns): json = { "query": """mutation batchAssignRole($input: BatchAssignRoleInput!) {\n @@ -231,6 +238,7 @@ def assign_role(session, role_urn, actor_urns): assert res_data["data"]["batchAssignRole"] return res_data["data"]["batchAssignRole"] + def create_user_policy(user_urn, privileges, session): policy = { "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n @@ -241,7 +249,7 @@ def create_user_policy(user_urn, privileges, session): "name": "Policy Name", "description": "Policy Description", "state": "ACTIVE", - "resources": {"filter":{"criteria":[]}}, + "resources": {"filter": {"criteria": []}}, "privileges": privileges, "actors": { "users": [user_urn], @@ -262,6 +270,7 @@ def create_user_policy(user_urn, privileges, session): assert res_data["data"]["createPolicy"] return res_data["data"]["createPolicy"] + def remove_policy(urn, session): remove_policy_json = { "query": """mutation deletePolicy($urn: String!) {\n @@ -269,11 +278,13 @@ def remove_policy(urn, session): "variables": {"urn": urn}, } - response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_policy_json) + response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=remove_policy_json + ) response.raise_for_status() res_data = response.json() assert res_data assert res_data["data"] assert res_data["data"]["deletePolicy"] - assert res_data["data"]["deletePolicy"] == urn \ No newline at end of file + assert res_data["data"]["deletePolicy"] == urn diff --git a/smoke-test/tests/read_only/test_services_up.py b/smoke-test/tests/read_only/test_services_up.py index 792a5063d3f8b..4e00f910ceb73 100644 --- a/smoke-test/tests/read_only/test_services_up.py +++ b/smoke-test/tests/read_only/test_services_up.py @@ -1,8 +1,8 @@ import os +import re import pytest import requests -import re from tests.utils import get_gms_url, wait_for_healthcheck_util @@ -14,9 +14,11 @@ def test_services_up(): wait_for_healthcheck_util() + def looks_like_a_short_sha(sha: str) -> bool: return len(sha) == 7 and re.match(r"[0-9a-f]{7}", sha) is not None + @pytest.mark.read_only def test_gms_config_accessible() -> None: gms_config = requests.get(f"{get_gms_url()}/config").json() @@ -33,4 +35,6 @@ def test_gms_config_accessible() -> None: default_cli_version: str = gms_config["managedIngestion"]["defaultCliVersion"] print(f"Default CLI version: {default_cli_version}") assert not default_cli_version.startswith("@") - assert "." in default_cli_version or looks_like_a_short_sha(default_cli_version), "Default CLI version does not look like a version string" + assert "." 
in default_cli_version or looks_like_a_short_sha( + default_cli_version + ), "Default CLI version does not look like a version string" diff --git a/smoke-test/tests/setup/lineage/ingest_data_job_change.py b/smoke-test/tests/setup/lineage/ingest_data_job_change.py index 588a1625419bc..2746baf89600e 100644 --- a/smoke-test/tests/setup/lineage/ingest_data_job_change.py +++ b/smoke-test/tests/setup/lineage/ingest_data_job_change.py @@ -1,20 +1,31 @@ from typing import List -from datahub.emitter.mce_builder import (make_data_flow_urn, - make_data_job_urn_with_flow, - make_dataset_urn) +from datahub.emitter.mce_builder import ( + make_data_flow_urn, + make_data_job_urn_with_flow, + make_dataset_urn, +) from datahub.emitter.rest_emitter import DatahubRestEmitter -from datahub.metadata.schema_classes import (DateTypeClass, NumberTypeClass, - SchemaFieldDataTypeClass, - StringTypeClass) +from datahub.metadata.schema_classes import ( + DateTypeClass, + NumberTypeClass, + SchemaFieldDataTypeClass, + StringTypeClass, +) -from tests.setup.lineage.constants import (AIRFLOW_DATA_PLATFORM, - SNOWFLAKE_DATA_PLATFORM, - TIMESTAMP_MILLIS_EIGHT_DAYS_AGO, - TIMESTAMP_MILLIS_ONE_DAY_AGO) +from tests.setup.lineage.constants import ( + AIRFLOW_DATA_PLATFORM, + SNOWFLAKE_DATA_PLATFORM, + TIMESTAMP_MILLIS_EIGHT_DAYS_AGO, + TIMESTAMP_MILLIS_ONE_DAY_AGO, +) from tests.setup.lineage.helper_classes import Dataset, Field, Pipeline, Task -from tests.setup.lineage.utils import (create_edge, create_node, - create_nodes_and_edges, emit_mcps) +from tests.setup.lineage.utils import ( + create_edge, + create_node, + create_nodes_and_edges, + emit_mcps, +) # Constants for Case 2 DAILY_TEMPERATURE_DATASET_ID = "climate.daily_temperature" diff --git a/smoke-test/tests/setup/lineage/ingest_dataset_join_change.py b/smoke-test/tests/setup/lineage/ingest_dataset_join_change.py index bb9f51b6b5e9b..4a8da1fcf0588 100644 --- a/smoke-test/tests/setup/lineage/ingest_dataset_join_change.py +++ b/smoke-test/tests/setup/lineage/ingest_dataset_join_change.py @@ -2,17 +2,26 @@ from datahub.emitter.mce_builder import make_dataset_urn from datahub.emitter.rest_emitter import DatahubRestEmitter -from datahub.metadata.schema_classes import (NumberTypeClass, - SchemaFieldDataTypeClass, - StringTypeClass, UpstreamClass) +from datahub.metadata.schema_classes import ( + NumberTypeClass, + SchemaFieldDataTypeClass, + StringTypeClass, + UpstreamClass, +) -from tests.setup.lineage.constants import (DATASET_ENTITY_TYPE, - SNOWFLAKE_DATA_PLATFORM, - TIMESTAMP_MILLIS_EIGHT_DAYS_AGO, - TIMESTAMP_MILLIS_ONE_DAY_AGO) +from tests.setup.lineage.constants import ( + DATASET_ENTITY_TYPE, + SNOWFLAKE_DATA_PLATFORM, + TIMESTAMP_MILLIS_EIGHT_DAYS_AGO, + TIMESTAMP_MILLIS_ONE_DAY_AGO, +) from tests.setup.lineage.helper_classes import Dataset, Field -from tests.setup.lineage.utils import (create_node, create_upstream_edge, - create_upstream_mcp, emit_mcps) +from tests.setup.lineage.utils import ( + create_node, + create_upstream_edge, + create_upstream_mcp, + emit_mcps, +) # Constants for Case 3 GDP_DATASET_ID = "economic_data.gdp" diff --git a/smoke-test/tests/setup/lineage/ingest_input_datasets_change.py b/smoke-test/tests/setup/lineage/ingest_input_datasets_change.py index 6079d7a3d2b63..143c65c082656 100644 --- a/smoke-test/tests/setup/lineage/ingest_input_datasets_change.py +++ b/smoke-test/tests/setup/lineage/ingest_input_datasets_change.py @@ -1,20 +1,30 @@ from typing import List -from datahub.emitter.mce_builder import (make_data_flow_urn, - 
make_data_job_urn_with_flow, - make_dataset_urn) +from datahub.emitter.mce_builder import ( + make_data_flow_urn, + make_data_job_urn_with_flow, + make_dataset_urn, +) from datahub.emitter.rest_emitter import DatahubRestEmitter -from datahub.metadata.schema_classes import (NumberTypeClass, - SchemaFieldDataTypeClass, - StringTypeClass) +from datahub.metadata.schema_classes import ( + NumberTypeClass, + SchemaFieldDataTypeClass, + StringTypeClass, +) -from tests.setup.lineage.constants import (AIRFLOW_DATA_PLATFORM, - BQ_DATA_PLATFORM, - TIMESTAMP_MILLIS_EIGHT_DAYS_AGO, - TIMESTAMP_MILLIS_ONE_DAY_AGO) +from tests.setup.lineage.constants import ( + AIRFLOW_DATA_PLATFORM, + BQ_DATA_PLATFORM, + TIMESTAMP_MILLIS_EIGHT_DAYS_AGO, + TIMESTAMP_MILLIS_ONE_DAY_AGO, +) from tests.setup.lineage.helper_classes import Dataset, Field, Pipeline, Task -from tests.setup.lineage.utils import (create_edge, create_node, - create_nodes_and_edges, emit_mcps) +from tests.setup.lineage.utils import ( + create_edge, + create_node, + create_nodes_and_edges, + emit_mcps, +) # Constants for Case 1 TRANSACTIONS_DATASET_ID = "transactions.transactions" diff --git a/smoke-test/tests/setup/lineage/ingest_time_lineage.py b/smoke-test/tests/setup/lineage/ingest_time_lineage.py index 3aec979707290..116e6cd63dd9f 100644 --- a/smoke-test/tests/setup/lineage/ingest_time_lineage.py +++ b/smoke-test/tests/setup/lineage/ingest_time_lineage.py @@ -4,11 +4,17 @@ from datahub.emitter.rest_emitter import DatahubRestEmitter from tests.setup.lineage.ingest_data_job_change import ( - get_data_job_change_urns, ingest_data_job_change) + get_data_job_change_urns, + ingest_data_job_change, +) from tests.setup.lineage.ingest_dataset_join_change import ( - get_dataset_join_change_urns, ingest_dataset_join_change) + get_dataset_join_change_urns, + ingest_dataset_join_change, +) from tests.setup.lineage.ingest_input_datasets_change import ( - get_input_datasets_change_urns, ingest_input_datasets_change) + get_input_datasets_change_urns, + ingest_input_datasets_change, +) SERVER = os.getenv("DATAHUB_SERVER") or "http://localhost:8080" TOKEN = os.getenv("DATAHUB_TOKEN") or "" diff --git a/smoke-test/tests/setup/lineage/utils.py b/smoke-test/tests/setup/lineage/utils.py index c72f6ccb89b7a..d4c16ed3b7a21 100644 --- a/smoke-test/tests/setup/lineage/utils.py +++ b/smoke-test/tests/setup/lineage/utils.py @@ -1,29 +1,38 @@ -import datetime from typing import List -from datahub.emitter.mce_builder import (make_data_flow_urn, - make_data_job_urn_with_flow, - make_data_platform_urn, - make_dataset_urn) +from datahub.emitter.mce_builder import ( + make_data_flow_urn, + make_data_job_urn_with_flow, + make_data_platform_urn, + make_dataset_urn, +) from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.emitter.rest_emitter import DatahubRestEmitter from datahub.metadata.com.linkedin.pegasus2avro.dataset import UpstreamLineage -from datahub.metadata.schema_classes import (AuditStampClass, ChangeTypeClass, - DataFlowInfoClass, - DataJobInfoClass, - DataJobInputOutputClass, - DatasetLineageTypeClass, - DatasetPropertiesClass, EdgeClass, - MySqlDDLClass, SchemaFieldClass, - SchemaMetadataClass, - UpstreamClass) - -from tests.setup.lineage.constants import (DATA_FLOW_ENTITY_TYPE, - DATA_FLOW_INFO_ASPECT_NAME, - DATA_JOB_ENTITY_TYPE, - DATA_JOB_INFO_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - DATASET_ENTITY_TYPE) +from datahub.metadata.com.linkedin.pegasus2avro.mxe import SystemMetadata +from datahub.metadata.schema_classes import ( + 
AuditStampClass, + ChangeTypeClass, + DataFlowInfoClass, + DataJobInfoClass, + DataJobInputOutputClass, + DatasetLineageTypeClass, + DatasetPropertiesClass, + EdgeClass, + MySqlDDLClass, + SchemaFieldClass, + SchemaMetadataClass, + UpstreamClass, +) + +from tests.setup.lineage.constants import ( + DATA_FLOW_ENTITY_TYPE, + DATA_FLOW_INFO_ASPECT_NAME, + DATA_JOB_ENTITY_TYPE, + DATA_JOB_INFO_ASPECT_NAME, + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + DATASET_ENTITY_TYPE, +) from tests.setup.lineage.helper_classes import Dataset, Pipeline @@ -176,17 +185,16 @@ def create_upstream_mcp( run_id: str = "", ) -> MetadataChangeProposalWrapper: print(f"Creating upstreamLineage aspect for {entity_urn}") - timestamp_millis: int = int(datetime.datetime.now().timestamp() * 1000) mcp = MetadataChangeProposalWrapper( entityType=entity_type, entityUrn=entity_urn, changeType=ChangeTypeClass.UPSERT, aspectName="upstreamLineage", aspect=UpstreamLineage(upstreams=upstreams), - systemMetadata={ - "lastObserved": timestamp_millis, - "runId": run_id, - }, + systemMetadata=SystemMetadata( + lastObserved=timestamp_millis, + runId=run_id, + ), ) return mcp diff --git a/smoke-test/tests/structured_properties/test_structured_properties.py b/smoke-test/tests/structured_properties/test_structured_properties.py index 83994776076b0..de85d2af95e03 100644 --- a/smoke-test/tests/structured_properties/test_structured_properties.py +++ b/smoke-test/tests/structured_properties/test_structured_properties.py @@ -1,31 +1,39 @@ import logging import os -from datahub.ingestion.graph.filters import SearchFilterRule -from tests.consistency_utils import wait_for_writes_to_sync import tempfile from random import randint -from tests.utilities.file_emitter import FileEmitter from typing import Iterable, List, Optional, Union import pytest + # import tenacity from datahub.api.entities.dataset.dataset import Dataset -from datahub.api.entities.structuredproperties.structuredproperties import \ - StructuredProperties +from datahub.api.entities.structuredproperties.structuredproperties import ( + StructuredProperties, +) from datahub.emitter.mce_builder import make_dataset_urn, make_schema_field_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph from datahub.metadata.schema_classes import ( - EntityTypeInfoClass, PropertyValueClass, StructuredPropertiesClass, - StructuredPropertyDefinitionClass, StructuredPropertyValueAssignmentClass) + EntityTypeInfoClass, + PropertyValueClass, + StructuredPropertiesClass, + StructuredPropertyDefinitionClass, + StructuredPropertyValueAssignmentClass, +) from datahub.specific.dataset import DatasetPatchBuilder -from datahub.utilities.urns.structured_properties_urn import \ - StructuredPropertyUrn +from datahub.utilities.urns.structured_properties_urn import StructuredPropertyUrn from datahub.utilities.urns.urn import Urn -from tests.utils import (delete_urns, delete_urns_from_file, get_gms_url, - get_sleep_info, ingest_file_via_rest, - wait_for_writes_to_sync) +from tests.consistency_utils import wait_for_writes_to_sync +from tests.utilities.file_emitter import FileEmitter +from tests.utils import ( + delete_urns, + delete_urns_from_file, + get_gms_url, + get_sleep_info, + ingest_file_via_rest, +) logger = logging.getLogger(__name__) @@ -36,8 +44,7 @@ ] schema_field_urns = [ - make_schema_field_urn(dataset_urn, "column_1") - for dataset_urn in dataset_urns + make_schema_field_urn(dataset_urn, "column_1") for dataset_urn in 
dataset_urns ] generated_urns = [d for d in dataset_urns] + [f for f in schema_field_urns] @@ -45,6 +52,7 @@ default_namespace = "io.acryl.privacy" + def create_logical_entity( entity_name: str, ) -> Iterable[MetadataChangeProposalWrapper]: @@ -66,14 +74,13 @@ def create_test_data(filename: str): file_emitter.close() wait_for_writes_to_sync() + sleep_sec, sleep_times = get_sleep_info() @pytest.fixture(scope="module", autouse=False) def graph() -> DataHubGraph: - graph: DataHubGraph = DataHubGraph( - config=DatahubClientConfig(server=get_gms_url()) - ) + graph: DataHubGraph = DataHubGraph(config=DatahubClientConfig(server=get_gms_url())) return graph @@ -132,7 +139,7 @@ def attach_property_to_entity( property_name: str, property_value: Union[str, float, List[str | float]], graph: DataHubGraph, - namespace: str = default_namespace + namespace: str = default_namespace, ): if isinstance(property_value, list): property_values: List[Union[str, float]] = property_value @@ -159,15 +166,12 @@ def get_property_from_entity( property_name: str, graph: DataHubGraph, ): - structured_properties: Optional[ - StructuredPropertiesClass - ] = graph.get_aspect(urn, StructuredPropertiesClass) + structured_properties: Optional[StructuredPropertiesClass] = graph.get_aspect( + urn, StructuredPropertiesClass + ) assert structured_properties is not None for property in structured_properties.properties: - if ( - property.propertyUrn - == f"urn:li:structuredProperty:{property_name}" - ): + if property.propertyUrn == f"urn:li:structuredProperty:{property_name}": return property.values return None @@ -181,16 +185,14 @@ def test_structured_property_string(ingest_cleanup_data, graph): property_name = "retentionPolicy" create_property_definition(property_name, graph) - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.retentionPolicy") - - attach_property_to_entity( - dataset_urns[0], property_name, ["30d"], graph=graph + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.retentionPolicy" ) + attach_property_to_entity(dataset_urns[0], property_name, ["30d"], graph=graph) + try: - attach_property_to_entity( - dataset_urns[0], property_name, 200030, graph=graph - ) + attach_property_to_entity(dataset_urns[0], property_name, 200030, graph=graph) raise AssertionError( "Should not be able to attach a number to a string property" ) @@ -208,12 +210,12 @@ def test_structured_property_string(ingest_cleanup_data, graph): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_structured_property_double(ingest_cleanup_data, graph): property_name = "expiryTime" - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.{property_name}" + ) create_property_definition(property_name, graph, value_type="number") - attach_property_to_entity( - dataset_urns[0], property_name, 2000034, graph=graph - ) + attach_property_to_entity(dataset_urns[0], property_name, 2000034, graph=graph) try: attach_property_to_entity( @@ -232,9 +234,7 @@ def test_structured_property_double(ingest_cleanup_data, graph): attach_property_to_entity( dataset_urns[0], property_name, [2000034, 2000035], graph=graph ) - raise AssertionError( - "Should not be able to attach a list to a number property" - ) + raise AssertionError("Should not be able to attach a list to a number property") except Exception as e: if not isinstance(e, AssertionError): pass @@ -249,15 +249,15 @@ def 
test_structured_property_double(ingest_cleanup_data, graph): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_structured_property_double_multiple(ingest_cleanup_data, graph): property_name = "versions" - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.{property_name}" + ) create_property_definition( property_name, graph, value_type="number", cardinality="MULTIPLE" ) - attach_property_to_entity( - dataset_urns[0], property_name, [1.0, 2.0], graph=graph - ) + attach_property_to_entity(dataset_urns[0], property_name, [1.0, 2.0], graph=graph) # @tenacity.retry( @@ -265,11 +265,11 @@ def test_structured_property_double_multiple(ingest_cleanup_data, graph): # wait=tenacity.wait_fixed(sleep_sec), # ) @pytest.mark.dependency(depends=["test_healthchecks"]) -def test_structured_property_string_allowed_values( - ingest_cleanup_data, graph -): +def test_structured_property_string_allowed_values(ingest_cleanup_data, graph): property_name = "enumProperty" - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.{property_name}" + ) create_property_definition( property_name, @@ -301,9 +301,7 @@ def test_structured_property_string_allowed_values( @pytest.mark.dependency(depends=["test_healthchecks"]) -def test_structured_property_definition_evolution( - ingest_cleanup_data, graph -): +def test_structured_property_definition_evolution(ingest_cleanup_data, graph): property_name = "enumProperty1234" create_property_definition( @@ -316,7 +314,9 @@ def test_structured_property_definition_evolution( PropertyValueClass(value="bar"), ], ) - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.{property_name}" + ) try: create_property_definition( @@ -345,9 +345,7 @@ def test_structured_property_definition_evolution( # ) @pytest.mark.dependency(depends=["test_healthchecks"]) def test_structured_property_schema_field(ingest_cleanup_data, graph): - property_name = ( - f"deprecationDate{randint(10, 10000)}" - ) + property_name = f"deprecationDate{randint(10, 10000)}" create_property_definition( property_name, @@ -356,26 +354,31 @@ def test_structured_property_schema_field(ingest_cleanup_data, graph): value_type="date", entity_types=["schemaField"], ) - generated_urns.append(f"urn:li:structuredProperty:io.datahubproject.test.{property_name}") + generated_urns.append( + f"urn:li:structuredProperty:io.datahubproject.test.{property_name}" + ) attach_property_to_entity( - schema_field_urns[0], property_name, "2020-10-01", graph=graph, namespace="io.datahubproject.test" + schema_field_urns[0], + property_name, + "2020-10-01", + graph=graph, + namespace="io.datahubproject.test", ) - assert ( - get_property_from_entity( - schema_field_urns[0], f"io.datahubproject.test.{property_name}", graph=graph - ) - == ["2020-10-01"] - ) + assert get_property_from_entity( + schema_field_urns[0], f"io.datahubproject.test.{property_name}", graph=graph + ) == ["2020-10-01"] try: attach_property_to_entity( - schema_field_urns[0], property_name, 200030, graph=graph, namespace="io.datahubproject.test" - ) - raise AssertionError( - "Should not be able to attach a number to a DATE property" + schema_field_urns[0], + property_name, + 200030, + graph=graph, + namespace="io.datahubproject.test", ) + raise 
AssertionError("Should not be able to attach a number to a DATE property") except Exception as e: if not isinstance(e, AssertionError): pass @@ -388,49 +391,38 @@ def test_dataset_yaml_loader(ingest_cleanup_data, graph): "tests/structured_properties/test_structured_properties.yaml" ) - for dataset in Dataset.from_yaml( - "tests/structured_properties/test_dataset.yaml" - ): + for dataset in Dataset.from_yaml("tests/structured_properties/test_dataset.yaml"): for mcp in dataset.generate_mcp(): graph.emit(mcp) wait_for_writes_to_sync() property_name = "io.acryl.dataManagement.deprecationDate" - assert ( - get_property_from_entity( - make_schema_field_urn( - make_dataset_urn("hive", "user.clicks"), "ip" - ), - property_name, - graph=graph, - ) - == ["2023-01-01"] - ) + assert get_property_from_entity( + make_schema_field_urn(make_dataset_urn("hive", "user.clicks"), "ip"), + property_name, + graph=graph, + ) == ["2023-01-01"] dataset = Dataset.from_datahub( graph=graph, urn="urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD)", ) field_name = "ip" + assert dataset.schema_metadata is not None + assert dataset.schema_metadata.fields is not None matching_fields = [ f for f in dataset.schema_metadata.fields - if Dataset._simplify_field_path(f.id) == field_name + if f.id is not None and Dataset._simplify_field_path(f.id) == field_name ] assert len(matching_fields) == 1 - assert ( - matching_fields[0].structured_properties[ - Urn.make_structured_property_urn( - "io.acryl.dataManagement.deprecationDate" - ) - ] - == ["2023-01-01"] - ) + assert matching_fields[0].structured_properties is not None + assert matching_fields[0].structured_properties[ + Urn.make_structured_property_urn("io.acryl.dataManagement.deprecationDate") + ] == ["2023-01-01"] -def test_dataset_structured_property_validation( - ingest_cleanup_data, graph, caplog -): +def test_dataset_structured_property_validation(ingest_cleanup_data, graph, caplog): from datahub.api.entities.dataset.dataset import Dataset property_name = "replicationSLA" @@ -440,7 +432,9 @@ def test_dataset_structured_property_validation( create_property_definition( property_name=property_name, graph=graph, value_type=value_type ) - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.replicationSLA") + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.replicationSLA" + ) attach_property_to_entity( dataset_urns[0], property_name, [property_value], graph=graph @@ -453,21 +447,15 @@ def test_dataset_structured_property_validation( float(property_value), ) - assert ( - Dataset.validate_structured_property("testName", "testValue") is None - ) + assert Dataset.validate_structured_property("testName", "testValue") is None bad_property_value = "2023-09-20" assert ( - Dataset.validate_structured_property( - property_name, bad_property_value - ) - is None + Dataset.validate_structured_property(property_name, bad_property_value) is None ) - -def test_structured_property_search(ingest_cleanup_data, graph: DataHubGraph, caplog): +def test_structured_property_search(ingest_cleanup_data, graph: DataHubGraph, caplog): def to_es_name(property_name, namespace=default_namespace): namespace_field = namespace.replace(".", "_") return f"structuredProperties.{namespace_field}_{property_name}" @@ -478,88 +466,116 @@ def to_es_name(property_name, namespace=default_namespace): create_property_definition( namespace="io.datahubproject.test", property_name=field_property_name, - graph=graph, value_type="date", entity_types=["schemaField"] + 
graph=graph, + value_type="date", + entity_types=["schemaField"], + ) + generated_urns.append( + f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}" ) - generated_urns.append(f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}") attach_property_to_entity( - schema_field_urns[0], field_property_name, "2020-10-01", graph=graph, namespace="io.datahubproject.test" + schema_field_urns[0], + field_property_name, + "2020-10-01", + graph=graph, + namespace="io.datahubproject.test", ) dataset_property_name = "replicationSLA" property_value = 30 value_type = "number" - create_property_definition(property_name=dataset_property_name, graph=graph, value_type=value_type) - generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{dataset_property_name}") + create_property_definition( + property_name=dataset_property_name, graph=graph, value_type=value_type + ) + generated_urns.append( + f"urn:li:structuredProperty:{default_namespace}.{dataset_property_name}" + ) - attach_property_to_entity(dataset_urns[0], dataset_property_name, [property_value], graph=graph) + attach_property_to_entity( + dataset_urns[0], dataset_property_name, [property_value], graph=graph + ) # [] = default entities which includes datasets, does not include fields - entity_urns = list(graph.get_urns_by_filter(extraFilters=[ - { - "field": to_es_name(dataset_property_name), - "negated": "false", - "condition": "EXISTS", - } - ])) + entity_urns = list( + graph.get_urns_by_filter( + extraFilters=[ + { + "field": to_es_name(dataset_property_name), + "negated": "false", + "condition": "EXISTS", + } + ] + ) + ) assert len(entity_urns) == 1 assert entity_urns[0] == dataset_urns[0] # Search over schema field specifically - field_structured_prop = graph.get_aspect(entity_urn=schema_field_urns[0], aspect_type=StructuredPropertiesClass) + field_structured_prop = graph.get_aspect( + entity_urn=schema_field_urns[0], aspect_type=StructuredPropertiesClass + ) assert field_structured_prop == StructuredPropertiesClass( properties=[ StructuredPropertyValueAssignmentClass( propertyUrn=f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}", - values=["2020-10-01"] + values=["2020-10-01"], ) ] ) # Search over entities that do not include the field - field_urns = list(graph.get_urns_by_filter(entity_types=["tag"], - extraFilters=[ - { - "field": to_es_name(field_property_name, - namespace="io.datahubproject.test"), - "negated": "false", - "condition": "EXISTS", - } - ])) + field_urns = list( + graph.get_urns_by_filter( + entity_types=["tag"], + extraFilters=[ + { + "field": to_es_name( + field_property_name, namespace="io.datahubproject.test" + ), + "negated": "false", + "condition": "EXISTS", + } + ], + ) + ) assert len(field_urns) == 0 # OR the two properties together to return both results - field_urns = list(graph.get_urns_by_filter(entity_types=["dataset", "tag"], - extraFilters=[ - { - "field": to_es_name(dataset_property_name), - "negated": "false", - "condition": "EXISTS", - } - ])) + field_urns = list( + graph.get_urns_by_filter( + entity_types=["dataset", "tag"], + extraFilters=[ + { + "field": to_es_name(dataset_property_name), + "negated": "false", + "condition": "EXISTS", + } + ], + ) + ) assert len(field_urns) == 1 assert dataset_urns[0] in field_urns -def test_dataset_structured_property_patch( - ingest_cleanup_data, graph, caplog -): +@pytest.mark.skip(reason="Functionality and test needs to be validated for correctness") +def 
test_dataset_structured_property_patch(ingest_cleanup_data, graph, caplog): property_name = "replicationSLA" property_value = 30 value_type = "number" create_property_definition( - property_name=property_name, - graph=graph, - value_type=value_type + property_name=property_name, graph=graph, value_type=value_type ) - dataset_patcher: DatasetPatchBuilder = DatasetPatchBuilder( - urn=dataset_urns[0] - ) + dataset_patcher: DatasetPatchBuilder = DatasetPatchBuilder(urn=dataset_urns[0]) - dataset_patcher.set_structured_property(StructuredPropertyUrn.make_structured_property_urn( - f"{default_namespace}.{property_name}"), property_value) + dataset_patcher.set_structured_property( + StructuredPropertyUrn.make_structured_property_urn( + f"{default_namespace}.{property_name}" + ), + property_value, + ) for mcp in dataset_patcher.build(): graph.emit(mcp) @@ -567,11 +583,12 @@ def test_dataset_structured_property_patch( dataset = Dataset.from_datahub(graph=graph, urn=dataset_urns[0]) assert dataset.structured_properties is not None - assert ( - [int(float(k)) for k in dataset.structured_properties[ + assert isinstance(dataset.structured_properties, list) + assert [ + int(float(k)) + for k in dataset.structured_properties[ StructuredPropertyUrn.make_structured_property_urn( f"{default_namespace}.{property_name}" ) - ]] - == [property_value] - ) + ] + ] == [property_value] diff --git a/smoke-test/tests/tags_and_terms/__init__.py b/smoke-test/tests/tags_and_terms/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/tags-and-terms/data.json b/smoke-test/tests/tags_and_terms/data.json similarity index 100% rename from smoke-test/tests/tags-and-terms/data.json rename to smoke-test/tests/tags_and_terms/data.json diff --git a/smoke-test/tests/tags-and-terms/tags_and_terms_test.py b/smoke-test/tests/tags_and_terms/tags_and_terms_test.py similarity index 98% rename from smoke-test/tests/tags-and-terms/tags_and_terms_test.py rename to smoke-test/tests/tags_and_terms/tags_and_terms_test.py index 6ac75765286f0..34404a1ddff59 100644 --- a/smoke-test/tests/tags-and-terms/tags_and_terms_test.py +++ b/smoke-test/tests/tags_and_terms/tags_and_terms_test.py @@ -1,16 +1,20 @@ import pytest -from tests.utils import (delete_urns_from_file, get_frontend_url, - ingest_file_via_rest, wait_for_healthcheck_util) +from tests.utils import ( + delete_urns_from_file, + get_frontend_url, + ingest_file_via_rest, + wait_for_healthcheck_util, +) @pytest.fixture(scope="module", autouse=True) def ingest_cleanup_data(request): print("ingesting test data") - ingest_file_via_rest("tests/tags-and-terms/data.json") + ingest_file_via_rest("tests/tags_and_terms/data.json") yield print("removing test data") - delete_urns_from_file("tests/tags-and-terms/data.json") + delete_urns_from_file("tests/tags_and_terms/data.json") @pytest.fixture(scope="session") diff --git a/smoke-test/tests/telemetry/telemetry_test.py b/smoke-test/tests/telemetry/telemetry_test.py index b7cd6fa0517df..963d85baef3bb 100644 --- a/smoke-test/tests/telemetry/telemetry_test.py +++ b/smoke-test/tests/telemetry/telemetry_test.py @@ -5,7 +5,9 @@ def test_no_client_id(): client_id_urn = "urn:li:telemetry:clientId" - aspect = ["clientId"] # this is checking for the removal of the invalid aspect RemoveClientIdAspectStep.java + aspect = [ + "clientId" + ] # this is checking for the removal of the invalid aspect RemoveClientIdAspectStep.java res_data = json.dumps( get_aspects_for_entity(entity_urn=client_id_urn, aspects=aspect, 
typed=False) diff --git a/smoke-test/tests/test_stateful_ingestion.py b/smoke-test/tests/test_stateful_ingestion.py index c6adb402e5d51..5eac25059ec62 100644 --- a/smoke-test/tests/test_stateful_ingestion.py +++ b/smoke-test/tests/test_stateful_ingestion.py @@ -4,15 +4,19 @@ from datahub.ingestion.run.pipeline import Pipeline from datahub.ingestion.source.sql.mysql import MySQLConfig, MySQLSource from datahub.ingestion.source.state.checkpoint import Checkpoint -from datahub.ingestion.source.state.entity_removal_state import \ - GenericCheckpointState -from datahub.ingestion.source.state.stale_entity_removal_handler import \ - StaleEntityRemovalHandler +from datahub.ingestion.source.state.entity_removal_state import GenericCheckpointState +from datahub.ingestion.source.state.stale_entity_removal_handler import ( + StaleEntityRemovalHandler, +) from sqlalchemy import create_engine from sqlalchemy.sql import text -from tests.utils import (get_gms_url, get_mysql_password, get_mysql_url, - get_mysql_username) +from tests.utils import ( + get_gms_url, + get_mysql_password, + get_mysql_url, + get_mysql_username, +) def test_stateful_ingestion(wait_for_healthchecks): diff --git a/smoke-test/tests/tests/tests_test.py b/smoke-test/tests/tests/tests_test.py index 213a2ea087b7a..28005c8397d0d 100644 --- a/smoke-test/tests/tests/tests_test.py +++ b/smoke-test/tests/tests/tests_test.py @@ -1,9 +1,13 @@ import pytest import tenacity -from tests.utils import (delete_urns_from_file, get_frontend_url, - get_sleep_info, ingest_file_via_rest, - wait_for_healthcheck_util) +from tests.utils import ( + delete_urns_from_file, + get_frontend_url, + get_sleep_info, + ingest_file_via_rest, + wait_for_healthcheck_util, +) sleep_sec, sleep_times = get_sleep_info() @@ -37,7 +41,6 @@ def test_healthchecks(wait_for_healthchecks): def create_test(frontend_session): - # Create new Test create_test_json = { "query": """mutation createTest($input: CreateTestInput!) 
{\n
@@ -84,7 +87,6 @@ def delete_test(frontend_session, test_urn):
 
 @pytest.mark.dependency(depends=["test_healthchecks"])
 def test_create_test(frontend_session, wait_for_healthchecks):
-
     test_urn = create_test(frontend_session)
 
     # Get the test
diff --git a/smoke-test/tests/timeline/timeline_test.py b/smoke-test/tests/timeline/timeline_test.py
index c075d981487db..f8a0e425c3781 100644
--- a/smoke-test/tests/timeline/timeline_test.py
+++ b/smoke-test/tests/timeline/timeline_test.py
@@ -1,15 +1,14 @@
 import json
 
-import pytest
-from time import sleep
+import pytest
 from datahub.cli import timeline_cli
 from datahub.cli.cli_utils import guess_entity_type, post_entity
-from tests.utils import (get_datahub_graph, ingest_file_via_rest,
-                         wait_for_writes_to_sync)
+from tests.utils import get_datahub_graph, ingest_file_via_rest, wait_for_writes_to_sync
 
 pytestmark = pytest.mark.no_cypress_suite1
 
+
 def test_all():
     platform = "urn:li:dataPlatform:kafka"
     dataset_name = "test-timeline-sample-kafka"
@@ -184,7 +183,7 @@ def put(urn: str, aspect: str, aspect_data: str) -> None:
     entity_type = guess_entity_type(urn)
     with open(aspect_data) as fp:
         aspect_obj = json.load(fp)
-    status = post_entity(
+    post_entity(
         urn=urn,
         aspect_name=aspect,
         entity_type=entity_type,
diff --git a/smoke-test/tests/tokens/revokable_access_token_test.py b/smoke-test/tests/tokens/revokable_access_token_test.py
index 55f3de594af4e..10332b32b9caf 100644
--- a/smoke-test/tests/tokens/revokable_access_token_test.py
+++ b/smoke-test/tests/tokens/revokable_access_token_test.py
@@ -1,11 +1,14 @@
 import os
-from time import sleep
 
 import pytest
-import requests
 
-from tests.utils import (get_admin_credentials, get_frontend_url,
-                         wait_for_healthcheck_util, wait_for_writes_to_sync)
+from tests.utils import (
+    get_admin_credentials,
+    get_frontend_url,
+    login_as,
+    wait_for_healthcheck_util,
+    wait_for_writes_to_sync,
+)
 
 # Disable telemetry
 os.environ["DATAHUB_TELEMETRY_ENABLED"] = "false"
@@ -29,7 +32,7 @@ def test_healthchecks(wait_for_healthchecks):
 @pytest.fixture(scope="class", autouse=True)
 def custom_user_setup():
     """Fixture to execute setup before and tear down after all tests are run"""
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
 
     res_data = removeUser(admin_session, "urn:li:corpuser:user")
     assert res_data
@@ -77,7 +80,7 @@ def custom_user_setup():
 
     # signUp will override the session cookie to the new user to be signed up.
     admin_session.cookies.clear()
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
 
     # Make sure the created user is there.
    res_data = listUsers(admin_session)
@@ -91,7 +94,7 @@ def custom_user_setup():
     res_data = removeUser(admin_session, "urn:li:corpuser:user")
     assert res_data
     assert res_data["data"]
-    assert res_data["data"]["removeUser"] == True
+    assert res_data["data"]["removeUser"] is True
 
     # Sleep for eventual consistency
     wait_for_writes_to_sync()
@@ -106,7 +109,7 @@ def custom_user_setup():
 @pytest.fixture(autouse=True)
 def access_token_setup():
     """Fixture to execute asserts before and after a test is run"""
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
 
     res_data = listAccessTokens(admin_session)
     assert res_data
@@ -127,7 +130,7 @@ def access_token_setup():
 
 @pytest.mark.dependency(depends=["test_healthchecks"])
 def test_admin_can_create_list_and_revoke_tokens(wait_for_healthchecks):
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
 
     # Using a super account, there should be no tokens
     res_data = listAccessTokens(admin_session)
@@ -170,7 +173,7 @@ def test_admin_can_create_list_and_revoke_tokens(wait_for_healthchecks):
     assert res_data
     assert res_data["data"]
     assert res_data["data"]["revokeAccessToken"]
-    assert res_data["data"]["revokeAccessToken"] == True
+    assert res_data["data"]["revokeAccessToken"] is True
 
     # Sleep for eventual consistency
     wait_for_writes_to_sync()
@@ -184,7 +187,7 @@ def test_admin_can_create_list_and_revoke_tokens(wait_for_healthchecks):
 
 @pytest.mark.dependency(depends=["test_healthchecks"])
 def test_admin_can_create_and_revoke_tokens_for_other_user(wait_for_healthchecks):
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
 
     # Using a super account, there should be no tokens
     res_data = listAccessTokens(admin_session)
@@ -227,7 +230,7 @@ def test_admin_can_create_and_revoke_tokens_for_other_user(wait_for_healthchecks
     assert res_data
     assert res_data["data"]
     assert res_data["data"]["revokeAccessToken"]
-    assert res_data["data"]["revokeAccessToken"] == True
+    assert res_data["data"]["revokeAccessToken"] is True
 
     # Sleep for eventual consistency
     wait_for_writes_to_sync()
@@ -241,7 +244,7 @@ def test_admin_can_create_and_revoke_tokens_for_other_user(wait_for_healthchecks
 
 @pytest.mark.dependency(depends=["test_healthchecks"])
 def test_non_admin_can_create_list_revoke_tokens(wait_for_healthchecks):
-    user_session = loginAs("user", "user")
+    user_session = login_as("user", "user")
 
     # Normal user should be able to generate token for himself.
    res_data = generateAccessToken_v2(user_session, "urn:li:corpuser:user")
@@ -280,7 +283,7 @@ def test_non_admin_can_create_list_revoke_tokens(wait_for_healthchecks):
     assert res_data
     assert res_data["data"]
     assert res_data["data"]["revokeAccessToken"]
-    assert res_data["data"]["revokeAccessToken"] == True
+    assert res_data["data"]["revokeAccessToken"] is True
 
     # Sleep for eventual consistency
     wait_for_writes_to_sync()
@@ -296,7 +299,7 @@ def test_non_admin_can_create_list_revoke_tokens(wait_for_healthchecks):
 
 @pytest.mark.dependency(depends=["test_healthchecks"])
 def test_admin_can_manage_tokens_generated_by_other_user(wait_for_healthchecks):
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
 
     # Using a super account, there should be no tokens
     res_data = listAccessTokens(admin_session)
@@ -306,7 +309,7 @@ def test_admin_can_manage_tokens_generated_by_other_user(wait_for_healthchecks):
     assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
 
     admin_session.cookies.clear()
-    user_session = loginAs("user", "user")
+    user_session = login_as("user", "user")
     res_data = generateAccessToken_v2(user_session, "urn:li:corpuser:user")
     assert res_data
     assert res_data["data"]
@@ -326,7 +329,7 @@ def test_admin_can_manage_tokens_generated_by_other_user(wait_for_healthchecks):
 
     # Admin should be able to list other tokens
     user_session.cookies.clear()
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
     res_data = listAccessTokens(
         admin_session, [{"field": "ownerUrn", "values": ["urn:li:corpuser:user"]}]
     )
@@ -346,18 +349,18 @@ def test_admin_can_manage_tokens_generated_by_other_user(wait_for_healthchecks):
 
     # Admin can delete token created by someone else.
     admin_session.cookies.clear()
-    admin_session = loginAs(admin_user, admin_pass)
+    admin_session = login_as(admin_user, admin_pass)
     res_data = revokeAccessToken(admin_session, user_tokenId)
     assert res_data
     assert res_data["data"]
     assert res_data["data"]["revokeAccessToken"]
-    assert res_data["data"]["revokeAccessToken"] == True
+    assert res_data["data"]["revokeAccessToken"] is True
 
     # Sleep for eventual consistency
     wait_for_writes_to_sync()
 
     # Using a normal account, check that all its tokens were removed.
     user_session.cookies.clear()
-    user_session = loginAs("user", "user")
+    user_session = login_as("user", "user")
     res_data = listAccessTokens(
         user_session, [{"field": "ownerUrn", "values": ["urn:li:corpuser:user"]}]
     )
@@ -367,7 +370,7 @@ def test_admin_can_manage_tokens_generated_by_other_user(wait_for_healthchecks):
     assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
 
     # Using the super account, check that all tokens were removed.
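#     (A minimal usage sketch of the shared login_as helper these hunks adopt
#     in place of the module-local loginAs; its definition appears in
#     tests/utils.py later in this commit. It assumes get_admin_credentials()
#     returns a (username, password) pair, as its use in this file suggests,
#     and that login_as returns an authenticated requests.Session usable with
#     the GraphQL helpers:)
#
#         from tests.utils import get_admin_credentials, login_as
#
#         admin_user, admin_pass = get_admin_credentials()
#         admin_session = login_as(admin_user, admin_pass)
#         res_data = listAccessTokens(admin_session)
#         assert res_data["data"]["listAccessTokens"]["tokens"] == []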
- admin_session = loginAs(admin_user, admin_pass) + admin_session = login_as(admin_user, admin_pass) res_data = listAccessTokens( admin_session, [{"field": "ownerUrn", "values": ["urn:li:corpuser:user"]}] ) @@ -379,7 +382,7 @@ def test_admin_can_manage_tokens_generated_by_other_user(wait_for_healthchecks): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_non_admin_can_not_generate_tokens_for_others(wait_for_healthchecks): - user_session = loginAs("user", "user") + user_session = login_as("user", "user") # Normal user should not be able to generate token for another user res_data = generateAccessToken_v2(user_session, f"urn:li:corpuser:{admin_user}") assert res_data @@ -467,19 +470,6 @@ def revokeAccessToken(session, tokenId): return response.json() -def loginAs(username, password): - session = requests.Session() - - headers = { - "Content-Type": "application/json", - } - data = '{"username":"' + username + '", "password":"' + password + '"}' - response = session.post(f"{get_frontend_url()}/logIn", headers=headers, data=data) - response.raise_for_status() - - return session - - def removeUser(session, urn): # Remove user json = { diff --git a/smoke-test/tests/utilities/file_emitter.py b/smoke-test/tests/utilities/file_emitter.py index 27a91c360af8a..ddbcff8db31d8 100644 --- a/smoke-test/tests/utilities/file_emitter.py +++ b/smoke-test/tests/utilities/file_emitter.py @@ -1,11 +1,14 @@ -from datahub.ingestion.sink.file import FileSink, FileSinkConfig +import time + from datahub.ingestion.api.common import PipelineContext, RecordEnvelope from datahub.ingestion.api.sink import NoopWriteCallback -import time +from datahub.ingestion.sink.file import FileSink, FileSinkConfig class FileEmitter: - def __init__(self, filename: str, run_id: str = f"test_{int(time.time()*1000.0)}") -> None: + def __init__( + self, filename: str, run_id: str = f"test_{int(time.time()*1000.0)}" + ) -> None: self.sink: FileSink = FileSink( ctx=PipelineContext(run_id=run_id), config=FileSinkConfig(filename=filename), @@ -18,4 +21,4 @@ def emit(self, event): ) def close(self): - self.sink.close() \ No newline at end of file + self.sink.close() diff --git a/smoke-test/tests/utils.py b/smoke-test/tests/utils.py index bd75b13d1910f..29b956bde9ab8 100644 --- a/smoke-test/tests/utils.py +++ b/smoke-test/tests/utils.py @@ -2,14 +2,10 @@ import json import logging import os -import subprocess -import time from datetime import datetime, timedelta, timezone -from time import sleep from typing import Any, Dict, List, Tuple -from datahub.cli import cli_utils -from datahub.cli.cli_utils import get_system_auth +from datahub.cli import cli_utils, env_utils from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph from datahub.ingestion.run.pipeline import Pipeline from joblib import Parallel, delayed @@ -22,23 +18,14 @@ def get_frontend_session(): - session = requests.Session() + username, password = get_admin_credentials() + return login_as(username, password) - headers = { - "Content-Type": "application/json", - } - system_auth = get_system_auth() - if system_auth is not None: - session.headers.update({"Authorization": system_auth}) - else: - username, password = get_admin_credentials() - data = '{"username":"' + username + '", "password":"' + password + '"}' - response = session.post( - f"{get_frontend_url()}/logIn", headers=headers, data=data - ) - response.raise_for_status() - return session +def login_as(username: str, password: str): + return cli_utils.get_session_login_as( + username=username, 
password=password, frontend_url=get_frontend_url() + ) def get_admin_username() -> str: @@ -146,7 +133,7 @@ def delete_urns(urns: List[str]) -> None: def delete_urns_from_file(filename: str, shared_data: bool = False) -> None: - if not cli_utils.get_boolean_env_variable("CLEANUP_DATA", True): + if not env_utils.get_boolean_env_variable("CLEANUP_DATA", True): print("Not cleaning data to save time") return session = requests.Session() @@ -223,7 +210,7 @@ def create_datahub_step_state_aspect( def create_datahub_step_state_aspects( - username: str, onboarding_ids: str, onboarding_filename + username: str, onboarding_ids: List[str], onboarding_filename: str ) -> None: """ For a specific user, creates dataHubStepState aspects for each onboarding id in the list diff --git a/smoke-test/tests/views/views_test.py b/smoke-test/tests/views/views_test.py index 685c3bd80b04d..a99f1f0dbb245 100644 --- a/smoke-test/tests/views/views_test.py +++ b/smoke-test/tests/views/views_test.py @@ -1,10 +1,7 @@ -import time - import pytest import tenacity -from tests.utils import (delete_urns_from_file, get_frontend_url, get_gms_url, - get_sleep_info, ingest_file_via_rest) +from tests.utils import get_frontend_url, get_sleep_info sleep_sec, sleep_times = get_sleep_info() @@ -19,7 +16,6 @@ def test_healthchecks(wait_for_healthchecks): stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) ) def _ensure_more_views(frontend_session, list_views_json, query_name, before_count): - # Get new count of Views response = frontend_session.post( f"{get_frontend_url()}/api/v2/graphql", json=list_views_json @@ -43,7 +39,6 @@ def _ensure_more_views(frontend_session, list_views_json, query_name, before_cou stop=tenacity.stop_after_attempt(sleep_times), wait=tenacity.wait_fixed(sleep_sec) ) def _ensure_less_views(frontend_session, list_views_json, query_name, before_count): - # Get new count of Views response = frontend_session.post( f"{get_frontend_url()}/api/v2/graphql", json=list_views_json @@ -64,7 +59,6 @@ def _ensure_less_views(frontend_session, list_views_json, query_name, before_cou @pytest.mark.dependency(depends=["test_healthchecks"]) def test_create_list_delete_global_view(frontend_session): - # Get count of existing views list_global_views_json = { "query": """query listGlobalViews($input: ListGlobalViewsInput!) {\n @@ -161,8 +155,6 @@ def test_create_list_delete_global_view(frontend_session): before_count=before_count, ) - delete_json = {"urn": view_urn} - # Delete the View delete_view_json = { "query": """mutation deleteView($urn: String!) {\n @@ -190,7 +182,6 @@ def test_create_list_delete_global_view(frontend_session): depends=["test_healthchecks", "test_create_list_delete_global_view"] ) def test_create_list_delete_personal_view(frontend_session): - # Get count of existing views list_my_views_json = { "query": """query listMyViews($input: ListMyViewsInput!) 
{\n @@ -314,7 +305,6 @@ def test_create_list_delete_personal_view(frontend_session): depends=["test_healthchecks", "test_create_list_delete_personal_view"] ) def test_update_global_view(frontend_session): - # First create a view new_view_name = "Test View" new_view_description = "Test Description" From 54f85503f9feda7404aa6911c2d2220f69ae63b2 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Wed, 31 Jan 2024 10:15:34 -0500 Subject: [PATCH 463/792] feat(forms) Update form model to include searchable on actor assignment (#9747) --- .../com/linkedin/form/FormActorAssignment.pdl | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl index e58eb4c7c56a8..f0ea0f4988298 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl @@ -7,15 +7,31 @@ record FormActorAssignment { * Whether the form should be assigned to the owners of assets that it is applied to. * This is the default. */ + @Searchable = { + "fieldName": "isOwnershipForm", + "fieldType": "BOOLEAN", + } owners: boolean = true /** * Optional: Specific set of groups that are targeted by this form assignment. */ + @Searchable = { + "/*": { + "fieldName": "assignedGroups", + "fieldType": "URN" + } + } groups: optional array[Urn] /** * Optional: Specific set of users that are targeted by this form assignment. */ + @Searchable = { + "/*": { + "fieldName": "assignedUsers", + "fieldType": "URN" + } + } users: optional array[Urn] } \ No newline at end of file From ad2df22d7843f69d90191dbfe5bb64708167c6e5 Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Wed, 31 Jan 2024 21:08:26 +0530 Subject: [PATCH 464/792] feat(ingestion/dbt): multiple node owner separated by comma (#9740) Co-authored-by: Aseem Bansal --- .../src/datahub/utilities/mapping.py | 75 +++++++++++++++---- metadata-ingestion/tests/unit/test_mapping.py | 11 ++- 2 files changed, 69 insertions(+), 17 deletions(-) diff --git a/metadata-ingestion/src/datahub/utilities/mapping.py b/metadata-ingestion/src/datahub/utilities/mapping.py index 00f7d370d1676..5d26c3af54d5e 100644 --- a/metadata-ingestion/src/datahub/utilities/mapping.py +++ b/metadata-ingestion/src/datahub/utilities/mapping.py @@ -31,6 +31,23 @@ def _get_best_match(the_match: Match, group_name: str) -> str: return the_match.group(0) +def _make_owner_category_list( + owner_type: OwnerType, + owner_category: Any, + owner_category_urn: Optional[str], + owner_ids: List[str], +) -> List[Dict]: + + return [ + { + "urn": mce_builder.make_owner_urn(owner_id, owner_type), + "category": owner_category, + "categoryUrn": owner_category_urn, + } + for owner_id in owner_ids + ] + + _match_regexp = re.compile(r"{{\s*\$match\s*}}", flags=re.MULTILINE) @@ -149,13 +166,26 @@ def process(self, raw_props: Mapping[str, Any]) -> Dict[str, Any]: operation = self.get_operation_value( operation_key, operation_type, operation_config, maybe_match ) + if operation_type == Constants.ADD_TERMS_OPERATION: # add_terms operation is a special case where the operation value is a list of terms. # We want to aggregate these values with the add_term operation. 
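#     (A sketch of the comma-separated owner handling this commit introduces
#     just below; the owner ids shown are illustrative, and
#     _make_owner_category_list is the helper added near the top of this
#     file:)
#
#         owners = _make_owner_category_list(
#             owner_type=OwnerType.USER,
#             owner_category=OwnershipTypeClass.DATAOWNER,
#             owner_category_urn=None,
#             owner_ids=["sales_member1", "sales_member2"],
#         )
#         assert owners[0]["urn"] == "urn:li:corpuser:sales_member1"
#         assert owners[1]["urn"] == "urn:li:corpuser:sales_member2"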
operation_type = Constants.ADD_TERM_OPERATION
 
                 if operation:
-                    if isinstance(operation, (str, list)):
+                    if (
+                        isinstance(operation, list)
+                        and operation_type == Constants.ADD_OWNER_OPERATION
+                    ):
+                        operation_value_list = operations_map.get(
+                            operation_type, list()
+                        )
+                        cast(List, operation_value_list).extend(
+                            operation
+                        )  # cast to silence the lint
+                        operations_map[operation_type] = operation_value_list
+
+                    elif isinstance(operation, (str, list)):
                         operations_value_set = operations_map.get(
                             operation_type, set()
                         )
@@ -184,8 +214,11 @@ def convert_to_aspects(
             tag_aspect = mce_builder.make_global_tag_aspect_with_tag_list(
                 sorted(operation_map[Constants.ADD_TAG_OPERATION])
             )
+
             aspect_map[Constants.ADD_TAG_OPERATION] = tag_aspect
+
         if Constants.ADD_OWNER_OPERATION in operation_map:
+
             owner_aspect = OwnershipClass(
                 owners=[
                     OwnerClass(
@@ -202,6 +235,7 @@
                     )
                 ]
             )
+
             aspect_map[Constants.ADD_OWNER_OPERATION] = owner_aspect
 
         if Constants.ADD_TERM_OPERATION in operation_map:
@@ -262,7 +296,7 @@ def get_operation_value(
         operation_type: str,
         operation_config: Dict,
         match: Match,
-    ) -> Optional[Union[str, Dict, List[str]]]:
+    ) -> Optional[Union[str, Dict, List[str], List[Dict]]]:
         if (
             operation_type == Constants.ADD_TAG_OPERATION
             and operation_config[Constants.TAG]
@@ -278,30 +312,39 @@
             and operation_config[Constants.OWNER_TYPE]
         ):
             owner_id = _get_best_match(match, "owner")
+
+            owner_ids: List[str] = [_id.strip() for _id in owner_id.split(",")]
+
             owner_category = (
                 operation_config.get(Constants.OWNER_CATEGORY)
                 or OwnershipTypeClass.DATAOWNER
            )
-            owner_category_urn = None
+            owner_category_urn: Optional[str] = None
             if owner_category.startswith("urn:li:"):
                 owner_category_urn = owner_category
                 owner_category = OwnershipTypeClass.DATAOWNER
             else:
                 owner_category = owner_category.upper()
+
             if self.strip_owner_email_id:
-                owner_id = self.sanitize_owner_ids(owner_id)
-            if operation_config[Constants.OWNER_TYPE] == Constants.USER_OWNER:
-                return {
-                    "urn": mce_builder.make_owner_urn(owner_id, OwnerType.USER),
-                    "category": owner_category,
-                    "categoryUrn": owner_category_urn,
-                }
-            elif operation_config[Constants.OWNER_TYPE] == Constants.GROUP_OWNER:
-                return {
-                    "urn": mce_builder.make_owner_urn(owner_id, OwnerType.GROUP),
-                    "category": owner_category,
-                    "categoryUrn": owner_category_urn,
-                }
+                owner_ids = [
+                    self.sanitize_owner_ids(owner_id) for owner_id in owner_ids
+                ]
+
+            owner_type_mapping: Dict[str, OwnerType] = {
+                Constants.USER_OWNER: OwnerType.USER,
+                Constants.GROUP_OWNER: OwnerType.GROUP,
+            }
+            if operation_config[Constants.OWNER_TYPE] in owner_type_mapping:
+                return _make_owner_category_list(
+                    owner_ids=owner_ids,
+                    owner_category=owner_category,
+                    owner_category_urn=owner_category_urn,
+                    owner_type=owner_type_mapping[
+                        operation_config[Constants.OWNER_TYPE]
+                    ],
+                )
+
         elif (
             operation_type == Constants.ADD_TERM_OPERATION
             and operation_config[Constants.TERM]
diff --git a/metadata-ingestion/tests/unit/test_mapping.py b/metadata-ingestion/tests/unit/test_mapping.py
index de35451c9ec4b..755a62fa32912 100644
--- a/metadata-ingestion/tests/unit/test_mapping.py
+++ b/metadata-ingestion/tests/unit/test_mapping.py
@@ -25,6 +25,11 @@ def get_operation_defs() -> Dict[str, Any]:
         "operation": "add_owner",
         "config": {"owner_type": "user"},
     },
+    "multi_user": {
+        "match": ".*",
+        "operation": "add_owner",
+        "config": {"owner_type": "user"},
+    },
     "group.owner": {
         "match": ".*",
         "operation": "add_owner",
@@ -78,6 +83,7 @@ def
test_operation_processor_not_matching(): def test_operation_processor_matching(): raw_props = { "user_owner": "test_user@abc.com", + "multi_user": "sales_member1@abc.com, sales_member2@abc.com", "user_owner_2": "test_user_2", "group.owner": "test.group@abc.co.in", "governance.team_owner": "Finance", @@ -86,6 +92,7 @@ def test_operation_processor_matching(): "double_property": 2.5, "tag": "Finance", } + processor = OperationProcessor( operation_defs=get_operation_defs(), owner_source_type="SOURCE_CONTROL", @@ -116,11 +123,13 @@ def test_operation_processor_matching(): ) ownership_aspect: OwnershipClass = aspect_map["add_owner"] - assert len(ownership_aspect.owners) == 3 + assert len(ownership_aspect.owners) == 5 owner_set = { "urn:li:corpuser:test_user", "urn:li:corpuser:test_user_2", "urn:li:corpGroup:test.group", + "urn:li:corpuser:sales_member1", + "urn:li:corpuser:sales_member2", } for single_owner in ownership_aspect.owners: assert single_owner.owner in owner_set From f3afdf9e0c19f0d18e426d1a4c188b70c94b86ab Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Wed, 31 Jan 2024 10:52:42 -0600 Subject: [PATCH 465/792] fix(mceConsumer): prevent health endpoint authentication failure (#9748) --- .../metadata/restli/RestliServletConfig.java | 17 +++++++++-------- .../HealthStatusAuthenticator.java | 3 ++- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java index b41e6bc75af19..269b9a41a89a9 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java @@ -3,6 +3,7 @@ import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; import com.linkedin.restli.server.RestliHandlerServlet; +import java.util.Collections; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.web.servlet.FilterRegistrationBean; @@ -41,19 +42,19 @@ public RestliHandlerServlet restliHandlerServlet() { @Bean public FilterRegistrationBean authenticationFilterRegistrationBean( - @Qualifier("restliServletRegistration") - ServletRegistrationBean servlet) { + @Qualifier("restliServletRegistration") ServletRegistrationBean servlet, + AuthenticationFilter authenticationFilter) { FilterRegistrationBean registrationBean = new FilterRegistrationBean<>(); - registrationBean.addServletRegistrationBeans(servlet); + registrationBean.setServletRegistrationBeans(Collections.singletonList(servlet)); + registrationBean.setUrlPatterns(Collections.singletonList("/gms/*")); + registrationBean.setServletNames(Collections.singletonList(servlet.getServletName())); registrationBean.setOrder(1); + registrationBean.setFilter(authenticationFilter); return registrationBean; } @Bean - public AuthenticationFilter authenticationFilter( - FilterRegistrationBean filterReg) { - AuthenticationFilter filter = new AuthenticationFilter(); - filterReg.setFilter(filter); - return filter; + public AuthenticationFilter authenticationFilter() { + return new AuthenticationFilter(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java 
b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java index 65581f1d5b635..017ab25bc7b7c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java @@ -24,7 +24,8 @@ */ @Slf4j public class HealthStatusAuthenticator implements Authenticator { - private static final Set HEALTH_ENDPOINTS = Set.of("/openapi/check/", "/openapi/up/"); + private static final Set HEALTH_ENDPOINTS = + Set.of("/openapi/check/", "/openapi/up/", "/actuator/health", "/health"); private String systemClientId; @Override From 874c683f2a1132e89498cda458a0e050ee4b73af Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Thu, 1 Feb 2024 03:55:08 +0530 Subject: [PATCH 466/792] fix(ingest/transformer): generate ownership aspect from handle_end_of_stream (#9720) --- .../ingestion/transformer/base_transformer.py | 4 +- .../extract_ownership_from_tags.py | 46 ++++++++++++++----- .../tests/unit/test_transform_dataset.py | 25 ++++++++-- 3 files changed, 58 insertions(+), 17 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py index e8e25a061a665..fb776ca8d2328 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py @@ -20,8 +20,8 @@ def _update_work_unit_id( envelope: RecordEnvelope, urn: str, aspect_name: str ) -> Dict[Any, Any]: - structured_urn = Urn.create_from_string(urn) - simple_name = "-".join(structured_urn.get_entity_id()) + structured_urn = Urn.from_string(urn) + simple_name = "-".join(structured_urn.entity_ids) record_metadata = envelope.metadata.copy() record_metadata.update({"workunit_id": f"txform-{simple_name}-{aspect_name}"}) return record_metadata diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/extract_ownership_from_tags.py b/metadata-ingestion/src/datahub/ingestion/transformer/extract_ownership_from_tags.py index 76f883b629d50..6266e0bca6c61 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/extract_ownership_from_tags.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/extract_ownership_from_tags.py @@ -1,11 +1,14 @@ +import logging import re from functools import lru_cache -from typing import List, Optional, cast +from typing import List, Optional, Sequence, Union, cast from datahub.configuration.common import TransformerSemanticsConfigModel from datahub.emitter.mce_builder import Aspect +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.transformer.dataset_transformer import DatasetTagsTransformer +from datahub.metadata._schema_classes import MetadataChangeProposalClass from datahub.metadata.schema_classes import ( GlobalTagsClass, OwnerClass, @@ -16,6 +19,8 @@ from datahub.utilities.urns.corpuser_urn import CorpuserUrn from datahub.utilities.urns.tag_urn import TagUrn +logger = logging.getLogger(__name__) + class ExtractOwnersFromTagsConfig(TransformerSemanticsConfigModel): tag_prefix: str @@ -38,11 +43,13 @@ class ExtractOwnersFromTagsTransformer(DatasetTagsTransformer): ctx: PipelineContext config: ExtractOwnersFromTagsConfig + owner_mcps: 
List[MetadataChangeProposalWrapper] def __init__(self, config: ExtractOwnersFromTagsConfig, ctx: PipelineContext): super().__init__() self.ctx = ctx self.config = config + self.owner_mcps = [] @classmethod def create( @@ -56,6 +63,12 @@ def get_owner_urn(self, owner_str: str) -> str: return owner_str + "@" + self.config.email_domain return owner_str + def handle_end_of_stream( + self, + ) -> Sequence[Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass]]: + + return self.owner_mcps + def transform_aspect( self, entity_urn: str, aspect_name: str, aspect: Optional[Aspect] ) -> Optional[Aspect]: @@ -64,28 +77,39 @@ def transform_aspect( return None tags = in_tags_aspect.tags owners: List[OwnerClass] = [] + for tag_class in tags: tag_urn = TagUrn.from_string(tag_class.tag) - tag_str = tag_urn.get_entity_id()[0] + tag_str = tag_urn.entity_ids[0] re_match = re.search(self.config.tag_prefix, tag_str) if re_match: owner_str = tag_str[re_match.end() :].strip() owner_urn_str = self.get_owner_urn(owner_str) if self.config.is_user: - owner_urn = str(CorpuserUrn.create_from_id(owner_urn_str)) + owner_urn = str(CorpuserUrn(owner_urn_str)) else: - owner_urn = str(CorpGroupUrn.create_from_id(owner_urn_str)) + owner_urn = str(CorpGroupUrn(owner_urn_str)) owner_type = get_owner_type(self.config.owner_type) if owner_type == OwnershipTypeClass.CUSTOM: assert ( self.config.owner_type_urn is not None ), "owner_type_urn must be set if owner_type is CUSTOM" - owner = OwnerClass( - owner=owner_urn, - type=owner_type, - typeUrn=self.config.owner_type_urn, + + owners.append( + OwnerClass( + owner=owner_urn, + type=owner_type, + typeUrn=self.config.owner_type_urn, + ) ) - owners.append(owner) - owner_aspect = OwnershipClass(owners=owners) - return cast(Aspect, owner_aspect) + self.owner_mcps.append( + MetadataChangeProposalWrapper( + entityUrn=entity_urn, + aspect=OwnershipClass( + owners=owners, + ), + ) + ) + + return None diff --git a/metadata-ingestion/tests/unit/test_transform_dataset.py b/metadata-ingestion/tests/unit/test_transform_dataset.py index 5152f406ed3ce..2a6176906a0c3 100644 --- a/metadata-ingestion/tests/unit/test_transform_dataset.py +++ b/metadata-ingestion/tests/unit/test_transform_dataset.py @@ -648,22 +648,35 @@ def _test_owner( ) ] ) + transformer = ExtractOwnersFromTagsTransformer.create( config, PipelineContext(run_id="test"), ) - transformed = list( + + record_envelops: List[RecordEnvelope] = list( transformer.transform( [ RecordEnvelope(dataset, metadata={}), + RecordEnvelope(record=EndOfStream(), metadata={}), ] ) ) - owners_aspect = transformed[0].record.proposedSnapshot.aspects[0] + + assert len(record_envelops) == 3 + + mcp: MetadataChangeProposalWrapper = record_envelops[1].record + + owners_aspect = cast(OwnershipClass, mcp.aspect) + owners = owners_aspect.owners + owner = owners[0] - if expected_owner_type is not None: - assert owner.type == expected_owner_type + + assert expected_owner_type is not None + + assert owner.type == expected_owner_type + assert owner.owner == expected_owner _test_owner( @@ -672,6 +685,7 @@ def _test_owner( "tag_prefix": "owner:", }, expected_owner="urn:li:corpuser:foo", + expected_owner_type=OwnershipTypeClass.TECHNICAL_OWNER, ) _test_owner( tag="abcdef-owner:foo", @@ -679,6 +693,7 @@ def _test_owner( "tag_prefix": ".*owner:", }, expected_owner="urn:li:corpuser:foo", + expected_owner_type=OwnershipTypeClass.TECHNICAL_OWNER, ) _test_owner( tag="owner:foo", @@ -687,6 +702,7 @@ def _test_owner( "is_user": False, }, 
expected_owner="urn:li:corpGroup:foo", + expected_owner_type=OwnershipTypeClass.TECHNICAL_OWNER, ) _test_owner( tag="owner:foo", @@ -695,6 +711,7 @@ def _test_owner( "email_domain": "example.com", }, expected_owner="urn:li:corpuser:foo@example.com", + expected_owner_type=OwnershipTypeClass.TECHNICAL_OWNER, ) _test_owner( tag="owner:foo", From ff0818bad722df4b618e106b523f78b7ef9242e0 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 31 Jan 2024 18:28:35 -0600 Subject: [PATCH 467/792] fix(aspects): fix default aspect generation for non-restli locations (#9746) --- .../upgrade/nocode/DataMigrationStep.java | 3 +- .../steps/BackfillBrowsePathsV2Step.java | 4 +- .../metadata/aspect/batch/AspectsBatch.java | 5 +- .../metadata/aspect/batch/BatchItem.java | 10 + .../metadata/aspect/batch/MCLBatchItem.java | 7 +- .../metadata/aspect/batch/UpsertItem.java | 2 - .../datahub/spark/TestCoalesceJobLineage.java | 4 +- .../datahub/spark/TestSparkJobsLineage.java | 4 +- .../aspect/utils/DefaultAspectsUtil.java | 312 ++++++++++ .../metadata/client/JavaEntityClient.java | 8 +- .../linkedin/metadata/entity/AspectDao.java | 4 +- .../metadata/entity/EntityServiceImpl.java | 531 ++++++------------ .../metadata/entity/ebean/EbeanAspectDao.java | 125 ++++- .../entity/ebean/batch/AspectsBatchImpl.java | 3 +- .../entity/ebean/batch/MCLBatchItemImpl.java | 4 +- .../entity/ebean/batch/MCPPatchBatchItem.java | 10 +- .../ebean/batch/MCPUpsertBatchItem.java | 24 +- .../search/utils/BrowsePathUtils.java | 3 +- .../search/utils/BrowsePathV2Utils.java | 6 +- .../service/UpdateIndicesService.java | 6 +- .../metadata/AspectIngestionUtils.java | 6 +- .../utils/DefaultAspectsUtilTest.java} | 50 +- .../CassandraAspectMigrationsDaoTest.java | 3 +- .../entity/CassandraEntityServiceTest.java | 3 +- .../entity/DeleteEntityServiceTest.java | 3 +- .../entity/EbeanAspectMigrationsDaoTest.java | 6 +- .../entity/EbeanEntityServiceTest.java | 30 +- .../metadata/entity/EntityServiceTest.java | 82 ++- .../CassandraTimelineServiceTest.java | 3 +- .../timeline/EbeanTimelineServiceTest.java | 6 +- .../io/datahubproject/test/DataGenerator.java | 45 +- .../SampleDataFixtureConfiguration.java | 3 +- .../SearchLineageFixtureConfiguration.java | 2 +- .../CustomDataQualityRulesMCPSideEffect.java | 2 +- .../token/StatefulTokenService.java | 13 +- .../metadata/config/EbeanConfiguration.java | 46 ++ .../src/main/resources/application.yml | 4 + .../factory/config/ConfigurationProvider.java | 4 + .../entity/EntityAspectDaoFactory.java | 6 +- .../EntityAspectMigrationsDaoFactory.java | 6 +- .../factory/entity/EntityServiceFactory.java | 6 +- .../boot/steps/BackfillBrowsePathsV2Step.java | 4 +- .../IngestDataPlatformInstancesStep.java | 2 +- .../boot/steps/IngestDataPlatformsStep.java | 2 +- .../boot/steps/IngestDataTypesStep.java | 39 +- .../boot/steps/IngestEntityTypesStep.java | 56 +- .../steps/UpgradeDefaultBrowsePathsStep.java | 3 +- .../steps/BackfillBrowsePathsV2StepTest.java | 26 +- .../IngestDataPlatformInstancesStepTest.java | 4 +- .../boot/steps/IngestDataTypesStepTest.java | 9 +- .../UpgradeDefaultBrowsePathsStepTest.java | 4 - .../openapi/util/MappingUtil.java | 11 +- .../v2/controller/EntityController.java | 2 +- .../java/entities/EntitiesControllerTest.java | 13 +- .../src/test/java/mock/MockEntityService.java | 2 +- .../resources/entity/AspectResource.java | 17 +- .../resources/entity/AspectResourceTest.java | 8 +- .../linkedin/metadata/entity/AspectUtils.java | 101 ---- 
.../metadata/entity/EntityService.java | 42 -- smoke-test/tests/cli/datahub_cli.py | 20 +- .../cypress/cypress/e2e/browse/browseV2.js | 2 + smoke-test/tests/delete/delete_test.py | 12 +- 62 files changed, 1013 insertions(+), 770 deletions(-) create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java rename metadata-io/src/test/java/com/linkedin/metadata/{AspectUtilsTest.java => aspect/utils/DefaultAspectsUtilTest.java} (56%) create mode 100644 metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EbeanConfiguration.java diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index ac56e5e91c72b..9f41daf02d209 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -10,6 +10,7 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; @@ -170,7 +171,7 @@ public Function executable() { // Emit a browse path aspect. final BrowsePaths browsePaths; try { - browsePaths = _entityService.buildDefaultBrowsePath(urn); + browsePaths = DefaultAspectsUtil.buildDefaultBrowsePath(urn, _entityService); final AuditStamp browsePathsStamp = new AuditStamp(); browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 9a426369cfb02..601ce4d25493c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -15,6 +15,7 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; @@ -181,7 +182,8 @@ private Filter backfillDefaultBrowsePathsV2Filter() { } private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exception { - BrowsePathsV2 browsePathsV2 = _entityService.buildDefaultBrowsePathV2(urn, true); + BrowsePathsV2 browsePathsV2 = + DefaultAspectsUtil.buildDefaultBrowsePathV2(urn, true, _entityService); log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java index 806fd47c721ec..3d803d238b4f9 100644 --- 
a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java @@ -3,6 +3,7 @@ import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -17,14 +18,14 @@ * SystemMetadata} and record/message created time */ public interface AspectsBatch { - List getItems(); + Collection getItems(); /** * Returns MCP items. Can be patch, upsert, etc. * * @return batch items */ - default List getMCPItems() { + default Collection getMCPItems() { return getItems().stream() .filter(item -> item instanceof MCPBatchItem) .map(item -> (MCPBatchItem) item) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/BatchItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/BatchItem.java index a4c0624150532..60033cd6919d6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/BatchItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/BatchItem.java @@ -2,11 +2,13 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public interface BatchItem { /** @@ -63,4 +65,12 @@ default String getAspectName() { */ @Nonnull AspectSpec getAspectSpec(); + + /** + * The aspect's record template. 
Null when patch + * + * @return record template if it exists + */ + @Nullable + RecordTemplate getRecordTemplate(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCLBatchItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCLBatchItem.java index 30e882705da45..17a910b125a34 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCLBatchItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCLBatchItem.java @@ -26,7 +26,7 @@ default String getAspectName() { if (getMetadataChangeLog().getAspectName() != null) { return getMetadataChangeLog().getAspectName(); } else { - return getAspect().schema().getName(); + return getRecordTemplate().schema().getName(); } } @@ -40,10 +40,7 @@ default SystemMetadata getPreviousSystemMetadata() { } @Nullable - RecordTemplate getPreviousAspect(); - - @Nonnull - RecordTemplate getAspect(); + RecordTemplate getPreviousRecordTemplate(); @Override @Nonnull diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java index c337e4f848e5c..c64105637dfcc 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java @@ -11,8 +11,6 @@ * related data stored along with the aspect */ public abstract class UpsertItem extends MCPBatchItem { - public abstract RecordTemplate getAspect(); - public abstract SystemAspect toLatestEntityAspect(); public abstract void validatePreCommit( diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java index 053055716eaa0..17aea13dbb94e 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java @@ -99,7 +99,9 @@ public static void resetBaseExpectations() { @BeforeClass public static void initMockServer() { - mockServer = startClientAndServer(GMS_PORT); + if (mockServer == null) { + mockServer = startClientAndServer(GMS_PORT); + } resetBaseExpectations(); } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index a4eb035b0abce..885be6d00fee8 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -138,7 +138,9 @@ public static void resetBaseExpectations() { @BeforeClass public static void init() { - mockServer = startClientAndServer(GMS_PORT); + if (mockServer == null) { + mockServer = startClientAndServer(GMS_PORT); + } resetBaseExpectations(); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java new file mode 100644 index 0000000000000..a3711afb753dc --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java @@ -0,0 +1,312 @@ +package com.linkedin.metadata.aspect.utils; + +import static 
com.linkedin.metadata.Constants.BROWSE_PATHS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.BROWSE_PATHS_V2_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME; +import static com.linkedin.metadata.search.utils.BrowsePathUtils.buildDataPlatformUrn; +import static com.linkedin.metadata.search.utils.BrowsePathUtils.getDefaultBrowsePath; +import static com.linkedin.metadata.search.utils.BrowsePathV2Utils.getDefaultBrowsePathV2; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.BrowsePaths; +import com.linkedin.common.BrowsePathsV2; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.StringArray; +import com.linkedin.dataplatform.DataPlatformInfo; +import com.linkedin.entity.EntityResponse; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.batch.AspectsBatch; +import com.linkedin.metadata.aspect.batch.BatchItem; +import com.linkedin.metadata.aspect.batch.MCPBatchItem; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.DataPlatformInstanceUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.GenericAspect; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.util.Pair; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +/** Consolidates logic for default aspects */ +@Slf4j +public class DefaultAspectsUtil { + private DefaultAspectsUtil() {} + + public static final Set SUPPORTED_TYPES = + Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); + + public static List getAdditionalChanges( + @Nonnull AspectsBatch batch, @Nonnull EntityService entityService, boolean browsePathV2) { + + Map> itemsByUrn = + batch.getMCPItems().stream() + .filter(item -> SUPPORTED_TYPES.contains(item.getChangeType())) + .collect(Collectors.groupingBy(BatchItem::getUrn)); + + Set urnsWithExistingKeyAspects = entityService.exists(itemsByUrn.keySet()); + + // create default aspects when key aspect is missing + return itemsByUrn.entrySet().stream() + .filter(aspectsEntry -> !urnsWithExistingKeyAspects.contains(aspectsEntry.getKey())) + .flatMap( + aspectsEntry -> { + // Exclude aspects already in the batch + Set currentBatchAspectNames = + aspectsEntry.getValue().stream() + .map(BatchItem::getAspectName) + .collect(Collectors.toSet()); + + // Generate key aspect and defaults + List> defaultAspects = + generateDefaultAspects( + entityService, aspectsEntry.getKey(), currentBatchAspectNames, browsePathV2); + + // First is the key aspect + RecordTemplate entityKeyAspect = defaultAspects.get(0).getSecond(); + + // pick the first item as a template (use entity information) + MCPBatchItem templateItem = aspectsEntry.getValue().get(0); + + // generate default aspects (including key aspect, always upserts) + return defaultAspects.stream() + .map( + entry -> + MCPUpsertBatchItem.MCPUpsertBatchItemBuilder.build( + getProposalFromAspect( + entry.getKey(), 
entry.getValue(), entityKeyAspect, templateItem), + templateItem.getAuditStamp(), + entityService)) + .filter(Objects::nonNull); + }) + .collect(Collectors.toList()); + } + + /** + * Generate default aspects + * + * @param entityService entity service + * @param urn entity urn + * @return a list of aspect name/aspect pairs to be written + */ + public static List> generateDefaultAspects( + @Nonnull EntityService entityService, + @Nonnull final Urn urn, + @Nonnull Set currentBatchAspectNames, + boolean browsePathV2) { + + final List> defaultAspects = new LinkedList<>(); + + // Key Aspect + final String keyAspectName = entityService.getKeyAspectName(urn); + defaultAspects.add( + Pair.of(keyAspectName, EntityUtils.buildKeyAspect(entityService.getEntityRegistry(), urn))); + + // Other Aspects + defaultAspects.addAll( + generateDefaultAspectsIfMissing( + entityService, + urn, + defaultAspects.get(0).getSecond(), + currentBatchAspectNames, + browsePathV2)); + + return defaultAspects; + } + + /** + * Generate default aspects if the aspect is NOT in the database. + * + *
+   * <p>
    Does not automatically create key aspects. + * + * @see #generateDefaultAspectsIfMissing if key aspects need autogeneration + * @param entityService + * @param urn entity urn + * @param entityKeyAspect entity's key aspect + * @return additional aspects to be written + */ + private static List> generateDefaultAspectsIfMissing( + @Nonnull EntityService entityService, + @Nonnull final Urn urn, + RecordTemplate entityKeyAspect, + @Nonnull Set currentAspectNames, + boolean browsePathV2) { + EntityRegistry entityRegistry = entityService.getEntityRegistry(); + + Set fetchAspects = + Stream.of( + BROWSE_PATHS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME) + // If browsePathV2 then exclude v1 + .filter(aspectName -> !(BROWSE_PATHS_ASPECT_NAME.equals(aspectName) && browsePathV2)) + // Exclude currently ingesting aspects + .filter(aspectName -> !currentAspectNames.contains(aspectName)) + // Exclude in case when we have limited test entity registry which doesn't include these + .filter( + aspectName -> + entityRegistry + .getEntitySpec(urn.getEntityType()) + .getAspectSpecMap() + .containsKey(aspectName)) + .collect(Collectors.toSet()); + + if (!fetchAspects.isEmpty()) { + + Set latestAspects = entityService.getLatestAspectsForUrn(urn, fetchAspects).keySet(); + + return fetchAspects.stream() + .filter(aspectName -> !latestAspects.contains(aspectName)) + .map( + aspectName -> { + switch (aspectName) { + case BROWSE_PATHS_ASPECT_NAME: + return Pair.of( + BROWSE_PATHS_ASPECT_NAME, + (RecordTemplate) buildDefaultBrowsePath(urn, entityService)); + case BROWSE_PATHS_V2_ASPECT_NAME: + return Pair.of( + BROWSE_PATHS_V2_ASPECT_NAME, + (RecordTemplate) buildDefaultBrowsePathV2(urn, false, entityService)); + case DATA_PLATFORM_INSTANCE_ASPECT_NAME: + return DataPlatformInstanceUtils.buildDataPlatformInstance( + urn.getEntityType(), entityKeyAspect) + .map( + aspect -> + Pair.of( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, (RecordTemplate) aspect)) + .orElse(null); + default: + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } + + return Collections.emptyList(); + } + + /** + * Builds the default browse path aspects for a subset of well-supported entities. + * + *
+   * <p>
    This method currently supports datasets, charts, dashboards, data flows, data jobs, and + * glossary terms. + */ + @Nonnull + public static BrowsePaths buildDefaultBrowsePath( + final @Nonnull Urn urn, EntityService entityService) { + Character dataPlatformDelimiter = getDataPlatformDelimiter(urn, entityService); + String defaultBrowsePath = + getDefaultBrowsePath(urn, entityService.getEntityRegistry(), dataPlatformDelimiter); + StringArray browsePaths = new StringArray(); + browsePaths.add(defaultBrowsePath); + BrowsePaths browsePathAspect = new BrowsePaths(); + browsePathAspect.setPaths(browsePaths); + return browsePathAspect; + } + + /** + * Builds the default browse path V2 aspects for all entities. + * + *
+   * <p>
    This method currently supports datasets, charts, dashboards, and data jobs best. Everything + * else will have a basic "Default" folder added to their browsePathV2. + */ + @Nonnull + public static BrowsePathsV2 buildDefaultBrowsePathV2( + final @Nonnull Urn urn, boolean useContainerPaths, EntityService entityService) { + Character dataPlatformDelimiter = getDataPlatformDelimiter(urn, entityService); + return getDefaultBrowsePathV2( + urn, + entityService.getEntityRegistry(), + dataPlatformDelimiter, + entityService, + useContainerPaths); + } + + /** Returns a delimiter on which the name of an asset may be split. */ + private static Character getDataPlatformDelimiter(Urn urn, EntityService entityService) { + // Attempt to construct the appropriate Data Platform URN + Urn dataPlatformUrn = buildDataPlatformUrn(urn, entityService.getEntityRegistry()); + if (dataPlatformUrn != null) { + // Attempt to resolve the delimiter from Data Platform Info + DataPlatformInfo dataPlatformInfo = getDataPlatformInfo(dataPlatformUrn, entityService); + if (dataPlatformInfo != null && dataPlatformInfo.hasDatasetNameDelimiter()) { + return dataPlatformInfo.getDatasetNameDelimiter().charAt(0); + } + } + // Else, fallback to a default delimiter (period) if one cannot be resolved. + return '.'; + } + + @Nullable + private static DataPlatformInfo getDataPlatformInfo(Urn urn, EntityService entityService) { + try { + final EntityResponse entityResponse = + entityService.getEntityV2( + Constants.DATA_PLATFORM_ENTITY_NAME, + urn, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) { + return new DataPlatformInfo( + entityResponse + .getAspects() + .get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) + .getValue() + .data()); + } + } catch (Exception e) { + log.warn(String.format("Failed to find Data Platform Info for urn %s", urn)); + } + return null; + } + + private static MetadataChangeProposal getProposalFromAspect( + String aspectName, + RecordTemplate aspect, + RecordTemplate entityKeyAspect, + MCPBatchItem templateItem) { + MetadataChangeProposal proposal = new MetadataChangeProposal(); + GenericAspect genericAspect = GenericRecordUtils.serializeAspect(aspect); + + // Set net new fields + proposal.setAspect(genericAspect); + proposal.setAspectName(aspectName); + + // Set fields determined from original + // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an + // UPSERT + proposal.setChangeType(templateItem.getChangeType()); + if (ChangeType.PATCH.equals(proposal.getChangeType())) { + proposal.setChangeType(ChangeType.UPSERT); + } + + if (templateItem.getSystemMetadata() != null) { + proposal.setSystemMetadata(templateItem.getSystemMetadata()); + } + if (templateItem.getUrn() != null) { + proposal.setEntityUrn(templateItem.getUrn()); + } + if (entityKeyAspect != null) { + proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(entityKeyAspect)); + } + proposal.setEntityType(templateItem.getUrn().getEntityType()); + + return proposal; + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 0ebe9ed1d1b66..9a3bc9e319d2b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java 
@@ -26,7 +26,6 @@ import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.browse.BrowseResultV2; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; @@ -67,7 +66,6 @@ import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; -import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; @@ -706,14 +704,10 @@ public String ingestProposal( : Constants.UNKNOWN_ACTOR; final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); - final List additionalChanges = - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); - Stream proposalStream = - Stream.concat(Stream.of(metadataChangeProposal), additionalChanges.stream()); AspectsBatch batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) + .mcps(List.of(metadataChangeProposal), auditStamp, _entityService) .build(); IngestResult one = _entityService.ingestProposal(batch, async).stream().findFirst().get(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java index e00a696a095a1..b031377842176 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java @@ -148,11 +148,11 @@ T runInTransactionWithRetry( @Nonnull final Function block, final int maxTransactionRetry); @Nonnull - default T runInTransactionWithRetry( + default List runInTransactionWithRetry( @Nonnull final Function block, AspectsBatch batch, final int maxTransactionRetry) { - return runInTransactionWithRetry(block, maxTransactionRetry); + return List.of(runInTransactionWithRetry(block, maxTransactionRetry)); } default void incrementWriteMetrics(String aspectName, long count, long bytes) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index e6e69c96c1542..7f15e3a7fd8fc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -2,16 +2,11 @@ import static com.linkedin.metadata.Constants.APP_SOURCE; import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; -import static com.linkedin.metadata.Constants.BROWSE_PATHS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.BROWSE_PATHS_V2_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME; import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; import static com.linkedin.metadata.Constants.FORCE_INDEXING_KEY; import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import static com.linkedin.metadata.Constants.UI_SOURCE; -import static com.linkedin.metadata.search.utils.BrowsePathUtils.buildDataPlatformUrn; -import static com.linkedin.metadata.search.utils.BrowsePathUtils.getDefaultBrowsePath; import static com.linkedin.metadata.utils.GenericRecordUtils.entityResponseToAspectMap; import static 
com.linkedin.metadata.utils.PegasusUtils.constructMCL; import static com.linkedin.metadata.utils.PegasusUtils.getDataTemplateClassFromSchema; @@ -25,8 +20,6 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Streams; import com.linkedin.common.AuditStamp; -import com.linkedin.common.BrowsePaths; -import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.Status; import com.linkedin.common.UrnArray; import com.linkedin.common.VersionedUrn; @@ -38,10 +31,8 @@ import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; -import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; import com.linkedin.data.template.UnionTemplate; -import com.linkedin.dataplatform.DataPlatformInfo; import com.linkedin.entity.AspectType; import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; @@ -57,6 +48,7 @@ import com.linkedin.metadata.aspect.batch.SystemAspect; import com.linkedin.metadata.aspect.batch.UpsertItem; import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; @@ -72,10 +64,8 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.run.AspectRowSummary; -import com.linkedin.metadata.search.utils.BrowsePathV2Utils; import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.metadata.snapshot.Snapshot; -import com.linkedin.metadata.utils.DataPlatformInstanceUtils; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.metadata.utils.PegasusUtils; @@ -166,6 +156,7 @@ public class EntityServiceImpl implements EntityService { protected static final int MAX_KEYS_PER_QUERY = 500; private final Integer ebeanMaxTransactionRetry; + private final boolean enableBrowseV2; public EntityServiceImpl( @Nonnull final AspectDao aspectDao, @@ -173,7 +164,8 @@ public EntityServiceImpl( @Nonnull final EntityRegistry entityRegistry, final boolean alwaysEmitChangeLog, @Nullable final UpdateIndicesService updateIndicesService, - final PreProcessHooks preProcessHooks) { + final PreProcessHooks preProcessHooks, + final boolean enableBrowsePathV2) { this( aspectDao, producer, @@ -181,7 +173,8 @@ public EntityServiceImpl( alwaysEmitChangeLog, updateIndicesService, preProcessHooks, - DEFAULT_MAX_TRANSACTION_RETRY); + DEFAULT_MAX_TRANSACTION_RETRY, + enableBrowsePathV2); } public EntityServiceImpl( @@ -191,7 +184,8 @@ public EntityServiceImpl( final boolean alwaysEmitChangeLog, @Nullable final UpdateIndicesService updateIndicesService, final PreProcessHooks preProcessHooks, - @Nullable final Integer retry) { + @Nullable final Integer retry, + final boolean enableBrowseV2) { _aspectDao = aspectDao; _producer = producer; @@ -204,6 +198,7 @@ public EntityServiceImpl( } _preProcessHooks = preProcessHooks; ebeanMaxTransactionRetry = retry != null ? 
retry : DEFAULT_MAX_TRANSACTION_RETRY; + this.enableBrowseV2 = enableBrowseV2; } @Override @@ -622,7 +617,7 @@ public List ingestAspects( MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(pair.getKey()) - .aspect(pair.getValue()) + .recordTemplate(pair.getValue()) .systemMetadata(systemMetadata) .auditStamp(auditStamp) .build(this)) @@ -670,162 +665,166 @@ private List ingestAspectsToLocalDB( log.warn(String.format("Batch contains duplicates: %s", aspectsBatch)); } - return _aspectDao.runInTransactionWithRetry( - (tx) -> { - // Read before write is unfortunate, however batch it - final Map> urnAspects = aspectsBatch.getUrnAspectsMap(); - // read #1 - final Map> latestAspects = - toSystemEntityAspects(_aspectDao.getLatestAspects(urnAspects)); - // read #2 - final Map> nextVersions = - _aspectDao.getNextVersions(urnAspects); - - // 1. Convert patches to full upserts - // 2. Run any entity/aspect level hooks - Pair>, List> updatedItems = - aspectsBatch.toUpsertBatchItems(latestAspects, this); - - // Fetch additional information if needed - final Map> updatedLatestAspects; - final Map> updatedNextVersions; - if (!updatedItems.getFirst().isEmpty()) { - Map> newLatestAspects = - toSystemEntityAspects(_aspectDao.getLatestAspects(updatedItems.getFirst())); - Map> newNextVersions = - _aspectDao.getNextVersions(updatedItems.getFirst()); - // merge - updatedLatestAspects = aspectsBatch.merge(latestAspects, newLatestAspects); - updatedNextVersions = aspectsBatch.merge(nextVersions, newNextVersions); - } else { - updatedLatestAspects = latestAspects; - updatedNextVersions = nextVersions; - } - - // do final pre-commit checks with previous aspect value - updatedItems - .getSecond() - .forEach( - item -> { - SystemAspect previousAspect = - updatedLatestAspects - .getOrDefault(item.getUrn().toString(), Map.of()) - .get(item.getAspectSpec().getName()); - try { - item.validatePreCommit( - previousAspect == null - ? null - : previousAspect.getRecordTemplate(_entityRegistry), - this); - } catch (AspectValidationException e) { - throw new RuntimeException(e); - } - }); + return _aspectDao + .runInTransactionWithRetry( + (tx) -> { + // Read before write is unfortunate, however batch it + final Map> urnAspects = aspectsBatch.getUrnAspectsMap(); + // read #1 + final Map> latestAspects = + toSystemEntityAspects(_aspectDao.getLatestAspects(urnAspects)); + // read #2 + final Map> nextVersions = + _aspectDao.getNextVersions(urnAspects); + + // 1. Convert patches to full upserts + // 2. 
Run any entity/aspect level hooks + Pair>, List> updatedItems = + aspectsBatch.toUpsertBatchItems(latestAspects, this); + + // Fetch additional information if needed + final Map> updatedLatestAspects; + final Map> updatedNextVersions; + if (!updatedItems.getFirst().isEmpty()) { + Map> newLatestAspects = + toSystemEntityAspects(_aspectDao.getLatestAspects(updatedItems.getFirst())); + Map> newNextVersions = + _aspectDao.getNextVersions(updatedItems.getFirst()); + // merge + updatedLatestAspects = aspectsBatch.merge(latestAspects, newLatestAspects); + updatedNextVersions = aspectsBatch.merge(nextVersions, newNextVersions); + } else { + updatedLatestAspects = latestAspects; + updatedNextVersions = nextVersions; + } - // Database Upsert results - List upsertResults = - updatedItems.getSecond().stream() - .map( + // do final pre-commit checks with previous aspect value + updatedItems + .getSecond() + .forEach( item -> { - final String urnStr = item.getUrn().toString(); - final SystemAspect latest = + SystemAspect previousAspect = updatedLatestAspects - .getOrDefault(urnStr, Map.of()) - .get(item.getAspectName()); - final long nextVersion = - updatedNextVersions - .getOrDefault(urnStr, Map.of()) - .getOrDefault(item.getAspectName(), 0L); - - final UpdateAspectResult result; - if (overwrite || latest == null) { - result = - ingestAspectToLocalDB( - tx, - item.getUrn(), - item.getAspectName(), - item.getAspect(), - item.getAuditStamp(), - item.getSystemMetadata(), - latest == null - ? null - : ((EntityAspect.EntitySystemAspect) latest).asRaw(), - nextVersion) - .toBuilder() - .request(item) - .build(); - - // support inner-batch upserts - latestAspects - .computeIfAbsent(urnStr, key -> new HashMap<>()) - .put(item.getAspectName(), item.toLatestEntityAspect()); - nextVersions - .computeIfAbsent(urnStr, key -> new HashMap<>()) - .put(item.getAspectName(), nextVersion + 1); - } else { - RecordTemplate oldValue = latest.getRecordTemplate(_entityRegistry); - SystemMetadata oldMetadata = latest.getSystemMetadata(); - result = - UpdateAspectResult.builder() - .urn(item.getUrn()) - .request(item) - .oldValue(oldValue) - .newValue(oldValue) - .oldSystemMetadata(oldMetadata) - .newSystemMetadata(oldMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(item.getAuditStamp()) - .maxVersion(latest.getVersion()) - .build(); + .getOrDefault(item.getUrn().toString(), Map.of()) + .get(item.getAspectSpec().getName()); + try { + item.validatePreCommit( + previousAspect == null + ? null + : previousAspect.getRecordTemplate(_entityRegistry), + this); + } catch (AspectValidationException e) { + throw new RuntimeException(e); } + }); - return result; - }) - .collect(Collectors.toList()); + // Database Upsert results + List upsertResults = + updatedItems.getSecond().stream() + .map( + item -> { + final String urnStr = item.getUrn().toString(); + final SystemAspect latest = + updatedLatestAspects + .getOrDefault(urnStr, Map.of()) + .get(item.getAspectName()); + final long nextVersion = + updatedNextVersions + .getOrDefault(urnStr, Map.of()) + .getOrDefault(item.getAspectName(), 0L); + + final UpdateAspectResult result; + if (overwrite || latest == null) { + result = + ingestAspectToLocalDB( + tx, + item.getUrn(), + item.getAspectName(), + item.getRecordTemplate(), + item.getAuditStamp(), + item.getSystemMetadata(), + latest == null + ? 
null + : ((EntityAspect.EntitySystemAspect) latest).asRaw(), + nextVersion) + .toBuilder() + .request(item) + .build(); + + // support inner-batch upserts + latestAspects + .computeIfAbsent(urnStr, key -> new HashMap<>()) + .put(item.getAspectName(), item.toLatestEntityAspect()); + nextVersions + .computeIfAbsent(urnStr, key -> new HashMap<>()) + .put(item.getAspectName(), nextVersion + 1); + } else { + RecordTemplate oldValue = latest.getRecordTemplate(_entityRegistry); + SystemMetadata oldMetadata = latest.getSystemMetadata(); + result = + UpdateAspectResult.builder() + .urn(item.getUrn()) + .request(item) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(oldMetadata) + .newSystemMetadata(oldMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(item.getAuditStamp()) + .maxVersion(latest.getVersion()) + .build(); + } + + return result; + }) + .collect(Collectors.toList()); - // commit upserts prior to retention or kafka send, if supported by impl - if (tx != null) { - tx.commitAndContinue(); - } + // commit upserts prior to retention or kafka send, if supported by impl + if (tx != null) { + tx.commitAndContinue(); + } - // Retention optimization and tx - if (_retentionService != null) { - List retentionBatch = - upsertResults.stream() - // Only consider retention when there was a previous version - .filter( - result -> - latestAspects.containsKey(result.getUrn().toString()) - && latestAspects - .get(result.getUrn().toString()) - .containsKey(result.getRequest().getAspectName())) - .filter( - result -> { - RecordTemplate oldAspect = result.getOldValue(); - RecordTemplate newAspect = result.getNewValue(); - // Apply retention policies if there was an update to existing aspect - // value - return oldAspect != newAspect - && oldAspect != null - && _retentionService != null; - }) - .map( - result -> - RetentionService.RetentionContext.builder() - .urn(result.getUrn()) - .aspectName(result.getRequest().getAspectName()) - .maxVersion(Optional.of(result.getMaxVersion())) - .build()) - .collect(Collectors.toList()); - _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); - } else { - log.warn("Retention service is missing!"); - } + // Retention optimization and tx + if (_retentionService != null) { + List retentionBatch = + upsertResults.stream() + // Only consider retention when there was a previous version + .filter( + result -> + latestAspects.containsKey(result.getUrn().toString()) + && latestAspects + .get(result.getUrn().toString()) + .containsKey(result.getRequest().getAspectName())) + .filter( + result -> { + RecordTemplate oldAspect = result.getOldValue(); + RecordTemplate newAspect = result.getNewValue(); + // Apply retention policies if there was an update to existing aspect + // value + return oldAspect != newAspect + && oldAspect != null + && _retentionService != null; + }) + .map( + result -> + RetentionService.RetentionContext.builder() + .urn(result.getUrn()) + .aspectName(result.getRequest().getAspectName()) + .maxVersion(Optional.of(result.getMaxVersion())) + .build()) + .collect(Collectors.toList()); + _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); + } else { + log.warn("Retention service is missing!"); + } - return upsertResults; - }, - aspectsBatch, - DEFAULT_MAX_TRANSACTION_RETRY); + return upsertResults; + }, + aspectsBatch, + DEFAULT_MAX_TRANSACTION_RETRY) + .stream() + .flatMap(List::stream) + .collect(Collectors.toList()); } /** @@ -921,7 +920,7 @@ public RecordTemplate ingestAspectIfNotPresent( 
MCPUpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) - .aspect(newValue) + .recordTemplate(newValue) .systemMetadata(systemMetadata) .auditStamp(auditStamp) .build(this)) @@ -965,7 +964,6 @@ public IngestResult ingestProposal( */ @Override public Set ingestProposal(AspectsBatch aspectsBatch, final boolean async) { - Stream timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch); Stream nonTimeseriesIngestResults = async ? ingestProposalAsync(aspectsBatch) : ingestProposalSync(aspectsBatch); @@ -1005,7 +1003,7 @@ private Stream ingestTimeseriesProposal(AspectsBatch aspectsBatch) conditionallyProduceMCLAsync( null, null, - item.getAspect(), + item.getRecordTemplate(), item.getSystemMetadata(), item.getMetadataChangeProposal(), item.getUrn(), @@ -1082,10 +1080,17 @@ private Stream ingestProposalAsync(AspectsBatch aspectsBatch) { } private Stream ingestProposalSync(AspectsBatch aspectsBatch) { + Set items = new HashSet<>(aspectsBatch.getItems()); + + // Generate additional items as needed + items.addAll(DefaultAspectsUtil.getAdditionalChanges(aspectsBatch, this, enableBrowseV2)); + + AspectsBatch withDefaults = AspectsBatchImpl.builder().items(items).build(); + AspectsBatchImpl nonTimeseries = AspectsBatchImpl.builder() .items( - aspectsBatch.getItems().stream() + withDefaults.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList())) .build(); @@ -1542,116 +1547,6 @@ protected Map> getLatestAspectUnions( .collect(Collectors.toList()))); } - /** - * Returns true if entityType should have some aspect as per its definition but aspects given does - * not have that aspect - */ - private boolean isAspectMissing(String entityType, String aspectName, Set aspects) { - return _entityRegistry.getEntitySpec(entityType).getAspectSpecMap().containsKey(aspectName) - && !aspects.contains(aspectName); - } - - @Override - public Pair>> generateDefaultAspectsOnFirstWrite( - @Nonnull final Urn urn, Map includedAspects) { - List> returnAspects = new ArrayList<>(); - - final String keyAspectName = getKeyAspectName(urn); - final Map latestAspects = - new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); - - // key aspect: does not exist in database && is being written - boolean generateDefaults = - !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); - - // conditionally generate defaults - if (generateDefaults) { - String entityType = urnToEntityName(urn); - Set aspectsToGet = new HashSet<>(); - - boolean shouldCheckBrowsePath = - isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); - if (shouldCheckBrowsePath) { - aspectsToGet.add(BROWSE_PATHS_ASPECT_NAME); - } - - boolean shouldCheckBrowsePathV2 = - isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); - if (shouldCheckBrowsePathV2) { - aspectsToGet.add(BROWSE_PATHS_V2_ASPECT_NAME); - } - - boolean shouldCheckDataPlatform = - isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); - if (shouldCheckDataPlatform) { - aspectsToGet.add(DATA_PLATFORM_INSTANCE_ASPECT_NAME); - } - - // fetch additional aspects - latestAspects.putAll(getLatestAspectsForUrn(urn, aspectsToGet)); - - if (shouldCheckBrowsePath - && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { - try { - BrowsePaths generatedBrowsePath = buildDefaultBrowsePath(urn); - returnAspects.add(Pair.of(BROWSE_PATHS_ASPECT_NAME, 
generatedBrowsePath)); - } catch (URISyntaxException e) { - log.error("Failed to parse urn: {}", urn); - } - } - - if (shouldCheckBrowsePathV2 - && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { - try { - BrowsePathsV2 generatedBrowsePathV2 = buildDefaultBrowsePathV2(urn, false); - returnAspects.add(Pair.of(BROWSE_PATHS_V2_ASPECT_NAME, generatedBrowsePathV2)); - } catch (URISyntaxException e) { - log.error("Failed to parse urn: {}", urn); - } - } - - if (shouldCheckDataPlatform - && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null - && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { - RecordTemplate keyAspect = includedAspects.get(keyAspectName); - DataPlatformInstanceUtils.buildDataPlatformInstance(entityType, keyAspect) - .ifPresent( - aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); - } - } - - return Pair.of(latestAspects.containsKey(keyAspectName), returnAspects); - } - - @Override - public List> generateDefaultAspectsIfMissing( - @Nonnull final Urn urn, Map includedAspects) { - - final String keyAspectName = getKeyAspectName(urn); - - if (includedAspects.containsKey(keyAspectName)) { - return generateDefaultAspectsOnFirstWrite(urn, includedAspects).getValue(); - } else { - // No key aspect being written, generate it and potentially suggest writing it later - HashMap includedWithKeyAspect = new HashMap<>(includedAspects); - Pair keyAspect = - Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); - includedWithKeyAspect.put(keyAspect.getKey(), keyAspect.getValue()); - - Pair>> returnAspects = - generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); - - // missing key aspect in database, add it - if (!returnAspects.getFirst()) { - returnAspects.getValue().add(keyAspect); - } - - return returnAspects.getValue(); - } - } - private void ingestSnapshotUnion( @Nonnull final Snapshot snapshotUnion, @Nonnull final AuditStamp auditStamp, @@ -1664,10 +1559,11 @@ private void ingestSnapshotUnion( log.info("INGEST urn {} with system metadata {}", urn.toString(), systemMetadata.toString()); aspectRecordsToIngest.addAll( - generateDefaultAspectsIfMissing( + DefaultAspectsUtil.generateDefaultAspects( + this, urn, - aspectRecordsToIngest.stream() - .collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); + aspectRecordsToIngest.stream().map(Pair::getFirst).collect(Collectors.toSet()), + enableBrowseV2)); AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() @@ -1678,7 +1574,7 @@ private void ingestSnapshotUnion( MCPUpsertBatchItem.builder() .urn(urn) .aspectName(pair.getKey()) - .aspect(pair.getValue()) + .recordTemplate(pair.getValue()) .auditStamp(auditStamp) .systemMetadata(systemMetadata) .build(this)) @@ -2397,79 +2293,6 @@ private UpdateAspectResult ingestAspectToLocalDB( .build(); } - /** - * Builds the default browse path aspects for a subset of well-supported entities. - * - *
-   * <p>
    This method currently supports datasets, charts, dashboards, data flows, data jobs, and - * glossary terms. - */ - @Nonnull - @Override - public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISyntaxException { - Character dataPlatformDelimiter = getDataPlatformDelimiter(urn); - String defaultBrowsePath = - getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter); - StringArray browsePaths = new StringArray(); - browsePaths.add(defaultBrowsePath); - BrowsePaths browsePathAspect = new BrowsePaths(); - browsePathAspect.setPaths(browsePaths); - return browsePathAspect; - } - - /** - * Builds the default browse path V2 aspects for all entities. - * - *
-   * <p>
    This method currently supports datasets, charts, dashboards, and data jobs best. Everything - * else will have a basic "Default" folder added to their browsePathV2. - */ - @Nonnull - @Override - public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) - throws URISyntaxException { - Character dataPlatformDelimiter = getDataPlatformDelimiter(urn); - return BrowsePathV2Utils.getDefaultBrowsePathV2( - urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths); - } - - /** Returns a delimiter on which the name of an asset may be split. */ - private Character getDataPlatformDelimiter(Urn urn) { - // Attempt to construct the appropriate Data Platform URN - Urn dataPlatformUrn = buildDataPlatformUrn(urn, this.getEntityRegistry()); - if (dataPlatformUrn != null) { - // Attempt to resolve the delimiter from Data Platform Info - DataPlatformInfo dataPlatformInfo = getDataPlatformInfo(dataPlatformUrn); - if (dataPlatformInfo != null && dataPlatformInfo.hasDatasetNameDelimiter()) { - return dataPlatformInfo.getDatasetNameDelimiter().charAt(0); - } - } - // Else, fallback to a default delimiter (period) if one cannot be resolved. - return '.'; - } - - @Nullable - private DataPlatformInfo getDataPlatformInfo(Urn urn) { - try { - final EntityResponse entityResponse = - getEntityV2( - Constants.DATA_PLATFORM_ENTITY_NAME, - urn, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); - if (entityResponse != null - && entityResponse.hasAspects() - && entityResponse.getAspects().containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) { - return new DataPlatformInfo( - entityResponse - .getAspects() - .get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) - .getValue() - .data()); - } - } catch (Exception e) { - log.warn(String.format("Failed to find Data Platform Info for urn %s", urn)); - } - return null; - } - private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspectSpec) { final List relationshipFieldSpecs = aspectSpec.getRelationshipFieldSpecs(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java index 176a99d8d3a49..3342d4632f642 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java @@ -5,14 +5,20 @@ import com.codahale.metrics.MetricRegistry; import com.datahub.util.exception.ModelConversionException; import com.datahub.util.exception.RetryLimitReached; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.aspect.batch.AspectsBatch; +import com.linkedin.metadata.aspect.batch.MCPBatchItem; +import com.linkedin.metadata.config.EbeanConfiguration; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.AspectMigrationsDao; import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.ListResult; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; @@ -21,6 +27,7 @@ import 
com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; +import com.linkedin.util.Pair; import io.ebean.Database; import io.ebean.DuplicateKeyException; import io.ebean.ExpressionList; @@ -39,9 +46,14 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -69,8 +81,29 @@ public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { // more testing. private int _queryKeysCount = 375; // 0 means no pagination on keys - public EbeanAspectDao(@Nonnull final Database server) { + /** + * Used to control write concurrency when an entity key aspect is present. If a batch contains an + * entity key aspect, only allow a single execution per URN + */ + private final LoadingCache locks; + + public EbeanAspectDao(@Nonnull final Database server, EbeanConfiguration ebeanConfiguration) { _server = server; + if (ebeanConfiguration.getLocking().isEnabled()) { + this.locks = + CacheBuilder.newBuilder() + .maximumSize(ebeanConfiguration.getLocking().getMaximumLocks()) + .expireAfterWrite( + ebeanConfiguration.getLocking().getDurationSeconds(), TimeUnit.SECONDS) + .build( + new CacheLoader<>() { + public Lock load(String key) { + return new ReentrantLock(true); + } + }); + } else { + this.locks = null; + } } @Override @@ -588,15 +621,70 @@ public ListResult listLatestAspectMetadata( @Nonnull public T runInTransactionWithRetry( @Nonnull final Function block, final int maxTransactionRetry) { - return runInTransactionWithRetry(block, null, maxTransactionRetry); + return runInTransactionWithRetry(block, null, maxTransactionRetry).get(0); } @Override @Nonnull - public T runInTransactionWithRetry( + public List runInTransactionWithRetry( @Nonnull final Function block, @Nullable AspectsBatch batch, final int maxTransactionRetry) { + + LinkedList result = new LinkedList<>(); + + if (locks != null && batch != null) { + Set urnsWithKeyAspects = + batch.getMCPItems().stream() + .filter(i -> i.getEntitySpec().getKeyAspectSpec().equals(i.getAspectSpec())) + .map(MCPBatchItem::getUrn) + .collect(Collectors.toSet()); + + if (!urnsWithKeyAspects.isEmpty()) { + + // Split into batches by urn with key aspect, remaining aspects in the pair's second + Pair, AspectsBatch> splitBatches = splitByUrn(batch, urnsWithKeyAspects); + + // Run non-key aspect `other` batch per normal + if (!splitBatches.getSecond().getItems().isEmpty()) { + result.add( + runInTransactionWithRetryUnlocked( + block, splitBatches.getSecond(), maxTransactionRetry)); + } + + // For each key aspect batch + for (AspectsBatch splitBatch : splitBatches.getFirst()) { + try { + Lock lock = + locks.get(splitBatch.getMCPItems().stream().findFirst().get().getUrn().toString()); + lock.lock(); + try { + result.add(runInTransactionWithRetryUnlocked(block, splitBatch, maxTransactionRetry)); + } finally { + lock.unlock(); + } + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + } else { + // No key aspects found, run per normal + result.add(runInTransactionWithRetryUnlocked(block, batch, maxTransactionRetry)); + } + } else { + // 
locks disabled or null batch + result.add(runInTransactionWithRetryUnlocked(block, batch, maxTransactionRetry)); + } + + return result; + } + + @Nonnull + public T runInTransactionWithRetryUnlocked( + @Nonnull final Function block, + @Nullable AspectsBatch batch, + final int maxTransactionRetry) { + validateConnection(); int retryCount = 0; Exception lastException = null; @@ -804,4 +892,35 @@ private static String buildMetricName( MetricUtils.DELIMITER, List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); } + + /** + * Split batches by the set of Urns, all remaining items go into an `other` batch in the second of + * the pair + * + * @param batch the input batch + * @param urns urns for batch + * @return separated batches + */ + private static Pair, AspectsBatch> splitByUrn( + AspectsBatch batch, Set urns) { + Map> itemsByUrn = + batch.getMCPItems().stream().collect(Collectors.groupingBy(MCPBatchItem::getUrn)); + + AspectsBatch other = + AspectsBatchImpl.builder() + .items( + itemsByUrn.entrySet().stream() + .filter(entry -> !urns.contains(entry.getKey())) + .flatMap(entry -> entry.getValue().stream()) + .collect(Collectors.toList())) + .build(); + + List nonEmptyBatches = + urns.stream() + .map(urn -> AspectsBatchImpl.builder().items(itemsByUrn.get(urn)).build()) + .filter(b -> !b.getItems().isEmpty()) + .collect(Collectors.toList()); + + return Pair.of(nonEmptyBatches, other); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index 80fb4e3e1b940..1718bd835dc31 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -11,6 +11,7 @@ import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; +import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -26,7 +27,7 @@ @Builder(toBuilder = true) public class AspectsBatchImpl implements AspectsBatch { - private final List items; + private final Collection items; /** * Convert patches to upserts, apply hooks at the aspect and batch level. 
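The EbeanAspectDao hunks above introduce per-URN write locking: when a batch carries an entity key aspect, the DAO splits the batch by URN and takes a fair lock from a bounded, expiring Guava cache before running each transaction. The following is a minimal, self-contained sketch of that locking pattern, not the DAO itself; the class and method names (KeyedUrnLock, withUrnLock) and the cache bounds are illustrative assumptions.

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;

public class KeyedUrnLock {
  // Bounded, expiring cache so locks for rarely-written URNs are reclaimed
  // instead of accumulating for every URN ever written.
  private final LoadingCache<String, Lock> locks =
      CacheBuilder.newBuilder()
          .maximumSize(10_000) // illustrative bound
          .expireAfterWrite(60, TimeUnit.SECONDS) // illustrative duration
          .build(
              new CacheLoader<String, Lock>() {
                @Override
                public Lock load(String key) {
                  // Fair lock: writers for the same URN run in arrival order.
                  return new ReentrantLock(true);
                }
              });

  /** Runs {@code work} while holding the lock for {@code urn}. */
  public <T> T withUrnLock(String urn, Supplier<T> work) {
    try {
      Lock lock = locks.get(urn); // loads a fresh ReentrantLock on first use
      lock.lock();
      try {
        return work.get();
      } finally {
        lock.unlock();
      }
    } catch (ExecutionException e) {
      throw new RuntimeException(e);
    }
  }
}

Splitting the batch this way is also why runInTransactionWithRetry now returns a List of results: the per-URN key-aspect sub-batches and the remaining `other` sub-batch each run (and retry) as separate transactions.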
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java index 6563765657d6d..a2ed2eb18fe6a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java @@ -30,9 +30,9 @@ public class MCLBatchItemImpl implements MCLBatchItem { @Nonnull private final MetadataChangeLog metadataChangeLog; - @Nullable private final RecordTemplate aspect; + @Nullable private final RecordTemplate recordTemplate; - @Nullable private final RecordTemplate previousAspect; + @Nullable private final RecordTemplate previousRecordTemplate; // derived private final EntitySpec entitySpec; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java index be333af2f7539..d0cb2a4cc59b8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java @@ -31,6 +31,7 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.Builder; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -72,6 +73,12 @@ public ChangeType getChangeType() { return ChangeType.PATCH; } + @Nullable + @Override + public RecordTemplate getRecordTemplate() { + return null; + } + public MCPUpsertBatchItem applyPatch( RecordTemplate recordTemplate, AspectRetriever aspectRetriever) { MCPUpsertBatchItem.MCPUpsertBatchItemBuilder builder = @@ -100,7 +107,8 @@ public MCPUpsertBatchItem applyPatch( } try { - builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); + builder.recordTemplate( + aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); } catch (JsonProcessingException | JsonPatchException e) { throw new RuntimeException(e); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java index 89209c44f10c7..b9d5f24e7ce08 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java @@ -58,7 +58,7 @@ public static MCPUpsertBatchItem fromPatch( recordTemplate != null ? 
recordTemplate : genericPatchTemplate.getDefault(); try { - builder.aspect(genericPatchTemplate.applyPatch(currentValue)); + builder.recordTemplate(genericPatchTemplate.applyPatch(currentValue)); } catch (JsonPatchException | IOException e) { throw new RuntimeException(e); } @@ -72,7 +72,7 @@ public static MCPUpsertBatchItem fromPatch( // aspectName name of the aspect being inserted @Nonnull private final String aspectName; - @Nonnull private final RecordTemplate aspect; + @Nonnull private final RecordTemplate recordTemplate; @Nonnull private final SystemMetadata systemMetadata; @@ -104,7 +104,7 @@ public void applyMutationHooks( entitySpec, aspectSpec, oldAspectValue, - aspect, + recordTemplate, oldSystemMetadata, systemMetadata, auditStamp, @@ -116,7 +116,7 @@ public void applyMutationHooks( public SystemAspect toLatestEntityAspect() { EntityAspect latest = new EntityAspect(); latest.setAspect(getAspectName()); - latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); + latest.setMetadata(EntityUtils.toJsonAspect(getRecordTemplate())); latest.setUrn(getUrn().toString()); latest.setVersion(ASPECT_LATEST_VERSION); latest.setCreatedOn(new Timestamp(auditStamp.getTime())); @@ -135,7 +135,7 @@ public void validatePreCommit( .getAspectPayloadValidators( getChangeType(), entitySpec.getName(), aspectSpec.getName())) { validator.validatePreCommit( - getChangeType(), urn, getAspectSpec(), previous, this.aspect, aspectRetriever); + getChangeType(), urn, getAspectSpec(), previous, this.recordTemplate, aspectRetriever); } } @@ -167,13 +167,13 @@ public MCPUpsertBatchItem build(AspectRetriever aspectRetriever) { this.entitySpec, this.aspectSpec, this.urn, - this.aspect, + this.recordTemplate, aspectRetriever); return new MCPUpsertBatchItem( this.urn, this.aspectName, - this.aspect, + this.recordTemplate, SystemMetadataUtils.generateSystemMetadataIfEmpty(this.systemMetadata), this.auditStamp, this.metadataChangeProposal, @@ -213,7 +213,7 @@ public static MCPUpsertBatchItem build( SystemMetadataUtils.generateSystemMetadataIfEmpty(mcp.getSystemMetadata())) .metadataChangeProposal(mcp) .auditStamp(auditStamp) - .aspect(convertToRecordTemplate(mcp, aspectSpec)) + .recordTemplate(convertToRecordTemplate(mcp, aspectSpec)) .build(aspectRetriever); } @@ -258,12 +258,12 @@ public boolean equals(Object o) { return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) - && aspect.equals(that.aspect); + && recordTemplate.equals(that.recordTemplate); } @Override public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, aspect); + return Objects.hash(urn, aspectName, systemMetadata, recordTemplate); } @Override @@ -276,8 +276,8 @@ public String toString() { + '\'' + ", systemMetadata=" + systemMetadata - + ", aspect=" - + aspect + + ", recordTemplate=" + + recordTemplate + '}'; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java index af0f537de8629..4152122c381da 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java @@ -29,8 +29,7 @@ public class BrowsePathUtils { public static String getDefaultBrowsePath( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, - @Nonnull Character dataPlatformDelimiter) - throws URISyntaxException { + @Nonnull Character dataPlatformDelimiter) 
{ switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java index 961167663e11f..a531c268ed7d2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java @@ -16,7 +16,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -46,9 +45,8 @@ public static BrowsePathsV2 getDefaultBrowsePathV2( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, @Nonnull Character dataPlatformDelimiter, - @Nonnull EntityService entityService, - boolean useContainerPaths) - throws URISyntaxException { + @Nonnull EntityService entityService, + boolean useContainerPaths) { BrowsePathsV2 result = new BrowsePathsV2(); BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index ed633b063afb2..3c73d1acab5c2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -160,8 +160,8 @@ private void handleUpdateChangeEvent(@Nonnull final MCLBatchItem event) throws I final AspectSpec aspectSpec = event.getAspectSpec(); final Urn urn = event.getUrn(); - RecordTemplate aspect = event.getAspect(); - RecordTemplate previousAspect = event.getPreviousAspect(); + RecordTemplate aspect = event.getRecordTemplate(); + RecordTemplate previousAspect = event.getPreviousRecordTemplate(); // Step 0. If the aspect is timeseries, add to its timeseries index. 
if (aspectSpec.isTimeseries()) { @@ -264,7 +264,7 @@ private void handleDeleteChangeEvent(@Nonnull final MCLBatchItem event) { urn.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = event.getAspect(); + RecordTemplate aspect = event.getRecordTemplate(); Boolean isDeletingKey = event.getAspectName().equals(entitySpec.getKeyAspectName()); if (!aspectSpec.isTimeseries()) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java index 451b732722498..72bbc794171ff 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java @@ -38,7 +38,7 @@ public static Map ingestCorpUserKeyAspects( MCPUpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) - .aspect(aspect) + .recordTemplate(aspect) .auditStamp(AspectGenerationUtils.createAuditStamp()) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService)); @@ -68,7 +68,7 @@ public static Map ingestCorpUserInfoAspects( MCPUpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) - .aspect(aspect) + .recordTemplate(aspect) .auditStamp(AspectGenerationUtils.createAuditStamp()) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService)); @@ -99,7 +99,7 @@ public static Map ingestChartInfoAspects( MCPUpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) - .aspect(aspect) + .recordTemplate(aspect) .auditStamp(AspectGenerationUtils.createAuditStamp()) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService)); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java similarity index 56% rename from metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java rename to metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java index 258b40cac6371..308832a9c63ef 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java @@ -1,31 +1,35 @@ -package com.linkedin.metadata; +package com.linkedin.metadata.aspect.utils; import static org.mockito.Mockito.*; +import com.linkedin.common.AuditStamp; import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; -import com.linkedin.dataset.DatasetProperties; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.aspect.batch.MCPBatchItem; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; +import com.linkedin.metadata.config.EbeanConfiguration; import com.linkedin.metadata.config.PreProcessHooks; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.TestEntityRegistry; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistryException; import 
com.linkedin.metadata.models.registry.MergedEntityRegistry; import com.linkedin.metadata.snapshot.Snapshot; -import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import io.ebean.Database; import java.util.List; +import java.util.stream.Collectors; import org.testng.Assert; import org.testng.annotations.Test; -public class AspectUtilsTest { +public class DefaultAspectsUtilTest { protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); protected final EntityRegistry _configEntityRegistry = @@ -34,31 +38,41 @@ public class AspectUtilsTest { protected final EntityRegistry _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - public AspectUtilsTest() throws EntityRegistryException {} + public DefaultAspectsUtilTest() throws EntityRegistryException {} @Test public void testAdditionalChanges() { - Database server = EbeanTestUtils.createTestServer(AspectUtilsTest.class.getSimpleName()); - EbeanAspectDao aspectDao = new EbeanAspectDao(server); + Database server = EbeanTestUtils.createTestServer(DefaultAspectsUtilTest.class.getSimpleName()); + EbeanAspectDao aspectDao = new EbeanAspectDao(server, EbeanConfiguration.testDefault); aspectDao.setConnectionValidated(true); EventProducer mockProducer = mock(EventProducer.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); EntityServiceImpl entityServiceImpl = new EntityServiceImpl( - aspectDao, mockProducer, _testEntityRegistry, true, null, preProcessHooks); + aspectDao, mockProducer, _testEntityRegistry, true, null, preProcessHooks, false); - MetadataChangeProposal proposal1 = new MetadataChangeProposal(); - proposal1.setEntityUrn( - new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); - proposal1.setAspectName("datasetProperties"); - DatasetProperties datasetProperties = new DatasetProperties().setName("name"); - proposal1.setAspect(GenericRecordUtils.serializeAspect(datasetProperties)); - proposal1.setEntityType("dataset"); - proposal1.setChangeType(ChangeType.PATCH); + MetadataChangeProposal proposal1 = + new DatasetPropertiesPatchBuilder() + .urn(new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)) + .setDescription("something") + .setName("name") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); + + Assert.assertEquals(proposal1.getChangeType(), ChangeType.PATCH); List proposalList = - AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); + DefaultAspectsUtil.getAdditionalChanges( + AspectsBatchImpl.builder() + .mcps(List.of(proposal1), new AuditStamp(), entityServiceImpl) + .build(), + entityServiceImpl, + false) + .stream() + .map(MCPBatchItem::getMetadataChangeProposal) + .collect(Collectors.toList()); // proposals for key aspect, browsePath, browsePathV2, dataPlatformInstance Assert.assertEquals(proposalList.size(), 4); Assert.assertEquals(proposalList.get(0).getChangeType(), ChangeType.UPSERT); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java index d94de604bf44d..d191ea2b9fa97 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java @@ -54,7 +54,8 @@ private void 
configureComponents() { _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java index bad47f9acf507..8d30fb02915c7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java @@ -77,7 +77,8 @@ private void configureComponents() { _testEntityRegistry, false, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java index 496744770dba8..42fa2acb54237 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java @@ -59,7 +59,8 @@ public DeleteEntityServiceTest() { _entityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); _deleteEntityService = new DeleteEntityService(_entityServiceImpl, _graphService); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java index 2430ebb1f94be..d241fb3b9581b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java @@ -7,6 +7,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.metadata.AspectIngestionUtils; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.EbeanConfiguration; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; @@ -32,7 +33,7 @@ public void setupTest() { Database server = EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); _mockProducer = mock(EventProducer.class); - EbeanAspectDao dao = new EbeanAspectDao(server); + EbeanAspectDao dao = new EbeanAspectDao(server, EbeanConfiguration.testDefault); dao.setConnectionValidated(true); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); @@ -44,7 +45,8 @@ public void setupTest() { _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index c45306e5f022b..1e2cf4d4255d2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -13,6 +13,7 @@ import com.linkedin.metadata.AspectGenerationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.EbeanConfiguration; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; @@ -63,7 +64,7 @@ public void setupTest() { Database server = EbeanTestUtils.createTestServer(EbeanEntityServiceTest.class.getSimpleName()); _mockProducer = mock(EventProducer.class); - _aspectDao = new EbeanAspectDao(server); + _aspectDao = new EbeanAspectDao(server, EbeanConfiguration.testDefault); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); @@ -75,7 +76,8 @@ public void setupTest() { _testEntityRegistry, false, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); } @@ -121,21 +123,21 @@ public void testIngestListLatestAspects() throws AssertionError { MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(aspectName) - .aspect(writeAspect1) + .recordTemplate(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) - .aspect(writeAspect2) + .recordTemplate(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) - .aspect(writeAspect3) + .recordTemplate(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) .build(_entityServiceImpl)); @@ -190,21 +192,21 @@ public void testIngestListUrns() throws AssertionError { MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(aspectName) - .aspect(writeAspect1) + .recordTemplate(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) - .aspect(writeAspect2) + .recordTemplate(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) - .aspect(writeAspect3) + .recordTemplate(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) .build(_entityServiceImpl)); @@ -311,6 +313,12 @@ public void multiThreadingTest() { Set> additions = actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) + // Exclude default aspects + .filter( + id -> + !Set.of("browsePaths", "browsePathsV2", "dataPlatformInstance") + .contains(id.getMiddle())) + .filter(id -> !id.getMiddle().endsWith("Key")) .collect(Collectors.toSet()); assertEquals( additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); @@ -361,6 +369,12 @@ public void singleThreadingTest() { Set> additions = actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) + // Exclude default aspects + .filter( + id -> + !Set.of("browsePaths", "browsePathsV2", "dataPlatformInstance") + .contains(id.getMiddle())) + .filter(id -> !id.getMiddle().endsWith("Key")) .collect(Collectors.toSet()); assertEquals( additions.size(), 0, String.format("Expected no additional aspects. 
Found: %s", additions)); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index db749f3575a06..ea4e97d264bca 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -108,6 +108,8 @@ public abstract class EntityServiceTest captor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)) - .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), captor.capture()); + ArgumentCaptor aspectSpecCaptor = ArgumentCaptor.forClass(AspectSpec.class); + verify(_mockProducer, times(4)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), aspectSpecCaptor.capture(), captor.capture()); assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + assertEquals( + aspectSpecCaptor.getAllValues().stream() + .map(AspectSpec::getName) + .collect(Collectors.toSet()), + Set.of( + "browsePathsV2", + "editableDatasetProperties", + // "browsePaths", + "dataPlatformInstance", + "datasetKey")); } @Test @@ -1673,12 +1688,17 @@ public void testStructuredPropertyIngestProposal() throws Exception { genericAspect.setContentType("application/json"); gmce.setAspect(genericAspect); _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + ArgumentCaptor captor = ArgumentCaptor.forClass(MetadataChangeLog.class); verify(_mockProducer, times(1)) - .produceMetadataChangeLog(Mockito.eq(firstPropertyUrn), Mockito.any(), captor.capture()); + .produceMetadataChangeLog( + Mockito.eq(firstPropertyUrn), + Mockito.eq(structuredPropertiesDefinitionAspect), + captor.capture()); assertEquals( _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), structuredPropertyDefinition); + Urn secondPropertyUrn = UrnUtils.getUrn("urn:li:structuredProperty:secondStructuredProperty"); assertNull(_entityServiceImpl.getAspect(secondPropertyUrn, definitionAspectName, 0)); assertEquals( @@ -1752,7 +1772,9 @@ public void testStructuredPropertyIngestProposal() throws Exception { ArgumentCaptor.forClass(MetadataChangeLog.class); verify(_mockProducer, times(1)) .produceMetadataChangeLog( - Mockito.eq(secondPropertyUrn), Mockito.any(), secondCaptor.capture()); + Mockito.eq(secondPropertyUrn), + Mockito.eq(structuredPropertiesDefinitionAspect), + secondCaptor.capture()); assertEquals( _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), structuredPropertyDefinition); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java index 921fbac12df85..552cb0b52994f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java @@ -61,7 +61,8 @@ private void configureComponents() { _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); } /** diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java index 4e47e596dddc2..5d7137a52eb21 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java @@ -3,6 +3,7 @@ import static org.mockito.Mockito.mock; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.EbeanConfiguration; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; @@ -29,7 +30,7 @@ public EbeanTimelineServiceTest() throws EntityRegistryException {} public void setupTest() { Database server = EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName()); - _aspectDao = new EbeanAspectDao(server); + _aspectDao = new EbeanAspectDao(server, EbeanConfiguration.testDefault); _aspectDao.setConnectionValidated(true); _entityTimelineService = new TimelineServiceImpl(_aspectDao, _testEntityRegistry); _mockProducer = mock(EventProducer.class); @@ -42,7 +43,8 @@ public void setupTest() { _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + preProcessHooks, + true); } /** diff --git a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java index 29c64abdc4d0d..eb4c85209ce42 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java +++ b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java @@ -1,5 +1,6 @@ package io.datahubproject.test; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; import com.linkedin.common.AuditStamp; @@ -10,15 +11,18 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.batch.MCPBatchItem; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityServiceImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; @@ -51,11 +55,17 @@ public class DataGenerator { private static final Faker FAKER = new Faker(); private final EntityRegistry entityRegistry; - private final EntityService entityService; + private final EntityService entityService; + private final boolean generateDefaultAspects; - public DataGenerator(EntityService entityService) { + public DataGenerator(EntityService entityService) { + this(entityService, false); + } + + public DataGenerator(EntityService entityService, Boolean generateDefaultAspects) { this.entityService = entityService; this.entityRegistry = entityService.getEntityRegistry(); + this.generateDefaultAspects = generateDefaultAspects != null ? 
generateDefaultAspects : false; } public static DataGenerator build(EntityRegistry entityRegistry) { @@ -66,7 +76,8 @@ public static DataGenerator build(EntityRegistry entityRegistry) { entityRegistry, false, mock(UpdateIndicesService.class), - mock(PreProcessHooks.class)); + mock(PreProcessHooks.class), + anyBoolean()); return new DataGenerator(mockEntityServiceImpl); } @@ -81,10 +92,15 @@ public List generateTags(long count) { public Stream> generateMCPs( String entityName, long count, List aspects) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.DATAHUB_ACTOR)) + .setTime(System.currentTimeMillis()); // Prevent duplicate tags and terms generated as secondary entities Set secondaryUrns = new HashSet<>(); + // Expand with default aspects per normal return LongStream.range(0, count) .mapToObj( idx -> { @@ -145,11 +161,22 @@ public Stream> generateMCPs( }) .map( mcp -> { - // Expand with default aspects per normal - return Stream.concat( - Stream.of(mcp), - AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) - .collect(Collectors.toList()); + if (generateDefaultAspects) { + // Expand with default aspects instead of relying on default generation + return Stream.concat( + Stream.of(mcp), + DefaultAspectsUtil.getAdditionalChanges( + AspectsBatchImpl.builder() + .mcps(List.of(mcp), auditStamp, entityService) + .build(), + entityService, + true) + .stream() + .map(MCPBatchItem::getMetadataChangeProposal)) + .collect(Collectors.toList()); + } else { + return List.of(mcp); + } }); } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 84433a2b439f4..b42cd89131f51 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -296,7 +296,8 @@ private EntityClient entityClientHelper( PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); return new JavaEntityClient( - new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, preProcessHooks), + new EntityServiceImpl( + mockAspectDao, null, entityRegistry, true, null, preProcessHooks, true), null, entitySearchService, cachingEntitySearchService, diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java index 978471b53faad..07d27245222b9 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java @@ -234,7 +234,7 @@ protected EntityClient entityClient( PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); return new JavaEntityClient( - new EntityServiceImpl(null, null, entityRegistry, true, null, preProcessHooks), + new EntityServiceImpl(null, null, entityRegistry, true, null, preProcessHooks, true), null, entitySearchService, cachingEntitySearchService, diff --git a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java 
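Note: the .aspect(...) to .recordTemplate(...) builder rename above repeats across every test module in this patch. A minimal sketch of the new construction path, assuming the MCPUpsertBatchItem builder methods and AspectGenerationUtils helpers exactly as they appear in the hunks above (the import path for MCPUpsertBatchItem is inferred from the AspectsBatchImpl import; the urn and Status payload are illustrative only):

    import com.linkedin.common.Status;
    import com.linkedin.common.urn.UrnUtils;
    import com.linkedin.metadata.AspectGenerationUtils;
    import com.linkedin.metadata.entity.EntityService;
    import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;

    public class RecordTemplateRenameSketch {
      // One upsert item; the aspect payload now enters through recordTemplate(...).
      static MCPUpsertBatchItem statusItem(EntityService entityService) {
        return MCPUpsertBatchItem.builder()
            .urn(UrnUtils.getUrn("urn:li:corpuser:datahub")) // illustrative urn
            .aspectName("status")
            .recordTemplate(new Status().setRemoved(false)) // formerly .aspect(...)
            .auditStamp(AspectGenerationUtils.createAuditStamp())
            .systemMetadata(AspectGenerationUtils.createSystemMetadata())
            .build(entityService);
      }
    }

Every call site above passes the entity service (or an aspect retriever) into build(...), which is where the item resolves its aspect spec.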
b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java index d2041c443503e..c21b64c8a4fc0 100644 --- a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java +++ b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java @@ -24,7 +24,7 @@ protected Stream applyMCPSideEffect( MCPUpsertBatchItem.builder() .urn(mirror) .aspectName(input.getAspectName()) - .aspect(input.getAspect()) + .recordTemplate(input.getRecordTemplate()) .auditStamp(input.getAuditStamp()) .systemMetadata(input.getSystemMetadata()) .build(aspectRetriever)); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index e072a59ae77ff..0d1da4a7687ba 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -10,7 +10,6 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.key.DataHubAccessTokenKey; @@ -20,12 +19,11 @@ import java.util.Base64; import java.util.Date; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -146,15 +144,8 @@ public String generateAccessToken( final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); - Stream proposalStream = - Stream.concat( - Stream.of(proposal), - AspectUtils.getAdditionalChanges(proposal, _entityService).stream()); - _entityService.ingestProposal( - AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) - .build(), + AspectsBatchImpl.builder().mcps(List.of(proposal), auditStamp, _entityService).build(), false); return accessToken; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EbeanConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EbeanConfiguration.java new file mode 100644 index 0000000000000..47b406e695a3f --- /dev/null +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EbeanConfiguration.java @@ -0,0 +1,46 @@ +package com.linkedin.metadata.config; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class EbeanConfiguration { + private String username; + private String password; + private String url; + private String driver; + private long minConnections; + private long maxConnections; + private long maxInactiveTimeSeconds; + private long maxAgeMinutes; + private long leakTimeMinutes; + private long waitTimeoutMillis; + private boolean autoCreateDdl; + 
private boolean postgresUseIamAuth; + private LockingConfiguration locking; + + public static final EbeanConfiguration testDefault = + EbeanConfiguration.builder().locking(LockingConfiguration.testDefault).build(); + + @Data + @Builder + @AllArgsConstructor + @NoArgsConstructor + public static class LockingConfiguration { + private boolean enabled; + private long durationSeconds; + private long maximumLocks; + + public static final LockingConfiguration testDefault = + LockingConfiguration.builder() + .enabled(true) + .durationSeconds(60) + .maximumLocks(10000) + .build(); + } +} diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index a7222f2adc3c6..d4c11d4aa53bd 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -141,6 +141,10 @@ ebean: waitTimeoutMillis: ${EBEAN_WAIT_TIMEOUT_MILLIS:1000} autoCreateDdl: ${EBEAN_AUTOCREATE:false} postgresUseIamAuth: ${EBEAN_POSTGRES_USE_AWS_IAM_AUTH:false} + locking: + enabled: ${EBEAN_LOCKING_ENABLED:true} + durationSeconds: ${EBEAN_LOCKING_DURATION_SECONDS:60} + maximumLocks: ${EBEAN_LOCKING_MAXIMUM_LOCKS:20000} # Only required if entityService.impl is cassandra cassandra: diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java index e969793fac1ef..62bfcfa2cbf93 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java @@ -4,6 +4,7 @@ import com.datahub.authorization.AuthorizationConfiguration; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.metadata.config.DataHubConfiguration; +import com.linkedin.metadata.config.EbeanConfiguration; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.config.SystemUpdateConfiguration; import com.linkedin.metadata.config.TestsConfiguration; @@ -71,4 +72,7 @@ public class ConfigurationProvider { /** Structured properties related configurations */ private StructuredPropertiesConfiguration structuredProperties; + + /** Ebean related configuration */ + private EbeanConfiguration ebean; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java index 94aebb2a39efa..22eced4fd5acf 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java @@ -1,6 +1,7 @@ package com.linkedin.gms.factory.entity; import com.datastax.oss.driver.api.core.CqlSession; +import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; @@ -18,8 +19,9 @@ public class EntityAspectDaoFactory { @DependsOn({"gmsEbeanServiceConfig"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected AspectDao createEbeanInstance(Database server) { 
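Note: the locking block added to application.yml binds into the EbeanConfiguration class introduced above, and the EntityAspectDaoFactory change continuing below feeds that config into EbeanAspectDao. A hedged sketch of the test-side wiring, using only types and constants shown in this patch:

    import com.linkedin.metadata.EbeanTestUtils;
    import com.linkedin.metadata.config.EbeanConfiguration;
    import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
    import io.ebean.Database;

    public class EbeanDaoWiringSketch {
      static EbeanAspectDao testDao() {
        Database server = EbeanTestUtils.createTestServer("EbeanDaoWiringSketch");
        // testDefault carries LockingConfiguration.testDefault (enabled,
        // 60s duration, 10000 locks); at runtime the equivalent knobs come
        // from the EBEAN_LOCKING_* environment variables above.
        return new EbeanAspectDao(server, EbeanConfiguration.testDefault);
      }
    }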
- return new EbeanAspectDao(server); + protected AspectDao createEbeanInstance( + Database server, final ConfigurationProvider configurationProvider) { + return new EbeanAspectDao(server, configurationProvider.getEbean()); } @Bean(name = "entityAspectDao") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java index 9123714de5bc8..2bf9804030b49 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java @@ -1,6 +1,7 @@ package com.linkedin.gms.factory.entity; import com.datastax.oss.driver.api.core.CqlSession; +import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.entity.AspectMigrationsDao; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; @@ -18,8 +19,9 @@ public class EntityAspectMigrationsDaoFactory { @DependsOn({"gmsEbeanServiceConfig"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected AspectMigrationsDao createEbeanInstance(Database server) { - return new EbeanAspectDao(server); + protected AspectMigrationsDao createEbeanInstance( + Database server, final ConfigurationProvider configurationProvider) { + return new EbeanAspectDao(server, configurationProvider.getEbean()); } @Bean(name = "entityAspectMigrationsDao") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java index 5fd64b02d08a8..871f16d97be33 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java @@ -43,7 +43,8 @@ protected EntityService createInstance( @Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry, ConfigurationProvider configurationProvider, - UpdateIndicesService updateIndicesService) { + UpdateIndicesService updateIndicesService, + @Value("${featureFlags.showBrowseV2}") final boolean enableBrowsePathV2) { final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker); @@ -56,6 +57,7 @@ protected EntityService createInstance( featureFlags.isAlwaysEmitChangeLog(), updateIndicesService, featureFlags.getPreProcessHooks(), - _ebeanMaxTransactionRetry); + _ebeanMaxTransactionRetry, + enableBrowsePathV2); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index 80e139dcd5c65..49a86406c1ecd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -9,6 +9,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import 
com.linkedin.metadata.boot.UpgradeStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.query.filter.Condition; @@ -128,7 +129,8 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S } private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exception { - BrowsePathsV2 browsePathsV2 = _entityService.buildDefaultBrowsePathV2(urn, true); + BrowsePathsV2 browsePathsV2 = + DefaultAspectsUtil.buildDefaultBrowsePathV2(urn, true, _entityService); log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index 716ae292338ed..19efa5e9c4de2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -79,7 +79,7 @@ public void execute() throws Exception { MCPUpsertBatchItem.builder() .urn(urn) .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - .aspect(dataPlatformInstance.get()) + .recordTemplate(dataPlatformInstance.get()) .auditStamp(aspectAuditStamp) .build(_entityService)); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java index 89ed493e162cc..d2bb61ad7ade5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java @@ -86,7 +86,7 @@ public void execute() throws IOException, URISyntaxException { return MCPUpsertBatchItem.builder() .urn(urn) .aspectName(PLATFORM_ASPECT_NAME) - .aspect(info) + .recordTemplate(info) .auditStamp( new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java index 6f3a415b521e4..1487257d6d830 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java @@ -11,12 +11,12 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; +import java.util.Set; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; @@ -62,30 +62,29 @@ public void execute() throws Exception { log.info("Ingesting {} data types types", dataTypesObj.size()); int numIngested = 0; + + Map urnDataTypesMap = new 
HashMap<>(); for (final JsonNode roleObj : dataTypesObj) { final Urn urn = Urn.createFromString(roleObj.get("urn").asText()); - final DataTypeInfo info = - RecordUtils.toRecordTemplate(DataTypeInfo.class, roleObj.get("info").toString()); - log.info(String.format("Ingesting default data type with urn %s", urn)); - ingestDataType(urn, info); - numIngested++; + urnDataTypesMap.put(urn, roleObj); + } + + Set<Urn> existingUrns = _entityService.exists(urnDataTypesMap.keySet()); + + for (final Map.Entry<Urn, JsonNode> entry : urnDataTypesMap.entrySet()) { + if (!existingUrns.contains(entry.getKey())) { + final DataTypeInfo info = + RecordUtils.toRecordTemplate( + DataTypeInfo.class, entry.getValue().get("info").toString()); + log.info(String.format("Ingesting default data type with urn %s", entry.getKey())); + ingestDataType(entry.getKey(), info); + numIngested++; + } } log.info("Ingested {} new data types", numIngested); } private void ingestDataType(final Urn dataTypeUrn, final DataTypeInfo info) throws Exception { - // Write key - final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); - final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(dataTypeUrn.getEntityType()); - GenericAspect keyAspect = - GenericRecordUtils.serializeAspect( - EntityKeyUtils.convertUrnToEntityKey(dataTypeUrn, keyAspectSpec)); - keyAspectProposal.setAspect(keyAspect); - keyAspectProposal.setAspectName(keyAspectSpec.getName()); - keyAspectProposal.setEntityType(DATA_TYPE_ENTITY_NAME); - keyAspectProposal.setChangeType(ChangeType.UPSERT); - keyAspectProposal.setEntityUrn(dataTypeUrn); - final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(dataTypeUrn); proposal.setEntityType(DATA_TYPE_ENTITY_NAME); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java index b2213eda71cae..4067b4f34fb36 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java @@ -9,13 +9,14 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.util.Pair; +import java.util.Map; import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -42,34 +43,39 @@ public void execute() throws Exception { log.info( "Ingesting {} entity types", _entityService.getEntityRegistry().getEntitySpecs().size()); int numIngested = 0; - for (final EntitySpec spec : _entityService.getEntityRegistry().getEntitySpecs().values()) { - final Urn entityTypeUrn = - UrnUtils.getUrn( - String.format("urn:li:entityType:%s.%s", DATAHUB_NAMESPACE, spec.getName())); - final EntityTypeInfo info = - new EntityTypeInfo() - .setDisplayName(spec.getName()) // TODO: Support display name in the entity registry. 
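Note: both bootstrap steps in this region now share one idempotency recipe: collect candidate urns, issue a single batched existence check, and ingest only what is missing, so re-running the step is a no-op. A condensed sketch of the pattern; the Map of pending ingest actions is an illustrative stand-in for the per-step logic:

    import com.linkedin.common.urn.Urn;
    import com.linkedin.metadata.entity.EntityService;
    import java.util.Map;
    import java.util.Set;

    public class IdempotentBootstrapSketch {
      static int ingestMissing(EntityService entityService, Map<Urn, Runnable> candidates) {
        // One batched existence check instead of one read per urn.
        Set<Urn> existing = entityService.exists(candidates.keySet());
        int numIngested = 0;
        for (Map.Entry<Urn, Runnable> entry : candidates.entrySet()) {
          if (!existing.contains(entry.getKey())) {
            entry.getValue().run(); // e.g. ingestDataType(urn, info) above
            numIngested++;
          }
        }
        return numIngested;
      }
    }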
- .setQualifiedName(entityTypeUrn.getId()); - log.info(String.format("Ingesting entity type with urn %s", entityTypeUrn)); - ingestEntityType(entityTypeUrn, info); - numIngested++; + + Map urnEntitySpecMap = + _entityService.getEntityRegistry().getEntitySpecs().values().stream() + .map( + spec -> + Pair.of( + UrnUtils.getUrn( + String.format( + "urn:li:entityType:%s.%s", DATAHUB_NAMESPACE, spec.getName())), + spec)) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + + Set existingUrns = _entityService.exists(urnEntitySpecMap.keySet()); + + for (final Map.Entry entry : urnEntitySpecMap.entrySet()) { + if (!existingUrns.contains(entry.getKey())) { + final EntityTypeInfo info = + new EntityTypeInfo() + .setDisplayName( + entry + .getValue() + .getName()) // TODO: Support display name in the entity registry. + .setQualifiedName(entry.getKey().getId()); + log.info(String.format("Ingesting entity type with urn %s", entry.getKey())); + ingestEntityType(entry.getKey(), info); + numIngested++; + } } log.info("Ingested {} new entity types", numIngested); } private void ingestEntityType(final Urn entityTypeUrn, final EntityTypeInfo info) throws Exception { - // Write key - final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); - final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(entityTypeUrn.getEntityType()); - GenericAspect keyAspect = - GenericRecordUtils.serializeAspect( - EntityKeyUtils.convertUrnToEntityKey(entityTypeUrn, keyAspectSpec)); - keyAspectProposal.setAspect(keyAspect); - keyAspectProposal.setAspectName(keyAspectSpec.getName()); - keyAspectProposal.setEntityType(ENTITY_TYPE_ENTITY_NAME); - keyAspectProposal.setChangeType(ChangeType.UPSERT); - keyAspectProposal.setEntityUrn(entityTypeUrn); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(entityTypeUrn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java index 3eedbb48aaeca..f28e9ad4e9ed8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java @@ -9,6 +9,7 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.boot.UpgradeStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ListResult; @@ -126,7 +127,7 @@ private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp au } private void migrateBrowsePath(Urn urn, AuditStamp auditStamp) throws Exception { - BrowsePaths newPaths = _entityService.buildDefaultBrowsePath(urn); + BrowsePaths newPaths = DefaultAspectsUtil.buildDefaultBrowsePath(urn, _entityService); log.debug(String.format("Updating browse path for urn %s to value %s", urn, newPaths)); MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java index 0657141562089..0858736e39021 100644 --- 
a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java @@ -1,12 +1,10 @@ package com.linkedin.metadata.boot.steps; import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; -import com.linkedin.common.BrowsePathEntry; -import com.linkedin.common.BrowsePathEntryArray; -import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.entity.Aspect; @@ -93,9 +91,9 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { Mockito.verify(mockSearchService, Mockito.times(9)) .scrollAcrossEntities( - Mockito.any(), + any(), Mockito.eq("*"), - Mockito.any(Filter.class), + any(Filter.class), Mockito.eq(null), Mockito.eq(null), Mockito.eq("5m"), @@ -104,8 +102,7 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of // each entity type Mockito.verify(mockService, Mockito.times(11)) - .ingestProposal( - Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); + .ingestProposal(any(MetadataChangeProposal.class), any(), Mockito.eq(false)); } @Test @@ -135,9 +132,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { Mockito.verify(mockService, Mockito.times(0)) .ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + any(MetadataChangeProposal.class), any(AuditStamp.class), Mockito.anyBoolean()); } private EntityService initMockService() throws URISyntaxException { @@ -146,16 +141,9 @@ private EntityService initMockService() throws URISyntaxException { Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when( - mockService.buildDefaultBrowsePathV2( - Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))) - .thenReturn( - new BrowsePathsV2() - .setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); - Mockito.when( mockService.getEntityV2( - Mockito.any(), + any(), Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) .thenReturn(null); @@ -172,7 +160,7 @@ private SearchService initMockSearchService() { mockSearchService.scrollAcrossEntities( Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), Mockito.eq("*"), - Mockito.any(Filter.class), + any(Filter.class), Mockito.eq(null), Mockito.eq(null), Mockito.eq("5m"), diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java index 1ac0f2f4f914a..5617d7e9714b0 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java @@ -122,7 +122,7 @@ public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throw item.getUrn().getEntityType().equals("chart") && item.getAspectName() .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((MCPUpsertBatchItem) 
item).getAspect() + && ((MCPUpsertBatchItem) item).getRecordTemplate() instanceof DataPlatformInstance)), anyBoolean(), anyBoolean()); @@ -136,7 +136,7 @@ public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throw item.getUrn().getEntityType().equals("chart") && item.getAspectName() .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((MCPUpsertBatchItem) item).getAspect() + && ((MCPUpsertBatchItem) item).getRecordTemplate() instanceof DataPlatformInstance)), anyBoolean(), anyBoolean()); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java index 2bbd06c8a61a4..c5539b001e9e3 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java @@ -13,6 +13,8 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; +import java.util.Collection; +import java.util.Set; import org.jetbrains.annotations.NotNull; import org.mockito.Mockito; import org.testng.Assert; @@ -51,14 +53,17 @@ public void testExecuteValidDataTypesNoExistingDataTypes() throws Exception { @Test public void testExecuteInvalidJson() throws Exception { final EntityService entityService = mock(EntityService.class); + when(entityService.exists(any(Collection.class))).thenAnswer(args -> Set.of()); final IngestDataTypesStep step = new IngestDataTypesStep(entityService, "./boot/test_data_types_invalid.json"); Assert.assertThrows(RuntimeException.class, step::execute); - // Verify no interactions - verifyNoInteractions(entityService); + verify(entityService, times(1)).exists(any()); + + // Verify no additional interactions + verifyNoMoreInteractions(entityService); } private static MetadataChangeProposal buildUpdateDataTypeProposal(final DataTypeInfo info) { diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java index 024ad7b16a844..605d9d1c5e5d8 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java @@ -107,10 +107,6 @@ public void testExecuteFirstTime() throws Exception { final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); - Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1))) - .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka")))); - Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2))) - .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka")))); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); Mockito.when( diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index a7e88966e4f87..13d2e501abf09 100644 --- 
a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -26,7 +26,6 @@ import com.linkedin.entity.Aspect; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.batch.AspectsBatch; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.RollbackRunResult; @@ -452,20 +451,12 @@ public static Pair ingestProposal( .setTime(System.currentTimeMillis()) .setActor(UrnUtils.getUrn(actorUrn)); - final List additionalChanges = - AspectUtils.getAdditionalChanges(serviceProposal, entityService); - log.info("Proposal: {}", serviceProposal); Throwable exceptionally = null; try { - Stream proposalStream = - Stream.concat( - Stream.of(serviceProposal), - AspectUtils.getAdditionalChanges(serviceProposal, entityService).stream()); - AspectsBatch batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), auditStamp, entityService) + .mcps(List.of(serviceProposal), auditStamp, entityService) .build(); Set proposalResult = entityService.ingestProposal(batch, async); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java index 503330fdc8a2e..44202c20ca6db 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -481,7 +481,7 @@ private UpsertItem toUpsertItem( .urn(entityUrn) .aspectName(aspectSpec.getName()) .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) - .aspect( + .recordTemplate( GenericRecordUtils.deserializeAspect( ByteString.copyString(jsonAspect, StandardCharsets.UTF_8), GenericRecordUtils.JSON, diff --git a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java index 17be5a60816d3..20862bbc7f000 100644 --- a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java +++ b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java @@ -11,6 +11,7 @@ import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -67,13 +68,15 @@ public void setup() IllegalAccessException { EntityRegistry mockEntityRegistry = new MockEntityRegistry(); AspectDao aspectDao = Mockito.mock(AspectDao.class); - Mockito.when( - aspectDao.runInTransactionWithRetry( - ArgumentMatchers.>any(), any(), anyInt())) + when(aspectDao.runInTransactionWithRetry( + ArgumentMatchers.>>any(), + any(AspectsBatch.class), + anyInt())) .thenAnswer( i -> - ((Function) i.getArgument(0)) - .apply(Mockito.mock(Transaction.class))); + List.of( + ((Function>) i.getArgument(0)) + .apply(Mockito.mock(Transaction.class)))); EventProducer mockEntityEventProducer = 
Mockito.mock(EventProducer.class); UpdateIndicesService mockUpdateIndicesService = mock(UpdateIndicesService.class); diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java index b082a735e8084..be5f99bed8e63 100644 --- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java +++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java @@ -59,7 +59,7 @@ public MockEntityService( @Nonnull EntityRegistry entityRegistry, @Nonnull UpdateIndicesService updateIndicesService, PreProcessHooks preProcessHooks) { - super(aspectDao, producer, entityRegistry, true, updateIndicesService, preProcessHooks); + super(aspectDao, producer, entityRegistry, true, updateIndicesService, preProcessHooks, true); } @Override diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index ffa3abe6806f9..21a9f47a13f73 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -248,20 +248,9 @@ public Task ingestProposal( return RestliUtil.toTask(() -> { log.debug("Proposal: {}", metadataChangeProposal); try { - final AspectsBatch batch; - if (asyncBool) { - // if async we'll expand the getAdditionalChanges later, no need to do this early - batch = AspectsBatchImpl.builder() - .mcps(List.of(metadataChangeProposal), auditStamp, _entityService) - .build(); - } else { - Stream proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream()); - - batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) - .build(); - } + final AspectsBatch batch = AspectsBatchImpl.builder() + .mcps(List.of(metadataChangeProposal), auditStamp, _entityService) + .build(); Set results = _entityService.ingestProposal(batch, asyncBool); diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java index d6130e05b77bd..1678fe92ec70e 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java @@ -53,7 +53,7 @@ public void setup() { _updateIndicesService = mock(UpdateIndicesService.class); _preProcessHooks = mock(PreProcessHooks.class); _entityService = new EntityServiceImpl(_aspectDao, _producer, _entityRegistry, false, - _updateIndicesService, _preProcessHooks); + _updateIndicesService, _preProcessHooks, true); _authorizer = mock(Authorizer.class); _aspectResource.setAuthorizer(_authorizer); _aspectResource.setEntityService(_entityService); @@ -84,13 +84,13 @@ public void testAsyncDefaultAspects() throws URISyntaxException { MCPUpsertBatchItem req = MCPUpsertBatchItem.builder() .urn(urn) .aspectName(mcp.getAspectName()) - .aspect(mcp.getAspect()) + .recordTemplate(mcp.getAspect()) .auditStamp(new AuditStamp()) .metadataChangeProposal(mcp) 
.build(_entityService); when(_aspectDao.runInTransactionWithRetry(any(), any(), anyInt())) .thenReturn( - List.of( + List.of(List.of( UpdateAspectResult.builder() .urn(urn) .newValue(new DatasetProperties().setName("name1")) @@ -120,7 +120,7 @@ public void testAsyncDefaultAspects() throws URISyntaxException { .newValue(new DatasetProperties().setName("name5")) .auditStamp(new AuditStamp()) .request(req) - .build())); + .build()))); _aspectResource.ingestProposal(mcp, "false"); verify(_producer, times(5)) .produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class)); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java index 2c1596474fb21..55373730e7b67 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java @@ -1,7 +1,6 @@ package com.linkedin.metadata.entity; import com.datahub.authentication.Authentication; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -12,18 +11,12 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; -import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTimeUtils; @@ -33,66 +26,6 @@ public class AspectUtils { private AspectUtils() {} - public static final Set SUPPORTED_TYPES = - Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); - - public static List getAdditionalChanges( - @Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService, - boolean onPrimaryKeyInsertOnly) { - - // No additional changes for unsupported operations - if (!SUPPORTED_TYPES.contains(metadataChangeProposal.getChangeType())) { - return Collections.emptyList(); - } - - final Urn urn = - EntityKeyUtils.getUrnFromProposal( - metadataChangeProposal, - entityService.getKeyAspectSpec(metadataChangeProposal.getEntityType())); - - final Map includedAspects; - if (metadataChangeProposal.getChangeType() != ChangeType.PATCH) { - RecordTemplate aspectRecord = - GenericRecordUtils.deserializeAspect( - metadataChangeProposal.getAspect().getValue(), - metadataChangeProposal.getAspect().getContentType(), - entityService - .getEntityRegistry() - .getEntitySpec(urn.getEntityType()) - .getAspectSpec(metadataChangeProposal.getAspectName())); - includedAspects = ImmutableMap.of(metadataChangeProposal.getAspectName(), aspectRecord); - } else { - includedAspects = ImmutableMap.of(); - } - - if (onPrimaryKeyInsertOnly) { - return entityService - .generateDefaultAspectsOnFirstWrite(urn, includedAspects) - .getValue() - .stream() - .map( - entry -> - getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } else { - return 
entityService.generateDefaultAspectsIfMissing(urn, includedAspects).stream() - .map( - entry -> - getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } - } - - public static List getAdditionalChanges( - @Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService) { - - return getAdditionalChanges(metadataChangeProposal, entityService, false); - } - public static Map batchGetLatestAspect( String entity, Set urns, @@ -112,40 +45,6 @@ public static Map batchGetLatestAspect( return finalResult; } - private static MetadataChangeProposal getProposalFromAspect( - String aspectName, RecordTemplate aspect, MetadataChangeProposal original) { - MetadataChangeProposal proposal = new MetadataChangeProposal(); - GenericAspect genericAspect = GenericRecordUtils.serializeAspect(aspect); - // Set net new fields - proposal.setAspect(genericAspect); - proposal.setAspectName(aspectName); - - // Set fields determined from original - // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an - // UPSERT - proposal.setChangeType(original.getChangeType()); - if (ChangeType.PATCH.equals(proposal.getChangeType())) { - proposal.setChangeType(ChangeType.UPSERT); - } - - if (original.getSystemMetadata() != null) { - proposal.setSystemMetadata(original.getSystemMetadata()); - } - if (original.getEntityUrn() != null) { - proposal.setEntityUrn(original.getEntityUrn()); - } - if (original.getEntityKeyAspect() != null) { - proposal.setEntityKeyAspect(original.getEntityKeyAspect()); - } - if (original.getAuditHeader() != null) { - proposal.setAuditHeader(original.getAuditHeader()); - } - - proposal.setEntityType(original.getEntityType()); - - return proposal; - } - public static MetadataChangeProposal buildMetadataChangeProposal( @Nonnull Urn urn, @Nonnull String aspectName, @Nonnull RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index 94ab69e895920..d9b0f4b73d580 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.entity; import com.linkedin.common.AuditStamp; -import com.linkedin.common.BrowsePaths; -import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; @@ -255,34 +253,6 @@ Optional getAspectSpec( String getKeyAspectName(@Nonnull final Urn urn); - /** - * Generate default aspects if not present in the database. - * - * @param urn entity urn - * @param includedAspects aspects being written - * @return additional aspects to be written - */ - List> generateDefaultAspectsIfMissing( - @Nonnull final Urn urn, Map includedAspects); - - /** - * Generate default aspects if the entity key aspect is NOT in the database **AND** the key aspect - * is being written, present in `includedAspects`. - * - *
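Note: the interface methods being removed here had one job left, expanding a proposal with default aspects, and the call sites shown earlier (StatefulTokenService, MappingUtil, AspectResource) now simply hand a single proposal to the batch builder. A minimal sketch of that call shape, assuming the AspectsBatchImpl and ingestProposal signatures from those hunks:

    import com.linkedin.common.AuditStamp;
    import com.linkedin.metadata.entity.EntityService;
    import com.linkedin.metadata.entity.IngestResult;
    import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl;
    import com.linkedin.mxe.MetadataChangeProposal;
    import java.util.List;
    import java.util.Set;

    public class SingleProposalIngestSketch {
      static Set<IngestResult> ingest(
          EntityService entityService, MetadataChangeProposal proposal, AuditStamp auditStamp) {
        // No more AspectUtils.getAdditionalChanges(...) concatenation at the
        // call site; key aspect, browse paths and platform instance defaults
        // are generated inside the entity service.
        return entityService.ingestProposal(
            AspectsBatchImpl.builder()
                .mcps(List.of(proposal), auditStamp, entityService)
                .build(),
            false /* async */);
      }
    }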
<p>Does not automatically create key aspects. - * - * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration - *
<p>
    This version is more efficient in that it only generates additional writes when a new - * entity is being minted for the first time. The drawback is that it will not automatically - * add key aspects, in case the producer is not bothering to ensure that the entity exists - * before writing non-key aspects. - * @param urn entity urn - * @param includedAspects aspects being written - * @return whether key aspect exists in database and the additional aspects to be written - */ - Pair>> generateDefaultAspectsOnFirstWrite( - @Nonnull final Urn urn, Map includedAspects); - AspectSpec getKeyAspectSpec(@Nonnull final String entityName); Set getEntityAspectNames(final String entityName); @@ -339,17 +309,5 @@ default boolean exists(@Nonnull Urn urn, boolean includeSoftDelete) { void setWritable(boolean canWrite); - BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISyntaxException; - - /** - * Builds the default browse path V2 aspects for all entities. - * - *
<p>
    This method currently supports datasets, charts, dashboards, and data jobs best. Everything - * else will have a basic "Default" folder added to their browsePathV2. - */ - @Nonnull - BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) - throws URISyntaxException; - RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName); } diff --git a/smoke-test/tests/cli/datahub_cli.py b/smoke-test/tests/cli/datahub_cli.py index 81ae6a34264ad..d1620d03c88b2 100644 --- a/smoke-test/tests/cli/datahub_cli.py +++ b/smoke-test/tests/cli/datahub_cli.py @@ -24,8 +24,8 @@ def test_setup(): session, gms_host = get_session_and_host() - assert "browsePaths" not in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "browsePathsV2" not in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["browsePathsV2"], typed=False ) assert "editableDatasetProperties" not in get_aspects_for_entity( entity_urn=dataset_urn, aspects=["editableDatasetProperties"], typed=False @@ -36,8 +36,8 @@ def test_setup(): ).config.run_id print("Setup ingestion id: " + ingested_dataset_run_id) - assert "browsePaths" in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "browsePathsV2" in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["browsePathsV2"], typed=False ) yield @@ -58,8 +58,8 @@ def test_setup(): ), ) - assert "browsePaths" not in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "browsePathsV2" not in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["browsePathsV2"], typed=False ) assert "editableDatasetProperties" not in get_aspects_for_entity( entity_urn=dataset_urn, aspects=["editableDatasetProperties"], typed=False @@ -79,8 +79,8 @@ def test_rollback_editable(): print("Ingested dataset id:", ingested_dataset_run_id) # Assert that second data ingestion worked - assert "browsePaths" in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "browsePathsV2" in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["browsePathsV2"], typed=False ) # Make editable change @@ -111,6 +111,6 @@ def test_rollback_editable(): entity_urn=dataset_urn, aspects=["editableDatasetProperties"], typed=False ) # But first ingestion aspects should not be present - assert "browsePaths" not in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "browsePathsV2" not in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["browsePathsV2"], typed=False ) diff --git a/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js b/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js index f45edc5fa0481..d951b15d4a592 100644 --- a/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js +++ b/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js @@ -111,6 +111,7 @@ describe("search", () => { ); }); + /* Legacy Browse Path Disabled when showBrowseV2 = `true` it("should take you to the old browse experience when clicking on browse path from entity profile page when browse flag is off", () => { setBrowseFeatureFlag(false); cy.login(); @@ -122,6 +123,7 @@ describe("search", () => { }); cy.url().should("include", "/browse/dataset/prod/bigquery/cypress_project"); }); +*/ it("should take you to the unified search and browse experience when clicking entity type on home page with the browse flag on", () => { setBrowseFeatureFlag(true); diff 
--git a/smoke-test/tests/delete/delete_test.py b/smoke-test/tests/delete/delete_test.py index 21833d0bd30a1..3a80e05d0cc4b 100644 --- a/smoke-test/tests/delete/delete_test.py +++ b/smoke-test/tests/delete/delete_test.py @@ -41,8 +41,8 @@ def test_setup(): session, gms_host = get_session_and_host() try: - assert "browsePaths" not in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "institutionalMemory" not in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["institutionalMemory"], typed=False ) assert "editableDatasetProperties" not in get_aspects_for_entity( entity_urn=dataset_urn, aspects=["editableDatasetProperties"], typed=False @@ -55,8 +55,8 @@ def test_setup(): "tests/delete/cli_test_data.json" ).config.run_id - assert "browsePaths" in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "institutionalMemory" in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["institutionalMemory"], typed=False ) yield @@ -70,8 +70,8 @@ def test_setup(): wait_for_writes_to_sync() - assert "browsePaths" not in get_aspects_for_entity( - entity_urn=dataset_urn, aspects=["browsePaths"], typed=False + assert "institutionalMemory" not in get_aspects_for_entity( + entity_urn=dataset_urn, aspects=["institutionalMemory"], typed=False ) assert "editableDatasetProperties" not in get_aspects_for_entity( entity_urn=dataset_urn, aspects=["editableDatasetProperties"], typed=False From 88b5c063a4e85283f7a4a447b392809cbca7f2f6 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 1 Feb 2024 07:04:20 -0600 Subject: [PATCH 468/792] chore(compose): mysql compose healthcheck (#9754) --- docker/profiles/docker-compose.prerequisites.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml index b587f811128fe..47df17f3c5e46 100644 --- a/docker/profiles/docker-compose.prerequisites.yml +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -117,10 +117,10 @@ services: restart: on-failure healthcheck: test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD - start_period: 10s - interval: 1s - retries: 3 - timeout: 5s + start_period: 20s + interval: 2s + timeout: 10s + retries: 5 volumes: - ./mysql/init.sql:/docker-entrypoint-initdb.d/init.sql - mysqldata:/var/lib/mysql From d52818d489baf8e837bd8d748ac30e6c1d900db1 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 1 Feb 2024 07:44:01 -0600 Subject: [PATCH 469/792] chore(gradle): datahub-frontend docker dependency avoid yarnInstall (#9758) --- datahub-frontend/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 1174c5c5cfd5d..ab4ce405a5541 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -87,7 +87,7 @@ docker { } } -task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) { +task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', dist]) { from ("${buildDir}/distributions") include "datahub-frontend-${version}.zip" into "${buildDir}/docker/" From eb97120469693a3a78239825f0ad091eada015d1 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 1 Feb 2024 12:57:19 -0600 Subject: [PATCH 470/792] fix(search): fix 
default entities for aggregation filters (#9761) --- .../elasticsearch/query/ESSearchDAO.java | 6 +- .../request/AggregationQueryBuilder.java | 420 +++++++++++++++++- .../query/request/SearchRequestHandler.java | 391 +--------------- ...ySearchAggregationCandidateSourceTest.java | 15 +- .../request/AggregationQueryBuilderTest.java | 24 +- .../DomainsCandidateSourceFactory.java | 5 +- .../TopTagsCandidateSourceFactory.java | 5 +- .../TopTermsCandidateSourceFactory.java | 5 +- .../DomainsCandidateSource.java | 6 +- .../EntitySearchAggregationSource.java | 16 +- .../candidatesource/TopPlatformsSource.java | 7 +- .../candidatesource/TopTagsSource.java | 5 +- .../candidatesource/TopTermsSource.java | 5 +- .../metadata/search/utils/QueryUtils.java | 21 + 14 files changed, 510 insertions(+), 421 deletions(-) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 7de2770626ae3..76153a8d2adb3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -22,9 +22,11 @@ import com.linkedin.metadata.search.FilterValueArray; import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder; import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler; import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; +import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.metrics.MetricUtils; import io.opentelemetry.extension.annotations.WithSpan; @@ -317,7 +319,7 @@ public Map aggregateByValue( int limit) { List entitySpecs; if (entityNames == null || entityNames.isEmpty()) { - entitySpecs = new ArrayList<>(entityRegistry.getEntitySpecs().values()); + entitySpecs = QueryUtils.getQueryByDefaultEntitySpecs(entityRegistry); } else { entitySpecs = entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); @@ -341,7 +343,7 @@ public Map aggregateByValue( MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return SearchRequestHandler.extractAggregationsFromResponse(searchResponse, field); + return AggregationQueryBuilder.extractAggregationsFromResponse(searchResponse, field); } catch (Exception e) { log.error("Aggregation query failed", e); throw new ESQueryException("Aggregation query failed:", e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index bdc0332b040df..887d4b22f37e2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -1,36 +1,71 @@ package 
com.linkedin.metadata.search.elasticsearch.query.request; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; import static com.linkedin.metadata.utils.SearchUtil.*; +import com.linkedin.data.template.LongMap; import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.StructuredPropertyUtils; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.AggregationMetadata; +import com.linkedin.metadata.search.FilterValueArray; import com.linkedin.metadata.search.utils.ESUtils; +import com.linkedin.metadata.utils.SearchUtil; +import com.linkedin.util.Pair; +import io.opentelemetry.extension.annotations.WithSpan; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.BinaryOperator; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang.StringUtils; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; +import org.opensearch.search.aggregations.Aggregations; +import org.opensearch.search.aggregations.bucket.missing.ParsedMissing; +import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; +import org.opensearch.search.aggregations.bucket.terms.Terms; @Slf4j public class AggregationQueryBuilder { + private static final String URN_FILTER = "urn"; - private final SearchConfiguration _configs; - private final Set _defaultFacetFields; - private final Set _allFacetFields; + private final SearchConfiguration configs; + private final Set defaultFacetFields; + private final Set allFacetFields; + private final Map> entitySearchAnnotations; + + private Map filtersToDisplayName; public AggregationQueryBuilder( @Nonnull final SearchConfiguration configs, - @Nonnull final List annotations) { - this._configs = Objects.requireNonNull(configs, "configs must not be null"); - this._defaultFacetFields = getDefaultFacetFields(annotations); - this._allFacetFields = getAllFacetFields(annotations); + @Nonnull Map> entitySearchAnnotations) { + this.configs = Objects.requireNonNull(configs, "configs must not be null"); + this.entitySearchAnnotations = entitySearchAnnotations; + + List annotations = + this.entitySearchAnnotations.values().stream() + .flatMap(List::stream) + .collect(Collectors.toList()); + this.defaultFacetFields = getDefaultFacetFields(annotations); + this.allFacetFields = getAllFacetFields(annotations); } /** Get the set of default aggregations, across all facets. 
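For example, an entity whose searchable annotations set addToFilters on fields such as platform or origin yields one terms aggregation per facet here, each capped at the configured maxTermBucketSize.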
*/ @@ -48,7 +83,7 @@ public List getAggregations(@Nullable List facets) { facetsToAggregate = facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); } else { - facetsToAggregate = _defaultFacetFields; + facetsToAggregate = defaultFacetFields; } return facetsToAggregate.stream() .map(this::facetToAggregationBuilder) @@ -79,13 +114,13 @@ private boolean isValidAggregate(final String inputFacet) { !facets.isEmpty() && ((facets.size() == 1 && facets.get(0).startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + ".")) - || _allFacetFields.containsAll(facets)); + || allFacetFields.containsAll(facets)); if (!isValid) { log.warn( String.format( "Requested facet for search filter aggregations that isn't part of the filters. " + "Provided: %s; Available: %s", - inputFacet, _allFacetFields)); + inputFacet, allFacetFields)); } return isValid; } @@ -122,11 +157,11 @@ private AggregationBuilder facetToAggregationBuilder(final String inputFacet) { facet.equalsIgnoreCase(INDEX_VIRTUAL_FIELD) ? AggregationBuilders.terms(inputFacet) .field(getAggregationField("_index")) - .size(_configs.getMaxTermBucketSize()) + .size(configs.getMaxTermBucketSize()) .minDocCount(0) : AggregationBuilders.terms(inputFacet) .field(getAggregationField(facet)) - .size(_configs.getMaxTermBucketSize()); + .size(configs.getMaxTermBucketSize()); } if (lastAggBuilder != null) { aggBuilder = aggBuilder.subAggregation(lastAggBuilder); @@ -173,4 +208,365 @@ List getAllFacetFieldsFromAnnotation(final SearchableAnnotation annotati } return facetsFromAnnotation; } + + private String computeDisplayName(String name) { + if (getFacetToDisplayNames().containsKey(name)) { + return getFacetToDisplayNames().get(name); + } else if (name.contains(AGGREGATION_SEPARATOR_CHAR)) { + return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)) + .map(i -> getFacetToDisplayNames().get(i)) + .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); + } + return name; + } + + List extractAggregationMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + final List aggregationMetadataList = new ArrayList<>(); + if (searchResponse.getAggregations() == null) { + return addFiltersToAggregationMetadata(aggregationMetadataList, filter); + } + for (Map.Entry entry : + searchResponse.getAggregations().getAsMap().entrySet()) { + if (entry.getValue() instanceof ParsedTerms) { + processTermAggregations(entry, aggregationMetadataList); + } + if (entry.getValue() instanceof ParsedMissing) { + processMissingAggregations(entry, aggregationMetadataList); + } + } + return addFiltersToAggregationMetadata(aggregationMetadataList, filter); + } + + private void processTermAggregations( + final Map.Entry entry, + final List aggregationMetadataList) { + final Map oneTermAggResult = + extractTermAggregations( + (ParsedTerms) entry.getValue(), entry.getKey().equals(INDEX_VIRTUAL_FIELD)); + if (oneTermAggResult.isEmpty()) { + return; + } + final AggregationMetadata aggregationMetadata = + new AggregationMetadata() + .setName(entry.getKey()) + .setDisplayName(computeDisplayName(entry.getKey())) + .setAggregations(new LongMap(oneTermAggResult)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); + aggregationMetadataList.add(aggregationMetadata); + } + + /** + * Adds nested sub-aggregation values to the aggregated results + * + * @param aggs The aggregations to traverse. 
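(Nested terms and missing sub-aggregations are flattened into the same result map, with their keys joined by AGGREGATION_SEPARATOR_CHAR.)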
Could be null (base case) + * @return A map from names to aggregation count values + */ + @Nonnull + private static Map recursivelyAddNestedSubAggs(@Nullable Aggregations aggs) { + final Map aggResult = new HashMap<>(); + + if (aggs != null) { + for (Map.Entry entry : aggs.getAsMap().entrySet()) { + if (entry.getValue() instanceof ParsedTerms) { + recurseTermsAgg((ParsedTerms) entry.getValue(), aggResult, false); + } else if (entry.getValue() instanceof ParsedMissing) { + recurseMissingAgg((ParsedMissing) entry.getValue(), aggResult); + } else { + throw new UnsupportedOperationException( + "Unsupported aggregation type: " + entry.getValue().getClass().getName()); + } + } + } + return aggResult; + } + + private static void recurseTermsAgg( + ParsedTerms terms, Map aggResult, boolean includeZeroes) { + List bucketList = terms.getBuckets(); + bucketList.forEach(bucket -> processTermBucket(bucket, aggResult, includeZeroes)); + } + + private static void processTermBucket( + Terms.Bucket bucket, Map aggResult, boolean includeZeroes) { + String key = bucket.getKeyAsString(); + // Gets filtered sub aggregation doc count if exist + Map subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); + subAggs.forEach( + (entryKey, entryValue) -> + aggResult.put( + String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, entryKey), entryValue)); + long docCount = bucket.getDocCount(); + if (includeZeroes || docCount > 0) { + aggResult.put(key, docCount); + } + } + + private static void recurseMissingAgg(ParsedMissing missing, Map aggResult) { + Map subAggs = recursivelyAddNestedSubAggs(missing.getAggregations()); + subAggs.forEach( + (key, value) -> + aggResult.put( + String.format("%s%s%s", missing.getName(), AGGREGATION_SEPARATOR_CHAR, key), + value)); + long docCount = missing.getDocCount(); + if (docCount > 0) { + aggResult.put(missing.getName(), docCount); + } + } + + /** + * Extracts term aggregations give a parsed term. + * + * @param terms an abstract parse term, input can be either ParsedStringTerms ParsedLongTerms + * @return a map with aggregation key and corresponding doc counts + */ + @Nonnull + private static Map extractTermAggregations( + @Nonnull ParsedTerms terms, boolean includeZeroes) { + + final Map aggResult = new HashMap<>(); + recurseTermsAgg(terms, aggResult, includeZeroes); + + return aggResult; + } + + /** Injects the missing conjunctive filters into the aggregations list. 
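Elasticsearch omits facet values with no matching documents, so any value the user is actively filtering on is re-inserted with a zero count to keep it visible in the filter panel.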
*/ + public List addFiltersToAggregationMetadata( + @Nonnull final List originalMetadata, @Nullable final Filter filter) { + if (filter == null) { + return originalMetadata; + } + if (filter.getOr() != null) { + addOrFiltersToAggregationMetadata(filter.getOr(), originalMetadata); + } else if (filter.getCriteria() != null) { + addCriteriaFiltersToAggregationMetadata(filter.getCriteria(), originalMetadata); + } + return originalMetadata; + } + + void addOrFiltersToAggregationMetadata( + @Nonnull final ConjunctiveCriterionArray or, + @Nonnull final List originalMetadata) { + for (ConjunctiveCriterion conjunction : or) { + // For each item in the conjunction, inject an empty aggregation if necessary + addCriteriaFiltersToAggregationMetadata(conjunction.getAnd(), originalMetadata); + } + } + + private void addCriteriaFiltersToAggregationMetadata( + @Nonnull final CriterionArray criteria, + @Nonnull final List originalMetadata) { + for (Criterion criterion : criteria) { + addCriterionFiltersToAggregationMetadata(criterion, originalMetadata); + } + } + + private void addCriterionFiltersToAggregationMetadata( + @Nonnull final Criterion criterion, + @Nonnull final List aggregationMetadata) { + + // We should never see duplicate aggregation for the same field in aggregation metadata list. + final Map aggregationMetadataMap = + aggregationMetadata.stream() + .collect(Collectors.toMap(AggregationMetadata::getName, agg -> agg)); + + // Map a filter criterion to a facet field (e.g. domains.keyword -> domains) + final String finalFacetField = toFacetField(criterion.getField()); + + if (finalFacetField == null) { + log.warn( + String.format( + "Found invalid filter field for entity search. Invalid or unrecognized facet %s", + criterion.getField())); + return; + } + + // We don't want to add urn filters to the aggregations we return as a sidecar to search + // results. + // They are automatically added by searchAcrossLineage and we dont need them to show up in the + // filter panel. + if (finalFacetField.equals(URN_FILTER)) { + return; + } + + if (aggregationMetadataMap.containsKey(finalFacetField)) { + /* + * If we already have aggregations for the facet field, simply inject any missing values counts into the set. + * If there are no results for a particular facet value, it will NOT be in the original aggregation set returned by + * Elasticsearch. + */ + AggregationMetadata originalAggMetadata = aggregationMetadataMap.get(finalFacetField); + if (criterion.hasValues()) { + criterion + .getValues() + .forEach( + value -> + addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata)); + } else { + addMissingAggregationValueToAggregationMetadata(criterion.getValue(), originalAggMetadata); + } + } else { + /* + * If we do not have ANY aggregation for the facet field, then inject a new aggregation metadata object for the + * facet field. + * If there are no results for a particular facet, it will NOT be in the original aggregation set returned by + * Elasticsearch. 
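+ * Each injected value carries a zero count, signalling an empty but still-active filter.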
+ */ + aggregationMetadata.add( + buildAggregationMetadata( + finalFacetField, + getFacetToDisplayNames().getOrDefault(finalFacetField, finalFacetField), + new LongMap( + criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))), + new FilterValueArray( + criterion.getValues().stream() + .map(value -> createFilterValue(value, 0L, true)) + .collect(Collectors.toList())))); + } + } + + private void addMissingAggregationValueToAggregationMetadata( + @Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) { + if (originalMetadata.getAggregations().entrySet().stream() + .noneMatch(entry -> value.equals(entry.getKey())) + || originalMetadata.getFilterValues().stream() + .noneMatch(entry -> entry.getValue().equals(value))) { + // No aggregation found for filtered value -- inject one! + originalMetadata.getAggregations().put(value, 0L); + originalMetadata.getFilterValues().add(createFilterValue(value, 0L, true)); + } + } + + private AggregationMetadata buildAggregationMetadata( + @Nonnull final String facetField, + @Nonnull final String displayName, + @Nonnull final LongMap aggValues, + @Nonnull final FilterValueArray filterValues) { + return new AggregationMetadata() + .setName(facetField) + .setDisplayName(displayName) + .setAggregations(aggValues) + .setFilterValues(filterValues); + } + + private List>> getFacetFieldDisplayNameFromAnnotation( + @Nonnull EntitySpec entitySpec, @Nonnull final SearchableAnnotation annotation) { + final List>> facetsFromAnnotation = new ArrayList<>(); + // Case 1: Default Keyword field + if (annotation.isAddToFilters()) { + facetsFromAnnotation.add( + Pair.of( + annotation.getFieldName(), + Pair.of(entitySpec.getName(), annotation.getFilterName()))); + } + // Case 2: HasX boolean field + if (annotation.isAddHasValuesToFilters() && annotation.getHasValuesFieldName().isPresent()) { + facetsFromAnnotation.add( + Pair.of( + annotation.getHasValuesFieldName().get(), + Pair.of(entitySpec.getName(), annotation.getHasValuesFilterName()))); + } + return facetsFromAnnotation; + } + + @WithSpan + public static Map extractAggregationsFromResponse( + @Nonnull SearchResponse searchResponse, @Nonnull String aggregationName) { + if (searchResponse.getAggregations() == null) { + return Collections.emptyMap(); + } + + Aggregation aggregation = searchResponse.getAggregations().get(aggregationName); + if (aggregation == null) { + return Collections.emptyMap(); + } + if (aggregation instanceof ParsedTerms) { + return extractTermAggregations( + (ParsedTerms) aggregation, aggregationName.equals("_entityType")); + } else if (aggregation instanceof ParsedMissing) { + return Collections.singletonMap( + aggregation.getName(), ((ParsedMissing) aggregation).getDocCount()); + } + throw new UnsupportedOperationException( + "Unsupported aggregation type: " + aggregation.getClass().getName()); + } + + /** + * Only used in aggregation queries, lazy load + * + * @return map of field name to facet display names + */ + private Map getFacetToDisplayNames() { + if (filtersToDisplayName == null) { + // Validate field names + Map>>> validateFieldMap = + entitySearchAnnotations.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream() + .flatMap( + annotation -> + getFacetFieldDisplayNameFromAnnotation(entry.getKey(), annotation) + .stream())) + .collect(Collectors.groupingBy(Pair::getFirst, Collectors.toSet())); + for (Map.Entry>>> entry : + validateFieldMap.entrySet()) { + if (entry.getValue().stream().map(i -> 
i.getSecond().getSecond()).distinct().count() > 1) { + Map>> displayNameEntityMap = + entry.getValue().stream() + .map(Pair::getSecond) + .collect(Collectors.groupingBy(Pair::getSecond, Collectors.toSet())); + throw new IllegalStateException( + String.format( + "Facet field collision on field `%s`. Incompatible Display Name across entities. Multiple Display Names detected: %s", + entry.getKey(), displayNameEntityMap)); + } + } + + filtersToDisplayName = + entitySearchAnnotations.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream() + .flatMap( + annotation -> + getFacetFieldDisplayNameFromAnnotation(entry.getKey(), annotation) + .stream())) + .collect( + Collectors.toMap(Pair::getFirst, p -> p.getSecond().getSecond(), mapMerger())); + filtersToDisplayName.put(INDEX_VIRTUAL_FIELD, "Type"); + } + + return filtersToDisplayName; + } + + private void processMissingAggregations( + final Map.Entry entry, + final List aggregationMetadataList) { + ParsedMissing parsedMissing = (ParsedMissing) entry.getValue(); + Long docCount = parsedMissing.getDocCount(); + LongMap longMap = new LongMap(); + longMap.put(entry.getKey(), docCount); + final AggregationMetadata aggregationMetadata = + new AggregationMetadata() + .setName(entry.getKey()) + .setDisplayName(computeDisplayName(entry.getKey())) + .setAggregations(longMap) + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(longMap, Collections.emptySet()))); + aggregationMetadataList.add(aggregationMetadata); + } + + // If values are not equal, throw error + private BinaryOperator mapMerger() { + return (s1, s2) -> { + if (!StringUtils.equals(s1, s2)) { + throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); + } + return s1; + }; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 277e15e1334d5..3ac05ed122cd7 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -1,7 +1,6 @@ package com.linkedin.metadata.search.elasticsearch.query.request; import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; -import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; import static com.linkedin.metadata.utils.SearchUtil.*; @@ -10,22 +9,16 @@ import com.google.common.collect.ImmutableMap; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.DoubleMap; -import com.linkedin.data.template.LongMap; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.query.SearchFlags; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import 
com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.AggregationMetadata; import com.linkedin.metadata.search.AggregationMetadataArray; -import com.linkedin.metadata.search.FilterValueArray; import com.linkedin.metadata.search.MatchedField; import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.ScrollResult; @@ -37,13 +30,11 @@ import com.linkedin.metadata.search.SearchSuggestionArray; import com.linkedin.metadata.search.features.Features; import com.linkedin.metadata.search.utils.ESUtils; -import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.util.Pair; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -51,13 +42,11 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import java.util.function.BinaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang.StringUtils; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.text.Text; @@ -66,12 +55,7 @@ import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; -import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.AggregationBuilders; -import org.opensearch.search.aggregations.Aggregations; -import org.opensearch.search.aggregations.bucket.missing.ParsedMissing; -import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; -import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; import org.opensearch.search.fetch.subphase.highlight.HighlightField; @@ -88,11 +72,9 @@ public class SearchRequestHandler { .setSkipHighlighting(false); private static final Map, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); - private static final String URN_FILTER = "urn"; private final List _entitySpecs; private final Set _defaultQueryFieldNames; private final HighlightBuilder _highlights; - private final Map _filtersToDisplayName; private final SearchConfiguration _configs; private final SearchQueryBuilder _searchQueryBuilder; @@ -111,16 +93,16 @@ private SearchRequestHandler( @Nonnull SearchConfiguration configs, @Nullable CustomSearchConfiguration customSearchConfiguration) { _entitySpecs = entitySpecs; - List annotations = getSearchableAnnotations(); + Map> entitySearchAnnotations = + getSearchableAnnotations(); + List annotations = + entitySearchAnnotations.values().stream() + .flatMap(List::stream) + .collect(Collectors.toList()); _defaultQueryFieldNames = getDefaultQueryFieldNames(annotations); - _filtersToDisplayName = - annotations.stream() - .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); - _filtersToDisplayName.put(INDEX_VIRTUAL_FIELD, "Type"); _highlights = getHighlights(); _searchQueryBuilder = new SearchQueryBuilder(configs, customSearchConfiguration); - _aggregationQueryBuilder = new 
AggregationQueryBuilder(configs, annotations); + _aggregationQueryBuilder = new AggregationQueryBuilder(configs, entitySearchAnnotations); _configs = configs; searchableFieldTypes = _entitySpecs.stream() @@ -153,12 +135,16 @@ public static SearchRequestHandler getBuilder( k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); } - private List getSearchableAnnotations() { + private Map> getSearchableAnnotations() { return _entitySpecs.stream() - .map(EntitySpec::getSearchableFieldSpecs) - .flatMap(List::stream) - .map(SearchableFieldSpec::getSearchableAnnotation) - .collect(Collectors.toList()); + .map( + spec -> + Pair.of( + spec, + spec.getSearchableFieldSpecs().stream() + .map(SearchableFieldSpec::getSearchableAnnotation) + .collect(Collectors.toList()))) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); } @VisibleForTesting @@ -171,16 +157,6 @@ private Set getDefaultQueryFieldNames(List annotat .collect(Collectors.toSet()); } - // If values are not equal, throw error - private BinaryOperator mapMerger() { - return (s1, s2) -> { - if (!StringUtils.equals(s1, s2)) { - throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); - } - return s1; - }; - } - public BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { return getFilterQuery(filter, searchableFieldTypes); } @@ -327,42 +303,6 @@ public SearchRequest getFilterRequest( return searchRequest; } - /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion - * to be applied to search results. - * - *
<p>
    TODO: Used in batch ingestion from ingestion scheduler - * - * @param filters {@link Filter} list of conditions with fields and values - * @param sortCriterion {@link SortCriterion} to be applied to the search results - * @param sort sort values from last result of previous request - * @param pitId the Point In Time Id of the previous request - * @param keepAlive string representation of time to keep point in time alive - * @param size the number of search hits to return - * @return {@link SearchRequest} that contains the filtered query - */ - @Nonnull - public SearchRequest getFilterRequest( - @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, - @Nullable Object[] sort, - @Nullable String pitId, - @Nonnull String keepAlive, - int size) { - SearchRequest searchRequest = new SearchRequest(); - - BoolQueryBuilder filterQuery = getFilterQuery(filters); - final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(filterQuery); - searchSourceBuilder.size(size); - - ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); - ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, _entitySpecs); - searchRequest.source(searchSourceBuilder); - - return searchRequest; - } - /** * Get search request to aggregate and get document counts per field value * @@ -558,7 +498,7 @@ private SearchResultMetadata extractSearchResultMetadata( new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); final List aggregationMetadataList = - extractAggregationMetadata(searchResponse, filter); + _aggregationQueryBuilder.extractAggregationMetadata(searchResponse, filter); searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); final List searchSuggestions = extractSearchSuggestions(searchResponse); @@ -588,301 +528,4 @@ private List extractSearchSuggestions(@Nonnull SearchResponse } return searchSuggestions; } - - private String computeDisplayName(String name) { - if (_filtersToDisplayName.containsKey(name)) { - return _filtersToDisplayName.get(name); - } else if (name.contains(AGGREGATION_SEPARATOR_CHAR)) { - return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)) - .map(_filtersToDisplayName::get) - .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); - } - return name; - } - - private List extractAggregationMetadata( - @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { - final List aggregationMetadataList = new ArrayList<>(); - if (searchResponse.getAggregations() == null) { - return addFiltersToAggregationMetadata(aggregationMetadataList, filter); - } - for (Map.Entry entry : - searchResponse.getAggregations().getAsMap().entrySet()) { - if (entry.getValue() instanceof ParsedTerms) { - processTermAggregations(entry, aggregationMetadataList); - } - if (entry.getValue() instanceof ParsedMissing) { - processMissingAggregations(entry, aggregationMetadataList); - } - } - return addFiltersToAggregationMetadata(aggregationMetadataList, filter); - } - - private void processTermAggregations( - final Map.Entry entry, - final List aggregationMetadataList) { - final Map oneTermAggResult = - extractTermAggregations( - (ParsedTerms) entry.getValue(), entry.getKey().equals(INDEX_VIRTUAL_FIELD)); - if (oneTermAggResult.isEmpty()) { - return; - } - final AggregationMetadata aggregationMetadata = - new AggregationMetadata() - .setName(entry.getKey()) - .setDisplayName(computeDisplayName(entry.getKey())) - .setAggregations(new LongMap(oneTermAggResult)) - .setFilterValues( - new 
FilterValueArray( - SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); - aggregationMetadataList.add(aggregationMetadata); - } - - private void processMissingAggregations( - final Map.Entry entry, - final List aggregationMetadataList) { - ParsedMissing parsedMissing = (ParsedMissing) entry.getValue(); - Long docCount = parsedMissing.getDocCount(); - LongMap longMap = new LongMap(); - longMap.put(entry.getKey(), docCount); - final AggregationMetadata aggregationMetadata = - new AggregationMetadata() - .setName(entry.getKey()) - .setDisplayName(computeDisplayName(entry.getKey())) - .setAggregations(longMap) - .setFilterValues( - new FilterValueArray(SearchUtil.convertToFilters(longMap, Collections.emptySet()))); - aggregationMetadataList.add(aggregationMetadata); - } - - @WithSpan - public static Map extractAggregationsFromResponse( - @Nonnull SearchResponse searchResponse, @Nonnull String aggregationName) { - if (searchResponse.getAggregations() == null) { - return Collections.emptyMap(); - } - - Aggregation aggregation = searchResponse.getAggregations().get(aggregationName); - if (aggregation == null) { - return Collections.emptyMap(); - } - if (aggregation instanceof ParsedTerms) { - return extractTermAggregations( - (ParsedTerms) aggregation, aggregationName.equals("_entityType")); - } else if (aggregation instanceof ParsedMissing) { - return Collections.singletonMap( - aggregation.getName(), ((ParsedMissing) aggregation).getDocCount()); - } - throw new UnsupportedOperationException( - "Unsupported aggregation type: " + aggregation.getClass().getName()); - } - - /** - * Adds nested sub-aggregation values to the aggregated results - * - * @param aggs The aggregations to traverse. Could be null (base case) - * @return A map from names to aggregation count values - */ - @Nonnull - private static Map recursivelyAddNestedSubAggs(@Nullable Aggregations aggs) { - final Map aggResult = new HashMap<>(); - - if (aggs != null) { - for (Map.Entry entry : aggs.getAsMap().entrySet()) { - if (entry.getValue() instanceof ParsedTerms) { - recurseTermsAgg((ParsedTerms) entry.getValue(), aggResult, false); - } else if (entry.getValue() instanceof ParsedMissing) { - recurseMissingAgg((ParsedMissing) entry.getValue(), aggResult); - } else { - throw new UnsupportedOperationException( - "Unsupported aggregation type: " + entry.getValue().getClass().getName()); - } - } - } - return aggResult; - } - - private static void recurseTermsAgg( - ParsedTerms terms, Map aggResult, boolean includeZeroes) { - List bucketList = terms.getBuckets(); - bucketList.forEach(bucket -> processTermBucket(bucket, aggResult, includeZeroes)); - } - - private static void processTermBucket( - Terms.Bucket bucket, Map aggResult, boolean includeZeroes) { - String key = bucket.getKeyAsString(); - // Gets filtered sub aggregation doc count if exist - Map subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); - subAggs.forEach( - (entryKey, entryValue) -> - aggResult.put( - String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, entryKey), entryValue)); - long docCount = bucket.getDocCount(); - if (includeZeroes || docCount > 0) { - aggResult.put(key, docCount); - } - } - - private static void recurseMissingAgg(ParsedMissing missing, Map aggResult) { - Map subAggs = recursivelyAddNestedSubAggs(missing.getAggregations()); - subAggs.forEach( - (key, value) -> - aggResult.put( - String.format("%s%s%s", missing.getName(), AGGREGATION_SEPARATOR_CHAR, key), - value)); - long docCount = missing.getDocCount(); - if 
(docCount > 0) { - aggResult.put(missing.getName(), docCount); - } - } - - /** - * Extracts term aggregations give a parsed term. - * - * @param terms an abstract parse term, input can be either ParsedStringTerms ParsedLongTerms - * @return a map with aggregation key and corresponding doc counts - */ - @Nonnull - private static Map extractTermAggregations( - @Nonnull ParsedTerms terms, boolean includeZeroes) { - - final Map aggResult = new HashMap<>(); - recurseTermsAgg(terms, aggResult, includeZeroes); - - return aggResult; - } - - /** Injects the missing conjunctive filters into the aggregations list. */ - public List addFiltersToAggregationMetadata( - @Nonnull final List originalMetadata, @Nullable final Filter filter) { - if (filter == null) { - return originalMetadata; - } - if (filter.getOr() != null) { - addOrFiltersToAggregationMetadata(filter.getOr(), originalMetadata); - } else if (filter.getCriteria() != null) { - addCriteriaFiltersToAggregationMetadata(filter.getCriteria(), originalMetadata); - } - return originalMetadata; - } - - void addOrFiltersToAggregationMetadata( - @Nonnull final ConjunctiveCriterionArray or, - @Nonnull final List originalMetadata) { - for (ConjunctiveCriterion conjunction : or) { - // For each item in the conjunction, inject an empty aggregation if necessary - addCriteriaFiltersToAggregationMetadata(conjunction.getAnd(), originalMetadata); - } - } - - private void addCriteriaFiltersToAggregationMetadata( - @Nonnull final CriterionArray criteria, - @Nonnull final List originalMetadata) { - for (Criterion criterion : criteria) { - addCriterionFiltersToAggregationMetadata(criterion, originalMetadata); - } - } - - private void addCriterionFiltersToAggregationMetadata( - @Nonnull final Criterion criterion, - @Nonnull final List aggregationMetadata) { - - // We should never see duplicate aggregation for the same field in aggregation metadata list. - final Map aggregationMetadataMap = - aggregationMetadata.stream() - .collect(Collectors.toMap(AggregationMetadata::getName, agg -> agg)); - - // Map a filter criterion to a facet field (e.g. domains.keyword -> domains) - final String finalFacetField = toFacetField(criterion.getField()); - - if (finalFacetField == null) { - log.warn( - String.format( - "Found invalid filter field for entity search. Invalid or unrecognized facet %s", - criterion.getField())); - return; - } - - // We don't want to add urn filters to the aggregations we return as a sidecar to search - // results. - // They are automatically added by searchAcrossLineage and we dont need them to show up in the - // filter panel. - if (finalFacetField.equals(URN_FILTER)) { - return; - } - - if (aggregationMetadataMap.containsKey(finalFacetField)) { - /* - * If we already have aggregations for the facet field, simply inject any missing values counts into the set. - * If there are no results for a particular facet value, it will NOT be in the original aggregation set returned by - * Elasticsearch. - */ - AggregationMetadata originalAggMetadata = aggregationMetadataMap.get(finalFacetField); - if (criterion.hasValues()) { - criterion - .getValues() - .forEach( - value -> - addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata)); - } else { - addMissingAggregationValueToAggregationMetadata(criterion.getValue(), originalAggMetadata); - } - } else { - /* - * If we do not have ANY aggregation for the facet field, then inject a new aggregation metadata object for the - * facet field. 
- * If there are no results for a particular facet, it will NOT be in the original aggregation set returned by - * Elasticsearch. - */ - aggregationMetadata.add( - buildAggregationMetadata( - finalFacetField, - _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField), - new LongMap( - criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))), - new FilterValueArray( - criterion.getValues().stream() - .map(value -> createFilterValue(value, 0L, true)) - .collect(Collectors.toList())))); - } - } - - private void addMissingAggregationValueToAggregationMetadata( - @Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) { - if (originalMetadata.getAggregations().entrySet().stream() - .noneMatch(entry -> value.equals(entry.getKey())) - || originalMetadata.getFilterValues().stream() - .noneMatch(entry -> entry.getValue().equals(value))) { - // No aggregation found for filtered value -- inject one! - originalMetadata.getAggregations().put(value, 0L); - originalMetadata.getFilterValues().add(createFilterValue(value, 0L, true)); - } - } - - private AggregationMetadata buildAggregationMetadata( - @Nonnull final String facetField, - @Nonnull final String displayName, - @Nonnull final LongMap aggValues, - @Nonnull final FilterValueArray filterValues) { - return new AggregationMetadata() - .setName(facetField) - .setDisplayName(displayName) - .setAggregations(aggValues) - .setFilterValues(filterValues); - } - - private List> getFacetFieldDisplayNameFromAnnotation( - @Nonnull final SearchableAnnotation annotation) { - final List> facetsFromAnnotation = new ArrayList<>(); - // Case 1: Default Keyword field - if (annotation.isAddToFilters()) { - facetsFromAnnotation.add(Pair.of(annotation.getFieldName(), annotation.getFilterName())); - } - // Case 2: HasX boolean field - if (annotation.isAddHasValuesToFilters() && annotation.getHasValuesFieldName().isPresent()) { - facetsFromAnnotation.add( - Pair.of(annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName())); - } - return facetsFromAnnotation; - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java index dcc59d0632954..2d60f3202b69f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.recommendation.candidatesource; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertEquals; @@ -11,6 +12,7 @@ import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.recommendation.RecommendationContent; import com.linkedin.metadata.recommendation.RecommendationParams; @@ -29,6 +31,7 @@ public class EntitySearchAggregationCandidateSourceTest { private EntitySearchService _entitySearchService = Mockito.mock(EntitySearchService.class); + private EntityRegistry entityRegistry = 
Mockito.mock(EntityRegistry.class); private EntitySearchAggregationSource _valueBasedCandidateSource; private EntitySearchAggregationSource _urnBasedCandidateSource; @@ -45,7 +48,7 @@ public void setup() { private EntitySearchAggregationSource buildCandidateSource( String identifier, boolean isValueUrn) { - return new EntitySearchAggregationSource(_entitySearchService) { + return new EntitySearchAggregationSource(_entitySearchService, entityRegistry) { @Override protected String getSearchFieldName() { return identifier; @@ -98,8 +101,7 @@ public void testWhenSearchServiceReturnsEmpty() { @Test public void testWhenSearchServiceReturnsValueResults() { // One result - Mockito.when( - _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L)); List candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); @@ -120,8 +122,7 @@ public void testWhenSearchServiceReturnsValueResults() { assertTrue(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); // Multiple result - Mockito.when( - _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L)); candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); @@ -160,7 +161,7 @@ public void testWhenSearchServiceReturnsUrnResults() { Urn testUrn1 = new TestEntityUrn("testUrn1", "testUrn1", "testUrn1"); Urn testUrn2 = new TestEntityUrn("testUrn2", "testUrn2", "testUrn2"); Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3"); - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) + Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testUrn"), eq(null), anyInt())) .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L)); List candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); @@ -181,7 +182,7 @@ public void testWhenSearchServiceReturnsUrnResults() { assertTrue(_urnBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); // Multiple result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) + Mockito.when(_entitySearchService.aggregateByValue(any(), eq("testUrn"), eq(null), anyInt())) .thenReturn( ImmutableMap.of( testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java index 9e8855622ced4..ed4c9db5db643 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.search.query.request; import static com.linkedin.metadata.utils.SearchUtil.*; +import static org.mockito.Mockito.mock; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.config.search.SearchConfiguration; +import 
com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder; import java.util.Collections; @@ -42,7 +45,8 @@ public void testGetDefaultAggregationsHasFields() { config.setMaxTermBucketSize(25); AggregationQueryBuilder builder = - new AggregationQueryBuilder(config, ImmutableList.of(annotation)); + new AggregationQueryBuilder( + config, ImmutableMap.of(mock(EntitySpec.class), ImmutableList.of(annotation))); List aggs = builder.getAggregations(); @@ -73,7 +77,8 @@ public void testGetDefaultAggregationsFields() { config.setMaxTermBucketSize(25); AggregationQueryBuilder builder = - new AggregationQueryBuilder(config, ImmutableList.of(annotation)); + new AggregationQueryBuilder( + config, ImmutableMap.of(mock(EntitySpec.class), ImmutableList.of(annotation))); List aggs = builder.getAggregations(); @@ -120,7 +125,9 @@ public void testGetSpecificAggregationsHasFields() { config.setMaxTermBucketSize(25); AggregationQueryBuilder builder = - new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2)); + new AggregationQueryBuilder( + config, + ImmutableMap.of(mock(EntitySpec.class), ImmutableList.of(annotation1, annotation2))); // Case 1: Ask for fields that should exist. List aggs = @@ -139,7 +146,9 @@ public void testAggregateOverStructuredProperty() { SearchConfiguration config = new SearchConfiguration(); config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder(config, List.of()); + AggregationQueryBuilder builder = + new AggregationQueryBuilder( + config, ImmutableMap.of(mock(EntitySpec.class), ImmutableList.of())); List aggs = builder.getAggregations(List.of("structuredProperties.ab.fgh.ten")); @@ -202,7 +211,9 @@ public void testAggregateOverFieldsAndStructProp() { config.setMaxTermBucketSize(25); AggregationQueryBuilder builder = - new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2)); + new AggregationQueryBuilder( + config, + ImmutableMap.of(mock(EntitySpec.class), ImmutableList.of(annotation1, annotation2))); // Aggregate over fields and structured properties List aggs = @@ -252,7 +263,8 @@ public void testMissingAggregation() { config.setMaxTermBucketSize(25); AggregationQueryBuilder builder = - new AggregationQueryBuilder(config, ImmutableList.of(annotation)); + new AggregationQueryBuilder( + config, ImmutableMap.of(mock(EntitySpec.class), ImmutableList.of(annotation))); List aggs = builder.getAggregations(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java index fbfd80f85ff4d..a7c2dde8b7d25 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/DomainsCandidateSourceFactory.java @@ -1,6 +1,7 @@ package com.linkedin.gms.factory.recommendation.candidatesource; import com.linkedin.gms.factory.search.EntitySearchServiceFactory; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.candidatesource.DomainsCandidateSource; import com.linkedin.metadata.search.EntitySearchService; import 
javax.annotation.Nonnull; @@ -20,7 +21,7 @@ public class DomainsCandidateSourceFactory { @Bean(name = "domainsCandidateSource") @Nonnull - protected DomainsCandidateSource getInstance() { - return new DomainsCandidateSource(entitySearchService); + protected DomainsCandidateSource getInstance(final EntityRegistry entityRegistry) { + return new DomainsCandidateSource(entitySearchService, entityRegistry); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java index fe5c2d03d1907..bc2520c2b4617 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java @@ -1,6 +1,7 @@ package com.linkedin.gms.factory.recommendation.candidatesource; import com.linkedin.gms.factory.search.EntitySearchServiceFactory; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.candidatesource.TopTagsSource; import com.linkedin.metadata.search.EntitySearchService; import javax.annotation.Nonnull; @@ -20,7 +21,7 @@ public class TopTagsCandidateSourceFactory { @Bean(name = "topTagsCandidateSource") @Nonnull - protected TopTagsSource getInstance() { - return new TopTagsSource(entitySearchService); + protected TopTagsSource getInstance(final EntityService entityService) { + return new TopTagsSource(entitySearchService, entityService); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java index 36c53936094ff..c8ad276eb3d86 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java @@ -1,6 +1,7 @@ package com.linkedin.gms.factory.recommendation.candidatesource; import com.linkedin.gms.factory.search.EntitySearchServiceFactory; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.candidatesource.TopTermsSource; import com.linkedin.metadata.search.EntitySearchService; import javax.annotation.Nonnull; @@ -20,7 +21,7 @@ public class TopTermsCandidateSourceFactory { @Bean(name = "topTermsCandidateSource") @Nonnull - protected TopTermsSource getInstance() { - return new TopTermsSource(entitySearchService); + protected TopTermsSource getInstance(final EntityService entityService) { + return new TopTermsSource(entitySearchService, entityService); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java index 9392f50b4749e..e34fa8ff1bde5 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java +++ 
b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.recommendation.candidatesource; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; @@ -13,8 +14,9 @@ public class DomainsCandidateSource extends EntitySearchAggregationSource { private static final String DOMAINS = "domains"; - public DomainsCandidateSource(EntitySearchService entitySearchService) { - super(entitySearchService); + public DomainsCandidateSource( + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { + super(entitySearchService, entityRegistry); } @Override diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java index a19909576d25b..8d6ccb22660fb 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java @@ -2,6 +2,8 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.recommendation.ContentParams; @@ -10,6 +12,7 @@ import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.SearchParams; import com.linkedin.metadata.search.EntitySearchService; +import com.linkedin.metadata.search.utils.QueryUtils; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.util.Collections; @@ -35,7 +38,8 @@ @Slf4j @RequiredArgsConstructor public abstract class EntitySearchAggregationSource implements RecommendationSource { - private final EntitySearchService _entitySearchService; + private final EntitySearchService entitySearchService; + private final EntityRegistry entityRegistry; /** Field to aggregate on */ protected abstract String getSearchFieldName(); @@ -69,8 +73,8 @@ protected boolean isValidCandidate(T candidate) { public List getRecommendations( @Nonnull Urn userUrn, @Nullable RecommendationRequestContext requestContext) { Map aggregationResult = - _entitySearchService.aggregateByValue( - getEntityNames(), getSearchFieldName(), null, getMaxContent()); + entitySearchService.aggregateByValue( + getEntityNames(entityRegistry), getSearchFieldName(), null, getMaxContent()); if (aggregationResult.isEmpty()) { return Collections.emptyList(); @@ -110,9 +114,11 @@ public List getRecommendations( .collect(Collectors.toList()); } - protected List getEntityNames() { + protected List getEntityNames(EntityRegistry entityRegistry) { // By default, no list is applied which means searching across entities. 
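(Editor's note on the change below: getEntityNames() previously returned null, which made aggregateByValue() fan out across every entity type in the index. The new implementation consults the EntityRegistry and keeps only entities that have at least one searchable field marked queryByDefault, so recommendation aggregations stay aligned with what default search actually covers. A minimal Python sketch of that filtering idea, using simplified stand-ins for the Java EntitySpec and SearchableAnnotation models — the class shapes and names here are illustrative, not the real API:)

    from dataclasses import dataclass, field
    from typing import Dict, List

    @dataclass
    class SearchableAnnotation:  # simplified stand-in for the Java annotation model
        field_name: str
        query_by_default: bool = False

    @dataclass
    class EntitySpec:  # simplified stand-in for the Java entity spec
        name: str
        searchable_annotations: List[SearchableAnnotation] = field(default_factory=list)

    def query_by_default_entity_names(registry: Dict[str, EntitySpec]) -> List[str]:
        # Keep entities with at least one queryByDefault searchable field,
        # mirroring what QueryUtils.getQueryByDefaultEntitySpecs does below.
        return [
            spec.name
            for spec in registry.values()
            if any(a.query_by_default for a in spec.searchable_annotations)
        ]

    registry = {
        "dataset": EntitySpec("dataset", [SearchableAnnotation("name", True)]),
        "dataProcessInstance": EntitySpec("dataProcessInstance", [SearchableAnnotation("name")]),
    }
    assert query_by_default_entity_names(registry) == ["dataset"]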
- return null; + return QueryUtils.getQueryByDefaultEntitySpecs(entityRegistry).stream() + .map(EntitySpec::getName) + .collect(Collectors.toList()); } // Get top K entries with the most count diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java index 3012e35baa607..aecd9bbbf769c 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java @@ -37,11 +37,12 @@ public class TopPlatformsSource extends EntitySearchAggregationSource { Constants.CONTAINER_ENTITY_NAME, Constants.NOTEBOOK_ENTITY_NAME); - private final EntityService _entityService; + private final EntityService _entityService; private static final String PLATFORM = "platform"; - public TopPlatformsSource(EntityService entityService, EntitySearchService entitySearchService) { - super(entitySearchService); + public TopPlatformsSource( + EntityService entityService, EntitySearchService entitySearchService) { + super(entitySearchService, entityService.getEntityRegistry()); _entityService = entityService; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java index 317f956e1ca8a..0897d441335fa 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.recommendation.candidatesource; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; @@ -13,8 +14,8 @@ public class TopTagsSource extends EntitySearchAggregationSource { private static final String TAGS = "tags"; - public TopTagsSource(EntitySearchService entitySearchService) { - super(entitySearchService); + public TopTagsSource(EntitySearchService entitySearchService, EntityService entityService) { + super(entitySearchService, entityService.getEntityRegistry()); } @Override diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java index 6cdb5fdb65911..0fab9a28b51ea 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.recommendation.candidatesource; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.recommendation.RecommendationRenderType; import com.linkedin.metadata.recommendation.RecommendationRequestContext; import com.linkedin.metadata.recommendation.ScenarioType; @@ -13,8 +14,8 @@ 
public class TopTermsSource extends EntitySearchAggregationSource { private static final String TERMS = "glossaryTerms"; - public TopTermsSource(EntitySearchService entitySearchService) { - super(entitySearchService); + public TopTermsSource(EntitySearchService entitySearchService, EntityService entityService) { + super(entitySearchService, entityService.getEntityRegistry()); } @Override diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java index 842cc51e11777..a148a45b20e0c 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java @@ -7,6 +7,10 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; import com.linkedin.metadata.aspect.AspectVersion; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.SearchableFieldSpec; +import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -15,6 +19,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.util.Pair; import java.util.Collections; import java.util.List; import java.util.Map; @@ -174,4 +179,20 @@ public static Filter getFilterFromCriteria(List criteria) { new ConjunctiveCriterionArray( new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); } + + public static List getQueryByDefaultEntitySpecs(EntityRegistry entityRegistry) { + return entityRegistry.getEntitySpecs().values().stream() + .map( + spec -> + Pair.of( + spec, + spec.getSearchableFieldSpecs().stream() + .map(SearchableFieldSpec::getSearchableAnnotation) + .collect(Collectors.toList()))) + .filter( + specPair -> + specPair.getSecond().stream().anyMatch(SearchableAnnotation::isQueryByDefault)) + .map(Pair::getFirst) + .collect(Collectors.toList()); + } } From 533130408a28c036f4bdf4c2d7289311d28bf906 Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Fri, 2 Feb 2024 02:17:09 +0530 Subject: [PATCH 471/792] feat(ingestion/redshift): collapse lineage to permanent table (#9704) Co-authored-by: Harshal Sheth Co-authored-by: treff7es --- .../src/datahub/emitter/mce_builder.py | 1 + .../src/datahub/ingestion/api/common.py | 2 +- .../ingestion/source/redshift/config.py | 17 +- .../ingestion/source/redshift/lineage.py | 554 ++++++++++++++- .../ingestion/source/redshift/query.py | 136 +++- .../source/redshift/redshift_schema.py | 76 +- .../ingestion/source/redshift/report.py | 2 + .../source/snowflake/snowflake_config.py | 11 +- .../src/datahub/utilities/sqlglot_lineage.py | 8 + .../tests/unit/redshift_query_mocker.py | 104 +++ .../tests/unit/test_redshift_lineage.py | 663 +++++++++++++++++- 11 files changed, 1515 insertions(+), 59 deletions(-) create mode 100644 metadata-ingestion/tests/unit/redshift_query_mocker.py diff --git a/metadata-ingestion/src/datahub/emitter/mce_builder.py b/metadata-ingestion/src/datahub/emitter/mce_builder.py index 9da1b0ab56f89..fe9ecee8f80d0 100644 
--- a/metadata-ingestion/src/datahub/emitter/mce_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mce_builder.py @@ -1,4 +1,5 @@ """Convenience functions for creating MCEs""" + import hashlib import json import logging diff --git a/metadata-ingestion/src/datahub/ingestion/api/common.py b/metadata-ingestion/src/datahub/ingestion/api/common.py index a6761a3c77d5e..097859939cfea 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/common.py +++ b/metadata-ingestion/src/datahub/ingestion/api/common.py @@ -64,7 +64,7 @@ def _set_dataset_urn_to_lower_if_needed(self) -> None: # TODO: Get rid of this function once lower-casing is the standard. if self.graph: server_config = self.graph.get_config() - if server_config and server_config.get("datasetUrnNameCasing"): + if server_config and server_config.get("datasetUrnNameCasing") is True: set_dataset_urn_to_lower(True) def register_checkpointer(self, committable: Committable) -> None: diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 540adbf4bfd15..fe66ef006ec69 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -94,10 +94,10 @@ class RedshiftConfig( description="The default schema to use if the sql parser fails to parse the schema with `sql_based` lineage collector", ) - include_table_lineage: Optional[bool] = Field( + include_table_lineage: bool = Field( default=True, description="Whether table lineage should be ingested." ) - include_copy_lineage: Optional[bool] = Field( + include_copy_lineage: bool = Field( default=True, description="Whether lineage should be collected from copy commands", ) @@ -107,17 +107,15 @@ class RedshiftConfig( description="Generate usage statistic. email_domain config parameter needs to be set if enabled", ) - include_unload_lineage: Optional[bool] = Field( + include_unload_lineage: bool = Field( default=True, description="Whether lineage should be collected from unload commands", ) - capture_lineage_query_parser_failures: Optional[bool] = Field( - hide_from_schema=True, + include_table_rename_lineage: bool = Field( default=False, - description="Whether to capture lineage query parser errors with dataset properties for debugging", + description="Whether we should follow `alter table ... rename to` statements when computing lineage. ", ) - table_lineage_mode: Optional[LineageMode] = Field( default=LineageMode.STL_SCAN_BASED, description="Which table lineage collector mode to use. Available modes are: [stl_scan_based, sql_based, mixed]", @@ -139,6 +137,11 @@ class RedshiftConfig( description="When enabled, emits lineage as incremental to existing lineage already in DataHub. When disabled, re-states lineage on each run. 
This config works with rest-sink only.", ) + resolve_temp_table_in_lineage: bool = Field( + default=False, + description="Whether to resolve temp table appear in lineage to upstream permanent tables.", + ) + @root_validator(pre=True) def check_email_is_set_on_usage(cls, values): if values.get("include_usage_statistics"): diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index 3efef58737c6e..898e6db0b14b0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -4,11 +4,12 @@ from dataclasses import dataclass, field from datetime import datetime from enum import Enum -from typing import Dict, List, Optional, Set, Tuple, Union +from typing import Dict, List, Optional, Set, Tuple, Union, cast from urllib.parse import urlparse import humanfriendly import redshift_connector +import sqlglot import datahub.emitter.mce_builder as builder import datahub.utilities.sqlglot_lineage as sqlglot_l @@ -24,17 +25,24 @@ RedshiftSchema, RedshiftTable, RedshiftView, + TempTableRow, ) from datahub.ingestion.source.redshift.report import RedshiftReport from datahub.ingestion.source.state.redundant_run_skip_handler import ( RedundantLineageRunSkipHandler, ) +from datahub.metadata._schema_classes import SchemaFieldDataTypeClass from datahub.metadata.com.linkedin.pegasus2avro.dataset import ( FineGrainedLineage, FineGrainedLineageDownstreamType, FineGrainedLineageUpstreamType, UpstreamLineage, ) +from datahub.metadata.com.linkedin.pegasus2avro.schema import ( + OtherSchema, + SchemaField, + SchemaMetadata, +) from datahub.metadata.schema_classes import ( DatasetLineageTypeClass, UpstreamClass, @@ -111,6 +119,34 @@ def merge_lineage( self.cll = self.cll or None +def parse_alter_table_rename(default_schema: str, query: str) -> Tuple[str, str, str]: + """ + Parses an ALTER TABLE ... RENAME TO ... query and returns the schema, previous table name, and new table name. 
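(Editor's sketch of the parse this function performs, runnable on its own. It mirrors the body below and assumes the sqlglot version pinned by this patch series, where ALTER ... RENAME parses to exp.AlterTable with a RenameTable action; later sqlglot releases rename these node types.)

    import sqlglot
    from sqlglot import exp

    query = "alter table second_schema.storage_v2_stg rename to storage_v2"
    parsed = sqlglot.parse_one(query, dialect="redshift")

    assert isinstance(parsed, exp.AlterTable)
    rename = parsed.args["actions"][0]
    assert isinstance(rename, exp.RenameTable)

    # The schema falls back to the configured default when the statement is unqualified.
    print(parsed.this.db or "public", parsed.this.name, rename.this.name)
    # second_schema storage_v2_stg storage_v2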
+ """ + + parsed_query = sqlglot.parse_one(query, dialect="redshift") + assert isinstance(parsed_query, sqlglot.exp.AlterTable) + prev_name = parsed_query.this.name + rename_clause = parsed_query.args["actions"][0] + assert isinstance(rename_clause, sqlglot.exp.RenameTable) + new_name = rename_clause.this.name + + schema = parsed_query.this.db or default_schema + + return schema, prev_name, new_name + + +def split_qualified_table_name(urn: str) -> Tuple[str, str, str]: + qualified_table_name = dataset_urn.DatasetUrn.create_from_string( + urn + ).get_entity_id()[1] + + # -3 because platform instance is optional and that can cause the split to have more than 3 elements + db, schema, table = qualified_table_name.split(".")[-3:] + + return db, schema, table + + class RedshiftLineageExtractor: def __init__( self, @@ -130,6 +166,95 @@ def __init__( self.report.lineage_end_time, ) = self.get_time_window() + self.temp_tables: Dict[str, TempTableRow] = {} + + def _init_temp_table_schema( + self, database: str, temp_tables: List[TempTableRow] + ) -> None: + if self.context.graph is None: # to silent lint + return + + schema_resolver: sqlglot_l.SchemaResolver = ( + self.context.graph._make_schema_resolver( + platform=LineageDatasetPlatform.REDSHIFT.value, + platform_instance=self.config.platform_instance, + env=self.config.env, + ) + ) + + dataset_vs_columns: Dict[str, List[SchemaField]] = {} + # prepare dataset_urn vs List of schema fields + for table in temp_tables: + logger.debug( + f"Processing temp table: {table.create_command} with query text {table.query_text}" + ) + result = sqlglot_l.create_lineage_sql_parsed_result( + platform=LineageDatasetPlatform.REDSHIFT.value, + platform_instance=self.config.platform_instance, + env=self.config.env, + default_db=database, + default_schema=self.config.default_schema, + query=table.query_text, + graph=self.context.graph, + ) + + if ( + result is None + or result.column_lineage is None + or result.query_type != sqlglot_l.QueryType.CREATE + or not result.out_tables + ): + logger.debug(f"Unsupported temp table query found: {table.query_text}") + continue + + table.parsed_result = result + if result.column_lineage[0].downstream.table: + table.urn = result.column_lineage[0].downstream.table + + self.temp_tables[result.out_tables[0]] = table + + for table in self.temp_tables.values(): + if ( + table.parsed_result is None + or table.parsed_result.column_lineage is None + ): + continue + for column_lineage in table.parsed_result.column_lineage: + if column_lineage.downstream.table not in dataset_vs_columns: + dataset_vs_columns[cast(str, column_lineage.downstream.table)] = [] + # Initialise the temp table urn, we later need this to merge CLL + + dataset_vs_columns[cast(str, column_lineage.downstream.table)].append( + SchemaField( + fieldPath=column_lineage.downstream.column, + type=cast( + SchemaFieldDataTypeClass, + column_lineage.downstream.column_type, + ), + nativeDataType=cast( + str, column_lineage.downstream.native_column_type + ), + ) + ) + + # Add datasets, and it's respective fields in schema_resolver, so that later schema_resolver would be able + # correctly generates the upstreams for temporary tables + for urn in dataset_vs_columns: + db, schema, table_name = split_qualified_table_name(urn) + schema_resolver.add_schema_metadata( + urn=urn, + schema_metadata=SchemaMetadata( + schemaName=table_name, + platform=builder.make_data_platform_urn( + LineageDatasetPlatform.REDSHIFT.value + ), + version=0, + hash="", + 
platformSchema=OtherSchema(rawSchema=""), + fields=dataset_vs_columns[urn], + ), + ) + def get_time_window(self) -> Tuple[datetime, datetime]: if self.redundant_run_skip_handler: self.report.stateful_lineage_ingestion_enabled = True @@ -157,25 +282,32 @@ def _get_s3_path(self, path: str) -> str: return path def _get_sources_from_query( - self, db_name: str, query: str + self, + db_name: str, + query: str, + parsed_result: Optional[sqlglot_l.SqlParsingResult] = None, ) -> Tuple[List[LineageDataset], Optional[List[sqlglot_l.ColumnLineageInfo]]]: sources: List[LineageDataset] = list() - parsed_result: Optional[ - sqlglot_l.SqlParsingResult - ] = sqlglot_l.create_lineage_sql_parsed_result( - query=query, - platform=LineageDatasetPlatform.REDSHIFT.value, - platform_instance=self.config.platform_instance, - default_db=db_name, - default_schema=str(self.config.default_schema), - graph=self.context.graph, - env=self.config.env, - ) + if parsed_result is None: + parsed_result = sqlglot_l.create_lineage_sql_parsed_result( + query=query, + platform=LineageDatasetPlatform.REDSHIFT.value, + platform_instance=self.config.platform_instance, + default_db=db_name, + default_schema=str(self.config.default_schema), + graph=self.context.graph, + env=self.config.env, + ) if parsed_result is None: logger.debug(f"native query parsing failed for {query}") return sources, None + elif parsed_result.debug_info.table_error: + logger.debug( + f"native query parsing failed for {query} with error: {parsed_result.debug_info.table_error}" + ) + return sources, None logger.debug(f"parsed_result = {parsed_result}") @@ -277,7 +409,7 @@ def _populate_lineage_map( database: str, lineage_type: LineageCollectorType, connection: redshift_connector.Connection, - all_tables: Dict[str, Dict[str, List[Union[RedshiftView, RedshiftTable]]]], + all_tables_set: Dict[str, Dict[str, Set[str]]], ) -> None: """ This method generate table level lineage based with the given query. @@ -292,7 +424,10 @@ def _populate_lineage_map( return: The method does not return with anything as it directly modify the self._lineage_map property. 
:rtype: None """ + + logger.info(f"Extracting {lineage_type.name} lineage for db {database}") try: + logger.debug(f"Processing lineage query: {query}") cll: Optional[List[sqlglot_l.ColumnLineageInfo]] = None raw_db_name = database alias_db_name = self.config.database @@ -301,11 +436,18 @@ def _populate_lineage_map( conn=connection, query=query ): target = self._get_target_lineage( - alias_db_name, lineage_row, lineage_type + alias_db_name, + lineage_row, + lineage_type, + all_tables_set=all_tables_set, ) if not target: continue + logger.debug( + f"Processing {lineage_type.name} lineage row: {lineage_row}" + ) + sources, cll = self._get_sources( lineage_type, alias_db_name, @@ -318,9 +460,12 @@ def _populate_lineage_map( target.upstreams.update( self._get_upstream_lineages( sources=sources, - all_tables=all_tables, + target_table=target.dataset.urn, + target_dataset_cll=cll, + all_tables_set=all_tables_set, alias_db_name=alias_db_name, raw_db_name=raw_db_name, + connection=connection, ) ) target.cll = cll @@ -344,21 +489,50 @@ def _populate_lineage_map( ) self.report_status(f"extract-{lineage_type.name}", False) + def _update_lineage_map_for_table_renames( + self, table_renames: Dict[str, str] + ) -> None: + if not table_renames: + return + + logger.info(f"Updating lineage map for {len(table_renames)} table renames") + for new_table_urn, prev_table_urn in table_renames.items(): + # This table was renamed from some other name, copy in the lineage + # for the previous name as well. + prev_table_lineage = self._lineage_map.get(prev_table_urn) + if prev_table_lineage: + logger.debug( + f"including lineage for {prev_table_urn} in {new_table_urn} due to table rename" + ) + self._lineage_map[new_table_urn].merge_lineage( + upstreams=prev_table_lineage.upstreams, + cll=prev_table_lineage.cll, + ) + def _get_target_lineage( self, alias_db_name: str, lineage_row: LineageRow, lineage_type: LineageCollectorType, + all_tables_set: Dict[str, Dict[str, Set[str]]], ) -> Optional[LineageItem]: if ( lineage_type != LineageCollectorType.UNLOAD and lineage_row.target_schema and lineage_row.target_table ): - if not self.config.schema_pattern.allowed( - lineage_row.target_schema - ) or not self.config.table_pattern.allowed( - f"{alias_db_name}.{lineage_row.target_schema}.{lineage_row.target_table}" + if ( + not self.config.schema_pattern.allowed(lineage_row.target_schema) + or not self.config.table_pattern.allowed( + f"{alias_db_name}.{lineage_row.target_schema}.{lineage_row.target_table}" + ) + ) and not ( + # We also check the all_tables_set, since this might be a renamed table + # that we don't want to drop lineage for. 
+ alias_db_name in all_tables_set + and lineage_row.target_schema in all_tables_set[alias_db_name] + and lineage_row.target_table + in all_tables_set[alias_db_name][lineage_row.target_schema] ): return None # Target @@ -400,18 +574,19 @@ def _get_target_lineage( def _get_upstream_lineages( self, sources: List[LineageDataset], - all_tables: Dict[str, Dict[str, List[Union[RedshiftView, RedshiftTable]]]], + target_table: str, + all_tables_set: Dict[str, Dict[str, Set[str]]], alias_db_name: str, raw_db_name: str, + connection: redshift_connector.Connection, + target_dataset_cll: Optional[List[sqlglot_l.ColumnLineageInfo]], ) -> List[LineageDataset]: - targe_source = [] + target_source = [] + probable_temp_tables: List[str] = [] + for source in sources: if source.platform == LineageDatasetPlatform.REDSHIFT: - qualified_table_name = dataset_urn.DatasetUrn.create_from_string( - source.urn - ).get_entity_id()[1] - # -3 because platform instance is optional and that can cause the split to have more than 3 elements - db, schema, table = qualified_table_name.split(".")[-3:] + db, schema, table = split_qualified_table_name(source.urn) if db == raw_db_name: db = alias_db_name path = f"{db}.{schema}.{table}" @@ -427,19 +602,40 @@ def _get_upstream_lineages( # Filtering out tables which does not exist in Redshift # It was deleted in the meantime or query parser did not capture well the table name + # Or it might be a temp table if ( - db not in all_tables - or schema not in all_tables[db] - or not any(table == t.name for t in all_tables[db][schema]) + db not in all_tables_set + or schema not in all_tables_set[db] + or table not in all_tables_set[db][schema] ): logger.debug( - f"{source.urn} missing table, dropping from lineage.", + f"{source.urn} missing table. Adding it to temp table list for target table {target_table}.", ) + probable_temp_tables.append(f"{schema}.{table}") self.report.num_lineage_tables_dropped += 1 continue - targe_source.append(source) - return targe_source + target_source.append(source) + + if probable_temp_tables and self.config.resolve_temp_table_in_lineage: + self.report.num_lineage_processed_temp_tables += len(probable_temp_tables) + # Generate lineage dataset from temporary tables + number_of_permanent_dataset_found: int = ( + self.update_table_and_column_lineage( + db_name=raw_db_name, + connection=connection, + temp_table_names=probable_temp_tables, + target_source_dataset=target_source, + target_dataset_cll=target_dataset_cll, + ) + ) + + logger.debug( + f"Number of permanent datasets found for {target_table} = {number_of_permanent_dataset_found} in " + f"temp tables {probable_temp_tables}" + ) + + return target_source def populate_lineage( self, @@ -447,8 +643,27 @@ def populate_lineage( connection: redshift_connector.Connection, all_tables: Dict[str, Dict[str, List[Union[RedshiftView, RedshiftTable]]]], ) -> None: + if self.config.resolve_temp_table_in_lineage: + self._init_temp_table_schema( + database=database, + temp_tables=self.get_temp_tables(connection=connection), + ) + populate_calls: List[Tuple[str, LineageCollectorType]] = [] + all_tables_set: Dict[str, Dict[str, Set[str]]] = { + db: {schema: {t.name for t in tables} for schema, tables in schemas.items()} + for db, schemas in all_tables.items() + } + + table_renames: Dict[str, str] = {} + if self.config.include_table_rename_lineage: + table_renames, all_tables_set = self._process_table_renames( + database=database, + connection=connection, + all_tables=all_tables_set, + ) + if self.config.table_lineage_mode in { 
LineageMode.STL_SCAN_BASED, LineageMode.MIXED, @@ -504,9 +719,12 @@ def populate_lineage( database=database, lineage_type=lineage_type, connection=connection, - all_tables=all_tables, + all_tables_set=all_tables_set, ) + # Handling for alter table statements. + self._update_lineage_map_for_table_renames(table_renames=table_renames) + self.report.lineage_mem_size[self.config.database] = humanfriendly.format_size( memory_footprint.total_size(self._lineage_map) ) @@ -613,3 +831,271 @@ def get_lineage( def report_status(self, step: str, status: bool) -> None: if self.redundant_run_skip_handler: self.redundant_run_skip_handler.report_current_run_status(step, status) + + def _process_table_renames( + self, + database: str, + connection: redshift_connector.Connection, + all_tables: Dict[str, Dict[str, Set[str]]], + ) -> Tuple[Dict[str, str], Dict[str, Dict[str, Set[str]]]]: + logger.info(f"Processing table renames for db {database}") + + # new urn -> prev urn + table_renames: Dict[str, str] = {} + + query = RedshiftQuery.alter_table_rename_query( + db_name=database, + start_time=self.start_time, + end_time=self.end_time, + ) + + for rename_row in RedshiftDataDictionary.get_alter_table_commands( + connection, query + ): + schema, prev_name, new_name = parse_alter_table_rename( + default_schema=self.config.default_schema, + query=rename_row.query_text, + ) + + prev_urn = make_dataset_urn_with_platform_instance( + platform=LineageDatasetPlatform.REDSHIFT.value, + platform_instance=self.config.platform_instance, + name=f"{database}.{schema}.{prev_name}", + env=self.config.env, + ) + new_urn = make_dataset_urn_with_platform_instance( + platform=LineageDatasetPlatform.REDSHIFT.value, + platform_instance=self.config.platform_instance, + name=f"{database}.{schema}.{new_name}", + env=self.config.env, + ) + + table_renames[new_urn] = prev_urn + + # We want to generate lineage for the previous name too. + all_tables[database][schema].add(prev_name) + + logger.info(f"Discovered {len(table_renames)} table renames") + return table_renames, all_tables + + def get_temp_tables( + self, connection: redshift_connector.Connection + ) -> List[TempTableRow]: + ddl_query: str = RedshiftQuery.temp_table_ddl_query( + start_time=self.config.start_time, + end_time=self.config.end_time, + ) + + logger.debug(f"Temporary table ddl query = {ddl_query}") + + temp_table_rows: List[TempTableRow] = [] + + for row in RedshiftDataDictionary.get_temporary_rows( + conn=connection, + query=ddl_query, + ): + temp_table_rows.append(row) + + return temp_table_rows + + def find_temp_tables( + self, temp_table_rows: List[TempTableRow], temp_table_names: List[str] + ) -> List[TempTableRow]: + matched_temp_tables: List[TempTableRow] = [] + + for table_name in temp_table_names: + prefixes = RedshiftQuery.get_temp_table_clause(table_name) + prefixes.extend( + RedshiftQuery.get_temp_table_clause(table_name.split(".")[-1]) + ) + + for row in temp_table_rows: + if any( + row.create_command.lower().startswith(prefix) for prefix in prefixes + ): + matched_temp_tables.append(row) + + return matched_temp_tables + + def resolve_column_refs( + self, column_refs: List[sqlglot_l.ColumnRef], depth: int = 0 + ) -> List[sqlglot_l.ColumnRef]: + """ + This method resolves the column reference to the original column reference. + For example, if the column reference is to a temporary table, it will be resolved to the original column + reference. 
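(Editor's note: the resolution below walks chains of temp tables — a temp table may itself be built from another temp table — and caps the recursion at a max depth to guard against cycles. A self-contained sketch of the same idea on plain (table, column) tuples; the table names and the TEMP_LINEAGE mapping are illustrative, standing in for the parsed CLL stored on each TempTableRow.)

    from typing import Dict, List, Tuple

    ColumnRef = Tuple[str, str]  # (table, column)

    # Temp-table column -> its upstream columns, as parsed from CREATE statements.
    TEMP_LINEAGE: Dict[ColumnRef, List[ColumnRef]] = {
        ("#player_price", "price_usd"): [("#player_activity_temp", "price_usd")],
        ("#player_activity_temp", "price_usd"): [("player_activity", "price")],
    }

    def resolve(ref: ColumnRef, depth: int = 0, max_depth: int = 10) -> List[ColumnRef]:
        # Follow temp-table hops until a permanent column is reached.
        if depth >= max_depth:  # cycle / runaway-chain guard
            return [ref]
        upstreams = TEMP_LINEAGE.get(ref)
        if upstreams is None:  # not a temp table: already permanent
            return [ref]
        out: List[ColumnRef] = []
        for up in upstreams:
            out.extend(resolve(up, depth + 1, max_depth))
        return out

    assert resolve(("#player_price", "price_usd")) == [("player_activity", "price")]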
+ """ + max_depth = 10 + + resolved_column_refs: List[sqlglot_l.ColumnRef] = [] + if not column_refs: + return column_refs + + if depth >= max_depth: + logger.warning( + f"Max depth reached for resolving temporary columns: {column_refs}" + ) + self.report.num_unresolved_temp_columns += 1 + return column_refs + + for ref in column_refs: + resolved = False + if ref.table in self.temp_tables: + table = self.temp_tables[ref.table] + if table.parsed_result and table.parsed_result.column_lineage: + for column_lineage in table.parsed_result.column_lineage: + if ( + column_lineage.downstream.table == ref.table + and column_lineage.downstream.column == ref.column + ): + resolved_column_refs.extend( + self.resolve_column_refs( + column_lineage.upstreams, depth=depth + 1 + ) + ) + resolved = True + break + # If we reach here, it means that we were not able to resolve the column reference. + if resolved is False: + logger.warning( + f"Unable to resolve column reference {ref} to a permanent table" + ) + else: + logger.debug( + f"Resolved column reference {ref} is not resolved because referenced table {ref.table} is not a temp table or not found. Adding reference as non-temp table. This is normal." + ) + resolved_column_refs.append(ref) + return resolved_column_refs + + def _update_target_dataset_cll( + self, + temp_table_urn: str, + target_dataset_cll: List[sqlglot_l.ColumnLineageInfo], + source_dataset_cll: List[sqlglot_l.ColumnLineageInfo], + ) -> None: + for target_column_lineage in target_dataset_cll: + upstreams: List[sqlglot_l.ColumnRef] = [] + # Look for temp_table_urn in upstream of column_lineage, if found then we need to replace it with + # column of permanent table + for target_column_ref in target_column_lineage.upstreams: + if target_column_ref.table == temp_table_urn: + # Look for column_ref.table and column_ref.column in downstream of source_dataset_cll. 
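(Editor's note: this is the splice step that actually collapses lineage — wherever the target table's CLL points at a temp-table column, that reference is replaced by the temp table's own, recursively resolved, upstreams, while non-temp references pass through untouched. A compact sketch of the substitution on simplified (table, column) tuples, with illustrative names:)

    from typing import Dict, List, Tuple

    ColumnRef = Tuple[str, str]

    # CLL from the temp table's CREATE statement (source_dataset_cll), already resolved.
    temp_cll: Dict[ColumnRef, List[ColumnRef]] = {
        ("#player_price", "price_usd"): [("player_activity", "price")],
    }

    # The target table's upstreams still reference the temp table...
    target_upstreams: List[ColumnRef] = [("#player_price", "price_usd"), ("dim_player", "name")]

    # ...swap temp refs for their permanent upstreams, keep the rest as-is.
    collapsed = [up for ref in target_upstreams for up in temp_cll.get(ref, [ref])]
    assert collapsed == [("player_activity", "price"), ("dim_player", "name")]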
+ # The source_dataset_cll contains CLL generated from create statement of temp table (temp_table_urn) + for source_column_lineage in source_dataset_cll: + if ( + source_column_lineage.downstream.table + == target_column_ref.table + and source_column_lineage.downstream.column + == target_column_ref.column + ): + resolved_columns = self.resolve_column_refs( + source_column_lineage.upstreams + ) + # Add all upstream of above temporary column into upstream of target column + upstreams.extend(resolved_columns) + continue + + upstreams.append(target_column_ref) + + if upstreams: + # update the upstreams + target_column_lineage.upstreams = upstreams + + def _add_permanent_datasets_recursively( + self, + db_name: str, + temp_table_rows: List[TempTableRow], + visited_tables: Set[str], + connection: redshift_connector.Connection, + permanent_lineage_datasets: List[LineageDataset], + target_dataset_cll: Optional[List[sqlglot_l.ColumnLineageInfo]], + ) -> None: + transitive_temp_tables: List[TempTableRow] = [] + + for temp_table in temp_table_rows: + logger.debug( + f"Processing temp table with transaction id: {temp_table.transaction_id} and query text {temp_table.query_text}" + ) + + intermediate_l_datasets, cll = self._get_sources_from_query( + db_name=db_name, + query=temp_table.query_text, + parsed_result=temp_table.parsed_result, + ) + + if ( + temp_table.urn is not None + and target_dataset_cll is not None + and cll is not None + ): # condition to silent the lint + self._update_target_dataset_cll( + temp_table_urn=temp_table.urn, + target_dataset_cll=target_dataset_cll, + source_dataset_cll=cll, + ) + + # make sure lineage dataset should not contain a temp table + # if such dataset is present then add it to transitive_temp_tables to resolve it to original permanent table + for lineage_dataset in intermediate_l_datasets: + db, schema, table = split_qualified_table_name(lineage_dataset.urn) + + if table in visited_tables: + # The table is already processed + continue + + # Check if table found is again a temp table + repeated_temp_table: List[TempTableRow] = self.find_temp_tables( + temp_table_rows=list(self.temp_tables.values()), + temp_table_names=[table], + ) + + if not repeated_temp_table: + logger.debug(f"Unable to find table {table} in temp tables.") + + if repeated_temp_table: + transitive_temp_tables.extend(repeated_temp_table) + visited_tables.add(table) + continue + + permanent_lineage_datasets.append(lineage_dataset) + + if transitive_temp_tables: + # recursive call + self._add_permanent_datasets_recursively( + db_name=db_name, + temp_table_rows=transitive_temp_tables, + visited_tables=visited_tables, + connection=connection, + permanent_lineage_datasets=permanent_lineage_datasets, + target_dataset_cll=target_dataset_cll, + ) + + def update_table_and_column_lineage( + self, + db_name: str, + temp_table_names: List[str], + connection: redshift_connector.Connection, + target_source_dataset: List[LineageDataset], + target_dataset_cll: Optional[List[sqlglot_l.ColumnLineageInfo]], + ) -> int: + permanent_lineage_datasets: List[LineageDataset] = [] + + temp_table_rows: List[TempTableRow] = self.find_temp_tables( + temp_table_rows=list(self.temp_tables.values()), + temp_table_names=temp_table_names, + ) + + visited_tables: Set[str] = set(temp_table_names) + + self._add_permanent_datasets_recursively( + db_name=db_name, + temp_table_rows=temp_table_rows, + visited_tables=visited_tables, + connection=connection, + permanent_lineage_datasets=permanent_lineage_datasets, + 
target_dataset_cll=target_dataset_cll, + ) + + target_source_dataset.extend(permanent_lineage_datasets) + + return len(permanent_lineage_datasets) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py index 92e36fffd6bb4..93beb5980ea62 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py @@ -1,9 +1,14 @@ from datetime import datetime +from typing import List redshift_datetime_format = "%Y-%m-%d %H:%M:%S" class RedshiftQuery: + CREATE_TEMP_TABLE_CLAUSE = "create temp table" + CREATE_TEMPORARY_TABLE_CLAUSE = "create temporary table" + CREATE_TABLE_CLAUSE = "create table" + list_databases: str = """SELECT datname FROM pg_database WHERE (datname <> ('padb_harvest')::name) AND (datname <> ('template0')::name) @@ -97,7 +102,7 @@ class RedshiftQuery: NULL as table_description FROM pg_catalog.svv_external_tables ORDER BY "schema", - "relname"; + "relname" """ list_columns: str = """ SELECT @@ -379,7 +384,8 @@ def list_insert_create_queries_sql( target_schema, target_table, username, - querytxt as ddl + query as query_id, + LISTAGG(CASE WHEN LEN(RTRIM(querytxt)) = 0 THEN querytxt ELSE RTRIM(querytxt) END) WITHIN GROUP (ORDER BY sequence) as ddl from ( select @@ -388,7 +394,9 @@ def list_insert_create_queries_sql( sti.table as target_table, sti.database as cluster, usename as username, - querytxt, + text as querytxt, + sq.query, + sequence, si.starttime as starttime from stl_insert as si @@ -396,19 +404,20 @@ def list_insert_create_queries_sql( sti.table_id = tbl left join svl_user_info sui on si.userid = sui.usesysid - left join stl_query sq on + left join STL_QUERYTEXT sq on si.query = sq.query left join stl_load_commits slc on slc.query = si.query where sui.usename <> 'rdsdb' - and sq.aborted = 0 and slc.query IS NULL and cluster = '{db_name}' and si.starttime >= '{start_time}' and si.starttime < '{end_time}' + and sequence < 320 ) as target_tables - order by cluster, target_schema, target_table, starttime asc + group by cluster, query_id, target_schema, target_table, username, starttime + order by cluster, query_id, target_schema, target_table, starttime asc """.format( # We need the original database name for filtering db_name=db_name, @@ -443,3 +452,118 @@ def list_copy_commands_sql( start_time=start_time.strftime(redshift_datetime_format), end_time=end_time.strftime(redshift_datetime_format), ) + + @staticmethod + def get_temp_table_clause(table_name: str) -> List[str]: + return [ + f"{RedshiftQuery.CREATE_TABLE_CLAUSE} {table_name}", + f"{RedshiftQuery.CREATE_TEMP_TABLE_CLAUSE} {table_name}", + f"{RedshiftQuery.CREATE_TEMPORARY_TABLE_CLAUSE} {table_name}", + ] + + @staticmethod + def temp_table_ddl_query(start_time: datetime, end_time: datetime) -> str: + start_time_str: str = start_time.strftime(redshift_datetime_format) + + end_time_str: str = end_time.strftime(redshift_datetime_format) + + return rf"""-- DataHub Redshift Source temp table DDL query + select + * + from + ( + select + session_id, + transaction_id, + start_time, + userid, + REGEXP_REPLACE(REGEXP_SUBSTR(REGEXP_REPLACE(query_text,'\\\\n','\\n'), '(CREATE(?:[\\n\\s\\t]+(?:temp|temporary))?(?:[\\n\\s\\t]+)table(?:[\\n\\s\\t]+)[^\\n\\s\\t()-]+)', 0, 1, 'ipe'),'[\\n\\s\\t]+',' ',1,'p') as create_command, + query_text, + row_number() over ( + partition by TRIM(query_text) + order by start_time desc + ) rn + from + ( + select + pid as 
session_id, + xid as transaction_id, + starttime as start_time, + type, + query_text, + userid + from + ( + select + starttime, + pid, + xid, + type, + userid, + LISTAGG(case + when LEN(RTRIM(text)) = 0 then text + else RTRIM(text) + end, + '') within group ( + order by sequence + ) as query_text + from + SVL_STATEMENTTEXT + where + type in ('DDL', 'QUERY') + AND starttime >= '{start_time_str}' + AND starttime < '{end_time_str}' + -- See https://stackoverflow.com/questions/72770890/redshift-result-size-exceeds-listagg-limit-on-svl-statementtext + AND sequence < 320 + group by + starttime, + pid, + xid, + type, + userid + order by + starttime, + pid, + xid, + type, + userid + asc) + where + type in ('DDL', 'QUERY') + ) + where + (create_command ilike 'create temp table %' + or create_command ilike 'create temporary table %' + -- we want to get all the create table statements and not just temp tables if non temp table is created and dropped in the same transaction + or create_command ilike 'create table %') + -- Redshift creates temp tables with the following names: volt_tt_%. We need to filter them out. + and query_text not ilike 'CREATE TEMP TABLE volt_tt_%' + and create_command not like 'CREATE TEMP TABLE volt_tt_' + -- We need to filter out our query and it was not possible earlier when we did not have any comment in the query + and query_text not ilike '%https://stackoverflow.com/questions/72770890/redshift-result-size-exceeds-listagg-limit-on-svl-statementtext%' + + ) + where + rn = 1; + """ + + @staticmethod + def alter_table_rename_query( + db_name: str, start_time: datetime, end_time: datetime + ) -> str: + start_time_str: str = start_time.strftime(redshift_datetime_format) + end_time_str: str = end_time.strftime(redshift_datetime_format) + + return f""" + SELECT transaction_id, + session_id, + start_time, + query_text + FROM sys_query_history SYS + WHERE SYS.status = 'success' + AND SYS.query_type = 'DDL' + AND SYS.database_name = '{db_name}' + AND SYS.start_time >= '{start_time_str}' + AND SYS.end_time < '{end_time_str}' + AND SYS.query_text ILIKE 'alter table % rename to %' + """ diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py index ca81682ae00e4..0ea073c050502 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py @@ -9,6 +9,7 @@ from datahub.ingestion.source.sql.sql_generic import BaseColumn, BaseTable from datahub.metadata.com.linkedin.pegasus2avro.schema import SchemaField from datahub.utilities.hive_schema_to_avro import get_schema_fields_for_hive_column +from datahub.utilities.sqlglot_lineage import SqlParsingResult logger: logging.Logger = logging.getLogger(__name__) @@ -80,6 +81,26 @@ class LineageRow: filename: Optional[str] +@dataclass +class TempTableRow: + transaction_id: int + session_id: str + query_text: str + create_command: str + start_time: datetime + urn: Optional[str] + parsed_result: Optional[SqlParsingResult] = None + + +@dataclass +class AlterTableRow: + # TODO unify this type with TempTableRow + transaction_id: int + session_id: str + query_text: str + start_time: datetime + + # this is a class to be a proxy to query Redshift class RedshiftDataDictionary: @staticmethod @@ -359,9 +380,62 @@ def get_lineage_rows( target_table=row[field_names.index("target_table")] if "target_table" in field_names else None, - 
ddl=row[field_names.index("ddl")] if "ddl" in field_names else None, + # See https://docs.aws.amazon.com/redshift/latest/dg/r_STL_QUERYTEXT.html + # for why we need to remove the \\n. + ddl=row[field_names.index("ddl")].replace("\\n", "\n") + if "ddl" in field_names + else None, filename=row[field_names.index("filename")] if "filename" in field_names else None, ) rows = cursor.fetchmany() + + @staticmethod + def get_temporary_rows( + conn: redshift_connector.Connection, + query: str, + ) -> Iterable[TempTableRow]: + cursor = conn.cursor() + + cursor.execute(query) + + field_names = [i[0] for i in cursor.description] + + rows = cursor.fetchmany() + while rows: + for row in rows: + yield TempTableRow( + transaction_id=row[field_names.index("transaction_id")], + session_id=row[field_names.index("session_id")], + # See https://docs.aws.amazon.com/redshift/latest/dg/r_STL_QUERYTEXT.html + # for why we need to replace the \n with a newline. + query_text=row[field_names.index("query_text")].replace( + r"\n", "\n" + ), + create_command=row[field_names.index("create_command")], + start_time=row[field_names.index("start_time")], + urn=None, + ) + rows = cursor.fetchmany() + + @staticmethod + def get_alter_table_commands( + conn: redshift_connector.Connection, + query: str, + ) -> Iterable[AlterTableRow]: + # TODO: unify this with get_temporary_rows + cursor = RedshiftDataDictionary.get_query_result(conn, query) + + field_names = [i[0] for i in cursor.description] + + rows = cursor.fetchmany() + while rows: + for row in rows: + yield AlterTableRow( + transaction_id=row[field_names.index("transaction_id")], + session_id=row[field_names.index("session_id")], + query_text=row[field_names.index("query_text")], + start_time=row[field_names.index("start_time")], + ) + rows = cursor.fetchmany() diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py index 333c851650fb3..36ac7955f15d5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py @@ -35,6 +35,7 @@ class RedshiftReport(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowRep num_lineage_tables_dropped: int = 0 num_lineage_dropped_query_parser: int = 0 num_lineage_dropped_not_support_copy_path: int = 0 + num_lineage_processed_temp_tables = 0 lineage_start_time: Optional[datetime] = None lineage_end_time: Optional[datetime] = None @@ -43,6 +44,7 @@ class RedshiftReport(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowRep usage_start_time: Optional[datetime] = None usage_end_time: Optional[datetime] = None stateful_usage_ingestion_enabled: bool = False + num_unresolved_temp_columns: int = 0 def report_dropped(self, key: str) -> None: self.filtered.append(key) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py index b896df1fa340e..aad4a6ed27cb8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py @@ -140,7 +140,9 @@ class SnowflakeV2Config( # This is required since access_history table does not capture whether the table was temporary table. temporary_tables_pattern: List[str] = Field( default=DEFAULT_TABLES_DENY_LIST, - description="[Advanced] Regex patterns for temporary tables to filter in lineage ingestion. 
Specify regex to match the entire table name in database.schema.table format. Defaults are to set in such a way to ignore the temporary staging tables created by known ETL tools.", + description="[Advanced] Regex patterns for temporary tables to filter in lineage ingestion. Specify regex to " + "match the entire table name in database.schema.table format. Defaults are to set in such a way " + "to ignore the temporary staging tables created by known ETL tools.", ) rename_upstreams_deny_pattern_to_temporary_table_pattern = pydantic_renamed_field( @@ -150,13 +152,16 @@ class SnowflakeV2Config( shares: Optional[Dict[str, SnowflakeShareConfig]] = Field( default=None, description="Required if current account owns or consumes snowflake share." - " If specified, connector creates lineage and siblings relationship between current account's database tables and consumer/producer account's database tables." + "If specified, connector creates lineage and siblings relationship between current account's database tables " + "and consumer/producer account's database tables." " Map of share name -> details of share.", ) email_as_user_identifier: bool = Field( default=True, - description="Format user urns as an email, if the snowflake user's email is set. If `email_domain` is provided, generates email addresses for snowflake users with unset emails, based on their username.", + description="Format user urns as an email, if the snowflake user's email is set. If `email_domain` is " + "provided, generates email addresses for snowflake users with unset emails, based on their " + "username.", ) @validator("convert_urns_to_lowercase") diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index abe4f82673777..5b063451df9cf 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -1037,6 +1037,14 @@ def _sqlglot_lineage_inner( default_db = default_db.upper() if default_schema: default_schema = default_schema.upper() + if _is_dialect_instance(dialect, "redshift") and not default_schema: + # On Redshift, there's no "USE SCHEMA " command. The default schema + # is public, and "current schema" is the one at the front of the search path. + # See https://docs.aws.amazon.com/redshift/latest/dg/r_search_path.html + # and https://stackoverflow.com/questions/9067335/how-does-the-search-path-influence-identifier-resolution-and-the-current-schema?noredirect=1&lq=1 + # default_schema = "public" + # TODO: Re-enable this. 
+ pass logger.debug("Parsing lineage from sql statement: %s", sql) statement = _parse_statement(sql, dialect=dialect) diff --git a/metadata-ingestion/tests/unit/redshift_query_mocker.py b/metadata-ingestion/tests/unit/redshift_query_mocker.py new file mode 100644 index 0000000000000..631e6e7ceaf1f --- /dev/null +++ b/metadata-ingestion/tests/unit/redshift_query_mocker.py @@ -0,0 +1,104 @@ +from datetime import datetime +from unittest.mock import MagicMock + + +def mock_temp_table_cursor(cursor: MagicMock) -> None: + cursor.description = [ + ["transaction_id"], + ["session_id"], + ["query_text"], + ["create_command"], + ["start_time"], + ] + + cursor.fetchmany.side_effect = [ + [ + ( + 126, + "abc", + "CREATE TABLE #player_price distkey(player_id) AS SELECT player_id, SUM(price) AS " + "price_usd from player_activity group by player_id", + "CREATE TABLE #player_price", + datetime.now(), + ) + ], + [ + # Empty result to stop the while loop + ], + ] + + +def mock_stl_insert_table_cursor(cursor: MagicMock) -> None: + cursor.description = [ + ["source_schema"], + ["source_table"], + ["target_schema"], + ["target_table"], + ["ddl"], + ] + + cursor.fetchmany.side_effect = [ + [ + ( + "public", + "#player_price", + "public", + "player_price_with_hike_v6", + "INSERT INTO player_price_with_hike_v6 SELECT (price_usd + 0.2 * price_usd) as price, '20%' FROM " + "#player_price", + ) + ], + [ + # Empty result to stop the while loop + ], + ] + + +query_vs_cursor_mocker = { + ( + "-- DataHub Redshift Source temp table DDL query\n select\n *\n " + "from\n (\n select\n session_id,\n " + " transaction_id,\n start_time,\n userid,\n " + " REGEXP_REPLACE(REGEXP_SUBSTR(REGEXP_REPLACE(query_text,'\\\\\\\\n','\\\\n'), '(CREATE(?:[" + "\\\\n\\\\s\\\\t]+(?:temp|temporary))?(?:[\\\\n\\\\s\\\\t]+)table(?:[\\\\n\\\\s\\\\t]+)[" + "^\\\\n\\\\s\\\\t()-]+)', 0, 1, 'ipe'),'[\\\\n\\\\s\\\\t]+',' ',1,'p') as create_command,\n " + " query_text,\n row_number() over (\n partition " + "by TRIM(query_text)\n order by start_time desc\n ) rn\n " + " from\n (\n select\n pid " + "as session_id,\n xid as transaction_id,\n starttime " + "as start_time,\n type,\n query_text,\n " + " userid\n from\n (\n " + "select\n starttime,\n pid,\n " + " xid,\n type,\n userid,\n " + " LISTAGG(case\n when LEN(RTRIM(text)) = 0 then text\n " + " else RTRIM(text)\n end,\n " + " '') within group (\n order by sequence\n " + " ) as query_text\n from\n " + "SVL_STATEMENTTEXT\n where\n type in ('DDL', " + "'QUERY')\n AND starttime >= '2024-01-01 12:00:00'\n " + " AND starttime < '2024-01-10 12:00:00'\n -- See " + "https://stackoverflow.com/questions/72770890/redshift-result-size-exceeds-listagg-limit-on-svl" + "-statementtext\n AND sequence < 320\n group by\n " + " starttime,\n pid,\n " + "xid,\n type,\n userid\n " + " order by\n starttime,\n pid,\n " + " xid,\n type,\n userid\n " + " asc)\n where\n type in ('DDL', " + "'QUERY')\n )\n where\n (create_command ilike " + "'create temp table %'\n or create_command ilike 'create temporary table %'\n " + " -- we want to get all the create table statements and not just temp tables " + "if non temp table is created and dropped in the same transaction\n or " + "create_command ilike 'create table %')\n -- Redshift creates temp tables with " + "the following names: volt_tt_%. 
We need to filter them out.\n and query_text not " + "ilike 'CREATE TEMP TABLE volt_tt_%'\n and create_command not like 'CREATE TEMP " + "TABLE volt_tt_'\n -- We need to filter out our query and it was not possible " + "earlier when we did not have any comment in the query\n and query_text not ilike " + "'%https://stackoverflow.com/questions/72770890/redshift-result-size-exceeds-listagg-limit-on-svl" + "-statementtext%'\n\n )\n where\n rn = 1;\n " + ): mock_temp_table_cursor, + "select * from test_collapse_temp_lineage": mock_stl_insert_table_cursor, +} + + +def mock_cursor(cursor: MagicMock, query: str) -> None: + query_vs_cursor_mocker[query](cursor=cursor) diff --git a/metadata-ingestion/tests/unit/test_redshift_lineage.py b/metadata-ingestion/tests/unit/test_redshift_lineage.py index db5af3a71efb9..6a3e6e47bd96a 100644 --- a/metadata-ingestion/tests/unit/test_redshift_lineage.py +++ b/metadata-ingestion/tests/unit/test_redshift_lineage.py @@ -1,8 +1,31 @@ +from datetime import datetime +from functools import partial +from typing import List +from unittest.mock import MagicMock + +import datahub.utilities.sqlglot_lineage as sqlglot_l from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.graph.client import DataHubGraph from datahub.ingestion.source.redshift.config import RedshiftConfig -from datahub.ingestion.source.redshift.lineage import RedshiftLineageExtractor +from datahub.ingestion.source.redshift.lineage import ( + LineageCollectorType, + LineageDataset, + LineageDatasetPlatform, + LineageItem, + RedshiftLineageExtractor, + parse_alter_table_rename, +) +from datahub.ingestion.source.redshift.redshift_schema import TempTableRow from datahub.ingestion.source.redshift.report import RedshiftReport -from datahub.utilities.sqlglot_lineage import ColumnLineageInfo, DownstreamColumnRef +from datahub.metadata._schema_classes import NumberTypeClass, SchemaFieldDataTypeClass +from datahub.utilities.sqlglot_lineage import ( + ColumnLineageInfo, + DownstreamColumnRef, + QueryType, + SqlParsingDebugInfo, + SqlParsingResult, +) +from tests.unit.redshift_query_mocker import mock_cursor def test_get_sources_from_query(): @@ -120,16 +143,45 @@ def test_get_sources_from_query_with_only_table(): ) -def test_cll(): - config = RedshiftConfig(host_port="localhost:5439", database="test") +def test_parse_alter_table_rename(): + assert parse_alter_table_rename("public", "alter table foo rename to bar") == ( + "public", + "foo", + "bar", + ) + assert parse_alter_table_rename( + "public", "alter table second_schema.storage_v2_stg rename to storage_v2; " + ) == ( + "second_schema", + "storage_v2_stg", + "storage_v2", + ) + + +def get_lineage_extractor() -> RedshiftLineageExtractor: + config = RedshiftConfig( + host_port="localhost:5439", + database="test", + resolve_temp_table_in_lineage=True, + start_time=datetime(2024, 1, 1, 12, 0, 0).isoformat() + "Z", + end_time=datetime(2024, 1, 10, 12, 0, 0).isoformat() + "Z", + ) report = RedshiftReport() + lineage_extractor = RedshiftLineageExtractor( + config, report, PipelineContext(run_id="foo", graph=mock_graph()) + ) + + return lineage_extractor + + +def test_cll(): test_query = """ select a,b,c from db.public.customer inner join db.public.order on db.public.customer.id = db.public.order.customer_id """ - lineage_extractor = RedshiftLineageExtractor( - config, report, PipelineContext(run_id="foo") - ) + + lineage_extractor = get_lineage_extractor() + _, cll = lineage_extractor._get_sources_from_query(db_name="db", query=test_query) 
assert cll == [ @@ -149,3 +201,600 @@ def test_cll(): logic=None, ), ] + + +def cursor_execute_side_effect(cursor: MagicMock, query: str) -> None: + mock_cursor(cursor=cursor, query=query) + + +def mock_redshift_connection() -> MagicMock: + connection = MagicMock() + + cursor = MagicMock() + + connection.cursor.return_value = cursor + + cursor.execute.side_effect = partial(cursor_execute_side_effect, cursor) + + return connection + + +def mock_graph() -> DataHubGraph: + + graph = MagicMock() + + graph._make_schema_resolver.return_value = sqlglot_l.SchemaResolver( + platform="redshift", + env="PROD", + platform_instance=None, + graph=None, + ) + + return graph + + +def test_collapse_temp_lineage(): + lineage_extractor = get_lineage_extractor() + + connection: MagicMock = mock_redshift_connection() + + lineage_extractor._init_temp_table_schema( + database=lineage_extractor.config.database, + temp_tables=lineage_extractor.get_temp_tables(connection=connection), + ) + + lineage_extractor._populate_lineage_map( + query="select * from test_collapse_temp_lineage", + database=lineage_extractor.config.database, + all_tables_set={ + lineage_extractor.config.database: {"public": {"player_price_with_hike_v6"}} + }, + connection=connection, + lineage_type=LineageCollectorType.QUERY_SQL_PARSER, + ) + + print(lineage_extractor._lineage_map) + + target_urn: str = "urn:li:dataset:(urn:li:dataPlatform:redshift,test.public.player_price_with_hike_v6,PROD)" + + assert lineage_extractor._lineage_map.get(target_urn) is not None + + lineage_item: LineageItem = lineage_extractor._lineage_map[target_urn] + + assert list(lineage_item.upstreams)[0].urn == ( + "urn:li:dataset:(urn:li:dataPlatform:redshift," + "test.public.player_activity,PROD)" + ) + + assert lineage_item.cll is not None + + assert lineage_item.cll[0].downstream.table == ( + "urn:li:dataset:(urn:li:dataPlatform:redshift," + "test.public.player_price_with_hike_v6,PROD)" + ) + + assert lineage_item.cll[0].downstream.column == "price" + + assert lineage_item.cll[0].upstreams[0].table == ( + "urn:li:dataset:(urn:li:dataPlatform:redshift," + "test.public.player_activity,PROD)" + ) + + assert lineage_item.cll[0].upstreams[0].column == "price" + + +def test_collapse_temp_recursive_cll_lineage(): + lineage_extractor = get_lineage_extractor() + + temp_table: TempTableRow = TempTableRow( + transaction_id=126, + query_text="CREATE TABLE #player_price distkey(player_id) AS SELECT player_id, SUM(price_usd) AS price_usd " + "from #player_activity_temp group by player_id", + start_time=datetime.now(), + session_id="abc", + create_command="CREATE TABLE #player_price", + parsed_result=SqlParsingResult( + query_type=QueryType.CREATE, + in_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)" + ], + out_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)" + ], + debug_info=SqlParsingDebugInfo(), + column_lineage=[ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="player_id", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="INTEGER", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="player_id", + ) + ], + logic=None, + ), + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + 
column="price_usd", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="BIGINT", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="price_usd", + ) + ], + logic=None, + ), + ], + ), + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + ) + + temp_table_activity: TempTableRow = TempTableRow( + transaction_id=127, + query_text="CREATE TABLE #player_activity_temp SELECT player_id, SUM(price) AS price_usd " + "from player_activity", + start_time=datetime.now(), + session_id="abc", + create_command="CREATE TABLE #player_activity_temp", + parsed_result=SqlParsingResult( + query_type=QueryType.CREATE, + in_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)" + ], + out_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)" + ], + debug_info=SqlParsingDebugInfo(), + column_lineage=[ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="player_id", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="INTEGER", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)", + column="player_id", + ) + ], + logic=None, + ), + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="price_usd", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="BIGINT", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)", + column="price", + ) + ], + logic=None, + ), + ], + ), + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + ) + + assert temp_table.urn + assert temp_table_activity.urn + + lineage_extractor.temp_tables[temp_table.urn] = temp_table + lineage_extractor.temp_tables[temp_table_activity.urn] = temp_table_activity + + target_dataset_cll: List[sqlglot_l.ColumnLineageInfo] = [ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v6,PROD)", + column="price", + column_type=SchemaFieldDataTypeClass(type=NumberTypeClass()), + native_column_type="DOUBLE PRECISION", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="price_usd", + ) + ], + logic=None, + ) + ] + + datasets = lineage_extractor._get_upstream_lineages( + sources=[ + LineageDataset( + platform=LineageDatasetPlatform.REDSHIFT, + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + ) + ], + target_table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v4,PROD)", + raw_db_name="dev", + alias_db_name="dev", + all_tables_set={ + "dev": { + "public": set(), + } + }, + connection=MagicMock(), + target_dataset_cll=target_dataset_cll, + ) + + assert len(datasets) == 1 + + assert ( + datasets[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)" + ) + + assert target_dataset_cll[0].upstreams[0].table == ( + "urn:li:dataset:(urn:li:dataPlatform:redshift," + "dev.public.player_activity,PROD)" + ) + assert 
target_dataset_cll[0].upstreams[0].column == "price"
+
+
+def test_collapse_temp_recursive_with_complex_column_cll_lineage():
+    lineage_extractor = get_lineage_extractor()
+
+    temp_table: TempTableRow = TempTableRow(
+        transaction_id=126,
+        query_text="CREATE TABLE #player_price distkey(player_id) AS SELECT player_id, SUM(price+tax) AS price_usd "
+        "from #player_activity_temp group by player_id",
+        start_time=datetime.now(),
+        session_id="abc",
+        create_command="CREATE TABLE #player_price",
+        parsed_result=SqlParsingResult(
+            query_type=QueryType.CREATE,
+            in_tables=[
+                "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)"
+            ],
+            out_tables=[
+                "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)"
+            ],
+            debug_info=SqlParsingDebugInfo(),
+            column_lineage=[
+                ColumnLineageInfo(
+                    downstream=DownstreamColumnRef(
+                        table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)",
+                        column="player_id",
+                        column_type=SchemaFieldDataTypeClass(NumberTypeClass()),
+                        native_column_type="INTEGER",
+                    ),
+                    upstreams=[
+                        sqlglot_l.ColumnRef(
+                            table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)",
+                            column="player_id",
+                        )
+                    ],
+                    logic=None,
+                ),
+                ColumnLineageInfo(
+                    downstream=DownstreamColumnRef(
+                        table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)",
+                        column="price_usd",
+                        column_type=SchemaFieldDataTypeClass(NumberTypeClass()),
+                        native_column_type="BIGINT",
+                    ),
+                    upstreams=[
+                        sqlglot_l.ColumnRef(
+                            table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)",
+                            column="price",
+                        ),
+                        sqlglot_l.ColumnRef(
+                            table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)",
+                            column="tax",
+                        ),
+                    ],
+                    logic=None,
+                ),
+            ],
+        ),
+        urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)",
+    )
+
+    temp_table_activity: TempTableRow = TempTableRow(
+        transaction_id=127,
+        query_text="CREATE TABLE #player_activity_temp SELECT player_id, price, tax "
+        "from player_activity",
+        start_time=datetime.now(),
+        session_id="abc",
+        create_command="CREATE TABLE #player_activity_temp",
+        parsed_result=SqlParsingResult(
+            query_type=QueryType.CREATE,
+            in_tables=[
+                "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)"
+            ],
+            out_tables=[
+                "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)"
+            ],
+            debug_info=SqlParsingDebugInfo(),
+            column_lineage=[
+                ColumnLineageInfo(
+                    downstream=DownstreamColumnRef(
+                        table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)",
+                        column="player_id",
+                        column_type=SchemaFieldDataTypeClass(NumberTypeClass()),
+                        native_column_type="INTEGER",
+                    ),
+                    upstreams=[
+                        sqlglot_l.ColumnRef(
+                            table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)",
+                            column="player_id",
+                        )
+                    ],
+                    logic=None,
+                ),
+                ColumnLineageInfo(
+                    downstream=DownstreamColumnRef(
+                        table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)",
+                        column="price",
+                        column_type=SchemaFieldDataTypeClass(NumberTypeClass()),
+                        native_column_type="BIGINT",
+                    ),
+                    upstreams=[
+                        sqlglot_l.ColumnRef(
+                            table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)",
+                            column="price",
+                        )
+                    ],
+                    logic=None,
+                ),
+                ColumnLineageInfo(
+                    downstream=DownstreamColumnRef(
+
table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="tax", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="BIGINT", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)", + column="tax", + ) + ], + logic=None, + ), + ], + ), + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + ) + assert temp_table.urn + assert temp_table_activity.urn + + lineage_extractor.temp_tables[temp_table.urn] = temp_table + lineage_extractor.temp_tables[temp_table_activity.urn] = temp_table_activity + + target_dataset_cll: List[sqlglot_l.ColumnLineageInfo] = [ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v6,PROD)", + column="price", + column_type=SchemaFieldDataTypeClass(type=NumberTypeClass()), + native_column_type="DOUBLE PRECISION", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="price_usd", + ) + ], + logic=None, + ), + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v6,PROD)", + column="player_id", + column_type=SchemaFieldDataTypeClass(type=NumberTypeClass()), + native_column_type="BIGINT", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="player_id", + ) + ], + logic=None, + ), + ] + + datasets = lineage_extractor._get_upstream_lineages( + sources=[ + LineageDataset( + platform=LineageDatasetPlatform.REDSHIFT, + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + ) + ], + target_table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v4,PROD)", + raw_db_name="dev", + alias_db_name="dev", + all_tables_set={ + "dev": { + "public": set(), + } + }, + connection=MagicMock(), + target_dataset_cll=target_dataset_cll, + ) + + assert len(datasets) == 1 + + assert ( + datasets[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)" + ) + + assert target_dataset_cll[0].upstreams[0].table == ( + "urn:li:dataset:(urn:li:dataPlatform:redshift," + "dev.public.player_activity,PROD)" + ) + assert target_dataset_cll[0].upstreams[0].column == "price" + assert target_dataset_cll[0].upstreams[1].column == "tax" + assert target_dataset_cll[1].upstreams[0].column == "player_id" + + +def test_collapse_temp_recursive_cll_lineage_with_circular_reference(): + lineage_extractor = get_lineage_extractor() + + temp_table: TempTableRow = TempTableRow( + transaction_id=126, + query_text="CREATE TABLE #player_price distkey(player_id) AS SELECT player_id, SUM(price_usd) AS price_usd " + "from #player_activity_temp group by player_id", + start_time=datetime.now(), + session_id="abc", + create_command="CREATE TABLE #player_price", + parsed_result=SqlParsingResult( + query_type=QueryType.CREATE, + in_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)" + ], + out_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)" + ], + debug_info=SqlParsingDebugInfo(), + column_lineage=[ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + 
table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="player_id", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="INTEGER", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="player_id", + ) + ], + logic=None, + ), + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="price_usd", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="BIGINT", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="price_usd", + ) + ], + logic=None, + ), + ], + ), + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + ) + + temp_table_activity: TempTableRow = TempTableRow( + transaction_id=127, + query_text="CREATE TABLE #player_activity_temp SELECT player_id, SUM(price) AS price_usd " + "from #player_price", + start_time=datetime.now(), + session_id="abc", + create_command="CREATE TABLE #player_activity_temp", + parsed_result=SqlParsingResult( + query_type=QueryType.CREATE, + in_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_activity,PROD)" + ], + out_tables=[ + "urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)" + ], + debug_info=SqlParsingDebugInfo(), + column_lineage=[ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="player_id", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="INTEGER", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="player_id", + ) + ], + logic=None, + ), + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="price_usd", + column_type=SchemaFieldDataTypeClass(NumberTypeClass()), + native_column_type="BIGINT", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + column="price_usd", + ) + ], + logic=None, + ), + ], + ), + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_activity_temp,PROD)", + ) + + assert temp_table.urn + assert temp_table_activity.urn + + lineage_extractor.temp_tables[temp_table.urn] = temp_table + lineage_extractor.temp_tables[temp_table_activity.urn] = temp_table_activity + + target_dataset_cll: List[sqlglot_l.ColumnLineageInfo] = [ + ColumnLineageInfo( + downstream=DownstreamColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v6,PROD)", + column="price", + column_type=SchemaFieldDataTypeClass(type=NumberTypeClass()), + native_column_type="DOUBLE PRECISION", + ), + upstreams=[ + sqlglot_l.ColumnRef( + table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + column="price_usd", + ) + ], + logic=None, + ) + ] + + datasets = lineage_extractor._get_upstream_lineages( + sources=[ + LineageDataset( + platform=LineageDatasetPlatform.REDSHIFT, + urn="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.#player_price,PROD)", + ) + ], + 
target_table="urn:li:dataset:(urn:li:dataPlatform:redshift,dev.public.player_price_with_hike_v4,PROD)", + raw_db_name="dev", + alias_db_name="dev", + all_tables_set={ + "dev": { + "public": set(), + } + }, + connection=MagicMock(), + target_dataset_cll=target_dataset_cll, + ) + + assert len(datasets) == 1 + # Here we only interested if it fails or not From 0e418b527e64b9314c2a4da1df7794b129ac21cb Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 1 Feb 2024 16:33:15 -0800 Subject: [PATCH 472/792] fix(ingest): upgrade pytest-docker (#9765) --- metadata-ingestion/setup.py | 2 +- .../tests/test_helpers/docker_helpers.py | 20 +++++++++++-------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index f8d51997330a9..d4e2ada1fc68f 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -468,7 +468,7 @@ pytest_dep, "pytest-asyncio>=0.16.0", "pytest-cov>=2.8.1", - "pytest-docker>=1.0.1", + "pytest-docker>=1.1.0", deepdiff_dep, "requests-mock", "freezegun", diff --git a/metadata-ingestion/tests/test_helpers/docker_helpers.py b/metadata-ingestion/tests/test_helpers/docker_helpers.py index 2eb61068196a2..bacb8d80b9e72 100644 --- a/metadata-ingestion/tests/test_helpers/docker_helpers.py +++ b/metadata-ingestion/tests/test_helpers/docker_helpers.py @@ -2,7 +2,7 @@ import logging import os import subprocess -from typing import Callable, Optional, Union +from typing import Callable, Iterator, List, Optional, Union import pytest import pytest_docker.plugin @@ -37,9 +37,11 @@ def wait_for_port( docker_services.wait_until_responsive( timeout=timeout, pause=pause, - check=checker - if checker - else lambda: is_responsive(container_name, container_port, hostname), + check=( + checker + if checker + else lambda: is_responsive(container_name, container_port, hostname) + ), ) logger.info(f"Container {container_name} is ready!") finally: @@ -62,14 +64,16 @@ def docker_compose_runner( ): @contextlib.contextmanager def run( - compose_file_path: Union[str, list], key: str, cleanup: bool = True - ) -> pytest_docker.plugin.Services: + compose_file_path: Union[str, List[str]], key: str, cleanup: bool = True + ) -> Iterator[pytest_docker.plugin.Services]: with pytest_docker.plugin.get_docker_services( docker_compose_command=docker_compose_command, - docker_compose_file=compose_file_path, + # We can remove the type ignore once this is merged: + # https://github.com/avast/pytest-docker/pull/108 + docker_compose_file=compose_file_path, # type: ignore docker_compose_project_name=f"{docker_compose_project_name}-{key}", docker_setup=docker_setup, - docker_cleanup=docker_cleanup if cleanup else False, + docker_cleanup=docker_cleanup if cleanup else [], ) as docker_services: yield docker_services From 63a23d784fc9993655267d18b19e7fc047b17888 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 2 Feb 2024 09:44:51 -0600 Subject: [PATCH 473/792] fix(patch): patch builder import TagUrn (#9764) --- .../metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java index ff34b187f6151..bfd10da37bb3f 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java @@ -4,7 +4,7 @@ import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; -import com.linkedin.common.TagUrn; +import com.linkedin.common.urn.TagUrn; import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; From 815176913c45e4f7ecb25b1732764996d03a9765 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Fri, 2 Feb 2024 15:55:36 -0500 Subject: [PATCH 474/792] feat(forms) Add support for metadata forms on entities in the UI (#9718) --- .../datahub/graphql/GmsGraphQLEngine.java | 3 +- .../src/main/resources/forms.graphql | 29 -- datahub-web-react/package.json | 1 + datahub-web-react/src/Mocks.tsx | 3 + datahub-web-react/src/app/entity/Entity.tsx | 7 +- .../src/app/entity/EntityRegistry.tsx | 7 +- .../src/app/entity/dataset/DatasetEntity.tsx | 84 ++--- .../containers/profile/EntityProfile.tsx | 6 +- .../profile/sidebar/EntityInfo/EntityInfo.tsx | 66 ++++ .../sidebar/FormInfo/CompletedView.tsx | 59 ++++ .../profile/sidebar/FormInfo/FormInfo.tsx | 48 +++ .../sidebar/FormInfo/IncompleteView.tsx | 65 ++++ .../FormInfo/OptionalPromptsRemaining.tsx | 23 ++ .../FormInfo/RequiredPromptsRemaining.tsx | 15 + .../FormInfo/SidebarFormInfoWrapper.tsx | 56 +++ .../FormInfo/VerificationAuditStamp.tsx | 26 ++ .../sidebar/FormInfo/__tests__/utils.test.ts | 187 ++++++++++ .../profile/sidebar/FormInfo/components.ts | 60 ++++ .../sidebar/FormInfo/useGetPromptInfo.ts | 38 ++ .../sidebar/FormInfo/useIsUserAssigned.ts | 24 ++ .../profile/sidebar/FormInfo/utils.ts | 324 +++++++++++++++++ .../entity/shared/entityForm/EntityForm.tsx | 15 + .../shared/entityForm/EntityFormContext.tsx | 54 +++ .../entityForm/EntityFormContextProvider.tsx | 78 +++++ .../shared/entityForm/EntityFormModal.tsx | 59 ++++ .../src/app/entity/shared/entityForm/Form.tsx | 100 ++++++ .../entity/shared/entityForm/FormByEntity.tsx | 71 ++++ .../entityForm/FormHeader/FormPageHeader.tsx | 40 +++ .../entityForm/FormHeader/components.ts | 36 ++ .../FormSelectionModal/FormItem.tsx | 101 ++++++ .../FormSelectionModal/FormRequestedBy.tsx | 24 ++ .../FormSelectionModal/FormSelectionModal.tsx | 21 ++ .../FormSelectionModal/FormSelector.tsx | 48 +++ .../entity/shared/entityForm/ProgressBar.tsx | 38 ++ .../shared/entityForm/__tests__/Form.test.tsx | 62 ++++ .../useShouldShowVerificationPrompt.test.ts | 48 +++ .../app/entity/shared/entityForm/constants.ts | 3 + .../src/app/entity/shared/entityForm/mocks.ts | 221 ++++++++++++ .../shared/entityForm/prompts/Prompt.tsx | 65 ++++ .../CompletedPromptAuditStamp.tsx | 69 ++++ .../StructuredPropertyPrompt/DateInput.tsx | 18 + .../DropdownLabel.tsx | 34 ++ .../MultiSelectInput.tsx | 82 +++++ .../MultipleStringInput.tsx | 83 +++++ .../StructuredPropertyPrompt/NumberInput.tsx | 23 ++ .../RichTextInput.tsx | 38 ++ .../SingleSelectInput.tsx | 56 +++ .../StructuredPropertyPrompt/StringInput.tsx | 31 ++ .../StructuredPropertyPrompt.tsx | 207 +++++++++++ .../UrnInput/SelectedEntity.tsx | 40 +++ .../UrnInput/UrnInput.tsx | 78 +++++ .../UrnInput/useUrnInput.tsx | 108 ++++++ .../ValueDescription.tsx | 24 ++ .../useStructuredPropertyPrompt.ts | 99 ++++++ .../prompts/StructuredPropertyPrompt/utils.ts | 36 ++ .../entityForm/prompts/VerificationPrompt.tsx | 72 ++++ .../schemaFieldPrompts/DropdownHeader.tsx | 62 ++++ .../SchemaFieldDropdown.tsx | 45 +++ .../schemaFieldPrompts/SchemaFieldPrompts.tsx 
| 36 ++ .../useShouldShowVerificationPrompt.ts | 38 ++ .../src/app/entity/shared/siblingUtils.ts | 3 + .../src/app/entity/shared/types.ts | 2 + .../src/app/home/HomePageRecommendations.tsx | 8 +- .../src/app/identity/user/UserList.tsx | 4 +- .../src/app/onboarding/OnboardingConfig.tsx | 2 + .../config/FormOnboardingConfig.tsx | 178 ++++++++++ .../useToggleEducationStepIdsAllowList.tsx | 18 + .../useUpdateEducationStepIdsAllowlist.tsx | 20 -- .../useUpdateEducationStepsAllowList.tsx | 22 ++ .../src/app/search/SearchHeader.tsx | 26 +- .../src/app/search/SearchPage.tsx | 6 +- .../src/app/shared/AppLogoLink.tsx | 31 ++ .../app/shared/DeferredRenderComponent.tsx | 23 ++ datahub-web-react/src/app/shared/Loading.tsx | 27 ++ .../src/app/shared/VirtualScrollChild.tsx | 24 ++ .../src/app/shared/admin/HeaderLinks.tsx | 4 +- .../src/app/shared/components.tsx | 6 + .../src/app/shared/useHasComponentRendered.ts | 18 + datahub-web-react/src/graphql/dataset.graphql | 21 ++ datahub-web-react/src/graphql/form.graphql | 7 + .../src/graphql/fragments.graphql | 64 ++++ .../src/images/background_dots.svg | 330 ++++++++++++++++++ .../src/images/bulk-form-type-comparison.svg | 68 ++++ .../src/images/greenCircleTwoTone.svg | 5 + .../src/images/verificationBlue.svg | 4 + .../src/images/verificationGreen.svg | 4 + .../src/images/verificationPurple.svg | 4 + .../src/images/verificationPurpleWhite.svg | 4 + .../src/images/verificationWarningGray.svg | 4 + .../src/images/verticalogo copy.png | Bin 0 -> 25763 bytes datahub-web-react/yarn.lock | 5 + 91 files changed, 4308 insertions(+), 128 deletions(-) create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/CompletedView.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/FormInfo.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/IncompleteView.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/OptionalPromptsRemaining.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/RequiredPromptsRemaining.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/SidebarFormInfoWrapper.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/VerificationAuditStamp.tsx create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/__tests__/utils.test.ts create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/components.ts create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useGetPromptInfo.ts create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useIsUserAssigned.ts create mode 100644 datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/EntityForm.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/EntityFormContext.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/EntityFormContextProvider.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/EntityFormModal.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/Form.tsx create 
mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormByEntity.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormHeader/FormPageHeader.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormHeader/components.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormItem.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormRequestedBy.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelectionModal.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelector.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/ProgressBar.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/__tests__/Form.test.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/__tests__/useShouldShowVerificationPrompt.test.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/constants.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/mocks.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/Prompt.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/CompletedPromptAuditStamp.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DateInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DropdownLabel.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultipleStringInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/NumberInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/RichTextInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/SingleSelectInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StringInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StructuredPropertyPrompt.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/SelectedEntity.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/UrnInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/useUrnInput.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/ValueDescription.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/useStructuredPropertyPrompt.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/utils.ts create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/VerificationPrompt.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/DropdownHeader.tsx create mode 100644 
datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldDropdown.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldPrompts.tsx create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/useShouldShowVerificationPrompt.ts create mode 100644 datahub-web-react/src/app/onboarding/config/FormOnboardingConfig.tsx create mode 100644 datahub-web-react/src/app/onboarding/useToggleEducationStepIdsAllowList.tsx delete mode 100644 datahub-web-react/src/app/onboarding/useUpdateEducationStepIdsAllowlist.tsx create mode 100644 datahub-web-react/src/app/onboarding/useUpdateEducationStepsAllowList.tsx create mode 100644 datahub-web-react/src/app/shared/AppLogoLink.tsx create mode 100644 datahub-web-react/src/app/shared/DeferredRenderComponent.tsx create mode 100644 datahub-web-react/src/app/shared/Loading.tsx create mode 100644 datahub-web-react/src/app/shared/VirtualScrollChild.tsx create mode 100644 datahub-web-react/src/app/shared/useHasComponentRendered.ts create mode 100644 datahub-web-react/src/graphql/form.graphql create mode 100644 datahub-web-react/src/images/background_dots.svg create mode 100644 datahub-web-react/src/images/bulk-form-type-comparison.svg create mode 100644 datahub-web-react/src/images/greenCircleTwoTone.svg create mode 100644 datahub-web-react/src/images/verificationBlue.svg create mode 100644 datahub-web-react/src/images/verificationGreen.svg create mode 100644 datahub-web-react/src/images/verificationPurple.svg create mode 100644 datahub-web-react/src/images/verificationPurpleWhite.svg create mode 100644 datahub-web-react/src/images/verificationWarningGray.svg create mode 100644 datahub-web-react/src/images/verticalogo copy.png diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 41f48e0a7dc3e..8d9b9a5ad82c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -92,6 +92,7 @@ import com.linkedin.datahub.graphql.generated.QuerySubject; import com.linkedin.datahub.graphql.generated.QuickFilter; import com.linkedin.datahub.graphql.generated.RecommendationContent; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; @@ -1642,7 +1643,7 @@ private void configureResolvedAuditStampResolvers(final RuntimeWiring.Builder bu typeWiring.dataFetcher( "actor", new LoadableTypeResolver<>( - corpUserType, (env) -> ((CorpUser) env.getSource()).getUrn()))); + corpUserType, (env) -> ((ResolvedAuditStamp) env.getSource()).getActor().getUrn()))); } /** diff --git a/datahub-graphql-core/src/main/resources/forms.graphql b/datahub-graphql-core/src/main/resources/forms.graphql index 0ff55cfa9f173..a0f84f8e3bb1a 100644 --- a/datahub-graphql-core/src/main/resources/forms.graphql +++ b/datahub-graphql-core/src/main/resources/forms.graphql @@ -275,20 +275,6 @@ input SubmitFormPromptInput { structuredPropertyParams: StructuredPropertyInputParams } -""" -Input for responding to a singular prompt in a form for a batch of entities -""" -input BatchSubmitFormPromptInput { - """ - The urns of the entities this prompt submission is for - """ - 
assetUrns: [String!]! - - """ - Input for responding to a specific prompt on a form - """ - input: SubmitFormPromptInput -} """ Input for collecting structured property values to apply to entities @@ -390,18 +376,3 @@ input VerifyFormInput { """ entityUrn: String! } - -""" -Input for verifying a batch of entities for a give form -""" -input BatchVerifyFormInput { - """ - The urns of the entities getting verified for this form - """ - assetUrns: [String!]! - - """ - The urn of the form being verified on the given entities - """ - formUrn: String! -} diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 97830cec4e164..337b0dc87ec1c 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -65,6 +65,7 @@ "react-helmet-async": "^1.3.0", "react-highlighter": "^0.4.3", "react-icons": "4.3.1", + "react-intersection-observer": "^9.5.3", "react-js-cron": "^2.1.0", "react-router": "^5.3", "react-router-dom": "^5.3", diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index 9f339bb7db548..f533e8d50385b 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -298,6 +298,7 @@ export const dataset1 = { browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, autoRenderAspects: [], structuredProperties: null, + forms: null, }; export const dataset2 = { @@ -395,6 +396,7 @@ export const dataset2 = { browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, autoRenderAspects: [], structuredProperties: null, + forms: null, }; export const dataset3 = { @@ -629,6 +631,7 @@ export const dataset3 = { lastProfile: null, lastOperation: null, structuredProperties: null, + forms: null, } as Dataset; export const dataset3WithSchema = { diff --git a/datahub-web-react/src/app/entity/Entity.tsx b/datahub-web-react/src/app/entity/Entity.tsx index 5920919a9cdab..3277051661bf1 100644 --- a/datahub-web-react/src/app/entity/Entity.tsx +++ b/datahub-web-react/src/app/entity/Entity.tsx @@ -1,6 +1,6 @@ import { EntityType, SearchResult } from '../../types.generated'; import { FetchedEntity } from '../lineage/types'; -import { GenericEntityProperties } from './shared/types'; +import { EntitySidebarSection, GenericEntityProperties } from './shared/types'; export enum PreviewType { /** @@ -176,4 +176,9 @@ export interface Entity { * Returns the profile component to be displayed in our Chrome extension */ renderEmbeddedProfile?: (urn: string) => JSX.Element; + + /** + * Returns the entity profile sidebar sections for an entity type. Only implemented on Datasets for now. 
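+     * Exposed through EntityRegistry.getSidebarSections, which falls back to an
+     * empty list for entity types that do not implement this.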
+ */ + getSidebarSections?: () => EntitySidebarSection[]; } diff --git a/datahub-web-react/src/app/entity/EntityRegistry.tsx b/datahub-web-react/src/app/entity/EntityRegistry.tsx index 6642c2c7b0467..4a2e0e386b768 100644 --- a/datahub-web-react/src/app/entity/EntityRegistry.tsx +++ b/datahub-web-react/src/app/entity/EntityRegistry.tsx @@ -4,7 +4,7 @@ import { FetchedEntity } from '../lineage/types'; import { SearchResultProvider } from '../search/context/SearchResultContext'; import { Entity, EntityCapabilityType, IconStyleType, PreviewType } from './Entity'; import { GLOSSARY_ENTITY_TYPES } from './shared/constants'; -import { GenericEntityProperties } from './shared/types'; +import { EntitySidebarSection, GenericEntityProperties } from './shared/types'; import { dictToQueryStringParams, getFineGrainedLineageWithSiblings, urlEncodeUrn } from './shared/utils'; function validatedGet(key: K, map: Map): V { @@ -194,6 +194,11 @@ export default class EntityRegistry { return entity.displayName(data); } + getSidebarSections(type: EntityType): EntitySidebarSection[] { + const entity = validatedGet(type, this.entityTypeToEntity); + return entity.getSidebarSections ? entity.getSidebarSections() : []; + } + getGenericEntityProperties(type: EntityType, data: T): GenericEntityProperties | null { const entity = validatedGet(type, this.entityTypeToEntity); return entity.getGenericEntityProperties(data); diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index f60eb95937452..90fac38ebd6b3 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -190,51 +190,51 @@ export class DatasetEntity implements Entity { }, }, ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, - { - component: SidebarSiblingsSection, - display: { - visible: (_, dataset: GetDatasetQuery) => - (dataset?.dataset?.siblings?.siblings?.length || 0) > 0, - }, - }, - { - component: SidebarViewDefinitionSection, - display: { - visible: (_, dataset: GetDatasetQuery) => - (dataset?.dataset?.viewProperties?.logic && true) || false, - }, - }, - { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, - // TODO: Add back once entity-level recommendations are complete. - // { - // component: SidebarRecommendationsSection, - // }, - ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarSiblingsSection, + display: { + visible: (_, dataset: GetDatasetQuery) => (dataset?.dataset?.siblings?.siblings?.length || 0) > 0, + }, + }, + { + component: SidebarViewDefinitionSection, + display: { + visible: (_, dataset: GetDatasetQuery) => (dataset?.dataset?.viewProperties?.logic && true) || false, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + // TODO: Add back once entity-level recommendations are complete. 
+ // { + // component: SidebarRecommendationsSection, + // }, + ]; + getOverridePropertiesFromEntity = (dataset?: Dataset | null): GenericEntityProperties => { // if dataset has subTypes filled out, pick the most specific subtype and return it const subTypes = dataset?.subTypes; diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx index a781c732c9de6..60d67355d5d7d 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx @@ -45,6 +45,7 @@ import { import { useAppConfig } from '../../../../useAppConfig'; import { useUpdateDomainEntityDataOnChange } from '../../../../domain/utils'; import ProfileSidebar from './sidebar/ProfileSidebar'; +import SidebarFormInfoWrapper from './sidebar/FormInfo/SidebarFormInfoWrapper'; type Props = { urn: string; @@ -333,7 +334,10 @@ export const EntityProfile = ({ - + )} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx new file mode 100644 index 0000000000000..1d1400a8cc753 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx @@ -0,0 +1,66 @@ +import Link from 'antd/lib/typography/Link'; +import React from 'react'; +import styled from 'styled-components'; +import PlatformContent from '../../header/PlatformContent'; +import { useEntityData } from '../../../../EntityContext'; +import { useEntityRegistry } from '../../../../../../useEntityRegistry'; +import { StyledDivider } from '../FormInfo/components'; +import { DatasetStatsSummarySubHeader } from '../../../../../dataset/profile/stats/stats/DatasetStatsSummarySubHeader'; +import LinkOut from '../../../../../../../images/link-out.svg?react'; +import FormInfo from '../FormInfo/FormInfo'; + +const EntityName = styled.div` + font-size: 16px; + font-weight: 700; + line-height: 24px; + margin-bottom: 8px; +`; + +const EntityInfoWrapper = styled.div` + padding-top: 20px; +`; + +const StyledLink = styled(Link)` + font-size: 14px; + line-height: 18px; + display: inline-flex; + align-items: center; + + svg { + height: 14px; + width: 14px; + } +`; + +const FormInfoWrapper = styled.div` + margin-top: 12px; +`; + +interface Props { + formUrn: string; +} + +export default function EntityInfo({ formUrn }: Props) { + const entityRegistry = useEntityRegistry(); + const { entityType, entityData } = useEntityData(); + const entityName = entityData ? 
entityRegistry.getDisplayName(entityType, entityData) : ''; + + return ( + + + {entityName} + + View Profile + + + + + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/CompletedView.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/CompletedView.tsx new file mode 100644 index 0000000000000..f8c0b74cc2cd4 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/CompletedView.tsx @@ -0,0 +1,59 @@ +import Link from 'antd/lib/typography/Link'; +import React from 'react'; +import styled from 'styled-components'; +import GreenVerificationLogo from '../../../../../../../images/verificationGreen.svg?react'; +import PurpleVerificationLogo from '../../../../../../../images/verificationPurple.svg?react'; +import { CTAWrapper, FlexWrapper, StyledIcon, StyledReadOutlined, Title } from './components'; +import OptionalPromptsRemaining from './OptionalPromptsRemaining'; +import VerificationAuditStamp from './VerificationAuditStamp'; + +const StyledLink = styled(Link)` + margin-top: 8px; + font-size: 12px; + display: block; +`; + +interface Props { + showVerificationStyles: boolean; + numOptionalPromptsRemaining: number; + isUserAssigned: boolean; + formUrn?: string; + shouldDisplayBackground?: boolean; + openFormModal?: () => void; +} + +export default function CompletedView({ + showVerificationStyles, + numOptionalPromptsRemaining, + isUserAssigned, + formUrn, + shouldDisplayBackground, + openFormModal, +}: Props) { + return ( + + + {showVerificationStyles ? ( + + ) : ( + + )} +

    + {showVerificationStyles ? 'Verified' : 'Documented'} + + {isUserAssigned && ( + <> + + {!!openFormModal && ( + View and edit responses + )} + + )} +
    + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/FormInfo.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/FormInfo.tsx new file mode 100644 index 0000000000000..681555a919b63 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/FormInfo.tsx @@ -0,0 +1,48 @@ +import React from 'react'; +import { useEntityData } from '../../../../EntityContext'; +import useGetPromptInfo from './useGetPromptInfo'; +import { isVerificationComplete, shouldShowVerificationInfo } from './utils'; +import CompletedView from './CompletedView'; +import IncompleteView from './IncompleteView'; +import useIsUserAssigned from './useIsUserAssigned'; + +interface Props { + formUrn?: string; + shouldDisplayBackground?: boolean; + openFormModal?: () => void; +} + +export default function FormInfo({ formUrn, shouldDisplayBackground, openFormModal }: Props) { + const { entityData } = useEntityData(); + const { numRequiredPromptsRemaining, numOptionalPromptsRemaining } = useGetPromptInfo(formUrn); + const showVerificationInfo = shouldShowVerificationInfo(entityData, formUrn); + const isUserAssigned = useIsUserAssigned(); + const allRequiredPromptsAreComplete = numRequiredPromptsRemaining === 0; + + const shouldShowCompletedView = showVerificationInfo + ? allRequiredPromptsAreComplete && isVerificationComplete(entityData, formUrn) + : allRequiredPromptsAreComplete; + + if (shouldShowCompletedView) { + return ( + + ); + } + + return ( + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/IncompleteView.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/IncompleteView.tsx new file mode 100644 index 0000000000000..b08e1baec5f54 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/IncompleteView.tsx @@ -0,0 +1,65 @@ +import { Button } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import PurpleVerificationLogo from '../../../../../../../images/verificationPurple.svg?react'; +import GrayVerificationIcon from '../../../../../../../images/verificationWarningGray.svg?react'; +import { CTAWrapper, FlexWrapper, StyledIcon, StyledReadFilled, Title } from './components'; +import OptionalPromptsRemaining from './OptionalPromptsRemaining'; +import RequiredPromptsRemaining from './RequiredPromptsRemaining'; + +const StyledButton = styled(Button)` + width: 100%; + margin-top: 12px; + font-size: 14px; + display: flex; + align-items: center; + justify-content: center; +`; + +interface Props { + showVerificationStyles: boolean; + numOptionalPromptsRemaining: number; + numRequiredPromptsRemaining: number; + isUserAssigned: boolean; + openFormModal?: () => void; +} + +export default function IncompleteView({ + showVerificationStyles, + numOptionalPromptsRemaining, + numRequiredPromptsRemaining, + isUserAssigned, + openFormModal, +}: Props) { + return ( + + + {isUserAssigned && ( + <> + {showVerificationStyles ? ( + + ) : ( + + )} + + )} + {!isUserAssigned && } +
    + Awaiting {showVerificationStyles ? 'Verification' : 'Documentation'} + {isUserAssigned && ( + <> + You are being asked to complete a set of requirements for this entity. + + + + )} +
    +
    + {!!openFormModal && isUserAssigned && ( + + {showVerificationStyles ? 'Complete Verification' : 'Complete Documentation'} + + )} +
    + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/OptionalPromptsRemaining.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/OptionalPromptsRemaining.tsx new file mode 100644 index 0000000000000..3198453ce467a --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/OptionalPromptsRemaining.tsx @@ -0,0 +1,23 @@ +import React from 'react'; +import styled from 'styled-components'; +import { pluralize } from '../../../../../../shared/textUtil'; +import { ANTD_GRAY_V2 } from '../../../../constants'; + +const OptionalPromptsWrapper = styled.div` + color: ${ANTD_GRAY_V2[8]}; + margin-top: 4px; +`; + +interface Props { + numRemaining: number; +} + +export default function OptionalPromptsRemaining({ numRemaining }: Props) { + if (numRemaining <= 0) return null; + + return ( + + {numRemaining} additional {pluralize(numRemaining, 'question', 's')} remaining + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/RequiredPromptsRemaining.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/RequiredPromptsRemaining.tsx new file mode 100644 index 0000000000000..e275b2d75146a --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/RequiredPromptsRemaining.tsx @@ -0,0 +1,15 @@ +import React from 'react'; +import { pluralize } from '../../../../../../shared/textUtil'; +import { SubTitle } from './components'; + +interface Props { + numRemaining: number; +} + +export default function RequiredPromptsRemaining({ numRemaining }: Props) { + return ( + + {numRemaining} required {pluralize(numRemaining, 'question', 's')} remaining + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/SidebarFormInfoWrapper.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/SidebarFormInfoWrapper.tsx new file mode 100644 index 0000000000000..9340a7f51d537 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/SidebarFormInfoWrapper.tsx @@ -0,0 +1,56 @@ +import React, { useState } from 'react'; +import styled from 'styled-components'; +import EntityFormModal from '../../../../entityForm/EntityFormModal'; +import FormInfo from './FormInfo'; +import { StyledDivider } from './components'; +import { useEntityData } from '../../../../EntityContext'; +import { getFormAssociations } from './utils'; +import FormSelectionModal from '../../../../entityForm/FormSelectionModal/FormSelectionModal'; + +const FormInfoWrapper = styled.div` + margin-top: 16px; +`; + +export default function SidebarFormInfoWrapper() { + const { entityData } = useEntityData(); + const [isFormSelectionModalVisible, setIsFormSelectionModalVisible] = useState(false); + const [isFormVisible, setIsFormVisible] = useState(false); + const [selectedFormUrn, setSelectedFormUrn] = useState(null); + const formAssociations = getFormAssociations(entityData); + + if (!formAssociations.length) return null; + + function openFormModal() { + if (formAssociations.length === 1) { + setSelectedFormUrn(formAssociations[0].form.urn); + setIsFormVisible(true); + } else { + setIsFormSelectionModalVisible(true); + } + } + + function selectFormUrn(urn: string) { + setSelectedFormUrn(urn); + setIsFormVisible(true); + setIsFormSelectionModalVisible(false); + } + + return ( + <> + + + + + setIsFormVisible(false)} + /> + 
setIsFormSelectionModalVisible(false)} + selectFormUrn={selectFormUrn} + /> + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/VerificationAuditStamp.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/VerificationAuditStamp.tsx new file mode 100644 index 0000000000000..f4373632418f4 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/VerificationAuditStamp.tsx @@ -0,0 +1,26 @@ +import dayjs from 'dayjs'; +import React from 'react'; +import { useEntityRegistry } from '../../../../../../useEntityRegistry'; +import { getVerificationAuditStamp } from './utils'; +import { useEntityData } from '../../../../EntityContext'; + +interface Props { + formUrn?: string; +} + +export default function VerificationAuditStamp({ formUrn }: Props) { + const entityRegistry = useEntityRegistry(); + const { entityData } = useEntityData(); + const verifiedAuditStamp = getVerificationAuditStamp(entityData, formUrn); + const verifiedTimestamp = verifiedAuditStamp?.time; + const verifiedActor = verifiedAuditStamp?.actor; + + if (!verifiedTimestamp) return null; + + return ( +
    + On {dayjs(verifiedTimestamp).format('ll')}{' '} + {verifiedActor && <>by {entityRegistry.getDisplayName(verifiedActor.type, verifiedActor)}} +
+    );
+}
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/__tests__/utils.test.ts b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/__tests__/utils.test.ts
new file mode 100644
index 0000000000000..7898befaf7ee3
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/__tests__/utils.test.ts
@@ -0,0 +1,187 @@
+import { FormAssociation, FormPrompt } from '../../../../../../../../types.generated';
+import { mockEntityData, mockEntityDataAllVerified } from '../../../../../entityForm/mocks';
+import { isAssignedToForm } from '../useIsUserAssigned';
+import {
+    getEntityPromptsInfo,
+    getFieldPromptsInfo,
+    getFormAssociations,
+    getNumEntityPromptsRemaining,
+    getNumPromptsCompletedForField,
+    getNumSchemaFieldPromptsRemaining,
+    getPromptsForForm,
+    getVerificationAuditStamp,
+    isVerificationComplete,
+    shouldShowVerificationInfo,
+} from '../utils';
+
+// only looking at IDs
+const prompts = [{ id: '1' }, { id: '2' }, { id: '3' }, { id: '4' }] as FormPrompt[];
+
+describe('form prompt utils', () => {
+    it('should get the correct number of top-level prompts remaining', () => {
+        const numPromptsRemaining = getNumEntityPromptsRemaining(prompts, mockEntityData);
+        expect(numPromptsRemaining).toBe(2);
+    });
+
+    // if there are 2 top level prompts for schema fields, 8 fields in the schema, then there are 16 total schema-field prompts
+    // there are 5 completed prompts in the mock data, should have 11 remaining
+    it('should get the correct number of field-level prompts remaining', () => {
+        const fieldFormPrompts = [{ id: '1' }, { id: '2' }] as FormPrompt[];
+        const numPromptsRemaining = getNumSchemaFieldPromptsRemaining(mockEntityData, fieldFormPrompts, 8);
+        expect(numPromptsRemaining).toBe(11);
+    });
+
+    it('should get the correct number of field-level prompts remaining given a form urn', () => {
+        const fieldFormPrompts = [{ id: '1' }, { id: '2' }] as FormPrompt[];
+        const numPromptsRemaining = getNumSchemaFieldPromptsRemaining(
+            mockEntityData,
+            fieldFormPrompts,
+            8,
+            'urn:li:form:1',
+        );
+        expect(numPromptsRemaining).toBe(11);
+    });
+
+    it('should get the correct number of field-level prompts remaining given a form urn with no field level prompts completed', () => {
+        const fieldFormPrompts = [{ id: '3' }] as FormPrompt[];
+        const numPromptsRemaining = getNumSchemaFieldPromptsRemaining(
+            mockEntityData,
+            fieldFormPrompts,
+            8,
+            'urn:li:form:2',
+        );
+        // none are completed in this form, with only 1 schema field prompt with 8 schema fields, so all 8 should be remaining
+        expect(numPromptsRemaining).toBe(8);
+    });
+
+    it('should get the number of completed prompts for a given schema field in incompletePrompts', () => {
+        const numCompleted = getNumPromptsCompletedForField('test2', mockEntityData, 'urn:li:form:1');
+        expect(numCompleted).toBe(1);
+    });
+
+    it('should get the number of completed prompts for a given schema field in completedPrompts and incompletePrompts', () => {
+        const numCompleted = getNumPromptsCompletedForField('test3', mockEntityData, 'urn:li:form:1');
+        expect(numCompleted).toBe(2);
+    });
+
+    it('should get the prompts for a given form urn correctly', () => {
+        const promptsForForm = getPromptsForForm('urn:li:form:1', mockEntityData);
+        expect(promptsForForm.length).toBe(2);
+        expect(promptsForForm.map((p) => p.id)).toMatchObject(['1', '2']);
+    });
+
+    it('should get information for entity specific prompts', () => {
+        const promptsForForm =
getPromptsForForm('urn:li:form:2', mockEntityData); + const { entityPrompts, numOptionalEntityPromptsRemaining, numRequiredEntityPromptsRemaining } = + getEntityPromptsInfo(promptsForForm, mockEntityData); + + expect(entityPrompts.length).toBe(2); + expect(entityPrompts.map((p) => p.id)).toMatchObject(['3', '5']); + expect(numOptionalEntityPromptsRemaining).toBe(1); + expect(numRequiredEntityPromptsRemaining).toBe(0); + }); + + it('should get information for field specific prompts', () => { + const promptsForForm = getPromptsForForm('urn:li:form:1', mockEntityData); + const { fieldPrompts, numOptionalFieldPromptsRemaining, numRequiredFieldPromptsRemaining } = + getFieldPromptsInfo(promptsForForm, mockEntityData, 8, 'urn:li:form:1'); + + expect(fieldPrompts.length).toBe(2); + expect(fieldPrompts.map((p) => p.id)).toMatchObject(['1', '2']); + expect(numOptionalFieldPromptsRemaining).toBe(11); + expect(numRequiredFieldPromptsRemaining).toBe(0); + }); + + it('should get all form associations for an entity', () => { + const formAssociations = getFormAssociations(mockEntityData); + expect(formAssociations.length).toBe(3); + expect(formAssociations.map((f) => f.form.urn)).toMatchObject([ + 'urn:li:form:1', + 'urn:li:form:2', + 'urn:li:form:3', + ]); + }); +}); + +describe('useIsUserAssigned utils tests', () => { + it('should return true if user is an owner and the form is assigned to owners', () => { + const isAssigned = isAssignedToForm(mockEntityData.forms?.incompleteForms[0] as FormAssociation, true); + expect(isAssigned).toBe(true); + }); + + it('should return false if user is not an owner and the form is assigned to owners', () => { + const isAssigned = isAssignedToForm(mockEntityData.forms?.incompleteForms[0] as FormAssociation, false); + expect(isAssigned).toBe(false); + }); + + it('should return true if the user is explicitly assigned', () => { + const isAssigned = isAssignedToForm(mockEntityData.forms?.completedForms[0] as FormAssociation, false); + expect(isAssigned).toBe(true); + }); +}); + +describe('shouldShowVerificationInfo', () => { + it('should return true if a form is supplied that is a verification form', () => { + const showVerificationInfo = shouldShowVerificationInfo(mockEntityData, 'urn:li:form:1'); + expect(showVerificationInfo).toBe(true); + }); + + it('should return false if a form is supplied that is not a verification form', () => { + const isAssigned = shouldShowVerificationInfo(mockEntityData, 'urn:li:form:3'); + expect(isAssigned).toBe(false); + }); + + it('should return true if no formUrn is supplied and there is a verification form', () => { + const isAssigned = shouldShowVerificationInfo(mockEntityData); + expect(isAssigned).toBe(true); + }); +}); + +describe('getVerificationAuditStamp', () => { + it('should return the audit stamp for a given form', () => { + const auditStamp = getVerificationAuditStamp(mockEntityData, 'urn:li:form:2'); + expect(auditStamp).toMatchObject({ + actor: { + urn: 'urn:li:corpuser:test', + }, + time: 100, + }); + }); + + it('should return undefined for audit stamp for a given form with no verifications', () => { + const auditStamp = getVerificationAuditStamp(mockEntityData, 'urn:li:form:1'); + expect(auditStamp).toBe(null); + }); + + it('should return the most recent audit stamp when not given form', () => { + const auditStamp = getVerificationAuditStamp(mockEntityData); + expect(auditStamp).toMatchObject({ + actor: { + urn: 'urn:li:corpuser:test', + }, + time: 101, + }); + }); +}); + +describe('isVerificationComplete', () => { + 
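A standalone sketch of the field-level arithmetic these tests assert, using the same helper and mocks imported above (illustrative, not part of the patch):

```typescript
import { FormPrompt } from '../../../../../../../../types.generated';
import { mockEntityData } from '../../../../../entityForm/mocks';
import { getNumSchemaFieldPromptsRemaining } from '../utils';

// 2 field-level prompts x 8 schema fields = 16 total schema-field prompts.
// The mock data contains 5 completed field prompts, so 16 - 5 = 11 remain.
const fieldFormPrompts = [{ id: '1' }, { id: '2' }] as FormPrompt[];
const remaining = getNumSchemaFieldPromptsRemaining(mockEntityData, fieldFormPrompts, 8);
// remaining === 11
```
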
it('should return true if the given form is verified', () => { + const isComplete = isVerificationComplete(mockEntityData, 'urn:li:form:2'); + expect(isComplete).toBe(true); + }); + + it('should return false if the given form is not verified', () => { + const isComplete = isVerificationComplete(mockEntityData, 'urn:li:form:1'); + expect(isComplete).toBe(false); + }); + + it('should return false if no form is given and not all verification forms are complete', () => { + const isComplete = isVerificationComplete(mockEntityData); + expect(isComplete).toBe(false); + }); + + it('should return true if no form is given and all verification forms are complete', () => { + const isComplete = isVerificationComplete(mockEntityDataAllVerified); + expect(isComplete).toBe(true); + }); +}); diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/components.ts b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/components.ts new file mode 100644 index 0000000000000..3c6aef5517d3d --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/components.ts @@ -0,0 +1,60 @@ +import { ReadFilled, ReadOutlined } from '@ant-design/icons'; +import Icon from '@ant-design/icons/lib/components/Icon'; +import { Divider } from 'antd'; +import styled from 'styled-components'; + +export const FlexWrapper = styled.div` + display: flex; + line-height: 18px; +`; + +export const StyledIcon = styled(Icon)<{ addLineHeight?: boolean }>` + font-size: 18px; + margin-right: 8px; + ${(props) => props.addLineHeight && `line-height: 24px;`} +`; + +export const SubTitle = styled.div<{ addMargin?: boolean }>` + font-weight: 600; + margin-bottom: 4px; + ${(props) => props.addMargin && `margin-top: 8px;`} +`; + +export const Title = styled.div` + font-size: 16px; + font-weight: 600; + margin-bottom: 4px; +`; + +export const StyledDivider = styled(Divider)` + margin: 12px 0 0 0; +`; + +export const StyledReadOutlined = styled(ReadOutlined)<{ addLineHeight?: boolean }>` + margin-right: 8px; + height: 13.72px; + width: 17.5px; + color: #373d44; + ${(props) => props.addLineHeight && `line-height: 24px;`} +`; + +export const StyledReadFilled = styled(ReadFilled)<{ addLineHeight?: boolean }>` + margin-right: 8px; + height: 13.72px; + width: 17.5px; + color: #7532a4; + ${(props) => props.addLineHeight && `line-height: 24px;`} +`; + +export const CTAWrapper = styled.div<{ shouldDisplayBackground?: boolean }>` + color: #373d44; + font-size: 14px; + ${(props) => + props.shouldDisplayBackground && + ` + border-radius: 8px; + padding: 16px; + background-color: #f9f0ff; + border: 1px solid #8338b8; + `} +`; diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useGetPromptInfo.ts b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useGetPromptInfo.ts new file mode 100644 index 0000000000000..252cefb9f6f4f --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useGetPromptInfo.ts @@ -0,0 +1,38 @@ +import { useMemo } from 'react'; +import { useEntityData } from '../../../../EntityContext'; +import { useGetEntityWithSchema } from '../../../../tabs/Dataset/Schema/useGetEntitySchema'; +import { getAllPrompts, getEntityPromptsInfo, getFieldPromptsInfo, getPromptsForForm } from './utils'; + +export default function useGetPromptInfo(formUrn?: string) { + const { entityData } = useEntityData(); + const { entityWithSchema } = useGetEntityWithSchema(); + const prompts 
= useMemo( + () => (formUrn ? getPromptsForForm(formUrn, entityData) : getAllPrompts(entityData)), + [formUrn, entityData], + ); + + const { + entityPrompts, + numRequiredEntityPromptsRemaining, + numOptionalEntityPromptsRemaining, + requiredEntityPrompts, + } = getEntityPromptsInfo(prompts, entityData); + const { fieldPrompts, numRequiredFieldPromptsRemaining, numOptionalFieldPromptsRemaining, requiredFieldPrompts } = + getFieldPromptsInfo(prompts, entityData, entityWithSchema?.schemaMetadata?.fields.length || 0, formUrn); + // Multiply number of field prompts by number of schema fields for total number of schema field prompts + const totalRequiredSchemaFieldPrompts = + (entityWithSchema?.schemaMetadata?.fields.length || 0) * requiredFieldPrompts.length; + + const numRequiredPromptsRemaining = numRequiredEntityPromptsRemaining + numRequiredFieldPromptsRemaining; + const numOptionalPromptsRemaining = numOptionalEntityPromptsRemaining + numOptionalFieldPromptsRemaining; + + return { + prompts, + fieldPrompts, + totalRequiredSchemaFieldPrompts, + entityPrompts, + numRequiredPromptsRemaining, + numOptionalPromptsRemaining, + requiredEntityPrompts, + }; +} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useIsUserAssigned.ts b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useIsUserAssigned.ts new file mode 100644 index 0000000000000..067513bfef03d --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/useIsUserAssigned.ts @@ -0,0 +1,24 @@ +import { FormAssociation } from '../../../../../../../types.generated'; +import { useUserContext } from '../../../../../../context/useUserContext'; +import { useEntityData } from '../../../../EntityContext'; +import { getFormAssociations } from './utils'; + +export function isAssignedToForm(formAssociation: FormAssociation, isUserAnOwner: boolean) { + const { isAssignedToMe, owners: isAssignedToOwners } = formAssociation.form.info.actors; + return isAssignedToMe || (isAssignedToOwners && isUserAnOwner); +} + +// returns true if this user is assigned (explicitly or by ownership) to a given form or any forms on this entity +export default function useIsUserAssigned(formUrn?: string) { + const { entityData } = useEntityData(); + const owners = entityData?.ownership?.owners; + const { user: loggedInUser } = useUserContext(); + const isUserAnOwner = !!owners?.find((owner) => owner.owner.urn === loggedInUser?.urn); + + const formAssociations = getFormAssociations(entityData); + if (formUrn) { + const formAssociation = formAssociations.find((association) => association.form.urn === formUrn); + return formAssociation ? 
isAssignedToForm(formAssociation, isUserAnOwner) : false;
+    }
+    return formAssociations.some((formAssociation) => isAssignedToForm(formAssociation, isUserAnOwner));
+}
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts
new file mode 100644
index 0000000000000..48206b411374c
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts
@@ -0,0 +1,324 @@
+import { Maybe } from 'graphql/jsutils/Maybe';
+import {
+    FieldFormPromptAssociation,
+    FormAssociation,
+    FormPrompt,
+    FormPromptAssociation,
+    FormType,
+    ResolvedAuditStamp,
+    SchemaField,
+} from '../../../../../../../types.generated';
+import { SCHEMA_FIELD_PROMPT_TYPES } from '../../../../entityForm/constants';
+import { GenericEntityProperties } from '../../../../types';
+
+export function getFormAssociations(entityData: GenericEntityProperties | null) {
+    return [...(entityData?.forms?.incompleteForms || []), ...(entityData?.forms?.completedForms || [])];
+}
+
+export function getFormAssociation(formUrn: string, entityData: GenericEntityProperties | null) {
+    return (
+        entityData?.forms?.incompleteForms?.find((formAssoc) => formAssoc.form.urn === formUrn) ||
+        entityData?.forms?.completedForms?.find((formAssoc) => formAssoc.form.urn === formUrn)
+    );
+}
+
+/*
+ * For a given prompt, get all of the completed field prompts.
+ * Takes in an optional set of prompt IDs; if it exists, only return the
+ * completed field prompts for this prompt if this prompt's ID is in the set.
+ */
+function getCompletedFieldPromptsFromPrompt(prompt: FormPromptAssociation, relevantFieldFormPromptIds?: Set<string>) {
+    if (relevantFieldFormPromptIds && relevantFieldFormPromptIds.has(prompt.id)) {
+        return prompt.fieldAssociations?.completedFieldPrompts || [];
+    }
+    if (!relevantFieldFormPromptIds) {
+        return prompt.fieldAssociations?.completedFieldPrompts || [];
+    }
+    return [];
+}
+
+/*
+ * For a given form, loop over all prompts and retrieve completedFieldPrompts from each.
+ * Note: we pass in an optional set of prompt IDs in order to get completed
+ * field prompts only for the entity prompt IDs we choose.
+ */
+export function getCompletedFieldPromptsFromForm(
+    formAssociation: FormAssociation,
+    relevantFieldFormPromptIds?: Set<string>,
+) {
+    let completedFieldPromptAssociations: FieldFormPromptAssociation[] = [];
+    formAssociation.completedPrompts?.forEach((completedPrompt) => {
+        completedFieldPromptAssociations = completedFieldPromptAssociations.concat(
+            getCompletedFieldPromptsFromPrompt(completedPrompt, relevantFieldFormPromptIds),
+        );
+    });
+    formAssociation.incompletePrompts?.forEach((incompletePrompt) => {
+        completedFieldPromptAssociations = completedFieldPromptAssociations.concat(
+            getCompletedFieldPromptsFromPrompt(incompletePrompt, relevantFieldFormPromptIds),
+        );
+    });
+    return completedFieldPromptAssociations;
+}
+
+/*
+ * Gets a list of the completed field prompt associations which live as children to top
+ * level prompt associations for each schema field.
+ * We need to loop over every prompt, both completed and incomplete, from both complete and incomplete forms.
+ * For each prompt, get its list of completedFieldPrompts.
+ * Takes in an optional list of relevant prompt IDs to filter results down.
+ */
+export function getCompletedFieldPromptAssociations(
+    entityData: GenericEntityProperties | null,
+    relevantFieldFormPromptIds?: Set<string>,
+) {
+    let completedFieldPromptAssociations: FieldFormPromptAssociation[] = [];
+
+    const forms = entityData?.forms;
+    forms?.completedForms?.forEach((formAssociation) => {
+        completedFieldPromptAssociations = completedFieldPromptAssociations.concat(
+            getCompletedFieldPromptsFromForm(formAssociation, relevantFieldFormPromptIds),
+        );
+    });
+    forms?.incompleteForms?.forEach((formAssociation) => {
+        completedFieldPromptAssociations = completedFieldPromptAssociations.concat(
+            getCompletedFieldPromptsFromForm(formAssociation, relevantFieldFormPromptIds),
+        );
+    });
+
+    return completedFieldPromptAssociations;
+}
+
+/*
+ * For a given form, gets a list of the completed field prompt associations which live
+ * as children to top level prompt associations for each schema field.
+ */
+export function getCompletedFieldPromptAssociationsForForm(
+    formUrn: string,
+    entityData: GenericEntityProperties | null,
+    relevantFieldFormPromptIds?: Set<string>,
+) {
+    const formAssociation = getFormAssociation(formUrn, entityData);
+    return formAssociation ? getCompletedFieldPromptsFromForm(formAssociation, relevantFieldFormPromptIds) : [];
+}
+
+export function getNumPromptsCompletedForField(
+    fieldPath: string,
+    entityData: GenericEntityProperties | null,
+    formUrn: string,
+) {
+    const completedFieldPromptAssociations = getCompletedFieldPromptAssociationsForForm(formUrn, entityData);
+    return completedFieldPromptAssociations.filter((association) => association.fieldPath === fieldPath).length;
+}
+
+/*
+ * Returns the number of schema field prompts not completed yet.
+ * The total number of schema field prompts equals the top-level number of field prompts
+ * on the form multiplied by the number of schema fields.
+ * Optionally takes in a formUrn to look at one specific form instead of all forms.
+ */
+export function getNumSchemaFieldPromptsRemaining(
+    entityData: GenericEntityProperties | null,
+    fieldFormPrompts: FormPrompt[],
+    numSchemaFields: number,
+    formUrn?: string,
+) {
+    const numFieldPromptsAvailable = numSchemaFields * fieldFormPrompts.length;
+    // We pass in either the required or the optional fieldFormPrompts we care about here; completed prompts must be checked against these IDs.
+    const relevantFieldFormPromptIds = new Set(fieldFormPrompts.map((prompt) => prompt.id));
+    const completedFieldPromptAssociations = formUrn
+        ? getCompletedFieldPromptAssociationsForForm(formUrn, entityData, relevantFieldFormPromptIds)
+        : getCompletedFieldPromptAssociations(entityData, relevantFieldFormPromptIds);
+
+    return numFieldPromptsAvailable - completedFieldPromptAssociations.length;
+}
+
+// Get completed prompts from both complete and incomplete forms for this entity
+export function getCompletedPrompts(entityData: GenericEntityProperties | null) {
+    const forms = entityData?.forms;
+    let completedPrompts =
+        forms?.incompleteForms?.flatMap((form) => (form.completedPrompts ? form.completedPrompts : [])) || [];
+    completedPrompts = completedPrompts.concat(
+        forms?.completedForms?.flatMap((form) => (form.completedPrompts ? form.completedPrompts : [])) || [],
+    );
+    return completedPrompts;
+}
+
+// Get incomplete prompts from both complete and incomplete forms for this entity
+export function getIncompletePrompts(entityData: GenericEntityProperties | null) {
+    const forms = entityData?.forms;
+    let incompletePrompts =
+        forms?.incompleteForms?.flatMap((form) => (form.incompletePrompts ? form.incompletePrompts : [])) || [];
+    incompletePrompts = incompletePrompts.concat(
+        forms?.completedForms?.flatMap((form) => (form.incompletePrompts ? form.incompletePrompts : [])) || [],
+    );
+    return incompletePrompts;
+}
+
+export function isPromptComplete(prompt: FormPrompt, completedPrompts: FormPromptAssociation[]) {
+    return !!completedPrompts.find((completedPrompt) => completedPrompt.id === prompt.id);
+}
+
+export function findCompletedFieldPrompt(fieldPrompt?: SchemaField, promptAssociation?: FormPromptAssociation) {
+    return promptAssociation?.fieldAssociations?.completedFieldPrompts?.find(
+        (fieldPath) => fieldPath.fieldPath === fieldPrompt?.fieldPath,
+    );
+}
+
+export function isFieldPromptComplete(fieldPrompt?: SchemaField, promptAssociation?: FormPromptAssociation) {
+    return !!findCompletedFieldPrompt(fieldPrompt, promptAssociation);
+}
+
+// For every prompt provided, check if it's in our list of completed prompts and return the number of prompts not completed
+export function getNumEntityPromptsRemaining(entityPrompts: FormPrompt[], entityData: GenericEntityProperties | null) {
+    const completedPrompts = getCompletedPrompts(entityData);
+    let numPromptsRemaining = 0;
+
+    entityPrompts.forEach((prompt) => {
+        if (prompt && !isPromptComplete(prompt, completedPrompts)) {
+            numPromptsRemaining += 1;
+        }
+    });
+
+    return numPromptsRemaining;
+}
+
+// Get prompts from both complete and incomplete forms
+export function getAllPrompts(entityData: GenericEntityProperties | null) {
+    let prompts = entityData?.forms?.incompleteForms?.flatMap((form) => form.form.info.prompts) || [];
+    prompts = prompts.concat(entityData?.forms?.completedForms?.flatMap((form) => form.form.info.prompts) || []);
+    return prompts;
+}
+
+// Find a specific prompt association from both complete and incomplete prompts
+export function findPromptAssociation(prompt: FormPrompt, allPrompts: Array<FormPromptAssociation>) {
+    return allPrompts?.find((myprompt) => myprompt.id === prompt.id);
+}
+
+// Get the prompts for a given form
+export function getPromptsForForm(formUrn: string, entityData: GenericEntityProperties | null) {
+    const formAssociation = getFormAssociation(formUrn, entityData);
+    return formAssociation?.form.info.prompts || [];
+}
+
+/*
+ * Gets information for entity level prompts
+ */
+export function getEntityPromptsInfo(prompts: FormPrompt[], entityData: GenericEntityProperties | null) {
+    const entityPrompts = prompts.filter((prompt) => !SCHEMA_FIELD_PROMPT_TYPES.includes(prompt.type));
+    const requiredEntityPrompts = entityPrompts.filter((prompt) => prompt.required);
+    const optionalEntityPrompts = entityPrompts.filter((prompt) => !prompt.required);
+
+    const numRequiredEntityPromptsRemaining = getNumEntityPromptsRemaining(requiredEntityPrompts, entityData);
+    const numOptionalEntityPromptsRemaining = getNumEntityPromptsRemaining(optionalEntityPrompts, entityData);
+
+    return {
+        entityPrompts,
+        numRequiredEntityPromptsRemaining,
+        numOptionalEntityPromptsRemaining,
+        requiredEntityPrompts,
+    };
+}
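To make the required/optional split above concrete, here is a small illustrative driver for `getEntityPromptsInfo` (import paths assumed to mirror those in utils.ts; passing `null` entity data means nothing is completed, so every prompt counts as remaining):

```typescript
import { FormPrompt, FormPromptType } from '../../../../../../../types.generated';
import { getEntityPromptsInfo } from './utils';

// Two entity-level prompts: one required, one optional. Neither is a
// schema-field prompt, so both survive the SCHEMA_FIELD_PROMPT_TYPES filter.
const examplePrompts = [
    { id: 'p1', type: FormPromptType.StructuredProperty, required: true },
    { id: 'p2', type: FormPromptType.StructuredProperty, required: false },
] as FormPrompt[];

const { numRequiredEntityPromptsRemaining, numOptionalEntityPromptsRemaining } = getEntityPromptsInfo(
    examplePrompts,
    null,
);
// With no entity data there are no completed prompts, so both counts are 1.
```
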
+
+/*
+ * Gets information for schema field level prompts
+ */
+export function getFieldPromptsInfo(
+    prompts: FormPrompt[],
+    entityData: GenericEntityProperties | null,
+    numSchemaFields: number,
+    formUrn?: string,
+) {
+    const fieldPrompts = prompts.filter((prompt) => SCHEMA_FIELD_PROMPT_TYPES.includes(prompt.type));
+    const requiredFieldPrompts = fieldPrompts.filter((prompt) => prompt.required);
+    const optionalFieldPrompts = fieldPrompts.filter((prompt) => !prompt.required);
+
+    const numRequiredFieldPromptsRemaining = getNumSchemaFieldPromptsRemaining(
+        entityData,
+        requiredFieldPrompts,
+        numSchemaFields,
+        formUrn,
+    );
+    const numOptionalFieldPromptsRemaining = getNumSchemaFieldPromptsRemaining(
+        entityData,
+        optionalFieldPrompts,
+        numSchemaFields,
+        formUrn,
+    );
+
+    return { fieldPrompts, numRequiredFieldPromptsRemaining, numOptionalFieldPromptsRemaining, requiredFieldPrompts };
+}
+
+export function getFormVerification(formUrn: string, entityData: GenericEntityProperties | null) {
+    return entityData?.forms?.verifications?.find((verification) => verification.form.urn === formUrn);
+}
+
+export function getVerificationForms(entityData: GenericEntityProperties | null) {
+    const formAssociations = getFormAssociations(entityData);
+    return formAssociations.filter((formAssociation) => formAssociation.form.info.type === FormType.Verification);
+}
+
+export function areAllFormsVerified(formAssociations: FormAssociation[], entityData: GenericEntityProperties | null) {
+    return formAssociations.every((formAssociation) => !!getFormVerification(formAssociation.form.urn, entityData));
+}
+
+/*
+ * If a form urn is supplied, return true if that form is verified.
+ * If no form is supplied, return true if all verification type forms are verified.
+ * If there are no verification type forms or any are missing verification, return false.
+ */
+export function isVerificationComplete(entityData: GenericEntityProperties | null, formUrn?: string) {
+    if (formUrn) {
+        return !!getFormVerification(formUrn, entityData);
+    }
+    const verificationForms = getVerificationForms(entityData);
+    if (verificationForms.length) {
+        return areAllFormsVerified(verificationForms, entityData);
+    }
+    return false;
+}
+
+export function isFormVerificationType(entityData: GenericEntityProperties | null, formUrn: string) {
+    const formAssociation = getFormAssociation(formUrn, entityData);
+    return formAssociation?.form.info.type === FormType.Verification;
+}
+
+/*
+ * If given a single form, we should show verification copy and styles when it is of type verification.
+ * If no formUrn is supplied, return true if any of our multiple forms are of type verification.
+ */
+export function shouldShowVerificationInfo(entityData: GenericEntityProperties | null, formUrn?: string) {
+    if (formUrn) {
+        return isFormVerificationType(entityData, formUrn);
+    }
+    return getVerificationForms(entityData).length > 0;
+}
+
+function getMostRecentVerificationAuditStamp(entityData: GenericEntityProperties | null) {
+    let mostRecentTimestamp: Maybe<ResolvedAuditStamp> = null;
+    entityData?.forms?.verifications?.forEach((verification) => {
+        if (mostRecentTimestamp === null || (verification.lastModified?.time || 0) > (mostRecentTimestamp?.time || 0)) {
+            mostRecentTimestamp = verification.lastModified;
+        }
+    });
+    return mostRecentTimestamp;
+}
+
+/*
+ * If given one form, return the verification lastModified for it. Otherwise, find the most
+ * recently completed verification timestamp from any of the forms on this entity.
+ */
+export function getVerificationAuditStamp(entityData: GenericEntityProperties | null, formUrn?: string) {
+    if (formUrn) {
+        return getFormVerification(formUrn, entityData)?.lastModified || null;
+    }
+    return getMostRecentVerificationAuditStamp(entityData);
+}
+
+export function getBulkByQuestionPrompts(formUrn: string, entityData: GenericEntityProperties | null) {
+    const formAssociation = getFormAssociation(formUrn, entityData);
+    return (
+        formAssociation?.form.info.prompts.filter((prompt) => !SCHEMA_FIELD_PROMPT_TYPES.includes(prompt.type)) || []
+    );
+}
diff --git a/datahub-web-react/src/app/entity/shared/entityForm/EntityForm.tsx b/datahub-web-react/src/app/entity/shared/entityForm/EntityForm.tsx
new file mode 100644
index 0000000000000..136bbabb61e0b
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/entityForm/EntityForm.tsx
@@ -0,0 +1,15 @@
+import React from 'react';
+import FormByEntity from './FormByEntity';
+import { FormView, useEntityFormContext } from './EntityFormContext';
+
+interface Props {
+    formUrn: string;
+}
+
+export default function EntityForm({ formUrn }: Props) {
+    const { formView } = useEntityFormContext();
+
+    if (formView === FormView.BY_ENTITY) return <FormByEntity formUrn={formUrn} />;
+
+    return null;
+}
diff --git a/datahub-web-react/src/app/entity/shared/entityForm/EntityFormContext.tsx b/datahub-web-react/src/app/entity/shared/entityForm/EntityFormContext.tsx
new file mode 100644
index 0000000000000..aa5e1e3c4a8fe
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/entityForm/EntityFormContext.tsx
@@ -0,0 +1,54 @@
+import React, { useContext } from 'react';
+import { Entity } from '../../../../types.generated';
+import { EntityAndType, GenericEntityProperties } from '../types';
+
+export enum FormView {
+    BY_ENTITY,
+}
+
+export type EntityFormContextType = {
+    formUrn: string;
+    isInFormContext: boolean;
+    entityData: GenericEntityProperties | undefined;
+    loading: boolean;
+    selectedEntity: Entity | undefined;
+    selectedPromptId: string | null;
+    formView: FormView;
+    selectedEntities: EntityAndType[];
+    setSelectedEntities: (entities: EntityAndType[]) => void;
+    setFormView: (formView: FormView) => void;
+    refetch: () => Promise<any>;
+    setSelectedEntity: (sortOption: Entity) => void;
+    setSelectedPromptId: (promptId: string) => void;
+    shouldRefetchSearchResults: boolean;
+    setShouldRefetchSearchResults: (shouldRefetch: boolean) => void;
+    isVerificationType: boolean;
+};
+
+export const DEFAULT_CONTEXT = {
+    formUrn: '',
+    isInFormContext: false,
+    entityData: undefined,
+    loading: false,
+    refetch: () => Promise.resolve({}),
+    selectedEntity: undefined,
+    setSelectedEntity: (_: Entity) => null,
+    selectedEntities: [],
+    setSelectedEntities: (_: EntityAndType[]) => null,
+    formView: FormView.BY_ENTITY,
+    setFormView: (_: FormView) => null,
+    selectedPromptId: null,
+    setSelectedPromptId: (_: string) => null,
+    shouldRefetchSearchResults: false,
+    setShouldRefetchSearchResults: () => null,
+    isVerificationType: true,
+};
+
+export const EntityFormContext = React.createContext<EntityFormContextType>(DEFAULT_CONTEXT);
+
+export function useEntityFormContext() {
+    const context = useContext(EntityFormContext);
+    if (context === null)
+        throw new Error(`${useEntityFormContext.name} must be used under an EntityFormContextProvider`);
+    return context;
+}
diff --git a/datahub-web-react/src/app/entity/shared/entityForm/EntityFormContextProvider.tsx 
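As a usage sketch (component name hypothetical, not part of the patch), any component rendered under the provider can read this state through the hook above:

```typescript
import React from 'react';
import { useEntityFormContext } from './EntityFormContext';

// Hypothetical consumer: shows which prompt is selected once loading settles.
export default function SelectedPromptIndicator() {
    const { loading, selectedPromptId } = useEntityFormContext();
    if (loading) return null;
    return <span>{selectedPromptId ?? 'No prompt selected'}</span>;
}
```
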
b/datahub-web-react/src/app/entity/shared/entityForm/EntityFormContextProvider.tsx new file mode 100644 index 0000000000000..41d7fcf4bd899 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/EntityFormContextProvider.tsx @@ -0,0 +1,78 @@ +import React, { useEffect, useState } from 'react'; +import { EntityFormContext, FormView } from './EntityFormContext'; +import { useEntityContext } from '../EntityContext'; +import { Entity } from '../../../../types.generated'; +import { useGetDatasetQuery } from '../../../../graphql/dataset.generated'; +import { EntityAndType, GenericEntityProperties } from '../types'; +import { getFormAssociation, isFormVerificationType } from '../containers/profile/sidebar/FormInfo/utils'; +import usePrevious from '../../../shared/usePrevious'; +import { SCHEMA_FIELD_PROMPT_TYPES } from './constants'; + +interface Props { + children: React.ReactNode; + formUrn: string; +} + +export default function EntityFormContextProvider({ children, formUrn }: Props) { + const { entityData, refetch: refetchEntityProfile, loading: profileLoading } = useEntityContext(); + const formAssociation = getFormAssociation(formUrn, entityData); + const initialPromptId = + formAssociation?.form.info.prompts.filter((prompt) => !SCHEMA_FIELD_PROMPT_TYPES.includes(prompt.type))[0] + ?.id || null; + const isVerificationType = isFormVerificationType(entityData, formUrn); + const [formView, setFormView] = useState(FormView.BY_ENTITY); + const [selectedEntity, setSelectedEntity] = useState(entityData as Entity); + const [selectedPromptId, setSelectedPromptId] = useState(initialPromptId); + const [selectedEntities, setSelectedEntities] = useState([]); + const [shouldRefetchSearchResults, setShouldRefetchSearchResults] = useState(false); + + useEffect(() => { + if (!selectedPromptId && formAssociation) { + setSelectedPromptId(initialPromptId); + } + }, [selectedPromptId, formAssociation, initialPromptId]); + + const previousFormUrn = usePrevious(formUrn); + useEffect(() => { + if (formUrn && previousFormUrn !== formUrn) { + setFormView(FormView.BY_ENTITY); + setSelectedPromptId(initialPromptId); + } + }, [formUrn, previousFormUrn, initialPromptId]); + + const { + data: fetchedData, + refetch, + loading, + } = useGetDatasetQuery({ + variables: { urn: selectedEntity.urn }, + }); + + const isOnEntityProfilePage = selectedEntity.urn === entityData?.urn; + const selectedEntityData = isOnEntityProfilePage ? 
entityData : (fetchedData?.dataset as GenericEntityProperties); + + return ( + + {children} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/EntityFormModal.tsx b/datahub-web-react/src/app/entity/shared/entityForm/EntityFormModal.tsx new file mode 100644 index 0000000000000..47026472c43f9 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/EntityFormModal.tsx @@ -0,0 +1,59 @@ +import { CloseOutlined } from '@ant-design/icons'; +import React from 'react'; +import { Modal } from 'antd'; +import styled from 'styled-components'; +import EntityForm from './EntityForm'; +import FormPageHeader from './FormHeader/FormPageHeader'; +import EntityFormContextProvider from './EntityFormContextProvider'; + +const StyledModal = styled(Modal)` + &&& .ant-modal-content { + display: flex; + flex-direction: column; + height: calc(100vh); + } + + .ant-modal-header { + padding: 0; + } + + .ant-modal-body { + flex: 1; + max-height: 100%; + overflow: hidden; + padding: 0; + display: flex; + } +`; + +const StyledClose = styled(CloseOutlined)` + && { + color: white; + font-size: 24px; + margin: 18px 12px 0 0; + } +`; + +interface Props { + selectedFormUrn: string | null; + isFormVisible: boolean; + hideFormModal: () => void; +} + +export default function EntityFormModal({ selectedFormUrn, isFormVisible, hideFormModal }: Props) { + return ( + + } + closeIcon={} + style={{ top: 0, height: '100vh', minWidth: '100vw' }} + destroyOnClose + > + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/Form.tsx b/datahub-web-react/src/app/entity/shared/entityForm/Form.tsx new file mode 100644 index 0000000000000..bf1b093d98434 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/Form.tsx @@ -0,0 +1,100 @@ +import React from 'react'; +import styled from 'styled-components'; +import { useEntityData } from '../EntityContext'; +import { FormPrompt } from '../../../../types.generated'; +import Prompt, { PromptWrapper } from './prompts/Prompt'; +import { ANTD_GRAY_V2 } from '../constants'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { PromptSubTitle } from './prompts/StructuredPropertyPrompt/StructuredPropertyPrompt'; +import SchemaFieldPrompts from './schemaFieldPrompts/SchemaFieldPrompts'; +import useGetPromptInfo from '../containers/profile/sidebar/FormInfo/useGetPromptInfo'; +import VerificationPrompt from './prompts/VerificationPrompt'; +import useShouldShowVerificationPrompt from './useShouldShowVerificationPrompt'; +import { getFormAssociation } from '../containers/profile/sidebar/FormInfo/utils'; +import FormRequestedBy from './FormSelectionModal/FormRequestedBy'; +import useHasComponentRendered from '../../../shared/useHasComponentRendered'; +import Loading from '../../../shared/Loading'; +import { DeferredRenderComponent } from '../../../shared/DeferredRenderComponent'; +import { OnboardingTour } from '../../../onboarding/OnboardingTour'; +import { FORM_ASSET_COMPLETION } from '../../../onboarding/config/FormOnboardingConfig'; + +const TabWrapper = styled.div` + background-color: ${ANTD_GRAY_V2[1]}; + overflow: auto; + padding: 24px; + flex: 1; + max-height: 100%; +`; + +const IntroTitle = styled.div` + font-size: 20px; + font-weight: 600; +`; + +const HeaderWrapper = styled(PromptWrapper)``; + +const SubTitle = styled(PromptSubTitle)` + margin-top: 16px; +`; + +const RequestedByWrapper = styled(PromptSubTitle)` + color: ${ANTD_GRAY_V2[8]}; +`; + +interface Props { + formUrn: string; +} + +function Form({ 
formUrn }: Props) { + const entityRegistry = useEntityRegistry(); + const { entityType, entityData } = useEntityData(); + const { entityPrompts, fieldPrompts } = useGetPromptInfo(formUrn); + const shouldShowVerificationPrompt = useShouldShowVerificationPrompt(formUrn); + const { hasRendered } = useHasComponentRendered(); + + if (!hasRendered) return ; + + const formAssociation = getFormAssociation(formUrn, entityData); + const title = formAssociation?.form.info.name; + const associatedUrn = formAssociation?.associatedUrn; + const description = formAssociation?.form.info.description; + const owners = formAssociation?.form.ownership?.owners; + + return ( + + + + + {title ? <>{title} : <>{entityRegistry.getEntityName(entityType)} Requirements} + + {owners && owners.length > 0 && ( + + + + )} + {description ? ( + {description} + ) : ( + + Please fill out the following information for this {entityRegistry.getEntityName(entityType)} so + that we can keep track of the status of the asset + + )} + + {entityPrompts?.map((prompt, index) => ( + + ))} + {fieldPrompts.length > 0 && } + {shouldShowVerificationPrompt && } + + ); +} + +export default function FormContainer({ formUrn }: Props) { + return } />; +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormByEntity.tsx b/datahub-web-react/src/app/entity/shared/entityForm/FormByEntity.tsx new file mode 100644 index 0000000000000..23550e8fcca5f --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormByEntity.tsx @@ -0,0 +1,71 @@ +import React from 'react'; +import styled from 'styled-components'; +import Form from './Form'; +import { ANTD_GRAY_V2 } from '../constants'; +import ProfileSidebar from '../containers/profile/sidebar/ProfileSidebar'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import EntityContext, { useEntityContext } from '../EntityContext'; +import EntityInfo from '../containers/profile/sidebar/EntityInfo/EntityInfo'; +import { useEntityFormContext } from './EntityFormContext'; +import ProgressBar from './ProgressBar'; +import { OnboardingTour } from '../../../onboarding/OnboardingTour'; +import { + FORM_QUESTION_VIEW_BUTTON, + WELCOME_TO_BULK_BY_ENTITY_ID, +} from '../../../onboarding/config/FormOnboardingConfig'; + +const ContentWrapper = styled.div` + background-color: ${ANTD_GRAY_V2[1]}; + max-height: 100%; + display: flex; + flex-direction: column; + width: 100%; + flex: 1; +`; + +const FlexWrapper = styled.div` + display: flex; + max-height: 100%; + overflow: auto; + width: 100%; +`; + +interface Props { + formUrn: string; +} + +export default function FormByEntity({ formUrn }: Props) { + const { selectedEntity, entityData: selectedEntityData, refetch, loading } = useEntityFormContext(); + const { entityType } = useEntityContext(); + const entityRegistry = useEntityRegistry(); + const sidebarSections = entityRegistry.getSidebarSections(selectedEntity?.type || entityType); + + return ( + {}, + refetch, + }} + > + + + + + }} + backgroundColor="white" + alignLeft + /> + + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormHeader/FormPageHeader.tsx b/datahub-web-react/src/app/entity/shared/entityForm/FormHeader/FormPageHeader.tsx new file mode 100644 index 0000000000000..4baa762fcf88d --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormHeader/FormPageHeader.tsx @@ -0,0 +1,40 @@ +import React from 'react'; +import styled from 'styled-components'; +import AppLogoLink from '../../../../shared/AppLogoLink'; + +const Header = 
styled.div` + padding: 12px 24px; + background-color: black; + font-size: 24px; + display: flex; + align-items: center; + color: white; + justify-content: space-between; +`; + +const HeaderText = styled.div` + margin-left: 24px; +`; + +const StyledDivider = styled.div` + display: flex; + flex-direction: column; +`; + +const TitleWrapper = styled.div` + display: flex; + align-items: center; +`; + +export default function FormPageHeader() { + return ( + +
    + + + Complete Documentation Requests + +
    +
    + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormHeader/components.ts b/datahub-web-react/src/app/entity/shared/entityForm/FormHeader/components.ts new file mode 100644 index 0000000000000..c94dc0ef63300 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormHeader/components.ts @@ -0,0 +1,36 @@ +import { ArrowLeftOutlined, ArrowRightOutlined } from '@ant-design/icons'; +import styled from 'styled-components'; +import { ANTD_GRAY_V2 } from '../../constants'; +import BackgroundDots from '../../../../../images/background_dots.svg'; + +export const BulkNavigationWrapper = styled.div<{ $hideBackground?: boolean }>` + padding: 16px 68px 16px 24px; + background-color: ${ANTD_GRAY_V2[10]}; + display: flex; + justify-content: flex-end; + ${(props) => + !props.$hideBackground && + ` + background-image: url(${BackgroundDots}); + background-position: right; + background-repeat: no-repeat; + `} +`; + +export const NavigationWrapper = styled.div<{ isHidden: boolean }>` + font-size: 20px; + color: white; + display: flex; + flex-wrap: nowrap; + ${(props) => props.isHidden && 'opacity: 0;'} +`; + +export const ArrowLeft = styled(ArrowLeftOutlined)` + margin-right: 24px; + cursor: pointer; +`; + +export const ArrowRight = styled(ArrowRightOutlined)` + margin-left: 24px; + cursor: pointer; +`; diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormItem.tsx b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormItem.tsx new file mode 100644 index 0000000000000..c23fd39d8a10f --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormItem.tsx @@ -0,0 +1,101 @@ +import { Tooltip } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { FormAssociation } from '../../../../../types.generated'; +import useGetPromptInfo from '../../containers/profile/sidebar/FormInfo/useGetPromptInfo'; +import { useEntityData } from '../../EntityContext'; +import { isVerificationComplete, shouldShowVerificationInfo } from '../../containers/profile/sidebar/FormInfo/utils'; +import { pluralize } from '../../../../shared/textUtil'; +import { WhiteButton } from '../../../../shared/components'; +import { ANTD_GRAY_V2 } from '../../constants'; +import useIsUserAssigned from '../../containers/profile/sidebar/FormInfo/useIsUserAssigned'; +import FormRequestedBy from './FormRequestedBy'; + +const FormItemWrapper = styled.div` + display: flex; + padding: 16px; + justify-content: space-between; +`; + +const FormName = styled.div` + font-size: 16px; + font-weight: 600; + margin-bottom: 4px; +`; + +const FormAssigner = styled.div` + font-size: 14px; + color: #373d44; + margin-top: -4px; + margin-bottom: 4px; +`; + +const OptionalText = styled.div` + color: ${ANTD_GRAY_V2[8]}; + font-weight: normal; +`; + +const CompleteWrapper = styled.div` + display: flex; + align-items: center; +`; + +const FormInfoWrapper = styled.div` + font-size: 12px; + color: #373d44; + font-weight: 600; +`; + +interface Props { + formAssociation: FormAssociation; + selectFormUrn: (urn: string) => void; +} + +export default function FormItem({ formAssociation, selectFormUrn }: Props) { + const { entityData } = useEntityData(); + const { form } = formAssociation; + const { numRequiredPromptsRemaining, numOptionalPromptsRemaining } = useGetPromptInfo(form.urn); + const allRequiredPromptsAreComplete = numRequiredPromptsRemaining === 0; + const showVerificationInfo = 
shouldShowVerificationInfo(entityData, form.urn); + const isComplete = showVerificationInfo + ? isVerificationComplete(entityData, form.urn) + : allRequiredPromptsAreComplete; + const isUserAssigned = useIsUserAssigned(form.urn); + const owners = form.ownership?.owners; + + return ( + +
    + {form.info.name} + {owners && owners.length > 0 && ( + + + + )} + + {isComplete && ( + {showVerificationInfo ? <>Verified : <>Complete} + )} + {!isComplete && ( +
    + {numRequiredPromptsRemaining} required {pluralize(numRequiredPromptsRemaining, 'response')}{' '} + remaining +
    + )} + {numOptionalPromptsRemaining > 0 && ( + + {numOptionalPromptsRemaining} optional {pluralize(numOptionalPromptsRemaining, 'response')}{' '} + remaining + + )} +
    +
    + + selectFormUrn(form.urn)} disabled={!isUserAssigned}> + {isComplete && 'View'} + {!isComplete && <>{showVerificationInfo ? 'Verify' : 'Complete'}} + + +
    + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormRequestedBy.tsx b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormRequestedBy.tsx new file mode 100644 index 0000000000000..fa4834b5a4f85 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormRequestedBy.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { Owner } from '../../../../../types.generated'; +import { useEntityRegistry } from '../../../../useEntityRegistry'; + +interface Props { + owners: Owner[]; +} + +export default function FormRequestedBy({ owners }: Props) { + const entityRegistry = useEntityRegistry(); + + return ( + <> + Requested by:{' '} + {owners.map((ownerAssoc, index) => ( + <> + {owners.length > 1 && index === owners.length - 1 && 'and '} + {entityRegistry.getDisplayName(ownerAssoc.owner.type, ownerAssoc.owner)} + {owners.length > 1 && index !== owners.length - 1 && ', '} + + ))} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelectionModal.tsx b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelectionModal.tsx new file mode 100644 index 0000000000000..17452b30f6c11 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelectionModal.tsx @@ -0,0 +1,21 @@ +import { Modal } from 'antd'; +import React from 'react'; +import FormSelector from './FormSelector'; + +interface Props { + isFormSelectionModalVisible: boolean; + hideFormSelectionModal: () => void; + selectFormUrn: (urn: string) => void; +} + +export default function FormSelectionModal({ + isFormSelectionModalVisible, + hideFormSelectionModal, + selectFormUrn, +}: Props) { + return ( + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelector.tsx b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelector.tsx new file mode 100644 index 0000000000000..6c9d593b38f0e --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/FormSelectionModal/FormSelector.tsx @@ -0,0 +1,48 @@ +import { Divider } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { useEntityData } from '../../EntityContext'; +import { getFormAssociations } from '../../containers/profile/sidebar/FormInfo/utils'; +import FormItem from './FormItem'; + +const FormSelectorWrapper = styled.div` + font-size: 14px; +`; + +const HeaderText = styled.div` + font-size: 16px; + font-weight: 600; + margin-bottom: 8px; +`; + +const Subheader = styled.div` + margin-bottom: 8px; +`; + +const StyledDivider = styled(Divider)` + margin: 8px 0; +`; + +interface Props { + selectFormUrn: (urn: string) => void; +} + +export default function FormSelector({ selectFormUrn }: Props) { + const { entityData } = useEntityData(); + const formAssociations = getFormAssociations(entityData); + + return ( + + Choose Which Form to View + + There are multiple open requests for this entity. Choose which one you’d like to view or complete. + + {formAssociations.map((formAssociation, index) => ( +
    + + {index !== formAssociations.length - 1 && } +
    + ))} +
    + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/ProgressBar.tsx b/datahub-web-react/src/app/entity/shared/entityForm/ProgressBar.tsx new file mode 100644 index 0000000000000..a4473fc825e90 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/ProgressBar.tsx @@ -0,0 +1,38 @@ +import { Progress } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import useGetPromptInfo from '../containers/profile/sidebar/FormInfo/useGetPromptInfo'; +import { ANTD_GRAY } from '../constants'; + +const StyledProgress = styled(Progress)` + &&& .ant-progress-outer { + display: flex; + flex-direction: column; + align-items: flex-start; + gap: 8px; + align-self: stretch; + } + + .ant-progress-bg { + height: 4px !important; + } +`; + +interface Props { + formUrn: string; +} +export default function ProgressBar({ formUrn }: Props) { + const { totalRequiredSchemaFieldPrompts, numRequiredPromptsRemaining, requiredEntityPrompts } = + useGetPromptInfo(formUrn); + const totalRequiredPrompts = requiredEntityPrompts.length + totalRequiredSchemaFieldPrompts; + const percent = ((totalRequiredPrompts - numRequiredPromptsRemaining) / totalRequiredPrompts) * 100; + + return ( + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/__tests__/Form.test.tsx b/datahub-web-react/src/app/entity/shared/entityForm/__tests__/Form.test.tsx new file mode 100644 index 0000000000000..d1e458d37bd5e --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/__tests__/Form.test.tsx @@ -0,0 +1,62 @@ +import { MockedProvider } from '@apollo/client/testing'; +import React from 'react'; +import { render, waitFor } from '@testing-library/react'; +import EntityContext from '../../EntityContext'; +import { mockEntityDataWithFieldPrompts, mockEntityData } from '../mocks'; +import { EntityType } from '../../../../../types.generated'; +import Form from '../Form'; +import TestPageContainer from '../../../../../utils/test-utils/TestPageContainer'; +import { mocks } from '../../../../../Mocks'; + +beforeEach(() => { + // IntersectionObserver isn't available in test environment + const mockIntersectionObserver = vi.fn(); + mockIntersectionObserver.mockReturnValue({ + observe: () => null, + unobserve: () => null, + disconnect: () => null, + }); + window.IntersectionObserver = mockIntersectionObserver; +}); + +describe('Form', () => { + it('should show field-level header if there are schema field prompts', async () => { + const { getByTestId, findByTestId } = render( + + + + + + + , + ); + // DeferredRenderComponent defers rendering for a short period, wait for that + await waitFor(() => findByTestId('field-level-requirements')); + expect(getByTestId('field-level-requirements')).toBeInTheDocument(); + }); + + it('should not show field-level header if there are no schema field prompts', () => { + const { queryByTestId } = render( + + + + + + + , + ); + expect(queryByTestId('field-level-requirements')).not.toBeInTheDocument(); + }); +}); diff --git a/datahub-web-react/src/app/entity/shared/entityForm/__tests__/useShouldShowVerificationPrompt.test.ts b/datahub-web-react/src/app/entity/shared/entityForm/__tests__/useShouldShowVerificationPrompt.test.ts new file mode 100644 index 0000000000000..93413fcf63494 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/__tests__/useShouldShowVerificationPrompt.test.ts @@ -0,0 +1,48 @@ +import { FormType, FormVerificationAssociation } from '../../../../../types.generated'; +import { 
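A worked example of ProgressBar's percent calculation above, with illustrative counts only:

```typescript
// 4 required entity prompts plus 2 field prompts across 8 schema fields:
const totalRequiredPrompts = 4 + 2 * 8; // 20
const numRequiredPromptsRemaining = 5;
// Same formula as ProgressBar: completed over total, as a percentage.
const percent = ((totalRequiredPrompts - numRequiredPromptsRemaining) / totalRequiredPrompts) * 100; // 75
```
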
shouldShowVerificationPrompt } from '../useShouldShowVerificationPrompt'; + +describe('shouldShowVerificationPrompt', () => { + const formVerification = { + form: { urn: 'urn:li:form:1' }, + lastModified: { time: 100 }, + } as FormVerificationAssociation; + + it('should return true if the form is verification, there are no prompts remaining, and no verification', () => { + const shouldShow = shouldShowVerificationPrompt({ + formType: FormType.Verification, + numRequiredPromptsRemaining: 0, + }); + + expect(shouldShow).toBe(true); + }); + + it('should return false if the form was verified', () => { + const shouldShow = shouldShowVerificationPrompt({ + formType: FormType.Verification, + numRequiredPromptsRemaining: 0, + formVerification, + }); + + expect(shouldShow).toBe(false); + }); + + it('should return false if the form is not of type verification', () => { + const shouldShow = shouldShowVerificationPrompt({ + formType: FormType.Completion, + numRequiredPromptsRemaining: 0, + formVerification, + }); + + expect(shouldShow).toBe(false); + }); + + it('should return false if the form has prompts remaining', () => { + const shouldShow = shouldShowVerificationPrompt({ + formType: FormType.Verification, + numRequiredPromptsRemaining: 1, + formVerification, + }); + + expect(shouldShow).toBe(false); + }); +}); diff --git a/datahub-web-react/src/app/entity/shared/entityForm/constants.ts b/datahub-web-react/src/app/entity/shared/entityForm/constants.ts new file mode 100644 index 0000000000000..fb62ab5de7323 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/constants.ts @@ -0,0 +1,3 @@ +import { FormPromptType } from '../../../../types.generated'; + +export const SCHEMA_FIELD_PROMPT_TYPES = [FormPromptType.FieldsStructuredProperty]; diff --git a/datahub-web-react/src/app/entity/shared/entityForm/mocks.ts b/datahub-web-react/src/app/entity/shared/entityForm/mocks.ts new file mode 100644 index 0000000000000..b29848e4b119b --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/mocks.ts @@ -0,0 +1,221 @@ +import { EntityType, FormPromptType, FormType } from '../../../../types.generated'; +import { GenericEntityProperties } from '../types'; + +const form1 = { + urn: 'urn:li:form:1', + type: EntityType.Form, + info: { + name: '', + type: FormType.Verification, + prompts: [ + { + id: '1', + type: FormPromptType.FieldsStructuredProperty, + title: '', + formUrn: 'urn:li:form:1', + required: false, + }, + { + id: '2', + type: FormPromptType.FieldsStructuredProperty, + title: '', + formUrn: 'urn:li:form:1', + required: false, + }, + ], + actors: { + owners: true, + users: null, + groups: null, + isAssignedToMe: false, + }, + }, +}; + +const form2 = { + urn: 'urn:li:form:2', + type: EntityType.Form, + info: { + name: '', + prompts: [ + { + id: '3', + type: FormPromptType.StructuredProperty, + title: '', + formUrn: 'urn:li:form:2', + required: true, + }, + { + id: '4', + type: FormPromptType.FieldsStructuredProperty, + title: '', + formUrn: 'urn:li:form:2', + required: false, + }, + { + id: '5', + type: FormPromptType.StructuredProperty, + title: '', + formUrn: 'urn:li:form:2', + required: false, + }, + ], + type: FormType.Verification, + actors: { + owners: false, + users: null, + groups: null, + isAssignedToMe: true, + }, + }, +}; + +export const mockEntityData = { + schemaMetadata: { fields: [{ fieldPath: 'test' }] }, + forms: { + verifications: [ + { + form: form2, + lastModified: { + actor: { + urn: 'urn:li:corpuser:test', + }, + time: 100, + }, + }, + { + form: form2, + 
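These tests pin down the predicate's truth table. A minimal implementation consistent with them (a sketch only; the real hook lives in useShouldShowVerificationPrompt.ts, which this excerpt does not show) would be:

```typescript
import { FormType, FormVerificationAssociation } from '../../../../../types.generated';

interface ShouldShowVerificationPromptProps {
    formType?: FormType;
    numRequiredPromptsRemaining: number;
    formVerification?: FormVerificationAssociation;
}

// Show the verification prompt only for a verification-type form that has no
// required prompts left and has not already been verified.
export function shouldShowVerificationPrompt({
    formType,
    numRequiredPromptsRemaining,
    formVerification,
}: ShouldShowVerificationPromptProps): boolean {
    return formType === FormType.Verification && numRequiredPromptsRemaining === 0 && !formVerification;
}
```
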
lastModified: { + actor: { + urn: 'urn:li:corpuser:test', + }, + time: 101, + }, + }, + ], + incompleteForms: [ + { + completedPrompts: [ + { + id: '1', + lastModified: { time: 123 }, + fieldAssociations: { + completedFieldPrompts: [ + { fieldPath: 'test3', lastModified: { time: 123 } }, + { fieldPath: 'test4', lastModified: { time: 123 } }, + ], + }, + }, + ], + incompletePrompts: [ + { + id: '2', + lastModified: { time: 1234 }, + fieldAssociations: { + completedFieldPrompts: [ + { fieldPath: 'test1', lastModified: { time: 123 } }, + { fieldPath: 'test2', lastModified: { time: 123 } }, + { fieldPath: 'test3', lastModified: { time: 123 } }, + ], + }, + }, + ], + associatedUrn: '', + form: form1, + }, + ], + completedForms: [ + { + completedPrompts: [{ id: '3', lastModified: { time: 1234 } }], + incompletePrompts: [ + { id: '4', lastModified: { time: 123 } }, + { id: '5', lastModified: { time: 123 } }, + ], + associatedUrn: '', + form: form2, + }, + { + completedPrompts: [{ id: '6', lastModified: { time: 1234 } }], + associatedUrn: '', + form: { + urn: 'urn:li:form:3', + type: EntityType.Form, + info: { + name: '', + prompts: [ + { + id: '6', + type: FormPromptType.StructuredProperty, + title: '', + formUrn: 'urn:li:form:3', + required: true, + }, + ], + type: FormType.Completion, + actors: { + owners: true, + users: null, + groups: null, + isAssignedToMe: false, + }, + }, + }, + }, + ], + }, +} as GenericEntityProperties; + +export const mockEntityDataAllVerified = { + ...mockEntityData, + forms: { + ...mockEntityData.forms, + verifications: [ + { + form: form2, + lastModified: { + actor: { + urn: 'urn:li:corpuser:test', + }, + time: 100, + }, + }, + { + form: form1, + lastModified: { + actor: { + urn: 'urn:li:corpuser:test', + }, + time: 101, + }, + }, + ], + }, +} as GenericEntityProperties; + +export const mockEntityDataWithFieldPrompts = { + ...mockEntityData, + forms: { + ...mockEntityData.forms, + incompleteForms: [ + { + ...(mockEntityData as any).forms.incompleteForms[0], + form: { + urn: 'urn:li:form:1', + type: EntityType.Form, + info: { + name: '', + prompts: [ + { + id: '1', + type: FormPromptType.FieldsStructuredProperty, + title: '', + formUrn: 'urn:li:form:1', + required: false, + }, + ], + }, + }, + }, + ], + }, +} as GenericEntityProperties; diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/Prompt.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/Prompt.tsx new file mode 100644 index 0000000000000..23512dff59939 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/Prompt.tsx @@ -0,0 +1,65 @@ +import { message } from 'antd'; +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { + FormPrompt as PromptEntity, + FormPromptType, + SubmitFormPromptInput, + SchemaField, +} from '../../../../../types.generated'; +import StructuredPropertyPrompt from './StructuredPropertyPrompt/StructuredPropertyPrompt'; +import { useSubmitFormPromptMutation } from '../../../../../graphql/form.generated'; +import { useMutationUrn } from '../../EntityContext'; + +export const PromptWrapper = styled.div` + background-color: white; + border-radius: 8px; + padding: 24px; + margin-bottom: 8px; +`; + +interface Props { + promptNumber?: number; + prompt: PromptEntity; + field?: SchemaField; + associatedUrn?: string; +} + +export default function Prompt({ promptNumber, prompt, field, associatedUrn }: Props) { + const [optimisticCompletedTimestamp, setOptimisticCompletedTimestamp] = useState(null); + const 
urn = useMutationUrn(); + const [submitFormPrompt] = useSubmitFormPromptMutation(); + + function submitResponse(input: SubmitFormPromptInput, onSuccess: () => void) { + submitFormPrompt({ variables: { urn: associatedUrn || urn, input } }) + .then(() => { + onSuccess(); + setOptimisticCompletedTimestamp(Date.now()); + }) + .catch(() => { + message.error('Unknown error while submitting form response'); + }); + } + + return ( + + {prompt.type === FormPromptType.StructuredProperty && ( + + )} + {prompt.type === FormPromptType.FieldsStructuredProperty && ( + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/CompletedPromptAuditStamp.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/CompletedPromptAuditStamp.tsx new file mode 100644 index 0000000000000..ff11f0db5f8bc --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/CompletedPromptAuditStamp.tsx @@ -0,0 +1,69 @@ +import { Typography } from 'antd'; +import React from 'react'; +import Icon from '@ant-design/icons'; +import styled from 'styled-components'; +import GreenCircleIcon from '../../../../../../images/greenCircleTwoTone.svg?react'; +import { ANTD_GRAY_V2 } from '../../../constants'; + +const PadIcon = styled.div` + align-items: flex-start; + padding-top: 1px; + padding-right: 2px; +`; + +const CompletedPromptContainer = styled.div` + display: flex; + align-self: end; + max-width: 350px; +`; + +const AuditStamp = styled.div` + color: #373d44; + font-size: 14px; + font-family: Manrope; + font-weight: 600; + line-height: 18px; + overflow: hidden; + white-space: nowrap; + display: flex; +`; + +const AuditStampSubTitle = styled.div` + color: ${ANTD_GRAY_V2[8]}; + font-size: 12px; + font-family: Manrope; + font-weight: 500; + line-height: 16px; + word-wrap: break-word; +`; + +const StyledIcon = styled(Icon)` + font-size: 16px; + margin-right: 4px; +`; + +const AuditWrapper = styled.div` + max-width: 95%; +`; + +interface Props { + completedByName: string; + completedByTime: string; +} + +export default function CompletedPromptAuditStamp({ completedByName, completedByTime }: Props) { + return ( + + + + + + + Completed by  + {completedByName} + + {completedByTime} + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DateInput.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DateInput.tsx new file mode 100644 index 0000000000000..23c322ea4c8f2 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DateInput.tsx @@ -0,0 +1,18 @@ +import { DatePicker } from 'antd'; +import React from 'react'; +import moment, { Moment } from 'moment'; + +interface Props { + selectedValues: any[]; + updateSelectedValues: (values: string[] | number[]) => void; +} + +export default function DateInput({ selectedValues, updateSelectedValues }: Props) { + function updateInput(_: Moment | null, value: string) { + updateSelectedValues([value]); + } + + const currentValue = selectedValues[0] ? 
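The submitResponse flow above pairs the GraphQL mutation with a locally stored completion timestamp, so the UI can show the prompt as complete before any refetch lands. A minimal sketch of that pattern in isolation (the names here are hypothetical stand-ins, not the patch's API):

```typescript
// saveResponse is a hypothetical stand-in for the submitFormPrompt mutation call.
async function submitWithOptimisticCompletion(
    saveResponse: () => Promise<unknown>,
    onSuccess: () => void,
    setOptimisticCompletedTimestamp: (time: number) => void,
) {
    try {
        await saveResponse();
        onSuccess();
        // Mark the prompt complete locally right away, before any refetch lands.
        setOptimisticCompletedTimestamp(Date.now());
    } catch {
        // On failure the prompt simply stays incomplete; the UI surfaces an error.
    }
}
```
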
moment(selectedValues[0]) : undefined; + + return ; +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DropdownLabel.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DropdownLabel.tsx new file mode 100644 index 0000000000000..0e002f8cc6150 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/DropdownLabel.tsx @@ -0,0 +1,34 @@ +import React from 'react'; +import styled from 'styled-components'; + +const StyledValue = styled.div` + font-family: Manrope; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 22px; + color: #373d44; +`; + +const StyledDescription = styled.div` + font-family: Manrope; + font-size: 12px; + font-style: normal; + font-weight: 500; + line-height: 16px; + color: #5e666e; +`; + +interface Props { + value: string | number | null; + description?: string | null; +} + +export default function DropdownLabel({ value, description }: Props) { + return ( + <> + {value} + {description} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx new file mode 100644 index 0000000000000..606430e68400f --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx @@ -0,0 +1,82 @@ +import { Checkbox, Select, Tag } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { ANTD_GRAY_V2 } from '../../../constants'; +import { getStructuredPropertyValue } from '../../../utils'; +import ValueDescription from './ValueDescription'; +import { AllowedValue } from '../../../../../../types.generated'; +import DropdownLabel from './DropdownLabel'; + +const StyledCheckbox = styled(Checkbox)` + display: flex; + margin: 0 0 4px 0; + .ant-checkbox-inner { + border-color: ${ANTD_GRAY_V2[8]}; + } + &&& { + margin-left: 0; + } +`; + +const StyleTag = styled(Tag)` + font-family: Manrope; + font-size: 14px; + font-style: normal; + font-weight: 400; +`; + +const DROPDOWN_STYLE = { minWidth: 320, maxWidth: 320, textAlign: 'left' }; + +interface Props { + selectedValues: any[]; + allowedValues: AllowedValue[]; + toggleSelectedValue: (value: string | number) => void; + updateSelectedValues: (values: string[] | number[]) => void; +} + +export default function MultiSelectInput({ + toggleSelectedValue, + updateSelectedValues, + allowedValues, + selectedValues, +}: Props) { + return allowedValues.length > 5 ? 
( + selectSingleValue(value)} + optionLabelProp="value" + > + {allowedValues.map((allowedValue) => ( + + + + ))} + + ) : ( + selectSingleValue(e.target.value)}> + {allowedValues.map((allowedValue) => ( + + {getStructuredPropertyValue(allowedValue.value)} + {allowedValue.description && } + + ))} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StringInput.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StringInput.tsx new file mode 100644 index 0000000000000..8c69174a35bf3 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StringInput.tsx @@ -0,0 +1,31 @@ +import { Input } from 'antd'; +import React, { ChangeEvent } from 'react'; +import styled from 'styled-components'; +import { ANTD_GRAY_V2 } from '../../../constants'; +import { PropertyCardinality } from '../../../../../../types.generated'; +import MultipleStringInput from './MultipleStringInput'; + +const StyledInput = styled(Input)` + width: 75%; + min-width: 350px; + max-width: 500px; + border: 1px solid ${ANTD_GRAY_V2[6]}; +`; + +interface Props { + selectedValues: any[]; + cardinality?: PropertyCardinality | null; + updateSelectedValues: (values: string[] | number[]) => void; +} + +export default function StringInput({ selectedValues, cardinality, updateSelectedValues }: Props) { + function updateInput(event: ChangeEvent) { + updateSelectedValues([event.target.value]); + } + + if (cardinality === PropertyCardinality.Multiple) { + return ; + } + + return ; +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StructuredPropertyPrompt.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StructuredPropertyPrompt.tsx new file mode 100644 index 0000000000000..d7f2977915681 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/StructuredPropertyPrompt.tsx @@ -0,0 +1,207 @@ +import { Button } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { + EntityType, + FormPrompt, + PropertyCardinality, + SchemaField, + StdDataType, + SubmitFormPromptInput, +} from '../../../../../../types.generated'; +import SingleSelectInput from './SingleSelectInput'; +import MultiSelectInput from './MultiSelectInput'; +import useStructuredPropertyPrompt from './useStructuredPropertyPrompt'; +import StringInput from './StringInput'; +import RichTextInput from './RichTextInput'; +import DateInput from './DateInput'; +import NumberInput from './NumberInput'; +import UrnInput from './UrnInput/UrnInput'; +import { useEntityData } from '../../../EntityContext'; +import { + findCompletedFieldPrompt, + findPromptAssociation, + getCompletedPrompts, + getIncompletePrompts, + isFieldPromptComplete, + isPromptComplete, +} from '../../../containers/profile/sidebar/FormInfo/utils'; +import { useEntityRegistry } from '../../../../../useEntityRegistry'; +import { getTimeFromNow } from '../../../../../shared/time/timeUtils'; +import CompletedPromptAuditStamp from './CompletedPromptAuditStamp'; +import { applyOpacity } from '../../../../../shared/styleUtils'; +import { useUserContext } from '../../../../../context/useUserContext'; + +const PromptWrapper = styled.div<{ displayBulkStyles?: boolean }>` + display: flex; + justify-content: space-between; + height: min-content; + ${(props) => props.displayBulkStyles && `color: white;`} +`; + +const PromptTitle = 
styled.div<{ displayBulkStyles?: boolean }>` + font-size: 16px; + font-weight: 600; + line-height: 20px; + ${(props) => props.displayBulkStyles && `font-size: 20px;`} +`; + +const RequiredText = styled.span<{ displayBulkStyles?: boolean }>` + font-size: 12px; + margin-left: 4px; + color: #a8071a; + ${(props) => + props.displayBulkStyles && + ` + color: #FFCCC7; + margin-left: 8px; + `} +`; + +export const PromptSubTitle = styled.div` + font-size: 14px; + font-weight: 500; + line-height: 18px; + margin-top: 4px; +`; + +const InputSection = styled.div` + margin-top: 8px; +`; + +const StyledButton = styled(Button)` + align-self: end; + margin-left: 8px; + + &:focus { + box-shadow: 0 0 3px 2px ${(props) => applyOpacity(props.theme.styles['primary-color'] || '', 50)}; + } +`; + +const PromptInputWrapper = styled.div` + flex: 1; +`; + +interface Props { + promptNumber?: number; + prompt: FormPrompt; + submitResponse: (input: SubmitFormPromptInput, onSuccess: () => void) => void; + field?: SchemaField; + optimisticCompletedTimestamp?: number | null; +} + +export default function StructuredPropertyPrompt({ + promptNumber, + prompt, + submitResponse, + field, + optimisticCompletedTimestamp, +}: Props) { + const { + isSaveVisible, + selectedValues, + selectSingleValue, + toggleSelectedValue, + submitStructuredPropertyResponse, + updateSelectedValues, + } = useStructuredPropertyPrompt({ prompt, submitResponse, field }); + const { entityData } = useEntityData(); + const { user } = useUserContext(); + const entityRegistry = useEntityRegistry(); + const completedPrompts = getCompletedPrompts(entityData); + const incompletePrompts = getIncompletePrompts(entityData); + const promptAssociation = findPromptAssociation(prompt, completedPrompts.concat(incompletePrompts)); + const completedFieldPrompt = findCompletedFieldPrompt(field, promptAssociation); + + const structuredProperty = prompt.structuredPropertyParams?.structuredProperty; + if (!structuredProperty) return null; + + const { displayName, description, allowedValues, cardinality, valueType } = structuredProperty.definition; + + function getCompletedByName() { + let actor = completedFieldPrompt?.lastModified?.actor || promptAssociation?.lastModified?.actor; + if (optimisticCompletedTimestamp) { + actor = user; + } + return actor ? entityRegistry.getDisplayName(EntityType.CorpUser, actor) : ''; + } + + function getCompletedByRelativeTime() { + let completedTimestamp = completedFieldPrompt?.lastModified?.time || promptAssociation?.lastModified?.time; + if (optimisticCompletedTimestamp) { + completedTimestamp = optimisticCompletedTimestamp; + } + return completedTimestamp ? getTimeFromNow(completedTimestamp) : ''; + } + + return ( + + + + {promptNumber !== undefined && <>{promptNumber}. 
} + {displayName} + {prompt.required && required} + + {description && {description}} + + {allowedValues && allowedValues.length > 0 && ( + <> + {cardinality === PropertyCardinality.Single && ( + + )} + {cardinality === PropertyCardinality.Multiple && ( + + )} + + )} + {!allowedValues && valueType.info.type === StdDataType.String && ( + + )} + {!allowedValues && valueType.info.type === StdDataType.RichText && ( + + )} + {!allowedValues && valueType.info.type === StdDataType.Date && ( + + )} + {!allowedValues && valueType.info.type === StdDataType.Number && ( + + )} + {!allowedValues && valueType.info.type === StdDataType.Urn && ( + + )} + + + {isSaveVisible && selectedValues.length > 0 && ( + + Save + + )} + {(isPromptComplete(prompt, completedPrompts) || + isFieldPromptComplete(field, promptAssociation) || + optimisticCompletedTimestamp) && + !isSaveVisible && ( + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/SelectedEntity.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/SelectedEntity.tsx new file mode 100644 index 0000000000000..d5ed2e9693fc9 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/SelectedEntity.tsx @@ -0,0 +1,40 @@ +import { Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { Entity } from '../../../../../../../types.generated'; +import { useEntityRegistry } from '../../../../../../useEntityRegistry'; +import EntityIcon from '../../../../components/styled/EntityIcon'; + +const SelectedEntityWrapper = styled.div` + display: flex; + align-items: center; + font-size: 14px; + overflow: hidden; +`; + +const IconWrapper = styled.span` + margin-right: 4px; + display: flex; +`; + +const NameWrapper = styled(Typography.Text)` + margin-right: 4px; +`; + +interface Props { + entity: Entity; +} + +export default function SelectedEntity({ entity }: Props) { + const entityRegistry = useEntityRegistry(); + const displayName = entityRegistry.getDisplayName(entity.type, entity); + + return ( + + + + + {displayName} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/UrnInput.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/UrnInput.tsx new file mode 100644 index 0000000000000..54d53c75607e5 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/UrnInput.tsx @@ -0,0 +1,78 @@ +import { LoadingOutlined } from '@ant-design/icons'; +import { Select } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { StructuredPropertyEntity } from '../../../../../../../types.generated'; +import useUrnInput from './useUrnInput'; +import SelectedEntity from './SelectedEntity'; + +const EntitySelect = styled(Select)` + width: 75%; + min-width: 400px; + max-width: 600px; + + .ant-select-selector { + padding: 4px; + } +`; + +const LoadingWrapper = styled.div` + padding: 8px; + display: flex; + justify-content: center; + + svg { + height: 24px; + width: 24px; + } +`; + +interface Props { + structuredProperty: StructuredPropertyEntity; + selectedValues: any[]; + updateSelectedValues: (values: string[] | number[]) => void; +} + +export default function UrnInput({ structuredProperty, selectedValues, updateSelectedValues }: Props) { + const { + onSelectValue, + 
onDeselectValue, + handleSearch, + tagRender, + selectedEntities, + searchResults, + loading, + entityTypeNames, + } = useUrnInput({ structuredProperty, selectedValues, updateSelectedValues }); + + const placeholder = `Search for ${entityTypeNames ? entityTypeNames.map((name) => ` ${name}`) : 'entities'}...`; + + return ( + onSelectValue(urn)} + onDeselect={(urn: any) => onDeselectValue(urn)} + onSearch={(value: string) => handleSearch(value.trim())} + tagRender={tagRender} + value={selectedEntities.map((e) => e.urn)} + loading={loading} + notFoundContent={ + loading ? ( + + + + ) : undefined + } + > + {searchResults?.map((searchResult) => ( + + + + ))} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/useUrnInput.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/useUrnInput.tsx new file mode 100644 index 0000000000000..4f621f7018f12 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/UrnInput/useUrnInput.tsx @@ -0,0 +1,108 @@ +import { Tag } from 'antd'; +import React, { useEffect, useMemo, useState } from 'react'; +import styled from 'styled-components'; +import { Entity, PropertyCardinality, StructuredPropertyEntity } from '../../../../../../../types.generated'; +import { useGetSearchResultsForMultipleLazyQuery } from '../../../../../../../graphql/search.generated'; +import { useEntityData } from '../../../../EntityContext'; +import { getInitialEntitiesForUrnPrompt } from '../utils'; +import SelectedEntity from './SelectedEntity'; +import { useEntityRegistry } from '../../../../../../useEntityRegistry'; +import usePrevious from '../../../../../../shared/usePrevious'; + +const StyleTag = styled(Tag)` + margin: 2px; + padding: 4px 6px; + display: flex; + justify-content: start; + align-items: center; + white-space: nowrap; + opacity: 1; + color: #434343; + line-height: 16px; + font-size: 12px; + max-width: 100%; +`; + +interface Props { + structuredProperty: StructuredPropertyEntity; + selectedValues: any[]; + updateSelectedValues: (values: any[]) => void; +} + +export default function useUrnInput({ structuredProperty, selectedValues, updateSelectedValues }: Props) { + const entityRegistry = useEntityRegistry(); + const { entityData } = useEntityData(); + const initialEntities = useMemo( + () => getInitialEntitiesForUrnPrompt(structuredProperty.urn, entityData, selectedValues), + [structuredProperty.urn, entityData, selectedValues], + ); + + // we store the selected entity objects here to render display name, platform, etc. 
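+    // (illustrative aside, not from this commit; the urn is hypothetical): with
+    // selectedValues = ['urn:li:dataset:abc'], selectedEntities would hold the matching
+    // entity object, e.g. { urn: 'urn:li:dataset:abc', type: EntityType.Dataset }, so the
+    // tag renderer below can show a display name and icon without re-resolving the urn.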
+ // selectedValues contains a list of urns that we store for the structured property values + const [selectedEntities, setSelectedEntities] = useState(initialEntities); + const [searchAcrossEntities, { data: searchData, loading }] = useGetSearchResultsForMultipleLazyQuery(); + const searchResults = + searchData?.searchAcrossEntities?.searchResults?.map((searchResult) => searchResult.entity) || []; + const allowedEntityTypes = structuredProperty.definition.typeQualifier?.allowedTypes?.map( + (allowedType) => allowedType.info.type, + ); + const entityTypeNames: string[] | undefined = allowedEntityTypes?.map( + (entityType) => entityRegistry.getEntityName(entityType) || '', + ); + const isMultiple = structuredProperty.definition.cardinality === PropertyCardinality.Multiple; + + const previousEntityUrn = usePrevious(entityData?.urn); + useEffect(() => { + if (entityData?.urn !== previousEntityUrn) { + setSelectedEntities(initialEntities || []); + } + }, [entityData?.urn, previousEntityUrn, initialEntities]); + + function handleSearch(query: string) { + if (query.length > 0) { + searchAcrossEntities({ variables: { input: { query, types: allowedEntityTypes } } }); + } + } + + const onSelectValue = (urn: string) => { + const newValues = isMultiple ? [...selectedValues, urn] : [urn]; + updateSelectedValues(newValues); + + const selectedEntity = searchResults?.find((result) => result.urn === urn) as Entity; + const newEntities = isMultiple ? [...selectedEntities, selectedEntity] : [selectedEntity]; + setSelectedEntities(newEntities); + }; + + const onDeselectValue = (urn: string) => { + const newValues = selectedValues.filter((value) => value !== urn); + updateSelectedValues(newValues); + + const newSelectedEntities = selectedEntities.filter((entity) => entity.urn !== urn); + setSelectedEntities(newSelectedEntities); + }; + + const tagRender = (props: any) => { + // eslint-disable-next-line react/prop-types + const { closable, onClose, value } = props; + const selectedEntity = selectedEntities.find((term) => term.urn === value); + + if (!selectedEntity) return <>; + + return ( + + + + ); + }; + + return { + tagRender, + handleSearch, + onSelectValue, + onDeselectValue, + selectedEntities, + searchResults, + loading, + entityTypeNames, + }; +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/ValueDescription.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/ValueDescription.tsx new file mode 100644 index 0000000000000..716bd74fe6630 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/ValueDescription.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import styled from 'styled-components'; +import { ANTD_GRAY_V2 } from '../../../constants'; + +const DescriptionText = styled.span` + color: ${ANTD_GRAY_V2[8]}; +`; + +const DescriptionSeparator = styled.span` + margin: 0 8px; +`; + +interface Props { + description: string; +} + +export default function ValueDescription({ description }: Props) { + return ( + <> + - + {description} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/useStructuredPropertyPrompt.ts b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/useStructuredPropertyPrompt.ts new file mode 100644 index 0000000000000..d238a17b09799 --- /dev/null +++ 
b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/useStructuredPropertyPrompt.ts @@ -0,0 +1,99 @@ +import { useEffect, useMemo, useState } from 'react'; +import { useEntityContext } from '../../../EntityContext'; +import { FormPrompt, FormPromptType, SchemaField, SubmitFormPromptInput } from '../../../../../../types.generated'; +import { getInitialValues } from './utils'; +import usePrevious from '../../../../../shared/usePrevious'; +import { useGetEntityWithSchema } from '../../../tabs/Dataset/Schema/useGetEntitySchema'; +import { FormView, useEntityFormContext } from '../../EntityFormContext'; + +interface Props { + prompt: FormPrompt; + submitResponse: (input: SubmitFormPromptInput, onSuccess: () => void) => void; + field?: SchemaField; +} + +export default function useStructuredPropertyPrompt({ prompt, submitResponse, field }: Props) { + const { refetch: refetchSchema } = useGetEntityWithSchema(); + const { refetch, entityData } = useEntityContext(); + const { selectedPromptId, formView } = useEntityFormContext(); + const [isSaveVisible, setIsSaveVisible] = useState(false); + const initialValues = useMemo( + () => (formView === FormView.BY_ENTITY ? getInitialValues(prompt, entityData, field) : []), + [formView, entityData, prompt, field], + ); + const [selectedValues, setSelectedValues] = useState(initialValues || []); + + const structuredProperty = prompt.structuredPropertyParams?.structuredProperty; + + const previousEntityUrn = usePrevious(entityData?.urn); + useEffect(() => { + if (entityData?.urn !== previousEntityUrn) { + setSelectedValues(initialValues || []); + } + }, [entityData?.urn, previousEntityUrn, initialValues]); + + const previousSelectedPromptId = usePrevious(selectedPromptId); + useEffect(() => { + if (selectedPromptId !== previousSelectedPromptId) { + setIsSaveVisible(false); + setSelectedValues(initialValues || []); + } + }, [previousSelectedPromptId, selectedPromptId, initialValues]); + + // respond to prompts + function selectSingleValue(value: string | number) { + setIsSaveVisible(true); + setSelectedValues([value as string]); + } + + function toggleSelectedValue(value: string | number) { + setIsSaveVisible(true); + if (selectedValues.includes(value)) { + setSelectedValues((prev) => prev.filter((v) => v !== value)); + } else { + setSelectedValues((prev) => [...prev, value]); + } + } + + function updateSelectedValues(values: any[]) { + setSelectedValues(values); + setIsSaveVisible(true); + } + + // submit structured property prompt + function submitStructuredPropertyResponse() { + submitResponse( + { + promptId: prompt.id, + formUrn: prompt.formUrn, + type: field ? 
FormPromptType.FieldsStructuredProperty : FormPromptType.StructuredProperty, + fieldPath: field?.fieldPath, + structuredPropertyParams: { + structuredPropertyUrn: structuredProperty?.urn as string, + values: selectedValues.map((value) => { + if (typeof value === 'string') { + return { stringValue: value as string }; + } + return { numberValue: value as number }; + }), + }, + }, + () => { + refetch(); + setIsSaveVisible(false); + if (field) { + refetchSchema(); + } + }, + ); + } + + return { + isSaveVisible, + selectedValues, + selectSingleValue, + toggleSelectedValue, + submitStructuredPropertyResponse, + updateSelectedValues, + }; +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/utils.ts b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/utils.ts new file mode 100644 index 0000000000000..1050c5fcde728 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/utils.ts @@ -0,0 +1,36 @@ +import { getStructuredPropertyValue } from '../../../utils'; +import { GenericEntityProperties } from '../../../types'; +import { + Entity, + FormPrompt, + PropertyValue, + SchemaField, + StructuredPropertiesEntry, +} from '../../../../../../types.generated'; + +export function getInitialValues(prompt: FormPrompt, entityData: GenericEntityProperties | null, field?: SchemaField) { + const structuredProperty = prompt.structuredPropertyParams?.structuredProperty; + let structuredPropertyAssignment: StructuredPropertiesEntry | undefined; + if (field) { + structuredPropertyAssignment = field?.schemaFieldEntity?.structuredProperties?.properties?.find( + (propAssignment) => propAssignment.structuredProperty.urn === structuredProperty?.urn, + ); + } else { + structuredPropertyAssignment = entityData?.structuredProperties?.properties?.find( + (propAssignment) => propAssignment.structuredProperty.urn === structuredProperty?.urn, + ); + } + return structuredPropertyAssignment?.values.map((value) => getStructuredPropertyValue(value as PropertyValue)); +} + +export function getInitialEntitiesForUrnPrompt( + structuredPropertyUrn: string, + entityData: GenericEntityProperties | null, + selectedValues: any[], +) { + const structuredPropertyEntry = entityData?.structuredProperties?.properties?.find( + (p) => p.structuredProperty.urn === structuredPropertyUrn, + ); + const entities = structuredPropertyEntry?.valueEntities?.filter((e) => selectedValues.includes(e?.urn)); + return entities ? 
(entities as Entity[]) : []; +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/VerificationPrompt.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/VerificationPrompt.tsx new file mode 100644 index 0000000000000..7578436cc993a --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/VerificationPrompt.tsx @@ -0,0 +1,72 @@ +import React, { useEffect, useRef } from 'react'; +import styled from 'styled-components'; +import { Button, Divider, message } from 'antd'; +import { useVerifyFormMutation } from '../../../../../graphql/form.generated'; +import { useEntityContext, useMutationUrn } from '../../EntityContext'; +import { PromptWrapper } from './Prompt'; +import { useUpdateEducationStepsAllowList } from '../../../../onboarding/useUpdateEducationStepsAllowList'; +import { FORM_ASSET_COMPLETION } from '../../../../onboarding/config/FormOnboardingConfig'; + +const ContentWrapper = styled.div` + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + font-size: 16px; + font-weight: 600; +`; + +const VerifyButton = styled(Button)` + margin-top: 16px; + width: 60%; + max-width: 600px; + font-size: 16px; + font-weight: 600; + height: auto; +`; + +interface Props { + formUrn: string; + associatedUrn?: string; +} + +export default function VerificationPrompt({ formUrn, associatedUrn }: Props) { + const urn = useMutationUrn(); + const { refetch } = useEntityContext(); + const [verifyFormMutation] = useVerifyFormMutation(); + const { addIdToAllowList } = useUpdateEducationStepsAllowList(); + + function verifyForm() { + verifyFormMutation({ variables: { input: { entityUrn: associatedUrn || urn || '', formUrn } } }) + .then(() => { + refetch(); + addIdToAllowList(FORM_ASSET_COMPLETION); + }) + .catch(() => { + message.error('Error when verifying responses on form'); + }); + } + + const verificationPrompt = useRef(null); + useEffect(() => { + (verificationPrompt?.current as any)?.scrollIntoView({ + behavior: 'smooth', + block: 'start', + inline: 'nearest', + }); + }, []); + + return ( + <> + + + + All questions for verification have been completed. Please verify your responses. 
+ + Verify Responses + + + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/DropdownHeader.tsx b/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/DropdownHeader.tsx new file mode 100644 index 0000000000000..0d09cce4a97aa --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/DropdownHeader.tsx @@ -0,0 +1,62 @@ +import Icon from '@ant-design/icons/lib/components/Icon'; +import React, { useMemo } from 'react'; +import styled from 'styled-components'; +import GreenCircleIcon from '../../../../../images/greenCircleTwoTone.svg?react'; +import { SchemaField } from '../../../../../types.generated'; +import translateFieldPath from '../../../dataset/profile/schema/utils/translateFieldPath'; +import { getNumPromptsCompletedForField } from '../../containers/profile/sidebar/FormInfo/utils'; +import { useEntityData } from '../../EntityContext'; +import { ANTD_GRAY_V2 } from '../../constants'; +import { pluralize } from '../../../../shared/textUtil'; +import { useEntityFormContext } from '../EntityFormContext'; + +const HeaderWrapper = styled.div` + display: flex; + justify-content: space-between; + font-size: 16px; + align-items: center; +`; + +const PromptsRemainingText = styled.span` + font-size: 14px; + color: ${ANTD_GRAY_V2[8]}; + font-weight: 400; +`; + +const PromptsCompletedText = styled.span` + font-size: 14px; + color: #373d44; + font-weight: 600; +`; + +interface Props { + field: SchemaField; + numPrompts: number; + isExpanded: boolean; +} + +export default function DropdownHeader({ field, numPrompts, isExpanded }: Props) { + const { entityData } = useEntityData(); + const { formUrn } = useEntityFormContext(); + const numPromptsCompletedForField = useMemo( + () => getNumPromptsCompletedForField(field.fieldPath, entityData, formUrn), + [entityData, field.fieldPath, formUrn], + ); + const numPromptsRemaining = numPrompts - numPromptsCompletedForField; + + return ( + + Field: {translateFieldPath(field.fieldPath)} + {numPromptsRemaining > 0 && ( + + {numPromptsRemaining} {pluralize(numPrompts, 'question')} remaining + + )} + {numPromptsRemaining === 0 && !isExpanded && ( + + {numPrompts} {pluralize(numPrompts, 'Question')} Completed + + )} + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldDropdown.tsx b/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldDropdown.tsx new file mode 100644 index 0000000000000..bdb6b99dc1dbf --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldDropdown.tsx @@ -0,0 +1,45 @@ +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { Collapse } from 'antd'; +import { FormPrompt, SchemaField } from '../../../../../types.generated'; +import Prompt from '../prompts/Prompt'; +import DropdownHeader from './DropdownHeader'; + +const StyledCollapse = styled(Collapse)` + margin-bottom: 16px; + + .ant-collapse-header { + font-size: 14px; + font-weight: bold; + padding: 12px 0; + } + &&& .ant-collapse-item { + background-color: white; + border-radius: 5px; + } + .ant-collapse-content-box { + padding: 0; + } +`; + +interface Props { + field: SchemaField; + prompts: FormPrompt[]; + associatedUrn?: string; +} + +export default function SchemaFieldDropdown({ field, prompts, associatedUrn }: Props) { + const [isExpanded, setIsExpanded] = useState(false); + return ( + setIsExpanded(!isExpanded)}> + } + key="0" + > + 
{prompts.map((prompt) => (
+
+            ))}
+
+    );
+}
diff --git a/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldPrompts.tsx b/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldPrompts.tsx
new file mode 100644
index 0000000000000..087a42e3f8000
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/entityForm/schemaFieldPrompts/SchemaFieldPrompts.tsx
@@ -0,0 +1,36 @@
+import React from 'react';
+import styled from 'styled-components';
+import { Divider } from 'antd';
+import { FormPrompt, SchemaField } from '../../../../../types.generated';
+import { useGetEntityWithSchema } from '../../tabs/Dataset/Schema/useGetEntitySchema';
+import SchemaFieldDropdown from './SchemaFieldDropdown';
+import VirtualScrollChild from '../../../../shared/VirtualScrollChild';
+
+const FieldPromptsTitle = styled.div`
+    margin-bottom: 16px;
+    font-size: 16px;
+    font-weight: 600;
+`;
+
+interface Props {
+    prompts: FormPrompt[];
+    associatedUrn?: string;
+}
+
+export default function SchemaFieldPrompts({ prompts, associatedUrn }: Props) {
+    const { entityWithSchema } = useGetEntityWithSchema();
+
+    if (!entityWithSchema?.schemaMetadata || !entityWithSchema.schemaMetadata.fields.length) return null;
+
+    return (
+        <>
+
+            Field-Level Requirements
+            {entityWithSchema?.schemaMetadata?.fields.map((field) => (
+
+
+            ))}
+
+    );
+}
diff --git a/datahub-web-react/src/app/entity/shared/entityForm/useShouldShowVerificationPrompt.ts b/datahub-web-react/src/app/entity/shared/entityForm/useShouldShowVerificationPrompt.ts
new file mode 100644
index 0000000000000..d7a8a417a0c86
--- /dev/null
+++ b/datahub-web-react/src/app/entity/shared/entityForm/useShouldShowVerificationPrompt.ts
@@ -0,0 +1,38 @@
+import { FormType, FormVerificationAssociation } from '../../../../types.generated';
+import { useEntityData } from '../EntityContext';
+import useGetPromptInfo from '../containers/profile/sidebar/FormInfo/useGetPromptInfo';
+import { getFormAssociation, getFormVerification } from '../containers/profile/sidebar/FormInfo/utils';
+
+interface ShowVerificationPromptProps {
+    formType?: FormType;
+    numRequiredPromptsRemaining: number;
+    formVerification?: FormVerificationAssociation;
+}
+
+export function shouldShowVerificationPrompt({
+    formType,
+    numRequiredPromptsRemaining,
+    formVerification,
+}: ShowVerificationPromptProps) {
+    return formType === FormType.Verification && numRequiredPromptsRemaining === 0 && !formVerification;
+}
+
+/*
+ * Returns whether we should show the verification prompt for a given form.
+ * We want to show this prompt if (1) the form is a VERIFICATION form, (2) there are no more
+ * required prompts remaining, and either (3a) the form is not verified or (3b) it has been
+ * edited more recently than the verification timestamp.
+ */ +export default function useShouldShowVerificationPrompt(formUrn: string) { + const { numRequiredPromptsRemaining } = useGetPromptInfo(formUrn); + const { entityData } = useEntityData(); + const formVerification = getFormVerification(formUrn, entityData); + const formAssociation = getFormAssociation(formUrn, entityData); + const formType = formAssociation?.form.info.type; + + return shouldShowVerificationPrompt({ + formType, + numRequiredPromptsRemaining, + formVerification, + }); +} diff --git a/datahub-web-react/src/app/entity/shared/siblingUtils.ts b/datahub-web-react/src/app/entity/shared/siblingUtils.ts index 66481051055ec..5e21c2a7c5ac4 100644 --- a/datahub-web-react/src/app/entity/shared/siblingUtils.ts +++ b/datahub-web-react/src/app/entity/shared/siblingUtils.ts @@ -117,6 +117,9 @@ const customMerge = (isPrimary, key) => { if (key === 'platform' || key === 'siblings') { return (secondary, primary) => (isPrimary ? primary : secondary); } + if (key === 'forms') { + return (_secondary, primary) => primary; + } if ( key === 'tags' || key === 'terms' || diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts index 47cad4a69096d..919dfb78a52f6 100644 --- a/datahub-web-react/src/app/entity/shared/types.ts +++ b/datahub-web-react/src/app/entity/shared/types.ts @@ -39,6 +39,7 @@ import { DataJobInputOutput, ParentDomainsResult, StructuredProperties, + Forms, } from '../../../types.generated'; import { FetchedEntity } from '../../lineage/types'; @@ -119,6 +120,7 @@ export type GenericEntityProperties = { origin?: Maybe; browsePathV2?: Maybe; inputOutput?: Maybe; + forms?: Maybe; }; export type GenericEntityUpdate = { diff --git a/datahub-web-react/src/app/home/HomePageRecommendations.tsx b/datahub-web-react/src/app/home/HomePageRecommendations.tsx index 6ce7735c4a7c8..cc9f4b265455b 100644 --- a/datahub-web-react/src/app/home/HomePageRecommendations.tsx +++ b/datahub-web-react/src/app/home/HomePageRecommendations.tsx @@ -20,7 +20,7 @@ import { HOME_PAGE_MOST_POPULAR_ID, HOME_PAGE_PLATFORMS_ID, } from '../onboarding/config/HomePageOnboardingConfig'; -import { useUpdateEducationStepIdsAllowlist } from '../onboarding/useUpdateEducationStepIdsAllowlist'; +import { useToggleEducationStepIdsAllowList } from '../onboarding/useToggleEducationStepIdsAllowList'; const PLATFORMS_MODULE_ID = 'Platforms'; const MOST_POPULAR_MODULE_ID = 'HighUsageEntities'; @@ -147,15 +147,15 @@ export const HomePageRecommendations = ({ user }: Props) => { // Render domain onboarding step if the domains module exists const hasDomains = !!domainRecommendationModule; - useUpdateEducationStepIdsAllowlist(hasDomains, HOME_PAGE_DOMAINS_ID); + useToggleEducationStepIdsAllowList(hasDomains, HOME_PAGE_DOMAINS_ID); // Render platforms onboarding step if the platforms module exists const hasPlatforms = !!recommendationModules?.some((module) => module?.moduleId === PLATFORMS_MODULE_ID); - useUpdateEducationStepIdsAllowlist(hasPlatforms, HOME_PAGE_PLATFORMS_ID); + useToggleEducationStepIdsAllowList(hasPlatforms, HOME_PAGE_PLATFORMS_ID); // Render most popular onboarding step if the most popular module exists const hasMostPopular = !!recommendationModules?.some((module) => module?.moduleId === MOST_POPULAR_MODULE_ID); - useUpdateEducationStepIdsAllowlist(hasMostPopular, HOME_PAGE_MOST_POPULAR_ID); + useToggleEducationStepIdsAllowList(hasMostPopular, HOME_PAGE_MOST_POPULAR_ID); return ( diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx 
b/datahub-web-react/src/app/identity/user/UserList.tsx index 178f54325ecde..4a9e84d6e2248 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -21,7 +21,7 @@ import { USERS_INVITE_LINK_ID, USERS_SSO_ID, } from '../../onboarding/config/UsersOnboardingConfig'; -import { useUpdateEducationStepIdsAllowlist } from '../../onboarding/useUpdateEducationStepIdsAllowlist'; +import { useToggleEducationStepIdsAllowList } from '../../onboarding/useToggleEducationStepIdsAllowList'; import { DEFAULT_USER_LIST_PAGE_SIZE, removeUserFromListUsersCache } from './cacheUtils'; import { useUserContext } from '../../context/useUserContext'; @@ -113,7 +113,7 @@ export const UserList = () => { const error = usersError || rolesError; const selectRoleOptions = rolesData?.listRoles?.roles?.map((role) => role as DataHubRole) || []; - useUpdateEducationStepIdsAllowlist(canManagePolicies, USERS_INVITE_LINK_ID); + useToggleEducationStepIdsAllowList(canManagePolicies, USERS_INVITE_LINK_ID); return ( <> diff --git a/datahub-web-react/src/app/onboarding/OnboardingConfig.tsx b/datahub-web-react/src/app/onboarding/OnboardingConfig.tsx index 7cc382fe8f279..83fa6acd0cc25 100644 --- a/datahub-web-react/src/app/onboarding/OnboardingConfig.tsx +++ b/datahub-web-react/src/app/onboarding/OnboardingConfig.tsx @@ -1,6 +1,7 @@ import { BusinessGlossaryOnboardingConfig } from './config/BusinessGlossaryOnboardingConfig'; import { DomainsOnboardingConfig } from './config/DomainsOnboardingConfig'; import { EntityProfileOnboardingConfig } from './config/EntityProfileOnboardingConfig'; +import { FormOnboardingConfig } from './config/FormOnboardingConfig'; import { GroupsOnboardingConfig } from './config/GroupsOnboardingConfig'; import { HomePageOnboardingConfig } from './config/HomePageOnboardingConfig'; import { IngestionOnboardingConfig } from './config/IngestionOnboardingConfig'; @@ -23,6 +24,7 @@ const ALL_ONBOARDING_CONFIGS: OnboardingStep[][] = [ RolesOnboardingConfig, PoliciesOnboardingConfig, LineageGraphOnboardingConfig, + FormOnboardingConfig, ]; export const OnboardingConfig: OnboardingStep[] = ALL_ONBOARDING_CONFIGS.reduce( (acc, config) => [...acc, ...config], diff --git a/datahub-web-react/src/app/onboarding/config/FormOnboardingConfig.tsx b/datahub-web-react/src/app/onboarding/config/FormOnboardingConfig.tsx new file mode 100644 index 0000000000000..d50a25badfabb --- /dev/null +++ b/datahub-web-react/src/app/onboarding/config/FormOnboardingConfig.tsx @@ -0,0 +1,178 @@ +import { SmileOutlined } from '@ant-design/icons'; +import React from 'react'; +import { Typography } from 'antd'; +import styled from 'styled-components'; +import { OnboardingStep } from '../OnboardingStep'; +import BulkTypeComparions from '../../../images/bulk-form-type-comparison.svg'; + +const DiagramHeader = styled.div` + display: flex; + justify-content: center; + margin: 16px 0 4px 0; +`; + +const AssetCompletionHeader = styled.div` + font-size: 20px; + font-weight: normal; +`; + +const ByAssetWrapper = styled.span` + margin-left: 10px; + font-size: 14px; +`; + +const ByQuestionWrapper = styled.span` + margin-left: 80px; + font-size: 14px; +`; + +const StyledSmile = styled(SmileOutlined)` + color: ${(props) => props.theme.styles['primary-color']}; + margin-right: 4px; +`; + +export const WELCOME_TO_BULK_BY_ENTITY_ID = 'welcome-to-bulk-by-entity'; +export const FORM_QUESTION_VIEW_BUTTON = 'form-question-view-button'; +export const FORM_ASSET_COMPLETION = 'form-asset-completion'; 
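+// Illustrative note (hypothetical usage, not from this commit): each id above doubles as a
+// DOM id. The steps below locate their target via `selector: '#<ID>'`, so a component opts
+// into a step simply by rendering that id, e.g.:
+// <Button id={FORM_QUESTION_VIEW_BUTTON}>By Question</Button>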
+export const WELCOME_TO_BULK_BY_QUESTION_ID = 'welcome-to-bulk-by-question'; +export const FORM_ASSETS_ASSIGNED_ID = 'form-assets-assigned'; +export const FORM_FILTER_AND_BROWSE_ID = 'form-filter-and-browse'; +export const FORM_ANSWER_IN_BULK_ID = 'form-answer-in-bulk'; +export const FORM_BULK_VERIFY_INTRO_ID = 'form-bulk-verify-intro'; +export const FORM_CHECK_RESPONSES_ID = 'form-check-responses'; +export const FORM_BULK_VERIFY_ID = 'form-bulk-verify'; + +export const FormOnboardingConfig: OnboardingStep[] = [ + { + id: WELCOME_TO_BULK_BY_ENTITY_ID, + selector: `#${WELCOME_TO_BULK_BY_ENTITY_ID}`, + title: 'Let’s complete your documentation requests!', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + Here you can easily respond to all documentation requests efficiently. We’ll track your progress and + move you seamlessly through all your requests. +
    + Let’s get started completing the needs for this form. +
    + ), + }, + { + id: FORM_QUESTION_VIEW_BUTTON, + selector: `#${FORM_QUESTION_VIEW_BUTTON}`, + title: "Switch to the 'Complete by Question' view.", + style: { width: '520px', maxWidth: '520px' }, + content: ( + + If an answer fits multiple assets, this view lets you tackle questions across different assets at once, + making documentation even faster and more efficient. + + ), + }, + { + id: FORM_ASSET_COMPLETION, + selector: `#${FORM_ASSET_COMPLETION}`, + isActionStep: true, + title: ( + + Congratulations, You’ve Completed 1 Asset! + + ), + style: { width: '640px', maxWidth: '640px' }, + content: ( + + Now that you’ve completed one asset, try switching to the ‘Complete by Question’ view. If an answer fits + multiple assets, this view lets you tackle questions across different assets at once, making + documentation even faster and more efficient. + + By Asset + By Question + + bulk form type comparions + + ), + }, + { + id: WELCOME_TO_BULK_BY_QUESTION_ID, + selector: `#${WELCOME_TO_BULK_BY_QUESTION_ID}`, + title: "Welcome to the 'Complete by Question' view!", + style: { width: '520px', maxWidth: '520px' }, + content: ( + + Here, you can easily provide the same response for multiple assets at once for a faster documenting + experience. + + ), + }, + { + id: FORM_ASSETS_ASSIGNED_ID, + selector: `#${FORM_ASSETS_ASSIGNED_ID}`, + title: 'Focus on only the assets that require your attention', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + In this view, we’ve simplified your workflow by only showing assets that require documentation from you. + + ), + }, + { + id: FORM_FILTER_AND_BROWSE_ID, + selector: `#${FORM_FILTER_AND_BROWSE_ID}`, + title: 'Filter and Browse to Select the Specific Assets', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + Filter by type, terms, or browse by platform, database and schemas to select only the assets that you’d + like to set the response for. + + ), + }, + { + id: FORM_ANSWER_IN_BULK_ID, + selector: `#${FORM_ANSWER_IN_BULK_ID}`, + title: 'Answer in Bulk', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + After selecting your assets, set a collective response and start answering for groups of 1,000 assets at + a time. + + ), + }, + { + id: FORM_BULK_VERIFY_INTRO_ID, + selector: `#${FORM_BULK_VERIFY_INTRO_ID}`, + title: 'Streamline Verification in Bulk!', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + Here you can quickly review responses for a few datasets, ensuring accuracy. When you're ready, + proceed to verify all assets at once, simplifying the entire verification process. + + ), + }, + { + id: FORM_CHECK_RESPONSES_ID, + selector: `#${FORM_CHECK_RESPONSES_ID}`, + title: 'Check Responses', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + Click on "View Responses" to easily spot-check your responses before the final Verification + step. + + ), + }, + { + id: FORM_BULK_VERIFY_ID, + selector: `#${FORM_BULK_VERIFY_ID}`, + title: 'Bulk Verify Assets', + style: { width: '520px', maxWidth: '520px' }, + content: ( + + Once you're confident in your responses, verify up to 1,000 assets at a time for this form with a + click of a button. 
+ + ), + }, +]; diff --git a/datahub-web-react/src/app/onboarding/useToggleEducationStepIdsAllowList.tsx b/datahub-web-react/src/app/onboarding/useToggleEducationStepIdsAllowList.tsx new file mode 100644 index 0000000000000..acf85d0a87b10 --- /dev/null +++ b/datahub-web-react/src/app/onboarding/useToggleEducationStepIdsAllowList.tsx @@ -0,0 +1,18 @@ +import { useContext, useEffect } from 'react'; +import { EducationStepsContext } from '../../providers/EducationStepsContext'; +import { useUpdateEducationStepsAllowList } from './useUpdateEducationStepsAllowList'; + +export function useToggleEducationStepIdsAllowList(condition: boolean, id: string) { + const { educationStepIdsAllowlist } = useContext(EducationStepsContext); + const { addIdToAllowList, removeIdFromAllowList } = useUpdateEducationStepsAllowList(); + + useEffect(() => { + const allowlistIncludesStepId = educationStepIdsAllowlist.has(id); + + if (condition && !allowlistIncludesStepId) { + addIdToAllowList(id); + } else if (!condition && allowlistIncludesStepId) { + removeIdFromAllowList(id); + } + }, [condition, id, addIdToAllowList, removeIdFromAllowList, educationStepIdsAllowlist]); +} diff --git a/datahub-web-react/src/app/onboarding/useUpdateEducationStepIdsAllowlist.tsx b/datahub-web-react/src/app/onboarding/useUpdateEducationStepIdsAllowlist.tsx deleted file mode 100644 index 4eb1f6c02b6b8..0000000000000 --- a/datahub-web-react/src/app/onboarding/useUpdateEducationStepIdsAllowlist.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import { useContext, useEffect } from 'react'; -import { EducationStepsContext } from '../../providers/EducationStepsContext'; - -export function useUpdateEducationStepIdsAllowlist(condition: boolean, id: string) { - const { educationStepIdsAllowlist, setEducationStepIdsAllowlist } = useContext(EducationStepsContext); - - useEffect(() => { - const allowlistIncludesStepId = educationStepIdsAllowlist.has(id); - - if (condition && !allowlistIncludesStepId) { - const newStepdIdsAllowlist: Set = new Set(educationStepIdsAllowlist); - newStepdIdsAllowlist.add(id); - setEducationStepIdsAllowlist(newStepdIdsAllowlist); - } else if (!condition && allowlistIncludesStepId) { - const newStepdIdsAllowlist: Set = new Set(educationStepIdsAllowlist); - newStepdIdsAllowlist.delete(id); - setEducationStepIdsAllowlist(newStepdIdsAllowlist); - } - }, [condition, id, educationStepIdsAllowlist, setEducationStepIdsAllowlist]); -} diff --git a/datahub-web-react/src/app/onboarding/useUpdateEducationStepsAllowList.tsx b/datahub-web-react/src/app/onboarding/useUpdateEducationStepsAllowList.tsx new file mode 100644 index 0000000000000..86b9000205b8b --- /dev/null +++ b/datahub-web-react/src/app/onboarding/useUpdateEducationStepsAllowList.tsx @@ -0,0 +1,22 @@ +import { useContext } from 'react'; +import { EducationStepsContext } from '../../providers/EducationStepsContext'; + +// function use + +export function useUpdateEducationStepsAllowList() { + const { educationStepIdsAllowlist, setEducationStepIdsAllowlist } = useContext(EducationStepsContext); + + function removeIdFromAllowList(id: string) { + const newStepdIdsAllowlist: Set = new Set(educationStepIdsAllowlist); + newStepdIdsAllowlist.delete(id); + setEducationStepIdsAllowlist(newStepdIdsAllowlist); + } + + function addIdToAllowList(id: string) { + const newStepdIdsAllowlist: Set = new Set(educationStepIdsAllowlist); + newStepdIdsAllowlist.add(id); + setEducationStepIdsAllowlist(newStepdIdsAllowlist); + } + + return { removeIdFromAllowList, addIdToAllowList }; +} diff --git 
a/datahub-web-react/src/app/search/SearchHeader.tsx b/datahub-web-react/src/app/search/SearchHeader.tsx index 76e78a11d3e9d..0b6bf5488a301 100644 --- a/datahub-web-react/src/app/search/SearchHeader.tsx +++ b/datahub-web-react/src/app/search/SearchHeader.tsx @@ -1,7 +1,6 @@ import React, { useState } from 'react'; -import { Image, Layout } from 'antd'; -import { Link } from 'react-router-dom'; -import styled, { useTheme } from 'styled-components'; +import { Layout } from 'antd'; +import styled from 'styled-components'; import { SearchBar } from './SearchBar'; import { ManageAccount } from '../shared/ManageAccount'; @@ -10,8 +9,8 @@ import EntityRegistry from '../entity/EntityRegistry'; import { ANTD_GRAY } from '../entity/shared/constants'; import { HeaderLinks } from '../shared/admin/HeaderLinks'; import { useAppConfig, useIsShowAcrylInfoEnabled } from '../useAppConfig'; -import { DEFAULT_APP_CONFIG } from '../../appConfigContext'; import DemoButton from '../entity/shared/components/styled/DemoButton'; +import AppLogoLink from '../shared/AppLogoLink'; const { Header } = Layout; @@ -29,13 +28,6 @@ const styles = { }, }; -const LogoImage = styled(Image)` - display: inline-block; - height: 32px; - width: auto; - margin-top: 2px; -`; - const LogoSearchContainer = styled.div` display: flex; flex: 1; @@ -77,7 +69,6 @@ export const SearchHeader = ({ entityRegistry, }: Props) => { const [isSearchBarFocused, setIsSearchBarFocused] = useState(false); - const themeConfig = useTheme(); const showAcrylInfo = useIsShowAcrylInfoEnabled(); const appConfig = useAppConfig(); const viewsEnabled = appConfig.config?.viewsConfig?.enabled || false; @@ -85,16 +76,7 @@ export const SearchHeader = ({ return (
    - - - + { }, [isSelectMode]); // Render new search filters v2 onboarding step if the feature flag is on - useUpdateEducationStepIdsAllowlist(showSearchFiltersV2, SEARCH_RESULTS_FILTERS_V2_INTRO); + useToggleEducationStepIdsAllowList(showSearchFiltersV2, SEARCH_RESULTS_FILTERS_V2_INTRO); // Render new browse v2 onboarding step if the feature flag is on - useUpdateEducationStepIdsAllowlist(showBrowseV2, SEARCH_RESULTS_BROWSE_SIDEBAR_ID); + useToggleEducationStepIdsAllowList(showBrowseV2, SEARCH_RESULTS_BROWSE_SIDEBAR_ID); return ( <> diff --git a/datahub-web-react/src/app/shared/AppLogoLink.tsx b/datahub-web-react/src/app/shared/AppLogoLink.tsx new file mode 100644 index 0000000000000..7d647194b07c4 --- /dev/null +++ b/datahub-web-react/src/app/shared/AppLogoLink.tsx @@ -0,0 +1,31 @@ +import { Image } from 'antd'; +import React from 'react'; +import { Link } from 'react-router-dom'; +import styled, { useTheme } from 'styled-components'; +import { useAppConfig } from '../useAppConfig'; +import { DEFAULT_APP_CONFIG } from '../../appConfigContext'; + +const LogoImage = styled(Image)` + display: inline-block; + height: 32px; + width: auto; + margin-top: 2px; +`; + +export default function AppLogoLink() { + const appConfig = useAppConfig(); + const themeConfig = useTheme(); + + return ( + + + + ); +} diff --git a/datahub-web-react/src/app/shared/DeferredRenderComponent.tsx b/datahub-web-react/src/app/shared/DeferredRenderComponent.tsx new file mode 100644 index 0000000000000..7b5bdb949fc3d --- /dev/null +++ b/datahub-web-react/src/app/shared/DeferredRenderComponent.tsx @@ -0,0 +1,23 @@ +import React, { useEffect, useState } from 'react'; + +interface Props { + wrappedComponent: React.ReactNode; + loadingComponent?: React.ReactNode; + delay?: number; +} + +export function DeferredRenderComponent({ wrappedComponent, loadingComponent, delay = 250 }: Props) { + const [shouldRender, setShouldRender] = useState(false); + + useEffect(() => { + setTimeout(() => { + setShouldRender(true); + }, delay); + }, [delay]); + + if (shouldRender) { + return <>{wrappedComponent}; + } + + return loadingComponent ? <>{loadingComponent} : null; +} diff --git a/datahub-web-react/src/app/shared/Loading.tsx b/datahub-web-react/src/app/shared/Loading.tsx new file mode 100644 index 0000000000000..d03fb0a585dc1 --- /dev/null +++ b/datahub-web-react/src/app/shared/Loading.tsx @@ -0,0 +1,27 @@ +import { LoadingOutlined } from '@ant-design/icons'; +import React from 'react'; +import styled from 'styled-components'; + +const LoadingWrapper = styled.div` + display: flex; + justify-content: center; + margin-top: 25%; + width: 100%; +`; + +const StyledLoading = styled(LoadingOutlined)<{ $height: number }>` + font-size: ${(props) => props.$height}px; + height: ${(props) => props.$height}px; +`; + +interface Props { + height?: number; +} + +export default function Loading({ height = 32 }: Props) { + return ( + + + + ); +} diff --git a/datahub-web-react/src/app/shared/VirtualScrollChild.tsx b/datahub-web-react/src/app/shared/VirtualScrollChild.tsx new file mode 100644 index 0000000000000..4cd539e094306 --- /dev/null +++ b/datahub-web-react/src/app/shared/VirtualScrollChild.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import styled from 'styled-components'; +import { useInView } from 'react-intersection-observer'; + +const VirtualChildWrapper = styled.div<{ $inView: boolean; $height: number }>` + height: ${(props) => (props.$inView ? 
'auto' : `${props.$height}px`)};
+    ${(props) => !props.$inView && 'visibility: hidden;'}
+`;
+
+interface VirtualProps {
+    height: number;
+    children: React.ReactNode;
+    triggerOnce?: boolean;
+}
+
+export default function VirtualScrollChild({ height, children, triggerOnce }: VirtualProps) {
+    const [ref, inView] = useInView({ triggerOnce });
+
+    return (
+
+            {inView ? children : null}
+
+    );
+}
diff --git a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx
index 4a7a4938ea970..7d53afda2aa3a 100644
--- a/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx
+++ b/datahub-web-react/src/app/shared/admin/HeaderLinks.tsx
@@ -13,7 +13,7 @@ import { Button, Dropdown, Menu, Tooltip } from 'antd';
 import { useAppConfig } from '../../useAppConfig';
 import { ANTD_GRAY } from '../../entity/shared/constants';
 import { HOME_PAGE_INGESTION_ID } from '../../onboarding/config/HomePageOnboardingConfig';
-import { useUpdateEducationStepIdsAllowlist } from '../../onboarding/useUpdateEducationStepIdsAllowlist';
+import { useToggleEducationStepIdsAllowList } from '../../onboarding/useToggleEducationStepIdsAllowList';
 import { useUserContext } from '../../context/useUserContext';
 import DomainIcon from '../../domain/DomainIcon';
 
@@ -74,7 +74,7 @@ export function HeaderLinks(props: Props) {
     const showIngestion =
         isIngestionEnabled && me && me.platformPrivileges?.manageIngestion && me.platformPrivileges?.manageSecrets;
 
-    useUpdateEducationStepIdsAllowlist(!!showIngestion, HOME_PAGE_INGESTION_ID);
+    useToggleEducationStepIdsAllowList(!!showIngestion, HOME_PAGE_INGESTION_ID);
 
     return (
diff --git a/datahub-web-react/src/app/shared/components.tsx b/datahub-web-react/src/app/shared/components.tsx
index 68d2fb52cfdba..3977a9c36b402 100644
--- a/datahub-web-react/src/app/shared/components.tsx
+++ b/datahub-web-react/src/app/shared/components.tsx
@@ -47,3 +47,9 @@ export const BodyGridExpander = styled.div<{ isOpen: boolean }>`
 export const BodyContainer = styled.div`
     min-height: 0;
 `;
+
+export const WhiteButton = styled(Button)`
+    background-color: white;
+    color: ${(props) => props.theme.styles['primary-color']};
+    text-shadow: none;
+`;
diff --git a/datahub-web-react/src/app/shared/useHasComponentRendered.ts b/datahub-web-react/src/app/shared/useHasComponentRendered.ts
new file mode 100644
index 0000000000000..ff9b6987ecf11
--- /dev/null
+++ b/datahub-web-react/src/app/shared/useHasComponentRendered.ts
@@ -0,0 +1,18 @@
+import { useEffect, useState } from 'react';
+
+/*
+ * Returns whether a desired component is rendered or not.
+ * By setting a timeout we place the state update at the
+ * end of the queue after this component has rendered.
+ */
+export default function useHasComponentRendered() {
+    const [hasRendered, setHasRendered] = useState(false);
+
+    useEffect(() => {
+        setTimeout(() => {
+            setHasRendered(true);
+        }, 0);
+    }, []);
+
+    return { hasRendered };
+}
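A usage sketch for the two deferred-rendering helpers above (not part of this commit; HeavyChart is a hypothetical stand-in for any expensive subtree):

import React from 'react';
import { DeferredRenderComponent } from './DeferredRenderComponent';
import Loading from './Loading';
import HeavyChart from './HeavyChart'; // hypothetical expensive component

export default function DeferredHeavyChart() {
    // Render the spinner immediately; mount HeavyChart only after the default
    // 250ms delay, so the surrounding page can paint first.
    return <DeferredRenderComponent wrappedComponent={<HeavyChart />} loadingComponent={<Loading height={24} />} />;
}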
diff --git a/datahub-web-react/src/graphql/dataset.graphql b/datahub-web-react/src/graphql/dataset.graphql
index 57c74e0c65d69..e25d4fe6c8635 100644
--- a/datahub-web-react/src/graphql/dataset.graphql
+++ b/datahub-web-react/src/graphql/dataset.graphql
@@ -168,6 +168,27 @@ fragment nonSiblingDatasetFields on Dataset {
         canEditEmbed
         canEditQueries
     }
+    forms {
+        completedForms {
+            ...formAssociationFields
+        }
+        incompleteForms {
+            ...formAssociationFields
+        }
+        verifications {
+            form {
+                urn
+            }
+            lastModified {
+                time
+                actor {
+                    urn
+                    type
+                    ...entityDisplayNameFields
+                }
+            }
+        }
+    }
 }
 
 query getRecentQueries($urn: String!) {
diff --git a/datahub-web-react/src/graphql/form.graphql b/datahub-web-react/src/graphql/form.graphql
new file mode 100644
index 0000000000000..3cd09697f79bd
--- /dev/null
+++ b/datahub-web-react/src/graphql/form.graphql
@@ -0,0 +1,7 @@
+mutation submitFormPrompt($urn: String!, $input: SubmitFormPromptInput!) {
+    submitFormPrompt(urn: $urn, input: $input)
+}
+
+mutation verifyForm($input: VerifyFormInput!) {
+    verifyForm(input: $input)
+}
diff --git a/datahub-web-react/src/graphql/fragments.graphql b/datahub-web-react/src/graphql/fragments.graphql
index e901c9af554ad..dc534b315aadf 100644
--- a/datahub-web-react/src/graphql/fragments.graphql
+++ b/datahub-web-react/src/graphql/fragments.graphql
@@ -1249,3 +1249,67 @@ fragment autoRenderAspectFields on RawAspect {
         key
     }
 }
+
+fragment formAssociationFields on FormAssociation {
+    associatedUrn
+    incompletePrompts {
+        ...formPromptAssociationFields
+    }
+    completedPrompts {
+        ...formPromptAssociationFields
+    }
+    form {
+        urn
+        type
+        info {
+            name
+            description
+            type
+            prompts {
+                id
+                formUrn
+                title
+                description
+                type
+                required
+                structuredPropertyParams {
+                    structuredProperty {
+                        ...structuredPropertyFields
+                    }
+                }
+            }
+            actors {
+                owners
+                isAssignedToMe
+            }
+        }
+        ownership {
+            ...ownershipFields
+        }
+    }
+}
+
+fragment formPromptAssociationFields on FormPromptAssociation {
+    id
+    lastModified {
+        time
+        actor {
+            urn
+            type
+            ...entityDisplayNameFields
+        }
+    }
+    fieldAssociations {
+        completedFieldPrompts {
+            fieldPath
+            lastModified {
+                time
+                actor {
+                    urn
+                    type
+                    ...entityDisplayNameFields
+                }
+            }
+        }
+    }
+}
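To make the shape of the new form fragments concrete, a hedged sketch of a consumer (the helper is hypothetical; the types are assumed to come from GraphQL codegen):

import { FormAssociation } from './types.generated'; // import path assumed

// Count the required prompts that are still incomplete on one form association,
// mirroring the formAssociationFields / formPromptAssociationFields shapes above.
export function countIncompleteRequiredPrompts(association: FormAssociation): number {
    const requiredPrompts = association.form.info?.prompts?.filter((prompt) => prompt.required) ?? [];
    const incompleteIds = new Set((association.incompletePrompts ?? []).map((prompt) => prompt.id));
    return requiredPrompts.filter((prompt) => incompleteIds.has(prompt.id)).length;
}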
index 0000000000000..ac94f42394e36
--- /dev/null
+++ b/datahub-web-react/src/images/bulk-form-type-comparison.svg
@@ -0,0 +1,68 @@
[68 added lines of SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/greenCircleTwoTone.svg b/datahub-web-react/src/images/greenCircleTwoTone.svg
new file mode 100644
index 0000000000000..ca9fd1ae74241
--- /dev/null
+++ b/datahub-web-react/src/images/greenCircleTwoTone.svg
@@ -0,0 +1,5 @@
[SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/verificationBlue.svg b/datahub-web-react/src/images/verificationBlue.svg
new file mode 100644
index 0000000000000..dafdd60fb156d
--- /dev/null
+++ b/datahub-web-react/src/images/verificationBlue.svg
@@ -0,0 +1,4 @@
[SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/verificationGreen.svg b/datahub-web-react/src/images/verificationGreen.svg
new file mode 100644
index 0000000000000..d082db5dfc456
--- /dev/null
+++ b/datahub-web-react/src/images/verificationGreen.svg
@@ -0,0 +1,4 @@
[SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/verificationPurple.svg b/datahub-web-react/src/images/verificationPurple.svg
new file mode 100644
index 0000000000000..a9549195779f0
--- /dev/null
+++ b/datahub-web-react/src/images/verificationPurple.svg
@@ -0,0 +1,4 @@
[SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/verificationPurpleWhite.svg b/datahub-web-react/src/images/verificationPurpleWhite.svg
new file mode 100644
index 0000000000000..c57d8b3105ebe
--- /dev/null
+++ b/datahub-web-react/src/images/verificationPurpleWhite.svg
@@ -0,0 +1,4 @@
[SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/verificationWarningGray.svg b/datahub-web-react/src/images/verificationWarningGray.svg
new file mode 100644
index 0000000000000..725f448894532
--- /dev/null
+++ b/datahub-web-react/src/images/verificationWarningGray.svg
@@ -0,0 +1,4 @@
[SVG markup stripped during extraction; omitted]
diff --git a/datahub-web-react/src/images/verticalogo copy.png b/datahub-web-react/src/images/verticalogo copy.png
new file mode 100644
index 0000000000000000000000000000000000000000..5da38f4e67c7d4ea72c02983bc8cf01c9b10752f
GIT binary patch
literal 25763
[25763 bytes of base85-encoded binary image data omitted]
zmW~=OtUD(nGcA0m>L~v*0uZ)U|G5KUfdUR63Y+{prPkUXOFBD{b6>^R-Y~Bqx+;wP zgR;cZOl$3@CQGfBE=Lf`d3(4Md25ka&(__GhC!QE6r2}0DaeF7j@{7$7({O;rWkew z3|}no9En`vQCNzbs~es~AKaZ8YSSlhbLO;nhSYUxB{0wP-JCXV~tw$H;yZ1$x z9RM{7&-|rjRze4j&W)M3Uu z+6#LE9?~pK@byi3Nxdz$+%arX0EDViVqXNY;0~Kv<+kD?KfxaH5rM zWu15OHPuIdH5JE&R6a$q-WhsrNf}6i4dG_>AMn=oj}(0EI=vp@;2%d~vGBJ-yHkNl6$q2n+BXpQNX~)Nxbp))Ubjk zLN@qX0NL9~;k<{8?Vp3}uM6|RcS^IEtz3OC-b#jN0~5NWf&jYG(dg6RI`am(34NU# zTv6+H$WDSLk2vFH7Vn?I2InZx&2GH4FeHU-Cq*cy6NBjDmpv7ASYL=QbD@szGzUZ% zbvpYXM}6^E{Y)43SUe;Js|G3ySECEEvmS1eWG@Go>mb64j&gEEMaQkNQaJ6X zGg|wy@R)v$z5`6QUF}*O3uNehIl1djWozb?T@l^|LshBVfXuWxN3@Kx^sKNNJ~}!M z5Fc~J{UBHg1VFn>$sj@CKajxfdKdNsIeI69`8Y#a;7M^cOoeV4%*2=?eCCXDwzbm zkz)*idh?VCJEUzi&4a6#uBDODQ=nY^5_9&4q8RULb7>OL9mLJrT3Ntm1!1^^ z9F3fh)1x}9rB=CA*07#y7*Xnm|x6&vbIrB9nTedxa2myz`chen|!OBu)gnx z>T;;-)zL~}#oqYQ?%b4PGtegP1+VNBjO@<6nb6Z=kM622i1SmiBp1+pl`?0wm?VSb zj7hAn@X!ehFwN79*PZoOWvZ?8y)VV?pO8yviL4&m3-cpA6*UicZ)G|IT}GQ49ov}J zJX$*@C{(ILraqgYftm%4pb?PqSH;8y|A-o`>EB@_xNZlb7+FORKm9wIh@6Fjp)8by z`k%$d0b;ZHU57h1D$xFKYk*GiMUdx61n>_EdE{6ce?f~=}ARxk)MlbMB0V?W;C zKc)N?Kj8GJD6q}aDoi3iIe}W7kK8bnT}U0;CMMHo0AMVLk(%afGO6yAYbu804UXMz zSbvVwCjGpA^KNfb=ggwcVNO7VEakRCY1GrC?Ltn#A#rnpU5VvMSH^QQkQJdh<9!sz zWZ>y}@0||6o-dI_Igx(QfVt>H{L{l9v60ng7OWEiB*fTYF(c;Vyel~+6 z`fl3vy!hJXHR)mlW?828!F>T7qxzulej5#J!;h%}9%f=+b|IRhd#41+86P1Q|H?OI zBywbDM_^(Cz`hbdAcl+es9D$zmhW9~sZSYVT&ddv(k&h6;JAJ)E0FuusM=8s0KLj2 znun|U6yNjN(8p)=db*zfIiHtdQ#sVCkY<=pSI3`lVng{hq?`PJn8Lg~M;t8@18YIS zH^`QP!BvIi+1}wZ^a*bc5(lwvTAL8HT-h7{B+g!RrM3Y#tN6o(_KKT)ihc2$Lfb6M z0*14x2iIncH!ruBR=lPq-8admUT-Nts-MvzG=*N!Z|4t-qO9)N;V!uM#`D6qBh*KIu}{=n#u-`~IVn8*PfWD+_~suSrxYB z^=ZN-{B$TBfEIQU@?!Y1F1a)*C=nUjGtHte+%vT4V0eS?=>j))2o(c?AD4v{Ead~k z_Zghgw^sYegpexGtl{NwDZo<9yvoVcoRO+&B93dghV=5PF7xX&xxD$m?_fJ0iwPmFvcrHs!s;r4lTZ%T&^BrU z{U^KOwx`MoC^Zi|C5$)9OR%0u?qK$P+LS$nC*parR7Tiy0rd*ba%Pa zDB3ZKBHa>;#fLYV4d~UPTFF}cBxM%4_3>4xHqF@`_MdU*QY=^3Gk^9I%52#pl=Drt zFK6ww|7`vDqWMCJTGV)fa}hCTXZ}Sqzsk7*_bx|J!hd%&j0|>A*z)k0S+hdbUyr^K z_oC8Y{-e=9Wae)jO1?c_D20Sj3}4KZJr|l=ODOohpCeb_ftD{gK_!1<7^c&F-h?av zo9zGnz~ig)f2P3}`&uK$A9yecAyjp9R)CiEDsZl;2S()m-}RexXtl_ZX1rI72*4zY0K3w7~OL> zkGLAF`lFFHwVoxcVDyD37vc!C?Y-LRk51iU0qjR62x2ZMUnd$gBxvKKp|ZZ4LdqRqEEE F{|CI7K7#-N literal 0 HcmV?d00001 diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 37801e42b3eab..c57454107ee0e 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -9165,6 +9165,11 @@ react-icons@4.3.1: resolved "https://registry.yarnpkg.com/react-icons/-/react-icons-4.3.1.tgz#2fa92aebbbc71f43d2db2ed1aed07361124e91ca" integrity sha512-cB10MXLTs3gVuXimblAdI71jrJx8njrJZmNMEMC+sQu5B/BIOmlsAjskdqpn81y8UBVEGuHODd7/ci5DvoSzTQ== +react-intersection-observer@^9.5.3: + version "9.5.3" + resolved "https://registry.yarnpkg.com/react-intersection-observer/-/react-intersection-observer-9.5.3.tgz#f47a31ed3a0359cbbfdb91a53d7470ac2ab7b3c7" + integrity sha512-NJzagSdUPS5rPhaLsHXYeJbsvdpbJwL6yCHtMk91hc0ufQ2BnXis+0QQ9NBh6n9n+Q3OyjR6OQLShYbaNBkThQ== + react-is@^16.12.0, react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1: version "16.13.1" resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" From b4d70dd13f7db37b201b0fe84dfce8e22fc6d664 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Fri, 2 Feb 2024 17:46:54 -0500 Subject: [PATCH 475/792] chore(ingest/feast): drop redundant typeguard constraint (#9770) --- metadata-ingestion/setup.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/metadata-ingestion/setup.py 
diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index d4e2ada1fc68f..ed3a886cc0bf3 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -311,8 +311,6 @@
     "feast": {
         "feast~=0.35.0",
         "flask-openid>=1.3.0",
-        # typeguard 3.x, released on 2023-03-14, seems to cause issues with Feast.
-        "typeguard<3",
     },
     "glue": aws_common,
     # hdbcli is supported officially by SAP, sqlalchemy-hana is built on top but not officially supported

From eb8017d6eb5d16d646150af3f9696fb1955466f7 Mon Sep 17 00:00:00 2001
From: cburroughs
Date: Fri, 2 Feb 2024 17:47:26 -0500
Subject: [PATCH 476/792] feat(ingestion/feast): loosen feast dependency (#9769)

---
 metadata-ingestion/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index ed3a886cc0bf3..67ea861230fed 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -309,7 +309,7 @@
     # https://github.com/elastic/elasticsearch-py/issues/1639#issuecomment-883587433
     "elasticsearch": {"elasticsearch==7.13.4"},
     "feast": {
-        "feast~=0.35.0",
+        "feast>=0.34.0,<1",
         "flask-openid>=1.3.0",
     },
     "glue": aws_common,

From fd34e41614a2920b457dffaa0dccefdad0afe4fc Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Sat, 3 Feb 2024 13:57:13 +0530
Subject: [PATCH 477/792] feat(ci): check for packages, change location of lint (#9759)

---
 .github/scripts/check_python_package.py       | 18 ++++++++++
 .github/workflows/docker-unified.yml          | 33 ++++++++++++-------
 .../src/datahub/cli/config_utils.py           |  7 ++--
 .../tests/dataproduct/queries/__init__.py     |  0
 4 files changed, 43 insertions(+), 15 deletions(-)
 create mode 100644 .github/scripts/check_python_package.py
 create mode 100644 smoke-test/tests/dataproduct/queries/__init__.py

diff --git a/.github/scripts/check_python_package.py b/.github/scripts/check_python_package.py
new file mode 100644
index 0000000000000..f1f3005691700
--- /dev/null
+++ b/.github/scripts/check_python_package.py
@@ -0,0 +1,18 @@
+import setuptools
+
+folders = ["./smoke-test/tests"]
+
+for folder in folders:
+    print(f"Checking folder {folder}")
+    a = [i for i in setuptools.find_packages(folder) if "cypress" not in i]
+    b = [i for i in setuptools.find_namespace_packages(folder) if "cypress" not in i]
+
+    in_a_not_b = set(a) - set(b)
+    in_b_not_a = set(b) - set(a)
+
+    assert (
+        len(in_a_not_b) == 0
+    ), f"Found packages in {folder} that are not in namespace packages: {in_a_not_b}"
+    assert (
+        len(in_b_not_a) == 0
+    ), f"Found namespace packages in {folder} that are not in packages: {in_b_not_a}"
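The new CI check leans on the difference between the two setuptools discovery
functions: find_packages() only returns directories that contain an
__init__.py, while find_namespace_packages() also returns implicit namespace
packages (directories without one). A minimal standalone sketch of the same
comparison, not part of the commit itself; the printed result is hypothetical:

    import setuptools

    folder = "./smoke-test/tests"

    # Regular packages: directories that ship an __init__.py file.
    regular = set(setuptools.find_packages(folder))

    # Namespace discovery additionally picks up directories without __init__.py.
    namespaced = set(setuptools.find_namespace_packages(folder))

    # Any entry in this difference is silently excluded from normal package
    # discovery, which is exactly the mismatch the assertions above reject.
    print(namespaced - regular)  # e.g. {'dataproduct.queries'}

This is also why the commit adds the empty smoke-test/tests/dataproduct/queries/__init__.py.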
diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml
index 24490ef83ae5d..ae323181a5215 100644
--- a/.github/workflows/docker-unified.yml
+++ b/.github/workflows/docker-unified.yml
@@ -37,8 +37,29 @@ env:
   DATAHUB_INGESTION_IMAGE: "acryldata/datahub-ingestion"

 jobs:
+  check_lint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repo
+        uses: hsheth2/sane-checkout-action@v1
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+          cache: "pip"
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
+      - name: Ensure packages are correct
+        run: |
+          python ./.github/scripts/check_python_package.py
+      - name: Run lint on smoke test
+        run: |
+          ./gradlew :smoke-test:lint
   setup:
     runs-on: ubuntu-latest
+    needs: check_lint
     outputs:
       tag: ${{ steps.tag.outputs.tag }}
       slim_tag: ${{ steps.tag.outputs.slim_tag }}
@@ -64,18 +85,6 @@
     steps:
       - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-          cache: "pip"
-      - name: Set up JDK 17
-        uses: actions/setup-java@v3
-        with:
-          distribution: "zulu"
-          java-version: 17
-      - name: Run lint on smoke test
-        run: |
-          ./gradlew :smoke-test:lint
       - name: Compute Tag
         id: tag
         run: |
diff --git a/metadata-ingestion/src/datahub/cli/config_utils.py b/metadata-ingestion/src/datahub/cli/config_utils.py
index 4b69aec8081ab..7877a6bf6df59 100644
--- a/metadata-ingestion/src/datahub/cli/config_utils.py
+++ b/metadata-ingestion/src/datahub/cli/config_utils.py
@@ -1,3 +1,7 @@
+"""
+Helper methods for manipulating the config file on the local system.
+"""
+
 import logging
 import os
 import sys
@@ -9,9 +13,6 @@
 from datahub.cli.env_utils import get_boolean_env_variable

-__help__ = (
-    "For helper methods to contain manipulation of the config file in local system."
-)
 log = logging.getLogger(__name__)

 DEFAULT_GMS_HOST = "http://localhost:8080"
diff --git a/smoke-test/tests/dataproduct/queries/__init__.py b/smoke-test/tests/dataproduct/queries/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d

From f4cc60bcba2c839f57b72f29fa6325f5ed0925de Mon Sep 17 00:00:00 2001
From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com>
Date: Mon, 5 Feb 2024 13:19:25 +0530
Subject: [PATCH 478/792] feat(ui/policies): Add tag filter in policy creation page (#9756)

---
 .../app/permissions/policy/ManagePolicies.tsx |   1 +
 .../permissions/policy/PolicyBuilderModal.tsx |  10 +-
 .../policy/PolicyPrivilegeForm.tsx            | 200 +++++++++++++++++-
 .../src/app/permissions/policy/policyUtils.ts |   4 +
 .../src/app/permissions/policy/usePolicy.ts   |  14 +-
 5 files changed, 219 insertions(+), 10 deletions(-)

diff --git a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx
index 72c22f3bddc2c..5765babcb575e 100644
--- a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx
+++ b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx
@@ -370,6 +370,7 @@ export const ManagePolicies = () => {
                 {showPolicyBuilderModal && (
; setPolicy: (policy: Omit) => void;
    visible: boolean;
+   focusPolicyUrn: string | undefined;
    onClose: () => void;
    onSave: (savePolicy: Omit) => void;
};
@@ -39,9 +40,11 @@ const NextButtonContainer = styled.div`
 /**
  * Component used for constructing new policies. The purpose of this flow is to populate or edit a Policy
  * object through a sequence of steps.
  */
-export default function PolicyBuilderModal({ policy, setPolicy, visible, onClose, onSave }: Props) {
+export default function PolicyBuilderModal({ policy, setPolicy, visible, onClose, onSave, focusPolicyUrn }: Props) {
     // Step control-flow.
const [activeStepIndex, setActiveStepIndex] = useState(0); + const [selectedTags, setSelectedTags] = useState([]); + const [isEditState,setEditState] = useState(true) // Go to next step const next = () => { @@ -90,12 +93,17 @@ export default function PolicyBuilderModal({ policy, setPolicy, visible, onClose title: 'Configure Privileges', content: ( { setPolicy({ ...policy, resources }); }} + setSelectedTags={setSelectedTags} + selectedTags={selectedTags} + setEditState={setEditState} + isEditState={isEditState} privileges={policy.privileges} setPrivileges={(privileges: string[]) => setPolicy({ ...policy, privileges })} /> diff --git a/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx b/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx index ac73a1f5ece7c..7a0de67f41419 100644 --- a/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx +++ b/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx @@ -1,6 +1,6 @@ -import React, { useMemo, useState } from 'react'; +import React, { useEffect, useMemo, useRef, useState } from 'react'; import { Link } from 'react-router-dom'; -import { Form, Select, Tag, Tooltip, Typography } from 'antd'; +import { Form, Select, Tag, Tooltip, Typography, Tag as CustomTag } from 'antd'; import styled from 'styled-components/macro'; import { useEntityRegistry } from '../../useEntityRegistry'; @@ -9,13 +9,14 @@ import { useGetSearchResultsForMultipleLazyQuery, useGetSearchResultsLazyQuery, } from '../../../graphql/search.generated'; -import { ResourceFilter, PolicyType, EntityType, Domain } from '../../../types.generated'; +import { ResourceFilter, PolicyType, EntityType, Domain, Entity } from '../../../types.generated'; import { convertLegacyResourceFilter, createCriterionValue, createCriterionValueWithEntity, EMPTY_POLICY, getFieldValues, + getFieldValuesOfTags, mapResourceTypeToDisplayName, mapResourceTypeToEntityType, mapResourceTypeToPrivileges, @@ -24,20 +25,28 @@ import { import DomainNavigator from '../../domain/nestedDomains/domainNavigator/DomainNavigator'; import { BrowserWrapper } from '../../shared/tags/AddTagsTermsModal'; import ClickOutside from '../../shared/ClickOutside'; +import { TagTermLabel } from '../../shared/tags/TagTermLabel'; +import { ENTER_KEY_CODE } from '../../shared/constants'; +import { useGetRecommendations } from '../../shared/recommendation'; type Props = { policyType: PolicyType; resources?: ResourceFilter; setResources: (resources: ResourceFilter) => void; + selectedTags?: any[]; + setSelectedTags: (data: any) => void; + setEditState: (data: boolean) => void; + isEditState: boolean; privileges: Array; setPrivileges: (newPrivs: Array) => void; + focusPolicyUrn: string | undefined; }; const SearchResultContainer = styled.div` display: flex; justify-content: space-between; align-items: center; - padding: 12px; + padding: 4px; `; const PrivilegesForm = styled(Form)` @@ -46,6 +55,21 @@ const PrivilegesForm = styled(Form)` margin-bottom: 40px; `; +const TagSelect = styled(Select)` + width: 480px; +`; + +const StyleTag = styled(CustomTag)` + margin: 2px; + display: flex; + justify-content: start; + align-items: center; + white-space: nowrap; + opacity: 1; + color: #434343; + line-height: 16px; +`; + /** * Component used to construct the "privileges" and "resources" portion of a DataHub * access Policy. 
@@ -56,10 +80,21 @@ export default function PolicyPrivilegeForm({ setResources, privileges, setPrivileges, + setSelectedTags, + selectedTags, + setEditState, + isEditState, + focusPolicyUrn, }: Props) { const entityRegistry = useEntityRegistry(); const [domainInputValue, setDomainInputValue] = useState(''); const [isFocusedOnInput, setIsFocusedOnInput] = useState(false); + const [inputValue, setInputValue] = useState(''); + const [tagTermSearch, { data: tagTermSearchData }] = useGetSearchResultsLazyQuery(); + const [recommendedData] = useGetRecommendations([EntityType.Tag]); + const tagSearchResults = tagTermSearchData?.search?.searchResults?.map((searchResult) => searchResult.entity) || []; + + const inputEl = useRef(null); // Configuration used for displaying options const { @@ -295,6 +330,131 @@ export default function PolicyPrivilegeForm({ setDomainInputValue(''); } + function handleBlurTag() { + setInputValue(''); + } + + const renderSearchResultTags = (entity: Entity) => { + const displayName = + entity.type === EntityType.Tag ? (entity as any).name : entityRegistry.getDisplayName(entity.type, entity); + const tagOrTermComponent = ; + return ( + + {tagOrTermComponent} + + ); + }; + const tags = getFieldValues(resources.filter, 'TAG') || []; + const newTag = getFieldValues(resources.filter, 'TAG').map((criterionValue) => { + if (criterionValue?.value) { + return criterionValue?.value; + } + return criterionValue; + }); + + const editTags = getFieldValuesOfTags(resources.filter, 'TAG').map((criterionValue) => { + if (criterionValue?.value) { + return criterionValue?.entity; + } + return criterionValue; + }); + const tagResult = !inputValue || inputValue.length === 0 ? recommendedData : tagSearchResults; + useEffect(() => { + if (focusPolicyUrn && isEditState && setEditState && editTags && newTag) { + setEditState(false); + const filter = resources.filter || { + criteria: [], + }; + setSelectedTags(editTags); + setResources({ + ...resources, + filter: setFieldValues(filter, 'TAG', [...(newTag as any)]), + }); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [focusPolicyUrn, isEditState, setEditState, editTags, newTag]); + + const onSelectValue = (urn: string) => { + const filter = resources.filter || { + criteria: [], + }; + const selectedTagOption = tagResult?.find((tag) => tag.urn === urn); + + setResources({ + ...resources, + filter: setFieldValues(filter, 'TAG', [...tags, urn as any]), + }); + setSelectedTags([...(selectedTags as any), selectedTagOption]); + if (inputEl && inputEl.current) { + (inputEl.current as any).blur(); + } + }; + + // When a Tag search result is deselected, remove the Tags + const onDeselectValue = (urn: string) => { + const filter = resources.filter || { + criteria: [], + }; + setInputValue(''); + setSelectedTags(selectedTags?.filter((term) => term.urn !== urn)); + + setResources({ + ...resources, + filter: setFieldValues( + filter, + 'TAG', + tags?.filter((criterionValue) => (criterionValue as any) !== urn), + ), + }); + }; + + const type = EntityType.Tag; + const handleSearch = (text: string) => { + if (text.length > 0) { + tagTermSearch({ + variables: { + input: { + type, + query: text, + start: 0, + count: 10, + }, + }, + }); + } + }; + + const tagSearchOptions = tagResult?.map((result) => { + return renderSearchResultTags(result); + }); + + function clearInput() { + setInputValue(''); + setTimeout(() => setIsFocusedOnInput(true), 0); // call after click outside + } + + const tagRender = (props) => { + // eslint-disable-next-line 
react/prop-types + const { closable, onClose, value } = props; + const onPreventMouseDown = (event) => { + event.preventDefault(); + event.stopPropagation(); + }; + + const selectedItem = selectedTags?.find((term) => term?.urn === value); + return ( + + {selectedItem?.name} + + ); + }; + + function handleKeyDown(event) { + if (event.keyCode === ENTER_KEY_CODE) { + (inputEl.current as any).blur(); + } + } + return ( {showResourceFilterInput && ( @@ -362,6 +522,38 @@ export default function PolicyPrivilegeForm({ )} + {showResourceFilterInput && ( + Select Tags}> + + The policy will apply to all entities containing all of the chosen tags. If no tags are + selected, the policy will not account for tags. + + onSelectValue(asset)} + onDeselect={(asset: any) => onDeselectValue(asset)} + onSearch={(value: string) => { + // eslint-disable-next-line react/prop-types + handleSearch(value.trim()); + // eslint-disable-next-line react/prop-types + setInputValue(value.trim()); + }} + tagRender={tagRender} + value={tags} + onClear={clearInput} + onBlur={handleBlurTag} + onInputKeyDown={handleKeyDown} + > + {tagSearchOptions} + + + )} {showResourceFilterInput && ( Select Domains}> diff --git a/datahub-web-react/src/app/permissions/policy/policyUtils.ts b/datahub-web-react/src/app/permissions/policy/policyUtils.ts index 27aa8fcd351e9..c7ec171bc2c29 100644 --- a/datahub-web-react/src/app/permissions/policy/policyUtils.ts +++ b/datahub-web-react/src/app/permissions/policy/policyUtils.ts @@ -118,6 +118,10 @@ export const getFieldValues = (filter: Maybe | undefined, res return filter?.criteria?.find((criterion) => criterion.field === resourceFieldType)?.values || []; }; +export const getFieldValuesOfTags = (filter: Maybe | undefined, resourceFieldType: string) => { + return filter?.criteria?.find((criterion) => criterion.field === resourceFieldType)?.values || []; +}; + export const setFieldValues = ( filter: PolicyMatchFilter, resourceFieldType: string, diff --git a/datahub-web-react/src/app/permissions/policy/usePolicy.ts b/datahub-web-react/src/app/permissions/policy/usePolicy.ts index 6f359805e42db..d04ea25d20b23 100644 --- a/datahub-web-react/src/app/permissions/policy/usePolicy.ts +++ b/datahub-web-react/src/app/permissions/policy/usePolicy.ts @@ -44,19 +44,22 @@ export function usePolicy( const [deletePolicy, { error: deletePolicyError }] = useDeletePolicyMutation(); - const toFilterInput = (filter: PolicyMatchFilter): PolicyMatchFilterInput => { + const toFilterInput = (filter: PolicyMatchFilter,state?:string | undefined): PolicyMatchFilterInput => { + console.log({state}) return { criteria: filter.criteria?.map((criterion): PolicyMatchCriterionInput => { return { field: criterion.field, - values: criterion.values.map((criterionValue) => criterionValue.value), + values: criterion.values.map((criterionValue) => + criterion.field === 'TAG' && state !=='TOGGLE' ? 
(criterionValue as any) : criterionValue.value,
                    ),
                    condition: criterion.condition,
                };
            }),
        };
    };

-    const toPolicyInput = (policy: Omit): PolicyUpdateInput => {
+    const toPolicyInput = (policy: Omit, state?: string | undefined): PolicyUpdateInput => {
         let policyInput: PolicyUpdateInput = {
             type: policy.type,
             name: policy.name,
@@ -79,7 +82,7 @@
             allResources: policy.resources.allResources,
         };
         if (policy.resources.filter) {
-            resourceFilter = { ...resourceFilter, filter: toFilterInput(policy.resources.filter) };
+            resourceFilter = { ...resourceFilter, filter: toFilterInput(policy.resources.filter, state) };
         }
         // Add the resource filters.
         policyInput = {
@@ -151,7 +154,7 @@
         updatePolicy({
             variables: {
                 urn: policy?.urn as string, // There must be a focus policy urn.
-                input: toPolicyInput(newPolicy),
+                input: toPolicyInput(newPolicy, 'TOGGLE'),
             },
         }).then(() => {
             const updatePolicies = {
@@ -178,6 +181,7 @@
             __typename: 'ListPoliciesResult',
             urn: focusPolicyUrn,
             ...savePolicy,
+            resources: null,
         };
         analytics.event({
             type: EventType.UpdatePolicyEvent,

From f743d3c044f6c1025d5f9891b3bab0a873d83b61 Mon Sep 17 00:00:00 2001
From: Andrea Scarpino
Date: Mon, 5 Feb 2024 12:06:50 +0000
Subject: [PATCH 479/792] feat(ingest/metabase): Allow to configure a different url for links (#9727)

---
 .../src/datahub/ingestion/source/metabase.py | 21 ++++++++++++++-----
 .../tests/unit/test_metabase_source.py       |  9 ++++++++
 2 files changed, 25 insertions(+), 5 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/metabase.py b/metadata-ingestion/src/datahub/ingestion/source/metabase.py
index d22bfb2b8b52f..a65bb76345f4b 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/metabase.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/metabase.py
@@ -6,7 +6,7 @@
 import dateutil.parser as dp
 import pydantic
 import requests
-from pydantic import Field, validator
+from pydantic import Field, root_validator, validator
 from requests.models import HTTPError

 import datahub.emitter.mce_builder as builder
@@ -53,6 +53,10 @@ class MetabaseConfig(DatasetLineageProviderConfigBase):
     # See the Metabase /api/session endpoint for details
     # https://www.metabase.com/docs/latest/api-documentation.html#post-apisession
     connect_uri: str = Field(default="localhost:3000", description="Metabase host URL.")
+    display_uri: Optional[str] = Field(
+        default=None,
+        description="optional URL to use in links (if `connect_uri` is only for ingestion)",
+    )
     username: Optional[str] = Field(default=None, description="Metabase username.")
     password: Optional[pydantic.SecretStr] = Field(
         default=None, description="Metabase password."
@@ -76,10 +80,17 @@ class MetabaseConfig(DatasetLineageProviderConfigBase):
         description="Default schema name to use when schema is not provided in an SQL query",
     )

-    @validator("connect_uri")
+    @validator("connect_uri", "display_uri")
     def remove_trailing_slash(cls, v):
         return config_clean.remove_trailing_slashes(v)

+    @root_validator(skip_on_failure=True)
+    def default_display_uri_to_connect_uri(cls, values):
+        base = values.get("display_uri")
+        if base is None:
+            values["display_uri"] = values.get("connect_uri")
+        return values
+

 @platform_name("Metabase")
 @config_class(MetabaseConfig)
@@ -239,7 +250,7 @@ def construct_dashboard_from_api_data(
         self, dashboard_info: dict
     ) -> Optional[DashboardSnapshot]:
         dashboard_id = dashboard_info.get("id", "")
-        dashboard_url = f"{self.config.connect_uri}/api/dashboard/{dashboard_id}"
+        dashboard_url = f"{self.config.display_uri}/api/dashboard/{dashboard_id}"
         try:
             dashboard_response = self.session.get(dashboard_url)
             dashboard_response.raise_for_status()
@@ -297,7 +308,7 @@

     @lru_cache(maxsize=None)
     def _get_ownership(self, creator_id: int) -> Optional[OwnershipClass]:
-        user_info_url = f"{self.config.connect_uri}/api/user/{creator_id}"
+        user_info_url = f"{self.config.display_uri}/api/user/{creator_id}"
         try:
             user_info_response = self.session.get(user_info_url)
             user_info_response.raise_for_status()
@@ -362,7 +373,7 @@ def get_card_details_by_id(self, card_id: Union[int, str]) -> dict:
         :param int datasource_id: Numeric datasource ID received from Metabase API
         :return: dict with info or empty dict
         """
-        card_url = f"{self.config.connect_uri}/api/card/{card_id}"
+        card_url = f"{self.config.display_uri}/api/card/{card_id}"
         try:
             card_response = self.session.get(card_url)
             card_response.raise_for_status()
diff --git a/metadata-ingestion/tests/unit/test_metabase_source.py b/metadata-ingestion/tests/unit/test_metabase_source.py
index 642752b8ca0e6..08c0ddb503664 100644
--- a/metadata-ingestion/tests/unit/test_metabase_source.py
+++ b/metadata-ingestion/tests/unit/test_metabase_source.py
@@ -40,3 +40,12 @@ def test_get_platform_instance():

     # database_id_to_instance_map is missing, platform_instance_map is defined and key missing
     assert metabase.get_platform_instance("missing-platform", 999) is None
+
+
+def test_set_display_uri():
+    display_uri = "some_host:1234"
+
+    config = MetabaseConfig.parse_obj({"display_uri": display_uri})
+
+    assert config.connect_uri == "localhost:3000"
+    assert config.display_uri == display_uri
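As a rough sketch of the intended semantics, based on the validators above and
the unit test just added (the hostnames here are made up for illustration):
display_uri falls back to connect_uri when unset, and trailing slashes are
stripped from both before the fallback is applied.

    from datahub.ingestion.source.metabase import MetabaseConfig

    # Only the ingestion URL is configured; display_uri defaults to connect_uri
    # after the field validator removes the trailing slash.
    config = MetabaseConfig.parse_obj({"connect_uri": "http://metabase-internal:3000/"})
    assert config.display_uri == "http://metabase-internal:3000"

    # A separate public URL is used for the links DataHub renders.
    config = MetabaseConfig.parse_obj(
        {
            "connect_uri": "http://metabase-internal:3000",
            "display_uri": "https://metabase.example.com",
        }
    )
    assert config.display_uri == "https://metabase.example.com"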
From 1d514cb0d4337fef306c8ad8374d0841917afb83 Mon Sep 17 00:00:00 2001
From: Dimitri <36767102+dim-ops@users.noreply.github.com>
Date: Mon, 5 Feb 2024 13:07:56 +0100
Subject: [PATCH 480/792] docs(ingest/elasticsearch): add description fields (#9693)

Co-authored-by: Dimitri GRISARD

---
 .../src/datahub/ingestion/source/elastic_search.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/elastic_search.py b/metadata-ingestion/src/datahub/ingestion/source/elastic_search.py
index 7ec71a22f6fe6..653b80c116adf 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/elastic_search.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/elastic_search.py
@@ -299,9 +299,13 @@ class ElasticsearchSourceConfig(PlatformInstanceConfigMixin, EnvConfigMixin):

     profiling: ElasticProfiling = Field(
         default_factory=ElasticProfiling,
+        description="Configs to ingest data profiles from ElasticSearch.",
     )
     collapse_urns: CollapseUrns = Field(
         default_factory=CollapseUrns,
+        description="""List of regex patterns to remove from the name of the URN. All of the indices before removal of URNs are considered as the same dataset. These are applied in order for each URN.
+        The main case where you would want multiple of these is when the names you are trying to remove suffixes from have different formats,
+        e.g. names ending with -YYYY-MM-DD as well as names ending with -epochtime would require 2 regex patterns to remove the suffixes across all URNs.""",
     )

     def is_profiling_enabled(self) -> bool:
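Since the fields inside CollapseUrns are not shown in this patch, here is the
suffix-stripping idea from the new description expressed as a plain-re sketch;
the patterns and index names are illustrative only. Two differently formatted
suffixes need two patterns, applied in order:

    import re

    # One pattern per suffix format: a -YYYY-MM-DD date and a trailing epoch timestamp.
    suffix_patterns = [r"-\d{4}-\d{2}-\d{2}$", r"-\d{10}$"]

    def collapse(name: str) -> str:
        for pattern in suffix_patterns:
            name = re.sub(pattern, "", name)
        return name

    # Both index names collapse to the same dataset name.
    assert collapse("app-logs-2024-02-05") == "app-logs"
    assert collapse("app-logs-1707100000") == "app-logs"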
From 3b898dbae94df3e668ec7f128da28a259320dc2e Mon Sep 17 00:00:00 2001
From: cburroughs
Date: Mon, 5 Feb 2024 11:28:24 -0500
Subject: [PATCH 481/792] fix(ingest/snowflake): stop pinning transitive spacy dependency (#9768)

---
 metadata-ingestion/setup.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index 67ea861230fed..bbbab73fd1cf5 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -186,8 +186,6 @@
     "cryptography",
     "msal",
     "acryl-datahub-classify==0.0.9",
-    # spacy version restricted to reduce backtracking, used by acryl-datahub-classify,
-    "spacy==3.4.3",
 }

 trino = {

From bf1ce96ae63ed3e1c9bc26ec216b0eb481fb3c40 Mon Sep 17 00:00:00 2001
From: skrydal
Date: Mon, 5 Feb 2024 17:29:14 +0100
Subject: [PATCH 482/792] feat(cli): Add support for custom ownership types to dataproduct CLI (#9762)

Co-authored-by: Harshal Sheth

---
 .../api/entities/dataproduct/dataproduct.py   |  32 ++---
 .../datahub/cli/specific/dataproduct_cli.py   |  11 +-
 .../dataproduct_ownership_type_urn.yaml       |  25 ++++
 ...ct_ownership_type_urn_different_owner.yaml |  28 ++++
 ...en_dataproduct_out_ownership_type_urn.json | 125 ++++++++++++++++++
 .../entities/dataproducts/test_dataproduct.py |  50 +++++++
 6 files changed, 249 insertions(+), 22 deletions(-)
 create mode 100644 metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn.yaml
 create mode 100644 metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn_different_owner.yaml
 create mode 100644 metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_ownership_type_urn.json

diff --git a/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py b/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py
index 28e4a03b8f75f..61bda90447c62 100644
--- a/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py
+++ b/metadata-ingestion/src/datahub/api/entities/dataproduct/dataproduct.py
@@ -69,23 +69,9 @@ class Ownership(ConfigModel):
     type: str

     @pydantic.validator("type")
-    def ownership_type_must_be_mappable(cls, v: str) -> str:
-        _ownership_types = [
-            OwnershipTypeClass.BUSINESS_OWNER,
-            OwnershipTypeClass.CONSUMER,
-            OwnershipTypeClass.DATA_STEWARD,
-            OwnershipTypeClass.DATAOWNER,
-            OwnershipTypeClass.DELEGATE,
-            OwnershipTypeClass.DEVELOPER,
-            OwnershipTypeClass.NONE,
-            OwnershipTypeClass.PRODUCER,
-            OwnershipTypeClass.STAKEHOLDER,
-            OwnershipTypeClass.TECHNICAL_OWNER,
-        ]
-        if v.upper() not in _ownership_types:
-            raise ValueError(f"Ownership type {v} not in {_ownership_types}")
-
-        return v.upper()
+    def ownership_type_must_be_mappable_or_custom(cls, v: str) -> str:
+        _, _ = builder.validate_ownership_type(v)
+        return v


 class DataProduct(ConfigModel):
@@ -155,9 +141,13 @@ def _mint_owner(self, owner: Union[str, Ownership]) -> OwnerClass:
             )
         else:
             assert isinstance(owner, Ownership)
+            ownership_type, ownership_type_urn = builder.validate_ownership_type(
+                owner.type
+            )
             return OwnerClass(
                 owner=builder.make_user_urn(owner.id),
-                type=owner.type,
+                type=ownership_type,
+                typeUrn=ownership_type_urn,
             )

     def _generate_properties_mcp(
@@ -314,6 +304,8 @@ def from_datahub(cls, graph: DataHubGraph, id: str) -> "DataProduct":
             for o in owners.owners:
                 if o.type == OwnershipTypeClass.TECHNICAL_OWNER:
                     yaml_owners.append(o.owner)
+                elif o.type == OwnershipTypeClass.CUSTOM:
+                    yaml_owners.append(Ownership(id=o.owner, type=str(o.typeUrn)))
                 else:
                     yaml_owners.append(Ownership(id=o.owner, type=str(o.type)))
         glossary_terms: Optional[GlossaryTermsClass] = graph.get_aspect(
@@ -355,7 +347,7 @@ def _patch_ownership(
             if isinstance(new_owner, Ownership):
                 new_owner_type_map[new_owner.id] = new_owner.type
             else:
-                new_owner_type_map[new_owner] = "TECHNICAL_OWNER"
+                new_owner_type_map[new_owner] = OwnershipTypeClass.TECHNICAL_OWNER
         owners_matched = set()
         patches_add: list = []
         patches_drop: dict = {}
@@ -385,7 +377,7 @@
                 owners_matched.add(owner_urn)
                 if new_owner_type_map[owner_urn] != o.type:
                     patches_replace[i] = {
-                        "id": o,
+                        "id": o.id,
                         "type": new_owner_type_map[owner_urn],
                     }
                 else:
diff --git a/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py b/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py
index a52a9dddff127..afac38e29722e 100644
--- a/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py
+++ b/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py
@@ -14,7 +14,11 @@
 from datahub.api.entities.dataproduct.dataproduct import DataProduct
 from datahub.cli.specific.file_loader import load_file
-from datahub.emitter.mce_builder import make_group_urn, make_user_urn
+from datahub.emitter.mce_builder import (
+    make_group_urn,
+    make_user_urn,
+    validate_ownership_type,
+)
 from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
 from datahub.metadata.schema_classes import OwnerClass, OwnershipTypeClass
 from datahub.specific.dataproduct import DataProductPatchBuilder
@@ -332,8 +336,11 @@ def add_owner(urn: str, owner: str, owner_type: str) -> None:
     if not urn.startswith("urn:li:dataProduct:"):
         urn = f"urn:li:dataProduct:{urn}"
     dataproduct_patcher: DataProductPatchBuilder = DataProduct.get_patch_builder(urn)
+    owner_type, owner_type_urn = validate_ownership_type(owner_type)
     dataproduct_patcher.add_owner(
-        owner=OwnerClass(owner=_get_owner_urn(owner), type=owner_type)
+        owner=OwnerClass(
+            owner=_get_owner_urn(owner), type=owner_type, typeUrn=owner_type_urn
+        )
     )
     with get_default_graph() as graph:
         _abort_if_non_existent_urn(graph, urn, "add owners")
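A sketch of what validate_ownership_type returns for the two owner forms used
in the test YAML below; the behavior is inferred from this diff and the golden
file, and is not an excerpt from the patch:

    import datahub.emitter.mce_builder as builder

    # Built-in ownership types pass through unchanged, with no custom type URN.
    ownership_type, ownership_type_urn = builder.validate_ownership_type("TECHNICAL_OWNER")
    assert ownership_type == "TECHNICAL_OWNER"
    assert ownership_type_urn is None

    # A custom ownership type URN maps to type CUSTOM plus a typeUrn, which is
    # what lands on the emitted OwnerClass (see the golden JSON below).
    ownership_type, ownership_type_urn = builder.validate_ownership_type(
        "urn:li:ownershipType:abc"
    )
    assert ownership_type == "CUSTOM"
    assert ownership_type_urn == "urn:li:ownershipType:abc"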
diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn.yaml b/metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn.yaml
new file mode 100644
index 0000000000000..543f9db6abd22
--- /dev/null
+++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn.yaml
@@ -0,0 +1,25 @@
+id: pet_of_the_week
+domain: Marketing
+display_name: Pet of the Week Campaign
+description: |-
+  This campaign includes Pet of the Week data.
+
+assets:
+  - urn:li:container:DATABASE
+  - urn:li:container:SCHEMA
+  - urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)
+
+owners:
+  - id: urn:li:corpuser:jdoe
+    type: urn:li:ownershipType:abc
+properties:
+  version: 2.0
+  classification: pii
+
+tags:
+  - urn:li:tag:awesome
+
+terms:
+  - urn:li:glossaryTerm:ClientsAndAccounts.AccountBalance
+
+external_url: https://github.com/datahub-project/datahub
diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn_different_owner.yaml b/metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn_different_owner.yaml
new file mode 100644
index 0000000000000..fcc79bcbe5edb
--- /dev/null
+++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/dataproduct_ownership_type_urn_different_owner.yaml
@@ -0,0 +1,28 @@
+id: pet_of_the_week
+domain: Marketing
+display_name: Pet of the Week Campaign
+description: |-
+  This campaign includes Pet of the Week data.
+
+assets:
+  - urn:li:container:DATABASE
+  - urn:li:container:SCHEMA
+  - urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)
+
+owners:
+  - id: urn:li:corpuser:jsmith
+    type: urn:li:ownershipType:abc
+  - id: urn:li:corpuser:jdoe
+    type: TECHNICAL_OWNER
+
+properties:
+  version: 2.0
+  classification: pii
+
+tags:
+  - urn:li:tag:awesome
+
+terms:
+  - urn:li:glossaryTerm:ClientsAndAccounts.AccountBalance
+
+external_url: https://github.com/datahub-project/datahub
diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_ownership_type_urn.json b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_ownership_type_urn.json
new file mode 100644
index 0000000000000..35c46caa682db
--- /dev/null
+++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_ownership_type_urn.json
@@ -0,0 +1,125 @@
+[
+  {
+    "entityType": "dataProduct",
+    "entityUrn": "urn:li:dataProduct:pet_of_the_week",
+    "changeType": "UPSERT",
+    "aspectName": "dataProductProperties",
+    "aspect": {
+ "json": { + "terms": [ + { + "urn": "urn:li:glossaryTerm:ClientsAndAccounts.AccountBalance" + } + ], + "auditStamp": { + "time": 1681455600000, + "actor": "urn:li:corpuser:datahub", + "message": "yaml" + } + } + } + }, + { + "entityType": "dataProduct", + "entityUrn": "urn:li:dataProduct:pet_of_the_week", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:jdoe", + "type": "CUSTOM", + "typeUrn": "urn:li:ownershipType:abc" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + } + }, + { + "entityType": "dataProduct", + "entityUrn": "urn:li:dataProduct:pet_of_the_week", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } + } +] \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/test_dataproduct.py b/metadata-ingestion/tests/unit/api/entities/dataproducts/test_dataproduct.py index 9900eb1e7f283..e796f0b3f3721 100644 --- a/metadata-ingestion/tests/unit/api/entities/dataproducts/test_dataproduct.py +++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/test_dataproduct.py @@ -153,3 +153,53 @@ def test_dataproduct_patch_yaml( ) is False ) + + +@freeze_time(FROZEN_TIME) +def test_dataproduct_ownership_type_urn_from_yaml( + pytestconfig: pytest.Config, + tmp_path: Path, + test_resources_dir: Path, + base_mock_graph: MockDataHubGraph, +) -> None: + data_product_file = test_resources_dir / "dataproduct_ownership_type_urn.yaml" + mock_graph = base_mock_graph + data_product = DataProduct.from_yaml(data_product_file, mock_graph) + + for mcp in data_product.generate_mcp(upsert=False): + mock_graph.emit(mcp) + + output_file = tmp_path / "test_dataproduct_out.json" + mock_graph.sink_to_file(output_file) + golden_file = test_resources_dir / "golden_dataproduct_out_ownership_type_urn.json" + check_golden_file(pytestconfig, output_file, golden_file) + + +@freeze_time(FROZEN_TIME) +def test_dataproduct_ownership_type_urn_patch_yaml( + tmp_path: Path, test_resources_dir: Path, base_mock_graph: MockDataHubGraph +) -> None: + mock_graph = base_mock_graph + source_file = test_resources_dir / "golden_dataproduct_out_ownership_type_urn.json" + mock_graph.import_file(source_file) + + data_product_file = ( + test_resources_dir / "dataproduct_ownership_type_urn_different_owner.yaml" + ) + original_data_product: DataProduct = DataProduct.from_yaml( + data_product_file, mock_graph + ) + + data_product: DataProduct = DataProduct.from_datahub( + mock_graph, id="urn:li:dataProduct:pet_of_the_week" + ) + + dataproduct_output_file = ( + tmp_path / "patch_dataproduct_ownership_type_urn_different_owner.yaml" + ) + data_product.patch_yaml(original_data_product, dataproduct_output_file) + + assert not check_yaml_golden_file( + str(dataproduct_output_file), + str(test_resources_dir / "dataproduct_ownership_type_urn.yaml"), + ) From bec01e02cfe1d1e4e540d8569bdec04225bb2566 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Tue, 6 Feb 2024 01:32:35 +0900 Subject: [PATCH 483/792] docs: update champion bio (#9773) --- docs-website/src/pages/champions/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-website/src/pages/champions/index.js b/docs-website/src/pages/champions/index.js index ca161354ecbdc..7109edde6a880 100644 --- a/docs-website/src/pages/champions/index.js +++ b/docs-website/src/pages/champions/index.js @@ -166,7 +166,7 @@ const championSections = [ bio: ( <>
-          Reliably provided direction to Community Members across all support channels in Slack.
+          Reliably provides direction to community members and submitted 5 pull requests, including improvements to Athena ingestion (support for nested schemas) and the REST emitter.
          </>
    ), From 3bfc1e46a708c4779feae17c6e2ab727bfee2817 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Mon, 5 Feb 2024 20:51:57 +0100 Subject: [PATCH 484/792] fix(ingest/fivetran): Fixing issue when connector user is empty (#9774) --- .../source/fivetran/fivetran_log_api.py | 8 +- ...nowflake_empty_connection_user_golden.json | 618 ++++++++++++++++++ .../integration/fivetran/test_fivetran.py | 115 +++- 3 files changed, 726 insertions(+), 15 deletions(-) create mode 100644 metadata-ingestion/tests/integration/fivetran/fivetran_snowflake_empty_connection_user_golden.json diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py index bdef28e30db2b..93e2e51acba19 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py +++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran_log_api.py @@ -158,11 +158,13 @@ def _get_user_name(self, user_id: Optional[str]) -> Optional[str]: return None user_details = self._query( self.fivetran_log_query.get_user_query(user_id=user_id) - )[0] - return ( - f"{user_details[Constant.GIVEN_NAME]} {user_details[Constant.FAMILY_NAME]}" ) + if not user_details: + return None + + return f"{user_details[0][Constant.GIVEN_NAME]} {user_details[0][Constant.FAMILY_NAME]}" + def get_allowed_connectors_list( self, connector_patterns: AllowDenyPattern, report: FivetranSourceReport ) -> List[Connector]: diff --git a/metadata-ingestion/tests/integration/fivetran/fivetran_snowflake_empty_connection_user_golden.json b/metadata-ingestion/tests/integration/fivetran/fivetran_snowflake_empty_connection_user_golden.json new file mode 100644 index 0000000000000..65f8620d0b4c4 --- /dev/null +++ b/metadata-ingestion/tests/integration/fivetran/fivetran_snowflake_empty_connection_user_golden.json @@ -0,0 +1,618 @@ +[ +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "dataFlowInfo", + "aspect": { + "json": { + "customProperties": {}, + "name": "postgres" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "dataJobInfo", + "aspect": { + "json": { + "customProperties": { + "paused": "False", + "sync_frequency": "1440", + "destination_id": "'interval_unconstitutional'" + }, + "name": "postgres", + "type": { + "string": "COMMAND" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + 
"entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "dataJobInputOutput", + "aspect": { + "json": { + "inputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ], + "outputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ], + "inputDatajobs": [], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV),id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV),name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD),name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV),id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV),name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD),name)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": 
"urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": {}, + "name": "4c9a03d6-eded-4422-a46a-163266e58243", + "type": "BATCH_SCHEDULED", + "created": { + "time": 1695191853000, + "actor": "urn:li:corpuser:datahub" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "upstreamInstances": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceInput", + "aspect": { + "json": { + "inputs": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1695191853000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "STARTED" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:ee88d32dbe3133a23a9023c097050190", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1695191885000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "COMPLETE", + "result": { + "type": "SUCCESS", + "nativeResultType": "fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": 
"dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": {}, + "name": "f773d1e9-c791-48f4-894f-8cf9b3dfc834", + "type": "BATCH_SCHEDULED", + "created": { + "time": 1696343730000, + "actor": "urn:li:corpuser:datahub" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "upstreamInstances": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceInput", + "aspect": { + "json": { + "inputs": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343730000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "STARTED" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:be36f55c13ec4e313c7510770e50784a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343732000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "COMPLETE", + "result": { + "type": "SKIPPED", + "nativeResultType": "fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": {}, + "name": "63c2fc85-600b-455f-9ba0-f576522465be", + "type": "BATCH_SCHEDULED", + "created": { + "time": 1696343755000, + 
"actor": "urn:li:corpuser:datahub" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "upstreamInstances": [] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceInput", + "aspect": { + "json": { + "inputs": [ + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.employee,DEV)", + "urn:li:dataset:(urn:li:dataPlatform:postgres,postgres_db.public.company,DEV)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343755000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "STARTED" + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:d8f100271d2dc3fa905717f82d083c8d", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1696343790000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "status": "COMPLETE", + "result": { + "type": "FAILURE", + "nativeResultType": "fivetran" + } + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(fivetran,calendar_elected,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1654621200000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +} +] \ No newline at end of file 
diff --git a/metadata-ingestion/tests/integration/fivetran/test_fivetran.py b/metadata-ingestion/tests/integration/fivetran/test_fivetran.py index 22dbd58acf1e5..a757975b2e137 100644 --- a/metadata-ingestion/tests/integration/fivetran/test_fivetran.py +++ b/metadata-ingestion/tests/integration/fivetran/test_fivetran.py @@ -1,4 +1,5 @@ import datetime +from functools import partial from unittest import mock from unittest.mock import MagicMock @@ -18,24 +19,28 @@ FROZEN_TIME = "2022-06-07 17:00:00" +default_connector_query_results = [ + { + "connector_id": "calendar_elected", + "connecting_user_id": "reapply_phone", + "connector_type_id": "postgres", + "connector_name": "postgres", + "paused": False, + "sync_frequency": 1440, + "destination_id": "interval_unconstitutional", + }, +] -def default_query_results(query): + +def default_query_results( + query, connector_query_results=default_connector_query_results +): fivetran_log_query = FivetranLogQuery() fivetran_log_query.set_db("test") if query == fivetran_log_query.use_database("test_database"): return [] elif query == fivetran_log_query.get_connectors_query(): - return [ - { - "connector_id": "calendar_elected", - "connecting_user_id": "reapply_phone", - "connector_type_id": "postgres", - "connector_name": "postgres", - "paused": False, - "sync_frequency": 1440, - "destination_id": "interval_unconstitutional", - }, - ] + return connector_query_results elif query == fivetran_log_query.get_table_lineage_query("calendar_elected"): return [ { @@ -182,6 +187,92 @@ def test_fivetran_with_snowflake_dest(pytestconfig, tmp_path): ) +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_fivetran_with_snowflake_dest_and_null_connector_user(pytestconfig, tmp_path): + test_resources_dir = pytestconfig.rootpath / "tests/integration/fivetran" + + # Run the metadata ingestion pipeline. 
+ output_file = tmp_path / "fivetran_test_events.json" + golden_file = ( + test_resources_dir / "fivetran_snowflake_empty_connection_user_golden.json" + ) + + with mock.patch( + "datahub.ingestion.source.fivetran.fivetran_log_api.create_engine" + ) as mock_create_engine: + connection_magic_mock = MagicMock() + + connector_query_results = [ + { + "connector_id": "calendar_elected", + "connecting_user_id": None, + "connector_type_id": "postgres", + "connector_name": "postgres", + "paused": False, + "sync_frequency": 1440, + "destination_id": "interval_unconstitutional", + }, + ] + + connection_magic_mock.execute.side_effect = partial( + default_query_results, connector_query_results=connector_query_results + ) + + mock_create_engine.return_value = connection_magic_mock + + pipeline = Pipeline.create( + { + "run_id": "powerbi-test", + "source": { + "type": "fivetran", + "config": { + "fivetran_log_config": { + "destination_platform": "snowflake", + "snowflake_destination_config": { + "account_id": "testid", + "warehouse": "test_wh", + "username": "test", + "password": "test@123", + "database": "test_database", + "role": "testrole", + "log_schema": "test", + }, + }, + "connector_patterns": { + "allow": [ + "postgres", + ] + }, + "sources_to_database": { + "calendar_elected": "postgres_db", + }, + "sources_to_platform_instance": { + "calendar_elected": { + "env": "DEV", + } + }, + }, + }, + "sink": { + "type": "file", + "config": { + "filename": f"{output_file}", + }, + }, + } + ) + + pipeline.run() + pipeline.raise_from_status() + + mce_helpers.check_golden_file( + pytestconfig, + output_path=f"{output_file}", + golden_path=f"{golden_file}", + ) + + @freeze_time(FROZEN_TIME) @pytest.mark.integration def test_fivetran_with_bigquery_dest(pytestconfig, tmp_path): From a3ef587f54067598141afe3e584aa5742f817fc7 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 5 Feb 2024 13:12:49 -0800 Subject: [PATCH 485/792] feat(ingest/glossary): support pydantic v2 in business-glossary source (#9763) --- .../source/metadata/business_glossary.py | 64 ++++++++++--------- .../powerbi/rest_api_wrapper/powerbi_api.py | 1 + 2 files changed, 34 insertions(+), 31 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index 675c87b13313d..e3f0fd118625a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -34,44 +34,44 @@ class Owners(ConfigModel): - users: Optional[List[str]] - groups: Optional[List[str]] + users: Optional[List[str]] = None + groups: Optional[List[str]] = None class KnowledgeCard(ConfigModel): - url: Optional[str] - label: Optional[str] + url: Optional[str] = None + label: Optional[str] = None class GlossaryTermConfig(ConfigModel): - id: Optional[str] + id: Optional[str] = None name: str description: str - term_source: Optional[str] - source_ref: Optional[str] - source_url: Optional[str] - owners: Optional[Owners] - inherits: Optional[List[str]] - contains: Optional[List[str]] - values: Optional[List[str]] - related_terms: Optional[List[str]] - custom_properties: Optional[Dict[str, str]] - knowledge_links: Optional[List[KnowledgeCard]] - domain: Optional[str] + term_source: Optional[str] = None + source_ref: Optional[str] = None + source_url: Optional[str] = None + owners: Optional[Owners] = None + inherits: Optional[List[str]] = None + contains: 
Optional[List[str]] = None + values: Optional[List[str]] = None + related_terms: Optional[List[str]] = None + custom_properties: Optional[Dict[str, str]] = None + knowledge_links: Optional[List[KnowledgeCard]] = None + domain: Optional[str] = None # Private fields. _urn: str class GlossaryNodeConfig(ConfigModel): - id: Optional[str] + id: Optional[str] = None name: str description: str - owners: Optional[Owners] - terms: Optional[List["GlossaryTermConfig"]] - nodes: Optional[List["GlossaryNodeConfig"]] - knowledge_links: Optional[List[KnowledgeCard]] - custom_properties: Optional[Dict[str, str]] + owners: Optional[Owners] = None + terms: Optional[List["GlossaryTermConfig"]] = None + nodes: Optional[List["GlossaryNodeConfig"]] = None + knowledge_links: Optional[List[KnowledgeCard]] = None + custom_properties: Optional[Dict[str, str]] = None # Private fields. _urn: str @@ -80,7 +80,7 @@ class GlossaryNodeConfig(ConfigModel): class DefaultConfig(ConfigModel): """Holds defaults for populating fields in glossary terms""" - source: Optional[str] + source: Optional[str] = None owners: Owners url: Optional[str] = None source_type: str = "INTERNAL" @@ -98,8 +98,8 @@ class BusinessGlossarySourceConfig(ConfigModel): class BusinessGlossaryConfig(DefaultConfig): version: str - terms: Optional[List["GlossaryTermConfig"]] - nodes: Optional[List["GlossaryNodeConfig"]] + terms: Optional[List["GlossaryTermConfig"]] = None + nodes: Optional[List["GlossaryNodeConfig"]] = None @validator("version") def version_must_be_1(cls, v): @@ -337,12 +337,14 @@ def get_mces_from_term( ] = [] term_info = models.GlossaryTermInfoClass( definition=glossaryTerm.description, - termSource=glossaryTerm.term_source - if glossaryTerm.term_source is not None - else defaults.source_type, - sourceRef=glossaryTerm.source_ref - if glossaryTerm.source_ref - else defaults.source, + termSource=( + glossaryTerm.term_source + if glossaryTerm.term_source is not None + else defaults.source_type + ), + sourceRef=( + glossaryTerm.source_ref if glossaryTerm.source_ref else defaults.source + ), sourceUrl=glossaryTerm.source_url if glossaryTerm.source_url else defaults.url, parentNode=parentNode, customProperties=glossaryTerm.custom_properties, diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py index 304e999f81a84..d72624bd70512 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py @@ -195,6 +195,7 @@ def get_workspaces(self) -> List[Workspace]: groups = self._get_resolver().get_groups() except: self.log_http_error(message="Unable to fetch list of workspaces") + raise # we want this exception to bubble up workspaces = [ Workspace( From 92850ac55625f3fbee6cdd8699970b43c18a6f58 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:41:15 +0530 Subject: [PATCH 486/792] fix(datahub-flow-diagram): add missing image for onboarding tour (#9783) --- .../config/HomePageOnboardingConfig.tsx | 3 ++- .../src/images/datahub-flow-diagram-light.png | Bin 0 -> 270748 bytes 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 datahub-web-react/src/images/datahub-flow-diagram-light.png diff --git a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx 
b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx
index 8b361db5ab344..65c703db38fe9 100644
--- a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx
+++ b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx
@@ -2,6 +2,7 @@ import React from 'react';
 import { Image, Typography } from 'antd';
 import { OnboardingStep } from '../OnboardingStep';
 import { ANTD_GRAY } from '../../entity/shared/constants';
+import dataHubFlowDiagram from '../../../images/datahub-flow-diagram-light.png';
 
 export const GLOBAL_WELCOME_TO_DATAHUB_ID = 'global-welcome-to-datahub';
 export const HOME_PAGE_INGESTION_ID = 'home-page-ingestion';
@@ -20,7 +21,7 @@ export const HomePageOnboardingConfig: OnboardingStep[] = [
                     height={184}
                     width={500}
                     style={{ marginLeft: '50px' }}
-                    src="https://datahubproject.io/assets/ideal-img/datahub-flow-diagram-light.5ce651b.1600.png"
+                    src={dataHubFlowDiagram}
                 />
                 Welcome to DataHub! 👋
diff --git a/datahub-web-react/src/images/datahub-flow-diagram-light.png b/datahub-web-react/src/images/datahub-flow-diagram-light.png
new file mode 100644
index 0000000000000000000000000000000000000000..cf110f7fc48761487f53429eb6e0349fe8c656aa
GIT binary patch
literal 270748
[270748 bytes of base85-encoded PNG data omitted]
zcfmGkAF<-)*fD=Rm5M7n+GyNYOw&a8L4ZuXBKV~0 zYE@PqvDjUf+vLR?(%0Z{q3=wq)LdGjw}nVYexB&F$`KQHoMA8%42DG|lJxA$)wZAA)On~<}`{jeat zjeByvA6(HOZKtVxnJqV-tyK*pk1uK?H+$+Y$G&FY*;-khMqZ4jBr=lB%r`hSsB$BQ z6_phgnO_2>i`HC&E<5k5H#AiM#1F1H04PV)#z5*Cptd3G0qF#&50#D^3L+QNFvCf! zPM|@A#}2&9<)kHsjmBU`aRCjb0Z?bTMV0Jj3%l7WM5?t1aG5$aE#%;hX;VkYo_M*| z^JnyUi@m*Fw1^=%xC=hwfJJ2qxjd1JrFcb_cLrRNgjrs`d;wOsp|ESA@uzij2S!oX z_*|YjwAI8uEr6X4LYi>Y#Q~RKb0jCWyhRgzxcbj zNTJ~cFIqo-oOOMYY1D5A7|kk(ApviJud=A6CWVPOqm%atK=L5->*f#Pc)w*}4E1y0TDu?%rHb9G+AlN|RBc^yt^8vx z?3^ifhFL3ifY>b1Ld?cUM?@jh$@6R(8TLb*goYwLNTsdI-vgrerXVs%d;KbH17eGU zT?1k0V>%@Np1W}5h&5v*U&g(2*dG7$st#VkkZJr0KntVvGfWP@>lEB!hz9g(DxFAl zT>qiN$UM60j)jtYP6|flMQiNq<6PDobBZL{}I+79ko?^V!m@cj#Z<#|}mH&k$9>x`a2o zA9|d!VD6XoUPb{AjPp!dBWJz_SVhbgzP7a@pfoXILlHm_o&sMk8bIXt1z^^!9-@gx zo^dV5BK%UHrFAp+V}fX22eUwr+yp}u!zbUEO>h}w5J~n1R;u@6{nipuoi`-}z^iEd z)5u%_b-pw-ACQO8_UsFsAFv&j z-3i*>r+{X5tb|$FsIz$A%~!wVJ!kLp_oH~=x$Y^HRB@wr_3C$mf9-Vf^dbjNBs%SR zz7cjI)&>_GDdYE)MTc1n`|k|`Wi(Ly0iKBfBnKjkE}!juQIj{AmH|=VxfX?z z?hrh&>o*D!mF^Zbii>vw0L?Ilej-w~ZDqzo6)4K5Zbh#Rr(=#kx0>#8`c&J{ZHL=T z4E)@Ce`K?qbtcWk&fX1}ZA!5||IN$`K(h59GCP%k;EunN#Mi>ED;Y}x=u)R!Be9Wb zjQCMpK+XqXw4lkq+uKF@$2o@!#vGwfOb&sRsR9s%Z_=G(mHyNW`$ChjqK1@d6GVHL+Zp#H&k{SRe8m z@og)0<3T-D9P&f{itY>9GjevguhmowyH zh+Q&9Z>x0a+9m0A$C`bWXpJ@(zd9ysBOAymdZvmbybSE+{H^uFouEzXn)oeoE)!ud zyOK^^_LyhH>CCvXU{2rKhLOCKxUWbd-zinJ&v#tW3$AXFsLC~9ON^_4B*6z*k}s)f z9-4SV6A_I~OlJIh*W3nEcwX*-DH(7_NNlaZl-yGE!~RxQy25YmiR$mporI^)+V8t{ z45x^Gke@G^Q_%oaWjlf)X^OUoT;`ON4-Ep_{|1D0H>ySyHTGtp*M0glUSEh02$q0} zJfL<%fDReE>aNA-Ru#}*9lX3Q_9a0wa+!QDR)3m7r54D8wBH`32CKi{mNkK4-erW+ zNl>U=R3&TovE!eK9RyVPQ?k{<}A8L_RXDNG0g?)*PdB1*%WQxI@ozH+e zxRx@c1{z{mH)m6c?T4CIQGu<0zpDvXyZ}wu*ZO(^8pBwc&7J*y-ex7bNcDOys8heW*wCn#T-fNTe>A?I{Ow|?r%E2 zV8>}FOG^U;AffQkxh zu3(E4x$w$_jZowz=#yOyM%|oJ%J1&CHopihUCAn_m?peM{#ZL;e;v2_>SddBu2);iHj_65w( zKRkHvLM9Ji1*25>H22w^UpZxTl>ezHo>t^5`RFlmB*$5Y3Yutg$3YEMu2|eqm#cq< zkg)!Ex#FYWXq26RkpLF^@J@xeCI_JDz=fQ#MMreSO zUBY+?@8|I8G2J|V*cU|F<~5qm%JwjHdU|@`DkuF14?Et)+Iqil1n|d{|B#w6eL$nD z3F{o(n&=&qqh@481H|sMS5}1Z?gYLi0^3BO7lmC*?X_%qqH7eHqCidETWXnAoen_BC)iN0|o_Knu0Y82qi|VsG!zx6bw} zn7RMm!pi)!`7}+&=)ZLzJZ&HWoechKTfVW+`=x8Q!U1Dj(u>>%buYsG=g*br%tEpN zi^>epV}U~sK=a8?jEMa4qAJC@wK`Kyj%Bm2MvxI?kP`N33<*@w zLpA_=Kx;+kHODj0=a3ElXP6L1oMc?6MlO6DXHd<2wJKTR31?@6fO|gOoa#-e)^JH! 
z(Z}819!Xme`v?cu6Og?4B#mC`H69WjX_vuimY#&Wpni-g_w3=MmAXj9} zo5NY8e=bs4wF7}%0|2Is1`B)Tt4wigxZCLfl?)IYorZc|a0v;OyQ+wxl z|KLBFx0&9oXJ_7&zkDjJNM9lUx&MuCj#BH)2Pk^FDwK&!XJcbyrPF#wPFD6Wg`guH zGe2!UqHB}r`eC%Rd*OrCC_7L91HHO+qh2uB7{hbKfyKI%reQoYQHT<8MTZPF#M=5{;HG}SefPw$%ynVH*+YnKdthV5;U#r z$S!xA3d%7t>GhO&u@lqZRZ!3tp>%q>J!dAYdh|1Q(V~m~8{fQIaCnjN;iVwzv)k=hQ@Vw(22jacWx%6UY9Hu0Sdj4Hy?@7;u-;|5So6QPBF2 ztgmj&A<+bAh)7UzHV~MQ;utwj5Vy^u&bhn06Q+1hi;(ftv0(}CiNfuBIad@0FPf;( zb>g(SjNjl$8mAes62Fe?F6*k|dPF9q^bXy{-l|O5~lnL*Tr>zx$=7xDp7<~qz&&#ECi*Tiz_W-_I zw$_@nP$ihKAY3EBdl?l1&n<%09{~MJCAdlAaCE^9_8aIuDNY2`DKB@!ZK3#FVq%Ap zZi%#u%jc%q%=obV0n>7_{nT8E5Q6RYvvRR48kG6ikUWI7SSJ&_xBf?hyX>l4V>nfDSbb307lB{ z=P>^XNs=IVx8u|X#m?2k46OaA5jGU-Jx~?bV?iG}u?HWuXZ!;WRk(Y0*~m9<9q)2H*K@BTm^|(zs5#ktWbceBlOAv&8i{sYDrOtV z2ZQg>WM`UsOhPFUi^Z&D9rRFdnb*l>XMMdn^F~uVmMFl}dyfqz#8E<34GKRJD?~Ws z>>gKRWodF}mA>goV`9f8P1pcP(90Etg8j>)V3rc%sxEmy&Gh^TDsDy20uwYkgpm-s zPIESt3IetK(R3-K1pJ(Q_$?OU5f_uH{w8eB@joFZ(wF&_WK08Y90F{d$Dz+1Z>WGS z>PaVKx0K2tTzq7_6N^<>`Cc)aRgDEbM#uKVi$bv%#daQos1$lCHFMLK;MJIPaQkH2(Ga^qgtRJa{vL2pe32GZ@N8dr36hY z;IZ6T?E+>c@Wk9FS}Ty{n3eM=6F6oXAs{LT7flpiB3HNNMyo0R*c=*b;PLN4yG_%} z%g*RtUM>|JMO}+ro7Z9Ja8qbWNy#Dy=t6lv*c+6BVcIM|EtG4M3wI>sSZv3Z$da^a zA;#lW?UW)_y72@uKD}u#a2KR>jLj#9^r!~Lfz{!pRn|)zOqS1I8`iQU>|rsCc{gfrm1pxB!zF6f|Bk8eRIJ*R>(z)@#&|n96`n_4^^qwgVk1g}eDiW~ z_T&lHUzA^64=ucw9#Ou6s1QI;i75>0VXPo=GqlEC~tYoI42hN#TPVSU$Ntp!y*up zAQc0VJqSQ1j!F!(xQ7+8e$hhd>!Mvi!9V?vi!cD7>AfP)Kk`E{m)*li%3liTx*8W|bc zXWxY6dtvXc3(VTzZy4qX-+J6Z1epN5{kJ(D-HnyN10_2<8zzKRQBes5yJ;>{BH(HZ zTS_1C*2h0jPanDTYUlR7bqQ!9$H5S&n$1MYdmS#VymnWS%~r&{AzqU-a!*>>;W$%r zA50P#D9n(0T+nMQA|ZhUpkucbBLuL&v^MZLm9SCI{1miohhY^_K)9nYYu#Yu=*R{X zQ3|s}N@!@<{!bA#x#b!RK!*f=F4y?J>UB|%Y4RNWt7IbTzIhlV1D#rI_wW8iQl3AD z9dT?dYpubx6=8O1d{wR>Of=l%7Q<$MekvB}5| zS=zShFMScwc)psk&`Y{GYQ|EKWG$nSvvYGfei!rj8mReV|m9Hz?w zcl#ZYFOUx>!jY4anbI1uh3TktWgGCGj)K(Hh~x^E49rBi1LN*}L)oG^C3^IYxnn@q z0l#)5&gU*31mnM?em-^=+KgBOuMk`>W&i-1gFeLfr@WHx8H~(d11sKvY5fy5q2MAr|APWM+#V98V0a!%ewe;XR*fs_nsoWgdX%O zqD5ApeUi}q+M`>nnpDE7S?E9aw661=INx#SMw_5Rd*h-5(lUxuS$>!ra$9*NU4SH--(DVYv{WF*(4VEMe zoFluk-B$=_-N?NDQt)5=QT2nT3;}%-OdMWPRwfDzVsc>Vz^k2PRqdCrVg7^Q?+P3} z3OYN5O!=&5L4Z2bg2|{={RWKhd2;f@AQ}(LY}0Y)!pj0AgDDkYt|4--{{2cwl%MU7 zHZ=QGni&}>cPjK`7$Zfr(2+E}Wem2P0QZC?AfSp{B7 zQZ$cTrr`4YL~_oMHye`z^i|lzZ0)q%thk1PMhDo19;X9Nx!XhKC4ij?}tbl@E;M@NakO>v1&y0?Dk>%Mv&wZyQR`9%|X-5LV<;7#P`m=vwy>5 z^VFV=Tp>I5J>+A$B~~15Bof$#hjHLJV=0l#L@9k3IJs_;7Yo>&YoM4^^mOVG77Do1D5{6r(4S3+VN8Ntl8Mz-3`$WD_O?Xg30YeY zBb#}33h4rV7N7x&1sN6y@f#gEfJXy?bt|CG27*w7J-dBSDRWp1Ishf_S>)WGgm(bG zS+?sm(dInOE_EhwZ+u~FVlECi+4pnzPT?=Z_ZACmhD@}u4jV6{%6p42kLW%Q8z~6GnIP)jnapI5mL8Y(7=~0e*1ZK~xyV z#YF2u6(-~lgEc_LdwY~hcr2+@Iy1tN`Qd%8-{2CJ4Xx|CU#UkA8h`VTj(`}(@JDb{ zneI~O&mZqH>tbP2t)s>HN09(=Vj2==DI>uml3@Zl3K8~U(VNJP5v1P}|CbA34AFsG zN*{2WO8(aSmd^B}k8Tk=#)m&^XX5Bx1STdhl<1ZMwH9u$usAyW40|5`<&OLt{Zs8< za{h?#OZ-cPk&p)FPh52G4DY;f7+*c6IYY+@`OdB{SfXXxEN?4b?a?yRT5;jJKAfFJ zJ4?zS$;!#?_Tn4-gBJ$bfItukU08`F`P$jn`Ea=igL>Cpcu#@jP9WQVW|>XLf->ap zpmo}}a|cV*jSzhO6P$lQo>TzbT7~m0W7v>^2LzCw0i%|?EU#o&u=LE}ZPYESWt@A) z`?2m7fyp;d!RoJdUtIZ1^1M+^NTDJm5j>ZYW9&!uSDy&RA*7<5 zJ9JAqawnNkZBkmKN3FEHl2-wDd>(QYD5My2|A7yP=xJv)11h!GC z^gGudTMpcGLqnNmLLHlJSwxWch!8|$!{^ZA*r7`@$KcXOiH3Tiy7Wi`*A zM|(cG%+5|atk(1R$CgNCrJt==PvOIJW9+#rIu)XN9Frg=QQC%rPb&eb7x$U3hvt{T2lf0A*)G@_4w9P?vg;fW9Vc3p|bgNbiHbrjgQBgepLY;Qp6m zA?OV#6)bqjY}|!HlJ;Gg9Ft~ennRsL4X02mOpv`B_;ja{JCY@?sW&Bl3vPc zbKrjBcdDsfL)L}?DB+Z4i(W^JPe>v((1X{Sq(B}^m>QFoM@O)Sf*~CF9&9qm^8-AX zJmKGQT9C3MW{#ShMaMiIKRs4e#=lMN52q{x)(Mk=U@p7KSp_*rprXLcn;>g24M 
z7z+Wj<-Q6bAtCAfo;9W8VF&-z7A7Z8{{WMyK2ifOaOkYxjj``l58E}G%#>?UG(@rh zSBa3}lsRg>t)>gFcELuri;pGAGVlM})nku+<{G{JwN!RMreiwEhV}4t(C(QGG95)23VA|a7H{?0%`6u(rzg$A<>N_Bv0#(vQqY#N-k#Svi{`;x|T<>cF z_((6;(-XN-J?i@n%*t@6-EW&y`+97VKM7MR#9dskB(y4Mhcc&a=G+vIe)i-hWfHaq z)a@y%3jgZsSJ0q@2!dqY87mNxP;1A_m;+%ow@Za{!Jd3hSyrA>AYUI%F>&d_4^5ki za$odJ9V#AA)s{v*M9vA*G(@S<8!8 zl$a6;WMm4|D2S{azTHcQtf#5b2W@GH9yyhBpqROw(7%u2$T}U zX)=*a{@r`Pkk0+`Wi0Z`9KxG)6LyPb_NSmj)vVUqxW*vWaud%}j}9p=GFdfKmy@1qo70CVD=uRC7G%3i%_u&GW-|?cl9VitS`}te_yRS{t)qN@E2o+b z)MP<7*V8|4dO5tXbb}8>TPSWlnbEN~6UYGuLKe-%xupK%`VN6cl;^-Cz!+GRG_3`p z>n?w8uv?Dwzwh@SnfKcq0O;0XJ&&C-u(Q*`Sp*q5mTp)W0II8{r8^qh-ejv>sP47Q z*g@#b)dI&>;-}WX`yE!i*Jozrzoll+#$o5IYnez+3wXZ= z*3G_r=ife*`U8)HY_-*^pr3|$Uv?Z=etsfXo<~~l{k^ydk}?C!>Mc<54`(gI3R1tz zDb-^cI3dD|DEtjayh#{mYV)7WI(dNXA_^Ve{KiR?*0&9M zJRioeI_zgr?0K@fQh%ZH`}2one}3)u(h5n>yMb7C?Yp-myREOWe)!m@(~f^E@0Pui z`OSumcJe~AEq(_y#-$_$<>wCbeP+@4<_ykKB?ZWX8-=iv^wHZu2m&eAA{4}n>UelM zQR|~@sx>;(m~^XAX)C3m0O(?+!Lr__T*vQsw5M5@)#}#+=P8z+kK_>YLsBIfgb4Sb zrB;%g*R9t=_Eu892bOC2BuZrW^`kB|zXr;2y?WqX4ss7wA!FT`E(pSW0b1h8d?s%& zQ6eR0U0Zp-mA8bW4Ep;K-{v$54AWXku?q+OB( zCtl4A|Lv=8x+25v5%GctPlmk#tNa=PJP(j_Bt7do8fi?2#AP(QwnpxEp1Epjs{<0- zCtY~GGt6rM<@I20)J2^;3V8Y=}{dP1E1 zj?mS1Tz#_nw$y68ps3nUanGcu`ie6ti%gP;-fQO6JSi!B6?+`SqJ+(`P_K>4HmKtw zz(_k&!8V(btByUh3O0-tFFz*OpPea76N@H1soYyd3Dk*irTWC?7bz%t|xI zF!&N8FdaJq|K*5%O1gx#l1u$bXxydiy7M^H!N3YpjbNw4;`{Os9>wTx%(IW*AkP&x zxmC#1pMRVBbcaW}kqwI`$Lj*+Kzr?v`E%L|NM`HoO2*Pc}$iXD1m5sTbwVMrKzE1#uGr zmHTjdv*WDJd?Twl3JH$NNMG~(C9P$|^y_SEu^uGs@DY6x3)qYfiqi-qy@muTN`8d*@BUjqDu%YlJ+SeFs`ctgXNUxI+1-fUt_dwOKX6fOlqy}m1a#iG z7+)VB^qr+@6oo?f;rkOT(>(e(vZJwkejBUao?!}ts( zO(UlwQVArDdl}Fdjeg9ThIB;q#j#cEQfth%P=&C@-M!|Rb;LilhpLT8e8;s>l(IJD z{*ru7s%jKoKSLW?Q@IfI@_%|PIAM(7@M64=V;1UhN<%Jr4X2Jl8_E!$c^8}Sdmt6p z)m#vL4e|L`E(jv5QF5Uyvr()QW{q&@<4*b9<`S{vyl^Bi;dac_8uRBWS9I@ADKPBb zdgL?LYPw#(kdS`u^riksoUqipC>`R+QGoz79upLLIaY$Y9?}jHiZ+@o11~!Fn2l-8 z*8OwMD*Z!3Ab8W%b(8?E7{DyW1iU{PsQcg(gR}g%Mevh2KMgi}*vOp(-$85Jvw@|Y zSEXCUmGCD|o}RdJk9*|VyzCp=d4)`!GG9kiiWv_myZKomFi%4gsG^AJ;jsjL%I!9Y z&^&*Z^4Fq3+0MtHISmL)W@;|0aDfQ9+{*hBYVIEE6zKsm1b;SJrPU5Ou_vfsBy0pq zat5CFC%H$0yqE6rVzr_qJAtOw2Wp|R^PPP>jmbhkF?aAb+2yt;4IVDSPOGx9unN@2 z?~k+F$!=HqVg+;CIj@BD?LLWRe&F^+pLb#}PKph=sbG@o+Q6;7K3QT~`YKn1bTaV9 zHscbbhjXdg8Iepq$Y(LgA+_smj=9g@l#dNQqkF~8u8!i8)RqTRuM*zf8N9PyYTikF zX)w9D}xB$)7?ZaR~nz1Uya&NbjW9G9Kf1U)v3I4<%;;3*0dBpi+)Cme-|3j`=* zUA*0_#(DOm8XGj%Digc}-O-X3WYN$M?D(ji9P^G<>E2;llIc! 
zJ@B8;qBTOTdJo&-pigI`+86Wpx=Zk4(Q4y6YlD3>5T!puH}Y*PuVcx6I{YoY_54QK z7Y{E|8!t`WXR7rQftF8ERA?q@U7gFkd6gS=#RWn8=t)!tOD7_XOk_hR4j%HY*?FR6 zS7kts!3 z)Zx=nQKnrt2y9KNKZJ#)o^1^~f>1#7UOTW2d;xJH_KEC0>M^<41twpTyxBR)$5iHa zGR0GDbA4EXg8L!F-b>k+QB>BsLVo`kZN1MqIGA4OP32{b$+)#>;Ld1hcBk}1uRS{Y zTzF%Ehnz)o)Yj(=3>IK0GY{lrVZ(ct2r6=#IXG_>4R^+}>#YQT0oPCv>98_u{5GZO zJr0(OUO%u4W}noz+u4J~&!7WZ=7ZPZil#SPXT4MUVgGeNs0G zdz2^s7FoLLm_SvHT{k(DBzrl?VrkJNt#K-U;MzWe>xptq<6wp>)lAC57PL&;Vi8Ln zxI((*I>+O3EdIVU`L5;NJwbsz0t?Yxl(1F znL}W4>%35ZYy3c_APFbg9RvCfM|gfL1lWOc5FP6+6c7x(#2C5hnL)Plw}d|QsUOA z;445|uH^at-OsK*R15j8CSQf(v_F*=56OPpNEGMCn>RsC(;oT9Y;Hc|-+m^#bT(k| z%^JM%jvjaaPQq+31f=4A4*W4*mGBei*WfWY)#6#Kk>HPBYOzeg4%X|b-BO0%E1}o` zj&RT{1W13si_ewi?{=1({^{l#>UVdEPg54C$+S4ZQVqUFXaTy$mjv9mKQqq8&n{)j zF*y*$sj(*2Ahm4F8!~WzEw%%#B0XNmGZUP?B^rtmQo%LLB{XrucrDboX{AORYLm|f zjh})08_In|C=Skvi25|Mx;hu!^*itv&WY#>S6a}H`pZquAXrHh8n3S0DGn)TFF>v; zb%Tx&BRF6Oyje{Tz^ z)2mAH+q6g3KiNSfvAwL9DOEQMTrP_X<^82^bH|9r&HN7)6BG*ZM;7DisR^e%Y8Wjd z+#se3@8G+t{2#2>g*B)^+Lx&Wl-p#Y4H11L8g_z!<Rx9Adl9J~`4Y zq0wPFd6!{sHeKkfG>vN|;xXUnj8wkGChu@|l+##r`#PoE|DH^i{OHZ~m|L+^ z!xo;56f-4prtO(J~zhwPL*`f*C*>9MX^1l4^2Ps?J#A?O)5R)YPY6 zKpOISh)ah+Nub1wg4rtqN^4lr^dW1bG5qfjY~yLx5dMHZAwcFs3kU-NL#Dh>>MzV^ zGCVQ6=pZuA&M3lfv0Udg7ts;n-N8&dv-6Eu1j7);?!VDJ#WsG`Im3ddy%B?|aVc_L zsWQZ`ywR{dkYOdMcC?E8?a5DTkyx2N(EXe(joVV%=hIw;kDz+#JOl=;msISxXf9Iv z6}&`2gr^>M+7{~PL1p9-$G304&`=}+Te#7AWOAwwxB%dlv5>abZw!(v<{E5?I&Mbo z^8x!Z;FJ^%`Lhe4<;uIkVL<+h_prL+y8X<0HfAE}nNw@>#KdoafC%w_6i4%aoWuO| zT8uK&BkWsrxD@><|34Oc>2S}dv+ZqCwU-$@!7vtOd;7AV&-pZoiu%Pdc0|uN&p71I zBBdI}Iq1wydDN#dufoipbb#etCCEWy)M*%T7l^`R8girxi}Ag*(xH5w3?ea3XQum>PvfmPR}=yS5SS2JM7t%YHHcMxtq_ z534HR-5<}TpU8ftdhbiHE~r@H9DR1WB%Z7NO}U;J3&Cp~UV+CS!$N6JZGE_RNL2=% zkN9YR6xh~pW}S*mj)}Yg6TfF?;^}zO`LCS|x^p+|p~~sww5;;q{&R$u|4;=24L^v9 z8&Q5i%|n2$%7nsYnQnhEH^{6O=Chg)!;Z)c>VC!k(&WW!1L3b-n94R*h;SjEOCb)u zq4~6I5-RVzJcQnFSWLyf37Y%JfF&-FSGLCs8SloVP^34mCb+%3n*@PGpaC(yI$HKP zALW8g7zc}u7@&~%!m@d0HLnH(Ug5;2MiQQzp^_tAjG_@7r@ff=>tqGzTnT|7pn8ZD zvH%%*N7V%SO46b10Vikbgt6?X%VYl7hPR}jIcB}NzkzwJ=jpF@wC~owrv%T>O#2Zm zC?uePuTzVc{d18x*7e#@`Qc44zi?&VC(3Fvf0@J|4(r_`P=5G)8TNU-3<17G|e)prGT$3c{UpCUyPcgdND?OPST@PtWXlm9K!_g45o?x~~ zb)nf_w_XfULWNu=QC-uo7k9Ehj+p1e_2`N?cW<4#4L)$gG@GpC28O)jH?@d7z=^82 zb@kyW$Q2PDZX{Qzlh#r34Ir!*?9_ ziN}uj$x)p{ZUBM6IA`Y%t_GdRT<+b-{;xKnN*=EiDq$x7wHS3Q{?=K)4 zUH@`Q1<92Esi9}mg1>~|xKzt(qJ1Rk+{0=Me-fNeyHcyFAfEKF>X|4{lORov8P%Kj z=`{!@0RAN(l848D_2k6-JWpVIIW2tvfScMr*1ja@ZhB!Edu~>X6Xqx|r+pZzfS==b zKK_Mx(y&P+HD#9>lY?Q`g%^ojdDrVOKXfy-Kf&V~%VETuTYQpM`=}EGcHknC;RF7m zFS7k^7qrd=2UBb9N8=M7yBy?89G*UKlU2@sinf>fm7AxSkJcvgE9a`CMUe z$Wl*D)N|C;A%jN!)2F|m&NaZVum8C$G);3@?s8~Tsf_HO*#{H6KoGiN`4oHmoQ!j| zb8OaNB=J^$9&obOdm?Z&+7UB}5^)r!LSa(>Ua(H=_idOup-z%faMJd86|u-Ub<;NM z3@rTT>aFp?|2=wxGjK^Wx&626c{d4Q&(+7v#vC)|)`NGzsdroV4o%Mjy}i$PuG3%W zLuvBu7Rr{z(cE!2$9ZSG}Qu(A-;e*W8Kj2genmp z!f5$Z?CJKI>f`ruFC;MMPK4vUE&8sY-kGIAZcNIriBVef<2OvL*duJfV5;CJ1_5-W~N}qVUVXIW^K)2 zswK(sy7%YjXY8W6%Cu_94;~~cY;d1S)4Qi$Sl*v+nF=HQ5K&3y0yq{OV_dhxcC~aj zLE3h8=_?c(aE>CEY5KFyW&PexEQz!7+mp2PzBJE4|8GRNZK;}7sZhU7qy`X?$3D#S9#v-86%y?YmmmVmZGWuQ;ww_k+K`;V`^7&-6+P>I{v*`@_%}O z^sNG%!-qY`bDI$6U9YL<`pT|p9a-1^;Q~mr2HIF$^unlRh*1#5Wuu;f%aBN28kb>_ zqAm4iNu=XnTlCvi<99tgpj$gVOwH@ZKDGRUJmcz8K|6+fHPu-KT}f#XJi7}reA zI342~)WG=jf4Kn1roLgnIbPH-wjRA5J`>XSSP$`EZ_FJP=FMi7>q$`Z*po{BB3tON z)&)ll_|Eoxng+g{+B)ZowSgJ10;bpMw#DBFLgHWm?RNt=k*xFDEth_VD{UK|)x-JP z{%hiSM9<4bkUW7(5ho0K>#YNnsW><4)w|lp3nen5=jHH&&Q4>q+n(89*+tGupPRFu zv!d9yc{AJlkBi=3MeR(*-msf3(}O-VW?VpM`RXkTG4Ht^RApw7a^Ls&J7?yggwmU` zv-6HK##(Ml=we!<<|euu|LFC^aJ4+2Uobzw#EJ}hu#}wpkSwNoM6?To-;E!~Swj#` 
zInFzpUP4@a)GVe52V9Zhv*3(Vf)cKmx+hu%c{;b;agewkunB!VvYk?lyyoR7)?^)muo4qZJ;+FK znEtVYJ}mPVFNy;6mKpN5ZnGeuJAP=+*38rvTT1= zj_SsM)*pFPGmHNmx%ciXr21ntuv2tnJO9n!%b$H9AdcOUZDowQhAx!4TYMMe<+S9M zWyk+XbppCEq*@;T45EMS9J8ay#5^{jC|DuValUzS!U`B>o7M*Aia=;)aRWH{S6)XG z!2q>WDyHPqpDpycyKPSe5<%EV3o_>%d>?<2JvyvB9Kv+ifcHiSm!{XFTH=r6dH9AB z##$RRefOQIHQfTlP#AD~{j>Bdv@g7S^WRWIc)k2d;F`?-P34?F?|opSdO4y41%YZ! zFyc+Sq~gA|)v+i0-@+Sq%e$0gw1c*qsL(by@ND+&Ags>c2q`^(JBwgA&h-q7@*MB; zCWLS-_W7dw_YVVvZ9+Bq+045GF;ny@4F0^yrB9dVzg!k?ZhYAqiDeDX*AWqfsfGXj zePinobu|7YWDI4WK@&ieok8Fk0)w9!c6^5;{e+PHJ!g@-T8Ed@^i9zdCoJYKX737DUA5M1ve=9L_WzyKePxS4H7*9>a5|L1bY7lVN=OKsPs~vGiyV z9AC^VRjH!32@=_slGcqJ)#T!2nB z*YwSLsnQ{Fa=3PdtG2Qt$#pfDUVkn-kGgsyad^f#2SDqOfU9!*5iZ>U&upJ6AE@QTW zRMkmeKaxq(^On`1dqO5Q!lMDmzea<{i#hACZ~cpKC##>?y4YBP-;Oxky6NJ=sLMi4 z1Iz5+;uw0{@nwi@Rr0(jm6~nhN z^}z-?ad0z^T1H!aL{Ohns+0dQH2A4Q&*`C!m#V(}ZpMv8WPHSVyf=Bbm(K@pe+cTL z$#BX2ai>9zJ_M-Eo|PtFDTOwibHWVt&8WB;G!xb63uO|iw{ZR}WLP#fef>%S${-?m zwgI{}8Qh>whb9rEtzvItVp6FnT;I}w_Fs`p4!}I)c+Wz-Cv=sQOUe!36e1L3fG?)X zhm|AfP1zz0xyCscy&mpmdewSu5zND^%gG+Vhu=8C!}Ou(fP_zy+peel_u)>M%0PQD zf!(9TQsfGKu>C2{e%tvDLowd*3wlhHGGr_XBl)0@!w8%N3}2+IX1@;^qtX6XmmnvI z(cuNMsI*6tV6og|M4VJne-uvk<)A6*x>5Hv&Eu$ILGLF(n*%Uexor)j$x53i=rd^N zNu;JYfGsl&`pB@Iw8V|fHx(BK#%RMRK3@i6`eob&#QQQFGu#V!DpyCx1i`+rtkeGg zHJcEcGiQ~)#pGZEjn;Q8s;%HKV}iS(*&6Zpb_2)F&(~XLs2qB?Jcm6K9=EIsnzv@k zZ4)#bwtC#viSA7Ew|Z(o`eYE!w&(yAChj<}js;)R-`u^8J~%3%H%-o$5zfaqQ(ox4 z;z}wd!`6-;8kFM*m|_DJbW_DfeM`>1oybw}mHhfu{kE-Dt9OU9$v~g6@prNSX&5em zaagGNq#IpGt>n?XK)FYQ;%lU@H`_wIm86U!$4%@cNC%HsEM`AV8QTe5(A60pdIn&0 z@a$soaC0}{R#N8Jd~>jVlR1j&6@^I8ck8LB;2GPcFPPG&AQX50*<12I=D40nKxzpj(FvYv06NxeFBxOQ z{iww5*HcMOr$cEW?UH-7t9Pgj;r~{)M>D1V?T9Tz%`8{7Y!DWQ2^uuu_+Z-b=}TyV zRNMxbRFIavmco0D!5E?`*@J@?^26{aJP5Ni6XyqN5-nzdt?&qo0 zVLr5IB702zarMn2w@p88(QWlD<^Huo#yg~h@Vi^;{U!5G2UCEPU zs$o;20csdz3hrZV92F}fKb!fb+30k5dMuM@lkqQHWt(7%yg8MGmFbsAebgI8R!Wq3 zayV$3^iE5!Bz9GCx9kWggM34VW_cJOHtOmZcB-_hbFyy79E6pXq}m>?3t(w4IW z0kr+<>eb#?cA%NWSI>pbOzhY+uOxo{E|i;`&j1E6?t5?(HVN8pd-T7D_zfxpvG(7V z*jI)+9_eTvI+L-#O}{}5sAWam3!wOzDC5-8;hb z06s-l4`WZFjU7u=y*T|DYh_j64x{E@)$b<%VcDRgcey`0_}f}h9i5gg^|D?R4RQEW z-*5;GRJqn}6T>a(12R}O=n#a_goW5K6jT2O(!8onZogf|RWAQbk*AEFQst}x@hHMe z3hxDsLylob7M&*J7Rw5_6Ugh;kOE_7Z5-)e5grHP1uBesEx6z)4m3dla6l~R!_9y^ zECZ<1fQk%DF$XmnBw&Nfx(-Owz2JH~Ww|8|nx=A>fF#k@wH{<*2P)3iH~}V|(``M@(|F>WLASXkLe;7kjyYXg=`6 zpQuX_qU7rC&Qf9kWLKX|*>```JV@HWM z>3R;h?6WO?u<=qBaY(U$+n#N=|__Mfpce>Fs zdyV*6L}vCyOmo^m&$w&XkwY>@V~Z7qr6bO=TABXv%}}?W?xF20+JCE1aEYO7Ykyne zOsP4w_Fm(Z7TR!;HV)7>JQ(d{a8#`FbnCo)1LF+>OqlN(=f?z1zj-Ixz%-@8!wbH< zwP_aQfZoX=G|vuij!$(aL_(BYvcnZhm8?{b%UKhjQ}%MfYlT5+^^6rg;NnpG8SB7E zae8xjF4dQ3nGRc|VEeFPr6%2E97SOrN|5BriioS8E6#|A^|T!u?vH~j1_U;+UJC@Z zm^JhQ2Q2^rgGO~MVsQ^FAIi#<)u&YtOm@JF&2z}f8~_gOZmNUWB8fYQBMrXcsI>QThePL*I5Xnf8Cz5P&yWxbx^`PbS$iYXYP69Ioj6 z5CDWio{K^slt7x-a&0Csy$$ECrG)+bTcSq0(>akj$MunnOODU5$@X-Lw(%KQPJ--NuNC?*v4olhJwkf zNAuAo-2LS?SG~tpU3KT8bDBK*8koiEWKlJ2ldGMTe*rR0aryw0{`ZOMAyy{w@#PM3 zwKSnebi^c7N;I5F8;B~~qW~rH8Z~#=%=@eE_U=D)m~y?M<7XnV(0MP|2f})zISN zP#AWC0w@6K>XU<-gofwEjZ{><5y%yo!9KS$gGgA+x!)HLmDiJ z2Y-TwA_4ST#w`H6k>F`r~7*Ec93gN>J$cbBH|y9kKtcmW)&!{B@f`|PRQ zR;HJieHH5Os7r{&IpFiE41Z#?>ekF)>_g~vS?!Q#~0%oMjN@iH+v{A*O@VTrE2JC`i1*Bj`0695q zYC@9W0g287PXFl4r$2rEjk!T^?$eAKI&oa6eI=zFzi)^VJFDJmWKL?bjl+s~$Q!*n zTfmAaV7#F_@eg#MUq<>xY^#Wg$VA`-a2sJy(!@z@tJ7T z8N~Q7?+S|U09J)r)Stu}2>~iGu{VttEoaG*nj;?{v*ktl0r$k0i-Vi{8oyM{>UZ`( zu-k;O?s#4FSZ>;mHG9|-)G2zvtdGaVhrM!0nWPS3L zEf0L2y6nUu=k;q6l5lHo!BqboKft!-bBq3A!I&N<-9P=k|1v#e_4beFSF*d^VnD!S z1k?c)+%6jkg>x-(UT(&-^ya!kfuvv>oXR=P&NPO+xmi;-Q=NTj;dL1tQg;|&0=Twx 
zk-x)`vFvc=0K5!buvkGr?m!LOj3P}Ex5o)N5VnOSVEJ#m0CLyJ5=MyV=uuzoxJ&CN z0{O42%dEkE-LL98A|M|vBwy&O9`qGCfx=zq zO@9^y@?>t_M;}w*VL(1kOii8JN;PkR3+t}_T{UP3(EG|y4c-Vz`^5u6P*-2o6zzPS0~#m<=q%n2 zr-&|kn^>2A|s1(@N=%5HjkK~gGsF$U9YTI0j&$B z*a4rNjnp=T&))7aHCEx)HG2kp_MyE~2@XT@So;D^uo zIJC`gM|%61y^1iwFo%-VJM-v>zmk>R-%0;3X`#9z-l7EnSV*_6HndnZ@Dci~ptU1D zK^Zvp`&v*V9A&VM$5+-3LqpXBQwkQ8jV6XMFC4awt*8&^#-My0&8BggEcB&#@ z@X_lXF!BV;=;uIinqgc0@j1-C0hl?|0$dWb%oYMd7K(bAUMa(Z1jgy1J-D|OfsPM$ z|G@gI0OB#=f_YG8{%fS1{hT~N6;gg&K|yfDPeN%YH&GtU7}bKEF))3|@g@@RV#PN9 z|I7Z-h%vO~hWEubw;_i(zgLYXT}{4bJGU9d&BN2Ih!|lTM>zYTtlj72!uPB;m84L@9m91C z9gsn@E#B>|$?Lf@+lh40GN%dNzF~MNM>17?F3)@3D5PIQC$%CdkNz7&u$}xqw@`x_ zFww)pyTv(hgrD_wGCW7%9vmt3zr*ixp|*U#KE%1DW*Wt&^|>$ipQiFxf*Pxd;O>;7 zD1SL>>|=2E^%AA2SGL;hpK2R)8e1x#NcYGeNBL7%#Z7XL`4h}wqgxlSIjkXeRm|a! z0BZ@e=A$`~ZaECQtXjr-G$v#Lb`mPa^+(dpphcL5 z2^&KQo;1?z(ls*R1B*@OkB-z2Qna{dhicTcx!`N!!|DE>nsOFXQW`gkGiLeHs0wG3 zeyxv669fj_VX10kC1ZYf`t$x3Jvhyr=#Tx;e}=+6oTwV5DWAXim$sz8ipi*%BOBqd z)(GxrIKaej%UCf{bnjpr@5CCf=`PUW)wEX6SHa;p3Z$3te)m1B(TzS3{U}FmjW4EU zoclin9x(LivKqwiXu(71{*rt$o8GA9q<6?v-2tR@s!VUim*d~*06*(pagMqyCRwiL zOi+nWGM{pzRQUXIB)^o!2><0S(t2xg2=zJ~9i|!_Wnx4Lg78}i0~$H%8ZLXT+4h2; z^(SoN?vymE<=l6rb9?UWeWnP{cS66Q(+>5JifPQ^xW8YQttGh%jq(QsnKa%fpaY?r zw5+QDBJPd<{jgt^Ex%Ocj6Z`9T+!|YPjbzxQ3`_dZy>)1%(x$$94Hu_;T5JF0Qc=+ zp&q?@t|`{c!2xq{vuV;4C(0v7I4RtNapZhU5;9wGI-ZsksAI_8P*LjkYlI*ESN2=v zUy~o;G2i6Ri~sp>_=tjt^MXs*P6`aRfC9*;-Qhw;tCVF1xTBPhD=8TS6%(Ue<^s81 zXS>l8#l%C|C3X3(A4=6yp+T4@^8DbE+P|VblY>Z&+_{60GnVu@QdF97z1Y<*`ir0W zd@JA6e8-aI*~UGBXPOw|*!ATSsiR+FQ1QIiaPlrWK&oM*$Jo!PGa2Y|8wk zoxbq5ldPBt<|GV_=}ftjo|-g@k9vEz2x*}xmxDRCyRTwj0CLa+y;3l?EWep6YnrVl~<;fGCrQXYBq z$v8nz{cHD>!yI992T}3FW?2uKX!TzK)sY?n8A|NVWp3@(`ts{T%V1c!Lj!|r`Sh_F zy5Ch$H)<>bW-Gxrz%S zVALbIH0F?e+ITY){#Mu~n_^ z8|rovJ^0KY{4ePB{V>_0&$ibP#jBj<AE+D*%Yq zYip-wd6-gUeT^+GODgeI6{x|d$=h$yAG_&PsU#V**5`4Zn{*ZEFYKpp^tn0vGTP2L zA}j~K9%7IaA%Wa85sr$9lSQezRH8BM>`L~7Gu=>i+4yswy`HIPZ+{s6VA1O)0Us=? zqWkKbGF8aNuQOXRmaeQvX!FmQ4sW3Akqru8FhudlXYIiM3o(Y3MLB~?s*@PHypISJ zx`W+#CN-r8`qQZ>0j}HB8=Un=@)Y9tI?On)o-~-?;z-Q<0OSj7NIYCz>hOYc%J@js ziL_GLfRQ!8A2^0>EgT$J`?F!25W=5IIC=0*QYy)=k@*}g+|&5m7mF>5oM$&q@AC2_ zRX+beS~hOM|EpzdZOfyxMARfj|AH;CFJ>Th7neH}F;-J>I3JauV~5mV<5~(jS2$YV zvmc2s;nDG#M;!GW(7X|+`=i%xzuuhe0fD#uR(hTtD1GYvMY296D3GI84Y4{y0%1Su zOz2A))2M|Zw=FHF72LMVM(68ErO+YX8!&8&R|oG~g;6oaa3C$)DI6_NlJ|oS)IZ zkBV(!O0>Y%g8IAyazBuQh z$H`p4VdVAVX%B++TLRPC>%T%foMWFADXC|mdYCIII#5h*r@$0t1d^bq>8Sp$km}IF z)>I!{G=@L`@GMd_TnU_BFn;@$N1S~p95eMzChEJ(qmL4Z5q%qsNl6uv5z)Rg809O|-xB^=vh7NqWBB zxcYM9ir9}QW6Sk5(+ql{9U3sAreCAhH-371Qo=&=gqAFdNMr^V7bt6X$_(-GI)fw_ z!HD8u+Og7Zkyw-H{dIM}xnTK@ZBC{R&HrF0Z;J;l5$R0s>^;Cef|D2o290YB-yE9Z z^{O0x-=~+k@ZH6! 
zOekv-gUI0vP-c#b5EBpdBA^95!NiI4JfAkzfTD7cX-1#6SelJMc#3A^CXYU~Z_BO`G@|T@a0zw%$BN zhcOs)vmJLPKA2Bdg^vcp#!tH(HyqYpO{zXGpcs;%p};af6rKl$6Y0{o1ZvR$XLNJ6 zaX>o$fae}&?Os$ifr+*+l6cd=qIcHUX@h(xQ$3IOB056^CqYsOeHAby6;>h^D_mtBoF^!QRJe~Om0t6x90-;B z^cwoO?$6lJ2A1u8=Vcqp3rTS}=_Q^~qhR=Nu{fOsvX)Q`KecPIahxdCA&p(pp)@wn zEz~wVE6*K0v%~<*x%r}29&#*4xD{e$^Hqbkc@rgh{nq05nLC3aB#%sDG~{QJP!&iu zL~HrigQ)18nt~75X!yh7hAJgKzJ0<2+!wRP10b;EcGB>`o2TlyeZ^5E`>S!`aJiSG z9)n257zRZwP|n%};{m~5VqSArJ9?OZK38R`RcB4g-unA3;AqOH^_fE`Ax(tRbAXe1 zS1PQktnl%wfq@orWe7+eY%>xSTFKh{l$^>PIFm>prOX&{6&Z8#2N&=aMB%uVFH{sk zmLTTgVSb$7_5Ae?aUBvDOe@_EuacmiN^Evs0qCi9={5}UgGu51>`Wg+5Gc5gmI1Z0 zs4_H)77oT3AI-Sj9f(MOOPMWrPf{4f5|iMt!Ie|9serv|ekpU^WZKvr=BV!hj1(s` z&*o1}Ai*(GDmj!KIlyA4qw^>98B%`?_1`_?Jv$l6FE~>lniXE;P|3UW$J4`6>Ka*F zlR2+>#R_Bi5M%GOQ9V!9(UG@aT&>wFt)Jc#jbCQM_v6KB29R)TaR5)ILIU#xDc2R% zO}L>%22dh3q|h|0-Vm6wJVEHiH@$?{fxEm7kIQ5V$kNL|+IZaCKGp2skHrHsF+nyT zO<#?TjfF=D%ex3!(wk`8Iulb7b|8i4lmPu>B-Kquy;)7Yb@D`$xY&+S%3BYA{86`r zy;N;3M~XIVa{_W>gtm{6{iws5HXJYZYQ*5XeKM|Y8)Hj)yzMl`m1Mjx{tMja8N$S+ z*gljt$|5M&FF$+KQ%5|#>#7@ud9j92?p4n3l@K(QZPmbD+S=F<&SaAT+|Gk$h~!@l z;Y*Hrgc-;>)I=})H@4ulAR{e`c%uEr(=?W`kk7$apZ~rXcFNyOhjFi^-Ih&#vz&0= zs2sy25W+GTBX*&srv=4=`Q=r>$Pj;bcXyA$FW71L39_B^z4!S|Nm51s-cQ?Q4JE%8 zV7Q+h(khd_I7rPm-R0GHv7(w{Xd`%#Aq}~WG)F$o`&cYow6(-O*=QZWhv>cNp5Nfh zG+U^I^(^tr|B;F*c|SzntB%~;)+c$QV|zZ=`|^772KD~7s;)U=AGoiM{X zBGuaiYY0(7;MF7=9#4skzm70~|gBS{6qCw6qn%cgZ1jh}#Z-Y=BUOsajaC z0#wXep>m}pu%*_7NRF~xrlQ+SHj#O<{JF-hqQ-I|X1FS8mzjtXK6qFmd6MxCql3`) zE5e%u`TtLGI*Xi_v-MbT@yff=cpo?A__*S;OWiw6Vq^(lPfyP_OhXCqzUr<$jL3Ee z2VYh4>F8MT_3YkV>|Z>1QLx(;(VG^H0>Q0Vo0R&lh+f~&DxKg4;_4~J8#F(7tao#$7!+A<0PQt z%0hVw0iiYh<1c}~uNjr0*$j3~c3UO7=5+SSBZBGO%c@|OI+7~0P_9NYnJ3y)jJOff zJ?)KP=*YFFx%t*Bz{*`WeM@#_1mq`8cA8 z=Z{63Dho9&n;}MnN3+Yu9St_K<;(YMkIuwaQM9FM*{dm zdPJsc;~zYWp9T5O-Ps8Lw^qyx`$kon*d}_8a*aB-Drv;O0D?$zXDDuK*Zv_FLEA`L z|7@+81008I78l>D&s9TsqFF*-zwx}V`&=ioULb}T&_Vt`^=Cxmf9lV!%JaZ);5{P> zV7RnO0`bT{OQVfS_pxLbAif`8Pwb?rFHN|M7~=7K+G0S6k`Tl&xP}HMV$t4wD;8_z%sIkBSIm3?x{>Go4xX1vr|yeWT{_PeseZ z{z=>V!!7m5dG?0AiT=%`;5=kfa8!DIpWvV|!~q0omD*>3@7Sv}naEwKS%fYC@cC8Q zd1y-&=0iV;=V;8Qf?uwb<#QuD4}L_!`CSKN4Vw3Ap-kv2=E2_*z1kXc6*M#dUOP@p z8kIM3&*vK13zmsMj<<`oe;~FqW>jq{<|}3FZn?+jTTp}LaA_5X{{+r_TH2BDXiJUL zsFx*|(S-6AchB(9XOc@JINBQ6*$e-piZP$+PD)j|EhNT#Td;omlH+Rk{h|0BSdu>r zCj@C3-$dHw(AVhXzH7Hocw<6=B$s06yq_0RHS_MhbEyl*ljW@f4zMDW58{mi5N|}= zs8Q_WQi`dfXp`OlGebr6Tk`rw$|CpC8-o*Ws)Oh+-%fP1eg`b%VE}FTd){vW;-s)4 zA^#LEK0YGQM+cgwuw&Gl?-=kSr-U6K%fP4+mBVDGQEddm&$GxPEDVJyslr)dR}CMH zA4Q%FekAL$$xFWG^+$kATg#LCr$7lny@yv@_91}sZ}WP1d#8DDi?{QfwsLGH=-zv+ zaUqLV;DO(NP>Zu{?|kKYBwn?$IARB|n!PU9KCD?psZWk7A{hw3^_?Y z(TD9_3#q)0)UjMXh!&nce>=hZKJ_D%Hg?X;jSWuR)A8=%i5ib8xx~WNIU`4XgqZG- z<-okEf9Y}Pj-iXiSOHWsmV${ZkY506qLx&K1|fQ7a7(^-3@#ppnER83@3vy5z!=U4rN>>8;F5(Y+C#CEi&4*;?BzGmg zc<+r6>~rp=4YD;!=1Aa)WGz*y2aJth`xhr#yLYM5lp&#=_CFIKP$M+Y7V}Evz#ECNdhR7EZ1v|1o{rqWu=Fw8e1b0f`4U;X z++)jqW2(J-5YTbT5#KlMUaioYq{B%FV;%atvsIm5pwNZ}A08~%)69JJF7cGDZsaK$ ziu(7&0%kbQ-U1)2@5vbVh0`z48*5iA(@qv8PD6t*5bCktU!ro1pRrLp(pJto1X@u* zy!@M9JzKy&2p;ckW~2S%DnnzyM1}{=_A1{OS4XN2$K&o=izcj`3x`}U>C=HMiMHt+ zMs7GV+5-%j?>wSZToZheSg+7_e+XPE9<(6(?Q)q+7-mYmMIc1gc4i|i`TO**))K>l zJ8~fHQ`)f59EcfGg>TOc5hEo~;!CXm)EZ6YT60mvMayDCAzB6S-D)`ee<}){?NDmMX9NXCprWX$ z%o_w`#{unMdnRho;ubko|JRz^X@#1R!pmkXf(Vxq`oZKMk5N95K8@kgh)6jpQuf)U ztKBIQ|1N?vZVPE=d&98YI6QOoi=TDkq)a<6t6V{oYOJ@X?!firjkc0Fpyf0&sBCV& z7;5i{%#4r63Sw`n8RLh_#W|k?=5>@M2Rr^FI6`q_&D@-<2u`EOTlz3;yNqPxKv3l4 z$B$K+$^{cH&(;uvK7kY|*hT2H9JLYWHY5WsMHf7AU(zGl%@^m+-O+vbm-vw(F=g*_ zUw3%zKkTUbirZZ5Q~%xb^DSI9?Mg 
zj)KnrVvi08FqcDZGttiBu%G7&Z>)x|Qu6W35_DrpVO1zm)KWweQTp`ZU*^`913Om$ z!^olD5J!p+<{27(OL&fkLhiQ&8uBBD8%p)0uaIFFG^x^@8Z2s%#eZOTUO61IcsCEc zfgT-ukZjfI7!!Y*X10|F`v(i;9~BU^)B$Y{(6D1w3PB6M6$gn?}awt5nWM;2`(^9IISS6t;K zAIe$OqfyPoUax`)KV3m#XLyh=FbV*77d~)zQIb%1gGc?~aChxqO7@aHnkHFFc&TrF zZ!)j$6KzfKt_(F3I0U@V+kc2ROl)jbp#$CeHD*f&+M3$hr+8uDlToWFpX!U8k3&U8 zJ%`e&#Jw~TE)(3s~#_;w}It~XgBYpodqr!Xab(awT zI|s}(>}OXSsyp82f{b4m2!4wPNHk>1iTE7F)v@)uY0@Nn}#L3I6#v%dG8Di*1!btTn zY6LACs&Y8U7f%ulh#=8bl^GOSc;nHmt&@GyHAL%}R(sI>190n2rPpL3IZ)oTP%^Q8 zmG91+PVfcv&M=~#s{^DrFGCQ`y_B{nw`^Wg&;?}AeKNhKgd{=QCb9j#U^5_zH)07c z&oVUE#?KTLEbu8Z((q$6o|_Zb^^$wsX+vr66<&gQ1xs7?cY_K{1higiFu*h>TF>mP z{9AJ5a^PNr>FSPKGwwCq3lk4NM7-OoKtBiSC9g6rW+VidTlK`#_ku9Q9Q;rnV z((4nCFD*zRb_dk!cMc<;@8}JCOa*^BT?`IwPKa*Tv23C@V%!AY8E*3UU6)-5=Lc`x zBsf1tc0t6I-Gu)HM!7k}KKML>ndr!{dKMEQFV%xV9c@*P$SiV@jD*W1>TpfP>7hLY zc+}JU`(>FEC0U7bD=R6Y+*QOh5XG4YBmv(huyA!&76uJ8uGBJgcNZwHU56TC(C~HG z`{N;oTK_94fVw2(NAj9DJQ&B?B;)>3k|0|1%-;c$;~%EXVLp(YwZ z0HLFaW)LC-h+ABb`CANRbq10rAAxSKjqhU1^7ql9dvB07d+f!a9WdD&{iztf^U3)2 z&1GYU95Hh#k>?<*z6%4$1R4I`BZLCd#J*@KAHWxKuKN2D1b#nX!JXq^eb!UfmG7T^ zv3@Q|Z8jR}vM>1z0*tsRHce$SrJ<<*`JLEhI!gq*wW@zXvc)9>YE z$@loXdC{6H3m_K8IY-1@<*>l;P}7fM_N|K?#=?Dd{TEVgTS<>>*81NE&|pI+5y3M* zn9E=Wp$E7-dA8h$@X_?%zx}5QafF1MC8;2`viY@|3*f7EU)+zxfZsN4EGSxjDwp@x zG%_ZVx)hL1I8id7e^9wuI;8V~d$rCg2x;`o^lg@-Cnbe)j&+wa<|1?;-oqH*}X^!2c^Q0c`>ek!V zd)h9-Z(Em--ZNq=K*@0R5i5{8@NT^jtc@P#++W@KDJid`HGelS^WY)=%@8SHqB-}@@13m`O`S!RZIN!CZ*0p}} zS|c*}CA(#*R&kRsTk>S!NbhZpCa-$cgfRBFdX-})J&(o)M*TGQL%hU zBKCgvW!23$!E*NL7U~cgm_*5DQ$CUA?8%{I!{)R9TS;EN?KG?WT&$sUXhH0B-Tr4J zY3>W;?%;ka+ff89+8k5b-793K0za-t1R+LfObb+E6F?i6eUdV-FXGyr4BbzSfVMgd z-fkb<@m8J+=yNf&Y3>p%2wp8qkc(4vWN5T!_yVP zAA=-pp!CYDXIX`0)Z$KN9{KA?*ZR6%KLKd5Z&RX94t)z5MS7i@h79=-SveBPb-5+r zs25ZFlUh7Bxj@Asl6=AZ=YfKu8_su^))HHOZw88J@>qIVP4)Abk%C0yuR3{eUJ2`Y ztj+!Gue`b?CN;Rywz6A|UnbbgP%Ye~)o`POsz3!O*o*s@I8u!h_*vQ52G6I5xWK?j zg*f^?l3dR*wh;{HJ(ol)vl7GQqqW|M2KN(ViOFhEfS~!_L~s1{!?$n$v-i{IMEF+3 z=zUYZHd5ejE+U*XPo3zqDOG9N>*6E5HxZfbao>+u&YnBsU1_S>{~F9{w8(8rc*1#ol0CSX~RN0iTIyAaM1z$0-dc~I_E!Jws@$hx0t&i zR4>{&C<*;>W0jyRkkL*p8VH060?Lx3BDvh=+Z^=3P$DD9w*>;SPE0I>14kaFD8cI; zWj_pc^$3G4djYG6nZ3I#IGjD_#^>B@%31o>hSGX5aVug>P{um(B-R`a`>yockH%Av zV40Ym6=V~Y-)6(2#ixO*LZoD5I;GixBtcx8@3gf&ba~F5AhL1D5SK6yov?MwTC4&g zwoWVv_+P>WZL<*ERZxUwT)*HLVU*AV=v_1O6i1k%P&0=86@4n=G8_sqF=@cE-MpiFv^Q_9vYqSqlc@x|bO1akP zvg1X454){@f{1X{jUt2W~XPo#JDM>C16G1dHmDvnzr6N^;Pn*))O3N&6#oo7Vps z$zv2q2e#p37;6{Fx)V~mZH`X#3%$^{q*mNx1sS^L#b+ zyKXW+*!drXX>hi~SwO^_F*}?En@AJQO*9s!e5adak-B;;?P+D4Id<=)e)6Vv#g!Jj-GV1^mt*ydtRs%jP$a2(8^hdqRH#RnbD2uzpXC5Y6Oq?7oG9vTfOS7 zkVS|}K8tE8aRDzyvQk0)*4eccXkx((q>0=(yEamsK;cy+pX6nOLWf(JB6#}PF|n{R z%_B#GCBJ+Yp&EM(!j59+?jnE0Qo1d8ys)1YAc$bB%)wBlyqf08l;}k6Im%n^%MF^Y z^!pU7(&Nsu-6oj64t!c~<9p&D&r)z_ES9Dt=j(W--^B4)K9Xv76E-6Q_1==qY%BYv zEtw~$qLVjoS=-I5x^%-Fc@>nK)y{U6iD~eqmIVQ;K$%ms-L}IrhzMc=DLF=CQ>y1; zq{Ge@GgjGzP}uhO)a5YYL#P6{aJO7DgHIZrCo6MFhX+S3YIdBC68sCgDnU3)N5{qX ziv=0@2$UD;t%%BJ-EqoV!-1g|V)?H+)>-vtIGd0 zT#M~2d&V0uqZvcCt*XhW?*G1JjpwI@Bg=C8M$q;U5yyD{A$Dmpue#tB=kdvD9jm6w z0uy$3oKs^D5F2Z3X+1YmLDPyT0mjL77$4vmEK(gF&vzme9nbD8)N5>M7r?Y)-hmp; z1?LIHvX}ghPnBaFR=a{(-WE*?!zyS0%0rt&-+xeTjQh(k12vu2h~&x$H6W5cu>wP6 z?ju;r8UmZ2+6&kOKK57`x+G$YU9tJk3!o`^^!Ju{ES0|k;vf4=;b|am4f+x$-MLyX zg>hB*yK?%3BA?yTS8A%o-mP8fo|+g5pNqNvpv!l;iF&9W6Wfsz#JNy>=-#m!o+`tA z2rY|5l5dGfMog&MuZHuOc@HD}4mpErB11+Dh0qmq2gh2|5HsI;i&A@wzLV!KE%B+I zWBsw?+?+bCIIwkQWN4VpQigLt6zLLLhJHF+*mjQdW$K9y{wKbOxp!0$&hb({`(2rU z6Cc5)9i+6GXGmI67mp&ZN(I{4SDin2iT_@YNU7H=&Oc^zNGWUs9U})byyY^l5{<;0~&&R7djPB;YZ%wAsUEsJU2!F5A 
zm}%Rl`0hWqkF5Cpu?h@8&kubS#3c9dcN#f1HgQr)$o-)Yvov#o6tO5zNL$_h8SgEh znF#CO!m$kDkO*Rq97!Jy%4g=8t|w~ISJT6 z5#i;X6lVO*u*!JO941MY!6a`&tTE- z7zl({5vYR&7C!?(H_jcH^?QHvS6fdn{o95kSZ)AuHtNPZ>HYER6l+p>dc_+RdXj6C)#2?$u(>rtxGF=+h6={ zl#g?^;sAALQr`Fxy`P3QQp>uV4#BP^p`ZvF)4awik~f^QCX$zz_vB95msyz30bSkW zw|5zcG|WS%0ZI|>1k#NFKbk{Cp0KYK^&mG9WSNW?oH*QtKUr}Ar+PBtZnOkc2$v11 z0V36bL}L2|+^bI~BoexJNT_07A!Z{c!oJ+taCy=lg@j@tf)@!mL-v(G21+-UL9eHK z)fwD}_dhP26@?2*KLjl|lzQN&xD#EiImy%?SDRAWU44+>KU>x@hQkA$DWO8I3#$6_ z&{3i)NRZmD^rk{fEfa|iSa3gpddVu!4-!(J^%S?k+&XZo?-f&gR%IN2@2t#Aw)jjI zFP=UO7Zf>ic;8P877;Xm?`IdUedVZrBj)@=OsxyPyna&Qi*o|e8<==uTshnEuO4^O-%|U9l4wI#>(gh>F<9z9lZcJ3e>{`V+I_~SKIDV9l`Zc zoW}n(Jz|mcObRvF zo;+D{YBm^I%xCbs;6S|plU2RH8zM>1stf})7q1rt6Ore*30znUaGeTPQ zx(4=l+@;mNtoqyR*aufGa{yrq^Vjo zNsT}LBJrE9WV#T_$4D;lJbN8Sf8{L-=LGi-MtuB+IC@GHgh_7cj z?sk)pD6y^2T=oM$cJI6(fziUO>HLi#R8CG<9fGtvPiZp;haEtdOh2gI^KE$7K%1-B zMR?|v@^N+T#9NqbM%p!Zh#l|T_;%s4d)4Vk8a!@*_GH2xX2Pw_D>#8R?SrU7fQuCZ zs0J@)j2!f2IGmL#M!r%@0U)grUE&u&W_e|VMw7YwyOIUuS$xczg+SXHuCxjk|HlNJq5KYP{Zj&wKO6LK}c@6#`ON$*!NC#R3uRNlt?$ng#KJK$w@@ zSbzRwoD}1nwh!lNY(^Y$V_S21@}CbIKtWqgzJ*<>T3P1b$G60~ar8mmMpt*fSKe*h zk1Kx!6x>Bn#q`IHnBm6sf3f%}W;0u{ilCjO%zwuqnC{ZIBsFLOi9vL~nC<+Yd06*x zg6XgO?vF(3`+`PIQdpeo*j{}9(<{V8*Ln&|tzdVyf&uuHPxw2QB?F4KMMmBpOFYX*=qH-?ops{6t&!U9He79sALvq(k+GwG@aEAoSHLe6JHkr6X!sx3x$=4+Fl1vONWY-Y$_){e=mZ$#6sq7r(t$-3?Zi z2{7s&jQ8#A?MR(m0I7I8AMw4=_q>*iVm{VpW@b%kHmlCj-lBi~NMZHmt4}d5H`ldS z4*n_{k2EcOjYm%x_8|*ol1!iv%|VkOcp8cal?9k!(`u7XC5T3rE$lg+=nA!;s&tR0 z*V32R?XPh@q>v{2Rm5idRmmv~sLG$wJ%h_8;oX;LsG%~smq_l29ogMOERa5PGqa@P zso1eCbWUVJC1Xl2@2@_(J8n6@^(>+ui%=;qXWcf?jgdJWE;L#xXNp|6k7mBS9kHaP z%5Ia$8ruRbT4-Q0WhVmzLxszp22iqr<4g(%*=$ZjaLUw%7#RUHrjd}6c7lXcrO)k! zFv6d0Fwyk>y->y`3!4Ci((>7b4Y?mi-#ww0`tYom;!8~zwH7iSbwPW$!TyVixPz%6 zex;Z_!Dt_`y%6h|;se!n$k#(M6I3w=r|mKFxtQ)9ViK4HB(0KpZ5*K|Y?}f=h#Pbs zUo(sj*mmnxiVT>8pj@j1G^-zWTqa&k3rVTqzmj^s!l-*uG;8dfC7~pkuTgK0f!DX4 z8a!G6kx={ik`vm)of275Q}dNEUbggt>OeYaL$JEt3%ReylM%EafICR7Kw2a`X93Ri zO@-vKMnh;p>jMXs)4#qh&^rJQhG^%XpizmJRtKd586X9S0f~dI)MnQsEOSLbc;FyQ z)H0lwTu1um;v#%H*9JN3e44fAK2ZQ65J}*)y^;1_`_;D06#PxsWdl8^>)5?j=4;YO zRLH<)pA=s`m6I2X3=L2e^-V|CdGAQC@9tBvI8*cW6GzNE%(YSfg#mX@{o8xspP6*P4SGl0-!dH2s#_P>>HTzA@9M3AGn~w`G%+T4HU`F4K5_|&zK)e8_gd<7z z)O<0KYfZWjpWPA#DK&N0=+>DSaP5x+{0*;W2lR`>GrM-r!WA+Y@fjy1VQ5k{cqdo) zcZ-g<`im{{+$Su>{Y7oGy&fYM>bA-|ZETAEHjgV9MZK(k#p4z=xzL78*m3rGM^-9A zK0>tQ9|Oh-UuCRW+g@PA7(?@-6Q`^ph137Jv`VIhdc#{hsSE3e%fSQ^lNKL}zi1nY zKWDAMNXvkRczx#^4yBz$ePF;WKo%)lM_2*ZE%2M>7iUq%XAMbpbtL74q>rmkC|dA*ZFc_-9~yM;C7 z`Yk4c_$J&bIqN!ELnHjl3%b-;A=OylUGA9twSqqvFS^whm!Om^EU#?|jeOR2+NrOx8G#9?Y-qhN`fN)YZLnTXaFSYurcz zrzvxbEl0gUkFaaQr-BiaYHcH1+e1*Dcu-4xdi@81;1!F&szBZ%`g8j;80OILv*SZ zJ^+8agx3I00cl&2X(z_=thWI%)zsga^Ujl~>T61*()K>Q{<|5lhuE%Z=Ydr~Z5SB_ z&^V-&oDpqQLT#4ygl0?377viwDRdXeo zBW5mrBy&|WDKuz6R3QMKsM&0unL6TMKVqGXZ~8@`>l^OTqA+DtSG8s+A1Tuj^v#hn9w0;J-~>G(DSr> zj8}|1hB($btwvnible#2NEL&sK>HkrX=ujb1@hoGYilGBPiE@H%Z4C{fU<_Mb&vfm>46oc1 z**43oL)!nhbTmeRT{C}rc4=b<9! 
zZ)^e=@DQ{Qh2d~xBO?Sjoamr@Z$x?5Ng#bXmVRd}Y#Y_;O>OSYMy4!fX~Ss{Dk>&_ zChaAq?$@M^Y1Po5mWzp~BfXJR^Yw(^JX)75Hz1Hky~B?}d-_LUgvdf=KYrhK>@p)d;Qs z@+bNoU43h$kO|fOmkBMX9iI_aW;=1-U9Y@t6ZA9sm7q#7XKGUfk{X$_rAcEf4{OT5B(!~#_LoAbm9y^;C%1p;NhTJUMKeh0 zE|9LSP}xrqDa*(YlU}5-sijpVaOSu%{6W!Ya#X`Qw0ty);MABN{SEdxr?TOtXF#YH zUz#*un*KUb*~>^S@G7aH6IGaT0g6V>AWc0j8sei55@7B1GhXv$w8*SdC?-Pe@x%e7 zD+{MoL=Y$V_7+O%*op-=l%Dtzh!&v$HcGJ+)r44sq96&%L$h+vds) zbQs;y;Vs_zUPFB-=2woI`N&=?X`@$(de(W36k;g$^ch0W^$T2cw!sZY6G3)?@rV;$ z4c%ht_v&6Fhnxzq9dG%bAw1~J-P7oh+}#_Fn{t?v zGctOA{CHcCrjM=R{dOjOY??_lx|(kR;o-(ta1pwRd${otIxr%GF(EjJoLvAVF2#3S zV3yAeDG;ZIn|j)eH*FNEM*ZO9=@WRfLo|83C; z%I>CGXXJ|Ku(xOJ^V8K51iLzHm_*3dtEoVKWThtHU8C9f{#?X%PygBN{YB^g#QDXx}ey13Zath#aPzj^Mm<88L!X42J6S23xzOjS;p1_k5UB!RrH`L*`)H)ttrk~pFzyTx= ze_Y5k{4He%H?BTC?WK8*y#Qs-SP+Yi?wxIu(>~JibCOD7+djwG_a1m$Sz~*RdP-2=NU z=r5->sh(>d&z@xflQcoWqVtrxrl1S+!6ygJDr&3#l~G4LJW^Pen)-RGO<{bugRpX1 z`_Y^i)7+=Rm5ai=wo>p^!$0SB9@#s)zL9D8S9HyWm*&ZF^#Pab&GV0ufUr#gZTn45 zW2ywEk}3&w5ULQa&LbU=mrt4P+dDTlHbw*sYwFiM-PL${2CZ<~QW`kj z?Kch!8qE@0ZsgYx(~nTD0F+bJb<=UFDivB`aNcWB+vcz{17&d-v`1B@>GFW)? zBF6^1t+CPNai@I`!Qt$J>##dpb9TRHtJbz^>$~PNov!6p<-EODmIgDz3xfFFB^ zIyTluetlX3@q-Pw#`XQlDJdR!#UD{GrE|w$E?O!2b)k-9J`7jAC6K;;Mcb4=?gI>* zl{WKyfPc-tj&4b!tt7OnYntyN!GEFXB{}0_E7hlcq$Qat0l9RpewserU^RcN>=qLw ziU9fU;=~s!q0E?^ImWBAXz3^SY>ee|?Jr@|52p;zYvI=UOOhV6Z|%ZX0>+leuAO`6 zN^Yk{((ii+SnLovpLFO5rXtu!hps_;+HWb8$iZ(Ct-(GIO*eATw?VAW1QE08HDHo~ z5;XY>DmSs7?~7jsZe#GX?NXLd0;TTzgwDdk(nLBnJ;M}^r$Vp+0U@u&MO$SSEn6Xu zwkzYO_i_Z%54WtNW7SHNYS`+kk%E$v5Qgh0(6+|MRlq+yMB?pRfML31k=!!lVcT|! zC{qSFWO~+Vcqq+yphh;77Cl%|!1asOCy7Scw;+Z5F`H4##T=^u9mw7lO_0VfKDuH3 zaF-IZd1JS&r?h~*CZ<{jI_Du(vV(w6bpQHwUA~2K?#p~kB^Lsl<~!ybGd&Mvl^`Zr zzJAJP7VJcU69VDiYEB#1#|9bqzdyeSpTwH6<`rsY2c($_^==STIB08X@_V6t#HGK= zRe)*#RGG36&fEsxP$3*(`kx0xbn@^GM!9@k5E%Nk`i-R=cn|pIWBFMoi>8^VaKsbL4X)_khI5dzKI56BzVlmIQDCaafpB#Yv&`jE$Yjs?ZatWbi z3{c#uGefDILb*LJ)8SXrf>zes;Q5|c*JD%JR`+An6ix6-V zNel)4Z4pmlpqKj@URWF98JO5d-ND0|bEr20u2?g3^U9hUPf9a%Ct%6lEqg^E{i-uM zhY5ssjy+p3r)H3}5PwAXGioP0q@V(W=cr3AW_lAkQu14Z4N_^93+I~^C;_THmH)NE_qGsm!%?douAj`qfTUYVVSiO66QwyUaI!mQCE@+c*x8xa~yvAPf%LLw~M| zTP>G-t6KH~->;L21w|2}?`X7e_OuWm7LTy;d=^>EvoV=*4Jor_hD&a!azy?824WHd zlYrw-G2y*RK{4uSmy8&xpWx{<>;QQOsX$yAEaS_^2FENJ5ZjJHu;@ySFYOM;Qh2%J z-jg2hf#GB$&eV866<(T&Rq=)Vi&;N^5q$R@9`0W%hiM+=uAM?qz5v%=7qm547@c@zsK@A3tfDjJ9lN|%^+=+7O(mo0;TZ1*4(ftiJ*ucy^Y+rXf2f1i~=Iv}6)j2bWi{0sI{Dry=4Fn4uH#(wYr z>F3~3x$kv!?pX=zk8b+;J-R&w%P>ZHS7TxN(o}m1d;QPB#hDtd?%?PDya4TVNbU;+ z{)*109ni1%4LH-mKzOT}kuq0>H6ayG{F*Q8$r&qt zG_+LDd$j8xAak_3W@g9*mE7U3v6BEBUaQTYyt^5@rZiIiw2oo+x6RJeZ|8Zr2Wle3 zv?WBP>imk2W(FKA4^-{`jzZ2PC1%?0!_orX?)t+9A~5T~%>!Odq5yV6q_ilp84=PM zb4)IE`c_MnK|mGdqj1c8FpH4@nr3P*5p3;(gqt)!0?M3d>5;govwnGdE87qVrA1Iz z*M*000b~?vFR*_@!mT&~$gWw?$n`u!safYcEL^~B<&>mUk3FQZQE+%{j5vrl>B`L5 zILF#y0R&b6J027q{0L0@LL3?z3cdqPuwZts71abDMq=DZlU)BD7NEQ-3C(A@#+Gj( zB2y183NwBXU<>%iov92ZMqq_a<cYl87lAN&hT4p z{Pj{HVx1bPVOXAkmJ7hPfOlElOtafW1nV4(7RXr&x8xb^COkU%{QHHpvah+_dKo}v zmF{^oqg<5k&S{U89ljDJHe7DIubc~IF1#biQ*Wu5yS|%}n(%o?IddEtia0VoR`jsb zPr+Y~_4DH~;Iw}9pmc14$t!m({^aCy8~{pziD@KcWR4%$>K7Bg6pgdMo`9+-QQStq zIlxJaV0TtTks}}f0s`0<_WF!N6lDB}xSFzs)S%l=QtXCk< z^Ru3)NKLU{y&ebUyF&q?@B@$O@7C43RpinG3{Zx*!ajzya-H^DVcqtUgb*kROqT@a z?IKdZo-@AvhL_9-5BS@ljXLRke#MaEl@HjNzQ>)Y{95aTEIDx8pUmgJ_psQ1Ijn$|L9qg+ zZ<-`9N4i_^A_5bCONnhQ6LmgmS}9bQ&ou^wifWl_14cr?m9qx2;(N9;q7^m2){r39 zeA~ydz>M9Jg+R)qNO-gHn5Om1iCZnMvjr5IcPGDB$m`E`xJOR5`4j8)O-jAZN!wBR zvcRz}o$r_0ym<;G>`oH=;i=3t1TFrs7m&}u{D<4hm5!PP5H^Zmf;Pg14dVX2bbx%LxMKxs&d zP#xL0Xrlwpp|h7Z^K5Gj=ITncHI~I;O%CXH0GmJor%vcN9Qy24hD;drrt1m)la*Lo 
z{{JKDt%I^&yYJx-AyU#MDIp+@A|R=hNUBIT2uOFg(kUWciU*MHmXK5$5s)q^X+cup z-S;`q_xH~DhXXSV+}CyOSbMFt+hC&kS=hgEMXXu{2g3tcix$9rsWx91GyyXtP%CoD zW?&mJ1E<@x@7&EJy?W&lax{_Yf2CCr!CTqSN@*#@+I`I)0km6IY$;=|ZYs`GG!0qN zhIM)41najNU0N_{(s%B^yG`%lButA046dG0s;jp=uBb0*vG<-+vzI)WPor^PE+6{3 z(xpbU_nOffiwi3IL$hSqpu!li61mAO_byf&g;8KWp)SHzJ?=N%RkO) zIitJY8nu2yc=9U-X`GzgKPM!}M$peIWgtxc0+dvWbOoif7DHsW20Be|m{L>NlwOh* zZi^8@XX1!}*!R(!XM(T|N>*i&-nvmXUpjU~_*=x+iljo8?edtM0uC{H!Yr*7hg1Bn z9}oMSGi3FhkA+W%$nE~P)c7FaHfaX(*}ot%?f19nqs zwCJmf_Zhb+Jmb*xqIK7x&3OqCpkLj=+(peBRjhD4Hi?Lz=957~t4Kf-jq8yrE+>hj zIk{Jl(N)k5N%t}PcS`=}|M8Jhyzbfcx(s2vH`4X<2?>Sj@o0KT0BR8)9)8;*9qfRX zm(8F@S%S?^9Z+d4G>S+_5P+SQgoFXC%qcg$3Iq5Xy|+;T{F&yz@TJdijl$fh+r4v_ z@TrCwL=?czV8;tLZ@m>SBcXs&j)>c%1+R=g5Iso@<3Y4S%IDD$DFlQM?^SAomk8jn z!7nlq&Rrh{2i0-7M8To1pVAFm^{H zk&U~8jwVsxw{XBHk+q{Ik<}6#Ho4+u*#Bn(W|d@?PBY;u&%j02eaRUA_}nmJNNs0- zl9Ej5`zCIzV*cHoC)RV{E^41nYt|@;Ly`MASd0}ew!OBt_6b&i5i>=YZjH_7kZUxl zO_o|4_Y*1*nojzr3iHn;G6%iZ>O!GQ6ft!8GqLLYgLDW5s@z)Fc_qxfzt=P_{^!d^ z>7@8PdCTY4cZMQYpGIo%jZYV?tyh?ISea^zs(>WV+Qg%0Np!t+X-n(et}B`Ler(6<(WN#uod2eV zDtR;!ENekfYBxIMgm;RG2mh(hdzzoPRKxP;>&Sx1AWp*|u*F&W-TV{*$@m=rlkBh;c-)2O(+SWZsOM3|q`zwl733rD*a z$@6&o9(E@p^5d=rKZ!aP98M#R_>QAWHnOaot7Ck-#~!rVPmfJ6D>6zI%6CX(W3h-G z4M!i$X7Ps2Z*0|C)$cx2Ot%cV?Pz`z^MQP=2Nyx!2M0#N0MzVwV0n4jzBNn_c`Q2k zeQ9qVFzT8)VfF(el`}yHwZ43#;_xXQI^#-(uOT12MEO?-kNq zef0SEIz09zeQUD3o3l-7nU`tqJ6@~(Y0^h`lpF7_8s6y;>nG&rcF(YyQmiy9JX%Pw za^dRWQMJSUbku8)-?9*BD^aT&<|KRtm_|T8i)yBXbLSx)mSwUsbi;$98~0`#dCt_9 zC9(CNvp@~amMN1j4J4-DA|SFul?;*Hrsj~TUL{DY`cRVM4fE;bhT0%nnTk9Us=s3GN^TsjOjp@|y|qW0fD4;w zw0OjGOcfQwn6af6>Pt3cx5bbMTJQ9Z`(uLo;|m}x>t0%Lb^?wssHl`MJc>Wh6yWHm z^E|$ed|EwR(f5=ekJR(DYZ81p;$M|gLx58qqd(dF%`@uTJ%Y0u1-$f(U&R)nn-b}7 zPHv_16y`CVA*9yO&?spEqRC~D@s##Eo1f+zRG1G?xg5B9TN+`{89t`OYFn}v_|g#H zeRJ){$DAKJGPa`}oW#RtG#{ag(uEfQ(mZxZNul=Z7s0XO251 zKUtfMAC?_FO4rxQwrMDTJFbpBz@#X(CDHWvPfz#V1koroe3`e?^JZCR9m=0)veFKG zj61sZw&(^Iei%pLAR%bS{mS?L(~LxbaPCU3gBe2ui=O43qc*Mc=9WaR ze`N7fN;c&Goj_J(etq7+u@+}`kKpLY8^w|N`4c?Jcx=E%sV$t8t*ow2OT!N)(Wt8c zZat9N`2?yOB)Yy%0EYG-HcBFqMdjO!2UBMJBj62s$xpv$t^5_m)Dc( z8Cu61W++sYw-E>6AmnS%VbTQuhlrjFYy^=7E(|T@e_{(&n`bPVjr9fBM8T@EZxJmO zGQx+eZ&=Eq@OT9~PR~ErHvcJZxe;Nq;QE9t67#{9zb>%6ZTY$`0{IUoRA>w7HRtVpUh>UUaXa*Z9D>5G*3KNI6KU0CO6H zO7XbRnC{yG*#)^Kl@HH1^5z5_#xbV;R@&a94;}HwH&XAYRFdHw#Ra~_#e2231cKq3 z(H(RA39?CA=`kE2mH#(gjQ=V?qCG=C9IIdVtuXt8SGmNIi^$B->UI#hG|;iFDYqSe zpNK_vFM*tVqekl(TW`A$w(_R^hg(ww-Am1V@PrI9cVhi^Sauj#(lITdpiFjQd9e|EBK=jnMJ^6z7^ z9Cp@?5E{Ey*ebU02YPVI!*V@esMqv&NX%|uoxfj8GmN|9X$72Ahf6rWI5aWfjRLcr zRRqNi>hGjZGOF9ho;RUap#um?46W5E=fX&Unr;WcLddpqWSvlDscA(=SE*TEyo1e$ zrh={BcH@>Vt)8JiJep+1HzSxNPOYcbrdot_ED7o|Ppm^0YpOUft*j(BQs0rhyzy!5 zxvp?}N%?k=SpG8(59p;@Zf3hr1dG&kn`OM3+v$c#1Dh9y4@ES7j7|+#Wg~i#I3|Tl zwsscTLc~w%qPL*j!@6TWZj~ z`4HqhTSO%X*lI|oW%n=lkz!+p)G+?_8aO`}k!TMw0(Euk#OlSmP05UZvVu5Ml(2xX zRWjH_jUxU8Hit1{1h|OwCnhGcXQ;UA3D}FgQARr^o1U9P;|mgwkBK>q$Rr zn60e|m0}zhO+`gZl`s2~)}_9BBUO!x7w@jmA3q$RGNq!Ulf6O2Zo`tTVUGwFmbheF zb$dJ--POB2(pRpNaze+NZLRwwt9{)YRUG+l8oi+HDySr9282wYaR7 zro8wSjdAAxM`3X0pY_Yoh?`;wSXKRmdYX5)d2J6Xy6Kz8{M(cKC44S1rhM3_ZA3zs z6i{^QF{fWzLZGI1@QTz3;uM#oDg*?J6GiR6K6INX^9yYS9d^J^%THr6mxtAUR?~C?onXS3`3gaESNRX&Bgs?nfxVr0^ zmYg?J=zxQ4r+i=d*;DCrL5naeSg-T4uKk(P6|8l3TF4)5mg{)n8sBf;%T!%tt=vTg zNKPzuaeVu_;D*8%XK;aj!5V)O(5@KVVzVEciWa~gApk8p%VK##etj|S^DVV+`Bfj< zd91$pCmTM0m}H_7t*aJ^1^T**#6Ql$=WS5KSR##x&*y2Ey8*mA;%~*8;gM;MQ+HXW zUXzIP^JY3eLUAq9X1#AtIVZe5!~0exTkvgbjCfT3FjU6tjl92`CT7dgl&Dzg<+rUs znk+xHpXKg`>*tobvzVbv^hJGdXo>&$jp0H!!w#+hk{yGaSbV^rr~(di@iUg*-+I+l z6$TW$Z?g+HOv1<_ 
z)szzBAP=8w&w|MBsBtE_x+AJr3|acyuc>k*7+WL4V~I7A!e$|{MUy~pu?6#S?KL+V zP@7#vs7yT{y~%|vlUkfT`!c6QOtFW_MY%G&>Q{bc=GIIE2D^1yL4kw(V!7Z?=D06} zq9XzXdA@bt_p~s8(ACVHp#n&=#x2Z7Bf~B#<_AP&FpF)lW4(v%RMb$Qot+?Z_KaJ% zxGCXyv6I|KG_y@Pu;OUw&@>_2a*`)Dw42OX_zxgWFVI@gLE@Z+$#-p@V1M;0{J#a6 ze-ATTpU$w}AM>PZ=cIA=E&fcuw=|auRtrmS&Ur8~&;E%|uAyuN8A!nOCHVsW6lp~N z#$Es4iP5u5G(L-xAd5R{%V4Iz$e4FPIB6L5tI5C;WIUNP6I^ijZXr%dY2R^okTNh% z|A1s#bw)pc2pW;?!_eA<9D)w=VI%c~B}(fu!3BW&%*)quSJ;aq?2FE@5*<3ND${@k z{>8i!>|eRN$+a)tK{t2e)2z(BNdUm3S*146CdqJvMH&KB5|?GE&S#4GO1NjH=g<(~ z$D`yPK#P0EaRoLuSBP$cQ?)Vp>;mfikdF%0zDUZxKnrP{Uhs3C&ky#zm<3xlKdRLo zhorO)#}xx4w9#Q2Gawp~=}MNDp>vm60^KM<-gPQdu)uA}wVZUPGSw^^b%d8VVR5P5 zYJ&eISM9!FOpD)7>IREi2KLN-?k3?9@`O?Dy_k#&!H)&F`r~-SO*fYlWuN?ve0Q(J zR3F=Oaj)(YByCv~iWgpr?57{*V6!wcXPZJWPfi9cz#IB?BOx9o1$1i_xl5*ziulO$ z((fe7HM9fi8oO1!_|`@@Jz>dG1V790?}%dNF_f zTIjJc3ua=iL~{`n_{nMx?*5ua&|m=4L~aP%(mK~(bVolJOI<}1h3 z!B*j;nW?Qqhn~6;nyWXnqqb%nFaz*B;jBWJs4*YFQgFP~T;C-A6?gWhmh?4ocsi#U zjRG>49KC|ZINgNNBMCGzc?~pz&RWu`c9FT3W=Ci?ropg8Ff5i_xa7l5uvobkm&577 zI&XNkhiTK9&5ts)G(lUdpL{=EOiSL)YWor-XS{wDmZx8?gNgV%ZIg&*+_A$lU`&|d z<=A?sSgWpPqA{rfx`KMvtU){gn>|(>sF&fu;!)2L3yfwTqT!=^d?!1ew#W4o=2;ow zrr#Ri&ZG*^szqflsI9!(NJ?Pu+Fj(TQQeIOasc#T34!=LW44}exI$ns6Y=s=)!icTx#5Xg9 z_}*94uLmEr4(0=o2jhR*We(yvoV%jK?0O8Qytx-lFAcgcguW!K_$rc`Q%Cn(X?Tnf zy#Mw~MMjqY)BWI@9EhQba+~){l8ZD|t{;F&mLx8;o6Y=l#+D z;I?CU277kZpjmS8dL5$@0TPZQ=HB1gT)kHj8ML~YbwHXy7^B1)L6Wac9TLzD`#0iP zaNzvW0|UGhNLc|EXtYl+cQLOSZE6#ZI3n`Gum9dc)IKSM$yj}a7r$d5r;u>lx%+@e~w z2heT-LB`a7(xA*=gtuhKI%D~}nzfcE%b#wli<_Y#XLIwVt8L#i1*AXv&(=+x?9>>% z)5TNlyLVEtVHMlG|G|wOuqHC!tuM|%#X9;};APk()CQ8K8oQav&FD&Vk?Ihxp;@|X$;bLia!ArwY3)>~-1m;q_VMYNJA z=yC+fjitlqb}tQvLmyFhLOvlck*E5006RJ|f=#9$P+~qi6gfEm?Gr^dfjJchYSj!} z;c)kKC^td1_O{;~G@1IvTp$VlFRsHEXY5qe`v&P&R%f?Xs$_sG&y!?*;%pK1dFKcW zMcTIZ?2*@BpKI|XeE?byn3>!|)FQGs1Bd6nW{64Tu@0gObM^i3`u6N--}0MldNz!Ii$Q(18`ND|24tN_>kR{8YeJ8=UynE> zs2@6e6&G9b*L2hAKQh&qoVxO4Oiz-Sk(NMm{Oci{N49;i7-SCy!1+D(`2MC_yLn`@ zF9IT9b%3RI_pJQe)>c=(X-DNAS#w?0HznVgm-OK7u7F$V>Zh_(T*jR>xU+k9K4+cF z4<9hq9Tn@O>!p7%q1Pltuk(jt-vJa>Yz;reKzz1Naf?cdp4YVRq0DyXAAqp*rJ=4T zlf6eUZ-A)5G?p_6O{|E$rwPgJ4_86{M0#*-qDQ!hZ2kJdh6<@*VTj6^of5@&&rj>m z&i%qn_8Z9WTwH@k9{=_2L~bLF{x}GSf|OXL1MlD8k(wR@ezi4O7!f0ghWk^M49Q^h)FFaxj#0tqJUwzB$sjfa6ypLsORY7i*iY$iz2t z6dhF-Ub#rdw!(J}<{y$r8kl@d0R;sBn!&zKZvSmiV9^r5%D?YC(@V*74;d*UT{-PG zu<;oO$3=X72oPqu4Svw%x$FEtGJhu8E;~M&(w&zjW%WA{*To>q7H?Ubo$nskl{mel zxoQ>_&Zno1w%ykQ7dKrZfIpEOb3p>xKiuNxlFD~osMetDi2mnWRq?g_XA=(Z9=r2V ziBaQY&Dd`BXxAPrKkxqg_pf)8!H(z#mkKPl#txRU9wuX}O|2QTq@4CySj26nvtOI+ zESIj<^HA`|y`oIa%<+k#u!ucEQ{WpFL8ux7#DYY4_u`mFH27z>tzpxrC|o4(f4B%L z6nGYSJRol6quhtzP$SOF^GV|JE#wK+9%G5j|EM2g^cKq4nwydGMW_GcOpm#_z4~_` zu@Z4y;73vdI_DbSZ5TD91g-4COzLfOdp=apUp1bsGzT?fX zrBGvYk9qM2fS7{Hw0z4>Dyp^y979jvJf3ulm?9a8{3Bu;c>jf>>!cx>j*KO#Jp0x? 
zCqg1Aniuss!olCNy8>t5T}vd1DE}NyYpTkkm{?Uf(Yv`Vwi=tF-CuIZv9-Nf(y}wI z$ZKhl>A5mtq3P_IIp`yCXiplj{e*@p#F#2KpJT7h{tsql~Qi=KrTCp!V5vkLs8kz*pR)u58ktNY~jOjIEc z_B_(@+do#XivZFHNPoGOO=V0C9Z>n@fZP#r%<-_U?YudJRGK1MZeicm-RTn%I*g40 z>5vi0IFuUNhoAWnnU3WVoprrFhvkr0P|&Teu3Y8ZotjG*xZJv*NU`%P2WkGta*|)P zp>XR5zu`k8oZREHX&((V+C7e^kzrUyGe>Bx6-3!v-AeE=hFFrz9QYzFH-fYrdF``5 z*jUOgbMPbyjwl(cesERy-G{^tdZ|s3M{hEzsi{Rq<}avnP1kNQs*2HTX8K>8w4jyc zWd4R)sL^eUgr-R;F}`$Z#8KT)&2c3ISG2d4pvZ%-C8SOs_D^G zBD>Z2(l|Ny_TAj{bZRv>3=c|V6@5nnsbnTSeNsBv$%j9}vl+%+duPy%>6e8&eTZ>t zU_n7FS{;Tyk)&zk?z1StM>$Wc8X6SLFv;#-5t9uLCn5;-mb(weurB(cmTrf7H;%kL zz4$JoVeHfgMRoAIRm`HLnLW3i#_nMx3hX77)JGLB)~nn@%0{o-p%(U?MCxr;wJF@bWS*|oedUzg6A?slpn{-o&{F2n zv%+#^avb2tMo8tofB!zb|9d~5!N?4=1*#8TG?#)2an-merp`#$8WEqV3c_i_8Y-63il*KjF}X$8GqPi7$++m zi?T&oryf!jeThl(n*N8WAlzNy>+1fHYX(#`Z3|_uSuERMna2b$rS+R&?CH}5bHBUK zM5N@ck4lH-`_Uq(}qT4pxuJ0_?t<6kL zNjbTyN6VET|kDbie3`)O1XBes)hc&E2`l`YBs3Z0nV$;Csclkg9bG%z@k--OoFY?rydifP zKYC4M%3+G-2oEsy6P2KBIL`fmp8NdvjPJ*OsPvxS#tv<#V=x^xZ6l`&8Gh?Wi1iBU z6PPW2WL7Ep0AxNW3}xBLNPuND^Dw}G|6EI{H!MCE`&pq}y*c~gNRg)$hVtJ8t*0%>9is9s9qykm&|s@rLcFQ;vEaR7cKF%cKiIEuj%E0(9`SfeEp_YN-3GtY6KX^*OAq&PIruC z`jnyWCM(KzKBYopsbsg916su5aDdS0byy9_6aDc>2<@Yf)&KlcP@wX+uYl6_M^sq1 zGHd(;rnnao^C*!xMWk)lNsTE~En52evME#*G0ar>PAy48iu})GBFDacdjqFq&|xcX z%djJX85`rS-&Ko7`$YZE140=wcSJaFfMl=k9_Bp1@<`0Y%MUf}dycM^xP`GX9yb?J zM*62{_$Ib}lAoC%{3RdqiHmdR=H+$ftK|iG6ATmU$iKm4lwo8Yx1|6Y$M2KH^#>Lg zdnp!~#Dik{Q2?G11EXigntPHJHd)NAI@M=km8I+(=G?Pd5ZYoXFizWk6^&_h{dR* zYduFaj5=*+gl@kdgtuaUP=!jL+%nj4lu}%XR!3@`Fs9K}Vp&^nH~52<-xvIHL>?ky z!zI3_KFzQCN%n-X8G*U{Zo^9rlMF`au;IOl>D_%WmBoMT&av&6w7fl5BywJ|ahiKJ z6V4bZ!$e1yq(K-HsUb!$bSEW>@?~bX5zY;XnQdQp9O?j6Z&x(4Ck^kf>I1`$^r~q8 z!DkX`adW)b$(VuyoU^~aYyo+nUPy=TV}cTKiY|vXOde(o)$DO%0kVuiY+DeX>Qg;d zQY<<~R3*7-pTsORsHLtMr<%u2#W1%yB#KHg9JvkLuJw*!UW19<`X&tzA()yQ3Muce zB3yM8_6IAuo3vHd8SJ+-OO*1zN{cUMdQCn{_~tFGorq^ArXoHtmuq=(F*Gz}@@mT{ zIp#y-!%>u}=AMTBINLAo7C4UUl}gr^y)0#Wqt5w>n}n-)oX%I> za&FlmNmpY}e1dIp`>@PBnf`Ok6g5N25JR9@xSvpfKGH0F`@Q6q`OJKNv&lw^+wjr} zL{+Q@=<#FPNGcwpvCEKANt&75HU@VC-3wG{jU;m%e^X-nTN7`Q7` zrd)q}`4`8GTjicEGA4QmH&LD?y#*)W2GdWc`V@wfIEItR{_R*}k-!p)-_LJfziex3 z`+%5~bPk{y&!DkT;hO-fe4B$K+>HADFow1ofl=>?*sNwK&EWm7@gCc2g}2#yGM@97Qe3_=IK6I6AH)G#FB7M0FY0Stq z=CXDR(A@qyN^)IQzlzcNhKR|~j}ukIYy0eY&yj+25Tv#r4X)P47Y-mR+4X{{D~Q^8 zg*TIP!$Fb&X;A*nGY@u_wEjf$`6OKLp1vm(CUNOsiZQMdxNMV>(cgIV%^C3%%No2S z4M@6MX34zYylu;YXip)1Wi@(Q@($a#eu> z?~GyG7ESIye*AL5L=kRTW>Je3vF_+^47?2mRUZaVZ=)cwX~iQ?l1C9^ZUM5 z=@<%M9g$N>U&rdXjX%Pg+x0dcIt63I{uO!c6(CrlYFY8jyHW6mc49LsF?QSz=q)Rw!$K zpRB}Q6P$X$MuneUT3X6>nz1xD7XNQ;X6fmv4IdKkVvP#awO8SstQwDY80WC~3JQMJ zU1iD?MZ`j8Nz*@}wc2#yCyF|YcWNEm5L({$RtT0*+0-DNa~fxU^euv`w-h3uqVmTQ0oKClQ>(XAHJSH*e!I6D zB5BHqDWI%dICQZMf}X*h$?6TOhD1}Z^b+#lYYp@$R_M9Y#%5r0+h-|kMzAdiF>+}7 zn1jYa=QDk^!7G^OPniE0Zc_TZ`5kRBG54I9nIiX}XU zR*r>aJ=|~HwdvORc1KHa`$9~NVqqw7!)2JD7o6^Ue%m(dxF*)!z18XCx|slSRQP`x zX$y)s;efNDMgn=?vh+-qA9kv?-4gzi*c*S(ZFwh$SToW8ZU34a9-t#r*IltSHu;fc zL?ejEc{8yGb%aj!>0g$eP&dVaZd15CHCOvs-|OV0Uab7{z#OanP+De?kdQ! 
za;vr^bI@`mYZ-(c`0WldCYXqfixo&r>|uEh7pA#aNQv)({7BUt`r`qU?(%sKT5M=I zpgHeR9|_YRmm`n}KHcZllQB4B?65+gU`UsnKTMMT75UkHq*=jL-Dc{HhkHn&x%p}_ zK_^pNO%bzhz1rZ}$EqEjd!aeKe;TO2@2oPOr^)xR^CW^aWu>)~LcLn=xm zkpN?qgFk;h^Wp#VG@rGm;#}ZPNYQoSEX8|qk#_>@sIXvCIx^!wiPPPZ0DIS78&kdQ zT~;DotRX?V;D@3ak!d`C%IiqmnkODH+(4OV+CTadcOFf71m;Gd#2Tz=LTjR7$U~#D zuO;e-<-W$9bdAM}E?SP=w(bnejqqjhOh z%?&K4OJC6JL5cy6N^-6?5t4u#sNh(-^riSNh+dJ4H({CZ%uJDxLb6iVB~8n1ceP>7 zVo1V^Jqb*ecxdyRf5r_jN6NJ}t(|#+t+^K z-M8Wa;iJOe=8jXp_$*mJPd{Z1GVsYNox<&I9oQf`PhoC5a&CEAZd*dinGv;ts>8ok zkWVZhk^f>BsO$S$uc9z#J^HuZM?!K(BL5ElNc|6!453LCkWfW%i(#U0lyhTbsb(z{ zV&p236br0})9t{xk5!fshOea>Y{#PpF|v@}yKOU!F5HP%VezqucB@*1F%@KUSn^(3 zQrpD(F2I;73`#xX@n^Pt)9daJherB^&*=$5g~%CJI$D6H4hN~+?d}=Al`R`$40L=J zmo?2F?>P=Il{%Biu%ilS5XoJ}JqY4-~84NsxpHUj@^OFl^9FVgwO+ zY}c>i5>g1e)*tESM-xCz{<2ey;th*$LYRR`>Pk`&QK^jQf^w5o`qK|l{Sh7cmn;y@}jmGSf#_$+_F8@2xfV zQqb8vH9SF2?Dl)LVt9Ew!hgQ$?MYH;G3j0%KaW`KsOrU)@VWDwJ(o+A3P(@Avod1N zx?7gNj~^*{H4#O6!wr&7`fU zu=%|*1oh`rG0GTVm*X1|j|JRDFrm7?<}ghG?bdzuQPQ$S3*3~g8J0yyFdYUOyb!mC z118z(kwpJdGLeZ$g3--_e?#InVPHO2gnFPDO!eZv4$zQ8s+I^rUa z?3^ts_~ma~r4q9KyunTisHBEzDtU7ns@hN(gZXx@i=#T_bxKzYtW>;xd7q!>0+(u6 z@fI`Wz7%!D>FeF!{Ym+SR#ahSVn_Il-D%m=#aPxyJg%s5mLXOy-D`S_=g2@|JOT8! zAV;sElxkhw*Z(*n54q`0KL0W!(ugKz!~5&%Za4swlWwTKWc^VP(RIcS^Z+wtfXfE6 zm5BJ(Y;}}zvcUuO(V&6?Lc58InBce2-L3iO zokPz5B`((L0a4%=z^EN0#Zx6bm9g&K#yDP?R!1?n- zCH_g}I>xS@@`%b0#{}E%3yIkFQwjg6NI`)wwP@2In0{qYcO4B?yyAFdm#r`=X%Xu< z)0*}{?(zbsHg?zU39gu-JOQV|?{~We`rA$w&gLpD2oJYs@LJ{wTjmT|e%xZC;y6l! z*CbN~d=|(~yy$C>;)8u8Vpp^L>Ff0dP6mpB@%MKTQrzy5L2b0Lj_f#&Cy&9WV>B(& z9`!*nDQug$X1cU6w}O=GFL=0_B=#CzPLw6(z7G0U6zr>Tzj4Mx0uU!`vESWqFAckC z9}pr&_v{5XyPVl4hXT~{nFmE4zPtTwlbNYU@y&O)DPzDXyT2z_h~3Sd#+dgiy4FCC z?EVDI?)Xbk^HgQm|6UA9s~EtYs^~+iGNNHGoVB6n(i;V+i#o)35*_zFyFzCkbK{kV zb#GMi(wN@CS|7+v%2pZfR3U*LVzcCX>M;(%XHCiAc&x=$vrbj7awGohP+Qwen6?;T zd-3EKc<}~2(NYo0h*;8d$~OPXnr*4`3``x-d~h_<$<(fsxBioBNobWSxcLa<*`FeS zk&$V_`}^6~?hYZJ9;%O;8pyQ4(VT?8pb0?JD(^&IWAgC^?aDms;KO&Wj_X~Jomc!K{-1BV zPMp)#hz0`s6G|SFb-EUrq#ZBvoQs9XKQ>R}`xHK{x$-3i?U<6j^`3Ws#MQUJ?5w%= zX*hXgEl~br+y|F?)|KDbi{*al^e;QjyvXR|Q^NWGLH+XFm1ewFXDA)ulnnkD^}-b_ zhJxMuohL!Se#b|A6e2|Y#9fyXzTt-7t|b&`ho_0cM^6L0K?4Ac#dba`%8~~FnsQM{ z-ynJ+)%)pYhKi}HtLq5=^Zx8qhoW&*wH{HfU-iP$@)4?3#}rqEVh=MB$&T-e5_9G$ zU&>Z#jfge*wMrDgYnke%G*b1E^4ePKah*B>Ab%5Vpo}*VXpI@B4p?qmN|(n+BBjF2 z7-d#hRN(d;5P{bvGFo~zC3`oC?w!_)D%gSoy)P5-Ywszism2U{tv=I*{gV}df~;X$ zi^ertKOxt`71Twd|BJg)Lkk=Bs{lQc9>JG=LXSF1LOrl~929s;*zUrt&-@ecVgVtc zqZ`DdcdaTw@vmbicFp2EWsjWea-ejiEzMB5>!9|RrJVFn#Y~F$=Q!^~-)b%=7;|&mGwjXVF_Yt|hUd{&rxF>7_Sdsd!QGYyVr6LL0WCef*bza(=*`Gin>kl+IMHzpBjZb=7 z`YSup5$|+3GH)xB!no>lFdpT7nY@zvl~m2mnZ_f10J<`6WSb`^j@*qWfaG}T4UQDy zUOgf0#x@fbIm7zT#6I9KW1N?=Ii6gTR7kmeF(aE#&X4!RFNBnztsO0P(O8oJOnv34JM5Az^dR4GHMfX{`BTR|pQ>Lr$X z9z>L69DY6H0ooNvBzKh`^z$k0Jcb=#*NCA53WM%pcncjD;+2B!#R0j@;j*2C39$&ZKIk?O6&%k0j59n{ckS8`+e1yk90bu$%qpxc z#&36FfS~gm1~z1^ie-o2Cm0db#~k}_!PFE*Bz2V!)-zK+La@SKh!S6EvIf{XL+S&1 zCW(Ba*)Q@1D0}5u)9%Es9-V=a2NAy8<@=hH|6nzT7RnI-oncrAyBxrm>VH`PKM|3# zZs(y}ebYTzR*IgQD+}2fYhJNzJ0oSKk`k2#mQr)Cc@GB&c6D1K+qENBAl3EK>sXxa~yRN^c+MozWtM+YgeB$Kd|IyKk)aTsnPBW<$=M2 z1DA)9&nCfL-G68Q1el2j-iU!i@vk4eUwe8Yh6J`s-L@uOjpmwtTSn&Q26@E#)E{bM zd%|yQ51p`mQC^YhNPZEY@mH31ow@t? 
zeUHtj`@@HS&YwX9aNo%Zk0GayI=Ks>=1Hxv`~y_p!|~&z;wh(h=lrl5vP>K# zEUZ$nVgp9qP~0fMDI?d+*em8DC$R!$dra3jKcnKJZ}Mn{CC~3>+@tA!73;lqa5i;j zV95kTgN?FE4`ZzTlCJC;lB$Y-{)5L5zMVgDre5PTKE7Qbk{e2Rp;#C&WWCH2%K$D= z3}2paHkvOPH+-qSd0Lb7%18s9V?WJ2gZ|`*PBSxwA^4S&XZy} z(9HpPahjV)5i6NJyQV9S#U3*h$6Bck%gt>QL`nrfyYs=6`s=3KYL#c z8*W3-UopK|nVH?Qv!noSPRz|kKe~mCGWNiivw-2#-JKW!Hxs{p5y!676Yj{;eUF}M zz+UKb&MnwHrW~I2DtkLrpT2xWl^%4Vr<~qIicpvVtSK3fYUPhgsn9=J%n3I)>S z>-Fc|LV>-i^pO*<)|UB8R;gzrw{u$%pt=43#jDnlaUJX=3 zDE+f6;R~arTc4x2f8o$TCYX}qxJP{x#Sfjf@rYbUhi?i#uqcUYw;)B*!oYTi=%)q*C0meW0#;+{Nog%xP))p{I$f|bGk$bpt{Q}FXH(;WM*{|t%R2= z7}gXMk2kEqFzh|Abu9>wE39mp%_SQWag8$rF<`8vW^t>%(S(nLQ#2v-W4(NA#a-8O zCMMJYFVer(3i!O&25b~)U=9L=*-;z@&8EPg1&j)t?i#G5i#_Q!r)+g>7s`q1zP}h) zX%i%EHnRIsZ-`?slgAo&hfweVrb~iW;X7t5Rch|#|KD(#Q zqVix5pol66O!LsaMJ@7C!`Z^qZX1`@wE9U^BY59)omvsg^t$`yt*(%B-VHy^fbMV5 zN{fi}z)rERYSPT8+kyQU=fqK0)!0Z|sp05A#Bb{T-^xSOC?}I{C9x*Q zG#XQ=6!Ium56)slHsvD4zHxkJ%YWY)_}^N&7nA@R(F*aH$mWF$nWX;Uj^=L0>L!GU zJE6OuF)zlU)?1$5$x;Ug*6pzK8-r+LG_YFw_#GoaKu}G3disi6+lp7ms#gaG$pukE z5$*EsiHlGiuot5MAO{>#9{{KmQ&WWG&R*LH5WRHpeLp1`;Qz1wBrZOBsj;I&v>Cgf zlt10zC2cdKXPj_`8pXxhcJ{9T3F>y~o=1FRsR!Yb_&x^tiM_BJ#>v?|lG*b8r zm=DzoXa0@+>J`MeW#oFY);Grwb}Z0sf`&uCmLv3y`AuRc{L@&==R_g9*0btgGZP}` zng%}$b@cRNzZJdZQ1_(lyn0+r7%y<&W?r2;PG0@PNZ9)lh4$LSm<K2+$Ug$;v%Ah+9O_7qeORVvg=EtUyahmAi*M#2zyfXuZc){^AjGQVj#L9=UjTk<= zQs<)53`((D@5V><;d@h=UC;``~+gWaZKYIUi8C4 zWNL29UeTb&mvW%X=34WR_|eRG%Lf9;3wTXO>_p;g=E^^8)O`mF4_JF5Fe6pm%Zu;v ze&UG?WGdC1AYqVofqwP*qWtp*Id6h+d)RF9_wPsm7lDxgNMKR}IBY(E>vttSvDp2+ zBp55s^?UW)K~2u!uchJk(M0JtkykCh|62d4E$qaIwP-Z}jb#^@d}h|214tLQ?eXVv z)1IRY(DKP#L6n2|z(*C=9R(krja&Zjr$*~!FSR?#w+Xh)XmY=PWdSN!FoUFpt!T{% zMfdLGv(6_}Zp5tbW(Vx-MIbaW_Mb`#@rDkk4e9f;%9iu^5t6e=cr-)Wt`+Eg+{XR*xA#VAa>@RnQ?J2D zeHtW(1Qw!(%EyB425su8W|rp~9KFo3++R@D$t0+-KA^zE3Q^C$OL&CzC|S!B%FoGD zVewa?q6$*Y8=(J(%ufT_PAYC7{=@)oFPM7>>w_*M%;Ga6@upWjE14Ek73+NPy=kvi z4n?H1!H3!cy6*f=FDyxM7o69$2d}hWYf$+rrc=HX0Ai4Iqj9v=Pum!d--7pZ*wEDxE z=hg~LBhM-U;onpw3H`a#@Zm%ym6aTqy_UcY-aq0sJTuT@ln3Y^K#Wzz_GE$iAm0P- zfGYDC3_`}pG!7I1w?1gapJ~tWab-||J@+{-XG)5SE8wk}tF=G{cz9*JE!Ax9uelD9 z>{?@OK_p|()i@RXOnZ@&0qhZ)w*Pbg11Ve%(rREF8J|6nYygpY*Xe0&S~9khMq(kf zaBy_YY_~25Iec4P=omuWa%NzVXu{DNzeAn}zwFBkfU^JA*eF`xaT8}g1?p(Nkp1NK z%9Y0_*x{Qbn60SgvhQoGz*98wIE@hbbn_B3(8VRe26bmJZSeW@|K`PRh1Zgf>S-Xv zZz|@Fg9l@Y85ulqjV@KB>O8)B;{9$}j)x>-cCQbVH9(Yg6nND(Qma%j`Ppq_$YjZ$ z+$Y|HejU4q{>#rvawm|{@EbqWIhFWXZu+ds5HDb4-N($ zXO{-tCWH&yonbzKoa*~TulDsG@D7ZOjJQZrH0k$B5TPG}BSHO)Ba_tvDLajH=Uc1t za+4D!Ah%t5(D_z=>pLM-pSCapuKT};V}5BV((_5-a4~0=WD1(3g(jz@F(e2$NxCxF z_1_)!tU9yB0zXjmrv8n9JU*j;Amh9Um28CUxD!I zXY3QDe@{e5=dp<=J8l(CtT!5oaFEA|C|RFx?cZ`CCc`F3feY3 z^#sW?#3e|ylm{**obDBw``bxFAt|S>r z9FJ_9HZIzk{Z-|^mqF3}%Xfc2ylGD6UnP3zLgqsZYUsCOuYcRWyFPO~R-Re2H{BiQ zgHt+$z_{;J3=908vaJsu&#o6vqu#~E#jn;nJqfZi^&gT4t~;@t5b`Ym1wL+F#|*R! 
zvo7O#6Rx=iZ&l>7#8 zIYt+%4vnKF76J817u;|Za|iV81Z_v+&B_bYlK$P?Ubcu9_eVL`w( zQSM-I^}6|(|FA?;^Vj>rFRq^N%;FowH@+Vs9t#I^Jv;&al5*6UxCjefpLxBBCFl9X zk`)vbbOe;jGbjk6IeC=gV!Tt4ed3>teRqoiMtBY6ePLi;>}caeGV6^&9XR}jfv=C> z;~yTtyUZ#D5c^+oXM4!V%ECZM`5b#GA6F?WV;2Qo*5E-DKsblA6sBFtw=l^14X5u`ijU%y%q#)%2$iUh+C z;exSl7;sOBIM=^(k|cDmpO}Es5U3g`Zw6(*yZ`%HRzf3TX&D2xvMuLobnM-@d z!r+l>YN~r=W4?D|tSZ7f5mv`ifs*~0e3SX@0xKNsUc>0l5HyKsTec3anTXfGG!RJ< zg@uuz-%^>4-8o~vHUNW`z&|dd1>C?D-=l@mT=Ndg&ybog zq-ZE#F?j}M$RG5&(fs$iufjG?L&)g2lO8u)9n@SIp;pKk5LWuqvYTnLw8dL$XkZ}4 zgeS?wr%?%|!TSFipsg9JlUIrGl|9WInD_k9~#fLF{8QBbi};pS9G_ZU~>5 z^|80=_DS#&F{IKcCaZ3Z=x`%o^Vu(w)P9L-fg>qJnl4IE5dgSg0@DR1!bf$8isFGwZOg1H8G9P>(j%Yyy$JF!NMjaa9{i*%P0}fd zrU8*mxu?Mf!z_eABTijBjr|2gi=_7U0&5cxKn>F@rEdcj0@{KOxA<&&Mh#v+w4bnj=+)6mW~@#+PaWMDJP{`ywO;K>Qp)U3tiUfLl)O~XA9IPx`8WP z7)bqqU;uzrHKkk$GDdaDNYR9sM5_AW6OcXy_O?!wqu~&$?`VaGL(p0;vhdx0oi~{I z_Z*7X>{82C1iCqkD6UdGM!+jyb9cty0@pI# zTn%l%t6>?*u<~gCJw5S=1S&bdk&Mh3)})VMoPtom(A0C@pT5zEd)}Mvi08z(}~7_yde#0e&IyjAskr#=Ut}^73%IaB7Ez zItlxm#o|?q0S8F4debOL5s(WB7J@8(nma6PD&3#X)Yii2i!G#U)q?Yt44DZ>_GPT0 zJUQ@#R1;#j;;MnXly{%9q-ABP;6(svCZz+gyyvu`VodTY7E-XB*zq-cRD$SLKaoT* zSFp!#XfHL>DLUZ1A>~39NI<-0u5X$S`ZWIDG#F}#p|bl86$_zm#zM{f>LxV!hslc% zzfL5)R(8Yqe}8bwz4|xe);q>@`|S7>;E`c1W0Ft(Y}#f!jNd<}LS>(O#EZTcX>Hxg z?P7p9LQAaz<9$9QoAkUZTc*IV<>NyfnLnT?2qKu9{~pdgfqcVtc#%!{uX_(*U*FvL zK%!gr=$_+b0pSm>-{;yKWcJsvINE_;5S05nhleuy`U_dS`0LCLrS4t)tAQiP3n5tk zmq9+q0NdW?KC2Vcl{E9%nkn78-Z*3A5)0Ivc40*wd}pZJd`bjdx~PO4y@^VYl?&fD zx0(|FTMIBXIT`%#ADgIkh|BOvUku**2SVa|KdC&yteRlibuz+?%<3=FX6s)KIC!)* zdwRlgqHk0**o=wf>5BMLj4$bpUYQ#4kvCu1x^^sr3ErnyR0ID{;8E+`3x z-;!W`hbG?>Vt6~^e}=qsSqJ?3M3twhN93guRJ$QnIFb&< zN0Io@QcFZ2!_wz3q>{C%bZCET0mQooEjst0&$j(_1Z@dXC`CWfEIFADH+Is^6DZl0 z7uQ`|-WnPLTNw+0Rf-A%z;h@L3SbpX{pi?%?65B9dBmo9zO-z)u}$eU~x ziL)flqOpEUq=tUoXu7^X^t34^ESd`b%^#VI7}McMfU(uMb|3(*u%LjNp!^Mg)H{mp z48$kVtzNhf;X=1!TdygQlKQNPXIhD0t83!h8XWTTQC@DN92>daS>K$2hHF_!ZL3C4 zD}EwWf2m|u|7duEc}xx#r;VmOe!}UB7mxbQl}H6I2Tn|{lrU)hqLrMWo}-evQiK#| zwPPv6fPLK>o%!=;H=a@#ck&zQ=xJHv#95jNe=%F5u-}%3`X2am@FUu&0Ze7I)34iW`*?KWj@jb#AsQ#hm{)*3A|R)zewokZ~>wZ4-%~xMfe?_piuxN-~4xqLM9qA z471JQbS&w+)CB%?b?un#7D{oh^cn6agfG=;YPBCUx$ENzaJ{tk|oG^(Kc-@^L!cYap=)|G1*4i<@Kk|29oDeB-OPF@tq&Hnp=n2$o=`D z1$G6`?|zV%z8+$dKU>R&kC7w%Pa_S*lqu}G-t-v%ZOHa}+JstSr41O7NKxecfe+>b z=6{$a2mfO5rt}a|QZmYLMbV#pkful=D?l3T_%!pcAVw6-a{~SqNhZ9n51(AAjqm1X zli$!y&djLS0!Ir#{N-!FRev2+*so3R8UncS9wQdNVO?@Z`IQgZxE)V83Ww? 
ziHhWS9JeE-F^qPr12?Y#SyUNajI5^So7C`f?J7w;X*M>tjc5T&U0Gb8_@0E37f+Eh zCHT_VKzOmr*oC25GWZg9UJM4J*K%qlUe&Rv1pWB6mYx0Acn>p}|LHin7rNE^DFCDw z95%zG{1D8=8s)dD z&ii85^~*+sUU^TnU+{B=TkD@!iTuj7IdN`cfQ+#)hHu}XF?kSFYKrfgUEFL=*(QL$mGT;@Lxxoe}bMBSccA8u`zh+^{&!jMyX>L?=Ota zSkoxeq*Sw|NJFO-Y>ADlB%+zXIWuJ7&pn4Gw^-?m&z!;zBp1TgYYF9*xW3d%(cfkyHcIl~_UFqNelrh{ zaGcJ85C|Vg7rfuCkCE$mK=s59l|j4lCO;~nkAL_oyp|Z(tArqIjjftK?<>j?Fbe zL|;nCF)bSkqPDn2Zc$=GOwS1_T*scOiE-p>Moyur&+n?$qQVA6V(uw|0|MDR2BmTm z^11gbVb@y**J*e@MoHA8c12*J;S$QPbHxfnfDy{QU>ksD7YUQ|K(d!Axs zp1Zngnk^PY0VWne@O?nfJh`-#2+&sr)on29D!T98&dyF(u87~h-)qtSYyE^cp~tm4 z#!&vVjDWl{u`e%usHHzQXX_ADd%qb+9cX_iW)!(ea>_ogfcmg*V5gH8^ zlnTg2etp#(seG@%M(|o&JM+4dzHb0kUr@QwO9Bs-saz8t@x}I^e!9T z2iq~j#Qa=)sw#%R_mt_X@FAc`^MlKg5p z$e7YeacnA_yeQo4QGfW9UT%83hn~WT8rp3%d7caItZ$L4dOnq^l5a5m9_hYByLrd> z{44H2d4V#|x7K3Er)b0vT?y=2VTE|MeS=$KwUJab=a2T^wVrg?dtV17^x3Dzi6MjE z+a@I;SsPHv1dc;QiL1PR?%-v(znq4ig-T>s-O?0!rqzq3;zG0oO6v`2R>38w-Ykwd zkiw}iEx&hptdY)COXoKftOb(tQM{e$NVq%cV1YF?(Y36-vb9Nyp&qe@x>u@7^1%({}FcH>VcU^4wxZHnh%m$%*0FlfVRZOL;FzT4+W43i1 zIJA>T1wLJ!>w&c3QTZ>rP+{G|cQU%-GMX2rrp&=5;#jHGL>+02$3&^#8Rfm|aXh)t z17Ys64sIyOg4BC6196$`Zg@$JVV0XY<8r*_n<%%u!60AZEXA}Tb1*`y8mulUAHm(f zhH2e?DGIk;wr@P#X!@pQG!p>tZ?BOQ5Zt%?y0>pLo}eB|WdPr?$`9+CBSYwy@u5_8 zLjY}sd2)|eNTTWn(#5Zz%UHEs-AlTDAr)Ng+x9JO7A$f5`l7yb$^Zqpr!C&b94t;w zPQr~hM)vecD$|3{tv;xEp$rMnN~k1?5c4AMkdKEADkTUY&9C|o$Gp192r)YQ5z*g9 zE-0|0^NF9V>E+}d*6C5c4~52>OCLG23|RI}&!yQ}z8#$+`*3nnS#i$&_)rQ;uWn~q zB3ut~=pQ{33WE(7F3=Qz{rVLca20arvZN_qvB@q@$=iL5vP;5e)!IpVH-y>HwTl6x zoAEr)3GC?s3O}@ig9C(8B|02_fQPP~@I&+hpVgouA|ki#;LFq@IyL5b(>BX7k_!DM z4#uHUiucr^znbZ@H+xUcXuhAj-KqIxNp|f%H3wUgKBPy?=80dPqF#<+iLX?4id&yG zIpmZUGChx25X|`;7YMG9v zW~)U>fFoUj0$(HfJ{DCN5{_<1{qSqsfcfO-o>0qE6HcN53w1(i@5zD_{dMWVWpM~h zm(>$*^vhWU{dw(Q*W5R6aet^h_AYt+Sp6BMcC{W`NR^|!mjpr|x7xMRyyNd)qNWTq zzbN>*d4@;fWY&D*z34p_kv>BL(;?$V#FJ8rmtw^jO!!B=j;tQ{oM9_NZEMp~XKjqi z^-c_;RnWyA!$)HD26wI4z8#Lx=j%W{2BlY;PSfH_@gx7QE@=wNZh8BJ;N0r1As`lP zht7ux7=7uPW|bWY>OW3MM6bRvt_?tB(zGR+@= z>}(7Ft)ulW{L0RolC|L>9IVZGDjh_wl>fxy*HKf9F z_Y__n2c8EZ6si#7A0qo?kpLk|5 zY&T|(-+bKtxxSY&mgrR;5I<@qCY%@@L(C4?tXP+yDKajuD>Nz>;OASUvtKl~l5yZ9 zu*yrFwyq5BZgoX&OcTK}i~c+1^_aR$=JyORSxi5?B0X2RU{lpgYiC7 z0wGsDk>{%nH1q{5gx&zUMw67p#Bh#>>%#-ejCllWGZM^CQMrq>v4uwc^mr)040p<8 zellIQss+-<$H!AqB+-K6xuHQb=m^Z&9Wuv(;JX45D5%PE(hQFgu%#^%zpBc+c*3XMTg<{(;d-f?!!xS<>)Sizr{IY^)0F`xc`k8{4G3L;Z-}jtm(TR)r|~u0VJ05Y}b4_ zgcJE~UQV}lqvk<%5NUre;&*!r&iB{j4VFG&I@M*!AyR_cI`U+|r3K@B?+F9grXU5p z{(}&KQn3^S_4QvTu<0c>ygp-IUR;oSd{z41Yi1F~pPj`GI_mS!qqv*f_uun)C@|4o zM!YyZrb?tztcFKwMn@9rtR^gBHu^Z<)0`5Ourr-NL-SF^3f@9f=_FF=7F|c!b!y&4 z1UP-hc)x&b^7sGs7ma6T~b!xaCY7D#9{i%5L2E&psR{ObiWnu>SN1ZwBTc}uL2D~Z85GbDBrTWyn z>aFL~H`G7IsY&N1wCK4p|c}N1FH;gHFt3?~!8Q)(jLw$yi+xs-a z0%!@6V%D=()I$gBK=09>aj^Xpv2Id5aj>U^ljYR9M@m%+I5+_b1s;Af;4V~mATGA0 zj|Fx4^GNCE!DnuCpTL-%s-TCxWD5aK(^8_Io*p7%VhMmSgUj7q7m%Vz6anW`wd%a` z#af*S_bit`TgOJE_gh}E=I>F4DX8cm-Zip#`+o|cZ-khv{^F$ZJ3wk2$2H-en4hOR zgVcw)*xI6e94%b@2|)8-<=T72cnJni|E{R>gw;<@NyJ)-SC=r)-c!I03#Gkz+CZBY zd!T%~++n1%0wWF|DnK9hKK0iCf;6^L2o;HxjSaXVXakS6;GdG7@Vg@$EE;~5J-Dzd zP>-7jlRx2fS4`=p=4RL%t$4s%ILqkJkrm~;%H09W-O3~)jaFUj!z5&89XJozC>0^(1 zu2uDiF0&H4l&CQf)&;|6PHx76Nxh*p!>?kzZh9Ki^7Oju?aHg0UR3f0MM0AAK0$I& zEW78+jey==1B2D+%JA}i>B&#zMoa8 zJ#k#*)<0CsBf`N;HX_hCWzSA`y|&Q4R7n-mNF$hP(kSIz*Uw61%IpQpJ|ndnj`!oT zMN=1pop+n?JKv9M;gr52N0RI!q0vkb&HdH3uXSXT7Y<&1H?Bs5IdEsa**J{vGiP_+ z+5beU;7lCF%fn;R7mE*sY~oCKdfYcZ95m)rbauS@1_zU51SiWP7KWS%@tx@UD5fjp@#l+?o6=Ypd9nzY4g>W zmsRtED*2b(m=G}Z7#I-thF~Dow!c^^3|iC(UzVqP$48fw)U2r-$ha`Iwd1`?9{Gy&I+_Tg{OF+6jaozZ7#FS4;6<{p}=OrN`x1Sn}!VVC_<<%g3v;q 
zCXot;)${^a>q^7;U)sz^X;HU=RiB|W7O(4gq=xLRk17((1&IbfU1rpqFW3_s!!GDU zb2~WW8WQfaSgdTazh63vMzQ?N1q~l~A|epXxrxDlcsqTZJ2%f3C&?w3p5cTjOJ<>; zK6ewL?T0NiE&yL)~6BrxYP;x$_gihMvg|sX4ko-lDtK|c=V;BbSP#)TO zrR;Z?TkbEDZhvAJT=*MbP6|QeYBl)XPrs-uvA`=Zl1e#>{Bz5PonU61*HA!-7N#BS zc)^1}vLKu&U1kNKi|jbzktkrvj8zF7lT2bbC5nOxrjrxc6`p_UU^s_d_FiPNo(|Sl zWKP)vWo;t4ki7Cq4Q^=!1z*W|^EZ*;Ao#qGJjZ40672$Z%Xcm1F)NHhFRdG0W?ysj zB|mDaD4FN3hy=xfU!Z0f1Mg)0DOD<+DS#QEN~Oo+omFF}#S2c->v>B_k?fy9G(l1O z%b3w#P%qw~5Cq=XO8h|_O!pkUTS5jU5?5ZbIkxKGn5j;82<;&TG~w|iB|(Vw7?=v%Dhlht*=~FD3zUe8oJ6L5TVsxkPV>5*~Cfex% z49GIii@$+Os^Pztt9zq?{$XXps4;r1Ow#?&_sEsJMk^GR zxAetiR^y5>{Er#UA12L85NReZ2|`8o7CDcd7b8f=KgH${7SGrOWC0`}pk*dNTLp2I zqF_%L@D?o_sxH}Nqd&ot1a2f)phjyAPZ`inthVDPl!(ZsSmz}>bKeTY)T*_M78y(N zL7Q=h+o205JN`5XxA9q$=zsdj(D+Eg>F`)Ohi2EDTV(KpAhGV9(?aGAX_vtPGJl{G z=8tg~NYT4?i>Q;1^Jus^hyDx+@0x2L4YF}3fF=Rjtvq?EucgCq?~SeXuAiH?UxnV% z;D>ls@tc+Y4IGS8Dju7EOl-qyEnGIS>3BSOQlgk9lM3_*y7wnClc+4}C9CRMp=IfV zb-mn1@YSlgUUSmfs-0v<{Ke0DiwY`@B{SC8!a#|(hv==!`n0u-;h=k7U21@>U`3W$+qi z`iMH4?6Xe7O^7q7Xdu4jH7lQLAG`=pfe+72A1A_4z1V?Pji{BJD)7{@N$ zAom&5Nv3Bh2g^}kedg+r*OI%d)Cgm3?0w%=Gs@KFz!L}PLD|!I?b(rZQC{k<(@mylJst_%UYC2Ni~d6*q`s@;hYO4mVI! zWWzjmFuQ3^EWzbI8Nz^H{3dHMAB=+eBglKjtQ6LGl24m(Dqyg|Ox{=-7~U7i&vEh- zT6JJ7@E8ngk{-GZsrZzXNSk`RP=)a1wm&b}%Sy2Xcr7~;jRo5bC&Zn8OpAg$m^FiZ+y+2aAI%kW--55|&f>s3_Gvj$;m=GXrhXe!d7La7;0~zLu zKe%ur!NjpWW|99ix6@NN29}G*cVD&g%1M{>dA%jJKFYzhDZ`)Ke(rF1=pU$^^Qr2P z!)bW$6W;G(z2lGk&Mjd0?byVGnWZJ7(*x%d!muA7m87^%p=1)SwX2Hrj2`Gk!d6Ne zu(TugGM3UFFG$~7sSg@7NA54A4Ys?W|5>i7&*4^cbuF=a8ZFlyWg*7G5{F1Ztx@2y zGhidO9k>+d{V^q<+&DjvwZ~^*JBVHN1*IQP9O3adl)ZKTGTo0nHiJW*Oe`JsCPO1w0gz&=96rklIoaut7dR2o0SyWJ-ui zn4NY%m3QX)xJ^9y>BE>sTvBD-u)+ppp&I8X|49TY%L4_{kJ0QTyO`u@{z>T_3pE&5jZftW2|{v zNn3c)N+m2P9Mu+L2Go@JN60XX<%ow7NmS;lf>DG^$!s(bULadRc~TR3gzKD&_SUaFpqtxJWEgI1TK=gA_PxMnTJz%ttfIh&{G zYKa@YnNR|(%BxR(7kjf8E_(kGxzc4x5vdpxO1rpSm+G~|6f`wqA>dqan~-?Fj*2!U zq@Eu@x`m^cli#h3y3>&Nt>i={^wP&#tQRxbNa1vI%rNDRIo~s8>3OycPzwN2Xh5qz z8+zO1_~LV%;+2Z0{tD!CU<@pHV>|b~prAlI+XmlC0Px6ppg)$8*vxi$HU8oI%xfqy zZ*QJ5wxlEP*a(q=&7Y8e7WdQGhscP&V@Mq~m-kc{IUB!O26O=%DH-yagD+6*V>CZ> z=JMw=YW+5iCA7HLW}v*sgir;oOkh1c`CEFl@cx=2V=#0FA**AbYkX^cB#tNhAJAB< ztGZ4_7{s3@o>bAv`Sc)2h=a7W|3gf;ZVuaiGUigIZ>CF=13GHq6H9MzqV0*i*%@CT z0sQ}BC2>7U=9jG+-fP|;FHEs=Pk0%nAE$rlcra3;USVwG)>+tK|0vsfFavdX^fZ)r zfa$uwVf8-Sd&1{4@_7vaX(=8#YWQJHr!#cmux0+!f2~aq&-d#Ytzkj=bF^tQad!1YDbOw1))sX2A2 z*pVJJ6n1*Ip5w@xW&&sfgBQ=qi|XO1WH)r*5#LIMcLZb6K;AYvENGcq^{#4Qv^~0E zp9>~gAagLB#n+T-@d?_MiIS-2&!+k7I(CS=B6U^zwX!R91JJ@qztjjo6~6&ii6nl< zIl6$T5>fk+X3y8z+Ir^;ZNeIx0>CWI0A`u2|D@T&cB<1aN22D>5t1Rm^*i%IS^!5Y zYm13Z&8%G`s~;Eb9D{~0@(|?L4ccT?Y4@Q2FC!PeKzp(B*v;KH1h z%H7(s#F7L}<5w1aBXUpoIgPu!H9O-w5&&n-FDy(=O@%n~qv>X(yrBc5qn}0i;6qS& zf?MOkqechqSrh`o4MO1JW{n#@Q=sc+a0$w%&EMVKmD}#Q_F#6Bcl>0&>74+I8^CI*w=CzW`wEI~ zO=V)QaeX*yq8KhV()Uhr*-rZX(cMYv&E`GuQQDLJdw%b#ZWxt%90*>Z7;Fqh#F!zO zrf4B#>1=)!g=y-U zMACz+aPk}?5ck9n$VLEZo}ABYkuETS#Ri|X<`>bQKhu*y30zt#OXv=u3@&c&!ZJn*i;&_82jrsmK^$9(UD~1&(kaoJply{r9UJluCn^bpk^|;+4Hcs{eea>2mw9C}^-G ze6a*=S2%pmtc zeaP_Rzt!{&(njSZnkJQ9Fxz~R87XVS23+=_-~s@r(g~!a%xjWe@{{q(Ue>w{_yaT( z#&0U2C1*qt6c6yf{ZI8wM17x8Fvqx6AB&cDMSw&g&D&HS)R#&KjV1~HVld{Ow8K}u z5`o+;>WoLP<0ATc(OxAc+PsY!P4|-j5By!Ka{%55H|cD9`f5~;7qcvU@v|Jw!qQNI zx1obY#gn@gWz<`FPkm=v_1aM!L+c$I&3B9+yxs%AS z*kA6XtYurT8K4*e!-kr+!H@0#<}X`HYfrq%fTwG6#J!=h5l|nL!2iwbM>zt+`IvZ~ zH3$cpwM6j$*GFYZxS!+n&71ucQU0YICHBshJjHJ?C-B% zy@5puAlKpKfUI6G`Z=+{bW)8(=*^$lmHJUu{7$GI=ik<6D+k8pv~l}igx!^|J?v() z=R8Wl)jE~cNgM_IC|i%1!J16Y`lSup)8Xy)%74)aeVWO%RIXdMnu$T*{}Ral-qCOp ztJtHLA*@RaY9?67H``2m2}fT9|%dRy}jD-KD4MB8g;BmXt~ 
z+I33EQqRuEA@{Qt#SZYwg5a6dN&fYJi8ln%wr^y8i~ zjH>&}^LM2ic5*O5o{Ntt_@D=I+~D|GB}l1y*(UWoN``Ie%6$qKHlZkq)vjJUoD%&TO?v!}IChr4vcPu@+en zc-MCpRE|LdHekYA52LET){GzZ-|gV2AmXQwvG6>;9)ss}oXGzZ*5`6hm9H@qSVfk! zC(ZslYC+znSdID1R}^X0{I4v}g?vYVR*X>AoEe2S%6`Wl&O{idsI?RNTZ~dgMeYf_ zV7UD0@%IJrLME*~Exn(z8f;v@;$EX?c)x_MR4El*_C#2iw8&{lo znkwtO^Vup*mY>5U-%AALeQ|d|S|zJO1KFhjeYCJy{`4-#rGb$0rlGj0ms|s6&W1O) zr{)vjroL8$6|)o!d~MHg(V@ZuW4WH$2oSXM8UDRGIes|SGaB%}8A|+wg!(YPbb0EJ zim<^(+#@V4R^HzHD(5i4KwAcQX|R^Lqv~dh*ZMQ%Ic)Z?Omg-qC2^&r=z5h>$_#10UPU{!^IR77 zZ+R;hquzEBhP4@amaJDbpurMh|0>S@6{@tV^TVx2K+C-Gy6DH0Q@poB2?;boQ;HY5kAZi|yG|NK&9dnR^?DnsQ7x4w*K z!tD1Kq-Ew<;|vseD`My?Gn@2DQ7~-n!#g)IWROTvJY1V)d9ns8m)Gi(DS!JSv%|%B zW=)gilMT>a%CWJr#`$1@U@~_QTiBV?nou_9rxb`=YC!b z40Sch52c`}uE@^Xpe5g%4bC=!DA=$K`x4p7!{XTb#!(FPmW`AOz8& zx)v}fxZV(xbUY~?6mh(8vJgh+3B7QNW+e2d!$5e-xeb$%hu68iaCl;*t2jxeOrE1( zXKQ}$(IhQT)R^o3-XvGr_?VM3mgWh;!}A#qhK532SENf*||zpBKzgs@(g}YDG5d7Nv6E(AAQJV zzz;X{%5Ru@sfxh)hJ2P~T^;OifHaTiQXqriRuBYDFD}|aS8wZuGA~Q3f1LJKq&kNs}jak zpo=1b0}qA8@?D{T=ldr8#SB-H{FC>;#IgHxa%>evc==54VWOw~2NVk3a*f4TRU?#+ z-OUu4cQ=wPa}D1YFNuj3+}8Wi0oG?;r~8cfsg0UnEd@8z%S-9e4C1xDAUV+P>8-unzGd{AMPEvYEUa@%p2_ z94l(C;<$wZfhk3mqnXeuf> zNV`AqT6lMa>GXvhz-H1~74ipSgt=s~!U-j2ym#oCRF1ERPYDf)*F>5V8Md`qIY1LZANjc zElj4*X_;jJTjuyWV<177g3I0Av@QGDGm=W-2T$kGz;MQ=##u1XjLsSxT2{tv&K>6| z4T^7|rAC+2dtb#CYys)Mj#4SWAc`Om%8+ zRL%htM#lmI@BXuT#nhll z9*_`v(@@Kx_YU)I%OKqKzEq}1HztiS$=vp~@^x#S4$ZDj&m?R9W}~veG(%!r{xbG| zXxGFS#K%moSLBT1SLB)G)@p`RQWTSB30j;NkR3TgBN{S4CDc!ixaa&E3ruE-UZBz$ z0mH9dBXW!BC=RNaVsBq#!Gm;Vt{_W_?#g8vXwOKaWu7PMh158``Ge{SoXhz?-U-{lBCp>6bx4N}c9v)D6 zKXwu;?W8#FMPaoUbBttz%%RayG`xtCtmwjM;H67v)!urq%99%4hcmnYSomAmpSr{! z`6{POj|`f!zb~=*=OdrlX-O_R3-xLy=+xXeF4oqdk9jq*KfFtH5GKrYeqltFQZApe zO^T_(c>*~U&Gkkv{a-wNbySt#^Yx`eBrYXLNH<7Hr*yZ3gdid%9hU}?4r%FbDd`Xp zP!Oc0ySuyJbNzgO?^^c{7b5C&o-?y&&)zeGkNL0uZl5roRku20aGtdK$$Dsb0OP|cxv_bu=R*XfdD?Z7NnEzD)mjtfhokuPRyj4w>$CyRWR)A*( zi0PQWsi4ipQ?G*kDXWA-tMTwnjX3;s6wM~oNsZBuOtcvrddw-o5fdkdTu>h?>RD+M z0Z|SWz&fjMpsqYKWV)0lziH#yFYLJ?jAru2u0j!Q3&0b zLL=C@%2~ z!hUG)6`3t&7KZC+?Qh8wvjL@A-G{E{J3KihP1<0AetEMNg&LFUegX)Bl@f5e@~!_m zqPB6cbnGTN>7x0{L6zp;C!XMlo5^jWY~3wyjs$HCpR#yCoDC0L!~blc=%x(dzCR0W zd0K%YF{y|y@aJ9((L-s@{K>*^qXp0*(*pGjf8uk%arErjHmP?6%9mO3jqL^MWsu?rQK`+iT-viArzdnfh%W}(&`j?%B}0{*10`@t%;3| zjfA$gI)OBT_%Ma)iS)T7hR83dK}rukIr>qJO=6Bxfy#24|mIS^u>|N|U!fJ9cW4c+7AYrX3<+NJ(=XJ2TQ7j}188BAlate6?_-Nz{9!$QQV(|Cj>*0sb#rxfRE(=;eI9=e zQQATXvQ%JdGv7UVZYE${VJ6pDsn9aDK_ns52}I9Ea(E@czWtWcZ)0-Q_n=^~7^?fb zUZUYvXcO(Ssg76zHv^$8%D%MfAtQe`!XMR#>^a$^Nbd*LZxc~w!w5h?iDDlUtcm;4 zcEXl4!Rgp4}|Ttek*-NUXU1)Q;Xb?prd$o*Iq`fZ^`6*}} zLA+cG>bgcYZs_a6Uu23UEOXB`e8Lj)T7r5TCE;W{OI}{)d(+ z>A`)kFxs;^>ZwSR54&7$Lw%lVXxf1(O~De1R&?LcX-*N9s=M8+`}^Yi--$0th6CT` zJr6_}!w|K#oz8y;ekKc*eOe&$b>7Z@4QlqZvAt6->WL=I#34>E6c`6{6wS!Ow?_T4 zOZ*oARj?yb@q2@A9t#ZW7aYpVai@Cx(|V^y2XMxHS(&4!Lqnf0*sVA(H}u57!J^S` z3KwJWNlRr=LDKcxu$_=!a1|G#oK803w<8S23mWN{VuG}*2c@5J{?^?vMaxIB3%o4f zgJ3||Dvpjfd7|hOnz5RG0iy5)(Gb#^@zbCTqy#|~fni#e$iqpC{`}Ss@Mct#0V?QO z7&TSk>Sog2*Lr+$tFYg^ ziV1#F|9lFirKOKR@RN-q1B2<{bH?Q_iV5?5&@RcZ$yltIKuT>b!=zZyY1S|hzw5Il ziTQBDlQ(liH8+k!rQ2-%Gle#n(b+I8rzol9TaFP|U`Yvlq#x+G%}cas&G6T*HJ(7R zf9YMTC*)Uu{Y4z>4q^Z7*b+{D^GBogK~Y(=i2SFK8^Er>1qjX-xB%aB)qGvLz4aQ{ z(bDcuRpkurHfuVFmL&MyjStO);*KJih)wF||^|HM?iiLMc=xCXjBzYw#A)c;eio zXatGPxf4_sYu|pEEFo5|WUeDfp;N7(`hpfPVF#!@fxpQfuSO~?-`psy=i_TLZ-P@= zK90HW@IyyIa7+`-%qN}@)VVJs9ZYfl^K+GxQ6Q%7)X}_57Fm&BC4}67BW*NH4oOmg{N$YfO-{tz`D1jvV$MWthEC`kNT?hm=NgF z+iv^Xq&z)62i(nqWvOG@!E{DI^1~TGaQ8O1s#@L|I41-7B4BiT3^HI-uD3*`Z$*Di 
z*mL%XrNI=th<(Z0cO`At+kgY#LM-FwbiuTbF`NbBvhyOO6*3%M)1^Yz*^!KP_B=z( z9IMOEp9k6|MehF;Nr~q7j?)&^levtTz|vy*XmOw>Ypnys)AkMd{r}Sf;Nv5t$$uN1 zeivHEgx=CX6E0@O-a>w2)^XhRn>Jq|&f02fK7ZPTw>9=OGdY>WunW|rpIH0AnzLA1 zs?`4V_`Z?z(X|h`HrHvyp*eD3(DFAgD>_GhX_McGo>Q8J(@a3HTvrC~!z-@{O5BL( zn{477u_(1aH>yXO?2q`s@^~%b6q_RteF_7=%m_ho1)*}@np%bydWE&=YI6Svb&kFc zM&4S!*ZEn^4WiXBoJY<95bt!Vq zt1=*UN)BN7Z4~IT3)BSid4M8{$G1IZL;Ak{@sh{W2RB&7V|z6k$pkJplB;uE+4$Y{ zUE)it@Baj83Gf>``$jUlYXp|3AeABMTkB}>s~hYH+x5j~mo>-)YJ7Nq=AR`24XD%| z72aEp0+<9Wpn%YIc5%sWYxDiB;=)ZsL-T~396UF;n=m(8)Jh;)zmHqzWTRu?c(FFm zkK>-!kI%w-v~h|6G2_miD09{%ef147trODJf-nA*;=*eJkL}quR%D%*uTO@*#d`Ek z{UVTtucihawtvQ>*NsrH@r+K9XU+93D1N}G2~bd`X&6HY-BVIhp2@e^OOul761KyYbL#1)N`u+rcXy=523E}x6(E`Vmy~#P41nSo zEG#=I;1i8FVKn!~CGy6mKYCxWWM0Fc=A}#dlN(aNOaLnow^IH6$$ z7S-uZQR6hHJ>l>j@OlXZR6-ywqg741trznEF2dC1;}zl1!JWD+>6#`yyV5Vd4;5Iq z5(ZW7(wDk1vW_27$!zPb6rEM^|97XKyf`O>`W&IE(Db z{h_c&CcW>!jtmzN`qN(}`Oiz}(&j7<$!CNtqo8_BPrUT0O~wR=yW@DUeOv(P^kEHO4@zM}6i3MJX77D1 zvYZl4ZEqmf#Y_2Vvch@6{)WBB9y$l%$Ft_)Vx*A zlQ|O~KDr_KE>u>X;Lc%1W`7OQ={QPnm1*q`{~YlJ8&x2H6Y!#D016zT>dg|3N-?rD zg~G(C@33kxC5%X*Z5>d%DC!H+EuJEG$Gf8)ed)_hnS$ML4CRKJ)_C&i3Jf%M*P!+Ke%xnwo_J|#)58U>*R&cxiG18=ic+Pc)fsXXNNMEcC&q|E-n5p|17ae zv1p7+=;1{yvGoYmqilb^0g?GI4QPv{O)L%8<9dJ#h2ox^1piRGRN0t6xuc;x`E7#Ol<-ruJiRXPH*PozBC ze&W4sfzU!wF{gZSrm%rv#d@mcH`IU)X9dM)Hf(mtR0>Ds@-@IGai#Xo^%-uD{0^sY zl9skKJs3Mft2)g&0<`3CV-t`pw*)3CYkjjdC^&plC9w)uelJoo(a8&gfA4J6sAF_0 zr_aG`T@*SM3^5w`%+k}xx{-RAoQfXvg$bK02&c4n_PuxDCsfPn$(0w>cL|8xJ$M70 zv$5G9xewMY+B)HqkD+EhZ$9QMmsmLJgB5z*Y zd;?LD1soT)4zo24)!R~umZ{z@a>G@G#|!SL?Q=vOsz#*IzzvQNfe_+H@iaP><;q?G zbl}MhILG4C5o?F|0y2iI2%s?P^-K9quUhh;ARjIpvlDVE(nxG-vH#flB2C+4x*Flp zXRIR{v$`3UQ*g5gk$JKGzwQe&=7JAPaQb3o3e8i2w6#Bzle$5;)QHb^slBC%XL(_= zH*M-n+Rn;rESHDZPGtkWBcTMQ;65}FeBdH z>J7}(a8T6Ifk;F|1Xh&ARxp_hf$d>H@zvDc>5!PiBux53PFVFJCV<1{wSL`6uPB&2 z(ZX!jcy;Lo7Iz4Tf;D_;=T_!*HD`7N?mS`M>|M+KcEDMmnnI@XsZynf3c^Tz{N9^t z!DKp_^5FG*eN zOMDL$`(!hm3B%uVKKJC}@b#;fHe3AE^w~sakKN>)oS1Oyjgh$`j1HylLyZpA8Ge7t zoi$^#e-Xl9l>jyhu)Xt4?Oaj=^ZmY+lwjsCS}|;!KN3l4H&fakMy4#|e&zX9ulm=( z`b|bm3?AU~z|Df|BSDiASZxJH0UiR45kTDv>6`X_usWKl>JsWR9Lv_Vm3{MlVGuOB zjBn*3b_3%dl7V*RQ7WNutF7f$7yGx+1Dcx~JIQod?T5Y^$)>webeHjRg@7DJ&{Fo; zVspY^X9@U`)!LRsi}AxGRGl@g=`cMUnjysTrq133nvV&3A?RK=KUhUu3JzOKbS9AI z>IRKIH0Zt48+hI9`uHD?p^p0v%#=ty%FCQdgUs5?ONd}lf&sF`CQl+JXC(Kz!TWsK z7Xhwbq;{?!-aH&o1x9d%Zsfe-U(9`)RAv&&_YrbG&0{6@T9>H%5u*PqjX=yd%!MeB zL7c~r_ne9Z6qjpb9gKo1cCOo_q-nAw$X2qtvsQwRQhH7ZZkorop8y%Y^pJRTXI6J@ zIt1G(w3`B;&SDHAhOaBSN^4B7R$+L~og9I7uIS)Y5Y$N8S-G)*_sY;UPu-`>EI-vN zvRfhZ$Tq%n>2Dm6zq3+-{*Aw%>x8zJV;}1YWT>wiu*`;=V5>rP^;N5cfdwcpfNACj zxJ+>KJ8)Eiqgqw22o;6Aj{9roR#<}HMm0Cd^xD#*7(acM+j2296YX%Yt-`i#-cT7? 
z)s4x8sXjkj=^jyG`3CftCFT+>3zin{FilVHfWhYFY(pd~U*dG#Agwgky-df>s14~; zsN2^`AWojad3T~r(hr@ox4`0x+zP=8)AV3S9o!I5pm?_OOox_q9gR2IUo)|NP zQE%<;uEL>)py_7&pW+wo*CH9n%KSqixRTh!jd&iIr@zemN>F_K=qkTd00c>>8YMVV zMD0$c+{3~oNvb3+7&d#pPYI}1=R66;MVB|ditfsnCtN{)(}*gy1i}H;Ba5%W;eXyu z`CQrJ>t81fE%f6(hp+SSN}f-bfk3Ub$B8`L^%`zrP$KX^4G)Osh=uWB=6eH*{ITFT zqnIQe^hAFDznVU!=IfJ9aw#9Z5^ta}m5r(`hOqKl!)F+5Kz3J(^~kUb$3Zc{6g2Q7 z>@A>Ej~*a|9#ZOmDOq&Bwv)TfzA>2j9@2>aB3Wq}42l9XoyGadWK>mg0K7aF)aMyy z08CZoWxc*Z;7Yo$XiA#HoIR;W|8PCi#6BAGwEpn`^~76*!StlV5$#c`mdS3N?5=9N zxPZ3Cq%h}O56a*b)`RaC6hPJr3JH=A1a4X`TYFuKlTyGyg1FiD~z zJUMvYfT;gr2f%YKe4yWqnW0?Vjwfu3e|~}pCr9bGAWAO`C0D0VJjiBxWY#c5l&}Ig zhl9;<08#q)ap!N#M(XHux^(KmSu4`QbVg#Y@2}o0w~OkAU;`{)j!!Q9m~H2+=x=X| z_N=FUg0Kw#`2h3c*@o8K;i9X7aLb{+Ly(@9?fLtnkB*MMmXshif95dRVs9n;gBX4O zia;84-{fPRi)C*f(%t4QHw^4W)>WWDi^atipvwy6n=by{Y2_#PHwR|!?z+U@aK;5( ztN+F}0#xz(y9XgC-(|RxrGU`@?oRV)t!>iWS@`lHj;JNpJ*)O)`l}zjr;%K5)kZG~7J>$P#0-x+>Ui`@Z^Y%SS*}?1h z=c$~LH3E?vN+Pc*C;D_RS3?VQDz#}%Q?*31X>i>B@hGWf1Kph7kun3p=19rUW zJ5K$ZZacO5&_^HQVTo3w|GJ5#NBuX5Lj;Jem3lvj?{y`dUfL2{NWVDpW<0=t3EOxm z`+fVV-HOLc7@NvnQF+~1fO|tfa=bXy}7L(Asjd z->gP}5|(2Epg-g|*6k0qDrb#%vxA_i`#0N}7l4!I2m62nRfa^PK;0ujm&VlDptzow zL47jP`$5^C{`Pf=a_JnBVv7Li21Z*B)uc+ul4>J!^IsaQ< zAQ*><8p9#6yQ1MA2@IaB*?}(^Q~|9uxKicPyDHEZYYlD~Xy4-pcP@Pq06KZh)iefp z+9{b$e1;C-5?m&aj0k+aC}1L$qM~Be6dMa*nok4M&(B@qQ*FUO%(+-{uKx+*An=*x zDEXAlj2dCt>$)zYQ*%W1y^mDnMG{yqK&jkzr3V3Zm5E`>yRDibV9UyC;kIf1J6d_h z-!&s@J9FFnqim+#as5&oZ}2<&QvY9QX5?dPRZU#?bAMJ*n2_qyl6~6ZugItH#o&j4 zV3Yt$TB`NHkepWvirQl>E8I!${A3SrO30q0O#cu-nO=fn)Et=gKL4(xU_F~dHi7=W z5x*~xvQ0z$8k8O04ds&lz~z#rfM?oA9{4E@g*bs`)MPoXQ4qELdSQ0lh{H^GLN!%J z1gB~5(8Z9Nbrmu2vmwz+4W>ZuZdHtkm^d)UJ~z~(#wol98E1u&4vatuEzJjx*kM7?c@sHliGC~*bwnc|9}kwJVdt`EDWCl%{dKM^i;0nR85 zDy+5L2!@zf;{V+EVPN=oqPXH_F_?q}ez=og!3o&RX1EDBcYkq{se~D>I5}E-FpIQn z#Q|Rb0K^7(yWhz{xEg9j{DANWsQQ08MQfrlvtV(JEn#L@dU~7m94!xll_Hyu=M3Fr z?+tB?!SsaZ%_c8%c`&_bn}(|OB%FGFttI%=pB2Tm6*b%@f|BX+j8VX(SKHd9zV|$t z=g|3>YxW~yu2hREy_si6hXf#t2t?3TzkBx%KmyFR{1nK@9W z;)8inO^@D@DXnulN0D}uqwdXo;gXX8Iojay$pPvf!;uSOKvM5>7@cy6yoPK{v&tZR zX50(*!q0h2%s+mIB(>TB>}-8w(w#~y5jXQod8Ow5xUj}C>TBjLHINSr008DdMnrr? z`#vuT)Qx+rzsg+cgSMIwoUMhZ-qqsxj~AkNX;nl&LUZ}guT}wWMkD!a^yo4^_8td* zG7fH0(F`#5^(|NQ_}wd=gFJ0!9KW;?1CZ#4G(GkkR-(3_8X>y6)lC2BL3bJQ^kBVn zgp8-}!DqGu4=bo+C_HyLziYj!<&U|~VH+(szY$0;#62S@P*?-m;*JyJ;np+t8a?S! 
zE``)DeGjwsu$y`Og@C7_Wb!Rayqk)6`Yp=ehnRGLx|M2@5O3%m34s@Xd-Ez)BzZ#^T#q9}JpytUQS`w~d zYN6fQV(0>7*cME>g3&r36q_fFYvI-|DfK|mxPAb>vr5-JeW6riT)tl6{H$7-sVO3$ zcpAWLAQck!NYEfz2Et3QTMiilga`26HBeZhY?R&wIh6#)&Gpq;iyGZ#JgL$zRPp*{ zF%LcB6N@`sw@KKk#?o7gQD%nc25;-MHc^1LoRfq9q#+@eQUy-DL@#Im(yBS^$SV4( z!Z$eS5jV-AP+J_lP0YQ=1CqC~1Bf+^E#%`Tp+>SnFge7cSH$X_JHxj@c7uGPFE$fAPLRgs^P5O{(Zp|2O80P4&5rxTCk7Exm!^!DLm*`LwO*ukB}1RsZTA2T;2 zwF5E8Af0>wW7ua}`G+>c-&~4Iyx9Qan!J6B3=#r*a3|<*3~PD;K~y(goIGS`jRI)p zmFV%nCX}(UPhkHEI3>Yz(QW^^Hu>+TBAm;4U(A1kZ0)r%hS_JU&B$!i_Hi-B6-8(U zHjQ3*&+3u}v(;~R^I=%ZFI6Pdu_c1lRZzLt(S+B&x!9%TPg8I>_9~AxH*WJGFDa4K z*2a|t;zq#ecXR}Zswz(pXFUBxVe=SW0;m@u1g zXg3VRsUu_?ixaH(u&-r`d%{?wNR3qqb6G-VABON}h0TWhz>M0yf3nwHmD=A)AWx*P z`rv47Htx!$roGK_;{*Q5r6Ku=^vb8bBP6QS6@DKW%9~&Mfvmy&DPabx+wHI8SW2DU z%?Ok`5ddfVW(>WIF((0PbOE{sFps{S^-*d~?8V}Xm|u@yrxG5{>S1vF?3?x0V&a+|ld7yX{5haJ zkTkb!A7zYpG(w1B5i6>CjQf4cc6 z6)%y#O8U>NLV%;0Q{IZwaYm2*mH5JrdVPq3448mrAwFZFqCo1LioWgw;uQ|d@1XqE zUE07|Th2Qb$Wsg|v@eGzSd&$#?Qi1s^ut%M;fI11q?JLZDyK}XY7_?8vzJ+}H;hzy zg+zNUnDoq8h?mx#i%wZN9l**CZv*!;(2YW{mxq2!bWdxk{_`BsGLi{yHQ}HCv!o3d zfZqls|6{?wwD2n=0AQ*%M5M`Jhk&tEb{@D*dWHSP#UJ1WO5;~^z_k0*$VLIDYEEu0 z5&*pdq0$^Jhls#%22bUOojAm-nfdVg1?(#Z=0qegtg=|Ix$mmQitegGdVNXvL|>!* zUKe69&+L#s&B*O#`aMkcMe*gT2Rpa@z3=%vM_uI3z|B2H?-uoT8>9O9)NRaOPkYJU zvUDyXQr;M1Ka$)`IJ$z-V%3t-m>s6O-?MNJJL(0!#<-Dimr6?n(n;a{^fw~yspz}X zNyy`^YN>@WI=>&CMiWJw%R8s#l}foZn?ns&VH^Vjt4sDlzQJi?w>}U_Y72g05Z{9~ zyw-1a*v?HJ%5vJz{fm3JnX;8Y4{Y|&(s$QZ-Mm?(a;C8n`$$MN2=jDEC zt8OvKB4JUiNX&Lplw!(fql&?u8SbKl{FVmngh?j>X7kS3_sQE_RvbO8NC^?n0)g~& zM45qwhK&9jj*O9E0v~?M930q*?`^yy6}CejGy6o7KZ`3Fv9qDE8cIR{le4i;=+pO~ zMo4b@rl=P2q_KN~^`f<%P+Q zl5ikj@`4U2AR<#7 z>^wMVS9n+fp1W~#acknhw<}6B^V^jP1$VD6GftCgyBNXG21#zAj`~ZW4cSqc>4Qmy zEc$Tk1iFO^)X{iuyQ!2EP}Tki4F#&etoD~Zklazc@$6nxTXDmXw29~qHS*y1+N+%M zk7*#sYAbYp6vQza>=&18pMq3c8Y8APL=V6#IwAQ!mP6lzA8j$fdc<1oHyv5U7PQ-8 zKq7KU+B)aDJH9!ZW!<4OFdn5lXOO0&_4fr@Pe)C*ljUlQMfF7H67#Nx@A#4-VgQQ-zdMA~z6>+OfP1mQM>3@M z!S{&R*B=a57e4NxL8ujNZ8`2`nOs<~(_5XIo$UgeLp4?+_?j)yUIhgA_@B}KtXE<_ zXy-3itBh%O{a;mFkh-RNL>3N>THpm};A|n&9NgZzmgozzAcqDc;)FgucPJY*^Wqr5 zX)>G#o8Cek8zY+C4s^Q6#bAZ6KfK-^)|cIvk!Q+>v~%eP;zBZraXg1(dvXhJBHuu) z$jn}_kD&@usc1;djsz}?rv;Wbbh^w@zNS{iwz{3MeBOr9P(*StzOtsMW@<4{1d}_> z?s;h%D*D?{QhSJ#U!zx1=ern$ABFC4@?--ct)4Uvb%_dm`||%q;FR(h+kT3GIS{^nIaNW1=d8V3P#`6Go72LFej7!pFT|1|E9rm`k(&mLH*%?EOorLWu2>YS9lBS)&UZja?HQ)xx+LJ(~ zIsWos8`B*>qmZxRzFXDZWak{nL=>*8tL9w^#`hP=55wQv>1oosy1M6JQ&_HSw5g5F zSL(G}AT4^Ct=0}&W+1RX7cvkL;dNBaR)avmd%X;JOf-^Kp)vFlxXV#jqk43MzouKa66bU3lWrAb!d}8gGL+5 zC>D%vW!F7bKqq-}YEmjcswZ(qy`~~f(YC?ck~O72u3dB#rAvRwAGN}JU-gL2?KO73 z`eP99)wE2Kzuo_EA;(H=fDtL@k%_e7!IXLeSLP3z*B_9XYZ=N8GLb*~$GUmNdCAp) z%K;5f1VqYO-Pdl}EZ6u)Mf(uz^&{QHm;kFl%I{yXN*JbG7>3m8Q~NXgWgGLr(!-yh zQ{t73yw3Tz%MN$cBnMTVC|mV}53+aUvJ(0nSNFZ){f(2(Rg1nDJl+XHq3}e=(Q*mX?AkvCEIVbH<=51rg&w zP!$C7&Cgq$Z$1FZy z9^mlJ?65m%co9-r*LLGanZ^iCO5w8CTRBlevO9#4_S)I;*8Ab^0s*q-*r*}7GymPw z#k}Uv@tzdL(@fhundR*Em*`ulrIO-c=Z-w$NeU{j-|X`kiHk>M;*ZHHoUdFZ0iWXL zA(X{JAjNhh(+3SN+S@?&dxiUhC;Pbr*#?$XDHX({<T$};C{KTje?**l#->OZdfo`S ziTf;s`{K6sAbo%{)6-3Vi>8L*t6ZTUE(G!g9rvb*2heBQTqs+Z(}%GshiO#E0XD z{K{{9z`3o zEUl{3OMa4Q(|jZ6yR}+*u?9!e1=2aK_*fKFgUVO0Rdn)RvZ-CxNNtde|!`ZM9GBI=j7@aq>D zh24x0x0zq?@v%@xF~QD`x+%@9tsng~#RJp>81Gw- z?*xAD=+{#_I2z;leyWj=oTOKfbcs}ioC!Zlz#YNADvOjXe--)UlhmiIar1f%ymO~c zxzx**jrKt|jmyGFs+NaBir++KEp(YMc%-bSNVxp<5xU@!7hxy-CE`GQ1df%H+EkbL zd~w}x#0-~Jq-Z%Imp8J#mp?+eGAQ!fU*;(5ZoQ0*itL{rwIpF$UorlH`;gBU!hqK zD_j0?D2SL9p)(f577gVee`xocKUG+?v^KVqqIgyO6{TpK(+~PFanH;@1I%Wv@`T4u_{qze^2TCQ%T%RY?DG<^EyMU8)Azp!FF 
zFzA7rgkDna+Yv4d3s&S5rc1Fi^P_NF-`BL*VpO6UYtrWH}+Ln z>F|jlRfLAFMB6)pK#FIV7i0Ypn=dEHIqI%`cGsi6`B2Z+qpYq4;jX_K&)sh> zm~O9e5ZkUkwpSdW-=5pYrkg4X#Egi4|jn(g8Zkgt_aKswn$dKbFPHM^*t-qURe^2Y*1guAXXtN7p%KlAP3g-7d#G2{s; zsU*O1H#e6n(P-oM&(M$gG^=#K%JHFANR+;24;~xS240?JFiP=|%)W2U@$x5!kH2YE z2+)LjU2hcvK=*`#;@77~#M?&QEjoXp9{MD5yLpe*@!U!A)(?&P{&v z)l-HKNk_aJZM!9IUl3+IIyFu>x=f0n?Oe?;fnNnaw8WYdG#pEYyqbb{qxbhQT=(5o zT&*|gqi*O&xv6I93sogkTtM=F7gpnRu|G%ZUzpsrLL+cRe~3;6hkMsSyndb~U>del zll6R8m@q1+J;cwVoAK=i%(;@~v>Bgt#mDn|i#i1F2tIKp9e`>r9EIH*Ff7+p39H7) zs&a>l(Y()nO&m%|0P7K74C6e>(N4C&v=X;Nrr?UkT`uv7j8t(OP!B+qri2|$S!4xL z+`Qc)gb*LaWujOJUF?9~gCq$RtDmUJ`b(&2|H0D@X+6^ zi3J@NHj8pgYn@(Qe}uwwN07=M$qn@I)L4D{aCtlv<-?s`s9^p5>jbxZ=gHMjyqn#& zd+mhQlR4SsR(u8zs<^I5pMpbV7^}7HWi1Lteb;D_w9{63LDKOD?-p+nR3~PeBibNEje$Svg!ZqyxaoqK)?yPjcB%6cs#arZf(*4v zc;D{!6)gz}F-V_eSHEP_#mI7WCc?HeO406A=q=S$lX#l2qW?Zg$Sx0 z_p?0CQR_I`La!u){=qqf{irTv+IGKCZRT~Jk2s!WY`C#aEv_k9$w?o2N1sOT2^9|~ z!E5mR+07|BLDqiG!xQ+gZUMQ0I`0(mrd~}5$>5IpPK%YuVXc?Y=EDbgIRdtQ)x(#j z0`U^4)WHYb14K2t7O=*1;ffl78;~79dbh<8 zQICV6`up6ziyrZKxKhD6FZgvFBtj-u6!iiUlL7_oU2pA$6R zg1f$j(uCvl1I-2}oEY}-vEI|}AiCBYITD8Ggrmp``jZnyYKdL3JzAma3anh#1mn~k z8L>VyAFz-v0S39$A>lKaYqd|0LIpg=Q z=4w$ydR<^);*ufNYFolj_bnZy`UsPjV%8-asl`E--jT9;C3gPKV7WhhP!wko%gQ%! zR}x_2CL7De_m(zzwFh^C@$LA1lvIJAFGp1FU`4t+{$ZRh7$U9=S$4rD+{xV`!ZOQM z_#OdJkEXjjvwD%D1&w}Gr24wL%DTEBkSFf$?ouqlMd3Y;iewrDJ6D3hLhQzW2>~qJ ztZHjpuqP=S3SWp9*HcYb~R@jh6FV z+B)O&sJBvB)KU_q_Ji-u#kLa@f&azBtO=cz^BrI6{o0M+foREUdZBFcHv%+|sqU%w zlq7YN_;lAUUe^FyF`IwepKA{8?0g9lG_aqHg$Von7TUiVx6n=gq537K>m_<9^BpcM z5SN69L^YIK@MxXd&rTob0FfW8b}0WeOfLq9Rp@Y_<>|nk#lCI$=Pj4en;NEKd&3LV zOoz8&!gNl^pEvl!S)V_T(+~0&l-nGiE*fR8Kr6r6@K-9PB64c%2PIek+T`GB3@>n9eS!Q z|EzsJ9j^;2Mv@|}^bPf!m2B<#m%RN9j`Th+Qe85o=KU35`dDS^fw%$V0I@9y#!sd) zI@FT`-qzZ&|D(}~yWwy4Kyi?p8%dH2$(Qns+uTwNix;+>Qv1yk@3u=TmPb{^z&3QR z$VyE{fOAV|tRR&6(ep3@sW^RjUoaY9VH@uf z1f+w9q>#GzBYUTe^=wq5Am)eKiTwGd=TMNjmL?~1hg%%ct_ju}BC0mN{x#R^#+<>7a|&sK+$R&oSACJrMF#o`R}$@oA41MXK98tZ>%kuym_PmxU|ZTgc)AbrkN+_cwp?AxQE+nm2UXygfb9!%G?R0O0*$_~B>v*e ziLxHeSLNYY6a+AIOMji-ce))a{lf5w9h3M}K1%#VQQKcZXGtERIBPVG9r3dD?9c^t zF7z2AsRMCw^a)|7EsygTBvwFMshC9B;qiMH)>9DGv}q|cKd>Yyz+J*9xEbv$(i~_< zE&tv}Q#(7HL6HkGAn4pFu(P#Y%6z!W`?qE`guGHWQ9}(8^3L>ad31Y>M0a|7?26fTN4sPxk~V*QTH&jm z(0dJ@p{m%QSUvf?IkV8IZsbN|@_28ap&PS$y=Y3WLy5?*W>OS1M?9dk$u9H3e=C?y zvAs`@D=BxnS=D1!RI2-#U|9>5ISeYB)|>E?ynJVI2&#&$uvpv^rjvO@$CSTv@;dPs zwc*Q{;Lk)uRhE2N)T6?E?;6&N(-yb+`<{B#p~Zbrdk4s8$cCDT{NyJn4?xTGQ1S+E zT*+mrRO&&fR+m-I&pB87KH(3y+xB{+A_qVgto=b|Ge*Po{d@FivHQ#R9^w5p;&ADf z`|F*_nVC*7Fb_N#@X%g!x5an^l1IdxvJ=oi^By~Fpe};6Z*IOf{9SH@`^$ol5vlym z@Ieh7U;RDpVIL04{Qa{}#y-!qNZ9%C@RXqfB3%>r*RSYW&yviA?<$#7&*0sc@UDz! 
zy}GV9*YaWo&edWE&U3r_`;zGDG42ZaP;49=aNB7@yK}!QFF^3!(W&GM(5MILAS`Oc zylmO}AAY@B$CFH}DJAK`gQuyPw#S?CS4HwrbvgVn&EFLb_zY95*t*ccu%xdvfx7RV zIW1^nf-QyewBZ+;T4xHYedFz4(ykdetxsVb9Hwu5+On57x&0XWY5-J>-=C_K%6$QZ zzJ;1E^a}IKq!fDSbK5XsdVicJ5D*(lF+}Ei05AYm81F?ig^1t~mnwLj0M7}5qu|(# z>iYWNvNAR@5f6fYWqU+oDyUW9B|D(V;QIn7C}gU1yZ-!9dU?elcYSw7=e@`qiX1a} zk{Ye)e$&a0@0r6oj5T>$=rS@;Pmy@}3yHuDRf74b(;LgOj-sVNc1ze)tjjUhd*pR&Z z_!)}6{_K=@WCq!?4D$`$V$6h?IXZQWSU+AWN)aOnTZn*A>E1LMK0&4hYIKPMn!-P= z@&^^q@^_24|ER7Gd-o_F7T-&T0C*-jEsh?8V6U@K#*+TVU{6hG$#!DJh12?)_5$CV zPyN-AHOvgJ_Sc1Kc3_5F`k&bO&;9SBbLX#pA6Y$}#K@Kp#2w)R&zNmcsOWb4RogLJ z{g~Y5Q5+0t7em>aPjv_aGkkDzQYKgczYHjSu*O~pUim@%ZVuWg;j1jS-;#Nfmz9^B z+S)$;4+{ea1sVh}m0!!s-nhF@@+DiixDbMcWWI~vRK>(PCCE>)!z)KUuLlk;t1D^TJ}$!?+5 zP=5Mqw|Y>Gy>ztO^J1SWK|#CMa%vJ99iAv^Qvkp&Do7Vf)uQ1fJW^yXw+JXiz%>M{@x<8|iP5H?< zN~)!%O6f=Q>ri3(OMl{k*aA2RP=3v&N>S^ASAoc1T&RtDUc0L z{#Yw|L2}{uyK0Lp72L=pZ#3pmbIv^a7>ZoYWFmEg;0qCWw0&Yr9r)SRQggNH=d9pV zl9t0G5pnITO{q=!d<`%q?~d1TaB^}|oUX3Y~;2{dsybOh4Ob}@U+ksa&V5>m7oo42L96PVuJ zh7O)HvrBNGZ2a3;opTcP=?W~&{k`1uGhZMx59@+g*$Rl!F>Wo1W074i{ey8OZ?~jY zhe!9SqR{f=(VFbc6%-oNcw)K%$8@458J+I#M6U2TRO?fKQ2~ZPl#>icH_@l^mkGnu zTh5Jn6mX8PV*pW0V-oYPsGY$xMdc_{E8fxr%i7%C~PHk|49`(@%T z<1;?}{%esya<($VtvAdzH9*i0qVPmN^hCuloP0ygK+Kg`Vb`6zCMc$TcsbD<3PWpO z2EIizG7PG!oVoaPRB}U_q71&T?mrcANF(1;+_h~mzR7i_i0fK$^8`@#hU*J)0>Xd} zBLSxSPFX!*l0Jcg4{+@!9v)3=8~b~E+Up#EiK3v`H+S;h8rvZKG&n*LxbJe6RP=ZV zX_(pWBvlm|^MUR#HClS@VS$22&lnH)jbO(7#)QxHo6rg`iTqu!mkj&D_dhqi?z^7u z^LZ%7kK1u@B1Yr6+JqJ+x363qOYRxAyg=lR1Tv|+v*?*qy2c2Rf8A?$H=A=dhs;nw2SR)X__gdi^HD377{KJH2F$SuU<@)Ip@ctrs%ds(zgVYP zce;ZoSA~wpJEYHM;emR+9)(hTEKy`(vmF=BxtPxF`@jxlpz^Oq%W(c*fE)_W4z1Oo zs(C&|AiJ_4$Mmd(CBAr#SgDj_geF^0sL#rcH>$dqUH!dc{`u`SzCS3G z=kL`baoX?5cC)l44$rv z>I#LENKr3CiCjYYl9k?Z2Ga9(F(~)gleafRTCUGC5e{js*2tfA9Pkcw**1qUH)Xy+ z($-spl=!+%nPsS+o}OGE5d3Ym>#da=$PAMkcBS`;W7}uhgj|zRFrnk1J9Td%B5v9Nq#G3-x>o{dr)K{g;&?I)3X1eyPX|(ObwbMzC#eu54*m*-X!Bc z03q#EPe&Aj4MbE-OGYrIr|Sqn5)iZ(F}lO$>0C4jFhFf3Jvxh|JUN9H&RwUIUbr1J zshXjiBMMJ_@BIcd%Y3tP8Na_K#l7;Vb(2(3xY(T&AxK(NZ?7mw)zo`i?t}FwmBL1n z%|j;i1p)Kdq0W{EYrFFp8Qdm3y}^7b9W?#WEJ6av)8%X!<8d*o4-3JF1W1=Cq50dn?Oe014LywlPf)wtOj+FoF1z=UQmiftW4LOqk47NL4gs>2TtrZ?@yU-`%YdI65 zv{{cgZ+Ji6v-b28Ew|55T$wB(+<4CZS!Dl8_L;p%N{`fyglQZIt@>rX=ecL%*5R|@ zmW4a_zr;Iab;v3(R;X&z7h=H$=k@NkS|*HrDc{HpqEV;Y_li2ES@Ep6cQK6UUwmNCBh4+}*%?9$GyCjS zGvIYNMtLrHJS7Xlqx#{{~@K=Bqfj|f73CVa*@SVWd z0%8w_=zHydJ_UTwpOS09CGetx0CLB%K%HHCM@>~#H$xUL+VYa*6DgIucwx^~)YUC~ zoA)kiaUmt5t+iP#-VRt*!^is|(r7QyS8@ZL=KA9BeTO+T!>OJV&kJ^-KMYs4PImhO zqyi*v=uYE@FWnKf><8MsF1A27AWn}Msf=3^5Y#R1x-p(R4&fHi3HgO#O;K%&s?LTW z&?m{-X!lgeWms@5{|!i7Pp*?yh{3>+XnnSdq%2j<-hjDMU&L05JFWEOy>!DBgV%da z`E0`)@=4YW5tFx2wOPoFbn+BAJ@WRUoKcZvmeSCaoXspLOkpS^W<JguKj6r=EqY_6EEU&Z! zE*uGExU93{;JM_}0+~uK8khZGDGybNNpbJ0*^?n!Lsc8=3FgH%e@mzb;c?x*DQftq zfnff8?7RDs`CSelS5+>SXz~}i=}vlvOA~uU#!OIl%su#on{X8}%5`3whhpFEo^P$dBDT9Q@}|!X&ixQJF6Jy)iTntdQ5#C>ac@xpNH0f@ zK7`f$0}dNet!JLB>QXzT#=||1=W(bZ#Yuii0%2j{E)~B@7wFN4)blGQV#xr``eTqH zV}t(Q2@mPxt13ZmjM0>V2?-6d>6hN;1x2QY5F;S%_%|=_Ee0oXSQNiZV1bGy^+Yhq7da|F9elA4-YF$R1EAl3dF# zczpXkJiY0$<{LV+4?#4fKh9|E%$iEoS;F%y~4uO!*+fEO%ymtQ7I_0r~DAsCC> zIKpNm0Az5Pp@yd`&@YsW9WS8>0<0>eAkHz)K4b$u|Mpden))S{jWa|i`gyviQ21YowIiH;oiK8SJc z@e9FRGwnzRd4}`9Daz`XJX5BJXnGS>8;UkElXz3CZVnigiRHFHz^*>oTp*0Jz1RY6&QZ{RUBa#Bl|~@>sEmRLIEfz133?F7gVqZt zEp?mkOhB`!;obt{B(;nb! 
z6gV_E6Vw5RDiYrVfDz=O{{vnFw1b`nCGaQ%Dry~gpLAH>IGZZf)4g$K#rYBSHDE#a z&D!M;?4w6FY@1x=)f$Nss+>WR^&$-J$^P4lZUb8xJu_Y`d%J97+2jj@2(0esZ^6gP z3ImwipubM(Rr;Pl=clvxQ|dxgWJiH3oFg(tu|4LhZ%OVmGf}YBKMh-_7r(TosAh}Z zl}Q+Epn9AN_VJE>7~IWelVC)L(Mlt=NsGes$S3SkKE4dF%OS}VZihY2tHqPdW=raR z4pMnA>A+*Bew8MFbIC~zCleR{*VR)mNL%|+C)ua&~(j%x)a{*##p*pZ^>vmG(whjA}gqV4!yr0(*pdyF^^slic7}}P>;1`E`E6=SjdpI z(dA^o_I>Ki5)Yymr(MpU+RKYA8oHs_&x?9ob(79`h>^5iQg1tLu(fR&qoY>hUnIV=M*6r_j(~NZ|sIA!m>( z;E(|>|Igv|f9T_r^!jvuv9siQT(wXEkpwI(#>kSgcU&1tly~4x0 z$1iXhoCWhLpK|axAAUy-d}-w>``b&;oc9tF+QTTsVXQdMns8##ylOiN91jfc$|jF2 z$gv&XqeC0WZ3Fp3>yX2@ofr@zfpL{BHj+)m^Ea1EPbz*IJdoKYYf^fad^KjkVKGbT znQ~IJ;W@71y}yhFS9(J+^iPI*V*J=MGHa2)yy6j85q#{S^EV@_tVXrBpUz^YBHOb? z;{eYj+sQUp%g{m!%W9&1oVxH>IMo-A^UJXCg&Ec@(C7q8-lTbAJiPtOS}K-pAAQ8M z7BXNS=K*%ykdfsiQ^6tFSr`%txO2dU`thPdz#b^GVEbXr=|I|;IRsG7ss1eY2w-aX z(SR)Y`@&oP3RPjYnFzWTWew$pz%*-bW*Qbg+Mu16d zR_ol#rO!lj4ZC2Ifs#W&+Yy8@KW^Dq=;T{u^<#+^W4BZ0P^-&VkvYzcXdCs9({`cG z$H*In+U(D$^Q}L2=b7N{r5*3lvr})rV2@OS=$dVEXeo7@XTS=52ZGO9C_Ld0s*XHb zCw>6D(=rTsL@+L={ln~g%t^-ZO2&85_*GAac82}toM z!^Z}4_LsjhK?g^8^0BnCdXh$C-)xYf)qloNO66NvwFz<=Z_zHBXjoPs^+uN)Ddld{ zp(=IZI^>9^=zy--sT=-yx}y8l#AzN%lEN!Tcz*n*(UN0Qzrg=+bU-V@IG@0p0+$p& zfENJJft&>+W`m;)>U~6g&s<(;dIc%1yjytoS>=~6v4*M0B?|`!pT^vn;%2*j>e19r zgD08ebVl>+?#_LS{F!D_(8I`?~D!?isd;|BsKHk_=tikR=}rT!SQnWs~naJ}zel z%P3s1><6-tG5dZKQXkj6_}DEl7a-1OPt5!iYkJR6x6(sNQY}yQ(+cGX7WWvAKe!Wa zq6W-|EOw+}-P~_Sayw#sEt^$Y+_BP#*E0VtPiT@ox5rgzNJ(#!9E z=R6ikAZ^MIno;+X*{l~t`L^!AYk9{vE;EgL192u#*Gjv+qde*9FSA=7Y~1>ZcNGi@ zsdL|D=vo#4Oa(CJK^eRa9(8|$uW6|&Yt93qd}-L)SXRxQQSLzflGe?vQ#^p<;k|Y> z&ZxyS8O&kkx(&KXgN1`!M0lwQ#-eG$RZ^LPAG1U!1U!nu;+T1|fP>j}?{pa5YDT{b zCH3GG#*aH{<6llvdK$yv)9qJG@)|!FdBjcRs>^oh|DlZtM<_Ce;bZiwo`#XxgbEu= zv{9dshiS8=)G9D3)lG;SCm95tAZiDt7j%1Z zrp|en+n~R^=tWkhR8FRU3{nXf%i>225 zl32o&B|5{Muz1&1R*jZjEJlv;JsjB%@s;kBPDeJ_=q>TPT9eS0EsrH!zGH20K{Z~j zxw2XKMR1X063!HDSCiB^EZXCY{Y%U0aK#Y9h zX;4n_4~#yq;!P7#NS+2DS2!N@Uo0(tN@F`{Rq0OE`Gyq z7c=taPf44efm+kRrz?sQJ@5s&6ol7OAo&M*%EOpF=QSx1Agfz{C2HoI^cR5~q6W9R zTr6NxtzCRR0D2d*d<4Es;^D<%evmt4wf4QkPnZ7UmVK4ZwO2>?kY^9`Z_e*5RMkIrzn31Md}AcKM2Hd_?pBP8Iv%hx3zODvzyJ z;G)i0hX*s!0;C0iNKap14OEC4q?LPPMz6DhmkKNx(7<<|b17;QYTe{RKz=X@yaP0y z`!3t0y*Jx#^^C99ug$8Nli3DWVojyFA%mUN@lA!zD5H07V2mFIqjf;c6(s_8PCw(V zVa?0Tp!rFK9G-IIxsa&sFj-NKJ~vBdKP!N69v*dZ3$hAQ z)>Xdf-`=z}?9w^%w<(>x6e~~1wi*X|-CZvJQ*t4=@oej8ku%+h(UUl11G9xL_hArKR~3tRWRtFCf@ReYCI4NL#^o+hoj zFJl<}XLn%s1|e7(HV&jHkVm4l|G6wo7%Krncouu0y$cAAL-q4uB^&NSVm7sJqe-@5 z@PLLwC$aVQrGG9l=yUzN%9JoTzB;hha2@r^d{#XQlQ4W^fV(HV`TH~rH|Z5H%vgh! zqb@wPR{j4GN63d?8yf6>zz!b5^NkLZH4SNZR5_IKiaFB7I#{B!?;@g&=J`c9U{MT` zlO?s3uK+Xje@<>GDL~r|eomFgd#8IQ$KDmAn)UJy-KP^1w=XTJl5BmK`F{4TzUOYs z*MN3&syB{p10x6e&u~grh^lwLRGH zNTqaDq)qji_hrx}T={ki_8UbtBS$m3^xJf3{=0un->7(zC<#1QdV2RiR_Kr(Fqnm| z-eRc7Bur&XWhq1Y4biUjw%6($&3W<}u7$ool`b_2!|nt1U-90_=&5EGzGQx5>rnzt z)fjwJovpSY&O?0|hHI*08YKnK| z(y(9c3rorcH|s&^G+;q@-wtzvvmi6D^`UH)-=cn+PFntlgLD{N+J5^_jT{4zBZn*) zK@o`c?LNV5d}$LJ;HxR~=H^7Gec*tC-o6O}hJu2EAJXj`o#>e9hwT{^P~IGI?m$$BlAja#6~<5Bd1Z*#r8^g+ znSb0wtj<<}ukH#J$-+O)MIog6Z~pn4kKLYNC)`MIN6bd`ryCYG5cy3&=Jbht+zqG# zHtCPiE|N36$u_nUQ^yPT4HTYG?bV6DxsTOxgqCXzlV?PLsNrvI-J!Pzh7k}S@KnkD z3eJUKG|tXWMW7tH&rl&InF}A0zo$kXy=ABS7#D=1+-<#Xv9bXqEJ!EySa$ z=SOK38=5JmC@$kL2iL_4L1e4W@{7d(Ty(7*JoUZ7kX6&e{_l-ta?k<-d#Wt60g0Z5 z>xbIIWwO~O~94Zogs4iY+`m642Hob3PCX8L^q>6dYouT&SJ0Hj#a@Er_(8? 
z^h&LrcQiLQrz}?aGB|;bT+Xea{JT%h&gE}uqFydaQ*!}?UrMMKH!DeW3)$V1ke{YQ zWJfoiX;c0Ak3jE8Xf-c9L*{s6{xCxaA=t8eIEL*z=+= z;O@A$hrYv1_%qH~*f>5yrzNX=Qsi*;_*phiLKn`Tv_BgbDN7kJdMRpe(CBV9JvG%E zukD&drqMtoq75EANBcf{;qa(CX=nTp;ZN#hYs)DFe%(Gg2RtU?NWP;| z-It|XU9;a)Kd-tf*ZfR5pN6~4fU#4ta3mv*d?6lc+_rT;9(0E9e3=1~4m3aHde|}T zRYUHGbT%xXekA{n3I3kk{dj|yh8nN_94S*G8UpgF38=}DJTTd-wY$TrlSH0zH8{Fu(O@DcobiRKE zo--#D%|9b2Jx)JtBD8#|^op8ShJJBVyaw-6RpMQsRm`%Q%o3bJ%s&bkflNV@6xgqn z9Bw4%+`?brHh9QsGxkc~sYr6c&@?>LS@ccwX_Ts_hxp&wAD6#|Nb}PYO~jRf9qR2& zXTZrpuh$n?#^dq_$Y&Tvg*jG6SwtC9Z)qTTHPQ$^{tdoOqdXxPOk{T>tS0=3} zia%Cmf1Wj2*jK8$C`RlWBh1#hsHx;`E6qJ(HD}Ik{3h) z;oRBs1kCM^$34wL#q(cFyDBUnFN$e*zrpk|R_a*C02-#)KDq^Kqre$?Y1n-afR zV^_IR9mB^51YrUI!%#mn)>}`#HG)$Zydhlb(RCkKB;wMZY%A_MY2sZ}87~S~nc;lC zhcf<{$9$@|QY49(p3w{(JjU2z-$b*fd_@yZle<-^n~I;l|A zu_VR$iq~!vs+CV!cOG&$DgwJELZP%m(~}gEI}`RDcvsag86)K~s_LZ;POF(X?sALB zvW)IWux&;L(ck2r6aK%)`*GjW_|jzvZNN zcTnCiN^Jx1mTk_TzUNaS2&kEk$kuOYm%nvfWIO0FBz&Y?D4CHudANDzmsc4sm3Bu3 z$n&H55;DqxpW=E(bS~9N1f@IiejA=Q-E-$G=_}v5=k9 z5*7iqpZtZA(@S+T{2KaL^sk*2&u%YIt6p=>E|&+BjCg>Y{WWzLRB%H)ZndR?aH>M$ z+8kj8Neg@u0qQ{id4iBtk&nt~sjY5o;6EIY1C11NWzb*-MB=e{a#%LljqS^96bj7|jNm;oWm4*5D&Vild zHbwOEoK}NhOET{VgJYT-#84-w>&eiT2YS`5ub+gK&PhLC=yLo61>^Q`ehrGG0=e#q zeHbrOz)MCD$J!$?#;V`>uvoNLhT>_3*7-EBtNwGZ^`MSs{?`j2=|f!c^~suzr`#Zc z#7^tUwPnnB?x9>5lfThVSEib|9QlEAbpPkS;sl4j8cXXu8pf>}l-@Zt4%2cLlCHty z;=O@P^Lm_1+HH=NE$|7RPPkGQjfH{AhlI_Ed}Z#sM{?5ZBs94 znycj+AJEc*n3*@*v7WcBk@Xc5KaMGH_LCg>Wc1CNiRA|o?dC~8_%!=_Q$-r7yuA4N zHibS+mW+o@wmGH27#37eOAx==W?1R%jK{yw*qRpVV`!j|+2+zgglyn$Mcl0WA3W`| z(fRUWPcU`vS#w}dkPkhm;*L3~moCz>%wEc(DC9>`uxnW3Nvjg@Z^Ag%MwTdAkKzhJ zfruLfULJ4Hi!e#M(1BbpiS~&>tOSyKst0J+0vAQY)ol=F)eKi0D4(iZk5ehXQ3fPr zb$1amQ*l@D#vbiQ@Ce}q9v=?;fepOW`@Wqm>Zz9;mjWarZ%=vId1=Jg1 zgfsJsMBBA@dI-39_^sL|rx8IpZm<{pB3F(R;lj+V_u`tHQ&6L$~TVBi!h` z1PYFv0ThSfUS87Rzu|1N=#!45?OWS@!VrLS4dp@Lk%EgjCWAE(PL8mXo>&+Bj7iyU z7wi39&5oGW&*-748gUj|0z z>YgXKQl?u$^Slg?1;XpQYn6X~!;b2x+p#9uzoRmrDF7mUXwFRg-H_MLxF0j;t_U~4 zv?kcmMnjGT+n{!<>*2Ylss3iI>c8T8pw0pHW<5OSXZBCpKBM8M$@>mw<~VI{U8No% zMX_K6$&RG0ane>;rAz_u37*GwzT}mNI=nw3-zzmj=8xZ|f_5|wk*XPGkc!7JWJ^@^eMHArBi<$Bp^q!o(U)>&Hu0(jTK1$-iuyQqfUFfz z&=ZUoim@u|d63loYj*Z0I664io^SpnoH9X+aQURk)s?+uQ5NBa<&C=5@^t8P%xJK zbcGsnP0xvi5478>f<@6?@u8Mmw5%_sWTYa~+1Q-;mPJiGU3J~b=#wT#y+zX07XTK?5!wtQ!CnXg%;aeV0iBc| zJm$om82>eXD0{+A2bM7E{fm6-T1NBG#5O?|`!#`3l$ty?dl#kxR$d zg4U6hzzr2;M0fybS&}wKo^ftOCwvZf?{(B-(~Ws!3|?QTuA+0HZsP|G;?XN(;sv3& zD4j`L1EmQisdU9e+YcyKuwYd8I`}s^2J8Eh{nwJ?$^u}yNd`1EO zbj~I!J;N$#z^P!TULN?xxUm$N<%Y==%KbU=UjD-kfiltjX|MT|BV(BIfFrk0*fZ+V zSR&AhNC~@`!p()qiKcq_oq(a+-ggxJdl>lsECGuUKU>KQx&U{2w1ErVsz*V$8{H~3 z+OZ7lkBBxbLjrP@`SkuF6qZr*A&_#=7Rk@t9;7d15ySD z00o-V=L}MCK@Z!xn%Y`sWZNw@IK?k->pP@Nai;TZ)JLzp1DdcxNj)h z+_NaXcp-IJx4dR?_be+9^gwz~I8P0+%A)kMGsS76G{nlvOTSs%34A$s+QNjX1T8Kl z$r0T5lF_hZz^_Sv{vJk7iHmp_ua19C!C_%y6({VJ1Zk5rA*hJ5oq2jL=Rvq3Cs7{h zfms-2fa{M_&qEwz#`E$G^sp(tX@eG5!ERqlpFk@bKb;E6Foz{!w6=fCvuD2eufLBE zJjPALjt}3(Tv7cJSH}NpyC!_f%sGOa+>O@mn**5*{dwiW=_jG5h>Y~lfCDtz;kOV} z&WlTKwFpD|l!uVV5n`~nLLO@1HpFG>VeRokSeNL=YuU1KnM9;QC_UVQ?APa?9j{6f z@FdAZ_9j=Q!3ynNbd{28f3B-OEFNP`H#nr&DrHQYWDSs>{ z!iMAn1rr;%&mR-;kLFMSg$x-p&tE-bzqNQEnljSm&=y4HCjv1hszX|p{v$euJvaDp z{I&-+kqo38ai94YXD_FzJ~I8RlGV4b9@mnZifM;t@XcurjafY4n`%h^w#XU3{8v<6 zWg}2^)F#wLyq63>3W7m>bCUE&{<6`&Pi69Uxf}Bt37CDlNezsHIOi-hgKUS~t6rk+ zh2cX0ngXW!5kx_{eaFx~!2A9_$+U->^YyRF8f1=Si zC!saJW@<>mfC(crI6#=U<~wSeDk9Br&r$}1PNF~B&)wP+ErMab#&wKl9}%-{6k4;( zp1ECkgzSh~yL{RviF$K42)HWG1m8X6sMXOkEwRjDxNrucv%s6AVYbc@hfTFA{=+mK z=}sK`pjvjpEsnhS19`_>BTuxSU%@wR%2O3ZA2r4QjhDSOEC;A=3+JZti4K!Y*BP-u 
za%?Rvt!XCAg3rETu)TO8LlApfw^oVeO0A9gc6t^Qe*b7s_j%kYeLRj?`m_ zZMWr&j8OKfNg<{2?n!k;aPY(m-T0zq<3rzo$C?s^(JfM36{}6CFT*V|hR!I74aMe9saId))$jgR zlTn1@cxx~)HwU@K5i&oIzlFebaa?=XZBTeCm_pK?a~jmd0nCgA-m!kpWZ_j`tMh-2r=3i9YZN%YGi@gw+s z8!N&#wzI^*4KlcVSZK^Fup)wNM0fuE%rL(P+S*JXI(CBx?_EXa;3@z->U1;ZR<+335c(qHsA$l}SL zk?~DpW^sOQY@9lB&0+V7nO_H;Y|pZMrKodV`4x7BqUz!&&gvPX&-QbsPG$f|n3|eO zSS;b`nM9khP8vr4^|SGrgkm}vfEUTK$S}~is2~XA*hV=zt4p-hVkLZY4qC1vSf|JA zLSGrBdY(LZ$XVed^9|l#IQ|W3ZxvSvhpgE!uMn^nXXIA8L&9@Z`te3A+V?m*DoNN$ zJzqZCovuxXem_;8JG)?8!^vopBS9$vy)^~yY!jzwJ#n(-^sl+2{mqO{($h6k?|C|5 zs+sncyD!U^GDJUpR0Y)Z&y$lHtI4Q2l0v#riT}#Z!Q(RL^#Jz}=)E`U|2u0kdQecm zIiTu2`3qySqZ(dh2O)D4Wu;N#;L#8Ff~;HBc_&h=oH5zgR3T3+Nl!ALypK42_R(ba z`-60=*RLPAVc#HRIEDD%D`Hr2B_w>WT3b-~I&{WQB>0c%=?nT09{laCt!GwN1@{Bd zJiEWbBu;v(T8(+TA5%s%KK%_+R76fN%1K_SzVyMH4Qj-@8z$dgnGi5nXMNqLTnKCF zu%|DMwjyoltvGo@`NIkd(Y5B$71+$tYu z>``ESkl%KDZ=fpS)c1$UBzsI@#fI+~A$D4l9+ldnx zUR%L`(3gdDLrb+Gj=_c6(fvnIrMgW=m@4f4_Q%>VSfTCRf~=ZO9Eg@8ucDfQrWmCg z4gs^AQavcDz7Z2FfP78Bd;lnW@RJC_2tn{JRK-Ta^UG;AapM<6c#$8RlICsO6kXWJ ze-N=AmX?>-e#zMOjObzQd6td(BJjtEHz`)BX#X3q^#~R=fdW;~jIU-?wLDNs>0WS{ zxA&`6ProTJsuw5DF6fuPr+s6(UH6GoP>j2RB*-Rz+}C0NeR%X9;TSDA_Ltk(+?dK_ zU8V>70Qv}Ih4tORn-$SGh3&sNw9my2^J0A}pvZgU9Gx!%Dxm2%AV65*8mppUueTy< zdygJ&7L}&Wt0iD=4wS+pWR8_4>+8)C-xJ0GsV`V3O6z<4Jjf6GUemB0kG{q4(x`DK zkkExlWTS<$Hbp%2Z;?JLIQUS7C85$b4NP=DfYD3OKFW3oXPs=D2!C1^FFqIQ>uJ9$ z(;Ui_cnj@1$@^)?d3y6J-R6aoMq;-GXgW5QkmoX!R7h0SQoiiK22>RAB7T2R^a2O% z2DNL$*lzwEo*a47_B00u%t(of(3|9Ehh_t{7Dfw$Ra{GU5QWo*mvqdzViz(B1YtLU zS(?Lwrp5ZfFZUtkmK7`U$5nLFXmr5dCrH92Zi)L2+nnPqe8yR)+c?o@Q8T%y?h$*J^n zQ5#~oU-@s|K+7wue`)~KF)eiC7Bzm1jTSJ4Gprq$#ArHfwot{lSMqgSGT{a&O%>U( z@=huR4o%%1_8NAUbDAUB>0^qx6QWorjZGnzlUaKhJIO-X@&pytp-0B&Iq~u!m^;e zFl?dhEw9p}C=7JnaP`m1~w4HP{<9y1{emFM~6qAtfxlgh5A{DRkA#E&uFuINPYMHTrp zyWjy2M{X$OGrr^uSmu@05xIvBNTg8{2elp;rhTdhTe~=3uQka`hqyBRugS9~1YztK zu?paQ!M|VUYQX81Q!bazJ9uea=}Jl5Oft$psOib$dq+!^AeI7ArPacv`vL2lDhxEe zdZvYF=ZefS<|VTeS2JFFdHEB@46}1&Q{;GZ$`+x+yp`06Mbk3Dw zw0}|=`*14%see*4&+?#UCl3ILzoX^B9>(m6xk0_i^nl`ZKM3sbgU9-fk^G}7=h2vY z?4svZ391+T<&IS8VX_rDu>*_8AJRFWkv%(`+4|ZKhBYaGHex*Du{d}#_uswh(zrdz z_s)fN`d82-ld^@R0PeWL1pLk{-3Lwqu-$4H|E`>tEF;pJoWED zk-uo)^aPQ7v*{(?u`fGoVoznZR-7VokYgBh9a>JWS)y%D1Gv7&9GsajvWtBM(D;1E zqCQek`TX+mzVyUs^=3)d$3C~BhFryH+AM9MKbA|dj&ssn@A0J9?_|lV2am3phd*7WS^?UqU*Z!O!dP`Ah|YK;Gm99SrH* zjX&urHOt&Bb|tYdPpBO<+%U^cC^WOc;htvZ!m}%jKfa~aZTvUlhg*#rgI&1EFi}$TEmA8F&)rUX^ z`dM|1Z*`W3Zmm9eQUo;~b?VfU^vfzm^e<{IGZ^!)P8U1xl+25dnONw!)C76bbot@x z<;Ch{>=+b$IlUSb9Uq1CkhVx;gD0#B}&Rc!35Z(Q7B z<$cUBFrjnOQkCYs`S)=9S%!)QbB4^Q%lj<7D>2VfF#5gFP$+=<=Uqkc#1O&nJ#IZ) zrnBYeS4fz@Z_H{>`RIWy+_;pJ+9kfCm{W~!yGNz(bB;`FHr?9Si{y~DHtwzO+n328%651DR0-}5nIkC-}Pwc}ZM?e$h9 z`aA8%=cB=I=$oY?U(Fp-s%`nYM(4t(<`cU8&2SS z(DDJAF$D7YU0>BWQP(YRqi@tYB23*np2ZGnx;MPOLDX~_KZ!;i_lnGsuVLj|4+ha_ zUh{>M?Q*{eu%Zk9lM!9K(d-X~Y$jimhA?X~E)bd}8Og6)&v0e_%4SP}kb!+j;K}qo z)N@C{D@D)DGi#Pp^88NtE>g$fV3oFu^RSeY*#8;P0g<>F2k|;`Jaeb;G}L0&MDes_ z?BlSmA*~Ekff=!l)rzSdzzl)W@PF1JUg(vfm&@3tCI8P3QAV|^5=v1iqTea{vvOyrP2zXsn?aa z5UJH4sM4XWqRQ#Sc~lr(u)hVU@iXdpfHjxzE?NIGrvE6QvS^l*M}oDiByX5eWM+H( zitwXzpAdALlX%LGqr~SZSv6Q(XVs%zZ8WVM%JEu)5zncwDu%4;!1j^xCN`DaWwASW zW(Xv|x923?**lKLvvWbmV)rd_`z5i#scUZ-nJ42p$>FJcth40&I;hwYe#k9?$ zHZ9Ukh2oprf36=Td{Z{hwEHFjfy@gePt-U&JTY)ha&k?i7xdPy(3hGLzx+B(e`6bc zsQ$iuq>^==;*RNA_f)*3t}cbm$J_Z6UDvIZ#hgdUn<{2g}NrfBXG24k&tCx|;%bURy4fT8N5NGK}0C*LkbP zdsyidJJnF!e^KZEy4Z{*FIQ_%iSg72?Any1KKf6ZmFKPn={wt$z05d~*@ea%qWn5gQqi zq_%GC`dif!bsYU?|GipIqj&L+(3OJ0y;TRkvb=dcOPz!8=VVGBcR9I4eEp}x&BJqV zuTh`=EKrV<)4Z0B9`VNcoBFy%<94rUMR0drr)2o3XPNk3#m#J*w7G{;1(#R$DK(C# 
ze+8!YO!6IN;eLK|`Ht#S+}HXQJjHvMH+X3&K~s@126^P-^zmWKg_q9I_a@DL-{QkG zr#?w_w-yK0^`?iYkf4jRUG~P6W9y#x%8(U083xRkaOU|VG->PUv$rYm+FFR=7|%a< z>#n9BJz1KwNYB=6y_;9xanXQa1V=e=4JlHr#O8`d-I0&YVdwn$Hy}EB{YUs+r#>DF zi;QmJZXVZ_JS76&pM|^MA3kb)-kqnINb8<#;5v9s`qZiA8zefJ~I1 z8xDay!`fcAyR$gJY9aNu;o}9qYFAnG!}Fe2?I5a`vfsh(X^hi-^2bqM*&y{#XcxA& zJJkNcxhp8w*}ricJbW8s&3O=dQ9X3^Zq(%)vHodKDaX2?;%S13dVOl`Y%%KE(3a!r z8?E638O1p58bsKlzGB>M*_8YZw??Hnx&BP4@$kD3^8J}a%@e=h(EFY9{pNC)w&<&o zHneBoB6~v7V*|8&OH%l4PTE@Re#|}0xrM^No{QM4Cfi3DI;2nRWu2Bz*gZyKj(yO^ z-ADU(o}cvCv600-9HkYIp|Yd#_|z#@yOZW!T3Xy^+xi!~r7Mn1Wrh_*B(@ghzlikm zd5qOaqW;ylu9n?v2g^?v-+JDDA=V>1pfc9k5ns*VRnD^dMw@oO(63ew#Q4T#HhQK+IEI&xBq}F0yHi8n7>Knkeiv1YV~=XM|5WpT zy#R|{*M7*wyjfwxzWNrn3ZEui%%f8t@^tQ=ZJ;e{3bs<-G0yF#bO_Xce6&54?p)$I zzrHWZ)gvomrgS#c<8ZdAiBp9N@wIlxGG2o-dcDvVQYFOYCM+# zy;cUPk#u8rB~Js@q!_kj!nQ`2Y`tc`$ed3ld0>V?okrAt1npXK)dFc?MmK*XCw$TF zRR$-Y$?Q>Qg}`)=>5_%`fY!5v)(P^U2H!5Kbqhohdj$p zo=EEQ>wQ?JqezzS9R1xaW%;hw33O&p$NfOFQ`wiIJC$ibYx>zI+XPC|wWP}TnJ*U~_oV_j)mzwiC|I~7MVx(ww-8f&e zjr^TntQ-0KPKIO6jP=99Z9H^6Do`{Ik#pfI9z*3SGAM&3LLrBWT_KB znGAZxUifVxJ5z{g@PmBJF{Ls!O}5eod&KWiDOaQQ@qm~+YvzhS^_`l2ulY3Z;;p-{ zly~7Gi0@+@!IKVuQyZ>+?qIgICqY}s<4<3lW}o3c&sa|lb#CBh&TtASnA$$kei6P{ zS0E65U27&T`6Lh0-@#TkXe}4{ICwJ`EYa^J9hdtCWqFm)Gu|d|D>M|93iNLT=x{f< zT8w6Lm27>OQ04o?T$GS00lBC6zZaEdM61H6K&7Rrjg^)`NvTAinAr^JUb%AzPIoNhwQ2{ zdYPhS?k@dC@`}}~5no@R#U|T|FS~^=CmJZm9oc=4cJGTc3A9T4ivavLUgl6*?+&{3QS@{ia?vGQ94Bf+`L=yc+9~ zT_!VzBSb6B8Lsb#Ry)uNUw>wP*!PzM-|;8OK4^%)cAZYfb);<{P7W|l+X&pPPqMc> z>`xb5qN^+nZhDYtXxf9|8>maS;MZ)SrEDWok3cYBB1X+q9s{ zFj8MI8?HAS`E25|VKhqTz%xbf*ud+~^NiSAB+napuElSjqVNTW4_EuGif0nb>5zZ< zZS`No9WQLCZ2H!9wQf2FiN%+mv#cb1f%MnT`Ayv#AvCfMVa@EA=EbuI4+H6^I!T-yA6@Fc0b_wz{L_ zRk#(TTj>)E%e<1FxKVoU8Gu#;A)PrJHLsVJW~SeE`a@=BW=6zB2HiFNzl-|Z z3^n{THtfdrp5=1+fq(MGmHA&|Ir3LS#}v3Jq~BbJMYeq2+E;zEef3;H@a8ruSfZuc zn4^8TlK9T~Qf?O?2UyUd?(g^AmB)pI6HE?8omHT9D;4S2^EMn}t8NK8lYdLi_|Rgf z>J#%7r^KlObpvt}KiL{osIXd>SZ+rUB& z2BLLlaLbB2ZasPgAV+_OWNHzS(wjw)95*xj1FvePSTcLm=urUwdEv4|NyzKh<5Ng%*-R3z5C- z3{t6v?8crYw_)r{WbBotl+@V8sK_#7%f5w3Q)7&s7)xatjD76u_Y8AC-+$ow@tK#` zt51bFbIx_H>wUei_jS&c!uMmgDjqCr)C-SOJY|ck{rS_XMh}xJ;FNwNf)G~#>^V~% zU)B%sGY|;-0%kTU_MjUNtdb&LGt#k)6Z`&8trq#`GwAji7b@z7!@=RrgQ<5BrIr>w z*Lw#BG{alqa-NxTPM3#qbE{v85fqYqZcfhlOYxe7?mU~rk!C*|gJc4vPwYVb&p+O% zOTV+V8J61P9!lMrshW+TAd|2F2Y*ctoIcs$6|-F8fZsUe!xeeQ`6GQi#vY#>FZmJn zU)@0YP>{;xtq8VOJ!ULN312|=)cIVYBeSc^ljp28L?N6^d(dM1vi)a%+cS2hG`yy5 zZB1X6eU;ahANrpM;kIXL>V=fZ=+Zd(R!Ky{U6%byf11ZUnsN&o<5(9~kwM$E)Xc)! zgc@21vslyn8@Ib^jG8sxP$pYV9SdmN$ADe<-@qI7&tJsMp-!tQfz(*}pN*3>Cb6#P z=~)jXBl9#sD%`ekUw?kgr(+BE(4I5dS}kh!cMbmTbJN;GYE}?RKew$l2HS1yClrdh zb97XQTn;t-(K!v0=VnETB70mfP2JAK=kk(o;9y*vTAK3^Es1 zO!WvB$f@p__VzR7U7v?<7Jh<6b!P65x*f~}Txj2dM9VE$WW>)Vo)M&ARcR3>s_$(v zx1r6UVAzQf;IJUU7Z=*wSiOARzrPdw5#6$)W8#>*?I2nlJVjK@J8UKm>=?Gc_Qf@trOz8-(7OqiB%Il zZ!K3*@2Bb$ZYN~A={~`j=6YvL8=h99`{ZJ8Zcmraf`h79g#ppME2aMgc_+LpKqXTuG_RG zwtFxbt4v0C&t6)2Jot)p{H*We{-GODAMYW$PmRsr`eKo(z;UMRMwxCiO?8a-#W6oBa&MEPuQR#;T~ zT#I~gmv=PIt`3z#@ibtUt6-*Wn$WiLhYa>C3Z(Q>+O+tUW+&W3%mLdjAA%|2Ce#njGd3&(kg2gZ+~i)CEy8bID zUoGA|rHfkscojaLxJoK#eG6`~r}dW&E#zmndIqK);uQPqt5)lr`q+$;{X=qMnIg~Z zpVrWlj0UMeEFbJMk&{yF@x9vPtu$9;A8LhnceYTf zZ@`uSJCi71c6H)Op%v+i$NMYH6`P1(q!~|{sZ^UIN5Fu?^34N?k&_8v2<)+=gIC!3SKcU&_`5v(2b0YuDc2)LI_DL&Lf5KdmlhT@#NwCmx-~14(p{^3t&J zzm&s27mvuq;5Xb<A#?37vRl$=Nmcc4`R;~2cnBVbS#@-TuZz%wO0|f5Bq(Q>vo8- zr%SRiB{WlCo8pHP=f-c&d3Jj*8DhrZ92A0sa|>UzLON! 
z3E)iVuk>~J?Cyg|VYR4X2Y|(#E5NU%i6ql**a^=q;;M(8-a-_KPUGekzbxMjP8<9&oT5^lfgCpWX~PyN)?*HX~c zq2}so+?wXF4u>C0>VdRWJIUtM4TnqAovJx591mCWy`?PNNDSGdYsMST*wV*T{tv=1 zuh3IlPow<-r9aQpEXUcL*|Jz;(_Wo5`=uC^IMqhu-yDmhx2g&)xvT5x1tk&UK z=t_X(W;3-wPs2D7Ph9-lctlM%UA$)Xp=r2;?3JzambP9P6iZ2R50sk~NHQ2SK z(Wcw}p8Kp2Wq>8==P93fP3saOu#5ba>jwGCBDif9$y0x`YHAW1_^#=<5NSF+1O7Iv zU&z7Yb&x?cPY0{Y=6i4@s4ChQ`!aMVEOh7f64`r|lWTEMn(Nrz=es*Q_$vcK840?+ z^a3j{I-#Y!{x&`HlSxCjIWC~PWHy-`|0vDDcd!O`5jjMRlUei}oP=(njE%nG?aaGL5?EBUOYfJpkjt7Rf;{3aLeY9uaN7_@LXF$O9O{AG6~F(5uo&f z);m%&0tAmx3->EEl+HHa8n>~~r1p#X$~m?VZ-BA*_1QJL=SXSVBGmXXfm9w4B$t#= z{-vW1h!vC(g=M7qo#VbM>2dt6xID3)Ko!oXhNXv-DiqN*MBxLXpH3W5itR$B4;H|F z)#GcmIt)OHHZpQsSXkJ1ZQ;{L!FEb2a56R~24_qTW;mEd-+wpyrbQ6$l-CZ&RKXgq z#{YB;La)q$L+90e@G9#B#qBmG-|D%ce7F8`X24y6m&94wrZXP&C#zn0iebQy@A75X zoY8JK7ao7pOgX?ZTWBd7L{>zCoIWk> zHv*~MZ#w7pU7++=rsv0m`U*8b*Pv1S&J?P71VPEq&u@ZxDE=-BZPi`*$Eh%%&AG-i zmiMjm;~UfthG($J>=|ZGOa9w1dArfQ|M3=_ z9d)PIau0Su?laJJf~0C!N`S5N=jDa@W3Z|yG@4i9$Cn=f`ka zki}>Kw^z#O=<(ynBOtF6ufDbc(e2W=4wtgKEOQ1qepdFnm+JO>uUbT&@2)$73`-Wp zB}(%T7;zBKj=a{o1tp{o#THYO$zG%ZxMAB}g=5oGF&5%q5Je_tW@azjrMwVhF#}V_ znN0E=);t=INDB82eRI#@gT?T@P!8P8aw)@sFQa^m2la1NlX43Pwp@?;m7Pkn@+R@l z+tpgWfvm@mFM?A_Id%}>=e+5~7y$eQ1OyI5iEplCEzY8Y+8$fq+{LjFa}l9E9JK!j z*`|a~Nr$86u<(M@*f2j*Jaslqi|iKaO?0|!nRg;bn^3R35?3LlJx znpyqOG4X@gIqm(D0EIUlKPx-_kHQPa62^ZD>;k0+@RC#E&2uL?;}qTxJ13Y3r#^mQ zQi7=p=&qn7liQJRD8M6-D!6T*V79QMagCr!=c9l&=Ja&wQ&`4SxTMKWZuFQ zZP4h48;S0@mV6(Sm}=^t&NjqNfbi-;0uOB-)cv8e8t_$XTtE`T{^sY55<23=@yE?1_yiQWnPM6 zUAnECPVLDz%46z~rN%mBu!~g?-Vo%%4tk3%DD=P$Mo=p=59X@}T+WED?S>h{GV-*E z5o%zobRzY5UlhnKB7M)(2L-Y6UA=jn{&3_w(bT%`O3;jDYIaRDlb`A6JbNj>H0|z$ zRBO>9ILXYR%w-^gNK7>Gk%d<4TA+*uqVg&ar}Dk{D`$8f4`}j68n~x?0%$qy##Rnw z;KR@5DU6(~sU2OZ94fX>eSz8}9wqm@^hxz^s%jd;&GErLx|Eq6bw5u9=QA_U#|$-` z7}P(-z-kB%_TB1_)!~0)y-W#xj?=LjD)4RvM;$dxOA znG`Wp?IiT;qGWypixNpb0327}NVU!_%~J?@N~_AD%D!AMN$g&$BmkFIeG30sA>}pk zJ}~n}f9p4nI-fqauRak7V_20@{(n|g?t>^DP9~#^uOfToA6vGUCYB_7z5_{;O>RC^ zjCDmS=Ab{1rcEuSi*8YRRk?$T6Kj!6fln|Vp|1)&)RQtiNE5^Os7mkYBoG3@Ml`^6 ztsnA5d{H`m-Dt}OM+=leWGnR)aG0lw1f3V&F7SStW5oPkd<(3XNRlPLG&FzAuK9GyvRNO{ocF3QS!ucxyF1pP?WAe8hMd`+9 zd>zF}$?%+cT_~ZgVZu!LT6d6%ti!Qt0_PYsfp;SuAGF(0`o9;3qg)0m*0VPn9Kc$Q7T5Mod8855davln(oMbz|F(gt##(Ko=;}Hi$49I4ENwB=Ml#$jy6wr!rj;JtKZ=oboeT0NJ&ws)}j%x3dVJE(PWWuKdOdM%-~IftP5R zzcQ)fY1jz-Obza1Lwx$COVj^6N!iySeCA$9an_+jyKg&oKy`N@H@?c3 znI3PIqa2SZApmG)wiTyuh{^<^d6+poZ)} zNvA*P{&kV(h0tEWAzt1?4#7{7Jz4n&VvEJIO-?LUe8`z3IVfZKC*Gzz^G>>xl;DQ3 znXa6Isp3bs1c%1HxC`%VP6PfueK^bqy9!^Ogeq4Lo4Is3NOw`a0MEH-FkUfP&{%j- z8S%%LXq!B1>t}gu6as2DlaCZYQq?$Qhvqwgn_G1JJesOV5mLGL-|n;Avkb_A(JpU{ z(2+@~9!C!HV23Qf66pBDq&wSA7k2YcC-i;~c8+}p9n`8BsOj?OpsOZrt3+WoMw5qc zDOsyw7Unj9SyWdAjAoqWJnNnNFBiFjK)n`!Hpk@q1#MjH7R?@^ATNKl4_QKXZge*% zo-zD=)-V#^q96F+_yXJSJ!rl?Kf1e05;{D6YuAHNciZ(T{Rp19t7LtS%K0bn06qt< zYPgT3yJ)6N@vHBcIv}mr`(Kq=aT1sgY`)1rwtdGN9YmIq!F#oVbV;Jirko|7TPv}L z_l~Gp)>z>dxRJ1Nh>VZoeOX%p&bq^}fjLqNdI#o}?*t=3P}P0A8{Z^OQp|6el$_`N z-78U)qNj#w@h`ztSCY7Gohz!{g%(1nLq|)i;Dk+3!#ro6vobq$W_DtJ-d#5mPXug*@8GPI(pEIjsAsDk<{=*g zc5@uDT-=?L%h!sOV6w4r!r@y|iUDIo*NXhm%K98NiI1wx*1XGTy2?7mr!(B6Ys^#d zV6!q}*CPcGzQC6+Z7g!ICO|RrLG5+rVvn=diKsl%Gup-)CDqH#?L7du(b1>2fCB|) z0$%Muf4=COUQT4w?iv~n{y=Gnt(8JrEs5=JAqhMqv*di|ieonz>V-8>;RPK8wL>D@URWA=`8ZJ`eZoO6^F+ z{N@OgZhobd?qx(9^Ak)>{oHF;+Nsiw3GXVfC+)((aZ3&CBHRnh;j4qOwP^e zpd(66024(Y2T2=F6;Tm0$m={++*a5wmw0xU;0Mv<(h_cP*F5STN)&CN+^qE555QQ( z3b=k;wtbSKsF2%Q^oXwB${dK(KD-!k0Q>YT2!z($mY;y*x*Bl_I1aj)q!!$Sx>z-| zl{c#(aV}%4%*Y}&QB<>pmv``OI#>sA0{X{T!&OpDlQLDiPkYg-6vwr~0 zdIsR_$ASO6%q7|7+#9&HN|dy?oZPcbE6DX$y=!Did#Kcik5&E&hxr^$8H 
zl&cvbIoaXT6nXN}{GX75X2Y5Spji=$PDf&6W7z=>X$oSPsfkG&xNlHKtK2%{C`z&$vhAHa@Nb(;I>%?KVhp6}tP-+G{| z=;`A1O3gba6eUBmhIj45e7QO`c#}SgbZB#08uK^XH!M9a z-Os4f#uIU)O7va&O>N@AtufhJ`67&3)|khMFd&S5 zMf2{xh{MLg&!CdQ(d~^CX-)lr%~f3h@*m||g$*DDGdHU>k-9fN5!$8LF zSOnO8WPzfXv*;urkER!X>|QFMep@{pVf(3@5vp}qB2xncVTJB)| zx8iyF6XIZII+~TD6Aq;2EIev(i>o-&8vg(zL)Zq*UP3VRLpf#TZh8HKr^nx74;%-64hY*| zC*>)NQ@xUVXr_z0gb`ArvHg9Um%;t3I{BUZD_Um1=Sy4TiZ{ZlS`F>8{{DinA{;F8}!P0wd$3jJHink zKVE79n?n0}CN|O#Wyw?w`Y$dWSu`%{%}H^j?0I9oOMCU}1bKQ!j6;;FjCpK5g#t-N zY$(cm0e?psrQLp~Rsz&&NQK3J`#|BENDTe+0vaRHzVuAu$7PuA>7{@CKMLyQ8VZM5 z?yUE$FXab4sK`Sh%SxW#alR-$`<+^DKd1!3K8%Ox>b7aewmy3 zvcN-wJwFMd++nPR#5#yc$YZc$Lt0JpWnvS;wjVGx);yGc8<;HLfaTbYs5{kRY+}RD zpn?9JFsk>-C zrTp+vpQosLHdax30UB`c1`G!+@(SR0Wm>*^ADpAJ_j=I=I}I9FEQZ7rWiqPpMbiM| zXY}i$)tFa5v8d{RAfSLH~aL${WQ8uhCnw%zjh`tM4Cil`60N7RM!ujv2@C=# zO0{EoZbIX!AB7g~--7#q_yky7z%Zo-u4gC69-UvcVId>ekhTFgp~$&@dCDsW;&59Qh&9P1hJxHMF_F%-?JBas(8TudQ4^4tGq2OTelKCu ztI$Avx6J1(d}N_tr~9U(aL_bf70oINBurOoxL$aQHJc?w_PaExu;OY~fzkqCC4iU! z1E%SIUKnFigq-!bfmn zWm?~~_ldvrsoa)c0y%UCE|4YTb>-VFhV8hS^g|!y?2g9j4fF<0r@b=&{hwrV%;JUA zQAtV1(czw0IJM$j&O>0v>Qw)n!S~u4bfN<8q>{YyHhlxAQZTM>>F(v|{40YLfzxUx zGMAjFrkARyTDo%GT%Ta;_c=rhGXt@%RQwOc9v2QuXfJsH%bPwzju)h8bi;fcVq`09>v}Y%!_JQ}6gP7!a^=kh;>5 z>DI;ynq)Yb%`(5bw&}a!EN*Pn0z*o1nG5fd-w>mSm#6k-#}1*0E{%b=^?H$<$3#m; zlF6N%su=%(-1n1F`C~;UB@ZO4WY;h)nP5Oan1r4v-FGGORdW_2YKa=wMX5QM3H1 z^21Xj(?#{RJi?AqUId%j{cb;5XwzP z%rO8RENC8VB}>a~RgBf(Li`k+t^R}rBq!>CQq44))jdQNv!H|PGRW^)im%^cuN+-k6i%UJ00;u0tVn-IN1$lonlP|-I)%B7P+|__u z0_<~1Vd2N2t(nJ4RFlP0GdRWm4J>ek-LNw8Hr;`NqmVL;Q-gLkWx!!58FSC1cAI&? z2S%J_qyc)@_tF5Js$3Y95UvR~+BPu-oEmm|X2!c&gry#OnMwH2LElX(sAgnjgQ{IvYL#XYk|uLs5zr9Jd;toNGHsH7xQ4=;@5n;~S{e9x#B^_uUNpLVsU zrzz2QYP59OCk3N`4j%+Lz!U(oig|T&y{A?gKPVp7WM9JLlGZVC`YR?iXxrrO-Mfv@ z(XVCSSQ?Zl|5J^#!sw{)sv;mTM5*XRU}HKE0||{t5`0(qf6Dg&DUiCa&_td5N~iqe z#~)K3ur(u!0>PD>Nqq4cNWQCQ?bfht_qqUSNDo}J78NRpSCt9RR4svx0TyZL)&Ub{VB zd9H|vfuhj&8c{isRn_?;C)M62dj^CDZIlJ+R}IAR4U~WOPiYaOn!aJ3yGJ(wrbD@e z4Y*5@G|SGm1|!F*+hjnxIyR8t!&qZVdMoV^QR_sbvm>Adg>U-!uiF~!KqY)E!S3qR z^t2ak5?aIFal0#o=^!fE6Y$e&H~}PTzf!zAHwepTD9fORjqdEYgu|YUN(GJai^F8s z&`pp3-mW^Wgx*C5f!rGTkjxFkp=&@NgEuKR>T`tKU;s?EnFyl*`{P>g0<2 zPU!11ZI-t~y|>wH7vYD)tpIX41Ovp6W}CMoPtc;l@lx|pkj$s0T?YOOxK$1yg02!# zp<@EfE3DAB4tuHdjQv!=%m^_O`z6V5ic@OM3=v}Vt0I3+u8A|h=Q}{?n4kYl_o9ZlPk`PyBA;Obv~;Q9{ed?F4Dtk? 
zHwaV=gh-D;`w+=*wdP(I+&gTZCd+kf{~kkXWzDuL=7Lz=MtEzthZX*APWO$IE;6hk z1;8S!BhPpMGZOm=KuquLCz!wwm;$}~Xg)w*>EXKCLEKtgo;Fj{l{yvcl2@O_?ma@w)4w0U%QVXowHCyP?1-9g-I@Zrt$c;+xN& z>Y@S#G&?$pM1WTrd&~II{~%|Mdm8NyjjIwr^rZ*<_$KlSS)k-kNSvPT2fHBRg|?fZ zVvB>Rg<^oN*`vIH3Dz#v1PN6=X(|03q}j{Wf$LIDdIRO|HV*jf+Jm)0;#U>50kgq8 zx#|w25b1O2w-faKI!N$DMRw7?^P{4l{Sz{;S)q7I75dU` z@@2vkpVKo*6`=O1N;UPNn;<3mU>Az?;6r9V2Ns(|1T7Y6t~G08PhUI8gg954=EeoC zPYA%;zkYo}uM`w$0bi1uiDhe%BZ@4+16{y{d{qG(%f`-~yE6_MOMG|7^3m?I8$rG- zDg1H0J6LoBLrn@IbyS95>4&9Evy*s!9j&}20~CjCOgEHT7ABz{q)%K$`#vF*MP=Q=!TdMh1qA_R9gk3WA2H+fpUvB^yVOKA&3@YpA4!tLcmSRoq z;gK(g<-mGuZ?DwqtL@o>2>)J*8X{pk%BHRCVrJ5qat>u>mk=mg?#<^sQS8ea1EZwF&vPD5{5H@oCj<>dIgj0TKgsI);X$N#^@T2f=&m-7M%yTd#&<%xCZ_* zEYzo7Cni-B?Nv2kpr^i9xq^sDioM#K1=Nhjx_Wztrwg9A944x>@Pk;hNG0Gl@1f~Q z%!HzTUr$AGX{lj+I{k97p9d~q-g*9$NymRRU5Im77}Q!KD1i0?Y6@}7nix+lOaJ-C z;^UO|FaB$Vax(x{7?g4gmyuY67hdVT$P{^P6a6o^#B!)yYcImZ@i4NJaz4cJy>`$O z;LW2*nTbJE_<3jZC|yjcCPY2HY)KL93gj51pH)e!=p^dFY~bT>4GfZA442zwVOxiB zG?1zR;Q-w-_P-u~NGHMaExrg@gvW;0e>ng5Fy(AoN*qkRnTY0tF>FGH;T0i_dO&cK zx3EMsXnkE6O!8G8#5tr$t1IT-u}Ha#_FkB4)pL+}CyZVM4#8MYLCcE=>4p*?cN|3+98_#2fbiXwE1&ergriao)wd!*Xe!;8Fgm+52H_b zD9Mtr68HEO(UQ&I_%4SM%)~b8bjp|kLbTNeL3aZ!2K|R|GplR$1jRpcJJk*YqdybPTq_Um}F42qI|~^G%Po^zZ|jFBip}(hm|zTz+nD z7jep|CG$j#nPx0m9bQ6S_!Dp9P__hailtV~*65@8VsH7Jw|*|*Kb2{zAu6fSEP|AI zC(S^tkSzvX@H--*H!l4h;i)1S4X#1ifDfE)Xs-U)5>}|wf=v%c@xT8RACCGfr5O#2 z%V#Nzc_#j6Riagrv@TAV4cgftOvgS`2b%HG0Icr=mU&}~Ex~4>iuT!4qHsR#rZ;&2 z^-jTIQ^)C%uE5RJJ2jhFdcGDLMV2kXI^5701&qXil;(rx6@1c-(`ssFqP&P~F2K>b z^q03iR2r+Ov~rjU-Z~xH_)bQ3aT=N7Epyd+p{<%MB6Cbo#T+SlA1*9UtsV#+s zZh8}9gX_11$~v$Kcs_XCz=Wr=9*iMDg17~`pszR6rvdWY1BmPRbCY@lQzxQg;lFyX z6-7o&Z12BuP05mS3-&nXpr1>U3H8!)5YHAZb!RFEyzcu>e`9j;r{;hHfs`gl4fH}# zV5EH6_L*QjZDWLR5C10@cq7C5{+6o?X|h*o$0#{roR=3XGZyUT(USO*r+u zr@Nb>FofLKmGa*Mlm%G}!L5ed3nchVQ_eal*9MBd9v;TB$fYqi$iYPm>p6_v&#&AU zE~}Z84aqGgIG6Za;mf9ka$8Ot{5@QMH?Ml!%0_!7_Y0lYqux>VpxQy(VAAi)wJuk^=d6s?!*a7X1;^AbC!+YXRh&j)HzZ@b-jxYX`>}bAJ zBNBZl?GK0oLEPQcrJ!?V1BMl&a5P^d)eYn3U*Vu#u4(xqv-_z>Y|^o6x)42Y0-qWHD|Z?BNoLrq;g^8FrGEd274!SBPopNggnuwzTK zqeo7yVU?draYfqy-t`JV4kBTmu=n{Bo&s`Z=Obu+RMq+ZRQ0QktyQLa#kv-poE!Kj z+4@OJ#Pnn4`Msqu7i9pEg7e;dxt2>>A_sWG=&e_@H_eHV7Bx@OnzM=6!mxjxK>ryA zoP!iSc`3E~<0AV&M)7DCyJC|l^{!bk;*w+$pWQWKzPxdoX>D1s%q~{~jDNmM;*h4_CEI z(0v9)jM=B|p$`dOMn(jIRSHaeh0lH~1U(CC4s>;j-AVbHzgqIY_1Oo{o*w@Yy0^{h z{M{)NKX*$I5RPz97kK+INpjU>fC*w}$#Q22XlH_UR(6z>l`Xa!#RJ>8q-N*m=O;nx z+qbT=L)ob&4-Rhch&l4^Es)!_|K5LZ5g?Mm3k1gF?jLnWFEBm^<6nsohSf3t{(s-F zHxd8)^RT_+J#9??nP&|4VUzVZGqLk3Qe?G2l_+!miwu{zL!w&Hp>I Zu}|ZQoWfG0`tC}=)U Date: Tue, 6 Feb 2024 10:49:33 -0500 Subject: [PATCH 487/792] docs(aws): Update aws.md (#9726) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- docs/deploy/aws.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/deploy/aws.md b/docs/deploy/aws.md index 6598b93c25e9a..d060eddd9acc8 100644 --- a/docs/deploy/aws.md +++ b/docs/deploy/aws.md @@ -53,6 +53,8 @@ ip-192-168-64-56.us-west-2.compute.internal Ready 3h v1.18.9-ek ip-192-168-8-126.us-west-2.compute.internal Ready 3h v1.18.9-eks-d1db3c ``` +Once your cluster is running, make sure to install the EBS CSI driver, Core DNS, and VPC CNI plugin for Kubernetes. [add-ons](https://docs.aws.amazon.com/eks/latest/userguide/eks-add-ons.html) + ## Setup DataHub using Helm Once the kubernetes cluster has been set up, you can deploy DataHub and it’s prerequisites using helm. 
Please follow the From 82035437be4fe32d5ca946531c2abaf095d997e4 Mon Sep 17 00:00:00 2001 From: siladitya <68184387+siladitya2@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:53:51 +0100 Subject: [PATCH 488/792] fix(search): Fix missing mapping of DOUBLE searchable type (#9651) Co-authored-by: si-chakraborty --- .../main/java/com/linkedin/metadata/search/utils/ESUtils.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 4d74bfb66b8db..86d411e9b5b92 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -261,6 +261,8 @@ public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType f return DATE_FIELD_TYPE; } else if (fieldType == SearchableAnnotation.FieldType.OBJECT) { return OBJECT_FIELD_TYPE; + } else if (fieldType == SearchableAnnotation.FieldType.DOUBLE) { + return DOUBLE_FIELD_TYPE; } else { log.warn("FieldType {} has no mappings implemented", fieldType); return null; From 11f7804b1ea2f47495c336e8d91d7dc380ccd228 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:09:54 -0600 Subject: [PATCH 489/792] chore(lint): apply spotless (#9789) --- .../java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 8d9b9a5ad82c8..e3eef0688c269 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1643,7 +1643,8 @@ private void configureResolvedAuditStampResolvers(final RuntimeWiring.Builder bu typeWiring.dataFetcher( "actor", new LoadableTypeResolver<>( - corpUserType, (env) -> ((ResolvedAuditStamp) env.getSource()).getActor().getUrn()))); + corpUserType, + (env) -> ((ResolvedAuditStamp) env.getSource()).getActor().getUrn()))); } /** From e1c8ac732098b1b05c381bb33c9c4c2043510d34 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 7 Feb 2024 06:58:09 +0530 Subject: [PATCH 490/792] docs(observe): add upsert assertion monitor graphql examples (#9766) --- .../observe/column-assertions.md | 85 +++++++++++++++++++ .../observe/custom-sql-assertions.md | 59 +++++++++++++ .../observe/freshness-assertions.md | 53 ++++++++++++ .../observe/volume-assertions.md | 73 ++++++++++++++++ 4 files changed, 270 insertions(+) diff --git a/docs/managed-datahub/observe/column-assertions.md b/docs/managed-datahub/observe/column-assertions.md index 8ef32e73b4b72..a5dcb02c87bad 100644 --- a/docs/managed-datahub/observe/column-assertions.md +++ b/docs/managed-datahub/observe/column-assertions.md @@ -340,6 +340,91 @@ This entity defines _when_ to run the check (Using CRON format - every 8th hour) After creating the monitor, the new assertion will start to be evaluated every 8 hours in your selected timezone. +Alternatively you can use `upsertDatasetFieldAssertionMonitor` graphql endpoint for creating a Column Assertion and corresponding Monitor for a dataset. 
+ +```json +mutation upsertDatasetFieldAssertionMonitor { + upsertDatasetFieldAssertionMonitor( + input: { + entityUrn: "" + type: FIELD_VALUES, + fieldValuesAssertion: { + field: { + path: "", + type: "NUMBER", + nativeType: "NUMBER(38,0)" + }, + operator: GREATER_THAN, + parameters: { + value: { + type: NUMBER, + value: "10" + } + }, + failThreshold: { + type: COUNT, + value: 0 + }, + excludeNulls: true + } + evaluationSchedule: { + timezone: "America/Los_Angeles" + cron: "0 */8 * * *" + } + evaluationParameters: { + sourceType: ALL_ROWS_QUERY + } + mode: ACTIVE + } + ){ + urn + } +} +``` + +You can use same endpoint with assertion urn input to update an existing Column Assertion and corresponding Monitor. + +```json +mutation upsertDatasetFieldAssertionMonitor { + upsertDatasetFieldAssertionMonitor( + assertionUrn: "" + input: { + entityUrn: "" + type: FIELD_VALUES, + fieldValuesAssertion: { + field: { + path: "", + type: "NUMBER", + nativeType: "NUMBER(38,0)" + }, + operator: GREATER_THAN_OR_EQUAL_TO, + parameters: { + value: { + type: NUMBER, + value: "10" + } + }, + failThreshold: { + type: COUNT, + value: 0 + }, + excludeNulls: true + } + evaluationSchedule: { + timezone: "America/Los_Angeles" + cron: "0 */8 * * *" + } + evaluationParameters: { + sourceType: ALL_ROWS_QUERY + } + mode: ACTIVE + } + ){ + urn + } +} +``` + You can delete assertions along with their monitors using GraphQL mutations: `deleteAssertion` and `deleteMonitor`. ### Tips diff --git a/docs/managed-datahub/observe/custom-sql-assertions.md b/docs/managed-datahub/observe/custom-sql-assertions.md index 581b542688134..434788ff45a6f 100644 --- a/docs/managed-datahub/observe/custom-sql-assertions.md +++ b/docs/managed-datahub/observe/custom-sql-assertions.md @@ -297,6 +297,65 @@ This entity defines _when_ to run the check (Using CRON format - every 8th hour) After creating the monitor, the new assertion will start to be evaluated every 8 hours in your selected timezone. +Alternatively you can use `upsertDatasetSqlAssertionMonitor` graphql endpoint for creating a Custom SQL Assertion and corresponding Monitor for a dataset. + +```json +mutation upsertDatasetSqlAssertionMonitor { + upsertDatasetSqlAssertionMonitor( + input: { + entityUrn: "" + type: METRIC, + description: "", + statement: "", + operator: GREATER_THAN_OR_EQUAL_TO, + parameters: { + value: { + value: "100", + type: NUMBER + } + } + evaluationSchedule: { + timezone: "America/Los_Angeles" + cron: "0 */8 * * *" + } + mode: ACTIVE + } + ) { + urn + } +} +``` + +You can use same endpoint with assertion urn input to update an existing Custom SQL Assertion and corresponding Monitor. + +```json +mutation upsertDatasetSqlAssertionMonitor { + upsertDatasetSqlAssertionMonitor( + assertionUrn: "" + input: { + entityUrn: "" + type: METRIC, + description: "", + statement: "", + operator: GREATER_THAN_OR_EQUAL_TO, + parameters: { + value: { + value: "100", + type: NUMBER + } + } + evaluationSchedule: { + timezone: "America/Los_Angeles" + cron: "0 */6 * * *" + } + mode: ACTIVE + } + ) { + urn + } +} +``` + You can delete assertions along with their monitors using GraphQL mutations: `deleteAssertion` and `deleteMonitor`. 
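
For reference, a minimal sketch of the cleanup described above — assuming `deleteAssertion` and `deleteMonitor` each accept the target urn as their only argument; the urns below are placeholders, not real identifiers:

```graphql
# Substitute the assertion/monitor urns returned by the upsert calls above.
mutation cleanupAssertionAndMonitor {
  deleteAssertion(urn: "urn:li:assertion:example-assertion-id")
  deleteMonitor(urn: "urn:li:monitor:example-monitor-id")
}
```

Both mutations should be issued against the same GraphQL endpoint used for the upsert examples.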
### Tips diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md index 9704f475b1587..50cae1567f93a 100644 --- a/docs/managed-datahub/observe/freshness-assertions.md +++ b/docs/managed-datahub/observe/freshness-assertions.md @@ -346,6 +346,59 @@ This entity defines _when_ to run the check (Using CRON format - every 8th hour) After creating the monitor, the new assertion will start to be evaluated every 8 hours in your selected timezone. +Alternatively you can use `upsertDatasetFreshnessAssertionMonitor` graphql endpoint for creating a Freshness Assertion and corresponding Monitor for a dataset. + +```json +mutation upsertDatasetFreshnessAssertionMonitor { + upsertDatasetFreshnessAssertionMonitor( + input: { + entityUrn: "", + schedule: { + type: FIXED_INTERVAL, + fixedInterval: { unit: HOUR, multiple: 8 } + } + evaluationSchedule: { + timezone: "America/Los_Angeles", + cron: "0 */8 * * *" + } + evaluationParameters: { + sourceType: INFORMATION_SCHEMA + } + mode: ACTIVE + } + ){ + urn + } +} +``` + +You can use same endpoint with assertion urn input to update an existing Freshness Assertion and corresponding Monitor. + +```json +mutation upsertDatasetFreshnessAssertionMonitor { + upsertDatasetFreshnessAssertionMonitor( + assertionUrn: "" + input: { + entityUrn: "", + schedule: { + type: FIXED_INTERVAL, + fixedInterval: { unit: HOUR, multiple: 6 } + } + evaluationSchedule: { + timezone: "America/Los_Angeles", + cron: "0 */6 * * *" + } + evaluationParameters: { + sourceType: INFORMATION_SCHEMA + } + mode: ACTIVE + } + ){ + urn + } +} +``` + You can delete assertions along with their monitors using GraphQL mutations: `deleteAssertion` and `deleteMonitor`. ### Reporting Operations via API diff --git a/docs/managed-datahub/observe/volume-assertions.md b/docs/managed-datahub/observe/volume-assertions.md index 67971d0c20037..f925920a0c248 100644 --- a/docs/managed-datahub/observe/volume-assertions.md +++ b/docs/managed-datahub/observe/volume-assertions.md @@ -337,6 +337,79 @@ This entity defines _when_ to run the check (Using CRON format - every 8th hour) After creating the monitor, the new assertion will start to be evaluated every 8 hours in your selected timezone. +Alternatively you can use `upsertDatasetVolumeAssertionMonitor` graphql endpoint for creating a Volume Assertion and corresponding Monitor. + +```json +mutation upsertDatasetVolumeAssertionMonitor { + upsertDatasetVolumeAssertionMonitor( + input: { + entityUrn: "" + type: ROW_COUNT_TOTAL + rowCountTotal: { + operator: BETWEEN + parameters: { + minValue: { + value: "10" + type: NUMBER + } + maxValue: { + value: "20" + type: NUMBER + } + } + } + evaluationSchedule: { + timezone: "America/Los_Angeles" + cron: "0 */8 * * *" + } + evaluationParameters: { + sourceType: INFORMATION_SCHEMA + } + mode: ACTIVE + } + ) { + urn + } +} +``` + +You can use same endpoint with assertion urn input to update an existing Volume Assertion and corresponding Monitor. 
+ +```json +mutation upsertDatasetVolumeAssertionMonitor { + upsertDatasetVolumeAssertionMonitor( + assertionUrn: "" + input: { + entityUrn: "" + type: ROW_COUNT_TOTAL + rowCountTotal: { + operator: BETWEEN + parameters: { + minValue: { + value: "10" + type: NUMBER + } + maxValue: { + value: "20" + type: NUMBER + } + } + } + evaluationSchedule: { + timezone: "America/Los_Angeles" + cron: "0 */6 * * *" + } + evaluationParameters: { + sourceType: INFORMATION_SCHEMA + } + mode: ACTIVE + } + ) { + urn + } +} +``` + You can delete assertions along with their monitors using GraphQL mutations: `deleteAssertion` and `deleteMonitor`. ### Tips From e6d706687ab5af6b888176bb775c338ff953bef7 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Wed, 7 Feb 2024 14:12:00 +0900 Subject: [PATCH 491/792] fix(docs): fix the subtitle on the slack page (#9781) --- docs-website/src/pages/slack/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-website/src/pages/slack/index.js b/docs-website/src/pages/slack/index.js index c85a1eefe5545..5989224191112 100644 --- a/docs-website/src/pages/slack/index.js +++ b/docs-website/src/pages/slack/index.js @@ -36,7 +36,7 @@ function SlackSurvey() {

    Join the DataHub Slack Community!

-           We will send the link to join our Slack community to your email.
+           We'd love to find out a little more about you!
    From ea0ae8cb54ed307c8d15e15d445f2a36f4812b7c Mon Sep 17 00:00:00 2001 From: Peng G Date: Wed, 7 Feb 2024 00:28:30 -0800 Subject: [PATCH 492/792] feat(airflow) Override datajob external_url (#9681) Co-authored-by: Peng Gao --- docs/lineage/airflow.md | 2 ++ .../src/datahub_airflow_plugin/_config.py | 12 +++++++++ .../client/airflow_generator.py | 25 ++++++++++++++++--- .../datahub_listener.py | 4 +++ .../datahub_plugin_v22.py | 4 +++ .../lineage/_lineage_core.py | 1 + .../datahub_airflow_plugin/lineage/datahub.py | 6 ++++- 7 files changed, 49 insertions(+), 5 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index da3a36bc87be5..94eb69a2ed827 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -135,6 +135,8 @@ conn_id = datahub_rest_default # or datahub_kafka_default | capture_ownership_info | true | If true, the owners field of the DAG will be capture as a DataHub corpuser. | | capture_tags_info | true | If true, the tags field of the DAG will be captured as DataHub tags. | | capture_executions | true | If true, we'll capture task runs in DataHub in addition to DAG definitions. | +| datajob_url_link | taskinstance | If taskinstance, the datajob url will be taskinstance link on airflow. It can also be grid. + | | graceful_exceptions | true | If set to true, most runtime errors in the lineage backend will be suppressed and will not cause the overall task to fail. Note that configuration issues will still throw exceptions. | #### Validate that the plugin is working diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_config.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_config.py index 67843da2ba995..48d462b85702a 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_config.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_config.py @@ -1,3 +1,4 @@ +from enum import Enum from typing import TYPE_CHECKING, Optional import datahub.emitter.mce_builder as builder @@ -8,6 +9,11 @@ from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook +class DatajobUrl(Enum): + GRID = "grid" + TASKINSTANCE = "taskinstance" + + class DatahubLineageConfig(ConfigModel): # This class is shared between the lineage backend and the Airflow plugin. # The defaults listed here are only relevant for the lineage backend. @@ -41,6 +47,8 @@ class DatahubLineageConfig(ConfigModel): # The Airflow plugin behaves as if it were set to True. graceful_exceptions: bool = True + datajob_url_link: DatajobUrl = DatajobUrl.TASKINSTANCE + def make_emitter_hook(self) -> "DatahubGenericHook": # This is necessary to avoid issues with circular imports. 
from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook @@ -65,6 +73,9 @@ def get_lineage_config() -> DatahubLineageConfig: disable_openlineage_plugin = conf.get( "datahub", "disable_openlineage_plugin", fallback=True ) + datajob_url_link = conf.get( + "datahub", "datajob_url_link", fallback=DatajobUrl.TASKINSTANCE.value + ) return DatahubLineageConfig( enabled=enabled, @@ -77,4 +88,5 @@ def get_lineage_config() -> DatahubLineageConfig: log_level=log_level, debug_emitter=debug_emitter, disable_openlineage_plugin=disable_openlineage_plugin, + datajob_url_link=datajob_url_link, ) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py index e1d53be7bae6b..2fa15f13e848b 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py @@ -13,6 +13,7 @@ from datahub.utilities.urns.data_job_urn import DataJobUrn from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED +from datahub_airflow_plugin._config import DatahubLineageConfig, DatajobUrl assert AIRFLOW_PATCHED @@ -208,6 +209,7 @@ def generate_datajob( set_dependencies: bool = True, capture_owner: bool = True, capture_tags: bool = True, + config: Optional[DatahubLineageConfig] = None, ) -> DataJob: """ @@ -217,6 +219,7 @@ def generate_datajob( :param set_dependencies: bool - whether to extract dependencies from airflow task :param capture_owner: bool - whether to extract owner from airflow task :param capture_tags: bool - whether to set tags automatically from airflow task + :param config: DatahubLineageConfig :return: DataJob - returns the generated DataJob object """ dataflow_urn = DataFlowUrn.create_from_ids( @@ -267,7 +270,11 @@ def generate_datajob( datajob.properties = job_property_bag base_url = conf.get("webserver", "base_url") - datajob.url = f"{base_url}/taskinstance/list/?flt1_dag_id_equals={datajob.flow_urn.get_flow_id()}&_flt_3_task_id={task.task_id}" + + if config and config.datajob_url_link == DatajobUrl.GRID: + datajob.url = f"{base_url}/dags/{datajob.flow_urn.get_flow_id()}/grid?task_id={task.task_id}" + else: + datajob.url = f"{base_url}/taskinstance/list/?flt1_dag_id_equals={datajob.flow_urn.get_flow_id()}&_flt_3_task_id={task.task_id}" if capture_owner and dag.owner: datajob.owners.add(dag.owner) @@ -290,9 +297,12 @@ def create_datajob_instance( task: "Operator", dag: "DAG", data_job: Optional[DataJob] = None, + config: Optional[DatahubLineageConfig] = None, ) -> DataProcessInstance: if data_job is None: - data_job = AirflowGenerator.generate_datajob(cluster, task=task, dag=dag) + data_job = AirflowGenerator.generate_datajob( + cluster, task=task, dag=dag, config=config + ) dpi = DataProcessInstance.from_datajob( datajob=data_job, id=task.task_id, clone_inlets=True, clone_outlets=True ) @@ -407,9 +417,12 @@ def run_datajob( datajob: Optional[DataJob] = None, attempt: Optional[int] = None, emit_templates: bool = True, + config: Optional[DatahubLineageConfig] = None, ) -> DataProcessInstance: if datajob is None: - datajob = AirflowGenerator.generate_datajob(cluster, ti.task, dag) + datajob = AirflowGenerator.generate_datajob( + cluster, ti.task, dag, config=config + ) assert dag_run.run_id dpi = DataProcessInstance.from_datajob( @@ -480,6 +493,7 @@ def complete_datajob( end_timestamp_millis: Optional[int] 
= None, result: Optional[InstanceRunResult] = None, datajob: Optional[DataJob] = None, + config: Optional[DatahubLineageConfig] = None, ) -> DataProcessInstance: """ @@ -491,10 +505,13 @@ def complete_datajob( :param end_timestamp_millis: Optional[int] :param result: Optional[str] One of the result from datahub.metadata.schema_class.RunResultTypeClass :param datajob: Optional[DataJob] + :param config: Optional[DatahubLineageConfig] :return: DataProcessInstance """ if datajob is None: - datajob = AirflowGenerator.generate_datajob(cluster, ti.task, dag) + datajob = AirflowGenerator.generate_datajob( + cluster, ti.task, dag, config=config + ) if end_timestamp_millis is None: if ti.end_date: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index a7f588a166dde..475f3791bc0c8 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -376,6 +376,7 @@ def on_task_instance_running( dag=dag, capture_tags=self.config.capture_tags_info, capture_owner=self.config.capture_ownership_info, + config=self.config, ) # TODO: Make use of get_task_location to extract github urls. @@ -397,6 +398,7 @@ def on_task_instance_running( dag_run=dagrun, datajob=datajob, emit_templates=False, + config=self.config, ) logger.debug(f"Emitted DataHub DataProcess Instance start: {dpi}") @@ -419,6 +421,7 @@ def on_task_instance_finish( dag=dag, capture_tags=self.config.capture_tags_info, capture_owner=self.config.capture_ownership_info, + config=self.config, ) # Add lineage info. @@ -436,6 +439,7 @@ def on_task_instance_finish( dag_run=dagrun, datajob=datajob, result=status, + config=self.config, ) logger.debug( f"Emitted DataHub DataProcess Instance with status {status}: {dpi}" diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py index 51a4151bc8207..7b8d719712d10 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py @@ -120,6 +120,7 @@ def datahub_task_status_callback(context, status): dag=dag, capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, + config=config, ) datajob.inlets.extend( entities_to_dataset_urn_list([let.urn for let in task_inlets]) @@ -143,6 +144,7 @@ def datahub_task_status_callback(context, status): dag_run=context["dag_run"], datajob=datajob, start_timestamp_millis=int(ti.start_date.timestamp() * 1000), + config=config, ) task.log.info(f"Emitted Start Datahub Dataprocess Instance: {dpi}") @@ -185,6 +187,7 @@ def datahub_pre_execution(context): dag=dag, capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, + config=config, ) datajob.inlets.extend( entities_to_dataset_urn_list([let.urn for let in task_inlets]) @@ -208,6 +211,7 @@ def datahub_pre_execution(context): dag_run=context["dag_run"], datajob=datajob, start_timestamp_millis=int(ti.start_date.timestamp() * 1000), + config=config, ) task.log.info(f"Emitting Datahub Dataprocess Instance: {dpi}") diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py 
b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py index 75fc79443e49e..daf45e1cd83f8 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/_lineage_core.py @@ -51,6 +51,7 @@ def send_lineage_to_datahub( dag=dag, capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, + config=config, ) datajob.inlets.extend(entities_to_dataset_urn_list([let.urn for let in inlets])) datajob.outlets.extend(entities_to_dataset_urn_list([let.urn for let in outlets])) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py index 3ebe7831d08f9..6f81812ea766e 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py @@ -71,7 +71,11 @@ def send_lineage( try: context = context or {} # ensure not None to satisfy mypy send_lineage_to_datahub( - config, operator, operator.inlets, operator.outlets, context + config, + operator, + operator.inlets, + operator.outlets, + context, ) except Exception as e: operator.log.error(e) From 20b9050732f6a78225c70dc20eaade82e07859a9 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Wed, 7 Feb 2024 10:37:55 -0600 Subject: [PATCH 493/792] fix(browsev2): align browse and aggregate queries (#9790) --- .../resolvers/chart/BrowseV2Resolver.java | 5 ++- .../src/main/resources/search.graphql | 5 +++ .../browse/BrowseV2ResolverTest.java | 4 +- .../metadata/client/JavaEntityClient.java | 12 ++++-- .../elasticsearch/ElasticSearchService.java | 10 +++-- .../elasticsearch/query/ESBrowseDAO.java | 38 +++++++++++++------ .../query/request/SearchQueryBuilder.java | 33 ++++++++++++---- .../search/custom/QueryConfiguration.java | 7 ++++ .../linkedin/entity/client/EntityClient.java | 6 ++- .../entity/client/RestliEntityClient.java | 6 ++- .../metadata/search/EntitySearchService.java | 8 +++- 11 files changed, 99 insertions(+), 35 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index d9ce2472c8634..7bcde0ea9bdc1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -18,6 +18,7 @@ import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResultV2; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; @@ -52,6 +53,7 @@ public CompletableFuture get(DataFetchingEnvironment environmen final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; final String query = input.getQuery() != null ? 
input.getQuery() : "*"; + final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); @@ -83,7 +85,8 @@ public CompletableFuture get(DataFetchingEnvironment environmen sanitizedQuery, start, count, - context.getAuthentication()); + context.getAuthentication(), + searchFlags); return mapBrowseResults(browseResults); } catch (Exception e) { throw new RuntimeException("Failed to execute browse V2", e); diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index 2b921601058fb..a906362cee185 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -1230,6 +1230,11 @@ input BrowseV2Input { The search query string """ query: String + + """ + Flags controlling search options + """ + searchFlags: SearchFlags } """ diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index c565e771a0475..41797fac636f1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -21,6 +21,7 @@ import com.linkedin.metadata.browse.BrowseResultGroupV2Array; import com.linkedin.metadata.browse.BrowseResultMetadata; import com.linkedin.metadata.browse.BrowseResultV2; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.Criterion; @@ -262,7 +263,8 @@ private static EntityClient initMockEntityClient( Mockito.eq(query), Mockito.eq(start), Mockito.eq(limit), - Mockito.any(Authentication.class))) + Mockito.any(Authentication.class), + Mockito.nullable(SearchFlags.class))) .thenReturn(result); return client; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 9a3bc9e319d2b..15de029340a3c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -229,9 +229,11 @@ public BrowseResultV2 browseV2( @Nonnull String input, int start, int count, - @Nonnull Authentication authentication) { + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) { // TODO: cache browseV2 results - return _entitySearchService.browseV2(entityName, path, filter, input, start, count); + return _entitySearchService.browseV2( + entityName, path, filter, input, start, count, searchFlags); } /** @@ -253,9 +255,11 @@ public BrowseResultV2 browseV2( @Nonnull String input, int start, int count, - @Nonnull Authentication authentication) { + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) { // TODO: cache browseV2 results - return _entitySearchService.browseV2(entityNames, path, filter, input, start, count); + return _entitySearchService.browseV2( + entityNames, path, filter, input, start, count, searchFlags); } @SneakyThrows diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 7cba2e0ecc8cb..c20c16e0ea7d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -215,8 +215,9 @@ public BrowseResultV2 browseV2( @Nullable Filter filter, @Nonnull String input, int start, - int count) { - return esBrowseDAO.browseV2(entityName, path, filter, input, start, count); + int count, + @Nullable SearchFlags searchFlags) { + return esBrowseDAO.browseV2(entityName, path, filter, input, start, count, searchFlags); } @Nonnull @@ -227,8 +228,9 @@ public BrowseResultV2 browseV2( @Nullable Filter filter, @Nonnull String input, int start, - int count) { - return esBrowseDAO.browseV2(entityNames, path, filter, input, start, count); + int count, + @Nullable SearchFlags searchFlags) { + return esBrowseDAO.browseV2(entityNames, path, filter, input, start, count, searchFlags); } @Nonnull diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index 0a9a9fbbad086..b808588520089 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -21,6 +21,7 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; import com.linkedin.metadata.search.utils.ESUtils; @@ -34,6 +35,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -399,14 +401,15 @@ public BrowseResultV2 browseV2( @Nullable Filter filter, @Nonnull String input, int start, - int count) { + int count, + @Nullable SearchFlags searchFlags) { try { final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { final String finalInput = input.isEmpty() ? 
"*" : input; groupsResponse = client.search( - constructGroupsSearchRequestV2(entityName, path, filter, finalInput), + constructGroupsSearchRequestV2(entityName, path, filter, finalInput, searchFlags), RequestOptions.DEFAULT); } @@ -435,7 +438,8 @@ public BrowseResultV2 browseV2( @Nullable Filter filter, @Nonnull String input, int start, - int count) { + int count, + @Nullable SearchFlags searchFlags) { try { final SearchResponse groupsResponse; @@ -444,7 +448,7 @@ public BrowseResultV2 browseV2( groupsResponse = client.search( constructGroupsSearchRequestBrowseAcrossEntities( - entities, path, filter, finalInput), + entities, path, filter, finalInput, searchFlags), RequestOptions.DEFAULT); } @@ -472,7 +476,8 @@ private SearchRequest constructGroupsSearchRequestV2( @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input) { + @Nonnull String input, + @Nullable SearchFlags searchFlags) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -482,7 +487,8 @@ private SearchRequest constructGroupsSearchRequestV2( entityName, path, SearchUtil.transformFilterForEntities(filter, indexConvention), - input)); + input, + searchFlags)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -493,7 +499,8 @@ private SearchRequest constructGroupsSearchRequestBrowseAcrossEntities( @Nonnull List entities, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input) { + @Nonnull String input, + @Nullable SearchFlags searchFlags) { List entitySpecs = entities.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); @@ -509,7 +516,8 @@ private SearchRequest constructGroupsSearchRequestBrowseAcrossEntities( entitySpecs, path, SearchUtil.transformFilterForEntities(filter, indexConvention), - input)); + input, + searchFlags)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -537,7 +545,10 @@ private QueryBuilder buildQueryStringV2( @Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input) { + @Nonnull String input, + @Nullable SearchFlags searchFlags) { + SearchFlags finalSearchFlags = + Optional.ofNullable(searchFlags).orElse(new SearchFlags().setFulltext(true)); final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); @@ -545,7 +556,7 @@ private QueryBuilder buildQueryStringV2( EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); QueryBuilder query = SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getQuery(input, false); + .getQuery(input, Boolean.TRUE.equals(finalSearchFlags.isFulltext())); queryBuilder.must(query); filterSoftDeletedByDefault(filter, queryBuilder); @@ -567,14 +578,17 @@ private QueryBuilder buildQueryStringBrowseAcrossEntities( @Nonnull List entitySpecs, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input) { + @Nonnull String input, + @Nullable SearchFlags searchFlags) { + SearchFlags finalSearchFlags = + Optional.ofNullable(searchFlags).orElse(new SearchFlags().setFulltext(true)); final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); QueryBuilder query = 
SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getQuery(input, false); + .getQuery(input, Boolean.TRUE.equals(finalSearchFlags.isFulltext())); queryBuilder.must(query); if (!path.isEmpty()) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java index 7ddccb0d56724..4c704f81b4c13 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java @@ -135,14 +135,10 @@ private QueryBuilder buildInternalQuery( query.startsWith(STRUCTURED_QUERY_PREFIX) ? query.substring(STRUCTURED_QUERY_PREFIX.length()) : query; - - QueryStringQueryBuilder queryBuilder = QueryBuilders.queryStringQuery(withoutQueryPrefix); - queryBuilder.defaultOperator(Operator.AND); - getStandardFields(entitySpecs) - .forEach(entitySpec -> queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); - finalQuery.should(queryBuilder); + getStructuredQuery(customQueryConfig, entitySpecs, withoutQueryPrefix) + .ifPresent(finalQuery::should); if (exactMatchConfiguration.isEnableStructured()) { - getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix) + getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, withoutQueryPrefix) .ifPresent(finalQuery::should); } } @@ -415,6 +411,29 @@ private Optional getPrefixAndExactMatchQuery( return finalQuery.should().size() > 0 ? Optional.of(finalQuery) : Optional.empty(); } + private Optional getStructuredQuery( + @Nullable QueryConfiguration customQueryConfig, + List entitySpecs, + String sanitizedQuery) { + Optional result = Optional.empty(); + + final boolean executeStructuredQuery; + if (customQueryConfig != null) { + executeStructuredQuery = customQueryConfig.isStructuredQuery(); + } else { + executeStructuredQuery = !(isQuoted(sanitizedQuery) && exactMatchConfiguration.isExclusive()); + } + + if (executeStructuredQuery) { + QueryStringQueryBuilder queryBuilder = QueryBuilders.queryStringQuery(sanitizedQuery); + queryBuilder.defaultOperator(Operator.AND); + getStandardFields(entitySpecs) + .forEach(entitySpec -> queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); + result = Optional.of(queryBuilder); + } + return result; + } + private FunctionScoreQueryBuilder buildScoreFunctions( @Nullable QueryConfiguration customQueryConfig, @Nonnull List entitySpecs, diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java index 901bf803d2bca..e3a9d076dbef2 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java @@ -19,6 +19,13 @@ public class QueryConfiguration { private String queryRegex; @Builder.Default private boolean simpleQuery = true; + + /** + * Used to determine if standard structured query logic should be applied when relevant, i.e. + * fullText flag is false. Will not be added in cases where simpleQuery would be the standard. 
+ */ + @Builder.Default private boolean structuredQuery = true; + @Builder.Default private boolean exactMatchQuery = true; @Builder.Default private boolean prefixMatchQuery = true; private BoolQueryConfiguration boolQuery; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index b1b24ac97f0b8..676b80c8bea32 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -154,7 +154,8 @@ public BrowseResultV2 browseV2( @Nonnull String input, int start, int count, - @Nonnull Authentication authentication) + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) throws RemoteInvocationException; /** @@ -176,7 +177,8 @@ public BrowseResultV2 browseV2( @Nonnull String input, int start, int count, - @Nonnull Authentication authentication) + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) throws RemoteInvocationException; @Deprecated diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 3108345bd3937..653ef046ffc02 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -378,7 +378,8 @@ public BrowseResultV2 browseV2( @Nonnull String input, int start, int count, - @Nonnull Authentication authentication) { + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) { throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } @@ -391,7 +392,8 @@ public BrowseResultV2 browseV2( @Nonnull String input, int start, int count, - @Nonnull Authentication authentication) + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) throws RemoteInvocationException { throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 2fec88ad221fd..0d1c031db136e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -197,6 +197,7 @@ BrowseResult browse( * @param input search query * @param start start offset of first group * @param count max number of results requested + * @param searchFlags configuration options for search */ @Nonnull public BrowseResultV2 browseV2( @@ -205,7 +206,8 @@ public BrowseResultV2 browseV2( @Nullable Filter filter, @Nonnull String input, int start, - int count); + int count, + @Nullable SearchFlags searchFlags); /** * Gets browse snapshot of a given path @@ -216,6 +218,7 @@ public BrowseResultV2 browseV2( * @param input search query * @param start start offset of first group * @param count max number of results requested + * @param searchFlags configuration options for search */ @Nonnull public BrowseResultV2 browseV2( @@ -224,7 +227,8 @@ public BrowseResultV2 browseV2( @Nullable Filter filter, @Nonnull String 
input, int start, - int count); + int count, + @Nullable SearchFlags searchFlags); /** * Gets a list of paths for a given urn. From 63322225d96c5e19e11b78cb14eba13c30642027 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 7 Feb 2024 14:41:49 -0600 Subject: [PATCH 494/792] misc: datahub-upgrade improvements, aspect key & default aspects fixes (#9796) --- .../upgrade/UpgradeCliApplication.java | 10 ++- .../config/BackfillBrowsePathsV2Config.java | 10 ++- .../ReindexDataJobViaNodesCLLConfig.java | 8 +- .../upgrade/config/SystemUpdateCondition.java | 14 ++++ .../upgrade/config/SystemUpdateConfig.java | 23 ++++++ .../entity/steps/BackfillBrowsePathsV2.java | 16 +++- .../steps/BackfillBrowsePathsV2Step.java | 66 +++++++++++----- .../system/via/ReindexDataJobViaNodesCLL.java | 9 ++- .../via/ReindexDataJobViaNodesCLLStep.java | 28 ++++--- .../DatahubUpgradeNoSchemaRegistryTest.java | 24 +++++- ...pgradeCliApplicationTestConfiguration.java | 17 ++++- .../com/linkedin/metadata/EventUtils.java | 2 +- .../metadata/entity/EntityServiceImpl.java | 39 +++++++--- .../metadata/entity/EntityServiceTest.java | 48 +++++++++++- .../src/main/resources/application.yml | 8 ++ .../factory/entity/EntityServiceFactory.java | 19 +---- .../DUHESchemaRegistryFactory.java | 40 ---------- .../InternalSchemaRegistryFactory.java | 12 --- .../SchemaRegistryServiceFactory.java | 20 +++++ .../SystemUpdateSchemaRegistryFactory.java | 66 ++++++++++++++++ .../linkedin/metadata/boot/BootstrapStep.java | 21 +---- .../boot/kafka/MockDUHESerializer.java | 57 -------------- ...java => MockSystemUpdateDeserializer.java} | 49 ++++++------ .../kafka/MockSystemUpdateSerializer.java | 76 +++++++++++++++++++ .../resources/entity/AspectResourceTest.java | 2 +- 25 files changed, 451 insertions(+), 233 deletions(-) create mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java delete mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryServiceFactory.java create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SystemUpdateSchemaRegistryFactory.java delete mode 100644 metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java rename metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/{MockDUHEDeserializer.java => MockSystemUpdateDeserializer.java} (57%) create mode 100644 metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateSerializer.java diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index ff8bd542fbdff..50847da07be73 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -2,6 +2,10 @@ import com.linkedin.gms.factory.auth.AuthorizerChainFactory; import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory; +import com.linkedin.gms.factory.graphql.GraphQLEngineFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; +import com.linkedin.gms.factory.kafka.SimpleKafkaConsumerFactory; +import 
com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -24,7 +28,11 @@ classes = { ScheduledAnalyticsFactory.class, AuthorizerChainFactory.class, - DataHubAuthorizerFactory.class + DataHubAuthorizerFactory.class, + SimpleKafkaConsumerFactory.class, + KafkaEventConsumerFactory.class, + InternalSchemaRegistryFactory.class, + GraphQLEngineFactory.class }) }) public class UpgradeCliApplication { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index 406963c58fd71..2b2f4648f76e7 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -3,6 +3,7 @@ import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -11,7 +12,12 @@ public class BackfillBrowsePathsV2Config { @Bean public BackfillBrowsePathsV2 backfillBrowsePathsV2( - EntityService entityService, SearchService searchService) { - return new BackfillBrowsePathsV2(entityService, searchService); + EntityService entityService, + SearchService searchService, + @Value("${systemUpdate.browsePathsV2.enabled}") final boolean enabled, + @Value("${systemUpdate.browsePathsV2.reprocess.enabled}") final boolean reprocessEnabled, + @Value("${systemUpdate.browsePathsV2.batchSize}") final Integer batchSize) { + return new BackfillBrowsePathsV2( + entityService, searchService, enabled, reprocessEnabled, batchSize); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java index 06311e1853874..83dad80944f5f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java @@ -2,6 +2,7 @@ import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL; import com.linkedin.metadata.entity.EntityService; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -9,7 +10,10 @@ public class ReindexDataJobViaNodesCLLConfig { @Bean - public ReindexDataJobViaNodesCLL _reindexDataJobViaNodesCLL(EntityService entityService) { - return new ReindexDataJobViaNodesCLL(entityService); + public ReindexDataJobViaNodesCLL _reindexDataJobViaNodesCLL( + EntityService entityService, + @Value("${systemUpdate.dataJobNodeCLL.enabled}") final boolean enabled, + @Value("${systemUpdate.dataJobNodeCLL.batchSize}") final Integer batchSize) { + return new ReindexDataJobViaNodesCLL(entityService, enabled, batchSize); } } diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java new file mode 100644 index 0000000000000..ea432dfa9f7df --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java @@ -0,0 +1,14 @@ +package com.linkedin.datahub.upgrade.config; + +import org.springframework.boot.ApplicationArguments; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.type.AnnotatedTypeMetadata; + +public class SystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context.getBeanFactory().getBean(ApplicationArguments.class).getNonOptionArgs().stream() + .anyMatch("SystemUpdate"::equals); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 177d4b531ba86..cde3a29248fb5 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -8,6 +8,7 @@ import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.dao.producer.KafkaEventProducer; @@ -21,9 +22,12 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.kafka.KafkaProperties; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; @Slf4j @Configuration @@ -74,4 +78,23 @@ protected KafkaEventProducer duheKafkaEventProducer( duheSchemaRegistryConfig, kafkaConfiguration, properties)); return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker); } + + /** + * The ReindexDataJobViaNodesCLLConfig step requires publishing to MCL. Overriding the default + * producer with this special producer which doesn't require an active registry. + * + *

<p>Use when INTERNAL registry and is SYSTEM_UPDATE + * + * <p>
    This forces this producer into the EntityService + */ + @Primary + @Bean(name = "kafkaEventProducer") + @Conditional(SystemUpdateCondition.class) + @ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = InternalSchemaRegistryFactory.TYPE) + protected KafkaEventProducer kafkaEventProducer( + @Qualifier("duheKafkaEventProducer") KafkaEventProducer kafkaEventProducer) { + return kafkaEventProducer; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java index 4b9fc5bba0204..9b023e1e239a2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java @@ -11,8 +11,20 @@ public class BackfillBrowsePathsV2 implements Upgrade { private final List _steps; - public BackfillBrowsePathsV2(EntityService entityService, SearchService searchService) { - _steps = ImmutableList.of(new BackfillBrowsePathsV2Step(entityService, searchService)); + public BackfillBrowsePathsV2( + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + if (enabled) { + _steps = + ImmutableList.of( + new BackfillBrowsePathsV2Step( + entityService, searchService, reprocessEnabled, batchSize)); + } else { + _steps = ImmutableList.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 601ce4d25493c..2d64e0052ae82 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -16,6 +16,7 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; +import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; @@ -37,9 +38,8 @@ @Slf4j public class BackfillBrowsePathsV2Step implements UpgradeStep { - public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = - "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + private static final String UPGRADE_ID = "BackfillBrowsePathsV2Step"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; private static final Set ENTITY_TYPES_TO_MIGRATE = @@ -53,14 +53,22 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep { Constants.ML_MODEL_GROUP_ENTITY_NAME, Constants.ML_FEATURE_TABLE_ENTITY_NAME, Constants.ML_FEATURE_ENTITY_NAME); - private static final Integer BATCH_SIZE = 5000; - private final EntityService _entityService; - private final SearchService _searchService; - - public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { - _searchService = searchService; - _entityService = entityService; + private final EntityService entityService; + 
private final SearchService searchService; + + private final boolean reprocessEnabled; + private final Integer batchSize; + + public BackfillBrowsePathsV2Step( + EntityService entityService, + SearchService searchService, + boolean reprocessEnabled, + Integer batchSize) { + this.searchService = searchService; + this.entityService = entityService; + this.reprocessEnabled = reprocessEnabled; + this.batchSize = batchSize; } @Override @@ -78,11 +86,14 @@ public Function executable() { log.info( String.format( "Upgrading batch %s-%s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, entityType)); + migratedCount, migratedCount + batchSize, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); - migratedCount += BATCH_SIZE; + migratedCount += batchSize; } while (scrollId != null); } + + BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } @@ -91,27 +102,27 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final Filter filter; - if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) - && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + if (reprocessEnabled) { filter = backfillDefaultBrowsePathsV2Filter(); } else { filter = backfillBrowsePathsV2Filter(); } final ScrollResult scrollResult = - _searchService.scrollAcrossEntities( + searchService.scrollAcrossEntities( ImmutableList.of(entityType), "*", filter, null, scrollId, null, - BATCH_SIZE, + batchSize, new SearchFlags() .setFulltext(true) .setSkipCache(true) .setSkipHighlighting(true) .setSkipAggregates(true)); + if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -183,7 +194,7 @@ private Filter backfillDefaultBrowsePathsV2Filter() { private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exception { BrowsePathsV2 browsePathsV2 = - DefaultAspectsUtil.buildDefaultBrowsePathV2(urn, true, _entityService); + DefaultAspectsUtil.buildDefaultBrowsePathV2(urn, true, entityService); log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); @@ -193,12 +204,12 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setSystemMetadata( new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal(proposal, auditStamp, true); + entityService.ingestProposal(proposal, auditStamp, true); } @Override public String id() { - return "BackfillBrowsePathsV2Step"; + return UPGRADE_ID; } /** @@ -211,7 +222,22 @@ public boolean isOptional() { } @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variables REPROCESS_DEFAULT_BROWSE_PATHS_V2 & BACKFILL_BROWSE_PATHS_V2 to determine whether to + * skip. + */ public boolean skip(UpgradeContext context) { - return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); + boolean envEnabled = Boolean.parseBoolean(System.getenv("BACKFILL_BROWSE_PATHS_V2")); + + if (reprocessEnabled && envEnabled) { + return false; + } + + boolean previouslyRun = entityService.exists(UPGRADE_ID_URN, true); + if (previouslyRun) { + log.info("{} was already run. 
Skipping.", id()); + } + return (previouslyRun || !envEnabled); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java index 41179a50c4b54..59975693322d1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLL.java @@ -18,8 +18,13 @@ public class ReindexDataJobViaNodesCLL implements Upgrade { private final List _steps; - public ReindexDataJobViaNodesCLL(EntityService entityService) { - _steps = ImmutableList.of(new ReindexDataJobViaNodesCLLStep(entityService)); + public ReindexDataJobViaNodesCLL( + EntityService entityService, boolean enabled, Integer batchSize) { + if (enabled) { + _steps = ImmutableList.of(new ReindexDataJobViaNodesCLLStep(entityService, batchSize)); + } else { + _steps = ImmutableList.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java index 70afbc3d205b2..56166caf5b57e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/via/ReindexDataJobViaNodesCLLStep.java @@ -11,7 +11,6 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; -import java.net.URISyntaxException; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; @@ -21,12 +20,12 @@ public class ReindexDataJobViaNodesCLLStep implements UpgradeStep { private static final String UPGRADE_ID = "via-node-cll-reindex-datajob"; private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); - private static final Integer BATCH_SIZE = 5000; + private final EntityService entityService; + private final Integer batchSize; - private final EntityService _entityService; - - public ReindexDataJobViaNodesCLLStep(EntityService entityService) { - _entityService = entityService; + public ReindexDataJobViaNodesCLLStep(EntityService entityService, Integer batchSize) { + this.entityService = entityService; + this.batchSize = batchSize; } @Override @@ -35,17 +34,16 @@ public Function executable() { RestoreIndicesArgs args = new RestoreIndicesArgs() .setAspectName(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) - .setUrnLike("urn:li:" + DATA_JOB_ENTITY_NAME + ":%"); + .setUrnLike("urn:li:" + DATA_JOB_ENTITY_NAME + ":%") + .setBatchSize(batchSize); RestoreIndicesResult result = - _entityService.restoreIndices(args, x -> context.report().addLine((String) x)); + entityService.restoreIndices(args, x -> context.report().addLine((String) x)); context.report().addLine("Rows migrated: " + result.rowsMigrated); context.report().addLine("Rows ignored: " + result.ignored); - try { - BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, _entityService); - context.report().addLine("State updated: " + UPGRADE_ID_URN); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + + BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); + context.report().addLine("State updated: " + UPGRADE_ID_URN); + return new 
DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } @@ -70,7 +68,7 @@ public boolean isOptional() { * variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT to determine whether to skip. */ public boolean skip(UpgradeContext context) { - boolean previouslyRun = _entityService.exists(UPGRADE_ID_URN, true); + boolean previouslyRun = entityService.exists(UPGRADE_ID_URN, true); boolean envFlagRecommendsSkip = Boolean.parseBoolean(System.getenv("SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT")); if (previouslyRun) { diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index 83b8e028727ce..4c9e12c0ed151 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -4,6 +4,8 @@ import static org.testng.AssertJUnit.assertNotNull; import com.linkedin.datahub.upgrade.system.SystemUpdate; +import com.linkedin.metadata.dao.producer.KafkaEventProducer; +import com.linkedin.metadata.entity.EntityServiceImpl; import java.util.List; import java.util.Map; import java.util.Optional; @@ -19,19 +21,37 @@ classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, properties = { "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic", + "METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=test_mcl_versioned_topic" + }, + args = {"-u", "SystemUpdate"}) public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { @Autowired @Named("systemUpdate") private SystemUpdate systemUpdate; + @Autowired + @Named("kafkaEventProducer") + private KafkaEventProducer kafkaEventProducer; + + @Autowired + @Named("duheKafkaEventProducer") + private KafkaEventProducer duheKafkaEventProducer; + + @Autowired private EntityServiceImpl entityService; + @Test public void testSystemUpdateInit() { assertNotNull(systemUpdate); } + @Test + public void testSystemUpdateKafkaProducerOverride() { + assertEquals(kafkaEventProducer, duheKafkaEventProducer); + assertEquals(entityService.get_producer(), duheKafkaEventProducer); + } + @Test public void testSystemUpdateSend() { UpgradeStepResult.Result result = diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index be28b7f739cf5..5c2d6fff0f07c 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -1,15 +1,21 @@ package com.linkedin.datahub.upgrade; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.registry.SchemaRegistryService; import com.linkedin.metadata.search.SearchService; import 
com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders; import io.ebean.Database; +import java.util.Optional; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; @TestConfiguration @@ -20,8 +26,6 @@ public class UpgradeCliApplicationTestConfiguration { @MockBean private Database ebeanServer; - @MockBean private EntityService _entityService; - @MockBean private SearchService searchService; @MockBean private GraphService graphService; @@ -31,4 +35,11 @@ public class UpgradeCliApplicationTestConfiguration { @MockBean ConfigEntityRegistry configEntityRegistry; @MockBean public EntityIndexBuilders entityIndexBuilders; + + @Bean + public SchemaRegistryService schemaRegistryService() { + SchemaRegistryService mockService = mock(SchemaRegistryService.class); + when(mockService.getSchemaIdForTopic(anyString())).thenReturn(Optional.of(0)); + return mockService; + } } diff --git a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java index 645c2fe210e09..adff32d5d336d 100644 --- a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java +++ b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java @@ -57,7 +57,7 @@ public class EventUtils { private static final Schema ORIGINAL_MCP_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeProposal.avsc"); - private static final Schema ORIGINAL_MCL_AVRO_SCHEMA = + public static final Schema ORIGINAL_MCL_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeLog.avsc"); private static final Schema ORIGINAL_FMCL_AVRO_SCHEMA = diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index 7f15e3a7fd8fc..eec5c6120886d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -15,6 +15,7 @@ import com.codahale.metrics.Timer; import com.datahub.util.RecordUtils; import com.datahub.util.exception.ModelConversionException; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterators; @@ -146,7 +147,8 @@ public class EntityServiceImpl implements EntityService { private static final int DEFAULT_MAX_TRANSACTION_RETRY = 3; protected final AspectDao _aspectDao; - private final EventProducer _producer; + + @VisibleForTesting @Getter private final EventProducer _producer; private final EntityRegistry _entityRegistry; private final Map> _entityToValidAspects; private RetentionService _retentionService; @@ -637,10 +639,15 @@ public List ingestAspects( @Override public List ingestAspects( @Nonnull final AspectsBatch aspectsBatch, boolean emitMCL, boolean overwrite) { + Set items = new HashSet<>(aspectsBatch.getItems()); + + // Generate additional items as needed + items.addAll(DefaultAspectsUtil.getAdditionalChanges(aspectsBatch, this, enableBrowseV2)); + AspectsBatch withDefaults = AspectsBatchImpl.builder().items(items).build(); Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), 
"ingestAspectsToLocalDB").time(); - List ingestResults = ingestAspectsToLocalDB(aspectsBatch, overwrite); + List ingestResults = ingestAspectsToLocalDB(withDefaults, overwrite); List mclResults = emitMCL(ingestResults, emitMCL); ingestToLocalDBTimer.stop(); @@ -964,7 +971,7 @@ public IngestResult ingestProposal( */ @Override public Set ingestProposal(AspectsBatch aspectsBatch, final boolean async) { - Stream timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch); + Stream timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch, async); Stream nonTimeseriesIngestResults = async ? ingestProposalAsync(aspectsBatch) : ingestProposalSync(aspectsBatch); @@ -978,7 +985,8 @@ public Set ingestProposal(AspectsBatch aspectsBatch, final boolean * @param aspectsBatch timeseries upserts batch * @return returns ingest proposal result, however was never in the MCP topic */ - private Stream ingestTimeseriesProposal(AspectsBatch aspectsBatch) { + private Stream ingestTimeseriesProposal( + AspectsBatch aspectsBatch, final boolean async) { List unsupported = aspectsBatch.getItems().stream() .filter( @@ -992,6 +1000,20 @@ private Stream ingestTimeseriesProposal(AspectsBatch aspectsBatch) + unsupported.stream().map(BatchItem::getChangeType).collect(Collectors.toSet())); } + if (!async) { + // Create default non-timeseries aspects for timeseries aspects + List timeseriesItems = + aspectsBatch.getItems().stream() + .filter(item -> item.getAspectSpec().isTimeseries()) + .collect(Collectors.toList()); + + List defaultAspects = + DefaultAspectsUtil.getAdditionalChanges( + AspectsBatchImpl.builder().items(timeseriesItems).build(), this, enableBrowseV2); + ingestProposalSync(AspectsBatchImpl.builder().items(defaultAspects).build()); + } + + // Emit timeseries MCLs List, Boolean>>>> timeseriesResults = aspectsBatch.getItems().stream() .filter(item -> item.getAspectSpec().isTimeseries()) @@ -1080,17 +1102,10 @@ private Stream ingestProposalAsync(AspectsBatch aspectsBatch) { } private Stream ingestProposalSync(AspectsBatch aspectsBatch) { - Set items = new HashSet<>(aspectsBatch.getItems()); - - // Generate additional items as needed - items.addAll(DefaultAspectsUtil.getAdditionalChanges(aspectsBatch, this, enableBrowseV2)); - - AspectsBatch withDefaults = AspectsBatchImpl.builder().items(items).build(); - AspectsBatchImpl nonTimeseries = AspectsBatchImpl.builder() .items( - withDefaults.getItems().stream() + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList())) .build(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index ea4e97d264bca..384b54c7a1c8d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -479,7 +479,7 @@ public void testIngestAspectsGetLatestAspects() throws Exception { assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); - verify(_mockProducer, times(2)) + verify(_mockProducer, times(3)) .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); verifyNoMoreInteractions(_mockProducer); @@ -772,6 +772,12 @@ public void testUpdateGetAspect() throws AssertionError { .produceMetadataChangeLog( Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), 
Mockito.any()); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserKey")), + Mockito.any()); + verifyNoMoreInteractions(_mockProducer); } @@ -824,6 +830,13 @@ public void testGetAspectAtVersion() throws AssertionError { readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + // check key aspect + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserKey")), + Mockito.any()); + verifyNoMoreInteractions(_mockProducer); } @@ -1094,13 +1107,22 @@ public void testIngestGetLatestAspect() throws AssertionError { ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); verify(_mockProducer, times(1)) - .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserInfo")), + mclCaptor.capture()); MetadataChangeLog mcl = mclCaptor.getValue(); assertEquals(mcl.getEntityType(), "corpuser"); assertNull(mcl.getPreviousAspectValue()); assertNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserKey")), + Mockito.any()); + verifyNoMoreInteractions(_mockProducer); reset(_mockProducer); @@ -1201,7 +1223,16 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); verify(_mockProducer, times(2)) - .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserInfo")), + Mockito.any()); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserKey")), + Mockito.any()); verifyNoMoreInteractions(_mockProducer); } @@ -1234,9 +1265,18 @@ public void testIngestSameAspect() throws AssertionError { RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserKey")), + Mockito.any()); + ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); verify(_mockProducer, times(1)) - .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + .produceMetadataChangeLog( + Mockito.eq(entityUrn), + Mockito.eq(_testEntityRegistry.getEntitySpec("corpUser").getAspectSpec("corpUserInfo")), + mclCaptor.capture()); MetadataChangeLog mcl = mclCaptor.getValue(); assertEquals(mcl.getEntityType(), "corpuser"); assertNull(mcl.getPreviousAspectValue()); diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index d4c11d4aa53bd..c2a0d508b57d6 100644 --- 
a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -314,6 +314,14 @@ systemUpdate: maxBackOffs: ${BOOTSTRAP_SYSTEM_UPDATE_MAX_BACK_OFFS:50} backOffFactor: ${BOOTSTRAP_SYSTEM_UPDATE_BACK_OFF_FACTOR:2} # Multiplicative factor for back off, default values will result in waiting 5min 15s waitForSystemUpdate: ${BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE:true} + dataJobNodeCLL: + enabled: ${BOOTSTRAP_SYSTEM_UPDATE_DATA_JOB_NODE_CLL_ENABLED:true} + batchSize: ${BOOTSTRAP_SYSTEM_UPDATE_DATA_JOB_NODE_CLL_BATCH_SIZE:200} + browsePathsV2: + enabled: ${BOOTSTRAP_SYSTEM_UPDATE_BROWSE_PATHS_V2_ENABLED:true} + batchSize: ${BOOTSTRAP_SYSTEM_UPDATE_BROWSE_PATHS_V2_BATCH_SIZE:5000} + reprocess: + enabled: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:false} structuredProperties: enabled: ${ENABLE_STRUCTURED_PROPERTIES_HOOK:true} # applies structured properties mappings diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java index 871f16d97be33..2ccdee5fb1dbf 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java @@ -1,20 +1,15 @@ package com.linkedin.gms.factory.entity; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; -import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.dao.producer.KafkaEventProducer; -import com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.service.UpdateIndicesService; -import com.linkedin.mxe.TopicConvention; import javax.annotation.Nonnull; -import org.apache.avro.generic.IndexedRecord; -import org.apache.kafka.clients.producer.Producer; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; @@ -28,26 +23,16 @@ public class EntityServiceFactory { private Integer _ebeanMaxTransactionRetry; @Bean(name = "entityService") - @DependsOn({ - "entityAspectDao", - "kafkaEventProducer", - "kafkaHealthChecker", - TopicConventionFactory.TOPIC_CONVENTION_BEAN, - "entityRegistry" - }) + @DependsOn({"entityAspectDao", "kafkaEventProducer", "entityRegistry"}) @Nonnull protected EntityService createInstance( - Producer producer, - TopicConvention convention, - KafkaHealthChecker kafkaHealthChecker, + @Qualifier("kafkaEventProducer") final KafkaEventProducer eventProducer, @Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry, ConfigurationProvider configurationProvider, UpdateIndicesService updateIndicesService, @Value("${featureFlags.showBrowseV2}") final boolean enableBrowsePathV2) { - final KafkaEventProducer eventProducer = - new KafkaEventProducer(producer, convention, kafkaHealthChecker); FeatureFlags featureFlags = configurationProvider.getFeatureFlags(); return new EntityServiceImpl( diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java deleted file mode 100644 index 4819984307af9..0000000000000 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.linkedin.gms.factory.kafka.schemaregistry; - -import static com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener.TOPIC_NAME; - -import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.boot.kafka.MockDUHEDeserializer; -import com.linkedin.metadata.boot.kafka.MockDUHESerializer; -import com.linkedin.metadata.config.kafka.KafkaConfiguration; -import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; -import java.util.HashMap; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Slf4j -@Configuration -public class DUHESchemaRegistryFactory { - - public static final String DUHE_SCHEMA_REGISTRY_TOPIC_KEY = "duheTopicName"; - - @Value(TOPIC_NAME) - private String duheTopicName; - - /** Configure Kafka Producer/Consumer processes with a custom schema registry. */ - @Bean("duheSchemaRegistryConfig") - protected SchemaRegistryConfig duheSchemaRegistryConfig(ConfigurationProvider provider) { - Map props = new HashMap<>(); - KafkaConfiguration kafkaConfiguration = provider.getKafka(); - - props.put( - AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, - kafkaConfiguration.getSchemaRegistry().getUrl()); - props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName); - - log.info("DataHub System Update Registry"); - return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props); - } -} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java index 8c814e5054758..46b27195ecc67 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java @@ -1,11 +1,7 @@ package com.linkedin.gms.factory.kafka.schemaregistry; -import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.config.kafka.KafkaConfiguration; -import com.linkedin.metadata.registry.SchemaRegistryService; -import com.linkedin.metadata.registry.SchemaRegistryServiceImpl; -import com.linkedin.mxe.TopicConvention; import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import io.confluent.kafka.serializers.KafkaAvroDeserializer; import io.confluent.kafka.serializers.KafkaAvroSerializer; @@ -17,7 +13,6 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.DependsOn; @Slf4j @Configuration @@ -45,11 +40,4 @@ protected SchemaRegistryConfig 
getInstance( kafkaConfiguration.getSchemaRegistry().getUrl()); return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props); } - - @Bean(name = "schemaRegistryService") - @Nonnull - @DependsOn({TopicConventionFactory.TOPIC_CONVENTION_BEAN}) - protected SchemaRegistryService schemaRegistryService(TopicConvention convention) { - return new SchemaRegistryServiceImpl(convention); - } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryServiceFactory.java new file mode 100644 index 0000000000000..a6869321d796f --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryServiceFactory.java @@ -0,0 +1,20 @@ +package com.linkedin.gms.factory.kafka.schemaregistry; + +import com.linkedin.gms.factory.common.TopicConventionFactory; +import com.linkedin.metadata.registry.SchemaRegistryService; +import com.linkedin.metadata.registry.SchemaRegistryServiceImpl; +import com.linkedin.mxe.TopicConvention; +import javax.annotation.Nonnull; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.DependsOn; + +@Configuration +public class SchemaRegistryServiceFactory { + @Bean(name = "schemaRegistryService") + @Nonnull + @DependsOn({TopicConventionFactory.TOPIC_CONVENTION_BEAN}) + protected SchemaRegistryService schemaRegistryService(TopicConvention convention) { + return new SchemaRegistryServiceImpl(convention); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SystemUpdateSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SystemUpdateSchemaRegistryFactory.java new file mode 100644 index 0000000000000..d02cdc0e68f52 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SystemUpdateSchemaRegistryFactory.java @@ -0,0 +1,66 @@ +package com.linkedin.gms.factory.kafka.schemaregistry; + +import static com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener.TOPIC_NAME; + +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.boot.kafka.MockSystemUpdateDeserializer; +import com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; +import com.linkedin.metadata.registry.SchemaRegistryService; +import com.linkedin.mxe.Topics; +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; +import java.util.HashMap; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Slf4j +@Configuration +public class SystemUpdateSchemaRegistryFactory { + + public static final String SYSTEM_UPDATE_TOPIC_KEY_PREFIX = "data-hub.system-update.topic-key."; + public static final String SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX = ".id"; + + public static final String DUHE_SCHEMA_REGISTRY_TOPIC_KEY = + SYSTEM_UPDATE_TOPIC_KEY_PREFIX + "duhe"; + public static final String MCL_VERSIONED_SCHEMA_REGISTRY_TOPIC_KEY = + SYSTEM_UPDATE_TOPIC_KEY_PREFIX + "mcl-versioned"; + + @Value(TOPIC_NAME) + private String 
duheTopicName; + + @Value("${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}") + private String mclTopicName; + + /** Configure Kafka Producer/Consumer processes with a custom schema registry. */ + @Bean("duheSchemaRegistryConfig") + protected SchemaRegistryConfig duheSchemaRegistryConfig( + final ConfigurationProvider provider, final SchemaRegistryService schemaRegistryService) { + Map props = new HashMap<>(); + KafkaConfiguration kafkaConfiguration = provider.getKafka(); + + props.put( + AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, + kafkaConfiguration.getSchemaRegistry().getUrl()); + + // topic names + props.putAll( + Map.of( + DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName, + MCL_VERSIONED_SCHEMA_REGISTRY_TOPIC_KEY, mclTopicName)); + + // topic ordinals + props.putAll( + Map.of( + DUHE_SCHEMA_REGISTRY_TOPIC_KEY + SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX, + schemaRegistryService.getSchemaIdForTopic(duheTopicName).get().toString(), + MCL_VERSIONED_SCHEMA_REGISTRY_TOPIC_KEY + SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX, + schemaRegistryService.getSchemaIdForTopic(mclTopicName).get().toString())); + + log.info("DataHub System Update Registry"); + return new SchemaRegistryConfig( + MockSystemUpdateSerializer.class, MockSystemUpdateDeserializer.class, props); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java index a79bdacfc55e9..2dccda4243bca 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java @@ -1,16 +1,15 @@ package com.linkedin.metadata.boot; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.DataHubUpgradeKey; +import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.upgrade.DataHubUpgradeResult; -import java.net.URISyntaxException; import javax.annotation.Nonnull; /** A single step in the Bootstrap process. 
*/ @@ -40,24 +39,10 @@ static Urn getUpgradeUrn(String upgradeId) { new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } - static void setUpgradeResult(Urn urn, EntityService entityService) throws URISyntaxException { - final AuditStamp auditStamp = - new AuditStamp() - .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()); + static void setUpgradeResult(Urn urn, EntityService entityService) { final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); - // Workaround because entity service does not auto-generate the key aspect for us - final MetadataChangeProposal keyProposal = new MetadataChangeProposal(); - final DataHubUpgradeKey upgradeKey = new DataHubUpgradeKey().setId(urn.getId()); - keyProposal.setEntityUrn(urn); - keyProposal.setEntityType(Constants.DATA_HUB_UPGRADE_ENTITY_NAME); - keyProposal.setAspectName(Constants.DATA_HUB_UPGRADE_KEY_ASPECT_NAME); - keyProposal.setAspect(GenericRecordUtils.serializeAspect(upgradeKey)); - keyProposal.setChangeType(ChangeType.UPSERT); - entityService.ingestProposal(keyProposal, auditStamp, false); - // Ingest the upgrade result final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(urn); @@ -65,6 +50,6 @@ static void setUpgradeResult(Urn urn, EntityService entityService) throws URI upgradeProposal.setAspectName(Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME); upgradeProposal.setAspect(GenericRecordUtils.serializeAspect(upgradeResult)); upgradeProposal.setChangeType(ChangeType.UPSERT); - entityService.ingestProposal(upgradeProposal, auditStamp, false); + entityService.ingestProposal(upgradeProposal, AuditStampUtils.createDefaultAuditStamp(), false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java deleted file mode 100644 index 36fe514d5536f..0000000000000 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java +++ /dev/null @@ -1,57 +0,0 @@ -package com.linkedin.metadata.boot.kafka; - -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; - -import com.linkedin.metadata.EventUtils; -import io.confluent.kafka.schemaregistry.avro.AvroSchema; -import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; -import io.confluent.kafka.serializers.KafkaAvroSerializer; -import java.io.IOException; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; - -/** Used for early bootstrap to avoid contact with not yet existing schema registry */ -@Slf4j -public class MockDUHESerializer extends KafkaAvroSerializer { - - private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; - - private String topicName; - - public MockDUHESerializer() { - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - public MockDUHESerializer(SchemaRegistryClient client) { - super(client); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - public MockDUHESerializer(SchemaRegistryClient client, Map props) { - super(client, props); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - @Override - public void 
configure(Map configs, boolean isKey) { - super.configure(configs, isKey); - topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); - } - - private MockSchemaRegistryClient buildMockSchemaRegistryClient() { - MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient(); - try { - schemaRegistry.register( - topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); - return schemaRegistry; - } catch (IOException | RestClientException e) { - throw new RuntimeException(e); - } - } - - public static String topicToSubjectName(String topicName) { - return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; - } -} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateDeserializer.java similarity index 57% rename from metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java rename to metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateDeserializer.java index e631f776abd08..74a20cdacbb21 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateDeserializer.java @@ -1,50 +1,49 @@ package com.linkedin.metadata.boot.kafka; -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; -import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; +import static com.linkedin.gms.factory.kafka.schemaregistry.SystemUpdateSchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; +import static com.linkedin.gms.factory.kafka.schemaregistry.SystemUpdateSchemaRegistryFactory.SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX; +import static com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer.topicToSubjectName; import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroDeserializer; import java.io.IOException; import java.util.Map; import lombok.extern.slf4j.Slf4j; -/** Used for early bootstrap to avoid contact with not yet existing schema registry */ +/** + * Used for early bootstrap to avoid contact with not yet existing schema registry Only supports the + * DUHE topic + */ @Slf4j -public class MockDUHEDeserializer extends KafkaAvroDeserializer { +public class MockSystemUpdateDeserializer extends KafkaAvroDeserializer { private String topicName; - - public MockDUHEDeserializer() { - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - public MockDUHEDeserializer(SchemaRegistryClient client) { - super(client); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - public MockDUHEDeserializer(SchemaRegistryClient client, Map props) { - super(client, props); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + private Integer schemaId; @Override public void configure(Map configs, boolean isKey) { super.configure(configs, isKey); topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); + schemaId = + 
Integer.valueOf( + configs + .get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY + SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX) + .toString()); + this.schemaRegistry = buildMockSchemaRegistryClient(); } private MockSchemaRegistryClient buildMockSchemaRegistryClient() { - MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); + MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(schemaId); try { schemaRegistry.register( - topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); + topicToSubjectName(topicName), + new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA), + 0, + schemaId); return schemaRegistry; } catch (IOException | RestClientException e) { throw new RuntimeException(e); @@ -52,13 +51,19 @@ private MockSchemaRegistryClient buildMockSchemaRegistryClient() { } public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { + private final int schemaId; + + public MockSchemaRegistryClient2(int schemaId) { + this.schemaId = schemaId; + } + /** * Previously used topics can have schema ids > 1 which fully match however we are replacing * that registry so force schema id to 1 */ @Override public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { - return super.getSchemaById(1); + return super.getSchemaById(schemaId); } } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateSerializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateSerializer.java new file mode 100644 index 0000000000000..14aac2758a69d --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockSystemUpdateSerializer.java @@ -0,0 +1,76 @@ +package com.linkedin.metadata.boot.kafka; + +import static com.linkedin.gms.factory.kafka.schemaregistry.SystemUpdateSchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; +import static com.linkedin.gms.factory.kafka.schemaregistry.SystemUpdateSchemaRegistryFactory.MCL_VERSIONED_SCHEMA_REGISTRY_TOPIC_KEY; +import static com.linkedin.gms.factory.kafka.schemaregistry.SystemUpdateSchemaRegistryFactory.SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX; +import static com.linkedin.gms.factory.kafka.schemaregistry.SystemUpdateSchemaRegistryFactory.SYSTEM_UPDATE_TOPIC_KEY_PREFIX; + +import com.linkedin.metadata.EventUtils; +import com.linkedin.util.Pair; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import io.confluent.kafka.serializers.KafkaAvroSerializer; +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ +@Slf4j +public class MockSystemUpdateSerializer extends KafkaAvroSerializer { + + private static final String DATAHUB_SYSTEM_UPDATE_SUBJECT_SUFFIX = "-value"; + + private static final Map AVRO_SCHEMA_MAP = + Map.of( + DUHE_SCHEMA_REGISTRY_TOPIC_KEY, new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA), + MCL_VERSIONED_SCHEMA_REGISTRY_TOPIC_KEY, + new AvroSchema(EventUtils.ORIGINAL_MCL_AVRO_SCHEMA)); + + private Map> topicNameToAvroSchemaMap; + + @Override + public void configure(Map configs, boolean isKey) { + super.configure(configs, isKey); + topicNameToAvroSchemaMap = + configs.entrySet().stream() + .filter( + e -> + 
e.getKey().startsWith(SYSTEM_UPDATE_TOPIC_KEY_PREFIX) + && !e.getKey().endsWith(SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX) + && e.getValue() instanceof String) + .map( + e -> { + Integer id = + Integer.valueOf( + (String) configs.get(e.getKey() + SYSTEM_UPDATE_TOPIC_KEY_ID_SUFFIX)); + return Pair.of( + (String) e.getValue(), Pair.of(AVRO_SCHEMA_MAP.get(e.getKey()), id)); + }) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + private MockSchemaRegistryClient buildMockSchemaRegistryClient() { + MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient(); + + if (topicNameToAvroSchemaMap != null) { + topicNameToAvroSchemaMap.forEach( + (topicName, schemaId) -> { + try { + schemaRegistry.register( + topicToSubjectName(topicName), schemaId.getFirst(), 0, schemaId.getSecond()); + } catch (IOException | RestClientException e) { + throw new RuntimeException(e); + } + }); + } + + return schemaRegistry; + } + + public static String topicToSubjectName(String topicName) { + return topicName + DATAHUB_SYSTEM_UPDATE_SUBJECT_SUFFIX; + } +} diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java index 1678fe92ec70e..17c5160494722 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java @@ -122,7 +122,7 @@ public void testAsyncDefaultAspects() throws URISyntaxException { .request(req) .build()))); _aspectResource.ingestProposal(mcp, "false"); - verify(_producer, times(5)) + verify(_producer, times(10)) .produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class)); verifyNoMoreInteractions(_producer); } From 09d4260b18f63e33eda930bd067b4e9e65067478 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 7 Feb 2024 17:03:22 -0600 Subject: [PATCH 495/792] chore(pegasus): upgrade pegasus, remove gradle 8 hacks for pegasus plugin (#9798) --- build.gradle | 15 +- buildSrc/build.gradle | 3 - .../pegasus/gradle/PegasusPlugin.java | 2444 ----------------- .../gradle/tasks/ChangedFileReportTask.java | 124 - 4 files changed, 8 insertions(+), 2578 deletions(-) delete mode 100644 buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java delete mode 100644 buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java diff --git a/build.gradle b/build.gradle index ba61d97f0ed6e..ea81d26355027 100644 --- a/build.gradle +++ b/build.gradle @@ -30,7 +30,7 @@ buildscript { ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.48.4' + ext.pegasusVersion = '29.51.0' ext.mavenVersion = '3.6.3' ext.springVersion = '6.1.2' ext.springBootVersion = '3.2.1' @@ -269,13 +269,14 @@ allprojects { apply plugin: 'eclipse' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' - tasks.withType(Test).configureEach { - // https://docs.gradle.org/current/userguide/performance.html - maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 + tasks.withType(Test).configureEach { task -> if (task.project.name != "metadata-io") { + // https://docs.gradle.org/current/userguide/performance.html 
+ maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 - if (project.configurations.getByName("testImplementation").getDependencies() - .any{ it.getName().contains("testng") }) { - useTestNG() + if (project.configurations.getByName("testImplementation").getDependencies() + .any { it.getName().contains("testng") }) { + useTestNG() + } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 0c2d91e1f7ac1..88900e06d4845 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -25,7 +25,4 @@ dependencies { compileOnly 'org.projectlombok:lombok:1.18.30' annotationProcessor 'org.projectlombok:lombok:1.18.30' - - // pegasus dependency, overrides for tasks - implementation 'com.linkedin.pegasus:gradle-plugins:29.48.4' } \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java deleted file mode 100644 index 2460abcad6f9e..0000000000000 --- a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java +++ /dev/null @@ -1,2444 +0,0 @@ -/* - * Copyright (c) 2019 LinkedIn Corp. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.linkedin.pegasus.gradle; - -import com.linkedin.pegasus.gradle.PegasusOptions.IdlOptions; -import com.linkedin.pegasus.gradle.internal.CompatibilityLogChecker; -import com.linkedin.pegasus.gradle.tasks.ChangedFileReportTask; -import com.linkedin.pegasus.gradle.tasks.CheckIdlTask; -import com.linkedin.pegasus.gradle.tasks.CheckPegasusSnapshotTask; -import com.linkedin.pegasus.gradle.tasks.CheckRestModelTask; -import com.linkedin.pegasus.gradle.tasks.CheckSnapshotTask; -import com.linkedin.pegasus.gradle.tasks.GenerateAvroSchemaTask; -import com.linkedin.pegasus.gradle.tasks.GenerateDataTemplateTask; -import com.linkedin.pegasus.gradle.tasks.GeneratePegasusSnapshotTask; -import com.linkedin.pegasus.gradle.tasks.GenerateRestClientTask; -import com.linkedin.pegasus.gradle.tasks.GenerateRestModelTask; -import com.linkedin.pegasus.gradle.tasks.PublishRestModelTask; -import com.linkedin.pegasus.gradle.tasks.TranslateSchemasTask; -import com.linkedin.pegasus.gradle.tasks.ValidateExtensionSchemaTask; -import com.linkedin.pegasus.gradle.tasks.ValidateSchemaAnnotationTask; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.lang.reflect.Method; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.TreeSet; -import java.util.function.Function; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import org.gradle.api.Action; -import org.gradle.api.GradleException; -import org.gradle.api.Plugin; -import 
org.gradle.api.Project; -import org.gradle.api.Task; -import org.gradle.api.artifacts.Configuration; -import org.gradle.api.artifacts.ConfigurationContainer; -import org.gradle.api.file.FileCollection; -import org.gradle.api.plugins.JavaBasePlugin; -import org.gradle.api.plugins.JavaPlugin; -import org.gradle.api.plugins.JavaPluginConvention; -import org.gradle.api.plugins.JavaPluginExtension; -import org.gradle.api.publish.PublishingExtension; -import org.gradle.api.publish.ivy.IvyPublication; -import org.gradle.api.publish.ivy.plugins.IvyPublishPlugin; -import org.gradle.api.tasks.Copy; -import org.gradle.api.tasks.Delete; -import org.gradle.api.tasks.SourceSet; -import org.gradle.api.tasks.SourceSetContainer; -import org.gradle.api.tasks.Sync; -import org.gradle.api.tasks.TaskProvider; -import org.gradle.api.tasks.bundling.Jar; -import org.gradle.api.tasks.compile.JavaCompile; -import org.gradle.api.tasks.javadoc.Javadoc; -import org.gradle.language.base.plugins.LifecycleBasePlugin; -import org.gradle.language.jvm.tasks.ProcessResources; -import org.gradle.plugins.ide.eclipse.EclipsePlugin; -import org.gradle.plugins.ide.eclipse.model.EclipseModel; -import org.gradle.plugins.ide.idea.IdeaPlugin; -import org.gradle.plugins.ide.idea.model.IdeaModule; -import org.gradle.util.GradleVersion; - - -/** - * Pegasus code generation plugin. - * The supported project layout for this plugin is as follows: - * - *
    - * <pre>
    - *   --- api/
    - *   |   --- build.gradle
    - *   |   --- src/
    - *   |       --- <sourceSet>/
    - *   |       |   --- idl/
    - *   |       |   |   --- <published idl (.restspec.json) files>
    - *   |       |   --- java/
    - *   |       |   |   --- <packageName>/
    - *   |       |   |       --- <common java files>
    - *   |       |   --- pegasus/
    - *   |       |       --- <packageName>/
    - *   |       |           --- <data schema (.pdsc) files>
    - *   |       --- <sourceSet>GeneratedDataTemplate/
    - *   |       |   --- java/
    - *   |       |       --- <packageName>/
    - *   |       |           --- <data template source files generated from data schema (.pdsc) files>
    - *   |       --- <sourceSet>GeneratedAvroSchema/
    - *   |       |   --- avro/
    - *   |       |       --- <packageName>/
    - *   |       |           --- <avsc avro schema files (.avsc) generated from pegasus schema files>
    - *   |       --- <sourceSet>GeneratedRest/
    - *   |           --- java/
    - *   |               --- <packageName>/
    - *   |                   --- <rest client source (.java) files generated from published idl>
    - *   --- impl/
    - *   |   --- build.gradle
    - *   |   --- src/
    - *   |       --- <sourceSet>/
    - *   |       |   --- java/
    - *   |       |       --- <packageName>/
    - *   |       |           --- <resource class source (.java) files>
    - *   |       --- <sourceSet>GeneratedRest/
    - *   |           --- idl/
    - *   |               --- <generated idl (.restspec.json) files>
    - *   --- <other projects>/
    - * </pre>
    - * <p>
    - * <ul>
    - *   <li>
    - *    <i>api</i>: contains all the files which are commonly depended on by the server and
    - *    client implementation. The common files include the data schema (.pdsc) files,
    - *    the idl (.restspec.json) files and potentially Java interface files used by both sides.
    - *   </li>
    - *   <li>
    - *    <i>impl</i>: contains the resource class for server implementation.
    - *   </li>
    - * </ul>
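To make this layout concrete, a minimal wiring sketch (the project names are placeholders, not part of the plugin):

    // settings.gradle -- hypothetical project names
    include 'example-api'   // data schemas, published idl, shared interfaces
    include 'example-impl'  // rest.li resource classes (server implementation)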
    - * <p>Performs the following functions:</p>
    - * - *
    - * <p><b>Generate data model and data template jars for each source set.</b></p>
    - * - *
    - * <p><i>Overview:</i></p>
    - * - *
    - * <p>
    - * In the api project, the plugin generates the data template source (.java) files from the
    - * data schema (.pdsc) files, and furthermore compiles the source files and packages them
    - * to jar files. Details of jar contents will be explained in following paragraphs.
    - * In general, data schema files should exist only in api projects.
    - * </p>
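As a sketch, such an api project often needs little more than the plugin itself (the 'pegasus' plugin id follows rest.li convention; the project name is assumed):

    // example-api/build.gradle
    apply plugin: 'java'
    apply plugin: 'pegasus'
    // data schema (.pdsc) files are picked up from src/main/pegasus by default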
    - * - *
    - * <p>
    - * Configure the server and client implementation projects to depend on the
    - * api project's dataTemplate configuration to get access to the generated data templates
    - * from within these projects. This allows api classes to be built first so that implementation
    - * projects can consume them. We recommend this structure to avoid circular dependencies
    - * (directly or indirectly) among implementation projects.
    - * </p>
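A minimal sketch of that dependency edge, reusing the placeholder project names from above:

    // example-impl/build.gradle
    dependencies {
      // consume the api project's generated data templates
      dataModel project(path: ':example-api', configuration: 'dataTemplate')
    }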
    - * - *
    - * <p><i>Detail:</i></p>
    - * - *
    - * <p>
    - * Generates data template source (.java) files from data schema (.pdsc) files,
    - * compiles the data template source (.java) files into class (.class) files,
    - * creates a data model jar file and a data template jar file.
    - * The data model jar file contains the source data schema (.pdsc) files.
    - * The data template jar file contains both the source data schema (.pdsc) files
    - * and the generated data template class (.class) files.
    - * </p>
    - * - *
    - * <p>
    - * In the data template generation phase, the plugin creates a new target source set
    - * for the generated files. The new target source set's name is the input source set name
    - * suffixed with "GeneratedDataTemplate", e.g. "mainGeneratedDataTemplate".
    - * The plugin invokes PegasusDataTemplateGenerator to generate data template source (.java) files
    - * for all data schema (.pdsc) files present in the input source set's pegasus
    - * directory, e.g. "src/main/pegasus". The generated data template source (.java) files
    - * will be in the new target source set's java source directory, e.g.
    - * "src/mainGeneratedDataTemplate/java". In addition to
    - * the data schema (.pdsc) files in the pegasus directory, the dataModel configuration
    - * specifies the resolver path for the PegasusDataTemplateGenerator. The resolver path
    - * provides the data schemas and previously generated data template classes that
    - * may be referenced by the input source set's data schemas. In most cases, the dataModel
    - * configuration should contain data template jars.
    - * </p>
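For example, an upstream data template jar can be placed on that configuration so its schemas resolve without being regenerated (the coordinates are hypothetical):

    dependencies {
      // contributes to the generator.resolver.path passed to the generator
      dataModel 'com.example:upstream-models-data-template:1.0.0'
    }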
    - * - *
    - * <p>
    - * The next phase is the data template compilation phase: the plugin compiles the generated
    - * data template source (.java) files into class files. The dataTemplateCompile configuration
    - * specifies the pegasus jars needed to compile these classes. The compileClasspath of the
    - * target source set is a composite of the dataModel configuration, which includes the data template
    - * classes that were previously generated and included in the dependent data template jars,
    - * and the dataTemplateCompile configuration.
    - * This configuration should specify a dependency on the Pegasus data jar.
    - * </p>
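A sketch of that dependency; the version simply mirrors the pegasusVersion used elsewhere in this patch:

    dependencies {
      // Pegasus data jar on the data template compile classpath
      dataTemplateCompile 'com.linkedin.pegasus:data:29.51.0'
    }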
    - * - *
    - * <p>
    - * The following phase is creating the data model jar and the data template jar.
    - * This plugin creates the data model jar that includes the contents of the
    - * input source set's pegasus directory, and sets the jar file's classification to
    - * "data-model". Hence, the resulting jar file's name should end with "-data-model.jar".
    - * It adds the data model jar as an artifact to the dataModel configuration.
    - * This jar file should only contain data schema (.pdsc) files.
    - * </p>
    - * - *
    - * <p>
    - * This plugin also creates the data template jar that includes the contents of the input
    - * source set's pegasus directory and the java class output directory of the
    - * target source set. It sets the jar file's classification to "data-template".
    - * Hence, the resulting jar file's name should end with "-data-template.jar".
    - * It adds the data template jar file as an artifact to the dataTemplate configuration.
    - * This jar file contains both data schema (.pdsc) files and generated data template
    - * class (.class) files.
    - * </p>
    - * - *
    - * <p>
    - * This plugin will ensure that data template source files are generated before
    - * compiling the input source set and before the idea and eclipse tasks. It
    - * also adds the generated classes to the compileClasspath of the input source set.
    - * </p>
    - * - *
    - * <p>
    - * The configurations that apply to generating the data model and data template jars
    - * are as follows (a consolidated example is sketched after the list):
    - * </p>
    - * <ul>
    - *   <li>
    - *     The dataTemplateCompile configuration specifies the classpath for compiling
    - *     the generated data template source (.java) files. In most cases,
    - *     it should be the Pegasus data jar.
    - *     (The default compile configuration is not used for compiling data templates because
    - *     it is not desirable to include non data template dependencies in the data template jar.)
    - *     The configuration should not directly include data template jars. Data template jars
    - *     should be included in the dataModel configuration.
    - *   </li>
    - *   <li>
    - *     The dataModel configuration provides the value of the "generator.resolver.path"
    - *     system property that is passed to PegasusDataTemplateGenerator. In most cases,
    - *     this configuration should contain only data template jars. The data template jars
    - *     contain both data schema (.pdsc) files and generated data template (.class) files.
    - *     PegasusDataTemplateGenerator will not generate data template (.java) files for
    - *     classes that can be found in the resolver path. This avoids redundant generation
    - *     of the same classes, and inclusion of these classes in multiple jars.
    - *     The dataModel configuration is also used to publish the data model jar which
    - *     contains only data schema (.pdsc) files.
    - *   </li>
    - *   <li>
    - *     The testDataModel configuration is similar to the dataModel configuration
    - *     except it is used when generating data templates from test source sets.
    - *     It extends from the dataModel configuration. It is also used to publish
    - *     the data model jar from test source sets.
    - *   </li>
    - *   <li>
    - *     The dataTemplate configuration is used to publish the data template
    - *     jar which contains both data schema (.pdsc) files and the data template class
    - *     (.class) files generated from these data schema (.pdsc) files.
    - *   </li>
    - *   <li>
    - *     The testDataTemplate configuration is similar to the dataTemplate configuration
    - *     except it is used when publishing the data template jar files generated from
    - *     test source sets.
    - *   </li>
    - * </ul>

    - * - *

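As a sketch of how the configurations listed above are typically wired together
(the coordinates, version, and project path below are hypothetical, not taken from
this patch), an api project's build.gradle might declare:

    // hypothetical api project build.gradle -- illustrative sketch only
    dependencies {
      // Pegasus data jar, used to compile the generated data template classes
      dataTemplateCompile 'com.linkedin.pegasus:data:29.22.16'  // hypothetical version
      // upstream data template jars, used as the schema resolver path
      dataModel project(path: ':member:member-api', configuration: 'dataTemplate')
    }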
- * <p>
- * Performs the following functions:
- * </p>
- *
- * <p>
- * Generate avro schema jars for each source set.
- * </p>
- *
- * <p>
- * Overview:
- * </p>
- *
- * <p>
- * In the api project, the task 'generateAvroSchema' generates the avro schema (.avsc)
- * files from pegasus schema (.pdsc) files. In general, data schema files should exist
- * only in api projects.
- * </p>
- *
- * <p>
- * Configure the server and client implementation projects to depend on the
- * api project's avroSchema configuration to get access to the generated avro schemas
- * from within these projects. An illustrative sketch follows below.
- * </p>
- *
- * <p>
- * This plugin also creates the avro schema jar that includes the contents of the input
- * source set's avro directory and the avsc schema files.
- * The resulting jar file's name should end with "-avro-schema.jar".
- * </p>
- *
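A minimal sketch of the dependency described above, assuming a hypothetical
':example-api' project that applies this plugin:

    // hypothetical server/client project build.gradle -- illustrative sketch only
    dependencies {
      // pick up the avro schema jar published on the api project's avroSchema configuration
      compile project(path: ':example-api', configuration: 'avroSchema')
    }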
- * <p>
- * Generate rest model and rest client jars for each source set.
- * </p>
- *
- * <p>
- * Overview:
- * </p>
- *
- * <p>
- * In the api project, generates rest client source (.java) files from the idl,
- * compiles the rest client source (.java) files to rest client class (.class) files
- * and puts them in jar files. In general, the api project should be the only place that
- * contains the publishable idl files. If the published idl changes an existing idl
- * in the api project, the plugin will emit a message indicating this has occurred and
- * suggest that the entire project be rebuilt if it is desirable for clients of the
- * idl to pick up the newly published changes.
- * </p>
- *
- * <p>
- * In the impl project, generates the idl (.restspec.json) files from the input
- * source set's resource class files, then compares them against the existing idl
- * files in the api project for compatibility checking. If incompatible changes are
- * found, the build fails (unless a certain flag is specified, see below). If the
- * generated idl passes the compatibility checks (see compatibility check levels below),
- * publishes the generated idl (.restspec.json) to the api project.
- * </p>
- *
- * <p>
- * Detail:
- * </p>
- *
- * <p>
- * rest client generation phase: in api project
- * </p>
- *
- * <p>
- * In this phase, the rest client source (.java) files are generated from the
- * api project idl (.restspec.json) files using RestRequestBuilderGenerator.
- * The generated rest client source files will be in the new target source set's
- * java source directory, e.g. "src/mainGeneratedRest/java".
- * </p>
- *
- * <p>
- * RestRequestBuilderGenerator requires access to the data schemas referenced
- * by the idl. The dataModel configuration specifies the resolver path needed
- * by RestRequestBuilderGenerator to access the data schemas referenced by
- * the idl that are not in the source set's pegasus directory.
- * This plugin automatically includes the data schema (.pdsc) files in the
- * source set's pegasus directory in the resolver path.
- * In most cases, the dataModel configuration should contain data template jars.
- * The data template jars contain both data schema (.pdsc) files and generated
- * data template class (.class) files. By specifying data template jars instead
- * of data model jars, redundant generation of data template classes is avoided,
- * as classes that can be found in the resolver path are not generated.
- * </p>
- *
- * <p>
- * rest client compilation phase: in api project
- * </p>
- *
- * <p>
- * In this phase, the plugin compiles the generated rest client source (.java)
- * files into class files. The restClientCompile configuration specifies the
- * pegasus jars needed to compile these classes. The compile classpath is a
- * composite of the dataModel configuration, which includes the data template
- * classes that were previously generated and included in the dependent data template
- * jars, and the restClientCompile configuration.
- * This configuration should specify a dependency on the Pegasus restli-client jar.
- * </p>
- *
- * <p>
- * The following stage is creating the rest model jar and the rest client jar.
- * The plugin creates the rest model jar that includes the
- * generated idl (.restspec.json) files, and sets the jar file's classification to
- * "rest-model". Hence, the resulting jar file's name should end with "-rest-model.jar".
- * It adds the rest model jar as an artifact to the restModel configuration.
- * This jar file should only contain idl (.restspec.json) files.
- * </p>
- *
- * <p>
- * The plugin also creates the rest client jar that includes the generated
- * idl (.restspec.json) files and the java class output directory of the
- * target source set. It sets the jar file's classification to "rest-client".
- * Hence, the resulting jar file's name should end with "-rest-client.jar".
- * It adds the rest client jar file as an artifact to the restClient configuration.
- * This jar file contains both idl (.restspec.json) files and generated rest client
- * class (.class) files.
- * </p>
- *
- * <p>
- * idl generation phase: in server implementation project
- * </p>
- *
- * <p>
- * Before entering this phase, the plugin will ensure that generating idl will
- * occur after compiling the input source set. It will also ensure that the IDEA
- * and Eclipse tasks run after the rest client source (.java) files are generated.
- * </p>
- *
- * <p>
- * In this phase, the plugin creates a new target source set for the generated files.
- * The new target source set's name is the input source set's name suffixed with
- * "GeneratedRest", e.g. "mainGeneratedRest". The plugin invokes
- * RestLiResourceModelExporter to generate idl (.restspec.json) files for each
- * IdlItem in the input source set's pegasus IdlOptions. The generated idl files
- * will be in the target source set's idl directory, e.g. "src/mainGeneratedRest/idl".
- * For example, the following adds an IdlItem to the source set's pegasus IdlOptions.
- * This line should appear in the impl project's build.gradle. If no IdlItem is added,
- * this source set will be excluded from generating idl and checking idl compatibility,
- * even if there are existing idl files.
- * </p>
- *
- * <pre>
- *   pegasus.main.idlOptions.addIdlItem(["com.linkedin.restli.examples.groups.server"])
- * </pre>
- *
- * <p>
- * After the idl generation phase, each included idl file is checked for compatibility against
- * those in the api project. In case the current interface breaks compatibility,
- * by default the build fails and reports all compatibility errors and warnings. Otherwise,
- * the build tasks in the api project will later package the resource classes into jar files.
- * Users can change the compatibility requirement between the current and published idl by
- * setting the "rest.model.compatibility" project property, i.e.
- * "gradle -Prest.model.compatibility= ...". The following levels are supported;
- * an illustrative sketch follows the list.
- * </p>
- *
- * <ul>
- *   <li>
- *     ignore: idl compatibility check will occur but its result will be ignored.
- *     The result will be aggregated and printed at the end of the build.
- *   </li>
- *   <li>
- *     backwards: build fails if there are backwards incompatible changes in idl.
- *     Build continues if there are only compatible changes.
- *   </li>
- *   <li>
- *     equivalent (default): build fails if there are any functional changes (compatible or
- *     incompatible) in the current idl. Only docs and comments are allowed to differ.
- *   </li>
- * </ul>
- *
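The documented way to select a level is the -Prest.model.compatibility command-line
property shown above. As a sketch, the same project property can be pinned for every
subproject from the root build.gradle; this assumes the plugin reads it via Gradle's
project-property lookup, which includes ext properties:

    // hypothetical root build.gradle -- illustrative sketch only
    subprojects {
      // roughly equivalent to passing -Prest.model.compatibility=backwards on the command line
      ext.set('rest.model.compatibility', 'backwards')
    }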
- * <p>
- * The plugin needs to know where the api project is. It searches for the api project in the
- * following steps. If all searches fail, the build fails. (An illustrative sketch follows the list.)
- * </p>
- *
- * <ol>
- *   <li>
- *     Use the specified project from the impl project build.gradle file. The ext.apiProject
- *     property explicitly assigns the api project. E.g.
- *     <pre>
- *       ext.apiProject = project(':groups:groups-server-api')
- *     </pre>
- *     If multiple such statements exist, the last one will be used. A wrong project path causes a
- *     Gradle evaluation error.
- *   </li>
- *   <li>
- *     If no ext.apiProject property is defined, the plugin will try to guess the
- *     api project name with the following conventions. The search stops at the first successful match.
- *     <ol>
- *       <li>
- *         If the impl project name ends with the following suffixes, substitute the suffix with "-api".
- *         <ul>
- *           <li>-impl</li>
- *           <li>-service</li>
- *           <li>-server</li>
- *           <li>-server-impl</li>
- *         </ul>
- *         This list can be overridden by inserting the following line into the project build.gradle:
- *         <pre>
- *           ext.apiProjectSubstitutionSuffixes = ['-new-suffix-1', '-new-suffix-2']
- *         </pre>
- *         Alternatively, this setting can be applied globally to all projects by putting it in
- *         the subprojects section of the root build.gradle.
- *       </li>
- *       <li>
- *         Append "-api" to the impl project name.
- *       </li>
- *     </ol>
- *   </li>
- * </ol>
- *
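Putting the search rules together, a hypothetical impl project whose name matches none
of the conventions above could either pin the api project explicitly or widen the
suffix list (both properties are the ones documented above; the values are illustrative):

    // hypothetical impl project build.gradle -- illustrative sketch only
    ext.apiProject = project(':groups:groups-server-api')
    // or, instead of pinning, extend the suffix substitution list
    ext.apiProjectSubstitutionSuffixes = ['-impl', '-backend']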
- * <p>
- * The plugin invokes RestLiResourceModelCompatibilityChecker to check compatibility.
- * </p>
- *
- * <p>
- * The idl files in the api project are not generated by the plugin, but rather
- * "published" from the impl project. The publishRestModel task is used to copy the
- * idl files to the api project. This task is invoked automatically if the idls are
- * verified to be "safe". "Safe" is determined by the "rest.model.compatibility"
- * property. Because this task is skipped if the idls are functionally equivalent
- * (not necessarily identical, e.g. they differ in doc fields), if the default "equivalent"
- * compatibility level is used, no file will be copied. If such automatic publishing
- * is intended to be skipped, set the "rest.model.skipPublish" property to true.
- * Note that all the properties are per-project and can be overridden in each project's
- * build.gradle file.
- * </p>
- *
- * <p>
- * Please always keep in mind that if idl publishing has happened, a subsequent whole-project
- * rebuild is necessary to pick up the changes. Otherwise, the Hudson job will fail and
- * the source code commit will fail.
- * </p>
- *
- * <p>
- * The configurations that apply to generating the rest model and rest client jars
- * are as follows; an illustrative sketch follows the list below.
- * </p>
- *
- * <ul>
- *   <li>
- *     The restClientCompile configuration specifies the classpath for compiling
- *     the generated rest client source (.java) files. In most cases,
- *     it should be the Pegasus restli-client jar.
- *     (The default compile configuration is not used for compiling rest clients because
- *     it is not desirable to include non rest client dependencies, such as
- *     the rest server implementation classes, in the data template jar.)
- *     The configuration should not directly include data template jars. Data template jars
- *     should be included in the dataModel configuration.
- *   </li>
- *   <li>
- *     The dataModel configuration provides the value of the "generator.resolver.path"
- *     system property that is passed to RestRequestBuilderGenerator.
- *     This configuration should contain only data template jars. The data template jars
- *     contain both data schema (.pdsc) files and generated data template (.class) files.
- *     The RestRequestBuilderGenerator will only generate rest client classes.
- *     The dataModel configuration is also included in the compile classpath for the
- *     generated rest client source files. If the dataModel configuration does not
- *     include the generated data template classes, the Java compiler may not be able to
- *     find the data template classes referenced by the generated rest client.
- *   </li>
- *   <li>
- *     The testDataModel configuration is similar to the dataModel configuration
- *     except it is used when generating rest client source files from
- *     test source sets.
- *   </li>
- *   <li>
- *     The restModel configuration is used to publish the rest model jar
- *     which contains the generated idl (.restspec.json) files.
- *   </li>
- *   <li>
- *     The testRestModel configuration is similar to the restModel configuration
- *     except it is used to publish rest model jar files generated from
- *     test source sets.
- *   </li>
- *   <li>
- *     The restClient configuration is used to publish the rest client jar
- *     which contains both the generated idl (.restspec.json) files and
- *     the rest client class (.class) files generated from these
- *     idl (.restspec.json) files.
- *   </li>
- *   <li>
- *     The testRestClient configuration is similar to the restClient configuration
- *     except it is used to publish rest client jar files generated from
- *     test source sets.
- *   </li>
- * </ul>
- *
- * <p>
- * This plugin considers test source sets whose names begin with 'test' or 'integTest' to be
- * test source sets.
- * </p>
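A sketch of how the rest configurations above are commonly declared (the coordinates,
version, and project path are hypothetical, not taken from this patch):

    // hypothetical build.gradle -- illustrative sketch only
    dependencies {
      // Pegasus restli-client jar for compiling generated request builders
      restClientCompile 'com.linkedin.pegasus:restli-client:29.22.16'  // hypothetical version
      // consume request builders published on another api project's restClient configuration
      compile project(path: ':example-api', configuration: 'restClient')
    }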
- */
-public class PegasusPlugin implements Plugin<Project>
-{
-  public static boolean debug = false;
-
-  private static final GradleVersion MIN_REQUIRED_VERSION = GradleVersion.version("1.0"); // Next: 5.2.1
-  private static final GradleVersion MIN_SUGGESTED_VERSION = GradleVersion.version("5.2.1"); // Next: 5.3
-
-  //
-  // Constants for generating sourceSet names and corresponding directory names
-  // for generated code
-  //
-  private static final String DATA_TEMPLATE_GEN_TYPE = "DataTemplate";
-  private static final String REST_GEN_TYPE = "Rest";
-  private static final String AVRO_SCHEMA_GEN_TYPE = "AvroSchema";
-
-  public static final String DATA_TEMPLATE_FILE_SUFFIX = ".pdsc";
-  public static final String PDL_FILE_SUFFIX = ".pdl";
-  // gradle property to opt OUT of schema annotation validation; by default this feature is enabled.
-  private static final String DISABLE_SCHEMA_ANNOTATION_VALIDATION = "schema.annotation.validation.disable";
-  // gradle property to opt in for destroying stale files from the build directory;
-  // by default it is disabled, because it triggers hot-reload (even if it results in a no-op)
-  private static final String DESTROY_STALE_FILES_ENABLE = "enableDestroyStaleFiles";
-  public static final Collection<String> DATA_TEMPLATE_FILE_SUFFIXES = new ArrayList<>();
-
-  public static final String IDL_FILE_SUFFIX = ".restspec.json";
-  public static final String SNAPSHOT_FILE_SUFFIX = ".snapshot.json";
-  public static final String SNAPSHOT_COMPAT_REQUIREMENT = "rest.model.compatibility";
-  public static final String IDL_COMPAT_REQUIREMENT = "rest.idl.compatibility";
-  // Pegasus schema compatibility level configuration, which is used to define the {@link CompatibilityLevel}.
-  public static final String PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.pegasusSchema.compatibility";
-  // Pegasus extension schema compatibility level configuration, which is used to define the {@link CompatibilityLevel}.
-  public static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.extensionSchema.compatibility";
-  // CompatibilityOptions Mode configuration, which is used to define the {@link CompatibilityOptions#Mode} in the compatibility checker.
-  private static final String PEGASUS_COMPATIBILITY_MODE = "pegasusPlugin.pegasusSchemaCompatibilityCheckMode";
-
-  private static final Pattern TEST_DIR_REGEX = Pattern.compile("^(integ)?[Tt]est");
-  private static final String SNAPSHOT_NO_PUBLISH = "rest.model.noPublish";
-  private static final String SNAPSHOT_FORCE_PUBLISH = "rest.model.forcePublish";
-  private static final String PROCESS_EMPTY_IDL_DIR = "rest.idl.processEmptyIdlDir";
-  private static final String IDL_NO_PUBLISH = "rest.idl.noPublish";
-  private static final String IDL_FORCE_PUBLISH = "rest.idl.forcePublish";
-  private static final String SKIP_IDL_CHECK = "rest.idl.skipCheck";
-  // gradle property to skip running the GenerateRestModel task.
-  // Note it affects the GenerateRestModel task only, and does not skip tasks that depend on GenerateRestModel.
-  private static final String SKIP_GENERATE_REST_MODEL = "rest.model.skipGenerateRestModel";
-  private static final String SUPPRESS_REST_CLIENT_RESTLI_2 = "rest.client.restli2.suppress";
-  private static final String SUPPRESS_REST_CLIENT_RESTLI_1 = "rest.client.restli1.suppress";
-
-  private static final String GENERATOR_CLASSLOADER_NAME = "pegasusGeneratorClassLoader";
-
-  private static final String CONVERT_TO_PDL_REVERSE = "convertToPdl.reverse";
-  private static final String CONVERT_TO_PDL_KEEP_ORIGINAL = "convertToPdl.keepOriginal";
-  private static final String CONVERT_TO_PDL_SKIP_VERIFICATION = "convertToPdl.skipVerification";
-  private static final String CONVERT_TO_PDL_PRESERVE_SOURCE_CMD = "convertToPdl.preserveSourceCmd";
-
-  // Below variables are used to collect data across all pegasus projects (sub-projects) and then print information
-  // to the user at the end after the build is finished.
-  private static StringBuffer _restModelCompatMessage = new StringBuffer();
-  private static final Collection<String> _needCheckinFiles = new ArrayList<>();
-  private static final Collection<String> _needBuildFolders = new ArrayList<>();
-  private static final Collection<String> _possibleMissingFilesInEarlierCommit = new ArrayList<>();
-
-  private static final String RUN_ONCE = "runOnce";
-  private static final Object STATIC_PROJECT_EVALUATED_LOCK = new Object();
-
-  private static final List<String> UNUSED_CONFIGURATIONS = Arrays.asList(
-      "dataTemplateGenerator", "restTools", "avroSchemaGenerator");
-  // Directory in the dataTemplate jar that holds schemas translated from PDL to PDSC.
-  private static final String TRANSLATED_SCHEMAS_DIR = "legacyPegasusSchemas";
-  // Enable the use of argFiles for the tasks that support them
-  private static final String ENABLE_ARG_FILE = "pegasusPlugin.enableArgFile";
-  // Enable the generation of fluent APIs
-  private static final String ENABLE_FLUENT_API = "pegasusPlugin.enableFluentApi";
-
-  // This config impacts GenerateDataTemplateTask and GenerateRestClientTask;
-  // if not set, by default all paths generated in these two tasks will be lower-case.
-  // This default behavior is needed because Linux, MacOS, and Windows treat case-sensitive paths differently,
-  // and we want to be consistent, so we choose lower-case as the default case for generated paths.
-  private static final String CODE_GEN_PATH_CASE_SENSITIVE = "pegasusPlugin.generateCaseSensitivePath";
-
-  private static final String PEGASUS_PLUGIN_CONFIGURATION = "pegasusPlugin";
-
-  // Enable the use of the generic pegasus schema compatibility checker
-  private static final String ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK = "pegasusPlugin.enablePegasusSchemaCompatibilityCheck";
-
-  private static final String PEGASUS_SCHEMA_SNAPSHOT = "PegasusSchemaSnapshot";
-
-  private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT = "PegasusExtensionSchemaSnapshot";
-
-  private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR = "pegasusSchemaSnapshot";
-
-  private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR = "pegasusExtensionSchemaSnapshot";
-
-  private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusSchemaSnapshotDir";
-
-  private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusExtensionSchemaSnapshotDir";
-
-  private static final String SRC = "src";
-
-  private static final String SCHEMA_ANNOTATION_HANDLER_CONFIGURATION = "schemaAnnotationHandler";
-
-  private static final String COMPATIBILITY_OPTIONS_MODE_EXTENSION = "EXTENSION";
-
-
-  @SuppressWarnings("unchecked")
-  private Class<? extends Plugin<Project>> _thisPluginType = (Class<? extends Plugin<Project>>)
-      getClass().asSubclass(Plugin.class);
-
-  private Task _generateSourcesJarTask;
-  private Javadoc _generateJavadocTask;
-  private Task _generateJavadocJarTask;
-  private boolean _configureIvyPublications = true;
-
-  public void setPluginType(Class<? extends Plugin<Project>> pluginType)
-  {
-    _thisPluginType = pluginType;
-  }
-
-  public void setSourcesJarTask(Task sourcesJarTask)
-  {
-    _generateSourcesJarTask = sourcesJarTask;
-  }
-
-  public void setJavadocJarTask(Task javadocJarTask)
-  {
-    _generateJavadocJarTask = javadocJarTask;
-  }
-
-  public void setConfigureIvyPublications(boolean configureIvyPublications) {
-    _configureIvyPublications = configureIvyPublications;
-  }
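Several of the constants above name opt-in/opt-out project properties. A sketch of
toggling them per project (the property keys are the real ones defined above; setting
them via ext is one way to define a project property, alongside -P flags and
gradle.properties, assuming the plugin reads them via Gradle's project-property lookup):

    // hypothetical build.gradle -- illustrative sketch only
    ext.set('schema.annotation.validation.disable', 'true')  // opt out of schema annotation validation
    ext.set('pegasusPlugin.enableArgFile', 'true')           // opt in to argFiles for generator tasks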
" - + "This configuration is deprecated and you can safely remove the dependency. ***", - subproject.getPath(), configurationName); - } - }) - ) - ); - - // Re-initialize the static variables as they might have stale values from previous run. With Gradle 3.0 and - // gradle daemon enabled, the plugin class might not be loaded for every run. - DATA_TEMPLATE_FILE_SUFFIXES.clear(); - DATA_TEMPLATE_FILE_SUFFIXES.add(DATA_TEMPLATE_FILE_SUFFIX); - DATA_TEMPLATE_FILE_SUFFIXES.add(PDL_FILE_SUFFIX); - - _restModelCompatMessage = new StringBuffer(); - _needCheckinFiles.clear(); - _needBuildFolders.clear(); - _possibleMissingFilesInEarlierCommit.clear(); - - project.getGradle().buildFinished(result -> - { - StringBuilder endOfBuildMessage = new StringBuilder(); - if (_restModelCompatMessage.length() > 0) - { - endOfBuildMessage.append(_restModelCompatMessage); - } - - if (!_needCheckinFiles.isEmpty()) - { - endOfBuildMessage.append(createModifiedFilesMessage(_needCheckinFiles, _needBuildFolders)); - } - - if (!_possibleMissingFilesInEarlierCommit.isEmpty()) - { - endOfBuildMessage.append(createPossibleMissingFilesMessage(_possibleMissingFilesInEarlierCommit)); - } - - if (endOfBuildMessage.length() > 0) - { - result.getGradle().getRootProject().getLogger().quiet(endOfBuildMessage.toString()); - } - }); - - // Set an extra property on the root project to indicate the initialization is complete for the current build. - project.getRootProject().getExtensions().getExtraProperties().set(RUN_ONCE, true); - } - } - - ConfigurationContainer configurations = project.getConfigurations(); - - // configuration for getting the required classes to make pegasus call main methods - configurations.maybeCreate(PEGASUS_PLUGIN_CONFIGURATION); - - // configuration for compiling generated data templates - Configuration dataTemplateCompile = configurations.maybeCreate("dataTemplateCompile"); - dataTemplateCompile.setVisible(false); - - // configuration for running rest client generator - Configuration restClientCompile = configurations.maybeCreate("restClientCompile"); - restClientCompile.setVisible(false); - - // configuration for running data template generator - // DEPRECATED! This configuration is no longer used. Please stop using it. - Configuration dataTemplateGenerator = configurations.maybeCreate("dataTemplateGenerator"); - dataTemplateGenerator.setVisible(false); - - // configuration for running rest client generator - // DEPRECATED! This configuration is no longer used. Please stop using it. - Configuration restTools = configurations.maybeCreate("restTools"); - restTools.setVisible(false); - - // configuration for running Avro schema generator - // DEPRECATED! 
-    // DEPRECATED! To skip avro schema generation, use PegasusOptions.generationModes
-    Configuration avroSchemaGenerator = configurations.maybeCreate("avroSchemaGenerator");
-    avroSchemaGenerator.setVisible(false);
-
-    // configuration for depending on data schemas and potentially generated data templates
-    // and for publishing jars containing data schemas to the project artifacts for including in the ivy.xml
-    Configuration dataModel = configurations.maybeCreate("dataModel");
-    Configuration testDataModel = configurations.maybeCreate("testDataModel");
-    testDataModel.extendsFrom(dataModel);
-
-    // configuration for depending on data schemas and potentially generated data templates
-    // and for publishing jars containing data schemas to the project artifacts for including in the ivy.xml
-    Configuration avroSchema = configurations.maybeCreate("avroSchema");
-    Configuration testAvroSchema = configurations.maybeCreate("testAvroSchema");
-    testAvroSchema.extendsFrom(avroSchema);
-
-    // configuration for depending on rest idl and potentially generated client builders
-    // and for publishing jars containing rest idl to the project artifacts for including in the ivy.xml
-    Configuration restModel = configurations.maybeCreate("restModel");
-    Configuration testRestModel = configurations.maybeCreate("testRestModel");
-    testRestModel.extendsFrom(restModel);
-
-    // configuration for publishing jars containing data schemas and generated data templates
-    // to the project artifacts for including in the ivy.xml
-    //
-    // published data template jars depend on the configurations used to compile the classes
-    // in the jar; this includes the data models/templates used by the data template generator
-    // and the classes used to compile the generated classes.
-    Configuration dataTemplate = configurations.maybeCreate("dataTemplate");
-    dataTemplate.extendsFrom(dataTemplateCompile, dataModel);
-    Configuration testDataTemplate = configurations.maybeCreate("testDataTemplate");
-    testDataTemplate.extendsFrom(dataTemplate, testDataModel);
-
-    // configuration for processing and validating schema annotations at build time.
-    //
-    // The configuration contains dependencies on schema annotation handlers, which process
-    // and validate schema annotations.
-    Configuration schemaAnnotationHandler = configurations.maybeCreate(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION);
-
-    // configuration for publishing jars containing rest idl and generated client builders
-    // to the project artifacts for including in the ivy.xml
-    //
-    // published client builder jars depend on the configurations used to compile the classes
-    // in the jar; this includes the data models/templates (potentially generated by this
-    // project and) used by the data template generator and the classes used to compile
-    // the generated classes.
-    Configuration restClient = configurations.maybeCreate("restClient");
-    restClient.extendsFrom(restClientCompile, dataTemplate);
-    Configuration testRestClient = configurations.maybeCreate("testRestClient");
-    testRestClient.extendsFrom(restClient, testDataTemplate);
-
-    Properties properties = new Properties();
-    InputStream inputStream = getClass().getResourceAsStream("/pegasus-version.properties");
-    if (inputStream != null)
-    {
-      try
-      {
-        properties.load(inputStream);
-      }
-      catch (IOException e)
-      {
-        throw new GradleException("Unable to read pegasus-version.properties file.", e);
-      }
-
-      String version = properties.getProperty("pegasus.version");
-
-      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data:" + version);
-      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data-avro-generator:" + version);
-      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:generator:" + version);
-      project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:restli-tools:" + version);
-    }
-    else
-    {
-      project.getLogger().lifecycle("Unable to add pegasus dependencies to {}. Please be sure that "
-          + "'com.linkedin.pegasus:data', 'com.linkedin.pegasus:data-avro-generator', 'com.linkedin.pegasus:generator', 'com.linkedin.pegasus:restli-tools'"
-          + " are available on the configuration pegasusPlugin",
-          project.getPath());
-    }
-    project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "org.slf4j:slf4j-simple:1.7.2");
-    project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, project.files(System.getProperty("java.home") + "/../lib/tools.jar"));
-
-    // this call has to be here because:
-    // 1) an artifact cannot be published once the project has been evaluated, so we need to first
-    //    create the tasks and the artifact handler, then progressively append sources
-    // 2) in order to append sources progressively, the source and documentation tasks and artifacts must be
-    //    configured/created before configuring and creating the code generation tasks.
-    configureGeneratedSourcesAndJavadoc(project);
-
-    ChangedFileReportTask changedFileReportTask = project.getTasks()
-        .create("changedFilesReport", ChangedFileReportTask.class);
-
-    project.getTasks().getByName("check").dependsOn(changedFileReportTask);
-
-    SourceSetContainer sourceSets = project.getConvention()
-        .getPlugin(JavaPluginConvention.class).getSourceSets();
-
-    sourceSets.all(sourceSet ->
-    {
-      if (sourceSet.getName().toLowerCase(Locale.US).contains("generated"))
-      {
-        return;
-      }
-
-      checkAvroSchemaExist(project, sourceSet);
-
-      // the idl generator input options will be inside the PegasusOptions class. Users of the
-      // plugin can set the inputOptions in their build.gradle
-      @SuppressWarnings("unchecked")
-      Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
-          .getExtensions().getExtraProperties().get("pegasus");
-
-      pegasusOptions.put(sourceSet.getName(), new PegasusOptions());
-
-      // rest model generation could fail on incompatibility;
-      // if it can fail, fail it early
-      configureRestModelGeneration(project, sourceSet);
-
-      // Do the compatibility check for schemas under the "pegasus" directory if the configuration property is provided.
-      if (isPropertyTrue(project, ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK))
-      {
-        configurePegasusSchemaSnapshotGeneration(project, sourceSet, false);
-      }
-
-      configurePegasusSchemaSnapshotGeneration(project, sourceSet, true);
-
-      configureConversionUtilities(project, sourceSet);
-
-      GenerateDataTemplateTask generateDataTemplateTask = configureDataTemplateGeneration(project, sourceSet);
-
-      configureAvroSchemaGeneration(project, sourceSet);
-
-      configureRestClientGeneration(project, sourceSet);
-
-      if (!isPropertyTrue(project, DISABLE_SCHEMA_ANNOTATION_VALIDATION))
-      {
-        configureSchemaAnnotationValidation(project, sourceSet, generateDataTemplateTask);
-      }
-
-      Task cleanGeneratedDirTask = project.task(sourceSet.getTaskName("clean", "GeneratedDir"));
-      cleanGeneratedDirTask.doLast(new CacheableAction<>(task ->
-      {
-        deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE);
-        deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE);
-        deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE);
-      }));
-
-      // make clean depend on deleting the generated directories
-      project.getTasks().getByName("clean").dependsOn(cleanGeneratedDirTask);
-
-      // Set data schema directories as resource roots
-      configureDataSchemaResourcesRoot(project, sourceSet);
-    });
-
-    project.getExtensions().getExtraProperties().set(GENERATOR_CLASSLOADER_NAME, getClass().getClassLoader());
-  }
-
-  protected void configureSchemaAnnotationValidation(Project project,
-      SourceSet sourceSet,
-      GenerateDataTemplateTask generateDataTemplatesTask)
-  {
-    // Tasks execute in the following order:
-    // generateDataTemplatesTask -> validateSchemaAnnotationTask
-
-    // Create the ValidateSchemaAnnotation task
-    ValidateSchemaAnnotationTask validateSchemaAnnotationTask = project.getTasks()
-        .create(sourceSet.getTaskName("validate", "schemaAnnotation"), ValidateSchemaAnnotationTask.class, task ->
-        {
-          task.setInputDir(generateDataTemplatesTask.getInputDir());
-          task.setResolverPath(getDataModelConfig(project, sourceSet)); // same resolver path as generateDataTemplatesTask
-          task.setClassPath(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)
-              .plus(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION))
-              .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)));
-          task.setHandlerJarPath(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION));
-          if (isPropertyTrue(project, ENABLE_ARG_FILE))
-          {
-            task.setEnableArgFile(true);
-          }
-        }
-        );
-
-    // validateSchemaAnnotationTask depends on generateDataTemplatesTask
-    validateSchemaAnnotationTask.dependsOn(generateDataTemplatesTask);
-
-    // check depends on validateSchemaAnnotationTask
-    project.getTasks().getByName("check").dependsOn(validateSchemaAnnotationTask);
-  }
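The validation task configured above draws its handlers from the schemaAnnotationHandler
configuration created earlier in apply(). A sketch of supplying a handler (the project
path is hypothetical):

    // hypothetical build.gradle -- illustrative sketch only
    dependencies {
      // handlers consumed by the validateSchemaAnnotation task
      schemaAnnotationHandler project(':schema-annotation-handlers')
    }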
- project.getTasks().getByName("check").dependsOn(validateSchemaAnnotationTask); - } - - - - @SuppressWarnings("deprecation") - protected void configureGeneratedSourcesAndJavadoc(Project project) - { - _generateJavadocTask = project.getTasks().create("generateJavadoc", Javadoc.class); - - if (_generateSourcesJarTask == null) - { - // - // configuration for publishing jars containing sources for generated classes - // to the project artifacts for including in the ivy.xml - // - ConfigurationContainer configurations = project.getConfigurations(); - Configuration generatedSources = configurations.maybeCreate("generatedSources"); - Configuration testGeneratedSources = configurations.maybeCreate("testGeneratedSources"); - testGeneratedSources.extendsFrom(generatedSources); - - _generateSourcesJarTask = project.getTasks().create("generateSourcesJar", Jar.class, jarTask -> { - jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); - jarTask.setDescription("Generates a jar file containing the sources for the generated Java classes."); - // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 - // DataHub Note - applied FIXME - jarTask.getArchiveClassifier().set("sources"); - }); - - project.getArtifacts().add("generatedSources", _generateSourcesJarTask); - } - - if (_generateJavadocJarTask == null) - { - // - // configuration for publishing jars containing Javadoc for generated classes - // to the project artifacts for including in the ivy.xml - // - ConfigurationContainer configurations = project.getConfigurations(); - Configuration generatedJavadoc = configurations.maybeCreate("generatedJavadoc"); - Configuration testGeneratedJavadoc = configurations.maybeCreate("testGeneratedJavadoc"); - testGeneratedJavadoc.extendsFrom(generatedJavadoc); - - _generateJavadocJarTask = project.getTasks().create("generateJavadocJar", Jar.class, jarTask -> { - jarTask.dependsOn(_generateJavadocTask); - jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); - jarTask.setDescription("Generates a jar file containing the Javadoc for the generated Java classes."); - // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 - // DataHub Note - applied FIXME - jarTask.getArchiveClassifier().set("javadoc"); - jarTask.from(_generateJavadocTask.getDestinationDir()); - }); - - project.getArtifacts().add("generatedJavadoc", _generateJavadocJarTask); - } - else - { - // TODO: Tighten the types so that _generateJavadocJarTask must be of type Jar. 
-      ((Jar) _generateJavadocJarTask).from(_generateJavadocTask.getDestinationDir());
-      _generateJavadocJarTask.dependsOn(_generateJavadocTask);
-    }
-  }
-
-  private static void deleteGeneratedDir(Project project, SourceSet sourceSet, String dirType)
-  {
-    String generatedDirPath = getGeneratedDirPath(project, sourceSet, dirType);
-    project.getLogger().info("Delete generated directory {}", generatedDirPath);
-    project.delete(generatedDirPath);
-  }
-
-  private static <E extends Enum<E>> Class<E> getCompatibilityLevelClass(Project project)
-  {
-    ClassLoader generatorClassLoader = (ClassLoader) project.property(GENERATOR_CLASSLOADER_NAME);
-
-    String className = "com.linkedin.restli.tools.idlcheck.CompatibilityLevel";
-    try
-    {
-      @SuppressWarnings("unchecked")
-      Class<E> enumClass = (Class<E>) generatorClassLoader.loadClass(className).asSubclass(Enum.class);
-      return enumClass;
-    }
-    catch (ClassNotFoundException e)
-    {
-      throw new RuntimeException("Could not load class " + className);
-    }
-  }
-
-  private static void addGeneratedDir(Project project, SourceSet sourceSet, Collection<Configuration> configurations)
-  {
-    project.getPlugins().withType(IdeaPlugin.class, ideaPlugin -> {
-      IdeaModule ideaModule = ideaPlugin.getModel().getModule();
-      // stupid if block needed because of stupid assignment required to update source dirs
-      if (isTestSourceSet(sourceSet))
-      {
-        Set<File> sourceDirs = ideaModule.getTestSourceDirs();
-        sourceDirs.addAll(sourceSet.getJava().getSrcDirs());
-        // this is stupid but assignment is required
-        ideaModule.setTestSourceDirs(sourceDirs);
-        if (debug)
-        {
-          System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule testSourceDirs "
-              + ideaModule.getTestSourceDirs());
-        }
-      }
-      else
-      {
-        Set<File> sourceDirs = ideaModule.getSourceDirs();
-        sourceDirs.addAll(sourceSet.getJava().getSrcDirs());
-        // this is stupid but assignment is required
-        ideaModule.setSourceDirs(sourceDirs);
-        if (debug)
-        {
-          System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule sourceDirs "
-              + ideaModule.getSourceDirs());
-        }
-      }
-      Collection<Configuration> compilePlus = ideaModule.getScopes().get("COMPILE").get("plus");
-      compilePlus.addAll(configurations);
-      ideaModule.getScopes().get("COMPILE").put("plus", compilePlus);
-    });
-  }
-
-  private static void checkAvroSchemaExist(Project project, SourceSet sourceSet)
-  {
-    String sourceDir = "src" + File.separatorChar + sourceSet.getName();
-    File avroSourceDir = project.file(sourceDir + File.separatorChar + "avro");
-    if (avroSourceDir.exists())
-    {
-      project.getLogger().lifecycle("{}'s {} has non-empty avro directory. pegasus plugin does not process avro directory",
-          project.getName(), sourceDir);
-    }
-  }
-
-  // Compute the name of the source set that will contain a type of the input's generated code.
-  // e.g. genType may be 'DataTemplate' or 'Rest'
-  private static String getGeneratedSourceSetName(SourceSet sourceSet, String genType)
-  {
-    return sourceSet.getName() + "Generated" + genType;
-  }
-
-  // Compute the directory name that will contain a type of generated code of an input source set.
-  // e.g. genType may be 'DataTemplate' or 'Rest'
-  public static String getGeneratedDirPath(Project project, SourceSet sourceSet, String genType)
-  {
-    String override = getOverridePath(project, sourceSet, "overrideGeneratedDir");
-    String sourceSetName = getGeneratedSourceSetName(sourceSet, genType);
-    String base = override == null ? "src" : override;
"src" : override; - - return base + File.separatorChar + sourceSetName; - } - - public static String getDataSchemaPath(Project project, SourceSet sourceSet) - { - String override = getOverridePath(project, sourceSet, "overridePegasusDir"); - if (override == null) - { - return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "pegasus"; - } - else - { - return override; - } - } - - private static String getExtensionSchemaPath(Project project, SourceSet sourceSet) - { - String override = getOverridePath(project, sourceSet, "overrideExtensionSchemaDir"); - if(override == null) - { - return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "extensions"; - } - else - { - return override; - } - } - - private static String getSnapshotPath(Project project, SourceSet sourceSet) - { - String override = getOverridePath(project, sourceSet, "overrideSnapshotDir"); - if (override == null) - { - return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "snapshot"; - } - else - { - return override; - } - } - - private static String getIdlPath(Project project, SourceSet sourceSet) - { - String override = getOverridePath(project, sourceSet, "overrideIdlDir"); - if (override == null) - { - return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "idl"; - } - else - { - return override; - } - } - - private static String getPegasusSchemaSnapshotPath(Project project, SourceSet sourceSet) - { - String override = getOverridePath(project, sourceSet, PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE); - if (override == null) - { - return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_SCHEMA_SNAPSHOT_DIR; - } - else - { - return override; - } - } - - private static String getPegasusExtensionSchemaSnapshotPath(Project project, SourceSet sourceSet) - { - String override = getOverridePath(project, sourceSet, PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE); - if (override == null) - { - return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR; - } - else - { - return override; - } - } - - private static String getOverridePath(Project project, SourceSet sourceSet, String overridePropertyName) - { - String sourceSetPropertyName = sourceSet.getName() + '.' + overridePropertyName; - String override = getNonEmptyProperty(project, sourceSetPropertyName); - - if (override == null && sourceSet.getName().equals("main")) - { - override = getNonEmptyProperty(project, overridePropertyName); - } - - return override; - } - - private static boolean isTestSourceSet(SourceSet sourceSet) - { - return TEST_DIR_REGEX.matcher(sourceSet.getName()).find(); - } - - private static Configuration getDataModelConfig(Project project, SourceSet sourceSet) - { - return isTestSourceSet(sourceSet) - ? project.getConfigurations().getByName("testDataModel") - : project.getConfigurations().getByName("dataModel"); - } - - private static boolean isTaskSuccessful(Task task) - { - return task.getState().getExecuted() - // Task is not successful if it is not upto date and is skipped. 
-        && !(task.getState().getSkipped() && !task.getState().getUpToDate())
-        && task.getState().getFailure() == null;
-  }
-
-  private static boolean isResultEquivalent(File compatibilityLogFile)
-  {
-    return isResultEquivalent(compatibilityLogFile, false);
-  }
-
-  private static boolean isResultEquivalent(File compatibilityLogFile, boolean restSpecOnly)
-  {
-    CompatibilityLogChecker logChecker = new CompatibilityLogChecker();
-    try
-    {
-      logChecker.write(Files.readAllBytes(compatibilityLogFile.toPath()));
-    }
-    catch (IOException e)
-    {
-      throw new GradleException("Error while processing compatibility report: " + e.getMessage());
-    }
-    return logChecker.getRestSpecCompatibility().isEmpty() &&
-        (restSpecOnly || logChecker.getModelCompatibility().isEmpty());
-  }
-
-  protected void configureRestModelGeneration(Project project, SourceSet sourceSet)
-  {
-    if (sourceSet.getAllSource().isEmpty())
-    {
-      project.getLogger().info("No source files found for sourceSet {}. Skipping idl generation.", sourceSet.getName());
-      return;
-    }
-
-    // afterEvaluate needed so that the api project can be overridden via ext.apiProject
-    project.afterEvaluate(p ->
-    {
-      // find the api project here instead of in each project's plugin configuration;
-      // this allows api project relation options (ext.api*) to be specified anywhere in the build.gradle file
-      // (alternatively, pass closures to task configuration, and evaluate the closures when the task is executed)
-      Project apiProject = getCheckedApiProject(project);
-
-      // make sure the api project is evaluated. Important for configure-on-demand mode.
-      if (apiProject != null)
-      {
-        project.evaluationDependsOn(apiProject.getPath());
-
-        if (!apiProject.getPlugins().hasPlugin(_thisPluginType))
-        {
-          apiProject = null;
-        }
-      }
-
-      if (apiProject == null)
-      {
-        return;
-      }
-
-      Task untypedJarTask = project.getTasks().findByName(sourceSet.getJarTaskName());
-      if (!(untypedJarTask instanceof Jar))
-      {
-        return;
-      }
-      Jar jarTask = (Jar) untypedJarTask;
-
-      String snapshotCompatPropertyName = findProperty(FileCompatibilityType.SNAPSHOT);
-      if (project.hasProperty(snapshotCompatPropertyName) && "off".equalsIgnoreCase((String) project.property(snapshotCompatPropertyName)))
-      {
-        project.getLogger().lifecycle("Project {} snapshot compatibility level \"OFF\" is deprecated. Default to \"IGNORE\".",
Default to \"IGNORE\".", - project.getPath()); - } - - // generate the rest model - FileCollection restModelCodegenClasspath = project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION) - .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)) - .plus(sourceSet.getRuntimeClasspath()); - String destinationDirPrefix = getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + File.separatorChar; - FileCollection restModelResolverPath = apiProject.files(getDataSchemaPath(project, sourceSet)) - .plus(getDataModelConfig(apiProject, sourceSet)); - Set watchedRestModelInputDirs = buildWatchedRestModelInputDirs(project, sourceSet); - Set restModelInputDirs = difference(sourceSet.getAllSource().getSrcDirs(), - sourceSet.getResources().getSrcDirs()); - - Task generateRestModelTask = project.getTasks() - .create(sourceSet.getTaskName("generate", "restModel"), GenerateRestModelTask.class, task -> - { - task.dependsOn(project.getTasks().getByName(sourceSet.getClassesTaskName())); - task.setCodegenClasspath(restModelCodegenClasspath); - task.setWatchedCodegenClasspath(restModelCodegenClasspath - .filter(file -> !"main".equals(file.getName()) && !"classes".equals(file.getName()))); - task.setInputDirs(restModelInputDirs); - task.setWatchedInputDirs(watchedRestModelInputDirs.isEmpty() - ? restModelInputDirs : watchedRestModelInputDirs); - // we need all the artifacts from runtime for any private implementation classes the server code might need. - task.setSnapshotDestinationDir(project.file(destinationDirPrefix + "snapshot")); - task.setIdlDestinationDir(project.file(destinationDirPrefix + "idl")); - - @SuppressWarnings("unchecked") - Map pegasusOptions = (Map) project - .getExtensions().getExtraProperties().get("pegasus"); - task.setIdlOptions(pegasusOptions.get(sourceSet.getName()).idlOptions); - - task.setResolverPath(restModelResolverPath); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - - task.onlyIf(t -> !isPropertyTrue(project, SKIP_GENERATE_REST_MODEL)); - - task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE))); - }); - - File apiSnapshotDir = apiProject.file(getSnapshotPath(apiProject, sourceSet)); - File apiIdlDir = apiProject.file(getIdlPath(apiProject, sourceSet)); - apiSnapshotDir.mkdirs(); - - if (!isPropertyTrue(project, SKIP_IDL_CHECK)) - { - apiIdlDir.mkdirs(); - } - - CheckRestModelTask checkRestModelTask = project.getTasks() - .create(sourceSet.getTaskName("check", "RestModel"), CheckRestModelTask.class, task -> - { - task.dependsOn(generateRestModelTask); - task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); - task.setPreviousSnapshotDirectory(apiSnapshotDir); - task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); - task.setPreviousIdlDirectory(apiIdlDir); - task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - task.setModelCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT)); - task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK)); - - task.doLast(new CacheableAction<>(t -> - { - if (!task.isEquivalent()) - { - _restModelCompatMessage.append(task.getWholeMessage()); - } - })); - }); - - CheckSnapshotTask checkSnapshotTask = project.getTasks() - .create(sourceSet.getTaskName("check", "Snapshot"), CheckSnapshotTask.class, task -> { - task.dependsOn(generateRestModelTask); - 
-      CheckSnapshotTask checkSnapshotTask = project.getTasks()
-          .create(sourceSet.getTaskName("check", "Snapshot"), CheckSnapshotTask.class, task -> {
-            task.dependsOn(generateRestModelTask);
-            task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix));
-            task.setPreviousSnapshotDirectory(apiSnapshotDir);
-            task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
-            task.setSnapshotCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT));
-
-            task.onlyIf(t -> isPropertyTrue(project, SKIP_IDL_CHECK));
-          });
-
-      CheckIdlTask checkIdlTask = project.getTasks()
-          .create(sourceSet.getTaskName("check", "Idl"), CheckIdlTask.class, task ->
-          {
-            task.dependsOn(generateRestModelTask);
-            task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix));
-            task.setPreviousIdlDirectory(apiIdlDir);
-            task.setResolverPath(restModelResolverPath);
-            task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
-            task.setIdlCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL));
-            if (isPropertyTrue(project, ENABLE_ARG_FILE))
-            {
-              task.setEnableArgFile(true);
-            }
-
-            task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK)
-                && !"OFF".equals(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL)));
-          });
-
-      // rest model publishing involves a cross-project reference, so
-      // configure it after all projects have been evaluated;
-      // the file copy can be turned off by the "rest.model.noPublish" flag
-      Task publishRestliSnapshotTask = project.getTasks()
-          .create(sourceSet.getTaskName("publish", "RestliSnapshot"), PublishRestModelTask.class, task ->
-          {
-            task.dependsOn(checkRestModelTask, checkSnapshotTask, checkIdlTask);
-            task.from(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix));
-            task.into(apiSnapshotDir);
-            task.setSuffix(SNAPSHOT_FILE_SUFFIX);
-
-            task.onlyIf(t ->
-                isPropertyTrue(project, SNAPSHOT_FORCE_PUBLISH) ||
-                (
-                    !isPropertyTrue(project, SNAPSHOT_NO_PUBLISH) &&
-                    (
-                        (
-                            isPropertyTrue(project, SKIP_IDL_CHECK) &&
-                            isTaskSuccessful(checkSnapshotTask) &&
-                            checkSnapshotTask.getSummaryTarget().exists() &&
-                            !isResultEquivalent(checkSnapshotTask.getSummaryTarget())
-                        ) ||
-                        (
-                            !isPropertyTrue(project, SKIP_IDL_CHECK) &&
-                            isTaskSuccessful(checkRestModelTask) &&
-                            checkRestModelTask.getSummaryTarget().exists() &&
-                            !isResultEquivalent(checkRestModelTask.getSummaryTarget())
-                        )
-                    ))
-            );
-          });
-
-      Task publishRestliIdlTask = project.getTasks()
-          .create(sourceSet.getTaskName("publish", "RestliIdl"), PublishRestModelTask.class, task -> {
-            task.dependsOn(checkRestModelTask, checkIdlTask, checkSnapshotTask);
-            task.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix));
-            task.into(apiIdlDir);
-            task.setSuffix(IDL_FILE_SUFFIX);
-
-            task.onlyIf(t ->
-                isPropertyTrue(project, IDL_FORCE_PUBLISH) ||
-                (
-                    !isPropertyTrue(project, IDL_NO_PUBLISH) &&
-                    (
-                        (
-                            isPropertyTrue(project, SKIP_IDL_CHECK) &&
-                            isTaskSuccessful(checkSnapshotTask) &&
-                            checkSnapshotTask.getSummaryTarget().exists() &&
-                            !isResultEquivalent(checkSnapshotTask.getSummaryTarget(), true)
-                        ) ||
-                        (
-                            !isPropertyTrue(project, SKIP_IDL_CHECK) &&
-                            (
-                                (isTaskSuccessful(checkRestModelTask) &&
-                                    checkRestModelTask.getSummaryTarget().exists() &&
-                                    !isResultEquivalent(checkRestModelTask.getSummaryTarget(), true)) ||
-                                (isTaskSuccessful(checkIdlTask) &&
-                                    checkIdlTask.getSummaryTarget().exists() &&
-                                    !isResultEquivalent(checkIdlTask.getSummaryTarget()))
-                            )
-                        )
-                    ))
-            );
-          });
-
-      project.getLogger().info("API project selected for {} is {}",
-          publishRestliIdlTask.getPath(), apiProject.getPath());
-
-      jarTask.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix));
-      // add generated .restspec.json files as resources to the jar
-      jarTask.dependsOn(publishRestliSnapshotTask, publishRestliIdlTask);
-
-      ChangedFileReportTask changedFileReportTask = (ChangedFileReportTask) project.getTasks()
-          .getByName("changedFilesReport");
-
-      // Use the files from apiDir for generating the changed files report, as we need to notify the user only when
-      // source system files are modified.
-      changedFileReportTask.setIdlFiles(SharedFileUtils.getSuffixedFiles(project, apiIdlDir, IDL_FILE_SUFFIX));
-      changedFileReportTask.setSnapshotFiles(SharedFileUtils.getSuffixedFiles(project, apiSnapshotDir,
-          SNAPSHOT_FILE_SUFFIX));
-      changedFileReportTask.mustRunAfter(publishRestliSnapshotTask, publishRestliIdlTask);
-      changedFileReportTask.doLast(new CacheableAction<>(t ->
-      {
-        if (!changedFileReportTask.getNeedCheckinFiles().isEmpty())
-        {
-          project.getLogger().info("Adding modified files to need checkin list...");
-          _needCheckinFiles.addAll(changedFileReportTask.getNeedCheckinFiles());
-          _needBuildFolders.add(getCheckedApiProject(project).getPath());
-        }
-      }));
-    });
-  }
-
-  protected void configurePegasusSchemaSnapshotGeneration(Project project, SourceSet sourceSet, boolean isExtensionSchema)
-  {
-    File schemaDir = isExtensionSchema ? project.file(getExtensionSchemaPath(project, sourceSet))
-        : project.file(getDataSchemaPath(project, sourceSet));
-
-    if ((isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, PDL_FILE_SUFFIX).isEmpty()) ||
-        (!isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, DATA_TEMPLATE_FILE_SUFFIXES).isEmpty()))
-    {
-      return;
-    }
-
-    Path publishablePegasusSchemaSnapshotDir = project.getBuildDir().toPath().resolve(sourceSet.getName() +
-        (isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT));
-
-    Task generatePegasusSchemaSnapshot = generatePegasusSchemaSnapshot(project, sourceSet,
-        isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT, schemaDir,
-        publishablePegasusSchemaSnapshotDir.toFile(), isExtensionSchema);
-
-    File pegasusSchemaSnapshotDir = project.file(isExtensionSchema ? getPegasusExtensionSchemaSnapshotPath(project, sourceSet)
-        : getPegasusSchemaSnapshotPath(project, sourceSet));
-    pegasusSchemaSnapshotDir.mkdirs();
-
-    Task checkSchemaSnapshot = project.getTasks().create(sourceSet.getTaskName("check",
-        isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT),
-        CheckPegasusSnapshotTask.class, task ->
-        {
-          task.dependsOn(generatePegasusSchemaSnapshot);
-          task.setCurrentSnapshotDirectory(publishablePegasusSchemaSnapshotDir.toFile());
-          task.setPreviousSnapshotDirectory(pegasusSchemaSnapshotDir);
-          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)
-              .plus(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION))
-              .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)));
-          task.setCompatibilityLevel(isExtensionSchema ?
-              PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT)
-              : PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT));
-          task.setCompatibilityMode(isExtensionSchema ?
-              COMPATIBILITY_OPTIONS_MODE_EXTENSION :
-              PropertyUtil.findCompatMode(project, PEGASUS_COMPATIBILITY_MODE));
-          task.setExtensionSchema(isExtensionSchema);
-          task.setHandlerJarPath(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION));
-
-          task.onlyIf(t ->
-          {
-            String pegasusSnapshotCompatPropertyName = isExtensionSchema ?
-                findProperty(FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT)
-                : findProperty(FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT);
-            return !project.hasProperty(pegasusSnapshotCompatPropertyName) ||
-                !"off".equalsIgnoreCase((String) project.property(pegasusSnapshotCompatPropertyName));
-          });
-        });
-
-    Task publishPegasusSchemaSnapshot = publishPegasusSchemaSnapshot(project, sourceSet,
-        isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT : PEGASUS_SCHEMA_SNAPSHOT, checkSchemaSnapshot,
-        publishablePegasusSchemaSnapshotDir.toFile(), pegasusSchemaSnapshotDir);
-
-    project.getTasks().getByName(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).dependsOn(publishPegasusSchemaSnapshot);
-  }
-
-  @SuppressWarnings("deprecation")
-  protected void configureAvroSchemaGeneration(Project project, SourceSet sourceSet)
-  {
-    File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet));
-    File avroDir = project.file(getGeneratedDirPath(project, sourceSet, AVRO_SCHEMA_GEN_TYPE)
-        + File.separatorChar + "avro");
-
-    // generate avro schema files from data schema
-    Task generateAvroSchemaTask = project.getTasks()
-        .create(sourceSet.getTaskName("generate", "avroSchema"), GenerateAvroSchemaTask.class, task -> {
-          task.setInputDir(dataSchemaDir);
-          task.setDestinationDir(avroDir);
-          task.setResolverPath(getDataModelConfig(project, sourceSet));
-          task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
-          if (isPropertyTrue(project, ENABLE_ARG_FILE))
-          {
-            task.setEnableArgFile(true);
-          }
-
-          task.onlyIf(t ->
-          {
-            if (task.getInputDir().exists())
-            {
-              @SuppressWarnings("unchecked")
-              Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
-                  .getExtensions().getExtraProperties().get("pegasus");
-
-              if (pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.AVRO))
-              {
-                return true;
-              }
-            }
-
-            return !project.getConfigurations().getByName("avroSchemaGenerator").isEmpty();
-          });
-
-          task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE)));
-        });
-
-    project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(generateAvroSchemaTask);
-
-    // create the avro schema jar file
-
-    Task avroSchemaJarTask = project.getTasks().create(sourceSet.getName() + "AvroSchemaJar", Jar.class, task ->
-    {
-      // add a path prefix to each file in the data schema directory
-      task.from(avroDir, copySpec ->
-          copySpec.eachFile(fileCopyDetails ->
-              fileCopyDetails.setPath("avro" + File.separatorChar + fileCopyDetails.getPath())));
-
-      // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1
-      // DataHub Note - applied FIXME
-      task.getArchiveAppendix().set(getAppendix(sourceSet, "avro-schema"));
-      task.setDescription("Generate an avro schema jar");
-    });
-
-    if (!isTestSourceSet(sourceSet))
-    {
-      project.getArtifacts().add("avroSchema", avroSchemaJarTask);
-    }
-    else
-    {
-      project.getArtifacts().add("testAvroSchema", avroSchemaJarTask);
-    }
-  }
-
-  protected void configureConversionUtilities(Project project, SourceSet sourceSet)
-  {
-    File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet));
-    boolean reverse = isPropertyTrue(project, CONVERT_TO_PDL_REVERSE);
isPropertyTrue(project, CONVERT_TO_PDL_REVERSE); - boolean keepOriginal = isPropertyTrue(project, CONVERT_TO_PDL_KEEP_ORIGINAL); - boolean skipVerification = isPropertyTrue(project, CONVERT_TO_PDL_SKIP_VERIFICATION); - String preserveSourceCmd = getNonEmptyProperty(project, CONVERT_TO_PDL_PRESERVE_SOURCE_CMD); - - // Utility task for migrating between PDSC and PDL. - project.getTasks().create(sourceSet.getTaskName("convert", "ToPdl"), TranslateSchemasTask.class, task -> - { - task.setInputDir(dataSchemaDir); - task.setDestinationDir(dataSchemaDir); - task.setResolverPath(getDataModelConfig(project, sourceSet)); - task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - task.setPreserveSourceCmd(preserveSourceCmd); - if (reverse) - { - task.setSourceFormat(SchemaFileType.PDL); - task.setDestinationFormat(SchemaFileType.PDSC); - } - else - { - task.setSourceFormat(SchemaFileType.PDSC); - task.setDestinationFormat(SchemaFileType.PDL); - } - task.setKeepOriginal(keepOriginal); - task.setSkipVerification(skipVerification); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - - task.onlyIf(t -> task.getInputDir().exists()); - task.doLast(new CacheableAction<>(t -> - { - project.getLogger().lifecycle("Pegasus schema conversion complete."); - project.getLogger().lifecycle("All pegasus schema files in " + dataSchemaDir + " have been converted"); - project.getLogger().lifecycle("You can use '-PconvertToPdl.reverse=true|false' to change the direction of conversion."); - })); - }); - - // Helper task for reformatting existing PDL schemas by generating them again. - project.getTasks().create(sourceSet.getTaskName("reformat", "Pdl"), TranslateSchemasTask.class, task -> - { - task.setInputDir(dataSchemaDir); - task.setDestinationDir(dataSchemaDir); - task.setResolverPath(getDataModelConfig(project, sourceSet)); - task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - task.setSourceFormat(SchemaFileType.PDL); - task.setDestinationFormat(SchemaFileType.PDL); - task.setKeepOriginal(true); - task.setSkipVerification(true); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - - task.onlyIf(t -> task.getInputDir().exists()); - task.doLast(new CacheableAction<>(t -> project.getLogger().lifecycle("PDL reformat complete."))); - }); - } - - @SuppressWarnings("deprecation") - protected GenerateDataTemplateTask configureDataTemplateGeneration(Project project, SourceSet sourceSet) - { - File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); - File generatedDataTemplateDir = project.file(getGeneratedDirPath(project, sourceSet, DATA_TEMPLATE_GEN_TYPE) - + File.separatorChar + "java"); - File publishableSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() - + File.separatorChar + sourceSet.getName() + "Schemas"); - File publishableLegacySchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() - + File.separatorChar + sourceSet.getName() + "LegacySchemas"); - File publishableExtensionSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() - + File.separatorChar + sourceSet.getName() + "ExtensionSchemas"); - - // generate data template source files from data schema - GenerateDataTemplateTask generateDataTemplatesTask = project.getTasks() - .create(sourceSet.getTaskName("generate", "dataTemplate"), GenerateDataTemplateTask.class, task -> - { - task.setInputDir(dataSchemaDir); - 
task.setDestinationDir(generatedDataTemplateDir); - task.setResolverPath(getDataModelConfig(project, sourceSet)); - task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE)) - { - task.setGenerateLowercasePath(false); - } - - task.onlyIf(t -> - { - if (task.getInputDir().exists()) - { - @SuppressWarnings("unchecked") - Map pegasusOptions = (Map) project - .getExtensions().getExtraProperties().get("pegasus"); - - return pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.PEGASUS); - } - - return false; - }); - - task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE))); - }); - - // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar. - ((Jar) _generateSourcesJarTask).from(generateDataTemplatesTask.getDestinationDir()); - _generateSourcesJarTask.dependsOn(generateDataTemplatesTask); - - _generateJavadocTask.source(generateDataTemplatesTask.getDestinationDir()); - _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() - .plus(project.getConfigurations().getByName("dataTemplateCompile")) - .plus(generateDataTemplatesTask.getResolverPath())); - _generateJavadocTask.dependsOn(generateDataTemplatesTask); - - // Add extra dependencies for data model compilation - project.getDependencies().add("dataTemplateCompile", "com.google.code.findbugs:jsr305:3.0.2"); - - // create new source set for generated java source and class files - String targetSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE); - - SourceSetContainer sourceSets = project.getConvention() - .getPlugin(JavaPluginConvention.class).getSourceSets(); - - SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss -> - { - ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedDataTemplateDir)); - ss.setCompileClasspath(getDataModelConfig(project, sourceSet) - .plus(project.getConfigurations().getByName("dataTemplateCompile"))); - }); - - // idea plugin needs to know about new generated java source directory and its dependencies - addGeneratedDir(project, targetSourceSet, Arrays.asList( - getDataModelConfig(project, sourceSet), - project.getConfigurations().getByName("dataTemplateCompile"))); - - // Set source compatibility to 1.8 as the data-templates now generate code with Java 8 features. 
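The doFirst pattern used in the compile-task configuration below can be shown in isolation. A minimal sketch, assuming only the public Gradle API; the plugin class name is hypothetical and this is not the plugin's actual wiring:

    import org.gradle.api.Plugin;
    import org.gradle.api.Project;
    import org.gradle.api.tasks.compile.JavaCompile;

    // Sketch: pin compilation of generated code to Java 8 at execution time.
    public class ForceJava8Compatibility implements Plugin<Project> {
      @Override
      public void apply(Project project) {
        // configureEach is lazy, so tasks registered later are covered too.
        project.getTasks().withType(JavaCompile.class).configureEach(compileTask ->
            compileTask.doFirst(task -> {
              // The doFirst action receives a plain Task, hence the cast back.
              ((JavaCompile) task).setSourceCompatibility("1.8");
              ((JavaCompile) task).setTargetCompatibility("1.8");
            }));
      }
    }

Setting the compatibility levels inside doFirst defers the change to execution time, so configuration-time logic elsewhere cannot silently overwrite it.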
- JavaCompile compileTask = project.getTasks() - .withType(JavaCompile.class).getByName(targetSourceSet.getCompileJavaTaskName()); - compileTask.doFirst(new CacheableAction<>(task -> { - ((JavaCompile) task).setSourceCompatibility("1.8"); - ((JavaCompile) task).setTargetCompatibility("1.8"); - })); - // make sure that java source files have been generated before compiling them - compileTask.dependsOn(generateDataTemplatesTask); - - // Dummy task to maintain backward compatibility - // TODO: Delete this task once use cases have had time to reference the new task - Task destroyStaleFiles = project.getTasks().create(sourceSet.getName() + "DestroyStaleFiles", Delete.class); - destroyStaleFiles.onlyIf(task -> { - project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath()); - return false; - }); - - // Dummy task to maintain backward compatibility, as this task was replaced by CopySchemas - // TODO: Delete this task once use cases have had time to reference the new task - Task copyPdscSchemasTask = project.getTasks().create(sourceSet.getName() + "CopyPdscSchemas", Copy.class); - copyPdscSchemasTask.dependsOn(destroyStaleFiles); - copyPdscSchemasTask.onlyIf(task -> { - project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath()); - return false; - }); - - // Prepare schema files for publication by syncing schema folders. - Task prepareSchemasForPublishTask = project.getTasks() - .create(sourceSet.getName() + "CopySchemas", Sync.class, task -> - { - task.from(dataSchemaDir, syncSpec -> DATA_TEMPLATE_FILE_SUFFIXES.forEach(suffix -> syncSpec.include("**/*" + suffix))); - task.into(publishableSchemasBuildDir); - }); - prepareSchemasForPublishTask.dependsOn(copyPdscSchemasTask); - - Collection dataTemplateJarDepends = new ArrayList<>(); - dataTemplateJarDepends.add(compileTask); - dataTemplateJarDepends.add(prepareSchemasForPublishTask); - - // Convert all PDL files back to PDSC for publication - // TODO: Remove this conversion permanently once translated PDSCs are no longer needed. - Task prepareLegacySchemasForPublishTask = project.getTasks() - .create(sourceSet.getName() + "TranslateSchemas", TranslateSchemasTask.class, task -> - { - task.setInputDir(dataSchemaDir); - task.setDestinationDir(publishableLegacySchemasBuildDir); - task.setResolverPath(getDataModelConfig(project, sourceSet)); - task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - task.setSourceFormat(SchemaFileType.PDL); - task.setDestinationFormat(SchemaFileType.PDSC); - task.setKeepOriginal(true); - task.setSkipVerification(true); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - }); - - prepareLegacySchemasForPublishTask.dependsOn(destroyStaleFiles); - dataTemplateJarDepends.add(prepareLegacySchemasForPublishTask); - - // extension schema directory - File extensionSchemaDir = project.file(getExtensionSchemaPath(project, sourceSet)); - - if (!SharedFileUtils.getSuffixedFiles(project, extensionSchemaDir, PDL_FILE_SUFFIX).isEmpty()) - { - // Validate extension schemas if extension schemas are provided. 
-    ValidateExtensionSchemaTask validateExtensionSchemaTask = project.getTasks()
-        .create(sourceSet.getTaskName("validate", "ExtensionSchemas"), ValidateExtensionSchemaTask.class, task ->
-        {
-          task.setInputDir(extensionSchemaDir);
-          task.setResolverPath(
-              getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet))));
-          task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
-          if (isPropertyTrue(project, ENABLE_ARG_FILE))
-          {
-            task.setEnableArgFile(true);
-          }
-        });
-
-    Task prepareExtensionSchemasForPublishTask = project.getTasks()
-        .create(sourceSet.getName() + "CopyExtensionSchemas", Sync.class, task ->
-        {
-          task.from(extensionSchemaDir, syncSpec -> syncSpec.include("**/*" + PDL_FILE_SUFFIX));
-          task.into(publishableExtensionSchemasBuildDir);
-        });
-
-    prepareExtensionSchemasForPublishTask.dependsOn(validateExtensionSchemaTask);
-    prepareExtensionSchemasForPublishTask.dependsOn(copyPdscSchemasTask);
-    dataTemplateJarDepends.add(prepareExtensionSchemasForPublishTask);
-  }
-
-  // include pegasus files in the output of this SourceSet
-  project.getTasks().withType(ProcessResources.class).getByName(targetSourceSet.getProcessResourcesTaskName(), it ->
-  {
-    it.from(prepareSchemasForPublishTask, copy -> copy.into("pegasus"));
-    // TODO: Remove this permanently once translated PDSCs are no longer needed.
-    it.from(prepareLegacySchemasForPublishTask, copy -> copy.into(TRANSLATED_SCHEMAS_DIR));
-    Sync copyExtensionSchemasTask = project.getTasks().withType(Sync.class).findByName(sourceSet.getName() + "CopyExtensionSchemas");
-    if (copyExtensionSchemasTask != null)
-    {
-      it.from(copyExtensionSchemasTask, copy -> copy.into("extensions"));
-    }
-  });
-
-  // create data template jar file
-  Jar dataTemplateJarTask = project.getTasks()
-      .create(sourceSet.getName() + "DataTemplateJar", Jar.class, task ->
-      {
-        task.dependsOn(dataTemplateJarDepends);
-        task.from(targetSourceSet.getOutput());
-
-        // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1
-        // DataHub Note - applied FIXME
-        task.getArchiveAppendix().set(getAppendix(sourceSet, "data-template"));
-        task.setDescription("Generate a data template jar");
-      });
-
-  // add the data model and data template jars to the list of project artifacts.
-  if (!isTestSourceSet(sourceSet))
-  {
-    project.getArtifacts().add("dataTemplate", dataTemplateJarTask);
-  }
-  else
-  {
-    project.getArtifacts().add("testDataTemplate", dataTemplateJarTask);
-  }
-
-  // include additional dependencies into the appropriate configuration used to compile the input source set
-  // must include the generated data template classes and their dependencies in the configuration.
-  // "compile" and "testCompile" configurations have been removed in Gradle 7,
-  // but to keep the maximum backward compatibility, here we handle Gradle 7 and earlier versions differently
-  // Once MIN_REQUIRED_VERSION reaches 7.0, we can remove the check of isAtLeastGradle7()
-  String compileConfigName;
-  if (isAtLeastGradle7()) {
-    compileConfigName = isTestSourceSet(sourceSet) ? "testImplementation" : project.getConfigurations().findByName("api") != null ? "api" : "implementation";
-  }
-  else
-  {
-    compileConfigName = isTestSourceSet(sourceSet) ?
"testCompile" : "compile"; - } - - Configuration compileConfig = project.getConfigurations().maybeCreate(compileConfigName); - compileConfig.extendsFrom( - getDataModelConfig(project, sourceSet), - project.getConfigurations().getByName("dataTemplateCompile")); - - // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. - // Replace it with getArchiveFile() on Gradle 7, - // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 - // DataHub Note - applied FIXME - project.getDependencies().add(compileConfigName, project.files( - isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); - - if (_configureIvyPublications) { - // The below Action is only applied when the 'ivy-publish' is applied by the consumer. - // If the consumer does not use ivy-publish, this is a noop. - // this Action prepares the project applying the pegasus plugin to publish artifacts using these steps: - // 1. Registers "feature variants" for pegasus-specific artifacts; - // see https://docs.gradle.org/6.1/userguide/feature_variants.html - // 2. Wires legacy configurations like `dataTemplateCompile` to auto-generated feature variant *Api and - // *Implementation configurations for backwards compatibility. - // 3. Configures the Ivy Publication to include auto-generated feature variant *Api and *Implementation - // configurations and their dependencies. - project.getPlugins().withType(IvyPublishPlugin.class, ivyPublish -> { - if (!isAtLeastGradle61()) - { - throw new GradleException("Using the ivy-publish plugin with the pegasus plugin requires Gradle 6.1 or higher " + - "at build time. Please upgrade."); - } - - JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); - // create new capabilities per source set; automatically creates api and implementation configurations - String featureName = mapSourceSetToFeatureName(targetSourceSet); - try - { - /* - reflection is required to preserve compatibility with Gradle 5.2.1 and below - TODO once Gradle 5.3+ is required, remove reflection and replace with: - java.registerFeature(featureName, featureSpec -> { - featureSpec.usingSourceSet(targetSourceSet); - }); - */ - Method registerFeature = JavaPluginExtension.class.getDeclaredMethod("registerFeature", String.class, Action.class); - Action/**/ featureSpecAction = createFeatureVariantFromSourceSet(targetSourceSet); - registerFeature.invoke(java, featureName, featureSpecAction); - } - catch (ReflectiveOperationException e) - { - throw new GradleException("Unable to register new feature variant", e); - } - - // expose transitive dependencies to consumers via variant configurations - Configuration featureConfiguration = project.getConfigurations().getByName(featureName); - Configuration mainGeneratedDataTemplateApi = project.getConfigurations().getByName(targetSourceSet.getApiConfigurationName()); - featureConfiguration.extendsFrom(mainGeneratedDataTemplateApi); - mainGeneratedDataTemplateApi.extendsFrom( - getDataModelConfig(project, targetSourceSet), - project.getConfigurations().getByName("dataTemplateCompile")); - - // Configure the existing IvyPublication - // For backwards-compatibility, make the legacy dataTemplate/testDataTemplate configurations extend - // their replacements, auto-created when we registered the new feature variant - project.afterEvaluate(p -> { - PublishingExtension publishing = p.getExtensions().getByType(PublishingExtension.class); - // When configuring a 
Gradle Publication, use this value to find the name of the publication to configure. Defaults to "ivy". - String publicationName = p.getExtensions().getExtraProperties().getProperties().getOrDefault("PegasusPublicationName", "ivy").toString(); - IvyPublication ivyPublication = publishing.getPublications().withType(IvyPublication.class).getByName(publicationName); - ivyPublication.configurations(configurations -> configurations.create(featureName, legacyConfiguration -> { - legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getApiElementsConfigurationName()).getName()); - legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getRuntimeElementsConfigurationName()).getName()); - })); - }); - }); - } - - if (debug) - { - System.out.println("configureDataTemplateGeneration sourceSet " + sourceSet.getName()); - System.out.println(compileConfigName + ".allDependencies : " - + project.getConfigurations().getByName(compileConfigName).getAllDependencies()); - System.out.println(compileConfigName + ".extendsFrom: " - + project.getConfigurations().getByName(compileConfigName).getExtendsFrom()); - System.out.println(compileConfigName + ".transitive: " - + project.getConfigurations().getByName(compileConfigName).isTransitive()); - } - - project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(dataTemplateJarTask); - return generateDataTemplatesTask; - } - - private String mapSourceSetToFeatureName(SourceSet sourceSet) { - String featureName = ""; - switch (sourceSet.getName()) { - case "mainGeneratedDataTemplate": - featureName = "dataTemplate"; - break; - case "testGeneratedDataTemplate": - featureName = "testDataTemplate"; - break; - case "mainGeneratedRest": - featureName = "restClient"; - break; - case "testGeneratedRest": - featureName = "testRestClient"; - break; - case "mainGeneratedAvroSchema": - featureName = "avroSchema"; - break; - case "testGeneratedAvroSchema": - featureName = "testAvroSchema"; - break; - default: - String msg = String.format("Unable to map %s to an appropriate feature name", sourceSet); - throw new GradleException(msg); - } - return featureName; - } - - // Generate rest client from idl files generated from java source files in the specified source set. - // - // This generates rest client source files from idl file generated from java source files - // in the source set. The generated rest client source files will be in a new source set. - // It also compiles the rest client source files into classes, and creates both the - // rest model and rest client jar files. - // - @SuppressWarnings("deprecation") - protected void configureRestClientGeneration(Project project, SourceSet sourceSet) - { - // idl directory for api project - File idlDir = project.file(getIdlPath(project, sourceSet)); - if (SharedFileUtils.getSuffixedFiles(project, idlDir, IDL_FILE_SUFFIX).isEmpty() && !isPropertyTrue(project, - PROCESS_EMPTY_IDL_DIR)) - { - return; - } - File generatedRestClientDir = project.file(getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) - + File.separatorChar + "java"); - - // always include imported data template jars in compileClasspath of rest client - FileCollection dataModelConfig = getDataModelConfig(project, sourceSet); - - // if data templates generated from this source set, add the generated data template jar to compileClasspath - // of rest client. 
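The conditional classpath wiring described in the comment above can be sketched on its own. The helper class and method names below are hypothetical, for illustration only, assuming the standard Gradle file-collection API:

    import org.gradle.api.Project;
    import org.gradle.api.file.FileCollection;
    import org.gradle.api.tasks.bundling.Jar;

    final class RestClientClasspathSketch {
      // Always include the imported data model jars; add the locally generated
      // data template jar only when the corresponding jar task exists.
      static FileCollection dataModels(Project project, FileCollection dataModelConfig, Jar dataTemplateJarTask) {
        if (dataTemplateJarTask == null) {
          return dataModelConfig;
        }
        // Passing the archive-file provider keeps the implicit task dependency.
        return dataModelConfig.plus(project.files(dataTemplateJarTask.getArchiveFile()));
      }
    }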
- String dataTemplateSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE); - - Jar dataTemplateJarTask = null; - - SourceSetContainer sourceSets = project.getConvention() - .getPlugin(JavaPluginConvention.class).getSourceSets(); - - FileCollection dataModels; - if (sourceSets.findByName(dataTemplateSourceSetName) != null) - { - if (debug) - { - System.out.println("sourceSet " + sourceSet.getName() + " has generated sourceSet " + dataTemplateSourceSetName); - } - dataTemplateJarTask = (Jar) project.getTasks().getByName(sourceSet.getName() + "DataTemplateJar"); - // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. - // Replace it with getArchiveFile() on Gradle 7, - // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 - // DataHub Note - applied FIXME - dataModels = dataModelConfig.plus(project.files( - isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); - } - else - { - dataModels = dataModelConfig; - } - - // create source set for generated rest model, rest client source and class files. - String targetSourceSetName = getGeneratedSourceSetName(sourceSet, REST_GEN_TYPE); - SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss -> - { - ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedRestClientDir)); - ss.setCompileClasspath(dataModels.plus(project.getConfigurations().getByName("restClientCompile"))); - }); - - project.getPlugins().withType(EclipsePlugin.class, eclipsePlugin -> { - EclipseModel eclipseModel = (EclipseModel) project.getExtensions().findByName("eclipse"); - eclipseModel.getClasspath().getPlusConfigurations() - .add(project.getConfigurations().getByName("restClientCompile")); - }); - - // idea plugin needs to know about new rest client source directory and its dependencies - addGeneratedDir(project, targetSourceSet, Arrays.asList( - getDataModelConfig(project, sourceSet), - project.getConfigurations().getByName("restClientCompile"))); - - // generate the rest client source files - GenerateRestClientTask generateRestClientTask = project.getTasks() - .create(targetSourceSet.getTaskName("generate", "restClient"), GenerateRestClientTask.class, task -> - { - task.dependsOn(project.getConfigurations().getByName("dataTemplate")); - task.setInputDir(idlDir); - task.setResolverPath(dataModels.plus(project.getConfigurations().getByName("restClientCompile"))); - task.setRuntimeClasspath(project.getConfigurations().getByName("dataModel") - .plus(project.getConfigurations().getByName("dataTemplate").getArtifacts().getFiles())); - task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - task.setDestinationDir(generatedRestClientDir); - task.setRestli2FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_2)); - task.setRestli1FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_1)); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE)) - { - task.setGenerateLowercasePath(false); - } - if (isPropertyTrue(project, ENABLE_FLUENT_API)) - { - task.setGenerateFluentApi(true); - } - task.doFirst(new CacheableAction<>(t -> project.delete(generatedRestClientDir))); - }); - - if (dataTemplateJarTask != null) - { - generateRestClientTask.dependsOn(dataTemplateJarTask); - } - - // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar. 
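The TODO above could plausibly be resolved by holding a typed TaskProvider instead of a raw Task. The helper below is an illustrative sketch under that assumption, not the plugin's actual code:

    import org.gradle.api.Project;
    import org.gradle.api.tasks.TaskProvider;
    import org.gradle.api.tasks.bundling.Jar;

    final class TypedSourcesJarSketch {
      static void includeGeneratedSources(Project project, String sourcesJarTaskName, Object generatedSourcesDir) {
        // named(name, type) fails fast if the task is not a Jar, so the
        // unchecked cast in the statement that follows becomes unnecessary.
        TaskProvider<Jar> sourcesJar = project.getTasks().named(sourcesJarTaskName, Jar.class);
        sourcesJar.configure(jar -> jar.from(generatedSourcesDir));
      }
    }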
- ((Jar) _generateSourcesJarTask).from(generateRestClientTask.getDestinationDir()); - _generateSourcesJarTask.dependsOn(generateRestClientTask); - - _generateJavadocTask.source(generateRestClientTask.getDestinationDir()); - _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() - .plus(project.getConfigurations().getByName("restClientCompile")) - .plus(generateRestClientTask.getResolverPath())); - _generateJavadocTask.dependsOn(generateRestClientTask); - - // make sure rest client source files have been generated before compiling them - JavaCompile compileGeneratedRestClientTask = (JavaCompile) project.getTasks() - .getByName(targetSourceSet.getCompileJavaTaskName()); - compileGeneratedRestClientTask.dependsOn(generateRestClientTask); - compileGeneratedRestClientTask.getOptions().getCompilerArgs().add("-Xlint:-deprecation"); - - // create the rest model jar file - Task restModelJarTask = project.getTasks().create(sourceSet.getName() + "RestModelJar", Jar.class, task -> - { - task.from(idlDir, copySpec -> - { - copySpec.eachFile(fileCopyDetails -> project.getLogger() - .info("Add idl file: {}", fileCopyDetails)); - copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); - }); - // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 - // DataHub Note - applied FIXME - task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-model")); - task.setDescription("Generate rest model jar"); - }); - - // create the rest client jar file - Task restClientJarTask = project.getTasks() - .create(sourceSet.getName() + "RestClientJar", Jar.class, task -> - { - task.dependsOn(compileGeneratedRestClientTask); - task.from(idlDir, copySpec -> { - copySpec.eachFile(fileCopyDetails -> { - project.getLogger().info("Add interface file: {}", fileCopyDetails); - fileCopyDetails.setPath("idl" + File.separatorChar + fileCopyDetails.getPath()); - }); - copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); - }); - task.from(targetSourceSet.getOutput()); - // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 - // DataHub Note - applied FIXME - task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-client")); - task.setDescription("Generate rest client jar"); - }); - - // add the rest model jar and the rest client jar to the list of project artifacts. - if (!isTestSourceSet(sourceSet)) - { - project.getArtifacts().add("restModel", restModelJarTask); - project.getArtifacts().add("restClient", restClientJarTask); - } - else - { - project.getArtifacts().add("testRestModel", restModelJarTask); - project.getArtifacts().add("testRestClient", restClientJarTask); - } - } - - // Return the appendix for generated jar files. - // The source set name is not included for the main source set. - private static String getAppendix(SourceSet sourceSet, String suffix) - { - return sourceSet.getName().equals("main") ? 
suffix : sourceSet.getName() + '-' + suffix;
-  }
-
-  private static Project getApiProject(Project project)
-  {
-    if (project.getExtensions().getExtraProperties().has("apiProject"))
-    {
-      return (Project) project.getExtensions().getExtraProperties().get("apiProject");
-    }
-
-    List subsSuffixes;
-    if (project.getExtensions().getExtraProperties().has("apiProjectSubstitutionSuffixes"))
-    {
-      @SuppressWarnings("unchecked")
-      List suffixValue = (List) project.getExtensions()
-          .getExtraProperties().get("apiProjectSubstitutionSuffixes");
-
-      subsSuffixes = suffixValue;
-    }
-    else
-    {
-      subsSuffixes = Arrays.asList("-impl", "-service", "-server", "-server-impl");
-    }
-
-    for (String suffix : subsSuffixes)
-    {
-      if (project.getPath().endsWith(suffix))
-      {
-        String searchPath = project.getPath().substring(0, project.getPath().length() - suffix.length()) + "-api";
-        Project apiProject = project.findProject(searchPath);
-        if (apiProject != null)
-        {
-          return apiProject;
-        }
-      }
-    }
-
-    return project.findProject(project.getPath() + "-api");
-  }
-
-  private static Project getCheckedApiProject(Project project)
-  {
-    Project apiProject = getApiProject(project);
-
-    if (apiProject == project)
-    {
-      throw new GradleException("The API project of ${project.path} must not be itself.");
-    }
-
-    return apiProject;
-  }
-
-  /**
-   * return the property value if the property exists and is not empty (-Pname=value)
-   * return null if property does not exist or the property is empty (-Pname)
-   *
-   * @param project the project where to look for the property
-   * @param propertyName the name of the property
-   */
-  public static String getNonEmptyProperty(Project project, String propertyName)
-  {
-    if (!project.hasProperty(propertyName))
-    {
-      return null;
-    }
-
-    String propertyValue = project.property(propertyName).toString();
-    if (propertyValue.isEmpty())
-    {
-      return null;
-    }
-
-    return propertyValue;
-  }
-
-  /**
-   * Return true if the given property exists and its value is true
-   *
-   * @param project the project where to look for the property
-   * @param propertyName the name of the property
-   */
-  public static boolean isPropertyTrue(Project project, String propertyName)
-  {
-    return project.hasProperty(propertyName) && Boolean.valueOf(project.property(propertyName).toString());
-  }
-
-  private static String createModifiedFilesMessage(Collection nonEquivExpectedFiles,
-      Collection foldersToBeBuilt)
-  {
-    StringBuilder builder = new StringBuilder();
-    builder.append("\nRemember to check in the changes to the following new or modified files:\n");
-    for (String file : nonEquivExpectedFiles)
-    {
-      builder.append(" ");
-      builder.append(file);
-      builder.append("\n");
-    }
-
-    if (!foldersToBeBuilt.isEmpty())
-    {
-      builder.append("\nThe file modifications include service interface changes, you can build the following projects "
-          + "to re-generate the client APIs accordingly:\n");
-      for (String folder : foldersToBeBuilt)
-      {
-        builder.append(" ");
-        builder.append(folder);
-        builder.append("\n");
-      }
-    }
-
-    return builder.toString();
-  }
-
-  private static String createPossibleMissingFilesMessage(Collection missingFiles)
-  {
-    StringBuilder builder = new StringBuilder();
-    builder.append("If this is the result of an automated build, then you may have forgotten to check in some snapshot or idl files:\n");
-    for (String file : missingFiles)
-    {
-      builder.append(" ");
-      builder.append(file);
-      builder.append("\n");
-    }
-
-    return builder.toString();
-  }
-
-  private static String findProperty(FileCompatibilityType type)
-
{ - String property; - switch (type) - { - case SNAPSHOT: - property = SNAPSHOT_COMPAT_REQUIREMENT; - break; - case IDL: - property = IDL_COMPAT_REQUIREMENT; - break; - case PEGASUS_SCHEMA_SNAPSHOT: - property = PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT; - break; - case PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: - property = PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT; - break; - default: - throw new GradleException("No property defined for compatibility type " + type); - } - return property; - } - - private static Set buildWatchedRestModelInputDirs(Project project, SourceSet sourceSet) { - @SuppressWarnings("unchecked") - Map pegasusOptions = (Map) project - .getExtensions().getExtraProperties().get("pegasus"); - - File rootPath = new File(project.getProjectDir(), - pegasusOptions.get(sourceSet.getName()).restModelOptions.getRestResourcesRootPath()); - - IdlOptions idlOptions = pegasusOptions.get(sourceSet.getName()).idlOptions; - - // if idlItems exist, only watch the smaller subset - return idlOptions.getIdlItems().stream() - .flatMap(idlItem -> Arrays.stream(idlItem.packageNames)) - .map(packageName -> new File(rootPath, packageName.replace('.', '/'))) - .collect(Collectors.toCollection(TreeSet::new)); - } - - private static Set difference(Set left, Set right) - { - Set result = new HashSet<>(left); - result.removeAll(right); - return result; - } - - /** - * Configures the given source set so that its data schema directory (usually 'pegasus') is marked as a resource root. - * The purpose of this is to improve the IDE experience. Makes sure to exclude this directory from being packaged in - * with the default Jar task. - */ - private static void configureDataSchemaResourcesRoot(Project project, SourceSet sourceSet) - { - sourceSet.resources(sourceDirectorySet -> { - final String dataSchemaPath = getDataSchemaPath(project, sourceSet); - final File dataSchemaRoot = project.file(dataSchemaPath); - sourceDirectorySet.srcDir(dataSchemaPath); - project.getLogger().info("Adding resource root '{}'", dataSchemaPath); - - final String extensionsSchemaPath = getExtensionSchemaPath(project, sourceSet); - final File extensionsSchemaRoot = project.file(extensionsSchemaPath); - sourceDirectorySet.srcDir(extensionsSchemaPath); - project.getLogger().info("Adding resource root '{}'", extensionsSchemaPath); - - // Exclude the data schema and extensions schema directory from being copied into the default Jar task - sourceDirectorySet.getFilter().exclude(fileTreeElement -> { - final File file = fileTreeElement.getFile(); - // Traversal starts with the children of a resource root, so checking the direct parent is sufficient - final boolean underDataSchemaRoot = dataSchemaRoot.equals(file.getParentFile()); - final boolean underExtensionsSchemaRoot = extensionsSchemaRoot.equals(file.getParentFile()); - final boolean exclude = (underDataSchemaRoot || underExtensionsSchemaRoot); - if (exclude) - { - project.getLogger().info("Excluding resource directory '{}'", file); - } - return exclude; - }); - }); - } - - private Task generatePegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, File inputDir, File outputDir, - boolean isExtensionSchema) - { - return project.getTasks().create(sourceSet.getTaskName("generate", taskName), - GeneratePegasusSnapshotTask.class, task -> - { - task.setInputDir(inputDir); - task.setResolverPath(getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet)))); - 
task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); - task.setPegasusSchemaSnapshotDestinationDir(outputDir); - task.setExtensionSchema(isExtensionSchema); - if (isPropertyTrue(project, ENABLE_ARG_FILE)) - { - task.setEnableArgFile(true); - } - }); - } - - private Task publishPegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, Task checkPegasusSnapshotTask, - File inputDir, File outputDir) - { - return project.getTasks().create(sourceSet.getTaskName("publish", taskName), - Sync.class, task -> - { - task.dependsOn(checkPegasusSnapshotTask); - task.from(inputDir); - task.into(outputDir); - task.onlyIf(t -> !SharedFileUtils.getSuffixedFiles(project, inputDir, PDL_FILE_SUFFIX).isEmpty()); - }); - } - - private void checkGradleVersion(Project project) - { - if (MIN_REQUIRED_VERSION.compareTo(GradleVersion.current()) > 0) - { - throw new GradleException(String.format("This plugin does not support %s. Please use %s or later.", - GradleVersion.current(), - MIN_REQUIRED_VERSION)); - } - if (MIN_SUGGESTED_VERSION.compareTo(GradleVersion.current()) > 0) - { - project.getLogger().warn(String.format("Pegasus supports %s, but it may not be supported in the next major release. Please use %s or later.", - GradleVersion.current(), - MIN_SUGGESTED_VERSION)); - } - } - - /** - * Reflection is necessary to obscure types introduced in Gradle 5.3 - * - * @param sourceSet the target sourceset upon which to create a new feature variant - * @return an Action which modifies a org.gradle.api.plugins.FeatureSpec instance - */ - private Action/**/ createFeatureVariantFromSourceSet(SourceSet sourceSet) - { - return featureSpec -> { - try - { - Class clazz = Class.forName("org.gradle.api.plugins.FeatureSpec"); - Method usingSourceSet = clazz.getDeclaredMethod("usingSourceSet", SourceSet.class); - usingSourceSet.invoke(featureSpec, sourceSet); - } - catch (ReflectiveOperationException e) - { - throw new GradleException("Unable to invoke FeatureSpec#usingSourceSet(SourceSet)", e); - } - }; - } - - protected static boolean isAtLeastGradle61() - { - return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("6.1")) >= 0; - } - - public static boolean isAtLeastGradle7() { - return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("7.0")) >= 0; - } -} \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java deleted file mode 100644 index a2aafaf1be017..0000000000000 --- a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java +++ /dev/null @@ -1,124 +0,0 @@ -package com.linkedin.pegasus.gradle.tasks; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.gradle.api.DefaultTask; -import org.gradle.api.file.FileCollection; -import org.gradle.api.specs.Specs; -import org.gradle.api.tasks.InputFiles; -import org.gradle.api.tasks.Internal; -import org.gradle.api.tasks.SkipWhenEmpty; -import org.gradle.api.tasks.TaskAction; -import org.gradle.work.FileChange; -import org.gradle.work.InputChanges; - - -public class ChangedFileReportTask extends DefaultTask -{ - private final Collection _needCheckinFiles = new ArrayList<>(); - - private FileCollection _idlFiles = getProject().files(); - private FileCollection 
_snapshotFiles = getProject().files(); - - public ChangedFileReportTask() - { - //with Gradle 6.0, Declaring an incremental task without outputs is not allowed. - getOutputs().upToDateWhen(Specs.satisfyNone()); - } - - // DataHub Note - updated for InputChanges - @TaskAction - public void checkFilesForChanges(InputChanges inputChanges) - { - getLogger().lifecycle("Checking idl and snapshot files for changes..."); - getLogger().info("idlFiles: " + _idlFiles.getAsPath()); - getLogger().info("snapshotFiles: " + _snapshotFiles.getAsPath()); - - Set filesRemoved = new HashSet<>(); - Set filesAdded = new HashSet<>(); - Set filesChanged = new HashSet<>(); - - if (inputChanges.isIncremental()) - { - Consumer handleChange = change -> - { - switch (change.getChangeType()) { - case ADDED: - filesAdded.add(change.getFile().getAbsolutePath()); - break; - case REMOVED: - filesRemoved.add(change.getFile().getAbsolutePath()); - break; - case MODIFIED: - filesChanged.add(change.getFile().getAbsolutePath()); - break; - } - }; - - inputChanges.getFileChanges(_idlFiles).forEach(handleChange); - inputChanges.getFileChanges(_snapshotFiles).forEach(handleChange); - - if (!filesRemoved.isEmpty()) - { - String files = joinByComma(filesRemoved); - _needCheckinFiles.add(files); - getLogger().lifecycle( - "The following files have been removed, be sure to remove them from source control: {}", files); - } - - if (!filesAdded.isEmpty()) - { - String files = joinByComma(filesAdded); - _needCheckinFiles.add(files); - getLogger().lifecycle("The following files have been added, be sure to add them to source control: {}", files); - } - - if (!filesChanged.isEmpty()) - { - String files = joinByComma(filesChanged); - _needCheckinFiles.add(files); - getLogger().lifecycle( - "The following files have been changed, be sure to commit the changes to source control: {}", files); - } - } - } - - private String joinByComma(Set files) - { - return files.stream().collect(Collectors.joining(", ")); - } - - @InputFiles - @SkipWhenEmpty - public FileCollection getSnapshotFiles() - { - return _snapshotFiles; - } - - public void setSnapshotFiles(FileCollection snapshotFiles) - { - _snapshotFiles = snapshotFiles; - } - - @InputFiles - @SkipWhenEmpty - public FileCollection getIdlFiles() - { - return _idlFiles; - } - - public void setIdlFiles(FileCollection idlFiles) - { - _idlFiles = idlFiles; - } - - @Internal - public Collection getNeedCheckinFiles() - { - return _needCheckinFiles; - } -} \ No newline at end of file From 7ae310d3a761887d0ac7c8a3720be545f661a89f Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 8 Feb 2024 06:32:23 +0530 Subject: [PATCH 496/792] feat(graphql): get raw aspects for assertions, allow aspectNames filter (#9792) --- .../linkedin/datahub/graphql/GmsGraphQLEngine.java | 4 +++- .../datahub/graphql/WeaklyTypedAspectsResolver.java | 7 ++++++- .../src/main/resources/entity.graphql | 12 ++++++++++++ 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index e3eef0688c269..28b3a982c7b28 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -2436,7 +2436,9 @@ private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { ? 
assertion.getDataPlatformInstance().getUrn() : null; })) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); } private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index d8665ae784bd1..fd23cd5fdda45 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -37,7 +37,12 @@ public class WeaklyTypedAspectsResolver implements DataFetcher Date: Thu, 8 Feb 2024 14:05:26 +0530 Subject: [PATCH 497/792] feat(ingest/slack): source to get user info from slack (#9776) --- metadata-ingestion/setup.py | 8 + .../ingestion/source/slack/__init__.py | 0 .../datahub/ingestion/source/slack/slack.py | 181 ++++++++++++++++++ 3 files changed, 189 insertions(+) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/slack/__init__.py create mode 100644 metadata-ingestion/src/datahub/ingestion/source/slack/slack.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index bbbab73fd1cf5..74dcde5e066b3 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -245,6 +245,10 @@ powerbi_report_server = {"requests", "requests_ntlm"} +slack = { + "slack-sdk==3.18.1" +} + databricks = { # 0.1.11 appears to have authentication issues with azure databricks "databricks-sdk>=0.9.0", @@ -367,6 +371,7 @@ "snowflake": snowflake_common | usage_common | sqlglot_lib, "sqlalchemy": sql_common, "sql-queries": usage_common | sqlglot_lib, + "slack": slack, "superset": { "requests", "sqlalchemy", @@ -503,6 +508,7 @@ "redshift", "s3", "snowflake", + "slack", "tableau", "teradata", "trino", @@ -543,6 +549,7 @@ "kafka-connect", "ldap", "mongodb", + "slack", "mssql", "mysql", "mariadb", @@ -597,6 +604,7 @@ "postgres = datahub.ingestion.source.sql.postgres:PostgresSource", "redash = datahub.ingestion.source.redash:RedashSource", "redshift = datahub.ingestion.source.redshift.redshift:RedshiftSource", + "slack = datahub.ingestion.source.slack.slack:SlackSource", "snowflake = datahub.ingestion.source.snowflake.snowflake_v2:SnowflakeV2Source", "superset = datahub.ingestion.source.superset:SupersetSource", "tableau = datahub.ingestion.source.tableau:TableauSource", diff --git a/metadata-ingestion/src/datahub/ingestion/source/slack/__init__.py b/metadata-ingestion/src/datahub/ingestion/source/slack/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/ingestion/source/slack/slack.py b/metadata-ingestion/src/datahub/ingestion/source/slack/slack.py new file mode 100644 index 0000000000000..ed425cc25d98f --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/slack/slack.py @@ -0,0 +1,181 @@ +import logging +import textwrap +from dataclasses import dataclass +from typing import Iterable, Optional + +from pydantic import Field, SecretStr +from slack_sdk import WebClient + +from datahub.configuration.common import ConfigModel +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.api.common import PipelineContext +from 
datahub.ingestion.api.decorators import ( + SupportStatus, + config_class, + platform_name, + support_status, +) +from datahub.ingestion.api.source import ( + SourceReport, + TestableSource, + TestConnectionReport, +) +from datahub.ingestion.api.workunit import MetadataWorkUnit +from datahub.metadata.schema_classes import CorpUserEditableInfoClass +from datahub.utilities.urns.urn import Urn + +logger: logging.Logger = logging.getLogger(__name__) + + +@dataclass +class CorpUser: + urn: Optional[str] = None + email: Optional[str] = None + slack_id: Optional[str] = None + title: Optional[str] = None + image_url: Optional[str] = None + phone: Optional[str] = None + + +class SlackSourceConfig(ConfigModel): + bot_token: SecretStr = Field( + description="Bot token for the Slack workspace. Needs `users:read`, `users:read.email` and `users.profile:read` scopes.", + ) + + +@platform_name("Slack") +@config_class(SlackSourceConfig) +@support_status(SupportStatus.TESTING) +class SlackSource(TestableSource): + def __init__(self, ctx: PipelineContext, config: SlackSourceConfig): + self.ctx = ctx + self.config = config + self.report = SourceReport() + + @classmethod + def create(cls, config_dict, ctx): + config = SlackSourceConfig.parse_obj(config_dict) + return cls(ctx, config) + + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + raise NotImplementedError("This class does not implement this method") + + def get_slack_client(self) -> WebClient: + return WebClient(token=self.config.bot_token.get_secret_value()) + + def get_workunits_internal( + self, + ) -> Iterable[MetadataWorkUnit]: + assert self.ctx.graph is not None + auth_resp = self.get_slack_client().auth_test() + logger.info("Successfully connected to Slack") + logger.info(auth_resp.data) + for user_obj in self.get_user_to_be_updated(): + self.populate_slack_id_from_email(user_obj) + if user_obj.slack_id is None: + continue + self.populate_user_profile(user_obj) + if user_obj.urn is None: + continue + logger.info(f"User: {user_obj}") + corpuser_editable_info = ( + self.ctx.graph.get_aspect( + entity_urn=user_obj.urn, aspect_type=CorpUserEditableInfoClass + ) + or CorpUserEditableInfoClass() + ) + corpuser_editable_info.email = user_obj.email + corpuser_editable_info.slack = user_obj.slack_id + corpuser_editable_info.title = user_obj.title + if user_obj.image_url: + corpuser_editable_info.pictureLink = user_obj.image_url + if user_obj.phone: + corpuser_editable_info.phone = user_obj.phone + yield MetadataWorkUnit( + id=f"{user_obj.urn}", + mcp=MetadataChangeProposalWrapper( + entityUrn=user_obj.urn, + aspect=corpuser_editable_info, + ), + ) + + def populate_user_profile(self, user_obj: CorpUser) -> None: + try: + # https://api.slack.com/methods/users.profile.get + user_profile_res = self.get_slack_client().users_profile_get( + user=user_obj.slack_id + ) + user_profile = user_profile_res.get("profile", {}) + user_obj.title = user_profile.get("title") + user_obj.image_url = user_profile.get("image_192") + user_obj.phone = user_profile.get("phone") + except Exception as e: + if "missing_scope" in str(e): + raise e + return + + def populate_slack_id_from_email(self, user_obj: CorpUser) -> None: + if user_obj.email is None: + return + try: + # https://api.slack.com/methods/users.lookupByEmail + user_info_res = self.get_slack_client().users_lookupByEmail( + email=user_obj.email + ) + user_info = user_info_res.get("user", {}) + user_obj.slack_id = user_info.get("id") + except Exception as e: + if "users_not_found" in 
str(e): + return + raise e + + def get_user_to_be_updated(self) -> Iterable[CorpUser]: + graphql_query = textwrap.dedent( + """ + query listUsers($input: ListUsersInput!) { + listUsers(input: $input) { + total + users { + urn + editableProperties { + email + slack + } + } + } + } + """ + ) + start = 0 + count = 10 + total = count + + assert self.ctx.graph is not None + + while start < total: + variables = {"input": {"start": start, "count": count}} + response = self.ctx.graph.execute_graphql( + query=graphql_query, variables=variables + ) + list_users = response.get("listUsers", {}) + total = list_users.get("total", 0) + users = list_users.get("users", []) + for user in users: + user_obj = CorpUser() + editable_properties = user.get("editableProperties", {}) + user_obj.urn = user.get("urn") + if user_obj.urn is None: + continue + if editable_properties is not None: + user_obj.email = editable_properties.get("email") + if user_obj.email is None: + urn_id = Urn.from_string(user_obj.urn).get_entity_id_as_string() + if "@" in urn_id: + user_obj.email = urn_id + if user_obj.email is not None: + yield user_obj + start += count + + def get_report(self) -> SourceReport: + return self.report From a5e473812ec9152b875236213938a8d51f6b52c4 Mon Sep 17 00:00:00 2001 From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:25:36 +0530 Subject: [PATCH 498/792] tests(domains): added multiple cypress tests scenarios (#9750) --- .../cypress/e2e/domains/nested_domains.js | 205 ++++++++++++++---- .../tests/cypress/cypress/support/commands.js | 48 +++- 2 files changed, 207 insertions(+), 46 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/domains/nested_domains.js b/smoke-test/tests/cypress/cypress/e2e/domains/nested_domains.js index 59af6daf9b8f6..3910a280a303c 100644 --- a/smoke-test/tests/cypress/cypress/e2e/domains/nested_domains.js +++ b/smoke-test/tests/cypress/cypress/e2e/domains/nested_domains.js @@ -1,10 +1,26 @@ const domainName = "CypressNestedDomain"; -const domainDescription = "CypressNestedDomainDescription"; + +//Delete Unecessary Existing Domains +const deleteExisitingDomain = () => { + cy.get('a[href*="urn:li"] span[class^="ant-typography"]') + .should('be.visible') + .its('length') + .then((length) => { + for (let i = 0; i < length - 1; i++) { + cy.get('a[href*="urn:li"] span[class^="ant-typography"]') + .should('be.visible') + .first() + .click({ force: true }); + deleteFromDomainDropdown(); + } + }); + cy.waitTextVisible('Marketing'); + } const createDomain = () => { - cy.clickOptionWithTestId("domains-new-domain-button"); + cy.get('.anticon-plus').first().click() + cy.waitTextVisible('Create New Domain') cy.get('[data-testid="create-domain-name"]').click().type(domainName); - cy.get('[data-testid="create-domain-description"]').click().type(domainDescription); cy.clickOptionWithTestId("create-domain-button"); cy.waitTextVisible("Created domain!"); } @@ -16,7 +32,7 @@ const moveDomaintoRootLevel = () => { cy.get('[data-testid="move-domain-modal"]').contains("Marketing").click({force: true}); cy.waitTextVisible('Marketing') cy.clickOptionWithTestId("move-domain-modal-move-button") - } + } const moveDomaintoParent = () => { cy.get('[data-testid="domain-list-item"]').contains("Marketing").prev().click(); @@ -25,51 +41,67 @@ const moveDomaintoParent = () => { cy.openThreeDotDropdown(); cy.clickOptionWithTestId("entity-menu-move-button"); cy.clickOptionWithTestId("move-domain-modal-move-button") - } + } - const deleteFromDomainDropdown = () => 
{ - cy.clickOptionWithText('Filters') - cy.openThreeDotDropdown(); - cy.clickOptionWithTestId("entity-menu-delete-button"); - cy.waitTextVisible("Are you sure you want to remove this Domain?"); - cy.clickOptionWithText("Yes"); - } +const getDomainList = (domainName) =>{ + cy.contains('span.ant-typography-ellipsis', domainName) + .parent('[data-testid="domain-list-item"]') + .find('[aria-label="right"]') + .click(); + } + +const deleteFromDomainDropdown = () => { + cy.clickOptionWithText('Filters') + cy.openThreeDotDropdown(); + cy.clickOptionWithTestId("entity-menu-delete-button"); + cy.waitTextVisible("Are you sure you want to remove this Domain?"); + cy.clickOptionWithText("Yes"); + } const deleteDomain = () => { cy.clickOptionWithText(domainName).waitTextVisible('Domains'); deleteFromDomainDropdown() - } + } -//Delete Unecessary Existing Domains - const deleteExisitingDomain = () => { - cy.get('a[href*="urn:li"] span[class^="ant-typography"]') - .should('be.visible') - .its('length') - .then((length) => { - for (let i = 0; i < length - 1; i++) { - cy.get('a[href*="urn:li"] span[class^="ant-typography"]') - .should('be.visible') - .first() - .click({ force: true }); - deleteFromDomainDropdown(); - } - }); - cy.waitTextVisible('My custom domain'); - } +const verifyEditAndPerformAddAndRemoveActionForDomain = (entity, action, text, body) =>{ + cy.clickOptionWithText(entity) + cy.clickOptionWithText(action) + cy.get('[data-testid="tag-term-modal-input"]').type(text) + cy.get('[data-testid="tag-term-option"]').contains(text).click() + cy.clickOptionWithText(body) + cy.get('[data-testid="add-tag-term-from-modal-btn"]').click() + cy.waitTextVisible(text) + } -describe("nested domains test", () => { +const clearAndType = (text) =>{ + cy.get('[role="textbox"]').click().clear().type(text) + } + +const clearAndDelete = () =>{ + cy.clickOptionWithText("Edit") + cy.get('[role="textbox"]').click().clear() + cy.clickOptionWithTestId("description-editor-save-button") + cy.waitTextVisible('No documentation') + cy.mouseover('.ant-list-item-meta-content') + cy.get('[aria-label="delete"]').click() + cy.waitTextVisible('Link Removed') + } + +describe("Verify nested domains test functionalities", () => { beforeEach (() => { cy.loginWithCredentials(); cy.goToDomainList(); }); - it("Create a new domain", () => { - deleteExisitingDomain() + it("Verify Create a new domain", () => { + deleteExisitingDomain() + cy.get('a[href*="urn:li"] span[class^="ant-typography"]') + .should('be.visible') createDomain(); cy.waitTextVisible("Domains"); }); - it("Move domain root level to parent level", () => { + it ("verify Move domain root level to parent level", () => { cy.waitTextVisible(domainName) moveDomaintoRootLevel(); cy.waitTextVisible("Moved Domain!") @@ -77,18 +109,117 @@ describe("nested domains test", () => { cy.waitTextVisible("1 sub-domain"); }); - it("Move domain parent level to root level", () => { + it("Verify Move domain parent level to root level", () => { moveDomaintoParent(); cy.waitTextVisible("Moved Domain!") cy.goToDomainList(); cy.waitTextVisible(domainName); - cy.waitTextVisible(domainDescription); }); - it("Remove the domain", () => { + it("Verify Documentation tab by adding editing Description and adding link", () => { + cy.clickOptionWithText(domainName) + cy.clickOptionWithId('#rc-tabs-0-tab-Documentation') + cy.clickFirstOptionWithText("Add Documentation") + clearAndType("Test added") + cy.clickOptionWithTestId("description-editor-save-button") + cy.waitTextVisible('Description Updated') + 
cy.waitTextVisible('Test added') + cy.clickFirstOptionWithTestId("add-link-button") + cy.waitTextVisible("Add Link") + cy.enterTextInTestId("add-link-modal-url", 'www.test.com') + cy.enterTextInTestId("add-link-modal-label", 'Test Label') + cy.clickOptionWithTestId("add-link-modal-add-button") + cy.waitTextVisible("Test Label") + cy.goToDomainList(); + cy.waitTextVisible("Test added") + cy.clickOptionWithText(domainName) + cy.clickOptionWithText("Documentation") + clearAndDelete() + }) + + it("Verify Right side panel functionalities", () => { + cy.clickOptionWithText(domainName) + cy.waitTextVisible("Filters") + cy.clickOptionWithText("Add Documentation") + clearAndType("Test documentation") + cy.clickOptionWithTestId("description-editor-save-button") + cy.ensureTextNotPresent("Add Documentation") + cy.waitTextVisible('Test documentation') + cy.clickFirstOptionWithSpecificTestId("add-link-button", 1) + cy.waitTextVisible("URL") + cy.enterTextInTestId("add-link-modal-url", 'www.test.com') + cy.enterTextInTestId("add-link-modal-label", 'Test Label') + cy.clickOptionWithTestId("add-link-modal-add-button") + cy.waitTextVisible("Test Label") + cy.clickOptionWithTestId("add-owners-button") + cy.waitTextVisible("Find a user or group") + cy.clickTextOptionWithClass(".rc-virtual-list-holder-inner", "DataHub") + cy.clickOptionWithText("Find a user or group") + cy.clickOptionWithId('#addOwnerButton') + cy.waitTextVisible("DataHub") + cy.goToDomainList(); + cy.waitTextVisible("Test documentation") + cy.waitTextVisible("DataHub") + cy.clickOptionWithText(domainName) + cy.clickOptionWithText("Documentation") + clearAndDelete() + }) + + it("Verify Edit Domain Name", () => { + cy.clickFirstOptionWithText(domainName) + cy.clickOptionWithText('Filters') + + //edit name + cy.get('.anticon-edit').eq(0).click().then(() => { + cy.get('.ant-typography-edit-content').type(" Edited").type('{enter}'); + }); + cy.waitTextVisible(domainName + " Edited") + }) + + it("Verify Remove the domain", () => { deleteDomain(); cy.goToDomainList(); cy.ensureTextNotPresent(domainName); - cy.ensureTextNotPresent(domainDescription); }); + + it('Verify Add and delete sub domain', () => { + cy.clickFirstOptionWithText('Marketing') + cy.clickOptionWithText('Filters') + createDomain(); + cy.ensureTextNotPresent('Created domain!') + getDomainList('Marketing') + cy.clickOptionWithText(domainName) + deleteFromDomainDropdown() + cy.ensureTextNotPresent(domainName) + }) + + it('Verify entities tab with adding and deleting assets and performing some actions', () => { + cy.clickFirstOptionWithText('Marketing'); + cy.clickOptionWithText('Add assets'); + cy.waitTextVisible("Add assets to Domain"); + cy.enterTextInSpecificTestId("search-bar", 3, 'Baz Chart 1') + cy.clickOptionWithSpecificClass('.ant-checkbox', 1) + cy.clickOptionWithId('#continueButton') + cy.waitTextVisible("Added assets to Domain!") + cy.openThreeDotMenu() + cy.clickOptionWithText("Edit") + cy.clickOptionWithSpecificClass('.ant-checkbox', 1) + verifyEditAndPerformAddAndRemoveActionForDomain('Tags', 'Add tags', 'Cypress', 'Add Tags') + cy.clickOptionWithText('Baz Chart 1') + cy.waitTextVisible("Cypress") + cy.waitTextVisible("Marketing") + cy.go('back') + cy.openThreeDotMenu() + cy.clickOptionWithText("Edit") + cy.clickOptionWithSpecificClass('.ant-checkbox', 1) + verifyEditAndPerformAddAndRemoveActionForDomain('Tags', 'Remove tags', 'Cypress', 'Remove Tags') + cy.clickTextOptionWithClass('.ant-dropdown-trigger', 'Domain') + cy.clickOptionWithText('Unset Domain') + 
cy.clickOptionWithText("Yes"); + cy.clickOptionWithText('Baz Chart 1') + cy.waitTextVisible('Dashboards') + cy.reload() + cy.ensureTextNotPresent("Cypress") + cy.ensureTextNotPresent("Marketing") + }) }); diff --git a/smoke-test/tests/cypress/cypress/support/commands.js b/smoke-test/tests/cypress/cypress/support/commands.js index 51b06a24c1921..3f3a8fe94f962 100644 --- a/smoke-test/tests/cypress/cypress/support/commands.js +++ b/smoke-test/tests/cypress/cypress/support/commands.js @@ -23,15 +23,15 @@ export function getTimestampMillisNumDaysAgo (numDays) { Cypress.Commands.add('login', () => { - cy.request({ - method: 'POST', - url: '/logIn', - body: { - username: Cypress.env('ADMIN_USERNAME'), - password: Cypress.env('ADMIN_PASSWORD'), - }, - retryOnStatusCodeFailure: true, - }); + cy.request({ + method: 'POST', + url: '/logIn', + body: { + username: Cypress.env('ADMIN_USERNAME'), + password: Cypress.env('ADMIN_PASSWORD'), + }, + retryOnStatusCodeFailure: true, + }); }) Cypress.Commands.add("loginWithCredentials", (username, password) => { @@ -163,10 +163,18 @@ Cypress.Commands.add("openThreeDotDropdown", () => { cy.clickOptionWithTestId("entity-header-dropdown") }); +Cypress.Commands.add("openThreeDotMenu", () => { + cy.clickOptionWithTestId("three-dot-menu") +}); + Cypress.Commands.add("clickOptionWithText", (text) => { cy.contains(text).should('be.visible').click(); }); +Cypress.Commands.add("clickFirstOptionWithText", (text) => { + cy.contains(text).first().click(); +}); + Cypress.Commands.add("clickOptionWithTextToScrollintoView", (text) => { cy.contains(text).scrollIntoView().click(); }); @@ -222,6 +230,13 @@ Cypress.Commands.add("getWithTestId", (id) => { return cy.get(selectorWithtestId(id)); }); +Cypress.Commands.add("clickOptionWithId", (id) => { + cy.get(id).click() +}) + +Cypress.Commands.add("enterTextInSpecificTestId", (id, value, text) => { + cy.get(selectorWithtestId(id)).eq(value).type(text); +}) Cypress.Commands.add("enterTextInTestId", (id, text) => { cy.get(selectorWithtestId(id)).type(text); }) @@ -238,6 +253,21 @@ Cypress.Commands.add("clickFirstOptionWithTestId", (id) => { }); }) +Cypress.Commands.add("clickFirstOptionWithSpecificTestId", (id,value) => { + cy.get(selectorWithtestId(id)).eq(value).click({ + force: true, + }); +}) + +Cypress.Commands.add("clickOptionWithSpecificClass", (locator, value) => { + cy.get(locator).should('be.visible') + cy.get(locator).eq(value).click(); +}) + +Cypress.Commands.add("clickTextOptionWithClass", (locator, text) => { + cy.get(locator).should('be.visible').contains(text).click({force:true}) +}) + Cypress.Commands.add("hideOnboardingTour", () => { cy.get('body').type("{ctrl} {meta} h"); }); From f6e96dc0d3b280a693530ab3ae545fbde1943146 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Thu, 8 Feb 2024 20:51:12 +0530 Subject: [PATCH 499/792] feat(token): helper to debug owner of raw token (#9793) --- .../datahub/graphql/GmsGraphQLEngine.java | 8 +- .../auth/GetAccessTokenMetadataResolver.java | 59 ++++++++++ .../src/main/resources/auth.graphql | 6 + .../token/StatefulTokenService.java | 16 ++- .../tokens/revokable_access_token_test.py | 108 +++++++++++------- 5 files changed, 142 insertions(+), 55 deletions(-) create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 
28b3a982c7b28..3cb1d7ab9e7de 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
@@ -109,10 +109,7 @@
 import com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver;
 import com.linkedin.datahub.graphql.resolvers.assertion.DeleteAssertionResolver;
 import com.linkedin.datahub.graphql.resolvers.assertion.EntityAssertionsResolver;
-import com.linkedin.datahub.graphql.resolvers.auth.CreateAccessTokenResolver;
-import com.linkedin.datahub.graphql.resolvers.auth.GetAccessTokenResolver;
-import com.linkedin.datahub.graphql.resolvers.auth.ListAccessTokensResolver;
-import com.linkedin.datahub.graphql.resolvers.auth.RevokeAccessTokenResolver;
+import com.linkedin.datahub.graphql.resolvers.auth.*;
 import com.linkedin.datahub.graphql.resolvers.browse.BrowsePathsResolver;
 import com.linkedin.datahub.graphql.resolvers.browse.BrowseResolver;
 import com.linkedin.datahub.graphql.resolvers.browse.EntityBrowsePathsResolver;
@@ -931,6 +928,9 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
         .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient))
         .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService))
         .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient))
+        .dataFetcher(
+            "getAccessTokenMetadata",
+            new GetAccessTokenMetadataResolver(statefulTokenService, this.entityClient))
         .dataFetcher("container", getResolver(containerType))
         .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient))
         .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient))
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java
new file mode 100644
index 0000000000000..c3e14565e0e27
--- /dev/null
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java
@@ -0,0 +1,59 @@
+package com.linkedin.datahub.graphql.resolvers.auth;
+
+import com.datahub.authentication.token.StatefulTokenService;
+import com.google.common.collect.ImmutableList;
+import com.linkedin.datahub.graphql.QueryContext;
+import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
+import com.linkedin.datahub.graphql.exception.AuthorizationException;
+import com.linkedin.datahub.graphql.generated.AccessTokenMetadata;
+import com.linkedin.datahub.graphql.types.auth.AccessTokenMetadataType;
+import com.linkedin.entity.client.EntityClient;
+import graphql.execution.DataFetcherResult;
+import graphql.schema.DataFetcher;
+import graphql.schema.DataFetchingEnvironment;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class GetAccessTokenMetadataResolver
+    implements DataFetcher<CompletableFuture<AccessTokenMetadata>> {
+
+  private final StatefulTokenService _tokenService;
+  private final EntityClient _entityClient;
+
+  public GetAccessTokenMetadataResolver(
+      final StatefulTokenService tokenService, EntityClient entityClient) {
+    _tokenService = tokenService;
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public CompletableFuture<AccessTokenMetadata> get(final DataFetchingEnvironment environment)
+      throws Exception {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final QueryContext context = environment.getContext();
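+          // The raw token from the query argument is never used directly as an
+          // entity key: it is hashed by the stateful token service and the
+          // hash is resolved to an AccessTokenMetadata entity. The
+          // canManageTokens check below gates this lookup, since the result
+          // reveals the token's owner and actor.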
+          final String token = environment.getArgument("token");
+          log.info("User {} requesting access token metadata information.", context.getActorUrn());
+          if (!AuthorizationUtils.canManageTokens(context)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+
+          AccessTokenMetadataType metadataType = new AccessTokenMetadataType(_entityClient);
+          final String tokenHash = _tokenService.hash(token);
+          final String tokenUrn = _tokenService.tokenUrnFromKey(tokenHash).toString();
+          try {
+            List<DataFetcherResult<AccessTokenMetadata>> batchLoad =
+                metadataType.batchLoad(ImmutableList.of(tokenUrn), context);
+            if (batchLoad.isEmpty()) {
+              return null;
+            }
+            return batchLoad.get(0).getData();
+          } catch (Exception e) {
+            throw new RuntimeException(e);
+          }
+        });
+  }
+}
diff --git a/datahub-graphql-core/src/main/resources/auth.graphql b/datahub-graphql-core/src/main/resources/auth.graphql
index b76aa132c219c..c7dc6be137bea 100644
--- a/datahub-graphql-core/src/main/resources/auth.graphql
+++ b/datahub-graphql-core/src/main/resources/auth.graphql
@@ -11,6 +11,12 @@ extend type Query {
   List access tokens stored in DataHub.
   """
   listAccessTokens(input: ListAccessTokenInput!): ListAccessTokenResult!
+
+  """
+  Fetches the metadata of an access token.
+  This is useful to debug when you have a raw token but don't know the actor.
+  """
+  getAccessTokenMetadata(token: String!): AccessTokenMetadata!
 }
 
 extend type Mutation {
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
index 0d1da4a7687ba..50e357331bb8f 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
@@ -16,12 +16,7 @@
 import com.linkedin.metadata.utils.AuditStampUtils;
 import com.linkedin.metadata.utils.GenericRecordUtils;
 import com.linkedin.mxe.MetadataChangeProposal;
-import java.util.Base64;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
+import java.util.*;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import javax.annotation.Nonnull;
@@ -58,8 +53,7 @@ public StatefulTokenService(
         new CacheLoader<String, Boolean>() {
           @Override
           public Boolean load(final String key) {
-            final Urn accessUrn =
-                Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key);
+            final Urn accessUrn = tokenUrnFromKey(key);
             return !_entityService.exists(accessUrn, true);
           }
         });
@@ -173,10 +167,14 @@ public TokenClaims validateAccessToken(@Nonnull String accessToken) throws Token
     }
   }
 
+  public Urn tokenUrnFromKey(String tokenHash) {
+    return Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash);
+  }
+
   public void revokeAccessToken(@Nonnull String hashedToken) throws TokenException {
     try {
       if (!_revokedTokenCache.get(hashedToken)) {
-        final Urn tokenUrn = Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, hashedToken);
+        final Urn tokenUrn = tokenUrnFromKey(hashedToken);
         _entityService.deleteUrn(tokenUrn);
         _revokedTokenCache.put(hashedToken, true);
         return;
diff --git a/smoke-test/tests/tokens/revokable_access_token_test.py b/smoke-test/tests/tokens/revokable_access_token_test.py
index 10332b32b9caf..6e8deb41f177e 100644
--- a/smoke-test/tests/tokens/revokable_access_token_test.py
+++ 
b/smoke-test/tests/tokens/revokable_access_token_test.py @@ -40,10 +40,10 @@ def custom_user_setup(): # Test getting the invite token get_invite_token_json = { - "query": """query getInviteToken($input: GetInviteTokenInput!) {\n - getInviteToken(input: $input){\n - inviteToken\n - }\n + "query": """query getInviteToken($input: GetInviteTokenInput!) { + getInviteToken(input: $input){ + inviteToken + } }""", "variables": {"input": {}}, } @@ -131,6 +131,7 @@ def access_token_setup(): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_admin_can_create_list_and_revoke_tokens(wait_for_healthchecks): admin_session = login_as(admin_user, admin_pass) + admin_user_urn = f"urn:li:corpuser:{admin_user}" # Using a super account, there should be no tokens res_data = listAccessTokens(admin_session) @@ -140,19 +141,26 @@ def test_admin_can_create_list_and_revoke_tokens(wait_for_healthchecks): assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0 # Using a super account, generate a token for itself. - res_data = generateAccessToken_v2(admin_session, f"urn:li:corpuser:{admin_user}") + res_data = generateAccessToken_v2(admin_session, admin_user_urn) assert res_data assert res_data["data"] assert res_data["data"]["createAccessToken"] assert res_data["data"]["createAccessToken"]["accessToken"] assert ( - res_data["data"]["createAccessToken"]["metadata"]["actorUrn"] - == f"urn:li:corpuser:{admin_user}" + res_data["data"]["createAccessToken"]["metadata"]["actorUrn"] == admin_user_urn ) + access_token = res_data["data"]["createAccessToken"]["accessToken"] admin_tokenId = res_data["data"]["createAccessToken"]["metadata"]["id"] # Sleep for eventual consistency wait_for_writes_to_sync() + res_data = getAccessTokenMetadata(admin_session, access_token) + assert res_data + assert res_data["data"] + assert res_data["data"]["getAccessTokenMetadata"] + assert res_data["data"]["getAccessTokenMetadata"]["ownerUrn"] == admin_user_urn + assert res_data["data"]["getAccessTokenMetadata"]["actorUrn"] == admin_user_urn + # Using a super account, list the previously created token. res_data = listAccessTokens(admin_session) assert res_data @@ -160,12 +168,10 @@ def test_admin_can_create_list_and_revoke_tokens(wait_for_healthchecks): assert res_data["data"]["listAccessTokens"]["total"] is not None assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 1 assert ( - res_data["data"]["listAccessTokens"]["tokens"][0]["actorUrn"] - == f"urn:li:corpuser:{admin_user}" + res_data["data"]["listAccessTokens"]["tokens"][0]["actorUrn"] == admin_user_urn ) assert ( - res_data["data"]["listAccessTokens"]["tokens"][0]["ownerUrn"] - == f"urn:li:corpuser:{admin_user}" + res_data["data"]["listAccessTokens"]["tokens"][0]["ownerUrn"] == admin_user_urn ) # Check that the super account can revoke tokens that it created @@ -396,17 +402,17 @@ def test_non_admin_can_not_generate_tokens_for_others(wait_for_healthchecks): def generateAccessToken_v2(session, actorUrn): # Create new token json = { - "query": """mutation createAccessToken($input: CreateAccessTokenInput!) {\n - createAccessToken(input: $input) {\n - accessToken\n - metadata {\n - id\n - actorUrn\n - ownerUrn\n - name\n - description\n + "query": """mutation createAccessToken($input: CreateAccessTokenInput!) 
{ + createAccessToken(input: $input) { + accessToken + metadata { + id + actorUrn + ownerUrn + name + description } - }\n + } }""", "variables": { "input": { @@ -434,18 +440,18 @@ def listAccessTokens(session, filters=[]): input["filters"] = filters json = { - "query": """query listAccessTokens($input: ListAccessTokenInput!) {\n - listAccessTokens(input: $input) {\n - start\n - count\n - total\n - tokens {\n - urn\n - id\n - actorUrn\n - ownerUrn\n - }\n - }\n + "query": """query listAccessTokens($input: ListAccessTokenInput!) { + listAccessTokens(input: $input) { + start + count + total + tokens { + urn + id + actorUrn + ownerUrn + } + } }""", "variables": {"input": input}, } @@ -458,7 +464,7 @@ def listAccessTokens(session, filters=[]): def revokeAccessToken(session, tokenId): # Revoke token json = { - "query": """mutation revokeAccessToken($tokenId: String!) {\n + "query": """mutation revokeAccessToken($tokenId: String!) { revokeAccessToken(tokenId: $tokenId) }""", "variables": {"tokenId": tokenId}, @@ -470,10 +476,28 @@ def revokeAccessToken(session, tokenId): return response.json() +def getAccessTokenMetadata(session, token): + json = { + "query": """ + query getAccessTokenMetadata($token: String!) { + getAccessTokenMetadata(token: $token) { + id + ownerUrn + actorUrn + } + }""", + "variables": {"token": token}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + + return response.json() + + def removeUser(session, urn): # Remove user json = { - "query": """mutation removeUser($urn: String!) {\n + "query": """mutation removeUser($urn: String!) { removeUser(urn: $urn) }""", "variables": {"urn": urn}, @@ -493,13 +517,13 @@ def listUsers(session): # list users json = { - "query": """query listUsers($input: ListUsersInput!) {\n - listUsers(input: $input) {\n - start\n - count\n - total\n - users {\n - username\n + "query": """query listUsers($input: ListUsersInput!) { + listUsers(input: $input) { + start + count + total + users { + username } } }""", From 33dc96bc24bb7e07cb1aeaefc593d1bf1bff4546 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Thu, 8 Feb 2024 11:42:02 -0500 Subject: [PATCH 500/792] fix(forms) Update styles of submit/confirm button and fix num input bug (#9797) --- .../profile/sidebar/FormInfo/utils.ts | 2 - .../MultiSelectInput.tsx | 8 +- .../MultipleStringInput.tsx | 4 +- .../StructuredPropertyPrompt/NumberInput.tsx | 8 +- .../StructuredPropertyPrompt.tsx | 180 ++++++++---------- .../useStructuredPropertyPrompt.ts | 17 +- .../prompts/usePromptCompletionInfo.ts | 62 ++++++ .../Dataset/Schema/useGetEntitySchema.tsx | 4 +- 8 files changed, 163 insertions(+), 122 deletions(-) create mode 100644 datahub-web-react/src/app/entity/shared/entityForm/prompts/usePromptCompletionInfo.ts diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts index 48206b411374c..4a0ae2131623f 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/FormInfo/utils.ts @@ -140,8 +140,6 @@ export function getCompletedPrompts(entityData: GenericEntityProperties | null) completedPrompts = completedPrompts.concat( forms?.completedForms?.flatMap((form) => (form.completedPrompts ? 
form.completedPrompts : [])) || [], ); - console.log('entityData', entityData); - console.log('getCompletedPrompts', completedPrompts); return completedPrompts; } diff --git a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx index 606430e68400f..f7d0ed2d21125 100644 --- a/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx +++ b/datahub-web-react/src/app/entity/shared/entityForm/prompts/StructuredPropertyPrompt/MultiSelectInput.tsx @@ -40,7 +40,9 @@ export default function MultiSelectInput({ allowedValues, selectedValues, }: Props) { - return allowedValues.length > 5 ? ( + const shouldShowSelectDropdown = allowedValues.length > 5; + + return shouldShowSelectDropdown ? ( + {incidentTypes.map((incidentType) => ( + + {incidentType.name} + + ))} + +
    + + {isOtherTypeSelected && ( + + + + )} + + + + +
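For reference, a minimal operator-side sketch of how the getAccessTokenMetadata query introduced in the token patch above can be used to trace a raw token back to its owner. The host, credentials, and the mystery-token placeholder below are illustrative assumptions in the style of the smoke tests, not part of any patch:

    # Sketch: identify the owner of a raw DataHub token of unknown provenance.
    # Assumes a default quickstart frontend on localhost:9002 and an admin
    # account permitted to manage tokens; all names here are illustrative.
    import requests

    FRONTEND_URL = "http://localhost:9002"

    session = requests.Session()
    session.post(
        f"{FRONTEND_URL}/logIn",
        json={"username": "datahub", "password": "datahub"},
    ).raise_for_status()

    mystery_token = "<raw token to identify>"
    resp = session.post(
        f"{FRONTEND_URL}/api/v2/graphql",
        json={
            "query": """query getAccessTokenMetadata($token: String!) {
                getAccessTokenMetadata(token: $token) { id ownerUrn actorUrn }
            }""",
            "variables": {"token": mystery_token},
        },
    )
    resp.raise_for_status()
    print(resp.json()["data"]["getAccessTokenMetadata"])

The ownerUrn and actorUrn fields in the response identify who created the token, mirroring the assertions in the smoke test above.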